+ ./ya make . -T --test-size=small --test-size=medium --stat --test-threads 52 --link-threads 12 -DUSE_EAT_MY_DATA --build release --sanitize=address -DDEBUGINFO_LINES_ONLY --bazel-remote-store --bazel-remote-base-uri http://cachesrv.internal:8081 --bazel-remote-username cache_user --bazel-remote-password-file /tmp/tmp.Ub2FXGwaso --bazel-remote-put --dist-cache-max-file-size=209715200 -A --retest --stat -DCONSISTENT_DEBUG --no-dir-outputs --test-failure-code 0 --build-all --cache-size 2TB --force-build-depends --log-file /home/runner/actions_runner/_work/ydb/ydb/tmp/results/ya_log.txt --evlog-file /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/ya_evlog.jsonl --junit /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/junit.xml --build-results-report /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/report.json --output /home/runner/actions_runner/_work/ydb/ydb/tmp/out
Output root is subdirectory of Arcadia root, this may cause non-idempotent build
Configuring dependencies for platform default-linux-x86_64-release-asan
Configuring dependencies for platform tools
[2 ymakes processing] [7938/7938 modules configured] [2070/4390 modules rendered]
[2 ymakes processing] [7938/7938 modules configured] [4313/4390 modules rendered]
[2 ymakes processing] [7938/7938 modules configured] [4390/4390 modules rendered]
Configuring dependencies for platform test_tool_tc1-global
[0 ymakes processing] [7944/7944 modules configured] [4390/4390 modules rendered]
Configuring tests execution
Configuring local and dist store caches
Configuration done. Preparing for execution
|33.3%| CLEANING SYMRES
| 3.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/ydb-core-fq-libs-row_dispatcher-format_handler-ut
[... successful [AR]/[CC]/[LD] {BAZEL_DOWNLOAD} progress entries trimmed here and at each [...] below; only FAILED, PREPARE, [CP]/[CF], and configuration lines are kept ...]
| 8.0%| PREPARE $(VCS)
[...]
| 8.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/ttl_tiering/ydb-tests-olap-ttl_tiering
[...]
|26.6%| PREPARE $(YMAKE_PYTHON3-4256832079)
[...]
|26.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yql/essentials/minikql/comp_nodes/llvm16/libminikql-comp_nodes-llvm16.a
[...]
|27.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc
[...]
|28.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename
[...]
|29.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/multinode/ydb-tests-functional-sqs-multinode
[...]
|30.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas
[...]
|36.6%| PREPARE $(LLD_ROOT-3808007503)
[...]
|39.3%| PREPARE $(CLANG_FORMAT-2212207123)
[...]
|43.2%| PREPARE $(PYTHON)
[...]
|45.3%| PREPARE $(FLAKE8_PY3-715603131)
[...]
|48.2%| PREPARE $(TEST_TOOL_HOST-sbr:8330113388)
[...]
|49.5%| [CP] {default-linux-x86_64, release, asan} $(B)/common_test.context
[...]
|49.2%| [CF] {default-linux-x86_64, release, asan} $(B)/library/cpp/build_info/build_info.cpp
[...]
|49.3%| [CF] {default-linux-x86_64, release, asan} $(B)/library/cpp/build_info/sandbox.cpp
[...]
{BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/AsmParser/libllvm16-lib-AsmParser.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/opt/libyql-dq-opt.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/lz4/libcontrib-libs-lz4.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/nghttp2/libcontrib-libs-nghttp2.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/MCJIT/liblib-ExecutionEngine-MCJIT.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/MC/MCParser/liblib-MC-MCParser.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/CodeGen/GlobalISel/liblib-CodeGen-GlobalISel.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/DebugInfo/Symbolize/liblib-DebugInfo-Symbolize.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/pcre/pcre16/liblibs-pcre-pcre16.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/pcre/pcre32/liblibs-pcre-pcre32.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/DebugInfo/DWARF/liblib-DebugInfo-DWARF.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Demangle/libllvm16-lib-Demangle.a |49.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_ifpresent.cpp |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/aws-sdk-cpp/aws-cpp-sdk-core/liblibs-aws-sdk-cpp-aws-cpp-sdk-core.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/PerfJITEvents/liblib-ExecutionEngine-PerfJITEvents.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/MC/libllvm16-lib-MC.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/Orc/Shared/libExecutionEngine-Orc-Shared.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/InstCombine/liblib-Transforms-InstCombine.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/Orc/TargetProcess/libExecutionEngine-Orc-TargetProcess.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/IRReader/libllvm16-lib-IRReader.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/IRPrinter/libllvm16-lib-IRPrinter.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/ObjCARC/liblib-Transforms-ObjCARC.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/t1ha/libcontrib-libs-t1ha.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/simdjson/libcontrib-libs-simdjson.a |49.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_multihopping.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_frombytes.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_match_recognize_rows_formatter.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_lazy_list.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_listfromrange.cpp |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/lua/libcontrib-libs-lua.a |49.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_length.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_join_dict.cpp |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/libllvm16-lib-Target.a |49.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_lookup.cpp |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/yaml/libcontrib-libs-yaml.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/container/libabseil-cpp-tstring-y_absl-container.a |49.5%| [CC] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_logical.cpp |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/nghttp3/libcontrib-libs-nghttp3.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/numeric/libabseil-cpp-tstring-y_absl-numeric.a |49.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_match_recognize_measure_arg.cpp |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/AggressiveInstCombine/liblib-Transforms-AggressiveInstCombine.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/profiling/libabseil-cpp-tstring-y_absl-profiling.a |49.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_match_recognize_list.cpp |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/nayuki_md5/libcontrib-libs-nayuki_md5.a |49.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_mapnext.cpp |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/synchronization/libabseil-cpp-tstring-y_absl-synchronization.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/TargetParser/libllvm16-lib-TargetParser.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/openldap/libraries/liblber/libopenldap-libraries-liblber.a |49.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_multimap.cpp |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/synchronization/libabseil-cpp-absl-synchronization.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/yaml-cpp/libcontrib-libs-yaml-cpp.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/flags/libabseil-cpp-absl-flags.a |49.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_match_recognize.cpp |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/log/libabseil-cpp-absl-log.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-compression/librestricted-aws-aws-c-compression.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/types/libabseil-cpp-absl-types.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Vectorize/liblib-Transforms-Vectorize.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Crypto/liblibs-poco-Crypto.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Net/liblibs-poco-Net.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/JSON/liblibs-poco-JSON.a |49.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_now.cpp |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/aws-sdk-cpp/aws-cpp-sdk-s3/liblibs-aws-sdk-cpp-aws-cpp-sdk-s3.a |49.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_null.cpp |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/chrono/librestricted-boost-chrono.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-s3/librestricted-aws-aws-c-s3.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/pcre/libcontrib-libs-pcre.a |49.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_nop.cpp |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/libcontrib-libs-protobuf.global.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/numeric/libabseil-cpp-absl-numeric.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/NetSSL_OpenSSL/liblibs-poco-NetSSL_OpenSSL.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tcmalloc/malloc_extension/liblibs-tcmalloc-malloc_extension.a |49.7%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/libs/poco/XML/liblibs-poco-XML.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/xxhash/libcontrib-libs-xxhash.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/snappy/libcontrib-libs-snappy.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/zstd/libcontrib-libs-zstd.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-http/librestricted-aws-aws-c-http.a |49.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_random.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_queue.cpp |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Util/liblibs-poco-Util.a |49.9%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_prepend.cpp |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/MCTargetDesc/libTarget-X86-MCTargetDesc.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/opentelemetry-proto/libcontrib-libs-opentelemetry-proto.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/iostreams/librestricted-boost-iostreams.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Analysis/libllvm16-lib-Analysis.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/dragonbox/libdragonbox.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/zlib/libcontrib-libs-zlib.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/sasl/libcontrib-libs-sasl.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/liblib-Target-X86.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/openssl/libcontrib-libs-openssl.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/debugging/libabseil-cpp-tstring-y_absl-debugging.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/uriparser/libcontrib-restricted-uriparser.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/ut_helpers/libtx-schemeshard-ut_helpers.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/CodeGen/SelectionDAG/liblib-CodeGen-SelectionDAG.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/base/libabseil-cpp-tstring-y_absl-base.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/fastlz/libblockcodecs-codecs-fastlz.global.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/archive/liblibrary-cpp-archive.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Utils/liblib-Transforms-Utils.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/zstd/libblockcodecs-codecs-zstd.global.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/re2/libcontrib-libs-re2.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/liblibrary-cpp-blockcodecs.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/program_options/librestricted-boost-program_options.a |49.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_reduce.cpp |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/s2n/librestricted-aws-s2n.a |49.9%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_removemember.cpp |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/zstd06/libcontrib-libs-zstd06.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/log/libabseil-cpp-tstring-y_absl-log.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/case_insensitive_string/liblibrary-cpp-case_insensitive_string.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/charset/liblibrary-cpp-charset.a |49.8%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/libs/llvm16/lib/Transforms/Instrumentation/liblib-Transforms-Instrumentation.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/flags/libabseil-cpp-tstring-y_absl-flags.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Foundation/liblibs-poco-Foundation.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/locale/librestricted-boost-locale.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/colorizer/liblibrary-cpp-colorizer.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/hash/libabseil-cpp-tstring-y_absl-hash.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/compproto/liblibrary-cpp-compproto.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/random/libabseil-cpp-tstring-y_absl-random.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/status/libabseil-cpp-tstring-y_absl-status.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/2d_array/libcpp-containers-2d_array.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/json/proto/libprotobuf-json-proto.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/io/libcpp-openssl-io.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/regex/hyperscan/libcpp-regex-hyperscan.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/retry/protos/libcpp-retry-protos.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/hash/libabseil-cpp-absl-hash.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/strings/libabseil-cpp-tstring-y_absl-strings.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/base/libabseil-cpp-absl-base.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/types/libabseil-cpp-tstring-y_absl-types.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/time/libabseil-cpp-tstring-y_absl-time.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/skiff/liblibrary-cpp-skiff.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/debugging/libabseil-cpp-absl-debugging.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/container/libabseil-cpp-absl-container.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/bzip2/libcpp-streams-bzip2.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/zc_memory_input/libcpp-streams-zc_memory_input.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/parse_size/libcpp-string_utils-parse_size.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/levenshtein_diff/libcpp-string_utils-levenshtein_diff.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/zstd/libcpp-streams-zstd.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gmock_in_unittest/libcpp-testing-gmock_in_unittest.global.a |49.9%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_varitem.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_toindexdict.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_time_order_recover.cpp |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/random/libabseil-cpp-absl-random.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/unittest/libcpp-testing-unittest.a |49.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_size.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_seq.cpp |49.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/threading/blocking_queue/libcpp-threading-blocking_queue.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/poor_man_openmp/libcpp-threading-poor_man_openmp.a |49.9%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_tobytes.cpp |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/queue/libcpp-threading-queue.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/cancellation/libcpp-threading-cancellation.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/status/libabseil-cpp-absl-status.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unified_agent_client/liblibrary-cpp-unified_agent_client.global.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/xml/init/libcpp-xml-init.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-common/librestricted-aws-aws-c-common.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/strings/libabseil-cpp-absl-strings.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/assert/libcpp-yt-assert.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/backtrace/cursors/libunwind/libbacktrace-cursors-libunwind.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/time/libabseil-cpp-absl-time.a |49.9%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_map_join.cpp |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/logging/libcpp-yt-logging.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/malloc/libcpp-yt-malloc.a |49.9%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_timezone.cpp |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson/liblibrary-cpp-yson.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-auth/librestricted-aws-aws-c-auth.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/libcontrib-libs-hyperscan.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-event-stream/librestricted-aws-aws-c-event-stream.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/system/libcpp-yt-system.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-mqtt/librestricted-aws-aws-c-mqtt.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/threading/libcpp-yt-threading.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/error/libcpp-yt-error.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-sdkutils/librestricted-aws-aws-c-sdkutils.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/IPO/liblib-Transforms-IPO.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-checksums/librestricted-aws-aws-checksums.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-io/librestricted-aws-aws-c-io.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/binsaver/liblibrary-cpp-binsaver.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/yson_string/libcpp-yt-yson_string.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/audit/libydb-core-audit.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/context/fcontext_impl/libboost-context-fcontext_impl.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/atomic/librestricted-boost-atomic.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/container/librestricted-boost-container.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/util/charset/libutil-charset.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/backup/controller/libcore-backup-controller.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/exception/librestricted-boost-exception.a |50.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/base/generated/libcore-base-generated.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/coroutine/librestricted-boost-coroutine.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/actorlib_impl/libydb-core-actorlib_impl.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/random/librestricted-boost-random.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/crypto/chacha_512/libblobstorage-crypto-chacha_512.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/IR/libllvm16-lib-IR.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/thread/librestricted-boost-thread.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/http-parser/libcontrib-restricted-http-parser.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-crt-cpp/librestricted-aws-aws-crt-cpp.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/zlib/libblockcodecs-codecs-zlib.global.a |50.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_tostring.cpp |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/dsproxy/mock/libblobstorage-dsproxy-mock.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/graph/librestricted-boost-graph.a |50.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_unwrap.cpp |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/lzma/libblockcodecs-codecs-lzma.global.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/other/libcore-blobstorage-other.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/snappy/libblockcodecs-codecs-snappy.global.a |50.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_udf.cpp |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/cityhash-1.0.2/libcontrib-restricted-cityhash-1.0.2.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/cache/liblibrary-cpp-cache.a |50.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_way.cpp |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/core/libcpp-blockcodecs-core.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/serialization/librestricted-boost-serialization.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/codecs/greedy_dict/libcpp-codecs-greedy_dict.a |50.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_take.cpp |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hullop/hullcompdelete/libvdisk-hullop-hullcompdelete.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/googletest/googletest/librestricted-googletest-googletest.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/comptable/liblibrary-cpp-comptable.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/thrift/libcontrib-restricted-thrift.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/balance/libblobstorage-vdisk-balance.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/libvdisk-hulldb-compstrat.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/interop/libcpp-protobuf-interop.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/defrag/libblobstorage-vdisk-defrag.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/json/libcpp-protobuf-json.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/url/libcpp-string_utils-url.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/config/liblibrary-cpp-config.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/random_provider/liblibrary-cpp-random_provider.a |50.2%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/sighandler/liblibrary-cpp-sighandler.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Scalar/liblib-Transforms-Scalar.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/retry/liblibrary-cpp-retry.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/console/util/libcms-console-util.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/localrecovery/libblobstorage-vdisk-localrecovery.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/resource/liblibrary-cpp-resource.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/protos/libcore-config-protos.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/base/libydb-core-base.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/control/lib/libcore-control-lib.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/validation/libcore-config-validation.a |50.3%| {BAZEL_DOWNLOAD} $(B)/library/cpp/sanitizer/plugin/sanitizer.py.pyplugin |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/indent_text/libcpp-string_utils-indent_text.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/brotli/libcpp-streams-brotli.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/sse/liblibrary-cpp-sse.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/scheme/liblibrary-cpp-scheme.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/sliding_window/liblibrary-cpp-sliding_window.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/csv/libcpp-string_utils-csv.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/scrub/libblobstorage-vdisk-scrub.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lzma/libcpp-streams-lzma.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/syncer/libblobstorage-vdisk-syncer.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/icu/libcontrib-libs-icu.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/base64/libcpp-string_utils-base64.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/cli_base/libcli_base.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/composite_serial/libarrow-accessor-composite_serial.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/common/liblibrary-formats-arrow-accessor-common.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/composite/liblibrary-formats-arrow-accessor-composite.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/object_storage/inference/libexternal_sources-object_storage-inference.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/common/libformats-arrow-common.a |50.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_wide_condense.cpp |50.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/console/ut/ydb-core-cms-console-ut |50.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/future/libcpp-threading-future.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gmock_in_unittest/libcpp-testing-gmock_in_unittest.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/terminate_handler/liblibrary-cpp-terminate_handler.global.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/hook/libcpp-testing-hook.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/atomic/libcpp-threading-atomic.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/switch/libformats-arrow-switch.a |50.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_wide_combine.cpp |50.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/formats/arrow/program/libformats-arrow-program.global.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/xml/document/libcpp-xml-document.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/common/libcpp-testing-common.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/unittest_main/libcpp-testing-unittest_main.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/audit/events/liblibs-audit-events.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/time_provider/liblibrary-cpp-time_provider.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/grpc/libcontrib-libs-grpc.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpoint_storage/proto/liblibs-checkpoint_storage-proto.a |50.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unified_agent_client/liblibrary-cpp-unified_agent_client.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpointing_common/libfq-libs-checkpointing_common.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/uri/liblibrary-cpp-uri.a |50.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_wide_filter.cpp |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson/json/libcpp-yson-json.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/global/libcpp-yt-global.a |50.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_wide_map.cpp |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/backtrace/libcpp-yt-backtrace.a |50.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/exception/libcpp-yt-exception.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unified_agent_client/proto/libcpp-unified_agent_client-proto.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/type_info/liblibrary-cpp-type_info.a |50.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_wide_top_sort.cpp |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson/node/libcpp-yson-node.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/proto/liblibs-control_plane_storage-proto.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/gateway/libfq-libs-gateway.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/util/draft/libutil-draft.a |50.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_todict.cpp |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/common/librow_dispatcher-format_handler-common.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/yson/libcpp-yt-yson.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/string/libcpp-yt-string.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/misc/libcpp-yt-misc.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/graph_params/proto/liblibs-graph_params-proto.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/grpc/libfq-libs-grpc.a |50.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/ytalloc/api/libcpp-ytalloc-api.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/memory/libcpp-yt-memory.a |50.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/CodeGen/libllvm16-lib-CodeGen.a |50.5%| [AR] {BAZEL_DOWNLOAD} 
$(B)/tools/enum_parser/enum_serialization_runtime/libtools-enum_parser-enum_serialization_runtime.a |50.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_squeeze_state.cpp |50.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_source.cpp |50.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_skip.cpp |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/version/libversion_definition.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/cpu_clock/libcpp-yt-cpu_clock.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/libfq-libs-quota_manager.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson_pull/libyson_pull.a |50.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_sort.cpp |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/backup/common/libcore-backup-common.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/protos/liblibs-row_dispatcher-protos.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/shared_resources/libfq-libs-shared_resources.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/signer/libfq-libs-signer.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/cancelation/libcore-grpc_services-cancelation.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/backup/common/proto/libbackup-common-proto.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_streaming/libydb-core-grpc_streaming.a |50.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_squeeze_to_list.cpp |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/backup/impl/libcore-backup-impl.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/jaeger_tracing/libydb-core-jaeger_tracing.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/common/libcore-blobstorage-common.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/recovery/libvdisk-hulldb-recovery.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/test_connection/libfq-libs-test_connection.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/crypto/libcore-blobstorage-crypto.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/internal/liblibs-control_plane_storage-internal.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/events/libkqp-common-events.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/shutdown/libkqp-common-shutdown.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kesus/proxy/libcore-kesus-proxy.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/skeleton/libblobstorage-vdisk-skeleton.a |50.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/program/libformats-arrow-program.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/lwtrace_probes/libcore-blobstorage-lwtrace_probes.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/external_data_source/libgateway-behaviour-external_data_source.global.a |50.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/libvdisk-hulldb-cache_block.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/barriers/libvdisk-hulldb-barriers.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/bulksst_add/libvdisk-hulldb-bulksst_add.a |50.2%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/load_test/ut/ydb-core-load_test-ut |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/incrhuge/libcore-blobstorage-incrhuge.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/counters/libcore-kqp-counters.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/anubis_osiris/libblobstorage-vdisk-anubis_osiris.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/ut/common/libpersqueue-ut-common.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/base/libvdisk-hulldb-base.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/pdisk/mock/libblobstorage-pdisk-mock.a |50.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_proxy/libfq-libs-control_plane_proxy.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/libydb-core-kqp.global.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/local_rpc/libkqp-gateway-local_rpc.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/operations/libbehaviour-tablestore-operations.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/libgateway-behaviour-tablestore.global.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_config/events/liblibs-control_plane_config-events.a |50.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/ut/ydb-core-ymq-actor-ut |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/node_service/libcore-kqp-node_service.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/huge/libblobstorage-vdisk-huge.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/common/libblobstorage-vdisk-common.a |50.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/base/libcore-blobstorage-base.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/libcore-blobstorage-vdisk.a |50.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/ingress/libblobstorage-vdisk-ingress.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/libydb-core-external_sources.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/log_backend/libydb-core-log_backend.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/change_exchange/libydb-core-change_exchange.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/string/libstring_udf.global.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/libcore-kqp-workload_service.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/libvdisk-hulldb-fresh.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/scheme_cache_lib/libcore-client-scheme_cache_lib.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/compute_actor/libcore-kqp-compute_actor.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/cli_config_base/libcore-driver_lib-cli_config_base.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/debug/libydb-core-debug.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/partition_key_range/libcore-persqueue-partition_key_range.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/libvdisk-hulldb-generic.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/docapi/libydb-core-docapi.a |49.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/persqueue/purecalc/libcore-persqueue-purecalc.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/minikql_compile/libcore-client-minikql_compile.a |49.3%| PREPARE $(OS_SDK_ROOT-sbr:243881345) |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/base_utils/libbase_utils.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/ut/common/libkqp-ut-common.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/erasure/libydb-core-erasure.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blockstore/core/libcore-blockstore-core.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/schemeshard/libcore-protos-schemeshard.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/version/libversion.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/repl/libblobstorage-vdisk-repl.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/libydb-core-scheme.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/filestore/core/libcore-filestore-core.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/init/libcore-config-init.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/object_storage/libcore-external_sources-object_storage.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/serializer/libformats-arrow-serializer.global.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/plain/libarrow-accessor-plain.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/engine/minikql/libcore-engine-minikql.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/abstract/libarrow-accessor-abstract.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sub_columns/libarrow-accessor-sub_columns.global.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/hash/libformats-arrow-hash.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sparsed/libarrow-accessor-sparsed.global.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/host/libcore-kqp-host.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/synclog/libblobstorage-vdisk-synclog.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/serializer/libformats-arrow-serializer.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kesus/tablet/libcore-kesus-tablet.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/splitter/libformats-arrow-splitter.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/pg_tables/libcore-sys_view-pg_tables.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/save_load/libformats-arrow-save_load.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/protos/libcore-tablet_flat-protos.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/tablets/libcore-sys_view-tablets.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/libfq-libs-config.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/transformer/libformats-arrow-transformer.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/libydb-core-cms.global.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/engine/libydb-core-engine.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/session/libcolumnshard-bg_tasks-session.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/audit/libfq-libs-audit.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpoint_storage/events/liblibs-checkpoint_storage-events.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tracing/libydb-core-tracing.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/libydb-core-formats.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/common/libcore-sys_view-common.a |48.9%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/liblibs-config-protos.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/security/libydb-core-security.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/common/libfq-libs-common.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/libcolumnshard-common-protos.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/protos/libcolumnshard-blobs_action-protos.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/cancelation/protos/libgrpc_services-cancelation-protos.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sub_columns/libarrow-accessor-sub_columns.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/ydb/synchronization_service/libcompute-ydb-synchronization_service.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/table/libgateway-behaviour-table.global.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/nodewarden/libcore-blobstorage-nodewarden.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_reader/libtx-columnshard-blobs_reader.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/abstract/libchanges-compaction-abstract.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/events/liblibs-row_dispatcher-events.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/common/liblibs-compute-common.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hullop/libblobstorage-vdisk-hullop.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/db_schema/libfq-libs-db_schema.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/events/liblibs-control_plane_storage-events.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_proxy/events/liblibs-control_plane_proxy-events.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpointing/libfq-libs-checkpointing.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/local_db/libcolumnshard-data_accessor-local_db.global.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/run/librun.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/events/libfq-libs-events.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/dictionary/libformats-arrow-dictionary.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/executer_actor/libcore-kqp-executer_actor.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/source/transactions/libdata_sharing-source-transactions.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/processor/libcore-sys_view-processor.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/db_id_async_resolver_impl/libfq-libs-db_id_async_resolver_impl.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/destination/session/libdata_sharing-destination-session.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/initiator/controller/libdata_sharing-initiator-controller.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/initiator/controller/libdata_sharing-initiator-controller.global.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_proxy/actors/liblibs-control_plane_proxy-actors.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/destination/transactions/libdata_sharing-destination-transactions.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/modification/tasks/libdata_sharing-modification-tasks.a 
|48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/ydb/control_plane/libcompute-ydb-control_plane.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/libcore-formats-arrow.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/actualization/controller/libchanges-actualization-controller.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/proto/liblibs-quota_manager-proto.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/counters/libengines-changes-counters.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/cli_utils/libcli_utils.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/utils/liblibs-rate_limiter-utils.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/mock/libfq-libs-mock.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/read_rule/libfq-libs-read_rule.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/ydb/liblibs-compute-ydb.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/test_tablet/libydb-core-test_tablet.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/abstract/libengines-changes-abstract.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpoint_storage/libfq-libs-checkpoint_storage.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_caching/libydb-core-grpc_caching.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/pdisk/libcore-blobstorage-pdisk.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/quoter_service/liblibs-rate_limiter-quoter_service.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/server/libcore-client-server.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/libreader-common_reader-iterator.global.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/predicate/libcolumnshard-engines-predicate.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mind/libydb-core-mind.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/result_formatter/libfq-libs-result_formatter.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/liblibs-row_dispatcher-format_handler.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/dsproxy/libcore-blobstorage-dsproxy.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/libreader-simple_reader-constructor.global.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/shard/protos/libgraph-shard-protos.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/actors/libfq-libs-actors.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/private_client/libfq-libs-private_client.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/filters/librow_dispatcher-format_handler-filters.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/purecalc_compilation/liblibs-row_dispatcher-purecalc_compilation.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/parsers/librow_dispatcher-format_handler-parsers.a |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/tasks_packer/libfq-libs-tasks_packer.a |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blob_depot/libydb-core-blob_depot.a |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/compilation/libkqp-common-compilation.a |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/service/libcore-graph-service.a |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/libgateway-behaviour-tablestore.a |47.5%| 
[AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/libfq-libs-row_dispatcher.a |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/shared_resources/interface/liblibs-shared_resources-interface.a |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/io_formats/ydb_dump/libcore-io_formats-ydb_dump.a |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/common/libengines-scheme-common.a |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/shard/libcore-graph-shard.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/common/libscheme-defaults-common.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/protos/libscheme-defaults-protos.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/counters/libcore-grpc_services-counters.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/federated_query/libcore-kqp-federated_query.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/purecalc_no_pg_wrapper/liblibs-row_dispatcher-purecalc_no_pg_wrapper.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/tablet/libcore-grpc_services-tablet.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/abstract/libstorage-actualizer-abstract.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/scheme/libstorage-actualizer-scheme.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/chunks/libengines-storage-chunks.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/io_formats/cell_maker/libcore-io_formats-cell_maker.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/io_formats/arrow/scheme/libio_formats-arrow-scheme.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/count_min_sketch/libstorage-indexes-count_min_sketch.global.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/portions/extractor/libindexes-portions-extractor.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/protos/libcore-graph-protos.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom/libstorage-indexes-bloom.global.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/buffer/libkqp-common-buffer.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/libydb-core-cms.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/libgateway-behaviour-resource_pool_classifier.global.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/keyvalue/protos/libcore-keyvalue-protos.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/constructor/liboptimizer-lbuckets-constructor.global.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/ydb/libfq-libs-ydb.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/simple/libkqp-common-simple.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool/libgateway-behaviour-resource_pool.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/expr_nodes/libcore-kqp-expr_nodes.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/liboptimizer-lcbuckets-planner.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/external_data_source/libgateway-behaviour-external_data_source.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/optimizer/liboptimizer-sbuckets-optimizer.global.a |46.8%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/kqp/proxy_service/proto/libkqp-proxy_service-proto.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool/libgateway-behaviour-resource_pool.global.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/writer/libcolumnshard-engines-writer.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/writer/buffer/libengines-writer-buffer.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/abstract/libengines-scheme-abstract.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/console/libcore-cms-console.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/libcolumnshard-export-session.global.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/common/libkqp-workload_service-common.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/view/libgateway-behaviour-view.global.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/compile_service/libcore-kqp-compile_service.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/resources/libtx-columnshard-resources.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/view/libgateway-behaviour-view.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/tables/libcolumnshard-normalizer-tables.global.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tx_allocator/libcore-tx-tx_allocator.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/topics/libcore-kqp-topics.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/libcore-kqp-common.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/actors/libkqp-gateway-actors.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/physical/effects/libopt-physical-effects.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/storage/s3/libsession-storage-s3.global.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/operations/batch_builder/libcolumnshard-operations-batch_builder.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/resource_subscriber/libtx-columnshard-resource_subscriber.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/events/libcore-persqueue-events.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/ev_write/libtransactions-operators-ev_write.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/finalize_script_service/libcore-kqp-finalize_script_service.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/memory_controller/libydb-core-memory_controller.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/libcolumnshard-transactions-operators.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/tx_reader/libtx-columnshard-tx_reader.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/libgateway-behaviour-resource_pool_classifier.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/query_data/libcore-kqp-query_data.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/metering/libydb-core-metering.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/keyvalue/libydb-core-keyvalue.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/coordinator/protos/libtx-coordinator-protos.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/rm_service/libcore-kqp-rm_service.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/arrow/liblibs-apache-arrow.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/config/libcore-persqueue-config.a |46.4%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/datashard/libcore-tx-datashard.global.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/storage/libcore-sys_view-storage.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/codecs/libcore-persqueue-codecs.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/actors/libkqp-workload_service-actors.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/libcolumnshard-transactions-operators.global.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/libfq-libs-control_plane_storage.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/long_tx_service/public/libtx-long_tx_service-public.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/common/libtx-replication-common.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/operations/libtx-columnshard-operations.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/libcore-kqp-gateway.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/statistics/database/libcore-statistics-database.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/tx_chain/libolap-bg_tasks-tx_chain.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mon/libydb-core-mon.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mon_alloc/libydb-core-mon_alloc.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/pgproxy/protos/libcore-pgproxy-protos.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/common/libtx-schemeshard-common.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/resource_pools/libydb-core-resource_pools.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/public_http/libydb-core-public_http.global.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/show_create/libcore-sys_view-show_create.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/pgproxy/libydb-core-pgproxy.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/run_script_actor/libcore-kqp-run_script_actor.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/program/libcore-tx-program.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/public_http/protos/libcore-public_http-protos.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceshard/public/libtx-sequenceshard-public.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/libcore-scheme-protos.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/security/ldap_auth_provider/libcore-security-ldap_auth_provider.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/security/certificate_check/libcore-security-certificate_check.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme_types/libydb-core-scheme_types.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/session_actor/libcore-kqp-session_actor.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/sessions/libcore-sys_view-sessions.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/out/libcore-protos-out.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/common/liboperations-alter-common.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/local_pgwire/libydb-core-local_pgwire.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/libydb-core-sys_view.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/config_shards/libalter-in_store-config_shards.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/nodes/libcore-sys_view-nodes.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/options/libschemeshard-olap-options.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/resource_pool_classifiers/libcore-sys_view-resource_pool_classifiers.a |45.6%| 
[AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/writer/libcore-persqueue-writer.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/common/libschemeshard-olap-common.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/statistics/service/libcore-statistics-service.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceproxy/public/libtx-sequenceproxy-public.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/service/libcore-sys_view-service.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/runtime/libcore-kqp-runtime.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/query_stats/libcore-sys_view-query_stats.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/storages_manager/libcolumnshard-blobs_action-storages_manager.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/destination/events/libdata_sharing-destination-events.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceproxy/libcore-tx-sequenceproxy.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/partition_stats/libcore-sys_view-partition_stats.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/resource_pools/libcore-sys_view-resource_pools.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/local/libcolumnshard-blobs_action-local.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/protos/libcolumnshard-bg_tasks-protos.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/transaction/libengines-reader-transaction.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/transactions/libcolumnshard-bg_tasks-transactions.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/common/libcolumnshard-blobs_action-common.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/counters/libcolumnshard-blobs_action-counters.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/http_proxy/libydb-core-http_proxy.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/testlib/basics/libcore-testlib-basics.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/manager/libcolumnshard-bg_tasks-manager.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/events/libcolumnshard-blobs_action-events.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/statistics/aggregator/libcore-statistics-aggregator.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/load_test/libydb-core-load_test.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/common/session/libdata_sharing-common-session.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/libschemeshard-olap-operations.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/in_mem/libcolumnshard-data_accessor-in_mem.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/libtx-columnshard-blobs_action.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/counters/common/libcolumnshard-counters-common.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/abstract/libcolumnshard-data_accessor-abstract.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/auth/libcore-sys_view-auth.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/libtx-columnshard-common.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/bs/libcolumnshard-blobs_action-bs.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/common/context/libdata_sharing-common-context.a |45.2%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/in_mem/libcolumnshard-data_accessor-in_mem.global.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/local_db/libcolumnshard-data_accessor-local_db.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_locks/locks/libcolumnshard-data_locks-locks.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/libtx-columnshard-data_accessor.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_reader/libtx-columnshard-data_reader.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/initiator/status/libdata_sharing-initiator-status.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/transaction/libcolumnshard-blobs_action-transaction.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/plain/libchanges-compaction-plain.global.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kafka_proxy/libydb-core-kafka_proxy.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/portions/libreader-sys_view-portions.global.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/common/libchanges-compaction-common.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/modification/transactions/libdata_sharing-modification-transactions.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/plain/libchanges-compaction-plain.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/protos/libcolumnshard-engines-protos.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common/libengines-reader-common.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/counters/libtx-columnshard-counters.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/protos/libcolumnshard-data_sharing-protos.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/source/session/libdata_sharing-source-session.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/constructor/libreader-common_reader-constructor.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/libcore-tx-columnshard.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/abstract/libengines-reader-abstract.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/libreader-simple_reader-constructor.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/libreader-common_reader-iterator.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/libreader-plain_reader-constructor.global.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/optimizer/libreader-sys_view-optimizer.global.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/actor/libengines-reader-actor.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/constructor/libreader-sys_view-constructor.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/insert_table/libcolumnshard-engines-insert_table.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tracing/usage/libtx-tracing-usage.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/logic/slices/libsbuckets-logic-slices.a |43.5%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/granules/libreader-sys_view-granules.global.a |43.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/chunks/libreader-sys_view-chunks.global.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/counters/libstorage-actualizer-counters.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/locks/libcolumnshard-transactions-locks.global.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/tiering/libengines-scheme-tiering.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/abstract/libreader-sys_view-abstract.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/column/libengines-scheme-column.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bits_storage/libstorage-indexes-bits_storage.global.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bits_storage/libstorage-indexes-bits_storage.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/max/libstorage-indexes-max.global.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/libstorage-indexes-categories_bloom.global.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/libstorage-indexes-categories_bloom.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/libstorage-indexes-bloom_ngramm.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/versions/libengines-scheme-versions.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/tiering/libstorage-actualizer-tiering.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/logic/one_head/libsbuckets-logic-one_head.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/libscheme-indexes-abstract.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/abstract/libstorage-optimizer-abstract.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/liboptimizer-lcbuckets-constructor.global.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/planner/liboptimizer-lbuckets-planner.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/planner/liboptimizer-lbuckets-planner.global.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/logic/abstract/libsbuckets-logic-abstract.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/skip_index/libstorage-indexes-skip_index.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/portions/libstorage-indexes-portions.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/physical/libkqp-opt-physical.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/constructor/liboptimizer-sbuckets-constructor.global.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/common/liboptimizer-sbuckets-common.a |43.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/libreader-plain_reader-iterator.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/counters/liboptimizer-sbuckets-counters.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/libreader-simple_reader-iterator.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/testlib/libydb-core-testlib.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/hooks/abstract/libcolumnshard-hooks-abstract.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/index/liboptimizer-sbuckets-index.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/liboptimizer-lcbuckets-planner.global.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/events/libcolumnshard-export-events.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/protos/libcolumnshard-export-protos.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/libcolumnshard-export-session.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/storage/abstract/libsession-storage-abstract.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/splitter/abstract/libcolumnshard-splitter-abstract.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/selector/abstract/libsession-selector-abstract.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/locks/libcolumnshard-transactions-locks.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/subscriber/abstract/events/libsubscriber-abstract-events.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/libcore-kqp-opt.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/schema_version/libcolumnshard-normalizer-schema_version.global.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/loading/libtx-columnshard-loading.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/selector/backup/libsession-selector-backup.global.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/operations/common/libcolumnshard-operations-common.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/portion/libcolumnshard-normalizer-portion.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/subscriber/events/tables_erased/libsubscriber-events-tables_erased.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/abstract/libcolumnshard-normalizer-abstract.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/insert_table/libcolumnshard-normalizer-insert_table.global.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/tablet/libcolumnshard-normalizer-tablet.global.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/granule/libcolumnshard-normalizer-granule.global.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/splitter/libtx-columnshard-splitter.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/subscriber/abstract/subscriber/libsubscriber-abstract-subscriber.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/libtx-columnshard-transactions.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/storage/tier/libsession-storage-tier.global.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/tablet/libtx-columnshard-tablet.a |44.1%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tracing/service/libtx-tracing-service.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/operations/slice_builder/libcolumnshard-operations-slice_builder.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/protos/libcolumnshard-transactions-protos.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/transactions/libcolumnshard-transactions-transactions.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/limiter/grouped_memory/service/liblimiter-grouped_memory-service.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/conveyor/usage/libtx-conveyor-usage.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/data_events/common/libtx-data_events-common.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/limiter/usage/libtx-limiter-usage.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/conveyor/service/libtx-conveyor-service.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/limiter/grouped_memory/usage/liblimiter-grouped_memory-usage.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/libydb-core-tx.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/scheme_cache/libcore-tx-scheme_cache.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/transactions/libolap-bg_tasks-transactions.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/locks/libcore-tx-locks.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/libydb-core-persqueue.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/long_tx_service/libcore-tx-long_tx_service.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/priorities/usage/libtx-priorities-usage.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/ev_write/libtransactions-operators-ev_write.global.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/data_events/libcore-tx-data_events.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mind/hive/libcore-mind-hive.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/priorities/service/libtx-priorities-service.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/adapter/libolap-bg_tasks-adapter.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/events/libolap-bg_tasks-events.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/protos/libolap-bg_tasks-protos.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/limiter/service/libtx-limiter-service.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/time_cast/libcore-tx-time_cast.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/liboperations-alter-abstract.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/standalone/liboperations-alter-standalone.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/ydb_proxy/libtx-replication-ydb_proxy.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/column_families/libschemeshard-olap-column_families.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/manager/libschemeshard-olap-manager.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/provider/libcore-kqp-provider.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/layout/libschemeshard-olap-layout.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/columns/libschemeshard-olap-columns.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/common/libalter-in_store-common.a |43.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/resharding/libalter-in_store-resharding.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/service/libtx-replication-service.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/schema/libalter-in_store-schema.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/liboperations-alter-in_store.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/transfer/libalter-in_store-transfer.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tiering/tier/libtx-tiering-tier.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/ttl/libschemeshard-olap-ttl.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mind/bscontroller/libcore-mind-bscontroller.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/mediator/libcore-tx-mediator.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/table/libschemeshard-olap-table.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tiering/abstract/libtx-tiering-abstract.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sharding/libcore-tx-sharding.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/libstorage-indexes-bloom_ngramm.global.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/store/libschemeshard-olap-store.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/services/mounts/libcore-services-mounts.global.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/libydb-core-grpc_services.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/indexes/libschemeshard-olap-indexes.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/storage/interface/libqplayer-storage-interface.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/yarl/libpy3contrib-python-yarl.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tiering/libcore-tx-tiering.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/coordinator/libcore-tx-coordinator.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/config_clusters/libyt-lib-config_clusters.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/services/libpy3ydb-library-services.global.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/cores/libpy3library-python-cores.global.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/profiling/libyt-library-profiling.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sharding/libcore-tx-sharding.global.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/proto/libproviders-ydb-proto.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/libessentials-core-issue.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/lib/actors/libservices-lib-actors.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/http_download/libcore-file_storage-http_download.a |43.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/operation_queue_timer.h_serialized.cpp |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/portion/libcolumnshard-normalizer-portion.global.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/scheme_board/libcore-tx-scheme_board.a |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__clean_pathes.cpp |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__init_populator.cpp |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__background_cleaning.cpp |43.6%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_index.cpp |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/format/libsql-v1-format.a |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__serverless_storage_billing.cpp |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceshard/libcore-tx-sequenceshard.a |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_build_index.cpp |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/user_attributes.cpp |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_xxport__helpers.cpp |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_validate_ttl.cpp |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/libydb-core-tablet_flat.a |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import_scheme_getter.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_cdc_stream_common.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_cdc_stream_scan.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__get.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__list.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__forget.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index_tx_base.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__table_stats.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__progress.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__sync_update_tenants.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__state_changed_reply.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_initiate_build_index.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__root_data_erasure_manager.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp |42.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/controller/libtx-replication-controller.a |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_unsafe.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_view.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_index.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_kesus.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_copy_sequence.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__describe_scheme.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_backup_collection.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_continuous_backup.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_cancel_tx.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__background_compaction.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_external_table.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_cdc_stream.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__data_erasure_manager.cpp |42.6%| 
[CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__login.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__make_access_database_no_inheritable.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__borrowed_compaction.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__find_subdomain_path_id.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__backup_collection_common.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__delete_tablet_reply.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_extsubdomain.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__fix_bad_paths.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_cansel_build_index.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__list_users.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_external_data_source.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__conditional_erase.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_bsv.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_cdc_stream.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_blob_depot.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__notify.cpp |42.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_resource_pool.cpp |42.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__init_root.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_kesus.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_login.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_apply_build_index.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_fs.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__monitoring.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_replication.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_assign_bsv.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_backup_incremental_backup_collection.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_solomon.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_sequence.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_continuous_backup.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_subdomain.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_external_data_source.cpp |42.6%| 
[CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_bsv.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_cdc_stream.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_indexed_table.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_bsv.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_consistent_copy_tables.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_pq.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_resource_pool.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_copy_table.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_fs.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__init_schema.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_solomon.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_backup_backup_collection.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_cdc_stream.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_sequence.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_external_table.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_replication.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_lock.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_bsv.cpp |42.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_backup.cpp |42.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_rtmr.cpp |42.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_pq.cpp |42.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_restore_incremental_backup.cpp |42.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_view.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_backup_collection.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_db_changes.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_subdomain.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_solomon.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_index.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_extsubdomain.cpp 
|42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_external_table.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_continuous_backup.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_replication.cpp |41.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_external_data_source.cpp |41.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_indexed_table.cpp |41.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_fs.cpp |41.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_resource_pool.cpp |41.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_pq.cpp |41.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_kesus.cpp |41.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.a |41.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_lock.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_memory_changes.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_sequence.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_subdomain.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_tables.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_sequence.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_restore.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_just_reject.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_table_index.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_mkdir.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_table.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_table.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_index.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__init.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__publish_to_scheme_board.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_split_merge.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_part.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__pq_stats.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_bg_tasks__list.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__upgrade_access_database.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_upgrade_subdomain.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__upgrade_schema.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__unmark_restore_tables.cpp |42.0%| [CC] 
{BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__table_stats_histogram.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_backup.cpp |41.7%| PREPARE $(WITH_JDK17-sbr:7832760150) |41.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__tenant_data_erasure_manager.cpp |41.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_restore_backup_collection.cpp |41.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_audit_log.cpp |41.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_audit_log_fragment.cpp |41.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_billing_helpers.cpp |41.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__cancel.cpp |41.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index.cpp |41.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_side_effects.cpp |41.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import_flow_proposals.cpp |41.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import__list.cpp |41.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export_uploaders.cpp |41.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export__forget.cpp |41.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_effective_acl.cpp |41.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export__cancel.cpp |41.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__create.cpp |41.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_domain_links.cpp |41.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export__create.cpp |41.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import__get.cpp |41.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export__get.cpp |41.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export__list.cpp |41.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export.cpp |41.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_identificators.cpp |41.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import__cancel.cpp |41.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export_flow_proposals.cpp |41.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import.cpp |41.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/schemeshard_info_types.h_serialized.cpp |41.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_svp_migration.cpp |41.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_self_pinger.cpp |41.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import__forget.cpp |41.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/schemeshard_types.h_serialized.cpp |41.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_path_element.cpp |41.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_types.cpp |41.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_utils.cpp |41.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import_scheme_query_executor.cpp |41.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import__create.cpp |41.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_shard_deleter.cpp |41.4%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/services/metadata/libydb-services-metadata.a |41.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_path.cpp |41.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_info_types.cpp |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/more-itertools/py3/libpy3python-more-itertools-py3.global.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/boto3/py3/libpy3python-boto3-py3.global.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/view/libydb-services-view.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/grpcio/py3/libpy3python-grpcio-py3.global.a |41.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_path_describer.cpp |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/auth/libyt-library-auth.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/extractor/libext_index-metadata-extractor.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/events/liblibs-quota_manager-events.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jedi/py3/libpy3python-jedi-py3.global.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/cbo/libessentials-core-cbo.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/bg_tasks/abstract/libservices-bg_tasks-abstract.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/retry/libimpl-ydb_internal-retry.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/setuptools/py3/libpy3python-setuptools-py3.global.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/deprecated/persqueue_v0/api/grpc/libapi-grpc-persqueue-deprecated.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/discovery/libydb-services-discovery.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/table/query_stats/libclient-table-query_stats.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/yson_helpers/libyt-lib-yson_helpers.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/operation/libclient-types-operation.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/init_yt_api/libyt-lib-init_yt_api.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/config/libydb-services-config.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/column_converters/libyt-library-column_converters.a |41.2%| PREPARE $(JDK_DEFAULT-472926544) |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/include/libclient-persqueue_public-include.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/proto/libsrc-client-proto.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/client/query_tracker_client/libyt-client-query_tracker_client.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/secret/libservices-metadata-secret.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/row_spec/libyt-lib-row_spec.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/kmp/libcpp-deprecated-kmp.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/query/impl/libclient-query-impl.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/backup/libydb-services-backup.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/asttokens/libpy3contrib-python-asttokens.global.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/skiff_ext/libyt-library-skiff_ext.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/providers/stat/expr_nodes/libproviders-stat-expr_nodes.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyYAML/py3/libpy3python-PyYAML-py3.global.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/attrs/py3/libpy3python-attrs-py3.global.a |41.2%| [AR] 
{BAZEL_DOWNLOAD} $(B)/contrib/python/aiosignal/libpy3contrib-python-aiosignal.global.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyYAML/py3/libpy3python-PyYAML-py3.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/settings/libessentials-sql-settings.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/iam_private/libsrc-client-iam_private.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/aiohttp/libpy3contrib-python-aiohttp.global.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/extension_common/libsrc-client-extension_common.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/deprecated/persqueue_v0/libservices-deprecated-persqueue_v0.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/sdk_core_access/libydb_sdk_core_access.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/out/libapi-protos-out.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/codec/codegen/llvm16/libcodec-codegen-llvm16.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/command_base/libydb_cli_command_base.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/dynamic_config/libydb-services-dynamic_config.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/aiohttp/libpy3contrib-python-aiohttp.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/libffi/libcontrib-restricted-libffi.a |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_impl.cpp |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/db_driver_state/libimpl-ydb_internal-db_driver_state.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/libproviders-common-schema.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/common_client/impl/libclient-common_client-impl.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cachetools/py3/libpy3python-cachetools-py3.global.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/prometheus/libmonlib-encode-prometheus.a |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt.cpp |41.1%| PREPARE $(CLANG-1922233694) |41.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_field_subset.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasource_exec.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasink_finalize.cpp |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/grpcio/py3/libpy3python-grpcio-py3.a |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_cbo_helpers.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_key_range.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_weak_fields.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_wide_flow.cpp |41.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_epoch.cpp |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Pygments/py3/libpy3python-Pygments-py3.global.a |41.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_dq_optimize.cpp |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/common/libproviders-yt-common.a |41.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_lambda.cpp |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/Lib/libpy3tools-python3-Lib.global.a |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cffi/py3/libpy3python-cffi-py3.a |39.5%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_join.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_push.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_dq_integration.cpp |39.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/udfs/common/clickhouse/client/libclickhouse_client_udf.global.a |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/impl/libclient-persqueue_public-impl.a |39.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_block_io_utils.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_content.cpp |39.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_partition.cpp |39.9%| PREPARE $(WITH_JDK-sbr:7832760150) |40.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_block_io_filter.cpp |40.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_write.cpp |40.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_dq_hybrid.cpp |40.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_merge.cpp |40.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_block_input.cpp |40.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_ytql.cpp |40.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_v1/actors/libservices-persqueue_v1-actors.a |40.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_misc.cpp |40.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_helper.cpp |40.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_fuse.cpp |40.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_map.cpp |40.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_block_output.cpp |40.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasink_constraints.cpp |40.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasource_constraints.cpp |40.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasink_trackable.cpp |40.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasink.cpp |40.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_sort.cpp |40.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasource_type_ann.cpp |40.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_ytflow_optimize.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasink_type_ann.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_op_hash.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_gateway.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_forwarding_gateway.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasource.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasink_exec.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_optimize.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_join_reorder.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_key.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_intent_determination.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yql/providers/yt/provider/yql_yt_load_table_meta.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/provider/yql_yt_op_settings.h_serialized.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_provider_impl.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_load_columnar_stats.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_mkql_compiler.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_provider_context.cpp |40.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_table_desc.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_op_settings.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_ytflow_integration.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_physical_optimize.cpp |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/xmltodict/py3/libpy3python-xmltodict-py3.global.a |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_io_discovery_walk_folders.cpp |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/frozenlist/libpy3contrib-python-frozenlist.a |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/module/libpy3python-symbols-module.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/antlr3_ansi/libv1-lexer-antlr3_ansi.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v0/lexer/libsql-v0-lexer.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/svn_version/libpy3library-python-svn_version.global.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/yatest_lib/libpy3python-testing-yatest_lib.global.a |40.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_provider.cpp |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/google-auth/py3/libpy3python-google-auth-py3.global.a |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/backtrace/libessentials-utils-backtrace.a |40.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_peephole.cpp |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/frozenlist/libpy3contrib-python-frozenlist.global.a |40.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_io_discovery.cpp |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/datastreams_helpers/libpy3tests-tools-datastreams_helpers.global.a |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/datastreams/libydb-services-datastreams.a |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/kqp_session_common/libimpl-ydb_internal-kqp_session_common.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/expr_nodes_gen/libessentials-core-expr_nodes_gen.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/operation/libclient-yc_private-operation.a |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/arrow_kernels/request/libcore-arrow_kernels-request.a |40.7%| PREPARE $(JDK17-472926544) |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jaraco.functools/py3/libpy3python-jaraco.functools-py3.global.a |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/future/py3/libpy3python-future-py3.global.a |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/actor_log/libyql-utils-actor_log.a |40.7%| PREPARE $(GDB) |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/matplotlib-inline/libpy3contrib-python-matplotlib-inline.global.a |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/arrow_resolve/libproviders-common-arrow_resolve.a |40.8%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/python/jsonschema/py3/libpy3python-jsonschema-py3.global.a |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/events/libproviders-solomon-events.a |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/qplayer/libyt-gateway-qplayer.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/botocore/py3/libpy3python-botocore-py3.global.a |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/statistics/libproviders-s3-statistics.a |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/udf_resolve/libproviders-common-udf_resolve.a |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jaraco.context/libpy3contrib-python-jaraco.context.global.a |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jaraco.text/libpy3contrib-python-jaraco.text.global.a |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/defs/libcore-file_storage-defs.a |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/gateway/libproviders-solomon-gateway.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/types/libpy3essentials-public-types.global.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/result_format/libessentials-public-result_format.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/libyql-essentials-sql.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/actors/libyql-utils-actors.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/expr_nodes/libproviders-ydb-expr_nodes.a |40.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_horizontal_join.cpp |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/format/libsql-v1-format.global.a |40.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_helpers.cpp |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jaraco.collections/libpy3contrib-python-jaraco.collections.global.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/public/ydb_issue/libyql-public-ydb_issue.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/types/libessentials-public-types.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/decimal/libessentials-public-decimal.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/proto/libproviders-solomon-proto.a |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/dynamic_counters/libcpp-monlib-dynamic_counters.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/parso/py3/libpy3python-parso-py3.global.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/solomon_accessor/grpc/libsolomon-solomon_accessor-grpc.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/types/binary_json/libessentials-types-binary_json.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pluggy/py3/libpy3python-pluggy-py3.global.a |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/config/libessentials-providers-config.a |41.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_table.cpp |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/oauthlib/libpy3contrib-python-oauthlib.global.a |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/codec/arrow/libcommon-codec-arrow.a |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/packaging/py3/libpy3python-packaging-py3.global.a |41.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_join_impl.cpp |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/actors/libproviders-solomon-actors.a |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ptyprocess/py3/libpy3python-ptyprocess-py3.global.a 
|41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ipdb/py3/libpy3python-ipdb-py3.global.a |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/accessservice/libclient-yc_private-accessservice.a |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jmespath/py3/libpy3python-jmespath-py3.global.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/solomon_accessor/client/libsolomon-solomon_accessor-client.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/plan/libyql-utils-plan.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ipython/py3/libpy3python-ipython-py3.global.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/iam/libclient-yc_private-iam.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/enum_codegen/libcpp-deprecated-enum_codegen.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/prompt-toolkit/py3/libpy3python-prompt-toolkit-py3.global.a |40.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/DateLUT.cpp |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_catalog/proto/libparser-pg_catalog-proto.a |40.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_logical_optimize.cpp |40.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/JSON.cpp |40.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/ISimpleTransform.cpp |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/io/libcpp-mapreduce-io.a |40.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_physical_finalizing.cpp |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/resourcemanager/libclient-yc_private-resourcemanager.a |40.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/RowInputFormatWithDiagnosticInfo.cpp |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/http/libcpp-mapreduce-http.a |40.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/CommonParsers.cpp |40.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTWithAlias.cpp |40.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/OutputStreamToOutputFormat.cpp |40.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/TabSeparatedRowInputFormat.cpp |40.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTWithElement.cpp |40.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTSubquery.cpp |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/kubernetes/libpy3contrib-python-kubernetes.global.a |40.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTShowGrantsQuery.cpp |40.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTShowTablesQuery.cpp |40.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTSettingsProfileElement.cpp |40.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ExpressionElementParsers.cpp |40.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTSetRoleQuery.cpp |40.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTSetQuery.cpp |40.7%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTSelectWithUnionQuery.cpp |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/provider/libproviders-ydb-provider.a |40.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTQualifiedAsterisk.cpp |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v0/libproto_ast-gen-v0.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/provider/libproviders-solomon-provider.a |40.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTQueryWithTableAndOutput.cpp |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/libapi-grpc-draft.a |40.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTLiteral.cpp |40.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Functions/IFunction.cpp |40.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Functions/extractTimeZoneFromFunctionArguments.cpp |40.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Functions/FunctionHelpers.cpp |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/experimental/libpublic-lib-experimental.a |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Functions/FunctionFactory.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Formats/registerFormats.cpp |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/clapack/part1/liblibs-clapack-part1.a |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Formats/NativeFormat.cpp |40.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/common/libcommon.a |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataStreams/BlockStreamProfileInfo.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/getLeastSupertype.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataStreams/ExecutionSpeedLimits.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationWrapper.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataStreams/ColumnGathererStream.cpp |40.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Core/SettingsFields.cpp |40.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Core/BaseSettings.cpp |40.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Core/SettingsEnums.cpp |40.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Compression/CompressionFactory.cpp |40.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Core/Field.cpp |40.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Compression/ICompressionCodec.cpp |40.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/libapi-grpc.a |40.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/quoteString.cpp |40.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/parseAddress.cpp |40.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/randomSeed.cpp |40.0%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/createHardLink.cpp |40.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/ProcfsMetricsProvider.cpp |40.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/PipeFDs.cpp |40.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/PODArray.cpp |39.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Core/Settings.cpp |39.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnString.cpp |39.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnFixedString.cpp |39.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/OpenSSLHelpers.cpp |40.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/Exception.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/CurrentThread.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/DNSResolver.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/IColumn.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/CurrentMetrics.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnTuple.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnNullable.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/FilterDescription.cpp |39.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnMap.cpp |39.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnFunction.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/IPv6ToBinary.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnConst.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/sleep.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/AggregateFunctions/IAggregateFunction.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/mremap.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnLowCardinality.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/preciseExp10.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnAggregateFunction.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/getThreadId.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/shift10.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/AggregateFunctions/AggregateFunctionFactory.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/getResource.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/demangle.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/getPageSize.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/errnoToString.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/DateLUTImpl.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/CurrentMemoryTracker.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnDecimal.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/Config/AbstractConfigurationComparison.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/StringRef.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/isLocalAddress.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnCompressed.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/ClickHouseRevision.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/Epoll.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/Allocator.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/MaskOperations.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/FieldVisitorWriteBinary.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/checkStackSize.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/IntervalKind.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/FieldVisitorDump.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/AlignedBuffer.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/Throttler.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/RemoteHostFilter.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/ErrorCodes.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnArray.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/ThreadStatus.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/ThreadPool.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/escapeForFileName.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/formatReadable.cpp |39.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/FieldVisitorToString.cpp |39.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/getNumberOfPhysicalCPUCores.cpp |39.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnVector.cpp |39.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/ZooKeeper/IKeeper.cpp |39.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/MemoryTracker.cpp |39.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/getMultipleKeysFromConfig.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/hex.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Compression/CompressionCodecNone.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/hasLinuxCapability.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/ThreadProfileEvents.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/ProfileEvents.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Compression/CompressedReadBuffer.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/formatIPv6.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Compression/CompressedReadBufferBase.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Core/ColumnWithTypeAndName.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/setThreadName.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/thread_local_rng.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Compression/CompressionCodecMultiple.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Compression/CompressedWriteBuffer.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Compression/CompressionCodecLZ4.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Core/BlockInfo.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Compression/LZ4_decompress_faster.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Compression/CompressedReadBufferFromFile.cpp |39.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationMap.cpp |39.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Core/Block.cpp |39.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Functions/FunctionsConversion.cpp |39.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeCustomGeo.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeEnum.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Core/NamesAndTypes.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataStreams/NativeBlockOutputStream.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataStreams/NativeBlockInputStream.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataStreams/SizeLimits.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataStreams/IBlockInputStream.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/EnumValues.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeArray.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataStreams/materializeBlock.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeDecimalBase.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeAggregateFunction.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeDate.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeCustomSimpleAggregateFunction.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeCustomIPv4AndIPv6.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeDate32.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeDateTime.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/TaskStatsInfoGetter.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeDateTime64.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeNested.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypesNumber.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeInterval.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeFixedString.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeFactory.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeFunction.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeMap.cpp |39.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeLowCardinalityHelpers.cpp |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/libydb-core-protos.a |39.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeTuple.cpp |39.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypesDecimal.cpp |39.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeNumberBase.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeNothing.cpp |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/libcore-tx-datashard.a |39.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeLowCardinality.cpp |39.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeUUID.cpp |39.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationNothing.cpp |39.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeString.cpp |39.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationLowCardinality.cpp |39.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationDate32.cpp |39.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/IDataType.cpp |39.4%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationAggregateFunction.cpp |39.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/NestedUtils.cpp |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/libessentials-sql-v1.a |39.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/ISerialization.cpp |39.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationDate.cpp |39.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationCustomSimpleText.cpp |39.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationArray.cpp |39.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationEnum.cpp |39.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationDecimalBase.cpp |39.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationDateTime.cpp |39.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationDateTime64.cpp |39.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationIP.cpp |39.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationFixedString.cpp |39.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationDecimal.cpp |39.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationUUID.cpp |39.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationTupleElement.cpp |39.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationTuple.cpp |39.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationNumber.cpp |39.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationNullable.cpp |39.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationString.cpp |39.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Formats/JSONEachRowUtils.cpp |39.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/registerDataTypeDateTime.cpp |38.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Formats/FormatFactory.cpp |38.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Formats/verbosePrintString.cpp |38.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Formats/ProtobufReader.cpp |38.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Formats/ProtobufWriter.cpp |38.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTInsertQuery.cpp |38.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTAlterQuery.cpp |38.9%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/ReadBufferFromFileDescriptor.cpp |38.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/CompressionMethod.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/MMapReadBufferFromFileWithCache.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/AsynchronousReadBufferFromFileDescriptor.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/MMapReadBufferFromFileDescriptor.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Functions/toFixedString.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/AsynchronousReadBufferFromFile.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/MMapReadBufferFromFile.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/DoubleConverter.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/ReadBufferFromFileBase.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/PeekableReadBuffer.cpp |38.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/MMappedFile.cpp |38.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/ReadBufferFromFile.cpp |38.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/Progress.cpp |38.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/OpenedFile.cpp |38.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/WriteBufferFromFile.cpp |38.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/MMappedFileDescriptor.cpp |38.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/ReadBufferFromMemory.cpp |38.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/UseSSL.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/ReadBufferFromPocoSocket.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/TimeoutSetter.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/SynchronousReader.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/ReadSettings.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Interpreters/TablesStatus.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/ThreadPoolReader.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Interpreters/ProfileEventsExt.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/WriteBufferFromFileBase.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/ReadHelpers.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTColumnDeclaration.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTDictionaryAttributeDeclaration.cpp |38.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTDictionary.cpp |38.9%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Interpreters/QueryThreadLog.cpp |38.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Interpreters/InternalTextLogsQueue.cpp |38.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTAsterisk.cpp |38.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTBackupQuery.cpp |38.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTExpressionList.cpp |38.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Interpreters/QueryLog.cpp |38.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTColumnsTransformers.cpp |38.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTColumnsMatcher.cpp |38.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTConstraintDeclaration.cpp |38.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTIdentifier.cpp |38.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTFunctionWithKeyValueArguments.cpp |39.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTCreateQuery.cpp |39.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTDatabaseOrNone.cpp |39.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTIndexDeclaration.cpp |39.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTFunction.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTKillQueryQuery.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTPartition.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTNameTypePair.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTOrderByElement.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTOptimizeQuery.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTProjectionSelectQuery.cpp |38.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTRolesOrUsersSet.cpp |38.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTWindowDefinition.cpp |38.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTTTLElement.cpp |38.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTProjectionDeclaration.cpp |38.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTSampleRatio.cpp |38.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTTablesInSelectQuery.cpp |38.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTUserNameWithHost.cpp |38.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTSystemQuery.cpp |38.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserSystemQuery.cpp |38.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTSelectQuery.cpp 
|38.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/TSKVRowOutputFormat.cpp |39.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserInsertQuery.cpp |39.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/InsertQuerySettingsPushDownVisitor.cpp |39.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserCheckQuery.cpp |39.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/IParserBase.cpp |39.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserCase.cpp |39.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserBackupQuery.cpp |39.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/Lexer.cpp |39.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserAlterQuery.cpp |38.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserDatabaseOrNone.cpp |38.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserExternalDDLQuery.cpp |38.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserDescribeTableQuery.cpp |38.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserDataType.cpp |38.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ExpressionListParsers.cpp |38.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserKillQueryQuery.cpp |38.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserCreateQuery.cpp |38.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserSelectWithUnionQuery.cpp |38.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserShowTablesQuery.cpp |38.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserProjectionSelectQuery.cpp |38.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserDictionary.cpp |38.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTDropQuery.cpp |39.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserPartition.cpp |39.0%| PREPARE $(CLANG-2518231432) |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserOptimizeQuery.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserSampleRatio.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserRolesOrUsersSet.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserRenameQuery.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserQuery.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserShowPrivilegesQuery.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserSettingsProfileElement.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserSetQuery.cpp |38.6%| 
[CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserSelectQuery.cpp |38.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserSetRoleQuery.cpp |38.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserShowGrantsQuery.cpp |38.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/ArrowBufferedStreams.cpp |38.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/ConcatProcessor.cpp |38.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/ArrowColumnToCHColumn.cpp |38.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/formatAST.cpp |38.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/QueryWithOutputSettingsPushDownVisitor.cpp |38.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/TokenIterator.cpp |38.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserTablePropertiesQuery.cpp |38.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/parseDatabaseAndTableName.cpp |38.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/formatSettingName.cpp |38.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/parseIdentifierOrStringLiteral.cpp |38.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/IRowOutputFormat.cpp |38.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/IInputFormat.cpp |38.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/IRowInputFormat.cpp |38.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Executors/PollingQueue.cpp |38.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/ArrowBlockInputFormat.cpp |38.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/ParquetBlockInputFormat.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/ParquetBlockOutputFormat.cpp |38.8%| [BI] {default-linux-x86_64, release, asan} $(B)/library/cpp/build_info/buildinfo_data.h |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/JSONAsStringRowInputFormat.cpp |38.8%| PREPARE $(CLANG18-3363451693) |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/libapi-protos.a |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/CHColumnToArrowColumn.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/ORCBlockInputFormat.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/CSVRowOutputFormat.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/CSVRowInputFormat.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/JSONEachRowRowOutputFormat.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/TabSeparatedRowOutputFormat.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/IOutputFormat.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/AvroRowInputFormat.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/JSONEachRowRowInputFormat.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Interpreters/castColumn.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/RawBLOBRowInputFormat.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/TSKVRowInputFormat.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/getFQDNOrHostName.cpp |38.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/AggregateFunctions/AggregateFunctionCombinatorFactory.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/copyData.cpp |38.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/createReadBufferFromFileBase.cpp |38.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserUseQuery.cpp |38.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/ISink.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/IProcessor.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/ISource.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/IAccumulatingTransform.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserUnionQueryElement.cpp |38.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/queryToString.cpp |38.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/ResizeProcessor.cpp |38.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/clickhouse_client_udf.cpp |38.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Port.cpp |38.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/parseQuery.cpp |38.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/parseIntervalKind.cpp |38.9%| [UN] {default-linux-x86_64, release, asan} $(B)/yql/essentials/tests/common/test_framework/udfs_deps/common-test_framework-udfs_deps.pkg.fake |38.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/parseUserName.cpp |38.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserTablesInSelectQuery.cpp |38.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/LimitTransform.cpp |38.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Chunk.cpp |39.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserDropQuery.cpp |39.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTQueryWithOnCluster.cpp |38.9%| 
[CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserWithElement.cpp |38.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserWatchQuery.cpp |38.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserUserNameWithHost.cpp |38.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/IAST.cpp |38.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserDictionaryAttributeDeclaration.cpp |38.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/WriteBufferFromFileDescriptorDiscardOnFailure.cpp |38.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/WriteHelpers.cpp |38.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/WriteBufferFromPocoSocket.cpp |38.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTQueryParameter.cpp |39.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserExplainQuery.cpp |39.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/WriteBufferValidUTF8.cpp |39.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/WriteBufferFromFileDescriptor.cpp |39.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTQueryWithOutput.cpp |39.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeNullable.cpp |39.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnsCommon.cpp |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/antlr3/libparser-proto_ast-antlr3.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/range_helpers/libproviders-s3-range_helpers.a |39.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/TimerDescriptor.cpp |39.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/parseDateTimeBestEffort.cpp |39.0%| [CC] {default-linux-x86_64, release, asan} $(S)/library/cpp/svnversion/svn_interface.c |39.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Interpreters/ClientInfo.cpp |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/activation/libproviders-common-activation.a |39.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/readFloatText.cpp |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/executing/libpy3contrib-python-executing.global.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/libcpp-monlib-encode.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/library/user_job_statistics/libmapreduce-library-user_job_statistics.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_wrapper/interface/libparser-pg_wrapper-interface.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/pg_dummy/libessentials-sql-pg_dummy.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/dq/libproviders-common-dq.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/common/libclient-yc_public-common.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/result/expr_nodes/libproviders-result-expr_nodes.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/skiff/libcommon-schema-skiff.a |39.1%| [AR] 
{BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/deprecated/json/libmonlib-deprecated-json.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/value_helpers/libimpl-ydb_internal-value_helpers.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/logger/libimpl-ydb_internal-logger.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/parser/libcommon-schema-parser.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_endpoints/libclient-impl-ydb_endpoints.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/spack/libmonlib-encode-spack.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_stats/libclient-impl-ydb_stats.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/dump/util/libydb_cli-dump-util.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/discovery/libsrc-client-discovery.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/multidict/libpy3contrib-python-multidict.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/structured_token/libproviders-common-structured_token.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/export/libsrc-client-export.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/disjoint_sets/liblibrary-cpp-disjoint_sets.a |39.2%| [CC] {default-linux-x86_64, release, asan} $(B)/library/cpp/build_info/build_info.cpp |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/types/dynumber/libessentials-types-dynumber.a |39.2%| [CC] {default-linux-x86_64, release, asan} $(S)/library/cpp/svnversion/svnversion.cpp |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cblas/libcontrib-libs-cblas.a |39.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Functions/CastOverloadResolver.cpp |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/transform/libproviders-common-transform.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/coordination/libsrc-client-coordination.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/operation/libsrc-client-operation.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/arrow/libpublic-udf-arrow.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/infer_schema/libyt-lib-infer_schema.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/pg/provider/libproviders-pg-provider.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/jsonpath/libproto_ast-gen-jsonpath.a |39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/mkql/libproviders-common-mkql.a |39.3%| RESOURCE $(sbr:4966407557) |39.3%| [CC] {default-linux-x86_64, release, asan} $(S)/library/cpp/build_info/build_info_static.cpp |39.3%| [CC] {default-linux-x86_64, release, asan} $(B)/library/cpp/build_info/sandbox.cpp |39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/ast/libyql-essentials-ast.a |39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/datastreams/libsrc-client-datastreams.a |39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/libsrc-client-types.a |39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/table/impl/libclient-table-impl.a |39.3%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/dicts/libdicts_udf.so |39.3%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/test/simple/libsimple_udf.so |39.3%| PREPARE $(CLANG16-1380963495) |39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/messagebus/libcpp-monlib-messagebus.a 
|39.3%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/dummylog/libdummylog.so |39.3%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/logs/dsv/libdsv_udf.so |39.3%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/vector/libvector_udf.so |39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/credentials/login/libtypes-credentials-login.a |39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/fatal_error_handlers/libclient-types-fatal_error_handlers.a |39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/support/libpublic-udf-support.a |39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/resources/libsrc-client-resources.global.a |39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/codecs/libclient-topic-codecs.a |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/antlr4/libparser-proto_ast-antlr4.a |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/json/libmonlib-encode-json.a |39.4%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/math/libmath_udf.so |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/client/arrow/fbs/libclient-arrow-fbs.a |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/libessentials-public-udf.a |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/codec/codegen/llvm16/libcodec-codegen-llvm16.global.a |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/persqueue/topic_parser_public/libsdk-library-persqueue-topic_parser_public-v3.a |39.4%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/lists/liblists_udf.so |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/exceptions/libclient-types-exceptions.a |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1_ansi_antlr4/libproto_ast-gen-v1_ansi_antlr4.a |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1_antlr4/libproto_ast-gen-v1_antlr4.a |39.4%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/type_inspection/libtype_inspection_udf.so |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/credentials/libclient-types-credentials.a |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dbg_output/liblibrary-cpp-dbg_output.a |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/libessentials-public-issue.a |39.4%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/callables/libcallables_udf.so |39.4%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/structs/libstructs_udf.so |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/native/libyt-gateway-native.a |39.4%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/topfreq/libtopfreq_udf.so |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/build/libyt-yt-build.a |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/legacy_protobuf/protos/libencode-legacy_protobuf-protos.a |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/svn_version/libpy3library-python-svn_version.a |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/rate_limiter/libsrc-client-rate_limiter.a |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/big_integer/libcpp-openssl-big_integer.a |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/antlr3/libv1-lexer-antlr3.a |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/fq_runner/libpy3tests-tools-fq_runner.global.a |39.5%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/set/libset_udf.so |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyasn1/py3/libpy3python-pyasn1-py3.global.a |39.5%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/ut_common/libtx-datashard-ut_common.a |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/interface/logging/libmapreduce-interface-logging.a |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/python/libpy3cpython-symbols-python.global.a |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/proto_parser/antlr4/libv1-proto_parser-antlr4.a |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/proto_parser/antlr3/libv1-proto_parser-antlr3.a |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/mkql_dq/libproviders-yt-mkql_dq.a |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_cluster_discovery/cluster_ordering/libservices-persqueue_cluster_discovery-cluster_ordering.a |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/iam/libclient-yc_public-iam.a |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/proto_parser/antlr3_ansi/libv1-proto_parser-antlr3_ansi.a |39.5%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/streaming/libstreaming_udf.so |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/stack-data/libpy3contrib-python-stack-data.global.a |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/types/uuid/libessentials-types-uuid.a |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/accessors/libcpp-deprecated-accessors.a |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/old_crc/libcpp-digest-old_crc.a |39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/cpuid_check/liblibrary-cpp-cpuid_check.global.a |39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/comp_nodes/llvm16/libyt-comp_nodes-llvm16.a |39.6%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/hyperloglog/libhyperloglog_udf.so |39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/libapi-protos-annotations.a |39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/text/libmonlib-encode-text.a |39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/traitlets/py3/libpy3python-traitlets-py3.global.a |39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/hash/libyt-lib-hash.a |39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/query/libsrc-client-query.a |39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/mime/types/libcpp-mime-types.a |39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/tz/libpublic-udf-tz.a |39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/windows/libpy3library-python-windows.global.a |39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/profiling/resource_tracker/liblibrary-profiling-resource_tracker.global.a |39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/buffered/libmonlib-encode-buffered.a |39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/libc/libpython-symbols-libc.global.a |39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/libsrc-client-topic.a |39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/proto_parser/libsql-v1-proto_parser.a |39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/tracing/libyt-library-tracing.a |39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/accessservice/libclient-nc_private-accessservice.a |39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/libyql-essentials-utils.a |39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/common/libcpp-mapreduce-common.a |39.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/www/libcpp-messagebus-www.global.a |39.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/base/libpublic-lib-base.a |39.7%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/digest/argonish/internal/proxies/avx2/libinternal-proxies-avx2.a |39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_catalog/libessentials-parser-pg_catalog.global.a |39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/proto/libcore-file_storage-proto.a |39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/dump/files/libydb_cli-dump-files.a |39.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/antlr4/libv1-lexer-antlr4.a |39.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/strings/libpy3library-python-strings.global.a |39.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/tablet/libydb-services-tablet.a |39.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/skiff/libyt-lib-skiff.a |39.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/servicecontrol/libclient-yc_private-servicecontrol.a |39.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/udf_resolver/libcore-qplayer-udf_resolver.a |39.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/kesus/libydb-services-kesus.a |39.7%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/digest/libdigest_udf.so |39.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/value/libpublic-lib-value.a |39.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/atomizer/libcpp-containers-atomizer.a |39.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/common_client/libsrc-client-common_client.a |39.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/driver/libsrc-client-driver.a |39.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/proto/libproviders-yt-proto.a |39.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/expr_nodes/libproviders-yt-expr_nodes.a |39.7%| PREPARE $(FLAKE8_LINTER-sbr:6561765464) |39.8%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/top/libtop_udf.so |39.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/libcpp-monlib-service.a |39.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/iam/libsrc-client-iam.a |39.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/key_filter/libyt-lib-key_filter.a |39.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/rate_limiter/libydb-services-rate_limiter.a |39.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/grpc/client/libsdk-library-grpc-client-v3.a |39.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/make_request/libimpl-ydb_internal-make_request.a |39.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/keyvalue/libydb-services-keyvalue.a |39.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/initializer/libservices-metadata-initializer.a |39.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/job/libproviders-yt-job.a |39.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/grpc_connections/libimpl-ydb_internal-grpc_connections.a |39.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/extensions/solomon_stats/libclient-extensions-solomon_stats.a |39.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/interface/libcpp-mapreduce-interface.a |39.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/lib/libyt-gateway-lib.a |39.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/codec/libproviders-yt-codec.a |39.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/graph_reorder/libyt-lib-graph_reorder.a |39.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/lib2/py/libpy3python3-lib2-py.global.a |39.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/session_pool/libimpl-ydb_internal-session_pool.a |39.8%| 
[LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/stat/libstat_udf.so |39.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/impl/libclient-topic-impl.a |39.9%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/yson2/libyson2_udf.so |39.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/thread_pool/libimpl-ydb_internal-thread_pool.a |39.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ymq/libydb-services-ymq.a |39.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/draft/libsrc-client-draft.a |39.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/plain_status/libimpl-ydb_internal-plain_status.a |39.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/yson_value/libpublic-lib-yson_value.a |39.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/resources/libsrc-client-resources.a |39.9%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/histogram/libhistogram_udf.so |39.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1/libproto_ast-gen-v1.a |39.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyasn1-modules/py3/libpy3python-pyasn1-modules-py3.global.a |39.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/pg/expr_nodes/libproviders-pg-expr_nodes.a |39.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1_ansi/libproto_ast-gen-v1_ansi.a |39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyrsistent/py3/libpy3python-pyrsistent-py3.global.a |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/protos/libpy3library-mkql_proto-protos.global.a |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/deprecated/client/liblib-deprecated-client.a |39.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yt/yt/client/libyt-yt-client.a |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/bg_tasks/protos/libservices-bg_tasks-protos.a |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/lambda_builder/libyt-lib-lambda_builder.a |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/module/libpy3python-symbols-module.global.a |38.4%| PREPARE $(BLACK_LINTER-sbr:8107723363) |38.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yt/yt/core/libyt-yt-core.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/res_pull/libyt-lib-res_pull.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/ydb_discovery/libydb_cli_command_ydb_discovery.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/llhttp/libcontrib-restricted-llhttp.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/protos/libpublic-issue-protos.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/strings/libpy3library-python-strings.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/url_mapper/libyt-lib-url_mapper.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/status/libclient-types-status.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v0/libessentials-sql-v0.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/yt_download/libyt-lib-yt_download.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/decimal/libsrc-library-decimal.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/profiling/resource_tracker/liblibrary-profiling-resource_tracker.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/persqueue/obfuscate/libsdk-library-persqueue-obfuscate-v3.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/requests-oauthlib/libpy3contrib-python-requests-oauthlib.global.a |39.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/public/sdk/cpp/src/client/value/libsrc-client-value.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/table/libsrc-client-table.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/issue/libsrc-library-issue.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pure-eval/libpy3contrib-python-pure-eval.global.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/ytflow/expr_nodes/libproviders-ytflow-expr_nodes.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/string_utils/helpers/liblibrary-string_utils-helpers.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/ytflow/integration/interface/libytflow-integration-interface.a |39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/metrics/libcpp-monlib-metrics.a |39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/ytflow/integration/proto/libytflow-integration-proto.a |39.3%| [ld] {default-linux-x86_64, release, asan} $(B)/tools/flake8_linter/flake8_linter |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/uuid/libsrc-library-uuid.a |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/pages/libmonlib-service-pages.a |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/maintenance/libydb-services-maintenance.a |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/pages/resources/libservice-pages-resources.global.a |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/client/arrow/libyt-client-arrow.a |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/type_ann/libessentials-core-type_ann.a |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/auth/libydb-services-auth.a |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/lib/auth/libservices-lib-auth.a |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/pages/tablesorter/libservice-pages-tablesorter.global.a |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/Modules/_sqlite/libpy3python3-Modules-_sqlite.a |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/lib/sharding/libservices-lib-sharding.a |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/crypto/libcpp-openssl-crypto.a |39.5%| [SB] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/postgresql/psql/psql |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/wcwidth/py3/libpy3python-wcwidth-py3.global.a |39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/on_disk/chunks/libcpp-on_disk-chunks.a |39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/deprecated/persqueue_v0/api/protos/libapi-protos-persqueue-deprecated.a |39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/erasure/libyt-library-erasure.a |39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/numeric/libyt-library-numeric.a |39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/secret/libservices-metadata-secret.global.a |39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/typing-extensions/py3/libpy3python-typing-extensions-py3.global.a |39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/split/libcpp-deprecated-split.a |39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/decimal/libyt-library-decimal.a |39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/dq_integration/transform/libcore-dq_integration-transform.a |39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/ast/serialize/libessentials-ast-serialize.a |39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/dq_integration/libessentials-core-dq_integration.a |39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/credentials/libessentials-core-credentials.a |39.6%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/core/arrow_kernels/registry/libcore-arrow_kernels-registry.a |39.7%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/file/libfile_udf.so |39.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/libessentials-core-issue.global.a |39.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/log/libyt-lib-log.a |39.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/quantile_digest/libyt-library-quantile_digest.a |39.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/exception/libcpp-monlib-exception.a |39.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/fs/libpy3library-python-fs.global.a |39.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/ytprof/api/liblibrary-ytprof-api.a |39.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/mkql_helpers/libyt-lib-mkql_helpers.a |39.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/absl_flat_hash/libcpp-containers-absl_flat_hash.a |39.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/init/libcpp-openssl-init.a |39.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/manager/libservices-metadata-manager.a |39.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/util/libcpp-protobuf-util.a |39.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/protos/libcore-issue-protos.a |39.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt_proto/yt/formats/libyt_proto-yt-formats.a |39.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/minsketch/libessentials-core-minsketch.a |39.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_cluster_discovery/libydb-services-persqueue_cluster_discovery.a |39.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/local_discovery/libydb-services-local_discovery.a |39.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/ref/libinternal-proxies-ref.a |39.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/cms/libydb-services-cms.a |39.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/certifi/libpy3library-python-certifi.global.a |39.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/filelock/libpy3library-python-filelock.global.a |39.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/services/mounts/libcore-services-mounts.a |39.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dwarf_backtrace/liblibrary-cpp-dwarf_backtrace.a |40.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/bitseq/libcpp-containers-bitseq.a |40.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/sql_types/libessentials-core-sql_types.a |40.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/url_lister/interface/libcore-url_lister-interface.a |40.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/diff/liblibrary-cpp-diff.a |40.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/operation_id/protos/liblibrary-operation_id-protos.a |40.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/url_preprocessing/interface/libcore-url_preprocessing-interface.a |40.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/requests/py3/libpy3python-requests-py3.global.a |40.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/ds_table/libservices-metadata-ds_table.a |40.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/user_data/libessentials-core-user_data.a |40.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/undumpable/libyt-library-undumpable.a |40.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/client/formats/libyt-client-formats.a |40.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/rsa/py3/libpy3python-rsa-py3.global.a |40.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/extract_predicate/libessentials-core-extract_predicate.a |40.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/python/ruamel.yaml.clib/py3/libpy3python-ruamel.yaml.clib-py3.global.a |40.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt_proto/yt/core/libyt_proto-yt-core.a |40.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pytest/py3/libpy3python-pytest-py3.global.a |40.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ruamel.yaml.clib/py3/libpy3python-ruamel.yaml.clib-py3.a |40.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/misc/isa_crc64/libisa-l_crc_yt_patch.a |40.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ruamel.yaml/py3/libpy3python-ruamel.yaml-py3.global.a |40.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dns/liblibrary-cpp-dns.a |40.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/services/libessentials-core-services.a |40.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/six/py3/libpy3python-six-py3.global.a |40.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/typeguard/libpy3contrib-python-typeguard.global.a |40.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/abstract/libservices-metadata-abstract.a |40.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/common/libimpl-ydb_internal-common.a |40.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/https/libyt-core-https.a |40.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/Modules/_sqlite/libpy3python3-Modules-_sqlite.global.a |40.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/re2/libyt-library-re2.a |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/urllib3/py3/libpy3python-urllib3-py3.global.a |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cffi/py3/libpy3python-cffi-py3.global.a |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/log/libessentials-utils-log.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/http/libyt-core-http.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blob_depot/agent/libcore-blob_depot-agent.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/charset-normalizer/libpy3contrib-python-charset-normalizer.global.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/tvm/libyt-library-tvm.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/decorator/py3/libpy3python-decorator-py3.global.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/websocket-client/libpy3contrib-python-websocket-client.global.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/holders/libcpp-openssl-holders.a |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/registry/libpython-symbols-registry.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/json_value/libpublic-lib-json_value.a |40.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/yson_struct_update.cpp |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/sse2/libinternal-proxies-sse2.a |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/formats/libyt-library-formats.a |40.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/client.cpp |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cryptography/py3/libpy3python-cryptography-py3.a |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/http_client/libcpp-mapreduce-http_client.a |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/events/libclient-yc_public-events.a |40.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/exception_helpers.cpp |40.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/distributed_table_client.cpp |40.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attribute_consumer.cpp |40.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ephemeral_attribute_owner.cpp |40.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/client_common.cpp |40.8%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/core/ytree/serialize.cpp |40.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/convert.cpp |40.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attributes.cpp |40.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop_unknown_fields.cpp |40.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attribute_filter.cpp |40.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/forwarding_consumer.cpp |40.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytalloc/config.cpp |40.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ephemeral_node_factory.cpp |40.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/depth_limiting_yson_consumer.cpp |41.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/helpers.cpp |41.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/async_writer.cpp |41.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/request_complexity_limits.cpp |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_v1/libydb-services-persqueue_v1.a |41.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/profiling/timing.cpp |41.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/server_detail.cpp |41.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/permission.cpp |41.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/socket.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/helpers.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/interned_attributes.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/node.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/request_complexity_limiter.cpp |41.1%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/python/python3_small/libpython3_udf.so |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/size.cpp |41.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_resolver.cpp |41.2%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/hyperscan/libhyperscan_udf.so |41.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/proc.cpp |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/fq/libydb-services-fq.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/libyt-yt-core.global.a |41.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/static_service_dispatcher.cpp |41.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/arithmetic_formula.cpp |41.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/node_detail.cpp |41.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/validate_logical_type.cpp |41.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/system_attribute_provider.cpp |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/comp_nodes/dq/llvm16/libcomp_nodes-dq-llvm16.a |41.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/fluent_log.cpp |41.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/tree_builder.cpp |41.3%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/tools/astdiff/astdiff |41.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/codicil.cpp |41.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/tree_visitor.cpp |41.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_queue_scheduler_thread.cpp |41.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/yson_struct_detail.cpp |41.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/yson_struct.cpp |41.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/virtual.cpp |41.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/service_combiner.cpp |41.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/delayed_executor.cpp |41.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_action_queue.cpp |41.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_invoker_queue.cpp |41.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/tools/python3/libcontrib-tools-python3.a |41.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/public.cpp |41.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_service.cpp |41.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_detail.cpp |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt_proto/yt/client/libyt_proto-yt-client.a |41.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_client.cpp |41.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/options.cpp |41.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/unordered_schemaful_reader.cpp |41.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/helpers.cpp |41.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/comparator.cpp |41.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/public.cpp |41.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/value_consumer.cpp |41.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/unversioned_value.cpp |41.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/unversioned_row.cpp |41.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/access_control.cpp |41.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/columnar.cpp |41.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/journal_reader.cpp |41.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/wire_protocol.cpp |41.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/versioned_reader.cpp |41.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/versioned_row.cpp |41.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/node_tracker_client/helpers.cpp |41.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/versioned_io_options.cpp |41.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/helpers.cpp |41.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/file_client/config.cpp |41.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/config.cpp |41.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/watermark_runtime_data.cpp |41.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/table_mount_cache.cpp |41.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/config.cpp |41.4%| [UN] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/postgresql/psql/psql |41.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/timestamp_provider_base.cpp |41.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/read_limit.cpp |41.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/noop_timestamp_provider.cpp |41.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/table_mount_cache_detail.cpp |41.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/batching_timestamp_provider.cpp |41.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/ypath/rich.cpp |41.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/table_partition_reader.cpp |41.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/sticky_transaction_pool.cpp |41.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/security_client.cpp |41.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/transaction.cpp |41.5%| [ld] {default-linux-x86_64, release, asan} $(B)/tools/black_linter/black_linter |41.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/file_writer.cpp |41.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/public.cpp |41.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/private.cpp |41.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/timestamp_provider.cpp |41.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/remote_timestamp_provider.cpp |41.5%| 
[CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/data_statistics.cpp |41.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/helpers.cpp |41.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/client_cache.cpp |41.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/delegating_transaction.cpp |41.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/ypath/parser_detail.cpp |41.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/helpers.cpp |41.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/dynamic_table_transaction_mixin.cpp |41.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/queue_transaction_mixin.cpp |41.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/delegating_client.cpp |41.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/connection.cpp |41.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/etc_client.cpp |41.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/internal_client.cpp |41.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/address_helpers.cpp |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/libyql-essentials-core.a |41.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/wire_row_stream.cpp |41.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/distributed_table_session.cpp |41.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rowset.cpp |41.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/logical_type.cpp |41.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/transaction.cpp |41.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/journal_client.cpp |41.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/table_writer.cpp |41.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/skynet.cpp |41.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/connection_impl.cpp |41.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/query_tracker_client.cpp |41.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/public.cpp >> test.py::flake8 [GOOD] |41.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/file_reader.cpp |41.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/row_batch_reader.cpp |41.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/table_reader.cpp |41.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/infinite_entity.cpp |41.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/table_client.cpp |41.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/chunk_replica.cpp |41.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/time_text.cpp |41.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/row_stream.cpp |41.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/config.cpp |41.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/persistent_queue.cpp |41.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/row_batch_writer.cpp |41.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/transaction_impl.cpp |41.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/config.cpp >> test_example.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |41.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/config.cpp |41.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/election/public.cpp |41.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/helpers.cpp |41.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/table_mount_cache.cpp |41.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/replication_card_cache.cpp |41.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/operation_client.cpp |41.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/journal_writer.cpp |41.7%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/client/api/shuffle_client.cpp |41.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/config.cpp |41.7%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/url_base/liburl_udf.so >> test_tpcds.py::flake8 [GOOD] |41.7%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/unicode_base/libunicode_udf.so >> test_tpch_spilling.py::flake8 [GOOD] |41.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/bundle_controller_client/bundle_controller_settings.cpp |41.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytalloc/statistics_producer.cpp >> column_table_helper.py::flake8 [GOOD] >> range_allocator.py::flake8 [GOOD] >> s3_client.py::flake8 [GOOD] >> thread_helper.py::flake8 [GOOD] >> time_histogram.py::flake8 [GOOD] >> utils.py::flake8 [GOOD] >> ydb_client.py::flake8 [GOOD] >> test_workload.py::flake8 [GOOD] |41.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/journal_client/config.cpp |41.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/replication_card.cpp >> hive_matchers.py::flake8 [GOOD] >> test_commit.py::flake8 [GOOD] >> test_timeout.py::flake8 [GOOD] >> test_create_tablets.py::flake8 [GOOD] >> test_drain.py::flake8 [GOOD] |41.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/helpers.cpp >> test_kill_tablets.py::flake8 [GOOD] |41.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/method_helpers.cpp |41.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/bundle_controller_client/bundle_controller_client.cpp |41.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/common_opt/libessentials-core-common_opt.a >> conftest.py::flake8 [GOOD] >> test_insert_restarts.py::flake8 [GOOD] >> test_workload.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_cp_ic.py::flake8 [GOOD] >> test_dispatch.py::flake8 [GOOD] >> test_retry.py::flake8 [GOOD] >> test_clickbench.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_retry_high_rate.py::flake8 [GOOD] >> test_schemeshard_limits.py::flake8 [GOOD] >> test_tpcds.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_tpch.py::flake8 [GOOD] >> collection.py::flake8 [GOOD] |41.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/query_client/query_builder.cpp |41.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/config.cpp |41.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/query_client/query_statistics.cpp |41.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/client_base.cpp |41.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/node_tracker_client/node_directory.cpp >> conftest.py::flake8 [GOOD] >> select_datetime_with_service_name.py::flake8 [GOOD] >> test_clickbench.py::flake8 [GOOD] >> test_tpch.py::flake8 [GOOD] >> select_positive_with_service_name.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_clickhouse.py::flake8 [GOOD] >> test_greenplum.py::flake8 [GOOD] >> test_kqprun_recipe.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_join.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_postgresql.py::flake8 [GOOD] >> test_ydb.py::flake8 [GOOD] >> docker_wrapper_test.py::flake8 [GOOD] >> test_disk.py::flake8 [GOOD] >> test_tablet.py::flake8 [GOOD] |41.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/consumer_client.cpp |41.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/scheduler/operation_id_or_alias.cpp >> test.py::flake8 [GOOD] >> test_bulkupserts_tpch.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> helpers.py::flake8 [GOOD] >> test_ctas.py::flake8 [GOOD] >> test_insert_delete_duplicate_records.py::flake8 [GOOD] >> test_yt_reading.py::flake8 [GOOD] >> test_insertinto_selectfrom.py::flake8 [GOOD] >> 
test_tiering.py::flake8 [GOOD] >> test_workload_manager.py::flake8 [GOOD] |41.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/check_yson_token.cpp |41.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/ares_dns_resolver.cpp |41.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/table_consumer.cpp |41.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/ready_event_reader_base.cpp |41.8%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part4/flake8 >> test.py::flake8 [GOOD] >> helpers.py::flake8 [GOOD] >> test_base.py::flake8 [GOOD] >> test_query.py::flake8 [GOOD] >> test_s3.py::flake8 [GOOD] >> base.py::flake8 [GOOD] >> test_tpch_import.py::flake8 [GOOD] >> test_alloc_default.py::flake8 [GOOD] >> test_dc_local.py::flake8 [GOOD] >> test_result_limits.py::flake8 [GOOD] >> test_scheduling.py::flake8 [GOOD] |41.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/cypress_client/public.cpp |41.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/job_tracker_client/public.cpp |41.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ydb/libydb-services-ydb.a |41.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/yson_format_conversion.cpp |41.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/check_type_compatibility.cpp |41.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/uuid_text.cpp >> test_compatibility.py::flake8 [GOOD] >> collection.py::flake8 [GOOD] >> test_cms_erasure.py::flake8 [GOOD] >> test_followers.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_cms_restart.py::flake8 [GOOD] >> test_stress.py::flake8 [GOOD] >> collection.py::flake8 [GOOD] >> select_datetime.py::flake8 [GOOD] >> test_cms_state_storage.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> select_positive.py::flake8 [GOOD] >> utils.py::flake8 [GOOD] >> select_datetime.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> select_positive.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_ttl.py::flake8 [GOOD] |41.8%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/json2/libjson2_udf.so |41.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/scheduler/spec_patch.cpp |41.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/kafka/requests.cpp |41.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/merge_complex_types.cpp |41.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/signature/signature.cpp |41.9%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part5/flake8 >> test.py::flake8 [GOOD] >> kikimr_config.py::flake8 [GOOD] >> collection.py::flake8 [GOOD] >> test_workload.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> select_datetime.py::flake8 [GOOD] >> select_positive.py::flake8 [GOOD] >> select_positive_with_schema.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |41.8%| [TS] {asan, default-linux-x86_64, release} ydb/tests/example/flake8 >> test_example.py::flake8 [GOOD] |41.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/job_tracker_client/helpers.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/kafka/protocol.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/chunk_stripe_statistics.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/hydra/version.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/hive/timestamp_map.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/journal_client/public.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/kafka/packet.cpp >> conftest.py::flake8 [GOOD] >> test_serverless.py::flake8 [GOOD] >> test_encryption.py::flake8 [GOOD] |41.9%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/client/table_client/column_rename_descriptor.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/column_sort_schema.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/public.cpp |41.9%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/protobuf/libprotobuf_udf.so |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/object_client/public.cpp |41.9%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/tpc/large/flake8 >> test_tpch_spilling.py::flake8 [GOOD] |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/columnar_statistics.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/io_tags.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/pipe.cpp |41.9%| [TS] {asan, default-linux-x86_64, release} ydb/tests/olap/common/flake8 >> ydb_client.py::flake8 [GOOD] |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/node_tracker_client/public.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/row_batch.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/queue_rowset.cpp >> collection.py::flake8 [GOOD] >> collection.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> select_datetime.py::flake8 [GOOD] >> select_positive.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> scenario.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_case.py::flake8 [GOOD] >> tablet_scheme_tests.py::flake8 [GOOD] >> allure_utils.py::flake8 [GOOD] >> results_processor.py::flake8 [GOOD] >> utils.py::flake8 [GOOD] >> ydb_cli.py::flake8 [GOOD] >> ydb_cluster.py::flake8 [GOOD] >> collection.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> select_positive.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/scheduler/operation_cache.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/key_bound.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/workload.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/table_output.cpp |41.9%| [TS] {asan, default-linux-x86_64, release} ydb/tests/stress/kv/tests/flake8 >> test_workload.py::flake8 [GOOD] |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/common.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/current_invoker.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/serialize.cpp |42.0%| [TS] {asan, default-linux-x86_64, release} ydb/tests/tools/pq_read/test/flake8 >> test_timeout.py::flake8 [GOOD] |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/helpers.cpp |42.0%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/flake8 >> test_kill_tablets.py::flake8 [GOOD] |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/config.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/public.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/signature/generator.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/codicil_guarded_invoker.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/helpers.cpp >> test_common.py::flake8 [GOOD] >> test_yandex_cloud_mode.py::flake8 [GOOD] >> test_yandex_cloud_queue_counters.py::flake8 [GOOD] |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/replication_card_serialization.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/blob_reader.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/partition_reader.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/table_upload_options.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/client/queue_client/producer_client.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/public.cpp |42.0%| [TS] {asan, default-linux-x86_64, release} ydb/tests/stress/oltp_workload/tests/flake8 >> test_workload.py::flake8 [GOOD] |42.1%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/multi_plane/flake8 >> test_retry_high_rate.py::flake8 [GOOD] |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_util.cpp |42.1%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/restarts/flake8 >> test_insert_restarts.py::flake8 [GOOD] |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/signature/validator.cpp |42.1%| [TS] {asan, default-linux-x86_64, release} ydb/tests/olap/load/flake8 >> test_tpch.py::flake8 [GOOD] |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/adapters.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/object_client/helpers.cpp |42.1%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part9/flake8 >> test.py::flake8 [GOOD] |42.1%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/flake8 >> test_schemeshard_limits.py::flake8 [GOOD] >> base.py::flake8 [GOOD] >> data_correctness.py::flake8 [GOOD] >> data_migration_when_alter_ttl.py::flake8 [GOOD] >> ttl_delete_s3.py::flake8 [GOOD] >> ttl_unavailable_s3.py::flake8 [GOOD] >> unstable_connection.py::flake8 [GOOD] |42.1%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/generic/connector/tests/datasource/oracle/flake8 >> test.py::flake8 [GOOD] |42.1%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part12/flake8 >> test.py::flake8 [GOOD] |42.1%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/tpc/medium/flake8 >> test_tpch.py::flake8 [GOOD] |42.1%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part15/flake8 >> test.py::flake8 [GOOD] |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/check_schema_compatibility.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/composite_compare.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/key.cpp |42.1%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/generic/analytics/flake8 >> test_ydb.py::flake8 [GOOD] |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/bzip2.cpp |42.2%| [TS] {asan, default-linux-x86_64, release} ydb/tests/tools/kqprun/tests/flake8 >> test_kqprun_recipe.py::flake8 [GOOD] |42.2%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part13/flake8 >> test.py::flake8 [GOOD] |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/server.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/lzma.cpp |42.2%| [TS] {asan, default-linux-x86_64, release} ydb/tests/postgres_integrations/go-libpq/flake8 >> docker_wrapper_test.py::flake8 [GOOD] |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/connection.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/row_base.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schemaless_row_reorderer.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/helpers.cpp >> conftest.py::flake8 [GOOD] >> test_alter_compression.py::flake8 [GOOD] >> test_alter_tiering.py::flake8 [GOOD] >> test_insert.py::flake8 [GOOD] >> test_read_update_write_load.py::flake8 [GOOD] >> test_scheme_load.py::flake8 [GOOD] >> test_simple.py::flake8 [GOOD] |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/merge_table_schemas.cpp |42.2%| [TS] {asan, default-linux-x86_64, release} 
ydb/tests/tools/nemesis/ut/flake8 >> test_tablet.py::flake8 [GOOD] |42.2%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/solomon/flake8 >> test.py::flake8 [GOOD] |42.2%| [TS] {asan, default-linux-x86_64, release} ydb/tests/sql/large/flake8 >> test_workload_manager.py::flake8 [GOOD] |42.2%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_import/flake8 >> test_yt_reading.py::flake8 [GOOD] |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/acl.cpp |42.2%| [TS] {asan, default-linux-x86_64, release} ydb/tests/sql/lib/flake8 >> test_s3.py::flake8 [GOOD] |42.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tools/solomon_emulator/bin/solomon_emulator |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/name_table.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/config.cpp |42.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/olap/s3_import/flake8 >> test_tpch_import.py::flake8 [GOOD] |42.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/mem_alloc/flake8 >> test_scheduling.py::flake8 [GOOD] |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/record_helpers.cpp |42.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/merge_split_common_table/std/flake8 >> test.py::flake8 [GOOD] |42.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/compatibility/flake8 >> test_stress.py::flake8 [GOOD] |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/record_codegen_cpp.cpp |42.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/flake8 >> test_ttl.py::flake8 [GOOD] |42.3%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/generic/connector/tests/datasource/mysql/flake8 >> test.py::flake8 [GOOD] |42.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/flake8 >> utils.py::flake8 [GOOD] |42.3%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/generic/connector/tests/datasource/clickhouse/flake8 >> test.py::flake8 [GOOD] |42.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/library/ut/flake8 >> kikimr_config.py::flake8 [GOOD] |42.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/stress/olap_workload/tests/flake8 >> test_workload.py::flake8 [GOOD] |42.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part6/flake8 >> test.py::flake8 [GOOD] |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/row_buffer.cpp |42.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/supp/ydb_supp |42.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_rw_lock.cpp |42.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schemaless_dynamic_table_writer.cpp |42.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/coroutine.cpp |42.4%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/generic/connector/tests/datasource/postgresql/flake8 >> test.py::flake8 [GOOD] |42.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/execution_stack.cpp |42.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fiber.cpp |42.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schema_serialization_helpers.cpp |42.4%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/flake8 >> test_serverless.py::flake8 [GOOD] |42.4%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/generic/connector/tests/join/flake8 >> test_case.py::flake8 [GOOD] |42.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_barrier.cpp |42.4%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/core/concurrency/config.cpp |42.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/client_impl.cpp |42.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/two_level_fair_share_thread_pool.cpp |42.4%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/encryption/flake8 >> test_encryption.py::flake8 [GOOD] |42.4%| [TS] {asan, default-linux-x86_64, release} ydb/tests/olap/lib/flake8 >> ydb_cluster.py::flake8 [GOOD] |42.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schema.cpp |42.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/flake8 >> test_yandex_cloud_queue_counters.py::flake8 [GOOD] |42.5%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/generic/connector/tests/datasource/ms_sql_server/flake8 >> test.py::flake8 [GOOD] |42.4%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_tests/flake8 >> tablet_scheme_tests.py::flake8 [GOOD] |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/packet.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/cancelation_token.cpp |42.5%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/generic/connector/tests/datasource/ydb/flake8 >> test.py::flake8 [GOOD] |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_pool.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/cancelable_context.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/timestamped_schema_helpers.cpp >> test_crud.py::flake8 [GOOD] >> test_discovery.py::flake8 [GOOD] >> test_execute_scheme.py::flake8 [GOOD] >> test_indexes.py::flake8 [GOOD] >> test_insert.py::flake8 [GOOD] >> test_isolation.py::flake8 [GOOD] >> test_public_api.py::flake8 [GOOD] |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_detail.cpp |42.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/olap/ttl_tiering/flake8 >> unstable_connection.py::flake8 [GOOD] |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/periodic_executor.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/future.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/profiling_helpers.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/periodic_yielder.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/propagating_storage.cpp >> test_read_table.py::flake8 [GOOD] >> test_session_grace_shutdown.py::flake8 [GOOD] >> test_session_pool.py::flake8 [GOOD] |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/scheduled_executor.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_parser.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/local_bypass.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/public.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/suspendable_action_queue.cpp >> conftest.py::black [GOOD] >> test_clickhouse.py::black [GOOD] >> test_greenplum.py::black [GOOD] >> test_join.py::black [GOOD] >> test_postgresql.py::black [GOOD] >> test_ydb.py::black [GOOD] |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool_detail.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/client.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/dispatcher.cpp |42.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/olap/scenario/flake8 >> test_simple.py::flake8 [GOOD] |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/lz.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/ssl_context.cpp >> conftest.py::black [GOOD] >> test_join.py::black [GOOD] |42.5%| [CC] 
{BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/ssl_helpers.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/brotli.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/dictionary_codec.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_looper.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/config.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/dns_resolver.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_callbacks.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/dispatcher_impl.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/tls.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/formatter.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/stream.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/codec.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/log_writer_detail.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/stream_log_writer.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/zlib.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/action_queue.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bit_packed_unsigned_vector.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/file_log_writer.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/snappy.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fiber_manager.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/zstd.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/crypto.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/serializable_logger.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/notify_manager.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/nonblocking_batcher.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_semaphore.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_invoker_pool.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/digest.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/invoker_alarm.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/configurable_singleton_def.cpp |42.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/streaming_optimize/ydb-tests-fq-streaming_optimize |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_thread_pool.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_stream_pipe.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/id_generator.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fls.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_stream.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/pool_allocator.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fiber_scheduler_thread.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/lease_manager.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_throttler.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/pollable_detail.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/invoker_queue.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/quantized_executor.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker_profiler.cpp >> test.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_auditlog.py::flake8 [GOOD] |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/system_invokers.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/config.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/core/concurrency/retrying_periodic_executor.cpp |42.8%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/api/flake8 >> test_session_pool.py::flake8 [GOOD] |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/scheduler_thread.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/single_queue_scheduler_thread.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/statistic_path.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/random.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool_poller.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/serialize.cpp >> test.py::flake8 [GOOD] >> test_multinode_cluster.py::flake8 [GOOD] >> test_recompiles_requests.py::flake8 [GOOD] |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_affinity.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/throughput_throttler.cpp |42.8%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/generic/analytics/black >> test_ydb.py::black [GOOD] |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/helpers.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/config.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/connection.cpp >> test_pdisk_format_info.py::flake8 [GOOD] >> test_replication.py::flake8 [GOOD] >> test_self_heal.py::flake8 [GOOD] >> test_tablet_channel_migration.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_2_selects_limit.py::flake8 [GOOD] >> test_3_selects.py::flake8 [GOOD] >> test_bad_syntax.py::flake8 [GOOD] >> test_base.py::flake8 [GOOD] >> test_big_state.py::flake8 [GOOD] >> test_continue_mode.py::flake8 [GOOD] >> test_cpu_quota.py::flake8 [GOOD] >> test_delete_read_rules_after_abort_by_system.py::flake8 [GOOD] >> test_eval.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_invalid_consumer.py::flake8 [GOOD] |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/type_def.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytalloc/bindings.cpp |42.8%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/generic/streaming/black >> test_join.py::black [GOOD] |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/helpers.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/backoff_strategy.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/histogram.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/adjusted_exponential_moving_average.cpp >> test_kill_pq_bill.py::flake8 [GOOD] >> common.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_mem_alloc.py::flake8 [GOOD] >> test_rename.py::flake8 [GOOD] >> test_metrics_cleanup.py::flake8 [GOOD] >> test_pq_read_write.py::flake8 [GOOD] >> test_public_metrics.py::flake8 [GOOD] >> test_read_rules_deletion.py::flake8 [GOOD] >> test_recovery.py::flake8 [GOOD] >> test_recovery_match_recognize.py::flake8 [GOOD] >> test_recovery_mz.py::flake8 [GOOD] >> test_restart_query.py::flake8 [GOOD] >> test_row_dispatcher.py::flake8 [GOOD] >> test_select_1.py::flake8 [GOOD] >> test_select_limit.py::flake8 [GOOD] >> test_select_limit_db_id.py::flake8 [GOOD] >> test_select_timings.py::flake8 [GOOD] >> test_stop.py::flake8 [GOOD] >> test_watermarks.py::flake8 [GOOD] >> test_yds_bindings.py::flake8 [GOOD] >> test_yq_streaming.py::flake8 [GOOD] |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/compression.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/config.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/bus/channel.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/core/phoenix/type_registry.cpp >> test_actorsystem.py::flake8 [GOOD] >> __main__.py::flake8 [GOOD] |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_writer.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/logger_owner.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/stream_output.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/error.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/random_access_gzip.cpp >> overlapping_portions.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/zstd_compression.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/config.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/system_log_event_provider.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/key_bound_compressor.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/hedging_manager.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/client.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bitmap.cpp |42.9%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part7/flake8 >> test.py::flake8 [GOOD] >> test_workload.py::flake8 [GOOD] >> test_liveness_wardens.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/config.cpp >> test_ydb_backup.py::flake8 [GOOD] |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bit_packing.cpp >> test_ydb_flame_graph.py::flake8 [GOOD] |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bloom_filter.cpp >> test_ydb_impex.py::flake8 [GOOD] |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/null_channel.cpp >> test_ydb_recursive_remove.py::flake8 [GOOD] >> test_ydb_scheme.py::flake8 [GOOD] >> test_ydb_scripting.py::flake8 [GOOD] >> test_ydb_sql.py::flake8 [GOOD] >> test_ydb_table.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_join.py::flake8 [GOOD] >> test_base.py::flake8 [GOOD] >> test_http_api.py::flake8 [GOOD] >> alter_compression.py::flake8 [GOOD] >> base.py::flake8 [GOOD] |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/message.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/protocol_version.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/hazard_ptr.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/coro_pipe.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/blob_output.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/fair_share_hierarchical_queue.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/config.cpp |43.0%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/flake8 >> test_auditlog.py::flake8 [GOOD] |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/roaming_channel.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/pattern_formatter.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/linear_probe.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/public.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/cache_config.cpp >> test_query_cache.py::flake8 [GOOD] >> __main__.py::flake8 [GOOD] >> parser.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/fs.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/statistics.cpp |43.0%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part1/flake8 >> test.py::flake8 [GOOD] |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/parser_helpers.cpp 
|43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/relaxed_mpsc_queue.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/service_discovery/service_discovery.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/schemas.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/protobuf_helpers.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/tracing/public.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/process_exit_profiler.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/async_consumer.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/slab_allocator.cpp |43.0%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/multinode/flake8 >> test_recompiles_requests.py::flake8 [GOOD] |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/tokenizer.cpp >> test_leader_start_inflight.py::flake8 [GOOD] >> test_restarts.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_workload.py::flake8 [GOOD] >> test_quota_exhaustion.py::flake8 [GOOD] |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker_statistics_producer.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/signal_registry.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop_options.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/shutdown.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/public.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/utf8_decoder.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/config.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/zerocopy_output_writer.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/stream.cpp |43.1%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/flake8 >> test_tablet_channel_migration.py::flake8 [GOOD] |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/local_address.cpp >> test.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_alter_ops.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_sql_streaming.py::flake8 [GOOD] >> test_stats_mode.py::flake8 [GOOD] >> test_copy_ops.py::flake8 [GOOD] >> test_scheme_shard_operations.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_unknown_data_source.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/dialer.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/checksum.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/syntax_checker.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/parser.cpp |43.1%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yds/flake8 >> test_yq_streaming.py::flake8 [GOOD] |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/token.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/descriptors.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/address.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/load.cpp |43.1%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part11/flake8 >> test.py::flake8 [GOOD] |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/listener.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/yson_builder.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/context.cpp >> test_sql.py::flake8 [GOOD] >> test_update_script_tables.py::flake8 [GOOD] >> test_postgres.py::flake8 [GOOD] >> test_crud.py::flake8 [GOOD] >> test_inserts.py::flake8 [GOOD] >> test_kv.py::flake8 [GOOD] |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/channel_detail.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/core/rpc/local_server.cpp |43.1%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/flake8 >> test_rename.py::flake8 [GOOD] |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/viable_peer_registry.cpp |43.2%| [TS] {asan, default-linux-x86_64, release} ydb/tests/tools/kqprun/recipe/flake8 >> __main__.py::flake8 [GOOD] |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/authenticator.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/authentication_identity.cpp |43.2%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/autoconfig/flake8 >> test_actorsystem.py::flake8 [GOOD] |43.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/balancing_channel.cpp |43.2%| [TS] {asan, default-linux-x86_64, release} ydb/tests/olap/oom/flake8 >> overlapping_portions.py::flake8 [GOOD] |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/caching_channel_factory.cpp |43.2%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part18/flake8 >> test.py::flake8 [GOOD] |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/bus/server.cpp |43.2%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part19/flake8 >> test.py::flake8 [GOOD] |43.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/merge_split_common_table/fifo/flake8 >> test.py::flake8 [GOOD] >> test_quoting.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |43.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/dispatcher.cpp |43.2%| [TS] {asan, default-linux-x86_64, release} ydb/tests/stress/simple_queue/tests/flake8 >> test_workload.py::flake8 [GOOD] |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/local_channel.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/hedging_channel.cpp |43.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/wardens/flake8 >> test_liveness_wardens.py::flake8 [GOOD] |43.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/helpers.cpp |43.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/ypath_filtering_consumer.cpp |43.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part0/flake8 >> test.py::flake8 [GOOD] |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/throttling_channel.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/per_key_request_queue_provider.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/dynamic_channel_pool.cpp |43.0%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/generic/streaming/flake8 >> test_join.py::flake8 [GOOD] |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/retrying_channel.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/message_format.cpp |43.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/ydbd/ydbd |43.0%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part16/flake8 >> test.py::flake8 [GOOD] |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/peer_discovery.cpp |43.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test |43.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut |43.2%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/core/rpc/public.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/response_keeper.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/request_queue_provider.cpp |43.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api >> test_config_with_metadata.py::flake8 [GOOD] |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/static_channel_factory.cpp |43.4%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part14/flake8 >> test.py::flake8 [GOOD] |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/service.cpp |43.4%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/http_api/flake8 >> test_http_api.py::flake8 [GOOD] |43.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/query_cache/flake8 >> test_query_cache.py::flake8 [GOOD] |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/config.cpp |43.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/olap/docs/generator/flake8 >> parser.py::flake8 [GOOD] |43.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/olap/column_family/compression/flake8 >> base.py::flake8 [GOOD] |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/stream.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/serialized_channel.cpp |43.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part2/flake8 >> test.py::flake8 [GOOD] |43.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/flake8 >> test_restarts.py::flake8 [GOOD] |43.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/ydb_cli/flake8 >> test_ydb_table.py::flake8 [GOOD] >> test_generate_dynamic_config.py::flake8 [GOOD] |43.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part3/flake8 >> test.py::flake8 [GOOD] |43.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/serializable/flake8 >> test.py::flake8 [GOOD] |43.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/large/flake8 >> test_leader_start_inflight.py::flake8 [GOOD] |43.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part17/flake8 >> test.py::flake8 [GOOD] |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/tracing/allocation_tags.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/token.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/threading/spin_wait_slow_path_logger.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/threading/thread.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/utilex/random.cpp |43.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/stress/log/tests/flake8 >> test_workload.py::flake8 [GOOD] |43.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/olap/flake8 >> test_quota_exhaustion.py::flake8 [GOOD] |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/stack.cpp |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/null_consumer.cpp |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/consumer.cpp |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/attributes_stripper.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/attribute_consumer.cpp |43.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part8/flake8 >> test.py::flake8 [GOOD] |43.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_data_cleanup/ydb-core-tx-datashard-ut_data_cleanup |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/lexer.cpp |43.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts |43.5%| [TS] {asan, default-linux-x86_64, release} 
ydb/tests/fq/streaming_optimize/flake8 >> test_sql_streaming.py::flake8 [GOOD] |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/producer.cpp |43.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/library/ut/ydb-tests-library-ut |43.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/canonical/flake8 >> test_sql.py::flake8 [GOOD] |43.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache |43.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/plans/flake8 >> test_stats_mode.py::flake8 [GOOD] |43.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/flake8 >> test_scheme_shard_operations.py::flake8 [GOOD] |43.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut |43.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/flake8 >> test_update_script_tables.py::flake8 [GOOD] |43.7%| [TS] {asan, default-linux-x86_64, release} ydb/tests/sql/flake8 >> test_kv.py::flake8 [GOOD] |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/service_detail.cpp |43.7%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part10/flake8 >> test.py::flake8 [GOOD] |43.7%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/postgresql/flake8 >> test_postgres.py::flake8 [GOOD] |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/list_verb_lazy_yson_consumer.cpp |43.7%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/common/flake8 >> test_unknown_data_source.py::flake8 [GOOD] |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/ypath_designated_consumer.cpp |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_merger.cpp |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/pull_parser_deserialize.cpp |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_filter.cpp |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_builder_stream.cpp |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/tokenizer.cpp |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/expr_nodes/libproviders-solomon-expr_nodes.a |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string.cpp |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dot_product/liblibrary-cpp-dot_product.a |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/writer.cpp |43.7%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/flake8 >> test_quoting.py::flake8 [GOOD] |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/wheel/libpy3contrib-python-wheel.global.a |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/token_writer.cpp |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/runtime_py3/main/libpython-runtime_py3-main.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/filter/libpy3python-testing-filter.global.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/yarl/libpy3contrib-python-yarl.global.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/serializations/libproviders-s3-serializations.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/arrow/libproviders-common-arrow.a |43.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/viewer/tests/flake8 >> test.py::flake8 [GOOD] |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop.cpp |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/pull_parser.cpp |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ydb/py3/libpy3python-ydb-py3.global.a |43.8%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncquorum_ut.cpp |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/sqs/libpy3tests-library-sqs.global.a |43.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/objcopy_3efa41af97c0510be1d2e99f05.o |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/pytest/libpy3library-python-pytest.global.a |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/ut/sha256_ut.cpp |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/tornado/tornado-4/libpy3python-tornado-tornado-4.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/sse41/libinternal-proxies-sse41.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/psutil/py3/libpy3python-psutil-py3.global.a |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/feature_flags_configurator_ut.cpp |43.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/objcopy_5865a174a6c25ca1a2d6386702.o |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_localwriter_ut.cpp |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/load_test/ut/group_test_ut.cpp |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/ut/message_delay_stats_ut.cpp |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/address_classification/net_classifier_ut.cpp |43.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/objcopy_9c56ea1b7d34c7d8f6329bfcfd.o |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/numpy/py3/numpy/random/libpy3py3-numpy-random.global.a |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/topic_filter_ut.cpp |43.9%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/config/flake8 >> test_generate_dynamic_config.py::flake8 [GOOD] |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_move_reboots/ut_move_reboots.cpp >> conftest.py::flake8 [GOOD] >> test_auth_system_views.py::flake8 [GOOD] >> test_create_users.py::flake8 [GOOD] >> test_create_users_strict_acl_checks.py::flake8 [GOOD] >> test_db_counters.py::flake8 [GOOD] >> test_dynamic_tenants.py::flake8 [GOOD] >> test_publish_into_schemeboard_with_common_ssring.py::flake8 [GOOD] >> test_storage_config.py::flake8 [GOOD] >> test_system_views.py::flake8 [GOOD] >> test_tenants.py::flake8 [GOOD] >> test_user_administration.py::flake8 [GOOD] >> test_users_groups_with_acl.py::flake8 [GOOD] |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/s3transfer/py3/libpy3python-s3transfer-py3.global.a >> test_fifo_messaging.py::flake8 [GOOD] |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_replication_reboots/ut_replication_reboots.cpp |43.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/with_quotas/objcopy_245adf3e28f56e6467e034d9f2.o |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_external_table_reboots/ut_external_table_reboots.cpp |43.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/ttl_tiering/objcopy_52647c3535f2451207dfa29a87.o |43.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/multinode/objcopy_1c18035bb4b3759d5e029db746.o >> test_generic_messaging.py::flake8 [GOOD] >> test_polling.py::flake8 [GOOD] |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_read_iterator_ext_blobs.cpp |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_info_types.cpp |43.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs |43.9%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/tx/datashard/ut_reshuffle_kmeans/ydb-core-tx-datashard-ut_reshuffle_kmeans |43.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/sql/ydb-tests-sql |43.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/nodewarden/ut_sequence/dsproxy_config_retrieval.cpp |43.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_table_pg_types.cpp |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_commit_redo_limit.cpp >> test_account_actions.py::flake8 [GOOD] |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_table_decimal_types.cpp |43.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut |44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/olap/high_load/read_update_write.cpp |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libf2c/libcontrib-libs-libf2c.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/common/libpy3tests-olap-common.global.a |44.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication >> test_acl.py::flake8 [GOOD] >> test_counters.py::flake8 [GOOD] >> test_format_without_version.py::flake8 [GOOD] >> test_garbage_collection.py::flake8 [GOOD] >> test_multiplexing_tables_format.py::flake8 [GOOD] >> test_ping.py::flake8 [GOOD] >> test_queue_attributes_validation.py::flake8 [GOOD] >> test_queue_counters.py::flake8 [GOOD] >> test_queue_tags.py::flake8 [GOOD] >> test_queues_managing.py::flake8 [GOOD] >> test_throttling.py::flake8 [GOOD] |44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/format_handler_ut.cpp |44.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/mem_alloc/objcopy_12d01741952bd4afa836364d84.o |44.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_build_index/ydb-core-tx-datashard-ut_build_index |44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/console_ut_configs.cpp |44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/ut/infly_ut.cpp |44.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/objcopy_5db899a01c2ec6f53648af6840.o |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/highwayhash/arch/sse41/libhighwayhash-arch-sse41.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unicode/punycode/libcpp-unicode-punycode.a |44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_read_iterator.cpp |44.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/log/tests/ydb-tests-stress-log-tests |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/ip_base/lib/libcommon-ip_base-lib.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/logs/dsv/libdsv_udf.global.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/compress_base/lib/libcommon-compress_base-lib.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/numpy/py3/numpy/random/libpy3py3-numpy-random.a |44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/storage_tenant_ut.cpp |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/sfh/libcpp-digest-sfh.a |44.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/pg/libessentials-sql-pg.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/croaring/libcontrib-libs-croaring.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/roaring/libroaring.global.a 
|44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/hyperloglog/liblibrary-cpp-hyperloglog.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/histogram/adaptive/libcpp-histogram-adaptive.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/top_keeper/libcpp-containers-top_keeper.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/histogram/adaptive/protos/libhistogram-adaptive-protos.a |44.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/topic_parser_ut.cpp |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/highwayhash/arch/avx2/libhighwayhash-arch-avx2.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/knn/libknn_udf.global.a |44.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_followers.cpp |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/datetime/libdatetime_udf.global.a |44.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydbd/export.cpp |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/highwayhash/libcontrib-libs-highwayhash.a |44.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/objcopy_ec3163328cb5ab8f222e66dd41.o |44.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/objcopy_d1ba757d227a70ff4910717854.o |44.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/xz/libcpp-streams-xz.a |44.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/yson2/libyson2_udf.global.a |44.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/objcopy_4f92526e13553482736b942b2c.o |44.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/http_api_client/libpy3fq-libs-http_api_client.global.a |44.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/objcopy_7eade8c49389813f8c36b72b5b.o |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/keys/libydb-library-keys.a |44.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/objcopy_3209cda00462f2963f3cbbc912.o |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/timezone_conversion/liblibrary-cpp-timezone_conversion.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unicode/set/libcpp-unicode-set.a |44.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydbd/main.cpp |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/tld/liblibrary-cpp-tld.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/hyperscan/libhyperscan_udf.global.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/clapack/part2/liblibs-clapack-part2.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_topic/include/libclient-ydb_topic-include.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unicode/normalization/libcpp-unicode-normalization.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/digest/libdigest_udf.global.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/compress_base/libcompress_udf.global.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/recipe/libpy3python-testing-recipe.global.a |44.2%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/tenants/flake8 >> test_users_groups_with_acl.py::flake8 [GOOD] |44.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/tablet/rpc_restart_tablet_ut.cpp |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/regex/pire/libcpp-regex-pire.a |44.2%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/udfs/common/hyperloglog/libhyperloglog_udf.global.a |44.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/tablet/rpc_execute_mkql_ut.cpp |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/ip_base/libip_udf.global.a |44.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/0dff0b13f2d02975a4a973a1e8_raw.auxcpp |44.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/tablet/rpc_change_schema_ut.cpp |44.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/objcopy_bf578b7161cc94bf18488d04ca.o |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/json/libjson_udf.global.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/math/libmath_udf.global.a |44.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/query_cache/objcopy_1a397c908c9859dc40a771ddf1.o |44.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/ut/objcopy_cd9abca883cad9b25e20bf2f08.o |44.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/query_cache/objcopy_e31620202d3ba8df14ff2a18e1.o |44.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/ut/objcopy_6508d12aaafde6f0a60fe8fff3.o |44.2%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/flake8 >> test_polling.py::flake8 [GOOD] |44.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/ut/objcopy_96d3f689fe7d6b16f9da31e376.o |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/json2/libjson2_udf.global.a |44.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/query_cache/objcopy_f8b2cbafb1fed0e25bf9683c2d.o |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/histogram/libhistogram_udf.global.a |44.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/sequenceshard/ut_helpers.cpp |44.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/graph/ut/ydb-core-graph-ut |44.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut |44.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/objcopy_55f2556d6eafcd77ebc4c517d4.o |44.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/objcopy_f928a40774b17a9d6cd7cabd2c.o |44.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/scenario/ydb-tests-olap-scenario |44.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/kv/tests/ydb-tests-stress-kv-tests |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/math/lib/libcommon-math-lib.a |44.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/objcopy_e7477203b27fa0321cf18fd7ee.o |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/re2/libre2_udf.global.a |44.2%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/test/test_import/libtest_import_udf.so |44.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_data_cleanup.cpp |44.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/sequenceshard/ut_sequenceshard.cpp |44.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_local_kmeans/ydb-core-tx-datashard-ut_local_kmeans |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/pire/libpire_udf.global.a |44.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/test/objcopy_9818d2b70aad7db98a0f9c044c.o |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/stat/libstat_udf.global.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/stat/static/libcommon-stat-static.a |44.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/test/objcopy_45b6981aed17dda33d43217f52.o |44.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/test/objcopy_023a23fcfdf79043d814bb8aab.o |44.3%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/library/yql/providers/solomon/actors/ut/ydb-library-yql-providers-solomon-actors-ut |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/test_meta/libpy3tests-library-test_meta.global.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/url_base/lib/libcommon-url_base-lib.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/unicode_base/lib/libcommon-unicode_base-lib.a |44.4%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/common/flake8 >> test_throttling.py::flake8 [GOOD] |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/topfreq/libtopfreq_udf.global.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/datetime2/libdatetime2_udf.global.a |44.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/mem_alloc/objcopy_19dadf8afeb30502d735b660ce.o |44.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/with_quotas/objcopy_0553360a969b2c9633badb428d.o |44.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut |44.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_base.cpp |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/set/libset_udf.global.a |44.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/configs_cache_ut.cpp |44.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/with_quotas/objcopy_31d605682329607481eb568ed0.o |44.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats |44.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/ydb/ydb |44.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops |44.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut |44.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe |44.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/olap_workload/tests/ydb-tests-stress-olap_workload-tests |44.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_filestore_reboots/ut_filestore_reboots.cpp |44.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/locks/ut_range_treap/ydb-core-tx-locks-ut_range_treap |44.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/workload_service/ut/kqp_workload_service_tables_ut.cpp |44.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/topfreq/static/libcommon-topfreq-static.a |44.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut |44.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_prefix_kmeans/ydb-core-tx-datashard-ut_prefix_kmeans |44.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable |44.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/url_base/liburl_udf.global.a |44.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/sdk_sessions_ut/ydb-services-ydb-sdk_sessions_ut |44.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr |44.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/ttl_tiering/objcopy_d1da8f48b4e80ef5678b1197a3.o |44.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/unicode_base/libunicode_udf.global.a |44.4%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/ut/common/libworkload_service-ut-common.a |44.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/query_replay/ydb_query_replay |44.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors |44.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/ut/ydb-core-security-ut |44.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut |44.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut |44.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_broker_ut.cpp |44.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/45be6e48ea8f2ac38577085d0d_raw.auxcpp |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/fixtures/libpy3tests-library-fixtures.global.a |44.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/objcopy_bfa810e70cd1de18c5d4a18a62.o |44.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/minikql_compile/yql_expr_minikql_compile_ut.cpp |44.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/ttl_tiering/objcopy_0664e2ab2eb37ae9f02538e483.o |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/tornado/tornado-4/libpy3python-tornado-tornado-4.global.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/lib/libpy3tests-sql-lib.global.a |44.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/ut/metering_ut.cpp |44.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/objcopy_f738234258cd034cd5383f92ad.o |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/responses/py3/libpy3python-responses-py3.global.a |44.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/objcopy_bf6c9c02784d65e20a01685ce8.o |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Werkzeug/py3/libpy3python-Werkzeug-py3.global.a |44.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/objcopy_2f0e0ac8198858b9ec9901778e.o |44.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_rs.cpp |44.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_reshuffle_kmeans.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/quoter_service_ut.cpp |44.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/log/tests/objcopy_2f7ac0f750374152d13c6bfbcf.o |44.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/log/tests/objcopy_4f9d76a39d2f7ba2b9f198f28c.o |44.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/log/tests/objcopy_854d6cc7a0cc5cdd793cfc1e6d.o |44.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/kesus_quoter_ut.cpp |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/find_root/libpy3library-python-find_root.global.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/numpy/py3/libpy3python-numpy-py3.global.a |44.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/tests/objcopy_340b457b8174f6293d5748588e.o |44.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/tests/objcopy_6b37760fb6a28054d0feafd61d.o |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pytz/py3/libpy3python-pytz-py3.global.a |44.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/tests/objcopy_5923b362516b6632b9769a5db2.o |44.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/log_settings_configurator_ut.cpp |44.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/ut_sequence/datashard_ut_sequence.cpp |44.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_replication.cpp |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/pytest/plugins/libpy3python-pytest-plugins.global.a |44.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_build_index.cpp 
|44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/retry/libpy3library-python-retry.global.a |44.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/jaeger_tracing_configurator_ut.cpp |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/ut_blobstorage/lib/libblobstorage-ut_blobstorage-lib.a |44.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/immediate_controls_configurator_ut.cpp |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/recipe/libpy3kqprun_recipe.global.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/libcpp/ut_helpers/libconnector-libcpp-ut_helpers.a |44.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_stats.cpp |44.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/recipe/objcopy_dcbdf62672440a626e79a64e14.o |44.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/long_tx_service/long_tx_service_ut.cpp |44.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/multinode/objcopy_10b0cfa01297f7d7392eb4d9e4.o |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/top/libtop_udf.global.a |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/ut_helpers.cpp |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/configs_dispatcher_ut.cpp |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/py/py3/libpy3python-py-py3.global.a |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/generic/actors/ut/yql_generic_lookup_actor_ut.cpp |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/console_ut_tenants.cpp |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/common/libformat_handler-ut-common.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/io/libcpp-http-io.a |44.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/mem_alloc/objcopy_15e284a8ecb30c90903e842e70.o |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/moto/py3/libpy3python-moto-py3.global.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/recipes/common/libpy3library-recipes-common.global.a |44.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/multinode/objcopy_b306c2955ce13e6db6cae73363.o |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_data_ut.cpp |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/ut/attributes_md5_ut.cpp |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/net_classifier_updater_ut.cpp |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/workload_service/ut/kqp_workload_service_actors_ut.cpp |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/graph/ut/graph_ut.cpp |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/chunk_queue/libcpp-threading-chunk_queue.a |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/modifications_validator_ut.cpp |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/colorama/py3/libpy3python-colorama-py3.global.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/allure-python-commons/libpy3contrib-python-allure-python-commons.global.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Flask/py3/libpy3python-Flask-py3.global.a >> conftest.py::flake8 [GOOD] >> s3_helpers.py::flake8 [GOOD] >> test_bindings_0.py::flake8 [GOOD] >> test_bindings_1.py::flake8 [GOOD] >> test_compressions.py::flake8 [GOOD] >> test_early_finish.py::flake8 [GOOD] >> test_empty.py::flake8 [GOOD] >> test_explicit_partitioning_0.py::flake8 [GOOD] >> test_explicit_partitioning_1.py::flake8 [GOOD] >> test_format_setting.py::flake8 [GOOD] >> test_formats.py::flake8 [GOOD] >> test_inflight.py::flake8 [GOOD] >> test_insert.py::flake8 [GOOD] >> test_public_metrics.py::flake8 [GOOD] >> 
test_push_down.py::flake8 [GOOD] >> test_s3_0.py::flake8 [GOOD] >> test_s3_1.py::flake8 [GOOD] >> test_size_limit.py::flake8 [GOOD] >> test_statistics.py::flake8 [GOOD] >> test_streaming_join.py::flake8 [GOOD] >> test_test_connection.py::flake8 [GOOD] >> test_ydb_over_fq.py::flake8 [GOOD] >> test_yq_v2.py::flake8 [GOOD] |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_kafka_functions.cpp |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Flask-Cors/py3/libpy3python-Flask-Cors-py3.global.a |44.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kv/tests/objcopy_c7c229be41e9b028572ad1aab3.o |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/allure-pytest/libpy3contrib-python-allure-pytest.global.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/googleapis-common-protos/libpy3contrib-libs-googleapis-common-protos.global.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/helpers/libpy3olap-scenario-helpers.global.a |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/solomon/actors/ut/ut_helpers.cpp |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ut_utils/libpersqueue_public-ut-ut_utils.a |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/metarequest_ut.cpp |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_serialization.cpp |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/common/libpy3client-yc_public-common.global.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/itsdangerous/py3/libpy3python-itsdangerous-py3.global.a |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/kafka_test_client.cpp |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/port_discovery_ut.cpp |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/workload_service/ut/kqp_workload_service_ut.cpp |44.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/objcopy_36807918bd7a86c1ea37310c9c.o |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_local_kmeans.cpp |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/solomon/actors/ut/dq_solomon_write_actor_ut.cpp |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/click/py3/libpy3python-click-py3.global.a |44.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/01e1cebcd98e239de10ed70b94_raw.auxcpp |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/ut_helpers/libproviders-common-ut_helpers.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/iam/libpy3client-yc_public-iam.global.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/lib/libpy3tests-olap-lib.global.a |44.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kv/tests/objcopy_71d73932c95681fccfc7215041.o |44.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kv/tests/objcopy_5294a064c14cf5a49516321590.o |44.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/objcopy_8e9f839326d1a9224e4b2e15e2.o |44.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/datetime/libdatetime_udf.so |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/time_cast/time_cast_ut.cpp |44.9%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/re2/libre2_udf.so |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/transfer_workload/libtransfer_workload.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/clickbench/liblibrary-workload-clickbench.global.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/log/liblibrary-workload-log.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/clickbench/liblibrary-workload-clickbench.a |44.9%| [LD] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/udfs/common/pire/libpire_udf.so |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpch/liblibrary-workload-tpch.global.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/csv/table/libarrow-csv-table.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/gen/tpch-dbgen/libbenchmarks-gen-tpch-dbgen.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/queries/tpcds/libbenchmarks-queries-tpcds.global.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/benchmark_base/liblibrary-workload-benchmark_base.a |45.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/keyvalue/grpc_service_ut.cpp |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ut_helpers/libpublic-lib-ut_helpers.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/queries/tpch/libbenchmarks-queries-tpch.global.a |44.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/apps/ydb/objcopy_774cbd1f10ee287899289ecb3f.o |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/bucket_quoter/liblibrary-cpp-bucket_quoter.a |45.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/sentinel_ut_unstable.cpp |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lz/lz4/libstreams-lz-lz4.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/topic_workload/libtopic_workload.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lz/snappy/libstreams-lz-snappy.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/local_executor/libcpp-threading-local_executor.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/gen/tpcds-dbgen/libbenchmarks-gen-tpcds-dbgen.global.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lz/libcpp-streams-lz.a |45.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut |44.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/antlr4-c3/libcontrib-libs-antlr4-c3.a |44.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_protocol.cpp |44.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydb/main.cpp |44.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yt/yt/core/libyt-yt-core.a |44.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/dump/liblib-ydb_cli-dump.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/gen/tpcds-dbgen/libbenchmarks-gen-tpcds-dbgen.a |44.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots |44.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/factory/open_by_signature/libstreams-factory-open_by_signature.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/ydb/commands/libcommands.a |44.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/tpc/large/ydb-tests-functional-tpc-large |44.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp |44.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut |44.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost |44.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_ut_common.cpp |44.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut |44.2%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/restricted/patched/replxx/librestricted-patched-replxx.a |44.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/log/liblibrary-workload-log.global.a |44.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange |44.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/ut/ydb-core-client-ut |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpc_base/liblibrary-workload-tpc_base.global.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpc_base/liblibrary-workload-tpc_base.a |44.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test |44.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_sample_k/ydb-core-tx-datashard-ut_sample_k |44.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/tpc/medium/ydb-tests-functional-tpc-medium |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/stat_visualization/libpublic-lib-stat_visualization.a |44.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/query_actor/query_actor_ut.cpp |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/libydb_cli-commands-interactive.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/complete/libcommands-interactive-complete.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/ydb_recipe/libpy3ydb_recipe.global.a |44.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/numpy/py3/libpy3python-numpy-py3.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/antlr_ast/gen/v1_antlr4/libantlr_ast-gen-v1_antlr4.a |44.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/highlight/libcommands-interactive-highlight.a |44.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots |44.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpcds/liblibrary-workload-tpcds.global.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/ut_helpers/liblibs-quota_manager-ut_helpers.a |44.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_prefix_kmeans.cpp |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/common/libpy3tests-stress-common.global.a |44.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/monitoring/libsrc-client-monitoring.a |44.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/locks/range_treap_ut.cpp |44.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/ydb_cli/ydb-tests-functional-ydb_cli |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/topic/libtopic.a |44.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut |44.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader |44.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/lib/cmds/libpy3tools-lib-cmds.global.a 
|44.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut |44.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects |44.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/workload/libpy3stress-olap_workload-workload.global.a |44.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move |44.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |44.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table |44.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |44.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test |44.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/config/libsrc-client-config.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/import/liblib-ydb_cli-import.a |44.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std |44.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/tests/objcopy_8649dacdf340abe7c53df69638.o |44.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence |44.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/tests/objcopy_8e19d47784789c55156c57f816.o |44.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/oltp_workload/tests/ydb-tests-stress-oltp_workload-tests |44.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan |44.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber |44.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login |44.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/ydb_recipe/objcopy_c55121179eeb3b5753498290c4.o |44.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/tests/objcopy_9be8b6745d0fa150928bab4206.o |44.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/objcopy_0ab925f82bbba07bf3b749dc3c.o |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/cms/libsrc-client-cms.a |44.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/engine/ut/ydb-core-engine-ut |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/debug/libsrc-client-debug.a |44.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller |44.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant |44.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut |44.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/stress_tool/ydb_stress_tool |44.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpcds/liblibrary-workload-tpcds.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/text/libv1-complete-text.a |44.6%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/workload/tpch/liblibrary-workload-tpch.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/libsql-v1-complete.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/static/libcomplete-name-static.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/antlr4_pure_ansi/libv1-lexer-antlr4_pure_ansi.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/antlr4_pure/libv1-lexer-antlr4_pure.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/syntax/libv1-complete-syntax.a |44.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/objcopy_656baae3c1e24959f5bcc457d7.o |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/factory/open_common/libstreams-factory-open_common.a |44.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_range_ops.cpp |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay/main.cpp |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_column_stats.cpp |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay/query_replay.cpp |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tbb/libcontrib-libs-tbb.a |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay/query_compiler.cpp |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/backup/libkikimr_backup.a |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay/query_proccessor.cpp |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/service/ut/ut_http_request.cpp |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/oauth/libclient-yc_private-oauth.a |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_rtmr/ut_rtmr.cpp |45.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/ut/helper.cpp |45.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/service/ut/ut_basic_statistics.cpp |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/statistics/ut_common/libcore-statistics-ut_common.a |45.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_errors.cpp |45.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/ut/ut_script.cpp |45.0%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/s3/flake8 >> test_yq_v2.py::flake8 [GOOD] |45.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/ut/ut_insert_table.cpp |45.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/service/ut/ut_column_statistics.cpp |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/antlr_ast/gen/v1_ansi_antlr4/libantlr_ast-gen-v1_ansi_antlr4.a |45.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/sequenceproxy/sequenceproxy_ut.cpp |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/hooks/testing/libcolumnshard-hooks-testing.a |45.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/control_plane_proxy/ut/control_plane_proxy_ut.cpp |45.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/ut/ut_logs_engine.cpp |45.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/ut/ut_program.cpp |45.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/sdk_sessions_ut/sdk_sessions_ut.cpp |45.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/viewer/ut/ut_utils.cpp |45.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/codicil_guarded_invoker.cpp |45.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/snappy.cpp |45.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/ticket_parser_ut.cpp |45.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/pollable_detail.cpp |45.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/pattern_formatter.cpp |45.4%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/public/udf/service/stub/libudf-service-stub.global.a |45.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/viewer/topic_data_ut.cpp |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gtest_main/libcpp-testing-gtest_main.a |45.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/large/objcopy_ab26d720c654ba47c2acacaa33.o |45.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/yson_struct_detail.cpp |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gtest/libcpp-testing-gtest.a |45.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/gateway/ut/metadata_conversion.cpp |45.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/load_test/ut_ycsb.cpp |45.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_stream_lookup.cpp |45.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/compress_executor_ut.cpp |45.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/yson_struct.cpp |45.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_kqp.cpp |45.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/ypath_designated_consumer.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/exception_helpers.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/yson_struct_update.cpp |45.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ydb_cli/objcopy_e9b644ce7912a7deb04bbf40a8.o |45.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ydb_cli/objcopy_903d4758faea71f1363e296b3f.o |45.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/common_ut.cpp |45.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/config.cpp |45.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/virtual.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/helpers.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/token_writer.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/health_check/health_check_ut.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/ydb_state_storage_ut.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/service_combiner.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/yson_builder.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ephemeral_node_factory.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_service.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_client.cpp |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/yql_testlib/libydb-core-yql_testlib.a |45.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/objcopy_4ea639aebd19c36ee3cdb4479d.o |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/oltp_workload/workload/libpy3stress-oltp_workload-workload.global.a |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_olapstore_ut.cpp |45.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/objcopy_242486256e1af973cd1d5376d1.o |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/ut_helpers/libtx-replication-ut_helpers.a |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/writer.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_resolver.cpp |45.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/objcopy_2efdf95387a81f55cf9c81071a.o |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/rm_service/kqp_rm_ut.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/sysview/kqp_sys_view_ut.cpp |45.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/oltp_workload/tests/objcopy_895e78a038dc7069fda56c2e82.o |45.7%| 
[CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_detail.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/basic_usage_ut.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/retry_policy_ut.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/tracing/allocation_tags.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/token.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/subscriber_ut.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/service_discovery/service_discovery.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/throttling_channel.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scan/kqp_split_ut.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/viable_peer_registry.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/message_format.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/hedging_channel.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/group_mapper_ut.cpp |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/libclicommands.a |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/type_def.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/schemas.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/client.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/slab_allocator.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_callbacks.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/fair_share_hierarchical_queue.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_inplace_update_ut.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/helpers.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/ares_dns_resolver.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/fs.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/dns_resolver.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/config.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/read_session_ut.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/mkql_engine_flat_ut.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_login/ut_login.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/config.cpp |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_wrapper/libessentials-parser-pg_wrapper.a |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/config.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/tls.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_restore/ut_restore.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/periodic_yielder.cpp |45.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tools/solomon_emulator/recipe/solomon_recipe |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/profiling_helpers.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fiber_manager.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/periodic_executor.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fiber.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/delayed_executor.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_invoker_pool.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_rw_lock.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/execution_stack.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_action_queue.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/core/actions/current_invoker.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_throttler.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_invoker_queue.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_queue_scheduler_thread.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/stream.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/zlib.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_barrier.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/zstd.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/lzma.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/lz.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_stream.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/nonblocking_batcher.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/invoker_alarm.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/dictionary_codec.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fls.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/bzip2.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/notify_manager.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/lease_manager.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/codec.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/brotli.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fiber_scheduler_thread.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/invoker_queue.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/ssl_helpers.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_affinity.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/suspendable_action_queue.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/ssl_context.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/packet.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/dispatcher.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/local_bypass.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_util.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/public.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_pool.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/server.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/client.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/retrying_periodic_executor.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/system_invokers.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/dispatcher_impl.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/single_queue_scheduler_thread.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/scheduled_executor.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/propagating_storage.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/scheduler_thread.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/quantized_executor.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_detail.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/future.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/cancelation_token.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/two_level_fair_share_thread_pool.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool_poller.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/core/concurrency/thread_pool_detail.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/crypto.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/cancelable_context.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_thread_pool.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_parser.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/throughput_throttler.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_writer.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/adjusted_exponential_moving_average.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/connection.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/logger_owner.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/config.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/compression.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/secret/ut/ut_secret.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/stream_output.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/process_exit_profiler.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/log_writer_detail.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/fluent_log.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/random_access_gzip.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bit_packed_unsigned_vector.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/formatter.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker_statistics_producer.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/file_log_writer.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/zstd_compression.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bitmap.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/serializable_logger.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/stream_log_writer.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bloom_filter.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/blob_output.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/system_log_event_provider.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/backoff_strategy.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/codicil.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/configurable_singleton_def.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/error.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bit_packing.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/public.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/coro_pipe.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/arithmetic_formula.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/parser_helpers.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/linear_probe.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/hedging_manager.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/signal_registry.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/pool_allocator.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/id_generator.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/shutdown.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/cache_config.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/hazard_ptr.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/protobuf_helpers.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/digest.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/histogram.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/core/misc/config.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker_profiler.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/random.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/serialize.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/checksum.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/proc.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/load.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/zerocopy_output_writer.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/dialer.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/viewer/viewer_ut.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/statistics.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/config.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/statistic_path.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/utf8_decoder.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/connection.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/config.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/context.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/address.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/socket.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/listener.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/descriptors.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/helpers.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/authenticator.cpp |46.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/pq_read |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/authentication_identity.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/local_address.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/profiling/timing.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/relaxed_mpsc_queue.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/type_registry.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/balancing_channel.cpp |46.4%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/string/libstring_udf.so |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/channel_detail.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/bus/server.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/local_server.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/dispatcher.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/public.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/helpers.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/public.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/message.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/serialized_channel.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/service_detail.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/local_channel.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/peer_discovery.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/roaming_channel.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/per_key_request_queue_provider.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/bus/channel.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/protocol_version.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/dynamic_channel_pool.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/request_queue_provider.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/retrying_channel.cpp |46.4%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/datetime2/libdatetime2_udf.so |46.4%| [CC] 
{BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/static_channel_factory.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/utilex/random.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/syntax_checker.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/tracing/public.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/forwarding_consumer.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/response_keeper.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/tokenizer.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/service.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/threading/thread.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/caching_channel_factory.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/server_detail.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/threading/spin_wait_slow_path_logger.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/null_channel.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/async_writer.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/stack.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/helpers.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/token.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/depth_limiting_yson_consumer.cpp |46.5%| [LD] {BAZEL_DOWNLOAD} $(B)/contrib/python/moto/bin/moto_server |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/attribute_consumer.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/async_consumer.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/attributes_stripper.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/config.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/consumer.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/null_consumer.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop_options.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/list_verb_lazy_yson_consumer.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/producer.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/lexer.cpp |46.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/s3_recipe/s3_recipe |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_merger.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop_unknown_fields.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ydb_cli/e01aded916ad04e888f13223cf_raw.auxcpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/stream.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/grouper_ut.cpp |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_filter.cpp |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_builder_stream.cpp |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/pull_parser_deserialize.cpp |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/libpy3functional-sqs-merge_split_common_table.global.a |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/parser.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/stream.cpp |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyarrow/libpy3contrib-python-pyarrow.global.a |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/mv_object_map_ut.cpp |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/nodewarden/bind_queue_ut.cpp |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/pull_parser.cpp |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/backup/impl/local_partition_reader_ut.cpp |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/ut/helpers/libmkql_proto-ut-helpers.a 
|46.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/oltp_workload/tests/objcopy_0446f521b26a2e8128f94ac50f.o |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/hive_metastore/ut/common.cpp |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/idx_test/ydb_index_ut.cpp |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/mkql_engine_flat_host_ut.cpp |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop.cpp |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scan/kqp_flowcontrol_ut.cpp |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/stress_tool/device_test_tool_ut.cpp |46.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/oltp_workload/tests/objcopy_49a1ca9559288648fba9cf7b65.o |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_write_ut.cpp |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/test/libs/table/libtest-libs-table.a |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/mkql_proto_ut.cpp |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_effects_ut.cpp |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_sequence/ut_sequence.cpp |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/hive_metastore/libcore-external_sources-hive_metastore.a |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/ut_backup_collection_reboots.cpp |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/hive_metastore/ut/hive_metastore_fetcher_ut.cpp |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attribute_filter.cpp |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/sysview/kqp_sys_col_ut.cpp |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/stress_tool/lib/libydb_device_test.a |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/stress_tool/device_test_tool.cpp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/kikimr_program_builder_ut.cpp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_user_attributes/ut_user_attributes.cpp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_backup/ut_backup.cpp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/data_integrity/kqp_data_integrity_trails_ut.cpp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/hive_metastore/ut/hive_metastore_client_ut.cpp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/ut_bscontroller/main.cpp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attribute_consumer.cpp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ext_tenant_ut.cpp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_logstore_ut.cpp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_immediate_effects_ut.cpp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_external_table/ut_external_table.cpp |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/query_stats/query_stats_ut.cpp |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_move/ut_move.cpp |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scan/kqp_scan_ut.cpp |46.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut |46.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_export/ut_export.cpp |46.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl |46.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens |46.7%| [LD] 
{BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql |46.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/kqp/kqp_indexes/ydb-tests-functional-kqp-kqp_indexes |46.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut |46.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_data_erasure_reboots/ydb-core-tx-schemeshard-ut_data_erasure_reboots |46.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/generic/streaming/ydb-tests-fq-generic-streaming |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/stress_tool/proto/libtools-stress_tool-proto.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/arrow/cpp/src/arrow/python/libpy3src-arrow-python.a |46.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication |46.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut |46.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms |46.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order |46.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/ydb-public-sdk-cpp-src-client-federated_topic-ut |46.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut |46.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg |46.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots |46.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup |46.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/sdk_sessions_pool_ut/ydb-services-ydb-sdk_sessions_pool_ut |46.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut |46.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/s3_import/ydb-tests-olap-s3_import |46.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query |46.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/ut/ydb-core-cms-ut |46.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain |46.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan |46.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut |46.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut |46.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut |46.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw |46.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge |46.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_transfer/ydb-core-tx-schemeshard-ut_transfer |46.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut |46.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/tokenizer.cpp |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/idx_test/libpublic-lib-idx_test.a |46.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init |46.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut 
|46.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing |46.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/kqp/kqp_query_svc/ydb-tests-functional-kqp-kqp_query_svc |46.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut |46.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream |46.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile |46.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage |46.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytalloc/statistics_producer.cpp |46.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut |46.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator |46.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut |46.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut |46.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ydb_cli/objcopy_c77713875cf17988efd8fc0fb3.o |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/ypath_filtering_consumer.cpp |46.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/load/ydb-tests-olap-load |46.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ydb_cli/objcopy_133cf22e149f008572dab30674.o |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ephemeral_attribute_owner.cpp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytalloc/bindings.cpp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attributes.cpp |46.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/convert.cpp |46.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index_build_reboots/ut_index_build_reboots.cpp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/permission.cpp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytalloc/config.cpp |46.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/size.cpp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/nodewarden/blobstorage_node_warden_ut.cpp |46.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer |46.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/node.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_index_table_ut.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/interned_attributes.cpp |46.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/solomon/ydb-tests-fq-solomon |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/request_complexity_limits.cpp |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/request_complexity_limiter.cpp |46.9%| [PY] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/functional/tpc/large/objcopy_a4b303e939cc32858d35564cac.o |46.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/medium/objcopy_ccde7a40b2fd2886f22cd46a85.o |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/node_detail.cpp |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/testlib/service_mocks/ldap_mock/libtestlib-service_mocks-ldap_mock.a |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/quota_requester.cpp |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/tree_visitor.cpp |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_import_ut.cpp |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_query_ut.cpp |47.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/medium/objcopy_e5d897582dc0fbda7c578cb53f.o |47.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/medium/objcopy_c8e04cf4d110f8c670988beb0f.o |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_monitoring_ut.cpp |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_ldap_login_ut.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_login_ut.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_object_storage_ut.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_bulk_upsert_olap_ut.cpp |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyarrow/libpy3contrib-python-pyarrow.a |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/server.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/main.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_scripting_ut.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/tree_builder.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/cancel_tx_ut.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_coordination_ut.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_sample_k.cpp |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/test/libs/rows/libtest-libs-rows.a |47.1%| [PK] {default-linux-x86_64, release, asan} $(B)/yql/essentials/tests/common/test_framework/udfs_deps/{common-test_framework-udfs_deps.final.pkg.fake ... 
yql/essentials/udfs/common/hyperscan/libhyperscan_udf.so} |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_stats_ut.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/lib/libpy3functional-tpc-lib.global.a |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/public.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_bulk_upsert_ut.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_register_node_ut.cpp |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/load/lib/libpy3olap-load-lib.global.a |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_semaphore.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/system_attribute_provider.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/gc_ut.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/coroutine.cpp |47.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/large/objcopy_703c8e1d9a9a2b271b8b995a29.o |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_stream_pipe.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/static_service_dispatcher.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/action_queue.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_looper.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/ydb_checkpoint_storage_ut.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/serialize.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/locks_ut.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/storage_service_ydb_ut.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/config.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/object_storage_listing_ut.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/flat_ut.cpp |47.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/kqp/kqp_query_session/ydb-tests-functional-kqp-kqp_query_session |47.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ttl/objcopy_0aefef587c181350d3a25f70e0.o |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/operation_helpers_ut.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_calls_ut.cpp |47.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ttl/objcopy_c068ee86eb127df13256bfbe45.o |47.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/wardens/objcopy_488333b1ebd4c1d6d8ec5bcb8f.o |47.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/transfer/ydb-tests-functional-transfer |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_change_exchange.cpp |47.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/wardens/objcopy_1555e67a3dd43a3e7f09bf8eee.o |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_serverless/ut_serverless.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_ut.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/yql/kqp_pragma_ut.cpp |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/python/yt/yson/libpy3python-yt-yson.global.a |47.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/objcopy_9ea5b1fb7a4f8e1b0b8d7cf345.o |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_collector_ut.cpp |47.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/basic_example/public-sdk-cpp-tests-integration-basic_example |47.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/s3_import/objcopy_ac7eeedcbf7038a60a7673762a.o |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/yql/kqp_yql_ut.cpp |47.2%| [LD] 
{BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/server_restart/public-sdk-cpp-tests-integration-server_restart |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/ut_helpers.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/downtime_ut.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/pg/pg_catalog_ut.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base_reboots/ut_base_reboots.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/yql/kqp_scripting_ut.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_types_ut.cpp |47.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/http_proxy/ut/objcopy_5fddfa8f171a3216cad65e02ab.o |47.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/streaming/objcopy_49bad8251d240ad7c49d384b91.o |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_table_ut.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/wrappers/s3_wrapper_ut.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/quota_tracker_ut.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/federated_topic/ut/basic_usage_ut.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/ut_rw/ut_backup.cpp |47.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/blobstorage/objcopy_1c0f807c059fe226699115f242.o |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_init.cpp |47.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/blobstorage/objcopy_790c6ea4aad5e761d21421b25d.o |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_object_storage_listing.cpp |47.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/blobstorage/objcopy_1515671fe2dfb16894dfbe901e.o |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_split_merge/ut_split_merge.cpp |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/monlib/libpy3library-python-monlib.global.a |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/ut_rw/ut_normalizer.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_common_pq.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/server/msgbus_server_pq_metarequest_ut.cpp |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/unistat/libmonlib-encode-unistat.a |47.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/backup/ydb-tests-functional-backup |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_stat.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_stats_ut.cpp |47.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/objcopy_484246668d943fbae3b476ec7d.o |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_versions.cpp |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/monlib/libpy3library-python-monlib.a |47.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sdk/cpp/sdk_credprovider/ydb-tests-functional-sdk-cpp-sdk_credprovider |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/result_formatter/result_formatter_ut.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/inside_ydb_ut/inside_ydb_ut.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/partition_ut.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_order.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/cache_eviction_ut.cpp |47.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/messaging/objcopy_1a75593bdb000d1e31dd6e96d5.o |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/hive_metastore/hive_metastore_native/libexternal_sources-hive_metastore-hive_metastore_native.a |47.4%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/persqueue/ut/fetch_request_ut.cpp |47.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_import/ydb-tests-fq-yt-kqp_yt_import |47.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/objcopy_0a29eb8c456ab5b998f2d12ba1.o |47.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/load/objcopy_a99732b1d02edd62e674483ffe.o |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_self.cpp |47.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/load/objcopy_2e1dd9c9bc385e6efd22b78136.o |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/libsrc-client-federated_topic.a |47.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/ut/objcopy_1d0482d354dc270d18e7123281.o |47.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/objcopy_34efc91ed920a8b27d971c44a6.o |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/certificate_check/cert_utils_ut.cpp |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/utils/libpy3connector-tests-utils.global.a |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/row_dispatcher_ut.cpp |47.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/streaming/objcopy_181bdcd1743e9a1a78fafe4b60.o |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/database/ut/ut_database.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/streaming/e294827eb799173498fe26d398_raw.auxcpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/populator_ut.cpp |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/prctl/libpy3library-python-prctl.global.a |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/ymq_ut.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/topic_session_ut.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/pqtablet_mock.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/partitiongraph_ut.cpp |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/simplejson/py3/libpy3python-simplejson-py3.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/gateway/dummy/libpq-gateway-dummy.a |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/certificate_check/cert_check_ut.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/user_info_ut.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/type_codecs_ut.cpp |47.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/ut/objcopy_41295709119857c2e0f1a41f31.o |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/utils_ut.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/pqrb_describes_ut.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_db_scheme.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_limits_ut.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_database_ut.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_memtable.cpp |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/library/libpy3tools-nemesis-library.global.a |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/partition_chooser_ut.cpp |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/persqueue/tests/liblibrary-persqueue-tests.a |47.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/ut/objcopy_c98e5b95c64b8486a12f10d408.o |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_cache_clock_pro_ut.cpp |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ut_utils/libtopic-ut-ut_utils.a |47.4%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tablet_flat/ut/ut_sausage.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ncloud/impl/access_service_ut.cpp |47.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/ut/objcopy_b06d27009e49b9ba3df883a226.o |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_cdc_stream/ut_cdc_stream.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_redo.cpp |47.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/s3_import/objcopy_9caa7583d1e4955730dbd6f3fd.o |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/sourceid_ut.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/json_proto_conversion_ut.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/pqtablet_ut.cpp |47.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/audit/objcopy_643fa2679e88d9b2d33558b050.o |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_incremental_backup.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_transfer/ut_transfer.cpp |47.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/s3_import/objcopy_6e536fb2c379a4ebe79c499de8.o |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_range_cache_ut.cpp |47.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/s3_import/objcopy_938861be99a6cedecb22904193.o |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_cache_switchable_ut.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_volatile.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_scan.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/proxy/ut_helpers.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/kinesis_ut.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_slice_loader.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_explain_ut.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_maintenance_api_ut.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cluster_info_ut.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_pages.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_ut_common.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_decimal.cpp |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/impl/libclient-federated_topic-impl.a |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/sdk_sessions_pool_ut/sdk_sessions_pool_ut.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/pq_ut.cpp |47.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots |47.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/autoconfig/ydb-tests-functional-autoconfig |47.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_extsubdomain/ut_extsubdomain.cpp |47.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol |47.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive |47.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel |47.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut |47.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl |47.6%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/columnshard/ut_rw/ut_columnshard_read_write.cpp |47.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut |47.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage |47.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots |47.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/replication/ydb-tests-functional-replication |47.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/counters_ut.cpp |47.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap |47.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots |47.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber |47.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/benchmark/core_tablet_flat_benchmark |47.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow |47.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut |47.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/common/ydb-tests-fq-common |47.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut |47.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat |47.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/tx-schemeshard-ut_vector_index_build_reboots |47.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/ut/ydb-core-sys_view-ut |47.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_ut.cpp |47.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_btree_index_iter_charge.cpp |47.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/column_family/compression/ydb-tests-olap-column_family-compression |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_query_ut.cpp |47.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat |47.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut |47.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep |47.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut |47.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/common/ydb-tests-functional-sqs-common |47.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/utils/libpy3fq-generic-utils.global.a |47.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/tenants/ydb-tests-functional-tenants |47.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut |47.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/objcopy_b9fd5c62781ec3b78d111a0ba7.o |47.6%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/tests/olap/ydb-tests-olap |47.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots |47.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows |47.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/viewer/tests/ydb-core-viewer-tests |47.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/objcopy_a5874452d3dbd6f6e49cd08be6.o |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/internals_ut.cpp |47.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/streaming/objcopy_6f577a0a3d7a659599df51626e.o |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_compute_scheduler_ut.cpp |47.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view |47.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut |47.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut |47.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 |47.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/ut/ydb-core-mind-ut |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/python/yt/type_info/libpy3python-yt-type_info.global.a |47.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/yds/ydb-tests-fq-yds |47.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ydb-public-sdk-cpp-src-client-topic-ut |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_scan_data_ut.cpp |47.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/bulk_upsert/ydb-public-sdk-cpp-tests-integration-bulk_upsert |47.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/python/yt/libpy3yt-python-yt.global.a |47.6%| [AR] {default-linux-x86_64, release, asan, pic} $(B)/yt/yt/core/libyt-yt-core.a |47.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector |47.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage |47.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_replication/ut_replication.cpp |47.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots |47.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan |47.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/tests/kikimr_tpch/ydb-core-kqp-tests-kikimr_tpch |47.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/simplejson/py3/libpy3python-simplejson-py3.global.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/utils/types/libpy3tests-utils-types.global.a |47.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/docs/generator/generator |47.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks |47.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots |47.7%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation |47.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction |47.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain |47.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/wardens/objcopy_0665be2c60952715f39eb25568.o |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_data_erasure_reboots/ut_data_erasure_reboots.cpp |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_proto.cpp |47.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_storage_read_request_ut.cpp |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/prctl/libpy3library-python-prctl.a |47.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/functional-sqs-merge_split_common_table-fifo |47.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/ut/objcopy_9f29b589555ed64086e5eadccf.o |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/pq_async_io/libtests-fq-pq_async_io.a |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_gclogic_ut.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/flat_test_db.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_leases_ut.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_handle_ut.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_cache_s3fifo_ut.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_row_versions_ut.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_table_part_ut.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_btree_index_nodes.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_cxx_database_ut.cpp |47.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/audit/objcopy_53073eb93c76466fca8f474c5f.o |47.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ttl/objcopy_3e7b0e88092417daa72b89bfde.o |47.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/messaging/objcopy_791e2f78c18891d943ecce5e41.o |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_other.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_part_multi.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/datastreams/datastreams_ut.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_ut.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_stats/ut_stats.cpp |47.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/audit/objcopy_a0bee0ed11edab150a8172af5c.o |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_slice.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_charge.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_datetime.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_compaction.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_analyze_ut.cpp |47.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/s3_import/objcopy_2d296dfaf373f7f15e6312517a.o |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_rename_table_column.cpp |47.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/messaging/objcopy_7211c23d9494c46f0f60063e9e.o |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_data_cleanup.cpp 
|47.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/audit/552c373b422666221556a5a9bd_raw.auxcpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_comp_gen.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_forward.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_screen.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/table_writer_ut.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_bloom.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_iterator.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_db_iface.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/leader_election_ut.cpp |48.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/load/objcopy_323a17e94d8d570989807d19d3.o |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/microseconds_sliding_window_ut.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_part.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_tenants_ut.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/make_config.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/proxy/proxy_actor_ut.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/coordinator_ut.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_compaction_multi.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/metering_sink_ut.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/list_all_topics_ut.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_shared_sausagecache.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_params_ut.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_ut.cpp |48.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/autoconfig/objcopy_994fcbd53c4e2174c302bdb5ab.o |48.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/autoconfig/objcopy_7c328c2741f9dd7697a2e0e8b1.o |48.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/autoconfig/objcopy_4c839b0fc6ee60e0bb4adc7079.o |48.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/hive/objcopy_5333c1912ecbac0f64ff97551f.o |48.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/hive/objcopy_48884f6b745ced4d3e78997cb1.o |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/ut_external_data_source_reboots.cpp |48.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/hive/objcopy_53273ad3976098fa8cbd55f5a9.o |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_allocator_client/ut_helpers.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_allocator_client/actor_client_ut.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_ttl/ut_ttl_utility.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/encrypted_storage_ut.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_sequence_reboots/ut_sequence_reboots.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/ydb/ut/ydb_ut.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_common_new_schemecache_ut.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/sentinel_ut.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ut_bsvolume_reboots.cpp |48.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/objcopy_9a3dabea847c21e0b4fa4cda26.o |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_ttl/ut_ttl.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/base/board_subscriber_ut.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_ut_common.cpp |48.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/column_family/compression/objcopy_1ab2a5a6dd84a6c9ff5d5c50b0.o |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_counters.cpp |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/ut/federated_query/common/libut-federated_query-common.a |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/ut_vector_index_build_reboots.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_olap/ut_olap.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/slow/autopartitioning_ut.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_new_schemecache_ut.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp |48.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/objcopy_5aa2f431b8ed27f6c5b5d8a131.o |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/testlib/s3_recipe_helper/liblibrary-testlib-s3_recipe_helper.a |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/ut_fat/blobstorage_node_warden_ut_fat.cpp |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/google/benchmark/librestricted-google-benchmark.a |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/ut_user_attributes_reboots.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/tenant_node_enumeration_ut.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/coordinator/coordinator_volatile_ut.cpp |48.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tenants/objcopy_0c4ce75555cfd5c0dd63e9dfbd.o |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/runlib/libtools-kqprun-runlib.a |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/in_memory_control_plane_storage_ut.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_ut.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/ydb_proxy/ydb_proxy_ut.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/perf/kqp_workload_ut.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/functions_executor_wrapper.cpp |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/docs/generator/libpy3olap-docs-generator.global.a |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/backup/impl/table_writer_ut.cpp |48.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_0e928e66807fd553d7fcaa58a3.o |48.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tenants/4129dc9878c2058404494fb088_raw.auxcpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/s3_recipe_ut_helpers.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_kqp.cpp |48.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source |48.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut |48.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/e1ff312a3308444783623a7c6e_raw.auxcpp |48.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/objcopy_1574e8a5a6c530c7bfd6378c4d.o |48.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/objcopy_452efd8b0828678a61ff4e0569.o |48.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/docs/generator/objcopy_ac8dbe7f54a2cb7efb6636f75f.o |48.2%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_allocator/txallocator_ut.cpp |48.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_8f2fbd9f79880fbfa3c1838d80.o |48.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs |48.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_b08299d456f3448b368e814cb8.o |48.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/objcopy_504b845d57f1a23561e970de61.o |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_serverless_reboots/ut_serverless_reboots.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/kqp_mock.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_federated_scheme_ut.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_olap_reboots/ut_olap_reboots.cpp |48.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_52476c20dac0af4f59edc2917e.o |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/table_creator/table_creator_ut.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/first_class_src_ids_ut.cpp |48.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_s3_plan_ut.cpp |48.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/partition_writer_cache_actor_ut.cpp |48.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut |48.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_496e4638abf3c5ef12eafab52c.o |48.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/generic/analytics/ydb-tests-fq-generic-analytics |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/pqtablet_mock.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/topic_yql_ut.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/partition_writer_cache_actor_fixture.cpp |48.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_5dc9c76fd90ae0562084321e87.o |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/perf/kqp_query_perf_ut.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_common_ut.cpp |48.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join |48.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/oom/ydb-tests-olap-oom |48.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_6b8c453743f8fd2c5380af70c6.o |48.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tools/solomon_emulator_grpc/solomon_recipe_grpc |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/rate_limiter_test_setup.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_compat_ut.cpp |48.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_696078ddd4c2d0788472b3ebfe.o |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_bindings_permissions_ut.cpp |48.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows |48.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_incremental_restore_scan.cpp |48.3%| [LD] {BAZEL_DOWNLOAD, 
FAILED} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_locks.cpp |48.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/canonical/ydb-tests-functional-canonical |48.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_connections_ut.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_allocator/txallocator_ut_helpers.cpp |48.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_f363a941fa24746cadffc60594.o |48.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut |48.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_data_erasure/ydb-core-tx-schemeshard-ut_data_erasure |48.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/ydb-core-tx-tx_proxy-ut_schemereq |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/topic_service_ut.cpp |48.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests |48.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_quotas_ut.cpp |48.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/backup/s3_path_style/ydb-tests-functional-backup-s3_path_style |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_connections_permissions_ut.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/demo_tx.cpp |48.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/target_discoverer_ut.cpp |48.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots |48.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/tests/objcopy_87b299e07b15c86f4f50f458ef.o |48.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/tests/objcopy_1b4bf9f1f46a6111d16337dee0.o |48.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/yql/essentials/tools/sql2yql/sql2yql |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_view/ut_view.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_change_collector.cpp |48.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tenants/objcopy_c84c8d511807425dc18073129b.o |48.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |48.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/tests/objcopy_f3c323ef80ada193284f036d44.o |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/fqrun/src/libtools-fqrun-src.a |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_subdomain/ut_subdomain.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/ydb_proxy/partition_end_watcher_ut.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/tenant_ut_local.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/tenant_ut_pool.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_compaction.cpp |48.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut |48.4%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/statistics/service/ut/ut_aggregation/ut_aggregate_statistics.cpp |48.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut |48.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tenants/objcopy_5a4a401f33f46c70417a65f584.o |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/trace_ut.cpp |48.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/objcopy_2cc418e8604751e5b8f9029a81.o |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/flavours/libpy3tests-library-flavours.global.a |48.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/objcopy_50f818df501fa237b5369d0e33.o |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/node_broker_ut.cpp |48.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/common/objcopy_0a1f127d9343562caddfbacf79.o |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/local_partition_ut.cpp |48.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache |48.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/describe_topic_ut.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/coordinator/coordinator_ut.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_queries_ut.cpp |48.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data |48.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/common/objcopy_79d897640a3a634a87f173e2f4.o |48.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/common/objcopy_a54664d42025a3be375f961b82.o |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_bindings_ut.cpp |48.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/column_family/compression/objcopy_f42b1add98328abd34a53e4aef.o |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/assign_tx_id_ut.cpp |48.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut_fat/dsproxy_ut.cpp |48.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration |48.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |48.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/basic_usage_ut.cpp |48.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/objcopy_1406195445f45d950dda89fcd8.o |48.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker |48.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring |48.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/config/ut/ydb-services-config-ut |48.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_internal_ut.cpp |48.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction |48.4%| [PY] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/olap/column_family/compression/objcopy_3bdea7737a87c43bfaa0aaf4c3.o |48.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots |48.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace |48.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut |48.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut |48.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys |48.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/sql/large/ydb-tests-sql-large |48.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_acl_ut.cpp |48.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader |48.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut |48.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless |48.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot |48.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write |48.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ydb_convert/ydb_convert_ut.cpp |48.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/query_replay_yt/query_replay_yt |48.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/plans/ydb-tests-fq-plans |48.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/balance_coverage/balance_coverage_builder_ut.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_erase_rows.cpp |48.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut |48.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/topic_to_table_ut.cpp |48.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/common/objcopy_178e64ce5db822fc6aa8b3e608.o |48.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tenants/objcopy_86ad37399122e504f3e6d8378d.o |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ydb_convert/compression_ut.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ydb_convert/table_description_ut.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_federated_query_ut.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_common.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_queries_permissions_ut.cpp |48.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_labeled.cpp |48.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_minstep.cpp |48.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ydb-public-sdk-cpp-src-client-persqueue_public-ut |48.5%| [LD] 
{BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction |48.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots |48.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/simple_queue/tests/ydb-tests-stress-simple_queue-tests |48.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection |48.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut |48.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge |48.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut |48.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut |48.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |48.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator |48.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/large/ydb-tests-functional-sqs-large |48.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica |48.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap |48.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume |48.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning |48.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build |48.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/runtime/ydb-core-kqp-ut-runtime |48.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow |48.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut |48.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots |48.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/config/ydb-tests-functional-config |48.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table |48.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/control/ut/ydb-core-control-ut |48.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup |48.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/df691ac52d0b755cb039db39b5_raw.auxcpp |48.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/objcopy_cca8dcd66462c9ca3c57fcb78e.o |48.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/kqprun/kqprun |48.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/objcopy_b34c6a8a5501db208eebc5d8e4.o |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gbenchmark/libcpp-testing-gbenchmark.a |48.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/tx-replication-controller-ut_stream_creator |48.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/slow/pq_ut.cpp |48.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/s3/ydb-tests-fq-s3 |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_constraints_ut.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/services/persqueue_v1/ut/rate_limiter_test_setup.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_cluster_discovery/cluster_discovery_service_ut.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/s3/ut/s3_aws_credentials_ut.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut_ftol/dsproxy_fault_tolerance_ut.cpp |48.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/objcopy_3e8bf44ed681ff82ae143aaec3.o |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_ut.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/benchmark/b_part.cpp |48.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/postgresql/objcopy_816e2dba53f55d924139cdb3c5.o |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/json_change_record_ut.cpp |48.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/postgresql/objcopy_e4166f3d104a6751b45e7e712f.o |48.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/postgresql/objcopy_93197284f82f9ae9fc0256ee95.o |48.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/analytics/objcopy_7cbdf366fff58ab43b08c0aaa3.o |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index/ut_unique_index.cpp |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/postgresql/common/libpy3functional-postgresql-common.global.a |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/initializer/ut/ut_init.cpp |48.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/analytics/objcopy_5309010d16487b3f4dcf314c15.o |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_external_data_source/ut_external_data_source.cpp |48.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/analytics/objcopy_1007df29dec27b0b7a1587d49f.o |48.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/analytics/c52bef66453eb652f14989b79d_raw.auxcpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index/ut_vector_index.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/dynamic_config/dynamic_config_ut.cpp |48.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/oom/objcopy_0221134ccf2a949614ce2a2056.o |48.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/oom/objcopy_a0543c2dc30365e9b2ad3d0ca6.o |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index/ut_async_index.cpp |48.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/oom/objcopy_e0331f455507fe5ac3b71d0537.o |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_ext_blobs_multiple_channels.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_flip_join_ut.cpp |48.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/canonical/objcopy_065e9244d685c2b8f0ab66e414.o |48.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/canonical/objcopy_4cf502b19212965f14d6660a20.o |48.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/canonical/objcopy_461999da7ba13deab5689c18ec.o |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/ut_vdisk/lib/libblobstorage-ut_vdisk-lib.a |48.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_tests/objcopy_5b5c3367c789898aa5a6cae866.o |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_data_erasure/ut_data_erasure.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_index_lookup_join_ut.cpp |48.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_tests/objcopy_8e57113197bb359e3999b04aab.o |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_streaming/ut/grpc/libgrpc_streaming-ut-grpc.a |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_trace.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/fq/libs/actors/ut/database_resolver_ut.cpp |48.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_tests/objcopy_a40d299361b06d7622f78b2238.o |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_upload_rows.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_join_order_ut.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_join_ut.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ut_subdomain_reboots.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ut_extsubdomain_reboots.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yql/essentials/tools/sql2yql/sql2yql.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ut_export_reboots_s3.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/splitter/ut/batch_slice.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_streaming/grpc_streaming_ut.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/splitter/ut/ut_splitter.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ut_cdc_stream_reboots.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/ldap_auth_provider/ldap_utils_ut.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/schemereq_ut.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/describes_ut/ic_cache_ut.cpp |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/network/libessentials-utils-network.a |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/topic/topic_write.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/fq/ut_integration/ut_utils.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ut_rtmr_reboots.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/sharding/ut/ut_sharding.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/monitoring_ut.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/data/kqp_read_null_ut.cpp |48.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/encryption/objcopy_93dc3386250916dfae1ecb9b13.o |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/describes_ut/describe_topic_ut.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_scheme_ut.cpp |48.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/encryption/objcopy_a5c82b9ecb3bf738ea9e628123.o |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/worker_ut.cpp |48.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/encryption/objcopy_3d6916930a438b51675ef6dda7.o |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/topic/topic_write_ut.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_ut_trace.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_multishard_ut.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/topic/topic_read_ut.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/config/bsconfig_ut.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_background_compaction.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/bootstrapper_ut.cpp |48.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serverless/objcopy_e2acb41e7099c0db4fe54a1587.o |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/fq/ut_integration/fq_ut.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/ldap_auth_provider/ldap_auth_provider_ut.cpp 
|48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/protobuf/libprotobuf_udf.global.a |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/cache_ut.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/driver_lib/run/auto_config_initializer_ut.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_pipe_ut.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serverless/b9d4e191a9fd03221b46c5af49_raw.auxcpp |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/streaming/libstreaming_udf.global.a |48.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/objcopy_d0255dda539959b69d421868a2.o |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ut_split_merge_reboots.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpointing/ut/checkpoint_coordinator_ut.cpp |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/protobuf_udf/libessentials-minikql-protobuf_udf.a |49.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/7179c606fb7373cb8f04d9971a_raw.auxcpp |48.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/objcopy_173de88696c8239b22567e7ece.o |48.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/objcopy_6a5c78aa9f679a0920be5264fe.o |48.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/objcopy_96b8686cd075e874d95d4aa5c5.o |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay_yt/main.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_oos_logic_ut.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_req_blockbs_ut.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay_yt/query_replay.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_reassign.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tiering/ut/ut_object.cpp |49.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serverless/objcopy_7c81cbfa6b5ce112674cb0a849.o |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tiering/ut/ut_tiers.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_metrics_ut.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/actor_bootstrapped_ut.cpp |49.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/simple_queue/tests/objcopy_3df021aac8504049c53286aea0.o |49.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/simple_queue/tests/objcopy_2492aafb6862566a2398c9f27e.o |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/simple_queue/workload/libpy3stress-simple_queue-workload.global.a |49.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serverless/objcopy_79fff48e52404c1611400b8a2c.o |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay_yt/query_compiler.cpp |49.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/limits/objcopy_d52256d4fa9895f38df6030445.o |49.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/large/objcopy_199ab4be3deaff025e1ab92143.o |49.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/limits/objcopy_5051832ffa0b6b13cebe014eb1.o |49.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/limits/objcopy_14c03c6aecffbe39cb01ddf2ed.o |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_ru_calculator/ut_ru_calculator.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_counters_aggregator_ut.cpp |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/file/libfile_udf.global.a |49.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/large/objcopy_8ac5034640eee44b1cd5fa5253.o |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_vpatch_actor_ut.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_patch_ut.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/quoter_resource_tree_ut.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/test_interconnect_ut.cpp |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_pq_reboots/ut_pq_reboots.cpp |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/runtime/kqp_scan_logging_ut.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index_build/ut_vector_index_build.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/arrow/kqp_arrow_in_channels_ut.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/datatime64_ut.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ycloud/impl/user_account_service_ut.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/runtime/kqp_scan_spilling_ut.cpp |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/mkql_simple_file/libproviders-common-mkql_simple_file.a |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/kqp_olap_stats_ut.cpp |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_write.cpp |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/src/proto/libkqprun-src-proto.a |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/replica_ut.cpp |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ycloud/impl/folder_service_ut.cpp |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/aggregations_ut.cpp |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_reboots/ut_reboots.cpp |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/tools/kqprun/kqprun.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/stream_creator_ut.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/public_http/http_router_ut.cpp |49.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/objcopy_1a637ae81b754dfa4e06b949b8.o |49.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut |49.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut |49.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt |49.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/objcopy_6cfba3dbee97ec121b2f346459.o |49.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/postgres_integrations/go-libpq/ydb-tests-postgres_integrations-go-libpq |49.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/32049c3ef1f885f0e34984b3bf_raw.auxcpp |49.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings |49.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/objcopy_eff0a6b0f75ccb9a2cb742007c.o |49.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk |49.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut |49.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/objcopy_ce073e3cc612363936bdd04210.o |49.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test |49.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut |49.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx |49.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut |49.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/fqrun/fqrun |49.2%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut |49.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut |49.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql |49.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build |49.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service |49.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut |49.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/multi_plane/ydb-tests-fq-multi_plane |49.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_login_large/ydb-core-tx-schemeshard-ut_login_large |49.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/api/ydb-tests-functional-api |49.1%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/core/libyt-yt-core.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/src/libtools-kqprun-src.a |49.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view |49.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut |49.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut |49.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_transfer_writer/core-tx-replication-service-ut_transfer_writer |49.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/example/ydb-tests-example |49.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut |49.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud |49.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant |49.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/cms/ut/ydb-services-cms-ut |49.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution |49.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/ut_large/ydb-core-sys_view-ut_large |49.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/compatibility/ydb-tests-functional-compatibility |49.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut |49.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut |49.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator |49.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/file/libyt-gateway-file.a |49.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut |49.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard |49.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable |49.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts |49.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/objcopy_7dbead413d0eb2c0f2ebe75a93.o |49.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/objcopy_7e7e709046fe8acad91d924675.o |49.3%| [PY] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/fq/s3/objcopy_999b0e05144f29a542dbe4b3f5.o |49.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/objcopy_1a867878d783e80bc2d70bd8d0.o |49.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/config/objcopy_a3fc9153ce93c876df4c755b36.o |49.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/config/objcopy_f364ff47dd846bb94c3e83f2a8.o |49.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/config/objcopy_ae5b9f6e7a00f305f01a3dde87.o |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/control/immediate_control_board_actor_ut.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_ut.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ycloud/impl/access_service_ut.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_continuous_backup/ut_continuous_backup.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/arrow/kqp_types_arrow_ut.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ycloud/impl/service_account_service_ut.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/decimal_ut.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/delete_ut.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/indexes_ut.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/optimizer_ut.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/locks_ut.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/clickbench_ut.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_read_table.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/json_ut.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_quorum_tracker_ut.cpp |49.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/large/objcopy_b0a88dfa3c67850033b8c21ce7.o |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_snapshot.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/statistics_ut.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/topic_reader_ut.cpp |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/ut/olap/helpers/libut-olap-helpers.a |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/sparsed_ut.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index_build/ut_index_build.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_bsvolume/ut_bsvolume.cpp |49.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/large/objcopy_422ca1effff14e5a08952658d0.o |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/tiering_ut.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_background_cleaning/ut_background_cleaning.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/common_ut.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/resource_broker_ut.cpp |49.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/simple_queue/tests/objcopy_2de2accab39327e9b10680901f.o |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/sys_view_ut.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/read_only_vdisk.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/provider/read_attributes_utils_ut.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ut_topic_splitmerge.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/write_ut.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_provider_ut.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/actor_tracker_ut.cpp |49.4%| [CC] 
{BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/actor_activity_ut.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/test_protocols_ut.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ext_index/ut/ut_ext_index.cpp |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/dynamic_prototype/libcpp-protobuf-dynamic_prototype.a |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_keys.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/compression_ut.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_backup_collection/ut_backup_collection.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/compress_executor_ut.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_counters_ut.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/pipe_tracker_ut.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/retry_policy_ut.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/basic_usage_ut.cpp |49.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/large/objcopy_7eab954373d77ffb1fab95ca0d.o |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_pipecache_ut.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/compression_ut.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/kqp_olap_ut.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_counters_ut.cpp |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/yql/libcpp-protobuf-yql.a |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_resolver_ut.cpp |49.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/large/objcopy_27c0687ceeb7ce4ff5e4cea90a.o |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/rate_limiter/rate_limiter_ut.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_request_reporting_ut.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_gateway_ut.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_compaction/ut_compaction.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_get_ut.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tablet_ut.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_sequence_ut.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_put_ut.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/ut_helpers.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/common/rows_proto_splitter_ut.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/go-libpq/7fdc9492198d5f306aa05e0de1_raw.auxcpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/read_session_ut.cpp |49.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/go-libpq/objcopy_4352b8b3e3cf61532c865b371b.o |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/common/entity_id_ut.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_agg_ut.cpp |49.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/go-libpq/objcopy_2b60b599fc27771d93e79090fc.o |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/common/cache_ut.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_auditsettings/ut_auditsettings.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/common/util_ut.cpp |49.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/go-libpq/objcopy_3ddbad334a37a829b3772ddb05.o |49.7%| [CC] 
{BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_kv_ut.cpp |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/docker/libpy3contrib-python-docker.global.a |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_merge_ut.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/balancing_ut.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_extract_predicate_unpack_ut.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_sqlin_ut.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/quoter_performance_test/main.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/graph/shard/ut/shard_ut.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_sort_ut.cpp |49.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/api/objcopy_7897d1b03fc78e49620c18f81a.o |49.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/multi_plane/objcopy_c65a9d5efe13dc05c1466090ba.o |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_write_actor_ut.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/mirrorer_ut.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/tools/fqrun/fqrun.cpp |49.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/api/objcopy_253d734e8c901d319d84fcc6e9.o |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/service/kqp_document_api_ut.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_sink_locks_ut.cpp |49.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/example/objcopy_c623700776b43ee95ec93c56f9.o |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/partition_stats/partition_stats_ut.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/proxy_service/kqp_script_executions_ut.cpp |49.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/cloud/objcopy_0b6bc206b470900b0b94249ade.o |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_login_large/ut_login_large.cpp |49.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/cloud/objcopy_e3640190fc6b98b359c2a9e990.o |49.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/example/objcopy_5525925030ba2866c1b1040841.o |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_read_actor_ut.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_locks_ut.cpp |49.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/script_execution/objcopy_51000f45ee1f1ab0908a7e71c9.o |49.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/script_execution/objcopy_f05ead59375a9db120b95dd730.o |49.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/compatibility/objcopy_4246ee6b3505ab22753eb44ce7.o |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_rd_read_actor_ut.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/yc_search_ut/index_events_processor_ut.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_column_build/ut_column_build.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_ranges_ut.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/storagepoolmon/ut/storagepoolmon_ut.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_analyze_datashard.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/transfer_writer_ut.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_mvcc_ut.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/storage_pool_info_ut.cpp |49.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_common.cpp |49.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp 
|49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/object_distribution_ut.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/scale_recommender_policy_ut.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/service/kqp_service_ut.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/memory_controller/memory_controller_ut.cpp |49.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/hive_impl_ut.cpp |49.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/view/view_ut.cpp |49.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_shard/objcopy_d3af02c7d57ea2cbbe5d381baa.o |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/serializability/libpy3tests-library-serializability.global.a |49.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/autoscaling_ut.cpp |49.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_shard/objcopy_d1dee10c0c00d50989b086bd3f.o |49.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |49.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/restarts/objcopy_277b7e8f79021687bec95be8db.o |49.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/service/kqp_qs_scripts_ut.cpp |49.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_minikql.cpp |49.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serializable/objcopy_3ea8aa67e7c24c4f0e3b0406b9.o |49.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serializable/objcopy_445797246443360525d31550d1.o |49.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/restarts/objcopy_88a0e187d0d0f8235a5e3f2fff.o |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/ydb_serializable/lib/libpy3tools-ydb_serializable-lib.global.a |49.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serializable/objcopy_5831cbd77ecc92a241b6cf1ea2.o |49.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/restarts/objcopy_0359848ae21601186c5b0d9873.o |50.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_shard/objcopy_8120ef49e7e653ed0601604313.o |50.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/compatibility/objcopy_69ec8108bd4bdc059abab5b374.o |50.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/compatibility/objcopy_083605b223ce507d0fef919d0d.o |50.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/script_execution/objcopy_1aeeb50f676472f975830c135d.o |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_large.cpp |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/dst_creator_ut.cpp |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/dread_cache_service/ut/caching_proxy_ut.cpp |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_traverse_datashard.cpp |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/sequencer_ut.cpp |50.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/cloud/objcopy_6e0da74b1512d0ffe19c5dc500.o |50.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/example/objcopy_2b682e146a665bfa19210b0fd9.o |50.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/api/objcopy_e2a089b95d9316f6e26025d3e3.o |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/memory_controller/memtable_collection_ut.cpp |50.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/multi_plane/objcopy_b8aa61f402be805d2e3e9e75a2.o |50.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/multi_plane/objcopy_f580ed931409135de17b6aff8b.o |50.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/api/objcopy_909bbfbd36bf4d7cf0544f0406.o |50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/yc_search_ut/test_events_writer.cpp |50.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/postgres_integrations/library/libpy3tests-postgres_integrations-library.global.a |50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_not_null_ut.cpp |50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_traverse_columnshard.cpp |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_analyze_columnshard.cpp |50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_returning_ut.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_table_split_ut.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_locks_tricky_ut.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_sink_mvcc_ut.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/cms/cms_ut.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_tx_ut.cpp |48.8%| [AR] {RESULT} $(B)/yt/yt/core/libyt-yt-core.a |48.8%| COMPACTING CACHE 445.7MiB |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/generic_ut/kqp_generic_provider_ut.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/proxy_service/kqp_proxy_ut.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_ne_ut.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/ut_helpers.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/mediator/mediator_ut.cpp |48.8%| [TS] {RESULT} ydb/tests/stress/oltp_workload/tests/flake8 |48.8%| [TS] {RESULT} ydb/tests/fq/multi_plane/flake8 |48.8%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part16/flake8 |48.8%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part13/flake8 |48.8%| [TS] {RESULT} ydb/tests/functional/sqs/multinode/flake8 |48.8%| [TS] {RESULT} ydb/tests/olap/scenario/flake8 |48.9%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part15/flake8 |48.9%| [TS] {RESULT} ydb/tests/postgres_integrations/go-libpq/flake8 |48.9%| [TS] {RESULT} ydb/tests/tools/kqprun/tests/flake8 |48.9%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part3/flake8 |48.9%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/oracle/flake8 |48.9%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part6/flake8 |48.9%| [TS] {RESULT} ydb/tests/fq/solomon/flake8 |48.9%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_import/flake8 |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/hive_ut.cpp |48.9%| [TS] {RESULT} ydb/tests/functional/ydb_cli/flake8 |48.9%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part10/flake8 |48.9%| [TS] {RESULT} ydb/tests/functional/sqs/messaging/flake8 |48.9%| [TS] {RESULT} ydb/tests/olap/lib/flake8 |48.9%| [TS] {RESULT} ydb/tests/functional/sqs/cloud/flake8 |48.9%| [TS] {RESULT} ydb/tests/functional/postgresql/flake8 |48.9%| [TS] {RESULT} ydb/tests/fq/restarts/flake8 |48.9%| [TS] {RESULT} ydb/tests/functional/limits/flake8 |48.9%| [TS] {RESULT} ydb/tests/functional/rename/flake8 |48.9%| [TS] {RESULT} ydb/tests/functional/tenants/flake8 |48.9%| [TS] {RESULT} ydb/tests/functional/scheme_shard/flake8 |49.0%| [TS] {RESULT} ydb/tests/olap/s3_import/flake8 |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/service/kqp_qs_queries_ut.cpp |49.0%| [TS] {RESULT} ydb/tests/functional/script_execution/flake8 |49.0%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part18/flake8 |49.0%| [TS] {RESULT} ydb/tests/sql/lib/flake8 |49.0%| [TS] {RESULT} ydb/tests/functional/tpc/medium/flake8 |49.0%| [TS] {RESULT} ydb/tests/fq/generic/streaming/flake8 |49.0%| [TS] {RESULT} ydb/tests/fq/generic/analytics/black |49.0%| [TS] {RESULT} ydb/tests/functional/encryption/flake8 |49.0%| [TS] {RESULT} 
ydb/tests/functional/hive/flake8 |49.0%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part9/flake8 |49.0%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part1/flake8 |49.0%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part7/flake8 |49.0%| [TS] {RESULT} ydb/tests/example/flake8 |49.0%| [TS] {RESULT} ydb/tests/fq/streaming_optimize/flake8 |49.0%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part8/flake8 |49.0%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part14/flake8 |49.0%| [TS] {RESULT} ydb/tests/olap/docs/generator/flake8 |49.1%| [TS] {RESULT} ydb/tests/functional/canonical/flake8 |49.1%| [TS] {RESULT} ydb/tests/fq/yds/flake8 |49.1%| [TS] {RESULT} ydb/tests/olap/column_family/compression/flake8 |49.1%| [TS] {RESULT} ydb/tests/functional/sqs/large/flake8 |49.1%| [TS] {RESULT} ydb/tests/fq/mem_alloc/flake8 |49.1%| [TS] {RESULT} ydb/tests/functional/config/flake8 |49.1%| [TS] {RESULT} ydb/tests/fq/common/flake8 |49.1%| [TS] {RESULT} ydb/tests/functional/sqs/with_quotas/flake8 |49.1%| [TS] {RESULT} ydb/tests/sql/large/flake8 |49.1%| [TS] {RESULT} ydb/tests/fq/generic/analytics/flake8 |49.1%| [TS] {RESULT} ydb/tests/functional/tpc/large/flake8 |49.1%| [TS] {RESULT} ydb/tests/olap/common/flake8 |49.1%| [TS] {RESULT} ydb/tests/functional/sqs/merge_split_common_table/std/flake8 |49.1%| [TS] {RESULT} ydb/tests/functional/serializable/flake8 |49.1%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part11/flake8 |49.1%| [TS] {RESULT} ydb/tests/fq/generic/streaming/black |49.1%| [TS] {RESULT} ydb/tests/library/ut/flake8 |49.1%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part0/flake8 |49.1%| [TS] {RESULT} ydb/tests/functional/sqs/merge_split_common_table/fifo/flake8 |49.2%| [TS] {RESULT} ydb/tests/functional/ttl/flake8 |49.2%| [TS] {RESULT} ydb/tests/sql/flake8 |49.2%| [TS] {RESULT} ydb/tests/functional/autoconfig/flake8 |49.2%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part2/flake8 |49.2%| [TS] {RESULT} ydb/tests/functional/query_cache/flake8 |49.2%| [TS] {RESULT} ydb/tests/tools/nemesis/ut/flake8 |49.2%| [TS] {RESULT} ydb/tests/functional/sqs/common/flake8 |49.2%| [TS] {RESULT} ydb/tests/fq/s3/flake8 |49.2%| [TS] {RESULT} ydb/tests/stress/log/tests/flake8 |49.2%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part12/flake8 |49.2%| [TS] {RESULT} ydb/tests/olap/load/flake8 |49.2%| [TS] {RESULT} ydb/tests/functional/compatibility/flake8 |49.2%| [TS] {RESULT} ydb/tests/fq/plans/flake8 |49.2%| [TS] {RESULT} ydb/tests/functional/restarts/flake8 |49.2%| [TS] {RESULT} ydb/tests/olap/flake8 |49.2%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/postgresql/flake8 |49.2%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/join/flake8 |49.2%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/clickhouse/flake8 |49.2%| [TS] {RESULT} ydb/tests/olap/oom/flake8 |49.3%| [TS] {RESULT} ydb/tests/functional/blobstorage/flake8 |49.3%| [TS] {RESULT} ydb/tests/functional/audit/flake8 |49.3%| [TS] {RESULT} ydb/tests/functional/api/flake8 |49.3%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part4/flake8 |49.3%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/mysql/flake8 |49.3%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part5/flake8 |49.3%| [TS] {RESULT} ydb/tests/functional/serverless/flake8 |49.3%| [TS] {RESULT} ydb/tests/functional/scheme_tests/flake8 |49.3%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/ms_sql_server/flake8 |49.3%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part17/flake8 |49.3%| [TS] {RESULT} 
ydb/tests/fq/http_api/flake8 |49.3%| [TS] {RESULT} ydb/tests/stress/olap_workload/tests/flake8 |49.3%| [TS] {RESULT} ydb/tests/tools/kqprun/recipe/flake8 |49.4%| [TS] {RESULT} ydb/tests/functional/wardens/flake8 |49.4%| [TS] {RESULT} ydb/tests/stress/simple_queue/tests/flake8 |49.4%| [TS] {RESULT} ydb/tests/stress/kv/tests/flake8 |49.5%| [TS] {RESULT} ydb/core/viewer/tests/flake8 |49.6%| [TS] {RESULT} ydb/tests/tools/pq_read/test/flake8 |49.6%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part19/flake8 |49.7%| [TS] {RESULT} ydb/tests/functional/cms/flake8 |49.7%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/ydb/flake8 |49.7%| [TS] {RESULT} ydb/tests/olap/ttl_tiering/flake8 |59.6%| [AR] {default-linux-x86_64, release, asan} $(B)/yt/yt/client/libyt-yt-client.a |59.7%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/client/libyt-yt-client.a |59.7%| [AR] {RESULT} $(B)/yt/yt/client/libyt-yt-client.a |60.5%| [AR] {default-linux-x86_64, release, asan} $(B)/library/cpp/svnversion/liblibrary-cpp-svnversion.a |62.4%| [AR] {default-linux-x86_64, release, asan} $(B)/library/cpp/build_info/liblibrary-cpp-build_info.a |65.4%| [AR] {default-linux-x86_64, release, asan} $(B)/yt/yt/core/libyt-yt-core.a |65.4%| [AR] {RESULT} $(B)/yt/yt/core/libyt-yt-core.a |65.4%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/core/libyt-yt-core.a |65.4%| [AR] {default-linux-x86_64, release, asan} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.a |65.4%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.a |65.4%| [AR] {RESULT} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.a |65.4%| [AR] {default-linux-x86_64, release, asan} $(B)/yql/essentials/minikql/comp_nodes/llvm16/libminikql-comp_nodes-llvm16.a |65.4%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yql/essentials/minikql/comp_nodes/llvm16/libminikql-comp_nodes-llvm16.a |65.5%| [AR] {RESULT} $(B)/yql/essentials/minikql/comp_nodes/llvm16/libminikql-comp_nodes-llvm16.a |65.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a |65.5%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a |65.5%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a |65.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/udfs/common/clickhouse/client/libclickhouse_client_udf.global.a |65.5%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/udfs/common/clickhouse/client/libclickhouse_client_udf.global.a |65.5%| [AR] {RESULT} $(B)/ydb/library/yql/udfs/common/clickhouse/client/libclickhouse_client_udf.global.a |65.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard |65.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard |65.5%| [LD] {RESULT} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard |65.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe |65.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe |65.5%| [LD] {RESULT} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe |65.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud |65.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud |65.5%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud |65.5%| [LD] {default-linux-x86_64, release, asan} 
$(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |65.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |65.5%| [LD] {RESULT} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |65.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts |65.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts |65.5%| [LD] {RESULT} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts |65.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution |65.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution |65.6%| [LD] {RESULT} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution |65.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/apps/ydb/ydb |65.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/ydb/ydb |65.6%| [LD] {RESULT} $(B)/ydb/apps/ydb/ydb |65.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/api/ydb-tests-functional-api |65.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/api/ydb-tests-functional-api |65.6%| [LD] {RESULT} $(B)/ydb/tests/functional/api/ydb-tests-functional-api |65.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/config/ydb-tests-functional-config |65.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/config/ydb-tests-functional-config |65.6%| [LD] {RESULT} $(B)/ydb/tests/functional/config/ydb-tests-functional-config |65.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/multi_plane/ydb-tests-fq-multi_plane |65.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/multi_plane/ydb-tests-fq-multi_plane |65.6%| [LD] {RESULT} $(B)/ydb/tests/fq/multi_plane/ydb-tests-fq-multi_plane |65.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/oom/ydb-tests-olap-oom |65.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/oom/ydb-tests-olap-oom |65.7%| [LD] {RESULT} $(B)/ydb/tests/olap/oom/ydb-tests-olap-oom |65.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut |65.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut |65.7%| [LD] {RESULT} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut |65.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut |65.7%| [LD] {RESULT} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut |65.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut |65.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source |65.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source |65.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source |65.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest |65.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest |65.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> 
TPQCachingProxyTest::TestWrongSessionOrGeneration |65.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::TestDeregister >> TPQCachingProxyTest::TestPublishAndForget >> TPQCachingProxyTest::OutdatedSession |65.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::MultipleSessions |65.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut |65.7%| [LD] {RESULT} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut |65.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut |65.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut |65.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut |65.7%| [LD] {RESULT} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut >> TExternalDataSourceTest::ParallelCreateSameExternalDataSource >> TPQCachingProxyTest::TestWrongSessionOrGeneration [GOOD] >> TExternalDataSourceTest::ReplaceExternalDataStoreShouldFailIfEntityOfAnotherTypeWithSameNameExists >> TExternalDataSourceTest::SchemeErrors >> TPQCachingProxyTest::TestDeregister [GOOD] >> TExternalDataSourceTest::RemovingReferencesFromDataSources >> TPQCachingProxyTest::TestPublishAndForget [GOOD] >> TExternalDataSourceTest::ReadOnlyMode >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExists >> TExternalDataSourceTest::CreateExternalDataSource ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::TestWrongSessionOrGeneration [GOOD] Test command err: 2025-03-31T15:53:46.919308Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T15:53:46.919411Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-31T15:53:46.934823Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-31T15:53:46.934920Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session1:1 with generation 2 2025-03-31T15:53:46.935004Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: staged direct read id 1 for session: session1 2025-03-31T15:53:46.935047Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: publish read: 1 for session session1, Generation: 2 2025-03-31T15:53:46.935139Z node 1 :PQ_READ_PROXY INFO: Direct read cache: attempted to register server session: session1:1 with stale generation 1, ignored 2025-03-31T15:53:46.935194Z node 1 :PQ_READ_PROXY ALERT: Direct read cache: tried to stage direct read for session session1 with generation 1, previously had this session with generation 2. 
Data ignored 2025-03-31T15:53:46.935234Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: publish read: 1 for session session1, Generation: 1 2025-03-31T15:53:46.935304Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: forget read: 1 for session session1 >> TPQCachingProxyTest::MultipleSessions [GOOD] >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExistsShouldFailIfFeatureFlagIsNotSet >> TPQCachingProxyTest::OutdatedSession [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::TestDeregister [GOOD] Test command err: 2025-03-31T15:53:47.496313Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T15:53:47.496396Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-31T15:53:47.527258Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-31T15:53:47.527539Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session1:1 with generation 1 2025-03-31T15:53:47.527882Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session2:1 with generation 1 2025-03-31T15:53:47.528297Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: session1 |65.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut |65.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut |65.8%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::TestPublishAndForget [GOOD] Test command err: 2025-03-31T15:53:47.531217Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T15:53:47.531897Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-31T15:53:47.611426Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-31T15:53:47.611541Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session1:1 with generation 1 2025-03-31T15:53:47.611639Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: staged direct read id 1 for session: session1 2025-03-31T15:53:47.611675Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: publish read: 1 for session session1, Generation: 1 2025-03-31T15:53:47.611775Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: forget read: 1 for session session1 >> TExternalDataSourceTest::CreateExternalDataSourceWithProperties >> TExternalDataSourceTest::DropTableTwice ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::OutdatedSession [GOOD] Test command err: 2025-03-31T15:53:48.803505Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T15:53:48.803586Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-31T15:53:48.869280Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-31T15:53:48.869605Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session1:1 with generation 1 2025-03-31T15:53:48.869845Z 
node 1 :PQ_READ_PROXY DEBUG: Direct read cache: staged direct read id 1 for session: session1 2025-03-31T15:53:48.869875Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: publish read: 1 for session session1, Generation: 1 2025-03-31T15:53:48.870326Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session1:1 with generation 2, killed existing session with older generation >> TExternalDataSourceTest::ParallelCreateSameExternalDataSource [GOOD] >> TExternalDataSourceTest::PreventDeletionOfDependentDataSources >> TExternalDataSourceTest::ReplaceExternalDataStoreShouldFailIfEntityOfAnotherTypeWithSameNameExists [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::MultipleSessions [GOOD] Test command err: 2025-03-31T15:53:48.689279Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T15:53:48.689817Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-31T15:53:48.794123Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-31T15:53:48.794237Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session1:1 with generation 1 2025-03-31T15:53:48.794319Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: staged direct read id 1 for session: session1 2025-03-31T15:53:48.794372Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: staged direct read id 2 for session: session1 2025-03-31T15:53:48.794407Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: publish read: 1 for session session1, Generation: 1 2025-03-31T15:53:48.794476Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: publish read: 2 for session session1, Generation: 1 2025-03-31T15:53:48.794535Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session2:1 with generation 2 2025-03-31T15:53:48.794590Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: staged direct read id 3 for session: session2 2025-03-31T15:53:48.794623Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: publish read: 3 for session session2, Generation: 2 |65.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TBlobStorageStoragePoolMonTest::ReducedSizeClassCalcTest [GOOD] |65.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |65.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |65.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TExternalDataSourceTest::SchemeErrors [GOOD] >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExists [GOOD] |65.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |65.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |65.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |65.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest |65.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TBlobStorageStoragePoolMonTest::SizeClassCalcTest [GOOD] |65.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |65.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest |65.9%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/blobstorage/storagepoolmon/ut/unittest |65.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |65.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> PartitionStats::Collector >> TExternalDataSourceTest::ReadOnlyMode [GOOD] |65.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |65.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |65.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |65.9%| [TA] $(B)/ydb/core/persqueue/dread_cache_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |65.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExistsShouldFailIfFeatureFlagIsNotSet [GOOD] >> PartitionStats::Collector [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ReplaceExternalDataStoreShouldFailIfEntityOfAnotherTypeWithSameNameExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T15:53:49.692950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T15:53:49.693057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:53:49.693099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T15:53:49.693132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T15:53:49.693193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T15:53:49.693222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T15:53:49.693274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:53:49.693344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T15:53:49.693660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:53:49.773337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:53:49.773400Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:53:49.777730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:53:49.777844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T15:53:49.778007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 
2025-03-31T15:53:49.782610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T15:53:49.782823Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T15:53:49.783416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T15:53:49.783637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:53:49.785132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:53:49.786064Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:53:49.786117Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:53:49.786285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T15:53:49.786329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:53:49.786386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T15:53:49.786544Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T15:53:49.793349Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T15:53:49.952928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:53:49.953158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:53:49.953360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T15:53:49.953594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T15:53:49.953664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:53:49.955808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T15:53:49.955932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T15:53:49.956119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:53:49.956165Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T15:53:49.956246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState 
no shards to create, do next state 2025-03-31T15:53:49.956277Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T15:53:49.958227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:53:49.958288Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:53:49.958322Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T15:53:49.960315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:53:49.960374Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:53:49.960421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:53:49.960468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T15:53:49.970419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:53:49.972481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T15:53:49.972664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T15:53:49.973658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:53:49.973774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:53:49.973833Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:53:49.974163Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T15:53:49.974216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:53:49.974370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:53:49.974438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T15:53:49.976664Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:53:49.976707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 
72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:53:49.976896Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:53:49.976937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T15:53:49.977139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:53:49.977183Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T15:53:49.977272Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:53:49.977301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:53:49.977335Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:53:49.977366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:53:49.977404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T15:53:49.977455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:53:49.977486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T15:53:49.977547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T15:53:49.977613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:53:49.977648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T15:53:49.977683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T15:53:49.979541Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:53:49.979672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:53:49.979711Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
50.028010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:53:50.028062Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateView::TPropose, opId: 101:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000002 2025-03-31T15:53:50.028197Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2025-03-31T15:53:50.028374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:53:50.028439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 FAKE_COORDINATOR: Erasing txId 101 2025-03-31T15:53:50.031665Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:53:50.031717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:53:50.031880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T15:53:50.031975Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:53:50.032011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-03-31T15:53:50.032051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-03-31T15:53:50.032243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T15:53:50.032286Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-03-31T15:53:50.032379Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-31T15:53:50.032417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T15:53:50.032474Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-31T15:53:50.032520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T15:53:50.032562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-03-31T15:53:50.032608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T15:53:50.032641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-03-31T15:53:50.032683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-03-31T15:53:50.032777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-31T15:53:50.032815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-03-31T15:53:50.032846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 
72057594046678944, LocalPathId: 1], 4 2025-03-31T15:53:50.032874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-03-31T15:53:50.033713Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T15:53:50.033810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T15:53:50.033863Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-03-31T15:53:50.033902Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2025-03-31T15:53:50.033940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:53:50.034844Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T15:53:50.034932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T15:53:50.034956Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-03-31T15:53:50.034984Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-03-31T15:53:50.035011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-31T15:53:50.035075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-03-31T15:53:50.039480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-31T15:53:50.039840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-03-31T15:53:50.040078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-03-31T15:53:50.040139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-03-31T15:53:50.040530Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-03-31T15:53:50.040617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-31T15:53:50.040650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:297:2288] TestWaitNotification: OK eventTxId 101 2025-03-31T15:53:50.041094Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/UniqueName" 
Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:53:50.041324Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/UniqueName" took 204us result status StatusSuccess 2025-03-31T15:53:50.041722Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/UniqueName" PathDescription { Self { Name: "UniqueName" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ViewDescription { Name: "UniqueName" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 QueryText: "Some query" CapturedContext { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 102 2025-03-31T15:53:50.045148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "UniqueName" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } ReplaceIfExists: true } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:53:50.045426Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalDataSource, opId 102:0, feature flag EnableReplaceIfExistsForExternalEntities 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "UniqueName" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } ReplaceIfExists: true } 2025-03-31T15:53:50.045520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TAlterExternalDataSource Propose: opId# 102:0, path# /MyRoot/UniqueName 2025-03-31T15:53:50.045670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusNameConflict, reason: Check failed: path: '/MyRoot/UniqueName', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalDataSource, at schemeshard: 72057594046678944 2025-03-31T15:53:50.047776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusNameConflict Reason: "Check failed: path: \'/MyRoot/UniqueName\', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalDataSource" TxId: 102 
SchemeshardId: 72057594046678944 PathId: 2 PathCreateTxId: 101, at schemeshard: 72057594046678944 2025-03-31T15:53:50.047946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusNameConflict, reason: Check failed: path: '/MyRoot/UniqueName', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalDataSource, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/UniqueName TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-03-31T15:53:50.048199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-03-31T15:53:50.048256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-03-31T15:53:50.048746Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-03-31T15:53:50.048830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-31T15:53:50.048863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:305:2296] TestWaitNotification: OK eventTxId 102 |65.9%| [TA] {RESULT} $(B)/ydb/core/persqueue/dread_cache_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |65.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |65.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |65.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TBlobStorageStoragePoolMonTest::ReducedSizeClassCalcTest [GOOD] >> TExternalDataSourceTest::RemovingReferencesFromDataSources [GOOD] >> TExternalDataSourceTest::PreventDeletionOfDependentDataSources [GOOD] |65.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest >> TExternalDataSourceTest::CreateExternalDataSource [GOOD] >> TExternalDataSourceTest::CreateExternalDataSourceShouldFailIfSuchEntityAlreadyExists ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:111:2142] 2025-03-31T15:53:49.994424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T15:53:49.994527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:53:49.994570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T15:53:49.994606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T15:53:49.994643Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T15:53:49.994681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T15:53:49.994745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:53:49.994821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T15:53:49.995132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:53:50.079970Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-03-31T15:53:50.080034Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:53:50.084944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:53:50.085110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T15:53:50.085266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T15:53:50.089791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T15:53:50.089973Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T15:53:50.090570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T15:53:50.090743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:53:50.092515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:53:50.093439Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:53:50.093524Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:53:50.093787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T15:53:50.093829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:53:50.093868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T15:53:50.094004Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T15:53:50.100299Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:238:2058] recipient: [1:15:2062] 2025-03-31T15:53:50.286739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" 
StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:53:50.286984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:53:50.287215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T15:53:50.287448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T15:53:50.287506Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:53:50.295506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T15:53:50.295641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T15:53:50.295809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:53:50.295860Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T15:53:50.295894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T15:53:50.295928Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T15:53:50.298328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:53:50.298412Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:53:50.298456Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T15:53:50.300356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:53:50.300417Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:53:50.300456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:53:50.300518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T15:53:50.304714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:53:50.306883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T15:53:50.307098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: 
advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T15:53:50.308168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:53:50.308311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969446 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:53:50.308359Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:53:50.308651Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T15:53:50.308705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:53:50.308886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:53:50.308963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:53:50.311351Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:53:50.311402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:53:50.311591Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:53:50.311627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T15:53:50.311898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:53:50.311941Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T15:53:50.312037Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:53:50.312068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:53:50.312102Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:53:50.312133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:53:50.312166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T15:53:50.312221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:53:50.312278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T15:53:50.312304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T15:53:50.312387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:53:50.312426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 
2025-03-31T15:53:50.312456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T15:53:50.320537Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575940466789 ... SHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-03-31T15:53:50.381833Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2025-03-31T15:53:50.381872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:53:50.383211Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T15:53:50.383300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T15:53:50.383337Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-03-31T15:53:50.383370Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-03-31T15:53:50.383406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-31T15:53:50.383473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-03-31T15:53:50.385251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-03-31T15:53:50.385389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 2025-03-31T15:53:50.386268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:53:50.386383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969446 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:53:50.386431Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TAlterExternalDataSource TPropose, operationId: 102:0HandleReply TEvOperationPlan: step# 5000003 2025-03-31T15:53:50.386542Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 240 2025-03-31T15:53:50.386699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:53:50.386761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-31T15:53:50.387791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard 
Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-31T15:53:50.388598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2025-03-31T15:53:50.389828Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:53:50.389862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:53:50.389996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T15:53:50.390084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T15:53:50.390187Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:53:50.390216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-03-31T15:53:50.390252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-03-31T15:53:50.390284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-03-31T15:53:50.390460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T15:53:50.390509Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-03-31T15:53:50.390602Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-31T15:53:50.390649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T15:53:50.390688Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-31T15:53:50.390720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T15:53:50.390758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-03-31T15:53:50.390799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T15:53:50.390833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-03-31T15:53:50.390865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-03-31T15:53:50.390931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-31T15:53:50.390971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-03-31T15:53:50.391004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-03-31T15:53:50.391042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-03-31T15:53:50.392155Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 
72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T15:53:50.392248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T15:53:50.392280Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-03-31T15:53:50.392317Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-03-31T15:53:50.392395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:53:50.393495Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T15:53:50.393603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T15:53:50.393644Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-03-31T15:53:50.393674Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-03-31T15:53:50.393701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-31T15:53:50.393776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-03-31T15:53:50.396507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-31T15:53:50.396913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-03-31T15:53:50.397105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-03-31T15:53:50.397145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-03-31T15:53:50.397633Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-03-31T15:53:50.397742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-31T15:53:50.397785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:331:2322] TestWaitNotification: OK eventTxId 102 2025-03-31T15:53:50.398307Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:53:50.398494Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path 
"/MyRoot/MyExternalDataSource" took 217us result status StatusSuccess 2025-03-31T15:53:50.398801Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyExternalDataSource" PathDescription { Self { Name: "MyExternalDataSource" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "MyExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 2 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_new_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |65.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest >> PartitionStats::CollectorOverload [GOOD] >> TExternalDataSourceTest::CreateExternalDataSourceWithProperties [GOOD] >> TExternalDataSourceTest::DropExternalDataSource ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::SchemeErrors [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:111:2142] 2025-03-31T15:53:49.666085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T15:53:49.666172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:53:49.666208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T15:53:49.666238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T15:53:49.666285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T15:53:49.666328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T15:53:49.666399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, 
InflightLimit# 10 2025-03-31T15:53:49.666481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T15:53:49.666763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:53:49.757172Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-03-31T15:53:49.757239Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:53:49.762294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:53:49.762512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T15:53:49.762641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T15:53:49.767361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T15:53:49.767567Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T15:53:49.768160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T15:53:49.768350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:53:49.770171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:53:49.771124Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:53:49.771178Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:53:49.771441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T15:53:49.771483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:53:49.771518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T15:53:49.771681Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T15:53:49.778120Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:238:2058] recipient: [1:15:2062] 2025-03-31T15:53:49.903515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:53:49.903737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:53:49.903983Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T15:53:49.904177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T15:53:49.904218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:53:49.906333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T15:53:49.906460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T15:53:49.906668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:53:49.906728Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T15:53:49.906760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T15:53:49.906794Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T15:53:49.908963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:53:49.909017Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:53:49.909055Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T15:53:49.910879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:53:49.910940Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:53:49.910979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:53:49.911024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T15:53:49.914604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:53:49.916684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T15:53:49.916857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T15:53:49.917856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:53:49.917982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969446 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:53:49.918074Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:53:49.918337Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T15:53:49.918388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:53:49.918554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:53:49.918678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:53:49.920825Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:53:49.920876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:53:49.921047Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:53:49.921103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T15:53:49.921355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:53:49.921409Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T15:53:49.921495Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:53:49.921545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:53:49.921579Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:53:49.921607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:53:49.921637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T15:53:49.921693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:53:49.921738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T15:53:49.921772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T15:53:49.921841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:53:49.921875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T15:53:49.921904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T15:53:49.923807Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575940466789 ... 
e DataStream was not found" TxId: 125 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:53:49.990794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 125, database: /MyRoot, subject: , status: StatusSchemeError, reason: (NKikimr::NExternalSource::TExternalSourceException) External source with type DataStream was not found, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/DirA/MyExternalDataSource TestModificationResult got TxId: 125, wait until txId: 125 TestModificationResults wait txId: 126 2025-03-31T15:53:49.995264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" } } TxId: 126 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:53:49.995853Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalDataSource, opId 126:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" } 2025-03-31T15:53:49.995986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalDataSource Propose: opId# 126:0, path# /MyRoot/DirA/MyExternalDataSource 2025-03-31T15:53:49.996187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 126:1, propose status:StatusSchemeError, reason: Authorization method isn't specified, at schemeshard: 72057594046678944 2025-03-31T15:53:49.999200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 126, response: Status: StatusSchemeError Reason: "Authorization method isn\'t specified" TxId: 126 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:53:49.999410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 126, database: /MyRoot, subject: , status: StatusSchemeError, reason: Authorization method isn't specified, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/DirA/MyExternalDataSource TestModificationResult got TxId: 126, wait until txId: 126 TestModificationResults wait txId: 127 2025-03-31T15:53:50.004172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: 
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" Auth { None { } } } } TxId: 127 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:53:50.005058Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalDataSource, opId 127:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" Auth { None { } } } 2025-03-31T15:53:50.005311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalDataSource Propose: opId# 127:0, path# /MyRoot/DirA/MyExternalDataSource 2025-03-31T15:53:50.005830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 127:1, propose status:StatusSchemeError, reason: Maximum length of location must be less or equal equal to 1000 but got 1001, at schemeshard: 72057594046678944 2025-03-31T15:53:50.010036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 127, response: Status: StatusSchemeError Reason: "Maximum length of location must be less or equal equal to 1000 but got 1001" TxId: 127 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:53:50.010869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 127, database: /MyRoot, subject: , status: StatusSchemeError, reason: Maximum length of location must be less or equal equal to 1000 but got 1001, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/DirA/MyExternalDataSource TestModificationResult got 
TxId: 127, wait until txId: 127 TestModificationResults wait txId: 128 2025-03-31T15:53:50.016964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Installation: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" Auth { None { } } } } TxId: 128 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:53:50.018018Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalDataSource, opId 128:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Installation: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" Auth { None { } } } 2025-03-31T15:53:50.018301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalDataSource Propose: opId# 128:0, path# /MyRoot/DirA/MyExternalDataSource 2025-03-31T15:53:50.018812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 128:1, propose status:StatusSchemeError, reason: Maximum length of installation must be less or equal equal to 1000 but got 1001, at schemeshard: 72057594046678944 2025-03-31T15:53:50.030473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 128, response: Status: StatusSchemeError Reason: "Maximum length of installation must be less or equal equal to 1000 but got 1001" TxId: 128 SchemeshardId: 
72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:53:50.031461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 128, database: /MyRoot, subject: , status: StatusSchemeError, reason: Maximum length of installation must be less or equal equal to 1000 but got 1001, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/DirA/MyExternalDataSource TestModificationResult got TxId: 128, wait until txId: 128 TestModificationResults wait txId: 129 2025-03-31T15:53:50.049454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } } } TxId: 129 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:53:50.050100Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalDataSource, opId 129:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } } 2025-03-31T15:53:50.050504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalDataSource Propose: opId# 129:0, path# /MyRoot/DirA/ 2025-03-31T15:53:50.050864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 129:1, propose status:StatusSchemeError, reason: Check failed: path: '/MyRoot/DirA/', error: path part shouldn't be empty, at schemeshard: 72057594046678944 2025-03-31T15:53:50.064238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 129, response: Status: StatusSchemeError Reason: "Check failed: path: \'/MyRoot/DirA/\', error: path part shouldn\'t be empty" TxId: 129 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:53:50.064469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 129, database: /MyRoot, subject: , status: StatusSchemeError, reason: Check failed: path: '/MyRoot/DirA/', error: path part shouldn't be empty, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/DirA/ TestModificationResult got TxId: 129, wait until txId: 129 |66.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TBlobStorageStoragePoolMonTest::SizeClassCalcTest [GOOD] |66.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest |66.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest >> PartitionStats::Collector [GOOD] >> TExternalDataSourceTest::DropTableTwice [GOOD] >> TExternalDataSourceTest::ParallelCreateExternalDataSource ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ReadOnlyMode [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-03-31T15:53:49.749268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 
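Note: the SchemeErrors run that ends just above exercises the propose-time validation for CREATE EXTERNAL DATA SOURCE. Four rejections are visible in the trace: an unknown source type ("External source with type DataStream was not found"), a missing auth method ("Authorization method isn't specified"), Location and Installation strings over the 1000-character cap (the test sends 1001 characters), and an empty final path part. Below is a minimal sketch of those checks; the 1000-character limit and the enabled type list are read off the log, while the function itself and its exact error strings are illustrative, not the schemeshard implementation (the log's own message reads "less or equal equal", a typo preserved verbatim above).

# Sketch of the propose-time checks seen in the SchemeErrors trace.
# Limits and the type list come from the log; everything else is illustrative.
AVAILABLE_SOURCE_TYPES = {
    "ObjectStorage", "ClickHouse", "PostgreSQL", "MySQL", "Ydb", "YT",
    "Greenplum", "MsSQLServer", "Oracle", "Logging", "Solomon",
}
MAX_FIELD_LEN = 1000  # applies to both Location and Installation

def validate_external_data_source(name, source_type,
                                  location="", installation="", auth=None):
    if source_type not in AVAILABLE_SOURCE_TYPES:
        return "External source with type %s was not found" % source_type
    if auth is None:
        return "Authorization method isn't specified"
    if len(location) > MAX_FIELD_LEN:
        return ("Maximum length of location must be less or equal to "
                "%d but got %d" % (MAX_FIELD_LEN, len(location)))
    if len(installation) > MAX_FIELD_LEN:
        return ("Maximum length of installation must be less or equal to "
                "%d but got %d" % (MAX_FIELD_LEN, len(installation)))
    if not name:
        return "path part shouldn't be empty"
    return "StatusAccepted"

# The txId 127 case from the log: a 1001-character location is rejected.
assert "Maximum length of location" in validate_external_data_source(
    "MyExternalDataSource", "ObjectStorage", "a" * 1001, auth="NONE")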
2025-03-31T15:53:49.749362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:53:49.749423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T15:53:49.749476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T15:53:49.749532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T15:53:49.749560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T15:53:49.749616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:53:49.749710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T15:53:49.750027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:53:49.832269Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-03-31T15:53:49.832323Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:53:49.853226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:53:49.853527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T15:53:49.853701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T15:53:49.878966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T15:53:49.879188Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T15:53:49.879863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T15:53:49.880573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:53:49.883106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:53:49.884317Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:53:49.884379Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:53:49.884481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T15:53:49.884549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:53:49.884593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 
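Note: the "Got new config" entry above lists the external source types this schemeshard instance accepts. For reference, creating the ObjectStorage source these tests exercise would look roughly like the YQL below, shown here as a Python string; the WITH-clause spelling (SOURCE_TYPE, LOCATION, AUTH_METHOD) is an assumption based on YDB's public DDL documentation, not something present in this log, while the object name and location are the test's own values.

# Hedged sketch: YQL DDL corresponding to the source created in these tests.
# WITH-clause field names are assumed from YDB docs; values come from the log.
CREATE_SOURCE_YQL = """
CREATE EXTERNAL DATA SOURCE MyExternalDataSource WITH (
    SOURCE_TYPE = "ObjectStorage",
    LOCATION = "https://s3.cloud.net/my_bucket",
    AUTH_METHOD = "NONE"
);
"""
print(CREATE_SOURCE_YQL)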
2025-03-31T15:53:49.884818Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T15:53:49.893180Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:240:2058] recipient: [1:15:2062] 2025-03-31T15:53:50.039308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:53:50.039548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:53:50.039790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T15:53:50.040080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T15:53:50.040136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:53:50.042190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T15:53:50.042305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T15:53:50.042476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:53:50.042552Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T15:53:50.042589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T15:53:50.042645Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T15:53:50.044711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:53:50.044773Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:53:50.044808Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T15:53:50.046935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:53:50.046987Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:53:50.047047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:53:50.047113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T15:53:50.050447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 
MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:53:50.051986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T15:53:50.052117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T15:53:50.053089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:53:50.053236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:53:50.053357Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:53:50.053664Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T15:53:50.053738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:53:50.053914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:53:50.053987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T15:53:50.056519Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:53:50.056570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:53:50.056769Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:53:50.056809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T15:53:50.057167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:53:50.057215Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T15:53:50.057314Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:53:50.057360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:53:50.057405Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:53:50.057438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:53:50.057490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T15:53:50.057553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 
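Note: the txId 1 trace around this point shows the generic schemeshard operation lifecycle as numeric state transitions: "Change state for txid 1:0 2 -> 3", then "3 -> 128", then "128 -> 240" once the coordinator's TEvOperationPlan arrives. The sketch below maps those codes to the phase names that appear on the adjacent log lines; the mapping is inferred from this log, not taken from the schemeshard sources.

# State transitions for txId 1 as observed in the trace above.
# Phase names are the log's own (TCreateParts, TConfigureParts, TPropose,
# TDone); pairing them with the numeric codes is an inference from this log.
PHASES = {
    2: "TCreateParts",     # "ProgressState no shards to create, do next state"
    3: "TConfigureParts",  # NSubDomainState::TConfigureParts ProgressState
    128: "TPropose",       # waits for TEvOperationPlan from the coordinator
    240: "TDone",          # "Operation and all the parts is done"
}

def replay(transitions):
    """Print the phase sequence for the observed 'Change state' messages."""
    for src, dst in transitions:
        print("%s (%d) -> %s (%d)" % (PHASES[src], src, PHASES[dst], dst))

replay([(2, 3), (3, 128), (128, 240)])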
2025-03-31T15:53:50.057595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T15:53:50.057622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T15:53:50.057689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:53:50.057727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T15:53:50.057757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T15:53:50.059565Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575940466789 ... T_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 128:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T15:53:50.515864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-03-31T15:53:50.516317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-03-31T15:53:50.521550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 128, response: Status: StatusAccepted TxId: 128 SchemeshardId: 72057594046678944 PathId: 4, at schemeshard: 72057594046678944 2025-03-31T15:53:50.521708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 128, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /MyRoot/SubDirBBBB 2025-03-31T15:53:50.521977Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:53:50.522021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 128, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:53:50.522190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 128, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-03-31T15:53:50.522301Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:53:50.522342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:452:2410], at schemeshard: 72057594046678944, txId: 128, path id: 1 2025-03-31T15:53:50.522402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:452:2410], at schemeshard: 72057594046678944, txId: 128, path id: 4 2025-03-31T15:53:50.522484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 128:0, at schemeshard: 72057594046678944 2025-03-31T15:53:50.522625Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId# 128:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:53:50.522684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 128 ready parts: 1/1 2025-03-31T15:53:50.522913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 128 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:53:50.523954Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: 
Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 128 2025-03-31T15:53:50.524055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 128 2025-03-31T15:53:50.524093Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 128 2025-03-31T15:53:50.524141Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 128, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2025-03-31T15:53:50.524180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-03-31T15:53:50.524944Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 4 Version: 2 PathOwnerId: 72057594046678944, cookie: 128 2025-03-31T15:53:50.525021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 4 Version: 2 PathOwnerId: 72057594046678944, cookie: 128 2025-03-31T15:53:50.525046Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 128 2025-03-31T15:53:50.525076Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 128, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 2 2025-03-31T15:53:50.525110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-03-31T15:53:50.525174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 128, ready parts: 0/1, is published: true 2025-03-31T15:53:50.527711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 128:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:128 msg type: 269090816 2025-03-31T15:53:50.527907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 128, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 128 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 128 at step: 5000004 2025-03-31T15:53:50.532044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 128 2025-03-31T15:53:50.532450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:53:50.532668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 128 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:53:50.532736Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId# 128:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000004, at schemeshard: 72057594046678944 2025-03-31T15:53:50.532889Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 128:0 128 -> 240 2025-03-31T15:53:50.533060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId 
[OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-03-31T15:53:50.533148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-03-31T15:53:50.533514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 128 2025-03-31T15:53:50.537549Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:53:50.537620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 128, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:53:50.537830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 128, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-03-31T15:53:50.537953Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:53:50.537990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:452:2410], at schemeshard: 72057594046678944, txId: 128, path id: 1 2025-03-31T15:53:50.538031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:452:2410], at schemeshard: 72057594046678944, txId: 128, path id: 4 FAKE_COORDINATOR: Erasing txId 128 2025-03-31T15:53:50.538432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 128:0, at schemeshard: 72057594046678944 2025-03-31T15:53:50.538476Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 128:0 ProgressState 2025-03-31T15:53:50.538587Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#128:0 progress is 1/1 2025-03-31T15:53:50.538627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 128 ready parts: 1/1 2025-03-31T15:53:50.538681Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#128:0 progress is 1/1 2025-03-31T15:53:50.538714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 128 ready parts: 1/1 2025-03-31T15:53:50.538752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 128, ready parts: 1/1, is published: false 2025-03-31T15:53:50.538802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 128 ready parts: 1/1 2025-03-31T15:53:50.538862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 128:0 2025-03-31T15:53:50.538899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 128:0 2025-03-31T15:53:50.538993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-03-31T15:53:50.539034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 128, publications: 2, subscribers: 0 2025-03-31T15:53:50.539069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 128, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2025-03-31T15:53:50.539124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 128, [OwnerId: 72057594046678944, LocalPathId: 4], 3 2025-03-31T15:53:50.539854Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 128 2025-03-31T15:53:50.539959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 128 2025-03-31T15:53:50.539995Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 128 2025-03-31T15:53:50.540038Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 128, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-03-31T15:53:50.540079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-03-31T15:53:50.540979Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 128 2025-03-31T15:53:50.541057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 128 2025-03-31T15:53:50.541087Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 128 2025-03-31T15:53:50.541123Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 128, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 3 2025-03-31T15:53:50.541161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-03-31T15:53:50.541247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 128, subscribers: 0 2025-03-31T15:53:50.546241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 128 2025-03-31T15:53:50.547294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 128 TestModificationResult got TxId: 128, wait until txId: 128 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExistsShouldFailIfFeatureFlagIsNotSet [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:111:2142] 2025-03-31T15:53:50.113960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T15:53:50.114045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:53:50.114100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T15:53:50.114137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T15:53:50.114180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 
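Note: the txId 128 trace above also shows the scheme-board publication handshake: the operation finishes with "publications: 2, subscribers: 0", each TEvUpdateAck is processed with "Publication in-flight, count: 2" then "count: 1", and the final ack yields "Publication complete, notify & remove". The sketch below models that ack bookkeeping under the assumption that exactly one ack per (pathId, version) pair is expected; it is an illustration of the protocol visible in the log, not YDB code.

# Ack bookkeeping for scheme-board publication, as observed for txId 128:
# two publications go out, each TEvUpdateAck is matched against the expected
# (pathId, version), and completion notifies any subscribers (none here).
class Publication:
    def __init__(self, tx_id, path_versions):
        self.tx_id = tx_id
        self.pending = dict(path_versions)  # pathId -> expected version

    def on_update_ack(self, path_id, version):
        # The log reports the in-flight count before removing the acked path.
        print("Publication in-flight, count: %d, txId: %d"
              % (len(self.pending), self.tx_id))
        if self.pending.get(path_id) == version:
            del self.pending[path_id]
        if not self.pending:
            print("Publication complete, notify & remove, txId: %d" % self.tx_id)

pub = Publication(128, {1: 9, 4: 3})  # pathId/version pairs from the trace
pub.on_update_ack(1, 9)
pub.on_update_ack(4, 3)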
2025-03-31T15:53:50.114209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T15:53:50.114269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:53:50.114350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T15:53:50.114655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:53:50.278333Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-03-31T15:53:50.278392Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:53:50.286529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:53:50.286748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T15:53:50.286910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T15:53:50.293872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T15:53:50.294081Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T15:53:50.294779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T15:53:50.295002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:53:50.302278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:53:50.304328Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:53:50.304458Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:53:50.304872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T15:53:50.304947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:53:50.305011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T15:53:50.305361Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T15:53:50.318232Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:238:2058] recipient: [1:15:2062] 2025-03-31T15:53:50.521730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: 
"pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:53:50.522017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:53:50.522277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T15:53:50.522885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T15:53:50.522967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:53:50.525540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T15:53:50.525660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T15:53:50.525845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:53:50.525916Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T15:53:50.525954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T15:53:50.525993Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T15:53:50.528176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:53:50.528233Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:53:50.528280Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T15:53:50.530349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:53:50.530409Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:53:50.530450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:53:50.530501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T15:53:50.542189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:53:50.545245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T15:53:50.545456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to 
tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T15:53:50.546414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:53:50.546550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969446 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:53:50.546596Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:53:50.546897Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T15:53:50.546958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:53:50.547147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:53:50.547250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:53:50.550587Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:53:50.550672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:53:50.550960Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:53:50.551005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T15:53:50.553779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:53:50.553857Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T15:53:50.553972Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:53:50.554009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:53:50.554050Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:53:50.554104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:53:50.554148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T15:53:50.554215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:53:50.554258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T15:53:50.554288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T15:53:50.554385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:53:50.554427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T15:53:50.554461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 
72057594046678944, LocalPathId: 1], 3 2025-03-31T15:53:50.564681Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:53:50.564936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:53:50.564985Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-03-31T15:53:50.565028Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-03-31T15:53:50.565073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:53:50.565217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-03-31T15:53:50.570075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-03-31T15:53:50.570567Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-03-31T15:53:50.571758Z node 1 :TX_PROXY DEBUG: actor# [1:268:2259] Bootstrap 2025-03-31T15:53:50.593952Z node 1 :TX_PROXY DEBUG: actor# [1:268:2259] Become StateWork (SchemeCache [1:273:2264]) 2025-03-31T15:53:50.597621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } ReplaceIfExists: true } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:53:50.598054Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalDataSource, opId 101:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } ReplaceIfExists: true } 2025-03-31T15:53:50.598133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 101:0, explain: Invalid TCreateExternalDataSource request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, at schemeshard: 72057594046678944 2025-03-31T15:53:50.598184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusPreconditionFailed, reason: Invalid TCreateExternalDataSource request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, at schemeshard: 72057594046678944 2025-03-31T15:53:50.599582Z node 1 :TX_PROXY DEBUG: actor# [1:268:2259] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-31T15:53:50.606392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusPreconditionFailed Reason: "Invalid TCreateExternalDataSource request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off" TxId: 101 
SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:53:50.606591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Invalid TCreateExternalDataSource request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/MyExternalDataSource 2025-03-31T15:53:50.607310Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-03-31T15:53:50.607532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-03-31T15:53:50.607578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-03-31T15:53:50.608046Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-03-31T15:53:50.608143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-31T15:53:50.608188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:283:2274] TestWaitNotification: OK eventTxId 101 2025-03-31T15:53:50.608722Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:53:50.608916Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 210us result status StatusPathDoesNotExist 2025-03-31T15:53:50.609132Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyExternalDataSource\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/MyExternalDataSource" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |66.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest |66.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest |66.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest >> PartitionStats::CollectorOverload [GOOD] |66.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut |66.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::RemovingReferencesFromDataSources [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] 
recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-03-31T15:53:50.221950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T15:53:50.222046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:53:50.222094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T15:53:50.222139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T15:53:50.222178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T15:53:50.222206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T15:53:50.222256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:53:50.222329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T15:53:50.222664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:53:50.325341Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-03-31T15:53:50.325390Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:53:50.347806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:53:50.348300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T15:53:50.348699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T15:53:50.360431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T15:53:50.360639Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T15:53:50.361356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T15:53:50.361991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:53:50.364348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:53:50.365622Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:53:50.365690Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:53:50.365824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T15:53:50.365884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:53:50.365929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T15:53:50.366160Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T15:53:50.373306Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:240:2058] recipient: [1:15:2062] 2025-03-31T15:53:50.540963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:53:50.541197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:53:50.541405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T15:53:50.541659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T15:53:50.541709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:53:50.544870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T15:53:50.544991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T15:53:50.545190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:53:50.545242Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T15:53:50.545296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T15:53:50.545332Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T15:53:50.548750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:53:50.548865Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:53:50.548931Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T15:53:50.551279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:53:50.551335Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 
72057594046678944 2025-03-31T15:53:50.551390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:53:50.551446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T15:53:50.554619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:53:50.559218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T15:53:50.559502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T15:53:50.560391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:53:50.560500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:53:50.560534Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:53:50.560835Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T15:53:50.560894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:53:50.561021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:53:50.561083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T15:53:50.562717Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:53:50.562749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:53:50.562873Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:53:50.562904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T15:53:50.563228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:53:50.563283Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T15:53:50.563371Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:53:50.563407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:53:50.563445Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:53:50.563478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:53:50.563525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T15:53:50.563563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:53:50.563594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T15:53:50.563622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T15:53:50.563707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:53:50.563750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T15:53:50.563782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T15:53:50.565473Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575940466789 ... thId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 104 2025-03-31T15:53:50.689224Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-03-31T15:53:50.689261Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2025-03-31T15:53:50.689303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:53:50.690176Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-03-31T15:53:50.690260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-03-31T15:53:50.690290Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-03-31T15:53:50.690342Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-03-31T15:53:50.690375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-31T15:53:50.690436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-03-31T15:53:50.697132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 104:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:104 msg type: 269090816 2025-03-31T15:53:50.697341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 104, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 104 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 104 at 
step: 5000005 2025-03-31T15:53:50.699978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:53:50.700162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:53:50.700232Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropExternalDataSource TPropose opId# 104:0 HandleReply TEvOperationPlan: step# 5000005 2025-03-31T15:53:50.700393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-31T15:53:50.700492Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 128 -> 240 2025-03-31T15:53:50.700701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:53:50.700776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-31T15:53:50.702920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-03-31T15:53:50.703172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 FAKE_COORDINATOR: Erasing txId 104 2025-03-31T15:53:50.708251Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:53:50.708313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:53:50.708532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T15:53:50.708762Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:53:50.708802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 104, path id: 1 2025-03-31T15:53:50.708840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 104, path id: 2 2025-03-31T15:53:50.709183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-31T15:53:50.709243Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2025-03-31T15:53:50.709443Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-03-31T15:53:50.709510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-31T15:53:50.709588Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-03-31T15:53:50.709666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-31T15:53:50.709757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-03-31T15:53:50.709919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready 
parts: 1/1 2025-03-31T15:53:50.709980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-03-31T15:53:50.710068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-03-31T15:53:50.710182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-31T15:53:50.710255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2025-03-31T15:53:50.710315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2025-03-31T15:53:50.710375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-03-31T15:53:50.711002Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-03-31T15:53:50.711123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-03-31T15:53:50.711162Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2025-03-31T15:53:50.711203Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-03-31T15:53:50.711244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-31T15:53:50.711524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-31T15:53:50.711572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-31T15:53:50.711635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:53:50.711900Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 104 2025-03-31T15:53:50.711964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 104 2025-03-31T15:53:50.711992Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-03-31T15:53:50.712019Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-03-31T15:53:50.712043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:53:50.712099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 
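A detail worth flagging in the txId 104 drop above: the removed path [OwnerId: 72057594046678944, LocalPathId: 2] is published at version 18446744073709551615, which is 2^64 - 1, the maximum value of an unsigned 64-bit integer. Judging by this log, schemeshard publishes uint64-max as the terminal version of a dropped path before TTxCleanDroppedPaths persists the removal. A minimal C++ check of the constant, purely illustrative:

    #include <cassert>
    #include <cstdint>
    #include <limits>

    int main() {
        // The "dropped path" version seen in the log is exactly uint64 max.
        constexpr uint64_t DroppedPathVersion = 18446744073709551615ULL;
        static_assert(DroppedPathVersion ==
                      std::numeric_limits<uint64_t>::max());
        assert(DroppedPathVersion == ~0ULL); // 2^64 - 1
    }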
2025-03-31T15:53:50.723324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-03-31T15:53:50.723646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-03-31T15:53:50.723813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-03-31T15:53:50.724799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-03-31T15:53:50.724930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-03-31T15:53:50.727285Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-03-31T15:53:50.727710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-03-31T15:53:50.727832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:386:2377] TestWaitNotification: OK eventTxId 104 2025-03-31T15:53:50.729988Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:53:50.730539Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalDataSource" took 485us result status StatusPathDoesNotExist 2025-03-31T15:53:50.730892Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalDataSource\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ExternalDataSource" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |66.0%| [LD] {RESULT} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut >> TExternalDataSourceTest::CreateExternalDataSourceShouldFailIfSuchEntityAlreadyExists [GOOD] |66.0%| [TA] $(B)/ydb/core/blobstorage/storagepoolmon/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::PreventDeletionOfDependentDataSources [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:111:2142] 2025-03-31T15:53:49.517747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T15:53:49.517833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:53:49.517862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T15:53:49.517886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T15:53:49.517924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T15:53:49.517945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T15:53:49.517995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:53:49.518052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T15:53:49.518269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:53:49.603920Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-03-31T15:53:49.604109Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:53:49.624499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:53:49.624686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T15:53:49.624822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T15:53:49.636613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T15:53:49.636778Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T15:53:49.637240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T15:53:49.637382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 
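The "Got new config" entry above enumerates the external data source types this schemeshard instance will accept (ObjectStorage, ClickHouse, PostgreSQL, and so on). A hypothetical whitelist check in that spirit is sketched below; the set contents are copied from the log, while the function name is invented for illustration and does not correspond to actual YDB code.

    #include <string>
    #include <unordered_set>

    // Hypothetical membership test over the AvailableExternalDataSources
    // list printed in the QueryServiceConfig entry above.
    bool IsSourceTypeAvailable(const std::string& type) {
        static const std::unordered_set<std::string> kAvailable = {
            "ObjectStorage", "ClickHouse", "PostgreSQL", "MySQL", "Ydb",
            "YT", "Greenplum", "MsSQLServer", "Oracle", "Logging", "Solomon",
        };
        return kAvailable.count(type) != 0;
    }

    int main() {
        return IsSourceTypeAvailable("ObjectStorage") ? 0 : 1;
    }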
2025-03-31T15:53:49.639407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:53:49.640398Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:53:49.640473Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:53:49.640722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T15:53:49.640768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:53:49.640805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T15:53:49.640952Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T15:53:49.652053Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:238:2058] recipient: [1:15:2062] 2025-03-31T15:53:49.788983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:53:49.789153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:53:49.789304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T15:53:49.789451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T15:53:49.789496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:53:49.792091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T15:53:49.792231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T15:53:49.792427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:53:49.792473Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T15:53:49.792505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T15:53:49.792535Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T15:53:49.796911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:53:49.796980Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:53:49.797019Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 
128 2025-03-31T15:53:49.801378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:53:49.801437Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:53:49.801481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:53:49.801545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T15:53:49.804944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:53:49.811947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T15:53:49.812148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T15:53:49.813144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:53:49.813280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969446 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:53:49.813369Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:53:49.813661Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T15:53:49.813710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:53:49.813877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:53:49.813945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:53:49.820207Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:53:49.820276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:53:49.820456Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:53:49.820493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T15:53:49.825615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
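Across all of these test boots, the setup operation walks the same numeric states: "Change state for txid 1:0 2 -> 3", then "3 -> 128", then "128 -> 240" once the fake coordinator plans the step. The sketch below mirrors just those transitions; the numeric values come straight from the log's "Change state" lines, but the state names are our guesses, not YDB's actual enum.

    #include <cstdint>
    #include <iostream>

    // Hypothetical mirror of the txid state changes repeated in this log:
    // 2 -> 3 (parts created) -> 128 (proposed) -> 240 (done).
    enum class ETxState : uint32_t {
        CreateParts = 2,
        ConfigureParts = 3,
        Propose = 128,
        Done = 240,
    };

    ETxState Next(ETxState s) {
        switch (s) {
            case ETxState::CreateParts:    return ETxState::ConfigureParts;
            case ETxState::ConfigureParts: return ETxState::Propose;
            case ETxState::Propose:        return ETxState::Done;
            case ETxState::Done:           return ETxState::Done;
        }
        return s;
    }

    int main() {
        for (ETxState s = ETxState::CreateParts; s != ETxState::Done;
             s = Next(s))
            std::cout << "Change state: " << static_cast<uint32_t>(s)
                      << " -> " << static_cast<uint32_t>(Next(s)) << "\n";
    }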
2025-03-31T15:53:49.825695Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T15:53:49.825786Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:53:49.825822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:53:49.825868Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:53:49.825898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:53:49.825934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T15:53:49.826000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:53:49.826035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T15:53:49.826061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T15:53:49.826137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:53:49.826175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T15:53:49.826204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T15:53:49.828162Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575940466789 ... ation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-03-31T15:53:50.931693Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T15:53:50.931729Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-03-31T15:53:50.931757Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-03-31T15:53:50.931830Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-03-31T15:53:50.931864Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-31T15:53:50.931901Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 3, subscribers: 0 2025-03-31T15:53:50.931932Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-03-31T15:53:50.931959Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-03-31T15:53:50.931977Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-03-31T15:53:50.933992Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T15:53:50.934081Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T15:53:50.934111Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 101 2025-03-31T15:53:50.934156Z 
node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-03-31T15:53:50.934190Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-03-31T15:53:50.936817Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T15:53:50.936923Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T15:53:50.936960Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-03-31T15:53:50.936994Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-03-31T15:53:50.937029Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-31T15:53:50.938140Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T15:53:50.938221Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T15:53:50.938248Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-03-31T15:53:50.938277Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-03-31T15:53:50.938307Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-31T15:53:50.938382Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-03-31T15:53:50.939608Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-31T15:53:50.941563Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-31T15:53:50.941655Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-03-31T15:53:50.941884Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-03-31T15:53:50.941927Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-03-31T15:53:50.942398Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-03-31T15:53:50.942495Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 
2025-03-31T15:53:50.942548Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:334:2325] TestWaitNotification: OK eventTxId 101 2025-03-31T15:53:50.943075Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:53:50.943297Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 239us result status StatusSuccess 2025-03-31T15:53:50.943611Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 2025-03-31T15:53:50.946669Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpDropExternalDataSource Drop { Name: "ExternalDataSource" } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:53:50.946828Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TDropExternalDataSource Propose: opId# 103:0, path# /MyRoot/ExternalDataSource 2025-03-31T15:53:50.946930Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusSchemeError, reason: Other entities depend on this data source, please remove them at the beginning: /MyRoot/ExternalTable, at schemeshard: 72057594046678944 2025-03-31T15:53:50.949596Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusSchemeError Reason: "Other entities depend on this data source, please remove them at the beginning: /MyRoot/ExternalTable" TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:53:50.949758Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusSchemeError, reason: Other entities depend on this data source, please remove them at the beginning: 
/MyRoot/ExternalTable, operation: DROP EXTERNAL DATA SOURCE, path: /MyRoot/ExternalDataSource TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-03-31T15:53:50.950039Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-03-31T15:53:50.950084Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-03-31T15:53:50.950485Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-03-31T15:53:50.950587Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-03-31T15:53:50.950622Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:342:2333] TestWaitNotification: OK eventTxId 103 2025-03-31T15:53:50.951151Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:53:50.951297Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalDataSource" took 229us result status StatusSuccess 2025-03-31T15:53:50.951513Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalDataSource" PathDescription { Self { Name: "ExternalDataSource" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "ExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |66.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest >> TExternalDataSourceTest::DropExternalDataSource [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithAsyncCompression |66.0%| [TA] $(B)/ydb/core/sys_view/partition_stats/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::CreateExternalDataSourceShouldFailIfSuchEntityAlreadyExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:111:2142] 2025-03-31T15:53:50.456876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T15:53:50.456966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:53:50.457023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T15:53:50.457062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T15:53:50.457114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T15:53:50.457145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T15:53:50.457214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:53:50.457284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T15:53:50.457609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:53:50.548100Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-03-31T15:53:50.548167Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:53:50.554830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:53:50.555048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T15:53:50.555211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T15:53:50.561584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T15:53:50.561759Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T15:53:50.562234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T15:53:50.562426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 2025-03-31T15:53:50.564487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:53:50.565193Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:53:50.565235Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:53:50.565457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T15:53:50.565528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:53:50.565570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T15:53:50.565724Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T15:53:50.573368Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:238:2058] recipient: [1:15:2062] 2025-03-31T15:53:50.710267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:53:50.710498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:53:50.710694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T15:53:50.710908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T15:53:50.710970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:53:50.717561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T15:53:50.718077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T15:53:50.718560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:53:50.718767Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T15:53:50.718871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T15:53:50.718964Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T15:53:50.726135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:53:50.726263Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:53:50.726359Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state 
for txid 1:0 3 -> 128 2025-03-31T15:53:50.730931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:53:50.731001Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:53:50.731051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:53:50.731143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T15:53:50.741991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:53:50.747129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T15:53:50.747376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T15:53:50.748412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:53:50.748542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969446 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:53:50.748597Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:53:50.748896Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T15:53:50.748956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:53:50.749169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:53:50.749242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:53:50.752032Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:53:50.752118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:53:50.752335Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:53:50.752377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T15:53:50.752684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 
72057594046678944 2025-03-31T15:53:50.752729Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T15:53:50.752832Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:53:50.752867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:53:50.752912Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:53:50.752953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:53:50.752993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T15:53:50.753054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:53:50.753092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T15:53:50.753123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T15:53:50.753204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:53:50.753246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T15:53:50.753293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T15:53:50.755604Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575940466789 ... opId# 101:0 ProgressState 2025-03-31T15:53:51.507811Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-31T15:53:51.507839Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T15:53:51.507865Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-31T15:53:51.507889Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T15:53:51.507912Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-03-31T15:53:51.507953Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T15:53:51.507981Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-03-31T15:53:51.508007Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-03-31T15:53:51.508051Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-31T15:53:51.508074Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-03-31T15:53:51.508098Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-03-31T15:53:51.508119Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-03-31T15:53:51.508610Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T15:53:51.508667Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 
LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T15:53:51.508692Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-03-31T15:53:51.508719Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-03-31T15:53:51.508743Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:53:51.509959Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T15:53:51.510039Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T15:53:51.510072Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-03-31T15:53:51.510098Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-03-31T15:53:51.510123Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-31T15:53:51.510184Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-03-31T15:53:51.513022Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-31T15:53:51.513951Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-03-31T15:53:51.514185Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-03-31T15:53:51.514235Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-03-31T15:53:51.514640Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-03-31T15:53:51.514740Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-31T15:53:51.514790Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:304:2295] TestWaitNotification: OK eventTxId 101 2025-03-31T15:53:51.515316Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:53:51.515504Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 220us result status StatusSuccess 2025-03-31T15:53:51.515781Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyExternalDataSource" PathDescription { Self { Name: "MyExternalDataSource" PathId: 2 
SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "MyExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 102 2025-03-31T15:53:51.518417Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_new_bucket" Auth { None { } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:53:51.518681Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalDataSource, opId 102:0, feature flag EnableReplaceIfExistsForExternalEntities 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_new_bucket" Auth { None { } } } 2025-03-31T15:53:51.518755Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalDataSource Propose: opId# 102:0, path# /MyRoot/MyExternalDataSource 2025-03-31T15:53:51.518909Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/MyExternalDataSource', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeExternalDataSource, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2025-03-31T15:53:51.521733Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/MyExternalDataSource\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeExternalDataSource, state: EPathStateNoChanges)" TxId: 102 SchemeshardId: 72057594046678944 PathId: 2 PathCreateTxId: 101, at schemeshard: 72057594046678944 2025-03-31T15:53:51.521892Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/MyExternalDataSource', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeExternalDataSource, state: 
EPathStateNoChanges), operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/MyExternalDataSource TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-03-31T15:53:51.522149Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-03-31T15:53:51.522188Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-03-31T15:53:51.522537Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-03-31T15:53:51.522626Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-31T15:53:51.522661Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:312:2303] TestWaitNotification: OK eventTxId 102 2025-03-31T15:53:51.523108Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:53:51.523271Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 218us result status StatusSuccess 2025-03-31T15:53:51.523514Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyExternalDataSource" PathDescription { Self { Name: "MyExternalDataSource" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "MyExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TExternalDataSourceTest::ParallelCreateExternalDataSource [GOOD] >> ReadSessionImplTest::DecompressRaw [GOOD] >> ReadSessionImplTest::DecompressGzip [GOOD] >> ReadSessionImplTest::DecompressZstd [GOOD] >> ReadSessionImplTest::DecompressRawEmptyMessage [GOOD] >> ReadSessionImplTest::DecompressGzipEmptyMessage [GOOD] >> ReadSessionImplTest::DecompressWithSynchronousExecutor [GOOD] >> ReadSessionImplTest::DataReceivedCallbackReal >> CompressExecutor::TestReorderedExecutor >> PersQueueSdkReadSessionTest::SpecifyClustersExplicitly ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::DropExternalDataSource [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:111:2142] 2025-03-31T15:53:50.561626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T15:53:50.561758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:53:50.561808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T15:53:50.561845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T15:53:50.561900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T15:53:50.561936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T15:53:50.562006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:53:50.562101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T15:53:50.562403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:53:50.664674Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-03-31T15:53:50.664746Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:53:50.671149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:53:50.671350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T15:53:50.671540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T15:53:50.677385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T15:53:50.677616Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T15:53:50.678330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T15:53:50.678505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:53:50.680443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 
72057594046678944 2025-03-31T15:53:50.681528Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:53:50.681604Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:53:50.681884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T15:53:50.681940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:53:50.681985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T15:53:50.682185Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T15:53:50.690187Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:238:2058] recipient: [1:15:2062] 2025-03-31T15:53:50.871831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:53:50.872118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:53:50.872373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T15:53:50.872622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T15:53:50.872704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:53:50.875800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T15:53:50.875961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T15:53:50.876176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:53:50.876248Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T15:53:50.876281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T15:53:50.876309Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T15:53:50.878718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:53:50.878777Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:53:50.878811Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T15:53:50.881141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:53:50.881214Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:53:50.881262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:53:50.881336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T15:53:50.892959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:53:50.895608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T15:53:50.895813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T15:53:50.896898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:53:50.897038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969446 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:53:50.897088Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:53:50.897432Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T15:53:50.897519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:53:50.897734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:53:50.897814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:53:50.900964Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:53:50.901023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:53:50.901170Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:53:50.901211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T15:53:50.901457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:53:50.901518Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 
ProgressState 2025-03-31T15:53:50.901611Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:53:50.901660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:53:50.901699Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:53:50.901729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:53:50.901763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T15:53:50.901824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:53:50.901864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T15:53:50.901893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T15:53:50.901965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:53:50.902002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T15:53:50.902038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T15:53:50.903932Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575940466789 ... Id: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T15:53:51.742749Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-03-31T15:53:51.742776Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2025-03-31T15:53:51.742802Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:53:51.743815Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T15:53:51.743876Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T15:53:51.743898Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-03-31T15:53:51.743917Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-03-31T15:53:51.743936Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-31T15:53:51.743984Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-03-31T15:53:51.745103Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-03-31T15:53:51.745195Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 
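The TDropExternalDataSource plan step traced next (txId 102) completes because nothing references the source, in contrast with txId 103 earlier in this output, which was rejected with "Other entities depend on this data source". A sketch of the ordering this implies, assuming the YQL surface:

    -- Rejected while a dependent external table still exists (StatusSchemeError):
    DROP EXTERNAL DATA SOURCE ExternalDataSource;

    -- Dependents first, then the source itself:
    DROP EXTERNAL TABLE ExternalTable;
    DROP EXTERNAL DATA SOURCE ExternalDataSource;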
FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 2025-03-31T15:53:51.745516Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:53:51.745603Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 8589936748 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:53:51.745639Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropExternalDataSource TPropose opId# 102:0 HandleReply TEvOperationPlan: step# 5000003 2025-03-31T15:53:51.745703Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-31T15:53:51.745758Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 240 2025-03-31T15:53:51.745864Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:53:51.745905Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-31T15:53:51.746487Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-31T15:53:51.747256Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2025-03-31T15:53:51.748551Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:53:51.748587Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:53:51.748689Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T15:53:51.748805Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:53:51.748828Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:207:2209], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-03-31T15:53:51.748850Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:207:2209], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-03-31T15:53:51.749057Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T15:53:51.749108Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-03-31T15:53:51.749198Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-31T15:53:51.749231Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T15:53:51.749265Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-31T15:53:51.749291Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T15:53:51.749323Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-03-31T15:53:51.749364Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T15:53:51.749406Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-03-31T15:53:51.749438Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-03-31T15:53:51.749522Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-31T15:53:51.749569Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-03-31T15:53:51.749601Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-03-31T15:53:51.749632Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-03-31T15:53:51.750051Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T15:53:51.750125Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T15:53:51.750161Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-03-31T15:53:51.750197Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-03-31T15:53:51.750233Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-31T15:53:51.750484Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-31T15:53:51.750517Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-31T15:53:51.750559Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:53:51.750698Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T15:53:51.750738Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T15:53:51.750754Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-03-31T15:53:51.750771Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-03-31T15:53:51.750787Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:53:51.750824Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-03-31T15:53:51.754165Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-31T15:53:51.754324Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-03-31T15:53:51.754389Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-03-31T15:53:51.754642Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-03-31T15:53:51.754679Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-03-31T15:53:51.755054Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-03-31T15:53:51.755171Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-31T15:53:51.755209Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:332:2323] TestWaitNotification: OK eventTxId 102 2025-03-31T15:53:51.755644Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:53:51.755818Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 216us result status StatusPathDoesNotExist 2025-03-31T15:53:51.755964Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyExternalDataSource\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/MyExternalDataSource" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> ReadSessionImplTest::UsesOnRetryStateDuringRetries [GOOD] >> RetryPolicy::TWriteSession_TestPolicy >> ReadSessionImplTest::ForcefulDestroyPartitionStream [GOOD] >> ReadSessionImplTest::DestroyPartitionStreamRequest [GOOD] >> ReadSessionImplTest::DecompressZstdEmptyMessage [GOOD] >> ReadSessionImplTest::PacksBatches_BatchABitBiggerThanLimit [GOOD] >> ReadSessionImplTest::PacksBatches_BatchesEqualToServerBatches [GOOD] >> ReadSessionImplTest::HoleBetweenOffsets >> ReadSessionImplTest::ProperlyOrdersDecompressedData >> ReadSessionImplTest::HoleBetweenOffsets [GOOD] >> ReadSessionImplTest::LOGBROKER_7702 [GOOD] >> ReadSessionImplTest::ProperlyOrdersDecompressedData [GOOD] >> 
ReadSessionImplTest::PacksBatches_ExactlyTwoMessagesInBatch [GOOD] >> ReadSessionImplTest::PacksBatches_OneMessageInEveryBatch [GOOD] >> ReadSessionImplTest::PacksBatches_BigBatchDecompressWithTwoBatchTasks >> ReadSessionImplTest::SuccessfulInit [GOOD] >> ReadSessionImplTest::SuccessfulInitAndThenTimeoutCallback [GOOD] >> ReadSessionImplTest::StopsRetryAfterFailedAttempt [GOOD] >> ReadSessionImplTest::StopsRetryAfterTimeout [GOOD] >> ReadSessionImplTest::UnpackBigBatchWithTwoPartitions [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulRelease ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ParallelCreateExternalDataSource [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:111:2142] 2025-03-31T15:53:50.678874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T15:53:50.678967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:53:50.679026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T15:53:50.679068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T15:53:50.679226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T15:53:50.679261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T15:53:50.679344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:53:50.679433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T15:53:50.679829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:53:50.804116Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-03-31T15:53:50.804206Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:53:50.810330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:53:50.810542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T15:53:50.810703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 
2025-03-31T15:53:50.816233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T15:53:50.816485Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T15:53:50.817289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T15:53:50.817559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:53:50.820037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:53:50.821170Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:53:50.821235Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:53:50.821549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T15:53:50.821615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:53:50.821659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T15:53:50.821872Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T15:53:50.829858Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:238:2058] recipient: [1:15:2062] 2025-03-31T15:53:51.007835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:53:51.008118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:53:51.008362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T15:53:51.008616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T15:53:51.008685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:53:51.011741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T15:53:51.011911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T15:53:51.012110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:53:51.012165Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T15:53:51.012212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState 
no shards to create, do next state 2025-03-31T15:53:51.012249Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T15:53:51.016067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:53:51.016155Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:53:51.016199Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T15:53:51.018564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:53:51.018631Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:53:51.018677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:53:51.018735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T15:53:51.023043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:53:51.025511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T15:53:51.025731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T15:53:51.026944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:53:51.027120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969446 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:53:51.027176Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:53:51.027500Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T15:53:51.027576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:53:51.027777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:53:51.027859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:53:51.030819Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:53:51.030881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 
72057594046678944, LocalPathId: 1] 2025-03-31T15:53:51.031120Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:53:51.031162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T15:53:51.031491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:53:51.031541Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T15:53:51.031645Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:53:51.031688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:53:51.031732Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:53:51.031779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:53:51.031867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T15:53:51.031925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:53:51.031965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T15:53:51.031997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T15:53:51.032072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:53:51.032113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T15:53:51.032148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T15:53:51.034495Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575940466789 ... 
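The trace above exercises the schemeshard operation state machine end to end: the AlterSubDomain proposal is accepted (StatusAccepted), TCreateParts moves part 1:0 from state 2 to 3 (no shards to create), TConfigureParts moves it to 128, TPropose hands the transaction to the coordinator (planned at step 5000001, 128 -> 240), and TDone completes the part and publishes the path to the scheme board. In the schemeshard unit tests this cycle is normally driven through the ut_helpers layer; the sketch below assumes that API (TTestBasicRuntime/TTestEnv, TestMkDir, TestCreateExternalDataSource, env.TestWaitNotification, DescribePath/TestDescribeResult) and mirrors the objects seen later in this log rather than reproducing the exact test source.

    #include <ydb/core/tx/schemeshard/ut_helpers/helpers.h>

    using namespace NSchemeShardUT_Private;

    Y_UNIT_TEST_SUITE(TExternalDataSourceSketch) {
        Y_UNIT_TEST(CreateAndDescribe) {
            TTestBasicRuntime runtime;
            TTestEnv env(runtime);   // boots schemeshard, coordinator and hive for the test
            ui64 txId = 100;

            TestMkDir(runtime, ++txId, "/MyRoot", "DirA");
            TestCreateExternalDataSource(runtime, ++txId, "/MyRoot/DirA", R"(
                Name: "MyExternalDataSource1"
                SourceType: "ObjectStorage"
                Location: "https://s3.cloud.net/my_bucket"
                Auth { None {} }
            )");
            // Each operation walks the state machine shown above (2 -> 3 -> 128 -> 240)
            // before completion is reported; wait for both transactions.
            env.TestWaitNotification(runtime, {txId - 1, txId});

            TestDescribeResult(DescribePath(runtime, "/MyRoot/DirA/MyExternalDataSource1"),
                               {NLs::PathExist, NLs::Finished});
        }
    }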
end EvNotifyTxCompletion 2025-03-31T15:53:52.030336Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 126 2025-03-31T15:53:52.031526Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 124, at schemeshard: 72057594046678944 2025-03-31T15:53:52.031675Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 125, at schemeshard: 72057594046678944 2025-03-31T15:53:52.031735Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 124: got EvNotifyTxCompletionResult 2025-03-31T15:53:52.031769Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 124: satisfy waiter [2:341:2332] 2025-03-31T15:53:52.032173Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 125: got EvNotifyTxCompletionResult 2025-03-31T15:53:52.032197Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 125: satisfy waiter [2:341:2332] 2025-03-31T15:53:52.032349Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 126, at schemeshard: 72057594046678944 2025-03-31T15:53:52.032536Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 126: got EvNotifyTxCompletionResult 2025-03-31T15:53:52.032558Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 126: satisfy waiter [2:341:2332] TestWaitNotification: OK eventTxId 124 TestWaitNotification: OK eventTxId 125 TestWaitNotification: OK eventTxId 126 2025-03-31T15:53:52.033442Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/MyExternalDataSource1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:53:52.033838Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/MyExternalDataSource1" took 477us result status StatusSuccess 2025-03-31T15:53:52.034235Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyExternalDataSource1" PathDescription { Self { Name: "MyExternalDataSource1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 125 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "MyExternalDataSource1" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 3 
PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:53:52.035812Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/MyExternalDataSource2" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:53:52.036251Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/MyExternalDataSource2" took 481us result status StatusSuccess 2025-03-31T15:53:52.036597Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyExternalDataSource2" PathDescription { Self { Name: "MyExternalDataSource2" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 126 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "MyExternalDataSource2" PathId { OwnerId: 72057594046678944 LocalId: 4 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:53:52.037836Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:53:52.038091Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 318us result status StatusSuccess 2025-03-31T15:53:52.038905Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 124 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } ChildrenExist: true } Children { Name: "MyExternalDataSource1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 125 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "MyExternalDataSource2" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: 
true CreateTxId: 126 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:53:52.039712Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/MyExternalDataSource1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:53:52.040159Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/MyExternalDataSource1" took 454us result status StatusSuccess 2025-03-31T15:53:52.040570Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyExternalDataSource1" PathDescription { Self { Name: "MyExternalDataSource1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 125 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "MyExternalDataSource1" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:53:52.041455Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/MyExternalDataSource2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:53:52.041684Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/MyExternalDataSource2" took 273us result status StatusSuccess 2025-03-31T15:53:52.041922Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyExternalDataSource2" PathDescription { Self { Name: 
"MyExternalDataSource2" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 126 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "MyExternalDataSource2" PathId { OwnerId: 72057594046678944 LocalId: 4 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |66.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/cms/ut/ydb-services-cms-ut |66.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/cms/ut/ydb-services-cms-ut |66.1%| [TA] {RESULT} $(B)/ydb/core/blobstorage/storagepoolmon/ut/test-results/unittest/{meta.json ... results_accumulator.log} |66.1%| [TA] {RESULT} $(B)/ydb/core/sys_view/partition_stats/ut/test-results/unittest/{meta.json ... results_accumulator.log} |66.1%| [LD] {RESULT} $(B)/ydb/services/cms/ut/ydb-services-cms-ut >> ReadSessionImplTest::SimpleDataHandlersWithGracefulRelease [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit >> ApplyClusterEndpointTest::NoPorts [GOOD] >> ApplyClusterEndpointTest::PortFromCds [GOOD] >> ApplyClusterEndpointTest::PortFromDriver [GOOD] >> BasicUsage::MaxByteSizeEqualZero >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::LOGBROKER_7702 [GOOD] Test command err: 2025-03-31T15:53:52.601456Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.601499Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.601531Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T15:53:52.601893Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-31T15:53:52.602394Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-31T15:53:52.653102Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.654468Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. 
Read offset: (NULL) 2025-03-31T15:53:52.655865Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.655889Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.655944Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T15:53:52.656329Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-31T15:53:52.656751Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-31T15:53:52.656903Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.657066Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-31T15:53:52.657412Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2025-03-31T15:53:52.658441Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.658464Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.658490Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T15:53:52.658752Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-31T15:53:52.659372Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-31T15:53:52.659476Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.659713Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-31T15:53:52.660400Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.660640Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-03-31T15:53:52.660785Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-31T15:53:52.660833Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2025-03-31T15:53:52.662111Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.662340Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.662365Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T15:53:52.662692Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-31T15:53:52.663186Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-31T15:53:52.663315Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.663621Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. 
Read offset: (NULL) Message data size: 11 Compressed message data size: 31 2025-03-31T15:53:52.664704Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-03-31T15:53:52.664921Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2025-03-31T15:53:52.665239Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2025-03-31T15:53:52.665455Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2025-03-31T15:53:52.665589Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-31T15:53:52.665623Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-03-31T15:53:52.665660Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-03-31T15:53:52.665812Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 GOT RANGE 0 3 Getting new event 2025-03-31T15:53:52.665902Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-03-31T15:53:52.665929Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2025-03-31T15:53:52.665957Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-03-31T15:53:52.666090Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 5). Partition stream id: 1 GOT RANGE 3 5 Getting new event 2025-03-31T15:53:52.666141Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2025-03-31T15:53:52.666162Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2025-03-31T15:53:52.666180Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-03-31T15:53:52.666268Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 7). Partition stream id: 1 GOT RANGE 5 7 Getting new event 2025-03-31T15:53:52.666329Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2025-03-31T15:53:52.666348Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2025-03-31T15:53:52.666367Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-03-31T15:53:52.666461Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [7, 9). Partition stream id: 1 GOT RANGE 7 9 2025-03-31T15:53:52.667933Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.667961Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.667981Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T15:53:52.674127Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-31T15:53:52.674669Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-31T15:53:52.674820Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.675240Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 10 Compressed message data size: 30 2025-03-31T15:53:52.676300Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-03-31T15:53:52.676475Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2025-03-31T15:53:52.676828Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2025-03-31T15:53:52.677727Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2025-03-31T15:53:52.678206Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-31T15:53:52.678298Z :DEBUG: Take Data. Partition 1. 
Read: {0, 1} (2-2) 2025-03-31T15:53:52.678367Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-03-31T15:53:52.678384Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2025-03-31T15:53:52.678466Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 4, size 40 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-03-31T15:53:52.679256Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 5). Partition stream id: 1 GOT RANGE 0 5 Getting new event 2025-03-31T15:53:52.679437Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2025-03-31T15:53:52.679549Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2025-03-31T15:53:52.679618Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2025-03-31T15:53:52.679643Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2025-03-31T15:53:52.679670Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 4, size 40 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-03-31T15:53:52.680124Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 9). Partition stream id: 1 GOT RANGE 5 9 2025-03-31T15:53:52.682887Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.682983Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.683342Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T15:53:52.686002Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-31T15:53:52.691331Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-31T15:53:52.691924Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.692272Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-31T15:53:52.693569Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-03-31T15:53:52.694290Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-03-31T15:53:52.694594Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (10-11) 2025-03-31T15:53:52.694710Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-03-31T15:53:52.695184Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-31T15:53:52.695224Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-03-31T15:53:52.695251Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (10-10) 2025-03-31T15:53:52.695271Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (11-11) 2025-03-31T15:53:52.695317Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 16 bytes 2025-03-31T15:53:52.695343Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 16 bytes got data event: DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 10 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 11 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } } 2025-03-31T15:53:52.695534Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 Got commit req { offset_ranges { assign_id: 1 end_offset: 3 } } RANGE 0 3 2025-03-31T15:53:52.695706Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 12). Partition stream id: 1 Got commit req { offset_ranges { assign_id: 1 start_offset: 3 end_offset: 12 } } RANGE 3 12 >> ReadSessionImplTest::ReconnectOnTmpError >> ReadSessionImplTest::ReconnectOnTmpError [GOOD] >> ReadSessionImplTest::ReconnectOnTmpErrorAndThenTimeout >> ReadSessionImplTest::ReconnectOnTmpErrorAndThenTimeout [GOOD] >> ReadSessionImplTest::ReconnectOnTimeout >> ReadSessionImplTest::ReconnectOnTimeout [GOOD] >> ReadSessionImplTest::ReconnectOnTimeoutAndThenCreate [GOOD] >> ReadSessionImplTest::ReconnectsAfterFailure |66.1%| [TA] $(B)/ydb/core/tx/schemeshard/ut_external_data_source/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit [GOOD] Test command err: 2025-03-31T15:53:52.834742Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.834771Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.834798Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T15:53:52.856389Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-31T15:53:52.856915Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-03-31T15:53:52.857004Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.858100Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.858121Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.858140Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T15:53:52.859043Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-31T15:53:52.859434Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-03-31T15:53:52.859508Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.861047Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.861069Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.861105Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T15:53:52.861655Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. 
Description: 2025-03-31T15:53:52.861701Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.861744Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.861864Z :INFO: [db] [sessionid] [cluster] Closing session to cluster: SessionClosed { Status: INTERNAL_ERROR Issues: "
: Error: Failed to establish connection to server "" ( cluster cluster). Attempts done: 1 " } 2025-03-31T15:53:52.863141Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.863162Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.863182Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T15:53:52.864000Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2025-03-31T15:53:52.864038Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.864068Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.864139Z :INFO: [db] [sessionid] [cluster] Closing session to cluster: SessionClosed { Status: TIMEOUT Issues: "
: Error: Failed to establish connection to server. Attempts done: 1 " } 2025-03-31T15:53:52.865357Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2025-03-31T15:53:52.865386Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2025-03-31T15:53:52.865412Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T15:53:52.865853Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-31T15:53:52.866909Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-31T15:53:52.880764Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2025-03-31T15:53:52.881222Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-31T15:53:52.881545Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 2. Cluster: "TestCluster". Topic: "TestTopic". Partition: 2. Read offset: (NULL) 2025-03-31T15:53:52.885897Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-50) 2025-03-31T15:53:52.886188Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-31T15:53:52.886241Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-03-31T15:53:52.886267Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-03-31T15:53:52.886291Z :DEBUG: Take Data. Partition 1. Read: {0, 3} (4-4) 2025-03-31T15:53:52.886320Z :DEBUG: Take Data. Partition 1. Read: {0, 4} (5-5) 2025-03-31T15:53:52.886338Z :DEBUG: Take Data. Partition 1. Read: {0, 5} (6-6) 2025-03-31T15:53:52.886360Z :DEBUG: Take Data. Partition 1. Read: {0, 6} (7-7) 2025-03-31T15:53:52.886381Z :DEBUG: Take Data. Partition 1. Read: {0, 7} (8-8) 2025-03-31T15:53:52.886410Z :DEBUG: Take Data. Partition 1. Read: {0, 8} (9-9) 2025-03-31T15:53:52.886428Z :DEBUG: Take Data. Partition 1. Read: {0, 9} (10-10) 2025-03-31T15:53:52.886462Z :DEBUG: Take Data. Partition 1. Read: {0, 10} (11-11) 2025-03-31T15:53:52.886498Z :DEBUG: Take Data. Partition 1. Read: {0, 11} (12-12) 2025-03-31T15:53:52.886517Z :DEBUG: Take Data. Partition 1. Read: {0, 12} (13-13) 2025-03-31T15:53:52.886532Z :DEBUG: Take Data. Partition 1. Read: {0, 13} (14-14) 2025-03-31T15:53:52.886548Z :DEBUG: Take Data. Partition 1. Read: {0, 14} (15-15) 2025-03-31T15:53:52.886566Z :DEBUG: Take Data. Partition 1. Read: {0, 15} (16-16) 2025-03-31T15:53:52.886606Z :DEBUG: Take Data. Partition 1. Read: {0, 16} (17-17) 2025-03-31T15:53:52.886625Z :DEBUG: Take Data. Partition 1. Read: {0, 17} (18-18) 2025-03-31T15:53:52.886645Z :DEBUG: Take Data. Partition 1. Read: {0, 18} (19-19) 2025-03-31T15:53:52.886667Z :DEBUG: Take Data. Partition 1. Read: {0, 19} (20-20) 2025-03-31T15:53:52.886684Z :DEBUG: Take Data. Partition 1. Read: {0, 20} (21-21) 2025-03-31T15:53:52.886723Z :DEBUG: Take Data. Partition 1. Read: {0, 21} (22-22) 2025-03-31T15:53:52.886748Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (23-23) 2025-03-31T15:53:52.886767Z :DEBUG: Take Data. Partition 1. Read: {1, 1} (24-24) 2025-03-31T15:53:52.886783Z :DEBUG: Take Data. Partition 1. Read: {1, 2} (25-25) 2025-03-31T15:53:52.886816Z :DEBUG: Take Data. Partition 1. Read: {1, 3} (26-26) 2025-03-31T15:53:52.886837Z :DEBUG: Take Data. Partition 1. Read: {1, 4} (27-27) 2025-03-31T15:53:52.886856Z :DEBUG: Take Data. Partition 1. 
Read: {1, 5} (28-28) 2025-03-31T15:53:52.886874Z :DEBUG: Take Data. Partition 1. Read: {1, 6} (29-29) 2025-03-31T15:53:52.886892Z :DEBUG: Take Data. Partition 1. Read: {1, 7} (30-30) 2025-03-31T15:53:52.886907Z :DEBUG: Take Data. Partition 1. Read: {1, 8} (31-31) 2025-03-31T15:53:52.886923Z :DEBUG: Take Data. Partition 1. Read: {1, 9} (32-32) 2025-03-31T15:53:52.886994Z :DEBUG: Take Data. Partition 1. Read: {1, 10} (33-33) 2025-03-31T15:53:52.887024Z :DEBUG: Take Data. Partition 1. Read: {1, 11} (34-34) 2025-03-31T15:53:52.887046Z :DEBUG: Take Data. Partition 1. Read: {1, 12} (35-35) 2025-03-31T15:53:52.887073Z :DEBUG: Take Data. Partition 1. Read: {1, 13} (36-36) 2025-03-31T15:53:52.887111Z :DEBUG: Take Data. Partition 1. Read: {1, 14} (37-37) 2025-03-31T15:53:52.887136Z :DEBUG: Take Data. Partition 1. Read: {1, 15} (38-38) 2025-03-31T15:53:52.887161Z :DEBUG: Take Data. Partition 1. Read: {1, 16} (39-39) 2025-03-31T15:53:52.887179Z :DEBUG: Take Data. Partition 1. Read: {1, 17} (40-40) 2025-03-31T15:53:52.887197Z :DEBUG: Take Data. Partition 1. Read: {1, 18} (41-41) 2025-03-31T15:53:52.887223Z :DEBUG: Take Data. Partition 1. Read: {1, 19} (42-42) 2025-03-31T15:53:52.887243Z :DEBUG: Take Data. Partition 1. Read: {1, 20} (43-43) 2025-03-31T15:53:52.887258Z :DEBUG: Take Data. Partition 1. Read: {1, 21} (44-44) 2025-03-31T15:53:52.887272Z :DEBUG: Take Data. Partition 1. Read: {1, 22} (45-45) 2025-03-31T15:53:52.887290Z :DEBUG: Take Data. Partition 1. Read: {1, 23} (46-46) 2025-03-31T15:53:52.887302Z :DEBUG: Take Data. Partition 1. Read: {1, 24} (47-47) 2025-03-31T15:53:52.887325Z :DEBUG: Take Data. Partition 1. Read: {1, 25} (48-48) 2025-03-31T15:53:52.887351Z :DEBUG: Take Data. Partition 1. Read: {1, 26} (49-49) 2025-03-31T15:53:52.887379Z :DEBUG: Take Data. Partition 1. Read: {1, 27} (50-50) 2025-03-31T15:53:52.887436Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 50, size 5000 bytes 2025-03-31T15:53:52.889830Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 2 (51-100) 2025-03-31T15:53:52.890017Z :DEBUG: Take Data. Partition 2. Read: {0, 0} (51-51) 2025-03-31T15:53:52.890048Z :DEBUG: Take Data. Partition 2. Read: {0, 1} (52-52) 2025-03-31T15:53:52.890064Z :DEBUG: Take Data. Partition 2. Read: {0, 2} (53-53) 2025-03-31T15:53:52.890076Z :DEBUG: Take Data. Partition 2. Read: {0, 3} (54-54) 2025-03-31T15:53:52.890090Z :DEBUG: Take Data. Partition 2. Read: {0, 4} (55-55) 2025-03-31T15:53:52.890101Z :DEBUG: Take Data. Partition 2. Read: {0, 5} (56-56) 2025-03-31T15:53:52.890119Z :DEBUG: Take Data. Partition 2. Read: {0, 6} (57-57) 2025-03-31T15:53:52.890149Z :DEBUG: Take Data. Partition 2. Read: {0, 7} (58-58) 2025-03-31T15:53:52.890174Z :DEBUG: Take Data. Partition 2. Read: {0, 8} (59-59) 2025-03-31T15:53:52.890190Z :DEBUG: Take Data. Partition 2. Read: {0, 9} (60-60) 2025-03-31T15:53:52.890209Z :DEBUG: Take Data. Partition 2. Read: {0, 10} (61-61) 2025-03-31T15:53:52.890227Z :DEBUG: Take Data. Partition 2. Read: {0, 11} (62-62) 2025-03-31T15:53:52.890237Z :DEBUG: Take Data. Partition 2. Read: {0, 12} (63-63) 2025-03-31T15:53:52.890255Z :DEBUG: Take Data. Partition 2. Read: {0, 13} (64-64) 2025-03-31T15:53:52.890278Z :DEBUG: Take Data. Partition 2. Read: {0, 14} (65-65) 2025-03-31T15:53:52.890289Z :DEBUG: Take Data. Partition 2. Read: {0, 15} (66-66) 2025-03-31T15:53:52.890311Z :DEBUG: Take Data. Partition 2. Read: {0, 16} (67-67) 2025-03-31T15:53:52.890323Z :DEBUG: Take Data. Partition 2. 
Read: {0, 17} (68-68) 2025-03-31T15:53:52.890334Z :DEBUG: Take Data. Partition 2. Read: {0, 18} (69-69) 2025-03-31T15:53:52.890346Z :DEBUG: Take Data. Partition 2. Read: {0, 19} (70-70) 2025-03-31T15:53:52.890356Z :DEBUG: Take Data. Partition 2. Read: {0, 20} (71-71) 2025-03-31T15:53:52.890367Z :DEBUG: Take Data. Partition 2. Read: {0, 21} (72-72) 2025-03-31T15:53:52.890384Z :DEBUG: Take Data. Partition 2. Read: {1, 0} (73-73) 2025-03-31T15:53:52.890397Z :DEBUG: Take Data. Partition 2. Read: {1, 1} (74-74) 2025-03-31T15:53:52.890406Z :DEBUG: Take Data. Partition 2. Read: {1, 2} (75-75) 2025-03-31T15:53:52.890414Z :DEBUG: Take Data. Partition 2. Read: {1, 3} (76-76) 2025-03-31T15:53:52.890425Z :DEBUG: Take Data. Partition 2. Read: {1, 4} (77-77) 2025-03-31T15:53:52.890443Z :DEBUG: Take Data. Partition 2. Read: {1, 5} (78-78) 2025-03-31T15:53:52.890457Z :DEBUG: Take Data. Partition 2. Read: {1, 6} (79-79) 2025-03-31T15:53:52.890479Z :DEBUG: Take Data. Partition 2. Read: {1, 7} (80-80) 2025-03-31T15:53:52.890505Z :DEBUG: Take Data. Partition 2. Read: {1, 8} (81-81) 2025-03-31T15:53:52.890521Z :DEBUG: Take Data. Partition 2. Read: {1, 9} (82-82) 2025-03-31T15:53:52.890582Z :DEBUG: Take Data. Partition 2. Read: {1, 10} (83-83) 2025-03-31T15:53:52.890604Z :DEBUG: Take Data. Partition 2. Read: {1, 11} (84-84) 2025-03-31T15:53:52.890622Z :DEBUG: Take Data. Partition 2. Read: {1, 12} (85-85) 2025-03-31T15:53:52.890637Z :DEBUG: Take Data. Partition 2. Read: {1, 13} (86-86) 2025-03-31T15:53:52.890653Z :DEBUG: Take Data. Partition 2. Read: {1, 14} (87-87) 2025-03-31T15:53:52.890669Z :DEBUG: Take Data. Partition 2. Read: {1, 15} (88-88) 2025-03-31T15:53:52.890695Z :DEBUG: Take Data. Partition 2. Read: {1, 16} (89-89) 2025-03-31T15:53:52.890720Z :DEBUG: Take Data. Partition 2. Read: {1, 17} (90-90) 2025-03-31T15:53:52.890739Z :DEBUG: Take Data. Partition 2. Read: {1, 18} (91-91) 2025-03-31T15:53:52.890761Z :DEBUG: Take Data. Partition 2. Read: {1, 19} (92-92) 2025-03-31T15:53:52.890778Z :DEBUG: Take Data. Partition 2. Read: {1, 20} (93-93) 2025-03-31T15:53:52.890799Z :DEBUG: Take Data. Partition 2. Read: {1, 21} (94-94) 2025-03-31T15:53:52.890815Z :DEBUG: Take Data. Partition 2. Read: {1, 22} (95-95) 2025-03-31T15:53:52.890838Z :DEBUG: Take Data. Partition 2. Read: {1, 23} (96-96) 2025-03-31T15:53:52.890856Z :DEBUG: Take Data. Partition 2. Read: {1, 24} (97-97) 2025-03-31T15:53:52.890908Z :DEBUG: Take Data. Partition 2. Read: {1, 25} (98-98) 2025-03-31T15:53:52.890922Z :DEBUG: Take Data. Partition 2. Read: {1, 26} (99-99) 2025-03-31T15:53:52.890932Z :DEBUG: Take Data. Partition 2. Read: {1, 27} (100-100) 2025-03-31T15:53:52.890969Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 50, size 5000 bytes 2025-03-31T15:53:52.891111Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2025-03-31T15:53:52.892824Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.892891Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.892935Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T15:53:52.893262Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-03-31T15:53:52.894016Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-31T15:53:52.894172Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.895037Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-31T15:53:53.027978Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:53.030607Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-03-31T15:53:53.031195Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-31T15:53:53.031769Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2025-03-31T15:53:53.032150Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2025-03-31T15:53:53.235183Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 2025-03-31T15:53:53.339572Z :DEBUG: [db] [sessionid] [cluster] Committed response: { cookies { assign_id: 1 partition_cookie: 1 } } 2025-03-31T15:53:53.340140Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2025-03-31T15:53:53.341111Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2025-03-31T15:53:53.342343Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:53.342364Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:53.342385Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T15:53:53.365272Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-31T15:53:53.377801Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-31T15:53:53.378893Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:53.388248Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-31T15:53:53.499872Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:53.500692Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-03-31T15:53:53.500758Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-31T15:53:53.500798Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2025-03-31T15:53:53.500877Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 2025-03-31T15:53:53.500971Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2025-03-31T15:53:53.503416Z :DEBUG: [db] [sessionid] [cluster] Committed response: { cookies { assign_id: 1 partition_cookie: 1 } } 2025-03-31T15:53:53.503519Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". 
Partition: 1 2025-03-31T15:53:53.503661Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster
>> ReadSessionImplTest::ReconnectsAfterFailure [GOOD]
>> ReadSessionImplTest::SimpleDataHandlers
>> ReadSessionImplTest::SimpleDataHandlers [GOOD]
>> ReadSessionImplTest::SimpleDataHandlersWithCommit
>> ReadSessionImplTest::SimpleDataHandlersWithCommit [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::SimpleDataHandlersWithCommit [GOOD]
Test command err:
2025-03-31T15:53:54.312084Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:54.312122Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:54.312282Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T15:53:54.326501Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-03-31T15:53:54.326566Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:54.327395Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:54.376118Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.006404s 2025-03-31T15:53:54.400796Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-31T15:53:54.418156Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-03-31T15:53:54.419948Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:54.424072Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:54.424091Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:54.424107Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T15:53:54.440193Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-03-31T15:53:54.440243Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:54.440271Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:54.440918Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.007385s 2025-03-31T15:53:54.454663Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-31T15:53:54.469859Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-03-31T15:53:54.474552Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:54.496230Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:54.496419Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:54.496445Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T15:53:54.506984Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
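The reconnect delays in this run (0.000000s for immediate retries, then 0.006404s and 0.007385s, and values like 0.269066s and 0.190801s in the earlier traces) are consistent with randomized exponential backoff between attempts. A self-contained sketch of that pattern (an illustration, not the SDK's actual retry policy):

    #include <algorithm>
    #include <chrono>
    #include <cmath>
    #include <random>

    // Delay before reconnect attempt N: exponential growth capped at maxSeconds,
    // with full jitter so concurrent sessions do not retry in lockstep.
    std::chrono::duration<double> NextDelay(unsigned attempt,
                                            double baseSeconds = 0.005,
                                            double maxSeconds = 30.0) {
        double cap = std::min(maxSeconds, baseSeconds * std::pow(2.0, attempt));
        static thread_local std::mt19937 rng{std::random_device{}()};
        std::uniform_real_distribution<double> dist(0.0, cap);
        return std::chrono::duration<double>(dist(rng));
    }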
: Error: Failed to establish connection to server. Attempts done: 1 2025-03-31T15:53:54.515853Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:54.516311Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:54.517014Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.269066s 2025-03-31T15:53:54.533300Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-31T15:53:54.550976Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-03-31T15:53:54.552673Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:54.560930Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:54.560952Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:54.560971Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T15:53:54.580130Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2025-03-31T15:53:54.580819Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:54.581229Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:54.582057Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.190801s 2025-03-31T15:53:54.600594Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-31T15:53:54.615297Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-03-31T15:53:54.615414Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:54.626906Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:54.627065Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:54.627411Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T15:53:54.648109Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-31T15:53:54.662609Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-31T15:53:55.081913Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:55.096104Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TRANSPORT_UNAVAILABLE. Description:
: Error: GRpc error: (14): 2025-03-31T15:53:55.096440Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:55.096461Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:55.097182Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.162554s 2025-03-31T15:53:55.097946Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2025-03-31T15:53:55.103066Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:55.103102Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:55.103122Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T15:53:55.115939Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-31T15:53:55.118153Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-31T15:53:55.118322Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:55.122060Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-31T15:53:55.273028Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:55.275469Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-03-31T15:53:55.276010Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-31T15:53:55.279397Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2025-03-31T15:53:55.279491Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2025-03-31T15:53:55.388526Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2025-03-31T15:53:55.389160Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2025-03-31T15:53:55.390240Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:55.390263Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:55.390285Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T15:53:55.422078Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-31T15:53:55.442669Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-31T15:53:55.452548Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:55.464501Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-31T15:53:55.624218Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:55.631442Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-03-31T15:53:55.631868Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-31T15:53:55.632660Z :DEBUG: Take Data. Partition 1. 
Read: {1, 0} (2-2) 2025-03-31T15:53:55.633249Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 2025-03-31T15:53:55.634301Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2025-03-31T15:53:55.634974Z :DEBUG: [db] [sessionid] [cluster] Committed response: { cookies { assign_id: 1 partition_cookie: 1 } } 2025-03-31T15:53:55.635052Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2025-03-31T15:53:55.635405Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster >> THiveTest::TestLocalDisconnect >> THiveTest::TestDrain >> THiveTest::TestNoMigrationToSelf >> THiveTest::TestHiveBalancerWithPrefferedDC1 >> TCutHistoryRestrictions::EmptyDenyList >> THiveTest::TestFollowers >> TCutHistoryRestrictions::EmptyDenyList [GOOD] >> TCutHistoryRestrictions::SameTabletInBothLists [GOOD] >> THeavyPerfTest::TTestLoadEverything >> TCutHistoryRestrictions::BasicTest >> THiveTest::TestUpdateChannelValues >> TCutHistoryRestrictions::BasicTest [GOOD] >> TCutHistoryRestrictions::EmptyAllowList [GOOD] >> TCutHistoryRestrictions::BothListsEmpty [GOOD] >> ObjectDistribution::TestImbalanceCalcualtion >> ObjectDistribution::TestImbalanceCalcualtion [GOOD] >> ObjectDistribution::TestAllowedDomainsAndDown >> ObjectDistribution::TestAllowedDomainsAndDown [GOOD] >> ObjectDistribution::TestAddSameNode [GOOD] >> ObjectDistribution::TestManyIrrelevantNodes >> THiveTest::TestCreateTablet >> TargetTrackingScaleRecommenderPolicy::ScaleOut >> TargetTrackingScaleRecommenderPolicy::ScaleOut [GOOD] >> TargetTrackingScaleRecommenderPolicy::ScaleIn >> TargetTrackingScaleRecommenderPolicy::ScaleIn [GOOD] >> TargetTrackingScaleRecommenderPolicy::BigNumbersScaleOut |66.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TargetTrackingScaleRecommenderPolicy::BigNumbersScaleOut [GOOD] >> TargetTrackingScaleRecommenderPolicy::BigNumbersScaleIn [GOOD] >> TargetTrackingScaleRecommenderPolicy::SpikeResistance >> TargetTrackingScaleRecommenderPolicy::SpikeResistance [GOOD] >> TargetTrackingScaleRecommenderPolicy::NearTarget >> TargetTrackingScaleRecommenderPolicy::NearTarget [GOOD] >> TargetTrackingScaleRecommenderPolicy::AtTarget >> TargetTrackingScaleRecommenderPolicy::AtTarget [GOOD] >> TargetTrackingScaleRecommenderPolicy::Fluctuations >> TargetTrackingScaleRecommenderPolicy::Fluctuations [GOOD] >> TargetTrackingScaleRecommenderPolicy::FluctuationsBigNumbers >> TargetTrackingScaleRecommenderPolicy::FluctuationsBigNumbers [GOOD] >> TargetTrackingScaleRecommenderPolicy::ScaleInToMaxSeen >> TargetTrackingScaleRecommenderPolicy::ScaleInToMaxSeen [GOOD] >> TargetTrackingScaleRecommenderPolicy::Idle >> TargetTrackingScaleRecommenderPolicy::Idle [GOOD] >> TStorageBalanceTest::TestScenario2 |66.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest |66.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::SimpleTenantsTestSyncOperation >> THiveTest::TestLocalDisconnect [GOOD] >> THiveTest::TestLocalReplacement >> TGRpcCmsTest::DisabledTxTest >> THiveTest::TestUpdateChannelValues [GOOD] >> THiveTest::TestStorageBalancer >> THiveTest::TestCreateTablet [GOOD] >> THiveTest::TestCreate100Tablets >> TGRpcCmsTest::DescribeOptionsTest >> TGRpcCmsTest::AuthTokenTest >> TGRpcCmsTest::SimpleTenantsTest >> TGRpcCmsTest::AlterRemoveTest >> THiveTest::TestFollowers 
[GOOD] >> THiveTest::TestFollowersReconfiguration >> THiveTest::TestNoMigrationToSelf [GOOD] >> THiveTest::TestReCreateTablet >> TGRpcCmsTest::RemoveWithAnotherTokenTest >> THiveTest::TestLocalReplacement [GOOD] >> THiveTest::TestHiveRestart |66.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut |66.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut >> ReadSessionImplTest::PacksBatches_BigBatchDecompressWithTwoBatchTasks [GOOD] >> ReadSessionImplTest::PacksBatches_DecompressesOneMessagePerTime |66.1%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/test-results/unittest/{meta.json ... results_accumulator.log} |66.1%| [LD] {RESULT} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut >> THiveTest::TestFollowersReconfiguration [GOOD] >> THiveTest::TestFollowerPromotion >> THiveTest::TestReCreateTablet [GOOD] >> THiveTest::TestReCreateTabletError >> ReadSessionImplTest::PacksBatches_DecompressesOneMessagePerTime [GOOD] >> ReadSessionImplTest::PartitionStreamStatus >> ReadSessionImplTest::PartitionStreamStatus [GOOD] >> ReadSessionImplTest::PartitionStreamCallbacks [GOOD] |66.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx |66.1%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx |66.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx >> THiveTest::TestHiveRestart [GOOD] >> THiveTest::TestLimitedNodeList ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::PartitionStreamCallbacks [GOOD] Test command err: 2025-03-31T15:53:52.712904Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.712960Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.713008Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T15:53:52.713509Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-31T15:53:52.714195Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-31T15:53:52.726787Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.727185Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-31T15:53:52.727863Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-03-31T15:53:52.728158Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-03-31T15:53:52.728356Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2025-03-31T15:53:52.728489Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-03-31T15:53:52.728578Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-31T15:53:52.728603Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (2-2) 2025-03-31T15:53:52.728627Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. 
Number of messages 1, size 8 bytes 2025-03-31T15:53:52.728647Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-03-31T15:53:52.730306Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.730334Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.730380Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T15:53:52.730619Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-31T15:53:52.730959Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-31T15:53:52.731057Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.731266Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 10 Compressed message data size: 30 2025-03-31T15:53:52.731869Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-03-31T15:53:52.732005Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2025-03-31T15:53:52.732260Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2025-03-31T15:53:52.732429Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2025-03-31T15:53:52.732528Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-31T15:53:52.732554Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-03-31T15:53:52.732577Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-03-31T15:53:52.732708Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 GOT RANGE 0 3 Getting new event 2025-03-31T15:53:52.732807Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-03-31T15:53:52.732826Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2025-03-31T15:53:52.732849Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-03-31T15:53:52.732971Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 5). Partition stream id: 1 GOT RANGE 3 5 Getting new event 2025-03-31T15:53:52.733036Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2025-03-31T15:53:52.733059Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2025-03-31T15:53:52.733078Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-03-31T15:53:52.733161Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 7). Partition stream id: 1 GOT RANGE 5 7 Getting new event 2025-03-31T15:53:52.733207Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2025-03-31T15:53:52.733230Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2025-03-31T15:53:52.733243Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-03-31T15:53:52.733311Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [7, 9). Partition stream id: 1 GOT RANGE 7 9 2025-03-31T15:53:52.734791Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.734820Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.734848Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T15:53:52.735146Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-03-31T15:53:52.735488Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-31T15:53:52.735592Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.735764Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 100 Compressed message data size: 91 2025-03-31T15:53:52.736398Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-03-31T15:53:52.736585Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2025-03-31T15:53:52.736838Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2025-03-31T15:53:52.736993Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2025-03-31T15:53:52.737062Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-31T15:53:52.737095Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-03-31T15:53:52.737171Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 2). Partition stream id: 1 GOT RANGE 0 2 Getting new event 2025-03-31T15:53:52.737228Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-03-31T15:53:52.737241Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-03-31T15:53:52.737284Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [2, 3). Partition stream id: 1 GOT RANGE 2 3 Getting new event 2025-03-31T15:53:52.737310Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-03-31T15:53:52.737328Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-03-31T15:53:52.737418Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 4). Partition stream id: 1 GOT RANGE 3 4 Getting new event 2025-03-31T15:53:52.737479Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2025-03-31T15:53:52.737493Z :DEBUG: [db] [sessionid] [cluster] The application data ... er". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 190 SeqNo: 231 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 191 SeqNo: 232 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 192 SeqNo: 233 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 193 SeqNo: 234 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 194 SeqNo: 235 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 195 SeqNo: 236 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 196 SeqNo: 237 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 197 SeqNo: 238 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 198 SeqNo: 239 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 199 SeqNo: 240 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 200 SeqNo: 241 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-03-31T15:54:12.807853Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 201). Partition stream id: 1 GOT RANGE 0 201 2025-03-31T15:54:12.914396Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 5, ReadSizeServerDelta = 0 2025-03-31T15:54:12.914486Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 5, ReadSizeServerDelta = 0 2025-03-31T15:54:12.914538Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T15:54:12.919664Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-31T15:54:12.920340Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-31T15:54:12.920497Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 5, ReadSizeServerDelta = 0 2025-03-31T15:54:12.920880Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 1000000 Compressed message data size: 3028 Post function Getting new event 2025-03-31T15:54:13.326230Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-10) 2025-03-31T15:54:13.329502Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-31T15:54:13.350859Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-03-31T15:54:13.372995Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-03-31T15:54:13.374324Z :DEBUG: Take Data. Partition 1. Read: {0, 3} (4-4) 2025-03-31T15:54:13.388981Z :DEBUG: Take Data. Partition 1. Read: {0, 4} (5-5) 2025-03-31T15:54:13.389858Z :DEBUG: Take Data. Partition 1. Read: {0, 5} (6-6) 2025-03-31T15:54:13.390737Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (7-7) 2025-03-31T15:54:13.397747Z :DEBUG: Take Data. Partition 1. Read: {1, 1} (8-8) 2025-03-31T15:54:13.439156Z :DEBUG: Take Data. Partition 1. Read: {1, 2} (9-9) 2025-03-31T15:54:13.440018Z :DEBUG: Take Data. Partition 1. Read: {1, 3} (10-10) 2025-03-31T15:54:13.440098Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 10, size 10000000 bytes 2025-03-31T15:54:13.440273Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 5, ReadSizeServerDelta = 0 DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 9 SeqNo: 50 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 10 SeqNo: 51 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-03-31T15:54:13.452369Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 11). Partition stream id: 1 GOT RANGE 0 11 2025-03-31T15:54:13.458196Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:54:13.458247Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:54:13.458294Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T15:54:13.463483Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-03-31T15:54:13.468646Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-31T15:54:13.468937Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:54:13.471423Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-31T15:54:13.472083Z :DEBUG: [db] [sessionid] [cluster] Requesting status for partition stream id: 1 2025-03-31T15:54:13.481072Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:54:13.481120Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:54:13.481184Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T15:54:13.481513Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-31T15:54:13.481930Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-31T15:54:13.482066Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:54:13.482676Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:54:13.491453Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-03-31T15:54:13.491651Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-31T15:54:13.491728Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-03-31T15:54:13.491935Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". 
Partition: 1 |66.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table |66.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table |66.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table |66.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator |66.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator |66.2%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator >> THiveTest::TestStorageBalancer [GOOD] >> THiveTest::TestRestartsWithFollower >> THiveTest::TestReCreateTabletError [GOOD] >> THiveTest::TestNodeDisconnect |66.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> THiveTest::TestCreate100Tablets [GOOD] >> THiveTest::TestCreateSubHiveCreateTablet >> AnalyzeColumnshard::AnalyzeRebootSaBeforeResolve >> THiveTest::TestLimitedNodeList [GOOD] >> THiveTest::TestHiveFollowersWithChangingDC |66.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |66.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> THiveTest::TestFollowerPromotion [GOOD] >> THiveTest::TestFollowerPromotionFollowerDies >> THiveTest::TestHiveBalancerWithPrefferedDC1 [GOOD] >> THiveTest::TestHiveBalancerWithPrefferedDC2 >> AnalyzeColumnshard::AnalyzeTable |66.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeSave >> AnalyzeColumnshard::AnalyzeRebootSaBeforeAggregate >> TraverseColumnShard::TraverseColumnTable >> TraverseDatashard::TraverseTwoTablesTwoServerlessDbs >> THiveTest::TestNodeDisconnect [GOOD] >> THiveTest::TestReassignGroupsWithRecreateTablet >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeAggregate >> TraverseDatashard::TraverseTwoTables >> TGRpcCmsTest::DisabledTxTest [GOOD] >> AnalyzeColumnshard::AnalyzeServerless >> TraverseColumnShard::TraverseServerlessColumnTable >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeResolve >> AnalyzeColumnshard::AnalyzeTwoColumnTables >> TraverseColumnShard::TraverseColumnTableHiveDistributionZeroNodes |66.2%| [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> AnalyzeColumnshard::Analyze ------- [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::DisabledTxTest [GOOD] Test command err: 2025-03-31T15:54:11.290920Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488002541731386176:2205];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:54:11.290950Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ae7/r3tmp/tmpNnPwbM/pdisk_1.dat 2025-03-31T15:54:11.794815Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:54:11.827374Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:11.827474Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:11.841336Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10627, node 1 2025-03-31T15:54:11.959876Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:54:11.959899Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:54:11.959906Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:54:11.960026Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26499 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:54:12.231763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T15:54:12.398760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710658:1, at schemeshard: 72057594046644480 2025-03-31T15:54:12.456815Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710659:0, at schemeshard: 72057594046644480 >> AnalyzeColumnshard::AnalyzeDeadline >> TGRpcCmsTest::DescribeOptionsTest [GOOD] >> FolderServiceTest::TFolderService >> TAccessServiceTest::PassRequestId >> FolderServiceTest::TFolderServiceAdapter >> THiveTest::TestRestartsWithFollower [GOOD] >> THiveTest::TestStartTabletTwiceInARow >> TraverseColumnShard::TraverseColumnTableRebootColumnshard >> THiveTest::TestFollowerPromotionFollowerDies [GOOD] >> THiveTest::TestHiveBalancer |66.2%| [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> TAccessServiceTest::Authenticate >> THiveTest::TestCreateSubHiveCreateTablet [GOOD] >> THiveTest::TestCreateSubHiveCreateManyTablets >> THiveTest::TestReassignGroupsWithRecreateTablet [GOOD] >> THiveTest::TestReassignUseRelativeSpace |66.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |66.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TUserAccountServiceTest::Get ------- [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::DescribeOptionsTest [GOOD] Test command err: 2025-03-31T15:54:11.717144Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488002543170396678:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:54:11.717200Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000b72/r3tmp/tmpdTgTao/pdisk_1.dat 2025-03-31T15:54:12.419752Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:12.419884Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:12.424559Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:54:12.493052Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61324, node 1 2025-03-31T15:54:12.515571Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T15:54:12.518919Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T15:54:12.571769Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:54:12.571795Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:54:12.571804Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:54:12.571910Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18207 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:54:12.932688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:18207 2025-03-31T15:54:13.888174Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) is now locking 2025-03-31T15:54:13.888202Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) is now locked by parent 2025-03-31T15:54:13.902152Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) is now active 2025-03-31T15:54:14.115190Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285140, Sender [1:7488002556055299391:2316], Recipient [1:7488002547465364459:2194]: NKikimr::NConsole::TEvConsole::TEvDescribeTenantOptionsRequest { Request { } UserToken: "" } 2025-03-31T15:54:14.115306Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvDescribeTenantOptionsRequest 2025-03-31T15:54:14.124124Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvDescribeTenantOptionsResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.DescribeDatabaseOptionsResult] { storage_units { kind: "hdd2" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } storage_units { kind: "hdd" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } storage_units { kind: "hdd1" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } storage_units { kind: "ssd" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } storage_units { kind: "test" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } availability_zones { name: "dc-1" labels { key: "collocation" value: "disabled" } labels { key: "fixed_data_center" value: "DC-1" } } availability_zones { name: "any" labels { key: "any_data_center" value: "true" } labels { key: "collocation" value: "disabled" } } computational_units { kind: "slot" labels { key: "slot_type" value: "default" } labels { key: "type" value: "dynamic_slot" } allowed_availability_zones: "any" allowed_availability_zones: "dc-1" } } } } } >> TraverseColumnShard::TraverseColumnTableAggrStatUnavailableNode >> THiveTest::TestStartTabletTwiceInARow [GOOD] >> THiveTest::TestSpreadNeighboursWithUpdateTabletsObject >> THiveTest::TestReassignUseRelativeSpace [GOOD] >> THiveTest::TestManyFollowersOnOneNode |66.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut |66.2%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut |66.2%| [LD] {RESULT} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut >> KqpLocks::DifferentKeyUpdate >> KqpSnapshotRead::ReadOnlyTxWithIndexCommitsOnConcurrentWrite-withSink >> TServiceAccountServiceTest::Get >> TGRpcCmsTest::AlterRemoveTest [GOOD] >> TGRpcCmsTest::SimpleTenantsTest [GOOD] |66.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut |66.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut |66.3%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut >> TServiceAccountServiceTest::Get [GOOD] >> DstCreator::ColumnsSizeMismatch >> TGRpcCmsTest::SimpleTenantsTestSyncOperation [GOOD] >> THiveTest::TestHiveFollowersWithChangingDC [GOOD] >> THiveTest::TestHiveNoBalancingWithLowResourceUsage >> THiveTest::TestManyFollowersOnOneNode [GOOD] >> THiveTest::TestLockTabletExecutionTimeout >> THiveTest::TestDrain [GOOD] >> THiveTest::TestDrainWithMaxTabletsScheduled >> DstCreator::GlobalConsistency >> DstCreator::WithSyncIndexAndIntermediateDir >> FolderServiceTest::TFolderServiceTransitional ------- [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::AlterRemoveTest [GOOD] Test command err: 2025-03-31T15:54:12.583271Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488002547965900443:2141];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:54:12.587309Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000af3/r3tmp/tmpp830xz/pdisk_1.dat 2025-03-31T15:54:13.592656Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:13.592742Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:13.597122Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:13.598046Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:54:13.605760Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14555, node 1 2025-03-31T15:54:14.069576Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:54:14.069598Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:54:14.069607Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:54:14.069904Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T15:54:19.202475Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488002547965900443:2141];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:54:19.202587Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is 
connected to server localhost:5266 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:54:19.640601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:54:20.268113Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285120, Sender [1:7488002582325639591:2316], Recipient [1:7488002552260868117:2188]: NKikimr::NConsole::TEvConsole::TEvCreateTenantRequest { Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" } 2025-03-31T15:54:20.268154Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvCreateTenantRequest 2025-03-31T15:54:20.268356Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-03-31T15:54:20.268367Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-03-31T15:54:20.269099Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant: Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" 2025-03-31T15:54:20.269724Z node 1 :CMS_TENANTS DEBUG: Add tenant /Root/users/user-1 (txid = 1743436460267144) 2025-03-31T15:54:20.288072Z node 1 :CMS_TENANTS TRACE: Add tenant /Root/users/user-1 to database state=CREATING_POOLS coordinators=3 mediators=3 planresolution=10 timecastbucketspermediator=2 issue= txid=1743436460267144 subdomainversion=1 confirmedsubdomain=0 attrs= generation=1 errorcode=STATUS_CODE_UNSPECIFIED isExternalSubDomain=1 isExternalHive=1 isExternalSysViewProcessor=1 isExternalStatisticsAggregator=1 areResourcesShared=0 sharedDomainId= 2025-03-31T15:54:20.288988Z node 1 :CMS_TENANTS TRACE: Add tenant pool /Root/users/user-1:hdd to database kind=hdd config=BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } allocatednumgroups=0 state=NOT_ALLOCATED 2025-03-31T15:54:20.331701Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant Complete 2025-03-31T15:54:20.340221Z node 1 :CMS_TENANTS TRACE: Send: NKikimr::NConsole::TEvConsole::TEvCreateTenantResponse { Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743436460267144&action=1" } } } 2025-03-31T15:54:20.341139Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-03-31T15:54:20.341215Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2025-03-31T15:54:20.342108Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) read pool 
state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2025-03-31T15:54:20.360026Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true } Success: true ConfigTxSeqNo: 5 2025-03-31T15:54:20.360662Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DefineStoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } } 2025-03-31T15:54:20.423817Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 6 2025-03-31T15:54:20.423872Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-03-31T15:54:20.424292Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435079, Sender [1:7488002582325639600:2188], Recipient [1:7488002552260868117:2188]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-03-31T15:54:20.424311Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvPoolAllocated 2025-03-31T15:54:20.424477Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-03-31T15:54:20.424673Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-03-31T15:54:20.425061Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=ALLOCATED 2025-03-31T15:54:20.425092Z node 1 :CMS_TENANTS TRACE: Update pool state in database for /Root/users/user-1:hdd state=ALLOCATED allocatednumgroups=1 2025-03-31T15:54:20.425354Z node 1 :CMS_TENANTS TRACE: Update subdomain version in database for /Root/users/user-1 subdomainversion=2 2025-03-31T15:54:20.631391Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2025-03-31T15:54:20.631415Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-03-31T15:54:20.631423Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-03-31T15:54:20.631591Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-03-31T15:54:20.631649Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState for tenant /Root/users/user-1 to CREATING_SUBDOMAIN 2025-03-31T15:54:20.631846Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=CREATING_SUBDOMAIN txid=1743436460267144 errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-03-31T15:54:20.756805Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7488002582325639614:2320], Recipient [1:7488002552260868117:2188]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743436460267144&action=1" } UserToken: "" } 2025-03-31T15:54:20.756836Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-03-31T15:54:20.757400Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743436460267144&action=1" } } 2025-03-31T15:54:20.888125Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState complete for /Root/users/user-1 2025-03-31T15:54:20.888892Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-03-31T15:54:20.888943Z node 1 :CMS_TENANTS DEBUG: 
TSubdomainManip(/Root/users/user-1)::Bootstrap 2025-03-31T15:54:20.888953Z node 1 :CMS_TENANTS DEBUG: TSubDomainManip(/Root/users/user-1) create subdomain 2025-03-31T15:54:20.921362Z node 1 :CMS_TENANTS TRACE: TSubdomainManip(/Root/users/user-1) send subdomain creation cmd: NKikimrTxUserProxy.TEvProposeTransaction Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateExtSubDomain SubDomain { Name: "users/user-1" ExternalSchemeShard: true ExternalHive: true ExternalSysViewProcessor: true ExternalStatisticsAggregator: true GraphShard: true } } } ExecTimeoutPeriod: 18446744073709551615 DatabaseName: "Root" 2025-03-31T15:54:20.975603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710658:1, at schemeshard: 72057594046644480 2025-03-31T15:54:20.987378Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7488002582325639655:2322], Recipient [1:7488002552260868117:2188]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743436460267144&action=1" } UserToken: "" } 2025-03-31T15:54:20.987394Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-03-31T15:54:20.987542Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743436460267144&action=1" } } 2025-03-31T15:54:21.016192Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got propose result: Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 2025-03-31T15:54:21.016563Z node 1 :CMS_TENANTS TRACE: TSubdomainManip(/Root/users/user-1) send notification request: NKikimrScheme.TEvNotifyTxCompletion TxId: 281474976710658 2025-03-31T15:54:21.100798Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionRegistered: TxId: 281474976710658 2025-03-31T15:54:21.112094Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7488002586620606990:2324], Recipient [1:7488002552260868117:2188]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743436460267144&action=1" } UserToken: "" } 2025-03-31T15:54:21.112122Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-03-31T15:54: ... 
Z node 1 :CMS_TENANTS TRACE: TSubdomainManip(/Root/users/user-1) send subdomain drop cmd: NKikimrTxUserProxy.TEvProposeTransaction Transaction { ModifyScheme { WorkingDir: "/Root/users" OperationType: ESchemeOpForceDropExtSubDomain Drop { Name: "user-1" } } } ExecTimeoutPeriod: 18446744073709551615 DatabaseName: "Root" 2025-03-31T15:54:21.706442Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2025-03-31T15:54:21.706452Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-03-31T15:54:21.709630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Mark as Dropping path id [OwnerId: 72057594046644480, LocalPathId: 3] by tx: 281474976710660 2025-03-31T15:54:21.710915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpForceDropExtSubDomain, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T15:54:21.748425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Mark as Dropping path id [OwnerId: 72057594046644480, LocalPathId: 3] by tx: 281474976710660 2025-03-31T15:54:21.750245Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976710659 2025-03-31T15:54:21.750254Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) done 2025-03-31T15:54:21.750291Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainReady 2025-03-31T15:54:21.750339Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got propose result: Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 2025-03-31T15:54:21.750363Z node 1 :CMS_TENANTS TRACE: TSubdomainManip(/Root/users/user-1) send notification request: NKikimrScheme.TEvNotifyTxCompletion TxId: 281474976710660 2025-03-31T15:54:21.760696Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435076, Sender [1:7488002586620607020:2188], Recipient [1:7488002552260868117:2188]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainReady 2025-03-31T15:54:21.760953Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvSubdomainReady 2025-03-31T15:54:21.760970Z node 1 :CMS_TENANTS DEBUG: Ignoring ready subdomain for tenant /Root/users/user-1 in REMOVING_SUBDOMAIN state 2025-03-31T15:54:21.805722Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionRegistered: TxId: 281474976710660 2025-03-31T15:54:21.832877Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7488002586620607167:2336], Recipient [1:7488002552260868117:2188]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743436461507208&action=2" } UserToken: "" } 2025-03-31T15:54:21.832905Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-03-31T15:54:21.833069Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743436461507208&action=2" } } 2025-03-31T15:54:21.940766Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7488002586620607193:2338], Recipient [1:7488002552260868117:2188]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743436461507208&action=2" } 
UserToken: "" } 2025-03-31T15:54:21.940792Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-03-31T15:54:21.940914Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743436461507208&action=2" } } 2025-03-31T15:54:21.988793Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976710660 2025-03-31T15:54:21.988815Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) done 2025-03-31T15:54:21.988854Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2025-03-31T15:54:21.989154Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435077, Sender [1:7488002586620607094:2188], Recipient [1:7488002552260868117:2188]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2025-03-31T15:54:21.989169Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvSubdomainRemoved 2025-03-31T15:54:21.989182Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-03-31T15:54:21.989190Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-03-31T15:54:21.989225Z node 1 :CMS_TENANTS DEBUG: TTxRemoveComputationalUnits Execute /Root/users/user-1 2025-03-31T15:54:21.989422Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=REMOVING_UNITS txid=1743436461507208 errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-03-31T15:54:21.989479Z node 1 :CMS_TENANTS TRACE: Remove computational units of /Root/users/user-1 from database txid=1743436461507208 issue= 2025-03-31T15:54:22.079746Z node 1 :CMS_TENANTS DEBUG: TTxRemoveComputationalUnits Complete /Root/users/user-1 2025-03-31T15:54:22.080152Z node 1 :CMS_TENANTS TRACE: Send TEvTenantSlotBroker::TEvAlterTenant: TenantName: "/Root/users/user-1" 2025-03-31T15:54:22.080165Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-03-31T15:54:22.080347Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273154052, Sender [1:7488002547965900699:2187], Recipient [1:7488002552260868117:2188]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2025-03-31T15:54:22.080544Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2025-03-31T15:54:22.080559Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-03-31T15:54:22.080567Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-03-31T15:54:22.080777Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState for tenant /Root/users/user-1 to REMOVING_POOLS 2025-03-31T15:54:22.080795Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=REMOVING_POOLS txid=1743436461507208 errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-03-31T15:54:22.095878Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState complete for /Root/users/user-1 2025-03-31T15:54:22.095926Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-03-31T15:54:22.095964Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2025-03-31T15:54:22.097407Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2025-03-31T15:54:22.098197Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got read response: Status 
{ Success: true StoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" Geometry { } VDiskKind: "Default" Kind: "hdd" NumGroups: 2 PDiskFilter { Property { Type: ROT } } ScopeId { X1: 72057594046644480 X2: 3 } ItemConfigGeneration: 3 } } Success: true ConfigTxSeqNo: 13 2025-03-31T15:54:22.107405Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DeleteStoragePool { BoxId: 999 StoragePoolId: 4 ItemConfigGeneration: 3 } } } 2025-03-31T15:54:22.107739Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7488002590915574511:2340], Recipient [1:7488002552260868117:2188]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743436461507208&action=2" } UserToken: "" } 2025-03-31T15:54:22.107753Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-03-31T15:54:22.107890Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743436461507208&action=2" } } 2025-03-31T15:54:22.127708Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 14 2025-03-31T15:54:22.127800Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435081, Sender [1:7488002590915574517:2188], Recipient [1:7488002552260868117:2188]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolDeleted 2025-03-31T15:54:22.127980Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvPoolDeleted 2025-03-31T15:54:22.127994Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-03-31T15:54:22.128320Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-03-31T15:54:22.128356Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=DELETED 2025-03-31T15:54:22.128373Z node 1 :CMS_TENANTS TRACE: Update pool state in database for /Root/users/user-1:hdd state=DELETED allocatednumgroups=0 2025-03-31T15:54:22.323607Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2025-03-31T15:54:22.323648Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-03-31T15:54:22.323654Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-03-31T15:54:22.323661Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-03-31T15:54:22.323726Z node 1 :CMS_TENANTS DEBUG: TTxRemoveTenantDone for tenant /Root/users/user-1 txid=1743436461507208 2025-03-31T15:54:22.323739Z node 1 :CMS_TENANTS TRACE: Remove computational units of /Root/users/user-1 from database txid=1743436461507208 issue= 2025-03-31T15:54:22.327325Z node 1 :CMS_TENANTS TRACE: Remove tenant /Root/users/user-1 from database txid=1743436461507208 issue= 2025-03-31T15:54:22.327362Z node 1 :CMS_TENANTS TRACE: Remove pool /Root/users/user-1:hdd from database 2025-03-31T15:54:22.327449Z node 1 :CMS_TENANTS TRACE: Add tenant removal info for /Root/users/user-1 txid=1743436461507208 code=SUCCESS errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-03-31T15:54:22.342405Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7488002590915574535:2343], Recipient [1:7488002552260868117:2188]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: 
"ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743436461507208&action=2" } UserToken: "" } 2025-03-31T15:54:22.342441Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-03-31T15:54:22.342597Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743436461507208&action=2" } } 2025-03-31T15:54:22.359435Z node 1 :CMS_TENANTS DEBUG: TTxRemoveTenantDone Complete 2025-03-31T15:54:22.359559Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-03-31T15:54:22.459785Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7488002590915574543:2345], Recipient [1:7488002552260868117:2188]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743436461507208&action=2" } UserToken: "" } 2025-03-31T15:54:22.459815Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-03-31T15:54:22.459981Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743436461507208&action=2" ready: true status: SUCCESS } } ------- [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::SimpleTenantsTestSyncOperation [GOOD] Test command err: 2025-03-31T15:54:11.033753Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488002542990457630:2279];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:54:11.033800Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000b20/r3tmp/tmpcgdYYo/pdisk_1.dat 2025-03-31T15:54:11.579772Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:11.579882Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:11.585432Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:54:11.595442Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26008, node 1 2025-03-31T15:54:12.228418Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:54:12.228440Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:54:12.228447Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:54:12.228584Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10392 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:54:12.627011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:54:12.707309Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285120, Sender [1:7488002547285425488:2314], Recipient [1:7488002542990457885:2196]: NKikimr::NConsole::TEvConsole::TEvCreateTenantRequest { Request { operation_params { operation_mode: SYNC } path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" } 2025-03-31T15:54:12.707360Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvCreateTenantRequest 2025-03-31T15:54:12.707381Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-03-31T15:54:12.707399Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-03-31T15:54:12.707524Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant: Request { operation_params { operation_mode: SYNC } path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" 2025-03-31T15:54:12.707685Z node 1 :CMS_TENANTS DEBUG: Add tenant /Root/users/user-1 (txid = 1743436452706908) 2025-03-31T15:54:12.708307Z node 1 :CMS_TENANTS TRACE: Add tenant /Root/users/user-1 to database state=CREATING_POOLS coordinators=3 mediators=3 planresolution=10 timecastbucketspermediator=2 issue= txid=1743436452706908 subdomainversion=1 confirmedsubdomain=0 attrs= generation=1 errorcode=STATUS_CODE_UNSPECIFIED isExternalSubDomain=1 isExternalHive=1 isExternalSysViewProcessor=1 isExternalStatisticsAggregator=1 areResourcesShared=0 sharedDomainId= 2025-03-31T15:54:12.708485Z node 1 :CMS_TENANTS TRACE: Add tenant pool /Root/users/user-1:hdd to database kind=hdd config=BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } allocatednumgroups=0 state=NOT_ALLOCATED 2025-03-31T15:54:12.715837Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant Complete 2025-03-31T15:54:12.716514Z node 1 :CMS_TENANTS TRACE: Send: NKikimr::NConsole::TEvConsole::TEvCreateTenantResponse { Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743436452706908&action=1" } } } 2025-03-31T15:54:12.716672Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-03-31T15:54:12.716766Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2025-03-31T15:54:12.716932Z node 1 :CMS_TENANTS DEBUG: 
TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2025-03-31T15:54:12.717448Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true } Success: true ConfigTxSeqNo: 5 2025-03-31T15:54:12.717590Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DefineStoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } } 2025-03-31T15:54:12.719304Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285139, Sender [1:7488002547285425488:2314], Recipient [1:7488002542990457885:2196]: NKikimr::NConsole::TEvConsole::TEvNotifyOperationCompletionRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743436452706908&action=1" } UserToken: "" } 2025-03-31T15:54:12.719330Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvNotifyOperationCompletionRequest 2025-03-31T15:54:12.719531Z node 1 :CMS_TENANTS DEBUG: Add subscription to /Root/users/user-1 for [1:7488002547285425488:2314] 2025-03-31T15:54:12.719613Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvNotifyOperationCompletionResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743436452706908&action=1" } } 2025-03-31T15:54:12.724874Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 6 2025-03-31T15:54:12.724924Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-03-31T15:54:12.725023Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435079, Sender [1:7488002547285425493:2196], Recipient [1:7488002542990457885:2196]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-03-31T15:54:12.725042Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvPoolAllocated 2025-03-31T15:54:12.725055Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-03-31T15:54:12.725064Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-03-31T15:54:12.725108Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=ALLOCATED 2025-03-31T15:54:12.725142Z node 1 :CMS_TENANTS TRACE: Update pool state in database for /Root/users/user-1:hdd state=ALLOCATED allocatednumgroups=1 2025-03-31T15:54:12.725215Z node 1 :CMS_TENANTS TRACE: Update subdomain version in database for /Root/users/user-1 subdomainversion=2 2025-03-31T15:54:12.732912Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2025-03-31T15:54:12.732946Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-03-31T15:54:12.732954Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-03-31T15:54:12.732959Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-03-31T15:54:12.733020Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState for tenant /Root/users/user-1 to CREATING_SUBDOMAIN 2025-03-31T15:54:12.733042Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=CREATING_SUBDOMAIN txid=1743436452706908 errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-03-31T15:54:12.737642Z node 1 :CMS_TENANTS DEBUG: 
TTxUpdateTenantState complete for /Root/users/user-1 2025-03-31T15:54:12.737756Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-03-31T15:54:12.737791Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1)::Bootstrap 2025-03-31T15:54:12.737823Z node 1 :CMS_TENANTS DEBUG: TSubDomainManip(/Root/users/user-1) create subdomain 2025-03-31T15:54:12.740936Z node 1 :CMS_TENANTS TRACE: TSubdomainManip(/Root/users/user-1) send subdomain creation cmd: NKikimrTxUserProxy.TEvProposeTransaction Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateExtSubDomain SubDomain { Name: "users/user-1" ExternalSchemeShard: true ExternalHive: true ExternalSysViewProcessor: true ExternalStatisticsAggregator: true GraphShard: true } } } ExecTimeoutPeriod: 18446744073709551615 DatabaseName: "Root" 2025-03-31T15:54:12.742491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710658:1, at schemeshard: 72057594046644480 2025-03-31T15:54:12.745201Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got propose result: Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 2025-03-31T15:54:12.745255Z node 1 :CMS_TENANTS TRACE: TSubdomainManip(/Root/users/user-1) send notification request: NKikimrScheme.TEvNotifyTxCompletion TxId: 281474976710658 2025-03-31T15:54:12.749586Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionRegistered: TxId: 281474976710658 2025-03-31T15:54:12.764276Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976710658 2025-03-31T15:54:12.764795Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got describe result: Status: StatusSuccess Path: "/Root/users/user-1" PathDescription { Self { Name: "user-1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743436452793 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046644480 PathId: 3 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 3 PathOwnerId: 72057594046644480 2025-03-31T15:54:12.764812Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) done 2025-03-31T15:54:12.764851Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user ... 
644480 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 3 PathOwnerId: 72057594046644480 2025-03-31T15:54:16.914063Z node 1 :CMS_TENANTS DEBUG: TSubDomainManip(/Root/users/user-1) drop subdomain 2025-03-31T15:54:16.914600Z node 1 :CMS_TENANTS TRACE: TSubdomainManip(/Root/users/user-1) send subdomain drop cmd: NKikimrTxUserProxy.TEvProposeTransaction Transaction { ModifyScheme { WorkingDir: "/Root/users" OperationType: ESchemeOpForceDropExtSubDomain Drop { Name: "user-1" } } } ExecTimeoutPeriod: 18446744073709551615 DatabaseName: "Root" 2025-03-31T15:54:16.939371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Mark as Dropping path id [OwnerId: 72057594046644480, LocalPathId: 3] by tx: 281474976710660 2025-03-31T15:54:16.940531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpForceDropExtSubDomain, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T15:54:17.034248Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2025-03-31T15:54:17.034276Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-03-31T15:54:17.109726Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got propose result: Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 2025-03-31T15:54:17.109767Z node 1 :CMS_TENANTS TRACE: TSubdomainManip(/Root/users/user-1) send notification request: NKikimrScheme.TEvNotifyTxCompletion TxId: 281474976710660 2025-03-31T15:54:17.161499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Mark as Dropping path id [OwnerId: 72057594046644480, LocalPathId: 3] by tx: 281474976710660 2025-03-31T15:54:17.299859Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionRegistered: TxId: 281474976710660 2025-03-31T15:54:17.528134Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976710660 2025-03-31T15:54:17.528154Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) done 2025-03-31T15:54:17.528203Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2025-03-31T15:54:17.528269Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435077, Sender [1:7488002564465295406:2196], Recipient [1:7488002542990457885:2196]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2025-03-31T15:54:17.528287Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvSubdomainRemoved 2025-03-31T15:54:17.528299Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-03-31T15:54:17.528307Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-03-31T15:54:17.528342Z node 1 :CMS_TENANTS DEBUG: TTxRemoveComputationalUnits Execute /Root/users/user-1 2025-03-31T15:54:17.528358Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=REMOVING_UNITS txid=1743436456859382 errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-03-31T15:54:17.528402Z node 1 :CMS_TENANTS TRACE: Remove computational units of /Root/users/user-1 from database txid=1743436456859382 issue= 2025-03-31T15:54:17.529507Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxDeleteTablet tablet (72057594046644480,1) wasn't found - using supplied 
72075186224037888 2025-03-31T15:54:17.537367Z node 1 :CMS_TENANTS DEBUG: TTxRemoveComputationalUnits Complete /Root/users/user-1 2025-03-31T15:54:17.537443Z node 1 :CMS_TENANTS TRACE: Send TEvTenantSlotBroker::TEvAlterTenant: TenantName: "/Root/users/user-1" 2025-03-31T15:54:17.537459Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-03-31T15:54:17.537643Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273154052, Sender [1:7488002542990457749:2195], Recipient [1:7488002542990457885:2196]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2025-03-31T15:54:17.537663Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2025-03-31T15:54:17.537679Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-03-31T15:54:17.537687Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-03-31T15:54:17.537709Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState for tenant /Root/users/user-1 to REMOVING_POOLS 2025-03-31T15:54:17.537729Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=REMOVING_POOLS txid=1743436456859382 errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-03-31T15:54:17.554507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got DeleteTabletReply with Forward response from Hive 72075186224037888 to Hive 72057594037968897 shardIdx 72057594046644480:1 2025-03-31T15:54:17.556468Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037895 not found 2025-03-31T15:54:17.556501Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037889 not found 2025-03-31T15:54:17.556512Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037896 not found 2025-03-31T15:54:17.556522Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037897 not found 2025-03-31T15:54:17.556532Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037890 not found 2025-03-31T15:54:17.556543Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037893 not found 2025-03-31T15:54:17.556553Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037894 not found 2025-03-31T15:54:17.572583Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037891 not found 2025-03-31T15:54:17.573623Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037892 not found 2025-03-31T15:54:17.685174Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState complete for /Root/users/user-1 2025-03-31T15:54:17.685373Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-03-31T15:54:17.685416Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2025-03-31T15:54:17.685516Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2025-03-31T15:54:17.686577Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true StoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" Geometry { } VDiskKind: "Default" Kind: "hdd" 
NumGroups: 2 PDiskFilter { Property { Type: ROT } } ScopeId { X1: 72057594046644480 X2: 3 } ItemConfigGeneration: 3 } } Success: true ConfigTxSeqNo: 13 2025-03-31T15:54:17.686787Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DeleteStoragePool { BoxId: 999 StoragePoolId: 4 ItemConfigGeneration: 3 } } } 2025-03-31T15:54:17.786589Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488002547918832372:2110];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:54:17.786660Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/users/user-1/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:54:17.833469Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037888 not found 2025-03-31T15:54:17.944109Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 14 2025-03-31T15:54:17.944209Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435081, Sender [1:7488002568760262805:2196], Recipient [1:7488002542990457885:2196]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolDeleted 2025-03-31T15:54:17.944238Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvPoolDeleted 2025-03-31T15:54:17.944251Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-03-31T15:54:17.944260Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-03-31T15:54:17.944489Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=DELETED 2025-03-31T15:54:17.944704Z node 1 :CMS_TENANTS TRACE: Update pool state in database for /Root/users/user-1:hdd state=DELETED allocatednumgroups=0 2025-03-31T15:54:18.220571Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2025-03-31T15:54:18.220603Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-03-31T15:54:18.220611Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-03-31T15:54:18.220617Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-03-31T15:54:18.220680Z node 1 :CMS_TENANTS DEBUG: TTxRemoveTenantDone for tenant /Root/users/user-1 txid=1743436456859382 2025-03-31T15:54:18.220692Z node 1 :CMS_TENANTS TRACE: Remove computational units of /Root/users/user-1 from database txid=1743436456859382 issue= 2025-03-31T15:54:18.220702Z node 1 :CMS_TENANTS TRACE: Remove tenant /Root/users/user-1 from database txid=1743436456859382 issue= 2025-03-31T15:54:18.220886Z node 1 :CMS_TENANTS TRACE: Remove pool /Root/users/user-1:hdd from database 2025-03-31T15:54:18.221133Z node 1 :CMS_TENANTS TRACE: Add tenant removal info for /Root/users/user-1 txid=1743436456859382 code=SUCCESS errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-03-31T15:54:18.240100Z node 1 :CMS_TENANTS DEBUG: TTxRemoveTenantDone Complete 2025-03-31T15:54:18.240656Z node 1 :CMS_TENANTS TRACE: Send /Root/users/user-1 notification to [1:7488002564465295388:2403]: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743436456859382&action=2" ready: true status: SUCCESS } } 2025-03-31T15:54:18.240732Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-03-31T15:54:18.299059Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285122, Sender 
[1:7488002573055230168:2412], Recipient [1:7488002542990457885:2196]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "" } 2025-03-31T15:54:18.299096Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2025-03-31T15:54:18.299556Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: NOT_FOUND issues { message: "Unknown tenant /Root/users/user-1" severity: 1 } } } 2025-03-31T15:54:18.365430Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285123, Sender [1:7488002573055230173:2413], Recipient [1:7488002542990457885:2196]: NKikimr::NConsole::TEvConsole::TEvListTenantsRequest { Request { } UserToken: "" } 2025-03-31T15:54:18.365457Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvListTenantsRequest 2025-03-31T15:54:18.365613Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvListTenantsResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.ListDatabasesResult] { } } } } 2025-03-31T15:54:18.414459Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2025-03-31T15:54:18.414740Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected
|66.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/control/ut/ydb-core-control-ut
|66.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/control/ut/ydb-core-control-ut
|66.3%| [LD] {RESULT} $(B)/ydb/core/control/ut/ydb-core-control-ut
------- [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::SimpleTenantsTest [GOOD]
Test command err: 2025-03-31T15:54:11.976391Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488002541964739923:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:54:11.976431Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000b1c/r3tmp/tmp8ryqNa/pdisk_1.dat 2025-03-31T15:54:12.500574Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:12.500686Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:12.507556Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:54:12.509714Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25739, node 1 2025-03-31T15:54:12.618677Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:54:12.618705Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:54:12.618713Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:54:12.618856Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21697 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:54:13.092414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:54:13.422632Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285120, Sender [1:7488002550554675281:2314], Recipient [1:7488002546259707668:2195]: NKikimr::NConsole::TEvConsole::TEvCreateTenantRequest { Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" } 2025-03-31T15:54:13.427373Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvCreateTenantRequest 2025-03-31T15:54:13.427454Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-03-31T15:54:13.427479Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-03-31T15:54:13.427724Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant: Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" 2025-03-31T15:54:13.431302Z node 1 :CMS_TENANTS DEBUG: Add tenant /Root/users/user-1 (txid = 1743436453431160) 2025-03-31T15:54:13.432395Z node 1 :CMS_TENANTS TRACE: Add tenant /Root/users/user-1 to database state=CREATING_POOLS coordinators=3 mediators=3 planresolution=10 timecastbucketspermediator=2 issue= txid=1743436453431160 subdomainversion=1 confirmedsubdomain=0 attrs= generation=1 errorcode=STATUS_CODE_UNSPECIFIED isExternalSubDomain=1 isExternalHive=1 isExternalSysViewProcessor=1 isExternalStatisticsAggregator=1 areResourcesShared=0 sharedDomainId= 2025-03-31T15:54:13.432607Z node 1 :CMS_TENANTS TRACE: Add tenant pool /Root/users/user-1:hdd to database kind=hdd config=BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } allocatednumgroups=0 state=NOT_ALLOCATED 2025-03-31T15:54:13.437944Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant Complete 2025-03-31T15:54:13.438850Z node 1 :CMS_TENANTS TRACE: Send: NKikimr::NConsole::TEvConsole::TEvCreateTenantResponse { Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743436453431160&action=1" } } } 2025-03-31T15:54:13.439029Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-03-31T15:54:13.439120Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2025-03-31T15:54:13.439281Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: 
"/Root/users/user-1:hdd" } } } 2025-03-31T15:54:13.439794Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true } Success: true ConfigTxSeqNo: 5 2025-03-31T15:54:13.439956Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DefineStoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } } 2025-03-31T15:54:13.460008Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 6 2025-03-31T15:54:13.460098Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-03-31T15:54:13.460211Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435079, Sender [1:7488002550554675288:2195], Recipient [1:7488002546259707668:2195]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-03-31T15:54:13.460247Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvPoolAllocated 2025-03-31T15:54:13.460262Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-03-31T15:54:13.460273Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-03-31T15:54:13.460327Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=ALLOCATED 2025-03-31T15:54:13.460368Z node 1 :CMS_TENANTS TRACE: Update pool state in database for /Root/users/user-1:hdd state=ALLOCATED allocatednumgroups=1 2025-03-31T15:54:13.460441Z node 1 :CMS_TENANTS TRACE: Update subdomain version in database for /Root/users/user-1 subdomainversion=2 2025-03-31T15:54:13.461440Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7488002550554675295:2315], Recipient [1:7488002546259707668:2195]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743436453431160&action=1" } UserToken: "" } 2025-03-31T15:54:13.461476Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-03-31T15:54:13.461708Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743436453431160&action=1" } } 2025-03-31T15:54:13.473956Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2025-03-31T15:54:13.473990Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-03-31T15:54:13.473998Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-03-31T15:54:13.474011Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-03-31T15:54:13.474072Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState for tenant /Root/users/user-1 to CREATING_SUBDOMAIN 2025-03-31T15:54:13.474093Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=CREATING_SUBDOMAIN txid=1743436453431160 errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-03-31T15:54:13.487857Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState complete for /Root/users/user-1 2025-03-31T15:54:13.488023Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-03-31T15:54:13.488066Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1)::Bootstrap 2025-03-31T15:54:13.488207Z node 1 
:CMS_TENANTS DEBUG: TSubDomainManip(/Root/users/user-1) create subdomain 2025-03-31T15:54:13.496738Z node 1 :CMS_TENANTS TRACE: TSubdomainManip(/Root/users/user-1) send subdomain creation cmd: NKikimrTxUserProxy.TEvProposeTransaction Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateExtSubDomain SubDomain { Name: "users/user-1" ExternalSchemeShard: true ExternalHive: true ExternalSysViewProcessor: true ExternalStatisticsAggregator: true GraphShard: true } } } ExecTimeoutPeriod: 18446744073709551615 DatabaseName: "Root" 2025-03-31T15:54:13.498178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710658:1, at schemeshard: 72057594046644480 2025-03-31T15:54:13.508683Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got propose result: Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 2025-03-31T15:54:13.508766Z node 1 :CMS_TENANTS TRACE: TSubdomainManip(/Root/users/user-1) send notification request: NKikimrScheme.TEvNotifyTxCompletion TxId: 281474976710658 2025-03-31T15:54:13.525536Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionRegistered: TxId: 281474976710658 2025-03-31T15:54:13.527534Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7488002550554675366:2318], Recipient [1:7488002546259707668:2195]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743436453431160&action=1" } UserToken: "" } 2025-03-31T15:54:13.527561Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-03-31T15:54:13.527736Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743436453431160&action=1" } } 2025-03-31T15:54:13.542895Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976710658 2025-03-31T15:54:13.543664Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got describe result: Status: StatusSuccess Path: "/Root/users/user-1" PathDescription { Self { Name: "user-1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743436453570 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046644480 PathId: 3 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057 ... 
ot/users/user-1) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2025-03-31T15:54:22.712881Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435077, Sender [1:7488002589209381758:2195], Recipient [1:7488002546259707668:2195]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2025-03-31T15:54:22.712898Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvSubdomainRemoved 2025-03-31T15:54:22.712912Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-03-31T15:54:22.712920Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-03-31T15:54:22.713171Z node 1 :CMS_TENANTS DEBUG: TTxRemoveComputationalUnits Execute /Root/users/user-1 2025-03-31T15:54:22.713192Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=REMOVING_UNITS txid=1743436462499098 errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-03-31T15:54:22.713236Z node 1 :CMS_TENANTS TRACE: Remove computational units of /Root/users/user-1 from database txid=1743436462499098 issue= 2025-03-31T15:54:22.713031Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxDeleteTablet tablet (72057594046644480,1) wasn't found - using supplied 72075186224037888 2025-03-31T15:54:22.719889Z node 1 :CMS_TENANTS DEBUG: TTxRemoveComputationalUnits Complete /Root/users/user-1 2025-03-31T15:54:22.720156Z node 1 :CMS_TENANTS TRACE: Send TEvTenantSlotBroker::TEvAlterTenant: TenantName: "/Root/users/user-1" 2025-03-31T15:54:22.720171Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-03-31T15:54:22.720329Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273154052, Sender [1:7488002546259707563:2201], Recipient [1:7488002546259707668:2195]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2025-03-31T15:54:22.720344Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2025-03-31T15:54:22.720358Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-03-31T15:54:22.720365Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-03-31T15:54:22.720386Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState for tenant /Root/users/user-1 to REMOVING_POOLS 2025-03-31T15:54:22.720589Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=REMOVING_POOLS txid=1743436462499098 errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-03-31T15:54:22.733993Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7488002589209381832:2412], Recipient [1:7488002546259707668:2195]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743436462499098&action=2" } UserToken: "" } 2025-03-31T15:54:22.734033Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-03-31T15:54:22.734157Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743436462499098&action=2" } } 2025-03-31T15:54:22.783477Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState complete for /Root/users/user-1 2025-03-31T15:54:22.783530Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-03-31T15:54:22.783567Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2025-03-31T15:54:22.783675Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) read pool state: 
Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2025-03-31T15:54:22.792117Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true StoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" Geometry { } VDiskKind: "Default" Kind: "hdd" NumGroups: 2 PDiskFilter { Property { Type: ROT } } ScopeId { X1: 72057594046644480 X2: 3 } ItemConfigGeneration: 3 } } Success: true ConfigTxSeqNo: 13 2025-03-31T15:54:22.805949Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037890 not found 2025-03-31T15:54:22.805979Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037893 not found 2025-03-31T15:54:22.805994Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037894 not found 2025-03-31T15:54:22.815261Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DeleteStoragePool { BoxId: 999 StoragePoolId: 4 ItemConfigGeneration: 3 } } } 2025-03-31T15:54:22.817045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got DeleteTabletReply with Forward response from Hive 72075186224037888 to Hive 72057594037968897 shardIdx 72057594046644480:1 2025-03-31T15:54:22.837258Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037897 not found 2025-03-31T15:54:22.837295Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037895 not found 2025-03-31T15:54:22.837308Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037889 not found 2025-03-31T15:54:22.837321Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037891 not found 2025-03-31T15:54:22.837333Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037892 not found 2025-03-31T15:54:22.837350Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037896 not found 2025-03-31T15:54:22.837701Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7488002589209381840:2414], Recipient [1:7488002546259707668:2195]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743436462499098&action=2" } UserToken: "" } 2025-03-31T15:54:22.837732Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-03-31T15:54:22.837893Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743436462499098&action=2" } } 2025-03-31T15:54:23.022656Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037888 not found 2025-03-31T15:54:23.040947Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 14 2025-03-31T15:54:23.041238Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435081, Sender [1:7488002589209381837:2195], Recipient [1:7488002546259707668:2195]: 
NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolDeleted 2025-03-31T15:54:23.041264Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvPoolDeleted 2025-03-31T15:54:23.041277Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-03-31T15:54:23.041286Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-03-31T15:54:23.041495Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=DELETED 2025-03-31T15:54:23.041516Z node 1 :CMS_TENANTS TRACE: Update pool state in database for /Root/users/user-1:hdd state=DELETED allocatednumgroups=0 2025-03-31T15:54:23.064614Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxBlockStorageResult retrying for 72075186224037888 because of ERROR 2025-03-31T15:54:23.128356Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7488002593504349183:2418], Recipient [1:7488002546259707668:2195]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743436462499098&action=2" } UserToken: "" } 2025-03-31T15:54:23.128387Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-03-31T15:54:23.128526Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743436462499098&action=2" } } 2025-03-31T15:54:23.129013Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2025-03-31T15:54:23.129031Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-03-31T15:54:23.129038Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-03-31T15:54:23.129045Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-03-31T15:54:23.129087Z node 1 :CMS_TENANTS DEBUG: TTxRemoveTenantDone for tenant /Root/users/user-1 txid=1743436462499098 2025-03-31T15:54:23.129100Z node 1 :CMS_TENANTS TRACE: Remove computational units of /Root/users/user-1 from database txid=1743436462499098 issue= 2025-03-31T15:54:23.129109Z node 1 :CMS_TENANTS TRACE: Remove tenant /Root/users/user-1 from database txid=1743436462499098 issue= 2025-03-31T15:54:23.129117Z node 1 :CMS_TENANTS TRACE: Remove pool /Root/users/user-1:hdd from database 2025-03-31T15:54:23.129352Z node 1 :CMS_TENANTS TRACE: Add tenant removal info for /Root/users/user-1 txid=1743436462499098 code=SUCCESS errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-03-31T15:54:23.158852Z node 1 :CMS_TENANTS DEBUG: TTxRemoveTenantDone Complete 2025-03-31T15:54:23.159136Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-03-31T15:54:23.247443Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7488002593504349204:2420], Recipient [1:7488002546259707668:2195]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743436462499098&action=2" } UserToken: "" } 2025-03-31T15:54:23.247473Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-03-31T15:54:23.247630Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743436462499098&action=2" ready: true status: SUCCESS } } 2025-03-31T15:54:23.399535Z node 1 :CMS_TENANTS TRACE: StateWork, received 
event# 273285122, Sender [1:7488002593504349207:2422], Recipient [1:7488002546259707668:2195]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "" } 2025-03-31T15:54:23.399566Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2025-03-31T15:54:23.403621Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: NOT_FOUND issues { message: "Unknown tenant /Root/users/user-1" severity: 1 } } } 2025-03-31T15:54:23.483892Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285123, Sender [1:7488002593504349210:2423], Recipient [1:7488002546259707668:2195]: NKikimr::NConsole::TEvConsole::TEvListTenantsRequest { Request { } UserToken: "" } 2025-03-31T15:54:23.483917Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvListTenantsRequest 2025-03-31T15:54:23.484075Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvListTenantsResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.ListDatabasesResult] { } } } } 2025-03-31T15:54:23.631733Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2025-03-31T15:54:23.651882Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected
>> THiveTest::TestHiveBalancerWithPrefferedDC2 [GOOD]
>> THiveTest::TestHiveBalancerWithPreferredDC3
>> DstCreator::NonExistentSrc
>> THiveTest::TestSpreadNeighboursWithUpdateTabletsObject [GOOD]
>> THiveTest::TestSpreadNeighboursDifferentOwners
>> CompressExecutor::TestReorderedExecutor [GOOD]
>> PersQueueSdkReadSessionTest::ReadSessionWithAbort
>> THiveTest::TestHiveBalancer [GOOD]
>> THiveTest::TestHiveBalancerDifferentResources
>> TGRpcCmsTest::RemoveWithAnotherTokenTest [GOOD]
>> THiveTest::TestLockTabletExecutionTimeout [GOOD]
>> THiveTest::TestLockTabletExecutionRebootTimeout
>> TGRpcCmsTest::AuthTokenTest [GOOD]
>> TAccessServiceTest::PassRequestId [GOOD]
>> TUserAccountServiceTest::Get [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::RemoveWithAnotherTokenTest [GOOD]
Test command err: 2025-03-31T15:54:12.642377Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488002547819188078:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:54:12.642457Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000b2d/r3tmp/tmpkXoSCE/pdisk_1.dat 2025-03-31T15:54:13.624310Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:54:13.668048Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:13.668161Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:13.671394Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:13.712130Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5857, node 1 2025-03-31T15:54:14.328875Z
node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:54:14.329066Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:54:14.329190Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:54:14.329321Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T15:54:17.647704Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488002547819188078:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:54:17.647947Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:24462 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:54:22.465826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
TClient is connected to server localhost:24462 2025-03-31T15:54:23.934103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T15:54:24.134297Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285120, Sender [1:7488002599358796556:2330], Recipient [1:7488002552114155842:2197]: NKikimr::NConsole::TEvConsole::TEvCreateTenantRequest { Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "\n\016user-1@builtin\022\030\022\026\n\024all-users@well-known\032\016user-1@builtin\"\007Builtin*\017**** (E3DE7296)" } 2025-03-31T15:54:24.134337Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvCreateTenantRequest 2025-03-31T15:54:24.134360Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-03-31T15:54:24.134372Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-03-31T15:54:24.137431Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant: Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "\n\016user-1@builtin\022\030\022\026\n\024all-users@well-known\032\016user-1@builtin\"\007Builtin*\017**** (E3DE7296)" 2025-03-31T15:54:24.137656Z node 1 :CMS_TENANTS DEBUG: Add tenant /Root/users/user-1 (txid = 1743436464134586) 2025-03-31T15:54:24.138253Z node 1 :CMS_TENANTS TRACE: Add tenant /Root/users/user-1 to database state=CREATING_POOLS coordinators=3 mediators=3 planresolution=10 timecastbucketspermediator=2 issue= txid=1743436464134586 subdomainversion=1 confirmedsubdomain=0 attrs= generation=1 errorcode=STATUS_CODE_UNSPECIFIED isExternalSubDomain=1 isExternalHive=1 isExternalSysViewProcessor=1 isExternalStatisticsAggregator=1 areResourcesShared=0 sharedDomainId= 2025-03-31T15:54:24.138417Z node 1 :CMS_TENANTS TRACE: Add tenant pool /Root/users/user-1:hdd to database kind=hdd config=BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } allocatednumgroups=0 state=NOT_ALLOCATED 2025-03-31T15:54:24.159516Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant Complete 2025-03-31T15:54:24.165737Z node 1 :CMS_TENANTS TRACE: Send: NKikimr::NConsole::TEvConsole::TEvCreateTenantResponse { Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743436464134586&action=1" } } } 2025-03-31T15:54:24.166080Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-03-31T15:54:24.166150Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2025-03-31T15:54:24.166845Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2025-03-31T15:54:24.167839Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true } Success: true ConfigTxSeqNo: 5 2025-03-31T15:54:24.174119Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DefineStoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } } 2025-03-31T15:54:24.214576Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true 
} Success: true ConfigTxSeqNo: 6 2025-03-31T15:54:24.227159Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-03-31T15:54:24.227987Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435079, Sender [1:7488002599358796562:2197], Recipient [1:7488002552114155842:2197]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-03-31T15:54:24.228004Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvPoolAllocated 2025-03-31T15:54:24.228019Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-03-31T15:54:24.228029Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-03-31T15:54:24.228062Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=ALLOCATED 2025-03-31T15:54:24.228245Z node 1 :CMS_TENANTS TRACE: Update pool state in database for /Root/users/user-1:hdd state=ALLOCATED allocatednumgroups=1 2025-03-31T15:54:24.228280Z node 1 :CMS_TENANTS TRACE: Update subdomain version in database for /Root/users/user-1 subdomainversion=2 2025-03-31T15:54:24.259312Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2025-03-31T15:54:24.259505Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-03-31T15:54:24.259517Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-03-31T15:54:24.259525Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-03-31T15:54:24.259572Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState for tenant /Root/users/user-1 to CREATING_SUBDOMAIN 2025-03-31T15:54:24.259596Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=CREATING_SUBDOMAIN txid=1743436464134586 errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-03-31T15:54:24.307605Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState complete for /Root/users/user-1 2025-03-31T15:54:24.307915Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-03-31T15:54:24.307952Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1)::Bootstrap 2025-03-31T15:54:24.308145Z node 1 :CMS_TENANTS DEBUG: TSubDomainManip(/Root/users/user-1) create subdomain 2025-03-31T15:54:24.344211Z node 1 :CMS_TENANTS TRACE: TSubdomainManip(/Root/users/user-1) send subdomain creation cmd: NKikimrTxUserProxy.TEvProposeTransaction Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateExtSubDomain SubDomain { Name: "users/user-1" ExternalSchemeShard: true ExternalHive: true ExternalSysViewProcessor: true ExternalStatisticsAggregator: true GraphShard: true } } } ExecTimeoutPeriod: 18446744073709551615 UserToken: "\n\016user-1@builtin\022\030\022\026\n\024all-users@well-known\032\016user-1@builtin\"\007Builtin*\017**** (E3DE7296)" DatabaseName: "Root" 2025-03-31T15:54:24.345413Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7488002599358796579:2332], Recipient [1:7488002552114155842:2197]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743436464134586&action=1" } UserToken: "\n\016user-1@builtin\022\030\022\026\n\024all-users@well-known\032\016user-1@builtin\"\007Builtin*\017**** (E3DE7296)" } 2025-03-31T15:54:24.345432Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-03-31T15:54:24.346259Z node 1 :CMS_TENANTS TRACE: Send 
TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743436464134586&action=1" } } 2025-03-31T15:54:24.391572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710659:1, at schemeshard: 72057594046644480 2025-03-31T15:54:24.399477Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got propose result: Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 2025-03-31T15:54:24.399536Z node 1 :CMS_TENANTS TRACE: TSubdomainManip(/Root/users/user-1) send notification request: NKikimrScheme.TEvNotifyTxCompletion TxId: 281474976710659 2025-03-31T15:54:24.467460Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionRegistered: TxId: 281474976710659 2025-03-31T15:54:24.518532Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976710659 2025-03-31T15:54:24.519497Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got describe result: Status: StatusSuccess Path: "/Root/users/user-1" PathDescription { Self { Name: "user-1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId ... 6.702871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Mark as Dropping path id [OwnerId: 72057594046644480, LocalPathId: 3] by tx: 281474976710663 2025-03-31T15:54:26.703459Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionRegistered: TxId: 281474976710663 2025-03-31T15:54:26.706463Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7488002607948731902:2422], Recipient [1:7488002552114155842:2197]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743436466691077&action=2" } UserToken: "" } 2025-03-31T15:54:26.706494Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-03-31T15:54:26.706640Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743436466691077&action=2" } } 2025-03-31T15:54:26.768318Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7488002607948731946:2425], Recipient [1:7488002552114155842:2197]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743436466691077&action=2" } UserToken: "" } 2025-03-31T15:54:26.768349Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-03-31T15:54:26.768515Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743436466691077&action=2" } } 2025-03-31T15:54:26.769224Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976710663 2025-03-31T15:54:26.769249Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) done 2025-03-31T15:54:26.769295Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2025-03-31T15:54:26.769396Z node 1 
:CMS_TENANTS TRACE: StateWork, received event# 2146435077, Sender [1:7488002607948731885:2197], Recipient [1:7488002552114155842:2197]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2025-03-31T15:54:26.769411Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvSubdomainRemoved 2025-03-31T15:54:26.769429Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-03-31T15:54:26.769440Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-03-31T15:54:26.769494Z node 1 :CMS_TENANTS DEBUG: TTxRemoveComputationalUnits Execute /Root/users/user-1 2025-03-31T15:54:26.769514Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=REMOVING_UNITS txid=1743436466691077 errorcode=UNAUTHORIZED issue=AccessDenied: Access denied for request 2025-03-31T15:54:26.769578Z node 1 :CMS_TENANTS TRACE: Remove computational units of /Root/users/user-1 from database txid=1743436466691077 issue=AccessDenied: Access denied for request 2025-03-31T15:54:26.769037Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxDeleteTablet tablet (72057594046644480,1) wasn't found - using supplied 72075186224037888 2025-03-31T15:54:26.783789Z node 1 :CMS_TENANTS DEBUG: TTxRemoveComputationalUnits Complete /Root/users/user-1 2025-03-31T15:54:26.783871Z node 1 :CMS_TENANTS TRACE: Send TEvTenantSlotBroker::TEvAlterTenant: TenantName: "/Root/users/user-1" 2025-03-31T15:54:26.783885Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-03-31T15:54:26.784115Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273154052, Sender [1:7488002547819188414:2193], Recipient [1:7488002552114155842:2197]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2025-03-31T15:54:26.784131Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2025-03-31T15:54:26.784153Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-03-31T15:54:26.784162Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-03-31T15:54:26.784189Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState for tenant /Root/users/user-1 to REMOVING_POOLS 2025-03-31T15:54:26.784211Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=REMOVING_POOLS txid=1743436466691077 errorcode=UNAUTHORIZED issue=AccessDenied: Access denied for request 2025-03-31T15:54:26.795819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got DeleteTabletReply with Forward response from Hive 72075186224037888 to Hive 72057594037968897 shardIdx 72057594046644480:1 2025-03-31T15:54:26.800037Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState complete for /Root/users/user-1 2025-03-31T15:54:26.800102Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-03-31T15:54:26.800136Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2025-03-31T15:54:26.800264Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2025-03-31T15:54:26.800817Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true StoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" Geometry { } VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } ScopeId { X1: 72057594046644480 X2: 3 } ItemConfigGeneration: 2 } } Success: true ConfigTxSeqNo: 10 2025-03-31T15:54:26.800966Z node 1 :CMS_TENANTS 
DEBUG: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DeleteStoragePool { BoxId: 999 StoragePoolId: 4 ItemConfigGeneration: 2 } } } 2025-03-31T15:54:26.806427Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037892 not found 2025-03-31T15:54:26.806475Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037895 not found 2025-03-31T15:54:26.806488Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037889 not found 2025-03-31T15:54:26.806499Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037896 not found 2025-03-31T15:54:26.806525Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037897 not found 2025-03-31T15:54:26.806544Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037890 not found 2025-03-31T15:54:26.806563Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037893 not found 2025-03-31T15:54:26.806576Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037894 not found 2025-03-31T15:54:26.806590Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037891 not found 2025-03-31T15:54:26.826098Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037888 not found 2025-03-31T15:54:26.827570Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 11 2025-03-31T15:54:26.827694Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435081, Sender [1:7488002607948731968:2197], Recipient [1:7488002552114155842:2197]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolDeleted 2025-03-31T15:54:26.827739Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvPoolDeleted 2025-03-31T15:54:26.827755Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-03-31T15:54:26.827764Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-03-31T15:54:26.827806Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=DELETED 2025-03-31T15:54:26.827826Z node 1 :CMS_TENANTS TRACE: Update pool state in database for /Root/users/user-1:hdd state=DELETED allocatednumgroups=0 2025-03-31T15:54:26.832749Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxBlockStorageResult retrying for 72075186224037888 because of ERROR 2025-03-31T15:54:26.840408Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7488002607948731997:2428], Recipient [1:7488002552114155842:2197]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743436466691077&action=2" } UserToken: "" } 2025-03-31T15:54:26.840444Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-03-31T15:54:26.849606Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743436466691077&action=2" } } 
2025-03-31T15:54:26.855896Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2025-03-31T15:54:26.861473Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-03-31T15:54:26.861495Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-03-31T15:54:26.861503Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-03-31T15:54:26.861625Z node 1 :CMS_TENANTS DEBUG: TTxRemoveTenantDone for tenant /Root/users/user-1 txid=1743436466691077 2025-03-31T15:54:26.879179Z node 1 :CMS_TENANTS TRACE: Remove computational units of /Root/users/user-1 from database txid=1743436466691077 issue=AccessDenied: Access denied for request 2025-03-31T15:54:26.879238Z node 1 :CMS_TENANTS TRACE: Remove tenant /Root/users/user-1 from database txid=1743436466691077 issue=AccessDenied: Access denied for request 2025-03-31T15:54:26.879254Z node 1 :CMS_TENANTS TRACE: Remove pool /Root/users/user-1:hdd from database 2025-03-31T15:54:26.879363Z node 1 :CMS_TENANTS TRACE: Add tenant removal info for /Root/users/user-1 txid=1743436466691077 code=SUCCESS errorcode=UNAUTHORIZED issue=AccessDenied: Access denied for request 2025-03-31T15:54:26.905028Z node 1 :CMS_TENANTS DEBUG: TTxRemoveTenantDone Complete 2025-03-31T15:54:26.905096Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-03-31T15:54:26.941335Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7488002607948732009:2431], Recipient [1:7488002552114155842:2197]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743436466691077&action=2" } UserToken: "" } 2025-03-31T15:54:26.941370Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-03-31T15:54:26.941538Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743436466691077&action=2" ready: true status: SUCCESS } } 2025-03-31T15:54:26.995567Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2025-03-31T15:54:26.995770Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-03-31T15:54:29.630095Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488002598517814682:2221];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:54:29.630213Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/users/user-1/.metadata/initialization/migrations;error=timeout;
------- [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::AuthTokenTest [GOOD]
Test command err: 2025-03-31T15:54:12.663383Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488002544460556974:2277];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:54:12.663429Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000c00/r3tmp/tmpZNLPNb/pdisk_1.dat 2025-03-31T15:54:13.891565Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path
existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:14.142366Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:14.142440Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:14.156553Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6992, node 1 2025-03-31T15:54:18.264696Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488002544460556974:2277];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:54:18.264934Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:54:18.288228Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:54:18.333746Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:54:18.333761Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:54:18.334114Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:54:18.334396Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15014 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:54:22.341414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T15:54:22.858497Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285120, Sender [1:7488002587410230637:2323], Recipient [1:7488002548755524584:2207]: NKikimr::NConsole::TEvConsole::TEvCreateTenantRequest { Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" } 2025-03-31T15:54:22.858538Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvCreateTenantRequest 2025-03-31T15:54:22.858603Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-03-31T15:54:22.858620Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-03-31T15:54:22.858726Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant: Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" 2025-03-31T15:54:22.858945Z node 1 :CMS_TENANTS DEBUG: Add tenant /Root/users/user-1 (txid = 1743436462853662) 2025-03-31T15:54:22.867569Z node 1 :CMS_TENANTS TRACE: Add tenant /Root/users/user-1 to database state=CREATING_POOLS coordinators=3 mediators=3 planresolution=10 timecastbucketspermediator=2 issue= txid=1743436462853662 subdomainversion=1 confirmedsubdomain=0 attrs= generation=1 errorcode=STATUS_CODE_UNSPECIFIED isExternalSubDomain=1 isExternalHive=1 isExternalSysViewProcessor=1 isExternalStatisticsAggregator=1 areResourcesShared=0 sharedDomainId= 2025-03-31T15:54:22.867775Z node 1 :CMS_TENANTS TRACE: Add tenant pool /Root/users/user-1:hdd to database kind=hdd config=BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } allocatednumgroups=0 state=NOT_ALLOCATED 2025-03-31T15:54:22.891794Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant Complete 2025-03-31T15:54:22.892906Z node 1 :CMS_TENANTS TRACE: Send: NKikimr::NConsole::TEvConsole::TEvCreateTenantResponse { Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743436462853662&action=1" } } } 2025-03-31T15:54:22.893089Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-03-31T15:54:22.893157Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2025-03-31T15:54:22.893271Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2025-03-31T15:54:22.893689Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true } Success: true ConfigTxSeqNo: 5 2025-03-31T15:54:22.893799Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DefineStoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } } 2025-03-31T15:54:22.907130Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7488002587410230648:2324], Recipient [1:7488002548755524584:2207]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743436462853662&action=1" } UserToken: 
"\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" } 2025-03-31T15:54:22.907163Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-03-31T15:54:22.907349Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743436462853662&action=1" } } 2025-03-31T15:54:22.917041Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 6 2025-03-31T15:54:22.917093Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-03-31T15:54:22.917473Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435079, Sender [1:7488002587410230642:2207], Recipient [1:7488002548755524584:2207]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-03-31T15:54:22.917494Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvPoolAllocated 2025-03-31T15:54:22.917508Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-03-31T15:54:22.917520Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-03-31T15:54:22.917702Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=ALLOCATED 2025-03-31T15:54:22.917718Z node 1 :CMS_TENANTS TRACE: Update pool state in database for /Root/users/user-1:hdd state=ALLOCATED allocatednumgroups=1 2025-03-31T15:54:22.917774Z node 1 :CMS_TENANTS TRACE: Update subdomain version in database for /Root/users/user-1 subdomainversion=2 2025-03-31T15:54:22.927945Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2025-03-31T15:54:22.927966Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-03-31T15:54:22.928185Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-03-31T15:54:22.928193Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-03-31T15:54:22.928250Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState for tenant /Root/users/user-1 to CREATING_SUBDOMAIN 2025-03-31T15:54:22.928456Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=CREATING_SUBDOMAIN txid=1743436462853662 errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-03-31T15:54:23.040026Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState complete for /Root/users/user-1 2025-03-31T15:54:23.059470Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-03-31T15:54:23.063257Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1)::Bootstrap 2025-03-31T15:54:23.063269Z node 1 :CMS_TENANTS DEBUG: TSubDomainManip(/Root/users/user-1) create subdomain 2025-03-31T15:54:23.102738Z node 1 :CMS_TENANTS TRACE: TSubdomainManip(/Root/users/user-1) send subdomain creation cmd: NKikimrTxUserProxy.TEvProposeTransaction Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateExtSubDomain SubDomain { Name: "users/user-1" ExternalSchemeShard: true ExternalHive: true ExternalSysViewProcessor: true ExternalStatisticsAggregator: true GraphShard: true } } } ExecTimeoutPeriod: 18446744073709551615 UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "Root" 2025-03-31T15:54:23.102922Z node 1 :CMS_TENANTS TRACE: StateWork, received 
event# 273285131, Sender [1:7488002591705197991:2327], Recipient [1:7488002548755524584:2207]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743436462853662&action=1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" } 2025-03-31T15:54:23.102935Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-03-31T15:54:23.103072Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743436462853662&action=1" } } 2025-03-31T15:54:23.128835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710658:1, at schemeshard: 72057594046644480 2025-03-31T15:54:23.172638Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got propose result: Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 2025-03-31T15:54:23.172890Z node 1 :CMS_TENANTS TRACE: TSubdomainManip(/Root/users/user-1) send notification request: NKikimrScheme.TEvNotifyTxCompletion TxId: 281474976710658 2025-03-31T15:54:23.210290Z node 1 :CMS_TENANTS TRACE: StateWork, r ... hdd" count: 1 } } generation: 1 } } } } 2025-03-31T15:54:25.080438Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285122, Sender [1:7488002600295133029:2373], Recipient [1:7488002548755524584:2207]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" } 2025-03-31T15:54:25.080481Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2025-03-31T15:54:25.080542Z node 1 :CMS_TENANTS TRACE: Send TEvTenantSlotBroker::TEvGetTenantState: TenantName: "/Root/users/user-1" 2025-03-31T15:54:25.080707Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273154052, Sender [1:7488002548755524427:2195], Recipient [1:7488002548755524584:2207]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2025-03-31T15:54:25.080737Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2025-03-31T15:54:25.081440Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.GetDatabaseStatusResult] { path: "/Root/users/user-1" state: PENDING_RESOURCES required_resources { storage_units { unit_kind: "hdd" count: 1 } } allocated_resources { storage_units { unit_kind: "hdd" count: 1 } } generation: 1 } } } } 2025-03-31T15:54:25.086715Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285122, Sender [1:7488002600295133041:2374], Recipient [1:7488002548755524584:2207]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" } 2025-03-31T15:54:25.086750Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2025-03-31T15:54:25.086789Z node 1 :CMS_TENANTS TRACE: Send TEvTenantSlotBroker::TEvGetTenantState: TenantName: 
"/Root/users/user-1" 2025-03-31T15:54:25.086896Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273154052, Sender [1:7488002548755524427:2195], Recipient [1:7488002548755524584:2207]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2025-03-31T15:54:25.086909Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2025-03-31T15:54:25.087579Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.GetDatabaseStatusResult] { path: "/Root/users/user-1" state: PENDING_RESOURCES required_resources { storage_units { unit_kind: "hdd" count: 1 } } allocated_resources { storage_units { unit_kind: "hdd" count: 1 } } generation: 1 } } } } 2025-03-31T15:54:25.092768Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285122, Sender [1:7488002600295133049:2375], Recipient [1:7488002548755524584:2207]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" } 2025-03-31T15:54:25.092797Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2025-03-31T15:54:25.092830Z node 1 :CMS_TENANTS TRACE: Send TEvTenantSlotBroker::TEvGetTenantState: TenantName: "/Root/users/user-1" 2025-03-31T15:54:25.092951Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273154052, Sender [1:7488002548755524427:2195], Recipient [1:7488002548755524584:2207]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2025-03-31T15:54:25.092979Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2025-03-31T15:54:25.093486Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.GetDatabaseStatusResult] { path: "/Root/users/user-1" state: PENDING_RESOURCES required_resources { storage_units { unit_kind: "hdd" count: 1 } } allocated_resources { storage_units { unit_kind: "hdd" count: 1 } } generation: 1 } } } } 2025-03-31T15:54:25.098076Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285122, Sender [1:7488002600295133059:2376], Recipient [1:7488002548755524584:2207]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" } 2025-03-31T15:54:25.098115Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2025-03-31T15:54:25.098148Z node 1 :CMS_TENANTS TRACE: Send TEvTenantSlotBroker::TEvGetTenantState: TenantName: "/Root/users/user-1" 2025-03-31T15:54:25.098252Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273154052, Sender [1:7488002548755524427:2195], Recipient [1:7488002548755524584:2207]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2025-03-31T15:54:25.098267Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2025-03-31T15:54:25.098805Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.GetDatabaseStatusResult] { path: "/Root/users/user-1" state: PENDING_RESOURCES required_resources { storage_units { 
unit_kind: "hdd" count: 1 } } allocated_resources { storage_units { unit_kind: "hdd" count: 1 } } generation: 1 } } } } 2025-03-31T15:54:25.101871Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285122, Sender [1:7488002600295133066:2377], Recipient [1:7488002548755524584:2207]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" } 2025-03-31T15:54:25.101902Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2025-03-31T15:54:25.101964Z node 1 :CMS_TENANTS TRACE: Send TEvTenantSlotBroker::TEvGetTenantState: TenantName: "/Root/users/user-1" 2025-03-31T15:54:25.102610Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273154052, Sender [1:7488002548755524427:2195], Recipient [1:7488002548755524584:2207]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2025-03-31T15:54:25.102637Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2025-03-31T15:54:25.103477Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.GetDatabaseStatusResult] { path: "/Root/users/user-1" state: PENDING_RESOURCES required_resources { storage_units { unit_kind: "hdd" count: 1 } } allocated_resources { storage_units { unit_kind: "hdd" count: 1 } } generation: 1 } } } } 2025-03-31T15:54:25.108353Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976710659 2025-03-31T15:54:25.108926Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) done 2025-03-31T15:54:25.108987Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainReady 2025-03-31T15:54:25.109062Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435076, Sender [1:7488002591705198073:2207], Recipient [1:7488002548755524584:2207]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainReady 2025-03-31T15:54:25.109078Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvSubdomainReady 2025-03-31T15:54:25.109100Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-03-31T15:54:25.109108Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-03-31T15:54:25.109142Z node 1 :CMS_TENANTS DEBUG: TTxUpdateConfirmedSubdomain for tenant /Root/users/user-1 to 2 2025-03-31T15:54:25.109163Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=RUNNING txid=1743436462853662 errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-03-31T15:54:25.109239Z node 1 :CMS_TENANTS TRACE: Update database for /Root/users/user-1 confirmedsubdomain=2 2025-03-31T15:54:25.113329Z node 1 :CMS_TENANTS DEBUG: TTxUpdateConfirmedSubdomain complete for /Root/users/user-1 2025-03-31T15:54:25.113360Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-03-31T15:54:25.114004Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285122, Sender [1:7488002600295133084:2378], Recipient [1:7488002548755524584:2207]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" } 2025-03-31T15:54:25.114039Z node 1 
:CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2025-03-31T15:54:25.114111Z node 1 :CMS_TENANTS TRACE: Send TEvTenantSlotBroker::TEvGetTenantState: TenantName: "/Root/users/user-1" 2025-03-31T15:54:25.114238Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273154052, Sender [1:7488002548755524427:2195], Recipient [1:7488002548755524584:2207]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2025-03-31T15:54:25.114251Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2025-03-31T15:54:25.114765Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.GetDatabaseStatusResult] { path: "/Root/users/user-1" state: RUNNING required_resources { storage_units { unit_kind: "hdd" count: 1 } } allocated_resources { storage_units { unit_kind: "hdd" count: 1 } } generation: 1 } } } } TClient is connected to server localhost:15014 TClient::Ls request: /Root/users/user-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root/users/user-1" PathId: 1 SchemeshardId: 72075186224037897 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 3 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72075186224037897 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 3 ProcessingParams { Version: 3 PlanReso... 
(TRUNCATED) 2025-03-31T15:54:25.793738Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2025-03-31T15:54:25.794137Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-03-31T15:54:28.545948Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488002594193576872:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:54:28.545995Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/users/user-1/.metadata/initialization/migrations;error=timeout;
>> FolderServiceTest::TFolderService [GOOD]
>> THiveTest::TestSpreadNeighboursDifferentOwners [GOOD]
>> THiveTest::TestUpdateTabletsObjectUpdatesMetrics
>> DstCreator::SameOwner
------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> TAccessServiceTest::PassRequestId [GOOD]
Test command err: 2025-03-31T15:54:25.403725Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488002601647798911:2183];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:54:25.406328Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001b36/r3tmp/tmpgI3CId/pdisk_1.dat 2025-03-31T15:54:26.656768Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:54:26.663324Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:26.669531Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:26.669637Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:26.674792Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27598 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:54:27.246112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting...
2025-03-31T15:54:27.264534Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T15:54:27.316768Z node 1 :GRPC_CLIENT DEBUG: [51700007c508]{trololo} Connect to grpc://localhost:1329 2025-03-31T15:54:27.321567Z node 1 :GRPC_CLIENT DEBUG: [51700007c508]{trololo} Request AuthenticateRequest { iam_token: "**** (717F937C)" } 2025-03-31T15:54:27.351471Z node 1 :GRPC_CLIENT DEBUG: [51700007c508]{trololo} Response AuthenticateResponse { subject { user_account { id: "1234" } } }
|66.3%| [TA] $(B)/ydb/services/cms/ut/test-results/unittest/{meta.json ... results_accumulator.log}
>> THiveTest::TestHiveBalancerWithPreferredDC3 [GOOD]
>> THiveTest::TestHiveBalancerWithSystemTablets
>> ObjectDistribution::TestManyIrrelevantNodes [GOOD]
>> Sequencer::Basic1 [GOOD]
>> StoragePool::TestDistributionRandomProbability
------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> TUserAccountServiceTest::Get [GOOD]
Test command err: 2025-03-31T15:54:25.751664Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488002603064919181:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:54:25.752055Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001b5a/r3tmp/tmpT9dKb7/pdisk_1.dat 2025-03-31T15:54:26.663918Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:54:26.685935Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:26.686054Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:26.696229Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:31913 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:54:28.458778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting...
>> TAccessServiceTest::Authenticate [GOOD] >> DstCreator::Basic ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> FolderServiceTest::TFolderService [GOOD] Test command err: 2025-03-31T15:54:25.036378Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488002601099998302:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:54:25.036422Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001b3b/r3tmp/tmp8mlRKG/pdisk_1.dat 2025-03-31T15:54:26.559389Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:26.670083Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:26.670214Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:26.674791Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:54:26.790849Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:23806 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:54:28.264220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T15:54:28.384320Z node 1 :GRPC_CLIENT DEBUG: [51700007c508] Connect to grpc://localhost:3416 2025-03-31T15:54:28.444696Z node 1 :GRPC_CLIENT DEBUG: [51700007c508] Request ResolveFoldersRequest { folder_ids: "i_am_not_exists" } 2025-03-31T15:54:28.954860Z node 1 :GRPC_CLIENT DEBUG: [51700007c508] Status 14 failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:3416: Failed to connect to remote host: Connection refused 2025-03-31T15:54:28.982058Z node 1 :GRPC_CLIENT DEBUG: [51700007c508] Request ResolveFoldersRequest { folder_ids: "i_am_not_exists" } 2025-03-31T15:54:29.012838Z node 1 :GRPC_CLIENT DEBUG: [51700007c508] Status 14 failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:3416: Failed to connect to remote host: Connection refused 2025-03-31T15:54:30.018213Z node 1 :GRPC_CLIENT DEBUG: [51700007c508] Request ResolveFoldersRequest { folder_ids: "i_am_not_exists" } 2025-03-31T15:54:30.039480Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488002601099998302:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:54:30.039549Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:54:30.104422Z node 1 :GRPC_CLIENT DEBUG: [51700007c508] Status 5 Not Found 2025-03-31T15:54:30.106440Z node 1 :GRPC_CLIENT DEBUG: [51700007c508] Request ResolveFoldersRequest { folder_ids: "i_am_exists" } 2025-03-31T15:54:30.132064Z node 1 :GRPC_CLIENT DEBUG: [51700007c508] Response ResolveFoldersResponse { resolved_folders { cloud_id: "response_cloud_id" } } >> FolderServiceTest::TFolderServiceAdapter [GOOD] >> THiveTest::TestUpdateTabletsObjectUpdatesMetrics [GOOD] >> THiveTest::TestRestartTablets >> DstCreator::ExistingDst ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> TAccessServiceTest::Authenticate [GOOD] Test command err: 2025-03-31T15:54:25.805354Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488002603853044535:2130];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:54:25.820877Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001b44/r3tmp/tmp9hSffP/pdisk_1.dat 2025-03-31T15:54:27.371543Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:27.521977Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:27.522179Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:27.536900Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:54:27.643232Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:2652 WaitRootIsUp 'Root'... 
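The TFolderService run above is a compact demonstration of the client's error handling: gRPC status 14 (UNAVAILABLE, "Connection refused") is retried with growing delays, while status 5 (Not Found) and status 7 (Permission Denied) are final and returned to the caller at once. A sketch of that pattern, assuming a callable that issues one gRPC request; it illustrates the behavior visible in the log rather than reproducing the real client code.

// Retry only transient transport failures; pass every other status through.
#include <grpcpp/support/status.h>
#include <chrono>
#include <thread>

template <typename Call>
grpc::Status CallWithRetry(Call call, int maxAttempts = 5) {
    auto delay = std::chrono::milliseconds(50);
    grpc::Status status;
    for (int attempt = 0; attempt < maxAttempts; ++attempt) {
        status = call();
        if (status.error_code() != grpc::StatusCode::UNAVAILABLE) {
            break;  // OK, NOT_FOUND, PERMISSION_DENIED, ... are final
        }
        std::this_thread::sleep_for(delay);
        delay *= 2;  // simple exponential backoff between reconnects
    }
    return status;
}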
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:54:29.064429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:54:29.205309Z node 1 :GRPC_CLIENT DEBUG: [51700007c508] Connect to grpc://localhost:28661 2025-03-31T15:54:29.207354Z node 1 :GRPC_CLIENT DEBUG: [51700007c508] Request AuthenticateRequest { iam_token: "**** (047D44F1)" } 2025-03-31T15:54:29.252648Z node 1 :GRPC_CLIENT DEBUG: [51700007c508] Status 7 Permission Denied 2025-03-31T15:54:29.253479Z node 1 :GRPC_CLIENT DEBUG: [51700007c508] Request AuthenticateRequest { iam_token: "**** (342498C1)" } 2025-03-31T15:54:29.275357Z node 1 :GRPC_CLIENT DEBUG: [51700007c508] Response AuthenticateResponse { subject { user_account { id: "1234" } } } >> BasicUsage::MaxByteSizeEqualZero [GOOD] >> BasicUsage::TSimpleWriteSession_AutoSeqNo_BasicUsage >> DstCreator::WithSyncIndex |66.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant |66.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> FolderServiceTest::TFolderServiceAdapter [GOOD] Test command err: 2025-03-31T15:54:25.405880Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488002601325716202:2276];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:54:25.405935Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001b42/r3tmp/tmpqlNl4h/pdisk_1.dat 2025-03-31T15:54:26.679259Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:26.707893Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:26.708066Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:26.710233Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:54:26.856384Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:7694 
WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:54:27.985648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:54:28.152185Z node 1 :GRPC_CLIENT DEBUG: [51700004fb08] Connect to grpc://localhost:2550 2025-03-31T15:54:28.153082Z node 1 :GRPC_CLIENT DEBUG: [51700004fb08] Request ListFoldersRequest { id: "i_am_exists" } 2025-03-31T15:54:28.275983Z node 1 :GRPC_CLIENT DEBUG: [51700004fb08] Response ListFoldersResponse { result { cloud_id: "cloud_from_old_service" } } 2025-03-31T15:54:28.277978Z node 1 :GRPC_CLIENT DEBUG: [517000064088] Connect to grpc://localhost:12212 2025-03-31T15:54:28.278790Z node 1 :GRPC_CLIENT DEBUG: [517000064088] Request ResolveFoldersRequest { folder_ids: "i_am_exists" } 2025-03-31T15:54:28.289017Z node 1 :GRPC_CLIENT DEBUG: [517000064088] Response ResolveFoldersResponse { resolved_folders { cloud_id: "cloud_from_new_service" } } 2025-03-31T15:54:28.299597Z node 1 :GRPC_CLIENT DEBUG: [517000064088] Request ResolveFoldersRequest { folder_ids: "i_am_not_exists" } 2025-03-31T15:54:28.304820Z node 1 :GRPC_CLIENT DEBUG: [517000064088] Status 5 Not Found 2025-03-31T15:54:28.305457Z node 1 :GRPC_CLIENT DEBUG: [51700004fb08] Request ListFoldersRequest { id: "i_am_not_exists" } 2025-03-31T15:54:28.307497Z node 1 :GRPC_CLIENT DEBUG: [51700004fb08] Status 5 Not Found |66.4%| [TA] {RESULT} $(B)/ydb/services/cms/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |66.4%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant >> THiveTest::TestRestartTablets [GOOD] >> THiveTest::TestServerlessComputeResourcesMode >> BasicUsage::WriteAndReadSomeMessagesWithAsyncCompression [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression >> DstCreator::NonExistentSrc [GOOD] >> DstCreator::KeyColumnsSizeMismatch |66.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest >> DstCreator::WithIntermediateDir >> PersQueueSdkReadSessionTest::SpecifyClustersExplicitly [GOOD] >> PersQueueSdkReadSessionTest::StopResumeReadingData >> DstCreator::GlobalConsistency [GOOD] >> DstCreator::KeyColumnNameMismatch >> THiveTest::TestHiveBalancerDifferentResources [GOOD] >> THiveTest::TestFollowersCrossDC_Easy >> DstCreator::WithSyncIndexAndIntermediateDir [GOOD] >> DstCreator::ReplicationModeMismatch >> THiveTest::TestHiveBalancerWithSystemTablets [GOOD] >> THiveTest::TestHiveBalancerWithFollowers >> IcbAsActorTests::TestHttpGetResponse >> IcbAsActorTests::TestHttpGetResponse [GOOD] >> GenericFederatedQuery::ClickHouseManagedSelectAll >> TServiceAccountServiceTest::IssueToken >> TServiceAccountServiceTest::IssueToken [GOOD] |66.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::WithSyncIndexAndIntermediateDir [GOOD] Test command err: 2025-03-31T15:54:29.763809Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488002618353114904:2183];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:54:29.764189Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00136d/r3tmp/tmpW0txKj/pdisk_1.dat 2025-03-31T15:54:34.846645Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488002618353114904:2183];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:54:34.846684Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:54:36.071337Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:36.071594Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:37.347220Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:37.347247Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:37.352714Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:37.352830Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Disconnected -> Connecting 2025-03-31T15:54:37.354204Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:54:37.446890Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:18161 TServer::EnableGrpc on GrpcPort 18103, node 1 2025-03-31T15:54:42.313400Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:54:42.313417Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:54:42.313435Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:54:42.313557Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18161 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:54:44.986182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:54:45.004396Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T15:54:45.008042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743436485637 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyCo... 
(TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743436485042 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743436485637 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: ".sys" PathId: 18446... (TRUNCATED) 2025-03-31T15:54:46.079530Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-31T15:54:46.079669Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-31T15:54:46.079682Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-03-31T15:54:46.087220Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-03-31T15:54:50.213899Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743436485637, tx_id: 281474976710658 } } } 2025-03-31T15:54:50.215456Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-03-31T15:54:50.293001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:1, at schemeshard: 72057594046644480 2025-03-31T15:54:50.310101Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2025-03-31T15:54:50.310332Z node 1 :REPLICATION_CONTROLLER DEBUG: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 2025-03-31T15:54:50.721198Z node 1 
:REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2025-03-31T15:54:50.731252Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dir/Replicated" PathDescription { Self { Name: "Replicated" PathId: 6 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743436490712 ParentPathId: 5 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Replicated" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase ... 
943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186224037905 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 7 PathsLimit: 10000 ShardsInside: 19 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 8 PathOwnerId: 72057594046644480 } 2025-03-31T15:54:50.811632Z node 1 :REPLICATION_CONTROLLER INFO: [DstCreator][rid 1][tid 2] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 8] TClient::Ls request: /Root/Dir/Replicated/index_by_value TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "index_by_value" PathId: 7 SchemeshardId: 72057594046644480 PathType: EPathTypeTableIndex CreateFinished: 
true CreateTxId: 281474976710659 CreateStep: 1743436490712 ParentPathId: 6 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 8 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743436490712 ParentPathId: 7 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { ... (TRUNCATED) TClient::Ls request: /Root/Dir/Replicated/index_by_value/indexImplTable TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "indexImplTable" PathId: 8 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743436490712 ParentPathId: 7 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } ... (TRUNCATED) Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "indexImplTable" PathId: 8 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743436490712 ParentPathId: 7 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 
10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186224037905 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 7 PathsLimit: 10000 ShardsInside: 19 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__async_replica" Value: "true" } } Path: "/Root/Dir/Replicated/index_by_value/indexImplTable" |66.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest >> IcbAsActorTests::TestHttpGetResponse [GOOD] >> DstCreator::SameOwner [GOOD] >> DstCreator::SamePartitionCount >> ReadSessionImplTest::DataReceivedCallbackReal [GOOD] >> DstCreator::ColumnsSizeMismatch [GOOD] >> DstCreator::ColumnTypeMismatch >> THiveTest::TestServerlessComputeResourcesMode [GOOD] >> THiveTest::TestResetServerlessComputeResourcesMode >> KqpSnapshotRead::ReadOnlyTxWithIndexCommitsOnConcurrentWrite-withSink [GOOD] >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite1+withSink ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> TServiceAccountServiceTest::Get [GOOD] Test command 
err: 2025-03-31T15:54:25.588660Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488002602808815106:2208];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:54:25.589079Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001b2b/r3tmp/tmpJgkCjl/pdisk_1.dat 2025-03-31T15:54:26.270777Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:26.270863Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:26.276147Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:54:26.295228Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:8863 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:54:27.414666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:54:41.641299Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488002672660627144:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:54:41.700703Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001b2b/r3tmp/tmpx7bfPB/pdisk_1.dat 2025-03-31T15:54:44.403249Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:54:44.430366Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:44.501021Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:44.501279Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:44.514853Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21126 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:54:46.306575Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:54:46.647066Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488002672660627144:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:54:46.647218Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> GenericFederatedQuery::YdbManagedSelectConstant >> FolderServiceTest::TFolderServiceTransitional [GOOD] >> DstCreator::Basic [GOOD] >> DstCreator::CannotFindColumn ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::DataReceivedCallbackReal [GOOD] Test command err: 2025-03-31T15:53:52.340296Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.340326Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.340352Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T15:53:52.340724Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-31T15:53:52.349792Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-31T15:53:52.349941Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.350250Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-31T15:53:52.350799Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.350937Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-03-31T15:53:52.351029Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-31T15:53:52.351077Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. 
Number of messages 1, size 3 bytes 2025-03-31T15:53:52.351866Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.351892Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.351930Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T15:53:52.352241Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-31T15:53:52.352759Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-31T15:53:52.352853Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.353051Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-31T15:53:52.353518Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.353627Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-03-31T15:53:52.353726Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-31T15:53:52.353783Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2025-03-31T15:53:52.354755Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.354802Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.354831Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T15:53:52.355127Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-31T15:53:52.355590Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-31T15:53:52.355685Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.355887Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-31T15:53:52.356676Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.356921Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-03-31T15:53:52.357037Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-31T15:53:52.357079Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2025-03-31T15:53:52.358119Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.358164Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.358196Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T15:53:52.358491Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-03-31T15:53:52.358977Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-31T15:53:52.359066Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.359263Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-31T15:53:52.360808Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.361236Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-03-31T15:53:52.361349Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-31T15:53:52.361399Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2025-03-31T15:53:52.362478Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.362525Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.362558Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T15:53:52.362834Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-31T15:53:52.363362Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-31T15:53:52.363459Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.363654Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-31T15:53:52.364101Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.364201Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-03-31T15:53:52.364299Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-31T15:53:52.364340Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2025-03-31T15:53:52.365062Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.365092Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.365125Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T15:53:52.365360Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-31T15:53:52.365961Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-31T15:53:52.366115Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.366311Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-31T15:53:52.366716Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.366811Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-03-31T15:53:52.366907Z :DEBUG: Take Data. Partition 1. 
Read: {0, 0} (1-1) 2025-03-31T15:53:52.366959Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2025-03-31T15:53:52.367925Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.367951Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.367972Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T15:53:52.368266Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-31T15:53:52.368762Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-31T15:53:52.368862Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.369051Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-31T15:53:52.369846Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.370011Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-03-31T15:53:52.370118Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-31T15:53:52.370164Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2025-03-31T15:53:52.371146Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.371177Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.371214Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T15:53:52.371581Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-31T15:53:52.372493Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-31T15:53:52.372582Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.372784Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-31T15:53:52.374419Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.375030Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-03-31T15:53:52.375154Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-31T15:53:52.375197Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. 
Number of messages 1, size 3 bytes 2025-03-31T15:53:52.381961Z :ReadSession INFO: Random seed for debugging is 1743436432381925 2025-03-31T15:53:52.765422Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488002459193221557:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:53:52.765489Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T15:53:52.991363Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488002460905776501:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:53:52.991446Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error ... [/Root] [c6730028-d36ce195-81f70687-61a83336] [dc1] Commit offsets [2, 3). Partition stream id: 1 2025-03-31T15:54:43.632764Z :DEBUG: [/Root] [/Root] [c6730028-d36ce195-81f70687-61a83336] [dc1] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-03-31T15:54:43.636315Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_6358713241488977232_v1 grpc read done: success# 1, data# { commit { offset_ranges { assign_id: 1 start_offset: 2 end_offset: 3 } } } 2025-03-31T15:54:43.636498Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_6358713241488977232_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) committing to position 3 prev 2 end 3 by cookie 4 2025-03-31T15:54:43.639530Z node 2 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-03-31T15:54:43.639565Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-03-31T15:54:43.639658Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user offset is set to 3 (startOffset 0) session shared/user_1_1_6358713241488977232_v1 2025-03-31T15:54:43.639735Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-31T15:54:43.639746Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-31T15:54:43.639757Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-31T15:54:43.639767Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] i0000000000 2025-03-31T15:54:43.639776Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] m0000000000cuser 2025-03-31T15:54:43.639783Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] m0000000000uuser 2025-03-31T15:54:43.639793Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-31T15:54:43.639804Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] =========================== 2025-03-31T15:54:43.639822Z node 2 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2025-03-31T15:54:43.648438Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp for offset 3 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-03-31T15:54:43.648507Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-03-31T15:54:43.648551Z node 2 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 4 2025-03-31T15:54:43.654562Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_6358713241488977232_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { Cookie: 4 } 2025-03-31T15:54:43.654614Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_6358713241488977232_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) commit done to position 3 endOffset 3 with cookie 4 2025-03-31T15:54:43.654851Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_6358713241488977232_v1 replying for commits: assignId# 1, from# 4, to# 4, offset# 3 2025-03-31T15:54:43.685683Z :DEBUG: [/Root] [/Root] [c6730028-d36ce195-81f70687-61a83336] [dc1] Committed response: { offset_ranges { assign_id: 1 start_offset: 2 end_offset: 3 } } 2025-03-31T15:54:43.708606Z :INFO: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|b9323b40-3fa156a7-8941f684-18eabf55_0] Write session will now close 2025-03-31T15:54:43.708678Z :DEBUG: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|b9323b40-3fa156a7-8941f684-18eabf55_0] Write session: aborting 2025-03-31T15:54:43.709064Z :INFO: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|b9323b40-3fa156a7-8941f684-18eabf55_0] Write session: gracefully shut down, all writes complete 2025-03-31T15:54:43.709100Z :DEBUG: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|b9323b40-3fa156a7-8941f684-18eabf55_0] Write session: destroy 2025-03-31T15:54:43.736176Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [1:7488002678236556878:2665] destroyed 2025-03-31T15:54:43.736231Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 
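The StateIdle lines above show what a consumer commit does on the partition actor: the consumer's new read offset becomes a per-consumer marker key (the m0000000000cuser-style strings) that is flushed through CacheProxy as a plain KV write, after which the read proxy acknowledges the commit cookie back to the session. A toy C++ sketch of that mapping; the key format below merely mimics the logged strings and is not a specification.

// Toy model of per-consumer committed offsets, not PERSQUEUE code.
#include <cstdint>
#include <cstdio>
#include <map>
#include <string>

// e.g. partition 0, consumer "user" -> "m0000000000cuser"
std::string ConsumerKey(uint32_t partition, const std::string& consumer) {
    char prefix[16];
    std::snprintf(prefix, sizeof(prefix), "m%010u", (unsigned)partition);
    return std::string(prefix) + "c" + consumer;
}

struct PartitionState {
    std::map<std::string, uint64_t> committed;  // consumer -> offset

    void Commit(uint32_t partition, const std::string& consumer, uint64_t offset) {
        committed[consumer] = offset;
        // In the real system this becomes a KV write ("CacheProxy.
        // Passthrough write request to KV" in the log above).
        std::printf("write %s = %llu\n",
                    ConsumerKey(partition, consumer).c_str(),
                    (unsigned long long)offset);
    }
};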
2025-03-31T15:54:43.734853Z node 1 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: test-message-group-id|b9323b40-3fa156a7-8941f684-18eabf55_0 grpc read done: success: 0 data: 2025-03-31T15:54:43.734874Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message-group-id|b9323b40-3fa156a7-8941f684-18eabf55_0 grpc read failed 2025-03-31T15:54:43.734891Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message-group-id|b9323b40-3fa156a7-8941f684-18eabf55_0 grpc closed 2025-03-31T15:54:43.734901Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message-group-id|b9323b40-3fa156a7-8941f684-18eabf55_0 is DEAD 2025-03-31T15:54:43.735314Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-03-31T15:54:45.110883Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_6358713241488977232_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 5 from offset3 2025-03-31T15:54:53.628425Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_6358713241488977232_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 6 from offset3 2025-03-31T15:54:53.723429Z :INFO: [/Root] [/Root] [c6730028-d36ce195-81f70687-61a83336] Closing read session. Close timeout: 0.000000s 2025-03-31T15:54:53.723498Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:3 2025-03-31T15:54:53.723772Z :INFO: [/Root] [/Root] [c6730028-d36ce195-81f70687-61a83336] Counters: { Errors: 0 CurrentSessionLifetimeMs: 18191 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 24 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-31T15:54:53.724579Z :NOTICE: [/Root] [/Root] [c6730028-d36ce195-81f70687-61a83336] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
<main>: Error: Close with zero timeout " } 2025-03-31T15:54:53.725284Z :DEBUG: [/Root] [/Root] [c6730028-d36ce195-81f70687-61a83336] [dc1] Abort session to cluster 2025-03-31T15:54:53.738305Z :NOTICE: [/Root] [/Root] [c6730028-d36ce195-81f70687-61a83336] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-03-31T15:54:53.739320Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_6358713241488977232_v1 grpc read done: success# 0, data# { } 2025-03-31T15:54:53.743664Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_1_1_6358713241488977232_v1 grpc read failed 2025-03-31T15:54:53.743749Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_1_1_6358713241488977232_v1 closed 2025-03-31T15:54:53.743821Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_1_1_6358713241488977232_v1 is DEAD 2025-03-31T15:54:53.796111Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_1_1_6358713241488977232_v1 2025-03-31T15:54:53.796167Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [1:7488002648171785363:2585] destroyed 2025-03-31T15:54:53.783382Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [1:7488002648171785360:2582] disconnected; active server actors: 1 2025-03-31T15:54:53.800022Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_1_1_6358713241488977232_v1 2025-03-31T15:54:53.783417Z node 1 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [1:7488002648171785360:2582] client user disconnected session shared/user_1_1_6358713241488977232_v1 2025-03-31T15:54:54.679394Z node 1 :KQP_COMPUTE WARN: TxId: 281474976710721, task: 1, CA Id [1:7488002725481197629:2753]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 0 2025-03-31T15:54:54.716013Z node 1 :KQP_COMPUTE WARN: TxId: 281474976710721, task: 1, CA Id [1:7488002725481197629:2753]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-03-31T15:54:54.771831Z node 1 :KQP_COMPUTE WARN: TxId: 281474976710721, task: 1, CA Id [1:7488002725481197629:2753]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-03-31T15:54:54.859287Z node 1 :KQP_COMPUTE WARN: TxId: 281474976710721, task: 1, CA Id [1:7488002725481197629:2753]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-03-31T15:54:54.995772Z node 1 :KQP_COMPUTE WARN: TxId: 281474976710721, task: 1, CA Id [1:7488002725481197629:2753]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-03-31T15:54:55.135427Z node 1 :KQP_COMPUTE WARN: TxId: 281474976710721, task: 1, CA Id [1:7488002725481197629:2753]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-03-31T15:54:55.459205Z node 1 :KQP_COMPUTE WARN: TxId: 281474976710721, task: 1, CA Id [1:7488002725481197629:2753]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-03-31T15:54:55.820404Z node 1 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976710722. Failed to resolve tablet: 72075186224037890 after several retries. 2025-03-31T15:54:55.820730Z node 1 :KQP_EXECUTER WARN: ActorId: [1:7488002729776164992:2754] TxId: 281474976710722. Ctx: { TraceId: 01jqpeke0118a6t6d3ejr18k9p, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjA0YTQyMTUtZDg0YWViMDYtNmIwYzJkODgtNjExN2EzMGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-03-31T15:54:55.821170Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjA0YTQyMTUtZDg0YWViMDYtNmIwYzJkODgtNjExN2EzMGY=, ActorId: [1:7488002725481197652:2754], ActorState: ExecuteState, TraceId: 01jqpeke0118a6t6d3ejr18k9p, Create QueryResponse for error on request, msg: 2025-03-31T15:54:55.825413Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jqpekepzd1cgpfrb4fgkw4e2" } } YdbStatus: UNAVAILABLE ConsumedRu: 474 } 2025-03-31T15:54:55.912747Z node 1 :KQP_COMPUTE WARN: TxId: 281474976710721, task: 1, CA Id [1:7488002725481197629:2753]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> FolderServiceTest::TFolderServiceTransitional [GOOD] Test command err: 2025-03-31T15:54:32.331631Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488002631479870474:2179];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:54:32.343789Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001b0e/r3tmp/tmpJuL6fj/pdisk_1.dat 2025-03-31T15:54:38.303896Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488002631479870474:2179];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:54:38.303942Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:54:40.721135Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:40.721163Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:41.740273Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:41.740392Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:41.741162Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:41.741181Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:41.748644Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:54:41.761721Z node 1 :IMPORT WARN: Table 
profiles were not loaded TClient is connected to server localhost:28862 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:54:45.310428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:54:45.479730Z node 1 :GRPC_CLIENT DEBUG: [51700007c508] Connect to grpc://localhost:10302 2025-03-31T15:54:45.484425Z node 1 :GRPC_CLIENT DEBUG: [51700007c508] Request ListFoldersRequest { id: "i_am_not_exists" } 2025-03-31T15:54:45.679898Z node 1 :GRPC_CLIENT DEBUG: [51700007c508] Status 14 failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:10302: Failed to connect to remote host: Connection refused 2025-03-31T15:54:45.719609Z node 1 :GRPC_CLIENT DEBUG: [51700007c508] Request ListFoldersRequest { id: "i_am_not_exists" } 2025-03-31T15:54:45.723489Z node 1 :GRPC_CLIENT DEBUG: [51700007c508] Status 14 failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:10302: Failed to connect to remote host: Connection refused 2025-03-31T15:54:46.731532Z node 1 :GRPC_CLIENT DEBUG: [51700007c508] Request ListFoldersRequest { id: "i_am_not_exists" } 2025-03-31T15:54:46.763122Z node 1 :GRPC_CLIENT DEBUG: [51700007c508] Status 5 Not Found 2025-03-31T15:54:46.825379Z node 1 :GRPC_CLIENT DEBUG: [51700007c508] Request ListFoldersRequest { id: "i_am_exists" } 2025-03-31T15:54:46.890189Z node 1 :GRPC_CLIENT DEBUG: [51700007c508] Response ListFoldersResponse { result { cloud_id: "response_cloud_id" } } |66.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest |66.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest |66.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/service/ut_transfer_writer/core-tx-replication-service-ut_transfer_writer |66.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_transfer_writer/core-tx-replication-service-ut_transfer_writer |66.4%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_transfer_writer/core-tx-replication-service-ut_transfer_writer |66.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest >> KqpLocks::DifferentKeyUpdate [GOOD] >> KqpLocks::EmptyRange |66.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/fqrun/fqrun |66.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/fqrun/fqrun |66.4%| [LD] {RESULT} $(B)/ydb/tests/tools/fqrun/fqrun |66.5%| [LD] {default-linux-x86_64, release, asan} 
$(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build |66.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build |66.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build >> THiveTest::TestResetServerlessComputeResourcesMode [GOOD] >> THiveTest::TestSkipBadNode |66.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest >> THiveTest::TestFollowersCrossDC_Easy [GOOD] >> THiveTest::TestFollowers_LocalNodeOnly >> DstCreator::KeyColumnsSizeMismatch [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::KeyColumnsSizeMismatch [GOOD] Test command err: 2025-03-31T15:54:34.616276Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488002642037687877:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:54:34.626320Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00136e/r3tmp/tmpbs1j2w/pdisk_1.dat 2025-03-31T15:54:41.387502Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:41.388051Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:41.459239Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:54:41.790721Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488002642037687877:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:54:41.793720Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:54:42.242803Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:42.274908Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:26522 TServer::EnableGrpc on GrpcPort 16370, node 1 2025-03-31T15:54:43.755730Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:54:43.755750Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:54:43.755761Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:54:43.755885Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26522 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:54:45.419337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743436485497 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743436485497 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version... 
(TRUNCATED) 2025-03-31T15:54:45.467913Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-31T15:54:45.468014Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-31T15:54:45.468025Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-03-31T15:54:45.468554Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-03-31T15:54:48.204819Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { status: SCHEME_ERROR, issues: } } 2025-03-31T15:54:48.204864Z node 1 :REPLICATION_CONTROLLER ERROR: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Cannot describe table: status: SCHEME_ERROR, issue: 2025-03-31T15:54:50.260672Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488002710303739102:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:54:50.260713Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00136e/r3tmp/tmpa09GNr/pdisk_1.dat 2025-03-31T15:54:51.851047Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:52.051502Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:54:52.378855Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:52.379289Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:52.389711Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30028 TServer::EnableGrpc on GrpcPort 18983, node 2 2025-03-31T15:54:55.317479Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488002710303739102:2060];send_to=[0:7307199536658146131:7762515]; 
2025-03-31T15:54:55.356832Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:54:56.243526Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:54:56.243733Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:54:56.243741Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:54:56.243828Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30028 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:54:57.065508Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:54:57.095283Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:54:57.397007Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743436497138 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743436497481 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... 
(TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743436497138 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743436497481 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... (TRUNCATED) 2025-03-31T15:54:57.460091Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-31T15:54:57.460192Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-31T15:54:57.460203Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-03-31T15:54:57.460695Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-03-31T15:54:59.810997Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743436497292, tx_id: 281474976710658 } } } 2025-03-31T15:54:59.811644Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-03-31T15:54:59.822054Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges)} 2025-03-31T15:54:59.827033Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: 
"/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743436497481 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnNames: "value" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 
LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 3 PathOwnerId: 72057594046644480 2025-03-31T15:54:59.829616Z node 2 :REPLICATION_CONTROLLER ERROR: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Key columns size mismatch: expected: 1, got: 2 >> DstCreator::ExistingDst [GOOD] >> DstCreator::EmptyReplicationConfig ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> TServiceAccountServiceTest::IssueToken [GOOD] Test command err: 2025-03-31T15:54:30.644620Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488002624122514882:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:54:30.683192Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001b85/r3tmp/tmpaJqeN5/pdisk_1.dat 2025-03-31T15:54:35.631429Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488002624122514882:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:54:35.631494Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:54:37.447510Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:37.447629Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:39.194197Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:54:39.204095Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:39.204355Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:39.317489Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:39.317994Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:39.465854Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 
TClient is connected to server localhost:7115 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:54:42.777290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:54:43.351888Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T15:54:54.508223Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488002728546607262:2289];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001b85/r3tmp/tmp4CEoK8/pdisk_1.dat 2025-03-31T15:54:54.990165Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T15:54:56.704986Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:56.726366Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:54:56.734469Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:56.734546Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:56.737591Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1167 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:54:56.924417Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... >> DstCreator::WithSyncIndex [GOOD] >> AnalyzeColumnshard::AnalyzeTable [GOOD] >> DstCreator::ColumnTypeMismatch [GOOD] >> TSubDomainTest::StartAndStopTenanNode >> DstCreator::KeyColumnNameMismatch [GOOD] >> TSubDomainTest::DeleteTableAndThenForceDeleteSubDomain >> DstCreator::CannotFindColumn [GOOD] >> DstCreator::WithIntermediateDir [GOOD] >> DstCreator::WithAsyncIndex |66.5%| [TA] $(B)/ydb/library/ycloud/impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeTable [GOOD] Test command err: 2025-03-31T15:54:29.104416Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:54:29.104812Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:29.104986Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000e84/r3tmp/tmp2gESnf/pdisk_1.dat 2025-03-31T15:54:29.585384Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30240, node 1 2025-03-31T15:54:31.700241Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:54:31.700310Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:54:31.700346Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:54:31.700890Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T15:54:31.734632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T15:54:32.010401Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:32.011246Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:32.037139Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13137 2025-03-31T15:54:35.108415Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T15:54:47.067391Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-31T15:54:47.206356Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:47.206482Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:47.269561Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T15:54:47.288155Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:54:48.741308Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:48.742749Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:48.753789Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:48.753955Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:48.754154Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:48.754816Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:48.754953Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:48.755293Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:48.755379Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:49.091091Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:49.091433Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:49.107500Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:54:49.652157Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:54:49.900913Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-31T15:54:49.901009Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-31T15:54:49.929778Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-31T15:54:49.930737Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-31T15:54:49.930924Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-31T15:54:49.930971Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-31T15:54:49.931011Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-31T15:54:49.931057Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-31T15:54:49.931125Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-31T15:54:49.931177Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-31T15:54:49.931517Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-31T15:54:50.019937Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-31T15:54:50.020047Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1872:2596], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-31T15:54:50.030100Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1883:2604] 2025-03-31T15:54:50.047513Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1915:2621] 2025-03-31T15:54:50.047601Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1915:2621], schemeshard id = 72075186224037897 2025-03-31T15:54:50.060370Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-31T15:54:50.408486Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-31T15:54:50.408547Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-31T15:54:50.408618Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-31T15:54:50.505039Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-31T15:54:50.565419Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-31T15:54:50.566182Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-31T15:54:51.297167Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-31T15:54:52.044915Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-31T15:54:52.160436Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-31T15:54:55.478081Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2236:3073], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:54:55.478196Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:54:55.517907Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-31T15:54:56.304701Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T15:54:56.304908Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T15:54:56.305164Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T15:54:56.305281Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T15:54:56.305401Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T15:54:56.305559Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T15:54:56.305670Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T15:54:56.305765Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T15:54:56.305891Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T15:54:56.305994Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T15:54:56.306088Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T15:54:56.306227Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T15:54:56.485953Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T15:54:56.486810Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T15:54:56.488003Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T15:54:56.488061Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T15:54:56.492067Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T15:54:56.492130Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T15:54:56.493189Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T15:54:56.493462Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T15:54:56.494536Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T15:54:56.494578Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T15:54:56.494625Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T15:54:56.494664Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T15:54:56.506711Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T15:54:56.507046Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T15:54:56.509176Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T15:54:56.509507Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T15:54:56.512836Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T15:54:56.512906Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T15:54:56.514715Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T15:54:56.515013Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T15:54:56.515790Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T15:54:56.516065Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T15:54:56.788867Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715659; 2025-03-31T15:54:58.283569Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2586:3126], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:54:58.283886Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:54:58.290215Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715660:0, at schemeshard: 72075186224037897 2025-03-31T15:54:58.432438Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715660; 2025-03-31T15:54:59.512797Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2684:3171], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:54:59.513183Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:54:59.521947Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715661:0, at schemeshard: 72075186224037897 2025-03-31T15:54:59.589311Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; waiting actualization: 0/0.000016s FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor with no CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> DstCreator::SamePartitionCount [GOOD] >> TSubDomainTest::LsLs ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::ColumnTypeMismatch [GOOD] Test command err: 2025-03-31T15:54:28.264783Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488002614342781343:2125];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:54:28.264811Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T15:54:29.324872Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488002618637748842:2292];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001380/r3tmp/tmppXuO89/pdisk_1.dat 2025-03-31T15:54:29.458792Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T15:54:31.076739Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:31.077761Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:31.106386Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:31.152889Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:54:32.282495Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:54:32.311694Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:32.726194Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.173897s 2025-03-31T15:54:32.737921Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.186100s 2025-03-31T15:54:36.056702Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488002614342781343:2125];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:54:36.056766Z node 1 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:54:36.056999Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488002618637748842:2292];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:54:36.057045Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:9244 TServer::EnableGrpc on GrpcPort 64352, node 1 2025-03-31T15:54:39.658191Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:54:39.658412Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:54:39.658417Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:54:39.671781Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9244 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:54:45.270042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:54:45.318552Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T15:54:45.321796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:54:46.080030Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
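
For orientation: the DstCreator::ColumnTypeMismatch case whose output this is ends with status# StatusSchemeError, reason# Column type mismatch: name: value, expected: Utf8, got: Uint32 (see the end of this block). A minimal, self-contained C++ sketch of that kind of column-type comparison follows; TColumn and CompareColumnTypes are invented illustration names, not YDB's actual DstCreator API.

    #include <iostream>
    #include <optional>
    #include <string>
    #include <vector>

    // Hypothetical column descriptor; the real check compares column
    // entries from the scheme describe results shown in this log.
    struct TColumn {
        std::string Name;
        std::string Type;
    };

    // Returns an error string shaped like the StatusSchemeError reason
    // logged at the end of this test block, or nullopt when types match.
    std::optional<std::string> CompareColumnTypes(
            const std::vector<TColumn>& expected,
            const std::vector<TColumn>& got) {
        for (const auto& exp : expected) {
            for (const auto& col : got) {
                if (col.Name == exp.Name && col.Type != exp.Type) {
                    return "Column type mismatch: name: " + col.Name +
                           ", expected: " + exp.Type + ", got: " + col.Type;
                }
            }
        }
        return std::nullopt;
    }

    int main() {
        std::vector<TColumn> src = {{"key", "Uint32"}, {"value", "Utf8"}};
        std::vector<TColumn> dst = {{"key", "Uint32"}, {"value", "Uint32"}};
        if (auto err = CompareColumnTypes(src, dst)) {
            std::cout << *err << "\n";  // matches the reason# in the log
        }
    }
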
TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743436485343 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743436486274 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743436485343 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743436486274 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... (TRUNCATED) 2025-03-31T15:54:46.257861Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-31T15:54:46.257954Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-31T15:54:46.257963Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-03-31T15:54:46.259500Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-03-31T15:54:47.246877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T15:54:47.247163Z node 1 :IMPORT WARN: Table profiles 
were not loaded 2025-03-31T15:54:55.864453Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743436485742, tx_id: 281474976710658 } } } 2025-03-31T15:54:55.865905Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-03-31T15:54:55.888829Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges)} 2025-03-31T15:54:55.913559Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743436486274 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "extra" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompact ... le: (empty maybe) 2025-03-31T15:54:58.407720Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:54:58.407826Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6167 WaitRootIsUp 'Root'... 
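
The trace above shows DstCreator proposing CreateTable, receiving Status# StatusAlreadyExists with "path exist, request accepts it", and then describing the existing path to verify its schema. A hedged sketch of that accept-if-exists pattern follows; EStatus and HandlePropose are stand-ins, not the real NKikimrScheme types.

    #include <iostream>
    #include <string>

    // Invented status codes standing in for the scheme-op statuses seen
    // in the trace above; not the real enum.
    enum class EStatus { Accepted, AlreadyExists, Error };

    struct TProposeResult {
        EStatus Status;
        std::string Reason;
    };

    // Accept-if-exists: a create that finds the destination already
    // present is treated as success and verified afterwards, mirroring
    // "path exist, request accepts it" in the log.
    bool HandlePropose(const TProposeResult& res) {
        switch (res.Status) {
            case EStatus::Accepted:
                std::cout << "created, subscribing to tx completion\n";
                return true;
            case EStatus::AlreadyExists:
                std::cout << "exists, describing path to verify schema: "
                          << res.Reason << "\n";
                return true;  // proceed to schema comparison
            case EStatus::Error:
                std::cout << "propose failed: " << res.Reason << "\n";
                return false;
        }
        return false;
    }

    int main() {
        HandlePropose({EStatus::AlreadyExists,
                       "Check failed: path: '/Root/Dst', error: path exist"});
    }
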
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:54:58.748501Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:54:58.755698Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T15:54:58.759567Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:54:58.845735Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743436498797 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743436498930 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743436498797 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743436498930 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... 
(TRUNCATED) 2025-03-31T15:54:58.903241Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-31T15:54:58.903512Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-31T15:54:58.903524Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-03-31T15:54:58.907660Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-03-31T15:55:02.823179Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488002741356737759:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:55:02.823254Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:55:09.366912Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743436498867, tx_id: 281474976710658 } } } 2025-03-31T15:55:09.367332Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-03-31T15:55:09.369658Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges)} 2025-03-31T15:55:09.373896Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743436498930 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } 
ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 82488 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey 
{ SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 3 PathOwnerId: 72057594046644480 2025-03-31T15:55:09.374048Z node 2 :REPLICATION_CONTROLLER ERROR: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Column type mismatch: name: value, expected: Utf8, got: Uint32 >> StoragePool::TestDistributionRandomProbability [GOOD] >> StoragePool::TestDistributionRandomProbabilityWithOverflow >> THiveTest::TestSkipBadNode [GOOD] >> THiveTest::TestStopTenant >> StoragePool::TestDistributionRandomProbabilityWithOverflow [GOOD] >> StoragePool::TestDistributionExactMin >> TraverseDatashard::TraverseTwoTables [GOOD] |66.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut |66.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut |66.5%| [TA] {RESULT} $(B)/ydb/library/ycloud/impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} |66.5%| [LD] {RESULT} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::WithSyncIndex [GOOD] Test command err: 2025-03-31T15:54:47.283749Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488002697504778580:2081];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:54:47.284514Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001337/r3tmp/tmpomEZhY/pdisk_1.dat 2025-03-31T15:54:51.359254Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:53.480674Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:53.481025Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:53.565869Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:54:54.866474Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488002697504778580:2081];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:54:54.888749Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:54:55.688134Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:22462 TServer::EnableGrpc on GrpcPort 2410, node 1 2025-03-31T15:54:56.992478Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:54:56.992514Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:54:56.992527Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:54:56.992686Z node 1 :NET_CLASSIFIER ERROR: 
got bad distributable configuration TClient is connected to server localhost:22462 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:54:57.980581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:54:58.028761Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T15:54:58.045366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743436498706 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyCo... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743436498062 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743436498706 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: ".sys" PathId: 18446... 
(TRUNCATED) 2025-03-31T15:54:58.797894Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-31T15:54:58.798138Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-31T15:54:58.798158Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-03-31T15:54:58.800996Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-03-31T15:55:04.795478Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743436498706, tx_id: 281474976710658 } } } 2025-03-31T15:55:04.797625Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-03-31T15:55:04.819039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-31T15:55:04.849265Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2025-03-31T15:55:04.849289Z node 1 :REPLICATION_CONTROLLER DEBUG: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 2025-03-31T15:55:05.559652Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2025-03-31T15:55:05.590158Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Replicated" PathDescription { Self { Name: "Replicated" PathId: 5 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743436505342 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Replicated" Columns { 
Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBT ... 
oCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186224037905 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 19 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 7 PathOwnerId: 72057594046644480 } 2025-03-31T15:55:05.916262Z node 1 :REPLICATION_CONTROLLER INFO: [DstCreator][rid 1][tid 2] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 7] TClient::Ls request: /Root/Replicated/index_by_value TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "index_by_value" PathId: 6 SchemeshardId: 72057594046644480 PathType: EPathTypeTableIndex 
CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743436505342 ParentPathId: 5 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 7 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743436505342 ParentPathId: 6 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { ... (TRUNCATED) TClient::Ls request: /Root/Replicated/index_by_value/indexImplTable TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "indexImplTable" PathId: 7 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743436505342 ParentPathId: 6 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } ... (TRUNCATED) Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "indexImplTable" PathId: 7 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743436505342 ParentPathId: 6 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" 
ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186224037905 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 19 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__async_replica" Value: "true" } } Path: "/Root/Replicated/index_by_value/indexImplTable" >> TSubDomainTest::CreateDummyTabletsInDifferentDomains |66.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql |66.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql |66.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::KeyColumnNameMismatch [GOOD] Test command err: 2025-03-31T15:54:29.699920Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488002618511080824:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:54:29.699995Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00136a/r3tmp/tmpPNC0zd/pdisk_1.dat 2025-03-31T15:54:32.456196Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:34.263950Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:34.737227Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488002618511080824:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:54:34.744333Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:54:35.495280Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:35.711380Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:54:35.740825Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:35.741058Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:35.787363Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:35.797982Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9707 TServer::EnableGrpc on GrpcPort 25268, node 1 2025-03-31T15:54:42.253239Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:54:42.253258Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:54:42.253265Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:54:42.253380Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9707 WaitRootIsUp 'Root'... 
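
The table_exists.cpp records above (event=undelivered followed later by event=timeout) suggest a bounded existence probe: retry until a deadline, then report a timeout. A generic sketch under that assumption follows; WaitForPath and checkExists are invented names, not the TTableExistsActor implementation.

    #include <chrono>
    #include <functional>
    #include <iostream>
    #include <thread>

    // Retry an existence check until a deadline; on expiry, emit a
    // timeout record in the spirit of event=timeout;error=timeout above.
    bool WaitForPath(const std::function<bool()>& checkExists,
                     std::chrono::milliseconds deadline,
                     std::chrono::milliseconds step) {
        const auto start = std::chrono::steady_clock::now();
        while (std::chrono::steady_clock::now() - start < deadline) {
            if (checkExists()) {
                return true;  // path became visible
            }
            std::this_thread::sleep_for(step);  // undelivered -> retry
        }
        std::cerr << "event=timeout;error=timeout\n";
        return false;
    }

    int main() {
        int attempts = 0;
        WaitForPath([&] { return ++attempts > 3; },  // succeeds on 4th probe
                    std::chrono::milliseconds(100),
                    std::chrono::milliseconds(10));
    }
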
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:54:43.953326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:54:44.016181Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T15:54:44.036482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743436484384 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743436484048 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743436484384 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: ".sys" PathId: 1844... 
(TRUNCATED) 2025-03-31T15:54:44.634537Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-31T15:54:44.634635Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-31T15:54:44.634853Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-03-31T15:54:44.639120Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-03-31T15:54:49.936000Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743436484384, tx_id: 281474976710658 } } } 2025-03-31T15:54:49.937355Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-03-31T15:54:49.981338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-31T15:54:49.986385Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2025-03-31T15:54:49.986608Z node 1 :REPLICATION_CONTROLLER DEBUG: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 TClient::Ls request: /Root/Replicated 2025-03-31T15:54:50.341780Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2025-03-31T15:54:50.341808Z node 1 :REPLICATION_CONTROLLER INFO: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 3] TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Replicated" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743436490313 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: 
false } Table { Name: "Replicated" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "ke... (TRUNCATED) 2025-03-31T15:54:50.487712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T15:54:50.487741Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:54:56.493655Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488002734284961902:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:54:56.493714Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00136a/r3tmp/tmpY3a4oA/pdisk_1.dat 2025-03-31T15:54:57.585565Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:57.606677Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:54:57.656009Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:57.656091Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: D ... : (empty maybe) 2025-03-31T15:54:58.179241Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:54:58.179367Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18577 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:54:58.625608Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:54:58.632843Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T15:54:58.635562Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
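The TRACE/DEBUG/INFO lines above walk the full DstCreator happy path for /Root/Replicated: resolve the root path through the scheme cache (TEvNavigateKeySetResult), fetch table profiles and console config, describe the source table, allocate a TxId, propose ESchemeOpCreateTable, subscribe to the transaction, and finish with "Success: dstPathId# ...". A rough plain-C++ sketch of that stage sequence, reconstructed from the log events only (names are illustrative assumptions, not the actual NKikimr actor code):

#include <iostream>

// Stages inferred from the DstCreator TRACE/DEBUG/INFO lines; illustrative only.
enum class EDstCreatorStage {
    ResolveRootPath,      // TEvNavigateKeySetResult
    GetTableProfiles,     // "Get table profiles" + TEvGetConfigResponse
    DescribeSrcTable,     // TEvDescribeTableResponse
    AllocateTxId,         // TEvAllocateTxIdResult
    ProposeCreateTable,   // TEvModifySchemeTransactionResult: StatusAccepted
    WaitCompletion,       // "Subscribe tx" + TEvNotifyTxCompletionResult
    Done,                 // "Success: dstPathId# ..."
};

const char* ToString(EDstCreatorStage stage) {
    switch (stage) {
        case EDstCreatorStage::ResolveRootPath:    return "ResolveRootPath";
        case EDstCreatorStage::GetTableProfiles:   return "GetTableProfiles";
        case EDstCreatorStage::DescribeSrcTable:   return "DescribeSrcTable";
        case EDstCreatorStage::AllocateTxId:       return "AllocateTxId";
        case EDstCreatorStage::ProposeCreateTable: return "ProposeCreateTable";
        case EDstCreatorStage::WaitCompletion:     return "WaitCompletion";
        case EDstCreatorStage::Done:               return "Done";
    }
    return "Unknown";
}

int main() {
    // Print the stage order observed in the trace above.
    for (int s = 0; s <= static_cast<int>(EDstCreatorStage::Done); ++s) {
        std::cout << s << ": " << ToString(static_cast<EDstCreatorStage>(s)) << '\n';
    }
}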
2025-03-31T15:54:58.686676Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743436498671 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743436498797 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743436498671 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743436498797 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... 
(TRUNCATED) 2025-03-31T15:54:58.764125Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-31T15:54:58.764311Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-31T15:54:58.764326Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-03-31T15:54:58.765211Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-03-31T15:55:01.491694Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488002734284961902:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:55:01.491761Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:55:06.972746Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743436498720, tx_id: 281474976710658 } } } 2025-03-31T15:55:06.973209Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-03-31T15:55:06.994194Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges)} 2025-03-31T15:55:07.034324Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743436498797 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } 
ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey 
{ SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 3 PathOwnerId: 72057594046644480 2025-03-31T15:55:07.035827Z node 2 :REPLICATION_CONTROLLER ERROR: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Key column name mismatch: position: 0, expected: key, got: value ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::CannotFindColumn [GOOD] Test command err: 2025-03-31T15:54:45.769763Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488002689124921115:2203];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:54:45.770310Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001365/r3tmp/tmpzW5VS8/pdisk_1.dat 2025-03-31T15:54:46.415432Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:54:46.420699Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:46.420837Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:46.426198Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1816 TServer::EnableGrpc on GrpcPort 13940, node 1 2025-03-31T15:54:46.687786Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:54:46.687811Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:54:46.687823Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:54:46.687953Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1816 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:54:47.382572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
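The StatusSchemeError just above is the point of this scenario: the propose came back StatusAlreadyExists ("path exist, request accepts it"), so DstCreator described the existing /Root/Dst and validated its scheme against the source. /Root/Dst is keyed on "value" (KeyColumnNames: "value", KeyColumnIds: 2) while the source table is keyed on "key", hence "Key column name mismatch: position: 0, expected: key, got: value". A minimal standalone sketch of that positional comparison (plain C++, illustrative only, not the actual controller code):

#include <cstddef>
#include <iostream>
#include <optional>
#include <string>
#include <vector>

// Positional key-column check implied by the error format
// "Key column name mismatch: position: N, expected: X, got: Y".
std::optional<std::string> CheckKeyColumns(const std::vector<std::string>& expected,
                                           const std::vector<std::string>& got) {
    if (expected.size() != got.size()) {
        return "Key column count mismatch: expected: " + std::to_string(expected.size()) +
               ", got: " + std::to_string(got.size());
    }
    for (std::size_t i = 0; i < expected.size(); ++i) {
        if (expected[i] != got[i]) {
            return "Key column name mismatch: position: " + std::to_string(i) +
                   ", expected: " + expected[i] + ", got: " + got[i];
        }
    }
    return std::nullopt; // key columns are compatible
}

int main() {
    // Source is keyed on "key", the pre-existing Dst on "value" -> mismatch at position 0.
    auto err = CheckKeyColumns({"key"}, {"value"});
    std::cout << (err ? *err : "key columns match") << '\n';
}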
2025-03-31T15:54:47.529652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743436488416 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743436487541 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743436488416 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: ".sys" PathId: 1844... 
(TRUNCATED) 2025-03-31T15:54:49.267052Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-31T15:54:49.267367Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-31T15:54:49.267377Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-03-31T15:54:49.272693Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-03-31T15:54:51.045471Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488002689124921115:2203];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:54:51.045517Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:54:57.425365Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743436488416, tx_id: 281474976710658 } } } 2025-03-31T15:54:57.425662Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-03-31T15:54:57.427030Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-31T15:54:57.428125Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2025-03-31T15:54:57.428151Z node 1 :REPLICATION_CONTROLLER DEBUG: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 2025-03-31T15:54:57.464827Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2025-03-31T15:54:57.464872Z node 1 :REPLICATION_CONTROLLER INFO: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 3] TClient::Ls request: /Root/Replicated TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Replicated" PathId: 3 SchemeshardId: 72057594046644480 PathType: 
EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743436497502 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Replicated" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "ke... (TRUNCATED) test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001365/r3tmp/tmpSFevoX/pdisk_1.dat 2025-03-31T15:54:58.709132Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:54:58.712659Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:58.739592Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:58.739699Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:58.741107Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4148 TServer::EnableGrpc on GrpcPort 18326, node 2 2025-03-31T15:54:59.291150Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:54:59.291183Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:54:59.291196Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:54:59.291309Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4148 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:55:00.155439Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T15:55:00.180856Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T15:55:00.193064Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:55:00.316243Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743436500211 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743436500442 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743436500211 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743436500442 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... 
(TRUNCATED) 2025-03-31T15:55:00.433862Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-31T15:55:00.434220Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-31T15:55:00.434230Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-03-31T15:55:00.434794Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-03-31T15:55:09.491929Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743436500337, tx_id: 281474976715658 } } } 2025-03-31T15:55:09.492175Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-03-31T15:55:09.513021Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges)} 2025-03-31T15:55:09.531396Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743436500442 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value2" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 
InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 3 PathOwnerId: 72057594046644480 2025-03-31T15:55:09.532545Z node 2 :REPLICATION_CONTROLLER 
ERROR: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Cannot find column: name: value >> DstCreator::ReplicationModeMismatch [GOOD] >> DstCreator::ReplicationConsistencyLevelMismatch >> TSubDomainTest::CreateTableInsideAndForceDeleteSubDomain >> THiveTest::TestHiveNoBalancingWithLowResourceUsage [GOOD] >> THiveTest::TestLockTabletExecution >> TSubDomainTest::CreateTablet >> TSubDomainTest::UserAttributes >> TSubDomainTest::Boot ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::SamePartitionCount [GOOD] Test command err: 2025-03-31T15:54:43.732713Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488002679965137995:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:54:43.732889Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001368/r3tmp/tmpuvhIMg/pdisk_1.dat 2025-03-31T15:54:45.055216Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:45.305753Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:54:45.314033Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:45.314131Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:45.320831Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2689 TServer::EnableGrpc on GrpcPort 7732, node 1 2025-03-31T15:54:46.023792Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:54:46.023819Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:54:46.023827Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:54:46.023947Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2689 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
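The ERROR line at the start of this chunk closes out DstCreator::CannotFindColumn: the pre-existing /Root/Dst was created with columns "key" and "value2", so the source column "value" has no match by name and DstCreator fails with StatusSchemeError, reason# "Cannot find column: name: value". A minimal sketch of that by-name lookup (plain C++, illustrative only, not the actual controller code):

#include <iostream>
#include <optional>
#include <string>
#include <unordered_set>
#include <vector>

// By-name column lookup implied by "Cannot find column: name: ...".
std::optional<std::string> FindMissingColumn(const std::vector<std::string>& srcColumns,
                                             const std::unordered_set<std::string>& dstColumns) {
    for (const auto& name : srcColumns) {
        if (dstColumns.count(name) == 0) {
            return "Cannot find column: name: " + name;
        }
    }
    return std::nullopt; // every source column exists in the destination
}

int main() {
    // Dst has {key, value2}; the source column "value" is absent.
    auto err = FindMissingColumn({"key", "value"}, {"key", "value2"});
    std::cout << (err ? *err : "all columns found") << '\n';
}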
2025-03-31T15:54:46.470483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:54:46.492349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T15:54:46.497935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743436486610 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743436486533 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "user@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743436486610 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: ".sys" PathId: 1844... 
(TRUNCATED) 2025-03-31T15:54:46.637466Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-31T15:54:46.637584Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-31T15:54:46.637600Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-03-31T15:54:46.638017Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-03-31T15:54:48.733783Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488002679965137995:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:54:48.733844Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:54:56.267925Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743436486610, tx_id: 281474976710659 } } } 2025-03-31T15:54:56.269453Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-03-31T15:54:56.309435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T15:54:56.313883Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710660} 2025-03-31T15:54:56.313897Z node 1 :REPLICATION_CONTROLLER DEBUG: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710660 2025-03-31T15:54:56.514403Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710660 TClient::Ls request: /Root/Replicated 2025-03-31T15:54:56.527199Z node 1 :REPLICATION_CONTROLLER INFO: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 3] TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Replicated" PathId: 3 SchemeshardId: 72057594046644480 PathType: 
EPathTypeTable CreateFinished: true CreateTxId: 281474976710660 CreateStep: 1743436496508 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "user@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Replicated" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "ke... (TRUNCATED) 2025-03-31T15:54:58.177333Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488002744698301754:2206];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:54:58.434520Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001368/r3tmp/tmpXXeYRu/pdisk_1.dat 2025-03-31T15:54:58.575181Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:54:58.585690Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:58.585779Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:58.588095Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4120 TServer::EnableGrpc on GrpcPort 7662, node 2 2025-03-31T15:54:58.867895Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:54:58.867918Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:54:58.867925Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:54:58.868036Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4120 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:54:59.665702Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T15:54:59.707807Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1743436499910 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743436499728 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1743436499910 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: ".sys" PathId: 1844... 
(TRUNCATED) 2025-03-31T15:54:59.896984Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-31T15:54:59.897138Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-31T15:54:59.897150Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-03-31T15:54:59.897879Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-03-31T15:55:03.135350Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488002744698301754:2206];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:55:03.135400Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:55:10.027328Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743436499910, tx_id: 281474976715658 } } } 2025-03-31T15:55:10.027975Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-03-31T15:55:10.031006Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-31T15:55:10.042651Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715659} 2025-03-31T15:55:10.042675Z node 2 :REPLICATION_CONTROLLER DEBUG: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976715659 TClient::Ls request: /Root/Table 2025-03-31T15:55:10.394506Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976715659 2025-03-31T15:55:10.394533Z node 2 :REPLICATION_CONTROLLER INFO: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 3] TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: 
EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1743436499910 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... (TRUNCATED) TClient::Ls request: /Root/Replicated TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Replicated" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743436510396 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Replicated" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "ke... (TRUNCATED) >> THiveTest::TestFollowers_LocalNodeOnly [GOOD] >> THiveTest::TestFollowersCrossDC_Tight >> TSubDomainTest::FailIfAffectedSetNotInterior ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseDatashard::TraverseTwoTables [GOOD] Test command err: 2025-03-31T15:54:34.053089Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:54:34.053373Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T15:54:34.053445Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000f72/r3tmp/tmpqjtnVK/pdisk_1.dat 2025-03-31T15:54:37.792851Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18894, node 1 2025-03-31T15:54:41.545316Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:54:41.545548Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:54:41.545769Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:54:41.604366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T15:54:41.616864Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T15:54:41.807170Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:41.808176Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:41.844038Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9419 2025-03-31T15:54:45.325425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T15:54:58.831199Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-31T15:54:58.892906Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:58.895992Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:58.926459Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T15:54:58.928438Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:54:59.212939Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:59.213553Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:59.214027Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:59.214149Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:59.214392Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:59.214476Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:59.214555Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:59.214623Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:59.214707Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:59.465617Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:59.465937Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:59.489171Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:54:59.846491Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:54:59.967864Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-31T15:54:59.968328Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-31T15:55:00.044228Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-31T15:55:00.045740Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-31T15:55:00.046328Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-31T15:55:00.046850Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-31T15:55:00.046993Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-31T15:55:00.047205Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-31T15:55:00.047400Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-31T15:55:00.047530Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-31T15:55:00.048768Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-31T15:55:00.094338Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-31T15:55:00.094460Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1872:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-31T15:55:00.101089Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2608] 2025-03-31T15:55:00.109407Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1908:2617] 2025-03-31T15:55:00.110139Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1908:2617], schemeshard id = 72075186224037897 2025-03-31T15:55:00.112892Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-31T15:55:00.137287Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-31T15:55:00.137351Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-31T15:55:00.137437Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-31T15:55:00.154084Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-31T15:55:00.165483Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-31T15:55:00.165646Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-31T15:55:00.467494Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-31T15:55:01.078599Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-31T15:55:01.228584Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-31T15:55:04.793213Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2241:3074], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:55:04.793319Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:55:04.906321Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-31T15:55:06.725312Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2546:3122], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:55:06.725453Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:55:06.726543Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2551:3126]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T15:55:06.726676Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-31T15:55:06.726739Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2553:3128] 2025-03-31T15:55:06.726795Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2553:3128] 2025-03-31T15:55:06.728758Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2554:2996] 2025-03-31T15:55:06.729919Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2553:3128], server id = [2:2554:2996], tablet id = 72075186224037894, status = OK 2025-03-31T15:55:06.730978Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:2554:2996], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-03-31T15:55:06.731458Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-03-31T15:55:06.732026Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-31T15:55:06.733060Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2551:3126], StatRequests.size() = 1 2025-03-31T15:55:06.770414Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2558:3132], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:55:06.770493Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:55:06.770836Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2563:3137], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:55:06.776646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-03-31T15:55:06.991807Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-03-31T15:55:06.991865Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-03-31T15:55:07.131553Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2553:3128], schemeshard count = 1 2025-03-31T15:55:08.037886Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2565:3139], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-03-31T15:55:08.658692Z node 1 :TX_PROXY ERROR: Actor# [1:2687:3213] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:55:08.713703Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2710:3229]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T15:55:08.713999Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-31T15:55:08.714029Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:2710:3229], StatRequests.size() = 1 2025-03-31T15:55:08.991352Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqpeksdmagpjptasbcewa5v7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODE3YTVjNmMtZWMyNWRlNTYtMzY2NzE1ZDktNDUxMzdjZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:55:09.553431Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72075186224037897 2025-03-31T15:55:10.258971Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [1:3059:3297]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T15:55:10.267586Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-03-31T15:55:10.267652Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 3, ReplyToActorId = [1:3059:3297], StatRequests.size() = 1 2025-03-31T15:55:10.318293Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [1:3068:3306]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T15:55:10.318436Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2025-03-31T15:55:10.318463Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 4, ReplyToActorId = [1:3068:3306], StatRequests.size() = 1 2025-03-31T15:55:10.374798Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jqpekx0haaqvhf4pffk9pdcs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2I4ZGU3ODktNTM0YjhkYjctODRlZWVkZGEtZjFmNjFkNmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-31T15:55:10.466845Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:3119:3264]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-03-31T15:55:10.470211Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-31T15:55:10.470265Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-03-31T15:55:10.470685Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-31T15:55:10.470733Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-03-31T15:55:10.471054Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-03-31T15:55:10.487238Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2025-03-31T15:55:10.487516Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 1 2025-03-31T15:55:10.487805Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:3144:3277]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-03-31T15:55:10.490758Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-31T15:55:10.490812Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-03-31T15:55:10.491180Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-31T15:55:10.491293Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 2 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-03-31T15:55:10.491346Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 5] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-03-31T15:55:10.493696Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 2 ], RowsCount[ 0 ] 2025-03-31T15:55:10.493967Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 2 >> TModifyUserTest::ModifyUser |66.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service |66.6%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service |66.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service |66.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut >> THiveTest::TestLockTabletExecutionRebootTimeout [GOOD] >> THiveTest::TestLockTabletExecutionReconnect >> ColumnBuildTest::CancelBuild >> THiveTest::TestStopTenant [GOOD] >> TScaleRecommenderTest::BasicTest |66.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut |66.6%| [LD] {RESULT} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut >> THiveTest::TestLockTabletExecution [GOOD] >> THiveTest::TestLockTabletExecutionBadOwner >> PersQueueSdkReadSessionTest::ReadSessionWithAbort [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithClose >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite1+withSink [GOOD] >> DstCreator::EmptyReplicationConfig [GOOD] |66.6%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/schemeshard/ut_column_build/unittest |66.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> THiveTest::TestDrainWithMaxTabletsScheduled [GOOD] >> THiveTest::TestDownAfterDrain >> TSubDomainTest::LsLs [GOOD] >> TSubDomainTest::LsAltered >> ColumnBuildTest::BuildColumnDoesnotRestoreDeletedRows >> THiveTest::TestLockTabletExecutionBadOwner [GOOD] |66.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_transfer_writer/unittest >> THiveTest::TestLockTabletExecutionDelete >> KqpLocks::EmptyRange [GOOD] >> KqpLocks::EmptyRangeAlreadyBroken >> TraverseDatashard::TraverseTwoTablesTwoServerlessDbs [GOOD] >> TScaleRecommenderTest::BasicTest [GOOD] >> TStorageBalanceTest::TestScenario1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite1+withSink [GOOD] Test command err: Trying to start YDB, gRPC: 24783, MsgBus: 22446 2025-03-31T15:54:27.518203Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488002611956056729:2136];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:54:27.520069Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001298/r3tmp/tmpZusic5/pdisk_1.dat 2025-03-31T15:54:29.128608Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:29.187262Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:54:29.190669Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:29.190728Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:29.197328Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24783, node 1 2025-03-31T15:54:29.288752Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:54:29.289500Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:54:29.289517Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:54:29.289649Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22446 TClient is connected to server localhost:22446 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:54:31.115887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:54:31.345857Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T15:54:31.386837Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:54:32.580664Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488002611956056729:2136];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:54:32.580735Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:54:34.668010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:54:37.754834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:54:38.966069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:54:43.572616Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488002680675535218:2417], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:54:43.573078Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:54:44.036504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T15:54:44.152099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T15:54:44.179961Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T15:54:44.179979Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:54:44.283654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T15:54:44.684763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T15:54:44.773617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T15:54:44.908045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T15:54:44.987350Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488002684970503058:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:54:44.987437Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:54:44.987949Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488002684970503063:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:54:44.991755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T15:54:45.008786Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-31T15:54:45.009043Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488002684970503065:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T15:54:45.115987Z node 1 :TX_PROXY ERROR: Actor# [1:7488002689265470418:3498] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:54:46.637596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T15:54:46.716818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T15:54:46.758418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 5531, MsgBus: 13383 2025-03-31T15:54:58.387583Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488002743951757140:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:54:58.387621Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001298/r3tmp/tmpoBLCeo/pdisk_1.dat 2025-03-31T15:54:58.666130Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:54:58.682190Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:58.682249Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:58.683386Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5531, node 2 2025-03-31T15:54:58.779262Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:54:58.779283Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:54:58.779293Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:54:58.779410Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13383 TClient is connected to server localhost:13383 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:54:59.827870Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:54:59.865432Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:54:59.983231Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:55:00.509116Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:55:00.742729Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:55:03.393880Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488002743951757140:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:55:03.393956Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:55:10.974929Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488002795491366542:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:55:10.975026Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:55:11.049146Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T15:55:11.138959Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T15:55:11.220351Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T15:55:11.277674Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T15:55:11.396843Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T15:55:11.505827Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T15:55:11.606638Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488002799786334360:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:55:11.606704Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:55:11.607282Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488002799786334365:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:55:11.623970Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T15:55:11.675309Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488002799786334367:2482], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T15:55:11.763515Z node 2 :TX_PROXY ERROR: Actor# [2:7488002799786334426:3492] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:55:13.633846Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T15:55:13.633880Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:55:13.824058Z node 2 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976710673; 2025-03-31T15:55:13.837751Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7488002808376269453:2529], Table: `/Root/TwoShard` ([72057594046644480:2:1]), SessionActorId: [2:7488002808376269328:2529]Got LOCKS BROKEN for table `/Root/TwoShard`. ShardID=72075186224037888, Sink=[2:7488002808376269453:2529].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-03-31T15:55:13.838420Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7488002808376269446:2529], SessionActorId: [2:7488002808376269328:2529], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/TwoShard`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[2:7488002808376269328:2529]. isRollback=0 2025-03-31T15:55:13.838777Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NTA1NTljNmMtODhlZjY0NGItMWQxOWNjYmYtOThjNmZjMzU=, ActorId: [2:7488002808376269328:2529], ActorState: ExecuteState, TraceId: 01jqpem0eqcncawxsbw4nmfbq3, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [2:7488002808376269447:2529] from: [2:7488002808376269446:2529] 2025-03-31T15:55:13.838878Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7488002808376269447:2529] TxId: 281474976710673. Ctx: { TraceId: 01jqpem0eqcncawxsbw4nmfbq3, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTA1NTljNmMtODhlZjY0NGItMWQxOWNjYmYtOThjNmZjMzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/TwoShard`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-03-31T15:55:13.840221Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NTA1NTljNmMtODhlZjY0NGItMWQxOWNjYmYtOThjNmZjMzU=, ActorId: [2:7488002808376269328:2529], ActorState: ExecuteState, TraceId: 01jqpem0eqcncawxsbw4nmfbq3, Create QueryResponse for error on request, msg: >> THiveTest::TestLockTabletExecutionReconnect [GOOD] >> THiveTest::TestLockTabletExecutionRebootReconnect >> DstCreator::WithAsyncIndex [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::EmptyReplicationConfig [GOOD] Test command err: 2025-03-31T15:54:47.902652Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488002695826601443:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:54:48.056546Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001362/r3tmp/tmpYhZmrX/pdisk_1.dat 2025-03-31T15:54:52.402720Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:52.827341Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488002695826601443:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:54:52.827394Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:54:55.555539Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:55.555568Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:56.947211Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:56.947242Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:57.411862Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:57.411949Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:57.424605Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:54:57.514395Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:7553 TServer::EnableGrpc on GrpcPort 13411, node 1 2025-03-31T15:54:57.943183Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:54:57.943202Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 
2025-03-31T15:54:57.943209Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:54:57.943305Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7553 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:54:58.378210Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:54:58.394456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T15:54:58.537538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743436498433 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743436498608 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... 
(TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743436498433 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743436498608 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... (TRUNCATED) 2025-03-31T15:54:58.576363Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-31T15:54:58.576474Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-31T15:54:58.576485Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-03-31T15:54:58.576960Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-03-31T15:55:03.715468Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743436498510, tx_id: 281474976710658 } } } 2025-03-31T15:55:03.716776Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-03-31T15:55:03.726416Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges)} 2025-03-31T15:55:03.743750Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: 
"/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743436498608 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } R ... 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:55:10.391281Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:55:10.391287Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:55:10.391385Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9935 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:55:10.648229Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:55:10.655863Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T15:55:10.745800Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:55:10.821827Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743436510697 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743436510907 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743436510697 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743436510907 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... 
(TRUNCATED) 2025-03-31T15:55:10.899436Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-31T15:55:10.899603Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-31T15:55:10.899889Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-03-31T15:55:10.900409Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-03-31T15:55:14.587484Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488002790284735804:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:55:14.587607Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:55:14.596886Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743436510851, tx_id: 281474976715658 } } } 2025-03-31T15:55:14.597140Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-03-31T15:55:14.598444Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges)} 2025-03-31T15:55:14.599430Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743436510907 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } 
ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 
IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046644480 2025-03-31T15:55:14.599600Z node 2 :REPLICATION_CONTROLLER ERROR: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Empty replication config |66.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut |66.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut |66.6%| [LD] {RESULT} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut |66.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk >> StoragePool::TestDistributionExactMin [GOOD] >> StoragePool::TestDistributionExactMinWithOverflow [GOOD] >> StoragePool::TestDistributionRandomMin7p |66.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk |66.7%| [LD] {RESULT} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk >> THiveTest::TestHiveBalancerWithFollowers [GOOD] >> THiveTest::TestHiveBalancerWithLimit >> THiveTest::TestLockTabletExecutionDelete [GOOD] >> THiveTest::TestLockTabletExecutionDeleteReboot >> DstCreator::ReplicationConsistencyLevelMismatch [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::WithAsyncIndex [GOOD] Test command err: 2025-03-31T15:54:53.068599Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488002716389192208:2139];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:54:53.296697Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00134a/r3tmp/tmpY5yx8i/pdisk_1.dat 2025-03-31T15:54:56.766045Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:56.989346Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:56.989479Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:57.010796Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:54:57.192761Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:26515 TServer::EnableGrpc on GrpcPort 20805, node 1 2025-03-31T15:54:57.963778Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488002716389192208:2139];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:54:57.990622Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:54:58.076596Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:54:58.076615Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:54:58.076622Z node 1 :NET_CLASSIFIER WARN: 
failed to initialize from file: (empty maybe) 2025-03-31T15:54:58.076795Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26515 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:54:58.848016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:54:58.870702Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T15:54:58.875698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743436499119 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743436498909 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743436499119 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: ".sys" PathId: 1844... 
(TRUNCATED) 2025-03-31T15:54:59.247610Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-31T15:54:59.247829Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-31T15:54:59.247842Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-03-31T15:54:59.248610Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-03-31T15:55:10.424090Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743436499119, tx_id: 281474976710658 } } } 2025-03-31T15:55:10.424364Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-03-31T15:55:10.436191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:1, at schemeshard: 72057594046644480 2025-03-31T15:55:10.469087Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2025-03-31T15:55:10.469124Z node 1 :REPLICATION_CONTROLLER DEBUG: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 TClient::Ls request: /Root/Dir/Replicated 2025-03-31T15:55:10.565505Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2025-03-31T15:55:10.565528Z node 1 :REPLICATION_CONTROLLER INFO: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 4] TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Replicated" PathId: 4 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743436510571 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: 
false } Table { Name: "Replicated" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "ke... (TRUNCATED) 2025-03-31T15:55:11.375221Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488002799497343320:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:55:11.375262Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00134a/r3tmp/tmpdeIY01/pdisk_1.dat 2025-03-31T15:55:11.580876Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:55:11.604751Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:55:11.611325Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:55:11.616885Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16017 TServer::EnableGrpc on GrpcPort 23163, node 2 2025-03-31T15:55:12.219181Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:55:12.219219Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:55:12.219229Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:55:12.219405Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16017 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:55:12.655453Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:55:12.662667Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T15:55:12.666194Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743436513007 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyCo... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743436512706 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743436513007 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: ".sys" PathId: 18446... (TRUNCATED) 2025-03-31T15:55:13.039971Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-31T15:55:13.040130Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-31T15:55:13.040139Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-03-31T15:55:13.043565Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-03-31T15:55:15.490295Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: 
Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743436513007, tx_id: 281474976710658 } } } 2025-03-31T15:55:15.490600Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-03-31T15:55:15.492145Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-31T15:55:15.493485Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2025-03-31T15:55:15.493499Z node 2 :REPLICATION_CONTROLLER DEBUG: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 TClient::Ls request: /Root/Replicated 2025-03-31T15:55:15.592817Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2025-03-31T15:55:15.592850Z node 2 :REPLICATION_CONTROLLER INFO: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 5] TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Replicated" PathId: 5 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743436515618 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Replicated" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key... (TRUNCATED) >> TSubDomainTest::Boot [GOOD] >> TSubDomainTest::CheckAccessCopyTable >> SplitterBasic::EqualSplitByMaxBytesLimitPerChunk [GOOD] >> SplitterBasic::EqualSplitByMaxRowsLimitPerChunk [GOOD] >> TModifyUserTest::ModifyUser [GOOD] >> TModifyUserTest::ModifyLdapUser >> ColumnBuildTest::AlreadyExists ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseDatashard::TraverseTwoTablesTwoServerlessDbs [GOOD] Test command err: 2025-03-31T15:54:29.557659Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:54:29.558008Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:29.558166Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000f63/r3tmp/tmpG31aas/pdisk_1.dat 2025-03-31T15:54:32.602346Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25322, node 1 2025-03-31T15:54:35.350992Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:54:35.351057Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:54:35.351107Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:54:35.362268Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T15:54:35.440479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T15:54:35.736919Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:35.737247Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:35.777065Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24815 2025-03-31T15:54:39.163528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T15:54:57.986631Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-31T15:54:58.061103Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:58.061235Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:58.108987Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T15:54:58.116235Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:54:58.417197Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:58.417683Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:58.418209Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:58.418359Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:58.418570Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:58.418645Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:58.418719Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:58.418792Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:58.418862Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:58.631438Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:58.631551Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:58.651230Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:54:58.812564Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:54:58.911928Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-31T15:54:58.912024Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-31T15:54:58.969603Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-31T15:54:58.970919Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-31T15:54:58.971178Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-31T15:54:58.971250Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-31T15:54:58.971305Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-31T15:54:58.971366Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-31T15:54:58.971437Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-31T15:54:58.971496Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-31T15:54:58.971969Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-31T15:54:59.023744Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-31T15:54:59.023888Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1872:2596], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-31T15:54:59.040584Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1883:2604] 2025-03-31T15:54:59.046041Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1914:2620] 2025-03-31T15:54:59.046941Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1914:2620], schemeshard id = 72075186224037897 2025-03-31T15:54:59.052049Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-03-31T15:54:59.108263Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-31T15:54:59.108325Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-31T15:54:59.108398Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-03-31T15:54:59.141801Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-31T15:54:59.154880Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-31T15:54:59.161208Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-31T15:54:59.472549Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-31T15:54:59.813554Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-31T15:54:59.906766Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-31T15:55:01.149379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-31T15:55:03.315854Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:55:03.682511Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2025-03-31T15:55:03.683041Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037899 2025-03-31T15:55:03.700768Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2590:2942], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037899 2025-03-31T15:55:03.704674Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2592:2944] 2025-03-31T15:55:03.746046Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2592:2944], schemeshard id = 72075186224037899 2025-03-31T15:55:05.645396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-31T15:55:07.187798Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:55:08.060244Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037905 2025-03-31T15:55:08.060771Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037905 2025-03-31T15:55:08.061556Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:3079:3148], at schemeshard: 72075186224037905, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037905 2025-03-31T15:55:08.084072Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:3080:3149] 2025-03-31T15:55:08.086032Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:3080:3149], schemeshard id = 72075186224037905 2025-03-31T15:55:10.924012Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3205:3412], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:55:10.924289Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:55:10.943887Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72075186224037899 2025-03-31T15:55:12.333720Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3514:3461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:55:12.334293Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:55:12.346466Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:3519:3465]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T15:55:12.347360Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-31T15:55:12.347704Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2025-03-31T15:55:12.348122Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:3522:3468] 2025-03-31T15:55:12.348391Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:3522:3468] 2025-03-31T15:55:12.349918Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:3523:3391] 2025-03-31T15:55:12.350567Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:3522:3468], server id = [2:3523:3391], tablet id = 72075186224037894, status = OK 2025-03-31T15:55:12.350935Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:3523:3391], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-03-31T15:55:12.351019Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-03-31T15:55:12.358376Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-31T15:55:12.358603Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:3519:3465], StatRequests.size() = 1 2025-03-31T15:55:12.427803Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3527:3472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:55:12.428083Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:55:12.428718Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3532:3477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:55:12.437242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715664:3, at schemeshard: 72057594046644480 2025-03-31T15:55:12.767688Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-03-31T15:55:12.767945Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-03-31T15:55:12.841921Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:3522:3468], schemeshard count = 1 2025-03-31T15:55:13.201953Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:3534:3479], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715664 completed, doublechecking } 2025-03-31T15:55:13.446524Z node 1 :TX_PROXY ERROR: Actor# [1:3660:3554] txid# 281474976715665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:55:13.484249Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:3683:3570]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T15:55:13.484769Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-31T15:55:13.484812Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:3683:3570], StatRequests.size() = 1 2025-03-31T15:55:13.590021Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jqpekyne8wt9035bbddsp6c7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmU0N2E2YzMtNjA0ZTFiMmYtZTc0ZTMxMTktMWYxODcwMDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:55:13.720831Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72075186224037905 2025-03-31T15:55:14.599946Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [1:4041:3637]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T15:55:14.600245Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-03-31T15:55:14.600910Z node 2 :STATISTICS DEBUG: [72075186224037894] EvRequestStats, node id = 1, schemeshard count = 1, urgent = 0 2025-03-31T15:55:14.601078Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-03-31T15:55:14.601545Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-31T15:55:14.601668Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 3, ReplyToActorId = [1:4041:3637], StatRequests.size() = 1 2025-03-31T15:55:14.634618Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [1:4050:3646]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T15:55:14.634858Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2025-03-31T15:55:14.634907Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 4, ReplyToActorId = [1:4050:3646], StatRequests.size() = 1 2025-03-31T15:55:14.686970Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jqpem17y87hjagbn4jpr5ddj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTE3ZWNhYzQtMjljZGQ5YzktM2U4MGUzZjItZDgzZjFiNTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-31T15:55:14.743465Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:4094:3655]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-03-31T15:55:14.746768Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-31T15:55:14.746821Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-03-31T15:55:14.747390Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-31T15:55:14.747438Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2025-03-31T15:55:14.747490Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 2] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-03-31T15:55:14.763517Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2025-03-31T15:55:14.763782Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 1 2025-03-31T15:55:14.764292Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4119:3668]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-03-31T15:55:14.767709Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-31T15:55:14.767771Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-03-31T15:55:14.767995Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-31T15:55:14.768043Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 2 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2025-03-31T15:55:14.768113Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037905, LocalPathId: 2] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-03-31T15:55:14.770946Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 2 ], RowsCount[ 0 ] 2025-03-31T15:55:14.771214Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 2 >> THiveTest::TestFollowersCrossDC_Tight [GOOD] >> THiveTest::TestFollowersCrossDC_MovingLeader >> ColumnBuildTest::BaseCase >> THiveTest::TestLockTabletExecutionRebootReconnect [GOOD] >> THiveTest::TestLockTabletExecutionReconnectExpire |66.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> SplitterBasic::EqualSplitByMaxRowsLimitPerChunk [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::ReplicationConsistencyLevelMismatch [GOOD] Test command err: 2025-03-31T15:54:56.895139Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488002734285090610:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:54:56.896614Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00135c/r3tmp/tmpUXeFvo/pdisk_1.dat 2025-03-31T15:54:58.135251Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:58.144145Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:58.144396Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:58.156127Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:54:58.229284Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:27758 TServer::EnableGrpc on GrpcPort 28311, node 1 2025-03-31T15:54:58.715714Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:54:58.715733Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:54:58.715740Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:54:58.715843Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27758 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:54:59.540958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:54:59.631780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:55:00.098244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743436499644 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743436500204 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743436499644 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743436500204 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... (TRUNCATED) 2025-03-31T15:55:00.190971Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-31T15:55:00.191078Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-31T15:55:00.191107Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-03-31T15:55:00.195197Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-03-31T15:55:01.883644Z node 1 :METADATA_PROVIDER ERROR: 
fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488002734285090610:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:55:01.883691Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:55:11.184405Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743436499896, tx_id: 281474976710658 } } } 2025-03-31T15:55:11.185627Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-03-31T15:55:11.190037Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges)} 2025-03-31T15:55:11.247464Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743436500204 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false 
BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_NONE ConsistencyLevel: CONSISTENCY_LEVEL_UNKNOWN } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTim ... 68897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29715 TServer::EnableGrpc on GrpcPort 11577, node 2 2025-03-31T15:55:13.082258Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:55:13.082292Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:55:13.082300Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:55:13.082432Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29715 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:55:13.998049Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:55:14.014354Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T15:55:14.018716Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-03-31T15:55:14.117352Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743436514050 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743436514197 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743436514050 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743436514197 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... 
(TRUNCATED) 2025-03-31T15:55:14.167741Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-31T15:55:14.167942Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-31T15:55:14.167957Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-03-31T15:55:14.168443Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-03-31T15:55:16.746872Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743436514148, tx_id: 281474976715658 } } } 2025-03-31T15:55:16.747263Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-03-31T15:55:16.748712Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges)} 2025-03-31T15:55:16.749713Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743436514197 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 
InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_GLOBAL } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 3 PathOwnerId: 72057594046644480 2025-03-31T15:55:16.749892Z node 2 :REPLICATION_CONTROLLER 
ERROR: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Replication consistency level mismatch: expected: CONSISTENCY_LEVEL_ROW, got: 1
>> BasicUsage::TSimpleWriteSession_AutoSeqNo_BasicUsage [GOOD]
>> BasicUsage::TWriteSession_AutoBatching [GOOD]
>> BasicUsage::TWriteSession_BatchingProducesContinueTokens [GOOD]
>> BasicUsage::BrokenCredentialsProvider
>> THiveTest::TestDownAfterDrain [GOOD]
>> THiveTest::TestCreateTabletsWithRaceForStoragePoolsKIKIMR_9659
|66.7%| [TA] $(B)/ydb/core/tx/replication/controller/ut_dst_creator/test-results/unittest/{meta.json ... results_accumulator.log}
>> TSubDomainTest::UserAttributes [GOOD]
>> TSubDomainTest::UserAttributesApplyIf
>> TSubDomainTest::LsAltered [GOOD]
>> THiveTest::TestLockTabletExecutionDeleteReboot [GOOD]
>> THiveTest::TestLockTabletExecutionBadUnlock
>> TTxDataShardMiniKQL::MemoryUsageImmediateSmallTx
>> ColumnBuildTest::CancelBuild [GOOD]
>> TSubDomainTest::DeleteTableAndThenForceDeleteSubDomain [GOOD]
>> TSubDomainTest::DatashardRunAtOtherNodeWhenOneNodeIsStopped
>> THiveTest::TestLockTabletExecutionReconnectExpire [GOOD]
>> THiveTest::TestLockTabletExecutionStealLock
|66.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/executer_actor/ut/unittest
>> THiveTest::TestHiveBalancerWithLimit [GOOD]
>> THiveTest::TestHiveBalancerIgnoreTablet
>> GenericFederatedQuery::YdbManagedSelectConstant [GOOD]
>> TTxDataShardMiniKQL::ReadConstant
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::CancelBuild [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141]
2025-03-31T15:55:14.682801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-03-31T15:55:14.682918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-31T15:55:14.682967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-03-31T15:55:14.683001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-03-31T15:55:14.683044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-03-31T15:55:14.683112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-03-31T15:55:14.683206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-31T15:55:14.683281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-03-31T15:55:14.683590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-03-31T15:55:14.761189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-31T15:55:14.761253Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:55:14.766000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:55:14.766077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T15:55:14.766214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T15:55:14.769850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T15:55:14.770037Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T15:55:14.770486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T15:55:14.770713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:55:14.772340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:55:14.773384Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:55:14.773444Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:55:14.773599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T15:55:14.773655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:55:14.773695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T15:55:14.773816Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T15:55:14.779078Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T15:55:14.916349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:55:14.916610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:55:14.916811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T15:55:14.917029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T15:55:14.917075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:55:14.924174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T15:55:14.924372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 
2025-03-31T15:55:14.924648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:55:14.924735Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T15:55:14.924780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T15:55:14.924817Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T15:55:14.930205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:55:14.930304Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:55:14.930369Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T15:55:14.933953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:55:14.934034Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:55:14.934094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:55:14.934184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T15:55:14.938245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:55:14.944800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T15:55:14.945124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T15:55:14.946495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:55:14.946684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:55:14.946751Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:55:14.947120Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T15:55:14.947189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:55:14.947410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:55:14.947507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no 
IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T15:55:14.955681Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:55:14.955795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:55:14.956067Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:55:14.956155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T15:55:14.956495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:55:14.956563Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T15:55:14.956682Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:55:14.956720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:55:14.956768Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:55:14.956822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:55:14.956874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T15:55:14.956926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:55:14.956966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T15:55:14.957010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T15:55:14.957102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:55:14.957148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T15:55:14.957208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T15:55:14.959976Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:55:14.960139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:55:14.960198Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-31T15:55:21.271306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710761:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710761 msg type: 269090816 2025-03-31T15:55:21.271426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710761, partId: 4294967295, tablet: 72057594046316545 2025-03-31T15:55:21.271596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710761, at schemeshard: 72057594046678944 2025-03-31T15:55:21.271632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2025-03-31T15:55:21.271665Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710761, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 281474976710761 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710761 at step: 5000007 2025-03-31T15:55:21.271959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000007, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:55:21.272052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710761 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000007 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:55:21.272108Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropLock TPropose opId# 281474976710761:0 HandleReply TEvOperationPlan: step# 5000007 2025-03-31T15:55:21.272161Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710761:0 128 -> 240 2025-03-31T15:55:21.273958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710761:0, at schemeshard: 72057594046678944 2025-03-31T15:55:21.274017Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710761:0 ProgressState 2025-03-31T15:55:21.274097Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2025-03-31T15:55:21.274124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-03-31T15:55:21.274154Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2025-03-31T15:55:21.274187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-03-31T15:55:21.274219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 1/1, is published: true 2025-03-31T15:55:21.274269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:128:2153] message: TxId: 281474976710761 2025-03-31T15:55:21.274306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-03-31T15:55:21.274334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710761:0 2025-03-31T15:55:21.274369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710761:0 2025-03-31T15:55:21.274447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, 
LocalPathId: 2] was 12 FAKE_COORDINATOR: Erasing txId 281474976710761 2025-03-31T15:55:21.276173Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2025-03-31T15:55:21.276226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710761 2025-03-31T15:55:21.276281Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710761, buildInfoId: 102 2025-03-31T15:55:21.276353Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710761, buildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Cancellation_Unlocking, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [1:1172:3025], AlterMainTableTxId: 281474976710757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976710758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-31T15:55:21.278061Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-03-31T15:55:21.278143Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Cancellation_Unlocking, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [1:1172:3025], AlterMainTableTxId: 281474976710757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976710758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-31T15:55:21.278197Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Cancellation_Unlocking to Cancelled 2025-03-31T15:55:21.279682Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-03-31T15:55:21.279758Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Cancelled, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [1:1172:3025], AlterMainTableTxId: 281474976710757, AlterMainTableTxStatus: StatusAccepted, 
AlterMainTableTxDone: 1, LockTxId: 281474976710758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-31T15:55:21.279791Z node 1 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2025-03-31T15:55:21.279908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-31T15:55:21.279960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:1194:3047] TestWaitNotification: OK eventTxId 102 2025-03-31T15:55:21.282377Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot" IndexBuildId: 102 2025-03-31T15:55:21.282624Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 102 State: STATE_CANCELLED Settings { source_path: "/MyRoot/Table" max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 column_build_operation { column { ColumnName: "DefaultValue" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } } Progress: 0 } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 102 State: STATE_CANCELLED Settings { source_path: "/MyRoot/Table" max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 column_build_operation { column { ColumnName: "DefaultValue" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } } Progress: 0 } 2025-03-31T15:55:21.284971Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:55:21.285185Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 230us result status StatusSuccess 2025-03-31T15:55:21.285606Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 4 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "DefaultValue" Type: "Uint64" TypeId: 4 Id: 4 NotNull: false DefaultFromLiteral { type { type_id: UINT64 } 
value { uint64_value: 10 } } IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 4 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 10 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 10 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |66.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/executer_actor/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::LsAltered [GOOD] Test command err: 2025-03-31T15:55:11.917174Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488002798135904888:2198];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:55:11.921204Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000b30/r3tmp/tmp4BnUjf/pdisk_1.dat 2025-03-31T15:55:12.515976Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:55:12.524632Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:55:12.524740Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:55:12.527611Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20909 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-03-31T15:55:12.756778Z node 1 :TX_PROXY DEBUG: actor# [1:7488002798135904993:2102] Handle TEvNavigate describe path dc-1 2025-03-31T15:55:12.756813Z node 1 :TX_PROXY DEBUG: Actor# [1:7488002802430872572:2258] HANDLE EvNavigateScheme dc-1 2025-03-31T15:55:12.756889Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7488002802430872320:2117], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T15:55:12.756922Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7488002802430872320:2117], path# /dc-1, domainOwnerId# 72057594046644480 2025-03-31T15:55:12.757048Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002802430872573:2259][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-03-31T15:55:12.758721Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488002798135904699:2049] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7488002802430872577:2259] 2025-03-31T15:55:12.758741Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488002798135904702:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7488002802430872578:2259] 2025-03-31T15:55:12.758768Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7488002798135904699:2049] Subscribe: subscriber# [1:7488002802430872577:2259], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-31T15:55:12.758804Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7488002798135904702:2052] Subscribe: subscriber# [1:7488002802430872578:2259], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-31T15:55:12.758844Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488002798135904705:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7488002802430872579:2259] 2025-03-31T15:55:12.758862Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7488002798135904705:2055] Subscribe: subscriber# [1:7488002802430872579:2259], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-31T15:55:12.758877Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488002802430872577:2259][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7488002798135904699:2049] 2025-03-31T15:55:12.758900Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488002802430872578:2259][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7488002798135904702:2052] 2025-03-31T15:55:12.758902Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488002798135904699:2049] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7488002802430872577:2259] 2025-03-31T15:55:12.758916Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488002798135904702:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7488002802430872578:2259] 2025-03-31T15:55:12.758938Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488002802430872579:2259][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# 
[1:7488002798135904705:2055] 2025-03-31T15:55:12.758957Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488002798135904705:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7488002802430872579:2259] 2025-03-31T15:55:12.759009Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002802430872573:2259][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7488002802430872574:2259] 2025-03-31T15:55:12.759060Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002802430872573:2259][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7488002802430872575:2259] 2025-03-31T15:55:12.759135Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7488002802430872573:2259][/dc-1] Set up state: owner# [1:7488002802430872320:2117], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-03-31T15:55:12.759261Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002802430872573:2259][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7488002802430872576:2259] 2025-03-31T15:55:12.759328Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7488002802430872573:2259][/dc-1] Path was already updated: owner# [1:7488002802430872320:2117], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-03-31T15:55:12.759376Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488002802430872577:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488002802430872574:2259], cookie# 1 2025-03-31T15:55:12.759396Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488002802430872578:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488002802430872575:2259], cookie# 1 2025-03-31T15:55:12.759408Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488002802430872579:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488002802430872576:2259], cookie# 1 2025-03-31T15:55:12.759430Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488002798135904705:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488002802430872579:2259], cookie# 1 2025-03-31T15:55:12.759464Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488002802430872579:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488002798135904705:2055], cookie# 1 2025-03-31T15:55:12.759487Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002802430872573:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488002802430872576:2259], cookie# 1 2025-03-31T15:55:12.759505Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002802430872573:2259][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-03-31T15:55:12.760607Z node 1 :SCHEME_BOARD_REPLICA 
DEBUG: [1:7488002798135904699:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488002802430872577:2259], cookie# 1 2025-03-31T15:55:12.760656Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488002798135904702:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488002802430872578:2259], cookie# 1 2025-03-31T15:55:12.760709Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488002802430872577:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488002798135904699:2049], cookie# 1 2025-03-31T15:55:12.760730Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488002802430872578:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488002798135904702:2052], cookie# 1 2025-03-31T15:55:12.760753Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002802430872573:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488002802430872574:2259], cookie# 1 2025-03-31T15:55:12.760779Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002802430872573:2259][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-03-31T15:55:12.760851Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002802430872573:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488002802430872575:2259], cookie# 1 2025-03-31T15:55:12.760875Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002802430872573:2259][/dc-1] Unexpected sync response: sender# [1:7488002802430872575:2259], cookie# 1 2025-03-31T15:55:12.819707Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7488002802430872320:2117], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-03-31T15:55:12.820128Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7488002802430872320:2117], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 
CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVer ... hId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 4 IsSync: true Partial: 0 } 2025-03-31T15:55:17.038352Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7488002825280378336:2329], recipient# [2:7488002825280378335:2328], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-31T15:55:17.038377Z node 2 :TX_PROXY DEBUG: Actor# [2:7488002825280378335:2328] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-31T15:55:17.038425Z node 2 :TX_PROXY DEBUG: Actor# [2:7488002825280378335:2328] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1" Options { ShowPrivateTable: true } 2025-03-31T15:55:17.038952Z node 2 :TX_PROXY DEBUG: Actor# [2:7488002825280378335:2328] Handle TEvDescribeSchemeResult Forward to# [2:7488002825280378334:2327] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 63 Record# Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743436516486 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743436516486 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" 
ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1743436516528 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: ".sys" PathId: 18446744073709551615 ... (TRUNCATED) 2025-03-31T15:55:17.070100Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7488002820985410659:2113], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T15:55:17.070170Z node 2 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [2:7488002820985410659:2113], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480 2025-03-31T15:55:17.070347Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7488002825280378338:2330][/dc-1/.metadata/initialization/migrations] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-03-31T15:55:17.070782Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:7488002816690443053:2049] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [2:7488002825280378342:2330] 2025-03-31T15:55:17.070809Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:7488002816690443053:2049] Upsert description: path# /dc-1/.metadata/initialization/migrations 2025-03-31T15:55:17.070867Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:7488002816690443053:2049] Subscribe: subscriber# [2:7488002825280378342:2330], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-31T15:55:17.070900Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:7488002816690443056:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [2:7488002825280378343:2330] 2025-03-31T15:55:17.070908Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:7488002816690443056:2052] Upsert description: path# /dc-1/.metadata/initialization/migrations 2025-03-31T15:55:17.070926Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:7488002816690443056:2052] Subscribe: subscriber# [2:7488002825280378343:2330], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-31T15:55:17.070949Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:7488002816690443059:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [2:7488002825280378344:2330] 2025-03-31T15:55:17.070971Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:7488002816690443059:2055] Upsert description: path# /dc-1/.metadata/initialization/migrations 2025-03-31T15:55:17.070989Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:7488002816690443059:2055] Subscribe: subscriber# [2:7488002825280378344:2330], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-31T15:55:17.071020Z node 2 :SCHEME_BOARD_SUBSCRIBER 
DEBUG: [replica][2:7488002825280378342:2330][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [2:7488002816690443053:2049] 2025-03-31T15:55:17.071039Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7488002825280378343:2330][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [2:7488002816690443056:2052] 2025-03-31T15:55:17.071055Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7488002825280378344:2330][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [2:7488002816690443059:2055] 2025-03-31T15:55:17.071129Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7488002825280378338:2330][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [2:7488002825280378339:2330] 2025-03-31T15:55:17.071169Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7488002825280378338:2330][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [2:7488002825280378340:2330] 2025-03-31T15:55:17.071205Z node 2 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][2:7488002825280378338:2330][/dc-1/.metadata/initialization/migrations] Set up state: owner# [2:7488002820985410659:2113], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-31T15:55:17.071223Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7488002825280378338:2330][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [2:7488002825280378341:2330] 2025-03-31T15:55:17.071245Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: [main][2:7488002825280378338:2330][/dc-1/.metadata/initialization/migrations] Ignore empty state: owner# [2:7488002820985410659:2113], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-31T15:55:17.071266Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:7488002816690443053:2049] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [2:7488002825280378342:2330] 2025-03-31T15:55:17.071280Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:7488002816690443056:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [2:7488002825280378343:2330] 2025-03-31T15:55:17.071292Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:7488002816690443059:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [2:7488002825280378344:2330] 2025-03-31T15:55:17.071332Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [2:7488002820985410659:2113], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/initialization/migrations PathId: Strong: 1 } 2025-03-31T15:55:17.071406Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [2:7488002820985410659:2113], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/initialization/migrations PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [2:7488002825280378338:2330] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 
CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr
2025-03-31T15:55:17.071479Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7488002820985410659:2113], cacheItem# { Subscriber: { Subscriber: [2:7488002825280378338:2330] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 }
2025-03-31T15:55:17.071555Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7488002825280378345:2331], recipient# [2:7488002825280378337:2298], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] }
>> THiveTest::TestLockTabletExecutionBadUnlock [GOOD]
>> THiveTest::TestLockTabletExecutionGoodUnlock
------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::YdbManagedSelectConstant [GOOD]
Test command err:
Trying to start YDB, gRPC: 7429, MsgBus: 2503
2025-03-31T15:54:58.349156Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488002742152844836:2066];send_to=[0:7307199536658146131:7762515];
2025-03-31T15:54:58.349223Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000f2f/r3tmp/tmpgropKP/pdisk_1.dat
2025-03-31T15:54:58.948121Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T15:54:58.948246Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T15:54:58.949877Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-31T15:54:58.956207Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 7429, node 1
2025-03-31T15:54:59.203792Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T15:54:59.203816Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T15:54:59.203824Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T15:54:59.203950Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:2503
TClient is connected to server localhost:2503
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:55:00.426314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:55:03.354613Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488002742152844836:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:55:03.354697Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:55:11.625199Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488002797987420387:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T15:55:11.625322Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T15:55:12.463337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480
2025-03-31T15:55:12.597216Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488002802282387812:2365], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T15:55:12.597320Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T15:55:12.597726Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488002802282387817:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T15:55:12.601819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480
2025-03-31T15:55:12.617555Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488002802282387819:2369], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-31T15:55:12.711249Z node 1 :TX_PROXY ERROR: Actor# [1:7488002802282387859:2436] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:55:13.297260Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T15:55:13.704041Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:1, at schemeshard: 72057594046644480 2025-03-31T15:55:13.956416Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T15:55:13.956442Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:55:14.326446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T15:55:14.945106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-31T15:55:15.474791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-31T15:55:16.087061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-03-31T15:55:16.151741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-03-31T15:55:19.814780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710704:0, at schemeshard: 72057594046644480 Call DescribeTable. data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } columns { name: "col2" type { type_id: DOUBLE } } } error { status: SUCCESS } Call ListSplits. 
selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Expected: splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Actual: splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. 
GRpcStatusCode: 0 >> TTxDataShardMiniKQL::MemoryUsageImmediateSmallTx [GOOD] >> TTxDataShardMiniKQL::MemoryUsageImmediateMediumTx >> TSubDomainTest::StartAndStopTenanNode [GOOD] >> TSubDomainTest::StartTenanNodeAndStopAtDestructor >> THiveTest::TestCreateTabletsWithRaceForStoragePoolsKIKIMR_9659 [GOOD] >> THiveTest::TestDeleteTablet >> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithNoCompression >> THiveTest::TestLockTabletExecutionStealLock [GOOD] >> THiveTest::TestProgressWithMaxTabletsScheduled >> TTxDataShardMiniKQL::CrossShard_1_Cycle >> TTxDataShardMiniKQL::CrossShard_5_AllToAll >> TSubDomainTest::CreateTableInsideAndForceDeleteSubDomain [GOOD] >> TSubDomainTest::CreateTableInsidetThenStopTenantAndForceDeleteSubDomain |66.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/executer_actor/ut/unittest |66.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/executer_actor/ut/unittest >> TTxDataShardMiniKQL::ReadConstant [GOOD] >> TTxDataShardMiniKQL::ReadAfterWrite >> THiveTest::TestDeleteTablet [GOOD] >> THiveTest::TestDeleteOwnerTablets |66.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/executer_actor/ut/unittest >> TSubDomainTest::UserAttributesApplyIf [GOOD] >> GenericFederatedQuery::ClickHouseManagedSelectAll [GOOD] >> GenericFederatedQuery::ClickHouseFilterPushdown >> TModifyUserTest::ModifyLdapUser [GOOD] >> TModifyUserTest::ModifyUserIsEnabled >> TTxDataShardMiniKQL::MemoryUsageImmediateMediumTx [GOOD] >> TTxDataShardMiniKQL::MemoryUsageMultiShard >> THiveTest::TestLockTabletExecutionGoodUnlock [GOOD] >> THiveTest::TestLockTabletExecutionLocalGone >> ColumnBuildTest::AlreadyExists [GOOD] |66.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt |66.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt |66.7%| [TA] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/test-results/unittest/{meta.json ... results_accumulator.log} |66.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::UserAttributesApplyIf [GOOD] Test command err: 2025-03-31T15:55:13.880943Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488002808712854356:2202];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:55:13.888307Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000b32/r3tmp/tmprsvTN4/pdisk_1.dat 2025-03-31T15:55:14.967934Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:55:14.969373Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:55:14.969476Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:55:14.979261Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:55:14.982675Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10962 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-03-31T15:55:15.231064Z node 1 :TX_PROXY DEBUG: actor# [1:7488002808712854468:2104] Handle TEvNavigate describe path dc-1 2025-03-31T15:55:15.231182Z node 1 :TX_PROXY DEBUG: Actor# [1:7488002817302789350:2263] HANDLE EvNavigateScheme dc-1 2025-03-31T15:55:15.231320Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7488002813007821787:2117], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T15:55:15.231446Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002813007822019:2234][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7488002813007821787:2117], cookie# 1 2025-03-31T15:55:15.233228Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488002813007822027:2234][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488002813007822024:2234], cookie# 1 2025-03-31T15:55:15.233325Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488002813007822028:2234][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488002813007822025:2234], cookie# 1 2025-03-31T15:55:15.233346Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488002813007822029:2234][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488002813007822026:2234], cookie# 1 2025-03-31T15:55:15.233385Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488002808712854162:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488002813007822027:2234], cookie# 1 2025-03-31T15:55:15.233427Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488002808712854165:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488002813007822028:2234], cookie# 1 2025-03-31T15:55:15.233447Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488002808712854168:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488002813007822029:2234], cookie# 1 2025-03-31T15:55:15.233475Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488002813007822027:2234][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488002808712854162:2049], cookie# 1 2025-03-31T15:55:15.233494Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488002813007822028:2234][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488002808712854165:2052], cookie# 1 2025-03-31T15:55:15.233505Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488002813007822029:2234][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488002808712854168:2055], cookie# 1 2025-03-31T15:55:15.233541Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002813007822019:2234][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488002813007822024:2234], cookie# 1 2025-03-31T15:55:15.233569Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002813007822019:2234][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-03-31T15:55:15.233581Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002813007822019:2234][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# 
[1:7488002813007822025:2234], cookie# 1 2025-03-31T15:55:15.233599Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002813007822019:2234][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-03-31T15:55:15.233620Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002813007822019:2234][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488002813007822026:2234], cookie# 1 2025-03-31T15:55:15.233635Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002813007822019:2234][/dc-1] Unexpected sync response: sender# [1:7488002813007822026:2234], cookie# 1 2025-03-31T15:55:15.233725Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7488002813007821787:2117], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-03-31T15:55:15.250996Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7488002813007821787:2117], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7488002813007822019:2234] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-31T15:55:15.252118Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7488002813007821787:2117], cacheItem# { Subscriber: { Subscriber: [1:7488002813007822019:2234] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-03-31T15:55:15.254761Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7488002817302789351:2264], recipient# [1:7488002817302789350:2263], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-31T15:55:15.254823Z node 1 :TX_PROXY DEBUG: Actor# [1:7488002817302789350:2263] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-31T15:55:15.302372Z node 1 :TX_PROXY DEBUG: Actor# [1:7488002817302789350:2263] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-03-31T15:55:15.305887Z node 1 :TX_PROXY DEBUG: Actor# [1:7488002817302789350:2263] Handle TEvDescribeSchemeResult Forward to# [1:7488002817302789349:2262] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult 
PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T15:55:15.350684Z node 1 :TX_PROXY DEBUG: actor# [1:7488002808712854468:2104] Handle TEvProposeTransaction 2025-03-31T15:55:15.350720Z node 1 :TX_PROXY DEBUG: actor# [1:7488002808712854468:2104] TxId# 281474976710657 ProcessProposeTransaction 2025-03-31T15:55:15.350849Z node 1 :TX_PROXY DEBUG: actor# [1:7488002808712854468:2104] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7488002817302789357:2269] 2025-03-31T15:55:15.493267Z node 1 :TX_PROXY DEBUG: Actor# [1:7488002817302789357:2269] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" PeerName: "" 2025-03-31T15:55:15.493330Z node 1 :TX_PROXY DEBUG: Actor# [1:7488002817302789357:2269] txid# 281474976710657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T15:55:15.493395Z node 1 :TX_PROXY DEBUG: Actor# [1:7488002817302789357:2269] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-03-31T15:55:15.493707Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7488002813007821787:2117], request# { ErrorCount: 0 Databa ... 
7594046644480, LocalPathId: 2] Version: 6 }: sender# [2:7488002847727051805:2241], cookie# 281474976715662 2025-03-31T15:55:22.441796Z node 2 :SCHEME_BOARD_POPULATOR NOTICE: [2:7488002847727051797:2234] Ack update: ack to# [2:7488002843432084342:2146], cookie# 281474976715662, pathId# [OwnerId: 72057594046644480, LocalPathId: 2], version# 6 2025-03-31T15:55:22.441818Z node 2 :SCHEME_BOARD_POPULATOR DEBUG: [2:7488002847727051797:2234] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046644480 Generation: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] Version: 6 }: sender# [2:7488002847727051806:2242], cookie# 281474976715662 2025-03-31T15:55:22.441828Z node 2 :SCHEME_BOARD_POPULATOR DEBUG: [2:7488002847727051797:2234] Ack for unknown update (already acked?): sender# [2:7488002847727051806:2242], cookie# 281474976715662 2025-03-31T15:55:22.441883Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715662 2025-03-31T15:55:22.441943Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715662 2025-03-31T15:55:22.441952Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715662 2025-03-31T15:55:22.441966Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715662, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 6 2025-03-31T15:55:22.441980Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-03-31T15:55:22.442038Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715662, subscribers: 0 2025-03-31T15:55:22.443947Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715662 waiting... 
2025-03-31T15:55:22.447436Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715662, at schemeshard: 72057594046644480 TClient::Ls request: /dc-1/USER_0 2025-03-31T15:55:22.451899Z node 2 :TX_PROXY DEBUG: actor# [2:7488002843432084206:2086] Handle TEvNavigate describe path /dc-1/USER_0 2025-03-31T15:55:22.451939Z node 2 :TX_PROXY DEBUG: Actor# [2:7488002847727051959:2357] HANDLE EvNavigateScheme /dc-1/USER_0 2025-03-31T15:55:22.452026Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7488002843432084261:2111], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T15:55:22.452106Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7488002847727051890:2304][/dc-1/USER_0] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [2:7488002843432084261:2111], cookie# 10 2025-03-31T15:55:22.452188Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7488002847727051894:2304][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [2:7488002847727051891:2304], cookie# 10 2025-03-31T15:55:22.452207Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7488002847727051895:2304][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [2:7488002847727051892:2304], cookie# 10 2025-03-31T15:55:22.452223Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7488002847727051896:2304][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [2:7488002847727051893:2304], cookie# 10 2025-03-31T15:55:22.452248Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:7488002843432083951:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [2:7488002847727051894:2304], cookie# 10 2025-03-31T15:55:22.452276Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:7488002843432083954:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [2:7488002847727051895:2304], cookie# 10 2025-03-31T15:55:22.452295Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:7488002843432083957:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [2:7488002847727051896:2304], cookie# 10 2025-03-31T15:55:22.452323Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7488002847727051894:2304][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 6 Partial: 0 }: sender# [2:7488002843432083951:2049], cookie# 10 2025-03-31T15:55:22.452350Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7488002847727051895:2304][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 6 Partial: 0 }: sender# [2:7488002843432083954:2052], cookie# 10 2025-03-31T15:55:22.452374Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7488002847727051896:2304][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 6 Partial: 0 }: sender# [2:7488002843432083957:2055], cookie# 10 2025-03-31T15:55:22.452402Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7488002847727051890:2304][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 6 Partial: 0 }: sender# [2:7488002847727051891:2304], cookie# 10 2025-03-31T15:55:22.452423Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[main][2:7488002847727051890:2304][/dc-1/USER_0] Sync is in progress: cookie# 10, size# 3, half# 1, successes# 1, faulires# 0 2025-03-31T15:55:22.452440Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7488002847727051890:2304][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 6 Partial: 0 }: sender# [2:7488002847727051892:2304], cookie# 10 2025-03-31T15:55:22.452458Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7488002847727051890:2304][/dc-1/USER_0] Sync is done: cookie# 10, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-03-31T15:55:22.452476Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7488002847727051890:2304][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 6 Partial: 0 }: sender# [2:7488002847727051893:2304], cookie# 10 2025-03-31T15:55:22.452488Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7488002847727051890:2304][/dc-1/USER_0] Unexpected sync response: sender# [2:7488002847727051893:2304], cookie# 10 2025-03-31T15:55:22.452529Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [2:7488002843432084261:2111], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0 PathId: Partial: 0 } 2025-03-31T15:55:22.452613Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [2:7488002843432084261:2111], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [2:7488002847727051890:2304] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 10 } Filled: 1 Status: StatusSuccess Kind: 8 TableKind: 0 Created: 1 CreateStep: 1743436522401 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-31T15:55:22.452682Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7488002843432084261:2111], cacheItem# { Subscriber: { Subscriber: [2:7488002847727051890:2304] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 10 } Filled: 1 Status: StatusSuccess Kind: 8 TableKind: 0 Created: 1 CreateStep: 1743436522401 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 10 IsSync: true Partial: 0 } 2025-03-31T15:55:22.452882Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7488002847727051960:2358], recipient# [2:7488002847727051959:2357], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [72057594046644480:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-31T15:55:22.452917Z node 2 :TX_PROXY DEBUG: Actor# [2:7488002847727051959:2357] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-31T15:55:22.452981Z node 2 :TX_PROXY DEBUG: Actor# [2:7488002847727051959:2357] 
SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1/USER_0" Options { ShowPrivateTable: true } 2025-03-31T15:55:22.453593Z node 2 :TX_PROXY DEBUG: Actor# [2:7488002847727051959:2357] Handle TEvDescribeSchemeResult Forward to# [2:7488002847727051958:2356] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "/dc-1/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1743436522401 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 4 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046644480 PathId: 2 } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } UserAttributes { Key: "AttrA3" Value: "ValA3" } } PathId: 2 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1743436522401 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 4 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1... 
(TRUNCATED) >> TopicAutoscaling::ControlPlane_CreateAlterDescribe >> PersQueueSdkReadSessionTest::StopResumeReadingData [GOOD] >> ReadSessionImplTest::CreatePartitionStream [GOOD] >> ReadSessionImplTest::BrokenCompressedData >> TTxDataShardMiniKQL::ReadAfterWrite [GOOD] >> TTxDataShardMiniKQL::ReadNonExisting >> ReadSessionImplTest::BrokenCompressedData [GOOD] >> TSubDomainTest::CreateTablet [GOOD] >> TSubDomainTest::CreateTabletForUnknownDomain >> ReadSessionImplTest::CommitOffsetTwiceIsError >> ReadSessionImplTest::CommitOffsetTwiceIsError [GOOD] >> ReadSessionImplTest::DataReceivedCallback >> THiveTest::TestDeleteOwnerTablets [GOOD] >> THiveTest::TestDeleteOwnerTabletsMany ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::AlreadyExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T15:55:20.309208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T15:55:20.309820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:55:20.310340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T15:55:20.311116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T15:55:20.311632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T15:55:20.311863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T15:55:20.312379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:55:20.312926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T15:55:20.317547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:55:20.808001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:55:20.808513Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:55:20.881101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:55:20.882039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T15:55:20.883188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T15:55:20.967573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T15:55:20.972268Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T15:55:20.972879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T15:55:20.973096Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:55:20.980519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:55:20.981637Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:55:20.981704Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:55:20.981845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T15:55:20.981885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:55:20.981918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T15:55:20.982055Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T15:55:21.026789Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T15:55:21.491374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:55:21.492993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:55:21.494994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T15:55:21.497477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T15:55:21.498445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:55:21.516080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T15:55:21.517864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T15:55:21.519996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:55:21.520403Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T15:55:21.520612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T15:55:21.520837Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T15:55:21.526078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:55:21.526134Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts 
operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:55:21.526177Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T15:55:21.533202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:55:21.533259Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:55:21.533328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:55:21.533374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T15:55:21.556078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:55:21.571298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T15:55:21.572749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T15:55:21.586058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:55:21.590527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:55:21.590764Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:55:21.592299Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T15:55:21.592491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:55:21.593081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:55:21.593225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T15:55:21.610973Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:55:21.612265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:55:21.613047Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:55:21.613577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 
1 2025-03-31T15:55:21.614120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:55:21.614314Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T15:55:21.615151Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:55:21.615361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:55:21.615620Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:55:21.615905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:55:21.616184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T15:55:21.616923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:55:21.617403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T15:55:21.617870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T15:55:21.618377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:55:21.618619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T15:55:21.618843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T15:55:21.627780Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:55:21.627903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:55:21.627936Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
atisfy waiter [1:776:2657] TestWaitNotification: OK eventTxId 105 2025-03-31T15:55:26.833756Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409549 2025-03-31T15:55:26.834305Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table" took 642us result status StatusSuccess 2025-03-31T15:55:26.835595Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 200 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 3 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared } } PathId: 2 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 2025-03-31T15:55:26.844044Z node 1 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_CREATE_INDEX_BUILD: DoExecute TxId: 106 DatabaseName: "/MyRoot/ServerLessDB" Settings { source_path: "/MyRoot/ServerLessDB/Table" max_batch_rows: 2 max_shards_in_flight: 2 column_build_operation { column { ColumnName: "value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } } 2025-03-31T15:55:26.855651Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 106 2025-03-31T15:55:26.856448Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: 
TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: AlterMainTable, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:1141:3012], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-31T15:55:26.857567Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 106, at schemeshard: 72075186233409549 2025-03-31T15:55:26.857846Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, BuildIndexId: 106, txId# 281474976725757 2025-03-31T15:55:26.858326Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, buildInfo: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: AlterMainTable, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:1141:3012], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-31T15:55:26.863615Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 106 2025-03-31T15:55:26.863705Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: AlterMainTable, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:1141:3012], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-31T15:55:26.868047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/ServerLessDB" OperationType: ESchemeOpAlterTable AlterTable { Name: "Table" Columns { Name: "value" Type: "Uint64" 
DefaultFromLiteral { type { type_id: UINT64 } value { uint64_value: 10 } } IsBuildInProgress: true } } Internal: true } TxId: 281474976725757 TabletId: 72075186233409549 FailOnExist: true , at schemeshard: 72075186233409549 2025-03-31T15:55:26.868287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTable Propose, path: /MyRoot/ServerLessDB/Table, pathId: , opId: 281474976725757:0, at schemeshard: 72075186233409549 2025-03-31T15:55:26.868600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976725757:1, propose status:StatusInvalidParameter, reason: Cannot alter type for column 'value', at schemeshard: 72075186233409549 2025-03-31T15:55:26.871105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976725757, response: Status: StatusInvalidParameter Reason: "Cannot alter type for column \'value\'" TxId: 281474976725757 SchemeshardId: 72075186233409549, at schemeshard: 72075186233409549 2025-03-31T15:55:26.871298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976725757, database: /MyRoot/ServerLessDB, subject: , status: StatusInvalidParameter, reason: Cannot alter type for column 'value', operation: ALTER TABLE, path: /MyRoot/ServerLessDB/Table 2025-03-31T15:55:26.871507Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvModifySchemeTransactionResult: txId# 281474976725757, status# StatusInvalidParameter 2025-03-31T15:55:26.871595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Message: Status: StatusInvalidParameter Reason: "Cannot alter type for column \'value\'" TxId: 281474976725757 SchemeshardId: 72075186233409549 2025-03-31T15:55:26.871671Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, BuildIndexId: 106, cookie: 106, txId: 281474976725757, status: StatusInvalidParameter 2025-03-31T15:55:26.871797Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, buildInfo: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: AlterMainTable, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:1141:3012], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }}, record: Status: StatusInvalidParameter Reason: "Cannot alter type for column \'value\'" TxId: 281474976725757 SchemeshardId: 72075186233409549 2025-03-31T15:55:26.872506Z node 1 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuilder::TTxReply: ReplyOnCreation, BuildIndexId: 106, status: BAD_REQUEST, error: At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Cannot alter type for column 'value', replyTo: [1:1141:3012] 2025-03-31T15:55:26.872947Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Message: TxId: 106 Status: BAD_REQUEST Issues { message: "At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, 
reason: Cannot alter type for column \'value\'" severity: 1 } IndexBuild { Id: 106 Issues { message: "At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Cannot alter type for column \'value\'" severity: 1 } State: STATE_PREPARING Settings { source_path: "/MyRoot/ServerLessDB/Table" max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 column_build_operation { column { ColumnName: "value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } } Progress: 0 } BUILDINDEX RESPONSE CREATE: NKikimrIndexBuilder.TEvCreateResponse TxId: 106 Status: BAD_REQUEST Issues { message: "At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Cannot alter type for column \'value\'" severity: 1 } IndexBuild { Id: 106 Issues { message: "At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Cannot alter type for column \'value\'" severity: 1 } State: STATE_PREPARING Settings { source_path: "/MyRoot/ServerLessDB/Table" max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 column_build_operation { column { ColumnName: "value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } } Progress: 0 } >> KqpLocks::EmptyRangeAlreadyBroken [GOOD] >> TopicAutoscaling::CommitTopPast_BeforeAutoscaleAwareSDK >> ReadSessionImplTest::DataReceivedCallback [GOOD] >> ReadSessionImplTest::CommonHandler >> THiveTest::TestProgressWithMaxTabletsScheduled [GOOD] >> ReadSessionImplTest::CommonHandler [GOOD] >> ColumnBuildTest::BaseCase [GOOD] >> THiveTest::TestHiveBalancerIgnoreTablet [GOOD] >> THiveTest::TestHiveBalancerNodeRestarts ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpLocks::EmptyRangeAlreadyBroken [GOOD] Test command err: Trying to start YDB, gRPC: 24975, MsgBus: 6071 2025-03-31T15:54:28.099695Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488002615493124697:2277];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:54:28.099734Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012fb/r3tmp/tmpbl4r5D/pdisk_1.dat 2025-03-31T15:54:29.263191Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:29.676013Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:54:29.694965Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:29.695542Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:29.705767Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24975, node 1 2025-03-31T15:54:30.781695Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:54:30.781717Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:54:30.781723Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: 
(empty maybe) 2025-03-31T15:54:30.782002Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T15:54:33.112530Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488002615493124697:2277];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:54:33.112794Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:6071 TClient is connected to server localhost:6071 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:54:43.820072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:54:44.016148Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:54:44.650250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T15:54:44.650275Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:54:44.922559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:54:45.134188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:54:45.242535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:54:52.979302Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488002718572341421:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:54:53.909202Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:54:55.421474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T15:54:55.540392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T15:54:55.669980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T15:54:55.776441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T15:54:55.924966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T15:54:56.518848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T15:54:56.787683Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488002735752211199:2506], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:54:56.787749Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:54:56.788053Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488002735752211206:2509], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:54:56.794172Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T15:54:56.836601Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488002735752211208:2510], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T15:54:56.931525Z node 1 :TX_PROXY ERROR: Actor# [1:7488002735752211265:3547] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 8626, MsgBus: 18773 2025-03-31T15:55:01.360797Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488002755930179970:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:55:01.360838Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012fb/r3tmp/tmpPDWEH2/pdisk_1.dat 2025-03-31T15:55:02.927235Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:55:03.639542Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:55:03.642602Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:55:03.642899Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:55:03.710683Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8626, node 2 2025-03-31T15:55:05.188175Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:55:05.188193Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:55:05.188199Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:55:05.188777Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T15:55:06.363495Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488002755930179970:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:55:06.363559Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:18773 TClient is connected to server localhost:18773 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-31T15:55:10.535644Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T15:55:1 ... olCreatorActor] ActorId: [2:7488002807469789868:2475], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T15:55:13.766184Z node 2 :TX_PROXY ERROR: Actor# [2:7488002807469789924:3473] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:55:15.209236Z node 2 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=6; 2025-03-31T15:55:15.217736Z node 2 :TX_DATASHARD ERROR: Prepare transaction failed. txid 6 at tablet 72075186224037914 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-03-31T15:55:15.217901Z node 2 :TX_DATASHARD ERROR: Errors while proposing transaction txid 6 at tablet 72075186224037914 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-03-31T15:55:15.218112Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7488002816059724945:2512], Table: `/Root/Test` ([72057594046644480:9:1]), SessionActorId: [2:7488002811764757513:2512]Got LOCKS BROKEN for table `/Root/Test`. ShardID=72075186224037914, Sink=[2:7488002816059724945:2512].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-03-31T15:55:15.218626Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7488002816059724938:2512], SessionActorId: [2:7488002811764757513:2512], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[2:7488002811764757513:2512]. isRollback=0 2025-03-31T15:55:15.218825Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZGIyNzRkNDQtY2YzYWNiMGQtNjNiYmE3NzctZjZlZWZlZDg=, ActorId: [2:7488002811764757513:2512], ActorState: ExecuteState, TraceId: 01jqpem1t4f8rax1cbbpadyjzv, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [2:7488002816059724939:2512] from: [2:7488002816059724938:2512] 2025-03-31T15:55:15.218882Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7488002816059724939:2512] TxId: 281474976715674. Ctx: { TraceId: 01jqpem1t4f8rax1cbbpadyjzv, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGIyNzRkNDQtY2YzYWNiMGQtNjNiYmE3NzctZjZlZWZlZDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-03-31T15:55:15.220060Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZGIyNzRkNDQtY2YzYWNiMGQtNjNiYmE3NzctZjZlZWZlZDg=, ActorId: [2:7488002811764757513:2512], ActorState: ExecuteState, TraceId: 01jqpem1t4f8rax1cbbpadyjzv, Create QueryResponse for error on request, msg:
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 Trying to start YDB, gRPC: 21042, MsgBus: 12498 2025-03-31T15:55:16.312898Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488002820641309817:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:55:16.312960Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012fb/r3tmp/tmp1YcRpX/pdisk_1.dat 2025-03-31T15:55:16.587691Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:55:16.609290Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:55:16.609373Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:55:16.612064Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21042, node 3 2025-03-31T15:55:16.748907Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:55:16.748929Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:55:16.748935Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:55:16.749026Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12498 TClient is connected to server localhost:12498 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:55:17.404733Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:55:17.416946Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T15:55:17.437864Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:55:17.615044Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T15:55:17.840108Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:55:17.933977Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:55:21.315806Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488002820641309817:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:55:21.315873Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:55:24.579673Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488002855001049836:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:55:24.579768Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:55:24.717125Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T15:55:24.903111Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T15:55:24.998308Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T15:55:25.088673Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T15:55:25.158394Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T15:55:25.241543Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T15:55:25.357714Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488002859296017661:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:55:25.357800Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:55:25.358042Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488002859296017666:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:55:25.365056Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T15:55:25.385134Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488002859296017668:2467], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T15:55:25.476952Z node 3 :TX_PROXY ERROR: Actor# [3:7488002859296017723:3463] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:55:27.576787Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YTYxZTA0OTUtZTMzYmQwNTUtOTc5NzVhLTRkNDJmYjJm, ActorId: [3:7488002863590985315:2503], ActorState: ExecuteState, TraceId: 01jqpemdjw2bjkc9rwtv3rt37w, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken
: Error: Transaction locks invalidated. Table: `/Root/Test`, code: 2001
: Error: tx has deferred effects, but locks are broken >> TSubDomainTest::FailIfAffectedSetNotInterior [GOOD] >> TSubDomainTest::GenericCases >> TopicAutoscaling::PartitionSplit_PQv1 >> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_BeforeAutoscaleAwareSDK ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::CommonHandler [GOOD] Test command err: 2025-03-31T15:53:52.652737Z :SpecifyClustersExplicitly INFO: Random seed for debugging is 1743436432652694 2025-03-31T15:53:56.446231Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488002479296729583:2279];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:53:56.446414Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T15:53:57.041516Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488002477671668380:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:53:57.406741Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T15:54:00.645244Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-31T15:54:00.648580Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-31T15:54:01.459396Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488002479296729583:2279];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:54:01.459454Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:54:01.930896Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488002477671668380:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:54:01.930941Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:54:02.102477Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012ee/r3tmp/tmp8rBiuw/pdisk_1.dat 2025-03-31T15:54:02.526733Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:02.567113Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:02.954452Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:03.107327Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:03.955730Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:04.118458Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:04.967373Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:05.139473Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:05.987507Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:06.144442Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:06.418295Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:06.420273Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:08.198235Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:08.198262Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:08.751379Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:08.751401Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:10.583171Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:10.584646Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:11.035582Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:11.035611Z node 2 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:11.091418Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:11.091498Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:11.092947Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:11.092991Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:11.142284Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:54:11.169708Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T15:54:11.169898Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:54:11.218992Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19915, node 1 2025-03-31T15:54:11.755213Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/0l5c/0012ee/r3tmp/yandexv4CfAF.tmp 2025-03-31T15:54:11.755244Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/0l5c/0012ee/r3tmp/yandexv4CfAF.tmp 2025-03-31T15:54:11.755406Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/0l5c/0012ee/r3tmp/yandexv4CfAF.tmp 2025-03-31T15:54:11.755554Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T15:54:11.982673Z INFO: TTestServer started on Port 9426 GrpcPort 19915 TClient is connected to server localhost:9426 PQClient connected to localhost:19915 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:54:12.432199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-03-31T15:54:21.230812Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488002585045851160:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:54:21.230917Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:54:21.231911Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488002585045851171:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:54:21.343795Z node 1 :KQP_PROXY ERROR: TraceId: "01jqpej4t98yst1wp7w5bttg7v", Request deadline has expired for 3.673682s seconds 2025-03-31T15:54:21.540497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480 2025-03-31T15:54:22.144670Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488002585045851174:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2025-03-31T15:54:22.292333Z node 2 :TX_PROXY ERROR: Actor# [2:7488002589340818502:2154] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:54:23.739303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T15:54:23.739335Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:54:24.279526Z node ... cookie 1 consumer shared/user session shared/user_3_1_13034729280852244306_v1 grpc read done: success# 1, data# { read { } } 2025-03-31T15:55:26.158691Z :INFO: [/Root] [/Root] [23bf0c65-e6d36d4a-d46e42e-1959602] Counters: { Errors: 0 CurrentSessionLifetimeMs: 2716 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 84 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-31T15:55:26.158950Z :NOTICE: [/Root] [/Root] [23bf0c65-e6d36d4a-d46e42e-1959602] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-03-31T15:55:26.151754Z node 3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_13034729280852244306_v1 got read request: guid# 219f132a-5b9cfcac-8f7b6517-dddfc328 2025-03-31T15:55:26.160787Z node 3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_13034729280852244306_v1 grpc read done: success# 0, data# { } 2025-03-31T15:55:26.160816Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_13034729280852244306_v1 grpc read failed 2025-03-31T15:55:26.164085Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_3_1_13034729280852244306_v1 2025-03-31T15:55:26.164138Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7488002850244761131:2571] destroyed 2025-03-31T15:55:26.164189Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_3_1_13034729280852244306_v1 2025-03-31T15:55:26.163287Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_13034729280852244306_v1 closed 2025-03-31T15:55:26.163368Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_13034729280852244306_v1 is DEAD 2025-03-31T15:55:26.164252Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7488002850244761128:2568] disconnected; active server actors: 1 2025-03-31T15:55:26.164272Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [3:7488002850244761128:2568] client user disconnected session shared/user_3_1_13034729280852244306_v1 2025-03-31T15:55:26.524670Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7488002863129663254:2605] TxId: 281474976715701. Ctx: { TraceId: 01jqpemcgj6dzk2wbpgar132nn, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MzQwZDU3OGEtMWRiMTVkMTAtYWY4NGZhYjEtMmMxOTRkYmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 4 2025-03-31T15:55:26.525487Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7488002863129663261:2612], TxId: 281474976715701, task: 2. Ctx: { SessionId : ydb://session/3?node_id=3&id=MzQwZDU3OGEtMWRiMTVkMTAtYWY4NGZhYjEtMmMxOTRkYmE=. TraceId : 01jqpemcgj6dzk2wbpgar132nn. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [3:7488002863129663254:2605], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2025-03-31T15:55:26.525520Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7488002863129663263:2613], TxId: 281474976715701, task: 4. Ctx: { SessionId : ydb://session/3?node_id=3&id=MzQwZDU3OGEtMWRiMTVkMTAtYWY4NGZhYjEtMmMxOTRkYmE=. CustomerSuppliedId : . TraceId : 01jqpemcgj6dzk2wbpgar132nn. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [3:7488002863129663254:2605], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2025-03-31T15:55:29.592676Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:55:29.599136Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:55:29.599186Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T15:55:29.606703Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-31T15:55:29.624952Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-31T15:55:29.638701Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:55:29.644443Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: 13. Commit offset: 31 2025-03-31T15:55:29.652358Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:55:29.652390Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:55:29.652419Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T15:55:29.686886Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-31T15:55:29.712391Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-31T15:55:29.713836Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:55:29.715107Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-31T15:55:29.751807Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-03-31T15:55:29.755869Z :INFO: Error decompressing data: (TZLibDecompressorError) util/stream/zlib.cpp:143: inflate error(incorrect header check) 2025-03-31T15:55:29.756495Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-3) 2025-03-31T15:55:29.758727Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-31T15:55:29.759003Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-03-31T15:55:29.759030Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-03-31T15:55:29.759075Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 3, size 16 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { DataDecompressionError: "(TZLibDecompressorError) util/stream/zlib.cpp:143: inflate error(incorrect header check)" Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } } 2025-03-31T15:55:29.764933Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:55:29.764969Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:55:29.764997Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T15:55:29.793202Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-31T15:55:29.843835Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-31T15:55:29.845843Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:55:29.853559Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-31T15:55:29.960381Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:55:29.962618Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-03-31T15:55:29.979346Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-31T15:55:29.979662Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-03-31T15:55:29.979998Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 2). Partition stream id: 1 2025-03-31T15:55:29.988359Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:55:29.988388Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:55:29.988418Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T15:55:30.015704Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-31T15:55:30.035873Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-31T15:55:30.038411Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:55:30.039271Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-31T15:55:30.041455Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-03-31T15:55:30.074048Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-03-31T15:55:30.076114Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2025-03-31T15:55:30.077815Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-03-31T15:55:30.083180Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-31T15:55:30.084031Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (2-2) 2025-03-31T15:55:30.087227Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. 
Number of messages 1, size 8 bytes 2025-03-31T15:55:30.088058Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-03-31T15:55:32.086339Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:55:32.086390Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:55:32.086423Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T15:55:32.143267Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-31T15:55:32.162777Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-31T15:55:32.170636Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:55:32.195863Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:55:32.196529Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-03-31T15:55:32.196606Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-31T15:55:32.196674Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes >> TTxDataShardMiniKQL::ReadNonExisting [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::BaseCase [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T15:55:21.490848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T15:55:21.491363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:55:21.491713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T15:55:21.492011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T15:55:21.492470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T15:55:21.492492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T15:55:21.493094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:55:21.493618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T15:55:21.498162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:55:21.956486Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 
2025-03-31T15:55:21.956537Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:55:21.960507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:55:21.960620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T15:55:21.960779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T15:55:21.966375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T15:55:21.966528Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T15:55:21.967114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T15:55:21.967325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:55:21.969316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:55:21.970267Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:55:21.970329Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:55:21.970488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T15:55:21.970530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:55:21.970593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T15:55:21.970744Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T15:55:21.977397Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T15:55:22.124584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:55:22.124817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:55:22.125010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T15:55:22.125313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T15:55:22.125378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:55:22.128421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T15:55:22.128548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 
2025-03-31T15:55:22.128716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:55:22.128806Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T15:55:22.128847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T15:55:22.128878Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T15:55:22.130949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:55:22.131015Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:55:22.131060Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T15:55:22.133584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:55:22.133642Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:55:22.133695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:55:22.133737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T15:55:22.137368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:55:22.140017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T15:55:22.140223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T15:55:22.141221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:55:22.141381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:55:22.141434Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:55:22.141676Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T15:55:22.141721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:55:22.141867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:55:22.141956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no 
IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T15:55:22.144285Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:55:22.144345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:55:22.144552Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:55:22.144605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T15:55:22.144784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:55:22.144825Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T15:55:22.144900Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:55:22.144924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:55:22.144949Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:55:22.144969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:55:22.145009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T15:55:22.145038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:55:22.145060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T15:55:22.145089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T15:55:22.145139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:55:22.145163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T15:55:22.145184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T15:55:22.146522Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:55:22.146606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:55:22.146630Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1141:3012], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }}, record: Status: StatusAccepted TxId: 281474976725761 SchemeshardId: 72075186233409549 PathId: 2 2025-03-31T15:55:31.330151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976725761:0, at schemeshard: 72075186233409549 2025-03-31T15:55:31.330192Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72075186233409549] TDropLock TPropose opId# 281474976725761:0 ProgressState 2025-03-31T15:55:31.330243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976725761 ready parts: 1/1 2025-03-31T15:55:31.330346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72075186233409550 message:Transaction { AffectedSet { TabletId: 72075186233409549 Flags: 2 } ExecLevel: 0 TxId: 281474976725761 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72075186233409550 2025-03-31T15:55:31.332600Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 106 2025-03-31T15:55:31.332678Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1141:3012], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }} 2025-03-31T15:55:31.332829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976725761:4294967295 from tablet: 72075186233409549 to tablet: 72075186233409550 cookie: 0:281474976725761 msg type: 269090816 2025-03-31T15:55:31.332903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976725761, partId: 4294967295, tablet: 72075186233409550 2025-03-31T15:55:31.333026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976725761, at schemeshard: 72075186233409549 2025-03-31T15:55:31.333049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976725761, ready parts: 0/1, is published: true 2025-03-31T15:55:31.333074Z node 1 
:FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976725761, at schemeshard: 72075186233409549 2025-03-31T15:55:31.355135Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [1:1812:3675], Recipient [1:747:2636]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72075186233409549 ClientId: [1:1812:3675] ServerId: [1:1814:3677] } 2025-03-31T15:55:31.355187Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-03-31T15:55:31.465962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 650, transactions count in step: 1, at schemeshard: 72075186233409549 2025-03-31T15:55:31.466067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976725761 AckTo { RawX1: 0 RawX2: 0 } } Step: 650 MediatorID: 72075186233409551 TabletID: 72075186233409549, at schemeshard: 72075186233409549 2025-03-31T15:55:31.466107Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72075186233409549] TDropLock TPropose opId# 281474976725761:0 HandleReply TEvOperationPlan: step# 650 2025-03-31T15:55:31.466430Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976725761:0 128 -> 240 2025-03-31T15:55:31.495744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976725761:0, at schemeshard: 72075186233409549 2025-03-31T15:55:31.495794Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72075186233409549] TDone opId# 281474976725761:0 ProgressState 2025-03-31T15:55:31.495856Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976725761:0 progress is 1/1 2025-03-31T15:55:31.495879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 1/1 2025-03-31T15:55:31.495904Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976725761:0 progress is 1/1 2025-03-31T15:55:31.496157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 1/1 2025-03-31T15:55:31.496181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976725761, ready parts: 1/1, is published: true 2025-03-31T15:55:31.496233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:561:2499] message: TxId: 281474976725761 2025-03-31T15:55:31.496264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 1/1 2025-03-31T15:55:31.496288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976725761:0 2025-03-31T15:55:31.496504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976725761:0 2025-03-31T15:55:31.496561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 2] was 3 2025-03-31T15:55:31.544593Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976725761 2025-03-31T15:55:31.544664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976725761 2025-03-31T15:55:31.544717Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976725761, buildInfoId: 106 2025-03-31T15:55:31.544772Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976725761, buildInfo: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], 
TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1141:3012], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }} 2025-03-31T15:55:31.560761Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 106 2025-03-31T15:55:31.561097Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1141:3012], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }} 2025-03-31T15:55:31.561400Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-03-31T15:55:31.571681Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 106 2025-03-31T15:55:31.572259Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1141:3012], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }} 2025-03-31T15:55:31.573069Z node 1 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 106, subscribers count# 1 
2025-03-31T15:55:31.573973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-03-31T15:55:31.574242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:1159:3030] TestWaitNotification: OK eventTxId 106 2025-03-31T15:55:31.651077Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 106 2025-03-31T15:55:31.657744Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 106 State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 column_build_operation { column { ColumnName: "DefaultValue" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } } Progress: 100 } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 106 State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 column_build_operation { column { ColumnName: "DefaultValue" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } } Progress: 100 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THiveTest::TestProgressWithMaxTabletsScheduled [GOOD] Test command err: 2025-03-31T15:54:07.406186Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-03-31T15:54:07.530063Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-03-31T15:54:07.530997Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:196} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-03-31T15:54:07.531543Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-03-31T15:54:07.532424Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:265} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2025-03-31T15:54:07.532465Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-03-31T15:54:07.533146Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:48:2075] ControllerId# 72057594037932033 2025-03-31T15:54:07.533171Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-03-31T15:54:07.533267Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295 2025-03-31T15:54:07.534167Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler 2025-03-31T15:54:07.677494Z node 1 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# 
DSP02 2025-03-31T15:54:07.677864Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-03-31T15:54:07.719552Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:47:2074] Create Queue# [1:56:2080] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:07.731318Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:47:2074] Create Queue# [1:57:2081] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:07.733545Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:47:2074] Create Queue# [1:58:2082] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:07.734542Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:47:2074] Create Queue# [1:59:2083] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:07.735223Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:47:2074] Create Queue# [1:60:2084] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:07.735866Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:47:2074] Create Queue# [1:61:2085] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:07.736661Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:47:2074] Create Queue# [1:62:2086] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:07.736685Z node 1 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-03-31T15:54:07.736919Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [1:48:2075] 2025-03-31T15:54:07.737118Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [1:48:2075] 2025-03-31T15:54:07.737329Z node 1 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-03-31T15:54:07.737544Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-03-31T15:54:07.749111Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-03-31T15:54:07.749365Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-03-31T15:54:07.889913Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-03-31T15:54:07.890009Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-03-31T15:54:07.901367Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [2:71:2073] ControllerId# 72057594037932033 2025-03-31T15:54:07.901409Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-03-31T15:54:07.901460Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295 2025-03-31T15:54:07.902485Z node 2 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler 2025-03-31T15:54:07.924131Z node 2 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-03-31T15:54:07.924181Z node 2 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-03-31T15:54:07.953201Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:70:2072] Create Queue# [2:77:2077] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:07.954056Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:70:2072] Create Queue# [2:78:2078] targetNodeId# 1 
Marker# DSP01 2025-03-31T15:54:07.964650Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:70:2072] Create Queue# [2:79:2079] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:07.964836Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:70:2072] Create Queue# [2:80:2080] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:07.965724Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:70:2072] Create Queue# [2:81:2081] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:07.967806Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:70:2072] Create Queue# [2:82:2082] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:07.968343Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:70:2072] Create Queue# [2:83:2083] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:07.968373Z node 2 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-03-31T15:54:07.968765Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [2:71:2073] 2025-03-31T15:54:07.968792Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [2:71:2073] 2025-03-31T15:54:07.969005Z node 2 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-03-31T15:54:07.969375Z node 2 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-03-31T15:54:07.970239Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-03-31T15:54:07.980705Z node 2 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-03-31T15:54:07.981936Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [2:71:2073] 2025-03-31T15:54:07.982139Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-03-31T15:54:07.982332Z node 2 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-03-31T15:54:07.982946Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-03-31T15:54:08.207519Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:48:2075] 2025-03-31T15:54:08.207581Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-03-31T15:54:08.207613Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-03-31T15:54:08.236010Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-03-31T15:54:08.236059Z node 2 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-03-31T15:54:08.300535Z node 2 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-03-31T15:54:08.302728Z node 2 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-03-31T15:54:08.427848Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:48:2075] 2025-03-31T15:54:08.427909Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-03-31T15:54:08.427939Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 
NumItemsRead# 0 2025-03-31T15:54:08.428367Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-03-31T15:54:08.429329Z node 2 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2025-03-31T15:54:08.431009Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-03-31T15:54:08.439793Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-03-31T15:54:08.440496Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2025-03-31T15:54:08.440577Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2025-03-31T15:54:08.441210Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] ::Bootstrap [1:52:2064] 2025-03-31T15:54:08.441246Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] lookup [1:52:2064] 2025-03-31T15:54:08.441303Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2025-03-31T15:54:08.441520Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037936129] ::Bootstrap [2:75:2064] 2025-03-31T15:54:08.441537Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037936129] lookup [2:75:2064] 2025-03-31T15:54:08.441575Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 268639248 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2025-03-31T15:54:08.441796Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [2:94:2087] 2025-03-31T15:54:08.441813Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [2:94:2087] 2025-03-31T15:54:08.441970Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2025-03-31T15:54:08.442133Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-03-31T15:54:08.442497Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2025-03-31T15:54:08.442538Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 720575940379320 ... 
.260091Z node 23 :TABLET_RESOLVER DEBUG: SelectForward node 23 selfDC 2 leaderDC 2 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72075186224037893 followers: 0 countLeader 1 allowFollowers 0 winner: [23:1020:2316] 2025-03-31T15:55:29.260465Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037893] forward result local node, try to connect [23:1080:2354] 2025-03-31T15:55:29.260499Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037893]::SendEvent [23:1080:2354] 2025-03-31T15:55:29.261122Z node 23 :PIPE_SERVER DEBUG: [72075186224037893] Accept Connect Originator# [23:1080:2354] 2025-03-31T15:55:29.261744Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037893] connected with status OK role: Leader [23:1080:2354] 2025-03-31T15:55:29.261778Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037893] send queued [23:1080:2354] 2025-03-31T15:55:29.262496Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037894] ::Bootstrap [23:1084:2357] 2025-03-31T15:55:29.262521Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037894] lookup [23:1084:2357] 2025-03-31T15:55:29.263041Z node 23 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037894 entry.State: StInit ev: {EvForward TabletID: 72075186224037894 Ev: nullptr Flags: 1:2:0} 2025-03-31T15:55:29.272433Z node 23 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037894 Cookie: 0 ProxyOptions: SigNone} 2025-03-31T15:55:29.277059Z node 22 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037894 Cookie: 0} 2025-03-31T15:55:29.277375Z node 22 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037894 Cookie: 1} 2025-03-31T15:55:29.277418Z node 22 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037894 Cookie: 2} 2025-03-31T15:55:29.278319Z node 23 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037894 CurrentLeader: [23:680:2174] CurrentLeaderTablet: [23:686:2177] CurrentGeneration: 1 CurrentStep: 0} 2025-03-31T15:55:29.278793Z node 23 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037894 CurrentLeader: [23:680:2174] CurrentLeaderTablet: [23:686:2177] CurrentGeneration: 1 CurrentStep: 0} 2025-03-31T15:55:29.279131Z node 23 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037894 entry.State: StInitResolve success: true ev: {EvInfo Status: 0 TabletID: 72075186224037894 Cookie: 0 CurrentLeader: [23:680:2174] CurrentLeaderTablet: [23:686:2177] CurrentGeneration: 1 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2025-03-31T15:55:29.279167Z node 23 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 72075186224037894 followers: 0 2025-03-31T15:55:29.279208Z node 23 :TABLET_RESOLVER DEBUG: SelectForward node 23 selfDC 2 leaderDC 2 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72075186224037894 followers: 0 countLeader 1 allowFollowers 0 winner: [23:680:2174] 2025-03-31T15:55:29.279532Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037894] forward result local node, try to connect [23:1084:2357] 2025-03-31T15:55:29.279568Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037894]::SendEvent [23:1084:2357] 2025-03-31T15:55:29.279652Z node 23 :PIPE_SERVER DEBUG: [72075186224037894] Accept Connect Originator# [23:1084:2357] 2025-03-31T15:55:29.280774Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037894] connected with status OK role: Leader [23:1084:2357] 2025-03-31T15:55:29.280815Z node 23 
:PIPE_CLIENT DEBUG: TClient[72075186224037894] send queued [23:1084:2357] 2025-03-31T15:55:29.282100Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037895] ::Bootstrap [23:1088:2360] 2025-03-31T15:55:29.282128Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037895] lookup [23:1088:2360] 2025-03-31T15:55:29.282436Z node 23 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037895 entry.State: StInit ev: {EvForward TabletID: 72075186224037895 Ev: nullptr Flags: 1:2:0} 2025-03-31T15:55:29.282552Z node 23 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037895 Cookie: 0 ProxyOptions: SigNone} 2025-03-31T15:55:29.288363Z node 22 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037895 Cookie: 0} 2025-03-31T15:55:29.288701Z node 22 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037895 Cookie: 1} 2025-03-31T15:55:29.288744Z node 22 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037895 Cookie: 2} 2025-03-31T15:55:29.290549Z node 23 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037895 CurrentLeader: [23:934:2268] CurrentLeaderTablet: [23:936:2269] CurrentGeneration: 2 CurrentStep: 0} 2025-03-31T15:55:29.290849Z node 23 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037895 CurrentLeader: [23:934:2268] CurrentLeaderTablet: [23:936:2269] CurrentGeneration: 2 CurrentStep: 0} 2025-03-31T15:55:29.295609Z node 23 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037895 entry.State: StInitResolve success: true ev: {EvInfo Status: 0 TabletID: 72075186224037895 Cookie: 0 CurrentLeader: [23:934:2268] CurrentLeaderTablet: [23:936:2269] CurrentGeneration: 2 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2025-03-31T15:55:29.295935Z node 23 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 72075186224037895 followers: 0 2025-03-31T15:55:29.295977Z node 23 :TABLET_RESOLVER DEBUG: SelectForward node 23 selfDC 2 leaderDC 2 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72075186224037895 followers: 0 countLeader 1 allowFollowers 0 winner: [23:934:2268] 2025-03-31T15:55:29.296068Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037895] forward result local node, try to connect [23:1088:2360] 2025-03-31T15:55:29.296356Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037895]::SendEvent [23:1088:2360] 2025-03-31T15:55:29.296473Z node 23 :PIPE_SERVER DEBUG: [72075186224037895] Accept Connect Originator# [23:1088:2360] 2025-03-31T15:55:29.296746Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037895] connected with status OK role: Leader [23:1088:2360] 2025-03-31T15:55:29.296775Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037895] send queued [23:1088:2360] 2025-03-31T15:55:29.309759Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037896] ::Bootstrap [23:1092:2363] 2025-03-31T15:55:29.309813Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037896] lookup [23:1092:2363] 2025-03-31T15:55:29.315201Z node 23 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037896 entry.State: StInit ev: {EvForward TabletID: 72075186224037896 Ev: nullptr Flags: 1:2:0} 2025-03-31T15:55:29.315423Z node 23 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037896 Cookie: 0 ProxyOptions: SigNone} 2025-03-31T15:55:29.317826Z node 22 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037896 
Cookie: 0} 2025-03-31T15:55:29.318150Z node 22 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037896 Cookie: 1} 2025-03-31T15:55:29.318188Z node 22 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037896 Cookie: 2} 2025-03-31T15:55:29.319063Z node 23 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037896 CurrentLeader: [23:762:2196] CurrentLeaderTablet: [23:768:2199] CurrentGeneration: 1 CurrentStep: 0} 2025-03-31T15:55:29.319158Z node 23 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037896 CurrentLeader: [23:762:2196] CurrentLeaderTablet: [23:768:2199] CurrentGeneration: 1 CurrentStep: 0} 2025-03-31T15:55:29.319857Z node 23 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037896 entry.State: StInitResolve success: true ev: {EvInfo Status: 0 TabletID: 72075186224037896 Cookie: 0 CurrentLeader: [23:762:2196] CurrentLeaderTablet: [23:768:2199] CurrentGeneration: 1 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2025-03-31T15:55:29.319890Z node 23 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 72075186224037896 followers: 0 2025-03-31T15:55:29.319931Z node 23 :TABLET_RESOLVER DEBUG: SelectForward node 23 selfDC 2 leaderDC 2 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72075186224037896 followers: 0 countLeader 1 allowFollowers 0 winner: [23:762:2196] 2025-03-31T15:55:29.319995Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037896] forward result local node, try to connect [23:1092:2363] 2025-03-31T15:55:29.320245Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037896]::SendEvent [23:1092:2363] 2025-03-31T15:55:29.320330Z node 23 :PIPE_SERVER DEBUG: [72075186224037896] Accept Connect Originator# [23:1092:2363] 2025-03-31T15:55:29.320877Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037896] connected with status OK role: Leader [23:1092:2363] 2025-03-31T15:55:29.320907Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037896] send queued [23:1092:2363] 2025-03-31T15:55:29.330666Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037897] ::Bootstrap [23:1096:2366] 2025-03-31T15:55:29.330729Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037897] lookup [23:1096:2366] 2025-03-31T15:55:29.331296Z node 23 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037897 entry.State: StInit ev: {EvForward TabletID: 72075186224037897 Ev: nullptr Flags: 1:2:0} 2025-03-31T15:55:29.331474Z node 23 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037897 Cookie: 0 ProxyOptions: SigNone} 2025-03-31T15:55:29.340035Z node 22 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037897 Cookie: 0} 2025-03-31T15:55:29.340123Z node 22 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037897 Cookie: 1} 2025-03-31T15:55:29.340158Z node 22 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037897 Cookie: 2} 2025-03-31T15:55:29.342221Z node 23 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037897 CurrentLeader: [23:846:2220] CurrentLeaderTablet: [23:848:2221] CurrentGeneration: 2 CurrentStep: 0} 2025-03-31T15:55:29.342563Z node 23 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037897 CurrentLeader: [23:846:2220] CurrentLeaderTablet: [23:848:2221] CurrentGeneration: 2 CurrentStep: 0} 
2025-03-31T15:55:29.342660Z node 23 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037897 entry.State: StInitResolve success: true ev: {EvInfo Status: 0 TabletID: 72075186224037897 Cookie: 0 CurrentLeader: [23:846:2220] CurrentLeaderTablet: [23:848:2221] CurrentGeneration: 2 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2025-03-31T15:55:29.342922Z node 23 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 72075186224037897 followers: 0 2025-03-31T15:55:29.342965Z node 23 :TABLET_RESOLVER DEBUG: SelectForward node 23 selfDC 2 leaderDC 2 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72075186224037897 followers: 0 countLeader 1 allowFollowers 0 winner: [23:846:2220] 2025-03-31T15:55:29.343491Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037897] forward result local node, try to connect [23:1096:2366] 2025-03-31T15:55:29.343526Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037897]::SendEvent [23:1096:2366] 2025-03-31T15:55:29.344089Z node 23 :PIPE_SERVER DEBUG: [72075186224037897] Accept Connect Originator# [23:1096:2366] 2025-03-31T15:55:29.344199Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037897] connected with status OK role: Leader [23:1096:2366] 2025-03-31T15:55:29.344228Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037897] send queued [23:1096:2366] >> StoragePool::TestDistributionRandomMin7p [GOOD] >> StoragePool::TestDistributionRandomMin7pWithOverflow [GOOD] >> TopicAutoscaling::PartitionSplit_AutoscaleAwareSDK >> TModifyUserTest::ModifyUserIsEnabled [GOOD] >> TSubDomainTest::CreateDummyTabletsInDifferentDomains [GOOD] >> TSubDomainTest::CoordinatorRunAtSubdomainNodeWhenAvailable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::ReadNonExisting [GOOD] Test command err: 2025-03-31T15:55:25.584942Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvBoot 2025-03-31T15:55:25.586308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:55:25.586362Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:55:25.600697Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvRestored 2025-03-31T15:55:25.601198Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:131:2154] 2025-03-31T15:55:25.601514Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:55:25.617184Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T15:55:25.664008Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:55:25.664338Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T15:55:25.665898Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-03-31T15:55:25.665974Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-03-31T15:55:25.666028Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-03-31T15:55:25.666361Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T15:55:25.667078Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T15:55:25.667170Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:197:2154] in generation 2 2025-03-31T15:55:25.736810Z 
node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T15:55:25.774422Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-03-31T15:55:25.774604Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T15:55:25.774710Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-03-31T15:55:25.774744Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-03-31T15:55:25.774775Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-03-31T15:55:25.774828Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T15:55:25.775051Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T15:55:25.775113Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T15:55:25.775425Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-03-31T15:55:25.775537Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-03-31T15:55:25.775590Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-31T15:55:25.775643Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-03-31T15:55:25.775681Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-03-31T15:55:25.775712Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-31T15:55:25.775743Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-31T15:55:25.775771Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-03-31T15:55:25.775809Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T15:55:25.775907Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T15:55:25.775995Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T15:55:25.776049Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-03-31T15:55:25.778868Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:131:2154]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-03-31T15:55:25.778935Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T15:55:25.779025Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-31T15:55:25.779461Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-03-31T15:55:25.779518Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-03-31T15:55:25.779566Z node 1 :TX_DATASHARD DEBUG: Prepared scheme 
transaction txId 1 at tablet 9437184 2025-03-31T15:55:25.779601Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-31T15:55:25.779630Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-03-31T15:55:25.779663Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-03-31T15:55:25.779683Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-03-31T15:55:25.779911Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-03-31T15:55:25.779952Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-03-31T15:55:25.779983Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-03-31T15:55:25.780003Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-03-31T15:55:25.780031Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-03-31T15:55:25.780049Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-03-31T15:55:25.780071Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-03-31T15:55:25.780108Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-03-31T15:55:25.780127Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-03-31T15:55:25.793789Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-03-31T15:55:25.793875Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-03-31T15:55:25.793910Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-03-31T15:55:25.793962Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-03-31T15:55:25.794033Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-03-31T15:55:25.794502Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T15:55:25.794545Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T15:55:25.794584Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-03-31T15:55:25.794696Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:131:2154]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-03-31T15:55:25.794724Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-31T15:55:25.794879Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-03-31T15:55:25.794920Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-31T15:55:25.794952Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-03-31T15:55:25.794982Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-03-31T15:55:25.798176Z node 1 :TX_DATASHARD DEBUG: Planned 
transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-03-31T15:55:25.798233Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T15:55:25.798419Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T15:55:25.798457Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T15:55:25.798501Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-31T15:55:25.798527Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-03-31T15:55:25.798548Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-31T15:55:25.798579Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-03-31T15:55:25.798600Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-03-31T15:55:25.798730Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-31T15:55:25.798752Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-03-31T15:55:25.798781Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-03-31T15:55:25.798808Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-03-31T15:55:25.798925Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-03-31T15:55:25.798961Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-31T15:55:25.798994Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-03-31T15:55:25.799012Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-03-31T15:55:25.799025Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-03-31T15:55:25.799066Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-31T15:55:25.799133Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-03-31T15:55:25.799165Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-31T15:55:25.799193Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-03-31T15:55:25.799239Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-03-31T15:55:25.799263Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-03-31T15:55:25.799287Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-03-31T15:55:25.799312Z node 1 :TX_DATA ... 
tateInit, received event# 268828673, Sender [3:232:2227], Recipient [3:235:2228]: NKikimr::TEvTablet::TEvRestored 2025-03-31T15:55:33.574268Z node 3 :TX_DATASHARD TRACE: StateInit, received event# 268828684, Sender [3:232:2227], Recipient [3:235:2228]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T15:55:33.593819Z node 3 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [3:235:2228] 2025-03-31T15:55:33.594121Z node 3 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:55:33.604183Z node 3 :TX_DATASHARD DEBUG: TxInitSchema.Execute Persist Sys_SubDomainInfo 2025-03-31T15:55:33.652124Z node 3 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:55:33.652270Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T15:55:33.654511Z node 3 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-03-31T15:55:33.654613Z node 3 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-03-31T15:55:33.654682Z node 3 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-03-31T15:55:33.656256Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T15:55:33.656475Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T15:55:33.656560Z node 3 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [3:278:2228] in generation 3 2025-03-31T15:55:33.696182Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T15:55:33.696543Z node 3 :TX_DATASHARD INFO: Switched to work state Ready tabletId 9437184 2025-03-31T15:55:33.697102Z node 3 :TX_DATASHARD INFO: 9437184 Sending notify to schemeshard 4200 txId 1 state Ready TxInFly 0 2025-03-31T15:55:33.698143Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 9437184 mediators count is 0 coordinators count is 1 buckets per mediator 2 2025-03-31T15:55:33.703816Z node 3 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [3:283:2267] 2025-03-31T15:55:33.704182Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-03-31T15:55:33.704722Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 9437184 2025-03-31T15:55:33.704997Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T15:55:33.705985Z node 3 :TX_DATASHARD DEBUG: TxInitSchemaDefaults.Execute 2025-03-31T15:55:33.706658Z node 3 :TX_DATASHARD DEBUG: TxInitSchemaDefaults.Complete 2025-03-31T15:55:33.712535Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [3:235:2228], Recipient [3:235:2228]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T15:55:33.713072Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T15:55:33.714095Z node 3 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-03-31T15:55:33.714654Z node 3 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-03-31T15:55:33.720269Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 270270976, Sender [3:24:2071], Recipient [3:235:2228]: {TEvRegisterTabletResult TabletId# 9437184 Entry# 0} 2025-03-31T15:55:33.720324Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2025-03-31T15:55:33.720653Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 9437184 time 0 2025-03-31T15:55:33.720695Z node 3 :TX_DATASHARD DEBUG: 
[CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T15:55:33.720819Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 270270978, Sender [3:24:2071], Recipient [3:235:2228]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 0 ReadStep# 0 } 2025-03-31T15:55:33.721137Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2025-03-31T15:55:33.721435Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 9437184 coordinator 72057594046316545 last step 0 next step 0 2025-03-31T15:55:33.722368Z node 3 :FLAT_TX_SCHEMESHARD WARN: Got TEvDataShard::TEvSchemaChanged for unknown txId 1 message# Source { RawX1: 235 RawX2: 12884904116 } Origin: 9437184 State: 2 TxId: 1 Step: 0 Generation: 3 2025-03-31T15:55:33.722717Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-31T15:55:33.723022Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-03-31T15:55:33.727281Z node 3 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-03-31T15:55:33.727545Z node 3 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-31T15:55:33.727914Z node 3 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-31T15:55:33.728242Z node 3 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-03-31T15:55:33.728562Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T15:55:33.729505Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877760, Sender [3:281:2265], Recipient [3:235:2228]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 4200 Status: OK ServerId: [3:285:2269] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-03-31T15:55:33.729560Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-03-31T15:55:33.730189Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269552132, Sender [3:123:2149], Recipient [3:235:2228]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 1 2025-03-31T15:55:33.730747Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2025-03-31T15:55:33.731066Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 1 datashard 9437184 state Ready 2025-03-31T15:55:33.736291Z node 3 :TX_DATASHARD DEBUG: 9437184 Got TEvSchemaChangedResult from SS at 9437184 2025-03-31T15:55:33.757426Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [3:281:2265], Recipient [3:235:2228]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 4200 ClientId: [3:281:2265] ServerId: [3:285:2269] } 2025-03-31T15:55:33.757747Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-03-31T15:55:33.875738Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269551617, Sender [3:99:2134], Recipient [3:235:2228]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 99 RawX2: 12884904022 } 2025-03-31T15:55:33.876108Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvGetShardState 2025-03-31T15:55:33.877977Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [3:291:2273], Recipient [3:235:2228]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T15:55:33.878271Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T15:55:33.879078Z node 3 
:TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [3:289:2272], serverId# [3:291:2273], sessionId# [0:0:0] 2025-03-31T15:55:33.884422Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [3:99:2134], Recipient [3:235:2228]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 99 RawX2: 12884904022 } TxBody: "\032\365\001\037\004\0021\nvalue\005\205\n\205\002\207\205\002\207\203\001H\006\002\205\004\205\002?\006\002\205\000\034MyReads MyWrites\205\004\205\002?\006\002\206\202\024Reply\024Write?\014\205\002\206\203\010\002 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\010)\211\n?\006\203\005\004\200\205\002\203\004\006\213\002\203\004\203\004$SelectRow\000\003?\036 h\020\000\000\000\000\000\000\r\000\000\000\000\000\000\000?\004\005?\"\003? p\001\013?&\003?$T\001\003?(\000\037\002\000\005?\016\005?\n?8\000\005?\014\003\005?\024\005?\020?8\000\006\000?\022\003?>\005?\032\006\000?\030\001\037/ \0018\001" TxId: 2 ExecLevel: 0 Flags: 0 2025-03-31T15:55:33.884468Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T15:55:33.884825Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-31T15:55:33.894302Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit CheckDataTx 2025-03-31T15:55:33.895008Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed 2025-03-31T15:55:33.899804Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit CheckDataTx 2025-03-31T15:55:33.900116Z node 3 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-31T15:55:33.900154Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit BuildAndWaitDependencies 2025-03-31T15:55:33.900472Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-03-31T15:55:33.901028Z node 3 :TX_DATASHARD TRACE: Activated operation [0:2] at 9437184 2025-03-31T15:55:33.901072Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed 2025-03-31T15:55:33.901093Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit BuildAndWaitDependencies 2025-03-31T15:55:33.901113Z node 3 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit ExecuteDataTx 2025-03-31T15:55:33.901131Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2025-03-31T15:55:33.907980Z node 3 :TX_DATASHARD TRACE: Executed operation [0:2] at tablet 9437184 with status COMPLETE 2025-03-31T15:55:33.908874Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for [0:2] at 9437184: {NSelectRow: 1, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-03-31T15:55:33.909203Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed 2025-03-31T15:55:33.909234Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit ExecuteDataTx 2025-03-31T15:55:33.909262Z node 3 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit FinishPropose 2025-03-31T15:55:33.909292Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit FinishPropose 
2025-03-31T15:55:33.909335Z node 3 :TX_DATASHARD TRACE: Propose transaction complete txid 2 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-03-31T15:55:33.909398Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is DelayComplete 2025-03-31T15:55:33.909426Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit FinishPropose 2025-03-31T15:55:33.909470Z node 3 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit CompletedOperations 2025-03-31T15:55:33.909503Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit CompletedOperations 2025-03-31T15:55:33.909544Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed 2025-03-31T15:55:33.909578Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit CompletedOperations 2025-03-31T15:55:33.909603Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:2] at 9437184 has finished 2025-03-31T15:55:33.909669Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-03-31T15:55:33.910373Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:2] at 9437184 on unit FinishPropose 2025-03-31T15:55:33.910410Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 >> TTxDataShardMiniKQL::MemoryUsageMultiShard [GOOD] >> THiveTest::TestLockTabletExecutionLocalGone [GOOD] >> THiveTest::TestLocalRegistrationInSharedHive ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> StoragePool::TestDistributionRandomMin7pWithOverflow [GOOD] Test command err: Took 8.961013 seconds |66.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica |66.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica |66.8%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica >> TopicAutoscaling::ReadingAfterSplitTest_AutoscaleAwareSDK >> TopicAutoscaling::Simple_BeforeAutoscaleAwareSDK ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::MemoryUsageMultiShard [GOOD] Test command err: 2025-03-31T15:55:22.525064Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvBoot 2025-03-31T15:55:22.526303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:55:22.526354Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:55:22.534815Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvRestored 2025-03-31T15:55:22.535261Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:131:2154] 2025-03-31T15:55:22.535561Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:55:22.545865Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T15:55:22.591670Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:55:22.591940Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T15:55:22.593413Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-03-31T15:55:22.593482Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords 
at tablet: 9437184 2025-03-31T15:55:22.593538Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-03-31T15:55:22.593945Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T15:55:22.594628Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T15:55:22.594706Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:197:2154] in generation 2 2025-03-31T15:55:22.708238Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T15:55:22.821570Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-03-31T15:55:22.822490Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T15:55:22.823092Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-03-31T15:55:22.823537Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-03-31T15:55:22.824257Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-03-31T15:55:22.824509Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T15:55:22.826858Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T15:55:22.827127Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T15:55:22.828134Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-03-31T15:55:22.828467Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-03-31T15:55:22.828752Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-31T15:55:22.829791Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-03-31T15:55:22.830057Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-03-31T15:55:22.830320Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-31T15:55:22.830577Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-31T15:55:22.830832Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-03-31T15:55:22.831187Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T15:55:22.832412Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T15:55:22.832914Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T15:55:22.833197Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-03-31T15:55:22.847848Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:131:2154]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-03-31T15:55:22.848116Z node 1 :TX_DATASHARD TRACE: StateWork, 
processing event TEvDataShard::TEvProposeTransaction 2025-03-31T15:55:22.848903Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-31T15:55:22.849697Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-03-31T15:55:22.850383Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-03-31T15:55:22.850858Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-03-31T15:55:22.851076Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-31T15:55:22.851549Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-03-31T15:55:22.852051Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-03-31T15:55:22.852291Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-03-31T15:55:22.854903Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-03-31T15:55:22.855204Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-03-31T15:55:22.855659Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-03-31T15:55:22.856175Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-03-31T15:55:22.856213Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-03-31T15:55:22.856443Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-03-31T15:55:22.857166Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-03-31T15:55:22.857437Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-03-31T15:55:22.857459Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-03-31T15:55:22.885984Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-03-31T15:55:22.886254Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-03-31T15:55:22.886717Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-03-31T15:55:22.887188Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-03-31T15:55:22.887643Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-03-31T15:55:22.900277Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T15:55:22.900548Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T15:55:22.904528Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-03-31T15:55:22.906617Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:131:2154]: {TEvPlanStep step# 2 MediatorId# 0 TabletID 9437184} 2025-03-31T15:55:22.906660Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-31T15:55:22.907715Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 
9437184 on unit WaitForPlan 2025-03-31T15:55:22.907758Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-03-31T15:55:22.908530Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit WaitForPlan 2025-03-31T15:55:22.908795Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit PlanQueue 2025-03-31T15:55:22.931952Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 2 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 2 MediatorID: 0 TabletID: 9437184 } 2025-03-31T15:55:22.932204Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T15:55:22.934071Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T15:55:22.934313Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T15:55:22.934576Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-31T15:55:22.935021Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-03-31T15:55:22.935259Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-31T15:55:22.935465Z node 1 :TX_DATASHARD DEBUG: Found ready operation [2:1] in PlanQueue unit at 9437184 2025-03-31T15:55:22.935974Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit PlanQueue 2025-03-31T15:55:22.936643Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-03-31T15:55:22.936958Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit PlanQueue 2025-03-31T15:55:22.937929Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit LoadTxDetails 2025-03-31T15:55:22.938190Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit LoadTxDetails 2025-03-31T15:55:22.938943Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Restart 2025-03-31T15:55:22.938972Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 1 2025-03-31T15:55:22.939246Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-31T15:55:22.939277Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-31T15:55:22.939305Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-31T15:55:22.940570Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-31T15:55:22.940800Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit LoadTxDetails 2025-03-31T15:55:22.942235Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 2:1 keys extracted: 0 2025-03-31T15:55:22.942491Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-03-31T15:55:22.942703Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit LoadTxDetails 2025-03-31T15:55:22.943227Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-03-31T15:55:22.943616Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit ProtectSchemeEchoes 2025-03-31T15:55:22.943694Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-31T15:55:22.943884Z node 1 
:TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit ProtectSchemeEc ... 0534} resubmit=1) 2025-03-31T15:55:34.032685Z node 3 :RESOURCE_BROKER DEBUG: Assigning waiting task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437184 (3 by [3:257:2228]) to queue queue_transaction 2025-03-31T15:55:34.032752Z node 3 :RESOURCE_BROKER DEBUG: Allocate resources {0, 96990534} for task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437184 (3 by [3:257:2228]) from queue queue_transaction 2025-03-31T15:55:34.032807Z node 3 :RESOURCE_BROKER DEBUG: Assigning in-fly task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437184 (3 by [3:257:2228]) to queue queue_transaction 2025-03-31T15:55:34.032847Z node 3 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_transaction from 1.881864 to 18.818640 (insert task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437184 (3 by [3:257:2228])) 2025-03-31T15:55:34.032954Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} acquired dyn mem Res{3 96990534b}, Memory{0 dyn 96990534} 2025-03-31T15:55:34.033012Z node 3 :RESOURCE_BROKER DEBUG: Update task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437185 (3 by [3:369:2314]) (priority=5 type=transaction resources={0, 96990534} resubmit=1) 2025-03-31T15:55:34.033038Z node 3 :RESOURCE_BROKER DEBUG: Assigning waiting task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437185 (3 by [3:369:2314]) to queue queue_transaction 2025-03-31T15:55:34.033068Z node 3 :RESOURCE_BROKER DEBUG: Allocate resources {0, 96990534} for task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437185 (3 by [3:369:2314]) from queue queue_transaction 2025-03-31T15:55:34.033092Z node 3 :RESOURCE_BROKER DEBUG: Assigning in-fly task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437185 (3 by [3:369:2314]) to queue queue_transaction 2025-03-31T15:55:34.033149Z node 3 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_transaction from 16.936776 to 33.873553 (insert task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437185 (3 by [3:369:2314])) 2025-03-31T15:55:34.033208Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437185:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} acquired dyn mem Res{3 96990534b}, Memory{0 dyn 96990534} 2025-03-31T15:55:34.033246Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-31T15:55:34.033279Z node 3 :TX_DATASHARD TRACE: Trying to execute [6:5] at 9437184 on unit ExecuteDataTx 2025-03-31T15:55:34.034139Z node 3 :TX_DATASHARD DEBUG: tx 5 at 9437184 restored its data 2025-03-31T15:55:34.485760Z node 3 :TX_DATASHARD TRACE: Executed operation [6:5] at tablet 9437184 with status COMPLETE 2025-03-31T15:55:34.485857Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for [6:5] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 2, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 22, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-03-31T15:55:34.485912Z node 3 :TX_DATASHARD TRACE: Execution status for [6:5] at 9437184 is ExecutedNoMoreRestarts 2025-03-31T15:55:34.485940Z node 3 :TX_DATASHARD TRACE: Advance execution plan for 
[6:5] at 9437184 executing on unit ExecuteDataTx 2025-03-31T15:55:34.485966Z node 3 :TX_DATASHARD TRACE: Add [6:5] at 9437184 to execution unit CompleteOperation 2025-03-31T15:55:34.485992Z node 3 :TX_DATASHARD TRACE: Trying to execute [6:5] at 9437184 on unit CompleteOperation 2025-03-31T15:55:34.486191Z node 3 :TX_DATASHARD TRACE: Execution status for [6:5] at 9437184 is DelayComplete 2025-03-31T15:55:34.486213Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [6:5] at 9437184 executing on unit CompleteOperation 2025-03-31T15:55:34.486245Z node 3 :TX_DATASHARD TRACE: Add [6:5] at 9437184 to execution unit CompletedOperations 2025-03-31T15:55:34.486268Z node 3 :TX_DATASHARD TRACE: Trying to execute [6:5] at 9437184 on unit CompletedOperations 2025-03-31T15:55:34.486304Z node 3 :TX_DATASHARD TRACE: Execution status for [6:5] at 9437184 is Executed 2025-03-31T15:55:34.486325Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [6:5] at 9437184 executing on unit CompletedOperations 2025-03-31T15:55:34.486346Z node 3 :TX_DATASHARD TRACE: Execution plan for [6:5] at 9437184 has finished 2025-03-31T15:55:34.486374Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T15:55:34.486395Z node 3 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-31T15:55:34.486422Z node 3 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-31T15:55:34.486446Z node 3 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-31T15:55:34.486545Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} hope 5 -> done Change{16, redo 636b alter 0b annex 0, ~{ 1001, 1, 3, 4, 12, 7, 8, 5 } -{ }, 0 gb} 2025-03-31T15:55:34.486599Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} release Res{3 96990534b}, Memory{0 dyn 0} 2025-03-31T15:55:34.486782Z node 3 :RESOURCE_BROKER DEBUG: Finish task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437184 (3 by [3:257:2228]) (release resources {0, 96990534}) 2025-03-31T15:55:34.486842Z node 3 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_transaction from 33.873553 to 16.936776 (remove task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437184 (3 by [3:257:2228])) 2025-03-31T15:55:34.486943Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437185 2025-03-31T15:55:34.486975Z node 3 :TX_DATASHARD TRACE: Trying to execute [6:5] at 9437185 on unit ExecuteDataTx 2025-03-31T15:55:34.490713Z node 3 :TX_DATASHARD DEBUG: tx 5 at 9437185 restored its data 2025-03-31T15:55:35.129615Z node 3 :TX_DATASHARD TRACE: Executed operation [6:5] at tablet 9437185 with status COMPLETE 2025-03-31T15:55:35.129692Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for [6:5] at 9437185: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 2, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 22, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-03-31T15:55:35.129743Z node 3 :TX_DATASHARD TRACE: Execution status for [6:5] at 9437185 is ExecutedNoMoreRestarts 2025-03-31T15:55:35.129770Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [6:5] at 9437185 executing on unit ExecuteDataTx 2025-03-31T15:55:35.129795Z node 3 :TX_DATASHARD TRACE: Add [6:5] at 9437185 to 
execution unit CompleteOperation 2025-03-31T15:55:35.129833Z node 3 :TX_DATASHARD TRACE: Trying to execute [6:5] at 9437185 on unit CompleteOperation 2025-03-31T15:55:35.130053Z node 3 :TX_DATASHARD TRACE: Execution status for [6:5] at 9437185 is DelayComplete 2025-03-31T15:55:35.130081Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [6:5] at 9437185 executing on unit CompleteOperation 2025-03-31T15:55:35.130105Z node 3 :TX_DATASHARD TRACE: Add [6:5] at 9437185 to execution unit CompletedOperations 2025-03-31T15:55:35.130128Z node 3 :TX_DATASHARD TRACE: Trying to execute [6:5] at 9437185 on unit CompletedOperations 2025-03-31T15:55:35.130171Z node 3 :TX_DATASHARD TRACE: Execution status for [6:5] at 9437185 is Executed 2025-03-31T15:55:35.130201Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [6:5] at 9437185 executing on unit CompletedOperations 2025-03-31T15:55:35.130230Z node 3 :TX_DATASHARD TRACE: Execution plan for [6:5] at 9437185 has finished 2025-03-31T15:55:35.130256Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T15:55:35.130277Z node 3 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437185 2025-03-31T15:55:35.130300Z node 3 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437185 has no attached operations 2025-03-31T15:55:35.130321Z node 3 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437185 2025-03-31T15:55:35.130416Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437185:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} hope 5 -> done Change{16, redo 636b alter 0b annex 0, ~{ 1001, 1, 3, 4, 12, 7, 8, 5 } -{ }, 0 gb} 2025-03-31T15:55:35.130467Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437185:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} release Res{3 96990534b}, Memory{0 dyn 0} 2025-03-31T15:55:35.130606Z node 3 :RESOURCE_BROKER DEBUG: Finish task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437185 (3 by [3:369:2314]) (release resources {0, 96990534}) 2025-03-31T15:55:35.130661Z node 3 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_transaction from 16.936776 to 0.000000 (remove task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437185 (3 by [3:369:2314])) 2025-03-31T15:55:35.149878Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437185:3:10} commited cookie 1 for step 9 2025-03-31T15:55:35.150804Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437185 2025-03-31T15:55:35.151059Z node 3 :TX_DATASHARD TRACE: Complete execution for [6:5] at 9437185 on unit CompleteOperation 2025-03-31T15:55:35.151119Z node 3 :TX_DATASHARD DEBUG: Complete [6 : 5] from 9437185 at tablet 9437185 send result to client [3:99:2134], exec latency: 2 ms, propose latency: 4 ms 2025-03-31T15:55:35.151368Z node 3 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437185 {TEvReadSet step# 6 txid# 5 TabletSource# 9437186 TabletDest# 9437185 SetTabletConsumer# 9437185 Flags# 0 Seqno# 2} 2025-03-31T15:55:35.151894Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 2025-03-31T15:55:35.152598Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:10} commited cookie 1 for step 9 2025-03-31T15:55:35.152625Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T15:55:35.152647Z node 3 :TX_DATASHARD TRACE: Complete execution for [6:5] at 9437184 on unit CompleteOperation 2025-03-31T15:55:35.152675Z node 3 :TX_DATASHARD DEBUG: 
Complete [6 : 5] from 9437184 at tablet 9437184 send result to client [3:99:2134], exec latency: 2 ms, propose latency: 4 ms 2025-03-31T15:55:35.152705Z node 3 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 6 txid# 5 TabletSource# 9437186 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 1} 2025-03-31T15:55:35.152965Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T15:55:35.153365Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [3:347:2314], Recipient [3:458:2400]: {TEvReadSet step# 6 txid# 5 TabletSource# 9437186 TabletDest# 9437185 SetTabletConsumer# 9437185 Flags# 0 Seqno# 2} 2025-03-31T15:55:35.153398Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T15:55:35.153696Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 9437186 source 9437186 dest 9437185 consumer 9437185 txId 5 2025-03-31T15:55:35.154507Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [3:235:2228], Recipient [3:458:2400]: {TEvReadSet step# 6 txid# 5 TabletSource# 9437186 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 1} 2025-03-31T15:55:35.154533Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T15:55:35.154551Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 9437186 source 9437186 dest 9437184 consumer 9437184 txId 5 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TModifyUserTest::ModifyUserIsEnabled [GOOD] Test command err: 2025-03-31T15:55:13.849906Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488002809066899647:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:55:13.849977Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ade/r3tmp/tmpwpdpLT/pdisk_1.dat 2025-03-31T15:55:14.296915Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:55:14.302323Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:55:14.302409Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:55:14.305315Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:63238 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-03-31T15:55:14.482395Z node 1 :TX_PROXY DEBUG: actor# [1:7488002809066899898:2103] Handle TEvNavigate describe path dc-1 2025-03-31T15:55:14.482455Z node 1 :TX_PROXY DEBUG: Actor# [1:7488002813361867474:2257] HANDLE EvNavigateScheme dc-1 2025-03-31T15:55:14.482571Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7488002813361867241:2123], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T15:55:14.482621Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7488002813361867241:2123], path# /dc-1, domainOwnerId# 72057594046644480 2025-03-31T15:55:14.482798Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002813361867475:2258][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-03-31T15:55:14.484939Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488002809066899594:2049] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7488002813361867479:2258] 2025-03-31T15:55:14.484992Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7488002809066899594:2049] Subscribe: subscriber# [1:7488002813361867479:2258], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-31T15:55:14.485061Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488002809066899600:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7488002813361867481:2258] 2025-03-31T15:55:14.485087Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7488002809066899600:2055] Subscribe: subscriber# [1:7488002813361867481:2258], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-31T15:55:14.485145Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488002813361867479:2258][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7488002809066899594:2049] 2025-03-31T15:55:14.485224Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488002813361867481:2258][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7488002809066899600:2055] 2025-03-31T15:55:14.485273Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002813361867475:2258][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7488002813361867476:2258] 2025-03-31T15:55:14.485348Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002813361867475:2258][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7488002813361867478:2258] 2025-03-31T15:55:14.485429Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7488002813361867475:2258][/dc-1] Set up state: owner# [1:7488002813361867241:2123], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-03-31T15:55:14.485558Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488002813361867479:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# 
[1:7488002813361867476:2258], cookie# 1 2025-03-31T15:55:14.485580Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488002813361867480:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488002813361867477:2258], cookie# 1 2025-03-31T15:55:14.485593Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488002813361867481:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488002813361867478:2258], cookie# 1 2025-03-31T15:55:14.485627Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488002809066899594:2049] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7488002813361867479:2258] 2025-03-31T15:55:14.485653Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488002809066899594:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488002813361867479:2258], cookie# 1 2025-03-31T15:55:14.485675Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488002809066899600:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7488002813361867481:2258] 2025-03-31T15:55:14.485687Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488002809066899600:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488002813361867481:2258], cookie# 1 2025-03-31T15:55:14.486315Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488002809066899597:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7488002813361867480:2258] 2025-03-31T15:55:14.486381Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7488002809066899597:2052] Subscribe: subscriber# [1:7488002813361867480:2258], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-31T15:55:14.486441Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488002809066899597:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488002813361867480:2258], cookie# 1 2025-03-31T15:55:14.486513Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488002813361867479:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488002809066899594:2049], cookie# 1 2025-03-31T15:55:14.486545Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488002813361867481:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488002809066899600:2055], cookie# 1 2025-03-31T15:55:14.486574Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488002813361867480:2258][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7488002809066899597:2052] 2025-03-31T15:55:14.486603Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488002813361867480:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488002809066899597:2052], cookie# 1 2025-03-31T15:55:14.486647Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002813361867475:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488002813361867476:2258], cookie# 1 2025-03-31T15:55:14.486691Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002813361867475:2258][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-03-31T15:55:14.486731Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002813361867475:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488002813361867478:2258], cookie# 1 
2025-03-31T15:55:14.486750Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002813361867475:2258][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-03-31T15:55:14.487669Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002813361867475:2258][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7488002813361867477:2258] 2025-03-31T15:55:14.487740Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7488002813361867475:2258][/dc-1] Path was already updated: owner# [1:7488002813361867241:2123], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-03-31T15:55:14.487766Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002813361867475:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488002813361867477:2258], cookie# 1 2025-03-31T15:55:14.487778Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002813361867475:2258][/dc-1] Unexpected sync response: sender# [1:7488002813361867477:2258], cookie# 1 2025-03-31T15:55:14.487799Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488002809066899597:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7488002813361867480:2258] 2025-03-31T15:55:14.538910Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7488002813361867241:2123], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-03-31T15:55:14.539354Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7488002813361867241:2123], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 
1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVer ... Path: /dc-1 PathId: Partial: 0 } 2025-03-31T15:55:31.093496Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [3:7488002866740800399:2114], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [3:7488002871035767965:2262] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 9 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1743436528974 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-31T15:55:31.093834Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7488002866740800399:2114], cacheItem# { Subscriber: { Subscriber: [3:7488002871035767965:2262] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 9 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1743436528974 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 9 IsSync: true Partial: 0 } 2025-03-31T15:55:31.093952Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7488002883920669956:2341], recipient# [3:7488002883920669955:2340], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [{ Sid: user2 },{ Sid: user1 }] Groups: [] } }] } 2025-03-31T15:55:31.093977Z node 3 :TX_PROXY DEBUG: Actor# [3:7488002883920669955:2340] txid# 281474976710662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T15:55:31.094006Z node 3 :TX_PROXY ERROR: Actor# [3:7488002883920669955:2340] txid# 281474976710662, Access denied for user2 on path /dc-1, with access AlterSchema 2025-03-31T15:55:31.094066Z node 3 :TX_PROXY ERROR: Actor# [3:7488002883920669955:2340] txid# 281474976710662, issues: { message: "Access denied for user2 on path /dc-1" issue_code: 200000 severity: 1 } 2025-03-31T15:55:31.094088Z node 3 :TX_PROXY DEBUG: Actor# [3:7488002883920669955:2340] txid# 281474976710662 SEND to# [3:7488002883920669954:2339] Source {TEvProposeTransactionStatus Status# 5} 2025-03-31T15:55:31.098979Z node 3 :TX_PROXY DEBUG: actor# [3:7488002866740800352:2089] Handle TEvProposeTransaction 2025-03-31T15:55:31.099258Z node 3 :TX_PROXY DEBUG: actor# [3:7488002866740800352:2089] TxId# 281474976710663 ProcessProposeTransaction 2025-03-31T15:55:31.099292Z node 3 :TX_PROXY DEBUG: actor# [3:7488002866740800352:2089] Cookie# 
0 userReqId# "" txid# 281474976710663 SEND to# [3:7488002883920669958:2343] 2025-03-31T15:55:31.101503Z node 3 :TX_PROXY DEBUG: Actor# [3:7488002883920669958:2343] txid# 281474976710663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { ModifyUser { User: "user2" Password: "password" CanLogin: false } } } } UserToken: "\n\005user2\022\030\022\026\n\024all-users@well-known\032\322\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzQ3OTczMCwiaWF0IjoxNzQzNDM2NTMwLCJzdWIiOiJ1c2VyMiJ9.TskZfAV87kHhcWQcxMlQZyCHRMOJPCaCCOSpyaT15N54fH_RDOU1JmH1RIrfx0kP66uvnrMEcr6CxTqBZyAHVCZ4Qxm6H0Jb4vigt-Yerf2mkY2YtL_Jf-k_OVHQ52UrCeD_7Ibd-nv0ylKHKdS87ec4aTDVYUwHuvlLLSCyou9ia-aBD3oLp_ufVmMbhulO1jJ2Vectp64lAJjbpHTUk8UezbybVQVwtefDG37zWc_u1m6M05klp1-Tv_31yhi1Q6JZTh-W9AQVR7v1etW1nw8VT-2mkGNUU_MpjOi4VSa3fvosgh9-6W2zGQ3BcuGqJSelwD38shs1VP6HH6T0Gg\"\005Login*~eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzQ3OTczMCwiaWF0IjoxNzQzNDM2NTMwLCJzdWIiOiJ1c2VyMiJ9.**" PeerName: "" 2025-03-31T15:55:31.101555Z node 3 :TX_PROXY DEBUG: Actor# [3:7488002883920669958:2343] txid# 281474976710663 Bootstrap, UserSID: user2 CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T15:55:31.101567Z node 3 :TX_PROXY DEBUG: Actor# [3:7488002883920669958:2343] txid# 281474976710663 Bootstrap, UserSID: user2 IsClusterAdministrator: 1 2025-03-31T15:55:31.101899Z node 3 :TX_PROXY DEBUG: Actor# [3:7488002883920669958:2343] txid# 281474976710663 TEvNavigateKeySet requested from SchemeCache 2025-03-31T15:55:31.101963Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7488002866740800399:2114], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T15:55:31.102337Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7488002871035767965:2262][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [3:7488002866740800399:2114], cookie# 10 2025-03-31T15:55:31.102383Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7488002871035767969:2262][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [3:7488002871035767966:2262], cookie# 10 2025-03-31T15:55:31.102396Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7488002871035767970:2262][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [3:7488002871035767967:2262], cookie# 10 2025-03-31T15:55:31.102409Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7488002871035767971:2262][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [3:7488002871035767968:2262], cookie# 10 2025-03-31T15:55:31.102428Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7488002866740800088:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [3:7488002871035767969:2262], cookie# 10 2025-03-31T15:55:31.102449Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7488002866740800091:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [3:7488002871035767970:2262], cookie# 10 2025-03-31T15:55:31.102462Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7488002866740800094:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [3:7488002871035767971:2262], cookie# 
10 2025-03-31T15:55:31.102484Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7488002871035767969:2262][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 8 Partial: 0 }: sender# [3:7488002866740800088:2049], cookie# 10 2025-03-31T15:55:31.102496Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7488002871035767970:2262][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 8 Partial: 0 }: sender# [3:7488002866740800091:2052], cookie# 10 2025-03-31T15:55:31.102507Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7488002871035767971:2262][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 8 Partial: 0 }: sender# [3:7488002866740800094:2055], cookie# 10 2025-03-31T15:55:31.102531Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7488002871035767965:2262][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 8 Partial: 0 }: sender# [3:7488002871035767966:2262], cookie# 10 2025-03-31T15:55:31.102548Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7488002871035767965:2262][/dc-1] Sync is in progress: cookie# 10, size# 3, half# 1, successes# 1, faulires# 0 2025-03-31T15:55:31.102824Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7488002871035767965:2262][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 8 Partial: 0 }: sender# [3:7488002871035767967:2262], cookie# 10 2025-03-31T15:55:31.102838Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7488002871035767965:2262][/dc-1] Sync is done: cookie# 10, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-03-31T15:55:31.102853Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7488002871035767965:2262][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 8 Partial: 0 }: sender# [3:7488002871035767968:2262], cookie# 10 2025-03-31T15:55:31.102864Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7488002871035767965:2262][/dc-1] Unexpected sync response: sender# [3:7488002871035767968:2262], cookie# 10 2025-03-31T15:55:31.102892Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [3:7488002866740800399:2114], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-03-31T15:55:31.107648Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [3:7488002866740800399:2114], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [3:7488002871035767965:2262] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 10 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1743436528974 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-31T15:55:31.107731Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7488002866740800399:2114], cacheItem# { Subscriber: { Subscriber: [3:7488002871035767965:2262] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 10 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1743436528974 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 10 IsSync: 
true Partial: 0 } 2025-03-31T15:55:31.108466Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7488002883920669959:2344], recipient# [3:7488002883920669958:2343], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [{ Sid: user2 },{ Sid: user1 }] Groups: [] } }] } 2025-03-31T15:55:31.108499Z node 3 :TX_PROXY DEBUG: Actor# [3:7488002883920669958:2343] txid# 281474976710663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T15:55:31.109041Z node 3 :TX_PROXY ERROR: Actor# [3:7488002883920669958:2343] txid# 281474976710663, Access denied for user2 on path /dc-1, with access AlterSchema 2025-03-31T15:55:31.109111Z node 3 :TX_PROXY ERROR: Actor# [3:7488002883920669958:2343] txid# 281474976710663, issues: { message: "Access denied for user2 on path /dc-1" issue_code: 200000 severity: 1 } 2025-03-31T15:55:31.109136Z node 3 :TX_PROXY DEBUG: Actor# [3:7488002883920669958:2343] txid# 281474976710663 SEND to# [3:7488002883920669957:2342] Source {TEvProposeTransactionStatus Status# 5} >> THiveTest::TestFollowersCrossDC_MovingLeader [GOOD] >> THiveTest::TestFollowersCrossDC_KillingHiveAndFollower >> TopicAutoscaling::ReadingAfterSplitTest_BeforeAutoscaleAwareSDK |66.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup |66.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup |66.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup >> THiveTest::TestLocalRegistrationInSharedHive [GOOD] >> TSubDomainTest::CheckAccessCopyTable [GOOD] >> TSubDomainTest::ConsistentCopyTable >> Balancing::Balancing_OneTopic_TopicApi >> TSubDomainTest::StartTenanNodeAndStopAtDestructor [GOOD] >> TSubDomainTest::CreateTabletForUnknownDomain [GOOD] >> TSubDomainTest::DatashardNotRunAtAllWhenSubDomainNodesIsStopped >> KqpProxy::NoLocalSessionExecution >> TopicAutoscaling::PartitionSplit_BeforeAutoscaleAwareSDK ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THiveTest::TestLocalRegistrationInSharedHive [GOOD] Test command err: 2025-03-31T15:54:05.750333Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-03-31T15:54:05.807590Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-03-31T15:54:05.810887Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:196} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# 
DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-03-31T15:54:05.814308Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-03-31T15:54:05.822401Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:265} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2025-03-31T15:54:05.822682Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-03-31T15:54:05.828325Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:30:2075] ControllerId# 72057594037932033 2025-03-31T15:54:05.828536Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-03-31T15:54:05.829324Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295 2025-03-31T15:54:05.836091Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler 2025-03-31T15:54:06.065566Z node 1 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-03-31T15:54:06.065816Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-03-31T15:54:06.126234Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:37:2080] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:06.127309Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:38:2081] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:06.127614Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:39:2082] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:06.128644Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:40:2083] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:06.129325Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:41:2084] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:06.130136Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:42:2085] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:06.140321Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:43:2086] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:06.140374Z node 1 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-03-31T15:54:06.140781Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [1:30:2075] 2025-03-31T15:54:06.140966Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [1:30:2075] 2025-03-31T15:54:06.141372Z node 1 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-03-31T15:54:06.141592Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-03-31T15:54:06.144448Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-03-31T15:54:06.145439Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-03-31T15:54:06.202820Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] ::Bootstrap [1:20:2063] 2025-03-31T15:54:06.202864Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] lookup [1:20:2063] 2025-03-31T15:54:06.806941Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:30:2075] 2025-03-31T15:54:06.808452Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit 
event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-03-31T15:54:06.808857Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-03-31T15:54:06.809103Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2025-03-31T15:54:06.826707Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037936129 entry.State: StInit ev: {EvForward TabletID: 72057594037936129 Ev: nullptr Flags: 1:2:0} 2025-03-31T15:54:06.833318Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] queue send [1:20:2063] 2025-03-31T15:54:06.833393Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:30:2075] 2025-03-31T15:54:06.833609Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-03-31T15:54:06.833829Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-03-31T15:54:06.883463Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-03-31T15:54:06.884421Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2025-03-31T15:54:06.884631Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-03-31T15:54:06.885186Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2025-03-31T15:54:06.889029Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-03-31T15:54:06.890803Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-03-31T15:54:06.895159Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 268639248 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2025-03-31T15:54:06.895558Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-03-31T15:54:06.896138Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [1:50:2091] 2025-03-31T15:54:06.896175Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [1:50:2091] 2025-03-31T15:54:06.903586Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2025-03-31T15:54:06.903649Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [1:50:2091] 2025-03-31T15:54:06.903875Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-03-31T15:54:06.904509Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StInit ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-03-31T15:54:06.904573Z node 1 :STATESTORAGE DEBUG: 
ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-03-31T15:54:06.905538Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72057594037932033 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72057594037932033 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2025-03-31T15:54:06.905742Z node 1 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72057594037932033 followers: 0 2025-03-31T15:54:06.906929Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] queue send [1:20:2063] 2025-03-31T15:54:06.906973Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] forward result error, check reconnect [1:30:2075] 2025-03-31T15:54:06.907529Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] schedule retry [1:30:2075] 2025-03-31T15:54:06.950839Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037936129 Cookie: 0 ProxyOptions: SigNone} 2025-03-31T15:54:06.951294Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 2} 2025-03-31T15:54:06.966413Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 0} 2025-03-31T15:54:06.966870Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 1} 2025-03-31T15:54:06.978689Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2025-03-31T15:54:06.980029Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:30:2075] 2025-03-31T15:54:06.981750Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037927937 Cookie: 0 ProxyOptions: SigNone} 2025-03-31T15:54:06.981993Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 2} 2025-03-31T15:54:06.982024Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2025-03-31T15:54:06.987755Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 0} 2025-03-31T15:54:06.988102Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 1} 2025-03-31T15:54:06.989118Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2025-03-31T15:54:06.989326Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72057594037936129 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72057594037936129 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2025-03-31T15:54:06.989356Z node 1 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72057594037936129 followers: 0 2025-03-31T15:54:06.989418Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] forward result error, check reconnect [1:20:2063] 2025-03-31T15:54:06.989593Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] connect failed [1:20:2063] 2025-03-31T15:54:06.990291Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037927937} 2025-03-31T15:54:06.990739Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtained# false 
PendingEvents.size# 2 2025-03-31T15:54:06.990761Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-03-31T15:54:06.995171Z node 1 :BS_NODE DEBUG: {NWDC18@distconf_binding.cpp:322} UpdateBound RefererNodeId# 1 NodeId# ::1:12001/1 Meta# {Fingerprint: "\371$\224\316I\335\243.)W\014\261m\013\346Osy\0160" } 2025-03-31T15:54:06.995738Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037927937} 2025-03-31T15:54:06.996784Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForI ... 92Z node 34 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594046678944 Cookie: 1} 2025-03-31T15:55:37.860432Z node 34 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594046678944 Cookie: 2} 2025-03-31T15:55:37.860638Z node 35 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72057594046678944 CurrentLeader: [34:326:2265] CurrentLeaderTablet: [34:342:2274] CurrentGeneration: 2 CurrentStep: 0} 2025-03-31T15:55:37.860736Z node 35 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72057594046678944 CurrentLeader: [34:326:2265] CurrentLeaderTablet: [34:342:2274] CurrentGeneration: 2 CurrentStep: 0} 2025-03-31T15:55:37.860857Z node 35 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72057594046678944 entry.State: StInitResolve success: true ev: {EvInfo Status: 0 TabletID: 72057594046678944 Cookie: 0 CurrentLeader: [34:326:2265] CurrentLeaderTablet: [34:342:2274] CurrentGeneration: 2 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2025-03-31T15:55:37.860922Z node 35 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 72057594046678944 followers: 0 2025-03-31T15:55:37.860988Z node 35 :TABLET_RESOLVER DEBUG: SelectForward node 35 selfDC 2 leaderDC 1 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72057594046678944 followers: 0 countLeader 1 allowFollowers 0 winner: [34:326:2265] 2025-03-31T15:55:37.861107Z node 35 :PIPE_CLIENT DEBUG: TClient[72057594046678944] forward result remote node 34 [35:550:2090] 2025-03-31T15:55:37.861208Z node 35 :PIPE_CLIENT DEBUG: TClient[72057594046678944] remote node connected [35:550:2090] 2025-03-31T15:55:37.861274Z node 35 :PIPE_CLIENT DEBUG: TClient[72057594046678944]::SendEvent [35:550:2090] 2025-03-31T15:55:37.861464Z node 34 :PIPE_SERVER DEBUG: [72057594046678944] Accept Connect Originator# [35:550:2090] 2025-03-31T15:55:37.861706Z node 35 :PIPE_CLIENT DEBUG: TClient[72057594046678944] connected with status OK role: Leader [35:550:2090] 2025-03-31T15:55:37.861771Z node 35 :PIPE_CLIENT DEBUG: TClient[72057594046678944] send queued [35:550:2090] 2025-03-31T15:55:37.861862Z node 35 :PIPE_CLIENT DEBUG: TClient[72057594046678944] send [35:550:2090] 2025-03-31T15:55:37.861887Z node 35 :PIPE_CLIENT DEBUG: TClient[72057594046678944] push event to server [35:550:2090] 2025-03-31T15:55:37.861937Z node 35 :PIPE_CLIENT DEBUG: TClient[72057594046678944]::SendEvent [35:550:2090] 2025-03-31T15:55:37.862046Z node 34 :PIPE_SERVER DEBUG: [72057594046678944] Push Sender# [35:549:2090] EventType# 271122945 2025-03-31T15:55:37.862169Z node 34 :TABLET_EXECUTOR DEBUG: Leader{72057594046678944:2:12} Tx{16, NKikimr::NSchemeShard::TSchemeShard::TTxDescribeScheme} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxDescribeScheme 2025-03-31T15:55:37.862240Z node 34 :TABLET_EXECUTOR DEBUG: Leader{72057594046678944:2:12} Tx{16, 
NKikimr::NSchemeShard::TSchemeShard::TTxDescribeScheme} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-31T15:55:37.862481Z node 34 :TABLET_EXECUTOR DEBUG: Leader{72057594046678944:2:12} Tx{16, NKikimr::NSchemeShard::TSchemeShard::TTxDescribeScheme} hope 1 -> done Change{11, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-03-31T15:55:37.862557Z node 34 :TABLET_EXECUTOR DEBUG: Leader{72057594046678944:2:12} Tx{16, NKikimr::NSchemeShard::TSchemeShard::TTxDescribeScheme} release 4194304b of static, Memory{0 dyn 0} 2025-03-31T15:55:37.864027Z node 35 :PIPE_CLIENT DEBUG: TClient[72075186224037888] ::Bootstrap [35:557:2092] 2025-03-31T15:55:37.864072Z node 35 :PIPE_CLIENT DEBUG: TClient[72075186224037888] lookup [35:557:2092] 2025-03-31T15:55:37.864348Z node 35 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037888 entry.State: StInit ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 1:2:0} 2025-03-31T15:55:37.864501Z node 35 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [35:556:2091] 2025-03-31T15:55:37.864527Z node 35 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [35:556:2091] 2025-03-31T15:55:37.864602Z node 35 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-03-31T15:55:37.864659Z node 35 :TABLET_RESOLVER DEBUG: SelectForward node 35 selfDC 2 leaderDC 1 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [34:325:2264] 2025-03-31T15:55:37.864754Z node 35 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [35:556:2091] 2025-03-31T15:55:37.864811Z node 35 :PIPE_CLIENT DEBUG: TClient[72075186224037888] queue send [35:557:2092] 2025-03-31T15:55:37.864946Z node 35 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2025-03-31T15:55:37.865194Z node 35 :PIPE_CLIENT DEBUG: TClient[72057594037927937] forward result remote node 34 [35:556:2091] 2025-03-31T15:55:37.865717Z node 34 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2025-03-31T15:55:37.865764Z node 34 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2025-03-31T15:55:37.865796Z node 34 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2025-03-31T15:55:37.865871Z node 35 :PIPE_CLIENT DEBUG: TClient[72057594037927937] remote node connected [35:556:2091] 2025-03-31T15:55:37.865917Z node 35 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [35:556:2091] 2025-03-31T15:55:37.866257Z node 34 :PIPE_SERVER DEBUG: [72057594037927937] Accept Connect Originator# [35:556:2091] 2025-03-31T15:55:37.866379Z node 35 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 CurrentLeader: [34:463:2365] CurrentLeaderTablet: [34:478:2377] CurrentGeneration: 1 CurrentStep: 0} 2025-03-31T15:55:37.866489Z node 35 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 CurrentLeader: [34:463:2365] CurrentLeaderTablet: [34:478:2377] CurrentGeneration: 1 CurrentStep: 0} 2025-03-31T15:55:37.866600Z node 35 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037888 entry.State: StInitResolve success: true ev: {EvInfo Status: 0 TabletID: 72075186224037888 Cookie: 0 CurrentLeader: [34:463:2365] 
CurrentLeaderTablet: [34:478:2377] CurrentGeneration: 1 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2025-03-31T15:55:37.866635Z node 35 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 72075186224037888 followers: 0 2025-03-31T15:55:37.866692Z node 35 :TABLET_RESOLVER DEBUG: SelectForward node 35 selfDC 2 leaderDC 1 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037888 followers: 0 countLeader 1 allowFollowers 0 winner: [34:463:2365] 2025-03-31T15:55:37.866762Z node 35 :PIPE_CLIENT DEBUG: TClient[72075186224037888] forward result remote node 34 [35:557:2092] 2025-03-31T15:55:37.866935Z node 35 :PIPE_CLIENT DEBUG: TClient[72075186224037888] remote node connected [35:557:2092] 2025-03-31T15:55:37.866971Z node 35 :PIPE_CLIENT DEBUG: TClient[72075186224037888]::SendEvent [35:557:2092] 2025-03-31T15:55:37.867380Z node 34 :PIPE_SERVER DEBUG: [72075186224037888] Accept Connect Originator# [35:557:2092] 2025-03-31T15:55:37.867484Z node 35 :PIPE_CLIENT DEBUG: TClient[72057594037927937] connected with status OK role: Leader [35:556:2091] 2025-03-31T15:55:37.867516Z node 35 :PIPE_CLIENT DEBUG: TClient[72057594037927937] send queued [35:556:2091] 2025-03-31T15:55:37.867547Z node 35 :PIPE_CLIENT DEBUG: TClient[72057594037927937] push event to server [35:556:2091] 2025-03-31T15:55:37.867638Z node 35 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [35:556:2091] 2025-03-31T15:55:37.867971Z node 35 :PIPE_CLIENT DEBUG: TClient[72075186224037888] connected with status OK role: Leader [35:557:2092] 2025-03-31T15:55:37.868003Z node 35 :PIPE_CLIENT DEBUG: TClient[72075186224037888] send queued [35:557:2092] 2025-03-31T15:55:37.868028Z node 35 :PIPE_CLIENT DEBUG: TClient[72075186224037888] push event to server [35:557:2092] 2025-03-31T15:55:37.868073Z node 35 :PIPE_CLIENT DEBUG: TClient[72075186224037888]::SendEvent [35:557:2092] 2025-03-31T15:55:37.868460Z node 34 :PIPE_SERVER DEBUG: [72057594037927937] Push Sender# [35:553:2091] EventType# 268959744 2025-03-31T15:55:37.868540Z node 34 :PIPE_SERVER DEBUG: [72075186224037888] Push Sender# [35:554:2092] EventType# 268959744 2025-03-31T15:55:37.868768Z node 34 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{23, NKikimr::NHive::TTxRegisterNode} queued, type NKikimr::NHive::TTxRegisterNode 2025-03-31T15:55:37.868874Z node 34 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{23, NKikimr::NHive::TTxRegisterNode} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-31T15:55:37.869066Z node 34 :HIVE WARN: HIVE#72057594037927937 Node(35, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:55:37.869181Z node 34 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{23, NKikimr::NHive::TTxRegisterNode} hope 1 -> done Change{14, redo 208b alter 0b annex 0, ~{ 4 } -{ }, 0 gb} 2025-03-31T15:55:37.869269Z node 34 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{23, NKikimr::NHive::TTxRegisterNode} release 4194304b of static, Memory{0 dyn 0} 2025-03-31T15:55:37.869437Z node 34 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:6} Tx{6, NKikimr::NHive::TTxRegisterNode} queued, type NKikimr::NHive::TTxRegisterNode 2025-03-31T15:55:37.869497Z node 34 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:6} Tx{6, NKikimr::NHive::TTxRegisterNode} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-31T15:55:37.869631Z node 34 :HIVE WARN: HIVE#72075186224037888 Node(35, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:55:37.869711Z node 34 :HIVE 
WARN: HIVE#72075186224037888 Node(35, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:55:37.869780Z node 34 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:6} Tx{6, NKikimr::NHive::TTxRegisterNode} hope 1 -> done Change{6, redo 199b alter 0b annex 0, ~{ 4 } -{ }, 0 gb} 2025-03-31T15:55:37.869829Z node 34 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:6} Tx{6, NKikimr::NHive::TTxRegisterNode} release 4194304b of static, Memory{0 dyn 0} 2025-03-31T15:55:37.870046Z node 34 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:7} Tx{7, NKikimr::NHive::TTxProcessBootQueue} queued, type NKikimr::NHive::TTxProcessBootQueue 2025-03-31T15:55:37.870122Z node 34 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:7} Tx{7, NKikimr::NHive::TTxProcessBootQueue} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-31T15:55:37.870224Z node 34 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:7} Tx{7, NKikimr::NHive::TTxProcessBootQueue} hope 1 -> done Change{7, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-03-31T15:55:37.870305Z node 34 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:7} Tx{7, NKikimr::NHive::TTxProcessBootQueue} release 4194304b of static, Memory{0 dyn 0} 2025-03-31T15:55:37.870408Z node 34 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{24, NKikimr::NHive::TTxProcessBootQueue} queued, type NKikimr::NHive::TTxProcessBootQueue 2025-03-31T15:55:37.870457Z node 34 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{24, NKikimr::NHive::TTxProcessBootQueue} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-31T15:55:37.870526Z node 34 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{24, NKikimr::NHive::TTxProcessBootQueue} hope 1 -> done Change{15, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-03-31T15:55:37.870577Z node 34 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{24, NKikimr::NHive::TTxProcessBootQueue} release 4194304b of static, Memory{0 dyn 0} >> ColumnBuildTest::BuildColumnDoesnotRestoreDeletedRows [GOOD] >> TableCreation::ConcurrentTableCreation >> TSubDomainTest::DatashardRunAtOtherNodeWhenOneNodeIsStopped [GOOD] >> TSubDomainTest::CreateTableInsidetThenStopTenantAndForceDeleteSubDomain [GOOD] >> TSubDomainTest::CreateTableInsideSubDomain >> KqpSort::TopSortTableExprOffset >> THiveTest::TestFollowersCrossDC_KillingHiveAndFollower [GOOD] >> THiveTest::TestGetStorageInfo ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::BuildColumnDoesnotRestoreDeletedRows [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T15:55:16.437792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T15:55:16.437872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:55:16.437899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T15:55:16.437940Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T15:55:16.437978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T15:55:16.438003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T15:55:16.438068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:55:16.438146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T15:55:16.438456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:55:16.534691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:55:16.534772Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:55:16.543516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:55:16.543778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T15:55:16.544144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T15:55:16.556334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T15:55:16.556590Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T15:55:16.557382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T15:55:16.557696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:55:16.561234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:55:16.562776Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:55:16.562849Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:55:16.563038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T15:55:16.563112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:55:16.563156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T15:55:16.563329Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T15:55:16.573173Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T15:55:16.895132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:55:16.895497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at 
schemeshard: 72057594046678944 2025-03-31T15:55:16.895921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T15:55:16.896485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T15:55:16.896614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:55:16.900270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T15:55:16.900418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T15:55:16.900634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:55:16.900703Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T15:55:16.900746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T15:55:16.900785Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T15:55:16.905489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:55:16.905575Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:55:16.905648Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T15:55:16.913714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:55:16.913798Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:55:16.913866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:55:16.913928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T15:55:16.923819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:55:16.926431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T15:55:16.926694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T15:55:16.927853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 
2025-03-31T15:55:16.928008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:55:16.928070Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:55:16.928377Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T15:55:16.928445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:55:16.928644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:55:16.928762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T15:55:16.931512Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:55:16.931580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:55:16.931818Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:55:16.931867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T15:55:16.932107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:55:16.932154Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T15:55:16.932260Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:55:16.932298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:55:16.932345Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:55:16.932381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:55:16.932427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T15:55:16.932501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:55:16.932540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T15:55:16.932592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T15:55:16.932677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:55:16.932721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T15:55:16.932758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T15:55:16.935044Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 
PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:55:16.935191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:55:16.935260Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... MKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'27))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-03-31T15:55:38.196579Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2049:3912], Recipient [1:747:2636]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'28))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-03-31T15:55:38.206122Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2050:3913], Recipient [1:747:2636]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'29))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-03-31T15:55:38.229121Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2051:3914], Recipient [1:747:2636]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'30))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-03-31T15:55:38.238579Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2052:3915], Recipient [1:747:2636]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'31))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-03-31T15:55:38.245848Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2053:3916], Recipient [1:747:2636]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'32))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-03-31T15:55:38.252383Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2054:3917], Recipient [1:747:2636]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'33))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-03-31T15:55:38.303527Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2055:3918], Recipient [1:747:2636]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'34))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-03-31T15:55:38.436811Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2056:3919], Recipient [1:747:2636]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'35))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-03-31T15:55:38.520321Z node 1 
:TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2057:3920], Recipient [1:747:2636]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'36))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-03-31T15:55:38.551116Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2058:3921], Recipient [1:747:2636]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'37))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-03-31T15:55:38.629814Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2059:3922], Recipient [1:747:2636]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'38))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-03-31T15:55:38.762336Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2060:3923], Recipient [1:747:2636]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'39))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-03-31T15:55:38.968071Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2061:3924], Recipient [1:747:2636]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'40))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-03-31T15:55:39.085743Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2062:3925], Recipient [1:747:2636]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'41))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-03-31T15:55:39.208909Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2063:3926], Recipient [1:747:2636]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'42))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-03-31T15:55:39.287042Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2064:3927], Recipient [1:747:2636]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'43))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-03-31T15:55:39.370930Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2065:3928], Recipient [1:747:2636]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'44))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-03-31T15:55:39.425399Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2066:3929], Recipient [1:747:2636]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'45))))\n (let select \'(\'key))\n (return 
(AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-03-31T15:55:39.623807Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2067:3930], Recipient [1:747:2636]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'46))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-03-31T15:55:39.673423Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2068:3931], Recipient [1:747:2636]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'47))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-03-31T15:55:39.721099Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2069:3932], Recipient [1:747:2636]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'48))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-03-31T15:55:39.762079Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2070:3933], Recipient [1:747:2636]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'49))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-03-31T15:55:39.840389Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2071:3934], Recipient [1:747:2636]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'50))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::StartTenanNodeAndStopAtDestructor [GOOD] Test command err: 2025-03-31T15:55:11.181859Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488002799183923709:2221];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:55:11.181965Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000c01/r3tmp/tmp6K7gHV/pdisk_1.dat 2025-03-31T15:55:12.445954Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:55:13.339931Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:55:13.340060Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:55:13.362936Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:55:13.453836Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:55:13.455945Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; TClient is 
connected to server localhost:29002 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-03-31T15:55:14.077523Z node 1 :TX_PROXY DEBUG: actor# [1:7488002799183923787:2116] Handle TEvNavigate describe path dc-1 2025-03-31T15:55:14.077571Z node 1 :TX_PROXY DEBUG: Actor# [1:7488002812068826170:2449] HANDLE EvNavigateScheme dc-1 2025-03-31T15:55:14.077681Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7488002799183923812:2130], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T15:55:14.077812Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002803478891193:2166][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7488002799183923812:2130], cookie# 1 2025-03-31T15:55:14.079299Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488002803478891202:2166][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488002803478891199:2166], cookie# 1 2025-03-31T15:55:14.079333Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488002803478891203:2166][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488002803478891200:2166], cookie# 1 2025-03-31T15:55:14.079349Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488002803478891204:2166][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488002803478891201:2166], cookie# 1 2025-03-31T15:55:14.079390Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488002794888956166:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488002803478891202:2166], cookie# 1 2025-03-31T15:55:14.079415Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488002794888956169:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488002803478891203:2166], cookie# 1 2025-03-31T15:55:14.079437Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488002794888956172:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488002803478891204:2166], cookie# 1 2025-03-31T15:55:14.079462Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488002803478891202:2166][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488002794888956166:2050], cookie# 1 2025-03-31T15:55:14.079478Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488002803478891203:2166][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488002794888956169:2053], cookie# 1 2025-03-31T15:55:14.079501Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488002803478891204:2166][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488002794888956172:2056], cookie# 1 2025-03-31T15:55:14.079538Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002803478891193:2166][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488002803478891199:2166], cookie# 1 2025-03-31T15:55:14.079562Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002803478891193:2166][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-03-31T15:55:14.079576Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002803478891193:2166][/dc-1] Handle 
NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488002803478891200:2166], cookie# 1 2025-03-31T15:55:14.079593Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002803478891193:2166][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-03-31T15:55:14.079625Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002803478891193:2166][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488002803478891201:2166], cookie# 1 2025-03-31T15:55:14.079655Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002803478891193:2166][/dc-1] Unexpected sync response: sender# [1:7488002803478891201:2166], cookie# 1 2025-03-31T15:55:14.079712Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7488002799183923812:2130], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-03-31T15:55:14.085832Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7488002799183923812:2130], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7488002803478891193:2166] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-31T15:55:14.085994Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7488002799183923812:2130], cacheItem# { Subscriber: { Subscriber: [1:7488002803478891193:2166] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-03-31T15:55:14.088171Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7488002812068826171:2450], recipient# [1:7488002812068826170:2449], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-31T15:55:14.088230Z node 1 :TX_PROXY DEBUG: Actor# [1:7488002812068826170:2449] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-31T15:55:14.130586Z node 1 :TX_PROXY DEBUG: Actor# [1:7488002812068826170:2449] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-03-31T15:55:14.134774Z node 1 :TX_PROXY DEBUG: Actor# [1:7488002812068826170:2449] Handle TEvDescribeSchemeResult Forward to# 
[1:7488002812068826169:2448] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T15:55:14.243430Z node 1 :TX_PROXY DEBUG: actor# [1:7488002799183923787:2116] Handle TEvProposeTransaction 2025-03-31T15:55:14.243462Z node 1 :TX_PROXY DEBUG: actor# [1:7488002799183923787:2116] TxId# 281474976710657 ProcessProposeTransaction 2025-03-31T15:55:14.243672Z node 1 :TX_PROXY DEBUG: actor# [1:7488002799183923787:2116] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7488002812068826178:2456] 2025-03-31T15:55:14.467726Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7488002799183923812:2130], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T15:55:14.467809Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7488002799183923812:2130], path# /dc-1/.metadata ... 
tual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-31T15:55:37.220859Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7488002865097066466:2144], cacheItem# { Subscriber: { Subscriber: [3:7488002912341707421:2592] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T15:55:37.220898Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [3:7488002865097066466:2144], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 } 2025-03-31T15:55:37.220947Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [3:7488002865097066466:2144], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7488002912341707420:2591] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-31T15:55:37.221014Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7488002865097066466:2144], cacheItem# { Subscriber: { Subscriber: [3:7488002912341707420:2591] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T15:55:37.221112Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7488002912341707434:2593], recipient# [3:7488002912341707419:2326], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T15:55:37.287239Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7488002865097066466:2144], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 
2025-03-31T15:55:37.287372Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7488002865097066466:2144], cacheItem# { Subscriber: { Subscriber: [3:7488002869392034206:2442] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T15:55:37.287469Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7488002912341707437:2594], recipient# [3:7488002912341707436:2327], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T15:55:37.358293Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7488002865097066466:2144], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T15:55:37.358442Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7488002865097066466:2144], cacheItem# { Subscriber: { Subscriber: [3:7488002869392034206:2442] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T15:55:37.358590Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7488002912341707441:2595], recipient# [3:7488002912341707440:2328], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T15:55:38.220953Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7488002865097066466:2144], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T15:55:38.221138Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# 
[3:7488002865097066466:2144], cacheItem# { Subscriber: { Subscriber: [3:7488002912341707402:2589] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T15:55:38.221254Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7488002916636674747:2601], recipient# [3:7488002916636674746:2329], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T15:55:38.291616Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7488002865097066466:2144], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T15:55:38.291748Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7488002865097066466:2144], cacheItem# { Subscriber: { Subscriber: [3:7488002869392034206:2442] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T15:55:38.291831Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7488002916636674750:2602], recipient# [3:7488002916636674749:2330], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T15:55:38.359598Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7488002865097066466:2144], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T15:55:38.359724Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7488002865097066466:2144], cacheItem# { Subscriber: { Subscriber: 
[3:7488002869392034206:2442] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T15:55:38.359810Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7488002916636674754:2603], recipient# [3:7488002916636674753:2331], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> KqpReturning::ReturningWorksIndexedUpsert+QueryService >> KqpNewEngine::PureTxMixedWithDeferred |66.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace |66.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace |66.9%| [LD] {RESULT} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace |66.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings |66.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings |66.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::DatashardRunAtOtherNodeWhenOneNodeIsStopped [GOOD] Test command err: 2025-03-31T15:55:11.378954Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488002798126455061:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:55:11.379600Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000b6a/r3tmp/tmpS36wwv/pdisk_1.dat 2025-03-31T15:55:12.431411Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:55:12.431789Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:55:12.441638Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:55:12.441712Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:55:12.453568Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22557 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-03-31T15:55:12.691346Z node 1 :TX_PROXY DEBUG: actor# [1:7488002798126455286:2116] Handle TEvNavigate describe path dc-1 2025-03-31T15:55:12.691386Z node 1 :TX_PROXY DEBUG: Actor# [1:7488002802421423069:2443] HANDLE EvNavigateScheme dc-1 2025-03-31T15:55:12.691498Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7488002798126455309:2129], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T15:55:12.691569Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002802421422951:2347][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7488002798126455309:2129], cookie# 1 2025-03-31T15:55:12.693122Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488002802421422961:2347][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488002802421422958:2347], cookie# 1 2025-03-31T15:55:12.693204Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488002802421422962:2347][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488002802421422959:2347], cookie# 1 2025-03-31T15:55:12.693218Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488002802421422963:2347][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488002802421422960:2347], cookie# 1 2025-03-31T15:55:12.693241Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488002798126454968:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488002802421422963:2347], cookie# 1 2025-03-31T15:55:12.693268Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488002802421422963:2347][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488002798126454968:2056], cookie# 1 2025-03-31T15:55:12.693285Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002802421422951:2347][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488002802421422960:2347], cookie# 1 2025-03-31T15:55:12.693301Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002802421422951:2347][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-03-31T15:55:12.693313Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488002798126454962:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488002802421422961:2347], cookie# 1 2025-03-31T15:55:12.693323Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488002798126454965:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488002802421422962:2347], cookie# 1 2025-03-31T15:55:12.693351Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488002802421422961:2347][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488002798126454962:2050], cookie# 1 2025-03-31T15:55:12.693359Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488002802421422962:2347][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488002798126454965:2053], cookie# 1 2025-03-31T15:55:12.693370Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002802421422951:2347][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# 
[1:7488002802421422958:2347], cookie# 1 2025-03-31T15:55:12.693383Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002802421422951:2347][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-03-31T15:55:12.693397Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002802421422951:2347][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488002802421422959:2347], cookie# 1 2025-03-31T15:55:12.693405Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002802421422951:2347][/dc-1] Unexpected sync response: sender# [1:7488002802421422959:2347], cookie# 1 2025-03-31T15:55:12.693451Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7488002798126455309:2129], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-03-31T15:55:12.697654Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7488002798126455309:2129], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7488002802421422951:2347] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-31T15:55:12.697752Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7488002798126455309:2129], cacheItem# { Subscriber: { Subscriber: [1:7488002802421422951:2347] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-03-31T15:55:12.699065Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7488002802421423070:2444], recipient# [1:7488002802421423069:2443], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-31T15:55:12.699143Z node 1 :TX_PROXY DEBUG: Actor# [1:7488002802421423069:2443] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-31T15:55:12.740069Z node 1 :TX_PROXY DEBUG: Actor# [1:7488002802421423069:2443] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-03-31T15:55:12.743209Z node 1 :TX_PROXY DEBUG: Actor# [1:7488002802421423069:2443] Handle TEvDescribeSchemeResult Forward to# [1:7488002802421423068:2442] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult 
PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T15:55:12.759391Z node 1 :TX_PROXY DEBUG: actor# [1:7488002798126455286:2116] Handle TEvProposeTransaction 2025-03-31T15:55:12.759411Z node 1 :TX_PROXY DEBUG: actor# [1:7488002798126455286:2116] TxId# 281474976715657 ProcessProposeTransaction 2025-03-31T15:55:12.759491Z node 1 :TX_PROXY DEBUG: actor# [1:7488002798126455286:2116] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7488002802421423075:2448] 2025-03-31T15:55:12.842949Z node 1 :TX_PROXY DEBUG: Actor# [1:7488002802421423075:2448] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" PeerName: "" 2025-03-31T15:55:12.843002Z node 1 :TX_PROXY DEBUG: Actor# [1:7488002802421423075:2448] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T15:55:12.843057Z node 1 :TX_PROXY DEBUG: Actor# [1:7488002802421423075:2448] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-03-31T15:55:12.843359Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7488002798126455309:2129], request# { ErrorCount: 0 Databa ... 
ration: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T15:55:40.897978Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7488002864734657370:2107], cacheItem# { Subscriber: { Subscriber: [4:7488002924864199992:2388] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T15:55:40.898029Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7488002864734657370:2107], cacheItem# { Subscriber: { Subscriber: [4:7488002924864199993:2389] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T15:55:40.898121Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7488002924864200019:2395], recipient# [4:7488002924864199990:2348], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-03-31T15:55:40.902344Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:7488002924864199990:2348], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:55:41.272029Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7488002864734657370:2107], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T15:55:41.272168Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7488002864734657370:2107], cacheItem# { Subscriber: { Subscriber: [4:7488002869029624783:2174] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T15:55:41.272508Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7488002929159167317:2396], recipient# [4:7488002929159167316:2351], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T15:55:41.302314Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7488002864734657370:2107], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T15:55:41.302462Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7488002864734657370:2107], cacheItem# { Subscriber: { Subscriber: [4:7488002869029624783:2174] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T15:55:41.302554Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7488002929159167319:2397], recipient# [4:7488002929159167318:2352], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown 
DomainInfo }] } 2025-03-31T15:55:41.325587Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7488002864734657370:2107], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T15:55:41.326379Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7488002864734657370:2107], cacheItem# { Subscriber: { Subscriber: [4:7488002924864199992:2388] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T15:55:41.326434Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7488002864734657370:2107], cacheItem# { Subscriber: { Subscriber: [4:7488002924864199993:2389] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T15:55:41.327073Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7488002929159167320:2398], recipient# [4:7488002924864199990:2348], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-03-31T15:55:41.329990Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:7488002924864199990:2348], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:55:41.419958Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7488002864734657370:2107], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T15:55:41.420101Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7488002864734657370:2107], cacheItem# { Subscriber: { Subscriber: [4:7488002924864199994:2390] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T15:55:41.420179Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7488002929159167322:2399], recipient# [4:7488002929159167321:2353], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-03-31T15:55:41.420381Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; >> THiveTest::TestGetStorageInfo [GOOD] >> THiveTest::TestGetStorageInfoDeleteTabletBeforeAssigned >> THiveTest::TestGetStorageInfoDeleteTabletBeforeAssigned [GOOD] >> THiveTest::TestExternalBootWhenLocked >> KqpSort::ReverseFirstKeyOptimized >> KqpNewEngine::KeyColumnOrder >> KqpNotNullColumns::InsertNotNullPk |66.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ydb-public-sdk-cpp-src-client-persqueue_public-ut |66.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ydb-public-sdk-cpp-src-client-persqueue_public-ut |66.9%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ydb-public-sdk-cpp-src-client-persqueue_public-ut >> PersQueueSdkReadSessionTest::ReadSessionWithClose [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithCloseNotCommitted >> TSubDomainTest::GenericCases [GOOD] >> KqpNewEngine::Update+UseSink >> THiveTest::TestExternalBootWhenLocked [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::GenericCases [GOOD] Test command err: 2025-03-31T15:55:13.921576Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488002809737725989:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:55:13.921645Z node 1 :METADATA_PROVIDER 
ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000b0a/r3tmp/tmpuJJOWL/pdisk_1.dat 2025-03-31T15:55:15.043238Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:55:15.062706Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:55:15.065866Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:55:15.077993Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:55:15.102312Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:10375 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-03-31T15:55:15.447290Z node 1 :TX_PROXY DEBUG: actor# [1:7488002809737726219:2141] Handle TEvNavigate describe path dc-1 2025-03-31T15:55:15.447332Z node 1 :TX_PROXY DEBUG: Actor# [1:7488002818327661270:2450] HANDLE EvNavigateScheme dc-1 2025-03-31T15:55:15.447610Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7488002814032693538:2154], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T15:55:15.447788Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002814032693850:2347][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7488002814032693538:2154], cookie# 1 2025-03-31T15:55:15.449734Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488002818327661152:2347][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488002818327661149:2347], cookie# 1 2025-03-31T15:55:15.449765Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488002818327661153:2347][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488002818327661150:2347], cookie# 1 2025-03-31T15:55:15.449778Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488002818327661154:2347][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488002818327661151:2347], cookie# 1 2025-03-31T15:55:15.449870Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488002809737725859:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488002818327661152:2347], cookie# 1 2025-03-31T15:55:15.449931Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488002809737725862:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488002818327661153:2347], cookie# 1 2025-03-31T15:55:15.449946Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488002809737725865:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488002818327661154:2347], cookie# 1 2025-03-31T15:55:15.449972Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488002818327661152:2347][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488002809737725859:2051], cookie# 1 2025-03-31T15:55:15.450017Z node 1 :SCHEME_BOARD_SUBSCRIBER 
DEBUG: [replica][1:7488002818327661153:2347][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488002809737725862:2054], cookie# 1 2025-03-31T15:55:15.450033Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488002818327661154:2347][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488002809737725865:2057], cookie# 1 2025-03-31T15:55:15.450080Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002814032693850:2347][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488002818327661149:2347], cookie# 1 2025-03-31T15:55:15.450133Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002814032693850:2347][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-03-31T15:55:15.450145Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002814032693850:2347][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488002818327661150:2347], cookie# 1 2025-03-31T15:55:15.450184Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002814032693850:2347][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-03-31T15:55:15.450209Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002814032693850:2347][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488002818327661151:2347], cookie# 1 2025-03-31T15:55:15.450220Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002814032693850:2347][/dc-1] Unexpected sync response: sender# [1:7488002818327661151:2347], cookie# 1 2025-03-31T15:55:15.450403Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7488002814032693538:2154], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-03-31T15:55:15.480717Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7488002814032693538:2154], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7488002814032693850:2347] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-31T15:55:15.480847Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7488002814032693538:2154], cacheItem# { Subscriber: { Subscriber: [1:7488002814032693850:2347] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-03-31T15:55:15.487881Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7488002818327661271:2451], recipient# [1:7488002818327661270:2450], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath 
RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-31T15:55:15.487974Z node 1 :TX_PROXY DEBUG: Actor# [1:7488002818327661270:2450] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-31T15:55:15.527014Z node 1 :TX_PROXY DEBUG: Actor# [1:7488002818327661270:2450] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-03-31T15:55:15.530212Z node 1 :TX_PROXY DEBUG: Actor# [1:7488002818327661270:2450] Handle TEvDescribeSchemeResult Forward to# [1:7488002818327661269:2449] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-03-31T15:55:15.575620Z node 1 :TX_PROXY DEBUG: actor# [1:7488002809737726219:2141] Handle TEvProposeTransaction 2025-03-31T15:55:15.575648Z node 1 :TX_PROXY DEBUG: actor# [1:7488002809737726219:2141] TxId# 281474976710657 ProcessProposeTransaction 2025-03-31T15:55:15.575765Z node 1 :TX_PROXY DEBUG: actor# [1:7488002809737726219:2141] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7488002818327661278:2457] 2025-03-31T15:55:15.946063Z node 1 :TX_PROXY DEBUG: Actor# [1:7488002818327661278:2457] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" PeerName: "" 2025-03-31T15:55:15.946114Z node 1 :TX_PROXY DEBUG: Actor# [1:7488002818327661278:2457] txid# 281474976710657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T15:55:15.946171Z node 1 :TX_PROXY DEBUG: Actor# [1:7488002818327661278:2457] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-03-31T15:55:15.946269Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7488002814032693538:2154], request# { ErrorCount: 0 Databa ... ncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T15:55:47.571542Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7488002903110508958:2130], cacheItem# { Subscriber: { Subscriber: [4:7488002907405476656:2401] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T15:55:47.571633Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7488002954650117866:3098], recipient# [4:7488002954650117865:2347], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T15:55:47.686103Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7488002903110508958:2130], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T15:55:47.686214Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7488002903110508958:2130], cacheItem# { Subscriber: { Subscriber: [4:7488002950355150521:3086] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: 
dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T15:55:47.686283Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7488002954650117868:3099], recipient# [4:7488002954650117867:2348], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T15:55:47.754332Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7488002903110508958:2130], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T15:55:47.754458Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7488002903110508958:2130], cacheItem# { Subscriber: { Subscriber: [4:7488002907405476656:2401] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T15:55:47.754549Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7488002954650117870:3100], recipient# [4:7488002954650117869:2349], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T15:55:48.575224Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7488002903110508958:2130], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T15:55:48.575360Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7488002903110508958:2130], cacheItem# { Subscriber: { Subscriber: [4:7488002907405476656:2401] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: 
[18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T15:55:48.575452Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7488002958945085177:3104], recipient# [4:7488002958945085176:2350], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T15:55:48.688192Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7488002903110508958:2130], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T15:55:48.688305Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7488002903110508958:2130], cacheItem# { Subscriber: { Subscriber: [4:7488002950355150521:3086] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T15:55:48.688367Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7488002958945085179:3105], recipient# [4:7488002958945085178:2351], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T15:55:48.756237Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7488002903110508958:2130], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T15:55:48.756378Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7488002903110508958:2130], cacheItem# { Subscriber: { Subscriber: [4:7488002907405476656:2401] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] 
RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T15:55:48.756474Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7488002958945085181:3106], recipient# [4:7488002958945085180:2352], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T15:55:49.583271Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7488002903110508958:2130], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T15:55:49.583425Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7488002903110508958:2130], cacheItem# { Subscriber: { Subscriber: [4:7488002907405476656:2401] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T15:55:49.583545Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7488002963240052488:3110], recipient# [4:7488002963240052487:2353], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> THiveTest::TestHiveBalancerNodeRestarts [GOOD] >> THiveTest::TestHiveBalancerDifferentResources2 >> TStorageBalanceTest::TestScenario2 [GOOD] >> TStorageBalanceTest::TestScenario3 ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::ClickHouseFilterPushdown 2025-03-31 15:55:45,944 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-03-31 15:55:46,622 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 60 secs timeout. 
Process tree before termination: pid rss ref pdirt 16559 46.1M 46.1M 23.1M test_tool run_ut @/home/runner/.ya/build/build_root/0l5c/000f3a/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/chunk0/testing_out_stuff/test 17156 1.4G 1.4G 921M └─ ydb-core-kqp-ut-federated_query-generic_ut --trace-path-append /home/runner/.ya/build/build_root/0l5c/000f3a/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unitt Test command err: Trying to start YDB, gRPC: 9240, MsgBus: 23524 2025-03-31T15:54:57.373118Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488002739804877427:2190];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:54:57.373147Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000f3a/r3tmp/tmpa4uhPH/pdisk_1.dat 2025-03-31T15:54:58.299777Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:54:58.307249Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:58.307372Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:58.311811Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9240, node 1 2025-03-31T15:54:58.547873Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:54:58.547898Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:54:58.547908Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:54:58.548042Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23524 TClient is connected to server localhost:23524 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:55:00.007460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T15:55:02.375249Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488002739804877427:2190];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:55:02.375325Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:55:10.676849Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488002795639452857:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:55:10.676957Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:55:10.971265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-03-31T15:55:11.106217Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488002799934420277:2359], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:55:11.106305Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:55:11.106494Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488002799934420282:2362], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:55:11.110071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-03-31T15:55:11.122596Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488002799934420284:2363], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-31T15:55:11.213385Z node 1 :TX_PROXY ERROR: Actor# [1:7488002799934420324:2429] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:55:12.149900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T15:55:12.721496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:1, at schemeshard: 72057594046644480 2025-03-31T15:55:13.237703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T15:55:13.289990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T15:55:13.290023Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:55:13.698959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-31T15:55:14.168493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-31T15:55:14.645842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-03-31T15:55:14.690121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-03-31T15:55:19.284525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710717:0, at schemeshard: 72057594046644480 2025-03-31T15:55:19.748336Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710720:0, at schemeshard: 72057594046644480 2025-03-31T15:55:19.771414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710722:0, at schemeshard: 72057594046644480 2025-03-31T15:55:19.796130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710721:0, at schemeshard: 72057594046644480 Call DescribeTable. 
data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. splits { select { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { items { column { name: "col1" type { type_id: UINT16 } } } } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Expected: splits { select { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { items { column { name: "col1" type { type_id: UINT16 } } } } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Actual: splits { select { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { items { column { name: "col1" type { type_id: UINT16 } } } } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. 
GRpcStatusCode: 0 2025-03-31T15:55:25.918173Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743436525936, txId: 281474976710753] shutting down Trying to start YDB, gRPC: 24371, MsgBus: 20467 2025-03-31T15:55:27.500240Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488002866433044490:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:55:27.500273Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000f3a/r3tmp/tmpzntGH9/pdisk_1.dat 2025-03-31T15:55:29.054430Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:55:29.269123Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:55:29.312986Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:55:29.313072Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:55:29.324310Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24371, node 2 2025-03-31T15:55:30.091987Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:55:30.092007Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:55:30.092015Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:55:30.092140Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20467 2025-03-31T15:55:32.507689Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488002866433044490:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:55:32.508081Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:20467 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T15:55:35.419675Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:55:40.074251Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488002922267620044:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:55:40.074334Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:55:40.218966Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-03-31T15:55:40.483379Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488002922267620165:2360], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:55:40.483450Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:55:40.484425Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488002922267620170:2363], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:55:40.492191Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-03-31T15:55:40.554032Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488002922267620172:2364], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-31T15:55:40.635934Z node 2 :TX_PROXY ERROR: Actor# [2:7488002922267620235:2435] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:55:41.787695Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T15:55:42.388700Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:1, at schemeshard: 72057594046644480 2025-03-31T15:55:43.209178Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-31T15:55:43.643946Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T15:55:43.643969Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:55:44.558243Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710683:0, at schemeshard: 72057594046644480
Traceback (most recent call last):
  File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait
    wait_for(
  File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for
    raise TimeoutError(truncate(message, MAX_MESSAGE_LEN))
yatest.common.process.TimeoutError: 60 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8330113388/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/0l5c/000f3a/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/chunk0/testing_out_stuff/test_tool.args']' stopped by 60 seconds timeout

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1749, in main
    res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback)
  File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait
    raise ExecutionTimeoutError(self, str(e))
yatest.common.process.ExecutionTimeoutError: (("60 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8330113388/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/0l5c/000f3a/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/chunk0/testing_out_stuff/test_tool.args']' stopped by 60 seconds timeout",), {})
>> TStorageBalanceTest::TestScenario1 [GOOD] >> THiveTest::TestDeleteOwnerTabletsMany [GOOD] >> THiveTest::TestDeleteTabletWithFollowers ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THiveTest::TestExternalBootWhenLocked [GOOD] Test command err: 2025-03-31T15:54:08.456317Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-03-31T15:54:08.526511Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1
Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-03-31T15:54:08.527869Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:196} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-03-31T15:54:08.532255Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-03-31T15:54:08.539706Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:265} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2025-03-31T15:54:08.540116Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-03-31T15:54:08.544836Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:73:2076] ControllerId# 72057594037932033 2025-03-31T15:54:08.545042Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-03-31T15:54:08.545485Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295 2025-03-31T15:54:08.551664Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler 2025-03-31T15:54:08.684644Z node 1 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-03-31T15:54:08.684858Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-03-31T15:54:08.704428Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:72:2075] Create Queue# [1:81:2081] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:08.705238Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:72:2075] Create Queue# [1:82:2082] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:08.705865Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:72:2075] Create Queue# [1:83:2083] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:08.708169Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:72:2075] Create Queue# [1:84:2084] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:08.708602Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:72:2075] Create Queue# [1:85:2085] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:08.709009Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:72:2075] Create Queue# [1:86:2086] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:08.709749Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:72:2075] Create Queue# [1:87:2087] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:08.709915Z node 1 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-03-31T15:54:08.710370Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [1:73:2076] 2025-03-31T15:54:08.710704Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [1:73:2076] 2025-03-31T15:54:08.711157Z node 1 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-03-31T15:54:08.711373Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-03-31T15:54:08.715075Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-03-31T15:54:08.715163Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-03-31T15:54:08.721419Z node 2 :BS_NODE DEBUG: 
{NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-03-31T15:54:08.721543Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-03-31T15:54:08.722301Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [2:96:2074] ControllerId# 72057594037932033 2025-03-31T15:54:08.722325Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-03-31T15:54:08.722381Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295 2025-03-31T15:54:08.722533Z node 2 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler 2025-03-31T15:54:08.725243Z node 2 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-03-31T15:54:08.725302Z node 2 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-03-31T15:54:08.734094Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:95:2073] Create Queue# [2:102:2078] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:08.734564Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:95:2073] Create Queue# [2:103:2079] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:08.735296Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:95:2073] Create Queue# [2:104:2080] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:08.735611Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:95:2073] Create Queue# [2:105:2081] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:08.735982Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:95:2073] Create Queue# [2:106:2082] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:08.736647Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:95:2073] Create Queue# [2:107:2083] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:08.737063Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:95:2073] Create Queue# [2:108:2084] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:08.737087Z node 2 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-03-31T15:54:08.737148Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [2:96:2074] 2025-03-31T15:54:08.737319Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [2:96:2074] 2025-03-31T15:54:08.737359Z node 2 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-03-31T15:54:08.737396Z node 2 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-03-31T15:54:08.738765Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-03-31T15:54:08.738849Z node 3 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-03-31T15:54:08.785751Z node 3 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 
Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-03-31T15:54:08.786020Z node 3 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-03-31T15:54:08.789907Z node 3 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [3:114:2074] ControllerId# 72057594037932033 2025-03-31T15:54:08.789943Z node 3 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-03-31T15:54:08.790004Z node 3 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295 2025-03-31T15:54:08.790930Z node 3 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler 2025-03-31T15:54:08.798049Z node 3 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-03-31T15:54:08.798099Z node 3 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-03-31T15:54:08.805872Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:113:2073] Create Queue# [3:120:2078] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:08.806715Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:113:2073] Create Queue# [3:121:2079] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:08.807415Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:113:2073] Create Queue# [3:122:2080] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:08.807837Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:113:2073] Create Queue# [3:123:2081] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:08.808685Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:113:2073] Create Queue# [3:124:2082] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:08.809329Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:113:2073] Create Queue# [3:125:2083] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:08.809799Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:113:2073] Create Queue# [3:126:2084] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:08.809825Z node 3 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-03-31T15:54:08.810193Z node 3 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [3:114:2074] 2025-03-31T15:54:08.810221Z node 3 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [3:114:2074] 2025-03-31T15:54:08.810416Z node 3 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-03-31T15:54:08.810457Z node 3 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-03-31T15:54:08.811989Z node 3 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-03-31T15:54:08.813321Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-03-31T15:54:09.104556Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:73:2076] 2025-03-31T15:54:09.104792Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-03-31T15:54:09.105223Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-03-31T15:54:09.118945Z node 2 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-03-31T15:54:09.119634Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [2:96:2074] 2025-03-31T15:54:09.119679Z node 
2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-03-31T15:54:09.119871Z node 2 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-03-31T15:54:09.120177Z node 3 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-03-31T15:54:09.120596Z node 3 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [3:114:2074] 2025-03-31T15:54:09.121078Z node 3 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-03-31T15:54:09.121101Z node 3 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-03-31T15:54:09.122104Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeLis ... # true Marker# BPP21 2025-03-31T15:55:52.472641Z node 59 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 0 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 7.038 sample PartId# [72057594037927937:2:8:0:0:200:1] QueryCount# 1 VDiskId# [0:1:0:0:0] NodeId# 59 } TEvVPutResult{ TimestampMs# 24.14 VDiskId# [0:1:0:0:0] NodeId# 59 Status# OK } ] } 2025-03-31T15:55:52.473383Z node 59 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037927937:2:8:0:0:200:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-03-31T15:55:52.475025Z node 59 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} commited cookie 1 for step 8 2025-03-31T15:55:52.484002Z node 59 :TABLET_MAIN DEBUG: Tablet: 72075186224037888 Received TEvTabletStop from [59:98:2093], reason = ReasonStop Marker# TSYS29 2025-03-31T15:55:52.484365Z node 59 :PIPE_SERVER DEBUG: [72075186224037888] Stop 2025-03-31T15:55:52.485860Z node 59 :TABLET_MAIN NOTICE: Tablet: 72075186224037888 Type: Dummy, EReason: ReasonPill, SuggestedGeneration: 1, KnownGeneration: 1 Marker# TSYS31 2025-03-31T15:55:52.486848Z node 59 :PIPE_SERVER DEBUG: [72075186224037888] Detach 2025-03-31T15:55:52.488347Z node 59 :TABLET_EXECUTOR INFO: Leader{72075186224037888:1:3} suiciding, Waste{1:0, 289b +(0, 0b), 2 trc, -0b acc} 2025-03-31T15:55:52.503853Z node 59 :PIPE_CLIENT DEBUG: TClient[72075186224037888] peer closed [59:440:2351] 2025-03-31T15:55:52.504303Z node 59 :PIPE_CLIENT DEBUG: TClient[72075186224037888] notify reset [59:440:2351] 2025-03-31T15:55:52.504715Z node 59 :PIPE_CLIENT DEBUG: TClient[72057594037927937] send [59:99:2093] 2025-03-31T15:55:52.505035Z node 59 :PIPE_CLIENT DEBUG: TClient[72057594037927937] push event to server [59:99:2093] 2025-03-31T15:55:52.506013Z node 59 :PIPE_SERVER DEBUG: [72057594037927937] HandleSend Sender# [59:98:2093] EventType# 268960257 2025-03-31T15:55:52.511644Z node 59 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{23, NKikimr::NHive::TTxUpdateTabletStatus} queued, type NKikimr::NHive::TTxUpdateTabletStatus 2025-03-31T15:55:52.512005Z node 59 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{23, NKikimr::NHive::TTxUpdateTabletStatus} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-31T15:55:52.513536Z node 59 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{23, NKikimr::NHive::TTxUpdateTabletStatus} hope 1 -> done Change{13, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-03-31T15:55:52.514272Z node 59 :TABLET_EXECUTOR DEBUG: 
Leader{72057594037927937:2:9} Tx{23, NKikimr::NHive::TTxUpdateTabletStatus} release 4194304b of static, Memory{0 dyn 0} 2025-03-31T15:55:52.534066Z node 59 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{24, NKikimr::NHive::TTxProcessBootQueue} queued, type NKikimr::NHive::TTxProcessBootQueue 2025-03-31T15:55:52.535047Z node 59 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{24, NKikimr::NHive::TTxProcessBootQueue} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-31T15:55:52.543671Z node 59 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{24, NKikimr::NHive::TTxProcessBootQueue} hope 1 -> done Change{13, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-03-31T15:55:52.544563Z node 59 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{24, NKikimr::NHive::TTxProcessBootQueue} release 4194304b of static, Memory{0 dyn 0} 2025-03-31T15:55:52.549852Z node 59 :PIPE_CLIENT DEBUG: TClient[72075186224037888] ::Bootstrap [59:454:2358] 2025-03-31T15:55:52.550461Z node 59 :PIPE_CLIENT DEBUG: TClient[72075186224037888] lookup [59:454:2358] 2025-03-31T15:55:52.550991Z node 59 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037888 entry.State: StNormal ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 1:2:0} 2025-03-31T15:55:52.551608Z node 59 :TABLET_RESOLVER DEBUG: SelectForward node 59 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72075186224037888 followers: 0 countLeader 1 allowFollowers 0 winner: [59:374:2299] 2025-03-31T15:55:52.552520Z node 59 :PIPE_CLIENT DEBUG: TClient[72075186224037888] forward result local node, try to connect [59:454:2358] 2025-03-31T15:55:52.552848Z node 59 :PIPE_CLIENT DEBUG: TClient[72075186224037888]::SendEvent [59:454:2358] 2025-03-31T15:55:52.554049Z node 59 :PIPE_CLIENT DEBUG: TClient[72075186224037888] connect request undelivered [59:454:2358] 2025-03-31T15:55:52.554363Z node 59 :PIPE_CLIENT DEBUG: TClient[72075186224037888] connect failed [59:454:2358] 2025-03-31T15:55:52.554641Z node 59 :TABLET_RESOLVER DEBUG: Handle TEvTabletProblem tabletId: 72075186224037888 entry.State: StNormal 2025-03-31T15:55:52.563697Z node 59 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2025-03-31T15:55:52.565555Z node 59 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2025-03-31T15:55:52.565967Z node 59 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2025-03-31T15:55:52.566001Z node 59 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2025-03-31T15:55:52.566355Z node 59 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 CurrentLeader: [59:374:2299] CurrentLeaderTablet: [59:389:2311] CurrentGeneration: 1 CurrentStep: 0} 2025-03-31T15:55:52.566709Z node 59 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 CurrentLeader: [59:374:2299] CurrentLeaderTablet: [59:389:2311] CurrentGeneration: 1 CurrentStep: 0} 2025-03-31T15:55:52.571019Z node 59 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037888 entry.State: StProblemResolve success: true ev: {EvInfo Status: 0 TabletID: 72075186224037888 Cookie: 0 CurrentLeader: [59:374:2299] CurrentLeaderTablet: [59:389:2311] CurrentGeneration: 1 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 
2025-03-31T15:55:52.571910Z node 59 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72075186224037888 followers: 0 2025-03-31T15:55:52.579419Z node 60 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [60:456:2093] 2025-03-31T15:55:52.580027Z node 60 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [60:456:2093] 2025-03-31T15:55:52.580417Z node 60 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-03-31T15:55:52.581083Z node 60 :TABLET_RESOLVER DEBUG: SelectForward node 60 selfDC 2 leaderDC 1 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [59:323:2263] 2025-03-31T15:55:52.581981Z node 60 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [60:456:2093] 2025-03-31T15:55:52.582292Z node 60 :PIPE_CLIENT DEBUG: TClient[72057594037927937] received pending shutdown [60:456:2093] 2025-03-31T15:55:52.582641Z node 60 :PIPE_CLIENT DEBUG: TClient[72057594037927937] forward result remote node 59 [60:456:2093] 2025-03-31T15:55:52.582995Z node 60 :PIPE_CLIENT DEBUG: TClient[72057594037927937] remote node connected [60:456:2093] 2025-03-31T15:55:52.587735Z node 60 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [60:456:2093] 2025-03-31T15:55:52.590492Z node 59 :PIPE_SERVER DEBUG: [72057594037927937] Accept Connect Originator# [60:456:2093] 2025-03-31T15:55:52.596367Z node 60 :PIPE_CLIENT DEBUG: TClient[72057594037927937] connected with status OK role: Leader [60:456:2093] 2025-03-31T15:55:52.598587Z node 60 :PIPE_CLIENT DEBUG: TClient[72057594037927937] send queued [60:456:2093] 2025-03-31T15:55:52.598925Z node 60 :PIPE_CLIENT DEBUG: TClient[72057594037927937] push event to server [60:456:2093] 2025-03-31T15:55:52.599854Z node 60 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [60:456:2093] 2025-03-31T15:55:52.600975Z node 60 :PIPE_CLIENT DEBUG: TClient[72057594037927937] shutdown pipe due to pending shutdown request [60:456:2093] 2025-03-31T15:55:52.601300Z node 60 :PIPE_CLIENT DEBUG: TClient[72057594037927937] notify reset [60:456:2093] 2025-03-31T15:55:52.603006Z node 59 :PIPE_SERVER DEBUG: [72057594037927937] Push Sender# [60:443:2088] EventType# 268697624 2025-03-31T15:55:52.604854Z node 59 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{25, NKikimr::NHive::TTxStartTablet} queued, type NKikimr::NHive::TTxStartTablet 2025-03-31T15:55:52.605514Z node 59 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{25, NKikimr::NHive::TTxStartTablet} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-31T15:55:52.607498Z node 59 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{25, NKikimr::NHive::TTxStartTablet} hope 1 -> done Change{13, redo 83b alter 0b annex 0, ~{ 1 } -{ }, 0 gb} 2025-03-31T15:55:52.608476Z node 59 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{25, NKikimr::NHive::TTxStartTablet} release 4194304b of static, Memory{0 dyn 0} 2025-03-31T15:55:52.629383Z node 59 :BS_PROXY_PUT INFO: [9eafe310d4d96c0d] bootstrap ActorId# [59:459:2361] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:9:0:0:92:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-03-31T15:55:52.630137Z node 59 :BS_PROXY_PUT DEBUG: [9eafe310d4d96c0d] Id# [72057594037927937:2:9:0:0:92:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-03-31T15:55:52.631064Z node 59 :BS_PROXY_PUT DEBUG: [9eafe310d4d96c0d] restore 
Id# [72057594037927937:2:9:0:0:92:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-03-31T15:55:52.634315Z node 59 :BS_PROXY_PUT DEBUG: [9eafe310d4d96c0d] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:9:0:0:92:1] Marker# BPG33 2025-03-31T15:55:52.635546Z node 59 :BS_PROXY_PUT DEBUG: [9eafe310d4d96c0d] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:9:0:0:92:1] Marker# BPG32 2025-03-31T15:55:52.637033Z node 59 :BS_PROXY DEBUG: Send to queueActorId# [59:78:2081] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:9:0:0:92:1] FDS# 92 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-31T15:55:52.642301Z node 59 :BS_PROXY_PUT DEBUG: [9eafe310d4d96c0d] received {EvVPutResult Status# OK ID# [72057594037927937:2:9:0:0:92:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 24 } Cost# 80724 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 25 }}}} from# [0:1:0:0:0] Marker# BPP01 2025-03-31T15:55:52.643000Z node 59 :BS_PROXY_PUT DEBUG: [9eafe310d4d96c0d] Result# TEvPutResult {Id# [72057594037927937:2:9:0:0:92:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2025-03-31T15:55:52.643655Z node 59 :BS_PROXY_PUT INFO: [9eafe310d4d96c0d] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:9:0:0:92:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-03-31T15:55:52.645326Z node 59 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 0 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 10.281 sample PartId# [72057594037927937:2:9:0:0:92:1] QueryCount# 1 VDiskId# [0:1:0:0:0] NodeId# 59 } TEvVPutResult{ TimestampMs# 15.203 VDiskId# [0:1:0:0:0] NodeId# 59 Status# OK } ] } 2025-03-31T15:55:52.646748Z node 59 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037927937:2:9:0:0:92:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-03-31T15:55:52.647722Z node 59 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} commited cookie 1 for step 9 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> TStorageBalanceTest::TestScenario1 [GOOD] Test command err: 2025-03-31T15:54:08.092155Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-03-31T15:54:08.150880Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-03-31T15:54:08.152054Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:196} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-03-31T15:54:08.154860Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false 
PDisksWaitingToStart# false 2025-03-31T15:54:08.161243Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:265} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2025-03-31T15:54:08.161542Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-03-31T15:54:08.167575Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:30:2075] ControllerId# 72057594037932033 2025-03-31T15:54:08.167983Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-03-31T15:54:08.168745Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295 2025-03-31T15:54:08.173757Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler 2025-03-31T15:54:08.328061Z node 1 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-03-31T15:54:08.328427Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-03-31T15:54:08.346884Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:37:2080] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:08.347691Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:38:2081] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:08.348422Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:39:2082] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:08.349010Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:40:2083] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:08.350006Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:41:2084] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:08.350603Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:42:2085] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:08.351021Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:43:2086] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:08.351042Z node 1 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-03-31T15:54:08.351440Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [1:30:2075] 2025-03-31T15:54:08.351616Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [1:30:2075] 2025-03-31T15:54:08.351980Z node 1 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-03-31T15:54:08.352180Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-03-31T15:54:08.357277Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-03-31T15:54:08.358167Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-03-31T15:54:08.396658Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] ::Bootstrap [1:20:2063] 2025-03-31T15:54:08.396694Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] lookup [1:20:2063] 2025-03-31T15:54:08.614508Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:30:2075] 2025-03-31T15:54:08.614723Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-03-31T15:54:08.615255Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-03-31T15:54:08.615720Z node 1 :STATESTORAGE DEBUG: 
ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2025-03-31T15:54:08.636067Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037936129 entry.State: StInit ev: {EvForward TabletID: 72057594037936129 Ev: nullptr Flags: 1:2:0} 2025-03-31T15:54:08.638339Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] queue send [1:20:2063] 2025-03-31T15:54:08.638406Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:30:2075] 2025-03-31T15:54:08.638444Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-03-31T15:54:08.638636Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-03-31T15:54:08.668662Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-03-31T15:54:08.669493Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2025-03-31T15:54:08.669546Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-03-31T15:54:08.669932Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2025-03-31T15:54:08.671312Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-03-31T15:54:08.672482Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-03-31T15:54:08.673048Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 268639248 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2025-03-31T15:54:08.673269Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-03-31T15:54:08.673907Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [1:50:2091] 2025-03-31T15:54:08.673926Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [1:50:2091] 2025-03-31T15:54:08.675161Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2025-03-31T15:54:08.675213Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [1:50:2091] 2025-03-31T15:54:08.675263Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-03-31T15:54:08.675816Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StInit ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-03-31T15:54:08.676584Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-03-31T15:54:08.677473Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72057594037932033 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 
72057594037932033 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2025-03-31T15:54:08.677630Z node 1 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72057594037932033 followers: 0 2025-03-31T15:54:08.678419Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] queue send [1:20:2063] 2025-03-31T15:54:08.678462Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] forward result error, check reconnect [1:30:2075] 2025-03-31T15:54:08.678963Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] schedule retry [1:30:2075] 2025-03-31T15:54:08.701695Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037936129 Cookie: 0 ProxyOptions: SigNone} 2025-03-31T15:54:08.701920Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 2} 2025-03-31T15:54:08.710169Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 0} 2025-03-31T15:54:08.710640Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 1} 2025-03-31T15:54:08.711650Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2025-03-31T15:54:08.713086Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:30:2075] 2025-03-31T15:54:08.714665Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037927937 Cookie: 0 ProxyOptions: SigNone} 2025-03-31T15:54:08.714868Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 2} 2025-03-31T15:54:08.715176Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2025-03-31T15:54:08.716224Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 0} 2025-03-31T15:54:08.716259Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 1} 2025-03-31T15:54:08.716467Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2025-03-31T15:54:08.716549Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72057594037936129 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72057594037936129 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2025-03-31T15:54:08.716576Z node 1 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72057594037936129 followers: 0 2025-03-31T15:54:08.716634Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] forward result error, check reconnect [1:20:2063] 2025-03-31T15:54:08.716656Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] connect failed [1:20:2063] 2025-03-31T15:54:08.716736Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037927937} 2025-03-31T15:54:08.716831Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 2 2025-03-31T15:54:08.716851Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-03-31T15:54:08.716972Z node 1 :BS_NODE DEBUG: {NWDC18@distconf_binding.cpp:322} UpdateBound RefererNodeId# 1 NodeId# ::1:12001/1 Meta# {Fingerprint: 
"\371$\224\316I\335\243.)W\014\261m\013\346Osy\0160" } 2025-03-31T15:54:08.717035Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037927937} 2025-03-31T15:54:08.717363Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForI ... 540, NKikimr::NHive::TTxUpdateTabletGroups} hope 1 -> done Change{361, redo 257b alter 0b annex 0, ~{ 2, 1 } -{ }, 0 gb} 2025-03-31T15:55:54.122088Z node 24 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:183} Tx{540, NKikimr::NHive::TTxUpdateTabletGroups} release 4194304b of static, Memory{0 dyn 0} 2025-03-31T15:55:54.148159Z node 24 :BS_PROXY_PUT INFO: [f1bcc24dc4c7d110] bootstrap ActorId# [24:3847:5132] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:182:0:0:248:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-03-31T15:55:54.148530Z node 24 :BS_PROXY_PUT DEBUG: [f1bcc24dc4c7d110] Id# [72057594037927937:2:182:0:0:248:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-03-31T15:55:54.148576Z node 24 :BS_PROXY_PUT DEBUG: [f1bcc24dc4c7d110] restore Id# [72057594037927937:2:182:0:0:248:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-03-31T15:55:54.148625Z node 24 :BS_PROXY_PUT DEBUG: [f1bcc24dc4c7d110] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:182:0:0:248:1] Marker# BPG33 2025-03-31T15:55:54.148658Z node 24 :BS_PROXY_PUT DEBUG: [f1bcc24dc4c7d110] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:182:0:0:248:1] Marker# BPG32 2025-03-31T15:55:54.149241Z node 24 :BS_PROXY DEBUG: Send to queueActorId# [24:35:2079] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:182:0:0:248:1] FDS# 248 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-31T15:55:54.164678Z node 24 :BS_PROXY_PUT DEBUG: [f1bcc24dc4c7d110] received {EvVPutResult Status# OK ID# [72057594037927937:2:182:0:0:248:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 197 } Cost# 81952 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 198 }}}} from# [0:1:0:0:0] Marker# BPP01 2025-03-31T15:55:54.165301Z node 24 :BS_PROXY_PUT DEBUG: [f1bcc24dc4c7d110] Result# TEvPutResult {Id# [72057594037927937:2:182:0:0:248:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2025-03-31T15:55:54.166264Z node 24 :BS_PROXY_PUT INFO: [f1bcc24dc4c7d110] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:182:0:0:248:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-03-31T15:55:54.166384Z node 24 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 0 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 2.268 sample PartId# [72057594037927937:2:182:0:0:248:1] QueryCount# 1 VDiskId# [0:1:0:0:0] NodeId# 24 } TEvVPutResult{ TimestampMs# 17.731 VDiskId# [0:1:0:0:0] NodeId# 24 Status# OK } ] } 2025-03-31T15:55:54.166803Z node 24 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037927937:2:182:0:0:248:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-03-31T15:55:54.166927Z node 24 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:183} commited cookie 1 for step 182 2025-03-31T15:55:54.167260Z node 24 :TABLET_EXECUTOR DEBUG: 
Leader{72057594037927937:2:183} Tx{541, NKikimr::NHive::TTxReassignGroups} queued, type NKikimr::NHive::TTxReassignGroups 2025-03-31T15:55:54.167708Z node 24 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:183} Tx{541, NKikimr::NHive::TTxReassignGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-31T15:55:54.169355Z node 24 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:183} Tx{541, NKikimr::NHive::TTxReassignGroups} hope 1 -> done Change{362, redo 303b alter 0b annex 0, ~{ 1, 2 } -{ }, 0 gb} 2025-03-31T15:55:54.169410Z node 24 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:183} Tx{541, NKikimr::NHive::TTxReassignGroups} release 4194304b of static, Memory{0 dyn 0} 2025-03-31T15:55:54.169990Z node 24 :PIPE_CLIENT DEBUG: TClient[72057594037932033] send [24:513:2455] 2025-03-31T15:55:54.170020Z node 24 :PIPE_CLIENT DEBUG: TClient[72057594037932033] push event to server [24:513:2455] 2025-03-31T15:55:54.170064Z node 24 :PIPE_SERVER DEBUG: [72057594037932033] HandleSend Sender# [24:473:2426] EventType# 268637702 c[def1] *************************--------------------------------------------------------------------------- (0.25) *************************--------------------------------------------------------------------------- (0.25) 2025-03-31T15:55:54.276752Z node 24 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:184} Tx{542, NKikimr::NHive::TTxUpdateTabletGroups} queued, type NKikimr::NHive::TTxUpdateTabletGroups 2025-03-31T15:55:54.276828Z node 24 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:184} Tx{542, NKikimr::NHive::TTxUpdateTabletGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-31T15:55:54.277182Z node 24 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{88923003761760}: tablet 72075186224037910 wasn't changed 2025-03-31T15:55:54.277226Z node 24 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{88923003761760}: tablet 72075186224037910 skipped channel 0 2025-03-31T15:55:54.277300Z node 24 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{88923003761760}: tablet 72075186224037910 skipped channel 1 2025-03-31T15:55:54.277333Z node 24 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{88923003761760}: tablet 72075186224037910 skipped channel 2 2025-03-31T15:55:54.277640Z node 24 :HIVE NOTICE: HIVE#72057594037927937 THive::TTxUpdateTabletGroups{88923003761760}(72075186224037910)::Execute - TryToBoot was not successfull 2025-03-31T15:55:54.278363Z node 24 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:184} Tx{542, NKikimr::NHive::TTxUpdateTabletGroups} hope 1 -> done Change{363, redo 257b alter 0b annex 0, ~{ 2, 1 } -{ }, 0 gb} 2025-03-31T15:55:54.278647Z node 24 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:184} Tx{542, NKikimr::NHive::TTxUpdateTabletGroups} release 4194304b of static, Memory{0 dyn 0} 2025-03-31T15:55:54.302294Z node 24 :BS_PROXY_PUT INFO: [c665abdb4b1a4dd0] bootstrap ActorId# [24:3849:5134] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:183:0:0:248:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-03-31T15:55:54.303018Z node 24 :BS_PROXY_PUT DEBUG: [c665abdb4b1a4dd0] Id# [72057594037927937:2:183:0:0:248:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-03-31T15:55:54.303068Z node 24 :BS_PROXY_PUT DEBUG: [c665abdb4b1a4dd0] restore Id# [72057594037927937:2:183:0:0:248:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-03-31T15:55:54.304837Z 
node 24 :BS_PROXY_PUT DEBUG: [c665abdb4b1a4dd0] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:183:0:0:248:1] Marker# BPG33 2025-03-31T15:55:54.305196Z node 24 :BS_PROXY_PUT DEBUG: [c665abdb4b1a4dd0] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:183:0:0:248:1] Marker# BPG32 2025-03-31T15:55:54.305905Z node 24 :BS_PROXY DEBUG: Send to queueActorId# [24:35:2079] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:183:0:0:248:1] FDS# 248 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-31T15:55:54.313930Z node 24 :BS_PROXY_PUT DEBUG: [c665abdb4b1a4dd0] received {EvVPutResult Status# OK ID# [72057594037927937:2:183:0:0:248:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 198 } Cost# 81952 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 199 }}}} from# [0:1:0:0:0] Marker# BPP01 2025-03-31T15:55:54.314025Z node 24 :BS_PROXY_PUT DEBUG: [c665abdb4b1a4dd0] Result# TEvPutResult {Id# [72057594037927937:2:183:0:0:248:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2025-03-31T15:55:54.314069Z node 24 :BS_PROXY_PUT INFO: [c665abdb4b1a4dd0] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:183:0:0:248:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-03-31T15:55:54.318900Z node 24 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 0 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 4.208 sample PartId# [72057594037927937:2:183:0:0:248:1] QueryCount# 1 VDiskId# [0:1:0:0:0] NodeId# 24 } TEvVPutResult{ TimestampMs# 12.246 VDiskId# [0:1:0:0:0] NodeId# 24 Status# OK } ] } 2025-03-31T15:55:54.319541Z node 24 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037927937:2:183:0:0:248:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-03-31T15:55:54.319916Z node 24 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:184} commited cookie 1 for step 183 2025-03-31T15:55:54.320217Z node 24 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:184} Tx{543, NKikimr::NHive::TTxReassignGroups} queued, type NKikimr::NHive::TTxReassignGroups 2025-03-31T15:55:54.320258Z node 24 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:184} Tx{543, NKikimr::NHive::TTxReassignGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-31T15:55:54.320917Z node 24 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:184} Tx{543, NKikimr::NHive::TTxReassignGroups} hope 1 -> done Change{364, redo 303b alter 0b annex 0, ~{ 1, 2 } -{ }, 0 gb} 2025-03-31T15:55:54.321234Z node 24 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:184} Tx{543, NKikimr::NHive::TTxReassignGroups} release 4194304b of static, Memory{0 dyn 0} 2025-03-31T15:55:54.321318Z node 24 :PIPE_CLIENT DEBUG: TClient[72057594037932033] send [24:513:2455] 2025-03-31T15:55:54.321344Z node 24 :PIPE_CLIENT DEBUG: TClient[72057594037932033] push event to server [24:513:2455] 2025-03-31T15:55:54.321637Z node 24 :PIPE_SERVER DEBUG: [72057594037932033] HandleSend Sender# [24:473:2426] EventType# 268637702 c[def1] *************************--------------------------------------------------------------------------- (0.25) 
*************************--------------------------------------------------------------------------- (0.25) 2025-03-31T15:55:54.422857Z node 24 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:185} Tx{544, NKikimr::NHive::TTxUpdateTabletGroups} queued, type NKikimr::NHive::TTxUpdateTabletGroups 2025-03-31T15:55:54.422924Z node 24 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:185} Tx{544, NKikimr::NHive::TTxUpdateTabletGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-31T15:55:54.423035Z node 24 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{88923004713312}: tablet 72075186224037909 wasn't changed 2025-03-31T15:55:54.423068Z node 24 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{88923004713312}: tablet 72075186224037909 skipped channel 0 2025-03-31T15:55:54.423146Z node 24 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{88923004713312}: tablet 72075186224037909 skipped channel 1 2025-03-31T15:55:54.423175Z node 24 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{88923004713312}: tablet 72075186224037909 skipped channel 2 2025-03-31T15:55:54.423231Z node 24 :HIVE NOTICE: HIVE#72057594037927937 THive::TTxUpdateTabletGroups{88923004713312}(72075186224037909)::Execute - TryToBoot was not successfull 2025-03-31T15:55:54.423314Z node 24 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:185} Tx{544, NKikimr::NHive::TTxUpdateTabletGroups} hope 1 -> done Change{365, redo 257b alter 0b annex 0, ~{ 2, 1 } -{ }, 0 gb} 2025-03-31T15:55:54.423369Z node 24 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:185} Tx{544, NKikimr::NHive::TTxUpdateTabletGroups} release 4194304b of static, Memory{0 dyn 0} >> TSubDomainTest::CreateTableInsideSubDomain [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::CreateTableInsideSubDomain [GOOD] Test command err: 2025-03-31T15:55:13.423726Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488002809132620850:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:55:13.423809Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000b48/r3tmp/tmp7t7bfr/pdisk_1.dat 2025-03-31T15:55:14.621815Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:55:14.932102Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:55:15.132738Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:55:15.133012Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:55:15.144774Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11484 WaitRootIsUp 'dc-1'... 
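A note on the TABLET_EXECUTOR lines above: each Hive transaction (Tx{541}..Tx{544}) takes a fixed 4194304-byte static-memory grant when it is queued ("took 4194304b of static mem") and gives it back once execution completes ("release 4194304b of static, Memory{0 dyn 0}"). The sketch below is a hypothetical illustration of that take/release discipline using RAII; it is not YDB's actual executor code, and the class name TStaticMemGuard is invented.

    #include <cstddef>
    #include <cstdio>

    // Toy guard: acquire a fixed grant on construction, release on destruction,
    // so the release happens on every exit path of the transaction body.
    class TStaticMemGuard {
    public:
        explicit TStaticMemGuard(std::size_t bytes) : Bytes(bytes) {
            std::printf("took %zub of static mem\n", Bytes);
        }
        ~TStaticMemGuard() {
            std::printf("release %zub of static\n", Bytes);
        }
        TStaticMemGuard(const TStaticMemGuard&) = delete;
        TStaticMemGuard& operator=(const TStaticMemGuard&) = delete;
    private:
        std::size_t Bytes;
    };

    int main() {
        TStaticMemGuard guard(4194304); // 4 MiB, the grant size seen in the trace
        // ... transaction body would run here ("hope 1 -> done") ...
        return 0;                       // grant released here
    }

Pairing the take and release in one scope-bound object is what makes the "Memory{4194304 dyn 0}" / "Memory{0 dyn 0}" bookkeeping in the trace balance out for every transaction.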
TClient::Ls request: dc-1 2025-03-31T15:55:15.334063Z node 1 :TX_PROXY DEBUG: actor# [1:7488002809132621077:2116] Handle TEvNavigate describe path dc-1 2025-03-31T15:55:15.337564Z node 1 :TX_PROXY DEBUG: Actor# [1:7488002817722556147:2437] HANDLE EvNavigateScheme dc-1 2025-03-31T15:55:15.337917Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7488002813427588397:2130], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T15:55:15.338046Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002813427588454:2146][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7488002813427588397:2130], cookie# 1 2025-03-31T15:55:15.341657Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488002813427588466:2146][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488002813427588463:2146], cookie# 1 2025-03-31T15:55:15.341712Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488002813427588467:2146][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488002813427588464:2146], cookie# 1 2025-03-31T15:55:15.341735Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488002813427588468:2146][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488002813427588465:2146], cookie# 1 2025-03-31T15:55:15.341878Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488002809132620752:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488002813427588466:2146], cookie# 1 2025-03-31T15:55:15.341909Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488002809132620755:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488002813427588467:2146], cookie# 1 2025-03-31T15:55:15.341927Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488002809132620758:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488002813427588468:2146], cookie# 1 2025-03-31T15:55:15.342053Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488002813427588466:2146][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488002809132620752:2050], cookie# 1 2025-03-31T15:55:15.342072Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488002813427588467:2146][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488002809132620755:2053], cookie# 1 2025-03-31T15:55:15.342086Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488002813427588468:2146][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488002809132620758:2056], cookie# 1 2025-03-31T15:55:15.342166Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002813427588454:2146][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488002813427588463:2146], cookie# 1 2025-03-31T15:55:15.342192Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002813427588454:2146][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-03-31T15:55:15.342207Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002813427588454:2146][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# 
[1:7488002813427588464:2146], cookie# 1 2025-03-31T15:55:15.342280Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002813427588454:2146][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-03-31T15:55:15.342311Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002813427588454:2146][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488002813427588465:2146], cookie# 1 2025-03-31T15:55:15.342395Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002813427588454:2146][/dc-1] Unexpected sync response: sender# [1:7488002813427588465:2146], cookie# 1 2025-03-31T15:55:15.342575Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7488002813427588397:2130], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-03-31T15:55:15.364597Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7488002813427588397:2130], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7488002813427588454:2146] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-31T15:55:15.365025Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7488002813427588397:2130], cacheItem# { Subscriber: { Subscriber: [1:7488002813427588454:2146] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-03-31T15:55:15.370334Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7488002817722556148:2438], recipient# [1:7488002817722556147:2437], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-31T15:55:15.372152Z node 1 :TX_PROXY DEBUG: Actor# [1:7488002817722556147:2437] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-31T15:55:15.448134Z node 1 :TX_PROXY DEBUG: Actor# [1:7488002817722556147:2437] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-03-31T15:55:15.455356Z node 1 :TX_PROXY DEBUG: Actor# [1:7488002817722556147:2437] Handle TEvDescribeSchemeResult Forward to# [1:7488002817722556146:2436] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult 
PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T15:55:15.523366Z node 1 :TX_PROXY DEBUG: actor# [1:7488002809132621077:2116] Handle TEvProposeTransaction 2025-03-31T15:55:15.523404Z node 1 :TX_PROXY DEBUG: actor# [1:7488002809132621077:2116] TxId# 281474976710657 ProcessProposeTransaction 2025-03-31T15:55:15.523620Z node 1 :TX_PROXY DEBUG: actor# [1:7488002809132621077:2116] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7488002817722556155:2444] 2025-03-31T15:55:15.639765Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7488002813427588397:2130], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T15:55:15.639925Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7488002813427588397:2130], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480 2025-03-31T15:55:15.640416Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002817722556160:2448][/dc-1/.metadata/initialization/migrations] Handle NKikimr::T ... 
ncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T15:55:54.959704Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [5:7488002933337743481:2127], cacheItem# { Subscriber: { Subscriber: [5:7488002937632711310:2497] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T15:55:54.959780Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [5:7488002984877352070:2826], recipient# [5:7488002984877352068:2333], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T15:55:55.067132Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [5:7488002933337743481:2127], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T15:55:55.067269Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [5:7488002933337743481:2127], cacheItem# { Subscriber: { Subscriber: [5:7488002980582384723:2810] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T15:55:55.067357Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [5:7488002989172319368:2827], recipient# [5:7488002989172319367:2334], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T15:55:55.875606Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [5:7488002933337743481:2127], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: 
KindUnknown DomainInfo }] } 2025-03-31T15:55:55.875759Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [5:7488002933337743481:2127], cacheItem# { Subscriber: { Subscriber: [5:7488002937632711310:2497] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T15:55:55.875849Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [5:7488002989172319373:2828], recipient# [5:7488002989172319372:2335], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T15:55:55.962639Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [5:7488002933337743481:2127], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T15:55:55.962763Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [5:7488002933337743481:2127], cacheItem# { Subscriber: { Subscriber: [5:7488002937632711310:2497] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T15:55:55.962834Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [5:7488002989172319381:2832], recipient# [5:7488002989172319380:2336], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T15:55:56.069431Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [5:7488002933337743481:2127], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T15:55:56.069576Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for 
TNavigate: self# [5:7488002933337743481:2127], cacheItem# { Subscriber: { Subscriber: [5:7488002980582384723:2810] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T15:55:56.069679Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [5:7488002993467286679:2833], recipient# [5:7488002993467286678:2337], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T15:55:56.878795Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [5:7488002933337743481:2127], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T15:55:56.878936Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [5:7488002933337743481:2127], cacheItem# { Subscriber: { Subscriber: [5:7488002937632711310:2497] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T15:55:56.879036Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [5:7488002993467286684:2834], recipient# [5:7488002993467286683:2338], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T15:55:56.964007Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [5:7488002933337743481:2127], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T15:55:56.964149Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [5:7488002933337743481:2127], cacheItem# { Subscriber: { 
Subscriber: [5:7488002937632711310:2497] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T15:55:56.964248Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [5:7488002993467286692:2838], recipient# [5:7488002993467286691:2339], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> BasicUsage::BrokenCredentialsProvider [GOOD] >> TopicAutoscaling::ControlPlane_CreateAlterDescribe [GOOD] >> TopicAutoscaling::ControlPlane_DisableAutoPartitioning >> THiveTest::TestDeleteTabletWithFollowers [GOOD] >> THiveTest::TestCreateTabletBeforeLocal >> TableCreation::ConcurrentTableCreation [GOOD] >> TableCreation::ConcurrentMultipleTablesCreation >> TReplicaTest::Subscribe |66.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut |66.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut |66.9%| [LD] {RESULT} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut >> TReplicaTest::Subscribe [GOOD] >> TReplicaTest::SubscribeUnknownPath >> TContinuousBackupTests::Basic >> TReplicaTest::SubscribeUnknownPath [GOOD] >> TReplicaTest::SyncVersion >> KqpNotNullColumns::UpsertNotNull ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> BasicUsage::BrokenCredentialsProvider [GOOD] Test command err: 2025-03-31T15:53:53.467519Z :MaxByteSizeEqualZero INFO: Random seed for debugging is 1743436433467132 2025-03-31T15:54:02.563872Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488002503984243921:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:54:02.564741Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T15:54:03.389470Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488002505393590617:2082];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:54:03.389520Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T15:54:06.718494Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-31T15:54:07.527566Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488002503984243921:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:54:07.527930Z node 1 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012b2/r3tmp/tmpALEHpd/pdisk_1.dat 2025-03-31T15:54:07.963430Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:08.971597Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488002505393590617:2082];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:54:08.971877Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:54:08.978452Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-31T15:54:10.815244Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:10.818260Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:10.859657Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:10.859688Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:11.568455Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:11.568773Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:11.578439Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T15:54:11.583898Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:54:11.605657Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:11.605761Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:11.611897Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25350, node 1 2025-03-31T15:54:11.912238Z INFO: TTestServer started on Port 19458 GrpcPort 25350 TClient is connected to server localhost:19458 PQClient connected to localhost:25350 WaitRootIsUp 'Root'... 
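The repeated METADATA_PROVIDER errors above ("cannot detect path existence; path=//Root/.metadata/initialization/migrations" with error=scheme_cache_undelivered_message, then error=timeout, then "incorrect path status: LookupError") read as a startup probe that keeps retrying until the metadata objects become visible. Below is a minimal sketch of such a capped-backoff poll loop, under the assumption of a boolean probe callback; the real YDB actors are event-driven rather than sleep-based, and WaitForPath is an invented name.

    #include <algorithm>
    #include <chrono>
    #include <functional>
    #include <thread>

    // Toy poller: probe until the path is visible or the attempt budget runs out.
    bool WaitForPath(const std::function<bool()>& probe,
                     int maxAttempts = 10,
                     std::chrono::milliseconds cap = std::chrono::milliseconds(5000)) {
        auto delay = std::chrono::milliseconds(100);
        for (int attempt = 0; attempt < maxAttempts; ++attempt) {
            if (probe()) {
                return true;                  // path exists, stop retrying
            }
            std::this_thread::sleep_for(delay);
            delay = std::min(delay * 2, cap); // exponential backoff, capped
        }
        return false;                         // caller logs the final error
    }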
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:54:12.235454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T15:54:12.246717Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/0l5c/0012b2/r3tmp/yandexa3mxep.tmp 2025-03-31T15:54:12.246754Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/0l5c/0012b2/r3tmp/yandexa3mxep.tmp 2025-03-31T15:54:12.246983Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/0l5c/0012b2/r3tmp/yandexa3mxep.tmp 2025-03-31T15:54:12.251311Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T15:54:12.264282Z node 1 :IMPORT WARN: Table profiles were not loaded waiting... waiting... waiting... waiting... 2025-03-31T15:54:20.579629Z node 1 :KQP_PROXY ERROR: TraceId: "01jqpej4nr0jp9sf48g2k4s982", Request deadline has expired for 3.074507s seconds 2025-03-31T15:54:20.783268Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488002578408034982:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:54:20.783936Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488002578408034965:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:54:20.791358Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:54:20.974954Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488002581293656349:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:54:20.975043Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:54:20.989616Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488002581293656364:2354], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:54:21.027103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-03-31T15:54:21.621095Z node 2 :TX_PROXY ERROR: Actor# [2:7488002578408034987:2135] txid# 281474976715657, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-31T15:54:21.853290Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488002578408034986:2322], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-03-31T15:54:22.060348Z node 2 :TX_PROXY ERROR: Actor# [2:7488002582703002314:2143] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:54:22.257898Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488002581293656366:2355], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-03-31T15:54:22.396774Z node 1 :TX_PROXY ERROR: Actor# [1:7488002589883591074:2754] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:54:23.881518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T15:54:23.885949Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488002589883591088:2363], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T15:54:23.887393Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTJjYjdkYWQtZDQ3MjAyZmYtYzVkYzhhNmUtYTZmZDc2M2U=, ActorId: [1:7488002581293656327:2349], ActorState: ExecuteState, TraceId: 01jqpejcgj4pxpe511d2zvnj4p, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T15:54:23.889666Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T15:54:23.889317Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7488002586997969625:2329], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable! ... EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 0 2025-03-31T15:55:50.359930Z node 5 :KQP_COMPUTE WARN: TxId: 281474976715686, task: 1, CA Id [5:7488002967755432430:2556]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-03-31T15:55:50.419724Z node 5 :KQP_COMPUTE WARN: TxId: 281474976715686, task: 1, CA Id [5:7488002967755432430:2556]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-03-31T15:55:50.514689Z node 5 :KQP_COMPUTE WARN: TxId: 281474976715686, task: 1, CA Id [5:7488002967755432430:2556]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-03-31T15:55:50.625804Z node 5 :KQP_COMPUTE WARN: TxId: 281474976715686, task: 1, CA Id [5:7488002967755432430:2556]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-03-31T15:55:51.592796Z node 5 :KQP_COMPUTE WARN: TxId: 281474976715686, task: 1, CA Id [5:7488002967755432430:2556]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-03-31T15:55:51.755295Z node 5 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976715687. Failed to resolve tablet: 72075186224037890 after several retries. 2025-03-31T15:55:51.755434Z node 5 :KQP_EXECUTER WARN: ActorId: [5:7488002972050399789:2557] TxId: 281474976715687. Ctx: { TraceId: 01jqpen4feb7t0x2cvb6299rxs, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=YTI4NWIyN2MtNmQyMjgzOTItMmY1ZGQwZmEtODgzNWY5ZGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-03-31T15:55:51.755675Z node 5 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=5&id=YTI4NWIyN2MtNmQyMjgzOTItMmY1ZGQwZmEtODgzNWY5ZGU=, ActorId: [5:7488002967755432464:2557], ActorState: ExecuteState, TraceId: 01jqpen4feb7t0x2cvb6299rxs, Create QueryResponse for error on request, msg: 2025-03-31T15:55:51.757027Z node 5 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jqpen5c23qdgdp9ehg11awx3" } } YdbStatus: UNAVAILABLE ConsumedRu: 600 } 2025-03-31T15:55:51.900387Z node 5 :KQP_COMPUTE WARN: TxId: 281474976715686, task: 1, CA Id [5:7488002967755432430:2556]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-03-31T15:55:52.270306Z node 5 :KQP_COMPUTE WARN: TxId: 281474976715686, task: 1, CA Id [5:7488002967755432430:2556]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-03-31T15:55:53.134959Z node 5 :KQP_COMPUTE WARN: TxId: 281474976715686, task: 1, CA Id [5:7488002967755432430:2556]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-03-31T15:55:53.792508Z node 5 :KQP_COMPUTE WARN: TxId: 281474976715686, task: 1, CA Id [5:7488002967755432430:2556]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-03-31T15:55:54.031249Z node 5 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976715689. Failed to resolve tablet: 72075186224037890 after several retries. 2025-03-31T15:55:54.031711Z node 5 :KQP_EXECUTER WARN: ActorId: [5:7488002980640334500:2568] TxId: 281474976715689. Ctx: { TraceId: 01jqpen6j19r5rt6exj8s098xp, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=Mjc0OGUxNzQtMWJjNDlmNGUtYmFmY2Y0M2EtZWFmZjEzMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-03-31T15:55:54.031875Z node 5 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=5&id=Mjc0OGUxNzQtMWJjNDlmNGUtYmFmY2Y0M2EtZWFmZjEzMg==, ActorId: [5:7488002976345367163:2568], ActorState: ExecuteState, TraceId: 01jqpen6j19r5rt6exj8s098xp, Create QueryResponse for error on request, msg: 2025-03-31T15:55:54.033722Z node 5 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jqpen7k75x9s2hmhbad13ypc" } } YdbStatus: UNAVAILABLE ConsumedRu: 703 } 2025-03-31T15:55:54.403842Z node 5 :KQP_COMPUTE WARN: TxId: 281474976715686, task: 1, CA Id [5:7488002967755432430:2556]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-03-31T15:55:54.404925Z node 5 :KQP_COMPUTE ERROR: SelfId: [5:7488002967755432430:2556], TxId: 281474976715686, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=5&id=N2VmZDRiNzMtOTYxMWNlMmYtM2U5ZmFiYWItZDE5NThhZDk=. TraceId : 01jqpen45d1ka11zsa65cy022a. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Source[0] fatal error: {
: Error: Too many retries for shard 72075186224037890 } 2025-03-31T15:55:54.405256Z node 5 :KQP_COMPUTE ERROR: SelfId: [5:7488002967755432430:2556], TxId: 281474976715686, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=5&id=N2VmZDRiNzMtOTYxMWNlMmYtM2U5ZmFiYWItZDE5NThhZDk=. TraceId : 01jqpen45d1ka11zsa65cy022a. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: UNAVAILABLE DEFAULT_ERROR: {
: Error: Too many retries for shard 72075186224037890 }. 2025-03-31T15:55:54.407389Z node 5 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=5&id=N2VmZDRiNzMtOTYxMWNlMmYtM2U5ZmFiYWItZDE5NThhZDk=, ActorId: [5:7488002967755432422:2556], ActorState: ExecuteState, TraceId: 01jqpen45d1ka11zsa65cy022a, Create QueryResponse for error on request, msg: 2025-03-31T15:55:54.432519Z node 5 :PQ_METACACHE ERROR: Got error trying to perform request: { Response { QueryIssues { message: "Too many retries for shard 72075186224037890" severity: 1 } TxMeta { id: "01jqpen45e4dwg1wxkweg8fydb" } } YdbStatus: UNAVAILABLE ConsumedRu: 2 } 2025-03-31T15:55:56.280375Z node 5 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976715692. Failed to resolve tablet: 72075186224037890 after several retries. 2025-03-31T15:55:56.280466Z node 5 :KQP_EXECUTER WARN: ActorId: [5:7488002993525236494:2578] TxId: 281474976715692. Ctx: { TraceId: 01jqpen8rxe6ydxypwgzdskkec, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=ZjIwYTYwOGQtZTEyZmEwMTMtMzZlNWMyNzktYmY0MDhlOTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-03-31T15:55:56.280620Z node 5 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=5&id=ZjIwYTYwOGQtZTEyZmEwMTMtMzZlNWMyNzktYmY0MDhlOTk=, ActorId: [5:7488002989230269171:2578], ActorState: ExecuteState, TraceId: 01jqpen8rxe6ydxypwgzdskkec, Create QueryResponse for error on request, msg: 2025-03-31T15:55:56.285346Z node 5 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jqpen9rv85ehhed0hp2zjqz1" } } YdbStatus: UNAVAILABLE ConsumedRu: 661 } 2025-03-31T15:55:56.631283Z node 5 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976715694. Failed to resolve tablet: 72075186224037890 after several retries. 2025-03-31T15:55:56.631407Z node 5 :KQP_EXECUTER WARN: ActorId: [5:7488002993525236544:2586] TxId: 281474976715694. Ctx: { TraceId: 01jqpena4ne3zhfrj212tghqf8, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=NDIxYjYwODMtNWJmMDNhMjEtODc4MDIyODItOGJkMzYzMzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-03-31T15:55:56.631578Z node 5 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=5&id=NDIxYjYwODMtNWJmMDNhMjEtODc4MDIyODItOGJkMzYzMzY=, ActorId: [5:7488002993525236541:2586], ActorState: ExecuteState, TraceId: 01jqpena4ne3zhfrj212tghqf8, Create QueryResponse for error on request, msg: 2025-03-31T15:55:56.635559Z node 5 :PQ_METACACHE ERROR: Got error trying to perform request: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jqpena4p4dj7zknpc4g2bpzd" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 } 2025-03-31T15:55:58.270073Z node 5 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976715696. Failed to resolve tablet: 72075186224037890 after several retries. 2025-03-31T15:55:58.270179Z node 5 :KQP_EXECUTER WARN: ActorId: [5:7488003002115171220:2589] TxId: 281474976715696. Ctx: { TraceId: 01jqpenaz691ksfb10d5513ehb, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=OTliOGFjODgtOTQwNWFjMjItYzI2ZTU1ZmMtZWIzOTNmNDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-03-31T15:55:58.270656Z node 5 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=5&id=OTliOGFjODgtOTQwNWFjMjItYzI2ZTU1ZmMtZWIzOTNmNDY=, ActorId: [5:7488002997820203901:2589], ActorState: ExecuteState, TraceId: 01jqpenaz691ksfb10d5513ehb, Create QueryResponse for error on request, msg: 2025-03-31T15:55:58.272030Z node 5 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jqpenbq2fcr3fgpqj936cr3n" } } YdbStatus: UNAVAILABLE ConsumedRu: 482 } 2025-03-31T15:55:58.856547Z node 5 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976715698. Failed to resolve tablet: 72075186224037890 after several retries. 2025-03-31T15:55:58.856649Z node 5 :KQP_EXECUTER WARN: ActorId: [5:7488003002115171271:2597] TxId: 281474976715698. Ctx: { TraceId: 01jqpenc9h78hdm1y7fj0vth60, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=YWIyYzY5NjUtYTYyZjgzOS02YzE0ZmI4LTI2NDYwYjY0, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-03-31T15:55:58.856843Z node 5 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=5&id=YWIyYzY5NjUtYTYyZjgzOS02YzE0ZmI4LTI2NDYwYjY0, ActorId: [5:7488003002115171268:2597], ActorState: ExecuteState, TraceId: 01jqpenc9h78hdm1y7fj0vth60, Create QueryResponse for error on request, msg: 2025-03-31T15:55:58.857891Z node 5 :PQ_METACACHE ERROR: Got error trying to perform request: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jqpenc9j3p8vyyfeb0xe1wc0" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 } >> TReplicaTest::Unsubscribe >> TReplicaTest::SyncVersion [GOOD] |66.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/tx-replication-controller-ut_stream_creator |66.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/tx-replication-controller-ut_stream_creator |66.9%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/tx-replication-controller-ut_stream_creator >> TSubDomainTest::ConsistentCopyTable [GOOD] >> TReplicaTest::Unsubscribe [GOOD] >> TReplicaTest::UnsubscribeUnknownPath ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::SyncVersion [GOOD] Test command err: 2025-03-31T15:56:05.162216Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:7:2054] 2025-03-31T15:56:05.162287Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Successful handshake: owner# 1, generation# 1 2025-03-31T15:56:05.162452Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:7:2054], cookie# 0, event size# 72 2025-03-31T15:56:05.162485Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-03-31T15:56:05.168126Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-03-31T15:56:05.168284Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [1:7:2054] 2025-03-31T15:56:05.168366Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Subscribe: subscriber# [1:7:2054], path# path, domainOwnerId# 0, capabilities# 2025-03-31T15:56:05.168485Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:7:2054], cookie# 0, event size# 40 2025-03-31T15:56:05.168527Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2025-03-31T15:56:05.168557Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2025-03-31T15:56:05.256733Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [2:7:2054] 2025-03-31T15:56:05.256791Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Upsert description: path# path 2025-03-31T15:56:05.256865Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] 
Subscribe: subscriber# [2:7:2054], path# path, domainOwnerId# 0, capabilities# 2025-03-31T15:56:06.272349Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [3:7:2054] 2025-03-31T15:56:06.272413Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Successful handshake: owner# 1, generation# 1 2025-03-31T15:56:06.272529Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 76 2025-03-31T15:56:06.272561Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-03-31T15:56:06.272616Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 100500, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 32} 2025-03-31T15:56:06.272693Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:7:2054] 2025-03-31T15:56:06.272754Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Subscribe: subscriber# [3:7:2054], path# path, domainOwnerId# 0, capabilities# 2025-03-31T15:56:06.272835Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [3:7:2054], cookie# 1 >> THiveTest::TestCreateTabletBeforeLocal [GOOD] >> THiveTest::TestCreateTabletReboots >> THiveTest::TestHiveBalancerDifferentResources2 [GOOD] >> THiveTest::TestHiveBalancerUselessNeighbourMoves >> TReplicaTest::UpdateWithoutHandshake >> TReplicaTest::UnsubscribeUnknownPath [GOOD] >> TReplicaTest::UpdateWithoutHandshake [GOOD] >> TReplicaTest::UpdateWithStaleGeneration ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::ConsistentCopyTable [GOOD] Test command err: 2025-03-31T15:55:13.256452Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488002809528564469:2115];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:55:13.257412Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000b58/r3tmp/tmpR41B4I/pdisk_1.dat 2025-03-31T15:55:13.684094Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:55:13.684183Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:55:13.685760Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:55:13.710707Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:20568 WaitRootIsUp 'dc-1'... 
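The SCHEME_BOARD_REPLICA trace above follows a fixed order: TEvHandshake{Owner, Generation} is acknowledged ("Successful handshake: owner# 1, generation# 1") before TEvUpdate is applied ("Upsert description" / "Delete description") and TEvSubscribe is served. Judging by the neighbouring test names (TReplicaTest::UpdateWithoutHandshake, TReplicaTest::UpdateWithStaleGeneration), updates that skip the handshake or carry an old generation are presumably rejected. The toy model below only illustrates that bookkeeping; it is not the replica's real implementation, and the names are invented.

    #include <cstdint>
    #include <map>

    // Toy replica: remember the latest handshaken generation per owner and
    // accept updates only from exactly that generation.
    class TToyReplica {
    public:
        void OnHandshake(std::uint64_t owner, std::uint64_t generation) {
            Generations[owner] = generation;  // "Successful handshake: owner# N, generation# G"
        }

        bool OnUpdate(std::uint64_t owner, std::uint64_t generation) const {
            auto it = Generations.find(owner);
            if (it == Generations.end()) {
                return false;                 // update without handshake: reject
            }
            return it->second == generation;  // stale generation: reject
        }

    private:
        std::map<std::uint64_t, std::uint64_t> Generations;
    };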
TClient::Ls request: dc-1 2025-03-31T15:55:13.913865Z node 1 :TX_PROXY DEBUG: actor# [1:7488002809528564658:2104] Handle TEvNavigate describe path dc-1 2025-03-31T15:55:13.913915Z node 1 :TX_PROXY DEBUG: Actor# [1:7488002809528564937:2258] HANDLE EvNavigateScheme dc-1 2025-03-31T15:55:13.914045Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7488002809528564683:2118], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T15:55:13.914101Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7488002809528564683:2118], path# /dc-1, domainOwnerId# 72057594046644480 2025-03-31T15:55:13.914325Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002809528564938:2259][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-03-31T15:55:13.922841Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488002809528564357:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7488002809528564943:2259] 2025-03-31T15:55:13.922840Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488002809528564354:2049] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7488002809528564942:2259] 2025-03-31T15:55:13.922905Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7488002809528564354:2049] Subscribe: subscriber# [1:7488002809528564942:2259], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-31T15:55:13.922905Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7488002809528564357:2052] Subscribe: subscriber# [1:7488002809528564943:2259], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-31T15:55:13.922999Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488002809528564360:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7488002809528564944:2259] 2025-03-31T15:55:13.923020Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7488002809528564360:2055] Subscribe: subscriber# [1:7488002809528564944:2259], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-31T15:55:13.923027Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488002809528564942:2259][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7488002809528564354:2049] 2025-03-31T15:55:13.923053Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488002809528564943:2259][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7488002809528564357:2052] 2025-03-31T15:55:13.923108Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488002809528564354:2049] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7488002809528564942:2259] 2025-03-31T15:55:13.923113Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488002809528564944:2259][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7488002809528564360:2055] 2025-03-31T15:55:13.923127Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488002809528564357:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# 
[1:7488002809528564943:2259] 2025-03-31T15:55:13.923143Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488002809528564360:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7488002809528564944:2259] 2025-03-31T15:55:13.923168Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002809528564938:2259][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7488002809528564939:2259] 2025-03-31T15:55:13.923245Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002809528564938:2259][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7488002809528564940:2259] 2025-03-31T15:55:13.923343Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7488002809528564938:2259][/dc-1] Set up state: owner# [1:7488002809528564683:2118], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-03-31T15:55:13.923476Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002809528564938:2259][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7488002809528564941:2259] 2025-03-31T15:55:13.923532Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7488002809528564938:2259][/dc-1] Path was already updated: owner# [1:7488002809528564683:2118], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-03-31T15:55:13.923580Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488002809528564942:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488002809528564939:2259], cookie# 1 2025-03-31T15:55:13.923612Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488002809528564943:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488002809528564940:2259], cookie# 1 2025-03-31T15:55:13.923627Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488002809528564944:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488002809528564941:2259], cookie# 1 2025-03-31T15:55:13.925341Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488002809528564354:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488002809528564942:2259], cookie# 1 2025-03-31T15:55:13.925383Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488002809528564357:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488002809528564943:2259], cookie# 1 2025-03-31T15:55:13.925421Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488002809528564360:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488002809528564944:2259], cookie# 1 2025-03-31T15:55:13.925461Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488002809528564942:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488002809528564354:2049], cookie# 1 2025-03-31T15:55:13.925477Z node 1 :SCHEME_BOARD_SUBSCRIBER 
DEBUG: [replica][1:7488002809528564943:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488002809528564357:2052], cookie# 1 2025-03-31T15:55:13.925488Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488002809528564944:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488002809528564360:2055], cookie# 1 2025-03-31T15:55:13.925523Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002809528564938:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488002809528564939:2259], cookie# 1 2025-03-31T15:55:13.925555Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002809528564938:2259][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-03-31T15:55:13.925573Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002809528564938:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488002809528564940:2259], cookie# 1 2025-03-31T15:55:13.925589Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002809528564938:2259][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-03-31T15:55:13.925615Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002809528564938:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488002809528564941:2259], cookie# 1 2025-03-31T15:55:13.925648Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002809528564938:2259][/dc-1] Unexpected sync response: sender# [1:7488002809528564941:2259], cookie# 1 2025-03-31T15:55:13.981960Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7488002809528564683:2118], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-03-31T15:55:13.982326Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7488002809528564683:2118], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 
CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVer ... pTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T15:56:05.487885Z node 6 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [6:7488002963516417679:2105], path# /dc-1/USER_1/.metadata/workload_manager/delayed_requests, domainOwnerId# 72057594046644480 2025-03-31T15:56:05.488191Z node 6 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [6:7488002963516417679:2105], path# /dc-1/USER_1/.metadata/workload_manager/running_requests, domainOwnerId# 72057594046644480 2025-03-31T15:56:05.488412Z node 6 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][6:7488003032235894736:2293][/dc-1/USER_1/.metadata/workload_manager/delayed_requests] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-03-31T15:56:05.488853Z node 6 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][6:7488003032235894737:2294][/dc-1/USER_1/.metadata/workload_manager/running_requests] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-03-31T15:56:05.490116Z node 6 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][6:7488003032235894736:2293][/dc-1/USER_1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [6:7488003032235894738:2293] 2025-03-31T15:56:05.490151Z node 6 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][6:7488003032235894736:2293][/dc-1/USER_1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [6:7488003032235894739:2293] 2025-03-31T15:56:05.490181Z node 6 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][6:7488003032235894736:2293][/dc-1/USER_1/.metadata/workload_manager/delayed_requests] Set up state: owner# [6:7488002963516417679:2105], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-31T15:56:05.490204Z node 6 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][6:7488003032235894736:2293][/dc-1/USER_1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [6:7488003032235894740:2293] 2025-03-31T15:56:05.490234Z node 6 :SCHEME_BOARD_SUBSCRIBER INFO: [main][6:7488003032235894736:2293][/dc-1/USER_1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [6:7488002963516417679:2105], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-31T15:56:05.490276Z node 6 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][6:7488003032235894737:2294][/dc-1/USER_1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/running_requests Version: 0 }: sender# [6:7488003032235894744:2294] 2025-03-31T15:56:05.490296Z node 6 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][6:7488003032235894737:2294][/dc-1/USER_1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/running_requests Version: 0 }: sender# 
[6:7488003032235894745:2294] 2025-03-31T15:56:05.490316Z node 6 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][6:7488003032235894737:2294][/dc-1/USER_1/.metadata/workload_manager/running_requests] Set up state: owner# [6:7488002963516417679:2105], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-31T15:56:05.490586Z node 6 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][6:7488003032235894737:2294][/dc-1/USER_1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/running_requests Version: 0 }: sender# [6:7488003032235894746:2294] 2025-03-31T15:56:05.490608Z node 6 :SCHEME_BOARD_SUBSCRIBER INFO: [main][6:7488003032235894737:2294][/dc-1/USER_1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [6:7488002963516417679:2105], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-31T15:56:05.490914Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [6:7488002963516417679:2105], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_1/.metadata/workload_manager/delayed_requests PathId: Strong: 0 } 2025-03-31T15:56:05.490986Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [6:7488002963516417679:2105], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_1/.metadata/workload_manager/delayed_requests PathId: Strong: 0 }, by path# { Subscriber: { Subscriber: [6:7488003032235894736:2293] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-31T15:56:05.491069Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [6:7488002963516417679:2105], cacheItem# { Subscriber: { Subscriber: [6:7488003032235894736:2293] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T15:56:05.500587Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [6:7488002963516417679:2105], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_1/.metadata/workload_manager/running_requests PathId: Strong: 0 } 2025-03-31T15:56:05.500678Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [6:7488002963516417679:2105], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_1/.metadata/workload_manager/running_requests PathId: Strong: 0 }, by path# { Subscriber: { Subscriber: [6:7488003032235894737:2294] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-31T15:56:05.500767Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [6:7488002963516417679:2105], cacheItem# { Subscriber: { Subscriber: 
[6:7488003032235894737:2294] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T15:56:05.500867Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [6:7488003032235894750:2295], recipient# [6:7488003032235894735:2331], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-03-31T15:56:05.501257Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:7488003032235894735:2331], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:56:05.579524Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [6:7488002963516417679:2105], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T15:56:05.579656Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [6:7488002963516417679:2105], cacheItem# { Subscriber: { Subscriber: [6:7488003032235894736:2293] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T15:56:05.579923Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [6:7488002963516417679:2105], cacheItem# { Subscriber: { Subscriber: [6:7488003032235894737:2294] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T15:56:05.580017Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [6:7488003032235894751:2296], recipient# [6:7488003032235894735:2331], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-03-31T15:56:05.583653Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:7488003032235894735:2331], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::UnsubscribeUnknownPath [GOOD] Test command err: 2025-03-31T15:56:06.960184Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:7:2054] 2025-03-31T15:56:06.960280Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Successful handshake: owner# 1, generation# 1 2025-03-31T15:56:06.960437Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [1:8:2055] 2025-03-31T15:56:06.960489Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# path 2025-03-31T15:56:06.960615Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Subscribe: subscriber# [1:8:2055], path# path, domainOwnerId# 0, capabilities# 2025-03-31T15:56:06.961331Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [1:9:2056] 2025-03-31T15:56:06.961575Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Subscribe: subscriber# [1:9:2056], path# path, domainOwnerId# 0, capabilities# 2025-03-31T15:56:06.962003Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:7:2054], cookie# 0, event size# 72 2025-03-31T15:56:06.962328Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-03-31T15:56:07.003249Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-03-31T15:56:07.003613Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [1:8:2055] 2025-03-31T15:56:07.003676Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Unsubscribe: subscriber# [1:8:2055], path# path 2025-03-31T15:56:07.003781Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:7:2054], cookie# 0, event size# 40 2025-03-31T15:56:07.003823Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2025-03-31T15:56:07.003860Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2025-03-31T15:56:09.282687Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [2:7:2054] >> THiveTest::TestCreateSubHiveCreateManyTablets [GOOD] >> THiveTest::TestCreateSubHiveCreateManyTabletsWithReboots >> KqpProxy::NoLocalSessionExecution [GOOD] >> KqpProxy::NoUserAccessToScriptExecutionsTable >> TReplicaTest::UpdateWithStaleGeneration [GOOD] >> KqpNewEngine::PureTxMixedWithDeferred [GOOD] >> KqpNewEngine::ReadAfterWrite >> BasicUsage::WriteAndReadSomeMessagesWithNoCompression [GOOD] >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessages ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::UpdateWithStaleGeneration [GOOD] Test command err: 2025-03-31T15:56:10.388740Z node 1 
:SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:7:2054], cookie# 0, event size# 72 2025-03-31T15:56:10.388802Z node 1 :SCHEME_BOARD_REPLICA ERROR: [1:6:2053] Reject update from unknown populator: sender# [1:7:2054], owner# 1, generation# 1 2025-03-31T15:56:10.388881Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [1:7:2054] 2025-03-31T15:56:10.388916Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# path 2025-03-31T15:56:10.389002Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Subscribe: subscriber# [1:7:2054], path# path, domainOwnerId# 0, capabilities# 2025-03-31T15:56:10.389104Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [1:7:2054] 2025-03-31T15:56:10.389148Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Unsubscribe: subscriber# [1:7:2054], path# path 2025-03-31T15:56:10.389195Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 1] DomainOwnerId: 0 }: sender# [1:7:2054] 2025-03-31T15:56:10.389220Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# [OwnerId: 1, LocalPathId: 1] 2025-03-31T15:56:10.389266Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Subscribe: subscriber# [1:7:2054], path# [OwnerId: 1, LocalPathId: 1], domainOwnerId# 0, capabilities# 2025-03-31T15:56:10.389324Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { PathId: [OwnerId: 1, LocalPathId: 1] }: sender# [1:7:2054] 2025-03-31T15:56:10.389352Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Unsubscribe: subscriber# [1:7:2054], path# [OwnerId: 1, LocalPathId: 1] 2025-03-31T15:56:11.606541Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:7:2054] 2025-03-31T15:56:11.606614Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Successful handshake: owner# 1, generation# 1 2025-03-31T15:56:11.606726Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 0 }: sender# [2:7:2054], cookie# 0, event size# 72 2025-03-31T15:56:11.606763Z node 2 :SCHEME_BOARD_REPLICA ERROR: [2:6:2053] Reject update from stale populator: sender# [2:7:2054], owner# 1, generation# 0, pending generation# 1 2025-03-31T15:56:11.606838Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [2:7:2054] 2025-03-31T15:56:11.606870Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Upsert description: path# path 2025-03-31T15:56:11.606929Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Subscribe: subscriber# [2:7:2054], path# path, domainOwnerId# 0, capabilities# 2025-03-31T15:56:11.607006Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [2:7:2054] 2025-03-31T15:56:11.607048Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Unsubscribe: subscriber# [2:7:2054], path# path 2025-03-31T15:56:11.607182Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 1] DomainOwnerId: 0 }: sender# [2:7:2054] 2025-03-31T15:56:11.607227Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Upsert description: path# [OwnerId: 1, LocalPathId: 1] 2025-03-31T15:56:11.607276Z node 2 :SCHEME_BOARD_REPLICA INFO: 
[2:6:2053] Subscribe: subscriber# [2:7:2054], path# [OwnerId: 1, LocalPathId: 1], domainOwnerId# 0, capabilities# 2025-03-31T15:56:11.607352Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { PathId: [OwnerId: 1, LocalPathId: 1] }: sender# [2:7:2054] 2025-03-31T15:56:11.607418Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Unsubscribe: subscriber# [2:7:2054], path# [OwnerId: 1, LocalPathId: 1] >> TContinuousBackupTests::TakeIncrementalBackup >> KqpNotNullColumns::InsertNotNullPk [GOOD] >> KqpNotNullColumns::InsertNotNull |67.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow |67.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow |67.0%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow >> KqpSort::TopSortTableExprOffset [GOOD] >> KqpSort::UnionAllSortLimit >> TopicAutoscaling::PartitionSplit_AutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionMerge_PreferedPartition_BeforeAutoscaleAwareSDK >> TContinuousBackupTests::Basic [GOOD] >> TopicAutoscaling::PartitionSplit_PQv1 [GOOD] >> TopicAutoscaling::PartitionSplit_PreferedPartition_BeforeAutoscaleAwareSDK >> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_PQv1 >> TKeyValueTracingTest::ReadSmall >> THiveTest::TestHiveBalancerUselessNeighbourMoves [GOOD] >> THiveTest::TestHiveBalancerWithImmovableTablets >> TopicAutoscaling::CommitTopPast_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::ControlPlane_BackCompatibility >> TopicAutoscaling::Simple_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::Simple_AutoscaleAwareSDK ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup/unittest >> TContinuousBackupTests::Basic [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-31T15:56:12.061728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T15:56:12.062271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:56:12.062516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T15:56:12.062720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T15:56:12.063141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T15:56:12.063713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T15:56:12.064138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:56:12.069904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, 
DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T15:56:12.072407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:56:12.328062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:56:12.328273Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:56:12.386935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:56:12.391609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T15:56:12.391782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T15:56:12.419675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T15:56:12.421154Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T15:56:12.427883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T15:56:12.428806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:56:12.439072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:56:12.440452Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:56:12.440522Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:56:12.440615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T15:56:12.440662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:56:12.440726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T15:56:12.440947Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T15:56:12.457635Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-03-31T15:56:12.622579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:56:12.622816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:56:12.623038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T15:56:12.623787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T15:56:12.623864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:56:12.626371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: 
StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T15:56:12.626520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T15:56:12.627495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:56:12.627568Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T15:56:12.627625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T15:56:12.627671Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T15:56:12.630288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:56:12.630352Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:56:12.630397Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T15:56:12.632478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:56:12.632532Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:56:12.632597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:56:12.632649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T15:56:12.636800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:56:12.640258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T15:56:12.640470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T15:56:12.641552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:56:12.641691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:56:12.641769Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:56:12.642092Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T15:56:12.642150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 
72057594046678944 2025-03-31T15:56:12.642322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:56:12.642415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:56:12.644742Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:56:12.644810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:56:12.645050Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:56:12.645098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T15:56:12.645468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:56:12.645511Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T15:56:12.645613Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:56:12.645645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:56:12.645684Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:56:12.645718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:56:12.645756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T15:56:12.645834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:56:12.645873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T15:56:12.645904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T15:56:12.645983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:56:12.646026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T15:56:12.646062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T15:56:12.648184Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:56:12.648302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:56:12.648341Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
04, ready parts: 2/3, is published: true 2025-03-31T15:56:14.027230Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-03-31T15:56:14.027357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-03-31T15:56:14.027387Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-03-31T15:56:14.027416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 2/3, is published: true 2025-03-31T15:56:14.077035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-03-31T15:56:14.077155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-03-31T15:56:14.077225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-03-31T15:56:14.077811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-03-31T15:56:14.079792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-03-31T15:56:14.079946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-03-31T15:56:14.079984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-03-31T15:56:14.102981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000005 OrderId: 104 ExecLatency: 0 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 8053 } } 2025-03-31T15:56:14.103033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409546, partId: 0 2025-03-31T15:56:14.103173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000005 OrderId: 104 ExecLatency: 0 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 8053 } } 2025-03-31T15:56:14.103258Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000005 OrderId: 104 ExecLatency: 0 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 8053 } } FAKE_COORDINATOR: Erasing txId 104 2025-03-31T15:56:14.112722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 309 RawX2: 4294969592 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2025-03-31T15:56:14.112793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 
72075186233409546, partId: 0 2025-03-31T15:56:14.112982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Source { RawX1: 309 RawX2: 4294969592 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2025-03-31T15:56:14.113035Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 104:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-03-31T15:56:14.113136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 104:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 309 RawX2: 4294969592 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2025-03-31T15:56:14.113212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 104:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-31T15:56:14.113269Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-31T15:56:14.113306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 104:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-03-31T15:56:14.113372Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 129 -> 240 2025-03-31T15:56:14.114573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-31T15:56:14.115763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-31T15:56:14.116062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-31T15:56:14.116101Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2025-03-31T15:56:14.116190Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 3/3 2025-03-31T15:56:14.116224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2025-03-31T15:56:14.116272Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 3/3 2025-03-31T15:56:14.116314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2025-03-31T15:56:14.116374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 3/3, is published: true 2025-03-31T15:56:14.116438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:337:2316] message: TxId: 104 2025-03-31T15:56:14.116479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2025-03-31T15:56:14.116519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-03-31T15:56:14.116545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-03-31T15:56:14.116637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-31T15:56:14.116669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:1 2025-03-31T15:56:14.116685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:1 2025-03-31T15:56:14.116706Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-31T15:56:14.116722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:2 2025-03-31T15:56:14.116737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:2 2025-03-31T15:56:14.116779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-03-31T15:56:14.117061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-31T15:56:14.117112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-03-31T15:56:14.117165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-03-31T15:56:14.117197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-03-31T15:56:14.117232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-31T15:56:14.119034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-03-31T15:56:14.119074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:727:2642] 2025-03-31T15:56:14.119162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 104 2025-03-31T15:56:14.119712Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/continuousBackupImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-31T15:56:14.119914Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/continuousBackupImpl" took 199us result status StatusPathDoesNotExist 2025-03-31T15:56:14.120063Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/continuousBackupImpl\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 2])" Path: "/MyRoot/Table/continuousBackupImpl" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-03-31T15:56:14.120501Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/continuousBackupImpl/streamImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false 
ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-31T15:56:14.120637Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/continuousBackupImpl/streamImpl" took 147us result status StatusPathDoesNotExist 2025-03-31T15:56:14.120744Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/continuousBackupImpl/streamImpl\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 2])" Path: "/MyRoot/Table/continuousBackupImpl/streamImpl" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> KqpSort::ReverseFirstKeyOptimized [GOOD] >> KqpSort::ReverseLimitOptimized >> TSchemeShardAuditSettings::AlterExtSubdomain-ExternalSchemeShard-false >> ReadSessionImplTest::ForcefulDestroyPartitionStream >> ReadSessionImplTest::ForcefulDestroyPartitionStream [GOOD] >> ReadSessionImplTest::DestroyPartitionStreamRequest >> ReadSessionImplTest::DestroyPartitionStreamRequest [GOOD] >> ReadSessionImplTest::DecompressZstdEmptyMessage >> ReadSessionImplTest::DecompressZstdEmptyMessage [GOOD] >> ReadSessionImplTest::PacksBatches_BatchABitBiggerThanLimit >> ReadSessionImplTest::PacksBatches_BatchABitBiggerThanLimit [GOOD] >> ReadSessionImplTest::PacksBatches_BatchesEqualToServerBatches >> ReadSessionImplTest::PacksBatches_BatchesEqualToServerBatches [GOOD] >> ReadSessionImplTest::HoleBetweenOffsets >> ReadSessionImplTest::HoleBetweenOffsets [GOOD] >> ReadSessionImplTest::LOGBROKER_7702 [GOOD] >> KqpNewEngine::KeyColumnOrder [GOOD] >> KqpNewEngine::KeyColumnOrder2 >> TableCreation::ConcurrentMultipleTablesCreation [GOOD] >> TopicAutoscaling::ControlPlane_DisableAutoPartitioning [GOOD] >> TopicAutoscaling::ControlPlane_PauseAutoPartitioning >> KqpNewEngine::Update+UseSink [GOOD] >> KqpNewEngine::Update-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::LOGBROKER_7702 [GOOD] Test command err: 2025-03-31T15:56:21.264111Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:56:21.264854Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:56:21.264876Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T15:56:21.286975Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-31T15:56:21.328435Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-31T15:56:21.883517Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:56:21.884465Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. 
Read offset: (NULL) 2025-03-31T15:56:21.885623Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:56:21.885638Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:56:21.885660Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T15:56:21.889734Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-31T15:56:21.902906Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-31T15:56:21.910169Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:56:21.915301Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-31T15:56:21.915710Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2025-03-31T15:56:21.921184Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:56:21.921206Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:56:21.921523Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T15:56:21.963437Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-31T15:56:22.016833Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-31T15:56:22.017033Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:56:22.019281Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-31T15:56:22.043735Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:56:22.053358Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-03-31T15:56:22.055149Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-31T15:56:22.055448Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2025-03-31T15:56:22.063746Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:56:22.063772Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:56:22.063791Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T15:56:22.086426Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-31T15:56:22.119251Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-31T15:56:22.122212Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:56:22.123038Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. 
Read offset: (NULL) Message data size: 11 Compressed message data size: 31 2025-03-31T15:56:22.152037Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-03-31T15:56:22.153749Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2025-03-31T15:56:22.168753Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2025-03-31T15:56:22.178831Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2025-03-31T15:56:22.180491Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-31T15:56:22.180525Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-03-31T15:56:22.180857Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-03-31T15:56:22.183489Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 Getting new event 2025-03-31T15:56:22.183865Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-03-31T15:56:22.184135Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2025-03-31T15:56:22.184157Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-03-31T15:56:22.185772Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 5). Partition stream id: 1 Getting new event 2025-03-31T15:56:22.186407Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2025-03-31T15:56:22.186428Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2025-03-31T15:56:22.186698Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-03-31T15:56:22.187920Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 7). Partition stream id: 1 Getting new event 2025-03-31T15:56:22.187948Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2025-03-31T15:56:22.187964Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2025-03-31T15:56:22.187986Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-03-31T15:56:22.189452Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [7, 9). Partition stream id: 1 2025-03-31T15:56:22.192597Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:56:22.192615Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:56:22.192634Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T15:56:22.244960Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-31T15:56:22.254226Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-31T15:56:22.254413Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:56:22.258256Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 10 Compressed message data size: 30 2025-03-31T15:56:22.338842Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-03-31T15:56:22.341697Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2025-03-31T15:56:22.369309Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2025-03-31T15:56:22.380628Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2025-03-31T15:56:22.383400Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-31T15:56:22.384052Z :DEBUG: Take Data. Partition 1. 
Read: {0, 1} (2-2) 2025-03-31T15:56:22.384091Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-03-31T15:56:22.384113Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2025-03-31T15:56:22.384874Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 4, size 40 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-03-31T15:56:22.388303Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 5). Partition stream id: 1 Getting new event 2025-03-31T15:56:22.389061Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2025-03-31T15:56:22.389083Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2025-03-31T15:56:22.389100Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2025-03-31T15:56:22.389115Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2025-03-31T15:56:22.389144Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 4, size 40 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-03-31T15:56:22.392413Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 9). Partition stream id: 1 2025-03-31T15:56:22.394886Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:56:22.394907Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:56:22.394923Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T15:56:22.397323Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-31T15:56:22.408570Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-31T15:56:22.409369Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:56:22.410815Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-31T15:56:22.416681Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-03-31T15:56:22.481848Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-03-31T15:56:22.486913Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (10-11) 2025-03-31T15:56:22.494210Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-03-31T15:56:22.495055Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-31T15:56:22.495280Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-03-31T15:56:22.495311Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (10-10) 2025-03-31T15:56:22.495330Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (11-11) 2025-03-31T15:56:22.495460Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 16 bytes 2025-03-31T15:56:22.495483Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 16 bytes got data event: DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 10 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 11 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } } 2025-03-31T15:56:22.496060Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 Got commit req { cookies { assign_id: 1 partition_cookie: 1 } } 2025-03-31T15:56:22.496823Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [10, 12). Partition stream id: 1 Got commit req { cookies { assign_id: 1 partition_cookie: 2 } } >> KqpReturning::ReturningWorksIndexedUpsert+QueryService [GOOD] >> KqpReturning::ReturningWorksIndexedUpsert-QueryService |67.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning |67.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning |67.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> TableCreation::ConcurrentMultipleTablesCreation [GOOD] Test command err: 2025-03-31T15:55:42.494220Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488002932675935841:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:55:42.494600Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000cb3/r3tmp/tmpdwYfdF/pdisk_1.dat 2025-03-31T15:55:47.507666Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488002932675935841:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:55:47.508301Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:55:47.801901Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:55:47.892561Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:55:47.900591Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:55:47.900691Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:55:47.911064Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14577 TServer::EnableGrpc on GrpcPort 5944, node 1 2025-03-31T15:55:49.471954Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:55:49.471979Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:55:49.471986Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:55:49.472113Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T15:55:49.734029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:55:57.002829Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-03-31T15:55:57.015434Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-03-31T15:55:57.024600Z node 1 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-03-31T15:55:57.027650Z node 1 :KQP_PROXY DEBUG: Updated table service config. 2025-03-31T15:55:57.027684Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-03-31T15:55:57.027725Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-03-31T15:55:57.034228Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Describe result: PathErrorUnknown 2025-03-31T15:55:57.034237Z node 1 :KQP_PROXY NOTICE: Table result_sets updater. Creating table 2025-03-31T15:55:57.034533Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Full table path:/dc-1/.metadata/result_sets 2025-03-31T15:55:57.040070Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Describe result: PathErrorUnknown 2025-03-31T15:55:57.040080Z node 1 :KQP_PROXY NOTICE: Table script_execution_leases updater. Creating table 2025-03-31T15:55:57.040355Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Full table path:/dc-1/.metadata/script_execution_leases 2025-03-31T15:55:57.044499Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Describe result: PathErrorUnknown 2025-03-31T15:55:57.044504Z node 1 :KQP_PROXY NOTICE: Table script_executions updater. Creating table 2025-03-31T15:55:57.044758Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. 
Full table path:/dc-1/.metadata/script_executions 2025-03-31T15:55:57.056913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:1, at schemeshard: 72057594046644480 2025-03-31T15:55:57.112856Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T15:55:57.113453Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T15:55:57.115435Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T15:55:57.116238Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 } 2025-03-31T15:55:57.116582Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Subscribe on create table tx: 281474976710658 2025-03-31T15:55:57.125946Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T15:55:57.127734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-31T15:55:57.130245Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T15:55:57.161395Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 4 } 2025-03-31T15:55:57.161443Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Subscribe on create table tx: 281474976710659 2025-03-31T15:55:57.183360Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 5 } 2025-03-31T15:55:57.183415Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Subscribe on create table tx: 281474976710660 2025-03-31T15:55:58.063168Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Request: create. Transaction completed: 281474976710658. Doublechecking... 2025-03-31T15:55:58.171981Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Column diff is empty, finishing 2025-03-31T15:55:58.372743Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Request: create. Transaction completed: 281474976710660. Doublechecking... 2025-03-31T15:55:58.454617Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Column diff is empty, finishing 2025-03-31T15:55:59.042819Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Request: create. Transaction completed: 281474976710659. Doublechecking... 2025-03-31T15:55:59.147594Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Column diff is empty, finishing 2025-03-31T15:55:59.153047Z node 1 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: 79e23b65-4503f7f4-2de126b-29ad0e3b, Bootstrap. 
Database: /dc-1 2025-03-31T15:55:59.284560Z node 1 :KQP_PROXY DEBUG: Request has 18445000637150.267378s seconds to be completed 2025-03-31T15:55:59.336275Z node 1 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=1&id=Nzg1MzczYjMtYmIwZWRkNWEtYmVkYWNmODItYTQxZDJlMzc=, workerId: [1:7488003005690380851:2354], database: /dc-1, longSession: 1, local sessions count: 1 2025-03-31T15:55:59.337003Z node 1 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-03-31T15:55:59.344018Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T15:55:59.357021Z node 1 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: 79e23b65-4503f7f4-2de126b-29ad0e3b, RunDataQuery: -- TCreateScriptOperationQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $run_script_actor_id AS Text; DECLARE $execution_status AS Int32; DECLARE $execution_mode AS Int32; DECLARE $query_text AS Text; DECLARE $syntax AS Int32; DECLARE $meta AS JsonDocument; DECLARE $lease_duration AS Interval; DECLARE $execution_meta_ttl AS Interval; UPSERT INTO `.metadata/script_executions` (database, execution_id, run_script_actor_id, execution_status, execution_mode, start_ts, query_text, syntax, meta, expire_at) VALUES ($database, $execution_id, $run_script_actor_id, $execution_status, $execution_mode, CurrentUtcTimestamp(), $query_text, $syntax, $meta, CurrentUtcTimestamp() + $execution_meta_ttl); UPSERT INTO `.metadata/script_execution_leases` (database, execution_id, lease_deadline, lease_generation, expire_at) VALUES ($database, $execution_id, CurrentUtcTimestamp() + $lease_duration, 1, CurrentUtcTimestamp() + $execution_meta_ttl); 2025-03-31T15:55:59.359761Z node 1 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=1&id=Nzg1MzczYjMtYmIwZWRkNWEtYmVkYWNmODItYTQxZDJlMzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [1:7488003005690380851:2354] 2025-03-31T15:55:59.360042Z node 1 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 3 timeout: 300.000000s actor id: [1:7488003005690380853:2518] 2025-03-31T15:55:59.375280Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003005690380854:2356], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:55:59.375377Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:55:59.376428Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003005690380866:2359], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:55:59.397921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ... BUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=ZWEyYTcxMDItMzg3MTZjNGUtMzg5NGVkYjgtN2ZiNzQ5MDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 10, targetId: [2:7488003090375962732:2394] 2025-03-31T15:56:19.879475Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 10 timeout: 300.000000s actor id: [2:7488003090375962745:3062] 2025-03-31T15:56:19.888258Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-03-31T15:56:19.888339Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-03-31T15:56:19.888387Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-03-31T15:56:19.888412Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-03-31T15:56:19.909822Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-03-31T15:56:19.910601Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-03-31T15:56:19.910620Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-03-31T15:56:19.910639Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-03-31T15:56:19.910752Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-03-31T15:56:19.910769Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-03-31T15:56:19.916789Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-03-31T15:56:19.916842Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-03-31T15:56:19.916868Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-03-31T15:56:19.917696Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-03-31T15:56:19.922777Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-03-31T15:56:19.922819Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-03-31T15:56:19.929165Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-03-31T15:56:19.929189Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-03-31T15:56:19.929207Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-03-31T15:56:19.929227Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-03-31T15:56:20.281795Z node 2 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T15:56:20.349148Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Request: create. Transaction completed: 281474976715689. Doublechecking... 2025-03-31T15:56:20.349188Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Request: create. Transaction completed: 281474976715689. Doublechecking... 2025-03-31T15:56:20.349202Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Request: create. Transaction completed: 281474976715689. Doublechecking... 
2025-03-31T15:56:20.349215Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Request: create. Transaction completed: 281474976715689. Doublechecking... 2025-03-31T15:56:20.349228Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Request: create. Transaction completed: 281474976715689. Doublechecking... 2025-03-31T15:56:20.349240Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Request: create. Transaction completed: 281474976715689. Doublechecking... 2025-03-31T15:56:20.349262Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Request: create. Transaction completed: 281474976715689. Doublechecking... 2025-03-31T15:56:20.349275Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Request: create. Transaction completed: 281474976715689. Doublechecking... 2025-03-31T15:56:20.349288Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Request: create. Transaction completed: 281474976715689. Doublechecking... 2025-03-31T15:56:20.349301Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Request: create. Transaction completed: 281474976715689. Doublechecking... 2025-03-31T15:56:20.349314Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Request: create. Transaction completed: 281474976715689. Doublechecking... 2025-03-31T15:56:20.349330Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Request: create. Transaction completed: 281474976715689. Doublechecking... 2025-03-31T15:56:20.349343Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Request: create. Transaction completed: 281474976715689. Doublechecking... 2025-03-31T15:56:20.349357Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Request: create. Transaction completed: 281474976715689. Doublechecking... 2025-03-31T15:56:20.349386Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Request: create. Transaction completed: 281474976715689. Doublechecking... 2025-03-31T15:56:20.349400Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Request: create. Transaction completed: 281474976715689. Doublechecking... 2025-03-31T15:56:20.349482Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Request: create. Transaction completed: 281474976715689. Doublechecking... 2025-03-31T15:56:20.349494Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Request: create. Transaction completed: 281474976715689. Doublechecking... 2025-03-31T15:56:20.349518Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Request: create. Transaction completed: 281474976715689. Doublechecking... 2025-03-31T15:56:20.349532Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Request: create. Transaction completed: 281474976715689. Doublechecking... 2025-03-31T15:56:20.399558Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-03-31T15:56:20.402443Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-03-31T15:56:20.402484Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-03-31T15:56:20.407844Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-03-31T15:56:20.407882Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-03-31T15:56:20.407900Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-03-31T15:56:20.407930Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-03-31T15:56:20.415334Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. 
Column diff is empty, finishing 2025-03-31T15:56:20.419594Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-03-31T15:56:20.428592Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-03-31T15:56:20.428628Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-03-31T15:56:20.428647Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-03-31T15:56:20.428698Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-03-31T15:56:20.428716Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-03-31T15:56:20.428734Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-03-31T15:56:20.429017Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-03-31T15:56:20.439316Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-03-31T15:56:20.444389Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-03-31T15:56:20.444767Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-03-31T15:56:20.451549Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-03-31T15:56:21.176578Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 10, sender: [2:7488003090375962733:2395], selfId: [2:7488003030246418937:2068], source: [2:7488003090375962732:2394] 2025-03-31T15:56:21.177546Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: 97ed9c6-5ed99a54-85f6b51e-36a9c1a6, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZWEyYTcxMDItMzg3MTZjNGUtMzg5NGVkYjgtN2ZiNzQ5MDA=, TxId: 2025-03-31T15:56:21.177572Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: 97ed9c6-5ed99a54-85f6b51e-36a9c1a6, Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZWEyYTcxMDItMzg3MTZjNGUtMzg5NGVkYjgtN2ZiNzQ5MDA=, TxId: 2025-03-31T15:56:21.178778Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] [TSaveScriptExecutionResultActor] ExecutionId: 97ed9c6-5ed99a54-85f6b51e-36a9c1a6, result part successfully saved 2025-03-31T15:56:21.179070Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] [TSaveScriptExecutionResultActor] ExecutionId: 97ed9c6-5ed99a54-85f6b51e-36a9c1a6, reply SUCCESS, issues: 2025-03-31T15:56:21.180078Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: 97ed9c6-5ed99a54-85f6b51e-36a9c1a6, Bootstrap. 
Database: /dc-1 2025-03-31T15:56:21.181364Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=ZWEyYTcxMDItMzg3MTZjNGUtMzg5NGVkYjgtN2ZiNzQ5MDA=, workerId: [2:7488003090375962732:2394], local sessions count: 2 2025-03-31T15:56:21.181672Z node 2 :KQP_PROXY DEBUG: Request has 18445000637128.369953s seconds to be completed 2025-03-31T15:56:21.227903Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=MjZjODc0YTgtYmU0ZmY3NS1mYTY2ZmQ2Zi1jNDc0YWUx, workerId: [2:7488003098965897472:2412], database: /dc-1, longSession: 1, local sessions count: 3 2025-03-31T15:56:21.228088Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-03-31T15:56:21.230180Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: 97ed9c6-5ed99a54-85f6b51e-36a9c1a6, RunDataQuery: -- TSaveScriptFinalStatusActor::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; SELECT operation_status, finalization_status, meta, customer_supplied_id, user_token, script_sinks, script_secret_names FROM `.metadata/script_executions` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); SELECT lease_generation FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); 2025-03-31T15:56:21.239616Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=MjZjODc0YTgtYmU0ZmY3NS1mYTY2ZmQ2Zi1jNDc0YWUx, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 12, targetId: [2:7488003098965897472:2412] 2025-03-31T15:56:21.239955Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 12 timeout: 300.000000s actor id: [2:7488003098965897474:3165] 2025-03-31T15:56:21.317166Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=Zjc3ZWViODctM2MzNTEwNTgtMzI3Mjg2OGEtZDNjNDgzMmY=, workerId: [2:7488003090375962177:2384], local sessions count: 2 >> TKeyValueTracingTest::ReadSmall [FAIL] >> THiveTest::TestCreateTabletReboots [GOOD] >> THiveTest::TestCreateTabletWithWrongSPoolsAndReassignGroupsFailButDeletionIsOk |67.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build |67.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build |67.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build >> ReadSessionImplTest::SuccessfulInit >> ReadSessionImplTest::SuccessfulInit [GOOD] >> ReadSessionImplTest::SuccessfulInitAndThenTimeoutCallback [GOOD] >> ReadSessionImplTest::StopsRetryAfterFailedAttempt [GOOD] >> ReadSessionImplTest::StopsRetryAfterTimeout >> ReadSessionImplTest::StopsRetryAfterTimeout [GOOD] >> ReadSessionImplTest::UnpackBigBatchWithTwoPartitions >> ReadSessionImplTest::UnpackBigBatchWithTwoPartitions [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulRelease >> ReadSessionImplTest::SimpleDataHandlersWithGracefulRelease [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit [GOOD] >> TopicAutoscaling::ReadingAfterSplitTest_AutoscaleAwareSDK [FAIL] >> 
TopicAutoscaling::ReBalancingAfterSplit_sessionsWithPartition >> TContinuousBackupTests::TakeIncrementalBackup [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit [GOOD] Test command err: 2025-03-31T15:56:29.500233Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:56:29.500258Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:56:29.500282Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T15:56:29.507154Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-31T15:56:29.507677Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-03-31T15:56:29.507741Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:56:29.508821Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:56:29.508842Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:56:29.508861Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T15:56:29.524768Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-31T15:56:29.525246Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-03-31T15:56:29.525295Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:56:29.558058Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:56:29.558351Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:56:29.558372Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T15:56:29.586877Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-03-31T15:56:29.593211Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:56:29.594668Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:56:29.600599Z :INFO: [db] [sessionid] [cluster] Closing session to cluster: SessionClosed { Status: INTERNAL_ERROR Issues: "
: Error: Failed to establish connection to server "" ( cluster cluster). Attempts done: 1 " } 2025-03-31T15:56:29.603968Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:56:29.603996Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:56:29.604015Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T15:56:29.636930Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2025-03-31T15:56:29.638119Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:56:29.638751Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:56:29.644829Z :INFO: [db] [sessionid] [cluster] Closing session to cluster: SessionClosed { Status: TIMEOUT Issues: "
: Error: Failed to establish connection to server. Attempts done: 1 " } 2025-03-31T15:56:29.646778Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2025-03-31T15:56:29.646796Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2025-03-31T15:56:29.646813Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T15:56:29.681281Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-31T15:56:29.712331Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-31T15:56:30.055880Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2025-03-31T15:56:30.066233Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-31T15:56:30.070169Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 2. Cluster: "TestCluster". Topic: "TestTopic". Partition: 2. Read offset: (NULL) 2025-03-31T15:56:30.073295Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-50) 2025-03-31T15:56:30.075906Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-31T15:56:30.075950Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-03-31T15:56:30.075981Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-03-31T15:56:30.075999Z :DEBUG: Take Data. Partition 1. Read: {0, 3} (4-4) 2025-03-31T15:56:30.076023Z :DEBUG: Take Data. Partition 1. Read: {0, 4} (5-5) 2025-03-31T15:56:30.076039Z :DEBUG: Take Data. Partition 1. Read: {0, 5} (6-6) 2025-03-31T15:56:30.076054Z :DEBUG: Take Data. Partition 1. Read: {0, 6} (7-7) 2025-03-31T15:56:30.076072Z :DEBUG: Take Data. Partition 1. Read: {0, 7} (8-8) 2025-03-31T15:56:30.076108Z :DEBUG: Take Data. Partition 1. Read: {0, 8} (9-9) 2025-03-31T15:56:30.076125Z :DEBUG: Take Data. Partition 1. Read: {0, 9} (10-10) 2025-03-31T15:56:30.076147Z :DEBUG: Take Data. Partition 1. Read: {0, 10} (11-11) 2025-03-31T15:56:30.076172Z :DEBUG: Take Data. Partition 1. Read: {0, 11} (12-12) 2025-03-31T15:56:30.076187Z :DEBUG: Take Data. Partition 1. Read: {0, 12} (13-13) 2025-03-31T15:56:30.076199Z :DEBUG: Take Data. Partition 1. Read: {0, 13} (14-14) 2025-03-31T15:56:30.076211Z :DEBUG: Take Data. Partition 1. Read: {0, 14} (15-15) 2025-03-31T15:56:30.076224Z :DEBUG: Take Data. Partition 1. Read: {0, 15} (16-16) 2025-03-31T15:56:30.076255Z :DEBUG: Take Data. Partition 1. Read: {0, 16} (17-17) 2025-03-31T15:56:30.076266Z :DEBUG: Take Data. Partition 1. Read: {0, 17} (18-18) 2025-03-31T15:56:30.076281Z :DEBUG: Take Data. Partition 1. Read: {0, 18} (19-19) 2025-03-31T15:56:30.076292Z :DEBUG: Take Data. Partition 1. Read: {0, 19} (20-20) 2025-03-31T15:56:30.076312Z :DEBUG: Take Data. Partition 1. Read: {0, 20} (21-21) 2025-03-31T15:56:30.076336Z :DEBUG: Take Data. Partition 1. Read: {0, 21} (22-22) 2025-03-31T15:56:30.076356Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (23-23) 2025-03-31T15:56:30.076371Z :DEBUG: Take Data. Partition 1. Read: {1, 1} (24-24) 2025-03-31T15:56:30.076386Z :DEBUG: Take Data. Partition 1. Read: {1, 2} (25-25) 2025-03-31T15:56:30.076401Z :DEBUG: Take Data. Partition 1. Read: {1, 3} (26-26) 2025-03-31T15:56:30.076417Z :DEBUG: Take Data. Partition 1. Read: {1, 4} (27-27) 2025-03-31T15:56:30.076432Z :DEBUG: Take Data. Partition 1. 
Read: {1, 5} (28-28) 2025-03-31T15:56:30.076446Z :DEBUG: Take Data. Partition 1. Read: {1, 6} (29-29) 2025-03-31T15:56:30.076460Z :DEBUG: Take Data. Partition 1. Read: {1, 7} (30-30) 2025-03-31T15:56:30.076483Z :DEBUG: Take Data. Partition 1. Read: {1, 8} (31-31) 2025-03-31T15:56:30.076506Z :DEBUG: Take Data. Partition 1. Read: {1, 9} (32-32) 2025-03-31T15:56:30.076582Z :DEBUG: Take Data. Partition 1. Read: {1, 10} (33-33) 2025-03-31T15:56:30.076619Z :DEBUG: Take Data. Partition 1. Read: {1, 11} (34-34) 2025-03-31T15:56:30.076639Z :DEBUG: Take Data. Partition 1. Read: {1, 12} (35-35) 2025-03-31T15:56:30.076655Z :DEBUG: Take Data. Partition 1. Read: {1, 13} (36-36) 2025-03-31T15:56:30.076680Z :DEBUG: Take Data. Partition 1. Read: {1, 14} (37-37) 2025-03-31T15:56:30.076711Z :DEBUG: Take Data. Partition 1. Read: {1, 15} (38-38) 2025-03-31T15:56:30.076729Z :DEBUG: Take Data. Partition 1. Read: {1, 16} (39-39) 2025-03-31T15:56:30.076745Z :DEBUG: Take Data. Partition 1. Read: {1, 17} (40-40) 2025-03-31T15:56:30.076758Z :DEBUG: Take Data. Partition 1. Read: {1, 18} (41-41) 2025-03-31T15:56:30.076773Z :DEBUG: Take Data. Partition 1. Read: {1, 19} (42-42) 2025-03-31T15:56:30.076839Z :DEBUG: Take Data. Partition 1. Read: {1, 20} (43-43) 2025-03-31T15:56:30.076867Z :DEBUG: Take Data. Partition 1. Read: {1, 21} (44-44) 2025-03-31T15:56:30.076903Z :DEBUG: Take Data. Partition 1. Read: {1, 22} (45-45) 2025-03-31T15:56:30.076922Z :DEBUG: Take Data. Partition 1. Read: {1, 23} (46-46) 2025-03-31T15:56:30.076937Z :DEBUG: Take Data. Partition 1. Read: {1, 24} (47-47) 2025-03-31T15:56:30.076963Z :DEBUG: Take Data. Partition 1. Read: {1, 25} (48-48) 2025-03-31T15:56:30.076987Z :DEBUG: Take Data. Partition 1. Read: {1, 26} (49-49) 2025-03-31T15:56:30.076999Z :DEBUG: Take Data. Partition 1. Read: {1, 27} (50-50) 2025-03-31T15:56:30.077055Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 50, size 5000 bytes 2025-03-31T15:56:30.079123Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 2 (51-100) 2025-03-31T15:56:30.088219Z :DEBUG: Take Data. Partition 2. Read: {0, 0} (51-51) 2025-03-31T15:56:30.089237Z :DEBUG: Take Data. Partition 2. Read: {0, 1} (52-52) 2025-03-31T15:56:30.089260Z :DEBUG: Take Data. Partition 2. Read: {0, 2} (53-53) 2025-03-31T15:56:30.089275Z :DEBUG: Take Data. Partition 2. Read: {0, 3} (54-54) 2025-03-31T15:56:30.089293Z :DEBUG: Take Data. Partition 2. Read: {0, 4} (55-55) 2025-03-31T15:56:30.089306Z :DEBUG: Take Data. Partition 2. Read: {0, 5} (56-56) 2025-03-31T15:56:30.089319Z :DEBUG: Take Data. Partition 2. Read: {0, 6} (57-57) 2025-03-31T15:56:30.089333Z :DEBUG: Take Data. Partition 2. Read: {0, 7} (58-58) 2025-03-31T15:56:30.089971Z :DEBUG: Take Data. Partition 2. Read: {0, 8} (59-59) 2025-03-31T15:56:30.089992Z :DEBUG: Take Data. Partition 2. Read: {0, 9} (60-60) 2025-03-31T15:56:30.090311Z :DEBUG: Take Data. Partition 2. Read: {0, 10} (61-61) 2025-03-31T15:56:30.090328Z :DEBUG: Take Data. Partition 2. Read: {0, 11} (62-62) 2025-03-31T15:56:30.096141Z :DEBUG: Take Data. Partition 2. Read: {0, 12} (63-63) 2025-03-31T15:56:30.096479Z :DEBUG: Take Data. Partition 2. Read: {0, 13} (64-64) 2025-03-31T15:56:30.096494Z :DEBUG: Take Data. Partition 2. Read: {0, 14} (65-65) 2025-03-31T15:56:30.096796Z :DEBUG: Take Data. Partition 2. Read: {0, 15} (66-66) 2025-03-31T15:56:30.103385Z :DEBUG: Take Data. Partition 2. Read: {0, 16} (67-67) 2025-03-31T15:56:30.103726Z :DEBUG: Take Data. Partition 2. 
Read: {0, 17} (68-68) 2025-03-31T15:56:30.103750Z :DEBUG: Take Data. Partition 2. Read: {0, 18} (69-69) 2025-03-31T15:56:30.103768Z :DEBUG: Take Data. Partition 2. Read: {0, 19} (70-70) 2025-03-31T15:56:30.103784Z :DEBUG: Take Data. Partition 2. Read: {0, 20} (71-71) 2025-03-31T15:56:30.103802Z :DEBUG: Take Data. Partition 2. Read: {0, 21} (72-72) 2025-03-31T15:56:30.103821Z :DEBUG: Take Data. Partition 2. Read: {1, 0} (73-73) 2025-03-31T15:56:30.104138Z :DEBUG: Take Data. Partition 2. Read: {1, 1} (74-74) 2025-03-31T15:56:30.104491Z :DEBUG: Take Data. Partition 2. Read: {1, 2} (75-75) 2025-03-31T15:56:30.104509Z :DEBUG: Take Data. Partition 2. Read: {1, 3} (76-76) 2025-03-31T15:56:30.104526Z :DEBUG: Take Data. Partition 2. Read: {1, 4} (77-77) 2025-03-31T15:56:30.104543Z :DEBUG: Take Data. Partition 2. Read: {1, 5} (78-78) 2025-03-31T15:56:30.104887Z :DEBUG: Take Data. Partition 2. Read: {1, 6} (79-79) 2025-03-31T15:56:30.105533Z :DEBUG: Take Data. Partition 2. Read: {1, 7} (80-80) 2025-03-31T15:56:30.105548Z :DEBUG: Take Data. Partition 2. Read: {1, 8} (81-81) 2025-03-31T15:56:30.105561Z :DEBUG: Take Data. Partition 2. Read: {1, 9} (82-82) 2025-03-31T15:56:30.106439Z :DEBUG: Take Data. Partition 2. Read: {1, 10} (83-83) 2025-03-31T15:56:30.107718Z :DEBUG: Take Data. Partition 2. Read: {1, 11} (84-84) 2025-03-31T15:56:30.107739Z :DEBUG: Take Data. Partition 2. Read: {1, 12} (85-85) 2025-03-31T15:56:30.107753Z :DEBUG: Take Data. Partition 2. Read: {1, 13} (86-86) 2025-03-31T15:56:30.107766Z :DEBUG: Take Data. Partition 2. Read: {1, 14} (87-87) 2025-03-31T15:56:30.107780Z :DEBUG: Take Data. Partition 2. Read: {1, 15} (88-88) 2025-03-31T15:56:30.107794Z :DEBUG: Take Data. Partition 2. Read: {1, 16} (89-89) 2025-03-31T15:56:30.107808Z :DEBUG: Take Data. Partition 2. Read: {1, 17} (90-90) 2025-03-31T15:56:30.107832Z :DEBUG: Take Data. Partition 2. Read: {1, 18} (91-91) 2025-03-31T15:56:30.107855Z :DEBUG: Take Data. Partition 2. Read: {1, 19} (92-92) 2025-03-31T15:56:30.107871Z :DEBUG: Take Data. Partition 2. Read: {1, 20} (93-93) 2025-03-31T15:56:30.108196Z :DEBUG: Take Data. Partition 2. Read: {1, 21} (94-94) 2025-03-31T15:56:30.108501Z :DEBUG: Take Data. Partition 2. Read: {1, 22} (95-95) 2025-03-31T15:56:30.108791Z :DEBUG: Take Data. Partition 2. Read: {1, 23} (96-96) 2025-03-31T15:56:30.108805Z :DEBUG: Take Data. Partition 2. Read: {1, 24} (97-97) 2025-03-31T15:56:30.116086Z :DEBUG: Take Data. Partition 2. Read: {1, 25} (98-98) 2025-03-31T15:56:30.116112Z :DEBUG: Take Data. Partition 2. Read: {1, 26} (99-99) 2025-03-31T15:56:30.116126Z :DEBUG: Take Data. Partition 2. Read: {1, 27} (100-100) 2025-03-31T15:56:30.116490Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 50, size 5000 bytes 2025-03-31T15:56:30.116857Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2025-03-31T15:56:30.125683Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:56:30.125706Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:56:30.125724Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T15:56:30.130138Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-03-31T15:56:30.138766Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-31T15:56:30.138922Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:56:30.147270Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-31T15:56:30.373991Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:56:30.382506Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-03-31T15:56:30.383540Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-31T15:56:30.384521Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2025-03-31T15:56:30.384595Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2025-03-31T15:56:30.595379Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 2025-03-31T15:56:30.699885Z :DEBUG: [db] [sessionid] [cluster] Committed response: { cookies { assign_id: 1 partition_cookie: 1 } } 2025-03-31T15:56:30.701816Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2025-03-31T15:56:30.705338Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2025-03-31T15:56:30.766643Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:56:30.766952Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:56:30.767252Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T15:56:30.803928Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-31T15:56:30.809124Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-31T15:56:30.836131Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:56:30.852971Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-31T15:56:31.081048Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:56:31.088556Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-03-31T15:56:31.089573Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-31T15:56:31.100262Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2025-03-31T15:56:31.102302Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 2025-03-31T15:56:31.108185Z :DEBUG: [db] [sessionid] [cluster] Committed response: { cookies { assign_id: 1 partition_cookie: 1 } } 2025-03-31T15:56:31.104490Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2025-03-31T15:56:31.122353Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". 
Partition: 1 2025-03-31T15:56:31.127125Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster >> PersQueueSdkReadSessionTest::ReadSessionWithExplicitlySpecifiedPartitions >> KqpNotNullColumns::UpsertNotNull [GOOD] >> KqpNotNullColumns::UpdateTable_DontChangeNotNull >> TTxDataShardMiniKQL::CrossShard_1_Cycle [GOOD] >> KqpNewEngine::ReadAfterWrite [GOOD] >> KqpNewEngine::ReadRangeWithParams >> TTxDataShardMiniKQL::CrossShard_2_SwapAndCopy ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup/unittest >> TContinuousBackupTests::TakeIncrementalBackup [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T15:56:19.785852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T15:56:19.786877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:56:19.794887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T15:56:19.795589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T15:56:19.796196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T15:56:19.796231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T15:56:19.796915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:56:19.797580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T15:56:19.813066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:56:20.377826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:56:20.378172Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:56:20.439498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:56:20.439977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T15:56:20.441081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T15:56:20.539946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T15:56:20.542689Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T15:56:20.549926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T15:56:20.551066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:56:20.569913Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:56:20.587071Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:56:20.600050Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:56:20.601286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T15:56:20.601660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:56:20.602044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T15:56:20.611406Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T15:56:20.736376Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T15:56:21.974927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:56:21.975219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:56:21.975382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T15:56:21.975583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T15:56:21.975640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:56:21.997768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T15:56:22.009575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T15:56:22.011902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:56:22.012497Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T15:56:22.013111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T15:56:22.013404Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T15:56:22.058752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:56:22.064201Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:56:22.064599Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T15:56:22.108330Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:56:22.119276Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:56:22.119866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:56:22.119916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T15:56:22.396270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:56:22.451718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T15:56:22.460002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T15:56:22.460969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:56:22.461081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:56:22.461134Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:56:22.461349Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T15:56:22.461387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:56:22.461542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:56:22.461612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T15:56:22.482973Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:56:22.490191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:56:22.490434Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:56:22.490477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T15:56:22.490727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:56:22.490767Z node 1 
:FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T15:56:22.490866Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:56:22.490904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:56:22.490940Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:56:22.490974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:56:22.491008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T15:56:22.491052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:56:22.491104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T15:56:22.491139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T15:56:22.491221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:56:22.491266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T15:56:22.491296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T15:56:22.785840Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:56:22.786612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:56:22.795340Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
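The "progress is 1/1" / "ready parts: 1/1" records above come from SchemeShard counting finished sub-operations of a transaction; an operation completes only when every part reports done. A toy model of that bookkeeping, with hypothetical names (the real logic lives in SchemeShard's TOperation):

    // Toy model of per-part operation progress; names are hypothetical.
    #include <cstddef>
    struct TOperationProgress {
        size_t TotalParts = 0;
        size_t DoneParts = 0;
        void PartDone() { ++DoneParts; }     // logged as "progress is k/m"
        bool IsReadyToDone() const {         // cf. "TOperation IsReadyToDone"
            return DoneParts == TotalParts;
        }
    };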
09546, at schemeshard: 72057594046678944 2025-03-31T15:56:28.852148Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:1 129 -> 240 2025-03-31T15:56:28.876906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:1, at schemeshard: 72057594046678944 2025-03-31T15:56:28.877939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:1, at schemeshard: 72057594046678944 2025-03-31T15:56:28.878191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:1, at schemeshard: 72057594046678944 2025-03-31T15:56:28.878226Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:1 ProgressState 2025-03-31T15:56:28.878853Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:1 progress is 4/4 2025-03-31T15:56:28.878886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 4/4 2025-03-31T15:56:28.878917Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:1 progress is 4/4 2025-03-31T15:56:28.887221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 4/4 2025-03-31T15:56:28.887284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 4/4, is published: true 2025-03-31T15:56:28.887645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:333:2312] message: TxId: 103 2025-03-31T15:56:28.888531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 4/4 2025-03-31T15:56:28.888862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-03-31T15:56:28.889268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-03-31T15:56:28.889913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-03-31T15:56:28.890226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:1 2025-03-31T15:56:28.890247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:1 2025-03-31T15:56:28.890608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-31T15:56:28.890636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:2 2025-03-31T15:56:28.890657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:2 2025-03-31T15:56:28.891010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-03-31T15:56:28.891041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:3 2025-03-31T15:56:28.891059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:3 2025-03-31T15:56:28.894480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-03-31T15:56:28.923904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-03-31T15:56:28.924606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:716:2620] TestWaitNotification: OK eventTxId 103 2025-03-31T15:56:28.930758Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: 
"/MyRoot/IncrBackupImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-31T15:56:28.944748Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/IncrBackupImpl" took 11.1ms result status StatusSuccess 2025-03-31T15:56:28.955908Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/IncrBackupImpl" PathDescription { Self { Name: "IncrBackupImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "IncrBackupImpl" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_deleted" Type: "Bool" TypeId: 6 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__incremental_backup" Value: "{}" } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:56:28.964862Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/continuousBackupImpl/streamImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-31T15:56:28.972889Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/continuousBackupImpl/streamImpl" took 7.75ms result status StatusSuccess 2025-03-31T15:56:28.981230Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/continuousBackupImpl/streamImpl" PathDescription { 
Self { Name: "streamImpl" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeStreamImpl Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false BalancerTabletID: 72075186233409548 } PersQueueGroup { Name: "streamImpl" PathId: 4 TotalGroupCount: 1 PartitionPerTablet: 2 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 } TopicName: "continuousBackupImpl" TopicPath: "/MyRoot/Table/continuousBackupImpl/streamImpl" YdbDatabasePath: "/MyRoot" PartitionKeySchema { Name: "key" TypeId: 4 } MeteringMode: METERING_MODE_REQUEST_UNITS OffloadConfig { IncrementalBackup { DstPath: "/MyRoot/IncrBackupImpl" DstPathId { OwnerId: 72057594046678944 LocalId: 5 } } } } Partitions { PartitionId: 0 TabletId: 72075186233409547 Status: Active } AlterVersion: 2 BalancerTabletID: 72075186233409548 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:56:29.002533Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/IncrBackupImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-31T15:56:29.005537Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/IncrBackupImpl" took 2.86ms result status StatusSuccess 2025-03-31T15:56:29.012501Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/IncrBackupImpl" PathDescription { Self { Name: "IncrBackupImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "IncrBackupImpl" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_deleted" Type: "Bool" TypeId: 6 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 
LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__incremental_backup" Value: "{}" } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpNotNullColumns::InsertNotNull [GOOD] >> KqpNotNullColumns::InsertFromSelect >> THiveTest::TestCreateTabletWithWrongSPoolsAndReassignGroupsFailButDeletionIsOk [GOOD] >> THiveTest::TestCreateTabletAndReassignGroupsWithReboots >> THiveTest::TestHiveBalancerWithImmovableTablets [GOOD] >> THiveTest::TestHiveBalancerHighUsage |67.1%| [TA] $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/test-results/unittest/{meta.json ... results_accumulator.log} >> ReadSessionImplTest::UsesOnRetryStateDuringRetries [GOOD] >> RetryPolicy::TWriteSession_TestPolicy >> TopicAutoscaling::PartitionSplit_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionSplit_ManySession_BeforeAutoscaleAwareSDK >> TopicAutoscaling::ReadingAfterSplitTest_BeforeAutoscaleAwareSDK [FAIL] >> TopicAutoscaling::ReadingAfterSplitTest_AutoscaleAwareSDK_AutoCommit >> ReadSessionImplTest::ProperlyOrdersDecompressedData [GOOD] >> ReadSessionImplTest::PacksBatches_ExactlyTwoMessagesInBatch >> Balancing::Balancing_OneTopic_TopicApi [GOOD] >> Balancing::Balancing_OneTopic_PQv1 >> ReadSessionImplTest::PacksBatches_ExactlyTwoMessagesInBatch [GOOD] >> ReadSessionImplTest::PacksBatches_OneMessageInEveryBatch [GOOD] >> ReadSessionImplTest::PacksBatches_BigBatchDecompressWithTwoBatchTasks |67.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> TSchemeShardAuditSettings::AlterExtSubdomain-ExternalSchemeShard-false [GOOD] |67.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction |67.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction |67.1%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/test-results/unittest/{meta.json ... 
results_accumulator.log} |67.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction |67.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest |67.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/runtime/ydb-core-kqp-ut-runtime |67.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/runtime/ydb-core-kqp-ut-runtime |67.1%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/runtime/ydb-core-kqp-ut-runtime >> KqpProxy::NoUserAccessToScriptExecutionsTable [GOOD] |67.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume |67.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume |67.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TSchemeShardAuditSettings::AlterExtSubdomain-ExternalSchemeShard-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T15:56:25.494459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T15:56:25.494560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:56:25.494605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T15:56:25.494641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T15:56:25.494685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T15:56:25.494716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T15:56:25.494809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:56:25.494891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T15:56:25.495287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:56:25.743990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:56:25.744362Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:56:25.850197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:56:25.850566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T15:56:25.857186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T15:56:25.887427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 
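The TTxDescribeScheme request/response pairs earlier in this output (for /MyRoot/IncrBackupImpl and the stream implementation) are the server side of a scheme describe call. A client-side sketch using the C++ SDK's scheme client, assuming a reachable endpoint; the path is taken from the log:

    // Sketch only: describing a path with NYdb::NScheme.
    #include <ydb/public/sdk/cpp/client/ydb_scheme/scheme.h>

    using namespace NYdb;
    using namespace NYdb::NScheme;

    void Describe(TDriver& driver) {
        TSchemeClient client(driver);
        auto result = client.DescribePath("/MyRoot/IncrBackupImpl").GetValueSync();
        if (result.IsSuccess()) {
            const auto& entry = result.GetEntry();
            // entry.Name / entry.Type / entry.Owner mirror the Self { ... }
            // block in the DescribeScheme output above.
        }
    }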
2025-03-31T15:56:25.889653Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T15:56:25.897479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T15:56:25.898464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:56:25.901440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:56:25.905261Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:56:25.905852Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:56:25.906868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T15:56:25.907291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:56:25.907980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T15:56:25.909945Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T15:56:25.976768Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T15:56:26.810158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:56:26.823158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:56:26.825019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T15:56:26.832207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T15:56:26.832917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:56:26.893212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T15:56:26.900191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T15:56:26.901999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:56:26.902075Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T15:56:26.902442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T15:56:26.902758Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T15:56:26.966494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:56:26.966851Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:56:26.968459Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T15:56:26.991388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:56:26.991461Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:56:26.992192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:56:26.992845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T15:56:27.082654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:56:27.118245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T15:56:27.124220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T15:56:27.152198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:56:27.153240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:56:27.153566Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:56:27.165387Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T15:56:27.165836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:56:27.180959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:56:27.182313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T15:56:27.267690Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:56:27.268413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 
2025-03-31T15:56:27.271570Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:56:27.271890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T15:56:27.274299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:56:27.275046Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T15:56:27.284118Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:56:27.284476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:56:27.285014Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:56:27.285313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:56:27.285610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T15:56:27.286217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:56:27.286528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T15:56:27.292049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T15:56:27.292439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:56:27.293052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T15:56:27.293377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T15:56:27.364322Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:56:27.379604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:56:27.380265Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
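The TTxPublishToSchemeBoard / TEvUpdateAck exchange above ("Publication still in progress, tx: 1, publications: 1, subscribers: 0") boils down to counting outstanding path publications per transaction and notifying waiters once the count reaches zero. A toy sketch of that accounting, not the actual SchemeShard code:

    // Toy model of scheme-board publication tracking; structure is hypothetical.
    #include <cstddef>
    struct TPublicationTracker {
        size_t Pending = 0;                   // "publications: N" in the log
        void Publish(size_t paths) { Pending += paths; }
        bool Ack() { return --Pending == 0; } // true => satisfy subscribers
    };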
94046678944 Generation: 2 LocalPathId: 1 Version: 102 PathOwnerId: 72057594046678944, cookie: 175 2025-03-31T15:56:35.601726Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 2025-03-31T15:56:35.601762Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 102 2025-03-31T15:56:35.601801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:56:35.602990Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 4 PathOwnerId: 72057594046678944, cookie: 175 2025-03-31T15:56:35.603070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 4 PathOwnerId: 72057594046678944, cookie: 175 2025-03-31T15:56:35.603154Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 2025-03-31T15:56:35.603186Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 26], version: 4 2025-03-31T15:56:35.603222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 3 2025-03-31T15:56:35.603322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 175, ready parts: 0/1, is published: true 2025-03-31T15:56:35.605327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 175:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:175 msg type: 269090816 2025-03-31T15:56:35.605451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 175, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 175 at step: 5000076 FAKE_COORDINATOR: advance: minStep5000076 State->FrontStep: 5000075 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 175 at step: 5000076 2025-03-31T15:56:35.606472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000076, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:56:35.606581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 175 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000076 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:56:35.606629Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropExtSubdomain TPropose, operationId: 175:0 HandleReply TEvOperationPlan, step: 5000076, at schemeshard: 72057594046678944 2025-03-31T15:56:35.606718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 26] name: USER_0 type: EPathTypeExtSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 175 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:56:35.606747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 26] 2025-03-31T15:56:35.606783Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 175:0 128 -> 134 2025-03-31T15:56:35.608354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, 
at schemeshard: 72057594046678944, cookie: 175 2025-03-31T15:56:35.610037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2025-03-31T15:56:35.611564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 175:0, at schemeshard: 72057594046678944 2025-03-31T15:56:35.611620Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropExtSubdomain TDeleteExternalShards, operationId: 175:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:56:35.611752Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 175:0 134 -> 135 2025-03-31T15:56:35.611958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:56:35.612025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 2 FAKE_COORDINATOR: Erasing txId 175 2025-03-31T15:56:35.614638Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:56:35.614692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 175, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:56:35.614868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 175, path id: [OwnerId: 72057594046678944, LocalPathId: 26] 2025-03-31T15:56:35.615000Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:56:35.615040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 175, path id: 1 2025-03-31T15:56:35.615099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 175, path id: 26 2025-03-31T15:56:35.615596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 175:0, at schemeshard: 72057594046678944 2025-03-31T15:56:35.615654Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 175:0 ProgressState 2025-03-31T15:56:35.615689Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 175:0 135 -> 240 2025-03-31T15:56:35.616844Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 103 PathOwnerId: 72057594046678944, cookie: 175 2025-03-31T15:56:35.616941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 103 PathOwnerId: 72057594046678944, cookie: 175 2025-03-31T15:56:35.616973Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 2025-03-31T15:56:35.617012Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 103 2025-03-31T15:56:35.617049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:56:35.618974Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 
18446744073709551615 PathOwnerId: 72057594046678944, cookie: 175 2025-03-31T15:56:35.619102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 175 2025-03-31T15:56:35.619145Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 2025-03-31T15:56:35.619186Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 26], version: 18446744073709551615 2025-03-31T15:56:35.619224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 3 2025-03-31T15:56:35.619329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 175, ready parts: 0/1, is published: true 2025-03-31T15:56:35.621580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 175:0, at schemeshard: 72057594046678944 2025-03-31T15:56:35.621637Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 175:0 ProgressState 2025-03-31T15:56:35.621729Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#175:0 progress is 1/1 2025-03-31T15:56:35.621777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2025-03-31T15:56:35.621817Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#175:0 progress is 1/1 2025-03-31T15:56:35.621854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2025-03-31T15:56:35.621886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 175, ready parts: 1/1, is published: true 2025-03-31T15:56:35.621943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2025-03-31T15:56:35.621979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 175:0 2025-03-31T15:56:35.622009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 175:0 2025-03-31T15:56:35.622081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 2 2025-03-31T15:56:35.622615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-31T15:56:35.622663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 26], at schemeshard: 72057594046678944 2025-03-31T15:56:35.622727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 1 2025-03-31T15:56:35.623343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-31T15:56:35.623390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 26], at schemeshard: 72057594046678944 2025-03-31T15:56:35.623452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:56:35.624823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2025-03-31T15:56:35.625401Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2025-03-31T15:56:35.628129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-03-31T15:56:35.628254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 175, wait until txId: 175 TestWaitNotification wait txId: 175 2025-03-31T15:56:35.629531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 175: send EvNotifyTxCompletion 2025-03-31T15:56:35.629576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 175 2025-03-31T15:56:35.631189Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 175, at schemeshard: 72057594046678944 2025-03-31T15:56:35.631324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 175: got EvNotifyTxCompletionResult 2025-03-31T15:56:35.631363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 175: satisfy waiter [1:2596:4587] TestWaitNotification: OK eventTxId 175 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::ReadSmall [FAIL] Test command err: equal assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:124, void TestOneRead(TString, TString): env.WilsonUploader->Traces.size() == 1 TBackTrace::Capture()+28 (0x1013BF9C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString<char, std::char_traits<char>> const&, bool)+592 (0x105F7E30) TestOneRead(TBasicString<char, std::char_traits<char>>, TBasicString<char, std::char_traits<char>>)+4828 (0xFD8444C) NTestSuiteTKeyValueTracingTest::TTestCaseReadSmall::Execute_(NUnitTest::TTestContext&)+318 (0xFD8AC2E) std::__y1::__function::__func<..., ..., void ()>::operator()()+280 (0xFD9E7F8) TColoredProcessor::Run(std::__y1::function<void ()>, TBasicString<char, std::char_traits<char>> const&, char const*, bool)+534 (0x10625D96) NUnitTest::TTestBase::Run(std::__y1::function<void ()>, TBasicString<char, std::char_traits<char>> const&, char const*, bool)+505 (0x105FE9A9) NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()+1204 (0xFD9D6A4) NUnitTest::TTestFactory::Execute()+2438 (0x10600276) NUnitTest::RunMain(int, char**)+5213 (0x1062030D) ??+0 (0x7F793E0DCD90) __libc_start_main+128 (0x7F793E0DCE40) _start+41 (0xD71A029) >> KqpSort::UnionAllSortLimit [GOOD] >> KqpSqlIn::CantRewrite >> THiveTest::TestHiveBalancerHighUsage [GOOD] >> THiveTest::TestHiveBalancerHighUsageAndColumnShards ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> KqpProxy::NoUserAccessToScriptExecutionsTable [GOOD] Test command err: 2025-03-31T15:55:42.196949Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488002932095277443:2211];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:55:42.197408Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T15:55:42.363955Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488002933865369278:2220];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000cc3/r3tmp/tmphv1TRA/pdisk_1.dat 2025-03-31T15:55:42.625275Z
node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T15:55:42.862548Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:55:42.862654Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:55:42.863912Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:55:42.864026Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:55:42.866909Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T15:55:42.867110Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:55:42.867755Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:55:42.927500Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:13433 2025-03-31T15:55:47.199791Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488002932095277443:2211];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:55:47.199840Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:55:47.319164Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488002933865369278:2220];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:55:47.319233Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:55:57.929549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T15:55:57.929571Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:56:03.796162Z node 2 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-03-31T15:56:03.798092Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-03-31T15:56:03.821096Z node 2 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-03-31T15:56:03.824606Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-03-31T15:56:03.844198Z node 1 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-03-31T15:56:03.844239Z node 1 :KQP_PROXY DEBUG: Updated table service config. 
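The KQP proxy records that follow ("Created new session", "TEvQueryRequest, set timer", and the NOT_FOUND warnings for the default resource pool, which appears to be created on demand by TPoolCreatorActor) correspond to an ordinary client running a query over a session. A minimal sketch with the C++ Table SDK; endpoint and database are placeholders:

    // Sketch only: create a session and run a query with NYdb::NTable.
    #include <ydb/public/sdk/cpp/client/ydb_table/table.h>

    using namespace NYdb;
    using namespace NYdb::NTable;

    int main() {
        TDriver driver(TDriverConfig()
            .SetEndpoint("grpc://localhost:2136")  // placeholder
            .SetDatabase("/dc-1"));                // matches the log's database
        TTableClient client(driver);
        auto session = client.CreateSession().GetValueSync().GetSession();
        auto result = session.ExecuteDataQuery(
            "SELECT 1;",
            TTxControl::BeginTx(TTxSettings::SerializableRW()).CommitTx()
        ).GetValueSync();
        if (!result.IsSuccess()) {
            // Failures surface as status + issues, e.g. "Resource pool default
            // not found or you don't have access permissions" in this log.
            Cerr << result.GetStatus() << ": " << result.GetIssues().ToString();
        }
        driver.Stop(true);
        return 0;
    }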
2025-03-31T15:56:03.844269Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-03-31T15:56:03.844318Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-03-31T15:56:03.844378Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T15:56:03.844423Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T15:56:03.854456Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T15:56:03.854710Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T15:56:03.914641Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=ZjQwNDA1OGEtNDM5NzQ4ZTAtN2E4YjFmZmMtMjIwNGY5OGM=, workerId: [2:7488003024059682673:2324], database: , longSession: 1, local sessions count: 1 2025-03-31T15:56:03.914922Z node 2 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-03-31T15:56:03.915438Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-03-31T15:56:03.917527Z node 2 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-03-31T15:56:03.917976Z node 2 :KQP_PROXY DEBUG: Updated table service config. 2025-03-31T15:56:03.917995Z node 2 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-03-31T15:56:03.918117Z node 2 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-03-31T15:56:03.931811Z node 1 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: , DatabaseId: , SessionId: ydb://session/3?node_id=2&id=ZjQwNDA1OGEtNDM5NzQ4ZTAtN2E4YjFmZmMtMjIwNGY5OGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 2, targetId: [2:8678280833929343339:121] 2025-03-31T15:56:03.931869Z node 1 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 2 timeout: 600.000000s actor id: [1:7488003022289591569:2589] 2025-03-31T15:56:03.935473Z node 2 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T15:56:03.935707Z node 2 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T15:56:03.935868Z node 2 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T15:56:03.935887Z node 2 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T15:56:03.947693Z node 2 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T15:56:03.948882Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: , DatabaseId: , SessionId: ydb://session/3?node_id=2&id=ZjQwNDA1OGEtNDM5NzQ4ZTAtN2E4YjFmZmMtMjIwNGY5OGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 3, targetId: [2:7488003024059682673:2324] 2025-03-31T15:56:03.948912Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 3 timeout: 600.000000s actor id: [2:7488003024059682689:2135] 2025-03-31T15:56:03.971841Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003022289591570:2347], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:56:03.972210Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:56:04.115366Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488003024059682690:2325], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:56:04.115474Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:56:04.998371Z node 2 :KQP_PROXY DEBUG: TraceId: "01jqpenhfd8rczverv8xtgjpe2", Created new session, sessionId: ydb://session/3?node_id=2&id=MTA5OTRkOGMtMzEzODkxMjUtMmExMmY1NmMtYzE2MDcyYzU=, workerId: [2:7488003028354650001:2329], database: , longSession: 0, local sessions count: 2 2025-03-31T15:56:04.999934Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: 01jqpenhfd8rczverv8xtgjpe2, Database: , DatabaseId: , SessionId: ydb://session/3?node_id=2&id=MTA5OTRkOGMtMzEzODkxMjUtMmExMmY1NmMtYzE2MDcyYzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 4, targetId: [2:7488003028354650001:2329] 2025-03-31T15:56:04.999972Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 4 timeout: 300.000000s actor id: [2:7488003028354650002:2140] 2025-03-31T15:56:05.000488Z node 2 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T15:56:05.000601Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488003028354650003:2330], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:56:05.000679Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:56:05.001129Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488003028354650008:2333], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:56:05.010764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-03-31T15:56:05.135151Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488003028354650010:2334], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-03-31T15:56:05.352217Z node 2 :TX_PROXY ERROR: Actor# [2:7488003032649617334:2150] txid# 281474976715658, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:56:05.807645Z node 2 :KQP_PROXY DEBUG: TraceId: "01jqpenhfd8rczverv8xtgjpe2", Forwarded response to sender actor, requestId: 4, sender: [2:7488003028354650000:2328], selfId: [2:7488002933865369131:2079], source: [2:7488003028354650001:2329] 2025-03-31T15:56:05.872143Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=MTA5OTRkOGMtMzEzODkxMjUtMmExMmY1NmMtYzE2MDcyYzU=, workerId: [2:7488003028354650001:2329], local sessions count: 1 2025-03-31T15:56:05.872221Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 3, sender: [1:7488002932095277522:2281], selfId: [2:7488002933865369131:2079], source: [2:7488003024059682673:2324] 2025-03-31T15:56:05.873452Z node 1 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 2, sender: [1:7488002936390245323:2455], selfId: [1:7488002932095277522:2281], source: [2:7488002933865369131:2079] 2025-03-31T15:56:13.170464Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:74 ... -03-31T15:56:34.312677Z node 3 :KQP_PROXY DEBUG: Table result_sets updater. Request: create. Transaction completed: 281474976715661. Doublechecking... 2025-03-31T15:56:34.393386Z node 3 :KQP_PROXY DEBUG: Table result_sets updater. Column diff is empty, finishing 2025-03-31T15:56:34.410543Z node 3 :KQP_PROXY DEBUG: Table script_executions updater. Request: create. Transaction completed: 281474976715659. Doublechecking... 2025-03-31T15:56:34.416173Z node 3 :KQP_PROXY DEBUG: Table script_execution_leases updater. Request: create. Transaction completed: 281474976715660. Doublechecking... 2025-03-31T15:56:34.485212Z node 3 :KQP_PROXY DEBUG: Table script_executions updater. Column diff is empty, finishing 2025-03-31T15:56:34.507578Z node 3 :KQP_PROXY DEBUG: Table script_execution_leases updater. Column diff is empty, finishing 2025-03-31T15:56:34.511334Z node 3 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: 9e79dc03-24e32ed-8d064cd0-fb6846cc, Bootstrap. 
Database: /Root 2025-03-31T15:56:34.512664Z node 3 :KQP_PROXY DEBUG: Request has 18445000637115.038977s seconds to be completed 2025-03-31T15:56:34.514795Z node 3 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=3&id=YjA2NGExNTgtYWUxYTU4YTAtYzc2YzcwOC0yZTc5MjJhMQ==, workerId: [3:7488003155307005127:2370], database: /Root, longSession: 1, local sessions count: 1 2025-03-31T15:56:34.514944Z node 3 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-03-31T15:56:34.517006Z node 3 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: 9e79dc03-24e32ed-8d064cd0-fb6846cc, RunDataQuery: -- TCreateScriptOperationQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $run_script_actor_id AS Text; DECLARE $execution_status AS Int32; DECLARE $execution_mode AS Int32; DECLARE $query_text AS Text; DECLARE $syntax AS Int32; DECLARE $meta AS JsonDocument; DECLARE $lease_duration AS Interval; DECLARE $execution_meta_ttl AS Interval; UPSERT INTO `.metadata/script_executions` (database, execution_id, run_script_actor_id, execution_status, execution_mode, start_ts, query_text, syntax, meta, expire_at) VALUES ($database, $execution_id, $run_script_actor_id, $execution_status, $execution_mode, CurrentUtcTimestamp(), $query_text, $syntax, $meta, CurrentUtcTimestamp() + $execution_meta_ttl); UPSERT INTO `.metadata/script_execution_leases` (database, execution_id, lease_deadline, lease_generation, expire_at) VALUES ($database, $execution_id, CurrentUtcTimestamp() + $lease_duration, 1, CurrentUtcTimestamp() + $execution_meta_ttl); 2025-03-31T15:56:34.517747Z node 3 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /Root, DatabaseId: , SessionId: ydb://session/3?node_id=3&id=YjA2NGExNTgtYWUxYTU4YTAtYzc2YzcwOC0yZTc5MjJhMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 5, targetId: [3:7488003155307005127:2370] 2025-03-31T15:56:34.517784Z node 3 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 5 timeout: 300.000000s actor id: [3:7488003155307005129:3059] 2025-03-31T15:56:34.518519Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488003155307005130:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:56:34.518617Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:56:34.519196Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488003155307005135:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:56:34.522442Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:2, at schemeshard: 72057594046644480 2025-03-31T15:56:34.557382Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488003155307005137:2376], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-03-31T15:56:34.659026Z node 3 :TX_PROXY ERROR: Actor# [3:7488003155307005201:3112] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:56:34.898999Z node 3 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 5, sender: [3:7488003155307005128:2371], selfId: [3:7488003065112690298:2281], source: [3:7488003155307005127:2370] 2025-03-31T15:56:34.899673Z node 3 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: 9e79dc03-24e32ed-8d064cd0-fb6846cc, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=3&id=YjA2NGExNTgtYWUxYTU4YTAtYzc2YzcwOC0yZTc5MjJhMQ==, TxId: 2025-03-31T15:56:34.899718Z node 3 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: 9e79dc03-24e32ed-8d064cd0-fb6846cc, Finish with SUCCESS, SessionId: ydb://session/3?node_id=3&id=YjA2NGExNTgtYWUxYTU4YTAtYzc2YzcwOC0yZTc5MjJhMQ==, TxId: 2025-03-31T15:56:34.899735Z node 3 :KQP_PROXY DEBUG: [ScriptExecutions] Create script execution operation. ExecutionId: 9e79dc03-24e32ed-8d064cd0-fb6846cc. Result: SUCCESS. Issues: 2025-03-31T15:56:34.903586Z node 3 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=3&id=YzIzZGE2MjQtYTZjN2Q0YjUtOTE2NzFkNjQtMTJhMWFjZg==, workerId: [3:7488003155307005280:2389], database: /Root, longSession: 1, local sessions count: 2 2025-03-31T15:56:34.903759Z node 3 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-03-31T15:56:34.904051Z node 3 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=3&id=YjA2NGExNTgtYWUxYTU4YTAtYzc2YzcwOC0yZTc5MjJhMQ==, workerId: [3:7488003155307005127:2370], local sessions count: 1 2025-03-31T15:56:34.904862Z node 3 :KQP_PROXY DEBUG: Ctx: { TraceId: 01jqpepewy8005a2tkrvcj65q8, Database: /Root, DatabaseId: , SessionId: ydb://session/3?node_id=3&id=YzIzZGE2MjQtYTZjN2Q0YjUtOTE2NzFkNjQtMTJhMWFjZg==, CurrentExecutionId: 9e79dc03-24e32ed-8d064cd0-fb6846cc, CustomerSuppliedId: 01jqpepewy8005a2tkrvcj65q8, PoolId: }. TEvQueryRequest, set timer for: 604800.000000s timeout: 604800.000000s cancelAfter: 0.000000s. Send request to target, requestId: 7, targetId: [3:7488003155307005280:2389] 2025-03-31T15:56:34.904901Z node 3 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 7 timeout: 604800.000000s actor id: [3:7488003155307005282:3164] 2025-03-31T15:56:34.927328Z node 3 :KQP_PROXY DEBUG: TraceId: "01jqpepfqebm06wdkckrgjar23", Request has 18445000637114.624319s seconds to be completed 2025-03-31T15:56:34.929527Z node 3 :KQP_PROXY DEBUG: TraceId: "01jqpepfqebm06wdkckrgjar23", Created new session, sessionId: ydb://session/3?node_id=3&id=MzZkMzM3YTEtNTNhZTE4MjYtZmM5M2Q2NGUtNTBmYjJmNWE=, workerId: [3:7488003155307005294:2395], database: /Root, longSession: 1, local sessions count: 2 2025-03-31T15:56:34.929683Z node 3 :KQP_PROXY DEBUG: Received create session request, trace_id: 01jqpepfqebm06wdkckrgjar23 2025-03-31T15:56:34.937901Z node 3 :KQP_PROXY DEBUG: Ctx: { TraceId: 01jqpepfqse2cbe5c13yg5380g, Database: /Root, DatabaseId: , SessionId: ydb://session/3?node_id=3&id=MzZkMzM3YTEtNTNhZTE4MjYtZmM5M2Q2NGUtNTBmYjJmNWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 9, targetId: [3:7488003155307005294:2395] 2025-03-31T15:56:34.937972Z node 3 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 9 timeout: 300.000000s actor id: [3:7488003155307005297:3170] 2025-03-31T15:56:34.968593Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7488003155307005301:3172], for# user@builtin, access# DescribeSchema 2025-03-31T15:56:34.968634Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7488003155307005301:3172], for# user@builtin, access# DescribeSchema 2025-03-31T15:56:34.982628Z node 3 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 9e79dc03-24e32ed-8d064cd0-fb6846cc, Bootstrap. Database: /Root 2025-03-31T15:56:34.983868Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7488003155307005298:2397], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/script_executions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T15:56:34.984385Z node 3 :KQP_PROXY DEBUG: Request has 18445000637114.567252s seconds to be completed 2025-03-31T15:56:34.985673Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MzZkMzM3YTEtNTNhZTE4MjYtZmM5M2Q2NGUtNTBmYjJmNWE=, ActorId: [3:7488003155307005294:2395], ActorState: ExecuteState, TraceId: 01jqpepfqse2cbe5c13yg5380g, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T15:56:34.986496Z node 3 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=3&id=NDhiNTg2MGUtNzEyYmFmZDMtZWIyZmY2M2QtMjY4NzE3MmI=, workerId: [3:7488003155307005310:2400], database: /Root, longSession: 1, local sessions count: 3 2025-03-31T15:56:34.986642Z node 3 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-03-31T15:56:34.986879Z node 3 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 9e79dc03-24e32ed-8d064cd0-fb6846cc, RunDataQuery: -- TSaveScriptExecutionResultMetaQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $result_set_metas AS JsonDocument; UPDATE `.metadata/script_executions` SET result_set_metas = $result_set_metas WHERE database = $database AND execution_id = $execution_id; 2025-03-31T15:56:34.987143Z node 3 :KQP_PROXY DEBUG: TraceId: "01jqpepfqse2cbe5c13yg5380g", Forwarded response to sender actor, requestId: 9, sender: [3:7488003155307005296:2396], selfId: [3:7488003065112690298:2281], source: [3:7488003155307005294:2395] 2025-03-31T15:56:34.987294Z node 3 :KQP_PROXY DEBUG: TraceId: "01jqpepewy8005a2tkrvcj65q8", Forwarded response to sender actor, requestId: 7, sender: [3:7488003155307005124:3057], selfId: [3:7488003065112690298:2281], source: [3:7488003155307005280:2389] 2025-03-31T15:56:34.987402Z node 3 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /Root, DatabaseId: , SessionId: ydb://session/3?node_id=3&id=NDhiNTg2MGUtNzEyYmFmZDMtZWIyZmY2M2QtMjY4NzE3MmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 11, targetId: [3:7488003155307005310:2400] 2025-03-31T15:56:34.987436Z node 3 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 11 timeout: 300.000000s actor id: [3:7488003155307005312:3176] 2025-03-31T15:56:35.047815Z node 3 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 >> TSubDomainTest::CoordinatorRunAtSubdomainNodeWhenAvailable [GOOD] >> TSubDomainTest::CoordinatorRunAtSubdomainNodeWhenAvailable2 >> KqpSort::ReverseLimitOptimized [GOOD] >> KqpSort::ReverseEightShardOptimized >> KqpNewEngine::KeyColumnOrder2 [GOOD] >> KqpNewEngine::LocksEffects >> PersQueueSdkReadSessionTest::ReadSessionWithCloseNotCommitted [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithExplicitlySpecifiedPartitions >> StreamCreator::Basic >> TopicAutoscaling::ControlPlane_BackCompatibility [GOOD] >> TopicAutoscaling::ControlPlane_AutoscalingWithStorageSizeRetention |67.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> TopicAutoscaling::ControlPlane_PauseAutoPartitioning [GOOD] >> TopicAutoscaling::ControlPlane_CDC_Enable >> ReadSessionImplTest::PacksBatches_BigBatchDecompressWithTwoBatchTasks [GOOD] >> ReadSessionImplTest::PacksBatches_DecompressesOneMessagePerTime >> KqpNotNullColumns::InsertFromSelect [GOOD] >> KqpNotNullColumns::InsertNotNullPg+useSink |67.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge |67.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge |67.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge >> KqpScanArrowFormat::SingleKey |67.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> KqpNewEngine::Update-UseSink [GOOD] >> KqpNewEngine::UpdateFromParams >> KqpNotNullColumns::UpdateTable_DontChangeNotNull [GOOD] >> KqpNotNullColumns::UpdateTable_DontChangeNotNullWithIndex >> THiveTest::TestCreateTabletAndReassignGroupsWithReboots [GOOD] >> THiveTest::TestCreateTabletChangeToExternal >> ReadSessionImplTest::PacksBatches_DecompressesOneMessagePerTime [GOOD] >> KqpScanArrowFormat::AllTypesColumns >> ReadSessionImplTest::PartitionStreamStatus [GOOD] >> ReadSessionImplTest::PartitionStreamCallbacks [GOOD] >> KqpNewEngine::ReadRangeWithParams [GOOD] >> KqpNewEngine::ReadDifferentColumns ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::PartitionStreamCallbacks [GOOD] Test command err: 2025-03-31T15:56:35.224178Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:56:35.224238Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:56:35.224269Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T15:56:35.224685Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-03-31T15:56:35.225351Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-31T15:56:35.239184Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:56:35.240234Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-31T15:56:35.241136Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-03-31T15:56:35.241570Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-03-31T15:56:35.241801Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2025-03-31T15:56:35.241964Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-03-31T15:56:35.242081Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-31T15:56:35.242117Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (2-2) 2025-03-31T15:56:35.242178Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-03-31T15:56:35.242201Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-03-31T15:56:35.243701Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:56:35.243752Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:56:35.243779Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T15:56:35.244050Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-31T15:56:35.245116Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-31T15:56:35.251043Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:56:35.253477Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 10 Compressed message data size: 30 2025-03-31T15:56:35.254528Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-03-31T15:56:35.254684Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2025-03-31T15:56:35.256839Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2025-03-31T15:56:35.257031Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2025-03-31T15:56:35.259408Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-31T15:56:35.259456Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-03-31T15:56:35.259495Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-03-31T15:56:35.259647Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 Getting new event 2025-03-31T15:56:35.259686Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-03-31T15:56:35.259708Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2025-03-31T15:56:35.259729Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-03-31T15:56:35.259864Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 5). Partition stream id: 1 Getting new event 2025-03-31T15:56:35.259991Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2025-03-31T15:56:35.260036Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2025-03-31T15:56:35.260062Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-03-31T15:56:35.260149Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 7). Partition stream id: 1 Getting new event 2025-03-31T15:56:35.260182Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2025-03-31T15:56:35.260201Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2025-03-31T15:56:35.260222Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-03-31T15:56:35.260318Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [7, 9). Partition stream id: 1 2025-03-31T15:56:35.288150Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:56:35.288190Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:56:35.288216Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T15:56:35.288679Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-31T15:56:35.289199Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-31T15:56:35.291786Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:56:35.292379Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 100 Compressed message data size: 91 2025-03-31T15:56:35.294025Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-03-31T15:56:35.294451Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2025-03-31T15:56:35.294813Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2025-03-31T15:56:35.295110Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2025-03-31T15:56:35.295246Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-31T15:56:35.295309Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-03-31T15:56:35.295420Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 2). Partition stream id: 1 Getting new event 2025-03-31T15:56:35.295458Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-03-31T15:56:35.295476Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-03-31T15:56:35.295543Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [2, 3). Partition stream id: 1 Getting new event 2025-03-31T15:56:35.295566Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-03-31T15:56:35.295596Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-03-31T15:56:35.295697Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 4). Partition stream id: 1 Getting new event 2025-03-31T15:56:35.295727Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2025-03-31T15:56:35.295746Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStream ... tream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 190 SeqNo: 231 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 191 SeqNo: 232 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 192 SeqNo: 233 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 193 SeqNo: 234 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 194 SeqNo: 235 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 195 SeqNo: 236 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 196 SeqNo: 237 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 197 SeqNo: 238 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 198 SeqNo: 239 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 199 SeqNo: 240 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 200 SeqNo: 241 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-03-31T15:56:45.354681Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 201). Partition stream id: 1 2025-03-31T15:56:45.548186Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 5, ReadSizeServerDelta = 0 2025-03-31T15:56:45.548231Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 5, ReadSizeServerDelta = 0 2025-03-31T15:56:45.548265Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T15:56:45.584446Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-31T15:56:45.608334Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-31T15:56:45.610603Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 5, ReadSizeServerDelta = 0 2025-03-31T15:56:45.620411Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 1000000 Compressed message data size: 3028 Post function Getting new event 2025-03-31T15:56:49.053361Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-10) 2025-03-31T15:56:49.184765Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-31T15:56:49.204698Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-03-31T15:56:49.485330Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-03-31T15:56:49.486224Z :DEBUG: Take Data. Partition 1. Read: {0, 3} (4-4) 2025-03-31T15:56:49.631933Z :DEBUG: Take Data. Partition 1. Read: {0, 4} (5-5) 2025-03-31T15:56:49.632989Z :DEBUG: Take Data. Partition 1. Read: {0, 5} (6-6) 2025-03-31T15:56:49.682415Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (7-7) 2025-03-31T15:56:49.683820Z :DEBUG: Take Data. Partition 1. Read: {1, 1} (8-8) 2025-03-31T15:56:49.761284Z :DEBUG: Take Data. Partition 1. 
Read: {1, 2} (9-9) 2025-03-31T15:56:49.764660Z :DEBUG: Take Data. Partition 1. Read: {1, 3} (10-10) 2025-03-31T15:56:49.764855Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 10, size 10000000 bytes 2025-03-31T15:56:49.765699Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 5, ReadSizeServerDelta = 0 DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 9 SeqNo: 50 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. 
Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 10 SeqNo: 51 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-03-31T15:56:49.790092Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 11). Partition stream id: 1 2025-03-31T15:56:49.794141Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:56:49.794171Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:56:49.794200Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T15:56:49.794550Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-31T15:56:49.795256Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-31T15:56:49.795770Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:56:49.796049Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-31T15:56:49.796479Z :DEBUG: [db] [sessionid] [cluster] Requesting status for partition stream id: 1 2025-03-31T15:56:49.797477Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:56:49.797494Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:56:49.797523Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T15:56:49.797798Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-31T15:56:49.798216Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-31T15:56:49.798371Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:56:49.799849Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:56:49.801037Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-03-31T15:56:49.801159Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-31T15:56:49.801201Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-03-31T15:56:49.801364Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". 
Partition: 1 >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleDrop |67.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut |67.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut |67.2%| [LD] {RESULT} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut >> THiveTest::TestCreateTabletChangeToExternal [GOOD] >> THiveTest::TestExternalBoot >> KqpReturning::ReturningWorksIndexedUpsert-QueryService [GOOD] >> KqpReturning::ReturningWorksIndexedReplace-QueryService |67.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection |67.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection |67.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection >> THiveTest::TestHiveBalancerHighUsageAndColumnShards [GOOD] >> THiveTest::TestHiveBalancerWithSpareNodes >> IndexBuildTest::WithFollowers >> TopicAutoscaling::Simple_AutoscaleAwareSDK [GOOD] >> TopicAutoscaling::Simple_PQv1 >> IndexBuildTest::ShadowDataNotAllowedByDefault >> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_PQv1 [GOOD] >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_BeforeAutoscaleAwareSDK >> THiveTest::TestExternalBoot [GOOD] >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessages [GOOD] >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessagesNoClusterDiscovery >> StreamCreator::Basic [GOOD] >> KqpSort::ReverseEightShardOptimized [GOOD] >> KqpSort::PassLimit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THiveTest::TestExternalBoot [GOOD] Test command err: 2025-03-31T15:54:06.523598Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-03-31T15:54:06.553836Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-03-31T15:54:06.555034Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:196} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-03-31T15:54:06.559753Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-03-31T15:54:06.568036Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:265} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2025-03-31T15:54:06.568620Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-03-31T15:54:06.573477Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:73:2076] ControllerId# 72057594037932033 2025-03-31T15:54:06.573692Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-03-31T15:54:06.574104Z node 1 :BS_NODE DEBUG: 
{NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295 2025-03-31T15:54:06.579630Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler 2025-03-31T15:54:06.719067Z node 1 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-03-31T15:54:06.719280Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-03-31T15:54:06.753324Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:72:2075] Create Queue# [1:81:2081] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:06.753830Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:72:2075] Create Queue# [1:82:2082] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:06.754489Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:72:2075] Create Queue# [1:83:2083] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:06.756953Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:72:2075] Create Queue# [1:84:2084] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:06.757309Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:72:2075] Create Queue# [1:85:2085] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:06.758337Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:72:2075] Create Queue# [1:86:2086] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:06.758632Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:72:2075] Create Queue# [1:87:2087] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:06.758655Z node 1 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-03-31T15:54:06.759070Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [1:73:2076] 2025-03-31T15:54:06.760006Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [1:73:2076] 2025-03-31T15:54:06.760565Z node 1 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-03-31T15:54:06.760767Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-03-31T15:54:06.765134Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-03-31T15:54:06.765415Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-03-31T15:54:06.816197Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-03-31T15:54:06.816680Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-03-31T15:54:06.820855Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [2:96:2074] ControllerId# 72057594037932033 2025-03-31T15:54:06.820889Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-03-31T15:54:06.821116Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295 2025-03-31T15:54:06.821962Z node 2 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler 2025-03-31T15:54:06.828220Z node 2 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-03-31T15:54:06.828446Z node 2 :BS_PROXY NOTICE: 
EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-03-31T15:54:06.846227Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:95:2073] Create Queue# [2:102:2078] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:06.847046Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:95:2073] Create Queue# [2:103:2079] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:06.852257Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:95:2073] Create Queue# [2:104:2080] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:06.852771Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:95:2073] Create Queue# [2:105:2081] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:06.853284Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:95:2073] Create Queue# [2:106:2082] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:06.853946Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:95:2073] Create Queue# [2:107:2083] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:06.854729Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:95:2073] Create Queue# [2:108:2084] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:06.854752Z node 2 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-03-31T15:54:06.854810Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [2:96:2074] 2025-03-31T15:54:06.855004Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [2:96:2074] 2025-03-31T15:54:06.855041Z node 2 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-03-31T15:54:06.855070Z node 2 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-03-31T15:54:06.858093Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-03-31T15:54:06.858356Z node 3 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-03-31T15:54:06.947420Z node 3 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-03-31T15:54:06.947719Z node 3 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-03-31T15:54:06.962390Z node 3 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [3:114:2074] ControllerId# 72057594037932033 2025-03-31T15:54:06.962440Z node 3 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-03-31T15:54:06.962502Z node 3 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295 2025-03-31T15:54:06.963405Z node 3 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler 2025-03-31T15:54:06.977373Z node 3 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-03-31T15:54:06.977425Z node 3 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-03-31T15:54:06.997349Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:113:2073] Create Queue# [3:120:2078] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:06.998053Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:113:2073] Create Queue# [3:121:2079] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:06.998884Z node 3 
:BS_PROXY DEBUG: Group# 0 Actor# [3:113:2073] Create Queue# [3:122:2080] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:06.999195Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:113:2073] Create Queue# [3:123:2081] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:06.999867Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:113:2073] Create Queue# [3:124:2082] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:07.000693Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:113:2073] Create Queue# [3:125:2083] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:07.001161Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:113:2073] Create Queue# [3:126:2084] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:07.001185Z node 3 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-03-31T15:54:07.001414Z node 3 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [3:114:2074] 2025-03-31T15:54:07.001437Z node 3 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [3:114:2074] 2025-03-31T15:54:07.001471Z node 3 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-03-31T15:54:07.001652Z node 3 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-03-31T15:54:07.002913Z node 3 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-03-31T15:54:07.012831Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-03-31T15:54:07.642318Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:73:2076] 2025-03-31T15:54:07.644160Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-03-31T15:54:07.644408Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-03-31T15:54:07.656421Z node 2 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-03-31T15:54:07.657127Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [2:96:2074] 2025-03-31T15:54:07.657338Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-03-31T15:54:07.657529Z node 2 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-03-31T15:54:07.657779Z node 3 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-03-31T15:54:07.658193Z node 3 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [3:114:2074] 2025-03-31T15:54:07.658556Z node 3 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-03-31T15:54:07.658574Z node 3 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-03-31T15:54:07.659502Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeLis ... 
d result local node, try to connect [28:317:2292] 2025-03-31T15:56:56.650865Z node 28 :PIPE_CLIENT DEBUG: TClient[72057594037932033]::SendEvent [28:317:2292] 2025-03-31T15:56:56.650937Z node 28 :PIPE_SERVER DEBUG: [72057594037932033] Accept Connect Originator# [28:317:2292] 2025-03-31T15:56:56.655848Z node 28 :PIPE_CLIENT DEBUG: TClient[72057594037932033] connected with status OK role: Leader [28:317:2292] 2025-03-31T15:56:56.655888Z node 28 :PIPE_CLIENT DEBUG: TClient[72057594037932033] send queued [28:317:2292] 2025-03-31T15:56:56.655913Z node 28 :PIPE_CLIENT DEBUG: TClient[72057594037932033] push event to server [28:317:2292] 2025-03-31T15:56:56.655962Z node 28 :PIPE_SERVER DEBUG: [72057594037932033] HandleSend Sender# [28:283:2269] EventType# 268637702 2025-03-31T15:56:56.656472Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{28, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} queued, type NKikimr::NBsController::TBlobStorageController::TTxSelectGroups 2025-03-31T15:56:56.657621Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{28, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-31T15:56:56.663270Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{28, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} hope 1 -> done Change{20, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-03-31T15:56:56.664053Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{28, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} release 4194304b of static, Memory{0 dyn 0} 2025-03-31T15:56:56.665398Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxUpdateTabletGroups} queued, type NKikimr::NHive::TTxUpdateTabletGroups 2025-03-31T15:56:56.666242Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxUpdateTabletGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-31T15:56:56.673242Z node 28 :HIVE NOTICE: HIVE#72057594037927937 THive::TTxUpdateTabletGroups{88923004475424}(72075186224037888)::Execute - TryToBoot was not successful 2025-03-31T15:56:56.674436Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxUpdateTabletGroups} hope 1 -> done Change{5, redo 698b alter 0b annex 0, ~{ 2, 1, 3 } -{ }, 0 gb} 2025-03-31T15:56:56.679359Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxUpdateTabletGroups} release 4194304b of static, Memory{0 dyn 0} 2025-03-31T15:56:56.692228Z node 28 :BS_PROXY_PUT INFO: [49bb8b081a887568] bootstrap ActorId# [28:320:2295] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:4:0:0:693:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-03-31T15:56:56.692336Z node 28 :BS_PROXY_PUT DEBUG: [49bb8b081a887568] Id# [72057594037927937:2:4:0:0:693:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-03-31T15:56:56.692375Z node 28 :BS_PROXY_PUT DEBUG: [49bb8b081a887568] restore Id# [72057594037927937:2:4:0:0:693:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-03-31T15:56:56.692421Z node 28 :BS_PROXY_PUT DEBUG: [49bb8b081a887568] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:4:0:0:693:1] Marker# BPG33 2025-03-31T15:56:56.692454Z node 28 :BS_PROXY_PUT DEBUG: [49bb8b081a887568] Sending missing VPut part# 0 to# 0 blob Id#
[72057594037927937:2:4:0:0:693:1] Marker# BPG32 2025-03-31T15:56:56.692551Z node 28 :BS_PROXY DEBUG: Send to queueActorId# [28:35:2079] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:4:0:0:693:1] FDS# 693 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-31T15:56:56.703876Z node 28 :BS_PROXY_PUT DEBUG: [49bb8b081a887568] received {EvVPutResult Status# OK ID# [72057594037927937:2:4:0:0:693:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 19 } Cost# 85456 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 20 }}}} from# [0:1:0:0:0] Marker# BPP01 2025-03-31T15:56:56.704340Z node 28 :BS_PROXY_PUT DEBUG: [49bb8b081a887568] Result# TEvPutResult {Id# [72057594037927937:2:4:0:0:693:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2025-03-31T15:56:56.704392Z node 28 :BS_PROXY_PUT INFO: [49bb8b081a887568] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:4:0:0:693:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-03-31T15:56:56.704493Z node 28 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 0 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.468 sample PartId# [72057594037927937:2:4:0:0:693:1] QueryCount# 1 VDiskId# [0:1:0:0:0] NodeId# 28 } TEvVPutResult{ TimestampMs# 11.826 VDiskId# [0:1:0:0:0] NodeId# 28 Status# OK } ] } 2025-03-31T15:56:56.704597Z node 28 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037927937:2:4:0:0:693:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-03-31T15:56:56.704707Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} committed cookie 1 for step 4 2025-03-31T15:56:56.705688Z node 28 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2025-03-31T15:56:56.705773Z node 28 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2025-03-31T15:56:56.705811Z node 28 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2025-03-31T15:56:56.705859Z node 28 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2025-03-31T15:56:56.705892Z node 28 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2025-03-31T15:56:56.705933Z node 28 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2025-03-31T15:56:56.705976Z node 28 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2025-03-31T15:56:56.707423Z node 28 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [28:324:2298] 2025-03-31T15:56:56.707485Z node 28 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [28:324:2298] 2025-03-31T15:56:56.707831Z node 28 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [28:324:2298] 2025-03-31T15:56:56.707917Z node 28 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-03-31T15:56:56.707981Z node 28 :TABLET_RESOLVER DEBUG: SelectForward node 28 selfDC leaderDC 1:2:0 local 1 localDc 1 other 0
disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [28:276:2265] 2025-03-31T15:56:56.708706Z node 28 :PIPE_CLIENT DEBUG: TClient[72057594037927937] received pending shutdown [28:324:2298] 2025-03-31T15:56:56.709107Z node 28 :PIPE_CLIENT DEBUG: TClient[72057594037927937] forward result local node, try to connect [28:324:2298] 2025-03-31T15:56:56.709511Z node 28 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [28:324:2298] 2025-03-31T15:56:56.710786Z node 28 :PIPE_SERVER DEBUG: [72057594037927937] Accept Connect Originator# [28:324:2298] 2025-03-31T15:56:56.714555Z node 28 :PIPE_CLIENT DEBUG: TClient[72057594037927937] connected with status OK role: Leader [28:324:2298] 2025-03-31T15:56:56.714988Z node 28 :PIPE_CLIENT DEBUG: TClient[72057594037927937] send queued [28:324:2298] 2025-03-31T15:56:56.715615Z node 28 :PIPE_CLIENT DEBUG: TClient[72057594037927937] push event to server [28:324:2298] 2025-03-31T15:56:56.715990Z node 28 :PIPE_CLIENT DEBUG: TClient[72057594037927937] shutdown pipe due to pending shutdown request [28:324:2298] 2025-03-31T15:56:56.716475Z node 28 :PIPE_CLIENT DEBUG: TClient[72057594037927937] notify reset [28:324:2298] 2025-03-31T15:56:56.716909Z node 28 :PIPE_SERVER DEBUG: [72057594037927937] HandleSend Sender# [28:323:2297] EventType# 268697624 2025-03-31T15:56:56.718044Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{6, NKikimr::NHive::TTxStartTablet} queued, type NKikimr::NHive::TTxStartTablet 2025-03-31T15:56:56.718694Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{6, NKikimr::NHive::TTxStartTablet} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-31T15:56:56.728239Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{6, NKikimr::NHive::TTxStartTablet} hope 1 -> done Change{6, redo 83b alter 0b annex 0, ~{ 1 } -{ }, 0 gb} 2025-03-31T15:56:56.728983Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{6, NKikimr::NHive::TTxStartTablet} release 4194304b of static, Memory{0 dyn 0} 2025-03-31T15:56:56.743904Z node 28 :BS_PROXY_PUT INFO: [6173685a7ad4b3c4] bootstrap ActorId# [28:327:2301] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:5:0:0:92:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-03-31T15:56:56.744068Z node 28 :BS_PROXY_PUT DEBUG: [6173685a7ad4b3c4] Id# [72057594037927937:2:5:0:0:92:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-03-31T15:56:56.744138Z node 28 :BS_PROXY_PUT DEBUG: [6173685a7ad4b3c4] restore Id# [72057594037927937:2:5:0:0:92:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-03-31T15:56:56.744222Z node 28 :BS_PROXY_PUT DEBUG: [6173685a7ad4b3c4] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:5:0:0:92:1] Marker# BPG33 2025-03-31T15:56:56.744284Z node 28 :BS_PROXY_PUT DEBUG: [6173685a7ad4b3c4] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:5:0:0:92:1] Marker# BPG32 2025-03-31T15:56:56.744473Z node 28 :BS_PROXY DEBUG: Send to queueActorId# [28:35:2079] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:5:0:0:92:1] FDS# 92 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-31T15:56:56.751343Z node 28 :BS_PROXY_PUT DEBUG: [6173685a7ad4b3c4] received {EvVPutResult Status# OK ID# [72057594037927937:2:5:0:0:92:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 20 } Cost# 80724 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { 
Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 21 }}}} from# [0:1:0:0:0] Marker# BPP01 2025-03-31T15:56:56.752799Z node 28 :BS_PROXY_PUT DEBUG: [6173685a7ad4b3c4] Result# TEvPutResult {Id# [72057594037927937:2:5:0:0:92:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2025-03-31T15:56:56.753499Z node 28 :BS_PROXY_PUT INFO: [6173685a7ad4b3c4] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:5:0:0:92:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-03-31T15:56:56.754467Z node 28 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 0 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.798 sample PartId# [72057594037927937:2:5:0:0:92:1] QueryCount# 1 VDiskId# [0:1:0:0:0] NodeId# 28 } TEvVPutResult{ TimestampMs# 7.683 VDiskId# [0:1:0:0:0] NodeId# 28 Status# OK } ] } 2025-03-31T15:56:56.759401Z node 28 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037927937:2:5:0:0:92:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-03-31T15:56:56.759967Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:6} committed cookie 1 for step 5 >> TopicAutoscaling::PartitionMerge_PreferedPartition_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionMerge_PreferedPartition_AutoscaleAwareSDK >> KqpNotNullColumns::InsertNotNullPg+useSink [GOOD] >> KqpNotNullColumns::InsertNotNullPg-useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> StreamCreator::Basic [GOOD] Test command err: 2025-03-31T15:56:45.235067Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488003201156329188:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:56:45.268101Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00103a/r3tmp/tmpGE6w5Y/pdisk_1.dat 2025-03-31T15:56:48.957551Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:56:49.804680Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:56:49.847929Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:56:49.848030Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:56:49.852737Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9048 TServer::EnableGrpc on GrpcPort 9169, node 1 2025-03-31T15:56:50.159240Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488003201156329188:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:56:50.159326Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:56:50.263762Z node 1 :NET_CLASSIFIER WARN: distributable
config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:56:50.263789Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:56:50.263795Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:56:50.263917Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9048 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:56:50.867452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:56:50.892593Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T15:56:50.898889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1743436611042 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... 
(TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743436610930 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1743436611042 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: ".sys" PathId: 1844... (TRUNCATED) 2025-03-31T15:56:51.153397Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-31T15:56:51.153505Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-31T15:56:51.153521Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-03-31T15:56:51.154197Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-03-31T15:56:58.255785Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743436611042, tx_id: 281474976715658 } } } 2025-03-31T15:56:58.258139Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-03-31T15:56:58.278259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-31T15:56:58.287795Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715659} 2025-03-31T15:56:58.287834Z node 1 :REPLICATION_CONTROLLER DEBUG: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976715659 2025-03-31T15:56:58.483717Z node 1 
:REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976715659 2025-03-31T15:56:58.484024Z node 1 :REPLICATION_CONTROLLER INFO: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 3] 2025-03-31T15:56:58.488776Z node 1 :REPLICATION_CONTROLLER TRACE: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::NController::TEvPrivate::TEvAllowCreateStream 2025-03-31T15:56:59.199518Z node 1 :CHANGE_EXCHANGE WARN: [CdcChangeSenderMain][72075186224037888:1][1:7488003261285872154:2355] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:5:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2025-03-31T15:56:59.402894Z node 1 :REPLICATION_CONTROLLER TRACE: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvAlterTableResponse { Result: { status: SUCCESS, issues: } } 2025-03-31T15:56:59.402922Z node 1 :REPLICATION_CONTROLLER INFO: [StreamCreator][rid 1][tid 1] Success: issues# 2025-03-31T15:56:59.450716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715662:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root/Table 2025-03-31T15:56:59.484186Z node 1 :REPLICATION_CONTROLLER TRACE: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvAlterTopicResponse { Result: { status: SUCCESS, issues: } } 2025-03-31T15:56:59.484216Z node 1 :REPLICATION_CONTROLLER INFO: [StreamCreator][rid 1][tid 1] Success: issues# TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1743436611042 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyC... 
(TRUNCATED) >> IndexBuildTest::ShadowDataNotAllowedByDefault [GOOD] >> IndexBuildTest::ShadowDataEdgeCases >> KqpNewEngine::LocksEffects [GOOD] >> KqpNewEngine::LeftSemiJoin |67.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut |67.2%| [LD] {RESULT} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut |67.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut >> KqpSqlIn::CantRewrite [GOOD] >> KqpSqlIn::ComplexKey >> IndexBuildTest::WithFollowers [GOOD] >> TTxDataShardMiniKQL::CrossShard_5_AllToAll [GOOD] >> TTxDataShardMiniKQL::CrossShard_6_Local >> KqpNewEngine::ReadDifferentColumns [GOOD] >> KqpNewEngine::ReadDifferentColumnsPk >> TopicAutoscaling::PartitionSplit_PreferedPartition_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionSplit_PreferedPartition_AutoscaleAwareSDK >> KqpNotNullColumns::UpdateTable_DontChangeNotNullWithIndex [GOOD] >> KqpNotNullColumns::UpdateTable_UniqIndex >> KqpNewEngine::UpdateFromParams [GOOD] >> KqpNewEngine::UpsertEmptyInput ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::WithFollowers [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T15:57:01.953289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T15:57:01.953400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:57:01.953442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T15:57:01.953477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T15:57:01.953523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T15:57:01.953571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T15:57:01.953637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:57:01.953720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T15:57:01.954115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:57:02.512218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:57:02.512293Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:57:02.597193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:57:02.597604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T15:57:02.599054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 
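The schemeshard trace that follows walks one operation part through its ProgressState phases: TCreateParts (state 2) to NSubDomainState::TConfigureParts (3) to TPropose (128) to TDone (240), visible as the "Change state for txid 1:0 X -> Y" records below. Here is a toy sketch of that progression; the numeric codes and phase names are taken from the trace, but packing them into a single enum is an illustrative assumption, not the actual NKikimr state machine.

// op_state.cpp -- toy reconstruction of the txid state progression seen in
// the trace below (2 -> 3 -> 128 -> 240); not YDB's real implementation.
#include <cstdint>
#include <iostream>
#include <stdexcept>

enum class EOpState : std::uint8_t {
    CreateParts    = 2,    // "TCreateParts ... ProgressState"
    ConfigureParts = 3,    // "NSubDomainState::TConfigureParts ... ProgressState"
    Propose        = 128,  // "NSubDomainState::TPropose ProgressState"
    Done           = 240,  // "TDone ... ProgressState", operation part finished
};

EOpState Next(EOpState s) {
    switch (s) {
        case EOpState::CreateParts:    return EOpState::ConfigureParts;
        case EOpState::ConfigureParts: return EOpState::Propose;
        case EOpState::Propose:        return EOpState::Done;
        case EOpState::Done:           throw std::logic_error("already done");
    }
    throw std::logic_error("unknown state");
}

int main() {
    EOpState s = EOpState::CreateParts;
    while (s != EOpState::Done) {
        EOpState n = Next(s);
        // Mirrors the "Change state for txid 1:0 X -> Y" records in the trace.
        std::cout << "Change state for txid 1:0 " << static_cast<int>(s)
                  << " -> " << static_cast<int>(n) << "\n";
        s = n;
    }
}

Running it prints the same three transitions the trace records before the operation part is reported done.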
2025-03-31T15:57:02.688821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T15:57:02.690896Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T15:57:02.709596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T15:57:02.710816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:57:02.725103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:57:02.745832Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:57:02.746597Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:57:02.755803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T15:57:02.756482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:57:02.756952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T15:57:02.763122Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T15:57:02.788225Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T15:57:02.982424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:57:02.982686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:57:02.982874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T15:57:02.985083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T15:57:02.985211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:57:02.988142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T15:57:02.988426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T15:57:02.988669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:57:02.988734Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T15:57:02.988771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState 
no shards to create, do next state 2025-03-31T15:57:02.988805Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T15:57:02.991691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:57:02.991781Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:57:02.991824Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T15:57:02.994999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:57:02.995097Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:57:02.995171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:57:02.995227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T15:57:02.999280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:57:03.001623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T15:57:03.001832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T15:57:03.002941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:57:03.003111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:57:03.003188Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:57:03.003531Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T15:57:03.003589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:57:03.003775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:57:03.003887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T15:57:03.007278Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:57:03.007332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 
72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:57:03.007550Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:57:03.007598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T15:57:03.007841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:57:03.007886Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T15:57:03.007985Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:57:03.008032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:57:03.008076Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:57:03.008109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:57:03.008145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T15:57:03.008185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:57:03.008236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T15:57:03.008267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T15:57:03.008356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:57:03.008393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T15:57:03.008424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T15:57:03.010463Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:57:03.010580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:57:03.010633Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
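On either side of this gap the pattern is the same: TTxPublishToSchemeBoard publishes a new version for each affected path, and every TEvUpdateAck drains the transaction's publication count until "Publication complete, notify & remove" fires and the waiting subscriber is notified. A small sketch of that bookkeeping, assuming a single schemeshard; the struct and method names are hypothetical (the real accounting lives in TTxAckPublishToSchemeBoard), and only the count-draining rule is taken from the trace.

// publication_tracker.cpp -- illustrative bookkeeping only; names and the
// exact draining rule are assumptions based on the surrounding trace.
#include <cstdint>
#include <iostream>
#include <map>

struct TPublicationTracker {
    // txId -> number of published path versions still awaiting acks.
    std::map<std::uint64_t, int> InFlight;

    void Publish(std::uint64_t txId, int paths) { InFlight[txId] += paths; }

    // Returns true when the last ack for the transaction arrives,
    // mirroring "Publication complete, notify & remove".
    bool Ack(std::uint64_t txId) {
        auto it = InFlight.find(txId);
        if (it == InFlight.end()) return false;  // stale or duplicate ack
        if (--it->second > 0) {
            std::cout << "Publication in-flight, count: " << it->second
                      << ", txId: " << txId << "\n";
            return false;
        }
        InFlight.erase(it);
        std::cout << "Publication complete, notify & remove, txId: " << txId << "\n";
        return true;
    }
};

int main() {
    TPublicationTracker tracker;
    tracker.Publish(104, 4);   // tx 104 publishes versions for four paths, as below
    for (int i = 0; i < 4; ++i) {
        tracker.Ack(104);      // one TEvUpdateAck per published path
    }
}

In the trace around this point, tx 104 indeed publishes versions for LocalPathIds 1 through 4 and completes once the final ack arrives.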
31T15:57:04.756290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 2/3 2025-03-31T15:57:04.756986Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:1 progress is 2/3 2025-03-31T15:57:04.757391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 2/3 2025-03-31T15:57:04.757436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 2/3, is published: false 2025-03-31T15:57:04.761976Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-03-31T15:57:04.762585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-03-31T15:57:04.762613Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-03-31T15:57:04.762645Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-03-31T15:57:04.762897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-03-31T15:57:04.771760Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2025-03-31T15:57:04.772082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2025-03-31T15:57:04.772359Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-03-31T15:57:04.772695Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-03-31T15:57:04.773024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:57:04.779519Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 104 2025-03-31T15:57:04.780000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 104 2025-03-31T15:57:04.780044Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-03-31T15:57:04.780078Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 8 2025-03-31T15:57:04.780437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-03-31T15:57:04.783574Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: 
Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-03-31T15:57:04.784305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-03-31T15:57:04.784333Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-03-31T15:57:04.788169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:2, at schemeshard: 72057594046678944 2025-03-31T15:57:04.788231Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 104:2 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:57:04.794959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-03-31T15:57:04.796513Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:2 progress is 3/3 2025-03-31T15:57:04.796561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2025-03-31T15:57:04.796593Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:2 progress is 3/3 2025-03-31T15:57:04.796867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2025-03-31T15:57:04.797171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 3/3, is published: false 2025-03-31T15:57:04.797202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2025-03-31T15:57:04.797571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-03-31T15:57:04.797893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-03-31T15:57:04.798939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-31T15:57:04.799330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:1 2025-03-31T15:57:04.799655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:1 2025-03-31T15:57:04.799987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-31T15:57:04.800011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:2 2025-03-31T15:57:04.800027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:2 2025-03-31T15:57:04.800062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-03-31T15:57:04.800392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 104, publications: 1, subscribers: 1 2025-03-31T15:57:04.800727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-03-31T15:57:04.805133Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-03-31T15:57:04.806676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-03-31T15:57:04.806711Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-03-31T15:57:04.808055Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-03-31T15:57:04.808907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-03-31T15:57:04.809789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 1 2025-03-31T15:57:04.810546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:333:2312] 2025-03-31T15:57:04.828589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-03-31T15:57:04.837796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-03-31T15:57:04.837928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-03-31T15:57:04.837958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-03-31T15:57:04.855791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-03-31T15:57:04.856217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-03-31T15:57:04.856553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:702:2660] TestWaitNotification: OK eventTxId 104 2025-03-31T15:57:04.862680Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/WithFollowers" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:57:04.869625Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/WithFollowers" took 3.81ms result status StatusSuccess 2025-03-31T15:57:04.871997Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/WithFollowers" PathDescription { Self { Name: "WithFollowers" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 TableSchemaVersion: 4 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "WithFollowers" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "valueFloat" Type: 
"Float" TypeId: 33 Id: 4 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 4 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TopicAutoscaling::ControlPlane_CDC_Enable [GOOD] >> TopicAutoscaling::ControlPlane_CDC_Disable >> TopicAutoscaling::ControlPlane_AutoscalingWithStorageSizeRetention [GOOD] >> TopicAutoscaling::CDC_PartitionSplit_AutosplitByLoad >> THiveTest::TestHiveBalancerWithSpareNodes [GOOD] >> IndexBuildTest::ShadowDataEdgeCases [GOOD] >> KqpScanArrowFormat::AllTypesColumns [GOOD] >> KqpScanArrowFormat::AllTypesColumnsCellvec >> RetryPolicy::TWriteSession_TestPolicy [GOOD] >> RetryPolicy::TWriteSession_TestBrokenPolicy >> TSchemeshardCompactionQueueTest::EnqueueEmptyShard [GOOD] >> TSchemeshardCompactionQueueTest::EnqueueSinglePartedShard [GOOD] >> TSchemeshardCompactionQueueTest::EnqueueSinglePartedShardWhenEnabled [GOOD] >> Balancing::Balancing_OneTopic_PQv1 [FAIL] >> Balancing::Balancing_ManyTopics_TopicApi >> TopicAutoscaling::ReBalancingAfterSplit_sessionsWithPartition [GOOD] >> TopicAutoscaling::ReadFromTimestamp_BeforeAutoscaleAwareSDK ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THiveTest::TestHiveBalancerWithSpareNodes [GOOD] Test command err: 2025-03-31T15:54:07.910105Z node 6 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-03-31T15:54:07.969442Z node 6 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-03-31T15:54:07.970249Z node 6 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-03-31T15:54:07.975013Z node 6 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [6:149:2076] ControllerId# 72057594037932033 2025-03-31T15:54:07.975243Z node 6 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-03-31T15:54:07.975824Z node 6 :BS_NODE DEBUG: 
{NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295 2025-03-31T15:54:07.979995Z node 6 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler 2025-03-31T15:54:07.985913Z node 3 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-03-31T15:54:08.035893Z node 3 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-03-31T15:54:08.036365Z node 3 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-03-31T15:54:08.042176Z node 3 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [3:158:2076] ControllerId# 72057594037932033 2025-03-31T15:54:08.042409Z node 3 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-03-31T15:54:08.042475Z node 3 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295 2025-03-31T15:54:08.043301Z node 3 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler 2025-03-31T15:54:08.049119Z node 3 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-03-31T15:54:08.049968Z node 3 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-03-31T15:54:08.066252Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:157:2075] Create Queue# [3:164:2080] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:08.067184Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:157:2075] Create Queue# [3:165:2081] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:08.067782Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:157:2075] Create Queue# [3:166:2082] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:08.068058Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:157:2075] Create Queue# [3:167:2083] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:08.069050Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:157:2075] Create Queue# [3:168:2084] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:08.069399Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:157:2075] Create Queue# [3:169:2085] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:08.070222Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:157:2075] Create Queue# [3:170:2086] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:08.070250Z node 3 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-03-31T15:54:08.070497Z node 3 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [3:158:2076] 2025-03-31T15:54:08.070701Z node 3 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [3:158:2076] 2025-03-31T15:54:08.070918Z node 3 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-03-31T15:54:08.071128Z node 3 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-03-31T15:54:08.076766Z node 3 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-03-31T15:54:08.077252Z node 4 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-03-31T15:54:08.126029Z node 4 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} 
ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-03-31T15:54:08.126135Z node 4 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-03-31T15:54:08.129781Z node 4 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [4:178:2077] ControllerId# 72057594037932033 2025-03-31T15:54:08.130038Z node 4 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-03-31T15:54:08.130446Z node 4 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295 2025-03-31T15:54:08.131564Z node 4 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler 2025-03-31T15:54:08.137669Z node 4 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-03-31T15:54:08.137890Z node 4 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-03-31T15:54:08.146977Z node 4 :BS_PROXY DEBUG: Group# 0 Actor# [4:177:2076] Create Queue# [4:184:2081] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:08.147365Z node 4 :BS_PROXY DEBUG: Group# 0 Actor# [4:177:2076] Create Queue# [4:185:2082] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:08.147645Z node 4 :BS_PROXY DEBUG: Group# 0 Actor# [4:177:2076] Create Queue# [4:186:2083] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:08.148011Z node 4 :BS_PROXY DEBUG: Group# 0 Actor# [4:177:2076] Create Queue# [4:187:2084] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:08.148509Z node 4 :BS_PROXY DEBUG: Group# 0 Actor# [4:177:2076] Create Queue# [4:188:2085] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:08.149175Z node 4 :BS_PROXY DEBUG: Group# 0 Actor# [4:177:2076] Create Queue# [4:189:2086] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:08.149884Z node 4 :BS_PROXY DEBUG: Group# 0 Actor# [4:177:2076] Create Queue# [4:190:2087] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:08.149909Z node 4 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-03-31T15:54:08.150108Z node 4 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [4:178:2077] 2025-03-31T15:54:08.150134Z node 4 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [4:178:2077] 2025-03-31T15:54:08.150168Z node 4 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-03-31T15:54:08.150201Z node 4 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-03-31T15:54:08.152174Z node 4 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-03-31T15:54:08.152411Z node 5 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-03-31T15:54:08.192553Z node 5 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { 
VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-03-31T15:54:08.192726Z node 5 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-03-31T15:54:08.194614Z node 5 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [5:198:2077] ControllerId# 72057594037932033 2025-03-31T15:54:08.194647Z node 5 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-03-31T15:54:08.194865Z node 5 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295 2025-03-31T15:54:08.195177Z node 5 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler 2025-03-31T15:54:08.198868Z node 5 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-03-31T15:54:08.198911Z node 5 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-03-31T15:54:08.204107Z node 5 :BS_PROXY DEBUG: Group# 0 Actor# [5:197:2076] Create Queue# [5:204:2081] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:08.204454Z node 5 :BS_PROXY DEBUG: Group# 0 Actor# [5:197:2076] Create Queue# [5:205:2082] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:08.204863Z node 5 :BS_PROXY DEBUG: Group# 0 Actor# [5:197:2076] Create Queue# [5:206:2083] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:08.205161Z node 5 :BS_PROXY DEBUG: Group# 0 Actor# [5:197:2076] Create Queue# [5:207:2084] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:08.205635Z node 5 :BS_PROXY DEBUG: Group# 0 Actor# [5:197:2076] Create Queue# [5:208:2085] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:08.205920Z node 5 :BS_PROXY DEBUG: Group# 0 Actor# [5:197:2076] Create Queue# [5:209:2086] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:08.206375Z node 5 :BS_PROXY DEBUG: Group# 0 Actor# [5:197:2076] Create Queue# [5:210:2087] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:08.206399Z node 5 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-03-31T15:54:08.206454Z node 5 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [5:198:2077] 2025-03-31T15:54:08.207227Z node 5 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [5:198:2077] 2025-03-31T15:54:08.207278Z node 5 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-03-31T15:54:08.207313Z node 5 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-03-31T15:54:08.208448Z node 5 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-03-31T15:54:08.242449Z node 6 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-03-31T15:54:08.242511Z node 6 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-03-31T15:54:08.251242Z node 6 :BS_PROXY DEBUG: Group# 0 Actor# [6:148:2075] Create Queue# [6:215:2081] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:08.251821Z node 6 :BS_PROXY DEBUG: Group# 0 Actor# [6:148:2075] Create Queue# [6:216:2082] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:08.252431Z node 6 :BS_PROXY DEBUG: Group# 0 Actor# [6:148:2075] Create Queue# [6:217:2083] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:08.253043Z node 6 :BS_PROXY DEBUG: Group# 0 Actor# [6:148:2075] Create Queue# [6:218:2084] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:08.253673Z node 6 :BS_PROXY DEBUG: Group# 0 Actor# [6:148:2075] Create Queue# [6:219:2085] 
targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:08.254546Z node 6 :BS_PROXY DEBUG: Group# 0 Actor# [6:148:2075] Create Queue# [6:220:2086] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:08.255650Z node 6 :BS_PROXY DEBUG: Group# 0 Actor# [6:148:2075] Create Queue# [6:221:20 ... 247Z node 56 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037892 CurrentLeader: [61:1939:2264] CurrentLeaderTablet: [61:1945:2267] CurrentGeneration: 3 CurrentStep: 0} 2025-03-31T15:57:07.787938Z node 56 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037892 entry.State: StProblemResolve success: true ev: {EvInfo Status: 0 TabletID: 72075186224037892 Cookie: 0 CurrentLeader: [61:1939:2264] CurrentLeaderTablet: [61:1945:2267] CurrentGeneration: 3 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {7, 10, 0}} 2025-03-31T15:57:07.787971Z node 56 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 72075186224037892 followers: 0 2025-03-31T15:57:07.788012Z node 56 :TABLET_RESOLVER DEBUG: SelectForward node 56 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037892 followers: 0 countLeader 1 allowFollowers 0 winner: [61:1939:2264] 2025-03-31T15:57:07.788104Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037892] forward result remote node 61 [56:2068:2742] 2025-03-31T15:57:07.788199Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037892] remote node connected [56:2068:2742] 2025-03-31T15:57:07.788551Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037892]::SendEvent [56:2068:2742] 2025-03-31T15:57:07.790955Z node 61 :PIPE_SERVER DEBUG: [72075186224037892] Accept Connect Originator# [56:2068:2742] 2025-03-31T15:57:07.795625Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037892] connected with status OK role: Leader [56:2068:2742] 2025-03-31T15:57:07.795671Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037892] send queued [56:2068:2742] 2025-03-31T15:57:07.806281Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037893] ::Bootstrap [56:2072:2744] 2025-03-31T15:57:07.806335Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037893] lookup [56:2072:2744] 2025-03-31T15:57:07.806732Z node 56 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037893 entry.State: StNormal ev: {EvForward TabletID: 72075186224037893 Ev: nullptr Flags: 1:2:0} 2025-03-31T15:57:07.807135Z node 56 :TABLET_RESOLVER DEBUG: SelectForward node 56 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037893 followers: 0 countLeader 1 allowFollowers 0 winner: [61:1287:2098] 2025-03-31T15:57:07.807570Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037893] forward result remote node 61 [56:2072:2744] 2025-03-31T15:57:07.807993Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037893] remote node connected [56:2072:2744] 2025-03-31T15:57:07.808029Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037893]::SendEvent [56:2072:2744] 2025-03-31T15:57:07.808547Z node 61 :PIPE_SERVER DEBUG: [72075186224037893] Accept Connect Originator# [56:2072:2744] 2025-03-31T15:57:07.809521Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037893] connected with status OK role: Leader [56:2072:2744] 2025-03-31T15:57:07.809558Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037893] send queued [56:2072:2744] 2025-03-31T15:57:07.818773Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037894] ::Bootstrap [56:2075:2746] 2025-03-31T15:57:07.818825Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037894] lookup [56:2075:2746] 
2025-03-31T15:57:07.819267Z node 56 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037894 entry.State: StNormal ev: {EvForward TabletID: 72075186224037894 Ev: nullptr Flags: 1:2:0} 2025-03-31T15:57:07.819314Z node 56 :TABLET_RESOLVER DEBUG: SelectForward node 56 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037894 followers: 0 countLeader 1 allowFollowers 0 winner: [60:1292:2099] 2025-03-31T15:57:07.819391Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037894] forward result remote node 60 [56:2075:2746] 2025-03-31T15:57:07.819489Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037894] remote node connected [56:2075:2746] 2025-03-31T15:57:07.819534Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037894]::SendEvent [56:2075:2746] 2025-03-31T15:57:07.821674Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037894] connect request undelivered [56:2075:2746] 2025-03-31T15:57:07.822671Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037894] immediate retry [56:2075:2746] 2025-03-31T15:57:07.822707Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037894] lookup [56:2075:2746] 2025-03-31T15:57:07.822762Z node 56 :TABLET_RESOLVER DEBUG: Handle TEvTabletProblem tabletId: 72075186224037894 entry.State: StNormal 2025-03-31T15:57:07.823342Z node 56 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037894 entry.State: StProblemResolve ev: {EvForward TabletID: 72075186224037894 Ev: nullptr Flags: 1:2:0} 2025-03-31T15:57:07.823422Z node 56 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037894 Cookie: 0 ProxyOptions: SigNone} 2025-03-31T15:57:07.823837Z node 56 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037894 Cookie: 0} 2025-03-31T15:57:07.823886Z node 56 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037894 Cookie: 1} 2025-03-31T15:57:07.824249Z node 56 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037894 Cookie: 2} 2025-03-31T15:57:07.824295Z node 56 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037894 CurrentLeader: [61:1940:2265] CurrentLeaderTablet: [61:1946:2268] CurrentGeneration: 3 CurrentStep: 0} 2025-03-31T15:57:07.824691Z node 56 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037894 CurrentLeader: [61:1940:2265] CurrentLeaderTablet: [61:1946:2268] CurrentGeneration: 3 CurrentStep: 0} 2025-03-31T15:57:07.824757Z node 56 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037894 entry.State: StProblemResolve success: true ev: {EvInfo Status: 0 TabletID: 72075186224037894 Cookie: 0 CurrentLeader: [61:1940:2265] CurrentLeaderTablet: [61:1946:2268] CurrentGeneration: 3 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {7, 10, 0}} 2025-03-31T15:57:07.824786Z node 56 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 72075186224037894 followers: 0 2025-03-31T15:57:07.824827Z node 56 :TABLET_RESOLVER DEBUG: SelectForward node 56 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037894 followers: 0 countLeader 1 allowFollowers 0 winner: [61:1940:2265] 2025-03-31T15:57:07.824906Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037894] forward result remote node 61 [56:2075:2746] 2025-03-31T15:57:07.825007Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037894] remote node connected [56:2075:2746] 2025-03-31T15:57:07.825387Z node 56 
:PIPE_CLIENT DEBUG: TClient[72075186224037894]::SendEvent [56:2075:2746] 2025-03-31T15:57:07.825975Z node 61 :PIPE_SERVER DEBUG: [72075186224037894] Accept Connect Originator# [56:2075:2746] 2025-03-31T15:57:07.828699Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037894] connected with status OK role: Leader [56:2075:2746] 2025-03-31T15:57:07.828746Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037894] send queued [56:2075:2746] 2025-03-31T15:57:07.843798Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037895] ::Bootstrap [56:2079:2748] 2025-03-31T15:57:07.843862Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037895] lookup [56:2079:2748] 2025-03-31T15:57:07.843937Z node 56 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037895 entry.State: StNormal ev: {EvForward TabletID: 72075186224037895 Ev: nullptr Flags: 1:2:0} 2025-03-31T15:57:07.843982Z node 56 :TABLET_RESOLVER DEBUG: SelectForward node 56 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037895 followers: 0 countLeader 1 allowFollowers 0 winner: [61:1785:2193] 2025-03-31T15:57:07.844085Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037895] forward result remote node 61 [56:2079:2748] 2025-03-31T15:57:07.844169Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037895] remote node connected [56:2079:2748] 2025-03-31T15:57:07.844210Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037895]::SendEvent [56:2079:2748] 2025-03-31T15:57:07.845147Z node 61 :PIPE_SERVER DEBUG: [72075186224037895] Accept Connect Originator# [56:2079:2748] 2025-03-31T15:57:07.845758Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037895] connected with status OK role: Leader [56:2079:2748] 2025-03-31T15:57:07.845791Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037895] send queued [56:2079:2748] 2025-03-31T15:57:07.856798Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037896] ::Bootstrap [56:2082:2750] 2025-03-31T15:57:07.856853Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037896] lookup [56:2082:2750] 2025-03-31T15:57:07.856917Z node 56 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037896 entry.State: StNormal ev: {EvForward TabletID: 72075186224037896 Ev: nullptr Flags: 1:2:0} 2025-03-31T15:57:07.856959Z node 56 :TABLET_RESOLVER DEBUG: SelectForward node 56 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037896 followers: 0 countLeader 1 allowFollowers 0 winner: [61:1788:2195] 2025-03-31T15:57:07.857025Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037896] forward result remote node 61 [56:2082:2750] 2025-03-31T15:57:07.857104Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037896] remote node connected [56:2082:2750] 2025-03-31T15:57:07.857449Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037896]::SendEvent [56:2082:2750] 2025-03-31T15:57:07.858331Z node 61 :PIPE_SERVER DEBUG: [72075186224037896] Accept Connect Originator# [56:2082:2750] 2025-03-31T15:57:07.860411Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037896] connected with status OK role: Leader [56:2082:2750] 2025-03-31T15:57:07.860453Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037896] send queued [56:2082:2750] 2025-03-31T15:57:07.868713Z node 56 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [56:2084:2751] 2025-03-31T15:57:07.869141Z node 56 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [56:2084:2751] 2025-03-31T15:57:07.869601Z node 56 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal ev: {EvForward TabletID: 
72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-03-31T15:57:07.871156Z node 56 :TABLET_RESOLVER DEBUG: SelectForward node 56 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [56:589:2274] 2025-03-31T15:57:07.872054Z node 56 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [56:2084:2751] 2025-03-31T15:57:07.872445Z node 56 :PIPE_CLIENT DEBUG: TClient[72057594037927937] received pending shutdown [56:2084:2751] 2025-03-31T15:57:07.873196Z node 56 :PIPE_CLIENT DEBUG: TClient[72057594037927937] forward result local node, try to connect [56:2084:2751] 2025-03-31T15:57:07.873582Z node 56 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [56:2084:2751] 2025-03-31T15:57:07.874791Z node 56 :PIPE_SERVER DEBUG: [72057594037927937] Accept Connect Originator# [56:2084:2751] 2025-03-31T15:57:07.875025Z node 56 :PIPE_CLIENT DEBUG: TClient[72057594037927937] connected with status OK role: Leader [56:2084:2751] 2025-03-31T15:57:07.875077Z node 56 :PIPE_CLIENT DEBUG: TClient[72057594037927937] send queued [56:2084:2751] 2025-03-31T15:57:07.875590Z node 56 :PIPE_CLIENT DEBUG: TClient[72057594037927937] push event to server [56:2084:2751] 2025-03-31T15:57:07.876007Z node 56 :PIPE_CLIENT DEBUG: TClient[72057594037927937] shutdown pipe due to pending shutdown request [56:2084:2751] 2025-03-31T15:57:07.876435Z node 56 :PIPE_CLIENT DEBUG: TClient[72057594037927937] notify reset [56:2084:2751] 2025-03-31T15:57:07.876873Z node 56 :PIPE_SERVER DEBUG: [72057594037927937] HandleSend Sender# [56:561:2269] EventType# 268697616 >> IndexBuildTest::Lock ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::ShadowDataEdgeCases [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T15:57:01.217086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T15:57:01.217186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:57:01.217220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T15:57:01.217251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T15:57:01.217296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T15:57:01.217334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T15:57:01.217386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:57:01.217470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 
604800.000000s, IsManualStartup# false 2025-03-31T15:57:01.217829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:57:01.827733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:57:01.828150Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:57:01.840928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:57:01.841323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T15:57:01.842385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T15:57:01.871966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T15:57:01.872226Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T15:57:01.872887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T15:57:01.873117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:57:01.875319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:57:01.876520Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:57:01.876589Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:57:01.876749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T15:57:01.876809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:57:01.876845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T15:57:01.876980Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T15:57:01.892709Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T15:57:02.290536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:57:02.290762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:57:02.290976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T15:57:02.291195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T15:57:02.291246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:57:02.301991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 
PathId: 1, at schemeshard: 72057594046678944 2025-03-31T15:57:02.304109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T15:57:02.306906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:57:02.308511Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T15:57:02.308818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T15:57:02.309123Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T15:57:02.327897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:57:02.328264Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:57:02.328809Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T15:57:02.350231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:57:02.350312Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:57:02.350993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:57:02.352043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T15:57:02.381806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:57:02.402247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T15:57:02.404285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T15:57:02.435048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:57:02.436619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:57:02.437390Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:57:02.448522Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T15:57:02.448945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:57:02.451215Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:57:02.460307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T15:57:02.490299Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:57:02.498642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:57:02.500210Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:57:02.500270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T15:57:02.501878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:57:02.502300Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T15:57:02.519709Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:57:02.520165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:57:02.520874Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:57:02.521278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:57:02.521990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T15:57:02.522636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:57:02.522951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T15:57:02.527358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T15:57:02.528405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:57:02.528757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T15:57:02.529417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T15:57:02.558547Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:57:02.564665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:57:02.565068Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
TxId: 109, tablet: 72075186233409548, partId: 0 2025-03-31T15:57:11.731674Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 109:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: PREPARED TxId: 109 MinStep: 5000008 MaxStep: 18446744073709551615 PrepareArriveTime: 163500 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 6642 } } 2025-03-31T15:57:11.733889Z node 2 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TConfigureParts operationId# 109:0 HandleReply TEvProposeTransactionResult, at schemeshard: 72057594046678944 message# TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: PREPARED TxId: 109 MinStep: 5000008 MaxStep: 18446744073709551615 PrepareArriveTime: 163500 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 6642 } } 2025-03-31T15:57:11.734311Z node 2 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046678944 2025-03-31T15:57:11.744461Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectProposeTransactionResults accept TEvProposeTransactionResult, shard: 72075186233409548, shardIdx: 72057594046678944:3, operationId: 109:0, left await: 0, at schemeshard: 72057594046678944 2025-03-31T15:57:11.744930Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 109:0 3 -> 128 2025-03-31T15:57:11.770861Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 109:0, at schemeshard: 72057594046678944 2025-03-31T15:57:11.772523Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 109:0, at schemeshard: 72057594046678944 2025-03-31T15:57:11.773234Z node 2 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId# 109:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:57:11.774139Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 109 ready parts: 1/1 2025-03-31T15:57:11.779442Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409548 Flags: 2 } ExecLevel: 0 TxId: 109 MinStep: 5000008 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:57:11.805191Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 109:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:109 msg type: 269090816 2025-03-31T15:57:11.806230Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 109, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 109 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 109 at step: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72075186233409548 for txId: 109 at step: 5000008 2025-03-31T15:57:11.815736Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000008, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:57:11.816881Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 109 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 8589936749 } } Step: 5000008 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 
2025-03-31T15:57:11.817249Z node 2 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId# 109:0 HandleReply TEvOperationPlan, operationId: 109:0, stepId: 5000008, at schemeshard: 72057594046678944 2025-03-31T15:57:11.822108Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 109:0 128 -> 129 2025-03-31T15:57:11.823363Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000008 2025-03-31T15:57:11.877629Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:57:11.878048Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 109, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-03-31T15:57:11.879853Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:57:11.880492Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 109, path id: 4 2025-03-31T15:57:11.881860Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 109:0, at schemeshard: 72057594046678944 2025-03-31T15:57:11.882627Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 109:0 ProgressState at tablet: 72057594046678944 2025-03-31T15:57:11.890568Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 109 2025-03-31T15:57:11.890703Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 109 2025-03-31T15:57:11.890743Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 109 2025-03-31T15:57:11.890788Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 109, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 4 2025-03-31T15:57:11.890832Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-03-31T15:57:11.890917Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 109, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 109 2025-03-31T15:57:11.969824Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 109 2025-03-31T15:57:11.996416Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: COMPLETE TxId: 109 Step: 5000008 OrderId: 109 ExecLatency: 3 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 23244 } } 2025-03-31T15:57:11.996842Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 109, tablet: 72075186233409548, partId: 0 2025-03-31T15:57:11.997382Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 109:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: COMPLETE TxId: 109 Step: 
5000008 OrderId: 109 ExecLatency: 3 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 23244 } } 2025-03-31T15:57:12.004101Z node 2 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: COMPLETE TxId: 109 Step: 5000008 OrderId: 109 ExecLatency: 3 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 23244 } } 2025-03-31T15:57:12.025458Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 679 RawX2: 8589937225 } Origin: 72075186233409548 State: 2 TxId: 109 Step: 0 Generation: 2 2025-03-31T15:57:12.025840Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 109, tablet: 72075186233409548, partId: 0 2025-03-31T15:57:12.026028Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 109:0, at schemeshard: 72057594046678944, message: Source { RawX1: 679 RawX2: 8589937225 } Origin: 72075186233409548 State: 2 TxId: 109 Step: 0 Generation: 2 2025-03-31T15:57:12.026088Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 109:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-03-31T15:57:12.026176Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 109:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 679 RawX2: 8589937225 } Origin: 72075186233409548 State: 2 TxId: 109 Step: 0 Generation: 2 2025-03-31T15:57:12.026245Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 109:0, shardIdx: 72057594046678944:3, datashard: 72075186233409548, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-31T15:57:12.026291Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 109:0, at schemeshard: 72057594046678944 2025-03-31T15:57:12.026337Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 109:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2025-03-31T15:57:12.026373Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 109:0 129 -> 240 2025-03-31T15:57:12.033013Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 109:0, at schemeshard: 72057594046678944 2025-03-31T15:57:12.033441Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 109:0, at schemeshard: 72057594046678944 2025-03-31T15:57:12.033745Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 109:0, at schemeshard: 72057594046678944 2025-03-31T15:57:12.033785Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 109:0 ProgressState 2025-03-31T15:57:12.034440Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#109:0 progress is 1/1 2025-03-31T15:57:12.034490Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 109 ready parts: 1/1 2025-03-31T15:57:12.034533Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#109:0 progress is 1/1 2025-03-31T15:57:12.034570Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 109 ready parts: 1/1 2025-03-31T15:57:12.034606Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToNotify, TxId: 109, ready parts: 1/1, is published: true 2025-03-31T15:57:12.034690Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:335:2314] message: TxId: 109 2025-03-31T15:57:12.034734Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 109 ready parts: 1/1 2025-03-31T15:57:12.034768Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 109:0 2025-03-31T15:57:12.034797Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 109:0 2025-03-31T15:57:12.034912Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-03-31T15:57:12.042066Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 109: got EvNotifyTxCompletionResult 2025-03-31T15:57:12.042405Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 109: satisfy waiter [2:788:2733] TestWaitNotification: OK eventTxId 109 |67.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardCompactionQueueTest::EnqueueSinglePartedShardWhenEnabled [GOOD] |67.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut |67.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut |67.3%| [LD] {RESULT} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut |67.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut |67.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut |67.3%| [LD] {RESULT} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut >> KqpScanArrowFormat::SingleKey [GOOD] >> KqpScanArrowFormat::JoinWithParams |67.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write |67.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write |67.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write >> TopicAutoscaling::ReadingAfterSplitTest_AutoscaleAwareSDK_AutoCommit [GOOD] >> TopicAutoscaling::ReadingAfterSplitTest_PQv1 >> PersQueueSdkReadSessionTest::ReadSessionWithExplicitlySpecifiedPartitions [GOOD] >> PersQueueSdkReadSessionTest::SettingsValidation >> TopicAutoscaling::PartitionSplit_ManySession_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionSplit_ManySession_PQv1 >> KqpReturning::ReturningWorksIndexedReplace-QueryService [GOOD] >> KqpSort::ComplexPkExclusiveSecondOptionalPredicate >> KqpNotNullColumns::InsertNotNullPg-useSink [GOOD] >> KqpNotNullColumns::FailedMultiEffects >> TBSV::ShouldLimitBlockStoreVolumeDropRate |67.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest |67.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> IndexBuildTest::Lock [GOOD] >> IndexBuildTest::IndexPartitioningIsPersisted |67.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> KqpSort::PassLimit [GOOD] >> KqpSort::OffsetPk >> KqpNewEngine::ReadDifferentColumnsPk [GOOD] >> KqpNewEngine::PushFlatmapInnerConnectionsToStageInput >> KqpNewEngine::LeftSemiJoin [GOOD] >> KqpNewEngine::LocksInRoTx >> KqpNewEngine::UpsertEmptyInput [GOOD] >> KqpNotNullColumns::AlterAddNotNullColumn >> 
TSchemeShardTopicSplitMergeTest::MargeInactivePartitions |67.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut |67.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut |67.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut |67.3%| [LD] {RESULT} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut |67.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut |67.3%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut >> TSchemeShardTopicSplitMergeTest::SplitWithWrongBoundary >> TTxDataShardMiniKQL::CrossShard_2_SwapAndCopy [GOOD] >> TTxDataShardMiniKQL::CrossShard_3_AllToOne >> KqpScanArrowFormat::AllTypesColumnsCellvec [GOOD] >> KqpScanArrowFormat::AggregateNoColumnNoRemaps >> TBSV::ShouldLimitBlockStoreVolumeDropRate [GOOD] >> TBackupCollectionTests::HiddenByFeatureFlag >> IndexBuildTest::IndexPartitioningIsPersisted [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithExplicitlySpecifiedPartitions [GOOD] >> PersQueueSdkReadSessionTest::SettingsValidation ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TBSV::ShouldLimitBlockStoreVolumeDropRate [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T15:57:30.540781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T15:57:30.540877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:57:30.540917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T15:57:30.540955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T15:57:30.540996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T15:57:30.541025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T15:57:30.541102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:57:30.541176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T15:57:30.541519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:57:30.695505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:57:30.695603Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:57:30.701339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 
2025-03-31T15:57:30.701474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T15:57:30.701641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T15:57:30.712451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T15:57:30.712700Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T15:57:30.713352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T15:57:30.713592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:57:30.716281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:57:30.717478Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:57:30.717551Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:57:30.717728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T15:57:30.717773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:57:30.717811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T15:57:30.717987Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T15:57:30.725317Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T15:57:30.904423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:57:30.904686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:57:30.904928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T15:57:30.905150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T15:57:30.905207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:57:30.908305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T15:57:30.908450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T15:57:30.908658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:57:30.908712Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T15:57:30.908743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T15:57:30.908773Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T15:57:30.911949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:57:30.912012Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:57:30.912066Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T15:57:30.915740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:57:30.915808Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:57:30.915864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:57:30.915913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T15:57:30.919198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:57:30.921564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T15:57:30.921827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T15:57:30.922763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:57:30.922905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:57:30.922982Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:57:30.923258Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T15:57:30.923305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:57:30.923454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:57:30.923545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T15:57:30.925795Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:57:30.925859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:57:30.926079Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:57:30.926118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T15:57:30.926322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:57:30.926362Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T15:57:30.926450Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:57:30.926477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:57:30.926523Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:57:30.926555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:57:30.926590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T15:57:30.926632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:57:30.926663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T15:57:30.926699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T15:57:30.926767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:57:30.926791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T15:57:30.926812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T15:57:30.928643Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:57:30.928751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:57:30.928791Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
1/1 2025-03-31T15:57:34.861669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 129 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:57:34.868731Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 24 TxId_Deprecated: 24 TabletID: 72075186233409569 Forgetting tablet 72075186233409569 2025-03-31T15:57:34.871747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 24 ShardOwnerId: 72057594046678944 ShardLocalIdx: 24, at schemeshard: 72057594046678944 2025-03-31T15:57:34.872839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 4 2025-03-31T15:57:34.874217Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 23 TxId_Deprecated: 23 TabletID: 72075186233409568 Forgetting tablet 72075186233409568 2025-03-31T15:57:34.875873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2025-03-31T15:57:34.876504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 23 ShardOwnerId: 72057594046678944 ShardLocalIdx: 23, at schemeshard: 72057594046678944 2025-03-31T15:57:34.877394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 3 2025-03-31T15:57:34.878112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2025-03-31T15:57:34.889065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 129:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:129 msg type: 269090816 2025-03-31T15:57:34.889970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 129, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 129 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 129 at step: 5000028 2025-03-31T15:57:34.898703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000028, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:57:34.899346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 129 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000028 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:57:34.899428Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropBlockStoreVolume TPropose, operationId: 129:0 HandleReply TEvOperationPlan, step: 5000028, at schemeshard: 72057594046678944 2025-03-31T15:57:34.899562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 2 2025-03-31T15:57:34.899716Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#129:0 progress is 1/1 2025-03-31T15:57:34.899757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2025-03-31T15:57:34.899796Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Part operation is done id#129:0 progress is 1/1 2025-03-31T15:57:34.899835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2025-03-31T15:57:34.899905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:57:34.899973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 1 2025-03-31T15:57:34.900016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 129, ready parts: 1/1, is published: false 2025-03-31T15:57:34.900064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2025-03-31T15:57:34.900096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 129:0 2025-03-31T15:57:34.900139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 129:0 2025-03-31T15:57:34.900270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 2 2025-03-31T15:57:34.900310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 129, publications: 2, subscribers: 0 2025-03-31T15:57:34.900342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 129, [OwnerId: 72057594046678944, LocalPathId: 1], 54 2025-03-31T15:57:34.900370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 129, [OwnerId: 72057594046678944, LocalPathId: 13], 18446744073709551615 2025-03-31T15:57:34.903346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:24 2025-03-31T15:57:34.903887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:24 tabletId 72075186233409569 2025-03-31T15:57:34.904585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:23 2025-03-31T15:57:34.904841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:23 tabletId 72075186233409568 2025-03-31T15:57:34.906981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:24 hive 72057594037968897 at ss 72057594046678944 2025-03-31T15:57:34.907393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:23 hive 72057594037968897 at ss 72057594046678944 2025-03-31T15:57:34.908189Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:57:34.908225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:57:34.908887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 13] 2025-03-31T15:57:34.910270Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:57:34.910553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 129, path id: 1 2025-03-31T15:57:34.910829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 129, path id: 13 FAKE_COORDINATOR: Erasing txId 129 2025-03-31T15:57:34.912855Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 13 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 129 2025-03-31T15:57:34.915374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 13 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 129 2025-03-31T15:57:34.915575Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 129 2025-03-31T15:57:34.915862Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 13], version: 18446744073709551615 2025-03-31T15:57:34.916115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 1 2025-03-31T15:57:34.918253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-31T15:57:34.918504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 13], at schemeshard: 72057594046678944 2025-03-31T15:57:34.919406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:57:34.920594Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 54 PathOwnerId: 72057594046678944, cookie: 129 2025-03-31T15:57:34.920673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 54 PathOwnerId: 72057594046678944, cookie: 129 2025-03-31T15:57:34.920702Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 129 2025-03-31T15:57:34.920728Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 54 2025-03-31T15:57:34.920762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:57:34.920845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 129, subscribers: 0 2025-03-31T15:57:34.921107Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 24 TxId_Deprecated: 24 2025-03-31T15:57:34.922060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 24 ShardOwnerId: 72057594046678944 ShardLocalIdx: 24, at schemeshard: 72057594046678944 2025-03-31T15:57:34.922700Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 23 TxId_Deprecated: 23 2025-03-31T15:57:34.922954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 23 ShardOwnerId: 72057594046678944 ShardLocalIdx: 23, at schemeshard: 72057594046678944 2025-03-31T15:57:34.931786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 
2025-03-31T15:57:34.944777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-03-31T15:57:34.946654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2025-03-31T15:57:34.948626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:24 2025-03-31T15:57:34.950811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:23 TestModificationResult got TxId: 129, wait until txId: 129 TestWaitNotification wait txId: 129 2025-03-31T15:57:34.961433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 129: send EvNotifyTxCompletion 2025-03-31T15:57:34.961514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 129 2025-03-31T15:57:34.962544Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 129, at schemeshard: 72057594046678944 2025-03-31T15:57:34.962668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 129: got EvNotifyTxCompletionResult 2025-03-31T15:57:34.962709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 129: satisfy waiter [1:1671:3541] TestWaitNotification: OK eventTxId 129 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::IndexPartitioningIsPersisted [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T15:57:16.569636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T15:57:16.570359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:57:16.570647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T15:57:16.570955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T15:57:16.580086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T15:57:16.580475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T15:57:16.581775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:57:16.595811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T15:57:16.619184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:57:17.816929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:57:17.817680Z node 1 :IMPORT WARN: 
Table profiles were not loaded 2025-03-31T15:57:17.862342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:57:17.863043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T15:57:17.877694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T15:57:17.963288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T15:57:17.965665Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T15:57:17.984105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T15:57:17.985521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:57:18.008520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:57:18.013479Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:57:18.013571Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:57:18.013723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T15:57:18.013771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:57:18.013803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T15:57:18.013930Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T15:57:18.080615Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T15:57:18.624130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:57:18.624399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:57:18.624630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T15:57:18.624923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T15:57:18.624988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:57:18.634685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T15:57:18.634847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T15:57:18.635127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:57:18.635208Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T15:57:18.635244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T15:57:18.635280Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T15:57:18.648079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:57:18.648158Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:57:18.648197Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T15:57:18.650446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:57:18.650497Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:57:18.650563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:57:18.650626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T15:57:18.672321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:57:18.680171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T15:57:18.680437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T15:57:18.681575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:57:18.681730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:57:18.681787Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:57:18.682076Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T15:57:18.682129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:57:18.682300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:57:18.682379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, 
LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T15:57:18.690621Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:57:18.690702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:57:18.690883Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:57:18.690921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T15:57:18.691148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:57:18.691190Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T15:57:18.691276Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:57:18.691307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:57:18.691344Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:57:18.691369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:57:18.691400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T15:57:18.691449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:57:18.691483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T15:57:18.691508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T15:57:18.691579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:57:18.691609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T15:57:18.691650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T15:57:18.694468Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:57:18.694633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:57:18.694678Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
on: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "Index" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 3 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:57:35.114090Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-31T15:57:35.114303Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Index" took 232us result status StatusSuccess 2025-03-31T15:57:35.115014Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Index" PathDescription { Self { Name: "Index" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 2 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateAlter Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 
PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "Index" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 3 MaxPartitionsCount: 3 } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:57:35.115783Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-31T15:57:35.116078Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Index/indexImplTable" took 316us result status StatusSuccess 2025-03-31T15:57:35.117094Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Index/indexImplTable" 
PathDescription { Self { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 3 MaxPartitionsCount: 3 } } SplitBoundary { KeyPrefix { Tuple { Optional { Text: "alice" } } Tuple { } } } SplitBoundary { KeyPrefix { Tuple { Optional { Text: "bob" } } Tuple { } } } TableSchemaVersion: 2 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\002\000\005\000\000\000alice\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409547 } TablePartitions { EndOfRangeKeyPrefix: "\002\000\003\000\000\000bob\000\000\000\200" IsPoint: false 
IsInclusive: false DatashardId: 72075186233409548 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 3 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TopicAutoscaling::ControlPlane_CDC_Disable [GOOD] >> TopicAutoscaling::MidOfRange [GOOD] >> TBackupCollectionTests::HiddenByFeatureFlag [GOOD] >> TBackupCollectionTests::DisallowedPath >> KqpSqlIn::ComplexKey [GOOD] >> KqpSqlIn::Dict |67.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk |67.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk |67.4%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk >> KqpNotNullColumns::FailedMultiEffects [GOOD] |67.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut |67.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut |67.4%| [LD] {RESULT} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut >> TTxDataShardMiniKQL::CrossShard_6_Local [GOOD] >> TTxDataShardMiniKQL::MemoryUsageImmediateHugeTx >> TopicAutoscaling::CDC_PartitionSplit_AutosplitByLoad [GOOD] >> TopicAutoscaling::ControlPlane_CDC >> KqpScanArrowFormat::JoinWithParams [GOOD] >> KqpScanArrowInChanels::AggregateCountStar >> TBackupCollectionTests::DisallowedPath [GOOD] >> TBackupCollectionTests::ParallelCreate >> TSchemeShardTopicSplitMergeTest::SplitWithWrongBoundary [GOOD] >> TSchemeShardTopicSplitMergeTest::MargeInactivePartitions [GOOD] >> TSchemeShardTopicSplitMergeTest::EnableSplitMerge ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNotNullColumns::FailedMultiEffects [GOOD] Test command err: Trying to start YDB, gRPC: 3321, MsgBus: 2075 2025-03-31T15:55:53.015810Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488002980954016311:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:55:53.016251Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/0l5c/000ad5/r3tmp/tmp29M83c/pdisk_1.dat 2025-03-31T15:55:54.647020Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:55:54.647130Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:55:54.653231Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3321, node 1 2025-03-31T15:55:55.287127Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:55:56.648009Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:55:56.652961Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T15:55:56.658682Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T15:55:56.658774Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:55:56.813296Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:55:56.813313Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:55:56.813319Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:55:56.814148Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T15:55:58.011453Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488002980954016311:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:55:58.011758Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:2075 TClient is connected to server localhost:2075 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:56:04.084896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T15:56:04.232602Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T15:56:10.961659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T15:56:10.961686Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:56:11.961635Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003058263428240:2358], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:56:11.961712Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:56:12.361965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T15:56:12.507420Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003062558395639:2367], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:56:12.507524Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:56:12.511545Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003062558395644:2370], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:56:12.519564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-31T15:56:12.544719Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488003062558395646:2371], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-31T15:56:12.601395Z node 1 :TX_PROXY ERROR: Actor# [1:7488003062558395697:2447] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:56:12.923561Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488003062558395745:2384], status: PRECONDITION_FAILED, issues:
: Error: Type annotation, code: 1030
:1:13: Error: At function: KiWriteTable!
:1:13: Error: Missing key column in input: Key for table: /Root/TestInsertNotNullPk, code: 2029 2025-03-31T15:56:12.924930Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDZkNjNiMWEtMjgxZWJhMS1hNGI3NTM0OS1mYmE5NThkYQ==, ActorId: [1:7488003058263428234:2355], ActorState: ExecuteState, TraceId: 01jqpent739qz1zvxnpj7kk8r6, ReplyQueryCompileError, status PRECONDITION_FAILED remove tx with tx_id: 2025-03-31T15:56:12.969413Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488003062558395754:2388], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:13: Error: At function: KiWriteTable!
:1:47: Error: Failed to convert type: Struct<'Key':Null,'Value':String> to Struct<'Key':Uint64,'Value':String?>
:1:47: Error: Failed to convert 'Key': Null to Uint64
:1:47: Error: Failed to convert input columns types to scheme types, code: 2031 2025-03-31T15:56:12.970720Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDZkNjNiMWEtMjgxZWJhMS1hNGI3NTM0OS1mYmE5NThkYQ==, ActorId: [1:7488003058263428234:2355], ActorState: ExecuteState, TraceId: 01jqpent8d21rr621dt960d1t4, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 3062, MsgBus: 3938 2025-03-31T15:56:15.254150Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488003075253796984:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:56:15.261104Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ad5/r3tmp/tmp7Ohohz/pdisk_1.dat 2025-03-31T15:56:16.379788Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:56:16.382277Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:56:16.402465Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:56:16.402553Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:56:16.403727Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3062, node 2 2025-03-31T15:56:16.806226Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:56:16.806248Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:56:16.806254Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:56:16.806375Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3938 2025-03-31T15:56:20.257144Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488003075253796984:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:56:20.257363Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:3938 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:56:23.106856Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:56:31 ... urityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:57:08.640507Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:57:15.144412Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7488003333627917068:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:15.144516Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:15.170564Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T15:57:15.315402Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7488003333627917168:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:15.315534Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:15.323794Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7488003333627917173:2359], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:15.360891Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-31T15:57:15.423292Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7488003333627917175:2360], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-31T15:57:15.521611Z node 5 :TX_PROXY ERROR: Actor# [5:7488003333627917227:2427] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:57:17.540470Z node 5 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [5:7488003342217851906:2384], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:1:13: Error: At function: KiWriteTable!
:1:13: Error: Missing not null column in input: Value. All not null columns should be initialized, code: 2032 2025-03-31T15:57:17.541868Z node 5 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=5&id=NDM1ZWU1YjctZTIyYzhmOGQtYjA0MjhiNWMtNWZkNzE5ZTM=, ActorId: [5:7488003333627917063:2344], ActorState: ExecuteState, TraceId: 01jqpeqs8b43m9sjjpjcgkg4rc, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: 2025-03-31T15:57:18.232373Z node 5 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T15:57:18.232410Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:57:18.321012Z node 5 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TKqpLiteralExecuter, TKqpEnsure failed. 2025-03-31T15:57:18.333181Z node 5 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [5:7488003342217851917:2388], status: BAD_REQUEST, issues:
: Error: Execution, code: 1060
: Error: Tried to insert NULL value into NOT NULL column: Value, code: 2031 2025-03-31T15:57:18.336856Z node 5 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=5&id=NDM1ZWU1YjctZTIyYzhmOGQtYjA0MjhiNWMtNWZkNzE5ZTM=, ActorId: [5:7488003333627917063:2344], ActorState: ExecuteState, TraceId: 01jqpeqse2cyty0v0tfxaxms3h, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: Trying to start YDB, gRPC: 14252, MsgBus: 62644 2025-03-31T15:57:25.817000Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7488003373974723062:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:57:25.841338Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ad5/r3tmp/tmpKMQc6B/pdisk_1.dat 2025-03-31T15:57:28.707714Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:57:29.294115Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:57:29.547976Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:57:29.548121Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:57:29.574411Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14252, node 6 2025-03-31T15:57:30.331287Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:57:30.331309Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:57:30.331318Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:57:30.331657Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62644 2025-03-31T15:57:30.819364Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7488003373974723062:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:57:30.819455Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:62644 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:57:31.143817Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:57:35.669120Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488003416924396697:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:35.669237Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:35.702361Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T15:57:35.795990Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488003416924396801:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:35.796207Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:35.796748Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488003416924396806:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:35.802068Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-31T15:57:35.821315Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7488003416924396808:2353], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-31T15:57:35.922829Z node 6 :TX_PROXY ERROR: Actor# [6:7488003416924396861:2412] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:57:36.122995Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:7488003421219364201:2365], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:3:29: Error: At function: KiWriteTable!
:3:55: Error: Failed to convert type: Struct<'Key':Int32,'Value':Null> to Struct<'Key':Uint64,'Value':String>
:3:55: Error: Failed to convert 'Value': Null to String
:3:55: Error: Failed to convert input columns types to scheme types, code: 2031 2025-03-31T15:57:36.123283Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=OGU3Y2ViNWUtZTFmNGIyZmYtNjFkOTcyZGEtYjIyOWNjMDY=, ActorId: [6:7488003416924396679:2337], ActorState: ExecuteState, TraceId: 01jqperbd3em2htt9krn8y2ze8, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-03-31T15:57:36.193057Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::MidOfRange [GOOD] Test command err: 2025-03-31T15:55:32.282866Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488002887915595182:2079];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:55:32.291722Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T15:55:33.941822Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001943/r3tmp/tmppvuS2I/pdisk_1.dat 2025-03-31T15:55:35.623253Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:55:36.302181Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:55:36.302307Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:55:36.343130Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:55:36.486558Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19928, node 1 2025-03-31T15:55:37.128539Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/0l5c/001943/r3tmp/yandex823wr6.tmp 2025-03-31T15:55:37.128562Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/0l5c/001943/r3tmp/yandex823wr6.tmp 2025-03-31T15:55:37.128771Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/0l5c/001943/r3tmp/yandex823wr6.tmp 2025-03-31T15:55:37.128868Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T15:55:37.283346Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488002887915595182:2079];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:55:37.283552Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:55:37.862756Z INFO: TTestServer started on Port 13859 GrpcPort 19928 TClient is connected to server localhost:13859 PQClient connected to localhost:19928 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:55:41.366564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:55:41.394615Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:55:41.407840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T15:55:41.540037Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:55:41.549526Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2025-03-31T15:55:47.414297Z node 1 :KQP_PROXY ERROR: TraceId: "01jqpemvmq3f3d2x604dmd8j0j", Request deadline has expired for 0.823949s seconds 2025-03-31T15:55:47.668480Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488002952340105473:2359], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:55:47.668949Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:55:47.684653Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488002952340105485:2362], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:55:47.689852Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-03-31T15:55:47.732568Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488002952340105487:2363], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-03-31T15:55:47.809473Z node 1 :TX_PROXY ERROR: Actor# [1:7488002952340105553:2486] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:55:48.622082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T15:55:48.641940Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488002952340105561:2370], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T15:55:48.647167Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTc3YmYwY2EtZWI3MTgwMTgtYzIwMDA4NTktNTBmM2ZlZDg=, ActorId: [1:7488002952340105455:2357], ActorState: ExecuteState, TraceId: 01jqpen1amd4rdy8b2jn36ykbv, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T15:55:48.650689Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T15:55:48.721619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T15:55:49.275653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7488002965225007789:2698] 2025-03-31T15:55:51.103569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T15:55:51.103593Z node 1 :IMPORT WARN: Table profiles were not loaded === CheckClustersList. 
Ok 2025-03-31T15:55:56.191549Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-03-31T15:55:56.395569Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:7488002900800497482:2201]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T15:55:56.395836Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T15:55:56.396162Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:7488002900800497482:2201], Recipient [1:7488002900800497482:2201]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T15:55:56.396178Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T15:55:56.802556Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-03-31T15:55:56.805018Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7488002990994811746:2798], Recipient [1:7488002900800497482:2201]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T15:55:56.805050Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T15:55:56.805064Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-03-31T15:55:56.805100Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7488002990994811742:2795], Recipient [1:7488002900800497482:2201]: {TEvModifySchemeTransaction txid# 281474976710671 TabletId# 72057594046644480} 2025-03-31T15:55:56.805111Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-03-31T15:55:57.013341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "autoscalit-topic" TotalGroupCount: 5 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } PartitionStrategy { MinPartitionCount: 5 Max ... 
37893] TxId 281474976710673, State EXECUTED 2025-03-31T15:57:35.450678Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710673 2025-03-31T15:57:35.450690Z node 5 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-31T15:57:35.450693Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037893] TxId 281474976710673 State EXECUTED FrontTxId 281474976710673 2025-03-31T15:57:35.450713Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037893] TPersQueue::SendEvReadSetAckToSenders 2025-03-31T15:57:35.450731Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037893] TxId 281474976710673, NewState WAIT_RS_ACKS 2025-03-31T15:57:35.450750Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037893] TxId 281474976710673 moved from EXECUTED to WAIT_RS_ACKS 2025-03-31T15:57:35.450781Z node 5 :PERSQUEUE DEBUG: [TxId: 281474976710673] PredicateAcks: 0/0 2025-03-31T15:57:35.450792Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037893] HaveAllRecipientsReceive 1, AllSupportivePartitionsHaveBeenDeleted 1 2025-03-31T15:57:35.450806Z node 5 :PERSQUEUE DEBUG: [TxId: 281474976710673] PredicateAcks: 0/0 2025-03-31T15:57:35.450823Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037893] add an TxId 281474976710673 to the list for deletion 2025-03-31T15:57:35.450843Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037893] TxId 281474976710673, NewState DELETING 2025-03-31T15:57:35.450867Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037893] delete key for TxId 281474976710673 2025-03-31T15:57:35.450873Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710673 2025-03-31T15:57:35.450883Z node 5 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-31T15:57:35.450917Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037893] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-03-31T15:57:35.450937Z node 5 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [5:7488003416293842125:2513] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710673 at schemeshard: 72057594046644480 2025-03-31T15:57:35.450973Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 270794756, Sender [5:7488003416293842151:2515], Recipient [5:7488003416293842151:2515]: NKikimr::TEvKeyValue::TEvCollect 2025-03-31T15:57:35.451125Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 270794752, Sender [5:7488003416293842151:2515], Recipient [5:7488003416293842151:2515]: NKikimrClient.TKeyValueRequest Cookie: 5 CmdDeleteRange { Range { From: "tx_00000281474976710673" IncludeFrom: true To: "tx_00000281474976710673" IncludeTo: true } } CmdWrite { Key: "_txinfo" Value: "\020\350\326\260\347\3362\030\221\200\200\200\200\200@(\240\215\0060\350\326\260\347\33628\221\200\200\200\200\200@" StorageChannel: INLINE } 2025-03-31T15:57:35.451199Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [5:7488003416293842135:2881], Recipient [5:7488003321804560334:2149]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-03-31T15:57:35.451219Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-03-31T15:57:35.451239Z node 5 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046644480 2025-03-31T15:57:35.451251Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 270794753, Sender [5:7488003416293842275:2515], Recipient [5:7488003416293842151:2515]: 
NKikimr::TEvKeyValue::TEvIntermediate 2025-03-31T15:57:35.451551Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 270794760, Sender [5:7488003416293842274:2525], Recipient [5:7488003416293842151:2515]: NKikimr::TEvKeyValue::TEvCompleteGC 2025-03-31T15:57:35.451835Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 270795264, Sender [5:7488003416293842151:2515], Recipient [5:7488003416293842151:2515]: NKikimrClient.TResponse Status: 1 Cookie: 5 DeleteRangeResult { Status: 0 } WriteResult { Status: 0 StatusFlags: 1 } 2025-03-31T15:57:35.451851Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvKeyValue::TEvResponse 2025-03-31T15:57:35.451866Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037893] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-03-31T15:57:35.451881Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037893] Try execute txs with state DELETING 2025-03-31T15:57:35.451895Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037893] TxId 281474976710673, State DELETING 2025-03-31T15:57:35.451911Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037893] delete TxId 281474976710673 2025-03-31T15:57:35.452037Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 270794756, Sender [5:7488003416293842151:2515], Recipient [5:7488003416293842151:2515]: NKikimr::TEvKeyValue::TEvCollect 2025-03-31T15:57:35.453977Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 270794760, Sender [5:7488003416293842278:2526], Recipient [5:7488003416293842151:2515]: NKikimr::TEvKeyValue::TEvCompleteGC 2025-03-31T15:57:35.470938Z node 5 :PQ_READ_PROXY DEBUG: new alter topic request 2025-03-31T15:57:35.478642Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [5:7488003416293842287:2965], Recipient [5:7488003321804560334:2149]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T15:57:35.478696Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T15:57:35.478718Z node 5 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-03-31T15:57:35.478769Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [5:7488003416293842284:2963], Recipient [5:7488003321804560334:2149]: {TEvModifySchemeTransaction txid# 281474976710674 TabletId# 72057594046644480} 2025-03-31T15:57:35.478793Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-03-31T15:57:35.481548Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "Root/origin/feed" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "streamImpl" PathId: 15 TotalGroupCount: 3 PartitionPerTablet: 2 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 } TopicName: "feed" TopicPath: "/Root/origin/feed/streamImpl" YdbDatabasePath: "/Root" MeteringMode: METERING_MODE_REQUEST_UNITS PartitionStrategy { MinPartitionCount: 3 MaxPartitionCount: 107 ScaleThresholdSeconds: 30 PartitionStrategyType: DISABLED } } Partitions { PartitionId: 0 TabletId: 72075186224037893 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186224037894 NextPartitionId: 1 } ApplyIf { PathId: 15 PathVersion: 2 } AllowAccessToPrivatePaths: true } TxId: 281474976710674 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-03-31T15:57:35.481956Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ 
Propose, path: Root/origin/feed/streamImpl, pathId: [OwnerId: 72057594046644480, LocalPathId: 15], opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T15:57:35.482132Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710674:1, propose status:StatusInvalidParameter, reason: Can`t disable auto partitioning., at schemeshard: 72057594046644480 2025-03-31T15:57:35.482363Z node 5 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-31T15:57:35.482904Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710674, response: Status: StatusInvalidParameter Reason: "Can`t disable auto partitioning." TxId: 281474976710674 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2025-03-31T15:57:35.483358Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710674, database: /Root, subject: root@builtin, status: StatusInvalidParameter, reason: Can`t disable auto partitioning., operation: ALTER PERSISTENT QUEUE, path: Root/origin/feed/streamImpl 2025-03-31T15:57:35.483414Z node 5 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-31T15:57:35.483619Z node 5 :TX_PROXY ERROR: Actor# [5:7488003416293842284:2963] txid# 281474976710674, issues: { message: "Can`t disable auto partitioning." severity: 1 } 2025-03-31T15:57:35.485579Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [5:7488003416293842287:2965], Recipient [5:7488003321804560334:2149]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-03-31T15:57:35.485611Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-03-31T15:57:35.485628Z node 5 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046644480 2025-03-31T15:57:35.542120Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7488003416293842151:2515], Partition 0, Sender [0:0:0], Recipient [5:7488003416293842238:2521], Cookie: 0 2025-03-31T15:57:35.542218Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7488003416293842238:2521]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-31T15:57:35.542265Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-31T15:57:35.542329Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-03-31T15:57:35.542435Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-31T15:57:35.542463Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-31T15:57:35.542495Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-03-31T15:57:35.556952Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [5:7488003321804560334:2149]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T15:57:35.557006Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T15:57:35.557083Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [5:7488003321804560334:2149], Recipient [5:7488003321804560334:2149]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T15:57:35.557107Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T15:57:35.642610Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7488003416293842151:2515], Partition 0, Sender [0:0:0], Recipient [5:7488003416293842238:2521], Cookie: 0 2025-03-31T15:57:35.642696Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7488003416293842238:2521]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-31T15:57:35.642733Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-31T15:57:35.642787Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-03-31T15:57:35.642862Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-31T15:57:35.642883Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-31T15:57:35.642909Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 >> TopicAutoscaling::Simple_PQv1 [GOOD] >> TopicAutoscaling::ReadingAfterSplitTest_PreferedPartition_PQv1 >> TTabletPipeTest::TestSendAfterOpen >> TBackupCollectionTests::CreateAbsolutePath ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::SplitWithWrongBoundary [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T15:57:35.428333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T15:57:35.428429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:57:35.428475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T15:57:35.428511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T15:57:35.428569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T15:57:35.428615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T15:57:35.428690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:57:35.428834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T15:57:35.429190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:57:35.531453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:57:35.531567Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:57:35.552574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:57:35.552698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T15:57:35.552898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T15:57:35.596148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T15:57:35.596483Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T15:57:35.597250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T15:57:35.597579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:57:35.608002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:57:35.609603Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:57:35.609713Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:57:35.609995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T15:57:35.610061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:57:35.610117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T15:57:35.610302Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T15:57:35.629002Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T15:57:35.799256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:57:35.799658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:57:35.799881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T15:57:35.800150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T15:57:35.800234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:57:35.804104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T15:57:35.804323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T15:57:35.804551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:57:35.804634Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T15:57:35.804679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T15:57:35.804720Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T15:57:35.809187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:57:35.809273Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:57:35.809333Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T15:57:35.813293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:57:35.813371Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 
72057594046678944 2025-03-31T15:57:35.813429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:57:35.813495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T15:57:35.825427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:57:35.828507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T15:57:35.828743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T15:57:35.829993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:57:35.830181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:57:35.830251Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:57:35.830613Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T15:57:35.830684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:57:35.830887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:57:35.832493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T15:57:35.864807Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:57:35.864890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:57:35.865110Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:57:35.865152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T15:57:35.865390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:57:35.865446Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T15:57:35.865556Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:57:35.865597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:57:35.865638Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:57:35.865673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:57:35.865711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T15:57:35.865762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:57:35.865801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T15:57:35.865852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T15:57:35.865963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:57:35.866006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T15:57:35.866049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T15:57:35.870779Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:57:35.870954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:57:35.870998Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... chemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:57:37.318424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 105, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Split boundary is empty, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-03-31T15:57:37.318696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-03-31T15:57:37.318744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-03-31T15:57:37.323070Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-03-31T15:57:37.323529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-03-31T15:57:37.323711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [1:639:2562] TestWaitNotification: OK eventTxId 105 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "\001" } TestModificationResults wait txId: 106 2025-03-31T15:57:37.338841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "\001" } } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:57:37.340242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 106:0, at 
schemeshard: 72057594046678944 2025-03-31T15:57:37.342537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 106:1, propose status:StatusInvalidParameter, reason: Split boundary less or equals FromBound of partition: '01' <= '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54', at schemeshard: 72057594046678944 2025-03-31T15:57:37.353213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 106, response: Status: StatusInvalidParameter Reason: "Split boundary less or equals FromBound of partition: \'01\' <= \'55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54\'" TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:57:37.353395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 106, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Split boundary less or equals FromBound of partition: '01' <= '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54', operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-03-31T15:57:37.353672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2025-03-31T15:57:37.353711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2025-03-31T15:57:37.354111Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2025-03-31T15:57:37.354209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-03-31T15:57:37.354392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:646:2569] TestWaitNotification: OK eventTxId 106 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "UUUUUUUUUUUUUUUT" } TestModificationResults wait txId: 107 2025-03-31T15:57:37.369202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "UUUUUUUUUUUUUUUT" } } } TxId: 107 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:57:37.369458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 107:0, at schemeshard: 72057594046678944 2025-03-31T15:57:37.369709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 107:1, propose status:StatusInvalidParameter, reason: Split boundary less or equals FromBound of partition: '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54' <= '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54', at schemeshard: 72057594046678944 2025-03-31T15:57:37.372734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 107, response: Status: StatusInvalidParameter Reason: "Split boundary less or equals FromBound of partition: \'55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54\' <= \'55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54\'" TxId: 107 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:57:37.372976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 107, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Split boundary less or equals FromBound of partition: '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54' <= '55 
55 55 55 55 55 55 55 55 55 55 55 55 55 55 54', operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 107, wait until txId: 107 TestWaitNotification wait txId: 107 2025-03-31T15:57:37.373320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: send EvNotifyTxCompletion 2025-03-31T15:57:37.373367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 107 2025-03-31T15:57:37.373829Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 107, at schemeshard: 72057594046678944 2025-03-31T15:57:37.373966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2025-03-31T15:57:37.374009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:653:2576] TestWaitNotification: OK eventTxId 107 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "\255" } TestModificationResults wait txId: 108 2025-03-31T15:57:37.377283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "\255" } } } TxId: 108 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:57:37.377498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 108:0, at schemeshard: 72057594046678944 2025-03-31T15:57:37.378593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 108:1, propose status:StatusInvalidParameter, reason: Split boundary greate or equals ToBound of partition: 'AD' >= 'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9' (FromBound is '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54'), at schemeshard: 72057594046678944 2025-03-31T15:57:37.393275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 108, response: Status: StatusInvalidParameter Reason: "Split boundary greate or equals ToBound of partition: \'AD\' >= \'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9\' (FromBound is \'55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54\')" TxId: 108 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:57:37.394267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 108, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Split boundary greate or equals ToBound of partition: 'AD' >= 'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9' (FromBound is '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54'), operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 2025-03-31T15:57:37.398528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: send EvNotifyTxCompletion 2025-03-31T15:57:37.398874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 108 2025-03-31T15:57:37.401805Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 108, at schemeshard: 72057594046678944 2025-03-31T15:57:37.402485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2025-03-31T15:57:37.403000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- 
TTxNotificationSubscriber for txId 108: satisfy waiter [1:660:2583] TestWaitNotification: OK eventTxId 108 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } TestModificationResults wait txId: 109 2025-03-31T15:57:37.418305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } } TxId: 109 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:57:37.421728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 109:0, at schemeshard: 72057594046678944 2025-03-31T15:57:37.422827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 109:1, propose status:StatusInvalidParameter, reason: Split boundary greate or equals ToBound of partition: 'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9' >= 'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9' (FromBound is '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54'), at schemeshard: 72057594046678944 2025-03-31T15:57:37.425594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 109, response: Status: StatusInvalidParameter Reason: "Split boundary greate or equals ToBound of partition: \'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9\' >= \'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9\' (FromBound is \'55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54\')" TxId: 109 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:57:37.425757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 109, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Split boundary greate or equals ToBound of partition: 'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9' >= 'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9' (FromBound is '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54'), operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 109, wait until txId: 109 TestWaitNotification wait txId: 109 2025-03-31T15:57:37.426144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 109: send EvNotifyTxCompletion 2025-03-31T15:57:37.426192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 109 2025-03-31T15:57:37.426675Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 109, at schemeshard: 72057594046678944 2025-03-31T15:57:37.426789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 109: got EvNotifyTxCompletionResult 2025-03-31T15:57:37.426848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 109: satisfy waiter [1:667:2590] TestWaitNotification: OK eventTxId 109 >> TBackupCollectionTests::ParallelCreate [GOOD] >> TBackupCollectionTests::Drop >> TTabletPipeTest::TestSendAfterOpen [GOOD] >> KqpNotNullColumns::UpdateTable_UniqIndex [GOOD] >> KqpNotNullColumns::UpdateTable_UniqIndexPg |67.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut |67.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut |67.4%| [LD] {RESULT} 
$(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut >> TTxDataShardMiniKQL::MemoryUsageImmediateHugeTx [GOOD] |67.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestSendAfterOpen [GOOD] >> TBackupCollectionTests::Drop [GOOD] >> TBackupCollectionTests::DropTwice |67.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record |67.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record |67.4%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::MemoryUsageImmediateHugeTx [GOOD] Test command err: Leader for TabletID 9437184 is [0:0:0] sender: [1:114:2057] recipient: [1:108:2140] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [1:114:2057] recipient: [1:108:2140] Leader for TabletID 9437184 is [1:131:2154] sender: [1:132:2057] recipient: [1:108:2140] 2025-03-31T15:55:26.261503Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvBoot 2025-03-31T15:55:26.301333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:55:26.301405Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:55:26.308127Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvRestored 2025-03-31T15:55:26.308520Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:131:2154] 2025-03-31T15:55:26.308748Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:55:26.349317Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T15:55:26.365621Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:55:26.365927Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T15:55:26.367800Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-03-31T15:55:26.367897Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-03-31T15:55:26.367963Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-03-31T15:55:26.368406Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T15:55:26.368550Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T15:55:26.368676Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:194:2154] in generation 2 Leader for TabletID 9437184 is [1:131:2154] sender: [1:209:2057] recipient: [1:14:2061] 2025-03-31T15:55:26.439888Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T15:55:26.482362Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-03-31T15:55:26.482585Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T15:55:26.482738Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:214:2212] 2025-03-31T15:55:26.482779Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 
2025-03-31T15:55:26.482832Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-03-31T15:55:26.482867Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T15:55:26.483023Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T15:55:26.483073Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T15:55:26.483405Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-03-31T15:55:26.483549Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-03-31T15:55:26.483724Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-31T15:55:26.483773Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-03-31T15:55:26.483810Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-03-31T15:55:26.483847Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-31T15:55:26.483881Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-31T15:55:26.483911Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-03-31T15:55:26.483948Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T15:55:26.484043Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:210:2209], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T15:55:26.484114Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T15:55:26.484189Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:208:2208], serverId# [1:210:2209], sessionId# [0:0:0] 2025-03-31T15:55:26.487075Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:131:2154]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-03-31T15:55:26.487190Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T15:55:26.487308Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-31T15:55:26.487482Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-03-31T15:55:26.487536Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-03-31T15:55:26.487584Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-03-31T15:55:26.487638Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-31T15:55:26.487686Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-03-31T15:55:26.487733Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-03-31T15:55:26.487762Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-03-31T15:55:26.488133Z node 1 
:TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-03-31T15:55:26.488190Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-03-31T15:55:26.488248Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-03-31T15:55:26.488285Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-03-31T15:55:26.488336Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-03-31T15:55:26.488369Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-03-31T15:55:26.488410Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-03-31T15:55:26.488441Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-03-31T15:55:26.488468Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-03-31T15:55:26.507731Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-03-31T15:55:26.507809Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-03-31T15:55:26.507844Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-03-31T15:55:26.507900Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-03-31T15:55:26.507983Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-03-31T15:55:26.508517Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:220:2218], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T15:55:26.508565Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T15:55:26.508736Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:219:2217], serverId# [1:220:2218], sessionId# [0:0:0] 2025-03-31T15:55:26.509195Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:131:2154]: {TEvPlanStep step# 2 MediatorId# 0 TabletID 9437184} 2025-03-31T15:55:26.509399Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-31T15:55:26.509878Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit WaitForPlan 2025-03-31T15:55:26.509922Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-03-31T15:55:26.509960Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit WaitForPlan 2025-03-31T15:55:26.510003Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit PlanQueue 2025-03-31T15:55:26.517204Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 2 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 2 MediatorID: 0 TabletID: 9437184 } 2025-03-31T15:55:26.517301Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T15:55:26.517617Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T15:55:26.517677Z node 1 :TX_DATASHARD TRACE: StateWork, processing event 
TEvPrivate::TEvProgressTransaction 2025-03-31T15:55:26.517742Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-31T15:55:26.517784Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-03-31T15:55:26.517818Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-31T15:55:26.517917Z node 1 :TX_DATASHARD DEBUG: Found ready operation [2:1] in PlanQueue unit at 9437184 2025-03-31T15:55:26.517953Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit PlanQueue 2025-03-31T15:55:26.518000Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-03-31T15:55:26.518052Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit PlanQueue 2025-03-31T15:55:26.518105Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit LoadTxDetails 2025-03-31T15:55:26.518143Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit LoadTxDetails 2025-03-31T15:55:26.518313Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 2:1 keys extracted: 0 2025-03-31T15:55:26.518345Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-03-31T15:55:26.518380Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit LoadTxDetails 2025-03-31T15:55:26.518411Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-03-31T15:55:26.518434Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit ProtectSchemeEchoes 2025-03-31T15:55:26.518510Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-31T15:55:26.518537Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-03-31T15:55:26.518568Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-31T15:55:26.518602Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit BuildAndWaitDependencies 2025-03-31T15:55:26.518657Z node 1 :TX_DATASHARD TRACE: Operation [2:1] is the new logically complete end at 9437184 2025-03-31T15:55:26.518695Z node 1 :TX_DATASHARD TRAC ... 
:39.032481Z node 23 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184
2025-03-31T15:57:39.032636Z node 23 :TX_DATASHARD TRACE: StateWork, received event# 269877760, Sender [23:279:2263], Recipient [23:233:2226]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 4200 Status: OK ServerId: [23:283:2267] Leader: 1 Dead: 0 Generation: 2 VersionInfo: }
2025-03-31T15:57:39.032685Z node 23 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected
2025-03-31T15:57:39.032814Z node 23 :TX_DATASHARD TRACE: StateWork, received event# 269552132, Sender [23:122:2148], Recipient [23:233:2226]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 1
2025-03-31T15:57:39.032860Z node 23 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChangedResult
2025-03-31T15:57:39.032910Z node 23 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 1 datashard 9437184 state Ready
2025-03-31T15:57:39.032992Z node 23 :TX_DATASHARD DEBUG: 9437184 Got TEvSchemaChangedResult from SS at 9437184
2025-03-31T15:57:39.058741Z node 23 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [23:279:2263], Recipient [23:233:2226]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 4200 ClientId: [23:279:2263] ServerId: [23:283:2267] }
2025-03-31T15:57:39.058829Z node 23 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed
2025-03-31T15:57:39.098172Z node 23 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [23:290:2272], Recipient [23:233:2226]: NKikimr::TEvTabletPipe::TEvServerConnected
2025-03-31T15:57:39.098271Z node 23 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected
2025-03-31T15:57:39.098346Z node 23 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [23:288:2271], serverId# [23:290:2272], sessionId# [0:0:0]
2025-03-31T15:57:39.098545Z node 23 :TX_DATASHARD TRACE: StateWork, received event# 268830214, Sender [23:287:2270], Recipient [23:233:2226]: NKikimrTabletBase.TEvGetCounters
2025-03-31T15:57:39.151650Z node 23 :TX_DATASHARD TRACE: StateWork, received event# 269551617, Sender [23:99:2134], Recipient [23:233:2226]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 99 RawX2: 98784249942 }
2025-03-31T15:57:39.151768Z node 23 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvGetShardState
2025-03-31T15:57:39.152313Z node 23 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [23:292:2274], Recipient [23:233:2226]: NKikimr::TEvTabletPipe::TEvServerConnected
2025-03-31T15:57:39.152371Z node 23 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected
2025-03-31T15:57:39.152447Z node 23 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [23:291:2273], serverId# [23:292:2274], sessionId# [0:0:0]
2025-03-31T15:57:39.152780Z node 23 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [23:99:2134], Recipient [23:233:2226]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 99 RawX2: 98784249942 } TxBody: "\032\324\002\037\002\006Arg\005\205\n\205\000\205\004?\000\205\002\202\0047\034MyReads MyWrites\205\004?\000\206\202\024Reply\024Write?\000?\000 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\000\005?\004?\014\005?\002)\211\006\202\203\005\004\213\002\203\004\205\002\203\004\01057$UpdateRow\000\003?\016 h\020\000\000\000\000\000\000\r\000\000\000\000\000\000\000\013?\022\003?\020T\001\005?\026)\211\n?\024\206\203\004?\024? ?\024\203\004\020Fold\000)\211\002?\"\206? \034Collect\000)\211\006?(? \203\004\203\0024ListFromRange\000\003? \000\003?,\003\022z\003?.\004\007\010\000\n\003?\024\000)\251\000? \002\000\004)\251\000?\024\002\000\002)\211\006?$\203\005@? ?\024\030Invoke\000\003?F\006Add?@?D\001\006\002\014\000\007\016\000\003\005?\010?\014\006\002?\006?R\000\003?\014?\014\037/ \0018\000" TxId: 2 ExecLevel: 0 Flags: 0
2025-03-31T15:57:39.152837Z node 23 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction
2025-03-31T15:57:39.152982Z node 23 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184
2025-03-31T15:57:39.154165Z node 23 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit CheckDataTx
2025-03-31T15:57:39.154264Z node 23 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed
2025-03-31T15:57:39.154325Z node 23 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit CheckDataTx
2025-03-31T15:57:39.154377Z node 23 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit BuildAndWaitDependencies
2025-03-31T15:57:39.154451Z node 23 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit BuildAndWaitDependencies
2025-03-31T15:57:39.154505Z node 23 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min}
2025-03-31T15:57:39.161280Z node 23 :TX_DATASHARD TRACE: Activated operation [0:2] at 9437184
2025-03-31T15:57:39.161437Z node 23 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed
2025-03-31T15:57:39.161491Z node 23 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit BuildAndWaitDependencies
2025-03-31T15:57:39.161549Z node 23 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit ExecuteDataTx
2025-03-31T15:57:39.161597Z node 23 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx
2025-03-31T15:57:39.161673Z node 23 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min}
2025-03-31T15:57:39.161780Z node 23 :TX_DATASHARD TRACE: Operation [0:2] at 9437184 requested 132374 more memory
2025-03-31T15:57:39.161843Z node 23 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Restart
2025-03-31T15:57:39.162339Z node 23 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184
2025-03-31T15:57:39.162411Z node 23 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx
2025-03-31T15:57:39.162493Z node 23 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min}
2025-03-31T15:57:39.163607Z node 23 :TX_DATASHARD TRACE: Operation [0:2] at 9437184 exceeded memory limit 132502 and requests 1060016 more for the next try
2025-03-31T15:57:39.163818Z node 23 :TX_DATASHARD DEBUG: tx 2 released its data
2025-03-31T15:57:39.163879Z node 23 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Restart
2025-03-31T15:57:39.164175Z node 23 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184
2025-03-31T15:57:39.164213Z node 23 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx
2025-03-31T15:57:39.165213Z node 23 :TX_DATASHARD DEBUG: tx 2 at 9437184 restored its data
2025-03-31T15:57:39.165278Z node 23 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min}
2025-03-31T15:57:39.165871Z node 23 :TX_DATASHARD TRACE: Operation [0:2] at 9437184 exceeded memory limit 1192518 and requests 9540144 more for the next try
2025-03-31T15:57:39.166022Z node 23 :TX_DATASHARD DEBUG: tx 2 released its data
2025-03-31T15:57:39.166066Z node 23 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Restart
2025-03-31T15:57:39.166296Z node 23 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184
2025-03-31T15:57:39.166336Z node 23 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx
2025-03-31T15:57:39.166991Z node 23 :TX_DATASHARD DEBUG: tx 2 at 9437184 restored its data
2025-03-31T15:57:39.167041Z node 23 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min}
2025-03-31T15:57:39.167688Z node 23 :TX_DATASHARD TRACE: Operation [0:2] at 9437184 exceeded memory limit 10732662 and requests 85861296 more for the next try
2025-03-31T15:57:39.167809Z node 23 :TX_DATASHARD DEBUG: tx 2 released its data
2025-03-31T15:57:39.167849Z node 23 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Restart
2025-03-31T15:57:39.168035Z node 23 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184
2025-03-31T15:57:39.168069Z node 23 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx
2025-03-31T15:57:39.168661Z node 23 :TX_DATASHARD DEBUG: tx 2 at 9437184 restored its data
2025-03-31T15:57:39.168706Z node 23 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min}
2025-03-31T15:57:39.564621Z node 23 :TX_DATASHARD TRACE: Executed operation [0:2] at tablet 9437184 with status COMPLETE
2025-03-31T15:57:39.564744Z node 23 :TX_DATASHARD TRACE: Datashard execution counters for [0:2] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 8, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0}
2025-03-31T15:57:39.564836Z node 23 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is ExecutedNoMoreRestarts
2025-03-31T15:57:39.564883Z node 23 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit ExecuteDataTx
2025-03-31T15:57:39.564939Z node 23 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit FinishPropose
2025-03-31T15:57:39.564994Z node 23 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit FinishPropose
2025-03-31T15:57:39.565118Z node 23 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is DelayCompleteNoMoreRestarts
2025-03-31T15:57:39.565156Z node 23 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit FinishPropose
2025-03-31T15:57:39.565204Z node 23 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit CompletedOperations
2025-03-31T15:57:39.565249Z node 23 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit CompletedOperations
2025-03-31T15:57:39.565320Z node 23 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed
2025-03-31T15:57:39.565366Z node 23 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit CompletedOperations
2025-03-31T15:57:39.565420Z node 23 :TX_DATASHARD TRACE: Execution plan for [0:2] at 9437184 has finished
2025-03-31T15:57:39.595868Z node 23 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184
2025-03-31T15:57:39.595957Z node 23 :TX_DATASHARD TRACE: Complete execution for [0:2] at 9437184 on unit FinishPropose
2025-03-31T15:57:39.596025Z node 23 :TX_DATASHARD TRACE: Propose transaction complete txid 2 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: COMPLETE
2025-03-31T15:57:39.596142Z node 23 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184
2025-03-31T15:57:39.597427Z node 23 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [23:297:2279], Recipient [23:233:2226]: NKikimr::TEvTabletPipe::TEvServerConnected
2025-03-31T15:57:39.597505Z node 23 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected
2025-03-31T15:57:39.597567Z node 23 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [23:296:2278], serverId# [23:297:2279], sessionId# [0:0:0]
2025-03-31T15:57:39.597779Z node 23 :TX_DATASHARD TRACE: StateWork, received event# 268830214, Sender [23:295:2277], Recipient [23:233:2226]: NKikimrTabletBase.TEvGetCounters
>> TTabletPipeTest::TestConsumerSidePipeReset
>> TSchemeShardTopicSplitMergeTest::EnableSplitMerge [GOOD]
>> TFlatMetrics::TimeSeriesAvg16 [GOOD]
>> TFlatMetrics::TimeSeriesAVG [GOOD]
>> TBackupCollectionTests::CreateAbsolutePath [GOOD]
>> TBackupCollectionTests::Create
>> TopicAutoscaling::PartitionMerge_PreferedPartition_AutoscaleAwareSDK [GOOD]
>> TopicAutoscaling::PartitionMerge_PreferedPartition_PQv1
>> KqpSort::ComplexPkExclusiveSecondOptionalPredicate [GOOD]
>> KqpSort::ComplexPkInclusiveSecondOptionalPredicate
>> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleDrop [GOOD]
>> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleDropIndex
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::EnableSplitMerge [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141]
2025-03-31T15:57:35.525492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-03-31T15:57:35.525586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-31T15:57:35.525620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-03-31T15:57:35.525650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-03-31T15:57:35.525685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-03-31T15:57:35.525726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-03-31T15:57:35.525786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-31T15:57:35.525860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-03-31T15:57:35.526204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-03-31T15:57:35.624401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-31T15:57:35.624478Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T15:57:35.636938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-03-31T15:57:35.637259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-03-31T15:57:35.637409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-03-31T15:57:35.658017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-03-31T15:57:35.658251Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-03-31T15:57:35.658848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-03-31T15:57:35.659047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-03-31T15:57:35.663891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-03-31T15:57:35.665120Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-31T15:57:35.665185Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-31T15:57:35.665352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-03-31T15:57:35.665399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-31T15:57:35.665454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-03-31T15:57:35.665614Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-03-31T15:57:35.679383Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062]
2025-03-31T15:57:35.956647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-03-31T15:57:35.956840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:57:35.956990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-03-31T15:57:35.957297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-03-31T15:57:35.957365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:57:35.964043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-03-31T15:57:35.964186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-03-31T15:57:35.964385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:57:35.964462Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-03-31T15:57:35.964508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-03-31T15:57:35.964537Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-03-31T15:57:35.972044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:57:35.972140Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-03-31T15:57:35.972209Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-03-31T15:57:35.979967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:57:35.980032Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:57:35.980072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-03-31T15:57:35.980115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-03-31T15:57:35.998588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-03-31T15:57:36.005174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-03-31T15:57:36.005367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-03-31T15:57:36.006379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-03-31T15:57:36.006512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-03-31T15:57:36.006562Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-03-31T15:57:36.006813Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-03-31T15:57:36.006857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-03-31T15:57:36.006994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-03-31T15:57:36.007064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-03-31T15:57:36.013626Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-31T15:57:36.013672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-31T15:57:36.013965Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-31T15:57:36.014009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1
2025-03-31T15:57:36.014300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:57:36.014344Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-03-31T15:57:36.014449Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-03-31T15:57:36.014479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-31T15:57:36.014515Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-03-31T15:57:36.014541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-31T15:57:36.014587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-03-31T15:57:36.014630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-31T15:57:36.014681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-03-31T15:57:36.014734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-03-31T15:57:36.014803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-03-31T15:57:36.014836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-03-31T15:57:36.014874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-03-31T15:57:36.016968Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-03-31T15:57:36.017086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-03-31T15:57:36.017123Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ...
_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true
2025-03-31T15:57:41.525006Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:0 128 -> 240
2025-03-31T15:57:41.530786Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4
2025-03-31T15:57:41.544942Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944
2025-03-31T15:57:41.545344Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-31T15:57:41.545382Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 3]
2025-03-31T15:57:41.545582Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-31T15:57:41.545613Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 105, path id: 3
2025-03-31T15:57:41.546117Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944
2025-03-31T15:57:41.546559Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 105:0 ProgressState
2025-03-31T15:57:41.548249Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1
2025-03-31T15:57:41.548593Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1
2025-03-31T15:57:41.548973Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1
2025-03-31T15:57:41.549659Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1
2025-03-31T15:57:41.555791Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false
2025-03-31T15:57:41.556142Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1
2025-03-31T15:57:41.556525Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0
2025-03-31T15:57:41.556903Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0
2025-03-31T15:57:41.557634Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5
2025-03-31T15:57:41.558307Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 105, publications: 1, subscribers: 0
2025-03-31T15:57:41.558996Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 3], 3
2025-03-31T15:57:41.562731Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105
2025-03-31T15:57:41.562818Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105
2025-03-31T15:57:41.562846Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105
2025-03-31T15:57:41.563216Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3
2025-03-31T15:57:41.563902Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4
2025-03-31T15:57:41.564327Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0
2025-03-31T15:57:41.593396Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105
TestModificationResult got TxId: 105, wait until txId: 105
TestWaitNotification wait txId: 105
2025-03-31T15:57:41.614935Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion
2025-03-31T15:57:41.615329Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105
2025-03-31T15:57:41.617970Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944
2025-03-31T15:57:41.618058Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult
2025-03-31T15:57:41.618091Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:754:2667]
TestWaitNotification: OK eventTxId 105
2025-03-31T15:57:42.349196Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 3 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944
2025-03-31T15:57:42.355253Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 3 took 6.35ms result status StatusSuccess
2025-03-31T15:57:42.357758Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
2025-03-31T15:57:42.478207Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944
2025-03-31T15:57:42.479605Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 1.79ms result status StatusSuccess
2025-03-31T15:57:42.494756Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>>>>> Verify partition 0
>>>>> Verify partition 1
>>>>> Verify partition 2
|67.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TFlatMetrics::TimeSeriesAVG [GOOD]
>> KqpNewEngine::PushFlatmapInnerConnectionsToStageInput [GOOD]
>> KqpNewEngine::PushPureFlatmapInnerConnectionsToStage
>> AnalyzeColumnshard::AnalyzeRebootSaBeforeResolve [GOOD]
>> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_BeforeAutoscaleAwareSDK [GOOD]
>> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_PQv1
>> KqpNewEngine::LocksInRoTx [GOOD]
>> KqpNewEngine::JoinWithPrecompute
>> TTabletPipeTest::TestConsumerSidePipeReset [GOOD]
>> TTabletPipeTest::TestInterconnectSession
>> TTabletPipeTest::TestTwoNodesAndRebootOfConsumer
>> KqpSort::OffsetPk [GOOD]
>> KqpNotNullColumns::AlterAddNotNullColumn [GOOD]
>> KqpNotNullColumns::AlterAddIndex
>> KqpSort::OffsetTopSort
>> TBackupCollectionTests::Create [GOOD]
>> TBackupCollectionTests::CreateTwice
>> TBackupCollectionTests::DropTwice [GOOD]
>> TBackupCollectionTests::TableWithSystemColumns
>> TTabletPipeTest::TestInterconnectSession [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootSaBeforeResolve [GOOD]
Test command err: 2025-03-31T15:54:28.264650Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-03-31T15:54:28.265013Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-31T15:54:28.265165Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000f80/r3tmp/tmpVnA5ZC/pdisk_1.dat
2025-03-31T15:54:29.384673Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 26879, node 1
2025-03-31T15:54:29.696472Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T15:54:29.696542Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T15:54:29.696580Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T15:54:29.697102Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
2025-03-31T15:54:29.699486Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480
2025-03-31T15:54:29.814017Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T15:54:29.814531Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T15:54:29.837672Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:14916
2025-03-31T15:54:32.205742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
2025-03-31T15:54:45.326813Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2
2025-03-31T15:54:45.442292Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T15:54:45.442410Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T15:54:45.475115Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2
2025-03-31T15:54:45.477434Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-31T15:54:45.800753Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed
2025-03-31T15:54:45.801356Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed
2025-03-31T15:54:45.801909Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed
2025-03-31T15:54:45.802032Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed
2025-03-31T15:54:45.802289Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed
2025-03-31T15:54:45.802377Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed
2025-03-31T15:54:45.802458Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed
2025-03-31T15:54:45.802536Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed
2025-03-31T15:54:45.802620Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed
2025-03-31T15:54:46.027714Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T15:54:46.027831Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T15:54:46.045282Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-31T15:54:46.208711Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-03-31T15:54:46.285071Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor
2025-03-31T15:54:46.285198Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute
2025-03-31T15:54:46.328893Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete
2025-03-31T15:54:46.329181Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute
2025-03-31T15:54:46.329412Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0
2025-03-31T15:54:46.329487Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0
2025-03-31T15:54:46.329551Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0
2025-03-31T15:54:46.329611Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0
2025-03-31T15:54:46.329704Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0
2025-03-31T15:54:46.329831Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete
2025-03-31T15:54:46.330377Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes
2025-03-31T15:54:46.359498Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897
2025-03-31T15:54:46.359610Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2595], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897
2025-03-31T15:54:46.366000Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1885:2606]
2025-03-31T15:54:46.371475Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1904:2617]
2025-03-31T15:54:46.372249Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1904:2617], schemeshard id = 72075186224037897
2025-03-31T15:54:46.382180Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database
2025-03-31T15:54:46.412642Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown
2025-03-31T15:54:46.412707Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table
2025-03-31T15:54:46.412783Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics
2025-03-31T15:54:46.428830Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897
2025-03-31T15:54:46.451007Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 }
2025-03-31T15:54:46.451236Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657
2025-03-31T15:54:46.660303Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete
2025-03-31T15:54:46.932081Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking...
2025-03-31T15:54:47.007300Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing
2025-03-31T15:54:48.914387Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2244:3078], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T15:54:48.914504Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T15:54:48.967153Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897
2025-03-31T15:54:49.434729Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0;
2025-03-31T15:54:49.434941Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules;
2025-03-31T15:54:49.435274Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks;
2025-03-31T15:54:49.435408Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner;
2025-03-31T15:54:49.435541Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId;
2025-03-31T15:54:49.435685Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup;
2025-03-31T15:54:49.435804Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer;
2025-03-31T15:54:49.435939Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks;
2025-03-31T15:54:49.436626Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks;
2025-03-31T15:54:49.436808Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks;
2025-03-31T15:54:49.436940Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2;
2025-03-31T15:54:49.437125Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks;
2025-03-31T15:54:49.476911Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1;
2025-03-31T15:54:49.477013Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;desc ...
S INFO: [72075186224037894] OnActivateExecutor
2025-03-31T15:57:38.379504Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute
2025-03-31T15:57:38.380152Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete
2025-03-31T15:57:38.381100Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute
2025-03-31T15:57:38.381477Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded database: /Root/Database
2025-03-31T15:57:38.381528Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal start key
2025-03-31T15:57:38.381573Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal table owner id: 72075186224037897
2025-03-31T15:57:38.381616Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal table local path id: 4
2025-03-31T15:57:38.381654Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal start time: 1743436658312833
2025-03-31T15:57:38.381695Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal IsColumnTable: 1
2025-03-31T15:57:38.381771Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 1
2025-03-31T15:57:38.381842Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0
2025-03-31T15:57:38.381975Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 2
2025-03-31T15:57:38.382061Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 1
2025-03-31T15:57:38.382155Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 1
2025-03-31T15:57:38.382229Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete
2025-03-31T15:57:38.382405Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete. Start navigate. PathId [OwnerId: 72075186224037897, LocalPathId: 4]
2025-03-31T15:57:38.383510Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes
2025-03-31T15:57:38.384285Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute
2025-03-31T15:57:38.384362Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete
2025-03-31T15:57:38.384497Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing
... blocking NKikimr::TEvTxProxySchemeCache::TEvResolveKeySetResult from to STATISTICS_AGGREGATOR
2025-03-31T15:57:38.457636Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7296:5337]
2025-03-31T15:57:38.457926Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:7296:5337], schemeshard id = 72075186224037897
2025-03-31T15:57:38.458065Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7250:5308], server id = [2:7297:5338], tablet id = 72075186224037894, status = OK
2025-03-31T15:57:38.458168Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7297:5338]
2025-03-31T15:57:38.458239Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:7297:5338], node id = 2, have schemeshards count = 1, need schemeshards count = 0
... unblocking NKikimr::TEvTxProxySchemeCache::TEvResolveKeySetResult from to
... unblocking NKikimr::TEvTxProxySchemeCache::TEvResolveKeySetResult from to STATISTICS_AGGREGATOR
2025-03-31T15:57:38.556307Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute
2025-03-31T15:57:38.556407Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete
2025-03-31T15:57:38.557811Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7308:5341]
2025-03-31T15:57:38.558044Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1
2025-03-31T15:57:38.562225Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:2808:3228] , Record { OperationId: "operationId" Tables { PathId { OwnerId: 72075186224037897 LocalId: 4 } } Types: TYPE_COUNT_MIN_SKETCH }
2025-03-31T15:57:38.562320Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. Update existing force traversal. OperationId operationId , ReplyToActorId [1:2808:3228]
2025-03-31T15:57:38.583697Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete
2025-03-31T15:57:38.583833Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Complete
2025-03-31T15:57:38.584004Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0
2025-03-31T15:57:38.584573Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7311:5344], server id = [2:7312:5345], tablet id = 72075186224037899, status = OK
2025-03-31T15:57:38.584709Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7311:5344], path = { OwnerId: 72075186224037897 LocalId: 4 }
2025-03-31T15:57:38.594025Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899
2025-03-31T15:57:38.594161Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2
2025-03-31T15:57:38.594342Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute
2025-03-31T15:57:38.594555Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete
2025-03-31T15:57:38.594877Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database
2025-03-31T15:57:38.607935Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7311:5344], server id = [2:7312:5345], tablet id = 72075186224037899
2025-03-31T15:57:38.607998Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed
2025-03-31T15:57:38.608712Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]);
2025-03-31T15:57:38.695495Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7332:5364]], StatType[ 0 ], StatRequestsCount[ 1 ]
2025-03-31T15:57:38.695802Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ]
2025-03-31T15:57:38.695861Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7332:5364], StatRequests.size() = 1
2025-03-31T15:57:38.919159Z node 2 :SYSTEM_VIEWS WARN: [72075186224037891] TEvIntervalQuerySummary, time mismath: node id# 2, interval end# 1970-01-01T00:02:04.000000Z, event interval end# 2025-03-31T15:57:36.000000Z
2025-03-31T15:57:38.919629Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MzZlZTQzODQtMTQxYjUwYTYtOTVjZDMzNDUtOWIwOWI1NmM=, TxId:
2025-03-31T15:57:38.919701Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MzZlZTQzODQtMTQxYjUwYTYtOTVjZDMzNDUtOWIwOWI1NmM=, TxId:
2025-03-31T15:57:38.920553Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute
2025-03-31T15:57:38.947193Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4]
2025-03-31T15:57:38.947287Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply.
2025-03-31T15:57:39.500864Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 0
2025-03-31T15:57:39.500965Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive
2025-03-31T15:57:40.295733Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze
2025-03-31T15:57:40.296169Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId
2025-03-31T15:57:40.296542Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests.
2025-03-31T15:57:41.684154Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal
2025-03-31T15:57:41.684610Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table.
2025-03-31T15:57:41.684995Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4]
2025-03-31T15:57:41.686248Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute
2025-03-31T15:57:41.708457Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete
2025-03-31T15:57:41.708758Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute
2025-03-31T15:57:41.708800Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete
2025-03-31T15:57:41.709102Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1
2025-03-31T15:57:41.726540Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete
2025-03-31T15:57:41.726706Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0
2025-03-31T15:57:41.727217Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7419:5414], server id = [2:7420:5415], tablet id = 72075186224037899, status = OK
2025-03-31T15:57:41.727304Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7419:5414], path = { OwnerId: 72075186224037897 LocalId: 4 }
2025-03-31T15:57:41.734633Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899
2025-03-31T15:57:41.736155Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2
2025-03-31T15:57:41.737857Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute
2025-03-31T15:57:41.740023Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete
2025-03-31T15:57:41.740479Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database
2025-03-31T15:57:41.742531Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7419:5414], server id = [2:7420:5415], tablet id = 72075186224037899
2025-03-31T15:57:41.742562Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed
2025-03-31T15:57:41.744518Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]);
2025-03-31T15:57:41.794597Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=Yjg1N2U0MzEtNmYzMWU0YTctYTk1MzM4YzktMTViMDIyOWQ=, TxId:
2025-03-31T15:57:41.794658Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=Yjg1N2U0MzEtNmYzMWU0YTctYTk1MzM4YzktMTViMDIyOWQ=, TxId:
2025-03-31T15:57:41.795191Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute
2025-03-31T15:57:41.815401Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4]
2025-03-31T15:57:41.815477Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2808:3228]
>> TTabletPipeTest::TestTwoNodesAndRebootOfConsumer [GOOD]
>> TSchemeShardTopicSplitMergeTest::EnableSplitMerge [GOOD]
>> TFlatMetrics::TimeSeriesAvg16 [GOOD]
>> TFlatMetrics::TimeSeriesAVG [GOOD]
>> TBackupCollectionTests::CreateAbsolutePath [GOOD]
>> TBackupCollectionTests::Create
>> TopicAutoscaling::PartitionMerge_PreferedPartition_AutoscaleAwareSDK [GOOD]
>> TopicAutoscaling::PartitionMerge_PreferedPartition_PQv1
>> KqpSort::ComplexPkExclusiveSecondOptionalPredicate [GOOD]
>> KqpSort::ComplexPkInclusiveSecondOptionalPredicate
>> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleDrop [GOOD]
>> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleDropIndex
------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootSaBeforeResolve [GOOD]
Test command err: 2025-03-31T15:54:28.264650Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-03-31T15:54:28.265013Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-31T15:54:28.265165Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000f80/r3tmp/tmpVnA5ZC/pdisk_1.dat
2025-03-31T15:54:29.384673Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 26879, node 1
>> TTabletPipeTest::TestConsumerSidePipeReset
>> DataShardWrite::UpsertImmediate
>> TSubDomainTest::CoordinatorRunAtSubdomainNodeWhenAvailable2 [GOOD]
>> DataShardWrite::ExecSQLUpsertImmediate+EvWrite
|67.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest
>> TBackupCollectionTests::CreateTwice [GOOD]
>> TBackupCollectionTests::BackupAbsentCollection
>> DataShardWrite::UpsertPrepared+Volatile
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestTwoNodesAndRebootOfConsumer [GOOD]
Test command err: Leader for TabletID 9437184 is [0:0:0] sender: [1:159:2058] recipient: [1:157:2137]
IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [1:159:2058] recipient: [1:157:2137]
Leader for TabletID 9437184 is [1:165:2141] sender: [1:166:2058] recipient: [1:157:2137]
Leader for TabletID 9437185 is [0:0:0] sender: [2:170:2049] recipient: [2:162:2095]
IGNORE Leader for TabletID 9437185 is [0:0:0] sender: [2:170:2049] recipient: [2:162:2095]
Leader for TabletID 9437185 is [2:187:2098] sender: [2:188:2049] recipient: [2:162:2095]
Leader for TabletID 9437184 is [1:165:2141] sender: [1:209:2058] recipient: [1:15:2062]
Leader for TabletID 9437185 is [2:187:2098] sender: [1:211:2058] recipient: [1:15:2062]
Leader for TabletID 9437185 is [2:187:2098] sender: [2:213:2049] recipient: [2:42:2053]
Leader for TabletID 9437185 is [2:187:2098] sender: [2:214:2049] recipient: [2:154:2094]
Leader for TabletID 9437185 is [2:187:2098] sender: [1:217:2058] recipient: [1:15:2062]
Leader for TabletID 9437185 is [2:187:2098] sender: [2:219:2049] recipient: [2:42:2053]
Leader for TabletID 9437185 is [2:187:2098] sender: [2:220:2049] recipient: [2:218:2111]
Leader for TabletID 9437185 is [2:221:2112] sender: [2:222:2049] recipient: [2:218:2111]
Leader for TabletID 9437185 is [2:221:2112] sender: [1:251:2058] recipient: [1:15:2062]
|67.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestInterconnectSession [GOOD]
>> TopicAutoscaling::PartitionSplit_PreferedPartition_AutoscaleAwareSDK [GOOD]
>> TopicAutoscaling::PartitionSplit_PreferedPartition_PQv1
>> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessagesNoClusterDiscovery [GOOD]
>> BasicUsage::TWriteSession_WriteEncoded
>> TBackupCollectionTests::TableWithSystemColumns [GOOD]
>> TBackupCollectionTests::BackupAbsentCollection [GOOD]
>> TBackupCollectionTests::BackupDroppedCollection
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::CoordinatorRunAtSubdomainNodeWhenAvailable2 [GOOD]
Test command err: 2025-03-31T15:55:12.689874Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488002803304797046:2280];send_to=[0:7307199536658146131:7762515];
2025-03-31T15:55:12.689912Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000b18/r3tmp/tmp486z0B/pdisk_1.dat
2025-03-31T15:55:13.138582Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T15:55:13.138672Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T15:55:13.142204Z
node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:55:13.187715Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:62947 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-03-31T15:55:14.071737Z node 1 :TX_PROXY DEBUG: actor# [1:7488002803304797066:2136] Handle TEvNavigate describe path dc-1 2025-03-31T15:55:14.071795Z node 1 :TX_PROXY DEBUG: Actor# [1:7488002811894732137:2458] HANDLE EvNavigateScheme dc-1 2025-03-31T15:55:14.071953Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7488002803304797089:2149], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T15:55:14.072061Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002807599764815:2447][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7488002803304797089:2149], cookie# 1 2025-03-31T15:55:14.087909Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488002807599764819:2447][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488002807599764816:2447], cookie# 1 2025-03-31T15:55:14.088002Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488002803304796712:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488002807599764819:2447], cookie# 1 2025-03-31T15:55:14.088035Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488002807599764820:2447][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488002807599764817:2447], cookie# 1 2025-03-31T15:55:14.088048Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488002807599764821:2447][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488002807599764818:2447], cookie# 1 2025-03-31T15:55:14.088096Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488002807599764819:2447][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488002803304796712:2051], cookie# 1 2025-03-31T15:55:14.088127Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002807599764815:2447][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488002807599764816:2447], cookie# 1 2025-03-31T15:55:14.088150Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002807599764815:2447][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-03-31T15:55:14.088166Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488002803304796715:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488002807599764820:2447], cookie# 1 2025-03-31T15:55:14.088182Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488002803304796718:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488002807599764821:2447], cookie# 1 2025-03-31T15:55:14.088197Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488002807599764820:2447][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488002803304796715:2054], cookie# 1 2025-03-31T15:55:14.088208Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488002807599764821:2447][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 
}: sender# [1:7488002803304796718:2057], cookie# 1 2025-03-31T15:55:14.088223Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002807599764815:2447][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488002807599764817:2447], cookie# 1 2025-03-31T15:55:14.088257Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002807599764815:2447][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-03-31T15:55:14.088281Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002807599764815:2447][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488002807599764818:2447], cookie# 1 2025-03-31T15:55:14.088293Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002807599764815:2447][/dc-1] Unexpected sync response: sender# [1:7488002807599764818:2447], cookie# 1 2025-03-31T15:55:14.088348Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7488002803304797089:2149], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-03-31T15:55:14.098228Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7488002803304797089:2149], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7488002807599764815:2447] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-31T15:55:14.098354Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7488002803304797089:2149], cacheItem# { Subscriber: { Subscriber: [1:7488002807599764815:2447] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-03-31T15:55:14.111809Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7488002811894732138:2459], recipient# [1:7488002811894732137:2458], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-31T15:55:14.111904Z node 1 :TX_PROXY DEBUG: Actor# [1:7488002811894732137:2458] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-31T15:55:14.165261Z node 1 :TX_PROXY DEBUG: Actor# [1:7488002811894732137:2458] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { 
ShowPrivateTable: true } 2025-03-31T15:55:14.168290Z node 1 :TX_PROXY DEBUG: Actor# [1:7488002811894732137:2458] Handle TEvDescribeSchemeResult Forward to# [1:7488002811894732136:2457] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
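[Editor's note] The scheme board exchange above ends in a majority decision: the subscriber fans the sync out to size# 3 replicas, reports "Sync is in progress" after the first reply (successes# 1), declares "Sync is done" once successes# exceeds half# 1, and discards the third reply as "Unexpected sync response". A minimal C++ sketch of that counting rule, inferred from this log rather than taken from the real TSubscriber actor:

    #include <cstddef>
    #include <iostream>

    // Inferred from the log above, not the actual NKikimr scheme board code:
    // a sync over Size replicas is decided once one side of the tally exceeds
    // half = Size / 2; replies arriving after that are ignored, which is what
    // the subscriber logs as "Unexpected sync response".
    struct TSyncTally {
        std::size_t Size;
        std::size_t Successes = 0;
        std::size_t Failures = 0;
        bool Done = false;

        explicit TSyncTally(std::size_t size) : Size(size) {}

        // Handles one TEvSyncVersionResponse; returns true once the sync is decided.
        bool HandleReply(bool ok) {
            if (Done)
                return true;                    // late reply: "Unexpected sync response"
            if (ok)
                ++Successes;
            else
                ++Failures;
            const std::size_t half = Size / 2;  // half# 1 for size# 3
            Done = (Successes > half) || (Failures > half);
            return Done;
        }
    };

    int main() {
        TSyncTally sync(3);
        std::cout << sync.HandleReply(true) << "\n";  // 0: in progress, successes# 1
        std::cout << sync.HandleReply(true) << "\n";  // 1: done, successes# 2 > half# 1
        std::cout << sync.HandleReply(true) << "\n";  // 1: unexpected late reply
    }

Failures are tallied symmetrically here on the assumption that a majority of failed replicas would end the sync as partial (the log's `partial# 0` field); this trace only exercises the success path.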
2025-03-31T15:55:14.195515Z node 1 :TX_PROXY DEBUG: actor# [1:7488002803304797066:2136] Handle TEvProposeTransaction 2025-03-31T15:55:14.195550Z node 1 :TX_PROXY DEBUG: actor# [1:7488002803304797066:2136] TxId# 281474976710657 ProcessProposeTransaction 2025-03-31T15:55:14.195697Z node 1 :TX_PROXY DEBUG: actor# [1:7488002803304797066:2136] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7488002811894732145:2465] 2025-03-31T15:55:14.300763Z node 1 :TX_PROXY DEBUG: Actor# [1:7488002811894732145:2465] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" PeerName: "" 2025-03-31T15:55:14.300818Z node 1 :TX_PROXY DEBUG: Actor# [1:7488002811894732145:2465] txid# 281474976710657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T15:55:14.300882Z node 1 :TX_PROXY DEBUG: Actor# [1:7488002811894732145:2465] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-03-31T15:55:14.300966Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7488002803304797089:2149], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: true SyncVersion: true Statu ... 0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-03-31T15:57:48.746862Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [11:7488003446976270009:2351], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:57:48.887654Z node 13 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [13:7488003237170238830:2106], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T15:57:48.887829Z node 13 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [13:7488003237170238830:2106], cacheItem# { Subscriber: { Subscriber: [13:7488003250055140763:2117] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T15:57:48.887907Z node 13 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [13:7488003237170238830:2106], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T15:57:48.887999Z node 13 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [13:7488003237170238830:2106], cacheItem# { Subscriber: { Subscriber: [13:7488003443328669281:2233] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T15:57:48.888080Z node 13 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [13:7488003473393440427:2258], recipient# [13:7488003473393440425:2362], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T15:57:48.888144Z node 13 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [13:7488003473393440428:2259], recipient# [13:7488003473393440426:2363], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: 
true ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-03-31T15:57:48.888344Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T15:57:49.156418Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [11:7488003240817839629:2110], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T15:57:49.156599Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [11:7488003240817839629:2110], cacheItem# { Subscriber: { Subscriber: [11:7488003446976269991:2179] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T15:57:49.156685Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [11:7488003240817839629:2110], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T15:57:49.156764Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [11:7488003240817839629:2110], cacheItem# { Subscriber: { Subscriber: [11:7488003249407774244:2115] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T15:57:49.156846Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [11:7488003477041041135:2203], recipient# [11:7488003477041041133:2362], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-03-31T15:57:49.156916Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [11:7488003477041041136:2204], recipient# 
[11:7488003477041041134:2363], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T15:57:49.156968Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T15:57:49.708011Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [11:7488003240817839629:2110], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T15:57:49.708184Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [11:7488003240817839629:2110], cacheItem# { Subscriber: { Subscriber: [11:7488003446976270012:2181] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T15:57:49.708249Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [11:7488003240817839629:2110], cacheItem# { Subscriber: { Subscriber: [11:7488003446976270013:2182] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T15:57:49.708398Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [11:7488003477041041137:2205], recipient# [11:7488003446976270009:2351], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable 
RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-03-31T15:57:49.708687Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [11:7488003446976270009:2351], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup_collection/unittest >> TBackupCollectionTests::TableWithSystemColumns [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T15:57:35.960242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T15:57:35.960369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:57:35.960409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T15:57:35.960446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T15:57:35.960495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T15:57:35.960541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T15:57:35.960623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:57:35.960729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T15:57:35.961140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:57:36.098730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:57:36.098801Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:57:36.104020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:57:36.104128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T15:57:36.104304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T15:57:36.109884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T15:57:36.110178Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T15:57:36.110919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T15:57:36.111204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:57:36.113489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:57:36.114715Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:57:36.114777Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:57:36.114956Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T15:57:36.115006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:57:36.115052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T15:57:36.115268Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T15:57:36.123524Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T15:57:36.271007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:57:36.271306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:57:36.271563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T15:57:36.271831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T15:57:36.271896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:57:36.279560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T15:57:36.279755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T15:57:36.280033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:57:36.280128Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T15:57:36.280171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T15:57:36.280211Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T15:57:36.282924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:57:36.283032Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:57:36.283105Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T15:57:36.285535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:57:36.285628Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:57:36.285689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, 
operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:57:36.285762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T15:57:36.289753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:57:36.295902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T15:57:36.296146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T15:57:36.297253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:57:36.297419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:57:36.297470Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:57:36.297807Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T15:57:36.297874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:57:36.298065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:57:36.298143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T15:57:36.300741Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:57:36.300791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:57:36.301001Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:57:36.301039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T15:57:36.301303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:57:36.301350Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T15:57:36.301450Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:57:36.301480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:57:36.301519Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 
progress is 1/1 2025-03-31T15:57:36.301549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:57:36.301583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T15:57:36.301623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:57:36.301671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T15:57:36.301708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T15:57:36.301844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:57:36.301908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T15:57:36.301963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T15:57:36.310006Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:57:36.310167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:57:36.310300Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... RACE: StateWork, received event# 269550080, Sender [6:587:2544], Recipient [6:122:2148]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: COMPLETE TxId: 106 Step: 5000007 OrderId: 106 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 1113 } } 2025-03-31T15:57:51.438177Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransactionResult 2025-03-31T15:57:51.438264Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: COMPLETE TxId: 106 Step: 5000007 OrderId: 106 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 1113 } } 2025-03-31T15:57:51.438307Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 106, tablet: 72075186233409548, partId: 1 2025-03-31T15:57:51.438445Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 106:1, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: COMPLETE TxId: 106 Step: 5000007 OrderId: 106 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 1113 } } 2025-03-31T15:57:51.438550Z node 6 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: COMPLETE TxId: 106 Step: 5000007 OrderId: 106 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 1113 } } 2025-03-31T15:57:51.438593Z node 6 :FLAT_TX_SCHEMESHARD TRACE: 
TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-03-31T15:57:51.439066Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274137603, Sender [6:205:2207], Recipient [6:122:2148]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 8] Version: 3 } 2025-03-31T15:57:51.439122Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2025-03-31T15:57:51.439174Z node 6 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 8 Version: 3 PathOwnerId: 72057594046678944, cookie: 106 2025-03-31T15:57:51.439232Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 8 Version: 3 PathOwnerId: 72057594046678944, cookie: 106 2025-03-31T15:57:51.439256Z node 6 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 106 2025-03-31T15:57:51.439296Z node 6 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 8], version: 3 2025-03-31T15:57:51.439344Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 4 2025-03-31T15:57:51.439416Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/2, is published: true 2025-03-31T15:57:51.439459Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-03-31T15:57:51.439770Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [6:647:2596], Recipient [6:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T15:57:51.439802Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T15:57:51.439827Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-03-31T15:57:51.440008Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269551620, Sender [6:587:2544], Recipient [6:122:2148]: NKikimrTxDataShard.TEvSchemaChanged Source { RawX1: 587 RawX2: 25769806320 } Origin: 72075186233409548 State: 2 TxId: 106 Step: 0 Generation: 2 2025-03-31T15:57:51.440045Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChanged 2025-03-31T15:57:51.440149Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 587 RawX2: 25769806320 } Origin: 72075186233409548 State: 2 TxId: 106 Step: 0 Generation: 2 2025-03-31T15:57:51.440204Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 106, tablet: 72075186233409548, partId: 1 2025-03-31T15:57:51.440371Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 106:1, at schemeshard: 72057594046678944, message: Source { RawX1: 587 RawX2: 25769806320 } Origin: 72075186233409548 State: 2 TxId: 106 Step: 0 Generation: 2 2025-03-31T15:57:51.440436Z node 6 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 106:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-03-31T15:57:51.440547Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 106:1 HandleReply TEvSchemaChanged at tablet: 
72057594046678944 message: Source { RawX1: 587 RawX2: 25769806320 } Origin: 72075186233409548 State: 2 TxId: 106 Step: 0 Generation: 2 2025-03-31T15:57:51.440616Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 106:1, shardIdx: 72057594046678944:3, datashard: 72075186233409548, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-31T15:57:51.440656Z node 6 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 106:1, at schemeshard: 72057594046678944 2025-03-31T15:57:51.440690Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 106:1, datashard: 72075186233409548, at schemeshard: 72057594046678944 2025-03-31T15:57:51.440731Z node 6 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 106:1 129 -> 240 2025-03-31T15:57:51.440898Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-03-31T15:57:51.443541Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-31T15:57:51.443678Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-31T15:57:51.446549Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-03-31T15:57:51.446616Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-31T15:57:51.446737Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-03-31T15:57:51.446757Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-31T15:57:51.446836Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 106:1, at schemeshard: 72057594046678944 2025-03-31T15:57:51.446853Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-31T15:57:51.446902Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-03-31T15:57:51.446914Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-31T15:57:51.446973Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 106:1, at schemeshard: 72057594046678944 2025-03-31T15:57:51.447003Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-31T15:57:51.447037Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 106:1 2025-03-31T15:57:51.447167Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [6:587:2544] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 106 at schemeshard: 72057594046678944 2025-03-31T15:57:51.447495Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [6:122:2148], Recipient [6:122:2148]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-03-31T15:57:51.447540Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-03-31T15:57:51.447590Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 106:1, at schemeshard: 72057594046678944 2025-03-31T15:57:51.447646Z node 6 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 106:1 ProgressState 2025-03-31T15:57:51.447779Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects 
ApplyOnExecute at tablet# 72057594046678944
2025-03-31T15:57:51.447832Z node 6 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:1 progress is 2/2
2025-03-31T15:57:51.447876Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 2/2
2025-03-31T15:57:51.447921Z node 6 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:1 progress is 2/2
2025-03-31T15:57:51.447965Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 2/2
2025-03-31T15:57:51.448011Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 2/2, is published: true
2025-03-31T15:57:51.448101Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [6:301:2292] message: TxId: 106
2025-03-31T15:57:51.448159Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 2/2
2025-03-31T15:57:51.448208Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 106:0
2025-03-31T15:57:51.448242Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 106:0
2025-03-31T15:57:51.448320Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 2
2025-03-31T15:57:51.448355Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 106:1
2025-03-31T15:57:51.448372Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 106:1
2025-03-31T15:57:51.448442Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 3
2025-03-31T15:57:51.451237Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944
2025-03-31T15:57:51.451374Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [6:301:2292] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 106 at schemeshard: 72057594046678944
2025-03-31T15:57:51.451576Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult
2025-03-31T15:57:51.451643Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [6:616:2565]
2025-03-31T15:57:51.451861Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [6:618:2567], Recipient [6:122:2148]: NKikimr::TEvTabletPipe::TEvServerDisconnected
2025-03-31T15:57:51.451896Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected
2025-03-31T15:57:51.451920Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944
TestWaitNotification: OK eventTxId 106
>> KqpScanArrowFormat::AggregateNoColumnNoRemaps [GOOD]
>> KqpScanArrowFormat::AggregateWithFunction
>> TVPatchTests::PatchPartPutError
>> TVPatchTests::PatchPartPutError [GOOD]
>> TVPatchTests::PatchPartFastXorDiffWithEmptyDiffBuffer
|67.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot
|67.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot
|67.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot
|67.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader
>> TBackupCollectionTests::BackupDroppedCollection [GOOD]
>> TBackupCollectionTests::BackupAbsentDirs
>> TVPatchTests::PatchPartFastXorDiffWithEmptyDiffBuffer [GOOD]
|67.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader
|67.5%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader
>> Balancing::Balancing_ManyTopics_TopicApi [GOOD]
>> Balancing::Balancing_ManyTopics_PQv1
|67.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::PatchPartPutError [GOOD]
Test command err:
Recv 65537
2025-03-31T15:57:52.767601Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z
Send NKikimr::TEvBlobStorage::TEvVGet
Recv NKikimr::TEvBlobStorage::TEvVGetResult
2025-03-31T15:57:52.768647Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1
2025-03-31T15:57:52.768711Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [1] Status# OK
Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts
Recv NKikimr::TEvBlobStorage::TEvVPatchDiff
2025-03-31T15:57:52.768923Z node 1 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:577} [0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorReceiver# no ParityPart# no ForceEnd# no
2025-03-31T15:57:52.768984Z node 1 :BS_VDISK_PATCH INFO: {BSVSP05@skeleton_vpatch_actor.cpp:246} [0:1:0:0:0] TEvVPatch: send vGet for pulling part data; OriginalBlobId# [1:2:3:4:6:10:0] PullingPart# 1
Send NKikimr::TEvBlobStorage::TEvVGet
Recv NKikimr::TEvBlobStorage::TEvVGetResult
2025-03-31T15:57:52.769168Z node 1 :BS_VDISK_PATCH INFO: {BSVSP08@skeleton_vpatch_actor.cpp:383} [0:1:0:0:0] TEvVPatch: received part data; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 DataParts# 4 ReceivedBlobId# [1:2:3:4:6:10:1] Status# OK ResultSize# 1 ParityPart# no
2025-03-31T15:57:52.769266Z node 1 :BS_VDISK_PATCH INFO: {BSVSP14@skeleton_vpatch_actor.cpp:462} [0:1:0:0:0] TEvVPatch: send xor diffs; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorDiffCount# 0
2025-03-31T15:57:52.769331Z node 1 :BS_VDISK_PATCH INFO: {BSVSP15@skeleton_vpatch_actor.cpp:502} [0:1:0:0:0] TEvVPatch: send vPut; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 ReceivedXorDiffs# 0 ExpectedXorDiffs# 0
Send NKikimr::TEvBlobStorage::TEvVPut
Recv NKikimr::TEvBlobStorage::TEvVPutResult
2025-03-31T15:57:52.769498Z node 1 :BS_VDISK_PATCH INFO: {BSVSP10@skeleton_vpatch_actor.cpp:627} [0:1:0:0:0] TEvVPatch: received put result; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# ERROR
2025-03-31T15:57:52.769547Z node 1 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: send patch result; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# ERROR ErrorReason# Recieve not OK status from VPutResult, received status# ERROR
Send NKikimr::TEvBlobStorage::TEvVPatchResult
2025-03-31T15:57:52.769646Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying;
Send NKikimr::TEvVPatchDyingRequest
Recv NKikimr::TEvVPatchDyingConfirm
>> ColumnShardTiers::TTLUsage
------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::PatchPartFastXorDiffWithEmptyDiffBuffer [GOOD]
Test command err:
Recv 65537
2025-03-31T15:57:52.959039Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:100:0] Deadline# 1970-01-01T00:00:01.000000Z
Send NKikimr::TEvBlobStorage::TEvVGet
Recv NKikimr::TEvBlobStorage::TEvVGetResult
2025-03-31T15:57:52.960097Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:100:0] Status# OK ResultSize# 1
2025-03-31T15:57:52.960159Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:100:0] FoundParts# [5] Status# OK
Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts
Recv NKikimr::TEvBlobStorage::TEvVPatchXorDiff
2025-03-31T15:57:52.960410Z node 1 :BS_VDISK_PATCH INFO: {BSVSP13@skeleton_vpatch_actor.cpp:674} [0:1:0:0:0] TEvVPatch: received xor diff; OriginalBlobId# [1:2:3:4:6:100:0] PatchedBlobId# [1:3:3:4:6:100:0] FromPart# 4 ToPart# 0 HasBuffer# no ReceivedXorDiffCount# 1/0
Send NKikimr::TEvBlobStorage::TEvVPatchXorDiffResult
Recv NKikimr::TEvBlobStorage::TEvVPatchDiff
2025-03-31T15:57:52.960587Z node 1 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:577} [0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:100:0] PatchedBlobId# [1:3:3:4:6:100:0] OriginalPartId# 5 PatchedPartId# 5 XorReceiver# yes ParityPart# yes ForceEnd# no
2025-03-31T15:57:52.960646Z node 1 :BS_VDISK_PATCH INFO: {BSVSP05@skeleton_vpatch_actor.cpp:246} [0:1:0:0:0] TEvVPatch: send vGet for pulling part data; OriginalBlobId# [1:2:3:4:6:100:0] PullingPart# 5
Send NKikimr::TEvBlobStorage::TEvVGet
>> AnalyzeColumnshard::Analyze [GOOD]
>> KqpScanArrowInChanels::AggregateCountStar [GOOD]
>> KqpScanArrowInChanels::AggregateByColumn
>> TBackupCollectionTests::BackupAbsentDirs [GOOD]
>> TBackupCollectionTests::BackupNonIncrementalCollection
|67.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest
>> PersQueueSdkReadSessionTest::SettingsValidation [GOOD]
>> PersQueueSdkReadSessionTest::SpecifyClustersExplicitly
>> AutoConfig::GetASPoolsWith4AndMoreCPUs [GOOD]
>> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeSave [GOOD]
|67.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetASPoolsWith4AndMoreCPUs [GOOD]
>> RetryPolicy::TWriteSession_TestBrokenPolicy [GOOD]
>> RetryPolicy::TWriteSession_RetryOnTargetCluster
|67.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring
|67.5%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring
|67.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring
|67.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest
|67.6%| [TS] {asan, default-linux-x86_64, release}
ydb/core/tx/replication/service/ut_json_change_record/unittest >> DataShardWrite::UpsertImmediate [GOOD] >> DataShardWrite::UpsertImmediateManyColumns ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::Analyze [GOOD] Test command err: 2025-03-31T15:54:33.852887Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:54:33.853677Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T15:54:33.853918Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000fa1/r3tmp/tmpGZdYYF/pdisk_1.dat 2025-03-31T15:54:36.712189Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3060, node 1 2025-03-31T15:54:38.332227Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:54:38.332490Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:54:38.332737Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:54:38.349080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T15:54:38.351140Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T15:54:38.646337Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:38.648583Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:38.672020Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13663 2025-03-31T15:54:41.936882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T15:54:49.724424Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-31T15:54:50.029349Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:50.029976Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:50.142292Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T15:54:50.160462Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:54:50.956000Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:50.956567Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:50.957016Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:50.957119Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:50.957292Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:50.957356Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:50.957434Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:50.957528Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:50.957624Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:51.247322Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:51.247422Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:51.270581Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:54:52.414643Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:54:52.856257Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-31T15:54:52.857005Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-31T15:54:53.070000Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-31T15:54:53.087697Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-31T15:54:53.087903Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-31T15:54:53.087966Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-31T15:54:53.088013Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-31T15:54:53.088062Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-31T15:54:53.088115Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-31T15:54:53.088165Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-31T15:54:53.094243Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-31T15:54:53.242298Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-31T15:54:53.242417Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1876:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-31T15:54:53.385461Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1890:2608] 2025-03-31T15:54:53.396181Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1908:2618] 2025-03-31T15:54:53.396267Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1908:2618], schemeshard id = 72075186224037897 2025-03-31T15:54:53.489369Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-31T15:54:53.682879Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-31T15:54:53.691295Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-31T15:54:53.691839Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-31T15:54:53.851355Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-31T15:54:53.913288Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-31T15:54:53.914467Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-31T15:54:54.900686Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-31T15:54:55.494296Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-31T15:54:55.603604Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-31T15:54:57.091003Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2247:3079], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:54:57.092293Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:54:57.120219Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-31T15:54:57.388141Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2332:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T15:54:57.388474Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2332:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T15:54:57.388814Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2332:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T15:54:57.388981Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2332:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T15:54:57.389110Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2332:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T15:54:57.389248Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2332:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T15:54:57.389408Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2332:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T15:54:57.389553Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2332:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T15:54:57.389699Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2332:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T15:54:57.389834Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2332:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T15:54:57.389952Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2332:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T15:54:57.390070Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2332:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T15:54:57.471323Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T15:54:57.471424Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;descr ... 03-31T15:55:15.711208Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:57:43.875559Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-03-31T15:57:43.875972Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-03-31T15:57:43.876343Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-03-31T15:57:43.877119Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-03-31T15:57:45.876084Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 2, at schemeshard: 72075186224037897 2025-03-31T15:57:45.876529Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 227.000000s, at schemeshard: 72075186224037897 2025-03-31T15:57:45.876762Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037897, stats size# 51 2025-03-31T15:57:45.903795Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-03-31T15:57:47.361338Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T15:57:47.361804Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-03-31T15:57:47.362217Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-03-31T15:57:47.362643Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table. 2025-03-31T15:57:47.363507Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-03-31T15:57:47.365833Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-31T15:57:47.422779Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-03-31T15:57:47.447049Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6993:5167], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:47.447924Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7003:5172], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:47.448340Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:47.618959Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897 2025-03-31T15:57:47.899805Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7007:5175], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-03-31T15:57:48.284590Z node 2 :TX_PROXY ERROR: Actor# [2:7103:5221] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:57:48.738434Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:7132:5236]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T15:57:48.738723Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-31T15:57:48.738832Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:7134:5238] 2025-03-31T15:57:48.738905Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:7134:5238] 2025-03-31T15:57:48.739273Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7135:5239] 2025-03-31T15:57:48.743556Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:7135:5239], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-03-31T15:57:48.743653Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-03-31T15:57:48.744044Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7134:5238], server id = [2:7135:5239], tablet id = 72075186224037894, status = OK 2025-03-31T15:57:48.744116Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-31T15:57:48.744215Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:7132:5236], StatRequests.size() = 1 2025-03-31T15:57:49.145494Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NTkwZjQ3NTUtNjQ1ZTllMTEtZDBkYzg1OTgtMmE4N2RlMWY=, TxId: 2025-03-31T15:57:49.145596Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NTkwZjQ3NTUtNjQ1ZTllMTEtZDBkYzg1OTgtMmE4N2RlMWY=, TxId: 2025-03-31T15:57:49.146308Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-31T15:57:49.175413Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-03-31T15:57:49.175505Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-03-31T15:57:49.225908Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-03-31T15:57:49.226002Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-03-31T15:57:49.331838Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:7134:5238], schemeshard count = 1 2025-03-31T15:57:50.574157Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-31T15:57:50.574261Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-03-31T15:57:50.577288Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-31T15:57:50.597321Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-31T15:57:50.598135Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-31T15:57:50.598211Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::ExecuteAnalyze. 
Table OperationId operationId, PathId [OwnerId: 72075186224037897, LocalPathId: 4], AnalyzedShards 1 2025-03-31T15:57:50.624788Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-31T15:57:50.636131Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableRequest::Complete. Send 1 events. 2025-03-31T15:57:50.637742Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Execute 2025-03-31T15:57:50.637917Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Execute. All shards are analyzed 2025-03-31T15:57:50.664573Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-03-31T15:57:52.207814Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T15:57:52.207960Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-03-31T15:57:52.208013Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-31T15:57:52.208732Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-31T15:57:52.228734Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-31T15:57:52.229141Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-31T15:57:52.229212Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-31T15:57:52.230136Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-03-31T15:57:52.252634Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-03-31T15:57:52.252884Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-03-31T15:57:52.253439Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7261:5315], server id = [2:7262:5316], tablet id = 72075186224037899, status = OK 2025-03-31T15:57:52.253557Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7261:5315], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T15:57:52.257499Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-03-31T15:57:52.257639Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-03-31T15:57:52.257932Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-03-31T15:57:52.258156Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-03-31T15:57:52.258412Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7261:5315], server id = [2:7262:5316], tablet id = 72075186224037899 2025-03-31T15:57:52.258460Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:57:52.258662Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-03-31T15:57:52.261437Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-03-31T15:57:52.299709Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7282:5335]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T15:57:52.299979Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-31T15:57:52.300023Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7282:5335], StatRequests.size() = 1 2025-03-31T15:57:52.465740Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MzE0ZDFkMTctMTc0NTlmZjItMTQwM2ZhNWUtMjUzYjhkNmI=, TxId: 2025-03-31T15:57:52.465811Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MzE0ZDFkMTctMTc0NTlmZjItMTQwM2ZhNWUtMjUzYjhkNmI=, TxId: 2025-03-31T15:57:52.466400Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-31T15:57:52.492969Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-31T15:57:52.493062Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2809:3228] >> TBackupCollectionTests::BackupNonIncrementalCollection [GOOD] >> JsonChangeRecord::DataChange [GOOD] >> DataShardWrite::UpsertPrepared+Volatile [GOOD] >> DataShardWrite::UpsertPrepared-Volatile >> KqpSqlIn::Dict [GOOD] >> KqpSqlIn::Delete >> TopicAutoscaling::ReadFromTimestamp_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::ReadFromTimestamp_AutoscaleAwareSDK ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeSave [GOOD] Test command err: 2025-03-31T15:54:29.271396Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:54:29.271604Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T15:54:29.271687Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ff7/r3tmp/tmpCN8yzA/pdisk_1.dat 2025-03-31T15:54:30.318346Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11383, node 1 2025-03-31T15:54:32.677132Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:54:32.677373Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:54:32.677598Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:54:32.715295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T15:54:32.717276Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T15:54:32.986141Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:32.999209Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:33.037992Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:63743 2025-03-31T15:54:37.710605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T15:54:55.499722Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-31T15:54:55.646994Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:55.647524Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:55.733871Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T15:54:55.746904Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:54:56.748904Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:56.749514Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:56.750010Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:56.750184Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:56.750411Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:56.750496Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:56.750579Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:56.750652Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:56.750732Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:56.957604Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:56.957718Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:56.972798Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:54:57.451503Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:54:57.518995Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-31T15:54:57.519148Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-31T15:54:57.556421Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-31T15:54:57.557134Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-31T15:54:57.557379Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-31T15:54:57.557445Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-31T15:54:57.557511Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-31T15:54:57.557574Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-31T15:54:57.557628Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-31T15:54:57.557686Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-31T15:54:57.558353Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-31T15:54:57.589189Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-31T15:54:57.589331Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1872:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-31T15:54:57.605992Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2608] 2025-03-31T15:54:57.624604Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1908:2617] 2025-03-31T15:54:57.625259Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1908:2617], schemeshard id = 72075186224037897 2025-03-31T15:54:57.628592Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-31T15:54:57.727749Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-31T15:54:57.727811Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-31T15:54:57.728219Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-31T15:54:57.748207Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-31T15:54:57.755614Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-31T15:54:57.755741Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-31T15:54:58.053572Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-31T15:54:58.256738Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-31T15:54:58.356483Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-31T15:54:59.801666Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2241:3074], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:54:59.801789Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:54:59.825421Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-31T15:55:00.349366Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2387:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T15:55:00.349608Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2387:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T15:55:00.349968Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2387:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T15:55:00.350142Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2387:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T15:55:00.350297Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2387:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T15:55:00.350433Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2387:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T15:55:00.350593Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2387:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T15:55:00.350741Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2387:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T15:55:00.350880Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2387:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T15:55:00.351029Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2387:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T15:55:00.353150Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2387:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T15:55:00.353300Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2387:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T15:55:00.435870Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2398:2889];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T15:55:00.435977Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2398:2889];tablet_id=72075186224037900;process= ... [72075186224037894] Subscribed for config changes 2025-03-31T15:57:52.840205Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-31T15:57:52.840304Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-31T15:57:52.840456Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-31T15:57:52.841898Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-31T15:57:52.841970Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-31T15:57:52.851123Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-03-31T15:57:52.942876Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-03-31T15:57:52.943050Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-03-31T15:57:52.944098Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8727:6573], server id = [2:8732:6578], tablet id = 72075186224037899, status = OK 2025-03-31T15:57:52.944220Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8727:6573], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T15:57:52.944626Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8728:6574], server id = [2:8733:6579], tablet id = 72075186224037900, status = OK 2025-03-31T15:57:52.944691Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8728:6574], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T15:57:52.945854Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8729:6575], server id = [2:8734:6580], tablet id = 72075186224037901, status = OK 2025-03-31T15:57:52.945942Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8729:6575], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T15:57:52.958974Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8730:6576], server id = [2:8735:6581], tablet id = 72075186224037902, status = OK 2025-03-31T15:57:52.959111Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8730:6576], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T15:57:52.959579Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8731:6577], server id = [2:8737:6583], tablet id = 72075186224037903, status = OK 2025-03-31T15:57:52.959632Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8731:6577], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T15:57:52.960751Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-03-31T15:57:52.961379Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-03-31T15:57:52.962511Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8727:6573], server id = [2:8732:6578], tablet id = 72075186224037899 2025-03-31T15:57:52.962562Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:57:52.962816Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-03-31T15:57:52.971673Z node 
2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8728:6574], server id = [2:8733:6579], tablet id = 72075186224037900 2025-03-31T15:57:52.971740Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:57:52.972165Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-03-31T15:57:52.972704Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8729:6575], server id = [2:8734:6580], tablet id = 72075186224037901 2025-03-31T15:57:52.972734Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:57:52.972915Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2025-03-31T15:57:52.973243Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8741:6587], server id = [2:8745:6591], tablet id = 72075186224037904, status = OK 2025-03-31T15:57:52.973330Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8741:6587], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T15:57:52.973466Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8742:6588], server id = [2:8746:6592], tablet id = 72075186224037905, status = OK 2025-03-31T15:57:52.973513Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8742:6588], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T15:57:52.974740Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8730:6576], server id = [2:8735:6581], tablet id = 72075186224037902 2025-03-31T15:57:52.974776Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:57:52.979223Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8744:6590], server id = [2:8748:6594], tablet id = 72075186224037906, status = OK 2025-03-31T15:57:52.979414Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8744:6590], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T15:57:52.980022Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8731:6577], server id = [2:8737:6583], tablet id = 72075186224037903 2025-03-31T15:57:52.980059Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:57:52.981149Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8747:6593], server id = [2:8750:6596], tablet id = 72075186224037907, status = OK 2025-03-31T15:57:52.981225Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8747:6593], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T15:57:52.981386Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8749:6595], server id = [2:8751:6597], tablet id = 72075186224037908, status = OK 2025-03-31T15:57:52.981431Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8749:6595], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T15:57:52.982292Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-03-31T15:57:52.983025Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-03-31T15:57:52.994332Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-03-31T15:57:52.996949Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8741:6587], server id = [2:8745:6591], tablet id = 72075186224037904 2025-03-31T15:57:52.997225Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:57:52.997417Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, 
client id = [2:8742:6588], server id = [2:8746:6592], tablet id = 72075186224037905 2025-03-31T15:57:52.997436Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:57:52.997513Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-03-31T15:57:52.997933Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8744:6590], server id = [2:8748:6594], tablet id = 72075186224037906 2025-03-31T15:57:52.997954Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:57:52.998055Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-03-31T15:57:52.998104Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-03-31T15:57:52.998423Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-03-31T15:57:53.009165Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-03-31T15:57:53.010436Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-31T15:57:53.039136Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8747:6593], server id = [2:8750:6596], tablet id = 72075186224037907 2025-03-31T15:57:53.039282Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:57:53.040175Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8749:6595], server id = [2:8751:6597], tablet id = 72075186224037908 2025-03-31T15:57:53.040217Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:57:53.040600Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-03-31T15:57:53.101433Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8773:6619]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T15:57:53.101714Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-31T15:57:53.101790Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8773:6619], StatRequests.size() = 1 2025-03-31T15:57:53.304149Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=OWQyNmNhZjMtMzY0NTg4YzMtN2U2MGFiNGItM2Q5MDgyMGE=, TxId: 2025-03-31T15:57:53.304231Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OWQyNmNhZjMtMzY0NTg4YzMtN2U2MGFiNGItM2Q5MDgyMGE=, TxId: 2025-03-31T15:57:53.304907Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-31T15:57:53.321378Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:8788:6625] 2025-03-31T15:57:53.321673Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:8788:6625], schemeshard id = 72075186224037897 2025-03-31T15:57:53.321769Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8680:6542], server id = [2:8789:6626], tablet id = 72075186224037894, status = OK 2025-03-31T15:57:53.321896Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:8789:6626] 2025-03-31T15:57:53.321983Z 
node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:8789:6626], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2025-03-31T15:57:53.344769Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-31T15:57:53.344853Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-03-31T15:57:53.420431Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:8792:6629]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-03-31T15:57:53.420746Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-03-31T15:57:53.420815Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-03-31T15:57:53.425451Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-03-31T15:57:53.425533Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-03-31T15:57:53.425598Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-03-31T15:57:53.442273Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 |67.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest >> JsonChangeRecord::DataChange [GOOD] >> JsonChangeRecord::Heartbeat [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup_collection/unittest >> TBackupCollectionTests::BackupNonIncrementalCollection [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-31T15:57:39.620810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T15:57:39.620914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:57:39.620953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T15:57:39.620994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T15:57:39.621058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T15:57:39.621088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T15:57:39.621144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:57:39.621217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, 
DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T15:57:39.621618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:57:39.898177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:57:39.898566Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:57:40.009183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:57:40.009592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T15:57:40.009776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T15:57:40.054561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T15:57:40.054883Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T15:57:40.055679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T15:57:40.056064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:57:40.063930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:57:40.065389Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:57:40.065462Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:57:40.065545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T15:57:40.065603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:57:40.065661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T15:57:40.065896Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T15:57:40.097218Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-03-31T15:57:40.629649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:57:40.629865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:57:40.630069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T15:57:40.630269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T15:57:40.630321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:57:40.644018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose 
Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T15:57:40.644159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T15:57:40.644374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:57:40.644435Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T15:57:40.644471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T15:57:40.644510Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T15:57:40.654870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:57:40.655528Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:57:40.655756Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T15:57:40.681018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:57:40.681326Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:57:40.681386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:57:40.681442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T15:57:40.697338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:57:40.711242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T15:57:40.712547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T15:57:40.723364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:57:40.724737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:57:40.725064Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:57:40.727426Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T15:57:40.727986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, 
operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:57:40.729518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:57:40.730963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:57:40.749531Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:57:40.749868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:57:40.754940Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:57:40.755267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T15:57:40.758114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:57:40.763242Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T15:57:40.764464Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:57:40.764752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:57:40.765318Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:57:40.765624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:57:40.765927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T15:57:40.766236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:57:40.771415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T15:57:40.771725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T15:57:40.772050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:57:40.773013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T15:57:40.773647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T15:57:40.797389Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:57:40.797813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:57:40.797889Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
Complete at tablet# 72057594046678944 2025-03-31T15:57:56.060822Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 105:1 2025-03-31T15:57:56.061023Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [7:124:2150], Recipient [7:124:2150]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-03-31T15:57:56.061060Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-03-31T15:57:56.061127Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:1, at schemeshard: 72057594046678944 2025-03-31T15:57:56.061173Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 105:1 ProgressState 2025-03-31T15:57:56.061389Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-03-31T15:57:56.061430Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:1 progress is 2/2 2025-03-31T15:57:56.061475Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 2/2 2025-03-31T15:57:56.061528Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:1 progress is 2/2 2025-03-31T15:57:56.061570Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 2/2 2025-03-31T15:57:56.061621Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 2/2, is published: true 2025-03-31T15:57:56.061720Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [7:301:2292] message: TxId: 105 2025-03-31T15:57:56.061786Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 2/2 2025-03-31T15:57:56.061835Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2025-03-31T15:57:56.061890Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2025-03-31T15:57:56.062001Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2025-03-31T15:57:56.062044Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:1 2025-03-31T15:57:56.062064Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:1 2025-03-31T15:57:56.062157Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2025-03-31T15:57:56.062187Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-03-31T15:57:56.081473Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-31T15:57:56.081637Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:301:2292] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 105 at schemeshard: 72057594046678944 2025-03-31T15:57:56.081890Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-03-31T15:57:56.081965Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [7:527:2488] 2025-03-31T15:57:56.082216Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:529:2490], Recipient [7:124:2150]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-03-31T15:57:56.082255Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event 
TEvTabletPipe::TEvServerDisconnected 2025-03-31T15:57:56.082282Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 105 TestModificationResults wait txId: 106 2025-03-31T15:57:56.082877Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [7:599:2558], Recipient [7:124:2150]: {TEvModifySchemeTransaction txid# 106 TabletId# 72057594046678944} 2025-03-31T15:57:56.082946Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-03-31T15:57:56.100234Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpBackupIncrementalBackupCollection BackupIncrementalBackupCollection { Name: ".backups/collections/MyCollection1" } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:57:56.106360Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental, operationId: 106:0, at schemeshard: 72057594046678944 2025-03-31T15:57:56.106680Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 4], parent name: MyCollection1, child name: 19700101000000Z_incremental, child id: [OwnerId: 72057594046678944, LocalPathId: 8], at schemeshard: 72057594046678944 2025-03-31T15:57:56.106771Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 0 2025-03-31T15:57:56.106889Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 106:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T15:57:56.107032Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 106:1, explain: Incremental backup is disabled on this collection, at schemeshard: 72057594046678944 2025-03-31T15:57:56.107104Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 106:2, propose status:StatusInvalidParameter, reason: Incremental backup is disabled on this collection, at schemeshard: 72057594046678944 2025-03-31T15:57:56.109917Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Abort operation: IgniteOperation fail to propose a part, opId: 106:1, at schemeshard: 72057594046678944, already accepted parts: 1, propose result status: StatusInvalidParameter, with reason: Incremental backup is disabled on this collection, tx message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpBackupIncrementalBackupCollection BackupIncrementalBackupCollection { Name: ".backups/collections/MyCollection1" } } TxId: 106 TabletId: 72057594046678944 2025-03-31T15:57:56.110105Z node 7 :FLAT_TX_SCHEMESHARD INFO: MkDir AbortPropose, opId: 106:0, at schemeshard: 72057594046678944 2025-03-31T15:57:56.110396Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-03-31T15:57:56.123931Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 106, response: Status: StatusInvalidParameter Reason: "Incremental backup is disabled on this collection" TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:57:56.124166Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 106, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Incremental backup is disabled on this collection, operation: BACKUP INCREMENTAL, path: 
/MyRoot/.backups/collections/MyCollection1 2025-03-31T15:57:56.124240Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-03-31T15:57:56.124659Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2025-03-31T15:57:56.124716Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2025-03-31T15:57:56.125243Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [7:605:2564], Recipient [7:124:2150]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T15:57:56.125317Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T15:57:56.125363Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-03-31T15:57:56.125538Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [7:301:2292], Recipient [7:124:2150]: NKikimrScheme.TEvNotifyTxCompletion TxId: 106 2025-03-31T15:57:56.125577Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-03-31T15:57:56.130001Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2025-03-31T15:57:56.130222Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-03-31T15:57:56.130274Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [7:603:2562] 2025-03-31T15:57:56.130562Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:605:2564], Recipient [7:124:2150]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-03-31T15:57:56.130611Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-03-31T15:57:56.130657Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 106 2025-03-31T15:57:56.135532Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [7:606:2565], Recipient [7:124:2150]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/.backups/collections/MyCollection1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-03-31T15:57:56.135630Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-03-31T15:57:56.135804Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/MyCollection1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:57:56.136081Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/MyCollection1" took 272us result status StatusSuccess 2025-03-31T15:57:56.137829Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.backups/collections/MyCollection1" PathDescription { Self { Name: "MyCollection1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeBackupCollection CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 
3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 BackupCollectionVersion: 0 } ChildrenExist: true } Children { Name: "19700101000000Z_full" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 105 CreateStep: 5000006 ParentPathId: 4 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } BackupCollectionDescription { Name: "MyCollection1" ExplicitEntryList { Entries { Type: ETypeTable Path: "/MyRoot/Table1" } } Cluster { } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TopicAutoscaling::ControlPlane_CDC [GOOD] >> TopicAutoscaling::BalancingAfterSplit_sessionsWithPartition >> DataShardWrite::ExecSQLUpsertImmediate+EvWrite [GOOD] >> DataShardWrite::ExecSQLUpsertImmediate-EvWrite |67.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest >> JsonChangeRecord::Heartbeat [GOOD] |67.6%| [TA] $(B)/ydb/core/tx/schemeshard/ut_backup_collection/test-results/unittest/{meta.json ... results_accumulator.log} |67.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest >> JsonChangeRecord::DataChangeVersion [GOOD] |67.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut |67.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut |67.6%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/test-results/unittest/{meta.json ... 
results_accumulator.log} |67.6%| [LD] {RESULT} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut |67.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest |67.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest >> TopicAutoscaling::ReadingAfterSplitTest_PQv1 [GOOD] >> TopicAutoscaling::ReadingAfterSplitTest_PreferedPartition_BeforeAutoscaleAwareSDK |67.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest >> JsonChangeRecord::DataChangeVersion [GOOD] |67.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest |67.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest >> KqpNotNullColumns::AlterAddIndex [GOOD] >> KqpNewEngine::PushPureFlatmapInnerConnectionsToStage [GOOD] |67.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest |67.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest >> KqpSort::ComplexPkInclusiveSecondOptionalPredicate [GOOD] >> KqpSort::Offset >> TopicAutoscaling::PartitionSplit_ManySession_PQv1 [GOOD] >> TopicAutoscaling::PartitionSplit_ManySession_AutoscaleAwareSDK |67.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest >> TraverseColumnShard::TraverseColumnTable [GOOD] |67.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest |67.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest |67.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest |67.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::PushPureFlatmapInnerConnectionsToStage [GOOD] Test command err: Trying to start YDB, gRPC: 18812, MsgBus: 20484 2025-03-31T15:55:46.166027Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488002951814243078:2135];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:55:46.253461Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000adc/r3tmp/tmp8d6zUn/pdisk_1.dat 2025-03-31T15:55:49.055503Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:55:49.267064Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:55:49.285250Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:55:49.285345Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:55:49.287977Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18812, node 1 2025-03-31T15:55:49.555605Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2025-03-31T15:55:49.555631Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:55:49.555638Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:55:49.555732Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20484 2025-03-31T15:55:51.095862Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488002951814243078:2135];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:55:51.096222Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:20484 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:55:53.089528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:55:53.118840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:55:53.670419Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:55:54.552025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:55:55.528181Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:56:03.228687Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003024828688907:2422], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:56:03.228778Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:56:04.249459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T15:56:04.249496Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:56:04.273040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T15:56:04.330162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T15:56:04.390327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T15:56:04.442413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T15:56:04.521587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T15:56:04.664619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T15:56:04.915465Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003029123656753:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:56:04.915555Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:56:04.916702Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003029123656758:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:56:04.938187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T15:56:05.155534Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-31T15:56:05.156589Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488003029123656760:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T15:56:05.273875Z node 1 :TX_PROXY ERROR: Actor# [1:7488003033418624119:3530] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 17271, MsgBus: 12098 2025-03-31T15:56:12.624870Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488003062762334140:2210];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000adc/r3tmp/tmprYVlHD/pdisk_1.dat 2025-03-31T15:56:12.729170Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T15:56:12.835450Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:56:12.854180Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:56:12.854414Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:56:12.861049Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17271, node 2 2025-03-31T15:56:13.067568Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:56:13.067594Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:56:13.067605Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:56:13.067745Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12098 TClient is connected to server localhost:12098 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:56:14.550422Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:56:14.794905Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T15:56:15.433093Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2 ... Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:57:38.940553Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488003432855790135:2414], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:38.940640Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:39.027442Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T15:57:39.091274Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T15:57:39.151379Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T15:57:39.209875Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T15:57:39.264335Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T15:57:39.328360Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T15:57:39.435781Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488003437150757947:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:39.435889Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:39.436366Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488003437150757952:2467], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:39.442522Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T15:57:39.459247Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7488003437150757954:2468], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T15:57:39.515437Z node 6 :TX_PROXY ERROR: Actor# [6:7488003437150758008:3457] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 17769, MsgBus: 23091 2025-03-31T15:57:48.719653Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7488003472002963545:2205];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:57:48.832512Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000adc/r3tmp/tmpcGNKHL/pdisk_1.dat 2025-03-31T15:57:49.261135Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:57:49.300479Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:57:49.300594Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:57:49.316339Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17769, node 7 2025-03-31T15:57:49.530309Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:57:49.530336Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:57:49.530346Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:57:49.530499Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23091 TClient is connected to server localhost:23091 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:57:50.462143Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T15:57:50.473024Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T15:57:50.492561Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:57:50.636015Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:57:50.959395Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:57:51.108353Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:57:53.719240Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7488003472002963545:2205];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:57:53.719352Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:57:54.881112Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488003497772768941:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:54.881223Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:54.959678Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T15:57:55.039274Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T15:57:55.078617Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T15:57:55.129402Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T15:57:55.173913Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T15:57:55.222014Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T15:57:55.323869Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488003502067736756:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:55.323977Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:55.324253Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488003502067736761:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:55.329128Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T15:57:55.357217Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7488003502067736763:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T15:57:55.426985Z node 7 :TX_PROXY ERROR: Actor# [7:7488003502067736819:3458] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |67.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut |67.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut |67.7%| [LD] {RESULT} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut >> DataShardWrite::UpsertImmediateManyColumns [GOOD] >> DataShardWrite::ReplaceImmediate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNotNullColumns::AlterAddIndex [GOOD] Test command err: Trying to start YDB, gRPC: 9942, MsgBus: 29696 2025-03-31T15:55:53.562449Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488002978076690091:2125];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:55:53.562477Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ad2/r3tmp/tmpnXUBDH/pdisk_1.dat 2025-03-31T15:55:56.225287Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:55:56.966561Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:55:57.142838Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:55:57.143170Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:55:57.187007Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9942, node 1 2025-03-31T15:55:58.567751Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488002978076690091:2125];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:55:59.810500Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:56:00.777865Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:56:00.778150Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:56:00.778157Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:56:00.778252Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29696 TClient is connected to server localhost:29696 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:56:04.653795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:56:04.691201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:56:04.852626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T15:56:05.030584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T15:56:05.110746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:56:11.955253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T15:56:11.955293Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:56:12.054331Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003059681070523:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:56:12.060125Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:56:13.065409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T15:56:13.317943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T15:56:13.390774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T15:56:13.513065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T15:56:13.681927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T15:56:14.200480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T15:56:14.737557Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003068271005672:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:56:14.737643Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:56:14.738046Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003068271005677:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:56:14.746630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T15:56:14.774815Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-31T15:56:14.776857Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488003068271005679:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T15:56:14.891701Z node 1 :TX_PROXY ERROR: Actor# [1:7488003068271005736:3520] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 19646, MsgBus: 8860 2025-03-31T15:56:25.630817Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488003118830335105:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:56:25.713620Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ad2/r3tmp/tmpAmm6cI/pdisk_1.dat 2025-03-31T15:56:26.244777Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:56:26.476424Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:56:26.476584Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:56:26.539363Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19646, node 2 2025-03-31T15:56:27.466064Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:56:27.466348Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:56:27.466357Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:56:27.467029Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T15:56:30.604698Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488003118830335105:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:56:30.604786Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:8860 TClient is connected to server localhost:8860 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T15:56:33.431304Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:56:33.664806Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part prop ... hemeshard: 72057594046644480 2025-03-31T15:57:41.326695Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T15:57:41.621342Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T15:57:41.691052Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T15:57:41.786806Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T15:57:41.902653Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T15:57:42.407267Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7488003446539401462:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:42.407519Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:42.408227Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7488003446539401470:2467], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:42.412422Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T15:57:42.503308Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7488003446539401472:2468], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T15:57:42.610436Z node 5 :TX_PROXY ERROR: Actor# [5:7488003446539401530:3486] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:57:46.554495Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 61841, MsgBus: 14175 2025-03-31T15:57:49.706532Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7488003479370281830:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:57:49.706583Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ad2/r3tmp/tmpZ2tF4j/pdisk_1.dat 2025-03-31T15:57:49.975400Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:57:49.976902Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:57:49.977012Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:57:49.984733Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61841, node 6 2025-03-31T15:57:50.167860Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:57:50.167888Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:57:50.167898Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:57:50.168069Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14175 TClient is connected to server localhost:14175 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:57:51.087227Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T15:57:51.097072Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T15:57:51.111358Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:57:51.253477Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:57:51.728411Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:57:51.839506Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:57:54.707027Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7488003479370281830:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:57:54.716626Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:57:55.106991Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488003505140087386:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:55.107131Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:55.172445Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T15:57:55.245869Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T15:57:55.302046Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T15:57:55.385056Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T15:57:55.473937Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T15:57:55.594481Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T15:57:55.777148Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488003505140087921:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:55.777275Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:55.777745Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488003505140087926:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:55.784228Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T15:57:55.812374Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7488003505140087928:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T15:57:55.902854Z node 6 :TX_PROXY ERROR: Actor# [6:7488003505140087985:3471] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:57:58.083520Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T15:57:58.291400Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-03-31T15:57:58.403659Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 |67.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest >> PersQueueSdkReadSessionTest::SettingsValidation [GOOD] >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds >> TMonitoringTests::ValidActorId >> TMonitoringTests::ValidActorId [GOOD] >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeAggregate [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTable [GOOD] Test command err: 2025-03-31T15:54:28.098079Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:54:28.098385Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T15:54:28.098491Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000fb6/r3tmp/tmpVbQKkb/pdisk_1.dat 2025-03-31T15:54:29.292123Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29578, node 1 2025-03-31T15:54:29.687023Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:54:29.687113Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:54:29.687155Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:54:29.690787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T15:54:29.691358Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T15:54:29.789683Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:29.790863Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:29.822805Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26385 2025-03-31T15:54:32.836117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T15:54:54.635744Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-31T15:54:54.869397Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:54.869499Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:54.944421Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T15:54:54.958325Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:54:56.160645Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:56.163694Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:56.165880Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:56.166008Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:56.166879Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:56.166943Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:56.167257Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:56.167706Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:56.167764Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:56.487826Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:56.488395Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:56.520437Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:54:57.262946Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:54:57.426325Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-31T15:54:57.426433Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-31T15:54:57.581001Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-31T15:54:57.582351Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-31T15:54:57.582570Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-31T15:54:57.582627Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-31T15:54:57.582678Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-31T15:54:57.582741Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-31T15:54:57.582803Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-31T15:54:57.582859Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-31T15:54:57.592082Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-31T15:54:57.690909Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-31T15:54:57.691020Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1876:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-31T15:54:57.736502Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1890:2608] 2025-03-31T15:54:57.738621Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1908:2618] 2025-03-31T15:54:57.738724Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1908:2618], schemeshard id = 72075186224037897 2025-03-31T15:54:57.757083Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-31T15:54:57.778451Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-31T15:54:57.778497Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-31T15:54:57.778557Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-31T15:54:57.789775Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-31T15:54:57.797105Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-31T15:54:57.797247Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-31T15:54:58.048794Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-31T15:54:58.293372Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-31T15:54:58.384613Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-31T15:54:59.791999Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2247:3079], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:54:59.792135Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:54:59.815954Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-31T15:55:00.287282Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2397:2886];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T15:55:00.287567Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2397:2886];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T15:55:00.287995Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2397:2886];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T15:55:00.288185Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2397:2886];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T15:55:00.288337Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2397:2886];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T15:55:00.288476Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2397:2886];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T15:55:00.288733Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2397:2886];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T15:55:00.288880Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2397:2886];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T15:55:00.289049Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2397:2886];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T15:55:00.289192Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2397:2886];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T15:55:00.289348Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2397:2886];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T15:55:00.289490Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2397:2886];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T15:55:00.361749Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2404:2890];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T15:55:00.361888Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2404:2890];tablet_id=72075186224037900;process= ... ode 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-03-31T15:57:56.347716Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-03-31T15:57:56.428459Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-03-31T15:57:56.428544Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-03-31T15:57:56.515787Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:8449:6421], schemeshard count = 1 2025-03-31T15:57:58.820075Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T15:57:58.820144Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-31T15:57:58.820178Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-03-31T15:57:58.820222Z node 2 :STATISTICS DEBUG: [72075186224037894] Start schedule traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-31T15:57:58.866795Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-31T15:57:58.892712Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-31T15:57:58.893315Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-31T15:57:58.893399Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-31T15:57:58.894326Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-03-31T15:57:58.912141Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-03-31T15:57:58.912401Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-03-31T15:57:58.913469Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8572:6488], server id = [2:8577:6493], tablet id = 72075186224037899, status = OK 2025-03-31T15:57:58.913962Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8572:6488], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T15:57:58.914340Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8573:6489], server id = [2:8578:6494], tablet id = 72075186224037900, status = OK 2025-03-31T15:57:58.914396Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8573:6489], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T15:57:58.914554Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8574:6490], server id = [2:8579:6495], tablet id = 72075186224037901, status = OK 2025-03-31T15:57:58.914596Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8574:6490], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T15:57:58.920845Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8575:6491], server id = [2:8580:6496], tablet id = 72075186224037902, status = OK 2025-03-31T15:57:58.920953Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8575:6491], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T15:57:58.928406Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8576:6492], server id = [2:8581:6497], tablet id = 72075186224037903, status = OK 2025-03-31T15:57:58.928524Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8576:6492], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T15:57:58.934517Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-03-31T15:57:58.935057Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8572:6488], server id = [2:8577:6493], tablet id = 72075186224037899 2025-03-31T15:57:58.935659Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:57:58.937605Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-03-31T15:57:58.938271Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8573:6489], server id = [2:8578:6494], tablet id = 72075186224037900 2025-03-31T15:57:58.938305Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:57:58.939049Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8595:6508], server id = [2:8598:6509], tablet id = 72075186224037904, status = OK 2025-03-31T15:57:58.939176Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8595:6508], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T15:57:58.939542Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-03-31T15:57:58.940471Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8574:6490], server id = [2:8579:6495], tablet id = 72075186224037901 2025-03-31T15:57:58.940499Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:57:58.941725Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-03-31T15:57:58.942671Z node 2 :STATISTICS DEBUG: 
EvClientConnected, node id = 2, client id = [2:8599:6510], server id = [2:8600:6511], tablet id = 72075186224037905, status = OK 2025-03-31T15:57:58.942755Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8599:6510], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T15:57:58.943230Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8575:6491], server id = [2:8580:6496], tablet id = 72075186224037902 2025-03-31T15:57:58.943263Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:57:58.943996Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2025-03-31T15:57:58.944370Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8601:6512], server id = [2:8605:6516], tablet id = 72075186224037906, status = OK 2025-03-31T15:57:58.944442Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8601:6512], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T15:57:58.945187Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8576:6492], server id = [2:8581:6497], tablet id = 72075186224037903 2025-03-31T15:57:58.945217Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:57:58.946372Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8603:6514], server id = [2:8606:6517], tablet id = 72075186224037907, status = OK 2025-03-31T15:57:58.946449Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8603:6514], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T15:57:58.947015Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8608:6519], server id = [2:8611:6521], tablet id = 72075186224037908, status = OK 2025-03-31T15:57:58.947320Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8608:6519], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T15:57:58.950359Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-03-31T15:57:58.951523Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8595:6508], server id = [2:8598:6509], tablet id = 72075186224037904 2025-03-31T15:57:58.951561Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:57:58.952482Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-03-31T15:57:58.952825Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8599:6510], server id = [2:8600:6511], tablet id = 72075186224037905 2025-03-31T15:57:58.952855Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:57:58.953987Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-03-31T15:57:58.954326Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8601:6512], server id = [2:8605:6516], tablet id = 72075186224037906 2025-03-31T15:57:58.954354Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:57:58.956893Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-03-31T15:57:58.957202Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8603:6514], server id = [2:8606:6517], tablet id = 72075186224037907 2025-03-31T15:57:58.957232Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:57:58.957546Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-03-31T15:57:58.957597Z node 2 :STATISTICS DEBUG: Send aggregate statistics response 
to node: 2 2025-03-31T15:57:58.957974Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-03-31T15:57:58.958215Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-03-31T15:57:58.958529Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-31T15:57:58.961302Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8608:6519], server id = [2:8611:6521], tablet id = 72075186224037908 2025-03-31T15:57:58.961337Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:57:58.962161Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-03-31T15:57:59.000029Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8638:6544]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T15:57:59.000197Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-31T15:57:59.000234Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8638:6544], StatRequests.size() = 1 2025-03-31T15:57:59.156062Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YTdjMjFiZDYtNWJmZTY0ZmUtYWI1ZTI0NTgtNjk4ZTYzOTI=, TxId: 2025-03-31T15:57:59.156123Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YTdjMjFiZDYtNWJmZTY0ZmUtYWI1ZTI0NTgtNjk4ZTYzOTI=, TxId: ... 
waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2025-03-31T15:57:59.156819Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:8653:6550]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-03-31T15:57:59.157046Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-31T15:57:59.157453Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-03-31T15:57:59.157522Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-03-31T15:57:59.160630Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-03-31T15:57:59.160694Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-03-31T15:57:59.160748Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-03-31T15:57:59.169226Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 >> KqpNewEngine::JoinWithPrecompute [GOOD] >> KqpNewEngine::LiteralKeys >> TTxDataShardMiniKQL::CrossShard_3_AllToOne [GOOD] >> TTxDataShardMiniKQL::CrossShard_4_OneToAll |67.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest |67.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest >> TMonitoringTests::ValidActorId [GOOD] >> TMonitoringTests::InvalidActorId >> DataShardWrite::UpsertPrepared-Volatile [GOOD] >> DataShardWrite::UpsertNoLocksArbiter >> TMonitoringTests::InvalidActorId [GOOD] >> KqpScanArrowFormat::AggregateWithFunction [GOOD] |67.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest |67.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/sharding/ut/unittest >> Sharding::XXUsage >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeResolve [GOOD] >> Sharding::XXUsage [GOOD] |67.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/sharding/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeAggregate [GOOD] Test command err: 2025-03-31T15:54:30.990412Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:54:30.991967Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T15:54:30.992199Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ea8/r3tmp/tmpGRi7u1/pdisk_1.dat 2025-03-31T15:54:35.995230Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:54:37.067548Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.128701s 2025-03-31T15:54:37.068336Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.129142s TServer::EnableGrpc on GrpcPort 22027, node 1 2025-03-31T15:54:39.476249Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:54:39.487376Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:54:39.495606Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:54:39.532093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T15:54:39.533637Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T15:54:39.993101Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:39.993851Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:40.050251Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30066 2025-03-31T15:54:42.966052Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T15:54:58.477946Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-31T15:54:58.525027Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:58.525138Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:58.561342Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T15:54:58.566707Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:54:58.856725Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:58.857298Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:58.857856Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:58.857985Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 
2025-03-31T15:54:58.858210Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:58.858287Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:58.858364Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:58.858439Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:58.858511Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:59.043016Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:59.043137Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:59.061320Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:54:59.252412Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:54:59.355412Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-31T15:54:59.355525Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-31T15:54:59.460745Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-31T15:54:59.473226Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-31T15:54:59.473467Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-31T15:54:59.473525Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-31T15:54:59.473582Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-31T15:54:59.473641Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-31T15:54:59.473693Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-31T15:54:59.473746Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-31T15:54:59.479640Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-31T15:54:59.543550Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-31T15:54:59.543659Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1877:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-31T15:54:59.558111Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1888:2606] 2025-03-31T15:54:59.577111Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1918:2621] 2025-03-31T15:54:59.579573Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1918:2621], schemeshard id = 72075186224037897 2025-03-31T15:54:59.591399Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-31T15:54:59.610176Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-31T15:54:59.610233Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-31T15:54:59.610299Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
Full table path:/Root/Database/.metadata/_statistics 2025-03-31T15:54:59.631189Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-31T15:54:59.638981Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-31T15:54:59.639286Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-31T15:54:59.902511Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-31T15:55:00.352568Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-31T15:55:00.446099Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-31T15:55:03.730746Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2240:3075], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:55:03.730862Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:55:03.832901Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-31T15:55:05.897681Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2384:2881];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T15:55:05.897910Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2384:2881];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T15:55:05.898241Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2384:2881];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T15:55:05.898403Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2384:2881];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T15:55:05.898530Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2384:2881];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T15:55:05.898666Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2384:2881];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T15:55:05.898789Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2384:2881];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T15:55:05.898950Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2384:2881];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T15:55:05.899156Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2384:2881];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T15:55:05.900438Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2384:2881];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T15:55:05.900811Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2384:2881];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T15:55:05.901149Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2384:2881];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T15:55:06.450449Z node 2 :TX_COL ... 
[72075186224037894] Subscribed for config changes 2025-03-31T15:58:00.012276Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-31T15:58:00.012354Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-31T15:58:00.012513Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-31T15:58:00.013957Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-31T15:58:00.014033Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-31T15:58:00.017517Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-03-31T15:58:00.046332Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-03-31T15:58:00.046536Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-03-31T15:58:00.047665Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8676:6539], server id = [2:8681:6544], tablet id = 72075186224037899, status = OK 2025-03-31T15:58:00.048114Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8676:6539], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T15:58:00.048342Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8677:6540], server id = [2:8682:6545], tablet id = 72075186224037900, status = OK 2025-03-31T15:58:00.048391Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8677:6540], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T15:58:00.055527Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8678:6541], server id = [2:8683:6546], tablet id = 72075186224037901, status = OK 2025-03-31T15:58:00.055640Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8678:6541], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T15:58:00.055996Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8679:6542], server id = [2:8684:6547], tablet id = 72075186224037902, status = OK 2025-03-31T15:58:00.056057Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8679:6542], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T15:58:00.057339Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8680:6543], server id = [2:8687:6550], tablet id = 72075186224037903, status = OK 2025-03-31T15:58:00.057401Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8680:6543], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T15:58:00.074972Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-03-31T15:58:00.077129Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8676:6539], server id = [2:8681:6544], tablet id = 72075186224037899 2025-03-31T15:58:00.077184Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:58:00.077727Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-03-31T15:58:00.078223Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8677:6540], server id = [2:8682:6545], tablet id = 72075186224037900 2025-03-31T15:58:00.078252Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:58:00.079058Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-03-31T15:58:00.079583Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = 
[2:8700:6559], server id = [2:8702:6561], tablet id = 72075186224037904, status = OK 2025-03-31T15:58:00.079690Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8700:6559], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T15:58:00.080389Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8678:6541], server id = [2:8683:6546], tablet id = 72075186224037901 2025-03-31T15:58:00.080419Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:58:00.094232Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-03-31T15:58:00.094928Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8701:6560], server id = [2:8705:6563], tablet id = 72075186224037905, status = OK 2025-03-31T15:58:00.095027Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8701:6560], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T15:58:00.095751Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8679:6542], server id = [2:8684:6547], tablet id = 72075186224037902 2025-03-31T15:58:00.095782Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:58:00.096674Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8704:6562], server id = [2:8706:6564], tablet id = 72075186224037906, status = OK 2025-03-31T15:58:00.096768Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8704:6562], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T15:58:00.097259Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2025-03-31T15:58:00.097564Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8708:6566], server id = [2:8710:6568], tablet id = 72075186224037907, status = OK 2025-03-31T15:58:00.097626Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8708:6566], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T15:58:00.103637Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8680:6543], server id = [2:8687:6550], tablet id = 72075186224037903 2025-03-31T15:58:00.103687Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:58:00.112326Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8713:6571], server id = [2:8717:6574], tablet id = 72075186224037908, status = OK 2025-03-31T15:58:00.112436Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8713:6571], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T15:58:00.115377Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-03-31T15:58:00.115836Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8700:6559], server id = [2:8702:6561], tablet id = 72075186224037904 2025-03-31T15:58:00.115868Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:58:00.116962Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-03-31T15:58:00.117385Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8701:6560], server id = [2:8705:6563], tablet id = 72075186224037905 2025-03-31T15:58:00.117411Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:58:00.118633Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-03-31T15:58:00.127459Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8704:6562], server id = [2:8706:6564], tablet id = 
72075186224037906 2025-03-31T15:58:00.127538Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:58:00.128839Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-03-31T15:58:00.129276Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8708:6566], server id = [2:8710:6568], tablet id = 72075186224037907 2025-03-31T15:58:00.129310Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:58:00.129503Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-03-31T15:58:00.129561Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-03-31T15:58:00.129796Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-03-31T15:58:00.130048Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-03-31T15:58:00.130370Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-31T15:58:00.139317Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8713:6571], server id = [2:8717:6574], tablet id = 72075186224037908 2025-03-31T15:58:00.139362Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:58:00.140236Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-03-31T15:58:00.235675Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8742:6595]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T15:58:00.235942Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-31T15:58:00.235997Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8742:6595], StatRequests.size() = 1 2025-03-31T15:58:00.422015Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MTJhOWNmOGMtNWE0N2E3NzItMWZlMDcxYzItMjNlZDM3Y2I=, TxId: 2025-03-31T15:58:00.422095Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MTJhOWNmOGMtNWE0N2E3NzItMWZlMDcxYzItMjNlZDM3Y2I=, TxId: 2025-03-31T15:58:00.422690Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-31T15:58:00.469514Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:8757:6601] 2025-03-31T15:58:00.469628Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8628:6507], server id = [2:8757:6601], tablet id = 72075186224037894, status = OK 2025-03-31T15:58:00.469757Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:8757:6601], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2025-03-31T15:58:00.469917Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:8758:6602] 2025-03-31T15:58:00.469976Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:8758:6602], schemeshard id = 72075186224037897 2025-03-31T15:58:00.492192Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 
72075186224037897, LocalPathId: 4] 2025-03-31T15:58:00.492259Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-03-31T15:58:00.595993Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:8761:6605]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-03-31T15:58:00.596347Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-03-31T15:58:00.596411Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-03-31T15:58:00.604066Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-03-31T15:58:00.604145Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-03-31T15:58:00.604218Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-03-31T15:58:00.615038Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 >> KqpSort::OffsetTopSort [GOOD] |67.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest >> TMonitoringTests::InvalidActorId [GOOD] |67.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/sharding/ut/unittest |67.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/sharding/ut/unittest |67.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration |67.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration |67.8%| [LD] {RESULT} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration |67.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest >> DataShardWrite::ExecSQLUpsertImmediate-EvWrite [GOOD] >> DataShardWrite::DeleteImmediate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/sharding/ut/unittest >> Sharding::XXUsage [GOOD] Test command err: 7451285447589077808 17808594593788185993 10944935006478182815 5941856054030822537 6031520052502783238 2765214394246177218 1311921178167086332 15404006430726745109 4617505701166496830 6739847327013073571 3389491583452315406 10967128052156919922 4354540708370266308 217944355098012912 442644803165056643 12276041200047230607 9207264489128778356 6975613627754040892 1393822587118415607 5160224836972034992 12992238638986043772 2330340733998803403 2200160695866749076 2873588125745766680 18047878607436588722 2678937815549487527 11514728338799462499 6850501273568631881 5151612412463646526 3597941655624072609 15154266338359012955 15764659543420427081 11803386456160030445 3723010118550879738 801471413722897402 9565093493692160843 13369574294043545703 1950265589815411971 3593991692366247985 6477278872921756277 7814970665086387503 3076044373564172175 10584916085439369741 4256024384536586437 5690004793092126745 12849683926217275398 14511162938199756225 9887827604226639418 10757372809913262848 977309509925676149 8204160480122610995 9125453610741351068 14151944478249680699 7471571670220387124 4014816359705197970 13934702060906842656 1795665700167899428 15790472876969070676 17924289898753511216 14229941137048340479 3223267035548916695 7982355925785641979 11898903236106976124 212528521105333414 16219351537615621486 12724398028810305979 
16307382979232503490 15327379767446107099 5823256838764979131 567535117756248905 15747852852762405617 15386902532357083359 2152640174463800385 14092040915933674118 6231013156127118846 9620652724300068032 6609798029301563141 332604207191913545 13480508133858506677 2317409710367996766 6579032219474710395 10138167906173379867 17139954569324290027 525509792554403586 2706235307396556222 13115875832393642414 7652714621264686664 6195069027834983051 1053271855376941536 489213103154041830 >> TraverseColumnShard::TraverseColumnTableAggrStatUnavailableNode [GOOD] |67.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest >> TraverseColumnShard::TraverseServerlessColumnTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeResolve [GOOD] Test command err: 2025-03-31T15:54:32.790600Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:54:32.792084Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T15:54:32.792350Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000e71/r3tmp/tmpDwafPU/pdisk_1.dat 2025-03-31T15:54:37.381848Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20598, node 1 2025-03-31T15:54:41.195612Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:54:41.195887Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:54:41.196140Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:54:41.279494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T15:54:41.280009Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T15:54:41.640832Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:41.642066Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:41.695679Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13141 2025-03-31T15:54:44.972817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T15:55:00.342233Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-31T15:55:00.482362Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:55:00.482682Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:55:00.545477Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T15:55:00.561265Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:55:01.797832Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:55:01.813178Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:55:01.840156Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:55:01.840335Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:55:01.840798Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:55:01.841129Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:55:01.841441Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:55:01.841951Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:55:01.842034Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:55:02.451761Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:55:02.451860Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:55:02.496220Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:55:04.566153Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:55:05.133750Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-31T15:55:05.134117Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-31T15:55:05.436967Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-31T15:55:05.444932Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-31T15:55:05.447639Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-31T15:55:05.448463Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-31T15:55:05.449524Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-31T15:55:05.449884Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-31T15:55:05.450398Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-31T15:55:05.450949Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-31T15:55:05.464846Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-31T15:55:05.617018Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-31T15:55:05.617527Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1872:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-31T15:55:05.716467Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2608] 2025-03-31T15:55:05.845135Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1908:2617] 2025-03-31T15:55:05.845908Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1908:2617], schemeshard id = 72075186224037897 2025-03-31T15:55:05.863692Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-31T15:55:06.056248Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-31T15:55:06.056527Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-31T15:55:06.057307Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-31T15:55:06.336812Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-31T15:55:06.436889Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-31T15:55:06.437487Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-31T15:55:06.881695Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-31T15:55:07.699712Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-31T15:55:07.850113Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-31T15:55:10.480214Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2240:3073], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:55:10.480364Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:55:10.505766Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-31T15:55:10.947790Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T15:55:10.948079Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T15:55:10.948364Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T15:55:10.948505Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T15:55:10.948639Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T15:55:10.948849Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T15:55:10.948995Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T15:55:10.949146Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T15:55:10.949271Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T15:55:10.949406Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T15:55:10.949539Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T15:55:10.949669Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T15:55:11.009523Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2397:2889];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T15:55:11.009636Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2397:2889];tablet_id=72075186224037900;process= ... ientDestroyed, node id = 2, client id = [2:8570:6492], server id = [2:8575:6497], tablet id = 72075186224037901 2025-03-31T15:58:01.216598Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:58:01.217002Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8591:6510], server id = [2:8595:6512], tablet id = 72075186224037904, status = OK 2025-03-31T15:58:01.217085Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8591:6510], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T15:58:01.217451Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8593:6511], server id = [2:8597:6514], tablet id = 72075186224037905, status = OK 2025-03-31T15:58:01.217507Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8593:6511], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T15:58:01.217985Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-03-31T15:58:01.223417Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8571:6493], server id = [2:8576:6498], tablet id = 72075186224037902 2025-03-31T15:58:01.223467Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:58:01.224397Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8596:6513], server id = [2:8598:6515], tablet id = 72075186224037906, status = OK 2025-03-31T15:58:01.224485Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8596:6513], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T15:58:01.224917Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2025-03-31T15:58:01.225595Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8572:6494], server id = [2:8577:6499], tablet id = 72075186224037903 2025-03-31T15:58:01.225627Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:58:01.226320Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8599:6516], server id = [2:8603:6520], tablet id = 72075186224037907, status = OK 2025-03-31T15:58:01.226411Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8599:6516], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T15:58:01.233097Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8604:6521], server id = [2:8609:6525], tablet id = 72075186224037908, status = OK 2025-03-31T15:58:01.233198Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8604:6521], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T15:58:01.235846Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-03-31T15:58:01.236484Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8591:6510], server id = [2:8595:6512], tablet id = 72075186224037904 2025-03-31T15:58:01.236518Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:58:01.237928Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-03-31T15:58:01.238572Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8593:6511], server id = [2:8597:6514], tablet id 
= 72075186224037905 2025-03-31T15:58:01.238605Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:58:01.239408Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-03-31T15:58:01.240094Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8596:6513], server id = [2:8598:6515], tablet id = 72075186224037906 2025-03-31T15:58:01.240125Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:58:01.240916Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-03-31T15:58:01.241239Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8599:6516], server id = [2:8603:6520], tablet id = 72075186224037907 2025-03-31T15:58:01.241269Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:58:01.241580Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-03-31T15:58:01.241637Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-03-31T15:58:01.241819Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-03-31T15:58:01.242063Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-03-31T15:58:01.242473Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-31T15:58:01.245555Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8604:6521], server id = [2:8609:6525], tablet id = 72075186224037908 2025-03-31T15:58:01.245599Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:58:01.246426Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List<Uint32>; DECLARE $data AS List<String>; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-03-31T15:58:01.326583Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8634:6546]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T15:58:01.326878Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-31T15:58:01.326941Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8634:6546], StatRequests.size() = 1 ... blocking NKikimr::TEvTxProxySchemeCache::TEvResolveKeySetResult from to KQP_TABLE_RESOLVER ... 
waiting for 3rd TEvResolveKeySetResult (done) 2025-03-31T15:58:01.439812Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8445:6423], server id = [2:8446:6424], tablet id = 72075186224037894 2025-03-31T15:58:01.440114Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:8640:6549] 2025-03-31T15:58:01.440237Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:8640:6549] 2025-03-31T15:58:01.441002Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:8641:6550], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-31T15:58:01.558306Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-31T15:58:01.558444Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-31T15:58:01.559204Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-31T15:58:01.560249Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-31T15:58:01.560632Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded database: /Root/Database 2025-03-31T15:58:01.560686Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal start key 2025-03-31T15:58:01.560733Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal table owner id: 72075186224037897 2025-03-31T15:58:01.560776Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal table local path id: 4 2025-03-31T15:58:01.560821Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal start time: 1743436681116197 2025-03-31T15:58:01.560867Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal IsColumnTable: 1 2025-03-31T15:58:01.560919Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded global traversal round: 2 2025-03-31T15:58:01.561033Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 1 2025-03-31T15:58:01.561102Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-31T15:58:01.561219Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 2 2025-03-31T15:58:01.561300Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-31T15:58:01.561364Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-31T15:58:01.561439Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-31T15:58:01.561617Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete. Start navigate. PathId [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-31T15:58:01.562718Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-31T15:58:01.575151Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-31T15:58:01.575274Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-31T15:58:01.575471Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing ... 
blocking NKikimr::TEvTxProxySchemeCache::TEvResolveKeySetResult from to STATISTICS_AGGREGATOR 2025-03-31T15:58:01.655657Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:8689:6579] 2025-03-31T15:58:01.655825Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8640:6549], server id = [2:8689:6579], tablet id = 72075186224037894, status = OK 2025-03-31T15:58:01.656036Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:8689:6579], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2025-03-31T15:58:01.656269Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:8690:6580] 2025-03-31T15:58:01.656392Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:8690:6580], schemeshard id = 72075186224037897 ... unblocking NKikimr::TEvTxProxySchemeCache::TEvResolveKeySetResult from to KQP_TABLE_RESOLVER ... unblocking NKikimr::TEvTxProxySchemeCache::TEvResolveKeySetResult from to STATISTICS_AGGREGATOR ... waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse 2025-03-31T15:58:01.796387Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-31T15:58:01.796509Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-31T15:58:01.800566Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-03-31T15:58:01.836238Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MTg0NGIwYzktMmQ2MzdiYTAtYzA5YmM5N2MtOWJjZTFiZDI=, TxId: 2025-03-31T15:58:01.836335Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MTg0NGIwYzktMmQ2MzdiYTAtYzA5YmM5N2MtOWJjZTFiZDI=, TxId: ... 
waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2025-03-31T15:58:01.837146Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:8698:6584]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-03-31T15:58:01.837402Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-03-31T15:58:01.837463Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-03-31T15:58:01.842363Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-03-31T15:58:01.842452Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-03-31T15:58:01.842522Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-03-31T15:58:01.854836Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/arrow/unittest >> KqpScanArrowFormat::AggregateWithFunction [GOOD] Test command err: Trying to start YDB, gRPC: 13092, MsgBus: 23898 2025-03-31T15:56:50.070609Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488003222954992376:2069];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:56:50.070675Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0017e5/r3tmp/tmpms4Tfs/pdisk_1.dat 2025-03-31T15:56:50.724935Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:56:50.744399Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:56:50.744509Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:56:50.748985Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13092, node 1 2025-03-31T15:56:50.852574Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:56:50.852611Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:56:50.852619Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:56:50.852724Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23898 TClient is connected to server localhost:23898 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:56:51.847018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:56:51.886199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:56:52.082063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:56:52.367304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:56:52.478982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:56:55.196425Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488003222954992376:2069];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:56:55.234988Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:57:01.176861Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003270199634449:2413], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:01.176955Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:01.833756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T15:57:01.887388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T15:57:01.929612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T15:57:01.977018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T15:57:02.061414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T15:57:02.206680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T15:57:02.631359Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003274494602311:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:02.632033Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:02.639378Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003274494602316:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:02.661952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T15:57:02.795866Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488003274494602318:2482], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T15:57:02.879188Z node 1 :TX_PROXY ERROR: Actor# [1:7488003274494602383:3517] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:57:05.740124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T15:57:05.740493Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:57:06.165867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T15:57:10.465741Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743436630488, txId: 281474976710673] shutting down 864000000000 Trying to start YDB, gRPC: 18415, MsgBus: 26313 2025-03-31T15:57:13.941655Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488003324539906624:2198];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:57:13.974694Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0017e5/r3tmp/tmpeOG3O2/pdisk_1.dat 2025-03-31T15:57:14.085540Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:57:14.110475Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:57:14.110554Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:57:14.112392Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18415, node 2 2025-03-31T15:57:14.244544Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:57:14.244566Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:57:14.244574Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:57:14.244700Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26313 TClient is connected to server localhost:26313 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:57:15.008715Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:57:15.020574Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T15:57:15.037562Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:57:15.135438Z node 2 :FLAT_TX_S ... 6658146131:7762515]; 2025-03-31T15:57:40.140644Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:57:40.375439Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T15:57:40.484781Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T15:57:40.540527Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T15:57:40.586511Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T15:57:40.675565Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T15:57:40.795663Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T15:57:41.090125Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488003442907793723:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:41.090218Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:41.093659Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488003442907793728:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:41.112640Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T15:57:41.338708Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488003442907793730:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T15:57:41.438905Z node 3 :TX_PROXY ERROR: Actor# [3:7488003442907793805:3471] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:57:50.251226Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T15:57:50.251263Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:57:51.821318Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743436668309, txId: 281474976715671] shutting down Trying to start YDB, gRPC: 10766, MsgBus: 19939 2025-03-31T15:57:52.819895Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488003489570916955:2210];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:57:52.823786Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0017e5/r3tmp/tmpTWhkTc/pdisk_1.dat 2025-03-31T15:57:53.015616Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:57:53.048698Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:57:53.048807Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:57:53.051356Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10766, node 4 2025-03-31T15:57:53.267744Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:57:53.267773Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:57:53.267782Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:57:53.267944Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19939 TClient is connected to server localhost:19939 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T15:57:54.586954Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:57:54.596338Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T15:57:54.605971Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:57:54.703001Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:57:54.919954Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:57:55.013081Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:57:57.783240Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7488003489570916955:2210];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:57:57.783342Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:57:57.935593Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488003511045755060:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:57.935741Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:58.067251Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T15:57:58.146393Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T15:57:58.253931Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T15:57:58.353761Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T15:57:58.486577Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T15:57:58.585201Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T15:57:58.711040Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488003515340722877:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:58.711176Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:58.712605Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488003515340722882:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:58.718038Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T15:57:58.739984Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7488003515340722884:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T15:57:58.830476Z node 4 :TX_PROXY ERROR: Actor# [4:7488003515340722942:3465] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:58:02.004736Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743436681280, txId: 281474976715671] shutting down |67.8%| [TA] $(B)/ydb/core/tx/sharding/ut/test-results/unittest/{meta.json ... results_accumulator.log} |67.8%| [TA] $(B)/ydb/core/tx/scheme_board/ut_monitoring/test-results/unittest/{meta.json ... results_accumulator.log} |67.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest |67.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest |67.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest >> KqpScanArrowInChanels::AggregateByColumn [GOOD] >> KqpNotNullColumns::UpdateTable_UniqIndexPg [GOOD] >> KqpNotNullColumns::UpdateTable_Immediate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpSort::OffsetTopSort [GOOD] Test command err: Trying to start YDB, gRPC: 3164, MsgBus: 25185 2025-03-31T15:55:50.236453Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488002968770374508:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:55:50.237264Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000adb/r3tmp/tmpNNWA7H/pdisk_1.dat 2025-03-31T15:55:53.326630Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:55:53.822977Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:55:53.855940Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:55:53.856257Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:55:53.889303Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:55:54.483456Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 3164, node 1 2025-03-31T15:55:55.222015Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488002968770374508:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:55:55.222063Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:55:56.507359Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:55:56.507378Z node 1 :NET_CLASSIFIER 
WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:55:56.507384Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:55:56.507475Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25185 TClient is connected to server localhost:25185 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:56:03.376931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:56:03.408929Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T15:56:03.425664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T15:56:03.638466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-31T15:56:03.902950Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:56:03.990955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:56:07.814368Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003041784820360:2421], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:56:07.814462Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:56:08.816100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T15:56:08.816435Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:56:10.149869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T15:56:10.411727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T15:56:10.641696Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T15:56:11.123192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T15:56:11.400450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T15:56:11.653388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T15:56:11.931777Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003058964690126:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:56:11.931892Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:56:11.932342Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003058964690132:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:56:11.950483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T15:56:11.976116Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488003058964690134:2481], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T15:56:12.050105Z node 1 :TX_PROXY ERROR: Actor# [1:7488003063259657490:3510] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 25830, MsgBus: 3114 2025-03-31T15:56:20.530451Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488003094413667131:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:56:20.561350Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000adb/r3tmp/tmpcB0BRZ/pdisk_1.dat 2025-03-31T15:56:21.897988Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:56:22.747693Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:56:22.767403Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:56:22.768236Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:56:23.206480Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:56:23.207687Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 25830, node 2 2025-03-31T15:56:25.137921Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:56:25.137945Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:56:25.137953Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:56:25.138067Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T15:56:25.528547Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488003094413667131:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:56:25.528629Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:3114 TClient is connected to server localhost:3114 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 Sche ... 
ne=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7488003395066331225:2210];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:57:35.904808Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:57:38.806573Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7488003429426071226:2413], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:38.806675Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:38.879346Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T15:57:38.946006Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T15:57:39.006941Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T15:57:39.095385Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T15:57:39.165435Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T15:57:39.270194Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T15:57:39.376684Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7488003433721039043:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:39.376815Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:39.383307Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7488003433721039048:2467], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:39.389500Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T15:57:39.413902Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7488003433721039050:2468], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T15:57:39.492902Z node 5 :TX_PROXY ERROR: Actor# [5:7488003433721039105:3469] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 10432, MsgBus: 62112 2025-03-31T15:57:49.788513Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7488003478324457655:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:57:49.788579Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000adb/r3tmp/tmpnPRT0l/pdisk_1.dat 2025-03-31T15:57:50.196569Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:57:50.277961Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:57:50.278091Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:57:50.284504Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10432, node 6 2025-03-31T15:57:50.402749Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:57:50.402796Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:57:50.402809Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:57:50.402986Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62112 TClient is connected to server localhost:62112 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:57:51.274404Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:57:51.301992Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T15:57:51.432503Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:57:51.705274Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:57:51.876337Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:57:54.788939Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7488003478324457655:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:57:54.789047Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:57:56.398634Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488003508389230500:2410], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:56.398735Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:56.473055Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T15:57:56.535484Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T15:57:56.619493Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T15:57:56.715243Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T15:57:56.801192Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T15:57:56.904880Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T15:57:57.036256Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488003512684198337:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:57.036370Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:57.036753Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488003512684198342:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:57.041514Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T15:57:57.065239Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7488003512684198344:2466], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T15:57:57.160076Z node 6 :TX_PROXY ERROR: Actor# [6:7488003512684198409:3467] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> RemoteTopicReader::ReadTopic |67.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest |67.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableAggrStatUnavailableNode [GOOD] Test command err: 2025-03-31T15:54:41.761293Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:54:41.772066Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:41.772921Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000f13/r3tmp/tmppNkHkF/pdisk_1.dat 2025-03-31T15:54:45.444701Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11548, node 1 2025-03-31T15:54:46.801835Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:54:46.802066Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:54:46.802180Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:54:46.816254Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T15:54:46.847842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T15:54:47.044646Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:47.051555Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:47.078533Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11369 2025-03-31T15:54:50.509296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T15:54:59.742379Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-31T15:54:59.836830Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:59.837144Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:59.874211Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T15:54:59.876416Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:55:00.322617Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:55:00.323172Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:55:00.323771Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:55:00.323905Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:55:00.324116Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:55:00.324202Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:55:00.324277Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:55:00.324353Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:55:00.324468Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:55:00.512679Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:55:00.512790Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:55:00.534902Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:55:01.788328Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:55:02.466701Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-31T15:55:02.466798Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-31T15:55:02.827001Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-31T15:55:02.859457Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-31T15:55:02.861334Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-31T15:55:02.861802Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-31T15:55:02.862246Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-31T15:55:02.862963Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-31T15:55:02.866613Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-31T15:55:02.866946Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-31T15:55:02.868295Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-31T15:55:03.097254Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-31T15:55:03.098010Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1872:2596], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-31T15:55:03.200795Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1883:2604] 2025-03-31T15:55:03.246420Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1915:2621] 2025-03-31T15:55:03.246506Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1915:2621], schemeshard id = 72075186224037897 2025-03-31T15:55:03.269597Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-31T15:55:03.471951Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-31T15:55:03.472133Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-31T15:55:03.472453Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-31T15:55:03.700949Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-31T15:55:03.753424Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-31T15:55:03.754335Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-31T15:55:04.884470Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-31T15:55:05.797238Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-31T15:55:05.908284Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-31T15:55:08.835431Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2238:3075], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:55:08.835545Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:55:08.874153Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-31T15:55:10.062986Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2879];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T15:55:10.063367Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2879];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T15:55:10.063683Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2879];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T15:55:10.063814Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2879];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T15:55:10.063935Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2879];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T15:55:10.064052Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2879];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T15:55:10.064174Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2879];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T15:55:10.064298Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2879];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T15:55:10.064426Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2879];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T15:55:10.064551Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2879];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T15:55:10.064692Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2879];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T15:55:10.064833Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2879];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T15:55:10.208008Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2396:2885];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T15:55:10.208113Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2396:2885];tablet_id=72075186224037900;process= ... 599931Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-31T15:58:02.600052Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-31T15:58:02.601034Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-03-31T15:58:02.624413Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-03-31T15:58:02.624706Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-03-31T15:58:02.625605Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8619:6492], server id = [2:8624:6497], tablet id = 72075186224037899, status = OK 2025-03-31T15:58:02.626202Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8619:6492], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T15:58:02.626746Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8620:6493], server id = [2:8625:6498], tablet id = 72075186224037900, status = OK 2025-03-31T15:58:02.626821Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8620:6493], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T15:58:02.638024Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8621:6494], server id = [2:8626:6499], tablet id = 72075186224037901, status = OK 2025-03-31T15:58:02.638146Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8621:6494], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T15:58:02.639698Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8622:6495], server id = [2:8627:6500], tablet id = 72075186224037902, status = OK 2025-03-31T15:58:02.639790Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8622:6495], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T15:58:02.641035Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8623:6496], server id = [2:8628:6501], tablet id = 72075186224037903, status = OK 2025-03-31T15:58:02.641116Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8623:6496], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T15:58:02.672014Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-03-31T15:58:02.673307Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8619:6492], server id = [2:8624:6497], tablet id = 72075186224037899 2025-03-31T15:58:02.673366Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:58:02.682780Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-03-31T15:58:02.684139Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8620:6493], server id = [2:8625:6498], tablet id = 72075186224037900 2025-03-31T15:58:02.684187Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:58:02.684701Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-03-31T15:58:02.685390Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8642:6512], server id = [2:8646:6514], tablet id = 
72075186224037904, status = OK 2025-03-31T15:58:02.685479Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8642:6512], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T15:58:02.686092Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8621:6494], server id = [2:8626:6499], tablet id = 72075186224037901 2025-03-31T15:58:02.686124Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:58:02.697301Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-03-31T15:58:02.699244Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8645:6513], server id = [2:8648:6516], tablet id = 72075186224037905, status = OK 2025-03-31T15:58:02.699351Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8645:6513], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T15:58:02.699497Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8622:6495], server id = [2:8627:6500], tablet id = 72075186224037902 2025-03-31T15:58:02.699530Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:58:02.700506Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2025-03-31T15:58:02.701123Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8647:6515], server id = [2:8650:6518], tablet id = 72075186224037906, status = OK 2025-03-31T15:58:02.701215Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8647:6515], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T15:58:02.701353Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8623:6496], server id = [2:8628:6501], tablet id = 72075186224037903 2025-03-31T15:58:02.701383Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:58:02.702701Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8649:6517], server id = [2:8653:6521], tablet id = 72075186224037907, status = OK 2025-03-31T15:58:02.702783Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8649:6517], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T15:58:02.709554Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8655:6523], server id = [2:8658:6525], tablet id = 72075186224037908, status = OK 2025-03-31T15:58:02.709672Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8655:6523], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T15:58:02.717384Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-03-31T15:58:02.718052Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8642:6512], server id = [2:8646:6514], tablet id = 72075186224037904 2025-03-31T15:58:02.718093Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:58:02.718395Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-03-31T15:58:02.726863Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8645:6513], server id = [2:8648:6516], tablet id = 72075186224037905 2025-03-31T15:58:02.726937Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:58:02.729508Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-03-31T15:58:02.730432Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8647:6515], server id = [2:8650:6518], tablet id = 72075186224037906 2025-03-31T15:58:02.730475Z node 2 
:STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:58:02.732155Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-03-31T15:58:02.732643Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8649:6517], server id = [2:8653:6521], tablet id = 72075186224037907 2025-03-31T15:58:02.732680Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:58:02.732904Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-03-31T15:58:02.732966Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-03-31T15:58:02.733196Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-03-31T15:58:02.733347Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-03-31T15:58:02.733728Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8655:6523], server id = [2:8658:6525], tablet id = 72075186224037908 2025-03-31T15:58:02.733770Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:58:02.734206Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-03-31T15:58:02.770939Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-03-31T15:58:02.771174Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-03-31T15:58:02.771676Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8671:6534], server id = [2:8672:6535], tablet id = 72075186224037900, status = OK 2025-03-31T15:58:02.771772Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8671:6534], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T15:58:02.772974Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-03-31T15:58:02.773061Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-03-31T15:58:02.773262Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-03-31T15:58:02.773434Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-03-31T15:58:02.773851Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-03-31T15:58:02.785445Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8671:6534], server id = [2:8672:6535], tablet id = 72075186224037900 2025-03-31T15:58:02.785504Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:58:02.786548Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-03-31T15:58:02.889706Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8690:6553]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T15:58:02.890171Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-31T15:58:02.890260Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8690:6553], StatRequests.size() = 1 2025-03-31T15:58:03.220323Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YmU5NmYyMy0yODczYzUyZC1kZjBmMDNiYS1jZThmZDA5Yg==, TxId: 2025-03-31T15:58:03.220393Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YmU5NmYyMy0yODczYzUyZC1kZjBmMDNiYS1jZThmZDA5Yg==, TxId: ... waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2025-03-31T15:58:03.220964Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:8704:6559]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-03-31T15:58:03.221433Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-31T15:58:03.221878Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-03-31T15:58:03.221948Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-03-31T15:58:03.229844Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-03-31T15:58:03.229945Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-03-31T15:58:03.230022Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-03-31T15:58:03.247845Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 probe = 3 >> TIcNodeCache::GetNodesInfoTest >> TTopicApiDescribes::GetLocalDescribe >> TTopicApiDescribes::DescribeConsumer |67.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache |67.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache |67.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest |67.9%| [TA] {RESULT} $(B)/ydb/core/tx/sharding/ut/test-results/unittest/{meta.json ... results_accumulator.log} |67.9%| [TA] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_monitoring/test-results/unittest/{meta.json ... 
results_accumulator.log} |67.9%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache |67.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |67.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |67.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker |67.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker |67.9%| [LD] {RESULT} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |67.9%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseServerlessColumnTable [GOOD] Test command err: 2025-03-31T15:54:29.485598Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:54:29.485908Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T15:54:29.486000Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000f69/r3tmp/tmpZay9GT/pdisk_1.dat 2025-03-31T15:54:31.933660Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26811, node 1 2025-03-31T15:54:34.767507Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:54:34.767753Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:54:34.767974Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:54:34.804978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T15:54:34.812995Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T15:54:35.179183Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:35.180283Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:35.228259Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27245 2025-03-31T15:54:38.970316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T15:54:51.916089Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-31T15:54:52.079875Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:52.080764Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:52.151618Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T15:54:52.162180Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:54:53.162248Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:53.169569Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:53.179867Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:53.180264Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:53.180908Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:53.181254Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:53.181991Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:53.182118Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:53.182639Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:53.541394Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:53.541497Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:53.592776Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:54:55.016702Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:54:55.761627Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-31T15:54:55.762429Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-31T15:54:56.047931Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-31T15:54:56.082992Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-31T15:54:56.085194Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-31T15:54:56.085470Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-31T15:54:56.086143Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-31T15:54:56.103455Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-31T15:54:56.104263Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-31T15:54:56.104543Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-31T15:54:56.121289Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-31T15:54:56.350862Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-31T15:54:56.351752Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1877:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-31T15:54:56.414354Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1888:2606] 2025-03-31T15:54:56.452408Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1918:2621] 2025-03-31T15:54:56.461694Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1918:2621], schemeshard id = 72075186224037897 2025-03-31T15:54:56.534874Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-03-31T15:54:56.704288Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-31T15:54:56.704496Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-31T15:54:56.704789Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-03-31T15:54:56.738484Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-31T15:54:56.763392Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-31T15:54:56.763857Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-31T15:54:57.216011Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-31T15:54:57.451214Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-31T15:54:57.575702Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-31T15:54:58.508915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-31T15:54:59.600101Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:55:00.016506Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2025-03-31T15:55:00.016631Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037899 2025-03-31T15:55:00.017044Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2595:2948], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037899 2025-03-31T15:55:00.022057Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2596:2949] 2025-03-31T15:55:00.023354Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2596:2949], schemeshard id = 72075186224037899 2025-03-31T15:55:03.357711Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2727:3245], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:55:03.357919Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:55:03.449059Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715661:0, at schemeshard: 72075186224037899 2025-03-31T15:55:05.578495Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2879:3089];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T15:55:05.578746Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2879:3089];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T15:55:05.579068Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2879:3089];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T15:55:05.579200Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2879:3089];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T15:55:05.579300Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2879:3089];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T15:55:05.579433Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2879:3089];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T15:55:05.579554Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2879:3089];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T15:55:05.579668Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2879:3089];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T15:55:05.579776Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2879:3089];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T15 ... 025-03-31T15:58:01.372075Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:9542:7198], schemeshard count = 1 2025-03-31T15:58:02.719961Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037899 2025-03-31T15:58:02.720038Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 212.000000s, at schemeshard: 72075186224037899 2025-03-31T15:58:02.720260Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037899, stats size# 28 2025-03-31T15:58:02.736217Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-03-31T15:58:03.658488Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T15:58:03.658556Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-31T15:58:03.658598Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. 
Path [OwnerId: 72075186224037899, LocalPathId: 2] is column table. 2025-03-31T15:58:03.658635Z node 2 :STATISTICS DEBUG: [72075186224037894] Start schedule traversal navigate for path [OwnerId: 72075186224037899, LocalPathId: 2] 2025-03-31T15:58:03.669369Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-31T15:58:03.697611Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-31T15:58:03.698287Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-31T15:58:03.698386Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-31T15:58:03.699460Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-03-31T15:58:03.717720Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-03-31T15:58:03.718024Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-03-31T15:58:03.718923Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9685:7281], server id = [2:9690:7286], tablet id = 72075186224037905, status = OK 2025-03-31T15:58:03.719343Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9685:7281], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-03-31T15:58:03.720707Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9686:7282], server id = [2:9691:7287], tablet id = 72075186224037906, status = OK 2025-03-31T15:58:03.720782Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9686:7282], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-03-31T15:58:03.720932Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9687:7283], server id = [2:9692:7288], tablet id = 72075186224037907, status = OK 2025-03-31T15:58:03.720975Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9687:7283], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-03-31T15:58:03.722919Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9688:7284], server id = [2:9693:7289], tablet id = 72075186224037908, status = OK 2025-03-31T15:58:03.723024Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9688:7284], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-03-31T15:58:03.728500Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9689:7285], server id = [2:9695:7291], tablet id = 72075186224037909, status = OK 2025-03-31T15:58:03.728595Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9689:7285], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-03-31T15:58:03.741907Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-03-31T15:58:03.742718Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9685:7281], server id = [2:9690:7286], tablet id = 72075186224037905 2025-03-31T15:58:03.742770Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:58:03.792527Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-03-31T15:58:03.792763Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-03-31T15:58:03.793743Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9686:7282], server id = [2:9691:7287], tablet id = 72075186224037906 2025-03-31T15:58:03.793778Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:58:03.793874Z node 2 
:STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9687:7283], server id = [2:9692:7288], tablet id = 72075186224037907 2025-03-31T15:58:03.793894Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:58:03.794447Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9709:7301], server id = [2:9711:7302], tablet id = 72075186224037910, status = OK 2025-03-31T15:58:03.794535Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9709:7301], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-03-31T15:58:03.803839Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-03-31T15:58:03.804237Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9712:7303], server id = [2:9714:7305], tablet id = 72075186224037911, status = OK 2025-03-31T15:58:03.804341Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9712:7303], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-03-31T15:58:03.805673Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9713:7304], server id = [2:9715:7306], tablet id = 72075186224037912, status = OK 2025-03-31T15:58:03.805771Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9713:7304], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-03-31T15:58:03.806972Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9688:7284], server id = [2:9693:7289], tablet id = 72075186224037908 2025-03-31T15:58:03.807007Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:58:03.814523Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037909 2025-03-31T15:58:03.815149Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9717:7308], server id = [2:9720:7311], tablet id = 72075186224037913, status = OK 2025-03-31T15:58:03.815247Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9717:7308], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-03-31T15:58:03.815973Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9689:7285], server id = [2:9695:7291], tablet id = 72075186224037909 2025-03-31T15:58:03.816007Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:58:03.823633Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9723:7314], server id = [2:9727:7317], tablet id = 72075186224037914, status = OK 2025-03-31T15:58:03.823782Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9723:7314], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-03-31T15:58:03.825347Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037910 2025-03-31T15:58:03.826095Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9709:7301], server id = [2:9711:7302], tablet id = 72075186224037910 2025-03-31T15:58:03.826127Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:58:03.828587Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037911 2025-03-31T15:58:03.829213Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9712:7303], server id = [2:9714:7305], tablet id = 72075186224037911 2025-03-31T15:58:03.829256Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:58:03.830557Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037912 2025-03-31T15:58:03.831401Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, 
client id = [2:9713:7304], server id = [2:9715:7306], tablet id = 72075186224037912 2025-03-31T15:58:03.831456Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:58:03.831935Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037913 2025-03-31T15:58:03.832317Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9717:7308], server id = [2:9720:7311], tablet id = 72075186224037913 2025-03-31T15:58:03.832346Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:58:03.832642Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037914 2025-03-31T15:58:03.832708Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-03-31T15:58:03.832885Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-03-31T15:58:03.833109Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-03-31T15:58:03.833400Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2025-03-31T15:58:03.837259Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9723:7314], server id = [2:9727:7317], tablet id = 72075186224037914 2025-03-31T15:58:03.837320Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:58:03.838185Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-03-31T15:58:03.906710Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:9751:7337]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T15:58:03.907021Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-31T15:58:03.907069Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:9751:7337], StatRequests.size() = 1 2025-03-31T15:58:04.202345Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZTMzOTg1Y2UtZWZkMDFjMGMtODIxYTg3MTItZjEyYjllODI=, TxId: 2025-03-31T15:58:04.202405Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZTMzOTg1Y2UtZWZkMDFjMGMtODIxYTg3MTItZjEyYjllODI=, TxId: ... 
waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2025-03-31T15:58:04.207181Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:9764:7343]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-03-31T15:58:04.207671Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-31T15:58:04.208626Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-03-31T15:58:04.208689Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-03-31T15:58:04.217345Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-03-31T15:58:04.217454Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2025-03-31T15:58:04.217518Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 2] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-03-31T15:58:04.234006Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/arrow/unittest >> KqpScanArrowInChanels::AggregateByColumn [GOOD] Test command err: Trying to start YDB, gRPC: 29093, MsgBus: 16506 2025-03-31T15:56:49.668235Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488003221426507679:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:56:49.669163Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0017e8/r3tmp/tmpOe2ok1/pdisk_1.dat 2025-03-31T15:56:50.414202Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:56:50.414289Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:56:50.422584Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29093, node 1 2025-03-31T15:56:50.830754Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:56:50.830780Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:56:50.830788Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:56:50.830889Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T15:56:50.831165Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:16506 2025-03-31T15:56:54.674010Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488003221426507679:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:56:54.675414Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:16506 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:56:58.789146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:56:59.414208Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T15:56:59.461547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:57:01.516083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:57:02.105067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:57:02.620630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:57:05.636146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T15:57:05.636526Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:57:06.480941Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003294440953524:2420], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:06.481420Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:09.433391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T15:57:09.733616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T15:57:10.060033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T15:57:10.470532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T15:57:10.768756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T15:57:11.068051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T15:57:11.667311Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003315915790604:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:11.667391Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:11.668801Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003315915790609:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:11.672660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T15:57:11.761001Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488003315915790611:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T15:57:11.857106Z node 1 :TX_PROXY ERROR: Actor# [1:7488003315915790667:3534] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:57:14.795516Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743436634828, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 11043, MsgBus: 23229 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0017e8/r3tmp/tmp3nC87q/pdisk_1.dat 2025-03-31T15:57:18.193450Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:57:18.397759Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:57:18.397836Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:57:18.407306Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:57:18.417642Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11043, node 2 2025-03-31T15:57:18.621319Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:57:18.621343Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:57:18.621350Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:57:18.621457Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23229 TClient is connected to server localhost:23229 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:57:23.551024Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:57:23.772950Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T15:57:25.273756Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:57:27.674292Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15: ... 49.823282Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488003478712339288:2423], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:49.823424Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:49.889109Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T15:57:49.949317Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T15:57:50.033852Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T15:57:50.088748Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T15:57:50.139585Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T15:57:50.272622Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T15:57:50.338259Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488003483007307109:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:50.338375Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:50.338743Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488003483007307114:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:50.342935Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T15:57:50.358456Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488003483007307116:2480], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T15:57:50.413890Z node 3 :TX_PROXY ERROR: Actor# [3:7488003483007307170:3482] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:57:52.908681Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743436672236, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 2170, MsgBus: 28197 2025-03-31T15:57:54.761078Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488003500121224465:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:57:54.761151Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0017e8/r3tmp/tmpeusmvi/pdisk_1.dat 2025-03-31T15:57:54.938459Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:57:54.955458Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:57:54.955581Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:57:54.957257Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2170, node 4 2025-03-31T15:57:55.031774Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:57:55.031803Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:57:55.031817Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:57:55.031967Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28197 TClient is connected to server localhost:28197 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:57:56.348629Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T15:57:56.370369Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:57:56.483576Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:57:56.716352Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:57:56.836036Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:57:59.763206Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7488003500121224465:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:57:59.763337Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:58:00.295157Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488003525891030012:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:00.295287Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:00.338510Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T15:58:00.392562Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T15:58:00.477520Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T15:58:00.557429Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T15:58:00.660200Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T15:58:00.808859Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T15:58:00.953859Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488003525891030545:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:00.954047Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:00.957647Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488003525891030550:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:00.968150Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T15:58:01.002046Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7488003525891030553:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T15:58:01.066537Z node 4 :TX_PROXY ERROR: Actor# [4:7488003530185997902:3462] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:58:05.786270Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743436684374, txId: 281474976715671] shutting down |67.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> DataShardWrite::ReplaceImmediate [GOOD] >> DataShardWrite::ReplaceImmediate_DefaultValue |68.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> AnalyzeColumnshard::AnalyzeRebootSaBeforeAggregate [GOOD] |68.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TTopicApiDescribes::GetPartitionDescribe >> TTopicApiDescribes::DescribeTopic |68.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut |68.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut |68.0%| [LD] {RESULT} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut >> AnalyzeColumnshard::AnalyzeDeadline [GOOD] >> TopicAutoscaling::ReadingAfterSplitTest_PreferedPartition_PQv1 [GOOD] >> TopicAutoscaling::WithDir_PartitionSplit_AutosplitByLoad >> AnalyzeColumnshard::AnalyzeTwoColumnTables [GOOD] >> Yq_1::Basic_Null >> TraverseColumnShard::TraverseColumnTableHiveDistributionZeroNodes [GOOD] >> Yq_1::Basic >> Yq_1::CreateConnection_With_Existing_Name >> Yq_1::ListConnections >> Yq_1::ModifyConnections ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootSaBeforeAggregate [GOOD] Test command err: 2025-03-31T15:54:29.075898Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:54:29.076156Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T15:54:29.076253Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ede/r3tmp/tmpc5qQPp/pdisk_1.dat 2025-03-31T15:54:29.989004Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8197, node 1
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:50.470765Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:50.471039Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:51.088658Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:51.088761Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:51.130700Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:54:53.346770Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:54:53.839636Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-31T15:54:53.839724Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-31T15:54:54.071187Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-31T15:54:54.104468Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-31T15:54:54.106930Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-31T15:54:54.111164Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-31T15:54:54.111395Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-31T15:54:54.111478Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-31T15:54:54.111539Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-31T15:54:54.111618Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-31T15:54:54.116692Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-31T15:54:54.279721Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-31T15:54:54.280366Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1877:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-31T15:54:54.377802Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1888:2606] 2025-03-31T15:54:54.442439Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1918:2621] 2025-03-31T15:54:54.453375Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1918:2621], schemeshard id = 72075186224037897 2025-03-31T15:54:54.506344Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-31T15:54:54.654793Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-31T15:54:54.655061Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-31T15:54:54.659408Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-31T15:54:54.807476Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-31T15:54:54.840292Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-31T15:54:54.841369Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-31T15:54:55.629088Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-31T15:54:56.071135Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-31T15:54:56.156447Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-31T15:54:57.711773Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2240:3075], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:54:57.711934Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:54:57.751947Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-31T15:54:57.903386Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T15:54:57.903617Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T15:54:57.903923Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T15:54:57.904049Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T15:54:57.904182Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T15:54:57.904345Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T15:54:57.904480Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T15:54:57.904609Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T15:54:57.904730Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T15:54:57.904876Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T15:54:57.905009Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T15:54:57.905241Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T15:54:57.931401Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T15:54:57.931504Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;descr ... 1943Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7120:5232], server id = [2:7121:5233], tablet id = 72075186224037894 2025-03-31T15:58:03.852028Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:7251:5310] 2025-03-31T15:58:03.852092Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:7251:5310] 2025-03-31T15:58:03.977006Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-31T15:58:03.977108Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-31T15:58:03.977676Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-31T15:58:03.978139Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-31T15:58:03.978462Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded database: /Root/Database 2025-03-31T15:58:03.978506Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal start key 2025-03-31T15:58:03.978545Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal table owner id: 72075186224037897 2025-03-31T15:58:03.978582Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal table local path id: 4 2025-03-31T15:58:03.978614Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal start time: 1743436683804077 2025-03-31T15:58:03.978643Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal IsColumnTable: 1 2025-03-31T15:58:03.978674Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded global traversal round: 2 2025-03-31T15:58:03.978746Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 1 2025-03-31T15:58:03.978803Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-31T15:58:03.978890Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 2 2025-03-31T15:58:03.978971Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 1 2025-03-31T15:58:03.979037Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 1 2025-03-31T15:58:03.983133Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-31T15:58:03.983309Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete. Start navigate. PathId [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-31T15:58:03.984578Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-31T15:58:03.985346Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-31T15:58:03.985408Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-31T15:58:03.985529Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-31T15:58:03.986658Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-31T15:58:03.986710Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-31T15:58:03.992567Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-03-31T15:58:04.069744Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-03-31T15:58:04.069934Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-03-31T15:58:04.070469Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7298:5341], server id = [2:7299:5342], tablet id = 72075186224037899, status = OK 2025-03-31T15:58:04.070568Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7298:5341], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T15:58:04.074421Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-03-31T15:58:04.074546Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-03-31T15:58:04.074746Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-03-31T15:58:04.074970Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-03-31T15:58:04.079397Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-31T15:58:04.082270Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7298:5341], server id = [2:7299:5342], tablet id = 72075186224037899 2025-03-31T15:58:04.082318Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:58:04.082873Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-03-31T15:58:04.133474Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7319:5361]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T15:58:04.133729Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-31T15:58:04.133788Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7319:5361], StatRequests.size() = 1 2025-03-31T15:58:04.298316Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NjJlOWVjZjktZmU2YTM0OTgtNzAxNzI0NDMtNDhlNzY3OWY=, TxId: 2025-03-31T15:58:04.302093Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NjJlOWVjZjktZmU2YTM0OTgtNzAxNzI0NDMtNDhlNzY3OWY=, TxId: 2025-03-31T15:58:04.303025Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-31T15:58:04.317191Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7334:5367] 2025-03-31T15:58:04.317417Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:7334:5367], schemeshard id = 72075186224037897 2025-03-31T15:58:04.317540Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7251:5310], server id = [2:7335:5368], tablet id = 72075186224037894, status = OK 2025-03-31T15:58:04.317614Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7335:5368] 2025-03-31T15:58:04.317674Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:7335:5368], node id = 2, have schemeshards count = 1, need schemeshards count = 0 
2025-03-31T15:58:04.337976Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-31T15:58:04.338050Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-03-31T15:58:04.460962Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7346:5371] 2025-03-31T15:58:04.461940Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:2801:3224] , Record { OperationId: "operationId" Tables { PathId { OwnerId: 72075186224037897 LocalId: 4 } } Types: TYPE_COUNT_MIN_SKETCH } 2025-03-31T15:58:04.462001Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. Update existing force traversal. OperationId operationId , ReplyToActorId [1:2801:3224] 2025-03-31T15:58:04.462076Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Complete 2025-03-31T15:58:05.044450Z node 2 :STATISTICS DEBUG: Event round 3 is different from the current 0 2025-03-31T15:58:05.044540Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-03-31T15:58:05.973317Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-31T15:58:05.973412Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-03-31T15:58:05.973496Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-03-31T15:58:07.502668Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T15:58:07.502826Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-03-31T15:58:07.502881Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-31T15:58:07.503623Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-31T15:58:07.523603Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-31T15:58:07.524059Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-31T15:58:07.524147Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-31T15:58:07.524611Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-03-31T15:58:07.544598Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-03-31T15:58:07.544810Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 4, current Round: 0 2025-03-31T15:58:07.545370Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7418:5414], server id = [2:7419:5415], tablet id = 72075186224037899, status = OK 2025-03-31T15:58:07.545482Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7418:5414], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T15:58:07.547105Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-03-31T15:58:07.547209Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-03-31T15:58:07.547387Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-03-31T15:58:07.547593Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-03-31T15:58:07.547900Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-31T15:58:07.550829Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7418:5414], server id = [2:7419:5415], tablet id = 72075186224037899 2025-03-31T15:58:07.550889Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:58:07.558177Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-03-31T15:58:07.590855Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=OGRmZTcwYjktM2Q3ZGJkODktZjQ0YmRiOTAtZDBhMGQ2ZDg=, TxId: 2025-03-31T15:58:07.591648Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OGRmZTcwYjktM2Q3ZGJkODktZjQ0YmRiOTAtZDBhMGQ2ZDg=, TxId: 2025-03-31T15:58:07.592407Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-31T15:58:07.628792Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-31T15:58:07.628911Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2801:3224] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeDeadline [GOOD] Test command err: 2025-03-31T15:54:45.190563Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:54:45.195443Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:45.196137Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000f51/r3tmp/tmpk3qLBJ/pdisk_1.dat 2025-03-31T15:54:46.033005Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21958, node 1 2025-03-31T15:54:46.521678Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:54:46.521739Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:54:46.521776Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:54:46.522260Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T15:54:46.528164Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T15:54:46.633455Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:46.633587Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:46.650553Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24891 2025-03-31T15:54:47.516374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T15:54:59.075504Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-31T15:54:59.158925Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:59.159048Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:59.205170Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T15:54:59.216241Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:54:59.718968Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:59.720313Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:59.721273Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:59.721524Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:59.721868Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:59.722125Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:59.722233Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:59.722386Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:59.722607Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:59.907195Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:59.907310Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:59.931171Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:55:00.190895Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:55:00.354776Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-31T15:55:00.354881Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-31T15:55:00.437386Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-31T15:55:00.443096Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-31T15:55:00.444361Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-31T15:55:00.444899Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-31T15:55:00.445461Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-31T15:55:00.445686Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-31T15:55:00.446005Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-31T15:55:00.446157Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-31T15:55:00.447010Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-31T15:55:00.532540Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-31T15:55:00.532815Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1870:2596], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-31T15:55:00.564819Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1885:2608] 2025-03-31T15:55:00.577535Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1902:2617] 2025-03-31T15:55:00.579586Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1902:2617], schemeshard id = 72075186224037897 2025-03-31T15:55:00.617288Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-31T15:55:00.646963Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-31T15:55:00.647028Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-31T15:55:00.647124Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-31T15:55:00.658782Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-31T15:55:00.667890Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-31T15:55:00.668015Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-31T15:55:01.760858Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-31T15:55:02.937719Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-31T15:55:03.023975Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-31T15:55:05.526212Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2243:3077], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:55:05.543365Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:55:06.162960Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-31T15:55:07.082698Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2334:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T15:55:07.084173Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2334:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T15:55:07.086144Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2334:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T15:55:07.086517Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2334:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T15:55:07.086630Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2334:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T15:55:07.088450Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2334:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T15:55:07.088671Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2334:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T15:55:07.089283Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2334:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T15:55:07.089604Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2334:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T15:55:07.089941Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2334:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T15:55:07.090295Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2334:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T15:55:07.092815Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2334:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T15:55:07.268012Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T15:55:07.268414Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;desc ... ble, opId: 281474976715661:0, at schemeshard: 72075186224037897 2025-03-31T15:55:11.463510Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; waiting actualization: 0/0.000017s ... waiting for TEvAnalyzeTableResponse 2025-03-31T15:55:13.693339Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2810:3166] 2025-03-31T15:55:13.696065Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:2808:3227] , Record { OperationId: "operationId" Tables { PathId { OwnerId: 72075186224037897 LocalId: 4 } } Types: TYPE_COUNT_MIN_SKETCH } 2025-03-31T15:55:13.696122Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. Create new force traversal operation, OperationId=operationId 2025-03-31T15:55:13.696156Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. Create new force traversal table, OperationId=operationId , PathId [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-31T15:55:13.727594Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Complete 2025-03-31T15:55:24.991473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T15:55:24.991748Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:55:26.407246Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T15:55:26.407304Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:57:56.041060Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-03-31T15:57:56.041153Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-03-31T15:57:56.041196Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-03-31T15:57:56.041244Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-03-31T15:57:58.027919Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 2, at schemeshard: 72075186224037897 2025-03-31T15:57:58.028012Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 195.000000s, at schemeshard: 72075186224037897 2025-03-31T15:57:58.028339Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037897, stats size# 51 2025-03-31T15:57:58.048441Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-03-31T15:57:59.539371Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T15:57:59.539482Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-03-31T15:57:59.539542Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-03-31T15:57:59.539598Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table. 
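The ScheduleNextTraversal and IsColumnTable entries around this point show the aggregator's per-path dispatch: a datashard table is skipped (its stored rows are removed by the DELETE query visible in the next entries), while a column table starts the navigate/resolve/tablet-distribution round. A minimal C++ sketch of that branch follows; every name in it is hypothetical, standing in for the TTx* transactions named in the log, and it is not YDB source:

    // Hypothetical sketch of the dispatch seen in the ScheduleNextTraversal
    // and IsColumnTable log entries; illustrative names only, not YDB source.
    #include <cstdint>

    struct TPathId { uint64_t OwnerId = 0; uint64_t LocalPathId = 0; };

    // Assumed helpers standing in for the tablet-side logic in the log.
    bool IsColumnTable(const TPathId& path);     // "is column table" / "is data table"
    void DeleteStatistics(const TPathId& path);  // DELETE FROM `.metadata/_statistics`
    void StartNavigate(const TPathId& path);     // TTxNavigate -> TTxResolve -> distribution

    void ScheduleNextTraversal(const TPathId& path) {
        if (!IsColumnTable(path)) {
            // Datashard table: "Skip traversal for datashard table", and any
            // stored column statistics for this path are deleted instead.
            DeleteStatistics(path);
            return;
        }
        // Column table: start the round that ends in
        // TTxResponseTabletDistribution and per-tablet TEvStatisticsRequest.
        StartNavigate(path);
    }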
2025-03-31T15:57:59.539662Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-03-31T15:57:59.540125Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-31T15:57:59.544249Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-03-31T15:57:59.550193Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7025:5164], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:59.550321Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7035:5169], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:59.550404Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:59.568091Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897 2025-03-31T15:57:59.638817Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7039:5172], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-03-31T15:57:59.954662Z node 2 :TX_PROXY ERROR: Actor# [2:7137:5219] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:58:00.090652Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:7166:5234]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T15:58:00.090892Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-31T15:58:00.090989Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:7168:5236] 2025-03-31T15:58:00.091055Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:7168:5236] 2025-03-31T15:58:00.092248Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7169:5237] 2025-03-31T15:58:00.092397Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:7169:5237], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-03-31T15:58:00.092469Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-03-31T15:58:00.092660Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7168:5236], server id = [2:7169:5237], tablet id = 72075186224037894, status = OK 2025-03-31T15:58:00.092749Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-31T15:58:00.092841Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:7166:5234], StatRequests.size() = 1 2025-03-31T15:58:00.271226Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NWVlYWEzNmQtZjFmYmQyNjEtY2Q4YmEyZDAtOTcxZjQ1NQ==, TxId: 2025-03-31T15:58:00.271315Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NWVlYWEzNmQtZjFmYmQyNjEtY2Q4YmEyZDAtOTcxZjQ1NQ==, TxId: 2025-03-31T15:58:00.271878Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-31T15:58:00.289604Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-03-31T15:58:00.289683Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-03-31T15:58:00.351468Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-03-31T15:58:00.351568Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-03-31T15:58:00.439918Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:7168:5236], schemeshard count = 1 2025-03-31T15:58:01.646042Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-31T15:58:01.646150Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-03-31T15:58:01.649939Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-31T15:58:01.676730Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-31T15:58:01.677322Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-31T15:58:01.677383Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::ExecuteAnalyze. 
Table OperationId operationId, PathId [OwnerId: 72075186224037897, LocalPathId: 4], AnalyzedShards 1 2025-03-31T15:58:01.696746Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-31T15:58:01.707977Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableRequest::Complete. Send 1 events. ... blocking NKikimr::NStat::TEvStatistics::TEvAnalyzeTableResponse from TX_COLUMNSHARD_ACTOR to STATISTICS_AGGREGATOR ... waiting for TEvAnalyzeTableResponse (done) 2025-03-31T15:58:02.252016Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T15:58:02.252099Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-03-31T15:58:02.252144Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-03-31T15:58:02.252194Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-03-31T15:58:02.252249Z node 2 :STATISTICS DEBUG: [72075186224037894] Start schedule traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-31T15:58:02.260330Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-31T15:58:02.276586Z node 2 :STATISTICS ERROR: [72075186224037894] Delete long analyze operation, OperationId=operationId 2025-03-31T15:58:02.675728Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-31T15:58:02.675928Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-31T15:58:05.135587Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-03-31T15:58:05.135678Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-03-31T15:58:05.135734Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-03-31T15:58:05.135784Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-03-31T15:58:07.867812Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 2, at schemeshard: 72075186224037897 2025-03-31T15:58:07.867911Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 202.000000s, at schemeshard: 72075186224037897 2025-03-31T15:58:07.868119Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037897, stats size# 51 2025-03-31T15:58:08.034596Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-31T15:58:08.034916Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeDeadline::Complete. Send TEvAnalyzeResponse for deleted operation, OperationId=operationId, ActorId=[1:2808:3227] 2025-03-31T15:58:08.035003Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-03-31T15:58:08.035502Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-31T15:58:08.035566Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-31T15:58:08.036471Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 >> PrivateApi::PingTask >> Yq_1::DescribeJob |68.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data |68.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data |68.0%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data >> Yq_1::DescribeConnection ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeTwoColumnTables [GOOD] Test command err: 2025-03-31T15:54:35.738557Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:54:35.740011Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T15:54:35.740304Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000e66/r3tmp/tmpaubvkR/pdisk_1.dat 2025-03-31T15:54:40.206981Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15769, node 1 2025-03-31T15:54:43.163968Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:54:43.164234Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:54:43.164507Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:54:43.221998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T15:54:43.233379Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T15:54:43.685416Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:43.686592Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:43.730395Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18898 2025-03-31T15:54:45.904137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T15:54:58.255719Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-31T15:54:58.332945Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:58.333129Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:58.375327Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T15:54:58.385898Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:54:58.697030Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:58.697666Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:58.698224Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:58.698364Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:58.698589Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:58.698687Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:58.698772Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:58.698851Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:58.698931Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:58.921389Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:58.921634Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:58.940969Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:54:59.207350Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:54:59.343020Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-31T15:54:59.343143Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-31T15:54:59.497913Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-31T15:54:59.511649Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-31T15:54:59.512621Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-31T15:54:59.512986Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-31T15:54:59.513121Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-31T15:54:59.513517Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-31T15:54:59.513779Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-31T15:54:59.513923Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-31T15:54:59.515215Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-31T15:54:59.578055Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-31T15:54:59.578181Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1872:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-31T15:54:59.594194Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2608] 2025-03-31T15:54:59.612517Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1908:2617] 2025-03-31T15:54:59.613172Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1908:2617], schemeshard id = 72075186224037897 2025-03-31T15:54:59.615449Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-31T15:54:59.678633Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-31T15:54:59.678757Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-31T15:54:59.678911Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-31T15:54:59.732471Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-31T15:54:59.778218Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-31T15:54:59.778352Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-31T15:55:00.125230Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-31T15:55:00.488880Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-31T15:55:00.636016Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-31T15:55:04.114453Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2241:3074], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:55:04.115556Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:55:04.322951Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-31T15:55:05.851392Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2326:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T15:55:05.856295Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2326:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T15:55:05.858543Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2326:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T15:55:05.865830Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2326:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T15:55:05.866377Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2326:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T15:55:05.866732Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2326:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T15:55:05.867317Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2326:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T15:55:05.867834Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2326:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T15:55:05.868339Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2326:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T15:55:05.868438Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2326:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T15:55:05.868741Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2326:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T15:55:05.868844Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2326:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T15:55:06.239567Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T15:55:06.240651Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;desc ... :02.475597Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-03-31T15:58:02.536300Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7576:5553]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T15:58:02.536526Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-31T15:58:02.536571Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7576:5553], StatRequests.size() = 1 2025-03-31T15:58:02.756404Z node 2 :SYSTEM_VIEWS WARN: [72075186224037891] TEvIntervalQuerySummary, time mismath: node id# 2, interval end# 1970-01-01T00:02:03.000000Z, event interval end# 2025-03-31T15:58:00.000000Z 2025-03-31T15:58:02.756940Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YTdlYTc2NTgtOTYyM2JmMTgtNDA5NTYwZWYtZWQ5NDM3NWQ=, TxId: 2025-03-31T15:58:02.756985Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YTdlYTc2NTgtOTYyM2JmMTgtNDA5NTYwZWYtZWQ5NDM3NWQ=, TxId: 2025-03-31T15:58:02.757555Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-31T15:58:02.786658Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-31T15:58:02.786732Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. Don't send TEvAnalyzeResponse. There are pending operations, OperationId operationId , ActorId=[1:3268:3371] 2025-03-31T15:58:03.359191Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 0 2025-03-31T15:58:03.359301Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-03-31T15:58:04.235881Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-31T15:58:04.235961Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 5] is column table. 2025-03-31T15:58:04.247704Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-31T15:58:04.268654Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-31T15:58:04.269201Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-31T15:58:04.269259Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::ExecuteAnalyze. Table OperationId operationId, PathId [OwnerId: 72075186224037897, LocalPathId: 5], AnalyzedShards 1 2025-03-31T15:58:04.304336Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-31T15:58:04.347978Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableRequest::Complete. Send 1 events. 
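The surrounding entries trace one aggregation round end to end: the aggregator starts a numbered round, connects a pipe client to each column shard, sends TEvStatisticsRequest, and folds each TEvStatisticsResponse into TTxAggregateStatisticsResponse, while events stamped with an old round number are dropped ("Event round N is different from the current 0"). A hedged C++ sketch of that round bookkeeping, under assumed names rather than the real YDB types:

    // Hypothetical round bookkeeping behind the "Event round N is different
    // from the current M" / "Send aggregate statistics response" entries.
    #include <cstdint>
    #include <unordered_set>

    struct TAggregationRound {
        uint64_t Round = 0;                    // 0: idle, no round in flight
        std::unordered_set<uint64_t> Pending;  // tablet ids awaiting a response

        void Start(uint64_t round, std::unordered_set<uint64_t> tablets) {
            Round = round;                     // e.g. "Round: 4" in the log
            Pending = std::move(tablets);      // e.g. {72075186224037899}
        }

        // Returns true once every tablet has answered for the live round.
        bool OnResponse(uint64_t eventRound, uint64_t tabletId) {
            if (eventRound != Round)
                return false;  // stale round, event skipped (cf. "Skip TEvDispatchKeepAlive")
            Pending.erase(tabletId);
            return Pending.empty();            // then send the aggregate response
        }
    };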
2025-03-31T15:58:04.349099Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Execute 2025-03-31T15:58:04.349206Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Execute. All shards are analyzed 2025-03-31T15:58:04.377564Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-03-31T15:58:05.788209Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T15:58:05.788292Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 5] is column table. 2025-03-31T15:58:05.788335Z node 2 :STATISTICS DEBUG: [72075186224037894] Start schedule traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 5] 2025-03-31T15:58:05.789152Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-31T15:58:05.804317Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-31T15:58:05.804706Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-31T15:58:05.804778Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-31T15:58:05.805225Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-03-31T15:58:05.832561Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-03-31T15:58:05.832796Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-03-31T15:58:05.833438Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7696:5613], server id = [2:7697:5614], tablet id = 72075186224037900, status = OK 2025-03-31T15:58:05.833557Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7696:5613], path = { OwnerId: 72075186224037897 LocalId: 5 } 2025-03-31T15:58:05.840609Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-03-31T15:58:05.840717Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-03-31T15:58:05.840900Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-03-31T15:58:05.841090Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-03-31T15:58:05.843165Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-03-31T15:58:05.845576Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7696:5613], server id = [2:7697:5614], tablet id = 72075186224037900 2025-03-31T15:58:05.845625Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:58:05.846446Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-03-31T15:58:05.879245Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NTI4ODRiOWEtNjc5NmEzNWYtODMzYTc5ZGQtZmRhNzIwNjM=, TxId: 2025-03-31T15:58:05.879326Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NTI4ODRiOWEtNjc5NmEzNWYtODMzYTc5ZGQtZmRhNzIwNjM=, TxId: 2025-03-31T15:58:05.879862Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-31T15:58:05.948480Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 5] 2025-03-31T15:58:05.948542Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-03-31T15:58:06.523732Z node 2 :STATISTICS DEBUG: Event round 3 is different from the current 0 2025-03-31T15:58:06.523817Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-03-31T15:58:07.355843Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-31T15:58:07.356184Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-31T15:58:07.382341Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-31T15:58:07.382422Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-03-31T15:58:07.382484Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-03-31T15:58:08.720781Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T15:58:08.721155Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 5] is column table. 2025-03-31T15:58:08.722003Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 5] 2025-03-31T15:58:08.723821Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-31T15:58:08.739002Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-31T15:58:08.739397Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-31T15:58:08.739469Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-31T15:58:08.739861Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-03-31T15:58:08.758565Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-03-31T15:58:08.758788Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 4, current Round: 0 2025-03-31T15:58:08.759452Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7797:5671], server id = [2:7798:5672], tablet id = 72075186224037900, status = OK 2025-03-31T15:58:08.759569Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7797:5671], path = { OwnerId: 72075186224037897 LocalId: 5 } 2025-03-31T15:58:08.760887Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-03-31T15:58:08.760984Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-03-31T15:58:08.761157Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-03-31T15:58:08.761336Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-03-31T15:58:08.761595Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-31T15:58:08.782686Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7797:5671], server id = [2:7798:5672], tablet id = 72075186224037900 2025-03-31T15:58:08.782749Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:58:08.783472Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-03-31T15:58:08.831806Z node 2 :SYSTEM_VIEWS WARN: [72075186224037891] TEvIntervalQuerySummary, time mismath: node id# 2, interval end# 1970-01-01T00:02:07.000000Z, event interval end# 2025-03-31T15:58:06.000000Z 2025-03-31T15:58:08.832475Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=OGZkYWRkNmItZGZjMzVmZjQtMjllODNhMjQtZGU4ZDViODQ=, TxId: 2025-03-31T15:58:08.832533Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OGZkYWRkNmItZGZjMzVmZjQtMjllODNhMjQtZGU4ZDViODQ=, TxId: 2025-03-31T15:58:08.837754Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-31T15:58:08.878883Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 5] 2025-03-31T15:58:08.878966Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3268:3371] >> Yq_1::DeleteConnections ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableHiveDistributionZeroNodes [GOOD] Test command err: 2025-03-31T15:54:34.090363Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:54:34.091697Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T15:54:34.091989Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000f74/r3tmp/tmppt4ZdY/pdisk_1.dat 2025-03-31T15:54:38.320217Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11430, node 1 2025-03-31T15:54:41.126239Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:54:41.126508Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:54:41.126930Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:54:41.141096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T15:54:41.144164Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T15:54:41.510305Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:41.515495Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:41.559511Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6752 2025-03-31T15:54:45.210976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T15:54:58.375662Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-31T15:54:58.430056Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:58.430184Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:58.461110Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T15:54:58.469221Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:54:58.749660Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:58.750236Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:58.750698Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:58.750813Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:58.751036Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:58.751192Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:58.751300Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:58.751375Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:58.751465Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:58.969753Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:58.969862Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:58.989541Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:54:59.596660Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:54:59.655524Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-31T15:54:59.655629Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-31T15:54:59.735567Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-31T15:54:59.737221Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-31T15:54:59.737970Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-31T15:54:59.738198Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-31T15:54:59.738301Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-31T15:54:59.738536Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-31T15:54:59.738657Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-31T15:54:59.738775Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-31T15:54:59.739767Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-31T15:54:59.799710Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-31T15:54:59.799837Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1872:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-31T15:54:59.846387Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2608] 2025-03-31T15:54:59.857495Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1908:2617] 2025-03-31T15:54:59.858243Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1908:2617], schemeshard id = 72075186224037897 2025-03-31T15:54:59.860949Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-31T15:54:59.888294Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-31T15:54:59.888468Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-31T15:54:59.888665Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-31T15:54:59.910568Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-31T15:54:59.919387Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-31T15:54:59.919557Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-31T15:55:00.216623Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-31T15:55:00.644088Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-31T15:55:00.772626Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-31T15:55:04.100916Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2241:3074], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:55:04.101832Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:55:04.338533Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-31T15:55:06.412074Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2387:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T15:55:06.414040Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2387:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T15:55:06.420853Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2387:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T15:55:06.422636Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2387:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T15:55:06.424334Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2387:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T15:55:06.424478Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2387:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T15:55:06.424594Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2387:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T15:55:06.424703Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2387:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T15:55:06.424814Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2387:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T15:55:06.425268Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2387:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T15:55:06.439764Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2387:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T15:55:06.440234Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2387:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T15:55:06.758598Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2398:2889];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T15:55:06.758697Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2398:2889];tablet_id=72075186224037900;process=T ... 58:04.458694Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:8495:6434], schemeshard count = 1 2025-03-31T15:58:06.907924Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T15:58:06.907994Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-31T15:58:06.908037Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-03-31T15:58:06.908086Z node 2 :STATISTICS DEBUG: [72075186224037894] Start schedule traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-31T15:58:06.934117Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-31T15:58:06.955689Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-31T15:58:06.956271Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-31T15:58:06.956363Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-31T15:58:06.957356Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 2 2025-03-31T15:58:06.957431Z node 2 :STATISTICS WARN: [72075186224037894] TTxResponseTabletDistribution::Execute. Some tablets are probably in Hive boot queue 2025-03-31T15:58:06.957491Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-03-31T15:58:08.232802Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-03-31T15:58:08.248594Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-03-31T15:58:08.248838Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-03-31T15:58:08.249668Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8657:6519], server id = [2:8662:6524], tablet id = 72075186224037899, status = OK 2025-03-31T15:58:08.250105Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8657:6519], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T15:58:08.264303Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8658:6520], server id = [2:8663:6525], tablet id = 72075186224037900, status = OK 2025-03-31T15:58:08.264416Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8658:6520], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T15:58:08.264791Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8659:6521], server id = [2:8664:6526], tablet id = 72075186224037901, status = OK 2025-03-31T15:58:08.264857Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8659:6521], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T15:58:08.265610Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8660:6522], server id = [2:8665:6527], tablet id = 72075186224037902, status = OK 2025-03-31T15:58:08.265676Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8660:6522], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T15:58:08.266894Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8661:6523], server id = [2:8668:6530], tablet id = 72075186224037903, status = OK 2025-03-31T15:58:08.266956Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8661:6523], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T15:58:08.307891Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-03-31T15:58:08.308940Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8657:6519], server id = [2:8662:6524], tablet id = 72075186224037899 2025-03-31T15:58:08.308995Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:58:08.310594Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-03-31T15:58:08.311013Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8658:6520], server id = [2:8663:6525], tablet id = 72075186224037900 2025-03-31T15:58:08.311045Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:58:08.323289Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8680:6539], server id = [2:8682:6540], tablet id = 72075186224037904, status = OK 2025-03-31T15:58:08.327294Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8680:6539], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T15:58:08.328612Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-03-31T15:58:08.329177Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8659:6521], server id = [2:8664:6526], tablet id = 72075186224037901 2025-03-31T15:58:08.329211Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:58:08.330626Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8683:6541], server id = [2:8685:6542], tablet id = 72075186224037905, status 
= OK 2025-03-31T15:58:08.330713Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8683:6541], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T15:58:08.330863Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-03-31T15:58:08.331658Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8660:6522], server id = [2:8665:6527], tablet id = 72075186224037902 2025-03-31T15:58:08.331689Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:58:08.332766Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2025-03-31T15:58:08.333222Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8686:6543], server id = [2:8689:6546], tablet id = 72075186224037906, status = OK 2025-03-31T15:58:08.333320Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8686:6543], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T15:58:08.333860Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8661:6523], server id = [2:8668:6530], tablet id = 72075186224037903 2025-03-31T15:58:08.333891Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:58:08.334195Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8688:6545], server id = [2:8691:6548], tablet id = 72075186224037907, status = OK 2025-03-31T15:58:08.334259Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8688:6545], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T15:58:08.347514Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8693:6550], server id = [2:8695:6552], tablet id = 72075186224037908, status = OK 2025-03-31T15:58:08.347629Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8693:6550], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T15:58:08.360791Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-03-31T15:58:08.361374Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8680:6539], server id = [2:8682:6540], tablet id = 72075186224037904 2025-03-31T15:58:08.361416Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:58:08.362005Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-03-31T15:58:08.368146Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8683:6541], server id = [2:8685:6542], tablet id = 72075186224037905 2025-03-31T15:58:08.368203Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:58:08.369752Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-03-31T15:58:08.370225Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8686:6543], server id = [2:8689:6546], tablet id = 72075186224037906 2025-03-31T15:58:08.370257Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:58:08.370782Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-03-31T15:58:08.377400Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8688:6545], server id = [2:8691:6548], tablet id = 72075186224037907 2025-03-31T15:58:08.377459Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:58:08.378253Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-03-31T15:58:08.378317Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 
2025-03-31T15:58:08.378532Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-03-31T15:58:08.378793Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-03-31T15:58:08.379267Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-31T15:58:08.381461Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8693:6550], server id = [2:8695:6552], tablet id = 72075186224037908 2025-03-31T15:58:08.381497Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:58:08.392314Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List<Uint32>; DECLARE $data AS List<String>; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-03-31T15:58:08.455578Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8723:6575]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T15:58:08.455849Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-31T15:58:08.455906Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8723:6575], StatRequests.size() = 1 2025-03-31T15:58:08.615971Z node 2 :SYSTEM_VIEWS WARN: [72075186224037891] TEvIntervalQuerySummary, wrong stage: node id# 2 2025-03-31T15:58:08.616688Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=N2JiNWQzZGUtNjI3YWIyYjktZmEwYTFiNjMtOTg4MTEzMjY=, TxId: 2025-03-31T15:58:08.616745Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=N2JiNWQzZGUtNjI3YWIyYjktZmEwYTFiNjMtOTg4MTEzMjY=, TxId: ... 
waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2025-03-31T15:58:08.617445Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:8736:6581]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-03-31T15:58:08.617683Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-31T15:58:08.618236Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-03-31T15:58:08.618293Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-03-31T15:58:08.626583Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-03-31T15:58:08.626686Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-03-31T15:58:08.626743Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-03-31T15:58:08.645830Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 >> DataShardWrite::DeleteImmediate [GOOD] >> DataShardWrite::CancelImmediate >> KqpSqlIn::Delete [GOOD] >> DataShardSnapshots::MvccSnapshotAndSplit >> DataShardWrite::UpsertNoLocksArbiter [GOOD] >> DataShardWrite::UpsertLostPrepareArbiter |68.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs |68.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs |68.0%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs >> DataShardSnapshots::MvccSnapshotTailCleanup >> DataShardSnapshots::LockedWriteBulkUpsertConflict+UseSink >> PersQueueSdkReadSessionTest::SpecifyClustersExplicitly [GOOD] >> PersQueueSdkReadSessionTest::StopResumeReadingData >> TCacheTest::List >> TCacheTest::List [GOOD] >> TCacheTest::MigrationCommit >> Yq_1::CreateQuery_With_Idempotency ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpSqlIn::Delete [GOOD] Test command err: Trying to start YDB, gRPC: 20708, MsgBus: 2394 2025-03-31T15:55:42.678004Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488002931482731566:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:55:42.678048Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000adf/r3tmp/tmpvb0DgO/pdisk_1.dat 2025-03-31T15:55:43.202547Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:55:43.225125Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:55:43.225404Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:55:43.240114Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20708, node 1 2025-03-31T15:55:46.390046Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2025-03-31T15:55:46.390065Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:55:46.390071Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:55:46.390839Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T15:55:47.684489Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488002931482731566:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:55:47.684952Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:2394 TClient is connected to server localhost:2394 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:55:50.210192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:55:50.686732Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T15:55:50.786677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:55:54.341922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:55:56.584561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:55:56.991706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:55:58.199415Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T15:55:58.199727Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:56:02.632667Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003017382079311:2423], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:56:02.632781Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:56:03.062351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T15:56:03.119823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T15:56:03.191564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T15:56:03.245724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T15:56:03.303552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T15:56:03.375584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T15:56:03.429679Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003021677047134:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:56:03.429808Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:56:03.430217Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003021677047139:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:56:03.438070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T15:56:03.451336Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488003021677047141:2478], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T15:56:03.541020Z node 1 :TX_PROXY ERROR: Actor# [1:7488003021677047201:3499] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 22266, MsgBus: 3197 2025-03-31T15:56:15.881156Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488003072348106650:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:56:15.881260Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000adf/r3tmp/tmpQCqeNf/pdisk_1.dat 2025-03-31T15:56:16.534462Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:56:16.547075Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:56:16.547601Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:56:16.549240Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22266, node 2 2025-03-31T15:56:17.642876Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:56:17.642899Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:56:17.642907Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:56:17.643016Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T15:56:20.830318Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488003072348106650:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:56:20.830402Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:3197 TClient is connected to server localhost:3197 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T15:56:25.415235Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:56:25.424965Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:56:25.438207Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperat ... to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:48.653487Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7488003472914075216:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:48.666871Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T15:57:48.746182Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7488003472914075218:2475], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T15:57:48.822298Z node 5 :TX_PROXY ERROR: Actor# [5:7488003472914075276:3505] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:57:50.829554Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T15:57:50.926737Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T15:57:51.060144Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T15:57:52.607241Z node 5 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T15:57:52.607281Z node 5 :IMPORT WARN: Table profiles were not loaded
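Note on the recurring sequence in these blocks — "Resource pool default not found or you don't have access permissions", then "Scheduled retry ... Transaction ... completed, doublechecking", then the TX_PROXY "path exist, request accepts it" error: this is the workload service lazily creating the default resource pool on first use. The initial fetch returns NOT_FOUND, TPoolCreatorActor submits the creation, and a racing creation attempt is then accepted because the path already exists, so the ERROR line is benign in these tests. For comparison, a pool can also be declared explicitly in YQL; a hedged sketch, assuming the CREATE RESOURCE POOL settings described in YDB's workload-manager documentation (the pool name and parameter values below are illustrative, not taken from the test):

    -- Illustrative only; not part of the test run above.
    CREATE RESOURCE POOL example_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10,  -- assumed setting: max queries running at once
        QUEUE_SIZE = 100              -- assumed setting: max queries waiting for a slot
    );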
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:44: Warning: At function: Filter, At function: Coalesce
:5:67: Warning: At function: SqlIn
:5:67: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:44: Warning: At function: Filter, At function: Coalesce
:5:67: Warning: At function: SqlIn
:5:67: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 Trying to start YDB, gRPC: 11053, MsgBus: 29051 2025-03-31T15:57:56.893928Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7488003506826099774:2208];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:57:56.928744Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000adf/r3tmp/tmpy4WYB0/pdisk_1.dat 2025-03-31T15:57:57.330825Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:57:57.392797Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:57:57.392916Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:57:57.398959Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11053, node 6 2025-03-31T15:57:57.702669Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:57:57.702700Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:57:57.702712Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:57:57.702861Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29051 TClient is connected to server localhost:29051 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:57:58.696892Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:57:58.708377Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T15:57:58.718237Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
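The code-1108 warnings repeated above come from YQL's legacy IN semantics: when the left-hand expression or the item collection is nullable (or the collection is empty), IN follows non-standard rules and can produce results the query author did not expect, which is why the compiler suggests the pragma. A minimal YQL sketch of the suggested fix; the table and column names are illustrative assumptions, only the pragma itself is taken from the warning:

    -- Opt in to ANSI SQL behavior for IN over empty or
    -- NULL-containing collections (the pragma named in the warning).
    PRAGMA AnsiInForEmptyOrNullableItemsCollections;

    DECLARE $ids AS List<Int32?>;   -- parameter list that may contain NULLs
    SELECT key, value
    FROM example_table              -- hypothetical table
    WHERE key IN $ids;

With the pragma enabled, an empty $ids simply matches no rows, and NULL items participate in the comparison under SQL three-valued logic rather than the legacy behavior the warning cautions about.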
2025-03-31T15:57:58.877001Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:57:59.191416Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:57:59.374981Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:58:01.803229Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7488003506826099774:2208];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:58:01.803320Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:58:03.527274Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488003536890872464:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:03.527697Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:03.582005Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T15:58:03.702098Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T15:58:03.807046Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T15:58:03.942662Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T15:58:04.062298Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T15:58:04.178863Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T15:58:04.289489Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488003541185840289:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:04.289623Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:04.289959Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488003541185840294:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:04.300706Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T15:58:04.332410Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7488003541185840296:2467], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T15:58:04.403859Z node 6 :TX_PROXY ERROR: Actor# [6:7488003541185840356:3465] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:58:06.793597Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T15:58:06.897673Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T15:58:07.079927Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 >> DataShardSnapshots::UncommittedChangesRenameTable+UseSink >> DataShardSnapshots::LockedWriteReuseAfterCommit+UseSink >> DataShardWrite::ReplaceImmediate_DefaultValue [GOOD] >> DataShardWrite::UpdateImmediate >> KqpNewEngine::LiteralKeys [GOOD] >> KqpSort::Offset [GOOD] >> TCacheTest::MigrationCommit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::MigrationCommit [GOOD] Test command err: 2025-03-31T15:58:14.754388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:58:14.754448Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-03-31T15:58:14.927108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 FAKE_COORDINATOR: Add transaction: 103 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000004 FAKE_COORDINATOR: Erasing txId 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 101 TestWaitNotification wait txId: 102 TestWaitNotification wait txId: 103 2025-03-31T15:58:14.962890Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 
72057594046678944 2025-03-31T15:58:14.963030Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-03-31T15:58:14.963251Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 2025-03-31T15:58:15.239543Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:58:15.239610Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-03-31T15:58:15.297618Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 Leader for TabletID 72057594046678944 is [2:70:2109] sender: [2:175:2067] recipient: [2:46:2093] Leader for TabletID 72057594046678944 is [2:70:2109] sender: [2:178:2067] recipient: [2:24:2071] Leader for TabletID 72057594046678944 is [2:70:2109] sender: [2:179:2067] recipient: [2:177:2171] Leader for TabletID 72057594046678944 is [2:180:2172] sender: [2:181:2067] recipient: [2:177:2171] 2025-03-31T15:58:15.349236Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:58:15.349299Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 101 Leader for TabletID 72057594046678944 is [2:180:2172] sender: [2:211:2067] recipient: [2:24:2071] 2025-03-31T15:58:15.378697Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2025-03-31T15:58:15.391481Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 102:0, at schemeshard: 72057594046678944 Leader for TabletID 72075186233409546 is [0:0:0] sender: [2:247:2067] recipient: [2:238:2213] IGNORE Leader for TabletID 72075186233409546 is [0:0:0] sender: [2:247:2067] recipient: [2:238:2213] Leader for TabletID 72075186233409547 is [0:0:0] sender: [2:248:2067] recipient: [2:240:2215] IGNORE Leader for TabletID 72075186233409547 is [0:0:0] sender: [2:248:2067] recipient: [2:240:2215] Leader for TabletID 72075186233409546 is [0:0:0] sender: [2:250:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409546 is [0:0:0] sender: [2:250:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409547 is [0:0:0] sender: [2:253:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409547 is [0:0:0] sender: [2:253:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409546 is [2:251:2219] sender: [2:255:2067] recipient: [2:238:2213] Leader for TabletID 72075186233409547 is 
[2:254:2221] sender: [2:256:2067] recipient: [2:240:2215] TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 TestWaitNotification wait txId: 102 2025-03-31T15:58:15.431788Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 Leader for TabletID 72075186233409547 is [2:254:2221] sender: [2:289:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409546 is [2:251:2219] sender: [2:290:2067] recipient: [2:24:2071] FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestModificationResults wait txId: 104 2025-03-31T15:58:15.533286Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 104:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 103 TestWaitNotification wait txId: 104 Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:340:2067] recipient: [2:336:2285] IGNORE Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:340:2067] recipient: [2:336:2285] Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:341:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:341:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409548 is [2:343:2289] sender: [2:344:2067] recipient: [2:336:2285] Leader for TabletID 72075186233409548 is [2:343:2289] sender: [2:345:2067] recipient: [2:24:2071] TestWaitNotification: OK eventTxId 103 TestWaitNotification: OK eventTxId 104 TestModificationResults wait txId: 105 2025-03-31T15:58:15.829378Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpUpgradeSubDomain, opId: 105:0, at schemeshard: 72057594046678944 Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:420:2067] recipient: [2:415:2333] IGNORE Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:420:2067] recipient: [2:415:2333] Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:422:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:422:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409549 is [2:423:2337] sender: [2:424:2067] recipient: [2:415:2333] 2025-03-31T15:58:15.973265Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:58:15.973331Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 Leader for TabletID 72075186233409549 is [2:423:2337] sender: [2:451:2067] recipient: [2:24:2071] TestWaitNotification: OK eventTxId 105 { Path: Root/USER_0/DirA TableId: [72057594046678944:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 
TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 SchemeShard: 72075186233409549 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } TestModificationResults wait txId: 106 2025-03-31T15:58:16.085632Z node 2 :FLAT_TX_SCHEMESHARD WARN: Mark as Migrated path id [OwnerId: 72057594046678944, LocalPathId: 3] 2025-03-31T15:58:16.085697Z node 2 :FLAT_TX_SCHEMESHARD WARN: Mark as Migrated path id [OwnerId: 72057594046678944, LocalPathId: 4] 2025-03-31T15:58:16.086098Z node 2 :FLAT_TX_SCHEMESHARD ERROR: TWait ProgressState, dependent transaction: 106, parent transaction: 105, at schemeshard: 72057594046678944 2025-03-31T15:58:16.086232Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpUpgradeSubDomainDecision, opId: 106:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-03-31T15:58:16.118133Z node 2 :FLAT_TX_SCHEMESHARD WARN: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 2025-03-31T15:58:16.118546Z node 2 :FLAT_TX_SCHEMESHARD WARN: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 106 { Path: Root/USER_0/DirA TableId: [72057594046678944:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 SchemeShard: 72075186233409549 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } Leader for TabletID 72057594046678944 is [2:180:2172] sender: [2:512:2067] recipient: [2:46:2093] Leader for TabletID 72057594046678944 is [2:180:2172] sender: [2:514:2067] recipient: [2:24:2071] Leader for TabletID 72057594046678944 is [2:180:2172] sender: [2:516:2067] recipient: [2:515:2408] Leader for TabletID 72057594046678944 is [2:517:2409] sender: [2:518:2067] recipient: [2:515:2408] 2025-03-31T15:58:16.307438Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:58:16.307512Z node 2 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046678944 is [2:517:2409] sender: [2:545:2067] recipient: [2:24:2071] { Path: Root/USER_0/DirA TableId: [72057594046678944:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 SchemeShard: 72075186233409549 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::LiteralKeys [GOOD] Test command err: Trying to start YDB, gRPC: 25429, MsgBus: 4715 2025-03-31T15:55:50.595890Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488002965828545018:2136];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:55:50.648875Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect 
path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ad7/r3tmp/tmphep1mR/pdisk_1.dat 2025-03-31T15:55:53.621943Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:55:54.484236Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:55:54.655657Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:55:54.686211Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:55:54.686558Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 25429, node 1 2025-03-31T15:55:54.760665Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:55:55.540307Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488002965828545018:2136];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:55:55.540809Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:55:56.231867Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:55:56.231886Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:55:56.231893Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:55:56.231983Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4715 TClient is connected to server localhost:4715 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:56:02.891975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:56:02.918919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T15:56:03.194178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:56:03.495565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:56:03.581263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:56:08.903758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T15:56:08.903786Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:56:11.312600Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003056022860017:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:56:11.312687Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:56:13.030986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T15:56:13.187374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T15:56:13.350106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T15:56:13.445596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T15:56:13.502238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T15:56:13.625812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T15:56:14.067378Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003068907762476:2496], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:56:14.067734Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:56:14.069722Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003068907762481:2499], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:56:14.098302Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T15:56:14.568122Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-31T15:56:14.569608Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488003068907762483:2500], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T15:56:14.674690Z node 1 :TX_PROXY ERROR: Actor# [1:7488003068907762566:3541] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:56:17.636541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T15:56:19.817972Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 13411, MsgBus: 18858 2025-03-31T15:56:25.113492Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488003118712898260:2085];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:56:25.114717Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ad7/r3tmp/tmpJi9Rat/pdisk_1.dat 2025-03-31T15:56:25.280395Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:56:25.325058Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:56:25.325144Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:56:25.327631Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13411, node 2 2025-03-31T15:56:25.534839Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:56:25.534861Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:56:25.534869Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:56:25.535003Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18858 TClient is connected to server localhost:18858 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:56:29.660805Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, o ... 
: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:57:51.211012Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:57:51.339857Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:57:53.844120Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7488003473091724133:2142];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:57:53.844201Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:57:54.879605Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488003498861529623:2410], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:54.879711Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:54.946685Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T15:57:55.030568Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T15:57:55.079847Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T15:57:55.131882Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T15:57:55.173052Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T15:57:55.221405Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T15:57:55.319813Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488003503156497436:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:55.319962Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:55.320537Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488003503156497441:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:55.329010Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T15:57:55.356369Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7488003503156497443:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T15:57:55.430516Z node 6 :TX_PROXY ERROR: Actor# [6:7488003503156497498:3464] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 5192, MsgBus: 16283 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ad7/r3tmp/tmpFj9oTX/pdisk_1.dat 2025-03-31T15:58:03.663435Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:03.766313Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:58:03.770654Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:58:03.770761Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:58:03.781671Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5192, node 7 2025-03-31T15:58:04.223872Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:58:04.223900Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:58:04.223914Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:58:04.224104Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16283 TClient is connected to server localhost:16283 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:58:05.588916Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:58:05.615997Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:58:05.714723Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T15:58:06.008914Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:58:06.252224Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:58:10.688281Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488003570304235299:2410], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:10.688454Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:10.740702Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T15:58:10.836806Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T15:58:10.922803Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T15:58:11.005419Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T15:58:11.079756Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T15:58:11.196145Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T15:58:11.439373Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488003574599203143:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:11.439535Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:11.440358Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488003574599203148:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:11.457077Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T15:58:11.493432Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7488003574599203150:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T15:58:11.592054Z node 7 :TX_PROXY ERROR: Actor# [7:7488003574599203207:3467] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpSort::Offset [GOOD] Test command err: Trying to start YDB, gRPC: 4451, MsgBus: 1751 2025-03-31T15:55:43.168173Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488002938807095604:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:55:43.172524Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ae2/r3tmp/tmpQulOYc/pdisk_1.dat 2025-03-31T15:55:47.855209Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:55:48.169586Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488002938807095604:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:55:48.184607Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:55:49.219501Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:55:49.219527Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:55:49.441210Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:55:49.557554Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:55:49.557685Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:55:49.578750Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4451, node 1 2025-03-31T15:55:53.457184Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:55:53.457216Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:55:53.457224Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:55:53.457344Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1751 TClient is connected to server localhost:1751 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:56:02.644169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:56:02.664446Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:56:02.677585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T15:56:02.878208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:56:03.147230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:56:03.239635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:56:04.184039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T15:56:04.184068Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:56:04.677419Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003029001410524:2419], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:56:04.677512Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:56:05.170897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T15:56:05.215504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T15:56:05.253887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T15:56:05.310998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T15:56:05.444151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T15:56:05.607547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T15:56:05.943321Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003033296378353:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:56:05.944240Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:56:05.945527Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003033296378358:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:56:05.975818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T15:56:06.029358Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488003033296378363:2475], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T15:56:06.102162Z node 1 :TX_PROXY ERROR: Actor# [1:7488003037591345714:3504] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:56:11.676595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T15:56:11.823907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T15:56:11.917167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 5756, MsgBus: 6003 2025-03-31T15:56:26.096699Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488003117891318204:2208];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ae2/r3tmp/tmpdvm7w6/pdisk_1.dat 2025-03-31T15:56:26.222956Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T15:56:26.401936Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:56:26.445713Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:56:26.445806Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:56:26.452341Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5756, node 2 2025-03-31T15:56:29.799729Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:56:29.799747Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:56:29.799755Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:56:29.799863Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T15:56:30.671512Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488003117891318204:2208];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:56:30.671582Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:6003 TClient is connected to server localhost:6003 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: ... 
sed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T15:57:51.008537Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:57:53.659801Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7488003475197307117:2193];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:57:53.659868Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:57:54.700740Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7488003500967112555:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:54.700883Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:54.778436Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T15:57:54.857880Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T15:57:54.907862Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T15:57:54.957713Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T15:57:55.010508Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T15:57:55.128928Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T15:57:55.200834Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7488003505262080369:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:55.200966Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:55.201273Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7488003505262080374:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:55.206021Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T15:57:55.218372Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7488003505262080376:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T15:57:55.297994Z node 5 :TX_PROXY ERROR: Actor# [5:7488003505262080431:3462] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 8026, MsgBus: 11298 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ae2/r3tmp/tmpNaxixD/pdisk_1.dat 2025-03-31T15:58:01.086136Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:01.284967Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:58:01.338095Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:58:01.338224Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:58:01.351490Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8026, node 6 2025-03-31T15:58:01.593054Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:58:01.593093Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:58:01.593105Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:58:01.593454Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11298 TClient is connected to server localhost:11298 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:58:02.812802Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:58:02.822209Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T15:58:02.857896Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T15:58:03.043391Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:58:03.622295Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:58:04.098916Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:58:09.548552Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488003564905530569:2412], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:09.548657Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:09.626396Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T15:58:09.710746Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T15:58:09.796657Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T15:58:09.904546Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T15:58:10.022421Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T15:58:10.117518Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T15:58:10.222999Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488003569200498393:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:10.223130Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:10.223476Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488003569200498398:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:10.228283Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T15:58:10.252305Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7488003569200498400:2466], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T15:58:10.351052Z node 6 :TX_PROXY ERROR: Actor# [6:7488003569200498456:3470] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_PQv1 [GOOD] >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_AutoscaleAwareSDK >> TopicAutoscaling::PartitionMerge_PreferedPartition_PQv1 [GOOD] >> TopicAutoscaling::PartitionSplit_AutosplitByLoad |68.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest >> TSubDomainTest::DatashardNotRunAtAllWhenSubDomainNodesIsStopped [GOOD] >> TraverseColumnShard::TraverseColumnTableRebootColumnshard [GOOD] |68.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest >> Balancing::Balancing_ManyTopics_PQv1 [GOOD] >> TPersQueueMirrorer::TestBasicRemote |68.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_data_erasure/ydb-core-tx-schemeshard-ut_data_erasure |68.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_data_erasure/ydb-core-tx-schemeshard-ut_data_erasure |68.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_data_erasure/ydb-core-tx-schemeshard-ut_data_erasure |68.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes |68.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes |68.1%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::DatashardNotRunAtAllWhenSubDomainNodesIsStopped [GOOD] Test command err: 2025-03-31T15:55:13.500092Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488002809055989600:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:55:13.500174Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T15:55:13.570131Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488002806355248435:2138];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:55:13.570209Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T15:55:13.613962Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488002806658354763:2221];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000b62/r3tmp/tmpyvajZy/pdisk_1.dat 2025-03-31T15:55:14.378344Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T15:55:14.819468Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 
2025-03-31T15:55:14.822673Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:55:15.186971Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:55:15.273673Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:55:15.273784Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:55:15.280045Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:55:15.280190Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:55:15.286726Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T15:55:15.296979Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:55:15.299922Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:55:15.309420Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:55:15.309500Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:55:15.331186Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-03-31T15:55:15.362885Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8275 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-03-31T15:55:16.739473Z node 1 :TX_PROXY DEBUG: actor# [1:7488002809055989822:2143] Handle TEvNavigate describe path dc-1 2025-03-31T15:55:16.739522Z node 1 :TX_PROXY DEBUG: Actor# [1:7488002821940892214:2482] HANDLE EvNavigateScheme dc-1 2025-03-31T15:55:16.739715Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7488002813350957176:2166], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T15:55:16.739797Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002813350957253:2208][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7488002813350957176:2166], cookie# 1 2025-03-31T15:55:16.744633Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488002813350957279:2208][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488002813350957276:2208], cookie# 1 2025-03-31T15:55:16.744691Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488002813350957280:2208][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488002813350957277:2208], cookie# 1 2025-03-31T15:55:16.744710Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488002813350957281:2208][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488002813350957278:2208], cookie# 1 2025-03-31T15:55:16.744751Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488002809055989464:2059] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488002813350957281:2208], cookie# 1 2025-03-31T15:55:16.744807Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488002813350957281:2208][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488002809055989464:2059], cookie# 1 2025-03-31T15:55:16.744842Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002813350957253:2208][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488002813350957278:2208], cookie# 1 2025-03-31T15:55:16.744867Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002813350957253:2208][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-03-31T15:55:16.744886Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488002809055989458:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488002813350957279:2208], cookie# 1 2025-03-31T15:55:16.744904Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488002809055989461:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488002813350957280:2208], cookie# 1 2025-03-31T15:55:16.744924Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488002813350957279:2208][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488002809055989458:2053], cookie# 1 2025-03-31T15:55:16.744939Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488002813350957280:2208][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488002809055989461:2056], cookie# 1 2025-03-31T15:55:16.744958Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002813350957253:2208][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# 
[1:7488002813350957276:2208], cookie# 1 2025-03-31T15:55:16.744977Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002813350957253:2208][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-03-31T15:55:16.745000Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002813350957253:2208][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488002813350957277:2208], cookie# 1 2025-03-31T15:55:16.745013Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488002813350957253:2208][/dc-1] Unexpected sync response: sender# [1:7488002813350957277:2208], cookie# 1 2025-03-31T15:55:16.745083Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7488002813350957176:2166], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-03-31T15:55:16.776317Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7488002813350957176:2166], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7488002813350957253:2208] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-31T15:55:16.776420Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7488002813350957176:2166], cacheItem# { Subscriber: { Subscriber: [1:7488002813350957253:2208] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-03-31T15:55:16.779548Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7488002821940892218:2483], recipient# [1:7488002821940892214:2482], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-31T15:55:16.779611Z node 1 :TX_PROXY DEBUG: Actor# [1:7488002821940892214:2482] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-31T15:55:16.835228Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7488002806355248606:2108], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false 
Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T15:55:16.835219Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7488002813350957176:2166], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T15:55:16.835506Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7488002813350957176:2166], cacheItem# { Subscriber: { Subscriber: [1:7488002817645924897:2470] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T15:55:16.835618Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7488002821940892220:2484], recipient# [1:7488002821940892219:2301], result# { Er ... r: [8:7488003007147030812:2420] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T15:58:15.649943Z node 8 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [8:7488003591262583689:2711], recipient# [8:7488003591262583688:2646], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T15:58:16.499652Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [7:7488002932708849290:2127], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T15:58:16.499819Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [7:7488002932708849290:2127], cacheItem# { Subscriber: { Subscriber: [7:7488002937003817194:2547] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: 
[18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T15:58:16.499892Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [7:7488002932708849290:2127], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T15:58:16.499967Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [7:7488002932708849290:2127], cacheItem# { Subscriber: { Subscriber: [7:7488002979953490561:2817] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T15:58:16.500035Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [7:7488003594133816012:3719], recipient# [7:7488003594133816009:2644], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T15:58:16.500098Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [7:7488003594133816011:3718], recipient# [7:7488003594133816010:2645], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T15:58:16.641983Z node 8 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [8:7488002938427553641:2147], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T15:58:16.642172Z node 8 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [8:7488002938427553641:2147], cacheItem# { Subscriber: { Subscriber: [8:7488002947017488299:2187] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: 
[18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T15:58:16.642282Z node 8 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [8:7488003595557550987:2712], recipient# [8:7488003595557550986:2647], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T15:58:16.655545Z node 8 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [8:7488002938427553641:2147], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T15:58:16.655711Z node 8 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [8:7488002938427553641:2147], cacheItem# { Subscriber: { Subscriber: [8:7488003007147030812:2420] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T15:58:16.655805Z node 8 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [8:7488003595557550989:2713], recipient# [8:7488003595557550988:2648], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T15:58:17.503338Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [7:7488002932708849290:2127], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T15:58:17.503526Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [7:7488002932708849290:2127], cacheItem# { Subscriber: { Subscriber: [7:7488002979953490561:2817] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: 
dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T15:58:17.503604Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [7:7488002932708849290:2127], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T15:58:17.503684Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [7:7488002932708849290:2127], cacheItem# { Subscriber: { Subscriber: [7:7488002937003817194:2547] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T15:58:17.503758Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [7:7488003598428783322:3725], recipient# [7:7488003598428783320:2646], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T15:58:17.503821Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [7:7488003598428783323:3726], recipient# [7:7488003598428783321:2647], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> DataShardWrite::CancelImmediate [GOOD] >> DataShardWrite::DeletePrepared+Volatile ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootColumnshard [GOOD] Test command err: 2025-03-31T15:54:39.217578Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:54:39.218756Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T15:54:39.218979Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000f45/r3tmp/tmpkVZCSF/pdisk_1.dat 2025-03-31T15:54:43.921713Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21866, node 1 2025-03-31T15:54:45.599768Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:54:45.599912Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:54:45.600027Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:54:45.641798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T15:54:45.652228Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T15:54:45.884740Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:45.885558Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:45.909320Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10460 2025-03-31T15:54:47.863608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T15:54:59.911870Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-31T15:54:59.992442Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:59.992728Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:55:00.045633Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T15:55:00.050896Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:55:00.473429Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:55:00.479832Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:55:00.481654Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:55:00.481814Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:55:00.482291Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:55:00.482405Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:55:00.482796Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:55:00.482897Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:55:00.482981Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:55:00.754228Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:55:00.754320Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:55:00.853243Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:55:03.144903Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:55:03.428275Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-31T15:55:03.428394Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-31T15:55:03.545231Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-31T15:55:03.561977Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-31T15:55:03.564695Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-31T15:55:03.565313Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-31T15:55:03.565828Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-31T15:55:03.566362Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-31T15:55:03.566865Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-31T15:55:03.578565Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-31T15:55:03.582919Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-31T15:55:03.714514Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-31T15:55:03.715703Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1872:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-31T15:55:03.810239Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2608] 2025-03-31T15:55:03.863175Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1908:2617] 2025-03-31T15:55:03.871985Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1908:2617], schemeshard id = 72075186224037897 2025-03-31T15:55:03.921426Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-31T15:55:04.136674Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-31T15:55:04.137000Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-31T15:55:04.137537Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-31T15:55:04.219665Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-31T15:55:04.306302Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-31T15:55:04.315827Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-31T15:55:05.352739Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-31T15:55:06.395368Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-31T15:55:06.560357Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-31T15:55:09.784504Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2241:3074], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:55:09.784644Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:55:10.028202Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-31T15:55:11.072763Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T15:55:11.073013Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T15:55:11.073371Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T15:55:11.073530Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T15:55:11.073664Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T15:55:11.073821Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T15:55:11.074052Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T15:55:11.074195Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T15:55:11.074335Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T15:55:11.074462Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T15:55:11.079232Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T15:55:11.079571Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T15:55:11.156728Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2398:2889];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T15:55:11.157047Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2398:2889];tablet_id=72075186224037900;process= ... extTraversal 2025-03-31T15:58:16.167691Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-31T15:58:16.167731Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-03-31T15:58:16.167774Z node 2 :STATISTICS DEBUG: [72075186224037894] Start schedule traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-31T15:58:16.174659Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-31T15:58:16.216369Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-31T15:58:16.216987Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-31T15:58:16.217086Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-31T15:58:16.218085Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-03-31T15:58:16.238300Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-03-31T15:58:16.238567Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-03-31T15:58:16.240186Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8560:6492], server id = [2:8565:6497], tablet id = 72075186224037899, status = OK 2025-03-31T15:58:16.240579Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8560:6492], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T15:58:16.240936Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8561:6493], server id = [2:8566:6498], tablet id = 72075186224037900, status = OK 2025-03-31T15:58:16.240998Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8561:6493], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T15:58:16.242398Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8562:6494], server id = [2:8567:6499], tablet id = 72075186224037901, status = OK 2025-03-31T15:58:16.242456Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8562:6494], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T15:58:16.249345Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8563:6495], server id = [2:8568:6500], tablet id = 72075186224037902, status = OK 2025-03-31T15:58:16.249453Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8563:6495], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T15:58:16.249939Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8564:6496], server id = [2:8569:6501], tablet id = 72075186224037903, status = OK 2025-03-31T15:58:16.249998Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8564:6496], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T15:58:16.256520Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-03-31T15:58:16.257270Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8560:6492], server id = [2:8565:6497], tablet id = 72075186224037899 2025-03-31T15:58:16.257315Z node 2 :STATISTICS 
DEBUG: Skip EvClientDestroyed 2025-03-31T15:58:16.258181Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-03-31T15:58:16.258797Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8561:6493], server id = [2:8566:6498], tablet id = 72075186224037900 2025-03-31T15:58:16.258827Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:58:16.259815Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-03-31T15:58:16.260099Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8583:6512], server id = [2:8587:6514], tablet id = 72075186224037904, status = OK 2025-03-31T15:58:16.260183Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8583:6512], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T15:58:16.260951Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8562:6494], server id = [2:8567:6499], tablet id = 72075186224037901 2025-03-31T15:58:16.260981Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:58:16.261964Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-03-31T15:58:16.262506Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8586:6513], server id = [2:8589:6516], tablet id = 72075186224037905, status = OK 2025-03-31T15:58:16.262582Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8586:6513], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T15:58:16.262978Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2025-03-31T15:58:16.263910Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8563:6495], server id = [2:8568:6500], tablet id = 72075186224037902 2025-03-31T15:58:16.263978Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:58:16.264367Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8588:6515], server id = [2:8590:6517], tablet id = 72075186224037906, status = OK 2025-03-31T15:58:16.264434Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8588:6515], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T15:58:16.265048Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8564:6496], server id = [2:8569:6501], tablet id = 72075186224037903 2025-03-31T15:58:16.265076Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:58:16.266005Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8592:6519], server id = [2:8595:6522], tablet id = 72075186224037907, status = OK 2025-03-31T15:58:16.266068Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8592:6519], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T15:58:16.266522Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8594:6521], server id = [2:8599:6525], tablet id = 72075186224037908, status = OK 2025-03-31T15:58:16.266574Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8594:6521], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T15:58:16.274920Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-03-31T15:58:16.283987Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8583:6512], server id = [2:8587:6514], tablet id = 72075186224037904 2025-03-31T15:58:16.284038Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 
2025-03-31T15:58:16.285610Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-03-31T15:58:16.286071Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8586:6513], server id = [2:8589:6516], tablet id = 72075186224037905 2025-03-31T15:58:16.286103Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:58:16.286520Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-03-31T15:58:16.286744Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8588:6515], server id = [2:8590:6517], tablet id = 72075186224037906 2025-03-31T15:58:16.286768Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:58:16.296058Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-03-31T15:58:16.296386Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8592:6519], server id = [2:8595:6522], tablet id = 72075186224037907 2025-03-31T15:58:16.296414Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:58:16.296710Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-03-31T15:58:16.296754Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-03-31T15:58:16.296940Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-03-31T15:58:16.297158Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-03-31T15:58:16.297496Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-31T15:58:16.303853Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8594:6521], server id = [2:8599:6525], tablet id = 72075186224037908 2025-03-31T15:58:16.303899Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:58:16.304736Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-03-31T15:58:16.399460Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8626:6548]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T15:58:16.399781Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-31T15:58:16.399841Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8626:6548], StatRequests.size() = 1 2025-03-31T15:58:16.722962Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YWMyYTQ4YmYtZDIxZGYyYjEtMjNiODk1M2UtOTlhNzFhYWY=, TxId: 2025-03-31T15:58:16.723029Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YWMyYTQ4YmYtZDIxZGYyYjEtMjNiODk1M2UtOTlhNzFhYWY=, TxId: ... 
waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2025-03-31T15:58:16.726116Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-31T15:58:16.727263Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];ev=NActors::IEventHandle;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-03-31T15:58:16.782527Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-31T15:58:16.782598Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-03-31T15:58:16.886805Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:8649:6559];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-03-31T15:58:17.259255Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:8759:6654]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-03-31T15:58:17.259571Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-03-31T15:58:17.259641Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-03-31T15:58:17.263042Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-03-31T15:58:17.264814Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-03-31T15:58:17.264904Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-03-31T15:58:17.276684Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 >> KqpNotNullColumns::UpdateTable_Immediate [GOOD] |68.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest >> DataShardWrite::UpdateImmediate [GOOD] >> DataShardWrite::RejectOnChangeQueueOverflow |68.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest |68.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest |68.1%| [TA] $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/test-results/unittest/{meta.json ... results_accumulator.log} >> AnalyzeColumnshard::AnalyzeServerless [GOOD] >> BasicUsage::TWriteSession_WriteEncoded [GOOD] >> CompressExecutor::TestExecutorMemUsage |68.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut |68.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsFromAdLdapServer >> TStorageBalanceTest::TestScenario3 [GOOD] |68.1%| [TA] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/test-results/unittest/{meta.json ... 
results_accumulator.log} |68.1%| [LD] {RESULT} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNotNullColumns::UpdateTable_Immediate [GOOD] Test command err: Trying to start YDB, gRPC: 1744, MsgBus: 8207 2025-03-31T15:56:08.636308Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488003045324143989:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:56:08.636347Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000acf/r3tmp/tmpbE0JQk/pdisk_1.dat 2025-03-31T15:56:11.931598Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:56:12.355949Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:56:12.356036Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:56:12.356171Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:56:12.364554Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1744, node 1 2025-03-31T15:56:12.616512Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:56:12.616534Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:56:12.616544Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:56:12.616625Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8207 2025-03-31T15:56:13.640183Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488003045324143989:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:56:13.640245Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:8207 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T15:56:15.008583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:56:15.367978Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T15:56:26.520847Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003122633556046:2355], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:56:26.520945Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:56:27.390151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T15:56:27.390182Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:56:29.352211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T15:56:30.046655Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003139813425353:2370], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:56:30.047579Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:56:30.055641Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003139813425358:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:56:30.109493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-31T15:56:30.230719Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488003139813425360:2374], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-31T15:56:30.352817Z node 1 :TX_PROXY ERROR: Actor# [1:7488003139813425413:2452] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:56:31.730495Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488003144108392761:2389], status: BAD_REQUEST, issues:
<main>: Error: Type annotation, code: 1030
<main>:1:13: Error: At function: KiWriteTable!
<main>:1:13: Error: Missing not null column in input: Value. All not null columns should be initialized, code: 2032 2025-03-31T15:56:31.731430Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWIyYmRlYTctNmVlMTc4NTQtMTNiYzgyMDgtNTJhODI2ZTU=, ActorId: [1:7488003122633556028:2353], ActorState: ExecuteState, TraceId: 01jqpepcgcbdpsa6awj1a71njk, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: 2025-03-31T15:56:32.077984Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488003144108392770:2393], status: GENERIC_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:1:13: Error: At function: KiWriteTable!
<main>:1:45: Error: Failed to convert type: Struct<'Key':Int32,'Value':Null> to Struct<'Key':Uint64?,'Value':String>
<main>:1:45: Error: Failed to convert 'Value': Null to String
:1:45: Error: Failed to convert input columns types to scheme types, code: 2031 2025-03-31T15:56:32.079379Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWIyYmRlYTctNmVlMTc4NTQtMTNiYzgyMDgtNTJhODI2ZTU=, ActorId: [1:7488003122633556028:2353], ActorState: ExecuteState, TraceId: 01jqpepcs3fa5knqfm0ps38vx5, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 18182, MsgBus: 17617 2025-03-31T15:56:34.113985Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488003154646687927:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:56:34.196531Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000acf/r3tmp/tmpgtYrwP/pdisk_1.dat 2025-03-31T15:56:34.491394Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:56:34.524476Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:56:34.524558Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:56:34.527429Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18182, node 2 2025-03-31T15:56:34.674317Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:56:34.674341Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:56:34.674348Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:56:34.674465Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17617 TClient is connected to server localhost:17617 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:56:35.154506Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T15:56:39.115280Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488003154646687927:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:56:39.115339Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:56:44.011459Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488003193301394267:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:56:44.011645Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:56:44.018987Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operatio ... , (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:57:39.756886Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:57:39.785216Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23688, node 5 2025-03-31T15:57:40.596367Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:57:40.596397Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:57:40.596405Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:57:40.596942Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12720 2025-03-31T15:57:44.796974Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7488003433095228777:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:57:44.797054Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:12720 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:57:46.024637Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:57:50.399626Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7488003480339869716:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:50.399757Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:50.400141Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7488003480339869728:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:57:50.404940Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T15:57:50.481745Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7488003480339869730:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T15:57:50.574792Z node 5 :TX_PROXY ERROR: Actor# [5:7488003480339869781:2354] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:57:50.618870Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T15:57:54.653798Z node 5 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T15:57:54.653841Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:58:02.465512Z node 5 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqpes2gw28fbw8n5et10qefr, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=N2IwZGI3ZTEtMWU2YjA2ZDAtMTk1YWQxYjMtODExNDEwNjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-03-31T15:58:02.465865Z node 5 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=5&id=N2IwZGI3ZTEtMWU2YjA2ZDAtMTk1YWQxYjMtODExNDEwNjI=, ActorId: [5:7488003518994576365:2533], ActorState: ExecuteState, TraceId: 01jqpes2gw28fbw8n5et10qefr, Create QueryResponse for error on request, msg: 2025-03-31T15:58:05.854397Z node 5 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqpes57qescse5c781hxxnk6, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=ZTFjYWQ3ZGItZGRlMGYwZjEtODMyZWFjMzktMzk5MmZiNDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed.
2025-03-31T15:58:05.854765Z node 5 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=5&id=ZTFjYWQ3ZGItZGRlMGYwZjEtODMyZWFjMzktMzk5MmZiNDI=, ActorId: [5:7488003531879478364:2562], ActorState: ExecuteState, TraceId: 01jqpes57qescse5c781hxxnk6, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 2085, MsgBus: 18855 2025-03-31T15:58:07.261284Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7488003553450511139:2128];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:58:07.266183Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000acf/r3tmp/tmpNupR5A/pdisk_1.dat 2025-03-31T15:58:07.619445Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:58:07.637491Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:58:07.637604Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:58:07.639595Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2085, node 6 2025-03-31T15:58:07.827903Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:58:07.827936Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:58:07.827948Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:58:07.828118Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18855 TClient is connected to server localhost:18855 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:58:08.946211Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T15:58:12.264063Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7488003553450511139:2128];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:58:12.264151Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:58:14.006651Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488003579220315520:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:14.006780Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:14.034381Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T15:58:14.166489Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488003583515282965:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:14.166601Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:14.167014Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488003583515282970:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:14.180698Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-31T15:58:14.236031Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7488003583515282972:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-31T15:58:14.332608Z node 6 :TX_PROXY ERROR: Actor# [6:7488003583515283027:2444] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpNotNullColumns::UpdateTable_Immediate [GOOD]
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:54:29.633840Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T15:54:29.633923Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000f9d/r3tmp/tmppsflla/pdisk_1.dat 2025-03-31T15:54:31.417186Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6478, node 1 2025-03-31T15:54:34.143125Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:54:34.143183Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:54:34.143215Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:54:34.171786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T15:54:34.173725Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T15:54:34.483597Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:34.484473Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:34.522793Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:31958 2025-03-31T15:54:38.239676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T15:54:48.404116Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-31T15:54:48.577721Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:48.578236Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:48.680558Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T15:54:48.707695Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:54:49.434235Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:49.441258Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:49.448040Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:49.448198Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:49.448420Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:49.448764Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:49.449045Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:49.449121Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:49.449187Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T15:54:49.703860Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:49.703951Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:49.717087Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:54:50.431473Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:54:50.719935Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-31T15:54:50.720425Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-31T15:54:50.913015Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-31T15:54:50.915591Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-31T15:54:50.917876Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-31T15:54:50.918420Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-31T15:54:50.919186Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-31T15:54:50.919826Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-31T15:54:50.920095Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-31T15:54:50.920815Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-31T15:54:50.922827Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-31T15:54:51.019774Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-31T15:54:51.020216Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1872:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-31T15:54:51.060987Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2608] 2025-03-31T15:54:51.076802Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1908:2617] 2025-03-31T15:54:51.077435Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1908:2617], schemeshard id = 72075186224037897 2025-03-31T15:54:51.079723Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-03-31T15:54:51.098970Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-31T15:54:51.099034Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-31T15:54:51.099130Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-03-31T15:54:51.130740Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-31T15:54:51.175933Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-31T15:54:51.176839Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-31T15:54:51.878106Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-31T15:54:52.750256Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-31T15:54:52.879834Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-31T15:54:54.285231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-31T15:54:56.572657Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:54:56.924982Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2025-03-31T15:54:56.925047Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037899 2025-03-31T15:54:56.925150Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2596:2947], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037899 2025-03-31T15:54:56.926828Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2604:2951] 2025-03-31T15:54:56.931635Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2604:2951], schemeshard id = 72075186224037899 2025-03-31T15:54:58.630683Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2727:3248], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:54:58.630861Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:54:58.651929Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715661:0, at schemeshard: 72075186224037899 2025-03-31T15:54:58.832959Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2816:3051];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T15:54:58.833138Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2816:3051];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T15:54:58.833397Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2816:3051];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T15:54:58.833488Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2816:3051];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T15:54:58.833564Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2816:3051];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T15:54:58.833646Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2816:3051];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T15:54:58.833741Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2816:3051];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T15:54:58.833825Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2816:3051];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T15:54:58.833900Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2816:3051];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T15: ... 1T15:58:11.187375Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-03-31T15:58:13.360022Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037897 2025-03-31T15:58:13.360111Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 185.000000s, at schemeshard: 72075186224037897 2025-03-31T15:58:13.360535Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037897, stats size# 25 2025-03-31T15:58:13.392486Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-03-31T15:58:15.012018Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T15:58:15.012105Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. 
OperationId=operationId 2025-03-31T15:58:15.012152Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-03-31T15:58:15.012224Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table. 2025-03-31T15:58:15.012270Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-03-31T15:58:15.012663Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2025-03-31T15:58:15.016819Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-03-31T15:58:15.026367Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:8063:5952], DatabaseId: /Root/Shared, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:15.026500Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:8074:5957], DatabaseId: /Root/Shared, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:15.031547Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Shared, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:15.064769Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897 2025-03-31T15:58:15.155412Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:8077:5960], DatabaseId: /Root/Shared, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-03-31T15:58:15.410198Z node 2 :TX_PROXY ERROR: Actor# [2:8174:6009] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Shared/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:58:15.529077Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:8203:6024]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T15:58:15.529545Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-31T15:58:15.529666Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:8205:6026] 2025-03-31T15:58:15.529741Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:8205:6026] 2025-03-31T15:58:15.530224Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:8206:6027] 2025-03-31T15:58:15.530344Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8205:6026], server id = [2:8206:6027], tablet id = 72075186224037894, status = OK 2025-03-31T15:58:15.530467Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:8206:6027], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-03-31T15:58:15.530540Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-03-31T15:58:15.530695Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-31T15:58:15.530791Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:8203:6024], StatRequests.size() = 1 2025-03-31T15:58:15.705263Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MTg4NmE4ZjgtNTE5ZmM3M2MtZDlhNDI2N2UtYzRjMTUxMzg=, TxId: 2025-03-31T15:58:15.705362Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MTg4NmE4ZjgtNTE5ZmM3M2MtZDlhNDI2N2UtYzRjMTUxMzg=, TxId: 2025-03-31T15:58:15.706046Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-31T15:58:15.739482Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-03-31T15:58:15.739556Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 
2025-03-31T15:58:15.779198Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-03-31T15:58:15.779277Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-03-31T15:58:15.863923Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:8205:6026], schemeshard count = 1 2025-03-31T15:58:16.311951Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037899 2025-03-31T15:58:16.312020Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 183.000000s, at schemeshard: 72075186224037899 2025-03-31T15:58:16.312317Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037899, stats size# 28 2025-03-31T15:58:16.332342Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-03-31T15:58:17.413999Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-31T15:58:17.414103Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 2] is column table. 2025-03-31T15:58:17.417163Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-31T15:58:17.436827Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-31T15:58:17.437422Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-31T15:58:17.437481Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::ExecuteAnalyze. Table OperationId operationId, PathId [OwnerId: 72075186224037899, LocalPathId: 2], AnalyzedShards 1 2025-03-31T15:58:17.456510Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-31T15:58:17.492054Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableRequest::Complete. Send 1 events. 2025-03-31T15:58:17.493022Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Execute 2025-03-31T15:58:17.493126Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Execute. All shards are analyzed 2025-03-31T15:58:17.510686Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-03-31T15:58:19.107249Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T15:58:19.107398Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 2] is column table. 2025-03-31T15:58:19.107444Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037899, LocalPathId: 2] 2025-03-31T15:58:19.108157Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-31T15:58:19.139147Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-31T15:58:19.139589Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-31T15:58:19.139672Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-31T15:58:19.140645Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-03-31T15:58:19.158132Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-03-31T15:58:19.158357Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-03-31T15:58:19.158989Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8343:6115], server id = [2:8344:6116], tablet id = 72075186224037905, status = OK 2025-03-31T15:58:19.159145Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8343:6115], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-03-31T15:58:19.169993Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-03-31T15:58:19.170133Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-03-31T15:58:19.170364Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-03-31T15:58:19.170631Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-03-31T15:58:19.170989Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2025-03-31T15:58:19.173527Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8343:6115], server id = [2:8344:6116], tablet id = 72075186224037905 2025-03-31T15:58:19.173587Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T15:58:19.174261Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-03-31T15:58:19.321670Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8364:6135]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T15:58:19.321969Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-31T15:58:19.322028Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8364:6135], StatRequests.size() = 1 2025-03-31T15:58:19.788107Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MTQ1ZDYxZi0yYTllMTEyYi04ZjkwMWQ2NC1hNWQ1YTEy, TxId: 2025-03-31T15:58:19.788192Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MTQ1ZDYxZi0yYTllMTEyYi04ZjkwMWQ2NC1hNWQ1YTEy, TxId: 2025-03-31T15:58:19.788822Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-31T15:58:19.811007Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037899, LocalPathId: 2] 2025-03-31T15:58:19.811097Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3322:3401] >> DataShardSnapshots::MvccSnapshotAndSplit [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWrites+UseSink >> DataShardSnapshots::MvccSnapshotTailCleanup [GOOD] >> DataShardSnapshots::MvccSnapshotReadWithLongPlanQueue >> DataShardSnapshots::LockedWriteReuseAfterCommit+UseSink [GOOD] >> DataShardSnapshots::LockedWriteReuseAfterCommit-UseSink |68.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest |68.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::Simple >> PrivateApi::PingTask [GOOD] >> PrivateApi::GetTask >> DataShardWrite::DeletePrepared+Volatile [GOOD] >> DataShardWrite::DeletePrepared-Volatile >> KqpIndexes::InnerJoinWithNonIndexWherePredicate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> TStorageBalanceTest::TestScenario3 [GOOD] Test command err: c[def1] ------------------------------ (0) c[def1] ------------------------------ (0) ------------------------------ (0) c[def1] ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) c[def1] ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) c[def1] ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) c[def1] ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) c[def1] ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) c[def1] ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) c[def1] ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) c[def1] ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) 2025-03-31T15:54:11.381478Z node 5 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-03-31T15:54:11.393549Z node 5 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { 
FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-03-31T15:54:11.393718Z node 5 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-03-31T15:54:11.394523Z node 5 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [5:287:2081] ControllerId# 72057594037932033 2025-03-31T15:54:11.394567Z node 5 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-03-31T15:54:11.394671Z node 5 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295 2025-03-31T15:54:11.394986Z node 5 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler 2025-03-31T15:54:11.397800Z node 5 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-03-31T15:54:11.397849Z node 5 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-03-31T15:54:11.399821Z node 5 :BS_PROXY DEBUG: Group# 0 Actor# [5:286:2080] Create Queue# [5:293:2085] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:11.399997Z node 5 :BS_PROXY DEBUG: Group# 0 Actor# [5:286:2080] Create Queue# [5:294:2086] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:11.400132Z node 5 :BS_PROXY DEBUG: Group# 0 Actor# [5:286:2080] Create Queue# [5:295:2087] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:11.400266Z node 5 :BS_PROXY DEBUG: Group# 0 Actor# [5:286:2080] Create Queue# [5:296:2088] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:11.400469Z node 5 :BS_PROXY DEBUG: Group# 0 Actor# [5:286:2080] Create Queue# [5:297:2089] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:11.400597Z node 5 :BS_PROXY DEBUG: Group# 0 Actor# [5:286:2080] Create Queue# [5:298:2090] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:11.400743Z node 5 :BS_PROXY DEBUG: Group# 0 Actor# [5:286:2080] Create Queue# [5:299:2091] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:11.400771Z node 5 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-03-31T15:54:11.400850Z node 5 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [5:287:2081] 2025-03-31T15:54:11.400883Z node 5 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [5:287:2081] 2025-03-31T15:54:11.400933Z node 5 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-03-31T15:54:11.400980Z node 5 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-03-31T15:54:11.401521Z node 5 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-03-31T15:54:11.401609Z node 6 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-03-31T15:54:11.404369Z node 6 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-03-31T15:54:11.404480Z node 6 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-03-31T15:54:11.405310Z node 6 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [6:307:2081] ControllerId# 
72057594037932033 2025-03-31T15:54:11.405344Z node 6 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-03-31T15:54:11.405405Z node 6 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295 2025-03-31T15:54:11.405578Z node 6 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler 2025-03-31T15:54:11.407667Z node 6 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-03-31T15:54:11.407703Z node 6 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-03-31T15:54:11.409464Z node 6 :BS_PROXY DEBUG: Group# 0 Actor# [6:306:2080] Create Queue# [6:313:2085] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:11.409633Z node 6 :BS_PROXY DEBUG: Group# 0 Actor# [6:306:2080] Create Queue# [6:314:2086] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:11.409781Z node 6 :BS_PROXY DEBUG: Group# 0 Actor# [6:306:2080] Create Queue# [6:315:2087] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:11.409917Z node 6 :BS_PROXY DEBUG: Group# 0 Actor# [6:306:2080] Create Queue# [6:316:2088] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:11.410067Z node 6 :BS_PROXY DEBUG: Group# 0 Actor# [6:306:2080] Create Queue# [6:317:2089] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:11.410208Z node 6 :BS_PROXY DEBUG: Group# 0 Actor# [6:306:2080] Create Queue# [6:318:2090] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:11.410362Z node 6 :BS_PROXY DEBUG: Group# 0 Actor# [6:306:2080] Create Queue# [6:319:2091] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:11.410386Z node 6 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-03-31T15:54:11.410442Z node 6 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [6:307:2081] 2025-03-31T15:54:11.410475Z node 6 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [6:307:2081] 2025-03-31T15:54:11.410509Z node 6 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-03-31T15:54:11.410558Z node 6 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-03-31T15:54:11.411013Z node 6 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-03-31T15:54:11.411195Z node 7 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-03-31T15:54:11.413968Z node 7 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-03-31T15:54:11.414069Z node 7 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-03-31T15:54:11.414850Z node 7 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [7:329:2081] ControllerId# 72057594037932033 2025-03-31T15:54:11.414880Z node 7 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-03-31T15:54:11.414940Z node 7 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295 2025-03-31T15:54:11.415143Z node 7 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler 
2025-03-31T15:54:11.417191Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-03-31T15:54:11.419787Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-03-31T15:54:11.419965Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:196} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-03-31T15:54:11.420536Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-03-31T15:54:11.421689Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:265} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2025-03-31T15:54:11.421743Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-03-31T15:54:11.422519Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:341:2084] ControllerId# 72057594037932033 2025-03-31T15:54:11.422554Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-03-31T15:54:11.422619Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295 2025-03-31T15:54:11.422802Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler 2025-03-31T15:54:11.435069Z node 1 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-03-31T15:54:11.435139Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-03-31T15:54:11.436721Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:340:2083] Create Queue# [1:349:2089] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:11.436896Z node 1 :BS_PROXY DEBUG: Group# 0 Acto ... 
node 11 :BS_PROXY_PUT DEBUG: [795fbf64b26977dc] Result# TEvPutResult {Id# [72057594037927937:2:491:0:0:246:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2025-03-31T15:58:20.501286Z node 11 :BS_PROXY_PUT INFO: [795fbf64b26977dc] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:491:0:0:246:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-03-31T15:58:20.501418Z node 11 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 0 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.982 sample PartId# [72057594037927937:2:491:0:0:246:1] QueryCount# 1 VDiskId# [0:1:0:0:0] NodeId# 11 } TEvVPutResult{ TimestampMs# 16.025 VDiskId# [0:1:0:0:0] NodeId# 11 Status# OK } ] } 2025-03-31T15:58:20.502053Z node 11 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037927937:2:491:0:0:246:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-03-31T15:58:20.502746Z node 11 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:492} commited cookie 1 for step 491 2025-03-31T15:58:20.506779Z node 11 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:492} Tx{1488, NKikimr::NHive::TTxReassignGroups} queued, type NKikimr::NHive::TTxReassignGroups 2025-03-31T15:58:20.506884Z node 11 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:492} Tx{1488, NKikimr::NHive::TTxReassignGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-31T15:58:20.507159Z node 11 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:492} Tx{1488, NKikimr::NHive::TTxReassignGroups} hope 1 -> done Change{994, redo 303b alter 0b annex 0, ~{ 1, 2 } -{ }, 0 gb} 2025-03-31T15:58:20.507225Z node 11 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:492} Tx{1488, NKikimr::NHive::TTxReassignGroups} release 4194304b of static, Memory{0 dyn 0} 2025-03-31T15:58:20.507364Z node 11 :PIPE_CLIENT DEBUG: TClient[72057594037932033] send [11:1297:2642] 2025-03-31T15:58:20.507401Z node 11 :PIPE_CLIENT DEBUG: TClient[72057594037932033] push event to server [11:1297:2642] 2025-03-31T15:58:20.507455Z node 11 :PIPE_SERVER DEBUG: [72057594037932033] HandleSend Sender# [11:1239:2604] EventType# 268637702 c[def1] *****----------------------------------------------------------------------------------------------- (0.046) *****----------------------------------------------------------------------------------------------- (0.048) ******---------------------------------------------------------------------------------------------- (0.062) *****----------------------------------------------------------------------------------------------- (0.054) *****----------------------------------------------------------------------------------------------- (0.046) *******--------------------------------------------------------------------------------------------- (0.066) ******---------------------------------------------------------------------------------------------- (0.06) *****----------------------------------------------------------------------------------------------- (0.052) *******--------------------------------------------------------------------------------------------- (0.068) *****----------------------------------------------------------------------------------------------- (0.052) *****----------------------------------------------------------------------------------------------- (0.046) 2025-03-31T15:58:20.612487Z 
node 11 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:493} Tx{1489, NKikimr::NHive::TTxUpdateTabletGroups} queued, type NKikimr::NHive::TTxUpdateTabletGroups 2025-03-31T15:58:20.612586Z node 11 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:493} Tx{1489, NKikimr::NHive::TTxUpdateTabletGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-31T15:58:20.612720Z node 11 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{88923002919744}: tablet 72075186224037958 wasn't changed 2025-03-31T15:58:20.612762Z node 11 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{88923002919744}: tablet 72075186224037958 skipped channel 0 2025-03-31T15:58:20.612859Z node 11 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{88923002919744}: tablet 72075186224037958 skipped channel 1 2025-03-31T15:58:20.612898Z node 11 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{88923002919744}: tablet 72075186224037958 skipped channel 2 2025-03-31T15:58:20.612975Z node 11 :HIVE NOTICE: HIVE#72057594037927937 THive::TTxUpdateTabletGroups{88923002919744}(72075186224037958)::Execute - TryToBoot was not successfull 2025-03-31T15:58:20.613063Z node 11 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:493} Tx{1489, NKikimr::NHive::TTxUpdateTabletGroups} hope 1 -> done Change{995, redo 257b alter 0b annex 0, ~{ 2, 1 } -{ }, 0 gb} 2025-03-31T15:58:20.613123Z node 11 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:493} Tx{1489, NKikimr::NHive::TTxUpdateTabletGroups} release 4194304b of static, Memory{0 dyn 0} 2025-03-31T15:58:20.629444Z node 11 :BS_PROXY_PUT INFO: [d8c78320900fe758] bootstrap ActorId# [11:11598:6243] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:492:0:0:246:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-03-31T15:58:20.629612Z node 11 :BS_PROXY_PUT DEBUG: [d8c78320900fe758] Id# [72057594037927937:2:492:0:0:246:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-03-31T15:58:20.629659Z node 11 :BS_PROXY_PUT DEBUG: [d8c78320900fe758] restore Id# [72057594037927937:2:492:0:0:246:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-03-31T15:58:20.629716Z node 11 :BS_PROXY_PUT DEBUG: [d8c78320900fe758] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:492:0:0:246:1] Marker# BPG33 2025-03-31T15:58:20.629772Z node 11 :BS_PROXY_PUT DEBUG: [d8c78320900fe758] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:492:0:0:246:1] Marker# BPG32 2025-03-31T15:58:20.629898Z node 11 :BS_PROXY DEBUG: Send to queueActorId# [11:479:2089] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:492:0:0:246:1] FDS# 246 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-31T15:58:20.653003Z node 11 :BS_PROXY_PUT DEBUG: [d8c78320900fe758] received {EvVPutResult Status# OK ID# [72057594037927937:2:492:0:0:246:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 508 } Cost# 81937 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 509 }}}} from# [0:1:0:0:0] Marker# BPP01 2025-03-31T15:58:20.653159Z node 11 :BS_PROXY_PUT DEBUG: [d8c78320900fe758] Result# TEvPutResult {Id# [72057594037927937:2:492:0:0:246:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2025-03-31T15:58:20.653217Z node 11 :BS_PROXY_PUT INFO: 
[d8c78320900fe758] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:492:0:0:246:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-03-31T15:58:20.653365Z node 11 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 0 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.93 sample PartId# [72057594037927937:2:492:0:0:246:1] QueryCount# 1 VDiskId# [0:1:0:0:0] NodeId# 11 } TEvVPutResult{ TimestampMs# 24.093 VDiskId# [0:1:0:0:0] NodeId# 11 Status# OK } ] } 2025-03-31T15:58:20.653929Z node 11 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037927937:2:492:0:0:246:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-03-31T15:58:20.654326Z node 11 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:493} commited cookie 1 for step 492 2025-03-31T15:58:20.660582Z node 11 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:493} Tx{1490, NKikimr::NHive::TTxReassignGroups} queued, type NKikimr::NHive::TTxReassignGroups 2025-03-31T15:58:20.660679Z node 11 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:493} Tx{1490, NKikimr::NHive::TTxReassignGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-31T15:58:20.660928Z node 11 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:493} Tx{1490, NKikimr::NHive::TTxReassignGroups} hope 1 -> done Change{996, redo 303b alter 0b annex 0, ~{ 1, 2 } -{ }, 0 gb} 2025-03-31T15:58:20.660990Z node 11 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:493} Tx{1490, NKikimr::NHive::TTxReassignGroups} release 4194304b of static, Memory{0 dyn 0} 2025-03-31T15:58:20.661133Z node 11 :PIPE_CLIENT DEBUG: TClient[72057594037932033] send [11:1297:2642] 2025-03-31T15:58:20.661169Z node 11 :PIPE_CLIENT DEBUG: TClient[72057594037932033] push event to server [11:1297:2642] 2025-03-31T15:58:20.661224Z node 11 :PIPE_SERVER DEBUG: [72057594037932033] HandleSend Sender# [11:1239:2604] EventType# 268637702 c[def1] *****----------------------------------------------------------------------------------------------- (0.046) *****----------------------------------------------------------------------------------------------- (0.048) ******---------------------------------------------------------------------------------------------- (0.062) *****----------------------------------------------------------------------------------------------- (0.054) *****----------------------------------------------------------------------------------------------- (0.046) *******--------------------------------------------------------------------------------------------- (0.066) ******---------------------------------------------------------------------------------------------- (0.06) *****----------------------------------------------------------------------------------------------- (0.052) *******--------------------------------------------------------------------------------------------- (0.068) *****----------------------------------------------------------------------------------------------- (0.052) *****----------------------------------------------------------------------------------------------- (0.046) 2025-03-31T15:58:20.764659Z node 11 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:494} Tx{1491, NKikimr::NHive::TTxUpdateTabletGroups} queued, type NKikimr::NHive::TTxUpdateTabletGroups 2025-03-31T15:58:20.764761Z node 11 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:494} Tx{1491, 
NKikimr::NHive::TTxUpdateTabletGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-31T15:58:20.764897Z node 11 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{88923005280032}: tablet 72075186224037966 wasn't changed 2025-03-31T15:58:20.764942Z node 11 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{88923005280032}: tablet 72075186224037966 skipped channel 0 2025-03-31T15:58:20.765033Z node 11 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{88923005280032}: tablet 72075186224037966 skipped channel 1 2025-03-31T15:58:20.765069Z node 11 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{88923005280032}: tablet 72075186224037966 skipped channel 2 2025-03-31T15:58:20.765144Z node 11 :HIVE NOTICE: HIVE#72057594037927937 THive::TTxUpdateTabletGroups{88923005280032}(72075186224037966)::Execute - TryToBoot was not successfull 2025-03-31T15:58:20.765221Z node 11 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:494} Tx{1491, NKikimr::NHive::TTxUpdateTabletGroups} hope 1 -> done Change{997, redo 257b alter 0b annex 0, ~{ 2, 1 } -{ }, 0 gb} 2025-03-31T15:58:20.765274Z node 11 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:494} Tx{1491, NKikimr::NHive::TTxUpdateTabletGroups} release 4194304b of static, Memory{0 dyn 0} |68.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/ydb-core-tx-tx_proxy-ut_schemereq >> RemoteTopicReader::ReadTopic [GOOD] >> TopicAutoscaling::PartitionSplit_ManySession_AutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionSplit_ManySession_existed_AutoscaleAwareSDK |68.2%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/ydb-core-tx-tx_proxy-ut_schemereq |68.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/ydb-core-tx-tx_proxy-ut_schemereq >> ExternalBlobsMultipleChannels::ExtBlobsMultipleColumns >> KqpMultishardIndex::DataColumnWrite+UseSink >> TopicAutoscaling::WithDir_PartitionSplit_AutosplitByLoad [GOOD] >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds [GOOD] >> TTopicApiDescribes::GetPartitionDescribe [GOOD] >> TopicAutoscaling::BalancingAfterSplit_sessionsWithPartition [GOOD] >> TTopicApiDescribes::GetLocalDescribe [GOOD] >> KqpIndexes::PrefixedVectorIndexOrderByCosineSimilarityNotNullableLevel1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest >> RemoteTopicReader::ReadTopic [GOOD] Test command err: 2025-03-31T15:58:08.425188Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488003560354509868:2203];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:58:08.425510Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000c83/r3tmp/tmpSwvBoG/pdisk_1.dat 2025-03-31T15:58:09.695296Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:09.958108Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:58:09.970879Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:58:09.970980Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:58:10.027237Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:58:10.364240Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.196730s 2025-03-31T15:58:10.364368Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.196900s TClient is connected to server localhost:13784 TServer::EnableGrpc on GrpcPort 8380, node 1 2025-03-31T15:58:11.307832Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:58:11.307859Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:58:11.307865Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:58:11.307992Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13784 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:58:12.740641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:58:13.434345Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488003560354509868:2203];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:58:13.434416Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:58:14.078069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-03-31T15:58:14.999400Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003586124314419:2369], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:14.999544Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003586124314435:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:14.999615Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:15.000283Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003586124314434:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:15.071823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:2, at schemeshard: 72057594046644480 2025-03-31T15:58:15.152341Z node 1 :TX_PROXY ERROR: Actor# [1:7488003590419281738:2458] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-31T15:58:15.169461Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488003590419281735:2377], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-03-31T15:58:15.169563Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488003590419281734:2376], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-03-31T15:58:15.234410Z node 1 :TX_PROXY ERROR: Actor# [1:7488003590419281786:2490] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:58:15.253966Z node 1 :TX_PROXY ERROR: Actor# [1:7488003590419281795:2496] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:58:20.240594Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-31T15:58:21.503404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T15:58:22.218259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-31T15:58:22.972749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-03-31T15:58:23.977124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-03-31T15:58:24.935434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T15:58:24.935463Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:58:26.619904Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7488003637663922902:2904] Handshake: worker# [1:7488003577534379520:2299] 2025-03-31T15:58:26.640683Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7488003637663922902:2904] Create read session: session# [1:7488003637663922903:2298] 2025-03-31T15:58:26.643244Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7488003637663922902:2904] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2025-03-31T15:58:26.959835Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7488003637663922902:2904] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Codec: RAW Data: 9b Offset: 0 SeqNo: 1 CreateTime: 2025-03-31T15:58:26.376000Z MessageGroupId: producer ProducerId: producer }] } } 2025-03-31T15:58:26.982629Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7488003637663922902:2904] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2025-03-31T15:58:27.195605Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7488003637663922902:2904] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Codec: RAW Data: 9b Offset: 1 SeqNo: 2 CreateTime: 2025-03-31T15:58:27.135000Z MessageGroupId: producer 
ProducerId: producer }] } } 2025-03-31T15:58:27.242747Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7488003641958890331:2953] Handshake: worker# [1:7488003577534379520:2299] 2025-03-31T15:58:27.257332Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7488003641958890331:2953] Create read session: session# [1:7488003641958890332:2298] 2025-03-31T15:58:27.260731Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7488003641958890331:2953] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2025-03-31T15:58:27.303616Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7488003641958890331:2953] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Codec: RAW Data: 9b Offset: 1 SeqNo: 2 CreateTime: 2025-03-31T15:58:27.135000Z MessageGroupId: producer ProducerId: producer }] } } >> TTopicApiDescribes::DescribeConsumer [GOOD] |68.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut |68.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut |68.2%| [LD] {RESULT} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut >> TIcNodeCache::GetNodesInfoTest [GOOD] >> DataShardWrite::RejectOnChangeQueueOverflow [GOOD] >> DataShardWrite::UpsertBrokenLockArbiter >> Yq_1::CreateConnection_With_Existing_Name [GOOD] >> Yq_1::CreateConnections_With_Idempotency |68.2%| [TA] $(B)/ydb/core/tx/replication/service/ut_topic_reader/test-results/unittest/{meta.json ... results_accumulator.log} >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsFromAdLdapServer [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDefaultGroupAttributeGood ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TTopicApiDescribes::GetPartitionDescribe [GOOD] Test command err: 2025-03-31T15:58:10.428656Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488003568568367974:2275];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:58:10.428992Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T15:58:10.862004Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-31T15:58:10.862297Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488003569346761735:2291];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:58:10.869671Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00174e/r3tmp/tmpDN7OGr/pdisk_1.dat 2025-03-31T15:58:11.001460Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T15:58:11.379552Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:58:11.455535Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:11.491410Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2025-03-31T15:58:11.491538Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:58:11.492066Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:58:11.492110Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:58:11.508073Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T15:58:11.508298Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:58:11.509281Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9566, node 1 2025-03-31T15:58:11.767306Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/0l5c/00174e/r3tmp/yandex64yub8.tmp 2025-03-31T15:58:11.767332Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/0l5c/00174e/r3tmp/yandex64yub8.tmp 2025-03-31T15:58:11.767511Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/0l5c/00174e/r3tmp/yandex64yub8.tmp 2025-03-31T15:58:11.767662Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T15:58:11.865503Z INFO: TTestServer started on Port 11414 GrpcPort 9566 TClient is connected to server localhost:11414 PQClient connected to localhost:9566 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:58:12.406194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T15:58:12.491518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T15:58:12.716794Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 
2025-03-31T15:58:15.411332Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488003568568367974:2275];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:58:15.411392Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:58:15.850760Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488003590821598381:2317], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:15.850844Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488003590821598368:2314], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:15.851029Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:15.858403Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480 2025-03-31T15:58:15.863322Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488003569346761735:2291];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:58:15.863399Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:58:15.914414Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488003590821598385:2318], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2025-03-31T15:58:16.027768Z node 2 :TX_PROXY ERROR: Actor# [2:7488003595116565708:2179] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:58:17.100386Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 2146435072 Duration# 0.556984s 2025-03-31T15:58:17.100431Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.557067s 2025-03-31T15:58:17.179984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T15:58:17.224903Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7488003595116565717:2323], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T15:58:17.226830Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YjJhYWU0ZmMtZWFhNDg5MDEtZTY5YmRlMTMtZDNjYTMzZjU=, ActorId: [2:7488003590821598366:2313], ActorState: ExecuteState, TraceId: 01jqpesj8wa158v0jp2dt0nv1b, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T15:58:17.257017Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T15:58:17.260188Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488003594338172725:2346], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T15:58:17.261854Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjhmZThlYmUtMWZkY2E5ZTUtYTFhYjg5NmMtNmI3ZDM3ODc=, ActorId: [1:7488003594338172699:2339], ActorState: ExecuteState, TraceId: 01jqpesjhr2jmf8h6vvr27vyc2, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T15:58:17.262266Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T15:58:17.345232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T15:58:17.546519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-03-31T15:58:17.811440Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jqpesm2bdjejp89qfmtpbczj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2YzMjUwMGEtMTBmNjEyNTUtOTkyODViOGMtMTJmZTg5NTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7488003598633140491:3099] === CheckClustersList. Ok CreateTop ... 0, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 0 generation 2 [2:7488003633771272567:2490] 2025-03-31T15:58:25.186845Z node 2 :PERSQUEUE INFO: [rt3.dc1--topic-x:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-31T15:58:25.186868Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: 3, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 3 generation 2 [2:7488003633771272566:2489] 2025-03-31T15:58:25.190182Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037892, NodeId 2, Generation 2 2025-03-31T15:58:25.191458Z node 1 :PERSQUEUE INFO: [rt3.dc1--topic-x:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-31T15:58:25.191499Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037893, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 1 generation 2 [1:7488003632992880534:2562] 2025-03-31T15:58:25.191708Z node 1 :PERSQUEUE INFO: [rt3.dc1--topic-x:6:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2025-03-31T15:58:25.191739Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037893, Partition: 6, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 6 generation 2 [1:7488003632992880535:2563] 2025-03-31T15:58:25.192598Z node 1 :PERSQUEUE INFO: [rt3.dc1--topic-x:10:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-31T15:58:25.192651Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037896, Partition: 10, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 10 generation 2 [1:7488003632992880531:2561] 2025-03-31T15:58:25.197120Z node 1 :PERSQUEUE INFO: [rt3.dc1--topic-x:4:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-31T15:58:25.197147Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037899, Partition: 4, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 4 generation 2 [1:7488003632992880579:2569] 2025-03-31T15:58:25.198466Z node 1 :PERSQUEUE INFO: [rt3.dc1--topic-x:8:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-31T15:58:25.198488Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037894, Partition: 8, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 8 generation 2 [1:7488003632992880526:2558] 2025-03-31T15:58:25.201061Z node 1 :PERSQUEUE INFO: [rt3.dc1--topic-x:5:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-31T15:58:25.201098Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037896, Partition: 5, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 5 generation 2 [1:7488003632992880530:2560] 2025-03-31T15:58:25.201172Z node 2 :PERSQUEUE NOTICE: [PQ: 72075186224037898] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T15:58:25.201197Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037898] has a tx writes info 2025-03-31T15:58:25.201852Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037893, NodeId 1, Generation 2 2025-03-31T15:58:25.201876Z node 2 :PERSQUEUE INFO: [rt3.dc1--topic-x:9:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-31T15:58:25.201894Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037895, Partition: 9, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 9 generation 2 [2:7488003633771272582:2495] 2025-03-31T15:58:25.202809Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037898, Partition: 14, State: StateInit] bootstrapping 14 [2:7488003633771272671:2501] 2025-03-31T15:58:25.201487Z node 1 :PERSQUEUE INFO: [rt3.dc1--topic-x:12:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2025-03-31T15:58:25.201524Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037894, Partition: 12, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 12 generation 2 [1:7488003632992880527:2559] 2025-03-31T15:58:25.205524Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037898, Partition: 11, State: StateInit] bootstrapping 11 [2:7488003633771272672:2502] 2025-03-31T15:58:25.205852Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037896, NodeId 1, Generation 2 2025-03-31T15:58:25.205868Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037899, NodeId 1, Generation 2 2025-03-31T15:58:25.205880Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037894, NodeId 1, Generation 2 2025-03-31T15:58:25.206225Z node 2 :PERSQUEUE INFO: [rt3.dc1--topic-x:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-31T15:58:25.206247Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037895, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 2 generation 2 [2:7488003633771272583:2496] 2025-03-31T15:58:25.208246Z node 1 :PQ_READ_PROXY DEBUG: new Describe partition request 2025-03-31T15:58:25.208343Z node 1 :PQ_READ_PROXY DEBUG: TDescribePartitionActor for request path: "/Root/PQ//rt3.dc1--topic-x" partition_id: 1 include_location: true 2025-03-31T15:58:25.208370Z node 1 :PQ_READ_PROXY DEBUG: TDescribePartitionActor[1:7488003632992880694:2579]: Bootstrap 2025-03-31T15:58:25.208544Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037895, NodeId 2, Generation 2 2025-03-31T15:58:25.208977Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7488003632992880694:2579]: Request location 2025-03-31T15:58:25.209005Z node 2 :PERSQUEUE INFO: [rt3.dc1--topic-x:14:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2025-03-31T15:58:25.209037Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037898, Partition: 14, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 14 generation 2 [2:7488003633771272671:2501] 2025-03-31T15:58:25.211288Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7488003632992880696:2580] connected; active server actors: 1 2025-03-31T15:58:25.211357Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037893, partitionId 1, NodeId 1, Generation 2 2025-03-31T15:58:25.211904Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7488003632992880694:2579]: Got location 2025-03-31T15:58:25.212260Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7488003632992880696:2580] disconnected; active server actors: 1 2025-03-31T15:58:25.212285Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7488003632992880696:2580] disconnected no session 2025-03-31T15:58:25.212890Z node 2 :PERSQUEUE NOTICE: [PQ: 72075186224037897] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T15:58:25.212914Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037897] has a tx writes info 2025-03-31T15:58:25.213753Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037897, Partition: 7, State: StateInit] bootstrapping 7 [2:7488003633771272707:2505] Got response: operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribePartitionResult] { partition { partition_id: 1 active: true partition_location { node_id: 1 generation: 2 } } } } } 2025-03-31T15:58:25.215270Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037897, Partition: 13, State: StateInit] bootstrapping 13 [2:7488003633771272708:2506] 2025-03-31T15:58:25.217216Z node 2 :PERSQUEUE INFO: [rt3.dc1--topic-x:11:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2025-03-31T15:58:25.217243Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037898, Partition: 11, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 11 generation 2 [2:7488003633771272672:2502] 2025-03-31T15:58:25.217890Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037898, NodeId 2, Generation 2 2025-03-31T15:58:25.219584Z node 1 :PQ_READ_PROXY DEBUG: new Describe partition request 2025-03-31T15:58:25.219684Z node 1 :PQ_READ_PROXY DEBUG: TDescribePartitionActor for request path: "/Root/PQ//rt3.dc1--topic-x" partition_id: 3 include_stats: true include_location: true 2025-03-31T15:58:25.219718Z node 1 :PQ_READ_PROXY DEBUG: TDescribePartitionActor[1:7488003632992880705:2581]: Bootstrap 2025-03-31T15:58:25.220416Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7488003632992880705:2581]: Request location 2025-03-31T15:58:25.221940Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7488003632992880705:2581]: Got location 2025-03-31T15:58:25.221365Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7488003632992880710:2583] connected; active server actors: 1 2025-03-31T15:58:25.221790Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037892, partitionId 3, NodeId 2, Generation 2 2025-03-31T15:58:25.222454Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7488003632992880710:2583] disconnected; active server actors: 1 2025-03-31T15:58:25.222469Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7488003632992880710:2583] disconnected no session Got response: operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribePartitionResult] { partition { partition_id: 3 active: true partition_stats { partition_offsets { } last_write_time { seconds: 1743436705 nanos: 128000000 } max_write_time_lag { } bytes_written { } partition_node_id: 2 } partition_location { node_id: 2 generation: 2 } } } } } 2025-03-31T15:58:25.225002Z node 2 :PERSQUEUE INFO: [rt3.dc1--topic-x:7:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-31T15:58:25.225038Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037897, Partition: 7, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 7 generation 2 [2:7488003633771272707:2505] 2025-03-31T15:58:25.225278Z node 2 :PERSQUEUE INFO: [rt3.dc1--topic-x:13:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2025-03-31T15:58:25.225300Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037897, Partition: 13, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 13 generation 2 [2:7488003633771272708:2506] 2025-03-31T15:58:25.226226Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037897, NodeId 2, Generation 2 2025-03-31T15:58:25.226374Z node 1 :PQ_READ_PROXY DEBUG: new Describe partition request 2025-03-31T15:58:25.226459Z node 1 :PQ_READ_PROXY DEBUG: TDescribePartitionActor for request path: "/Root/PQ//bad-topic" include_stats: true include_location: true 2025-03-31T15:58:25.226504Z node 1 :PQ_READ_PROXY DEBUG: TDescribePartitionActor[1:7488003632992880712:2584]: Bootstrap Got response: operation { ready: true status: SCHEME_ERROR issues { message: "path \'Root/PQ/bad-topic\' does not exist or you do not have access rights" issue_code: 500018 severity: 1 } } |68.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut |68.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut |68.2%| [TA] {RESULT} $(B)/ydb/core/tx/replication/service/ut_topic_reader/test-results/unittest/{meta.json ... results_accumulator.log} |68.3%| [LD] {RESULT} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut >> TopicAutoscaling::ReadFromTimestamp_AutoscaleAwareSDK [GOOD] >> TopicAutoscaling::ReadFromTimestamp_PQv1 >> KqpIndexes::SimpleVectorIndexOrderByCosineSimilarityNotNullableLevel1 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TTopicApiDescribes::GetLocalDescribe [GOOD] Test command err: 2025-03-31T15:58:09.200414Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488003565699732896:2279];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:58:09.200481Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T15:58:09.291976Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488003564904689613:2221];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:58:09.292259Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T15:58:09.500517Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0017af/r3tmp/tmpmajgyP/pdisk_1.dat 2025-03-31T15:58:09.512628Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-31T15:58:09.940178Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:58:09.947533Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:58:09.947663Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:58:09.950714Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:58:09.950826Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:58:09.954930Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Connecting -> Connected 2025-03-31T15:58:09.986796Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T15:58:09.995573Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4471, node 1 2025-03-31T15:58:10.196218Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/0l5c/0017af/r3tmp/yandexKSC8aj.tmp 2025-03-31T15:58:10.196242Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/0l5c/0017af/r3tmp/yandexKSC8aj.tmp 2025-03-31T15:58:10.196408Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/0l5c/0017af/r3tmp/yandexKSC8aj.tmp 2025-03-31T15:58:10.196574Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T15:58:10.258879Z INFO: TTestServer started on Port 2623 GrpcPort 4471 TClient is connected to server localhost:2623 PQClient connected to localhost:4471 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:58:10.854259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T15:58:10.930673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 
2025-03-31T15:58:14.323309Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488003565699732896:2279];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:58:14.323695Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:58:14.339271Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488003564904689613:2221];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:58:14.339367Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:58:17.018553Z node 1 :KQP_PROXY ERROR: TraceId: "01jqpese6p7erjt0ecm8gd5ra2", Request deadline has expired for 0.409441s seconds 2025-03-31T15:58:17.047413Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488003599264428229:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:17.051401Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:17.052717Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488003599264428241:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:17.065796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710657:3, at schemeshard: 72057594046644480 2025-03-31T15:58:17.131471Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003600059472204:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:17.144971Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:17.187325Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488003599264428243:2323], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710657 completed, doublechecking } 2025-03-31T15:58:17.274270Z node 2 :TX_PROXY ERROR: Actor# [2:7488003599264428270:2185] txid# 281474976710658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:58:17.836486Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488003600059472248:2352], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T15:58:17.837075Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7488003599264428284:2327], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T15:58:17.838446Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MjRmNzBjNjItOGM0YzgyYTktNGNkYjRjMTAtMTM0YWRlNTA=, ActorId: [2:7488003599264428227:2318], ActorState: ExecuteState, TraceId: 01jqpeskdx2wcp7mbbymn0zgak, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T15:58:17.838383Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmM0OGI5ZWMtYWNhZDE1NS03MjFmOWYxOC1kZjcyYzI5YQ==, ActorId: [1:7488003600059472201:2345], ActorState: ExecuteState, TraceId: 01jqpeskdte94qpnggg7asprck, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T15:58:17.841490Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T15:58:17.843050Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T15:58:17.846512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T15:58:17.947787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T15:58:18.238636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-03-31T15:58:18.656502Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jqpesmvt44achcrqsa0mvq71, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGUxZDE3N2MtMTkzZGE5ZmQtYTZhNmRjZTUtMThkNDRmZTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7488003604354440037:3102] === CheckClustersList. Ok CreateTopicNoLegacy ... 
ate: StateInit] bootstrapping 10 [1:7488003630124244793:2506] 2025-03-31T15:58:24.776448Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037896, Partition: 10, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 10 generation 1 [1:7488003630124244793:2506] 2025-03-31T15:58:24.777912Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037899, Partition: 4, State: StateInit] bootstrapping 4 [1:7488003630124244789:2502] 2025-03-31T15:58:24.779700Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037898, Partition: 14, State: StateInit] bootstrapping 14 [1:7488003630124244792:2505] 2025-03-31T15:58:24.780141Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037899, Partition: 4, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 4 generation 1 [1:7488003630124244789:2502] 2025-03-31T15:58:24.781673Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037898, Partition: 14, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 14 generation 1 [1:7488003630124244792:2505] 2025-03-31T15:58:24.782115Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037898, Partition: 11, State: StateInit] bootstrapping 11 [1:7488003630124244795:2508] 2025-03-31T15:58:24.783967Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037898, Partition: 11, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 11 generation 1 [1:7488003630124244795:2508] 2025-03-31T15:58:24.786484Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037896, Partition: 5, State: StateInit] bootstrapping 5 [1:7488003630124244791:2504] 2025-03-31T15:58:24.789715Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037893, Partition: 1, State: StateInit] bootstrapping 1 [1:7488003630124244810:2511] 2025-03-31T15:58:24.796644Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037893, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 1 generation 1 [1:7488003630124244810:2511] 2025-03-31T15:58:24.798796Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037896, Partition: 5, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 5 generation 1 [1:7488003630124244791:2504] 2025-03-31T15:58:24.804161Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037893, Partition: 6, State: StateInit] bootstrapping 6 [1:7488003630124244811:2512] 2025-03-31T15:58:24.806253Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037893, Partition: 6, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 6 generation 1 [1:7488003630124244811:2512] 2025-03-31T15:58:24.812450Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037897, Partition: 7, State: StateInit] bootstrapping 7 [2:7488003629329200027:2433] 2025-03-31T15:58:24.815455Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037897, Partition: 7, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 7 generation 1 [2:7488003629329200027:2433] 2025-03-31T15:58:24.818245Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037897, Partition: 13, State: StateInit] bootstrapping 13 [2:7488003629329200037:2434] 2025-03-31T15:58:24.820398Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037897, Partition: 13, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 13 generation 1 [2:7488003629329200037:2434] 2025-03-31T15:58:24.822154Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: 3, State: StateInit] bootstrapping 3 [2:7488003629329200023:2430] 2025-03-31T15:58:24.823739Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: 0, State: StateInit] bootstrapping 0 [2:7488003629329200024:2431] 2025-03-31T15:58:24.824860Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: 3, State: 
StateInit] init complete for topic 'rt3.dc1--topic-x' partition 3 generation 1 [2:7488003629329200023:2430] 2025-03-31T15:58:24.825836Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 0 generation 1 [2:7488003629329200024:2431] 2025-03-31T15:58:24.827912Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037894, Partition: 8, State: StateInit] bootstrapping 8 [2:7488003629329200057:2440] 2025-03-31T15:58:24.827980Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037894, Partition: 12, State: StateInit] bootstrapping 12 [2:7488003629329200059:2442] 2025-03-31T15:58:24.830170Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037894, Partition: 8, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 8 generation 1 [2:7488003629329200057:2440] 2025-03-31T15:58:24.830174Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037894, Partition: 12, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 12 generation 1 [2:7488003629329200059:2442] 2025-03-31T15:58:24.833029Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037895, Partition: 9, State: StateInit] bootstrapping 9 [2:7488003629329200038:2435] 2025-03-31T15:58:24.835064Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037895, Partition: 9, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 9 generation 1 [2:7488003629329200038:2435] 2025-03-31T15:58:24.836755Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037895, Partition: 2, State: StateInit] bootstrapping 2 [2:7488003629329200042:2437] 2025-03-31T15:58:24.838612Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037895, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 2 generation 1 [2:7488003629329200042:2437] 2025-03-31T15:58:24.889550Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037893] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T15:58:24.903860Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037896] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T15:58:24.905200Z node 2 :PERSQUEUE NOTICE: [PQ: 72075186224037894] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T15:58:24.905798Z node 2 :PERSQUEUE NOTICE: [PQ: 72075186224037895] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T15:58:24.906435Z node 2 :PERSQUEUE NOTICE: [PQ: 72075186224037897] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T15:58:24.907056Z node 2 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T15:58:24.907644Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037898] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T15:58:24.908041Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037899] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T15:58:24.943544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T15:58:24.943595Z node 1 :IMPORT WARN: Table profiles were not loaded Create topic result: 1 2025-03-31T15:58:25.073835Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7488003634419212390:3992]: Request location 2025-03-31T15:58:25.082688Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7488003634419212390:3992]: Got location 2025-03-31T15:58:25.081846Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7488003634419212401:3997] connected; active 
server actors: 1 2025-03-31T15:58:25.082110Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037892, partitionId 0, NodeId 2, Generation 1 2025-03-31T15:58:25.082127Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037893, partitionId 1, NodeId 1, Generation 1 2025-03-31T15:58:25.082137Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037895, partitionId 2, NodeId 2, Generation 1 2025-03-31T15:58:25.082150Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037892, partitionId 3, NodeId 2, Generation 1 2025-03-31T15:58:25.082160Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037899, partitionId 4, NodeId 1, Generation 1 2025-03-31T15:58:25.082170Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037896, partitionId 5, NodeId 1, Generation 1 2025-03-31T15:58:25.082179Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037893, partitionId 6, NodeId 1, Generation 1 2025-03-31T15:58:25.082192Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037897, partitionId 7, NodeId 2, Generation 1 2025-03-31T15:58:25.082202Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037894, partitionId 8, NodeId 2, Generation 1 2025-03-31T15:58:25.082212Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037895, partitionId 9, NodeId 2, Generation 1 2025-03-31T15:58:25.082221Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037896, partitionId 10, NodeId 1, Generation 1 2025-03-31T15:58:25.082242Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037898, partitionId 11, NodeId 1, Generation 1 2025-03-31T15:58:25.082252Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037894, partitionId 12, NodeId 2, Generation 1 2025-03-31T15:58:25.082262Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037897, partitionId 13, NodeId 2, Generation 1 2025-03-31T15:58:25.082271Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037898, partitionId 14, NodeId 1, Generation 1 2025-03-31T15:58:25.083689Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7488003634419212401:3997] disconnected; active server actors: 1 2025-03-31T15:58:25.084645Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7488003634419212401:3997] disconnected no session 2025-03-31T15:58:25.107913Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7488003634419212405:4001]: Request location 2025-03-31T15:58:25.109299Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7488003634419212416:4010] connected; active server 
actors: 1 2025-03-31T15:58:25.109361Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037893, partitionId 1, NodeId 1, Generation 1 2025-03-31T15:58:25.109377Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037892, partitionId 3, NodeId 2, Generation 1 2025-03-31T15:58:25.109389Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037896, partitionId 5, NodeId 1, Generation 1 2025-03-31T15:58:25.111328Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7488003634419212416:4010] disconnected; active server actors: 1 2025-03-31T15:58:25.111349Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7488003634419212416:4010] disconnected no session 2025-03-31T15:58:25.110851Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7488003634419212405:4001]: Got location 2025-03-31T15:58:25.111544Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7488003634419212419:4012]: Request location 2025-03-31T15:58:25.121118Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7488003634419212421:4014] connected; active server actors: 1 >> TopicAutoscaling::ReadingAfterSplitTest_PreferedPartition_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::ReadingAfterSplitTest_PreferedPartition_AutoscaleAwareSDK >> Yq_1::ModifyConnections [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::BalancingAfterSplit_sessionsWithPartition [GOOD] Test command err: 2025-03-31T15:55:33.437562Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488002894658372365:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:55:33.437848Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T15:55:35.422951Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001947/r3tmp/tmpgYk0As/pdisk_1.dat 2025-03-31T15:55:36.466408Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:55:36.501804Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:55:36.501914Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:55:36.564417Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:55:36.616677Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23995, node 1 2025-03-31T15:55:36.882231Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/0l5c/001947/r3tmp/yandexp0xuYH.tmp 2025-03-31T15:55:36.882287Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/0l5c/001947/r3tmp/yandexp0xuYH.tmp 2025-03-31T15:55:36.882442Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: 
/home/runner/.ya/build/build_root/0l5c/001947/r3tmp/yandexp0xuYH.tmp 2025-03-31T15:55:36.882562Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T15:55:37.010655Z INFO: TTestServer started on Port 16498 GrpcPort 23995 TClient is connected to server localhost:16498 PQClient connected to localhost:23995 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:55:37.546582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T15:55:37.618221Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-31T15:55:37.629661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T15:55:37.823284Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:55:38.441528Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488002894658372365:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:55:38.441905Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:55:43.027696Z node 1 :KQP_PROXY ERROR: TraceId: "01jqpemr6n6dkp29kjq2gd7692", Request deadline has expired for 0.013573s seconds 2025-03-31T15:55:43.065816Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488002937608046162:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:55:43.065954Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:55:43.066369Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488002937608046190:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:55:43.069713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-03-31T15:55:43.103529Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488002937608046192:2352], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-03-31T15:55:43.181610Z node 1 :TX_PROXY ERROR: Actor# [1:7488002937608046256:2465] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:55:44.795436Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488002937608046264:2358], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T15:55:44.846026Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NWYyNDc4ZDUtNTdiYzJjYmQtM2Y4ZTY0OGUtOTQ3M2JkYzE=, ActorId: [1:7488002937608046160:2346], ActorState: ExecuteState, TraceId: 01jqpemx1matpbjxzsmaqtbk6d, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T15:55:44.867029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T15:55:44.885033Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T15:55:45.155361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T15:55:46.144552Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488002946197981046:2383], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T15:55:46.146681Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTFiYTMzYTItZGQzMGFlMWEtZGM2MTc5MjgtZDZlMTcxMmE=, ActorId: [1:7488002946197981043:2382], ActorState: ExecuteState, TraceId: 01jqpemzwd78m77n1w63ww59dt, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T15:55:46.156077Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T15:55:46.397388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7488002959082883124:2693] 2025-03-31T15:55:51.611801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T15:55:51.611832Z node 1 :IMPORT WARN: Table profiles were not loaded === CheckClustersList. Ok 2025-03-31T15:55:54.024148Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-03-31T15:55:54.446139Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-03-31T15:55:54.449465Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7488002984852687092:2801], Recipient [1:7488002907543274740:2215]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T15:55:54.449768Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T15:55:54.449779Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-03-31T15:55:54.450074Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7488002984852687088:2798], Recipient [1:7488002907543274740:2215]: {TEvModifySchemeTransaction txid# 281474976710672 TabletId# 72057594046644480} 2025-03-31T15:55:54.450087Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-03-31T15:55:54.844565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "tes ... 
Z node 6 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][test-topic] pipe [6:7488003646778940202:2905] client test-consumer disconnected session test-consumer_6_2_8150367403296659815_v1 2025-03-31T15:58:28.343611Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] consumer test-consumer rebalancing was scheduled 2025-03-31T15:58:28.343668Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] consumer test-consumer balancing. Sessions=1, Families=3, UnradableFamilies=2 [2 (1), 1 (0), ], RequireBalancing=0 [] 2025-03-31T15:58:28.343710Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] consumer test-consumer balancing of the family=2 (Status=Free, Partitions=[1]) failed because there are no suitable reading sessions. 2025-03-31T15:58:28.343729Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] consumer test-consumer balancing of the family=1 (Status=Free, Partitions=[0]) failed because there are no suitable reading sessions. 2025-03-31T15:58:28.343754Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] consumer test-consumer balancing duration: 0.000063s 2025-03-31T15:58:28.344317Z :INFO: [/Root] [/Root] [e295a6ed-5b885e98-a02a990b-7edc33bc] Closing read session. Close timeout: 0.000000s 2025-03-31T15:58:28.344382Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:2:1:0:0 2025-03-31T15:58:28.344436Z :INFO: [/Root] [/Root] [e295a6ed-5b885e98-a02a990b-7edc33bc] Counters: { Errors: 0 CurrentSessionLifetimeMs: 174 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-31T15:58:28.344587Z :NOTICE: [/Root] [/Root] [e295a6ed-5b885e98-a02a990b-7edc33bc] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } >>>>> Session-2 Received TSessionClosedEvent message SessionClosed { Status: SUCCESS Issues: "
: Error: Session was gracefully closed " } 2025-03-31T15:58:28.349202Z node 6 :PQ_READ_PROXY DEBUG: session cookie 3 consumer test-consumer session test-consumer_6_3_4736386812373693314_v1 grpc read done: success# 0, data# { } 2025-03-31T15:58:28.349238Z node 6 :PQ_READ_PROXY INFO: session cookie 3 consumer test-consumer session test-consumer_6_3_4736386812373693314_v1 grpc read failed 2025-03-31T15:58:28.349274Z node 6 :PQ_READ_PROXY INFO: session cookie 3 consumer test-consumer session test-consumer_6_3_4736386812373693314_v1 grpc closed 2025-03-31T15:58:28.349313Z node 6 :PQ_READ_PROXY INFO: session cookie 3 consumer test-consumer session test-consumer_6_3_4736386812373693314_v1 is DEAD 2025-03-31T15:58:28.351274Z node 6 :PERSQUEUE TRACE: HandleHook, received event# 269877764, Sender [6:7488003646778940223:3728], Recipient [6:7488003633894037963:2822]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-03-31T15:58:28.351314Z node 6 :PERSQUEUE TRACE: HandleHook, processing event TEvTabletPipe::TEvServerDisconnected 2025-03-31T15:58:28.351341Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896] Handle TEvTabletPipe::TEvServerDisconnected 2025-03-31T15:58:28.351373Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Destroy direct read session test-consumer_6_3_4736386812373693314_v1 2025-03-31T15:58:28.351409Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037896] server disconnected, pipe [6:7488003646778940221:2916] destroyed 2025-03-31T15:58:28.351458Z node 6 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_6_3_4736386812373693314_v1 2025-03-31T15:58:28.351506Z node 6 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][test-topic] pipe [6:7488003646778940218:2913] disconnected; active server actors: 1 2025-03-31T15:58:28.351525Z node 6 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][test-topic] pipe [6:7488003646778940218:2913] client test-consumer disconnected session test-consumer_6_3_4736386812373693314_v1 2025-03-31T15:58:28.364066Z :DEBUG: [/Root] TraceId [] SessionId [producer-1|dfaef05c-77982a11-7a53bff8-23c4d438_0] PartitionId [0] Generation [1] Write session: destroy 2025-03-31T15:58:28.406808Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7488003633894037963:2822], Partition 2, Sender [0:0:0], Recipient [6:7488003633894038073:2838], Cookie: 0 2025-03-31T15:58:28.406914Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7488003633894038073:2838]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-31T15:58:28.406959Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-31T15:58:28.407018Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-03-31T15:58:28.407135Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-31T15:58:28.407168Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-31T15:58:28.407202Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-03-31T15:58:28.407279Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7488003633894037970:2823], Partition 1, Sender [0:0:0], Recipient [6:7488003633894038078:2840], Cookie: 0 2025-03-31T15:58:28.407322Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7488003633894038078:2840]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-31T15:58:28.407338Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-31T15:58:28.407362Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-03-31T15:58:28.407390Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-31T15:58:28.407404Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-31T15:58:28.407422Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-03-31T15:58:28.407471Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7488003578059461695:2473], Partition 0, Sender [0:0:0], Recipient [6:7488003578059461756:2477], Cookie: 0 2025-03-31T15:58:28.407501Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7488003578059461756:2477]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-31T15:58:28.407515Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-31T15:58:28.407537Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-03-31T15:58:28.407565Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-31T15:58:28.407611Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-31T15:58:28.407634Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-03-31T15:58:28.474974Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [6:7488003517929918365:2137]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T15:58:28.475027Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T15:58:28.475108Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [6:7488003517929918365:2137], Recipient [6:7488003517929918365:2137]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T15:58:28.475129Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T15:58:28.509651Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7488003633894037970:2823], Partition 1, Sender [0:0:0], Recipient [6:7488003633894038078:2840], Cookie: 0 2025-03-31T15:58:28.509769Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7488003633894038078:2840]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-31T15:58:28.509800Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-31T15:58:28.509849Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-03-31T15:58:28.509929Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-31T15:58:28.509962Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-31T15:58:28.509996Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-03-31T15:58:28.510068Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7488003578059461695:2473], Partition 0, Sender [0:0:0], Recipient [6:7488003578059461756:2477], Cookie: 0 2025-03-31T15:58:28.510104Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7488003578059461756:2477]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-31T15:58:28.510118Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-31T15:58:28.510142Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-03-31T15:58:28.510174Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-31T15:58:28.510188Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-31T15:58:28.510205Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-03-31T15:58:28.510253Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7488003633894037963:2822], Partition 2, Sender [0:0:0], Recipient [6:7488003633894038073:2838], Cookie: 0 2025-03-31T15:58:28.510286Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7488003633894038073:2838]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-31T15:58:28.510300Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-31T15:58:28.510323Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-03-31T15:58:28.510354Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-31T15:58:28.510370Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-31T15:58:28.510387Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 >> Yq_1::ModifyQuery >> TTopicApiDescribes::DescribeTopic [GOOD] >> TTxDataShardMiniKQL::CrossShard_4_OneToAll [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TIcNodeCache::GetNodesInfoTest [GOOD] Test command err: 2025-03-31T15:58:08.816029Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488003559032657179:2144];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:58:08.873453Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T15:58:09.004799Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488003562578481688:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:58:09.004891Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T15:58:09.551766Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0017d4/r3tmp/tmpVQfkMV/pdisk_1.dat 2025-03-31T15:58:09.583195Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-31T15:58:09.874384Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:10.018883Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:10.062800Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:58:10.109551Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:58:10.109681Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:58:10.110597Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:58:10.110665Z node 1 :HIVE 
WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:58:10.120688Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T15:58:10.121015Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:58:10.121520Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28358, node 1 2025-03-31T15:58:10.477027Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/0l5c/0017d4/r3tmp/yandexB8DOPZ.tmp 2025-03-31T15:58:10.477059Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/0l5c/0017d4/r3tmp/yandexB8DOPZ.tmp 2025-03-31T15:58:10.477240Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/0l5c/0017d4/r3tmp/yandexB8DOPZ.tmp 2025-03-31T15:58:10.477383Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T15:58:10.561586Z INFO: TTestServer started on Port 26532 GrpcPort 28358 TClient is connected to server localhost:26532 PQClient connected to localhost:28358 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:58:11.036127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T15:58:11.184286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 
2025-03-31T15:58:13.752754Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488003559032657179:2144];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:58:13.752813Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:58:14.007472Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488003562578481688:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:58:14.007562Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:58:14.747889Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003584802462009:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:14.751651Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:14.757850Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003584802462022:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:14.762287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720662:3, at schemeshard: 72057594046644480 2025-03-31T15:58:14.875519Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976720662, at schemeshard: 72057594046644480 2025-03-31T15:58:14.879422Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488003584802462025:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720662 completed, doublechecking } 2025-03-31T15:58:14.972603Z node 1 :TX_PROXY ERROR: Actor# [1:7488003584802462123:2782] txid# 281474976720663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:58:15.612397Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7488003588348285889:2322], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T15:58:15.614400Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NTNmNjE1MDEtYzZjNjUxNmItYWFiYjMwOGEtYzFmNjAxYjM=, ActorId: [2:7488003588348285841:2315], ActorState: ExecuteState, TraceId: 01jqpeshpn5n3k537wke4s8zx3, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T15:58:15.617109Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T15:58:15.641130Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488003584802462133:2353], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T15:58:15.642747Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NzBjNTRhN2ItYzYyNDhhNWYtZTc1Y2JmNjgtNjdlOGViNzc=, ActorId: [1:7488003584802462007:2339], ActorState: ExecuteState, TraceId: 01jqpesh6pdyzq3pttzjmt5713, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T15:58:15.643186Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T15:58:15.828829Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720664:0, at schemeshard: 72057594046644480 2025-03-31T15:58:16.083771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720665:0, at schemeshard: 72057594046644480 2025-03-31T15:58:16.318619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-03-31T15:58:17.236897Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720667. Ctx: { TraceId: 01jqpesjzy0pr990epwzy3psmg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWRiMThiOWQtMTQxY2NhMzEtYTk2MzA4NjAtM2E3MTRiZGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7488003597687364524:3148] === CheckClustersList. 
Ok 2025-03-31T15:58:25.003294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T15:58:25.003338Z node 1 :IMPORT WARN: Table profiles were not loaded ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::WithDir_PartitionSplit_AutosplitByLoad [GOOD] Test command err: 2025-03-31T15:55:37.459169Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488002912148917358:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:55:37.459732Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T15:55:37.941810Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00192d/r3tmp/tmpULCEtV/pdisk_1.dat 2025-03-31T15:55:39.731179Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:55:40.336831Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:55:40.337244Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:55:40.384070Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:55:41.033728Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.210347s 2025-03-31T15:55:41.033794Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.210446s TServer::EnableGrpc on GrpcPort 61512, node 1 2025-03-31T15:55:41.451633Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:55:41.567633Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/0l5c/00192d/r3tmp/yandexVSjjRI.tmp 2025-03-31T15:55:41.567675Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/0l5c/00192d/r3tmp/yandexVSjjRI.tmp 2025-03-31T15:55:41.590558Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/0l5c/00192d/r3tmp/yandexVSjjRI.tmp 2025-03-31T15:55:41.590869Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T15:55:41.624450Z INFO: TTestServer started on Port 27125 GrpcPort 61512 TClient is connected to server localhost:27125 PQClient connected to localhost:61512 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:55:42.132322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:55:42.163233Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:55:42.192702Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-31T15:55:42.199067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-03-31T15:55:42.355984Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2025-03-31T15:55:42.459266Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488002912148917358:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:55:42.459349Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:55:50.557512Z node 1 :KQP_PROXY ERROR: TraceId: "01jqpemwd0803pamvzfm1b9n6s", Request deadline has expired for 3.189835s seconds 2025-03-31T15:55:50.629193Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488002967983493053:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:55:50.700920Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:55:51.344695Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488002967983493068:2358], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:55:51.350310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-03-31T15:55:51.637928Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488002972278460372:2362], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:55:51.660026Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:55:51.663268Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710662, at schemeshard: 72057594046644480 2025-03-31T15:55:51.664080Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488002972278460370:2361], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-03-31T15:55:51.758851Z node 1 :TX_PROXY ERROR: Actor# [1:7488002972278460430:2484] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:55:53.306162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T15:55:53.565023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T15:55:53.676581Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488002976573427746:2374], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T15:55:53.679605Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTRkYmUwY2EtMjNhOTFmYWUtNWNiOTcxMmYtMmQ3NjAwOWM=, ActorId: [1:7488002967983493051:2352], ActorState: ExecuteState, TraceId: 01jqpen4cvcmk3as37mn8xzfer, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T15:55:53.691723Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T15:55:54.104042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-03-31T15:55:55.515737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T15:55:55.515764Z node 1 :IMPORT WARN: Table profiles were not loaded === CheckClustersList. Subcribe to ClusterTracker from [1:7488002998048264588:2706] === CheckClustersList. 
Ok 2025-03-31T15:56:02.481855Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-03-31T15:56:02.671689Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-03-31T15:56:02.673783Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7488003019523101208:2783], Recipient [1:7488002920738852410:2214]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T15:56:02.673806Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T15:56:02.673817Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-03-31T15:56:02.673991Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7488003019523101204:2780], Recipient [1:7488002920738852410:2214]: {TEvModifySchemeTransaction txid# 281474976710671 TabletId# 72057594046644480} 2025-03-31T15:56:02.674007Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-03-31T15:56:03.053672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles ... PERSQUEUE TRACE: [PQ: 72075186224037895, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-31T15:58:28.142240Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037895, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-03-31T15:58:28.147799Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7488003639911522244:2498], Partition 2, Sender [0:0:0], Recipient [5:7488003639911522334:2510], Cookie: 0 2025-03-31T15:58:28.147871Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7488003639911522334:2510]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-31T15:58:28.147897Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-31T15:58:28.147944Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037894, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-03-31T15:58:28.148022Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037894, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-31T15:58:28.148049Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037894, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-31T15:58:28.148088Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037894, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-03-31T15:58:28.212228Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7488003635616554810:2479], Partition 0, Sender [0:0:0], Recipient [5:7488003635616554873:2483], Cookie: 0 2025-03-31T15:58:28.212325Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7488003635616554873:2483]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-31T15:58:28.212354Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-31T15:58:28.212403Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-03-31T15:58:28.212475Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-31T15:58:28.212503Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-31T15:58:28.212544Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-03-31T15:58:28.243383Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7488003639911522246:2499], Partition 1, Sender [0:0:0], Recipient [5:7488003639911522318:2505], Cookie: 0 2025-03-31T15:58:28.243456Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7488003639911522318:2505]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-31T15:58:28.243481Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-31T15:58:28.243537Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037895, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-03-31T15:58:28.243621Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037895, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-31T15:58:28.243654Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037895, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-31T15:58:28.243686Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037895, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-03-31T15:58:28.247995Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7488003639911522244:2498], Partition 2, Sender [0:0:0], Recipient [5:7488003639911522334:2510], Cookie: 0 2025-03-31T15:58:28.248066Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7488003639911522334:2510]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-31T15:58:28.248094Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-31T15:58:28.248145Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037894, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-03-31T15:58:28.248207Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037894, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-31T15:58:28.248236Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037894, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-31T15:58:28.248266Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037894, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-03-31T15:58:28.312892Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7488003635616554810:2479], Partition 0, Sender [0:0:0], Recipient [5:7488003635616554873:2483], Cookie: 0 2025-03-31T15:58:28.312972Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7488003635616554873:2483]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-31T15:58:28.312998Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-31T15:58:28.313056Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-03-31T15:58:28.313129Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-31T15:58:28.313156Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-31T15:58:28.313191Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-03-31T15:58:28.343741Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7488003639911522246:2499], Partition 1, Sender [0:0:0], Recipient [5:7488003639911522318:2505], Cookie: 0 2025-03-31T15:58:28.343820Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7488003639911522318:2505]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-31T15:58:28.343859Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-31T15:58:28.343912Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037895, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-03-31T15:58:28.343991Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037895, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-31T15:58:28.344021Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037895, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-31T15:58:28.344056Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037895, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-03-31T15:58:28.348437Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7488003639911522244:2498], Partition 2, Sender [0:0:0], Recipient [5:7488003639911522334:2510], Cookie: 0 2025-03-31T15:58:28.348532Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7488003639911522334:2510]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-31T15:58:28.348560Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-31T15:58:28.348612Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037894, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-03-31T15:58:28.348695Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037894, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-31T15:58:28.348733Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037894, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-31T15:58:28.348767Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037894, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-03-31T15:58:28.416927Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7488003635616554810:2479], Partition 0, Sender [0:0:0], Recipient [5:7488003635616554873:2483], Cookie: 0 2025-03-31T15:58:28.417013Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7488003635616554873:2483]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-31T15:58:28.417041Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-31T15:58:28.417096Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-03-31T15:58:28.417180Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-31T15:58:28.417209Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-31T15:58:28.417244Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-03-31T15:58:28.445332Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7488003639911522246:2499], Partition 1, Sender [0:0:0], Recipient [5:7488003639911522318:2505], Cookie: 0 2025-03-31T15:58:28.445430Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7488003639911522318:2505]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-31T15:58:28.445461Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-31T15:58:28.445515Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037895, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-03-31T15:58:28.445610Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037895, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-31T15:58:28.445640Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037895, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-31T15:58:28.445690Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037895, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-03-31T15:58:28.455325Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7488003639911522244:2498], Partition 2, Sender [0:0:0], Recipient [5:7488003639911522334:2510], Cookie: 0 2025-03-31T15:58:28.455414Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7488003639911522334:2510]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-31T15:58:28.455441Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-31T15:58:28.455488Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037894, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-03-31T15:58:28.455568Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037894, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-31T15:58:28.455595Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037894, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-31T15:58:28.455625Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037894, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. 
>> DataShardSnapshots::ShardRestartWholeShardLockBasic [GOOD]
>> DataShardSnapshots::ShardRestartLockUnrelatedUpsert
>> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleDropIndex [GOOD]
>> TSchemeshardBackgroundCleaningTest::TempInTemp
>> DataShardSnapshots::MvccSnapshotLockedWrites+UseSink [GOOD]
>> DataShardSnapshots::MvccSnapshotLockedWrites-UseSink
>> DataShardSnapshots::LockedWriteBulkUpsertConflict-UseSink [GOOD]
>> DataShardSnapshots::LockedWriteDistributedCommitAborted+UseSink
------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TTopicApiDescribes::DescribeConsumer [GOOD]
Test command err:
2025-03-31T15:58:09.092298Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488003562515741881:2279];send_to=[0:7307199536658146131:7762515];
2025-03-31T15:58:09.122876Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
2025-03-31T15:58:09.242251Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488003562424548443:2158];send_to=[0:7307199536658146131:7762515];
2025-03-31T15:58:09.596963Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created
2025-03-31T15:58:09.612608Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0017b4/r3tmp/tmpEx90Ss/pdisk_1.dat
2025-03-31T15:58:09.851714Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
2025-03-31T15:58:10.303513Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-31T15:58:10.421904Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T15:58:10.421991Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T15:58:10.423849Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T15:58:10.423938Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T15:58:10.456794Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2
2025-03-31T15:58:10.457766Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-31T15:58:10.458295Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-31T15:58:10.486445Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 16194, node 1
2025-03-31T15:58:10.983902Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/0l5c/0017b4/r3tmp/yandex6tgeVP.tmp
2025-03-31T15:58:10.983937Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/0l5c/0017b4/r3tmp/yandex6tgeVP.tmp
2025-03-31T15:58:10.984146Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/0l5c/0017b4/r3tmp/yandex6tgeVP.tmp
2025-03-31T15:58:10.984314Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
2025-03-31T15:58:11.143390Z INFO: TTestServer started on Port 62579 GrpcPort 16194
TClient is connected to server localhost:62579
PQClient connected to localhost:16194
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-31T15:58:11.858110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
waiting...
2025-03-31T15:58:11.960853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
waiting...
waiting...
2025-03-31T15:58:12.510684Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480
2025-03-31T15:58:14.075232Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488003562515741881:2279];send_to=[0:7307199536658146131:7762515];
2025-03-31T15:58:14.075311Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-31T15:58:14.307273Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488003562424548443:2158];send_to=[0:7307199536658146131:7762515];
2025-03-31T15:58:14.307371Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-31T15:58:15.986748Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488003588194352457:2315], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T15:58:15.986866Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488003588194352445:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T15:58:15.986948Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T15:58:16.023255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480
2025-03-31T15:58:16.091690Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488003588194352463:2317], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking }
2025-03-31T15:58:16.207218Z node 2 :TX_PROXY ERROR: Actor# [2:7488003592489319787:2134] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-31T15:58:16.931808Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488003592580514004:2354], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003
2025-03-31T15:58:16.932996Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7488003592489319794:2321], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003
2025-03-31T15:58:16.934416Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZWU2ZDA1ZmYtZWI1M2FkY2EtYjQzNmJlYjctZWQ5ZDBkOTg=, ActorId: [2:7488003588194352443:2311], ActorState: ExecuteState, TraceId: 01jqpesjch8prc5bhsddsxv8vx, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id:
2025-03-31T15:58:16.934433Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZWFmNThlNTYtMmZlMGVjNmUtZTkyYjNhMmUtYjhjYjNhYzk=, ActorId: [1:7488003592580513964:2347], ActorState: ExecuteState, TraceId: 01jqpesk0n5b6g48ccn13w809k, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id:
2025-03-31T15:58:16.937546Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 }
2025-03-31T15:58:16.947875Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 }
2025-03-31T15:58:17.046249Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-31T15:58:17.270378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-31T15:58:17.448291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
=== Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000);
2025-03-31T15:58:17.835760Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jqpesm1yev8kaw9pe0kw12sc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWFjYjgwZmItNTA1ZjJiZDItYzBlNDViYTgtZGQ0YjdlNTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
=== CheckClustersList. Subcribe to ClusterTracker from [1:7488003596875481690:3082]
=== CheckClustersList. Ok
CreateTopicNoLegacy: rt3.dc1--topic-x
Create topic: /Root/PQ/rt3.dc1--topic-x
AddTopic: rt3.dc1--topic-x
===Run query:``DECLARE $version as Int64; DECLARE $path AS Utf8; DECLARE $cluster as Utf8; UPSERT INTO `/Root/PQ/Config/V2/Topics` (path, dc) VALUES ($path, $cluster); UPSERT INTO `/Root/PQ/Config/V2/Versions` (name, version) VALUES ( ... .031999Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7488003635530189717:2636] disconnected no session
Got response: operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribeConsumerResult] { self { name: "rt3.dc1--topic-x/my-consumer" owner: "root@builtin" type: TOPIC created_at { plan_step: 1743436704492 tx_id: 281474976710676 } } consumer { name: "shared/my-consumer" important: true read_from { } attributes { key: "_service_type" value: "data-streams" } } partitions { active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 1 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 2 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 3 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 4 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 5 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 6 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 7 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 8 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 9 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 10 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 11 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 12 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 13 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 14 active: true partition_location { node_id: 2 generation: 2 } } } } }
2025-03-31T15:58:26.035365Z node 1 :PQ_READ_PROXY DEBUG: new Describe consumer request
2025-03-31T15:58:26.035463Z node 1 :PQ_READ_PROXY DEBUG: TDescribeConsumerActor for request path: "/Root/PQ//rt3.dc1--topic-x" consumer: "my-consumer"
Got response: operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribeConsumerResult] { self { name: "rt3.dc1--topic-x/my-consumer" owner: "root@builtin" type: TOPIC created_at { plan_step: 1743436704492 tx_id: 281474976710676 } } consumer { name: "shared/my-consumer" important: true read_from { } attributes { key: "_service_type" value: "data-streams" } } partitions { active: true } partitions { partition_id: 1 active: true } partitions { partition_id: 2 active: true } partitions { partition_id: 3 active: true } partitions { partition_id: 4 active: true } partitions { partition_id: 5 active: true } partitions { partition_id: 6 active: true } partitions { partition_id: 7 active: true } partitions { partition_id: 8 active: true } partitions { partition_id: 9 active: true } partitions { partition_id: 10 active: true } partitions { partition_id: 11 active: true } partitions { partition_id: 12 active: true } partitions { partition_id: 13 active: true } partitions { partition_id: 14 active: true } } } }
2025-03-31T15:58:26.040213Z node 1 :PQ_READ_PROXY DEBUG: new Describe consumer request
2025-03-31T15:58:26.040290Z node 1 :PQ_READ_PROXY DEBUG: TDescribeConsumerActor for request path: "/Root/PQ//bad-topic" consumer: "my-consumer" include_stats: true include_location: true
Got response: operation { ready: true status: SCHEME_ERROR issues { message: "path \'Root/PQ/bad-topic\' does not exist or you do not have access rights" issue_code: 500018 severity: 1 } }
2025-03-31T15:58:26.533376Z node 1 :KQP_COMPUTE WARN: TxId: 281474976710685, task: 1, CA Id [1:7488003635530189757:2641]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 0
2025-03-31T15:58:26.572086Z node 1 :KQP_COMPUTE WARN: TxId: 281474976710685, task: 1, CA Id [1:7488003635530189757:2641]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 1
2025-03-31T15:58:26.627862Z node 1 :KQP_COMPUTE WARN: TxId: 281474976710685, task: 1, CA Id [1:7488003635530189757:2641]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 1
2025-03-31T15:58:26.696096Z node 1 :KQP_COMPUTE WARN: TxId: 281474976710685, task: 1, CA Id [1:7488003635530189757:2641]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 1
2025-03-31T15:58:26.789208Z node 1 :KQP_COMPUTE WARN: TxId: 281474976710685, task: 1, CA Id [1:7488003635530189757:2641]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 1
2025-03-31T15:58:26.935212Z node 1 :KQP_COMPUTE WARN: TxId: 281474976710685, task: 1, CA Id [1:7488003635530189757:2641]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 1
2025-03-31T15:58:27.275909Z node 1 :KQP_COMPUTE WARN: TxId: 281474976710685, task: 1, CA Id [1:7488003635530189757:2641]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 1
2025-03-31T15:58:27.631900Z node 1 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976710686. Failed to resolve tablet: 72075186224037891 after several retries.
2025-03-31T15:58:27.632063Z node 1 :KQP_EXECUTER WARN: ActorId: [1:7488003639825157134:2642] TxId: 281474976710686. Ctx: { TraceId: 01jqpeswth2f6vxae5dythwpyw, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDUxOTkxYTItMzIxZWQyYTUtODQxNjM0ZjItNTZhNDQxZDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037891 after several retries.
2025-03-31T15:58:27.632376Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDUxOTkxYTItMzIxZWQyYTUtODQxNjM0ZjItNTZhNDQxZDc=, ActorId: [1:7488003635530189773:2642], ActorState: ExecuteState, TraceId: 01jqpeswth2f6vxae5dythwpyw, Create QueryResponse for error on request, msg:
2025-03-31T15:58:27.645860Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01jqpesxj72fn2v31ybvehb9yz" } } YdbStatus: UNAVAILABLE ConsumedRu: 498 }
2025-03-31T15:58:27.887433Z node 1 :KQP_COMPUTE WARN: TxId: 281474976710685, task: 1, CA Id [1:7488003635530189757:2641]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 1
2025-03-31T15:58:28.500364Z node 1 :KQP_COMPUTE WARN: TxId: 281474976710685, task: 1, CA Id [1:7488003635530189757:2641]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 1
2025-03-31T15:58:29.249040Z node 1 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976710688. Failed to resolve tablet: 72075186224037891 after several retries.
2025-03-31T15:58:29.249161Z node 1 :KQP_EXECUTER WARN: ActorId: [1:7488003648415091825:2656] TxId: 281474976710688. Ctx: { TraceId: 01jqpesysp5d3764c2cyfj7rwp, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGMwMjYxZjMtZjFiYzc2M2QtNjlkZmQ1OGYtOGRiNGRiYTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037891 after several retries.
2025-03-31T15:58:29.249420Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OGMwMjYxZjMtZjFiYzc2M2QtNjlkZmQ1OGYtOGRiNGRiYTA=, ActorId: [1:7488003644120124504:2656], ActorState: ExecuteState, TraceId: 01jqpesysp5d3764c2cyfj7rwp, Create QueryResponse for error on request, msg:
2025-03-31T15:58:29.250767Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01jqpesz6p8m9wf30tqbx11pg6" } } YdbStatus: UNAVAILABLE ConsumedRu: 273 }
2025-03-31T15:58:29.391699Z node 1 :KQP_COMPUTE WARN: TxId: 281474976710685, task: 1, CA Id [1:7488003635530189757:2641]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 1
2025-03-31T15:58:30.023833Z node 1 :KQP_COMPUTE WARN: TxId: 281474976710685, task: 1, CA Id [1:7488003635530189757:2641]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 1
2025-03-31T15:58:30.024667Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488003635530189757:2641], TxId: 281474976710685, task: 1. Ctx: { TraceId : 01jqpeswpn6ngs4dz43hf39k67. SessionId : ydb://session/3?node_id=1&id=YmE1MzAzMzUtNGE0YzYwNjUtMzg4NTYyMGUtZGU0ZjZkMjE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Source[0] fatal error: {
: Error: Too many retries for shard 72075186224037891 }
2025-03-31T15:58:30.025286Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488003635530189757:2641], TxId: 281474976710685, task: 1. Ctx: { TraceId : 01jqpeswpn6ngs4dz43hf39k67. SessionId : ydb://session/3?node_id=1&id=YmE1MzAzMzUtNGE0YzYwNjUtMzg4NTYyMGUtZGU0ZjZkMjE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: UNAVAILABLE DEFAULT_ERROR: {
: Error: Too many retries for shard 72075186224037891 }.
2025-03-31T15:58:30.026027Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YmE1MzAzMzUtNGE0YzYwNjUtMzg4NTYyMGUtZGU0ZjZkMjE=, ActorId: [1:7488003635530189750:2641], ActorState: ExecuteState, TraceId: 01jqpeswpn6ngs4dz43hf39k67, Create QueryResponse for error on request, msg:
2025-03-31T15:58:30.027759Z node 1 :PQ_METACACHE ERROR: Got error trying to perform request: { Response { QueryIssues { message: "Too many retries for shard 72075186224037891" severity: 1 } TxMeta { id: "01jqpeswpy4res2748gteegkt3" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 }
>> KqpIndexes::SimpleVectorIndexOrderByCosineSimilarityNullableLevel1
>> DataShardWrite::UpsertNoLocksArbiterRestart [GOOD]
>> DataShardWrite::UpsertLostPrepareArbiterRestart
------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds [GOOD]
Test command err:
2025-03-31T15:53:52.459817Z :TestReorderedExecutor INFO: Random seed for debugging is 1743436432459762
2025-03-31T15:53:53.128659Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488002466633201349:2279];send_to=[0:7307199536658146131:7762515];
2025-03-31T15:53:53.129052Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
2025-03-31T15:53:53.292752Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488002465432110635:2073];send_to=[0:7307199536658146131:7762515];
2025-03-31T15:53:53.292950Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
2025-03-31T15:53:56.023689Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created
2025-03-31T15:53:56.037690Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001300/r3tmp/tmpvXKp2e/pdisk_1.dat
2025-03-31T15:53:57.007227Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-31T15:53:57.044307Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-31T15:53:58.043208Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-31T15:53:58.122593Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488002466633201349:2279];send_to=[0:7307199536658146131:7762515];
2025-03-31T15:53:58.122635Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-31T15:53:58.294286Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488002465432110635:2073];send_to=[0:7307199536658146131:7762515];
2025-03-31T15:53:58.294802Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:53:59.087722Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:53:59.299726Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:00.110140Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:00.327637Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:01.999670Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:02.019221Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:03.751019Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:03.759903Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:05.482707Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:05.482738Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:06.263421Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:06.331344Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:06.483845Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:06.483875Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:07.315417Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 
2025-03-31T15:54:07.359671Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:07.544337Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:07.544370Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:08.142006Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:54:08.248711Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:08.249141Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:08.273892Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:08.273952Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:08.293974Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T15:54:08.294410Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:54:08.314058Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11489, node 1 2025-03-31T15:54:09.901008Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/0l5c/001300/r3tmp/yandexTGmf9k.tmp 2025-03-31T15:54:09.901033Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/0l5c/001300/r3tmp/yandexTGmf9k.tmp 2025-03-31T15:54:09.901239Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/0l5c/001300/r3tmp/yandexTGmf9k.tmp 2025-03-31T15:54:09.901332Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T15:54:10.013815Z INFO: TTestServer started on Port 19135 GrpcPort 11489 TClient is connected to server localhost:19135 PQClient connected to localhost:11489 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T15:54:10.890428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T15:54:10.975776Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T15:54:12.696609Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488002547036489633:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T15:54:12.696736Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T15:54:12.697279Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488002547036489645:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T15:54:12.714949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480
2025-03-31T15:54:12.818846Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488002547036489647:2332], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking }
2025-03-31T15:54:12.899518Z node 2 :TX_PROXY ERROR: Actor# [2:7488002547036489676:2150] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-31T15:54:13.620736Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7488002547036489683:2336], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1 ... iled to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:64497: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:64497
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:64497. " 2025-03-31T15:58:21.972064Z :DEBUG: [/Root] [/Root] [c352bff4-84288f35-c7ef387-2606d504] Restart cluster discovery in 0.010524s 2025-03-31T15:58:21.987510Z :DEBUG: [/Root] [/Root] [c352bff4-84288f35-c7ef387-2606d504] Starting cluster discovery 2025-03-31T15:58:21.987719Z :INFO: [/Root] [/Root] [c352bff4-84288f35-c7ef387-2606d504] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:64497: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:64497
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:64497. " 2025-03-31T15:58:21.987754Z :DEBUG: [/Root] [/Root] [c352bff4-84288f35-c7ef387-2606d504] Restart cluster discovery in 0.025528s 2025-03-31T15:58:22.014897Z :DEBUG: [/Root] [/Root] [c352bff4-84288f35-c7ef387-2606d504] Starting cluster discovery 2025-03-31T15:58:22.015288Z :NOTICE: [/Root] [/Root] [c352bff4-84288f35-c7ef387-2606d504] Aborting read session. Description: SessionClosed { Status: TRANSPORT_UNAVAILABLE Issues: "
: Error: Failed to discover clusters
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:64497: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:64497
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:64497. " } 2025-03-31T15:58:22.015509Z :NOTICE: [/Root] [/Root] [c352bff4-84288f35-c7ef387-2606d504] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } SessionClosed { Status: TRANSPORT_UNAVAILABLE Issues: "
: Error: Failed to discover clusters
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:64497: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:64497
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:64497. " } 2025-03-31T15:58:22.015677Z :INFO: [/Root] [/Root] [c352bff4-84288f35-c7ef387-2606d504] Closing read session. Close timeout: 0.000000s 2025-03-31T15:58:22.015791Z :NOTICE: [/Root] [/Root] [c352bff4-84288f35-c7ef387-2606d504] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-03-31T15:58:22.471456Z node 13 :KQP_EXECUTER ERROR: ActorId: [13:7488003620969102760:2499] TxId: 281474976715688. Ctx: { TraceId: 01jqpespxfc3bejmvyrntb0hcd, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=Zjk2ODY2ZTQtYjcyMjkxNjAtNGM1MjA2NjEtZjU2YTQwZWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 14 2025-03-31T15:58:22.471707Z node 13 :KQP_COMPUTE ERROR: SelfId: [13:7488003620969102769:2540], TxId: 281474976715688, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jqpespxfc3bejmvyrntb0hcd. SessionId : ydb://session/3?node_id=13&id=Zjk2ODY2ZTQtYjcyMjkxNjAtNGM1MjA2NjEtZjU2YTQwZWE=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [13:7488003620969102760:2499], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2025-03-31T15:58:22.472068Z node 13 :KQP_COMPUTE ERROR: SelfId: [13:7488003620969102770:2541], TxId: 281474976715688, task: 4. Ctx: { TraceId : 01jqpespxfc3bejmvyrntb0hcd. SessionId : ydb://session/3?node_id=13&id=Zjk2ODY2ZTQtYjcyMjkxNjAtNGM1MjA2NjEtZjU2YTQwZWE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [13:7488003620969102760:2499], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2025-03-31T15:58:23.088183Z node 13 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976715689. Failed to resolve tablet: 72075186224037890 after several retries. 2025-03-31T15:58:23.088361Z node 13 :KQP_EXECUTER WARN: ActorId: [13:7488003620969102790:2543] TxId: 281474976715689. Ctx: { TraceId: 01jqpess578z2xy7d7dkas41e0, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=ZDNlZjQ0NTktZGRiZGVlZmYtYzVhMTFkNWQtMjZmYzZiNmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-03-31T15:58:23.088650Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=ZDNlZjQ0NTktZGRiZGVlZmYtYzVhMTFkNWQtMjZmYzZiNmI=, ActorId: [13:7488003620969102787:2543], ActorState: ExecuteState, TraceId: 01jqpess578z2xy7d7dkas41e0, Create QueryResponse for error on request, msg: 2025-03-31T15:58:23.099250Z node 13 :PQ_METACACHE ERROR: Got error trying to perform request: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jqpess578z2xy7d7dm3cn8vj" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 } 2025-03-31T15:58:23.471688Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=Zjk2ODY2ZTQtYjcyMjkxNjAtNGM1MjA2NjEtZjU2YTQwZWE=, ActorId: [13:7488003612379167928:2499], ActorState: ExecuteState, TraceId: 01jqpespxfc3bejmvyrntb0hcd, Create QueryResponse for error on request, msg: 2025-03-31T15:58:23.473983Z node 13 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } QueryIssues { message: "Kikimr cluster or one of its subsystems was unavailable." issue_code: 2005 severity: 1 issues { message: "Failed to send EvStartKqpTasksRequest because node is unavailable: 14" severity: 1 } } TxMeta { id: "01jqpesrqr2khvxrqzy8w0c263" } } YdbStatus: UNAVAILABLE ConsumedRu: 1231 } 2025-03-31T15:58:25.308212Z node 13 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976715692. Failed to resolve tablet: 72075186224037890 after several retries. 2025-03-31T15:58:25.308348Z node 13 :KQP_EXECUTER WARN: ActorId: [13:7488003633854004785:2555] TxId: 281474976715692. Ctx: { TraceId: 01jqpesvaf70j6wqprg8v4m0bh, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=ZWI0OTg3YWMtMzUyZTBlOTEtYWM0OWM2OGEtNTNiZTlhYTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-03-31T15:58:25.308587Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=ZWI0OTg3YWMtMzUyZTBlOTEtYWM0OWM2OGEtNTNiZTlhYTQ=, ActorId: [13:7488003633854004782:2555], ActorState: ExecuteState, TraceId: 01jqpesvaf70j6wqprg8v4m0bh, Create QueryResponse for error on request, msg: 2025-03-31T15:58:25.310225Z node 13 :PQ_METACACHE ERROR: Got error trying to perform request: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jqpesvag76r3kqq7x211hxpq" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 } 2025-03-31T15:58:25.941875Z node 13 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976715694. Failed to resolve tablet: 72075186224037890 after several retries. 2025-03-31T15:58:25.942057Z node 13 :KQP_EXECUTER WARN: ActorId: [13:7488003633854004836:2547] TxId: 281474976715694. Ctx: { TraceId: 01jqpestqqextz988fhjcc57ez, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=NGZmMzIwODgtOGY2OGFjZWEtMzM5MTY5YjQtNmFhYzM5ODY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-03-31T15:58:25.942391Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=NGZmMzIwODgtOGY2OGFjZWEtMzM5MTY5YjQtNmFhYzM5ODY=, ActorId: [13:7488003629559037459:2547], ActorState: ExecuteState, TraceId: 01jqpestqqextz988fhjcc57ez, Create QueryResponse for error on request, msg: 2025-03-31T15:58:25.944797Z node 13 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jqpesvyq22f84xvv956epn8z" } } YdbStatus: UNAVAILABLE ConsumedRu: 820 } 2025-03-31T15:58:27.532194Z node 13 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976715696. Failed to resolve tablet: 72075186224037890 after several retries. 2025-03-31T15:58:27.532339Z node 13 :KQP_EXECUTER WARN: ActorId: [13:7488003642443939546:2566] TxId: 281474976715696. Ctx: { TraceId: 01jqpesxfp535qz4hgnt87ewnd, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=ZThmYjFmYjgtOTdhY2I3NGYtOWZiM2UwOGYtZjRlODZlOTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-03-31T15:58:27.532595Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=ZThmYjFmYjgtOTdhY2I3NGYtOWZiM2UwOGYtZjRlODZlOTg=, ActorId: [13:7488003642443939543:2566], ActorState: ExecuteState, TraceId: 01jqpesxfp535qz4hgnt87ewnd, Create QueryResponse for error on request, msg: 2025-03-31T15:58:27.534437Z node 13 :PQ_METACACHE ERROR: Got error trying to perform request: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jqpesxfqfqvzfg9x5f945ahy" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 } 2025-03-31T15:58:27.988431Z node 13 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976715698. Failed to resolve tablet: 72075186224037890 after several retries. 2025-03-31T15:58:27.988574Z node 13 :KQP_EXECUTER WARN: ActorId: [13:7488003642443939595:2559] TxId: 281474976715698. Ctx: { TraceId: 01jqpesx464amkw7p33md85y5n, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=OTIwYjhjZS0yMThmOTM4Ny05ZTA4ZTgwNS1mZWY0NmQ4ZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries.
2025-03-31T15:58:27.988870Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=OTIwYjhjZS0yMThmOTM4Ny05ZTA4ZTgwNS1mZWY0NmQ4ZA==, ActorId: [13:7488003638148972226:2559], ActorState: ExecuteState, TraceId: 01jqpesx464amkw7p33md85y5n, Create QueryResponse for error on request, msg:
2025-03-31T15:58:27.990755Z node 13 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jqpesxyp2mcjgpa7ey5c019a" } } YdbStatus: UNAVAILABLE ConsumedRu: 559 }
|68.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut
|68.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut
|68.3%| [LD] {RESULT} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut
>> KqpUserConstraint::KqpReadNull-UploadNull [GOOD]
>> Yq_1::ListConnections [GOOD]
>> Yq_1::ListConnectionsOnEmptyConnectionsTable
>> TDatabaseResolverTests::DataStreams_Serverless
>> TDatabaseResolverTests::DataStreams_Serverless [GOOD]
>> TDatabaseResolverTests::DataStreams_PermissionDenied
------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TTopicApiDescribes::DescribeTopic [GOOD]
Test command err:
2025-03-31T15:58:10.455539Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488003567627777748:2071];send_to=[0:7307199536658146131:7762515];
2025-03-31T15:58:10.455597Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
2025-03-31T15:58:10.952895Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created
2025-03-31T15:58:10.956356Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001726/r3tmp/tmpedbkfd/pdisk_1.dat
2025-03-31T15:58:11.021670Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-31T15:58:11.597901Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-31T15:58:11.606989Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T15:58:11.615290Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T15:58:11.616841Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T15:58:11.616907Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T15:58:11.617437Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T15:58:11.626197Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-31T15:58:11.633020Z
node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T15:58:11.639828Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24138, node 1 2025-03-31T15:58:12.136896Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/0l5c/001726/r3tmp/yandexlbvg8L.tmp 2025-03-31T15:58:12.136917Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/0l5c/001726/r3tmp/yandexlbvg8L.tmp 2025-03-31T15:58:12.137089Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/0l5c/001726/r3tmp/yandexlbvg8L.tmp 2025-03-31T15:58:12.137206Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T15:58:12.404148Z INFO: TTestServer started on Port 26104 GrpcPort 24138 TClient is connected to server localhost:26104 PQClient connected to localhost:24138 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:58:13.204692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T15:58:13.395586Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-31T15:58:13.412276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-03-31T15:58:15.463227Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488003567627777748:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:58:15.463287Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:58:18.982245Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003601987517309:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:18.982352Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003601987517301:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:18.982486Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:18.989572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-03-31T15:58:19.041196Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488003601987517315:2350], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-03-31T15:58:19.160618Z node 1 :TX_PROXY ERROR: Actor# [1:7488003606282484718:2803] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:58:19.694807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T15:58:19.719875Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488003606282484728:2361], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T15:58:19.722963Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7488003605300671357:2323], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T15:58:19.723324Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MmM1Y2E3MTItZWRlNmEzN2EtY2UwMGNmOWQtMTZiYzk5ZTU=, ActorId: [2:7488003605300671309:2316], ActorState: ExecuteState, TraceId: 01jqpesnthb3r7071w3y8n5wnr, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T15:58:19.721720Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTdlMTUyM2QtYjk0MWE2N2UtMjA5N2NlYjgtZDkxOTdmMmY=, ActorId: [1:7488003601987517299:2345], ActorState: ExecuteState, TraceId: 01jqpesnayc7a6hb9mje30nj3q, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T15:58:19.740271Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T15:58:19.743386Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T15:58:19.899985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T15:58:20.199404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-03-31T15:58:20.976231Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jqpesprs6qpt85m96ct4t5kf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjYyN2JlYTAtZTVmODliNmEtZDJlMDAzMGQtN2ViYjlkMWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7488003614872419805:3159] === CheckClustersList. 
Ok CreateTopicNoLegacy: rt3.dc1--topic-x Create topic: /Root/PQ/rt3.dc1--topic-x AddTopic: rt3.dc1--topic-x ===Run query:``DECLARE $version as Int64; DECLARE $path AS Utf8; DECLARE $cluster as Utf8; UPSERT INTO `/Root/PQ/Config/V2/Topics` (path, dc) VALUES ($path, $cluster); UPSERT INTO `/Root/PQ/Config/V2/Versions` (name, version) VALUES ("Topics", $version);`` with topic = topic-x, dc = dc1 2025-03-31T15:58:26.271021Z node 1 :PQ_READ_PROXY DEBUG: new Create topic request 2025-03-31T15:58:26.479373Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710679. Ctx: { TraceId: 01jqpeswfe03b1n3hb7nwkvsad, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmYzYTlmMjEtM2RkMjgyMzktNmI2YmE0Zi1jYmRlMzg1Yg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:26.631987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T15:58:26.632011Z node 1 :IMPORT W ... retention_period { seconds: 64800 } partition_write_speed_bytes_per_second: 2097152 partition_write_burst_bytes: 2097152 attributes { key: "__max_partition_message_groups_seqno_stored" value: "6000000" } attributes { key: "_message_group_seqno_retention_period_ms" value: "1382400000" } consumers { name: "shared/user" read_from { } attributes { key: "_service_type" value: "data-streams" } consumer_stats { min_partitions_last_read_time { seconds: 1743436708 nanos: 480000000 } max_read_time_lag { } max_write_time_lag { } bytes_read { } } } topic_stats { min_last_write_time { seconds: 1743436709 nanos: 23000000 } max_write_time_lag { } bytes_written { } } } } } Describe topic with location 2025-03-31T15:58:29.093898Z node 1 :PQ_READ_PROXY DEBUG: new Describe topic request 2025-03-31T15:58:29.093999Z node 1 :PQ_READ_PROXY DEBUG: TDescribeTopicActor for request path: "/Root/PQ//rt3.dc1--topic-x" include_location: true 2025-03-31T15:58:29.094092Z node 1 :PQ_READ_PROXY DEBUG: Describe topic actor for path /Root/PQ//rt3.dc1--topic-x 2025-03-31T15:58:29.099651Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7488003649232160063:2613]: Request location 2025-03-31T15:58:29.117582Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7488003649232160065:2614] connected; active server actors: 1 2025-03-31T15:58:29.119391Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037892, partitionId 0, NodeId 2, Generation 2 2025-03-31T15:58:29.119415Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037893, partitionId 1, NodeId 1, Generation 2 2025-03-31T15:58:29.119427Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037895, partitionId 2, NodeId 2, Generation 2 2025-03-31T15:58:29.119438Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037892, partitionId 3, NodeId 2, Generation 2 2025-03-31T15:58:29.119448Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037899, partitionId 4, NodeId 1, Generation 2 2025-03-31T15:58:29.119460Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037896, partitionId 5, NodeId 1, Generation 2 2025-03-31T15:58:29.119469Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] 
addPartitionToResponse tabletId 72075186224037893, partitionId 6, NodeId 1, Generation 2 2025-03-31T15:58:29.119481Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037897, partitionId 7, NodeId 2, Generation 2 2025-03-31T15:58:29.119493Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037894, partitionId 8, NodeId 2, Generation 2 2025-03-31T15:58:29.119504Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037895, partitionId 9, NodeId 2, Generation 2 2025-03-31T15:58:29.119513Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037896, partitionId 10, NodeId 1, Generation 2 2025-03-31T15:58:29.119524Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037898, partitionId 11, NodeId 1, Generation 2 2025-03-31T15:58:29.119533Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037894, partitionId 12, NodeId 2, Generation 2 2025-03-31T15:58:29.119543Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037897, partitionId 13, NodeId 2, Generation 2 2025-03-31T15:58:29.119552Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037898, partitionId 14, NodeId 1, Generation 2 2025-03-31T15:58:29.123794Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7488003649232160063:2613]: Got location Got response: operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribeTopicResult] { self { name: "rt3.dc1--topic-x" owner: "root@builtin" type: TOPIC created_at { plan_step: 1743436707229 tx_id: 281474976710678 } } partitioning_settings { min_active_partitions: 15 max_active_partitions: 1 auto_partitioning_settings { strategy: AUTO_PARTITIONING_STRATEGY_DISABLED partition_write_speed { stabilization_window { seconds: 300 } up_utilization_percent: 80 down_utilization_percent: 20 } } } partitions { active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 1 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 2 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 3 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 4 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 5 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 6 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 7 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 8 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 9 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 10 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 11 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 12 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 13 active: true partition_location { node_id: 2 
generation: 2 } } partitions { partition_id: 14 active: true partition_location { node_id: 1 generation: 2 } } retention_period { seconds: 64800 } partition_write_speed_bytes_per_second: 2097152 partition_write_burst_bytes: 2097152 attributes { key: "__max_partition_message_groups_seqno_stored" value: "6000000" } attributes { key: "_message_group_seqno_retention_period_ms" value: "1382400000" } consumers { name: "shared/user" read_from { } attributes { key: "_service_type" value: "data-streams" } } } } } Describe topic with no stats or location 2025-03-31T15:58:29.127508Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7488003649232160065:2614] disconnected; active server actors: 1 2025-03-31T15:58:29.127545Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7488003649232160065:2614] disconnected no session 2025-03-31T15:58:29.128437Z node 1 :PQ_READ_PROXY DEBUG: new Describe topic request 2025-03-31T15:58:29.128522Z node 1 :PQ_READ_PROXY DEBUG: TDescribeTopicActor for request path: "/Root/PQ//rt3.dc1--topic-x" 2025-03-31T15:58:29.128624Z node 1 :PQ_READ_PROXY DEBUG: Describe topic actor for path /Root/PQ//rt3.dc1--topic-x Got response: operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribeTopicResult] { self { name: "rt3.dc1--topic-x" owner: "root@builtin" type: TOPIC created_at { plan_step: 1743436707229 tx_id: 281474976710678 } } partitioning_settings { min_active_partitions: 15 max_active_partitions: 1 auto_partitioning_settings { strategy: AUTO_PARTITIONING_STRATEGY_DISABLED partition_write_speed { stabilization_window { seconds: 300 } up_utilization_percent: 80 down_utilization_percent: 20 } } } partitions { active: true } partitions { partition_id: 1 active: true } partitions { partition_id: 2 active: true } partitions { partition_id: 3 active: true } partitions { partition_id: 4 active: true } partitions { partition_id: 5 active: true } partitions { partition_id: 6 active: true } partitions { partition_id: 7 active: true } partitions { partition_id: 8 active: true } partitions { partition_id: 9 active: true } partitions { partition_id: 10 active: true } partitions { partition_id: 11 active: true } partitions { partition_id: 12 active: true } partitions { partition_id: 13 active: true } partitions { partition_id: 14 active: true } retention_period { seconds: 64800 } partition_write_speed_bytes_per_second: 2097152 partition_write_burst_bytes: 2097152 attributes { key: "__max_partition_message_groups_seqno_stored" value: "6000000" } attributes { key: "_message_group_seqno_retention_period_ms" value: "1382400000" } consumers { name: "shared/user" read_from { } attributes { key: "_service_type" value: "data-streams" } } } } } Describe bad topic 2025-03-31T15:58:29.134018Z node 1 :PQ_READ_PROXY DEBUG: new Describe topic request 2025-03-31T15:58:29.134098Z node 1 :PQ_READ_PROXY DEBUG: TDescribeTopicActor for request path: "/Root/PQ//bad-topic" include_stats: true include_location: true 2025-03-31T15:58:29.134207Z node 1 :PQ_READ_PROXY DEBUG: Describe topic actor for path /Root/PQ//bad-topic Got response: operation { ready: true status: SCHEME_ERROR issues { message: "path \'Root/PQ/bad-topic\' does not exist or you do not have access rights" issue_code: 500018 severity: 1 } } >> TDatabaseResolverTests::DataStreams_PermissionDenied [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::CrossShard_4_OneToAll [GOOD] Test 
command err: Leader for TabletID 9437184 is [0:0:0] sender: [1:114:2057] recipient: [1:108:2140] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [1:114:2057] recipient: [1:108:2140] Leader for TabletID 9437184 is [1:131:2154] sender: [1:132:2057] recipient: [1:108:2140] 2025-03-31T15:55:26.297080Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvBoot 2025-03-31T15:55:26.338909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:55:26.338963Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:55:26.354836Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvRestored 2025-03-31T15:55:26.355387Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:131:2154] 2025-03-31T15:55:26.355702Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:55:26.408993Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T15:55:26.417598Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:55:26.417891Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T15:55:26.419645Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-03-31T15:55:26.419732Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-03-31T15:55:26.419782Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-03-31T15:55:26.420163Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T15:55:26.420299Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T15:55:26.420425Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:194:2154] in generation 2 Leader for TabletID 9437184 is [1:131:2154] sender: [1:209:2057] recipient: [1:14:2061] 2025-03-31T15:55:26.518562Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T15:55:26.592270Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-03-31T15:55:26.592441Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T15:55:26.592554Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:214:2212] 2025-03-31T15:55:26.592592Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-03-31T15:55:26.592623Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-03-31T15:55:26.592654Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T15:55:26.593119Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T15:55:26.593341Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T15:55:26.594287Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-03-31T15:55:26.594562Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-03-31T15:55:26.594939Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-31T15:55:26.595146Z node 1 
:TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-03-31T15:55:26.595218Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-03-31T15:55:26.595287Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-31T15:55:26.595417Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-31T15:55:26.595519Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-03-31T15:55:26.595603Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T15:55:26.595858Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:210:2209], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T15:55:26.596102Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T15:55:26.596378Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:208:2208], serverId# [1:210:2209], sessionId# [0:0:0] 2025-03-31T15:55:26.602471Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:131:2154]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-03-31T15:55:26.602541Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T15:55:26.602627Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-31T15:55:26.602777Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-03-31T15:55:26.602819Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-03-31T15:55:26.602862Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-03-31T15:55:26.602911Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-31T15:55:26.602943Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-03-31T15:55:26.602972Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-03-31T15:55:26.603000Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-03-31T15:55:26.603368Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-03-31T15:55:26.603410Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-03-31T15:55:26.603439Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-03-31T15:55:26.603605Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-03-31T15:55:26.603768Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-03-31T15:55:26.603961Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-03-31T15:55:26.604172Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-03-31T15:55:26.604418Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 
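The trace above shows the shard's core dispatch pattern: an operation is advanced through a fixed chain of execution units (CheckSchemeTx, StoreSchemeTx, FinishPropose, WaitForPlan, then PlanQueue once a plan step arrives), and each unit returns a status that tells the pipeline whether to move on, defer side effects, or park the operation. Below is a minimal toy model of that loop; the unit names and statuses are copied from the log, but the types are invented for illustration and are not YDB's internal API.

    // Toy model of the execution-unit pipeline seen in the trace above.
    // The real units live under ydb/core/tx/datashard; only the names and
    // statuses here mirror the log.
    #include <functional>
    #include <iostream>
    #include <string>
    #include <vector>

    enum class EStatus { Executed, DelayComplete, NotReady };

    struct TUnit {
        std::string Name;
        std::function<EStatus()> Execute; // "Trying to execute ... on unit <Name>"
    };

    int main() {
        std::vector<TUnit> plan = {
            {"CheckSchemeTx", [] { return EStatus::Executed; }},
            {"StoreSchemeTx", [] { return EStatus::DelayComplete; }}, // finished later, in Complete
            {"FinishPropose", [] { return EStatus::DelayComplete; }},
            {"WaitForPlan",   [] { return EStatus::NotReady; }},      // parks the operation
        };
        for (const auto& unit : plan) {
            std::cout << "Trying to execute on unit " << unit.Name << "\n";
            switch (unit.Execute()) {
                case EStatus::Executed:
                    break; // advance to the next unit immediately
                case EStatus::DelayComplete:
                    break; // advance now; side effects are flushed in the Complete phase
                case EStatus::NotReady:
                    std::cout << "not ready to execute on unit " << unit.Name << "\n";
                    return 0; // parked until an external event re-activates it
            }
        }
        return 0;
    }

In the real trace this is exactly what happens next: the operation sits in WaitForPlan until the '{TEvPlanStep step# 2 ...}' message arrives, after which it re-enters the pipeline via PlanQueue.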
2025-03-31T15:55:26.604605Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-03-31T15:55:26.624936Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-03-31T15:55:26.625154Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-03-31T15:55:26.625343Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-03-31T15:55:26.625720Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-03-31T15:55:26.626127Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-03-31T15:55:26.630377Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:220:2218], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T15:55:26.630595Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T15:55:26.631223Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:219:2217], serverId# [1:220:2218], sessionId# [0:0:0] 2025-03-31T15:55:26.632077Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:131:2154]: {TEvPlanStep step# 2 MediatorId# 0 TabletID 9437184} 2025-03-31T15:55:26.632106Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-31T15:55:26.632894Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit WaitForPlan 2025-03-31T15:55:26.632944Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-03-31T15:55:26.633593Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit WaitForPlan 2025-03-31T15:55:26.633844Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit PlanQueue 2025-03-31T15:55:26.662363Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 2 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 2 MediatorID: 0 TabletID: 9437184 } 2025-03-31T15:55:26.662445Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T15:55:26.662727Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T15:55:26.662766Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T15:55:26.662824Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-31T15:55:26.662859Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-03-31T15:55:26.662890Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-31T15:55:26.662927Z node 1 :TX_DATASHARD DEBUG: Found ready operation [2:1] in PlanQueue unit at 9437184 2025-03-31T15:55:26.662958Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit PlanQueue 2025-03-31T15:55:26.663016Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-03-31T15:55:26.663058Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit PlanQueue 2025-03-31T15:55:26.663145Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit LoadTxDetails 
2025-03-31T15:55:26.663176Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit LoadTxDetails 2025-03-31T15:55:26.663339Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 2:1 keys extracted: 0 2025-03-31T15:55:26.663369Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-03-31T15:55:26.663399Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit LoadTxDetails 2025-03-31T15:55:26.663424Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-03-31T15:55:26.663442Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit ProtectSchemeEchoes 2025-03-31T15:55:26.663510Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-31T15:55:26.663534Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-03-31T15:55:26.663567Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-31T15:55:26.663595Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit BuildAndWaitDependencies 2025-03-31T15:55:26.663639Z node 1 :TX_DATASHARD TRACE: Operation [2:1] is the new logically complete end at 9437184 2025-03-31T15:55:26.663681Z node 1 :TX_DATASHARD TRAC ... 9437185 to execution unit ExecuteDataTx 2025-03-31T15:58:32.953667Z node 41 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437185 on unit ExecuteDataTx 2025-03-31T15:58:32.954055Z node 41 :TX_DATASHARD TRACE: Executed operation [7:6] at tablet 9437185 with status COMPLETE 2025-03-31T15:58:32.954116Z node 41 :TX_DATASHARD TRACE: Datashard execution counters for [7:6] at 9437185: {NSelectRow: 1, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 1, SelectRowBytes: 10, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-03-31T15:58:32.954190Z node 41 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437185 is Executed 2025-03-31T15:58:32.954219Z node 41 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437185 executing on unit ExecuteDataTx 2025-03-31T15:58:32.954245Z node 41 :TX_DATASHARD TRACE: Add [7:6] at 9437185 to execution unit CompleteOperation 2025-03-31T15:58:32.954276Z node 41 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437185 on unit CompleteOperation 2025-03-31T15:58:32.954491Z node 41 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437185 is DelayComplete 2025-03-31T15:58:32.954525Z node 41 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437185 executing on unit CompleteOperation 2025-03-31T15:58:32.954555Z node 41 :TX_DATASHARD TRACE: Add [7:6] at 9437185 to execution unit CompletedOperations 2025-03-31T15:58:32.954585Z node 41 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437185 on unit CompletedOperations 2025-03-31T15:58:32.954622Z node 41 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437185 is Executed 2025-03-31T15:58:32.954648Z node 41 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437185 executing on unit CompletedOperations 2025-03-31T15:58:32.954675Z node 41 :TX_DATASHARD TRACE: Execution plan for [7:6] at 9437185 has finished 2025-03-31T15:58:32.954710Z node 41 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T15:58:32.954739Z node 41 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437185 
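Each finished operation also prints a "Datashard execution counters" record; in the CrossShard_4_OneToAll run above, operation [7:6] performs exactly one 10-byte point read on each shard (NSelectRow: 1, SelectRowRows: 1, SelectRowBytes: 10, everything else zero). A hedged mirror of those fields as a plain struct, with names taken from the log line (the real counter type lives in YDB's datashard code and may differ):

    // Illustrative stand-in for the per-operation counters printed in the
    // trace; field names follow the log output.
    #include <cstdint>
    #include <iostream>

    struct TEngineCounters {
        uint64_t NSelectRow = 0;     // point reads issued
        uint64_t NSelectRange = 0;   // range reads issued
        uint64_t NUpdateRow = 0;     // rows upserted
        uint64_t NEraseRow = 0;      // rows erased
        uint64_t SelectRowRows = 0;  // rows returned by point reads
        uint64_t SelectRowBytes = 0; // bytes returned by point reads
    };

    int main() {
        // Operation [7:6] above, as observed on tablets 9437184/9437185:
        TEngineCounters c;
        c.NSelectRow = 1;
        c.SelectRowRows = 1;
        c.SelectRowBytes = 10;
        std::cout << "NSelectRow: " << c.NSelectRow
                  << ", SelectRowBytes: " << c.SelectRowBytes << "\n";
        return 0;
    }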
2025-03-31T15:58:32.954770Z node 41 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437185 has no attached operations 2025-03-31T15:58:32.954804Z node 41 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437185 2025-03-31T15:58:32.955033Z node 41 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [41:233:2226], Recipient [41:233:2226]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T15:58:32.955178Z node 41 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T15:58:32.955245Z node 41 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-31T15:58:32.955281Z node 41 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-03-31T15:58:32.955309Z node 41 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-31T15:58:32.955343Z node 41 :TX_DATASHARD DEBUG: Found ready operation [7:6] in PlanQueue unit at 9437184 2025-03-31T15:58:32.955370Z node 41 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437184 on unit PlanQueue 2025-03-31T15:58:32.955401Z node 41 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437184 is Executed 2025-03-31T15:58:32.955470Z node 41 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437184 executing on unit PlanQueue 2025-03-31T15:58:32.955500Z node 41 :TX_DATASHARD TRACE: Add [7:6] at 9437184 to execution unit LoadTxDetails 2025-03-31T15:58:32.955529Z node 41 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437184 on unit LoadTxDetails 2025-03-31T15:58:32.956200Z node 41 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 7:6 keys extracted: 1 2025-03-31T15:58:32.956268Z node 41 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437184 is Executed 2025-03-31T15:58:32.956301Z node 41 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437184 executing on unit LoadTxDetails 2025-03-31T15:58:32.956327Z node 41 :TX_DATASHARD TRACE: Add [7:6] at 9437184 to execution unit FinalizeDataTxPlan 2025-03-31T15:58:32.956355Z node 41 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437184 on unit FinalizeDataTxPlan 2025-03-31T15:58:32.956394Z node 41 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437184 is Executed 2025-03-31T15:58:32.956419Z node 41 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437184 executing on unit FinalizeDataTxPlan 2025-03-31T15:58:32.956445Z node 41 :TX_DATASHARD TRACE: Add [7:6] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-31T15:58:32.956472Z node 41 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437184 on unit BuildAndWaitDependencies 2025-03-31T15:58:32.956524Z node 41 :TX_DATASHARD TRACE: Operation [7:6] is the new logically complete end at 9437184 2025-03-31T15:58:32.956557Z node 41 :TX_DATASHARD TRACE: Operation [7:6] is the new logically incomplete end at 9437184 2025-03-31T15:58:32.956587Z node 41 :TX_DATASHARD TRACE: Activated operation [7:6] at 9437184 2025-03-31T15:58:32.956625Z node 41 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437184 is Executed 2025-03-31T15:58:32.956649Z node 41 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437184 executing on unit BuildAndWaitDependencies 2025-03-31T15:58:32.956676Z node 41 :TX_DATASHARD TRACE: Add [7:6] at 9437184 to execution unit BuildDataTxOutRS 2025-03-31T15:58:32.956703Z node 41 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437184 on unit BuildDataTxOutRS 2025-03-31T15:58:32.956755Z node 41 :TX_DATASHARD TRACE: Execution status for [7:6] at 
9437184 is Executed 2025-03-31T15:58:32.956780Z node 41 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437184 executing on unit BuildDataTxOutRS 2025-03-31T15:58:32.956805Z node 41 :TX_DATASHARD TRACE: Add [7:6] at 9437184 to execution unit StoreAndSendOutRS 2025-03-31T15:58:32.956831Z node 41 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437184 on unit StoreAndSendOutRS 2025-03-31T15:58:32.956859Z node 41 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437184 is Executed 2025-03-31T15:58:32.956884Z node 41 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437184 executing on unit StoreAndSendOutRS 2025-03-31T15:58:32.956909Z node 41 :TX_DATASHARD TRACE: Add [7:6] at 9437184 to execution unit PrepareDataTxInRS 2025-03-31T15:58:32.956935Z node 41 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437184 on unit PrepareDataTxInRS 2025-03-31T15:58:32.956968Z node 41 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437184 is Executed 2025-03-31T15:58:32.956991Z node 41 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437184 executing on unit PrepareDataTxInRS 2025-03-31T15:58:32.957016Z node 41 :TX_DATASHARD TRACE: Add [7:6] at 9437184 to execution unit LoadAndWaitInRS 2025-03-31T15:58:32.957041Z node 41 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437184 on unit LoadAndWaitInRS 2025-03-31T15:58:32.957069Z node 41 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437184 is Executed 2025-03-31T15:58:32.957094Z node 41 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437184 executing on unit LoadAndWaitInRS 2025-03-31T15:58:32.957118Z node 41 :TX_DATASHARD TRACE: Add [7:6] at 9437184 to execution unit ExecuteDataTx 2025-03-31T15:58:32.957145Z node 41 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437184 on unit ExecuteDataTx 2025-03-31T15:58:32.957508Z node 41 :TX_DATASHARD TRACE: Executed operation [7:6] at tablet 9437184 with status COMPLETE 2025-03-31T15:58:32.957572Z node 41 :TX_DATASHARD TRACE: Datashard execution counters for [7:6] at 9437184: {NSelectRow: 1, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 1, SelectRowBytes: 10, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-03-31T15:58:32.957630Z node 41 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437184 is Executed 2025-03-31T15:58:32.957678Z node 41 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437184 executing on unit ExecuteDataTx 2025-03-31T15:58:32.957711Z node 41 :TX_DATASHARD TRACE: Add [7:6] at 9437184 to execution unit CompleteOperation 2025-03-31T15:58:32.957738Z node 41 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437184 on unit CompleteOperation 2025-03-31T15:58:32.957939Z node 41 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437184 is DelayComplete 2025-03-31T15:58:32.957973Z node 41 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437184 executing on unit CompleteOperation 2025-03-31T15:58:32.958004Z node 41 :TX_DATASHARD TRACE: Add [7:6] at 9437184 to execution unit CompletedOperations 2025-03-31T15:58:32.958032Z node 41 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437184 on unit CompletedOperations 2025-03-31T15:58:32.958064Z node 41 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437184 is Executed 2025-03-31T15:58:32.958090Z node 41 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437184 executing on unit CompletedOperations 2025-03-31T15:58:32.958116Z node 41 :TX_DATASHARD TRACE: Execution plan for [7:6] at 
9437184 has finished 2025-03-31T15:58:32.958148Z node 41 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T15:58:32.958174Z node 41 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-31T15:58:32.958203Z node 41 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-31T15:58:32.958240Z node 41 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-31T15:58:32.973906Z node 41 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437185 step# 7 txid# 6} 2025-03-31T15:58:32.973975Z node 41 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437185 step# 7} 2025-03-31T15:58:32.974027Z node 41 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437185 2025-03-31T15:58:32.974062Z node 41 :TX_DATASHARD TRACE: Complete execution for [7:6] at 9437185 on unit CompleteOperation 2025-03-31T15:58:32.974123Z node 41 :TX_DATASHARD DEBUG: Complete [7 : 6] from 9437185 at tablet 9437185 send result to client [41:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T15:58:32.974165Z node 41 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 2025-03-31T15:58:32.974424Z node 41 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437184 step# 7 txid# 6} 2025-03-31T15:58:32.974457Z node 41 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 7} 2025-03-31T15:58:32.974494Z node 41 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T15:58:32.974525Z node 41 :TX_DATASHARD TRACE: Complete execution for [7:6] at 9437184 on unit CompleteOperation 2025-03-31T15:58:32.974564Z node 41 :TX_DATASHARD DEBUG: Complete [7 : 6] from 9437184 at tablet 9437184 send result to client [41:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T15:58:32.974593Z node 41 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T15:58:32.974749Z node 41 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437186 step# 7 txid# 6} 2025-03-31T15:58:32.974779Z node 41 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437186 step# 7} 2025-03-31T15:58:32.974814Z node 41 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-03-31T15:58:32.974841Z node 41 :TX_DATASHARD TRACE: Complete execution for [7:6] at 9437186 on unit CompleteOperation 2025-03-31T15:58:32.974880Z node 41 :TX_DATASHARD DEBUG: Complete [7 : 6] from 9437186 at tablet 9437186 send result to client [41:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T15:58:32.974909Z node 41 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 >> TDataShardTrace::TestTraceDistributedUpsert+UseSink |68.3%| [TA] $(B)/ydb/services/persqueue_v1/ut/describes_ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TDataShardTrace::TestTraceDistributedUpsert-UseSink >> TDataShardTrace::TestTraceDistributedSelectViaReadActors >> Yq_1::DescribeConnection [GOOD] >> Yq_1::DeleteQuery >> TDataShardTrace::TestTraceWriteImmediateOnShard >> Yq_1::Basic_Null [GOOD] >> Yq_1::Basic_TaggedLiteral ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest >> KqpUserConstraint::KqpReadNull-UploadNull [GOOD] Test command err: 2025-03-31T15:58:31.280472Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:58:31.280853Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:31.281063Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000f59/r3tmp/tmp11KuqE/pdisk_1.dat 2025-03-31T15:58:32.160930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T15:58:32.248122Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:58:32.311790Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:58:32.311924Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:58:32.327148Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:58:32.434016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T15:58:33.294656Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:865:2713], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:33.294764Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:874:2718], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:33.294856Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:33.303805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T15:58:33.482265Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:879:2721], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T15:58:33.648795Z node 1 :TX_PROXY ERROR: Actor# [1:961:2772] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:58:34.256707Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqpet3acbwb8c97cr7kpskt3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTY3Yzg4OWEtYjA4MGMzNDQtNDVkMzJmZDUtMzA3NTIwYzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::DataStreams_PermissionDenied [GOOD] Test command err: 2025-03-31T15:58:36.101800Z node 2 :FQ_DATABASE_RESOLVER ERROR: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed DataStreams database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'ydbc.ydb.cloud.yandex.net:8789', url '/ydbc/cloud-prod/database?databaseId=etn021us5r9rhld1vgbh': you have no permission to resolve database id into database endpoint. >> TDataShardTrace::TestTraceDistributedSelect >> DataShardSnapshots::LockedWriteReuseAfterCommit-UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitSuccess+UseSink >> TDatabaseResolverTests::Ydb_Serverless >> DataShardWrite::DeletePrepared-Volatile [GOOD] >> DataShardWrite::DelayedVolatileTxAndEvWrite |68.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index >> TDatabaseResolverTests::Ydb_Serverless [GOOD] |68.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index |68.3%| [TA] {RESULT} $(B)/ydb/services/persqueue_v1/ut/describes_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |68.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index |68.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join |68.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join |68.3%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDefaultGroupAttributeGood [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDefaultGroupAttributeDisableNestedGroupsGood |68.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::Ydb_Serverless [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-4 >> TDatabaseResolverTests::MySQL >> TDatabaseResolverTests::MySQL [GOOD] >> TDatabaseResolverTests::MySQL_PermissionDenied >> TDatabaseResolverTests::MySQL_PermissionDenied [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::MySQL_PermissionDenied [GOOD] Test command err: 2025-03-31T15:58:38.821770Z node 2 :FQ_DATABASE_RESOLVER ERROR: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed MySQL database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'mdb.api.cloud.yandex.net:443', url '/managed-mysql/v1/clusters/etn021us5r9rhld1vgbh/hosts': you have no permission to resolve database id into database endpoint. >> DataShardWrite::UpsertBrokenLockArbiter [GOOD] >> TGRpcStreamingTest::ClientNeverWrites >> DataShardSnapshots::ShardRestartLockUnrelatedUpsert [GOOD] >> DataShardSnapshots::ShardRestartLockBrokenByConflict |68.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest |68.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest >> PersQueueSdkReadSessionTest::StopResumeReadingData [GOOD] >> ReadSessionImplTest::CreatePartitionStream [GOOD] >> ReadSessionImplTest::BrokenCompressedData [GOOD] >> ReadSessionImplTest::CommitOffsetTwiceIsError >> ReadSessionImplTest::CommitOffsetTwiceIsError [GOOD] >> ReadSessionImplTest::CommonHandler [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::UpsertBrokenLockArbiter [GOOD] Test command err: 2025-03-31T15:57:54.777920Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:57:54.778177Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:57:54.778334Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001217/r3tmp/tmpyUCJG5/pdisk_1.dat 2025-03-31T15:57:55.123798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T15:57:55.169531Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:57:55.213046Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:57:55.213188Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:57:55.228253Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:57:55.312330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T15:57:55.361885Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-03-31T15:57:55.362698Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-03-31T15:57:55.363037Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-31T15:57:55.363269Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:57:55.418895Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T15:57:55.419646Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:57:55.419763Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T15:57:55.421396Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T15:57:55.421484Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T15:57:55.421562Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T15:57:55.421945Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T15:57:55.422086Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T15:57:55.422184Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-31T15:57:55.435805Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T15:57:55.468946Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T15:57:55.469135Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T15:57:55.469279Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-31T15:57:55.469315Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T15:57:55.469349Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T15:57:55.469381Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T15:57:55.469608Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T15:57:55.469657Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T15:57:55.469922Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T15:57:55.470019Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T15:57:55.470160Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T15:57:55.470202Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T15:57:55.470243Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-31T15:57:55.470286Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-31T15:57:55.470334Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-31T15:57:55.470365Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T15:57:55.470406Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T15:57:55.470490Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T15:57:55.470580Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T15:57:55.470628Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-31T15:57:55.470725Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-03-31T15:57:55.470762Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T15:57:55.470862Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T15:57:55.471113Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-31T15:57:55.471175Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T15:57:55.471300Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T15:57:55.471342Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-31T15:57:55.471383Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-31T15:57:55.471419Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
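The DelayComplete / DelayCompleteNoMoreRestarts statuses in this trace mark work whose visible effects are postponed: the unit runs inside the tablet transaction's Execute phase, but replies and outgoing events are released only from the Complete phase, once the local-db commit is durable; this is why "Propose transaction complete ... send to client" is logged only under TTxProposeTransactionBase::Complete. A minimal sketch of that Execute/Complete split, using only standard C++ (it mimics the tablet transaction pattern, not the real NTabletFlatExecutor interface):

    // Side effects are buffered during Execute and flushed in Complete,
    // mirroring the DelayComplete behavior in the trace above.
    #include <functional>
    #include <iostream>
    #include <vector>

    struct TTxPropose {
        std::vector<std::function<void()>> SideEffects;

        bool Execute() {
            // ... read/write the local database here ...
            SideEffects.push_back([] {
                std::cout << "Propose transaction complete, send to client\n";
            });
            return true; // ready to commit; nothing visible has happened yet
        }

        void Complete() {
            // Runs only after the commit is persisted; now replying is safe.
            for (auto& effect : SideEffects) {
                effect();
            }
        }
    };

    int main() {
        TTxPropose tx;
        if (tx.Execute()) {
            tx.Complete();
        }
        return 0;
    }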
2025-03-31T15:57:55.471450Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-31T15:57:55.471743Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-31T15:57:55.471785Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-31T15:57:55.471814Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-31T15:57:55.471860Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-31T15:57:55.471905Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-31T15:57:55.471936Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-31T15:57:55.471970Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-31T15:57:55.471999Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-31T15:57:55.472022Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-31T15:57:55.473816Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-31T15:57:55.473887Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T15:57:55.486875Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T15:57:55.486970Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-31T15:57:55.487024Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-31T15:57:55.487073Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-03-31T15:57:55.487187Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-31T15:57:55.661169Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T15:57:55.661232Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T15:57:55.661271Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-31T15:57:55.661650Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:568:2495], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-03-31T15:57:55.661689Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-31T15:57:55.661816Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-31T15:57:55.661891Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-03-31T15:57:55.661930Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-03-31T15:57:55.661964Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-03-31T15:57:55.671618Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T15:57:55.671720Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T15:57:55.672413Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T15:57:55.672455Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T15:57:55.672517Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T15:57:5 ... d: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2025-03-31T15:58:39.089553Z node 7 :TX_DATASHARD TRACE: 72075186224037889 Complete read# {[7:935:2775], 1001} after executionsCount# 1 2025-03-31T15:58:39.089595Z node 7 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[7:935:2775], 1001} sends rowCount# 0, bytes# 0, quota rows left# 18446744073709551615, quota bytes left# 18446744073709551615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-03-31T15:58:39.089668Z node 7 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[7:935:2775], 1001} finished in read 2025-03-31T15:58:39.089736Z node 7 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037889 is Executed 2025-03-31T15:58:39.089762Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037889 executing on unit ExecuteRead 2025-03-31T15:58:39.089797Z node 7 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037889 to execution unit CompletedOperations 2025-03-31T15:58:39.089824Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037889 on unit CompletedOperations 2025-03-31T15:58:39.089871Z node 7 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037889 is Executed 2025-03-31T15:58:39.089894Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037889 executing on unit CompletedOperations 2025-03-31T15:58:39.089918Z node 7 :TX_DATASHARD TRACE: Execution plan for [0:6] at 72075186224037889 has finished 2025-03-31T15:58:39.089946Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2025-03-31T15:58:39.090030Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2025-03-31T15:58:39.090711Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [7:940:2780], Recipient [7:719:2597]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T15:58:39.090752Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T15:58:39.090789Z node 7 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [7:939:2779], serverId# [7:940:2780], sessionId# [0:0:0] 2025-03-31T15:58:39.090862Z node 7 :TX_DATASHARD TRACE: StateWork, received 
event# 269553169, Sender [7:938:2778], Recipient [7:719:2597]: NKikimrTxDataShard.TEvGetInfoRequest 2025-03-31T15:58:39.092932Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [7:943:2783], Recipient [7:719:2597]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T15:58:39.092980Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T15:58:39.093015Z node 7 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [7:942:2782], serverId# [7:943:2783], sessionId# [0:0:0] 2025-03-31T15:58:39.093152Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [7:941:2781], Recipient [7:719:2597]: NKikimrTxDataShard.TEvRead ReadId: 1002 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2025-03-31T15:58:39.093291Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037890, FollowerId 0 2025-03-31T15:58:39.093333Z node 7 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037890 CompleteEdge# v1004/1000004 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-03-31T15:58:39.093367Z node 7 :TX_DATASHARD TRACE: 72075186224037890 changed HEAD read to non-repeatable v1004/18446744073709551615 2025-03-31T15:58:39.093420Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037890 on unit CheckRead 2025-03-31T15:58:39.093494Z node 7 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037890 is Executed 2025-03-31T15:58:39.093534Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037890 executing on unit CheckRead 2025-03-31T15:58:39.093576Z node 7 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037890 to execution unit BuildAndWaitDependencies 2025-03-31T15:58:39.093621Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037890 on unit BuildAndWaitDependencies 2025-03-31T15:58:39.093671Z node 7 :TX_DATASHARD TRACE: Activated operation [0:6] at 72075186224037890 2025-03-31T15:58:39.093703Z node 7 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037890 is Executed 2025-03-31T15:58:39.093742Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037890 executing on unit BuildAndWaitDependencies 2025-03-31T15:58:39.093781Z node 7 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037890 to execution unit ExecuteRead 2025-03-31T15:58:39.093809Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037890 on unit ExecuteRead 2025-03-31T15:58:39.093895Z node 7 :TX_DATASHARD TRACE: 72075186224037890 Execute read# 1, request: { ReadId: 1002 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2025-03-31T15:58:39.094063Z node 7 :TX_DATASHARD TRACE: 72075186224037890 Complete read# {[7:941:2781], 1002} after executionsCount# 1 2025-03-31T15:58:39.094123Z node 7 :TX_DATASHARD TRACE: 72075186224037890 read iterator# {[7:941:2781], 1002} sends rowCount# 0, bytes# 0, quota rows left# 18446744073709551615, quota bytes left# 18446744073709551615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-03-31T15:58:39.094181Z node 7 :TX_DATASHARD TRACE: 72075186224037890 read iterator# {[7:941:2781], 1002} finished in read 2025-03-31T15:58:39.094233Z node 7 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037890 is Executed 2025-03-31T15:58:39.094258Z 
node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037890 executing on unit ExecuteRead 2025-03-31T15:58:39.094282Z node 7 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037890 to execution unit CompletedOperations 2025-03-31T15:58:39.094306Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037890 on unit CompletedOperations 2025-03-31T15:58:39.094350Z node 7 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037890 is Executed 2025-03-31T15:58:39.094432Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037890 executing on unit CompletedOperations 2025-03-31T15:58:39.094456Z node 7 :TX_DATASHARD TRACE: Execution plan for [0:6] at 72075186224037890 has finished 2025-03-31T15:58:39.094485Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037890 2025-03-31T15:58:39.094568Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037890 2025-03-31T15:58:39.095857Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [7:946:2786], Recipient [7:716:2595]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T15:58:39.095907Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T15:58:39.095960Z node 7 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037891, clientId# [7:945:2785], serverId# [7:946:2786], sessionId# [0:0:0] 2025-03-31T15:58:39.096103Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269553169, Sender [7:944:2784], Recipient [7:716:2595]: NKikimrTxDataShard.TEvGetInfoRequest 2025-03-31T15:58:39.097027Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [7:949:2789], Recipient [7:716:2595]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T15:58:39.097073Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T15:58:39.097108Z node 7 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037891, clientId# [7:948:2788], serverId# [7:949:2789], sessionId# [0:0:0] 2025-03-31T15:58:39.097306Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [7:947:2787], Recipient [7:716:2595]: NKikimrTxDataShard.TEvRead ReadId: 1003 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2025-03-31T15:58:39.097425Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037891, FollowerId 0 2025-03-31T15:58:39.097504Z node 7 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037891 CompleteEdge# v1004/1000004 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-03-31T15:58:39.097545Z node 7 :TX_DATASHARD TRACE: 72075186224037891 changed HEAD read to non-repeatable v1004/18446744073709551615 2025-03-31T15:58:39.097597Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037891 on unit CheckRead 2025-03-31T15:58:39.097684Z node 7 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037891 is Executed 2025-03-31T15:58:39.097715Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037891 executing on unit CheckRead 2025-03-31T15:58:39.097744Z node 7 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037891 to execution unit BuildAndWaitDependencies 2025-03-31T15:58:39.097782Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037891 on unit 
BuildAndWaitDependencies 2025-03-31T15:58:39.097846Z node 7 :TX_DATASHARD TRACE: Activated operation [0:6] at 72075186224037891 2025-03-31T15:58:39.097889Z node 7 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037891 is Executed 2025-03-31T15:58:39.097913Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037891 executing on unit BuildAndWaitDependencies 2025-03-31T15:58:39.097935Z node 7 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037891 to execution unit ExecuteRead 2025-03-31T15:58:39.097959Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037891 on unit ExecuteRead 2025-03-31T15:58:39.098062Z node 7 :TX_DATASHARD TRACE: 72075186224037891 Execute read# 1, request: { ReadId: 1003 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2025-03-31T15:58:39.098238Z node 7 :TX_DATASHARD TRACE: 72075186224037891 Complete read# {[7:947:2787], 1003} after executionsCount# 1 2025-03-31T15:58:39.098282Z node 7 :TX_DATASHARD TRACE: 72075186224037891 read iterator# {[7:947:2787], 1003} sends rowCount# 0, bytes# 0, quota rows left# 18446744073709551615, quota bytes left# 18446744073709551615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-03-31T15:58:39.098338Z node 7 :TX_DATASHARD TRACE: 72075186224037891 read iterator# {[7:947:2787], 1003} finished in read 2025-03-31T15:58:39.098398Z node 7 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037891 is Executed 2025-03-31T15:58:39.098431Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037891 executing on unit ExecuteRead 2025-03-31T15:58:39.098460Z node 7 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037891 to execution unit CompletedOperations 2025-03-31T15:58:39.098485Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037891 on unit CompletedOperations 2025-03-31T15:58:39.098533Z node 7 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037891 is Executed 2025-03-31T15:58:39.098568Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037891 executing on unit CompletedOperations 2025-03-31T15:58:39.098599Z node 7 :TX_DATASHARD TRACE: Execution plan for [0:6] at 72075186224037891 has finished 2025-03-31T15:58:39.098640Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037891 2025-03-31T15:58:39.098729Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037891 >> KqpIndexes::InnerJoinWithNonIndexWherePredicate [GOOD] >> KqpIndexes::InnerJoinSecondaryIndexLookupAndRightTablePredicateNonIndexColumn >> Yq_1::DescribeJob [GOOD] >> Yq_1::DescribeQuery |68.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest >> DataShardWrite::UpsertLostPrepareArbiterRestart [GOOD] >> Yq_1::DeleteConnections [GOOD] >> Yq_1::Create_And_Modify_The_Same_Connection |68.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::CommonHandler [GOOD] Test command err: 2025-03-31T15:56:33.651837Z :ReadSession INFO: Random seed for debugging is 1743436593651251 2025-03-31T15:56:34.628662Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488003154033115429:2280];send_to=[0:7307199536658146131:7762515]; 
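
The TX_DATASHARD read traces above (tablets 72075186224037889/890/891) all walk the same fixed chain of execution units: CheckRead, BuildAndWaitDependencies, ExecuteRead, CompletedOperations, with each unit reporting a status before the plan advances. The following is a minimal standalone sketch of that pipeline shape only — the type and function names here are invented for illustration and are not YDB's actual classes:

    // Hypothetical sketch of the execution-unit loop seen in the trace:
    // an operation is tried on each unit in order; a unit either finishes
    // ("Executed") or parks the operation until it becomes ready.
    #include <cstdio>
    #include <string>
    #include <vector>

    enum class EStatus { Executed, NotReady };

    struct TOp { int Step, TxId; };            // e.g. [0:6] = immediate op, txId 6
    struct TUnit {
        std::string Name;
        EStatus (*Execute)(TOp&);
    };

    static EStatus AlwaysExecuted(TOp&) { return EStatus::Executed; }

    int main() {
        // Unit order taken from the trace above.
        std::vector<TUnit> plan = {
            {"CheckRead", AlwaysExecuted},
            {"BuildAndWaitDependencies", AlwaysExecuted},
            {"ExecuteRead", AlwaysExecuted},
            {"CompletedOperations", AlwaysExecuted},
        };
        TOp op{0, 6};
        for (auto& unit : plan) {
            std::printf("Trying to execute [%d:%d] on unit %s\n",
                        op.Step, op.TxId, unit.Name.c_str());
            if (unit.Execute(op) != EStatus::Executed)
                return 0;  // a real shard would re-queue the op and retry later
            std::printf("Advance execution plan: executed on unit %s\n",
                        unit.Name.c_str());
        }
        std::printf("Execution plan for [%d:%d] has finished\n", op.Step, op.TxId);
    }
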
2025-03-31T15:56:34.628719Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T15:56:34.640095Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488003158012192171:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:56:34.640147Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T15:56:34.922190Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-31T15:56:34.939242Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001203/r3tmp/tmpcPpQ7b/pdisk_1.dat 2025-03-31T15:56:35.239601Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:56:35.239992Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:56:35.242460Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:56:35.242547Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:56:35.246669Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T15:56:35.246810Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:56:35.248216Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:56:35.274200Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29637, node 1 2025-03-31T15:56:35.307531Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T15:56:35.307550Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T15:56:35.451736Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/0l5c/001203/r3tmp/yandexLngWeE.tmp 2025-03-31T15:56:35.451760Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/0l5c/001203/r3tmp/yandexLngWeE.tmp 2025-03-31T15:56:35.451902Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/0l5c/001203/r3tmp/yandexLngWeE.tmp 2025-03-31T15:56:35.452010Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T15:56:35.627239Z INFO: TTestServer started on Port 29837 GrpcPort 29637 TClient is connected to server localhost:29837 PQClient connected to localhost:29637 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:56:36.108098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-03-31T15:56:39.635893Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488003154033115429:2280];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:56:39.635944Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:56:39.648827Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488003158012192171:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:56:39.648895Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:56:50.149084Z node 1 :KQP_PROXY ERROR: TraceId: "01jqpeph8d405jb31wynzj4p8r", Request deadline has expired for 8.676967s seconds 2025-03-31T15:56:50.149166Z node 1 :KQP_PROXY ERROR: TraceId: "01jqpepptd2124e72v7d0x1647", Request deadline has expired for 3.356076s seconds 2025-03-31T15:56:50.158996Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488003226731669282:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:56:50.156608Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003222752593106:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:56:50.159073Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488003226731669271:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:56:50.156696Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003222752593102:2365], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:56:50.157034Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:56:50.159209Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:56:50.162318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-03-31T15:56:50.271778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T15:56:50.271819Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:56:50.278363Z node 2 :TX_PROXY ERROR: Actor# [2:7488003226731669286:2137] txid# 281474976715657, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-31T15:56:50.292613Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488003222752593117:2370], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-03-31T15:56:50.376274Z node 1 :TX_PROXY ERROR: Actor# [1:7488003222752593209:2777] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:56:50.766297Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7488003226731669315:2327], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T15:56:50.766909Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488003222752593219:2376], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T15:56:50.767286Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZGNlYzk0ZTEtNWM2ZjYyOWYtMTFkNjI2NzktZDFlNzI2MDg=, ActorId: [2:7488003226731669268:2319], ActorState: ExecuteState, TraceId: 01jqpepyjk968h6egbvy3hf7sk, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T15:56:50.768289Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YmZjNTk5N2EtNDM2NzkxODUtOTcwZjNlM2ItNDFmY2FkMGM=, ActorId: [1:7488003222752593100:2364], ActorState: ExecuteState, TraceId: 01jqpepyk4de527pz8vk9p40z6, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T15:56:50.769711Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T15:56:50.769715Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T15:56:50.773981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T15:56:51.092253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but pr ... ze: 8 Meta: { "logtype": "unknown", "ident": "unknown", "server": "ipv6:[::1]:37740" } } } } 2025-03-31T15:58:37.806134Z :INFO: [/Root] [/Root] [b4180898-56217e5e-a081b870-51fa2676] Closing read session. Close timeout: 3.000000s 2025-03-31T15:58:37.806185Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:2 2025-03-31T15:58:37.806241Z :INFO: [/Root] [/Root] [b4180898-56217e5e-a081b870-51fa2676] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1527 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 84 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-31T15:58:37.806956Z :INFO: [/Root] [/Root] [b4180898-56217e5e-a081b870-51fa2676] Closing read session. 
Close timeout: 0.000000s 2025-03-31T15:58:37.807010Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:2 2025-03-31T15:58:37.807065Z :INFO: [/Root] [/Root] [b4180898-56217e5e-a081b870-51fa2676] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1528 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 84 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-31T15:58:37.807215Z :NOTICE: [/Root] [/Root] [b4180898-56217e5e-a081b870-51fa2676] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-03-31T15:58:37.813553Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_18025007255552412481_v1 grpc read done: success# 0, data# { } 2025-03-31T15:58:37.813586Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_7_1_18025007255552412481_v1 grpc read failed 2025-03-31T15:58:37.813635Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_7_1_18025007255552412481_v1 grpc closed 2025-03-31T15:58:37.813682Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_7_1_18025007255552412481_v1 is DEAD 2025-03-31T15:58:37.817944Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_7_1_18025007255552412481_v1 2025-03-31T15:58:37.818009Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [7:7488003678838512738:2546] destroyed 2025-03-31T15:58:37.818080Z node 8 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_7_1_18025007255552412481_v1 2025-03-31T15:58:37.817958Z node 7 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [7:7488003678838512735:2543] disconnected; active server actors: 1 2025-03-31T15:58:37.818004Z node 7 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [7:7488003678838512735:2543] client user disconnected session shared/user_7_1_18025007255552412481_v1 2025-03-31T15:58:38.770740Z node 7 :KQP_COMPUTE WARN: TxId: 281474976715692, task: 1, CA Id [7:7488003687428447566:2584]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 0 2025-03-31T15:58:38.802755Z node 7 :KQP_COMPUTE WARN: TxId: 281474976715692, task: 1, CA Id [7:7488003687428447566:2584]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-03-31T15:58:38.856082Z node 7 :KQP_COMPUTE WARN: TxId: 281474976715692, task: 1, CA Id [7:7488003687428447566:2584]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-03-31T15:58:38.937304Z node 7 :KQP_COMPUTE WARN: TxId: 281474976715692, task: 1, CA Id [7:7488003687428447566:2584]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-03-31T15:58:39.023301Z node 7 :KQP_COMPUTE WARN: TxId: 281474976715692, task: 1, CA Id [7:7488003687428447566:2584]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-03-31T15:58:39.182623Z node 7 :KQP_COMPUTE WARN: TxId: 281474976715692, task: 1, CA Id [7:7488003687428447566:2584]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-03-31T15:58:39.182738Z node 7 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976715693. Failed to resolve tablet: 72075186224037890 after several retries. 2025-03-31T15:58:39.182875Z node 7 :KQP_EXECUTER WARN: ActorId: [7:7488003687428447594:2576] TxId: 281474976715693. Ctx: { TraceId: 01jqpet84z0gmp554p637eqxa6, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=MzhlNDQ1NzYtYjJmNmM3ZWMtOTRkYzQyMDAtNGI2MGNiMmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-03-31T15:58:39.183197Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=MzhlNDQ1NzYtYjJmNmM3ZWMtOTRkYzQyMDAtNGI2MGNiMmY=, ActorId: [7:7488003687428447531:2576], ActorState: ExecuteState, TraceId: 01jqpet84z0gmp554p637eqxa6, Create QueryResponse for error on request, msg: 2025-03-31T15:58:39.189429Z node 7 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jqpet8vf4sps1jwfhyxrabjs" } } YdbStatus: UNAVAILABLE ConsumedRu: 458 } 2025-03-31T15:58:41.196737Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:58:41.196786Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:58:41.196831Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T15:58:41.201058Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-31T15:58:41.201899Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-31T15:58:41.202194Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:58:41.204706Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: 13. Commit offset: 31 2025-03-31T15:58:41.212802Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:58:41.212859Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:58:41.212918Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T15:58:41.213661Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-31T15:58:41.214475Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-31T15:58:41.214671Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:58:41.215195Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-31T15:58:41.216558Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-03-31T15:58:41.255714Z :INFO: Error decompressing data: (TZLibDecompressorError) util/stream/zlib.cpp:143: inflate error(incorrect header check) 2025-03-31T15:58:41.255903Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-3) 2025-03-31T15:58:41.256135Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-31T15:58:41.256202Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-03-31T15:58:41.256242Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-03-31T15:58:41.256315Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. 
Number of messages 3, size 16 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { DataDecompressionError: "(TZLibDecompressorError) util/stream/zlib.cpp:143: inflate error(incorrect header check)" Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } } 2025-03-31T15:58:41.314006Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:58:41.314068Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:58:41.314123Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T15:58:41.314479Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-31T15:58:41.323376Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-31T15:58:41.323618Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:58:41.324095Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-31T15:58:41.325779Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:58:41.326137Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-03-31T15:58:41.330624Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-31T15:58:41.330715Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-03-31T15:58:41.330838Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 2). Partition stream id: 1 2025-03-31T15:58:41.334221Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:58:41.334264Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:58:41.334322Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T15:58:41.334664Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-31T15:58:41.335365Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-31T15:58:41.335559Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:58:41.336544Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:58:41.336715Z :DEBUG: Decompression task done. 
Partition/PartitionSessionId: 1 (1-1) 2025-03-31T15:58:41.336813Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-31T15:58:41.336918Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDefaultGroupAttributeDisableNestedGroupsGood [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDefaultGroupAttributeGoodUseListOfHosts |68.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest >> TDataShardTrace::TestTraceWriteImmediateOnShard [GOOD] |68.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest >> TPersQueueMirrorer::TestBasicRemote [GOOD] >> TPersQueueMirrorer::ValidStartStream >> Yq_1::CreateQuery_With_Idempotency [GOOD] >> Yq_1::CreateQuery_Without_Connection >> PrivateApi::GetTask [GOOD] >> PrivateApi::Nodes >> TGRpcStreamingTest::ClientNeverWrites [GOOD] >> ExternalBlobsMultipleChannels::WithNewColumnFamilyAndCompaction [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceWriteImmediateOnShard [GOOD] Test command err: 2025-03-31T15:58:40.865341Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:58:40.866320Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:40.866518Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0015b4/r3tmp/tmpqxSx0h/pdisk_1.dat 2025-03-31T15:58:41.310187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T15:58:41.359000Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:58:41.398886Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:58:41.399015Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:58:41.410576Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:58:41.508315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 >> ExternalBlobsMultipleChannels::ExtBlobsMultipleColumns [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-4 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-5 |68.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb |68.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb |68.4%| [LD] {RESULT} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb >> DataShardSnapshots::MvccSnapshotReadWithLongPlanQueue [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesWithoutConflicts-UseSink |68.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows |68.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows |68.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows |68.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> Yq_1::Basic [GOOD] >> Yq_1::Basic_EmptyList ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::UpsertLostPrepareArbiterRestart [GOOD] Test command err: 2025-03-31T15:57:54.857633Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:57:54.857909Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:57:54.858106Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001208/r3tmp/tmpwOUJLz/pdisk_1.dat 2025-03-31T15:57:55.286879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T15:57:55.345154Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:57:55.392583Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:57:55.392724Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:57:55.404413Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:57:55.496918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T15:57:55.582325Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-03-31T15:57:55.583887Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-03-31T15:57:55.584458Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-31T15:57:55.584805Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:57:55.645881Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T15:57:55.646803Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:57:55.646962Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T15:57:55.649608Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T15:57:55.649696Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T15:57:55.649797Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T15:57:55.650440Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T15:57:55.650609Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T15:57:55.650726Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-31T15:57:55.663655Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T15:57:55.716491Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T15:57:55.716712Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T15:57:55.716857Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-31T15:57:55.716895Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T15:57:55.716935Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T15:57:55.716974Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T15:57:55.717213Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T15:57:55.717266Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T15:57:55.717533Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T15:57:55.717621Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T15:57:55.717758Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T15:57:55.717804Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T15:57:55.717873Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-31T15:57:55.717913Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-31T15:57:55.717999Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-31T15:57:55.719656Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T15:57:55.719755Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T15:57:55.719931Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T15:57:55.719976Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T15:57:55.720029Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-31T15:57:55.720136Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-03-31T15:57:55.720178Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T15:57:55.720318Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T15:57:55.720531Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-31T15:57:55.720624Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T15:57:55.720740Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T15:57:55.720797Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-31T15:57:55.720847Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-31T15:57:55.720886Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
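
The propose trace above shows a second pattern: StoreSchemeTx answers DelayCompleteNoMoreRestarts and FinishPropose answers DelayComplete during Execute, and only later does TTxProposeTransactionBase::Complete replay "Complete execution ... on unit StoreSchemeTx / FinishPropose". Below is a minimal sketch of that deferred-completion idea under the assumption of invented names — it is not YDB's actual API:

    // Hypothetical sketch: units whose Execute returns a DelayComplete*
    // status are remembered, and their completion runs only after the
    // underlying local-db transaction commits.
    #include <cstdio>
    #include <string>
    #include <vector>

    enum class EStatus { Executed, DelayComplete, DelayCompleteNoMoreRestarts };

    struct TUnit { std::string Name; EStatus Status; };

    int main() {
        // Statuses taken from the propose trace above.
        std::vector<TUnit> plan = {
            {"CheckSchemeTx", EStatus::Executed},
            {"StoreSchemeTx", EStatus::DelayCompleteNoMoreRestarts},
            {"FinishPropose", EStatus::DelayComplete},
        };
        std::vector<std::string> delayed;
        for (auto& u : plan) {
            std::printf("Trying to execute on unit %s\n", u.Name.c_str());
            if (u.Status != EStatus::Executed)
                delayed.push_back(u.Name);  // completion deferred to commit time
        }
        // ... the local database transaction commits here ...
        for (auto& name : delayed)
            std::printf("Complete execution on unit %s\n", name.c_str());
    }
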
2025-03-31T15:57:55.720922Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-31T15:57:55.721247Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-31T15:57:55.721292Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-31T15:57:55.721326Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-31T15:57:55.721359Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-31T15:57:55.721407Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-31T15:57:55.721440Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-31T15:57:55.721473Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-31T15:57:55.721517Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-31T15:57:55.721546Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-31T15:57:55.723465Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-31T15:57:55.723524Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T15:57:55.738749Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T15:57:55.738828Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-31T15:57:55.738876Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-31T15:57:55.738931Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-03-31T15:57:55.739006Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-31T15:57:55.914026Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T15:57:55.914099Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T15:57:55.914145Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-31T15:57:55.914659Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:568:2495], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-03-31T15:57:55.914702Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-31T15:57:55.914827Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-31T15:57:55.914872Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-03-31T15:57:55.914918Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-03-31T15:57:55.914960Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-03-31T15:57:55.928167Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T15:57:55.928275Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T15:57:55.929001Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T15:57:55.929047Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T15:57:55.929109Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T15:57:5 ... chemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2025-03-31T15:58:42.263101Z node 6 :TX_DATASHARD TRACE: 72075186224037889 Complete read# {[6:1003:2819], 1001} after executionsCount# 1 2025-03-31T15:58:42.263148Z node 6 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[6:1003:2819], 1001} sends rowCount# 0, bytes# 0, quota rows left# 18446744073709551615, quota bytes left# 18446744073709551615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-03-31T15:58:42.263249Z node 6 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[6:1003:2819], 1001} finished in read 2025-03-31T15:58:42.263304Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037889 is Executed 2025-03-31T15:58:42.263328Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037889 executing on unit ExecuteRead 2025-03-31T15:58:42.263351Z node 6 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037889 to execution unit CompletedOperations 2025-03-31T15:58:42.263378Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037889 on unit CompletedOperations 2025-03-31T15:58:42.263421Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037889 is Executed 2025-03-31T15:58:42.263441Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037889 executing on unit CompletedOperations 2025-03-31T15:58:42.263465Z node 6 :TX_DATASHARD TRACE: Execution plan for [0:3] at 72075186224037889 has finished 2025-03-31T15:58:42.263492Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2025-03-31T15:58:42.263607Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2025-03-31T15:58:42.264458Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [6:1009:2825], Recipient [6:718:2597]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T15:58:42.264501Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T15:58:42.264537Z node 6 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [6:1008:2824], serverId# [6:1009:2825], sessionId# [0:0:0] 2025-03-31T15:58:42.264662Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269553169, Sender 
[6:1007:2823], Recipient [6:718:2597]: NKikimrTxDataShard.TEvGetInfoRequest 2025-03-31T15:58:42.265387Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [6:1012:2828], Recipient [6:718:2597]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T15:58:42.265423Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T15:58:42.265477Z node 6 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [6:1011:2827], serverId# [6:1012:2828], sessionId# [0:0:0] 2025-03-31T15:58:42.265670Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [6:1010:2826], Recipient [6:718:2597]: NKikimrTxDataShard.TEvRead ReadId: 1002 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2025-03-31T15:58:42.265769Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037890, FollowerId 0 2025-03-31T15:58:42.265806Z node 6 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037890 CompleteEdge# v1001/1000001 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-03-31T15:58:42.265842Z node 6 :TX_DATASHARD TRACE: 72075186224037890 changed HEAD read to non-repeatable v4000/18446744073709551615 2025-03-31T15:58:42.265906Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037890 on unit CheckRead 2025-03-31T15:58:42.265978Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037890 is Executed 2025-03-31T15:58:42.266002Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037890 executing on unit CheckRead 2025-03-31T15:58:42.266025Z node 6 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037890 to execution unit BuildAndWaitDependencies 2025-03-31T15:58:42.266067Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037890 on unit BuildAndWaitDependencies 2025-03-31T15:58:42.266116Z node 6 :TX_DATASHARD TRACE: Activated operation [0:3] at 72075186224037890 2025-03-31T15:58:42.266150Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037890 is Executed 2025-03-31T15:58:42.266173Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037890 executing on unit BuildAndWaitDependencies 2025-03-31T15:58:42.266196Z node 6 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037890 to execution unit ExecuteRead 2025-03-31T15:58:42.266218Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037890 on unit ExecuteRead 2025-03-31T15:58:42.266294Z node 6 :TX_DATASHARD TRACE: 72075186224037890 Execute read# 1, request: { ReadId: 1002 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2025-03-31T15:58:42.266448Z node 6 :TX_DATASHARD TRACE: 72075186224037890 Complete read# {[6:1010:2826], 1002} after executionsCount# 1 2025-03-31T15:58:42.266488Z node 6 :TX_DATASHARD TRACE: 72075186224037890 read iterator# {[6:1010:2826], 1002} sends rowCount# 0, bytes# 0, quota rows left# 18446744073709551615, quota bytes left# 18446744073709551615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-03-31T15:58:42.266578Z node 6 :TX_DATASHARD TRACE: 72075186224037890 read iterator# {[6:1010:2826], 1002} finished in read 2025-03-31T15:58:42.266622Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037890 is Executed 2025-03-31T15:58:42.266647Z node 6 :TX_DATASHARD 
TRACE: Advance execution plan for [0:3] at 72075186224037890 executing on unit ExecuteRead 2025-03-31T15:58:42.266693Z node 6 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037890 to execution unit CompletedOperations 2025-03-31T15:58:42.266720Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037890 on unit CompletedOperations 2025-03-31T15:58:42.266765Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037890 is Executed 2025-03-31T15:58:42.266787Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037890 executing on unit CompletedOperations 2025-03-31T15:58:42.266813Z node 6 :TX_DATASHARD TRACE: Execution plan for [0:3] at 72075186224037890 has finished 2025-03-31T15:58:42.266839Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037890 2025-03-31T15:58:42.266924Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037890 2025-03-31T15:58:42.267736Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [6:1016:2832], Recipient [6:715:2595]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T15:58:42.267780Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T15:58:42.267815Z node 6 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037891, clientId# [6:1015:2831], serverId# [6:1016:2832], sessionId# [0:0:0] 2025-03-31T15:58:42.267915Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269553169, Sender [6:1014:2830], Recipient [6:715:2595]: NKikimrTxDataShard.TEvGetInfoRequest 2025-03-31T15:58:42.268641Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [6:1019:2835], Recipient [6:715:2595]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T15:58:42.268679Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T15:58:42.268710Z node 6 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037891, clientId# [6:1018:2834], serverId# [6:1019:2835], sessionId# [0:0:0] 2025-03-31T15:58:42.268857Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [6:1017:2833], Recipient [6:715:2595]: NKikimrTxDataShard.TEvRead ReadId: 1003 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2025-03-31T15:58:42.268974Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037891, FollowerId 0 2025-03-31T15:58:42.269016Z node 6 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037891 CompleteEdge# v1000/281474976715657 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-03-31T15:58:42.269047Z node 6 :TX_DATASHARD TRACE: 72075186224037891 changed HEAD read to non-repeatable v4000/18446744073709551615 2025-03-31T15:58:42.269093Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037891 on unit CheckRead 2025-03-31T15:58:42.269157Z node 6 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037891 is Executed 2025-03-31T15:58:42.269200Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037891 executing on unit CheckRead 2025-03-31T15:58:42.269227Z node 6 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037891 to execution unit BuildAndWaitDependencies 2025-03-31T15:58:42.269251Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037891 on unit 
BuildAndWaitDependencies 2025-03-31T15:58:42.269297Z node 6 :TX_DATASHARD TRACE: Activated operation [0:2] at 72075186224037891 2025-03-31T15:58:42.269329Z node 6 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037891 is Executed 2025-03-31T15:58:42.269351Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037891 executing on unit BuildAndWaitDependencies 2025-03-31T15:58:42.269377Z node 6 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037891 to execution unit ExecuteRead 2025-03-31T15:58:42.269399Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037891 on unit ExecuteRead 2025-03-31T15:58:42.269475Z node 6 :TX_DATASHARD TRACE: 72075186224037891 Execute read# 1, request: { ReadId: 1003 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2025-03-31T15:58:42.269622Z node 6 :TX_DATASHARD TRACE: 72075186224037891 Complete read# {[6:1017:2833], 1003} after executionsCount# 1 2025-03-31T15:58:42.269679Z node 6 :TX_DATASHARD TRACE: 72075186224037891 read iterator# {[6:1017:2833], 1003} sends rowCount# 0, bytes# 0, quota rows left# 18446744073709551615, quota bytes left# 18446744073709551615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-03-31T15:58:42.269737Z node 6 :TX_DATASHARD TRACE: 72075186224037891 read iterator# {[6:1017:2833], 1003} finished in read 2025-03-31T15:58:42.269781Z node 6 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037891 is Executed 2025-03-31T15:58:42.269804Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037891 executing on unit ExecuteRead 2025-03-31T15:58:42.269826Z node 6 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037891 to execution unit CompletedOperations 2025-03-31T15:58:42.269849Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037891 on unit CompletedOperations 2025-03-31T15:58:42.269891Z node 6 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037891 is Executed 2025-03-31T15:58:42.269915Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037891 executing on unit CompletedOperations 2025-03-31T15:58:42.269938Z node 6 :TX_DATASHARD TRACE: Execution plan for [0:2] at 72075186224037891 has finished 2025-03-31T15:58:42.269964Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037891 2025-03-31T15:58:42.270044Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037891 |68.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> DataShardSnapshots::LockedWriteDistributedCommitAborted+UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitAborted-UseSink |68.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TDataShardTrace::TestTraceDistributedUpsert-UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::ClientNeverWrites [GOOD] Test command err: 2025-03-31T15:58:40.639787Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488003697779876770:2274];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:58:40.639844Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath #
/home/runner/.ya/build/build_root/0l5c/00142b/r3tmp/tmp4zZDuL/pdisk_1.dat 2025-03-31T15:58:41.694951Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:58:41.754587Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:41.761293Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:58:41.761452Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:58:41.792202Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:58:42.127506Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] stream accepted Name# Session ok# true peer# ipv6:[::1]:36724 2025-03-31T15:58:42.128054Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] facade attach Name# Session actor# [1:7488003706369811689:2267] peer# ipv6:[::1]:36724 2025-03-31T15:58:42.128084Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] facade read Name# Session peer# ipv6:[::1]:36724 2025-03-31T15:58:42.128179Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] facade write Name# Session data# peer# ipv6:[::1]:36724 2025-03-31T15:58:42.128491Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] facade finish Name# Session peer# ipv6:[::1]:36724 grpc status# (0) message# 2025-03-31T15:58:42.129168Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] write finished Name# Session ok# true peer# ipv6:[::1]:36724 2025-03-31T15:58:42.129499Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] read finished Name# Session ok# false data# peer# ipv6:[::1]:36724 2025-03-31T15:58:42.129537Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] stream done notification Name# Session ok# true peer# ipv6:[::1]:36724 2025-03-31T15:58:42.129563Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] stream finished Name# Session ok# true peer# ipv6:[::1]:36724 grpc status# (0) message# 2025-03-31T15:58:42.129637Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] deregistering request Name# Session peer# ipv6:[::1]:36724 (finish done) 2025-03-31T15:58:42.129864Z node 1 :GRPC_SERVER DEBUG: Received TEvWriteFinished, success = 1 2025-03-31T15:58:42.129887Z node 1 :GRPC_SERVER DEBUG: Received TEvReadFinished, success = 0 2025-03-31T15:58:42.129911Z node 1 :GRPC_SERVER DEBUG: Received TEvNotifiedWhenDone ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::ExtBlobsMultipleColumns [GOOD] Test command err: 2025-03-31T15:58:34.196674Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:58:34.196989Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:34.197251Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00168e/r3tmp/tmpGp5dLB/pdisk_1.dat 2025-03-31T15:58:34.610551Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T15:58:34.714006Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:58:34.760964Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:58:34.761189Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:58:34.774848Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:58:34.867554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T15:58:35.233630Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:742:2623], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:35.233797Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:752:2628], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:35.233881Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:35.239972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T15:58:35.433730Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:756:2631], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T15:58:35.511696Z node 1 :TX_PROXY ERROR: Actor# [1:830:2674] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:58:35.873748Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqpet56z7c62e7dxj51mj7p1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTM0YTkwMTctN2ZlYzE4MTMtZmNjNzA2ZTUtNGMyZjczNWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:35.958357Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqpet5vgdpnqz8mpbnyx4e2a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjcyZTZjZjUtYmI0M2ZlOWQtZmZjZWU1ZjMtNWEzMDM0Y2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:36.033566Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqpet5xx58r404y5s6zam2nx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDRlNmFlMmEtYTJjYmI1OTEtMTIwMzA1ZjItNjk2ZTM2MTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:36.108763Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqpet608c7kqe51941tnf69m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTBjMWEwODQtMzdkNzhkNGMtZTQ1NDkxMzEtYzA2OGI0NA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:36.185540Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jqpet62k3v7wnaksqs8x9rwn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmZlYzM5NmQtN2EwZWYyMWMtNzRjMGU0MzMtZjBiNDdmYzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:36.260589Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jqpet65064dwfqh0k3vgmmas, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjgwZjdhMTYtYTY0ZjcyNmEtMjk0YzE3LTYxOTFhYjQ5, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:36.334634Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jqpet67bdmtqhn3fzdze8ey1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDk0OWUzZTItMTgwN2JkNWYtNzM0NWM1MjQtYzc3YWZkYjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:36.408096Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jqpet69n8y5yaqeqddh5vmdf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWIyZjY0OS00ODJmYmFjOC0zZTFjM2FmOC01ZDc4ZGY3MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:36.481723Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jqpet6bz9b7vcp6skbv9y89r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWVjOTU2ODQtN2RlMWQ0YTItMTczZGUwOTYtZWU1M2NhMWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:36.557057Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715669.
Ctx: { TraceId: 01jqpet6e84qv8bnc8rxb8kwp4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWU4Yzk3Ni0yMjRiMzE0OS02NThhZTMzNy1iOTc1MDYyYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:36.631251Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01jqpet6gm03gnjm7zsdf8nhyz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzBhYzI0MTYtYmZkMGRjMC0yMjk4YmFlYi1iYzExMzRkZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:36.702862Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01jqpet6jy4dr25yxd65yezpfn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmNhYmNmMjEtNTAyNjY3MWYtNzQ2MDYwZjgtYTZmYmMxOWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:36.775997Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jqpet6n6cjc9t33394cg5yzv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTBjZDIwOS05YTlkZmMwZi1iMTVmZTEyNi1jNzdkMjE5Nw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:36.849298Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01jqpet6qef4y2wtrtajt2kdy0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDNlNzQ4ZWItOGRmODcyZGUtNGZiMmRiY2ItNGE3ZDUxYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:36.922016Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01jqpet6sqekywa8xmh3y23hjc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmZkNzYyNDMtMWRkYzQ2ODMtM2E4ZGJmNDYtODdmM2I4ZTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:36.995654Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01jqpet6w1awwx0t25crzqdh55, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmZmNjY5N2YtOGE4NjgyMDUtZGQ4ZTc5MjktMTljMDBlOWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:37.068965Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715676. Ctx: { TraceId: 01jqpet6ya6cjh1aq6cp0stze1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTI1MTM3NDItZTlkYmYwOTMtNjNjOGFhMGYtNjJiYWYzNWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:37.141518Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715677. Ctx: { TraceId: 01jqpet70k23vgaw2m52a3xv4g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTVmMTNhZGMtZGZhZGJlZDUtMmI1MzI0LWVmNThjODgz, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:37.212290Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715678. Ctx: { TraceId: 01jqpet72w256vaz74yybefa02, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTVkZDhmMDQtNmM5ZWI5ZmYtNDU0NGJkY2QtMzY4YTE3M2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:37.283308Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715679.
Ctx: { TraceId: 01jqpet754dzaqagt0s2hqmshy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjY3NDgzNTAtOWMwZmFkY2QtZDAxNWU0MTktMzFhZDNhMTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:37.357895Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715680. Ctx: { TraceId: 01jqpet77aee9aj71t46seyz01, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODFhNDIzMzgtOGI2MGMzYzMtYTlmZjNmNDMtMmI0Y2Q4NWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:37.439985Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715681. Ctx: { TraceId: 01jqpet79n0f3p6b1nbevsnzna, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjdjM2NiZjktYjU2MWVkN2ItODQyZTA3YmYtNzk4MzFlNw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:37.521807Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715682. Ctx: { TraceId: 01jqpet7c7b3k7kkv0pgzr8hgk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWIzMWU3NC02ZTg2YjliYy0zNDlkMGIwZS0zNjdjM2JkOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:37.589820Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715683. Ctx: { TraceId: 01jqpet7es5hst7d6rqgmq1ds6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjJkZjk0NzAtM2Q5ZjA5ZTEtODhmMDQwM2MtYWI4MDJiNDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:37.667630Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715684. Ctx: { TraceId: 01jqpet7gx86ac5r63feyh5d2g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjFhNTIxOTctNjA2MjM5ZTUtYTc0YjRiNS04OWI1OWYzNw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:37.758970Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715685. Ctx: { TraceId: 01jqpet7ka2wcr7r3tbgnzzwq7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjI0OTMwZjItZjhhNTY0OTctNjAyYTRjZjQtNDc4M2M2ZjY=, Cu ... 40Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715725. Ctx: { TraceId: 01jqpetbke139qanw1hyjmcfxe, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWUyZmU3MTctMzkxNWQzNi01MWYxMzYxZC02YTQxMzFkNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:41.935049Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715726. Ctx: { TraceId: 01jqpetbnw8bpy27trpmf0fer5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDY4YmNmYTItM2I5OTYxNzUtNzViMGNlOWMtNzJiOGNkYTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:42.055979Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715727. Ctx: { TraceId: 01jqpetbrp2av38g5ch55b7s9g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTI3ZDk5MzgtNGQ5MjVjNjItYjg2OTAxOTMtYzVhNDE1Yzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:42.171915Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715728. Ctx: { TraceId: 01jqpetbwx80qse7sk41pmybq5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTllNzQ4NTMtYzNjNjRhZDYtNGVlMjA3MDgtMzRhZGE0MGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}.
Database not set, use /Root 2025-03-31T15:58:42.237714Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715729. Ctx: { TraceId: 01jqpetc022rrb2e3tapnm2b0n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGIwMzhkMDktYzdjNTdiMTktOTVjNzZiNS0xOWRhNjc1Nw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:42.309483Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715730. Ctx: { TraceId: 01jqpetc243cvkc8bmtjs9491c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjE4YjQ4MWItNTkyNzI5ZTItODJjNDIwOWYtMzRmZWViMWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:42.404805Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715731. Ctx: { TraceId: 01jqpetc4bdge0vnf5bjp5ep6z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGJlNmNjNTctZDg3NWM5Y2EtZWU1ZmYyMDUtYWYyNjhhYTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:42.468647Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715732. Ctx: { TraceId: 01jqpetc7babgnr3mqp21v2ndw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDYyMTBhY2YtNGJlNTExYjktMmFlZTY1ODItNDUyMmMzZmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:42.531826Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715733. Ctx: { TraceId: 01jqpetc9b2gxsftncsvjfxs3f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2UxMjA4MWUtMjY5OTEyY2YtNzk5Njg2OTgtMWQ2YjA4OGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:42.593845Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715734. Ctx: { TraceId: 01jqpetcba11mt09rqk0mcwgx2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDIwYTZhMzMtYWE5YzQ1MmEtN2U2YmNmNTEtNDUwZmZiZTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:42.664027Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715735. Ctx: { TraceId: 01jqpetcd9dgwyb4yjwmhv0m6c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmI5NTc2MDMtZGYxYWUwZDctYTc0MWE3OGQtZDU1YmZjNzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:42.730345Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715736. Ctx: { TraceId: 01jqpetcfe49qskq4rpvmnmehh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTM2MjI1NmYtZWI2MWZjZWUtOTRhMzdhNy1iMzIwNTA2OA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:42.797215Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715737. Ctx: { TraceId: 01jqpetchh3czanpbnz9zs3ms6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTZjZmJjM2EtMzZkNzk0NGEtMTA0NTZhNWQtNTYwNmZhZjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:42.869769Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715738. Ctx: { TraceId: 01jqpetckm36tcrw2xnm9mges9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGQyY2Q2YjUtYjk1MzYxYjctNTlhZmUyMTctNjczZWY4NDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:42.955459Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715739.
Ctx: { TraceId: 01jqpetcnx37rmmg20grbv4vmc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzhhYThkZjAtNDY5MDY3MGYtMThkNzFlZDAtMzc0ZjA0YjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:43.111238Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715740. Ctx: { TraceId: 01jqpetcv2c6f6drcgvx612adz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGM5YTRhNTUtZTcwZDcwMGYtOGMwMmIzMzAtODA4YzM0NzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:43.181141Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715741. Ctx: { TraceId: 01jqpetcxe1cq4cdewk9s10tn0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2MxMWRhYzctOTM1MzExYmEtMTFiZmY2MWYtZGEzZmZiZTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:43.241337Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715742. Ctx: { TraceId: 01jqpetczk45y3e0ta9t4gz15c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjRlYTRjN2EtYzFiODVkNGMtZDM5OGM2ZTAtM2RjYTQ4YmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:43.299946Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715743. Ctx: { TraceId: 01jqpetd1g2dd6g5dz85t6swq4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTY1OWRmZDYtZDE5NGM2YzEtY2M5MjRmZjctNGRiMjgzYzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:43.369084Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715744. Ctx: { TraceId: 01jqpetd3b6cydy0zwhgr9dvsg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzNjOGUzNTMtZDRiN2NkODUtNGZhODU3MmQtMjUwMzliZDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:43.448464Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715745. Ctx: { TraceId: 01jqpetd5m1gzqkbds1mkzs66c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yzc2M2Y1M2EtM2IwYTZmMDMtZjk1ZDNiNzktMTgxMTViMGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:43.521978Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715746. Ctx: { TraceId: 01jqpetd7z746rekx6ky1wsgpk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjJkZmY1MGItYWIyZmYyZjAtZDY4MDc5OTMtOWYzMjA4OWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:43.621273Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715747. Ctx: { TraceId: 01jqpetda94xhpma5ywakr90qh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjA0NDcxNDgtYmE0NjI0YzgtZDZjMTg5NGMtMzQxMjcwZWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:43.689194Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715748. Ctx: { TraceId: 01jqpetddc560n813zhjbbtv97, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTZkN2JmMjQtYjM1MDU0MTktNzg2YTc0MmQtNWJhMGQzYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:43.759838Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715749.
Ctx: { TraceId: 01jqpetdfjfa0xrgxga2cy6cpm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDY5MGU2MTAtNTRjYmUyZjItODEzNThlMzAtODExYTZkMjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:43.856237Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715750. Ctx: { TraceId: 01jqpetdhq99cbfkt3xns1bfmw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDQ0MWU3Y2EtYzFjZjFlYTQtZjAzZjcwY2EtNGZkOGY5NjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:43.925754Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715751. Ctx: { TraceId: 01jqpetdmq85hezxnrtmq0tj7t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzE5NzRhNGQtNGZlY2RiYzYtOGI0NmZjZjctMTkwY2RkMGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:44.004270Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715752. Ctx: { TraceId: 01jqpetdpw04bg3yxsvkd0qhv7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTU5ODFhMWQtNzg4MWY0YzYtZDk5MWFkZjgtZGIzMzVkMGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:44.082619Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715753. Ctx: { TraceId: 01jqpetdsc0959armt8ktqrkwj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Nzg4MzcwM2QtODhhNzI0ZTItZWNkNTAwNDUtMTk0NjFmNDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:44.151924Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715754. Ctx: { TraceId: 01jqpetdvt33hc1nyrvpt8sdcd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTg4MzIwNmYtZDJhMTg2NGItMjNkNTczM2ItZTI5NjA3OTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:44.223989Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715755. Ctx: { TraceId: 01jqpetdxzeycfjyb9r1rjf9r6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTBkNmY2YTQtZjgyZDlhNWQtOGM3NjBlYzctNzViZTIyMjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:44.297716Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715756. Ctx: { TraceId: 01jqpete070aakb7rvjsr3spzd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmRjNmJmYzktMzFkYzg3ODEtODZhYmYzZmItOTMyMTgyYjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:44.369338Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715757. Ctx: { TraceId: 01jqpete2h3zknbej6kp9t42f5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDFjMGQwNGItY2YwYWUxODgtYWI0NjFmZGEtYzk3NjIwY2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:44.439212Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715758. Ctx: { TraceId: 01jqpete4r46s4pyg4mbhtbh8n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODlkMGRhMTEtNDY3NGM0ODgtNDIwYjZiOC01NmMwYTRhZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:44.507968Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715759.
Ctx: { TraceId: 01jqpete6y9ggrqhp7bbdrgqv3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTk3ZThjM2MtYWQxOTQyODQtMjQ2ODUwOTctZGNkNzk2OTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:44.674930Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715760. Ctx: { TraceId: 01jqpete9e11yc8dfbk67jk167, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjYwNzk5OGMtZTM0MGU3NTktODg3MWYxNDctYWVmZjIwNw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> TS3WrapperTests::MultipartUpload >> DataShardSnapshots::MvccSnapshotLockedWrites-UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesRestart+UseSink >> TS3WrapperTests::MultipartUpload [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::WithNewColumnFamilyAndCompaction [GOOD] Test command err: 2025-03-31T15:58:30.265272Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:58:30.265557Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:30.265774Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0016c6/r3tmp/tmp7snt8d/pdisk_1.dat 2025-03-31T15:58:30.837239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T15:58:30.945222Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:58:30.997147Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:58:30.997304Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:58:31.012586Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:58:31.106519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T15:58:31.742228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 100:0, at schemeshard: 72057594046644480 2025-03-31T15:58:32.153235Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:817:2675], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:32.153383Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:827:2680], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:32.153491Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:32.204462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T15:58:32.477061Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:831:2683], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T15:58:32.550431Z node 1 :TX_PROXY ERROR: Actor# [1:890:2723] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:58:33.672133Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqpet26p6jyd6ezz6h7x977q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzBmODBjNjEtZGQ1NTQ4NGYtNDg4MDNlZGEtNzFlOTY1YTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:33.845155Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqpet3qydzrh2a4h678tm85t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjQ3MDFhYzctZTUzOTRlYmMtYWI4MWZmYzQtZjllMjUyOGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:33.929970Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqpet3vw6mjdnhdx3d9bfm05, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGE4MjEzYTAtMTU3OGI5M2UtZmEwOTczYjItMzY2NzE3ODI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:34.014544Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqpet3yh2m9r98ejr58wa8gj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWZlZGJhNjUtYTcwMDA0Ny1hY2U3Y2FkNy0zZDQ4NjhlMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:34.166595Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jqpet41k66td52cqv8xtt901, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzdmNWMwZWYtOWRmNDg4MTUtZDBmYTFjYmEtYzAwZTk0MGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:34.241714Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jqpet45y1eejqt9pve9bmcy9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjFmZTU3ZjctZDdhOWQyM2QtNTZhM2VjMjUtMjljNWMwYWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:34.328910Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jqpet48d08dfqkyc268h9kgq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGIwMTVhNTktODFjOTk0NDItY2NmOGEyY2YtOWY3M2NjMWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:34.451218Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jqpet4bj1dkxry2brnfr8h9s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGIyYWJjMzMtNTliMGE5ZDQtOTk0MTdjYjAtZTMyZWNlOTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:34.620755Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jqpet4et1eycdcpbfp4fqdh7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjI0MDVlNi1jYTYwODY4MS05MTI5MjRmNC1mM2VkZjk5Yg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:34.845067Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715669.
Ctx: { TraceId: 01jqpet4mn7yhyzpgke3dcjdpv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDU4ZmJmYTQtNDc2N2E2ZWItYzgzMjk2YzQtOGRlMWU3MDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:34.940591Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01jqpet4v7cjxqa78r8qgsfgmz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjY1NDQxMjQtYTFmZGY0MTUtOWJjODAwY2ItYmM5N2U1YTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:35.014855Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01jqpet4y43qyq510rr4jke81w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTAwZmM2OWEtY2E0YTNhYTQtODU0ZDE5ZjMtNGQwZTkxODU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:35.165124Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jqpet50vfecxznm4cw7ahrpr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjkyNTJhNGQtMTNjYmZlYWYtZWQ1NDc0Ni1iNjBhYzgzMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:35.246223Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01jqpet55c0whwt321vftar7zg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2U2NTQ3ZjEtZGY4N2ExZGQtYzM4ZDNmYzMtZmU4ZTA3YTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:35.314272Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01jqpet57r55e7r8mptj91th0k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDM2MzZjZTQtMTk3MzNhZWItMjMwZGE2NmItMzBiN2Y1Zjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:35.441389Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01jqpet59v7xjtj1zq225f3e6r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGNkMDI4MDMtYzJlODFjM2EtMTk5NTdkOTAtYWFiY2UzNWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:35.575956Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715676. Ctx: { TraceId: 01jqpet5dsbgjnjy4kd3mt0w59, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTY3YmQzNGQtY2Q3NGVjNjUtOWRhY2JlZTUtMThiNDc0ZjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:35.730257Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715677. Ctx: { TraceId: 01jqpet5hy0b1bq3krvngs9tzt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWE1MzdjOWMtZWZlYWNmNjYtNTk3MjAwNTktZDdlMjhkMjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:35.876441Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715678. Ctx: { TraceId: 01jqpet5ps2y74ww7f6srd1q4f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTczOWM1NGQtZWZjOThhOWMtN2UxMWRiZTUtNGYxNTI1ZTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:35.986069Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715679.
Ctx: { TraceId: 01jqpet5vs8k9w2fn12t1y89xg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDdlNGNkZjEtZmRhM2U4MjgtN2I3YmUzYTctYTU3NjI5MDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:36.098043Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715680. Ctx: { TraceId: 01jqpet5yz80pvjhp5gqe2z5k1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTVhNWQxOGQtYTRkMzMxYWItM2ZjNWUxZWEtZDk4OGU3Yjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:36.172594Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715681. Ctx: { TraceId: 01jqpet62b81jnw882c7vrm0kg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDk0MmViMTYtZGNkZDM5NDAtMjVjY2NjMzEtYmE0NTM3NQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:36.242713Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715682. Ctx: { TraceId: 01jqpet64kcyh01g4s9dp6cmj4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2E0ZTc5OTgtYzMwOTVjNmMtNWRiYmU4MGUtODNlZTUyYzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:36.321529Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715683. Ctx: { TraceId: 01jqpet67d0nngjceg282x1kz3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTRlN2EyN2QtMTI1MzNmODAtOTBjNWZlNjMtNzdhNTc0NWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:36.395914Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715684. Ctx: { TraceId: 01jqpet699brbv8wzsmq50vv2k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTA1NzEwMzgtYmYzOGQ3ZTQtMTM3NmNiN2ItZjZjNjhmZTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:36.49259 ... 807543Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715725. Ctx: { TraceId: 01jqpet9m87wqey20x9ns0mgnr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTBiZTJkYjgtYjFlNTBjMzctMjFjMTJkNjAtOTMxNzE2MDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:39.869848Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715726. Ctx: { TraceId: 01jqpet9p7ehzkc1x5rdqyerd1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjkzZjhjNjgtYjcxOGNiMjItNTM0YmU4MWQtMTUxMWU1NmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:39.977445Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715727. Ctx: { TraceId: 01jqpet9r512yr28wy7dcwdyw5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTQzNDNmOTEtMTc0YzBjYmUtNDk0NzY2ZmItMzhjNmExYTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:40.040457Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715728. Ctx: { TraceId: 01jqpet9vhb8h9jnsaaj21g7nv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmVhMWRhZjItNjNjODAyNmEtOWEwNzFhMjMtZGI4ZjkxOTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:40.103720Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715729.
Ctx: { TraceId: 01jqpet9xgcg4tpb7b93d76g0d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGYyNWMzODMtYzQzZDI5YzUtZmY0ZDdkYjItYTllY2Y3ZmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:40.166527Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715730. Ctx: { TraceId: 01jqpet9zfeqs1a4d45e66w1mf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGIwYzA4ZjMtY2U2MWNlMWEtM2MyYWRkNjctYTc5YzFmNmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:40.230464Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715731. Ctx: { TraceId: 01jqpeta1e1cf09wp4sbd32mas, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTQ4MGQwMy05YzNmMTlkMy1hMzY0OGY5MS1lMzUzNzAyMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:40.294986Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715732. Ctx: { TraceId: 01jqpeta3f4zknyygek67a7avd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDZlNDRiODktY2IxNGMwOWUtN2I4NDQ0NWItNWVlN2ZjZWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:40.360158Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715733. Ctx: { TraceId: 01jqpeta5f1ts1k2k38v8rpkk9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmI2YTc2ZmEtZTJlZDQ1M2UtN2VkNjg5ZmEtN2JkY2I5NDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:40.421943Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715734. Ctx: { TraceId: 01jqpeta7g2m7ee0am1t4ak0m8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzhkZTUzYmItODA0NWY3NzctM2E2MGQzZTMtODJmNDljMmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:40.483472Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715735. Ctx: { TraceId: 01jqpeta9db7fkpq3tbmvfcrw5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWQ2ZGQxMjQtZTk2NGRlMGItMjBjODQwZmYtMjk4OGQwOWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:40.545729Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715736. Ctx: { TraceId: 01jqpetabb7hy7d3gg27q19a58, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzdiMWVlNTQtNjgzNmM2N2ItZDVkNTQ5YmEtMzRhOWJiNTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:40.607040Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715737. Ctx: { TraceId: 01jqpetad90zhfhm9cxf6s5egb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjkyOWRkMTUtZjU2ZTNjYTMtMTI1NTcyMzAtMzljNThjMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:40.669914Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715738. Ctx: { TraceId: 01jqpetaf643x4tgs41mhvppht, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2Q0NTFlZjUtODFiOGQ5MWItZTczMTgzNzEtMjM5OWUyOTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:40.755921Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715739.
Ctx: { TraceId: 01jqpetahw58mv00tf9st52j69, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWViNmY0ODAtZmE1NzVkZDEtZWNiNmFiMWUtYTBjY2ZjMTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:40.816635Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715740. Ctx: { TraceId: 01jqpetakv83fjh3rn2q8c32cc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWE0ZjU1NTgtNTA2ZjE0Y2MtZjhmMTc1MjQtYjcyMjQwZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:40.878026Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715741. Ctx: { TraceId: 01jqpetanr1tjtbhyd9af2dyz0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzQ4NDAwOTctYzc1M2VkM2YtMjM4NWFkMjQtYzI0MWE3MzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:40.938782Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715742. Ctx: { TraceId: 01jqpetaqnewcexbpb6yf68sbz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2EzNzMxZWYtZWM4OTY1NTktNGU4ZmNlMGItNDgyM2FmODg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:41.000450Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715743. Ctx: { TraceId: 01jqpetasj32nya9z98gtn0ddw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Zjk5YTNjZWMtNGY1N2I2MmItNDA2M2IzNTYtNTI4ZGFkZmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:41.062233Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715744. Ctx: { TraceId: 01jqpetavgayaeace8q972kc49, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTExODZmOTctYmExMTRmNWItZTNlNmNmMTQtZjI5MDIzNTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:41.124029Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715745. Ctx: { TraceId: 01jqpetaxe4q0v3wgwsekxcpey, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTJhNjhlNjQtODBmYjBiNzQtMTA5ZDkxODUtNWZlYWVmNWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:41.188313Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715746. Ctx: { TraceId: 01jqpetazcckaynq583t78beck, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTBlMzk4ZTMtMTVjMjQ4ZTQtN2E4ZWJjZGEtYzQ4ZmJmZWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:41.250324Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715747. Ctx: { TraceId: 01jqpetb1c3cbghek4x41a9cxt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzAyM2UwZjMtYmU2ZmNlYzQtNDg1OTc0YTgtOGQzYzBhMmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:41.309935Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715748. Ctx: { TraceId: 01jqpetb3ac04m088hkt12jp37, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzhhNTRkOTItOGM1MTFhNTctNzgwODRjZGUtNWEyNDcxYzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:41.369777Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715749.
Ctx: { TraceId: 01jqpetb551ph9ctyrd387f2w9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTkxNzc2NTMtY2VhYmJjZTMtYzg0NDkwNDgtNDc1MjdkMTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:41.451473Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715750. Ctx: { TraceId: 01jqpetb71fbmpm85r9e3jkfky, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODgxMzBhNzMtNGJlYzVhNzAtMmFjMjMxODktNTQ2NGM1NDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:41.514647Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715751. Ctx: { TraceId: 01jqpetb9ke3n6rjdxw2pte1yc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjRlOWNhZGYtY2ZjZWY1MTYtN2ZjMmM1M2MtODliOTI0Mw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:41.698325Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715752. Ctx: { TraceId: 01jqpetbbj8mz1h8fh1w9p9843, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmQzY2RkYTItM2FmZWM3MzAtOTM0MmIwNzYtZGExOTE4NDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:41.967954Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715753. Ctx: { TraceId: 01jqpetbj982m534baw3zqs09g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWMxN2I4MDgtMzQ1NTE3YjMtMWYyYThmOTQtNzNiNmM0OGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:42.165690Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715754. Ctx: { TraceId: 01jqpetbt99he0fsjskjmdkfcr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzM0NzMyYzQtZmRhYzE0NTctZDM2YmI3MS1jOGRmZjBhMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:42.300749Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715755. Ctx: { TraceId: 01jqpetc0ec19sm34tq20pw5ay, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmRjMTk4NTgtMTUwYmJjMjMtNDQ4NjM2MWEtZGZmM2EyODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:42.373545Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715756. Ctx: { TraceId: 01jqpetc4de4m4htdmfs7c3w2b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGI5MzU4YjktMTRkMDBiNGUtZmEyYTgwMGEtYWFkZDk4NDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:42.436437Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715757. Ctx: { TraceId: 01jqpetc6d5jghcv15pzvhadd0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTZkZTViYjktNDI4NmVhNzAtMmU1NmRjMjAtM2Y5OWI0NDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:42.554335Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715758. Ctx: { TraceId: 01jqpetc8jegjrw7m2j6cds3pm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDMyMzRmNi01ZDUxZTZmMS1lZjUyZjM5ZC04YTk2OGMx, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:42.656389Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715759.
Ctx: { TraceId: 01jqpetcc6czk2kn0ppxr7nmvc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Zjg0N2M2NzctNmVjNDNiYi0zZTcwOWM0OC1hNWRjNTNjYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:44.148484Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715760. Ctx: { TraceId: 01jqpetcw32x672chsa7n9sp9p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTNiZDM3MmYtY2IxMDY4MWEtN2JjOGI0NGEtYzc3OWQ1ZDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> DataShardSnapshots::LockedWriteDistributedCommitSuccess+UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitSuccess-UseSink >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_AutoscaleAwareSDK [GOOD] >> KqpJoinOrder::TPCH9_100 |68.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat |68.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat |68.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat >> KqpJoin::CrossJoinCount ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceDistributedUpsert-UseSink [GOOD] Test command err: 2025-03-31T15:58:40.825855Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:58:40.826129Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:40.826290Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0015c5/r3tmp/tmpZIyJ7x/pdisk_1.dat 2025-03-31T15:58:41.296649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T15:58:41.345288Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:58:41.388803Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:58:41.388938Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:58:41.400378Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:58:41.501066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T15:58:43.874066Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:934:2776], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T15:58:43.874236Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:945:2781], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T15:58:43.874340Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T15:58:43.908327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480
2025-03-31T15:58:43.984060Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found
2025-03-31T15:58:44.233021Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:948:2784], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715659 completed, doublechecking }
2025-03-31T15:58:44.442830Z node 1 :TX_PROXY ERROR: Actor# [1:1017:2833] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-31T15:58:45.833868Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqpetdmgez7gn42ht3xfzpra, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzJmMjhlZTItNjRlZDJjOTItYmVlMGUyMjYtYzNhMjhlZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
Trace: (Session.query.QUERY_ACTION_EXECUTE -> [(CompileService -> [(CompileActor)]) , (LiteralExecuter) , (DataExecuter -> [(WaitForTableResolve) , (RunTasks) , (Datashard.Transaction -> [(Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendWithConfirmedReadOnlyLease) , (Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.WriteLog -> [(Tablet.WriteLog.LogEntry)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendResult)]) , (Datashard.Transaction -> [(Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendWithConfirmedReadOnlyLease) , (Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.WriteLog -> [(Tablet.WriteLog.LogEntry)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendResult)])])])
>> KqpIndexLookupJoin::Left+StreamLookup
------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::MultipartUpload [GOOD]
Test command err:
2025-03-31T15:58:46.876187Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 0DAF26EB-E9A0-4681-9D2B-29E904270653, request# CreateMultipartUpload { Bucket: TEST Key: key }
REQUEST: POST /TEST/key?uploads HTTP/1.1
HEADERS:
Host: localhost:20372
Accept: */*
Connection: Upgrade, HTTP2-Settings
Upgrade: h2c
HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA
amz-sdk-invocation-id: 5EFE245B-197B-497C-A10B-3ADBFCBD78BA
amz-sdk-request: attempt=1
content-length: 0
content-type: application/xml
user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8
x-amz-api-version: 2006-03-01
x-amz-storage-class: STANDARD
S3_MOCK::HttpServeAction: 4 / /TEST/key / uploads=
2025-03-31T15:58:46.898986Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 0DAF26EB-E9A0-4681-9D2B-29E904270653, response# CreateMultipartUploadResult { Bucket: Key: TEST/key UploadId: 1 }
2025-03-31T15:58:46.900137Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 85E2E967-F8DB-4348-B258-9C66209FECB9, request# UploadPart { Bucket: TEST Key: key UploadId: 1 PartNumber: 1 }
REQUEST: PUT /TEST/key?partNumber=1&uploadId=1 HTTP/1.1
HEADERS:
Host: localhost:20372
Accept: */*
Connection: Upgrade, HTTP2-Settings
Upgrade: h2c
HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA
amz-sdk-invocation-id: 5F181E28-7A45-4339-B15B-F4A7807F9E45
amz-sdk-request: attempt=1
content-length: 4
content-md5: hBotaJrYa9FhFEdFPCLG/A==
content-type: binary/octet-stream
user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8
S3_MOCK::HttpServeWrite: /TEST/key / partNumber=1&uploadId=1 / 4
2025-03-31T15:58:46.904141Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 85E2E967-F8DB-4348-B258-9C66209FECB9, response# UploadPartResult { ETag: 841a2d689ad86bd1611447453c22c6fc }
2025-03-31T15:58:46.904585Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 7FC4092D-E872-4810-834E-AF97A19EE7AD, request# CompleteMultipartUpload { Bucket: TEST Key: key UploadId: 1 MultipartUpload: { Parts: [841a2d689ad86bd1611447453c22c6fc] } }
REQUEST: POST /TEST/key?uploadId=1 HTTP/1.1
HEADERS:
Host: localhost:20372
Accept: */*
Connection: Upgrade, HTTP2-Settings
Upgrade: h2c
HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA
amz-sdk-invocation-id: 7B83BFF2-00D4-4031-8E6A-59B2D8266713
amz-sdk-request: attempt=1
content-length: 235
content-type: application/xml
user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8
x-amz-api-version: 2006-03-01
S3_MOCK::HttpServeAction: 4 / /TEST/key / uploadId=1
2025-03-31T15:58:46.923966Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 7FC4092D-E872-4810-834E-AF97A19EE7AD, response# CompleteMultipartUploadResult { Bucket: Key: TEST/key ETag: 841a2d689ad86bd1611447453c22c6fc }
2025-03-31T15:58:46.924653Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 2E80A80B-0DA0-4CF2-881A-784C82A4267C, request# GetObject { Bucket: TEST Key: key Range: bytes=0-3 }
REQUEST: GET /TEST/key HTTP/1.1
HEADERS:
Host: localhost:20372
Accept: */*
Connection: Upgrade, HTTP2-Settings
Upgrade: h2c
HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA
amz-sdk-invocation-id: AF240FD7-4FA7-43E0-A8CD-865662C68C78
amz-sdk-request: attempt=1
content-type: application/xml
range: bytes=0-3
user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8
x-amz-api-version: 2006-03-01
S3_MOCK::HttpServeRead: /TEST/key / 4
2025-03-31T15:58:46.948124Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 2E80A80B-0DA0-4CF2-881A-784C82A4267C, response# GetObjectResult { }
>> TDataShardTrace::TestTraceDistributedSelect [GOOD]
>> KqpJoinOrder::CanonizedJoinOrderTPCDS64
>> KqpIndexLookupJoin::SimpleLeftSemiJoin-StreamLookup
>> TDataShardTrace::TestTraceDistributedUpsert+UseSink [GOOD]
>> DataShardWrite::DelayedVolatileTxAndEvWrite [GOOD]
>> TDataShardTrace::TestTraceDistributedSelectViaReadActors [GOOD]
>> TSchemeshardBackgroundCleaningTest::TempInTemp [GOOD]
>> KqpJoin::AllowJoinsForComplexPredicates-StreamLookup
>> DataShardSnapshots::ShardRestartLockBrokenByConflict [GOOD]
>> DataShardSnapshots::ShardRestartWholeShardLockBrokenByUpsert
>> ExternalBlobsMultipleChannels::Simple [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceDistributedSelect [GOOD]
Test command err:
2025-03-31T15:58:40.862826Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-03-31T15:58:40.863118Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-31T15:58:40.863309Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0015e2/r3tmp/tmpxG0mr6/pdisk_1.dat
2025-03-31T15:58:41.281386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480
2025-03-31T15:58:41.325910Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T15:58:41.372195Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T15:58:41.372330Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T15:58:41.384396Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-31T15:58:41.491836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480
2025-03-31T15:58:43.869537Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:934:2776], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T15:58:43.869698Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:945:2781], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T15:58:43.869818Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T15:58:43.903448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480
2025-03-31T15:58:43.984110Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found
2025-03-31T15:58:44.233959Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:948:2784], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715659 completed, doublechecking }
2025-03-31T15:58:44.469022Z node 1 :TX_PROXY ERROR: Actor# [1:1017:2833] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-31T15:58:45.789911Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqpetdmecjs09mjsve023xb2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTdlZDE0N2UtYzFjMDk1NmEtNmUwMjNkM2MtZWUxMDQzYjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T15:58:46.009214Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqpetfk9bx93nvcz9xne1nnf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2FiZjJlNWYtYjhjNDNjYjItMTk3OTk4ZTctODJmMGQ2ODQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T15:58:47.620032Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqpetg1jac9b7ms3df2ffpp2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWVkYjY4MzUtZGU4ZDMxMmUtODk2NWM4YWEtMTEzZWVhZDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceDistributedUpsert+UseSink [GOOD]
Test command err:
2025-03-31T15:58:44.236494Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-03-31T15:58:44.236736Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-31T15:58:44.236906Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0015d0/r3tmp/tmpFOF9Uy/pdisk_1.dat
2025-03-31T15:58:44.699026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480
2025-03-31T15:58:44.764363Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T15:58:44.812774Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T15:58:44.812913Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T15:58:44.828341Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-31T15:58:44.920546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480
2025-03-31T15:58:47.138122Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:934:2776], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T15:58:47.138303Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:945:2781], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T15:58:47.138406Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T15:58:47.144364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480
2025-03-31T15:58:47.170704Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found
2025-03-31T15:58:47.380547Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:948:2784], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715659 completed, doublechecking }
2025-03-31T15:58:47.517620Z node 1 :TX_PROXY ERROR: Actor# [1:1017:2833] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-31T15:58:48.003808Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqpetgtz98kybjrymz87b155, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWU0NGFhNjItZjdiNjljZDctMjUxZDFjNWMtZDQxN2QwYjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
Trace: (Session.query.QUERY_ACTION_EXECUTE -> [(CompileService -> [(CompileActor)]) , (DataExecuter -> [(WaitForTableResolve) , (ComputeActor -> [(ForwardWriteActor)]) , (RunTasks) , (Commit -> [(Datashard.WriteTransaction -> [(Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendWithConfirmedReadOnlyLease) , (Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.WriteLog -> [(Tablet.WriteLog.LogEntry)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendWriteResult)]) , (Datashard.WriteTransaction -> [(Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendWithConfirmedReadOnlyLease) , (Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.WriteLog -> [(Tablet.WriteLog.LogEntry)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendWriteResult)])])])])
------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_AutoscaleAwareSDK [GOOD]
Test command err:
2025-03-31T15:55:35.452721Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488002904282769820:2116];send_to=[0:7307199536658146131:7762515];
2025-03-31T15:55:35.453850Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001933/r3tmp/tmp5K0sfz/pdisk_1.dat
2025-03-31T15:55:36.336409Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created
2025-03-31T15:55:36.552341Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-31T15:55:36.618772Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T15:55:36.646619Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T15:55:36.646705Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T15:55:36.656455Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 61077, node 1
2025-03-31T15:55:36.831823Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/0l5c/001933/r3tmp/yandexFUc0hl.tmp
2025-03-31T15:55:36.831849Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/0l5c/001933/r3tmp/yandexFUc0hl.tmp
2025-03-31T15:55:36.832036Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/0l5c/001933/r3tmp/yandexFUc0hl.tmp
2025-03-31T15:55:36.832127Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
2025-03-31T15:55:36.913259Z INFO: TTestServer started on Port 9208 GrpcPort 61077
TClient is connected to server localhost:9208
PQClient connected to localhost:61077
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-31T15:55:37.278625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T15:55:37.325210Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
waiting...
2025-03-31T15:55:37.351586Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480
2025-03-31T15:55:37.356835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
waiting...
waiting...
2025-03-31T15:55:37.538296Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480
2025-03-31T15:55:40.419269Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488002904282769820:2116];send_to=[0:7307199536658146131:7762515];
2025-03-31T15:55:40.419329Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-31T15:55:44.814631Z node 1 :KQP_PROXY ERROR: TraceId: "01jqpemqpp68tv6bb7pfmrwe89", Request deadline has expired for 2.267523s seconds
2025-03-31T15:55:45.001208Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488002942937476264:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T15:55:45.011649Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T15:55:45.034890Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488002947232443589:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T15:55:45.090635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480
2025-03-31T15:55:45.309277Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488002947232443591:2354], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710662 completed, doublechecking }
2025-03-31T15:55:45.443606Z node 1 :TX_PROXY ERROR: Actor# [1:7488002947232443647:2468] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-31T15:55:47.506704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-31T15:55:47.555209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-31T15:55:47.781815Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488002955822378272:2367], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:5:17: Error: At function: KiReadTable!
<main>:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003
2025-03-31T15:55:47.784010Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWRkNWNhZDUtNjBlYjcxZmItYjlkYThjYy0yYTg3YzA5NQ==, ActorId: [1:7488002942937476262:2348], ActorState: ExecuteState, TraceId: 01jqpemysdcvk61w3gw20wrw9n, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id:
2025-03-31T15:55:47.785982Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 }
2025-03-31T15:55:47.792468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
=== Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000);
=== CheckClustersList. Subcribe to ClusterTracker from [1:7488002960117345869:2670]
2025-03-31T15:55:51.618409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs
2025-03-31T15:55:51.618437Z node 1 :IMPORT WARN: Table profiles were not loaded
=== CheckClustersList. Ok
2025-03-31T15:55:54.816249Z :TopicSplitMerge INFO: TTopicSdkTestSetup started
2025-03-31T15:55:54.922301Z node 1 :PQ_READ_PROXY DEBUG: new create topic request
2025-03-31T15:55:54.924116Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7488002985887149885:2804], Recipient [1:7488002908577737468:2185]: NKikimr::TEvTabletPipe::TEvServerConnected
2025-03-31T15:55:54.924462Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected
2025-03-31T15:55:54.924804Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480
2025-03-31T15:55:54.924837Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7488002985887149881:2801], Recipient [1:7488002908577737468:2185]: {TEvModifySchemeTransaction txid# 281474976710672 TabletId# 72057594046644480}
2025-03-31T15:55:54.925110Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction
2025-03-31T15:55:55.407401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 100 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976710672 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480
2025-03-31T15:55:55.410589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976710672:0, at schemeshard: 72057594046644480
2025-03-31T15:55:55.428014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId ...
03Z node 5 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_5_1_5469898865869748468_v1
2025-03-31T15:58:45.419922Z node 5 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_5_1_5469898865869748468_v1
2025-03-31T15:58:45.419939Z node 5 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_5_1_5469898865869748468_v1
2025-03-31T15:58:45.432110Z :INFO: [/Root] TraceId [] SessionId [producer-1|cfcd117e-4e75e5f9-245376c5-e192f3f0_0] PartitionId [0] Generation [1] Write session will now close
2025-03-31T15:58:45.432176Z :DEBUG: [/Root] TraceId [] SessionId [producer-1|cfcd117e-4e75e5f9-245376c5-e192f3f0_0] PartitionId [0] Generation [1] Write session: aborting
2025-03-31T15:58:45.432375Z :INFO: [/Root] TraceId [] SessionId [producer-1|cfcd117e-4e75e5f9-245376c5-e192f3f0_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete
2025-03-31T15:58:45.432416Z :DEBUG: [/Root] TraceId [] SessionId [producer-1|cfcd117e-4e75e5f9-245376c5-e192f3f0_0] PartitionId [0] Generation [1] Write session: destroy
2025-03-31T15:58:45.435439Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [5:7488003602131172198:2181]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime
2025-03-31T15:58:45.435514Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime
2025-03-31T15:58:45.435621Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [5:7488003602131172198:2181], Recipient [5:7488003602131172198:2181]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime
2025-03-31T15:58:45.435646Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime
2025-03-31T15:58:45.443305Z node 5 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: producer-1|cfcd117e-4e75e5f9-245376c5-e192f3f0_0 grpc read done: success: 0 data:
2025-03-31T15:58:45.443342Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: producer-1|cfcd117e-4e75e5f9-245376c5-e192f3f0_0 grpc read failed
2025-03-31T15:58:45.443378Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: producer-1|cfcd117e-4e75e5f9-245376c5-e192f3f0_0 grpc closed
2025-03-31T15:58:45.443392Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: producer-1|cfcd117e-4e75e5f9-245376c5-e192f3f0_0 is DEAD
2025-03-31T15:58:45.444839Z node 5 :PQ_PARTITION_CHOOSER TRACE: StateIdle, received event# 65543, Sender [5:7488003709505356802:2752], Recipient [5:7488003709505356804:2752]: NActors::TEvents::TEvPoison
2025-03-31T15:58:45.444921Z node 5 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison
2025-03-31T15:58:45.446452Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 269877764, Sender [5:7488003709505356840:3356], Recipient [5:7488003670850650014:2465]: NKikimr::TEvTabletPipe::TEvServerDisconnected
2025-03-31T15:58:45.446488Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvTabletPipe::TEvServerDisconnected
2025-03-31T15:58:45.446509Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected
2025-03-31T15:58:45.446543Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [5:7488003709505356838:2752] destroyed
2025-03-31T15:58:45.446617Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188506 (NKikimr::TEvPQ::TEvPipeDisconnected), Tablet [5:7488003670850650014:2465], Partition 0, Sender [5:7488003670850650014:2465], Recipient [5:7488003670850650075:2469], Cookie: 0
2025-03-31T15:58:45.446673Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188506, Sender [5:7488003670850650014:2465], Recipient [5:7488003670850650075:2469]: NKikimr::TEvPQ::TEvPipeDisconnected
2025-03-31T15:58:45.446702Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvPipeDisconnected
2025-03-31T15:58:45.446736Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner.
2025-03-31T15:58:45.446772Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessChangeOwnerRequests.
2025-03-31T15:58:45.446808Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff
2025-03-31T15:58:45.446880Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest
2025-03-31T15:58:45.446906Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests.
2025-03-31T15:58:45.446934Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0
2025-03-31T15:58:45.511257Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7488003713800324259:2787], Partition 1, Sender [0:0:0], Recipient [5:7488003713800324347:2798], Cookie: 0
2025-03-31T15:58:45.511342Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7488003713800324347:2798]: NKikimr::TEvPQ::TEvUpdateAvailableSize
2025-03-31T15:58:45.511374Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize
2025-03-31T15:58:45.511433Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff
2025-03-31T15:58:45.511514Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest
2025-03-31T15:58:45.511543Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests.
2025-03-31T15:58:45.511574Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0
2025-03-31T15:58:45.511645Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7488003670850650014:2465], Partition 0, Sender [0:0:0], Recipient [5:7488003670850650075:2469], Cookie: 0
2025-03-31T15:58:45.511684Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7488003670850650075:2469]: NKikimr::TEvPQ::TEvUpdateAvailableSize
2025-03-31T15:58:45.511700Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize
2025-03-31T15:58:45.511729Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff
2025-03-31T15:58:45.511763Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest
2025-03-31T15:58:45.511779Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests.
2025-03-31T15:58:45.511797Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0
2025-03-31T15:58:45.517401Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7488003713800324255:2785], Partition 2, Sender [0:0:0], Recipient [5:7488003713800324350:2800], Cookie: 0
2025-03-31T15:58:45.517488Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7488003713800324350:2800]: NKikimr::TEvPQ::TEvUpdateAvailableSize
2025-03-31T15:58:45.517519Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize
2025-03-31T15:58:45.517589Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff
2025-03-31T15:58:45.517675Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest
2025-03-31T15:58:45.517705Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests.
2025-03-31T15:58:45.517741Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0
2025-03-31T15:58:45.615259Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7488003713800324259:2787], Partition 1, Sender [0:0:0], Recipient [5:7488003713800324347:2798], Cookie: 0
2025-03-31T15:58:45.615362Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7488003713800324347:2798]: NKikimr::TEvPQ::TEvUpdateAvailableSize
2025-03-31T15:58:45.615394Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize
2025-03-31T15:58:45.615451Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff
2025-03-31T15:58:45.615529Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest
2025-03-31T15:58:45.615557Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests.
2025-03-31T15:58:45.615588Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0
2025-03-31T15:58:45.615653Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7488003670850650014:2465], Partition 0, Sender [0:0:0], Recipient [5:7488003670850650075:2469], Cookie: 0
2025-03-31T15:58:45.615689Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7488003670850650075:2469]: NKikimr::TEvPQ::TEvUpdateAvailableSize
2025-03-31T15:58:45.615704Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize
2025-03-31T15:58:45.615730Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff
2025-03-31T15:58:45.615763Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest
2025-03-31T15:58:45.615786Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests.
2025-03-31T15:58:45.615808Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0
2025-03-31T15:58:45.619295Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7488003713800324255:2785], Partition 2, Sender [0:0:0], Recipient [5:7488003713800324350:2800], Cookie: 0
2025-03-31T15:58:45.619359Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7488003713800324350:2800]: NKikimr::TEvPQ::TEvUpdateAvailableSize
2025-03-31T15:58:45.619396Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize
2025-03-31T15:58:45.619446Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff
2025-03-31T15:58:45.619517Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest
2025-03-31T15:58:45.619554Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests.
2025-03-31T15:58:45.619592Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0
>> KqpJoinOrder::FiveWayJoinWithConstantFold+ColumnStore
>> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDefaultGroupAttributeGoodUseListOfHosts [GOOD]
>> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithCustomGroupAttributeGood
>> RetryPolicy::TWriteSession_RetryOnTargetCluster [GOOD]
>> RetryPolicy::TWriteSession_SwitchBackToLocalCluster
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_background_cleaning/unittest >> TSchemeshardBackgroundCleaningTest::TempInTemp [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:224:2060] recipient: [1:218:2142]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:224:2060] recipient: [1:218:2142]
Leader for TabletID 72057594046678944 is [1:235:2153] sender: [1:236:2060] recipient: [1:218:2142]
2025-03-31T15:56:53.252811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-03-31T15:56:53.253544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-31T15:56:53.253861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-03-31T15:56:53.254207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-03-31T15:56:53.254545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-03-31T15:56:53.254872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-03-31T15:56:53.266253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-31T15:56:53.270555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-03-31T15:56:53.285268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-03-31T15:56:54.107814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-31T15:56:54.107877Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T15:56:54.156502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-03-31T15:56:54.168291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-03-31T15:56:54.170471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-03-31T15:56:54.506596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-03-31T15:56:54.529334Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-03-31T15:56:54.581298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-03-31T15:56:54.588918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-03-31T15:56:54.746900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-03-31T15:56:54.820982Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-31T15:56:54.824117Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-31T15:56:54.832625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-03-31T15:56:54.833325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-31T15:56:54.834658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-03-31T15:56:54.837483Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-03-31T15:56:54.933772Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:235:2153] sender: [1:350:2060] recipient: [1:17:2064]
2025-03-31T15:56:55.608171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-03-31T15:56:55.610255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:56:55.616117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-03-31T15:56:55.635449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-03-31T15:56:55.636828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:56:55.665160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-03-31T15:56:55.665336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-03-31T15:56:55.665539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:56:55.665606Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-03-31T15:56:55.665639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-03-31T15:56:55.665687Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-03-31T15:56:55.678215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:56:55.678827Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-03-31T15:56:55.679419Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-03-31T15:56:55.686976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:56:55.687040Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:56:55.687093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-03-31T15:56:55.687143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-03-31T15:56:55.690212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-03-31T15:56:55.691904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-03-31T15:56:55.692066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-03-31T15:56:55.692860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-03-31T15:56:55.692979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 243 RawX2: 4294969454 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-03-31T15:56:55.693032Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-03-31T15:56:55.693464Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-03-31T15:56:55.693527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-03-31T15:56:55.693773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-03-31T15:56:55.693874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-03-31T15:56:55.696132Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-31T15:56:55.696175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-31T15:56:55.696333Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-31T15:56:55.696369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:317:2211], at schemeshard: 72057594046678944, txId: 1, path id: 1
2025-03-31T15:56:55.696649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:56:55.696688Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-03-31T15:56:55.696766Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-03-31T15:56:55.696807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-31T15:56:55.696841Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-03-31T15:56:55.696868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-31T15:56:55.696894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-03-31T15:56:55.696926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-31T15:56:55.696952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-03-31T15:56:55.696981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-03-31T15:56:55.697031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-03-31T15:56:55.697065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-03-31T15:56:55.697096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-03-31T15:56:55.698822Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-03-31T15:56:55.698916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-03-31T15:56:55.698945Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ...
ard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 3 PathOwnerId: 72057594046678944, cookie: 106
2025-03-31T15:58:48.392951Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 106
2025-03-31T15:58:48.392981Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 3
2025-03-31T15:58:48.393007Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1
2025-03-31T15:58:48.393074Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 106, subscribers: 0
2025-03-31T15:58:48.393114Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944
2025-03-31T15:58:48.394752Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944
2025-03-31T15:58:48.396393Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106
2025-03-31T15:58:48.396432Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944
2025-03-31T15:58:48.397597Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106
2025-03-31T15:58:48.397632Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944
TestModificationResult got TxId: 106, wait until txId: 106
TestWaitNotification wait txId: 106
2025-03-31T15:58:48.397880Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion
2025-03-31T15:58:48.397966Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106
2025-03-31T15:58:48.398351Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [7:683:2509], Recipient [7:235:2153]: NKikimr::TEvTabletPipe::TEvServerConnected
2025-03-31T15:58:48.398406Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected
2025-03-31T15:58:48.398443Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944
2025-03-31T15:58:48.398582Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [7:578:2404], Recipient [7:235:2153]: NKikimrScheme.TEvNotifyTxCompletion TxId: 106
2025-03-31T15:58:48.398618Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion
2025-03-31T15:58:48.398694Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944
2025-03-31T15:58:48.398795Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult
2025-03-31T15:58:48.398835Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [7:681:2507]
2025-03-31T15:58:48.399023Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:683:2509], Recipient [7:235:2153]: NKikimr::TEvTabletPipe::TEvServerDisconnected
2025-03-31T15:58:48.399052Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected
2025-03-31T15:58:48.399104Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944
TestWaitNotification: OK eventTxId 106
TestModificationResults wait txId: 107
2025-03-31T15:58:48.399540Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [8:552:2102], Recipient [7:235:2153]
2025-03-31T15:58:48.399590Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction
2025-03-31T15:58:48.402148Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/test/tmp/a/b" OperationType: ESchemeOpMkDir MkDir { Name: "tmp2" } TempDirOwnerActorId { RawX1: 552 RawX2: 34359740470 } AllowCreateInTempDir: false } TxId: 107 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-03-31T15:58:48.402797Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /MyRoot/test/tmp/a/b/tmp2, operationId: 107:0, at schemeshard: 72057594046678944
2025-03-31T15:58:48.402952Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 107:1, propose status:StatusPreconditionFailed, reason: Check failed: path: '/MyRoot/test/tmp/a/b', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 5], type: EPathTypeDir, state: EPathStateNoChanges), at schemeshard: 72057594046678944
2025-03-31T15:58:48.403205Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944
2025-03-31T15:58:48.405674Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 107, response: Status: StatusPreconditionFailed Reason: "Check failed: path: \'/MyRoot/test/tmp/a/b\', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 5], type: EPathTypeDir, state: EPathStateNoChanges)" TxId: 107 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944
2025-03-31T15:58:48.405888Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 107, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Check failed: path: '/MyRoot/test/tmp/a/b', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 5], type: EPathTypeDir, state: EPathStateNoChanges), operation: CREATE DIRECTORY, path: /MyRoot/test/tmp/a/b/tmp2
2025-03-31T15:58:48.405962Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944
TestModificationResult got TxId: 107, wait until txId: 107
TestWaitNotification wait txId: 107
2025-03-31T15:58:48.406373Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: send EvNotifyTxCompletion
2025-03-31T15:58:48.406414Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 107
2025-03-31T15:58:48.406825Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [7:689:2515], Recipient [7:235:2153]: NKikimr::TEvTabletPipe::TEvServerConnected
2025-03-31T15:58:48.406876Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected
2025-03-31T15:58:48.406913Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944
2025-03-31T15:58:48.407014Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [7:578:2404], Recipient [7:235:2153]: NKikimrScheme.TEvNotifyTxCompletion TxId: 107
2025-03-31T15:58:48.407050Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion
2025-03-31T15:58:48.407143Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 107, at schemeshard: 72057594046678944
2025-03-31T15:58:48.407267Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult
2025-03-31T15:58:48.407309Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [7:687:2513]
2025-03-31T15:58:48.407507Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:689:2515], Recipient [7:235:2153]: NKikimr::TEvTabletPipe::TEvServerDisconnected
2025-03-31T15:58:48.407554Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected
2025-03-31T15:58:48.407592Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944
TestWaitNotification: OK eventTxId 107
TestModificationResults wait txId: 108
2025-03-31T15:58:48.408004Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [8:552:2102], Recipient [7:235:2153]
2025-03-31T15:58:48.408047Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction
2025-03-31T15:58:48.410815Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/test/tmp/a/b" OperationType: ESchemeOpMkDir MkDir { Name: "tmp2" } TempDirOwnerActorId { RawX1: 552 RawX2: 34359740470 } AllowCreateInTempDir: true } TxId: 108 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-03-31T15:58:48.411140Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /MyRoot/test/tmp/a/b/tmp2, operationId: 108:0, at schemeshard: 72057594046678944
2025-03-31T15:58:48.411209Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 108:1, propose status:StatusPreconditionFailed, reason: Can't create temporary directory while flag AllowCreateInTempDir is set. Temporary directory can't be created in another temporary directory., at schemeshard: 72057594046678944
2025-03-31T15:58:48.411413Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944
2025-03-31T15:58:48.414346Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 108, response: Status: StatusPreconditionFailed Reason: "Can\'t create temporary directory while flag AllowCreateInTempDir is set. Temporary directory can\'t be created in another temporary directory." TxId: 108 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944
2025-03-31T15:58:48.414569Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 108, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Can't create temporary directory while flag AllowCreateInTempDir is set.
Temporary directory can't be created in another temporary directory., operation: CREATE DIRECTORY, path: /MyRoot/test/tmp/a/b/tmp2 2025-03-31T15:58:48.414647Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 2025-03-31T15:58:48.415047Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: send EvNotifyTxCompletion 2025-03-31T15:58:48.415109Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 108 2025-03-31T15:58:48.415521Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [7:695:2521], Recipient [7:235:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T15:58:48.415573Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T15:58:48.415611Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-03-31T15:58:48.415748Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [7:578:2404], Recipient [7:235:2153]: NKikimrScheme.TEvNotifyTxCompletion TxId: 108 2025-03-31T15:58:48.415784Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-03-31T15:58:48.415863Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 108, at schemeshard: 72057594046678944 2025-03-31T15:58:48.415968Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2025-03-31T15:58:48.416008Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [7:693:2519] 2025-03-31T15:58:48.416176Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:695:2521], Recipient [7:235:2153]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-03-31T15:58:48.416207Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-03-31T15:58:48.416249Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 108 >> KqpIndexLookupJoin::CheckCastUint64ToInt64-StreamLookupJoin-NotNull ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceDistributedSelectViaReadActors [GOOD] Test command err: 2025-03-31T15:58:43.612581Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:58:43.612836Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:43.613001Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0015e1/r3tmp/tmpT5oCMe/pdisk_1.dat 2025-03-31T15:58:44.335628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T15:58:44.448805Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:58:44.499624Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:58:44.499790Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:58:44.513712Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:58:44.645373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T15:58:47.170338Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:934:2776], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:47.170445Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:945:2781], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:47.170557Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:47.176390Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-31T15:58:47.204895Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-03-31T15:58:47.405431Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:948:2784], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-31T15:58:47.512988Z node 1 :TX_PROXY ERROR: Actor# [1:1017:2833] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:58:47.965104Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqpetgw0a8wys4j2bwcsaj4p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDBlZmYzZTEtNDRmMDU3YTMtYmE1YTk1OWQtOGNkZTU2N2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:48.186876Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqpethp48a354s01j2kwj396, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2VjZjFmZTItZjVmZjg1OWItYjA4YjEyMTgtZjQ0Yzg4M2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:48.427787Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqpethx66nmaty1af6q6x22h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWQyMzc3Zi00NDkyNWI5MS1lOWNlYjIwOS0xNDhmNTk3NA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> KqpIndexLookupJoin::CheckCastInt64ToUint64+StreamLookupJoin-NotNull ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::DelayedVolatileTxAndEvWrite [GOOD] Test command err: 2025-03-31T15:57:54.865434Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:57:54.865679Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:57:54.865837Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001222/r3tmp/tmpdYBZeA/pdisk_1.dat 2025-03-31T15:57:55.372274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T15:57:55.451784Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:57:55.501487Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:57:55.501654Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:57:55.516548Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:57:55.628332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T15:57:55.764800Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-03-31T15:57:55.766118Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-03-31T15:57:55.766682Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-31T15:57:55.766962Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:57:55.860732Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T15:57:55.861625Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:57:55.861765Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T15:57:55.868090Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T15:57:55.868211Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T15:57:55.868297Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T15:57:55.868747Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T15:57:55.868967Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T15:57:55.869083Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-31T15:57:55.880508Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T15:57:55.991012Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T15:57:55.991687Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T15:57:55.991836Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-31T15:57:55.991880Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T15:57:55.991918Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T15:57:55.991979Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T15:57:55.992266Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T15:57:55.992340Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T15:57:55.992688Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T15:57:55.992799Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T15:57:55.992922Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T15:57:55.992965Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T15:57:55.993029Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-31T15:57:55.993079Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-31T15:57:55.993116Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-31T15:57:55.993156Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T15:57:55.993207Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T15:57:55.993332Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T15:57:55.993377Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T15:57:55.993432Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-31T15:57:55.993587Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-03-31T15:57:55.993654Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T15:57:55.993815Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T15:57:55.994108Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-31T15:57:55.994185Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T15:57:55.994327Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T15:57:55.994381Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-31T15:57:55.994427Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-31T15:57:55.994464Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-03-31T15:57:55.994500Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-31T15:57:55.994824Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-31T15:57:55.994887Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-31T15:57:55.994938Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-31T15:57:55.994975Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-31T15:57:55.995028Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-31T15:57:55.995062Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-31T15:57:55.999718Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-31T15:57:55.999799Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-31T15:57:55.999837Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-31T15:57:56.002281Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-31T15:57:56.002358Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T15:57:56.016261Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T15:57:56.016350Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-31T15:57:56.016411Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-31T15:57:56.016459Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-03-31T15:57:56.016553Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-31T15:57:56.197805Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T15:57:56.197907Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T15:57:56.197951Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-31T15:57:56.198417Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:568:2495], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-03-31T15:57:56.198463Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-31T15:57:56.198601Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-31T15:57:56.198676Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-03-31T15:57:56.198722Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-03-31T15:57:56.198770Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-03-31T15:57:56.253381Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T15:57:56.253507Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T15:57:56.254384Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T15:57:56.254436Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T15:57:56.254501Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T15:57:5 ... p# 1508 txid# 281474976715660 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-03-31T15:58:46.937890Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-03-31T15:58:46.937924Z node 7 :TX_DATASHARD DEBUG: Receive RS at 72075186224037889 source 72075186224037888 dest 72075186224037889 producer 72075186224037888 txId 281474976715660 2025-03-31T15:58:46.937976Z node 7 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 1508 txid# 281474976715660 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-03-31T15:58:46.938082Z node 7 :TX_DATASHARD DEBUG: Complete volatile write [1508 : 281474976715660] from 72075186224037889 at tablet 72075186224037889 send result to client [7:916:2664] 2025-03-31T15:58:46.939178Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T15:58:46.939433Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-31T15:58:46.939935Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T15:58:46.941731Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T15:58:46.943297Z node 7 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037888 2025-03-31T15:58:46.943465Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [7:697:2585], Recipient [7:699:2587]: {TEvReadSet step# 1508 txid# 281474976715660 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 1} 2025-03-31T15:58:46.943517Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T15:58:46.943582Z node 7 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715660 2025-03-31T15:58:46.943789Z node 7 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037889 2025-03-31T15:58:46.944203Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender 
[7:699:2587], Recipient [7:697:2585]: {TEvReadSet step# 1508 txid# 281474976715660 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 1} 2025-03-31T15:58:46.944246Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T15:58:46.944280Z node 7 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 281474976715660 ... validating table 2025-03-31T15:58:48.067787Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqpetgnna0kh76gzrxsb4q12, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OGU0Y2VhNDgtMWMxM2E3YTctZWIzMzVhNjQtNzljMDJlMjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:48.145169Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [7:969:2782], Recipient [7:697:2585]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 2 Columns: 3 Columns: 4 Columns: 1 Snapshot { Step: 1508 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2025-03-31T15:58:48.145414Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-03-31T15:58:48.145512Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit CheckRead 2025-03-31T15:58:48.145662Z node 7 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-03-31T15:58:48.145715Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit CheckRead 2025-03-31T15:58:48.145763Z node 7 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-03-31T15:58:48.145825Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit BuildAndWaitDependencies 2025-03-31T15:58:48.145879Z node 7 :TX_DATASHARD TRACE: Activated operation [0:5] at 72075186224037888 2025-03-31T15:58:48.145947Z node 7 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-03-31T15:58:48.145977Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-03-31T15:58:48.146001Z node 7 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit ExecuteRead 2025-03-31T15:58:48.146024Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit ExecuteRead 2025-03-31T15:58:48.146178Z node 7 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 2 Columns: 3 Columns: 4 Columns: 1 Snapshot { Step: 1508 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2025-03-31T15:58:48.146463Z node 7 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v1508/18446744073709551615 2025-03-31T15:58:48.146532Z node 7 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[7:969:2782], 0} after executionsCount# 1 2025-03-31T15:58:48.146595Z node 7 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[7:969:2782], 0} sends rowCount# 1, bytes# 64, quota rows left# 1000, quota bytes left# 5242816, 
hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-03-31T15:58:48.146701Z node 7 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[7:969:2782], 0} finished in read 2025-03-31T15:58:48.146782Z node 7 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-03-31T15:58:48.146810Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit ExecuteRead 2025-03-31T15:58:48.146838Z node 7 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit CompletedOperations 2025-03-31T15:58:48.146863Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit CompletedOperations 2025-03-31T15:58:48.146906Z node 7 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-03-31T15:58:48.146930Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit CompletedOperations 2025-03-31T15:58:48.146958Z node 7 :TX_DATASHARD TRACE: Execution plan for [0:5] at 72075186224037888 has finished 2025-03-31T15:58:48.147008Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-03-31T15:58:48.147168Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-03-31T15:58:48.151824Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [7:969:2782], Recipient [7:697:2585]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-03-31T15:58:48.151923Z node 7 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } 2025-03-31T15:58:48.152666Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [7:969:2782], Recipient [7:699:2587]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 2 Columns: 3 Columns: 4 Columns: 1 Snapshot { Step: 1508 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1000 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1000 RangesSize: 1 2025-03-31T15:58:48.152801Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2025-03-31T15:58:48.152868Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037889 on unit CheckRead 2025-03-31T15:58:48.152951Z node 7 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037889 is Executed 2025-03-31T15:58:48.152986Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037889 executing on unit CheckRead 2025-03-31T15:58:48.153013Z node 7 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-03-31T15:58:48.153042Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037889 on unit BuildAndWaitDependencies 2025-03-31T15:58:48.153086Z node 7 :TX_DATASHARD TRACE: Activated operation [0:3] at 72075186224037889 2025-03-31T15:58:48.153116Z node 7 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037889 is Executed 2025-03-31T15:58:48.153139Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-03-31T15:58:48.153186Z node 7 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037889 to execution unit ExecuteRead 2025-03-31T15:58:48.153211Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037889 on unit ExecuteRead 2025-03-31T15:58:48.153315Z node 7 :TX_DATASHARD TRACE: 72075186224037889 Execute read# 1, request: { ReadId: 1 TableId { 
OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 2 Columns: 3 Columns: 4 Columns: 1 Snapshot { Step: 1508 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1000 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1000 } 2025-03-31T15:58:48.153512Z node 7 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037889 promoting UnprotectedReadEdge to v1508/18446744073709551615 2025-03-31T15:58:48.153566Z node 7 :TX_DATASHARD TRACE: 72075186224037889 Complete read# {[7:969:2782], 1} after executionsCount# 1 2025-03-31T15:58:48.153604Z node 7 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[7:969:2782], 1} sends rowCount# 1, bytes# 64, quota rows left# 999, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-03-31T15:58:48.153671Z node 7 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[7:969:2782], 1} finished in read 2025-03-31T15:58:48.153724Z node 7 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037889 is Executed 2025-03-31T15:58:48.153749Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037889 executing on unit ExecuteRead 2025-03-31T15:58:48.153773Z node 7 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037889 to execution unit CompletedOperations 2025-03-31T15:58:48.153800Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037889 on unit CompletedOperations 2025-03-31T15:58:48.153839Z node 7 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037889 is Executed 2025-03-31T15:58:48.153860Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037889 executing on unit CompletedOperations 2025-03-31T15:58:48.153885Z node 7 :TX_DATASHARD TRACE: Execution plan for [0:3] at 72075186224037889 has finished 2025-03-31T15:58:48.153913Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2025-03-31T15:58:48.153995Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2025-03-31T15:58:48.154839Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [7:969:2782], Recipient [7:699:2587]: NKikimrTxDataShard.TEvReadCancel ReadId: 1 2025-03-31T15:58:48.154896Z node 7 :TX_DATASHARD TRACE: 72075186224037889 ReadCancel: { ReadId: 1 } { items { int32_value: 1 } items { int32_value: 2 } items { int32_value: 3 } items { int32_value: 4 } }, { items { int32_value: 11 } items { int32_value: 12 } items { int32_value: 12 } items { int32_value: 12 } } >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-5 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-6 |68.5%| [TA] $(B)/ydb/core/tx/datashard/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpJoinOrder::TPCDS92+ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::Simple [GOOD] Test command err: 2025-03-31T15:58:31.989390Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:58:31.989709Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:31.989895Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00165d/r3tmp/tmp7CAsLG/pdisk_1.dat 2025-03-31T15:58:32.707455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T15:58:32.802326Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:58:32.880993Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:58:32.881137Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:58:32.898304Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:58:33.029297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T15:58:33.537516Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:742:2623], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:33.537640Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:752:2628], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:33.537751Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:33.556115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T15:58:33.811776Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:756:2631], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T15:58:33.913934Z node 1 :TX_PROXY ERROR: Actor# [1:830:2674] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:58:34.445427Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqpet3hy2tr2wmndy6dvzq8x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWMyNGIxYzEtZDc0NGFkZGYtMzg3ODQzMi1jNzNlOTVjNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:34.599631Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqpet4ew99wcy58ytrthvxq5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTM2NTkyZDUtOGVhMjI1OWEtNDQyZmU5MmItMTdjYjc3Yjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:34.843561Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqpet4kpc2hyfkj29w1bgn5a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDFjM2MxYzgtYTJhNWMzNmItZmU4NjMzNGUtZmIzNmM4YzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:35.038186Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqpet4vm003m5hstymyyzybg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjMyYWY2YzEtYWU0NzU5OTAtYmViYzU4ODctZTVlMjRmNGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:35.273793Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jqpet51e7cbxh01x2bfwg9de, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzE4Mjc5ZjgtOWM1ZTVmZTMtYWY4MGFlMjAtOGEyODBlYWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:35.445279Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jqpet5937cepnhv6asrzkf8r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODBlM2I1NDAtNjQ2NTYzOWItNmYwODc1ZWMtNTg1OTAyNzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:35.575545Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jqpet5e62s36p1a25s8wnpfd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGRmN2M1NjctN2VmZmE0YjMtNWJjMTE0M2ItMTcxMWYyZTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:35.786477Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jqpet5hy7fkq002arkt8y0ej, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWZkNGRmYmEtNjJjYTNiMTItNjlhYTg4ZjEtZTAxMmNlMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:35.960869Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jqpet5s23v83vecqps7y9g1r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmY1OTc5MDItNzA4OTM3NmYtZDIzNjQyYS1kNGRmYWUzZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:36.029639Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715669. 
Ctx: { TraceId: 01jqpet5y0a1fmpwvw4xc47vf6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmZhMTU5NjAtZTgwZTBmODgtNzgxZDk1NTQtNjUzYThjNTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:36.112099Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01jqpet6051fnpw9wred7c7whm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2ZmY2QzMzUtMTRiYWQ2MWEtMWUyYWE5MTktOTIwYjc0ZjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:36.199504Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01jqpet62s8g7z1b6hyv2722m0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWU1M2I5OTItYjdlNzc5NTEtYmY5ZTQwZWQtNGRjYTdiMTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:36.282602Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jqpet65f4b7v4kdyqyrk116c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTJiNzM1OC1jZTZlNzg0ZC1iNGYxNmMzMy1lYTU3MjIzZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:36.390702Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01jqpet683d4hyz8y630atcqa2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTBhNGJjOC1mMWYzYzljLTk2YzIwN2FjLWZkNWVhZWNh, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:36.463808Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01jqpet6be3hz83nfyjsgcmz3g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODhiYmIxMTUtZWU2MTRiNzctYWJjNjcyYWEtZjk3M2RmNzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:36.548961Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01jqpet6ds0w4pd2yjvh6nmhp0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzUyNzA5ZTctMzRmN2I0ZGEtNWZhMWNlZi02Y2ZkNjNhNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:36.624087Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715676. Ctx: { TraceId: 01jqpet6gd95691cwdcqb8hr07, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjUwMTY2MDUtMjRhNjEzM2YtNTc3NmZlOGItZTIxMjgwZTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:36.703929Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715677. Ctx: { TraceId: 01jqpet6jr8y12snnbm3g46q6x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmU0ZWU1ZWMtMzFhNDQ3NjEtYjQ0MThiMTQtMmZhNTEwYjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:36.800391Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715678. Ctx: { TraceId: 01jqpet6n76g7tv0pfqmzqpgdz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGViOGEwNDktOTBmOTQ1ODctY2RkMTUyYjQtOWFlNTM0MDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:36.956468Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715679. 
Ctx: { TraceId: 01jqpet6r8ck424q6x2xf07n22, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTNjY2IzZWEtOWUwM2U1MDYtZDFkYTkzOS0zMGY2OGVkNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:37.074508Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715680. Ctx: { TraceId: 01jqpet6xde4rwwx83kn1780av, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzYyY2ZjZWItN2NjNDFmYTEtYWVhZTE0YTItZjg0MDJkMjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:37.220965Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715681. Ctx: { TraceId: 01jqpet70teekt9ax4rn9dc0yw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2ZjMTk4ZTctYTA3ZDExY2QtYTBhZGJlNzMtODg3ZmM3MDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:37.373114Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715682. Ctx: { TraceId: 01jqpet75k3mh9jpec5djrg27g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2E3Yjk2ODMtNzVlYjBhOTgtMzI3OWIxMGYtOThiMzExY2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:37.509428Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715683. Ctx: { TraceId: 01jqpet7aqdgk72by82gm9mhrh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODg3Y2IxMGQtZDg2ZjZhYWYtNzMyM2U5ZDktNDE3ZDY2ZjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:37.666816Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715684. Ctx: { TraceId: 01jqpet7ey1hvth7zy09kaytt2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWQ4ZDBkNTktZjJmMmFlZWMtMmE3YTJhMmUtY2YzMjU1NzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:37.884009Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715685. Ctx: { TraceId: 01jqpet7kw52ap9qxcy4tjdv1v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODY4MzY2ZmYtZTU3MGY5OTAtMmYzYTE2N2EtMzlmOWMzYWU= ... 992164Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715725. Ctx: { TraceId: 01jqpeten4eqx747d9ctn3mw9g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmI0MTA5MGMtMjcyYTZkNTgtYjcwYmY1ZGEtYmRkMmVhODk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:45.066130Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715726. Ctx: { TraceId: 01jqpeter8da52n3wjrhynb2p3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2ExZWUxOWEtYjdlYjkzMDItMzdhOWQwN2EtZTkxZTRjMzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:45.155453Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715727. Ctx: { TraceId: 01jqpetetj38mceq3gd32n5k8m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDQ5OGY4ZTAtOWE2OGZhNTMtNGM5NGM2NGMtNWZkNDhjMjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:45.353548Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715728. Ctx: { TraceId: 01jqpetexcbpzzn1kkc7f3zb14, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODczNTJjMzItY2UwMjc2YzktYzBhNDQzNjctYjcyZTczZDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root
2025-03-31T15:58:45.447363Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715729. Ctx: { TraceId: 01jqpetf3tctxbp8kvz4bjn2kn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTlhNTQ2MzMtZWM3NGM4ZWEtNWQ1NzRhYjktMmIzYjQ0NmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T15:58:45.540305Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715730. Ctx: { TraceId: 01jqpetf6g7d5csb2f5ntvans1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjUyNzBhODItOTVmNzIzNWQtNzJiNWRlMzEtZmVkMmQwODY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T15:58:45.624774Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715731. Ctx: { TraceId: 01jqpetf9d73vdqjwze75hnv38, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWYzYmZjMmItZDY0YTAxNDUtNTk4NDI2MTMtODE0MDdjOGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T15:58:45.724302Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715732. Ctx: { TraceId: 01jqpetfc1824y4m0g0vwbmkgy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjM4ZjY3MDctZmJmN2JlODYtNGY3ZGYxNWQtMjFiNmQ0OTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T15:58:45.810850Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715733. Ctx: { TraceId: 01jqpetff4f4j16pay86wps1nk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmU4ZGFkN2UtNjZlMzAyMjgtMWZiODMxYjgtMWI4ZDcyNmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T15:58:45.942470Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715734. Ctx: { TraceId: 01jqpetfhx29vctgd5wn7z3wjf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTM5ZGY3Yi04OWRkM2E3Yy1jNGVhMzRkMS00ZWNjNGM4Mg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T15:58:46.017537Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715735. Ctx: { TraceId: 01jqpetfnz8v0wc5gg0968cq1y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjYxYzkyY2UtYTBlZGI3MTAtYzU5YTgwOWYtODU5ZTkxZjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T15:58:46.091703Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715736. Ctx: { TraceId: 01jqpetfrae5999q9a9f3mndgy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGYyMjZkOTYtMWY4MGIyNzAtZTNjZjc1MzUtNjM2NjUzNmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T15:58:46.164317Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715737. Ctx: { TraceId: 01jqpetftm6rv7cs41z73qhrj2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzY0OTI3Yi1kNDIyOTgzYi0yOGRjMDkzYi1lN2Q1ODU3YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T15:58:46.236081Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715738. Ctx: { TraceId: 01jqpetfwwc3z42n87j740yzen, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODc0YTM2MjctNGVlMzE3ZDAtYzdmNmEwYzItZTA5Mjc5ODY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T15:58:46.310369Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715739. Ctx: { TraceId: 01jqpetfz426ddpbm2vqgh0wge, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTFhYzg0MDUtYjA1OWUyOWUtNDU5YzBhZTgtMzY3ZmE4MTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T15:58:46.386768Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715740. Ctx: { TraceId: 01jqpetg1e64hvw9xbmrrz1ck0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTM5MGFjMTktYzAyMDVlYzctYTIyOWI2N2UtMWU3NzMzZTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T15:58:46.467108Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715741. Ctx: { TraceId: 01jqpetg3v3599xk5cg2qha9rt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmFhNTE2NzQtMjU5ZjE0MGQtMmJiMmNjMzItY2U5NTJkZTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T15:58:46.546233Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715742. Ctx: { TraceId: 01jqpetg6bct6xznh1fmhjhqys, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTAxY2VjZTUtYTU0YTNlODMtMTgxMWMzYTktZTZiM2FmZTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T15:58:46.667718Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715743. Ctx: { TraceId: 01jqpetg8w501cvssx1p1gxt1j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2QzOTNlY2MtYjBmYTI2ZTctNGU2OWNkZDAtZTlkNjU4ZjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T15:58:46.743846Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715744. Ctx: { TraceId: 01jqpetgck0qckpxqetdyggf0x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjJjNDRjMmUtMThhMDIyNTEtZWNiMDJkY2EtZTNlZGU4NjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T15:58:46.810679Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715745. Ctx: { TraceId: 01jqpetgf0fkvejgw1h50m8xr0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODFmMmEzYWMtYzg5YTk2MzktNTdmMzRjZWItNjM5NWFhYzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T15:58:46.902356Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715746. Ctx: { TraceId: 01jqpetgh450dkret4dcc6fzpk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjdkOWQ3MTUtMmY1NzFkMjYtNjMyMWY0ZTQtMmJmMWY2ZmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T15:58:46.991551Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715747. Ctx: { TraceId: 01jqpetgkz38dphwsz7wjnmsgn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Nzk3OTkzNDEtZTAzYjJmMDUtMzhjODljZDctNmM4ZjkzMDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T15:58:47.066461Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715748. Ctx: { TraceId: 01jqpetgps4ksab8w68vxwnsep, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzE1ZjFmZTUtYWEyMWRkZTctZGM5ZjY1YWEtNTM2MjA3ZWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T15:58:47.140451Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715749. Ctx: { TraceId: 01jqpetgs2cca2ga0jp72d6krv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTUxMTAxZDgtNWQ2OTg3YzUtMWU0ZWQ5NDYtNDcwMmZjOGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T15:58:47.219809Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715750. Ctx: { TraceId: 01jqpetgvd3a8hs4vmb8wkh239, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjZmYzYzNjItMTBhNDYyY2UtNGU2MmExZWUtMjY2NDE2YTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T15:58:47.294558Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715751. Ctx: { TraceId: 01jqpetgxwfhk0t6vpwwhbrav6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzQ3OGVkNS1kNDE1ODY3OS02NmVlN2Y5NS0xZmIxOThh, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T15:58:47.393439Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715752. Ctx: { TraceId: 01jqpeth08dn9g4pjs4989yp8g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTZmMjNiN2EtYWY1NzYwYjEtNjg3M2JmZjItNjc0NmIxMTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T15:58:47.456234Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715753. Ctx: { TraceId: 01jqpeth38aw982fkwkmq3x7hd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODA1MWJmNzAtNTgxNjY4ZmMtYThjN2NjNjItOGEyMzc0YjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T15:58:47.550772Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715754. Ctx: { TraceId: 01jqpeth58dtk7exg2syv7c19b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjFiZjFlMDUtNWEyOTFlMWQtZDViNGY3NzctMWViM2JhNTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T15:58:47.635026Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715755. Ctx: { TraceId: 01jqpeth8r4kmjxev8e3375d2w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTljNDQ3YTEtNmRmM2QxMWQtNWIzNjIxOTUtMWYwYjBjMjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T15:58:47.744838Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715756. Ctx: { TraceId: 01jqpethaxft5hsgj4tgjsnaqt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjVjZTRmODItZjRiODc2OS05NWMwNDFjMy1mMzI1ZWY4YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T15:58:47.922912Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715757. Ctx: { TraceId: 01jqpethej8qbnp3s1ach69094, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmY3MjcwZTItMTA3YTYyNjgtYzliNWI5N2UtYzNlMjI1Y2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T15:58:48.161673Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715758. Ctx: { TraceId: 01jqpethma8q60m5q4n8q6vhph, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWY4Mzg3ZTItYTRjMTgyODAtZjY2ZWE4My1iNzA1MmE3Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T15:58:48.404372Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715759. Ctx: { TraceId: 01jqpethw4f4g3rxyepd6wpjjf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTVlNzIwNDktYTYyMTFkZmYtZDg3ZWM1NzUtYzIzOTdhMmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T15:58:49.144127Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715760. Ctx: { TraceId: 01jqpetjak3cca3wfyg090k994, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjAwNmZlYWEtOTJjNjAxMmItNTE1MjY1MjctYzM2ZmQxYmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
>> Yq_1::ListConnectionsOnEmptyConnectionsTable [GOOD]
>> KqpIndexLookupJoin::MultiJoins
>> TTxDataShardUploadRows::TestUploadRows
>> KqpIndexLookupJoin::JoinByComplexKeyWithNullComponents+StreamLookupJoin
>> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_AutoscaleAwareSDK [GOOD]
>> CompressExecutor::TestExecutorMemUsage [GOOD]
>> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataSplitThenPublish
>> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflow
>> TTxDataShardUploadRows::TestUploadShadowRows
------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::ListConnectionsOnEmptyConnectionsTable [GOOD]
Test command err:
2025-03-31T15:58:11.844596Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488003571217139672:2075];send_to=[0:7307199536658146131:7762515];
2025-03-31T15:58:11.851389Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
E0331 15:58:12.656975951 40334 dns_resolver.cc:162] no server name supplied in dns URI
E0331 15:58:12.657169673 40334 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:///
2025-03-31T15:58:13.004539Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-31T15:58:13.672601Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:22333: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:22333 } ]
2025-03-31T15:58:13.860215Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:322: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:22333: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:22333
2025-03-31T15:58:14.010881Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-31T15:58:15.015315Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-31T15:58:15.315531Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:22333: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:22333 } ]
2025-03-31T15:58:16.018244Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-31T15:58:16.779615Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488003571217139672:2075];send_to=[0:7307199536658146131:7762515];
2025-03-31T15:58:16.779698Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-31T15:58:17.023866Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
E0331 15:58:17.662264487 40727 dns_resolver.cc:162] no server name supplied in dns URI
E0331 15:58:17.662394130 40727 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:///
2025-03-31T15:58:17.783733Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-31T15:58:18.024542Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-31T15:58:18.229701Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:322: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:22333: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:22333
2025-03-31T15:58:18.229882Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:22333: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:22333 } ]
2025-03-31T15:58:18.787570Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-31T15:58:19.029259Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-31T15:58:19.239916Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7488003605576878496:2315], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-03-31T15:58:19.240078Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
2025-03-31T15:58:19.331600Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7488003605576878496:2315], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-03-31T15:58:19.480077Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7488003605576878496:2315], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000cff/r3tmp/tmppeH1zu/pdisk_1.dat
2025-03-31T15:58:19.817993Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-31T15:58:19.858340Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7488003605576878496:2315], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-03-31T15:58:20.018539Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T15:58:20.037657Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
TServer::EnableGrpc on GrpcPort 22333, node 1
2025-03-31T15:58:20.249957Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T15:58:20.249977Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T15:58:20.249984Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T15:58:20.250117Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
2025-03-31T15:58:20.983307Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/mappings". Create session OK
2025-03-31T15:58:20.983339Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings"
2025-03-31T15:58:20.983347Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings"
2025-03-31T15:58:20.984782Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/connections". Create session OK
2025-03-31T15:58:20.984799Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections"
2025-03-31T15:58:20.984805Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections"
2025-03-31T15:58:20.991279Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/compute_databases". Create session OK
2025-03-31T15:58:20.991308Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases"
2025-03-31T15:58:20.991314Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases"
2025-03-31T15:58:21.036186Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/pending_small". Create session OK
2025-03-31T15:58:21.036226Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small"
2025-03-31T15:58:21.036234Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small"
2025-03-31T15:58:21.043809Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/nodes". Create session OK
2025-03-31T15:58:21.043845Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes"
2025-03-31T15:58:21.043853Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes"
2025-03-31T15:58:21.045324Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/idempotency_keys". Create session OK
2025-03-31T15:58:21.045352Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys"
2025-03-31T15:58:21.045359Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys"
2025-03-31T15:58:21.049043Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenant_acks". Create session OK
2025-03-31T15:58:21.049062Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks"
2025-03-31T15:58:21.049068Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks"
2025-03-31T15:58:21.050606Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenants". Create session OK
2025-03-31T15:58:21.050637Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants"
2025-03-31T15:58:21.050653Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants"
2025-03-31T15:58:21.056998Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/bindings". Create session OK
2025-03-31T15:58:21.057018Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings"
2025-03-31T15:58:21.057026Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings"
2025-03-31T15:58:21.059050Z node 1 :TX_PROXY ERROR: Actor# [1:7488003614166813594:2502] txid# 281474976715659, issues: { message: "database doesn\'t have storage pools at all to create tablet channels to storage pool binding by profile id" severity: 1 }
2025-03-31T15:58:21.059290Z node 1 :TX_PROXY ERROR: Actor# [1:7488003614166813593:2501] txid# 281474976715658, issues: { message: "database doesn\'t have storage pools at all to create tablet channels to storage pool binding by profile id" severity: 1 }
2025-03-31T15:58:21.059407Z node 1 :TX_PROXY ERROR: Actor# [1:7488003614166813595:2503] txid# 281474976715660, issues: { message: "database doesn\'t have storage pools at all to create tablet channels to storage pool binding by profile id" severity: 1 }
2025-03-31T15:58:21.060938Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/result_sets". Create session OK
2025-03-31T15:58:21.060955Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets"
2025-03-31T15:58:21.060961Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets"
2025-03-31T15:58:21.063154Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/queries". Create session OK
2025-03-31T15:58:21.063187Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries"
2025-03-31T15:58:21.063195Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq ... ], task: 2
2025-03-31T15:58:48.415858Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003731266916464:2570], TxId: 281474976710688, task: 2. Ctx: { TraceId : 01jqpethnc23m4haze9sa9ka3r. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=YzA0NWIyMGQtNzA1YzFmNGUtYTJhNWFiNTktY2Q4YmZhMmE=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Set execution timeout 299.568556s
2025-03-31T15:58:48.416653Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003731266916464:2570], TxId: 281474976710688, task: 2. Ctx: { TraceId : 01jqpethnc23m4haze9sa9ka3r. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=YzA0NWIyMGQtNzA1YzFmNGUtYTJhNWFiNTktY2Q4YmZhMmE=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646926
2025-03-31T15:58:48.416765Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003731266916464:2570], TxId: 281474976710688, task: 2. Ctx: { TraceId : 01jqpethnc23m4haze9sa9ka3r. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=YzA0NWIyMGQtNzA1YzFmNGUtYTJhNWFiNTktY2Q4YmZhMmE=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 DstTaskId: 2 SrcEndpoint { ActorId { RawX1: 7488003731266916463 RawX2: 4503616807242249 } } DstEndpoint { ActorId { RawX1: 7488003731266916464 RawX2: 4503616807242250 } } InMemory: true DstStageId: 1 } Update { Id: 2 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 2 SrcEndpoint { ActorId { RawX1: 7488003731266916464 RawX2: 4503616807242250 } } DstEndpoint { ActorId { RawX1: 7488003731266916457 RawX2: 4503616807242114 } } InMemory: true }
2025-03-31T15:58:48.416776Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003731266916464:2570], TxId: 281474976710688, task: 2. Ctx: { TraceId : 01jqpethnc23m4haze9sa9ka3r. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=YzA0NWIyMGQtNzA1YzFmNGUtYTJhNWFiNTktY2Q4YmZhMmE=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Update input channelId: 1, peer: [4:7488003731266916463:2569]
2025-03-31T15:58:48.416836Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003731266916464:2570], TxId: 281474976710688, task: 2. Ctx: { TraceId : 01jqpethnc23m4haze9sa9ka3r. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=YzA0NWIyMGQtNzA1YzFmNGUtYTJhNWFiNTktY2Q4YmZhMmE=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646926
2025-03-31T15:58:48.416946Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003731266916464:2570], TxId: 281474976710688, task: 2. Ctx: { TraceId : 01jqpethnc23m4haze9sa9ka3r. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=YzA0NWIyMGQtNzA1YzFmNGUtYTJhNWFiNTktY2Q4YmZhMmE=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 DstTaskId: 2 SrcEndpoint { ActorId { RawX1: 7488003731266916463 RawX2: 4503616807242249 } } DstEndpoint { ActorId { RawX1: 7488003731266916464 RawX2: 4503616807242250 } } InMemory: true DstStageId: 1 } Update { Id: 2 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 2 SrcEndpoint { ActorId { RawX1: 7488003731266916464 RawX2: 4503616807242250 } } DstEndpoint { ActorId { RawX1: 7488003731266916457 RawX2: 4503616807242114 } } InMemory: true }
2025-03-31T15:58:48.416971Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003731266916464:2570], TxId: 281474976710688, task: 2. Ctx: { TraceId : 01jqpethnc23m4haze9sa9ka3r. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=YzA0NWIyMGQtNzA1YzFmNGUtYTJhNWFiNTktY2Q4YmZhMmE=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922
2025-03-31T15:58:48.419594Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710688, task: 1, CA Id [4:7488003731266916463:2569]. Recv TEvReadResult from ShardID=72075186224037893, ReadId=0, Status=SUCCESS, Finished=1, RowCount=0, TxLocks= , BrokenTxLocks=
2025-03-31T15:58:48.419619Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710688, task: 1, CA Id [4:7488003731266916463:2569]. Taken 0 locks
2025-03-31T15:58:48.419634Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710688, task: 1, CA Id [4:7488003731266916463:2569]. new data for read #0 seqno = 1 finished = 1
2025-03-31T15:58:48.419675Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003731266916463:2569], TxId: 281474976710688, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=YzA0NWIyMGQtNzA1YzFmNGUtYTJhNWFiNTktY2Q4YmZhMmE=. TraceId : 01jqpethnc23m4haze9sa9ka3r. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 276037645
2025-03-31T15:58:48.419704Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003731266916463:2569], TxId: 281474976710688, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=YzA0NWIyMGQtNzA1YzFmNGUtYTJhNWFiNTktY2Q4YmZhMmE=. TraceId : 01jqpethnc23m4haze9sa9ka3r. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922
2025-03-31T15:58:48.419730Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710688, task: 1, CA Id [4:7488003731266916463:2569]. enter getasyncinputdata results size 1, freeSpace 8388608
2025-03-31T15:58:48.419749Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710688, task: 1, CA Id [4:7488003731266916463:2569]. enter pack cells method shardId: 72075186224037893 processedRows: 0 packed rows: 0 freeSpace: 8388608
2025-03-31T15:58:48.419763Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710688, task: 1, CA Id [4:7488003731266916463:2569]. exit pack cells method shardId: 72075186224037893 processedRows: 0 packed rows: 0 freeSpace: 8388608
2025-03-31T15:58:48.419775Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710688, task: 1, CA Id [4:7488003731266916463:2569]. returned 0 rows; processed 0 rows
2025-03-31T15:58:48.419814Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710688, task: 1, CA Id [4:7488003731266916463:2569]. dropping batch for read #0
2025-03-31T15:58:48.419826Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710688, task: 1, CA Id [4:7488003731266916463:2569]. effective maxinflight 1 sorted 1
2025-03-31T15:58:48.419838Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710688, task: 1, CA Id [4:7488003731266916463:2569]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0,
2025-03-31T15:58:48.419856Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710688, task: 1, CA Id [4:7488003731266916463:2569]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0
2025-03-31T15:58:48.419947Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003731266916463:2569], TxId: 281474976710688, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=YzA0NWIyMGQtNzA1YzFmNGUtYTJhNWFiNTktY2Q4YmZhMmE=. TraceId : 01jqpethnc23m4haze9sa9ka3r. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1
2025-03-31T15:58:48.419964Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003731266916464:2570], TxId: 281474976710688, task: 2. Ctx: { TraceId : 01jqpethnc23m4haze9sa9ka3r. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=YzA0NWIyMGQtNzA1YzFmNGUtYTJhNWFiNTktY2Q4YmZhMmE=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646923
2025-03-31T15:58:48.419983Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710688, task: 2. Finish input channelId: 1, from: [4:7488003731266916463:2569]
2025-03-31T15:58:48.420009Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003731266916464:2570], TxId: 281474976710688, task: 2. Ctx: { TraceId : 01jqpethnc23m4haze9sa9ka3r. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=YzA0NWIyMGQtNzA1YzFmNGUtYTJhNWFiNTktY2Q4YmZhMmE=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922
2025-03-31T15:58:48.420063Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003731266916464:2570], TxId: 281474976710688, task: 2. Ctx: { TraceId : 01jqpethnc23m4haze9sa9ka3r. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=YzA0NWIyMGQtNzA1YzFmNGUtYTJhNWFiNTktY2Q4YmZhMmE=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1
2025-03-31T15:58:48.420091Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003731266916463:2569], TxId: 281474976710688, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=YzA0NWIyMGQtNzA1YzFmNGUtYTJhNWFiNTktY2Q4YmZhMmE=. TraceId : 01jqpethnc23m4haze9sa9ka3r. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646927
2025-03-31T15:58:48.420111Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003731266916463:2569], TxId: 281474976710688, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=YzA0NWIyMGQtNzA1YzFmNGUtYTJhNWFiNTktY2Q4YmZhMmE=. TraceId : 01jqpethnc23m4haze9sa9ka3r. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922
2025-03-31T15:58:48.420133Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710688, task: 1. Tasks execution finished
2025-03-31T15:58:48.420147Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003731266916463:2569], TxId: 281474976710688, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=YzA0NWIyMGQtNzA1YzFmNGUtYTJhNWFiNTktY2Q4YmZhMmE=. TraceId : 01jqpethnc23m4haze9sa9ka3r. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Compute state finished. All channels and sinks finished
2025-03-31T15:58:48.420275Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710688, task: 1. pass away
2025-03-31T15:58:48.420368Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976710688;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED };
2025-03-31T15:58:48.420745Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003731266916464:2570], TxId: 281474976710688, task: 2. Ctx: { TraceId : 01jqpethnc23m4haze9sa9ka3r. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=YzA0NWIyMGQtNzA1YzFmNGUtYTJhNWFiNTktY2Q4YmZhMmE=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922
2025-03-31T15:58:48.420780Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710688, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1]
2025-03-31T15:58:48.420789Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710688, task: 2. Tasks execution finished
2025-03-31T15:58:48.420798Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003731266916464:2570], TxId: 281474976710688, task: 2. Ctx: { TraceId : 01jqpethnc23m4haze9sa9ka3r. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=YzA0NWIyMGQtNzA1YzFmNGUtYTJhNWFiNTktY2Q4YmZhMmE=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Compute state finished. All channels and sinks finished
2025-03-31T15:58:48.420842Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710688, task: 2. pass away
2025-03-31T15:58:48.420886Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976710688;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED };
2025-03-31T15:58:48.843463Z node 4 :FQ_PENDING_FETCHER ERROR: Error with GetTask:
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv6:%5B::%5D:5665: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint [::]:5665
>> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataPublishThenSplit
>> Yq_1::CreateConnections_With_Idempotency [GOOD]
>> ReadLoad::ShouldReadKqp
>> DataShardSnapshots::MvccSnapshotLockedWritesWithoutConflicts-UseSink [GOOD]
>> DataShardSnapshots::MvccSnapshotReadLockedWrites+UseSink
>> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithCustomGroupAttributeGood [GOOD]
>> LdapAuthProviderTest_nonSecure::LdapFetchGroupsUseInvalidSearchFilterBad
|68.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation
|68.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation
|68.6%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log}
|68.6%| [LD] {RESULT} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation
------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_AutoscaleAwareSDK [GOOD]
Test command err:
2025-03-31T15:55:34.486682Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488002898932805877:2196];send_to=[0:7307199536658146131:7762515];
2025-03-31T15:55:34.487019Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
2025-03-31T15:55:35.628951Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001932/r3tmp/tmpdleAT2/pdisk_1.dat
2025-03-31T15:55:35.686863Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-31T15:55:36.440410Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T15:55:36.444814Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T15:55:36.444890Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T15:55:36.449731Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 20030, node 1
2025-03-31T15:55:36.667709Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/0l5c/001932/r3tmp/yandexmz8zcX.tmp
2025-03-31T15:55:36.667751Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/0l5c/001932/r3tmp/yandexmz8zcX.tmp
2025-03-31T15:55:36.667882Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/0l5c/001932/r3tmp/yandexmz8zcX.tmp
2025-03-31T15:55:36.667989Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
2025-03-31T15:55:36.775502Z INFO: TTestServer started on Port 21559 GrpcPort 20030
TClient is connected to server localhost:21559
PQClient connected to localhost:20030
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-31T15:55:37.252196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
waiting...
2025-03-31T15:55:37.293226Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
waiting...
2025-03-31T15:55:37.456386Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480
waiting...
2025-03-31T15:55:39.484726Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488002898932805877:2196];send_to=[0:7307199536658146131:7762515];
2025-03-31T15:55:39.485142Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-31T15:55:42.848971Z node 1 :KQP_PROXY ERROR: TraceId: "01jqpemqmy6re77cywmatf9zbt", Request deadline has expired for 0.357854s seconds
2025-03-31T15:55:42.874253Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488002933292544942:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T15:55:42.874467Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T15:55:42.876092Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488002933292544954:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T15:55:42.881252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480
2025-03-31T15:55:42.892958Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488002933292544971:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking }
2025-03-31T15:55:42.974094Z node 1 :TX_PROXY ERROR: Actor# [1:7488002933292545035:2467] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-31T15:55:43.592046Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488002933292545043:2357], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003
2025-03-31T15:55:43.742319Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDE2Y2VmZTYtNWJjODJmYWItYjczZjMwNzMtNmU3NGViOGI=, ActorId: [1:7488002933292544939:2345], ActorState: ExecuteState, TraceId: 01jqpemww39hvncm0sqhs5hmwk, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id:
2025-03-31T15:55:43.760842Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 }
2025-03-31T15:55:43.786810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-31T15:55:44.064754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-31T15:55:44.603845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
=== Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000);
2025-03-31T15:55:47.695383Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } TxMeta { } YdbResults { columns { name: "C.name" type { optional_type { item { type_id: UTF8 } } } } columns { name: "C.balancer" type { optional_type { item { type_id: UTF8 } } } } columns { name: "C.local" type { optional_type { item { type_id: BOOL } } } } columns { name: "C.enabled" type { optional_type { item { type_id: BOOL } } } } columns { name: "C.weight" type { optional_type { item { type_id: UINT64 } } } } columns { name: "V.version" type { optional_type { item { type_id: INT64 } } } } } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 1686 }
=== CheckClustersList. Subcribe to ClusterTracker from [1:7488002954767381891:2687]
2025-03-31T15:55:51.417032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs
2025-03-31T15:55:51.417391Z node 1 :IMPORT WARN: Table profiles were not loaded
=== CheckClustersList. Ok
2025-03-31T15:55:53.854531Z :TopicSplitMerge INFO: TTopicSdkTestSetup started
2025-03-31T15:55:53.991492Z node 1 :PQ_READ_PROXY DEBUG: new create topic request
2025-03-31T15:55:53.992603Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7488002980537185908:2822], Recipient [1:7488002907522740808:2214]: NKikimr::TEvTabletPipe::TEvServerConnected
2025-03-31T15:55:53.992634Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected
2025-03-31T15:55:53.992649Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480
2025-03-31T15:55:53.992687Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7488002980537185904:2819], Recipient [1:7488002907522740808:2214]: {TEvModifySchemeTransaction txid# 281474976710673 TabletId# 72057594046644480}
2025-03-31T15:55:53.992701Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction
2025-03-31T15:55:54.124653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 100 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercen ... e 5 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Destroy direct read session test-consumer_5_1_15107512362894502383_v1
2025-03-31T15:58:49.305817Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037896] server disconnected, pipe [5:7488003732950416395:2803] destroyed
2025-03-31T15:58:49.305904Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 269877764, Sender [5:7488003728655448891:3383], Recipient [5:7488003690000742071:2472]: NKikimr::TEvTabletPipe::TEvServerDisconnected
2025-03-31T15:58:49.305920Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvTabletPipe::TEvServerDisconnected
2025-03-31T15:58:49.305933Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected
2025-03-31T15:58:49.305946Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session test-consumer_5_1_15107512362894502383_v1
2025-03-31T15:58:49.305967Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [5:7488003728655448890:2766] destroyed
2025-03-31T15:58:49.306015Z node 5 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_5_1_15107512362894502383_v1
2025-03-31T15:58:49.306034Z node 5 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_5_1_15107512362894502383_v1
2025-03-31T15:58:49.306049Z node 5 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_5_1_15107512362894502383_v1
2025-03-31T15:58:49.371444Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7488003690000742071:2472], Partition 0, Sender [0:0:0], Recipient [5:7488003690000742129:2476], Cookie: 0
2025-03-31T15:58:49.371529Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7488003690000742129:2476]: NKikimr::TEvPQ::TEvUpdateAvailableSize
2025-03-31T15:58:49.371554Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize
2025-03-31T15:58:49.371600Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff
2025-03-31T15:58:49.371677Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest
2025-03-31T15:58:49.371698Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests.
2025-03-31T15:58:49.371729Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0
2025-03-31T15:58:49.371781Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7488003732950416271:2784], Partition 2, Sender [0:0:0], Recipient [5:7488003732950416361:2794], Cookie: 0
2025-03-31T15:58:49.371815Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7488003732950416361:2794]: NKikimr::TEvPQ::TEvUpdateAvailableSize
2025-03-31T15:58:49.371830Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize
2025-03-31T15:58:49.371854Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff
2025-03-31T15:58:49.371882Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest
2025-03-31T15:58:49.371897Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests.
2025-03-31T15:58:49.371915Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0
2025-03-31T15:58:49.371956Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7488003732950416274:2785], Partition 1, Sender [0:0:0], Recipient [5:7488003732950416364:2796], Cookie: 0
2025-03-31T15:58:49.371998Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7488003732950416364:2796]: NKikimr::TEvPQ::TEvUpdateAvailableSize
2025-03-31T15:58:49.372012Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize
2025-03-31T15:58:49.372036Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff
2025-03-31T15:58:49.372070Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest
2025-03-31T15:58:49.372083Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests.
2025-03-31T15:58:49.372102Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0
2025-03-31T15:58:49.471596Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7488003690000742071:2472], Partition 0, Sender [0:0:0], Recipient [5:7488003690000742129:2476], Cookie: 0
2025-03-31T15:58:49.471677Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7488003690000742129:2476]: NKikimr::TEvPQ::TEvUpdateAvailableSize
2025-03-31T15:58:49.471700Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize
2025-03-31T15:58:49.471742Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff
2025-03-31T15:58:49.471809Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest
2025-03-31T15:58:49.471829Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests.
2025-03-31T15:58:49.471856Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0
2025-03-31T15:58:49.471905Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7488003732950416271:2784], Partition 2, Sender [0:0:0], Recipient [5:7488003732950416361:2794], Cookie: 0
2025-03-31T15:58:49.471939Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7488003732950416361:2794]: NKikimr::TEvPQ::TEvUpdateAvailableSize
2025-03-31T15:58:49.471954Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize
2025-03-31T15:58:49.471974Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff
2025-03-31T15:58:49.472003Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest
2025-03-31T15:58:49.472018Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests.
2025-03-31T15:58:49.472033Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0
2025-03-31T15:58:49.472070Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7488003732950416274:2785], Partition 1, Sender [0:0:0], Recipient [5:7488003732950416364:2796], Cookie: 0
2025-03-31T15:58:49.472104Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7488003732950416364:2796]: NKikimr::TEvPQ::TEvUpdateAvailableSize
2025-03-31T15:58:49.472117Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize
2025-03-31T15:58:49.472137Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff
2025-03-31T15:58:49.472161Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest
2025-03-31T15:58:49.472175Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests.
2025-03-31T15:58:49.472191Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0
2025-03-31T15:58:49.571704Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7488003690000742071:2472], Partition 0, Sender [0:0:0], Recipient [5:7488003690000742129:2476], Cookie: 0
2025-03-31T15:58:49.571787Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7488003690000742129:2476]: NKikimr::TEvPQ::TEvUpdateAvailableSize
2025-03-31T15:58:49.571811Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize
2025-03-31T15:58:49.571850Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff
2025-03-31T15:58:49.571911Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest
2025-03-31T15:58:49.571930Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests.
2025-03-31T15:58:49.571955Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0
2025-03-31T15:58:49.572000Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7488003732950416271:2784], Partition 2, Sender [0:0:0], Recipient [5:7488003732950416361:2794], Cookie: 0
2025-03-31T15:58:49.572030Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7488003732950416361:2794]: NKikimr::TEvPQ::TEvUpdateAvailableSize
2025-03-31T15:58:49.572042Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize
2025-03-31T15:58:49.572064Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff
2025-03-31T15:58:49.572090Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest
2025-03-31T15:58:49.572104Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests.
2025-03-31T15:58:49.572120Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0
2025-03-31T15:58:49.572162Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7488003732950416274:2785], Partition 1, Sender [0:0:0], Recipient [5:7488003732950416364:2796], Cookie: 0
2025-03-31T15:58:49.572194Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7488003732950416364:2796]: NKikimr::TEvPQ::TEvUpdateAvailableSize
2025-03-31T15:58:49.572205Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize
2025-03-31T15:58:49.572225Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff
2025-03-31T15:58:49.572249Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest
2025-03-31T15:58:49.572264Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests.
2025-03-31T15:58:49.572280Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0
------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::CreateConnections_With_Idempotency [GOOD]
Test command err:
2025-03-31T15:58:11.653265Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488003571271345250:2074];send_to=[0:7307199536658146131:7762515];
2025-03-31T15:58:11.653307Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
E0331 15:58:12.587334364 40285 dns_resolver.cc:162] no server name supplied in dns URI
E0331 15:58:12.587518727 40285 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:///
2025-03-31T15:58:12.711417Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:8920: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8920 } ]
2025-03-31T15:58:12.719314Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-31T15:58:13.648961Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:322: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:8920: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:8920
2025-03-31T15:58:13.655044Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:8920: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8920 } ]
2025-03-31T15:58:13.714299Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-31T15:58:14.716245Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-31T15:58:15.578043Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:8920: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8920 } ]
2025-03-31T15:58:15.723284Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-31T15:58:16.655299Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488003571271345250:2074];send_to=[0:7307199536658146131:7762515];
2025-03-31T15:58:16.655359Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-31T15:58:16.723774Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
E0331 15:58:17.575965351 40619 dns_resolver.cc:162] no server name supplied in dns URI
E0331 15:58:17.576121544 40619 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:///
2025-03-31T15:58:17.679343Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-31T15:58:17.743480Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-31T15:58:17.807358Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:322: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:8920: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:8920 2025-03-31T15:58:17.850353Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:8920: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8920 } ] 2025-03-31T15:58:18.679556Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:18.748473Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:19.683636Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:19.757463Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:20.048041Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T15:58:20.072382Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7488003609926051376:2319], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:58:20.155953Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7488003609926051376:2319], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:58:20.307659Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7488003609926051376:2319], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d1c/r3tmp/tmpzaCSCA/pdisk_1.dat 2025-03-31T15:58:20.580882Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7488003609926051376:2319], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:58:20.768119Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:20.787445Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 8920, node 1 2025-03-31T15:58:21.156600Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/idempotency_keys". Create session OK 2025-03-31T15:58:21.156652Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-03-31T15:58:21.156671Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-03-31T15:58:21.160835Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenant_acks". Create session OK 2025-03-31T15:58:21.160896Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-03-31T15:58:21.160911Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-03-31T15:58:21.167458Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/result_sets". Create session OK 2025-03-31T15:58:21.167494Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-03-31T15:58:21.167501Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-03-31T15:58:21.167581Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/pending_small". Create session OK 2025-03-31T15:58:21.167596Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-03-31T15:58:21.167602Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-03-31T15:58:21.174549Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/queries". Create session OK 2025-03-31T15:58:21.174584Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-03-31T15:58:21.174591Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-03-31T15:58:21.175994Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenants". Create session OK 2025-03-31T15:58:21.176027Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-03-31T15:58:21.176034Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-03-31T15:58:21.183459Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/jobs". Create session OK 2025-03-31T15:58:21.183497Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-03-31T15:58:21.183504Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-03-31T15:58:21.204556Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/nodes". Create session OK 2025-03-31T15:58:21.204593Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-03-31T15:58:21.204601Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-03-31T15:58:21.206448Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/mappings". 
Create session OK 2025-03-31T15:58:21.206478Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-03-31T15:58:21.206486Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-03-31T15:58:21.207983Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/bindings". Create session OK 2025-03-31T15:58:21.208012Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-03-31T15:58:21.208019Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-03-31T15:58:21.216809Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/quotas". Create session OK 2025-03-31T15:58:21.216845Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-03-31T15:58:21.216855Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-03-31T15:58:21.228058Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/connections". Create session OK 2025-03-31T15:58:21.228087Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-03-31T15:58:21.228094Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-03-31T15:58:21.229036Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/compute_databases". Create session OK 2025-03-31T15:58:21.229049Z node 1 :YQ_CONTROL_PLANE_STORAGE D ... ERROR: SyncQuota finished with error: 2025-03-31T15:58:52.341974Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:52.342108Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:52.342143Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:52.342292Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:52.342316Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:52.342544Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:52.342639Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:52.342790Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:52.342816Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:52.343019Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:52.343047Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:52.343218Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:52.343261Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:52.343476Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:52.343502Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:52.343654Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:52.343686Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:52.343833Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:52.343868Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:52.344025Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:52.344055Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:52.344146Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:52.344235Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 
2025-03-31T15:58:52.344335Z .. 2025-03-31T15:58:52.352565Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: [identical record repeated ~78 times in this interval; per-record timestamps elided]
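The FQ_QUOTA_SERVICE records above (and the ones that continue below, before the test summary) show SyncQuota being re-attempted in a tight loop, roughly once every 100 microseconds, which is what floods this log. A minimal sketch of the standard mitigation, capped exponential backoff between attempts, follows. This is illustrative C++ only, not the actual quota-service code; RetryWithBackoff is a hypothetical helper.

#include <algorithm>
#include <chrono>
#include <functional>
#include <thread>

// Hypothetical helper (not YDB code): retry `op` with capped exponential
// backoff instead of a tight loop, so a persistent failure produces a few
// spaced-out attempts rather than hundreds of log records per millisecond.
bool RetryWithBackoff(const std::function<bool()>& op,
                      int maxAttempts = 8,
                      std::chrono::milliseconds base = std::chrono::milliseconds(50),
                      std::chrono::milliseconds cap = std::chrono::milliseconds(5000)) {
    std::chrono::milliseconds delay = base;
    for (int attempt = 1; attempt <= maxAttempts; ++attempt) {
        if (op()) {
            return true;           // synced successfully
        }
        if (attempt == maxAttempts) {
            break;                 // give up; caller logs one summary line
        }
        std::this_thread::sleep_for(delay);
        delay = std::min(delay * 2, cap);  // 50ms, 100ms, 200ms, ... capped at 5s
    }
    return false;
}

Under these assumptions, a failure burst like the one above would collapse to at most maxAttempts spaced-out records; the write-session retry later in this log ("Write session will restart in 2.000000s ... Do schedule cds request after 2000 ms") applies the same idea.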
2025-03-31T15:58:52.352645Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:52.352727Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:52.352806Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:52.352886Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:52.352963Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:52.353044Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:52.353120Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:52.353195Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:52.353297Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:52.353376Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:52.353455Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:52.353556Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:52.353637Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:52.353713Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:52.353788Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:52.353862Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:52.353934Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:52.354008Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:52.354085Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:52.354168Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:52.354271Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:52.354389Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:52.354478Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:52.354558Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:52.354632Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:52.354709Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:52.354790Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:52.354872Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:52.354988Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: >> UpsertLoad::ShouldWriteDataBulkUpsert >> DataShardSnapshots::LockedWriteDistributedCommitAborted-UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitCrossConflict+UseSink |68.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/ut/ydb-core-mind-ut |68.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/ut/ydb-core-mind-ut |68.6%| [LD] {RESULT} $(B)/ydb/core/mind/ut/ydb-core-mind-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> CompressExecutor::TestExecutorMemUsage [GOOD] Test command err: 2025-03-31T15:53:52.259027Z :WriteAndReadSomeMessagesWithAsyncCompression INFO: Random seed for debugging is 1743436432258993 2025-03-31T15:53:52.834772Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488002461379248219:2069];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:53:52.839148Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T15:53:54.388868Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488002460836436798:2287];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:53:54.389774Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001260/r3tmp/tmpAmIbTj/pdisk_1.dat 2025-03-31T15:53:55.828866Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T15:53:57.302084Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:53:57.923367Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488002460836436798:2287];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:53:57.923561Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:53:58.330437Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:53:58.928191Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:53:59.363801Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:53:59.929707Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:00.384790Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:00.732836Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:00.734812Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:00.781979Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:00.782037Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:00.877681Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:54:00.925483Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T15:54:00.948694Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:54:01.328255Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:01.395549Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 27089, node 1 2025-03-31T15:54:02.332063Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:02.416518Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:03.341736Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:05.453427Z INFO: TTestServer started on Port 4366 GrpcPort 27089 2025-03-31T15:54:06.777292Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488002461379248219:2069];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:54:06.778271Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:54:06.793605Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-31T15:54:07.693093Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:54:08.024870Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/0l5c/001260/r3tmp/yandexTNu7Qu.tmp 2025-03-31T15:54:08.024893Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/0l5c/001260/r3tmp/yandexTNu7Qu.tmp 2025-03-31T15:54:08.025336Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/0l5c/001260/r3tmp/yandexTNu7Qu.tmp 2025-03-31T15:54:08.025620Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4366 PQClient connected to localhost:27089 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
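The recurring "cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError" records above are the metadata provider probing a scheme path before the test server has finished initializing, retrying until it either succeeds or hits the TTableExistsActor timeout. A sketch of the same probe through the public YDB C++ SDK scheme client is below; the header path, endpoint, and database are assumptions for illustration and are not taken from this log.

#include <ydb/public/sdk/cpp/client/ydb_scheme/scheme.h>  // assumed in-tree SDK header path
#include <iostream>

int main() {
    // Assumed endpoint/database; substitute the test server's actual values.
    auto config = NYdb::TDriverConfig()
        .SetEndpoint("localhost:2136")
        .SetDatabase("/Root");
    NYdb::TDriver driver(config);
    NYdb::NScheme::TSchemeClient scheme(driver);

    // DescribePath is the SDK-level analogue of the path-existence probe:
    // before initialization finishes it fails (the LookupError seen in the
    // log); once the path is created it returns the scheme entry.
    auto result = scheme.DescribePath("/Root/.metadata/initialization/migrations")
                        .GetValueSync();
    if (result.IsSuccess()) {
        std::cout << "path exists: " << result.GetEntry().Name.c_str() << std::endl;
    } else {
        std::cout << "not ready yet: " << result.GetIssues().ToString().c_str() << std::endl;
    }
    driver.Stop(true);
    return 0;
}

In the log's own flow, the probe keeps returning LookupError until "TServer::EnableGrpc on GrpcPort 27089" and the subsequent table creation complete, after which the same call would succeed.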
2025-03-31T15:54:11.012965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-03-31T15:54:11.311692Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-03-31T15:54:15.929387Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488002560163497244:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:54:15.930940Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:54:15.968356Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488002560163497268:2355], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:54:16.037497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-03-31T15:54:17.020003Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488002560163497271:2356], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-03-31T15:54:19.196127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T15:54:19.196152Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:54:19.199344Z node 1 :TX_PROXY ERROR: Actor# [1:7488002573048399273:2786] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:54:19.534555Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7488002568210619390:2348], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T15:54:19.534752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T15:54:19.537136Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488002577343366603:2369], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T15:54:19.539275Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTYyODY4NDItY2JkZWNjLWRkYmZlYjIxLWZhOGM5Y2Rh, ActorId: [1:7488002560163497234:2349], ActorState: ExecuteState, TraceId: 01jqpej7v4c252rkbyg2dg81wr, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T15:54:19.541148Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MWM1N2M0ZjEtYjE3NzNlNTgtNTMyYTYxNTAtNjc0YmJjZTY=, ActorId: [2:7488002563915652050:2339], ActorState: ExecuteState, TraceId: 01jqpej9f468hhaped20hxn2nq, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T15:54:19.560184Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permi ... 1 data: init_request { topic: "test-topic" message_group_id: "test-message-group-id" preferred_cluster: "dc1" } 2025-03-31T15:58:49.708133Z node 13 :PQ_WRITE_PROXY INFO: session request cookie: 3 topic: "test-topic" message_group_id: "test-message-group-id" preferred_cluster: "dc1" from ipv6:[::1]:47588 2025-03-31T15:58:49.708156Z node 13 :PQ_WRITE_PROXY INFO: write session: cookie=3 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:47588 proto=v1 topic=test-topic durationSec=0 2025-03-31T15:58:49.708171Z node 13 :PQ_WRITE_PROXY INFO: init check schema 2025-03-31T15:58:49.710139Z node 13 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: describe result for acl check 2025-03-31T15:58:49.710360Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2025-03-31T15:58:49.710394Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-03-31T15:58:49.710420Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2025-03-31T15:58:49.710456Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [13:7488003734452803288:2568] (SourceId=test-message-group-id, PreferedPartition=(NULL)) StartKqpSession 2025-03-31T15:58:49.715554Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [13:7488003734452803288:2568] (SourceId=test-message-group-id, 
PreferedPartition=(NULL)) Select from the table 2025-03-31T15:58:49.871406Z node 14 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976720680. Failed to resolve tablet: 72075186224037890 after several retries. 2025-03-31T15:58:49.871562Z node 14 :KQP_EXECUTER WARN: ActorId: [14:7488003734702838902:2511] TxId: 281474976720680. Ctx: { TraceId: 01jqpetj93cqz4asv2jjrq1x4h, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=14&id=ODI4OWY2YmQtNjNiZTBiODAtZDgxZGNlNTUtMTM0ODI2M2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-03-31T15:58:49.871824Z node 14 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=14&id=ODI4OWY2YmQtNjNiZTBiODAtZDgxZGNlNTUtMTM0ODI2M2U=, ActorId: [14:7488003730407871586:2511], ActorState: ExecuteState, TraceId: 01jqpetj93cqz4asv2jjrq1x4h, Create QueryResponse for error on request, msg: 2025-03-31T15:58:49.874480Z node 14 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jqpetk9mcczan25rwjqjbtm3" } } YdbStatus: UNAVAILABLE ConsumedRu: 686 } 2025-03-31T15:58:49.919900Z node 13 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976715694. Failed to resolve tablet: 72075186224037891 after several retries. 2025-03-31T15:58:49.920042Z node 13 :KQP_EXECUTER WARN: ActorId: [13:7488003734452803301:2570] TxId: 281474976715694. Ctx: { TraceId: 01jqpetkbk54wya9hrgb232hf8, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=MjE4YzFlYzYtM2JkMTBlM2ItOGExYzgzZWQtYThlYTRlODc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037891 after several retries. 2025-03-31T15:58:49.920271Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=MjE4YzFlYzYtM2JkMTBlM2ItOGExYzgzZWQtYThlYTRlODc=, ActorId: [13:7488003734452803289:2570], ActorState: ExecuteState, TraceId: 01jqpetkbk54wya9hrgb232hf8, Create QueryResponse for error on request, msg: 2025-03-31T15:58:49.921944Z node 13 :PQ_PARTITION_CHOOSER INFO: TPartitionChooser [13:7488003734452803288:2568] (SourceId=test-message-group-id, PreferedPartition=(NULL)) ReplyError: kqp error Marker# PQ50 : Response { SessionId: "ydb://session/3?node_id=13&id=MjE4YzFlYzYtM2JkMTBlM2ItOGExYzgzZWQtYThlYTRlODc=" QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01jqpetkbn0n7e8zhyb2em5bq4" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 2025-03-31T15:58:49.922185Z node 13 :PQ_WRITE_PROXY INFO: session v1 error cookie: 3 reason: kqp error Marker# PQ50 : Response { SessionId: "ydb://session/3?node_id=13&id=MjE4YzFlYzYtM2JkMTBlM2ItOGExYzgzZWQtYThlYTRlODc=" QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01jqpetkbn0n7e8zhyb2em5bq4" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 sessionId: 2025-03-31T15:58:49.922604Z node 13 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: is DEAD Test retry state: get retry delay 2025-03-31T15:58:49.931366Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|f911abd1-cf5b6d5-e1c2747e-e60b9cd_0] Got error. Status: UNAVAILABLE, Description:
: Error: kqp error Marker# PQ50 : Response { SessionId: "ydb://session/3?node_id=13&id=MjE4YzFlYzYtM2JkMTBlM2ItOGExYzgzZWQtYThlYTRlODc=" QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01jqpetkbn0n7e8zhyb2em5bq4" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 , code: 500001 2025-03-31T15:58:49.931408Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|f911abd1-cf5b6d5-e1c2747e-e60b9cd_0] Write session will restart in 2.000000s 2025-03-31T15:58:49.931544Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|f911abd1-cf5b6d5-e1c2747e-e60b9cd_0] Write session: Do CDS request 2025-03-31T15:58:49.931582Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|f911abd1-cf5b6d5-e1c2747e-e60b9cd_0] Do schedule cds request after 2000 ms 2025-03-31T15:58:50.184768Z node 13 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976715696. Failed to resolve tablet: 72075186224037890 after several retries. 2025-03-31T15:58:50.184916Z node 13 :KQP_EXECUTER WARN: ActorId: [13:7488003734452803345:2559] TxId: 281474976715696. Ctx: { TraceId: 01jqpetjrg2xq7trnpay89xp9k, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=OWIxMmQ5MmYtNDhjYmYyZjktM2I3NGNkNGQtOWE2YTIzZDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-03-31T15:58:50.185181Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=OWIxMmQ5MmYtNDhjYmYyZjktM2I3NGNkNGQtOWE2YTIzZDg=, ActorId: [13:7488003734452803264:2559], ActorState: ExecuteState, TraceId: 01jqpetjrg2xq7trnpay89xp9k, Create QueryResponse for error on request, msg: 2025-03-31T15:58:50.186996Z node 13 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jqpetkk02sqy5b777t71gxgw" } } YdbStatus: UNAVAILABLE ConsumedRu: 559 } 2025-03-31T15:58:50.380972Z node 14 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976720682. Failed to resolve tablet: 72075186224037890 after several retries. 2025-03-31T15:58:50.381113Z node 14 :KQP_EXECUTER WARN: ActorId: [14:7488003738997806280:2521] TxId: 281474976720682. Ctx: { TraceId: 01jqpetkrv81nwm9etx2tyqxrw, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=14&id=Y2NhMWNjZDItZmViZjM1OTgtNTc2ZjM2ZGEtM2MzMDUxYWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-03-31T15:58:50.381369Z node 14 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=14&id=Y2NhMWNjZDItZmViZjM1OTgtNTc2ZjM2ZGEtM2MzMDUxYWY=, ActorId: [14:7488003738997806277:2521], ActorState: ExecuteState, TraceId: 01jqpetkrv81nwm9etx2tyqxrw, Create QueryResponse for error on request, msg: 2025-03-31T15:58:50.383419Z node 14 :PQ_METACACHE ERROR: Got error trying to perform request: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jqpetkrz57s2gnq5h05k6wre" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 } 2025-03-31T15:58:50.713629Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|f911abd1-cf5b6d5-e1c2747e-e60b9cd_0] Write session: close. Timeout = 0 ms 2025-03-31T15:58:50.713715Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|f911abd1-cf5b6d5-e1c2747e-e60b9cd_0] Write session will now close 2025-03-31T15:58:50.713798Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|f911abd1-cf5b6d5-e1c2747e-e60b9cd_0] Write session: aborting 2025-03-31T15:58:50.714643Z :WARNING: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|f911abd1-cf5b6d5-e1c2747e-e60b9cd_0] Write session: could not confirm all writes in time or session aborted, perform hard shutdown 2025-03-31T15:58:50.714711Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|f911abd1-cf5b6d5-e1c2747e-e60b9cd_0] Write session: destroy 2025-03-31T15:58:50.774362Z node 13 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976715698. Failed to resolve tablet: 72075186224037890 after several retries. 2025-03-31T15:58:50.774512Z node 13 :KQP_EXECUTER WARN: ActorId: [13:7488003738747770743:2574] TxId: 281474976715698. Ctx: { TraceId: 01jqpetm645md6gkps45hqby1v, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=NmRjZWM1MGYtODYwNjE5YjctZWQzOGRiNjMtZjNmZmYyYjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-03-31T15:58:50.774753Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=NmRjZWM1MGYtODYwNjE5YjctZWQzOGRiNjMtZjNmZmYyYjY=, ActorId: [13:7488003738747770740:2574], ActorState: ExecuteState, TraceId: 01jqpetm645md6gkps45hqby1v, Create QueryResponse for error on request, msg: 2025-03-31T15:58:50.776351Z node 13 :PQ_METACACHE ERROR: Got error trying to perform request: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jqpetm65ax1qk41b1t4yrfeh" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 } >> DataShardSnapshots::LockedWriteDistributedCommitSuccess-UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitFreeze+UseSink >> KqpIndexLookupJoin::Left+StreamLookup [GOOD] >> KqpIndexLookupJoin::Left-StreamLookup >> DataShardSnapshots::ShardRestartWholeShardLockBrokenByUpsert [GOOD] >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedBeforeRead+UseSink >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-6 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-7 >> DataShardSnapshots::MvccSnapshotLockedWritesRestart+UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesRestart-UseSink >> TBlobStorageProxyTest::TestProxyLongTailDiscover >> KqpMultishardIndex::DataColumnWrite+UseSink [GOOD] >> KqpMultishardIndex::DataColumnWrite-UseSink >> TTxDataShardUploadRows::TestUploadRows [GOOD] >> TTxDataShardUploadRows::TestUploadRowsDropColumnRace >> KqpJoin::CrossJoinCount [GOOD] >> TopicAutoscaling::PartitionSplit_ManySession_existed_AutoscaleAwareSDK [GOOD] |68.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer |68.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer |68.6%| [LD] {RESULT} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer >> TBlobStorageProxyTest::TestVPutVGet >> TBlobStorageProxyTest::TestNormal ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::CrossJoinCount [GOOD] Test command err: Trying to start YDB, gRPC: 7953, MsgBus: 31259 2025-03-31T15:58:47.918792Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488003727649905712:2204];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:58:47.918850Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0019f4/r3tmp/tmpjKqJ4g/pdisk_1.dat 2025-03-31T15:58:48.830478Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:58:48.850180Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:58:48.850272Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:58:48.861255Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7953, node 1 2025-03-31T15:58:49.096960Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:58:49.096983Z node 1 :NET_CLASSIFIER WARN: will 
try to initialize from file: (empty maybe) 2025-03-31T15:58:49.096991Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:58:49.097094Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31259 TClient is connected to server localhost:31259 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:58:50.402634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:58:50.465919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:58:50.759689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:58:51.169067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:58:51.353903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:58:52.925137Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488003727649905712:2204];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:58:52.925205Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:58:54.116897Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003757714678419:2410], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:54.117359Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:54.464710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T15:58:54.508022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T15:58:54.565622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T15:58:54.664683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T15:58:54.729627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T15:58:54.802669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T15:58:54.899553Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003757714678940:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:54.899641Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:54.900008Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003757714678945:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:54.905001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T15:58:54.931332Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488003757714678947:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T15:58:55.028249Z node 1 :TX_PROXY ERROR: Actor# [1:7488003762009646300:3461] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:58:56.825340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T15:58:56.914603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T15:58:56.980882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 >> TTxDataShardUploadRows::TestUploadShadowRows [GOOD] >> TTxDataShardUploadRows::TestUploadShadowRowsShadowData >> Yq_1::Basic_TaggedLiteral [GOOD] >> TopicAutoscaling::PartitionSplit_AutosplitByLoad [GOOD] >> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflow [GOOD] >> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflowAndRetry >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataPublishThenSplit [GOOD] >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataAlterSplitThenPublish >> PrivateApi::Nodes [GOOD] >> KqpIndexLookupJoin::SimpleLeftSemiJoin-StreamLookup [GOOD] >> KqpIndexLookupJoin::CheckCastInt64ToUint64+StreamLookupJoin-NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastInt64ToUint64+StreamLookupJoin+NotNull >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsUseInvalidSearchFilterBad [GOOD] >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataSplitThenPublish [GOOD] >> TTxDataShardUploadRows::UploadRowsToReplicatedTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::PartitionSplit_ManySession_existed_AutoscaleAwareSDK [GOOD] Test command err: 2025-03-31T15:55:41.950995Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488002927775784864:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:55:41.962927Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001910/r3tmp/tmpKBrwEQ/pdisk_1.dat 2025-03-31T15:55:42.284208Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-31T15:55:42.505596Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:55:42.512049Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:55:42.512167Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:55:42.520004Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29425, node 1 2025-03-31T15:55:42.601926Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: 
/home/runner/.ya/build/build_root/0l5c/001910/r3tmp/yandexVMFLk2.tmp 2025-03-31T15:55:42.601954Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/0l5c/001910/r3tmp/yandexVMFLk2.tmp 2025-03-31T15:55:42.602148Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/0l5c/001910/r3tmp/yandexVMFLk2.tmp 2025-03-31T15:55:42.602260Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T15:55:42.650123Z INFO: TTestServer started on Port 21834 GrpcPort 29425 TClient is connected to server localhost:21834 PQClient connected to localhost:29425 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:55:42.975661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T15:55:43.020873Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-31T15:55:43.030009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T15:55:43.211267Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:55:46.983359Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488002927775784864:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:55:46.983637Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:55:55.813006Z node 1 :KQP_PROXY ERROR: TraceId: "01jqpemxj510jvgb1mnmtmrcak", Request deadline has expired for 7.502660s seconds 2025-03-31T15:55:55.814065Z node 1 :KQP_PROXY ERROR: TraceId: "01jqpen2jb5w236gx02nvczcgn", Request deadline has expired for 2.281975s seconds 2025-03-31T15:55:55.990522Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488002987905327882:2358], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:55:55.991587Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:55:55.999238Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488002987905327896:2362], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:55:56.042445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-03-31T15:55:56.544517Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488002987905327898:2363], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-03-31T15:55:56.618187Z node 1 :TX_PROXY ERROR: Actor# [1:7488002992200295260:2483] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:55:57.487488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T15:55:57.487514Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:55:57.929912Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488002992200295275:2370], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T15:55:58.040857Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=M2RjMDU1MGQtMjBjZjc4ODMtMWY3NmNhMzAtYWM3YjE3Nzk=, ActorId: [1:7488002987905327880:2357], ActorState: ExecuteState, TraceId: 01jqpen9h38frqx9n0cxe8egpb, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T15:55:58.079718Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T15:55:58.130462Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T15:55:58.398474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T15:55:59.384138Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488003005085197362:2400], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T15:55:59.393928Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZmFhOWFiNzUtNjYxNmRkYWMtZjdmYTYxNDQtNjAyMTNhYw==, ActorId: [1:7488003005085197360:2399], ActorState: ExecuteState, TraceId: 01jqpencvjb4qjqp66enftwj96, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T15:55:59.394471Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T15:55:59.428265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-03-31T15:56:03.053456Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } TxMeta { } YdbResults { columns { name: "C.name" type { optional_type { item { type_id: UTF8 } } } } columns { name: "C.balancer" type { optional_type { item { type_id: UTF8 } } } } columns { name: "C.local" type { optional_type { item { type_id: BOOL } } } } columns { name: "C.enabled" type { optional_type { item { type_id: BOOL } } } } columns { name: "C.weight" type { optional_type { item { type_id: UINT64 } } } } columns { name: "V.version" type { optional_type { item { type_id: INT64 } } } } } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 1560 } === CheckClustersList. Subcribe to ClusterTracker from [1:7488003022265066724:2709] === CheckClustersList. Ok 2025-03-31T15:56:09.284076Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-03-31T15:56:09.587054Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-03-31T15:56:09.590183Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7488003048034870712:2834], Recipient [1:7488002932070752576:2187]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T15:56:09.590220Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T15:56:09.590238Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-03-31T15:56:09.590279Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7488003048034870708:2831], Recipient ... 
dle event# 271188544 (NKikimr::NPQ::NReadQuoterEvents::TEvQuotaCountersUpdated), Tablet [5:7488003722607135571:2470], Partition 0, Sender [5:7488003722607135631:2475], Recipient [5:7488003722607135626:2473], Cookie: 0 2025-03-31T15:58:57.056483Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188544, Sender [5:7488003722607135631:2475], Recipient [5:7488003722607135626:2473]: NKikimr::NPQ::NReadQuoterEvents::TEvQuotaCountersUpdated 2025-03-31T15:58:57.056510Z node 5 :PERSQUEUE TRACE: StateIdle, processing event NReadQuoterEvents::TEvQuotaCountersUpdated 2025-03-31T15:58:57.056624Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435091, Sender [0:0:0], Recipient [5:7488003649592690369:2145]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-03-31T15:58:57.056648Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-03-31T15:58:57.056670Z node 5 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-03-31T15:58:57.057102Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 271187975, Sender [5:7488003722607135567:2469], Recipient [5:7488003722607135571:2470]: NKikimrPQ.TStatus GetStatForAllConsumers: true 2025-03-31T15:58:57.057132Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvPersQueue::TEvStatus 2025-03-31T15:58:57.057154Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892] Handle TEvPersQueue::TEvStatus 2025-03-31T15:58:57.057232Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 271188536, Sender [5:7488003722607135567:2469], Recipient [5:7488003722607135571:2470]: NKikimrPQ.TEvSubDomainStatus SubDomainOutOfSpace: false 2025-03-31T15:58:57.057252Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvPQ::TEvSubDomainStatus 2025-03-31T15:58:57.057308Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 271187975, Sender [5:7488003722607135567:2469], Recipient [5:7488003761261842567:2814]: NKikimrPQ.TStatus GetStatForAllConsumers: true 2025-03-31T15:58:57.057322Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvPersQueue::TEvStatus 2025-03-31T15:58:57.057335Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897] Handle TEvPersQueue::TEvStatus 2025-03-31T15:58:57.057390Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 271188536, Sender [5:7488003722607135567:2469], Recipient [5:7488003761261842567:2814]: NKikimrPQ.TEvSubDomainStatus SubDomainOutOfSpace: false 2025-03-31T15:58:57.057405Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvPQ::TEvSubDomainStatus 2025-03-31T15:58:57.057461Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 271187975, Sender [5:7488003722607135567:2469], Recipient [5:7488003761261842566:2813]: NKikimrPQ.TStatus GetStatForAllConsumers: true 2025-03-31T15:58:57.057475Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvPersQueue::TEvStatus 2025-03-31T15:58:57.057502Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896] Handle TEvPersQueue::TEvStatus 2025-03-31T15:58:57.057557Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 271188536, Sender [5:7488003722607135567:2469], Recipient [5:7488003761261842566:2813]: NKikimrPQ.TEvSubDomainStatus SubDomainOutOfSpace: false 2025-03-31T15:58:57.057569Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvPQ::TEvSubDomainStatus 2025-03-31T15:58:57.057632Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188491 (NKikimr::TEvPQ::TEvPartitionStatus), Tablet [5:7488003722607135571:2470], Partition 0, Sender [5:7488003722607135571:2470], Recipient 
[5:7488003722607135626:2473], Cookie: 0 2025-03-31T15:58:57.057677Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188491, Sender [5:7488003722607135571:2470], Recipient [5:7488003722607135626:2473]: NKikimr::TEvPQ::TEvPartitionStatus 2025-03-31T15:58:57.057700Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvPartitionStatus 2025-03-31T15:58:57.057943Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 3 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } 2025-03-31T15:58:57.058090Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188536 (NKikimr::TEvPQ::TEvSubDomainStatus), Tablet [5:7488003722607135571:2470], Partition 0, Sender [5:7488003722607135571:2470], Recipient [5:7488003722607135626:2473], Cookie: 0 2025-03-31T15:58:57.058142Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188536, Sender [5:7488003722607135571:2470], Recipient [5:7488003722607135626:2473]: NKikimrPQ.TEvSubDomainStatus SubDomainOutOfSpace: false 2025-03-31T15:58:57.058161Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvSubDomainStatus 2025-03-31T15:58:57.058210Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188491 (NKikimr::TEvPQ::TEvPartitionStatus), Tablet [5:7488003761261842567:2814], Partition 1, Sender [5:7488003761261842567:2814], Recipient [5:7488003761261842672:2830], Cookie: 0 2025-03-31T15:58:57.058247Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188491, Sender [5:7488003761261842567:2814], Recipient [5:7488003761261842672:2830]: NKikimr::TEvPQ::TEvPartitionStatus 2025-03-31T15:58:57.058264Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvPartitionStatus 2025-03-31T15:58:57.058437Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 3 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } 2025-03-31T15:58:57.058541Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188536 (NKikimr::TEvPQ::TEvSubDomainStatus), Tablet [5:7488003761261842567:2814], Partition 1, Sender 
[5:7488003761261842567:2814], Recipient [5:7488003761261842672:2830], Cookie: 0 2025-03-31T15:58:57.058584Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188536, Sender [5:7488003761261842567:2814], Recipient [5:7488003761261842672:2830]: NKikimrPQ.TEvSubDomainStatus SubDomainOutOfSpace: false 2025-03-31T15:58:57.058599Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvSubDomainStatus 2025-03-31T15:58:57.058641Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188491 (NKikimr::TEvPQ::TEvPartitionStatus), Tablet [5:7488003761261842566:2813], Partition 2, Sender [5:7488003761261842566:2813], Recipient [5:7488003761261842654:2823], Cookie: 0 2025-03-31T15:58:57.058677Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188491, Sender [5:7488003761261842566:2813], Recipient [5:7488003761261842654:2823]: NKikimr::TEvPQ::TEvPartitionStatus 2025-03-31T15:58:57.058690Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvPartitionStatus 2025-03-31T15:58:57.058849Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 3 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } 2025-03-31T15:58:57.058944Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188536 (NKikimr::TEvPQ::TEvSubDomainStatus), Tablet [5:7488003761261842566:2813], Partition 2, Sender [5:7488003761261842566:2813], Recipient [5:7488003761261842654:2823], Cookie: 0 2025-03-31T15:58:57.058985Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188536, Sender [5:7488003761261842566:2813], Recipient [5:7488003761261842654:2823]: NKikimrPQ.TEvSubDomainStatus SubDomainOutOfSpace: false 2025-03-31T15:58:57.059000Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvSubDomainStatus 2025-03-31T15:58:57.059058Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 271188503, Sender [5:7488003722607135626:2473], Recipient [5:7488003722607135571:2470]: NKikimr::TEvPQ::TEvPartitionLabeledCounters 2025-03-31T15:58:57.059102Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvPQ::TEvPartitionLabeledCounters 2025-03-31T15:58:57.059256Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 271188503, Sender [5:7488003761261842672:2830], Recipient [5:7488003761261842567:2814]: NKikimr::TEvPQ::TEvPartitionLabeledCounters 2025-03-31T15:58:57.059269Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvPQ::TEvPartitionLabeledCounters 2025-03-31T15:58:57.059356Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 271188503, Sender [5:7488003761261842654:2823], Recipient [5:7488003761261842566:2813]: NKikimr::TEvPQ::TEvPartitionLabeledCounters 2025-03-31T15:58:57.059369Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvPQ::TEvPartitionLabeledCounters 2025-03-31T15:58:57.059758Z node 5 :PERSQUEUE_READ_BALANCER DEBUG: 
[72075186224037893][test-topic] Send TEvPeriodicTopicStats PathId: 13 Generation: 1 StatsReportRound: 10 DataSize: 0 UsedReserveSize: 0 2025-03-31T15:58:57.059910Z node 5 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] ProcessPendingStats. PendingUpdates size 3 2025-03-31T15:58:57.060380Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271188001, Sender [5:7488003722607135567:2469], Recipient [5:7488003649592690369:2145]: NKikimrPQ.TEvPeriodicTopicStats PathId: 13 Generation: 1 Round: 10 DataSize: 0 UsedReserveSize: 0 SubDomainOutOfSpace: false 2025-03-31T15:58:57.060410Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPersQueue::TEvPeriodicTopicStats 2025-03-31T15:58:57.060432Z node 5 :FLAT_TX_SCHEMESHARD INFO: Got periodic topic stats at partition [OwnerId: 72057594046644480, LocalPathId: 13] DataSize 0 UsedReserveSize 0 2025-03-31T15:58:57.060463Z node 5 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTopicStats on# 0.099995s, queue# 1 2025-03-31T15:58:57.066942Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [5:7488003722607135567:2469], Recipient [5:7488003649592690369:2145]: NKikimrSchemeOp.TDescribePath PathId: 13 SchemeshardId: 72057594046644480 2025-03-31T15:58:57.066986Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme >> Yq_1::ModifyQuery [GOOD] >> TopicAutoscaling::ReadingAfterSplitTest_PreferedPartition_AutoscaleAwareSDK [GOOD] >> KqpIndexLookupJoin::CheckCastUint64ToInt64-StreamLookupJoin-NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastUint64ToInt64-StreamLookupJoin+NotNull ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::Basic_TaggedLiteral [GOOD] Test command err: 2025-03-31T15:58:10.870985Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488003567898026843:2273];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:58:10.871072Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0331 15:58:11.805728001 39940 dns_resolver.cc:162] no server name supplied in dns URI E0331 15:58:11.805918683 39940 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-03-31T15:58:11.927789Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:12.714682Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:28748: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:28748 } ] 2025-03-31T15:58:12.803785Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:322: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:28748: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:28748 2025-03-31T15:58:12.938619Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:13.943580Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:14.603265Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:28748: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:28748 } ] 2025-03-31T15:58:14.944009Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:15.865848Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488003567898026843:2273];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:58:15.865912Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:58:15.947441Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:16.187503Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T15:58:16.208040Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7488003593667830813:2313], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:58:16.294276Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7488003593667830813:2313], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:58:16.488224Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7488003593667830813:2313], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d08/r3tmp/tmprm1I8W/pdisk_1.dat E0331 15:58:16.807572763 40152 dns_resolver.cc:162] no server name supplied in dns URI E0331 15:58:16.807755304 40152 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-03-31T15:58:16.932920Z node 1 :FQ_PENDING_FETCHER ERROR: Error with GetTask:
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:28748: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:28748 2025-03-31T15:58:16.933447Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:28748: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:28748 } ] 2025-03-31T15:58:16.935392Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:16.942155Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7488003593667830813:2313], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:58:17.013838Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 28748, node 1 2025-03-31T15:58:17.131762Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T15:58:17.135586Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 TClient is connected to server localhost:18205 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-03-31T15:58:17.475200Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:58:17.475698Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:58:17.475715Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:58:17.475732Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:58:17.475865Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:58:17.486027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:58:20.527887Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:58:20.528016Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:58:20.617225Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:58:20.810789Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/mappings". Create session OK 2025-03-31T15:58:20.810840Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-03-31T15:58:20.810849Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-03-31T15:58:20.812389Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/jobs". Create session OK 2025-03-31T15:58:20.812407Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-03-31T15:58:20.812414Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-03-31T15:58:20.815664Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenant_acks". 
Create session OK 2025-03-31T15:58:20.815693Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-03-31T15:58:20.815700Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-03-31T15:58:20.823918Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-31T15:58:20.834711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T15:58:20.835510Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/idempotency_keys". Create session OK 2025-03-31T15:58:20.835530Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-03-31T15:58:20.835536Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-03-31T15:58:20.836735Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/pending_small". Create session OK 2025-03-31T15:58:20.836762Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-03-31T15:58:20.836792Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-03-31T15:58:20.837851Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/quotas". Create session OK 2025-03-31T15:58:20.837865Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-03-31T15:58:20.837874Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-03-31T15:58:20.852408Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/result_sets". Create session OK 2025-03-31T15:58:20.852433Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-03-31T15:58:20.852440Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-03-31T15:58:20.872356Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/compute_databases". Create session OK 2025-03-31T15:58:20.872380Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-03-31T15:58:20.872388Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-03-31T15:58:20.891680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T15:58:20.892377Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "Root/yq" 2025-0 ... 
ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.327914Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.327968Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.328001Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.328101Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.328141Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.328223Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.328251Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.328277Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.328351Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.328384Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.328414Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.328443Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.328533Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.328564Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.328601Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.328646Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.328745Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.328783Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.328811Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.328904Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.328940Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.328966Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.328993Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.329078Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.329104Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.329127Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.329156Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.329242Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.329270Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.329296Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.329327Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.329380Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.329459Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.329535Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.329572Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.329617Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.329702Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.329730Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.329753Z node 4 :FQ_QUOTA_SERVICE 
ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.329834Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.329858Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.329881Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.329979Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.330017Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.330040Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.330147Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.330296Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.330336Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.330364Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.330403Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.330509Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.330542Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.330565Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.330628Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.330691Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.330722Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.330783Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.330823Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.330936Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.330976Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.331005Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.331115Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.331151Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.331208Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.331240Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.331333Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.331373Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.331396Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.331469Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.331496Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.331521Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.331613Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.331635Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.331677Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.331781Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.331820Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.331844Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.331877Z node 4 :FQ_QUOTA_SERVICE 
ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.331996Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.332031Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.332054Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.332199Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.332223Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.332245Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.332334Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.332363Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.332384Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.332466Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.332489Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.332510Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.332570Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.332655Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.332679Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.332701Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.332729Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.332802Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.332847Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.332894Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.332941Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.333021Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.333057Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.333088Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.333180Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.333207Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.333229Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.333306Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.333336Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.333357Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.333414Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.333540Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.333568Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.333589Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.333651Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.333692Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.333743Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.333787Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.333832Z node 4 :FQ_QUOTA_SERVICE 
ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.333867Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.333913Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.333953Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.334018Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.334122Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.334153Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.334179Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.334247Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.334294Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.334318Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.334392Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.334435Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:58:59.334458Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: >> TBlobStorageProxyTest::TestVPutVGet [GOOD] >> TBlobStorageProxyTest::TestVPutVGetLimit >> TPersQueueMirrorer::ValidStartStream [GOOD] >> DataShardSnapshots::MvccSnapshotReadLockedWrites+UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotReadLockedWrites-UseSink >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-7 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-8 >> UpsertLoad::ShouldWriteDataBulkUpsert [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsert2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::SimpleLeftSemiJoin-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 7052, MsgBus: 18777 2025-03-31T15:58:49.391644Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488003734251901001:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:58:49.392139Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0019df/r3tmp/tmpAQVeIk/pdisk_1.dat 2025-03-31T15:58:50.307529Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:58:50.310131Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:58:50.310211Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:58:50.322179Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7052, node 1 2025-03-31T15:58:50.587676Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:58:50.587698Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:58:50.587714Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:58:50.587827Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18777 TClient is connected to server localhost:18777 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:58:52.068784Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:58:52.091871Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T15:58:52.103298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:58:52.446610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:58:52.653097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:58:52.741772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:58:54.361567Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488003734251901001:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:58:54.361664Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:58:55.655013Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003760021706421:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:55.659321Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:56.015791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T15:58:56.078699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T15:58:56.145669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T15:58:56.190323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T15:58:56.235919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T15:58:56.327689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T15:58:56.409946Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003764316674238:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:56.410000Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:56.410363Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003764316674244:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:56.414367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T15:58:56.431774Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488003764316674246:2466], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T15:58:56.519721Z node 1 :TX_PROXY ERROR: Actor# [1:7488003764316674300:3460] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:58:57.839360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T15:58:57.898195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T15:58:58.007317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T15:58:58.142912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T15:58:58.181534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-31T15:58:58.272232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsUseInvalidSearchFilterBad [GOOD] Test command err: 2025-03-31T15:58:23.698568Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488003624577926908:2277];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:58:23.698618Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0017a1/r3tmp/tmp18E1aV/pdisk_1.dat 2025-03-31T15:58:24.742302Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:58:24.779313Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:24.790552Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:58:24.799411Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:58:24.808478Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64330, node 1 2025-03-31T15:58:25.016107Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:58:25.016137Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:58:25.016145Z node 1 :NET_CLASSIFIER WARN: failed to 
initialize from file: (empty maybe) 2025-03-31T15:58:25.016259Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T15:58:25.371364Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-31T15:58:25.378038Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-31T15:58:25.378085Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T15:58:25.378884Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:16022, port: 16022 2025-03-31T15:58:25.379617Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-31T15:58:25.415676Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-31T15:58:25.460224Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-03-31T15:58:25.510059Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****gPKQ (60ABA204) () has now valid token of ldapuser@ldap 2025-03-31T15:58:32.152610Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488003662915864237:2169];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:58:32.157545Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0017a1/r3tmp/tmpfrZGXT/pdisk_1.dat 2025-03-31T15:58:32.584166Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:58:32.677903Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:58:32.677995Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:58:32.680344Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6744, node 2 2025-03-31T15:58:32.891716Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:58:32.891739Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:58:32.891745Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:58:32.891857Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T15:58:33.486433Z node 2 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-31T15:58:33.490668Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-31T15:58:33.490696Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T15:58:33.491430Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:26433, port: 26433 2025-03-31T15:58:33.491504Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-31T15:58:33.531310Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-31T15:58:33.583464Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, 
scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-03-31T15:58:33.587281Z node 2 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-03-31T15:58:33.587348Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-31T15:58:33.635328Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-31T15:58:33.683383Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-31T15:58:33.684230Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****1QIQ (F45299F4) () has now valid token of ldapuser@ldap 2025-03-31T15:58:38.159331Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488003688056317192:2127];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:58:38.159460Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0017a1/r3tmp/tmpIgmGGc/pdisk_1.dat 2025-03-31T15:58:38.584340Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:58:38.661411Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:58:38.661498Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:58:38.662939Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21342, node 3 2025-03-31T15:58:38.939840Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:58:38.939873Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:58:38.939879Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:58:38.940035Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T15:58:39.119264Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-31T15:58:39.130358Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-31T15:58:39.130395Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T15:58:39.131197Z node 3 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:10579, port: 10579 2025-03-31T15:58:39.131268Z node 3 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-31T15:58:39.160772Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-31T15:58:39.240224Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****cH_g (77C13D8F) () has now valid token of ldapuser@ldap 
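The traversal above shows the two-stage nested-group resolution: the provider first asks the server to expand membership in a single query with the Active Directory matching-rule-in-chain filter (member:1.2.840.113556.1.4.1941:=<user DN>), then walks the group tree level by level, OR-ing the DNs discovered so far into one search filter per level. A minimal sketch of building that per-level OR filter — BuildTraversalFilter is a hypothetical name for illustration, not the actual LDAP_AUTH_PROVIDER code:

#include <iostream>
#include <string>
#include <vector>

// Hypothetical helper: build the OR filter used for one level of the
// nested-group tree traversal seen in the log, e.g.
// (|(entryDn=cn=managers,...)(entryDn=cn=developers,...))
std::string BuildTraversalFilter(const std::vector<std::string>& groupDns) {
    std::string filter = "(|";
    for (const auto& dn : groupDns) {
        filter += "(entryDn=" + dn + ")";
    }
    filter += ")";
    return filter;
}

int main() {
    const std::vector<std::string> dns = {
        "cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net",
        "cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net",
    };
    std::cout << BuildTraversalFilter(dns) << "\n";
    return 0;
}

Feeding it the two group DNs from the log reproduces the (|(entryDn=...)(entryDn=...)) filter that appears in the search entries above; the traversal terminates when a level yields no new parent groups.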
2025-03-31T15:58:43.793025Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488003709163080407:2218];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0017a1/r3tmp/tmpUbqYFU/pdisk_1.dat 2025-03-31T15:58:44.065839Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T15:58:44.142457Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:58:44.198480Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:58:44.198648Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:58:44.204460Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22600, node 4 2025-03-31T15:58:44.417832Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:58:44.417865Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:58:44.417872Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:58:44.418008Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T15:58:44.759235Z node 4 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-31T15:58:44.763462Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-31T15:58:44.763503Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T15:58:44.764312Z node 4 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://qqq:12594 ldap://localhost:12594 ldap://localhost:11111, port: 12594 2025-03-31T15:58:44.764394Z node 4 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-31T15:58:44.793150Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-31T15:58:44.835417Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-03-31T15:58:44.835852Z node 4 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-03-31T15:58:44.835902Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-31T15:58:44.879571Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-31T15:58:44.927764Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-31T15:58:44.928627Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****ZP0A (AF81D690) () has now valid token of ldapuser@ldap 
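Note that TICKET_PARSER never logs a raw credential: tickets appear as eyJh****ZP0A followed by a short hash. A minimal masking sketch, under the assumption (inferred from the visible pattern, not confirmed by source) that the first and last four characters are kept; MaskTicket is a hypothetical helper, not YDB's actual implementation:

#include <iostream>
#include <string>

// Hypothetical sketch of the token masking visible in the log
// ("eyJh****ZP0A"): keep the first and last four characters and
// replace the middle with "****" so raw credentials never reach logs.
std::string MaskTicket(const std::string& ticket) {
    if (ticket.size() <= 8) {
        return "****";  // too short to expose any prefix/suffix safely
    }
    return ticket.substr(0, 4) + "****" + ticket.substr(ticket.size() - 4);
}

int main() {
    std::cout << MaskTicket("eyJhbGciOiJSUzI1NiJ9.payload.sig") << "\n";  // -> eyJh****.sig
    return 0;
}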
2025-03-31T15:58:50.558777Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7488003741008537092:2200];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:58:50.639893Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0017a1/r3tmp/tmpRo8mY7/pdisk_1.dat 2025-03-31T15:58:50.813829Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:58:50.876230Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:58:50.876314Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:58:50.883731Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17151, node 5 2025-03-31T15:58:51.055867Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:58:51.055893Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:58:51.055900Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:58:51.056041Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T15:58:51.164746Z node 5 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-31T15:58:51.172606Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-31T15:58:51.172643Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T15:58:51.173370Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:8197, port: 8197 2025-03-31T15:58:51.173467Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-31T15:58:51.205417Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: groupDN 2025-03-31T15:58:51.250354Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-03-31T15:58:51.251216Z node 5 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-03-31T15:58:51.251262Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2025-03-31T15:58:51.296469Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2025-03-31T15:58:51.344789Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2025-03-31T15:58:51.347512Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****2-2g (F542F4B0) () has now valid token of ldapuser@ldap 2025-03-31T15:58:56.128089Z node 6 
:METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7488003765019051297:2209];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:58:56.347111Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0017a1/r3tmp/tmpapS8xb/pdisk_1.dat 2025-03-31T15:58:56.600887Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:58:56.628050Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:58:56.628143Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:58:56.640533Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19116, node 6 2025-03-31T15:58:56.819765Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:58:56.819790Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:58:56.819798Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:58:56.819943Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T15:58:57.139239Z node 6 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-31T15:58:57.147612Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-31T15:58:57.147645Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T15:58:57.148420Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:31378, port: 31378 2025-03-31T15:58:57.148529Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-31T15:58:57.190510Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: &(uid=ldapuser)(), attributes: memberOf 2025-03-31T15:58:57.190634Z node 6 :LDAP_AUTH_PROVIDER DEBUG: Could not perform search for filter &(uid=ldapuser)() on server ldap://localhost:31378. Bad search filter 2025-03-31T15:58:57.190900Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****aG2g (625F4E75) () has now permanent error message 'Could not login via LDAP (Could not perform search for filter &(uid=ldapuser)() on server ldap://localhost:31378. 
Bad search filter)' >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus2Block >> TBlobStorageProxyTest::TestProxyLongTailDiscover [GOOD] >> TBlobStorageProxyTest::TestProxyLongTailDiscoverMaxi ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::PartitionSplit_AutosplitByLoad [GOOD] Test command err: 2025-03-31T15:55:35.972989Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488002902167597236:2131];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:55:35.999326Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T15:55:36.526872Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00193b/r3tmp/tmpjPgiyo/pdisk_1.dat 2025-03-31T15:55:36.904278Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:55:36.908243Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:55:36.908576Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:55:36.911159Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62230, node 1 2025-03-31T15:55:37.249765Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/0l5c/00193b/r3tmp/yandexcittVG.tmp 2025-03-31T15:55:37.249790Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/0l5c/00193b/r3tmp/yandexcittVG.tmp 2025-03-31T15:55:37.249925Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/0l5c/00193b/r3tmp/yandexcittVG.tmp 2025-03-31T15:55:37.250025Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T15:55:37.312511Z INFO: TTestServer started on Port 26388 GrpcPort 62230 TClient is connected to server localhost:26388 PQClient connected to localhost:62230 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:55:37.746070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 
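The node-6 run above was rejected with "Bad search filter" because &(uid=ldapuser)() is not a legal RFC 4515 filter: the expression is not fully parenthesized and it contains an empty () component. The real validation happens inside the LDAP library itself; the following is only a simplified, hypothetical pre-check illustrating those two violations:

#include <iostream>
#include <string>

// Simplified, hypothetical syntax check in the spirit of the
// "Bad search filter" rejection above: an RFC 4515 filter must be
// fully parenthesized, balanced, and free of empty "()" components.
// (Real filters are validated by the LDAP library; this sketch also
// accepts some strings a full parser would reject.)
bool LooksLikeValidFilter(const std::string& f) {
    if (f.empty() || f.front() != '(' || f.back() != ')') {
        return false;  // "&(uid=ldapuser)()" fails here: no outer parens
    }
    int depth = 0;
    for (size_t i = 0; i < f.size(); ++i) {
        if (f[i] == '(') {
            if (i + 1 < f.size() && f[i + 1] == ')') {
                return false;  // empty "()" component
            }
            ++depth;
        } else if (f[i] == ')') {
            if (--depth < 0) {
                return false;  // unbalanced parentheses
            }
        }
    }
    return depth == 0;
}

int main() {
    std::cout << LooksLikeValidFilter("(uid=ldapuser)") << "\n";     // 1
    std::cout << LooksLikeValidFilter("&(uid=ldapuser)()") << "\n";  // 0
    return 0;
}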
2025-03-31T15:55:37.939943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-03-31T15:55:40.976247Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488002902167597236:2131];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:55:40.976298Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:55:47.717878Z node 1 :KQP_PROXY ERROR: TraceId: "01jqpemsrkdcsz0adgyr8ncprw", Request deadline has expired for 3.231767s seconds 2025-03-31T15:55:47.795373Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488002953707205576:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:55:47.795505Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:55:47.795967Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488002953707205592:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:55:47.801314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-03-31T15:55:47.809775Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488002953707205594:2358], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-03-31T15:55:48.413405Z node 1 :TX_PROXY ERROR: Actor# [1:7488002953707205658:2477] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:55:48.684630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T15:55:48.889973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T15:55:49.049344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T15:55:49.058714Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488002958002172985:2369], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T15:55:49.060347Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Y2Q5NDViYjctOWYyNDM0NmEtNDAwYjMyNGYtOTM0Y2JiNWE=, ActorId: [1:7488002953707205547:2347], ActorState: ExecuteState, TraceId: 01jqpen1m5b5mtfykk1xgvbbr2, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T15:55:49.062271Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7488002962297140560:2663] 2025-03-31T15:55:51.896145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T15:55:51.896170Z node 1 :IMPORT WARN: Table profiles were not loaded === CheckClustersList. Ok 2025-03-31T15:55:54.807541Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-03-31T15:55:55.015645Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:7488002906462564928:2210]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T15:55:55.015980Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T15:55:55.016026Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:7488002906462564928:2210], Recipient [1:7488002906462564928:2210]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T15:55:55.016040Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T15:55:55.094661Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-03-31T15:55:55.112093Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7488002988066944529:2773], Recipient [1:7488002906462564928:2210]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T15:55:55.112121Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T15:55:55.112130Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-03-31T15:55:55.112455Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7488002988066944525:2770], Recipient [1:7488002906462564928:2210]: {TEvModifySchemeTransaction txid# 281474976710671 TabletId# 72057594046644480} 2025-03-31T15:55:55.112466Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-03-31T15:55:55.318366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" 
OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 100 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976710671 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-03-31T15:55:55.318967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T15:55:55.319546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as ... PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-31T15:58:59.360298Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-03-31T15:58:59.437889Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7488003759996214264:3009], Partition 3, Sender [0:0:0], Recipient [5:7488003759996214382:3026], Cookie: 0 2025-03-31T15:58:59.437965Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7488003759996214382:3026]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-31T15:58:59.437992Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-31T15:58:59.438034Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] Have 0 items to delete old stuff 2025-03-31T15:58:59.438101Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-31T15:58:59.438123Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-31T15:58:59.438151Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-03-31T15:58:59.438209Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7488003674096866132:2473], Partition 0, Sender [0:0:0], Recipient [5:7488003674096866194:2477], Cookie: 0 2025-03-31T15:58:59.438241Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7488003674096866194:2477]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-31T15:58:59.438254Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-31T15:58:59.438275Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-03-31T15:58:59.438302Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-31T15:58:59.438329Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-31T15:58:59.438346Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-03-31T15:58:59.438396Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7488003734226409899:2874], Partition 2, Sender [0:0:0], Recipient [5:7488003738521377280:2887], Cookie: 0 2025-03-31T15:58:59.438430Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7488003738521377280:2887]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-31T15:58:59.438443Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-31T15:58:59.438463Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-03-31T15:58:59.438493Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-31T15:58:59.438506Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-31T15:58:59.438523Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-03-31T15:58:59.438598Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7488003759996214257:3008], Partition 4, Sender [0:0:0], Recipient [5:7488003759996214380:3024], Cookie: 0 2025-03-31T15:58:59.438630Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7488003759996214380:3024]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-31T15:58:59.438642Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-31T15:58:59.438662Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] Have 0 items to delete old stuff 2025-03-31T15:58:59.438689Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-31T15:58:59.438702Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-31T15:58:59.438717Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-03-31T15:58:59.438756Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7488003734226409900:2875], Partition 1, Sender [0:0:0], Recipient [5:7488003738521377284:2889], Cookie: 0 2025-03-31T15:58:59.438790Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7488003738521377284:2889]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-31T15:58:59.438802Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-31T15:58:59.438823Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-03-31T15:58:59.438853Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-31T15:58:59.438867Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-31T15:58:59.438881Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-03-31T15:58:59.541771Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7488003759996214257:3008], Partition 4, Sender [0:0:0], Recipient [5:7488003759996214380:3024], Cookie: 0 2025-03-31T15:58:59.541858Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7488003759996214380:3024]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-31T15:58:59.541884Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-31T15:58:59.541931Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] Have 0 items to delete old stuff 2025-03-31T15:58:59.542014Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-31T15:58:59.542036Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-31T15:58:59.542063Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-03-31T15:58:59.542131Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7488003734226409900:2875], Partition 1, Sender [0:0:0], Recipient [5:7488003738521377284:2889], Cookie: 0 2025-03-31T15:58:59.542170Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7488003738521377284:2889]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-31T15:58:59.542184Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-31T15:58:59.542211Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-03-31T15:58:59.542245Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-31T15:58:59.542263Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-31T15:58:59.542280Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-03-31T15:58:59.542332Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7488003759996214264:3009], Partition 3, Sender [0:0:0], Recipient [5:7488003759996214382:3026], Cookie: 0 2025-03-31T15:58:59.542393Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7488003759996214382:3026]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-31T15:58:59.542411Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-31T15:58:59.542437Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] Have 0 items to delete old stuff 2025-03-31T15:58:59.542468Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-31T15:58:59.542483Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-31T15:58:59.542501Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-03-31T15:58:59.542550Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7488003674096866132:2473], Partition 0, Sender [0:0:0], Recipient [5:7488003674096866194:2477], Cookie: 0 2025-03-31T15:58:59.542584Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7488003674096866194:2477]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-31T15:58:59.542598Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-31T15:58:59.542619Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-03-31T15:58:59.542649Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-31T15:58:59.542664Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-31T15:58:59.542680Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-03-31T15:58:59.542720Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7488003734226409899:2874], Partition 2, Sender [0:0:0], Recipient [5:7488003738521377280:2887], Cookie: 0 2025-03-31T15:58:59.542752Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7488003738521377280:2887]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-31T15:58:59.542764Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-31T15:58:59.542793Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-03-31T15:58:59.542843Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-31T15:58:59.542862Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-31T15:58:59.542882Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> PrivateApi::Nodes [GOOD] Test command err: 2025-03-31T15:58:12.089360Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488003575206131845:2280];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:58:12.089411Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0331 15:58:12.683314124 40647 dns_resolver.cc:162] no server name supplied in dns URI E0331 15:58:12.683507599 40647 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-03-31T15:58:13.094744Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:13.840321Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:9196: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:9196 } ] 2025-03-31T15:58:13.969864Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:322: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:9196: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:9196 2025-03-31T15:58:14.095914Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:15.099728Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:15.784697Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:9196: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:9196 } ] 2025-03-31T15:58:16.103347Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:17.089829Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488003575206131845:2280];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:58:17.089883Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:58:17.104008Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:17.171864Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T15:58:17.173762Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7488003596680968547:2313], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:58:17.233856Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7488003596680968547:2313], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:58:17.421244Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7488003596680968547:2313], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000cf0/r3tmp/tmpp8Hu2J/pdisk_1.dat E0331 15:58:17.699772968 41001 dns_resolver.cc:162] no server name supplied in dns URI E0331 15:58:17.699952388 41001 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-03-31T15:58:17.800954Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7488003596680968547:2313], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } TServer::EnableGrpc on GrpcPort 9196, node 1 2025-03-31T15:58:18.006725Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T15:58:18.006890Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T15:58:18.207493Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:58:18.207517Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:58:18.207525Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:58:18.213905Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:58:18.215339Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T15:58:18.489150Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/compute_databases". Create session OK 2025-03-31T15:58:18.496016Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/pending_small". Create session OK 2025-03-31T15:58:18.496042Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-03-31T15:58:18.496050Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-03-31T15:58:18.498332Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-03-31T15:58:18.498350Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" TClient is connected to server localhost:25130 WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-03-31T15:58:18.500364Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenants". Create session OK 2025-03-31T15:58:18.500392Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-03-31T15:58:18.500398Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-03-31T15:58:18.501267Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/connections". Create session OK 2025-03-31T15:58:18.501278Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-03-31T15:58:18.501281Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-03-31T15:58:18.501849Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenant_acks". Create session OK 2025-03-31T15:58:18.501858Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-03-31T15:58:18.501861Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-03-31T15:58:18.521841Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/jobs". Create session OK 2025-03-31T15:58:18.521861Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-03-31T15:58:18.521868Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-03-31T15:58:18.533029Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/mappings". Create session OK 2025-03-31T15:58:18.533048Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-03-31T15:58:18.533055Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-03-31T15:58:18.535312Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/quotas". 
Create session OK 2025-03-31T15:58:18.535336Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-03-31T15:58:18.535342Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-03-31T15:58:18.541343Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/nodes". Create session OK 2025-03-31T15:58:18.541370Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-03-31T15:58:18.541376Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-03-31T15:58:18.542877Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/queries". Create session OK 2025-03-31T15:58:18.542911Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-03-31T15:58:18.542923Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-03-31T15:58:18.543569Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/result_sets". Create session OK 2025-03-31T15:58:18.543586Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-03-31T15:58:18.543598Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-03-31T15:58:18.544277Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/bindings". Create session OK 2025-03-31T15:58:18.544305Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-03-31T15:58:18.544311Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-03-31T15:58:18.545145Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/idempotency_keys". Create session OK 2025-03-31T15:58:18.545171Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-03-31T15:58:18.545177Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-03-31T15:58:18.574955Z node 1 :TX_PROXY ERROR: Actor# [1:7488003600975936362:2506] txid# 281474976715660, issues: { message: "database doesn\'t have storage pools at all to create tablet channels to storage pool binding by profile id" severity: 1 } 2025-03-31T15:58:18.576970Z node 1 :TX_PROXY ERROR: Actor# [1:7488003600975936361:2505] txid# 281474976715659, issues: { message: "database doesn\'t have storage pools at all to create tablet channels to storage pool binding by profile id" severity: 1 } 2025-03-31T15:58:18.577140Z node 1 :TX_PROXY ERROR: Actor# [1:7488003600975936390:2520] txid# 281474976715663, issues: { message: "database doesn\'t have storage pools at all to create tablet channels to storage pool binding by profile id" severity: 1 } 2025-03-31T15:58:18.577219Z node 1 :TX_PROXY ERROR: Actor# [1:7488003600975936388:2518] txid# 281474976715661, issues: { message: "database doesn\'t have storage pools at all to create tablet channels to storage pool binding by profile id" severity: 1 } 2025-03-31T15:58:18.577290Z node 1 :TX_PROXY ERROR: Actor# [1:7488003600975936389:2519] txid# 281474976715662, issues: { message: "database doesn\'t have storage pools at all to create tablet channels to storage pool binding by profile id" severity: 1 } 2025-03-31T15:58:18.577497Z node 1 :TX_PROXY ERROR: Actor# [1:7488003600975936359:2503] txid# 281474976715658, issues: { message: "database doesn\'t have storage pools at all to create tablet channels to storage pool binding by profile id" severity: 1 } 2025-03-31T15:58:18.582937Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create table "Root/yq/pending_small" error: BAD_REQUEST {
: Error: database doesn't have storage pools at all to create tablet channels to storage pool binding by profile id } 2025-03-31T15:58:18.587971Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-03-31T15:58:18.58800 ... : [7:7488003773314775640:2441], TxId: 281474976715680, task: 1. Ctx: { SessionId : ydb://session/3?node_id=7&id=ZDMyYzRiM2EtMjk3NmMwMzctZDZkNTZmNmUtZDk4ZjI1YmM=. CustomerSuppliedId : . TraceId : 01jqpetvd118zmss97wermats8. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Start compute actor [7:7488003773314775640:2441], task: 1 2025-03-31T15:58:58.133024Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7488003773314775640:2441], TxId: 281474976715680, task: 1. Ctx: { SessionId : ydb://session/3?node_id=7&id=ZDMyYzRiM2EtMjk3NmMwMzctZDZkNTZmNmUtZDk4ZjI1YmM=. CustomerSuppliedId : . TraceId : 01jqpetvd118zmss97wermats8. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Set execution timeout 299.823682s 2025-03-31T15:58:58.133401Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7488003773314775640:2441], TxId: 281474976715680, task: 1. Ctx: { SessionId : ydb://session/3?node_id=7&id=ZDMyYzRiM2EtMjk3NmMwMzctZDZkNTZmNmUtZDk4ZjI1YmM=. CustomerSuppliedId : . TraceId : 01jqpetvd118zmss97wermats8. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Create sink for output 0 { Sink { Type: "KqpTableSink" Settings { type_url: "type.googleapis.com/NKikimrKqp.TKqpTableSinkSettings" value: "\032\035\n\rRoot/yq/nodes\020\200\202\224\204\200\200\200\200\001\030\016(\001\"\r\n\006tenant\020\001 \201 \"\r\n\007node_id\020\003 \002*\024\n\016active_workers\020\005 \004*\022\n\013data_center\020\013 \201 *\017\n\texpire_at\020\010 2*\017\n\010hostname\020\004 \201 *\022\n\013instance_id\020\002 \201 *\027\n\021interconnect_port\020\t \002*\026\n\020memory_allocated\020\007 \004*\022\n\014memory_limit\020\006 \004*\023\n\014node_address\020\n \201 *\r\n\007node_id\020\003 \002*\r\n\006tenant\020\001 \201 0\235\247\200\200\200\200@8\007H\001R\022\tN:69\302\273\352g\021\211\t\000\000\007\000\020\000X\000`\000h\004h\nh\007h\003h\002h\010h\006h\005h\th\001h\000x\000" } } } 2025-03-31T15:58:58.133591Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7488003773314775640:2441], TxId: 281474976715680, task: 1. Ctx: { SessionId : ydb://session/3?node_id=7&id=ZDMyYzRiM2EtMjk3NmMwMzctZDZkNTZmNmUtZDk4ZjI1YmM=. CustomerSuppliedId : . TraceId : 01jqpetvd118zmss97wermats8. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646926 2025-03-31T15:58:58.133615Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7488003773314775640:2441], TxId: 281474976715680, task: 1. Ctx: { SessionId : ydb://session/3?node_id=7&id=ZDMyYzRiM2EtMjk3NmMwMzctZDZkNTZmNmUtZDk4ZjI1YmM=. CustomerSuppliedId : . TraceId : 01jqpetvd118zmss97wermats8. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Received channels info: 2025-03-31T15:58:58.133686Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7488003773314775640:2441], TxId: 281474976715680, task: 1. Ctx: { SessionId : ydb://session/3?node_id=7&id=ZDMyYzRiM2EtMjk3NmMwMzctZDZkNTZmNmUtZDk4ZjI1YmM=. CustomerSuppliedId : . TraceId : 01jqpetvd118zmss97wermats8. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. About to drain async output 0. FreeSpace: 67108864, allowedOvercommit: 4194304, toSend: 71303168, finished: 0 2025-03-31T15:58:58.133789Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715680, task: 1. 
Add data: 122 / 122 2025-03-31T15:58:58.133843Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715680, task: 1. Send data=122, closed=1, bufferActorId=[7:7488003773314775630:2441] 2025-03-31T15:58:58.133865Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7488003773314775640:2441], TxId: 281474976715680, task: 1. Ctx: { SessionId : ydb://session/3?node_id=7&id=ZDMyYzRiM2EtMjk3NmMwMzctZDZkNTZmNmUtZDk4ZjI1YmM=. CustomerSuppliedId : . TraceId : 01jqpetvd118zmss97wermats8. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Drain async output 0. Free space decreased: -9223372036787666944, sent data from buffer: 122 2025-03-31T15:58:58.133883Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715680, task: 1. Tasks execution finished 2025-03-31T15:58:58.133895Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7488003773314775640:2441], TxId: 281474976715680, task: 1. Ctx: { SessionId : ydb://session/3?node_id=7&id=ZDMyYzRiM2EtMjk3NmMwMzctZDZkNTZmNmUtZDk4ZjI1YmM=. CustomerSuppliedId : . TraceId : 01jqpetvd118zmss97wermats8. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Waiting finish of sink[0] 2025-03-31T15:58:58.133915Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7488003773314775640:2441], TxId: 281474976715680, task: 1. Ctx: { SessionId : ydb://session/3?node_id=7&id=ZDMyYzRiM2EtMjk3NmMwMzctZDZkNTZmNmUtZDk4ZjI1YmM=. CustomerSuppliedId : . TraceId : 01jqpetvd118zmss97wermats8. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646926 2025-03-31T15:58:58.133936Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7488003773314775640:2441], TxId: 281474976715680, task: 1. Ctx: { SessionId : ydb://session/3?node_id=7&id=ZDMyYzRiM2EtMjk3NmMwMzctZDZkNTZmNmUtZDk4ZjI1YmM=. CustomerSuppliedId : . TraceId : 01jqpetvd118zmss97wermats8. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Received channels info: 2025-03-31T15:58:58.133951Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715680, task: 1. Tasks execution finished 2025-03-31T15:58:58.133959Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7488003773314775640:2441], TxId: 281474976715680, task: 1. Ctx: { SessionId : ydb://session/3?node_id=7&id=ZDMyYzRiM2EtMjk3NmMwMzctZDZkNTZmNmUtZDk4ZjI1YmM=. CustomerSuppliedId : . TraceId : 01jqpetvd118zmss97wermats8. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Waiting finish of sink[0] 2025-03-31T15:58:58.133987Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7488003773314775640:2441], TxId: 281474976715680, task: 1. Ctx: { SessionId : ydb://session/3?node_id=7&id=ZDMyYzRiM2EtMjk3NmMwMzctZDZkNTZmNmUtZDk4ZjI1YmM=. CustomerSuppliedId : . TraceId : 01jqpetvd118zmss97wermats8. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2025-03-31T15:58:58.134003Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715680, task: 1. Tasks execution finished 2025-03-31T15:58:58.134013Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7488003773314775640:2441], TxId: 281474976715680, task: 1. Ctx: { SessionId : ydb://session/3?node_id=7&id=ZDMyYzRiM2EtMjk3NmMwMzctZDZkNTZmNmUtZDk4ZjI1YmM=. CustomerSuppliedId : . TraceId : 01jqpetvd118zmss97wermats8. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Waiting finish of sink[0] 2025-03-31T15:58:58.134086Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7488003773314775630:2441], SessionActorId: [7:7488003764724839862:2441], Create new TableWriteActor for table `Root/yq/nodes` ([72057594046644480:14:1]). 
lockId=281474976715677 [7:7488003773314775643:2441] 2025-03-31T15:58:58.134129Z node 7 :KQP_COMPUTE DEBUG: Table: `Root/yq/nodes` ([72057594046644480:14:1]), SessionActorId: [7:7488003764724839862:2441]Open: token=0 2025-03-31T15:58:58.134149Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7488003773314775630:2441], SessionActorId: [7:7488003764724839862:2441], ProcessRequestQueue [72057594046644480:14:1] NOT READY queue=1 2025-03-31T15:58:58.134193Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7488003773314775643:2441], Table: `Root/yq/nodes` ([72057594046644480:14:1]), SessionActorId: [7:7488003764724839862:2441]Write: token=0 2025-03-31T15:58:58.134297Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7488003773314775643:2441], Table: `Root/yq/nodes` ([72057594046644480:14:1]), SessionActorId: [7:7488003764724839862:2441]Close: token=0 2025-03-31T15:58:58.134370Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7488003773314775642:2441], TxId: 281474976715680, task: 1. TKqpForwardWriteActor recieve EvBufferWriteResult from [7:7488003773314775630:2441] 2025-03-31T15:58:58.134384Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7488003773314775642:2441], TxId: 281474976715680, task: 1. Finished 2025-03-31T15:58:58.134412Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7488003773314775640:2441], TxId: 281474976715680, task: 1. Ctx: { SessionId : ydb://session/3?node_id=7&id=ZDMyYzRiM2EtMjk3NmMwMzctZDZkNTZmNmUtZDk4ZjI1YmM=. CustomerSuppliedId : . TraceId : 01jqpetvd118zmss97wermats8. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2025-03-31T15:58:58.134431Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715680, task: 1. Tasks execution finished 2025-03-31T15:58:58.134452Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7488003773314775640:2441], TxId: 281474976715680, task: 1. Ctx: { SessionId : ydb://session/3?node_id=7&id=ZDMyYzRiM2EtMjk3NmMwMzctZDZkNTZmNmUtZDk4ZjI1YmM=. CustomerSuppliedId : . TraceId : 01jqpetvd118zmss97wermats8. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Compute state finished. All channels and sinks finished 2025-03-31T15:58:58.134520Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715680, task: 1. pass away 2025-03-31T15:58:58.134590Z node 7 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715680;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-03-31T15:58:58.134902Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7488003773314775630:2441], SessionActorId: [7:7488003764724839862:2441], Start immediate commit 2025-03-31T15:58:58.134914Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7488003773314775643:2441], Table: `Root/yq/nodes` ([72057594046644480:14:1]), SessionActorId: [7:7488003764724839862:2441]SetImmediateCommit 2025-03-31T15:58:58.134927Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7488003773314775630:2441], SessionActorId: [7:7488003764724839862:2441], Flush data 2025-03-31T15:58:58.137437Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7488003773314775643:2441], Table: `Root/yq/nodes` ([72057594046644480:14:1]), SessionActorId: [7:7488003764724839862:2441]Send EvWrite to ShardID=72075186224037899, isPrepare=0, isImmediateCommit=1, TxId=0, LockTxId=0, LockNodeId=0, Locks= LockId: 281474976715677 DataShard: 72075186224037899 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 14, Size=228, Cookie=1, OperationsCount=1, IsFinal=1, Attempts=0, Mode=3 2025-03-31T15:58:58.140606Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7488003773314775639:2397], Table: `Root/yq/nodes` ([72057594046644480:14:1]), SessionActorId: [7:7488003764724839453:2397]Recv EvWriteResult from ShardID=72075186224037899, Status=STATUS_COMPLETED, TxId=4, Locks= , Cookie=1 2025-03-31T15:58:58.140639Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7488003773314775639:2397], Table: `Root/yq/nodes` ([72057594046644480:14:1]), SessionActorId: [7:7488003764724839453:2397]Got completed result TxId=4, TabletId=72075186224037899, Cookie=1, Mode=3, Locks= 2025-03-31T15:58:58.140691Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7488003773314775628:2397], SessionActorId: [7:7488003764724839453:2397], Committed TxId=0 2025-03-31T15:58:58.143210Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7488003773314775643:2441], Table: `Root/yq/nodes` ([72057594046644480:14:1]), SessionActorId: [7:7488003764724839862:2441]Recv EvWriteResult from ShardID=72075186224037899, Status=STATUS_COMPLETED, TxId=5, Locks= , Cookie=1 2025-03-31T15:58:58.143237Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7488003773314775643:2441], Table: `Root/yq/nodes` ([72057594046644480:14:1]), SessionActorId: [7:7488003764724839862:2441]Got completed result TxId=5, TabletId=72075186224037899, Cookie=1, Mode=3, Locks= 2025-03-31T15:58:58.143286Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7488003773314775630:2441], SessionActorId: [7:7488003764724839862:2441], Committed TxId=0 2025-03-31T15:58:58.868132Z node 7 :FQ_PENDING_FETCHER ERROR: Error with GetTask:
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv6:%5B::%5D:13879: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint [::]:13879 >> KqpIndexLookupJoin::JoinByComplexKeyWithNullComponents+StreamLookupJoin [GOOD] >> KqpIndexLookupJoin::JoinByComplexKeyWithNullComponents-StreamLookupJoin >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus1Block ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::ModifyQuery [GOOD] Test command err: 2025-03-31T15:58:11.811505Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488003570780657786:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:58:11.811549Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0331 15:58:12.445541486 40424 dns_resolver.cc:162] no server name supplied in dns URI E0331 15:58:12.445714994 40424 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-03-31T15:58:12.824499Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:13.423274Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:8154: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8154 } ] 2025-03-31T15:58:13.743950Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:322: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:8154: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:8154 2025-03-31T15:58:13.839807Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:14.846474Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:15.175364Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:8154: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8154 } ] 2025-03-31T15:58:15.855224Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:16.815015Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488003570780657786:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:58:16.815076Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:58:16.855686Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; E0331 15:58:17.447784749 40794 dns_resolver.cc:162] no server name supplied in dns URI E0331 15:58:17.447974751 40794 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-03-31T15:58:17.712891Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7488003596550461970:2312], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:58:17.713073Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000cf5/r3tmp/tmpAnBA4l/pdisk_1.dat 2025-03-31T15:58:17.812572Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7488003596550461970:2312], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:58:17.827231Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:17.856970Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:17.939621Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:322: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:8154: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:8154 2025-03-31T15:58:17.939707Z node 1 :FQ_PENDING_FETCHER ERROR: Error with GetTask:
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:8154: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:8154 2025-03-31T15:58:17.949328Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:8154: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8154 } ] 2025-03-31T15:58:18.071313Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7488003596550461970:2312], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:58:18.148544Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8154, node 1 2025-03-31T15:58:18.283747Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:58:18.283769Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:58:18.283774Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:58:18.283919Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4533 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:58:18.569971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:58:19.984005Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:58:19.984138Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:58:19.995744Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:58:22.220820Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/mappings". Create session OK 2025-03-31T15:58:22.220853Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-03-31T15:58:22.220866Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-03-31T15:58:22.235422Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-31T15:58:22.235561Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenants". Create session OK 2025-03-31T15:58:22.235587Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-03-31T15:58:22.235595Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-03-31T15:58:22.236229Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenant_acks". Create session OK 2025-03-31T15:58:22.236244Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-03-31T15:58:22.236250Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-03-31T15:58:22.236464Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/queries". 
Create session OK 2025-03-31T15:58:22.236476Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-03-31T15:58:22.236481Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-03-31T15:58:22.237101Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/quotas". Create session OK 2025-03-31T15:58:22.237114Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-03-31T15:58:22.237119Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-03-31T15:58:22.240150Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/connections". Create session OK 2025-03-31T15:58:22.240194Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-03-31T15:58:22.240206Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-03-31T15:58:22.241178Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/nodes". Create session OK 2025-03-31T15:58:22.241198Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-03-31T15:58:22.241205Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-03-31T15:58:22.242044Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/compute_databases". Create session OK 2025-03-31T15:58:22.242056Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-03-31T15:58:22.242085Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-03-31T15:58:22.252176Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/result_sets". Create session OK 2025-03-31T15:58:22.252210Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-03-31T15:58:22.252216Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-03-31T15:58:22.261336Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/pending_small". Create session OK 2025-03-31T15:58:22.261362Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-03-31T15:58:22.261373Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-03-31T15:58: ... vent from: [4:7488003777937089854:2834], status: ABORTED, reason: {
: Error: Terminate execution } 2025-03-31T15:58:59.220708Z node 4 :YQ_CONTROL_PLANE_STORAGE WARN: DB Error, Status: CLIENT_CANCELLED, Issues: [ {
: Error: GRpc error: (1): Cancelled on the server side } {
: Error: Grpc error response on endpoint localhost:8133 } ], Query: --!syntax_v1 -- Query name: HardPingTask(read) PRAGMA TablePathPrefix("Root/yq"); DECLARE $tenant as String; DECLARE $scope as String; DECLARE $query_id as String; $last_job_id = SELECT `last_job_id` FROM `queries` WHERE `scope` = $scope AND `query_id` = $query_id; SELECT `query`, `internal` FROM `queries` WHERE `scope` = $scope AND `query_id` = $query_id; SELECT `job_id`, `job` FROM `jobs` WHERE `scope` = $scope AND `query_id` = $query_id AND `job_id` = $last_job_id; SELECT `owner`, `retry_counter`, `retry_counter_updated_at`, `retry_rate` FROM `pending_small` WHERE `tenant` = $tenant AND `scope` = $scope AND `query_id` = $query_id; 2025-03-31T15:58:59.220861Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715823, task: 2. pass away 2025-03-31T15:58:59.221026Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715823;task_id=2;success=0;message={
: Error: COMPUTE_STATE_FAILURE }; 2025-03-31T15:58:59.221288Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003777937089860:3163], TxId: 281474976715823, task: 1. Ctx: { TraceId : 01jqpetwjx2a08gzv2rerj6tm7. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=Zjk4NmUyODctOGU0M2NkZjUtNTUyMTI3NzAtZGRmMTIyYmI=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646927 2025-03-31T15:58:59.221324Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003777937089860:3163], TxId: 281474976715823, task: 1. Ctx: { TraceId : 01jqpetwjx2a08gzv2rerj6tm7. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=Zjk4NmUyODctOGU0M2NkZjUtNTUyMTI3NzAtZGRmMTIyYmI=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646735 2025-03-31T15:58:59.221355Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7488003777937089860:3163], TxId: 281474976715823, task: 1. Ctx: { TraceId : 01jqpetwjx2a08gzv2rerj6tm7. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=Zjk4NmUyODctOGU0M2NkZjUtNTUyMTI3NzAtZGRmMTIyYmI=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Handle abort execution event from: [4:7488003777937089854:2834], status: ABORTED, reason: {
: Error: Terminate execution } 2025-03-31T15:58:59.221400Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715823, task: 1. pass away 2025-03-31T15:58:59.221444Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715823;task_id=1;success=0;message={
: Error: COMPUTE_STATE_FAILURE }; 2025-03-31T15:58:59.221637Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003777937089862:3165], TxId: 281474976715823, task: 5. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=Zjk4NmUyODctOGU0M2NkZjUtNTUyMTI3NzAtZGRmMTIyYmI=. TraceId : 01jqpetwjx2a08gzv2rerj6tm7. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646735 2025-03-31T15:58:59.221663Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7488003777937089862:3165], TxId: 281474976715823, task: 5. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=Zjk4NmUyODctOGU0M2NkZjUtNTUyMTI3NzAtZGRmMTIyYmI=. TraceId : 01jqpetwjx2a08gzv2rerj6tm7. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Handle abort execution event from: [4:7488003777937089854:2834], status: ABORTED, reason: {
: Error: Terminate execution } 2025-03-31T15:58:59.221704Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715823, task: 5. pass away 2025-03-31T15:58:59.221742Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715823;task_id=5;success=0;message={
: Error: COMPUTE_STATE_FAILURE }; 2025-03-31T15:58:59.221853Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003777937089861:3164], TxId: 281474976715823, task: 3. Ctx: { SessionId : ydb://session/3?node_id=4&id=Zjk4NmUyODctOGU0M2NkZjUtNTUyMTI3NzAtZGRmMTIyYmI=. TraceId : 01jqpetwjx2a08gzv2rerj6tm7. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646735 2025-03-31T15:58:59.221880Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7488003777937089861:3164], TxId: 281474976715823, task: 3. Ctx: { SessionId : ydb://session/3?node_id=4&id=Zjk4NmUyODctOGU0M2NkZjUtNTUyMTI3NzAtZGRmMTIyYmI=. TraceId : 01jqpetwjx2a08gzv2rerj6tm7. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Handle abort execution event from: [4:7488003777937089854:2834], status: ABORTED, reason: {
: Error: Terminate execution } 2025-03-31T15:58:59.221919Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715823, task: 3. pass away 2025-03-31T15:58:59.221948Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715823;task_id=3;success=0;message={
: Error: COMPUTE_STATE_FAILURE }; 2025-03-31T15:58:59.222573Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003777937089859:3162], TxId: 281474976715823, task: 4. Ctx: { SessionId : ydb://session/3?node_id=4&id=Zjk4NmUyODctOGU0M2NkZjUtNTUyMTI3NzAtZGRmMTIyYmI=. TraceId : 01jqpetwjx2a08gzv2rerj6tm7. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-03-31T15:58:59.222606Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003777937089859:3162], TxId: 281474976715823, task: 4. Ctx: { SessionId : ydb://session/3?node_id=4&id=Zjk4NmUyODctOGU0M2NkZjUtNTUyMTI3NzAtZGRmMTIyYmI=. TraceId : 01jqpetwjx2a08gzv2rerj6tm7. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646735 2025-03-31T15:58:59.222641Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7488003777937089859:3162], TxId: 281474976715823, task: 4. Ctx: { SessionId : ydb://session/3?node_id=4&id=Zjk4NmUyODctOGU0M2NkZjUtNTUyMTI3NzAtZGRmMTIyYmI=. TraceId : 01jqpetwjx2a08gzv2rerj6tm7. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Handle abort execution event from: [4:7488003777937089854:2834], status: ABORTED, reason: {
: Error: Terminate execution } 2025-03-31T15:58:59.222714Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715823, task: 4. pass away 2025-03-31T15:58:59.222765Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715823;task_id=4;success=0;message={
: Error: COMPUTE_STATE_FAILURE }; 2025-03-31T15:58:59.222951Z node 4 :KQP_EXECUTER ERROR: ActorId: [4:7488003777937089854:2834] TxId: 281474976715823. Ctx: { TraceId: 01jqpetwjx2a08gzv2rerj6tm7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=Zjk4NmUyODctOGU0M2NkZjUtNTUyMTI3NzAtZGRmMTIyYmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Unexpected event while waiting for shutdown: NYql::NDq::TEvDqCompute::TEvChannelData 2025-03-31T15:58:59.224868Z node 4 :YQ_CONTROL_PLANE_STORAGE WARN: PingTaskRequest - PingTaskResult: {owner_id: "1fa2e4d0-1671134-857a5b96-9ef887a815" query_id { value: "utqueeba0km1av467l92" } ast: "(\n(let $1 (Configure! world (DataSource \'\"config\") \'\"DqEngine\" \'\"force\"))\n(let $2 (DataSink \'result))\n(let $3 \'(\'(\'type) \'(\'autoref) \'(\'columns \'(\'\"column0\"))))\n(let $4 (ResFill! $1 $2 (Key) (AsList (AsStruct \'(\'\"column0\" (Int32 \'\"1\")))) $3 \'\"dq\"))\n(return (Commit! $4 $2))\n)\n" plan: "{\"Detailed\":{\"Operations\":[{\"Id\":2,\"Name\":\"ResFill!\"}],\"OperationRoot\":2,\"Providers\":[],\"OperationStats\":{\"ResFill!\":1}},\"Basic\":{\"nodes\":[{\"id\":2,\"level\":1,\"name\":\"ResFill!\",\"type\":\"op\"}],\"links\":[]}}" scope: "yandexcloud://Execute_folder_id" deadline { seconds: 1743523138 nanos: 783603000 } tenant: "TestTenant" } ERROR: [ {
: Error: GRpc error: (1): Cancelled on the server side } {
: Error: Grpc error response on endpoint localhost:8133 } ] 2025-03-31T15:58:59.225228Z node 4 :YQL_PRIVATE_PROXY ERROR: PrivatePingTask - QueryId: utqueeba0km1av467l92, Owner: 1fa2e4d0-1671134-857a5b96-9ef887a815, Failed with code: GENERIC_ERROR Details:
: Error: GRpc error: (1): Cancelled on the server side
: Error: Grpc error response on endpoint localhost:8133
: Error: ControlPlane PingTaskError 2025-03-31T15:58:59.226113Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=Zjk4NmUyODctOGU0M2NkZjUtNTUyMTI3NzAtZGRmMTIyYmI=, ActorId: [4:7488003739282382094:2834], ActorState: ExecuteState, TraceId: 01jqpetwjx2a08gzv2rerj6tm7, Create QueryResponse for error on request, msg: 2025-03-31T15:58:59.230665Z node 4 :FQ_PINGER WARN: QueryId: utqueeba0km1av467l92, Owner: 1fa2e4d0-1671134-857a5b96-9ef887a815 Ping response error: [ {
: Error: GRpc error: (1): Cancelled on the server side } {
: Error: Grpc error response on endpoint [::]:8133 } ]. Retry after: 0.000000s 2025-03-31T15:58:59.230821Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715824. Ctx: { TraceId: 01jqpetwjx2a08gzv2rerj6tm7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=Zjk4NmUyODctOGU0M2NkZjUtNTUyMTI3NzAtZGRmMTIyYmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:58:59.242649Z node 4 :FQ_PINGER WARN: QueryId: utqueeba0km1av467l92, Owner: 1fa2e4d0-1671134-857a5b96-9ef887a815 Ping response error: [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv6:%5B::%5D:8133: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint [::]:8133 } ]. Retry after: 0.094421s 2025-03-31T15:58:59.248386Z node 4 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:322: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv6:%5B::%5D:8133: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint [::]:8133 2025-03-31T15:58:59.344629Z node 4 :FQ_PINGER WARN: QueryId: utqueeba0km1av467l92, Owner: 1fa2e4d0-1671134-857a5b96-9ef887a815 Ping response error: [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv6:%5B::%5D:8133: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint [::]:8133 } ]. Retry after: 0.166612s 2025-03-31T15:58:59.519514Z node 4 :FQ_PINGER WARN: QueryId: utqueeba0km1av467l92, Owner: 1fa2e4d0-1671134-857a5b96-9ef887a815 Ping response error: [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv6:%5B::%5D:8133: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint [::]:8133 } ]. Retry after: 0.335937s 2025-03-31T15:58:59.691393Z node 4 :FQ_PENDING_FETCHER ERROR: Error with GetTask:
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv6:%5B::%5D:8133: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint [::]:8133 2025-03-31T15:58:59.867548Z node 4 :FQ_PINGER WARN: QueryId: utqueeba0km1av467l92, Owner: 1fa2e4d0-1671134-857a5b96-9ef887a815 Ping response error: [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv6:%5B::%5D:8133: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint [::]:8133 } ]. Retry after: 0.616809s >> TTxDataShardUploadRows::TestUploadRowsDropColumnRace [GOOD] >> TTxDataShardUploadRows::TestUploadRowsLocks >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedBeforeRead+UseSink [GOOD] >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedBeforeRead-UseSink >> KqpIndexes::InnerJoinSecondaryIndexLookupAndRightTablePredicateNonIndexColumn [GOOD] >> KqpIndexes::IndexTopSortPushDown >> Yq_1::DeleteQuery [GOOD] >> TBlobStorageProxyTest::TestDoubleGroups >> TBlobStorageProxyTest::TestVPutVGetLimit [GOOD] >> KqpIndexLookupJoin::MultiJoins [GOOD] >> TBlobStorageProxyTest::TestNormal [GOOD] >> TBlobStorageProxyTest::TestNormalMirror >> Yq_1::Create_And_Modify_The_Same_Connection [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesRestart-UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesWithoutConflicts+UseSink >> TBlobStorageProxyTest::TestInFlightPuts >> TBlobStorageProxyTest::TestVPutVGetPersistence >> KqpIndexLookupJoin::Left-StreamLookup [GOOD] |68.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestVPutVGetLimit [GOOD] >> TBlobStorageProxyTest::TestSingleFailureMirror >> TBlobStorageProxyTest::TestDoubleEmptyGet >> AggregateStatistics::ShouldBeCorrectlyAggregateStatisticsFromAllNodes >> DataShardSnapshots::LockedWriteDistributedCommitCrossConflict+UseSink [GOOD] >> DataShardSnapshots::LockedWriteCleanupOnSplit+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::DeleteQuery [GOOD] Test command err: 2025-03-31T15:58:13.111769Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488003580870211883:2205];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:58:13.111831Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0331 15:58:13.985573782 41219 dns_resolver.cc:162] no server name supplied in dns URI E0331 15:58:13.985750503 41219 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-03-31T15:58:14.398429Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:15.164372Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:322: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:12241: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:12241 2025-03-31T15:58:15.263492Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:12241: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:12241 } ] 2025-03-31T15:58:15.399690Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:16.423635Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:16.964655Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:12241: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:12241 } ] 2025-03-31T15:58:17.430528Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:18.115932Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488003580870211883:2205];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:58:18.115995Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:58:18.430455Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; E0331 15:58:18.984721849 41429 dns_resolver.cc:162] no server name supplied in dns URI E0331 15:58:18.984882097 41429 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-03-31T15:58:19.119550Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:19.431011Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:19.812577Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:12241: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:12241 } ] 2025-03-31T15:58:19.960009Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:322: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:12241: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:12241 2025-03-31T15:58:20.189340Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:20.435652Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:21.187643Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:21.439453Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ce3/r3tmp/tmprCXrkz/pdisk_1.dat 2025-03-31T15:58:22.189092Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7488003619524918154:2323], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:58:22.189241Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T15:58:22.190508Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:22.295378Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7488003619524918154:2323], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } TServer::EnableGrpc on GrpcPort 12241, node 1 2025-03-31T15:58:22.517210Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:58:22.517230Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:58:22.517237Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:58:22.517376Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T15:58:22.517847Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:58:22.703787Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:58:22.703885Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:58:22.708191Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23335 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:58:22.962199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... E0331 15:58:23.990752916 41429 dns_resolver.cc:162] no server name supplied in dns URI E0331 15:58:23.990885326 41429 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-03-31T15:58:24.565466Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/queries". Create session OK 2025-03-31T15:58:24.565507Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-03-31T15:58:24.565518Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-03-31T15:58:24.623269Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/nodes". Create session OK 2025-03-31T15:58:24.623301Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-03-31T15:58:24.623307Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-03-31T15:58:24.624237Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/connections". 
Create session OK 2025-03-31T15:58:24.624249Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-03-31T15:58:24.624254Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-03-31T15:58:24.634576Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-31T15:58:24.634740Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/bindings". Create session OK 2025-03-31T15:58:24.634753Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-03-31T15:58:24.634760Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-03-31T15:58:24.636154Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/result_sets". Create session OK 2025-03-31T15:58:24.636181Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-03-31T15:58:24.636189Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-03-31T15:58:24.644003Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/quotas". Create session OK 2025-03-31T15:58:24.644038Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-03-31T15:58:24.644044Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-03-31T15:58:24.650284Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/jobs". Create session OK 2025-03-31T15:58:24.650298Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-03-31T15:58:24.650305Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-03-31T15:58:24.651962Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/mappings". Create session OK 2025-03-31T15:58:24.651986Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-03-31T15:58:24.651990Z ... } } DstEndpoint { ActorId { RawX1: 7488003790479456151 RawX2: 4503616807242358 } } InMemory: true } 2025-03-31T15:59:02.061324Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003790479456156:3158], TxId: 281474976710838, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=YTc4MTIxYTAtMjc3Yzg2ODctMWJlNjg4NS1jYzc1MDcxMw==. TraceId : 01jqpetyzm22ce8s1003qw26fb. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-03-31T15:59:02.061325Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003790479456155:3157], TxId: 281474976710838, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=YTc4MTIxYTAtMjc3Yzg2ODctMWJlNjg4NS1jYzc1MDcxMw==. CustomerSuppliedId : . TraceId : 01jqpetyzm22ce8s1003qw26fb. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646926 2025-03-31T15:59:02.061384Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003790479456155:3157], TxId: 281474976710838, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=YTc4MTIxYTAtMjc3Yzg2ODctMWJlNjg4NS1jYzc1MDcxMw==. CustomerSuppliedId : . TraceId : 01jqpetyzm22ce8s1003qw26fb. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. 
Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 DstTaskId: 2 SrcEndpoint { ActorId { RawX1: 7488003790479456155 RawX2: 4503616807242837 } } DstEndpoint { ActorId { RawX1: 7488003790479456156 RawX2: 4503616807242838 } } InMemory: true DstStageId: 1 } 2025-03-31T15:59:02.061395Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710838, task: 1, CA Id [4:7488003790479456155:3157]. enter getasyncinputdata results size 0, freeSpace 8388608 2025-03-31T15:59:02.061408Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710838, task: 1, CA Id [4:7488003790479456155:3157]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 1 pending shards 0 finished = 0 has limit 0 limit reached 0 2025-03-31T15:59:02.061434Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003790479456155:3157], TxId: 281474976710838, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=YTc4MTIxYTAtMjc3Yzg2ODctMWJlNjg4NS1jYzc1MDcxMw==. CustomerSuppliedId : . TraceId : 01jqpetyzm22ce8s1003qw26fb. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2025-03-31T15:59:02.061464Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710838, task: 1, CA Id [4:7488003790479456155:3157]. enter getasyncinputdata results size 0, freeSpace 8388608 2025-03-31T15:59:02.061510Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710838, task: 1, CA Id [4:7488003790479456155:3157]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 1 pending shards 0 finished = 0 has limit 0 limit reached 0 2025-03-31T15:59:02.063641Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710838, task: 1, CA Id [4:7488003790479456155:3157]. Recv TEvReadResult from ShardID=72075186224037895, ReadId=0, Status=SUCCESS, Finished=1, RowCount=0, TxLocks= , BrokenTxLocks= 2025-03-31T15:59:02.063676Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710838, task: 1, CA Id [4:7488003790479456155:3157]. Taken 0 locks 2025-03-31T15:59:02.063709Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710838, task: 1, CA Id [4:7488003790479456155:3157]. new data for read #0 seqno = 1 finished = 1 2025-03-31T15:59:02.063741Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003790479456155:3157], TxId: 281474976710838, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=YTc4MTIxYTAtMjc3Yzg2ODctMWJlNjg4NS1jYzc1MDcxMw==. CustomerSuppliedId : . TraceId : 01jqpetyzm22ce8s1003qw26fb. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 276037645 2025-03-31T15:59:02.063773Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003790479456155:3157], TxId: 281474976710838, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=YTc4MTIxYTAtMjc3Yzg2ODctMWJlNjg4NS1jYzc1MDcxMw==. CustomerSuppliedId : . TraceId : 01jqpetyzm22ce8s1003qw26fb. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2025-03-31T15:59:02.063796Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710838, task: 1, CA Id [4:7488003790479456155:3157]. enter getasyncinputdata results size 1, freeSpace 8388608 2025-03-31T15:59:02.063813Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710838, task: 1, CA Id [4:7488003790479456155:3157]. enter pack cells method shardId: 72075186224037895 processedRows: 0 packed rows: 0 freeSpace: 8388608 2025-03-31T15:59:02.063830Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710838, task: 1, CA Id [4:7488003790479456155:3157]. 
exit pack cells method shardId: 72075186224037895 processedRows: 0 packed rows: 0 freeSpace: 8388608 2025-03-31T15:59:02.063843Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710838, task: 1, CA Id [4:7488003790479456155:3157]. returned 0 rows; processed 0 rows 2025-03-31T15:59:02.063878Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710838, task: 1, CA Id [4:7488003790479456155:3157]. dropping batch for read #0 2025-03-31T15:59:02.063890Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710838, task: 1, CA Id [4:7488003790479456155:3157]. effective maxinflight 1024 sorted 0 2025-03-31T15:59:02.063903Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710838, task: 1, CA Id [4:7488003790479456155:3157]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2025-03-31T15:59:02.063918Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710838, task: 1, CA Id [4:7488003790479456155:3157]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2025-03-31T15:59:02.064024Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003790479456155:3157], TxId: 281474976710838, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=YTc4MTIxYTAtMjc3Yzg2ODctMWJlNjg4NS1jYzc1MDcxMw==. CustomerSuppliedId : . TraceId : 01jqpetyzm22ce8s1003qw26fb. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-03-31T15:59:02.064044Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003790479456156:3158], TxId: 281474976710838, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=YTc4MTIxYTAtMjc3Yzg2ODctMWJlNjg4NS1jYzc1MDcxMw==. TraceId : 01jqpetyzm22ce8s1003qw26fb. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646923 2025-03-31T15:59:02.064065Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710838, task: 2. Finish input channelId: 1, from: [4:7488003790479456155:3157] 2025-03-31T15:59:02.064096Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003790479456156:3158], TxId: 281474976710838, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=YTc4MTIxYTAtMjc3Yzg2ODctMWJlNjg4NS1jYzc1MDcxMw==. TraceId : 01jqpetyzm22ce8s1003qw26fb. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-03-31T15:59:02.064156Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003790479456156:3158], TxId: 281474976710838, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=YTc4MTIxYTAtMjc3Yzg2ODctMWJlNjg4NS1jYzc1MDcxMw==. TraceId : 01jqpetyzm22ce8s1003qw26fb. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-03-31T15:59:02.064168Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003790479456155:3157], TxId: 281474976710838, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=YTc4MTIxYTAtMjc3Yzg2ODctMWJlNjg4NS1jYzc1MDcxMw==. CustomerSuppliedId : . TraceId : 01jqpetyzm22ce8s1003qw26fb. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646927 2025-03-31T15:59:02.064185Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003790479456155:3157], TxId: 281474976710838, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=YTc4MTIxYTAtMjc3Yzg2ODctMWJlNjg4NS1jYzc1MDcxMw==. CustomerSuppliedId : . 
TraceId : 01jqpetyzm22ce8s1003qw26fb. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2025-03-31T15:59:02.064211Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710838, task: 1. Tasks execution finished 2025-03-31T15:59:02.064227Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003790479456155:3157], TxId: 281474976710838, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=YTc4MTIxYTAtMjc3Yzg2ODctMWJlNjg4NS1jYzc1MDcxMw==. CustomerSuppliedId : . TraceId : 01jqpetyzm22ce8s1003qw26fb. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Compute state finished. All channels and sinks finished 2025-03-31T15:59:02.064344Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710838, task: 1. pass away 2025-03-31T15:59:02.064449Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976710838;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-03-31T15:59:02.064880Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003790479456156:3158], TxId: 281474976710838, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=YTc4MTIxYTAtMjc3Yzg2ODctMWJlNjg4NS1jYzc1MDcxMw==. TraceId : 01jqpetyzm22ce8s1003qw26fb. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-03-31T15:59:02.064923Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710838, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2025-03-31T15:59:02.064932Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710838, task: 2. Tasks execution finished 2025-03-31T15:59:02.064944Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003790479456156:3158], TxId: 281474976710838, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=YTc4MTIxYTAtMjc3Yzg2ODctMWJlNjg4NS1jYzc1MDcxMw==. TraceId : 01jqpetyzm22ce8s1003qw26fb. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Compute state finished. All channels and sinks finished 2025-03-31T15:59:02.064999Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710838, task: 2. pass away 2025-03-31T15:59:02.065053Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976710838;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-03-31T15:59:02.071014Z node 4 :YQ_CONTROL_PLANE_STORAGE WARN: DescribeQueryRequest - DescribeQueryResult: {query_id: "utqueeba0h1crlj56sdk" } ERROR: {
: Error: (NYql::TCodeLineException) ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_queries.cpp:662: Query does not exist or permission denied. Please check the id of the query or your access rights, code: 1000 } E0331 15:59:02.334073158 45346 dns_resolver.cc:162] no server name supplied in dns URI E0331 15:59:02.334382686 45346 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-03-31T15:59:02.484305Z node 4 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:322: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv6:%5B::%5D:65284: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint [::]:65284 2025-03-31T15:59:02.728200Z node 4 :FQ_PENDING_FETCHER ERROR: Error with GetTask:
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv6:%5B::%5D:65284: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint [::]:65284 >> AggregateStatistics::ShouldBeCorrectlyAggregateStatisticsFromAllNodes [GOOD] >> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflowAndRetry [GOOD] >> TTxDataShardUploadRows::BulkUpsertDuringAddIndexRaceCorruption >> TopicAutoscaling::ReadFromTimestamp_PQv1 [GOOD] >> KqpJoin::AllowJoinsForComplexPredicates-StreamLookup [GOOD] >> TTxDataShardUploadRows::UploadRowsToReplicatedTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::MultiJoins [GOOD] Test command err: Trying to start YDB, gRPC: 25014, MsgBus: 31026 2025-03-31T15:58:52.584335Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488003747421029083:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:58:52.584474Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0019cd/r3tmp/tmpjN6Gd1/pdisk_1.dat 2025-03-31T15:58:53.460129Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:58:53.550195Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:58:53.550282Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 25014, node 1 2025-03-31T15:58:53.564015Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:58:53.758669Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:58:53.758692Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:58:53.758700Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:58:53.758824Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31026 TClient is connected to server localhost:31026 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:58:55.126087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T15:58:55.199312Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T15:58:55.214814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:58:55.534915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:58:55.851691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:58:55.998341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:58:57.591253Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488003747421029083:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:58:57.606519Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:58:58.288550Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003773190834621:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:58.288706Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:58.644239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T15:58:58.729223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T15:58:58.817832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T15:58:58.902292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T15:58:58.945026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T15:58:59.053100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T15:58:59.151972Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003777485802449:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:59.152054Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:59.152381Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003777485802454:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:59.157993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T15:58:59.175216Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488003777485802456:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T15:58:59.233663Z node 1 :TX_PROXY ERROR: Actor# [1:7488003777485802510:3464] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:59:00.637142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T15:59:00.691551Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T15:59:00.768190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T15:59:00.858543Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T15:59:00.945480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-31T15:59:01.033056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::Left-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 3552, MsgBus: 20192 2025-03-31T15:58:48.150959Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488003729676101802:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:58:48.150990Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0019ee/r3tmp/tmpHLPwtK/pdisk_1.dat 2025-03-31T15:58:48.768893Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:58:48.773724Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:58:48.773809Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:58:48.777202Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3552, node 1 2025-03-31T15:58:49.019660Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:58:49.019679Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:58:49.019702Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:58:49.019810Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20192 TClient is connected to server localhost:20192 
WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:58:49.930201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:58:49.941926Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T15:58:49.955175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:58:50.169585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:58:50.389306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:58:50.502308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:58:52.680567Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003746855972760:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:52.680671Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:53.126038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T15:58:53.154569Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488003729676101802:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:58:53.154652Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:58:53.182779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T15:58:53.221097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T15:58:53.251659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T15:58:53.286827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T15:58:53.343488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T15:58:53.413202Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003751150940571:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:53.413280Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:53.413670Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003751150940576:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:53.418190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T15:58:53.442055Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-31T15:58:53.442792Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488003751150940578:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T15:58:53.504790Z node 1 :TX_PROXY ERROR: Actor# [1:7488003751150940631:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:58:54.761323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T15:58:54.828124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T15:58:54.874885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T15:58:54.920962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T15:58:54.968104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-31T15:58:55.050722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 16912, MsgBus: 14695 2025-03-31T15:58:57.738134Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488003771427584297:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:58:57.739010Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0019ee/r3tmp/tmpCqeJH6/pdisk_1.dat 2025-03-31T15:58:58.104357Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:58:58.109848Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:58:58.109928Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:58:58.111948Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16912, node 2 2025-03-31T15:58:58.179629Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:58:58.179652Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:58:58.179660Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:58:58.179778Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14695 TClient is connected to server localhost:14695 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:58:58.673248Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:58:58.680037Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T15:58:58.688941Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T15:58:58.779706Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:58:58.961802Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:58:59.045863Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T15:59:02.127258Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488003792902422544:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:02.127351Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:02.181316Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T15:59:02.236823Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T15:59:02.327626Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T15:59:02.417456Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T15:59:02.501313Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T15:59:02.599325Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T15:59:02.702347Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488003792902423077:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:02.702422Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:02.702894Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488003792902423082:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:02.706917Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T15:59:02.721275Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488003792902423084:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T15:59:02.739424Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488003771427584297:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:59:02.739498Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:59:02.795243Z node 2 :TX_PROXY ERROR: Actor# [2:7488003792902423139:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:59:03.999404Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T15:59:04.061022Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T15:59:04.105110Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T15:59:04.153974Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T15:59:04.245282Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-31T15:59:04.321131Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::Create_And_Modify_The_Same_Connection [GOOD] Test command err: 2025-03-31T15:58:13.105331Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488003583097703844:2165];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:58:13.105453Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0331 15:58:14.044218368 41286 dns_resolver.cc:162] no server name supplied in dns URI E0331 15:58:14.044360978 41286 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-03-31T15:58:14.471425Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:322: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:26923: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:26923 2025-03-31T15:58:14.641598Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:15.420444Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:26923: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26923 } ] 2025-03-31T15:58:15.641599Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:16.643511Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:17.147639Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:26923: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26923 } ] 2025-03-31T15:58:17.647560Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:18.107180Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488003583097703844:2165];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:58:18.107232Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:58:18.651613Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; E0331 15:58:19.047337520 41437 dns_resolver.cc:162] no server name supplied in dns URI E0331 15:58:19.047483232 41437 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-03-31T15:58:19.107864Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:19.587294Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:26923: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26923 } ] 2025-03-31T15:58:19.636269Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:322: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:26923: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:26923 2025-03-31T15:58:19.658852Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:20.193064Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:20.659889Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:21.199158Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:21.663586Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:22.203535Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:22.321663Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7488003621752409901:2318], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:58:22.321825Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T15:58:22.419540Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7488003621752409901:2318], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000cdd/r3tmp/tmpAM0u4C/pdisk_1.dat 2025-03-31T15:58:22.606434Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7488003621752409901:2318], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:58:22.675244Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 26923, node 1 2025-03-31T15:58:22.863406Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T15:58:22.863466Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 TClient is connected to server localhost:22571 WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-03-31T15:58:23.593930Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/compute_databases". Create session OK 2025-03-31T15:58:23.593972Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-03-31T15:58:23.593981Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-03-31T15:58:23.594447Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/result_sets". Create session OK 2025-03-31T15:58:23.594473Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-03-31T15:58:23.594478Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-03-31T15:58:23.600097Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/jobs". Create session OK 2025-03-31T15:58:23.600124Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-03-31T15:58:23.600131Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-03-31T15:58:23.648558Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T15:58:23.651651Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenants". Create session OK 2025-03-31T15:58:23.651681Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-03-31T15:58:23.651687Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-03-31T15:58:23.660921Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenant_acks". Create session OK 2025-03-31T15:58:23.660951Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-03-31T15:58:23.660958Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-03-31T15:58:23.661845Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/connections". Create session OK 2025-03-31T15:58:23.661858Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-03-31T15:58:23.661863Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-03-31T15:58:23.680038Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/bindings". Create session OK 2025-03-31T15:58:23.680065Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-03-31T15:58:23.680072Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-03-31T15:58:23.683356Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/mappings". Create session OK 2025-03-31T15:58:23.683378Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-03-31T15:58:23.683387Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-03-31T15:58:23.692205Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/quotas". 
Create session OK 2025-03-31T15:58:23.692225Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-03-31T15:58:23.692232Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-03-31T15:58:23.693070Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/nodes". Create session OK 2025-03-31T15:58:23.693082Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-03-31T15:58:23.693087Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-03-31T15:58:23.703422Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/pending_small". Create session OK 2025-03-31T15:58:23.703451Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-03-31T15:58:23.703459Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-03-31T15:58:23.715970Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "Root/yq" 2025-03-31T15:58:23.715987Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "Root/yq": 2025-03-31T15:58:23.738969Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/idempotency_keys". Create session OK 2025-03-31T15:58:23.738991Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-03-31T15:58:23.738998Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-03-31T15:58:23.742318Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/queries". Create session OK 2025-03 ... Finish input channelId: 1, from: [4:7488003783560972510:2664] 2025-03-31T15:59:00.175791Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003783560972511:2665], TxId: 281474976710716, task: 2. Ctx: { TraceId : 01jqpetx5h7t27qqm1bjpca6a0. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=ZmViNDc1ODMtNzNlNjJjMi00ODMwNjEwNi1hNmQyOTc1NA==. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-03-31T15:59:00.175970Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003783560972511:2665], TxId: 281474976710716, task: 2. Ctx: { TraceId : 01jqpetx5h7t27qqm1bjpca6a0. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=ZmViNDc1ODMtNzNlNjJjMi00ODMwNjEwNi1hNmQyOTc1NA==. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. About to drain async output 0. FreeSpace: 67108864, allowedOvercommit: 4194304, toSend: 71303168, finished: 0 2025-03-31T15:59:00.176589Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003783560972515:2665], TxId: 281474976710716, task: 2. Add data: 234 / 234 2025-03-31T15:59:00.176682Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003783560972515:2665], TxId: 281474976710716, task: 2. Send data=234, closed=1, bufferActorId=[4:7488003783560972505:2394] 2025-03-31T15:59:00.176713Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003783560972511:2665], TxId: 281474976710716, task: 2. Ctx: { TraceId : 01jqpetx5h7t27qqm1bjpca6a0. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=ZmViNDc1ODMtNzNlNjJjMi00ODMwNjEwNi1hNmQyOTc1NA==. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Drain async output 0. Free space decreased: -9223372036787666944, sent data from buffer: 234 2025-03-31T15:59:00.176753Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710716, task: 2. 
Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2025-03-31T15:59:00.176764Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710716, task: 2. Tasks execution finished 2025-03-31T15:59:00.176779Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003783560972511:2665], TxId: 281474976710716, task: 2. Ctx: { TraceId : 01jqpetx5h7t27qqm1bjpca6a0. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=ZmViNDc1ODMtNzNlNjJjMi00ODMwNjEwNi1hNmQyOTc1NA==. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Waiting finish of sink[0] 2025-03-31T15:59:00.176810Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003783560972510:2664], TxId: 281474976710716, task: 1. Ctx: { TraceId : 01jqpetx5h7t27qqm1bjpca6a0. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=ZmViNDc1ODMtNzNlNjJjMi00ODMwNjEwNi1hNmQyOTc1NA==. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646927 2025-03-31T15:59:00.176865Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003783560972510:2664], TxId: 281474976710716, task: 1. Ctx: { TraceId : 01jqpetx5h7t27qqm1bjpca6a0. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=ZmViNDc1ODMtNzNlNjJjMi00ODMwNjEwNi1hNmQyOTc1NA==. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-03-31T15:59:00.176899Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710716, task: 1. Tasks execution finished 2025-03-31T15:59:00.176914Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003783560972510:2664], TxId: 281474976710716, task: 1. Ctx: { TraceId : 01jqpetx5h7t27qqm1bjpca6a0. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=ZmViNDc1ODMtNzNlNjJjMi00ODMwNjEwNi1hNmQyOTc1NA==. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Compute state finished. All channels and sinks finished 2025-03-31T15:59:00.177065Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710716, task: 1. pass away 2025-03-31T15:59:00.177187Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976710716;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-03-31T15:59:00.177498Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003783560972505:2394], SessionActorId: [4:7488003753496198921:2394], Create new TableWriteActor for table `Root/yq/connections` ([72057594046644480:9:1]). lockId=281474976710708 [4:7488003783560972520:2394] 2025-03-31T15:59:00.177546Z node 4 :KQP_COMPUTE DEBUG: Table: `Root/yq/connections` ([72057594046644480:9:1]), SessionActorId: [4:7488003753496198921:2394]Open: token=0 2025-03-31T15:59:00.177576Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003783560972505:2394], SessionActorId: [4:7488003753496198921:2394], ProcessRequestQueue [72057594046644480:9:1] NOT READY queue=1 2025-03-31T15:59:00.177818Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003783560972520:2394], Table: `Root/yq/connections` ([72057594046644480:9:1]), SessionActorId: [4:7488003753496198921:2394]Write: token=0 2025-03-31T15:59:00.177953Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003783560972520:2394], Table: `Root/yq/connections` ([72057594046644480:9:1]), SessionActorId: [4:7488003753496198921:2394]Close: token=0 2025-03-31T15:59:00.178047Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003783560972515:2665], TxId: 281474976710716, task: 2. TKqpForwardWriteActor recieve EvBufferWriteResult from [4:7488003783560972505:2394] 2025-03-31T15:59:00.178061Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003783560972515:2665], TxId: 281474976710716, task: 2. Finished 2025-03-31T15:59:00.178086Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003783560972511:2665], TxId: 281474976710716, task: 2. Ctx: { TraceId : 01jqpetx5h7t27qqm1bjpca6a0. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=ZmViNDc1ODMtNzNlNjJjMi00ODMwNjEwNi1hNmQyOTc1NA==. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-03-31T15:59:00.178124Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710716, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2025-03-31T15:59:00.178132Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710716, task: 2. Tasks execution finished 2025-03-31T15:59:00.178146Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003783560972511:2665], TxId: 281474976710716, task: 2. Ctx: { TraceId : 01jqpetx5h7t27qqm1bjpca6a0. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=ZmViNDc1ODMtNzNlNjJjMi00ODMwNjEwNi1hNmQyOTc1NA==. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Compute state finished. All channels and sinks finished 2025-03-31T15:59:00.178211Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710716, task: 2. pass away 2025-03-31T15:59:00.178278Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976710716;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-03-31T15:59:00.178619Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003783560972505:2394], SessionActorId: [4:7488003753496198921:2394], Start prepare for distributed commit 2025-03-31T15:59:00.178632Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003783560972520:2394], Table: `Root/yq/connections` ([72057594046644480:9:1]), SessionActorId: [4:7488003753496198921:2394]SetPrepare; txId=281474976710716 2025-03-31T15:59:00.178648Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003783560972505:2394], SessionActorId: [4:7488003753496198921:2394], Flush data 2025-03-31T15:59:00.178823Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003783560972520:2394], Table: `Root/yq/connections` ([72057594046644480:9:1]), SessionActorId: [4:7488003753496198921:2394]Send EvWrite to ShardID=72075186224037894, isPrepare=1, isImmediateCommit=0, TxId=281474976710716, LockTxId=0, LockNodeId=0, Locks= LockId: 281474976710708 DataShard: 72075186224037894 Generation: 1 Counter: 1 SchemeShard: 72057594046644480 PathId: 9, Size=324, Cookie=1, OperationsCount=1, IsFinal=1, Attempts=0, Mode=1 2025-03-31T15:59:00.178923Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003783560972505:2394], SessionActorId: [4:7488003753496198921:2394], Send EvWrite (external) to ShardID=72075186224037888, isPrepare=1, isImmediateCommit=0, TxId=281474976710716, LockTxId=0, LockNodeId=0, Locks= LockId: 281474976710708 DataShard: 72075186224037888 Generation: 1 Counter: 1 SchemeShard: 72057594046644480 PathId: 7, Size=0, Cookie=0, OperationsCount=0, IsFinal=1, Attempts=0 2025-03-31T15:59:00.180716Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003783560972520:2394], Table: `Root/yq/connections` ([72057594046644480:9:1]), SessionActorId: [4:7488003753496198921:2394]Recv EvWriteResult from ShardID=72075186224037894, Status=STATUS_PREPARED, TxId=281474976710716, Locks= , Cookie=1 2025-03-31T15:59:00.180796Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003783560972505:2394], SessionActorId: [4:7488003753496198921:2394], Flush data 2025-03-31T15:59:00.180832Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003783560972505:2394], SessionActorId: [4:7488003753496198921:2394], Recv EvWriteResult (external) from ShardID=72075186224037888, Status=STATUS_PREPARED, TxId=281474976710716, Locks= , Cookie=0 2025-03-31T15:59:00.180849Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003783560972505:2394], SessionActorId: [4:7488003753496198921:2394], Got prepared result TxId=281474976710716, TabletId=72075186224037888, Cookie=0 2025-03-31T15:59:00.180882Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003783560972505:2394], SessionActorId: [4:7488003753496198921:2394], Start distributed commit with TxId=281474976710716 2025-03-31T15:59:00.180896Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003783560972520:2394], Table: `Root/yq/connections` ([72057594046644480:9:1]), SessionActorId: [4:7488003753496198921:2394]SetDistributedCommit; txId=281474976710716 2025-03-31T15:59:00.180931Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003783560972505:2394], SessionActorId: [4:7488003753496198921:2394], Execute planned transaction, coordinator: 72057594046316545, volitale: 1, shards: 2 2025-03-31T15:59:00.182702Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003783560972505:2394], SessionActorId: [4:7488003753496198921:2394], Got transaction status, status: 16 2025-03-31T15:59:00.182726Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003783560972505:2394], SessionActorId: [4:7488003753496198921:2394], Got transaction status, status: 17 2025-03-31T15:59:00.191889Z node 4 :KQP_COMPUTE DEBUG: 
SelfId: [4:7488003783560972505:2394], SessionActorId: [4:7488003753496198921:2394], Recv EvWriteResult (external) from ShardID=72075186224037888, Status=STATUS_COMPLETED, TxId=281474976710716, Locks= , Cookie=0 2025-03-31T15:59:00.191929Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003783560972505:2394], SessionActorId: [4:7488003753496198921:2394], Got completed result TxId=281474976710716, TabletId=72075186224037888, Cookie=0, Locks= 2025-03-31T15:59:00.199580Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003783560972520:2394], Table: `Root/yq/connections` ([72057594046644480:9:1]), SessionActorId: [4:7488003753496198921:2394]Recv EvWriteResult from ShardID=72075186224037894, Status=STATUS_COMPLETED, TxId=281474976710716, Locks= , Cookie=0 2025-03-31T15:59:00.199627Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003783560972520:2394], Table: `Root/yq/connections` ([72057594046644480:9:1]), SessionActorId: [4:7488003753496198921:2394]Got completed result TxId=281474976710716, TabletId=72075186224037894, Cookie=0, Mode=2, Locks= 2025-03-31T15:59:00.199653Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003783560972505:2394], SessionActorId: [4:7488003753496198921:2394], Committed TxId=281474976710716 2025-03-31T15:59:00.404328Z node 4 :FQ_PENDING_FETCHER ERROR: Error with GetTask:
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv6:%5B::%5D:10724: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint [::]:10724 2025-03-31T15:59:01.383496Z node 4 :FQ_PENDING_FETCHER ERROR: Error with GetTask:
: Error: Client is stopped ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ShouldBeCorrectlyAggregateStatisticsFromAllNodes [GOOD] Test command err: 2025-03-31T15:59:06.963860Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-03-31T15:59:06.964911Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:38:2058], server id = [1:38:2058], tablet id = 1, status = OK 2025-03-31T15:59:06.965272Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:38:2058], path = { OwnerId: 3 LocalId: 3 } 2025-03-31T15:59:06.965370Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:39:2059], server id = [1:39:2059], tablet id = 2, status = OK 2025-03-31T15:59:06.965427Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:39:2059], path = { OwnerId: 3 LocalId: 3 } 2025-03-31T15:59:06.965813Z node 3 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-03-31T15:59:06.965965Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-03-31T15:59:06.966064Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:40:2060], server id = [1:40:2060], tablet id = 3, status = OK 2025-03-31T15:59:06.966107Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:40:2060], path = { OwnerId: 3 LocalId: 3 } 2025-03-31T15:59:06.966245Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 1 2025-03-31T15:59:06.966462Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 2 2025-03-31T15:59:06.966520Z node 3 :STATISTICS DEBUG: EvClientConnected, node id = 3, client id = [3:44:2057], server id = [3:44:2057], tablet id = 5, status = OK 2025-03-31T15:59:06.969640Z node 3 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [3:44:2057], path = { OwnerId: 3 LocalId: 3 } 2025-03-31T15:59:06.969795Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:46:2057], server id = [2:46:2057], tablet id = 4, status = OK 2025-03-31T15:59:06.969871Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:46:2057], path = { OwnerId: 3 LocalId: 3 } 2025-03-31T15:59:06.970052Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 4 2025-03-31T15:59:06.970173Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:38:2058], server id = [0:0:0], tablet id = 1, status = ERROR 2025-03-31T15:59:06.970204Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-03-31T15:59:06.970248Z node 3 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 5 2025-03-31T15:59:06.970303Z node 3 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 2025-03-31T15:59:06.970410Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 3 2025-03-31T15:59:06.970559Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:46:2057], server id = [0:0:0], tablet id = 4, status = ERROR 2025-03-31T15:59:06.970579Z node 2 :STATISTICS DEBUG: Skip EvClientConnected 2025-03-31T15:59:06.970606Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:39:2059], server id = [0:0:0], tablet id = 2, status = ERROR 2025-03-31T15:59:06.970622Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-03-31T15:59:06.970661Z node 4 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 1, current Round: 0 
2025-03-31T15:59:06.970791Z node 3 :STATISTICS DEBUG: EvClientConnected, node id = 3, client id = [3:44:2057], server id = [0:0:0], tablet id = 5, status = ERROR 2025-03-31T15:59:06.970813Z node 3 :STATISTICS DEBUG: Skip EvClientConnected 2025-03-31T15:59:06.970875Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:40:2060], server id = [0:0:0], tablet id = 3, status = ERROR 2025-03-31T15:59:06.970890Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-03-31T15:59:06.970933Z node 4 :STATISTICS DEBUG: EvClientConnected, node id = 4, client id = [4:49:2057], server id = [4:49:2057], tablet id = 6, status = OK 2025-03-31T15:59:06.970981Z node 4 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [4:49:2057], path = { OwnerId: 3 LocalId: 3 } 2025-03-31T15:59:06.971115Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 3 2025-03-31T15:59:06.971260Z node 4 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 6 2025-03-31T15:59:06.971305Z node 4 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-03-31T15:59:06.971428Z node 4 :STATISTICS DEBUG: EvClientConnected, node id = 4, client id = [4:49:2057], server id = [0:0:0], tablet id = 6, status = ERROR 2025-03-31T15:59:06.971447Z node 4 :STATISTICS DEBUG: Skip EvClientConnected 2025-03-31T15:59:06.971559Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 4 2025-03-31T15:59:06.971609Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 2025-03-31T15:59:06.971820Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 2 2025-03-31T15:59:06.971859Z node 1 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 >> DataShardSnapshots::LockedWriteDistributedCommitFreeze+UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitFreeze-UseSink |68.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus1Block [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus1Stripe >> TTxDataShardUploadRows::TestUploadShadowRowsShadowData [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_upload_rows/unittest >> TTxDataShardUploadRows::UploadRowsToReplicatedTable [GOOD] Test command err: 2025-03-31T15:58:57.381287Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:58:57.381595Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:57.381808Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001946/r3tmp/tmpwmQEvx/pdisk_1.dat 2025-03-31T15:58:57.964812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T15:58:58.123208Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:58:58.181907Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:58:58.182047Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:58:58.196352Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:58:58.292067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T15:58:58.334727Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-03-31T15:58:58.336054Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-03-31T15:58:58.336588Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-31T15:58:58.336837Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:58:58.393660Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T15:58:58.394440Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:58:58.394574Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T15:58:58.396496Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T15:58:58.396577Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T15:58:58.396648Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T15:58:58.397008Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T15:58:58.397152Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T15:58:58.397227Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-31T15:58:58.411811Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T15:58:58.453604Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T15:58:58.453833Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T15:58:58.453961Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-31T15:58:58.454014Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T15:58:58.454075Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T15:58:58.454136Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T15:58:58.454369Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T15:58:58.454418Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T15:58:58.454688Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T15:58:58.454775Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T15:58:58.454880Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T15:58:58.454924Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T15:58:58.454968Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-31T15:58:58.455011Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-31T15:58:58.455046Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-31T15:58:58.455077Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T15:58:58.455193Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T15:58:58.455312Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T15:58:58.455369Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T15:58:58.455439Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-31T15:58:58.455614Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-03-31T15:58:58.455659Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T15:58:58.455765Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T15:58:58.455981Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-31T15:58:58.456038Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T15:58:58.456126Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T15:58:58.456198Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-31T15:58:58.456253Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-31T15:58:58.456298Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-03-31T15:58:58.456335Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-31T15:58:58.456606Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-31T15:58:58.456648Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-31T15:58:58.456686Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-31T15:58:58.456724Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-31T15:58:58.456795Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-31T15:58:58.456845Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-31T15:58:58.456893Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-31T15:58:58.456925Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-31T15:58:58.456949Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-31T15:58:58.458787Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-31T15:58:58.458857Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T15:58:58.471842Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T15:58:58.471927Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-31T15:58:58.471990Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-31T15:58:58.472036Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-03-31T15:58:58.472095Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-31T15:58:58.645447Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T15:58:58.645534Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T15:58:58.645576Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-31T15:58:58.646037Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:568:2495], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-03-31T15:58:58.646079Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-31T15:58:58.646230Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-31T15:58:58.646292Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-03-31T15:58:58.646353Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-03-31T15:58:58.646391Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-03-31T15:58:58.658797Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T15:58:58.658908Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T15:58:58.667824Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T15:58:58.667890Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T15:58:58.667947Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T15:58:5 ... t 72075186224037890 to execution unit CompleteOperation 2025-03-31T15:59:01.445914Z node 1 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715667] at 72075186224037890 on unit CompleteOperation 2025-03-31T15:59:01.446116Z node 1 :TX_DATASHARD TRACE: Execution status for [3000:281474976715667] at 72075186224037890 is DelayComplete 2025-03-31T15:59:01.446145Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715667] at 72075186224037890 executing on unit CompleteOperation 2025-03-31T15:59:01.446184Z node 1 :TX_DATASHARD TRACE: Add [3000:281474976715667] at 72075186224037890 to execution unit CompletedOperations 2025-03-31T15:59:01.446230Z node 1 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715667] at 72075186224037890 on unit CompletedOperations 2025-03-31T15:59:01.446265Z node 1 :TX_DATASHARD TRACE: Execution status for [3000:281474976715667] at 72075186224037890 is Executed 2025-03-31T15:59:01.446290Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715667] at 72075186224037890 executing on unit CompletedOperations 2025-03-31T15:59:01.446317Z node 1 :TX_DATASHARD TRACE: Execution plan for [3000:281474976715667] at 72075186224037890 has finished 2025-03-31T15:59:01.446349Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T15:59:01.446373Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037890 2025-03-31T15:59:01.446401Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037890 has no attached operations 2025-03-31T15:59:01.446428Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2025-03-31T15:59:01.459869Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-03-31T15:59:01.459943Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-03-31T15:59:01.459982Z node 1 :TX_DATASHARD TRACE: Complete execution for [3000:281474976715667] at 72075186224037890 on unit CompleteOperation 2025-03-31T15:59:01.460044Z node 1 :TX_DATASHARD DEBUG: Complete [3000 : 281474976715667] from 72075186224037890 at tablet 72075186224037890 send result to client [1:1128:2907], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T15:59:01.460096Z node 1 
:TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-03-31T15:59:05.621174Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:101:2147], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:59:05.621569Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T15:59:05.621654Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001946/r3tmp/tmpcsuuoh/pdisk_1.dat 2025-03-31T15:59:05.943619Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T15:59:05.977268Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:06.015938Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:59:06.016117Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:59:06.027931Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:59:06.124775Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T15:59:06.176002Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:667:2571] 2025-03-31T15:59:06.176277Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:59:06.260860Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:59:06.261009Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T15:59:06.262758Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T15:59:06.262846Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T15:59:06.262912Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T15:59:06.263291Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T15:59:06.263419Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T15:59:06.263500Z node 2 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [2:683:2571] in generation 1 2025-03-31T15:59:06.275871Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T15:59:06.275980Z node 2 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T15:59:06.276101Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T15:59:06.276193Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [2:685:2581] 2025-03-31T15:59:06.276236Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T15:59:06.276297Z node 2 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T15:59:06.276337Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T15:59:06.276725Z node 2 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T15:59:06.276833Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T15:59:06.277011Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T15:59:06.277056Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T15:59:06.277095Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T15:59:06.277139Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T15:59:06.277638Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:663:2568], serverId# [2:674:2575], sessionId# [0:0:0] 2025-03-31T15:59:06.277819Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T15:59:06.278063Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T15:59:06.278149Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T15:59:06.281437Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T15:59:06.293493Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T15:59:06.293610Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-31T15:59:06.457843Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:703:2593], serverId# [2:705:2595], sessionId# [0:0:0] 2025-03-31T15:59:06.459141Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T15:59:06.459217Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T15:59:06.459629Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T15:59:06.459684Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-31T15:59:06.459732Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-31T15:59:06.459982Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-31T15:59:06.460134Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T15:59:06.460721Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T15:59:06.460799Z node 2 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-31T15:59:06.462054Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-31T15:59:06.462560Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-03-31T15:59:06.464231Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-03-31T15:59:06.464293Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T15:59:06.465184Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-03-31T15:59:06.465261Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T15:59:06.465772Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T15:59:06.465823Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T15:59:06.465874Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-31T15:59:06.465948Z node 2 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:410:2405], exec latency: 0 ms, propose latency: 0 ms 2025-03-31T15:59:06.466009Z node 2 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-31T15:59:06.466103Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T15:59:06.468208Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T15:59:06.470265Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-03-31T15:59:06.470463Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-03-31T15:59:06.470548Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-31T15:59:06.479405Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:739:2621], serverId# [2:740:2622], sessionId# [0:0:0] 2025-03-31T15:59:06.479570Z node 2 :TX_DATASHARD NOTICE: Rejecting bulk upsert request on datashard: tablet# 72075186224037888, error# Can't execute bulk upsert at replicated table >> GracefulShutdown::TTxGracefulShutdown >> Yq_1::Basic_EmptyList [GOOD] >> Yq_1::Basic_EmptyDict ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::AllowJoinsForComplexPredicates-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 26754, MsgBus: 21196 2025-03-31T15:58:49.635740Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488003734705187915:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:58:49.636224Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0019e5/r3tmp/tmpwZ69Rl/pdisk_1.dat 2025-03-31T15:58:50.474390Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:58:50.482976Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:58:50.491331Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:58:50.494951Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26754, node 1 2025-03-31T15:58:50.839238Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:58:50.839258Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:58:50.839280Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:58:50.839392Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21196 TClient is connected to server localhost:21196 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:58:52.173530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:58:52.241805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:58:52.528704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:58:52.846433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:58:52.979675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:58:54.627166Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488003734705187915:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:58:54.627259Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:58:55.977950Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003760474993342:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:55.978064Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:56.353693Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T15:58:56.422928Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T15:58:56.502417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T15:58:56.601747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T15:58:56.665463Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T15:58:56.729462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T15:58:56.817026Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003764769961158:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:56.817107Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:56.817479Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003764769961163:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:56.821880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T15:58:56.858776Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488003764769961165:2467], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T15:58:56.960836Z node 1 :TX_PROXY ERROR: Actor# [1:7488003764769961226:3463] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:59:05.447725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T15:59:05.447757Z node 1 :IMPORT WARN: Table profiles were not loaded >> UpsertLoad::ShouldWriteDataBulkUpsert2 [GOOD] >> TBlobStorageProxyTest::TestInFlightPuts [GOOD] >> TBlobStorageProxyTest::TestHugeCollectGarbage >> TableWriter::Backup [GOOD] |68.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-8 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-9 >> TBlobStorageProxyTest::TestVPutVGetPersistence [GOOD] |68.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest >> TBlobStorageProxyTest::TestNormalMirror [GOOD] >> TBlobStorageProxyTest::TestSingleFailureMirror [GOOD] >> TBlobStorageProxyTest::TestVBlockVPutVGet |68.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest >> TableWriter::Backup [GOOD] |68.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataAlterSplitThenPublish [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus2Block [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus2Stripe |68.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestVPutVGetPersistence [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_upload_rows/unittest >> TTxDataShardUploadRows::TestUploadShadowRowsShadowData [GOOD] Test command err: 2025-03-31T15:58:58.328790Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:58:58.329118Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:58.329350Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001907/r3tmp/tmpkXRwsm/pdisk_1.dat 2025-03-31T15:58:58.746215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T15:58:58.813696Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:58:58.857004Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:58:58.857134Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:58:58.872446Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:58:58.981718Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T15:58:59.102058Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-31T15:58:59.102310Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:58:59.161309Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:58:59.161507Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T15:58:59.163310Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T15:58:59.163406Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T15:58:59.163460Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T15:58:59.163860Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T15:58:59.164024Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T15:58:59.164134Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-31T15:58:59.180142Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T15:58:59.216176Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T15:58:59.216420Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T15:58:59.216559Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-31T15:58:59.216598Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T15:58:59.216632Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T15:58:59.216672Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T15:58:59.217128Z node 1 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T15:58:59.217267Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T15:58:59.217384Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T15:58:59.217427Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T15:58:59.217491Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T15:58:59.217535Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T15:58:59.217605Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-31T15:58:59.217780Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T15:58:59.218020Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T15:58:59.218123Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T15:58:59.220505Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T15:58:59.235161Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T15:58:59.235280Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-31T15:58:59.400673Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-31T15:58:59.405513Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T15:58:59.405602Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T15:58:59.406241Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T15:58:59.406290Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-31T15:58:59.406341Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-31T15:58:59.406617Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-31T15:58:59.406765Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T15:58:59.409420Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T15:58:59.409537Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-31T15:58:59.410051Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-31T15:58:59.410476Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-03-31T15:58:59.412680Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-03-31T15:58:59.412757Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T15:58:59.413952Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-03-31T15:58:59.414035Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T15:58:59.414948Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T15:58:59.415014Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T15:58:59.415060Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-31T15:58:59.415157Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-31T15:58:59.415218Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-31T15:58:59.415349Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T15:58:59.426344Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T15:58:59.428650Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-03-31T15:58:59.428867Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-03-31T15:58:59.428938Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-31T15:58:59.439444Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:59.439588Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:747:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:59.439683Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:59.445786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T15:58:59.452786Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T15:58:59.610167Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T15:58:59.613710Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T15:58:59.684248Z node 1 :TX_PROXY ERROR: Actor# [1:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:59:00.044409Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqpetwvd1ybvncga4k8bz7kf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YThiMzI4Y2UtODI4ODYxZWUtZjU3N2M5MDAtOTRhMDI4MjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:59:00.051805Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:856:2687], serverId# [1:857:2688], sessionId# [0:0:0] 2025-03-31T15:59:00.052303Z node 1 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888 2025-03-31T15:59:00.052476Z node 1 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row count=3 ... aChangedResult 2025-03-31T15:59:07.892970Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715664 datashard 72075186224037888 state Ready 2025-03-31T15:59:07.893031Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-31T15:59:07.900840Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [2:953:2764], Recipient [2:667:2571]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCAN SourceDeprecated { RawX1: 953 RawX2: 8589937356 } TxBody: " \0018\001B8\n\014\010\200\202\224\204\200\200\200\200\001\020\002\022\t\010\001\022\003key\030\002\022\013\010\002\022\005value\030\002\032\016\n\006\001\000\000\000\000\200\022\000\030\001 \001 \001H\001R\022\t\271\003\000\000\000\000\000\000\021\314\n\000\000\002\000\000\000" TxId: 281474976715665 ExecLevel: 0 Flags: 8 2025-03-31T15:59:07.900922Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T15:59:07.901045Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T15:59:07.901271Z node 2 :TX_DATASHARD TRACE: -- AddReadRange: [(Uint32 : NULL) ; ()] table: [72057594046644480:2:0] 2025-03-31T15:59:07.901371Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit CheckDataTx 2025-03-31T15:59:07.901455Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2025-03-31T15:59:07.901505Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit CheckDataTx 2025-03-31T15:59:07.901552Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-03-31T15:59:07.901588Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit BuildAndWaitDependencies 2025-03-31T15:59:07.901639Z node 2 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2000/281474976715664 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1500/18446744073709551615 ImmediateWriteEdgeReplied# v1500/18446744073709551615 2025-03-31T15:59:07.901697Z node 2 :TX_DATASHARD TRACE: Activated operation [0:281474976715665] at 72075186224037888 2025-03-31T15:59:07.901741Z node 2 :TX_DATASHARD 
TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2025-03-31T15:59:07.901766Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-03-31T15:59:07.901790Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit MakeScanSnapshot 2025-03-31T15:59:07.901815Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit MakeScanSnapshot 2025-03-31T15:59:07.901844Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2025-03-31T15:59:07.901870Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit MakeScanSnapshot 2025-03-31T15:59:07.901895Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit WaitForStreamClearance 2025-03-31T15:59:07.901920Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit WaitForStreamClearance 2025-03-31T15:59:07.901979Z node 2 :TX_DATASHARD TRACE: Requested stream clearance from [2:953:2764] for [0:281474976715665] at 72075186224037888 2025-03-31T15:59:07.902019Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Continue 2025-03-31T15:59:07.902267Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287942, Sender [2:953:2764], Recipient [2:667:2571]: NKikimrTx.TEvStreamClearancePending TxId: 281474976715665 2025-03-31T15:59:07.902311Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearancePending 2025-03-31T15:59:07.902418Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287940, Sender [2:953:2764], Recipient [2:667:2571]: NKikimrTx.TEvStreamClearanceResponse TxId: 281474976715665 Cleared: true 2025-03-31T15:59:07.902451Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearanceResponse 2025-03-31T15:59:07.902540Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:667:2571], Recipient [2:667:2571]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T15:59:07.902576Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T15:59:07.902651Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T15:59:07.902696Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-03-31T15:59:07.902745Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715665] at 72075186224037888 for WaitForStreamClearance 2025-03-31T15:59:07.902791Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit WaitForStreamClearance 2025-03-31T15:59:07.902849Z node 2 :TX_DATASHARD TRACE: Got stream clearance for [0:281474976715665] at 72075186224037888 2025-03-31T15:59:07.902887Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2025-03-31T15:59:07.902928Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit WaitForStreamClearance 2025-03-31T15:59:07.902963Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit ReadTableScan 2025-03-31T15:59:07.902997Z node 2 :TX_DATASHARD TRACE: Trying to execute 
[0:281474976715665] at 72075186224037888 on unit ReadTableScan 2025-03-31T15:59:07.903315Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Continue 2025-03-31T15:59:07.903357Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-03-31T15:59:07.903403Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-03-31T15:59:07.903450Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-31T15:59:07.903486Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-31T15:59:07.904165Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [2:959:2769], Recipient [2:667:2571]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-03-31T15:59:07.904211Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2025-03-31T15:59:07.904679Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715665, MessageQuota: 1 2025-03-31T15:59:07.905107Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T15:59:07.905164Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T15:59:07.905705Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715665, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-03-31T15:59:07.905941Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [2:946:2757], Recipient [2:667:2571]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594046644480 ClientId: [2:946:2757] ServerId: [2:948:2759] } 2025-03-31T15:59:07.905984Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-03-31T15:59:07.906400Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715665, PendingAcks: 0 2025-03-31T15:59:07.906469Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715665, MessageQuota: 0 2025-03-31T15:59:07.915919Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-03-31T15:59:07.915993Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715665, at: 72075186224037888 2025-03-31T15:59:07.916237Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:667:2571], Recipient [2:667:2571]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T15:59:07.916283Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T15:59:07.916345Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T15:59:07.916385Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-03-31T15:59:07.916430Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715665] at 72075186224037888 for ReadTableScan 2025-03-31T15:59:07.916465Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit ReadTableScan 2025-03-31T15:59:07.916504Z node 2 :TX_DATASHARD TRACE: ReadTable scan complete for [0:281474976715665] at 72075186224037888 error: , IsFatalError: 0 2025-03-31T15:59:07.916565Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 
is Executed 2025-03-31T15:59:07.916602Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit ReadTableScan 2025-03-31T15:59:07.916640Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit FinishPropose 2025-03-31T15:59:07.916672Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit FinishPropose 2025-03-31T15:59:07.916721Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715665 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-03-31T15:59:07.916799Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is DelayComplete 2025-03-31T15:59:07.916834Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit FinishPropose 2025-03-31T15:59:07.916881Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit CompletedOperations 2025-03-31T15:59:07.916923Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit CompletedOperations 2025-03-31T15:59:07.916977Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2025-03-31T15:59:07.917003Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit CompletedOperations 2025-03-31T15:59:07.917037Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:281474976715665] at 72075186224037888 has finished 2025-03-31T15:59:07.917096Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T15:59:07.917130Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-03-31T15:59:07.917163Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-31T15:59:07.917198Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-31T15:59:07.917271Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T15:59:07.917324Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715665] at 72075186224037888 on unit FinishPropose 2025-03-31T15:59:07.917379Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteDataBulkUpsert2 [GOOD] Test command err: 2025-03-31T15:59:01.556548Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:59:01.556892Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:59:01.557102Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a3b/r3tmp/tmpAzTdz5/pdisk_1.dat 2025-03-31T15:59:02.015226Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T15:59:02.067567Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:02.108548Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:59:02.108710Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:59:02.120736Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:59:02.212134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T15:59:02.610387Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "usertable" } UpsertBulkStart { RowCount: 10 Inflight: 3 } 2025-03-31T15:59:02.610626Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:738:2620], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 0, target# TabletId: 72075186224037888 TableId: 2 TableName: "usertable" 2025-03-31T15:59:02.682144Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:738:2620], subTag: 2} TUpsertActor finished in 0.070915s, errors=0 2025-03-31T15:59:02.682257Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:739:2621] with tag# 2 2025-03-31T15:59:07.716747Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:101:2147], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:59:07.717114Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T15:59:07.717177Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a3b/r3tmp/tmpmmVt8E/pdisk_1.dat 2025-03-31T15:59:08.095853Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T15:59:08.129094Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:08.168738Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:59:08.168876Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:59:08.180439Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:59:08.274406Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T15:59:08.667814Z node 2 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "JustTable" } UpsertBulkStart { RowCount: 10 Inflight: 3 } 2025-03-31T15:59:08.667994Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:738:2620], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 0, target# TabletId: 72075186224037888 TableId: 2 TableName: "JustTable" 2025-03-31T15:59:08.747776Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:738:2620], subTag: 2} TUpsertActor finished in 0.079202s, errors=0 2025-03-31T15:59:08.747884Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [2:739:2621] with tag# 2 >> TableWriter::Restore [GOOD] |68.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestNormalMirror [GOOD] |68.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest |68.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain |68.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain |68.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain >> KqpIndexLookupJoin::CheckCastInt64ToUint64+StreamLookupJoin+NotNull [GOOD] >> TBlobStorageProxyTest::TestVBlockVPutVGet [GOOD] >> Yq_1::DescribeQuery [GOOD] >> ReadLoad::ShouldReadKqp [GOOD] >> ReadLoad::ShouldReadKqpMoreThanRows |68.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest >> TableWriter::Restore [GOOD] |68.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan |68.8%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan |68.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan >> DataShardSnapshots::MvccSnapshotReadLockedWrites-UseSink [GOOD] >> DataShardSnapshots::ReadIteratorLocalSnapshotThenRestart |68.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest |68.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_upload_rows/unittest >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataAlterSplitThenPublish [GOOD] Test command err: 2025-03-31T15:58:57.470734Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:58:57.471039Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:57.471282Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001928/r3tmp/tmpVGXEPK/pdisk_1.dat 2025-03-31T15:58:57.894205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T15:58:57.947636Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:58:57.989178Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:58:57.989315Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:58:58.002182Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:58:58.102540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T15:58:58.169095Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-03-31T15:58:58.170354Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-03-31T15:58:58.170916Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-31T15:58:58.171249Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:58:58.221223Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T15:58:58.222081Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:58:58.222436Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T15:58:58.224470Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T15:58:58.224574Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T15:58:58.224639Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T15:58:58.225072Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T15:58:58.225240Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T15:58:58.225327Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-31T15:58:58.237687Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T15:58:58.272034Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T15:58:58.272291Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T15:58:58.272440Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-31T15:58:58.272488Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T15:58:58.272575Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T15:58:58.272630Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T15:58:58.272882Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T15:58:58.272941Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T15:58:58.273299Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T15:58:58.273410Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T15:58:58.273589Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T15:58:58.273633Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T15:58:58.273698Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-31T15:58:58.273767Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-31T15:58:58.273801Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-31T15:58:58.273838Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T15:58:58.273896Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T15:58:58.274036Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T15:58:58.274102Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T15:58:58.274149Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-31T15:58:58.274332Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-03-31T15:58:58.274380Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T15:58:58.274511Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T15:58:58.274759Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-31T15:58:58.274829Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T15:58:58.274936Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T15:58:58.275002Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-31T15:58:58.275073Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-31T15:58:58.275137Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
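The TX_DATASHARD TRACE lines around this point walk a single operation through a chain of execution units (CheckSchemeTx, StoreSchemeTx, FinishPropose, WaitForPlan, later PlanQueue and CompleteOperation), with each unit reporting a status such as Executed, Continue, or DelayComplete. A minimal sketch of that kind of unit pipeline follows; all type and function names are illustrative assumptions for this sketch, not YDB's actual classes:

#include <iostream>
#include <memory>
#include <string>
#include <vector>

// Statuses mirroring the trace output above (illustrative subset).
enum class EStatus { Executed, Continue, DelayComplete };

struct TOperation { std::string TxId; };

struct IExecutionUnit {
    virtual ~IExecutionUnit() = default;
    virtual std::string Name() const = 0;
    // Executed: advance to the next unit now. Continue: park the operation
    // and re-enter later (e.g. WaitForPlan until the plan step arrives).
    // DelayComplete: advance now, finish completion work after commit.
    virtual EStatus Execute(TOperation& op) = 0;
};

struct TCheckTxUnit : IExecutionUnit {
    std::string Name() const override { return "CheckTx"; }
    EStatus Execute(TOperation&) override { return EStatus::Executed; }
};

struct TStoreTxUnit : IExecutionUnit {
    std::string Name() const override { return "StoreTx"; }
    EStatus Execute(TOperation&) override { return EStatus::DelayComplete; }
};

// Drives one operation through the chain, tracing like the log above.
void RunPipeline(TOperation& op,
                 const std::vector<std::unique_ptr<IExecutionUnit>>& units) {
    for (const auto& unit : units) {
        std::cout << "Trying to execute [" << op.TxId << "] on unit "
                  << unit->Name() << '\n';
        if (unit->Execute(op) == EStatus::Continue) {
            std::cout << "Operation [" << op.TxId << "] is not ready\n";
            return; // pipeline is re-entered once the unit becomes ready
        }
        std::cout << "Advance execution plan for [" << op.TxId
                  << "] executing on unit " << unit->Name() << '\n';
    }
    std::cout << "Execution plan for [" << op.TxId << "] has finished\n";
}

int main() {
    std::vector<std::unique_ptr<IExecutionUnit>> units;
    units.push_back(std::make_unique<TCheckTxUnit>());
    units.push_back(std::make_unique<TStoreTxUnit>());
    TOperation op{"0:281474976715657"};
    RunPipeline(op, units);
}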
2025-03-31T15:58:58.275173Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-31T15:58:58.275473Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-31T15:58:58.275512Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-31T15:58:58.275554Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-31T15:58:58.275593Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-31T15:58:58.275650Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-31T15:58:58.275696Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-31T15:58:58.275748Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-31T15:58:58.275780Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-31T15:58:58.275805Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-31T15:58:58.277575Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-31T15:58:58.277647Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T15:58:58.288465Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T15:58:58.288551Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-31T15:58:58.288626Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-31T15:58:58.288703Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-03-31T15:58:58.288798Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-31T15:58:58.446936Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T15:58:58.447007Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T15:58:58.447054Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-31T15:58:58.447524Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:568:2495], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-03-31T15:58:58.447581Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-31T15:58:58.447719Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-31T15:58:58.447776Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-03-31T15:58:58.447839Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-03-31T15:58:58.447877Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-03-31T15:58:58.453763Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T15:58:58.453883Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T15:58:58.454673Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T15:58:58.454722Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T15:58:58.454784Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T15:58:5 ... 09.321619Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037889, TxId: 281474976715668, MessageQuota: 0 2025-03-31T15:59:09.511420Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037889 2025-03-31T15:59:09.511497Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715668, at: 72075186224037889 2025-03-31T15:59:09.511755Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:970:2777], Recipient [2:970:2777]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T15:59:09.511798Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T15:59:09.511868Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-31T15:59:09.511907Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 1 active planned 1 immediate 0 planned 1 2025-03-31T15:59:09.511951Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [3500:281474976715668] at 72075186224037889 for ReadTableScan 2025-03-31T15:59:09.511985Z node 2 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715668] at 72075186224037889 on unit ReadTableScan 2025-03-31T15:59:09.512023Z node 2 :TX_DATASHARD TRACE: ReadTable scan complete for [3500:281474976715668] at 72075186224037889 error: , IsFatalError: 0 2025-03-31T15:59:09.512089Z node 2 :TX_DATASHARD TRACE: Execution status for [3500:281474976715668] at 72075186224037889 is Executed 2025-03-31T15:59:09.512122Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715668] at 72075186224037889 executing on unit ReadTableScan 2025-03-31T15:59:09.512156Z node 2 :TX_DATASHARD TRACE: Add [3500:281474976715668] at 72075186224037889 to execution unit CompleteOperation 2025-03-31T15:59:09.512191Z node 2 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715668] at 72075186224037889 on unit CompleteOperation 2025-03-31T15:59:09.512414Z node 2 :TX_DATASHARD TRACE: Execution status for [3500:281474976715668] at 72075186224037889 is DelayComplete 2025-03-31T15:59:09.512450Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715668] at 72075186224037889 executing on unit CompleteOperation 2025-03-31T15:59:09.512506Z node 2 :TX_DATASHARD TRACE: Add [3500:281474976715668] at 72075186224037889 to execution unit 
CompletedOperations 2025-03-31T15:59:09.512544Z node 2 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715668] at 72075186224037889 on unit CompletedOperations 2025-03-31T15:59:09.512583Z node 2 :TX_DATASHARD TRACE: Execution status for [3500:281474976715668] at 72075186224037889 is Executed 2025-03-31T15:59:09.512607Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715668] at 72075186224037889 executing on unit CompletedOperations 2025-03-31T15:59:09.512635Z node 2 :TX_DATASHARD TRACE: Execution plan for [3500:281474976715668] at 72075186224037889 has finished 2025-03-31T15:59:09.512683Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T15:59:09.512712Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2025-03-31T15:59:09.512745Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037889 has no attached operations 2025-03-31T15:59:09.512782Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037889 2025-03-31T15:59:09.523832Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-03-31T15:59:09.523925Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-03-31T15:59:09.523971Z node 2 :TX_DATASHARD TRACE: Complete execution for [3500:281474976715668] at 72075186224037889 on unit CompleteOperation 2025-03-31T15:59:09.524060Z node 2 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715668] from 72075186224037889 at tablet 72075186224037889 send result to client [2:1163:2941], exec latency: 0 ms, propose latency: 0 ms 2025-03-31T15:59:09.524136Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-31T15:59:09.524593Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287940, Sender [2:1163:2941], Recipient [2:975:2779]: NKikimrTx.TEvStreamClearanceResponse TxId: 281474976715668 Cleared: true 2025-03-31T15:59:09.524650Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearanceResponse 2025-03-31T15:59:09.524749Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037890 step# 3500} 2025-03-31T15:59:09.524811Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-03-31T15:59:09.524846Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-03-31T15:59:09.525036Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:975:2779], Recipient [2:975:2779]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T15:59:09.525074Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T15:59:09.525133Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-03-31T15:59:09.525172Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 1 immediate 0 planned 1 2025-03-31T15:59:09.525220Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [3500:281474976715668] at 72075186224037890 for WaitForStreamClearance 2025-03-31T15:59:09.525255Z node 2 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715668] at 72075186224037890 on unit WaitForStreamClearance 2025-03-31T15:59:09.525292Z node 2 :TX_DATASHARD TRACE: Got stream clearance for [3500:281474976715668] at 72075186224037890 2025-03-31T15:59:09.525333Z node 2 :TX_DATASHARD TRACE: 
Execution status for [3500:281474976715668] at 72075186224037890 is Executed 2025-03-31T15:59:09.525506Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715668] at 72075186224037890 executing on unit WaitForStreamClearance 2025-03-31T15:59:09.525558Z node 2 :TX_DATASHARD TRACE: Add [3500:281474976715668] at 72075186224037890 to execution unit ReadTableScan 2025-03-31T15:59:09.525590Z node 2 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715668] at 72075186224037890 on unit ReadTableScan 2025-03-31T15:59:09.525846Z node 2 :TX_DATASHARD TRACE: Execution status for [3500:281474976715668] at 72075186224037890 is Continue 2025-03-31T15:59:09.525879Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T15:59:09.525913Z node 2 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037890 2025-03-31T15:59:09.525950Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037890 out-of-order limits exceeded 2025-03-31T15:59:09.525985Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2025-03-31T15:59:09.526898Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [2:1183:2959], Recipient [2:975:2779]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-03-31T15:59:09.526944Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2025-03-31T15:59:09.527234Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715668, MessageQuota: 1 2025-03-31T15:59:09.528137Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037890, TxId: 281474976715668, Size: 54, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-03-31T15:59:09.529955Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715668, PendingAcks: 0 2025-03-31T15:59:09.530015Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037890, TxId: 281474976715668, MessageQuota: 0 2025-03-31T15:59:09.638916Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037890 2025-03-31T15:59:09.638981Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715668, at: 72075186224037890 2025-03-31T15:59:09.645101Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:975:2779], Recipient [2:975:2779]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T15:59:09.645184Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T15:59:09.645263Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-03-31T15:59:09.645305Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 1 immediate 0 planned 1 2025-03-31T15:59:09.645349Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [3500:281474976715668] at 72075186224037890 for ReadTableScan 2025-03-31T15:59:09.645402Z node 2 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715668] at 72075186224037890 on unit ReadTableScan 2025-03-31T15:59:09.645440Z node 2 :TX_DATASHARD TRACE: ReadTable scan complete for [3500:281474976715668] at 72075186224037890 error: , IsFatalError: 0 2025-03-31T15:59:09.645489Z node 2 :TX_DATASHARD TRACE: Execution status for [3500:281474976715668] at 72075186224037890 is Executed 2025-03-31T15:59:09.645531Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715668] at 
72075186224037890 executing on unit ReadTableScan 2025-03-31T15:59:09.645563Z node 2 :TX_DATASHARD TRACE: Add [3500:281474976715668] at 72075186224037890 to execution unit CompleteOperation 2025-03-31T15:59:09.645599Z node 2 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715668] at 72075186224037890 on unit CompleteOperation 2025-03-31T15:59:09.645823Z node 2 :TX_DATASHARD TRACE: Execution status for [3500:281474976715668] at 72075186224037890 is DelayComplete 2025-03-31T15:59:09.645859Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715668] at 72075186224037890 executing on unit CompleteOperation 2025-03-31T15:59:09.645889Z node 2 :TX_DATASHARD TRACE: Add [3500:281474976715668] at 72075186224037890 to execution unit CompletedOperations 2025-03-31T15:59:09.645919Z node 2 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715668] at 72075186224037890 on unit CompletedOperations 2025-03-31T15:59:09.645950Z node 2 :TX_DATASHARD TRACE: Execution status for [3500:281474976715668] at 72075186224037890 is Executed 2025-03-31T15:59:09.645973Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715668] at 72075186224037890 executing on unit CompletedOperations 2025-03-31T15:59:09.646000Z node 2 :TX_DATASHARD TRACE: Execution plan for [3500:281474976715668] at 72075186224037890 has finished 2025-03-31T15:59:09.646031Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T15:59:09.646060Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037890 2025-03-31T15:59:09.646091Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037890 has no attached operations 2025-03-31T15:59:09.646121Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2025-03-31T15:59:09.663569Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-03-31T15:59:09.663642Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-03-31T15:59:09.663677Z node 2 :TX_DATASHARD TRACE: Complete execution for [3500:281474976715668] at 72075186224037890 on unit CompleteOperation 2025-03-31T15:59:09.663751Z node 2 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715668] from 72075186224037890 at tablet 72075186224037890 send result to client [2:1163:2941], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T15:59:09.663805Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 |68.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest |68.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestVBlockVPutVGet [GOOD] Test command err: 2025-03-31T15:59:07.307642Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/0l5c/001093/r3tmp/tmppH22dr//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1 2025-03-31T15:59:07.341548Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:0:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR >> GracefulShutdown::TTxGracefulShutdown [GOOD] >> TTxDataShardUploadRows::TestUploadRowsLocks [GOOD] >> TBlobStorageProxyTest::TestHugeCollectGarbage [GOOD] >> KqpIndexLookupJoin::JoinByComplexKeyWithNullComponents-StreamLookupJoin [GOOD] >> KqpIndexLookupJoin::CheckCastUint64ToInt64-StreamLookupJoin+NotNull [GOOD] >> TBlobStorageProxyTest::TestDoubleEmptyGet [GOOD] >> TBlobStorageProxyTest::TestCompactedGetMultipart [GOOD] |68.8%| [TA] $(B)/ydb/core/backup/impl/ut_table_writer/test-results/unittest/{meta.json ... results_accumulator.log} >> TLocalTests::TestAlterTenant >> TSlotIndexesPoolTest::Expansion [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::CheckCastInt64ToUint64+StreamLookupJoin+NotNull [GOOD] Test command err: Trying to start YDB, gRPC: 5789, MsgBus: 25350 2025-03-31T15:58:51.245146Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488003745009430671:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:58:51.323821Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0019d3/r3tmp/tmpFtU6z9/pdisk_1.dat 2025-03-31T15:58:52.111192Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:58:52.111278Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:58:52.119482Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:58:52.156150Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5789, node 1 2025-03-31T15:58:52.472361Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:58:52.472382Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:58:52.472388Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:58:52.472490Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25350 TClient is connected to server localhost:25350 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T15:58:53.780511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T15:58:53.842636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T15:58:54.065830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:58:54.409662Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T15:58:54.551733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T15:58:56.215223Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488003745009430671:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:58:56.215291Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:58:56.999811Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003766484268798:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:56.999950Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:57.626824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T15:58:57.727148Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T15:58:57.820994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T15:58:57.927162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T15:58:58.007197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T15:58:58.088206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T15:58:58.166568Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003775074203918:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:58.166709Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:58.167157Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003775074203923:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:58.175846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T15:58:58.207937Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488003775074203926:2466], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T15:58:58.307819Z node 1 :TX_PROXY ERROR: Actor# [1:7488003775074203984:3467] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:58:59.726312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T15:58:59.859603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 29042, MsgBus: 17783 2025-03-31T15:59:01.951115Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488003788119999191:2088];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0019d3/r3tmp/tmpNUkyBR/pdisk_1.dat 2025-03-31T15:59:02.190920Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T15:59:02.282142Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:59:02.289923Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:59:02.290620Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:02.316055Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29042, node 2 2025-03-31T15:59:02.582040Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:59:02.582065Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:59:02.582073Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:59:02.582207Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17783 TClient is connected to server localhost:17783 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
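The KQP_WORKLOAD_SERVICE WARN lines above ("Scheduled retry for error", "Retry LookupError") show the schedule-a-retry-on-transient-error pattern: the first lookups fail while metadata objects are still being created, and later attempts succeed. A sketch of that pattern under stated assumptions (names, attempt budget, and delays are illustrative, not the service's actual parameters):

#include <chrono>
#include <functional>
#include <iostream>
#include <thread>

enum class EResult { Ok, TransientError, FatalError };

// Retries `action` with exponential backoff while it reports a transient
// error (e.g. LookupError while a .metadata table does not exist yet).
bool RunWithRetry(const std::function<EResult()>& action,
                  int maxAttempts = 5,
                  std::chrono::milliseconds delay = std::chrono::milliseconds(100)) {
    for (int attempt = 1; attempt <= maxAttempts; ++attempt) {
        switch (action()) {
            case EResult::Ok:
                return true;
            case EResult::FatalError:
                return false; // not worth retrying
            case EResult::TransientError:
                std::cout << "Scheduled retry, attempt " << attempt << '\n';
                std::this_thread::sleep_for(delay);
                delay *= 2; // back off before probing again
                break;
        }
    }
    return false; // transient error never cleared within the budget
}

In the log this is exactly what happens: the "Resource pool default not found" and "doublechecking" warnings stop once the pool and metadata paths exist, which is the transient case such a retry loop is meant to absorb.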
2025-03-31T15:59:03.440646Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:03.478512Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:03.617827Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:03.936816Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:04.043156Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:06.951219Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488003788119999191:2088];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:59:06.951301Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:59:07.048230Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488003813889804698:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:07.048336Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:07.093836Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T15:59:07.161830Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T15:59:07.227416Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T15:59:07.337131Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T15:59:07.386852Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T15:59:07.461189Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T15:59:07.565895Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488003813889805223:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:07.566005Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:07.566441Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488003813889805228:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:07.571901Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T15:59:07.595585Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488003813889805230:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T15:59:07.683834Z node 2 :TX_PROXY ERROR: Actor# [2:7488003813889805285:3458] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:59:09.156296Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T15:59:09.296083Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> GracefulShutdown::TTxGracefulShutdown [GOOD] Test command err: 2025-03-31T15:59:10.409423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:59:10.409525Z node 1 :IMPORT WARN: Table profiles were not loaded >> TLocalTests::TestAddTenantWhileResolving |68.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest |68.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestHugeCollectGarbage [GOOD] |68.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest |68.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TSlotIndexesPoolTest::Expansion [GOOD] >> TDynamicNameserverTest::BasicFunctionality |68.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestCompactedGetMultipart [GOOD] |68.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> DataShardSnapshots::MvccSnapshotLockedWritesWithoutConflicts+UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesWithConflicts+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::DescribeQuery [GOOD] Test command err: 2025-03-31T15:58:12.062488Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488003575285920870:2278];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:58:12.062551Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0331 15:58:12.471684190 40608 dns_resolver.cc:162] no server name supplied in dns URI E0331 15:58:12.471843362 40608 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-03-31T15:58:12.571962Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:17750: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:17750 } ] 2025-03-31T15:58:13.075006Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:13.669655Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:17750: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:17750 } ] 2025-03-31T15:58:13.699782Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:322: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:17750: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:17750 2025-03-31T15:58:14.076713Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:15.083640Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:15.306812Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:17750: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:17750 } ] 2025-03-31T15:58:16.083743Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:17.062799Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488003575285920870:2278];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:58:17.062841Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:58:17.090056Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; E0331 15:58:17.482966804 40920 dns_resolver.cc:162] no server name supplied in dns URI E0331 15:58:17.483143415 40920 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-03-31T15:58:17.630591Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:17750: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:17750 } ] 2025-03-31T15:58:17.718316Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T15:58:17.720169Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7488003596760757553:2313], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000cec/r3tmp/tmpYh45zQ/pdisk_1.dat 2025-03-31T15:58:17.838494Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7488003596760757553:2313], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:58:17.913002Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17750, node 1 2025-03-31T15:58:18.025861Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:58:18.025882Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:58:18.025893Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:58:18.026068Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13281 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:58:18.517915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:58:21.197678Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:58:21.197807Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:58:21.205107Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:58:22.108777Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-31T15:58:22.126234Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/quotas". Create session OK 2025-03-31T15:58:22.126262Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-03-31T15:58:22.126526Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-03-31T15:58:22.127647Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/idempotency_keys". Create session OK 2025-03-31T15:58:22.127660Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-03-31T15:58:22.127664Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-03-31T15:58:22.128310Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/pending_small". Create session OK 2025-03-31T15:58:22.128323Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-03-31T15:58:22.128327Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-03-31T15:58:22.128979Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/connections". 
Create session OK 2025-03-31T15:58:22.128987Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-03-31T15:58:22.128989Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-03-31T15:58:22.133413Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/compute_databases". Create session OK 2025-03-31T15:58:22.133431Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-03-31T15:58:22.133437Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-03-31T15:58:22.134465Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/jobs". Create session OK 2025-03-31T15:58:22.134477Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-03-31T15:58:22.134488Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-03-31T15:58:22.135279Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/bindings". Create session OK 2025-03-31T15:58:22.135292Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-03-31T15:58:22.135296Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-03-31T15:58:22.146741Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/mappings". Create session OK 2025-03-31T15:58:22.146765Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-03-31T15:58:22.146770Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-03-31T15:58:22.179972Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/result_sets". Create session OK 2025-03-31T15:58:22.180001Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-03-31T15:58:22.180007Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-03-31T15:58:22.202791Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenants". Create session OK 2025-03-31T15:58:22.202820Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-03-31T15:58:22.202826Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-03-31T15:58:22.214591Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/nodes". Create session OK 2025-03-31T15:58:22.214609Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-03-31T15:58:22.214615Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-03-31T15:58:22.221026Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenant_acks". Create session OK 2025-03-31T15:58:22.221044Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-03-31T15:58:22.221051Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-03-31T15:58:22.222297Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/queries". Create session OK 2025-03-31T15:58:22.222311Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-03-31T15:58:22.222316Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-03-31T15: ... oint { ActorId { RawX1: 7488003817603846500 RawX2: 4503616807242723 } } InMemory: true DstStageId: 1 } 2025-03-31T15:59:08.191648Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003817603846499:3042], TxId: 281474976710807, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=Nzc5NzgwNTctN2QzYzIyMjMtY2I0YTIzLTRkNTI5MTMx. 
TraceId : 01jqpev4xwff4jj0hd23mgjh2d. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Update output channelId: 1, peer: [4:7488003817603846500:3043] 2025-03-31T15:59:08.191662Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710807, task: 1, CA Id [4:7488003817603846499:3042]. enter getasyncinputdata results size 0, freeSpace 8388608 2025-03-31T15:59:08.191674Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710807, task: 1, CA Id [4:7488003817603846499:3042]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 1 pending shards 0 finished = 0 has limit 0 limit reached 0 2025-03-31T15:59:08.191699Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003817603846499:3042], TxId: 281474976710807, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=Nzc5NzgwNTctN2QzYzIyMjMtY2I0YTIzLTRkNTI5MTMx. TraceId : 01jqpev4xwff4jj0hd23mgjh2d. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646926 2025-03-31T15:59:08.191739Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003817603846499:3042], TxId: 281474976710807, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=Nzc5NzgwNTctN2QzYzIyMjMtY2I0YTIzLTRkNTI5MTMx. TraceId : 01jqpev4xwff4jj0hd23mgjh2d. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 DstTaskId: 2 SrcEndpoint { ActorId { RawX1: 7488003817603846499 RawX2: 4503616807242722 } } DstEndpoint { ActorId { RawX1: 7488003817603846500 RawX2: 4503616807242723 } } InMemory: true DstStageId: 1 } 2025-03-31T15:59:08.191752Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710807, task: 1, CA Id [4:7488003817603846499:3042]. enter getasyncinputdata results size 0, freeSpace 8388608 2025-03-31T15:59:08.191763Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710807, task: 1, CA Id [4:7488003817603846499:3042]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 1 pending shards 0 finished = 0 has limit 0 limit reached 0 2025-03-31T15:59:08.191779Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003817603846499:3042], TxId: 281474976710807, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=Nzc5NzgwNTctN2QzYzIyMjMtY2I0YTIzLTRkNTI5MTMx. TraceId : 01jqpev4xwff4jj0hd23mgjh2d. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2025-03-31T15:59:08.191791Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710807, task: 1, CA Id [4:7488003817603846499:3042]. enter getasyncinputdata results size 0, freeSpace 8388608 2025-03-31T15:59:08.191804Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710807, task: 1, CA Id [4:7488003817603846499:3042]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 1 pending shards 0 finished = 0 has limit 0 limit reached 0 2025-03-31T15:59:08.192593Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710807, task: 1, CA Id [4:7488003817603846499:3042]. Recv TEvReadResult from ShardID=72075186224037892, ReadId=0, Status=SUCCESS, Finished=1, RowCount=1, TxLocks= , BrokenTxLocks= 2025-03-31T15:59:08.192624Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710807, task: 1, CA Id [4:7488003817603846499:3042]. Taken 0 locks 2025-03-31T15:59:08.192640Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710807, task: 1, CA Id [4:7488003817603846499:3042]. 
new data for read #0 seqno = 1 finished = 1 2025-03-31T15:59:08.192663Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003817603846499:3042], TxId: 281474976710807, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=Nzc5NzgwNTctN2QzYzIyMjMtY2I0YTIzLTRkNTI5MTMx. TraceId : 01jqpev4xwff4jj0hd23mgjh2d. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 276037645 2025-03-31T15:59:08.192682Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003817603846499:3042], TxId: 281474976710807, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=Nzc5NzgwNTctN2QzYzIyMjMtY2I0YTIzLTRkNTI5MTMx. TraceId : 01jqpev4xwff4jj0hd23mgjh2d. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2025-03-31T15:59:08.192700Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710807, task: 1, CA Id [4:7488003817603846499:3042]. enter getasyncinputdata results size 1, freeSpace 8388608 2025-03-31T15:59:08.192718Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710807, task: 1, CA Id [4:7488003817603846499:3042]. enter pack cells method shardId: 72075186224037892 processedRows: 0 packed rows: 0 freeSpace: 8388608 2025-03-31T15:59:08.192745Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710807, task: 1, CA Id [4:7488003817603846499:3042]. exit pack cells method shardId: 72075186224037892 processedRows: 0 packed rows: 1 freeSpace: 8386365 2025-03-31T15:59:08.192764Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710807, task: 1, CA Id [4:7488003817603846499:3042]. returned 1 rows; processed 1 rows 2025-03-31T15:59:08.192808Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710807, task: 1, CA Id [4:7488003817603846499:3042]. dropping batch for read #0 2025-03-31T15:59:08.192820Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710807, task: 1, CA Id [4:7488003817603846499:3042]. effective maxinflight 1024 sorted 0 2025-03-31T15:59:08.192834Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710807, task: 1, CA Id [4:7488003817603846499:3042]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2025-03-31T15:59:08.192851Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710807, task: 1, CA Id [4:7488003817603846499:3042]. returned async data processed rows 1 left freeSpace 8386365 received rows 1 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2025-03-31T15:59:08.193076Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003817603846499:3042], TxId: 281474976710807, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=Nzc5NzgwNTctN2QzYzIyMjMtY2I0YTIzLTRkNTI5MTMx. TraceId : 01jqpev4xwff4jj0hd23mgjh2d. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-03-31T15:59:08.193101Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003817603846499:3042], TxId: 281474976710807, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=Nzc5NzgwNTctN2QzYzIyMjMtY2I0YTIzLTRkNTI5MTMx. TraceId : 01jqpev4xwff4jj0hd23mgjh2d. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2025-03-31T15:59:08.193107Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003817603846500:3043], TxId: 281474976710807, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=Nzc5NzgwNTctN2QzYzIyMjMtY2I0YTIzLTRkNTI5MTMx. TraceId : 01jqpev4xwff4jj0hd23mgjh2d. CurrentExecutionId : . DatabaseId : /Root. 
PoolId : default. Database : . }. CA StateFunc 271646923 2025-03-31T15:59:08.193138Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710807, task: 1. Tasks execution finished, waiting for chunk delivery in output channelId: 1, seqNo: [1] 2025-03-31T15:59:08.193144Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710807, task: 2. Finish input channelId: 1, from: [4:7488003817603846499:3042] 2025-03-31T15:59:08.193199Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003817603846500:3043], TxId: 281474976710807, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=Nzc5NzgwNTctN2QzYzIyMjMtY2I0YTIzLTRkNTI5MTMx. TraceId : 01jqpev4xwff4jj0hd23mgjh2d. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2025-03-31T15:59:08.193447Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003817603846500:3043], TxId: 281474976710807, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=Nzc5NzgwNTctN2QzYzIyMjMtY2I0YTIzLTRkNTI5MTMx. TraceId : 01jqpev4xwff4jj0hd23mgjh2d. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-03-31T15:59:08.193462Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003817603846499:3042], TxId: 281474976710807, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=Nzc5NzgwNTctN2QzYzIyMjMtY2I0YTIzLTRkNTI5MTMx. TraceId : 01jqpev4xwff4jj0hd23mgjh2d. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646927 2025-03-31T15:59:08.193491Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003817603846499:3042], TxId: 281474976710807, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=Nzc5NzgwNTctN2QzYzIyMjMtY2I0YTIzLTRkNTI5MTMx. TraceId : 01jqpev4xwff4jj0hd23mgjh2d. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2025-03-31T15:59:08.193512Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710807, task: 1. Tasks execution finished 2025-03-31T15:59:08.193527Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003817603846499:3042], TxId: 281474976710807, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=Nzc5NzgwNTctN2QzYzIyMjMtY2I0YTIzLTRkNTI5MTMx. TraceId : 01jqpev4xwff4jj0hd23mgjh2d. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Compute state finished. All channels and sinks finished 2025-03-31T15:59:08.193566Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003817603846500:3043], TxId: 281474976710807, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=Nzc5NzgwNTctN2QzYzIyMjMtY2I0YTIzLTRkNTI5MTMx. TraceId : 01jqpev4xwff4jj0hd23mgjh2d. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2025-03-31T15:59:08.193603Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710807, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2025-03-31T15:59:08.193614Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710807, task: 2. Tasks execution finished 2025-03-31T15:59:08.193627Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7488003817603846500:3043], TxId: 281474976710807, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=Nzc5NzgwNTctN2QzYzIyMjMtY2I0YTIzLTRkNTI5MTMx. TraceId : 01jqpev4xwff4jj0hd23mgjh2d. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. 
Compute state finished. All channels and sinks finished 2025-03-31T15:59:08.193634Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710807, task: 1. pass away 2025-03-31T15:59:08.193691Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710807, task: 2. pass away 2025-03-31T15:59:08.193734Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976710807;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-03-31T15:59:08.193758Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976710807;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-03-31T15:59:08.863512Z node 4 :FQ_PENDING_FETCHER ERROR: Error with GetTask:
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv6:%5B::%5D:7297: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint [::]:7297 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::CheckCastUint64ToInt64-StreamLookupJoin+NotNull [GOOD] Test command err: Trying to start YDB, gRPC: 14602, MsgBus: 25608 2025-03-31T15:58:51.067345Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488003744804246680:2276];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:58:51.067434Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0019d6/r3tmp/tmpsD0zk4/pdisk_1.dat 2025-03-31T15:58:52.122716Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:58:52.122805Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:58:52.146630Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:58:52.150684Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:58:52.151380Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 14602, node 1 2025-03-31T15:58:52.388642Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:58:52.388664Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:58:52.388671Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:58:52.388785Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25608 TClient is connected to server localhost:25608 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:58:53.394533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T15:58:53.421717Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T15:58:53.426911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:58:53.674186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:58:53.979425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:58:54.114864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:58:56.067310Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488003744804246680:2276];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:58:56.067394Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:58:57.402663Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003770574052000:2410], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:57.402766Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:58.297133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T15:58:58.395817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T15:58:58.477146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T15:58:58.544460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T15:58:58.613837Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T15:58:58.692568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T15:58:58.787125Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003774869019826:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:58.787223Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:58.787564Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003774869019831:2467], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:58.791631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T15:58:58.813193Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-31T15:58:58.813843Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488003774869019833:2468], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T15:58:58.897747Z node 1 :TX_PROXY ERROR: Actor# [1:7488003774869019889:3463] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:59:00.335577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T15:59:00.437089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 7508, MsgBus: 19728 2025-03-31T15:59:03.112194Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488003793909375021:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:59:03.112253Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0019d6/r3tmp/tmp2qnC9A/pdisk_1.dat 2025-03-31T15:59:03.520459Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:03.612147Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:59:03.612264Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:59:03.621040Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7508, node 2 2025-03-31T15:59:03.771605Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:59:03.771636Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:59:03.771645Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:59:03.771761Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19728 TClient is connected to server localhost:19728 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T15:59:04.553248Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:04.561971Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T15:59:04.582411Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:04.670223Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:04.928763Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:05.038440Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:08.115284Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488003793909375021:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:59:08.115367Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:59:08.190667Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488003815384213300:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:08.190853Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:08.210068Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T15:59:08.263351Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T15:59:08.377454Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T15:59:08.465653Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T15:59:08.525432Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T15:59:08.619920Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T15:59:08.765674Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488003815384213827:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:08.765796Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:08.766242Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488003815384213832:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:08.771428Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T15:59:08.811853Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488003815384213834:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T15:59:08.877658Z node 2 :TX_PROXY ERROR: Actor# [2:7488003815384213890:3460] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:59:10.181118Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T15:59:10.266366Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus1Stripe [GOOD] >> TLocalTests::TestAlterTenant [GOOD] |68.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::JoinByComplexKeyWithNullComponents-StreamLookupJoin [GOOD] Test command err: Trying to start YDB, gRPC: 9990, MsgBus: 30778 2025-03-31T15:58:53.493722Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488003754150996447:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:58:53.494125Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0019c4/r3tmp/tmpj9RCqI/pdisk_1.dat 2025-03-31T15:58:54.355723Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:58:54.426663Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:58:54.426766Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:58:54.430606Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9990, node 1 2025-03-31T15:58:54.762678Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:58:54.762697Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:58:54.762705Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:58:54.762800Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30778 TClient is connected to server localhost:30778 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:58:56.082347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:58:56.145055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:58:56.429069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:58:56.830303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:58:56.980760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:58:58.490987Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488003754150996447:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:58:58.491074Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:59:00.204874Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003784215769171:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:00.204986Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:00.655240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T15:59:00.697771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T15:59:00.729901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T15:59:00.780975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T15:59:00.841132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T15:59:00.906776Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T15:59:00.990283Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003784215769687:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:00.990362Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:00.990573Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003784215769692:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:00.994485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T15:59:01.017434Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488003784215769694:2467], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T15:59:01.091375Z node 1 :TX_PROXY ERROR: Actor# [1:7488003788510737046:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:59:02.197436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T15:59:02.251531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 9383, MsgBus: 12121 2025-03-31T15:59:04.221210Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488003798637124930:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:59:04.221240Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0019c4/r3tmp/tmpWU63Fs/pdisk_1.dat 2025-03-31T15:59:04.553414Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:59:04.553517Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:59:04.564513Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:04.568548Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9383, node 2 2025-03-31T15:59:04.735844Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:59:04.735868Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:59:04.735876Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:59:04.735989Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12121 TClient is connected to server localhost:12121 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T15:59:05.452580Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:05.463424Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T15:59:05.482453Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:05.592101Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:05.980182Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:06.090891Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:08.509610Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488003815816995882:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:08.509709Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:08.559804Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T15:59:08.633059Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T15:59:08.689002Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T15:59:08.756189Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T15:59:08.843727Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T15:59:08.934376Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T15:59:09.006004Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488003820111963693:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:09.006072Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:09.006400Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488003820111963698:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:09.010707Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T15:59:09.037650Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488003820111963700:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T15:59:09.134034Z node 2 :TX_PROXY ERROR: Actor# [2:7488003820111963755:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:59:09.230604Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488003798637124930:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:59:09.230679Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:59:10.308186Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T15:59:10.355184Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 >> TNodeBrokerTest::TestListNodes >> KqpIndexes::PrefixedVectorIndexOrderByCosineSimilarityNotNullableLevel1 [GOOD] >> KqpIndexes::PrefixedVectorIndexOrderByCosineSimilarityNotNullableLevel2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_upload_rows/unittest >> TTxDataShardUploadRows::TestUploadRowsLocks [GOOD] Test command err: 2025-03-31T15:58:57.126423Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:58:57.126699Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:57.126879Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001913/r3tmp/tmpilxYIV/pdisk_1.dat 2025-03-31T15:58:57.606756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T15:58:57.692489Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:58:57.744662Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:58:57.744779Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:58:57.756286Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:58:57.852423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T15:58:58.002389Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:684:2579] 2025-03-31T15:58:58.002672Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:58:58.085329Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:58:58.085645Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T15:58:58.091056Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T15:58:58.091187Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T15:58:58.091249Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T15:58:58.091661Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T15:58:58.092170Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:686:2581] 2025-03-31T15:58:58.092415Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:58:58.129068Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T15:58:58.129173Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:720:2579] in generation 1 2025-03-31T15:58:58.130296Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:58:58.130458Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T15:58:58.171969Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-03-31T15:58:58.172058Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-03-31T15:58:58.172106Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-03-31T15:58:58.172426Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T15:58:58.172784Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T15:58:58.172858Z node 1 :TX_DATASHARD DEBUG: DataShard 
72075186224037889 persisting started state actor id [1:731:2581] in generation 1 2025-03-31T15:58:58.186867Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037891 actor [1:689:2583] 2025-03-31T15:58:58.187124Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:58:58.200565Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:58:58.200934Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:694:2585] 2025-03-31T15:58:58.201126Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:58:58.222310Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T15:58:58.223843Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037891 2025-03-31T15:58:58.223918Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037891 2025-03-31T15:58:58.223961Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037891 2025-03-31T15:58:58.224340Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T15:58:58.225170Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T15:58:58.225262Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037891 persisting started state actor id [1:752:2583] in generation 1 2025-03-31T15:58:58.226094Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:58:58.226195Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T15:58:58.227695Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2025-03-31T15:58:58.227775Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037890 2025-03-31T15:58:58.227826Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037890 2025-03-31T15:58:58.228153Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T15:58:58.228285Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T15:58:58.228358Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037890 persisting started state actor id [1:753:2585] in generation 1 2025-03-31T15:58:58.240085Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T15:58:58.274841Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T15:58:58.275057Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T15:58:58.275260Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:758:2621] 2025-03-31T15:58:58.275306Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T15:58:58.275357Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T15:58:58.275406Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T15:58:58.275515Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T15:58:58.275548Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-03-31T15:58:58.275623Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T15:58:58.275678Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:759:2622] 
2025-03-31T15:58:58.275698Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-03-31T15:58:58.275717Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-03-31T15:58:58.275736Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-31T15:58:58.276045Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T15:58:58.276083Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037891 2025-03-31T15:58:58.276129Z node 1 :TX_DATASHARD DEBUG: 72075186224037891 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T15:58:58.276189Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037891, actorId: [1:760:2623] 2025-03-31T15:58:58.276209Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037891 2025-03-31T15:58:58.276246Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037891, state: WaitScheme 2025-03-31T15:58:58.276267Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-03-31T15:58:58.276675Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T15:58:58.276763Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T15:58:58.276807Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-03-31T15:58:58.276874Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-03-31T15:58:58.276945Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T15:58:58.276974Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037890 2025-03-31T15:58:58.277026Z node 1 :TX_DATASHARD DEBUG: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T15:58:58.277103Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037890, actorId: [1:761:2624] 2025-03-31T15:58:58.277157Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890 2025-03-31T15:58:58.277185Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2025-03-31T15:58:58.277222Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-03-31T15:58:58.277441Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T15:58:58.277504Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T15:58:58.277549Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T15:58:58.277604Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T15:58:58.277664Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-31T15:58:58.277687Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-03-31T15:58:58.277709Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-03-31T15:58:58.277736Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 
2025-03-31T15:58:58.277771Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037891 2025-03-31T15:58:58.277831Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037891 2025-03-31T15:58:58.277958Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:671:2573], serverId# [1:707:2591], sessionId# [0:0:0] 2025-03-31T15:58:58.278055Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:672:2574], serverId# [1:713:2597], sessionId# [0:0:0] 2025-03-31T15:58:58.278086Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037891 2025-03-31T15:58:58.278111Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 active 0 active planned 0 immediate 0 planned 0 2025-03-31T15:58:58.278133Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037891 TxInFly 0 2025-03-31T15:58:58.278162Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037891 2025-03-31T15:58:58.278422Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T15:58:58.278669Z node 1 :TX_DATASHARD DEB ... 25-03-31T15:59:09.866774Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T15:59:09.879882Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T15:59:09.880028Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-31T15:59:10.061191Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:703:2593], serverId# [3:705:2595], sessionId# [0:0:0] 2025-03-31T15:59:10.061735Z node 3 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T15:59:10.061790Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T15:59:10.062534Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T15:59:10.062584Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-31T15:59:10.062632Z node 3 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-31T15:59:10.062886Z node 3 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-31T15:59:10.063040Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T15:59:10.063782Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T15:59:10.063879Z node 3 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-31T15:59:10.064303Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-31T15:59:10.064725Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-03-31T15:59:10.066296Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-03-31T15:59:10.066349Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T15:59:10.069518Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-03-31T15:59:10.069591Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T15:59:10.070372Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T15:59:10.070417Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T15:59:10.070462Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-31T15:59:10.070529Z node 3 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-31T15:59:10.070569Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-31T15:59:10.070638Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T15:59:10.071362Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T15:59:10.073219Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-03-31T15:59:10.073312Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-31T15:59:10.075509Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-03-31T15:59:10.084418Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:10.084523Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:747:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:10.084593Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:10.089813Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T15:59:10.098123Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T15:59:10.290855Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T15:59:10.306637Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T15:59:10.419068Z node 3 :TX_PROXY ERROR: Actor# [3:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:59:11.128927Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqpev7820z8skv63h12cdf3h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MjYzYjBlZjAtNDU2YTdlMWEtMTE4OTE1OC1mMGQ5ZjlkOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:59:11.136243Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:856:2687], serverId# [3:857:2688], sessionId# [0:0:0] 2025-03-31T15:59:11.136738Z node 3 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888 2025-03-31T15:59:11.136969Z node 3 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row count=3 2025-03-31T15:59:11.151635Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T15:59:11.357304Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqpev8a60p3p2b69fwgw4ap9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZWE0NTFkYTItZWI2NDI0ZDktMmI1ZTMyZmEtMzAyODIzZGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:59:11.359972Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 281474976715661, counter# 0 for [OwnerId: 72057594046644480, LocalPathId: 2] { items { uint32_value: 300 } } 2025-03-31T15:59:11.367306Z node 3 :TX_DATASHARD INFO: TTxDirectBase(36) Execute: at tablet# 72075186224037888 2025-03-31T15:59:11.378784Z node 3 :TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037888 2025-03-31T15:59:11.378877Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T15:59:11.378975Z node 3 :TX_DATASHARD DEBUG: Waiting for PlanStep# 1501 from mediator time cast 2025-03-31T15:59:11.379838Z node 3 :TX_DATASHARD DEBUG: Notified by mediator time cast with PlanStep# 1501 at tablet 72075186224037888 2025-03-31T15:59:11.379925Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T15:59:11.449412Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqpev8gmfv9f0gtc36bggvvq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZWE0NTFkYTItZWI2NDI0ZDktMmI1ZTMyZmEtMzAyODIzZGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:59:11.452289Z node 3 :TX_DATASHARD DEBUG: Executing write operation for [0:5] at 72075186224037888 2025-03-31T15:59:11.452536Z node 3 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=5; 2025-03-31T15:59:11.463578Z node 3 :TX_DATASHARD INFO: Write transaction 5 at 72075186224037888 has an error: Operation is aborting because locks are not valid 2025-03-31T15:59:11.463910Z node 3 :TX_DATASHARD ERROR: Prepare transaction failed. 
txid 5 at tablet 72075186224037888 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-03-31T15:59:11.464166Z node 3 :TX_DATASHARD ERROR: Errors while proposing transaction txid 5 at tablet 72075186224037888 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-03-31T15:59:11.464255Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T15:59:11.464617Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:913:2693], Table: `/Root/table-1` ([72057594046644480:2:1]), SessionActorId: [3:863:2693]Got LOCKS BROKEN for table `/Root/table-1`. ShardID=72075186224037888, Sink=[3:913:2693].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-03-31T15:59:11.465300Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:906:2693], SessionActorId: [3:863:2693], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/table-1`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[3:863:2693]. isRollback=0 2025-03-31T15:59:11.465734Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZWE0NTFkYTItZWI2NDI0ZDktMmI1ZTMyZmEtMzAyODIzZGI=, ActorId: [3:863:2693], ActorState: ExecuteState, TraceId: 01jqpev8gmfv9f0gtc36bggvvq, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [3:907:2693] from: [3:906:2693] 2025-03-31T15:59:11.465990Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:907:2693] TxId: 281474976715662. Ctx: { TraceId: 01jqpev8gmfv9f0gtc36bggvvq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZWE0NTFkYTItZWI2NDI0ZDktMmI1ZTMyZmEtMzAyODIzZGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/table-1`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-03-31T15:59:11.467425Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZWE0NTFkYTItZWI2NDI0ZDktMmI1ZTMyZmEtMzAyODIzZGI=, ActorId: [3:863:2693], ActorState: ExecuteState, TraceId: 01jqpev8gmfv9f0gtc36bggvvq, Create QueryResponse for error on request, msg: 2025-03-31T15:59:11.468438Z node 3 :TX_DATASHARD DEBUG: Executing write operation for [0:6] at 72075186224037888 2025-03-31T15:59:11.468507Z node 3 :TX_DATASHARD DEBUG: Skip empty write operation for [0:6] at 72075186224037888 2025-03-31T15:59:11.468724Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 |68.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TLocalTests::TestAddTenantWhileResolving [GOOD] >> TDynamicNameserverTest::CacheMissDifferentDeadline |68.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::FixedNodeId >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedBeforeRead-UseSink [GOOD] >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedBeforeRead+UseSink |68.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TLocalTests::TestAlterTenant [GOOD] >> Yq_1::CreateQuery_Without_Connection [GOOD] |68.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus1Stripe [GOOD] |68.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TBlobStorageProxyTest::TestDoubleGroups [GOOD] >> TBlobStorageProxyTest::TestDoubleFailureStripe4Plus2 |68.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TLocalTests::TestAddTenantWhileResolving [GOOD] |68.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissDifferentDeadline [GOOD] |68.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> DataShardSnapshots::LockedWriteCleanupOnSplit+UseSink [GOOD] >> DataShardSnapshots::LockedWriteCleanupOnSplit-UseSink >> TSchemeShardSubDomainTest::DiskSpaceUsage |68.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest >> IncrementalRestoreScan::ChangeSenderEmpty >> IncrementalRestoreScan::Empty >> IncrementalRestoreScan::ChangeSenderSimple >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-9 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-49 >> TSchemeShardSubDomainTest::SimultaneousDeclareAndCreateTable >> TTxDataShardUploadRows::BulkUpsertDuringAddIndexRaceCorruption [GOOD] >> TSchemeShardSubDomainTest::SchemeLimitsCreatePq ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissDifferentDeadline [GOOD] Test command err: 2025-03-31T15:59:14.419231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:59:14.419319Z node 1 :IMPORT WARN: Table profiles were not loaded ... waiting for cache miss ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR ... 
waiting for cache miss (done) >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus2Stripe [GOOD] >> TSchemeShardSubDomainTest::CopyRejects >> TSchemeShardSubDomainTest::SimultaneousDeclareAndDefine >> TSchemeShardSubDomainTest::RedefineErrors >> TSchemeShardSubDomainTest::DeclareAndDelete >> TSchemeShardSubDomainTest::CreateWithoutPlanResolution ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::CreateQuery_Without_Connection [GOOD] Test command err: 2025-03-31T15:58:16.336014Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488003592586620733:2279];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:58:16.336075Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0331 15:58:17.323348007 41895 dns_resolver.cc:162] no server name supplied in dns URI E0331 15:58:17.345279264 41895 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-03-31T15:58:17.564759Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:18.428493Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:12227: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:12227 } ] 2025-03-31T15:58:18.486725Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:322: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:12227: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:12227 2025-03-31T15:58:18.567588Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:19.571528Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:20.067229Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:12227: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:12227 } ] 2025-03-31T15:58:20.575945Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:21.344031Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488003592586620733:2279];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:58:21.344093Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:58:21.583387Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; E0331 15:58:22.289185114 41962 dns_resolver.cc:162] no server name supplied in dns URI E0331 15:58:22.289353812 41962 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-03-31T15:58:22.347933Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:22.587574Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:22.675411Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:322: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:12227: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:12227 2025-03-31T15:58:22.683364Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:12227: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:12227 } ] 2025-03-31T15:58:23.361483Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:23.590719Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:24.012426Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7488003626946359325:2316], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:58:24.012629Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T15:58:24.103265Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7488003626946359325:2316], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000cd8/r3tmp/tmpYxnG9Z/pdisk_1.dat 2025-03-31T15:58:24.297313Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7488003626946359325:2316], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:58:24.367267Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:24.643578Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7488003626946359325:2316], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:58:24.643686Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 12227, node 1 2025-03-31T15:58:24.887524Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T15:58:24.887541Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T15:58:25.288865Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:58:25.288887Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:58:25.288895Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:58:25.289045Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T15:58:25.291779Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:19725 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-03-31T15:58:25.523709Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:58:25.526179Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:58:25.542107Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:58:25.571857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:58:26.968025Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/idempotency_keys". Create session OK 2025-03-31T15:58:26.968064Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-03-31T15:58:26.968074Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-03-31T15:58:26.970059Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/compute_databases". Create session OK 2025-03-31T15:58:26.970091Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-03-31T15:58:26.970099Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-03-31T15:58:26.972637Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/queries". 
Create session OK 2025-03-31T15:58:26.972674Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-03-31T15:58:26.972682Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-03-31T15:58:26.973579Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenant_acks". Create session OK 2025-03-31T15:58:26.973592Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-03-31T15:58:26.973597Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-03-31T15:58:26.984303Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/pending_small". Create session OK 2025-03-31T15:58:26.984344Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-03-31T15:58:26.984352Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-03-31T15:58:26.985444Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-31T15:58:26.987243Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/mappings". Create session OK 2025-03-31T15:58:26.987260Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-03-31T15:58:26.987267Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-03-31T15:58:26.997650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-31T15:58:26.999395Z node 1 :YQ_CONTROL_PLANE_ ... .774887Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.774923Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.775003Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.775061Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.775256Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.775285Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.775459Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.775584Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.775608Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.775653Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.775773Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.775956Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.775981Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.776003Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.776064Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.776121Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.776148Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.776206Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.776272Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.776307Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.776336Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota 
finished with error: 2025-03-31T15:59:09.776405Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.776429Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.776450Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.776576Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.776612Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.776637Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.776683Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.776740Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.776874Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.776912Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.777022Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.777056Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.777136Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.777179Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.777271Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.777303Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.777343Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.777442Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.777521Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.777555Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.777628Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.777663Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.777711Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.777799Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.777844Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.777889Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.778013Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.778073Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.778106Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.778145Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.778242Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.778333Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.778367Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.778424Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.778541Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.778596Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.778656Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.778756Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.778840Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota 
finished with error: 2025-03-31T15:59:09.778872Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.778956Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.783173Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.783448Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.783600Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.784002Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.784041Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.784168Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.784220Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.784248Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.784370Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.784532Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.784557Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.784578Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.784654Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.784681Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.784830Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.784864Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.784974Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.784997Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.785028Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.785165Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.785189Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.785209Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.785300Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.785325Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.785349Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.785464Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.785495Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.785527Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.785652Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.785701Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.785729Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.785917Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.786092Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.786252Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.786307Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.786409Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.786501Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota 
finished with error: 2025-03-31T15:59:09.786613Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.786653Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.786766Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.786794Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.786817Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.786952Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.787007Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.787034Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.787284Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.787321Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.787389Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.787726Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.787753Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.787778Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.788048Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.788094Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.788120Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.788231Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.788331Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.788430Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.788582Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.788699Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.788730Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.789013Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.789046Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.789069Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.789353Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.789405Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.789428Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.789562Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:09.789651Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: [good] Yq_1::CreateQuery_Without_Connection >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTable >> TSchemeShardSubDomainTest::SimultaneousDefineAndCreateTable >> TSchemeShardSubDomainTest::TopicDiskSpaceQuotas >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain-EnableSeparateQuotas >> TSchemeShardSubDomainTest::ConcurrentCreateSubDomainAndDescribe |69.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus2Stripe [GOOD] >> TNodeBrokerTest::FixedNodeId [GOOD] >> TSchemeShardSubDomainTest::SimultaneousDeclareAndCreateTable [GOOD] >> 
DataShardSnapshots::LockedWriteDistributedCommitFreeze-UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitCrossConflict-UseSink >> TSchemeShardSubDomainTest::CreateAndWait >> TSchemeShardSubDomainTest::DeclareAndDelete [GOOD] >> TSchemeShardSubDomainTest::SimultaneousDeclareAndDefine [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_upload_rows/unittest >> TTxDataShardUploadRows::BulkUpsertDuringAddIndexRaceCorruption [GOOD] Test command err: 2025-03-31T15:58:59.447724Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:58:59.448125Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:59.448336Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001936/r3tmp/tmpGpbGbU/pdisk_1.dat 2025-03-31T15:59:00.081390Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T15:59:00.270958Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:00.321010Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:59:00.321135Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:59:00.336464Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:59:00.449811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T15:59:00.559612Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:676:2578] 2025-03-31T15:59:00.559973Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:59:00.666547Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:59:00.666721Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T15:59:00.672990Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T15:59:00.673092Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T15:59:00.673150Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T15:59:00.673612Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T15:59:00.673996Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T15:59:00.674072Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:703:2578] in generation 1 2025-03-31T15:59:00.678172Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:680:2580] 2025-03-31T15:59:00.678417Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:59:00.691764Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:59:00.691880Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T15:59:00.693334Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-03-31T15:59:00.693415Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-03-31T15:59:00.693475Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-03-31T15:59:00.693799Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T15:59:00.693934Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T15:59:00.694020Z node 1 :TX_DATASHARD DEBUG: DataShard 
72075186224037889 persisting started state actor id [1:711:2580] in generation 1 2025-03-31T15:59:00.707956Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T15:59:00.764488Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T15:59:00.764751Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T15:59:00.764864Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:714:2599] 2025-03-31T15:59:00.764907Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T15:59:00.764941Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T15:59:00.764977Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T15:59:00.765307Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T15:59:00.765363Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-03-31T15:59:00.765413Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T15:59:00.765486Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:715:2600] 2025-03-31T15:59:00.765509Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-03-31T15:59:00.765539Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-03-31T15:59:00.765562Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-31T15:59:00.765926Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T15:59:00.766040Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T15:59:00.766670Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T15:59:00.766717Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T15:59:00.766777Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T15:59:00.766827Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T15:59:00.766878Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-03-31T15:59:00.766936Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-03-31T15:59:00.767005Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:670:2574], serverId# [1:688:2584], sessionId# [0:0:0] 2025-03-31T15:59:00.767045Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-31T15:59:00.767066Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-03-31T15:59:00.771377Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-03-31T15:59:00.771499Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-03-31T15:59:00.771767Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 
2025-03-31T15:59:00.772018Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T15:59:00.772124Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T15:59:00.772663Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:671:2575], serverId# [1:701:2592], sessionId# [0:0:0] 2025-03-31T15:59:00.772874Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-31T15:59:00.773029Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-03-31T15:59:00.773089Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-03-31T15:59:00.774959Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T15:59:00.787309Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-03-31T15:59:00.798227Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T15:59:00.798337Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-31T15:59:00.798450Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-03-31T15:59:00.798484Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-03-31T15:59:00.976843Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:739:2618], serverId# [1:742:2621], sessionId# [0:0:0] 2025-03-31T15:59:00.977044Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:740:2619], serverId# [1:743:2622], sessionId# [0:0:0] 2025-03-31T15:59:00.986509Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037889 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2025-03-31T15:59:00.986602Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-31T15:59:00.991436Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T15:59:00.991527Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T15:59:00.991782Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-31T15:59:00.991836Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-03-31T15:59:00.991908Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037889 2025-03-31T15:59:00.992220Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-31T15:59:00.992410Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active 
planned 1 immediate 0 planned 1 2025-03-31T15:59:00.992648Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T15:59:00.992679Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-31T15:59:00.992733Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-31T15:59:00.992875Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-31T15:59:00.992966Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T15:59:00.993058Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-31T15:59:00.993120Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037889 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-31T15:59:00.995561Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037889 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-31T ... an for [0:5] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-03-31T15:59:14.556782Z node 3 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit DirectOp 2025-03-31T15:59:14.556828Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit DirectOp 2025-03-31T15:59:14.556881Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v6000/281474976710759 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v3000/18446744073709551615 ImmediateWriteEdgeReplied# v3000/18446744073709551615 2025-03-31T15:59:14.557052Z node 3 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-31T15:59:14.557106Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit DirectOp 2025-03-31T15:59:14.557165Z node 3 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit CompletedOperations 2025-03-31T15:59:14.557203Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit CompletedOperations 2025-03-31T15:59:14.557258Z node 3 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-03-31T15:59:14.557286Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit CompletedOperations 2025-03-31T15:59:14.557318Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:5] at 72075186224037888 has finished 2025-03-31T15:59:14.568479Z node 3 :TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037888 2025-03-31T15:59:14.568599Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:5] at 72075186224037888 on unit DirectOp 2025-03-31T15:59:14.568661Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 ... bulk upsert finished with status SCHEME_ERROR 2025-03-31T15:59:14.813057Z node 3 :TX_PROXY DEBUG: actor# [3:59:2106] Handle TEvExecuteKqpTransaction 2025-03-31T15:59:14.813145Z node 3 :TX_PROXY DEBUG: actor# [3:59:2106] TxId# 281474976715662 ProcessProposeKqpTransaction 2025-03-31T15:59:14.814289Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715662. 
Ctx: { TraceId: 01jqpevbmd3ekqxj16mvrss7wm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=OGNmMjU4N2UtYTcwYTY5YzEtYTU4NzQzOWItN2ZmZGJkYzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:59:14.817650Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [3:1115:2908], Recipient [3:670:2574]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 3 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2025-03-31T15:59:14.817859Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-03-31T15:59:14.817928Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v8000/0 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v8000/18446744073709551615 ImmediateWriteEdgeReplied# v8000/18446744073709551615 2025-03-31T15:59:14.817982Z node 3 :TX_DATASHARD TRACE: 72075186224037888 changed HEAD read to non-repeatable v8000/18446744073709551615 2025-03-31T15:59:14.818055Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit CheckRead 2025-03-31T15:59:14.818183Z node 3 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2025-03-31T15:59:14.818239Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit CheckRead 2025-03-31T15:59:14.818281Z node 3 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-03-31T15:59:14.818335Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit BuildAndWaitDependencies 2025-03-31T15:59:14.818384Z node 3 :TX_DATASHARD TRACE: Activated operation [0:6] at 72075186224037888 2025-03-31T15:59:14.818427Z node 3 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2025-03-31T15:59:14.818453Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-03-31T15:59:14.818482Z node 3 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037888 to execution unit ExecuteRead 2025-03-31T15:59:14.818503Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit ExecuteRead 2025-03-31T15:59:14.818630Z node 3 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 3 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2025-03-31T15:59:14.818919Z node 3 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[3:1115:2908], 0} after executionsCount# 1 2025-03-31T15:59:14.818981Z node 3 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[3:1115:2908], 0} sends rowCount# 2, bytes# 64, quota rows left# 999, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-03-31T15:59:14.819159Z node 3 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[3:1115:2908], 0} finished in read 2025-03-31T15:59:14.819252Z node 3 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2025-03-31T15:59:14.819299Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit ExecuteRead 2025-03-31T15:59:14.819343Z node 3 :TX_DATASHARD TRACE: Add [0:6] at 
72075186224037888 to execution unit CompletedOperations 2025-03-31T15:59:14.819372Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit CompletedOperations 2025-03-31T15:59:14.819429Z node 3 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2025-03-31T15:59:14.819461Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit CompletedOperations 2025-03-31T15:59:14.819489Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:6] at 72075186224037888 has finished 2025-03-31T15:59:14.819535Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-03-31T15:59:14.819651Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-03-31T15:59:14.821045Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [3:1115:2908], Recipient [3:670:2574]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-03-31T15:59:14.821139Z node 3 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 2 } }, { items { uint32_value: 3 } items { uint32_value: 4 } } 2025-03-31T15:59:15.024257Z node 3 :TX_PROXY DEBUG: actor# [3:59:2106] Handle TEvExecuteKqpTransaction 2025-03-31T15:59:15.024343Z node 3 :TX_PROXY DEBUG: actor# [3:59:2106] TxId# 281474976715663 ProcessProposeKqpTransaction 2025-03-31T15:59:15.025298Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqpevbwdf1a76dsxkfjnk1wk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZGM3ZjNiMTEtOGIzMzkzYS00YjZhZWFmOS1jOWRkMjRkZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T15:59:15.028341Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [3:1146:2933], Recipient [3:911:2738]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 8 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false RangesSize: 1 2025-03-31T15:59:15.028549Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2025-03-31T15:59:15.028612Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037889 CompleteEdge# v6000/281474976710759 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v5000/18446744073709551615 ImmediateWriteEdgeReplied# v5000/18446744073709551615 2025-03-31T15:59:15.028667Z node 3 :TX_DATASHARD TRACE: 72075186224037889 changed HEAD read to non-repeatable v8000/18446744073709551615 2025-03-31T15:59:15.028740Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037889 on unit CheckRead 2025-03-31T15:59:15.028854Z node 3 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037889 is Executed 2025-03-31T15:59:15.028911Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037889 executing on unit CheckRead 2025-03-31T15:59:15.028954Z node 3 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-03-31T15:59:15.028993Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037889 on unit BuildAndWaitDependencies 2025-03-31T15:59:15.029051Z node 3 :TX_DATASHARD TRACE: Activated operation [0:4] at 72075186224037889 2025-03-31T15:59:15.029094Z node 3 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037889 is Executed 
2025-03-31T15:59:15.029142Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-03-31T15:59:15.029170Z node 3 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037889 to execution unit ExecuteRead 2025-03-31T15:59:15.029196Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037889 on unit ExecuteRead 2025-03-31T15:59:15.029316Z node 3 :TX_DATASHARD TRACE: 72075186224037889 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 8 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false } 2025-03-31T15:59:15.029680Z node 3 :TX_DATASHARD TRACE: 72075186224037889 Complete read# {[3:1146:2933], 0} after executionsCount# 1 2025-03-31T15:59:15.029743Z node 3 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[3:1146:2933], 0} sends rowCount# 2, bytes# 64, quota rows left# 32765, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-03-31T15:59:15.029888Z node 3 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[3:1146:2933], 0} finished in read 2025-03-31T15:59:15.029961Z node 3 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037889 is Executed 2025-03-31T15:59:15.029992Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037889 executing on unit ExecuteRead 2025-03-31T15:59:15.030020Z node 3 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037889 to execution unit CompletedOperations 2025-03-31T15:59:15.030054Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037889 on unit CompletedOperations 2025-03-31T15:59:15.030106Z node 3 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037889 is Executed 2025-03-31T15:59:15.030129Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037889 executing on unit CompletedOperations 2025-03-31T15:59:15.030156Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:4] at 72075186224037889 has finished 2025-03-31T15:59:15.030201Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2025-03-31T15:59:15.030315Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2025-03-31T15:59:15.031937Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [3:1146:2933], Recipient [3:911:2738]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-03-31T15:59:15.032013Z node 3 :TX_DATASHARD TRACE: 72075186224037889 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 2 } }, { items { uint32_value: 3 } items { uint32_value: 4 } } >> TSchemeShardSubDomainTest::CreateWithoutPlanResolution [GOOD] >> TDynamicNameserverTest::BasicFunctionality [GOOD] >> TSchemeShardSubDomainTest::RedefineErrors [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousDeclareAndCreateTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-31T15:59:15.957735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 
60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T15:59:15.957821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:15.957858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T15:59:15.957890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T15:59:15.957949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T15:59:15.957990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T15:59:15.958057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:15.958159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T15:59:15.958477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:59:16.058407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:59:16.058466Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:16.073107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:59:16.073472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T15:59:16.073671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T15:59:16.087951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T15:59:16.088310Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T15:59:16.089157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:16.089601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:59:16.092359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:16.093691Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:16.093754Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:16.093863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T15:59:16.093912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:16.093952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T15:59:16.094189Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T15:59:16.106678Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 
2025-03-31T15:59:16.282632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:59:16.282871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:16.283133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T15:59:16.283354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T15:59:16.283409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:16.288214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:16.288392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T15:59:16.288611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:16.288671Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T15:59:16.288716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T15:59:16.288774Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T15:59:16.293133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:16.293224Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:59:16.293270Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T15:59:16.296125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:16.296202Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:16.296273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:16.296342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T15:59:16.300713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:59:16.302872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T15:59:16.303099Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T15:59:16.304156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:16.304289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:16.304362Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:16.305397Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T15:59:16.305474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:16.305671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:59:16.305759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:59:16.308171Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:16.308242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:16.308440Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:16.308485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T15:59:16.308862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:16.308906Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T15:59:16.309027Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:16.309064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:16.309106Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:16.309140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:16.309175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T15:59:16.309230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:16.309285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T15:59:16.309317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T15:59:16.309415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:59:16.309451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T15:59:16.309482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T15:59:16.315865Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:16.316048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:16.316092Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... ongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2025-03-31T15:59:16.387592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002 2025-03-31T15:59:16.387937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:16.388080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:16.388132Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-03-31T15:59:16.388393Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 128 -> 240 2025-03-31T15:59:16.388449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-03-31T15:59:16.388615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:59:16.388675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-31T15:59:16.388749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 100 2025-03-31T15:59:16.393129Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:16.393191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:16.393359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T15:59:16.393477Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:16.393526Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 100, path id: 1 2025-03-31T15:59:16.393567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 100, path id: 2 2025-03-31T15:59:16.393627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-03-31T15:59:16.393668Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 100:0 ProgressState 2025-03-31T15:59:16.393770Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-03-31T15:59:16.393805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-03-31T15:59:16.393851Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-03-31T15:59:16.393891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-03-31T15:59:16.393941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2025-03-31T15:59:16.393996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-03-31T15:59:16.394034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 100:0 2025-03-31T15:59:16.394067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 100:0 2025-03-31T15:59:16.394143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-31T15:59:16.394180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2025-03-31T15:59:16.394221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-03-31T15:59:16.394269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-03-31T15:59:16.395377Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-03-31T15:59:16.395479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-03-31T15:59:16.395518Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2025-03-31T15:59:16.395556Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-03-31T15:59:16.395600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:59:16.397208Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-03-31T15:59:16.397308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 
2025-03-31T15:59:16.397367Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2025-03-31T15:59:16.397401Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-03-31T15:59:16.397441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-31T15:59:16.397523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2025-03-31T15:59:16.400144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-03-31T15:59:16.401507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestModificationResults wait txId: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 100 2025-03-31T15:59:16.401796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-03-31T15:59:16.401837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2025-03-31T15:59:16.401946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-03-31T15:59:16.401969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-03-31T15:59:16.402425Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-03-31T15:59:16.402602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-03-31T15:59:16.402639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:313:2304] 2025-03-31T15:59:16.402795Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-03-31T15:59:16.402883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-31T15:59:16.402918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:313:2304] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2025-03-31T15:59:16.403364Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:59:16.403562Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 202us result status StatusSuccess 2025-03-31T15:59:16.404001Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: 
EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:16.404520Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:59:16.404729Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 174us result status StatusPathDoesNotExist 2025-03-31T15:59:16.404884Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/table_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/USER_0\' (id: [OwnerId: 72057594046678944, LocalPathId: 2])" Path: "/MyRoot/USER_0/table_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/USER_0" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::FixedNodeId [GOOD] Test command err: 2025-03-31T15:59:14.955536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:59:14.955619Z node 1 :IMPORT WARN: Table profiles were not loaded >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-false >> TSchemeShardSubDomainTest::CopyRejects [GOOD] >> TSchemeShardSubDomainTest::ConsistentCopyRejects >> TSchemeShardSubDomainTest::SimultaneousDeclare >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousDeclareAndDefine [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T15:59:16.408558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, 
MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T15:59:16.408657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:16.408697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T15:59:16.408731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T15:59:16.408778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T15:59:16.408825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T15:59:16.408880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:16.408982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T15:59:16.409345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:59:16.517269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:59:16.517352Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:16.522763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:59:16.522875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T15:59:16.523076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T15:59:16.530364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T15:59:16.530610Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T15:59:16.531358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:16.531604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:59:16.533602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:16.534684Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:16.534743Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:16.534902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T15:59:16.534943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:16.534992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T15:59:16.535150Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T15:59:16.546089Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T15:59:16.669255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:59:16.669516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:16.669753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T15:59:16.669977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T15:59:16.670033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:16.672459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:16.672597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T15:59:16.672802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:16.672872Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T15:59:16.672922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T15:59:16.672955Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T15:59:16.674886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:16.674962Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:59:16.675000Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T15:59:16.676803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:16.676852Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:16.676919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:16.676978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T15:59:16.680591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:59:16.682512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T15:59:16.682701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, 
TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T15:59:16.683772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:16.683928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:16.683997Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:16.684261Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T15:59:16.684308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:16.684487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:59:16.684589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T15:59:16.686703Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:16.686762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:16.686912Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:16.686948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T15:59:16.687188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:16.687234Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T15:59:16.687332Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:16.687364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:16.687398Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:16.687430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:16.687469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T15:59:16.687505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:16.687557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T15:59:16.687590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T15:59:16.687658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:59:16.687692Z node 
1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T15:59:16.687733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T15:59:16.689515Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:16.689636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:16.689671Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... txid 100:0 3 -> 128 2025-03-31T15:59:16.738304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-03-31T15:59:16.743228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-03-31T15:59:16.744339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-03-31T15:59:16.744396Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 100:0, at schemeshard: 72057594046678944 2025-03-31T15:59:16.744439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 100:0, at tablet# 72057594046678944 2025-03-31T15:59:16.744497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 100 ready parts: 1/1 2025-03-31T15:59:16.744644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 100 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:59:16.747633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2025-03-31T15:59:16.747823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002 2025-03-31T15:59:16.748189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:16.748301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:16.748353Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-03-31T15:59:16.748603Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 128 -> 240 2025-03-31T15:59:16.748655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-03-31T15:59:16.748822Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:59:16.748873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-31T15:59:16.748921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 100 2025-03-31T15:59:16.754730Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:16.754775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:16.754902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T15:59:16.754989Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:16.755051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 100, path id: 1 2025-03-31T15:59:16.755121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 100, path id: 2 2025-03-31T15:59:16.755287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-03-31T15:59:16.755321Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 100:0 ProgressState 2025-03-31T15:59:16.755432Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-03-31T15:59:16.755468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-03-31T15:59:16.755552Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-03-31T15:59:16.755585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-03-31T15:59:16.755619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2025-03-31T15:59:16.755662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-03-31T15:59:16.755709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 100:0 2025-03-31T15:59:16.755748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 100:0 2025-03-31T15:59:16.755832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-31T15:59:16.755874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2025-03-31T15:59:16.755905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-03-31T15:59:16.755932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-03-31T15:59:16.756716Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, 
cookie: 100 2025-03-31T15:59:16.756794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-03-31T15:59:16.756830Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2025-03-31T15:59:16.756887Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-03-31T15:59:16.756925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:59:16.757901Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-03-31T15:59:16.757984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-03-31T15:59:16.758023Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2025-03-31T15:59:16.758064Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-03-31T15:59:16.758093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-31T15:59:16.758158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2025-03-31T15:59:16.764779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-03-31T15:59:16.765939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestModificationResults wait txId: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 100 2025-03-31T15:59:16.766246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-03-31T15:59:16.766304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2025-03-31T15:59:16.766418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-03-31T15:59:16.766452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-03-31T15:59:16.766908Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-03-31T15:59:16.767040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-03-31T15:59:16.767098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:313:2304] 2025-03-31T15:59:16.767229Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 
2025-03-31T15:59:16.767318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-31T15:59:16.767347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:313:2304] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2025-03-31T15:59:16.767762Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:59:16.767980Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 214us result status StatusSuccess 2025-03-31T15:59:16.768373Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DeclareAndDelete [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-31T15:59:16.413843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T15:59:16.413936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:16.413980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T15:59:16.414016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T15:59:16.414076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T15:59:16.414105Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T15:59:16.414156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:16.414238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T15:59:16.414556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:59:16.504495Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:59:16.504558Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:16.513052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:59:16.513493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T15:59:16.513682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T15:59:16.527945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T15:59:16.528284Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T15:59:16.528986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:16.529382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:59:16.531493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:16.532962Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:16.533025Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:16.533160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T15:59:16.533216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:16.533257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T15:59:16.533482Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T15:59:16.542002Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-03-31T15:59:16.656847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:59:16.657067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:16.657284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T15:59:16.657505Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T15:59:16.657565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:16.664228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:16.664384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T15:59:16.664638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:16.664702Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T15:59:16.664745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T15:59:16.664801Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T15:59:16.666956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:16.667040Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:59:16.667138Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T15:59:16.669234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:16.669305Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:16.669384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:16.669453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T15:59:16.673148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:59:16.675534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T15:59:16.675747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T15:59:16.676796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:16.676945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 
72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:16.677020Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:16.677278Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T15:59:16.677345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:16.677490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:59:16.677581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:59:16.685312Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:16.685400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:16.685589Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:16.685628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T15:59:16.686027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:16.686072Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T15:59:16.686158Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:16.686187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:16.686220Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:16.686266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:16.686313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T15:59:16.686350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:16.686396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T15:59:16.686427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T15:59:16.686495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:59:16.686527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T15:59:16.686558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T15:59:16.688330Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:16.688435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 
Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:16.688468Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 44, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T15:59:16.763646Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-03-31T15:59:16.763673Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-03-31T15:59:16.763700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-31T15:59:16.763766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2025-03-31T15:59:16.766236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2025-03-31T15:59:16.766367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000003 2025-03-31T15:59:16.766732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:16.766855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:16.766904Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropSubdomain TPropose operationId# 101:0 HandleReply TEvOperationPlan, step: 5000003, at schemeshard: 72057594046678944 2025-03-31T15:59:16.766957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 2] name: USER_0 type: EPathTypeSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 101 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:16.766988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T15:59:16.767183Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 130 2025-03-31T15:59:16.767317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:59:16.767391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-31T15:59:16.768131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-31T15:59:16.768234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 FAKE_COORDINATOR: Erasing txId 101 2025-03-31T15:59:16.769788Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:16.769826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 
72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:16.769925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T15:59:16.770054Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:16.770094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-03-31T15:59:16.770124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-03-31T15:59:16.770466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T15:59:16.770522Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 101:0 ProgressState 2025-03-31T15:59:16.770578Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-31T15:59:16.770618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T15:59:16.770658Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-31T15:59:16.770690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T15:59:16.770726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-03-31T15:59:16.770757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T15:59:16.770789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-03-31T15:59:16.770833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-03-31T15:59:16.770895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-31T15:59:16.770929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-03-31T15:59:16.770958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-03-31T15:59:16.770986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-03-31T15:59:16.771508Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T15:59:16.771583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T15:59:16.771612Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-03-31T15:59:16.771670Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-03-31T15:59:16.771707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 
2025-03-31T15:59:16.772595Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T15:59:16.772675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T15:59:16.772701Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-03-31T15:59:16.772741Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-03-31T15:59:16.772769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-31T15:59:16.772851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-03-31T15:59:16.773594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-31T15:59:16.773646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-31T15:59:16.773739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-31T15:59:16.774005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-31T15:59:16.774066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-31T15:59:16.774129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:59:16.776008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-31T15:59:16.778019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-31T15:59:16.778113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-03-31T15:59:16.778213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-03-31T15:59:16.778464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-03-31T15:59:16.778502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-03-31T15:59:16.778892Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-03-31T15:59:16.778973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got 
EvNotifyTxCompletionResult 2025-03-31T15:59:16.779004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:340:2331] TestWaitNotification: OK eventTxId 101 2025-03-31T15:59:16.779522Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:59:16.779692Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 207us result status StatusPathDoesNotExist 2025-03-31T15:59:16.779849Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateWithoutPlanResolution [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T15:59:16.557926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T15:59:16.558037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:16.558095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T15:59:16.558131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T15:59:16.558173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T15:59:16.558215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T15:59:16.558283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:16.558372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T15:59:16.558704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:59:16.652279Z node 1 
:FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:59:16.652333Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:16.656739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:59:16.656837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T15:59:16.657011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T15:59:16.661752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T15:59:16.661998Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T15:59:16.662635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:16.662840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:59:16.665824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:16.666887Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:16.666945Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:16.667130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T15:59:16.667174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:16.667209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T15:59:16.667360Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T15:59:16.675404Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T15:59:16.788267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:59:16.788508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:16.788711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T15:59:16.788923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T15:59:16.788981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:16.791773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:16.791893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, 
operation: ALTER DATABASE, path: //MyRoot 2025-03-31T15:59:16.792061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:16.792114Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T15:59:16.792162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T15:59:16.792196Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T15:59:16.796026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:16.796116Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:59:16.796162Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T15:59:16.798777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:16.798837Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:16.798908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:16.798969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T15:59:16.803492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:59:16.806289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T15:59:16.806500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T15:59:16.807696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:16.807853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:16.807927Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:16.808261Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T15:59:16.808327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:16.808518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:59:16.808635Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T15:59:16.811161Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:16.811212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:16.811411Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:16.811453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T15:59:16.811734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:16.811786Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T15:59:16.811892Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:16.811928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:16.811966Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:16.812001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:16.812041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T15:59:16.812100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:16.812161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T15:59:16.812194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T15:59:16.812266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:59:16.812314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T15:59:16.812348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T15:59:16.814348Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:16.814477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:16.814516Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-03-31T15:59:16.814574Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-03-31T15:59:16.814629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:59:16.814737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, 
txId: 1, subscribers: 0 2025-03-31T15:59:16.818686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-03-31T15:59:16.819377Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 100 2025-03-31T15:59:16.820005Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] Bootstrap 2025-03-31T15:59:16.839704Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] Become StateWork (SchemeCache [1:272:2263]) 2025-03-31T15:59:16.842736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateSubDomain SubDomain { Coordinators: 1 Mediators: 1 Name: "USER_0" TimeCastBucketsPerMediator: 2 StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 100 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:59:16.843066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /MyRoot/USER_0, opId: 100:0, at schemeshard: 72057594046678944 2025-03-31T15:59:16.843260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 100:1, propose status:StatusInvalidParameter, reason: Malformed subdomain request: plan resolution is 0, at schemeshard: 72057594046678944 2025-03-31T15:59:16.844651Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-31T15:59:16.847633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 100, response: Status: StatusInvalidParameter Reason: "Malformed subdomain request: plan resolution is 0" TxId: 100 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:16.847851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 100, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Malformed subdomain request: plan resolution is 0, operation: CREATE DATABASE, path: /MyRoot/USER_0 2025-03-31T15:59:16.848476Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2025-03-31T15:59:16.848786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-03-31T15:59:16.848833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 2025-03-31T15:59:16.849258Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-03-31T15:59:16.849379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-03-31T15:59:16.849414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:282:2273] TestWaitNotification: OK eventTxId 100 2025-03-31T15:59:16.849972Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:59:16.850210Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 252us result status StatusPathDoesNotExist 2025-03-31T15:59:16.850409Z 
node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::SchemeLimitsCreatePq [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::BasicFunctionality [GOOD] Test command err: 2025-03-31T15:59:14.205698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:59:14.205790Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:14.391514Z node 1 :NODE_BROKER ERROR: [Dirty] Configured lease duration (10.000000s) is too small. Using min. value: 300.000000s 2025-03-31T15:59:14.411780Z node 1 :NODE_BROKER ERROR: [Committed] Configured lease duration (10.000000s) is too small. Using min. value: 300.000000s >> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-false >> TSchemeShardSubDomainTest::SimultaneousDefineAndCreateTable [GOOD] >> TBlobStorageProxyTest::TestDoubleFailureStripe4Plus2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::RedefineErrors [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T15:59:16.462633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T15:59:16.462729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:16.462768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T15:59:16.462800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T15:59:16.462844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T15:59:16.462882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T15:59:16.462949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:16.463056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, 
CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T15:59:16.463428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:59:16.558399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:59:16.558460Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:16.563264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:59:16.563384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T15:59:16.563553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T15:59:16.568464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T15:59:16.568695Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T15:59:16.569365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:16.569581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:59:16.571440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:16.572540Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:16.572598Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:16.572751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T15:59:16.572794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:16.572828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T15:59:16.572969Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T15:59:16.579614Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T15:59:16.735560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:59:16.735784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:16.735984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T15:59:16.736185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T15:59:16.736236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:16.738304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 
72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:16.738431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T15:59:16.738600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:16.738645Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T15:59:16.738686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T15:59:16.738724Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T15:59:16.740559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:16.740627Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:59:16.740678Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T15:59:16.742361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:16.742407Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:16.742472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:16.742522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T15:59:16.746207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:59:16.748307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T15:59:16.748519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T15:59:16.749576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:16.749711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:16.749788Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:16.750073Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T15:59:16.750127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:16.750278Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:59:16.750372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T15:59:16.752679Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:16.752739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:16.752912Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:16.752959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T15:59:16.753180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:16.753229Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T15:59:16.753319Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:16.753377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:16.753414Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:16.753444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:16.753479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T15:59:16.753517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:16.753570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T15:59:16.753603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T15:59:16.753674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:59:16.753711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T15:59:16.753742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T15:59:16.755965Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:16.756099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:16.756155Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
G: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:567:2521] TestWaitNotification: OK eventTxId 107 TestModificationResults wait txId: 108 2025-03-31T15:59:17.035294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 2 Name: "USER_0" TimeCastBucketsPerMediator: 2 StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } StoragePools { Name: "pool-hdd-1" Kind: "hdd-1" } StoragePools { Name: "pool-hdd-2" Kind: "hdd-1" } } } TxId: 108 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:59:17.035556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: /MyRoot/USER_0, opId: 108:0, at schemeshard: 72057594046678944 2025-03-31T15:59:17.035796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-31T15:59:17.036036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 108:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T15:59:17.036090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 108:0, at schemeshard: 72057594046678944 2025-03-31T15:59:17.040114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 108, response: Status: StatusAccepted TxId: 108 SchemeshardId: 72057594046678944 PathId: 2, at schemeshard: 72057594046678944 2025-03-31T15:59:17.040296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 108, database: /MyRoot, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: /MyRoot/USER_0 2025-03-31T15:59:17.040560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2025-03-31T15:59:17.040629Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 108:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T15:59:17.040672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 108:0 ProgressState no shards to create, do next state 2025-03-31T15:59:17.040709Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 108:0 2 -> 3 2025-03-31T15:59:17.042874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2025-03-31T15:59:17.042962Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 108:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:59:17.043019Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 108:0 3 -> 128 2025-03-31T15:59:17.045028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2025-03-31T15:59:17.045079Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 108:0, at schemeshard: 72057594046678944 2025-03-31T15:59:17.045127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 108:0, at tablet# 72057594046678944 2025-03-31T15:59:17.045178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 108 ready parts: 1/1 2025-03-31T15:59:17.045354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet 
{ TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 108 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:59:17.047013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 108:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:108 msg type: 269090816 2025-03-31T15:59:17.047171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 108, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 108 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 108 at step: 5000007 2025-03-31T15:59:17.047538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000007, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:17.047678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 108 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000007 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:17.047729Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 108:0, at tablet# 72057594046678944 2025-03-31T15:59:17.048101Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 108:0 128 -> 240 2025-03-31T15:59:17.048165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 108:0, at tablet# 72057594046678944 2025-03-31T15:59:17.048381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-03-31T15:59:17.048462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 108 2025-03-31T15:59:17.050727Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:17.050786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 108, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T15:59:17.051013Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:17.051059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 108, path id: 2 2025-03-31T15:59:17.051438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2025-03-31T15:59:17.051496Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 108:0 ProgressState 2025-03-31T15:59:17.051598Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#108:0 progress is 1/1 2025-03-31T15:59:17.051633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2025-03-31T15:59:17.051673Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#108:0 progress is 1/1 2025-03-31T15:59:17.051719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2025-03-31T15:59:17.051766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 108, ready parts: 1/1, is published: false 
2025-03-31T15:59:17.051807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2025-03-31T15:59:17.051850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 108:0 2025-03-31T15:59:17.051895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 108:0 2025-03-31T15:59:17.051966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-03-31T15:59:17.052017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 108, publications: 1, subscribers: 0 2025-03-31T15:59:17.052063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 108, [OwnerId: 72057594046678944, LocalPathId: 2], 8 2025-03-31T15:59:17.052977Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 108 2025-03-31T15:59:17.053082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 108 2025-03-31T15:59:17.053121Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 108 2025-03-31T15:59:17.053181Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 8 2025-03-31T15:59:17.053227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-03-31T15:59:17.053318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 108, subscribers: 0 2025-03-31T15:59:17.056344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 2025-03-31T15:59:17.056717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: send EvNotifyTxCompletion 2025-03-31T15:59:17.056776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 108 2025-03-31T15:59:17.057370Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 108, at schemeshard: 72057594046678944 2025-03-31T15:59:17.057474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2025-03-31T15:59:17.057512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [1:592:2546] TestWaitNotification: OK eventTxId 108 2025-03-31T15:59:17.058261Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:59:17.058501Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 274us result status StatusSuccess 2025-03-31T15:59:17.058914Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 
SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 6 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 6 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } StoragePools { Name: "pool-hdd-1" Kind: "hdd-1" } StoragePools { Name: "pool-hdd-2" Kind: "hdd-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::CreateAndWait [GOOD] >> TSchemeShardSubDomainTest::ConcurrentCreateSubDomainAndDescribe [GOOD] >> TSchemeShardSubDomainTest::ColumnSchemeLimitsRejects ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T15:59:16.679506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T15:59:16.679579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:16.679613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T15:59:16.679642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T15:59:16.679674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T15:59:16.679703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T15:59:16.679745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:16.679811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T15:59:16.680037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TxInitSchema.Execute 2025-03-31T15:59:16.747665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:59:16.747707Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:16.751595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:59:16.751691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T15:59:16.751867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T15:59:16.762983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T15:59:16.763270Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T15:59:16.764104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:16.764374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:59:16.766342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:16.767520Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:16.767590Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:16.767733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T15:59:16.767789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:16.767836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T15:59:16.767985Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T15:59:16.777150Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T15:59:16.921121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:59:16.921388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:16.921748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T15:59:16.921979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T15:59:16.922032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:16.931910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:16.932063Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T15:59:16.932258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:16.932325Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T15:59:16.932373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T15:59:16.932405Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T15:59:16.937678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:16.937789Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:59:16.937836Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T15:59:16.944264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:16.944361Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:16.944426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:16.944481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T15:59:16.948232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:59:16.952025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T15:59:16.952253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T15:59:16.953466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:16.953630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:16.953702Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:16.954002Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T15:59:16.954057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:16.954270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, 
LocalPathId: 1] was 1 2025-03-31T15:59:16.954388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T15:59:16.958599Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:16.958659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:16.958854Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:16.958900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T15:59:16.959220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:16.959286Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T15:59:16.959399Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:16.959445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:16.959487Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:16.959525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:16.959567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T15:59:16.959611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:16.959666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T15:59:16.959702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T15:59:16.959788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:59:16.959833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T15:59:16.959870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T15:59:16.962268Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:16.962398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:16.962439Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
rsion: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T15:59:17.483179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T15:59:17.483211Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-03-31T15:59:17.483242Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-03-31T15:59:17.483272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-03-31T15:59:17.483336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2025-03-31T15:59:17.484159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409552 Status: COMPLETE TxId: 101 Step: 140 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409546 DomainCoordinators: 72075186233409547 DomainCoordinators: 72075186233409548 TxStats { PerShardStats { ShardId: 72075186233409552 CpuTimeUsec: 1536 } } 2025-03-31T15:59:17.484201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409552, partId: 0 2025-03-31T15:59:17.484341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409552 Status: COMPLETE TxId: 101 Step: 140 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409546 DomainCoordinators: 72075186233409547 DomainCoordinators: 72075186233409548 TxStats { PerShardStats { ShardId: 72075186233409552 CpuTimeUsec: 1536 } } 2025-03-31T15:59:17.484476Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409552 Status: COMPLETE TxId: 101 Step: 140 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409546 DomainCoordinators: 72075186233409547 DomainCoordinators: 72075186233409548 TxStats { PerShardStats { ShardId: 72075186233409552 CpuTimeUsec: 1536 } } 2025-03-31T15:59:17.486984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 615 RawX2: 4294969820 } Origin: 72075186233409552 State: 2 TxId: 101 Step: 0 Generation: 2 2025-03-31T15:59:17.487035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409552, partId: 0 2025-03-31T15:59:17.487195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 615 RawX2: 4294969820 } Origin: 72075186233409552 State: 2 TxId: 101 Step: 0 Generation: 2 2025-03-31T15:59:17.487259Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-03-31T15:59:17.487376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 615 
RawX2: 4294969820 } Origin: 72075186233409552 State: 2 TxId: 101 Step: 0 Generation: 2 2025-03-31T15:59:17.487437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:7, datashard: 72075186233409552, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:17.487479Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T15:59:17.487529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409552, at schemeshard: 72057594046678944 2025-03-31T15:59:17.487581Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2025-03-31T15:59:17.491686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-31T15:59:17.491911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-31T15:59:17.492024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T15:59:17.492408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T15:59:17.492739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T15:59:17.492787Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-03-31T15:59:17.492884Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-31T15:59:17.492932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T15:59:17.492987Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-31T15:59:17.493051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T15:59:17.493096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2025-03-31T15:59:17.493177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:273:2264] message: TxId: 101 2025-03-31T15:59:17.493227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T15:59:17.493272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-03-31T15:59:17.493305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-03-31T15:59:17.493467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-03-31T15:59:17.495783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-31T15:59:17.495828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:274:2265] TestWaitNotification: OK eventTxId 101 2025-03-31T15:59:17.496369Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 
72057594046678944 2025-03-31T15:59:17.496595Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 217us result status StatusSuccess 2025-03-31T15:59:17.497065Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 140 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 10 Coordinators: 72075186233409546 Coordinators: 72075186233409547 Coordinators: 72075186233409548 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409549 Mediators: 72075186233409550 Mediators: 72075186233409551 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:17.497767Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:59:17.497993Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 217us result status StatusSuccess 2025-03-31T15:59:17.498403Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/table_0" PathDescription { Self { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 140 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table_0" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 
TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 10 Coordinators: 72075186233409546 Coordinators: 72075186233409547 Coordinators: 72075186233409548 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409549 Mediators: 72075186233409550 Mediators: 72075186233409551 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousDefineAndCreateTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T15:59:16.899332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T15:59:16.899461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:16.899508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T15:59:16.899545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T15:59:16.899599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T15:59:16.899651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T15:59:16.899715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:16.899808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T15:59:16.900115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:59:16.989750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:59:16.989837Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:17.000593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:59:17.000708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 
2025-03-31T15:59:17.000914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T15:59:17.022187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T15:59:17.022440Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T15:59:17.023157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:17.023437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:59:17.025830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:17.027065Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:17.027157Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:17.027342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T15:59:17.027389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:17.027429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T15:59:17.027574Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T15:59:17.034863Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T15:59:17.167742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:59:17.167974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:17.168196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T15:59:17.168427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T15:59:17.168487Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:17.170643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:17.170783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T15:59:17.170980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:17.171038Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: 
TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T15:59:17.171110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T15:59:17.171153Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T15:59:17.173319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:17.173416Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:59:17.173456Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T15:59:17.174873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:17.174913Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:17.174959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:17.175002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T15:59:17.177674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:59:17.179185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T15:59:17.179327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T15:59:17.180129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:17.180252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:17.180312Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:17.180576Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T15:59:17.180682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:17.180822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:59:17.180904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T15:59:17.182537Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 
72057594046678944 2025-03-31T15:59:17.182586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:17.182719Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:17.182764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T15:59:17.182954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:17.183004Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T15:59:17.183103Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:17.183135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:17.183167Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:17.183192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:17.183220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T15:59:17.183264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:17.183307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T15:59:17.183332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T15:59:17.183381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:59:17.183409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T15:59:17.183436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T15:59:17.184853Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:17.184937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:17.184966Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
Id: [OwnerId: 72057594046678944, LocalPathId: 2], version: 6 2025-03-31T15:59:17.575431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-03-31T15:59:17.576474Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T15:59:17.576559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T15:59:17.576581Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-03-31T15:59:17.576620Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-03-31T15:59:17.576652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-03-31T15:59:17.576709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-03-31T15:59:17.577754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 102 Step: 130 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1601 } } 2025-03-31T15:59:17.577808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409549, partId: 0 2025-03-31T15:59:17.577903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 102 Step: 130 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1601 } } 2025-03-31T15:59:17.578260Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 102 Step: 130 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1601 } } 2025-03-31T15:59:17.580872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 503 RawX2: 4294969751 } Origin: 72075186233409549 State: 2 TxId: 102 Step: 0 Generation: 2 2025-03-31T15:59:17.580927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409549, partId: 0 2025-03-31T15:59:17.581058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 503 RawX2: 4294969751 } Origin: 72075186233409549 State: 2 TxId: 102 Step: 0 Generation: 2 2025-03-31T15:59:17.581108Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 
2025-03-31T15:59:17.581183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 503 RawX2: 4294969751 } Origin: 72075186233409549 State: 2 TxId: 102 Step: 0 Generation: 2 2025-03-31T15:59:17.581266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:4, datashard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:17.581303Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T15:59:17.581355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2025-03-31T15:59:17.581394Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-03-31T15:59:17.587331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-31T15:59:17.587488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-31T15:59:17.587579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T15:59:17.587708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T15:59:17.588095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T15:59:17.588145Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-03-31T15:59:17.588251Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-31T15:59:17.588285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T15:59:17.588341Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-31T15:59:17.588383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T15:59:17.588419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-03-31T15:59:17.588492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:306:2297] message: TxId: 102 2025-03-31T15:59:17.588538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T15:59:17.588577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-03-31T15:59:17.588606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-03-31T15:59:17.588759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-03-31T15:59:17.591037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-31T15:59:17.591073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:531:2475] TestWaitNotification: OK eventTxId 102 2025-03-31T15:59:17.591532Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme 
DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:59:17.591747Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 247us result status StatusSuccess 2025-03-31T15:59:17.592113Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 2 SecurityStateVersion: 0 } } Children { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 130 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 10 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:17.592888Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:59:17.593366Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 460us result status StatusSuccess 2025-03-31T15:59:17.593747Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/table_0" PathDescription { Self { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 130 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table_0" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 
LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 10 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpIndexes::IndexTopSortPushDown [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SchemeLimitsCreatePq [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T15:59:16.343373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T15:59:16.343481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:16.343528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T15:59:16.343597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T15:59:16.343652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T15:59:16.343691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T15:59:16.343765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:16.343857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T15:59:16.344260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:59:16.437870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:59:16.437938Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:16.442897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:59:16.443006Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T15:59:16.443196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T15:59:16.447457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T15:59:16.447680Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T15:59:16.448347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:16.448634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:59:16.450287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:16.451216Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:16.451265Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:16.451406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T15:59:16.451448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:16.451516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T15:59:16.451681Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T15:59:16.457356Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T15:59:16.589843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:59:16.590093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:16.590344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T15:59:16.590582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T15:59:16.590655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:16.594685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:16.594847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T15:59:16.595069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:16.595150Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts 
opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T15:59:16.595207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T15:59:16.595242Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T15:59:16.598388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:16.598448Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:59:16.598477Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T15:59:16.600652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:16.600710Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:16.600781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:16.600841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T15:59:16.604471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:59:16.606795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T15:59:16.606981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T15:59:16.607782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:16.607903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:16.607956Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:16.608161Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T15:59:16.608193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:16.608319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:59:16.608395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T15:59:16.610044Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:16.610076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:16.610208Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:16.610243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T15:59:16.610494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:16.610539Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T15:59:16.610634Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:16.610672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:16.610712Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:16.610741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:16.610785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T15:59:16.610830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:16.610879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T15:59:16.610910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T15:59:16.611001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:59:16.611037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T15:59:16.611066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T15:59:16.613079Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:16.613196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:16.613229Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
17.382823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409550, partId: 0 2025-03-31T15:59:17.383005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: TabletId: 72075186233409550 TxId: 104 Status: OK 2025-03-31T15:59:17.383110Z node 1 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionAttachResult triggers early, at schemeshard: 72057594046678944 message# TabletId: 72075186233409550 TxId: 104 Status: OK 2025-03-31T15:59:17.383186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionAttachResult CollectPQConfigChanged: false 2025-03-31T15:59:17.383251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 can't persist state: ShardsInProgress is not empty, remain: 2 2025-03-31T15:59:17.383690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409551, partId: 0 2025-03-31T15:59:17.383804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: TabletId: 72075186233409551 TxId: 104 Status: OK 2025-03-31T15:59:17.383858Z node 1 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionAttachResult triggers early, at schemeshard: 72057594046678944 message# TabletId: 72075186233409551 TxId: 104 Status: OK 2025-03-31T15:59:17.383890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionAttachResult CollectPQConfigChanged: false 2025-03-31T15:59:17.383924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 can't persist state: ShardsInProgress is not empty, remain: 2 2025-03-31T15:59:17.384243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:17.384362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:17.384430Z node 1 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 104:0 HandleReply TEvOperationPlan, step: 5000003, at tablet: 72057594046678944 2025-03-31T15:59:17.384530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 can't persist state: ShardsInProgress is not empty, remain: 2 2025-03-31T15:59:17.389024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-31T15:59:17.389378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 FAKE_COORDINATOR: Erasing txId 104 2025-03-31T15:59:17.483805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409550, partId: 0 2025-03-31T15:59:17.483941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409550 Status: COMPLETE TxId: 104 Step: 5000003 2025-03-31T15:59:17.483996Z node 1 
:FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409550 Status: COMPLETE TxId: 104 Step: 5000003 2025-03-31T15:59:17.484044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:5, shard: 72075186233409550, left await: 1, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-03-31T15:59:17.484080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: false 2025-03-31T15:59:17.484131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 can't persist state: ShardsInProgress is not empty, remain: 1 2025-03-31T15:59:17.484611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409551, partId: 0 2025-03-31T15:59:17.484726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409551 Status: COMPLETE TxId: 104 Step: 5000003 2025-03-31T15:59:17.484769Z node 1 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409551 Status: COMPLETE TxId: 104 Step: 5000003 2025-03-31T15:59:17.484797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:6, shard: 72075186233409551, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-03-31T15:59:17.484825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-03-31T15:59:17.484952Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 128 -> 240 2025-03-31T15:59:17.485086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:59:17.485132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-03-31T15:59:17.489278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-31T15:59:17.489526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-31T15:59:17.490205Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:17.490258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:17.490419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-03-31T15:59:17.490603Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:17.490650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:334:2310], at schemeshard: 
72057594046678944, txId: 104, path id: 1 2025-03-31T15:59:17.490702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:334:2310], at schemeshard: 72057594046678944, txId: 104, path id: 3 2025-03-31T15:59:17.491242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-31T15:59:17.491324Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2025-03-31T15:59:17.491453Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-03-31T15:59:17.491499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-31T15:59:17.491545Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-03-31T15:59:17.491582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-31T15:59:17.491622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-03-31T15:59:17.491674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-31T15:59:17.491731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-03-31T15:59:17.491765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-03-31T15:59:17.491946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 6 2025-03-31T15:59:17.492012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2025-03-31T15:59:17.492047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-03-31T15:59:17.492078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-03-31T15:59:17.493003Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2025-03-31T15:59:17.493094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2025-03-31T15:59:17.493129Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2025-03-31T15:59:17.493177Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-03-31T15:59:17.493235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-03-31T15:59:17.494347Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2025-03-31T15:59:17.494440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2025-03-31T15:59:17.494471Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at 
schemeshard: 72057594046678944, txId: 104 2025-03-31T15:59:17.494502Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-03-31T15:59:17.494532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-03-31T15:59:17.494632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2025-03-31T15:59:17.506346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-03-31T15:59:17.506556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 >> TSchemeShardSubDomainTest::DeleteAdd >> DataShardSnapshots::ReadIteratorLocalSnapshotThenRestart [GOOD] >> DataShardSnapshots::ReadIteratorLocalSnapshotThenWrite ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestDoubleFailureStripe4Plus2 [GOOD] Test command err: 2025-03-31T15:59:15.773653Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/0l5c/0010a9/r3tmp/tmpHem8z5//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1 2025-03-31T15:59:15.791277Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/0l5c/0010a9/r3tmp/tmpHem8z5//vdisk_bad_1/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 2 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 2 2025-03-31T15:59:15.841363Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:0:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-03-31T15:59:15.842310Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:1:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateAndWait [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T15:59:17.308050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T15:59:17.308160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:17.308205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T15:59:17.308256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T15:59:17.308300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T15:59:17.308342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T15:59:17.308401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:17.308488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T15:59:17.308817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:59:17.427731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:59:17.427797Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:17.432552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:59:17.432679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T15:59:17.432914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T15:59:17.437900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T15:59:17.438153Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T15:59:17.438854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:17.439208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:59:17.441161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:17.442402Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:17.442453Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:17.442564Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T15:59:17.442599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:17.442625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T15:59:17.442730Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T15:59:17.448674Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T15:59:17.601717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:59:17.601964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:17.602215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T15:59:17.602437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T15:59:17.602493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:17.606515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:17.606696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T15:59:17.606921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:17.606981Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T15:59:17.607030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T15:59:17.607063Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T15:59:17.612466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:17.612550Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:59:17.612589Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T15:59:17.621955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:17.622032Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:17.622108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, 
operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:17.622168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T15:59:17.633786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:59:17.639800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T15:59:17.640017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T15:59:17.641121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:17.641260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:17.641350Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:17.641644Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T15:59:17.641701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:17.641868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:59:17.641972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T15:59:17.644488Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:17.644536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:17.644733Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:17.644778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T15:59:17.645032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:17.645081Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T15:59:17.645188Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:17.645231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:17.645268Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 
progress is 1/1 2025-03-31T15:59:17.645296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:17.645353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T15:59:17.645397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:17.645459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T15:59:17.645513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T15:59:17.645593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:59:17.645634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T15:59:17.645664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T15:59:17.647822Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:17.647942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:17.647981Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... : TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:17.756682Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-03-31T15:59:17.756993Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2025-03-31T15:59:17.757066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-03-31T15:59:17.757227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-31T15:59:17.757293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-31T15:59:17.757361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2025-03-31T15:59:17.760299Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:17.760344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T15:59:17.760495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-03-31T15:59:17.760623Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:17.760671Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-03-31T15:59:17.760714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 3 2025-03-31T15:59:17.760884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T15:59:17.760940Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-03-31T15:59:17.761022Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-31T15:59:17.761069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T15:59:17.761102Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-31T15:59:17.761139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T15:59:17.761187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-03-31T15:59:17.761222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T15:59:17.761251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-03-31T15:59:17.761277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-03-31T15:59:17.761361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-03-31T15:59:17.761396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-03-31T15:59:17.761423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2025-03-31T15:59:17.761532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2025-03-31T15:59:17.763359Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T15:59:17.763456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T15:59:17.763491Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-03-31T15:59:17.763528Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-03-31T15:59:17.763565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-31T15:59:17.764400Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T15:59:17.764495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 
2025-03-31T15:59:17.764537Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-03-31T15:59:17.764567Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-03-31T15:59:17.764615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-31T15:59:17.764692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-03-31T15:59:17.772354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-31T15:59:17.772571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 100, wait until txId: 101 TestModificationResults wait txId: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 100 2025-03-31T15:59:17.772929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-03-31T15:59:17.772979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2025-03-31T15:59:17.773106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-03-31T15:59:17.773135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-03-31T15:59:17.773627Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-03-31T15:59:17.773798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-03-31T15:59:17.773840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:331:2322] 2025-03-31T15:59:17.774101Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-03-31T15:59:17.774192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-31T15:59:17.774229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:331:2322] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2025-03-31T15:59:17.774713Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:59:17.774961Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/dir/USER_0" took 235us result status StatusSuccess 2025-03-31T15:59:17.778232Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir/USER_0" PathDescription { Self { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 
PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "/dc-1/users/tenant-1:hdd" Kind: "hdd" } StoragePools { Name: "/dc-1/users/tenant-1:hdd-1" Kind: "hdd-1" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:17.778967Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:59:17.779190Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/dir" took 233us result status StatusSuccess 2025-03-31T15:59:17.779540Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir" PathDescription { Self { Name: "dir" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: true } Children { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpIndexes::SimpleVectorIndexOrderByCosineSimilarityNotNullableLevel1 [GOOD] >> KqpIndexes::SimpleVectorIndexOrderByCosineSimilarityNotNullableLevel2 >> TSchemeShardSubDomainTest::SimultaneousDeclare [GOOD] >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTableForceDrop >> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-false [GOOD] >> TSchemeShardSubDomainTest::ConsistentCopyRejects [GOOD] >> TSchemeShardSubDomainTest::DeclareAndForbidTableInside >> TSchemeShardSubDomainTest::ForceDropTwice |69.0%| [TA] 
$(B)/ydb/core/tx/datashard/ut_upload_rows/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousDeclare [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T15:59:18.197525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T15:59:18.197612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:18.197648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T15:59:18.197674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T15:59:18.197709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T15:59:18.197756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T15:59:18.197825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:18.197931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T15:59:18.198200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:59:18.287571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:59:18.287622Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:18.292148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:59:18.292241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T15:59:18.292446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T15:59:18.297667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T15:59:18.297926Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T15:59:18.298669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:18.298886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:59:18.300824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:18.302107Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:18.302168Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 
2025-03-31T15:59:18.302330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T15:59:18.302374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:18.302406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T15:59:18.302546Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T15:59:18.310156Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T15:59:18.480210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:59:18.480457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:18.480684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T15:59:18.480900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T15:59:18.480952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:18.482923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:18.483060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T15:59:18.483268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:18.483323Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T15:59:18.483371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T15:59:18.483423Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T15:59:18.485290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:18.485391Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:59:18.485430Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T15:59:18.487057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:18.487128Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:18.487195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:18.487264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T15:59:18.491013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:59:18.492885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T15:59:18.493117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T15:59:18.494197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:18.494333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:18.494455Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:18.494737Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T15:59:18.494797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:18.494977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:59:18.495110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T15:59:18.498390Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:18.498459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:18.498645Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:18.498699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T15:59:18.498946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:18.498987Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T15:59:18.499069Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:18.499121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:18.499157Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:18.499187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:18.499222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T15:59:18.499256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:18.499304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T15:59:18.499376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T15:59:18.499435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:59:18.499469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T15:59:18.499511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T15:59:18.501341Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:18.501456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:18.501493Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 100 2025-03-31T15:59:18.545353Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 100 2025-03-31T15:59:18.545397Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-03-31T15:59:18.545432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-31T15:59:18.545518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 0/1, is published: true 2025-03-31T15:59:18.547646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-03-31T15:59:18.547723Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 100:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:59:18.547763Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 3 -> 128 2025-03-31T15:59:18.548605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-03-31T15:59:18.550745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-03-31T15:59:18.550929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-03-31T15:59:18.550972Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 100:0, at schemeshard: 72057594046678944 2025-03-31T15:59:18.551014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 
100:0, at tablet# 72057594046678944 2025-03-31T15:59:18.551072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 100 ready parts: 1/1 2025-03-31T15:59:18.551273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 100 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:59:18.553191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2025-03-31T15:59:18.553366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002 2025-03-31T15:59:18.553743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:18.553878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:18.553940Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-03-31T15:59:18.554170Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 128 -> 240 2025-03-31T15:59:18.554218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-03-31T15:59:18.554376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:59:18.554435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-31T15:59:18.554479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 100 2025-03-31T15:59:18.556834Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:18.556880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:18.557059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T15:59:18.557165Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:18.557206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 100, path id: 1 2025-03-31T15:59:18.557249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 100, path id: 
2 2025-03-31T15:59:18.557475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-03-31T15:59:18.557515Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 100:0 ProgressState 2025-03-31T15:59:18.557629Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-03-31T15:59:18.557680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-03-31T15:59:18.557725Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-03-31T15:59:18.557754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-03-31T15:59:18.557793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2025-03-31T15:59:18.557838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-03-31T15:59:18.557891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 100:0 2025-03-31T15:59:18.557938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 100:0 2025-03-31T15:59:18.558025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-31T15:59:18.558074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 100, publications: 2, subscribers: 1 2025-03-31T15:59:18.558106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-03-31T15:59:18.558135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-03-31T15:59:18.559269Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-03-31T15:59:18.559376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-03-31T15:59:18.559412Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2025-03-31T15:59:18.559466Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-03-31T15:59:18.559529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:59:18.561078Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-03-31T15:59:18.561168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-03-31T15:59:18.561196Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2025-03-31T15:59:18.561238Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], 
version: 3 2025-03-31T15:59:18.561271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-31T15:59:18.561378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 1 2025-03-31T15:59:18.561420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:273:2264] 2025-03-31T15:59:18.566634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-03-31T15:59:18.567261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-03-31T15:59:18.567389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-03-31T15:59:18.567424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:274:2265] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 100 2025-03-31T15:59:18.568006Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:59:18.568261Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 260us result status StatusSuccess 2025-03-31T15:59:18.568593Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TStoragePoolsQuotasTest::DifferentQuotasInteraction >> IncrementalRestoreScan::Empty [GOOD] >> TSchemeShardSubDomainTest::LS >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::ConsistentCopyRejects [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] 
recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-31T15:59:16.447069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T15:59:16.447188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:16.447228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T15:59:16.447276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T15:59:16.447323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T15:59:16.447354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T15:59:16.447411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:16.447493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T15:59:16.447864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:59:16.539052Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:59:16.539156Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:16.548771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:59:16.549166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T15:59:16.549379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T15:59:16.558272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T15:59:16.558588Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T15:59:16.559282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:16.559680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:59:16.561776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:16.563125Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:16.563185Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:16.563290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T15:59:16.563337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:16.563372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T15:59:16.563604Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T15:59:16.570715Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-03-31T15:59:16.709693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:59:16.709939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:16.710184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T15:59:16.710430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T15:59:16.710484Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:16.712904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:16.713059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T15:59:16.713289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:16.713354Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T15:59:16.713399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T15:59:16.713432Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T15:59:16.715622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:16.715680Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:59:16.715716Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T15:59:16.717814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:16.717871Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:16.717949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:16.718000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T15:59:16.721630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 
IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:59:16.723621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T15:59:16.723790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T15:59:16.724846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:16.725011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:16.725077Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:16.725363Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T15:59:16.725441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:16.725591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:59:16.725703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:59:16.727705Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:16.727756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:16.727911Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:16.727955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T15:59:16.728319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:16.728366Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T15:59:16.728465Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:16.728496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:16.728536Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:16.728564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:16.728600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T15:59:16.728669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:16.728725Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T15:59:16.728760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T15:59:16.728830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:59:16.728881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T15:59:16.728911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T15:59:16.730777Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:16.730889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:16.730923Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 46678944 2025-03-31T15:59:18.781948Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 106:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 432 RawX2: 8589936989 } Origin: 72075186233409548 State: 2 TxId: 106 Step: 0 Generation: 2 2025-03-31T15:59:18.782024Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 106:0, shardIdx: 72057594046678944:3, datashard: 72075186233409548, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:18.782067Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 106:0, at schemeshard: 72057594046678944 2025-03-31T15:59:18.782115Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 106:0, datashard: 72075186233409552, at schemeshard: 72057594046678944 2025-03-31T15:59:18.782155Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 106:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2025-03-31T15:59:18.782186Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 106:0 129 -> 240 2025-03-31T15:59:18.784585Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 106:0, at schemeshard: 72057594046678944 2025-03-31T15:59:18.785114Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2025-03-31T15:59:18.785179Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 106:0ProgressState, operation type TxCopyTable 2025-03-31T15:59:18.785232Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 106:0, name: CopyTableBarrier, done: 0, blocked: 1, parts count: 1 2025-03-31T15:59:18.785280Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 106, done: 0, blocked: 1 2025-03-31T15:59:18.785395Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 106:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 106 Name: CopyTableBarrier }, at tablet# 72057594046678944 2025-03-31T15:59:18.785438Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 106:0 240 -> 240 
2025-03-31T15:59:18.787898Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2025-03-31T15:59:18.787964Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 106:0 ProgressState 2025-03-31T15:59:18.788114Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-03-31T15:59:18.788181Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-03-31T15:59:18.788232Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-03-31T15:59:18.788275Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-03-31T15:59:18.788321Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: true 2025-03-31T15:59:18.788409Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:638:2562] message: TxId: 106 2025-03-31T15:59:18.788472Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-03-31T15:59:18.788527Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 106:0 2025-03-31T15:59:18.788564Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 106:0 2025-03-31T15:59:18.788737Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-03-31T15:59:18.788784Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-03-31T15:59:18.791262Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-03-31T15:59:18.791321Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [2:814:2714] TestWaitNotification: OK eventTxId 106 2025-03-31T15:59:18.792151Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:59:18.792409Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table" took 293us result status StatusSuccess 2025-03-31T15:59:18.792849Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/table" PathDescription { Self { Name: "table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 
PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:18.793791Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/dst" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:59:18.794065Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/dst" took 283us result status StatusSuccess 2025-03-31T15:59:18.794468Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/dst" PathDescription { Self { Name: "dst" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 106 CreateStep: 250 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "dst" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 
AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 6 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:18.795128Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:59:18.795324Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 227us result status StatusSuccess 2025-03-31T15:59:18.795771Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "dst" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 106 CreateStep: 250 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 150 ParentPathId: 2 PathState: EPathStateCopying Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 2 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T15:59:18.459543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T15:59:18.459676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:18.459723Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T15:59:18.459762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T15:59:18.459810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T15:59:18.459859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T15:59:18.459931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:18.460042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T15:59:18.460408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:59:18.557693Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:59:18.557773Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:18.563315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:59:18.563429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T15:59:18.563607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T15:59:18.569896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T15:59:18.570160Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T15:59:18.570945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:18.571218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:59:18.573514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:18.574691Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:18.574758Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:18.574927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T15:59:18.574979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:18.575037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T15:59:18.575231Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T15:59:18.586284Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T15:59:18.732370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at 
schemeshard: 72057594046678944 2025-03-31T15:59:18.732603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:18.732789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T15:59:18.732970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T15:59:18.733024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:18.735959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:18.736096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T15:59:18.736320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:18.736396Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T15:59:18.736445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T15:59:18.736473Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T15:59:18.738566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:18.738650Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:59:18.738712Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T15:59:18.741199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:18.741253Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:18.741353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:18.741398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T15:59:18.747546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:59:18.751218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T15:59:18.751493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T15:59:18.752607Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:18.752766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:18.752836Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:18.753117Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T15:59:18.753182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:18.753393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:59:18.753508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T15:59:18.756734Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:18.756782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:18.756917Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:18.756948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T15:59:18.757154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:18.757213Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T15:59:18.757293Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:18.757338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:18.757371Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:18.757395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:18.757440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T15:59:18.757471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:18.757511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T15:59:18.757539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T15:59:18.757607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:59:18.757647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T15:59:18.757683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T15:59:18.759566Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:18.759706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:18.759749Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... :18.807596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-31T15:59:18.809715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T15:59:18.809776Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T15:59:18.809821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 101:0, at tablet# 72057594046678944 2025-03-31T15:59:18.809871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 101 ready parts: 1/1 2025-03-31T15:59:18.810041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 101 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:59:18.812798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2025-03-31T15:59:18.812942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 2025-03-31T15:59:18.813343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:18.813463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:18.813525Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-03-31T15:59:18.813824Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2025-03-31T15:59:18.813877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-03-31T15:59:18.814041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:59:18.814100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-31T15:59:18.814156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 
2], at schemeshard: 72057594046678944 2025-03-31T15:59:18.817063Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:18.817114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:18.817272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T15:59:18.817402Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:18.817443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-03-31T15:59:18.817484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 FAKE_COORDINATOR: Erasing txId 101 2025-03-31T15:59:18.817688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T15:59:18.817740Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-03-31T15:59:18.817843Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-31T15:59:18.817905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T15:59:18.817975Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-31T15:59:18.818009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T15:59:18.818052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-03-31T15:59:18.818094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T15:59:18.818135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-03-31T15:59:18.818174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-03-31T15:59:18.818243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-31T15:59:18.818285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-03-31T15:59:18.818322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-03-31T15:59:18.818354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-03-31T15:59:18.819155Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T15:59:18.819278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T15:59:18.819325Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-03-31T15:59:18.819366Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-03-31T15:59:18.819411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:59:18.820500Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T15:59:18.820589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T15:59:18.820623Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-03-31T15:59:18.820664Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-03-31T15:59:18.820709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-31T15:59:18.820801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-03-31T15:59:18.824817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-31T15:59:18.826095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2025-03-31T15:59:18.829724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 1 Name: "SomeDatabase" TimeCastBucketsPerMediator: 2 DatabaseQuotas { storage_quotas { unit_kind: "nonexistent_storage_kind" data_size_hard_quota: 1 } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:59:18.829948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: /MyRoot/SomeDatabase, opId: 102:0, at schemeshard: 72057594046678944 2025-03-31T15:59:18.830315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusInvalidParameter, reason: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain /MyRoot/SomeDatabase has the specified kinds. Existing storage kinds are: pool-kind-1, pool-kind-2, at schemeshard: 72057594046678944 2025-03-31T15:59:18.833071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusInvalidParameter Reason: "Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain /MyRoot/SomeDatabase has the specified kinds. 
Existing storage kinds are: pool-kind-1, pool-kind-2" TxId: 102 SchemeshardId: 72057594046678944 PathId: 2, at schemeshard: 72057594046678944 2025-03-31T15:59:18.833275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain /MyRoot/SomeDatabase has the specified kinds. Existing storage kinds are: pool-kind-1, pool-kind-2, operation: ALTER DATABASE, path: /MyRoot/SomeDatabase TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 2025-03-31T15:59:18.833649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-03-31T15:59:18.833707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2025-03-31T15:59:18.833828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-03-31T15:59:18.833851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-03-31T15:59:18.834354Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-03-31T15:59:18.834464Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-03-31T15:59:18.834510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-31T15:59:18.834548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:311:2302] 2025-03-31T15:59:18.834767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-31T15:59:18.834798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:311:2302] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 >> TSchemeShardSubDomainTest::SimultaneousCreateTenantDirTable >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-EnableSeparateQuotas ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::IndexTopSortPushDown [GOOD] Test command err: Trying to start YDB, gRPC: 9335, MsgBus: 29096 2025-03-31T15:58:29.432181Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488003650512583614:2197];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:58:29.438220Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000df3/r3tmp/tmpCJeebn/pdisk_1.dat 2025-03-31T15:58:30.080277Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:58:30.080429Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:58:30.087827Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:58:30.091497Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 
9335, node 1 2025-03-31T15:58:30.267953Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:58:30.267979Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:58:30.267986Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:58:30.268128Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29096 TClient is connected to server localhost:29096 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:58:31.016566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:58:31.049773Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T15:58:31.070345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:58:31.327852Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:58:31.601594Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:58:31.767953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:58:34.420056Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488003650512583614:2197];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:58:34.420138Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:58:34.957181Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003671987421755:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:34.957312Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:35.449392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T15:58:35.507327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T15:58:35.599121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T15:58:35.687580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T15:58:35.746162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T15:58:35.809562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T15:58:35.895270Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003676282389574:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:35.895373Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:35.895849Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003676282389579:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:35.900714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T15:58:35.929839Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488003676282389581:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T15:58:35.995480Z node 1 :TX_PROXY ERROR: Actor# [1:7488003676282389638:3459] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:58:37.354672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T15:58:37.398202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T15:58:37.454405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 2282, MsgBus: 13920 2025-03-31T15:58:42.651073Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488003706077417024:2167];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:58:42.712481Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000df3/r3tmp/tmpRbDnth/pdisk_1.dat 2025-03-31T15:58:42.908525Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:58:42.910664Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:58:42.910722Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:58:42.912039Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2282, node 2 2025-03-31T15:58:42.999713Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:58:42.999736Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:58:42.999743Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:58:42.999865Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13920 TClient is connected to server localhost:13920 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:58:43.621831Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:58:43.631907Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T15:58:43.652442Z node ... HARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T15:58:47.147016Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T15:58:47.228385Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T15:58:47.292622Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488003727552255677:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:47.292732Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488003727552255682:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:47.292758Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:47.297192Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T15:58:47.327353Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488003727552255684:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T15:58:47.422059Z node 2 :TX_PROXY ERROR: Actor# [2:7488003727552255740:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:58:47.646020Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488003706077417024:2167];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:58:47.656889Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:58:48.884821Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T15:58:57.883949Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T15:58:57.883980Z node 2 :IMPORT WARN: Table profiles were not loaded Trying to start YDB, gRPC: 24538, MsgBus: 4611 2025-03-31T15:59:05.344121Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488003804560103744:2079];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:59:05.345071Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000df3/r3tmp/tmpsvFpCK/pdisk_1.dat 2025-03-31T15:59:05.756370Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:59:05.756476Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:59:05.762717Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:59:05.768486Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24538, node 3 2025-03-31T15:59:05.983767Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:59:05.983797Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:59:05.983806Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:59:05.983968Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4611 TClient is connected to server localhost:4611 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:59:06.920803Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:06.937076Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T15:59:06.946788Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:07.043186Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:07.294914Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:07.552589Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:10.347282Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488003804560103744:2079];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:59:10.347374Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:59:11.291278Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488003830329909263:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:11.291405Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:11.372543Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T15:59:11.427932Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T15:59:11.506406Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T15:59:11.560701Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T15:59:11.613389Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T15:59:11.694681Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T15:59:11.817123Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488003830329909791:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:11.817236Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:11.817509Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488003830329909796:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:11.885599Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T15:59:11.910861Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488003830329909798:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T15:59:11.978555Z node 3 :TX_PROXY ERROR: Actor# [3:7488003830329909857:3467] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:59:13.634565Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T15:59:13.718672Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T15:59:13.793311Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTableForceDrop [GOOD] >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-true >> TSchemeShardSubDomainTest::ColumnSchemeLimitsRejects [GOOD] >> IncrementalRestoreScan::ChangeSenderEmpty [GOOD] >> TSchemeShardSubDomainTest::DeleteAdd [GOOD] >> TSchemeShardSubDomainTest::SetSchemeLimits >> DataShardSnapshots::MvccSnapshotLockedWritesWithConflicts+UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesWithConflicts-UseSink >> IncrementalRestoreScan::ChangeSenderSimple [GOOD] >> TSchemeShardSubDomainTest::DeclareAndForbidTableInside [GOOD] >> TSchemeShardSubDomainTest::CreateWithoutTimeCastBuckets ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest >> IncrementalRestoreScan::Empty [GOOD] Test command err: 2025-03-31T15:59:18.784689Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:59:18.784959Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:59:18.785137Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0013c1/r3tmp/tmpZYFi0F/pdisk_1.dat 2025-03-31T15:59:19.226409Z node 1 :CHANGE_EXCHANGE DEBUG: [TIncrementalRestoreScan][1337][OwnerId: 1, LocalPathId: 2][OwnerId: 3, LocalPathId: 4][1:597:2521] Exhausted 2025-03-31T15:59:19.226528Z node 1 :CHANGE_EXCHANGE DEBUG: [TIncrementalRestoreScan][1337][OwnerId: 1, LocalPathId: 2][OwnerId: 3, LocalPathId: 4][1:597:2521] Handle TEvIncrementalRestoreScan::TEvFinished NKikimr::NDataShard::TEvIncrementalRestoreScan::TEvFinished 2025-03-31T15:59:19.226573Z node 1 :CHANGE_EXCHANGE DEBUG: [TIncrementalRestoreScan][1337][OwnerId: 1, LocalPathId: 2][OwnerId: 3, LocalPathId: 4][1:597:2521] Finish 0 >> TNodeBrokerTest::TestListNodes [GOOD] >> TSchemeShardSubDomainTest::CreateSubDomainsInSeparateDir >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-49 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-50 >> TSchemeShardSubDomainTest::SchemeLimitsRejects ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTableForceDrop [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T15:59:19.369436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T15:59:19.369577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:19.369616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T15:59:19.369659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T15:59:19.369706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T15:59:19.369758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T15:59:19.369825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:19.369918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T15:59:19.370274Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:59:19.463456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:59:19.463521Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:19.468566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:59:19.468694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T15:59:19.468895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T15:59:19.475828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T15:59:19.476103Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T15:59:19.476812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:19.477078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:59:19.479165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:19.480398Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:19.480461Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:19.480662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T15:59:19.480710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:19.480746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T15:59:19.480896Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T15:59:19.492801Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T15:59:19.628075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:59:19.628329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:19.628578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T15:59:19.628806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T15:59:19.628867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:19.631337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 
2025-03-31T15:59:19.631470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T15:59:19.631666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:19.631716Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T15:59:19.631759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T15:59:19.631789Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T15:59:19.633748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:19.633817Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:59:19.633853Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T15:59:19.635762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:19.635845Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:19.635907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:19.635957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T15:59:19.645829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:59:19.650947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T15:59:19.651200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T15:59:19.652392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:19.652547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:19.652621Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:19.652911Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T15:59:19.652958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:19.653165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason 
publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:59:19.653268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T15:59:19.655891Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:19.655937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:19.656131Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:19.656176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T15:59:19.656442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:19.656487Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T15:59:19.656605Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:19.656661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:19.656707Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:19.656738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:19.656771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T15:59:19.656808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:19.656858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T15:59:19.656891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T15:59:19.656973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:59:19.657013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T15:59:19.657045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T15:59:19.665560Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:19.665727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:19.665768Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
Status: OK Origin: 72057594037968897 TxId_Deprecated: 6 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2025-03-31T15:59:19.890099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409548 2025-03-31T15:59:19.891153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-31T15:59:19.892054Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 Forgetting tablet 72075186233409551 2025-03-31T15:59:19.892503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-03-31T15:59:19.892673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409547 2025-03-31T15:59:19.893523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 Forgetting tablet 72075186233409549 2025-03-31T15:59:19.893977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-03-31T15:59:19.894160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-31T15:59:19.894573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-31T15:59:19.894619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-31T15:59:19.894742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-31T15:59:19.898931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5 2025-03-31T15:59:19.898993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2025-03-31T15:59:19.899178Z node 1 :FLAT_TX_SCHEMESHARD INFO: Failed to connect, to tablet: 72075186233409550, at schemeshard: 72057594046678944 2025-03-31T15:59:19.899280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:7 2025-03-31T15:59:19.899386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2025-03-31T15:59:19.900022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-03-31T15:59:19.900079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-03-31T15:59:19.900145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-31T15:59:19.900192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-31T15:59:19.900274Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:59:19.900638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-03-31T15:59:19.900682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-03-31T15:59:19.902687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6 2025-03-31T15:59:19.902729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2025-03-31T15:59:19.902894Z node 1 :FLAT_TX_SCHEMESHARD INFO: Failed to connect, to tablet: 72075186233409551, at schemeshard: 72057594046678944 2025-03-31T15:59:19.902973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-03-31T15:59:19.902993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-03-31T15:59:19.903040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-03-31T15:59:19.903109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-03-31T15:59:19.903294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-03-31T15:59:19.904923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 100 2025-03-31T15:59:19.905154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-03-31T15:59:19.905207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2025-03-31T15:59:19.905345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-03-31T15:59:19.905369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2025-03-31T15:59:19.905438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-03-31T15:59:19.905457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-03-31T15:59:19.905956Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-03-31T15:59:19.906127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-03-31T15:59:19.906162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:631:2536] 2025-03-31T15:59:19.906339Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-03-31T15:59:19.906454Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-03-31T15:59:19.906490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-31T15:59:19.906510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- 
TTxNotificationSubscriber for txId 101: satisfy waiter [1:631:2536] 2025-03-31T15:59:19.906605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-31T15:59:19.906627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:631:2536] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 2025-03-31T15:59:19.907104Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:59:19.907295Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 240us result status StatusPathDoesNotExist 2025-03-31T15:59:19.907490Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-03-31T15:59:19.908091Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:59:19.908265Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 175us result status StatusPathDoesNotExist 2025-03-31T15:59:19.908391Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/table_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0/table_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-03-31T15:59:19.908861Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:59:19.909098Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 203us result status StatusSuccess 2025-03-31T15:59:19.909475Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::ColumnSchemeLimitsRejects [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-31T15:59:17.204179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T15:59:17.204278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:17.204324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T15:59:17.204386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T15:59:17.204439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T15:59:17.204472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T15:59:17.204547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:17.204660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T15:59:17.204998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:59:17.301504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:59:17.301567Z node 1 :IMPORT WARN: Table profiles were not 
loaded 2025-03-31T15:59:17.310457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:59:17.310872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T15:59:17.311071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T15:59:17.319065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T15:59:17.319412Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T15:59:17.320186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:17.320576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:59:17.322759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:17.324210Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:17.324273Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:17.324398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T15:59:17.324452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:17.324499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T15:59:17.324734Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T15:59:17.332282Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-03-31T15:59:17.476972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:59:17.477202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:17.477453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T15:59:17.477698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T15:59:17.477780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:17.480404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:17.480584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T15:59:17.480801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:17.480859Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T15:59:17.480905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T15:59:17.480943Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T15:59:17.483201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:17.483267Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:59:17.483304Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T15:59:17.485312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:17.485392Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:17.485449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:17.485498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T15:59:17.489274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:59:17.491350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T15:59:17.491534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T15:59:17.492652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:17.492818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:17.492887Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:17.493170Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T15:59:17.493222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:17.493410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:59:17.493518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 2025-03-31T15:59:17.496976Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:17.497041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:17.497252Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:17.497307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T15:59:17.497741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:17.497786Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T15:59:17.497881Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:17.497916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:17.497951Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:17.497978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:17.498016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T15:59:17.498074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:17.498112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T15:59:17.498144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T15:59:17.498213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:59:17.498252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T15:59:17.498288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T15:59:17.500182Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:17.500301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:17.500339Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
3-31T15:59:19.789219Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 108:0, at schemeshard: 72057594046678944 2025-03-31T15:59:19.789462Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2025-03-31T15:59:19.789532Z node 2 :FLAT_TX_SCHEMESHARD INFO: TAlterOlapStore TPropose operationId# 108:0 ProgressState at tablet: 72057594046678944 2025-03-31T15:59:19.789611Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 108 ready parts: 1/1 2025-03-31T15:59:19.789771Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409549 Flags: 2 } ExecLevel: 0 TxId: 108 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:59:19.792108Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 108:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:108 msg type: 269090816 2025-03-31T15:59:19.792258Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 108, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 108 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 108 at step: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72075186233409549 for txId: 108 at step: 5000004 2025-03-31T15:59:19.792704Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:19.792840Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 108 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 8589936749 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:19.792904Z node 2 :FLAT_TX_SCHEMESHARD INFO: TAlterOlapStore TPropose operationId# 108:0 HandleReply TEvOperationPlan at tablet: 72057594046678944, stepId: 5000004 2025-03-31T15:59:19.793093Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 108:0 128 -> 129 2025-03-31T15:59:19.793266Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:59:19.793349Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-03-31T15:59:19.794051Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186233409549;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=108;fline=tx_controller.cpp:211;event=finished_tx;tx_id=108; FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000004 2025-03-31T15:59:19.796020Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:19.796095Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 108, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:19.796304Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 108, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-03-31T15:59:19.796523Z node 2 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:19.796573Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:335:2311], at schemeshard: 72057594046678944, txId: 108, path id: 1 2025-03-31T15:59:19.796622Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:335:2311], at schemeshard: 72057594046678944, txId: 108, path id: 5 2025-03-31T15:59:19.797146Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2025-03-31T15:59:19.797204Z node 2 :FLAT_TX_SCHEMESHARD INFO: TAlterOlapStore TProposedWaitParts operationId# 108:0 ProgressState at tablet: 72057594046678944 2025-03-31T15:59:19.797271Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TAlterOlapStore TProposedWaitParts operationId# 108:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409549 2025-03-31T15:59:19.798075Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 108 2025-03-31T15:59:19.798194Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 108 2025-03-31T15:59:19.798242Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 108 2025-03-31T15:59:19.798285Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2025-03-31T15:59:19.798346Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-03-31T15:59:19.799216Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046678944, cookie: 108 2025-03-31T15:59:19.799298Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046678944, cookie: 108 2025-03-31T15:59:19.799327Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 108 2025-03-31T15:59:19.799356Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 5 2025-03-31T15:59:19.799442Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-03-31T15:59:19.799556Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 108, ready parts: 0/1, is published: true 2025-03-31T15:59:19.802681Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 108:0 from tablet: 72057594046678944 to tablet: 72075186233409549 cookie: 72057594046678944:4 msg type: 275382275 2025-03-31T15:59:19.803570Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 2025-03-31T15:59:19.804621Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 FAKE_COORDINATOR: Erasing txId 108 
2025-03-31T15:59:19.817012Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409549 TxId: 108 2025-03-31T15:59:19.817092Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 108, tablet: 72075186233409549, partId: 0 2025-03-31T15:59:19.817234Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 108:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409549 TxId: 108 2025-03-31T15:59:19.817298Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 108:0 129 -> 240 2025-03-31T15:59:19.821776Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 108:0, at schemeshard: 72057594046678944 2025-03-31T15:59:19.821992Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2025-03-31T15:59:19.822093Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 108:0 ProgressState 2025-03-31T15:59:19.822217Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#108:0 progress is 1/1 2025-03-31T15:59:19.822274Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2025-03-31T15:59:19.822338Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#108:0 progress is 1/1 2025-03-31T15:59:19.822382Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2025-03-31T15:59:19.822422Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 108, ready parts: 1/1, is published: true 2025-03-31T15:59:19.822505Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:489:2439] message: TxId: 108 2025-03-31T15:59:19.822564Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2025-03-31T15:59:19.822616Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 108:0 2025-03-31T15:59:19.822654Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 108:0 2025-03-31T15:59:19.822803Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-03-31T15:59:19.825544Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2025-03-31T15:59:19.825627Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [2:876:2790] TestWaitNotification: OK eventTxId 108 TestModificationResults wait txId: 109 2025-03-31T15:59:19.829352Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterColumnStore AlterColumnStore { Name: "OlapStore1" AlterSchemaPresets { Name: "default" AlterSchema { AddColumns { Name: "comment2" Type: "Utf8" } } } } } TxId: 109 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:59:19.829609Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TAlterOlapStore Propose, path: /MyRoot/OlapStore1, opId: 109:0, at schemeshard: 72057594046678944 2025-03-31T15:59:19.830019Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 109:1, propose status:StatusSchemeError, reason: Too many columns. new: 4. 
Limit: 3, at schemeshard: 72057594046678944 2025-03-31T15:59:19.833384Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 109, response: Status: StatusSchemeError Reason: "Too many columns. new: 4. Limit: 3" TxId: 109 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:19.833544Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 109, database: /MyRoot, subject: , status: StatusSchemeError, reason: Too many columns. new: 4. Limit: 3, operation: ALTER COLUMN STORE, path: /MyRoot/OlapStore1 TestModificationResult got TxId: 109, wait until txId: 109 TestWaitNotification wait txId: 109 2025-03-31T15:59:19.834037Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 109: send EvNotifyTxCompletion 2025-03-31T15:59:19.834086Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 109 2025-03-31T15:59:19.834725Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 109, at schemeshard: 72057594046678944 2025-03-31T15:59:19.834841Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 109: got EvNotifyTxCompletionResult 2025-03-31T15:59:19.834872Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 109: satisfy waiter [2:911:2825] TestWaitNotification: OK eventTxId 109 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DeleteAdd [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T15:59:19.234130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T15:59:19.234246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:19.234292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T15:59:19.234332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T15:59:19.234381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T15:59:19.234440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T15:59:19.234505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:19.234612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T15:59:19.234969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:59:19.356335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:59:19.356405Z node 1 :IMPORT WARN: Table 
profiles were not loaded 2025-03-31T15:59:19.361979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:59:19.362107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T15:59:19.362322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T15:59:19.369773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T15:59:19.370057Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T15:59:19.370884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:19.371152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:59:19.373471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:19.374797Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:19.374868Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:19.375038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T15:59:19.375101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:19.375163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T15:59:19.375341Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T15:59:19.383075Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T15:59:19.559347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:59:19.559600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:19.559858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T15:59:19.560136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T15:59:19.560194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:19.563208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:19.563349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T15:59:19.563544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:19.563606Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T15:59:19.563652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T15:59:19.563686Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T15:59:19.566029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:19.566112Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:59:19.566156Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T15:59:19.569203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:19.569261Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:19.569352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:19.569409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T15:59:19.573701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:59:19.578603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T15:59:19.578834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T15:59:19.580032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:19.580192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:19.580264Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:19.580536Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T15:59:19.580591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:19.580772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:59:19.580880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, 
LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T15:59:19.583418Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:19.583470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:19.583661Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:19.583736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T15:59:19.583990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:19.584037Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T15:59:19.584133Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:19.584165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:19.584205Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:19.584238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:19.584276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T15:59:19.584318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:19.584371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T15:59:19.584412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T15:59:19.584491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:59:19.584531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T15:59:19.584564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T15:59:19.586713Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:19.586837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:19.586875Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
6316545 cookie: 0:102 msg type: 269090816 2025-03-31T15:59:19.996003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000004 2025-03-31T15:59:19.996328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:19.996461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:19.996540Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 102:0, at tablet# 72057594046678944 2025-03-31T15:59:19.996899Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 240 2025-03-31T15:59:19.996976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 102:0, at tablet# 72057594046678944 2025-03-31T15:59:19.997148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:59:19.997209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 8 2025-03-31T15:59:19.997262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 2025-03-31T15:59:19.999491Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:19.999528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:19.999647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-03-31T15:59:19.999755Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:19.999809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-03-31T15:59:19.999845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 3 2025-03-31T15:59:20.000175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T15:59:20.000227Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-03-31T15:59:20.000325Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-31T15:59:20.000358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T15:59:20.000399Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-31T15:59:20.000429Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T15:59:20.000465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-03-31T15:59:20.000510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T15:59:20.000551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-03-31T15:59:20.000587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-03-31T15:59:20.000843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 9 2025-03-31T15:59:20.000895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-03-31T15:59:20.000933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2025-03-31T15:59:20.000962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2025-03-31T15:59:20.001513Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T15:59:20.001605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T15:59:20.001642Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-03-31T15:59:20.001688Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-03-31T15:59:20.001732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:59:20.002397Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T15:59:20.002463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T15:59:20.002488Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-03-31T15:59:20.002515Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-03-31T15:59:20.002541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 8 2025-03-31T15:59:20.002614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-03-31T15:59:20.006380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-31T15:59:20.006508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 
TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-03-31T15:59:20.006739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-03-31T15:59:20.006781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-03-31T15:59:20.007268Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-03-31T15:59:20.007378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-31T15:59:20.007417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:978:2803] TestWaitNotification: OK eventTxId 102 2025-03-31T15:59:20.007896Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:59:20.008079Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 201us result status StatusSuccess 2025-03-31T15:59:20.008464Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409552 Coordinators: 72075186233409553 Coordinators: 72075186233409554 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409555 Mediators: 72075186233409556 Mediators: 72075186233409557 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:20.008964Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:59:20.009129Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 146us result status StatusSuccess 2025-03-31T15:59:20.009477Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 
5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 7 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::ForceDropTwice [GOOD] >> TSchemeShardSubDomainTest::LS [GOOD] >> TBlobStorageProxyTest::TestProxyLongTailDiscoverMaxi [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DeclareAndForbidTableInside [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T15:59:19.868523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T15:59:19.868636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:19.868679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T15:59:19.868715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T15:59:19.868765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T15:59:19.868814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T15:59:19.868906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:19.869010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T15:59:19.869367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 
2025-03-31T15:59:19.971525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:59:19.971595Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:19.979353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:59:19.979465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T15:59:19.979724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T15:59:19.995586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T15:59:19.995866Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T15:59:19.996676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:19.996906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:59:19.999000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:20.000258Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:20.000325Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:20.000492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T15:59:20.000540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:20.000584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T15:59:20.000721Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T15:59:20.007980Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T15:59:20.164497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:59:20.164747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:20.164983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T15:59:20.165220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T15:59:20.165283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:20.171975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:20.172129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, 
subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T15:59:20.172339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:20.172403Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T15:59:20.172453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T15:59:20.172490Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T15:59:20.175939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:20.176040Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:59:20.176102Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T15:59:20.179145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:20.179211Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:20.179288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:20.179342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T15:59:20.194934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:59:20.199322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T15:59:20.199576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T15:59:20.200701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:20.200853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:20.200920Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:20.201209Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T15:59:20.201275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:20.201458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 
2025-03-31T15:59:20.201566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T15:59:20.204570Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:20.204624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:20.204814Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:20.204855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T15:59:20.205103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:20.205154Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T15:59:20.205260Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:20.205295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:20.205353Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:20.205384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:20.205422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T15:59:20.205474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:20.205539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T15:59:20.205576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T15:59:20.205670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:59:20.205709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T15:59:20.205780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T15:59:20.207962Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:20.208116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:20.208160Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
T15:59:20.270764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 3 2025-03-31T15:59:20.270956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T15:59:20.271007Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId# 101:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:59:20.271062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 101 ready parts: 1/1 2025-03-31T15:59:20.271227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 101 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:59:20.272093Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T15:59:20.272208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T15:59:20.272253Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-03-31T15:59:20.272298Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2025-03-31T15:59:20.272344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-31T15:59:20.274082Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T15:59:20.274173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T15:59:20.274216Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-03-31T15:59:20.274250Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-03-31T15:59:20.274282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-31T15:59:20.274363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2025-03-31T15:59:20.275751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2025-03-31T15:59:20.275899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at 
step: 5000003 2025-03-31T15:59:20.278883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:20.279021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:20.279105Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId# 101:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000003, at schemeshard: 72057594046678944 2025-03-31T15:59:20.279254Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2025-03-31T15:59:20.279448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-31T15:59:20.279548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-03-31T15:59:20.280744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-31T15:59:20.282748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-31T15:59:20.284169Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:20.284222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T15:59:20.284394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-03-31T15:59:20.284482Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:20.284518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-03-31T15:59:20.284582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 3 FAKE_COORDINATOR: Erasing txId 101 2025-03-31T15:59:20.284818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T15:59:20.284863Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-03-31T15:59:20.285000Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-31T15:59:20.285041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T15:59:20.285085Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-31T15:59:20.285122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T15:59:20.285166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-03-31T15:59:20.285214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T15:59:20.285256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 
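The numeric state codes in these schemeshard entries (2 -> 3, 3 -> 128, 128 -> 240 at various points in this log) trace one sub-operation through its lifecycle, and each transition is accompanied by a ProgressState message naming the phase. A minimal sketch of that progression follows; the codes are verbatim from the log, but the code-to-phase mapping is an inference from the adjacent messages, not YDB source:

```python
# Illustrative replay of the schemeshard operation lifecycle seen in this log.
# The numeric codes come straight from "Change state for txid ..." lines; the
# phase names are inferred from the ProgressState messages next to them.
STATE_NAMES = {
    2: "TCreateParts",     # "TCreateParts opId# ... ProgressState"
    3: "TConfigureParts",  # "NSubDomainState::TConfigureParts ... ProgressState"
    128: "TPropose",       # "MkDir::TPropose ... ProgressState" and similar
    240: "TDone",          # "TDone opId# ... ProgressState"
}
TRANSITIONS = [(2, 3), (3, 128), (128, 240)]

def replay(op_id: str) -> None:
    """Print the transitions the way the log reports them."""
    for old, new in TRANSITIONS:
        print(f"Change state for txid {op_id} {old} -> {new}  "
              f"# {STATE_NAMES[old]} -> {STATE_NAMES[new]}")

replay("101:0")
```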
2025-03-31T15:59:20.285289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-03-31T15:59:20.285380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-31T15:59:20.285444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-03-31T15:59:20.285489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2025-03-31T15:59:20.285520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2025-03-31T15:59:20.286307Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T15:59:20.286411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T15:59:20.286447Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-03-31T15:59:20.286491Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-03-31T15:59:20.286538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-31T15:59:20.287690Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T15:59:20.287777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T15:59:20.287811Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-03-31T15:59:20.287867Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-03-31T15:59:20.287901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-03-31T15:59:20.287977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-03-31T15:59:20.295154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-31T15:59:20.295544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101
TestModificationResult got TxId: 101, wait until txId: 101
TestModificationResults wait txId: 102
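The tail of the txId 101 trace above is the publish/ack loop: the operation declares two publications (path 2 at version 5, path 3 at version 3), and each TEvUpdateAck from the scheme board populator decrements the in-flight count until "Publication complete". A small sketch of that bookkeeping, with hypothetical names (the real logic lives in schemeshard's TTxAckPublishToSchemeBoard):

```python
# Sketch of the publication/ack bookkeeping visible above. Class and method
# names are ours; only the printed messages mimic the log.
class PublicationTracker:
    def __init__(self, tx_id: int, expected: dict):
        self.tx_id = tx_id
        self.pending = dict(expected)  # local path id -> announced version

    def ack(self, path_id: int, version: int) -> None:
        # Mirrors "Handle TEvUpdateAck ... LocalPathId: N Version: V".
        if self.pending.get(path_id) == version:
            del self.pending[path_id]
        if self.pending:
            print(f"Publication in-flight, count: {len(self.pending)}, txId: {self.tx_id}")
        else:
            print(f"Publication complete, notify & remove, txId: {self.tx_id}")

tracker = PublicationTracker(101, {2: 5, 3: 3})  # the two publications above
tracker.ack(2, 5)  # -> Publication in-flight, count: 1, txId: 101
tracker.ack(3, 3)  # -> Publication complete, notify & remove, txId: 101
```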
2025-03-31T15:59:20.299499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_0/dir" OperationType: ESchemeOpCreateTable CreateTable { Name: "table_0" Columns { Name: "RowId" Type: "Uint64" } Columns { Name: "Value" Type: "Utf8" } KeyColumnNames: "RowId" } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:59:20.299891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /MyRoot/USER_0/dir/table_0, opId: 102:0, at schemeshard: 72057594046678944 2025-03-31T15:59:20.300025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /MyRoot/USER_0/dir/table_0, opId: 102:0, schema: Name: "table_0" Columns { Name: "RowId" Type: "Uint64" } Columns { Name: "Value" Type: "Utf8" } KeyColumnNames: "RowId", at schemeshard: 72057594046678944 2025-03-31T15:59:20.300174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusNameConflict, reason: Inclusive subDomain do not support shared transactions, at schemeshard: 72057594046678944 2025-03-31T15:59:20.304328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusNameConflict Reason: "Inclusive subDomain do not support shared transactions" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:20.304614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot/USER_0, subject: , status: StatusNameConflict, reason: Inclusive subDomain do not support shared transactions, operation: CREATE TABLE, path: /MyRoot/USER_0/dir/table_0
TestModificationResult got TxId: 102, wait until txId: 102
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest
>> IncrementalRestoreScan::ChangeSenderEmpty [GOOD]
Test command err: 2025-03-31T15:59:18.397423Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:59:18.397701Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:59:18.397876Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00137b/r3tmp/tmpUJuNb3/pdisk_1.dat 2025-03-31T15:59:18.936092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2025-03-31T15:59:18.936365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T15:59:18.936639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-03-31T15:59:18.936918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-31T15:59:18.936988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T15:59:18.937784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-03-31T15:59:18.937919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-03-31T15:59:18.938144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-03-31T15:59:18.938261Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-03-31T15:59:18.938306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T15:59:18.938340Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T15:59:18.938989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-03-31T15:59:18.939054Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 2025-03-31T15:59:18.939117Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T15:59:18.939655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-03-31T15:59:18.939738Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2025-03-31T15:59:18.939796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2025-03-31T15:59:18.939846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , 
TxId: 1 ready parts: 1/1 2025-03-31T15:59:18.943933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:59:18.944609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T15:59:18.944801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2025-03-31T15:59:18.946067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046644480 2025-03-31T15:59:18.946120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2025-03-31T15:59:18.946157Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046644480 2025-03-31T15:59:18.977462Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } 2025-03-31T15:59:18.977529Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:19.022001Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-31T15:59:19.023215Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:59:19.023350Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:59:19.023560Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-03-31T15:59:19.036467Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:59:19.119265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-31T15:59:19.119474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 0 RawX2: 0 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-03-31T15:59:19.119535Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046644480 2025-03-31T15:59:19.119821Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T15:59:19.119879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046644480 2025-03-31T15:59:19.120075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-03-31T15:59:19.120167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-03-31T15:59:19.121365Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-03-31T15:59:19.121419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 
72057594046644480, txId: 1, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-03-31T15:59:19.121592Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-03-31T15:59:19.121634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:571:2498], at schemeshard: 72057594046644480, txId: 1, path id: 1 2025-03-31T15:59:19.122006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-03-31T15:59:19.122059Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 1:0 ProgressState 2025-03-31T15:59:19.122164Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:19.122209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:19.122250Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:19.122283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:19.122321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T15:59:19.122363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:19.122425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T15:59:19.122458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T15:59:19.122526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-03-31T15:59:19.122577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2025-03-31T15:59:19.122614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046644480, LocalPathId: 1], 3 2025-03-31T15:59:19.125151Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 1 2025-03-31T15:59:19.125282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 1 2025-03-31T15:59:19.125339Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 1 2025-03-31T15:59:19.125379Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 1, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-03-31T15:59:19.125445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-03-31T15:59:19.125565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 1, subscribers: 1 2025-03-31T15:59:19.125621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [1:593:2518] 2025-03-31T15:59:19.126608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 1 2025-03-31T15:59:19.126954Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle 
TEvProposeTransaction 2025-03-31T15:59:19.127003Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-03-31T15:59:19.127214Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-03-31T15:59:19.145112Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-03-31T15:59:19.145229Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T15:59:19.145886Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-03-31T15:59:19.145989Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavi ... 281474976715657 CreateStep: 1000 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 
DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 Name: "default" } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046644480 2025-03-31T15:59:19.889642Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvNavigate describe path /Root/IncrBackupTable 2025-03-31T15:59:19.889762Z node 1 :TX_PROXY DEBUG: Actor# [1:830:2682] HANDLE EvNavigateScheme /Root/IncrBackupTable 2025-03-31T15:59:19.890196Z node 1 :TX_PROXY DEBUG: Actor# [1:830:2682] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-31T15:59:19.890288Z node 1 :TX_PROXY DEBUG: Actor# [1:830:2682] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/Root/IncrBackupTable" 2025-03-31T15:59:19.891471Z node 1 :TX_PROXY DEBUG: Actor# [1:830:2682] Handle TEvDescribeSchemeResult Forward to# [1:593:2518] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/IncrBackupTable" PathDescription { Self { Name: "IncrBackupTable" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1500 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "IncrBackupTable" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_deleted" Type: "Bool" TypeId: 6 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 
InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 Name: "default" } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046644480
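The PartitionConfig dump above is hard to scan; the CompactionPolicy inside it defines three compaction generations. The helper below simply re-prints the numbers copied from that dump in human-readable units; it is a reading aid for the log, not YDB logic:

```python
# Generation thresholds copied verbatim from the CompactionPolicy dump above
# (sizes in bytes). Rendering them in MiB/GiB makes the tiering obvious.
GENERATIONS = [
    # (gen, SizeToCompact, CountToCompact, ForceCountToCompact, ForceSizeToCompact, KeepInCache)
    (0, 0,           8, 8,  134_217_728,    True),
    (1, 41_943_040,  5, 16, 536_870_912,    False),
    (2, 419_430_400, 5, 16, 17_179_869_184, False),
]

for gen, size, count, fcount, fsize, cache in GENERATIONS:
    print(f"gen{gen}: compact at {size / 2**20:g} MiB or {count} parts; "
          f"forced at {fcount} parts or {fsize / 2**30:g} GiB; "
          f"keep_in_cache={cache}")
```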
2025-03-31T15:59:19.892309Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:832:2684] HandleUserTable TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/IncrBackupTable TableId: [72057594046644480:3:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-31T15:59:19.892579Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:832:2684] HandleTargetTable TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-31T15:59:19.892887Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:832:2684] HandleKeys TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-03-31T15:59:19.893047Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:832:2684] Handle NKikimr::NDataShard::TEvIncrementalRestoreScan::TEvNoMoreData
>> TSchemeShardSubDomainTest::CreateItemsInsideSubdomainWithStoragePools
>> DataShardSnapshots::ShardRestartLockBrokenByUncommittedBeforeRead+UseSink [GOOD]
>> DataShardSnapshots::ShardRestartLockBrokenByUncommittedBeforeRead-UseSink
>> KqpMultishardIndex::DataColumnWrite-UseSink [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest
>> TNodeBrokerTest::TestListNodes [GOOD]
Test command err: 2025-03-31T15:59:14.563472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:59:14.563578Z node 1 :IMPORT WARN: Table profiles were not loaded
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest
>> IncrementalRestoreScan::ChangeSenderSimple [GOOD]
Test command err: 2025-03-31T15:59:18.530324Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:59:18.530652Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:59:18.530845Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001393/r3tmp/tmpM8lMyC/pdisk_1.dat 2025-03-31T15:59:19.100513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2025-03-31T15:59:19.100782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T15:59:19.101049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-03-31T15:59:19.101338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-31T15:59:19.101428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T15:59:19.103506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-03-31T15:59:19.103677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-03-31T15:59:19.103901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-03-31T15:59:19.104029Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-03-31T15:59:19.104075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T15:59:19.104113Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T15:59:19.104824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-03-31T15:59:19.104882Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 2025-03-31T15:59:19.104924Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T15:59:19.105491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-03-31T15:59:19.105534Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2025-03-31T15:59:19.105593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2025-03-31T15:59:19.105659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , 
TxId: 1 ready parts: 1/1 2025-03-31T15:59:19.113841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:59:19.119381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T15:59:19.119674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2025-03-31T15:59:19.120936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046644480 2025-03-31T15:59:19.120994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2025-03-31T15:59:19.121034Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046644480 2025-03-31T15:59:19.167068Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } 2025-03-31T15:59:19.167172Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:19.211989Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-31T15:59:19.213251Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:59:19.213410Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:59:19.213626Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-03-31T15:59:19.228444Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:59:19.313323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-31T15:59:19.313565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 0 RawX2: 0 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-03-31T15:59:19.313624Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046644480 2025-03-31T15:59:19.313929Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T15:59:19.313994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046644480 2025-03-31T15:59:19.314199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-03-31T15:59:19.314269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-03-31T15:59:19.315573Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-03-31T15:59:19.315630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 
72057594046644480, txId: 1, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-03-31T15:59:19.315816Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-03-31T15:59:19.315883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:571:2498], at schemeshard: 72057594046644480, txId: 1, path id: 1 2025-03-31T15:59:19.316302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-03-31T15:59:19.316358Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 1:0 ProgressState 2025-03-31T15:59:19.316471Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:19.316541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:19.316587Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:19.316621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:19.316660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T15:59:19.316705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:19.316761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T15:59:19.316799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T15:59:19.316865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-03-31T15:59:19.316906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2025-03-31T15:59:19.316942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046644480, LocalPathId: 1], 3 2025-03-31T15:59:19.327910Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 1 2025-03-31T15:59:19.328093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 1 2025-03-31T15:59:19.328139Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 1 2025-03-31T15:59:19.328209Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 1, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-03-31T15:59:19.328257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-03-31T15:59:19.328385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 1, subscribers: 1 2025-03-31T15:59:19.328449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [1:593:2518] 2025-03-31T15:59:19.329687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 1 2025-03-31T15:59:19.330059Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle 
TEvProposeTransaction 2025-03-31T15:59:19.330113Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-03-31T15:59:19.330312Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-03-31T15:59:19.350854Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-03-31T15:59:19.350965Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T15:59:19.351603Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-03-31T15:59:19.351709Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavi ... hemeshard: 72057594046644480 2025-03-31T15:59:20.040537Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2025-03-31T15:59:20.040636Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2025-03-31T15:59:20.040670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715658 ready parts: 1/1 2025-03-31T15:59:20.040722Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2025-03-31T15:59:20.040753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715658 ready parts: 1/1 2025-03-31T15:59:20.040786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715658, ready parts: 1/1, is published: true 2025-03-31T15:59:20.040847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:593:2518] message: TxId: 281474976715658 2025-03-31T15:59:20.040885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715658 ready parts: 1/1 2025-03-31T15:59:20.040918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2025-03-31T15:59:20.040953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715658:0 2025-03-31T15:59:20.041073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-03-31T15:59:20.041740Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvNavigate describe path /Root/IncrBackupTable 2025-03-31T15:59:20.041913Z node 1 :TX_PROXY DEBUG: Actor# [1:818:2676] HANDLE EvNavigateScheme /Root/IncrBackupTable 2025-03-31T15:59:20.043773Z node 1 :TX_PROXY DEBUG: Actor# [1:818:2676] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-31T15:59:20.043880Z node 1 :TX_PROXY DEBUG: Actor# [1:818:2676] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/Root/IncrBackupTable" Options { ShowPrivateTable: true } 2025-03-31T15:59:20.044954Z node 1 :TX_PROXY DEBUG: Actor# [1:818:2676] Handle TEvDescribeSchemeResult Forward to# [1:593:2518] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: 
"/Root/IncrBackupTable" PathDescription { Self { Name: "IncrBackupTable" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1500 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "IncrBackupTable" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_deleted" Type: "Bool" TypeId: 6 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 Name: "default" } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } 
ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046644480 2025-03-31T15:59:20.046161Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:828:2680], serverId# [1:829:2681], sessionId# [0:0:0] 2025-03-31T15:59:20.047061Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:830:2682] HandleUserTable TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/IncrBackupTable TableId: [72057594046644480:3:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-31T15:59:20.047363Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:830:2682] HandleTargetTable TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-31T15:59:20.047672Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:830:2682] HandleKeys TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] }
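After resolving the source and target tables above, the change sender moves on to delivering records in the entries that follow: the main actor receives TEvEnqueueRecords/TEvRecords, a per-shard sender handshakes with the datashard (STATUS_OK), forwards the 18-byte record, and signals TEvReady per partition. A toy model of that routing, under the assumption of one sender per target shard; all names here are illustrative, not the real C++ actor API:

```python
# Toy model of the per-shard change-sender pattern in the log below: records
# are held until the shard handshake succeeds, then flushed in order.
class ShardSender:
    def __init__(self, tablet_id: int):
        self.tablet_id = tablet_id
        self.ready = False
        self.buffer = []

    def handshake_ok(self) -> None:
        # Mirrors "Handshake ... Status: STATUS_OK LastRecordOrder: 0".
        self.ready = True
        self.flush()

    def enqueue(self, order: int, body_size: int) -> None:
        self.buffer.append((order, body_size))
        self.flush()

    def flush(self) -> None:
        if not self.ready:
            return
        for order, body_size in self.buffer:
            print(f"shard {self.tablet_id} <- record order={order} body={body_size}b")
        self.buffer.clear()

sender = ShardSender(72075186224037888)
sender.enqueue(0, 18)   # buffered: handshake not done yet
sender.handshake_ok()   # flushes the 18-byte record; TEvReady would follow
```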
2025-03-31T15:59:20.047886Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:830:2682] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvEnqueueRecords { Records [{ Order: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] BodySize: 18 }] } 2025-03-31T15:59:20.047996Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:830:2682] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 0 Group: 0 Step: 0 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] Kind: IncrementalRestore Source: InitialScan Body: 18b TableId: [OwnerId: 72057594046644480, LocalPathId: 3] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2025-03-31T15:59:20.048177Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvGetProxyServicesRequest 2025-03-31T15:59:20.048255Z node 1 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][0:0][72075186224037888][1:834:2682] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2025-03-31T15:59:20.048559Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:835:2686], serverId# [1:836:2687], sessionId# [0:0:0] 2025-03-31T15:59:20.091194Z node 1 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][0:0][72075186224037888][1:834:2682] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-03-31T15:59:20.091327Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:830:2682] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-03-31T15:59:20.091470Z node 1 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][0:0][72075186224037888][1:834:2682] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 0 Group: 0 Step: 0 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] Kind: IncrementalRestore Source: InitialScan Body: 18b TableId: [OwnerId: 72057594046644480, LocalPathId: 3] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2025-03-31T15:59:20.091538Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:830:2682] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-03-31T15:59:20.091742Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:830:2682] Handle NKikimr::NDataShard::TEvIncrementalRestoreScan::TEvNoMoreData
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest
>> TSchemeShardSubDomainTest::ForceDropTwice [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T15:59:19.933178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T15:59:19.933285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:19.933345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config:
StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T15:59:19.933389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T15:59:19.933439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T15:59:19.933489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T15:59:19.933551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:19.933657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T15:59:19.934016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:59:20.032321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:59:20.032425Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:20.039945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:59:20.040050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T15:59:20.040235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T15:59:20.047006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T15:59:20.047296Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T15:59:20.047991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:20.048239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:59:20.056065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:20.057375Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:20.057457Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:20.057689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T15:59:20.057743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:20.057788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T15:59:20.057990Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T15:59:20.066981Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T15:59:20.216668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 
2025-03-31T15:59:20.216897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:20.217130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T15:59:20.217378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T15:59:20.217440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:20.219944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:20.220111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T15:59:20.220348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:20.220409Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T15:59:20.220457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T15:59:20.220490Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T15:59:20.222693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:20.222769Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:59:20.222811Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T15:59:20.224931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:20.224984Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:20.225058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:20.225119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T15:59:20.229538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:59:20.231627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T15:59:20.231815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T15:59:20.232893Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:20.233037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:20.233104Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:20.233452Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T15:59:20.233511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:20.233674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:59:20.233829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T15:59:20.235935Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:20.235977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:20.236142Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:20.236181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T15:59:20.236447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:20.236499Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T15:59:20.236591Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:20.236622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:20.236679Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:20.236713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:20.236749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T15:59:20.236785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:20.236835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T15:59:20.236867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T15:59:20.236931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:59:20.236971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T15:59:20.237002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T15:59:20.238951Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle 
TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:20.239057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:20.239112Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 8944 2025-03-31T15:59:20.533062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:6 hive 72057594037968897 at ss 72057594046678944 2025-03-31T15:59:20.533087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-03-31T15:59:20.533110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2025-03-31T15:59:20.534901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-03-31T15:59:20.535225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-03-31T15:59:20.535610Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 5 TabletID: 72075186233409550 Forgetting tablet 72075186233409550 2025-03-31T15:59:20.536544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046678944 ShardLocalIdx: 5, at schemeshard: 72057594046678944 2025-03-31T15:59:20.536887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2025-03-31T15:59:20.537726Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-03-31T15:59:20.537907Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2025-03-31T15:59:20.538078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:20.538328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-03-31T15:59:20.538783Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 6 TabletID: 72075186233409551 2025-03-31T15:59:20.539041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-03-31T15:59:20.539297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-03-31T15:59:20.539910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 6 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2025-03-31T15:59:20.540070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId 
[OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409546 Forgetting tablet 72075186233409548 2025-03-31T15:59:20.542935Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 Forgetting tablet 72075186233409551 2025-03-31T15:59:20.546600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-03-31T15:59:20.546873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-31T15:59:20.547678Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 Forgetting tablet 72075186233409547 2025-03-31T15:59:20.548963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-03-31T15:59:20.549198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 Forgetting tablet 72075186233409549 2025-03-31T15:59:20.549752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-31T15:59:20.549803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-31T15:59:20.549957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-31T15:59:20.551171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-31T15:59:20.551221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-31T15:59:20.551306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:59:20.551748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5 2025-03-31T15:59:20.551800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2025-03-31T15:59:20.555054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-03-31T15:59:20.555127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-03-31T15:59:20.555295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-03-31T15:59:20.555323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-03-31T15:59:20.555437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6 2025-03-31T15:59:20.555473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2025-03-31T15:59:20.563354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-03-31T15:59:20.563436Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-03-31T15:59:20.563566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-03-31T15:59:20.563611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-03-31T15:59:20.563817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-03-31T15:59:20.563915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 102 2025-03-31T15:59:20.564202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-03-31T15:59:20.564240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2025-03-31T15:59:20.564324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-03-31T15:59:20.564343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-03-31T15:59:20.564769Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-03-31T15:59:20.564898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-31T15:59:20.564936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:664:2566] 2025-03-31T15:59:20.565121Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-03-31T15:59:20.565190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-03-31T15:59:20.565227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:664:2566] TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 2025-03-31T15:59:20.565709Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:59:20.565888Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 189us result status StatusPathDoesNotExist 2025-03-31T15:59:20.566055Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 
72057594046678944 2025-03-31T15:59:20.566473Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:59:20.566648Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 189us result status StatusSuccess 2025-03-31T15:59:20.566977Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-true >> TSchemeShardSubDomainTest::SimultaneousCreateForceDropTwice >> TSchemeShardSubDomainTest::SimultaneousCreateTenantDirTable [GOOD] >> TSchemeShardSubDomainTest::SetSchemeLimits [GOOD] >> TSchemeShardSubDomainTest::CreateWithoutTimeCastBuckets [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::LS [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T15:59:20.336078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T15:59:20.336198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:20.336243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T15:59:20.336282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T15:59:20.336345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 
2025-03-31T15:59:20.336390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T15:59:20.336468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:20.336593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T15:59:20.337039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:59:20.424992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:59:20.425053Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:20.429753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:59:20.429867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T15:59:20.430021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T15:59:20.444879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T15:59:20.445143Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T15:59:20.445881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:20.446197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:59:20.450432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:20.451783Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:20.451854Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:20.452014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T15:59:20.452061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:20.452098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T15:59:20.452246Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T15:59:20.459853Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T15:59:20.634401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:59:20.634668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:20.634925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 
2025-03-31T15:59:20.635183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T15:59:20.635249Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:20.638444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:20.638623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T15:59:20.638849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:20.638921Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T15:59:20.638968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T15:59:20.639008Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T15:59:20.641704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:20.641792Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:59:20.641839Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T15:59:20.644400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:20.644471Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:20.644554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:20.644610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T15:59:20.656621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:59:20.659679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T15:59:20.659942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T15:59:20.661047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:20.661218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 
MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:20.661290Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:20.661610Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T15:59:20.661668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:20.661828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:59:20.661935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T15:59:20.666097Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:20.666152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:20.666336Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:20.666381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T15:59:20.666618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:20.666663Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T15:59:20.666751Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:20.666786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:20.666823Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:20.666853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:20.666887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T15:59:20.666923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:20.666990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T15:59:20.667020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T15:59:20.667123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:59:20.667167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T15:59:20.667200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T15:59:20.669057Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:20.669189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 
Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:20.669225Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... trongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2025-03-31T15:59:20.784554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002 2025-03-31T15:59:20.784957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:20.785095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:20.785151Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-03-31T15:59:20.785506Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 128 -> 240 2025-03-31T15:59:20.785564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-03-31T15:59:20.785722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:59:20.785800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-03-31T15:59:20.785878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 100 2025-03-31T15:59:20.789272Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:20.789342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:20.789538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T15:59:20.789688Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:20.789730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 100, path id: 1 2025-03-31T15:59:20.789777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 100, path id: 2 2025-03-31T15:59:20.790038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-03-31T15:59:20.790092Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 100:0 ProgressState 2025-03-31T15:59:20.790217Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 
2025-03-31T15:59:20.790261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-03-31T15:59:20.790307Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-03-31T15:59:20.790344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-03-31T15:59:20.790386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2025-03-31T15:59:20.790430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-03-31T15:59:20.790477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 100:0 2025-03-31T15:59:20.790510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 100:0 2025-03-31T15:59:20.790736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-03-31T15:59:20.790785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2025-03-31T15:59:20.790837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-03-31T15:59:20.790886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-03-31T15:59:20.791734Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-03-31T15:59:20.791835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-03-31T15:59:20.791871Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2025-03-31T15:59:20.791915Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-03-31T15:59:20.791961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:59:20.792813Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-03-31T15:59:20.792909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-03-31T15:59:20.792940Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2025-03-31T15:59:20.792970Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-03-31T15:59:20.793000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-03-31T15:59:20.793092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2025-03-31T15:59:20.796980Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-03-31T15:59:20.797374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2025-03-31T15:59:20.797648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-03-31T15:59:20.797695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 2025-03-31T15:59:20.798179Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-03-31T15:59:20.798278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-03-31T15:59:20.798314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:451:2405] TestWaitNotification: OK eventTxId 100 2025-03-31T15:59:20.798811Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:59:20.799011Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 219us result status StatusSuccess 2025-03-31T15:59:20.799513Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:20.800089Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:59:20.800262Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 176us result status StatusSuccess 2025-03-31T15:59:20.800654Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: 
StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::SimultaneousCreateDelete |69.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyLongTailDiscoverMaxi [GOOD] >> DataShardSnapshots::LockedWriteCleanupOnSplit-UseSink [GOOD] >> DataShardSnapshots::LockedWriteCleanupOnCopyTable+UseSink |69.1%| [TA] $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TSchemeShardSubDomainTest::CreateSubDomainsInSeparateDir [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SetSchemeLimits [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T15:59:20.893881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T15:59:20.893983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:20.894032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T15:59:20.894077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T15:59:20.894123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T15:59:20.894179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T15:59:20.894260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:20.894356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T15:59:20.894714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:59:20.985097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:59:20.985165Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:20.989339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:59:20.989425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T15:59:20.989556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T15:59:20.996596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T15:59:20.996861Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T15:59:20.997718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:20.997968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:59:21.000443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:21.001434Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:21.001496Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:21.001609Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T15:59:21.001648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:21.001678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T15:59:21.001800Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T15:59:21.009373Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T15:59:21.173000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:59:21.173238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:21.173525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T15:59:21.173777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T15:59:21.173837Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:21.176621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:21.176772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T15:59:21.176990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:21.177047Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T15:59:21.177097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T15:59:21.177134Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T15:59:21.180577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:21.180662Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:59:21.180703Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T15:59:21.182667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:21.182733Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:21.182802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, 
operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:21.182858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T15:59:21.186895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:59:21.190807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T15:59:21.191053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T15:59:21.192224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:21.192369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:21.192445Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:21.192777Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T15:59:21.192840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:21.193009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:59:21.193123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T15:59:21.201698Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:21.201753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:21.201935Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:21.201982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T15:59:21.202244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:21.202294Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T15:59:21.202399Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:21.202437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:21.202473Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 
progress is 1/1 2025-03-31T15:59:21.202516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:21.202551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T15:59:21.202590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:21.202660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T15:59:21.202702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T15:59:21.202776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:59:21.202824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T15:59:21.202861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T15:59:21.204949Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:21.205084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:21.205125Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2025-03-31T15:59:21.486964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002 2025-03-31T15:59:21.487680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:21.487786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:21.487827Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-03-31T15:59:21.488146Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 128 -> 240 2025-03-31T15:59:21.488199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-03-31T15:59:21.488382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:59:21.488455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-31T15:59:21.488533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 
FAKE_COORDINATOR: Erasing txId 100 2025-03-31T15:59:21.491118Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:21.491158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:21.492189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T15:59:21.492368Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:21.492472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:334:2310], at schemeshard: 72057594046678944, txId: 100, path id: 1 2025-03-31T15:59:21.492510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:334:2310], at schemeshard: 72057594046678944, txId: 100, path id: 2 2025-03-31T15:59:21.492860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-03-31T15:59:21.492924Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 100:0 ProgressState 2025-03-31T15:59:21.493041Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-03-31T15:59:21.493086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-03-31T15:59:21.493133Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-03-31T15:59:21.493168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-03-31T15:59:21.493202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2025-03-31T15:59:21.493254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-03-31T15:59:21.493316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 100:0 2025-03-31T15:59:21.493349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 100:0 2025-03-31T15:59:21.493525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-03-31T15:59:21.493576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2025-03-31T15:59:21.493613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-03-31T15:59:21.493642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-03-31T15:59:21.494512Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-03-31T15:59:21.494604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-03-31T15:59:21.494646Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2025-03-31T15:59:21.494687Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 
72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-03-31T15:59:21.494726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:59:21.495554Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-03-31T15:59:21.495634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-03-31T15:59:21.495659Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2025-03-31T15:59:21.495685Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-03-31T15:59:21.495711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-31T15:59:21.495776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2025-03-31T15:59:21.498828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-03-31T15:59:21.499973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2025-03-31T15:59:21.500261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-03-31T15:59:21.500300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 2025-03-31T15:59:21.500778Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-03-31T15:59:21.500874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-03-31T15:59:21.500907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:479:2427] TestWaitNotification: OK eventTxId 100 2025-03-31T15:59:21.501431Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:59:21.501649Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 279us result status StatusSuccess 2025-03-31T15:59:21.501998Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 
SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 3 ShardsInside: 2 ShardsLimit: 3 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 300 DatabaseQuotas { data_stream_shards_quota: 3 } SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:21.502497Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:59:21.502683Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 172us result status StatusSuccess 2025-03-31T15:59:21.503028Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 3 ShardsInside: 0 ShardsLimit: 3 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 300 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateTenantDirTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T15:59:20.372953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 
600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T15:59:20.373079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:20.373123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T15:59:20.373163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T15:59:20.373211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T15:59:20.373256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T15:59:20.373344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:20.373441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T15:59:20.374027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:59:20.464552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:59:20.464637Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:20.469988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:59:20.470098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T15:59:20.470327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T15:59:20.480514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T15:59:20.480820Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T15:59:20.481618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:20.481884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:59:20.484612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:20.485935Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:20.486008Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:20.486191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T15:59:20.486244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:20.486294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T15:59:20.486459Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T15:59:20.497179Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 
is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T15:59:20.664936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:59:20.665194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:20.665462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T15:59:20.665716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T15:59:20.665782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:20.672211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:20.672385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T15:59:20.672626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:20.672695Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T15:59:20.672749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T15:59:20.672789Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T15:59:20.675856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:20.675932Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:59:20.675971Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T15:59:20.677925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:20.677984Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:20.678054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:20.678109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T15:59:20.681103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:59:20.683020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 
msg type: 269090816 2025-03-31T15:59:20.683245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T15:59:20.684216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:20.684344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:20.684408Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:20.684684Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T15:59:20.684744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:20.684938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:59:20.685049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T15:59:20.687206Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:20.687265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:20.687456Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:20.687500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T15:59:20.687699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:20.687741Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T15:59:20.687829Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:20.687859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:20.687892Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:20.687917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:20.687946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T15:59:20.687977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:20.688035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T15:59:20.688069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T15:59:20.688132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason 
remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:59:20.688168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T15:59:20.688197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T15:59:20.690252Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:20.690353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:20.690393Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 3409552 2025-03-31T15:59:21.281715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 7 ShardOwnerId: 72057594046678944 ShardLocalIdx: 7, at schemeshard: 72057594046678944 2025-03-31T15:59:21.281976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-03-31T15:59:21.282741Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 Forgetting tablet 72075186233409552 2025-03-31T15:59:21.285864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-31T15:59:21.285925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-03-31T15:59:21.286068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-03-31T15:59:21.286138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-03-31T15:59:21.286181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2025-03-31T15:59:21.286876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:21.287136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-03-31T15:59:21.287634Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 Forgetting tablet 72075186233409546 2025-03-31T15:59:21.289512Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 6 TabletID: 72075186233409551 2025-03-31T15:59:21.290250Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-03-31T15:59:21.290795Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 
ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 2025-03-31T15:59:21.291537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-03-31T15:59:21.291811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 Forgetting tablet 72075186233409548 Forgetting tablet 72075186233409551 2025-03-31T15:59:21.293360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 6 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2025-03-31T15:59:21.293596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409547 2025-03-31T15:59:21.295242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-03-31T15:59:21.295502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409549 2025-03-31T15:59:21.296056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-03-31T15:59:21.296267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-31T15:59:21.297506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-03-31T15:59:21.298889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-03-31T15:59:21.299978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-31T15:59:21.300049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-31T15:59:21.300215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-31T15:59:21.302266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5 2025-03-31T15:59:21.302328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2025-03-31T15:59:21.302465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:7 2025-03-31T15:59:21.302496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:7 tabletId 72075186233409552 2025-03-31T15:59:21.302604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2025-03-31T15:59:21.302734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-31T15:59:21.302785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: 
PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-31T15:59:21.302874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:59:21.303044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-03-31T15:59:21.303100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-03-31T15:59:21.306925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-03-31T15:59:21.306989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-03-31T15:59:21.307105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6 2025-03-31T15:59:21.307135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2025-03-31T15:59:21.307264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-03-31T15:59:21.307294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-03-31T15:59:21.307364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-03-31T15:59:21.307415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-03-31T15:59:21.307559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-03-31T15:59:21.309703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-03-31T15:59:21.310074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-03-31T15:59:21.310132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-03-31T15:59:21.310687Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-03-31T15:59:21.310820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-03-31T15:59:21.310882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:777:2670] TestWaitNotification: OK eventTxId 103 2025-03-31T15:59:21.311531Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:59:21.311783Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 267us result status StatusPathDoesNotExist 2025-03-31T15:59:21.311992Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: 
"/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-03-31T15:59:21.312511Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:59:21.312750Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 247us result status StatusSuccess 2025-03-31T15:59:21.313165Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateWithoutTimeCastBuckets [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-31T15:59:21.135292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T15:59:21.135407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:21.135449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T15:59:21.135485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T15:59:21.135548Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T15:59:21.135582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T15:59:21.135646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:21.135747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T15:59:21.136117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:59:21.240475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:59:21.240551Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:21.250090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:59:21.250477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T15:59:21.250656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T15:59:21.260751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T15:59:21.261046Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T15:59:21.261848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:21.262207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:59:21.264443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:21.265818Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:21.265884Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:21.266023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T15:59:21.266075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:21.266117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T15:59:21.266322Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T15:59:21.276592Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-03-31T15:59:21.400935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:59:21.401157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:21.401405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason 
transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T15:59:21.401653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T15:59:21.401733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:21.403982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:21.404115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T15:59:21.404315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:21.404395Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T15:59:21.404447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T15:59:21.404487Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T15:59:21.406495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:21.406575Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:59:21.406627Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T15:59:21.410339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:21.410421Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:21.410480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:21.410556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T15:59:21.414460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:59:21.417714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T15:59:21.417900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T15:59:21.419025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:21.419223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 
Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:21.419297Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:21.419571Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T15:59:21.419626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:21.419785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:59:21.419890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:59:21.423124Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:21.423180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:21.423394Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:21.423442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T15:59:21.423837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:21.423883Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T15:59:21.424002Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:21.424039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:21.424080Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:21.424174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:21.424216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T15:59:21.424259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:21.424340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T15:59:21.424376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T15:59:21.424446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:59:21.424485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T15:59:21.424519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T15:59:21.426542Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:21.426669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:21.426707Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-03-31T15:59:21.426762Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-03-31T15:59:21.426811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:59:21.426926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-03-31T15:59:21.430283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-03-31T15:59:21.430776Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 100 2025-03-31T15:59:21.433907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 1 Name: "USER_0" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 100 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:59:21.434176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /MyRoot/USER_0, opId: 100:0, at schemeshard: 72057594046678944 2025-03-31T15:59:21.434280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 100:1, propose status:StatusInvalidParameter, reason: Malformed subdomain request: TimeCastBucketsPerMediator is 0, at schemeshard: 72057594046678944 2025-03-31T15:59:21.434633Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] Bootstrap 2025-03-31T15:59:21.457440Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] Become StateWork (SchemeCache [1:274:2265]) 2025-03-31T15:59:21.458749Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-31T15:59:21.462133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 100, response: Status: StatusInvalidParameter Reason: "Malformed subdomain request: TimeCastBucketsPerMediator is 0" TxId: 100 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:21.462354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 100, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Malformed subdomain request: TimeCastBucketsPerMediator is 0, operation: CREATE DATABASE, path: /MyRoot/USER_0 2025-03-31T15:59:21.462962Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2025-03-31T15:59:21.463216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-03-31T15:59:21.463259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 2025-03-31T15:59:21.463695Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at 
schemeshard: 72057594046678944 2025-03-31T15:59:21.463787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-03-31T15:59:21.463824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:284:2275] TestWaitNotification: OK eventTxId 100 2025-03-31T15:59:21.464376Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:59:21.464567Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 214us result status StatusPathDoesNotExist 2025-03-31T15:59:21.464768Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::CreateWithNoEqualName >> TSchemeShardSubDomainTest::SimultaneousDefine ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpMultishardIndex::DataColumnWrite-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 15037, MsgBus: 19268 2025-03-31T15:58:30.027256Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488003652714598995:2069];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:58:30.030565Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000def/r3tmp/tmpVAIu8c/pdisk_1.dat 2025-03-31T15:58:30.643075Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:58:30.649946Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:58:30.671722Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:58:30.681400Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15037, node 1 2025-03-31T15:58:30.895510Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:58:30.895531Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:58:30.895538Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:58:30.895641Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19268 TClient is connected to server localhost:19268 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:58:32.220227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:58:32.275026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:58:32.618482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:58:33.017901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:58:33.157102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:58:35.023799Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488003652714598995:2069];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:58:35.023890Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:58:35.658203Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003674189437249:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:35.658366Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:36.106262Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T15:58:36.148915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T15:58:36.236412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T15:58:36.302632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T15:58:36.397700Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T15:58:36.468745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T15:58:36.571206Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003678484405067:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:36.571378Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:36.571622Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003678484405072:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:36.576039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T15:58:36.592693Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488003678484405074:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T15:58:36.687690Z node 1 :TX_PROXY ERROR: Actor# [1:7488003678484405131:3458] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:58:39.458325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:58:45.643330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T15:58:45.643372Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:58:50.049904Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976710709 at tablet 72075186224037932 status: OVERLOADED errors: WRONG_SHARD_STATE (Wrong shard state: SplitSrcWaitForNoTxInFlight tablet id: 72075186224037932) | 2025-03-31T15:58:50.057227Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976710709 at tablet 72075186224037931 status: OVERLOADED errors: WRONG_SHARD_STATE (Wrong shard state: SplitSrcWaitForNoTxInFlight tablet id: 72075186224037931) | 2025-03-31T15:58:50.094010Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037921 not found 2025-03-31T15:58:50.094041Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037920 not found 2025-03-31T15:58:50.094055Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037932 not found 2025-03-31T15:58:50.094074Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037931 not found 2025-03-31T15:58:52.411658Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488003747203886683:2915], TxId: 281474976710720, task: 1. Ctx: { TraceId : 01jqpetn79ez3xq28jyey3wndj. SessionId : ydb://session/3?node_id=1&id=YjZkYmIyY2YtMTBkYTcwNWUtY2JmNTcxMmQtZDhhNTkzZjQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
<main>: Error: Conflict with existing key., code: 2012 }. 2025-03-31T15:58:52.412367Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488003747203886684:2916], TxId: 281474976710720, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=YjZkYmIyY2YtMTBkYTcwNWUtY2JmNTcxMmQtZDhhNTkzZjQ=. CustomerSuppliedId : . TraceId : 01jqpetn79ez3xq28jyey3wndj. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7488003747203886680:2508], status: PRECONDITION_FAILED, reason: {
<main>: Error: Terminate execution } 2025-03-31T15:58:52.413040Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjZkYmIyY2YtMTBkYTcwNWUtY2JmNTcxMmQtZDhhNTkzZjQ=, ActorId: [1:7488003691369307362:2508], ActorState: ExecuteState, TraceId: 01jqpetn79ez3xq28jyey3wndj, Create QueryResponse for error on request, msg: 2025-03-31T15:58:52.550113Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976710722 at tablet 72075186224037927 status: OVERLOADED errors: WRONG_SHARD_STATE (Wrong shard state: SplitSrcWaitForNoTxInFlight tablet id: 72075186224037927) | 2025-03-31T15:58:52.551013Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976710722 at tablet 72075186224037929 status: OVERLOADED errors: WRONG_SHARD_STATE (Wrong shard state: SplitSrcWaitForNoTxInFlight tablet id: 72075186224037929) | 2025-03-31T15:58:52.551751Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976710722 at tablet 72075186224037924 status: OVERLOADED errors: WRONG_SHARD_STATE (Wrong shard state: SplitSrcWaitForNoTxInFlight tablet id: 72075186224037924) | 2025-03-31T15:58:52.551805Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976710722 at tablet 72075186224037930 status: OVERLOADED errors: WRONG_SHARD_STATE (Wrong shard state: SplitSrcWaitForNoTxInFlight tablet id: 72075186224037930) | 2025-03-31T15:58:52.552556Z node 1 :TX_DATASHARD ERROR: Errors while proposing transactio ... 4037934 status: OVERLOADED errors: WRONG_SHARD_STATE (Wrong shard state: SplitSrcWaitForNoTxInFlight tablet id: 72075186224037934) | 2025-03-31T15:58:52.633246Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037924 not found 2025-03-31T15:58:52.644777Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037923 not found 2025-03-31T15:58:52.657851Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037927 not found 2025-03-31T15:58:52.660719Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037929 not found 2025-03-31T15:58:52.660744Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037928 not found 2025-03-31T15:58:52.672662Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037925 not found 2025-03-31T15:58:52.672727Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037933 not found Trying to start YDB, gRPC: 12897, MsgBus: 62607 2025-03-31T15:58:59.036611Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488003779309952565:2210];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:58:59.036854Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000def/r3tmp/tmpJWSIEP/pdisk_1.dat 2025-03-31T15:58:59.188989Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:58:59.219405Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:58:59.219498Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, 
(0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:58:59.223107Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12897, node 2 2025-03-31T15:58:59.378777Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:58:59.378802Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:58:59.378813Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:58:59.378962Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62607 TClient is connected to server localhost:62607 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:59:00.124963Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:00.136029Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T15:59:00.146282Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:00.302660Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:00.604158Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:00.840364Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:03.997929Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488003796489823374:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:03.998109Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:04.015199Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488003779309952565:2210];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:59:04.015270Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:59:04.053576Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T15:59:04.136050Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T15:59:04.248610Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T15:59:04.327508Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T15:59:04.376242Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T15:59:04.433652Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T15:59:04.495473Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488003800784791193:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:04.495572Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:04.496918Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488003800784791198:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:04.501456Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T15:59:04.513515Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488003800784791200:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T15:59:04.597807Z node 2 :TX_PROXY ERROR: Actor# [2:7488003800784791254:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:59:06.064318Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:14.165243Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T15:59:14.165292Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:16.275281Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7488003852324403160:2859], TxId: 281474976710731, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZGQwOGU4NzgtZGVhNzViYmYtZGRhNzE3YjctOTMxNDg3Yw==. TraceId : 01jqpevcxrfvxkhkww2t76vbfj. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
<main>: Error: Conflict with existing key., code: 2012 }. 2025-03-31T15:59:16.275955Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7488003852324403161:2860], TxId: 281474976710731, task: 2. Ctx: { TraceId : 01jqpevcxrfvxkhkww2t76vbfj. SessionId : ydb://session/3?node_id=2&id=ZGQwOGU4NzgtZGVhNzViYmYtZGRhNzE3YjctOTMxNDg3Yw==. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7488003852324403157:2494], status: PRECONDITION_FAILED, reason: {
<main>: Error: Terminate execution } 2025-03-31T15:59:16.276316Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZGQwOGU4NzgtZGVhNzViYmYtZGRhNzE3YjctOTMxNDg3Yw==, ActorId: [2:7488003809374726123:2494], ActorState: ExecuteState, TraceId: 01jqpevcxrfvxkhkww2t76vbfj, Create QueryResponse for error on request, msg: 2025-03-31T15:59:16.564054Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037921 not found 2025-03-31T15:59:16.585195Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037928 not found 2025-03-31T15:59:16.585239Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037932 not found 2025-03-31T15:59:16.585257Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037933 not found 2025-03-31T15:59:16.585274Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037927 not found 2025-03-31T15:59:16.588263Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037925 not found >> TSchemeShardSubDomainTest::SimultaneousCreateForceDropTwice [GOOD] >> TSchemeShardSubDomainTest::Create >> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-true [GOOD] >> TSchemeShardSubDomainTest::RestartAtInFly ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::TTLUsage Test command err: 2025-03-31T15:57:58.492085Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:57:58.492362Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:57:58.492542Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001b98/r3tmp/tmpy7YgFU/pdisk_1.dat TServer::EnableGrpc on GrpcPort 22266, node 1 TClient is connected to server localhost:7988 2025-03-31T15:57:59.423120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T15:57:59.522418Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:57:59.523139Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:57:59.523199Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:57:59.523241Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:57:59.523696Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T15:57:59.564465Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:57:59.564629Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:57:59.576185Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:57:59.737237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715657:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715657 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-03-31T15:57:59.913164Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:688:2580], Recipient [1:746:2626]: NKikimr::TEvTablet::TEvBoot 2025-03-31T15:57:59.914464Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:688:2580], Recipient [1:746:2626]: NKikimr::TEvTablet::TEvRestored 2025-03-31T15:57:59.914783Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:746:2626];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T15:57:59.949126Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:746:2626];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T15:57:59.949505Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037888 2025-03-31T15:57:59.959509Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T15:57:59.959762Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T15:57:59.960045Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T15:57:59.960185Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T15:57:59.960304Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T15:57:59.960525Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T15:57:59.960665Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T15:57:59.960788Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T15:57:59.960914Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T15:57:59.961040Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T15:57:59.961191Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T15:57:59.961322Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T15:57:59.985413Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:688:2580], Recipient [1:746:2626]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T15:57:59.987238Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:689:2581], Recipient [1:749:2629]: NKikimr::TEvTablet::TEvBoot 2025-03-31T15:57:59.988484Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 72075186224037888 2025-03-31T15:57:59.988628Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:689:2581], Recipient [1:749:2629]: NKikimr::TEvTablet::TEvRestored 2025-03-31T15:57:59.988930Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037889;self_id=[1:749:2629];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T15:58:00.017908Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037889;self_id=[1:749:2629];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T15:58:00.018168Z node 1 :TX_COLUMNSHARD 
DEBUG: TxInitSchema.Execute at tablet 72075186224037889 2025-03-31T15:58:00.025285Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:749:2629];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T15:58:00.025397Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:749:2629];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T15:58:00.025658Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:749:2629];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T15:58:00.025791Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:749:2629];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T15:58:00.025947Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:749:2629];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T15:58:00.026076Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:749:2629];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T15:58:00.026194Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:749:2629];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T15:58:00.026320Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:749:2629];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T15:58:00.026475Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:749:2629];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T15:58:00.026633Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:749:2629];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T15:58:00.026994Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:749:2629];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T15:58:00.027208Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:749:2629];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T15:58:00.030988Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T15:58:00.031072Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 
2025-03-31T15:58:00.031314Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T15:58:00.031460Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T15:58:00.031535Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T15:58:00.031586Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T15:58:00.031695Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T15:58:00.031771Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id ... =0;count=0;;47:size=0;count=0;;48:size=0;count=0;;49:size=0;count=0;;50:size=0;count=0;;51:size=0;count=0;;52:size=0;count=0;;53:size=0;count=0;;54:size=0;count=0;;55:size=0;count=0;;56:size=0;count=0;;57:size=0;count=0;;58:size=0;count=0;;59:size=0;count=0;;60:size=0;count=0;;61:size=0;count=0;;62:size=0;count=0;;63:size=0;count=0;;64:size=0;count=0;;65:size=0;count=0;; 2025-03-31T15:58:43.481821Z node 1 :TX_COLUMNSHARD TRACE: StateWork, received event# 2146435085, Sender [1:1313:3115], Recipient [1:746:2626]: NKikimr::NColumnShard::TEvPrivate::TEvGarbageCollectionFinished 2025-03-31T15:58:43.482385Z node 1 :TX_COLUMNSHARD TRACE: StateWork, received event# 2146435073, Sender [1:1314:3116], Recipient [1:746:2626]: NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex 2025-03-31T15:58:43.482425Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 72075186224037888 2025-03-31T15:58:43.482649Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[31] (CS::GENERAL) apply at tablet 72075186224037888 2025-03-31T15:58:43.487247Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 72075186224037888 Save Batch GenStep: 1:21 Blob count: 1 2025-03-31T15:58:43.487416Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=2911808;raw_bytes=96858255;count=2;records=82491} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=18275528;raw_bytes=616499669;count=8;records=517509} inactive {blob_bytes=26052856;raw_bytes=873325723;count=17;records=740034} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 72075186224037888 TEvBlobStorage::TEvPut 
tId=72075186224037888;c=1;:74/0:size=2465;count=18;;1:size=57218;count=9;;2:size=0;count=0;;3:size=1466448;count=1;;4:size=1479208;count=1;;5:size=1458600;count=1;;6:size=1445408;count=1;;7:size=1445928;count=1;;8:size=1445448;count=1;;9:size=1445400;count=1;;10:size=1445528;count=1;;11:size=808584;count=1;;12:size=4463248;count=6;;13:size=1445744;count=1;;14:size=1445360;count=1;;15:size=1445608;count=1;;16:size=1225528;count=1;;17:size=1425984;count=1;;18:size=1426000;count=1;;19:size=1445376;count=1;;20:size=1445920;count=1;;21:size=1445360;count=1;;22:size=2010824;count=3;;23:size=1215392;count=1;;24:size=0;count=0;;25:size=0;count=0;;26:size=0;count=0;;27:size=0;count=0;;28:size=0;count=0;;29:size=0;count=0;;30:size=0;count=0;;31:size=0;count=0;;32:size=0;count=0;;33:size=0;count=0;;34:size=0;count=0;;35:size=0;count=0;;36:size=0;count=0;;37:size=0;count=0;;38:size=0;count=0;;39:size=0;count=0;;40:size=0;count=0;;41:size=0;count=0;;42:size=0;count=0;;43:size=0;count=0;;44:size=0;count=0;;45:size=0;count=0;;46:size=0;count=0;;47:size=0;count=0;;48:size=0;count=0;;49:size=0;count=0;;50:size=0;count=0;;51:size=0;count=0;;52:size=0;count=0;;53:size=0;count=0;;54:size=0;count=0;;55:size=0;count=0;;56:size=0;count=0;;57:size=0;count=0;;58:size=0;count=0;;59:size=0;count=0;;60:size=0;count=0;;61:size=0;count=0;;62:size=0;count=0;;63:size=0;count=0;;64:size=0;count=0;;65:size=0;count=0;; TEvBlobStorage::TEvPut tId=72075186224037888;c=0;:74/0:size=2534;count=19;;1:size=57218;count=9;;2:size=0;count=0;;3:size=1466448;count=1;;4:size=1479208;count=1;;5:size=1458600;count=1;;6:size=1445408;count=1;;7:size=1445928;count=1;;8:size=1445448;count=1;;9:size=1445400;count=1;;10:size=1445528;count=1;;11:size=808584;count=1;;12:size=4463248;count=6;;13:size=1445744;count=1;;14:size=1445360;count=1;;15:size=1445608;count=1;;16:size=1225528;count=1;;17:size=1425984;count=1;;18:size=1426000;count=1;;19:size=1445376;count=1;;20:size=1445920;count=1;;21:size=1445360;count=1;;22:size=2010824;count=3;;23:size=1215392;count=1;;24:size=0;count=0;;25:size=0;count=0;;26:size=0;count=0;;27:size=0;count=0;;28:size=0;count=0;;29:size=0;count=0;;30:size=0;count=0;;31:size=0;count=0;;32:size=0;count=0;;33:size=0;count=0;;34:size=0;count=0;;35:size=0;count=0;;36:size=0;count=0;;37:size=0;count=0;;38:size=0;count=0;;39:size=0;count=0;;40:size=0;count=0;;41:size=0;count=0;;42:size=0;count=0;;43:size=0;count=0;;44:size=0;count=0;;45:size=0;count=0;;46:size=0;count=0;;47:size=0;count=0;;48:size=0;count=0;;49:size=0;count=0;;50:size=0;count=0;;51:size=0;count=0;;52:size=0;count=0;;53:size=0;count=0;;54:size=0;count=0;;55:size=0;count=0;;56:size=0;count=0;;57:size=0;count=0;;58:size=0;count=0;;59:size=0;count=0;;60:size=0;count=0;;61:size=0;count=0;;62:size=0;count=0;;63:size=0;count=0;;64:size=0;count=0;;65:size=0;count=0;; 2025-03-31T15:58:43.500326Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=4988152-e4911f0-ac8152e8-67335656;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::GENERAL;success=1; 2025-03-31T15:58:43.500394Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=4988152-e4911f0-ac8152e8-67335656;fline=with_appended.cpp:65;portions=28,;task_id=4988152-e4911f0-ac8152e8-67335656; 2025-03-31T15:58:43.500672Z node 1 :TX_COLUMNSHARD TRACE: 
tablet_id=72075186224037888;task_id=4988152-e4911f0-ac8152e8-67335656;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:28;path_id:3;records_count:85687;min_schema_snapshot:(plan_step=1500;tx_id=281474976715658;);schema_version:1;level:0;column_size:3018840;index_size:0;meta:((produced=SPLIT_COMPACTED;)););path_id=3; 2025-03-31T15:58:43.500870Z node 1 :TX_COLUMNSHARD TRACE: tablet_id=72075186224037888;task_id=4988152-e4911f0-ac8152e8-67335656;fline=tiering.cpp:49;tiering_info=__DEFAULT/0.000000s;$$DELETE/599999.000000s;; 2025-03-31T15:58:43.500970Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=4988152-e4911f0-ac8152e8-67335656;fline=manager.cpp:15;event=unlock;process_id=CS::GENERAL::4988152-e4911f0-ac8152e8-67335656; 2025-03-31T15:58:43.501036Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=4988152-e4911f0-ac8152e8-67335656;fline=granule.cpp:101;event=OnCompactionFinished;info=(granule:3;path_id:3;size:21192904;portions_count:28;); 2025-03-31T15:58:43.501083Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=4988152-e4911f0-ac8152e8-67335656;tablet_id=72075186224037888;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-31T15:58:43.501141Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=4988152-e4911f0-ac8152e8-67335656;tablet_id=72075186224037888;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T15:58:43.501224Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=4988152-e4911f0-ac8152e8-67335656;tablet_id=72075186224037888;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=1; 2025-03-31T15:58:43.501781Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=4988152-e4911f0-ac8152e8-67335656;tablet_id=72075186224037888;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=21000; 2025-03-31T15:58:43.501825Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=4988152-e4911f0-ac8152e8-67335656;tablet_id=72075186224037888;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-31T15:58:43.501875Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=4988152-e4911f0-ac8152e8-67335656;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-31T15:58:43.501917Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=4988152-e4911f0-ac8152e8-67335656;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-31T15:58:43.502023Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=4988152-e4911f0-ac8152e8-67335656;tablet_id=72075186224037888;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.599000s; 2025-03-31T15:58:43.502091Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=4988152-e4911f0-ac8152e8-67335656;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-31T15:58:43.502298Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 72075186224037888 Save Batch GenStep: 1:21 Blob count: 1 VERIFY failed (2025-03-31T15:58:43.502507Z): tablet_id=72075186224037888;task_id=4988152-e4911f0-ac8152e8-67335656;verification=CompactionsLimit.Dec() >= 
0;fline=ro_controller.cpp:39; ydb/library/actors/core/log.cpp:754 ~TVerifyFormattedRecordWriter(): requirement false failed NPrivate::InternalPanicImpl(int, char const*, char const*, int, int, int, TBasicStringBuf>, char const*, unsigned long)+873 (0x18BCFDF9) NPrivate::Panic(NPrivate::TStaticBuf const&, int, char const*, char const*, char const*, ...)+571 (0x18BBE08B) NActors::TVerifyFormattedRecordWriter::~TVerifyFormattedRecordWriter()+326 (0x19ED5BF6) NKikimr::NYDBTest::NColumnShard::TReadOnlyController::DoOnWriteIndexComplete(NKikimr::NOlap::TColumnEngineChanges const&, NKikimr::NColumnShard::TColumnShard const&)+4577 (0x485F4931) NKikimr::NColumnShard::TTxWriteIndex::Complete(NActors::TActorContext const&)+4797 (0x305CB3BD) NKikimr::NTabletFlatExecutor::TSeat::Complete(NActors::TActorContext const&, bool)+899 (0x1E8C20F3) NKikimr::NTabletFlatExecutor::TLogicRedo::Confirm(unsigned int, NActors::TActorContext const&, NActors::TActorId const&)+3856 (0x1E7A5AD0) NKikimr::NTabletFlatExecutor::TExecutor::Handle(TAutoPtr, TDelete>&, NActors::TActorContext const&)+3444 (0x1E5EBEC4) NKikimr::NTabletFlatExecutor::TExecutor::StateWork(TAutoPtr&)+2821 (0x1E588C45) NActors::IActor::Receive(TAutoPtr&)+237 (0x19E073CD) NActors::TTestActorRuntimeBase::SendInternal(TAutoPtr, unsigned int, bool)+3557 (0x358DC3C5) NActors::TTestActorRuntimeBase::DispatchEventsInternal(NActors::TDispatchOptions const&, TInstant)+12602 (0x358D4C3A) NActors::TTestActorRuntimeBase::WaitForEdgeEvents(std::__y1::function&)>, TSet, std::__y1::allocator> const&, TDuration)+1076 (0x358DEFB4) NActors::TEvents::TEvWakeup::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEventIf(TSet, std::__y1::allocator> const&, std::__y1::function const&, TDuration)+292 (0x35AAC134) NActors::TEvents::TEvWakeup::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEvent(NActors::TActorId const&, TDuration)+419 (0x35AAB253) NActors::TEvents::TEvWakeup::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEventRethrow(NActors::TActorId const&, TDuration)+307 (0x35AA34B3) NActors::TTestActorRuntime::SimulateSleep(TDuration)+1115 (0x35AA308B) NKikimr::NTestSuiteColumnShardTiers::TTestCaseTTLUsage::Execute_(NUnitTest::TTestContext&)+4702 (0x187AE7FE) std::__y1::__function::__func, void ()>::operator()()+280 (0x187C03B8) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x1907CB16) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x1904C649) NKikimr::NTestSuiteColumnShardTiers::TCurrentTest::Execute()+1204 (0x187BF364) NUnitTest::TTestFactory::Execute()+2438 (0x1904DF16) NUnitTest::RunMain(int, char**)+5213 (0x1907708D) ??+0 (0x7F7529D47D90) __libc_start_main+128 (0x7F7529D47E40) _start+41 (0x16133029) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateSubDomainsInSeparateDir [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T15:59:21.303242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T15:59:21.303363Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:21.303404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T15:59:21.303442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T15:59:21.303492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T15:59:21.303541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T15:59:21.303614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:21.303730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T15:59:21.304088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:59:21.389993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:59:21.390053Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:21.394934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:59:21.395030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T15:59:21.395241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T15:59:21.400922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T15:59:21.401152Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T15:59:21.401840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:21.402058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:59:21.404281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:21.405770Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:21.405866Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:21.406110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T15:59:21.406175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:21.406224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T15:59:21.406410Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T15:59:21.414939Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T15:59:21.574410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: 
ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:59:21.574714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:21.574979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T15:59:21.575240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T15:59:21.575309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:21.580739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:21.580937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T15:59:21.581205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:21.581270Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T15:59:21.581344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T15:59:21.581388Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T15:59:21.583893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:21.583980Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:59:21.584025Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T15:59:21.586368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:21.586449Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:21.586515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:21.586573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T15:59:21.590914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:59:21.593550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T15:59:21.593789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add 
transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T15:59:21.594997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:21.595199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:21.595277Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:21.595580Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T15:59:21.595635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:21.595813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:59:21.595946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T15:59:21.599628Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:21.599697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:21.599897Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:21.599943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T15:59:21.600217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:21.600289Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T15:59:21.600399Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:21.600435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:21.600476Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:21.600511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:21.600552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T15:59:21.600596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:21.600650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T15:59:21.600689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T15:59:21.600784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:59:21.600837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, 
publications: 1, subscribers: 0 2025-03-31T15:59:21.600882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T15:59:21.603138Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:21.603281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:21.603324Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... tion IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-03-31T15:59:22.041616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T15:59:22.041655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-03-31T15:59:22.041685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-03-31T15:59:22.041902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 9 2025-03-31T15:59:22.041949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-03-31T15:59:22.042000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 7 2025-03-31T15:59:22.042045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 4], 3 2025-03-31T15:59:22.042989Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T15:59:22.043073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T15:59:22.043134Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-03-31T15:59:22.043176Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 7 2025-03-31T15:59:22.043231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-31T15:59:22.044184Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T15:59:22.044282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T15:59:22.044313Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-03-31T15:59:22.044339Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 3 2025-03-31T15:59:22.044367Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 8 2025-03-31T15:59:22.044472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-03-31T15:59:22.049587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-31T15:59:22.049933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 100 2025-03-31T15:59:22.050192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-03-31T15:59:22.050242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2025-03-31T15:59:22.050383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-03-31T15:59:22.050430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2025-03-31T15:59:22.050493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-03-31T15:59:22.050512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-03-31T15:59:22.051005Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-03-31T15:59:22.051254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-03-31T15:59:22.051296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:930:2764] 2025-03-31T15:59:22.051381Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-03-31T15:59:22.051536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-31T15:59:22.051565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:930:2764] 2025-03-31T15:59:22.051729Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-03-31T15:59:22.051834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-31T15:59:22.051859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:930:2764] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 2025-03-31T15:59:22.052368Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SubDomains/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:59:22.052683Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SubDomains/USER_0" took 297us result status StatusSuccess 2025-03-31T15:59:22.053142Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: 
StatusSuccess Path: "/MyRoot/SubDomains/USER_0" PathDescription { Self { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 Coordinators: 72075186233409547 Coordinators: 72075186233409548 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409549 Mediators: 72075186233409550 Mediators: 72075186233409551 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:22.053752Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SubDomains/USER_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:59:22.053966Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SubDomains/USER_1" took 194us result status StatusSuccess 2025-03-31T15:59:22.054296Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SubDomains/USER_1" PathDescription { Self { Name: "USER_1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409552 Coordinators: 72075186233409553 Coordinators: 72075186233409554 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409555 Mediators: 72075186233409556 Mediators: 72075186233409557 } DomainKey { SchemeShard: 72057594046678944 PathId: 4 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 4 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:22.054812Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, 
record: Path: "/MyRoot/SubDomains" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:59:22.054978Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SubDomains" took 182us result status StatusSuccess 2025-03-31T15:59:22.055350Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SubDomains" PathDescription { Self { Name: "SubDomains" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } ChildrenExist: true } Children { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } Children { Name: "USER_1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::SchemeQuotas >> TSchemeShardSubDomainTest::CreateItemsInsideSubdomainWithStoragePools [GOOD] >> TSchemeShardSubDomainTest::SimultaneousCreateDelete [GOOD] >> TSchemeShardSubDomainTest::Redefine >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-false [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateForceDropTwice [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T15:59:22.125389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T15:59:22.125503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:22.125555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T15:59:22.125597Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T15:59:22.125643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T15:59:22.125689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T15:59:22.125758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:22.125860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T15:59:22.126372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:59:22.221324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:59:22.221384Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:22.234224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:59:22.234318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T15:59:22.234471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T15:59:22.245750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T15:59:22.246051Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T15:59:22.246800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:22.247103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:59:22.249463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:22.250636Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:22.250709Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:22.250946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T15:59:22.251008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:22.251074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T15:59:22.251266Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T15:59:22.258664Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T15:59:22.378988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:59:22.379290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at 
schemeshard: 72057594046678944 2025-03-31T15:59:22.379569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T15:59:22.379872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T15:59:22.379953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:22.382748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:22.382902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T15:59:22.383147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:22.383211Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T15:59:22.383261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T15:59:22.383296Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T15:59:22.392215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:22.392309Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:59:22.392361Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T15:59:22.394663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:22.394723Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:22.394796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:22.394865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T15:59:22.408329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:59:22.411058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T15:59:22.411270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T15:59:22.412236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 
2025-03-31T15:59:22.412361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:22.412431Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:22.412698Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T15:59:22.412758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:22.412942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:59:22.413086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T15:59:22.415289Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:22.415332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:22.415478Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:22.415516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T15:59:22.415754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:22.415796Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T15:59:22.415890Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:22.415929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:22.415974Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:22.416010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:22.416062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T15:59:22.416112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:22.416163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T15:59:22.416194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T15:59:22.416261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:59:22.416299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T15:59:22.416335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T15:59:22.418144Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 
PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:22.418272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:22.418314Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... athId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T15:59:22.492934Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-03-31T15:59:22.492962Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-03-31T15:59:22.492994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:59:22.493748Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T15:59:22.493842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T15:59:22.493868Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-03-31T15:59:22.493903Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-03-31T15:59:22.493930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 8 2025-03-31T15:59:22.493985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 1 2025-03-31T15:59:22.494013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:274:2265] 2025-03-31T15:59:22.496794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:5 hive 72057594037968897 at ss 72057594046678944 2025-03-31T15:59:22.496847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-03-31T15:59:22.496873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2025-03-31T15:59:22.496899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:6 hive 72057594037968897 at ss 72057594046678944 2025-03-31T15:59:22.496921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-03-31T15:59:22.496942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2025-03-31T15:59:22.497889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-31T15:59:22.498014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-31T15:59:22.498137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- 
TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-31T15:59:22.498179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:275:2266] 2025-03-31T15:59:22.498621Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 5 2025-03-31T15:59:22.498773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046678944 ShardLocalIdx: 5, at schemeshard: 72057594046678944 2025-03-31T15:59:22.499069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2025-03-31T15:59:22.499338Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 2025-03-31T15:59:22.499522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:22.499697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-03-31T15:59:22.499836Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 2025-03-31T15:59:22.500037Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 6 2025-03-31T15:59:22.500162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-03-31T15:59:22.500293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-03-31T15:59:22.500507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 6 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2025-03-31T15:59:22.500630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-31T15:59:22.500773Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 2025-03-31T15:59:22.501030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-03-31T15:59:22.501172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-31T15:59:22.501485Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 2025-03-31T15:59:22.501677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-03-31T15:59:22.501835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] 
was 2 2025-03-31T15:59:22.502243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-31T15:59:22.502288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-31T15:59:22.502413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-31T15:59:22.502745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-31T15:59:22.502804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-31T15:59:22.502886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:59:22.504314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5 2025-03-31T15:59:22.507004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-03-31T15:59:22.507127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-03-31T15:59:22.507298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6 2025-03-31T15:59:22.507695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-03-31T15:59:22.509505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-03-31T15:59:22.509699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-03-31T15:59:22.509792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 2025-03-31T15:59:22.510474Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:59:22.510658Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 199us result status StatusPathDoesNotExist 2025-03-31T15:59:22.510856Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-03-31T15:59:22.511301Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options 
{ ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:59:22.511481Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 156us result status StatusSuccess 2025-03-31T15:59:22.511818Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T15:59:22.187518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T15:59:22.187618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:22.187657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T15:59:22.187689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T15:59:22.187732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T15:59:22.187778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T15:59:22.187837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:22.187926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 
10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T15:59:22.188254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:59:22.271055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:59:22.271149Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:22.276439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:59:22.276535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T15:59:22.276723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T15:59:22.282857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T15:59:22.283124Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T15:59:22.283766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:22.284001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:59:22.285943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:22.287127Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:22.287193Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:22.287360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T15:59:22.287417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:22.287463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T15:59:22.287606Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T15:59:22.294642Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T15:59:22.438061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:59:22.438318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:22.438555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T15:59:22.438818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T15:59:22.438879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:22.442509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:22.442668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T15:59:22.442947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:22.443015Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T15:59:22.443064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T15:59:22.443118Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T15:59:22.445799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:22.445885Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:59:22.445928Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T15:59:22.448221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:22.448281Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:22.448353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:22.448414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T15:59:22.452400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:59:22.454861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T15:59:22.455065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T15:59:22.456251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:22.456411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:22.456484Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:22.456759Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T15:59:22.456816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply 
TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:22.456981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:59:22.457099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T15:59:22.459639Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:22.459688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:22.459867Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:22.459905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T15:59:22.460169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:22.460213Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T15:59:22.460308Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:22.460339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:22.460374Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:22.460420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:22.460467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T15:59:22.460507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:22.460555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T15:59:22.460588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T15:59:22.460656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:59:22.460690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T15:59:22.460720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T15:59:22.462763Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:22.462889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:22.462922Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 2025-03-31T15:59:22.501267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:22.501404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:22.501452Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-03-31T15:59:22.501633Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2025-03-31T15:59:22.501678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-03-31T15:59:22.501831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:59:22.501922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-31T15:59:22.501990Z node 1 :FLAT_TX_SCHEMESHARD INFO: DoUpdateTenant no IsActiveChild, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-31T15:59:22.503141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-31T15:59:22.503458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 FAKE_COORDINATOR: Erasing txId 101 2025-03-31T15:59:22.504733Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:22.504770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:22.504911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T15:59:22.504997Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:22.505026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-03-31T15:59:22.505075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-03-31T15:59:22.505382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T15:59:22.505425Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-03-31T15:59:22.505521Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-31T15:59:22.505552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T15:59:22.505593Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-31T15:59:22.505638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T15:59:22.505678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-03-31T15:59:22.505716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T15:59:22.505754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-03-31T15:59:22.505782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-03-31T15:59:22.505847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-31T15:59:22.505886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-03-31T15:59:22.505919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-03-31T15:59:22.505950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-03-31T15:59:22.507023Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T15:59:22.507147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T15:59:22.507185Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-03-31T15:59:22.507221Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-03-31T15:59:22.507257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:59:22.508446Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T15:59:22.508531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T15:59:22.508563Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-03-31T15:59:22.508589Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-03-31T15:59:22.508619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-31T15:59:22.508687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-03-31T15:59:22.510760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at 
schemeshard: 72057594046678944, cookie: 101 2025-03-31T15:59:22.511922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2025-03-31T15:59:22.515074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 1 Name: "SomeDatabase" TimeCastBucketsPerMediator: 2 ExternalSchemeShard: true DatabaseQuotas { storage_quotas { unit_kind: "nonexistent_storage_kind" data_size_hard_quota: 1 } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:59:22.515342Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 102:0, feature flag EnableAlterDatabaseCreateHiveFirst 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 1 Name: "SomeDatabase" TimeCastBucketsPerMediator: 2 ExternalSchemeShard: true DatabaseQuotas { storage_quotas { unit_kind: "nonexistent_storage_kind" data_size_hard_quota: 1 } } } 2025-03-31T15:59:22.515382Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 102:0, path /MyRoot/SomeDatabase 2025-03-31T15:59:22.515561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 102:0, explain: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain SomeDatabase has the specified kinds. Existing storage kinds are: , at schemeshard: 72057594046678944 2025-03-31T15:59:22.515622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain SomeDatabase has the specified kinds. Existing storage kinds are: , at schemeshard: 72057594046678944 2025-03-31T15:59:22.517954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusInvalidParameter Reason: "Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain SomeDatabase has the specified kinds. Existing storage kinds are: " TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:22.518139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain SomeDatabase has the specified kinds. 
Existing storage kinds are: , operation: ALTER DATABASE, path: /MyRoot/SomeDatabase TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 2025-03-31T15:59:22.518445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-03-31T15:59:22.518485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2025-03-31T15:59:22.518576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-03-31T15:59:22.518615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-03-31T15:59:22.519065Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-03-31T15:59:22.519199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-31T15:59:22.519229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:305:2296] 2025-03-31T15:59:22.519399Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-03-31T15:59:22.519517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-31T15:59:22.519546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:305:2296] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 >> TSchemeShardSubDomainTest::SimultaneousCreateForceDrop >> TSchemeShardSubDomainTest::RmDir >> TSchemeShardSubDomainTest::DeclareDefineAndDelete >> TSchemeShardSubDomainTest::SchemeDatabaseQuotaRejects >> TSchemeShardSubDomainTest::Restart >> ReadLoad::ShouldReadKqpMoreThanRows [GOOD] >> TSchemeShardSubDomainTest::Create [GOOD] >> TSchemeShardSubDomainTest::CreateAlterNbsChannels >> TSchemeShardSubDomainTest::SimultaneousDefine [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateItemsInsideSubdomainWithStoragePools [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T15:59:21.854225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T15:59:21.854338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:21.854384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T15:59:21.854421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T15:59:21.854470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T15:59:21.854519Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T15:59:21.854590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:21.854688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T15:59:21.855069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:59:21.956892Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:59:21.956972Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:21.962629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:59:21.962735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T15:59:21.962940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T15:59:21.969198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T15:59:21.969542Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T15:59:21.970377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:21.970646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:59:21.972940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:21.974329Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:21.974397Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:21.974580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T15:59:21.974649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:21.974698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T15:59:21.974879Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T15:59:21.983019Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T15:59:22.140765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:59:22.141034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:22.141271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T15:59:22.141575Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T15:59:22.141646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:22.146114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:22.146297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T15:59:22.146544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:22.146638Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T15:59:22.146694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T15:59:22.146729Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T15:59:22.152448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:22.152568Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:59:22.152623Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T15:59:22.154857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:22.154909Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:22.154977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:22.155049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T15:59:22.159047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:59:22.161889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T15:59:22.162068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T15:59:22.163136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:22.163275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 
72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:22.163341Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:22.163605Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T15:59:22.163655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:22.163817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:59:22.163937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T15:59:22.166207Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:22.166249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:22.166440Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:22.166488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T15:59:22.166708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:22.166753Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T15:59:22.166840Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:22.166875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:22.166911Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:22.166957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:22.166995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T15:59:22.167033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:22.167101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T15:59:22.167137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T15:59:22.167209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:59:22.167243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T15:59:22.167274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T15:59:22.176670Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:22.176831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 
Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:22.176869Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... rd: 72057594046678944, cookie: 103 2025-03-31T15:59:22.724700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-31T15:59:22.724832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-31T15:59:22.725154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-31T15:59:22.725206Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-03-31T15:59:22.725315Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-03-31T15:59:22.725355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-31T15:59:22.725392Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-03-31T15:59:22.725417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-31T15:59:22.725448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-03-31T15:59:22.725514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:481:2437] message: TxId: 103 2025-03-31T15:59:22.725569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-31T15:59:22.725604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-03-31T15:59:22.725643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-03-31T15:59:22.725746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-03-31T15:59:22.727935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-03-31T15:59:22.727984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:482:2438] TestWaitNotification: OK eventTxId 103 2025-03-31T15:59:22.728585Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:59:22.728813Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 266us result status StatusSuccess 2025-03-31T15:59:22.729461Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "dir_0" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 150 ParentPathId: 2 PathState: 
EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 150 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "name_USER_0_kind_hdd-1" Kind: "hdd-1" } StoragePools { Name: "name_USER_0_kind_hdd-2" Kind: "hdd-2" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:22.730078Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:59:22.730301Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 221us result status StatusSuccess 2025-03-31T15:59:22.730709Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/table_0" PathDescription { Self { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table_0" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 
ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:22.731424Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/dir_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:59:22.731639Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/dir_0" took 193us result status StatusSuccess 2025-03-31T15:59:22.731937Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/dir_0" PathDescription { Self { Name: "dir_0" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: true } Children { Name: "table_1" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 200 ParentPathId: 4 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:22.732428Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/dir_0/table_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:59:22.732620Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/dir_0/table_1" took 196us result status StatusSuccess 2025-03-31T15:59:22.732988Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/dir_0/table_1" PathDescription { Self { Name: "table_1" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 200 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table_1" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } 
Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateDelete [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T15:59:22.405761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T15:59:22.405881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:22.405923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T15:59:22.405964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T15:59:22.406015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T15:59:22.406069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T15:59:22.406156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:22.406269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T15:59:22.406644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:59:22.498279Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-31T15:59:22.498337Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:22.505201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:59:22.505317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T15:59:22.505512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T15:59:22.511684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T15:59:22.511941Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T15:59:22.512736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:22.512977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:59:22.515425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:22.516788Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:22.516862Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:22.517099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T15:59:22.517157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:22.517213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T15:59:22.517426Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T15:59:22.525305Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T15:59:22.665967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:59:22.666205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:22.666411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T15:59:22.666620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T15:59:22.666679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:22.669073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:22.669196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 
2025-03-31T15:59:22.669424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:22.669479Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T15:59:22.669525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T15:59:22.669558Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T15:59:22.671499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:22.671571Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:59:22.671635Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T15:59:22.673491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:22.673538Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:22.673595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:22.673672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T15:59:22.677165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:59:22.680023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T15:59:22.680221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T15:59:22.681158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:22.681302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:22.681369Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:22.681643Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T15:59:22.681689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:22.681827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:59:22.681930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no 
IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T15:59:22.685084Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:22.685126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:22.685320Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:22.685361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T15:59:22.685592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:22.685649Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T15:59:22.685740Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:22.685772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:22.685807Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:22.685834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:22.685866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T15:59:22.685907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:22.685954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T15:59:22.685980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T15:59:22.686064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:59:22.686102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T15:59:22.686130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T15:59:22.687933Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:22.688057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:22.688088Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
nReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T15:59:22.927429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409551, partId: 0 2025-03-31T15:59:22.927583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Status: SUCCESS OnTabletId: 72075186233409551 2025-03-31T15:59:22.927628Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 101:0 HandleReply TEvConfigureStatus operationId:101:0 at schemeshard:72057594046678944 2025-03-31T15:59:22.927688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TConfigureParts operationId# 101:0 Got OK TEvConfigureStatus from tablet# 72075186233409551 shardIdx# 72057594046678944:6 at schemeshard# 72057594046678944 2025-03-31T15:59:22.927728Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 3 -> 128 2025-03-31T15:59:22.930732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T15:59:22.930913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T15:59:22.930963Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T15:59:22.931006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 101:0, at tablet# 72057594046678944 2025-03-31T15:59:22.931052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 101 ready parts: 1/1 2025-03-31T15:59:22.931213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 101 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:59:22.933076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2025-03-31T15:59:22.933213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 2025-03-31T15:59:22.933573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:22.933684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:22.933725Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-03-31T15:59:22.934061Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2025-03-31T15:59:22.934114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-03-31T15:59:22.934282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:59:22.934337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 8 2025-03-31T15:59:22.934405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2025-03-31T15:59:22.936990Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:22.937059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:22.937237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T15:59:22.937374Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:22.937415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-03-31T15:59:22.937464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-03-31T15:59:22.937785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T15:59:22.937828Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-03-31T15:59:22.937933Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-31T15:59:22.937967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T15:59:22.938018Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-31T15:59:22.938049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T15:59:22.938088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-03-31T15:59:22.938144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T15:59:22.938184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-03-31T15:59:22.938216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-03-31T15:59:22.938489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 9 2025-03-31T15:59:22.938534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 1 2025-03-31T15:59:22.938567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-03-31T15:59:22.938599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-03-31T15:59:22.939326Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 
2025-03-31T15:59:22.939437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T15:59:22.939476Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-03-31T15:59:22.939518Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-03-31T15:59:22.939561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:59:22.940231Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T15:59:22.940299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T15:59:22.940330Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-03-31T15:59:22.940371Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-03-31T15:59:22.940398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 8 2025-03-31T15:59:22.940465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 1 2025-03-31T15:59:22.940530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:569:2478] 2025-03-31T15:59:22.943528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-31T15:59:22.943648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-31T15:59:22.943699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-31T15:59:22.943726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:570:2479] TestWaitNotification: OK eventTxId 101 2025-03-31T15:59:22.944287Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:59:22.944471Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 200us result status StatusSuccess 2025-03-31T15:59:22.944902Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 
PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-false [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T15:59:17.808636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T15:59:17.808747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:17.808791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-03-31T15:59:17.808832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T15:59:17.808881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T15:59:17.808927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T15:59:17.808991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:17.809086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T15:59:17.809433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:59:17.903373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:59:17.903437Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:17.910694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-03-31T15:59:17.910969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T15:59:17.911181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T15:59:17.916580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T15:59:17.916842Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T15:59:17.917603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:17.917828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:59:17.919882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:17.921099Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:17.921158Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:17.921336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T15:59:17.921384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:17.921434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T15:59:17.921582Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T15:59:17.929084Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T15:59:18.077846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:59:18.078074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:18.078316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T15:59:18.078585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T15:59:18.078637Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:18.084410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:18.084612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T15:59:18.084822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:18.084870Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T15:59:18.084931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T15:59:18.084961Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T15:59:18.087452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:18.087522Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:59:18.087563Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T15:59:18.089633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:18.089680Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:18.089735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:18.089788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T15:59:18.093410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:59:18.095507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T15:59:18.095683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T15:59:18.096788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:18.096914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:18.096982Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:18.097251Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T15:59:18.097305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:18.097478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:59:18.097586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T15:59:18.101177Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:18.101210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:18.101387Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:18.101416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T15:59:18.101587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:18.101623Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T15:59:18.101688Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:18.101713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:18.101767Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:18.101800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:18.101828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T15:59:18.101858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:18.101907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T15:59:18.101933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T15:59:18.101985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:59:18.102013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T15:59:18.102033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T15:59:18.103367Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:18.103454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:18.103481Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, coun ... 
hemeshard: 72057594046678944, message: Source { RawX1: 437 RawX2: 4294969697 } Origin: 72075186233409548 State: 5 TxId: 103 Step: 0 Generation: 2 2025-03-31T15:59:22.880331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409548, partId: 0 2025-03-31T15:59:22.880436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: Source { RawX1: 437 RawX2: 4294969697 } Origin: 72075186233409548 State: 5 TxId: 103 Step: 0 Generation: 2 2025-03-31T15:59:22.880505Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 103:0 HandleReply TEvDataShard::TEvSchemaChanged, save it, at schemeshard: 72057594046678944 2025-03-31T15:59:22.880980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-31T15:59:22.881039Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 103:0 ProgressState, operation type: TxDropTable, at tablet# 72057594046678944 2025-03-31T15:59:22.881095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 103:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2025-03-31T15:59:22.881130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 103, done: 0, blocked: 1 2025-03-31T15:59:22.881201Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 103:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 103 Name: RenamePathBarrier }, at tablet# 72057594046678944 2025-03-31T15:59:22.881336Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 137 -> 129 2025-03-31T15:59:22.881462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-31T15:59:22.881521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-03-31T15:59:22.884220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-31T15:59:22.885804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-31T15:59:22.886194Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:22.886254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T15:59:22.886453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-03-31T15:59:22.886627Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:22.886689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-03-31T15:59:22.886739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 103, path id: 3 2025-03-31T15:59:22.886963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 
2025-03-31T15:59:22.887016Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 103:0 ProgressState at tablet: 72057594046678944 2025-03-31T15:59:22.887157Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-31T15:59:22.887209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 103:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2025-03-31T15:59:22.887260Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 129 -> 240 2025-03-31T15:59:22.888257Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 103 2025-03-31T15:59:22.888344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 103 2025-03-31T15:59:22.888370Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-03-31T15:59:22.888403Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 8 2025-03-31T15:59:22.888448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-03-31T15:59:22.889408Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-03-31T15:59:22.889496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-03-31T15:59:22.889518Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-03-31T15:59:22.889539Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-03-31T15:59:22.889563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-03-31T15:59:22.889616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-03-31T15:59:22.892930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-31T15:59:22.892999Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 103:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:59:22.893393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-03-31T15:59:22.893601Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-03-31T15:59:22.893643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-31T15:59:22.893687Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 
2025-03-31T15:59:22.893740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-31T15:59:22.893782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-03-31T15:59:22.893868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:404:2371] message: TxId: 103 2025-03-31T15:59:22.893922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-31T15:59:22.893954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-03-31T15:59:22.893978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-03-31T15:59:22.894083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-31T15:59:22.894476Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:22.894504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T15:59:22.895446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-03-31T15:59:22.896173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-03-31T15:59:22.896743Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:22.896818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 0, path id: 2 2025-03-31T15:59:22.896904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-03-31T15:59:22.896956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:667:2601] 2025-03-31T15:59:22.897882Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 9 PathOwnerId: 72057594046678944, cookie: 0 TestWaitNotification: OK eventTxId 103 2025-03-31T15:59:22.898648Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:59:22.898866Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SomeDatabase" took 190us result status StatusSuccess 2025-03-31T15:59:22.899223Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "SomeDatabase" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SubDomainStateVersion: 2 SecurityStateVersion: 0 } } DomainDescription { 
SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "unquoted_storage_pool" Kind: "unquoted_storage_pool_kind" } StoragePools { Name: "quoted_storage_pool" Kind: "quoted_storage_pool_kind" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "unquoted_storage_pool_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "quoted_storage_pool_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { storage_quotas { unit_kind: "quoted_storage_pool_kind" data_size_hard_quota: 1 } } SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TSchemeShardSubDomainTest::RestartAtInFly [GOOD]
>> TSchemeShardSubDomainTest::TableDiskSpaceQuotas
>> TSchemeShardSubDomainTest::SchemeLimitsRejectsWithIndexedTables
>> KqpIndexes::SimpleVectorIndexOrderByCosineSimilarityNullableLevel1 [GOOD]
>> KqpIndexes::SimpleVectorIndexOrderByCosineSimilarityNullableLevel2
>> TSchemeShardSubDomainTest::CreateSubDomainWithoutTablets
>> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain [GOOD]
>> TStoragePoolsQuotasTest::DifferentQuotasInteraction [GOOD]
>> TSchemeShardSubDomainTest::CreateWithNoEqualName [GOOD]
>> TSchemeShardSubDomainTest::Redefine [GOOD]
>> TSchemeShardSubDomainTest::SimultaneousCreateForceDrop [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousDefine [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T15:59:23.056475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T15:59:23.056597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:23.056638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T15:59:23.056678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T15:59:23.056726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T15:59:23.056780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T15:59:23.056847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0,
WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:23.056970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T15:59:23.057371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:59:23.154996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:59:23.155065Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:23.160723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:59:23.160851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T15:59:23.161058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T15:59:23.167259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T15:59:23.167526Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T15:59:23.168335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:23.168603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:59:23.170955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:23.172257Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:23.172332Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:23.172511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T15:59:23.172561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:23.172601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T15:59:23.172764Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T15:59:23.180931Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T15:59:23.304598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:59:23.304842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:23.305085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T15:59:23.305310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T15:59:23.305379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:23.307473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:23.307601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T15:59:23.307761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:23.307808Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T15:59:23.307853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T15:59:23.307883Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T15:59:23.309544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:23.309606Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:59:23.309642Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T15:59:23.311163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:23.311218Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:23.311276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:23.311318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T15:59:23.314132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:59:23.315814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T15:59:23.315986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T15:59:23.316839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:23.316960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:23.317035Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:23.317290Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T15:59:23.317328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:23.317475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:59:23.317577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T15:59:23.319884Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:23.319935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:23.320098Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:23.320138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T15:59:23.320343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:23.320383Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T15:59:23.320465Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:23.320490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:23.320521Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:23.320545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:23.320574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T15:59:23.320609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:23.320655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T15:59:23.320679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T15:59:23.320742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:59:23.320776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T15:59:23.320811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T15:59:23.322332Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:23.322455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:23.322496Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
TxId: 101, partId: 0, tablet: 72075186233409548 2025-03-31T15:59:23.489598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-03-31T15:59:23.489810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Status: SUCCESS OnTabletId: 72075186233409546 2025-03-31T15:59:23.489900Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 101:0 HandleReply TEvConfigureStatus operationId:101:0 at schemeshard:72057594046678944 2025-03-31T15:59:23.489964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TConfigureParts operationId# 101:0 Got OK TEvConfigureStatus from tablet# 72075186233409546 shardIdx# 72057594046678944:1 at schemeshard# 72057594046678944 2025-03-31T15:59:23.491347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409547, partId: 0 2025-03-31T15:59:23.491476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Status: SUCCESS OnTabletId: 72075186233409547 2025-03-31T15:59:23.491516Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 101:0 HandleReply TEvConfigureStatus operationId:101:0 at schemeshard:72057594046678944 2025-03-31T15:59:23.491555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TConfigureParts operationId# 101:0 Got OK TEvConfigureStatus from tablet# 72075186233409547 shardIdx# 72057594046678944:2 at schemeshard# 72057594046678944 2025-03-31T15:59:23.495939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T15:59:23.496156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T15:59:23.497707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409548, partId: 0 2025-03-31T15:59:23.497879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Status: SUCCESS OnTabletId: 72075186233409548 2025-03-31T15:59:23.497930Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 101:0 HandleReply TEvConfigureStatus operationId:101:0 at schemeshard:72057594046678944 2025-03-31T15:59:23.497989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TConfigureParts operationId# 101:0 Got OK TEvConfigureStatus from tablet# 72075186233409548 shardIdx# 72057594046678944:3 at schemeshard# 72057594046678944 2025-03-31T15:59:23.498029Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 3 -> 128 2025-03-31T15:59:23.500885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T15:59:23.501095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T15:59:23.501145Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T15:59:23.501196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 101:0, at tablet# 72057594046678944 2025-03-31T15:59:23.501287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 101 ready parts: 1/1 2025-03-31T15:59:23.501444Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 101 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:59:23.503503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2025-03-31T15:59:23.503627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000003 2025-03-31T15:59:23.503968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:23.504100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:23.504176Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-03-31T15:59:23.504491Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2025-03-31T15:59:23.504565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-03-31T15:59:23.504746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-03-31T15:59:23.504837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2025-03-31T15:59:23.507403Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:23.507457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T15:59:23.507657Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:23.507699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-03-31T15:59:23.507923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T15:59:23.507998Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-03-31T15:59:23.508108Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-31T15:59:23.508159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T15:59:23.508205Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-31T15:59:23.508243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 
2025-03-31T15:59:23.508282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-03-31T15:59:23.508320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T15:59:23.508381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-03-31T15:59:23.508419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-03-31T15:59:23.508621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-03-31T15:59:23.508675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 1, subscribers: 1 2025-03-31T15:59:23.508707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 4 2025-03-31T15:59:23.509481Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T15:59:23.509608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T15:59:23.509655Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-03-31T15:59:23.509698Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2025-03-31T15:59:23.509736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-03-31T15:59:23.509829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 1 2025-03-31T15:59:23.509866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:306:2297] 2025-03-31T15:59:23.513614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-31T15:59:23.513722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-31T15:59:23.513754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:313:2304] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2025-03-31T15:59:23.514387Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:59:23.514632Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 267us result status StatusSuccess 2025-03-31T15:59:23.515040Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 
PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenForceDrop
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::RestartAtInFly [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T15:59:23.326605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T15:59:23.326715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:23.326760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T15:59:23.326799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T15:59:23.326853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T15:59:23.326898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T15:59:23.326961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:23.327075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T15:59:23.327451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:59:23.407257Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:59:23.407302Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:23.412433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:59:23.412545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG:
TTxUpgradeSchema.Execute 2025-03-31T15:59:23.412768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T15:59:23.418182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T15:59:23.418427Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T15:59:23.419186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:23.419420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:59:23.421415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:23.422715Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:23.422788Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:23.422968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T15:59:23.423018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:23.423058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T15:59:23.423236Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T15:59:23.439729Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T15:59:23.559232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:59:23.559476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:23.559727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T15:59:23.559972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T15:59:23.560040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:23.563470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:23.563600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T15:59:23.563811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:23.563871Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, 
operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T15:59:23.563920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T15:59:23.563953Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T15:59:23.565971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:23.566048Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:59:23.566091Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T15:59:23.568392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:23.568443Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:23.568515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:23.568574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T15:59:23.572152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:59:23.574817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T15:59:23.575027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T15:59:23.576193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:23.576347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:23.576422Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:23.576740Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T15:59:23.576794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:23.577008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:59:23.577145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T15:59:23.579659Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at 
schemeshard: 72057594046678944 2025-03-31T15:59:23.579730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:23.579916Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:23.579954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T15:59:23.580203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:23.580251Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T15:59:23.580354Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:23.580390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:23.580429Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:23.580459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:23.580498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T15:59:23.580541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:23.580595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T15:59:23.580630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T15:59:23.580710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:59:23.580745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T15:59:23.580776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T15:59:23.582894Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:23.583050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:23.583122Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
Seconds# 600.000000s, MaxRate# 1 2025-03-31T15:59:23.794668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:23.794708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T15:59:23.794745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T15:59:23.794782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T15:59:23.794835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T15:59:23.794893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:23.794962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T15:59:23.795422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:59:23.814116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:59:23.815386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T15:59:23.815577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T15:59:23.815716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:59:23.815766Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:23.815995Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T15:59:23.816769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 2, at schemeshard: 72057594046678944 2025-03-31T15:59:23.816861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: USER_0, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-31T15:59:23.816964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-03-31T15:59:23.817041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-03-31T15:59:23.817332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-03-31T15:59:23.817729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2025-03-31T15:59:23.817813Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-03-31T15:59:23.818036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-03-31T15:59:23.818132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-03-31T15:59:23.818299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 3, at schemeshard: 72057594046678944 2025-03-31T15:59:23.818372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-31T15:59:23.818431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-31T15:59:23.818454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-31T15:59:23.818558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-03-31T15:59:23.818684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-03-31T15:59:23.818910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 9, at schemeshard: 72057594046678944 2025-03-31T15:59:23.819300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-03-31T15:59:23.819461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-03-31T15:59:23.819964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-03-31T15:59:23.820072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-03-31T15:59:23.820367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-03-31T15:59:23.820480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-03-31T15:59:23.820603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-03-31T15:59:23.820819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-03-31T15:59:23.820928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-03-31T15:59:23.821097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-03-31T15:59:23.821388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-03-31T15:59:23.821678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-03-31T15:59:23.821789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-03-31T15:59:23.821858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-03-31T15:59:23.829953Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:23.830027Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:23.830248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T15:59:23.830305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:23.830348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T15:59:23.830433Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: 
TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 100 2025-03-31T15:59:23.894593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-03-31T15:59:23.894666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 Leader for TabletID 72057594046678944 is [1:456:2406] sender: [1:516:2058] recipient: [1:15:2062] 2025-03-31T15:59:23.895795Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-03-31T15:59:23.895932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-03-31T15:59:23.895993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:514:2451] TestWaitNotification: OK eventTxId 100 2025-03-31T15:59:23.896971Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:59:23.897197Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 257us result status StatusSuccess 2025-03-31T15:59:23.897641Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:23.898166Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:59:23.898338Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 160us result status StatusSuccess 2025-03-31T15:59:23.898702Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 
CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TSchemeShardSubDomainTest::DeclareDefineAndDelete [GOOD]
>> TSchemeShardSubDomainTest::RmDir [GOOD]
>> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenDrop
------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> ReadLoad::ShouldReadKqpMoreThanRows [GOOD]
Test command err:
2025-03-31T15:59:00.030410Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:59:00.030682Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:59:00.030968Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a52/r3tmp/tmpaHDnRf/pdisk_1.dat 2025-03-31T15:59:00.706102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T15:59:00.794213Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:00.868783Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:59:00.868938Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:59:00.881017Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:59:00.981014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T15:59:01.488775Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 warmups table# usertable in dir# /Root with rows# 100 2025-03-31T15:59:01.490677Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:738:2620], subTag: 1} TUpsertActor Bootstrap called: RowCount: 100 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" 2025-03-31T15:59:01.519724Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:738:2620], subTag: 1} TUpsertActor finished in 0.028700s, errors=0 2025-03-31T15:59:01.520025Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kReadKqpStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "usertable" } TargetShard { TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" } ReadKqpStart { RowCount: 100 Inflights: 10 } 2025-03-31T15:59:01.520156Z node 1 :DS_LOAD_TEST NOTICE: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:738:2620], subTag: 3} Bootstrap called: RowCount: 100 Inflights: 10 2025-03-31T15:59:01.521255Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:738:2620], subTag: 3} will work with tablet# 72075186224037888 with ownerId# 72057594046644480 with tableId# 2 resolved for path# /Root/usertable with columnsCount# 11, keyColumnCount# 1 2025-03-31T15:59:01.521421Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:738:2620], subTag: 3} started fullscan actor# [1:750:2632] 2025-03-31T15:59:01.521574Z node 1 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 1} Bootstrap called, sample# 100 2025-03-31T15:59:01.521610Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 1} Connect to# 72075186224037888 called 2025-03-31T15:59:01.522549Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 1} Handle TEvClientConnected called, Status# OK 
2025-03-31T15:59:01.528024Z node 1 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 1} finished in 0.005266s, sampled# 100, iter finished# 1, oks# 100 2025-03-31T15:59:01.528295Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:738:2620], subTag: 3} received keyCount# 100 2025-03-31T15:59:01.528559Z node 1 :DS_LOAD_TEST NOTICE: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:738:2620], subTag: 3} started# 10 actors each with inflight# 1 2025-03-31T15:59:01.528638Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 2} Bootstrap called 2025-03-31T15:59:01.528707Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 2} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-03-31T15:59:01.528770Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 3} Bootstrap called 2025-03-31T15:59:01.528794Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 3} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-03-31T15:59:01.528843Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 4} Bootstrap called 2025-03-31T15:59:01.528873Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 4} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-03-31T15:59:01.528924Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 5} Bootstrap called 2025-03-31T15:59:01.528951Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 5} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-03-31T15:59:01.528978Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 6} Bootstrap called 2025-03-31T15:59:01.529014Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 6} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-03-31T15:59:01.529060Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 7} Bootstrap called 2025-03-31T15:59:01.529086Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 7} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-03-31T15:59:01.529109Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 8} Bootstrap called 2025-03-31T15:59:01.529131Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 8} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-03-31T15:59:01.529156Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 9} Bootstrap called 2025-03-31T15:59:01.529175Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 9} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-03-31T15:59:01.529202Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 10} Bootstrap called 2025-03-31T15:59:01.529223Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 10} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-03-31T15:59:01.529266Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 11} Bootstrap called 2025-03-31T15:59:01.529290Z node 1 :DS_LOAD_TEST DEBUG: 
TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 11} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-03-31T15:59:01.537512Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 2} session: ydb://session/3?node_id=1&id=ZDZmM2Y5OTEtZDM5ZjU3YjgtNzM4ZWY4N2EtOTFjOGEzMw== 2025-03-31T15:59:01.539827Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 3} session: ydb://session/3?node_id=1&id=ZTQxYjA2MzAtZDNjMjgzZDUtY2JiMDgzOTItZjNjNTExNDA= 2025-03-31T15:59:01.546725Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 4} session: ydb://session/3?node_id=1&id=NTY2NmMxOGUtMWJiYThlZWUtOWYzNWUwZjEtZDVjOWMzYWU= 2025-03-31T15:59:01.546887Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 5} session: ydb://session/3?node_id=1&id=MTgwZDI3OTQtODQ0YjIwZGItMmJhNmJlNGItMmRmMjIwZjU= 2025-03-31T15:59:01.550458Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 6} session: ydb://session/3?node_id=1&id=NDM1NWY3Ny1iZDE4OTg2Yi1lYWQxNjljYS1jNzg2MjQ5Ng== 2025-03-31T15:59:01.550618Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 7} session: ydb://session/3?node_id=1&id=Njg2YzVlNjctNGUzODIxMDItOWEwNjEzNjEtZTg2MDc5MGU= 2025-03-31T15:59:01.556655Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 8} session: ydb://session/3?node_id=1&id=NDFhMzExMmUtM2M2YjA5ZGUtYThjMTQ1YzAtNzA5OTYwMDc= 2025-03-31T15:59:01.563515Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 9} session: ydb://session/3?node_id=1&id=ZmI5YWQ5MjItYTRkODc2NTUtM2YzNTllMzAtZjc5Nzk1YTU= 2025-03-31T15:59:01.566960Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 10} session: ydb://session/3?node_id=1&id=NmQ0YTg0MDctZGU5MjZkNmMtY2M1OWY3ZWUtYmZkODcxNjc= 2025-03-31T15:59:01.571311Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 11} session: ydb://session/3?node_id=1&id=ZmUzZjgxNGEtYTE4NjFlMjktZWY5NDkwYTgtMWIwMmMyN2I= 2025-03-31T15:59:01.582817Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:774:2656], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:01.582971Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:805:2681], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:01.583031Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:806:2682], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:01.583132Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:807:2683], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:01.583213Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:808:2684], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:01.583262Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:809:2685], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:01.583305Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:812:2688], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:01.583359Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:814:2690], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:01.583814Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:816:2692], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Erro ... 2025-03-31T15:59:15.955230Z node 2 :TX_PROXY ERROR: Actor# [2:855:2725] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-31T15:59:15.955709Z node 2 :TX_PROXY ERROR: Actor# [2:856:2726] txid# 281474976715664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-31T15:59:15.956195Z node 2 :TX_PROXY ERROR: Actor# [2:863:2728] txid# 281474976715665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-31T15:59:15.957327Z node 2 :TX_PROXY ERROR: Actor# [2:866:2729] txid# 281474976715666, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-31T15:59:15.958021Z node 2 :TX_PROXY ERROR: Actor# [2:870:2730] txid# 281474976715667, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-31T15:59:15.959767Z node 2 :TX_PROXY ERROR: Actor# [2:848:2724] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-31T15:59:16.107066Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:828:2704], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T15:59:16.107226Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:829:2705], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T15:59:16.107301Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:830:2706], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T15:59:16.107365Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:833:2709], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T15:59:16.107424Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:834:2710], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T15:59:16.107481Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:835:2711], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T15:59:16.107562Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:836:2712], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T15:59:16.107628Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:838:2714], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T15:59:16.107690Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:845:2721], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T15:59:16.107750Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:857:2727], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T15:59:16.144909Z node 2 :TX_PROXY ERROR: Actor# [2:986:2822] txid# 281474976715668, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:59:16.713961Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:747:2629], subTag: 2} finished in 0.840027s, errors=0 2025-03-31T15:59:16.714234Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:738:2620], subTag: 3} finished: 2 { Tag: 2 DurationMs: 840 OperationsOK: 100 OperationsError: 0 } 2025-03-31T15:59:16.728668Z node 2 :TX_PROXY ERROR: Actor# [2:1915:3144] txid# 281474976715769, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:59:17.243644Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:747:2629], subTag: 11} finished in 1.346809s, errors=0 2025-03-31T15:59:17.244000Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:738:2620], subTag: 3} finished: 11 { Tag: 11 DurationMs: 1346 OperationsOK: 100 OperationsError: 0 } 2025-03-31T15:59:17.259639Z node 2 :TX_PROXY ERROR: Actor# [2:2822:3450] txid# 281474976715870, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:59:17.852480Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:747:2629], subTag: 9} finished in 1.959432s, errors=0 2025-03-31T15:59:17.852667Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:738:2620], subTag: 3} finished: 9 { Tag: 9 DurationMs: 1959 OperationsOK: 100 OperationsError: 0 } 2025-03-31T15:59:17.872643Z node 2 :TX_PROXY ERROR: Actor# [2:3729:3756] txid# 281474976715971, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:59:18.526363Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:747:2629], subTag: 3} finished in 2.647949s, errors=0 2025-03-31T15:59:18.526746Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:738:2620], subTag: 3} finished: 3 { Tag: 3 DurationMs: 2647 OperationsOK: 100 OperationsError: 0 } 2025-03-31T15:59:18.542962Z node 2 :TX_PROXY ERROR: Actor# [2:4636:4062] txid# 281474976716072, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:59:19.255515Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:747:2629], subTag: 7} finished in 3.367191s, errors=0 2025-03-31T15:59:19.255965Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:738:2620], subTag: 3} finished: 7 { Tag: 7 DurationMs: 3367 OperationsOK: 100 
OperationsError: 0 } 2025-03-31T15:59:19.273525Z node 2 :TX_PROXY ERROR: Actor# [2:5543:4368] txid# 281474976716173, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:59:20.100474Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:747:2629], subTag: 6} finished in 4.214353s, errors=0 2025-03-31T15:59:20.100865Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:738:2620], subTag: 3} finished: 6 { Tag: 6 DurationMs: 4214 OperationsOK: 100 OperationsError: 0 } 2025-03-31T15:59:20.118089Z node 2 :TX_PROXY ERROR: Actor# [2:6450:4674] txid# 281474976716274, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:59:20.756681Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:747:2629], subTag: 4} finished in 4.872886s, errors=0 2025-03-31T15:59:20.757091Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:738:2620], subTag: 3} finished: 4 { Tag: 4 DurationMs: 4872 OperationsOK: 100 OperationsError: 0 } 2025-03-31T15:59:20.774472Z node 2 :TX_PROXY ERROR: Actor# [2:7357:4980] txid# 281474976716375, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:59:21.522402Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:747:2629], subTag: 8} finished in 5.629515s, errors=0 2025-03-31T15:59:21.522889Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:738:2620], subTag: 3} finished: 8 { Tag: 8 DurationMs: 5629 OperationsOK: 100 OperationsError: 0 } 2025-03-31T15:59:21.542412Z node 2 :TX_PROXY ERROR: Actor# [2:8264:5286] txid# 281474976716476, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:59:22.299582Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:747:2629], subTag: 5} finished in 6.415599s, errors=0 2025-03-31T15:59:22.300105Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:738:2620], subTag: 3} finished: 5 { Tag: 5 DurationMs: 6415 OperationsOK: 100 OperationsError: 0 } 2025-03-31T15:59:22.320531Z node 2 :TX_PROXY ERROR: Actor# [2:9171:5592] txid# 281474976716577, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:59:23.153092Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:747:2629], subTag: 10} finished in 7.256402s, errors=0 2025-03-31T15:59:23.153593Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:738:2620], subTag: 3} finished: 10 { Tag: 10 DurationMs: 7256 OperationsOK: 100 OperationsError: 0 } 
2025-03-31T15:59:23.153665Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:738:2620], subTag: 3} finished in 7.283316s, oks# 1000, errors# 0 2025-03-31T15:59:23.153933Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [2:747:2629] with tag# 3
>> TSchemeShardSubDomainTest::CreateForceDropSolomon
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T15:59:20.215767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T15:59:20.215861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:20.215902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-03-31T15:59:20.215934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T15:59:20.215979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T15:59:20.216023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T15:59:20.216085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:20.216179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T15:59:20.216527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:59:20.304086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:59:20.304143Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:20.309032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:59:20.309132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T15:59:20.309327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T15:59:20.314759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T15:59:20.315010Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T15:59:20.315754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:20.316003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:59:20.318042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-03-31T15:59:20.319265Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:20.319338Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:20.319541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T15:59:20.319595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:20.319633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T15:59:20.319788Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T15:59:20.327714Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T15:59:20.461474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:59:20.461699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:20.461920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T15:59:20.462149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T15:59:20.462212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:20.464782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:20.464926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T15:59:20.465105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:20.465157Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T15:59:20.465202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T15:59:20.465233Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T15:59:20.468292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:20.468372Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:59:20.468411Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T15:59:20.470317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at 
schemeshard: 72057594046678944 2025-03-31T15:59:20.470366Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:20.470451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:20.470512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T15:59:20.474350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:59:20.476909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T15:59:20.477086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T15:59:20.478133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:20.478266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:20.478345Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:20.478590Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T15:59:20.478637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:20.478792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:59:20.478890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T15:59:20.483575Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:20.483768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:20.483980Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:20.484037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T15:59:20.484278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:20.484322Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 
2025-03-31T15:59:20.484413Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:20.484462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:20.484504Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:20.484534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:20.484568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T15:59:20.484609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:20.484656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T15:59:20.484686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T15:59:20.484789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:59:20.484825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T15:59:20.484858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T15:59:20.497641Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:20.497783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:20.497817Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, coun ... 
age: Source { RawX1: 520 RawX2: 4294969763 } Origin: 72075186233409549 State: 5 TxId: 104 Step: 0 Generation: 2 2025-03-31T15:59:23.959812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409549, partId: 0 2025-03-31T15:59:23.960059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72075186233409546, message: Source { RawX1: 520 RawX2: 4294969763 } Origin: 72075186233409549 State: 5 TxId: 104 Step: 0 Generation: 2 2025-03-31T15:59:23.960129Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 104:0 HandleReply TEvDataShard::TEvSchemaChanged, save it, at schemeshard: 72075186233409546 2025-03-31T15:59:23.962998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2025-03-31T15:59:23.963135Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 104:0 ProgressState, operation type: TxDropTable, at tablet# 72075186233409546 2025-03-31T15:59:23.963199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 104:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2025-03-31T15:59:23.963265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 104, done: 0, blocked: 1 2025-03-31T15:59:23.963363Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 104:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 104 Name: RenamePathBarrier }, at tablet# 72075186233409546 2025-03-31T15:59:23.963515Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 137 -> 129 2025-03-31T15:59:23.963661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 4 2025-03-31T15:59:23.963769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 3 2025-03-31T15:59:23.965367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72075186233409546 2025-03-31T15:59:23.984436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72075186233409546 2025-03-31T15:59:23.988301Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-03-31T15:59:23.988374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-03-31T15:59:23.988636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 2] 2025-03-31T15:59:23.988824Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-03-31T15:59:23.988891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:445:2397], at schemeshard: 72075186233409546, txId: 104, path id: 1 2025-03-31T15:59:23.988943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:445:2397], at schemeshard: 72075186233409546, txId: 104, path id: 2 2025-03-31T15:59:23.989566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2025-03-31T15:59:23.989634Z node 1 
:FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 104:0 ProgressState at tablet: 72075186233409546 2025-03-31T15:59:23.989746Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 104:0, at schemeshard: 72075186233409546 2025-03-31T15:59:23.989789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 104:0, datashard: 72075186233409549, at schemeshard: 72075186233409546 2025-03-31T15:59:23.989839Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 129 -> 240 2025-03-31T15:59:23.992046Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72075186233409546, cookie: 104 2025-03-31T15:59:23.992191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72075186233409546, cookie: 104 2025-03-31T15:59:23.992240Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2025-03-31T15:59:23.992290Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 1], version: 9 2025-03-31T15:59:23.992342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 5 2025-03-31T15:59:23.993124Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2025-03-31T15:59:23.993233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2025-03-31T15:59:23.993290Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2025-03-31T15:59:23.993323Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 2], version: 18446744073709551615 2025-03-31T15:59:23.993364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 4 2025-03-31T15:59:23.993439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-03-31T15:59:24.005728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2025-03-31T15:59:24.005816Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 104:0 ProgressState, at schemeshard: 72075186233409546 2025-03-31T15:59:24.006296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 3 2025-03-31T15:59:24.006528Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-03-31T15:59:24.006585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-31T15:59:24.006640Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-03-31T15:59:24.006680Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-31T15:59:24.006724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2025-03-31T15:59:24.006822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:550:2489] message: TxId: 104 2025-03-31T15:59:24.006873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-31T15:59:24.006928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-03-31T15:59:24.006969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-03-31T15:59:24.007174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 2 2025-03-31T15:59:24.011700Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-03-31T15:59:24.011766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 0, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-03-31T15:59:24.012597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2025-03-31T15:59:24.014594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2025-03-31T15:59:24.015956Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-03-31T15:59:24.016020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:445:2397], at schemeshard: 72075186233409546, txId: 0, path id: 1 2025-03-31T15:59:24.016122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-03-31T15:59:24.016172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:824:2742] 2025-03-31T15:59:24.017081Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72075186233409546, cookie: 0 TestWaitNotification: OK eventTxId 104 2025-03-31T15:59:24.018947Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409546 2025-03-31T15:59:24.023384Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186233409546 describe path "/MyRoot/SomeDatabase" took 4.42ms result status StatusSuccess 2025-03-31T15:59:24.024037Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "MyRoot/SomeDatabase" PathId: 1 SchemeshardId: 72075186233409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 10 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 10 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 2 SubDomainStateVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 
PathId_Depricated: 2 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "fast" Kind: "fast_kind" } StoragePools { Name: "large" Kind: "large_kind" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "large_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "fast_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 2800 data_size_soft_quota: 2200 storage_quotas { unit_kind: "fast_kind" data_size_hard_quota: 600 data_size_soft_quota: 500 } storage_quotas { unit_kind: "large_kind" data_size_hard_quota: 2200 data_size_soft_quota: 1700 } } SecurityState { Audience: "/MyRoot/SomeDatabase" } } } PathId: 1 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateWithNoEqualName [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T15:59:22.984715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T15:59:22.984806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:22.984837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T15:59:22.984866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T15:59:22.984901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T15:59:22.984935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T15:59:22.984992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:22.985058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T15:59:22.985404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:59:23.088206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:59:23.088284Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:23.096284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:59:23.096373Z node
1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T15:59:23.096518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T15:59:23.105620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T15:59:23.105838Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T15:59:23.106395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:23.106580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:59:23.108263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:23.109249Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:23.109329Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:23.109498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T15:59:23.109550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:23.109591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T15:59:23.109806Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T15:59:23.117138Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T15:59:23.245703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:59:23.245930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:23.246140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T15:59:23.246400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T15:59:23.246452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:23.248720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:23.248832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T15:59:23.248983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:23.249045Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts 
opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T15:59:23.249090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T15:59:23.249117Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T15:59:23.250859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:23.250933Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:59:23.250974Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T15:59:23.252942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:23.252984Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:23.253038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:23.253081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T15:59:23.255947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:59:23.258174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T15:59:23.258388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T15:59:23.259619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:23.259763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:23.259841Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:23.260125Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T15:59:23.260186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:23.260366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:59:23.260504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T15:59:23.263071Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:23.263156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:23.263388Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:23.263444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T15:59:23.263834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:23.263898Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T15:59:23.264015Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:23.264060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:23.264110Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:23.264149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:23.264216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T15:59:23.264278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:23.264344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T15:59:23.264381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T15:59:23.264461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:59:23.264501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T15:59:23.264555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T15:59:23.266635Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:23.266758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:23.266802Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
hemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-31T15:59:24.038166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T15:59:24.038326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T15:59:24.038735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T15:59:24.038819Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-03-31T15:59:24.038956Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-31T15:59:24.039034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T15:59:24.039111Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-31T15:59:24.039154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T15:59:24.039202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-03-31T15:59:24.039303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:626:2560] message: TxId: 102 2025-03-31T15:59:24.039376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T15:59:24.039428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-03-31T15:59:24.039469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-03-31T15:59:24.039624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-03-31T15:59:24.051238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-31T15:59:24.051310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:627:2561] TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 108 2025-03-31T15:59:24.054611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: "USER_3" } } TxId: 108 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:59:24.054863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /MyRoot/USER_3, operationId: 108:0, at schemeshard: 72057594046678944 2025-03-31T15:59:24.055009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 108:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/USER_3', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 5], type: EPathTypeSubDomain, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2025-03-31T15:59:24.072119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 108, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/USER_3\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 5], type: EPathTypeSubDomain, state: EPathStateNoChanges)" TxId: 108 SchemeshardId: 72057594046678944 PathId: 5 PathCreateTxId: 106, at schemeshard: 72057594046678944 2025-03-31T15:59:24.072341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 108, database: 
/MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/USER_3', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 5], type: EPathTypeSubDomain, state: EPathStateNoChanges), operation: CREATE DIRECTORY, path: /MyRoot/USER_3 TestModificationResult got TxId: 108, wait until txId: 108 2025-03-31T15:59:24.073075Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:59:24.073358Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 235us result status StatusSuccess 2025-03-31T15:59:24.073758Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:24.074447Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:59:24.074681Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_1" took 234us result status StatusSuccess 2025-03-31T15:59:24.075223Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1" PathDescription { Self { Name: "USER_1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000005 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "USER_1" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 
1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:24.076083Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:59:24.076327Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_2" took 239us result status StatusSuccess 2025-03-31T15:59:24.076638Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_2" PathDescription { Self { Name: "USER_2" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 104 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:24.077407Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:59:24.077601Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_3" took 199us result status StatusSuccess 2025-03-31T15:59:24.077982Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_3" PathDescription { 
Self { Name: "USER_3" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 106 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409549 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409550 } DomainKey { SchemeShard: 72057594046678944 PathId: 5 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 5 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::Restart [GOOD] >> TSchemeShardSubDomainTest::CreateAlterNbsChannels [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DifferentQuotasInteraction [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T15:59:20.128821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T15:59:20.128894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:20.128922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-03-31T15:59:20.128950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T15:59:20.128987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T15:59:20.129038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T15:59:20.129083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:20.129154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T15:59:20.129411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:59:20.227037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:59:20.227164Z node 1 :IMPORT WARN: Table profiles were 
not loaded 2025-03-31T15:59:20.234399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:59:20.234515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T15:59:20.234686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T15:59:20.240795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T15:59:20.241041Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T15:59:20.241776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:20.241990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:59:20.245148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:20.246329Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:20.246394Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:20.246555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T15:59:20.246615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:20.246670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T15:59:20.246813Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T15:59:20.254044Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T15:59:20.413536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:59:20.413802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:20.414069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T15:59:20.414318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T15:59:20.414377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:20.419931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:20.420097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T15:59:20.420341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:20.420416Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T15:59:20.420466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T15:59:20.420502Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T15:59:20.423716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:20.423804Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:59:20.423856Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T15:59:20.426730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:20.426794Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:20.426858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:20.426916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T15:59:20.430881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:59:20.438586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T15:59:20.438829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T15:59:20.439955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:20.440112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:20.440182Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:20.440462Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T15:59:20.440512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:20.440675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:59:20.440836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T15:59:20.464392Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:20.464451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:20.464634Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:20.464680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T15:59:20.464937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:20.464991Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T15:59:20.465088Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:20.465123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:20.465161Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:20.465205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:20.465246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T15:59:20.465289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:20.465362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T15:59:20.465398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T15:59:20.465471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:59:20.465514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T15:59:20.465548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T15:59:20.467665Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:20.467794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:20.467849Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, coun ... 
69697 } Origin: 72075186233409548 State: 5 TxId: 103 Step: 0 Generation: 2 2025-03-31T15:59:23.961524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409548, partId: 0 2025-03-31T15:59:23.961662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: Source { RawX1: 437 RawX2: 4294969697 } Origin: 72075186233409548 State: 5 TxId: 103 Step: 0 Generation: 2 2025-03-31T15:59:23.961716Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 103:0 HandleReply TEvDataShard::TEvSchemaChanged, save it, at schemeshard: 72057594046678944 2025-03-31T15:59:23.962157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-31T15:59:23.962217Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 103:0 ProgressState, operation type: TxDropTable, at tablet# 72057594046678944 2025-03-31T15:59:23.962269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 103:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2025-03-31T15:59:23.962311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 103, done: 0, blocked: 1 2025-03-31T15:59:23.962418Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 103:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 103 Name: RenamePathBarrier }, at tablet# 72057594046678944 2025-03-31T15:59:23.962539Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 137 -> 129 2025-03-31T15:59:23.962665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-31T15:59:23.962730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-03-31T15:59:23.965393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-31T15:59:23.972040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-31T15:59:23.972348Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:23.972396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T15:59:23.972596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-03-31T15:59:23.972751Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:23.972818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-03-31T15:59:23.972873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 103, path id: 3 2025-03-31T15:59:23.973367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-31T15:59:23.973432Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
NTableState::TProposedWaitParts operationId# 103:0 ProgressState at tablet: 72057594046678944 2025-03-31T15:59:23.973533Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-31T15:59:23.973584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 103:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2025-03-31T15:59:23.973629Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 129 -> 240 2025-03-31T15:59:23.974639Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 103 2025-03-31T15:59:23.974763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 103 2025-03-31T15:59:23.974804Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-03-31T15:59:23.974845Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 8 2025-03-31T15:59:23.974892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-03-31T15:59:23.980269Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-03-31T15:59:23.980401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-03-31T15:59:23.980457Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-03-31T15:59:23.980495Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-03-31T15:59:23.980547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-03-31T15:59:23.980644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-03-31T15:59:23.987936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-31T15:59:23.988014Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 103:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:59:23.988494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-03-31T15:59:23.988707Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-03-31T15:59:23.988753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-31T15:59:23.988816Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-03-31T15:59:23.988854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-31T15:59:23.988895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-03-31T15:59:23.988982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:404:2371] message: TxId: 103 2025-03-31T15:59:23.989031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-31T15:59:23.989069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-03-31T15:59:23.989102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-03-31T15:59:23.989209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-31T15:59:23.989752Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:23.989801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T15:59:23.990340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-03-31T15:59:23.996549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-03-31T15:59:23.997734Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:23.997808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 0, path id: 2 2025-03-31T15:59:23.997896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-03-31T15:59:23.997948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:737:2672] 2025-03-31T15:59:23.998844Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 9 PathOwnerId: 72057594046678944, cookie: 0 TestWaitNotification: OK eventTxId 103 2025-03-31T15:59:24.000587Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:59:24.000841Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SomeDatabase" took 262us result status StatusSuccess 2025-03-31T15:59:24.001347Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "SomeDatabase" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SubDomainStateVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 
1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "fast" Kind: "fast_kind" } StoragePools { Name: "large" Kind: "large_kind" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "large_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "fast_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 2800 data_size_soft_quota: 2200 storage_quotas { unit_kind: "fast_kind" data_size_hard_quota: 600 data_size_soft_quota: 500 } storage_quotas { unit_kind: "large_kind" data_size_hard_quota: 2200 data_size_soft_quota: 1700 } } SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::Redefine [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-31T15:59:23.714446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T15:59:23.714544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:23.714587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T15:59:23.714657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T15:59:23.714724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T15:59:23.714754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T15:59:23.714826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:23.714918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T15:59:23.715304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:59:23.811734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:59:23.811794Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:23.820844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:59:23.821219Z node 1
:FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T15:59:23.821438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T15:59:23.828836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T15:59:23.829147Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T15:59:23.829884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:23.830206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:59:23.832352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:23.833626Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:23.833686Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:23.833804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T15:59:23.833854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:23.833895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T15:59:23.834107Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T15:59:23.841220Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-03-31T15:59:23.989392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:59:23.989622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:23.989869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T15:59:23.990095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T15:59:23.990152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:23.992315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:23.992454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T15:59:23.992637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:23.992746Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts 
opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T15:59:23.992792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T15:59:23.992829Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T15:59:23.994843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:23.994911Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:59:23.994955Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T15:59:23.997223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:23.997306Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:23.997374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:23.997426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T15:59:24.001323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:59:24.003206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T15:59:24.003358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T15:59:24.004427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:24.004578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:24.004642Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:24.004914Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T15:59:24.004970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:24.005130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:59:24.005239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:59:24.007327Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at 
schemeshard: 72057594046678944 2025-03-31T15:59:24.007378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:24.007586Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:24.007627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T15:59:24.008031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:24.008079Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T15:59:24.008177Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:24.008214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:24.008252Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:24.008299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:24.008348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T15:59:24.008394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:24.008448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T15:59:24.008485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T15:59:24.008558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:59:24.008594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T15:59:24.008628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T15:59:24.010517Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:24.010645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:24.010685Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
sReadyToDone TxId: 104 ready parts: 1/1 2025-03-31T15:59:24.242541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-03-31T15:59:24.242595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-31T15:59:24.242634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-03-31T15:59:24.242675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-03-31T15:59:24.242836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-03-31T15:59:24.242878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2025-03-31T15:59:24.242917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-03-31T15:59:24.242949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-03-31T15:59:24.243804Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2025-03-31T15:59:24.243897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2025-03-31T15:59:24.243945Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2025-03-31T15:59:24.243986Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-03-31T15:59:24.244035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:59:24.244787Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-03-31T15:59:24.244874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-03-31T15:59:24.244905Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-03-31T15:59:24.244948Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-03-31T15:59:24.245006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-03-31T15:59:24.245078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2025-03-31T15:59:24.247394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-03-31T15:59:24.247456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 
72057594037968897 at ss 72057594046678944
2025-03-31T15:59:24.247503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944
2025-03-31T15:59:24.249399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104
2025-03-31T15:59:24.249693Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546
2025-03-31T15:59:24.251474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944
2025-03-31T15:59:24.251813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4
Forgetting tablet 72075186233409546
2025-03-31T15:59:24.252772Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548
2025-03-31T15:59:24.253123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944
2025-03-31T15:59:24.253375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3
2025-03-31T15:59:24.254055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104
2025-03-31T15:59:24.254247Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547
Forgetting tablet 72075186233409548
2025-03-31T15:59:24.254609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944
2025-03-31T15:59:24.254831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2
Forgetting tablet 72075186233409547
2025-03-31T15:59:24.255736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944
2025-03-31T15:59:24.255801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944
2025-03-31T15:59:24.255958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1
2025-03-31T15:59:24.256352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944
2025-03-31T15:59:24.256457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944
2025-03-31T15:59:24.256527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-03-31T15:59:24.260341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1
2025-03-31T15:59:24.260403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546
2025-03-31T15:59:24.263548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3
2025-03-31T15:59:24.263609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548
2025-03-31T15:59:24.264373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2
2025-03-31T15:59:24.264441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547
2025-03-31T15:59:24.264679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944
2025-03-31T15:59:24.264775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944
TestModificationResult got TxId: 104, wait until txId: 104
TestWaitNotification wait txId: 104
2025-03-31T15:59:24.265134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion
2025-03-31T15:59:24.265185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104
2025-03-31T15:59:24.265736Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944
2025-03-31T15:59:24.265880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult
2025-03-31T15:59:24.265927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:574:2528]
TestWaitNotification: OK eventTxId 104
2025-03-31T15:59:24.266600Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-03-31T15:59:24.266785Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 214us result status StatusPathDoesNotExist
2025-03-31T15:59:24.266972Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
2025-03-31T15:59:24.268275Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-03-31T15:59:24.268516Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 219us result status StatusSuccess
2025-03-31T15:59:24.268871Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TSchemeShardSubDomainTest::Delete
>> TSchemeShardSubDomainTest::SchemeDatabaseQuotaRejects [GOOD]
>> TSchemeShardSubDomainTest::CreateSubDomainWithoutTablets [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateForceDrop [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140]
2025-03-31T15:59:23.854722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-03-31T15:59:23.854821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-31T15:59:23.854861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-03-31T15:59:23.854898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-03-31T15:59:23.854973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-03-31T15:59:23.855012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-03-31T15:59:23.855098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-31T15:59:23.855189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-03-31T15:59:23.855577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-03-31T15:59:23.976120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-31T15:59:23.976179Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T15:59:23.985240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-03-31T15:59:23.985656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-03-31T15:59:23.985871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-03-31T15:59:23.993776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-03-31T15:59:23.994085Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-03-31T15:59:23.994854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-03-31T15:59:23.995236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-03-31T15:59:23.997691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-03-31T15:59:23.999142Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-31T15:59:23.999211Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-31T15:59:23.999308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-03-31T15:59:23.999353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-31T15:59:23.999391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-03-31T15:59:23.999595Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-03-31T15:59:24.007119Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062]
2025-03-31T15:59:24.182252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-03-31T15:59:24.182435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:24.182618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-03-31T15:59:24.182850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-03-31T15:59:24.182925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:24.187984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-03-31T15:59:24.188099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-03-31T15:59:24.188261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:24.188308Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-03-31T15:59:24.188341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-03-31T15:59:24.188363Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-03-31T15:59:24.190065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:24.190119Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-03-31T15:59:24.190148Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-03-31T15:59:24.193197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:24.193276Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:24.193339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-03-31T15:59:24.193384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-03-31T15:59:24.196966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-03-31T15:59:24.198531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-03-31T15:59:24.198660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-03-31T15:59:24.199438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-03-31T15:59:24.199556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-03-31T15:59:24.199608Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-03-31T15:59:24.199816Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-03-31T15:59:24.199851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-03-31T15:59:24.199976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-03-31T15:59:24.200042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-03-31T15:59:24.201500Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-31T15:59:24.201547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-31T15:59:24.201684Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-31T15:59:24.201715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1
FAKE_COORDINATOR: Erasing txId 1
2025-03-31T15:59:24.201969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:24.201998Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-03-31T15:59:24.202071Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-03-31T15:59:24.202099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-31T15:59:24.202133Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-03-31T15:59:24.202161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-31T15:59:24.202191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-03-31T15:59:24.202229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-31T15:59:24.202285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-03-31T15:59:24.202319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-03-31T15:59:24.202385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-03-31T15:59:24.202432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-03-31T15:59:24.202467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-03-31T15:59:24.204220Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-03-31T15:59:24.204357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-03-31T15:59:24.204393Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ...
Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944
2025-03-31T15:59:24.409008Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 5 TabletID: 72075186233409550
2025-03-31T15:59:24.409244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046678944 ShardLocalIdx: 5, at schemeshard: 72057594046678944
2025-03-31T15:59:24.409566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7
Forgetting tablet 72075186233409550
2025-03-31T15:59:24.410045Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546
2025-03-31T15:59:24.410586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944
2025-03-31T15:59:24.410776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6
2025-03-31T15:59:24.411301Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548
2025-03-31T15:59:24.411916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944
2025-03-31T15:59:24.412072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5
Forgetting tablet 72075186233409546
2025-03-31T15:59:24.412975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101
2025-03-31T15:59:24.413467Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 6 TabletID: 72075186233409551
Forgetting tablet 72075186233409548
2025-03-31T15:59:24.413957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101
2025-03-31T15:59:24.414501Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547
2025-03-31T15:59:24.414892Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549
2025-03-31T15:59:24.415263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 6 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944
2025-03-31T15:59:24.415442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4
Forgetting tablet 72075186233409551
2025-03-31T15:59:24.416730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944
2025-03-31T15:59:24.416906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3
Forgetting tablet 72075186233409547
2025-03-31T15:59:24.417469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944
2025-03-31T15:59:24.417599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2
Forgetting tablet 72075186233409549
2025-03-31T15:59:24.419147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944
2025-03-31T15:59:24.419198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944
2025-03-31T15:59:24.419326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1
2025-03-31T15:59:24.420219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944
2025-03-31T15:59:24.420265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944
2025-03-31T15:59:24.420323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-03-31T15:59:24.421033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5
2025-03-31T15:59:24.421079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550
2025-03-31T15:59:24.421280Z node 1 :FLAT_TX_SCHEMESHARD INFO: Failed to connect, to tablet: 72075186233409550, at schemeshard: 72057594046678944
2025-03-31T15:59:24.422913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1
2025-03-31T15:59:24.422955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546
2025-03-31T15:59:24.423027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3
2025-03-31T15:59:24.423050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548
2025-03-31T15:59:24.425840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6
2025-03-31T15:59:24.425881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551
2025-03-31T15:59:24.426032Z node 1 :FLAT_TX_SCHEMESHARD INFO: Failed to connect, to tablet: 72075186233409551, at schemeshard: 72057594046678944
2025-03-31T15:59:24.426110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2
2025-03-31T15:59:24.426132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547
2025-03-31T15:59:24.426334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4
2025-03-31T15:59:24.426382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549
2025-03-31T15:59:24.426963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944
2025-03-31T15:59:24.427974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944
TestModificationResult got TxId: 101, wait until txId: 101
TestWaitNotification wait txId: 100
2025-03-31T15:59:24.428329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion
2025-03-31T15:59:24.428367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100
TestWaitNotification wait txId: 101
2025-03-31T15:59:24.428463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion
2025-03-31T15:59:24.428501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101
2025-03-31T15:59:24.428914Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944
2025-03-31T15:59:24.428987Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944
2025-03-31T15:59:24.429039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult
2025-03-31T15:59:24.429071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:620:2525]
2025-03-31T15:59:24.429219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult
2025-03-31T15:59:24.429244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:620:2525]
TestWaitNotification: OK eventTxId 100
TestWaitNotification: OK eventTxId 101
2025-03-31T15:59:24.429675Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-03-31T15:59:24.429849Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 207us result status StatusPathDoesNotExist
2025-03-31T15:59:24.430014Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
2025-03-31T15:59:24.430347Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-03-31T15:59:24.430516Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 149us result status StatusSuccess
2025-03-31T15:59:24.430873Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::RmDir [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141]
2025-03-31T15:59:23.974852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-03-31T15:59:23.974971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-31T15:59:23.975022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-03-31T15:59:23.975064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-03-31T15:59:23.979333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-03-31T15:59:23.979440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-03-31T15:59:23.979568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-31T15:59:23.979689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-03-31T15:59:23.980146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-03-31T15:59:24.111734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-31T15:59:24.111807Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T15:59:24.121971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-03-31T15:59:24.122093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-03-31T15:59:24.122302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-03-31T15:59:24.127666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-03-31T15:59:24.127947Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-03-31T15:59:24.128732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-03-31T15:59:24.128972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-03-31T15:59:24.131025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-03-31T15:59:24.132459Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-31T15:59:24.132537Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-31T15:59:24.132719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-03-31T15:59:24.132776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-31T15:59:24.132822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-03-31T15:59:24.132990Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-03-31T15:59:24.142440Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062]
2025-03-31T15:59:24.301893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-03-31T15:59:24.302110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:24.302329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-03-31T15:59:24.302571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-03-31T15:59:24.302628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:24.305931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-03-31T15:59:24.306075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-03-31T15:59:24.306292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:24.306356Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-03-31T15:59:24.306406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-03-31T15:59:24.306436Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-03-31T15:59:24.323053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:24.323149Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-03-31T15:59:24.323198Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-03-31T15:59:24.325423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:24.325481Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:24.325547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-03-31T15:59:24.325601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-03-31T15:59:24.329519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-03-31T15:59:24.331188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-03-31T15:59:24.331395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-03-31T15:59:24.332206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-03-31T15:59:24.332331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-03-31T15:59:24.332394Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-03-31T15:59:24.332674Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-03-31T15:59:24.332735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-03-31T15:59:24.332883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-03-31T15:59:24.332993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-03-31T15:59:24.335003Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-31T15:59:24.335053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-31T15:59:24.335226Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-31T15:59:24.335264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1
2025-03-31T15:59:24.335495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:24.335549Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-03-31T15:59:24.335629Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-03-31T15:59:24.335660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-31T15:59:24.335708Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-03-31T15:59:24.335749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-31T15:59:24.335798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-03-31T15:59:24.335834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-31T15:59:24.335883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-03-31T15:59:24.335916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-03-31T15:59:24.336001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-03-31T15:59:24.336047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-03-31T15:59:24.336078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-03-31T15:59:24.338094Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-03-31T15:59:24.338199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-03-31T15:59:24.338234Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ...
SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 100:0, at tablet# 72057594046678944
2025-03-31T15:59:24.520581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 100 ready parts: 1/1
2025-03-31T15:59:24.520705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 100 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-03-31T15:59:24.522373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816
2025-03-31T15:59:24.522544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 100 at step: 5000002
FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002
2025-03-31T15:59:24.522879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944
2025-03-31T15:59:24.522999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-03-31T15:59:24.523044Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944
2025-03-31T15:59:24.523431Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 128 -> 240
2025-03-31T15:59:24.523495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944
2025-03-31T15:59:24.523654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-03-31T15:59:24.523722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 8
2025-03-31T15:59:24.523791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 100
2025-03-31T15:59:24.526879Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-31T15:59:24.526941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-31T15:59:24.527129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2]
2025-03-31T15:59:24.527272Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-31T15:59:24.527314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 100, path id: 1
2025-03-31T15:59:24.527364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 100, path id: 2
2025-03-31T15:59:24.527741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944
2025-03-31T15:59:24.527787Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 100:0 ProgressState
2025-03-31T15:59:24.527889Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1
2025-03-31T15:59:24.527925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1
2025-03-31T15:59:24.527973Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1
2025-03-31T15:59:24.528019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1
2025-03-31T15:59:24.528061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false
2025-03-31T15:59:24.528096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1
2025-03-31T15:59:24.528145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 100:0
2025-03-31T15:59:24.528182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 100:0
2025-03-31T15:59:24.528384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 9
2025-03-31T15:59:24.528426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 100, publications: 2, subscribers: 1
2025-03-31T15:59:24.528460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5
2025-03-31T15:59:24.528491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 3
2025-03-31T15:59:24.529151Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100
2025-03-31T15:59:24.529250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100
2025-03-31T15:59:24.529303Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100
2025-03-31T15:59:24.529339Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5
2025-03-31T15:59:24.529381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-03-31T15:59:24.530282Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100
2025-03-31T15:59:24.530353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100
2025-03-31T15:59:24.530381Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100
2025-03-31T15:59:24.530407Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3
2025-03-31T15:59:24.530433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 8
2025-03-31T15:59:24.530499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 1
2025-03-31T15:59:24.530542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:569:2479]
2025-03-31T15:59:24.534021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100
2025-03-31T15:59:24.535053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100
2025-03-31T15:59:24.535155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult
2025-03-31T15:59:24.535185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:570:2480]
TestWaitNotification: OK eventTxId 100
2025-03-31T15:59:24.535646Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-03-31T15:59:24.535905Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 223us result status StatusSuccess
2025-03-31T15:59:24.536328Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 Coordinators: 72075186233409547 Coordinators: 72075186233409548 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409549 Mediators: 72075186233409550 Mediators: 72075186233409551 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
TestModificationResults wait txId: 101
2025-03-31T15:59:24.539139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpRmDir Drop { Name: "USER_0" } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-03-31T15:59:24.539275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TRmDir Propose, path: /MyRoot/USER_0, pathId: 0, opId: 101:0, at schemeshard: 72057594046678944
2025-03-31T15:59:24.539429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusPathIsNotDirectory, reason: Check failed: path: '/MyRoot/USER_0', error: path is not a directory (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeSubDomain, state: EPathStateNoChanges), at schemeshard: 72057594046678944
2025-03-31T15:59:24.541572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusPathIsNotDirectory Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path is not a directory (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeSubDomain, state: EPathStateNoChanges)" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944
2025-03-31T15:59:24.541708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusPathIsNotDirectory, reason: Check failed: path: '/MyRoot/USER_0', error: path is not a directory (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeSubDomain, state: EPathStateNoChanges), operation: DROP DIRECTORY, path: /MyRoot/USER_0
TestModificationResult got TxId: 101, wait until txId: 101
>> DataShardSnapshots::ReadIteratorLocalSnapshotThenWrite [GOOD]
>> DataShardSnapshots::RepeatableReadAfterSplitRace
>> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenMkDir
>> TSchemeShardSubDomainTest::SchemeLimitsRejects [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DeclareDefineAndDelete [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141]
2025-03-31T15:59:23.990179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-03-31T15:59:23.990301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-31T15:59:23.990350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-03-31T15:59:23.990394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-03-31T15:59:23.990453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-03-31T15:59:23.990506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-03-31T15:59:23.990593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-31T15:59:23.990706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-03-31T15:59:23.999193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-03-31T15:59:24.115558Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-31T15:59:24.115639Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T15:59:24.132568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-03-31T15:59:24.132677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-03-31T15:59:24.132933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-03-31T15:59:24.138783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-03-31T15:59:24.139111Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-03-31T15:59:24.139945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-03-31T15:59:24.140190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-03-31T15:59:24.142289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-03-31T15:59:24.143639Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-31T15:59:24.143708Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-31T15:59:24.143891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-03-31T15:59:24.143999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-31T15:59:24.144044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-03-31T15:59:24.144209Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-03-31T15:59:24.152524Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062]
2025-03-31T15:59:24.294797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-03-31T15:59:24.295131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:24.295385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-03-31T15:59:24.295639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-03-31T15:59:24.295703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:24.300412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-03-31T15:59:24.300594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-03-31T15:59:24.300813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:24.300878Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-03-31T15:59:24.300928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-03-31T15:59:24.300965Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-03-31T15:59:24.304372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:24.304465Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-03-31T15:59:24.304509Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-03-31T15:59:24.308775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:24.308858Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:24.308940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-03-31T15:59:24.309005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-03-31T15:59:24.315948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-03-31T15:59:24.323899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-03-31T15:59:24.324164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-03-31T15:59:24.325429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-03-31T15:59:24.325597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-03-31T15:59:24.325678Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-03-31T15:59:24.325978Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-03-31T15:59:24.326042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-03-31T15:59:24.326221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-03-31T15:59:24.326405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-03-31T15:59:24.329113Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-31T15:59:24.329168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-31T15:59:24.329403Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-31T15:59:24.329448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1
2025-03-31T15:59:24.329733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:24.329788Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-03-31T15:59:24.329900Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-03-31T15:59:24.329938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-31T15:59:24.329984Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-03-31T15:59:24.330025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-31T15:59:24.330075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-03-31T15:59:24.330122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-31T15:59:24.330188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-03-31T15:59:24.330225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-03-31T15:59:24.330310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-03-31T15:59:24.330367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-03-31T15:59:24.330415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-03-31T15:59:24.332508Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-03-31T15:59:24.332665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-03-31T15:59:24.332711Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ...
1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2025-03-31T15:59:24.528722Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:24.528757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:24.528872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T15:59:24.529041Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:24.529100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-03-31T15:59:24.529137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-03-31T15:59:24.529320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T15:59:24.529361Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 102:0 ProgressState 2025-03-31T15:59:24.529420Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-31T15:59:24.529453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T15:59:24.529507Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-31T15:59:24.529542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T15:59:24.529579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-03-31T15:59:24.529614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T15:59:24.529649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-03-31T15:59:24.529679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-03-31T15:59:24.529869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-03-31T15:59:24.529917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-03-31T15:59:24.529956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-03-31T15:59:24.529986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-03-31T15:59:24.530758Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T15:59:24.530831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T15:59:24.530863Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at 
schemeshard: 72057594046678944, txId: 102 2025-03-31T15:59:24.530911Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-03-31T15:59:24.530970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:59:24.531979Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T15:59:24.532061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T15:59:24.532087Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-03-31T15:59:24.532113Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-03-31T15:59:24.532157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-03-31T15:59:24.532226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-03-31T15:59:24.537823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-03-31T15:59:24.537884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2025-03-31T15:59:24.537909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-03-31T15:59:24.539072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-31T15:59:24.541715Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2025-03-31T15:59:24.542026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:24.542376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-31T15:59:24.542936Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2025-03-31T15:59:24.543757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-03-31T15:59:24.543966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-31T15:59:24.544245Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 
72075186233409547 2025-03-31T15:59:24.544518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-03-31T15:59:24.544669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 Forgetting tablet 72075186233409548 Forgetting tablet 72075186233409547 2025-03-31T15:59:24.546437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-31T15:59:24.546484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-31T15:59:24.546605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-31T15:59:24.547675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-31T15:59:24.547727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-31T15:59:24.547806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:59:24.548081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-31T15:59:24.549553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-03-31T15:59:24.549602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-03-31T15:59:24.549684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-03-31T15:59:24.549720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-03-31T15:59:24.551656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-03-31T15:59:24.551719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-03-31T15:59:24.551904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-03-31T15:59:24.552009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-03-31T15:59:24.552276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-03-31T15:59:24.552313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-03-31T15:59:24.552725Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-03-31T15:59:24.552810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-31T15:59:24.552845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 
102: satisfy waiter [1:518:2472] TestWaitNotification: OK eventTxId 102 2025-03-31T15:59:24.553379Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:59:24.553568Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 200us result status StatusPathDoesNotExist 2025-03-31T15:59:24.553736Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::Restart [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T15:59:24.142202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T15:59:24.142307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:24.142369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T15:59:24.142417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T15:59:24.142466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T15:59:24.142514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T15:59:24.142610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:24.142707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T15:59:24.143059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:59:24.240388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:59:24.240450Z node 1 :IMPORT WARN: Table profiles were not loaded 
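The StatusPathDoesNotExist describe result earlier in this block reports not just the failure but the nearest resolved ancestor (LastExistedPrefixPath: "/MyRoot", LastExistedPrefixPathId: 1). A small self-contained sketch of that longest-existing-prefix walk follows, under my assumption (not confirmed by the sources) that resolution simply strips trailing path components until a known path is found:

    #include <iostream>
    #include <set>
    #include <string>

    // Hypothetical resolver: walk up from a missing path until an
    // existing prefix is found, as in "nearest resolved path: '/MyRoot'".
    std::string LastExistedPrefix(const std::set<std::string>& existing,
                                  std::string path) {
        while (path != "/" && !existing.count(path)) {
            auto pos = path.rfind('/');
            path = (pos == 0 || pos == std::string::npos)
                       ? "/" : path.substr(0, pos);
        }
        return path;
    }

    int main() {
        std::set<std::string> existing{"/MyRoot"};
        // /MyRoot/USER_0 was dropped, so only its parent resolves:
        std::cout << LastExistedPrefix(existing, "/MyRoot/USER_0") << "\n"; // /MyRoot
        return 0;
    }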
2025-03-31T15:59:24.245440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:59:24.245555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T15:59:24.245778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T15:59:24.252939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T15:59:24.253217Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T15:59:24.254019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:24.254265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:59:24.256463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:24.257788Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:24.257853Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:24.258018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T15:59:24.258066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:24.258104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T15:59:24.258248Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T15:59:24.267049Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T15:59:24.396900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:59:24.397129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:24.397380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T15:59:24.397631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T15:59:24.397695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:24.404170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:24.404315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T15:59:24.404529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:24.404599Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T15:59:24.404648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T15:59:24.404686Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T15:59:24.408398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:24.408488Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:59:24.408534Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T15:59:24.412258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:24.412322Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:24.412400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:24.412455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T15:59:24.416414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:59:24.420277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T15:59:24.420528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T15:59:24.421770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:24.421920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:24.421994Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:24.422263Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T15:59:24.422315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:24.422483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:59:24.422594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T15:59:24.424632Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:24.424706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:24.424861Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:24.424917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T15:59:24.425159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:24.425201Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T15:59:24.425306Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:24.425336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:24.425374Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:24.425402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:24.425439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T15:59:24.425474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:24.425524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T15:59:24.425555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T15:59:24.425627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:59:24.425659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T15:59:24.425687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T15:59:24.428925Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:24.429044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:24.429079Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
4046678944 2025-03-31T15:59:24.609713Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:457:2058] recipient: [1:102:2137] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:460:2058] recipient: [1:15:2062] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:461:2058] recipient: [1:459:2411] Leader for TabletID 72057594046678944 is [1:462:2412] sender: [1:463:2058] recipient: [1:459:2411] 2025-03-31T15:59:24.653186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T15:59:24.653368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:24.653429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T15:59:24.653478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T15:59:24.653536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T15:59:24.653587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T15:59:24.653654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:24.653729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T15:59:24.654068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:59:24.673718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:59:24.675146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T15:59:24.675331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T15:59:24.675475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:59:24.675603Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:24.675997Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T15:59:24.676775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 2, at schemeshard: 72057594046678944 2025-03-31T15:59:24.676870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: USER_0, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-31T15:59:24.676961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-03-31T15:59:24.677028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-03-31T15:59:24.677256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-03-31T15:59:24.677540Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2025-03-31T15:59:24.677637Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-03-31T15:59:24.677858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-03-31T15:59:24.677946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-03-31T15:59:24.678051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 3, at schemeshard: 72057594046678944 2025-03-31T15:59:24.678099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-31T15:59:24.678129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-31T15:59:24.678153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-31T15:59:24.678244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-03-31T15:59:24.678317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-03-31T15:59:24.678549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 9, at schemeshard: 72057594046678944 2025-03-31T15:59:24.678997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-03-31T15:59:24.679139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-03-31T15:59:24.679596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-03-31T15:59:24.679672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-03-31T15:59:24.679939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-03-31T15:59:24.680034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-03-31T15:59:24.680173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-03-31T15:59:24.680388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-03-31T15:59:24.680605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-03-31T15:59:24.680767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-03-31T15:59:24.681034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-03-31T15:59:24.681221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-03-31T15:59:24.681290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-03-31T15:59:24.681355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 
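The TTxInit lines above show the Restart scenario's core claim: every piece of schemeshard state lives in local tables (Paths, Shards, ChannelsBinding, and so on) and is re-read on boot, so the restarted tablet answers describe requests identically. A toy sketch of that persist-then-rebuild pattern, with hypothetical types that are not the schemeshard implementation:

    #include <cstdint>
    #include <iostream>
    #include <map>
    #include <string>
    #include <vector>

    struct TPathRow { uint64_t Id; uint64_t ParentId; std::string Name; };

    struct TSchemeShard {
        std::map<uint64_t, TPathRow> Paths;
        void InitFromRows(const std::vector<TPathRow>& rows) {
            // Mirrors "TTxInit for Paths, read records: N": in-memory
            // state is rebuilt entirely from durable rows.
            Paths.clear();
            for (const auto& r : rows) Paths[r.Id] = r;
        }
    };

    int main() {
        std::vector<TPathRow> durable{{1, 1, "MyRoot"}, {2, 1, "USER_0"}};
        TSchemeShard before, after;
        before.InitFromRows(durable);
        after.InitFromRows(durable); // simulated reboot: same rows, same state
        std::cout << (before.Paths.size() == after.Paths.size()) << "\n"; // 1
        return 0;
    }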
2025-03-31T15:59:24.693183Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:24.693281Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:24.693600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T15:59:24.693640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:24.693676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T15:59:24.693825Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:462:2412] sender: [1:520:2058] recipient: [1:15:2062] 2025-03-31T15:59:24.751146Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:59:24.751410Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 305us result status StatusSuccess 2025-03-31T15:59:24.751795Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:24.752393Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:59:24.752571Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 230us result status StatusSuccess 2025-03-31T15:59:24.752925Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 
CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::CreateItemsInsideSubdomain >> TSchemeShardSubDomainTest::DeleteAndRestart ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateAlterNbsChannels [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T15:59:23.159538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T15:59:23.159635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:23.159670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T15:59:23.159718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T15:59:23.159765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T15:59:23.159804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T15:59:23.159868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:23.159957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T15:59:23.160298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:59:23.228697Z node 1 :FLAT_TX_SCHEMESHARD 
WARN: Cannot subscribe to console configs 2025-03-31T15:59:23.228738Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:23.232930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:59:23.233020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T15:59:23.233228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T15:59:23.238192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T15:59:23.238385Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T15:59:23.238935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:23.239144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:59:23.240845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:23.241811Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:23.241878Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:23.242023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T15:59:23.242066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:23.242094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T15:59:23.242208Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T15:59:23.248439Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T15:59:23.399586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:59:23.399807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:23.400019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T15:59:23.400233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T15:59:23.400288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:23.402948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:23.403074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER 
DATABASE, path: //MyRoot 2025-03-31T15:59:23.403271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:23.403325Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T15:59:23.403384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T15:59:23.403416Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T15:59:23.405719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:23.405777Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:59:23.405814Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T15:59:23.407471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:23.407514Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:23.407581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:23.407631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T15:59:23.411499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:59:23.413371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T15:59:23.413548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T15:59:23.414504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:23.414630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:23.414690Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:23.414977Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T15:59:23.415029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:23.415198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:59:23.415301Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T15:59:23.417355Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:23.417400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:23.417569Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:23.417608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T15:59:23.417842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:23.417882Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T15:59:23.417987Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:23.418022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:23.418057Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:23.418085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:23.418119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T15:59:23.418167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:23.418216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T15:59:23.418253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T15:59:23.418317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:59:23.418354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T15:59:23.418387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T15:59:23.420311Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:23.420422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:23.420454Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
2025-03-31T15:59:24.792009Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-03-31T15:59:24.792035Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 105 2025-03-31T15:59:24.792062Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-03-31T15:59:24.792092Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-03-31T15:59:24.793306Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-03-31T15:59:24.793377Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-03-31T15:59:24.793401Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2025-03-31T15:59:24.793427Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-03-31T15:59:24.793454Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-03-31T15:59:24.793519Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2025-03-31T15:59:24.794635Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-03-31T15:59:24.794682Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2025-03-31T15:59:24.794706Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-03-31T15:59:24.794729Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2025-03-31T15:59:24.795189Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-03-31T15:59:24.798224Z node 2 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2025-03-31T15:59:24.799387Z node 2 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2025-03-31T15:59:24.799796Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:24.800067Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409548 
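Worth noting in the ack lines above: a dropped path is published with Version: 18446744073709551615 (uint64 max), effectively a tombstone version that supersedes any live one, and txId 105 completes only when its in-flight publication count reaches zero. A self-contained sketch of that bookkeeping, assuming (hypothetically) a simple pending-version map:

    #include <cstdint>
    #include <iostream>
    #include <map>

    // Hypothetical bookkeeping behind "Publication in-flight, count: N"
    // and "Publication complete, notify & remove"; values mirror txId 105.
    struct TPublication {
        std::map<uint64_t, uint64_t> Pending; // localPathId -> expected version
    };

    bool AckPublish(TPublication& pub, uint64_t pathId, uint64_t version) {
        auto it = pub.Pending.find(pathId);
        if (it != pub.Pending.end() && version >= it->second) {
            pub.Pending.erase(it); // this (pathId, version) is now published
        }
        return pub.Pending.empty(); // true => publication complete
    }

    int main() {
        // 18446744073709551615 == UINT64_MAX acts as the tombstone version.
        TPublication tx105{{{2, UINT64_MAX}, {3, UINT64_MAX}}};
        std::cout << AckPublish(tx105, 3, UINT64_MAX) << "\n"; // 0: one ack still in flight
        std::cout << AckPublish(tx105, 2, UINT64_MAX) << "\n"; // 1: complete, notify & remove
        return 0;
    }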
2025-03-31T15:59:24.800950Z node 2 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547
2025-03-31T15:59:24.801088Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944
2025-03-31T15:59:24.803538Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2
2025-03-31T15:59:24.804296Z node 2 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549
2025-03-31T15:59:24.804517Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944
2025-03-31T15:59:24.804689Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3
Forgetting tablet 72075186233409547
2025-03-31T15:59:24.805811Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944
2025-03-31T15:59:24.805967Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1
Forgetting tablet 72075186233409549
2025-03-31T15:59:24.806462Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105
2025-03-31T15:59:24.807521Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944
2025-03-31T15:59:24.807591Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944
2025-03-31T15:59:24.807659Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2
2025-03-31T15:59:24.808189Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944
2025-03-31T15:59:24.808251Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944
2025-03-31T15:59:24.808378Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1
2025-03-31T15:59:24.808719Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105
2025-03-31T15:59:24.811504Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1
2025-03-31T15:59:24.811565Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546
2025-03-31T15:59:24.811735Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3
2025-03-31T15:59:24.811782Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548
2025-03-31T15:59:24.812084Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2
2025-03-31T15:59:24.812118Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547
2025-03-31T15:59:24.812162Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4
2025-03-31T15:59:24.812185Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549
2025-03-31T15:59:24.814317Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944
2025-03-31T15:59:24.814481Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944
2025-03-31T15:59:24.814540Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944
2025-03-31T15:59:24.814636Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-03-31T15:59:24.814934Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944
2025-03-31T15:59:24.817157Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944
TestModificationResult got TxId: 105, wait until txId: 105
TestWaitNotification wait txId: 105
2025-03-31T15:59:24.817471Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion
2025-03-31T15:59:24.817513Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105
2025-03-31T15:59:24.817976Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944
2025-03-31T15:59:24.818074Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult
2025-03-31T15:59:24.818117Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:652:2606]
TestWaitNotification: OK eventTxId 105
2025-03-31T15:59:24.818734Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/BSVolume" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-03-31T15:59:24.818960Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/BSVolume" took 269us result status StatusPathDoesNotExist
2025-03-31T15:59:24.819167Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/BSVolume\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0/BSVolume" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
2025-03-31T15:59:24.819804Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-03-31T15:59:24.819963Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 165us result status StatusPathDoesNotExist
2025-03-31T15:59:24.820102Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SchemeDatabaseQuotaRejects [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141]
2025-03-31T15:59:24.095033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-03-31T15:59:24.095171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-31T15:59:24.095205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-03-31T15:59:24.095237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-03-31T15:59:24.095300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-03-31T15:59:24.095356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-03-31T15:59:24.095444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-31T15:59:24.095579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-03-31T15:59:24.095912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-03-31T15:59:24.183880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-31T15:59:24.183931Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T15:59:24.188235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-03-31T15:59:24.188329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-03-31T15:59:24.188519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-03-31T15:59:24.192946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-03-31T15:59:24.193178Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-03-31T15:59:24.193842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-03-31T15:59:24.194051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-03-31T15:59:24.196809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-03-31T15:59:24.197985Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-31T15:59:24.198047Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-31T15:59:24.198221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-03-31T15:59:24.198271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-31T15:59:24.198310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-03-31T15:59:24.198454Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-03-31T15:59:24.204720Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062]
2025-03-31T15:59:24.319921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-03-31T15:59:24.320132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:24.320383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-03-31T15:59:24.320602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-03-31T15:59:24.320652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:24.322848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-03-31T15:59:24.322987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-03-31T15:59:24.323192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:24.323253Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-03-31T15:59:24.323304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-03-31T15:59:24.323349Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-03-31T15:59:24.325253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:24.325344Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-03-31T15:59:24.325380Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-03-31T15:59:24.327077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:24.327144Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:24.327210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-03-31T15:59:24.327267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-03-31T15:59:24.336831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-03-31T15:59:24.338627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-03-31T15:59:24.338816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-03-31T15:59:24.339823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-03-31T15:59:24.339965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-03-31T15:59:24.340028Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-03-31T15:59:24.340285Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-03-31T15:59:24.340331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-03-31T15:59:24.340476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-03-31T15:59:24.340632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-03-31T15:59:24.342600Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-31T15:59:24.342642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-31T15:59:24.342787Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-31T15:59:24.342826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1
2025-03-31T15:59:24.343072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:24.343138Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-03-31T15:59:24.343236Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-03-31T15:59:24.343266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-31T15:59:24.343301Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-03-31T15:59:24.343343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-31T15:59:24.343382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-03-31T15:59:24.343418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-31T15:59:24.343464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-03-31T15:59:24.343496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-03-31T15:59:24.343573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-03-31T15:59:24.343611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-03-31T15:59:24.343638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-03-31T15:59:24.345555Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-03-31T15:59:24.345664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-03-31T15:59:24.345697Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ...
31T15:59:24.918688Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 106
2025-03-31T15:59:24.918713Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615
2025-03-31T15:59:24.918736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4
2025-03-31T15:59:24.919596Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 106
2025-03-31T15:59:24.919694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 106
2025-03-31T15:59:24.919736Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 106
2025-03-31T15:59:24.919771Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615
2025-03-31T15:59:24.919802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5
2025-03-31T15:59:24.919873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 106, subscribers: 0
2025-03-31T15:59:24.926393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:5 hive 72057594037968897 at ss 72057594046678944
2025-03-31T15:59:24.926496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944
2025-03-31T15:59:24.926521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944
2025-03-31T15:59:24.926544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944
2025-03-31T15:59:24.926565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944
2025-03-31T15:59:24.927396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106
2025-03-31T15:59:24.929635Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 5 TabletID: 72075186233409550
2025-03-31T15:59:24.930032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046678944 ShardLocalIdx: 5, at schemeshard: 72057594046678944
2025-03-31T15:59:24.930412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3
Forgetting tablet 72075186233409550
2025-03-31T15:59:24.931994Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546
2025-03-31T15:59:24.932322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944
2025-03-31T15:59:24.932604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4
2025-03-31T15:59:24.934479Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548
2025-03-31T15:59:24.935792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106
Forgetting tablet 72075186233409546
2025-03-31T15:59:24.937121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944
2025-03-31T15:59:24.937403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2
2025-03-31T15:59:24.937988Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547
Forgetting tablet 72075186233409548
2025-03-31T15:59:24.939913Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549
Forgetting tablet 72075186233409547
2025-03-31T15:59:24.940275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944
2025-03-31T15:59:24.940493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3
Forgetting tablet 72075186233409549
2025-03-31T15:59:24.941893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944
2025-03-31T15:59:24.942140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1
2025-03-31T15:59:24.942849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944
2025-03-31T15:59:24.942901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944
2025-03-31T15:59:24.942976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2
2025-03-31T15:59:24.943332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106
2025-03-31T15:59:24.943586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944
2025-03-31T15:59:24.943633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944
2025-03-31T15:59:24.943745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1
2025-03-31T15:59:24.946762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5
2025-03-31T15:59:24.946812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550
2025-03-31T15:59:24.946892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1
2025-03-31T15:59:24.946939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546
2025-03-31T15:59:24.947482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3
2025-03-31T15:59:24.947507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548
2025-03-31T15:59:24.949195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2
2025-03-31T15:59:24.949242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547
2025-03-31T15:59:24.949387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4
2025-03-31T15:59:24.949424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549
2025-03-31T15:59:24.949616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944
2025-03-31T15:59:24.949704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944
2025-03-31T15:59:24.949763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944
2025-03-31T15:59:24.949801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944
2025-03-31T15:59:24.949864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-03-31T15:59:24.952252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944
TestModificationResult got TxId: 106, wait until txId: 106
TestWaitNotification wait txId: 106
2025-03-31T15:59:24.952629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion
2025-03-31T15:59:24.952674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106
2025-03-31T15:59:24.953455Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944
2025-03-31T15:59:24.953564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult
2025-03-31T15:59:24.953601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:917:2819]
TestWaitNotification: OK eventTxId 106
2025-03-31T15:59:24.954452Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-03-31T15:59:24.954687Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 236us result status StatusSuccess
2025-03-31T15:59:24.955109Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenForceDrop [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTablets [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141]
2025-03-31T15:59:24.766576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-03-31T15:59:24.766650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-31T15:59:24.766677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-03-31T15:59:24.766703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-03-31T15:59:24.766736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-03-31T15:59:24.766769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-03-31T15:59:24.766821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-31T15:59:24.766887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-03-31T15:59:24.767169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-03-31T15:59:24.839411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-31T15:59:24.839462Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T15:59:24.845877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-03-31T15:59:24.846011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-03-31T15:59:24.846272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-03-31T15:59:24.852152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-03-31T15:59:24.852405Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-03-31T15:59:24.853089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-03-31T15:59:24.853335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-03-31T15:59:24.855367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-03-31T15:59:24.856475Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-31T15:59:24.856545Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-31T15:59:24.856696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-03-31T15:59:24.856755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-31T15:59:24.856793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-03-31T15:59:24.856924Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-03-31T15:59:24.872117Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062]
2025-03-31T15:59:25.001998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-03-31T15:59:25.002239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:25.002477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-03-31T15:59:25.002703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-03-31T15:59:25.002782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:25.006695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-03-31T15:59:25.006837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-03-31T15:59:25.007048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:25.007126Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-03-31T15:59:25.007188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-03-31T15:59:25.007233Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-03-31T15:59:25.009174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:25.009240Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-03-31T15:59:25.009298Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-03-31T15:59:25.013357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:25.013423Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:25.013541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-03-31T15:59:25.013616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-03-31T15:59:25.017157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-03-31T15:59:25.018909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-03-31T15:59:25.019156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-03-31T15:59:25.019997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-03-31T15:59:25.020147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-03-31T15:59:25.020276Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-03-31T15:59:25.020535Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-03-31T15:59:25.020574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-03-31T15:59:25.020757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-03-31T15:59:25.020909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-03-31T15:59:25.022989Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-31T15:59:25.023059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-31T15:59:25.023236Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-31T15:59:25.023276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1
2025-03-31T15:59:25.023500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:25.023542Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-03-31T15:59:25.023632Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-03-31T15:59:25.023665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-31T15:59:25.023701Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-03-31T15:59:25.023764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-31T15:59:25.023802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-03-31T15:59:25.023880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-31T15:59:25.023932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-03-31T15:59:25.023963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-03-31T15:59:25.024022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-03-31T15:59:25.024061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-03-31T15:59:25.024095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-03-31T15:59:25.026030Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-03-31T15:59:25.026169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-03-31T15:59:25.026209Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ...
7594046316545
2025-03-31T15:59:25.083918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816
2025-03-31T15:59:25.084145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 100 at step: 5000002
FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002
2025-03-31T15:59:25.084581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944
2025-03-31T15:59:25.084717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-03-31T15:59:25.084777Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944
2025-03-31T15:59:25.085072Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 128 -> 240
2025-03-31T15:59:25.085136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944
2025-03-31T15:59:25.085377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-03-31T15:59:25.085459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2
2025-03-31T15:59:25.085524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944
2025-03-31T15:59:25.088047Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-31T15:59:25.088101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-31T15:59:25.088312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2]
2025-03-31T15:59:25.088430Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-31T15:59:25.088466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 100, path id: 1
2025-03-31T15:59:25.088507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 100, path id: 2
FAKE_COORDINATOR: Erasing txId 100
2025-03-31T15:59:25.088726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944
2025-03-31T15:59:25.088776Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 100:0 ProgressState
2025-03-31T15:59:25.088907Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1
2025-03-31T15:59:25.088951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1
2025-03-31T15:59:25.089020Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1
2025-03-31T15:59:25.089058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1
2025-03-31T15:59:25.089103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false
2025-03-31T15:59:25.089160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1
2025-03-31T15:59:25.089209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 100:0
2025-03-31T15:59:25.089249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 100:0
2025-03-31T15:59:25.089362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3
2025-03-31T15:59:25.089409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 100, publications: 2, subscribers: 0
2025-03-31T15:59:25.089452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5
2025-03-31T15:59:25.089483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 3
2025-03-31T15:59:25.090374Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100
2025-03-31T15:59:25.090476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100
2025-03-31T15:59:25.090518Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100
2025-03-31T15:59:25.090563Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5
2025-03-31T15:59:25.090610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-03-31T15:59:25.091860Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100
2025-03-31T15:59:25.091980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100
2025-03-31T15:59:25.092023Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100
2025-03-31T15:59:25.092061Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3
2025-03-31T15:59:25.092103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2
2025-03-31T15:59:25.092208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0
2025-03-31T15:59:25.098752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100
2025-03-31T15:59:25.100101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100
TestModificationResult got TxId: 100, wait until txId: 100
TestWaitNotification wait txId: 100
2025-03-31T15:59:25.100407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion
2025-03-31T15:59:25.100457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100
2025-03-31T15:59:25.100974Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944
2025-03-31T15:59:25.101116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult
2025-03-31T15:59:25.101156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:307:2298]
TestWaitNotification: OK eventTxId 100
2025-03-31T15:59:25.101848Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-03-31T15:59:25.102138Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 267us result status StatusSuccess
2025-03-31T15:59:25.102650Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
2025-03-31T15:59:25.103319Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-03-31T15:59:25.103532Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 212us result status StatusSuccess
2025-03-31T15:59:25.103946Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TSchemeShardSubDomainTest::SimultaneousCreateTableForceDrop
>> TSchemeShardSubDomainTest::SchemeLimitsRejectsWithIndexedTables [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-50 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-51
>> DataShardSnapshots::LockedWriteDistributedCommitCrossConflict-UseSink [GOOD]
>> DataShardSnapshots::LockedWriteWithAsyncIndex-WithRestart-UseSink
>> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenDrop [GOOD]
>> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-true [GOOD]
>> TSchemeShardSubDomainTest::CreateDropNbs
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SchemeLimitsRejects [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140]
2025-03-31T15:59:21.270401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-03-31T15:59:21.270506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-31T15:59:21.270543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-03-31T15:59:21.270581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-03-31T15:59:21.270641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-03-31T15:59:21.270671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-03-31T15:59:21.270733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-31T15:59:21.270811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-03-31T15:59:21.271163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-03-31T15:59:21.372505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-31T15:59:21.372571Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T15:59:21.382000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-03-31T15:59:21.382403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-03-31T15:59:21.382613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-03-31T15:59:21.390410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-03-31T15:59:21.390720Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-03-31T15:59:21.391428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-03-31T15:59:21.391787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-03-31T15:59:21.393919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-03-31T15:59:21.395280Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-31T15:59:21.395352Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-31T15:59:21.395460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-03-31T15:59:21.395505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-31T15:59:21.395542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-03-31T15:59:21.395766Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-03-31T15:59:21.403459Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062]
2025-03-31T15:59:21.535904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-03-31T15:59:21.536143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:21.536364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-03-31T15:59:21.536603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-03-31T15:59:21.536659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:21.539971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-03-31T15:59:21.540131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-03-31T15:59:21.540346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:21.540395Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-03-31T15:59:21.540451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-03-31T15:59:21.540493Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-03-31T15:59:21.542960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:21.543040Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-03-31T15:59:21.543110Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-03-31T15:59:21.545609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:21.545680Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:21.545744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-03-31T15:59:21.545805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-03-31T15:59:21.549564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-03-31T15:59:21.555826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-03-31T15:59:21.556050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-03-31T15:59:21.557196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-03-31T15:59:21.557365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-03-31T15:59:21.557453Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan,
operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:21.557757Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T15:59:21.557809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:21.557974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:59:21.558091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:59:21.560772Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:21.560832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:21.561066Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:21.561111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T15:59:21.561543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:21.561591Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T15:59:21.561715Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:21.561769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:21.561810Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:21.561838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:21.561875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T15:59:21.561921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:21.561972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T15:59:21.562011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T15:59:21.562090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:59:21.562127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T15:59:21.562157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T15:59:21.564082Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:21.564203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:21.564240Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
0591Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 139 2025-03-31T15:59:25.120623Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 139, pathId: [OwnerId: 72057594046678944, LocalPathId: 16], version: 18446744073709551615 2025-03-31T15:59:25.120655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 16] was 4 2025-03-31T15:59:25.122371Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 139 2025-03-31T15:59:25.122478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 139 2025-03-31T15:59:25.122510Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 139 2025-03-31T15:59:25.122540Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 139, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-03-31T15:59:25.122572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-03-31T15:59:25.122656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 139, subscribers: 0 2025-03-31T15:59:25.123837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-03-31T15:59:25.123878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:15 hive 72057594037968897 at ss 72057594046678944 2025-03-31T15:59:25.123895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:14 hive 72057594037968897 at ss 72057594046678944 2025-03-31T15:59:25.123931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-03-31T15:59:25.123954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:16 hive 72057594037968897 at ss 72057594046678944 2025-03-31T15:59:25.124533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 139 2025-03-31T15:59:25.125322Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-03-31T15:59:25.125598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:25.125831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-31T15:59:25.126635Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 15 TxId_Deprecated: 15 TabletID: 72075186233409556 2025-03-31T15:59:25.127874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 15 ShardOwnerId: 72057594046678944 ShardLocalIdx: 15, at schemeshard: 
72057594046678944 2025-03-31T15:59:25.128131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 16] was 3 Forgetting tablet 72075186233409546 Forgetting tablet 72075186233409556 2025-03-31T15:59:25.129800Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 14 TxId_Deprecated: 14 TabletID: 72075186233409555 2025-03-31T15:59:25.131305Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-03-31T15:59:25.131682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 14 ShardOwnerId: 72057594046678944 ShardLocalIdx: 14, at schemeshard: 72057594046678944 2025-03-31T15:59:25.131922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 16] was 2 2025-03-31T15:59:25.132268Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 16 TxId_Deprecated: 16 TabletID: 72075186233409557 2025-03-31T15:59:25.133590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-03-31T15:59:25.133811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409555 2025-03-31T15:59:25.134972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 16 ShardOwnerId: 72057594046678944 ShardLocalIdx: 16, at schemeshard: 72057594046678944 2025-03-31T15:59:25.135199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 16] was 1 Forgetting tablet 72075186233409547 Forgetting tablet 72075186233409557 2025-03-31T15:59:25.136997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-31T15:59:25.137062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 16], at schemeshard: 72057594046678944 2025-03-31T15:59:25.137150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-31T15:59:25.138140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 139 2025-03-31T15:59:25.138442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-31T15:59:25.138496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-31T15:59:25.138639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-31T15:59:25.138952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 139 2025-03-31T15:59:25.139568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 
72057594046678944:1 2025-03-31T15:59:25.139620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-03-31T15:59:25.139730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:15 2025-03-31T15:59:25.139756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:15 tabletId 72075186233409556 2025-03-31T15:59:25.142123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:14 2025-03-31T15:59:25.142166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:14 tabletId 72075186233409555 2025-03-31T15:59:25.142263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-03-31T15:59:25.142288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-03-31T15:59:25.142347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:16 2025-03-31T15:59:25.142383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:16 tabletId 72075186233409557 2025-03-31T15:59:25.144474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2025-03-31T15:59:25.144576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-31T15:59:25.144611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-31T15:59:25.144677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:59:25.144890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-03-31T15:59:25.146313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 139, wait until txId: 139 TestWaitNotification wait txId: 139 2025-03-31T15:59:25.147119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 139: send EvNotifyTxCompletion 2025-03-31T15:59:25.147183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 139 2025-03-31T15:59:25.148304Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 139, at schemeshard: 72057594046678944 2025-03-31T15:59:25.148408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 139: got EvNotifyTxCompletionResult 2025-03-31T15:59:25.148443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 139: satisfy waiter [1:2267:4041] TestWaitNotification: OK eventTxId 139 2025-03-31T15:59:25.149701Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:59:25.149864Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 182us result status StatusSuccess 2025-03-31T15:59:25.150167Z node 1 
:SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 5 ShardsInside: 0 ShardsLimit: 6 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 20 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
|69.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenForceDrop [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141]
2025-03-31T15:59:25.112890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-03-31T15:59:25.112983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-31T15:59:25.113026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-03-31T15:59:25.113069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-03-31T15:59:25.113119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-03-31T15:59:25.113164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-03-31T15:59:25.113238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-31T15:59:25.113347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-03-31T15:59:25.113679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-03-31T15:59:25.210025Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
Cannot subscribe to console configs 2025-03-31T15:59:25.210098Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:25.215595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:59:25.215710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T15:59:25.215911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T15:59:25.223628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T15:59:25.223956Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T15:59:25.224722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:25.224998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:59:25.227360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:25.228675Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:25.228741Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:25.228937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T15:59:25.229007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:25.229060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T15:59:25.229236Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T15:59:25.237367Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T15:59:25.381947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:59:25.382203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:25.382438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T15:59:25.382643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T15:59:25.382706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:25.388033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:25.388199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, 
path: //MyRoot 2025-03-31T15:59:25.388390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:25.388443Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T15:59:25.388491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T15:59:25.388526Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T15:59:25.392970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:25.393048Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:59:25.393089Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T15:59:25.397578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:25.397671Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:25.397736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:25.397795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T15:59:25.401672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:59:25.410157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T15:59:25.410396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T15:59:25.411516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:25.411673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:25.411739Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:25.412037Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T15:59:25.412090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:25.412276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:59:25.412410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T15:59:25.420055Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:25.420117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:25.420331Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:25.420376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T15:59:25.420671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:25.420745Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T15:59:25.420853Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:25.420890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:25.420928Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:25.420960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:25.421001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T15:59:25.421044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:25.421105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T15:59:25.421141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T15:59:25.421221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:59:25.421276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T15:59:25.421332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T15:59:25.423540Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:25.423677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:25.423720Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
TDropForceUnsafe TPropose, operationId: 101:0 HandleReply TEvOperationPlan, step: 5000003, at schemeshard: 72057594046678944 2025-03-31T15:59:25.545235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 2] name: USER_0 type: EPathTypeSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 101 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:25.545291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T15:59:25.545415Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 130 2025-03-31T15:59:25.545662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:59:25.545739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-31T15:59:25.547413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-31T15:59:25.548232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 FAKE_COORDINATOR: Erasing txId 101 2025-03-31T15:59:25.549462Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:25.549502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:25.549626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T15:59:25.549755Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:25.549806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-03-31T15:59:25.549873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-03-31T15:59:25.550051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T15:59:25.550096Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 101:0 ProgressState 2025-03-31T15:59:25.550213Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-31T15:59:25.550278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T15:59:25.550324Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-31T15:59:25.550359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T15:59:25.550401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-03-31T15:59:25.550442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T15:59:25.550479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-03-31T15:59:25.550513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-03-31T15:59:25.550590Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-31T15:59:25.550629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-03-31T15:59:25.550665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-03-31T15:59:25.550704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-03-31T15:59:25.551505Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T15:59:25.551589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T15:59:25.551621Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-03-31T15:59:25.551664Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-03-31T15:59:25.551714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:59:25.552281Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T15:59:25.552369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T15:59:25.552410Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-03-31T15:59:25.552443Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-03-31T15:59:25.552494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-31T15:59:25.552661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-03-31T15:59:25.553155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-31T15:59:25.553202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-31T15:59:25.553314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-31T15:59:25.553795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-31T15:59:25.553845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# 
[OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-31T15:59:25.553910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:59:25.557451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-31T15:59:25.557896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-31T15:59:25.559216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-03-31T15:59:25.559326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-03-31T15:59:25.559551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-03-31T15:59:25.559590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-03-31T15:59:25.560065Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-03-31T15:59:25.560166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-31T15:59:25.560203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:339:2330] TestWaitNotification: OK eventTxId 101 2025-03-31T15:59:25.560702Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:59:25.560893Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 232us result status StatusPathDoesNotExist 2025-03-31T15:59:25.561102Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-03-31T15:59:25.561680Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:59:25.561970Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 256us result status StatusSuccess 2025-03-31T15:59:25.562399Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TSchemeShardSubDomainTest::Delete [GOOD]
>> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenMkDir [GOOD]
>> TSchemeShardSubDomainTest::CreateDropSolomon
>> TSchemeShardSubDomainTest::CreateSubDomainWithoutSomeTablets
>> TSchemeShardSubDomainTest::SchemeQuotas [GOOD]
>> TNodeBrokerTest::ResolveScopeIdForServerless
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenDrop [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141]
2025-03-31T15:59:25.507775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-03-31T15:59:25.507893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-31T15:59:25.507941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-03-31T15:59:25.507974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-03-31T15:59:25.508033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-03-31T15:59:25.508081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-03-31T15:59:25.508148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-31T15:59:25.508246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, 
DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T15:59:25.508576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:59:25.607533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:59:25.607596Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:25.621011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:59:25.621134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T15:59:25.621331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T15:59:25.633684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T15:59:25.633953Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T15:59:25.634717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:25.634964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:59:25.637275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:25.638545Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:25.638631Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:25.638845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T15:59:25.638897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:25.638943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T15:59:25.639186Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T15:59:25.647220Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T15:59:25.793055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:59:25.793306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:25.793579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T15:59:25.793858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T15:59:25.793924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:25.796474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: 
StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:25.796631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T15:59:25.796889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:25.796951Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T15:59:25.796996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T15:59:25.797031Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T15:59:25.799419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:25.799500Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:59:25.799542Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T15:59:25.801705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:25.801778Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:25.801851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:25.801915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T15:59:25.805780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:59:25.809098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T15:59:25.809338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T15:59:25.810354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:25.810500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:25.810573Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:25.810824Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T15:59:25.810873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 
72057594046678944 2025-03-31T15:59:25.811034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:59:25.811180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T15:59:25.813564Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:25.813609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:25.813774Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:25.813809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T15:59:25.814038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:25.814085Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T15:59:25.814187Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:25.814222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:25.814256Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:25.814282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:25.814314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T15:59:25.814352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:25.814400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T15:59:25.814451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T15:59:25.814525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:59:25.814569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T15:59:25.814600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T15:59:25.816554Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:25.816672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:25.816712Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
O: TDropSubdomain TPropose operationId# 101:0 HandleReply TEvOperationPlan, step: 5000003, at schemeshard: 72057594046678944 2025-03-31T15:59:25.903222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 2] name: USER_0 type: EPathTypeSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 101 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:25.903261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T15:59:25.903433Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 130 2025-03-31T15:59:25.903558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:59:25.903615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-31T15:59:25.905274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-31T15:59:25.905975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 FAKE_COORDINATOR: Erasing txId 101 2025-03-31T15:59:25.906949Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:25.906977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:25.908210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T15:59:25.908423Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:25.908473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-03-31T15:59:25.908520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-03-31T15:59:25.908708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T15:59:25.908750Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 101:0 ProgressState 2025-03-31T15:59:25.908803Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-31T15:59:25.908852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T15:59:25.908907Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-31T15:59:25.908940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T15:59:25.908978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-03-31T15:59:25.909017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T15:59:25.909063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-03-31T15:59:25.909109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-03-31T15:59:25.909196Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-31T15:59:25.909269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-03-31T15:59:25.909320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-03-31T15:59:25.909359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-03-31T15:59:25.910146Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T15:59:25.910244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T15:59:25.910277Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-03-31T15:59:25.910316Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-03-31T15:59:25.910370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:59:25.910937Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T15:59:25.911030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T15:59:25.911061Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-03-31T15:59:25.911105Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-03-31T15:59:25.911135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-31T15:59:25.911222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-03-31T15:59:25.911684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-31T15:59:25.911727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-31T15:59:25.911818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-31T15:59:25.912279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-31T15:59:25.912331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# 
[OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-31T15:59:25.912393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:59:25.916108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-31T15:59:25.916512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-31T15:59:25.917947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-03-31T15:59:25.918068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-03-31T15:59:25.918337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-03-31T15:59:25.918380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-03-31T15:59:25.918819Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-03-31T15:59:25.918911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-31T15:59:25.918960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:339:2330] TestWaitNotification: OK eventTxId 101 2025-03-31T15:59:25.919509Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:59:25.919763Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 272us result status StatusPathDoesNotExist 2025-03-31T15:59:25.919951Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-03-31T15:59:25.920536Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:59:25.920713Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 185us result status StatusSuccess 2025-03-31T15:59:25.921316Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SchemeLimitsRejectsWithIndexedTables [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T15:59:24.675565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T15:59:24.675658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:24.675698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T15:59:24.675731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T15:59:24.675781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T15:59:24.675820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T15:59:24.675882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:24.676029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T15:59:24.676344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:59:24.761521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:59:24.761575Z node 1 :IMPORT WARN: Table profiles 
were not loaded 2025-03-31T15:59:24.769100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:59:24.769212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T15:59:24.769413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T15:59:24.775888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T15:59:24.776169Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T15:59:24.777007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:24.777246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:59:24.779395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:24.780606Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:24.780677Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:24.780869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T15:59:24.780917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:24.780956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T15:59:24.781126Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T15:59:24.788661Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T15:59:24.928790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:59:24.929046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:24.929317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T15:59:24.929590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T15:59:24.929664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:24.932791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:24.932934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T15:59:24.933142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress 
Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:24.933196Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T15:59:24.933248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T15:59:24.933303Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T15:59:24.935666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:24.935752Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:59:24.935797Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T15:59:24.937977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:24.938035Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:24.938103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:24.938158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T15:59:24.941937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:59:24.945358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T15:59:24.945606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T15:59:24.946830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:24.947003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:24.947106Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:24.947385Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T15:59:24.947443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:24.947628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:59:24.947746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at 
schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T15:59:24.951560Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:24.951613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:24.951794Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:24.951838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T15:59:24.952082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:24.952130Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T15:59:24.952232Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:24.952266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:24.952306Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:24.952338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:24.952374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T15:59:24.952412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:24.952500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T15:59:24.952533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T15:59:24.952655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:59:24.952697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T15:59:24.952727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T15:59:24.954691Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:24.954811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:24.954854Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
, at schemeshard: 72057594046678944, message: Source { RawX1: 603 RawX2: 4294969838 } Origin: 72075186233409548 State: 2 TxId: 107 Step: 0 Generation: 2 2025-03-31T15:59:25.747116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 107, tablet: 72075186233409548, partId: 0 2025-03-31T15:59:25.747251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 107:0, at schemeshard: 72057594046678944, message: Source { RawX1: 603 RawX2: 4294969838 } Origin: 72075186233409548 State: 2 TxId: 107 Step: 0 Generation: 2 2025-03-31T15:59:25.747322Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 107:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-03-31T15:59:25.747430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 107:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 603 RawX2: 4294969838 } Origin: 72075186233409548 State: 2 TxId: 107 Step: 0 Generation: 2 2025-03-31T15:59:25.747501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 107:0, shardIdx: 72057594046678944:3, datashard: 72075186233409548, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:25.747549Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 107:0, at schemeshard: 72057594046678944 2025-03-31T15:59:25.747583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 107:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2025-03-31T15:59:25.747622Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 129 -> 240 2025-03-31T15:59:25.752197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 606 RawX2: 4294969839 } Origin: 72075186233409549 State: 2 TxId: 107 Step: 0 Generation: 2 2025-03-31T15:59:25.752268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 107, tablet: 72075186233409549, partId: 2 2025-03-31T15:59:25.752459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 107:2, at schemeshard: 72057594046678944, message: Source { RawX1: 606 RawX2: 4294969839 } Origin: 72075186233409549 State: 2 TxId: 107 Step: 0 Generation: 2 2025-03-31T15:59:25.752534Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 107:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-03-31T15:59:25.752624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 107:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 606 RawX2: 4294969839 } Origin: 72075186233409549 State: 2 TxId: 107 Step: 0 Generation: 2 2025-03-31T15:59:25.752679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 107:2, shardIdx: 72057594046678944:4, datashard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:25.752715Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 107:2, at schemeshard: 72057594046678944 2025-03-31T15:59:25.752773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 107:2, datashard: 72075186233409549, at 
schemeshard: 72057594046678944 2025-03-31T15:59:25.752819Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:2 129 -> 240 2025-03-31T15:59:25.754540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2025-03-31T15:59:25.754647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2025-03-31T15:59:25.772763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2025-03-31T15:59:25.773015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2025-03-31T15:59:25.773123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2025-03-31T15:59:25.773276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:2, at schemeshard: 72057594046678944 2025-03-31T15:59:25.775858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2025-03-31T15:59:25.776091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2025-03-31T15:59:25.776142Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 107:0 ProgressState 2025-03-31T15:59:25.776273Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#107:0 progress is 2/3 2025-03-31T15:59:25.776312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 2/3 2025-03-31T15:59:25.776356Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#107:0 progress is 2/3 2025-03-31T15:59:25.776397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 2/3 2025-03-31T15:59:25.776464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 2/3, is published: true 2025-03-31T15:59:25.777023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:2, at schemeshard: 72057594046678944 2025-03-31T15:59:25.777392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:2, at schemeshard: 72057594046678944 2025-03-31T15:59:25.777441Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 107:2 ProgressState 2025-03-31T15:59:25.777518Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#107:2 progress is 3/3 2025-03-31T15:59:25.777551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 3/3 2025-03-31T15:59:25.777596Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#107:2 progress is 3/3 2025-03-31T15:59:25.777637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 3/3 2025-03-31T15:59:25.777673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 3/3, is published: true 2025-03-31T15:59:25.777760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:478:2426] message: TxId: 107 2025-03-31T15:59:25.777811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 3/3 2025-03-31T15:59:25.777888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 107:0 2025-03-31T15:59:25.777940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 107:0 2025-03-31T15:59:25.778069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-03-31T15:59:25.778111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 107:1 2025-03-31T15:59:25.778133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 107:1 2025-03-31T15:59:25.778164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-03-31T15:59:25.778184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 107:2 2025-03-31T15:59:25.778217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 107:2 2025-03-31T15:59:25.778270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-03-31T15:59:25.782002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2025-03-31T15:59:25.782109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:534:2482] TestWaitNotification: OK eventTxId 107 TestModificationResults wait txId: 108 2025-03-31T15:59:25.786435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_0" OperationType: ESchemeOpCreateIndexedTable CreateIndexedTable { TableDescription { Name: "Table7" Columns { Name: "RowId" Type: "Uint64" } Columns { Name: "Value0" Type: "Utf8" } Columns { Name: "Value1" Type: "Utf8" } Columns { Name: "Value2" Type: "Utf8" } Columns { Name: "Value3" Type: "Utf8" } Columns { Name: "Value4" Type: "Utf8" } KeyColumnNames: "RowId" } IndexDescription { Name: "UserDefinedIndexByValue0" KeyColumnNames: "Value0" } IndexDescription { Name: "UserDefinedIndexByValue1" KeyColumnNames: "Value1" } IndexDescription { Name: "UserDefinedIndexByValue2" KeyColumnNames: "Value2" } IndexDescription { Name: "UserDefinedIndexByValue3" KeyColumnNames: "Value3" } IndexDescription { Name: "UserDefinedIndexByValue4" KeyColumnNames: "Value4" } } } TxId: 108 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:59:25.787062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTableIndex construct operation table path: /MyRoot/USER_0/Table7 domain path id: [OwnerId: 72057594046678944, LocalPathId: 2] domain path: /MyRoot/USER_0 shardsToCreate: 6 GetShardsInside: 4 MaxShards: 7 2025-03-31T15:59:25.787192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 108:0, explain: indexes count has reached maximum value in the table, children limit for dir in domain: 4, intention to create new children: 5, at schemeshard: 72057594046678944 2025-03-31T15:59:25.787239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 108:1, propose status:StatusResourceExhausted, reason: indexes count has reached maximum value in the table, children limit for dir in domain: 4, intention to create new children: 5, at schemeshard: 72057594046678944 2025-03-31T15:59:25.790521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 108, response: Status: StatusResourceExhausted Reason: "indexes count has reached maximum value in the table, children limit for dir in domain: 4, intention to create new children: 5" TxId: 108 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:25.790724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 108, database: 
/MyRoot/USER_0, subject: , status: StatusResourceExhausted, reason: indexes count has reached maximum value in the table, children limit for dir in domain: 4, intention to create new children: 5, operation: CREATE TABLE WITH INDEXES, path: /MyRoot/USER_0/Table7 TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 2025-03-31T15:59:25.791267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: send EvNotifyTxCompletion 2025-03-31T15:59:25.791335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 108 2025-03-31T15:59:25.791846Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 108, at schemeshard: 72057594046678944 2025-03-31T15:59:25.791969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2025-03-31T15:59:25.792026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [1:732:2647] TestWaitNotification: OK eventTxId 108 |69.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::ListNodesCacheWhenNoChanges |69.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks |69.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks |69.3%| [TA] {RESULT} $(B)/ydb/core/backup/impl/ut_table_writer/test-results/unittest/{meta.json ... results_accumulator.log} |69.3%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_upload_rows/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardSubDomainTest::DeleteAndRestart [GOOD] >> TTenantPoolTests::TestForcedSensorLabelsForStaticConfig >> TSchemeShardSubDomainTest::CreateForceDropSolomon [GOOD] |69.3%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/test-results/unittest/{meta.json ... 
results_accumulator.log} |69.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T15:59:20.733602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T15:59:20.733715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:20.733757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-03-31T15:59:20.733793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T15:59:20.733836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T15:59:20.733881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T15:59:20.733945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:20.734037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T15:59:20.734333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:59:20.824997Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:59:20.825067Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:20.830115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:59:20.830246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T15:59:20.830449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T15:59:20.835696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T15:59:20.835977Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T15:59:20.836672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:20.836921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:59:20.839065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:20.840308Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:20.840376Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 
72057594046678944 2025-03-31T15:59:20.840597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T15:59:20.840644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:20.840683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T15:59:20.840866Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T15:59:20.849541Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T15:59:20.981316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:59:20.981589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:20.981812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T15:59:20.982085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T15:59:20.982143Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:20.985250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:20.985412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T15:59:20.985611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:20.985690Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T15:59:20.985741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T15:59:20.985776Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T15:59:20.987927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:20.988011Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:59:20.988049Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T15:59:20.989939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:20.989983Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:20.990051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:20.990109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T15:59:20.999654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:59:21.001923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T15:59:21.002115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T15:59:21.003241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:21.003393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:21.003469Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:21.003730Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T15:59:21.003786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:21.003960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:59:21.004071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T15:59:21.006444Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:21.006488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:21.006710Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:21.006752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T15:59:21.007009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:21.007062Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T15:59:21.007192Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:21.007229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:21.007267Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:21.007296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:21.007341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T15:59:21.007385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:21.007442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T15:59:21.007472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T15:59:21.007549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:59:21.007584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T15:59:21.007612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T15:59:21.009557Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:21.009674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:21.009704Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, coun ... ed, tabletId: 72075186233409546, at schemeshard: 72075186233409546, message: Source { RawX1: 520 RawX2: 4294969763 } Origin: 72075186233409549 State: 5 TxId: 104 Step: 0 Generation: 2 2025-03-31T15:59:26.095393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409549, partId: 0 2025-03-31T15:59:26.095608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72075186233409546, message: Source { RawX1: 520 RawX2: 4294969763 } Origin: 72075186233409549 State: 5 TxId: 104 Step: 0 Generation: 2 2025-03-31T15:59:26.095676Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 104:0 HandleReply TEvDataShard::TEvSchemaChanged, save it, at schemeshard: 72075186233409546 2025-03-31T15:59:26.096185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2025-03-31T15:59:26.096247Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 104:0 ProgressState, operation type: TxDropTable, at tablet# 72075186233409546 2025-03-31T15:59:26.096302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 104:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2025-03-31T15:59:26.096362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 104, done: 0, blocked: 1 2025-03-31T15:59:26.096503Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 104:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 104 Name: RenamePathBarrier }, at tablet# 72075186233409546 2025-03-31T15:59:26.096647Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 137 -> 129 2025-03-31T15:59:26.096794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason 
publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 4 2025-03-31T15:59:26.096856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 3 2025-03-31T15:59:26.099598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72075186233409546 2025-03-31T15:59:26.103150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72075186233409546 2025-03-31T15:59:26.103462Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-03-31T15:59:26.103547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-03-31T15:59:26.103726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 2] 2025-03-31T15:59:26.103898Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-03-31T15:59:26.103948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:445:2397], at schemeshard: 72075186233409546, txId: 104, path id: 1 2025-03-31T15:59:26.104016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:445:2397], at schemeshard: 72075186233409546, txId: 104, path id: 2 2025-03-31T15:59:26.104094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2025-03-31T15:59:26.104147Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 104:0 ProgressState at tablet: 72075186233409546 2025-03-31T15:59:26.104264Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 104:0, at schemeshard: 72075186233409546 2025-03-31T15:59:26.104305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 104:0, datashard: 72075186233409549, at schemeshard: 72075186233409546 2025-03-31T15:59:26.104352Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 129 -> 240 2025-03-31T15:59:26.106513Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72075186233409546, cookie: 104 2025-03-31T15:59:26.106690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72075186233409546, cookie: 104 2025-03-31T15:59:26.106738Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2025-03-31T15:59:26.106783Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 1], version: 9 2025-03-31T15:59:26.106825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 5 2025-03-31T15:59:26.107997Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2025-03-31T15:59:26.108109Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2025-03-31T15:59:26.108145Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2025-03-31T15:59:26.108177Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 2], version: 18446744073709551615 2025-03-31T15:59:26.108249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 4 2025-03-31T15:59:26.108333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-03-31T15:59:26.110984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2025-03-31T15:59:26.111054Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 104:0 ProgressState, at schemeshard: 72075186233409546 2025-03-31T15:59:26.111498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 3 2025-03-31T15:59:26.111695Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-03-31T15:59:26.111752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-31T15:59:26.111842Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-03-31T15:59:26.111883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-31T15:59:26.111942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2025-03-31T15:59:26.112010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:550:2489] message: TxId: 104 2025-03-31T15:59:26.112053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-31T15:59:26.112089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-03-31T15:59:26.112122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-03-31T15:59:26.112229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 2 2025-03-31T15:59:26.112741Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-03-31T15:59:26.112781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 0, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-03-31T15:59:26.113373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2025-03-31T15:59:26.115395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2025-03-31T15:59:26.115593Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-03-31T15:59:26.115641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:445:2397], at schemeshard: 72075186233409546, txId: 0, path id: 1 
2025-03-31T15:59:26.115732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult
2025-03-31T15:59:26.115772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:752:2669]
2025-03-31T15:59:26.116595Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72075186233409546, cookie: 0
TestWaitNotification: OK eventTxId 104
2025-03-31T15:59:26.117783Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409546
2025-03-31T15:59:26.118042Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186233409546 describe path "/MyRoot/SomeDatabase" took 242us result status StatusSuccess
2025-03-31T15:59:26.118539Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "MyRoot/SomeDatabase" PathId: 1 SchemeshardId: 72075186233409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 10 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 10 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 2 SubDomainStateVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 2 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "quoted_storage_pool" Kind: "quoted_storage_pool_kind" } StoragePools { Name: "unquoted_storage_pool" Kind: "unquoted_storage_pool_kind" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "unquoted_storage_pool_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "quoted_storage_pool_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { storage_quotas { unit_kind: "quoted_storage_pool_kind" data_size_hard_quota: 1 } } SecurityState { Audience: "/MyRoot/SomeDatabase" } } } PathId: 1 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenMkDir [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141]
2025-03-31T15:59:26.054763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-03-31T15:59:26.054884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-31T15:59:26.054929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-03-31T15:59:26.054973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-03-31T15:59:26.055031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-03-31T15:59:26.055107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-03-31T15:59:26.055188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-31T15:59:26.055290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-03-31T15:59:26.055671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-03-31T15:59:26.151480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-31T15:59:26.151545Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T15:59:26.156611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-03-31T15:59:26.156724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-03-31T15:59:26.156912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-03-31T15:59:26.164240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-03-31T15:59:26.164500Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-03-31T15:59:26.165137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-03-31T15:59:26.165387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-03-31T15:59:26.167406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-03-31T15:59:26.168576Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-31T15:59:26.168642Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-31T15:59:26.168827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-03-31T15:59:26.168884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-31T15:59:26.168924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-03-31T15:59:26.169081Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-03-31T15:59:26.177462Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062]
2025-03-31T15:59:26.340697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-03-31T15:59:26.340926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:26.341156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-03-31T15:59:26.341400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-03-31T15:59:26.341459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:26.344737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-03-31T15:59:26.344910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-03-31T15:59:26.345143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:26.345199Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-03-31T15:59:26.345243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-03-31T15:59:26.345296Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-03-31T15:59:26.351993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:26.352090Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-03-31T15:59:26.352128Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-03-31T15:59:26.355049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:26.355135Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:26.355232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-03-31T15:59:26.355304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-03-31T15:59:26.358918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-03-31T15:59:26.361989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-03-31T15:59:26.362213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-03-31T15:59:26.363348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-03-31T15:59:26.363501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-03-31T15:59:26.363583Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-03-31T15:59:26.363837Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-03-31T15:59:26.363891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-03-31T15:59:26.364083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-03-31T15:59:26.364220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-03-31T15:59:26.366767Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-31T15:59:26.366816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-31T15:59:26.366986Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-31T15:59:26.367042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1
2025-03-31T15:59:26.367318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:26.367365Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-03-31T15:59:26.367467Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-03-31T15:59:26.367520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-31T15:59:26.367560Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-03-31T15:59:26.367591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-31T15:59:26.367625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-03-31T15:59:26.367665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-31T15:59:26.367715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-03-31T15:59:26.367748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-03-31T15:59:26.367831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-03-31T15:59:26.367866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-03-31T15:59:26.367896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-03-31T15:59:26.369954Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-03-31T15:59:26.370072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-03-31T15:59:26.370111Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101
2025-03-31T15:59:26.447385Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-31T15:59:26.447425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2]
2025-03-31T15:59:26.447577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 3]
2025-03-31T15:59:26.447659Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-31T15:59:26.447690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2
2025-03-31T15:59:26.447724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 3
FAKE_COORDINATOR: Erasing txId 101
2025-03-31T15:59:26.447931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944
2025-03-31T15:59:26.447971Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState
2025-03-31T15:59:26.448071Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1
2025-03-31T15:59:26.448121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1
2025-03-31T15:59:26.448171Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1
2025-03-31T15:59:26.448201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1
2025-03-31T15:59:26.448249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false
2025-03-31T15:59:26.448294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1
2025-03-31T15:59:26.448324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0
2025-03-31T15:59:26.448354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0
2025-03-31T15:59:26.448416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2
2025-03-31T15:59:26.448451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0
2025-03-31T15:59:26.448484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 5
2025-03-31T15:59:26.448513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 3], 3
2025-03-31T15:59:26.449214Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 101
2025-03-31T15:59:26.449336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 101
2025-03-31T15:59:26.449374Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101
2025-03-31T15:59:26.449412Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5
2025-03-31T15:59:26.449447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3
2025-03-31T15:59:26.450394Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101
2025-03-31T15:59:26.450472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101
2025-03-31T15:59:26.450508Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101
2025-03-31T15:59:26.450539Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3
2025-03-31T15:59:26.450565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1
2025-03-31T15:59:26.450633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0
2025-03-31T15:59:26.453199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101
2025-03-31T15:59:26.454462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101
TestModificationResult got TxId: 101, wait until txId: 101
TestWaitNotification wait txId: 101
2025-03-31T15:59:26.454673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion
2025-03-31T15:59:26.454711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101
2025-03-31T15:59:26.455160Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944
2025-03-31T15:59:26.455257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult
2025-03-31T15:59:26.455290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:333:2324]
TestWaitNotification: OK eventTxId 101
2025-03-31T15:59:26.455842Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-03-31T15:59:26.456025Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 223us result status StatusSuccess
2025-03-31T15:59:26.456441Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
2025-03-31T15:59:26.456937Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-03-31T15:59:26.457086Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 146us result status StatusSuccess
2025-03-31T15:59:26.457423Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "MyDir" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
2025-03-31T15:59:26.457855Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/MyDir" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-03-31T15:59:26.457984Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/MyDir" took 142us result status StatusSuccess
2025-03-31T15:59:26.458228Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/MyDir" PathDescription { Self { Name: "MyDir" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::Delete [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141]
2025-03-31T15:59:25.775861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-03-31T15:59:25.775971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-31T15:59:25.776024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-03-31T15:59:25.776064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-03-31T15:59:25.776118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-03-31T15:59:25.776164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-03-31T15:59:25.776242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-31T15:59:25.776344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-03-31T15:59:25.776744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-03-31T15:59:25.869451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-31T15:59:25.869506Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T15:59:25.876226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-03-31T15:59:25.876333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-03-31T15:59:25.876552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-03-31T15:59:25.890025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-03-31T15:59:25.890314Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-03-31T15:59:25.890978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-03-31T15:59:25.891244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-03-31T15:59:25.893939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-03-31T15:59:25.895162Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-31T15:59:25.895218Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-31T15:59:25.895441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-03-31T15:59:25.895481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-31T15:59:25.895528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-03-31T15:59:25.895680Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-03-31T15:59:25.908630Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062]
2025-03-31T15:59:26.057270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-03-31T15:59:26.057491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:26.057720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-03-31T15:59:26.057953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-03-31T15:59:26.058005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:26.064179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-03-31T15:59:26.064345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-03-31T15:59:26.064543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:26.064593Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-03-31T15:59:26.064641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-03-31T15:59:26.064674Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-03-31T15:59:26.066968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:26.067039Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-03-31T15:59:26.067104Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-03-31T15:59:26.069277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:26.069326Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:26.069392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-03-31T15:59:26.069445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-03-31T15:59:26.073407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-03-31T15:59:26.075564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-03-31T15:59:26.075749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-03-31T15:59:26.076822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-03-31T15:59:26.076956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-03-31T15:59:26.077017Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-03-31T15:59:26.077315Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-03-31T15:59:26.077361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-03-31T15:59:26.077537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-03-31T15:59:26.077638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-03-31T15:59:26.080122Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-31T15:59:26.080186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-31T15:59:26.080369Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-31T15:59:26.080418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1
2025-03-31T15:59:26.080639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:26.080684Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-03-31T15:59:26.080776Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-03-31T15:59:26.080808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-31T15:59:26.080845Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-03-31T15:59:26.080873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-31T15:59:26.080905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-03-31T15:59:26.080941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-31T15:59:26.080987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-03-31T15:59:26.081042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-03-31T15:59:26.081109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-03-31T15:59:26.081145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-03-31T15:59:26.081176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-03-31T15:59:26.083306Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-03-31T15:59:26.083425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-03-31T15:59:26.083457Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... TxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101
2025-03-31T15:59:26.246706Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101
2025-03-31T15:59:26.246727Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615
2025-03-31T15:59:26.246754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5
2025-03-31T15:59:26.246801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0
2025-03-31T15:59:26.260242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944
2025-03-31T15:59:26.260315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944
2025-03-31T15:59:26.260367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944
2025-03-31T15:59:26.261399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101
2025-03-31T15:59:26.265940Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546
Forgetting tablet 72075186233409546
2025-03-31T15:59:26.267306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944
2025-03-31T15:59:26.267745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4
2025-03-31T15:59:26.268473Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548
2025-03-31T15:59:26.268898Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547
Forgetting tablet 72075186233409548
2025-03-31T15:59:26.269750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944
2025-03-31T15:59:26.270023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3
2025-03-31T15:59:26.270550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944
2025-03-31T15:59:26.270736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2
Forgetting tablet 72075186233409547
2025-03-31T15:59:26.271737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101
2025-03-31T15:59:26.272106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944
2025-03-31T15:59:26.272165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944
2025-03-31T15:59:26.272284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1
2025-03-31T15:59:26.272681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944
2025-03-31T15:59:26.272727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944
2025-03-31T15:59:26.272806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-03-31T15:59:26.276927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1
2025-03-31T15:59:26.277008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546
2025-03-31T15:59:26.277213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3
2025-03-31T15:59:26.277267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548
2025-03-31T15:59:26.279576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2
2025-03-31T15:59:26.279661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547
2025-03-31T15:59:26.280522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944
2025-03-31T15:59:26.280638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944
TestModificationResult got TxId: 101, wait until txId: 101
TestWaitNotification wait txId: 101
2025-03-31T15:59:26.280899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion
2025-03-31T15:59:26.280957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101
2025-03-31T15:59:26.281436Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944
2025-03-31T15:59:26.281555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult
2025-03-31T15:59:26.281592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:491:2445]
TestWaitNotification: OK eventTxId 101
2025-03-31T15:59:26.282090Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-03-31T15:59:26.282277Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 196us result status StatusPathDoesNotExist
2025-03-31T15:59:26.282488Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
2025-03-31T15:59:26.283026Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-03-31T15:59:26.283224Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 197us result status StatusSuccess
2025-03-31T15:59:26.283584Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
wait until 72075186233409546 is deleted
wait until 72075186233409547 is deleted
wait until 72075186233409548 is deleted
2025-03-31T15:59:26.284122Z node 1 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546
2025-03-31T15:59:26.284246Z node 1 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547
2025-03-31T15:59:26.284318Z node 1 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548
Deleted tabletId 72075186233409546
Deleted tabletId 72075186233409547
Deleted tabletId 72075186233409548
2025-03-31T15:59:26.284869Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-03-31T15:59:26.285069Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 195us result status StatusSuccess 2025-03-31T15:59:26.285444Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TTenantPoolTests::TestStateStatic >> TNodeBrokerTest::DoNotReuseDynnodeIdsBelowMinDynamicNodeId >> TSchemeShardSubDomainTest::CreateItemsInsideSubdomain [GOOD] >> TLocalTests::TestRemoveTenantWhileResolving |69.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateTableForceDrop [GOOD] >> TDynamicNameserverTest::ListNodesCacheWhenNoChanges [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SchemeQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-31T15:59:23.574833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T15:59:23.574916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:23.574971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T15:59:23.575003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T15:59:23.575066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T15:59:23.575110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T15:59:23.575168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue 
configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-31T15:59:23.575247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-03-31T15:59:23.575598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-03-31T15:59:23.689335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-31T15:59:23.689402Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T15:59:23.703014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-03-31T15:59:23.703431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-03-31T15:59:23.703609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-03-31T15:59:23.723752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-03-31T15:59:23.724052Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-03-31T15:59:23.724764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-03-31T15:59:23.725100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-03-31T15:59:23.729554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-03-31T15:59:23.730963Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-31T15:59:23.731044Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-31T15:59:23.731171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-03-31T15:59:23.731238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-31T15:59:23.731272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-03-31T15:59:23.731461Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-03-31T15:59:23.744373Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062]
2025-03-31T15:59:24.039360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-03-31T15:59:24.039585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:24.039804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-03-31T15:59:24.040012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-03-31T15:59:24.040054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:24.044227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-03-31T15:59:24.044431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-03-31T15:59:24.044645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:24.044695Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-03-31T15:59:24.044743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-03-31T15:59:24.044782Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-03-31T15:59:24.067862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:24.067946Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-03-31T15:59:24.067986Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-03-31T15:59:24.074382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:24.074449Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:24.074512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-03-31T15:59:24.074577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-03-31T15:59:24.078480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-03-31T15:59:24.080828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-03-31T15:59:24.080988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-03-31T15:59:24.082039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-03-31T15:59:24.082161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-03-31T15:59:24.082231Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-03-31T15:59:24.082496Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-03-31T15:59:24.082544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-03-31T15:59:24.082718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-03-31T15:59:24.082801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-03-31T15:59:24.092037Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-31T15:59:24.092101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-31T15:59:24.092304Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-31T15:59:24.092354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1
FAKE_COORDINATOR: Erasing txId 1
2025-03-31T15:59:24.092702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:24.092750Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-03-31T15:59:24.092861Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-03-31T15:59:24.092897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-31T15:59:24.092935Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-03-31T15:59:24.092963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-31T15:59:24.092997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-03-31T15:59:24.093033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-31T15:59:24.093088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-03-31T15:59:24.093117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-03-31T15:59:24.093229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-03-31T15:59:24.093283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-03-31T15:59:24.093315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-03-31T15:59:24.100432Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-03-31T15:59:24.100577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-03-31T15:59:24.100616Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ...
bletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-03-31T15:59:26.363291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /MyRoot/USER_0/Table11, opId: 137:0, at schemeshard: 72057594046678944
2025-03-31T15:59:26.363404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /MyRoot/USER_0/Table11, opId: 137:0, schema: Name: "Table11" Columns { Name: "key" Type: "Uint32" } Columns { Name: "Value" Type: "Utf8" } KeyColumnNames: "key", at schemeshard: 72057594046678944
2025-03-31T15:59:26.363860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 2], parent name: USER_0, child name: Table11, child id: [OwnerId: 72057594046678944, LocalPathId: 10], at schemeshard: 72057594046678944
2025-03-31T15:59:26.363932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 0
2025-03-31T15:59:26.363989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 1
2025-03-31T15:59:26.364076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new path created for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 2
2025-03-31T15:59:26.364125Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 137:0 1 -> 2
2025-03-31T15:59:26.364793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 137:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-03-31T15:59:26.364878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 137:0, at schemeshard: 72057594046678944
2025-03-31T15:59:26.365023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 11
2025-03-31T15:59:26.365093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 3
2025-03-31T15:59:26.368007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 137, response: Status: StatusAccepted TxId: 137 SchemeshardId: 72057594046678944 PathId: 10, at schemeshard: 72057594046678944
2025-03-31T15:59:26.368223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 137, database: /MyRoot/USER_0, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /MyRoot/USER_0/Table11
2025-03-31T15:59:26.368505Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-31T15:59:26.368581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 137, path id: [OwnerId: 72057594046678944, LocalPathId: 2]
2025-03-31T15:59:26.368814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 137, path id: [OwnerId: 72057594046678944, LocalPathId: 10]
2025-03-31T15:59:26.368902Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-31T15:59:26.368960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:1043:2904], at schemeshard: 72057594046678944, txId: 137, path id: 2
2025-03-31T15:59:26.369012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:1043:2904], at schemeshard: 72057594046678944, txId: 137, path id: 10
2025-03-31T15:59:26.369440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 137:0, at schemeshard: 72057594046678944
2025-03-31T15:59:26.369506Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 137:0 ProgressState, operation type: TxCreateTable, at tablet# 72057594046678944
2025-03-31T15:59:26.369805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 137:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046678944 OwnerIdx: 10 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 2 } ObjectId: 10 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 2 }
2025-03-31T15:59:26.370699Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 2 Version: 18 PathOwnerId: 72057594046678944, cookie: 137
2025-03-31T15:59:26.370805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 2 Version: 18 PathOwnerId: 72057594046678944, cookie: 137
2025-03-31T15:59:26.370839Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 137
2025-03-31T15:59:26.370898Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 137, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18
2025-03-31T15:59:26.370950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 12
2025-03-31T15:59:26.371738Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 10 Version: 1 PathOwnerId: 72057594046678944, cookie: 137
2025-03-31T15:59:26.371814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 10 Version: 1 PathOwnerId: 72057594046678944, cookie: 137
2025-03-31T15:59:26.371855Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 137
2025-03-31T15:59:26.371897Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 137, pathId: [OwnerId: 72057594046678944, LocalPathId: 10], version: 1
2025-03-31T15:59:26.371935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 4
2025-03-31T15:59:26.372008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 137, ready parts: 0/1, is published: true
2025-03-31T15:59:26.375109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 137:0 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:10 msg type: 268697601
2025-03-31T15:59:26.375267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 137, partId: 0, tablet: 72057594037968897
2025-03-31T15:59:26.375316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 137, shardIdx: 72057594046678944:10, partId: 0
2025-03-31T15:59:26.375854Z node 1 :HIVE INFO: [72057594037968897] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 10 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 2 } ObjectId: 10 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 2 }
2025-03-31T15:59:26.376057Z node 1 :HIVE INFO: [72057594037968897] TEvCreateTablet, Owner 72057594046678944, OwnerIdx 10, type DataShard, boot OK, tablet id 72075186233409555
2025-03-31T15:59:26.376984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvCreateTabletReply at schemeshard: 72057594046678944 message: Status: OK Owner: 72057594046678944 OwnerIdx: 10 TabletID: 72075186233409555 Origin: 72057594037968897
2025-03-31T15:59:26.377038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByShardIdx, TxId: 137, shardIdx: 72057594046678944:10, partId: 0
2025-03-31T15:59:26.377170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 137:0, at schemeshard: 72057594046678944, message: Status: OK Owner: 72057594046678944 OwnerIdx: 10 TabletID: 72075186233409555 Origin: 72057594037968897
2025-03-31T15:59:26.377224Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 137:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046678944
2025-03-31T15:59:26.377295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 137:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046678944 OwnerIdx: 10 TabletID: 72075186233409555 Origin: 72057594037968897
2025-03-31T15:59:26.377387Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 137:0 2 -> 3
2025-03-31T15:59:26.379598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 137
2025-03-31T15:59:26.379772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 137
2025-03-31T15:59:26.381585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 137:0, at schemeshard: 72057594046678944
2025-03-31T15:59:26.381967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 137:0, at schemeshard: 72057594046678944
2025-03-31T15:59:26.382030Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId# 137:0 ProgressState at tabletId# 72057594046678944
2025-03-31T15:59:26.382125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable TConfigureParts operationId# 137:0 ProgressState Propose modify scheme on datashard datashardId: 72075186233409555 seqNo: 4:5
2025-03-31T15:59:26.382581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable TConfigureParts operationId# 137:0 ProgressState Propose modify scheme on datashard datashardId: 72075186233409555 message: TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 993 RawX2: 4294970161 } TxBody: "\n\236\004\n\007Table11\020\n\032\r\n\003key\030\002 \001(\000@\000\032\020\n\005Value\030\200$ \002(\000@\000(\001:\262\003\022\253\003\010\200\200\200\002\020\254\002\030\364\003 \200\200\200\010(\0000\200\200\200 8\200\200\200\010@\2008H\000RX\010\000\020\000\030\010 \010(\200\200\200@0\377\377\377\377\0178\001B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen1P\nX\200\200\001`nh\000p\000Rb\010\001\020\200\200\200\024\030\005 \020(\200\200\200\200\0020\377\377\377\377\0178\000B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen2P\nX\200\200\001`nh\200\200\200\004p\200\200\200\004Rc\010\002\020\200\200\200\310\001\030\005 \020(\200\200\200\200@0\377\377\377\377\0178\000B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen3P\nX\200\200\001`nh\200\200\200(p\200\200\200(X\001`\005j$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionr\017compaction_gen0z\017compaction_gen0\202\001\004scan\210\001\200\200\200\010\220\001\364\003\230\0012\270\001\2008\300\001\006R\002\020\001J\026/MyRoot/USER_0/Table11\242\001\006\001\000\000\000\000\200\252\001\000\260\001\001\270\001\000\210\002\001\222\002\013\t\240\207\205\000\000\000\000\001\020\n:\004\010\004\020\005" TxId: 137 ExecLevel: 0 Flags: 0 SchemeShardId: 72057594046678944 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } SubDomainPathId: 2
2025-03-31T15:59:26.387281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 137:0 from tablet: 72057594046678944 to tablet: 72075186233409555 cookie: 72057594046678944:10 msg type: 269549568
2025-03-31T15:59:26.387495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 137, partId: 0, tablet: 72075186233409555
TestModificationResult got TxId: 137, wait until txId: 137
|69.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest
>> TSchemeShardSubDomainTest::CreateSubDomainWithoutSomeTablets [GOOD]
>> TNodeBrokerTest::NodeNameExpiration
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DeleteAndRestart [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141]
2025-03-31T15:59:26.268351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-03-31T15:59:26.268456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-31T15:59:26.268502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-03-31T15:59:26.268538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-03-31T15:59:26.268580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-03-31T15:59:26.268625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-03-31T15:59:26.268678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-31T15:59:26.268771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-03-31T15:59:26.269145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-03-31T15:59:26.358166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-31T15:59:26.358227Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T15:59:26.362832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-03-31T15:59:26.362933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-03-31T15:59:26.363139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-03-31T15:59:26.367958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-03-31T15:59:26.368194Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-03-31T15:59:26.368901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-03-31T15:59:26.369112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-03-31T15:59:26.371015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-03-31T15:59:26.372159Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-31T15:59:26.372223Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-31T15:59:26.372380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-03-31T15:59:26.372426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-31T15:59:26.372464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-03-31T15:59:26.372598Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-03-31T15:59:26.380180Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062]
2025-03-31T15:59:26.514866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-03-31T15:59:26.515132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:26.515386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-03-31T15:59:26.515617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-03-31T15:59:26.515683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:26.524622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-03-31T15:59:26.524764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-03-31T15:59:26.524974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:26.525033Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-03-31T15:59:26.525079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-03-31T15:59:26.525112Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-03-31T15:59:26.531012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:26.531110Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-03-31T15:59:26.531162Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-03-31T15:59:26.537447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:26.537515Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:26.537580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-03-31T15:59:26.537638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-03-31T15:59:26.541480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-03-31T15:59:26.543852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-03-31T15:59:26.544107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-03-31T15:59:26.545237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-03-31T15:59:26.545404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-03-31T15:59:26.545474Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-03-31T15:59:26.545751Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-03-31T15:59:26.545830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-03-31T15:59:26.546011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-03-31T15:59:26.546116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-03-31T15:59:26.548357Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-31T15:59:26.548428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-31T15:59:26.548603Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-31T15:59:26.548639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1
2025-03-31T15:59:26.548877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:26.548922Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-03-31T15:59:26.549010Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-03-31T15:59:26.549046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-31T15:59:26.549087Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-03-31T15:59:26.549117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-31T15:59:26.549153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-03-31T15:59:26.549190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-31T15:59:26.549262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-03-31T15:59:26.549298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-03-31T15:59:26.549375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-03-31T15:59:26.549417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-03-31T15:59:26.549454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-03-31T15:59:26.551425Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-03-31T15:59:26.551567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-03-31T15:59:26.551610Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ...
678944 is [1:553:2487] sender: [1:612:2058] recipient: [1:15:2062]
2025-03-31T15:59:26.883700Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944
2025-03-31T15:59:26.883822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult
2025-03-31T15:59:26.883859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:610:2531]
TestWaitNotification: OK eventTxId 101
2025-03-31T15:59:26.884312Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-03-31T15:59:26.884453Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 165us result status StatusPathDoesNotExist
2025-03-31T15:59:26.884608Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
2025-03-31T15:59:26.885518Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
Leader for TabletID 72057594046678944 is [1:553:2487] sender: [1:616:2058] recipient: [1:102:2137]
Leader for TabletID 72057594046678944 is [1:553:2487] sender: [1:618:2058] recipient: [1:15:2062]
Leader for TabletID 72057594046678944 is [1:553:2487] sender: [1:620:2058] recipient: [1:619:2536]
Leader for TabletID 72057594046678944 is [1:621:2537] sender: [1:622:2058] recipient: [1:619:2536]
2025-03-31T15:59:26.921827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-03-31T15:59:26.921942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-31T15:59:26.922011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-03-31T15:59:26.922050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-03-31T15:59:26.922086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-03-31T15:59:26.922118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-03-31T15:59:26.922182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-31T15:59:26.922283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-03-31T15:59:26.922614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-03-31T15:59:26.940738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-03-31T15:59:26.942100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-03-31T15:59:26.942285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-03-31T15:59:26.942512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-31T15:59:26.942549Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T15:59:26.942640Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-03-31T15:59:26.943378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 1, at schemeshard: 72057594046678944
2025-03-31T15:59:26.943493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944
2025-03-31T15:59:26.943591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944
2025-03-31T15:59:26.944059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944
2025-03-31T15:59:26.944159Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0
2025-03-31T15:59:26.944372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944
2025-03-31T15:59:26.944473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944
2025-03-31T15:59:26.944587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 0, at schemeshard: 72057594046678944
2025-03-31T15:59:26.944700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944
2025-03-31T15:59:26.944821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944
2025-03-31T15:59:26.945046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 0, at schemeshard: 72057594046678944
2025-03-31T15:59:26.945365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944
2025-03-31T15:59:26.945486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944
2025-03-31T15:59:26.945885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944
2025-03-31T15:59:26.945959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944
2025-03-31T15:59:26.946207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944
2025-03-31T15:59:26.946331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944
2025-03-31T15:59:26.946418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944
2025-03-31T15:59:26.946619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944
2025-03-31T15:59:26.946716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944
2025-03-31T15:59:26.946859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944
2025-03-31T15:59:26.947328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944
2025-03-31T15:59:26.947530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944
2025-03-31T15:59:26.947600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944
2025-03-31T15:59:26.947673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944
2025-03-31T15:59:26.969182Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-31T15:59:26.969298Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-31T15:59:26.969717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-03-31T15:59:26.969805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-31T15:59:26.969864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-03-31T15:59:26.970812Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
Leader for TabletID 72057594046678944 is [1:621:2537] sender: [1:680:2058] recipient: [1:15:2062]
2025-03-31T15:59:27.003546Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-03-31T15:59:27.003737Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 218us result status StatusPathDoesNotExist
2025-03-31T15:59:27.003887Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
2025-03-31T15:59:27.004539Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-03-31T15:59:27.004727Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 172us result status StatusSuccess
2025-03-31T15:59:27.005053Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateForceDropSolomon [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141]
2025-03-31T15:59:25.490012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-03-31T15:59:25.490114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-31T15:59:25.490159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-03-31T15:59:25.490193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-03-31T15:59:25.490237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-03-31T15:59:25.490280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-03-31T15:59:25.490365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-31T15:59:25.490453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-03-31T15:59:25.490800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-03-31T15:59:25.580845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-31T15:59:25.580896Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T15:59:25.585640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-03-31T15:59:25.585758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-03-31T15:59:25.586004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-03-31T15:59:25.590884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-03-31T15:59:25.591119Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-03-31T15:59:25.591875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-03-31T15:59:25.592203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-03-31T15:59:25.593892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-03-31T15:59:25.595042Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-31T15:59:25.595140Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-31T15:59:25.595316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-03-31T15:59:25.595367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-31T15:59:25.595401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-03-31T15:59:25.595567Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-03-31T15:59:25.603473Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062]
2025-03-31T15:59:25.711537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-03-31T15:59:25.711733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:25.711920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-03-31T15:59:25.712107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-03-31T15:59:25.712157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:25.714206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-03-31T15:59:25.714331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-03-31T15:59:25.714515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:25.714573Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-03-31T15:59:25.714617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-03-31T15:59:25.714673Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-03-31T15:59:25.716398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:25.716455Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-03-31T15:59:25.716482Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-03-31T15:59:25.718164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:25.718201Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:25.718244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-03-31T15:59:25.718284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-03-31T15:59:25.720920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-03-31T15:59:25.722667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-03-31T15:59:25.722853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-03-31T15:59:25.723754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-03-31T15:59:25.723897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-03-31T15:59:25.723949Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-03-31T15:59:25.724159Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-03-31T15:59:25.724197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-03-31T15:59:25.724339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-03-31T15:59:25.724453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-03-31T15:59:25.726331Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-31T15:59:25.726373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-31T15:59:25.726526Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-31T15:59:25.726557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1
2025-03-31T15:59:25.726759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:25.726802Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-03-31T15:59:25.726878Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-03-31T15:59:25.726904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-31T15:59:25.726940Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-03-31T15:59:25.726969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-31T15:59:25.727006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-03-31T15:59:25.727042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-31T15:59:25.727103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-03-31T15:59:25.727140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-03-31T15:59:25.727198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-03-31T15:59:25.727236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-03-31T15:59:25.727263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-03-31T15:59:25.729203Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-03-31T15:59:25.729337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-03-31T15:59:25.729394Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ...
T_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:39 2025-03-31T15:59:26.932993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:39 tabletId 72075186233409584 2025-03-31T15:59:26.933076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:9 2025-03-31T15:59:26.933108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:9 tabletId 72075186233409554 2025-03-31T15:59:26.933213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:13 2025-03-31T15:59:26.933257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:13 tabletId 72075186233409558 2025-03-31T15:59:26.933356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:17 2025-03-31T15:59:26.933384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:17 tabletId 72075186233409562 2025-03-31T15:59:26.933464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:21 2025-03-31T15:59:26.933488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:21 tabletId 72075186233409566 2025-03-31T15:59:26.933587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:26 2025-03-31T15:59:26.933613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:26 tabletId 72075186233409571 2025-03-31T15:59:26.933655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:30 2025-03-31T15:59:26.933678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:30 tabletId 72075186233409575 2025-03-31T15:59:26.933810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:34 2025-03-31T15:59:26.933839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:34 tabletId 72075186233409579 2025-03-31T15:59:26.933921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-03-31T15:59:26.933951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-03-31T15:59:26.934040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:38 2025-03-31T15:59:26.934067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:38 tabletId 72075186233409583 2025-03-31T15:59:26.934116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:8 2025-03-31T15:59:26.934138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:8 tabletId 72075186233409553 2025-03-31T15:59:26.934190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:12 2025-03-31T15:59:26.934214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:12 tabletId 72075186233409557 2025-03-31T15:59:26.941499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:7 2025-03-31T15:59:26.941564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:7 tabletId 72075186233409552 2025-03-31T15:59:26.941705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:16 2025-03-31T15:59:26.941733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:16 tabletId 72075186233409561 2025-03-31T15:59:26.942722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:25 2025-03-31T15:59:26.942768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 
72057594046678944:25 tabletId 72075186233409570 2025-03-31T15:59:26.943004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:20 2025-03-31T15:59:26.943034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:20 tabletId 72075186233409565 2025-03-31T15:59:26.943796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:29 2025-03-31T15:59:26.943845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:29 tabletId 72075186233409574 2025-03-31T15:59:26.948250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:33 2025-03-31T15:59:26.948309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:33 tabletId 72075186233409578 2025-03-31T15:59:26.948425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-03-31T15:59:26.948448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-03-31T15:59:26.948498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:37 2025-03-31T15:59:26.948547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:37 tabletId 72075186233409582 2025-03-31T15:59:26.950932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:42 2025-03-31T15:59:26.950984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:42 tabletId 72075186233409587 2025-03-31T15:59:26.955851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6 2025-03-31T15:59:26.955923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2025-03-31T15:59:26.956096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:11 2025-03-31T15:59:26.956145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:11 tabletId 72075186233409556 2025-03-31T15:59:26.956298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:15 2025-03-31T15:59:26.956321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:15 tabletId 72075186233409560 2025-03-31T15:59:26.956472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:19 2025-03-31T15:59:26.956501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:19 tabletId 72075186233409564 2025-03-31T15:59:26.956595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:24 2025-03-31T15:59:26.956616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:24 tabletId 72075186233409569 2025-03-31T15:59:26.956691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:23 2025-03-31T15:59:26.956714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:23 tabletId 72075186233409568 2025-03-31T15:59:26.956778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:28 2025-03-31T15:59:26.956804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:28 tabletId 72075186233409573 2025-03-31T15:59:26.957658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:32 2025-03-31T15:59:26.957698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:32 tabletId 72075186233409577 2025-03-31T15:59:26.957850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 
2025-03-31T15:59:26.957877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-03-31T15:59:26.957949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:36 2025-03-31T15:59:26.958008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:36 tabletId 72075186233409581 2025-03-31T15:59:26.958304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2025-03-31T15:59:26.958431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-03-31T15:59:26.958525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-31T15:59:26.958570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-31T15:59:26.958665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:59:26.964259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-03-31T15:59:26.964596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-03-31T15:59:26.964642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-03-31T15:59:26.965087Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-03-31T15:59:26.965195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-03-31T15:59:26.965229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:2055:3656] TestWaitNotification: OK eventTxId 103 2025-03-31T15:59:26.965806Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/Solomon" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:59:26.965998Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/Solomon" took 216us result status StatusPathDoesNotExist 2025-03-31T15:59:26.966169Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/Solomon\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0/Solomon" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: 
false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-03-31T15:59:26.966826Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:59:26.966988Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 170us result status StatusPathDoesNotExist 2025-03-31T15:59:26.967170Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
>> TSchemeShardSubDomainTest::CreateDropNbs [GOOD]
>> TNodeBrokerTest::ResolveScopeIdForServerless [GOOD]
>> DataShardSnapshots::MvccSnapshotLockedWritesWithConflicts-UseSink [GOOD]
>> DataShardSnapshots::MvccSnapshotLockedWritesWithReadConflicts
|69.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateItemsInsideSubdomain [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T15:59:26.322052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T15:59:26.322187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:26.322229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T15:59:26.322267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T15:59:26.322320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T15:59:26.322363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T15:59:26.322429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:26.322542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 
600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T15:59:26.322904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:59:26.419654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:59:26.419722Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:26.426738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:59:26.426889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T15:59:26.427126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T15:59:26.440690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T15:59:26.441015Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T15:59:26.441806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:26.442058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:59:26.444529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:26.445818Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:26.445887Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:26.446059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T15:59:26.446104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:26.446158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T15:59:26.446315Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T15:59:26.454404Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T15:59:26.600625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:59:26.600888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:26.601136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T15:59:26.601408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T15:59:26.601462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:26.604093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 
SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:26.604288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T15:59:26.604512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:26.604603Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T15:59:26.604658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T15:59:26.604694Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T15:59:26.607511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:26.607597Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:59:26.607654Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T15:59:26.610543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:26.610614Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:26.610702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:26.610769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T15:59:26.614882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:59:26.617637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T15:59:26.617870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T15:59:26.619111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:26.619278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:26.619352Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:26.619664Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T15:59:26.619714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 
2025-03-31T15:59:26.619943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:59:26.620061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T15:59:26.622780Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:26.622838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:26.623052Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:26.623117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T15:59:26.623392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:26.623446Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T15:59:26.623555Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:26.623594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:26.623641Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:26.623677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:26.623734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T15:59:26.623783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:26.623837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T15:59:26.623871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T15:59:26.623954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:59:26.623997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T15:59:26.624029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T15:59:26.626260Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:26.626392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:26.626429Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
ookie: 103 2025-03-31T15:59:27.206651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-03-31T15:59:27.209332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-31T15:59:27.209525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-31T15:59:27.209836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-31T15:59:27.209904Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-03-31T15:59:27.210001Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-03-31T15:59:27.210033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-31T15:59:27.210070Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-03-31T15:59:27.210097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-31T15:59:27.210137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-03-31T15:59:27.210229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:481:2437] message: TxId: 103 2025-03-31T15:59:27.210283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-31T15:59:27.210321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-03-31T15:59:27.210360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-03-31T15:59:27.210469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-03-31T15:59:27.217407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-03-31T15:59:27.217475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:482:2438] TestWaitNotification: OK eventTxId 103 2025-03-31T15:59:27.218100Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:59:27.218328Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 251us result status StatusSuccess 2025-03-31T15:59:27.218864Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "dir_0" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 150 ParentPathId: 2 PathState: EPathStateCreate Owner: 
"root@builtin" ACL: "" ChildrenExist: true } Children { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 150 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:27.219551Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:59:27.221092Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 1.51ms result status StatusSuccess 2025-03-31T15:59:27.221585Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/table_0" PathDescription { Self { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table_0" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 
AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:27.222236Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/dir_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:59:27.222436Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/dir_0" took 212us result status StatusSuccess 2025-03-31T15:59:27.222765Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/dir_0" PathDescription { Self { Name: "dir_0" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: true } Children { Name: "table_1" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 200 ParentPathId: 4 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:27.223378Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/dir_0/table_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:59:27.223596Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/dir_0/table_1" took 187us result status StatusSuccess 2025-03-31T15:59:27.223887Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/dir_0/table_1" PathDescription { Self { Name: "table_1" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 200 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table_1" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 
IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::ListNodesCacheWhenNoChanges [GOOD]
Test command err: 2025-03-31T15:59:27.432062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:59:27.432142Z node 1 :IMPORT WARN: Table profiles were not loaded
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateTableForceDrop [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T15:59:26.545942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T15:59:26.546035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:26.546070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T15:59:26.546110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T15:59:26.546156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T15:59:26.546195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T15:59:26.546251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:26.546339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 
2025-03-31T15:59:26.546642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:59:26.621380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:59:26.621450Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:26.625917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:59:26.626028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T15:59:26.626248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T15:59:26.631458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T15:59:26.631663Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T15:59:26.632263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:26.632507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:59:26.634325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:26.635251Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:26.635333Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:26.635556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T15:59:26.635616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:26.635656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T15:59:26.635800Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T15:59:26.642111Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T15:59:26.777228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:59:26.777447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:26.777634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T15:59:26.777809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T15:59:26.777850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:26.784058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 
72057594046678944 2025-03-31T15:59:26.784210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T15:59:26.784422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:26.784483Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T15:59:26.784529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T15:59:26.784568Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T15:59:26.788128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:26.788225Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:59:26.788272Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T15:59:26.791323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:26.791392Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:26.791474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:26.791531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T15:59:26.795662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:59:26.798582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T15:59:26.798828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T15:59:26.800044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:26.800225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:26.800299Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:26.800599Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T15:59:26.800668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:26.800846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:59:26.800993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T15:59:26.806623Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:26.806685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:26.806900Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:26.806945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T15:59:26.807265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:26.807312Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T15:59:26.807412Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:26.807449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:26.807509Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:26.807545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:26.807581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T15:59:26.807628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:26.807683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T15:59:26.807717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T15:59:26.807805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:59:26.807844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T15:59:26.807890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T15:59:26.810780Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:26.810914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:26.810950Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 6 TabletID: 72075186233409551 2025-03-31T15:59:27.331183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-31T15:59:27.332071Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-03-31T15:59:27.332372Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 2025-03-31T15:59:27.333012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-03-31T15:59:27.333291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 Forgetting tablet 72075186233409548 2025-03-31T15:59:27.334943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 6 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2025-03-31T15:59:27.336066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409551 2025-03-31T15:59:27.337581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-03-31T15:59:27.337809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-31T15:59:27.338424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-03-31T15:59:27.338630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 Forgetting tablet 72075186233409547 Forgetting tablet 72075186233409549 2025-03-31T15:59:27.340699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-31T15:59:27.341271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-31T15:59:27.341324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-31T15:59:27.341477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-31T15:59:27.344730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5 2025-03-31T15:59:27.344790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2025-03-31T15:59:27.345052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:7 2025-03-31T15:59:27.345085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:7 tabletId 72075186233409552 2025-03-31T15:59:27.345261Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-03-31T15:59:27.345290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-03-31T15:59:27.345350Z node 1 :FLAT_TX_SCHEMESHARD INFO: Failed to connect, to tablet: 72075186233409552, at schemeshard: 72057594046678944 2025-03-31T15:59:27.352730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2025-03-31T15:59:27.352907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-31T15:59:27.352975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-31T15:59:27.353064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:59:27.353291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-03-31T15:59:27.353349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-03-31T15:59:27.353451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6 2025-03-31T15:59:27.353476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2025-03-31T15:59:27.353557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-03-31T15:59:27.353582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-03-31T15:59:27.353731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-03-31T15:59:27.353774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-03-31T15:59:27.355808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-03-31T15:59:27.357766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 2025-03-31T15:59:27.358063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-03-31T15:59:27.358105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2025-03-31T15:59:27.358222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-03-31T15:59:27.358245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-03-31T15:59:27.358747Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-03-31T15:59:27.358867Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-03-31T15:59:27.358936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 
2025-03-31T15:59:27.358977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:718:2609] 2025-03-31T15:59:27.359141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-31T15:59:27.359193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:718:2609] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 2025-03-31T15:59:27.359747Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:59:27.359937Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 233us result status StatusPathDoesNotExist 2025-03-31T15:59:27.360138Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-03-31T15:59:27.360622Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:59:27.360825Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 170us result status StatusPathDoesNotExist 2025-03-31T15:59:27.360981Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/table_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0/table_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-03-31T15:59:27.361445Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:59:27.361620Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 183us result status StatusSuccess 2025-03-31T15:59:27.362032Z node 1 
:SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TTenantPoolTests::TestStateStatic [GOOD]
>> TNodeBrokerTest::ExtendLeasePipelining
|69.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TLocalTests::TestRemoveTenantWhileResolving [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateSubDomainWithoutSomeTablets [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-31T15:59:27.339726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T15:59:27.339819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:27.339864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T15:59:27.339896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T15:59:27.339952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T15:59:27.339982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T15:59:27.340032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:27.340126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 
2025-03-31T15:59:27.340474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:59:27.430732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:59:27.430795Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:27.439162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:59:27.439516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T15:59:27.439692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T15:59:27.460154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T15:59:27.460539Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T15:59:27.461277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:27.461690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:59:27.464330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:27.465653Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:27.465710Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:27.465806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T15:59:27.465864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:27.465904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T15:59:27.466122Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T15:59:27.473460Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-03-31T15:59:27.614971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:59:27.615269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:27.615501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T15:59:27.615772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T15:59:27.615838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:27.622858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 
72057594046678944 2025-03-31T15:59:27.623046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T15:59:27.623312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:27.623395Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T15:59:27.623445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T15:59:27.623476Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T15:59:27.629546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:27.629638Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:59:27.629699Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T15:59:27.631883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:27.631960Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:27.632030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:27.632082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T15:59:27.644449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:59:27.646658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T15:59:27.646835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T15:59:27.647960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:27.648112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:27.648184Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:27.648478Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T15:59:27.648538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:27.648713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:59:27.648799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:59:27.650982Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:27.651041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:27.651248Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:27.651306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T15:59:27.651722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:27.651768Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T15:59:27.651869Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:27.651905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:27.651945Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:27.651982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:27.652022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T15:59:27.652060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:27.652115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T15:59:27.652152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T15:59:27.652219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:59:27.652261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T15:59:27.652295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T15:59:27.654230Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:27.654358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:27.654399Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-03-31T15:59:27.654465Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-03-31T15:59:27.654510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 
2025-03-31T15:59:27.654639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-03-31T15:59:27.657790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-03-31T15:59:27.658335Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 100 2025-03-31T15:59:27.662005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Name: "USER_1" TimeCastBucketsPerMediator: 2 StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 100 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:59:27.662274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /MyRoot/USER_1, opId: 100:0, at schemeshard: 72057594046678944 2025-03-31T15:59:27.662394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 100:1, propose status:StatusInvalidParameter, reason: Malformed subdomain request: cant create subdomain with coordinators, but no mediators, at schemeshard: 72057594046678944 2025-03-31T15:59:27.662792Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] Bootstrap 2025-03-31T15:59:27.683659Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] Become StateWork (SchemeCache [1:274:2265]) 2025-03-31T15:59:27.685006Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-31T15:59:27.687586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 100, response: Status: StatusInvalidParameter Reason: "Malformed subdomain request: cant create subdomain with coordinators, but no mediators" TxId: 100 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:27.687785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 100, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Malformed subdomain request: cant create subdomain with coordinators, but no mediators, operation: CREATE DATABASE, path: /MyRoot/USER_1 2025-03-31T15:59:27.688371Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 100, wait until txId: 100 TestModificationResults wait txId: 101 2025-03-31T15:59:27.693486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateSubDomain SubDomain { PlanResolution: 50 Mediators: 1 Name: "USER_2" TimeCastBucketsPerMediator: 2 StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:59:27.693793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /MyRoot/USER_2, opId: 101:0, at schemeshard: 72057594046678944 2025-03-31T15:59:27.693910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusInvalidParameter, reason: Malformed subdomain request: cant create subdomain with mediators, but no coordinators, at schemeshard: 72057594046678944 2025-03-31T15:59:27.698616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 
101, response: Status: StatusInvalidParameter Reason: "Malformed subdomain request: cant create subdomain with mediators, but no coordinators" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:27.698864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Malformed subdomain request: cant create subdomain with mediators, but no coordinators, operation: CREATE DATABASE, path: /MyRoot/USER_2 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 100 2025-03-31T15:59:27.699199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-03-31T15:59:27.699242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2025-03-31T15:59:27.699342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-03-31T15:59:27.699364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-03-31T15:59:27.699828Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-03-31T15:59:27.699961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-03-31T15:59:27.699998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:288:2279] 2025-03-31T15:59:27.700171Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-03-31T15:59:27.700261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-31T15:59:27.700284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:288:2279] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2025-03-31T15:59:27.700724Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:59:27.700902Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_1" took 192us result status StatusPathDoesNotExist 2025-03-31T15:59:27.701160Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_1" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-03-31T15:59:27.701714Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_2" Options { ReturnPartitioningInfo: false 
ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:59:27.701922Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_2" took 211us result status StatusPathDoesNotExist 2025-03-31T15:59:27.702082Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_2\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_2" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-03-31T15:59:27.702596Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:59:27.702789Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 163us result status StatusSuccess 2025-03-31T15:59:27.703276Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ResolveScopeIdForServerless [GOOD] Test command err: 2025-03-31T15:59:27.533650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:59:27.533739Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:27.618564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 
5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000001 FAKE_COORDINATOR: Erasing txId 101 2025-03-31T15:59:27.662815Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 102:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 102 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000002 FAKE_COORDINATOR: Erasing txId 102 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateDropNbs [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T15:59:26.999357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T15:59:26.999447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:26.999489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T15:59:26.999525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T15:59:26.999568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T15:59:26.999615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T15:59:26.999733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:26.999828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T15:59:27.000149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:59:27.102895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:59:27.102975Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:27.121661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:59:27.121794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T15:59:27.122012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T15:59:27.128639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T15:59:27.128952Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T15:59:27.129805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:27.130052Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:59:27.132304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:27.133627Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:27.133702Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:27.133892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T15:59:27.133943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:27.133987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T15:59:27.134151Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T15:59:27.142598Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T15:59:27.311320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:59:27.311566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:27.311820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T15:59:27.312055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T15:59:27.312115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:27.314854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:27.315018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T15:59:27.315249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:27.315314Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T15:59:27.315366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T15:59:27.315403Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T15:59:27.317544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:27.317645Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts 
operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:59:27.317688Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T15:59:27.319571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:27.319623Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:27.319706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:27.319786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T15:59:27.323686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:59:27.326113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T15:59:27.326336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T15:59:27.327518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:27.327678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:27.327804Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:27.328099Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T15:59:27.328154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:27.328352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:59:27.328484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T15:59:27.332500Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:27.332562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:27.332798Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:27.332840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 
1 2025-03-31T15:59:27.333104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:27.333155Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T15:59:27.333277Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:27.333314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:27.333354Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:27.333384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:27.333422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T15:59:27.333465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:27.333519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T15:59:27.333558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T15:59:27.333641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:59:27.333676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T15:59:27.333707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T15:59:27.335878Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:27.336012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:27.336071Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
2025-03-31T15:59:27.604791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T15:59:27.604830Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-03-31T15:59:27.604874Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-03-31T15:59:27.604910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-03-31T15:59:27.605770Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T15:59:27.605840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T15:59:27.605864Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-03-31T15:59:27.605888Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-03-31T15:59:27.605913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-03-31T15:59:27.605990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-03-31T15:59:27.607570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-03-31T15:59:27.607622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2025-03-31T15:59:27.607658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-03-31T15:59:27.607686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2025-03-31T15:59:27.609515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-31T15:59:27.610220Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2025-03-31T15:59:27.611415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:27.611703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-31T15:59:27.612286Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2025-03-31T15:59:27.612443Z node 1 :HIVE INFO: 
[72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-03-31T15:59:27.612592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-03-31T15:59:27.612822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 Forgetting tablet 72075186233409548 2025-03-31T15:59:27.613623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-03-31T15:59:27.613787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409547 2025-03-31T15:59:27.614260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-31T15:59:27.614420Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 2025-03-31T15:59:27.614605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 Forgetting tablet 72075186233409549 2025-03-31T15:59:27.615342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-03-31T15:59:27.615499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-03-31T15:59:27.615803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-31T15:59:27.615849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-03-31T15:59:27.615925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-31T15:59:27.616104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-31T15:59:27.616159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-31T15:59:27.616269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-31T15:59:27.618775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-03-31T15:59:27.618838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-03-31T15:59:27.618952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-03-31T15:59:27.618986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-03-31T15:59:27.619549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 
2025-03-31T15:59:27.619587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-03-31T15:59:27.629013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-03-31T15:59:27.629095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-03-31T15:59:27.629435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2025-03-31T15:59:27.629527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-03-31T15:59:27.629579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-31T15:59:27.629671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-31T15:59:27.629754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:59:27.636660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-03-31T15:59:27.636934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-03-31T15:59:27.636987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-03-31T15:59:27.637484Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-03-31T15:59:27.637576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-31T15:59:27.637634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:534:2489] TestWaitNotification: OK eventTxId 102 2025-03-31T15:59:27.652472Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/BSVolume" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:59:27.652666Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/BSVolume" took 237us result status StatusPathDoesNotExist 2025-03-31T15:59:27.652833Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/BSVolume\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0/BSVolume" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: 
false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-03-31T15:59:27.653548Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:59:27.653690Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 153us result status StatusPathDoesNotExist 2025-03-31T15:59:27.653855Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |69.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsage [GOOD] >> TEnumerationTest::TestPublish [GOOD] >> TTenantPoolTests::TestForcedSensorLabelsForStaticConfig [GOOD] |69.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest |69.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TTenantPoolTests::TestStateStatic [GOOD] >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedBeforeRead-UseSink [GOOD] >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedAfterRead+UseSink >> TNodeBrokerTest::LoadStateMoveEpoch >> TNodeBrokerTest::NoEffectBeforeCommit |69.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest |69.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TLocalTests::TestRemoveTenantWhileResolving [GOOD] |69.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest |69.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TEnumerationTest::TestPublish [GOOD] >> TSchemeShardSubDomainTest::CreateDropSolomon [GOOD] |69.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsage [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T15:59:15.529517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T15:59:15.529608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:15.529644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-03-31T15:59:15.529681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T15:59:15.529731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T15:59:15.529769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T15:59:15.529815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:15.529897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T15:59:15.530158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:59:15.622537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:59:15.622610Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:15.636835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:59:15.636948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T15:59:15.637163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T15:59:15.656326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T15:59:15.656620Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T15:59:15.657293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:15.657574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:59:15.664058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:15.665411Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:15.665485Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:15.665662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T15:59:15.665707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:15.665746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T15:59:15.665899Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T15:59:15.679311Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T15:59:15.814731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 
72057594046678944 2025-03-31T15:59:15.814969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:15.815248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T15:59:15.815473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T15:59:15.815524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:15.820134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:15.820314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T15:59:15.820535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:15.820587Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T15:59:15.820631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T15:59:15.820662Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T15:59:15.823672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:15.823748Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:59:15.823789Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T15:59:15.827586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:15.827684Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:15.827757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:15.827826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T15:59:15.831409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:59:15.836262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T15:59:15.836484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T15:59:15.837696Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:15.837875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:15.837954Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:15.838254Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T15:59:15.838308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:15.838485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:59:15.838607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T15:59:15.852689Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:15.852750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:15.852963Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:15.853010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T15:59:15.853307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:15.853373Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T15:59:15.853470Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:15.853524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:15.853565Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:15.853592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:15.853627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T15:59:15.853667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:15.853715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T15:59:15.853751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T15:59:15.853836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:59:15.853878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T15:59:15.853914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T15:59:15.856670Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:15.856821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:15.856858Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, coun ... teStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Table2" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 2 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1752 DataSize: 1752 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:28.344320Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [1:483:2441] sender: [1:767:2058] recipient: [1:102:2137] Leader for TabletID 72057594046678944 is [1:483:2441] sender: [1:770:2058] recipient: [1:15:2062] Leader for TabletID 72057594046678944 is [1:483:2441] sender: [1:771:2058] recipient: [1:769:2687] Leader for TabletID 72057594046678944 is [1:772:2688] sender: [1:773:2058] recipient: [1:769:2687] 2025-03-31T15:59:28.391400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T15:59:28.391521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:28.391591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-03-31T15:59:28.391642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T15:59:28.391693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T15:59:28.391724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T15:59:28.391795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:28.391910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, 
DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T15:59:28.392290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:59:28.412555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:59:28.414325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T15:59:28.414536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T15:59:28.414647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:59:28.414695Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:28.415058Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T15:59:28.415940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 3, at schemeshard: 72057594046678944 2025-03-31T15:59:28.416033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: Table1, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-31T15:59:28.416091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: Table2, child id: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-03-31T15:59:28.416195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-03-31T15:59:28.416279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-03-31T15:59:28.416773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 2, at schemeshard: 72057594046678944 2025-03-31T15:59:28.416896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-03-31T15:59:28.416958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 0 2025-03-31T15:59:28.417044Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-03-31T15:59:28.417341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 4, at schemeshard: 72057594046678944 2025-03-31T15:59:28.417538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-03-31T15:59:28.417681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 3, at schemeshard: 72057594046678944 2025-03-31T15:59:28.417735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-31T15:59:28.417770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-03-31T15:59:28.417807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-31T15:59:28.417951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 3, at schemeshard: 72057594046678944 2025-03-31T15:59:28.418161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for 
TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-03-31T15:59:28.418492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 9, at schemeshard: 72057594046678944 2025-03-31T15:59:28.418890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-03-31T15:59:28.419077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-03-31T15:59:28.419537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-03-31T15:59:28.419627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-03-31T15:59:28.419843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-03-31T15:59:28.419930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-03-31T15:59:28.420004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-03-31T15:59:28.420160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-03-31T15:59:28.420239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-03-31T15:59:28.420372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-03-31T15:59:28.420544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-03-31T15:59:28.420729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-03-31T15:59:28.420796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-03-31T15:59:28.420855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-03-31T15:59:28.430553Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:28.430662Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:28.431157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T15:59:28.431232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:28.431287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T15:59:28.431375Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:772:2688] sender: [1:830:2058] recipient: [1:15:2062] 2025-03-31T15:59:28.466728Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:59:28.467330Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 613us result status StatusSuccess 2025-03-31T15:59:28.467881Z node 1 
:SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Table2" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 2 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1752 DataSize: 1752 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TNodeBrokerTest::MinDynamicNodeIdShifted ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TTenantPoolTests::TestForcedSensorLabelsForStaticConfig [GOOD] Test command err: 2025-03-31T15:59:28.080967Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-03-31T15:59:28.081563Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/0l5c/001510/r3tmp/tmprWUXey/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-03-31T15:59:28.082273Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/0l5c/001510/r3tmp/tmprWUXey/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/0l5c/001510/r3tmp/tmprWUXey/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 16807964385776986328 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-03-31T15:59:28.088274Z node 3 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-03-31T15:59:28.088779Z node 3 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/0l5c/001510/r3tmp/tmprWUXey/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-03-31T15:59:28.089112Z node 3 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/0l5c/001510/r3tmp/tmprWUXey/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/0l5c/001510/r3tmp/tmprWUXey/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 14679209913050311491 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::ReadingAfterSplitTest_PreferedPartition_AutoscaleAwareSDK [GOOD] Test command err: 2025-03-31T15:55:41.465035Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488002928443315782:2135];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:55:41.516638Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018fb/r3tmp/tmp1LL1Fw/pdisk_1.dat 2025-03-31T15:55:42.334474Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-31T15:55:43.125740Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:55:44.217996Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:55:44.241047Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:55:44.249722Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:55:44.249806Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:55:44.808088Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:55:45.658570Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.108289s 2025-03-31T15:55:45.663693Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event 
processing took too much time Type# 2146435078 Duration# 0.113203s TServer::EnableGrpc on GrpcPort 14958, node 1 2025-03-31T15:55:47.531159Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488002928443315782:2135];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:55:47.531544Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:55:47.531771Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/0l5c/0018fb/r3tmp/yandexfpenLD.tmp 2025-03-31T15:55:47.531782Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/0l5c/0018fb/r3tmp/yandexfpenLD.tmp 2025-03-31T15:55:47.531919Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/0l5c/0018fb/r3tmp/yandexfpenLD.tmp 2025-03-31T15:55:47.532010Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T15:55:47.749599Z INFO: TTestServer started on Port 20694 GrpcPort 14958 TClient is connected to server localhost:20694 PQClient connected to localhost:14958 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:55:49.931899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:55:49.968986Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:55:49.997008Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-31T15:55:50.002349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-03-31T15:55:57.578735Z node 1 :KQP_PROXY ERROR: TraceId: "01jqpen4y18m926e2r5nfjrsk4", Request deadline has expired for 1.663622s seconds 2025-03-31T15:55:57.693620Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488002997162793303:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:55:57.695149Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:55:57.711345Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488002997162793327:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:55:57.749386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-03-31T15:55:57.943257Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488002997162793329:2358], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-03-31T15:55:59.068411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T15:55:59.068431Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:55:59.987251Z node 1 :TX_PROXY ERROR: Actor# [1:7488003001457760692:2485] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:56:00.795002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T15:56:01.030746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T15:56:01.133934Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488003005752728009:2371], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T15:56:01.216314Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OWMzNGM2YWUtOTkyZjc2MWItNzFhYTMxNjctZjZmMTU5OGM=, ActorId: [1:7488002997162793285:2349], ActorState: ExecuteState, TraceId: 01jqpenb80fy24y0krmyff85dt, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T15:56:01.244502Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T15:56:02.000089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7488003022932597527:2702] === CheckClustersList. Ok 2025-03-31T15:56:09.356257Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-03-31T15:56:09.552016Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:7488002937033250754:2208]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T15:56:09.552333Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T15:56:09.552705Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:7488002937033250754:2208], Recipient [1:7488002937033250754:2208]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T15:56:09.553303Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T15:56:09.694237Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-03-31T15:56:09.695313Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7488003048702401515:2814], Recipient [1:7488002937033250754:2208]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T15:56:09.695337Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T15:56:09.695347Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-03-31T15:56:09.695383Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7488003048702401511:2811], Recipient [1:7488002937033250754:2208]: {TEvModifySchemeTransaction txid# 281474976710672 TabletId# 72057594046644480} 2025-03-31T15:56:09.695393Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-03-31T15:56:10.158260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose 
Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { ... 88505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7488003777283700027:2870], Partition 3, Sender [0:0:0], Recipient [5:7488003777283700119:2882], Cookie: 0 2025-03-31T15:59:01.435431Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7488003777283700119:2882]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-31T15:59:01.435457Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-31T15:59:01.435503Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] Have 0 items to delete old stuff 2025-03-31T15:59:01.435577Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-31T15:59:01.435599Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-31T15:59:01.435627Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-03-31T15:59:01.439357Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7488003764398797687:2790], Partition 1, Sender [0:0:0], Recipient [5:7488003764398797788:2804], Cookie: 0 2025-03-31T15:59:01.439427Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7488003764398797788:2804]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-31T15:59:01.439449Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-31T15:59:01.439491Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-03-31T15:59:01.439561Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-31T15:59:01.439586Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-31T15:59:01.439614Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-03-31T15:59:01.439672Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7488003725744090663:2462], Partition 0, Sender [0:0:0], Recipient [5:7488003725744090725:2466], Cookie: 0 2025-03-31T15:59:01.439700Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7488003725744090725:2466]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-31T15:59:01.439711Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-31T15:59:01.439746Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-03-31T15:59:01.439773Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-31T15:59:01.439787Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-31T15:59:01.439803Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-03-31T15:59:01.439840Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7488003764398797686:2789], Partition 2, Sender [0:0:0], Recipient [5:7488003764398797785:2802], Cookie: 0 2025-03-31T15:59:01.439872Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7488003764398797785:2802]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-31T15:59:01.439885Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-31T15:59:01.439906Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-03-31T15:59:01.439933Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-31T15:59:01.439947Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-31T15:59:01.439962Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-03-31T15:59:01.507447Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7488003777283700019:2869], Partition 4, Sender [0:0:0], Recipient [5:7488003777283700114:2878], Cookie: 0 2025-03-31T15:59:01.507524Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7488003777283700114:2878]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-31T15:59:01.507548Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-31T15:59:01.507593Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] Have 0 items to delete old stuff 2025-03-31T15:59:01.507680Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-31T15:59:01.507702Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-31T15:59:01.507730Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-03-31T15:59:01.537817Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7488003777283700027:2870], Partition 3, Sender [0:0:0], Recipient [5:7488003777283700119:2882], Cookie: 0 2025-03-31T15:59:01.537919Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7488003777283700119:2882]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-31T15:59:01.537954Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-31T15:59:01.538011Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] Have 0 items to delete old stuff 2025-03-31T15:59:01.538103Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-31T15:59:01.538156Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-31T15:59:01.538191Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-03-31T15:59:01.543628Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7488003764398797687:2790], Partition 1, Sender [0:0:0], Recipient [5:7488003764398797788:2804], Cookie: 0 2025-03-31T15:59:01.543723Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7488003764398797788:2804]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-31T15:59:01.543755Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-31T15:59:01.543805Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-03-31T15:59:01.543889Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-31T15:59:01.543921Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-31T15:59:01.543951Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-03-31T15:59:01.544009Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7488003725744090663:2462], Partition 0, Sender [0:0:0], Recipient [5:7488003725744090725:2466], Cookie: 0 2025-03-31T15:59:01.544051Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7488003725744090725:2466]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-31T15:59:01.544065Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-31T15:59:01.544096Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-03-31T15:59:01.544138Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-31T15:59:01.544157Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-31T15:59:01.544177Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-03-31T15:59:01.544225Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7488003764398797686:2789], Partition 2, Sender [0:0:0], Recipient [5:7488003764398797785:2802], Cookie: 0 2025-03-31T15:59:01.544268Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7488003764398797785:2802]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-31T15:59:01.544284Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-31T15:59:01.544309Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-03-31T15:59:01.544344Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-31T15:59:01.544363Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-31T15:59:01.544385Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-03-31T15:59:01.564902Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 65538, Sender [0:0:0], Recipient [5:7488003764398797686:2789]: NActors::TEvents::TEvWakeup 2025-03-31T15:59:01.567344Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 65538, Sender [0:0:0], Recipient [5:7488003764398797687:2790]: NActors::TEvents::TEvWakeup 2025-03-31T15:59:01.611573Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7488003777283700019:2869], Partition 4, Sender [0:0:0], Recipient [5:7488003777283700114:2878], Cookie: 0 2025-03-31T15:59:01.611683Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7488003777283700114:2878]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-31T15:59:01.611728Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-31T15:59:01.611788Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] Have 0 items to delete old stuff 2025-03-31T15:59:01.611876Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-31T15:59:01.611907Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-31T15:59:01.611943Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::ReadFromTimestamp_PQv1 [GOOD] Test command err: 2025-03-31T15:55:36.845019Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488002908857027930:2143];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:55:36.854694Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001922/r3tmp/tmp6BcVyC/pdisk_1.dat 2025-03-31T15:55:37.191953Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-31T15:55:37.539714Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:55:37.580673Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:55:37.580783Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:55:37.584533Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4915, node 1 2025-03-31T15:55:38.127678Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/0l5c/001922/r3tmp/yandexFuCrir.tmp 2025-03-31T15:55:38.127700Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/0l5c/001922/r3tmp/yandexFuCrir.tmp 2025-03-31T15:55:38.127862Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/0l5c/001922/r3tmp/yandexFuCrir.tmp 2025-03-31T15:55:38.127976Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T15:55:38.726433Z INFO: TTestServer started on Port 15874 GrpcPort 4915 TClient is connected to server localhost:15874 PQClient connected to localhost:4915 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-03-31T15:55:41.847612Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488002908857027930:2143];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:55:41.847790Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
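Every record above follows the actor-system log format "<timestamp> node <N> :<COMPONENT> <SEVERITY>: <body>", with many records packed onto each physical line. A minimal Python sketch for triaging such output by counting (component, severity) pairs; the regex and the file name are assumptions inferred from the records above, not part of the ya test harness:

    import re
    from collections import Counter

    # Matches records such as:
    #   2025-03-31T15:59:28.414647Z node 1 :FLAT_TX_SCHEMESHARD WARN: ...
    # Client-side lines without a "node N" prefix are deliberately skipped.
    RECORD = re.compile(
        r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z "
        r"node \d+ "
        r":(?P<component>[A-Z0-9_]+) "
        r"(?P<severity>TRACE|DEBUG|INFO|NOTICE|WARN|ERROR|CRIT):")

    def triage(path: str) -> Counter:
        """Count (component, severity) pairs so the TRACE/DEBUG noise can be
        separated from the few CRIT/ERROR records that need attention."""
        counts = Counter()
        with open(path, encoding="utf-8", errors="replace") as fh:
            for line in fh:
                # finditer, because several records share one physical line
                for m in RECORD.finditer(line):
                    counts[(m["component"], m["severity"])] += 1
        return counts

    if __name__ == "__main__":
        for (component, severity), n in triage("ya_log.txt").most_common(20):
            print(f"{component:24} {severity:6} {n}")

Applied to this section, the largest buckets would be PERSQUEUE TRACE and FLAT_TX_SCHEMESHARD DEBUG, while the BS_PDISK CRIT records stand out despite their low count.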
2025-03-31T15:55:42.020674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T15:55:42.152330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T15:55:42.456085Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:55:50.094680Z node 1 :KQP_PROXY ERROR: TraceId: "01jqpemwhq8xp6vzsymztsnrv6", Request deadline has expired for 2.597571s seconds 2025-03-31T15:55:50.111369Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488002968986570846:2358], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:55:50.111497Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:55:50.115659Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488002968986570873:2361], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:55:50.118299Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-03-31T15:55:50.135656Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488002968986570875:2362], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-03-31T15:55:50.236550Z node 1 :TX_PROXY ERROR: Actor# [1:7488002968986570926:2483] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:55:52.306526Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488002968986570946:2368], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T15:55:52.312194Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZWYxMjJiNjMtMjYyY2M5ZS0yNmFlMDY0Ny0xYjFhODRiOA==, ActorId: [1:7488002968986570843:2356], ActorState: ExecuteState, TraceId: 01jqpen3yw6qxv6pwnb48mqzps, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T15:55:52.314432Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T15:55:52.318859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T15:55:52.471275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T15:55:52.523915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T15:55:52.524226Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:55:53.041460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7488002994756375076:2702] === CheckClustersList. 
Ok 2025-03-31T15:56:01.664328Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-03-31T15:56:01.671383Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:7488002913151995554:2190]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T15:56:01.671419Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T15:56:01.671462Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:7488002913151995554:2190], Recipient [1:7488002913151995554:2190]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T15:56:01.671476Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T15:56:01.848683Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-03-31T15:56:01.852658Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7488003016231211761:2819], Recipient [1:7488002913151995554:2190]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T15:56:01.852681Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T15:56:01.852955Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-03-31T15:56:01.852987Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7488003016231211757:2816], Recipient [1:7488002913151995554:2190]: {TEvModifySchemeTransaction txid# 281474976710672 TabletId# 72057594046644480} 2025-03-31T15:56:01.852998Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-03-31T15:56:02.323630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 100 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976710672 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-03-31T15:56:02.326336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topi ... 
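The ESchemeOpCreatePersQueueGroup proposal above carries the topic's autoscaling knobs: MinPartitionCount 1, MaxPartitionCount 100, ScaleThresholdSeconds 300, scale-up/scale-down at 90%/30% of the partition write speed, strategy CAN_SPLIT (splits allowed, no merges). A small sketch that pulls those knobs out of a log for comparison across test runs, assuming the textual protobuf rendering keeps exactly this field order:

    import re

    STRATEGY = re.compile(
        r"PartitionStrategy \{ "
        r"MinPartitionCount: (\d+) MaxPartitionCount: (\d+) "
        r"ScaleThresholdSeconds: (\d+) "
        r"ScaleUpPartitionWriteSpeedThresholdPercent: (\d+) "
        r"ScaleDownPartitionWriteSpeedThresholdPercent: (\d+) "
        r"PartitionStrategyType: (\w+)")

    def partition_strategies(path: str):
        """Yield one tuple per CreatePersQueueGroup proposal in the log:
        (min_parts, max_parts, threshold_s, up_pct, down_pct, strategy)."""
        with open(path, encoding="utf-8", errors="replace") as fh:
            for line in fh:
                for m in STRATEGY.finditer(line):
                    yield (*map(int, m.groups()[:5]), m.group(6))

For the test-topic created here this yields (1, 100, 300, 90, 30, 'CAN_SPLIT'), consistent with the same test later reporting partitions 0 through 2: the topic apparently split under load and, under CAN_SPLIT, never merged back.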
5:59:04.948922Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvPQ::TEvProxyResponse 2025-03-31T15:59:04.948943Z node 5 :PERSQUEUE DEBUG: Answer ok topic: 'test-topic' partition: 2 messageNo: 0 requestId: cookie: 1 2025-03-31T15:59:04.949053Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_5_1_604601467397562758_v1 TopicId: Topic /Root/test-topic in database: Root, partition 2(assignId:2) initDone 1 event { Cookie: 1 } 2025-03-31T15:59:04.949096Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_5_1_604601467397562758_v1 TopicId: Topic /Root/test-topic in database: Root, partition 2(assignId:2) commit done to position 1 endOffset 1 with cookie 1 2025-03-31T15:59:04.949167Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_5_1_604601467397562758_v1 replying for commits: assignId# 2, from# 1, to# 1, offset# 1 2025-03-31T15:59:04.950039Z node 5 :PERSQUEUE TRACE: StateFunc, received event# 271450112, Sender [5:7488003790146335626:2875], Recipient [5:7488003661297313576:2077]: NKikimr::NPQ::TEvPqCache::TEvCacheL2Request 2025-03-31T15:59:04.950074Z node 5 :PERSQUEUE TRACE: StateFunc, processing event TEvPqCache::TEvCacheL2Request 2025-03-31T15:59:04.964158Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_5_1_604601467397562758_v1 grpc read done: success# 0, data# { } 2025-03-31T15:59:04.964201Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_5_1_604601467397562758_v1 grpc read failed 2025-03-31T15:59:04.964247Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_5_1_604601467397562758_v1 grpc closed 2025-03-31T15:59:04.964302Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_5_1_604601467397562758_v1 is DEAD 2025-03-31T15:59:04.964797Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][test-topic] pipe [5:7488003798736270586:2954] disconnected; active server actors: 1 2025-03-31T15:59:04.964823Z node 5 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][test-topic] pipe [5:7488003798736270586:2954] client test-consumer disconnected session test-consumer_5_1_604601467397562758_v1 2025-03-31T15:59:04.965036Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 269877764, Sender [5:7488003798736270590:3792], Recipient [5:7488003730016791787:2463]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-03-31T15:59:04.965062Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvTabletPipe::TEvServerDisconnected 2025-03-31T15:59:04.965081Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2025-03-31T15:59:04.965102Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session test-consumer_5_1_604601467397562758_v1 2025-03-31T15:59:04.965135Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [5:7488003798736270589:2957] destroyed 2025-03-31T15:59:04.965189Z node 5 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_5_1_604601467397562758_v1 2025-03-31T15:59:04.966004Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 269877764, Sender [5:7488003798736270597:3794], Recipient [5:7488003790146335589:2871]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-03-31T15:59:04.966036Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvTabletPipe::TEvServerDisconnected 
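The records around this point capture a consumer read-session teardown: the commit completes, the client closes, grpc read fails, the session is marked DEAD, the read balancer drops its pipe, and each PQ tablet destroys and deregisters the session's direct-read state. A grep-style sketch for reconstructing one session's timeline, assuming the session id (here test-consumer_5_1_604601467397562758_v1) appears verbatim in every related record:

    import sys

    def session_timeline(path: str, session_id: str) -> None:
        """Print every record mentioning one read session, to follow its
        create -> commit -> grpc closed -> DEAD -> deregistered sequence."""
        with open(path, encoding="utf-8", errors="replace") as fh:
            for lineno, line in enumerate(fh, 1):
                if session_id in line:
                    print(f"{lineno}: {line.rstrip()}")

    if __name__ == "__main__":
        session_timeline(sys.argv[1], sys.argv[2])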
2025-03-31T15:59:04.966053Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897] Handle TEvTabletPipe::TEvServerDisconnected
2025-03-31T15:59:04.966071Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037897] Destroy direct read session test-consumer_5_1_604601467397562758_v1
2025-03-31T15:59:04.966097Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037897] server disconnected, pipe [5:7488003798736270595:2959] destroyed
2025-03-31T15:59:04.966148Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 269877764, Sender [5:7488003798736270598:3795], Recipient [5:7488003790146335588:2870]: NKikimr::TEvTabletPipe::TEvServerDisconnected
2025-03-31T15:59:04.966162Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvTabletPipe::TEvServerDisconnected
2025-03-31T15:59:04.966177Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896] Handle TEvTabletPipe::TEvServerDisconnected
2025-03-31T15:59:04.966191Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Destroy direct read session test-consumer_5_1_604601467397562758_v1
2025-03-31T15:59:04.966211Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037896] server disconnected, pipe [5:7488003798736270596:2958] destroyed
2025-03-31T15:59:04.966248Z node 5 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_5_1_604601467397562758_v1
2025-03-31T15:59:04.966269Z node 5 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_5_1_604601467397562758_v1
2025-03-31T15:59:04.982448Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7488003790146335589:2871], Partition 1, Sender [0:0:0], Recipient [5:7488003790146335669:2879], Cookie: 0
2025-03-31T15:59:04.982536Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7488003790146335669:2879]: NKikimr::TEvPQ::TEvUpdateAvailableSize
2025-03-31T15:59:04.982569Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize
2025-03-31T15:59:04.982622Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff
2025-03-31T15:59:04.982698Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest
2025-03-31T15:59:04.982729Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests.
2025-03-31T15:59:04.982766Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0
2025-03-31T15:59:04.992149Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7488003730016791787:2463], Partition 0, Sender [0:0:0], Recipient [5:7488003730016791849:2467], Cookie: 0
2025-03-31T15:59:04.992237Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7488003730016791849:2467]: NKikimr::TEvPQ::TEvUpdateAvailableSize
2025-03-31T15:59:04.992269Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize
2025-03-31T15:59:04.992318Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff
2025-03-31T15:59:04.992404Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest
2025-03-31T15:59:04.992432Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests.
2025-03-31T15:59:04.992463Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0
2025-03-31T15:59:05.031730Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7488003790146335588:2870], Partition 2, Sender [0:0:0], Recipient [5:7488003790146335664:2877], Cookie: 0
2025-03-31T15:59:05.031770Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435094, Sender [0:0:0], Recipient [5:7488003665592281271:2169]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTopicStats
2025-03-31T15:59:05.031806Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTopicStats
2025-03-31T15:59:05.031817Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7488003790146335664:2877]: NKikimr::TEvPQ::TEvUpdateAvailableSize
2025-03-31T15:59:05.031834Z node 5 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 1
2025-03-31T15:59:05.031846Z node 5 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1
2025-03-31T15:59:05.031848Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize
2025-03-31T15:59:05.031897Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff
2025-03-31T15:59:05.031913Z node 5 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTopicStats on# 0.000000s, queue# 1
2025-03-31T15:59:05.031975Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest
2025-03-31T15:59:05.032003Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests.
2025-03-31T15:59:05.032034Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0
2025-03-31T15:59:05.033078Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435094, Sender [0:0:0], Recipient [5:7488003665592281271:2169]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTopicStats
2025-03-31T15:59:05.033119Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTopicStats
2025-03-31T15:59:05.033142Z node 5 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0
2025-03-31T15:59:05.080170Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7488003790146335589:2871], Partition 1, Sender [0:0:0], Recipient [5:7488003790146335669:2879], Cookie: 0
2025-03-31T15:59:05.080240Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7488003790146335669:2879]: NKikimr::TEvPQ::TEvUpdateAvailableSize
2025-03-31T15:59:05.080260Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize
2025-03-31T15:59:05.080304Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff
2025-03-31T15:59:05.080381Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest
2025-03-31T15:59:05.080418Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests.
2025-03-31T15:59:05.080444Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0
2025-03-31T15:59:05.093476Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7488003730016791787:2463], Partition 0, Sender [0:0:0], Recipient [5:7488003730016791849:2467], Cookie: 0
2025-03-31T15:59:05.093570Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7488003730016791849:2467]: NKikimr::TEvPQ::TEvUpdateAvailableSize
2025-03-31T15:59:05.093611Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize
2025-03-31T15:59:05.093670Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff
2025-03-31T15:59:05.093758Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest
2025-03-31T15:59:05.093785Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests.
2025-03-31T15:59:05.093825Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0
------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TPersQueueMirrorer::ValidStartStream [GOOD]
Test command err:
2025-03-31T15:55:41.699734Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488002926930826725:2062];send_to=[0:7307199536658146131:7762515];
2025-03-31T15:55:41.699957Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001904/r3tmp/tmpfiCq51/pdisk_1.dat
2025-03-31T15:55:42.036615Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created
2025-03-31T15:55:42.356328Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T15:55:42.368498Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T15:55:42.369171Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T15:55:42.390203Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 24685, node 1
2025-03-31T15:55:43.034860Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/0l5c/001904/r3tmp/yandexSuvWWh.tmp
2025-03-31T15:55:43.034887Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/0l5c/001904/r3tmp/yandexSuvWWh.tmp
2025-03-31T15:55:43.035056Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/0l5c/001904/r3tmp/yandexSuvWWh.tmp
2025-03-31T15:55:43.035191Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
2025-03-31T15:55:43.130987Z INFO: TTestServer started on Port 17764 GrpcPort 24685
TClient is connected to server localhost:17764
2025-03-31T15:55:46.727613Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488002926930826725:2062];send_to=[0:7307199536658146131:7762515];
2025-03-31T15:55:46.728149Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
PQClient connected to localhost:24685
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-31T15:55:47.851605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
waiting...
2025-03-31T15:55:47.955808Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480
2025-03-31T15:55:48.066830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
waiting...
2025-03-31T15:55:49.623498Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480
waiting...
2025-03-31T15:55:57.347601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs
2025-03-31T15:55:57.347631Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T15:56:00.461138Z node 1 :KQP_PROXY ERROR: TraceId: "01jqpen3nn5rbnt7w7ty7adw6k", Request deadline has expired for 5.735033s seconds
2025-03-31T15:56:00.462160Z node 1 :KQP_PROXY ERROR: TraceId: "01jqpen8n88m5z5s82jbw9xd5k", Request deadline has expired for 0.630071s seconds
2025-03-31T15:56:00.650055Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003008535206223:2362], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T15:56:00.650155Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T15:56:00.658496Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003008535206253:2367], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T15:56:00.726567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480
2025-03-31T15:56:01.324150Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488003008535206255:2368], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking }
2025-03-31T15:56:01.392819Z node 1 :TX_PROXY ERROR: Actor# [1:7488003012830173618:2494] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-31T15:56:02.771504Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488003012830173633:2375], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003
2025-03-31T15:56:02.790981Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTk0NmM4MjktZWIxY2I1ZGYtMzllYmJjYzItOWE1OTY2Mzc=, ActorId: [1:7488003008535206221:2361], ActorState: ExecuteState, TraceId: 01jqpene2cb2zqef37fbzg0b9c, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id:
2025-03-31T15:56:02.794029Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 }
2025-03-31T15:56:02.804262Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-31T15:56:02.913310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-31T15:56:03.066168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
=== Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000);
=== CheckClustersList. Subcribe to ClusterTracker from [1:7488003021420108526:2679]
=== CheckClustersList. Ok
2025-03-31T15:56:08.430511Z :TopicSplitMerge INFO: TTopicSdkTestSetup started
2025-03-31T15:56:08.552000Z node 1 :PQ_READ_PROXY DEBUG: new create topic request
2025-03-31T15:56:08.556053Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7488003042894945189:2779], Recipient [1:7488002931225794438:2189]: NKikimr::TEvTabletPipe::TEvServerConnected
2025-03-31T15:56:08.556099Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected
2025-03-31T15:56:08.556109Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480
2025-03-31T15:56:08.556146Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7488003042894945185:2776], Recipient [1:7488002931225794438:2189]: {TEvModifySchemeTransaction txid# 281474976710671 TabletId# 72057594046644480}
2025-03-31T15:56:08.556158Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction
2025-03-31T15:56:08.735678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 10 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976710671 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480
2025-03-31T15:56:08.738718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-03-31T15:56:08.740427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: test-topic, child id: [OwnerId: 72057594046644480, LocalPathId: 13], at schemeshard: 72057594046644480
2025-03-31T15:56:08.740472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 0
2025-03-31T15:56:08.740744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard crea ...
partition 0(assignId:1) initDone 0 event { CmdGetClientOffsetResult { Offset: 0 EndOffset: 10 WriteTimestampMS: 1743436739049 CreateTimestampMS: 1743436739048 SizeLag: 1340 WriteTimestampEstimateMS: 1743436739287 } Cookie: 18446744073709551615 }
2025-03-31T15:58:59.335144Z node 7 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_7_2_16832651762919123956_v1 INIT DONE TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 10 readOffset 0 committedOffset 0
2025-03-31T15:58:59.335206Z node 7 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_7_2_16832651762919123956_v1 sending to client partition status
2025-03-31T15:58:59.339450Z :INFO: [] [] [33e7f0d0-5035aa18-8238466c-e589dae] [] Confirm partition stream create. Partition stream id: 1. Cluster: "-". Topic: "/topic1". Partition: 0. Read offset: 5
2025-03-31T15:58:59.339942Z node 7 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_7_2_16832651762919123956_v1 grpc read done: success# 1, data# { start_partition_session_response { partition_session_id: 1 read_offset: 5 } }
2025-03-31T15:58:59.340092Z node 7 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_7_2_16832651762919123956_v1 got StartRead from client: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 5, commitOffset# (empty maybe)
2025-03-31T15:58:59.340141Z node 7 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_7_2_16832651762919123956_v1 Start reading TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 10 readOffset 0 committedOffset 0 clientCommitOffset (empty maybe) clientReadOffset 5
2025-03-31T15:58:59.340168Z node 7 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_7_2_16832651762919123956_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) ready for read with readOffset 5 endOffset 10
2025-03-31T15:58:59.340222Z node 7 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_7_2_16832651762919123956_v1 partition ready for read: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 5, endOffset# 10, WTime# 1743436739049, sizeLag# 1340
2025-03-31T15:58:59.340239Z node 7 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_7_2_16832651762919123956_v1TEvPartitionReady. Aval parts: 1
2025-03-31T15:58:59.340280Z node 7 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_7_2_16832651762919123956_v1 performing read request: guid# f4fab070-26ba4d0e-6bc65ff0-de8d70e7, from# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), count# 6, size# 1608, partitionsAsked# 1, maxTimeLag# 0ms
2025-03-31T15:58:59.340358Z node 7 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_7_2_16832651762919123956_v1 READ FROM TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1)maxCount 6 maxSize 1608 maxTimeLagMs 0 readTimestampMs 0 readOffset 5 EndOffset 10 ClientCommitOffset 0 committedOffset 0 Guid f4fab070-26ba4d0e-6bc65ff0-de8d70e7
2025-03-31T15:58:59.340832Z node 8 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--topic1' requestId:
2025-03-31T15:58:59.340878Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--topic1' partition 0
2025-03-31T15:58:59.340990Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 3 Topic 'rt3.dc1--topic1' partition 0 user user offset 5 count 6 size 1608 endOffset 10 max time lag 0ms effective offset 5
2025-03-31T15:58:59.341017Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 3 added 0 blobs, size 0 count 0 last offset 5, current partition end offset: 10
2025-03-31T15:58:59.341156Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Reading cookie 3. All data is from uncompacted head.
2025-03-31T15:58:59.341177Z node 8 :PERSQUEUE DEBUG: FormAnswer for 0 blobs
2025-03-31T15:58:59.341316Z node 8 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 0 messageNo: 0 requestId: cookie: 5
2025-03-31T15:58:59.342350Z node 7 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_7_2_16832651762919123956_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 10 Result { Offset: 5 Data: "... 94 bytes ..." SourceId: "\000src-id-test" SeqNo: 6 WriteTimestampMS: 1743436739191 CreateTimestampMS: 1743436739166 UncompressedSize: 10 PartitionKey: "" ExplicitHash: "" } Result { Offset: 6 Data: "... 94 bytes ..." SourceId: "\000src-id-test" SeqNo: 7 WriteTimestampMS: 1743436739232 CreateTimestampMS: 1743436739166 UncompressedSize: 10 PartitionKey: "" ExplicitHash: "" } Result { Offset: 7 Data: "... 94 bytes ..." SourceId: "\000src-id-test" SeqNo: 8 WriteTimestampMS: 1743436739233 CreateTimestampMS: 1743436739166 UncompressedSize: 10 PartitionKey: "" ExplicitHash: "" } Result { Offset: 8 Data: "... 94 bytes ..." SourceId: "\000src-id-test" SeqNo: 9 WriteTimestampMS: 1743436739233 CreateTimestampMS: 1743436739166 UncompressedSize: 10 PartitionKey: "" ExplicitHash: "" } Result { Offset: 9 Data: "... 94 bytes ..." SourceId: "\000src-id-test" SeqNo: 10 WriteTimestampMS: 1743436739233 CreateTimestampMS: 1743436739166 UncompressedSize: 10 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 18446744073709551581 RealReadOffset: 9 WaitQuotaTimeMs: 0 EndOffset: 10 StartOffset: 0 } Cookie: 5 }
2025-03-31T15:58:59.342593Z node 7 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_7_2_16832651762919123956_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset10
2025-03-31T15:58:59.342638Z node 7 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_7_2_16832651762919123956_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 10 ReadOffset 10 ReadGuid f4fab070-26ba4d0e-6bc65ff0-de8d70e7 has messages 1
2025-03-31T15:58:59.342754Z node 7 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_7_2_16832651762919123956_v1 read done: guid# f4fab070-26ba4d0e-6bc65ff0-de8d70e7, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), size# 673
2025-03-31T15:58:59.342779Z node 7 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_7_2_16832651762919123956_v1 response to read: guid# f4fab070-26ba4d0e-6bc65ff0-de8d70e7
2025-03-31T15:58:59.342986Z node 7 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_7_2_16832651762919123956_v1 Process answer. Aval parts: 0
2025-03-31T15:58:59.346888Z :DEBUG: [] [] [33e7f0d0-5035aa18-8238466c-e589dae] [] Got ReadResponse, serverBytesSize = 673, now ReadSizeBudget = 0, ReadSizeServerDelta = 52428127
2025-03-31T15:58:59.347051Z :DEBUG: [] [] [33e7f0d0-5035aa18-8238466c-e589dae] [] In ContinueReadingDataImpl, ReadSizeBudget = 0, ReadSizeServerDelta = 52428127
2025-03-31T15:58:59.351235Z :DEBUG: [] Decompression task done. Partition/PartitionSessionId: 1 (5-9)
2025-03-31T15:58:59.351299Z :DEBUG: [] [] [33e7f0d0-5035aa18-8238466c-e589dae] [] Returning serverBytesSize = 673 to budget
2025-03-31T15:58:59.351340Z :DEBUG: [] [] [33e7f0d0-5035aa18-8238466c-e589dae] [] In ContinueReadingDataImpl, ReadSizeBudget = 673, ReadSizeServerDelta = 52428127
2025-03-31T15:58:59.351641Z :DEBUG: [] [] [33e7f0d0-5035aa18-8238466c-e589dae] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800
2025-03-31T15:58:59.355194Z :DEBUG: [] Take Data. Partition 0. Read: {0, 0} (5-5)
2025-03-31T15:58:59.355276Z :DEBUG: [] Take Data. Partition 0. Read: {1, 0} (6-6)
2025-03-31T15:58:59.355319Z :DEBUG: [] Take Data. Partition 0. Read: {2, 0} (7-7)
2025-03-31T15:58:59.355354Z :DEBUG: [] Take Data. Partition 0. Read: {2, 1} (8-8)
2025-03-31T15:58:59.355396Z :DEBUG: [] Take Data. Partition 0. Read: {2, 2} (9-9)
2025-03-31T15:58:59.355455Z :DEBUG: [] [] [33e7f0d0-5035aa18-8238466c-e589dae] [] The application data is transferred to the client. Number of messages 5, size 115 bytes
2025-03-31T15:58:59.355522Z :DEBUG: [] [] [33e7f0d0-5035aa18-8238466c-e589dae] [] Returning serverBytesSize = 0 to budget
2025-03-31T15:58:59.355692Z :INFO: [] [] [33e7f0d0-5035aa18-8238466c-e589dae] Closing read session. Close timeout: 0.000000s
2025-03-31T15:58:59.355743Z :INFO: [] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:/topic1:0:1:9:0
2025-03-31T15:58:59.355793Z :INFO: [] [] [33e7f0d0-5035aa18-8238466c-e589dae] Counters: { Errors: 0 CurrentSessionLifetimeMs: 87 BytesRead: 115 MessagesRead: 5 BytesReadCompressed: 115 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 }
2025-03-31T15:58:59.355911Z :NOTICE: [] [] [33e7f0d0-5035aa18-8238466c-e589dae] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " }
2025-03-31T15:58:59.355959Z :DEBUG: [] [] [33e7f0d0-5035aa18-8238466c-e589dae] [] Abort session to cluster
2025-03-31T15:58:59.356454Z :NOTICE: [] [] [33e7f0d0-5035aa18-8238466c-e589dae] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " }
2025-03-31T15:58:59.357154Z node 7 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_7_2_16832651762919123956_v1 grpc read done: success# 1, data# { read_request { bytes_size: 673 } }
2025-03-31T15:58:59.357341Z node 7 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_7_2_16832651762919123956_v1 got read request: guid# 41b548ae-94d295bc-f9ed0cd8-a6e5dfd
2025-03-31T15:58:59.364566Z :DEBUG: [] MessageGroupId [src-id-test] SessionId [src-id-test|a5deb071-8fabb301-2631ce-33d8e0e9_0] Write session: destroy
2025-03-31T15:58:59.370258Z node 7 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_7_2_16832651762919123956_v1 grpc read done: success# 0, data# { }
2025-03-31T15:58:59.370292Z node 7 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_7_2_16832651762919123956_v1 grpc read failed
2025-03-31T15:58:59.370324Z node 7 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_7_2_16832651762919123956_v1 grpc closed
2025-03-31T15:58:59.370375Z node 7 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_7_2_16832651762919123956_v1 is DEAD
2025-03-31T15:58:59.371840Z node 7 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--topic1] pipe [7:7488003780855107325:2521] disconnected; active server actors: 1
2025-03-31T15:58:59.371897Z node 7 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--topic1] pipe [7:7488003780855107325:2521] client user disconnected session shared/user_7_2_16832651762919123956_v1
2025-03-31T15:58:59.375346Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_7_2_16832651762919123956_v1
2025-03-31T15:58:59.375407Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [7:7488003780855107328:2524] destroyed
2025-03-31T15:58:59.375469Z node 8 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_7_2_16832651762919123956_v1
2025-03-31T15:59:00.419223Z node 7 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs
2025-03-31T15:59:00.419269Z node 7 :IMPORT WARN: Table profiles were not loaded
>> DataShardSnapshots::LockedWriteCleanupOnCopyTable+UseSink [GOOD]
>> DataShardSnapshots::LockedWriteCleanupOnCopyTable-UseSink
>> TDynamicNameserverTest::CacheMissSimpleDeadline
|69.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateDropSolomon [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141]
2025-03-31T15:59:27.190041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-03-31T15:59:27.190137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-31T15:59:27.190176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-03-31T15:59:27.190209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-03-31T15:59:27.190256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-03-31T15:59:27.190293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-03-31T15:59:27.190337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-31T15:59:27.190408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-03-31T15:59:27.190680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-03-31T15:59:27.302880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-31T15:59:27.302947Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T15:59:27.308278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-03-31T15:59:27.308393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-03-31T15:59:27.308609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-03-31T15:59:27.314871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-03-31T15:59:27.315137Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-03-31T15:59:27.315883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-03-31T15:59:27.316116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-03-31T15:59:27.317931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-03-31T15:59:27.319225Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-31T15:59:27.319296Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-31T15:59:27.319476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-03-31T15:59:27.319525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-31T15:59:27.319562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-03-31T15:59:27.319709Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-03-31T15:59:27.326999Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062]
2025-03-31T15:59:27.494250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-03-31T15:59:27.494490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:27.494723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-03-31T15:59:27.494973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-03-31T15:59:27.495032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:27.498872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-03-31T15:59:27.499047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-03-31T15:59:27.499295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:27.499353Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-03-31T15:59:27.499401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-03-31T15:59:27.499433Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-03-31T15:59:27.501709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:27.501784Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-03-31T15:59:27.501822Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-03-31T15:59:27.503716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:27.503767Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:27.503835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-03-31T15:59:27.503897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-03-31T15:59:27.508204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-03-31T15:59:27.516342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-03-31T15:59:27.516615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-03-31T15:59:27.517846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-03-31T15:59:27.518016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-03-31T15:59:27.518093Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-03-31T15:59:27.518449Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-03-31T15:59:27.518505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-03-31T15:59:27.518705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-03-31T15:59:27.518828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-03-31T15:59:27.521835Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-31T15:59:27.521892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-31T15:59:27.522128Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-31T15:59:27.522175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1
2025-03-31T15:59:27.522456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:27.522503Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-03-31T15:59:27.522602Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-03-31T15:59:27.522635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-31T15:59:27.522676Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-03-31T15:59:27.522709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-31T15:59:27.522747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-03-31T15:59:27.522795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-31T15:59:27.522854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-03-31T15:59:27.522887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-03-31T15:59:27.522970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-03-31T15:59:27.523010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-03-31T15:59:27.523039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-03-31T15:59:27.525270Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-03-31T15:59:27.525407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-03-31T15:59:27.525458Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ...
857051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-31T15:59:28.857182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 2]
2025-03-31T15:59:28.857380Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-31T15:59:28.857438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 104, path id: 1
2025-03-31T15:59:28.857483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 104, path id: 2
2025-03-31T15:59:28.857830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944
2025-03-31T15:59:28.857886Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 104:0 ProgressState
2025-03-31T15:59:28.857982Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1
2025-03-31T15:59:28.858016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1
2025-03-31T15:59:28.858055Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1
2025-03-31T15:59:28.858089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1
2025-03-31T15:59:28.858128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false
2025-03-31T15:59:28.858172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1
2025-03-31T15:59:28.858242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0
2025-03-31T15:59:28.858410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0
2025-03-31T15:59:28.858575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5
2025-03-31T15:59:28.858638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 104, publications: 2, subscribers: 0
2025-03-31T15:59:28.858678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 1], 7
2025-03-31T15:59:28.858713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615
2025-03-31T15:59:28.859636Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104
2025-03-31T15:59:28.859735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104
2025-03-31T15:59:28.859783Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104
2025-03-31T15:59:28.859827Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7
2025-03-31T15:59:28.859884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-03-31T15:59:28.860554Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104
2025-03-31T15:59:28.860638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104
2025-03-31T15:59:28.860672Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104
2025-03-31T15:59:28.860708Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615
2025-03-31T15:59:28.860737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4
2025-03-31T15:59:28.860801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0
2025-03-31T15:59:28.863156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944
2025-03-31T15:59:28.863213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944
2025-03-31T15:59:28.864629Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546
2025-03-31T15:59:28.880610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944
2025-03-31T15:59:28.880997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3
Forgetting tablet 72075186233409546
2025-03-31T15:59:28.881705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104
2025-03-31T15:59:28.881861Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547
Forgetting tablet 72075186233409547
2025-03-31T15:59:28.883212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944
2025-03-31T15:59:28.883492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2
2025-03-31T15:59:28.885721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944
2025-03-31T15:59:28.885814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944
2025-03-31T15:59:28.886072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1
2025-03-31T15:59:28.886732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944
2025-03-31T15:59:28.886799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944
2025-03-31T15:59:28.886877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-03-31T15:59:28.887784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104
2025-03-31T15:59:28.890045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1
2025-03-31T15:59:28.890102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546
2025-03-31T15:59:28.890292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2
2025-03-31T15:59:28.890356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547
2025-03-31T15:59:28.892081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944
2025-03-31T15:59:28.892206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944
TestModificationResult got TxId: 104, wait until txId: 104
TestWaitNotification wait txId: 104
2025-03-31T15:59:28.892614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion
2025-03-31T15:59:28.892664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104
2025-03-31T15:59:28.893240Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944
2025-03-31T15:59:28.893377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult
2025-03-31T15:59:28.893423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:2094:3699]
TestWaitNotification: OK eventTxId 104
2025-03-31T15:59:28.902283Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/Solomon" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-03-31T15:59:28.902574Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/Solomon" took 276us result status StatusPathDoesNotExist
2025-03-31T15:59:28.902783Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/Solomon\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0/Solomon" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
2025-03-31T15:59:28.903768Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-03-31T15:59:28.903934Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 174us result status StatusPathDoesNotExist
2025-03-31T15:59:28.904062Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
>> TNodeBrokerTest::RegistrationPipeliningNodeName
>> TNodeBrokerTest::TestRandomActions
>> TNodeBrokerTest::UpdateEpochPipelining
>> TDataShardLocksTest::MvccTestWriteBreaksLocks [GOOD]
>> TDataShardLocksTest::Points_ManyTx
>> TDataShardLocksTest::Points_OneTx [GOOD]
>> TDataShardLocksTest::Points_ManyTx_RemoveAll
>> TDataShardLocksTest::MvccTestOooTxDoesntBreakPrecedingReadersLocks [GOOD]
>> TDataShardLocksTest::MvccTestOutdatedLocksRemove [GOOD]
>> TDataShardLocksTest::MvccTestBreakEdge [GOOD]
>> TDataShardLocksTest::MvccTestAlreadyBrokenLocks [GOOD]
>> TNodeBrokerTest::NodeNameReuseRestartWithHostChanges
>> TDynamicNameserverTest::CacheMissSimpleDeadline [GOOD]
|69.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest
|69.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest
>> TDynamicNameserverTest::CacheMissSameDeadline
>> TNodeBrokerTest::NodeNameWithDifferentTenants
>> TDynamicNameserverTest::CacheMissPipeDisconnect
|69.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest
|69.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest
>> TDataShardLocksTest::Points_ManyTx [GOOD]
>> TDataShardLocksTest::Points_ManyTx_BreakAll
>> TDataShardLocksTest::Points_ManyTx_RemoveAll [GOOD]
>> TDataShardLocksTest::UseLocksCache
>> TSlotIndexesPoolTest::Init [GOOD]
>> TNodeBrokerTest::TestListNodesEpochDeltas
>> TSlotIndexesPoolTest::Basic [GOOD]
|69.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_locks/unittest >> TDataShardLocksTest::MvccTestAlreadyBrokenLocks [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissSimpleDeadline [GOOD]
Test command err:
2025-03-31T15:59:30.191331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-31T15:59:30.191406Z node 1 :IMPORT WARN: Table profiles were not loaded
... waiting for cache miss
... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR
... waiting for cache miss (done)
|69.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest
|69.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest
|69.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest
>> Yq_1::Basic_EmptyDict [GOOD]
>> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain-EnableSeparateQuotas [GOOD]
>> TDataShardLocksTest::Points_ManyTx_BreakAll [GOOD]
>> TDataShardLocksTest::Points_ManyTx_BreakHalf_RemoveHalf
|69.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest
>> TDynamicNameserverTest::CacheMissSameDeadline [GOOD]
>> TNodeBrokerTest::NoEffectBeforeCommit [GOOD]
|69.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TSlotIndexesPoolTest::Basic [GOOD]
|69.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest
|69.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TSlotIndexesPoolTest::Init [GOOD]
>> TDynamicNameserverTest::CacheMissPipeDisconnect [GOOD]
>> TDataShardLocksTest::Points_ManyTx_BreakHalf_RemoveHalf [GOOD]
>> TNodeBrokerTest::ExtendLeasePipelining [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-51 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-52
>> TNodeBrokerTest::NodeNameExpiration [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain-EnableSeparateQuotas [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141]
2025-03-31T15:59:17.083475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-03-31T15:59:17.083590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-31T15:59:17.083638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s
2025-03-31T15:59:17.083689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-03-31T15:59:17.083740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-03-31T15:59:17.083773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-03-31T15:59:17.083832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-31T15:59:17.083930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-03-31T15:59:17.084271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-03-31T15:59:17.179551Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-31T15:59:17.179617Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T15:59:17.188596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-03-31T15:59:17.188711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-03-31T15:59:17.188964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-03-31T15:59:17.200053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-03-31T15:59:17.200407Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-03-31T15:59:17.201067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-03-31T15:59:17.201303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-03-31T15:59:17.203426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-03-31T15:59:17.204661Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-31T15:59:17.204741Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-31T15:59:17.204914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-03-31T15:59:17.204979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-31T15:59:17.205021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-03-31T15:59:17.205231Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-03-31T15:59:17.212666Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062]
2025-03-31T15:59:17.333265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-03-31T15:59:17.333460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:17.333644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-03-31T15:59:17.333870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-03-31T15:59:17.333914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:17.335818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-03-31T15:59:17.335932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-03-31T15:59:17.336089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:17.336140Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-03-31T15:59:17.336178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-03-31T15:59:17.336206Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-03-31T15:59:17.337884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:17.337928Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-03-31T15:59:17.337954Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-03-31T15:59:17.339370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:17.339405Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:17.339458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-03-31T15:59:17.339516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-03-31T15:59:17.342304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-03-31T15:59:17.343915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-03-31T15:59:17.344062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-03-31T15:59:17.344891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-03-31T15:59:17.345013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-03-31T15:59:17.345076Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-03-31T15:59:17.345292Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-03-31T15:59:17.345357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-03-31T15:59:17.345499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-03-31T15:59:17.345564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-03-31T15:59:17.347287Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-31T15:59:17.347324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-31T15:59:17.347481Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-31T15:59:17.347511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1
2025-03-31T15:59:17.347716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T15:59:17.347760Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-03-31T15:59:17.347833Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-03-31T15:59:17.347862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-31T15:59:17.347897Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-03-31T15:59:17.347920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-31T15:59:17.347960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-03-31T15:59:17.347990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-31T15:59:17.348032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-03-31T15:59:17.348053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-03-31T15:59:17.348104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-03-31T15:59:17.348146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-03-31T15:59:17.348169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-03-31T15:59:17.349599Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-03-31T15:59:17.349685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-03-31T15:59:17.349711Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, coun ...
: Source { RawX1: 520 RawX2: 4294969763 } Origin: 72075186233409549 State: 5 TxId: 104 Step: 0 Generation: 2 2025-03-31T15:59:30.870890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409549, partId: 0 2025-03-31T15:59:30.871032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72075186233409546, message: Source { RawX1: 520 RawX2: 4294969763 } Origin: 72075186233409549 State: 5 TxId: 104 Step: 0 Generation: 2 2025-03-31T15:59:30.871111Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 104:0 HandleReply TEvDataShard::TEvSchemaChanged, save it, at schemeshard: 72075186233409546 2025-03-31T15:59:30.871773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2025-03-31T15:59:30.871847Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 104:0 ProgressState, operation type: TxDropTable, at tablet# 72075186233409546 2025-03-31T15:59:30.871903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 104:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2025-03-31T15:59:30.871948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 104, done: 0, blocked: 1 2025-03-31T15:59:30.872045Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 104:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 104 Name: RenamePathBarrier }, at tablet# 72075186233409546 2025-03-31T15:59:30.872184Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 137 -> 129 2025-03-31T15:59:30.872334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 4 2025-03-31T15:59:30.872403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 3 2025-03-31T15:59:30.877627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72075186233409546 2025-03-31T15:59:30.879165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72075186233409546 2025-03-31T15:59:30.879596Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-03-31T15:59:30.879643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-03-31T15:59:30.879797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 2] 2025-03-31T15:59:30.880009Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-03-31T15:59:30.880063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:445:2397], at schemeshard: 72075186233409546, txId: 104, path id: 1 2025-03-31T15:59:30.880109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:445:2397], at schemeshard: 72075186233409546, txId: 104, path id: 2 2025-03-31T15:59:30.880661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2025-03-31T15:59:30.880727Z node 1 
:FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 104:0 ProgressState at tablet: 72075186233409546 2025-03-31T15:59:30.880831Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 104:0, at schemeshard: 72075186233409546 2025-03-31T15:59:30.880873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 104:0, datashard: 72075186233409549, at schemeshard: 72075186233409546 2025-03-31T15:59:30.880925Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 129 -> 240 2025-03-31T15:59:30.881952Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72075186233409546, cookie: 104 2025-03-31T15:59:30.882041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72075186233409546, cookie: 104 2025-03-31T15:59:30.882076Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2025-03-31T15:59:30.882110Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 1], version: 11 2025-03-31T15:59:30.882148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 5 2025-03-31T15:59:30.883058Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2025-03-31T15:59:30.883172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2025-03-31T15:59:30.883204Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2025-03-31T15:59:30.883233Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 2], version: 18446744073709551615 2025-03-31T15:59:30.883265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 4 2025-03-31T15:59:30.883331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-03-31T15:59:30.886456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2025-03-31T15:59:30.886516Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 104:0 ProgressState, at schemeshard: 72075186233409546 2025-03-31T15:59:30.886865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 3 2025-03-31T15:59:30.887061Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-03-31T15:59:30.887132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-31T15:59:30.887173Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-03-31T15:59:30.887213Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-31T15:59:30.887253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2025-03-31T15:59:30.887321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:550:2489] message: TxId: 104 2025-03-31T15:59:30.887365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-31T15:59:30.887408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-03-31T15:59:30.887438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-03-31T15:59:30.887536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 2 2025-03-31T15:59:30.888051Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-03-31T15:59:30.888083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 0, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-03-31T15:59:30.889033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2025-03-31T15:59:30.889154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2025-03-31T15:59:30.890348Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-03-31T15:59:30.890406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:445:2397], at schemeshard: 72075186233409546, txId: 0, path id: 1 2025-03-31T15:59:30.890817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-03-31T15:59:30.890858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:1431:3340] 2025-03-31T15:59:30.891312Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72075186233409546, cookie: 0 TestWaitNotification: OK eventTxId 104 2025-03-31T15:59:30.894901Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409546 2025-03-31T15:59:30.895163Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186233409546 describe path "/MyRoot/SomeDatabase" took 294us result status StatusSuccess 2025-03-31T15:59:30.895605Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "MyRoot/SomeDatabase" PathId: 1 SchemeshardId: 72075186233409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 12 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 12 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 2 SubDomainStateVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 
PathId_Depricated: 2 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "fast" Kind: "fast_kind" } StoragePools { Name: "large" Kind: "large_kind" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "large_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "fast_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 2800 data_size_soft_quota: 2200 storage_quotas { unit_kind: "fast_kind" data_size_hard_quota: 600 data_size_soft_quota: 500 } storage_quotas { unit_kind: "large_kind" data_size_hard_quota: 2200 data_size_soft_quota: 1700 } } SecurityState { Audience: "/MyRoot/SomeDatabase" } } } PathId: 1 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NoEffectBeforeCommit [GOOD] Test command err: 2025-03-31T15:59:29.819860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:59:29.819957Z node 1 :IMPORT WARN: Table profiles were not loaded ... waiting for commit ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR ... waiting for commit (done) ... unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissSameDeadline [GOOD] Test command err: 2025-03-31T15:59:31.137828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:59:31.137914Z node 1 :IMPORT WARN: Table profiles were not loaded ... waiting for cache miss ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR ... waiting for cache miss (done) >> TNodeBrokerTest::RegistrationPipelining >> TLocalTests::TestAddTenant >> TTenantPoolTests::TestSensorsConfigForStaticSlot ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissPipeDisconnect [GOOD] Test command err: 2025-03-31T15:59:31.457174Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:59:31.457277Z node 1 :IMPORT WARN: Table profiles were not loaded ... waiting for cache miss ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR ... 
waiting for cache miss (done) >> TNodeBrokerTest::DoNotReuseDynnodeIdsBelowMinDynamicNodeId [GOOD] >> TNodeBrokerTest::LoadStateMoveEpoch [GOOD] |69.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_locks/unittest >> TDataShardLocksTest::Points_ManyTx_BreakHalf_RemoveHalf [GOOD] |69.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest |69.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ExtendLeasePipelining [GOOD] Test command err: 2025-03-31T15:59:29.173795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:59:29.173880Z node 1 :IMPORT WARN: Table profiles were not loaded ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR ... unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR >> TNodeBrokerTest::RegistrationPipeliningNodeName [GOOD] >> TNodeBrokerTest::MinDynamicNodeIdShifted [GOOD] |69.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodeNameExpiration [GOOD] Test command err: 2025-03-31T15:59:28.624794Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:59:28.624883Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:28.644967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000001 FAKE_COORDINATOR: Erasing txId 101 >> TDynamicNameserverTest::CacheMissNoDeadline |69.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TSlotIndexesPoolTest::Ranges [GOOD] >> TNodeBrokerTest::ExtendLeaseRestartRace |69.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::DoNotReuseDynnodeIdsBelowMinDynamicNodeId [GOOD] Test command err: 2025-03-31T15:59:28.139066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:59:28.139161Z node 1 :IMPORT WARN: Table profiles were not loaded ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::RegistrationPipeliningNodeName [GOOD] Test command err: 2025-03-31T15:59:30.882358Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:59:30.882462Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:30.905792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000001 FAKE_COORDINATOR: Erasing txId 101 2025-03-31T15:59:30.947733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 102:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: 
Add transaction: 102 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000002 FAKE_COORDINATOR: Erasing txId 102 ... waiting for commit ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR ... waiting for commit (done) ... unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::LoadStateMoveEpoch [GOOD] Test command err: 2025-03-31T15:59:29.835119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:59:29.835209Z node 1 :IMPORT WARN: Table profiles were not loaded >> TLocalTests::TestAddTenant [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::Basic_EmptyDict [GOOD] Test command err: 2025-03-31T15:58:11.607157Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488003573097530620:2206];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:58:11.611860Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0331 15:58:12.512187085 40239 dns_resolver.cc:162] no server name supplied in dns URI E0331 15:58:12.512327335 40239 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-03-31T15:58:12.616471Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:31972: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:31972 } ] 2025-03-31T15:58:12.715263Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:13.703228Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:322: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:31972: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:31972 2025-03-31T15:58:13.791339Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:13.795618Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:31972: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:31972 } ] 2025-03-31T15:58:14.802502Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:15.257863Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:31972: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:31972 } ] 2025-03-31T15:58:15.806062Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:16.543793Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488003573097530620:2206];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:58:16.543854Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:58:16.807452Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; E0331 15:58:17.523711017 40570 dns_resolver.cc:162] no server name supplied in dns URI E0331 15:58:17.523868794 40570 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-03-31T15:58:17.551491Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:17.627509Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:31972: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:31972 } ] 2025-03-31T15:58:17.643473Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:322: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:31972: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:31972 2025-03-31T15:58:17.807939Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:18.556993Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:18.811889Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:19.559577Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d06/r3tmp/tmpSKRuBF/pdisk_1.dat 2025-03-31T15:58:19.922568Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:20.359729Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:58:20.359840Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:58:20.392356Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31972, node 1 2025-03-31T15:58:20.757328Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:58:20.764674Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:58:20.764693Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:58:20.764701Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:58:20.764857Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6405 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:58:21.174494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:58:21.983566Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/nodes". 
Create session OK 2025-03-31T15:58:21.983611Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-03-31T15:58:21.983621Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-03-31T15:58:21.993072Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenants". Create session OK 2025-03-31T15:58:21.993110Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-03-31T15:58:21.993119Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-03-31T15:58:22.003393Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenant_acks". Create session OK 2025-03-31T15:58:22.003413Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-03-31T15:58:22.003426Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-03-31T15:58:22.021765Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/result_sets". Create session OK 2025-03-31T15:58:22.021784Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-03-31T15:58:22.021791Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-03-31T15:58:22.033347Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-31T15:58:22.033480Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/idempotency_keys". Create session OK 2025-03-31T15:58:22.033496Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-03-31T15:58:22.033504Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-03-31T15:58:22.035527Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/jobs". Create session OK 2025-03-31T15:58:22.035542Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-03-31T15:58:22.035549Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-03-31T15:58:22.070467Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-31T15:58:22.074081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T15:58:22.075398Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/bindings". Create session OK 2025-03-31T15:58:22.075422Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-03-31T15:58:22.075428Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-03-31T15:58:22.076428Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/connections". Create session OK 2025-03-31T15:58:22.076441Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-03-31T15:58:22.076447Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-03-31T15:58:22.078307Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/pending_small". Create session OK 2025-03-31T15:58:22.078325Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-03-31T15:58:22.078332Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create tab ... 
ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.701402Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.701502Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.701569Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.701627Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.701685Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.701776Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.701839Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.701905Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.701966Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.702031Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.702088Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.702145Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.702201Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.702257Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.702318Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.702383Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.702439Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.702496Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.702553Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.702611Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.702670Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.702735Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.702846Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.702964Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.703036Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.703154Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.703262Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.703380Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.703511Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.703638Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.703747Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.703851Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.703964Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.704057Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.704147Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.704233Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.704333Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.704420Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.704512Z node 7 :FQ_QUOTA_SERVICE 
ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.704599Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.704687Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.704774Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.704904Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.704999Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.705106Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.705239Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.705350Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.705458Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.705545Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.705632Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.705719Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.705828Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.705921Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.706011Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.706098Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.706206Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.706327Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.706430Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.706512Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.706602Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.706687Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.706775Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.706865Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.706958Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.707045Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.707154Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.707283Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.707396Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.707498Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.707599Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.707698Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.707810Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.707913Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.708013Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.708112Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.708216Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.708354Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.708461Z node 7 :FQ_QUOTA_SERVICE 
ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.708563Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.708661Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.708763Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.708861Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.708998Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.709147Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.709271Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.709391Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.709489Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.709633Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.713630Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.713915Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.714033Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.714138Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.714282Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.714495Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.714663Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.714829Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.715016Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.715171Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.715335Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.715452Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.715552Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.715667Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.715765Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.715860Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.715953Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.716050Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.716186Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.716282Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.716384Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.716480Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.716608Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.716715Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.716809Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.716902Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.717000Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.717099Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.717194Z node 7 :FQ_QUOTA_SERVICE 
ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.717308Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.718219Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.718370Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.718471Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.718961Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.719395Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.719545Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.719665Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.719777Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.719879Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.720129Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.720293Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-31T15:59:29.720434Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: >> TNodeBrokerTest::NodeNameWithDifferentTenants [GOOD] >> TNodeBrokerTest::NodeNameReuseRestart >> TNodeBrokerTest::SingleDomainModeBannedIds >> TNodeBrokerTest::ConfigPipelining |69.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TSlotIndexesPoolTest::Ranges [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::MinDynamicNodeIdShifted [GOOD] Test command err: 2025-03-31T15:59:30.191712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:59:30.191789Z node 1 :IMPORT WARN: Table profiles were not loaded >> TDynamicNameserverTest::CacheMissNoDeadline [GOOD] >> TTenantPoolTests::TestSensorsConfigForStaticSlot [GOOD] |69.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TLocalTests::TestAddTenant [GOOD] Test command err: 2025-03-31T15:59:32.864947Z node 1 :LOCAL ERROR: TDomainLocal(dc-1): Receive TEvDescribeSchemeResult with bad status StatusPathDoesNotExist reason is <> while resolving subdomain dc-1 2025-03-31T15:59:32.865273Z node 1 :LOCAL ERROR: Unknown domain dc-3 >> TDynamicNameserverTest::TestCacheUsage >> TDynamicNameserverTest::CacheMissDifferentDeadlineInverseOrder >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-EnableSeparateQuotas [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodeNameWithDifferentTenants [GOOD] Test command err: 2025-03-31T15:59:31.504949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:59:31.505041Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:31.519379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000001 FAKE_COORDINATOR: Erasing txId 101 2025-03-31T15:59:31.559731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateExtSubDomain, opId: 102:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 102 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000002 FAKE_COORDINATOR: Erasing txId 102 >> TNodeBrokerTest::BasicFunctionality |69.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissNoDeadline [GOOD] Test command err: 2025-03-31T15:59:33.285513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:59:33.285609Z node 1 :IMPORT WARN: Table profiles were not loaded ... waiting for cache miss ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR ... waiting for cache miss (done) ... unblocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR ... unblocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from to NODE_BROKER_ACTOR >> AggregateStatistics::ShouldBeCcorrectProcessingTabletTimeout [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TTenantPoolTests::TestSensorsConfigForStaticSlot [GOOD] Test command err: 2025-03-31T15:59:32.582813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:59:32.582872Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:32.635921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 >> AggregateStatistics::ShouldBeCcorrectProcessingOfLocalTablets [GOOD] >> AggregateStatistics::RootNodeShouldBeInvalidateByTimeout >> TNodeBrokerTest::UpdateEpochPipelining [GOOD] |69.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::RootNodeShouldBeInvalidateByTimeout [GOOD] >> TNodeBrokerTest::RegistrationPipelining [GOOD] |69.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ChildNodesShouldBeInvalidateByTimeout [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-EnableSeparateQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T15:59:20.447004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T15:59:20.447174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:20.447230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, 
StatsMaxExecuteTime# 0.010000s 2025-03-31T15:59:20.447268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T15:59:20.447314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T15:59:20.447365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T15:59:20.447440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:20.447555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T15:59:20.447920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:59:20.542045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:59:20.542103Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:20.547578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:59:20.547672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T15:59:20.547840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T15:59:20.553223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T15:59:20.553477Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T15:59:20.554169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:20.554395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:59:20.556368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:20.557532Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:20.557605Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:20.557937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T15:59:20.557991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:20.558034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T15:59:20.558187Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T15:59:20.565203Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T15:59:20.707473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:59:20.707685Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:20.707899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T15:59:20.708128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T15:59:20.708187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:20.710631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:20.710783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T15:59:20.710980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:20.711039Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T15:59:20.711101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T15:59:20.711140Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T15:59:20.712950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:20.713019Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:59:20.713058Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T15:59:20.714774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:20.714822Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:20.714886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:20.714950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T15:59:20.718864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:59:20.720853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T15:59:20.721038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T15:59:20.722103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, 
transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:20.722241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:20.722307Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:20.722674Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T15:59:20.722727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:20.722891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:59:20.722986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T15:59:20.725404Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:20.725453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:20.725637Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:20.725688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T15:59:20.725904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:20.725952Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T15:59:20.726049Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:20.726083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:20.726138Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:20.726174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:20.726214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T15:59:20.726255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:20.726303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T15:59:20.726337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T15:59:20.726405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:59:20.726445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T15:59:20.726478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T15:59:20.728539Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: 
Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:20.728658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:20.728693Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, coun ... Origin: 72075186233409548 State: 5 TxId: 103 Step: 0 Generation: 2 2025-03-31T15:59:33.739297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409548, partId: 0 2025-03-31T15:59:33.739439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: Source { RawX1: 437 RawX2: 4294969697 } Origin: 72075186233409548 State: 5 TxId: 103 Step: 0 Generation: 2 2025-03-31T15:59:33.739497Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 103:0 HandleReply TEvDataShard::TEvSchemaChanged, save it, at schemeshard: 72057594046678944 2025-03-31T15:59:33.744210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-31T15:59:33.744296Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 103:0 ProgressState, operation type: TxDropTable, at tablet# 72057594046678944 2025-03-31T15:59:33.744366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 103:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2025-03-31T15:59:33.744403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 103, done: 0, blocked: 1 2025-03-31T15:59:33.744490Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 103:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 103 Name: RenamePathBarrier }, at tablet# 72057594046678944 2025-03-31T15:59:33.744619Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 137 -> 129 2025-03-31T15:59:33.744742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-31T15:59:33.744811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-03-31T15:59:33.747985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-31T15:59:33.748243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-31T15:59:33.749680Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:33.749735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T15:59:33.749911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-03-31T15:59:33.750116Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:33.750162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 
72057594046678944, txId: 103, path id: 2 2025-03-31T15:59:33.750210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 103, path id: 3 2025-03-31T15:59:33.750499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-31T15:59:33.750552Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 103:0 ProgressState at tablet: 72057594046678944 2025-03-31T15:59:33.750643Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-31T15:59:33.750680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 103:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2025-03-31T15:59:33.750723Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 129 -> 240 2025-03-31T15:59:33.751908Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 10 PathOwnerId: 72057594046678944, cookie: 103 2025-03-31T15:59:33.752017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 10 PathOwnerId: 72057594046678944, cookie: 103 2025-03-31T15:59:33.752054Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-03-31T15:59:33.752092Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 10 2025-03-31T15:59:33.752132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-03-31T15:59:33.753376Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-03-31T15:59:33.753458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-03-31T15:59:33.753485Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-03-31T15:59:33.753513Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-03-31T15:59:33.753544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-03-31T15:59:33.753617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-03-31T15:59:33.759440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-31T15:59:33.759507Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 103:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:59:33.759870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, 
LocalPathId: 3] was 3 2025-03-31T15:59:33.760058Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-03-31T15:59:33.760093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-31T15:59:33.760140Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-03-31T15:59:33.760171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-31T15:59:33.760208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-03-31T15:59:33.760296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:404:2371] message: TxId: 103 2025-03-31T15:59:33.760340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-31T15:59:33.760378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-03-31T15:59:33.760412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-03-31T15:59:33.760498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-31T15:59:33.761118Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:33.761155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T15:59:33.761675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-03-31T15:59:33.763470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-03-31T15:59:33.764548Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:33.764588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 0, path id: 2 2025-03-31T15:59:33.764766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-03-31T15:59:33.764796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:1345:3271] 2025-03-31T15:59:33.765256Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 11 PathOwnerId: 72057594046678944, cookie: 0 TestWaitNotification: OK eventTxId 103 2025-03-31T15:59:33.768398Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:59:33.768578Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SomeDatabase" took 212us result status StatusSuccess 2025-03-31T15:59:33.768903Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "SomeDatabase" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 
5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 11 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 11 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SubDomainStateVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "fast" Kind: "fast_kind" } StoragePools { Name: "large" Kind: "large_kind" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "large_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "fast_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 2800 data_size_soft_quota: 2200 storage_quotas { unit_kind: "fast_kind" data_size_hard_quota: 600 data_size_soft_quota: 500 } storage_quotas { unit_kind: "large_kind" data_size_hard_quota: 2200 data_size_soft_quota: 1700 } } SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ShouldBeCcorrectProcessingTabletTimeout [GOOD] Test command err: 2025-03-31T15:59:34.205070Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-03-31T15:59:34.212411Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:8:2055], server id = [1:8:2055], tablet id = 1, status = OK 2025-03-31T15:59:34.212742Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:8:2055], path = { OwnerId: 3 LocalId: 3 } 2025-03-31T15:59:34.212846Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 1 2025-03-31T15:59:34.212986Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:9:2056], server id = [1:9:2056], tablet id = 2, status = OK 2025-03-31T15:59:34.213023Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:9:2056], path = { OwnerId: 3 LocalId: 3 } 2025-03-31T15:59:34.213122Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:10:2057], server id = [1:10:2057], tablet id = 3, status = OK 2025-03-31T15:59:34.213217Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:10:2057], path = { OwnerId: 3 LocalId: 3 } 2025-03-31T15:59:34.213277Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:8:2055], server id = [0:0:0], tablet id = 1, status = ERROR 2025-03-31T15:59:34.213296Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-03-31T15:59:34.213328Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:11:2058], server id = [1:11:2058], tablet id = 4, status = OK 2025-03-31T15:59:34.213358Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:11:2058], path = { OwnerId: 3 LocalId: 3 } 2025-03-31T15:59:34.213409Z node 1 
:STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 3 2025-03-31T15:59:34.213486Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:12:2059], server id = [1:12:2059], tablet id = 5, status = OK 2025-03-31T15:59:34.213517Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:12:2059], path = { OwnerId: 3 LocalId: 3 } 2025-03-31T15:59:34.213581Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:13:2060], server id = [1:13:2060], tablet id = 6, status = OK 2025-03-31T15:59:34.213637Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:13:2060], path = { OwnerId: 3 LocalId: 3 } 2025-03-31T15:59:34.213684Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:10:2057], server id = [0:0:0], tablet id = 3, status = ERROR 2025-03-31T15:59:34.213703Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-03-31T15:59:34.213728Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:14:2061], server id = [1:14:2061], tablet id = 7, status = OK 2025-03-31T15:59:34.213758Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:14:2061], path = { OwnerId: 3 LocalId: 3 } 2025-03-31T15:59:34.213797Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 5 2025-03-31T15:59:34.213853Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:12:2059], server id = [0:0:0], tablet id = 5, status = ERROR 2025-03-31T15:59:34.213873Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-03-31T15:59:34.213894Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 7 2025-03-31T15:59:34.213934Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:14:2061], server id = [0:0:0], tablet id = 7, status = ERROR 2025-03-31T15:59:34.213947Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-03-31T15:59:34.224245Z node 1 :STATISTICS DEBUG: Tablet 1 has already been processed 2025-03-31T15:59:34.224312Z node 1 :STATISTICS ERROR: No result was received from the tablet 2 2025-03-31T15:59:34.224337Z node 1 :STATISTICS DEBUG: Tablet 2 is not local. 2025-03-31T15:59:34.224380Z node 1 :STATISTICS DEBUG: Tablet 3 has already been processed 2025-03-31T15:59:34.224446Z node 1 :STATISTICS ERROR: No result was received from the tablet 4 2025-03-31T15:59:34.224464Z node 1 :STATISTICS DEBUG: Tablet 4 is not local. 2025-03-31T15:59:34.224514Z node 1 :STATISTICS DEBUG: Tablet 5 has already been processed 2025-03-31T15:59:34.224535Z node 1 :STATISTICS ERROR: No result was received from the tablet 6 2025-03-31T15:59:34.224549Z node 1 :STATISTICS DEBUG: Tablet 6 is not local. 
2025-03-31T15:59:34.224592Z node 1 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 2025-03-31T15:59:34.224697Z node 1 :STATISTICS DEBUG: Event round 1 is different from the current 0 2025-03-31T15:59:34.224730Z node 1 :STATISTICS DEBUG: Skip TEvStatisticsRequestTimeout 2025-03-31T15:59:34.224790Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:9:2056], server id = [0:0:0], tablet id = 2, status = ERROR 2025-03-31T15:59:34.224815Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-03-31T15:59:34.224843Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:11:2058], server id = [0:0:0], tablet id = 4, status = ERROR 2025-03-31T15:59:34.224857Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-03-31T15:59:34.224879Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:13:2060], server id = [0:0:0], tablet id = 6, status = ERROR 2025-03-31T15:59:34.224894Z node 1 :STATISTICS DEBUG: Skip EvClientConnected >> TNodeBrokerTest::NodeNameReuseRestartWithHostChanges [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasureMirror3 >> AggregateStatistics::ShouldBePings >> TDynamicNameserverTest::CacheMissDifferentDeadlineInverseOrder [GOOD] >> TBlobStorageProxyTest::TestPersistence ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ShouldBeCcorrectProcessingOfLocalTablets [GOOD] Test command err: 2025-03-31T15:59:34.282528Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-03-31T15:59:34.289152Z node 1 :STATISTICS DEBUG: EvClientDestroyed, node id = 1, client id = [1:9:2056], server id = [1:9:2056], tablet id = 2 2025-03-31T15:59:34.289254Z node 1 :STATISTICS DEBUG: Tablet 2 is not local. 2025-03-31T15:59:34.289393Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 3 2025-03-31T15:59:34.290110Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:8:2055], server id = [1:8:2055], tablet id = 1, status = ERROR 2025-03-31T15:59:34.290155Z node 1 :STATISTICS DEBUG: Tablet 1 is not local. 2025-03-31T15:59:34.290252Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:10:2057], server id = [0:0:0], tablet id = 3, status = ERROR 2025-03-31T15:59:34.290272Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-03-31T15:59:34.290329Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:11:2058], server id = [1:11:2058], tablet id = 4, status = ERROR 2025-03-31T15:59:34.290347Z node 1 :STATISTICS DEBUG: Tablet 4 is not local. 2025-03-31T15:59:34.290406Z node 1 :STATISTICS DEBUG: EvClientDestroyed, node id = 1, client id = [1:12:2059], server id = [1:12:2059], tablet id = 5 2025-03-31T15:59:34.290426Z node 1 :STATISTICS DEBUG: Tablet 5 is not local. 2025-03-31T15:59:34.290512Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 6 2025-03-31T15:59:34.290582Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:14:2061], server id = [1:14:2061], tablet id = 7, status = ERROR 2025-03-31T15:59:34.290601Z node 1 :STATISTICS DEBUG: Tablet 7 is not local. 2025-03-31T15:59:34.290637Z node 1 :STATISTICS DEBUG: EvClientDestroyed, node id = 1, client id = [1:15:2062], server id = [1:15:2062], tablet id = 8 2025-03-31T15:59:34.290658Z node 1 :STATISTICS DEBUG: Tablet 8 is not local. 
2025-03-31T15:59:34.290726Z node 1 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 2025-03-31T15:59:34.290809Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:13:2060], server id = [0:0:0], tablet id = 6, status = ERROR 2025-03-31T15:59:34.290829Z node 1 :STATISTICS DEBUG: Skip EvClientConnected >> TBlobStorageProxyTest::TestProxyPutSingleTimeout >> AggregateStatistics::ShouldBePings [GOOD] >> TBlobStorageProxyTest::TestProxySimpleDiscover ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::RootNodeShouldBeInvalidateByTimeout [GOOD] Test command err: 2025-03-31T15:59:34.435168Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-03-31T15:59:34.436291Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:38:2058], server id = [1:38:2058], tablet id = 1, status = OK 2025-03-31T15:59:34.436718Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:38:2058], path = { OwnerId: 3 LocalId: 3 } 2025-03-31T15:59:34.437033Z node 3 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-03-31T15:59:34.437193Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-03-31T15:59:34.437318Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 1 2025-03-31T15:59:34.437550Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:38:2058], server id = [0:0:0], tablet id = 1, status = ERROR 2025-03-31T15:59:34.437640Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-03-31T15:59:34.437687Z node 3 :STATISTICS DEBUG: EvClientConnected, node id = 3, client id = [3:42:2057], server id = [3:42:2057], tablet id = 3, status = OK 2025-03-31T15:59:34.437744Z node 3 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [3:42:2057], path = { OwnerId: 3 LocalId: 3 } 2025-03-31T15:59:34.437889Z node 4 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 1, current Round: 0 2025-03-31T15:59:34.437969Z node 3 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 3 2025-03-31T15:59:34.438012Z node 3 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 2025-03-31T15:59:34.438144Z node 3 :STATISTICS DEBUG: EvClientConnected, node id = 3, client id = [3:42:2057], server id = [0:0:0], tablet id = 3, status = ERROR 2025-03-31T15:59:34.438167Z node 3 :STATISTICS DEBUG: Skip EvClientConnected 2025-03-31T15:59:34.438219Z node 4 :STATISTICS DEBUG: EvClientConnected, node id = 4, client id = [4:47:2057], server id = [4:47:2057], tablet id = 4, status = OK 2025-03-31T15:59:34.438257Z node 4 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [4:47:2057], path = { OwnerId: 3 LocalId: 3 } 2025-03-31T15:59:34.438317Z node 4 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 4 2025-03-31T15:59:34.438350Z node 4 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-03-31T15:59:34.438568Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 3 2025-03-31T15:59:34.438662Z node 4 :STATISTICS DEBUG: EvClientConnected, node id = 4, client id = [4:47:2057], server id = [0:0:0], tablet id = 4, status = ERROR 2025-03-31T15:59:34.438693Z node 4 :STATISTICS DEBUG: Skip EvClientConnected 2025-03-31T15:59:34.438758Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 4 2025-03-31T15:59:34.449284Z node 4 
:STATISTICS DEBUG: Event round 1 is different from the current 0 2025-03-31T15:59:34.449367Z node 4 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-03-31T15:59:34.449427Z node 3 :STATISTICS DEBUG: Event round 1 is different from the current 0 2025-03-31T15:59:34.449454Z node 3 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-03-31T15:59:34.463133Z node 2 :STATISTICS DEBUG: Skip TEvKeepAliveTimeout 2025-03-31T15:59:34.463222Z node 1 :STATISTICS INFO: Node 2 is unavailable 2025-03-31T15:59:34.463257Z node 1 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 2025-03-31T15:59:34.463366Z node 1 :STATISTICS DEBUG: Event round 1 is different from the current 0 2025-03-31T15:59:34.463403Z node 1 :STATISTICS DEBUG: Skip TEvKeepAliveTimeout 2025-03-31T15:59:34.463471Z node 1 :STATISTICS DEBUG: Event round 1 is different from the current 0 2025-03-31T15:59:34.463499Z node 1 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-03-31T15:59:34.463614Z node 1 :STATISTICS DEBUG: Event round 1 is different from the current 0 2025-03-31T15:59:34.463643Z node 1 :STATISTICS DEBUG: Skip TEvAggregateKeepAlive ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::UpdateEpochPipelining [GOOD] Test command err: 2025-03-31T15:59:31.011637Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:59:31.011716Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:32.316243Z node 1 :NODE_BROKER ERROR: Cannot register node host2:1001: ERROR_TEMP: No free node IDs ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR ... unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR >> TBlobStorageProxyTest::TestPartialGetBlock >> TBlobStorageProxyTest::TestCollectGarbagePersistence >> TNodeBrokerTest::SingleDomainModeBannedIds [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::RegistrationPipelining [GOOD] Test command err: 2025-03-31T15:59:32.780704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:59:32.780785Z node 1 :IMPORT WARN: Table profiles were not loaded ... waiting for commit ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR ... waiting for commit (done) ... 
unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ChildNodesShouldBeInvalidateByTimeout [GOOD] Test command err: 2025-03-31T15:59:34.577895Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-03-31T15:59:34.578932Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:38:2058], server id = [1:38:2058], tablet id = 1, status = OK 2025-03-31T15:59:34.579319Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:38:2058], path = { OwnerId: 3 LocalId: 3 } 2025-03-31T15:59:34.579609Z node 3 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-03-31T15:59:34.579743Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-03-31T15:59:34.579842Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 1 2025-03-31T15:59:34.580007Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:38:2058], server id = [0:0:0], tablet id = 1, status = ERROR 2025-03-31T15:59:34.580041Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-03-31T15:59:34.580079Z node 3 :STATISTICS DEBUG: EvClientConnected, node id = 3, client id = [3:42:2057], server id = [3:42:2057], tablet id = 3, status = OK 2025-03-31T15:59:34.580121Z node 3 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [3:42:2057], path = { OwnerId: 3 LocalId: 3 } 2025-03-31T15:59:34.580242Z node 4 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 1, current Round: 0 2025-03-31T15:59:34.580313Z node 3 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 3 2025-03-31T15:59:34.580360Z node 3 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 2025-03-31T15:59:34.580473Z node 3 :STATISTICS DEBUG: EvClientConnected, node id = 3, client id = [3:42:2057], server id = [0:0:0], tablet id = 3, status = ERROR 2025-03-31T15:59:34.580489Z node 3 :STATISTICS DEBUG: Skip EvClientConnected 2025-03-31T15:59:34.580535Z node 4 :STATISTICS DEBUG: EvClientConnected, node id = 4, client id = [4:47:2057], server id = [4:47:2057], tablet id = 4, status = OK 2025-03-31T15:59:34.580571Z node 4 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [4:47:2057], path = { OwnerId: 3 LocalId: 3 } 2025-03-31T15:59:34.580627Z node 4 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 4 2025-03-31T15:59:34.580658Z node 4 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-03-31T15:59:34.580763Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 3 2025-03-31T15:59:34.580820Z node 4 :STATISTICS DEBUG: EvClientConnected, node id = 4, client id = [4:47:2057], server id = [0:0:0], tablet id = 4, status = ERROR 2025-03-31T15:59:34.580854Z node 4 :STATISTICS DEBUG: Skip EvClientConnected 2025-03-31T15:59:34.580911Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 4 2025-03-31T15:59:34.591323Z node 4 :STATISTICS DEBUG: Event round 1 is different from the current 0 2025-03-31T15:59:34.591382Z node 4 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-03-31T15:59:34.591436Z node 3 :STATISTICS DEBUG: Event round 1 is different from the current 0 2025-03-31T15:59:34.591459Z node 3 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-03-31T15:59:34.602442Z node 2 :STATISTICS DEBUG: Skip TEvKeepAliveTimeout 
2025-03-31T15:59:34.602524Z node 1 :STATISTICS INFO: Node 2 is unavailable 2025-03-31T15:59:34.602561Z node 1 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 2025-03-31T15:59:34.602657Z node 1 :STATISTICS DEBUG: Event round 1 is different from the current 0 2025-03-31T15:59:34.602694Z node 1 :STATISTICS DEBUG: Skip TEvKeepAliveTimeout 2025-03-31T15:59:34.602761Z node 1 :STATISTICS DEBUG: Event round 1 is different from the current 0 2025-03-31T15:59:34.602786Z node 1 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-03-31T15:59:34.602885Z node 1 :STATISTICS DEBUG: Event round 1 is different from the current 0 2025-03-31T15:59:34.602915Z node 1 :STATISTICS DEBUG: Skip TEvAggregateKeepAlive >> TNodeBrokerTest::TestListNodesEpochDeltas [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodeNameReuseRestartWithHostChanges [GOOD] Test command err: 2025-03-31T15:59:31.217385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:59:31.217463Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:31.236916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000001 FAKE_COORDINATOR: Erasing txId 101 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissDifferentDeadlineInverseOrder [GOOD] Test command err: 2025-03-31T15:59:34.331324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:59:34.331414Z node 1 :IMPORT WARN: Table profiles were not loaded ... waiting for cache miss ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR ... 
waiting for cache miss (done) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ShouldBePings [GOOD] Test command err: 2025-03-31T15:59:34.752607Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-03-31T15:59:34.753089Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-03-31T15:59:34.860492Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 2 2025-03-31T15:59:34.860598Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 2025-03-31T15:59:34.860666Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 1 2025-03-31T15:59:34.861588Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:19:2055], server id = [0:0:0], tablet id = 2, status = ERROR 2025-03-31T15:59:34.861648Z node 2 :STATISTICS DEBUG: Skip EvClientConnected 2025-03-31T15:59:34.861696Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:16:2056], server id = [0:0:0], tablet id = 1, status = ERROR 2025-03-31T15:59:34.861716Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-03-31T15:59:34.861803Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 2 2025-03-31T15:59:34.861871Z node 1 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 >> TBlobStorageProxyTest::TestQuadrupleGroups >> TBlobStorageProxyTest::TestVPutVCollectVGetRace >> TNodeBrokerTest::ConfigPipelining [GOOD] >> TBlobStorageProxyTest::TestGetMultipart >> TDynamicNameserverTest::TestCacheUsage [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::SingleDomainModeBannedIds [GOOD] Test command err: 2025-03-31T15:59:33.757968Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:59:33.758062Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:33.869881Z node 1 :NODE_BROKER ERROR: Cannot register node host3:1001: ERROR_TEMP: No free node IDs 2025-03-31T15:59:33.908187Z node 1 :NODE_BROKER ERROR: Cannot register node host4:1001: ERROR_TEMP: No free node IDs 2025-03-31T15:59:33.921970Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node ID is banned 2025-03-31T15:59:34.800279Z node 1 :NODE_BROKER ERROR: Cannot register node host4:1001: ERROR_TEMP: No free node IDs 2025-03-31T15:59:34.826501Z node 1 :NODE_BROKER ERROR: Cannot register node host4:1001: ERROR_TEMP: No free node IDs >> TNodeBrokerTest::NodeNameReuseRestart [GOOD] >> TBlobStorageProxyTest::TestBlock >> TBlobStorageProxyTest::TestProxyGetSingleTimeout >> TBlobStorageProxyTest::TestProxySimpleDiscoverNone >> DataShardSnapshots::MvccSnapshotLockedWritesWithReadConflicts [GOOD] >> DataShardSnapshots::LockedWritesLimitedPerKey+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::TestListNodesEpochDeltas [GOOD] Test command err: 2025-03-31T15:59:31.858596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:59:31.858692Z node 1 :IMPORT WARN: Table profiles were not loaded |69.8%| [TA] $(B)/ydb/core/statistics/service/ut/ut_aggregation/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::TestCacheUsage [GOOD] Test command err: 2025-03-31T15:59:34.293849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:59:34.293938Z node 1 :IMPORT WARN: Table profiles were not loaded >> TBlobStorageProxyTest::TestBlockPersistence >> TBlobStorageProxyTest::TestProxyPutInvalidSize ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ConfigPipelining [GOOD] Test command err: 2025-03-31T15:59:34.137369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:59:34.137475Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:34.230223Z node 1 :NODE_BROKER ERROR: Cannot register node host1:1001: ERROR_TEMP: No free node IDs ... waiting for commit ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR ... waiting for commit (done) ... unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodeNameReuseRestart [GOOD] Test command err: 2025-03-31T15:59:34.040579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:59:34.040784Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:34.060617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000001 FAKE_COORDINATOR: Erasing txId 101 >> TBlobStorageProxyTest::TestGetAndRangeGetManyBlobs >> DataShardSnapshots::LockedWriteWithAsyncIndex-WithRestart-UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndex+WithRestart-UseSink >> TBlobStorageProxyTest::TestProxyRestoreOnGetStripe >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedAfterRead+UseSink [GOOD] >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedAfterRead-UseSink >> TBlobStorageProxyTest::TestPutGetStatusErasure4Plus2Block >> TBlobStorageProxyTest::TestProxyPutSingleTimeout [GOOD] >> TBlobStorageProxyTest::TestProxyRestoreOnDiscoverBlock >> TBlobStorageProxyTest::TestEmptyDiscover >> TBlobStorageProxyTest::TestDoubleFailure >> DataShardSnapshots::LockedWriteCleanupOnCopyTable-UseSink [GOOD] >> DataShardSnapshots::DelayedWriteReadableAfterSplit >> TBlobStorageProxyTest::TestVPutVCollectVGetRace [GOOD] >> TBlobStorageProxyTest::TestVGetNoData >> TBlobStorageProxyTest::TestPartialGetBlock [GOOD] >> TBlobStorageProxyTest::TestPartialGetMirror >> TBlobStorageProxyTest::TestProxyRestoreOnGetBlock >> UpsertLoad::ShouldWriteKqpUpsert2 >> TBlobStorageProxyTest::TestProxySimpleDiscoverNone [GOOD] >> TBlobStorageProxyTest::TestPutGetMany >> UpsertLoad::ShouldCreateTable >> TBlobStorageProxyTest::TestProxySimpleDiscover [GOOD] >> TBlobStorageProxyTest::TestProxySimpleDiscoverMaxi >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql |69.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut |69.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut |69.8%| [TA] {RESULT} 
$(B)/ydb/core/statistics/service/ut/ut_aggregation/test-results/unittest/{meta.json ... results_accumulator.log} |69.8%| [LD] {RESULT} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut >> UpsertLoad::ShouldWriteKqpUpsert >> ReadLoad::ShouldReadIterate >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-52 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-53 >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkqlKeyFrom >> TNodeBrokerTest::ExtendLeaseRestartRace [GOOD] >> UpsertLoad::ShouldWriteKqpUpsertKeyFrom >> TBlobStorageProxyTest::TestProxyPutInvalidSize [GOOD] >> TBlobStorageProxyTest::TestProxyLongTailDiscoverSingleFailure >> OlapEstimationRowsCorrectness::TPCH11 >> UpsertLoad::ShouldWriteDataBulkUpsertBatch >> TBlobStorageProxyTest::TestVGetNoData [GOOD] >> TBlobStorageProxyTest::TestBlock [GOOD] >> TBlobStorageProxyTest::TestBatchedPutRequestDoesNotContainAHugeBlob >> TBlobStorageProxyTest::TestEmptyDiscover [GOOD] >> TBlobStorageProxyTest::TestEmptyDiscoverMaxi >> TSchemeShardSubDomainTest::TableDiskSpaceQuotas [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ExtendLeaseRestartRace [GOOD] Test command err: 2025-03-31T15:59:33.739861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:59:33.739938Z node 1 :IMPORT WARN: Table profiles were not loaded ... rebooting node broker ... OnActivateExecutor tabletId# 72057594037936129 ... captured cache request ... sending extend lease request ... captured cache request ... waiting for response ... waiting for epoch update |69.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf |69.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf |69.8%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf >> TBlobStorageProxyTest::TestGetMultipart [GOOD] >> TBlobStorageProxyTest::TestGetFail >> TBlobStorageProxyTest::TestPartialGetMirror [GOOD] >> TBlobStorageProxyTest::TestPersistence [GOOD] >> TBlobStorageProxyTest::TestPartialGetStripe >> TNodeBrokerTest::BasicFunctionality [GOOD] >> TBlobStorageProxyTest::TestProxyGetSingleTimeout [GOOD] >> TBlobStorageProxyTest::TestProxyDiscoverSingleTimeout |69.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestVGetNoData [GOOD] |69.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPartialGetMirror [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::TableDiskSpaceQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T15:59:24.665420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T15:59:24.665526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:24.665564Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-03-31T15:59:24.665596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T15:59:24.665662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T15:59:24.665715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T15:59:24.665777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:24.665891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T15:59:24.666233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:59:24.754889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:59:24.754946Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:24.761106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:59:24.761213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T15:59:24.761456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T15:59:24.770198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T15:59:24.770453Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T15:59:24.771138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:24.771379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:59:24.773315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:24.774488Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:24.774554Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:24.774727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T15:59:24.774772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:24.774809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T15:59:24.774951Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T15:59:24.782640Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T15:59:24.893708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 
72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:59:24.893951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:24.894208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T15:59:24.894462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T15:59:24.894516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:24.896943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:24.897086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T15:59:24.897246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:24.897321Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T15:59:24.897358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T15:59:24.897402Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T15:59:24.899425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:24.899495Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:59:24.899536Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T15:59:24.901438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:24.901487Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:24.901551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:24.901600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T15:59:24.904540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:59:24.906375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T15:59:24.906581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 
2025-03-31T15:59:24.907685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:24.907841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:24.907910Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:24.908176Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T15:59:24.908229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:24.908392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:59:24.908514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T15:59:24.910717Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:24.910758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:24.910951Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:24.910990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T15:59:24.911273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:24.911325Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T15:59:24.911402Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:24.911428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:24.911469Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:24.911493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:24.911518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T15:59:24.911547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:24.911582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T15:59:24.911604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T15:59:24.911688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:59:24.911719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T15:59:24.911748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 
2025-03-31T15:59:24.913669Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:24.913773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:24.913813Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, coun ... 7594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 767 RawX2: 4294969999 } Origin: 72075186233409549 State: 5 TxId: 107 Step: 0 Generation: 2 2025-03-31T15:59:38.571282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 107, tablet: 72075186233409549, partId: 0 2025-03-31T15:59:38.571461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 107:0, at schemeshard: 72057594046678944, message: Source { RawX1: 767 RawX2: 4294969999 } Origin: 72075186233409549 State: 5 TxId: 107 Step: 0 Generation: 2 2025-03-31T15:59:38.571536Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 107:0 HandleReply TEvDataShard::TEvSchemaChanged, save it, at schemeshard: 72057594046678944 2025-03-31T15:59:38.572089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2025-03-31T15:59:38.572152Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 107:0 ProgressState, operation type: TxDropTable, at tablet# 72057594046678944 2025-03-31T15:59:38.572197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 107:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2025-03-31T15:59:38.572235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 107, done: 0, blocked: 1 2025-03-31T15:59:38.572339Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 107:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 107 Name: RenamePathBarrier }, at tablet# 72057594046678944 2025-03-31T15:59:38.572466Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 137 -> 129 2025-03-31T15:59:38.572616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-31T15:59:38.572690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-03-31T15:59:38.576114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2025-03-31T15:59:38.576935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2025-03-31T15:59:38.577646Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:38.577716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 107, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T15:59:38.577910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 107, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-03-31T15:59:38.578070Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:38.578114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 107, path id: 2 2025-03-31T15:59:38.578163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 107, path id: 4 2025-03-31T15:59:38.578608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2025-03-31T15:59:38.578676Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 107:0 ProgressState at tablet: 72057594046678944 2025-03-31T15:59:38.578780Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 107:0, at schemeshard: 72057594046678944 2025-03-31T15:59:38.578832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 107:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2025-03-31T15:59:38.578885Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 129 -> 240 2025-03-31T15:59:38.579948Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 14 PathOwnerId: 72057594046678944, cookie: 107 2025-03-31T15:59:38.580065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 14 PathOwnerId: 72057594046678944, cookie: 107 2025-03-31T15:59:38.580106Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 107 2025-03-31T15:59:38.580144Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 14 2025-03-31T15:59:38.580191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-03-31T15:59:38.581115Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 107 2025-03-31T15:59:38.581216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 107 2025-03-31T15:59:38.581246Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 107 2025-03-31T15:59:38.581289Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-03-31T15:59:38.581325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-03-31T15:59:38.581403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 0/1, is published: true 2025-03-31T15:59:38.585228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2025-03-31T15:59:38.585310Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable 
TProposedDeletePart operationId: 107:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:59:38.585694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-03-31T15:59:38.585847Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#107:0 progress is 1/1 2025-03-31T15:59:38.585933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-03-31T15:59:38.585979Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#107:0 progress is 1/1 2025-03-31T15:59:38.586014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-03-31T15:59:38.586064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 1/1, is published: true 2025-03-31T15:59:38.586108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-03-31T15:59:38.586151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 107:0 2025-03-31T15:59:38.586182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 107:0 2025-03-31T15:59:38.586298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-03-31T15:59:38.587368Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:38.587413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T15:59:38.590895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2025-03-31T15:59:38.591537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2025-03-31T15:59:38.593399Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:38.593475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 0, path id: 2 2025-03-31T15:59:38.594240Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 15 PathOwnerId: 72057594046678944, cookie: 0 TestWaitNotification wait txId: 107 2025-03-31T15:59:38.594843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: send EvNotifyTxCompletion 2025-03-31T15:59:38.594887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 107 2025-03-31T15:59:38.595617Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 107, at schemeshard: 72057594046678944 2025-03-31T15:59:38.595729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2025-03-31T15:59:38.595768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:999:2926] TestWaitNotification: OK eventTxId 107 2025-03-31T15:59:38.596521Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at 
schemeshard: 72057594046678944 2025-03-31T15:59:38.598531Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 2ms result status StatusSuccess 2025-03-31T15:59:38.599064Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 15 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 15 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 9 SubDomainVersion: 1 SubDomainStateVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "name_USER_0_kind_hdd-1" Kind: "hdd-1" } StoragePools { Name: "name_USER_0_kind_hdd-2" Kind: "hdd-2" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 1 } SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::BasicFunctionality [GOOD] Test command err: 2025-03-31T15:59:34.878279Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:59:34.878376Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:36.376275Z node 1 :NODE_BROKER ERROR: Cannot register node host1:1001: WRONG_REQUEST: Another location is registered for host1:1001 2025-03-31T15:59:36.391904Z node 1 :NODE_BROKER ERROR: Cannot register node host4:1001: ERROR_TEMP: No free node IDs 2025-03-31T15:59:36.392478Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2025-03-31T15:59:36.392926Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired >> TBlobStorageProxyTest::TestBatchedPutRequestDoesNotContainAHugeBlob [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasureMirror3 [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasureMirror3Plus2 >> TBlobStorageProxyTest::TestBlockPersistence [GOOD] >> TBlobStorageProxyTest::TestCollectGarbage >> TBlobStorageProxyTest::TestDoubleFailure [GOOD] >> TBlobStorageProxyTest::TestDoubleFailureMirror3Plus2 >> RetryPolicy::TWriteSession_TestPolicy [GOOD] >> RetryPolicy::TWriteSession_TestBrokenPolicy |69.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestBatchedPutRequestDoesNotContainAHugeBlob [GOOD] >> TBlobStorageProxyTest::TestGetFail [GOOD] >> TBlobStorageProxyTest::TestPartialGetStripe [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCH17 >> TBlobStorageProxyTest::TestPutGetMany [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasure4Plus2Block 
[GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasure4Plus2Stripe >> KqpJoinOrder::TPCDS23+ColumnStore >> KqpFlipJoin::Right_1 |69.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPartialGetStripe [GOOD] |69.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPutGetMany [GOOD] |69.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestGetFail [GOOD] >> KqpJoinOrder::FiveWayJoinWithComplexPreds2+ColumnStore >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql2 >> KqpJoinOrder::FourWayJoinLeftFirst+ColumnStore >> DataShardSnapshots::RepeatableReadAfterSplitRace [GOOD] >> DataShardSnapshots::PostMergeNotCompactedTooEarly >> TBlobStorageProxyTest::TestCollectGarbage [GOOD] >> TBlobStorageProxyTest::TestProxyDiscoverSingleTimeout [GOOD] >> KqpJoinOrder::FiveWayJoinWithPreds+ColumnStore >> UpsertLoad::ShouldWriteDataBulkUpsertBatch [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsertKeyFrom >> UpsertLoad::ShouldCreateTable [GOOD] >> UpsertLoad::ShouldDropCreateTable >> ReadLoad::ShouldReadIterate [GOOD] >> ReadLoad::ShouldReadIterateMoreThanRows >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkqlKeyFrom [GOOD] >> TDataShardLocksTest::UseLocksCache [GOOD] >> UpsertLoad::ShouldWriteKqpUpsert2 [GOOD] |69.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestCollectGarbage [GOOD] >> UpsertLoad::ShouldWriteKqpUpsert [GOOD] |69.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyDiscoverSingleTimeout [GOOD] >> KqpIndexLookupJoin::LeftJoinSkipNullFilter-StreamLookup >> TBlobStorageProxyTest::TestDoubleFailureMirror3Plus2 [GOOD] >> KqpJoin::JoinDupColumnRightPure >> RetryPolicy::TWriteSession_SwitchBackToLocalCluster [GOOD] >> RetryPolicy::TWriteSession_SeqNoShift >> KqpJoinOrder::FiveWayJoinWithConstantFold-ColumnStore >> DataShardSnapshots::LockedWritesLimitedPerKey+UseSink [GOOD] >> DataShardSnapshots::LockedWritesLimitedPerKey-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkqlKeyFrom [GOOD] Test command err: 2025-03-31T15:59:42.091962Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:59:42.092214Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:59:42.092379Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a01/r3tmp/tmpU3V1Mg/pdisk_1.dat 2025-03-31T15:59:42.503825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T15:59:42.572125Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:42.618343Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:59:42.618524Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:59:42.632327Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:59:42.734129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T15:59:43.148567Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertLocalMkqlStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "usertable" } UpsertLocalMkqlStart { RowCount: 10 Inflight: 3 KeyFrom: 12345 } 2025-03-31T15:59:43.148798Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:738:2620], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 KeyFrom: 12345 with type# 1, target# TabletId: 72075186224037888 TableId: 2 TableName: "usertable" 2025-03-31T15:59:43.310257Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:738:2620], subTag: 2} TUpsertActor finished in 0.160898s, errors=0 2025-03-31T15:59:43.310351Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:739:2621] with tag# 2 >> UpsertLoad::ShouldWriteKqpUpsertKeyFrom [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteKqpUpsert2 [GOOD] Test command err: 2025-03-31T15:59:41.013812Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:59:41.014067Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:59:41.014243Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a35/r3tmp/tmpk7tFiB/pdisk_1.dat 2025-03-31T15:59:41.450983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T15:59:41.503998Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:41.544524Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:59:41.544684Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:59:41.556444Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:59:41.648699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T15:59:42.047111Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertKqpStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "JustTable" } UpsertKqpStart { RowCount: 20 Inflight: 5 } 2025-03-31T15:59:42.047295Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:738:2620], subTag: 2} Bootstrap called: RowCount: 20 Inflight: 5 2025-03-31T15:59:42.052211Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:738:2620], subTag: 2} started# 5 actors each with inflight# 4 2025-03-31T15:59:42.052308Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 1} Bootstrap called: RowCount: 4 Inflight: 1 2025-03-31T15:59:42.052368Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 2} Bootstrap called: RowCount: 4 Inflight: 1 2025-03-31T15:59:42.052398Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 3} Bootstrap called: RowCount: 4 Inflight: 1 2025-03-31T15:59:42.052424Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 4} Bootstrap called: RowCount: 4 Inflight: 1 2025-03-31T15:59:42.052462Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 5} Bootstrap called: RowCount: 4 Inflight: 1 2025-03-31T15:59:42.055994Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 1} session: ydb://session/3?node_id=1&id=M2Y4YTkwNC1kZDY0MmNiZi1mNDk4YmI0NC02Y2FiOWZlMw== 2025-03-31T15:59:42.059815Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 2} session: ydb://session/3?node_id=1&id=MWU0ZmY0MDEtMmZiYWMyMTktMWI5OGYxYmItMmFhYWRmN2U= 2025-03-31T15:59:42.059882Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 3} session: 
ydb://session/3?node_id=1&id=YzY4OTE3ZTgtOWQ4YzJiYi1mMmE3MDY1Yy04M2NhMDhkYQ== 2025-03-31T15:59:42.061027Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 4} session: ydb://session/3?node_id=1&id=ZGZiMTk4MWQtM2E0MWUyNzItZjgyMDEyNGMtZmVjN2MwOGU= 2025-03-31T15:59:42.062399Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 5} session: ydb://session/3?node_id=1&id=NDlhZGQxMzEtMmYwMmQxOTItN2Q3NjAxYzQtMjUxZWY5Yjk= 2025-03-31T15:59:42.068138Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:753:2635], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:42.068353Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:778:2654], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:42.068431Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:779:2655], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:42.068496Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:780:2656], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:42.068542Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:781:2657], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:42.068582Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:782:2658], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:42.068665Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:42.077012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T15:59:42.140615Z node 1 :TX_PROXY ERROR: Actor# [1:799:2675] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-31T15:59:42.141565Z node 1 :TX_PROXY ERROR: Actor# [1:800:2676] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-31T15:59:42.142720Z node 1 :TX_PROXY ERROR: Actor# [1:801:2677] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-31T15:59:42.143301Z node 1 :TX_PROXY ERROR: Actor# [1:802:2678] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-31T15:59:42.326604Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:790:2666], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T15:59:42.326712Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:791:2667], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T15:59:42.326797Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:792:2668], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T15:59:42.326874Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:793:2669], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T15:59:42.326929Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:794:2670], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T15:59:42.362366Z node 1 :TX_PROXY ERROR: Actor# [1:900:2741] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:59:42.920852Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 1} finished in 1743436782.920787s, errors=0 2025-03-31T15:59:42.921204Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:738:2620], subTag: 2} finished: 1 { Tag: 1 DurationMs: 1743436782920 OperationsOK: 4 OperationsError: 0 } 2025-03-31T15:59:42.936430Z node 1 :TX_PROXY ERROR: Actor# [1:973:2779] txid# 281474976715668, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:59:43.017302Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 5} finished in 1743436783.017260s, errors=0 2025-03-31T15:59:43.017647Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:738:2620], subTag: 2} finished: 5 { Tag: 5 DurationMs: 1743436783017 OperationsOK: 4 OperationsError: 0 } 2025-03-31T15:59:43.034201Z node 1 :TX_PROXY ERROR: Actor# [1:1024:2801] txid# 281474976715673, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:59:43.125170Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 2} finished in 1743436783.125131s, errors=0 2025-03-31T15:59:43.125478Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:738:2620], subTag: 2} finished: 2 { Tag: 2 DurationMs: 1743436783125 OperationsOK: 4 OperationsError: 0 } 2025-03-31T15:59:43.142481Z node 1 :TX_PROXY ERROR: Actor# [1:1075:2823] txid# 281474976715678, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:59:43.224054Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 4} finished in 1743436783.224015s, errors=0 2025-03-31T15:59:43.224345Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:738:2620], subTag: 2} finished: 4 { Tag: 4 DurationMs: 1743436783224 OperationsOK: 4 OperationsError: 0 } 2025-03-31T15:59:43.240992Z node 1 :TX_PROXY ERROR: Actor# [1:1126:2845] txid# 281474976715683, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:59:43.321565Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 3} finished in 1743436783.321524s, errors=0 2025-03-31T15:59:43.321864Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:738:2620], subTag: 2} finished: 3 { Tag: 3 DurationMs: 1743436783321 OperationsOK: 4 OperationsError: 0 } 2025-03-31T15:59:43.321923Z 
node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:738:2620], subTag: 2} finished in 1.269991s, oks# 20, errors# 0 2025-03-31T15:59:43.322036Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:739:2621] with tag# 2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteKqpUpsert [GOOD] Test command err: 2025-03-31T15:59:41.444340Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:59:41.444628Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:59:41.444834Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a20/r3tmp/tmp59Egp3/pdisk_1.dat 2025-03-31T15:59:41.846033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T15:59:41.899273Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:41.943963Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:59:41.944154Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:59:41.955877Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:59:42.041130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T15:59:42.392110Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertKqpStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" } UpsertKqpStart { RowCount: 20 Inflight: 5 } 2025-03-31T15:59:42.392275Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:738:2620], subTag: 2} Bootstrap called: RowCount: 20 Inflight: 5 2025-03-31T15:59:42.406123Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:738:2620], subTag: 2} started# 5 actors each with inflight# 4 2025-03-31T15:59:42.406225Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 1} Bootstrap called: RowCount: 4 Inflight: 1 2025-03-31T15:59:42.406284Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 2} Bootstrap called: RowCount: 4 Inflight: 1 2025-03-31T15:59:42.406313Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 3} Bootstrap called: RowCount: 4 Inflight: 1 2025-03-31T15:59:42.406342Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 4} Bootstrap called: RowCount: 4 Inflight: 1 2025-03-31T15:59:42.406379Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 5} Bootstrap called: RowCount: 4 Inflight: 1 2025-03-31T15:59:42.409846Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 1} session: ydb://session/3?node_id=1&id=NzMzNzkwYmUtOTM2OTM0YTgtY2ZjMmM0YzMtNWMwNmUxMGU= 2025-03-31T15:59:42.412800Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 2} session: ydb://session/3?node_id=1&id=YWJmYjAxYzctMjY3NjNjODItYjJlOWQwNTQtNzIyYjM5ODY= 2025-03-31T15:59:42.412872Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 3} session: 
ydb://session/3?node_id=1&id=Y2Q2MWFlZDItZjVjN2VlM2MtNTI5ODc3NzctZThjOTk4YTk= 2025-03-31T15:59:42.414378Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 4} session: ydb://session/3?node_id=1&id=MzhkNWQ2NDItMzc5YmQ3YzgtZWQ0MmZhMTQtNzE5NmIyNmY= 2025-03-31T15:59:42.415813Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 5} session: ydb://session/3?node_id=1&id=NzYzNDFiNDItMmQzZTgwYmMtNjIzY2UwYTgtNTExYzM4NGQ= 2025-03-31T15:59:42.420136Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:753:2635], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:42.420273Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:778:2654], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:42.420360Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:779:2655], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:42.420417Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:780:2656], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:42.420464Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:781:2657], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:42.420509Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:782:2658], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:42.420596Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:42.428468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T15:59:42.497209Z node 1 :TX_PROXY ERROR: Actor# [1:799:2675] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-31T15:59:42.498139Z node 1 :TX_PROXY ERROR: Actor# [1:800:2676] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-31T15:59:42.499389Z node 1 :TX_PROXY ERROR: Actor# [1:801:2677] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-31T15:59:42.500035Z node 1 :TX_PROXY ERROR: Actor# [1:802:2678] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-31T15:59:42.678710Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:790:2666], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T15:59:42.680224Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:791:2667], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T15:59:42.680337Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:792:2668], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T15:59:42.680411Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:793:2669], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T15:59:42.680461Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:794:2670], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T15:59:42.717179Z node 1 :TX_PROXY ERROR: Actor# [1:900:2741] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:59:43.225072Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 4} finished in 1743436783.225026s, errors=0 2025-03-31T15:59:43.225419Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:738:2620], subTag: 2} finished: 4 { Tag: 4 DurationMs: 1743436783225 OperationsOK: 4 OperationsError: 0 } 2025-03-31T15:59:43.239867Z node 1 :TX_PROXY ERROR: Actor# [1:973:2779] txid# 281474976715668, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:59:43.306578Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 1} finished in 1743436783.306541s, errors=0 2025-03-31T15:59:43.306872Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:738:2620], subTag: 2} finished: 1 { Tag: 1 DurationMs: 1743436783306 OperationsOK: 4 OperationsError: 0 } 2025-03-31T15:59:43.321093Z node 1 :TX_PROXY ERROR: Actor# [1:1024:2801] txid# 281474976715673, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:59:43.387120Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 3} finished in 1743436783.387051s, errors=0 2025-03-31T15:59:43.387465Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:738:2620], subTag: 2} finished: 3 { Tag: 3 DurationMs: 1743436783387 OperationsOK: 4 OperationsError: 0 } 2025-03-31T15:59:43.401210Z node 1 :TX_PROXY ERROR: Actor# [1:1075:2823] txid# 281474976715678, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:59:43.477148Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 2} finished in 1743436783.477093s, errors=0 2025-03-31T15:59:43.477435Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:738:2620], subTag: 2} finished: 2 { Tag: 2 DurationMs: 1743436783477 OperationsOK: 4 OperationsError: 0 } 2025-03-31T15:59:43.491906Z node 1 :TX_PROXY ERROR: Actor# [1:1126:2845] txid# 281474976715683, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:59:43.585507Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 5} finished in 1743436783.585471s, errors=0 2025-03-31T15:59:43.585646Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:738:2620], subTag: 2} finished: 5 { Tag: 5 DurationMs: 1743436783585 OperationsOK: 4 OperationsError: 0 } 2025-03-31T15:59:43.585697Z 
node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:738:2620], subTag: 2} finished in 1.179885s, oks# 20, errors# 0 2025-03-31T15:59:43.585813Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:739:2621] with tag# 2 >> TBlobStorageProxyTest::TestPutGetStatusErasureMirror3Plus2 [GOOD] >> TBlobStorageProxyTest::TestCollectGarbagePersistence [GOOD] >> TBlobStorageProxyTest::TestCollectGarbageAfterLargeData ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestDoubleFailureMirror3Plus2 [GOOD] Test command err: 2025-03-31T15:59:38.123622Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/0l5c/000fc9/r3tmp/tmp7Zgkf8//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1 2025-03-31T15:59:38.143486Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/0l5c/000fc9/r3tmp/tmp7Zgkf8//vdisk_bad_1/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 2 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 2 2025-03-31T15:59:38.232474Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:1:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-03-31T15:59:38.232660Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:0:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-03-31T15:59:41.887817Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/0l5c/000fc9/r3tmp/tmpIxD31p//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1 2025-03-31T15:59:41.888891Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/0l5c/000fc9/r3tmp/tmpIxD31p//vdisk_bad_1/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 2 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 2 2025-03-31T15:59:41.931874Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:1:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# 
true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-03-31T15:59:41.932141Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:0:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, 
errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_locks/unittest >> TDataShardLocksTest::UseLocksCache [GOOD] Test command err: 2025-03-31T15:59:34.262199Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:59:34.262389Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:59:34.262514Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0019d8/r3tmp/tmpz2uldX/pdisk_1.dat 2025-03-31T15:59:34.650302Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T15:59:34.695280Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:34.740702Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:59:34.740913Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:59:34.752776Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:59:34.843254Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T15:59:34.889656Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:671:2573]: NKikimr::TEvTablet::TEvBoot 2025-03-31T15:59:34.890995Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:671:2573]: NKikimr::TEvTablet::TEvRestored 2025-03-31T15:59:34.891596Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:671:2573] 2025-03-31T15:59:34.891899Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:59:34.937587Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:671:2573]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T15:59:34.938810Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:59:34.939027Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T15:59:34.941088Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T15:59:34.941205Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T15:59:34.941279Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T15:59:34.941690Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T15:59:34.942038Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T15:59:34.942114Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:698:2573] in generation 1 2025-03-31T15:59:34.942339Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:659:2566], Recipient [1:675:2575]: NKikimr::TEvTablet::TEvBoot 2025-03-31T15:59:34.943749Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:659:2566], Recipient [1:675:2575]: NKikimr::TEvTablet::TEvRestored 2025-03-31T15:59:34.944211Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 
72075186224037889 actor [1:675:2575] 2025-03-31T15:59:34.944447Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:59:34.954440Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:659:2566], Recipient [1:675:2575]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T15:59:34.955238Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:59:34.955379Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T15:59:34.956916Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-03-31T15:59:34.956993Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-03-31T15:59:34.957049Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-03-31T15:59:34.957385Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T15:59:34.957528Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T15:59:34.957614Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037889 persisting started state actor id [1:706:2575] in generation 1 2025-03-31T15:59:34.969055Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T15:59:35.005926Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T15:59:35.006192Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T15:59:35.006358Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:709:2594] 2025-03-31T15:59:35.006417Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T15:59:35.006471Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T15:59:35.006514Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T15:59:35.006844Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:671:2573], Recipient [1:671:2573]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T15:59:35.006904Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T15:59:35.007031Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T15:59:35.007072Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-03-31T15:59:35.007161Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T15:59:35.007237Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:710:2595] 2025-03-31T15:59:35.007266Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-03-31T15:59:35.007289Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-03-31T15:59:35.007313Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-31T15:59:35.007639Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:675:2575], Recipient [1:675:2575]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T15:59:35.007674Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 
2025-03-31T15:59:35.008012Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T15:59:35.008177Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T15:59:35.008355Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T15:59:35.008395Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T15:59:35.008436Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-31T15:59:35.008494Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-31T15:59:35.008539Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-31T15:59:35.008574Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T15:59:35.008613Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T15:59:35.008668Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-03-31T15:59:35.008728Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-03-31T15:59:35.009246Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:692:2586], Recipient [1:671:2573]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T15:59:35.009319Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T15:59:35.009363Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:665:2569], serverId# [1:692:2586], sessionId# [0:0:0] 2025-03-31T15:59:35.009444Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-31T15:59:35.009475Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-03-31T15:59:35.009500Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037889 2025-03-31T15:59:35.009544Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037889 has no attached operations 2025-03-31T15:59:35.009570Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037889 2025-03-31T15:59:35.009609Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-03-31T15:59:35.009649Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-03-31T15:59:35.009785Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:692:2586] 2025-03-31T15:59:35.009831Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T15:59:35.009944Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T15:59:35.010165Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-31T15:59:35.010235Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T15:59:35.010366Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T15:59:35.010440Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-31T15:59:35.010482Z node 1 :TX_DATASHARD TRACE: 
Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-31T15:59:35.010518Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-03-31T15:59:35.010554Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-31T15:59:35.010856Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-31T15:59:35.010904Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-31T15:59:35.010955Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-31T15:59:35.011001Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-31T15:59:35.011053Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-31T15:59:35.011112Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 720751 ... xecution status for [0:2] at 72075186224037888 is Executed 2025-03-31T15:59:43.068025Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037888 executing on unit CompletedOperations 2025-03-31T15:59:43.068074Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:2] at 72075186224037888 has finished 2025-03-31T15:59:43.068108Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T15:59:43.068162Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-03-31T15:59:43.068216Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-31T15:59:43.068263Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-31T15:59:43.068509Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [2:61:2108], Recipient [2:985:2786]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715661 LockNode: 2 Status: STATUS_NOT_FOUND 2025-03-31T15:59:43.079498Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqpew7736j5jvzywgw7rve94, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDVmZmQyZjAtMTY2NDU4NGUtYWM5M2Q0MGEtYTMxNWIzMWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-31T15:59:43.082391Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [2:1013:2801], Recipient [2:985:2786]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2500 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2025-03-31T15:59:43.082534Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-03-31T15:59:43.082625Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit CheckRead 2025-03-31T15:59:43.082727Z node 2 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-03-31T15:59:43.082778Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit CheckRead 2025-03-31T15:59:43.082815Z node 2 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-03-31T15:59:43.082859Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit BuildAndWaitDependencies 2025-03-31T15:59:43.082909Z node 2 :TX_DATASHARD TRACE: Activated operation [0:3] at 72075186224037888 2025-03-31T15:59:43.082968Z node 2 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-03-31T15:59:43.082997Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-03-31T15:59:43.083027Z node 2 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit ExecuteRead 2025-03-31T15:59:43.083052Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit ExecuteRead 2025-03-31T15:59:43.084115Z node 2 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2500 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2025-03-31T15:59:43.084416Z node 2 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v2500/18446744073709551615 2025-03-31T15:59:43.084477Z node 2 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is DelayComplete 2025-03-31T15:59:43.084514Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit ExecuteRead 2025-03-31T15:59:43.084554Z node 2 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit CompletedOperations 2025-03-31T15:59:43.084599Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit CompletedOperations 2025-03-31T15:59:43.084645Z node 2 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-03-31T15:59:43.084670Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit CompletedOperations 2025-03-31T15:59:43.084699Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:3] at 72075186224037888 has finished 2025-03-31T15:59:43.084742Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-03-31T15:59:43.096050Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T15:59:43.096126Z node 2 :TX_DATASHARD TRACE: 
Complete execution for [2500:281474976715661] at 72075186224037888 on unit CompleteWrite 2025-03-31T15:59:43.096186Z node 2 :TX_DATASHARD DEBUG: Complete write [2500 : 281474976715661] from 72075186224037888 at tablet 72075186224037888 send result to client [2:942:2727] 2025-03-31T15:59:43.096263Z node 2 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 72075186224037888 {TEvReadSet step# 2500 txid# 281474976715661 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 1} 2025-03-31T15:59:43.096302Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T15:59:43.096415Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T15:59:43.096507Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:2] at 72075186224037888 on unit FinishProposeWrite 2025-03-31T15:59:43.096561Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 2 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_COMPLETED 2025-03-31T15:59:43.096653Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T15:59:43.096742Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-03-31T15:59:43.096785Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:3] at 72075186224037888 on unit ExecuteRead 2025-03-31T15:59:43.096839Z node 2 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[2:1013:2801], 0} after executionsCount# 1 2025-03-31T15:59:43.096915Z node 2 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[2:1013:2801], 0} sends rowCount# 2, bytes# 64, quota rows left# 999, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-03-31T15:59:43.097008Z node 2 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[2:1013:2801], 0} finished in read 2025-03-31T15:59:43.097292Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [2:985:2786], Recipient [2:675:2576]: {TEvReadSet step# 2500 txid# 281474976715661 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 1} 2025-03-31T15:59:43.097336Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T15:59:43.097371Z node 2 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715661 2025-03-31T15:59:43.098356Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [2:1013:2801], Recipient [2:985:2786]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-03-31T15:59:43.098426Z node 2 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } 2025-03-31T15:59:43.098603Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [2:1013:2801], Recipient [2:675:2576]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2500 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 999 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 999 RangesSize: 1 2025-03-31T15:59:43.098714Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2025-03-31T15:59:43.098767Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037889 on unit CheckRead 2025-03-31T15:59:43.098831Z node 2 
:TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037889 is Executed 2025-03-31T15:59:43.098861Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037889 executing on unit CheckRead 2025-03-31T15:59:43.098889Z node 2 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-03-31T15:59:43.098935Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037889 on unit BuildAndWaitDependencies 2025-03-31T15:59:43.098980Z node 2 :TX_DATASHARD TRACE: Activated operation [0:6] at 72075186224037889 2025-03-31T15:59:43.099011Z node 2 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037889 is Executed 2025-03-31T15:59:43.099038Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-03-31T15:59:43.099062Z node 2 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037889 to execution unit ExecuteRead 2025-03-31T15:59:43.099104Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037889 on unit ExecuteRead 2025-03-31T15:59:43.099226Z node 2 :TX_DATASHARD TRACE: 72075186224037889 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2500 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 999 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 999 } 2025-03-31T15:59:43.099440Z node 2 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037889 promoting UnprotectedReadEdge to v2500/18446744073709551615 2025-03-31T15:59:43.099493Z node 2 :TX_DATASHARD TRACE: 72075186224037889 Complete read# {[2:1013:2801], 1} after executionsCount# 1 2025-03-31T15:59:43.099562Z node 2 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[2:1013:2801], 1} sends rowCount# 2, bytes# 64, quota rows left# 997, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-03-31T15:59:43.099639Z node 2 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[2:1013:2801], 1} finished in read 2025-03-31T15:59:43.099707Z node 2 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037889 is Executed 2025-03-31T15:59:43.099760Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037889 executing on unit ExecuteRead 2025-03-31T15:59:43.099794Z node 2 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037889 to execution unit CompletedOperations 2025-03-31T15:59:43.099822Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037889 on unit CompletedOperations 2025-03-31T15:59:43.099866Z node 2 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037889 is Executed 2025-03-31T15:59:43.099889Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037889 executing on unit CompletedOperations 2025-03-31T15:59:43.099913Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:6] at 72075186224037889 has finished 2025-03-31T15:59:43.099954Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2025-03-31T15:59:43.100055Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2025-03-31T15:59:43.100757Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [2:1013:2801], Recipient [2:675:2576]: NKikimrTxDataShard.TEvReadCancel ReadId: 1 2025-03-31T15:59:43.100813Z node 2 :TX_DATASHARD TRACE: 72075186224037889 ReadCancel: { ReadId: 1 } |69.9%| 
[LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer |69.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer |69.9%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedAfterRead-UseSink [GOOD] >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedAfterRead+UseSink >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-53 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-54 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteKqpUpsertKeyFrom [GOOD] Test command err: 2025-03-31T15:59:42.141551Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:59:42.141796Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:59:42.141967Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a00/r3tmp/tmpI2gsyN/pdisk_1.dat 2025-03-31T15:59:42.622085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T15:59:42.704062Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:42.748642Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:59:42.748770Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:59:42.760840Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:59:42.857433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T15:59:43.245718Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertKqpStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" } UpsertKqpStart { RowCount: 20 Inflight: 5 KeyFrom: 12345 } 2025-03-31T15:59:43.246093Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:738:2620], subTag: 2} Bootstrap called: RowCount: 20 Inflight: 5 KeyFrom: 12345 2025-03-31T15:59:43.249745Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:738:2620], subTag: 2} started# 5 actors each with inflight# 4 2025-03-31T15:59:43.249824Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 1} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2025-03-31T15:59:43.249874Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 2} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2025-03-31T15:59:43.249899Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 3} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2025-03-31T15:59:43.249923Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 4} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2025-03-31T15:59:43.249957Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 5} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2025-03-31T15:59:43.253139Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 1} session: ydb://session/3?node_id=1&id=MmZjMWY5YzAtMTc3YjZkODMtY2UwZDZiZTQtZDlkN2Y5MWE= 2025-03-31T15:59:43.258137Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 2} session: ydb://session/3?node_id=1&id=MzZlNWUzNDItMWYwNTZhM2QtN2YxNmE0YTgtMzQzNjA4MzA= 2025-03-31T15:59:43.258212Z node 1 :DS_LOAD_TEST DEBUG: 
TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 3} session: ydb://session/3?node_id=1&id=ODBlMTY1YjgtNTUzY2MzYjEtMWMyYjViM2YtODUxZDZkN2E= 2025-03-31T15:59:43.259893Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 4} session: ydb://session/3?node_id=1&id=N2ZlMGQyZjAtY2QzZjdhOWQtNjA4YWY4YzgtOGMyMTc3NzM= 2025-03-31T15:59:43.262222Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 5} session: ydb://session/3?node_id=1&id=YWZkY2Q5MDAtZWQ3Y2RkYjMtY2ZiYTM2NTctNGEwNzZhYjE= 2025-03-31T15:59:43.266741Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:753:2635], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:43.266881Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:778:2654], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:43.266959Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:779:2655], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:43.267021Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:780:2656], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:43.267065Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:781:2657], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:43.267121Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:782:2658], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:43.267209Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:43.276335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T15:59:43.350302Z node 1 :TX_PROXY ERROR: Actor# [1:799:2675] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-31T15:59:43.351205Z node 1 :TX_PROXY ERROR: Actor# [1:800:2676] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-31T15:59:43.352332Z node 1 :TX_PROXY ERROR: Actor# [1:801:2677] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-31T15:59:43.352912Z node 1 :TX_PROXY ERROR: Actor# [1:802:2678] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-31T15:59:43.545468Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:790:2666], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T15:59:43.545592Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:791:2667], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T15:59:43.545653Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:792:2668], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T15:59:43.545720Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:793:2669], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T15:59:43.545773Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:794:2670], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T15:59:43.585007Z node 1 :TX_PROXY ERROR: Actor# [1:900:2741] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:59:44.170731Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 3} finished in 1743436784.170671s, errors=0 2025-03-31T15:59:44.171068Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:738:2620], subTag: 2} finished: 3 { Tag: 3 DurationMs: 1743436784170 OperationsOK: 4 OperationsError: 0 } 2025-03-31T15:59:44.186563Z node 1 :TX_PROXY ERROR: Actor# [1:973:2779] txid# 281474976715668, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:59:44.254701Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 4} finished in 1743436784.254659s, errors=0 2025-03-31T15:59:44.255055Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:738:2620], subTag: 2} finished: 4 { Tag: 4 DurationMs: 1743436784254 OperationsOK: 4 OperationsError: 0 } 2025-03-31T15:59:44.268892Z node 1 :TX_PROXY ERROR: Actor# [1:1024:2801] txid# 281474976715673, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:59:44.338030Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 5} finished in 1743436784.337986s, errors=0 2025-03-31T15:59:44.338376Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:738:2620], subTag: 2} finished: 5 { Tag: 5 DurationMs: 1743436784337 OperationsOK: 4 OperationsError: 0 } 2025-03-31T15:59:44.355013Z node 1 :TX_PROXY ERROR: Actor# [1:1075:2823] txid# 281474976715678, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:59:44.432612Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 2} finished in 1743436784.432569s, errors=0 2025-03-31T15:59:44.433082Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:738:2620], subTag: 2} finished: 2 { Tag: 2 DurationMs: 1743436784432 OperationsOK: 4 OperationsError: 0 } 2025-03-31T15:59:44.448422Z node 1 :TX_PROXY ERROR: Actor# [1:1126:2845] txid# 281474976715683, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:59:44.537070Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 1} finished in 1743436784.537028s, errors=0 2025-03-31T15:59:44.537386Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:738:2620], subTag: 2} finished: 1 { Tag: 1 DurationMs: 1743436784537 OperationsOK: 4 OperationsError: 0 } 2025-03-31T15:59:44.537445Z 
node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:738:2620], subTag: 2} finished in 1.287923s, oks# 20, errors# 0 2025-03-31T15:59:44.537605Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:739:2621] with tag# 2 |69.9%| [TA] $(B)/ydb/core/tx/datashard/ut_locks/test-results/unittest/{meta.json ... results_accumulator.log} >> TPersQueueTest::WriteExisting >> TPersQueueTest::DirectReadPreCached |69.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPutGetStatusErasureMirror3Plus2 [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasure4Plus2Stripe [GOOD] >> TBlobStorageProxyTest::TestProxyRestoreOnDiscoverBlock [GOOD] >> TBlobStorageProxyTest::TestProxyRestoreOnGetStripe [GOOD] >> TBlobStorageProxyTest::TestProxyRestoreOnGetMirror3Plus2 >> TSchemeShardSubDomainTest::TopicDiskSpaceQuotas [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql2 [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndex+WithRestart-UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndex-WithRestart+UseSink >> KqpQueryPerf::MultiDeleteFromTable+QueryService-UseSink >> KqpQueryPerf::Update+QueryService+UseSink >> TBlobStorageProxyTest::TestCollectGarbageAfterLargeData [GOOD] |69.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPutGetStatusErasure4Plus2Stripe [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyRestoreOnDiscoverBlock [GOOD] Test command err: 2025-03-31T15:59:37.595383Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/0l5c/00107f/r3tmp/tmpaQMdPa//vdisk_bad_1/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 2 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 2 2025-03-31T15:59:37.613416Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:1:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-03-31T15:59:41.089237Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/0l5c/00107f/r3tmp/tmpaQMdPa//vdisk_bad_2/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 3 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 3 2025-03-31T15:59:41.128986Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:2:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] 
StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-03-31T15:59:42.625338Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/0l5c/00107f/r3tmp/tmpaQMdPa//vdisk_bad_2/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 3 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 3 2025-03-31T15:59:42.631354Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:2:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 
HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-03-31T15:59:44.271536Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/0l5c/00107f/r3tmp/tmpaQMdPa//vdisk_bad_2/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 3 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 3 2025-03-31T15:59:44.277242Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:2:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 
TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-03-31T15:59:45.803238Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/0l5c/00107f/r3tmp/tmpaQMdPa//vdisk_bad_2/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 3 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 3 2025-03-31T15:59:45.859496Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:2:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR >> KqpQueryPerf::Delete+QueryService-UseSink >> KqpWorkload::STOCK >> UpsertLoad::ShouldWriteDataBulkUpsertKeyFrom [GOOD] |69.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut |69.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut |69.9%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_locks/test-results/unittest/{meta.json ... results_accumulator.log} |69.9%| [LD] {RESULT} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut >> TBlobStorageProxyTest::TestProxyRestoreOnGetBlock [GOOD] >> TBlobStorageProxyTest::TestProxyRestoreOnGetMirror >> KqpQueryPerf::MultiRead+QueryService |69.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestCollectGarbageAfterLargeData [GOOD] |69.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector |70.0%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector |70.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql2 [GOOD] Test command err: 2025-03-31T15:59:40.853684Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:59:40.853919Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:59:40.854083Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a0c/r3tmp/tmpjEEiLd/pdisk_1.dat 2025-03-31T15:59:41.267069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T15:59:41.307004Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:41.346475Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:59:41.346625Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:59:41.360287Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:59:41.458332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T15:59:41.813214Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertLocalMkqlStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "usertable" } UpsertLocalMkqlStart { RowCount: 10 Inflight: 3 } 2025-03-31T15:59:41.813391Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:738:2620], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 1, target# TabletId: 72075186224037888 TableId: 2 TableName: "usertable" 2025-03-31T15:59:41.953297Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:738:2620], subTag: 2} TUpsertActor finished in 0.139439s, errors=0 2025-03-31T15:59:41.953444Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:739:2621] with tag# 2 2025-03-31T15:59:45.937478Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:101:2147], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:59:45.937749Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T15:59:45.937841Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a0c/r3tmp/tmpJ09jta/pdisk_1.dat 2025-03-31T15:59:46.251310Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T15:59:46.289680Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:46.338176Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:59:46.338319Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:59:46.350025Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:59:46.453498Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T15:59:46.882001Z node 2 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertLocalMkqlStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "JustTable" } UpsertLocalMkqlStart { RowCount: 10 Inflight: 3 } 2025-03-31T15:59:46.882157Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:738:2620], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 1, target# TabletId: 72075186224037888 TableId: 2 TableName: "JustTable" 2025-03-31T15:59:47.024000Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:738:2620], subTag: 2} TUpsertActor finished in 0.141419s, errors=0 2025-03-31T15:59:47.024125Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [2:739:2621] with tag# 2 >> KqpQueryPerf::IndexUpdateOn-QueryService-UseSink >> TBlobStorageProxyTest::TestQuadrupleGroups [GOOD] >> TBlobStorageProxyTest::TestSingleFailure >> ReadLoad::ShouldReadIterateMoreThanRows [GOOD] >> UpsertLoad::ShouldDropCreateTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteDataBulkUpsertKeyFrom [GOOD] Test command err: 2025-03-31T15:59:41.772600Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:59:41.772861Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:59:41.773032Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0009ef/r3tmp/tmpxZAhli/pdisk_1.dat 2025-03-31T15:59:42.184052Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T15:59:42.225653Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:42.269892Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:59:42.270029Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:59:42.281695Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:59:42.364594Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T15:59:42.765355Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "usertable" } UpsertBulkStart { RowCount: 100 Inflight: 3 BatchSize: 7 } 2025-03-31T15:59:42.765541Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:738:2620], subTag: 2} TUpsertActor Bootstrap called: RowCount: 100 Inflight: 3 BatchSize: 7 with type# 0, target# TabletId: 72075186224037888 TableId: 2 TableName: "usertable" 2025-03-31T15:59:42.859965Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:738:2620], subTag: 2} TUpsertActor finished in 0.093962s, errors=0 2025-03-31T15:59:42.860058Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:739:2621] with tag# 2 2025-03-31T15:59:46.811611Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:101:2147], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:59:46.811965Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T15:59:46.812054Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0009ef/r3tmp/tmp0snfcW/pdisk_1.dat 2025-03-31T15:59:47.119719Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T15:59:47.163020Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:47.204763Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:59:47.204891Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:59:47.219172Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:59:47.307606Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T15:59:47.600797Z node 2 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "usertable" } UpsertBulkStart { RowCount: 10 Inflight: 3 KeyFrom: 12345 } 2025-03-31T15:59:47.600931Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:738:2620], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 KeyFrom: 12345 with type# 0, target# TabletId: 72075186224037888 TableId: 2 TableName: "usertable" 2025-03-31T15:59:47.683774Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:738:2620], subTag: 2} TUpsertActor finished in 0.082424s, errors=0 2025-03-31T15:59:47.683863Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [2:739:2621] with tag# 2 >> KqpQueryPerf::IndexReplace-QueryService-UseSink >> KqpQueryPerf::MultiDeleteFromTable-QueryService-UseSink |70.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id |70.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id |70.0%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id >> TBlobStorageProxyTest::TestEmptyDiscoverMaxi [GOOD] >> TBlobStorageProxyTest::TestProxySimpleDiscoverMaxi [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> ReadLoad::ShouldReadIterateMoreThanRows [GOOD] Test command err: 2025-03-31T15:59:40.848564Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:59:40.848769Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:59:40.848904Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a14/r3tmp/tmpRlnqXz/pdisk_1.dat 2025-03-31T15:59:41.247030Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T15:59:41.295370Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:41.344871Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:59:41.345025Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:59:41.358946Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:59:41.462661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T15:59:41.989801Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 warmups table# usertable in dir# /Root with rows# 1000 2025-03-31T15:59:42.000064Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:738:2620], subTag: 1} TUpsertActor Bootstrap called: RowCount: 1000 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" 2025-03-31T15:59:42.083980Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:738:2620], subTag: 1} TUpsertActor finished in 0.083490s, errors=0 2025-03-31T15:59:42.084690Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kReadIteratorStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "usertable" } TargetShard { TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" } ReadIteratorStart { RowCount: 1000 Inflights: 1 Chunks: 0 Chunks: 1 Chunks: 10 } 2025-03-31T15:59:42.084855Z node 1 :DS_LOAD_TEST NOTICE: ReadIteratorLoadScenario# [1:747:2629] with id# {Tag: 0, parent: [1:738:2620], subTag: 3} Bootstrap called: RowCount: 1000 Inflights: 1 Chunks: 0 Chunks: 1 Chunks: 10 2025-03-31T15:59:42.086336Z node 1 :DS_LOAD_TEST INFO: ReadIteratorLoadScenario# {Tag: 0, parent: [1:738:2620], subTag: 3} will work with tablet# 72075186224037888 with ownerId# 72057594046644480 with tableId# 2 resolved for path# /Root/usertable with columnsCount# 11, keyColumnCount# 1 2025-03-31T15:59:42.086526Z node 1 :DS_LOAD_TEST INFO: started fullscan actor# [1:750:2632] 2025-03-31T15:59:42.086670Z node 1 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 1} Bootstrap called, sample# 0 2025-03-31T15:59:42.086773Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 1} Connect to# 72075186224037888 called 2025-03-31T15:59:42.087956Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 1} Handle TEvClientConnected called, Status# OK 2025-03-31T15:59:42.094565Z node 1 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 1} finished in 0.006534s, read# 1000 2025-03-31T15:59:42.095023Z node 1 :DS_LOAD_TEST NOTICE: fullscan actor# [1:750:2632] with chunkSize# 0 finished: 0 { DurationMs: 6 OperationsOK: 1000 OperationsError: 0 } 2025-03-31T15:59:42.099033Z node 1 :DS_LOAD_TEST INFO: started fullscan actor# [1:753:2635] 2025-03-31T15:59:42.099190Z node 1 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 2} Bootstrap called, sample# 0 2025-03-31T15:59:42.099236Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 2} Connect to# 72075186224037888 called 2025-03-31T15:59:42.099647Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 2} Handle TEvClientConnected called, Status# OK 2025-03-31T15:59:42.378855Z node 1 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 2} finished in 0.279131s, read# 1000 2025-03-31T15:59:42.379021Z node 1 :DS_LOAD_TEST NOTICE: fullscan actor# [1:753:2635] with chunkSize# 1 finished: 0 { DurationMs: 279 OperationsOK: 1000 OperationsError: 0 } 2025-03-31T15:59:42.379288Z node 1 :DS_LOAD_TEST INFO: started fullscan actor# [1:756:2638] 2025-03-31T15:59:42.379343Z node 1 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 3} Bootstrap called, sample# 0 2025-03-31T15:59:42.379379Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 3} Connect to# 72075186224037888 called 2025-03-31T15:59:42.379743Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 3} Handle TEvClientConnected called, Status# OK 2025-03-31T15:59:42.494259Z node 1 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 3} finished in 0.114450s, read# 1000 2025-03-31T15:59:42.494456Z node 1 :DS_LOAD_TEST NOTICE: fullscan actor# [1:756:2638] with chunkSize# 10 finished: 0 { DurationMs: 114 OperationsOK: 1000 OperationsError: 0 } 2025-03-31T15:59:42.494588Z node 1 :DS_LOAD_TEST INFO: started fullscan actor# [1:759:2641] 2025-03-31T15:59:42.494646Z node 1 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 4} Bootstrap called, sample# 1000 2025-03-31T15:59:42.494683Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 4} Connect to# 72075186224037888 called 2025-03-31T15:59:42.494953Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 4} Handle TEvClientConnected called, Status# OK 2025-03-31T15:59:42.500775Z node 1 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 4} finished in 0.005119s, sampled# 1000, iter finished# 1, oks# 1000 2025-03-31T15:59:42.500952Z node 1 :DS_LOAD_TEST INFO: ReadIteratorLoadScenario# {Tag: 0, parent: [1:738:2620], subTag: 3} received keyCount# 1000 2025-03-31T15:59:42.501168Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorLoadScenario# {Tag: 0, parent: [1:738:2620], subTag: 3} started read actor with id# [1:762:2644] 2025-03-31T15:59:42.501246Z node 1 :DS_LOAD_TEST NOTICE: TReadIteratorPoints# {Tag: 0, parent: [1:747:2629], subTag: 5} Bootstrap called, will read keys# 1000 2025-03-31T15:59:43.099960Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorLoadScenario# {Tag: 0, parent: [1:738:2620], subTag: 3} received point times# 1000, Inflight left# 0 2025-03-31T15:59:43.100415Z node 1 :DS_LOAD_TEST INFO: headread with inflight# 1 finished: 0 { DurationMs: 599 OperationsOK: 1000 OperationsError: 0 Info: "single row head read hist (ms):\n50%: 1\n95%: 1\n99%: 6\n99.9%: 21\n" } 2025-03-31T15:59:43.100611Z node 1 :DS_LOAD_TEST NOTICE: ReadIteratorLoadScenario# {Tag: 0, parent: [1:738:2620], subTag: 3} finished in 1.015577s with report: { DurationMs: 6 OperationsOK: 1000 OperationsError: 0 PrefixInfo: "Test run# 1, type# FullScan with chunk# inf" } { DurationMs: 279 OperationsOK: 1000 OperationsError: 0 PrefixInfo: "Test run# 2, type# FullScan with chunk# 1" } { DurationMs: 114 OperationsOK: 1000 OperationsError: 0 PrefixInfo: "Test run# 3, type# FullScan with chunk# 10" } { DurationMs: 599 OperationsOK: 1000 OperationsError: 0 Info: "single row head read hist (ms):\n50%: 1\n95%: 1\n99%: 6\n99.9%: 21\n" PrefixInfo: "Test run# 4, type# ReadHeadPoints with inflight# 1" } 2025-03-31T15:59:43.100997Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:747:2629] with tag# 3 2025-03-31T15:59:46.978485Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:101:2147], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:59:46.978918Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T15:59:46.979016Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a14/r3tmp/tmpNwBVqF/pdisk_1.dat 2025-03-31T15:59:47.341211Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T15:59:47.373601Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:47.412586Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:59:47.412721Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:59:47.424800Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:59:47.509284Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T15:59:47.827840Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 warmups table# usertable in dir# /Root with rows# 10 2025-03-31T15:59:47.828213Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:738:2620], subTag: 1} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" 2025-03-31T15:59:47.854079Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:738:2620], subTag: 1} TUpsertActor finished in 0.025511s, errors=0 2025-03-31T15:59:47.854714Z node 2 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kReadIteratorStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "usertable" } TargetShard { TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" } ReadIteratorStart { RowCount: 10 ReadCount: 1000 Inflights: 1 Chunks: 0 Chunks: 1 Chunks: 10 } 2025-03-31T15:59:47.854852Z node 2 :DS_LOAD_TEST NOTICE: ReadIteratorLoadScenario# [2:747:2629] with id# {Tag: 0, parent: [2:738:2620], subTag: 3} Bootstrap called: RowCount: 10 ReadCount: 1000 Inflights: 1 Chunks: 0 Chunks: 1 Chunks: 10 2025-03-31T15:59:47.856060Z node 2 :DS_LOAD_TEST INFO: ReadIteratorLoadScenario# {Tag: 0, parent: [2:738:2620], subTag: 3} will work with tablet# 72075186224037888 with ownerId# 72057594046644480 with tableId# 2 resolved for path# /Root/usertable with columnsCount# 11, keyColumnCount# 1 2025-03-31T15:59:47.856209Z node 2 :DS_LOAD_TEST INFO: started fullscan actor# [2:750:2632] 2025-03-31T15:59:47.856314Z node 2 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [2:747:2629], subTag: 1} Bootstrap called, sample# 0 2025-03-31T15:59:47.856374Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [2:747:2629], subTag: 1} Connect to# 72075186224037888 called 2025-03-31T15:59:47.856718Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [2:747:2629], subTag: 1} Handle TEvClientConnected called, Status# OK 2025-03-31T15:59:47.857492Z node 2 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [2:747:2629], subTag: 1} finished in 0.000722s, read# 10 2025-03-31T15:59:47.857658Z node 2 :DS_LOAD_TEST NOTICE: fullscan actor# [2:750:2632] with chunkSize# 0 finished: 0 { DurationMs: 0 OperationsOK: 10 OperationsError: 0 } 2025-03-31T15:59:47.857794Z node 2 :DS_LOAD_TEST INFO: started fullscan actor# [2:753:2635] 2025-03-31T15:59:47.857841Z node 2 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [2:747:2629], subTag: 2} Bootstrap called, sample# 0 2025-03-31T15:59:47.857870Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [2:747:2629], subTag: 2} Connect to# 72075186224037888 called 2025-03-31T15:59:47.858113Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [2:747:2629], subTag: 2} Handle TEvClientConnected called, Status# OK 2025-03-31T15:59:47.862837Z node 2 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [2:747:2629], subTag: 2} finished in 0.004675s, read# 10 2025-03-31T15:59:47.862970Z node 2 :DS_LOAD_TEST NOTICE: fullscan actor# [2:753:2635] with chunkSize# 1 finished: 0 { DurationMs: 4 OperationsOK: 10 OperationsError: 0 } 2025-03-31T15:59:47.863069Z node 2 :DS_LOAD_TEST INFO: started fullscan actor# [2:756:2638] 2025-03-31T15:59:47.863146Z node 2 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [2:747:2629], subTag: 3} Bootstrap called, sample# 0 2025-03-31T15:59:47.863193Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [2:747:2629], subTag: 3} Connect to# 72075186224037888 called 2025-03-31T15:59:47.863434Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [2:747:2629], subTag: 3} Handle TEvClientConnected called, Status# OK 2025-03-31T15:59:47.864339Z node 2 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [2:747:2629], subTag: 3} finished in 0.000866s, read# 10 2025-03-31T15:59:47.864456Z node 2 :DS_LOAD_TEST NOTICE: fullscan actor# [2:756:2638] with chunkSize# 10 finished: 0 { DurationMs: 0 OperationsOK: 10 OperationsError: 0 } 2025-03-31T15:59:47.864565Z node 2 :DS_LOAD_TEST INFO: started fullscan actor# [2:759:2641] 2025-03-31T15:59:47.864622Z node 2 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [2:747:2629], subTag: 4} Bootstrap called, sample# 10 2025-03-31T15:59:47.864650Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [2:747:2629], subTag: 4} Connect to# 72075186224037888 called 2025-03-31T15:59:47.864870Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [2:747:2629], subTag: 4} Handle TEvClientConnected called, Status# OK 2025-03-31T15:59:47.870663Z node 2 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [2:747:2629], subTag: 4} finished in 0.005686s, sampled# 10, iter finished# 1, oks# 10 2025-03-31T15:59:47.870826Z node 2 :DS_LOAD_TEST INFO: ReadIteratorLoadScenario# {Tag: 0, parent: [2:738:2620], subTag: 3} received keyCount# 10 2025-03-31T15:59:47.871006Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorLoadScenario# {Tag: 0, parent: [2:738:2620], subTag: 3} started read actor with id# [2:762:2644] 2025-03-31T15:59:47.871070Z node 2 :DS_LOAD_TEST NOTICE: TReadIteratorPoints# {Tag: 0, parent: [2:747:2629], subTag: 5} Bootstrap called, will read keys# 10 2025-03-31T15:59:48.850950Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorLoadScenario# {Tag: 0, parent: [2:738:2620], subTag: 3} received point times# 1000, Inflight left# 0 2025-03-31T15:59:48.851264Z node 2 :DS_LOAD_TEST INFO: headread with inflight# 1 finished: 0 { DurationMs: 980 OperationsOK: 1000 OperationsError: 0 Info: "single row head read hist (ms):\n50%: 1\n95%: 4\n99%: 13\n99.9%: 67\n" } 2025-03-31T15:59:48.851477Z node 2 :DS_LOAD_TEST NOTICE: ReadIteratorLoadScenario# {Tag: 0, parent: [2:738:2620], subTag: 3} finished in 0.996428s with report: { DurationMs: 0 OperationsOK: 10 OperationsError: 0 PrefixInfo: "Test run# 1, type# FullScan with chunk# inf" } { DurationMs: 4 OperationsOK: 10 OperationsError: 0 PrefixInfo: "Test run# 2, type# FullScan with chunk# 1" } { DurationMs: 0 OperationsOK: 10 OperationsError: 0 PrefixInfo: "Test run# 3, type# FullScan with chunk# 10" } { DurationMs: 980 OperationsOK: 1000 OperationsError: 0 Info: "single row head read hist (ms):\n50%: 1\n95%: 4\n99%: 13\n99.9%: 67\n" PrefixInfo: "Test run# 4, type# ReadHeadPoints with inflight# 1" } 2025-03-31T15:59:48.851591Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [2:747:2629] with tag# 3 >> KqpQueryPerf::MultiDeleteFromTable-QueryService+UseSink >> KqpQueryPerf::IndexUpsert-QueryService-UseSink
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:59:41.147921Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:59:41.148127Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a24/r3tmp/tmpSeJPmI/pdisk_1.dat 2025-03-31T15:59:41.576395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T15:59:41.637103Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:41.675962Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:59:41.676107Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:59:41.688603Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:59:41.780807Z node 1 :DS_LOAD_TEST NOTICE: TLoad# 0 creates table# BrandNewTable in dir# /Root 2025-03-31T15:59:42.117479Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:645:2552], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:42.117651Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:42.147533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T15:59:42.719486Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 warmups table# BrandNewTable in dir# /Root with rows# 10 2025-03-31T15:59:42.721446Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:641:2549], subTag: 1} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "BrandNewTable" 2025-03-31T15:59:42.745832Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:641:2549], subTag: 1} TUpsertActor finished in 0.024018s, errors=0 2025-03-31T15:59:42.746173Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "BrandNewTable" CreateTable: true MinParts: 11 MaxParts: 13 MaxPartSizeMb: 1234 } TargetShard { TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "BrandNewTable" } UpsertBulkStart { RowCount: 10 Inflight: 3 } 2025-03-31T15:59:42.746301Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:641:2549], subTag: 3} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 0, target# TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "BrandNewTable" 2025-03-31T15:59:42.804493Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:641:2549], subTag: 3} TUpsertActor finished in 0.057827s, errors=0 2025-03-31T15:59:42.804585Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:755:2630] with tag# 3 2025-03-31T15:59:47.282187Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:101:2147], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:59:47.282569Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T15:59:47.282664Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a24/r3tmp/tmpLLiwxi/pdisk_1.dat 2025-03-31T15:59:47.627725Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T15:59:47.665011Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:47.710292Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:59:47.710425Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:59:47.723353Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:59:47.814979Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T15:59:48.121900Z node 2 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 } UpsertBulkStart { RowCount: 100 Inflight: 3 } 2025-03-31T15:59:48.122030Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:738:2620], subTag: 2} TUpsertActor Bootstrap called: RowCount: 100 Inflight: 3 with type# 0, target# TabletId: 72075186224037888 TableId: 2 2025-03-31T15:59:48.588342Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:738:2620], subTag: 2} TUpsertActor finished in 0.465909s, errors=0 2025-03-31T15:59:48.588446Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [2:739:2621] with tag# 2 2025-03-31T15:59:48.598673Z node 2 :DS_LOAD_TEST NOTICE: TLoad# 0 drops table# table in dir# /Root 2025-03-31T15:59:48.618702Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:780:2662], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:48.618843Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:48.881288Z node 2 :DS_LOAD_TEST NOTICE: TLoad# 0 creates table# table in dir# /Root 2025-03-31T15:59:48.901730Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:846:2708], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:48.901849Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:48.924349Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-31T15:59:48.995028Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2025-03-31T15:59:49.216334Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 warmups table# table in dir# /Root with rows# 10 2025-03-31T15:59:49.216673Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:777:2659], subTag: 1} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037889 TableId: 3 WorkingDir: "/Root" TableName: "table" 2025-03-31T15:59:49.231524Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:777:2659], subTag: 1} TUpsertActor finished in 0.014529s, errors=0 2025-03-31T15:59:49.231889Z node 2 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "table" DropTable: true } TargetShard { TabletId: 72075186224037889 TableId: 3 WorkingDir: "/Root" TableName: "table" } UpsertBulkStart { RowCount: 10 Inflight: 3 } 2025-03-31T15:59:49.232054Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:777:2659], subTag: 3} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 0, target# TabletId: 72075186224037889 TableId: 3 WorkingDir: "/Root" TableName: "table" 2025-03-31T15:59:49.291883Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:777:2659], subTag: 3} TUpsertActor finished in 0.059540s, errors=0 2025-03-31T15:59:49.291981Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [2:939:2782] with tag# 3 |70.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestEmptyDiscoverMaxi [GOOD] >> KqpFlipJoin::Right_1 [GOOD] >> KqpFlipJoin::Right_2 |70.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxySimpleDiscoverMaxi [GOOD] >> KqpQueryPerf::Replace+QueryService-UseSink >> KqpQueryPerf::RangeRead-QueryService ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::TopicDiskSpaceQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T15:59:17.003513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T15:59:17.003622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:17.003665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-03-31T15:59:17.003702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T15:59:17.003769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T15:59:17.003836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T15:59:17.003911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T15:59:17.004013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T15:59:17.004336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:59:17.088231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:59:17.088293Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:17.095599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:59:17.095723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T15:59:17.095913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T15:59:17.102840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T15:59:17.103113Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T15:59:17.103957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:17.104262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T15:59:17.106822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:17.108025Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:17.110858Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:17.111170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T15:59:17.111237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:17.111289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T15:59:17.111471Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T15:59:17.119897Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T15:59:17.287140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T15:59:17.287388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:17.287614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T15:59:17.287854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T15:59:17.287914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:17.291558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:17.291707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T15:59:17.291883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:17.291957Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T15:59:17.292017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T15:59:17.292054Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T15:59:17.294873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:17.294951Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T15:59:17.294991Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T15:59:17.298199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:17.298261Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:17.298324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:17.298385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T15:59:17.302130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:59:17.304131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T15:59:17.304347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T15:59:17.305390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:17.305539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:17.305608Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:17.305884Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T15:59:17.305956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T15:59:17.306135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T15:59:17.306234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T15:59:17.312036Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:17.312097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T15:59:17.312287Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:17.312334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T15:59:17.312565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T15:59:17.312614Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T15:59:17.312731Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:17.312767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:17.312807Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T15:59:17.312840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:17.312885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T15:59:17.312925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T15:59:17.312978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T15:59:17.313029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T15:59:17.313093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T15:59:17.313131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T15:59:17.313166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T15:59:17.315178Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-03-31T15:59:17.315305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T15:59:17.315346Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, coun ... Id 72075186233409548 2025-03-31T15:59:46.373189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-03-31T15:59:46.373287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-03-31T15:59:46.382440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 3150, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T15:59:46.382618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 AckTo { RawX1: 0 RawX2: 0 } } Step: 3150 MediatorID: 72075186233409547 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T15:59:46.382677Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropPQ TPropose, operationId: 103:0 HandleReply TEvOperationPlan, step: 3150, at schemeshard: 72057594046678944 2025-03-31T15:59:46.382890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-31T15:59:46.382977Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 128 -> 240 2025-03-31T15:59:46.383210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-31T15:59:46.383294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-03-31T15:59:46.386089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2025-03-31T15:59:46.386147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2025-03-31T15:59:46.386707Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T15:59:46.386751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T15:59:46.386899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T15:59:46.386960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-03-31T15:59:46.387106Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T15:59:46.387155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-03-31T15:59:46.387197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-03-31T15:59:46.387233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 103, path id: 3 2025-03-31T15:59:46.387701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-31T15:59:46.387746Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-03-31T15:59:46.387858Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-03-31T15:59:46.387896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-31T15:59:46.387937Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-03-31T15:59:46.387975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-31T15:59:46.388018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2025-03-31T15:59:46.388062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-31T15:59:46.388107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-03-31T15:59:46.388145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-03-31T15:59:46.388304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-31T15:59:46.388357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 103, publications: 2, subscribers: 0 2025-03-31T15:59:46.388404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 2], 9 2025-03-31T15:59:46.388440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2025-03-31T15:59:46.389742Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-03-31T15:59:46.389850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-03-31T15:59:46.389888Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2025-03-31T15:59:46.389933Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-03-31T15:59:46.389976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-03-31T15:59:46.390518Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 2025-03-31T15:59:46.391211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-31T15:59:46.391267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-03-31T15:59:46.391337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-03-31T15:59:46.392392Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 2025-03-31T15:59:46.392717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-03-31T15:59:46.393359Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 9 PathOwnerId: 72057594046678944, cookie: 103 2025-03-31T15:59:46.393446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 9 PathOwnerId: 72057594046678944, cookie: 103 2025-03-31T15:59:46.393477Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2025-03-31T15:59:46.393512Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 9 2025-03-31T15:59:46.393545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-31T15:59:46.393623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2025-03-31T15:59:46.394021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-03-31T15:59:46.398944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-03-31T15:59:46.399140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-03-31T15:59:46.399464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-03-31T15:59:46.399606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-03-31T15:59:46.401107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-03-31T15:59:46.401673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-03-31T15:59:46.401736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-03-31T15:59:46.402193Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-03-31T15:59:46.402293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-03-31T15:59:46.402331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:771:2686] TestWaitNotification: OK eventTxId 103 2025-03-31T15:59:46.988394Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T15:59:46.988665Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_1" took 319us result status StatusSuccess 2025-03-31T15:59:46.989103Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1" PathDescription { Self { Name: "USER_1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SubDomainStateVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "name_USER_0_kind_hdd-1" Kind: "hdd-1" } StoragePools { Name: "name_USER_0_kind_hdd-2" Kind: "hdd-2" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 1 } SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |70.0%| [TA] $(B)/ydb/core/load_test/ut_ycsb/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpQueryPerf::IndexInsert-QueryService-UseSink |70.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/ut/ydb-core-sys_view-ut |70.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/ut/ydb-core-sys_view-ut |70.0%| [TA] {RESULT} $(B)/ydb/core/load_test/ut_ycsb/test-results/unittest/{meta.json ... results_accumulator.log} |70.0%| [LD] {RESULT} $(B)/ydb/core/sys_view/ut/ydb-core-sys_view-ut >> TBlobStorageProxyTest::TestSingleFailure [GOOD] >> KqpIndexLookupJoin::LeftJoinSkipNullFilter-StreamLookup [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-54 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-55 >> KqpQueryPerf::AggregateToScalar+QueryService >> KqpQueryPerf::Update-QueryService+UseSink |70.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestSingleFailure [GOOD] Test command err: 2025-03-31T15:59:50.960917Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk.
Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/0l5c/001063/r3tmp/tmp6kIXIR//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1 2025-03-31T15:59:50.961361Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:0:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR >> TargetDiscoverer::Transfer >> KqpJoin::JoinDupColumnRightPure [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftJoinSkipNullFilter-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 18674, MsgBus: 23376 2025-03-31T15:59:44.378532Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488003970466730188:2125];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:59:44.378570Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0019b2/r3tmp/tmpNmUfeh/pdisk_1.dat 2025-03-31T15:59:45.296545Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:59:45.296629Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:59:45.347047Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:45.362968Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18674, node 1 2025-03-31T15:59:45.587397Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:59:45.587418Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:59:45.587425Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:59:45.587537Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23376 TClient is connected to server localhost:23376 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:59:46.518873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:46.536472Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T15:59:46.550861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T15:59:46.763638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:46.981621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:47.089139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:48.967028Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003987646601076:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:48.967157Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:49.342606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T15:59:49.379250Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488003970466730188:2125];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:59:49.379310Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:59:49.412476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T15:59:49.464707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T15:59:49.505407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T15:59:49.549414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T15:59:49.604477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T15:59:49.678166Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003991941568888:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:49.678232Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:49.678443Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003991941568893:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:49.682447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T15:59:49.701685Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488003991941568895:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T15:59:49.795918Z node 1 :TX_PROXY ERROR: Actor# [1:7488003991941568950:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:59:51.158267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T15:59:51.200640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T15:59:51.280798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T15:59:51.350797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T15:59:51.402231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-31T15:59:51.449594Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedAfterRead+UseSink [GOOD] >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedAfterRead-UseSink >> DataShardSnapshots::LockedWritesLimitedPerKey-UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithPendingVolatileCommit+UseSink >> TBlobStorageProxyTest::TestProxyLongTailDiscoverSingleFailure [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::JoinDupColumnRightPure [GOOD] Test command err: Trying to start YDB, gRPC: 7058, MsgBus: 18685 2025-03-31T15:59:44.823552Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488003970478393529:2266];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:59:44.823603Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0019af/r3tmp/tmpxzt0AZ/pdisk_1.dat 2025-03-31T15:59:45.319467Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:45.324100Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:59:45.324173Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:59:45.325335Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7058, node 1 2025-03-31T15:59:45.539615Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:59:45.539637Z node 1 
:NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:59:45.539649Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:59:45.539755Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18685 TClient is connected to server localhost:18685 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:59:46.582422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:46.611868Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T15:59:46.622044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:46.907652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:47.225729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:47.317771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:49.823222Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488003970478393529:2266];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:59:49.823294Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:59:49.894737Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003991953231573:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:49.894854Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:50.282967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T15:59:50.351131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T15:59:50.421150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T15:59:50.484364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T15:59:50.551459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T15:59:50.620530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T15:59:50.742672Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003996248199387:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:50.742759Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:50.742834Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003996248199392:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:50.747268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T15:59:50.764529Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-31T15:59:50.766402Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488003996248199394:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T15:59:50.851864Z node 1 :TX_PROXY ERROR: Actor# [1:7488003996248199451:3458] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:59:52.207680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T15:59:52.289789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T15:59:52.347038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 >> KqpIndexes::PrefixedVectorIndexOrderByCosineSimilarityNotNullableLevel2 [GOOD] >> KqpIndexes::PrefixedVectorIndexOrderByCosineDistanceNullableLevel2 >> KqpQueryPerf::Delete+QueryService-UseSink [GOOD] >> KqpQueryPerf::Delete+QueryService+UseSink |70.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> KqpQueryPerf::MultiDeleteFromTable+QueryService-UseSink [GOOD] >> KqpQueryPerf::MultiDeleteFromTable+QueryService+UseSink |70.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest |70.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut |70.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut |70.1%| [LD] {RESULT} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyLongTailDiscoverSingleFailure [GOOD] Test command err: 2025-03-31T15:59:43.708881Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/0l5c/001023/r3tmp/tmpQZpI0s//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1 2025-03-31T15:59:43.721292Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:0:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-03-31T15:59:45.789666Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/0l5c/001023/r3tmp/tmpQZpI0s//vdisk_bad_1/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 2 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 2 2025-03-31T15:59:45.874715Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:1:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] 
StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-03-31T15:59:47.985132Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/0l5c/001023/r3tmp/tmpQZpI0s//vdisk_bad_2/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 3 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 3 2025-03-31T15:59:48.015458Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:2:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 
HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-03-31T15:59:50.253624Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 4 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/0l5c/001023/r3tmp/tmpQZpI0s//vdisk_bad_3/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 4 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 4 2025-03-31T15:59:50.283543Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:3:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 
TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 4 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-03-31T15:59:52.807711Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 5 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/0l5c/001023/r3tmp/tmpQZpI0s//vdisk_bad_4/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 5 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 5 2025-03-31T15:59:52.923515Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:4:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 
0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 5 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-03-31T15:59:54.862091Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 6 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/0l5c/001023/r3tmp/tmpQZpI0s//vdisk_bad_5/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 6 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 6 2025-03-31T15:59:54.932109Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:5:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 
HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 6 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR >> KqpQueryPerf::Update+QueryService+UseSink [GOOD] |70.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest |70.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view |70.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view |70.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view >> KqpQueryPerf::MultiRead+QueryService [GOOD] >> TBlobStorageProxyTest::TestProxyRestoreOnGetMirror [GOOD] |70.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows |70.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows |70.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows |70.1%| [TA] $(B)/ydb/services/fq/ut_integration/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TBlobStorageProxyTest::TestProxyRestoreOnGetMirror3Plus2 [GOOD] >> CdcStreamChangeCollector::InsertSingleRow >> DataShardSnapshots::DelayedWriteReadableAfterSplit [GOOD] >> DataShardSnapshots::DelayedWriteReplyAfterSplit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Update+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 20359, MsgBus: 4982 2025-03-31T15:59:48.272121Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488003988742431955:2213];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000fab/r3tmp/tmpvCwd4P/pdisk_1.dat 2025-03-31T15:59:48.699764Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T15:59:48.928041Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:59:48.928168Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:59:48.961307Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:59:49.004285Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20359, node 1 2025-03-31T15:59:49.347695Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:59:49.347716Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:59:49.347722Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:59:49.347821Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4982 TClient is connected to server localhost:4982 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:59:50.648421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:50.686610Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
2025-03-31T15:59:50.705630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T15:59:50.989463Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:51.288812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:51.400121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:53.256846Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488003988742431955:2213];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:59:53.288900Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:59:53.911285Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004010217270029:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:53.911372Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:54.358841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T15:59:54.433289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T15:59:54.518042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T15:59:54.569105Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T15:59:54.624437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T15:59:54.706017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T15:59:54.784702Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004014512237849:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:54.784785Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:54.785027Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004014512237854:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:54.789053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T15:59:54.804249Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488004014512237856:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T15:59:54.903548Z node 1 :TX_PROXY ERROR: Actor# [1:7488004014512237912:3460] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |70.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut |70.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut |70.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyRestoreOnGetMirror [GOOD] >> AssignTxId::Basic >> AsyncIndexChangeCollector::CoveredIndexUpdateCoveredColumn |70.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |70.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyRestoreOnGetMirror3Plus2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::MultiRead+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 4504, MsgBus: 15385 2025-03-31T15:59:48.971515Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488003989174994931:2266];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:59:48.971565Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000f99/r3tmp/tmpiNxCn1/pdisk_1.dat 2025-03-31T15:59:50.051837Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:50.055966Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:59:50.056061Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:59:50.067225Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:59:50.072675Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4504, node 1 2025-03-31T15:59:50.292629Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:59:50.292646Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:59:50.292668Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:59:50.292744Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15385 TClient is connected to server localhost:15385 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:59:51.309490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:51.344949Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T15:59:51.356615Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:51.608744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:51.864371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:51.983261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:53.971847Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488003989174994931:2266];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:59:54.016053Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:59:54.162358Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004014944800283:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:54.162484Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:54.519284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T15:59:54.600977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T15:59:54.681542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T15:59:54.731790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T15:59:54.801178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T15:59:54.923160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T15:59:55.060846Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004019239768102:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:55.060948Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:55.061347Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004019239768107:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:55.067934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T15:59:55.090676Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488004019239768109:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T15:59:55.180433Z node 1 :TX_PROXY ERROR: Actor# [1:7488004019239768168:3461] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |70.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |70.2%| [TA] {RESULT} $(B)/ydb/services/fq/ut_integration/test-results/unittest/{meta.json ... results_accumulator.log} |70.2%| [LD] {RESULT} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut >> KqpFlipJoin::Right_2 [GOOD] >> TargetDiscoverer::Transfer [GOOD] >> KqpQueryPerf::RangeRead-QueryService [GOOD] >> KqpQueryPerf::MultiDeleteFromTable-QueryService-UseSink [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-55 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-56 |70.2%| [TA] $(B)/ydb/core/tx/schemeshard/ut_subdomain/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpQueryPerf::Replace+QueryService-UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndex-WithRestart+UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndex+WithRestart+UseSink |70.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut |70.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut |70.2%| [TA] $(B)/ydb/core/persqueue/ut/ut_with_sdk/test-results/unittest/{meta.json ... results_accumulator.log} |70.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat |70.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat |70.2%| [LD] {RESULT} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut >> KqpQueryPerf::MultiDeleteFromTable-QueryService+UseSink [GOOD] |70.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |70.2%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_subdomain/test-results/unittest/{meta.json ... 
results_accumulator.log} |70.2%| [LD] {RESULT} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::RangeRead-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 9784, MsgBus: 25564 2025-03-31T15:59:52.466566Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004005074770881:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:59:52.468050Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000f61/r3tmp/tmpHaE4zz/pdisk_1.dat 2025-03-31T15:59:53.023038Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:53.076872Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:59:53.076986Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:59:53.083438Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9784, node 1 2025-03-31T15:59:53.232590Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:59:53.232622Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:59:53.232628Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:59:53.232767Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25564 TClient is connected to server localhost:25564 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:59:53.856961Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:53.876969Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T15:59:53.897580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T15:59:54.061787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:54.230675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:54.319332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:56.792499Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004022254641702:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:56.792637Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:57.419372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T15:59:57.452849Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488004005074770881:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:59:57.452907Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:59:57.468862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T15:59:57.551486Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T15:59:57.594117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T15:59:57.642407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T15:59:57.705138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T15:59:57.808691Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004026549609514:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:57.808793Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:57.809234Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004026549609521:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:57.813269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T15:59:57.844381Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-31T15:59:57.844663Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488004026549609523:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T15:59:57.919584Z node 1 :TX_PROXY ERROR: Actor# [1:7488004026549609577:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpQueryPerf::IndexUpdateOn-QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexUpdateOn-QueryService+UseSink >> RetryPolicy::TWriteSession_TestBrokenPolicy [GOOD] >> RetryPolicy::TWriteSession_RetryOnTargetCluster ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::MultiDeleteFromTable-QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 3431, MsgBus: 1647 2025-03-31T15:59:50.481844Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488003997061190037:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:59:50.481889Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000f7e/r3tmp/tmprqxjYN/pdisk_1.dat 2025-03-31T15:59:51.576063Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:51.593918Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:59:51.594027Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:59:51.595151Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:59:51.601876Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3431, node 1 2025-03-31T15:59:51.923655Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:59:51.923677Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:59:51.923683Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:59:51.923784Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1647 TClient is connected to server localhost:1647 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:59:52.941186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:53.009624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:53.261709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T15:59:53.517826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T15:59:53.645372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:55.485299Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488003997061190037:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:59:55.584803Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:59:55.704071Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004018536028330:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:55.704209Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:56.277755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T15:59:56.345853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T15:59:56.408127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T15:59:56.466477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T15:59:56.536581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T15:59:56.620024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T15:59:56.754979Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004022830996150:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:56.755062Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:56.755348Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004022830996155:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:56.759961Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T15:59:56.790515Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488004022830996157:2466], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T15:59:56.896404Z node 1 :TX_PROXY ERROR: Actor# [1:7488004022830996215:3464] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpFlipJoin::Right_2 [GOOD] Test command err: Trying to start YDB, gRPC: 7304, MsgBus: 16195 2025-03-31T15:59:42.106800Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488003965369487024:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:59:42.106842Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0019bb/r3tmp/tmpqnumgb/pdisk_1.dat 2025-03-31T15:59:42.790365Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:59:42.790435Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:59:42.797089Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:59:42.817615Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7304, node 1 2025-03-31T15:59:42.955596Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:59:42.955613Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:59:42.955619Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:59:42.955733Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16195 TClient is connected to server localhost:16195 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:59:43.853715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T15:59:43.875861Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T15:59:43.885549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:44.096338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:44.411712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:44.526596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:46.899160Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003982549357998:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:46.899283Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:47.106862Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488003965369487024:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:59:47.106944Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:59:47.204864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T15:59:47.244650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T15:59:47.316968Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T15:59:47.358079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T15:59:47.450637Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T15:59:47.559921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T15:59:47.655787Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003986844325815:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:47.655949Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:47.656273Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003986844325821:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:47.661333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T15:59:47.690506Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-31T15:59:47.691050Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488003986844325823:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T15:59:47.770278Z node 1 :TX_PROXY ERROR: Actor# [1:7488003986844325878:3458] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:59:49.722793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T15:59:49.762047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T15:59:49.822370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T15:59:49.893165Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 11881, MsgBus: 5358 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0019bb/r3tmp/tmpXj0mrg/pdisk_1.dat 2025-03-31T15:59:51.979009Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:59:52.020144Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:52.046941Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:59:52.067303Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:59:52.070092Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11881, node 2 2025-03-31T15:59:52.179569Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:59:52.179590Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:59:52.179598Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:59:52.179717Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5358 TClient is connected to server localhost:5358 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:59:52.796564Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:52.806713Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T15:59:52.821445Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:52.910855Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T15:59:53.125080Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:53.200752Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-31T15:59:55.479229Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488004017263728005:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:55.479355Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:55.603118Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T15:59:55.690563Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T15:59:55.740812Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T15:59:55.862211Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T15:59:55.958986Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T15:59:56.041660Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T15:59:56.156559Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488004021558695824:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:56.156629Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:56.156792Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488004021558695829:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:56.160850Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T15:59:56.192697Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488004021558695831:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T15:59:56.259643Z node 2 :TX_PROXY ERROR: Actor# [2:7488004021558695887:3452] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:59:57.700304Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T15:59:57.780423Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-31T15:59:57.896355Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-31T15:59:57.951122Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::Transfer [GOOD] Test command err: 2025-03-31T15:59:55.102519Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004017346843313:2195];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:59:55.115232Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0013b7/r3tmp/tmpzTd9td/pdisk_1.dat 2025-03-31T15:59:55.874616Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:59:55.874732Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:59:55.879265Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:55.881750Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23616 TServer::EnableGrpc on GrpcPort 2936, node 1 2025-03-31T15:59:56.406619Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:59:56.406640Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:59:56.406646Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:59:56.406756Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23616 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:59:57.293480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T15:59:57.565552Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { name: Topic, owner: root@builtin, type: Topic, size_bytes: 0, created_at: { plan_step: 1743436797452, tx_id: 281474976710658 } } } 2025-03-31T15:59:57.565586Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Describe path succeeded: path# /Root/Topic 2025-03-31T15:59:57.595682Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTopicResponse { Result: { status: SUCCESS, issues: } } 2025-03-31T15:59:57.595713Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Describe topic succeeded: path# /Root/Topic 2025-03-31T15:59:57.595743Z node 1 :REPLICATION_CONTROLLER INFO: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Topic, dstPath# /Root/Replicated/Table, kind# Transfer ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Replace+QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 17857, MsgBus: 4236 2025-03-31T15:59:52.266759Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004004989793276:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:59:52.274261Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000f6b/r3tmp/tmpMLnlrN/pdisk_1.dat 2025-03-31T15:59:52.810072Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:59:52.819155Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:59:52.822655Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:52.848759Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17857, node 1 2025-03-31T15:59:53.043784Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:59:53.043808Z node 1 :NET_CLASSIFIER WARN: will 
try to initialize from file: (empty maybe) 2025-03-31T15:59:53.043820Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:59:53.043969Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4236 TClient is connected to server localhost:4236 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:59:53.914627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:53.957380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:54.137976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:54.339163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:54.454893Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:57.267055Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488004004989793276:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:59:57.267149Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:59:57.533562Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004026464631532:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:57.533698Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:57.965725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T15:59:58.009628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T15:59:58.044431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T15:59:58.079983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T15:59:58.130686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T15:59:58.215254Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T15:59:58.320677Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004030759599350:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:58.320767Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:58.321134Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004030759599355:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:58.333353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T15:59:58.382156Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488004030759599357:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T15:59:58.480705Z node 1 :TX_PROXY ERROR: Actor# [1:7488004030759599418:3462] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> SystemView::AuthUsers |70.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut |70.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut |70.3%| [LD] {RESULT} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut >> SystemView::ShowCreateTableDefaultLiteral |70.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |70.3%| [TA] {RESULT} $(B)/ydb/core/persqueue/ut/ut_with_sdk/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::MultiDeleteFromTable-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 10581, MsgBus: 24787 2025-03-31T15:59:51.366044Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004002688007663:2192];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:59:51.374929Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000f76/r3tmp/tmpsxMcB7/pdisk_1.dat 2025-03-31T15:59:52.225968Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:59:52.226076Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:59:52.228573Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:59:52.279749Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10581, node 1 2025-03-31T15:59:52.503623Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:59:52.503644Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:59:52.503653Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:59:52.503763Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24787 TClient is connected to server localhost:24787 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:59:53.678654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:53.746748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:53.941879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:54.275176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:54.440073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:56.367275Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488004002688007663:2192];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:59:56.367385Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:59:57.239135Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004028457813084:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:57.239235Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:57.539844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T15:59:57.593141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T15:59:57.635793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T15:59:57.682089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T15:59:57.749746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T15:59:57.836466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T15:59:57.914351Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004028457813607:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:57.914425Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:57.914738Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004028457813612:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:57.918574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T15:59:57.933740Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488004028457813614:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T15:59:58.016397Z node 1 :TX_PROXY ERROR: Actor# [1:7488004032752780967:3459] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> SystemView::PgTablesOneSchemeShardDataQuery >> SystemView::VSlotsFields >> KqpQueryPerf::AggregateToScalar+QueryService [GOOD] >> KqpQueryPerf::AggregateToScalar-QueryService >> KqpQueryPerf::IndexReplace-QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexReplace-QueryService+UseSink >> KqpQueryPerf::Update-QueryService+UseSink [GOOD] >> TPersQueueTest::WriteExisting [GOOD] >> TPersQueueTest::WriteExistingBigValue >> KqpQueryPerf::IndexUpsert-QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexUpsert-QueryService+UseSink >> ConvertYdbValueToMiniKQLValueTest::SimpleBool [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleBoolTypeMissmatch [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleDecimal [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleDecimalTypeMissmatch [GOOD] >> ConvertYdbValueToMiniKQLValueTest::OptionalString [GOOD] >> ConvertYdbValueToMiniKQLValueTest::PgValue [GOOD] >> SystemView::PartitionStatsOneSchemeShard >> KqpQueryPerf::IndexInsert-QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexInsert-QueryService+UseSink >> ConvertYdbPermissionNameToACLAttrs::TestEqualGranularAndDeprecatedAcl [GOOD] >> ConvertYdbValueToMiniKQLValueTest::OptionalEmpty |70.3%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest >> ConvertYdbValueToMiniKQLValueTest::OptionalEmpty [GOOD] >> ConvertYdbValueToMiniKQLValueTest::OptionalOptionalEmpty [GOOD] >> ConvertYdbValueToMiniKQLValueTest::OptionalOptionalEmpty2 [GOOD] >> ConvertYdbValueToMiniKQLValueTest::List [GOOD] >> ConvertYdbValueToMiniKQLValueTest::Dict [GOOD] >> KqpQueryPerf::MultiDeleteFromTable+QueryService+UseSink [GOOD] >> DbCounters::TabletsSimple ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Update-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 12880, MsgBus: 7307 2025-03-31T15:59:54.465031Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004014392618717:2276];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:59:54.465079Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000f55/r3tmp/tmpwNUIsT/pdisk_1.dat 2025-03-31T15:59:55.221987Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:59:55.222081Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:59:55.226730Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:59:55.255878Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12880, node 1 2025-03-31T15:59:55.571678Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:59:55.571700Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: 
(empty maybe) 2025-03-31T15:59:55.571706Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:59:55.571809Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7307 TClient is connected to server localhost:7307 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:59:56.881354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:56.938072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:57.193559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T15:59:57.503313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T15:59:57.672734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:59.468484Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488004014392618717:2276];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:59:59.468563Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:00:00.186614Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004040162424035:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:00:00.186722Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:00:00.740308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:00:00.782616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:00:00.826460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:00:00.889355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:00:00.937597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:00:00.989032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:00:01.083438Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004044457391846:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:00:01.083522Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:00:01.084825Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004044457391851:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:00:01.089045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:00:01.123409Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488004044457391853:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:00:01.228059Z node 1 :TX_PROXY ERROR: Actor# [1:7488004044457391911:3460] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |70.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertYdbValueToMiniKQLValueTest::PgValue [GOOD] >> KqpQueryPerf::Delete+QueryService+UseSink [GOOD] |70.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertYdbValueToMiniKQLValueTest::Dict [GOOD] >> TPersQueueTest::DirectReadPreCached [GOOD] >> TPersQueueTest::DirectReadNotCached |70.3%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest >> DistributedEraseTests::ConditionalEraseRowsShouldEraseOnUint32 >> CdcStreamChangeCollector::InsertSingleRow [GOOD] >> CdcStreamChangeCollector::InsertSingleUuidRow |70.3%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest |70.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut |70.3%| [LD] {RESULT} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut |70.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::MultiDeleteFromTable+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 62012, MsgBus: 6083 2025-03-31T15:59:48.306351Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488003991169957586:2265];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:59:48.306414Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000fb0/r3tmp/tmpYtpuVu/pdisk_1.dat 2025-03-31T15:59:49.103481Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:49.105101Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:59:49.105231Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:59:49.109807Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62012, node 1 2025-03-31T15:59:49.359764Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:59:49.359794Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:59:49.359806Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:59:49.359945Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6083 TClient is connected to server localhost:6083 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:59:50.226984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:50.250952Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T15:59:50.263267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:50.464635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:50.773317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:50.912441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:52.865006Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004008349828359:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:52.865122Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:53.201373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T15:59:53.239548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T15:59:53.268753Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488003991169957586:2265];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:59:53.268842Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:59:53.339169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T15:59:53.420808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T15:59:53.479459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T15:59:53.564601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T15:59:53.638331Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004012644796184:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:53.638422Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:53.638713Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004012644796189:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:53.642469Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T15:59:53.655652Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488004012644796191:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T15:59:53.736308Z node 1 :TX_PROXY ERROR: Actor# [1:7488004012644796246:3458] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 3568, MsgBus: 25074 2025-03-31T15:59:57.502874Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488004026291535868:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:59:57.507956Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000fb0/r3tmp/tmpBM2Bbp/pdisk_1.dat 2025-03-31T15:59:57.954175Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:57.994338Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:59:58.003260Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:59:58.011969Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3568, node 2 2025-03-31T15:59:58.271777Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:59:58.271800Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:59:58.271807Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:59:58.271927Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25074 TClient is connected to server localhost:25074 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-31T15:59:59.064435Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T15:59:59.112223Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T15:59:59.236311Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:59.499004Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:59.602909Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:00:02.250246Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488004047766373988:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:00:02.250359Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:00:02.332374Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:00:02.420090Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:00:02.501791Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:00:02.503283Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488004026291535868:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:00:02.503367Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:00:02.575968Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:00:02.642413Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:00:02.738425Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:00:02.855311Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488004047766374518:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:00:02.855420Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:00:02.856825Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488004047766374523:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:00:02.863240Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:00:02.894598Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488004047766374525:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:00:02.951391Z node 2 :TX_PROXY ERROR: Actor# [2:7488004047766374580:3457] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |70.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedAfterRead-UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Delete+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 11576, MsgBus: 15381 2025-03-31T15:59:48.385447Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488003989241858683:2190];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:59:48.385485Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000fc8/r3tmp/tmpELfNxe/pdisk_1.dat 2025-03-31T15:59:48.986737Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:59:48.986843Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:59:49.121245Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:49.122766Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11576, node 1 2025-03-31T15:59:49.407557Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:59:49.407584Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:59:49.407594Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:59:49.407701Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15381 TClient is connected to server localhost:15381 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:59:50.267649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T15:59:50.314307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:50.637902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:50.938895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T15:59:51.046060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T15:59:53.059252Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004010716696820:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:53.059359Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:53.387395Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488003989241858683:2190];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:59:53.423555Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:59:53.472550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T15:59:53.515206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T15:59:53.564140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T15:59:53.610777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T15:59:53.650901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T15:59:53.735675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T15:59:53.855023Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004010716697340:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:53.855127Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:53.855414Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004010716697346:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:53.860623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T15:59:53.895565Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488004010716697348:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T15:59:53.997975Z node 1 :TX_PROXY ERROR: Actor# [1:7488004010716697407:3461] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 26036, MsgBus: 24579 2025-03-31T15:59:57.283626Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488004027159563606:2207];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:59:57.366338Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000fc8/r3tmp/tmpZgfp9m/pdisk_1.dat 2025-03-31T15:59:57.599715Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:57.648209Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:59:57.648288Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:59:57.649647Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26036, node 2 2025-03-31T15:59:57.801535Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:59:57.801560Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:59:57.801568Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:59:57.801690Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24579 TClient is connected to server localhost:24579 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:59:58.737728Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T15:59:58.746144Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T15:59:58.872092Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:59.038239Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T15:59:59.388205Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:59.520693Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:00:02.247376Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488004027159563606:2207];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:00:02.247454Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:00:02.888408Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488004048634401703:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:00:02.888550Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:00:02.948054Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:00:02.996555Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:00:03.037293Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:00:03.083521Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:00:03.128220Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:00:03.189378Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:00:03.283743Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488004052929369517:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:00:03.283841Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:00:03.284113Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488004052929369522:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:00:03.287645Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:00:03.306906Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488004052929369524:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:00:03.380241Z node 2 :TX_PROXY ERROR: Actor# [2:7488004052929369582:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> TBlobStorageWardenTest::TestCreatePDiskAndEncryptedGroup |70.4%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest >> AsyncIndexChangeCollector::CoveredIndexUpdateCoveredColumn [GOOD] >> AsyncIndexChangeCollector::CoveredIndexUpsert |70.4%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest >> AssignTxId::Basic [GOOD] >> DataShardSnapshots::PostMergeNotCompactedTooEarly [GOOD] >> DataShardSnapshots::PipelineAndMediatorRestoreRace |70.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest |70.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest |70.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep |70.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep |70.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep |70.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedAfterRead-UseSink [GOOD] Test command err: 2025-03-31T15:58:19.867853Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:58:19.868137Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:19.868307Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00143c/r3tmp/tmp1oRlDP/pdisk_1.dat 2025-03-31T15:58:20.985913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T15:58:21.086653Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:58:21.203588Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=Njc4YzI0NTQtNDZhMzNiZGYtMzQzMDM0MDctY2RiZDhiYTQ=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id Njc4YzI0NTQtNDZhMzNiZGYtMzQzMDM0MDctY2RiZDhiYTQ= 2025-03-31T15:58:21.204363Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=Njc4YzI0NTQtNDZhMzNiZGYtMzQzMDM0MDctY2RiZDhiYTQ=, ActorId: [1:619:2540], ActorState: unknown state, session actor bootstrapped 2025-03-31T15:58:21.213832Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=Njc4YzI0NTQtNDZhMzNiZGYtMzQzMDM0MDctY2RiZDhiYTQ=, ActorId: [1:619:2540], ActorState: ReadyState, TraceId: 01jqpesqgx424fqn4vym5yqh7g, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DDL text: CREATE TABLE `/Root/table1` (key int, value int, PRIMARY KEY (key)); rpcActor: [0:0:0] database: databaseId: /Root pool id: default 2025-03-31T15:58:21.958758Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:623:2543], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:21.958897Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:22.064759Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:58:22.064881Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:58:22.067780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T15:58:22.102157Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:58:22.161218Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:684:2576], Recipient [1:689:2579]: NKikimr::TEvTablet::TEvBoot 2025-03-31T15:58:22.162375Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:684:2576], Recipient [1:689:2579]: NKikimr::TEvTablet::TEvRestored 2025-03-31T15:58:22.162939Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:689:2579] 2025-03-31T15:58:22.171383Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:58:22.255769Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:684:2576], Recipient [1:689:2579]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T15:58:22.256557Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:58:22.256681Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T15:58:22.258418Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T15:58:22.258504Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T15:58:22.258555Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T15:58:22.258913Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T15:58:22.259064Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T15:58:22.271287Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:703:2579] in generation 1 2025-03-31T15:58:22.271918Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T15:58:22.364061Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T15:58:22.364277Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T15:58:22.364394Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:705:2588] 2025-03-31T15:58:22.364433Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T15:58:22.364471Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T15:58:22.364511Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T15:58:22.364736Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:689:2579], Recipient [1:689:2579]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T15:58:22.364787Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T15:58:22.365053Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at
72075186224037888 2025-03-31T15:58:22.365148Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T15:58:22.365561Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T15:58:22.365610Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T15:58:22.365648Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-31T15:58:22.365683Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-31T15:58:22.365733Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-31T15:58:22.365764Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T15:58:22.365873Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T15:58:22.403908Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:708:2590], Recipient [1:689:2579]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T15:58:22.403987Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T15:58:22.404059Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:680:2574], serverId# [1:708:2590], sessionId# [0:0:0] 2025-03-31T15:58:22.404213Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:708:2590] 2025-03-31T15:58:22.404257Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T15:58:22.404375Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T15:58:22.404599Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-31T15:58:22.404660Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T15:58:22.404769Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T15:58:22.404829Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-31T15:58:22.404880Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-31T15:58:22.404955Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-03-31T15:58:22.404991Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-31T15:58:22.405303Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-31T15:58:22.405351Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-31T15:58:22.405382Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-31T15:58:22.405412Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-31T15:58:22.405461Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-31T15:58:22.405502Z node 1
:TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-31T15:58:22.405542Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-31T15:58:22.405574Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-31T15:58:22.405599Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-31T15:58:22.406466Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T15:58:22.406518Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-31T15:58:22.406553Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-31T15:58:22.406592Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-03-31T15:58:22.406671Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-31T15:58:22.417791Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:709:2591], Recipient [1:689:2579]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-31T15:58:22.417856Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T15:58:22.469257Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:724:2600], Recipient [1:689:2579]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T15:58:22.469342Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T15:58:22.469401Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:722:2598], serverId# [1:724:2600], sessionId# [0:0:0] 2025-03-31T15:58:22.470300Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:568:2495], Recipient [1:689:2579]: {TEvPlanStep step# 300 MediatorId# 72057594046382081 Tabl ... node 13 :TX_DATASHARD TRACE: Execution plan for [0:281474976715665] at 72075186224037888 has finished 2025-03-31T16:00:07.271718Z node 13 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:00:07.271798Z node 13 :TX_DATASHARD TRACE: Complete execution for [0:281474976715665] at 72075186224037888 on unit FinishPropose 2025-03-31T16:00:07.271877Z node 13 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715665 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: LOCKS_BROKEN 2025-03-31T16:00:07.272002Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:00:07.273051Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=OGM3M2ZiMmYtODc5NjU0NTEtYmY2NzIyZWEtNjRhN2E2YTA=, ActorId: [13:841:2686], ActorState: ExecuteState, TraceId: 01jqpewyys5p4n5thnvtk1knbe, Create QueryResponse for error on request, msg: 2025-03-31T16:00:07.274338Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715666.
Ctx: { TraceId: 01jqpewyys5p4n5thnvtk1knbe, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=OGM3M2ZiMmYtODc5NjU0NTEtYmY2NzIyZWEtNjRhN2E2YTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:00:07.274745Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [13:949:2686], Recipient [13:904:2732]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 949 RawX2: 55834577534 } TxBody: " \0018\001j3\010\001\032\'\n#\t\215\023\000\000\000\000\001\000\021\000\000\001\000\000\020\000\001\030\001 \000)\000\001\205\000\000\000\000\0010\0028\000 \003\"\006\020\0020\000@\n\220\001\000" TxId: 281474976715666 ExecLevel: 0 Flags: 8 2025-03-31T16:00:07.274792Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T16:00:07.274924Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 2146435074, Sender [13:904:2732], Recipient [13:904:2732]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2025-03-31T16:00:07.274957Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2025-03-31T16:00:07.275033Z node 13 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:00:07.275269Z node 13 :TX_DATASHARD TRACE: -- AddWriteRange: (Uint64 : 281474976715661, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 2) table: [1:997:0] 2025-03-31T16:00:07.275369Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715666] at 72075186224037888 on unit CheckDataTx 2025-03-31T16:00:07.275424Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715666] at 72075186224037888 is Executed 2025-03-31T16:00:07.275458Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit CheckDataTx 2025-03-31T16:00:07.275491Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715666] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-03-31T16:00:07.275525Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715666] at 72075186224037888 on unit BuildAndWaitDependencies 2025-03-31T16:00:07.275567Z node 13 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v400/18446744073709551615 ImmediateWriteEdge# v500/18446744073709551615 ImmediateWriteEdgeReplied# v500/18446744073709551615 2025-03-31T16:00:07.275620Z node 13 :TX_DATASHARD TRACE: Activated operation [0:281474976715666] at 72075186224037888 2025-03-31T16:00:07.275656Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715666] at 72075186224037888 is Executed 2025-03-31T16:00:07.275684Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-03-31T16:00:07.275711Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715666] at 72075186224037888 to execution unit ExecuteKqpDataTx 2025-03-31T16:00:07.275743Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715666] at 72075186224037888 on unit ExecuteKqpDataTx 2025-03-31T16:00:07.275826Z node 13 :TX_DATASHARD TRACE: Operation [0:281474976715666] (execute_kqp_data_tx) at 72075186224037888 set memory limit 4193448 2025-03-31T16:00:07.275960Z node 13 :TX_DATASHARD TRACE: KqpEraseLock LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480
PathId: 2 HasWrites: false 2025-03-31T16:00:07.276062Z node 13 :TX_DATASHARD TRACE: add locks to result: 0 2025-03-31T16:00:07.276147Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715666] at 72075186224037888 is Executed 2025-03-31T16:00:07.276181Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit ExecuteKqpDataTx 2025-03-31T16:00:07.276208Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715666] at 72075186224037888 to execution unit FinishPropose 2025-03-31T16:00:07.276238Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715666] at 72075186224037888 on unit FinishPropose 2025-03-31T16:00:07.276298Z node 13 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715666 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-03-31T16:00:07.276418Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715666] at 72075186224037888 is DelayComplete 2025-03-31T16:00:07.276451Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit FinishPropose 2025-03-31T16:00:07.276480Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715666] at 72075186224037888 to execution unit CompletedOperations 2025-03-31T16:00:07.276513Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715666] at 72075186224037888 on unit CompletedOperations 2025-03-31T16:00:07.276564Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715666] at 72075186224037888 is Executed 2025-03-31T16:00:07.276592Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit CompletedOperations 2025-03-31T16:00:07.276618Z node 13 :TX_DATASHARD TRACE: Execution plan for [0:281474976715666] at 72075186224037888 has finished 2025-03-31T16:00:07.276689Z node 13 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:00:07.276725Z node 13 :TX_DATASHARD TRACE: Complete execution for [0:281474976715666] at 72075186224037888 on unit FinishPropose 2025-03-31T16:00:07.276769Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:00:07.278428Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [13:61:2108], Recipient [13:904:2732]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715661 LockNode: 13 Status: STATUS_NOT_FOUND 2025-03-31T16:00:07.513017Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jqpewz3n2vmcm57m4zb9tg3w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=MzJlMTIyZGQtMWJkNGZmODktNmUxOWVlN2QtYWMxYmVmYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}.
Database not set, use /Root 2025-03-31T16:00:07.515834Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [13:970:2776], Recipient [13:904:2732]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2025-03-31T16:00:07.516104Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-03-31T16:00:07.516202Z node 13 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v400/18446744073709551615 ImmediateWriteEdge# v500/18446744073709551615 ImmediateWriteEdgeReplied# v500/18446744073709551615 2025-03-31T16:00:07.516288Z node 13 :TX_DATASHARD TRACE: 72075186224037888 changed HEAD read to non-repeatable v500/18446744073709551615 2025-03-31T16:00:07.516417Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit CheckRead 2025-03-31T16:00:07.516604Z node 13 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-03-31T16:00:07.516686Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit CheckRead 2025-03-31T16:00:07.516769Z node 13 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-03-31T16:00:07.516829Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit BuildAndWaitDependencies 2025-03-31T16:00:07.516918Z node 13 :TX_DATASHARD TRACE: Activated operation [0:4] at 72075186224037888 2025-03-31T16:00:07.516991Z node 13 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-03-31T16:00:07.517025Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-03-31T16:00:07.517053Z node 13 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit ExecuteRead 2025-03-31T16:00:07.517084Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit ExecuteRead 2025-03-31T16:00:07.517272Z node 13 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2025-03-31T16:00:07.517675Z node 13 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[13:970:2776], 0} after executionsCount# 1 2025-03-31T16:00:07.517792Z node 13 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[13:970:2776], 0} sends rowCount# 2, bytes# 64, quota rows left# 999, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-03-31T16:00:07.517977Z node 13 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[13:970:2776], 0} finished in read 2025-03-31T16:00:07.518090Z node 13 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-03-31T16:00:07.518123Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit ExecuteRead 2025-03-31T16:00:07.518153Z node 13 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit CompletedOperations 2025-03-31T16:00:07.518188Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit CompletedOperations
2025-03-31T16:00:07.518254Z node 13 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-03-31T16:00:07.518279Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit CompletedOperations 2025-03-31T16:00:07.518324Z node 13 :TX_DATASHARD TRACE: Execution plan for [0:4] at 72075186224037888 has finished 2025-03-31T16:00:07.518394Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-03-31T16:00:07.518581Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-03-31T16:00:07.519884Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [13:970:2776], Recipient [13:904:2732]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-03-31T16:00:07.519975Z node 13 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 11 } }, { items { uint32_value: 2 } items { uint32_value: 22 } } |70.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest >> DataShardSnapshots::LockedWriteWithPendingVolatileCommit+UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithPendingVolatileCommit-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest >> AssignTxId::Basic [GOOD] Test command err: 2025-03-31T16:00:00.133951Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004042155909759:2069];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:00:00.188047Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001af7/r3tmp/tmpXfwWHp/pdisk_1.dat 2025-03-31T16:00:01.229987Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:00:01.230097Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:00:01.233842Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:00:01.235992Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:00:01.304723Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:10719 TServer::EnableGrpc on GrpcPort 17945, node 1 2025-03-31T16:00:02.171767Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:00:02.171790Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:00:02.171796Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:00:02.171958Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10719 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:00:03.053710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:00:03.102241Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:00:05.136683Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488004042155909759:2069];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:00:05.136787Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:00:07.605609Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004072220681511:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:00:07.605709Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:00:08.263533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateReplication, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:00:08.284517Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] OnActivateExecutor 2025-03-31T16:00:08.284616Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxInitSchema] Execute 2025-03-31T16:00:08.291775Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxInitSchema] Complete 2025-03-31T16:00:08.291876Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxInit] Execute 2025-03-31T16:00:08.292126Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxInit] Complete 2025-03-31T16:00:08.292137Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] SwitchToWork 2025-03-31T16:00:08.334177Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] Handle NKikimrReplication.TEvCreateReplication PathId { OwnerId: 72057594046644480 LocalId: 2 } OperationId { TxId: 281474976710658 PartId: 0 } Config { SrcConnectionParams { Endpoint: "localhost:17945" Database: "/Root" OAuthToken { Token: "***" } EnableSsl: false } Specific { Targets { SrcPath: "/Root/table" DstPath: "/Root/replica" } } ConsistencySettings { Global { CommitIntervalMilliSeconds: 10000 } } } 2025-03-31T16:00:08.334460Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxCreateReplication] Execute: NKikimrReplication.TEvCreateReplication PathId { OwnerId: 72057594046644480 LocalId: 2 } OperationId { TxId: 281474976710658 PartId: 0 } Config { SrcConnectionParams { Endpoint: "localhost:17945" Database: "/Root" OAuthToken { Token: "***" } EnableSsl: false } Specific { Targets { SrcPath: "/Root/table" DstPath: "/Root/replica" } } ConsistencySettings { Global { CommitIntervalMilliSeconds: 10000 } } } 2025-03-31T16:00:08.334538Z node 1 :REPLICATION_CONTROLLER NOTICE: [controller 72075186224037888][TxCreateReplication] Add replication: rid# 1, pathId# [OwnerId: 72057594046644480, LocalPathId: 2] 2025-03-31T16:00:08.339650Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxCreateReplication] Complete 2025-03-31T16:00:08.347633Z node 1 :REPLICATION_CONTROLLER TRACE: [TenantResolver][rid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root/replication TableId: [72057594046644480:2:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindReplication DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-31T16:00:08.349479Z node 1 :REPLICATION_CONTROLLER TRACE: [TenantResolver][rid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version:
1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-31T16:00:08.349735Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] Handle NKikimr::NReplication::NController::TEvPrivate::TEvResolveTenantResult { ReplicationId: 1 Tenant: /Root Sucess: 1 } 2025-03-31T16:00:08.349750Z node 1 :REPLICATION_CONTROLLER NOTICE: [controller 72075186224037888] Tenant resolved: rid# 1, tenant# /Root 2025-03-31T16:00:08.349763Z node 1 :REPLICATION_CONTROLLER INFO: [controller 72075186224037888] Discover tenant nodes: tenant# /Root TClient::Ls request: /Root/replication 2025-03-31T16:00:08.358830Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] Handle NKikimr::TEvDiscovery::TEvDiscoveryData 2025-03-31T16:00:08.358875Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888] Create session: nodeId# 1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "replication" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeReplication CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743436808393 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ReplicationVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsIns...
(TRUNCATED) 2025-03-31T16:00:08.372361Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 1 TxId: 0 } 2025-03-31T16:00:08.372477Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 0, allocated# 0 2025-03-31T16:00:08.372531Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Complete: pending# 1, assigned# 0, allocated# 0, exhausted# 1 2025-03-31T16:00:08.372684Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] Handle NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2025-03-31T16:00:08.372735Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 0, allocated# 5 2025-03-31T16:00:08.375817Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 1, allocated# 4, exhausted# 0 2025-03-31T16:00:08.376431Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 9999 TxId: 0 } 2025-03-31T16:00:08.376481Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 1, allocated# 4 2025-03-31T16:00:08.376553Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 1, allocated# 4, exhausted# 0 2025-03-31T16:00:08.377004Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 9999 TxId: 18446744073709551615 } 2025-03-31T16:00:08.377031Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 1, allocated# 4 2025-03-31T16:00:08.377058Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 1, allocated# 4, exhausted# 0 2025-03-31T16:00:08.377355Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 10000 TxId: 0 } 2025-03-31T16:00:08.377376Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 1, allocated# 4 2025-03-31T16:00:08.377970Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 2, allocated# 3, exhausted# 0 2025-03-31T16:00:08.378419Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 5000 TxId: 0 } 2025-03-31T16:00:08.378455Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 2, allocated# 3 2025-03-31T16:00:08.378490Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 2, allocated# 3, exhausted# 0 2025-03-31T16:00:08.379994Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 20000 TxId: 0 } Versions { Step: 30000 TxId: 0 } Versions { Step: 40000 TxId: 0 } 2025-03-31T16:00:08.380066Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Execute: pending# 3, assigned# 2, allocated# 3 2025-03-31T16:00:08.384912Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 5, allocated# 0,
exhausted# 0 2025-03-31T16:00:08.385052Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] Handle NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2025-03-31T16:00:08.385089Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Execute: pending# 0, assigned# 5, allocated# 5 2025-03-31T16:00:08.385132Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 5, allocated# 5, exhausted# 0 2025-03-31T16:00:08.387006Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 50000 TxId: 0 } 2025-03-31T16:00:08.387074Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 5, allocated# 5 2025-03-31T16:00:08.388905Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 5, allocated# 5, exhausted# 0 2025-03-31T16:00:08.403311Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: SCHEME_ERROR, issues: {
: Error: Path not found } } } 2025-03-31T16:00:08.404084Z node 1 :REPLICATION_CONTROLLER ERROR: [TargetDiscoverer][rid 1] Describe path failed: path# /Root/table, status# SCHEME_ERROR, issues# {
: Error: Path not found } 2025-03-31T16:00:08.411105Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] Handle NKikimr::NReplication::NController::TEvPrivate::TEvDiscoveryTargetsResult { ReplicationId: 1 ToAdd [] ToDelete [] Failed [/Root/table: SCHEME_ERROR ({
: Error: Path not found })] } 2025-03-31T16:00:08.418793Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxDiscoveryTargetsResult] Execute: NKikimr::NReplication::NController::TEvPrivate::TEvDiscoveryTargetsResult { ReplicationId: 1 ToAdd [] ToDelete [] Failed [/Root/table: SCHEME_ERROR ({
: Error: Path not found })] } 2025-03-31T16:00:08.418888Z node 1 :REPLICATION_CONTROLLER ERROR: [controller 72075186224037888][TxDiscoveryTargetsResult] Discovery error: rid# 1, error# /Root/table: SCHEME_ERROR ({
: Error: Path not found }) 2025-03-31T16:00:08.427268Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxDiscoveryTargetsResult] Complete >> YdbProxy::CreateTopic >> YdbProxy::CopyTable >> YdbProxy::MakeDirectory >> YdbProxy::ListDirectory >> SystemView::VSlotsFields [GOOD] >> SystemView::TopPartitionsByCpuFields >> YdbProxy::DropTable >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-56 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-57 >> TCertificateAuthUtilsTest::GenerateAndVerifyCertificates >> TCertificateAuthUtilsTest::GenerateAndVerifyCertificates [GOOD] >> YdbProxy::ReadTopic |70.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest >> TCertificateAuthUtilsTest::GenerateAndVerifyCertificates [GOOD] >> PartitionEndWatcher::EmptyPartition [GOOD] >> PartitionEndWatcher::AfterCommit [GOOD] >> YdbProxy::AlterTable >> KqpQueryPerf::AggregateToScalar-QueryService [GOOD] |70.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |70.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |70.4%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |70.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut |70.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut |70.4%| [LD] {RESULT} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut >> KqpQueryPerf::IndexUpdateOn-QueryService+UseSink [GOOD] >> KqpIndexes::SimpleVectorIndexOrderByCosineSimilarityNotNullableLevel2 [GOOD] >> YdbProxy::RemoveDirectory >> TBlobStorageWardenTest::TestCreatePDiskAndEncryptedGroup [GOOD] >> SystemView::PgTablesOneSchemeShardDataQuery [GOOD] >> SystemView::ShowCreateTable |70.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::AggregateToScalar-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 19120, MsgBus: 32765 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000f4c/r3tmp/tmpQc1XbW/pdisk_1.dat 2025-03-31T15:59:54.440301Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004014837629796:2209];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:59:54.440962Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T15:59:55.115350Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:59:55.115469Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:59:55.120522Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:59:55.172147Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19120, node 1 2025-03-31T15:59:55.311682Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T15:59:55.311703Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:59:55.311708Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:59:55.311828Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32765 TClient is connected to server localhost:32765 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:59:56.202431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:56.231622Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T15:59:56.249588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:56.519376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:56.850104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:56.962985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:59.242254Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004036312467881:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:59.242387Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:59.412570Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488004014837629796:2209];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:59:59.412633Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:59:59.628320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T15:59:59.671427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T15:59:59.732611Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T15:59:59.792070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T15:59:59.851581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T15:59:59.933779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:00:00.027489Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004040607435693:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:00:00.027574Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:00:00.027958Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004040607435698:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:00:00.032075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:00:00.051883Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488004040607435700:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:00:00.131347Z node 1 :TX_PROXY ERROR: Actor# [1:7488004040607435755:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 26841, MsgBus: 17586 2025-03-31T16:00:04.196691Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488004060071849939:2195];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:00:04.196827Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000f4c/r3tmp/tmp4QvPcZ/pdisk_1.dat 2025-03-31T16:00:04.563432Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:00:04.621832Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:00:04.621915Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:00:04.628222Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26841, node 2 2025-03-31T16:00:04.793740Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:00:04.793762Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:00:04.793781Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:00:04.793896Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17586 TClient is connected to server localhost:17586 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:00:06.154700Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:00:06.189330Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting...
2025-03-31T16:00:06.331987Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:00:06.530039Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:00:06.612195Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:00:09.199378Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488004060071849939:2195];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:00:09.199454Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:00:10.596399Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488004085841655341:2410], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:00:10.596482Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:00:10.655612Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:00:10.718043Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:00:10.769487Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:00:10.840931Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:00:10.942200Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:00:11.032817Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:00:11.215358Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488004090136623162:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:00:11.215441Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:00:11.215660Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488004090136623168:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:00:11.227638Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:00:11.245318Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488004090136623170:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-03-31T16:00:11.321874Z node 2 :TX_PROXY ERROR: Actor# [2:7488004090136623228:3461] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest >> TBlobStorageWardenTest::TestCreatePDiskAndEncryptedGroup [GOOD]
Test command err:
2025-03-31T16:00:11.222764Z node 1 :BS_SYNCLOG WARN: VDISK[3e000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:1:0] targetVDisk# [3e000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0
2025-03-31T16:00:11.408036Z node 1 :BS_SYNCLOG WARN: VDISK[3e000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:2:0] targetVDisk# [3e000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0
2025-03-31T16:00:11.548069Z node 1 :BS_SYNCLOG WARN: VDISK[3e000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:3:0] targetVDisk# [3e000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0
2025-03-31T16:00:11.549126Z node 1 :BS_SYNCLOG WARN: VDISK[3e000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:2:0] targetVDisk# [3e000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0
2025-03-31T16:00:11.612039Z node 1 :BS_SYNCLOG WARN: VDISK[3e000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:3:0] targetVDisk# [3e000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0
2025-03-31T16:00:11.653941Z node 1 :BS_SYNCLOG WARN: VDISK[3e000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:3:0] targetVDisk# [3e000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0
Sending TEvPut
Sending TEvGet
Sending TEvVGet
Sending TEvPut
Sending TEvGet
>> AsyncIndexChangeCollector::CoveredIndexUpsert [GOOD]
>> AsyncIndexChangeCollector::AllColumnsInPk
>> CdcStreamChangeCollector::InsertSingleUuidRow [GOOD]
>> CdcStreamChangeCollector::IndexAndStreamUpsert
>> YdbProxy::CreateTopic [GOOD]
>> YdbProxy::DescribeConsumer
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::SimpleVectorIndexOrderByCosineSimilarityNotNullableLevel2 [GOOD]
Test command err:
Trying to start YDB, gRPC: 14769, MsgBus: 20648
2025-03-31T15:58:33.407465Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488003667403753010:2196];send_to=[0:7307199536658146131:7762515];
2025-03-31T15:58:33.407924Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dec/r3tmp/tmpX2ut5g/pdisk_1.dat
2025-03-31T15:58:34.177825Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T15:58:34.177963Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T15:58:34.196415Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T15:58:34.204136Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 14769, node 1
2025-03-31T15:58:34.503650Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T15:58:34.503671Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T15:58:34.503678Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T15:58:34.503812Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:20648
TClient is connected to server localhost:20648
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-31T15:58:36.157011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T15:58:36.183234Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-03-31T15:58:36.208177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T15:58:36.661897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T15:58:37.088622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T15:58:37.233441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T15:58:38.387285Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488003667403753010:2196];send_to=[0:7307199536658146131:7762515];
2025-03-31T15:58:38.387374Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-31T15:58:41.025159Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003701763493031:2413], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T15:58:41.025282Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T15:58:41.537521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-31T15:58:41.603498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-31T15:58:41.685661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-31T15:58:41.865564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-31T15:58:41.950184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-31T15:58:42.052987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-31T15:58:42.171016Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003706058460862:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T15:58:42.171159Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T15:58:42.171503Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003706058460867:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T15:58:42.180101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-31T15:58:42.201540Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488003706058460869:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-03-31T15:58:42.259533Z node 1 :TX_PROXY ERROR: Actor# [1:7488003706058460927:3463] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-31T15:58:43.526779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-03-31T15:58:43.880333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480
2025-03-31T15:58:43.986092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480
2025-03-31T15:58:49.166271Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs
2025-03-31T15:58:49.166309Z node 1 :IMPORT WARN: Table profiles were not loaded
Trying to start YDB, gRPC: 2957, MsgBus: 21957
2025-03-31T15:59:18.910604Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488003858623777441:2060];send_to=[0:7307199536658146131:7762515];
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dec/r3tmp/tmp86TJ4K/pdisk_1.dat
2025-03-31T15:59:18.990636Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
2025-03-31T15:59:19.133693Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T15:59:19.133797Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T15:59:19.136989Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-03-31T15:59:19.156336Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 2957, node 2
2025-03-31T15:59:19.315719Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T15:59:19.315747Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T15:59:19.315756Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T15:59:19.315893Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:21957
TClient is connected to server localhost:21957
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-31T15:59:19.955825Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T15:59:19.965459Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-03-31T15:59:19.977760Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T15:59:20.058507Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T15:59:20.274201Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T15:59:20.374564Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T15:59:22.935409Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488003875803648406:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T15:59:22.935545Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T15:59:22.964250Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-31T15:59:22.998346Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-31T15:59:23.035600Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-31T15:59:23.069960Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-31T15:59:23.107558Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-31T15:59:23.158259Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-31T15:59:23.235836Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488003880098616220:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T15:59:23.235944Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T15:59:23.236219Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488003880098616225:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T15:59:23.239876Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-31T15:59:23.249993Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488003880098616227:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-03-31T15:59:23.350376Z node 2 :TX_PROXY ERROR: Actor# [2:7488003880098616282:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-31T15:59:23.899325Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488003858623777441:2060];send_to=[0:7307199536658146131:7762515];
2025-03-31T15:59:23.925197Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-31T15:59:24.518656Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-03-31T15:59:24.860648Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480
2025-03-31T15:59:24.908841Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715759:0, at schemeshard: 72057594046644480
2025-03-31T15:59:24.972287Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715760:0, at schemeshard: 72057594046644480
2025-03-31T15:59:25.019271Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715763:0, at schemeshard: 72057594046644480
2025-03-31T15:59:25.067339Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037926 not found
2025-03-31T15:59:25.067379Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037925 not found
2025-03-31T15:59:25.068717Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037924 not found
2025-03-31T15:59:34.107940Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs
2025-03-31T15:59:34.107977Z node 2 :IMPORT WARN: Table profiles were not loaded
>> DistributedEraseTests::ConditionalEraseRowsShouldEraseOnUint32 [GOOD]
>> DistributedEraseTests::ConditionalEraseRowsShouldFailOnSchemeTx
|70.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexUpdateOn-QueryService+UseSink [GOOD]
Test command err:
Trying to start YDB, gRPC: 30210, MsgBus: 9807
2025-03-31T15:59:49.186169Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488003995328933223:2125];send_to=[0:7307199536658146131:7762515];
2025-03-31T15:59:49.186206Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000f8b/r3tmp/tmp1VvKWX/pdisk_1.dat
2025-03-31T15:59:50.106157Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T15:59:50.129061Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T15:59:50.129173Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T15:59:50.140228Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 30210, node 1
2025-03-31T15:59:50.299700Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T15:59:50.299726Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T15:59:50.299736Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T15:59:50.299840Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:9807
TClient is connected to server localhost:9807
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-31T15:59:51.400741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T15:59:51.431583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T15:59:51.588616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T15:59:51.813778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T15:59:51.955663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T15:59:54.190824Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488003995328933223:2125];send_to=[0:7307199536658146131:7762515];
2025-03-31T15:59:54.190904Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-31T15:59:54.265934Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004016803771426:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T15:59:54.266098Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T15:59:54.723572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-31T15:59:54.800913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-31T15:59:54.898563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-31T15:59:54.965650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-31T15:59:55.046364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-31T15:59:55.133828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-31T15:59:55.267639Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004021098739246:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T15:59:55.267730Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T15:59:55.268065Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004021098739252:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T15:59:55.272692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-31T15:59:55.311845Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488004021098739255:2466], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-03-31T15:59:55.412295Z node 1 :TX_PROXY ERROR: Actor# [1:7488004021098739313:3462] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-31T15:59:57.416083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-03-31T15:59:57.486323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
2025-03-31T15:59:57.554224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480
Trying to start YDB, gRPC: 11813, MsgBus: 11589
2025-03-31T16:00:02.547749Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488004047963173963:2218];send_to=[0:7307199536658146131:7762515];
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000f8b/r3tmp/tmpvF8ZFo/pdisk_1.dat
2025-03-31T16:00:02.893657Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
2025-03-31T16:00:02.912863Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:00:02.949757Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:00:02.949869Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:00:02.951969Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 11813, node 2
2025-03-31T16:00:03.055624Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T16:00:03.055648Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T16:00:03.055656Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T16:00:03.055791Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:11589
TClient is connected to server localhost:11589
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-31T16:00:03.716785Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:00:03.723151Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480
2025-03-31T16:00:03.733463Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:00:03.852576Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:00:04.101633Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:00:04.248471Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:00:06.608400Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488004065143044714:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:00:06.608495Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:00:06.763169Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-03-31T16:00:06.832684Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-03-31T16:00:06.912598Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-03-31T16:00:06.971963Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-03-31T16:00:07.016203Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-03-31T16:00:07.098704Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-03-31T16:00:07.204538Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488004069438012529:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:00:07.204638Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:00:07.205285Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488004069438012534:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:00:07.216026Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480
2025-03-31T16:00:07.239272Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488004069438012536:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
2025-03-31T16:00:07.310849Z node 2 :TX_PROXY ERROR: Actor# [2:7488004069438012590:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-31T16:00:07.511314Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488004047963173963:2218];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:00:07.511472Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-31T16:00:09.312785Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480
2025-03-31T16:00:09.420562Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480
2025-03-31T16:00:09.524480Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480
>> YdbProxy::CopyTable [GOOD]
>> YdbProxy::CopyTables
>> DataShardSnapshots::LockedWriteWithAsyncIndex+WithRestart+UseSink [GOOD]
>> DataShardSnapshots::LockedWriteWithAsyncIndexAndVolatileCommit+UseSink
>> YdbProxy::ListDirectory [GOOD]
>> YdbProxy::DropTopic
|70.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber
|70.5%| [LD] {RESULT} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber
|70.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber
|70.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap
|70.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap
|70.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap
>> KqpQueryPerf::IndexInsert-QueryService+UseSink [GOOD]
>> YdbProxy::MakeDirectory [GOOD]
>> YdbProxy::OAuthToken
|70.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest >> TCertificateAuthUtilsTest::ClientCertAuthorizationParamsMatch [GOOD]
>> YdbProxy::DropTable [GOOD]
>> YdbProxy::DescribeTopic
|70.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest
|70.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest >> TCertificateAuthUtilsTest::ClientCertAuthorizationParamsMatch [GOOD]
>> KqpQueryPerf::IndexUpsert-QueryService+UseSink [GOOD]
>> KqpQueryPerf::IndexReplace-QueryService+UseSink [GOOD]
|70.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest >> YdbProxy::AlterTable [GOOD]
>> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW+VolatileTxs
>> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW+VolatileTxs
|70.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexInsert-QueryService+UseSink [GOOD]
Test command err:
Trying to start YDB, gRPC: 28239, MsgBus: 24223
2025-03-31T15:59:53.114866Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004012550116761:2202];send_to=[0:7307199536658146131:7762515];
2025-03-31T15:59:53.115047Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000f5a/r3tmp/tmphQhb93/pdisk_1.dat
2025-03-31T15:59:53.744942Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T15:59:53.745054Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T15:59:53.748478Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T15:59:53.755817Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 28239, node 1
2025-03-31T15:59:53.919593Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T15:59:53.919629Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T15:59:53.919636Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T15:59:53.919738Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:24223
TClient is connected to server localhost:24223
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-31T15:59:54.786581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T15:59:54.812701Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-03-31T15:59:54.834951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T15:59:55.081999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T15:59:55.416457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T15:59:55.567875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T15:59:58.119250Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488004012550116761:2202];send_to=[0:7307199536658146131:7762515];
2025-03-31T15:59:58.237690Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-31T15:59:58.513018Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004034024954876:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T15:59:58.513161Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T15:59:59.035803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-31T15:59:59.093291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-31T15:59:59.202109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-31T15:59:59.301381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-31T15:59:59.424513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-31T15:59:59.516601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-31T15:59:59.614709Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004038319922698:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T15:59:59.614797Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T15:59:59.615115Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004038319922703:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T15:59:59.623290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-31T15:59:59.639298Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480
2025-03-31T15:59:59.639535Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488004038319922705:2466], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-03-31T15:59:59.734998Z node 1 :TX_PROXY ERROR: Actor# [1:7488004038319922760:3465] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-31T16:00:01.306589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-03-31T16:00:01.358515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
2025-03-31T16:00:01.442511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480
Trying to start YDB, gRPC: 22109, MsgBus: 11506
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000f5a/r3tmp/tmpAWbSBg/pdisk_1.dat
2025-03-31T16:00:06.221960Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-31T16:00:06.360029Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:00:06.364717Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:00:06.364792Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:00:06.366397Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 22109, node 2
2025-03-31T16:00:06.495208Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T16:00:06.495230Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T16:00:06.495242Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T16:00:06.495364Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:11506
TClient is connected to server localhost:11506
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-31T16:00:07.097496Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:00:07.113205Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-03-31T16:00:07.133745Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:00:07.237982Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:00:07.453241Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:00:07.565949Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:00:13.344390Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488004098694861275:2410], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:00:13.344479Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:00:13.388129Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-31T16:00:13.445270Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-31T16:00:13.522734Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-31T16:00:13.673017Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-31T16:00:13.737203Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-31T16:00:13.833031Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-31T16:00:13.919316Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488004098694861789:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:00:13.919405Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:00:13.919878Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488004098694861794:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:00:13.924246Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-31T16:00:13.943523Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480
2025-03-31T16:00:13.944756Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488004098694861796:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-03-31T16:00:14.038847Z node 2 :TX_PROXY ERROR: Actor# [2:7488004102989829148:3458] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-31T16:00:15.429648Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-03-31T16:00:15.552648Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
2025-03-31T16:00:15.674649Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexUpsert-QueryService+UseSink [GOOD]
Test command err:
Trying to start YDB, gRPC: 13951, MsgBus: 31773
2025-03-31T15:59:51.428519Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004002972443271:2128];send_to=[0:7307199536658146131:7762515];
2025-03-31T15:59:51.432882Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000f77/r3tmp/tmpF3KvvU/pdisk_1.dat
2025-03-31T15:59:52.057803Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T15:59:52.061876Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T15:59:52.061979Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T15:59:52.065103Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 13951, node 1
2025-03-31T15:59:52.245578Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T15:59:52.245611Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T15:59:52.245625Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T15:59:52.245756Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:31773
TClient is connected to server localhost:31773
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-31T15:59:53.117441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T15:59:53.135830Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-03-31T15:59:53.144710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T15:59:53.373506Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T15:59:53.629608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T15:59:53.753908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T15:59:56.423277Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488004002972443271:2128];send_to=[0:7307199536658146131:7762515];
2025-03-31T15:59:56.423375Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-31T15:59:56.687563Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004024447281460:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:56.687689Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:57.312601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T15:59:57.362860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T15:59:57.418197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T15:59:57.492404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T15:59:57.538833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T15:59:57.628508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T15:59:57.756634Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004028742249278:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:57.756722Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:57.756908Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004028742249283:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:57.762032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T15:59:57.774992Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488004028742249285:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T15:59:57.855335Z node 1 :TX_PROXY ERROR: Actor# [1:7488004028742249339:3460] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:00:00.075037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:00:00.183556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:00:00.298763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 25481, MsgBus: 9690 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000f77/r3tmp/tmpOLIYi3/pdisk_1.dat 2025-03-31T16:00:05.883279Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:00:06.160216Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:00:06.198420Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:00:06.198512Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:00:06.208987Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25481, node 2 2025-03-31T16:00:06.351638Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:00:06.351659Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:00:06.351667Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:00:06.351782Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9690 TClient is connected to server localhost:9690 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:00:07.134467Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:00:07.151286Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:00:07.166150Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:00:07.294946Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:00:07.553522Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:00:07.666877Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:00:10.984146Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488004083093872620:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:00:10.984271Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:00:11.114088Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:00:11.231479Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:00:11.296953Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:00:11.343557Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:00:11.407862Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:00:11.536859Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:00:11.708479Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488004087388840446:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:00:11.708551Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:00:11.708787Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488004087388840451:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:00:11.713218Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:00:11.748457Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488004087388840453:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:00:11.830495Z node 2 :TX_PROXY ERROR: Actor# [2:7488004087388840507:3460] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:00:13.610572Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:00:13.745936Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:00:13.949478Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 >> TCertificateCheckerTest::CheckSubjectDns ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexReplace-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 26982, MsgBus: 15731 2025-03-31T15:59:50.395382Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488003998523859377:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:59:50.413287Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000f89/r3tmp/tmpoyXIPq/pdisk_1.dat 2025-03-31T15:59:51.101714Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:51.122653Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:59:51.122733Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:59:51.124749Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26982, node 1 2025-03-31T15:59:51.379447Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:59:51.379468Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:59:51.379473Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:59:51.379569Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15731 TClient is connected to server localhost:15731 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:59:52.310540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:52.352986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:52.644202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:52.949932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:53.049091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:55.357654Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488003998523859377:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:59:55.415920Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:59:56.103942Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004024293664806:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:56.104049Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:56.524214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T15:59:56.581718Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T15:59:56.645569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T15:59:56.724475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T15:59:56.807736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T15:59:56.875861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T15:59:56.952447Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004024293665324:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:56.952520Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:56.952876Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004024293665329:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:56.957586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T15:59:56.973019Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488004024293665331:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T15:59:57.049648Z node 1 :TX_PROXY ERROR: Actor# [1:7488004028588632683:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:59:59.148727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T15:59:59.249958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T15:59:59.418460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 23356, MsgBus: 20133 2025-03-31T16:00:04.672654Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488004056053678961:2125];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:00:04.672704Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000f89/r3tmp/tmpa7B3ga/pdisk_1.dat 2025-03-31T16:00:05.103741Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:00:05.136050Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:00:05.136471Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:00:05.143448Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23356, node 2 2025-03-31T16:00:05.331696Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:00:05.331724Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:00:05.331732Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:00:05.331861Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20133 TClient is connected to server localhost:20133 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:00:06.117343Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:00:06.135289Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:00:06.154225Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:00:06.284568Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:00:06.597236Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:00:06.895415Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:00:09.674821Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488004056053678961:2125];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:00:09.674891Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:00:11.468398Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488004086118451761:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:00:11.468479Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:00:11.609464Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:00:11.674319Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:00:11.719370Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:00:11.769132Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:00:11.848313Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:00:11.948651Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:00:12.045234Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488004090413419586:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:00:12.045319Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:00:12.045740Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488004090413419591:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:00:12.050695Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:00:12.070003Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2025-03-31T16:00:12.070631Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488004090413419593:2467], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:00:12.138014Z node 2 :TX_PROXY ERROR: Actor# [2:7488004090413419648:3457] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:00:13.627064Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:00:13.743035Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-31T16:00:13.887416Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::AlterTable [GOOD] Test command err: 2025-03-31T16:00:14.763917Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004102919767151:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:00:14.764113Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00156b/r3tmp/tmpVA4WmU/pdisk_1.dat 2025-03-31T16:00:15.818424Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:00:15.837422Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:00:15.837566Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:00:15.838639Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:00:15.858931Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6449 TServer::EnableGrpc on GrpcPort 22456, node 1 2025-03-31T16:00:16.647567Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:00:16.647588Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:00:16.647599Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:00:16.647717Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6449 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:00:17.406823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:00:19.757151Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488004102919767151:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:00:19.757229Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:00:21.590040Z node 1 :TX_PROXY ERROR: Actor# [1:7488004132984538908:2315] txid# 281474976710658, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } 2025-03-31T16:00:21.612548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-31T16:00:21.788635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:00:21.825646Z node 1 :TX_PROXY ERROR: Actor# [1:7488004132984539027:2396] txid# 281474976710661, issues: { message: "Can\'t drop unknown column: \'extra\'" severity: 1 } >> YdbProxy::DescribeConsumer [GOOD] >> YdbProxy::CopyTables [GOOD] >> YdbProxy::AlterTopic >> YdbProxy::RemoveDirectory [GOOD] >> YdbProxy::StaticCreds >> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_3 >> AsyncIndexChangeCollector::AllColumnsInPk [GOOD] >> AsyncIndexChangeCollector::CoverIndexedColumn |70.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest >> CdcStreamChangeCollector::IndexAndStreamUpsert [GOOD] >> CdcStreamChangeCollector::NewImage >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-57 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-58 |70.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut |70.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut |70.6%| [LD] {RESULT} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut |70.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow |70.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow |70.6%| [LD] {RESULT} 
$(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow >> TCertificateCheckerTest::CheckSubjectDns [GOOD] >> YdbProxy::DescribeTopic [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::DescribeConsumer [GOOD] Test command err: 2025-03-31T16:00:12.288253Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004092721006763:2197];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:00:12.288589Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0015d4/r3tmp/tmp77fZZF/pdisk_1.dat 2025-03-31T16:00:13.002897Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:00:13.002993Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:00:13.009306Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:00:13.086437Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:18192 TServer::EnableGrpc on GrpcPort 8535, node 1 2025-03-31T16:00:13.638632Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:00:13.638655Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:00:13.638670Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:00:13.639027Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18192 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:00:14.366375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:00:14.487587Z node 1 :TX_PROXY ERROR: Actor# [1:7488004101310941836:2299] txid# 281474976710658, issues: { message: "Invalid retention period: specified: 31536000s, min: 1s, max: 2678400s" severity: 1 } 2025-03-31T16:00:18.241060Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488004116898189131:2211];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0015d4/r3tmp/tmpVkxJiS/pdisk_1.dat 2025-03-31T16:00:18.316566Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:00:18.505386Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:00:18.572664Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:00:18.572755Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:00:18.574252Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8333 TServer::EnableGrpc on GrpcPort 2497, node 2 2025-03-31T16:00:19.263799Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:00:19.263823Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:00:19.263831Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:00:19.263950Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8333 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:00:20.205295Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:00:20.219595Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 >> TPersqueueControlPlaneTestSuite::TestAddRemoveReadRule >> TPersQueueCommonTest::Auth_CreateGrpcStreamWithInvalidTokenInInitialMetadata_SessionClosedWithUnauthenticatedError >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithInvalidToken_SessionClosedWithUnauthenticatedError >> DataShardSnapshots::LockedWriteWithPendingVolatileCommit-UseSink [GOOD] >> SystemView::ShowCreateTableDefaultLiteral [GOOD] >> SystemView::ShowCreateTablePartitionAtKeys >> YdbProxy::OAuthToken [GOOD] >> TPersQueueCommonTest::TestLimiterLimitsWithBlobsRateLimit |70.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest >> TCertificateCheckerTest::CheckSubjectDns [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnSchemeTx [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnDeadShard >> DataShardSnapshots::PipelineAndMediatorRestoreRace [GOOD] >> DataShardSnapshots::ShardRestartLockBasic ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::DescribeTopic [GOOD] Test command err: 2025-03-31T16:00:13.435647Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004095500361052:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:00:13.436062Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001573/r3tmp/tmpof4SsF/pdisk_1.dat 2025-03-31T16:00:14.117749Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:00:14.152225Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:00:14.152349Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:00:14.168169Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26967 TServer::EnableGrpc on GrpcPort 24691, node 1 2025-03-31T16:00:14.539876Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:00:14.539896Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:00:14.539904Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:00:14.540023Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26967 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:00:15.259814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:00:15.292389Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:00:18.417293Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488004095500361052:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:00:18.417379Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:00:18.777022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:00:18.944104Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-03-31T16:00:18.945915Z node 1 :TX_PROXY ERROR: Actor# [1:7488004116975198203:2403] txid# 281474976710660, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } 2025-03-31T16:00:20.789366Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488004126582352336:2212];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001573/r3tmp/tmpK7A5tF/pdisk_1.dat 2025-03-31T16:00:20.864835Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:00:21.004440Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:00:21.058249Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:00:21.058337Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:00:21.061179Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10325 TServer::EnableGrpc on GrpcPort 4230, node 2 2025-03-31T16:00:21.547749Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:00:21.547773Z node 2 :NET_CLASSIFIER 
WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:00:21.547781Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:00:21.547894Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10325 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:00:22.011988Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:00:22.020358Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 |70.6%| [TA] $(B)/ydb/core/security/certificate_check/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TPersQueueCommonTest::Auth_MultipleUpdateTokenRequestIterationsWithValidToken_GotUpdateTokenResponseForEachRequest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::OAuthToken [GOOD] Test command err: 2025-03-31T16:00:12.670860Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004090327431673:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:00:12.670911Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001585/r3tmp/tmp4BROl3/pdisk_1.dat 2025-03-31T16:00:13.703868Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:00:13.704006Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:00:13.728200Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:00:13.797641Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:00:13.881035Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:21720 TServer::EnableGrpc on GrpcPort 5127, node 1 2025-03-31T16:00:14.579775Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:00:14.579802Z node 1 :NET_CLASSIFIER WARN: will try to initialize from 
file: (empty maybe) 2025-03-31T16:00:14.579809Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:00:14.579931Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21720 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:00:15.469536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:00:15.539913Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:00:15.666179Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-31T16:00:20.736960Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488004127411258054:2125];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:00:20.737004Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001585/r3tmp/tmpxFPzfa/pdisk_1.dat 2025-03-31T16:00:21.171572Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:00:21.257159Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:00:21.257280Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:00:21.265148Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23479 TServer::EnableGrpc on GrpcPort 18433, node 2 2025-03-31T16:00:21.987512Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:00:21.987536Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:00:21.987543Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:00:21.987652Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23479 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:00:22.762210Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:00:22.793768Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 >> YdbProxy::DropTopic [GOOD] >> TOlap::StoreStats ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::LockedWriteWithPendingVolatileCommit-UseSink [GOOD] Test command err: 2025-03-31T15:58:17.085987Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:58:17.086269Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:17.086443Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0014c3/r3tmp/tmpsjSmzJ/pdisk_1.dat 2025-03-31T15:58:17.642837Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T15:58:17.685901Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:58:17.731275Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-31T15:58:17.732371Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:58:17.732494Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:58:17.732672Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-03-31T15:58:17.756310Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:58:17.846247Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-03-31T15:58:17.846345Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-03-31T15:58:17.846496Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-03-31T15:58:17.999735Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-03-31T15:58:17.999836Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T15:58:18.000445Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-03-31T15:58:18.000532Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-03-31T15:58:18.000842Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T15:58:18.001011Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T15:58:18.001101Z node 1 :TX_PROXY DEBUG: Actor# 
[1:641:2549] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-03-31T15:58:18.001413Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvClientConnected 2025-03-31T15:58:18.002879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T15:58:18.004227Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-03-31T15:58:18.004293Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-03-31T15:58:18.060960Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-03-31T15:58:18.062194Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-03-31T15:58:18.062662Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-31T15:58:18.062916Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:58:18.120154Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T15:58:18.120790Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:58:18.120889Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T15:58:18.122213Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T15:58:18.122273Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T15:58:18.122309Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T15:58:18.122622Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T15:58:18.122743Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T15:58:18.122809Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-31T15:58:18.123190Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T15:58:18.161193Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T15:58:18.161489Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T15:58:18.161609Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-31T15:58:18.161646Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T15:58:18.161697Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T15:58:18.161786Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T15:58:18.162003Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
2025-03-31T15:58:18.162057Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T15:58:18.162312Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T15:58:18.162411Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T15:58:18.162510Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T15:58:18.162544Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T15:58:18.162582Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-31T15:58:18.162616Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-31T15:58:18.162669Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-31T15:58:18.162703Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T15:58:18.162746Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T15:58:18.162852Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T15:58:18.162889Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T15:58:18.162968Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-31T15:58:18.163124Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-03-31T15:58:18.163165Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T15:58:18.163306Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T15:58:18.163580Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-31T15:58:18.163635Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T15:58:18.163734Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T15:58:18.163778Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-31T15:58:18.163825Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-31T15:58:18.163903Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-03-31T15:58:18.163983Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-31T15:58:18.164355Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-31T15:58:18.164402Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-31T15:58:18.164443Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-31T15:58:18.164477Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 
72075186224037888 on unit FinishPropose 2025-03-31T15:58:18.164523Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-31T15:58:18.164574Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-31T15:58:18.164613Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-31T15:58:18.164645Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-31T15:58:18.164672Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-31T15:58:18.165866Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T15:58:18.165927Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-31T15:58:18.165962Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-31T15:58:18.166012Z node 1 :TX_DATASHARD TRACE: Prop ... 2075186224037888 SetTabletProducer# 72075186224037889 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2025-03-31T16:00:25.118112Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-03-31T16:00:25.118193Z node 13 :TX_DATASHARD DEBUG: Receive RS at 72075186224037888 source 72075186224037889 dest 72075186224037888 producer 72075186224037889 txId 281474976715663 2025-03-31T16:00:25.118336Z node 13 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037888 got read set: {TEvReadSet step# 3036 txid# 281474976715663 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037889 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2025-03-31T16:00:25.118440Z node 13 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037888 2025-03-31T16:00:25.118570Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [13:666:2570], Recipient [13:756:2635]: {TEvReadSet step# 3036 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2025-03-31T16:00:25.118598Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-03-31T16:00:25.118626Z node 13 :TX_DATASHARD DEBUG: Receive RS at 72075186224037889 source 72075186224037888 dest 72075186224037889 producer 72075186224037888 txId 281474976715663 2025-03-31T16:00:25.118678Z node 13 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 3036 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2025-03-31T16:00:25.118713Z node 13 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037889 2025-03-31T16:00:25.118785Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [13:756:2635], Recipient [13:666:2570]: {TEvReadSet step# 3036 txid# 281474976715663 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037889 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-03-31T16:00:25.118813Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-03-31T16:00:25.118841Z node 13 :TX_DATASHARD DEBUG: Receive RS at 72075186224037888 source 72075186224037889 
dest 72075186224037888 producer 72075186224037889 txId 281474976715663 2025-03-31T16:00:25.118882Z node 13 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037888 got read set: {TEvReadSet step# 3036 txid# 281474976715663 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037889 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-03-31T16:00:25.119612Z node 13 :TX_DATASHARD DEBUG: Complete [3036 : 281474976715663] from 72075186224037888 at tablet 72075186224037888 send result to client [13:983:2781], exec latency: 0 ms, propose latency: 0 ms 2025-03-31T16:00:25.120001Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [13:666:2570], Recipient [13:756:2635]: {TEvReadSet step# 3036 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-03-31T16:00:25.120052Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-03-31T16:00:25.120082Z node 13 :TX_DATASHARD DEBUG: Receive RS at 72075186224037889 source 72075186224037888 dest 72075186224037889 producer 72075186224037888 txId 281474976715663 2025-03-31T16:00:25.120137Z node 13 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 3036 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-03-31T16:00:25.120255Z node 13 :TX_DATASHARD DEBUG: Complete [3036 : 281474976715663] from 72075186224037889 at tablet 72075186224037889 send result to client [13:983:2781], exec latency: 0 ms, propose latency: 0 ms TEvProposeTransactionResult: TxKind: TX_KIND_DATA Origin: 72075186224037888 Status: COMPLETE TxId: 281474976715663 TxResult: "" ExecLatency: 0 ProposeLatency: 0 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 1980 } } ComputeActorStats { Tasks { Tables { TablePath: "/Root/table-1" WriteRows: 1 WriteBytes: 8 } } } 2025-03-31T16:00:25.121094Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:00:25.121451Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 TEvProposeTransactionResult: TxKind: TX_KIND_DATA Origin: 72075186224037889 Status: COMPLETE TxId: 281474976715663 TxResult: "" ExecLatency: 0 ProposeLatency: 0 TxStats { PerShardStats { ShardId: 72075186224037889 CpuTimeUsec: 4932 } } ComputeActorStats { Tasks { Tables { TablePath: "/Root/table-2" WriteRows: 1 WriteBytes: 8 } } } 2025-03-31T16:00:25.123847Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:00:25.125531Z node 13 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037888 2025-03-31T16:00:25.125759Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [13:666:2570], Recipient [13:756:2635]: {TEvReadSet step# 3036 txid# 281474976715663 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 1} 2025-03-31T16:00:25.125872Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:00:25.125954Z node 13 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715663 2025-03-31T16:00:25.132631Z node 13 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 
72075186224037889 2025-03-31T16:00:25.157142Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [13:756:2635], Recipient [13:666:2570]: {TEvReadSet step# 3036 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 1} 2025-03-31T16:00:25.157262Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:00:25.157337Z node 13 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 281474976715663 2025-03-31T16:00:25.358495Z node 13 :TX_PROXY DEBUG: actor# [13:59:2106] Handle TEvExecuteKqpTransaction 2025-03-31T16:00:25.358602Z node 13 :TX_PROXY DEBUG: actor# [13:59:2106] TxId# 281474976715667 ProcessProposeKqpTransaction 2025-03-31T16:00:25.359891Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jqpexghc2frtak54k9nn8sfe, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=MjY0OTg4MWYtOWFhMGM5MjgtYTJmZDE5YzEtNWQ3Y2EwMDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TEvRead: ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 2025-03-31T16:00:25.363536Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [13:1107:2901], Recipient [13:666:2570]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2025-03-31T16:00:25.363724Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-03-31T16:00:25.363817Z node 13 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v3036/281474976715663 IncompleteEdge# v{min} UnprotectedReadEdge# v4000/18446744073709551615 ImmediateWriteEdge# v4001/0 ImmediateWriteEdgeReplied# v4001/0 2025-03-31T16:00:25.363907Z node 13 :TX_DATASHARD TRACE: 72075186224037888 changed HEAD read to non-repeatable v4001/18446744073709551615 2025-03-31T16:00:25.364023Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CheckRead 2025-03-31T16:00:25.364178Z node 13 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-03-31T16:00:25.364248Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckRead 2025-03-31T16:00:25.364324Z node 13 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-03-31T16:00:25.364382Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies 2025-03-31T16:00:25.364448Z node 13 :TX_DATASHARD TRACE: Activated operation [0:7] at 72075186224037888 2025-03-31T16:00:25.364513Z node 13 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-03-31T16:00:25.364539Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-03-31T16:00:25.364565Z node 13 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit ExecuteRead 2025-03-31T16:00:25.364589Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:7] at 
72075186224037888 on unit ExecuteRead 2025-03-31T16:00:25.364770Z node 13 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2025-03-31T16:00:25.365192Z node 13 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[13:1107:2901], 0} after executionsCount# 1 2025-03-31T16:00:25.365285Z node 13 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[13:1107:2901], 0} sends rowCount# 2, bytes# 96, quota rows left# 999, quota bytes left# 5242784, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-03-31T16:00:25.365427Z node 13 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[13:1107:2901], 0} finished in read 2025-03-31T16:00:25.365537Z node 13 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-03-31T16:00:25.365564Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteRead 2025-03-31T16:00:25.365589Z node 13 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 2025-03-31T16:00:25.365619Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CompletedOperations 2025-03-31T16:00:25.365671Z node 13 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-03-31T16:00:25.365694Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2025-03-31T16:00:25.365732Z node 13 :TX_DATASHARD TRACE: Execution plan for [0:7] at 72075186224037888 has finished 2025-03-31T16:00:25.365799Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-03-31T16:00:25.365965Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-03-31T16:00:25.367170Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [13:1107:2901], Recipient [13:666:2570]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-03-31T16:00:25.367256Z node 13 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } items { uint32_value: 11 } }, { items { uint32_value: 2 } items { uint32_value: 2 } items { uint32_value: 22 } } |70.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut |70.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut |70.6%| [TA] {RESULT} $(B)/ydb/core/security/certificate_check/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |70.6%| [LD] {RESULT} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut >> KqpIndexes::SimpleVectorIndexOrderByCosineSimilarityNullableLevel2 [GOOD] >> DataShardSnapshots::DelayedWriteReplyAfterSplit [GOOD] >> DataShardSnapshots::DelayedWriteReadableAfterSplitAndReboot >> SystemView::AuthUsers [GOOD] >> SystemView::AuthUsers_LockUnlock ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::DropTopic [GOOD] Test command err: 2025-03-31T16:00:13.042161Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004096540130820:2267];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:00:13.043934Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00157f/r3tmp/tmp1Z28wz/pdisk_1.dat 2025-03-31T16:00:13.717008Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:00:13.717114Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:00:13.722801Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:00:13.842562Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:61519 TServer::EnableGrpc on GrpcPort 27890, node 1 2025-03-31T16:00:14.295070Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:00:14.295121Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:00:14.295128Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:00:14.295245Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61519 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:00:15.309887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:00:15.453068Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-31T16:00:19.852057Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488004121601543510:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:00:19.940573Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00157f/r3tmp/tmpQFfcfW/pdisk_1.dat 2025-03-31T16:00:20.126815Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:00:20.146829Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:00:20.155305Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:00:20.164160Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1968 TServer::EnableGrpc on GrpcPort 9692, node 2 2025-03-31T16:00:20.840248Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:00:20.840289Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:00:20.840297Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:00:20.840438Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1968 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:00:21.730388Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:00:22.145899Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-31T16:00:22.169059Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2025-03-31T16:00:22.169111Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2025-03-31T16:00:22.169476Z node 2 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,2) wasn't found 2025-03-31T16:00:22.169548Z node 2 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,1) wasn't found 2025-03-31T16:00:22.222023Z node 2 :TX_PROXY ERROR: Actor# [2:7488004134486446046:2407] txid# 281474976710660, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } >> TOlapNaming::CreateColumnTableOk >> TOlapNaming::AlterColumnStoreOk >> YdbProxy::StaticCreds [GOOD] >> TOlapNaming::CreateColumnStoreOk |70.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage |70.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage |70.7%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage >> SystemView::PartitionStatsOneSchemeShard [GOOD] >> SystemView::PartitionStatsOneSchemeShardDataQuery >> AsyncIndexChangeCollector::CoverIndexedColumn [GOOD] >> TPersQueueTest::WriteExistingBigValue [GOOD] >> TPersQueueTest::WriteEmptyData |70.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut |70.7%| [LD] {RESULT} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut |70.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut >> YdbProxy::AlterTopic [GOOD] >> TOlap::CreateTableWithNullableKeysNotAllowed ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::SimpleVectorIndexOrderByCosineSimilarityNullableLevel2 [GOOD] Test command err: Trying to start YDB, gRPC: 8159, MsgBus: 10610 2025-03-31T15:58:34.802914Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488003673429832177:2203];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:58:34.856075Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000deb/r3tmp/tmpGzQjNx/pdisk_1.dat 2025-03-31T15:58:35.765319Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:58:35.785780Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:58:35.785891Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:58:35.789396Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8159, node 1 2025-03-31T15:58:36.127750Z node 1 :NET_CLASSIFIER WARN: 
distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:58:36.127769Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:58:36.127785Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:58:36.127912Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10610 TClient is connected to server localhost:10610 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:58:37.253536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:58:37.278086Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T15:58:37.291665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:58:37.642901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:58:38.073070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:58:38.198176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:58:39.795295Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488003673429832177:2203];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:58:39.795364Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:58:42.972373Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003707789572184:2414], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:42.972471Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:43.350286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T15:58:43.391754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T15:58:43.459452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T15:58:43.522198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T15:58:43.566021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T15:58:43.626027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T15:58:43.729338Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003712084540023:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:43.729590Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:43.730616Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003712084540029:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:43.741214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T15:58:43.764092Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488003712084540031:2469], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T15:58:43.836155Z node 1 :TX_PROXY ERROR: Actor# [1:7488003712084540087:3468] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:58:45.565729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T15:58:45.896305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-03-31T15:58:46.024341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-03-31T15:58:50.764489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T15:58:50.764524Z node 1 :IMPORT WARN: Table profiles were not loaded Trying to start YDB, gRPC: 28389, MsgBus: 31534 2025-03-31T15:59:24.272676Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488003886015830999:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:59:24.272722Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000deb/r3tmp/tmpWVV6OF/pdisk_1.dat 2025-03-31T15:59:24.496286Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:24.526935Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:59:24.527035Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:59:24.528622Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28389, node 2 2025-03-31T15:59:24.604839Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:59:24.604869Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:59:24.604880Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:59:24.605062Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31534 TClient is connected to server localhost:31534 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:59:25.092676Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:25.115230Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:25.186670Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:25.440916Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:25.535720Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:28.519057Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488003903195701967:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:28.519161Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:28.573540Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T15:59:28.605964Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T15:59:28.642193Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T15:59:28.693906Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T15:59:28.731756Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T15:59:28.766194Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T15:59:28.838714Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488003903195702478:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:28.838853Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:28.838947Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488003903195702483:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:28.843026Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T15:59:28.857221Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488003903195702485:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T15:59:28.932234Z node 2 :TX_PROXY ERROR: Actor# [2:7488003903195702540:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:59:29.273060Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488003886015830999:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:59:29.273142Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:59:29.941935Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T15:59:30.236570Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-03-31T15:59:30.282798Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-03-31T15:59:30.381508Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715760:0, at schemeshard: 72057594046644480 2025-03-31T15:59:30.432580Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715763:0, at schemeshard: 72057594046644480 2025-03-31T15:59:30.482426Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037925 not found 2025-03-31T15:59:30.482481Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037924 not found 2025-03-31T15:59:30.482502Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037926 not found 2025-03-31T15:59:39.493068Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T15:59:39.493094Z node 2 :IMPORT WARN: Table profiles were not loaded >> TOlap::CreateStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::StaticCreds [GOOD] Test command err: 2025-03-31T16:00:16.455168Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004110567180747:2205];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:00:16.456039Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001564/r3tmp/tmpgBKpJW/pdisk_1.dat 2025-03-31T16:00:17.327525Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:00:17.396245Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2025-03-31T16:00:17.396339Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:00:17.445099Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13112 TServer::EnableGrpc on GrpcPort 11977, node 1 2025-03-31T16:00:18.195635Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:00:18.195656Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:00:18.195662Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:00:18.195754Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13112 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:00:19.242179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:00:19.372069Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-31T16:00:19.396261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-31T16:00:19.431616Z node 1 :TX_PROXY ERROR: Actor# [1:7488004123452083145:2329] txid# 281474976710660, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001564/r3tmp/tmpNGfP9B/pdisk_1.dat 2025-03-31T16:00:25.397434Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:00:25.511832Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:00:25.561793Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:00:25.561875Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:00:25.568229Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30197 TServer::EnableGrpc on GrpcPort 2694, node 2 2025-03-31T16:00:26.364045Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:00:26.364066Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:00:26.364074Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:00:26.364182Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30197 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:00:27.000295Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:00:27.007797Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:00:27.019667Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743436827048 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "user1" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 1 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 ... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743436827048 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "user1" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 2 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 ... 
(TRUNCATED) 2025-03-31T16:00:27.382416Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 >> TOlapNaming::AlterColumnStoreOk [GOOD] >> TOlapNaming::AlterColumnStoreFailed >> CdcStreamChangeCollector::NewImage [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::AlterTopic [GOOD] Test command err: 2025-03-31T16:00:12.287386Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004093428884987:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:00:12.288167Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00158a/r3tmp/tmpK0pdLl/pdisk_1.dat 2025-03-31T16:00:12.716467Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:00:12.747633Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:00:12.747723Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:00:12.752235Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23721 TServer::EnableGrpc on GrpcPort 29649, node 1 2025-03-31T16:00:13.135529Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:00:13.135559Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:00:13.135569Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:00:13.135721Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23721 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:00:13.764919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:00:13.827600Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:00:17.283544Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488004093428884987:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:00:17.283651Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:00:18.031256Z node 1 :TX_PROXY ERROR: Actor# [1:7488004119198689301:2313] txid# 281474976710658, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } 2025-03-31T16:00:18.053172Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-31T16:00:19.135223Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488004120988402677:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:00:19.172093Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00158a/r3tmp/tmpHoQHdJ/pdisk_1.dat 2025-03-31T16:00:19.323877Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:00:19.360130Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:00:19.360215Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:00:19.368551Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:61032 TServer::EnableGrpc on GrpcPort 19109, node 2 2025-03-31T16:00:19.843710Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:00:19.843736Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:00:19.843744Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:00:19.843850Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61032 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:00:20.336474Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:00:23.406137Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:00:23.530216Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00158a/r3tmp/tmpP8DVla/pdisk_1.dat 2025-03-31T16:00:24.891071Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:00:25.238297Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:00:25.295692Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:00:25.295786Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:00:25.298815Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:64728 TServer::EnableGrpc on GrpcPort 24504, node 3 2025-03-31T16:00:26.225365Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:00:26.225387Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:00:26.225397Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:00:26.225514Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64728 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:00:26.891353Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:00:27.378244Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-31T16:00:27.444558Z node 3 :TX_PROXY ERROR: Actor# [3:7488004157002516055:2403] txid# 281474976710660, issues: { message: "Invalid retention period: specified: 31536000s, min: 1s, max: 2678400s" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> AsyncIndexChangeCollector::CoverIndexedColumn [GOOD] Test command err: 2025-03-31T16:00:05.528847Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:00:05.529118Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:00:05.529311Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018c2/r3tmp/tmpjt272H/pdisk_1.dat 2025-03-31T16:00:06.161790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:00:06.241655Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:00:06.288851Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:00:06.289023Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:00:06.304237Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:00:06.417112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:00:06.543583Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:676:2578] 2025-03-31T16:00:06.543903Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:00:06.692233Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:00:06.692442Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:00:06.694227Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:00:06.694313Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:00:06.694402Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:00:06.694793Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:00:06.706230Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:00:06.706401Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:703:2578] in generation 1 2025-03-31T16:00:06.707919Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:680:2580] 2025-03-31T16:00:06.708156Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:00:06.754145Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:00:06.754265Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:00:06.784874Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-03-31T16:00:06.784995Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-03-31T16:00:06.785101Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-03-31T16:00:06.785414Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:00:06.785565Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:00:06.785635Z node 1 :TX_DATASHARD DEBUG: DataShard 
72075186224037889 persisting started state actor id [1:711:2580] in generation 1 2025-03-31T16:00:06.797242Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:00:06.897711Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T16:00:06.897915Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:00:06.898066Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:714:2599] 2025-03-31T16:00:06.898127Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:00:06.898162Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T16:00:06.898199Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:00:06.898532Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:00:06.898570Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-03-31T16:00:06.898626Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:00:06.898693Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:715:2600] 2025-03-31T16:00:06.898717Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-03-31T16:00:06.898744Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-03-31T16:00:06.898767Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-31T16:00:06.899335Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T16:00:06.899451Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T16:00:06.899986Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:00:06.900057Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:00:06.900097Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:00:06.900138Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:00:06.900190Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-03-31T16:00:06.900266Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-03-31T16:00:06.900341Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:670:2574], serverId# [1:688:2584], sessionId# [0:0:0] 2025-03-31T16:00:06.900383Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-31T16:00:06.900407Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:00:06.900452Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-03-31T16:00:06.900485Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-03-31T16:00:06.900623Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 
2025-03-31T16:00:06.900952Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T16:00:06.901072Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T16:00:06.901481Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:671:2575], serverId# [1:701:2592], sessionId# [0:0:0] 2025-03-31T16:00:06.901656Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-31T16:00:06.901829Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-03-31T16:00:06.901888Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-03-31T16:00:06.907857Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:00:06.907957Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-03-31T16:00:06.918778Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:00:06.918894Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-31T16:00:06.919006Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-03-31T16:00:06.919039Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-03-31T16:00:07.091667Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:739:2618], serverId# [1:742:2621], sessionId# [0:0:0] 2025-03-31T16:00:07.091925Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:740:2619], serverId# [1:743:2622], sessionId# [0:0:0] 2025-03-31T16:00:07.104899Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037889 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2025-03-31T16:00:07.104995Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-31T16:00:07.105523Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T16:00:07.105559Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:00:07.105697Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-31T16:00:07.105755Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:00:07.105810Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037889 2025-03-31T16:00:07.106061Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-31T16:00:07.106198Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active 
planned 1 immediate 0 planned 1 2025-03-31T16:00:07.106394Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:00:07.106446Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:00:07.106475Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-31T16:00:07.106629Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-31T16:00:07.106725Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:00:07.106807Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-31T16:00:07.106861Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037889 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-31T16:00:07.113328Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037889 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-31T ... 224037890 2025-03-31T16:00:31.357356Z node 4 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037890 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-31T16:00:31.357499Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:00:31.359718Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-03-31T16:00:31.359811Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:00:31.359878Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-03-31T16:00:31.359939Z node 4 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037890 tableId# [OwnerId: 72057594046644480, LocalPathId: 6] schema version# 1 2025-03-31T16:00:31.360333Z node 4 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037890 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-31T16:00:31.360703Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:00:31.362959Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037889 time 0 2025-03-31T16:00:31.363008Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-31T16:00:31.364104Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-03-31T16:00:31.364197Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:00:31.365497Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 1000} 2025-03-31T16:00:31.365563Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-03-31T16:00:31.366434Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:00:31.366486Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:00:31.366550Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-31T16:00:31.366652Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 
281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-31T16:00:31.366730Z node 4 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-31T16:00:31.366851Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:00:31.368303Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037890 time 0 2025-03-31T16:00:31.368353Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-03-31T16:00:31.368830Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:00:31.368928Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-03-31T16:00:31.368986Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-03-31T16:00:31.369695Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-03-31T16:00:31.369756Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-03-31T16:00:31.369791Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037889 2025-03-31T16:00:31.369848Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037889 at tablet 72075186224037889 send result to client [4:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-31T16:00:31.369896Z node 4 :TX_DATASHARD INFO: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-31T16:00:31.369975Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-31T16:00:31.370081Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037890 step# 1000} 2025-03-31T16:00:31.370128Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-03-31T16:00:31.371964Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-03-31T16:00:31.372014Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890 2025-03-31T16:00:31.372084Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037890 2025-03-31T16:00:31.372173Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037890 at tablet 72075186224037890 send result to client [4:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-31T16:00:31.372223Z node 4 :TX_DATASHARD INFO: 72075186224037890 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-31T16:00:31.372316Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-03-31T16:00:31.378026Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-03-31T16:00:31.378508Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-03-31T16:00:31.378597Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got 
TEvSchemaChangedResult from SS at 72075186224037888 2025-03-31T16:00:31.379231Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037889 coordinator 72057594046316545 last step 0 next step 1000 2025-03-31T16:00:31.379621Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037890 coordinator 72057594046316545 last step 0 next step 1000 2025-03-31T16:00:31.380004Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037889 state Ready 2025-03-31T16:00:31.380062Z node 4 :TX_DATASHARD DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-03-31T16:00:31.380870Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037890 state Ready 2025-03-31T16:00:31.380928Z node 4 :TX_DATASHARD DEBUG: 72075186224037890 Got TEvSchemaChangedResult from SS at 72075186224037890 2025-03-31T16:00:31.395971Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:838:2697], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:00:31.396095Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:848:2702], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:00:31.396184Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:00:31.412812Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:00:31.419374Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:00:31.419509Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-03-31T16:00:31.419567Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-03-31T16:00:31.613371Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:00:31.613527Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-03-31T16:00:31.613590Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-03-31T16:00:31.626027Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:852:2705], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:00:31.665238Z node 4 :TX_PROXY ERROR: Actor# [4:934:2756] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:00:31.782588Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqpexpmz3egwxdvcyh0evsds, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=ZmMyNzU2NzctNWEwM2Y3YTctZTkyYTE4OGUtYjViOTkzYTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:00:31.785684Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:1039:2799], serverId# [4:1040:2800], sessionId# [0:0:0] 2025-03-31T16:00:31.786155Z node 4 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037889 2025-03-31T16:00:31.786464Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 1 Group: 1743436831786358 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 38b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-03-31T16:00:31.786640Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 2 Group: 1743436831786358 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] Kind: AsyncIndex Source: Unspecified Body: 42b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-03-31T16:00:31.786799Z node 4 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037889, row count=1 2025-03-31T16:00:31.799914Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 38 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 }, { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] BodySize: 42 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 } 2025-03-31T16:00:31.800047Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-31T16:00:31.807454Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:1046:2805], serverId# [4:1047:2806], sessionId# [0:0:0] 2025-03-31T16:00:31.824519Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:1048:2807], serverId# [4:1049:2808], sessionId# [0:0:0] >> TOlapNaming::CreateColumnStoreOk [GOOD] >> TOlapNaming::CreateColumnStoreFailed >> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW+VolatileTxs [GOOD] >> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW-VolatileTxs >> TPersQueueTest::DirectReadNotCached [GOOD] >> TPersQueueTest::DirectReadBadCases >> TOlap::CreateTableWithNullableKeysNotAllowed [GOOD] >> TOlap::CreateTableWithNullableKeys >> TOlapNaming::AlterColumnStoreFailed [GOOD] >> TOlapNaming::CreateColumnStoreFailed [GOOD] >> TOlapNaming::AlterColumnTableOk |70.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing |70.7%| [LD] {RESULT} 
$(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing |70.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing >> SystemView::ShowCreateTable [GOOD] >> SystemView::QueryStats >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnDeadShard [GOOD] >> TOlap::CreateStore [GOOD] >> TOlap::CreateDropTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> CdcStreamChangeCollector::NewImage [GOOD] Test command err: 2025-03-31T16:00:03.497900Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:00:03.498278Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:00:03.498496Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018d1/r3tmp/tmpVKbPUY/pdisk_1.dat 2025-03-31T16:00:04.087062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:00:04.216119Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:00:04.229271Z node 1 :TABLET_SAUSAGECACHE NOTICE: Update config MemoryLimit: 33554432 2025-03-31T16:00:04.268177Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:00:04.268343Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:00:04.280178Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:00:04.368681Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:00:04.440298Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-31T16:00:04.440568Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:00:04.493225Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:00:04.493364Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:00:04.496027Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:00:04.496122Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:00:04.496198Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:00:04.496644Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:00:04.496824Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:00:04.496933Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-31T16:00:04.510142Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:00:04.551365Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T16:00:04.551653Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:00:04.551788Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-31T16:00:04.551829Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:00:04.551868Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T16:00:04.551937Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at 
tablet# 72075186224037888 2025-03-31T16:00:04.552400Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T16:00:04.552507Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T16:00:04.552619Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:00:04.552666Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:00:04.552736Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:00:04.552791Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:00:04.552883Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-31T16:00:04.553069Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:00:04.553356Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T16:00:04.553482Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T16:00:04.555890Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:00:04.566780Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:00:04.566892Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-31T16:00:04.737353Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-31T16:00:04.746566Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T16:00:04.746669Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:00:04.755948Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:00:04.756023Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:00:04.756092Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-31T16:00:04.756392Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-31T16:00:04.756563Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:00:04.757267Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:00:04.757353Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-31T16:00:04.763853Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-31T16:00:04.764349Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp 
at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:00:04.766717Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-03-31T16:00:04.766782Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:00:04.776300Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-03-31T16:00:04.776425Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:00:04.777566Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:00:04.777621Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:00:04.777676Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-31T16:00:04.777748Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-31T16:00:04.777801Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-31T16:00:04.777936Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:00:04.790680Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:00:04.801277Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-03-31T16:00:04.801509Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-03-31T16:00:04.801579Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-31T16:00:04.848501Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:00:04.848744Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2025-03-31T16:00:04.848802Z node 1 :TX_DATASHARD INFO: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2025-03-31T16:00:04.848841Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2025-03-31T16:00:04.850178Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:00:04.892028Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:00:05.441577Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715658 at step 1500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T16:00:05.441651Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:00:05.441832Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:00:05.441877Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active 
planned 0 immediate 0 planned 1 2025-03-31T16:00:05.441923Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1500:281474976715658] in PlanQueue unit at 72075186224037888 2025-03-31T16:00:05.442135Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1500:281474976715658 keys extracted: 0 2025-03-31T16:00:05.442254Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:00:05.442565Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:00:05.451531Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:00:05.612083Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2025-03-31T16:00:05.612185Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:00:05.612230Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:00:05.612296Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tab ... DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:00:33.158346Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:00:33.158410Z node 4 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-31T16:00:33.158750Z node 4 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-31T16:00:33.158931Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:00:33.159830Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:00:33.159931Z node 4 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-31T16:00:33.160524Z node 4 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-31T16:00:33.161120Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:00:33.163889Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-03-31T16:00:33.163990Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:00:33.165333Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-03-31T16:00:33.165447Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:00:33.166774Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:00:33.166851Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:00:33.166915Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-31T16:00:33.167007Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-31T16:00:33.167076Z node 4 
:TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-31T16:00:33.167236Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:00:33.168775Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:00:33.171156Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-03-31T16:00:33.171462Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-03-31T16:00:33.171545Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-31T16:00:33.187526Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:00:33.187737Z node 4 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2025-03-31T16:00:33.187806Z node 4 :TX_DATASHARD INFO: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2025-03-31T16:00:33.187846Z node 4 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2025-03-31T16:00:33.188495Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:00:33.216390Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:00:33.444733Z node 4 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715658 at step 1500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T16:00:33.444839Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:00:33.445237Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:00:33.445301Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:00:33.445361Z node 4 :TX_DATASHARD DEBUG: Found ready operation [1500:281474976715658] in PlanQueue unit at 72075186224037888 2025-03-31T16:00:33.445637Z node 4 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1500:281474976715658 keys extracted: 0 2025-03-31T16:00:33.445833Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:00:33.446248Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:00:33.447363Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:00:33.531439Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2025-03-31T16:00:33.531557Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:00:33.531595Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:00:33.531640Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit 
heartbeats: at tablet# 72075186224037888 2025-03-31T16:00:33.531723Z node 4 :TX_DATASHARD DEBUG: Complete [1500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [4:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-31T16:00:33.531792Z node 4 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715658 state Ready TxInFly 0 2025-03-31T16:00:33.531891Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:00:33.534466Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715658 datashard 72075186224037888 state Ready 2025-03-31T16:00:33.534578Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-31T16:00:33.549610Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:887:2724], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:00:33.549759Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:897:2729], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:00:33.549888Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:00:33.556995Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-31T16:00:33.564847Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:00:33.767462Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:00:33.772297Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:901:2732], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-31T16:00:33.815605Z node 4 :TX_PROXY ERROR: Actor# [4:957:2769] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:00:33.928597Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqpexrrbe730vkhg69pda1qv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NGQ1YzgzYTQtOTdjN2E5YWQtNzc1NWY1MGEtNjgzOWJiNDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:00:33.931743Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [4:988:2786], serverId# [4:989:2787], sessionId# [0:0:0] 2025-03-31T16:00:33.932278Z node 4 :TX_DATASHARD DEBUG: Executing write operation for [0:3] at 72075186224037888 2025-03-31T16:00:33.942122Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 1 Group: 1743436833941927 Step: 2000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 40b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-03-31T16:00:33.942416Z node 4 :TX_DATASHARD DEBUG: Executed write operation for [0:3] at 72075186224037888, row count=1 2025-03-31T16:00:33.954468Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 40 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2025-03-31T16:00:33.954599Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:00:34.080564Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqpexs585pe7ra053j27dp5a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=YmY2Yzk2ZjQtMWJmNmNlODEtNjlhNDhhODktZmNhM2ZhNDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-31T16:00:34.083330Z node 4 :TX_DATASHARD DEBUG: Executing write operation for [0:4] at 72075186224037888 2025-03-31T16:00:34.083697Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 2 Group: 1743436834083576 Step: 2000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 18b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-03-31T16:00:34.083892Z node 4 :TX_DATASHARD DEBUG: Executed write operation for [0:4] at 72075186224037888, row count=1 2025-03-31T16:00:34.095325Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 18 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2025-03-31T16:00:34.095411Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:00:34.098287Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [4:1016:2805], serverId# [4:1017:2806], sessionId# [0:0:0] 2025-03-31T16:00:34.108116Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [4:1018:2807], serverId# [4:1019:2808], sessionId# [0:0:0] >> TOlap::CustomDefaultPresets >> TOlap::CreateTableWithNullableKeys [GOOD] >> TOlap::CreateDropStandaloneTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlapNaming::CreateColumnStoreFailed [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:00:33.663921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:00:33.664036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:00:33.664079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:00:33.664130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:00:33.664191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:00:33.664218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:00:33.664295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:00:33.664386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:00:33.664781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:00:33.773079Z 
node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:00:33.773150Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:00:33.785839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:00:33.785960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:00:33.786120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:00:33.796026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:00:33.796272Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:00:33.797017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:00:33.797264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:00:33.804840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:00:33.806193Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:00:33.806277Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:00:33.806468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:00:33.806536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:00:33.806584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:00:33.806741Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:00:33.822012Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:00:34.053142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:00:34.053413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:00:34.053638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:00:34.053887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:00:34.053953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:00:34.056701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:00:34.056866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: 
StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:00:34.057105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:00:34.057190Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:00:34.057230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:00:34.057270Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:00:34.059784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:00:34.059874Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:00:34.059938Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:00:34.062230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:00:34.062294Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:00:34.062349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:00:34.062422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:00:34.073074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:00:34.075367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:00:34.075613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:00:34.076772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:00:34.076937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:00:34.077022Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:00:34.077342Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:00:34.077403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:00:34.077612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:00:34.077718Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:00:34.080290Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:00:34.080358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:00:34.080586Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:00:34.080664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:00:34.080972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:00:34.081027Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:00:34.081136Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:00:34.081173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:00:34.081218Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:00:34.081256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:00:34.081312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:00:34.081353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:00:34.081392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:00:34.081424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:00:34.081495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:00:34.081535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:00:34.081572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:00:34.083721Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:00:34.083867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:00:34.083921Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
AT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:00:35.887048Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:00:35.887203Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:00:35.887438Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:00:35.887500Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:00:35.887540Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:00:35.887579Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:00:35.889523Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:00:35.889598Z node 2 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:00:35.889639Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:00:35.891631Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:00:35.891687Z node 2 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:00:35.891734Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:00:35.891807Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:00:35.891947Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:00:35.893603Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:00:35.893803Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:00:35.894659Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:00:35.894794Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 8589936749 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:00:35.894849Z node 2 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 
1:0, at tablet# 72057594046678944 2025-03-31T16:00:35.895147Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:00:35.895209Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:00:35.895386Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:00:35.895469Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:00:35.897478Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:00:35.897563Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:00:35.897763Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:00:35.897808Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:00:35.898176Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:00:35.898227Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:00:35.898350Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:00:35.898387Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:00:35.898430Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:00:35.898464Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:00:35.898508Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:00:35.898549Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:00:35.898592Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:00:35.898628Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:00:35.898701Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:00:35.898744Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:00:35.898783Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:00:35.899353Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:00:35.899457Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:00:35.899508Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, 
txId: 1 2025-03-31T16:00:35.899550Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-03-31T16:00:35.899593Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:00:35.899691Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-03-31T16:00:35.902624Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-03-31T16:00:35.903121Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-03-31T16:00:35.906399Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateColumnStore CreateColumnStore { Name: "OlapStore" ColumnShardCount: 1 SchemaPresets { Name: "default" Schema { Columns { Name: "timestamp" Type: "Timestamp" NotNull: true } Columns { Name: "data" Type: "Utf8" } Columns { Name: "mess age" Type: "Utf8" } KeyColumnNames: "timestamp" } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:00:35.906771Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TCreateOlapStore Propose, path: /MyRoot/OlapStore, opId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:00:35.906984Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Invalid name for column 'mess age', at schemeshard: 72057594046678944 2025-03-31T16:00:35.907529Z node 2 :TX_PROXY DEBUG: actor# [2:269:2260] Bootstrap 2025-03-31T16:00:35.938401Z node 2 :TX_PROXY DEBUG: actor# [2:269:2260] Become StateWork (SchemeCache [2:274:2265]) 2025-03-31T16:00:35.939245Z node 2 :TX_PROXY DEBUG: actor# [2:269:2260] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-31T16:00:35.946301Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Invalid name for column \'mess age\'" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:00:35.946495Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Invalid name for column 'mess age', operation: CREATE COLUMN STORE, path: /MyRoot/OlapStore 2025-03-31T16:00:35.947043Z node 2 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-03-31T16:00:35.947301Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-03-31T16:00:35.947348Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-03-31T16:00:35.947754Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-03-31T16:00:35.947864Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-31T16:00:35.947907Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter 
[2:284:2275] TestWaitNotification: OK eventTxId 101 2025-03-31T16:00:35.948346Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:00:35.948523Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/OlapStore" took 216us result status StatusPathDoesNotExist 2025-03-31T16:00:35.948746Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/OlapStore\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/OlapStore" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlapNaming::AlterColumnStoreFailed [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:00:32.535645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:00:32.535757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:00:32.535803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:00:32.535851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:00:32.535900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:00:32.535928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:00:32.535984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:00:32.536059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:00:32.536409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:00:32.787381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:00:32.787452Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:00:32.804418Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:00:32.804529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:00:32.804707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:00:32.840036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:00:32.840306Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:00:32.840979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:00:32.841234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:00:32.847836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:00:32.849195Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:00:32.849272Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:00:32.849445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:00:32.849504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:00:32.849563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:00:32.849743Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:00:32.868009Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:00:33.276857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:00:33.277097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:00:33.277301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:00:33.277542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:00:33.277610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:00:33.288050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:00:33.288218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:00:33.288416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 
72057594046678944 2025-03-31T16:00:33.288502Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:00:33.288539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:00:33.288571Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:00:33.300198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:00:33.300279Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:00:33.300319Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:00:33.307834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:00:33.307890Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:00:33.307944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:00:33.308016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:00:33.319232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:00:33.327925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:00:33.328227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:00:33.329659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:00:33.329786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:00:33.329852Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:00:33.330149Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:00:33.330208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:00:33.330393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:00:33.330476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: 
Erasing txId 1 2025-03-31T16:00:33.336563Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:00:33.336641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:00:33.336834Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:00:33.336882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:00:33.337152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:00:33.337197Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:00:33.337293Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:00:33.337332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:00:33.337372Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:00:33.337404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:00:33.337452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:00:33.337494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:00:33.337525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:00:33.337552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:00:33.337628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:00:33.337664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:00:33.337707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:00:33.344086Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:00:33.344268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:00:33.344341Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
RD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-03-31T16:00:35.768013Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2025-03-31T16:00:35.774390Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186233409546;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=102;fline=tx_controller.cpp:211;event=finished_tx;tx_id=102; 2025-03-31T16:00:35.774702Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:00:35.774824Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 8589936749 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:00:35.774889Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCreateColumnTable TPropose operationId# 102:0 HandleReply TEvOperationPlan at tablet: 72057594046678944, stepId: 5000003 2025-03-31T16:00:35.779607Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2025-03-31T16:00:35.779819Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-31T16:00:35.779895Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-31T16:00:35.788243Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:00:35.788329Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T16:00:35.788546Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-03-31T16:00:35.788722Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:00:35.788800Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-03-31T16:00:35.788853Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 3 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-03-31T16:00:35.789256Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:00:35.789309Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCreateColumnTable TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2025-03-31T16:00:35.789368Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TCreateColumnTable TProposedWaitParts operationId# 102:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546 2025-03-31T16:00:35.790235Z node 2 :FLAT_TX_SCHEMESHARD 
INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:00:35.790342Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:00:35.790382Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-03-31T16:00:35.790425Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-03-31T16:00:35.790488Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-31T16:00:35.791951Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:00:35.792032Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:00:35.792057Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-03-31T16:00:35.792088Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2025-03-31T16:00:35.792116Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-03-31T16:00:35.792215Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-03-31T16:00:35.802039Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2025-03-31T16:00:35.802174Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:0 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:3 msg type: 268697639 2025-03-31T16:00:35.802253Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 0, tablet: 72057594037968897 2025-03-31T16:00:35.802646Z node 2 :HIVE INFO: [72057594037968897] TEvUpdateTabletsObject, msg: ObjectId: 7726343884038809171 TabletIds: 72075186233409546 TxId: 102 TxPartId: 0 2025-03-31T16:00:35.803033Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Update tablets object reply, message: Status: OK TxId: 102 TxPartId: 0, at schemeshard: 72057594046678944 2025-03-31T16:00:35.803333Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Status: OK TxId: 102 TxPartId: 0 2025-03-31T16:00:35.807336Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-31T16:00:35.808984Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-31T16:00:35.810611Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 
72057594046678944 2025-03-31T16:00:35.823777Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 102 2025-03-31T16:00:35.823839Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-03-31T16:00:35.823969Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 102 FAKE_COORDINATOR: Erasing txId 102 2025-03-31T16:00:35.835906Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:00:35.836093Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:00:35.836139Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-03-31T16:00:35.836273Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-31T16:00:35.836312Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T16:00:35.836360Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-31T16:00:35.836391Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T16:00:35.836427Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-03-31T16:00:35.836516Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:341:2320] message: TxId: 102 2025-03-31T16:00:35.836579Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T16:00:35.836641Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-03-31T16:00:35.836671Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-03-31T16:00:35.836807Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-31T16:00:35.838832Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-31T16:00:35.838888Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:404:2376] TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 2025-03-31T16:00:35.845021Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterColumnStore AlterColumnStore { Name: "OlapStore" AlterSchemaPresets { Name: "default" AlterSchema { AddColumns { Name: "mess age" Type: "Utf8" } } } } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:00:35.845300Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TAlterOlapStore Propose, path: /MyRoot/OlapStore, opId: 103:0, at schemeshard: 72057594046678944 2025-03-31T16:00:35.845573Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusSchemeError, reason: Invalid name for column 'mess age', at schemeshard: 72057594046678944 2025-03-31T16:00:35.851969Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusSchemeError Reason: "Invalid name for column \'mess age\'" TxId: 103 
SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:00:35.852139Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusSchemeError, reason: Invalid name for column 'mess age', operation: ALTER COLUMN STORE, path: /MyRoot/OlapStore TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-03-31T16:00:35.852443Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-03-31T16:00:35.852483Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-03-31T16:00:35.852879Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-03-31T16:00:35.852973Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-03-31T16:00:35.853008Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:442:2414] TestWaitNotification: OK eventTxId 103 >> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW+VolatileTxs [GOOD] >> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW-VolatileTxs >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-58 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-59 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::CreateTableWithNullableKeys [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:00:34.381586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:00:34.381705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:00:34.381758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:00:34.381810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:00:34.381866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:00:34.381896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:00:34.381952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:00:34.382050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:00:34.382386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:00:34.490633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 
2025-03-31T16:00:34.490701Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:00:34.496118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:00:34.496229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:00:34.496383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:00:34.502978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:00:34.503238Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:00:34.503881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:00:34.504122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:00:34.506106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:00:34.507310Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:00:34.507381Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:00:34.507550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:00:34.507607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:00:34.507650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:00:34.507790Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:00:34.516488Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:00:34.693075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:00:34.693330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:00:34.693541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:00:34.693791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:00:34.693865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:00:34.700646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:00:34.700806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 
2025-03-31T16:00:34.701049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:00:34.701130Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:00:34.701167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:00:34.701202Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:00:34.708283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:00:34.708363Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:00:34.708405Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:00:34.713138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:00:34.713348Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:00:34.713406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:00:34.713471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:00:34.717193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:00:34.721186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:00:34.721415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:00:34.722484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:00:34.722651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:00:34.722717Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:00:34.723054Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:00:34.723132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:00:34.723312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:00:34.723389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no 
IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:00:34.733000Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:00:34.733077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:00:34.733286Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:00:34.733338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:00:34.733648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:00:34.733698Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:00:34.733793Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:00:34.733824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:00:34.733860Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:00:34.733889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:00:34.733942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:00:34.733987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:00:34.734019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:00:34.734049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:00:34.734128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:00:34.734161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:00:34.734206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:00:34.736882Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:00:34.737059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:00:34.737105Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
8944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-03-31T16:00:36.833211Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-03-31T16:00:36.833381Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:00:36.833437Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 104, path id: 1 2025-03-31T16:00:36.833491Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 104, path id: 3 2025-03-31T16:00:36.833518Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 104, path id: 4 2025-03-31T16:00:36.833800Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-31T16:00:36.833864Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropColumnTable TProposedWaitParts operationId# 104:0 ProgressState at schemeshard: 72057594046678944 2025-03-31T16:00:36.833930Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TDropColumnTable TProposedWaitParts operationId# 104:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409547 2025-03-31T16:00:36.834793Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-03-31T16:00:36.834911Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-03-31T16:00:36.834950Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-03-31T16:00:36.834994Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-03-31T16:00:36.835042Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-03-31T16:00:36.840392Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 104 2025-03-31T16:00:36.840497Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 104 2025-03-31T16:00:36.840527Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-03-31T16:00:36.840561Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-03-31T16:00:36.840612Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-03-31T16:00:36.845085Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, 
msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2025-03-31T16:00:36.845203Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2025-03-31T16:00:36.845233Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-03-31T16:00:36.845265Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 7 2025-03-31T16:00:36.845298Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-31T16:00:36.845390Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-03-31T16:00:36.847472Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 104:0 from tablet: 72057594046678944 to tablet: 72075186233409547 cookie: 72057594046678944:2 msg type: 275382275 2025-03-31T16:00:36.853581Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-03-31T16:00:36.853769Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-03-31T16:00:36.854787Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-03-31T16:00:36.867539Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409547 TxId: 104 2025-03-31T16:00:36.867610Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409547, partId: 0 2025-03-31T16:00:36.867791Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409547 TxId: 104 2025-03-31T16:00:36.867853Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 129 -> 130 FAKE_COORDINATOR: Erasing txId 104 2025-03-31T16:00:36.870556Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-31T16:00:36.870739Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-31T16:00:36.870794Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropColumnTable TProposedDeleteParts operationId# 104:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:00:36.870917Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-03-31T16:00:36.871054Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-03-31T16:00:36.871107Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-31T16:00:36.871153Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-03-31T16:00:36.871196Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-31T16:00:36.871235Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2025-03-31T16:00:36.871331Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:371:2350] message: TxId: 104 2025-03-31T16:00:36.871383Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-31T16:00:36.871443Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-03-31T16:00:36.871483Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-03-31T16:00:36.871607Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-03-31T16:00:36.879248Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-03-31T16:00:36.879402Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-03-31T16:00:36.879463Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [2:608:2568] 2025-03-31T16:00:36.880010Z node 2 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-03-31T16:00:36.880677Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;self_id=[2:475:2444];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; Forgetting tablet 72075186233409547 2025-03-31T16:00:36.884608Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-03-31T16:00:36.885550Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-03-31T16:00:36.886324Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-31T16:00:36.886384Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-03-31T16:00:36.886450Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-03-31T16:00:36.889860Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-03-31T16:00:36.889942Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-03-31T16:00:36.890219Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 104 2025-03-31T16:00:36.890765Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyDir/MyTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:00:36.890974Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyDir/MyTable" took 245us result status StatusPathDoesNotExist 2025-03-31T16:00:36.891158Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyDir/MyTable\', error: path hasn\'t been 
resolved, nearest resolved path: \'/MyRoot/MyDir\' (id: [OwnerId: 72057594046678944, LocalPathId: 3])" Path: "/MyRoot/MyDir/MyTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/MyDir" LastExistedPrefixPathId: 3 LastExistedPrefixDescription { Self { Name: "MyDir" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-03-31T16:00:36.891761Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 4 SchemeshardId: 72057594046678944 Options { }, at schemeshard: 72057594046678944 2025-03-31T16:00:36.891854Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 4 took 103us result status StatusPathDoesNotExist 2025-03-31T16:00:36.891931Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'\', error: path is empty" Path: "" PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> SystemView::ShowCreateTablePartitionAtKeys [FAIL]
>> SystemView::ShowCreateTablePartitionSettings
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest
>> DistributedEraseTests::ConditionalEraseRowsShouldFailOnDeadShard [GOOD]
Test command err: 2025-03-31T16:00:13.502348Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:00:13.502771Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:00:13.502978Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001308/r3tmp/tmpBK8Ops/pdisk_1.dat 2025-03-31T16:00:13.982925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:00:14.035221Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:00:14.075506Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:00:14.075646Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:00:14.089100Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:00:14.183628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:00:14.281059Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:687:2586] 2025-03-31T16:00:14.281382Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:00:14.351411Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:00:14.351650Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:00:14.353762Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:00:14.353893Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:00:14.353960Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:00:14.354387Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:00:14.354770Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:00:14.354867Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:718:2586] in generation 1 2025-03-31T16:00:14.356753Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:691:2588] 2025-03-31T16:00:14.356993Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:00:14.367869Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:00:14.368035Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:00:14.369699Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-03-31T16:00:14.369775Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-03-31T16:00:14.369842Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-03-31T16:00:14.370186Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:00:14.370475Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:00:14.370542Z node 1 :TX_DATASHARD DEBUG: DataShard 
72075186224037889 persisting started state actor id [1:731:2588] in generation 1 2025-03-31T16:00:14.372149Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:695:2590] 2025-03-31T16:00:14.372411Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:00:14.382647Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:00:14.382766Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:00:14.384224Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2025-03-31T16:00:14.384307Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037890 2025-03-31T16:00:14.384366Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037890 2025-03-31T16:00:14.384688Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:00:14.384833Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:00:14.384903Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037890 persisting started state actor id [1:739:2590] in generation 1 2025-03-31T16:00:14.396054Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:00:14.450593Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-03-31T16:00:14.450832Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:00:14.450998Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:743:2617] 2025-03-31T16:00:14.451050Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-03-31T16:00:14.451110Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-03-31T16:00:14.451153Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-31T16:00:14.451550Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:00:14.451591Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037890 2025-03-31T16:00:14.451660Z node 1 :TX_DATASHARD DEBUG: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:00:14.451722Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037890, actorId: [1:744:2618] 2025-03-31T16:00:14.451748Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890 2025-03-31T16:00:14.451772Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2025-03-31T16:00:14.451797Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-03-31T16:00:14.452093Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:00:14.452127Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T16:00:14.452180Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:00:14.452245Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:745:2619] 2025-03-31T16:00:14.452274Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:00:14.452297Z node 1 :TX_DATASHARD INFO: 
Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T16:00:14.452340Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:00:14.452399Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-03-31T16:00:14.452503Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-03-31T16:00:14.452731Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-31T16:00:14.452779Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:00:14.452835Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-03-31T16:00:14.452880Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-03-31T16:00:14.453139Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:679:2582], serverId# [1:713:2602], sessionId# [0:0:0] 2025-03-31T16:00:14.453204Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037890 2025-03-31T16:00:14.453284Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037890 2025-03-31T16:00:14.453365Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T16:00:14.453411Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T16:00:14.453541Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-31T16:00:14.453832Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-03-31T16:00:14.453937Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-03-31T16:00:14.454404Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-03-31T16:00:14.454463Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:00:14.454499Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037890 TxInFly 0 2025-03-31T16:00:14.454535Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-03-31T16:00:14.455024Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:00:14.455061Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:00:14.455112Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:00:14.455148Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:00:14.455262Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:678:2581], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-31T16:00:14.455626Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:00:14.455775Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T16:00:14.455888Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T16:00:14.457928Z node 1 :TX_DATASHARD DEBUG: Discovered 
subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:00:14.458012Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-03-31T16:00:14.468998Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:00:14.469124Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-31T16:00:14.469797Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-03-31T16:00:14.469872Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-03-31T16:00:14.524601Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [1:680:2583], serverId# [1:762:2629], sessionId# [0:0:0] 2025-03-31T16:00:14.524821Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037890 2025-03-31T16:00:14.525086Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037890 txId 2 ... egularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-03-31T16:00:35.856941Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1107:2845] Handle TEvDataShard::TEvEraseRowsRequest 2025-03-31T16:00:35.857139Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1107:2845] Propose tx: txId# 281474976715662, shard# 72075186224037890, keys# 3, dependents# 0, dependencies# 1 2025-03-31T16:00:35.857269Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1107:2845] Propose tx: txId# 281474976715662, shard# 72075186224037888, keys# 3, dependents# 0, dependencies# 1 2025-03-31T16:00:35.857357Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1107:2845] Propose tx: txId# 281474976715662, shard# 72075186224037889, keys# 3, dependents# 2, dependencies# 0 2025-03-31T16:00:35.857661Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037890 2025-03-31T16:00:35.857881Z node 3 :TX_DATASHARD DEBUG: Prepared DistributedErase transaction txId 281474976715662 at tablet 72075186224037890 2025-03-31T16:00:35.858407Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:00:35.858530Z node 3 :TX_DATASHARD DEBUG: Prepared DistributedErase transaction txId 281474976715662 at tablet 72075186224037888 2025-03-31T16:00:35.858754Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-31T16:00:35.858873Z node 3 :TX_DATASHARD DEBUG: Prepared DistributedErase transaction txId 281474976715662 at tablet 72075186224037889 2025-03-31T16:00:35.871055Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037888 2025-03-31T16:00:35.871185Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:00:35.871312Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-03-31T16:00:35.871389Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037890 source 72075186224037890 dest 72075186224037888 consumer 72075186224037888 txId 
281474976715661 2025-03-31T16:00:35.871579Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1107:2845] HandlePropose TEvDataShard::TEvProposeTransactionResult: txId# 281474976715662, shard# 72075186224037888, status# 1 2025-03-31T16:00:35.871728Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715661 2025-03-31T16:00:35.871802Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037890 2025-03-31T16:00:35.871845Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037890 2025-03-31T16:00:35.871936Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037890 consumer 72075186224037890 txId 281474976715661 2025-03-31T16:00:35.871975Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037890 consumer 72075186224037890 txId 281474976715661 2025-03-31T16:00:35.872014Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1107:2845] HandlePropose TEvDataShard::TEvProposeTransactionResult: txId# 281474976715662, shard# 72075186224037889, status# 1 2025-03-31T16:00:35.872055Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1107:2845] HandlePropose TEvDataShard::TEvProposeTransactionResult: txId# 281474976715662, shard# 72075186224037890, status# 1 2025-03-31T16:00:35.872090Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1107:2845] Register plan: txId# 281474976715662, minStep# 1535, maxStep# 31535 2025-03-31T16:00:35.890085Z node 3 :TX_DATASHARD INFO: OnDetach: 72075186224037888 2025-03-31T16:00:35.890256Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037888 2025-03-31T16:00:35.893155Z node 3 :TX_DATASHARD ERROR: [DistEraser] [3:1107:2845] Reply: txId# 281474976715662, status# SHARD_UNKNOWN, error# Tx state unknown: reason# lost pipe while waiting for reply (plan), txId# 281474976715662, shard# 72075186224037888 2025-03-31T16:00:35.893391Z node 3 :TX_DATASHARD DEBUG: Client pipe to tablet 72075186224037888 from 72075186224037889 is reset 2025-03-31T16:00:35.893456Z node 3 :TX_DATASHARD DEBUG: Client pipe to tablet 72075186224037888 from 72075186224037890 is reset 2025-03-31T16:00:35.894131Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037889 2025-03-31T16:00:35.894196Z node 3 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 4, at: 72075186224037889 2025-03-31T16:00:35.894359Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-31T16:00:35.894426Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:00:35.894485Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 1 2025-03-31T16:00:35.894557Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-03-31T16:00:35.896497Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037889, clientId# [3:1102:2841], serverId# [3:1103:2842], sessionId# [0:0:0] 2025-03-31T16:00:35.943306Z node 3 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [3:1119:2856] 2025-03-31T16:00:35.943638Z node 3 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:00:35.962751Z node 3 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:00:35.964065Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:00:35.967004Z node 3 :TX_DATASHARD DEBUG: LoadChangeRecords: 
QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:00:35.967158Z node 3 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:00:35.967236Z node 3 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:00:35.967781Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:00:35.968169Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:00:35.968284Z node 3 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [3:1134:2856] in generation 2 2025-03-31T16:00:35.984024Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:00:35.984170Z node 3 :TX_DATASHARD INFO: Switched to work state Ready tabletId 72075186224037888 2025-03-31T16:00:35.984328Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-31T16:00:35.984727Z node 3 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [3:1137:2864] 2025-03-31T16:00:35.984798Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:00:35.984855Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-31T16:00:35.984897Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:00:35.985208Z node 3 :TX_DATASHARD DEBUG: TxInitSchemaDefaults.Execute 2025-03-31T16:00:35.985442Z node 3 :TX_DATASHARD DEBUG: TxInitSchemaDefaults.Complete 2025-03-31T16:00:35.986607Z node 3 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T16:00:35.986718Z node 3 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T16:00:35.986906Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1534 2025-03-31T16:00:35.986957Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:00:35.988083Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:00:35.988345Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:00:35.988418Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:00:35.988471Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 1 2025-03-31T16:00:35.988533Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:00:35.988749Z node 3 :TX_DATASHARD DEBUG: Start TTxProgressResendRS at tablet 72075186224037888 2025-03-31T16:00:35.988813Z node 3 :TX_DATASHARD INFO: Resend RS at 72075186224037888 from 72075186224037888 to 72075186224037889 txId 281474976715661 2025-03-31T16:00:35.988886Z node 3 :TX_DATASHARD DEBUG: Send RS 1 at 72075186224037888 from 72075186224037888 to 72075186224037889 txId 281474976715661 2025-03-31T16:00:35.989103Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 1500 next step 1534 2025-03-31T16:00:35.989181Z node 3 :TX_DATASHARD DEBUG: Receive RS at 72075186224037889 source 72075186224037888 dest 72075186224037889 producer 72075186224037888 txId 
281474976715661 2025-03-31T16:00:35.989270Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 1534 txid# 281474976715661 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-03-31T16:00:35.989337Z node 3 :TX_DATASHARD NOTICE: Outdated readset for 1534:281474976715661 at 72075186224037889 2025-03-31T16:00:35.989399Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037889 2025-03-31T16:00:35.989465Z node 3 :TX_DATASHARD DEBUG: Send RS Ack at 72075186224037889 {TEvReadSet step# 1534 txid# 281474976715661 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-03-31T16:00:35.989639Z node 3 :TX_DATASHARD DEBUG: Start TTxProgressResendRS at tablet 72075186224037888 2025-03-31T16:00:35.989675Z node 3 :TX_DATASHARD INFO: Resend RS at 72075186224037888 from 72075186224037888 to 72075186224037890 txId 281474976715661 2025-03-31T16:00:35.989710Z node 3 :TX_DATASHARD DEBUG: Send RS 2 at 72075186224037888 from 72075186224037888 to 72075186224037890 txId 281474976715661 2025-03-31T16:00:35.989855Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 281474976715661 2025-03-31T16:00:35.989961Z node 3 :TX_DATASHARD DEBUG: Receive RS at 72075186224037890 source 72075186224037888 dest 72075186224037890 producer 72075186224037888 txId 281474976715661 2025-03-31T16:00:35.990019Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037890 got read set: {TEvReadSet step# 1534 txid# 281474976715661 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 2 Flags# 0} 2025-03-31T16:00:35.990054Z node 3 :TX_DATASHARD NOTICE: Outdated readset for 1534:281474976715661 at 72075186224037890 2025-03-31T16:00:35.990105Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037890 2025-03-31T16:00:35.990153Z node 3 :TX_DATASHARD DEBUG: Send RS Ack at 72075186224037890 {TEvReadSet step# 1534 txid# 281474976715661 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 2 Flags# 0} 2025-03-31T16:00:35.990287Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037890 consumer 72075186224037890 txId 281474976715661
>> TOlap::CreateStoreWithDirs
>> TOlap::CreateDropTable [GOOD]
>> TOlap::CreateDropStandaloneTableDefaultSharding
>> TOlap::CustomDefaultPresets [GOOD]
>> TOlap::Decimal
>> TPersQueueCommonTest::Auth_CreateGrpcStreamWithInvalidTokenInInitialMetadata_SessionClosedWithUnauthenticatedError [GOOD]
>> TPersQueueCommonTest::Auth_MultipleInflightWriteUpdateTokenRequestWithDifferentValidToken_SessionClosedWithOverloadedError
>> DataShardSnapshots::LockedWriteWithAsyncIndexAndVolatileCommit+UseSink [GOOD]
>> DataShardSnapshots::LockedWriteWithAsyncIndexAndVolatileCommit-UseSink
|70.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/slow/unittest
>> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithInvalidToken_SessionClosedWithUnauthenticatedError [GOOD]
>> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithValidTokenButWithoutACL_SessionClosedWithUnauthorizedError
>> TOlap::Decimal [GOOD]
>> TPersqueueControlPlaneTestSuite::TestAddRemoveReadRule [GOOD]
>> TPersqueueDataPlaneTestSuite::WriteSession
>> TPQTestSlow::TestOnDiskStoredSourceIds
|70.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/slow/unittest
>> TOlap::CreateDropStandaloneTable [GOOD]
>> TOlap::AlterStore
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest
>> TOlap::Decimal [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:00:37.835642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:00:37.835752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:00:37.835801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:00:37.835851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:00:37.835900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:00:37.835928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:00:37.835981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:00:37.836059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:00:37.836401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:00:37.965933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:00:37.966005Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:00:37.972803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:00:37.972922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:00:37.973087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:00:37.986947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:00:37.987254Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:00:37.987933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:00:37.988190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:00:37.992050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:00:37.993262Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:00:37.993328Z node 1 :FLAT_TX_SCHEMESHARD INFO:
TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:00:37.993481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:00:37.993542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:00:37.993587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:00:37.993742Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:00:38.015524Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:00:38.143008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:00:38.143262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:00:38.143469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:00:38.143751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:00:38.143815Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:00:38.148333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:00:38.148509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:00:38.148733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:00:38.148840Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:00:38.148878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:00:38.148918Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:00:38.151606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:00:38.151692Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:00:38.151728Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:00:38.155343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:00:38.155398Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 
2025-03-31T16:00:38.155456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:00:38.155513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:00:38.165248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:00:38.167458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:00:38.167696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:00:38.168891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:00:38.169034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:00:38.169100Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:00:38.169419Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:00:38.169479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:00:38.169641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:00:38.169774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:00:38.177195Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:00:38.177244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:00:38.177478Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:00:38.177522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:00:38.177800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:00:38.177852Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:00:38.178063Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:00:38.178100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 
ready parts: 1/1 2025-03-31T16:00:38.178139Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:00:38.178175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:00:38.178245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:00:38.178351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:00:38.178383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:00:38.178413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:00:38.178482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:00:38.178519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:00:38.178551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:00:38.180734Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:00:38.180885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:00:38.180941Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 6:00:39.839766Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 101 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:00:39.846199Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2025-03-31T16:00:39.846384Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 101 at step: 5000002 2025-03-31T16:00:39.846841Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:00:39.846987Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 8589936749 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:00:39.847107Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCreateOlapStore TPropose operationId# 101:0 HandleReply TEvOperationPlan at tablet: 72057594046678944, stepId: 5000002 2025-03-31T16:00:39.847315Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 129 2025-03-31T16:00:39.847462Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:00:39.847524Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-31T16:00:39.848506Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186233409546;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=101;fline=tx_controller.cpp:211;event=finished_tx;tx_id=101; FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 2025-03-31T16:00:39.869250Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:00:39.869319Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:00:39.869530Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T16:00:39.869694Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:00:39.869763Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-03-31T16:00:39.869813Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-03-31T16:00:39.870268Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:00:39.870334Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCreateOlapStore TProposedWaitParts operationId# 101:0 ProgressState at tablet: 72057594046678944 2025-03-31T16:00:39.870424Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TCreateOlapStore TProposedWaitParts operationId# 101:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546 2025-03-31T16:00:39.871379Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T16:00:39.871508Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T16:00:39.871554Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-03-31T16:00:39.871601Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-03-31T16:00:39.871674Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:00:39.872988Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T16:00:39.873072Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 
2025-03-31T16:00:39.873102Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-03-31T16:00:39.873133Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-03-31T16:00:39.873167Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-31T16:00:39.873423Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2025-03-31T16:00:39.884107Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2025-03-31T16:00:39.885618Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-31T16:00:39.886163Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-31T16:00:39.916003Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 101 2025-03-31T16:00:39.916081Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-03-31T16:00:39.916242Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 101 FAKE_COORDINATOR: Erasing txId 101 2025-03-31T16:00:39.924281Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:00:39.924479Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:00:39.924522Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-03-31T16:00:39.924678Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-31T16:00:39.924722Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T16:00:39.924774Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-31T16:00:39.924806Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T16:00:39.924848Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2025-03-31T16:00:39.924918Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:341:2320] message: TxId: 101 2025-03-31T16:00:39.925007Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T16:00:39.925056Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-03-31T16:00:39.925093Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-03-31T16:00:39.925243Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-31T16:00:39.927797Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-31T16:00:39.927857Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:342:2321] TestWaitNotification: OK eventTxId 101 2025-03-31T16:00:39.928421Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:00:39.928707Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/OlapStore" took 299us result status StatusSuccess 2025-03-31T16:00:39.929482Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/OlapStore" PathDescription { Self { Name: "OlapStore" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnStore CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnStoreVersion: 1 } ChildrenExist: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnStoreDescription { Name: "OlapStore" ColumnShardCount: 1 ColumnShards: 72075186233409546 SchemaPresets { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Decimal(35,9)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 9 } NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } NextColumnFamilyId: 1 } } NextSchemaPresetId: 2 NextTtlSettingsPresetId: 1 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TPersQueueCommonTest::Auth_MultipleUpdateTokenRequestIterationsWithValidToken_GotUpdateTokenResponseForEachRequest [GOOD] >> TPersQueueCommonTest::Auth_WriteSessionWithValidTokenAndACEAndThenRemoveACEAndSendWriteRequest_SessionClosedWithUnauthorizedErrorAfterSuccessfullWriteResponse >> DataShardSnapshots::ShardRestartLockBasic [GOOD] >> DataShardSnapshots::ShardRestartAfterDropTable >> TOlap::CreateStoreWithDirs [GOOD] >> TOlap::CreateTable >> TPQTestSlow::TestWriteVeryBigMessage >> SlowTopicAutopartitioning::CDC_Write |70.8%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/tx_allocator_client/ut/unittest >> TOlap::AlterStore [GOOD] >> TOlap::AlterTtl >> TPersQueueCommonTest::TestLimiterLimitsWithBlobsRateLimit [GOOD] >> TPersQueueCommonTest::TestLimiterLimitsWithUserPayloadRateLimit |70.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut |70.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut |70.8%| [LD] {RESULT} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut |70.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TOlap::AlterTtl [GOOD] >> TOlap::CreateTable [GOOD] >> TOlap::CreateTableTtl >> TOlapNaming::CreateColumnTableOk [GOOD] >> TOlapNaming::CreateColumnTableFailed >> TPQCDTest::TestRelatedServicesAreRunning ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::AlterTtl [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:00:38.219580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:00:38.219692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:00:38.219731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:00:38.219771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:00:38.219815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:00:38.219836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:00:38.219881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:00:38.219963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:00:38.220244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:00:38.315661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:00:38.315728Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:00:38.320773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:00:38.320886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:00:38.321039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:00:38.325877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:00:38.326114Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:00:38.326779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS 
hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:00:38.327017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:00:38.329049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:00:38.330250Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:00:38.330315Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:00:38.330469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:00:38.330529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:00:38.330579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:00:38.330724Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:00:38.339549Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:00:38.563940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:00:38.564203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:00:38.564416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:00:38.564682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:00:38.564762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:00:38.572003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:00:38.572191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:00:38.572413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:00:38.572495Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:00:38.572540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:00:38.572599Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:00:38.577023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 
72057594046678944 2025-03-31T16:00:38.577110Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:00:38.577152Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:00:38.579885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:00:38.579962Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:00:38.580047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:00:38.580143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:00:38.584543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:00:38.587238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:00:38.587476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:00:38.588671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:00:38.588838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:00:38.588912Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:00:38.589260Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:00:38.589328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:00:38.589518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:00:38.589674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:00:38.592461Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:00:38.592517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:00:38.592787Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:00:38.592849Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:00:38.593167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:00:38.593222Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:00:38.593332Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:00:38.593371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:00:38.593415Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:00:38.593446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:00:38.593503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:00:38.593549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:00:38.593587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:00:38.593619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:00:38.593699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:00:38.593760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:00:38.593801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:00:38.596194Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:00:38.596409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:00:38.596476Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2025-03-31T16:00:43.865037Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 106, at schemeshard: 72057594046678944 2025-03-31T16:00:43.865098Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 0/1, is published: true 2025-03-31T16:00:43.865143Z node 3 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 106, at schemeshard: 72057594046678944 2025-03-31T16:00:43.902866Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: Status: PREPARED TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 106 MinStep: 0 MaxStep: 18446744073709551615 DomainCoordinators: 72057594046316545 2025-03-31T16:00:43.902950Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 106, tablet: 72075186233409546, partId: 0 2025-03-31T16:00:43.903187Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 106:0, at schemeshard: 72057594046678944, message: Status: PREPARED TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 106 MinStep: 0 MaxStep: 18446744073709551615 DomainCoordinators: 72057594046316545 2025-03-31T16:00:43.903251Z node 3 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046678944 2025-03-31T16:00:43.903326Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: CollectProposeTransactionResults accept TEvProposeTransactionResult, shard: 72075186233409546, shardIdx: 72057594046678944:1, operationId: 106:0, left await: 0, at schemeshard: 72057594046678944 2025-03-31T16:00:43.903371Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 106:0 3 -> 128 2025-03-31T16:00:43.909281Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 106:0, at schemeshard: 72057594046678944 2025-03-31T16:00:43.909514Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2025-03-31T16:00:43.909586Z node 3 :FLAT_TX_SCHEMESHARD INFO: TAlterColumnTable TPropose operationId# 106:0 HandleReply ProgressState at tablet: 72057594046678944 2025-03-31T16:00:43.909694Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 106 ready parts: 1/1 2025-03-31T16:00:43.909880Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 106 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:00:43.912052Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 106:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:106 msg type: 269090816 2025-03-31T16:00:43.912217Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 106, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 106 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 106 at step: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 106 at step: 5000007 2025-03-31T16:00:43.925085Z node 3 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186233409546;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=106;fline=tx_controller.cpp:211;event=finished_tx;tx_id=106; 2025-03-31T16:00:43.925427Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000007, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:00:43.925576Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 106 Coordinator: 72057594046316545 AckTo { RawX1: 135 RawX2: 12884904046 } } Step: 5000007 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:00:43.925654Z node 3 :FLAT_TX_SCHEMESHARD INFO: TAlterColumnTable TPropose operationId# 106:0 HandleReply TEvOperationPlan at tablet: 72057594046678944, stepId: 5000007 2025-03-31T16:00:43.926517Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 106:0 128 -> 129 2025-03-31T16:00:43.926783Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-31T16:00:43.926857Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000007 2025-03-31T16:00:43.936647Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:00:43.936713Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 106, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T16:00:43.936941Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 106, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-03-31T16:00:43.937126Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:00:43.937179Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:207:2209], at schemeshard: 72057594046678944, txId: 106, path id: 2 2025-03-31T16:00:43.937231Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:207:2209], at schemeshard: 72057594046678944, txId: 106, path id: 3 2025-03-31T16:00:43.937305Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2025-03-31T16:00:43.937366Z node 3 :FLAT_TX_SCHEMESHARD INFO: TAlterColumnTable TProposedWaitParts operationId# 106:0 ProgressState at tablet: 72057594046678944 2025-03-31T16:00:43.937447Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TAlterColumnTable TProposedWaitParts operationId# 106:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546 2025-03-31T16:00:43.943194Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 106 2025-03-31T16:00:43.943354Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 106 2025-03-31T16:00:43.943404Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 106 2025-03-31T16:00:43.943477Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 106, 
pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 8 2025-03-31T16:00:43.943554Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-31T16:00:43.945171Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 14 PathOwnerId: 72057594046678944, cookie: 106 2025-03-31T16:00:43.945274Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 14 PathOwnerId: 72057594046678944, cookie: 106 2025-03-31T16:00:43.945307Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 106 2025-03-31T16:00:43.945343Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 14 2025-03-31T16:00:43.945384Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-03-31T16:00:43.945478Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 0/1, is published: true 2025-03-31T16:00:43.955646Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 106:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2025-03-31T16:00:43.957092Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-03-31T16:00:43.965800Z node 3 :TX_TIERING ERROR: fline=manager.cpp:158;error=cannot_read_secrets;reason=Can't read access key: No such secret: SId:secret; 2025-03-31T16:00:43.966727Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-03-31T16:00:43.988022Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 106 2025-03-31T16:00:43.988103Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 106, tablet: 72075186233409546, partId: 0 2025-03-31T16:00:43.988283Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 106:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 106 FAKE_COORDINATOR: Erasing txId 106 2025-03-31T16:00:43.990855Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 106:0, at schemeshard: 72057594046678944 2025-03-31T16:00:43.991046Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2025-03-31T16:00:43.991120Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 106:0 ProgressState 2025-03-31T16:00:43.991301Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-03-31T16:00:43.991351Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-03-31T16:00:43.991402Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-03-31T16:00:43.991439Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-03-31T16:00:43.991484Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is 
published: true 2025-03-31T16:00:43.991567Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:342:2321] message: TxId: 106 2025-03-31T16:00:43.991624Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-03-31T16:00:43.991669Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 106:0 2025-03-31T16:00:43.991707Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 106:0 2025-03-31T16:00:43.991858Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-31T16:00:43.994300Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-03-31T16:00:43.994357Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [3:551:2522] TestWaitNotification: OK eventTxId 106 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/MyRoot/Tier1' stopped at tablet 72075186233409546 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/MyRoot/Tier1' stopped at tablet 72075186233409546 >> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_3 [GOOD] >> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_10 >> TOlap::CreateTableTtl [GOOD] >> TOlapNaming::CreateColumnTableFailed [GOOD] >> TPQCDTest::TestCloudClientsAreConsistentlyDistributed |70.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::CreateTableTtl [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-31T16:00:39.880954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:00:39.881053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:00:39.881110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:00:39.881158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:00:39.881227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:00:39.881257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:00:39.881319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:00:39.881390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 
2025-03-31T16:00:39.881734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:00:40.131425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:00:40.131494Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:00:40.177043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:00:40.177479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:00:40.177683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:00:40.210219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:00:40.210569Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:00:40.211338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:00:40.211776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:00:40.225886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:00:40.227310Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:00:40.227385Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:00:40.227508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:00:40.227591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:00:40.227641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:00:40.227866Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:00:40.250922Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-03-31T16:00:40.622541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:00:40.622805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:00:40.623008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:00:40.623281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:00:40.623350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:00:40.628853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 
72057594046678944 2025-03-31T16:00:40.629004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:00:40.629224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:00:40.629296Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:00:40.629358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:00:40.629396Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:00:40.639283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:00:40.639364Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:00:40.639405Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:00:40.644234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:00:40.644317Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:00:40.644361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:00:40.644417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:00:40.671728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:00:40.689217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:00:40.689421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:00:40.690532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:00:40.690696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:00:40.690761Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:00:40.691062Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:00:40.691146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:00:40.691316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:00:40.691412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:00:40.704793Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:00:40.704868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:00:40.705072Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:00:40.705119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:00:40.705522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:00:40.705577Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:00:40.705679Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:00:40.705718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:00:40.705755Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:00:40.705788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:00:40.705858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:00:40.705903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:00:40.705936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:00:40.705966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:00:40.706042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:00:40.706077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:00:40.706107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:00:40.715198Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:00:40.715441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:00:40.715488Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
SHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2025-03-31T16:00:45.904548Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 105:0 ProgressState 2025-03-31T16:00:45.904669Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-03-31T16:00:45.904706Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-03-31T16:00:45.904752Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-03-31T16:00:45.904791Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-03-31T16:00:45.904831Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: true 2025-03-31T16:00:45.904913Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:342:2321] message: TxId: 105 2025-03-31T16:00:45.904971Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-03-31T16:00:45.905014Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2025-03-31T16:00:45.905049Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2025-03-31T16:00:45.905184Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2025-03-31T16:00:45.912443Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-03-31T16:00:45.912543Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [3:521:2492] TestWaitNotification: OK eventTxId 105 2025-03-31T16:00:45.913260Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore/Table3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:00:45.913555Z node 3 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/OlapStore/Table3" took 326us result status StatusSuccess 2025-03-31T16:00:45.914051Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/OlapStore/Table3" PathDescription { Self { Name: "Table3" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 105 CreateStep: 5000006 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 ColumnTableTtlSettingsVersion: 1 } ChildrenExist: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 
PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnTableDescription { Name: "Table3" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } NextColumnFamilyId: 1 } TtlSettings { Enabled { ColumnName: "timestamp" ColumnUnit: UNIT_AUTO Tiers { ApplyAfterSeconds: 360 EvictToExternalStorage { Storage: "/MyRoot/Tier1" } } } Version: 1 } SchemaPresetId: 1 SchemaPresetName: "default" ColumnStorePathId { OwnerId: 72057594046678944 LocalId: 2 } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } } } PathId: 6 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 106 2025-03-31T16:00:45.920502Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/OlapStore" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "Table4" TtlSettings { Enabled { ColumnName: "timestamp" ColumnUnit: UNIT_AUTO Tiers { ApplyAfterSeconds: 3600000000 EvictToExternalStorage { Storage: "/MyRoot/Tier1" } } } } ColumnShardCount: 1 } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:00:45.920890Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TCreateColumnTable Propose, path: /MyRoot/OlapStore/Table4, opId: 106:0, at schemeshard: 72057594046678944 2025-03-31T16:00:45.921309Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 2], parent name: OlapStore, child name: Table4, child id: [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2025-03-31T16:00:45.921396Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 0 2025-03-31T16:00:45.921642Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 1 2025-03-31T16:00:45.921966Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 106:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:00:45.922015Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 106:0, at schemeshard: 72057594046678944 2025-03-31T16:00:45.922137Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-03-31T16:00:45.922201Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 2 2025-03-31T16:00:45.935417Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose 
Complete, txId: 106, response: Status: StatusAccepted TxId: 106 SchemeshardId: 72057594046678944 PathId: 7, at schemeshard: 72057594046678944 2025-03-31T16:00:45.935664Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 106, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE COLUMN TABLE, path: /MyRoot/OlapStore/ 2025-03-31T16:00:45.935957Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:00:45.936008Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 106, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T16:00:45.936252Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 106, path id: [OwnerId: 72057594046678944, LocalPathId: 7] 2025-03-31T16:00:45.936362Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:00:45.936405Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:207:2209], at schemeshard: 72057594046678944, txId: 106, path id: 2 2025-03-31T16:00:45.936454Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:207:2209], at schemeshard: 72057594046678944, txId: 106, path id: 7 2025-03-31T16:00:45.936910Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2025-03-31T16:00:45.936967Z node 3 :FLAT_TX_SCHEMESHARD INFO: TCreateColumnTable TConfigureParts operationId# 106:0 ProgressState at tabletId# 72057594046678944 2025-03-31T16:00:45.937171Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TCreateColumnTable TConfigureParts operationId# 106:0 ProgressState Propose modify scheme on shard tabletId: 72075186233409546 2025-03-31T16:00:45.938263Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 10 PathOwnerId: 72057594046678944, cookie: 106 2025-03-31T16:00:45.938380Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 10 PathOwnerId: 72057594046678944, cookie: 106 2025-03-31T16:00:45.938422Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 106 2025-03-31T16:00:45.938467Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 10 2025-03-31T16:00:45.938515Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2025-03-31T16:00:45.939794Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 1 PathOwnerId: 72057594046678944, cookie: 106 2025-03-31T16:00:45.939877Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 1 PathOwnerId: 72057594046678944, cookie: 106 2025-03-31T16:00:45.939908Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 106 2025-03-31T16:00:45.939956Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 106, 
pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 1 2025-03-31T16:00:45.939985Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2025-03-31T16:00:45.940057Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 0/1, is published: true 2025-03-31T16:00:45.948268Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 106:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382272 2025-03-31T16:00:45.948435Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 106, partId: 0, tablet: 72075186233409546 2025-03-31T16:00:45.960094Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-03-31T16:00:45.960334Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 TestModificationResult got TxId: 106, wait until txId: 106 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/MyRoot/Tier1' stopped at tablet 72075186233409546 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/MyRoot/Tier1' stopped at tablet 72075186233409546 >> TPersQueueCommonTest::Auth_MultipleInflightWriteUpdateTokenRequestWithDifferentValidToken_SessionClosedWithOverloadedError [GOOD] >> TOlapNaming::AlterColumnTableOk [GOOD] >> TOlapNaming::AlterColumnTableFailed >> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW-VolatileTxs [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlapNaming::CreateColumnTableFailed [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-31T16:00:32.207915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:00:32.208015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:00:32.208079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:00:32.208152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:00:32.208217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:00:32.208253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:00:32.208319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:00:32.208396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, 
IsManualStartup# false 2025-03-31T16:00:32.208724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:00:32.427441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:00:32.427507Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:00:32.446302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:00:32.446718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:00:32.446905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:00:32.462383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:00:32.462714Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:00:32.463426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:00:32.463806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:00:32.467682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:00:32.468974Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:00:32.469039Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:00:32.469153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:00:32.469223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:00:32.469275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:00:32.469491Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:00:32.479954Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-03-31T16:00:32.884403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:00:32.884676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:00:32.884871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:00:32.885104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:00:32.885162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:00:32.896468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at 
schemeshard: 72057594046678944 2025-03-31T16:00:32.896650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:00:32.896873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:00:32.896947Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:00:32.897002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:00:32.897038Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:00:32.904064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:00:32.904144Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:00:32.904201Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:00:32.912124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:00:32.912230Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:00:32.912270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:00:32.912329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:00:32.926707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:00:32.940366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:00:32.940570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:00:32.941697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:00:32.941848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:00:32.941928Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:00:32.942263Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:00:32.942322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:00:32.942488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:00:32.942571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:00:32.960246Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:00:32.960316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:00:32.960500Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:00:32.960542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:00:32.960964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:00:32.961011Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:00:32.961118Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:00:32.961150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:00:32.961189Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:00:32.961222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:00:32.961288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:00:32.961338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:00:32.961370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:00:32.961397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:00:32.961468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:00:32.961510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:00:32.961545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:00:32.971807Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:00:32.971993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:00:32.972043Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
State, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:00:45.901779Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:00:45.901836Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:00:45.920353Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:00:45.920427Z node 2 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:00:45.920474Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:00:45.940052Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:00:45.940115Z node 2 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:00:45.940163Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:00:45.940219Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:00:45.940365Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:00:45.948192Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:00:45.948433Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:00:45.949424Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:00:45.949561Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 8589936747 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:00:45.949616Z node 2 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:00:45.949874Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:00:45.949928Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:00:45.950090Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:00:45.950159Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:00:45.968764Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard 
DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:00:45.968828Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:00:45.969012Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:00:45.969053Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:00:45.969132Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:00:45.969178Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:00:45.969280Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:00:45.969325Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:00:45.969363Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:00:45.969396Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:00:45.969434Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:00:45.969484Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:00:45.969531Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:00:45.969565Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:00:45.969636Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:00:45.969676Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:00:45.969707Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:00:45.970771Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:00:45.970876Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:00:45.970915Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-03-31T16:00:45.970953Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-03-31T16:00:45.970991Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:00:45.973581Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-03-31T16:00:46.010035Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-03-31T16:00:46.010568Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at 
schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-03-31T16:00:46.013966Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "TestTable" Schema { Columns { Name: "Id" Type: "Int32" NotNull: true } Columns { Name: "mess age" Type: "Utf8" } KeyColumnNames: "Id" } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:00:46.014274Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TCreateColumnTable Propose, path: /MyRoot/TestTable, opId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:00:46.014491Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Invalid name for column 'mess age', at schemeshard: 72057594046678944 2025-03-31T16:00:46.014897Z node 2 :TX_PROXY DEBUG: actor# [2:268:2259] Bootstrap 2025-03-31T16:00:46.034542Z node 2 :TX_PROXY DEBUG: actor# [2:268:2259] Become StateWork (SchemeCache [2:273:2264]) 2025-03-31T16:00:46.035932Z node 2 :TX_PROXY DEBUG: actor# [2:268:2259] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-31T16:00:46.045779Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Invalid name for column \'mess age\'" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:00:46.045954Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Invalid name for column 'mess age', operation: CREATE COLUMN TABLE, path: /MyRoot/ 2025-03-31T16:00:46.046626Z node 2 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-03-31T16:00:46.046872Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-03-31T16:00:46.046920Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-03-31T16:00:46.047362Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-03-31T16:00:46.047491Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-31T16:00:46.047533Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:283:2274] TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-03-31T16:00:46.050513Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "TestTable" Schema { Columns { Name: "Id" Type: "Int32" NotNull: true } Columns { Name: "~!@#$%^&*()+=asdfa" Type: "Utf8" } KeyColumnNames: "Id" } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:00:46.050769Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TCreateColumnTable Propose, path: /MyRoot/TestTable, opId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:00:46.050947Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusSchemeError, reason: Invalid name for column '~!@#$%^&*()+=asdfa', at schemeshard: 72057594046678944 
2025-03-31T16:00:46.059977Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusSchemeError Reason: "Invalid name for column \'~!@#$%^&*()+=asdfa\'" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:00:46.060121Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusSchemeError, reason: Invalid name for column '~!@#$%^&*()+=asdfa', operation: CREATE COLUMN TABLE, path: /MyRoot/ TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-03-31T16:00:46.060403Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-03-31T16:00:46.060441Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-03-31T16:00:46.060805Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-03-31T16:00:46.060901Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-31T16:00:46.060930Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:290:2281] TestWaitNotification: OK eventTxId 102 >> SystemView::ShowCreateTablePartitionSettings [GOOD] >> SystemView::ShowCreateTableReadReplicas >> ObjectStorageListingTest::ListingNoFilter >> DbCounters::TabletsSimple [GOOD] >> LabeledDbCounters::OneTablet >> TPersqueueDataPlaneTestSuite::WriteSession [GOOD] >> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW-VolatileTxs [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueCommonTest::Auth_MultipleInflightWriteUpdateTokenRequestWithDifferentValidToken_SessionClosedWithOverloadedError [GOOD] Test command err: === Server->StartServer(false); 2025-03-31T16:00:27.471723Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004157517870716:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:00:27.471769Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:00:28.209567Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488004157164281067:2211];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:00:28.208634Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a0d/r3tmp/tmpug42ZL/pdisk_1.dat 2025-03-31T16:00:28.219733Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-31T16:00:28.318116Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:00:28.474254Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:00:29.120628Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:00:29.120728Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Disconnected -> Connecting 2025-03-31T16:00:29.126055Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:00:29.126168Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:00:29.137589Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:00:29.137741Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:00:29.139186Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:00:29.210445Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3705, node 1 2025-03-31T16:00:29.483254Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:00:29.621789Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:00:29.621853Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:00:29.625138Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/0l5c/000a0d/r3tmp/yandex7m9dt2.tmp 2025-03-31T16:00:29.625159Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/0l5c/000a0d/r3tmp/yandex7m9dt2.tmp 2025-03-31T16:00:29.625316Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/0l5c/000a0d/r3tmp/yandex7m9dt2.tmp 2025-03-31T16:00:29.625445Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:00:29.708975Z INFO: TTestServer started on Port 65108 GrpcPort 3705 TClient is connected to server localhost:65108 PQClient connected to localhost:3705 === TenantModeEnabled() = 1 === Init PQ - start server on port 3705 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:00:30.254895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-03-31T16:00:30.255507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:00:30.255732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-03-31T16:00:30.256021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-31T16:00:30.256085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:00:30.260010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-03-31T16:00:30.260144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-03-31T16:00:30.260326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:00:30.260384Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-03-31T16:00:30.260399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-03-31T16:00:30.260433Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2025-03-31T16:00:30.265508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:00:30.265567Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-03-31T16:00:30.265583Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 waiting... 
2025-03-31T16:00:30.268040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:00:30.268082Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:00:30.268120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-31T16:00:30.268155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-03-31T16:00:30.272548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:00:30.275903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-03-31T16:00:30.276048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-03-31T16:00:30.277282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:00:30.277319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2025-03-31T16:00:30.277338Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:00:30.280150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743436830324, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-31T16:00:30.280316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743436830324 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-03-31T16:00:30.280369Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-31T16:00:30.280669Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-03-31T16:00:30.280700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-31T16:00:30.280845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-03-31T16:00:30.280898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-03-31T16:00:30.283938Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-03-31T16:00:30.283967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-03-31T16:00:30.284119Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-03-31T16:00:30.284143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7488004166107806069:2448], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1 2025-03-31T16:00:30.284213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:00:30.284245Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-03-31T16:00:30.284327Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-03-31T16:00:30.284352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-03-31T16:00:30.284378Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-03-31T16:00:30.284388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-03-31T16:00:30.284406Z node 1 :FLAT_TX_SCH ... cookie: 1 sessionId: 12345678|df8f0939-68b13f5e-39dac5eb-74235e6b_0 2025-03-31T16:00:46.017410Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: 12345678|df8f0939-68b13f5e-39dac5eb-74235e6b_0 is DEAD Finish: 0 === InitializeWritePQService done === PersQueueClient 2025-03-31T16:00:46.017646Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison === InitializePQ completed BEFORE MODIFY PERMISSIONS 2025-03-31T16:00:46.052256Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root/acc" OperationType: ESchemeOpModifyACL ModifyACL { Name: "topic1" DiffACL: "\n\031\010\001\022\025\032\023test_user_0@builtin\n!\010\000\022\035\010\001\020\366\213\001\032\023test_user_0@builtin \003\n\031\010\001\022\025\032\023test_user_1@builtin\n!\010\000\022\035\010\001\020\366\213\001\032\023test_user_1@builtin \003\n\031\010\001\022\025\032\023test_user_2@builtin\n!\010\000\022\035\010\001\020\366\213\001\032\023test_user_2@builtin \003" } } TxId: 281474976720665 TabletId: 72057594046644480 PeerName: "ipv6:[::1]:46888" , at schemeshard: 72057594046644480 2025-03-31T16:00:46.052422Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root/acc/topic1, operationId: 281474976720665:0, at schemeshard: 72057594046644480 2025-03-31T16:00:46.052588Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046644480, LocalPathId: 10] name: topic1 type: EPathTypePersQueueGroup state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046644480, LocalPathId: 9] 2025-03-31T16:00:46.052599Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046644480, LocalPathId: 10] 2025-03-31T16:00:46.052749Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976720665:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2025-03-31T16:00:46.052768Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976720665:0, at schemeshard: 72057594046644480 2025-03-31T16:00:46.052837Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976720665:0 progress is 1/1 2025-03-31T16:00:46.052853Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720665 ready parts: 1/1 
2025-03-31T16:00:46.052871Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976720665:0 progress is 1/1 2025-03-31T16:00:46.052889Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720665 ready parts: 1/1 2025-03-31T16:00:46.052926Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 10] was 3 2025-03-31T16:00:46.052976Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720665, ready parts: 1/1, is published: false 2025-03-31T16:00:46.052995Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 10], at schemeshard: 72057594046644480 2025-03-31T16:00:46.053006Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720665 ready parts: 1/1 2025-03-31T16:00:46.053018Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976720665:0 2025-03-31T16:00:46.053034Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976720665, publications: 1, subscribers: 0 2025-03-31T16:00:46.053046Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976720665, [OwnerId: 72057594046644480, LocalPathId: 10], 3 2025-03-31T16:00:46.056163Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976720665, response: Status: StatusSuccess TxId: 281474976720665 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2025-03-31T16:00:46.056439Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976720665, database: /Root, subject: , status: StatusSuccess, operation: MODIFY ACL, path: /Root/acc/topic1, add access: +W:test_user_0@builtin, add access: +W:test_user_1@builtin, add access: +W:test_user_2@builtin, remove access: -():test_user_0@builtin:-, remove access: -():test_user_1@builtin:-, remove access: -():test_user_2@builtin:- 2025-03-31T16:00:46.056628Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-03-31T16:00:46.056640Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976720665, path id: [OwnerId: 72057594046644480, LocalPathId: 10] 2025-03-31T16:00:46.056846Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-03-31T16:00:46.056862Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:7488004212906059461:2369], at schemeshard: 72057594046644480, txId: 281474976720665, path id: 10 2025-03-31T16:00:46.057896Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 10 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976720665 2025-03-31T16:00:46.057983Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 10 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976720665 2025-03-31T16:00:46.057996Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976720665 2025-03-31T16:00:46.058013Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976720665, pathId: [OwnerId: 72057594046644480, LocalPathId: 10], version: 3 
2025-03-31T16:00:46.058030Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 10] was 4 2025-03-31T16:00:46.058102Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976720665, subscribers: 0 2025-03-31T16:00:46.063797Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976720665 2025-03-31T16:00:46.066955Z node 3 :PQ_WRITE_PROXY DEBUG: new grpc connection 2025-03-31T16:00:46.066982Z node 3 :PQ_WRITE_PROXY DEBUG: new session created cookie 2 2025-03-31T16:00:46.067313Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: grpc read done: success: 1 data: init_request { topic: "/Root/acc/topic1" message_group_id: "test-group-id" } 2025-03-31T16:00:46.067392Z node 3 :PQ_WRITE_PROXY INFO: session request cookie: 2 topic: "/Root/acc/topic1" message_group_id: "test-group-id" from ipv6:[::1]:46874 2025-03-31T16:00:46.067404Z node 3 :PQ_WRITE_PROXY INFO: write session: cookie=2 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:46874 proto=v1 topic=/Root/acc/topic1 durationSec=0 2025-03-31T16:00:46.067413Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2025-03-31T16:00:46.070454Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: describe result for acl check 2025-03-31T16:00:46.070635Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2025-03-31T16:00:46.070655Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-03-31T16:00:46.070666Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-03-31T16:00:46.070716Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7488004238675864341:2396] (SourceId=test-group-id, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2025-03-31T16:00:46.070736Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. 
session cookie: 2 sessionId: partition: 0 expectedGeneration: (NULL) 2025-03-31T16:00:46.079367Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 3, Generation: 1 2025-03-31T16:00:46.079621Z node 3 :PERSQUEUE INFO: new Cookie test-group-id|114b998f-6197a51f-2b71f072-824495f8_0 generated for partition 0 topic 'acc/topic1' owner test-group-id 2025-03-31T16:00:46.080177Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 2 partition: 0 MaxSeqNo: 0 sessionId: test-group-id|114b998f-6197a51f-2b71f072-824495f8_0 2025-03-31T16:00:46.084167Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: test-group-id|114b998f-6197a51f-2b71f072-824495f8_0 grpc read done: success: 1 data: update_token_request [content omitted] 2025-03-31T16:00:46.087269Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: test-group-id|114b998f-6197a51f-2b71f072-824495f8_0 grpc read done: success: 1 data: update_token_request [content omitted] 2025-03-31T16:00:46.087347Z node 3 :PQ_WRITE_PROXY INFO: session v1 error cookie: 2 reason: got another 'update_token_request' while previous still in progress, only single token update is allowed at a time sessionId: test-group-id|114b998f-6197a51f-2b71f072-824495f8_0 2025-03-31T16:00:46.087650Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: test-group-id|114b998f-6197a51f-2b71f072-824495f8_0 is DEAD 2025-03-31T16:00:46.087942Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-03-31T16:00:46.578578Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7488004238675864358:2403], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:00:46.579564Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YjA1ZDdmYTAtZjRjYTcwZjQtODljYzFlMjUtZjlhNmJmNWQ=, ActorId: [3:7488004238675864351:2399], ActorState: ExecuteState, TraceId: 01jqpey5en34kgaeww0ajs37pq, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:00:46.579948Z node 3 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minstep/unittest >> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW-VolatileTxs [GOOD] Test command err: 2025-03-31T16:00:32.326025Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:00:32.326348Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:00:32.326538Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001b66/r3tmp/tmpvlmVan/pdisk_1.dat 2025-03-31T16:00:33.359063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:00:33.370730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-31T16:00:33.381175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:00:33.382433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-03-31T16:00:33.385033Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 HANDLE EvProposeTransaction marker# C0 2025-03-31T16:00:33.385111Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 step# 500 Status# 16 SEND to# [1:409:2404] Proxy marker# C1 2025-03-31T16:00:33.422174Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:00:33.436139Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Merged config: { } 2025-03-31T16:00:33.481492Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvRegisterNode from [1:335:2375] HiveId: 72057594037968897 ServicedDomains { SchemeShard: 72057594046644480 PathId: 1 } TabletAvailability { Type: Mediator Priority: 0 } TabletAvailability { Type: Dummy Priority: 0 } TabletAvailability { Type: KeyValue Priority: 0 } TabletAvailability { Type: Coordinator Priority: 0 } TabletAvailability { Type: Hive Priority: 0 } TabletAvailability { Type: SchemeShard Priority: 0 } TabletAvailability { Type: DataShard Priority: 0 } TabletAvailability { Type: PersQueue Priority: 0 } TabletAvailability { Type: PersQueueReadBalancer Priority: 0 } TabletAvailability { Type: Kesus Priority: 0 } TabletAvailability { Type: SysViewProcessor Priority: 0 } TabletAvailability { Type: ColumnShard Priority: 0 } TabletAvailability { Type: SequenceShard Priority: 0 } TabletAvailability { Type: ReplicationController Priority: 0 } TabletAvailability { Type: StatisticsAggregator Priority: 0 } 2025-03-31T16:00:33.481662Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxRegisterNode(1)::Execute 2025-03-31T16:00:33.481791Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:00:33.481841Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2025-03-31T16:00:33.481877Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2025-03-31T16:00:33.481923Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2025-03-31T16:00:33.481965Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2025-03-31T16:00:33.482047Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2025-03-31T16:00:33.482330Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Execute 2025-03-31T16:00:33.482374Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessBootQueue (size: 0) 2025-03-31T16:00:33.482429Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessWaitQueue (size: 0) 2025-03-31T16:00:33.482559Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2025-03-31T16:00:33.482983Z node 1 :HIVE DEBUG: HIVE#72057594037968897 TEvInterconnect::TEvNodeInfo NodeId 1 Location DataCenter: "1" Module: "1" Rack: "1" Unit: "1" 2025-03-31T16:00:33.495727Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxRegisterNode(1)::Complete 2025-03-31T16:00:33.495825Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Node(1) Ping([1:335:2375]) 2025-03-31T16:00:33.495941Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Complete 2025-03-31T16:00:33.496748Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::Handle::TEvSyncTablets 2025-03-31T16:00:33.496845Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxSyncTablets([1:335:2375])::Execute 2025-03-31T16:00:33.496887Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2025-03-31T16:00:33.496956Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxSyncTablets([1:335:2375])::Complete 2025-03-31T16:00:33.497147Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvStatus for Node 1: Status: 0 StartTime: 0 ResourceMaximum { Memory: 270443347968 } 2025-03-31T16:00:33.497226Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStatus(1)::Execute 2025-03-31T16:00:33.497284Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:00:33.497446Z node 1 :HIVE DEBUG: HIVE#72057594037968897 AddRegisteredDataCentersNode(1, 1) 2025-03-31T16:00:33.497502Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2025-03-31T16:00:33.497536Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2025-03-31T16:00:33.497693Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Execute 2025-03-31T16:00:33.497732Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessBootQueue (size: 0) 2025-03-31T16:00:33.497774Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessWaitQueue (size: 0) 2025-03-31T16:00:33.497812Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2025-03-31T16:00:33.508581Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStatus(1)::Complete 2025-03-31T16:00:33.508695Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Complete 2025-03-31T16:00:33.581602Z node 1 :TX_COORDINATOR DEBUG: Transaction 1 has been planned 2025-03-31T16:00:33.581690Z node 1 :TX_COORDINATOR DEBUG: Planned transaction 1 for mediator 72057594046382081 tablet 72057594046644480 2025-03-31T16:00:33.582015Z node 1 :TX_COORDINATOR TRACE: Coordinator# 72057594046316545 scheduling step 1000 in 0.500000s at 0.950000s 2025-03-31T16:00:33.582424Z node 1 :TX_COORDINATOR DEBUG: Send from# 72057594046316545 to mediator# 72057594046382081, step# 500, txid# 1 marker# C2 2025-03-31T16:00:33.582486Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 stepId# 500 Status# 17 SEND EvProposeTransactionStatus to# [1:409:2404] Proxy 2025-03-31T16:00:33.583646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 
2025-03-31T16:00:33.585136Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2025-03-31T16:00:33.585234Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 for mediator 72057594046382081 tablet 72057594046644480 removed=1 2025-03-31T16:00:33.585274Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 for mediator 72057594046382081 acknowledged 2025-03-31T16:00:33.585310Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 acknowledged 2025-03-31T16:00:33.586321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:00:33.586399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2025-03-31T16:00:33.592101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 1, subscribers: 1 2025-03-31T16:00:33.594895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/table-1, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:00:33.602635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-31T16:00:33.602743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:00:33.603816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/table-1 2025-03-31T16:00:33.617271Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvHive::TEvCreateTablet(DataShard(72057594046644480,1)) 2025-03-31T16:00:33.665530Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute Owner: 72057594046644480 OwnerIdx: 1 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "/Root:test" } BindedChannels { StoragePoolName: "/Root:test" } BindedChannels { StoragePoolName: "/Root:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-03-31T16:00:33.665657Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Hive 72057594037968897 allocated TabletId 72075186224037888 from TabletIdIndex 65536 2025-03-31T16:00:33.665911Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for type DataShard: {} 2025-03-31T16:00:33.665983Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for object (72057594046644480,2): {} 2025-03-31T16:00:33.666044Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for profile 'default': {Memory: 1048576} 2025-03-31T16:00:33.666234Z node 1 :HIVE DEBUG: HIVE#72057594037968897 CreateTabletFollowers Tablet DataShard.72075186224037888.Leader.0 2025-03-31T16:00:33.666835Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute TabletId: 72075186224037888 Status: OK 2025-03-31T16:00:33.667002Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::AssignTabletGroups TEvControllerSelectGroups tablet 72075186224037888 GroupParameters { StoragePoolSpecifier { Name: "/Root:test" } } ReturnAllMatchingGroups: true 
2025-03-31T16:00:33.671965Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Connected to tablet 72057594037932033 from tablet 72057594037968897 2025-03-31T16:00:33.672421Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::Handle TEvControllerSelectGroupsResult: success Status: OK NewStyleQuerySupported: true MatchingGroups { Groups { ErasureSpecies: 0 GroupID: 2181038080 StoragePoolName: "/Root:test" AssuredResources { } CurrentResources { } PhysicalGroup: true Decommitted: false } } 2025-03-31T16:00:33.672545Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923048996992}(72075186224037888,HIVE_REASSIGN_REASON_NO,[]) 2025-03-31T16:00:33.672645Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923048996992}: tablet 72075186224037888 channel 0 assigned to group 2181038080 2025-03-31T16:00:33.672831Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923048996992}: tablet 72075186224037888 channel 1 assigned to group 2181038080 2025-03-31T16:00:33.672904Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923048996992}: tablet 72075186224037888 channel 2 assigned to group 21810380 ... TxBlockStorageResult::Execute(72075186224037888 OK) 2025-03-31T16:00:46.710673Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxBlockStorageResult::Complete(72075186224037888 OK) 2025-03-31T16:00:46.711011Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::Handle::TEvInitiateDeleteStorage TabletId=72075186224037888 2025-03-31T16:00:46.711362Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-03-31T16:00:46.711499Z node 2 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus, TabletId: 72075186224037888 2025-03-31T16:00:46.711552Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2025-03-31T16:00:46.711772Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTabletResult::Execute(72075186224037888 OK) 2025-03-31T16:00:46.722983Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-03-31T16:00:46.724459Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 281474976715665 HANDLE EvProposeTransaction marker# C0 2025-03-31T16:00:46.724553Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 281474976715665 step# 3500 Status# 16 SEND to# [2:410:2405] Proxy marker# C1 2025-03-31T16:00:46.735790Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTabletResult(72075186224037888)::Complete SideEffects {} 2025-03-31T16:00:46.826604Z node 2 :TX_COORDINATOR DEBUG: Transaction 281474976715665 has been planned 2025-03-31T16:00:46.826711Z node 2 :TX_COORDINATOR DEBUG: Planned transaction 281474976715665 for mediator 72057594046382081 tablet 72057594046644480 2025-03-31T16:00:46.826756Z node 2 :TX_COORDINATOR DEBUG: Planned transaction 281474976715665 for mediator 72057594046382081 tablet 72075186224037889 2025-03-31T16:00:46.826996Z node 2 :TX_COORDINATOR TRACE: Coordinator# 72057594046316545 scheduling step 4000 in 0.500000s at 3.950000s 2025-03-31T16:00:46.827456Z node 2 :TX_COORDINATOR DEBUG: Send from# 72057594046316545 to mediator# 72057594046382081, step# 3500, txid# 281474976715665 marker# C2 2025-03-31T16:00:46.827538Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 281474976715665 stepId# 3500 
Status# 17 SEND EvProposeTransactionStatus to# [2:410:2405] Proxy 2025-03-31T16:00:46.828183Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715665 at step 3500 at tablet 72075186224037889 { Transactions { TxId: 281474976715665 AckTo { RawX1: 0 RawX2: 0 } } Step: 3500 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2025-03-31T16:00:46.828244Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-31T16:00:46.828419Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 3500, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-31T16:00:46.828825Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-31T16:00:46.828876Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:00:46.828921Z node 2 :TX_DATASHARD DEBUG: Found ready operation [3500:281474976715665] in PlanQueue unit at 72075186224037889 2025-03-31T16:00:46.829133Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 3500:281474976715665 keys extracted: 0 2025-03-31T16:00:46.829386Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:00:46.829731Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-31T16:00:46.829816Z node 2 :TX_DATASHARD INFO: Trying to DROP TABLE at 72075186224037889 2025-03-31T16:00:46.830241Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:00:46.831627Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 3500} 2025-03-31T16:00:46.831704Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-03-31T16:00:46.832505Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-03-31T16:00:46.832577Z node 2 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715665] from 72075186224037889 at tablet 72075186224037889 send result to client [2:410:2405], exec latency: 0 ms, propose latency: 0 ms 2025-03-31T16:00:46.832647Z node 2 :TX_DATASHARD INFO: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715665 state PreOffline TxInFly 0 2025-03-31T16:00:46.832742Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-31T16:00:46.833222Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2025-03-31T16:00:46.833316Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:25] persistent tx 281474976715665 for mediator 72057594046382081 tablet 72057594046644480 removed=1 2025-03-31T16:00:46.833360Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:25] persistent tx 281474976715665 for mediator 72057594046382081 tablet 72075186224037889 removed=1 2025-03-31T16:00:46.833389Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:25] persistent tx 281474976715665 for mediator 72057594046382081 acknowledged 2025-03-31T16:00:46.833425Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:25] persistent tx 281474976715665 acknowledged 2025-03-31T16:00:46.834035Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715665, done: 0, blocked: 1 2025-03-31T16:00:46.851856Z node 2 
:FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715665:0 2025-03-31T16:00:46.851989Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715665, publications: 2, subscribers: 1 2025-03-31T16:00:46.852403Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715665 datashard 72075186224037889 state PreOffline 2025-03-31T16:00:46.852493Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-03-31T16:00:46.853387Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715665, subscribers: 1 2025-03-31T16:00:46.853854Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-03-31T16:00:46.854824Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=MWU4YTNlNzYtNmZiZWJhOC01MzNhMDQ5Mi1kMTQxMjM3Zg== 2025-03-31 16:00:46.854 INFO ydb-core-tx-datashard-ut_minstep(pid=83667, tid=0x00007FE05A27ED00) [core exec] yql_execution.cpp:133: Completed async execution for node #42 2025-03-31T16:00:46.854939Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=MWU4YTNlNzYtNmZiZWJhOC01MzNhMDQ5Mi1kMTQxMjM3Zg== 2025-03-31 16:00:46.854 INFO ydb-core-tx-datashard-ut_minstep(pid=83667, tid=0x00007FE05A27ED00) [core exec] yql_execution.cpp:153: State is ExecutionComplete after apply async changes for node #42 2025-03-31T16:00:46.855018Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=MWU4YTNlNzYtNmZiZWJhOC01MzNhMDQ5Mi1kMTQxMjM3Zg== 2025-03-31 16:00:46.854 INFO ydb-core-tx-datashard-ut_minstep(pid=83667, tid=0x00007FE05A27ED00) [core exec] yql_execution.cpp:59: Begin, root #43 2025-03-31T16:00:46.855065Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=MWU4YTNlNzYtNmZiZWJhOC01MzNhMDQ5Mi1kMTQxMjM3Zg== 2025-03-31 16:00:46.855 INFO ydb-core-tx-datashard-ut_minstep(pid=83667, tid=0x00007FE05A27ED00) [core exec] yql_execution.cpp:72: Collect unused nodes for root #43, status: Ok 2025-03-31T16:00:46.856411Z node 2 :KQP_YQL TRACE: SessionId: ydb://session/3?node_id=2&id=MWU4YTNlNzYtNmZiZWJhOC01MzNhMDQ5Mi1kMTQxMjM3Zg== 2025-03-31 16:00:46.856 TRACE ydb-core-tx-datashard-ut_minstep(pid=83667, tid=0x00007FE05A27ED00) [core exec] yql_execution.cpp:387: {0}, callable #43 2025-03-31T16:00:46.856516Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=MWU4YTNlNzYtNmZiZWJhOC01MzNhMDQ5Mi1kMTQxMjM3Zg== 2025-03-31 16:00:46.856 INFO ydb-core-tx-datashard-ut_minstep(pid=83667, tid=0x00007FE05A27ED00) [core exec] yql_execution.cpp:577: Node #43 finished execution 2025-03-31T16:00:46.856594Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=MWU4YTNlNzYtNmZiZWJhOC01MzNhMDQ5Mi1kMTQxMjM3Zg== 2025-03-31 16:00:46.856 INFO ydb-core-tx-datashard-ut_minstep(pid=83667, tid=0x00007FE05A27ED00) [core exec] yql_execution.cpp:594: Node #43 created 0 trackable nodes: 2025-03-31T16:00:46.856648Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=MWU4YTNlNzYtNmZiZWJhOC01MzNhMDQ5Mi1kMTQxMjM3Zg== 2025-03-31 16:00:46.856 INFO ydb-core-tx-datashard-ut_minstep(pid=83667, tid=0x00007FE05A27ED00) [core exec] yql_execution.cpp:87: Finish, output #43, status: Ok 2025-03-31T16:00:46.856698Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=MWU4YTNlNzYtNmZiZWJhOC01MzNhMDQ5Mi1kMTQxMjM3Zg== 2025-03-31 16:00:46.856 INFO ydb-core-tx-datashard-ut_minstep(pid=83667, 
tid=0x00007FE05A27ED00) [core exec] yql_execution.cpp:93: Creating finalizing transformer, output #43 2025-03-31T16:00:46.856887Z node 2 :KQP_YQL NOTICE: SessionId: ydb://session/3?node_id=2&id=MWU4YTNlNzYtNmZiZWJhOC01MzNhMDQ5Mi1kMTQxMjM3Zg== 2025-03-31 16:00:46.856 NOTE ydb-core-tx-datashard-ut_minstep(pid=83667, tid=0x00007FE05A27ED00) [common provider] yql_provider_gateway.cpp:21:
: Info: Execution, code: 1060 2025-03-31T16:00:46.856938Z node 2 :KQP_YQL NOTICE: SessionId: ydb://session/3?node_id=2&id=MWU4YTNlNzYtNmZiZWJhOC01MzNhMDQ5Mi1kMTQxMjM3Zg== 2025-03-31 16:00:46.856 NOTE ydb-core-tx-datashard-ut_minstep(pid=83667, tid=0x00007FE05A27ED00) [common provider] yql_provider_gateway.cpp:21:
:1:12: Info: Executing DROP TABLE 2025-03-31T16:00:46.856977Z node 2 :KQP_YQL NOTICE: SessionId: ydb://session/3?node_id=2&id=MWU4YTNlNzYtNmZiZWJhOC01MzNhMDQ5Mi1kMTQxMjM3Zg== 2025-03-31 16:00:46.856 NOTE ydb-core-tx-datashard-ut_minstep(pid=83667, tid=0x00007FE05A27ED00) [common provider] yql_provider_gateway.cpp:21:
: Info: Success, code: 4
2025-03-31T16:00:46.875842Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from:
2025-03-31T16:00:46.876095Z node 2 :TX_DATASHARD INFO: 72075186224037889 Initiating switch from PreOffline to Offline state
2025-03-31T16:00:46.877884Z node 2 :TX_DATASHARD INFO: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480
2025-03-31T16:00:46.878798Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037889 state Offline
2025-03-31T16:00:46.882484Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() ShardOwnerId: 72057594046644480 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186224037889
2025-03-31T16:00:46.882571Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute Tablet 72075186224037889
2025-03-31T16:00:46.882704Z node 2 :HIVE DEBUG: HIVE#72057594037968897 Tablet(DataShard.72075186224037889.Leader.1) VolatileState: Running -> Stopped (Node 2)
2025-03-31T16:00:46.882833Z node 2 :HIVE DEBUG: HIVE#72057594037968897 Sending TEvStopTablet(DataShard.72075186224037889.Leader.1 gen 1) to node 2
2025-03-31T16:00:46.882955Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() result Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2
|70.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest
>> SystemView::TopPartitionsByCpuFields [GOOD]
>> SystemView::TopPartitionsByCpuTables
>> ObjectStorageListingTest::FilterListing
|70.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest
>> TOlap::CreateDropStandaloneTableDefaultSharding [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-59 [FAIL]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-60
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minstep/unittest >> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW-VolatileTxs [GOOD]
Test command err:
2025-03-31T16:00:29.642708Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:00:29.642991Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:00:29.643401Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001b7a/r3tmp/tmp8zrAfq/pdisk_1.dat 2025-03-31T16:00:30.653636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:00:30.672553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-31T16:00:30.682567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:00:30.688428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-03-31T16:00:30.701906Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 HANDLE EvProposeTransaction marker# C0 2025-03-31T16:00:30.702048Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 step# 500 Status# 16 SEND to# [1:409:2404] Proxy marker# C1 2025-03-31T16:00:30.740484Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:00:30.757124Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Merged config: { } 2025-03-31T16:00:30.805941Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvRegisterNode from [1:335:2375] HiveId: 72057594037968897 ServicedDomains { SchemeShard: 72057594046644480 PathId: 1 } TabletAvailability { Type: Mediator Priority: 0 } TabletAvailability { Type: Dummy Priority: 0 } TabletAvailability { Type: KeyValue Priority: 0 } TabletAvailability { Type: Coordinator Priority: 0 } TabletAvailability { Type: Hive Priority: 0 } TabletAvailability { Type: SchemeShard Priority: 0 } TabletAvailability { Type: DataShard Priority: 0 } TabletAvailability { Type: PersQueue Priority: 0 } TabletAvailability { Type: PersQueueReadBalancer Priority: 0 } TabletAvailability { Type: Kesus Priority: 0 } TabletAvailability { Type: SysViewProcessor Priority: 0 } TabletAvailability { Type: ColumnShard Priority: 0 } TabletAvailability { Type: SequenceShard Priority: 0 } TabletAvailability { Type: ReplicationController Priority: 0 } TabletAvailability { Type: StatisticsAggregator Priority: 0 } 2025-03-31T16:00:30.806103Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxRegisterNode(1)::Execute 2025-03-31T16:00:30.806290Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:00:30.806345Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2025-03-31T16:00:30.806386Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2025-03-31T16:00:30.806427Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2025-03-31T16:00:30.806463Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2025-03-31T16:00:30.806552Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
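[Editor's note] The hive log above walks a registering node through Unknown -> Disconnected -> Connecting -> Connected. A compact sketch of that transition logic; the type and handler names are assumptions modeled on the log text, not YDB's real declarations:

#include <iostream>

enum class EVolatileState { Unknown, Disconnected, Connecting, Connected };

const char* Name(EVolatileState s) {
    switch (s) {
        case EVolatileState::Unknown:      return "Unknown";
        case EVolatileState::Disconnected: return "Disconnected";
        case EVolatileState::Connecting:   return "Connecting";
        case EVolatileState::Connected:    return "Connected";
    }
    return "?";
}

// TTxRegisterNode moves a fresh node Unknown -> Disconnected -> Connecting.
EVolatileState OnRegisterNode(EVolatileState s) {
    if (s == EVolatileState::Unknown) s = EVolatileState::Disconnected;
    if (s == EVolatileState::Disconnected) s = EVolatileState::Connecting;
    return s;
}

// The later TTxStatus handler completes Connecting -> Connected.
EVolatileState OnStatus(EVolatileState s) {
    return s == EVolatileState::Connecting ? EVolatileState::Connected : s;
}

int main() {
    EVolatileState s = EVolatileState::Unknown;
    s = OnRegisterNode(s);            // Unknown -> Disconnected -> Connecting
    s = OnStatus(s);                  // Connecting -> Connected
    std::cout << Name(s) << "\n";     // prints "Connected"
}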
2025-03-31T16:00:30.806846Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Execute 2025-03-31T16:00:30.806898Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessBootQueue (size: 0) 2025-03-31T16:00:30.806935Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessWaitQueue (size: 0) 2025-03-31T16:00:30.807005Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2025-03-31T16:00:30.807230Z node 1 :HIVE DEBUG: HIVE#72057594037968897 TEvInterconnect::TEvNodeInfo NodeId 1 Location DataCenter: "1" Module: "1" Rack: "1" Unit: "1" 2025-03-31T16:00:30.819868Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxRegisterNode(1)::Complete 2025-03-31T16:00:30.819986Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Node(1) Ping([1:335:2375]) 2025-03-31T16:00:30.820114Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Complete 2025-03-31T16:00:30.822748Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::Handle::TEvSyncTablets 2025-03-31T16:00:30.822892Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxSyncTablets([1:335:2375])::Execute 2025-03-31T16:00:30.822934Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2025-03-31T16:00:30.823017Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxSyncTablets([1:335:2375])::Complete 2025-03-31T16:00:30.823228Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvStatus for Node 1: Status: 0 StartTime: 0 ResourceMaximum { Memory: 270443347968 } 2025-03-31T16:00:30.825053Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStatus(1)::Execute 2025-03-31T16:00:30.825143Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:00:30.825340Z node 1 :HIVE DEBUG: HIVE#72057594037968897 AddRegisteredDataCentersNode(1, 1) 2025-03-31T16:00:30.825446Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2025-03-31T16:00:30.825485Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2025-03-31T16:00:30.825696Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Execute 2025-03-31T16:00:30.825729Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessBootQueue (size: 0) 2025-03-31T16:00:30.825784Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessWaitQueue (size: 0) 2025-03-31T16:00:30.825833Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2025-03-31T16:00:30.836727Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStatus(1)::Complete 2025-03-31T16:00:30.836815Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Complete 2025-03-31T16:00:30.909891Z node 1 :TX_COORDINATOR DEBUG: Transaction 1 has been planned 2025-03-31T16:00:30.910026Z node 1 :TX_COORDINATOR DEBUG: Planned transaction 1 for mediator 72057594046382081 tablet 72057594046644480 2025-03-31T16:00:30.910407Z node 1 :TX_COORDINATOR TRACE: Coordinator# 72057594046316545 scheduling step 1000 in 0.500000s at 0.950000s 2025-03-31T16:00:30.910813Z node 1 :TX_COORDINATOR DEBUG: Send from# 72057594046316545 to mediator# 72057594046382081, step# 500, txid# 1 marker# C2 2025-03-31T16:00:30.910893Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 stepId# 500 Status# 17 SEND EvProposeTransactionStatus to# [1:409:2404] Proxy 2025-03-31T16:00:30.911861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 
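[Editor's note] The coordinator plans in fixed-width steps: after committing step 500 it schedules step 1000 half a second later ("scheduling step 1000 in 0.500000s at 0.950000s"), and later "scheduling step 4000" after step 3500. Assuming the 500 ms plan resolution implied by those numbers, the next-step arithmetic reduces to:

#include <cassert>
#include <cstdint>

// Plan resolution implied by the log (an assumption, not a queried setting).
constexpr uint64_t kPlanResolutionMs = 500;

// The next plan step is the smallest multiple of the resolution strictly
// ahead of the current time.
uint64_t NextPlanStep(uint64_t nowMs) {
    return (nowMs / kPlanResolutionMs + 1) * kPlanResolutionMs;
}

int main() {
    assert(NextPlanStep(500) == 1000);   // matches "scheduling step 1000"
    assert(NextPlanStep(3500) == 4000);  // matches "scheduling step 4000"
    return 0;
}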
2025-03-31T16:00:30.913268Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2025-03-31T16:00:30.913366Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 for mediator 72057594046382081 tablet 72057594046644480 removed=1 2025-03-31T16:00:30.913402Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 for mediator 72057594046382081 acknowledged 2025-03-31T16:00:30.913438Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 acknowledged 2025-03-31T16:00:30.914202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:00:30.914282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2025-03-31T16:00:30.915436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 1, subscribers: 1 2025-03-31T16:00:30.918246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/table-1, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:00:30.925399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-31T16:00:30.925534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:00:30.926597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/table-1 2025-03-31T16:00:30.930776Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvHive::TEvCreateTablet(DataShard(72057594046644480,1)) 2025-03-31T16:00:30.970011Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute Owner: 72057594046644480 OwnerIdx: 1 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "/Root:test" } BindedChannels { StoragePoolName: "/Root:test" } BindedChannels { StoragePoolName: "/Root:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-03-31T16:00:30.970142Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Hive 72057594037968897 allocated TabletId 72075186224037888 from TabletIdIndex 65536 2025-03-31T16:00:30.970470Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for type DataShard: {} 2025-03-31T16:00:30.970539Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for object (72057594046644480,2): {} 2025-03-31T16:00:30.970607Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for profile 'default': {Memory: 1048576} 2025-03-31T16:00:30.970788Z node 1 :HIVE DEBUG: HIVE#72057594037968897 CreateTabletFollowers Tablet DataShard.72075186224037888.Leader.0 2025-03-31T16:00:30.979789Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute TabletId: 72075186224037888 Status: OK 2025-03-31T16:00:30.980075Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::AssignTabletGroups TEvControllerSelectGroups tablet 72075186224037888 GroupParameters { StoragePoolSpecifier { Name: "/Root:test" } } ReturnAllMatchingGroups: true 
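[Editor's note] TTxCreateTablet above binds three channels to the "/Root:test" storage pool, and TTxUpdateTabletGroups later maps each bound channel to the group returned by TEvControllerSelectGroupsResult ("channel N assigned to group 2181038080"). A toy version of that assignment loop, illustrative only and not the hive's real code:

#include <cstdint>
#include <iostream>
#include <string>
#include <vector>

struct ChannelBinding {
    std::string storagePool;  // from BindedChannels { StoragePoolName: ... }
    uint32_t group;           // filled in by group selection
};

void AssignGroups(std::vector<ChannelBinding>& channels, uint32_t selectedGroup,
                  const std::string& pool) {
    for (size_t ch = 0; ch < channels.size(); ++ch) {
        if (channels[ch].storagePool == pool) {
            channels[ch].group = selectedGroup;
            std::cout << "channel " << ch << " assigned to group "
                      << selectedGroup << "\n";
        }
    }
}

int main() {
    // Three bound channels, all pointing at the same pool, as in the log.
    std::vector<ChannelBinding> channels(3, ChannelBinding{"/Root:test", 0});
    AssignGroups(channels, 2181038080u, "/Root:test");
}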
2025-03-31T16:00:30.980825Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Connected to tablet 72057594037932033 from tablet 72057594037968897 2025-03-31T16:00:30.981304Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::Handle TEvControllerSelectGroupsResult: success Status: OK NewStyleQuerySupported: true MatchingGroups { Groups { ErasureSpecies: 0 GroupID: 2181038080 StoragePoolName: "/Root:test" AssuredResources { } CurrentResources { } PhysicalGroup: true Decommitted: false } } 2025-03-31T16:00:30.981417Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923048996992}(72075186224037888,HIVE_REASSIGN_REASON_NO,[]) 2025-03-31T16:00:30.981500Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923048996992}: tablet 72075186224037888 channel 0 assigned to group 2181038080 2025-03-31T16:00:30.981667Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923048996992}: tablet 72075186224037888 channel 1 assigned to group 2181038080 2025-03-31T16:00:30.981731Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923048996992}: tablet 72075186224037888 channel 2 assigned to group 21810380 ... 037889 2025-03-31T16:00:48.341604Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715665 ssId 72057594046644480 seqNo 2:4 2025-03-31T16:00:48.341658Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715665 at tablet 72075186224037889 2025-03-31T16:00:48.341874Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:00:48.342088Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:00:48.342169Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-31T16:00:48.342205Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:00:48.342245Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 1 2025-03-31T16:00:48.353398Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-03-31T16:00:48.353549Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-03-31T16:00:48.355314Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 281474976715665 HANDLE EvProposeTransaction marker# C0 2025-03-31T16:00:48.355389Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 281474976715665 step# 33000 Status# 16 SEND to# [2:410:2405] Proxy marker# C1 2025-03-31T16:00:48.430252Z node 2 :TX_COORDINATOR DEBUG: Transaction 281474976715665 has been planned 2025-03-31T16:00:48.430362Z node 2 :TX_COORDINATOR DEBUG: Planned transaction 281474976715665 for mediator 72057594046382081 tablet 72057594046644480 2025-03-31T16:00:48.430410Z node 2 :TX_COORDINATOR DEBUG: Planned transaction 281474976715665 for mediator 72057594046382081 tablet 72075186224037889 2025-03-31T16:00:48.430746Z node 2 :TX_COORDINATOR TRACE: Coordinator# 72057594046316545 scheduling step 33500 in 0.500000s at 33.450000s 2025-03-31T16:00:48.431225Z node 2 :TX_COORDINATOR DEBUG: Send from# 72057594046316545 to mediator# 72057594046382081, step# 33000, txid# 281474976715665 marker# C2 2025-03-31T16:00:48.431327Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 281474976715665 stepId# 33000 Status# 
17 SEND EvProposeTransactionStatus to# [2:410:2405] Proxy 2025-03-31T16:00:48.431862Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 33000, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-31T16:00:48.432578Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715665 at step 33000 at tablet 72075186224037889 { Transactions { TxId: 281474976715665 AckTo { RawX1: 0 RawX2: 0 } } Step: 33000 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2025-03-31T16:00:48.432644Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-31T16:00:48.432848Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-31T16:00:48.432906Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:00:48.432975Z node 2 :TX_DATASHARD DEBUG: Found ready operation [33000:281474976715665] in PlanQueue unit at 72075186224037889 2025-03-31T16:00:48.433199Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 33000:281474976715665 keys extracted: 0 2025-03-31T16:00:48.433371Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:00:48.433688Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-31T16:00:48.433765Z node 2 :TX_DATASHARD INFO: Trying to DROP TABLE at 72075186224037889 2025-03-31T16:00:48.434231Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:00:48.442077Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 33000} 2025-03-31T16:00:48.442182Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-03-31T16:00:48.442798Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2025-03-31T16:00:48.442915Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:54] persistent tx 281474976715665 for mediator 72057594046382081 tablet 72057594046644480 removed=1 2025-03-31T16:00:48.442964Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:54] persistent tx 281474976715665 for mediator 72057594046382081 tablet 72075186224037889 removed=1 2025-03-31T16:00:48.443002Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:54] persistent tx 281474976715665 for mediator 72057594046382081 acknowledged 2025-03-31T16:00:48.443050Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:54] persistent tx 281474976715665 acknowledged 2025-03-31T16:00:48.443561Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-03-31T16:00:48.443642Z node 2 :TX_DATASHARD DEBUG: Complete [33000 : 281474976715665] from 72075186224037889 at tablet 72075186224037889 send result to client [2:410:2405], exec latency: 0 ms, propose latency: 0 ms 2025-03-31T16:00:48.443721Z node 2 :TX_DATASHARD INFO: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715665 state PreOffline TxInFly 0 2025-03-31T16:00:48.443822Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-31T16:00:48.444652Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715665, done: 0, blocked: 1 2025-03-31T16:00:48.449730Z node 2 
:TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715665 datashard 72075186224037889 state PreOffline 2025-03-31T16:00:48.449840Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-03-31T16:00:48.450466Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715665:0 2025-03-31T16:00:48.450673Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715665, publications: 1, subscribers: 1 2025-03-31T16:00:48.451314Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715665, subscribers: 1 2025-03-31T16:00:48.452135Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-03-31T16:00:48.453234Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=MTRjOTJjYzEtYTg4M2Y3YzMtOTkwYjZmYmUtZmFiYWVjNGE= 2025-03-31 16:00:48.453 INFO ydb-core-tx-datashard-ut_minstep(pid=83653, tid=0x00007F0DF479CD00) [core exec] yql_execution.cpp:133: Completed async execution for node #42 2025-03-31T16:00:48.453372Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=MTRjOTJjYzEtYTg4M2Y3YzMtOTkwYjZmYmUtZmFiYWVjNGE= 2025-03-31 16:00:48.453 INFO ydb-core-tx-datashard-ut_minstep(pid=83653, tid=0x00007F0DF479CD00) [core exec] yql_execution.cpp:153: State is ExecutionComplete after apply async changes for node #42 2025-03-31T16:00:48.453456Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=MTRjOTJjYzEtYTg4M2Y3YzMtOTkwYjZmYmUtZmFiYWVjNGE= 2025-03-31 16:00:48.453 INFO ydb-core-tx-datashard-ut_minstep(pid=83653, tid=0x00007F0DF479CD00) [core exec] yql_execution.cpp:59: Begin, root #43 2025-03-31T16:00:48.453512Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=MTRjOTJjYzEtYTg4M2Y3YzMtOTkwYjZmYmUtZmFiYWVjNGE= 2025-03-31 16:00:48.453 INFO ydb-core-tx-datashard-ut_minstep(pid=83653, tid=0x00007F0DF479CD00) [core exec] yql_execution.cpp:72: Collect unused nodes for root #43, status: Ok 2025-03-31T16:00:48.453571Z node 2 :KQP_YQL TRACE: SessionId: ydb://session/3?node_id=2&id=MTRjOTJjYzEtYTg4M2Y3YzMtOTkwYjZmYmUtZmFiYWVjNGE= 2025-03-31 16:00:48.453 TRACE ydb-core-tx-datashard-ut_minstep(pid=83653, tid=0x00007F0DF479CD00) [core exec] yql_execution.cpp:387: {0}, callable #43 2025-03-31T16:00:48.453660Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=MTRjOTJjYzEtYTg4M2Y3YzMtOTkwYjZmYmUtZmFiYWVjNGE= 2025-03-31 16:00:48.453 INFO ydb-core-tx-datashard-ut_minstep(pid=83653, tid=0x00007F0DF479CD00) [core exec] yql_execution.cpp:577: Node #43 finished execution 2025-03-31T16:00:48.453739Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=MTRjOTJjYzEtYTg4M2Y3YzMtOTkwYjZmYmUtZmFiYWVjNGE= 2025-03-31 16:00:48.453 INFO ydb-core-tx-datashard-ut_minstep(pid=83653, tid=0x00007F0DF479CD00) [core exec] yql_execution.cpp:594: Node #43 created 0 trackable nodes: 2025-03-31T16:00:48.453798Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=MTRjOTJjYzEtYTg4M2Y3YzMtOTkwYjZmYmUtZmFiYWVjNGE= 2025-03-31 16:00:48.453 INFO ydb-core-tx-datashard-ut_minstep(pid=83653, tid=0x00007F0DF479CD00) [core exec] yql_execution.cpp:87: Finish, output #43, status: Ok 2025-03-31T16:00:48.453855Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=MTRjOTJjYzEtYTg4M2Y3YzMtOTkwYjZmYmUtZmFiYWVjNGE= 2025-03-31 16:00:48.453 INFO ydb-core-tx-datashard-ut_minstep(pid=83653, 
tid=0x00007F0DF479CD00) [core exec] yql_execution.cpp:93: Creating finalizing transformer, output #43 2025-03-31T16:00:48.454031Z node 2 :KQP_YQL NOTICE: SessionId: ydb://session/3?node_id=2&id=MTRjOTJjYzEtYTg4M2Y3YzMtOTkwYjZmYmUtZmFiYWVjNGE= 2025-03-31 16:00:48.453 NOTE ydb-core-tx-datashard-ut_minstep(pid=83653, tid=0x00007F0DF479CD00) [common provider] yql_provider_gateway.cpp:21:
: Info: Execution, code: 1060 2025-03-31T16:00:48.454093Z node 2 :KQP_YQL NOTICE: SessionId: ydb://session/3?node_id=2&id=MTRjOTJjYzEtYTg4M2Y3YzMtOTkwYjZmYmUtZmFiYWVjNGE= 2025-03-31 16:00:48.454 NOTE ydb-core-tx-datashard-ut_minstep(pid=83653, tid=0x00007F0DF479CD00) [common provider] yql_provider_gateway.cpp:21:
:1:12: Info: Executing DROP TABLE 2025-03-31T16:00:48.454139Z node 2 :KQP_YQL NOTICE: SessionId: ydb://session/3?node_id=2&id=MTRjOTJjYzEtYTg4M2Y3YzMtOTkwYjZmYmUtZmFiYWVjNGE= 2025-03-31 16:00:48.454 NOTE ydb-core-tx-datashard-ut_minstep(pid=83653, tid=0x00007F0DF479CD00) [common provider] yql_provider_gateway.cpp:21:
: Info: Success, code: 4
2025-03-31T16:00:48.474723Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from:
2025-03-31T16:00:48.475012Z node 2 :TX_DATASHARD INFO: 72075186224037889 Initiating switch from PreOffline to Offline state
2025-03-31T16:00:48.477155Z node 2 :TX_DATASHARD INFO: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480
2025-03-31T16:00:48.478194Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037889 state Offline
2025-03-31T16:00:48.478675Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() ShardOwnerId: 72057594046644480 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186224037889
2025-03-31T16:00:48.478743Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute Tablet 72075186224037889
2025-03-31T16:00:48.478863Z node 2 :HIVE DEBUG: HIVE#72057594037968897 Tablet(DataShard.72075186224037889.Leader.1) VolatileState: Running -> Stopped (Node 2)
2025-03-31T16:00:48.479013Z node 2 :HIVE DEBUG: HIVE#72057594037968897 Sending TEvStopTablet(DataShard.72075186224037889.Leader.1 gen 1) to node 2
2025-03-31T16:00:48.485759Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() result Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2
>> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithValidTokenButWithoutACL_SessionClosedWithUnauthorizedError [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersqueueDataPlaneTestSuite::WriteSession [GOOD]
Test command err:
=== Server->StartServer(false);
2025-03-31T16:00:27.979794Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004158557411669:2213];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:00:27.980112Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
2025-03-31T16:00:28.011680Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488004162195219167:2071];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:00:28.011731Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
2025-03-31T16:00:28.685465Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created
2025-03-31T16:00:28.739547Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a32/r3tmp/tmpSRBUgF/pdisk_1.dat
2025-03-31T16:00:28.976548Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-31T16:00:29.092795Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-31T16:00:29.640337Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:00:29.653798Z node 1 :HIVE WARN:
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:00:29.653895Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:00:29.757240Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:00:29.757321Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:00:29.758629Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:00:29.780523Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:00:29.790147Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22880, node 1 2025-03-31T16:00:30.177967Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/0l5c/000a32/r3tmp/yandexWhAtdQ.tmp 2025-03-31T16:00:30.177992Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/0l5c/000a32/r3tmp/yandexWhAtdQ.tmp 2025-03-31T16:00:30.178140Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/0l5c/000a32/r3tmp/yandexWhAtdQ.tmp 2025-03-31T16:00:30.178260Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:00:30.389712Z INFO: TTestServer started on Port 4749 GrpcPort 22880 TClient is connected to server localhost:4749 PQClient connected to localhost:22880 === TenantModeEnabled() = 1 === Init PQ - start server on port 22880 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
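[Editor's note] The harness blocks on "WaitRootIsUp 'Root'" until TClient::Ls of the scheme root succeeds. A generic readiness loop of the same shape, with the probe callback standing in for the real Ls call (assumed, simplified):

#include <chrono>
#include <functional>
#include <thread>

// Polls `probe` until it returns true or `timeout` elapses; returns whether
// the resource became ready. The probe would wrap e.g. Ls("/Root") == SUCCESS.
bool WaitUntilReady(const std::function<bool()>& probe,
                    std::chrono::milliseconds timeout,
                    std::chrono::milliseconds interval) {
    auto deadline = std::chrono::steady_clock::now() + timeout;
    while (std::chrono::steady_clock::now() < deadline) {
        if (probe()) return true;
        std::this_thread::sleep_for(interval);
    }
    return false;
}

int main() {
    int attempts = 0;
    // Toy probe that succeeds on the third poll.
    bool ok = WaitUntilReady([&] { return ++attempts >= 3; },
                             std::chrono::milliseconds(1000),
                             std::chrono::milliseconds(10));
    return ok ? 0 : 1;
}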
2025-03-31T16:00:31.439941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-03-31T16:00:31.440307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:00:31.440520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-03-31T16:00:31.440790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-31T16:00:31.440822Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:00:31.443988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-03-31T16:00:31.444172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-03-31T16:00:31.444358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:00:31.444408Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-03-31T16:00:31.444428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-03-31T16:00:31.444443Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
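[Editor's note] Each schemeshard sub-operation advances through numbered states; this trace shows 2 -> 3 -> 128 -> 240, annotated in the surrounding lines as TCreateParts, NSubDomainState::TConfigureParts, TPropose and TDone. A sketch of that progression table; the number-to-stage mapping is inferred from this log only and is not guaranteed to match the real enum:

#include <cstdint>
#include <iostream>

enum EState : uint32_t {
    CreateParts    = 2,    // "Change state for txid ... 2 -> 3"
    ConfigureParts = 3,    // "3 -> 128"
    Propose        = 128,  // "128 -> 240"
    Done           = 240,  // terminal
};

uint32_t NextState(uint32_t s) {
    switch (s) {
        case CreateParts:    return ConfigureParts;
        case ConfigureParts: return Propose;
        case Propose:        return Done;
        default:             return s;  // terminal states stay put
    }
}

int main() {
    for (uint32_t s = CreateParts; s != Done; s = NextState(s))
        std::cout << s << " -> " << NextState(s) << "\n";  // 2->3, 3->128, 128->240
}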
2025-03-31T16:00:31.452701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:00:31.452757Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-03-31T16:00:31.452772Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-03-31T16:00:31.454380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:00:31.454399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2025-03-31T16:00:31.454416Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:00:31.460847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:00:31.460890Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:00:31.460934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-31T16:00:31.460972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-03-31T16:00:31.464882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:00:31.475933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-03-31T16:00:31.476106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-03-31T16:00:31.485249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743436831521, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-31T16:00:31.491284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743436831521 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-03-31T16:00:31.491354Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-31T16:00:31.491601Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-03-31T16:00:31.491636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-31T16:00:31.491789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-03-31T16:00:31.491855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, 
LocalPathId: 1], at schemeshard: 72057594046644480 2025-03-31T16:00:31.499952Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-03-31T16:00:31.499990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-03-31T16:00:31.500175Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-03-31T16:00:31.500192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7488004167147346782:2402], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1 2025-03-31T16:00:31.500248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:00:31.500282Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-03-31T16:00:31.510214Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-03-31T16:00:31.511652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-03-31T16:00:31.516975Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-03-31T16:00:31.517003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-03-31T16:00:31.517021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 1/1, is published: false 2025-03-31T16:00:31.517041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOpe ... 00:47.226218Z :NOTICE: [/Root] [/Root] [1f95ac3e-195b50ed-c465456d-e04cb8a0] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-03-31T16:00:47.226465Z :INFO: [/Root] [/Root] [78179699-7a99af6b-daaad378-1c9aa516] Starting read session 2025-03-31T16:00:47.226509Z :DEBUG: [/Root] [/Root] [78179699-7a99af6b-daaad378-1c9aa516] Starting session to cluster null (localhost:4700) 2025-03-31T16:00:47.226646Z :DEBUG: [/Root] [/Root] [78179699-7a99af6b-daaad378-1c9aa516] [null] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:00:47.226681Z :DEBUG: [/Root] [/Root] [78179699-7a99af6b-daaad378-1c9aa516] [null] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:00:47.226715Z :DEBUG: [/Root] [/Root] [78179699-7a99af6b-daaad378-1c9aa516] [null] Reconnecting session to cluster null in 0.000000s 2025-03-31T16:00:47.236788Z node 3 :PQ_READ_PROXY DEBUG: new grpc connection 2025-03-31T16:00:47.236814Z node 3 :PQ_READ_PROXY DEBUG: new session created cookie 2 2025-03-31T16:00:47.241905Z :DEBUG: [/Root] [/Root] [78179699-7a99af6b-daaad378-1c9aa516] [null] Successfully connected. Initializing session 2025-03-31T16:00:47.243071Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session grpc read done: success# 1, data# { init_request { topics_read_settings { topic: "/Root/account1/write_topic" } read_only_original: true consumer: "consumer_aba" read_params { max_read_size: 104857600 } } } 2025-03-31T16:00:47.243286Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_1471598176740694617_v1 read init: from# ipv6:[::1]:44182, request# { init_request { topics_read_settings { topic: "/Root/account1/write_topic" } read_only_original: true consumer: "consumer_aba" read_params { max_read_size: 104857600 } } } 2025-03-31T16:00:47.243477Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer_aba session consumer_aba_3_2_1471598176740694617_v1 auth for : consumer_aba 2025-03-31T16:00:47.244061Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer_aba session consumer_aba_3_2_1471598176740694617_v1 Handle describe topics response 2025-03-31T16:00:47.244169Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer_aba session consumer_aba_3_2_1471598176740694617_v1 auth is DEAD 2025-03-31T16:00:47.244257Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_1471598176740694617_v1 auth ok: topics# 1, initDone# 0 2025-03-31T16:00:47.245535Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_1471598176740694617_v1 register session: topic# /Root/account1/write_topic 2025-03-31T16:00:47.250210Z :INFO: [/Root] [/Root] [78179699-7a99af6b-daaad378-1c9aa516] [null] Server session id: consumer_aba_3_2_1471598176740694617_v1 2025-03-31T16:00:47.250558Z :DEBUG: [/Root] [/Root] [78179699-7a99af6b-daaad378-1c9aa516] [null] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:00:47.247669Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72075186224037894][write_topic] pipe [3:7488004243662593602:2420] connected; active server actors: 1 2025-03-31T16:00:47.251349Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer_aba session consumer_aba_3_2_1471598176740694617_v1 grpc read done: success# 1, data# { read { } } 2025-03-31T16:00:47.251475Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer_aba session consumer_aba_3_2_1471598176740694617_v1 got read request: guid# efc3c8fb-e1fe7238-f28f1086-5143c93d 2025-03-31T16:00:47.252870Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer 
consumer_aba session consumer_aba_3_2_1471598176740694617_v1 assign: record# { Partition: 0 TabletId: 72075186224037893 Topic: "write_topic" Generation: 1 Step: 1 Session: "consumer_aba_3_2_1471598176740694617_v1" ClientId: "consumer_aba" PipeClient { RawX1: 7488004243662593602 RawX2: 4503612512274804 } Path: "/Root/account1/write_topic" } 2025-03-31T16:00:47.251418Z node 4 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037894][write_topic] consumer "consumer_aba" register session for pipe [3:7488004243662593602:2420] session consumer_aba_3_2_1471598176740694617_v1 2025-03-31T16:00:47.253006Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_1471598176740694617_v1 INITING TopicId: Topic /Root/account1/write_topic in database: Root, partition 0(assignId:1) 2025-03-31T16:00:47.253404Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_1471598176740694617_v1 TopicId: Topic /Root/account1/write_topic in database: Root, partition 0(assignId:1) pipe restart attempt 0 pipe creation result: OK TabletId: 72075186224037893 Generation: 1, pipe: [3:7488004243662593604:2423] 2025-03-31T16:00:47.251498Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037894][write_topic] consumer consumer_aba register readable partition 0 2025-03-31T16:00:47.253572Z node 3 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: consumer_aba_3_2_1471598176740694617_v1:1 with generation 1 2025-03-31T16:00:47.251578Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037894][write_topic] consumer consumer_aba family created family=1 (Status=Free, Partitions=[0]) 2025-03-31T16:00:47.251632Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72075186224037894][write_topic] consumer consumer_aba register reading session ReadingSession "consumer_aba_3_2_1471598176740694617_v1" (Sender=[3:7488004243662593599:2420], Pipe=[3:7488004243662593602:2420], Partitions=[], ActiveFamilyCount=0) 2025-03-31T16:00:47.251675Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037894][write_topic] consumer consumer_aba rebalancing was scheduled 2025-03-31T16:00:47.251747Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037894][write_topic] consumer consumer_aba balancing. Sessions=1, Families=1, UnradableFamilies=1 [1 (0), ], RequireBalancing=0 [] 2025-03-31T16:00:47.251814Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037894][write_topic] consumer consumer_aba balancing family=1 (Status=Free, Partitions=[0]) for ReadingSession "consumer_aba_3_2_1471598176740694617_v1" (Sender=[3:7488004243662593599:2420], Pipe=[3:7488004243662593602:2420], Partitions=[], ActiveFamilyCount=0) 2025-03-31T16:00:47.251897Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72075186224037894][write_topic] consumer consumer_aba family 1 status Active partitions [0] session "consumer_aba_3_2_1471598176740694617_v1" sender [3:7488004243662593599:2420] lock partition 0 for ReadingSession "consumer_aba_3_2_1471598176740694617_v1" (Sender=[3:7488004243662593599:2420], Pipe=[3:7488004243662593602:2420], Partitions=[], ActiveFamilyCount=1) generation 1 step 1 2025-03-31T16:00:47.251979Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037894][write_topic] consumer consumer_aba start rebalancing. 
familyCount=1, sessionCount=1, desiredFamilyCount=1, allowPlusOne=0 2025-03-31T16:00:47.252015Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037894][write_topic] consumer consumer_aba balancing duration: 0.000240s 2025-03-31T16:00:47.277088Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer_aba session consumer_aba_3_2_1471598176740694617_v1 TopicId: Topic /Root/account1/write_topic in database: Root, partition 0(assignId:1) initDone 0 event { CmdGetClientOffsetResult { Offset: 0 EndOffset: 1 WriteTimestampMS: 1743436847111 CreateTimestampMS: 1743436847102 SizeLag: 165 WriteTimestampEstimateMS: 1743436847111 } Cookie: 18446744073709551615 } 2025-03-31T16:00:47.277170Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_1471598176740694617_v1 INIT DONE TopicId: Topic /Root/account1/write_topic in database: Root, partition 0(assignId:1) EndOffset 1 readOffset 0 committedOffset 0 2025-03-31T16:00:47.277262Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer_aba session consumer_aba_3_2_1471598176740694617_v1 sending to client partition status Got new read session event: CreatePartitionStream { PartitionStreamId: 1 TopicPath: account1/write_topic Cluster: PartitionId: 0 CommittedOffset: 0 EndOffset: 1 } 2025-03-31T16:00:47.280072Z :INFO: [/Root] [/Root] [78179699-7a99af6b-daaad378-1c9aa516] Closing read session. Close timeout: 0.000000s 2025-03-31T16:00:47.280142Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): null:account1/write_topic:0:1:0:0 2025-03-31T16:00:47.280193Z :INFO: [/Root] [/Root] [78179699-7a99af6b-daaad378-1c9aa516] Counters: { Errors: 0 CurrentSessionLifetimeMs: 53 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-31T16:00:47.280294Z :NOTICE: [/Root] [/Root] [78179699-7a99af6b-daaad378-1c9aa516] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-03-31T16:00:47.280333Z :DEBUG: [/Root] [/Root] [78179699-7a99af6b-daaad378-1c9aa516] [null] Abort session to cluster 2025-03-31T16:00:47.280750Z :NOTICE: [/Root] [/Root] [78179699-7a99af6b-daaad378-1c9aa516] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-03-31T16:00:47.283286Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer_aba session consumer_aba_3_2_1471598176740694617_v1 grpc read done: success# 0, data# { } 2025-03-31T16:00:47.283320Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_1471598176740694617_v1 grpc read failed 2025-03-31T16:00:47.283350Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_1471598176740694617_v1 grpc closed 2025-03-31T16:00:47.283402Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_1471598176740694617_v1 is DEAD 2025-03-31T16:00:47.285846Z node 3 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: consumer_aba_3_2_1471598176740694617_v1 2025-03-31T16:00:47.287285Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72075186224037894][write_topic] pipe [3:7488004243662593602:2420] disconnected; active server actors: 1 2025-03-31T16:00:47.287343Z node 4 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037894][write_topic] pipe [3:7488004243662593602:2420] client consumer_aba disconnected session consumer_aba_3_2_1471598176740694617_v1 2025-03-31T16:00:47.367973Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7488004243662593617:2428], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:00:47.370602Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YzhiNzk5YzUtN2NjOGE3OTQtODEzZGI2M2ItMTcxZDhkYzE=, ActorId: [3:7488004243662593610:2424], ActorState: ExecuteState, TraceId: 01jqpey66a8791kctn54jn9vnb, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:00:47.371117Z node 3 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } >> TPersQueueTest::WriteEmptyData [GOOD] >> TPersQueueTest::WriteNonExistingPartition |70.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> SystemView::PartitionStatsOneSchemeShardDataQuery [GOOD] >> SystemView::Nodes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::CreateDropStandaloneTableDefaultSharding [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-31T16:00:34.919516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:00:34.919628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:00:34.919692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:00:34.919741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:00:34.919798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:00:34.919829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:00:34.919890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:00:34.919978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:00:34.920297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:00:35.099665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to 
console configs 2025-03-31T16:00:35.099737Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:00:35.116907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:00:35.117326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:00:35.117480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:00:35.155449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:00:35.155798Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:00:35.156490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:00:35.156929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:00:35.159383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:00:35.160733Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:00:35.160796Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:00:35.160928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:00:35.160994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:00:35.161039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:00:35.161250Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:00:35.192756Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-03-31T16:00:35.507552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:00:35.507798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:00:35.508005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:00:35.508284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:00:35.508393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:00:35.512355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:00:35.512494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 
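The ProgressState records that follow here, and again in the persqueue test logs later in this section, trace schemeshard's per-suboperation state machine. Reading the "Change state for txid ... X -> Y" lines against the phase names printed next to them: state 2 belongs to TCreateParts (advanced immediately here, since there are "no shards to create"), 3 to TConfigureParts, 128 to NSubDomainState::TPropose (left when TEvOperationPlan arrives from the coordinator), and 240 to TDone, which publishes the path to the scheme board and notifies waiters. The numeric codes are internal to schemeshard; this mapping is inferred from the adjacent log lines, not taken from the source.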
2025-03-31T16:00:35.512763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:00:35.512836Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:00:35.512870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:00:35.512901Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:00:35.515036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:00:35.515131Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:00:35.515172Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:00:35.517180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:00:35.517230Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:00:35.517268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:00:35.517323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:00:35.521126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:00:35.523458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:00:35.523639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:00:35.524705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:00:35.524848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:00:35.524920Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:00:35.525185Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:00:35.525230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:00:35.525400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:00:35.525488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no 
IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:00:35.527714Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:00:35.527765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:00:35.527936Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:00:35.528014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:00:35.528434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:00:35.528484Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:00:35.528583Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:00:35.528635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:00:35.528686Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:00:35.528746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:00:35.528794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:00:35.528834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:00:35.528868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:00:35.528895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:00:35.528964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:00:35.529001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:00:35.529042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:00:35.531748Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:00:35.531871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:00:35.531912Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
pipe to deleted shardIdx 72057594046678944:50 tabletId 72075186233409595 2025-03-31T16:00:49.978695Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:52 2025-03-31T16:00:49.978721Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:52 tabletId 72075186233409597 2025-03-31T16:00:49.978841Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-31T16:00:49.978903Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-03-31T16:00:49.979006Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-31T16:00:49.980802Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:46 2025-03-31T16:00:49.980841Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:46 tabletId 72075186233409591 2025-03-31T16:00:49.981558Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:48 2025-03-31T16:00:49.981590Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:48 tabletId 72075186233409593 2025-03-31T16:00:49.982094Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:42 2025-03-31T16:00:49.982133Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:42 tabletId 72075186233409587 2025-03-31T16:00:49.983438Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:44 2025-03-31T16:00:49.983477Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:44 tabletId 72075186233409589 2025-03-31T16:00:49.983634Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:65 2025-03-31T16:00:49.983662Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:65 tabletId 72075186233409610 2025-03-31T16:00:49.983769Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:63 2025-03-31T16:00:49.983794Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:63 tabletId 72075186233409608 2025-03-31T16:00:49.983851Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:61 2025-03-31T16:00:49.983874Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:61 tabletId 72075186233409606 2025-03-31T16:00:49.983992Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:59 2025-03-31T16:00:49.984034Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:59 tabletId 72075186233409604 2025-03-31T16:00:49.986033Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6 2025-03-31T16:00:49.986070Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2025-03-31T16:00:49.986528Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-03-31T16:00:49.986559Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-03-31T16:00:49.987672Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-03-31T16:00:49.987707Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-03-31T16:00:49.987860Z node 3 
:FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:23 2025-03-31T16:00:49.987890Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:23 tabletId 72075186233409568 2025-03-31T16:00:49.987989Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:21 2025-03-31T16:00:49.988016Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:21 tabletId 72075186233409566 2025-03-31T16:00:49.988097Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:19 2025-03-31T16:00:49.988123Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:19 tabletId 72075186233409564 2025-03-31T16:00:49.988214Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:17 2025-03-31T16:00:49.988240Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:17 tabletId 72075186233409562 2025-03-31T16:00:49.988330Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:15 2025-03-31T16:00:49.988357Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:15 tabletId 72075186233409560 2025-03-31T16:00:49.989352Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:13 2025-03-31T16:00:49.989386Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:13 tabletId 72075186233409558 2025-03-31T16:00:49.989451Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:11 2025-03-31T16:00:49.989476Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:11 tabletId 72075186233409556 2025-03-31T16:00:49.989564Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:9 2025-03-31T16:00:49.989588Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:9 tabletId 72075186233409554 2025-03-31T16:00:49.989667Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:36 2025-03-31T16:00:49.989692Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:36 tabletId 72075186233409581 2025-03-31T16:00:49.989785Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:38 2025-03-31T16:00:49.989810Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:38 tabletId 72075186233409583 2025-03-31T16:00:49.989884Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:40 2025-03-31T16:00:49.989908Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:40 tabletId 72075186233409585 2025-03-31T16:00:49.994131Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:32 2025-03-31T16:00:49.994184Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:32 tabletId 72075186233409577 2025-03-31T16:00:49.994287Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:34 2025-03-31T16:00:49.994311Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:34 tabletId 72075186233409579 2025-03-31T16:00:49.994387Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:28 2025-03-31T16:00:49.994412Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:28 tabletId 72075186233409573 2025-03-31T16:00:49.994479Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:30 2025-03-31T16:00:49.994504Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted 
shardIdx 72057594046678944:30 tabletId 72075186233409575 2025-03-31T16:00:49.994578Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:26 2025-03-31T16:00:49.994602Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:26 tabletId 72075186233409571 2025-03-31T16:00:49.994660Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:24 2025-03-31T16:00:49.994683Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:24 tabletId 72075186233409569 2025-03-31T16:00:49.994748Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:57 2025-03-31T16:00:49.994772Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:57 tabletId 72075186233409602 2025-03-31T16:00:49.994843Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:53 2025-03-31T16:00:49.994870Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:53 tabletId 72075186233409598 2025-03-31T16:00:49.999637Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:55 2025-03-31T16:00:49.999694Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:55 tabletId 72075186233409600 2025-03-31T16:00:49.999792Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:49 2025-03-31T16:00:49.999816Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:49 tabletId 72075186233409594 2025-03-31T16:00:49.999883Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:47 2025-03-31T16:00:49.999905Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:47 tabletId 72075186233409592 2025-03-31T16:00:49.999987Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:51 2025-03-31T16:00:50.000013Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:51 tabletId 72075186233409596 2025-03-31T16:00:50.000075Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:45 2025-03-31T16:00:50.000100Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:45 tabletId 72075186233409590 2025-03-31T16:00:50.000169Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:43 2025-03-31T16:00:50.000194Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:43 tabletId 72075186233409588 2025-03-31T16:00:50.000259Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:41 2025-03-31T16:00:50.000305Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:41 tabletId 72075186233409586 2025-03-31T16:00:50.000431Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 105 2025-03-31T16:00:50.001640Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyDir/ColumnTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:00:50.001873Z node 3 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyDir/ColumnTable" took 276us result status StatusPathDoesNotExist 2025-03-31T16:00:50.002040Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: 
Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyDir/ColumnTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/MyDir\' (id: [OwnerId: 72057594046678944, LocalPathId: 2])" Path: "/MyRoot/MyDir/ColumnTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/MyDir" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "MyDir" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-03-31T16:00:50.002704Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 4 SchemeshardId: 72057594046678944 Options { }, at schemeshard: 72057594046678944 2025-03-31T16:00:50.002794Z node 3 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 4 took 106us result status StatusPathDoesNotExist 2025-03-31T16:00:50.002882Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'\', error: path is empty" Path: "" PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |70.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |70.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |70.9%| [TA] $(B)/ydb/core/tx/datashard/ut_minstep/test-results/unittest/{meta.json ... results_accumulator.log} >> DataShardSnapshots::ShardRestartAfterDropTable [GOOD] >> DataShardSnapshots::ShardRestartAfterDropTableAndAbort |70.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithValidTokenButWithoutACL_SessionClosedWithUnauthorizedError [GOOD] Test command err: === Server->StartServer(false); 2025-03-31T16:00:27.692062Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004158409235597:2147];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:00:27.692109Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:00:27.894870Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488004156323395464:2139];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:00:27.894917Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:00:28.409190Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-31T16:00:28.429074Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a43/r3tmp/tmpnT7lzT/pdisk_1.dat 2025-03-31T16:00:28.723261Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 
2025-03-31T16:00:28.897884Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:00:29.496812Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:00:29.554353Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:00:29.554439Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:00:29.568702Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:00:29.568798Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:00:29.594519Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:00:29.594746Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:00:29.597332Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24623, node 1 2025-03-31T16:00:30.023755Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/0l5c/000a43/r3tmp/yandex4tBflG.tmp 2025-03-31T16:00:30.023780Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/0l5c/000a43/r3tmp/yandex4tBflG.tmp 2025-03-31T16:00:30.023918Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/0l5c/000a43/r3tmp/yandex4tBflG.tmp 2025-03-31T16:00:30.024027Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:00:30.144499Z INFO: TTestServer started on Port 24559 GrpcPort 24623 TClient is connected to server localhost:24559 PQClient connected to localhost:24623 === TenantModeEnabled() = 1 === Init PQ - start server on port 24623 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:00:31.137493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-03-31T16:00:31.137678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:00:31.137903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-03-31T16:00:31.138171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-31T16:00:31.138215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:00:31.147408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-03-31T16:00:31.147558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-03-31T16:00:31.147735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:00:31.147780Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-03-31T16:00:31.147796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-03-31T16:00:31.147817Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-03-31T16:00:31.151183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:00:31.151232Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-03-31T16:00:31.151249Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-03-31T16:00:31.155589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:00:31.155616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2025-03-31T16:00:31.155645Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:00:31.163952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:00:31.163988Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:00:31.164018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-31T16:00:31.164115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-03-31T16:00:31.172703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:00:31.177861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-03-31T16:00:31.178049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-03-31T16:00:31.192398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743436831227, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-31T16:00:31.192542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743436831227 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-03-31T16:00:31.192582Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-31T16:00:31.192868Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-03-31T16:00:31.192898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-31T16:00:31.193038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-03-31T16:00:31.193113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, 
LocalPathId: 1], at schemeshard: 72057594046644480 2025-03-31T16:00:31.204094Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-03-31T16:00:31.204126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-03-31T16:00:31.204301Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-03-31T16:00:31.204317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7488004166999170775:2394], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1 2025-03-31T16:00:31.204363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:00:31.204391Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-03-31T16:00:31.204478Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-03-31T16:00:31.204489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-03-31T16:00:31.204506Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-03-31T16:00:31.204530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-03-31T16:00:31.204548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 1/1, is published: false 2025-03-31T16:00:31.204568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TO ... 0 data: 2025-03-31T16:00:48.442255Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: 12345678|8433964f-b202cf7b-3e6a0aa6-ba1c99ca_0 grpc read failed 2025-03-31T16:00:48.442435Z node 3 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 1 sessionId: 12345678|8433964f-b202cf7b-3e6a0aa6-ba1c99ca_0 2025-03-31T16:00:48.442450Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: 12345678|8433964f-b202cf7b-3e6a0aa6-ba1c99ca_0 is DEAD 2025-03-31T16:00:48.442674Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison Finish: 0 === InitializeWritePQService done === PersQueueClient === InitializePQ completed BEFORE MODIFY PERMISSIONS 2025-03-31T16:00:48.477575Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root/acc" OperationType: ESchemeOpModifyACL ModifyACL { Name: "topic1" DiffACL: "\n\027\010\001\022\023\032\021test_user@builtin\n\037\010\000\022\033\010\001\020\366\213\001\032\021test_user@builtin \003" } } TxId: 281474976715665 TabletId: 72057594046644480 PeerName: "ipv6:[::1]:36024" , at schemeshard: 72057594046644480 2025-03-31T16:00:48.477741Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root/acc/topic1, operationId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:00:48.477851Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046644480, LocalPathId: 10] name: topic1 type: EPathTypePersQueueGroup state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046644480, LocalPathId: 9] 2025-03-31T16:00:48.477860Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046644480, LocalPathId: 10] 2025-03-31T16:00:48.477979Z node 3 
:FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715665:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2025-03-31T16:00:48.477997Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:00:48.478065Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715665:0 progress is 1/1 2025-03-31T16:00:48.478076Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715665 ready parts: 1/1 2025-03-31T16:00:48.478094Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715665:0 progress is 1/1 2025-03-31T16:00:48.478102Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715665 ready parts: 1/1 2025-03-31T16:00:48.478142Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 10] was 3 2025-03-31T16:00:48.478190Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715665, ready parts: 1/1, is published: false 2025-03-31T16:00:48.478206Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 10], at schemeshard: 72057594046644480 2025-03-31T16:00:48.478216Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715665 ready parts: 1/1 2025-03-31T16:00:48.478226Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715665:0 2025-03-31T16:00:48.478235Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715665, publications: 1, subscribers: 0 2025-03-31T16:00:48.478245Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976715665, [OwnerId: 72057594046644480, LocalPathId: 10], 3 2025-03-31T16:00:48.491598Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715665, response: Status: StatusSuccess TxId: 281474976715665 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2025-03-31T16:00:48.491842Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715665, database: /Root, subject: , status: StatusSuccess, operation: MODIFY ACL, path: /Root/acc/topic1, add access: +W:test_user@builtin, remove access: -():test_user@builtin:- 2025-03-31T16:00:48.492040Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-03-31T16:00:48.492058Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715665, path id: [OwnerId: 72057594046644480, LocalPathId: 10] 2025-03-31T16:00:48.492254Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-03-31T16:00:48.492271Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:7488004217048527353:2363], at schemeshard: 72057594046644480, txId: 281474976715665, path id: 10 2025-03-31T16:00:48.493395Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 10 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715665 2025-03-31T16:00:48.493468Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 
2 LocalPathId: 10 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715665 2025-03-31T16:00:48.493488Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715665 2025-03-31T16:00:48.493505Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715665, pathId: [OwnerId: 72057594046644480, LocalPathId: 10], version: 3 2025-03-31T16:00:48.493520Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 10] was 4 2025-03-31T16:00:48.493619Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715665, subscribers: 0 2025-03-31T16:00:48.496364Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715665 2025-03-31T16:00:48.504108Z node 3 :PQ_WRITE_PROXY DEBUG: new grpc connection 2025-03-31T16:00:48.504135Z node 3 :PQ_WRITE_PROXY DEBUG: new session created cookie 2 2025-03-31T16:00:48.504805Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: grpc read done: success: 1 data: init_request { topic: "/Root/acc/topic1" message_group_id: "test-message-group" } 2025-03-31T16:00:48.504931Z node 3 :PQ_WRITE_PROXY INFO: session request cookie: 2 topic: "/Root/acc/topic1" message_group_id: "test-message-group" from ipv6:[::1]:35994 2025-03-31T16:00:48.504945Z node 3 :PQ_WRITE_PROXY INFO: write session: cookie=2 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:35994 proto=v1 topic=/Root/acc/topic1 durationSec=0 2025-03-31T16:00:48.504954Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2025-03-31T16:00:48.505973Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: describe result for acl check 2025-03-31T16:00:48.506170Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2025-03-31T16:00:48.506179Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-03-31T16:00:48.506239Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-03-31T16:00:48.506278Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7488004247113299555:2398] (SourceId=test-message-group, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2025-03-31T16:00:48.506299Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. 
session cookie: 2 sessionId: partition: 0 expectedGeneration: (NULL) 2025-03-31T16:00:48.507152Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 3, Generation: 1 2025-03-31T16:00:48.507379Z node 3 :PERSQUEUE INFO: new Cookie test-message-group|c313b2e9-c3c48692-bebbae74-15e1140c_0 generated for partition 0 topic 'acc/topic1' owner test-message-group 2025-03-31T16:00:48.507763Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 2 partition: 0 MaxSeqNo: 0 sessionId: test-message-group|c313b2e9-c3c48692-bebbae74-15e1140c_0 2025-03-31T16:00:48.515534Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: test-message-group|c313b2e9-c3c48692-bebbae74-15e1140c_0 grpc read done: success: 1 data: update_token_request [content omitted] 2025-03-31T16:00:48.515954Z node 3 :PQ_WRITE_PROXY INFO: updating token 2025-03-31T16:00:48.515998Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2025-03-31T16:00:48.517082Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: test-message-group|c313b2e9-c3c48692-bebbae74-15e1140c_0 describe result for acl check 2025-03-31T16:00:48.517178Z node 3 :PQ_WRITE_PROXY INFO: session v1 error cookie: 2 reason: access to topic 'Topic /Root/acc/topic1 in database: /Root' denied for 'test_user_2@builtin' due to 'no WriteTopic rights', Marker# PQ1125 sessionId: test-message-group|c313b2e9-c3c48692-bebbae74-15e1140c_0 2025-03-31T16:00:48.517465Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: test-message-group|c313b2e9-c3c48692-bebbae74-15e1140c_0 is DEAD 2025-03-31T16:00:48.517706Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-03-31T16:00:48.820156Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7488004247113299569:2406], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:00:48.821992Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YzU4ZTM3MzEtNTk2ZGI0ZDUtM2U1MTQ1LWRkOTUyZmJk, ActorId: [3:7488004247113299562:2402], ActorState: ExecuteState, TraceId: 01jqpey7mjezja2qznqq0qhdsq, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:00:48.823054Z node 3 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } |70.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> TPersQueueCommonTest::Auth_WriteSessionWithValidTokenAndACEAndThenRemoveACEAndSendWriteRequest_SessionClosedWithUnauthorizedErrorAfterSuccessfullWriteResponse [GOOD] >> TProxyActorTest::TestAttachSession |70.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> TPersQueueTest::DirectReadBadCases [GOOD] >> TPersQueueTest::DirectReadStop >> KqpJoinOrder::FiveWayJoinWithConstantFold+ColumnStore [GOOD] >> TPQCDTest::TestRelatedServicesAreRunning [GOOD] >> KqpJoinOrder::TPCH9_100 [GOOD] >> TPersQueueCommonTest::TestLimiterLimitsWithUserPayloadRateLimit [GOOD] >> TProxyActorTest::TestAttachSession [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueCommonTest::Auth_WriteSessionWithValidTokenAndACEAndThenRemoveACEAndSendWriteRequest_SessionClosedWithUnauthorizedErrorAfterSuccessfullWriteResponse [GOOD] Test command err: === Server->StartServer(false); 2025-03-31T16:00:29.637410Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004164198852336:2279];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:00:29.637473Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:00:29.664966Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488004167125839092:2132];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:00:29.665034Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:00:30.270821Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a1e/r3tmp/tmpjoaHrt/pdisk_1.dat 2025-03-31T16:00:30.364640Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-31T16:00:30.699290Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:00:30.717513Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:00:31.182458Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:00:31.183221Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:00:31.183779Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:00:31.183837Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:00:31.216477Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:00:31.216626Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:00:31.230861Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:00:31.294438Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9858, node 1 2025-03-31T16:00:31.739478Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:00:31.766663Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:00:31.766712Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:00:31.782977Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:00:31.791729Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/0l5c/000a1e/r3tmp/yandexpeQ2YR.tmp 2025-03-31T16:00:31.791751Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/0l5c/000a1e/r3tmp/yandexpeQ2YR.tmp 2025-03-31T16:00:31.791900Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/0l5c/000a1e/r3tmp/yandexpeQ2YR.tmp 2025-03-31T16:00:31.792006Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:00:31.854522Z INFO: TTestServer started on Port 14576 GrpcPort 9858 TClient is connected to server localhost:14576 PQClient connected to localhost:9858 === TenantModeEnabled() = 1 === Init PQ - start server on port 9858 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:00:33.080424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-03-31T16:00:33.080652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:00:33.080849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-03-31T16:00:33.081050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-31T16:00:33.081098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:00:33.084283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-03-31T16:00:33.084397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-03-31T16:00:33.084571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:00:33.084646Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-03-31T16:00:33.084665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-03-31T16:00:33.084677Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2025-03-31T16:00:33.087303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:00:33.087337Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-03-31T16:00:33.087363Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 waiting... 
2025-03-31T16:00:33.092379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:00:33.092420Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:00:33.092459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-31T16:00:33.092484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-03-31T16:00:33.100986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:00:33.101394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:00:33.101415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2025-03-31T16:00:33.101436Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:00:33.107872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-03-31T16:00:33.108031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-03-31T16:00:33.113196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743436833152, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-31T16:00:33.113341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743436833152 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-03-31T16:00:33.113392Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-31T16:00:33.113643Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-03-31T16:00:33.113677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-31T16:00:33.113826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-03-31T16:00:33.113880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-03-31T16:00:33.129115Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-03-31T16:00:33.129142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-03-31T16:00:33.129357Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-03-31T16:00:33.129373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7488004172788787456:2441], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1 2025-03-31T16:00:33.129419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:00:33.129451Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-03-31T16:00:33.129533Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#2814749767106 ... Query: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-03-31T16:00:50.063290Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-03-31T16:00:50.063323Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7488004253840490760:2402] (SourceId=test-group-id, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2025-03-31T16:00:50.063343Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. 
session cookie: 2 sessionId: partition: 0 expectedGeneration: (NULL) 2025-03-31T16:00:50.064259Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 3, Generation: 1 2025-03-31T16:00:50.064377Z node 3 :PERSQUEUE INFO: new Cookie test-group-id|613c7c38-7835167c-ae45fe2e-92ca2159_0 generated for partition 0 topic 'acc/topic1' owner test-group-id 2025-03-31T16:00:50.066392Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 2 partition: 0 MaxSeqNo: 0 sessionId: test-group-id|613c7c38-7835167c-ae45fe2e-92ca2159_0 ===Assert streaming op1 ===Assert streaming op2 2025-03-31T16:00:50.073203Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: test-group-id|613c7c38-7835167c-ae45fe2e-92ca2159_0 grpc read done: success: 1 data: write_request[data omitted] 2025-03-31T16:00:50.073512Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NKikimr::NPQ::TEvPartitionWriter::TEvWriteRequest 2025-03-31T16:00:50.075540Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-03-31T16:00:50.094549Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse ===ModifyAcl BEFORE MODIFY PERMISSIONS 2025-03-31T16:00:50.104967Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root/acc" OperationType: ESchemeOpModifyACL ModifyACL { Name: "topic1" DiffACL: "\n\031\010\001\022\025\032\023test_user_0@builtin" } } TxId: 281474976715666 TabletId: 72057594046644480 PeerName: "ipv6:[::1]:58822" , at schemeshard: 72057594046644480 2025-03-31T16:00:50.105134Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root/acc/topic1, operationId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:00:50.105229Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046644480, LocalPathId: 10] name: topic1 type: EPathTypePersQueueGroup state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046644480, LocalPathId: 9] 2025-03-31T16:00:50.105237Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046644480, LocalPathId: 10] 2025-03-31T16:00:50.105356Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715666:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2025-03-31T16:00:50.105372Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:00:50.105435Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715666:0 progress is 1/1 2025-03-31T16:00:50.105443Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715666 ready parts: 1/1 2025-03-31T16:00:50.105460Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715666:0 progress is 1/1 2025-03-31T16:00:50.105467Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715666 ready parts: 1/1 2025-03-31T16:00:50.105501Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 10] was 3 2025-03-31T16:00:50.105537Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715666, 
ready parts: 1/1, is published: false 2025-03-31T16:00:50.105552Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 10], at schemeshard: 72057594046644480 2025-03-31T16:00:50.105578Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715666 ready parts: 1/1 2025-03-31T16:00:50.105597Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715666:0 2025-03-31T16:00:50.105610Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715666, publications: 1, subscribers: 0 2025-03-31T16:00:50.105619Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976715666, [OwnerId: 72057594046644480, LocalPathId: 10], 4 2025-03-31T16:00:50.110146Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715666, response: Status: StatusSuccess TxId: 281474976715666 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2025-03-31T16:00:50.110346Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715666, database: /Root, subject: , status: StatusSuccess, operation: MODIFY ACL, path: /Root/acc/topic1, remove access: -():test_user_0@builtin:- 2025-03-31T16:00:50.110514Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-03-31T16:00:50.110526Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715666, path id: [OwnerId: 72057594046644480, LocalPathId: 10] 2025-03-31T16:00:50.110686Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-03-31T16:00:50.110701Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:7488004223775718576:2380], at schemeshard: 72057594046644480, txId: 281474976715666, path id: 10 2025-03-31T16:00:50.111873Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 10 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715666 2025-03-31T16:00:50.111948Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 10 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715666 2025-03-31T16:00:50.111967Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715666 2025-03-31T16:00:50.111983Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715666, pathId: [OwnerId: 72057594046644480, LocalPathId: 10], version: 4 2025-03-31T16:00:50.111998Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 10] was 4 2025-03-31T16:00:50.112086Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715666, subscribers: 0 ===Wait for session created with token with removed ACE to die2025-03-31T16:00:50.118443Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715666 2025-03-31T16:00:50.400925Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7488004253840490787:2407], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:00:50.401557Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ODg5MTI4NTAtZTUzYzRlOGEtMjMxYzAwMjgtMjc0NmE2ZDM=, ActorId: [3:7488004253840490785:2406], ActorState: ExecuteState, TraceId: 01jqpey958594hfg57f5ns2wrt, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:00:50.402046Z node 3 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T16:00:51.067324Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2025-03-31T16:00:51.068598Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: test-group-id|613c7c38-7835167c-ae45fe2e-92ca2159_0 describe result for acl check 2025-03-31T16:00:51.068745Z node 3 :PQ_WRITE_PROXY INFO: session v1 error cookie: 2 reason: access to topic 'Topic /Root/acc/topic1 in database: /Root' denied for 'test_user_0@builtin' due to 'no WriteTopic rights', Marker# PQ1125 sessionId: test-group-id|613c7c38-7835167c-ae45fe2e-92ca2159_0 2025-03-31T16:00:51.069119Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: test-group-id|613c7c38-7835167c-ae45fe2e-92ca2159_0 is DEAD 2025-03-31T16:00:51.069405Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison status: UNAUTHORIZED issues { message: "access to topic \'Topic /Root/acc/topic1 in database: /Root\' denied for \'test_user_0@builtin\' due to \'no WriteTopic rights\', Marker# PQ1125" issue_code: 500018 severity: 1 } 2025-03-31T16:00:51.455562Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7488004258135458108:2415], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:00:51.457457Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZTE1ZTkwODMtMjU3MTU0N2QtYmEyMmI5YjctNDMwNWUxYzY=, ActorId: [3:7488004258135458106:2414], ActorState: ExecuteState, TraceId: 01jqpeya6ha7rv3snbayfkrhj2, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:00:51.461805Z node 3 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } |70.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> ObjectStorageListingTest::ListingNoFilter [GOOD] |70.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |70.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> TPQCDTest::TestCloudClientsAreConsistentlyDistributed [GOOD] |70.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestAttachSession [GOOD] |70.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestCreateSemaphore ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestRelatedServicesAreRunning [GOOD] Test command err: 2025-03-31T16:00:45.719956Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004236199919493:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:00:45.742548Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0016dd/r3tmp/tmpPmlkYD/pdisk_1.dat 2025-03-31T16:00:46.458492Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:00:46.458573Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:00:46.460362Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61842, node 1 2025-03-31T16:00:46.555190Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:00:46.855758Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/0l5c/0016dd/r3tmp/yandexrxPLU8.tmp 2025-03-31T16:00:46.855781Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/0l5c/0016dd/r3tmp/yandexrxPLU8.tmp 2025-03-31T16:00:46.855949Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/0l5c/0016dd/r3tmp/yandexrxPLU8.tmp 2025-03-31T16:00:46.856063Z node 1 
:NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6024 PQClient connected to localhost:61842 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:00:47.549795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:00:47.587443Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:00:47.619483Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-03-31T16:00:50.709782Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488004236199919493:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:00:50.709851Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:00:51.154377Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004261969723873:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:00:51.154520Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:00:51.156094Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004261969723886:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:00:51.160559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-03-31T16:00:51.188694Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488004261969723888:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-03-31T16:00:51.593671Z node 1 :TX_PROXY ERROR: Actor# [1:7488004261969723954:2401] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:00:51.637601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:00:51.839864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:00:51.844158Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488004261969723963:2344], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:00:51.846058Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=M2MyZjE1YzctMzQ2ZjUyNTQtMTVhN2Y1NDYtMzNjNDFkMzc=, ActorId: [1:7488004261969723863:2332], ActorState: ExecuteState, TraceId: 01jqpey9y12n4ddfxh23zdj0gv, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:00:51.856503Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T16:00:52.017978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-03-31T16:00:52.376359Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jqpeyaxe3fkh4prthbd1ndh9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTFmMTAwZDgtMjU5YTZlYS0yMTg1ZmE4Yy02ZDNhNWQ0OA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root |70.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> TOlapNaming::AlterColumnTableFailed [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueCommonTest::TestLimiterLimitsWithUserPayloadRateLimit [GOOD] Test command err: === Server->StartServer(false); 2025-03-31T16:00:28.733618Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004160035382419:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:00:28.733678Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:00:29.501054Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-31T16:00:29.590269Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a05/r3tmp/tmp99o6V6/pdisk_1.dat 2025-03-31T16:00:29.768185Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:00:29.824574Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:00:30.391421Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:00:30.394085Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:00:30.394184Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:00:30.396347Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:00:30.396413Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:00:30.421467Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:00:30.439213Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:00:30.466102Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8889, node 1 2025-03-31T16:00:30.815867Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/0l5c/000a05/r3tmp/yandexPj96e6.tmp 2025-03-31T16:00:30.815893Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/0l5c/000a05/r3tmp/yandexPj96e6.tmp 2025-03-31T16:00:30.816052Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/0l5c/000a05/r3tmp/yandexPj96e6.tmp 2025-03-31T16:00:30.816176Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:00:30.949868Z INFO: TTestServer started on Port 3586 GrpcPort 8889 TClient is connected to server localhost:3586 PQClient connected to localhost:8889 === TenantModeEnabled() = 1 === Init PQ - start server on port 8889 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:00:32.217244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-03-31T16:00:32.217523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:00:32.217747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-03-31T16:00:32.217976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-31T16:00:32.218060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:00:32.236268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-03-31T16:00:32.236463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-03-31T16:00:32.236680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:00:32.236752Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-03-31T16:00:32.236770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-03-31T16:00:32.236783Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
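The "WaitRootIsUp 'Root'... / WaitRootIsUp 'Root' success." pair above suggests a poll-until-ready loop around TClient::Ls. The sketch below is a hypothetical shape of such a readiness poll; the lsSucceeds callback is a stand-in, and the real TClient API is not reproduced.

#include <chrono>
#include <functional>
#include <iostream>
#include <thread>

// Keep issuing an Ls("Root")-style probe until it reports SUCCESS,
// or give up after a fixed attempt budget.
bool WaitRootIsUp(const std::function<bool()>& lsSucceeds,
                  int maxAttempts = 100,
                  std::chrono::milliseconds delay = std::chrono::milliseconds(100)) {
    for (int attempt = 0; attempt < maxAttempts; ++attempt) {
        if (lsSucceeds()) {
            std::cout << "WaitRootIsUp 'Root' success.\n";
            return true;
        }
        std::this_thread::sleep_for(delay);  // back off before the next probe
    }
    return false;  // root never came up within the attempt budget
}

int main() {
    int calls = 0;
    // Simulate a root path that becomes visible on the third Ls request.
    WaitRootIsUp([&] { return ++calls >= 3; });
}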
2025-03-31T16:00:32.245417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:00:32.245485Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-03-31T16:00:32.245520Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-03-31T16:00:32.246864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:00:32.246888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2025-03-31T16:00:32.246913Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:00:32.251958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:00:32.251994Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:00:32.252038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-31T16:00:32.252065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-03-31T16:00:32.312096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:00:32.314501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-03-31T16:00:32.314658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-03-31T16:00:32.322876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743436832361, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-31T16:00:32.323098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743436832361 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-03-31T16:00:32.323159Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-31T16:00:32.323507Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-03-31T16:00:32.323540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-31T16:00:32.323708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-03-31T16:00:32.323772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, 
LocalPathId: 1], at schemeshard: 72057594046644480 2025-03-31T16:00:32.344564Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-03-31T16:00:32.344596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-03-31T16:00:32.344813Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-03-31T16:00:32.344844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7488004168625317671:2397], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1 2025-03-31T16:00:32.344898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:00:32.344924Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-03-31T16:00:32.345008Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-03-31T16:00:32.345019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-03-31T16:00:32.345037Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-03-31T16:00:32.345044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-03-31T16:00:32.345062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 1/1, is published: false 2025-03-31T16:00:32.345083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-03-31T16:00:32.345134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-03-31T16:00:32.345144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710657:0 2025-03-31T16:00:32.345204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [Owner ... --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-03-31T16:00:52.651590Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037893] server connected, pipe [3:7488004262975673687:2420], now have 1 active actors on pipe 2025-03-31T16:00:52.644567Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7488004262975673684:2420] (SourceId=123, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2025-03-31T16:00:52.644587Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. 
session cookie: 4 sessionId: partition: 0 expectedGeneration: (NULL) 2025-03-31T16:00:52.651993Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037893 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037893, NodeId 4, Generation: 1 2025-03-31T16:00:52.652542Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-03-31T16:00:52.652574Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037893] got client message batch for topic 'PQ/account/topic' partition 0 2025-03-31T16:00:52.652658Z node 4 :PERSQUEUE INFO: new Cookie 123|c4fb17d0-291db862-ebb7e9bf-f3886435_0 generated for partition 0 topic 'PQ/account/topic' owner 123 2025-03-31T16:00:52.652759Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2025-03-31T16:00:52.652809Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-03-31T16:00:52.653586Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-03-31T16:00:52.653613Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037893] got client message batch for topic 'PQ/account/topic' partition 0 2025-03-31T16:00:52.653679Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-03-31T16:00:52.654126Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 4 partition: 0 MaxSeqNo: 2 sessionId: 123|c4fb17d0-291db862-ebb7e9bf-f3886435_0 2025-03-31T16:00:52.654985Z :INFO: [] MessageGroupId [123] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1743436852654 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-31T16:00:52.655114Z :INFO: [] MessageGroupId [123] SessionId [] Write session established. Init response: last_sequence_number: 2 session_id: "123|c4fb17d0-291db862-ebb7e9bf-f3886435_0" topic: "PQ/account/topic" 2025-03-31T16:00:52.655425Z :DEBUG: [] MessageGroupId [123] SessionId [123|c4fb17d0-291db862-ebb7e9bf-f3886435_0] Write 1 messages with Id from 1 to 1 2025-03-31T16:00:52.655564Z :DEBUG: [] MessageGroupId [123] SessionId [123|c4fb17d0-291db862-ebb7e9bf-f3886435_0] Write session: try to update token 2025-03-31T16:00:52.655606Z :DEBUG: [] MessageGroupId [123] SessionId [123|c4fb17d0-291db862-ebb7e9bf-f3886435_0] Send 1 message(s) (0 left), first sequence number is 3 2025-03-31T16:00:52.655929Z :INFO: [] MessageGroupId [123] SessionId [123|c4fb17d0-291db862-ebb7e9bf-f3886435_0] Write session: close. 
Timeout = 10000 ms 2025-03-31T16:00:52.659658Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: 123|c4fb17d0-291db862-ebb7e9bf-f3886435_0 grpc read done: success: 1 data: write_request[data omitted] 2025-03-31T16:00:52.660008Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037893 (partition=0) Received event: NKikimr::NPQ::TEvPartitionWriter::TEvWriteRequest 2025-03-31T16:00:52.660736Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-03-31T16:00:52.660770Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037893] got client message batch for topic 'PQ/account/topic' partition 0 2025-03-31T16:00:52.660869Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 1 2025-03-31T16:00:52.661263Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037893 (partition=0) Received event: NActors::IEventHandle 2025-03-31T16:00:52.661648Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-03-31T16:00:52.661671Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037893] got client message batch for topic 'PQ/account/topic' partition 0 2025-03-31T16:00:52.661729Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037893] got client message topic: PQ/account/topic partition: 0 SourceId: '\000123' SeqNo: 3 partNo : 0 messageNo: 1 size 372 offset: -1 2025-03-31T16:00:52.661802Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Send write quota request. Topic: "PQ/account/topic". Partition: 0. Amount: 376. Cookie: 3 2025-03-31T16:00:52.661863Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Got quota. Topic: "PQ/account/topic". Partition: 0: Cookie: 3 2025-03-31T16:00:52.661978Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Topic 'PQ/account/topic' partition 0 part blob processing sourceId '\000123' seqNo 3 partNo 0 2025-03-31T16:00:52.663101Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Topic 'PQ/account/topic' partition 0 part blob complete sourceId '\000123' seqNo 3 partNo 0 FormedBlobsCount 0 NewHead: Offset 2 PartNo 0 PackedSize 443 count 1 nextOffset 3 batches 1 2025-03-31T16:00:52.663624Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Add new write blob: topic 'PQ/account/topic' partition 0 compactOffset 2,1 HeadOffset 0 endOffset 2 curOffset 3 d0000000000_00000000000000000002_00000_0000000001_00000| size 431 WTime 1743436852661 2025-03-31T16:00:52.663735Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-31T16:00:52.663750Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-31T16:00:52.663764Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-03-31T16:00:52.663778Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-31T16:00:52.663793Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] m0000000000p123 2025-03-31T16:00:52.663803Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] d0000000000_00000000000000000002_00000_0000000001_00000| 2025-03-31T16:00:52.663816Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] i0000000000 2025-03-31T16:00:52.664206Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037893, 
Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-31T16:00:52.664231Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] =========================== 2025-03-31T16:00:52.664277Z node 4 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-03-31T16:00:52.664343Z node 4 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 2 partNo 0 count 1 size 431 2025-03-31T16:00:52.671988Z node 4 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 2 count 1 size 431 actorID [4:7488004258827296926:2336] 2025-03-31T16:00:52.672096Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 376 WriteNewSizeFromSupportivePartitions# 0 2025-03-31T16:00:52.672138Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-31T16:00:52.672193Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Answering for message sourceid: '\000123', Topic: 'PQ/account/topic', Partition: 0, SeqNo: 3, partNo: 0, Offset: 2 is stored on disk 2025-03-31T16:00:52.672360Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 1 requestId: cookie: 1 2025-03-31T16:00:52.672853Z node 4 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. Tablet '72075186224037893' partition 0 offset 2 partno 0 count 1 parts 0 size 431 2025-03-31T16:00:52.673148Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037893 (partition=0) Received event: NActors::IEventHandle 2025-03-31T16:00:52.679642Z :DEBUG: [] MessageGroupId [123] SessionId [123|c4fb17d0-291db862-ebb7e9bf-f3886435_0] Write session got write response: sequence_numbers: 3 offsets: 2 already_written: false write_statistics { persist_duration_ms: 9 } 2025-03-31T16:00:52.679711Z :DEBUG: [] MessageGroupId [123] SessionId [123|c4fb17d0-291db862-ebb7e9bf-f3886435_0] Write session: acknoledged message 1 2025-03-31T16:00:52.756355Z :INFO: [] MessageGroupId [123] SessionId [123|c4fb17d0-291db862-ebb7e9bf-f3886435_0] Write session will now close 2025-03-31T16:00:52.756451Z :DEBUG: [] MessageGroupId [123] SessionId [123|c4fb17d0-291db862-ebb7e9bf-f3886435_0] Write session: aborting 2025-03-31T16:00:52.756930Z :INFO: [] MessageGroupId [123] SessionId [123|c4fb17d0-291db862-ebb7e9bf-f3886435_0] Write session: gracefully shut down, all writes complete 2025-03-31T16:00:52.756983Z :DEBUG: [] MessageGroupId [123] SessionId [123|c4fb17d0-291db862-ebb7e9bf-f3886435_0] Write session: destroy 2025-03-31T16:00:52.795259Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: 123|c4fb17d0-291db862-ebb7e9bf-f3886435_0 grpc read done: success: 0 data: 2025-03-31T16:00:52.795286Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: 123|c4fb17d0-291db862-ebb7e9bf-f3886435_0 grpc read failed 2025-03-31T16:00:52.795334Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: 123|c4fb17d0-291db862-ebb7e9bf-f3886435_0 grpc closed 2025-03-31T16:00:52.795350Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: 123|c4fb17d0-291db862-ebb7e9bf-f3886435_0 is DEAD 2025-03-31T16:00:52.796790Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037893 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-03-31T16:00:52.803240Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037893] server disconnected, pipe [3:7488004262975673687:2420] destroyed 2025-03-31T16:00:52.803333Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037893, 
Partition: 0, State: StateIdle] TPartition::DropOwner. 2025-03-31T16:00:52.975323Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7488004262975673695:2425], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:00:52.977754Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTNiZDJiZjctYjdmZWI4NC1kNTQ5ZDJjYy1lOTNmNGUwYg==, ActorId: [3:7488004262975673693:2424], ActorState: ExecuteState, TraceId: 01jqpeybnh8jsym92zgfag6jdv, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:00:52.978131Z node 3 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } >> DataShardSnapshots::LockedWriteWithAsyncIndexAndVolatileCommit-UseSink [GOOD] |70.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |70.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> ObjectStorageListingTest::ListingNoFilter [GOOD] Test command err: 2025-03-31T16:00:52.532957Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:00:52.533220Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:00:52.533397Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0015e7/r3tmp/tmpG1Tcq2/pdisk_1.dat 2025-03-31T16:00:52.995018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:00:53.045486Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:00:53.092882Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:00:53.093030Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:00:53.106930Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:00:53.194600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:00:53.284094Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-31T16:00:53.284391Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:00:53.338595Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:00:53.338727Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:00:53.340707Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:00:53.340804Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:00:53.340858Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:00:53.341301Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:00:53.341473Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:00:53.341567Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-31T16:00:53.352392Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:00:53.419753Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T16:00:53.420054Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:00:53.420257Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-31T16:00:53.420311Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:00:53.420353Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T16:00:53.420388Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:00:53.420896Z node 1 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T16:00:53.420990Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T16:00:53.421110Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:00:53.421148Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:00:53.421190Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:00:53.421233Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:00:53.421304Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-31T16:00:53.421506Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:00:53.421730Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T16:00:53.421857Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T16:00:53.424068Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:00:53.435418Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:00:53.435568Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-31T16:00:53.598795Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-31T16:00:53.604046Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T16:00:53.604159Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:00:53.604820Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:00:53.604885Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:00:53.604946Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-31T16:00:53.605214Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-31T16:00:53.605406Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:00:53.605979Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:00:53.606049Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-31T16:00:53.608667Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-31T16:00:53.609162Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
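The datashard trace above follows the usual prepare-then-plan pattern: the scheme transaction is first prepared at the tablet, and becomes a "ready operation [1000:281474976715657] in PlanQueue" only once the coordinator assigns plan step 1000. A minimal illustrative C++ model of that (step, txId)-ordered queue follows; it is a sketch of the ordering visible in the log, not the real TDataShard implementation.

#include <cstdint>
#include <iostream>
#include <set>
#include <utility>

int main() {
    // Planned operations are identified as [step:txId] and executed in
    // (step, txId) order; std::set keeps exactly that ordering.
    std::set<std::pair<uint64_t, uint64_t>> planQueue;

    // A prepared transaction becomes ready when the coordinator plans it
    // (step 1000 in the trace above).
    planQueue.emplace(1000, 281474976715657ULL);

    while (!planQueue.empty()) {
        auto [step, txId] = *planQueue.begin();
        planQueue.erase(planQueue.begin());
        std::cout << "Found ready operation [" << step << ":" << txId
                  << "] in PlanQueue\n";
    }
}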
2025-03-31T16:00:53.611421Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-03-31T16:00:53.611491Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:00:53.612600Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-03-31T16:00:53.612730Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:00:53.613739Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:00:53.613787Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:00:53.613864Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-31T16:00:53.613934Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-31T16:00:53.613995Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-31T16:00:53.614090Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:00:53.618367Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:00:53.620714Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-03-31T16:00:53.620956Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-03-31T16:00:53.621021Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-31T16:00:53.641788Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:00:53.641919Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:747:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:00:53.641991Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:00:53.647820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:00:53.655025Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:00:53.839397Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:00:53.851233Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:00:53.940029Z node 1 :TX_PROXY ERROR: Actor# [1:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:00:54.447867Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqpeycc766aq6bama587y7zp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjM4OTZjZTItM2NiNzRlMGMtYjg0MWY4YjctYWVlMjFhYmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:00:54.454739Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:856:2687], serverId# [1:857:2688], sessionId# [0:0:0] 2025-03-31T16:00:54.455391Z node 1 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888 2025-03-31T16:00:54.455655Z node 1 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row count=5 2025-03-31T16:00:54.467833Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:00:54.480992Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:864:2694], serverId# [1:865:2695], sessionId# [0:0:0] 2025-03-31T16:00:54.481302Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: start at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test/")), end at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test0")) restarted: 0 last path: "" contents: 0 common prefixes: 0 2025-03-31T16:00:54.481555Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: finished status: 0 description: "" contents: 3 common prefixes: 2 2025-03-31T16:00:54.481822Z node 1 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [1:864:2694], serverId# [1:865:2695], sessionId# [0:0:0] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestCloudClientsAreConsistentlyDistributed [GOOD] Test command err: 2025-03-31T16:00:47.100437Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004241759779517:2204];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:00:47.100948Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001679/r3tmp/tmpkJxdkt/pdisk_1.dat 2025-03-31T16:00:48.012642Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:00:48.076120Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:00:48.076201Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:00:48.088158Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3257, node 1 2025-03-31T16:00:48.427496Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/0l5c/001679/r3tmp/yandexXnqrPC.tmp 2025-03-31T16:00:48.427520Z node 1 :NET_CLASSIFIER WARN: will 
try to initialize from file: /home/runner/.ya/build/build_root/0l5c/001679/r3tmp/yandexXnqrPC.tmp 2025-03-31T16:00:48.427704Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/0l5c/001679/r3tmp/yandexXnqrPC.tmp 2025-03-31T16:00:48.427836Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11746 PQClient connected to localhost:3257 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:00:49.026945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:00:49.085941Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:00:49.109211Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:00:49.125051Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-03-31T16:00:52.099248Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488004241759779517:2204];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:00:52.099346Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:00:52.270863Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004263234616577:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:00:52.270986Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:00:52.270992Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004263234616586:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:00:52.277438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-03-31T16:00:52.297996Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488004263234616592:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-03-31T16:00:52.699282Z node 1 :TX_PROXY ERROR: Actor# [1:7488004263234616657:2398] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:00:52.735907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:00:52.919286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:00:52.941543Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488004263234616666:2346], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:00:52.943452Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZGNmNDdiOTktMWIxZGFhZmQtMjY0NzNjODktNzdkNmZhZTA=, ActorId: [1:7488004263234616575:2334], ActorState: ExecuteState, TraceId: 01jqpeyb0z25btvazkjwnew1gv, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:00:52.945599Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T16:00:53.105412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-03-31T16:00:53.600884Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jqpeyc1c8v4g9n2abd1xq144, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDBlNjI3ZTktZDI0ZTEyNGUtZThiNGIwNDctNTYzMTU1OGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root >> ObjectStorageListingTest::FilterListing [GOOD] >> TProxyActorTest::TestCreateSemaphore [GOOD] |71.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |71.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> DataShardSnapshots::DelayedWriteReadableAfterSplitAndReboot [GOOD] >> DataShardSnapshots::BrokenLockChangesDontLeak ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlapNaming::AlterColumnTableFailed [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-31T16:00:37.557385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:00:37.557504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:00:37.557591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:00:37.557644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:00:37.557716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:00:37.557755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:00:37.557822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:00:37.557901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:00:37.558238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:00:37.668460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:00:37.668524Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:00:37.685709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:00:37.686130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:00:37.686306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:00:37.698513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:00:37.698829Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:00:37.699542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:00:37.699954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:00:37.701975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 
72057594046678944 2025-03-31T16:00:37.703239Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:00:37.703303Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:00:37.703434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:00:37.703512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:00:37.703556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:00:37.703763Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:00:37.720410Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-03-31T16:00:37.887443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:00:37.887747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:00:37.887960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:00:37.888210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:00:37.888276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:00:37.896132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:00:37.896322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:00:37.896558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:00:37.896644Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:00:37.896704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:00:37.896742Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:00:37.903995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:00:37.904082Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:00:37.904143Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:00:37.906492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:00:37.906567Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:00:37.906609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:00:37.906673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:00:37.910475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:00:37.921719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:00:37.921972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:00:37.923054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:00:37.923261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:00:37.923332Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:00:37.923684Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:00:37.923742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:00:37.923912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:00:37.923995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:00:37.929546Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:00:37.929608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:00:37.929828Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:00:37.929872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:00:37.930265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:00:37.930315Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 
ProgressState 2025-03-31T16:00:37.930412Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:00:37.930445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:00:37.930478Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:00:37.930513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:00:37.930574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:00:37.930618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:00:37.930657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:00:37.930686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:00:37.930750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:00:37.930789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:00:37.930820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:00:37.932881Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:00:37.933014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:00:37.933074Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
d: 101 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 2025-03-31T16:00:55.269917Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409582 TxId: 101 2025-03-31T16:00:55.269944Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409582, partId: 0 2025-03-31T16:00:55.269997Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409582 TxId: 101 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 101 2025-03-31T16:00:55.278072Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:00:55.278290Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:00:55.278400Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:00:55.278483Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:00:55.278601Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:00:55.278679Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:00:55.278769Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:00:55.278879Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:00:55.282072Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:00:55.282285Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:00:55.282395Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:00:55.282461Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:00:55.282594Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 
2025-03-31T16:00:55.282865Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:00:55.282966Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:00:55.283998Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:00:55.298757Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:00:55.298977Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:00:55.299118Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:00:55.299222Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:00:55.299337Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:00:55.299428Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:00:55.299516Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:00:55.299608Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:00:55.300470Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:00:55.300575Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:00:55.300658Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:00:55.300725Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:00:55.300798Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:00:55.300880Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:00:55.300954Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:00:55.301067Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:00:55.301141Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-03-31T16:00:55.301259Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-31T16:00:55.301301Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T16:00:55.301347Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-31T16:00:55.301381Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T16:00:55.301426Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2025-03-31T16:00:55.301510Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send 
TEvNotifyTxCompletionResult to actorId: [2:2776:4041] message: TxId: 101 2025-03-31T16:00:55.301569Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T16:00:55.301662Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-03-31T16:00:55.301705Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-03-31T16:00:55.302969Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 66 2025-03-31T16:00:55.312226Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-31T16:00:55.312300Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:2777:4042] TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-03-31T16:00:55.315522Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterColumnTable AlterColumnTable { Name: "TestTable" AlterSchema { AddColumns { Name: "New Column" Type: "Int32" } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:00:55.315739Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TAlterColumnTable Propose, path: /MyRoot/TestTable, opId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:00:55.315953Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusSchemeError, reason: update parse error: Invalid name for column 'New Column'. in alter constructor STANDALONE_UPDATE, at schemeshard: 72057594046678944 2025-03-31T16:00:55.321415Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusSchemeError Reason: "update parse error: Invalid name for column \'New Column\'. in alter constructor STANDALONE_UPDATE" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:00:55.321610Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusSchemeError, reason: update parse error: Invalid name for column 'New Column'. 
in alter constructor STANDALONE_UPDATE, operation: ALTER COLUMN TABLE, path: /MyRoot/TestTable TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-03-31T16:00:55.321983Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-03-31T16:00:55.322038Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-03-31T16:00:55.322537Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-03-31T16:00:55.322673Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-31T16:00:55.322717Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:3575:4771] TestWaitNotification: OK eventTxId 102 |71.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestDisconnectWhileAttaching |71.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |71.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |71.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestCreateSemaphore [GOOD] |71.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |71.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |71.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> KqpJoinOrder::FiveWayJoinWithConstantFold-ColumnStore [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::LockedWriteWithAsyncIndexAndVolatileCommit-UseSink [GOOD] Test command err: 2025-03-31T15:58:22.372303Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:58:22.372558Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:22.372739Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001436/r3tmp/tmpkBfGQ0/pdisk_1.dat 2025-03-31T15:58:23.022815Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T15:58:23.110874Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:58:23.160011Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-31T15:58:23.161001Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:58:23.161111Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:58:23.161272Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-03-31T15:58:23.180701Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:58:23.267453Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-03-31T15:58:23.267516Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-03-31T15:58:23.267656Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-03-31T15:58:23.585962Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-03-31T15:58:23.586057Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T15:58:23.586625Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-03-31T15:58:23.586717Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-03-31T15:58:23.588581Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T15:58:23.588832Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T15:58:23.588955Z node 1 :TX_PROXY DEBUG: Actor# 
[1:641:2549] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-03-31T15:58:23.589310Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvClientConnected 2025-03-31T15:58:23.590678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T15:58:23.591702Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-03-31T15:58:23.591777Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-03-31T15:58:23.643019Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-03-31T15:58:23.657238Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-03-31T15:58:23.657756Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-31T15:58:23.657999Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:58:23.762781Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T15:58:23.765039Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:58:23.765192Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T15:58:23.766897Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T15:58:23.766996Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T15:58:23.767050Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T15:58:23.767467Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T15:58:23.767603Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T15:58:23.767687Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-31T15:58:23.768091Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T15:58:23.825042Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T15:58:23.825229Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T15:58:23.825346Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-31T15:58:23.825382Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T15:58:23.825416Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T15:58:23.825462Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T15:58:23.825671Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
2025-03-31T15:58:23.825748Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T15:58:23.826021Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T15:58:23.826109Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T15:58:23.826245Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T15:58:23.826282Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T15:58:23.826323Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-31T15:58:23.826366Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-31T15:58:23.826396Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-31T15:58:23.826428Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T15:58:23.826467Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T15:58:23.826570Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T15:58:23.826620Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T15:58:23.826669Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-31T15:58:23.826768Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-03-31T15:58:23.826803Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T15:58:23.826922Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T15:58:23.827512Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-31T15:58:23.827595Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T15:58:23.827706Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T15:58:23.827761Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-31T15:58:23.827801Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-31T15:58:23.827839Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-03-31T15:58:23.827884Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-31T15:58:23.828286Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-31T15:58:23.828331Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-31T15:58:23.828385Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-31T15:58:23.828419Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 
72075186224037888 on unit FinishPropose 2025-03-31T15:58:23.828468Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-31T15:58:23.828498Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-31T15:58:23.828533Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-31T15:58:23.828565Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-31T15:58:23.828611Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-31T15:58:23.829789Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T15:58:23.829840Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-31T15:58:23.829873Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-31T15:58:23.829915Z node 1 :TX_DATASHARD TRACE: Prop ... ablet: 72075186224037890 2025-03-31T16:00:53.744938Z node 13 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 4, at tablet: 72075186224037890 2025-03-31T16:00:53.745841Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 278593539, Sender [13:834:2693], Recipient [13:666:2570]: NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [2,3,4] } 2025-03-31T16:00:53.745954Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 2146435091, Sender [13:666:2570], Recipient [13:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRemoveChangeRecords 2025-03-31T16:00:53.746006Z node 13 :TX_DATASHARD INFO: TTxRemoveChangeRecords Execute: records# 3, at tablet# 72075186224037888 2025-03-31T16:00:53.746039Z node 13 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 2, at tablet: 72075186224037888 2025-03-31T16:00:53.746123Z node 13 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 3, at tablet: 72075186224037888 2025-03-31T16:00:53.746165Z node 13 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 4, at tablet: 72075186224037888 2025-03-31T16:00:53.746400Z node 13 :TX_DATASHARD INFO: TTxRemoveChangeRecords Complete: removed# 3, left# 0, at tablet# 72075186224037890 2025-03-31T16:00:53.746732Z node 13 :TX_DATASHARD INFO: TTxRemoveChangeRecords Complete: removed# 3, left# 0, at tablet# 72075186224037888 2025-03-31T16:00:54.011541Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [13:1079:2892]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-03-31T16:00:54.793258Z node 13 :TX_PROXY DEBUG: actor# [13:59:2106] Handle TEvExecuteKqpTransaction 2025-03-31T16:00:54.793391Z node 13 :TX_PROXY DEBUG: actor# [13:59:2106] TxId# 281474976715671 ProcessProposeKqpTransaction 2025-03-31T16:00:54.795028Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01jqpeycry5e91m02798nak1rq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=ZTEwMzFiNzAtZDBlMzJmY2ItMWI0Njg5NWYtMjg1MzZlYjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root TEvRead: ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 4 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false 2025-03-31T16:00:54.798154Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [13:1706:3414], Recipient [13:797:2664]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 4 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false RangesSize: 3 2025-03-31T16:00:54.798486Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2025-03-31T16:00:54.798583Z node 13 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037889 CompleteEdge# v8021/281474976715670 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-03-31T16:00:54.798682Z node 13 :TX_DATASHARD TRACE: 72075186224037889 changed HEAD read to non-repeatable v9000/18446744073709551615 2025-03-31T16:00:54.798822Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037889 on unit CheckRead 2025-03-31T16:00:54.799041Z node 13 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037889 is Executed 2025-03-31T16:00:54.799148Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037889 executing on unit CheckRead 2025-03-31T16:00:54.799235Z node 13 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-03-31T16:00:54.799310Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037889 on unit BuildAndWaitDependencies 2025-03-31T16:00:54.799385Z node 13 :TX_DATASHARD TRACE: Activated operation [0:4] at 72075186224037889 2025-03-31T16:00:54.799462Z node 13 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037889 is Executed 2025-03-31T16:00:54.799501Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-03-31T16:00:54.799531Z node 13 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037889 to execution unit ExecuteRead 2025-03-31T16:00:54.799560Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037889 on unit ExecuteRead 2025-03-31T16:00:54.799763Z node 13 :TX_DATASHARD TRACE: 72075186224037889 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 4 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false } 2025-03-31T16:00:54.800260Z node 13 :TX_DATASHARD TRACE: 72075186224037889 Complete read# {[13:1706:3414], 0} after executionsCount# 1 2025-03-31T16:00:54.800409Z node 13 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[13:1706:3414], 0} sends rowCount# 2, bytes# 64, quota rows left# 32765, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 3, firstUnprocessed# 0 2025-03-31T16:00:54.800565Z node 13 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[13:1706:3414], 0} finished in read 2025-03-31T16:00:54.800692Z node 13 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037889 is Executed 2025-03-31T16:00:54.800728Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037889 executing on unit ExecuteRead 2025-03-31T16:00:54.800762Z node 13 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037889 to execution unit CompletedOperations 2025-03-31T16:00:54.800799Z node 13 
:TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037889 on unit CompletedOperations
2025-03-31T16:00:54.800868Z node 13 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037889 is Executed
2025-03-31T16:00:54.800897Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037889 executing on unit CompletedOperations
2025-03-31T16:00:54.800940Z node 13 :TX_DATASHARD TRACE: Execution plan for [0:4] at 72075186224037889 has finished
2025-03-31T16:00:54.801028Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889
2025-03-31T16:00:54.801233Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889
2025-03-31T16:00:54.802692Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [13:1706:3414], Recipient [13:797:2664]: NKikimrTxDataShard.TEvReadCancel ReadId: 0
2025-03-31T16:00:54.802787Z node 13 :TX_DATASHARD TRACE: 72075186224037889 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 11 } }, { items { uint32_value: 2 } items { uint32_value: 21 } }
2025-03-31T16:00:55.542096Z node 13 :TX_PROXY DEBUG: actor# [13:59:2106] Handle TEvExecuteKqpTransaction
2025-03-31T16:00:55.542226Z node 13 :TX_PROXY DEBUG: actor# [13:59:2106] TxId# 281474976715672 ProcessProposeKqpTransaction
2025-03-31T16:00:55.543931Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jqpeydh4f11dded7v8j8s021, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=M2VhYzk1MmMtYWYxMzBkNzktNGIzYWNhMGUtYzFlODNhMTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TEvRead: ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 7 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false
2025-03-31T16:00:55.547052Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [13:1737:3439], Recipient [13:1079:2892]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 7 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false RangesSize: 3
2025-03-31T16:00:55.547465Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037891, FollowerId 0
2025-03-31T16:00:55.547570Z node 13 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037891 CompleteEdge# v8021/281474976715670 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min}
2025-03-31T16:00:55.547665Z node 13 :TX_DATASHARD TRACE: 72075186224037891 changed HEAD read to non-repeatable v9000/18446744073709551615
2025-03-31T16:00:55.547790Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037891 on unit CheckRead
2025-03-31T16:00:55.548014Z node 13 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037891 is Executed
2025-03-31T16:00:55.548115Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037891 executing on unit CheckRead
2025-03-31T16:00:55.548211Z node 13 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037891 to execution unit BuildAndWaitDependencies
2025-03-31T16:00:55.548276Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037891 on unit BuildAndWaitDependencies
2025-03-31T16:00:55.548345Z node 13 :TX_DATASHARD TRACE: Activated operation [0:4] at 72075186224037891
2025-03-31T16:00:55.548442Z node 13 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037891 is Executed
2025-03-31T16:00:55.548474Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037891 executing on unit BuildAndWaitDependencies
2025-03-31T16:00:55.548505Z node 13 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037891 to execution unit ExecuteRead
2025-03-31T16:00:55.548534Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037891 on unit ExecuteRead
2025-03-31T16:00:55.548697Z node 13 :TX_DATASHARD TRACE: 72075186224037891 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 7 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false }
2025-03-31T16:00:55.549168Z node 13 :TX_DATASHARD TRACE: 72075186224037891 Complete read# {[13:1737:3439], 0} after executionsCount# 1
2025-03-31T16:00:55.549296Z node 13 :TX_DATASHARD TRACE: 72075186224037891 read iterator# {[13:1737:3439], 0} sends rowCount# 2, bytes# 64, quota rows left# 32765, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 3, firstUnprocessed# 0
2025-03-31T16:00:55.549427Z node 13 :TX_DATASHARD TRACE: 72075186224037891 read iterator# {[13:1737:3439], 0} finished in read
2025-03-31T16:00:55.549575Z node 13 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037891 is Executed
2025-03-31T16:00:55.549610Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037891 executing on unit ExecuteRead
2025-03-31T16:00:55.549642Z node 13 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037891 to execution unit CompletedOperations
2025-03-31T16:00:55.549673Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037891 on unit CompletedOperations
2025-03-31T16:00:55.549727Z node 13 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037891 is Executed
2025-03-31T16:00:55.549754Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037891 executing on unit CompletedOperations
2025-03-31T16:00:55.549805Z node 13 :TX_DATASHARD TRACE: Execution plan for [0:4] at 72075186224037891 has finished
2025-03-31T16:00:55.549884Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037891
2025-03-31T16:00:55.550098Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037891
2025-03-31T16:00:55.551511Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [13:1737:3439], Recipient [13:1079:2892]: NKikimrTxDataShard.TEvReadCancel ReadId: 0
2025-03-31T16:00:55.551610Z node 13 :TX_DATASHARD TRACE: 72075186224037891 ReadCancel: { ReadId: 0 } { items { uint32_value: 10 } items { uint32_value: 110 } }, { items { uint32_value: 20 } items { uint32_value: 210 } }
>> TProxyActorTest::TestCreateSemaphoreInterrupted
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> ObjectStorageListingTest::FilterListing [GOOD]
Test command err:
2025-03-31T16:00:54.125874Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-03-31T16:00:54.126139Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-31T16:00:54.126311Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0015d9/r3tmp/tmpHaFi6f/pdisk_1.dat
2025-03-31T16:00:54.635978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480
2025-03-31T16:00:54.689902Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:00:54.733137Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:00:54.733276Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:00:54.744977Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-31T16:00:54.844021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480
2025-03-31T16:00:54.896570Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570]
2025-03-31T16:00:54.896872Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute
2025-03-31T16:00:54.948006Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete
2025-03-31T16:00:54.948152Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute
2025-03-31T16:00:54.950003Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888
2025-03-31T16:00:54.950095Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888
2025-03-31T16:00:54.950155Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888
2025-03-31T16:00:54.950580Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete
2025-03-31T16:00:54.950754Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute
2025-03-31T16:00:54.950847Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1
2025-03-31T16:00:54.963852Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete
2025-03-31T16:00:55.019826Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888
2025-03-31T16:00:55.020104Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params
2025-03-31T16:00:55.020242Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580]
2025-03-31T16:00:55.020292Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888
2025-03-31T16:00:55.020329Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme
2025-03-31T16:00:55.020381Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-03-31T16:00:55.020862Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888
2025-03-31T16:00:55.020979Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888
2025-03-31T16:00:55.021094Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888
2025-03-31T16:00:55.021133Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0
2025-03-31T16:00:55.021173Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0
2025-03-31T16:00:55.021213Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888
2025-03-31T16:00:55.021296Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0]
2025-03-31T16:00:55.021499Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888
2025-03-31T16:00:55.021748Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1
2025-03-31T16:00:55.021848Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888
2025-03-31T16:00:55.031211Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888
2025-03-31T16:00:55.043816Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888
2025-03-31T16:00:55.043940Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme
2025-03-31T16:00:55.226154Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0]
2025-03-31T16:00:55.244584Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 }
2025-03-31T16:00:55.244680Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-03-31T16:00:55.245387Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888
2025-03-31T16:00:55.245457Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1
2025-03-31T16:00:55.245520Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888
2025-03-31T16:00:55.245790Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0
2025-03-31T16:00:55.245993Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1
2025-03-31T16:00:55.246625Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888
2025-03-31T16:00:55.246702Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1
2025-03-31T16:00:55.257258Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2
2025-03-31T16:00:55.257814Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0
2025-03-31T16:00:55.268466Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0
2025-03-31T16:00:55.268537Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-03-31T16:00:55.269610Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000}
2025-03-31T16:00:55.269724Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888
2025-03-31T16:00:55.270684Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888
2025-03-31T16:00:55.270736Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888
2025-03-31T16:00:55.270810Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888
2025-03-31T16:00:55.270879Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:409:2404], exec latency: 0 ms, propose latency: 0 ms
2025-03-31T16:00:55.270937Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0
2025-03-31T16:00:55.271025Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-03-31T16:00:55.287638Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888
2025-03-31T16:00:55.289906Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000
2025-03-31T16:00:55.290141Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready
2025-03-31T16:00:55.290209Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888
2025-03-31T16:00:55.314026Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:00:55.314149Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:747:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:00:55.314241Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:00:55.329072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480
2025-03-31T16:00:55.342304Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888
2025-03-31T16:00:55.518442Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888
2025-03-31T16:00:55.521911Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking }
2025-03-31T16:00:55.659997Z node 1 :TX_PROXY ERROR: Actor# [1:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-31T16:00:56.418451Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqpeye0fd8m6a2xy3m423101, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODkzZTRlMDUtNzcyZjNiNTgtMzRlNWIwMzctNGU2OTZjYjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:00:56.426143Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:856:2687], serverId# [1:857:2688], sessionId# [0:0:0]
2025-03-31T16:00:56.426708Z node 1 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888
2025-03-31T16:00:56.426927Z node 1 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row count=5
2025-03-31T16:00:56.439323Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-03-31T16:00:56.454355Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:864:2694], serverId# [1:865:2695], sessionId# [0:0:0]
2025-03-31T16:00:56.454652Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: start at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test/")), end at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test0")) restarted: 0 last path: "" contents: 0 common prefixes: 0
2025-03-31T16:00:56.454904Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: finished status: 0 description: "" contents: 2 common prefixes: 1
2025-03-31T16:00:56.459363Z node 1 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [1:864:2694], serverId# [1:865:2695], sessionId# [0:0:0]
2025-03-31T16:00:56.461951Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:870:2700], serverId# [1:871:2701], sessionId# [0:0:0]
2025-03-31T16:00:56.462249Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: start at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test/")), end at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test0")) restarted: 0 last path: "" contents: 0 common prefixes: 0
2025-03-31T16:00:56.462595Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: finished status: 0 description: "" contents: 1 common prefixes: 1
2025-03-31T16:00:56.462815Z node 1 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [1:870:2700], serverId# [1:871:2701], sessionId# [0:0:0]
|71.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest
|71.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest
>> TProxyActorTest::TestDisconnectWhileAttaching [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoinWithConstantFold+ColumnStore [GOOD]
Test command err:
Trying to start YDB, gRPC: 31910, MsgBus: 18724
2025-03-31T15:58:50.517889Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488003740284701099:2201];send_to=[0:7307199536658146131:7762515];
2025-03-31T15:58:50.531334Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0019dc/r3tmp/tmpDQNwsM/pdisk_1.dat
2025-03-31T15:58:51.517568Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T15:58:51.517667Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T15:58:51.524378Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-31T15:58:51.545740Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T15:58:51.548552Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
TServer::EnableGrpc on GrpcPort 31910, node 1
2025-03-31T15:58:51.779610Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T15:58:51.779631Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T15:58:51.779637Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T15:58:51.779742Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:18724
TClient is connected to server localhost:18724
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-31T15:58:52.867365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T15:58:55.515285Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488003740284701099:2201];send_to=[0:7307199536658146131:7762515];
2025-03-31T15:58:55.515345Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-31T15:58:56.358951Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003766054505405:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T15:58:56.359108Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T15:58:56.359628Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003766054505417:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T15:58:56.364578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480
2025-03-31T15:58:56.390572Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488003766054505419:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking }
2025-03-31T15:58:56.456596Z node 1 :TX_PROXY ERROR: Actor# [1:7488003766054505470:2345] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-31T15:58:56.977984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
2025-03-31T15:58:57.305449Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488003770349473026:2355];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0;
2025-03-31T15:58:57.305728Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488003770349473026:2355];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules;
2025-03-31T15:58:57.305981Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488003770349473026:2355];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks;
2025-03-31T15:58:57.306096Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488003770349473026:2355];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner;
2025-03-31T15:58:57.306189Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488003770349473026:2355];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId;
2025-03-31T15:58:57.306292Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488003770349473026:2355];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup;
2025-03-31T15:58:57.306393Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488003770349473026:2355];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer;
2025-03-31T15:58:57.306506Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488003770349473026:2355];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks;
2025-03-31T15:58:57.306644Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488003770349473026:2355];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks;
2025-03-31T15:58:57.306767Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488003770349473026:2355];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks;
2025-03-31T15:58:57.306878Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488003770349473026:2355];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2;
2025-03-31T15:58:57.306975Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488003770349473026:2355];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks;
2025-03-31T15:58:57.316617Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488003770349473028:2356];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0;
2025-03-31T15:58:57.316684Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488003770349473028:2356];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules;
2025-03-31T15:58:57.316898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488003770349473028:2356];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks;
2025-03-31T15:58:57.317027Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488003770349473028:2356];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner;
2025-03-31T15:58:57.317140Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488003770349473028:2356];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId;
2025-03-31T15:58:57.317231Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488003770349473028:2356];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup;
2025-03-31T15:58:57.317344Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488003770349473028:2356];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer;
2025-03-31T15:58:57.317451Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488003770349473028:2356];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks;
2025-03-31T15:58:57.317615Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488003770349473028:2356];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks;
2025-03-31T15:58:57.317732Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488003770349473028:2356];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks;
2025-03-31T15:58:57.317827Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488003770349473028:2356];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2;
2025-03-31T15:58:57.317918Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488003770349473028:2356];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CL ... ller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:28.862919Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039247;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:28.868828Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039221;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:28.887657Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:28.902094Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039285;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:28.918532Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:28.930687Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:28.954973Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039323;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:28.969991Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039273;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:28.981993Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:28.991663Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039293;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:29.016996Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039259;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:29.018010Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039275;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:29.036870Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:29.043676Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039301;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:29.063620Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039367;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:29.069403Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039309;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:29.082450Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039249;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:29.095965Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039269;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:29.115975Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039243;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:29.124154Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039251;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:29.151838Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:29.158813Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039209;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:29.177264Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:29.183025Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:29.197569Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:29.211575Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039287;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:29.236832Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:29.249972Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:29.270069Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039281;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:29.297728Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039291;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:29.317143Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:29.324381Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:29.348181Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:29.359363Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039303;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:29.366237Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:29.370671Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039257;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:29.390281Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:29.393439Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039319;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:29.425653Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039277;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:29.429081Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:29.449108Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:29.458305Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:29.474611Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:29.474959Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:29.872852Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039236;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:30.070920Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpew3nr3pr0jvz9402d34g0", SessionId: ydb://session/3?node_id=1&id=YTY1ODdlZjYtNmU3OTAyYjktZjM5MGQwNDctNGE3YTk3MTY=, Slow query, duration: 50.872258s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n    id1 Int32 NOT NULL,\n    PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n    id2 Int64 NOT NULL,\n    t1_id1 Int64 NOT NULL,\n    -- random_field2 Int32\n    PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n    id3 Int16 NOT NULL,\n    -- random_field3 Int32\n    PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b
2025-03-31T16:00:30.713680Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716;
2025-03-31T16:00:30.714399Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716;
2025-03-31T16:00:30.716555Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;self_id=[1:7488004096767044429:11045];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039392;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038933;receive=72075186224039094;
2025-03-31T16:00:30.716992Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716;
|71.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCH9_100 [GOOD]
Test command err:
Trying to start YDB, gRPC: 27690, MsgBus: 61328
2025-03-31T15:58:47.765861Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488003728347061412:2196];send_to=[0:7307199536658146131:7762515];
2025-03-31T15:58:47.801059Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0019b6/r3tmp/tmp2rgq5m/pdisk_1.dat
2025-03-31T15:58:48.400860Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T15:58:48.400967Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T15:58:48.417755Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T15:58:48.431261Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 27690, node 1
2025-03-31T15:58:48.735483Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T15:58:48.735502Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T15:58:48.735509Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T15:58:48.735603Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:61328
TClient is connected to server localhost:61328
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-31T15:58:49.917306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T15:58:52.699347Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488003728347061412:2196];send_to=[0:7307199536658146131:7762515];
2025-03-31T15:58:52.699423Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-31T15:58:53.452986Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003754116865722:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T15:58:53.453113Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T15:58:53.453496Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003754116865734:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T15:58:53.457720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480
2025-03-31T15:58:53.489370Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488003754116865736:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking }
2025-03-31T15:58:53.575780Z node 1 :TX_PROXY ERROR: Actor# [1:7488003754116865787:2345] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-31T15:58:54.179079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
2025-03-31T15:58:54.328327Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488003758411833232:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0;
2025-03-31T15:58:54.328508Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488003758411833232:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules;
2025-03-31T15:58:54.328741Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488003758411833232:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks;
2025-03-31T15:58:54.328874Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488003758411833232:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner;
2025-03-31T15:58:54.329022Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488003758411833232:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId;
2025-03-31T15:58:54.334128Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488003758411833232:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup;
2025-03-31T15:58:54.334374Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488003758411833232:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer;
2025-03-31T15:58:54.334513Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488003758411833232:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks;
2025-03-31T15:58:54.334634Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488003758411833232:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks;
2025-03-31T15:58:54.334754Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488003758411833232:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks;
2025-03-31T15:58:54.334889Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488003758411833232:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2;
2025-03-31T15:58:54.335023Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488003758411833232:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks;
2025-03-31T15:58:54.340090Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488003758411833230:2352];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0;
2025-03-31T15:58:54.340154Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488003758411833230:2352];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules;
2025-03-31T15:58:54.340726Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488003758411833230:2352];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks;
2025-03-31T15:58:54.340883Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488003758411833230:2352];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner;
2025-03-31T15:58:54.340991Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488003758411833230:2352];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId;
2025-03-31T15:58:54.341083Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488003758411833230:2352];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup;
2025-03-31T15:58:54.341187Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488003758411833230:2352];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer;
2025-03-31T15:58:54.347306Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488003758411833230:2352];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks;
2025-03-31T15:58:54.347475Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488003758411833230:2352];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks;
2025-03-31T15:58:54.347609Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488003758411833230:2352];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks;
2025-03-31T15:58:54.347728Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488003758411833230:2352];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2;
2025-03-31T15:58:54.347849Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488003758411833230:2352];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks;
2025-03-31T15:58:54.424941Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488003758411833235:2354];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.c ... oller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:24.268549Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039289;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:24.279660Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:24.290399Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:24.294321Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:24.302309Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039345;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:24.302360Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:24.309459Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:24.312490Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039255;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:24.321371Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039261;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:24.328610Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039267;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:24.338450Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039367;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:24.346513Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:24.354060Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:24.366659Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:24.381662Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:24.388158Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:24.397711Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039315;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:24.401105Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:24.404626Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039325;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:24.413002Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:24.419630Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:24.420277Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039341;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:24.428444Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039327;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:24.430703Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039329;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:24.436281Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:24.441804Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:24.456946Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:24.460500Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039309;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:24.470775Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:24.471619Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:24.485702Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:24.489948Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:24.500655Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:24.505615Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:24.514695Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:24.519387Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:24.525319Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:24.529720Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:24.543780Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:24.553853Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:24.562602Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:24.564111Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:24.577161Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:24.579863Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:24.730697Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039358;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:00:24.929397Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpew2q9dywydyywtkavemn6", SessionId: ydb://session/3?node_id=1&id=NjI3NTA1MTEtMjc5Y2ViYy02MTQwOGJmOS05MGY4MTQyZg==, Slow query, duration: 46.711057s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n    id1 Int32 NOT NULL,\n    PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n    id2 Int64 NOT NULL,\n    t1_id1 Int64 NOT NULL,\n    -- random_field2 Int32\n    PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n    id3 Int16 NOT NULL,\n    -- random_field3 Int32\n    PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b
2025-03-31T16:00:25.715670Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716;
2025-03-31T16:00:25.716841Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;self_id=[1:7488004007519982449:9178];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039094;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038933;receive=72075186224039392;
2025-03-31T16:00:25.718546Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716;
2025-03-31T16:00:25.719595Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716;
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestDisconnectWhileAttaching [GOOD]
Test command err:
... waiting for blocked registrations
... blocking NKikimr::NKesus::TEvKesus::TEvRegisterProxy from KESUS_PROXY_ACTOR to KESUS_TABLET_ACTOR
... waiting for blocked registrations (done)
2025-03-31T16:00:58.352567Z node 1 :PIPE_SERVER ERROR: [72057594037927937] NodeDisconnected NodeId# 2
... unblocking NKikimr::NKesus::TEvKesus::TEvRegisterProxy from KESUS_PROXY_ACTOR to KESUS_TABLET_ACTOR
|71.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest
|71.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest
>> TProxyActorTest::TestCreateSemaphoreInterrupted [GOOD]
>> TPQCDTest::TestPrioritizeLocalDatacenter
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-60 [FAIL]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-61
>> TPQCDTest::TestUnavailableWithoutBoth
>> TPQCDTest::TestUnavailableWithoutClustersList
>> TPQCDTest::TestUnavailableWithoutNetClassifier
|71.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest
|71.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest
|71.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest
>> SystemView::QueryStats [GOOD]
>> SystemView::QueryStatsFields
>> SystemView::ShowCreateTableReadReplicas [GOOD]
>> SystemView::ShowCreateTableKeyBloomFilter
>> TPQCDTest::TestDiscoverClusters
|71.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestCreateSemaphoreInterrupted [GOOD]
|71.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoinWithConstantFold-ColumnStore [GOOD]
Test command err:
Trying to start YDB, gRPC: 28739, MsgBus: 15887
2025-03-31T15:59:44.936960Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488003970479433830:2199];send_to=[0:7307199536658146131:7762515];
2025-03-31T15:59:44.937490Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0019b3/r3tmp/tmpb1d6lg/pdisk_1.dat
2025-03-31T15:59:45.769903Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T15:59:45.773764Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T15:59:45.773853Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T15:59:45.776703Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 28739, node 1
2025-03-31T15:59:45.927665Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T15:59:45.927687Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T15:59:45.927694Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T15:59:45.927802Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:15887
TClient is connected to server localhost:15887
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:59:46.831362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:49.103202Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003991954270838:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:49.103296Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:49.103631Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003991954270850:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:49.108401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T15:59:49.129662Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488003991954270852:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T15:59:49.212329Z node 1 :TX_PROXY ERROR: Actor# [1:7488003991954270903:2340] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:59:49.722879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T15:59:49.858544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T15:59:49.906851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T15:59:49.925001Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488003970479433830:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:59:49.925076Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:59:49.958285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T15:59:49.996097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T15:59:50.249493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T15:59:50.348529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T15:59:50.403529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T15:59:50.484765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-31T15:59:50.535880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-03-31T15:59:50.590797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-03-31T15:59:50.653700Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 
2025-03-31T15:59:50.731668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T15:59:51.448763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-03-31T15:59:51.532494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T15:59:51.590280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-31T15:59:51.644703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-31T15:59:51.698866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-31T15:59:51.786520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-31T15:59:51.830604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-31T15:59:51.897531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-31T15:59:51.960193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-03-31T15:59:52.009932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-31T15:59:52.062390Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-31T15:59:52.118468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-03-31T15:59:52.210676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-03-31T15:59:52.340058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-03-31T15:59:52.388346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-03-31T15:59:52.431943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-03-31T15:59:52.476712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 2025-03-31T15:59:52.525361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but p ... oller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:48.228963Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038635;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:48.233802Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038470;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:48.234209Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038474;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:48.239849Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038496;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:48.243864Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038484;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:48.254055Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038546;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:48.257666Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038436;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:48.268481Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038532;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:48.277675Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038456;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:48.286767Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038514;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:48.294452Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038544;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:48.295668Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038534;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:48.303116Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038548;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:48.303279Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038440;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:48.309878Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038508;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:48.309893Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038454;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:48.316200Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038522;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:48.316860Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038538;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:48.322313Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038448;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:48.324219Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038554;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:48.328344Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038518;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:48.329614Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038460;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:48.334106Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038458;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:48.336161Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038450;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:48.340140Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038552;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:48.342838Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038574;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:48.346401Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038480;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:48.349039Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038486;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:48.351933Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038560;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:48.355356Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038528;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:48.359709Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038564;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:48.366259Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038566;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:48.377655Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038488;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:48.378259Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:48.384945Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038512;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:48.384965Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038520;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:48.391992Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038556;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:48.392184Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038524;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:48.398954Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038550;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:48.404305Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038562;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:48.415881Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038542;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:48.419272Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038492;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:48.422643Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038594;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:48.473774Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038606;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:48.475053Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038437;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:48.527617Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpewjvn488zv9t3af91a5rj", SessionId: ydb://session/3?node_id=1&id=MTcyNjI5NjMtNDA3ZmNiZWEtNjlkNzk4OTMtZjFkN2E2NDc=, Slow query, duration: 53.786014s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:00:48.854060Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:00:48.854061Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:00:48.854941Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;self_id=[1:7488004185227825729:5899];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038629;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038331;receive=72075186224038170; 2025-03-31T16:00:48.855334Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; |71.1%| [TA] $(B)/ydb/core/tx/datashard/ut_object_storage_listing/test-results/unittest/{meta.json ... results_accumulator.log} |71.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |71.1%| [TA] $(B)/ydb/core/kesus/proxy/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TStorageTenantTest::CreateTableOutsideDatabaseFailToStartTabletsButDropIsOk [GOOD] >> TStorageTenantTest::Empty [GOOD] |71.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> RetryPolicy::TWriteSession_RetryOnTargetCluster [GOOD] >> RetryPolicy::TWriteSession_SwitchBackToLocalCluster |71.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |71.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> RetryPolicy::TWriteSession_SeqNoShift [GOOD] >> RetryPolicy::RetryWithBatching |71.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> TStorageTenantTest::Empty [GOOD] |71.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |71.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl |71.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl >> SystemView::AuthUsers_LockUnlock [GOOD] >> TTxAllocatorClientTest::ZeroRange >> SystemView::AuthUsers_Access |71.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> TStorageTenantTest::CreateTableOutsideDatabaseFailToStartTabletsButDropIsOk [GOOD] |71.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |71.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest |71.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest |71.2%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_minstep/test-results/unittest/{meta.json ... results_accumulator.log} |71.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest |71.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest |71.2%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/test-results/unittest/{meta.json ... results_accumulator.log} |71.2%| [TA] {RESULT} $(B)/ydb/core/kesus/proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log} |71.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl >> TTxAllocatorClientTest::InitiatingRequest |71.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest >> TTxAllocatorClientTest::InitiatingRequest [GOOD] |71.2%| [TA] $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/test-results/unittest/{meta.json ... results_accumulator.log} >> TTxAllocatorClientTest::AllocateOverTheEdge >> TTxAllocatorClientTest::Boot >> TTxAllocatorClientTest::AllocateOverTheEdge [GOOD] >> DataShardSnapshots::ShardRestartAfterDropTableAndAbort [GOOD] >> TTxAllocatorClientTest::Boot [GOOD] >> TBoardSubscriberTest::DropByDisconnect >> TBoardSubscriberTest::SimpleSubscriber ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest >> TTxAllocatorClientTest::InitiatingRequest [GOOD] Test command err: 2025-03-31T16:01:03.544694Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-03-31T16:01:03.545162Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-03-31T16:01:03.545844Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. 
logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-03-31T16:01:03.547414Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:01:03.547855Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2025-03-31T16:01:03.556837Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:01:03.556963Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:01:03.557066Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:01:03.557146Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-03-31T16:01:03.557265Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:01:03.557345Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2025-03-31T16:01:03.557477Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2025-03-31T16:01:03.558058Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:70:2105] requested range size#5000 2025-03-31T16:01:03.558465Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:01:03.558530Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:01:03.558604Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 5000 2025-03-31T16:01:03.558646Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:70:2105] TEvAllocateResult from# 0 to# 5000 >> TBoardSubscriberTest::NotAvailableByShutdown ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest >> TTxAllocatorClientTest::AllocateOverTheEdge [GOOD] Test command err: 2025-03-31T16:01:04.082064Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-03-31T16:01:04.082566Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-03-31T16:01:04.083502Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. 
logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-03-31T16:01:04.085206Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:01:04.085647Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2025-03-31T16:01:04.099976Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:01:04.100109Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:01:04.100242Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:01:04.100388Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-03-31T16:01:04.100495Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:01:04.100599Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2025-03-31T16:01:04.100736Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2025-03-31T16:01:04.101377Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:70:2105] requested range size#5000 2025-03-31T16:01:04.101828Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:01:04.101903Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:01:04.102007Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 5000 2025-03-31T16:01:04.102056Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:70:2105] TEvAllocateResult from# 0 to# 5000 2025-03-31T16:01:04.102267Z node 1 :TX_ALLOCATOR_CLIENT WARN: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-03-31T16:01:04.102434Z node 1 :TX_ALLOCATOR_CLIENT WARN: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-03-31T16:01:04.102617Z node 1 :TX_ALLOCATOR_CLIENT WARN: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-03-31T16:01:04.102770Z node 1 :TX_ALLOCATOR_CLIENT WARN: AllocateTxIds: requested many txIds. Just a warning, request is processed. 
Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-03-31T16:01:04.102915Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:70:2105] requested range size#5000 2025-03-31T16:01:04.103526Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:01:04.103591Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:01:04.103671Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 5000 Reserved to# 10000 2025-03-31T16:01:04.103708Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:70:2105] TEvAllocateResult from# 5000 to# 10000 2025-03-31T16:01:04.103914Z node 1 :TX_ALLOCATOR_CLIENT WARN: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 500 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-03-31T16:01:04.104068Z node 1 :TX_ALLOCATOR_CLIENT WARN: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-03-31T16:01:04.104255Z node 1 :TX_ALLOCATOR_CLIENT WARN: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 2500 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-03-31T16:01:04.104540Z node 1 :TX_ALLOCATOR_CLIENT WARN: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-03-31T16:01:04.104683Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:70:2105] requested range size#5000 2025-03-31T16:01:04.105033Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:5:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:01:04.105125Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:5:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:01:04.105218Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 10000 Reserved to# 15000 2025-03-31T16:01:04.105256Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:70:2105] TEvAllocateResult from# 10000 to# 15000 2025-03-31T16:01:04.105417Z node 1 :TX_ALLOCATOR_CLIENT WARN: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 3000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest >> TTxAllocatorClientTest::Boot [GOOD] Test command err: 2025-03-31T16:01:04.312182Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-03-31T16:01:04.312819Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-03-31T16:01:04.313696Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. 
logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-03-31T16:01:04.315694Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:01:04.316330Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2025-03-31T16:01:04.328098Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:01:04.328330Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:01:04.328429Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:01:04.328515Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-03-31T16:01:04.328665Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:01:04.328773Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2025-03-31T16:01:04.328938Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 >> TBoardSubscriberTest::ManySubscribersManyPublisher |71.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest |71.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::ReconnectReplica >> TPQCDTest::TestUnavailableWithoutBoth [GOOD] >> TPQCDTest::TestUnavailableWithoutClustersList [GOOD] >> TBoardSubscriberTest::DropByDisconnect [GOOD] >> TBoardSubscriberTest::SimpleSubscriber [GOOD] |71.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest |71.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::NotAvailableByShutdown [GOOD] >> TOlap::StoreStats [GOOD] >> TOlap::StoreStatsQuota |71.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::ManySubscribersManyPublisher [GOOD] >> DataShardSnapshots::BrokenLockChangesDontLeak [GOOD] >> TBoardSubscriberTest::ReconnectReplica [GOOD] |71.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::NotAvailableByShutdown [GOOD] |71.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::DropByDisconnect [GOOD] |71.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::SimpleSubscriber [GOOD] |71.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::ManySubscribersManyPublisher [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestUnavailableWithoutClustersList [GOOD] Test command err: 2025-03-31T16:01:00.913943Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004297656990559:2209];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:01:00.914468Z node 1 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0015fe/r3tmp/tmpeKvESw/pdisk_1.dat 2025-03-31T16:01:01.524657Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:01:01.533146Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:01:01.533279Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:01:01.535409Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20195, node 1 2025-03-31T16:01:01.665862Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/0l5c/0015fe/r3tmp/yandex2lhv8H.tmp 2025-03-31T16:01:01.665891Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/0l5c/0015fe/r3tmp/yandex2lhv8H.tmp 2025-03-31T16:01:01.666078Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/0l5c/0015fe/r3tmp/yandex2lhv8H.tmp 2025-03-31T16:01:01.666202Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:01:04.316597Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004314836860293:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:01:04.316685Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:01:04.316906Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004314836860320:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:01:04.329678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710657:3, at schemeshard: 72057594046644480 2025-03-31T16:01:04.363621Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488004314836860322:2379], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710657 completed, doublechecking } 2025-03-31T16:01:04.477751Z node 1 :TX_PROXY ERROR: Actor# [1:7488004314836860383:2370] txid# 281474976710658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:01:05.062895Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488004314836860404:2387], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:01:05.064651Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZmZlMDIzODQtNGI1YTFhOGEtYzYzODJmYjQtOTliOWZkYTg=, ActorId: [1:7488004314836860291:2374], ActorState: ExecuteState, TraceId: 01jqpeypsge3b1sty9b41r2111, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:01:05.077580Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestUnavailableWithoutBoth [GOOD] Test command err: 2025-03-31T16:01:00.914587Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004297153167885:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:01:00.914618Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001632/r3tmp/tmpShFw5N/pdisk_1.dat 2025-03-31T16:01:01.597732Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:01:01.597834Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:01:01.600107Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:01:01.603412Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24342, node 1 2025-03-31T16:01:01.754622Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:01:01.754646Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:01:01.754671Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:01:01.754797Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:01:04.560263Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004314333037808:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:01:04.560428Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:01:04.560796Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004314333037820:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:01:04.573347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710657:3, at schemeshard: 72057594046644480 2025-03-31T16:01:04.598126Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488004314333037822:2381], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710657 completed, doublechecking } 2025-03-31T16:01:04.704878Z node 1 :TX_PROXY ERROR: Actor# [1:7488004314333037883:2372] txid# 281474976710658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:01:05.076060Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488004314333037896:2389], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:01:05.076596Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjFiYmJhMDQtYzE1M2RhYy02ZjkyNDQ0ZS05M2IzN2YwNQ==, ActorId: [1:7488004314333037805:2376], ActorState: ExecuteState, TraceId: 01jqpeyq180a85aj5gjpkp0h59, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:01:05.113527Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } |71.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::ReconnectReplica [GOOD] >> TPersqueueControlPlaneTestSuite::SetupReadLockSessionWithDatabase ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::ShardRestartAfterDropTableAndAbort [GOOD] Test command err: 2025-03-31T15:58:18.064297Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:58:18.064578Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:18.064769Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001476/r3tmp/tmps1eY1E/pdisk_1.dat 2025-03-31T15:58:18.592402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T15:58:18.635331Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:58:18.679896Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-31T15:58:18.681051Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:58:18.681150Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:58:18.681271Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-03-31T15:58:18.692840Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:58:18.777728Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-03-31T15:58:18.777837Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-03-31T15:58:18.777996Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-03-31T15:58:18.910323Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-03-31T15:58:18.910434Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T15:58:18.911126Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-03-31T15:58:18.911228Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-03-31T15:58:18.911624Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T15:58:18.911856Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T15:58:18.911960Z node 1 :TX_PROXY DEBUG: Actor# 
[1:641:2549] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-03-31T15:58:18.912347Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvClientConnected 2025-03-31T15:58:18.913887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T15:58:18.915164Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-03-31T15:58:18.915253Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-03-31T15:58:18.957781Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-03-31T15:58:18.959163Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-03-31T15:58:18.959795Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-31T15:58:18.960092Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:58:19.001940Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T15:58:19.002607Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:58:19.002709Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T15:58:19.004182Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T15:58:19.004317Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T15:58:19.004379Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T15:58:19.004786Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T15:58:19.004930Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T15:58:19.005005Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-31T15:58:19.005390Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T15:58:19.087031Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T15:58:19.087329Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T15:58:19.087465Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-31T15:58:19.087509Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T15:58:19.087567Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T15:58:19.087622Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T15:58:19.087866Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
2025-03-31T15:58:19.087938Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T15:58:19.088253Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T15:58:19.088365Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T15:58:19.088474Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T15:58:19.088517Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T15:58:19.088561Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-31T15:58:19.088601Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-31T15:58:19.088638Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-31T15:58:19.088677Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T15:58:19.088723Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T15:58:19.088839Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T15:58:19.088878Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T15:58:19.095343Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-31T15:58:19.095664Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-03-31T15:58:19.095730Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T15:58:19.095918Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T15:58:19.096221Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-31T15:58:19.096298Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T15:58:19.096421Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T15:58:19.096478Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-31T15:58:19.096529Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-31T15:58:19.096581Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-03-31T15:58:19.096618Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-31T15:58:19.096968Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-31T15:58:19.097018Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-31T15:58:19.097071Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-31T15:58:19.097113Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 
72075186224037888 on unit FinishPropose 2025-03-31T15:58:19.097177Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-31T15:58:19.097230Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-31T15:58:19.097270Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-31T15:58:19.097309Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-31T15:58:19.097338Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-31T15:58:19.098738Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T15:58:19.098799Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-31T15:58:19.098833Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-31T15:58:19.098896Z node 1 :TX_DATASHARD TRACE: Prop ... ounter: 0 SchemeShard: 72057594046644480 PathId: 2 HasWrites: true } SendingShards: 72075186224037888 ReceivingShards: 72075186224037888 Op: Commit } 2025-03-31T16:01:03.252559Z node 14 :TX_DATASHARD TRACE: Handle TTxWrite: at tablet# 72075186224037888 2025-03-31T16:01:03.252772Z node 14 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_WRONG_SHARD_STATE;details=Rejecting data TxId 0 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state);tx_id=0; 2025-03-31T16:01:03.252902Z node 14 :TX_DATASHARD NOTICE: Rejecting data TxId 0 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state) 2025-03-31T16:01:03.253168Z node 14 :KQP_COMPUTE ERROR: SelfId: [14:871:2684], Table: `/Root/table` ([72057594046644480:2:1]), SessionActorId: [14:843:2684]Got WRONG SHARD STATE for table `/Root/table`. ShardID=72075186224037888, Sink=[14:871:2684].{
: Error: Rejecting data TxId 0 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state), code: 2029 } 2025-03-31T16:01:03.253397Z node 14 :KQP_COMPUTE ERROR: SelfId: [14:864:2684], SessionActorId: [14:843:2684], statusCode=UNAVAILABLE. Issue=
: Error: Wrong shard state. Table `/Root/table`., code: 2005
: Error: Rejecting data TxId 0 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state), code: 2029 . sessionActorId=[14:843:2684]. isRollback=0 2025-03-31T16:01:03.253943Z node 14 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=14&id=NWEyNjU0NGItZjc2ZjJmZjEtNjI2OTY5YmItMWM4MTBmZjA=, ActorId: [14:843:2684], ActorState: ExecuteState, TraceId: 01jqpeynme3cf0wagph6am1w49, got TEvKqpBuffer::TEvError in ExecuteState, status: UNAVAILABLE send to: [14:996:2684] from: [14:864:2684] 2025-03-31T16:01:03.254419Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 278003712, Sender [14:864:2684], Recipient [14:689:2579]: NKikimrDataEvents.TEvWrite TxMode: MODE_IMMEDIATE Locks { Locks { LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 2 HasWrites: true } Op: Rollback } 2025-03-31T16:01:03.254465Z node 14 :TX_DATASHARD TRACE: Handle TTxWrite: at tablet# 72075186224037888 2025-03-31T16:01:03.254552Z node 14 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_WRONG_SHARD_STATE;details=Rejecting data TxId 0 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state);tx_id=0; 2025-03-31T16:01:03.254610Z node 14 :TX_DATASHARD NOTICE: Rejecting data TxId 0 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state) 2025-03-31T16:01:03.254828Z node 14 :KQP_EXECUTER ERROR: ActorId: [14:996:2684] TxId: 281474976715665. Ctx: { TraceId: 01jqpeynme3cf0wagph6am1w49, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=14&id=NWEyNjU0NGItZjc2ZjJmZjEtNjI2OTY5YmItMWM4MTBmZjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNAVAILABLE: {
: Error: Wrong shard state. Table `/Root/table`., code: 2005 subissue: {
: Error: Rejecting data TxId 0 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state), code: 2029 } } 2025-03-31T16:01:03.255281Z node 14 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=14&id=NWEyNjU0NGItZjc2ZjJmZjEtNjI2OTY5YmItMWM4MTBmZjA=, ActorId: [14:843:2684], ActorState: ExecuteState, TraceId: 01jqpeynme3cf0wagph6am1w49, Create QueryResponse for error on request, msg: ... blocking NKikimr::NLongTxService::TEvLongTxService::TEvLockStatus from LONG_TX_SERVICE to TX_DATASHARD_ACTOR 2025-03-31T16:01:03.257858Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 65543, Sender [14:593:2518], Recipient [14:689:2579]: NActors::TEvents::TEvPoison 2025-03-31T16:01:03.258399Z node 14 :TX_DATASHARD INFO: OnDetach: 72075186224037888 2025-03-31T16:01:03.258509Z node 14 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037888 2025-03-31T16:01:03.282924Z node 14 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [14:1000:2813], Recipient [14:1002:2814]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:01:03.286244Z node 14 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [14:1000:2813], Recipient [14:1002:2814]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:01:03.286427Z node 14 :TX_DATASHARD TRACE: StateInit, received event# 268828684, Sender [14:1000:2813], Recipient [14:1002:2814]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:01:03.298487Z node 14 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [14:1002:2814] 2025-03-31T16:01:03.298946Z node 14 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:01:03.311539Z node 14 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:01:03.314251Z node 14 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:01:03.318374Z node 14 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:01:03.318555Z node 14 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:01:03.318688Z node 14 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:01:03.319739Z node 14 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:01:03.320224Z node 14 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:01:03.320344Z node 14 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:01:03.320455Z node 14 :TX_DATASHARD INFO: Switched to work state PreOffline tabletId 72075186224037888 2025-03-31T16:01:03.320686Z node 14 :TX_DATASHARD DEBUG: 72075186224037888 in PreOffline state HasSharedBobs: 1 SchemaOperations: [ ] OutReadSets count: 1 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-03-31T16:01:03.320794Z node 14 :TX_DATASHARD INFO: Send registration request to time cast PreOffline tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-31T16:01:03.321053Z node 14 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [14:1016:2821] 2025-03-31T16:01:03.321129Z node 14 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:01:03.321214Z node 14 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: PreOffline, queue size: 0 2025-03-31T16:01:03.321290Z node 14 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 
2025-03-31T16:01:03.321805Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [14:61:2108], Recipient [14:1002:2814]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715661 LockNode: 14 Status: STATUS_NOT_FOUND 2025-03-31T16:01:03.322425Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [14:1002:2814], Recipient [14:1002:2814]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:01:03.322490Z node 14 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:01:03.322988Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 2146435075, Sender [14:1002:2814], Recipient [14:1002:2814]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressResendReadSet 2025-03-31T16:01:03.323057Z node 14 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressResendReadSet 2025-03-31T16:01:03.324635Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 270270976, Sender [14:24:2071], Recipient [14:1002:2814]: {TEvRegisterTabletResult TabletId# 72075186224037888 Entry# 600} 2025-03-31T16:01:03.324716Z node 14 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2025-03-31T16:01:03.324810Z node 14 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 600 2025-03-31T16:01:03.324897Z node 14 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:01:03.325570Z node 14 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:01:03.325658Z node 14 :TX_DATASHARD INFO: Progress tx at non-ready tablet 72075186224037888 state 5 2025-03-31T16:01:03.326305Z node 14 :TX_DATASHARD DEBUG: Start TTxProgressResendRS at tablet 72075186224037888 2025-03-31T16:01:03.326400Z node 14 :TX_DATASHARD INFO: Resend RS at 72075186224037888 from 72075186224037888 to 72075186224037889 txId 281474976715663 2025-03-31T16:01:03.326493Z node 14 :TX_DATASHARD DEBUG: Send RS 1 at 72075186224037888 from 72075186224037888 to 72075186224037889 txId 281474976715663 2025-03-31T16:01:03.329441Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [14:1002:2814], Recipient [14:903:2727]: {TEvReadSet step# 500 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 138 Seqno# 1 Flags# 0} 2025-03-31T16:01:03.329537Z node 14 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-03-31T16:01:03.329660Z node 14 :TX_DATASHARD DEBUG: Receive RS at 72075186224037889 source 72075186224037888 dest 72075186224037889 producer 72075186224037888 txId 281474976715663 2025-03-31T16:01:03.329847Z node 14 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 500 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 138 Seqno# 1 Flags# 0} 2025-03-31T16:01:03.329954Z node 14 :TX_DATASHARD NOTICE: Outdated readset for 500:281474976715663 at 72075186224037889 2025-03-31T16:01:03.330070Z node 14 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037889 2025-03-31T16:01:03.330185Z node 14 :TX_DATASHARD DEBUG: Send RS Ack at 72075186224037889 {TEvReadSet step# 500 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 138 Seqno# 1 Flags# 0} 
2025-03-31T16:01:03.330410Z node 14 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:01:03.330617Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 270270978, Sender [14:24:2071], Recipient [14:1002:2814]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 400 NextReadStep# 600 ReadStep# 600 } 2025-03-31T16:01:03.330675Z node 14 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2025-03-31T16:01:03.330779Z node 14 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 400 next step 600 2025-03-31T16:01:03.331000Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [14:903:2727], Recipient [14:1002:2814]: {TEvReadSet step# 500 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 1} 2025-03-31T16:01:03.331050Z node 14 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:01:03.331178Z node 14 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 281474976715663 2025-03-31T16:01:03.331321Z node 14 :TX_DATASHARD DEBUG: 72075186224037888 in PreOffline state HasSharedBobs: 1 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: >> TPersQueueTest::WriteNonExistingPartition [GOOD] >> TPersQueueTest::WriteNonExistingTopic >> TPersQueueNewSchemeCacheTest::TestWriteStat1stClass >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-61 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-62 >> TPersQueueNewSchemeCacheTest::CheckGrpcWriteNoDC |71.3%| [TA] $(B)/ydb/core/base/ut_board_subscriber/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardTTLTests::BuildAsyncIndexShouldSucceed >> TPQCDTest::TestPrioritizeLocalDatacenter [GOOD] >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-true |71.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::BrokenLockChangesDontLeak [GOOD] Test command err: 2025-03-31T15:58:20.264731Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T15:58:20.264981Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:20.265132Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0014cb/r3tmp/tmpGxUVwb/pdisk_1.dat 2025-03-31T15:58:21.137516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T15:58:21.230553Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:58:21.271463Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-31T15:58:21.272557Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:58:21.272675Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:58:21.272856Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-03-31T15:58:21.284165Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:58:21.385171Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-03-31T15:58:21.385250Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-03-31T15:58:21.385423Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-03-31T15:58:21.586692Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-03-31T15:58:21.586795Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T15:58:21.587405Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-03-31T15:58:21.587496Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-03-31T15:58:21.587821Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T15:58:21.587985Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T15:58:21.588073Z node 1 :TX_PROXY DEBUG: Actor# 
[1:641:2549] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-03-31T15:58:21.588380Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvClientConnected 2025-03-31T15:58:21.589752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T15:58:21.590709Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-03-31T15:58:21.590790Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-03-31T15:58:21.627865Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-03-31T15:58:21.629109Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-03-31T15:58:21.629539Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-31T15:58:21.629847Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T15:58:21.679731Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T15:58:21.680502Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T15:58:21.680619Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T15:58:21.682395Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T15:58:21.682490Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T15:58:21.682556Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T15:58:21.682910Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T15:58:21.683254Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T15:58:21.683351Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-31T15:58:21.683762Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T15:58:21.736735Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T15:58:21.736939Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T15:58:21.737065Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-31T15:58:21.737101Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T15:58:21.737145Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T15:58:21.737188Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T15:58:21.737399Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
2025-03-31T15:58:21.737461Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T15:58:21.737774Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T15:58:21.737873Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T15:58:21.738005Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T15:58:21.738045Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T15:58:21.738083Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-31T15:58:21.738132Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-31T15:58:21.738170Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-31T15:58:21.738205Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T15:58:21.738252Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T15:58:21.738360Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T15:58:21.738400Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T15:58:21.738447Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-31T15:58:21.738544Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-03-31T15:58:21.738586Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T15:58:21.738969Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T15:58:21.739240Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-31T15:58:21.739324Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T15:58:21.739428Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T15:58:21.739477Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-31T15:58:21.739513Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-31T15:58:21.739549Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-03-31T15:58:21.739582Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-31T15:58:21.739954Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-31T15:58:21.739997Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-31T15:58:21.740036Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-31T15:58:21.740076Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 
72075186224037888 on unit FinishPropose 2025-03-31T15:58:21.740126Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-31T15:58:21.740155Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-31T15:58:21.740189Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-31T15:58:21.740221Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-31T15:58:21.740261Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-31T15:58:21.741547Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T15:58:21.741602Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-31T15:58:21.741636Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-31T15:58:21.741678Z node 1 :TX_DATASHARD TRACE: Prop ... _DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v400/0 IncompleteEdge# v{min} UnprotectedReadEdge# v400/18446744073709551615 ImmediateWriteEdge# v401/0 ImmediateWriteEdgeReplied# v401/0 2025-03-31T16:01:05.313112Z node 16 :TX_DATASHARD TRACE: Activated operation [0:5] at 72075186224037888 2025-03-31T16:01:05.313207Z node 16 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-03-31T16:01:05.313260Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-03-31T16:01:05.313297Z node 16 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit ExecuteWrite 2025-03-31T16:01:05.313338Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit ExecuteWrite 2025-03-31T16:01:05.313403Z node 16 :TX_DATASHARD DEBUG: Executing write operation for [0:5] at 72075186224037888 2025-03-31T16:01:05.313505Z node 16 :TX_DATASHARD TRACE: Operation [0:5] at 72075186224037888 aborting because it cannot acquire locks 2025-03-31T16:01:05.313632Z node 16 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because it cannot acquire locks;tx_id=5; 2025-03-31T16:01:05.313770Z node 16 :TX_DATASHARD INFO: Write transaction 5 at 72075186224037888 has an error: Operation is aborting because it cannot acquire locks 2025-03-31T16:01:05.313854Z node 16 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-03-31T16:01:05.313899Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit ExecuteWrite 2025-03-31T16:01:05.313927Z node 16 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit FinishProposeWrite 2025-03-31T16:01:05.313959Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit FinishProposeWrite 2025-03-31T16:01:05.314191Z node 16 :TX_DATASHARD ERROR: Prepare transaction failed. 
txid 5 at tablet 72075186224037888 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because it cannot acquire locks" issue_code: 2001 severity: 1 } 2025-03-31T16:01:05.314273Z node 16 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is DelayComplete 2025-03-31T16:01:05.314323Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit FinishProposeWrite 2025-03-31T16:01:05.314408Z node 16 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit CompletedOperations 2025-03-31T16:01:05.314500Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit CompletedOperations 2025-03-31T16:01:05.314583Z node 16 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-03-31T16:01:05.314615Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit CompletedOperations 2025-03-31T16:01:05.314663Z node 16 :TX_DATASHARD TRACE: Execution plan for [0:5] at 72075186224037888 has finished 2025-03-31T16:01:05.314786Z node 16 :TX_DATASHARD TRACE: TTxWrite complete: at tablet# 72075186224037888 2025-03-31T16:01:05.314871Z node 16 :TX_DATASHARD TRACE: Complete execution for [0:5] at 72075186224037888 on unit FinishProposeWrite 2025-03-31T16:01:05.314969Z node 16 :TX_DATASHARD TRACE: Propose transaction complete txid 5 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_LOCKS_BROKEN 2025-03-31T16:01:05.315258Z node 16 :TX_DATASHARD ERROR: Errors while proposing transaction txid 5 at tablet 72075186224037888 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because it cannot acquire locks" issue_code: 2001 severity: 1 } 2025-03-31T16:01:05.315418Z node 16 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:01:05.315854Z node 16 :KQP_COMPUTE ERROR: SelfId: [16:906:2681], Table: `/Root/table` ([72057594046644480:2:1]), SessionActorId: [16:841:2681]Got LOCKS BROKEN for table `/Root/table`. ShardID=72075186224037888, Sink=[16:906:2681].{
: Error: Operation is aborting because it cannot acquire locks, code: 2001 } 2025-03-31T16:01:05.316119Z node 16 :KQP_COMPUTE ERROR: SelfId: [16:899:2681], SessionActorId: [16:841:2681], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/table`., code: 2001
: Error: Operation is aborting because it cannot acquire locks, code: 2001 . sessionActorId=[16:841:2681]. isRollback=0 2025-03-31T16:01:05.316849Z node 16 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=16&id=NDA4YTVjZmMtMzM2MzlmYWEtOGIzODc1ZDAtN2UzNDZkZTI=, ActorId: [16:841:2681], ActorState: ExecuteState, TraceId: 01jqpeyqewfgjetjpfknjfe75d, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [16:900:2681] from: [16:899:2681] 2025-03-31T16:01:05.317149Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 278003712, Sender [16:899:2681], Recipient [16:689:2579]: NKikimrDataEvents.TEvWrite TxMode: MODE_IMMEDIATE Locks { Locks { LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 2 } Op: Rollback } 2025-03-31T16:01:05.317234Z node 16 :TX_DATASHARD TRACE: Handle TTxWrite: at tablet# 72075186224037888 2025-03-31T16:01:05.317482Z node 16 :KQP_EXECUTER ERROR: ActorId: [16:900:2681] TxId: 281474976715663. Ctx: { TraceId: 01jqpeyqewfgjetjpfknjfe75d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=16&id=NDA4YTVjZmMtMzM2MzlmYWEtOGIzODc1ZDAtN2UzNDZkZTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/table`., code: 2001 subissue: {
: Error: Operation is aborting because it cannot acquire locks, code: 2001 } } 2025-03-31T16:01:05.319476Z node 16 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=16&id=NDA4YTVjZmMtMzM2MzlmYWEtOGIzODc1ZDAtN2UzNDZkZTI=, ActorId: [16:841:2681], ActorState: ExecuteState, TraceId: 01jqpeyqewfgjetjpfknjfe75d, Create QueryResponse for error on request, msg: 2025-03-31T16:01:05.321170Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 2146435074, Sender [16:689:2579], Recipient [16:689:2579]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2025-03-31T16:01:05.321231Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2025-03-31T16:01:05.321320Z node 16 :TX_DATASHARD TRACE: TTxWrite:: execute at tablet# 72075186224037888 2025-03-31T16:01:05.321526Z node 16 :TX_DATASHARD TRACE: Parsing write transaction for 0 at 72075186224037888, record: TxMode: MODE_IMMEDIATE Locks { Locks { LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 2 } Op: Rollback } 2025-03-31T16:01:05.321682Z node 16 :TX_DATASHARD TRACE: -- AddWriteRange: (Uint64 : 281474976715661, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 2) table: [1:997:0] 2025-03-31T16:01:05.321817Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit CheckWrite 2025-03-31T16:01:05.321873Z node 16 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2025-03-31T16:01:05.321921Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit CheckWrite 2025-03-31T16:01:05.321967Z node 16 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-03-31T16:01:05.322001Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit BuildAndWaitDependencies 2025-03-31T16:01:05.322046Z node 16 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v400/0 IncompleteEdge# v{min} UnprotectedReadEdge# v400/18446744073709551615 ImmediateWriteEdge# v401/0 ImmediateWriteEdgeReplied# v401/0 2025-03-31T16:01:05.322110Z node 16 :TX_DATASHARD TRACE: Activated operation [0:6] at 72075186224037888 2025-03-31T16:01:05.322150Z node 16 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2025-03-31T16:01:05.322181Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-03-31T16:01:05.322208Z node 16 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037888 to execution unit ExecuteWrite 2025-03-31T16:01:05.322237Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit ExecuteWrite 2025-03-31T16:01:05.322271Z node 16 :TX_DATASHARD DEBUG: Executing write operation for [0:6] at 72075186224037888 2025-03-31T16:01:05.322410Z node 16 :TX_DATASHARD TRACE: KqpEraseLock LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 2 2025-03-31T16:01:05.322500Z node 16 :TX_DATASHARD DEBUG: Skip empty write operation for [0:6] at 72075186224037888 2025-03-31T16:01:05.322643Z node 16 :TX_DATASHARD TRACE: add locks to result: 0 2025-03-31T16:01:05.322763Z node 16 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-31T16:01:05.322811Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit 
ExecuteWrite 2025-03-31T16:01:05.322892Z node 16 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037888 to execution unit FinishProposeWrite 2025-03-31T16:01:05.322962Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit FinishProposeWrite 2025-03-31T16:01:05.323003Z node 16 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is DelayComplete 2025-03-31T16:01:05.323033Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit FinishProposeWrite 2025-03-31T16:01:05.323065Z node 16 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037888 to execution unit CompletedOperations 2025-03-31T16:01:05.323122Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit CompletedOperations 2025-03-31T16:01:05.323176Z node 16 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2025-03-31T16:01:05.323205Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit CompletedOperations 2025-03-31T16:01:05.323253Z node 16 :TX_DATASHARD TRACE: Execution plan for [0:6] at 72075186224037888 has finished 2025-03-31T16:01:05.323334Z node 16 :TX_DATASHARD TRACE: TTxWrite complete: at tablet# 72075186224037888 2025-03-31T16:01:05.323383Z node 16 :TX_DATASHARD TRACE: Complete execution for [0:6] at 72075186224037888 on unit FinishProposeWrite 2025-03-31T16:01:05.323424Z node 16 :TX_DATASHARD TRACE: Propose transaction complete txid 6 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_COMPLETED 2025-03-31T16:01:05.323514Z node 16 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:01:05.323807Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [16:61:2108], Recipient [16:689:2579]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715661 LockNode: 16 Status: STATUS_NOT_FOUND 2025-03-31T16:01:05.329753Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [16:913:2731], Recipient [16:689:2579]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:01:05.329900Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:01:05.330012Z node 16 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [16:912:2730], serverId# [16:913:2731], sessionId# [0:0:0] 2025-03-31T16:01:05.330294Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269553224, Sender [16:593:2518], Recipient [16:689:2579]: NKikimr::TEvDataShard::TEvGetOpenTxs >> TSchemeShardColumnTableTTL::AlterColumnTable_Negative |71.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut |71.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut |71.3%| [TA] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/test-results/unittest/{meta.json ... results_accumulator.log} |71.3%| [TA] {RESULT} $(B)/ydb/core/base/ut_board_subscriber/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TSchemeShardTTLTests::LegacyTtlSettingsNoTiersAlterTable |71.3%| [LD] {RESULT} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut |71.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldSuccess |71.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTestsWithReboots::MoveTable ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestPrioritizeLocalDatacenter [GOOD] Test command err: 2025-03-31T16:01:00.424661Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004299233406173:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:01:00.425463Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00161c/r3tmp/tmpDYLaGr/pdisk_1.dat 2025-03-31T16:01:01.273596Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:01:01.273677Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:01:01.279650Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:01:01.282153Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10596, node 1 2025-03-31T16:01:01.571783Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/0l5c/00161c/r3tmp/yandexrqRF5e.tmp 2025-03-31T16:01:01.571823Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/0l5c/00161c/r3tmp/yandexrqRF5e.tmp 2025-03-31T16:01:01.575622Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/0l5c/00161c/r3tmp/yandexrqRF5e.tmp 2025-03-31T16:01:01.575809Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10503 PQClient connected to localhost:10596 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:01:02.428636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:01:02.465463Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:01:02.497847Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:04.890980Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004316413276087:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:01:04.891187Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:01:04.894698Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004316413276100:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:01:04.899763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-03-31T16:01:04.918834Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488004316413276102:2339], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-03-31T16:01:05.243367Z node 1 :TX_PROXY ERROR: Actor# [1:7488004320708243463:2394] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:01:05.319306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:01:05.427958Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488004299233406173:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:01:05.428017Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:01:05.429623Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488004320708243471:2345], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:01:05.435594Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzUyMmQ4M2MtZTJlNTU2YmQtOGJlNTAyZjgtMmYyNWVmMDM=, ActorId: [1:7488004316413276085:2333], ActorState: ExecuteState, TraceId: 01jqpeyqak2379k1mrvh9dy0gf, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:01:05.437941Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T16:01:05.494790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:01:05.592004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-03-31T16:01:05.834669Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jqpeyr4j2c1qkcnj4k9ca7we, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjBmYmIxN2ItODZlMjViMDctOTcyNjVjYjMtN2UwM2UxNTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root |71.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator |71.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator |71.3%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator >> TSchemeShardTTLTests::AlterTableShouldSucceedOnAsyncIndexedTable >> KqpJoinOrder::TPCDS92+ColumnStore [GOOD] >> TSchemeShardTTLTests::ShouldCheckQuotas >> KqpIndexes::PrefixedVectorIndexOrderByCosineDistanceNullableLevel2 [GOOD] >> TSchemeShardTTLTests::CreateTableShouldFailOnUnspecifiedTTL >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-false >> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_10 [GOOD] >> TSchemeShardTTLTests::LegacyTtlSettingsNoTiersAlterTable [GOOD] >> TPersQueueTest::DirectReadStop [GOOD] >> TPersQueueTest::DirectReadCleanCache >> TSchemeShardColumnTableTTL::CreateColumnTable >> SystemView::Nodes [GOOD] >> SystemView::PartitionStatsLocksFields >> TPQCDTest::TestUnavailableWithoutNetClassifier [GOOD] |71.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::LegacyTtlSettingsNoTiersAlterTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:01:09.060301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:01:09.060417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:01:09.060458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:01:09.060496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:01:09.060568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:01:09.060621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:01:09.060681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:01:09.060820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:01:09.061212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:01:09.160409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:01:09.160487Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:01:09.167373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::LegacyTtlSettingsNoTiersAlterTable [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141]
2025-03-31T16:01:09.060301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-03-31T16:01:09.060417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-31T16:01:09.060458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-03-31T16:01:09.060496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-03-31T16:01:09.060568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-03-31T16:01:09.060621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-03-31T16:01:09.060681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-31T16:01:09.060820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-03-31T16:01:09.061212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-03-31T16:01:09.160409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-31T16:01:09.160487Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:01:09.167373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-03-31T16:01:09.167491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-03-31T16:01:09.167660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-03-31T16:01:09.174031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-03-31T16:01:09.174302Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-03-31T16:01:09.174980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-03-31T16:01:09.175229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-03-31T16:01:09.178431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-03-31T16:01:09.183040Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-31T16:01:09.183161Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-31T16:01:09.183357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-03-31T16:01:09.183413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-31T16:01:09.183478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-03-31T16:01:09.183680Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-03-31T16:01:09.191330Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062]
2025-03-31T16:01:09.358037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-03-31T16:01:09.358325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:01:09.358652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-03-31T16:01:09.358935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-03-31T16:01:09.358996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:01:09.364733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-03-31T16:01:09.364939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-03-31T16:01:09.365207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:01:09.365271Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-03-31T16:01:09.365310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-03-31T16:01:09.365363Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-03-31T16:01:09.367911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:01:09.367981Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-03-31T16:01:09.368021Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-03-31T16:01:09.370264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:01:09.370316Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:01:09.370363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-03-31T16:01:09.370437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-03-31T16:01:09.374524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-03-31T16:01:09.377010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-03-31T16:01:09.377294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-03-31T16:01:09.378468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-03-31T16:01:09.378655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-03-31T16:01:09.378709Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-03-31T16:01:09.379025Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-03-31T16:01:09.379105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-03-31T16:01:09.379293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-03-31T16:01:09.379379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-03-31T16:01:09.382081Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-31T16:01:09.382169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-31T16:01:09.382401Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-31T16:01:09.382472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1
2025-03-31T16:01:09.382746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:01:09.382796Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-03-31T16:01:09.382920Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-03-31T16:01:09.382961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-31T16:01:09.383008Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-03-31T16:01:09.383048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-31T16:01:09.383111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-03-31T16:01:09.383163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-31T16:01:09.383207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-03-31T16:01:09.383240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-03-31T16:01:09.383352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-03-31T16:01:09.383405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-03-31T16:01:09.383457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-03-31T16:01:09.385761Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-03-31T16:01:09.385900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-03-31T16:01:09.385947Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... type: 269090816
2025-03-31T16:01:09.714272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 102 at step: 5000003
FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003
FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003
2025-03-31T16:01:09.714907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944
2025-03-31T16:01:09.715042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-03-31T16:01:09.715128Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId# 102:0 HandleReply TEvOperationPlan, operationId: 102:0, stepId: 5000003, at schemeshard: 72057594046678944
2025-03-31T16:01:09.715395Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129
2025-03-31T16:01:09.715542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3
2025-03-31T16:01:09.719421Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-31T16:01:09.719477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2]
2025-03-31T16:01:09.719726Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-31T16:01:09.719800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2
FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003
2025-03-31T16:01:09.720091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944
2025-03-31T16:01:09.720140Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944
2025-03-31T16:01:09.720753Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102
2025-03-31T16:01:09.720846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102
2025-03-31T16:01:09.720880Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102
2025-03-31T16:01:09.720919Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4
2025-03-31T16:01:09.720954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4
2025-03-31T16:01:09.721052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true
2025-03-31T16:01:09.724106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102
2025-03-31T16:01:09.756058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1070 } }
2025-03-31T16:01:09.756111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0
2025-03-31T16:01:09.756266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1070 } }
2025-03-31T16:01:09.756382Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1070 } }
FAKE_COORDINATOR: Erasing txId 102
2025-03-31T16:01:09.757225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 305 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2
2025-03-31T16:01:09.757274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0
2025-03-31T16:01:09.757408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 305 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2
2025-03-31T16:01:09.757468Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944
2025-03-31T16:01:09.757542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 305 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2
2025-03-31T16:01:09.757622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944
2025-03-31T16:01:09.757665Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944
2025-03-31T16:01:09.757694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944
2025-03-31T16:01:09.757731Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240
2025-03-31T16:01:09.771794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944
2025-03-31T16:01:09.772113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944
2025-03-31T16:01:09.772262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944
2025-03-31T16:01:09.772306Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState
2025-03-31T16:01:09.772419Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1
2025-03-31T16:01:09.772454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1
2025-03-31T16:01:09.772504Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1
2025-03-31T16:01:09.772542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1
2025-03-31T16:01:09.772581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true
2025-03-31T16:01:09.772657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:333:2312] message: TxId: 102
2025-03-31T16:01:09.772702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1
2025-03-31T16:01:09.772738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0
2025-03-31T16:01:09.772782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0
2025-03-31T16:01:09.772923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3
2025-03-31T16:01:09.779586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult
2025-03-31T16:01:09.779659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:393:2365]
TestWaitNotification: OK eventTxId 102
2025-03-31T16:01:09.780214Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-03-31T16:01:09.780481Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 282us result status StatusSuccess
2025-03-31T16:01:09.780932Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
|71.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest
>> TSchemeShardTTLTests::BuildAsyncIndexShouldSucceed [GOOD]
>> TSchemeShardTTLTests::AlterTableShouldSuccess [GOOD]
>> TSchemeShardTTLTests::CreateTableShouldFailOnUnspecifiedTTL [GOOD]
>> SystemView::QueryStatsFields [GOOD]
>> SystemView::PartitionStatsTtlFields
>> TSchemeShardTTLTests::AlterTableShouldSucceedOnAsyncIndexedTable [GOOD]
>> TSchemeShardTTLTests::LegacyTtlSettingsNoTiers
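The DescribeScheme result above shows what the TTL tests assert: TTLEnabledTable carries TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 } }. A minimal YQL sketch of a table with the equivalent one-hour TTL (standard YDB syntax; the unit test drives the schemeshard directly rather than issuing this DDL):

    CREATE TABLE TTLEnabledTable (
        key Uint64,
        modified_at Timestamp,
        PRIMARY KEY (key)
    ) WITH (
        TTL = Interval("PT1H") ON modified_at  -- rows expire one hour after modified_at
    );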
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::BuildAsyncIndexShouldSucceed [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141]
2025-03-31T16:01:08.746704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-03-31T16:01:08.746825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-31T16:01:08.746871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-03-31T16:01:08.746915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-03-31T16:01:08.746966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-03-31T16:01:08.747021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-03-31T16:01:08.747119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-31T16:01:08.747273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-03-31T16:01:08.747729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-03-31T16:01:08.890226Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-31T16:01:08.890303Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:01:08.898295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-03-31T16:01:08.898419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-03-31T16:01:08.898594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-03-31T16:01:08.909890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-03-31T16:01:08.910204Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-03-31T16:01:08.911194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-03-31T16:01:08.911435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-03-31T16:01:08.913908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-03-31T16:01:08.915648Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-31T16:01:08.915725Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-31T16:01:08.915923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-03-31T16:01:08.915976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-31T16:01:08.916031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-03-31T16:01:08.916211Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-03-31T16:01:08.925161Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062]
2025-03-31T16:01:09.139574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-03-31T16:01:09.139830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:01:09.140102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-03-31T16:01:09.140381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-03-31T16:01:09.140446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:01:09.143663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-03-31T16:01:09.143838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-03-31T16:01:09.144071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:01:09.144128Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-03-31T16:01:09.144169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-03-31T16:01:09.144208Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-03-31T16:01:09.146844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:01:09.146920Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-03-31T16:01:09.146984Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-03-31T16:01:09.149691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:01:09.149755Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:01:09.149802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-03-31T16:01:09.149889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-03-31T16:01:09.161607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-03-31T16:01:09.165915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-03-31T16:01:09.166222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-03-31T16:01:09.167404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-03-31T16:01:09.167595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-03-31T16:01:09.167673Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-03-31T16:01:09.168350Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-03-31T16:01:09.168424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-03-31T16:01:09.168662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-03-31T16:01:09.168769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-03-31T16:01:09.176373Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-31T16:01:09.176437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-31T16:01:09.176632Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-31T16:01:09.176698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1
2025-03-31T16:01:09.176989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:01:09.177045Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-03-31T16:01:09.177145Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-03-31T16:01:09.177186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-31T16:01:09.177230Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-03-31T16:01:09.177269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-31T16:01:09.177311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-03-31T16:01:09.177361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-31T16:01:09.177401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-03-31T16:01:09.177435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-03-31T16:01:09.177534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-03-31T16:01:09.177581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-03-31T16:01:09.177637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-03-31T16:01:09.188378Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-03-31T16:01:09.188582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-03-31T16:01:09.188641Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... nc, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:384:2356], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }}
2025-03-31T16:01:10.284563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710760:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710760 msg type: 269090816
2025-03-31T16:01:10.284709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710760, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 281474976710760 at step: 5000006
FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710760 at step: 5000006
2025-03-31T16:01:10.285005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710760, at schemeshard: 72057594046678944
2025-03-31T16:01:10.285084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 0/1, is published: true
2025-03-31T16:01:10.285152Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710760, at schemeshard: 72057594046678944
2025-03-31T16:01:10.285312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000006, transactions count in step: 1, at schemeshard: 72057594046678944
2025-03-31T16:01:10.285419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710760 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000006 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-03-31T16:01:10.285477Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 HandleReply TEvOperationPlan: step# 5000006
2025-03-31T16:01:10.285524Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710760:0 128 -> 240
2025-03-31T16:01:10.287574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944
2025-03-31T16:01:10.287630Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710760:0 ProgressState
2025-03-31T16:01:10.287720Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1
2025-03-31T16:01:10.287752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1
2025-03-31T16:01:10.287818Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1
2025-03-31T16:01:10.287866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1
2025-03-31T16:01:10.287912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 1/1, is published: true
2025-03-31T16:01:10.287984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:128:2153] message: TxId: 281474976710760
2025-03-31T16:01:10.288029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1
2025-03-31T16:01:10.288064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710760:0
2025-03-31T16:01:10.288093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710760:0
2025-03-31T16:01:10.288179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4
FAKE_COORDINATOR: Erasing txId 281474976710760
2025-03-31T16:01:10.290077Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760
2025-03-31T16:01:10.290164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710760
2025-03-31T16:01:10.290247Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfoId: 102
2025-03-31T16:01:10.290333Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:384:2356], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }}
2025-03-31T16:01:10.292087Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102
2025-03-31T16:01:10.292184Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:384:2356], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }}
2025-03-31T16:01:10.292273Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done
2025-03-31T16:01:10.293948Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102
2025-03-31T16:01:10.294041Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:384:2356], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }}
2025-03-31T16:01:10.294079Z node 1 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1
2025-03-31T16:01:10.294203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult
2025-03-31T16:01:10.294266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:475:2436]
TestWaitNotification: OK eventTxId 102
2025-03-31T16:01:10.294899Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-03-31T16:01:10.295192Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 304us result status StatusSuccess
2025-03-31T16:01:10.295693Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByValue" LocalPathId: 3 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 3 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
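The dump above traces the full async-index build lifecycle for UserDefinedIndexByValue (EIndexTypeGlobalAsync over column value): lock, initiate, apply, then Unlocking -> Done, finishing with the index in state EIndexStateReady. In application YQL the same index would be declared roughly as follows (a sketch assuming standard YDB syntax; the test exercises the index-builder state machine directly rather than this DDL):

    ALTER TABLE TTLEnabledTable
        ADD INDEX UserDefinedIndexByValue GLOBAL ASYNC ON (value);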
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::PrefixedVectorIndexOrderByCosineDistanceNullableLevel2 [GOOD]
Test command err:
Trying to start YDB, gRPC: 16638, MsgBus: 24253
2025-03-31T15:58:30.804266Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488003653781449758:2196];send_to=[0:7307199536658146131:7762515];
2025-03-31T15:58:30.804775Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000df1/r3tmp/tmpw22r9t/pdisk_1.dat
2025-03-31T15:58:31.756863Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T15:58:31.777339Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T15:58:31.792118Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T15:58:31.829584Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-31T15:58:31.831358Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
TServer::EnableGrpc on GrpcPort 16638, node 1
2025-03-31T15:58:32.190028Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T15:58:32.190056Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T15:58:32.190064Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T15:58:32.190211Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:24253
TClient is connected to server localhost:24253
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-31T15:58:33.540421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T15:58:33.587362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T15:58:33.922186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T15:58:34.421754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T15:58:34.597810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T15:58:35.791359Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488003653781449758:2196];send_to=[0:7307199536658146131:7762515];
2025-03-31T15:58:35.791427Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-31T15:58:38.312796Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003688141189778:2413], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T15:58:38.312929Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T15:58:39.176633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-31T15:58:39.290074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-31T15:58:39.395148Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-31T15:58:39.449940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-31T15:58:39.506896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-31T15:58:39.621133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-31T15:58:39.736398Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003692436157611:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T15:58:39.736487Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T15:58:39.736852Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003692436157616:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T15:58:39.741668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-31T15:58:39.779993Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488003692436157618:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-03-31T15:58:39.873152Z node 1 :TX_PROXY ERROR: Actor# [1:7488003692436157687:3467] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-31T15:58:41.635884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-03-31T15:58:42.244152Z node 1 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_CREATE_INDEX_BUILD: DoExecute TxId: 281474976710673 DatabaseName: "/Root" Settings { source_path: "/Root/TestTable" index { name: "index" index_columns: "user" index_columns: "emb" global_vector_kmeans_tree_index { vector_settings { settings { metric: SIMILARITY_COSINE vector_type: VECTOR_TYPE_UINT8 vector_dimension: 2 } clusters: 2 levels: 1 } } } }
2025-03-31T15:58:42.258004Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710673
2025-03-31T15:58:42.258150Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Locking, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:7488003705321060153:2535], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }}
2025-03-31T15:58:42.258382Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, BuildIndexId: 281474976710673, txId# 281474976715757
2025-03-31T15:58:42.258451Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, buildInfo: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Locking, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:7488003705321060153:2535], AlterMainTableTxId: 0, ...
2025-03-31T15:58:42.258848Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710673
2025-03-31T15:58:42.258898Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Locking, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:7488003705321060153:2535], AlterMainTableTxId: 0, ... e: id# 281474976715673
2025-03-31T16:00:08.742843Z node 3 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976715673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [3:7488004070847134047:2534], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 60, upload bytes: 1500, read rows: 69, read bytes: 2322 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }}
2025-03-31T16:00:08.742945Z node 3 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, BuildIndexId: 281474976715673, txId# 281474976710766
2025-03-31T16:00:08.742997Z node 3 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, buildInfo: TBuildInfo{ IndexBuildId: 281474976715673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [3:7488004070847134047:2534], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 60, upload bytes: 1500, read rows: 69, read bytes: 2322 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }}
2025-03-31T16:00:08.755949Z node 3 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976715673
2025-03-31T16:00:08.756047Z node 3 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976715673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [3:7488004070847134047:2534], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710766, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 60, upload bytes: 1500, read rows: 69, read bytes: 2322 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }}
2025-03-31T16:00:08.757580Z node 3 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, BuildIndexId: 281474976715673, cookie: 281474976715673, txId: 281474976710766, status: StatusAccepted
2025-03-31T16:00:08.757713Z node 3 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, buildInfo: TBuildInfo{ IndexBuildId: 281474976715673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [3:7488004070847134047:2534], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710766, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 60, upload bytes: 1500, read rows: 69, read bytes: 2322 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }}, record: Status: StatusAccepted TxId: 281474976710766 SchemeshardId: 72057594046644480 PathId: 16
2025-03-31T16:00:08.758440Z node 3 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976715673
2025-03-31T16:00:08.758503Z node 3 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976715673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [3:7488004070847134047:2534], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710766, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 60, upload bytes: 1500, read rows: 69, read bytes: 2322 }, Billed: { upload rows: 0, upload
bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-31T16:00:08.761616Z node 3 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710766, buildInfoId: 281474976715673 2025-03-31T16:00:08.761710Z node 3 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710766, buildInfo: TBuildInfo{ IndexBuildId: 281474976715673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [3:7488004070847134047:2534], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710766, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 60, upload bytes: 1500, read rows: 69, read bytes: 2322 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-31T16:00:08.772571Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037926 not found 2025-03-31T16:00:08.772629Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037927 not found 2025-03-31T16:00:08.772661Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037928 not found 2025-03-31T16:00:08.772936Z node 3 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976715673 2025-03-31T16:00:08.773030Z node 3 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976715673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [3:7488004070847134047:2534], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710766, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 60, upload bytes: 1500, read rows: 69, read bytes: 2322 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-31T16:00:08.773076Z node 3 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-03-31T16:00:08.773987Z node 3 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976715673 2025-03-31T16:00:08.774042Z node 3 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ 
IndexBuildId: 281474976715673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [3:7488004070847134047:2534], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710766, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 60, upload bytes: 1500, read rows: 69, read bytes: 2322 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-31T16:00:08.779629Z node 3 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/Root" IndexBuildId: 281474976715673 2025-03-31T16:00:08.779904Z node 3 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 281474976715673 State: STATE_DONE Settings { source_path: "/Root/TestTable" index { name: "index" index_columns: "user" index_columns: "emb" global_vector_kmeans_tree_index { } } max_batch_rows: 50000 max_batch_bytes: 8388608 max_shards_in_flight: 32 max_retries_upload_batch: 50 } Progress: 100 } 2025-03-31T16:00:08.786098Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037929 not found 2025-03-31T16:00:12.428430Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:00:12.428459Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:00:18.095416Z node 3 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TTxBilling, id# 281474976715673 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldSuccess [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:01:09.722808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:01:09.722906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:01:09.722944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:01:09.722977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:01:09.723019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:01:09.723064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:01:09.723903Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:01:09.724074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:01:09.724532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:01:09.816486Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:01:09.816551Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:01:09.821308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:01:09.821399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:01:09.821544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:01:09.826652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:01:09.826870Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:01:09.827546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:09.827757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:01:09.829746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:09.830872Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:09.830925Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:09.831074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:01:09.831140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:09.831180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:01:09.831340Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:01:09.838074Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:01:10.080401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:01:10.080636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:10.080900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:01:10.081219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 
2025-03-31T16:01:10.081275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:10.084546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:10.084683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:01:10.084901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:10.084956Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:01:10.084989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:01:10.085022Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:01:10.087413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:10.087503Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:01:10.087547Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:01:10.089433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:10.089482Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:10.089528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:10.089599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:01:10.094002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:01:10.096093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:01:10.097536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:01:10.098581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:10.098749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:01:10.098802Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose 
HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:10.099143Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:01:10.099206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:10.099414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:01:10.099512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:01:10.102089Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:10.102141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:10.102309Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:10.102373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:01:10.102589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:10.102636Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:01:10.102732Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:01:10.102764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:10.102801Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:01:10.102834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:10.102875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:01:10.102915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:10.102950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:01:10.102980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:01:10.103058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:01:10.103141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:01:10.103185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:01:10.105142Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:01:10.105253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:01:10.105294Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
944 to tablet: 72057594046316545 cookie: 0:104 msg type: 269090816 2025-03-31T16:01:10.539273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 104, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 104 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 104 at step: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 104 at step: 5000004 2025-03-31T16:01:10.539930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:10.540059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:01:10.540123Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId# 104:0 HandleReply TEvOperationPlan, operationId: 104:0, stepId: 5000004, at schemeshard: 72057594046678944 2025-03-31T16:01:10.540439Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 128 -> 129 2025-03-31T16:01:10.540566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000004 2025-03-31T16:01:10.569311Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:10.569372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T16:01:10.569697Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:10.569760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 104, path id: 2 2025-03-31T16:01:10.570049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-31T16:01:10.570112Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 104:0 ProgressState at tablet: 72057594046678944 2025-03-31T16:01:10.570795Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2025-03-31T16:01:10.570918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2025-03-31T16:01:10.570976Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-03-31T16:01:10.571021Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-03-31T16:01:10.571070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-31T16:01:10.571188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready 
parts: 0/1, is published: true 2025-03-31T16:01:10.578654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-03-31T16:01:10.593323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000004 OrderId: 104 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1080 } } 2025-03-31T16:01:10.593397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409546, partId: 0 2025-03-31T16:01:10.593671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000004 OrderId: 104 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1080 } } 2025-03-31T16:01:10.593781Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000004 OrderId: 104 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1080 } } FAKE_COORDINATOR: Erasing txId 104 2025-03-31T16:01:10.594803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 305 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2025-03-31T16:01:10.594855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409546, partId: 0 2025-03-31T16:01:10.594991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Source { RawX1: 305 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2025-03-31T16:01:10.595056Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 104:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-03-31T16:01:10.595171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 104:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 305 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2025-03-31T16:01:10.595242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 104:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:10.595288Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-31T16:01:10.595321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 104:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-03-31T16:01:10.595357Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 129 -> 240 2025-03-31T16:01:10.607740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply 
complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-31T16:01:10.608132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-31T16:01:10.608307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-31T16:01:10.608356Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2025-03-31T16:01:10.608464Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-03-31T16:01:10.608509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-31T16:01:10.608550Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-03-31T16:01:10.608589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-31T16:01:10.608630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2025-03-31T16:01:10.608715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:333:2312] message: TxId: 104 2025-03-31T16:01:10.608780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-31T16:01:10.608825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-03-31T16:01:10.608873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-03-31T16:01:10.609000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-31T16:01:10.620095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-03-31T16:01:10.620174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:441:2413] TestWaitNotification: OK eventTxId 104 2025-03-31T16:01:10.620888Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:01:10.621115Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 246us result status StatusSuccess 2025-03-31T16:01:10.621534Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 3 TTLSettings { Disabled { } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 
RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestUnavailableWithoutNetClassifier [GOOD] Test command err: 2025-03-31T16:01:00.842291Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004300635378794:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:01:00.842322Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0015f8/r3tmp/tmpyC5hus/pdisk_1.dat 2025-03-31T16:01:01.399700Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:01:01.447776Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:01:01.447906Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:01:01.454650Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28887, node 1 2025-03-31T16:01:01.639617Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:01:01.639636Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:01:01.639645Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:01:01.639748Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29869 PQClient connected to localhost:28887 WaitRootIsUp 'Root'... 
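Referring back to the TSchemeShardTTLTests::AlterTableShouldSuccess output above — the describe result ends with TTLSettings { Disabled { } } on the Timestamp column modified_at — the alter sequence that test exercises corresponds roughly to the following YQL. A sketch using the table and column names from the describe result; the interval value is an illustrative assumption:

    -- enable TTL on the modified_at column (interval value assumed)
    ALTER TABLE `/MyRoot/TTLEnabledTable` SET (TTL = Interval("PT1H") ON modified_at);
    -- disable it again; the describe result above reflects this final state
    ALTER TABLE `/MyRoot/TTLEnabledTable` RESET (TTL);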
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:01:02.102625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-03-31T16:01:02.171275Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-03-31T16:01:05.096066Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004322110216020:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:01:05.096225Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004322110216008:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:01:05.096638Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:01:05.108108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-03-31T16:01:05.123457Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488004322110216030:2339], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-03-31T16:01:05.218870Z node 1 :TX_PROXY ERROR: Actor# [1:7488004322110216095:2395] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:01:05.734224Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488004322110216103:2345], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:01:05.734444Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTBkNGMwMzItNjdiZjZiNDgtZTdlZGMwNGEtMmMxNWVkMA==, ActorId: [1:7488004322110216004:2332], ActorState: ExecuteState, TraceId: 01jqpeyqha0656dh80q3bhrhxz, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:01:05.738297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:01:05.744870Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T16:01:05.844438Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488004300635378794:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:01:05.895048Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:01:05.952729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:01:06.056231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-03-31T16:01:06.409546Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jqpeyrmmatt02tt1mksgeqdv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjFhNjI2YzMtNmNlNWVhNTgtOWFmZDVkY2QtMTQwYmJhOTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_10 [GOOD] Test command err: 2025-03-31T16:00:25.775737Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004148143422868:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:00:25.775783Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:00:25.888587Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488004149625471149:2094];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:00:25.895718Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:00:26.311720Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-31T16:00:26.291926Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a63/r3tmp/tmpkw9soX/pdisk_1.dat 2025-03-31T16:00:26.891283Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:00:26.901372Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:00:27.135570Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:00:27.135662Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:00:27.136968Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:00:27.137014Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:00:27.143818Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:00:27.146213Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:00:27.149091Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:00:27.207327Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31798, node 1 2025-03-31T16:00:27.347408Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:00:27.375352Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:00:27.634023Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/0l5c/000a63/r3tmp/yandexL4Pnwm.tmp 2025-03-31T16:00:27.634046Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/0l5c/000a63/r3tmp/yandexL4Pnwm.tmp 2025-03-31T16:00:27.634205Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/0l5c/000a63/r3tmp/yandexL4Pnwm.tmp 2025-03-31T16:00:27.634480Z node 1 
:NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:00:27.771549Z INFO: TTestServer started on Port 4841 GrpcPort 31798 TClient is connected to server localhost:4841 PQClient connected to localhost:31798 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:00:28.744533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:00:28.877097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:00:29.308877Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:00:30.779387Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488004148143422868:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:00:30.779458Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:00:30.891207Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488004149625471149:2094];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:00:30.891309Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:00:33.012196Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004178208195067:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:00:33.012335Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:00:33.028736Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004182503162383:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:00:33.062945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2025-03-31T16:00:33.075201Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004182503162417:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:00:33.075533Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:00:33.130437Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488004182503162385:2349], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-03-31T16:00:33.200778Z node 1 :TX_PROXY ERROR: Actor# [1:7488004182503162459:2766] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:00:33.639874Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488004182503162478:2355], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:00:33.641662Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTQ3NzBhNTYtNWZlOTZhM2YtZmJlYjNjN2EtZGYwOWEzNmE=, ActorId: [1:7488004178208195048:2342], ActorState: ExecuteState, TraceId: 01jqpexr8363w5ymyp7sh7t9jk, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:00:33.644013Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T16:00:33.647560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:00:33.737744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:00:33.924012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-03-31T16:00:34.292939Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jqpexs9k2fwvv129ctby5w1a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTg2YjRlNmYtMTBhMjY0YzYtMzMwOGJiZTEtNjBhODFlMDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7488004186798130232:3105] === CheckClustersList. Ok >>>>> Prepare scheme WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-03-31T16:00:40.879981Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7488004152438390442:2142], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:00:40.880249Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7488004152438390442:2142], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /Root PathId: Partial: 0 } 2025-03-31T16:00:40.880431Z node 1 :TX_PROXY_SCHEME_CACH ... 
n DomainInfo }] } 2025-03-31T16:01:08.431450Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7488004239267279515:2105], cacheItem# { Subscriber: { Subscriber: [4:7488004260742116046:2117] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:01:08.431588Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7488004333756561848:3424], recipient# [4:7488004333756561847:2456], result# { ErrorCount: 1 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:01:08.504076Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvResolveKeySet: self# [3:7488004238623500761:2127], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 12] Access: 1 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo From: (Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 },{ TableId: [OwnerId: 72057594046644480, LocalPathId: 10] Access: 1 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo From: (Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-03-31T16:01:08.504185Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TResolve: self# [3:7488004238623500761:2127], cacheItem# { Subscriber: { Subscriber: [3:7488004260098338354:2852] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 26 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1743436852367 PathId: [OwnerId: 72057594046644480, LocalPathId: 12] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { TableId: [OwnerId: 72057594046644480, LocalPathId: 12] Access: 1 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:01:08.504269Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TResolve: self# [3:7488004238623500761:2127], cacheItem# { Subscriber: { Subscriber: [3:7488004260098338245:2780] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 26 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1743436852059 PathId: [OwnerId: 72057594046644480, LocalPathId: 10] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { TableId: [OwnerId: 72057594046644480, LocalPathId: 10] Access: 1 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:01:08.504510Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7488004333112785773:5066], recipient# [3:7488004333112785772:2819], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: 
[OwnerId: 72057594046644480, LocalPathId: 12] Access: 1 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 },{ TableId: [OwnerId: 72057594046644480, LocalPathId: 10] Access: 1 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-03-31T16:01:08.505607Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7488004238623500761:2127], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:01:08.505716Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7488004238623500761:2127], cacheItem# { Subscriber: { Subscriber: [3:7488004238623500818:2143] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 28 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1743436847621 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:01:08.505870Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7488004333112785776:5067], recipient# [3:7488004333112785775:2880], result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 2 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-31T16:01:08.668414Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7488004239267279515:2105], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:01:08.668553Z node 4 
:TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7488004239267279515:2105], cacheItem# { Subscriber: { Subscriber: [4:7488004243562246833:2110] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:01:08.668636Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7488004333756561860:3429], recipient# [4:7488004333756561859:2459], result# { ErrorCount: 1 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:01:08.831595Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7488004238623500761:2127], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:01:08.831733Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7488004238623500761:2127], cacheItem# { Subscriber: { Subscriber: [3:7488004242918468689:2555] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:01:08.831834Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7488004333112785793:5076], recipient# [3:7488004333112785792:2883], result# { ErrorCount: 1 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:01:08.940360Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7488004238623500761:2127], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:01:08.940508Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7488004238623500761:2127], cacheItem# { 
Subscriber: { Subscriber: [3:7488004260098338153:2744] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:01:08.940604Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7488004333112785798:5079], recipient# [3:7488004333112785797:2884], result# { ErrorCount: 1 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] }
|71.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnUnspecifiedTTL [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:01:10.580326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:01:10.580437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:01:10.580474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:01:10.580510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:01:10.580555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:01:10.580595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:01:10.580659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:01:10.580796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:01:10.581204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:01:10.695333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:01:10.695415Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:01:10.716349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:01:10.716447Z
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:01:10.716598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:01:10.721797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:01:10.722043Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:01:10.722628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:10.722828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:01:10.739870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:10.741223Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:10.741286Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:10.741439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:01:10.741487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:10.741521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:01:10.741682Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:01:10.760064Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:01:10.915988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:01:10.916251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:10.916586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:01:10.916830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:01:10.916882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:10.919275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:10.919422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:01:10.919644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:10.919697Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:01:10.919728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:01:10.919760Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:01:10.928061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:10.928149Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:01:10.928192Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:01:10.930294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:10.930347Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:10.930404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:10.930473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:01:10.939988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:01:10.942070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:01:10.942264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:01:10.943265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:10.943432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:01:10.943490Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:10.943766Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:01:10.943822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:10.943994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:01:10.944098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:01:10.946286Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:10.946334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:10.946530Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:10.946588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:01:10.946792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:10.946831Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:01:10.946925Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:01:10.946960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:10.946995Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:01:10.947025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:10.947064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:01:10.947131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:10.947163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:01:10.947189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:01:10.947262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:01:10.947322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:01:10.947371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:01:10.949239Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:01:10.949354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:01:10.949397Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-03-31T16:01:10.949437Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-03-31T16:01:10.949478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:01:10.949595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-03-31T16:01:10.954883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-03-31T16:01:10.955492Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown 
transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-03-31T16:01:10.956861Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] Bootstrap 2025-03-31T16:01:10.974527Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] Become StateWork (SchemeCache [1:272:2263]) 2025-03-31T16:01:10.977326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:01:10.977680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:01:10.977790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { }, at schemeshard: 72057594046678944 2025-03-31T16:01:10.978185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: TTL status must be specified, at schemeshard: 72057594046678944 2025-03-31T16:01:10.979686Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-31T16:01:10.982564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "TTL status must be specified" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:01:10.982740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: TTL status must be specified, operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2025-03-31T16:01:10.983403Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldSucceedOnAsyncIndexedTable [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-31T16:01:10.334132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:01:10.334249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:01:10.334294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:01:10.334333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:01:10.334384Z node 1 :FLAT_TX_SCHEMESHARD
NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:01:10.334417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:01:10.334504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:01:10.334593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:01:10.335030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:01:10.438901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:01:10.438980Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:01:10.449191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:01:10.449664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:01:10.449866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:01:10.459040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:01:10.459398Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:01:10.460197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:10.460682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:01:10.463155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:10.464723Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:10.464799Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:10.464927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:01:10.464998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:10.465047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:01:10.465328Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:01:10.473783Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-03-31T16:01:10.645952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:01:10.646325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:10.646601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path 
for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:01:10.646913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:01:10.646996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:10.660182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:10.660403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:01:10.660681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:10.660757Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:01:10.660800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:01:10.660839Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:01:10.672478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:10.672565Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:01:10.672615Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:01:10.676467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:10.676550Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:10.676637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:10.676718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:01:10.687796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:01:10.699329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:01:10.699595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:01:10.700918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:10.701131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 
72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:01:10.701190Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:10.701546Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:01:10.701626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:10.701824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:01:10.701909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:01:10.712395Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:10.712479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:10.712688Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:10.712750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:01:10.713140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:10.713189Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:01:10.713316Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:01:10.713354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:10.713388Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:01:10.713414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:10.713455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:01:10.713488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:10.713515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:01:10.713543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:01:10.713612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:01:10.713644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:01:10.713719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:01:10.715370Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:01:10.715499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, 
at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:01:10.715539Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... ntStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409547 for txId: 102 at step: 5000003 2025-03-31T16:01:11.220770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:11.220908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:01:11.220997Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId# 102:0 HandleReply TEvOperationPlan, operationId: 102:0, stepId: 5000003, at schemeshard: 72057594046678944 2025-03-31T16:01:11.221466Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2025-03-31T16:01:11.221625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-03-31T16:01:11.228728Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:11.228792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T16:01:11.229156Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:11.229203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-03-31T16:01:11.230013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:01:11.230082Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2025-03-31T16:01:11.230686Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:01:11.230813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:01:11.230860Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-03-31T16:01:11.230903Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2025-03-31T16:01:11.230948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-03-31T16:01:11.231046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-03-31T16:01:11.239935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-31T16:01:11.266560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 2420 } } 2025-03-31T16:01:11.266651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-03-31T16:01:11.266876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 2420 } } 2025-03-31T16:01:11.267019Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 2420 } } FAKE_COORDINATOR: Erasing txId 102 2025-03-31T16:01:11.268240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 325 RawX2: 4294969604 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-03-31T16:01:11.268307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-03-31T16:01:11.268483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 325 RawX2: 4294969604 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-03-31T16:01:11.268545Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-03-31T16:01:11.268635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 325 RawX2: 4294969604 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-03-31T16:01:11.268709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:11.268751Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:01:11.268814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-03-31T16:01:11.268864Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-03-31T16:01:11.278769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:01:11.279258Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:01:11.279420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:01:11.279467Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-03-31T16:01:11.279611Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-31T16:01:11.279662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T16:01:11.279710Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-31T16:01:11.279751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T16:01:11.279792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-03-31T16:01:11.279884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:377:2345] message: TxId: 102 2025-03-31T16:01:11.279960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T16:01:11.280018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-03-31T16:01:11.280058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-03-31T16:01:11.280263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-31T16:01:11.283114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-31T16:01:11.283178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:507:2432] TestWaitNotification: OK eventTxId 102 2025-03-31T16:01:11.283787Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:01:11.284131Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 310us result status StatusSuccess 2025-03-31T16:01:11.284733Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByExpireAt" LocalPathId: 3 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "modified_at" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 
IndexImplTableDescriptions { } } TableSchemaVersion: 2 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TSchemeShardTTLTests::ShouldCheckQuotas [GOOD]
>> TPQCDTest::TestDiscoverClusters [GOOD]
>> TSchemeShardTTLTests::CheckCounters
>> SystemView::ShowCreateTableKeyBloomFilter [GOOD]
>> SystemView::ShowCreateTableTtlSettings
|71.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut
|71.4%| [LD] {RESULT} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut
|71.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::ShouldCheckQuotas [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:01:10.699534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:01:10.699638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:01:10.699699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:01:10.699735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:01:10.699776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:01:10.699820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:01:10.699880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0,
WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:01:10.700065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:01:10.700433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:01:10.832362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:01:10.832433Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:01:10.848598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:01:10.848701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:01:10.848843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:01:10.863981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:01:10.864217Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:01:10.864913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:10.865103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:01:10.871929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:10.873451Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:10.873521Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:10.873686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:01:10.873729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:10.873786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:01:10.873949Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:01:10.892418Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:01:11.093194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:01:11.093412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:11.093683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:01:11.093972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:01:11.094037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:11.100175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:11.100346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:01:11.100575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:11.100628Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:01:11.100660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:01:11.100692Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:01:11.112004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:11.112079Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:01:11.112124Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:01:11.114185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:11.114240Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:11.114283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:11.114351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:01:11.118417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:01:11.120420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:01:11.120621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:01:11.121551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:11.121710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:01:11.121773Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:11.122052Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:01:11.122102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:11.122262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:01:11.122339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:01:11.124835Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:11.124886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:11.125051Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:11.125128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:01:11.125324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:11.125365Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:01:11.125457Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:01:11.125490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:11.125526Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:01:11.125555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:11.125594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:01:11.125633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:11.125676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:01:11.125711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:01:11.125787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:01:11.125833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:01:11.125880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:01:11.140679Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:01:11.140837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:01:11.140879Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
3:0 HandleReply TEvOperationPlan at tablet: 72057594046678944, stepId: 200 2025-03-31T16:01:11.885851Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 128 -> 129 2025-03-31T16:01:11.885996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-03-31T16:01:11.886064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-03-31T16:01:11.916695Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:11.916764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T16:01:11.916988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-03-31T16:01:11.917207Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:11.917249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-03-31T16:01:11.917296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 103, path id: 4 2025-03-31T16:01:11.917964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-31T16:01:11.918043Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 103:0 ProgressState at tablet: 72057594046678944 2025-03-31T16:01:11.935922Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2025-03-31T16:01:11.936055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2025-03-31T16:01:11.936101Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-03-31T16:01:11.936142Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 7 2025-03-31T16:01:11.936186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-03-31T16:01:11.937088Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 103 2025-03-31T16:01:11.937178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 103 2025-03-31T16:01:11.937203Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-03-31T16:01:11.937231Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 
72057594046678944, LocalPathId: 4], version: 3 2025-03-31T16:01:11.937261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-03-31T16:01:11.937324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-03-31T16:01:11.938924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 103 Step: 200 OrderId: 103 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1221 } } 2025-03-31T16:01:11.938972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409549, partId: 0 2025-03-31T16:01:11.939162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 103 Step: 200 OrderId: 103 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1221 } } 2025-03-31T16:01:11.939265Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 103 Step: 200 OrderId: 103 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1221 } } 2025-03-31T16:01:11.939875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 543 RawX2: 4294969784 } Origin: 72075186233409549 State: 2 TxId: 103 Step: 0 Generation: 2 2025-03-31T16:01:11.939920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409549, partId: 0 2025-03-31T16:01:11.940044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: Source { RawX1: 543 RawX2: 4294969784 } Origin: 72075186233409549 State: 2 TxId: 103 Step: 0 Generation: 2 2025-03-31T16:01:11.940105Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 103:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-03-31T16:01:11.940222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 103:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 543 RawX2: 4294969784 } Origin: 72075186233409549 State: 2 TxId: 103 Step: 0 Generation: 2 2025-03-31T16:01:11.940298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 103:0, shardIdx: 72057594046678944:4, datashard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:11.940338Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-31T16:01:11.940374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 103:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2025-03-31T16:01:11.940416Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 129 -> 240 2025-03-31T16:01:11.948675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-03-31T16:01:11.951133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-03-31T16:01:11.951283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-31T16:01:11.951398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-31T16:01:11.951696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-31T16:01:11.951740Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-03-31T16:01:11.951838Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-03-31T16:01:11.951875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-31T16:01:11.951921Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-03-31T16:01:11.951955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-31T16:01:11.951990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-03-31T16:01:11.952059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:404:2371] message: TxId: 103 2025-03-31T16:01:11.952121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-31T16:01:11.952156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-03-31T16:01:11.952184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-03-31T16:01:11.952300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-03-31T16:01:11.965213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-03-31T16:01:11.965275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:571:2508] TestWaitNotification: OK eventTxId 103 W0000 00:00:1743436871.966001 93541 text_format.cc:398] Warning parsing text-format NKikimrSchemeOp.TTableDescription: 9:35: text format contains deprecated field "ExpireAfterSeconds" TestModificationResults wait txId: 104 2025-03-31T16:01:11.969037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/SubDomain" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table4" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 SysSettings { RunInterval: 1799999999 } Tiers { ApplyAfterSeconds: 3600 Delete { } } } } } } TxId: 104 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:01:11.969530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /MyRoot/SubDomain/Table4, opId: 104:0, at schemeshard: 72057594046678944 2025-03-31T16:01:11.969721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: 
/MyRoot/SubDomain/Table4, opId: 104:0, schema: Name: "Table4" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 SysSettings { RunInterval: 1799999999 } Tiers { ApplyAfterSeconds: 3600 Delete { } } } }, at schemeshard: 72057594046678944 2025-03-31T16:01:11.970172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 104:1, propose status:StatusSchemeError, reason: TTL run interval cannot be less than limit: 1800, at schemeshard: 72057594046678944 2025-03-31T16:01:11.976967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 104, response: Status: StatusSchemeError Reason: "TTL run interval cannot be less than limit: 1800" TxId: 104 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:01:11.977138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 104, database: /MyRoot/SubDomain, subject: , status: StatusSchemeError, reason: TTL run interval cannot be less than limit: 1800, operation: CREATE TABLE, path: /MyRoot/SubDomain/Table4 TestModificationResult got TxId: 104, wait until txId: 104
>> TSchemeShardTTLTests::TtlTiersValidation
>> TSchemeShardTTLTests::LegacyTtlSettingsNoTiers [GOOD]
>> TSchemeShardTTLTests::AlterTableShouldFailOnSimultaneousDropColumnAndEnableTTL
>> TBlobStorageProxyTest::TestGetAndRangeGetManyBlobs [GOOD]
>> TBlobStorageProxyTest::TestEmptyRange
|71.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS92+ColumnStore [GOOD]
Test command err:
Trying to start YDB, gRPC: 25280, MsgBus: 3790 2025-03-31T15:58:52.058198Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488003750592607103:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:58:52.058243Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0019ca/r3tmp/tmpIPOHty/pdisk_1.dat 2025-03-31T15:58:53.063060Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:58:53.075469Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:58:53.075570Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:58:53.078431Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:58:53.093260Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25280, node 1 2025-03-31T15:58:53.387032Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:58:53.387061Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:58:53.387073Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:58:53.387178Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3790 TClient is connected to server localhost:3790
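The txId 104 rejection above pins down the schemeshard's TTL run-interval floor: the reported limit is 1800, and the proposed SysSettings { RunInterval: 1799999999 } falls just short of 1800000000, which suggests RunInterval is counted in microseconds against a 30-minute (1800 s) minimum. The units are an inference from these values, not something the log states. Below is a minimal sketch, under that assumption, of a Table4 schema that should clear the check, changing only the run interval (the KqpJoinOrder log then continues):

    Name: "Table4"
    Columns { Name: "key" Type: "Uint64" }
    Columns { Name: "modified_at" Type: "Timestamp" }
    KeyColumnNames: "key"
    TTLSettings {
      Enabled {
        ColumnName: "modified_at"
        ExpireAfterSeconds: 3600                 # deprecated per the text_format warning above; kept only for parity with the rejected request
        SysSettings { RunInterval: 1800000000 }  # assumed microseconds: exactly the 30-minute floor, no longer below the limit
        Tiers { ApplyAfterSeconds: 3600 Delete { } }
      }
    }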
WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:58:54.773271Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:58:57.059303Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488003750592607103:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:58:57.059359Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:58:58.351429Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003776362411552:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:58.351502Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003776362411564:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:58.351526Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:58.356168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T15:58:58.391318Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488003776362411566:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T15:58:58.499479Z node 1 :TX_PROXY ERROR: Actor# [1:7488003776362411617:2348] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:58:58.942443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T15:58:59.454192Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488003780657379144:2362];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T15:58:59.454503Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488003780657379144:2362];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T15:58:59.459339Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488003780657379144:2362];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T15:58:59.459690Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488003780657379144:2362];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T15:58:59.459829Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488003780657379144:2362];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T15:58:59.459967Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488003780657379144:2362];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T15:58:59.460099Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488003780657379144:2362];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T15:58:59.460232Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488003780657379144:2362];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T15:58:59.460360Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488003780657379144:2362];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T15:58:59.460482Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488003780657379144:2362];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T15:58:59.460607Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037902;self_id=[1:7488003780657379144:2362];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T15:58:59.460718Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488003780657379144:2362];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T15:58:59.470623Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488003780657379123:2354];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T15:58:59.471022Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488003780657379123:2354];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T15:58:59.471333Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488003780657379123:2354];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T15:58:59.471500Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488003780657379123:2354];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T15:58:59.471613Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488003780657379123:2354];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T15:58:59.471716Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488003780657379123:2354];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T15:58:59.471822Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488003780657379123:2354];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T15:58:59.471934Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488003780657379123:2354];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T15:58:59.472050Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488003780657379123:2354];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T15:58:59.472156Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488003780657379123:2354];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T15:58:59.472267Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488003780657379123:2354];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T15:58:59.472378Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7488003780657379123:2354];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS ... :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:29.709052Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039345;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:29.716847Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:29.724012Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039285;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:29.724250Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039329;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:29.730790Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:29.751988Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039253;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:29.753448Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039333;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:29.776843Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:29.781690Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039323;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:29.789434Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:29.800624Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039327;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:29.809806Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039317;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:29.824908Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:29.827329Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:29.837832Z node 1 
:TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:29.838475Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:29.847052Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:29.854435Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039302;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:29.863358Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:29.865842Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:29.869098Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039325;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:29.877054Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:29.884069Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:29.888263Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:29.894559Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:29.898059Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:29.909411Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:29.912170Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:29.921774Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:29.924976Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 
2025-03-31T16:00:29.932405Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:29.933618Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:29.938587Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:29.944500Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:29.947757Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:29.959037Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:30.177347Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039287;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:30.224812Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039313;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:30.403408Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpew5fv2q53p2dd7v8tnkjd", SessionId: ydb://session/3?node_id=1&id=MjdmZjBiMGItN2MzNGJiYzMtYmJkMjlhZjgtNzQxMDNjNTg=, Slow query, duration: 49.351074s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:00:31.308927Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:00:31.309437Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:00:31.310271Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;self_id=[1:7488003969635977219:7916];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038933;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039392;receive=72075186224039094; 2025-03-31T16:00:31.310619Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:00:59.343040Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpey4s04axqjg51r6y8ezg9", SessionId: ydb://session/3?node_id=1&id=MjdmZjBiMGItN2MzNGJiYzMtYmJkMjlhZjgtNzQxMDNjNTg=, Slow query, duration: 13.461853s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "pragma TablePathPrefix = \"/Root/test/ds/\";\n\n$bla = (\n SELECT\n web_sales.ws_item_sk bla_item_sk,\n avg(ws_ext_discount_amt) bla_ext_discount_amt\n FROM\n web_sales\n cross join date_dim\n WHERE\n cast(d_date as date) between cast('2001-03-12' as date) and\n (cast('2001-03-12' as date) + DateTime::IntervalFromDays(90))\n and d_date_sk = ws_sold_date_sk\n group by web_sales.ws_item_sk\n );\n\n-- start query 1 in stream 0 using template query92.tpl and seed 2031708268\nselect\n sum(ws_ext_discount_amt) as `Excess Discount Amount`\nfrom\n web_sales\n cross join item\n cross join date_dim\n join $bla bla on (item.i_item_sk = bla.bla_item_sk)\nwhere\ni_manufact_id = 356\nand i_item_sk = ws_item_sk\nand cast(d_date as date) between cast('2001-03-12' as date) and\n (cast('2001-03-12' as date) + DateTime::IntervalFromDays(90))\nand d_date_sk = ws_sold_date_sk\nand ws_ext_discount_amt\n > 1.3 * bla.bla_ext_discount_amt\norder by `Excess Discount Amount`\nlimit 100;\n", parameters: 0b
>> TSchemeShardTTLUtility::ValidateTiers [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::LegacyTtlSettingsNoTiers [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:01:12.661598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:01:12.661716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:01:12.661749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:01:12.661777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:01:12.661824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:01:12.661855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:01:12.662096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:01:12.662208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:01:12.662518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-03-31T16:01:12.760478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:01:12.760549Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:01:12.766269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:01:12.766388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:01:12.766548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:01:12.772009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:01:12.772315Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:01:12.772969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:12.773216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:01:12.775480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:12.776753Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:12.776814Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:12.776986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:01:12.777032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:12.777071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:01:12.777236Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:01:12.785366Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:01:12.935394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:01:12.935684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:12.935987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:01:12.936254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:01:12.936329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:12.940530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:12.940690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, 
subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:01:12.940914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:12.940968Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:01:12.941002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:01:12.941035Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:01:12.943993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:12.944070Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:01:12.944107Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:01:12.948047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:12.948115Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:12.948274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:12.948359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:01:12.952807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:01:12.955917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:01:12.956149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:01:12.957169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:12.957339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:01:12.957389Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:12.957733Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:01:12.957787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:12.957969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 
2025-03-31T16:01:12.958047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:01:12.964986Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:12.965047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:12.965265Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:12.965328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:01:12.965577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:12.965624Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:01:12.965722Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:01:12.965758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:12.965799Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:01:12.965835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:12.965887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:01:12.965948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:12.965986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:01:12.966020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:01:12.966117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:01:12.966159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:01:12.966208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:01:12.972834Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:01:12.973017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:01:12.973071Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
T16:01:13.326641Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:13.326735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:13.326928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T16:01:13.327205Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:13.327254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-03-31T16:01:13.327300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 FAKE_COORDINATOR: Erasing txId 101 2025-03-31T16:01:13.327597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:01:13.327651Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 ProgressState at tablet: 72057594046678944 2025-03-31T16:01:13.329038Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T16:01:13.329168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T16:01:13.329210Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-03-31T16:01:13.329268Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-03-31T16:01:13.329320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:01:13.330582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 2311 } } 2025-03-31T16:01:13.330635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-03-31T16:01:13.330778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 2311 } } 2025-03-31T16:01:13.330869Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 
ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 2311 } } 2025-03-31T16:01:13.332773Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T16:01:13.332891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T16:01:13.332926Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-03-31T16:01:13.332962Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-03-31T16:01:13.332996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-31T16:01:13.333084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2025-03-31T16:01:13.334614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 305 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-03-31T16:01:13.334668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-03-31T16:01:13.334813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 305 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-03-31T16:01:13.334863Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-03-31T16:01:13.334941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 305 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-03-31T16:01:13.335004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:13.335042Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:01:13.335075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-03-31T16:01:13.335139Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2025-03-31T16:01:13.339174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-31T16:01:13.339618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:01:13.339927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 
72057594046678944, cookie: 101 2025-03-31T16:01:13.341006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:01:13.341157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:01:13.341199Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-03-31T16:01:13.341302Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-31T16:01:13.341340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T16:01:13.341378Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-31T16:01:13.341415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T16:01:13.341455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2025-03-31T16:01:13.341557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:333:2312] message: TxId: 101 2025-03-31T16:01:13.341621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T16:01:13.341662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-03-31T16:01:13.341697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-03-31T16:01:13.341853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-31T16:01:13.343906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-31T16:01:13.343953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:334:2313] TestWaitNotification: OK eventTxId 101 2025-03-31T16:01:13.344566Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:01:13.344832Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 255us result status StatusSuccess 2025-03-31T16:01:13.345283Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 } } IsBackup: false IsRestore: false } TableStats { 
DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestDiscoverClusters [GOOD]
Test command err:
2025-03-31T16:01:01.250112Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004300912586145:2205];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:01:01.250596Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001605/r3tmp/tmpWKlwie/pdisk_1.dat
2025-03-31T16:01:01.981787Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:01:01.981880Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:01:01.991554Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-31T16:01:02.025320Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 20190, node 1
2025-03-31T16:01:02.167830Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/0l5c/001605/r3tmp/yandex4EBITi.tmp
2025-03-31T16:01:02.167858Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/0l5c/001605/r3tmp/yandex4EBITi.tmp
2025-03-31T16:01:02.168029Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/0l5c/001605/r3tmp/yandex4EBITi.tmp
2025-03-31T16:01:02.168137Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:5289
PQClient connected to localhost:20190
WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:01:02.680497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:02.695249Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:02.711254Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:02.728089Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:02.754096Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-03-31T16:01:04.994853Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004313797488589:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:01:04.999349Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:01:05.001530Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004313797488626:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:01:05.006161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-03-31T16:01:05.019792Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488004318092455924:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-03-31T16:01:05.070331Z node 1 :TX_PROXY ERROR: Actor# [1:7488004318092455989:2393] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:01:05.588269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:01:05.589754Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488004318092455997:2344], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:01:05.591985Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YWU0NTFhMGEtYjgyNzE4MTgtNmM0YWQwM2EtNTdlYjdlMzA=, ActorId: [1:7488004313797488587:2331], ActorState: ExecuteState, TraceId: 01jqpeyqf007d3r9qtm0tn34m0, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:01:05.594268Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T16:01:05.727547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:01:05.867298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-03-31T16:01:06.152199Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jqpeyrcvdddf7e7s9svz1kwc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGRiMTM3ZWMtYTY4M2Y1NS04Yjc1YTM3Ni03OTc5NmQ4Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:01:06.241506Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488004300912586145:2205];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:01:06.241604Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:01:07.580609Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710668. Ctx: { TraceId: 01jqpeysq07ggexrd15r16y8jr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTIxMzdkMjktYTk1MWJlY2ItYzk0OTg1ZWQtYzc0OWE5Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:01:09.021521Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710670. Ctx: { TraceId: 01jqpeyv4408h9mdcmq8gxqjvf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2ViMWM2NDItYjYyNmRmOTUtZjU0YmQ0ODAtM2U1YTkzYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:01:10.328275Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710674. Ctx: { TraceId: 01jqpeywfh6r9r7tsx4q183zsf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjY4MTNlOGEtMjhmZDE1YzUtOTY5Njg4YzktNmRiY2JkZjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root
2025-03-31T16:01:11.758595Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710680. Ctx: { TraceId: 01jqpeyxty4fpbbpkzbk7wjxvh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGUyM2EwNWItNmUzMDU5MmEtY2NlNjlhN2YtNDc1YzY0NmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
>> TSchemeShardTTLTestsWithReboots::AlterTable
>> ResultFormatter::EmptyResultSet [GOOD]
>> ResultFormatter::EmptyList [GOOD]
>> ResultFormatter::EmptyTuple [GOOD]
>> ResultFormatter::Utf8WithQuotes [GOOD]
>> ResultFormatter::VariantStruct [GOOD]
>> ResultFormatter::FormatEmptySchema [GOOD]
>> ResultFormatter::FormatNonEmptySchema [GOOD]
>> TSchemeShardTTLTests::CreateTableShouldFailOnWrongColumnType
|71.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest
>> TSchemeShardTTLUtility::ValidateTiers [GOOD]
>> THeavyPerfTest::TTestLoadEverything [GOOD]
|71.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest
>> ResultFormatter::EmptyTuple [GOOD]
|71.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest
>> THiveImplTest::BootQueueSpeed
>> TPersqueueControlPlaneTestSuite::SetupReadLockSessionWithDatabase [GOOD]
>> TPersqueueControlPlaneTestSuite::SetupWriteLockSessionWithDatabase
|71.5%| [TA] $(B)/ydb/services/persqueue_cluster_discovery/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|71.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest
>> ResultFormatter::FormatNonEmptySchema [GOOD]
|71.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest
>> ResultFormatter::VariantStruct [GOOD]
>> TSchemeShardTTLTests::AlterTableShouldFailOnSimultaneousDropColumnAndEnableTTL [GOOD]
>> TPopulatorTestWithResets::UpdateAck
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-62 [FAIL]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-63
>> SystemView::TopPartitionsByCpuTables [GOOD]
>> SystemView::TopPartitionsByCpuRanges
>> TSchemeShardTTLTests::TtlTiersValidation [GOOD]
>> TPopulatorTestWithResets::UpdateAck [GOOD]
|71.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest
|71.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest
>> TSchemeShardTTLTests::CreateTableShouldFailOnWrongColumnType [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::TtlTiersValidation [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140]
2025-03-31T16:01:14.444142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-03-31T16:01:14.444264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-31T16:01:14.444307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, 
StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:01:14.444339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:01:14.444381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:01:14.444409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:01:14.444495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:01:14.444570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:01:14.444930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:01:14.559268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:01:14.559318Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:01:14.571899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:01:14.572220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:01:14.572352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:01:14.588458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:01:14.588789Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:01:14.589400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:14.589735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:01:14.593650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:14.594850Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:14.594913Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:14.595003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:01:14.595038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:14.595071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:01:14.595297Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:01:14.606899Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-03-31T16:01:14.758921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:01:14.760667Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:14.760955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:01:14.761143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:01:14.761201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:14.767362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:14.767552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:01:14.767810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:14.767866Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:01:14.767902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:01:14.767934Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:01:14.770360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:14.770423Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:01:14.770456Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:01:14.780162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:14.780237Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:14.780279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:14.780364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:01:14.790821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:01:14.796274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:01:14.796460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:01:14.797618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, 
stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:14.797794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:01:14.797846Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:14.798115Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:01:14.798171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:14.798348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:01:14.798430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:01:14.804647Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:14.804703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:14.804905Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:14.804976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:01:14.805330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:14.805374Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:01:14.805505Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:01:14.805539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:14.805579Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:01:14.805610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:14.805644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:01:14.805681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:14.805713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:01:14.805744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:01:14.805810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:01:14.805846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:01:14.805894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:01:14.808035Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:01:14.808165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:01:14.808219Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 5.211298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T16:01:15.211551Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:15.211599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-03-31T16:01:15.211648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-03-31T16:01:15.212363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:01:15.212432Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 ProgressState at tablet: 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2025-03-31T16:01:15.214317Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T16:01:15.214455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T16:01:15.214500Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-03-31T16:01:15.214541Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-03-31T16:01:15.214583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:01:15.215778Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T16:01:15.215865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T16:01:15.215899Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-03-31T16:01:15.215931Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-03-31T16:01:15.215974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-31T16:01:15.217921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 
2025-03-31T16:01:15.218642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1323 } } 2025-03-31T16:01:15.218712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-03-31T16:01:15.218853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1323 } } 2025-03-31T16:01:15.218981Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1323 } } 2025-03-31T16:01:15.219714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 309 RawX2: 4294969592 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-03-31T16:01:15.219764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-03-31T16:01:15.219951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 309 RawX2: 4294969592 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-03-31T16:01:15.220005Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-03-31T16:01:15.220107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 309 RawX2: 4294969592 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-03-31T16:01:15.220267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:15.220320Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:01:15.220372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-03-31T16:01:15.220420Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2025-03-31T16:01:15.223972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-31T16:01:15.225335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 
2025-03-31T16:01:15.225457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:01:15.225568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:01:15.225901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:01:15.225953Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-03-31T16:01:15.226058Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-31T16:01:15.226133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T16:01:15.226185Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-31T16:01:15.226220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T16:01:15.226257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2025-03-31T16:01:15.226347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:337:2316] message: TxId: 101 2025-03-31T16:01:15.226425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T16:01:15.226478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-03-31T16:01:15.226511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-03-31T16:01:15.226660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-31T16:01:15.228998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-31T16:01:15.229068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:338:2317] TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-03-31T16:01:15.232487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterTable AlterTable { Name: "TTLEnabledTable" TTLSettings { Enabled { ColumnName: "modified_at" Tiers { ApplyAfterSeconds: 3600 Delete { } } Tiers { ApplyAfterSeconds: 7200 Delete { } } } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:01:15.232775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTable Propose, path: /MyRoot/TTLEnabledTable, pathId: , opId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:01:15.233108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusInvalidParameter, reason: Tier 0: only the last tier in TTL settings can have Delete action, at schemeshard: 72057594046678944 2025-03-31T16:01:15.235652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusInvalidParameter Reason: "Tier 0: only the last tier in TTL settings can have Delete action" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:01:15.235838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Tier 0: only the last tier in TTL settings can have Delete action, operation: ALTER TABLE, 
path: /MyRoot/TTLEnabledTable
TestModificationResult got TxId: 102, wait until txId: 102
TestModificationResults wait txId: 103
2025-03-31T16:01:15.245433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterTable AlterTable { Name: "TTLEnabledTable" TTLSettings { Enabled { ColumnName: "modified_at" Tiers { ApplyAfterSeconds: 3600 EvictToExternalStorage { Storage: "/Root/abc" } } Tiers { ApplyAfterSeconds: 7200 Delete { } } } } } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-03-31T16:01:15.245718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTable Propose, path: /MyRoot/TTLEnabledTable, pathId: , opId: 103:0, at schemeshard: 72057594046678944
2025-03-31T16:01:15.246078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusInvalidParameter, reason: Only DELETE via TTL is allowed for row-oriented tables, at schemeshard: 72057594046678944
2025-03-31T16:01:15.250437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusInvalidParameter Reason: "Only DELETE via TTL is allowed for row-oriented tables" TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944
2025-03-31T16:01:15.250598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Only DELETE via TTL is allowed for row-oriented tables, operation: ALTER TABLE, path: /MyRoot/TTLEnabledTable
TestModificationResult got TxId: 103, wait until txId: 103
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldFailOnSimultaneousDropColumnAndEnableTTL [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140]
2025-03-31T16:01:14.506522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-03-31T16:01:14.506620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-31T16:01:14.506660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-03-31T16:01:14.506692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-03-31T16:01:14.506732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-03-31T16:01:14.506759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-03-31T16:01:14.506836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-31T16:01:14.506909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 
604800.000000s, IsManualStartup# false 2025-03-31T16:01:14.507284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:01:14.618790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:01:14.618854Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:01:14.627481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:01:14.627916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:01:14.628076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:01:14.640740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:01:14.641026Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:01:14.641672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:14.642044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:01:14.645137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:14.646368Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:14.646454Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:14.646556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:01:14.646603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:14.646660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:01:14.646872Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:01:14.658284Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-03-31T16:01:14.837146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:01:14.837381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:14.837633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:01:14.837957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:01:14.838033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:14.842497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 
PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:14.842663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:01:14.842878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:14.842935Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:01:14.842973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:01:14.843004Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:01:14.847966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:14.848030Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:01:14.848066Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:01:14.856040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:14.856105Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:14.856165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:14.856257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:01:14.860410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:01:14.868780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:01:14.868982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:01:14.870182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:14.870336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:01:14.870397Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:14.870664Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:01:14.870741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:14.870992Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:01:14.871122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:01:14.874229Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:14.874280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:14.874439Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:14.874513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:01:14.874854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:14.874900Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:01:14.875013Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:01:14.875048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:14.875098Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:01:14.875134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:14.875169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:01:14.875211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:14.875247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:01:14.875282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:01:14.875338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:01:14.875372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:01:14.875423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:01:14.877644Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:01:14.877768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:01:14.877806Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
78944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 101 at step: 5000002 2025-03-31T16:01:15.132500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:15.132612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:01:15.132662Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId# 101:0 HandleReply TEvOperationPlan at tablet: 72057594046678944, stepId: 5000002 2025-03-31T16:01:15.132809Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 129 2025-03-31T16:01:15.132934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:01:15.132993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 2025-03-31T16:01:15.144288Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:15.144354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:15.144597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T16:01:15.144872Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:15.144923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-03-31T16:01:15.144975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-03-31T16:01:15.145760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:01:15.145841Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 ProgressState at tablet: 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2025-03-31T16:01:15.148513Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T16:01:15.148695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T16:01:15.148746Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-03-31T16:01:15.148791Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-03-31T16:01:15.148851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId 
[OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:01:15.150168Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T16:01:15.150267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T16:01:15.150303Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-03-31T16:01:15.150339Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-03-31T16:01:15.150389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-31T16:01:15.150490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2025-03-31T16:01:15.151234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1273 } } 2025-03-31T16:01:15.151307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-03-31T16:01:15.151459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1273 } } 2025-03-31T16:01:15.151569Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1273 } } 2025-03-31T16:01:15.152298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 309 RawX2: 4294969592 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-03-31T16:01:15.152349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-03-31T16:01:15.152585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 309 RawX2: 4294969592 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-03-31T16:01:15.152644Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-03-31T16:01:15.152753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { 
RawX1: 309 RawX2: 4294969592 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-03-31T16:01:15.152826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:15.152868Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:01:15.152915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-03-31T16:01:15.152960Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2025-03-31T16:01:15.156895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-31T16:01:15.158239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-31T16:01:15.158360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:01:15.158485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:01:15.158818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:01:15.158874Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-03-31T16:01:15.158990Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-31T16:01:15.159058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T16:01:15.159125Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-31T16:01:15.159163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T16:01:15.159209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2025-03-31T16:01:15.159315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:337:2316] message: TxId: 101 2025-03-31T16:01:15.159376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T16:01:15.159422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-03-31T16:01:15.159459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-03-31T16:01:15.159608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-31T16:01:15.161948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-31T16:01:15.162012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:338:2317] TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-03-31T16:01:15.165675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterTable AlterTable { Name: "TTLEnabledTable" DropColumns { Name: 
"modified_at" } TTLSettings { Enabled { ColumnName: "modified_at" } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:01:15.165996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTable Propose, path: /MyRoot/TTLEnabledTable, pathId: , opId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:01:15.166429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusInvalidParameter, reason: Cannot enable TTL on dropped column: 'modified_at', at schemeshard: 72057594046678944 2025-03-31T16:01:15.172538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusInvalidParameter Reason: "Cannot enable TTL on dropped column: \'modified_at\'" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:01:15.172730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Cannot enable TTL on dropped column: 'modified_at', operation: ALTER TABLE, path: /MyRoot/TTLEnabledTable TestModificationResult got TxId: 102, wait until txId: 102 |71.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest |71.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> TBlobStorageProxyTest::TestEmptyRange [GOOD] >> TPopulatorTest::Boot ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorTestWithResets::UpdateAck [GOOD] Test command err: 2025-03-31T16:01:15.856660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:01:15.856745Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 100 2025-03-31T16:01:15.992740Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/Root" } } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:70:2109], cookie# 100, event size# 330, preserialized size# 51 2025-03-31T16:01:15.992858Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 100, is deletion# false, version: 3 2025-03-31T16:01:15.994514Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# 
[1:95:2121], cookie# 100 2025-03-31T16:01:15.994701Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-03-31T16:01:15.994751Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-03-31T16:01:15.995697Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/DirC" PathDescription { Self { Name: "DirC" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: false CreateTxId: 100 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944 }: sender# [1:70:2109], cookie# 100, event size# 220, preserialized size# 2 2025-03-31T16:01:15.995765Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], cookie# 100, is deletion# false, version: 2 FAKE_COORDINATOR: Add transaction: 100 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000001 FAKE_COORDINATOR: Erasing txId 100 2025-03-31T16:01:15.999558Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/Root" } } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:70:2109], cookie# 100, event size# 340, preserialized size# 56 2025-03-31T16:01:15.999626Z node 1 
:SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 100, is deletion# false, version: 4 2025-03-31T16:01:15.999957Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/DirC" PathDescription { Self { Name: "DirC" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944 }: sender# [1:70:2109], cookie# 100, event size# 225, preserialized size# 2 2025-03-31T16:01:16.000005Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], cookie# 100, is deletion# false, version: 3 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2025-03-31T16:01:16.023906Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 2 }: sender# [1:12:2059] 2025-03-31T16:01:16.023998Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:96:2122] Successful handshake: replica# [1:12:2059] 2025-03-31T16:01:16.024045Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:96:2122] Resume sync: replica# [1:12:2059], fromPathId# [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T16:01:16.024149Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 2 }: sender# [1:15:2062] 2025-03-31T16:01:16.024208Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:97:2123] Successful handshake: replica# [1:15:2062] 2025-03-31T16:01:16.024234Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:97:2123] Resume sync: replica# [1:15:2062], fromPathId# [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T16:01:16.024297Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 2 }: sender# [1:18:2065] 2025-03-31T16:01:16.024324Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:98:2124] Successful handshake: replica# [1:18:2065] 2025-03-31T16:01:16.024344Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:98:2124] Resume sync: replica# [1:18:2065], fromPathId# [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T16:01:16.024435Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Replica: [1:24339059:0] }: sender# [1:96:2122] 2025-03-31T16:01:16.024532Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle 
NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/DirC PathId: [OwnerId: 72057594046678944, LocalPathId: 2] PathVersion: 3 } }: sender# [1:95:2121] 2025-03-31T16:01:16.024683Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 1] }: sender# [1:96:2122] 2025-03-31T16:01:16.024820Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 0 2025-03-31T16:01:16.024916Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 2] }: sender# [1:96:2122] 2025-03-31T16:01:16.024964Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 0 2025-03-31T16:01:16.025051Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Replica: [1:1099535966835:0] }: sender# [1:97:2123] 2025-03-31T16:01:16.025110Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:12:2059], cookie# 0 2025-03-31T16:01:16.025211Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/DirC PathId: [OwnerId: 72057594046678944, LocalPathId: 2] PathVersion: 3 } }: sender# [1:95:2121] 2025-03-31T16:01:16.025309Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 1] }: sender# [1:97:2123] 2025-03-31T16:01:16.025355Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:12:2059], cookie# 0 2025-03-31T16:01:16.025408Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 0 2025-03-31T16:01:16.025501Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 2] }: sender# [1:97:2123] 2025-03-31T16:01:16.025542Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:15:2062], cookie# 0 2025-03-31T16:01:16.025594Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Replica: [1:2199047594611:0] }: sender# [1:98:2124] 2025-03-31T16:01:16.025635Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 0 2025-03-31T16:01:16.025690Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle 
NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/DirC PathId: [OwnerId: 72057594046678944, LocalPathId: 2] PathVersion: 3 } }: sender# [1:95:2121] 2025-03-31T16:01:16.025784Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 1] }: sender# [1:98:2124] 2025-03-31T16:01:16.025829Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:15:2062], cookie# 0 2025-03-31T16:01:16.025868Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 0 2025-03-31T16:01:16.025932Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 2] }: sender# [1:98:2124] 2025-03-31T16:01:16.025994Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:18:2065], cookie# 0 2025-03-31T16:01:16.026060Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 3] Replica: [1:24339059:0] }: sender# [1:96:2122] 2025-03-31T16:01:16.026109Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:95:2121] 2025-03-31T16:01:16.026155Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 0 2025-03-31T16:01:16.026251Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:96:2122], cookie# 0 2025-03-31T16:01:16.026295Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Ack for unknown update (already acked?): sender# [1:96:2122], cookie# 0 2025-03-31T16:01:16.026345Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 2 }: sender# [1:12:2059] 2025-03-31T16:01:16.026397Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:18:2065], cookie# 0 2025-03-31T16:01:16.026474Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:96:2122], cookie# 100 2025-03-31T16:01:16.026518Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 3] Replica: [1:1099535966835:0] }: sender# [1:97:2123] 2025-03-31T16:01:16.026580Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 
0 DeletedPathEnd: 0 }: sender# [1:95:2121] 2025-03-31T16:01:16.026637Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:96:2122], cookie# 0 2025-03-31T16:01:16.026661Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Ack for unknown update (already acked?): sender# [1:96:2122], cookie# 0 2025-03-31T16:01:16.026708Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 2 }: sender# [1:15:2062] 2025-03-31T16:01:16.026751Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:96:2122], cookie# 100 2025-03-31T16:01:16.026789Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:97:2123], cookie# 0 2025-03-31T16:01:16.026816Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Ack for unknown update (already acked?): sender# [1:97:2123], cookie# 0 2025-03-31T16:01:16.026865Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:97:2123], cookie# 100 2025-03-31T16:01:16.026911Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Ack update: ack to# [1:70:2109], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 3 2025-03-31T16:01:16.026958Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Ack update: ack to# [1:70:2109], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 4 2025-03-31T16:01:16.033023Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 3] Replica: [1:2199047594611:0] }: sender# [1:98:2124] 2025-03-31T16:01:16.033143Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:95:2121] 2025-03-31T16:01:16.033687Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:97:2123], cookie# 0 2025-03-31T16:01:16.033738Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Ack for unknown update (already acked?): sender# [1:97:2123], cookie# 0 2025-03-31T16:01:16.033814Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 2 }: sender# [1:18:2065] 2025-03-31T16:01:16.034029Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:97:2123], cookie# 100 2025-03-31T16:01:16.034064Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Ack update: ack to# [1:70:2109], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], version# 2 2025-03-31T16:01:16.034108Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Ack update: ack to# 
[1:70:2109], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], version# 3 2025-03-31T16:01:16.034397Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:98:2124], cookie# 0 2025-03-31T16:01:16.034424Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Ack for unknown update (already acked?): sender# [1:98:2124], cookie# 0 2025-03-31T16:01:16.034591Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:98:2124], cookie# 100 2025-03-31T16:01:16.034617Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Ack for unknown update (already acked?): sender# [1:98:2124], cookie# 100 2025-03-31T16:01:16.034760Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:98:2124], cookie# 0 2025-03-31T16:01:16.034793Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Ack for unknown update (already acked?): sender# [1:98:2124], cookie# 0 2025-03-31T16:01:16.035052Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:98:2124], cookie# 100 2025-03-31T16:01:16.035077Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Ack for unknown update (already acked?): sender# [1:98:2124], cookie# 100 TestWaitNotification: OK eventTxId 100 >> TPopulatorTest::MakeDir >> ResultFormatter::Tuple [GOOD] >> ResultFormatter::Tagged [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongColumnType [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:01:15.568781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:01:15.568880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:01:15.568921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:01:15.568953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:01:15.568993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:01:15.569034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:01:15.569104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
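The two TTL rejections traced in this section — txId 102's StatusInvalidParameter ("Cannot enable TTL on dropped column") above, and the StatusSchemeError ("Unsupported column type") in the CreateTableShouldFailOnWrongColumnType output that follows — are both propose-time checks: the schemeshard refuses the operation before any shard transaction is planned. Below is a minimal, self-contained C++ sketch of that validation logic; all names are hypothetical and the supported-type set is an illustrative subset, not the actual YDB schemeshard code.

#include <iostream>
#include <optional>
#include <string>
#include <unordered_set>

// Hypothetical request shape: combines the fields seen in the traced
// protobufs (DropColumns, TTLSettings.Enabled.ColumnName, column type).
struct TAlterTableRequest {
    std::unordered_set<std::string> DropColumns;
    std::optional<std::string> TtlColumn;   // TTLSettings { Enabled { ColumnName } }
    std::string TtlColumnType;              // e.g. "Timestamp", "String"
};

// Returns a rejection reason, or std::nullopt when the TTL settings pass.
std::optional<std::string> ValidateTtl(const TAlterTableRequest& req) {
    if (!req.TtlColumn) {
        return std::nullopt;  // no TTL change requested
    }
    // Check 1: the TTL column must not be dropped by the same operation.
    if (req.DropColumns.count(*req.TtlColumn)) {
        return "Cannot enable TTL on dropped column: '" + *req.TtlColumn + "'";
    }
    // Check 2: the TTL column must have a supported type (illustrative subset).
    static const std::unordered_set<std::string> kSupported{
        "Date", "Datetime", "Timestamp", "Uint32", "Uint64", "DyNumber"};
    if (!kSupported.count(req.TtlColumnType)) {
        return "Unsupported column type";
    }
    return std::nullopt;  // would proceed as StatusAccepted
}

int main() {
    // Mirrors txId 102 above: DropColumns { Name: "modified_at" } together
    // with TTLSettings { Enabled { ColumnName: "modified_at" } }.
    TAlterTableRequest alter{{"modified_at"}, "modified_at", "Timestamp"};
    if (auto err = ValidateTtl(alter)) {
        std::cout << "StatusInvalidParameter: " << *err << "\n";
    }
    // Mirrors CreateTableShouldFailOnWrongColumnType below: a String column
    // cannot serve as the TTL column.
    TAlterTableRequest create{{}, "modified_at", "String"};
    if (auto err = ValidateTtl(create)) {
        std::cout << "StatusSchemeError: " << *err << "\n";
    }
}

Run against the requests captured in these logs, the sketch reproduces the same two rejection reasons; in the real schemeshard the corresponding AUDIT line is then emitted with the matching status, as seen above.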
2025-03-31T16:01:15.569276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:01:15.569654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:01:15.672995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:01:15.673059Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:01:15.680576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:01:15.680671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:01:15.680818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:01:15.686303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:01:15.686543Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:01:15.687253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:15.687480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:01:15.689673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:15.690882Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:15.690947Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:15.691154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:01:15.691207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:15.691244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:01:15.691392Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:01:15.701338Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:01:15.867911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:01:15.868160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:15.868469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:01:15.868784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:01:15.868851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:15.877026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:15.877191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:01:15.877532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:15.877600Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:01:15.877635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:01:15.877669Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:01:15.879911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:15.879965Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:01:15.880001Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:01:15.881920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:15.881966Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:15.882010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:15.882075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:01:15.885980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:01:15.889770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:01:15.889974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:01:15.890969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:15.891143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:01:15.891219Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:15.891509Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change 
state for txid 1:0 128 -> 240 2025-03-31T16:01:15.891585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:15.891749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:01:15.891833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:01:15.894106Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:15.894172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:15.894354Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:15.894408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:01:15.894638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:15.894682Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:01:15.894793Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:01:15.894829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:15.894866Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:01:15.894895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:15.894931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:01:15.894972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:15.895008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:01:15.895038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:01:15.895123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:01:15.895184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:01:15.895237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:01:15.897386Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:01:15.897497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:01:15.897536Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-03-31T16:01:15.897578Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 
72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-03-31T16:01:15.897619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:01:15.897760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-03-31T16:01:15.901169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-03-31T16:01:15.901711Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-03-31T16:01:15.903064Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] Bootstrap 2025-03-31T16:01:15.923381Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] Become StateWork (SchemeCache [1:272:2263]) 2025-03-31T16:01:15.926102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "String" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:01:15.926457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:01:15.926580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "String" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" } }, at schemeshard: 72057594046678944 2025-03-31T16:01:15.926971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Unsupported column type, at schemeshard: 72057594046678944 2025-03-31T16:01:15.928827Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-31T16:01:15.932844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Unsupported column type" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:01:15.933029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Unsupported column type, operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2025-03-31T16:01:15.933722Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 >> TPopulatorTest::Boot [GOOD] >> TPopulatorTest::RemoveDir |71.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest |71.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::Tagged [GOOD] >> TPopulatorTest::MakeDir [GOOD] >> TPopulatorTest::RemoveDir [GOOD] |71.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestEmptyRange [GOOD] ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorTest::Boot [GOOD] Test command err: 2025-03-31T16:01:17.151281Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:01:17.151345Z node 1 :IMPORT WARN: Table profiles were not loaded |71.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest |71.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest |71.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorTest::MakeDir [GOOD] Test command err: 2025-03-31T16:01:17.520087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:01:17.520168Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 100 2025-03-31T16:01:17.633827Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/Root" } } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:70:2109], cookie# 100, event size# 330, preserialized size# 51 2025-03-31T16:01:17.633924Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 100, is deletion# false, version: 3 2025-03-31T16:01:17.640085Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-03-31T16:01:17.640246Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-03-31T16:01:17.640294Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-03-31T16:01:17.641039Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: false CreateTxId: 100 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 
ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944 }: sender# [1:70:2109], cookie# 100, event size# 220, preserialized size# 2 2025-03-31T16:01:17.641090Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], cookie# 100, is deletion# false, version: 2 2025-03-31T16:01:17.641229Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:12:2059], cookie# 100 2025-03-31T16:01:17.641300Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:15:2062], cookie# 100 2025-03-31T16:01:17.641344Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:18:2065], cookie# 100 2025-03-31T16:01:17.641408Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:96:2122], cookie# 100 2025-03-31T16:01:17.641450Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-03-31T16:01:17.641504Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-03-31T16:01:17.641543Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-03-31T16:01:17.641782Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:97:2123], cookie# 100 2025-03-31T16:01:17.641817Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Ack update: ack to# [1:70:2109], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 3 2025-03-31T16:01:17.641865Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:12:2059], cookie# 100 2025-03-31T16:01:17.641900Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 
}: sender# [1:15:2062], cookie# 100 2025-03-31T16:01:17.641953Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:18:2065], cookie# 100 2025-03-31T16:01:17.642395Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:98:2124], cookie# 100 2025-03-31T16:01:17.642572Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:96:2122], cookie# 100 2025-03-31T16:01:17.643071Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:97:2123], cookie# 100 2025-03-31T16:01:17.643169Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Ack update: ack to# [1:70:2109], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], version# 2 2025-03-31T16:01:17.643442Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:98:2124], cookie# 100 2025-03-31T16:01:17.643474Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Ack for unknown update (already acked?): sender# [1:98:2124], cookie# 100 FAKE_COORDINATOR: Add transaction: 100 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000001 FAKE_COORDINATOR: Erasing txId 100 2025-03-31T16:01:17.646084Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/Root" } } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:70:2109], cookie# 100, event size# 340, preserialized size# 56 2025-03-31T16:01:17.646138Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 100, is deletion# false, version: 4 2025-03-31T16:01:17.646289Z node 1 :SCHEME_BOARD_POPULATOR 
DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-03-31T16:01:17.646360Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-03-31T16:01:17.646412Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-03-31T16:01:17.646993Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944 }: sender# [1:70:2109], cookie# 100, event size# 225, preserialized size# 2 2025-03-31T16:01:17.647042Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], cookie# 100, is deletion# false, version: 3 2025-03-31T16:01:17.649436Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:12:2059], cookie# 100 2025-03-31T16:01:17.649500Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:15:2062], cookie# 100 2025-03-31T16:01:17.649537Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:18:2065], cookie# 100 2025-03-31T16:01:17.649734Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:96:2122], cookie# 100 2025-03-31T16:01:17.649783Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-03-31T16:01:17.649854Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-03-31T16:01:17.649910Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle 
NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-03-31T16:01:17.650082Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:97:2123], cookie# 100 2025-03-31T16:01:17.650115Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Ack update: ack to# [1:70:2109], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 4 2025-03-31T16:01:17.650164Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:12:2059], cookie# 100 2025-03-31T16:01:17.650211Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:15:2062], cookie# 100 2025-03-31T16:01:17.650242Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:18:2065], cookie# 100 2025-03-31T16:01:17.650707Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:98:2124], cookie# 100 2025-03-31T16:01:17.650839Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:96:2122], cookie# 100 2025-03-31T16:01:17.651184Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:97:2123], cookie# 100 2025-03-31T16:01:17.651220Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Ack update: ack to# [1:70:2109], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], version# 3 2025-03-31T16:01:17.651440Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:98:2124], cookie# 100 2025-03-31T16:01:17.651468Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Ack for unknown update (already acked?): sender# [1:98:2124], cookie# 100 TestModificationResult got TxId: 100, wait until txId: 100 |71.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest >> TSchemeShardColumnTableTTL::AlterColumnTable_Negative [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorTest::RemoveDir [GOOD] Test command err: 2025-03-31T16:01:17.901812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:01:17.901878Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 100 2025-03-31T16:01:17.989949Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir 
CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/Root" } } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:70:2109], cookie# 100, event size# 330, preserialized size# 51 2025-03-31T16:01:17.990048Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 100, is deletion# false, version: 3 2025-03-31T16:01:17.991590Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-03-31T16:01:17.991715Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-03-31T16:01:17.991758Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-03-31T16:01:17.992576Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/DirB" PathDescription { Self { Name: "DirB" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: false CreateTxId: 100 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944 }: sender# [1:70:2109], cookie# 100, event size# 220, preserialized size# 2 2025-03-31T16:01:17.992633Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], cookie# 100, is deletion# false, version: 2 2025-03-31T16:01:17.992796Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: 
[OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:12:2059], cookie# 100 2025-03-31T16:01:17.992879Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:15:2062], cookie# 100 2025-03-31T16:01:17.992938Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:18:2065], cookie# 100 2025-03-31T16:01:17.993014Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:96:2122], cookie# 100 2025-03-31T16:01:17.993061Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-03-31T16:01:17.993122Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-03-31T16:01:17.993163Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-03-31T16:01:17.993418Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:97:2123], cookie# 100 2025-03-31T16:01:17.993458Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Ack update: ack to# [1:70:2109], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 3 2025-03-31T16:01:17.993571Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:12:2059], cookie# 100 2025-03-31T16:01:17.993628Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:15:2062], cookie# 100 2025-03-31T16:01:17.993684Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:18:2065], cookie# 100 2025-03-31T16:01:17.994118Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:98:2124], cookie# 100 2025-03-31T16:01:17.994334Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:96:2122], cookie# 100 2025-03-31T16:01:17.994621Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:97:2123], cookie# 100 2025-03-31T16:01:17.994646Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Ack update: ack to# [1:70:2109], cookie# 
100, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], version# 2 2025-03-31T16:01:17.994825Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:98:2124], cookie# 100 2025-03-31T16:01:17.994878Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Ack for unknown update (already acked?): sender# [1:98:2124], cookie# 100 FAKE_COORDINATOR: Add transaction: 100 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000001 FAKE_COORDINATOR: Erasing txId 100 2025-03-31T16:01:17.997487Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/Root" } } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:70:2109], cookie# 100, event size# 340, preserialized size# 56 2025-03-31T16:01:17.997542Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 100, is deletion# false, version: 4 2025-03-31T16:01:17.997704Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-03-31T16:01:17.997799Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-03-31T16:01:17.997839Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-03-31T16:01:17.998468Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/DirB" PathDescription { Self { Name: "DirB" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 
1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944 }: sender# [1:70:2109], cookie# 100, event size# 225, preserialized size# 2 2025-03-31T16:01:17.998525Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], cookie# 100, is deletion# false, version: 3 2025-03-31T16:01:1 ... oard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 101 2025-03-31T16:01:18.006972Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 101 2025-03-31T16:01:18.007028Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 5 }: sender# [1:18:2065], cookie# 101 2025-03-31T16:01:18.007464Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 5 }: sender# [1:98:2124], cookie# 101 2025-03-31T16:01:18.007527Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 5 }: sender# [1:12:2059], cookie# 101 2025-03-31T16:01:18.007610Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 5 }: sender# [1:15:2062], cookie# 101 2025-03-31T16:01:18.007658Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 101 2025-03-31T16:01:18.007836Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 5 }: sender# [1:96:2122], cookie# 101 2025-03-31T16:01:18.007886Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Ack update: ack to# [1:70:2109], cookie# 101, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 5 2025-03-31T16:01:18.007984Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:12:2059], cookie# 101 2025-03-31T16:01:18.008024Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:15:2062], cookie# 101 2025-03-31T16:01:18.008074Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: 
sender# [1:18:2065], cookie# 101 2025-03-31T16:01:18.008403Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 5 }: sender# [1:97:2123], cookie# 101 2025-03-31T16:01:18.008574Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:96:2122], cookie# 101 2025-03-31T16:01:18.008680Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:97:2123], cookie# 101 2025-03-31T16:01:18.008738Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Ack update: ack to# [1:70:2109], cookie# 101, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], version# 3 2025-03-31T16:01:18.009006Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:98:2124], cookie# 101 2025-03-31T16:01:18.009039Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Ack for unknown update (already acked?): sender# [1:98:2124], cookie# 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 2025-03-31T16:01:18.011136Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/Root" } } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:70:2109], cookie# 101, event size# 232, preserialized size# 2 2025-03-31T16:01:18.011209Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 101, is deletion# false, version: 6 2025-03-31T16:01:18.011370Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 101 2025-03-31T16:01:18.011420Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 
72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 101 2025-03-31T16:01:18.011474Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 101 2025-03-31T16:01:18.011719Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/Root/DirB\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000002, drop txId: 101" Path: "/Root/DirB" PathId: 2 LastExistedPrefixPath: "/Root" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 72057594046678944 }: sender# [1:70:2109], cookie# 101, event size# 306, preserialized size# 0 2025-03-31T16:01:18.011764Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], cookie# 101, is deletion# true, version: 0 2025-03-31T16:01:18.011866Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 6 }: sender# [1:12:2059], cookie# 101 2025-03-31T16:01:18.011950Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 6 }: sender# [1:15:2062], cookie# 101 2025-03-31T16:01:18.011993Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 6 }: sender# [1:18:2065], cookie# 101 2025-03-31T16:01:18.012047Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 101 2025-03-31T16:01:18.012089Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 101 2025-03-31T16:01:18.012138Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 101 2025-03-31T16:01:18.012389Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 6 }: sender# [1:96:2122], cookie# 101 2025-03-31T16:01:18.012495Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 6 }: sender# [1:97:2123], cookie# 101 2025-03-31T16:01:18.012527Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Ack update: ack to# [1:70:2109], cookie# 101, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 6 2025-03-31T16:01:18.012595Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 
72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 }: sender# [1:12:2059], cookie# 101 2025-03-31T16:01:18.012639Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 }: sender# [1:15:2062], cookie# 101 2025-03-31T16:01:18.012686Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 }: sender# [1:18:2065], cookie# 101 2025-03-31T16:01:18.013070Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 6 }: sender# [1:98:2124], cookie# 101 2025-03-31T16:01:18.013197Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 }: sender# [1:96:2122], cookie# 101 2025-03-31T16:01:18.013341Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 }: sender# [1:97:2123], cookie# 101 2025-03-31T16:01:18.013378Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Ack update: ack to# [1:70:2109], cookie# 101, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], version# 18446744073709551615 2025-03-31T16:01:18.013668Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 }: sender# [1:98:2124], cookie# 101 2025-03-31T16:01:18.013701Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Ack for unknown update (already acked?): sender# [1:98:2124], cookie# 101 TestModificationResult got TxId: 101, wait until txId: 101 |71.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest |71.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest |71.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer |71.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer |71.6%| [TA] {RESULT} $(B)/ydb/services/persqueue_cluster_discovery/ut/test-results/unittest/{meta.json ... results_accumulator.log} |71.6%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer |71.6%| [TA] $(B)/ydb/core/blobstorage/dsproxy/ut_fat/test-results/unittest/{meta.json ... results_accumulator.log} >> StatisticsSaveLoad::Delete |71.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest |71.6%| [TA] $(B)/ydb/core/tx/scheme_board/ut_populator/test-results/unittest/{meta.json ... 
results_accumulator.log} >> StatisticsSaveLoad::Simple >> StatisticsSaveLoad::ForbidAccess >> ResultFormatter::Primitive [GOOD] >> ResultFormatter::Struct [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::AlterColumnTable_Negative [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-31T16:01:09.094685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:01:09.094798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:01:09.094845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:01:09.094886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:01:09.094938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:01:09.094967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:01:09.095061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:01:09.095180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:01:09.095552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:01:09.191911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:01:09.191971Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:01:09.199737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:01:09.200214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:01:09.200454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:01:09.222221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:01:09.222624Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:01:09.223375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:09.223800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:01:09.228064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:09.229568Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:09.229657Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, 
at schemeshard: 72057594046678944 2025-03-31T16:01:09.229757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:01:09.229807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:09.229850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:01:09.230097Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:01:09.239790Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-03-31T16:01:09.387982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:01:09.388366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:09.388670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:01:09.388967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:01:09.389051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:09.393119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:09.393296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:01:09.393547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:09.393606Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:01:09.393647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:01:09.393685Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:01:09.396235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:09.396330Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:01:09.396374Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:01:09.398719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:09.398783Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:09.398845Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:09.398920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:01:09.411338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:01:09.413735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:01:09.413944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:01:09.415127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:09.415304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:01:09.415366Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:09.415664Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:01:09.415722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:09.415916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:01:09.416066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:01:09.424481Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:09.424546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:09.424779Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:09.424858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:01:09.425239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:09.425305Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:01:09.425418Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:01:09.425472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 
2025-03-31T16:01:09.425529Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:01:09.425565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:09.425603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:01:09.425649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:09.425689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:01:09.425727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:01:09.425805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:01:09.425847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:01:09.425902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:01:09.427999Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:01:09.428124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:01:09.428188Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... lete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:01:17.979473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:01:17.980019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:01:17.980141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:01:17.980440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:01:17.980559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:01:17.980688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:01:17.980818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:01:17.980900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:01:17.980975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:01:17.981064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:01:17.981221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:01:17.981302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:01:18.004808Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:01:18.004980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:01:18.005062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:01:18.005137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:01:18.005262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:01:18.005311Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-03-31T16:01:18.005430Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-31T16:01:18.005468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T16:01:18.005571Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-31T16:01:18.005620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T16:01:18.005658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2025-03-31T16:01:18.005760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:2808:4073] message: TxId: 101 2025-03-31T16:01:18.005815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T16:01:18.005895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-03-31T16:01:18.005931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-03-31T16:01:18.008375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 66 2025-03-31T16:01:18.021688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-31T16:01:18.021776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:2809:4074] TestWaitNotification: OK eventTxId 101 2025-03-31T16:01:18.022534Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:01:18.022835Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 337us result status StatusSuccess 2025-03-31T16:01:18.023594Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } ChildrenExist: false } TableStats { DataSize: 0 RowCount: 0 
IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 64 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnTableDescription { Name: "TTLEnabledTable" Schema { Columns { Id: 1 Name: "key" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "modified_at" Type: "Timestamp" TypeId: 50 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 3 Name: "str" Type: "String" TypeId: 4097 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "key" NextColumnId: 4 Version: 1 Options { SchemeNeedActualization: false } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ColumnShardCount: 64 Sharding { ColumnShards: 72075186233409546 ColumnShards: 72075186233409547 ColumnShards: 72075186233409548 ColumnShards: 72075186233409549 ColumnShards: 72075186233409550 ColumnShards: 72075186233409551 ColumnShards: 72075186233409552 ColumnShards: 72075186233409553 ColumnShards: 72075186233409554 ColumnShards: 72075186233409555 ColumnShards: 72075186233409556 ColumnShards: 72075186233409557 ColumnShards: 72075186233409558 ColumnShards: 72075186233409559 ColumnShards: 72075186233409560 ColumnShards: 72075186233409561 ColumnShards: 72075186233409562 ColumnShards: 72075186233409563 ColumnShards: 72075186233409564 ColumnShards: 72075186233409565 ColumnShards: 72075186233409566 ColumnShards: 72075186233409567 ColumnShards: 72075186233409568 ColumnShards: 72075186233409569 ColumnShards: 72075186233409570 ColumnShards: 72075186233409571 ColumnShards: 72075186233409572 ColumnShards: 72075186233409573 ColumnShards: 72075186233409574 ColumnShards: 72075186233409575 ColumnShards: 72075186233409576 ColumnShards: 72075186233409577 ColumnShards: 72075186233409578 ColumnShards: 72075186233409579 ColumnShards: 72075186233409580 ColumnShards: 72075186233409581 ColumnShards: 72075186233409582 ColumnShards: 72075186233409583 ColumnShards: 72075186233409584 ColumnShards: 72075186233409585 ColumnShards: 72075186233409586 ColumnShards: 72075186233409587 ColumnShards: 72075186233409588 ColumnShards: 72075186233409589 ColumnShards: 72075186233409590 ColumnShards: 72075186233409591 ColumnShards: 72075186233409592 ColumnShards: 72075186233409593 ColumnShards: 72075186233409594 ColumnShards: 72075186233409595 ColumnShards: 72075186233409596 ColumnShards: 72075186233409597 ColumnShards: 72075186233409598 ColumnShards: 72075186233409599 ColumnShards: 72075186233409600 ColumnShards: 72075186233409601 ColumnShards: 72075186233409602 ColumnShards: 72075186233409603 
ColumnShards: 72075186233409604 ColumnShards: 72075186233409605 ColumnShards: 72075186233409606 ColumnShards: 72075186233409607 ColumnShards: 72075186233409608 ColumnShards: 72075186233409609 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "key" } } StorageConfig { DataChannelCount: 64 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 WARNING: All log messages before y_absl::InitializeLog() is called are written to STDERR W0000 00:00:1743436878.024340 92940 text_format.cc:398] Warning parsing text-format NKikimrSchemeOp.TAlterColumnTable: 6:35: text format contains deprecated field "ExpireAfterSeconds" TestModificationResults wait txId: 102 2025-03-31T16:01:18.027567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterColumnTable AlterColumnTable { Name: "TTLEnabledTable" AlterTtlSettings { Enabled { ColumnName: "str" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:01:18.027822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterColumnTable Propose, path: /MyRoot/TTLEnabledTable, opId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:01:18.028364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusSchemeError, reason: ttl update error: Unsupported column type. in alter constructor STANDALONE_UPDATE, at schemeshard: 72057594046678944 2025-03-31T16:01:18.032319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusSchemeError Reason: "ttl update error: Unsupported column type. in alter constructor STANDALONE_UPDATE" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:01:18.032516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusSchemeError, reason: ttl update error: Unsupported column type. 
in alter constructor STANDALONE_UPDATE, operation: ALTER COLUMN TABLE, path: /MyRoot/TTLEnabledTable TestModificationResult got TxId: 102, wait until txId: 102 >> ResultFormatter::EmptyDict [GOOD] >> ResultFormatter::Dict [GOOD] >> ResultFormatter::Decimal [GOOD] >> ResultFormatter::Void [GOOD] >> ResultFormatter::VariantTuple [GOOD] >> ResultFormatter::StructWithNoFields [GOOD] >> ResultFormatter::StructTypeNameAsString [GOOD] >> ResultFormatter::List [GOOD] >> ResultFormatter::Null [GOOD] |71.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::Struct [GOOD] >> ResultFormatter::Optional [GOOD] >> ResultFormatter::Pg >> TSchemeShardTTLTests::CreateTableShouldSucceedAsyncOnIndexedTable >> ResultFormatter::Pg [GOOD] >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-false |71.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::VariantTuple [GOOD] |71.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::StructTypeNameAsString [GOOD] >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_ColumnType |71.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::Decimal [GOOD] >> TPersqueueControlPlaneTestSuite::SetupWriteLockSessionWithDatabase [GOOD] |71.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::Null [GOOD] |71.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |71.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::Pg [GOOD] >> TSchemeShardTTLUtility::GetExpireAfter [GOOD] >> SystemView::AuthUsers_Access [GOOD] >> SystemView::AuthUsers_ResultOrder |71.7%| [TA] $(B)/ydb/core/fq/libs/result_formatter/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |71.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLUtility::GetExpireAfter [GOOD] |71.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_ColumnType [GOOD] >> TSchemeShardTTLTests::CreateTableShouldSucceedAsyncOnIndexedTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersqueueControlPlaneTestSuite::SetupWriteLockSessionWithDatabase [GOOD] Test command err: === Server->StartServer(false); 2025-03-31T16:01:07.505241Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004328988749187:2280];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:01:07.505293Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:01:07.588541Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488004327817036538:2078];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:01:07.588595Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:01:07.894952Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-31T16:01:07.969700Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0009fa/r3tmp/tmpDCSZeO/pdisk_1.dat 2025-03-31T16:01:08.491639Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:01:08.528219Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:01:08.531760Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:01:08.531855Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:01:08.532497Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:01:08.532542Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:01:08.538537Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:01:08.538690Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:01:08.540861Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24706, node 1 2025-03-31T16:01:08.743016Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/0l5c/0009fa/r3tmp/yandexsWY6ip.tmp 2025-03-31T16:01:08.743053Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/0l5c/0009fa/r3tmp/yandexsWY6ip.tmp 2025-03-31T16:01:08.743245Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/0l5c/0009fa/r3tmp/yandexsWY6ip.tmp 
2025-03-31T16:01:08.743385Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:01:08.804230Z INFO: TTestServer started on Port 14137 GrpcPort 24706 TClient is connected to server localhost:14137 PQClient connected to localhost:24706 === TenantModeEnabled() = 1 === Init PQ - start server on port 24706 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:01:09.619924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-03-31T16:01:09.620103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:01:09.620285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-03-31T16:01:09.620528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-31T16:01:09.620566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:01:09.623674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-03-31T16:01:09.623804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-03-31T16:01:09.623970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:01:09.624028Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-03-31T16:01:09.624042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-03-31T16:01:09.624053Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-03-31T16:01:09.627415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:01:09.627462Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-03-31T16:01:09.627490Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-03-31T16:01:09.629351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:01:09.629387Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:01:09.629414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-31T16:01:09.629447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-03-31T16:01:09.642816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:01:09.643248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:01:09.643272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2025-03-31T16:01:09.643299Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:01:09.652389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-03-31T16:01:09.652597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-03-31T16:01:09.655423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743436869699, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-31T16:01:09.655551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743436869699 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-03-31T16:01:09.655592Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-31T16:01:09.655843Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-03-31T16:01:09.655882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-31T16:01:09.656027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-03-31T16:01:09.656094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, 
LocalPathId: 1], at schemeshard: 72057594046644480 2025-03-31T16:01:09.662936Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-03-31T16:01:09.662976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-03-31T16:01:09.663155Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-03-31T16:01:09.663172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7488004333283716919:2395], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1 2025-03-31T16:01:09.663224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:01:09.663249Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-03-31T16:01:09.663346Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-03-31T16:01:09.663366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-03-31T16:01:09.663387Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-03-31T16:01:09.663405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-03-31T16:01:09.663423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 1/1, is published: false 2025-03-31T16:01:09.663444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-03-31T16:01:09.663464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-03-31T16:01:09.663474Z ... 
64 2025-03-31T16:01:19.750903Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976710664 2025-03-31T16:01:19.750933Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710664, pathId: [OwnerId: 72057594046644480, LocalPathId: 11], version: 5 2025-03-31T16:01:19.750951Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 11] was 2 2025-03-31T16:01:19.751154Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 12 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710664 2025-03-31T16:01:19.751214Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 12 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710664 2025-03-31T16:01:19.751224Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710664 2025-03-31T16:01:19.751234Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710664, pathId: [OwnerId: 72057594046644480, LocalPathId: 12], version: 2 2025-03-31T16:01:19.751245Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 12] was 4 2025-03-31T16:01:19.751282Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710664, subscribers: 1 2025-03-31T16:01:19.751297Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [3:7488004379026791567:2371] 2025-03-31T16:01:19.753814Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710664 2025-03-31T16:01:19.753851Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710664 Create topic result: 1 === EnablePQLogs === CreateChannel === NewStub === InitializeWritePQService === InitializeWritePQService start iteration === InitializeWritePQService create streamingWriter === InitializeWritePQService Write 2025-03-31T16:01:19.871249Z node 3 :PQ_WRITE_PROXY DEBUG: new grpc connection 2025-03-31T16:01:19.871282Z node 3 :PQ_WRITE_PROXY DEBUG: new session created cookie 1 2025-03-31T16:01:19.876325Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: grpc read done: success: 1 data: init_request { topic: "Root/acc/topic1" message_group_id: "12345678" } 2025-03-31T16:01:19.876429Z node 3 :PQ_WRITE_PROXY INFO: session request cookie: 1 topic: "Root/acc/topic1" message_group_id: "12345678" from ipv6:[::1]:37006 2025-03-31T16:01:19.876443Z node 3 :PQ_WRITE_PROXY INFO: write session: cookie=1 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:37006 proto=v1 topic=Root/acc/topic1 durationSec=0 2025-03-31T16:01:19.876453Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2025-03-31T16:01:19.879698Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: describe result for acl check 2025-03-31T16:01:19.879875Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId 
AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2025-03-31T16:01:19.879888Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-03-31T16:01:19.879896Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-03-31T16:01:19.879933Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7488004379026791807:2382] (SourceId=12345678, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2025-03-31T16:01:19.879977Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2025-03-31T16:01:19.880803Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 3, Generation: 1 2025-03-31T16:01:19.881006Z node 3 :PERSQUEUE INFO: new Cookie 12345678|9bc3ba9-b3d98b08-c54e2dd-40a26586_0 generated for partition 0 topic 'acc/topic1' owner 12345678 2025-03-31T16:01:19.881401Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: 12345678|9bc3ba9-b3d98b08-c54e2dd-40a26586_0 Finish: 0 2025-03-31T16:01:19.887011Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: 12345678|9bc3ba9-b3d98b08-c54e2dd-40a26586_0 grpc read done: success: 0 data: 2025-03-31T16:01:19.887044Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: 12345678|9bc3ba9-b3d98b08-c54e2dd-40a26586_0 grpc read failed 2025-03-31T16:01:19.887374Z node 3 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 1 sessionId: 12345678|9bc3ba9-b3d98b08-c54e2dd-40a26586_0 2025-03-31T16:01:19.887392Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: 12345678|9bc3ba9-b3d98b08-c54e2dd-40a26586_0 is DEAD === InitializeWritePQService done === PersQueueClient2025-03-31T16:01:19.887654Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison === InitializePQ completed 2025-03-31T16:01:19.916689Z node 3 :PQ_WRITE_PROXY DEBUG: new grpc connection 2025-03-31T16:01:19.916719Z node 3 :PQ_WRITE_PROXY DEBUG: new session created cookie 2 2025-03-31T16:01:19.917141Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: grpc read done: success: 1 data: init_request { topic: "topic1" message_group_id: "12345678" } 2025-03-31T16:01:19.917248Z node 3 :PQ_WRITE_PROXY INFO: session request cookie: 2 topic: "topic1" message_group_id: "12345678" from ipv6:[::1]:37006 2025-03-31T16:01:19.917263Z node 3 :PQ_WRITE_PROXY INFO: write session: cookie=2 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:37006 proto=v1 topic=topic1 durationSec=0 2025-03-31T16:01:19.917272Z node 3 :PQ_WRITE_PROXY INFO: 
init check schema 2025-03-31T16:01:19.919428Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: describe result for acl check 2025-03-31T16:01:19.919559Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2025-03-31T16:01:19.919570Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-03-31T16:01:19.919577Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-03-31T16:01:19.919607Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7488004379026791827:2391] (SourceId=12345678, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2025-03-31T16:01:19.919625Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 2 sessionId: partition: 0 expectedGeneration: (NULL) 2025-03-31T16:01:19.920335Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 3, Generation: 1 2025-03-31T16:01:19.920529Z node 3 :PERSQUEUE INFO: new Cookie 12345678|f7404f57-c052319c-692d1b12-cc017aa_0 generated for partition 0 topic 'acc/topic1' owner 12345678 2025-03-31T16:01:19.920874Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 2 partition: 0 MaxSeqNo: 0 sessionId: 12345678|f7404f57-c052319c-692d1b12-cc017aa_0 2025-03-31T16:01:19.932110Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: 12345678|f7404f57-c052319c-692d1b12-cc017aa_0 grpc read done: success: 0 data: 2025-03-31T16:01:19.932154Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: 12345678|f7404f57-c052319c-692d1b12-cc017aa_0 grpc read failed 2025-03-31T16:01:19.932195Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: 12345678|f7404f57-c052319c-692d1b12-cc017aa_0 grpc closed 2025-03-31T16:01:19.932220Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: 12345678|f7404f57-c052319c-692d1b12-cc017aa_0 is DEAD 2025-03-31T16:01:19.933010Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-03-31T16:01:20.459549Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7488004383321759143:2401], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003
2025-03-31T16:01:20.461645Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZWJhMjQ5N2QtZmJmY2NlNzQtMjE0MWE5NTQtNDMwY2Q2ZTQ=, ActorId: [3:7488004383321759136:2397], ActorState: ExecuteState, TraceId: 01jqpez6gb9k2ahgg4ne3apwxh, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id:
2025-03-31T16:01:20.462182Z node 3 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 }
2025-03-31T16:01:20.507657Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488004361846920942:2074];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:01:20.507768Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-63 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-64
|71.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest
>> SystemView::ShowCreateTableTtlSettings [FAIL]
>> SystemView::ShowCreateTableTemporary
|71.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest
>> THiveImplTest::BootQueueSpeed [GOOD]
>> THiveImplTest::BalancerSpeedAndDistribution
|71.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats
|71.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldSucceedAsyncOnIndexedTable [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141]
2025-03-31T16:01:22.084876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-03-31T16:01:22.084980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-31T16:01:22.085023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-03-31T16:01:22.085065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
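Note on the "Cannot find table" SCHEME_ERROR above: the PersQueue cluster tracker compiles a read against a metadata table that was never created in this test database, so compilation fails with issue code 2003. A minimal YQL probe reproducing the same failure (a sketch; only the table path is taken verbatim from the issue text) would be:

    --!syntax_v1
    -- Against a database where this path does not exist, compilation
    -- fails with SCHEME_ERROR / issue code 2003, as in the log above.
    SELECT * FROM `/Root/PQ/Config/V2/Cluster`;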
2025-03-31T16:01:22.085106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:01:22.085157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:01:22.085229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:01:22.085368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:01:22.085717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:01:22.189803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:01:22.189866Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:01:22.194508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:01:22.194657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:01:22.194807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:01:22.200676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:01:22.200920Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:01:22.201560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:22.201765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:01:22.204936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:22.206102Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:22.206162Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:22.206312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:01:22.206353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:22.206389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:01:22.206526Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:01:22.220289Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:01:22.372525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:01:22.372804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:22.373043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:01:22.373286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:01:22.373336Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:22.378369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:22.378547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:01:22.378775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:22.378833Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:01:22.378869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:01:22.378905Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:01:22.381346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:22.381414Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:01:22.381455Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:01:22.383736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:22.383795Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:22.383843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:22.383918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:01:22.404112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:01:22.411156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:01:22.411418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:01:22.412483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:22.412652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, 
message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:01:22.412704Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:22.413039Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:01:22.413100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:22.413330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:01:22.413430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:01:22.420195Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:22.420271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:22.420470Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:22.420530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:01:22.420846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:22.420903Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:01:22.421002Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:01:22.421037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:22.421077Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:01:22.421108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:22.421150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:01:22.421197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:22.421237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:01:22.421267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:01:22.421357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:01:22.421408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:01:22.421464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:01:22.423778Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:01:22.423919Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:01:22.424041Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 1 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1354 } } 2025-03-31T16:01:22.799404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 323 RawX2: 4294969602 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-03-31T16:01:22.799458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409547, partId: 0 2025-03-31T16:01:22.799636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 323 RawX2: 4294969602 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-03-31T16:01:22.799692Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-03-31T16:01:22.799773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 323 RawX2: 4294969602 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-03-31T16:01:22.799836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:22.799875Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:01:22.799931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-03-31T16:01:22.799982Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2025-03-31T16:01:22.800747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 319 RawX2: 4294969599 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-03-31T16:01:22.800801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 2 2025-03-31T16:01:22.800907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:2, at schemeshard: 72057594046678944, message: Source { RawX1: 319 RawX2: 4294969599 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-03-31T16:01:22.800942Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-03-31T16:01:22.800995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 319 RawX2: 4294969599 } Origin: 
72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-03-31T16:01:22.801040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:2, shardIdx: 72057594046678944:2, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:22.801066Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:2, at schemeshard: 72057594046678944 2025-03-31T16:01:22.801091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:2, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-03-31T16:01:22.801120Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:2 129 -> 240 2025-03-31T16:01:22.808732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-31T16:01:22.809993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-31T16:01:22.818028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-31T16:01:22.818246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:01:22.818558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-31T16:01:22.818853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2025-03-31T16:01:22.821842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:01:22.822395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2025-03-31T16:01:22.822673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:01:22.822722Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-03-31T16:01:22.822825Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 2/3 2025-03-31T16:01:22.822894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2025-03-31T16:01:22.822932Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 2/3 2025-03-31T16:01:22.822967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2025-03-31T16:01:22.823075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 2/3, is published: true 2025-03-31T16:01:22.823716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:2, at schemeshard: 72057594046678944 2025-03-31T16:01:22.823752Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:2 ProgressState 2025-03-31T16:01:22.823814Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:2 progress is 3/3 2025-03-31T16:01:22.823841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-03-31T16:01:22.823871Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:2 progress is 3/3 2025-03-31T16:01:22.823894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 
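The operation traced above (txId 101, now at 3/3 parts done) is the create step of TSchemeShardTTLTests::CreateTableShouldSucceedAsyncOnIndexedTable; the DescribePath result that follows shows the resulting schema. A rough YQL equivalent of that schema, assembled from the names in the describe output (a sketch only; the test itself drives the schemeshard with proto transactions, not a query), would be:

    --!syntax_v1
    -- key Uint64 primary key, modified_at Timestamp, a global async index
    -- on modified_at, and a delete TTL of 3600s (ExpireAfterSeconds: 3600).
    CREATE TABLE TTLEnabledTable (
        key Uint64,
        modified_at Timestamp,
        INDEX UserDefinedIndexByExpireAt GLOBAL ASYNC ON (modified_at),
        PRIMARY KEY (key)
    ) WITH (
        TTL = Interval("PT1H") ON modified_at
    );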
2025-03-31T16:01:22.823922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 3/3, is published: true 2025-03-31T16:01:22.824003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:377:2345] message: TxId: 101 2025-03-31T16:01:22.824065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-03-31T16:01:22.824145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-03-31T16:01:22.824184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-03-31T16:01:22.824336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-31T16:01:22.824381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:1 2025-03-31T16:01:22.824406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:1 2025-03-31T16:01:22.824435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-03-31T16:01:22.824455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:2 2025-03-31T16:01:22.824473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:2 2025-03-31T16:01:22.824516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-03-31T16:01:22.827797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-31T16:01:22.827858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:378:2346] TestWaitNotification: OK eventTxId 101 2025-03-31T16:01:22.828470Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:01:22.828807Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 300us result status StatusSuccess 2025-03-31T16:01:22.829305Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByExpireAt" LocalPathId: 3 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "modified_at" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } 
TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |71.7%| [TA] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/test-results/unittest/{meta.json ... results_accumulator.log} |71.7%| [TA] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_populator/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_ColumnType [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-31T16:01:22.141457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:01:22.141572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:01:22.141633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:01:22.141672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:01:22.141723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:01:22.141753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:01:22.141841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:01:22.141928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, 
IsManualStartup# false 2025-03-31T16:01:22.142345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:01:22.255301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:01:22.255362Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:01:22.265041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:01:22.265534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:01:22.265726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:01:22.274553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:01:22.274893Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:01:22.275633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:22.276077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:01:22.278594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:22.280006Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:22.280077Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:22.280220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:01:22.280272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:22.280317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:01:22.280559Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:01:22.289253Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-03-31T16:01:22.469357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:01:22.469701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:22.469962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:01:22.470238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:01:22.470335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:22.480339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at 
schemeshard: 72057594046678944 2025-03-31T16:01:22.480527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:01:22.480758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:22.480822Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:01:22.480862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:01:22.480915Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:01:22.483609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:22.483699Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:01:22.483753Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:01:22.488419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:22.488499Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:22.488554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:22.488639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:01:22.492662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:01:22.494841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:01:22.495042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:01:22.496280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:22.496469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:01:22.496532Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:22.496833Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:01:22.496887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:22.497080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:01:22.497171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:01:22.500032Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:22.500090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:22.500338Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:22.500418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:01:22.500851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:22.500924Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:01:22.501037Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:01:22.501073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:22.501116Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:01:22.501167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:22.501224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:01:22.501266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:22.501304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:01:22.501346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:01:22.501420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:01:22.501461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:01:22.501517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:01:22.503833Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:01:22.503977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:01:22.504019Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-03-31T16:01:22.504066Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-03-31T16:01:22.504138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 
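The two proposes that follow (txId 101 and 102) exercise the negative path of TSchemeShardColumnTableTTL::CreateColumnTableNegative_ColumnType: a TTL column on a column table must be a date/time type, so String and DyNumber are rejected with StatusSchemeError. A hedged YQL sketch of the String variant (assuming standard column-table DDL; the test sends the equivalent proto directly):

    --!syntax_v1
    -- Rejected: TTL requires a date/time-typed column, so declaring
    -- modified_at as String (or DyNumber) yields StatusSchemeError.
    CREATE TABLE TTLEnabledTable (
        key Uint64 NOT NULL,
        modified_at String,
        PRIMARY KEY (key)
    ) WITH (
        STORE = COLUMN,
        TTL = Interval("PT1H") ON modified_at
    );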
2025-03-31T16:01:22.504295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-03-31T16:01:22.508148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-03-31T16:01:22.508741Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 WARNING: All log messages before y_absl::InitializeLog() is called are written to STDERR W0000 00:00:1743436882.510085 97430 text_format.cc:398] Warning parsing text-format NKikimrSchemeOp.TColumnTableDescription: 11:43: text format contains deprecated field "ExpireAfterSeconds" TestModificationResults wait txId: 101 2025-03-31T16:01:22.513158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "TTLEnabledTable" Schema { Columns { Name: "key" Type: "Uint64" NotNull: true } Columns { Name: "modified_at" Type: "String" } KeyColumnNames: "key" } TtlSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:01:22.513591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateColumnTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:01:22.514032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Unsupported column type, at schemeshard: 72057594046678944 2025-03-31T16:01:22.514544Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] Bootstrap 2025-03-31T16:01:22.535371Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] Become StateWork (SchemeCache [1:274:2265]) 2025-03-31T16:01:22.537095Z node 1 :TX_PROXY DEBUG: actor# [1:269:2260] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-31T16:01:22.553542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Unsupported column type" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:01:22.553779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Unsupported column type, operation: CREATE COLUMN TABLE, path: /MyRoot/ 2025-03-31T16:01:22.554662Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 W0000 00:00:1743436882.555237 97430 text_format.cc:398] Warning parsing text-format NKikimrSchemeOp.TColumnTableDescription: 11:43: text format contains deprecated field "ExpireAfterSeconds" TestModificationResults wait txId: 102 2025-03-31T16:01:22.558225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "TTLEnabledTable" Schema { Columns { Name: "key" Type: "Uint64" NotNull: true } Columns { Name: "modified_at" Type: "DyNumber" } KeyColumnNames: "key" } TtlSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 
2025-03-31T16:01:22.558592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateColumnTable Propose, path: /MyRoot/TTLEnabledTable, opId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:01:22.558822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusSchemeError, reason: Type 'DyNumber' specified for column 'modified_at' is not supported, at schemeshard: 72057594046678944 2025-03-31T16:01:22.573008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusSchemeError Reason: "Type \'DyNumber\' specified for column \'modified_at\' is not supported" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:01:22.573197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusSchemeError, reason: Type 'DyNumber' specified for column 'modified_at' is not supported, operation: CREATE COLUMN TABLE, path: /MyRoot/ TestModificationResult got TxId: 102, wait until txId: 102 |71.7%| [TA] {RESULT} $(B)/ydb/core/fq/libs/result_formatter/ut/test-results/unittest/{meta.json ... results_accumulator.log} |71.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats |71.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |71.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldSucceedOnIndexedTable >> TSchemeShardTTLTests::RacyAlterTableAndConditionalErase >> TSchemeShardTTLTestsWithReboots::CopyTable >> TSchemeShardTTLTests::ConditionalErase >> LocalTableWriter::ApplyInCorrectOrder |71.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::DataAlongWithHeartbeat |71.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> TPersQueueTest::WriteNonExistingTopic [GOOD] >> TPersQueueTest::WriteAfterAlter >> TPersQueueNewSchemeCacheTest::CheckGrpcWriteNoDC [GOOD] >> TPersQueueNewSchemeCacheTest::CheckGrpcReadNoDC >> TSchemeShardTTLTests::AlterTableShouldSucceedOnIndexedTable [GOOD] >> LocalTableWriter::SupportedTypes >> LocalTableWriter::WriteTable >> TPersQueueTest::DirectReadCleanCache [GOOD] >> TPersQueueTest::DirectReadRestartPQRB >> LocalTableWriter::ConsistentWrite >> SystemView::PartitionStatsLocksFields [GOOD] >> SystemView::PartitionStatsFields |71.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldSucceedOnIndexedTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-31T16:01:25.206265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:01:25.206377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
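The test whose log begins above, TSchemeShardTTLTests::AlterTableShouldSucceedOnIndexedTable, enables TTL on an already existing indexed table via an alter operation. In YQL terms the equivalent statement would be roughly the following (a sketch under the same assumption as before: the test goes through the schemeshard API rather than a query):

    --!syntax_v1
    -- Sketch: enabling a 1-hour delete TTL on an existing table's
    -- modified_at column; expected to succeed even with an index on it.
    ALTER TABLE TTLEnabledTable SET (TTL = Interval("PT1H") ON modified_at);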
2025-03-31T16:01:25.206415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:01:25.206476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:01:25.206530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:01:25.206559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:01:25.206651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:01:25.206730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:01:25.207133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:01:25.310971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:01:25.311034Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:01:25.330541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:01:25.331000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:01:25.331207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:01:25.358922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:01:25.359248Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:01:25.359886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:25.360268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:01:25.362184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:25.363420Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:25.363492Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:25.363593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:01:25.363636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:25.363672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:01:25.363897Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:01:25.370699Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-03-31T16:01:25.746513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" 
} } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:01:25.746769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:25.747025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:01:25.747486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:01:25.747558Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:25.749903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:25.750060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:01:25.750293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:25.750353Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:01:25.750391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:01:25.750424Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:01:25.752462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:25.752529Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:01:25.752570Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:01:25.754298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:25.754350Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:25.754405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:25.754480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:01:25.758235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:01:25.764201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:01:25.764397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 
1 at step: 5000001 2025-03-31T16:01:25.765470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:25.765627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:01:25.765675Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:25.765966Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:01:25.766022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:25.766207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:01:25.766281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:01:25.784172Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:25.784241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:25.784422Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:25.784487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:01:25.784867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:25.784914Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:01:25.785029Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:01:25.785066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:25.785104Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:01:25.785135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:25.785172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:01:25.785215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:25.785248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:01:25.785279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:01:25.785350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:01:25.785384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:01:25.785434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 
3 2025-03-31T16:01:25.787720Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:01:25.787871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:01:25.787936Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... ->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409547 for txId: 102 at step: 5000003 2025-03-31T16:01:26.248127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:26.248251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:01:26.248316Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId# 102:0 HandleReply TEvOperationPlan, operationId: 102:0, stepId: 5000003, at schemeshard: 72057594046678944 2025-03-31T16:01:26.248616Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2025-03-31T16:01:26.248766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-31T16:01:26.254320Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:26.254380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T16:01:26.254688Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:26.254760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 2 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-03-31T16:01:26.255242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:01:26.255325Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2025-03-31T16:01:26.255892Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:01:26.256020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:01:26.256078Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-03-31T16:01:26.256122Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2025-03-31T16:01:26.256172Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-03-31T16:01:26.256261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-03-31T16:01:26.262853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-31T16:01:26.288907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1289 } } 2025-03-31T16:01:26.288976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-03-31T16:01:26.289177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1289 } } 2025-03-31T16:01:26.289295Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1289 } } FAKE_COORDINATOR: Erasing txId 102 2025-03-31T16:01:26.290603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 325 RawX2: 4294969604 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-03-31T16:01:26.290664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-03-31T16:01:26.290836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 325 RawX2: 4294969604 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-03-31T16:01:26.290924Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-03-31T16:01:26.291019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 325 RawX2: 4294969604 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-03-31T16:01:26.291125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:26.291166Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:01:26.291202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, 
datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-03-31T16:01:26.291238Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-03-31T16:01:26.300382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:01:26.301015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:01:26.301367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:01:26.301419Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-03-31T16:01:26.301531Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-31T16:01:26.301571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T16:01:26.301612Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-31T16:01:26.301644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T16:01:26.301683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-03-31T16:01:26.301766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:377:2345] message: TxId: 102 2025-03-31T16:01:26.301822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T16:01:26.301940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-03-31T16:01:26.301986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-03-31T16:01:26.302165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-31T16:01:26.307324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-31T16:01:26.307390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:460:2421] TestWaitNotification: OK eventTxId 102 2025-03-31T16:01:26.307973Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:01:26.308262Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 305us result status StatusSuccess 2025-03-31T16:01:26.308770Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: 
"modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByExpireAt" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "modified_at" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 2 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> LocalTableWriter::WaitTxIds >> LocalTableWriter::DecimalKeys >> THiveImplTest::BalancerSpeedAndDistribution [GOOD] >> THiveImplTest::TestShortTabletTypes [GOOD] >> THiveImplTest::TestStDev [GOOD] >> THiveTest::TestBlockCreateTablet |71.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::AlterColumnTable >> LocalTableWriter::WriteTable [GOOD] >> LocalTableWriter::SupportedTypes [GOOD] >> LocalTableWriter::DataAlongWithHeartbeat [GOOD] >> THiveTest::TestBlockCreateTablet [GOOD] >> THiveTest::DrainWithHiveRestart >> LocalTableWriter::ConsistentWrite [GOOD] >> LocalTableWriter::ApplyInCorrectOrder [GOOD] >> LocalTableWriter::WaitTxIds [GOOD] |71.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::WriteTable [GOOD] Test command err: 2025-03-31T16:01:26.764119Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004411759166322:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:01:26.764204Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a39/r3tmp/tmpjRQtDy/pdisk_1.dat 2025-03-31T16:01:27.397312Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:01:27.397413Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:01:27.401452Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:01:27.451544Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:18520 TServer::EnableGrpc on GrpcPort 22865, node 1 2025-03-31T16:01:27.823017Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:01:27.823049Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:01:27.823057Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:01:27.823191Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18520 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:01:28.224444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:01:28.263835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743436888438 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... 
(TRUNCATED) 2025-03-31T16:01:28.453514Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004420349101605:2354] Handshake: worker# [1:7488004420349101513:2294] 2025-03-31T16:01:28.453764Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004420349101605:2354] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-31T16:01:28.453989Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004420349101605:2354] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-03-31T16:01:28.454011Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004420349101605:2354] Send handshake: worker# [1:7488004420349101513:2294] 2025-03-31T16:01:28.454397Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004420349101605:2354] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 36b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 36b Offset: 2 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 36b Offset: 3 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2025-03-31T16:01:28.454616Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004420349101605:2354] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 36 },{ Order: 2 BodySize: 36 },{ Order: 3 BodySize: 36 }] } 2025-03-31T16:01:28.454795Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7488004420349101608:2354] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2025-03-31T16:01:28.454832Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004420349101605:2354] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-03-31T16:01:28.454910Z node 1 :REPLICATION_SERVICE DEBUG: 
[TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7488004420349101608:2354] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b },{ Order: 2 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b },{ Order: 3 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b }] } 2025-03-31T16:01:28.457769Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7488004420349101608:2354] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-03-31T16:01:28.457814Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004420349101605:2354] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-03-31T16:01:28.457852Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004420349101605:2354] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1,2,3] } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::SupportedTypes [GOOD] Test command err: 2025-03-31T16:01:26.796654Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004410195099475:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:01:26.797414Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a22/r3tmp/tmpl6UAHD/pdisk_1.dat 2025-03-31T16:01:27.375343Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:01:27.407067Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:01:27.407182Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:01:27.409541Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21762 TServer::EnableGrpc on GrpcPort 29064, node 1 2025-03-31T16:01:27.878885Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:01:27.878909Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:01:27.878916Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:01:27.879031Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21762 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:01:28.331651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:28.367903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743436888494 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "int32_value" Type: "Int32" TypeId: 1 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "ui... 
(TRUNCATED) 2025-03-31T16:01:28.519805Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004418785034764:2356] Handshake: worker# [1:7488004418785034671:2295] 2025-03-31T16:01:28.520412Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004418785034764:2356] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-31T16:01:28.520740Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004418785034764:2356] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-03-31T16:01:28.520767Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004418785034764:2356] Send handshake: worker# [1:7488004418785034671:2295] 2025-03-31T16:01:28.522019Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004418785034764:2356] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 45b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 45b Offset: 2 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 45b Offset: 3 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 45b Offset: 4 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 41b Offset: 5 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 41b Offset: 6 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 45b Offset: 7 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 44b Offset: 8 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 66b Offset: 9 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 71b Offset: 10 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 72b Offset: 11 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 49b Offset: 12 SeqNo: 0 
CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 48b Offset: 13 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 51b Offset: 14 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 58b Offset: 15 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 51b Offset: 16 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 54b Offset: 17 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 57b Offset: 18 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 76b Offset: 19 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 45b Offset: 20 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 54b Offset: 21 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 61b Offset: 22 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 51b Offset: 23 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 45b Offset: 24 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 46b Offset: 25 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 47b Offset: 26 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 50b Offset: 27 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 49b Offset: 28 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 72b Offset: 29 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 57b Offset: 30 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 64b Offset: 31 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2025-03-31T16:01:28.523021Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004418785034764:2356] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 45 },{ Order: 2 BodySize: 45 },{ Order: 3 BodySize: 45 },{ Order: 4 BodySize: 45 },{ Order: 5 BodySize: 41 },{ Order: 6 BodySize: 41 },{ Order: 7 BodySize: 45 },{ Order: 8 BodySize: 44 },{ Order: 9 BodySize: 66 },{ Order: 10 BodySize: 71 },{ Order: 11 BodySize: 72 },{ Order: 12 BodySize: 49 },{ Order: 13 BodySize: 48 },{ Order: 14 BodySize: 51 },{ Order: 15 BodySize: 58 },{ Order: 16 BodySize: 51 },{ Order: 17 BodySize: 54 },{ Order: 18 BodySize: 57 },{ Order: 19 BodySize: 76 },{ Order: 20 BodySize: 45 },{ Order: 21 BodySize: 54 },{ Order: 22 BodySize: 61 },{ Order: 23 BodySize: 51 },{ Order: 24 BodySize: 45 },{ Order: 25 BodySize: 46 },{ Order: 26 BodySize: 47 },{ Order: 27 BodySize: 50 },{ Order: 28 BodySize: 49 },{ Order: 29 BodySize: 72 },{ Order: 30 BodySize: 57 },{ Order: 31 BodySize: 64 }] } 2025-03-31T16:01:28.523537Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7488004418785034767:2356] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2025-03-31T16:01:28.523586Z node 1 
:REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004418785034764:2356] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-03-31T16:01:28.523934Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7488004418785034767:2356] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 2 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 3 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 4 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 5 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 41b },{ Order: 6 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 41b },{ Order: 7 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 8 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 44b },{ Order: 9 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 66b },{ Order: 10 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 71b },{ Order: 11 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 72b },{ Order: 12 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 13 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b },{ Order: 14 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 51b },{ Order: 15 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 58b },{ Order: 16 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 51b },{ Order: 17 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 54b },{ Order: 18 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 57b },{ Order: 19 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 76b },{ Order: 20 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 21 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 54b },{ Order: 22 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 61b },{ Order: 23 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 51b },{ Order: 24 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 25 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 46b },{ Order: 26 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 47b },{ Order: 27 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 50b },{ Order: 28 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 29 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 72b },{ Order: 30 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 57b },{ Order: 31 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 64b }] } 2025-03-31T16:01:28.581433Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7488004418785034767:2356] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-03-31T16:01:28.581524Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, 
LocalPathId: 2][1:7488004418785034764:2356] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-03-31T16:01:28.581599Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004418785034764:2356] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31] } >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-64 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-65 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::DataAlongWithHeartbeat [GOOD] Test command err: 2025-03-31T16:01:25.883568Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004406617540555:2194];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:01:25.884378Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a58/r3tmp/tmpLJFMDl/pdisk_1.dat 2025-03-31T16:01:26.572660Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:01:26.617463Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:01:26.617556Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:01:26.620280Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23802 TServer::EnableGrpc on GrpcPort 3730, node 1 2025-03-31T16:01:27.180966Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:01:27.180990Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:01:27.180997Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:01:27.181120Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23802 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:01:27.781589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:01:27.811608Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:27.822971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743436887983 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... (TRUNCATED) 2025-03-31T16:01:28.043424Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004419502443023:2357] Handshake: worker# [1:7488004419502443024:2358] 2025-03-31T16:01:28.043773Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004419502443023:2357] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-31T16:01:28.044055Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004419502443023:2357] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-03-31T16:01:28.044087Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004419502443023:2357] Send handshake: worker# [1:7488004419502443024:2358] 2025-03-31T16:01:28.044674Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004419502443023:2357] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 48b Offset: 1 
SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 19b Offset: 2 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2025-03-31T16:01:28.062368Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004419502443023:2357] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 10 TxId: 0 } TxId: 1 } 2025-03-31T16:01:28.062557Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004419502443023:2357] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 48 }] } 2025-03-31T16:01:28.062757Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7488004419502443027:2357] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2025-03-31T16:01:28.062830Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004419502443023:2357] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-03-31T16:01:28.062911Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7488004419502443027:2357] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 1 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b }] } 2025-03-31T16:01:28.076430Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7488004419502443027:2357] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-03-31T16:01:28.076515Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004419502443023:2357] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-03-31T16:01:28.076582Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004419502443023:2357] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1] } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::ApplyInCorrectOrder [GOOD] Test command err: 2025-03-31T16:01:25.966457Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004405157235665:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:01:25.967121Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a6b/r3tmp/tmpzkZK6o/pdisk_1.dat 2025-03-31T16:01:26.842922Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:01:26.879037Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:01:26.883243Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:01:26.890780Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25940 TServer::EnableGrpc on GrpcPort 19818, node 1 2025-03-31T16:01:27.328475Z node 1 :NET_CLASSIFIER WARN: 
distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:01:27.328499Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:01:27.328507Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:01:27.328621Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25940 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:01:28.219350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:28.234007Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:01:28.238086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743436888368 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... 
(TRUNCATED) 2025-03-31T16:01:28.440943Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004418042138270:2358] Handshake: worker# [1:7488004418042138178:2298] 2025-03-31T16:01:28.441245Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004418042138270:2358] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-31T16:01:28.441449Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004418042138270:2358] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-03-31T16:01:28.441472Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004418042138270:2358] Send handshake: worker# [1:7488004418042138178:2298] 2025-03-31T16:01:28.441977Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004418042138270:2358] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 48b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2025-03-31T16:01:28.451856Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004418042138270:2358] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 10 TxId: 0 } TxId: 1 } 2025-03-31T16:01:28.452015Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004418042138270:2358] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 48 }] } 2025-03-31T16:01:28.452373Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7488004418042138273:2358] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2025-03-31T16:01:28.452434Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004418042138270:2358] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-03-31T16:01:28.452539Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7488004418042138273:2358] Handle 
NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 1 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b }] } 2025-03-31T16:01:28.458615Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7488004418042138273:2358] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-03-31T16:01:28.458733Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004418042138270:2358] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-03-31T16:01:28.458793Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004418042138270:2358] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1] } 2025-03-31T16:01:28.459364Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004418042138270:2358] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 49b Offset: 2 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 48b Offset: 3 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 19b Offset: 4 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2025-03-31T16:01:28.460190Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004418042138270:2358] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 20 TxId: 0 } TxId: 2 } 2025-03-31T16:01:28.460314Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004418042138270:2358] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 2 BodySize: 49 },{ Order: 3 BodySize: 48 }] } 2025-03-31T16:01:28.460444Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7488004418042138273:2358] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 2 Group: 0 Step: 11 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 3 Group: 0 Step: 2 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b }] } 2025-03-31T16:01:28.464913Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7488004418042138273:2358] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-03-31T16:01:28.465001Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004418042138270:2358] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-03-31T16:01:28.465040Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004418042138270:2358] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [2,3] } >> TSchemeShardTTLTests::ConditionalErase [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::ConsistentWrite [GOOD] Test command err: 2025-03-31T16:01:27.039688Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004413017804917:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:01:27.039804Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a33/r3tmp/tmpAIoyJR/pdisk_1.dat 2025-03-31T16:01:27.535399Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:01:27.571292Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:01:27.571420Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:01:27.575326Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28683 TServer::EnableGrpc on GrpcPort 14853, node 1 2025-03-31T16:01:28.047841Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:01:28.047866Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:01:28.047872Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:01:28.047990Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28683 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:01:28.507241Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:28.531641Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:01:28.538422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743436888655 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... (TRUNCATED) 2025-03-31T16:01:28.656340Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004417312772901:2356] Handshake: worker# [1:7488004417312772808:2295] 2025-03-31T16:01:28.656646Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004417312772901:2356] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-31T16:01:28.656850Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004417312772901:2356] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-03-31T16:01:28.656876Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004417312772901:2356] Send handshake: worker# [1:7488004417312772808:2295] 2025-03-31T16:01:28.657498Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004417312772901:2356] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 48b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 48b Offset: 2 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 48b Offset: 3 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2025-03-31T16:01:28.663141Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 
72057594046644480, LocalPathId: 2][1:7488004417312772901:2356] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 10 TxId: 0 } TxId: 1 } 2025-03-31T16:01:28.663338Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004417312772901:2356] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 48 },{ Order: 2 BodySize: 48 },{ Order: 3 BodySize: 48 }] } 2025-03-31T16:01:28.663546Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7488004417312772904:2356] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2025-03-31T16:01:28.663608Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004417312772901:2356] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-03-31T16:01:28.663696Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7488004417312772904:2356] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 1 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b },{ Order: 2 Group: 0 Step: 2 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b },{ Order: 3 Group: 0 Step: 3 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b }] } 2025-03-31T16:01:28.666027Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7488004417312772904:2356] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-03-31T16:01:28.666108Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004417312772901:2356] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-03-31T16:01:28.666172Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004417312772901:2356] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1,2,3] } 2025-03-31T16:01:28.666610Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004417312772901:2356] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 19b Offset: 4 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2025-03-31T16:01:28.667055Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004417312772901:2356] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 49b Offset: 5 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 49b Offset: 6 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 49b Offset: 7 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 49b Offset: 8 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2025-03-31T16:01:28.667519Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004417312772901:2356] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 20 TxId: 0 } TxId: 2 } VersionTxIds { Version { Step: 30 TxId: 0 } TxId: 3 } 
2025-03-31T16:01:28.667635Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004417312772901:2356] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 5 BodySize: 49 },{ Order: 6 BodySize: 49 },{ Order: 7 BodySize: 49 },{ Order: 8 BodySize: 49 }] } 2025-03-31T16:01:28.667791Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7488004417312772904:2356] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 5 Group: 0 Step: 11 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 6 Group: 0 Step: 12 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 7 Group: 0 Step: 21 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 8 Group: 0 Step: 22 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b }] } 2025-03-31T16:01:28.669404Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7488004417312772904:2356] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-03-31T16:01:28.669445Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004417312772901:2356] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-03-31T16:01:28.669480Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004417312772901:2356] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [5,6,7,8] } 2025-03-31T16:01:28.669856Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004417312772901:2356] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 49b Offset: 9 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 49b Offset: 10 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2025-03-31T16:01:28.669966Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004417312772901:2356] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 9 BodySize: 49 },{ Order: 10 BodySize: 49 }] } 2025-03-31T16:01:28.670086Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7488004417312772904:2356] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 9 Group: 0 Step: 13 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 10 Group: 0 Step: 23 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b }] } 2025-03-31T16:01:28.671971Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7488004417312772904:2356] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-03-31T16:01:28.672090Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004417312772901:2356] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-03-31T16:01:28.672146Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004417312772901:2356] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { 
Records [9,10] } 2025-03-31T16:01:28.672727Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004417312772901:2356] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 19b Offset: 11 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } |71.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut |71.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut |71.8%| [LD] {RESULT} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::WaitTxIds [GOOD] Test command err: 2025-03-31T16:01:28.022742Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004420715310025:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:01:28.027729Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a09/r3tmp/tmpbDYsf0/pdisk_1.dat 2025-03-31T16:01:28.556257Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:01:28.562753Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:01:28.562873Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:01:28.565790Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15747 TServer::EnableGrpc on GrpcPort 62965, node 1 2025-03-31T16:01:28.831326Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:01:28.831348Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:01:28.831359Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:01:28.831583Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15747 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:01:29.376754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:29.399851Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:01:29.406854Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743436889516 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... (TRUNCATED) 2025-03-31T16:01:29.561969Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004425010277890:2355] Handshake: worker# [1:7488004425010277891:2356] 2025-03-31T16:01:29.562437Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004425010277890:2355] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-31T16:01:29.562796Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004425010277890:2355] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-03-31T16:01:29.562907Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004425010277890:2355] Send handshake: worker# [1:7488004425010277891:2356] 2025-03-31T16:01:29.563616Z node 1 
:REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004425010277890:2355] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 48b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 49b Offset: 2 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2025-03-31T16:01:29.569456Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004425010277890:2355] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 10 TxId: 0 } TxId: 1 } 2025-03-31T16:01:29.569622Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004425010277890:2355] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 48 }] } 2025-03-31T16:01:29.569808Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7488004425010277894:2355] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2025-03-31T16:01:29.569883Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004425010277890:2355] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-03-31T16:01:29.570063Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7488004425010277894:2355] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 1 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b }] } 2025-03-31T16:01:29.574053Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7488004425010277894:2355] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-03-31T16:01:29.574126Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004425010277890:2355] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-03-31T16:01:29.574198Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004425010277890:2355] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1] } 2025-03-31T16:01:30.567849Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004425010277890:2355] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 20 TxId: 0 } TxId: 2 } 2025-03-31T16:01:30.567977Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004425010277890:2355] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 2 BodySize: 49 }] } 2025-03-31T16:01:30.568139Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7488004425010277894:2355] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 2 Group: 0 Step: 11 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b }] } 2025-03-31T16:01:30.573209Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7488004425010277894:2355] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 
2025-03-31T16:01:30.573284Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004425010277890:2355] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-03-31T16:01:30.573323Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004425010277890:2355] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [2] } >> LocalTableWriter::DecimalKeys [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::ConditionalErase [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:01:26.245977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:01:26.246112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:01:26.246156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:01:26.246192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:01:26.246253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:01:26.246291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:01:26.246373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:01:26.246518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:01:26.246933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:01:26.377501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:01:26.377560Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:01:26.382613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:01:26.382719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:01:26.382858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:01:26.387842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:01:26.388073Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:01:26.388715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:26.388895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 2025-03-31T16:01:26.390809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:26.391914Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:26.391972Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:26.392137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:01:26.392179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:26.392216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:01:26.392381Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:01:26.398887Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:01:26.590019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:01:26.590226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:26.590466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:01:26.590696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:01:26.590748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:26.596162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:26.596297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:01:26.596477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:26.596544Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:01:26.596577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:01:26.596606Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:01:26.600232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:26.600304Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:01:26.600344Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state 
for txid 1:0 3 -> 128 2025-03-31T16:01:26.605171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:26.605233Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:26.605285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:26.605355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:01:26.609032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:01:26.620291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:01:26.620505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:01:26.621476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:26.621621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:01:26.621668Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:26.622038Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:01:26.622123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:26.622295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:01:26.622370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:01:26.628395Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:26.628452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:26.628649Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:26.628705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:01:26.628920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 
72057594046678944 2025-03-31T16:01:26.628965Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:01:26.629058Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:01:26.629089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:26.629123Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:01:26.629160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:26.629198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:01:26.629241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:26.629275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:01:26.629304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:01:26.629372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:01:26.629429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:01:26.629480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:01:26.635621Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:01:26.635799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:01:26.635836Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
Size 627 rowCount 2 cpuUsage 0 2025-03-31T16:01:31.935894Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2025-03-31T16:01:31.936011Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-03-31T16:01:31.936161Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-03-31T16:01:31.936298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409546, request: TableId: 2 Expiration { ColumnId: 2 WallClockTimestamp: 1600466640242000 ColumnUnit: UNIT_AUTO } SchemaVersion: 1 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2025-03-31T16:01:31.936389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409550, request: TableId: 6 Expiration { ColumnId: 2 WallClockTimestamp: 1600466640242000 ColumnUnit: UNIT_MICROSECONDS } SchemaVersion: 1 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2025-03-31T16:01:31.936451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409549, request: TableId: 5 Expiration { ColumnId: 2 WallClockTimestamp: 1600466640242000 ColumnUnit: UNIT_MICROSECONDS } SchemaVersion: 1 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2025-03-31T16:01:31.936505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409548, request: TableId: 4 Expiration { ColumnId: 2 WallClockTimestamp: 1600466640242000 ColumnUnit: UNIT_AUTO } SchemaVersion: 1 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2025-03-31T16:01:31.936592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409547, request: TableId: 3 Expiration { ColumnId: 2 WallClockTimestamp: 1600463040242000 ColumnUnit: UNIT_AUTO } SchemaVersion: 1 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2025-03-31T16:01:31.936664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409551, request: TableId: 7 Expiration { ColumnId: 2 WallClockTimestamp: 1600466640242000 ColumnUnit: UNIT_MICROSECONDS } SchemaVersion: 1 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2025-03-31T16:01:31.937129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409546, at schemeshard: 72057594046678944 2025-03-31T16:01:31.937529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409548, at schemeshard: 72057594046678944 2025-03-31T16:01:31.938521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409549, at schemeshard: 72057594046678944 2025-03-31T16:01:31.938840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409551, at schemeshard: 72057594046678944 2025-03-31T16:01:31.942217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409547, at schemeshard: 72057594046678944 2025-03-31T16:01:31.945657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409550, at schemeshard: 72057594046678944 2025-03-31T16:01:31.946185Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 
2025-03-31T16:01:31.946243Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409548, at schemeshard: 72057594046678944 2025-03-31T16:01:31.946831Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2025-03-31T16:01:31.946868Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409546, at schemeshard: 72057594046678944 2025-03-31T16:01:31.949108Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2025-03-31T16:01:31.949149Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409549, at schemeshard: 72057594046678944 2025-03-31T16:01:31.952732Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2025-03-31T16:01:31.952800Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409547, at schemeshard: 72057594046678944 2025-03-31T16:01:31.959359Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2025-03-31T16:01:31.959440Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409550, at schemeshard: 72057594046678944 2025-03-31T16:01:31.960417Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2025-03-31T16:01:31.960524Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-03-31T16:01:31.960569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:3, run at: 2020-09-18T23:04:00.243000Z, at schemeshard: 72057594046678944 2025-03-31T16:01:31.960640Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2025-03-31T16:01:31.960708Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2025-03-31T16:01:31.960755Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-03-31T16:01:31.960800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:1, run at: 2020-09-18T23:04:00.243000Z, at schemeshard: 72057594046678944 2025-03-31T16:01:31.960857Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2025-03-31T16:01:31.960903Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-03-31T16:01:31.960931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:4, run at: 2020-09-18T23:04:00.243000Z, at schemeshard: 72057594046678944 2025-03-31T16:01:31.960974Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-03-31T16:01:31.960999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:2, run at: 2020-09-18T23:04:00.244000Z, at schemeshard: 72057594046678944 2025-03-31T16:01:31.966748Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2025-03-31T16:01:31.966863Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-03-31T16:01:31.966900Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-03-31T16:01:31.966920Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944
2025-03-31T16:01:31.966942Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944
2025-03-31T16:01:31.967008Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944
2025-03-31T16:01:31.967041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:5, run at: 2020-09-18T23:04:00.245000Z, at schemeshard: 72057594046678944
2025-03-31T16:01:31.967109Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944
2025-03-31T16:01:32.047285Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 5
2025-03-31T16:01:32.047449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72057594046678944:2 data size 0 row count 0
2025-03-31T16:01:32.047522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], pathId map=TTLEnabledTable2, is column=0, is olap=0
2025-03-31T16:01:32.047571Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409547 followerId=0, pathId 3: RowCount 0, DataSize 0
2025-03-31T16:01:32.047681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 5 shard idx 72057594046678944:4 data size 43 row count 1
2025-03-31T16:01:32.047717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409549 maps to shardIdx: 72057594046678944:4 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], pathId map=TTLEnabledTable4, is column=0, is olap=0
2025-03-31T16:01:32.047778Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409549 followerId=0, pathId 5: RowCount 1, DataSize 43
2025-03-31T16:01:32.047854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:3 data size 603 row count 2
2025-03-31T16:01:32.047885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409548 maps to shardIdx: 72057594046678944:3 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=TTLEnabledTable3, is column=0, is olap=0
2025-03-31T16:01:32.047909Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409548 followerId=0, pathId 4: RowCount 2, DataSize 603
2025-03-31T16:01:32.047964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 0 row count 0
2025-03-31T16:01:32.048028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=TTLEnabledTable1, is column=0, is olap=0
2025-03-31T16:01:32.048053Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 2: RowCount 0, DataSize 0
2025-03-31T16:01:32.048110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 6 shard idx 72057594046678944:5 data size 627 row count 2
2025-03-31T16:01:32.048144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409550 maps to shardIdx: 72057594046678944:5 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], pathId map=TTLEnabledTable5, is column=0, is olap=0
2025-03-31T16:01:32.048176Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409550 followerId=0, pathId 6: RowCount 2, DataSize 627, with borrowed parts
2025-03-31T16:01:32.072277Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944
2025-03-31T16:01:32.072349Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409551, at schemeshard: 72057594046678944
2025-03-31T16:01:32.077546Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944
2025-03-31T16:01:32.077680Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944
2025-03-31T16:01:32.077746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:6, run at: 2020-09-18T23:04:00.247000Z, at schemeshard: 72057594046678944
2025-03-31T16:01:32.077813Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944
>> KqpWorkload::STOCK [GOOD]
|71.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::DecimalKeys [GOOD]
Test command err:
2025-03-31T16:01:28.870719Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004420796603770:2058];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:01:28.870751Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0009ec/r3tmp/tmpx5OqTQ/pdisk_1.dat
2025-03-31T16:01:29.507199Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:01:29.508127Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:01:29.508219Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:01:29.528316Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:23919
TServer::EnableGrpc on GrpcPort 19682, node 1
2025-03-31T16:01:29.929977Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T16:01:29.930000Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T16:01:29.930008Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T16:01:29.930123Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:23919
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-31T16:01:30.454810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:01:30.477764Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-03-31T16:01:30.481675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
TClient::Ls request: /Root/Table
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743436890608 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Decimal(1,0)" TypeId: 4865 Id: 1 NotNull: false TypeInfo { DecimalPrecision: 1 DecimalScale: 0 } IsBuildInProgress: false } Columns { Name: "value" Type: "Decimal(35,10)" TypeId: 4865 I... (TRUNCATED)
2025-03-31T16:01:30.661819Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004429386539074:2356] Handshake: worker# [1:7488004429386538982:2296]
2025-03-31T16:01:30.662141Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004429386539074:2356] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] }
2025-03-31T16:01:30.662491Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004429386539074:2356] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Decimal(1,0) : NULL) IncFrom: 1 To: () IncTo: 0 }] }
2025-03-31T16:01:30.662562Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004429386539074:2356] Send handshake: worker# [1:7488004429386538982:2296]
2025-03-31T16:01:30.663164Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004429386539074:2356] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 57b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 57b Offset: 2 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 57b Offset: 3 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] }
2025-03-31T16:01:30.663351Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004429386539074:2356] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 57 },{ Order: 2 BodySize: 57 },{ Order: 3 BodySize: 57 }] }
2025-03-31T16:01:30.663533Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7488004429386539077:2356] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse
2025-03-31T16:01:30.663574Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004429386539074:2356] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 }
2025-03-31T16:01:30.663671Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7488004429386539077:2356] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 57b },{ Order: 2 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 57b },{ Order: 3 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 57b }] }
2025-03-31T16:01:30.666780Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7488004429386539077:2356] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK
2025-03-31T16:01:30.666860Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004429386539074:2356] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 }
2025-03-31T16:01:30.666912Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7488004429386539074:2356] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1,2,3] }
>> SystemView::ShowCreateTableTemporary [FAIL]
|71.8%| [TA] $(B)/ydb/core/tx/replication/service/ut_table_writer/test-results/unittest/{meta.json ... results_accumulator.log}
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpWorkload::STOCK [GOOD]
Test command err:
Trying to start YDB, gRPC: 6370, MsgBus: 11179
2025-03-31T15:59:48.507505Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488003988687165210:2141];send_to=[0:7307199536658146131:7762515];
2025-03-31T15:59:48.512190Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000f92/r3tmp/tmpn336N0/pdisk_1.dat
2025-03-31T15:59:49.154493Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T15:59:49.154611Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T15:59:49.157589Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-31T15:59:49.175202Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 6370, node 1
2025-03-31T15:59:49.421146Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T15:59:49.421197Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T15:59:49.421203Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T15:59:49.421320Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:11179
TClient is connected to server localhost:11179
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-31T15:59:50.352270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T15:59:52.487498Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004005867034976:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T15:59:52.487584Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T15:59:52.789398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
2025-03-31T15:59:52.912366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
2025-03-31T15:59:53.578165Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488003988687165210:2141];send_to=[0:7307199536658146131:7762515];
2025-03-31T15:59:53.626147Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-31T15:59:53.726206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
2025-03-31T15:59:54.247358Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004014456973733:2634], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T15:59:54.247432Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T15:59:54.247507Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004014456973738:2637], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T15:59:54.251891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480
2025-03-31T15:59:54.268406Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488004014456973740:2638], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking }
2025-03-31T15:59:54.352145Z node 1 :TX_PROXY ERROR: Actor# [1:7488004014456973822:5117] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-31T16:00:04.171364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs
2025-03-31T16:00:04.171401Z node 1 :IMPORT WARN: Table profiles were not loaded
took: 0.801910s
took: 0.861616s
took: 0.862357s
took: 0.882723s
took: 0.850172s
took: 0.872245s
took: 0.891659s
took: 0.900497s
took: 0.781285s
took: 0.844719s
took: 8.323124s
took: 8.419389s
took: 8.515574s
took: 8.544256s
took: 8.554979s
took: 8.591018s
took: 8.596203s
took: 8.597300s
took: 8.575172s
took: 8.601970s
2025-03-31T16:01:29.485021Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976711161;
took: 9.165221s
2025-03-31T16:01:29.503814Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976711166;
2025-03-31T16:01:29.504808Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976711168;
2025-03-31T16:01:29.509230Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488004422478883816:5595], Table: `/Root/stock` ([72057594046644480:2:1]), SessionActorId: [1:7488004383824176214:5595]Got LOCKS BROKEN for table `/Root/stock`. ShardID=72075186224037888, Sink=[1:7488004422478883816:5595].{
: Error: Operation is aborting because locks are not valid, code: 2001 }
2025-03-31T16:01:29.509954Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488004418183915585:5595], SessionActorId: [1:7488004383824176214:5595], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/stock`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[1:7488004383824176214:5595]. isRollback=0
2025-03-31T16:01:29.510296Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTkwMWEwYzctYzM5ZTRhMWYtNDI1NjU2ZS1jNGFjNTMyMw==, ActorId: [1:7488004383824176214:5595], ActorState: ExecuteState, TraceId: 01jqpez7fg46yytz61wvbs4r71, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [1:7488004422478883519:5595] from: [1:7488004418183915585:5595]
2025-03-31T16:01:29.510406Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7488004422478883519:5595] TxId: 281474976711161. Ctx: { TraceId: 01jqpez7fg46yytz61wvbs4r71, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTkwMWEwYzctYzM5ZTRhMWYtNDI1NjU2ZS1jNGFjNTMyMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/stock`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } }
2025-03-31T16:01:29.514709Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTkwMWEwYzctYzM5ZTRhMWYtNDI1NjU2ZS1jNGFjNTMyMw==, ActorId: [1:7488004383824176214:5595], ActorState: ExecuteState, TraceId: 01jqpez7fg46yytz61wvbs4r71, Create QueryResponse for error on request, msg:
2025-03-31T16:01:29.516439Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488004422478883871:5605], Table: `/Root/stock` ([72057594046644480:2:1]), SessionActorId: [1:7488004383824176226:5605]Got LOCKS BROKEN for table `/Root/stock`. ShardID=72075186224037888, Sink=[1:7488004422478883871:5605].{
: Error: Operation is aborting because locks are not valid, code: 2001 }
2025-03-31T16:01:29.516520Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488004418183915575:5605], SessionActorId: [1:7488004383824176226:5605], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/stock`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[1:7488004383824176226:5605]. isRollback=0
2025-03-31T16:01:29.516778Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Yjc1NDkwYWMtMTc1Zjg0My00NzBjMGZhYi00MDMxYWRkZQ==, ActorId: [1:7488004383824176226:5605], ActorState: ExecuteState, TraceId: 01jqpez7e4cqtqc0xr7bn6ebns, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [1:7488004422478883637:5605] from: [1:7488004418183915575:5605]
2025-03-31T16:01:29.516841Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7488004422478883637:5605] TxId: 281474976711166. Ctx: { TraceId: 01jqpez7e4cqtqc0xr7bn6ebns, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yjc1NDkwYWMtMTc1Zjg0My00NzBjMGZhYi00MDMxYWRkZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/stock`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } }
took: 9.182083s
2025-03-31T16:01:29.522085Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Yjc1NDkwYWMtMTc1Zjg0My00NzBjMGZhYi00MDMxYWRkZQ==, ActorId: [1:7488004383824176226:5605], ActorState: ExecuteState, TraceId: 01jqpez7e4cqtqc0xr7bn6ebns, Create QueryResponse for error on request, msg:
took: 9.190914s
2025-03-31T16:01:29.528421Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_ABORTED;details=Distributed transaction aborted due to commit failure;tx_id=281474976711161;
2025-03-31T16:01:29.528628Z node 1 :TX_DATASHARD ERROR: Complete volatile write [1743436889509 : 281474976711161] from 72075186224037889 at tablet 72075186224037889, error: Status: STATUS_ABORTED Issues: { message: "Distributed transaction aborted due to commit failure" issue_code: 2011 severity: 1 }
2025-03-31T16:01:29.533229Z node 1 :GLOBAL WARN: fline=even ... 6:01:31.768216Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037904 not found
2025-03-31T16:01:31.768235Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037915 not found
2025-03-31T16:01:31.768252Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037911 not found
2025-03-31T16:01:31.768332Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037896 not found
2025-03-31T16:01:31.768350Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037913 not found
2025-03-31T16:01:31.768372Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037897 not found
2025-03-31T16:01:31.768389Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037900 not found
2025-03-31T16:01:31.768405Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037898 not found
2025-03-31T16:01:31.768421Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037924 not found
2025-03-31T16:01:31.768438Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037923 not found
2025-03-31T16:01:31.768458Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037928 not found
2025-03-31T16:01:31.768475Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037922 not found
2025-03-31T16:01:31.768491Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037921 not found
2025-03-31T16:01:31.768506Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037927 not found
2025-03-31T16:01:31.768522Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037914 not found
2025-03-31T16:01:31.768539Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037905 not found
2025-03-31T16:01:31.768557Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037908 not found
2025-03-31T16:01:31.768579Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037902 not found
2025-03-31T16:01:31.768788Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037891 not found
2025-03-31T16:01:31.768813Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037919 not found
2025-03-31T16:01:31.768841Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found
2025-03-31T16:01:31.768874Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037926 not found
2025-03-31T16:01:31.768900Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037896 not found
2025-03-31T16:01:31.768949Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037909 not found
2025-03-31T16:01:31.768972Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037898 not found
2025-03-31T16:01:31.768992Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037913 not found
2025-03-31T16:01:31.769014Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037903 not found
2025-03-31T16:01:31.769032Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037901 not found
2025-03-31T16:01:31.769050Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037918 not found
2025-03-31T16:01:31.769066Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037929 not found
2025-03-31T16:01:31.769081Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037916 not found
2025-03-31T16:01:31.773419Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037924 not found
2025-03-31T16:01:31.777971Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037895 not found
2025-03-31T16:01:31.778929Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037910 not found
2025-03-31T16:01:31.781508Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037893 not found
2025-03-31T16:01:31.783805Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037915 not found
2025-03-31T16:01:31.783848Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037911 not found
2025-03-31T16:01:31.784452Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037897 not found
2025-03-31T16:01:31.975375Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037936 not found
2025-03-31T16:01:31.984803Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037949 not found
2025-03-31T16:01:31.984830Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037932 not found
2025-03-31T16:01:31.984854Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037956 not found
2025-03-31T16:01:32.007799Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037958 not found
2025-03-31T16:01:32.007840Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037945 not found
2025-03-31T16:01:32.007858Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037934 not found
2025-03-31T16:01:32.007874Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037968 not found
2025-03-31T16:01:32.007890Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037953 not found
2025-03-31T16:01:32.007910Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037967 not found
2025-03-31T16:01:32.007925Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037957 not found
2025-03-31T16:01:32.007942Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037961 not found
2025-03-31T16:01:32.007962Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037943 not found
2025-03-31T16:01:32.020592Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037947 not found
2025-03-31T16:01:32.020620Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037946 not found
2025-03-31T16:01:32.020638Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037938 not found
2025-03-31T16:01:32.020653Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037969 not found
2025-03-31T16:01:32.020669Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037940 not found
2025-03-31T16:01:32.020686Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037933 not found
2025-03-31T16:01:32.029091Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037962 not found
2025-03-31T16:01:32.029132Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037952 not found
2025-03-31T16:01:32.032788Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037939 not found
2025-03-31T16:01:32.032834Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037966 not found
2025-03-31T16:01:32.047769Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037965 not found
2025-03-31T16:01:32.047812Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037963 not found
2025-03-31T16:01:32.047830Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037942 not found
2025-03-31T16:01:32.047847Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037937 not found
2025-03-31T16:01:32.047864Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037955 not found
2025-03-31T16:01:32.047907Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037964 not found
2025-03-31T16:01:32.047935Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037931 not found
2025-03-31T16:01:32.047955Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037950 not found
2025-03-31T16:01:32.047973Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037948 not found
2025-03-31T16:01:32.048010Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037954 not found
2025-03-31T16:01:32.048027Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037951 not found
2025-03-31T16:01:32.048047Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037959 not found
2025-03-31T16:01:32.048066Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037944 not found
2025-03-31T16:01:32.048086Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037941 not found
2025-03-31T16:01:32.048132Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037930 not found
2025-03-31T16:01:32.049394Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037935 not found
2025-03-31T16:01:32.049418Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037960 not found
>> RetryPolicy::RetryWithBatching [GOOD]
>> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeRequest
>> TSchemeshardStatsBatchingTest::ShouldPersistByBatchSize
|71.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest
|71.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest
>> TSchemeshardStatsBatchingTest::ShouldNotBatchWhenDisabled
>> TSchemeshardStatsBatchingTest::ShouldPersistByBatchTimeout
>> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeReserved
>> TStoragePoolsStatsPersistence::SameAggregatedStatsAfterRestart
|71.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream
|71.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream
|71.9%| [TA] {RESULT} $(B)/ydb/core/tx/replication/service/ut_table_writer/test-results/unittest/{meta.json ... results_accumulator.log}
|71.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream
>> TSchemeshardStatsBatchingTest::TopicAccountSizeAndUsedReserveSize
>> StatisticsSaveLoad::Simple [GOOD]
>> StatisticsSaveLoad::Delete [GOOD]
>> TSchemeshardStatsBatchingTest::PeriodicTopicStatsReload
>> THiveTest::DrainWithHiveRestart [GOOD]
>> THiveTest::TestCheckSubHiveForwarding
|71.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile
|71.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile
|71.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile
|71.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest
>> TSchemeShardTTLTests::ShouldSkipDroppedColumn
|71.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login
|71.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login
|71.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login
|71.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut
|71.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut
|71.9%| [LD] {RESULT} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut
------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest >> StatisticsSaveLoad::Simple [GOOD]
Test command err:
2025-03-31T16:01:24.884745Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-03-31T16:01:24.885043Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
2025-03-31T16:01:24.885121Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0011fa/r3tmp/tmpuAyWGV/pdisk_1.dat
2025-03-31T16:01:25.352077Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 29637, node 1
2025-03-31T16:01:25.850342Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T16:01:25.850425Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T16:01:25.850465Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T16:01:25.853913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480
2025-03-31T16:01:25.854447Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
2025-03-31T16:01:25.965437Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:01:25.965588Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:01:25.993230Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:22333
2025-03-31T16:01:26.714040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
2025-03-31T16:01:31.047791Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2
2025-03-31T16:01:31.096681Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:01:31.096865Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:01:31.129245Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2
2025-03-31T16:01:31.131673Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-31T16:01:31.416819Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed
2025-03-31T16:01:31.417465Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed
2025-03-31T16:01:31.418061Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed
2025-03-31T16:01:31.418230Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed
2025-03-31T16:01:31.418476Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed
2025-03-31T16:01:31.418566Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed
2025-03-31T16:01:31.418662Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed
2025-03-31T16:01:31.418779Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed
2025-03-31T16:01:31.418873Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed
2025-03-31T16:01:31.609423Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:01:31.609556Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:01:31.623906Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-31T16:01:31.832239Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:01:31.933863Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor
2025-03-31T16:01:31.933988Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute
2025-03-31T16:01:31.970154Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete
2025-03-31T16:01:31.972235Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute
2025-03-31T16:01:31.972458Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0
2025-03-31T16:01:31.972518Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0
2025-03-31T16:01:31.972566Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0
2025-03-31T16:01:31.972635Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0
2025-03-31T16:01:31.972699Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0
2025-03-31T16:01:31.972754Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete
2025-03-31T16:01:31.974144Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes
2025-03-31T16:01:32.008821Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897
2025-03-31T16:01:32.008937Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1877:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897
2025-03-31T16:01:32.017472Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1888:2606]
2025-03-31T16:01:32.023448Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1918:2621]
2025-03-31T16:01:32.024724Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1918:2621], schemeshard id = 72075186224037897
2025-03-31T16:01:32.032037Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database
2025-03-31T16:01:32.049101Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown
2025-03-31T16:01:32.049163Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table
2025-03-31T16:01:32.049245Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics
2025-03-31T16:01:32.072914Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897
2025-03-31T16:01:32.084609Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 }
2025-03-31T16:01:32.084804Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657
2025-03-31T16:01:32.296033Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete
2025-03-31T16:01:32.494441Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking...
2025-03-31T16:01:32.614927Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing
2025-03-31T16:01:33.424505Z node 1 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing
2025-03-31T16:01:33.425078Z node 1 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database
2025-03-31T16:01:33.462604Z node 1 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]);
2025-03-31T16:01:33.467752Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2261:3089], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:01:33.467869Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2276:3094], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:01:33.467944Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:01:33.475493Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:2, at schemeshard: 72075186224037897
2025-03-31T16:01:33.544080Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2281:3097], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking }
2025-03-31T16:01:33.886926Z node 1 :TX_PROXY ERROR: Actor# [1:2374:3129] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-31T16:01:34.212975Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2396:3141]], StatType[ 0 ], StatRequestsCount[ 1 ]
2025-03-31T16:01:34.213229Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ]
2025-03-31T16:01:34.213319Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2398:3143]
2025-03-31T16:01:34.213388Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2398:3143]
2025-03-31T16:01:34.213879Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2399:2843]
2025-03-31T16:01:34.214225Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2398:3143], server id = [2:2399:2843], tablet id = 72075186224037894, status = OK
2025-03-31T16:01:34.214420Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:2399:2843], node id = 1, have schemeshards count = 0, need schemeshards count = 1
2025-03-31T16:01:34.214487Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1
2025-03-31T16:01:34.214691Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1
2025-03-31T16:01:34.214799Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2396:3141], StatRequests.size() = 1
2025-03-31T16:01:34.381785Z node 1 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=N2Y1YTI4MGMtNjA1ODMwMmUtNjYwY2ZiZmMtMzk1ODUzY2Q=, TxId:
2025-03-31T16:01:34.381873Z node 1 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=N2Y1YTI4MGMtNjA1ODMwMmUtNjYwY2ZiZmMtMzk1ODUzY2Q=, TxId:
2025-03-31T16:01:34.383664Z node 1 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database
2025-03-31T16:01:34.387818Z node 1 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tag AS Uint32; SELECT data FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id AND stat_type = $stat_type AND column_tag = $column_tag;
2025-03-31T16:01:34.464373Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2432:3164]], StatType[ 0 ], StatRequestsCount[ 1 ]
2025-03-31T16:01:34.464581Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ]
2025-03-31T16:01:34.464633Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:2432:3164], StatRequests.size() = 1
2025-03-31T16:01:34.640295Z node 1 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=MWIxOTc4NGUtYzZmNGQ4ZjAtMjYyZTc2YWUtNDYyOTRiYWU=, TxId: 01jqpezmbx5w58swqnf8dhvdrq
2025-03-31T16:01:34.640469Z node 1 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=MWIxOTc4NGUtYzZmNGQ4ZjAtMjYyZTc2YWUtNDYyOTRiYWU=, TxId: 01jqpezmbx5w58swqnf8dhvdrq
2025-03-31T16:01:34.643163Z node 1 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database
2025-03-31T16:01:34.646401Z node 1 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tag AS Uint32; SELECT data FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id AND stat_type = $stat_type AND column_tag = $column_tag;
2025-03-31T16:01:34.679651Z node 1 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=MmI3NWY3MDMtZGVlNmY0MzMtNDg4YTdmZWMtYWI1MDgxNGU=, TxId: 01jqpezmdz98spv58st1t5ar4h
2025-03-31T16:01:34.679785Z node 1 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=MmI3NWY3MDMtZGVlNmY0MzMtNDg4YTdmZWMtYWI1MDgxNGU=, TxId: 01jqpezmdz98spv58st1t5ar4h
>> SystemView::PartitionStatsFields [FAIL]
>> SystemView::PDisksFields
|71.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut
|71.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut
|71.9%| [LD] {RESULT} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut
------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest >> StatisticsSaveLoad::Delete [GOOD]
Test command err:
2025-03-31T16:01:23.894147Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-03-31T16:01:23.894436Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
2025-03-31T16:01:23.894523Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001223/r3tmp/tmpA8GzTJ/pdisk_1.dat
2025-03-31T16:01:24.601672Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 24203, node 1
2025-03-31T16:01:25.039405Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T16:01:25.039476Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T16:01:25.039514Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T16:01:25.042121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480
2025-03-31T16:01:25.042580Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
2025-03-31T16:01:25.189581Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:01:25.189712Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:01:25.228472Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:3375
2025-03-31T16:01:26.118783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
2025-03-31T16:01:31.162128Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2
2025-03-31T16:01:31.205357Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:01:31.205475Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:01:31.249581Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2
2025-03-31T16:01:31.260409Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-31T16:01:31.581008Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed
2025-03-31T16:01:31.581774Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed
2025-03-31T16:01:31.582585Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed
2025-03-31T16:01:31.582781Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed
2025-03-31T16:01:31.583134Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed
2025-03-31T16:01:31.583238Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed
2025-03-31T16:01:31.583328Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed
2025-03-31T16:01:31.583400Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed
2025-03-31T16:01:31.583511Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed
2025-03-31T16:01:31.797927Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:01:31.798049Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:01:31.817134Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-31T16:01:32.040053Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:01:32.116489Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor
2025-03-31T16:01:32.116593Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute
2025-03-31T16:01:32.161184Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete
2025-03-31T16:01:32.161977Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute
2025-03-31T16:01:32.162213Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0
2025-03-31T16:01:32.162270Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0
2025-03-31T16:01:32.162330Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0
2025-03-31T16:01:32.162393Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0
2025-03-31T16:01:32.162441Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0
2025-03-31T16:01:32.162496Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete
2025-03-31T16:01:32.163443Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes
2025-03-31T16:01:32.194184Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897
2025-03-31T16:01:32.194293Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1872:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897
2025-03-31T16:01:32.202834Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2608]
2025-03-31T16:01:32.212747Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1908:2617]
2025-03-31T16:01:32.213482Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1908:2617], schemeshard id = 72075186224037897
2025-03-31T16:01:32.216302Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database
2025-03-31T16:01:32.248537Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown
2025-03-31T16:01:32.248598Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table
2025-03-31T16:01:32.248672Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics
2025-03-31T16:01:32.270832Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897
2025-03-31T16:01:32.279126Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 }
2025-03-31T16:01:32.279319Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657
2025-03-31T16:01:32.521047Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete
2025-03-31T16:01:32.714452Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking...
2025-03-31T16:01:32.881043Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing
2025-03-31T16:01:33.778805Z node 1 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing
2025-03-31T16:01:33.779540Z node 1 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database
2025-03-31T16:01:33.813920Z node 1 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]);
2025-03-31T16:01:33.818246Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2261:3088], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:01:33.818392Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2277:3093], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:01:33.818517Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:01:33.826336Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:2, at schemeshard: 72075186224037897
2025-03-31T16:01:33.963960Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2281:3096], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking }
2025-03-31T16:01:34.266448Z node 1 :TX_PROXY ERROR: Actor# [1:2371:3125] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-31T16:01:34.555703Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2393:3137]], StatType[ 0 ], StatRequestsCount[ 1 ]
2025-03-31T16:01:34.555955Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ]
2025-03-31T16:01:34.556064Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2395:3139]
2025-03-31T16:01:34.556132Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2395:3139]
2025-03-31T16:01:34.556603Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2396:2844]
2025-03-31T16:01:34.556925Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2395:3139], server id = [2:2396:2844], tablet id = 72075186224037894, status = OK
2025-03-31T16:01:34.557179Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:2396:2844], node id = 1, have schemeshards count = 0, need schemeshards count = 1
2025-03-31T16:01:34.557297Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1
2025-03-31T16:01:34.557565Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1
2025-03-31T16:01:34.557647Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2393:3137], StatRequests.size() = 1
2025-03-31T16:01:34.705620Z node 1 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=ZWY3NjAwNzMtNDc2OGVmMC1mMzBjZmY1ZS1hMDM4ZjYzOA==, TxId:
2025-03-31T16:01:34.705706Z node 1 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=ZWY3NjAwNzMtNDc2OGVmMC1mMzBjZmY1ZS1hMDM4ZjYzOA==, TxId:
2025-03-31T16:01:34.706808Z node 1 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database
2025-03-31T16:01:34.710147Z node 1 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id;
2025-03-31T16:01:34.753036Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2429:3160]], StatType[ 0 ], StatRequestsCount[ 1 ]
2025-03-31T16:01:34.753292Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ]
2025-03-31T16:01:34.753345Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:2429:3160], StatRequests.size() = 1
2025-03-31T16:01:34.906599Z node 1 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=ZTQ3ODFkNDEtODgyZjM3Zi05MmZkODM3NS01NWNkYTcxMQ==, TxId:
2025-03-31T16:01:34.906684Z node 1 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=ZTQ3ODFkNDEtODgyZjM3Zi05MmZkODM3NS01NWNkYTcxMQ==, TxId:
2025-03-31T16:01:34.908311Z node 1 :STATISTICS DEBUG: [TQueryBase] Bootstrap.
Database: /Root/Database 2025-03-31T16:01:34.911549Z node 1 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tag AS Uint32; SELECT data FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id AND stat_type = $stat_type AND column_tag = $column_tag; 2025-03-31T16:01:35.030543Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [1:2461:3175]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:01:35.030799Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-03-31T16:01:35.030843Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 3, ReplyToActorId = [1:2461:3175], StatRequests.size() = 1 2025-03-31T16:01:35.187931Z node 1 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=YjUzZDI1YzctNTZmZGE5OGYtMzgzM2Q2OTQtMmRjYzg5ZDc=, TxId: 01jqpezmxg2yf9bsem3rhm89z7 2025-03-31T16:01:35.188165Z node 1 :STATISTICS WARN: [TQueryBase] Finish with BAD_REQUEST, Issues: {
: Error: No data }, SessionId: ydb://session/3?node_id=1&id=YjUzZDI1YzctNTZmZGE5OGYtMzgzM2Q2OTQtMmRjYzg5ZDc=, TxId: 01jqpezmxg2yf9bsem3rhm89z7 >> TSchemeshardStatsBatchingTest::TopicAccountSizeAndUsedReserveSize [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> RetryPolicy::RetryWithBatching [GOOD] Test command err: 2025-03-31T15:53:52.600938Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.600974Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.601001Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T15:53:52.601828Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-03-31T15:53:52.601914Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.601956Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.603263Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.009417s 2025-03-31T15:53:52.604087Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-03-31T15:53:52.604216Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.604482Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.604669Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.006532s 2025-03-31T15:53:52.611971Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. 
Description: 2025-03-31T15:53:52.612030Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.612063Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:53:52.612132Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.006924s 2025-03-31T15:53:52.734953Z :TWriteSession_TestPolicy INFO: Random seed for debugging is 1743436432734926 2025-03-31T15:53:58.073332Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488002480918095452:2258];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:53:58.105864Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T15:54:02.209401Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-31T15:54:02.216766Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-31T15:54:02.980603Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488002480918095452:2258];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:54:02.981171Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012bf/r3tmp/tmpOL7txd/pdisk_1.dat 2025-03-31T15:54:03.831365Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:04.123442Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:07.045166Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488002483915813043:2241];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:54:07.045802Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:54:07.046249Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:07.682046Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:07.693262Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:08.056299Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:08.056330Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 
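The reconnect entries at the top of this test's output show the write session retrying a failed cluster connection after short, slightly varying delays (9.4 ms, 6.5 ms, 6.9 ms) before RetryPolicy::RetryWithBatching eventually reports [GOOD]. A minimal sketch of that jittered-backoff pattern in standard C++ follows; the connect callback, delay bounds, and attempt limit are illustrative assumptions, not the YDB SDK's actual retry implementation.

    #include <chrono>
    #include <functional>
    #include <iostream>
    #include <random>
    #include <thread>

    // Minimal jittered-backoff retry loop, loosely mirroring the
    // "Reconnecting session to cluster cluster in N s" entries above.
    // maxAttempts and baseDelay are illustrative, not SDK defaults.
    bool RetryWithBackoff(const std::function<bool()>& connect,
                          int maxAttempts = 5,
                          std::chrono::milliseconds baseDelay =
                              std::chrono::milliseconds(5)) {
        std::mt19937 rng(std::random_device{}());
        for (int attempt = 1; attempt <= maxAttempts; ++attempt) {
            if (connect()) {
                return true;  // session established
            }
            // Jitter around the base delay, like the near-but-unequal delays
            // (9.4 ms, 6.5 ms, 6.9 ms) printed by the reconnect logic above.
            std::uniform_int_distribution<long long> jitter(0, baseDelay.count());
            auto delay = baseDelay + std::chrono::milliseconds(jitter(rng));
            std::cerr << "Reconnecting in " << delay.count() << " ms\n";
            std::this_thread::sleep_for(delay);
            baseDelay *= 2;  // back off exponentially on repeated failures
        }
        return false;  // exhausted attempts; surface the error to the caller
    }

    int main() {
        int calls = 0;
        bool ok = RetryWithBackoff([&] { return ++calls >= 3; });
        std::cout << (ok ? "connected" : "failed")
                  << " after " << calls << " attempts\n";
    }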
2025-03-31T15:54:08.995313Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:08.995338Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:09.061830Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:09.061861Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:10.030960Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:10.034723Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:10.075547Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:10.079762Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:54:10.206361Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:10.206594Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:10.208340Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:54:10.208376Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:54:10.226358Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T15:54:10.281499Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:54:10.377292Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:54:10.440315Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28389, node 1 2025-03-31T15:54:11.071889Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/0l5c/0012bf/r3tmp/yandexnQhB9P.tmp 2025-03-31T15:54:11.071918Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/0l5c/0012bf/r3tmp/yandexnQhB9P.tmp 2025-03-31T15:54:11.072182Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/0l5c/0012bf/r3tmp/yandexnQhB9P.tmp 2025-03-31T15:54:11.072327Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T15:54:11.122193Z INFO: TTestServer started on Port 20481 GrpcPort 28389 TClient is connected to server localhost:20481 
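The HIVE warnings above walk each node through the same lifecycle: VolatileState: Unknown -> Disconnected -> Connecting -> Connected. A small state-machine sketch of just that happy path follows; the real Hive tracks more states and transition rules, so treat the enum and the Next() helper as illustrative only.

    #include <iostream>

    // Illustrative model of the lifecycle Hive logs above as
    // "VolatileState: Unknown -> Disconnected -> Connecting -> Connected".
    // The real Hive state machine has more states and transition rules;
    // this covers only the happy path visible in this trace.
    enum class EVolatileState { Unknown, Disconnected, Connecting, Connected };

    const char* ToString(EVolatileState s) {
        switch (s) {
            case EVolatileState::Unknown:      return "Unknown";
            case EVolatileState::Disconnected: return "Disconnected";
            case EVolatileState::Connecting:   return "Connecting";
            case EVolatileState::Connected:    return "Connected";
        }
        return "?";
    }

    // Advance one step along the path seen in the log.
    EVolatileState Next(EVolatileState s) {
        switch (s) {
            case EVolatileState::Unknown:      return EVolatileState::Disconnected;
            case EVolatileState::Disconnected: return EVolatileState::Connecting;
            case EVolatileState::Connecting:   return EVolatileState::Connected;
            case EVolatileState::Connected:    return EVolatileState::Connected;
        }
        return s;
    }

    int main() {
        auto s = EVolatileState::Unknown;
        while (s != EVolatileState::Connected) {
            auto n = Next(s);
            std::cout << "VolatileState: " << ToString(s)
                      << " -> " << ToString(n) << "\n";
            s = n;
        }
    }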
PQClient connected to localhost:28389 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:54:11.499771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-03-31T15:54:16.811832Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488002562522474953:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:54:16.812990Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:54:16.811780Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488002561225224548:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:54:16.812213Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488002561225224529:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:54:16.815485Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488002562522474965:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:54:16.814822Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:54:16.831031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-03-31T15:54:17.643565Z node 1 :KQP_WORKLOAD_SERVICE ... 224037892, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-31T16:01:32.785243Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] m0000000000ptest-message-group-id 2025-03-31T16:01:32.785259Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] d0000000000_00000000000000000000_00000_0000000010_00000| 2025-03-31T16:01:32.785273Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] i0000000000 2025-03-31T16:01:32.785306Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-31T16:01:32.785338Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] =========================== 2025-03-31T16:01:32.785399Z node 17 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-03-31T16:01:32.785501Z node 17 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 0 partNo 0 count 10 size 1208 2025-03-31T16:01:32.790867Z node 17 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 0 count 10 size 1208 actorID [17:7488004426108115641:2618] 2025-03-31T16:01:32.791021Z node 17 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. Tablet '72075186224037892' partition 0 offset 0 partno 0 count 10 parts 0 size 1208 2025-03-31T16:01:32.791038Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 1230 WriteNewSizeFromSupportivePartitions# 0 2025-03-31T16:01:32.791116Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-31T16:01:32.791211Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 1, partNo: 0, Offset: 0 is stored on disk 2025-03-31T16:01:32.791249Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-31T16:01:32.791282Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 2, partNo: 0, Offset: 1 is stored on disk 2025-03-31T16:01:32.791316Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-31T16:01:32.791351Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 3, partNo: 0, Offset: 2 is stored on disk 2025-03-31T16:01:32.791376Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-03-31T16:01:32.791408Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 4, partNo: 0, Offset: 3 is stored on disk 2025-03-31T16:01:32.791430Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-31T16:01:32.791463Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 5, partNo: 0, Offset: 4 is stored on disk 2025-03-31T16:01:32.791487Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-31T16:01:32.791519Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 6, partNo: 0, Offset: 5 is stored on disk 2025-03-31T16:01:32.791554Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-31T16:01:32.791594Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 7, partNo: 0, Offset: 6 is stored on disk 2025-03-31T16:01:32.791625Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-31T16:01:32.791658Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 8, partNo: 0, Offset: 7 is stored on disk 2025-03-31T16:01:32.791691Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-31T16:01:32.791726Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 9, partNo: 0, Offset: 8 is stored on disk 2025-03-31T16:01:32.791751Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-03-31T16:01:32.791787Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 10, partNo: 0, Offset: 9 is stored on disk 2025-03-31T16:01:32.791973Z node 17 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 1 requestId: cookie: 1 2025-03-31T16:01:32.792037Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-03-31T16:01:32.792086Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2025-03-31T16:01:32.792130Z node 17 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-03-31T16:01:32.792269Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 0 Topic 'rt3.dc1--test-topic' partition 0 user user offset 0 count 1 size 1024000 endOffset 10 max time lag 0ms effective offset 0 2025-03-31T16:01:32.792315Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 10 2025-03-31T16:01:32.792555Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Reading cookie 0. All data is from uncompacted head. 2025-03-31T16:01:32.792579Z node 17 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-03-31T16:01:32.792665Z node 17 :PERSQUEUE DEBUG: Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp done, result 1743436892781 queuesize 0 startOffset 0 2025-03-31T16:01:32.793352Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|b54d3b50-3731acc-c8ae1e9-c2227cee_0] Write session got write response: sequence_numbers: 1 sequence_numbers: 2 sequence_numbers: 3 sequence_numbers: 4 sequence_numbers: 5 sequence_numbers: 6 sequence_numbers: 7 sequence_numbers: 8 sequence_numbers: 9 sequence_numbers: 10 offsets: 0 offsets: 1 offsets: 2 offsets: 3 offsets: 4 offsets: 5 offsets: 6 offsets: 7 offsets: 8 offsets: 9 already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false write_statistics { persist_duration_ms: 9 } 2025-03-31T16:01:32.793433Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|b54d3b50-3731acc-c8ae1e9-c2227cee_0] Write session: acknoledged message 1 2025-03-31T16:01:32.793486Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|b54d3b50-3731acc-c8ae1e9-c2227cee_0] Write session: acknoledged message 2 2025-03-31T16:01:32.793518Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|b54d3b50-3731acc-c8ae1e9-c2227cee_0] Write session: acknoledged message 3 2025-03-31T16:01:32.793543Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|b54d3b50-3731acc-c8ae1e9-c2227cee_0] Write session: acknoledged message 4 2025-03-31T16:01:32.793577Z :DEBUG: [/Root] MessageGroupId 
[test-message-group-id] SessionId [test-message-group-id|b54d3b50-3731acc-c8ae1e9-c2227cee_0] Write session: acknoledged message 5 2025-03-31T16:01:32.793602Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|b54d3b50-3731acc-c8ae1e9-c2227cee_0] Write session: acknoledged message 6 2025-03-31T16:01:32.793625Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|b54d3b50-3731acc-c8ae1e9-c2227cee_0] Write session: acknoledged message 7 2025-03-31T16:01:32.793657Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|b54d3b50-3731acc-c8ae1e9-c2227cee_0] Write session: acknoledged message 8 2025-03-31T16:01:32.793692Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|b54d3b50-3731acc-c8ae1e9-c2227cee_0] Write session: acknoledged message 9 2025-03-31T16:01:32.793721Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|b54d3b50-3731acc-c8ae1e9-c2227cee_0] Write session: acknoledged message 10 2025-03-31T16:01:32.794054Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|b54d3b50-3731acc-c8ae1e9-c2227cee_0] Write session: close. Timeout = 0 ms 2025-03-31T16:01:32.794114Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|b54d3b50-3731acc-c8ae1e9-c2227cee_0] Write session will now close 2025-03-31T16:01:32.794170Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|b54d3b50-3731acc-c8ae1e9-c2227cee_0] Write session: aborting 2025-03-31T16:01:32.794706Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|b54d3b50-3731acc-c8ae1e9-c2227cee_0] Write session: gracefully shut down, all writes complete 2025-03-31T16:01:32.794758Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|b54d3b50-3731acc-c8ae1e9-c2227cee_0] Write session: destroy 2025-03-31T16:01:32.800046Z node 17 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 7 sessionId: test-message-group-id|b54d3b50-3731acc-c8ae1e9-c2227cee_0 grpc read done: success: 0 data: 2025-03-31T16:01:32.800089Z node 17 :PQ_WRITE_PROXY INFO: session v1 cookie: 7 sessionId: test-message-group-id|b54d3b50-3731acc-c8ae1e9-c2227cee_0 grpc read failed 2025-03-31T16:01:32.800133Z node 17 :PQ_WRITE_PROXY INFO: session v1 cookie: 7 sessionId: test-message-group-id|b54d3b50-3731acc-c8ae1e9-c2227cee_0 grpc closed 2025-03-31T16:01:32.800161Z node 17 :PQ_WRITE_PROXY INFO: session v1 cookie: 7 sessionId: test-message-group-id|b54d3b50-3731acc-c8ae1e9-c2227cee_0 is DEAD 2025-03-31T16:01:32.801207Z node 17 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-03-31T16:01:32.810214Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [17:7488004434698050516:2644] destroyed 2025-03-31T16:01:32.810316Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 
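In the write-session trace above, a single batched write response carries parallel sequence_numbers (1..10) and offsets (0..9) arrays, and the session then acknowledges each message in order. A sketch of that pairing follows, assuming the two arrays are index-aligned as they read in the log; AcknowledgeBatch is a hypothetical helper, not an SDK call.

    #include <cstdint>
    #include <iostream>
    #include <stdexcept>
    #include <vector>

    // Sketch of consuming a batched write response like the one above, where
    // sequence_numbers[i] pairs with offsets[i]. Index alignment is how the
    // arrays read in the log; treat it as an assumption, not an SDK contract.
    void AcknowledgeBatch(const std::vector<uint64_t>& seqNos,
                          const std::vector<uint64_t>& offsets) {
        if (seqNos.size() != offsets.size()) {
            throw std::runtime_error("malformed ack: array size mismatch");
        }
        for (size_t i = 0; i < seqNos.size(); ++i) {
            std::cout << "acknowledged message " << seqNos[i]
                      << " at offset " << offsets[i] << "\n";
        }
    }

    int main() {
        std::vector<uint64_t> seqNos, offsets;
        for (uint64_t i = 0; i < 10; ++i) {
            seqNos.push_back(i + 1);  // SeqNo 1..10, as in the trace
            offsets.push_back(i);     // offsets 0..9
        }
        AcknowledgeBatch(seqNos, offsets);
    }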
>> TPersQueueNewSchemeCacheTest::TestWriteStat1stClass [GOOD] >> TPersQueueNewSchemeCacheTest::TestWriteStat1stClassTopicAPI >> TSchemeshardStatsBatchingTest::PeriodicTopicStatsReload [GOOD] |71.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp |72.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp |72.0%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::TopicAccountSizeAndUsedReserveSize [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:01:36.772402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:01:36.772525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:01:36.772573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:01:36.772611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:01:36.772673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:01:36.772728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:01:36.772822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:01:36.772984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:01:36.773423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:01:36.889139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:01:36.889205Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:01:36.894250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:01:36.894371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:01:36.894538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:01:36.899766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:01:36.899995Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:01:36.900674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:36.900902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 
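The schemeshard boot trace above prints its stats-batching knobs: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s. The general shape of such a batcher (flush when either the size cap or the timeout is reached) is sketched below; TStatsBatcher and its flush mechanics are illustrative assumptions, not the actual TTxStoreStats code.

    #include <chrono>
    #include <iostream>
    #include <vector>

    // Generic size-or-timeout batcher, illustrating the knobs the schemeshard
    // boot trace prints (StatsBatchTimeout 100 ms, StatsMaxBatchSize 100).
    // Only the flush policy shape is the point; the real logic differs.
    struct TStatsBatcher {
        std::chrono::milliseconds Timeout{100};  // StatsBatchTimeout
        size_t MaxBatchSize = 100;               // StatsMaxBatchSize
        std::vector<int> Queue;
        std::chrono::steady_clock::time_point FirstQueued{};

        void Add(int stat) {
            if (Queue.empty()) {
                FirstQueued = std::chrono::steady_clock::now();
            }
            Queue.push_back(stat);
            if (Queue.size() >= MaxBatchSize) {
                Flush("batch full");
            }
        }

        // Called periodically; flushes if the oldest queued entry is stale.
        void Poll() {
            if (!Queue.empty() &&
                std::chrono::steady_clock::now() - FirstQueued >= Timeout) {
                Flush("timeout");
            }
        }

        void Flush(const char* reason) {
            std::cout << "flushing " << Queue.size()
                      << " stats (" << reason << ")\n";
            Queue.clear();
        }
    };

    int main() {
        TStatsBatcher b;
        for (int i = 0; i < 250; ++i) {
            b.Add(i);  // flushes twice on "batch full", leaves 50 queued
        }
        b.Poll();      // would flush the remainder once 100 ms have passed
    }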
2025-03-31T16:01:36.903890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:36.905174Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:36.905236Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:36.905398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:01:36.905450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:36.905491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:01:36.905643Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:01:36.913080Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:01:37.088444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:01:37.088701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:37.088926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:01:37.089193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:01:37.089264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:37.092312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:37.092498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:01:37.092741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:37.092805Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:01:37.092847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:01:37.092895Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:01:37.094987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:37.095060Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:01:37.095138Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 
128 2025-03-31T16:01:37.102950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:37.103024Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:37.103111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:37.103200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:01:37.107548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:01:37.113057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:01:37.113327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:01:37.114595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:37.114787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:01:37.114857Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:37.115222Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:01:37.115312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:37.115506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:01:37.115611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:01:37.132598Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:37.132662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:37.132865Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:37.132920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:01:37.133161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2025-03-31T16:01:37.133219Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:01:37.133315Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:01:37.133344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:37.133378Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:01:37.133406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:37.133620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:01:37.133684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:37.133740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:01:37.133776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:01:37.133845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:01:37.133878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:01:37.133928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:01:37.135717Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:01:37.135845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:01:37.135876Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
DeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 2025-03-31T16:01:38.052345Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877760, Sender [1:1028:2891], Recipient [1:283:2270]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037968897 Status: OK ServerId: [1:1030:2893] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-03-31T16:01:38.052382Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-03-31T16:01:38.052426Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Handle TEvClientConnected, tabletId: 72057594037968897, status: OK, at schemeshard: 72057594046678944 2025-03-31T16:01:38.052640Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 2025-03-31T16:01:38.052754Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268698118, Sender [1:216:2215], Recipient [1:283:2270]: NKikimrHive.TEvDeleteTabletReply Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 2025-03-31T16:01:38.052783Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvHive::TEvDeleteTabletReply 2025-03-31T16:01:38.052847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-03-31T16:01:38.053236Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268698118, Sender [1:216:2215], Recipient [1:283:2270]: NKikimrHive.TEvDeleteTabletReply Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 2025-03-31T16:01:38.053268Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvHive::TEvDeleteTabletReply 2025-03-31T16:01:38.053309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-03-31T16:01:38.055476Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409551][Topic3] Discovered subdomain [OwnerId: 72057594046678944, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186233409551 2025-03-31T16:01:38.055608Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Discovered subdomain [OwnerId: 72057594046678944, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186233409547 2025-03-31T16:01:38.056262Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-31T16:01:38.058359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-03-31T16:01:38.058399Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-31T16:01:38.058518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-03-31T16:01:38.058579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-03-31T16:01:38.058598Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-31T16:01:38.062280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-03-31T16:01:38.062411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 
72057594046678944:4 2025-03-31T16:01:38.062602Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877763, Sender [1:1028:2891], Recipient [1:283:2270]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594037968897 ClientId: [1:1028:2891] ServerId: [1:1030:2893] } 2025-03-31T16:01:38.062639Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-03-31T16:01:38.062685Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Client pipe, to tablet: 72057594037968897, from:72057594046678944 is reset TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-03-31T16:01:38.063291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-03-31T16:01:38.063335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-03-31T16:01:38.063914Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:1044:2907], Recipient [1:283:2270]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:01:38.064011Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:01:38.064054Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-03-31T16:01:38.064200Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [1:548:2482], Recipient [1:283:2270]: NKikimrScheme.TEvNotifyTxCompletion TxId: 104 2025-03-31T16:01:38.064231Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-03-31T16:01:38.064302Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-03-31T16:01:38.064430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-03-31T16:01:38.064463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:1042:2905] 2025-03-31T16:01:38.064657Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [1:1044:2907], Recipient [1:283:2270]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-03-31T16:01:38.064840Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-03-31T16:01:38.064882Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 104 2025-03-31T16:01:38.065563Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [1:1045:2908], Recipient [1:283:2270]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-03-31T16:01:38.065729Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-03-31T16:01:38.065828Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:01:38.066056Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 175us result status StatusSuccess 2025-03-31T16:01:38.066518Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 13 WriteSpeedInBytesPerSecond: 19 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 808 AccountSize: 808 DataSize: 31 UsedReserveSize: 31 } } PQPartitionsInside: 4 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:01:38.067417Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271188001, Sender [1:1046:2909], Recipient [1:283:2270]: NKikimrPQ.TEvPeriodicTopicStats PathId: 4 Generation: 1 Round: 6 DataSize: 151 UsedReserveSize: 151 2025-03-31T16:01:38.067477Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPersQueue::TEvPeriodicTopicStats 2025-03-31T16:01:38.067518Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 4] DataSize 151 UsedReserveSize 151 2025-03-31T16:01:38.067553Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-03-31T16:01:38.067996Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [1:1047:2910], Recipient [1:283:2270]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-03-31T16:01:38.068037Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-03-31T16:01:38.068120Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:01:38.072132Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 158us result status StatusSuccess 2025-03-31T16:01:38.072635Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: 
EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 13 WriteSpeedInBytesPerSecond: 19 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 808 AccountSize: 808 DataSize: 182 UsedReserveSize: 182 } } PQPartitionsInside: 4 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> THiveTest::TestCheckSubHiveForwarding [GOOD] >> THiveTest::TestCheckSubHiveDrain >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-65 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-66 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::PeriodicTopicStatsReload [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:01:37.646048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:01:37.646136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:01:37.646171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:01:37.646202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:01:37.646246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:01:37.646285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:01:37.646590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:01:37.646743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 
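Across the two describe results above, the topic's UsedReserveSize tracks DataSize exactly (31, then 182, with 151 in the periodic stats event) while never exceeding ReserveSize (808). That is consistent with the used reserve being the portion of the reservation actually occupied by data. Assuming that reading — it fits every sample in this log but is inferred, not quoted from schemeshard code — the accounting reduces to a single min():

    #include <algorithm>
    #include <cstdint>
    #include <iostream>

    // Inferred from the describe results above: the reserve counted as used
    // is the data actually stored, capped by the reservation itself. This
    // fits every sample in the log (31, 151, 182 against a reserve of 808)
    // but is an inference, not quoted schemeshard code.
    uint64_t UsedReserveSize(uint64_t dataSize, uint64_t reserveSize) {
        return std::min(dataSize, reserveSize);
    }

    int main() {
        const uint64_t reserve = 808;  // ReserveSize from the describe result
        for (uint64_t data : {uint64_t(31), uint64_t(151), uint64_t(182)}) {
            std::cout << "DataSize " << data << " -> UsedReserveSize "
                      << UsedReserveSize(data, reserve) << "\n";
        }
    }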
2025-03-31T16:01:37.647155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:01:37.746379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:01:37.746435Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:01:37.751371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:01:37.751464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:01:37.751629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:01:37.757052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:01:37.757272Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:01:37.757919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:37.758116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:01:37.760080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:37.761244Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:37.761302Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:37.761470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:01:37.761514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:37.761551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:01:37.761687Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:01:37.768780Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:01:37.906505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:01:37.906722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:37.906913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:01:37.907249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:01:37.907317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:37.909594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 
72057594046678944 2025-03-31T16:01:37.909743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:01:37.909920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:37.909975Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:01:37.910008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:01:37.910039Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:01:37.912082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:37.912137Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:01:37.912178Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:01:37.914029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:37.914077Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:37.914127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:37.914184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:01:37.937168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:01:37.939466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:01:37.939691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:01:37.940689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:37.940820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:01:37.940894Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:37.941165Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:01:37.941218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:37.941397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:01:37.941502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:01:37.943744Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:37.943790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:37.943965Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:37.944031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:01:37.944242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:37.944283Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:01:37.944386Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:01:37.944425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:37.944466Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:01:37.944496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:37.944532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:01:37.944572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:37.944607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:01:37.944635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:01:37.944705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:01:37.944761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:01:37.944806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:01:37.946789Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:01:37.946916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:01:37.946951Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
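The AlterSubDomain transaction above walks a fixed progress-state sequence that the log makes explicit: "Change state for txid 1:0 2 -> 3" under TCreateParts, "3 -> 128" under NSubDomainState::TConfigureParts, "128 -> 240" once TEvOperationPlan arrives in TPropose, and finally TDone reporting "progress is 1/1". A sketch of that observed mapping; the state codes and names are taken from this log only, not from schemeshard headers:

# Observed suboperation states for ESchemeOpAlterSubDomain, as logged above.
# Numeric codes come from the "Change state for txid" lines; names come from
# the ProgressState lines that precede each transition.
OBSERVED_STATES = {
    2: "TCreateParts",        # "no shards to create, do next state"
    3: "TConfigureParts",
    128: "TPropose",          # waits for TEvOperationPlan from the coordinator
    240: "TDone",             # "Part operation is done ... progress is 1/1"
}

def replay(transitions=((2, 3), (3, 128), (128, 240))):
    for src, dst in transitions:
        print(f"Change state {src} -> {dst}: "
              f"{OBSERVED_STATES[src]} -> {OBSERVED_STATES[dst]}")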
schemeshard: 72057594046678944 2025-03-31T16:01:38.416296Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-03-31T16:01:38.416562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:01:38.416678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:01:38.416778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 2, at schemeshard: 72057594046678944 2025-03-31T16:01:38.416821Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TTxInit for Shards, read: 72057594046678944:1, tabletId: 72075186233409546, PathId: [OwnerId: 72057594046678944, LocalPathId: 2], TabletType: PersQueue, at schemeshard: 72057594046678944 2025-03-31T16:01:38.416861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-03-31T16:01:38.416908Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TTxInit for Shards, read: 72057594046678944:2, tabletId: 72075186233409547, PathId: [OwnerId: 72057594046678944, LocalPathId: 2], TabletType: PersQueueReadBalancer, at schemeshard: 72057594046678944 2025-03-31T16:01:38.416931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-31T16:01:38.417043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:01:38.417145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:01:38.417337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 8, at schemeshard: 72057594046678944 2025-03-31T16:01:38.417481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-31T16:01:38.417794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:01:38.417897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:01:38.418285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:01:38.418363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:01:38.418564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:01:38.418650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:01:38.418745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:01:38.418955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:01:38.419052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:01:38.419237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:01:38.419531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 
0, at schemeshard: 72057594046678944 2025-03-31T16:01:38.419731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-03-31T16:01:38.419820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-03-31T16:01:38.419912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-03-31T16:01:38.420182Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-03-31T16:01:38.430257Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-31T16:01:38.430431Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [RootDataErasureManager] Stop 2025-03-31T16:01:38.431532Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435082, Sender [1:564:2494], Recipient [1:564:2494]: NKikimr::NSchemeShard::TEvPrivate::TEvServerlessStorageBilling 2025-03-31T16:01:38.431593Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvServerlessStorageBilling 2025-03-31T16:01:38.432665Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:38.432730Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:38.435569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:01:38.435647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:38.435723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:01:38.435765Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-31T16:01:38.440888Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274399233, Sender [1:600:2494], Recipient [1:564:2494]: NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2025-03-31T16:01:38.440960Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxAllocatorClient::TEvAllocateResult 2025-03-31T16:01:38.440996Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:564:2494] sender: [1:621:2058] recipient: [1:15:2062] 2025-03-31T16:01:38.493716Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [1:620:2538], Recipient [1:564:2494]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-03-31T16:01:38.493784Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-03-31T16:01:38.493940Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:01:38.494219Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 284us result status StatusSuccess 2025-03-31T16:01:38.495044Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { 
Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 1 WriteSpeedInBytesPerSecond: 7 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 7 AccountSize: 17 DataSize: 17 UsedReserveSize: 7 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:01:38.495908Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271188001, Sender [1:622:2539], Recipient [1:564:2494]: NKikimrPQ.TEvPeriodicTopicStats PathId: 2 Generation: 1 Round: 96 DataSize: 19 UsedReserveSize: 7 2025-03-31T16:01:38.495980Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPersQueue::TEvPeriodicTopicStats 2025-03-31T16:01:38.496018Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 2] DataSize 19 UsedReserveSize 7 2025-03-31T16:01:38.496191Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-03-31T16:01:38.496260Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTopicStats on# 0.000000s, queue# 1 2025-03-31T16:01:38.496507Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [1:623:2540], Recipient [1:564:2494]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-03-31T16:01:38.496542Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-03-31T16:01:38.496644Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:01:38.496848Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 177us result status StatusSuccess 2025-03-31T16:01:38.497507Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: 
EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 1 WriteSpeedInBytesPerSecond: 7 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 7 AccountSize: 17 DataSize: 17 UsedReserveSize: 7 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeRequest [GOOD] >> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeReserved [GOOD] |72.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> StatisticsSaveLoad::ForbidAccess [GOOD] >> TSchemeShardTTLTests::BackupCopyHasNoTtlSettings |72.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeRequest [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:01:36.112736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:01:36.112854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:01:36.112894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:01:36.112926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:01:36.118163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:01:36.118264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:01:36.118376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:01:36.118555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: 
Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:01:36.119006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:01:36.223295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:01:36.223369Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:01:36.229304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:01:36.229492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:01:36.229688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:01:36.237927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:01:36.238075Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:01:36.238524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:36.238669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:01:36.240122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:36.241040Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:36.241079Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:36.241173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:01:36.241202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:36.241226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:01:36.241352Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:01:36.248126Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:01:36.383424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:01:36.383676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:36.383905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:01:36.384246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:01:36.384311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:36.387159Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:36.387336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:01:36.387583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:36.387659Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:01:36.387694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:01:36.387728Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:01:36.389841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:36.389903Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:01:36.389943Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:01:36.392203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:36.392262Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:36.392330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:36.392441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:01:36.403377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:01:36.405750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:01:36.405922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:01:36.406891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:36.407020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:01:36.407118Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:36.407390Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:01:36.407459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:36.407601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:01:36.407706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:01:36.409832Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:36.409873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:36.410051Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:36.410105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:01:36.410303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:36.410345Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:01:36.410492Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:01:36.410526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:36.410562Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:01:36.410614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:36.410654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:01:36.410688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:36.410726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:01:36.410769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:01:36.410823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:01:36.410871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:01:36.410918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:01:36.412793Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:01:36.412894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:01:36.412926Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 16975298 DataSize: 16975298 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:01:38.583364Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] TPersQueueReadBalancer::HandleWakeup 2025-03-31T16:01:38.583452Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Send TEvPersQueue::TEvStatus TabletId: 72075186233409546 Cookie: 2 2025-03-31T16:01:38.584255Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Send TEvPeriodicTopicStats PathId: 2 Generation: 2 StatsReportRound: 3 DataSize: 16975298 UsedReserveSize: 0 2025-03-31T16:01:38.584384Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] ProcessPendingStats. PendingUpdates size 0 2025-03-31T16:01:38.584601Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 2] DataSize 16975298 UsedReserveSize 0 2025-03-31T16:01:38.600551Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-03-31T16:01:38.611227Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 2 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:01:38.611463Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 2 took 264us result status StatusSuccess 2025-03-31T16:01:38.612107Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 11 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_REQUEST_UNITS } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 16975298 DataSize: 16975298 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:01:39.259362Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] TPersQueueReadBalancer::HandleWakeup 
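In the METERING_MODE_REQUEST_UNITS dumps above, the topic reserves nothing: ReserveSize and UsedReserveSize stay 0 and AccountSize simply equals DataSize (16975298). Read together with the reserved-capacity dumps elsewhere in this log, the metering mode appears to change only the accounting, never the reported DataSize. A tiny sketch of the request-units branch, inferred from these dumps alone:

# Request-units accounting as suggested by the dumps above; an inference from
# the printed values, not a confirmed schemeshard rule.
def request_units_usage(data_size: int) -> dict:
    # No capacity reservation in this mode; AccountSize tracks DataSize directly.
    return {"ReserveSize": 0, "AccountSize": data_size,
            "DataSize": data_size, "UsedReserveSize": 0}

assert request_units_usage(16975298) == {
    "ReserveSize": 0, "AccountSize": 16975298,
    "DataSize": 16975298, "UsedReserveSize": 0,
}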
2025-03-31T16:01:39.259441Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Send TEvPersQueue::TEvStatus TabletId: 72075186233409546 Cookie: 3 2025-03-31T16:01:39.260164Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Send TEvPeriodicTopicStats PathId: 2 Generation: 2 StatsReportRound: 4 DataSize: 16975298 UsedReserveSize: 0 2025-03-31T16:01:39.260260Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] ProcessPendingStats. PendingUpdates size 0 2025-03-31T16:01:39.260496Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 2] DataSize 16975298 UsedReserveSize 0 2025-03-31T16:01:39.275076Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-03-31T16:01:39.287491Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 2 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:01:39.287650Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 2 took 182us result status StatusSuccess 2025-03-31T16:01:39.288075Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 11 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_REQUEST_UNITS } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 16975298 DataSize: 16975298 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:01:39.331163Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:01:39.331700Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 559us result status StatusSuccess 2025-03-31T16:01:39.332089Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: 
StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 11 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_REQUEST_UNITS } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 16975298 DataSize: 16975298 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:01:39.332788Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186233409547][Topic1] pipe [1:628:2549] connected; active server actors: 1 2025-03-31T16:01:39.350111Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] BALANCER INIT DONE for Topic1: (0, 72075186233409546) (1, 72075186233409546) (2, 72075186233409546) 2025-03-31T16:01:39.350582Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Discovered subdomain [OwnerId: 72057594046678944, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186233409547 2025-03-31T16:01:39.352521Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 2 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:01:39.352690Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 2 took 182us result status StatusSuccess 2025-03-31T16:01:39.353110Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 11 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_REQUEST_UNITS } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: 
Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 16975298 DataSize: 16975298 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:01:39.353247Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] TEvClientConnected TabletId 72057594046678944, NodeId 1, Generation 3 2025-03-31T16:01:39.353773Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] TEvClientConnected TabletId 72075186233409546, NodeId 1, Generation 2 2025-03-31T16:01:39.387053Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186233409547][Topic1] pipe [1:675:2584] connected; active server actors: 1 >> SystemView::AuthUsers_ResultOrder [GOOD] >> SystemView::AuthUsers_TableRange ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeReserved [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:01:36.283044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:01:36.283170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:01:36.283226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:01:36.283270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:01:36.283325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:01:36.283364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:01:36.283450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:01:36.283608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:01:36.284034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:01:36.379820Z node 1 
:FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:01:36.379889Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:01:36.388617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:01:36.388731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:01:36.388912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:01:36.395444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:01:36.395771Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:01:36.396496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:36.396698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:01:36.399134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:36.400739Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:36.400822Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:36.401041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:01:36.401098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:36.401150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:01:36.401345Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:01:36.409672Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:01:36.598107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:01:36.598344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:36.598546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:01:36.598779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:01:36.598848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:36.601340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:36.601509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, 
operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:01:36.601730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:36.601826Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:01:36.601879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:01:36.601931Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:01:36.604068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:36.604125Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:01:36.604164Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:01:36.606027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:36.606073Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:36.606140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:36.606197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:01:36.610107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:01:36.615236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:01:36.615501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:01:36.616579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:36.616731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:01:36.616795Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:36.617118Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:01:36.617174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:36.617339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:01:36.617439Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:01:36.620284Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:36.620334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:36.620566Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:36.620630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:01:36.620850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:36.620903Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:01:36.621004Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:01:36.621037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:36.621077Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:01:36.621112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:36.621154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:01:36.621213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:36.621273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:01:36.621307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:01:36.621365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:01:36.621413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:01:36.621460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:01:36.623497Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:01:36.623612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:01:36.623650Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
7594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 136598400 AccountSize: 136598400 DataSize: 16975298 UsedReserveSize: 16975298 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:01:38.715509Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] TPersQueueReadBalancer::HandleWakeup 2025-03-31T16:01:38.715595Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Send TEvPersQueue::TEvStatus TabletId: 72075186233409546 Cookie: 2 2025-03-31T16:01:38.716026Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186233409546, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 45532800 PartitionConfig{ LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 TotalPartitions: 3 } 2025-03-31T16:01:38.716137Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186233409546, Partition: 2, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 45532800 PartitionConfig{ LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 TotalPartitions: 3 } 2025-03-31T16:01:38.716224Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186233409546, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 16975298 UsedReserveSize: 16975298 ReserveSize: 45532800 PartitionConfig{ LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 TotalPartitions: 3 } 2025-03-31T16:01:38.716605Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Send TEvPeriodicTopicStats PathId: 2 Generation: 2 StatsReportRound: 2 DataSize: 16975298 UsedReserveSize: 16975298 2025-03-31T16:01:38.716716Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] ProcessPendingStats. 
PendingUpdates size 0 2025-03-31T16:01:38.716953Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 2] DataSize 16975298 UsedReserveSize 16975298 2025-03-31T16:01:38.735300Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-03-31T16:01:38.747533Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 2 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:01:38.747695Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 2 took 178us result status StatusSuccess 2025-03-31T16:01:38.748048Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 136598400 AccountSize: 136598400 DataSize: 16975298 UsedReserveSize: 16975298 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:01:39.375986Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] TPersQueueReadBalancer::HandleWakeup 2025-03-31T16:01:39.376070Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Send TEvPersQueue::TEvStatus TabletId: 72075186233409546 Cookie: 3 2025-03-31T16:01:39.376456Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186233409546, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 45532800 PartitionConfig{ LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 TotalPartitions: 3 } 2025-03-31T16:01:39.376565Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186233409546, Partition: 2, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 45532800 PartitionConfig{ LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 TotalPartitions: 3 } 2025-03-31T16:01:39.376617Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186233409546, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 16975298 
UsedReserveSize: 16975298 ReserveSize: 45532800 PartitionConfig{ LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 TotalPartitions: 3 } 2025-03-31T16:01:39.376996Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Send TEvPeriodicTopicStats PathId: 2 Generation: 2 StatsReportRound: 3 DataSize: 16975298 UsedReserveSize: 16975298 2025-03-31T16:01:39.377111Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] ProcessPendingStats. PendingUpdates size 0 2025-03-31T16:01:39.377321Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 2] DataSize 16975298 UsedReserveSize 16975298 2025-03-31T16:01:39.392921Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-03-31T16:01:39.403664Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 2 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:01:39.403872Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 2 took 231us result status StatusSuccess 2025-03-31T16:01:39.404329Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 136598400 AccountSize: 136598400 DataSize: 16975298 UsedReserveSize: 16975298 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:01:39.446407Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:01:39.446633Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 287us result status StatusSuccess 2025-03-31T16:01:39.447248Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: 
Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 136598400 AccountSize: 136598400 DataSize: 16975298 UsedReserveSize: 16975298 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TestYmqHttpProxy::TestSendMessageEmptyQueueUrl |72.0%| [TA] $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TPersQueueNewSchemeCacheTest::CheckGrpcReadNoDC [GOOD] >> TestYmqHttpProxy::TestSendMessage |72.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw |72.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw >> TestKinesisHttpProxy::DifferentContentTypes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest >> StatisticsSaveLoad::ForbidAccess [GOOD] Test command err: 2025-03-31T16:01:25.052678Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:01:25.052974Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:01:25.053058Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0011eb/r3tmp/tmpAMyHKO/pdisk_1.dat 2025-03-31T16:01:25.823972Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13490, node 1 2025-03-31T16:01:26.159416Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:01:26.159482Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:01:26.159518Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:01:26.173156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:01:26.173757Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:01:26.293749Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:01:26.293890Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:01:26.324609Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10846 2025-03-31T16:01:27.002225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:01:32.255846Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-31T16:01:32.312743Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:01:32.312875Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:01:32.353405Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:01:32.360270Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:01:32.691538Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:01:32.692390Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:01:32.693148Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:01:32.693354Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:01:32.693615Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:01:32.693704Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:01:32.693801Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:01:32.693972Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:01:32.694095Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:01:32.907853Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:01:32.908004Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:01:32.925345Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:01:33.145420Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:01:33.215538Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-31T16:01:33.215659Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-31T16:01:33.257624Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-31T16:01:33.258519Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-31T16:01:33.258757Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-31T16:01:33.258823Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-31T16:01:33.258875Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-31T16:01:33.258939Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-31T16:01:33.259020Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-31T16:01:33.259108Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-31T16:01:33.259722Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-31T16:01:33.290855Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-31T16:01:33.290975Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1872:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-31T16:01:33.299013Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2608] 2025-03-31T16:01:33.308602Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1908:2617] 2025-03-31T16:01:33.309403Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1908:2617], schemeshard id = 72075186224037897 2025-03-31T16:01:33.312289Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-31T16:01:33.340360Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-31T16:01:33.340427Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-31T16:01:33.340508Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-31T16:01:33.364939Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-31T16:01:33.386864Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-31T16:01:33.387047Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-31T16:01:33.602296Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-31T16:01:33.779265Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-31T16:01:33.934067Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-31T16:01:35.251590Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2241:3074], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:01:35.251742Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:01:35.278884Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-31T16:01:36.022109Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2546:3122], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:01:36.022287Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:01:36.023987Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2551:3126]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:01:36.024309Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-31T16:01:36.024456Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2553:3128] 2025-03-31T16:01:36.024541Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2553:3128] 2025-03-31T16:01:36.025194Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2554:2996] 2025-03-31T16:01:36.025509Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2553:3128], server id = [2:2554:2996], tablet id = 72075186224037894, status = OK 2025-03-31T16:01:36.025720Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:2554:2996], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-03-31T16:01:36.025807Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-03-31T16:01:36.026080Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-31T16:01:36.026154Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2551:3126], StatRequests.size() = 1 2025-03-31T16:01:36.048067Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2558:3132], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:01:36.048230Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:01:36.048712Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2563:3137], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:01:36.055989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-03-31T16:01:36.270035Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-03-31T16:01:36.270144Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-03-31T16:01:36.382567Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2553:3128], schemeshard count = 1 2025-03-31T16:01:36.950671Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2565:3139], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-03-31T16:01:37.244552Z node 1 :TX_PROXY ERROR: Actor# [1:2687:3213] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:01:37.255982Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2710:3229]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:01:37.256162Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-31T16:01:37.256210Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:2710:3229], StatRequests.size() = 1 2025-03-31T16:01:37.316302Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqpeznq8259mvb2z1a77wrjt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGU0ZTk3YTktMmU5MjU3MDgtY2I1MTQzNWQtY2VkYTA0NDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:01:37.527499Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:2789:3259], for# user@builtin, access# DescribeSchema 2025-03-31T16:01:37.527588Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:2789:3259], for# user@builtin, access# DescribeSchema 2025-03-31T16:01:37.541120Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:2779:3255], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:17: Error: At function: KiReadTable!
:2:17: Error: Cannot find table 'db.[/Root/Database/.metadata/_statistics]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:01:37.543067Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmI1ZTI0YzUtMjE4ZjhlYTgtOTU0YTdmYzYtMjY5MjM2ZTE=, ActorId: [1:2770:3247], ActorState: ExecuteState, TraceId: 01jqpezq6cc0yxcywpbtw7tpyz, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: >> THiveTest::TestCheckSubHiveDrain [GOOD] >> THiveTest::TestCheckSubHiveMigration >> TestYmqHttpProxy::TestCreateQueueWithSameNameAndSameParams >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-false [GOOD] >> SystemView::TopPartitionsByCpuRanges [GOOD] >> SystemView::TopPartitionsByCpuFollowers >> TSchemeShardTTLTests::BackupCopyHasNoTtlSettings [GOOD] >> TPQTestSlow::TestWriteVeryBigMessage [GOOD] |72.0%| [TA] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/test-results/unittest/{meta.json ... results_accumulator.log} |72.0%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw |72.0%| [TA] $(B)/ydb/core/statistics/database/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TStoragePoolsStatsPersistence::SameAggregatedStatsAfterRestart [GOOD] >> TestKinesisHttpProxy::UnauthorizedGetShardIteratorRequest >> TestKinesisHttpProxy::MissingAction >> TestKinesisHttpProxy::TestPing ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueNewSchemeCacheTest::CheckGrpcReadNoDC [GOOD] Test command err: 2025-03-31T16:01:08.224958Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004331533135548:2082];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0009e3/r3tmp/tmpqKYF8T/pdisk_1.dat 2025-03-31T16:01:08.741102Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-31T16:01:08.768953Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-31T16:01:08.769113Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488004331901091869:2267];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:01:08.769150Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:01:08.946271Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:01:09.492604Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:01:09.492737Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:01:09.494442Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:01:09.494545Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:01:09.500652Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:01:09.500784Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:01:09.501979Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:01:09.505924Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20972, node 1 2025-03-31T16:01:09.872104Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/0l5c/0009e3/r3tmp/yandex4oiU4F.tmp 2025-03-31T16:01:09.872132Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/0l5c/0009e3/r3tmp/yandex4oiU4F.tmp 2025-03-31T16:01:09.872358Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/0l5c/0009e3/r3tmp/yandex4oiU4F.tmp 2025-03-31T16:01:09.872499Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:01:10.044585Z INFO: TTestServer started on Port 2985 GrpcPort 20972 TClient is connected to server localhost:2985 PQClient connected to localhost:20972 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:01:10.800010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:01:10.974880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-03-31T16:01:13.198589Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488004331533135548:2082];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:01:13.198671Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:01:13.226289Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488004331901091869:2267];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:01:13.270649Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:01:14.818919Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004357302940425:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:01:14.819074Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:01:14.819173Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004357302940445:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:01:14.851937Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-03-31T16:01:14.855757Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004357302940479:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:01:14.855905Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:01:14.876252Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488004357302940447:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-03-31T16:01:15.137227Z node 1 :TX_PROXY ERROR: Actor# [1:7488004357302940535:2775] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:01:15.168096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:01:15.315382Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488004361597907852:2353], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:01:15.317439Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NWE1NzhhNS1hN2Q3YjJjNS1iZmRhZDBmOC0yMDM5ODdiZQ==, ActorId: [1:7488004357302940421:2339], ActorState: ExecuteState, TraceId: 01jqpez1092he2tknbf84a83xg, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:01:15.319894Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T16:01:15.349679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:01:15.547696Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-03-31T16:01:16.022588Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jqpez1yd9507bhxwa2qjhswm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGM5MDdjMjYtMWU1Mjk5ZDMtNTg0NTE2Y2MtZDhhZGI5Nzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7488004365892875610:3121] === CheckClustersList. Ok WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2025-03-31T16:01:22.448575Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7488004331533135795:2130], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:01:22.448839Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7488004331533135795:2130], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /Root PathId: Partial: 0 } TClient::Ls response: 2025-03-31T16:01:22.448941Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7488004331533135795:2130], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /Root PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7488004340123070880:2454] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 14 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1743436870889 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-31T16:01:22.449028Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7488004331533135795:2130], cacheItem# { Subscriber: { Subscriber: [1:7488004340123070880:2454] DomainOwnerId: 720575 ... false SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 17 IsSync: true Partial: 0 } 2025-03-31T16:01:40.179622Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [3:7488004413513648588:2128], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /Root/PQ/Config/V2/Cluster PathId: Partial: 0 } 2025-03-31T16:01:40.179672Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [3:7488004413513648588:2128], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /Root/PQ/Config/V2/Cluster PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [3:7488004430693518677:2717] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 18 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1743436892134 PathId: [OwnerId: 72057594046644480, LocalPathId: 10] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, by pathId# nullptr 2025-03-31T16:01:40.179717Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7488004413513648588:2128], cacheItem# { Subscriber: { Subscriber: [3:7488004430693518677:2717] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 18 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1743436892134 PathId: [OwnerId: 72057594046644480, LocalPathId: 10] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { Path: Root/PQ/Config/V2/Cluster TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpUnknown RedirectRequired: true ShowPrivatePath: false SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 18 IsSync: true Partial: 0 } 2025-03-31T16:01:40.179771Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [3:7488004413513648588:2128], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /Root/PQ/Config/V2/Versions PathId: Partial: 0 
} 2025-03-31T16:01:40.179829Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [3:7488004413513648588:2128], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /Root/PQ/Config/V2/Versions PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [3:7488004434988486226:2904] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 18 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1743436892470 PathId: [OwnerId: 72057594046644480, LocalPathId: 12] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, by pathId# nullptr 2025-03-31T16:01:40.179893Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7488004413513648588:2128], cacheItem# { Subscriber: { Subscriber: [3:7488004434988486226:2904] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 18 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1743436892470 PathId: [OwnerId: 72057594046644480, LocalPathId: 12] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { Path: Root/PQ/Config/V2/Versions TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpUnknown RedirectRequired: true ShowPrivatePath: false SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 18 IsSync: true Partial: 0 } 2025-03-31T16:01:40.180144Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7488004469348227286:4980], recipient# [3:7488004469348227284:2608], result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/PQ/Config/V2/Cluster TableId: [72057594046644480:10:1] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: false SyncVersion: true Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 2 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } },{ Path: Root/PQ/Config/V2/Cluster TableId: [72057594046644480:10:1] RequestType: ByPath Operation: OpUnknown RedirectRequired: true ShowPrivatePath: false SyncVersion: true Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 2 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-31T16:01:40.180294Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7488004469348227287:4981], recipient# [3:7488004469348227285:2609], result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/PQ/Config/V2/Versions TableId: [72057594046644480:12:1] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: false SyncVersion: true Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 2 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } },{ Path: Root/PQ/Config/V2/Versions 
TableId: [72057594046644480:12:1] RequestType: ByPath Operation: OpUnknown RedirectRequired: true ShowPrivatePath: false SyncVersion: true Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 2 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-31T16:01:40.181315Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7488004413513648588:2128], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: TableId: [72057594046644480:10:0] RequestType: ByTableId Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:01:40.181420Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7488004413513648588:2128], cacheItem# { Subscriber: { Subscriber: [3:7488004430693518677:2717] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 18 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1743436892134 PathId: [OwnerId: 72057594046644480, LocalPathId: 10] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { Path: TableId: [72057594046644480:10:0] RequestType: ByTableId Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:01:40.181558Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7488004413513648588:2128], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: TableId: [72057594046644480:12:0] RequestType: ByTableId Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:01:40.181638Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7488004413513648588:2128], cacheItem# { Subscriber: { Subscriber: [3:7488004434988486226:2904] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 18 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1743436892470 PathId: [OwnerId: 72057594046644480, LocalPathId: 12] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { Path: TableId: [72057594046644480:12:0] RequestType: ByTableId Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:01:40.181805Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7488004469348227290:4982], recipient# [3:7488004413513648568:2290], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/PQ/Config/V2/Cluster TableId: [72057594046644480:10:1] RequestType: ByTableId Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 2 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } 
ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-31T16:01:40.181908Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7488004469348227291:4983], recipient# [3:7488004413513648568:2290], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/PQ/Config/V2/Versions TableId: [72057594046644480:12:1] RequestType: ByTableId Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 2 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-31T16:01:40.182740Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7488004413513648588:2128], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:01:40.182832Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7488004413513648588:2128], cacheItem# { Subscriber: { Subscriber: [3:7488004413513649084:2452] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 29 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1743436888130 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:01:40.182970Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7488004469348227292:4984], recipient# [3:7488004413513648568:2290], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 2 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } |72.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut |72.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut |72.0%| [TA] {RESULT} $(B)/ydb/core/statistics/database/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |72.1%| [LD] {RESULT} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::BackupCopyHasNoTtlSettings [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-31T16:01:40.904250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:01:40.904377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:01:40.904421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:01:40.904456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:01:40.904502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:01:40.904532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:01:40.904620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:01:40.904709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:01:40.905123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:01:41.022514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:01:41.022593Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:01:41.032342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:01:41.032785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:01:41.032967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:01:41.041910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:01:41.042242Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:01:41.042965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:41.043435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:01:41.045986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:41.047527Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:41.047610Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:41.047720Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:01:41.047773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:41.047812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:01:41.048076Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:01:41.060143Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-03-31T16:01:41.301339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:01:41.301676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:41.301959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:01:41.302256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:01:41.302334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:41.312206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:41.312405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:01:41.312673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:41.312752Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:01:41.312794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:01:41.312834Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:01:41.320005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:41.320101Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:01:41.320146Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:01:41.328208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:41.328283Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:41.328339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, 
operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:41.328437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:01:41.332744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:01:41.339982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:01:41.340282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:01:41.341510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:41.341719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:01:41.341780Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:41.342091Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:01:41.342154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:41.342351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:01:41.342454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:01:41.348376Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:41.348452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:41.348671Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:41.348741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:01:41.349203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:41.349258Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:01:41.349419Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:01:41.349457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:41.349522Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 
progress is 1/1 2025-03-31T16:01:41.349558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:41.349598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:01:41.349644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:41.349683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:01:41.349723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:01:41.349803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:01:41.349854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:01:41.349915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:01:41.352187Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:01:41.352339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:01:41.352386Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-03-31T16:01:41.836778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-03-31T16:01:41.843694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-31T16:01:41.846098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2025-03-31T16:01:41.846642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 2 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1894 } } 2025-03-31T16:01:41.846705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-03-31T16:01:41.846848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 2 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1894 } } 2025-03-31T16:01:41.846957Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 2 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 
CpuTimeUsec: 1894 } } 2025-03-31T16:01:41.847663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 410 RawX2: 4294969675 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-03-31T16:01:41.847716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-03-31T16:01:41.847855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 410 RawX2: 4294969675 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-03-31T16:01:41.847931Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-03-31T16:01:41.848088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 410 RawX2: 4294969675 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-03-31T16:01:41.848187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:41.848229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged CollectSchemaChanged: false 2025-03-31T16:01:41.852267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:01:41.852700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:01:41.873932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 309 RawX2: 4294969592 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-03-31T16:01:41.874005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-03-31T16:01:41.874197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 309 RawX2: 4294969592 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-03-31T16:01:41.874257Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-03-31T16:01:41.874347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 309 RawX2: 4294969592 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-03-31T16:01:41.874435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:41.874480Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 
72057594046678944 2025-03-31T16:01:41.874520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-03-31T16:01:41.874555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-03-31T16:01:41.874584Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-03-31T16:01:41.879031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:01:41.879581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:01:41.879703Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 102:0ProgressState, operation type TxCopyTable 2025-03-31T16:01:41.879777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 102:0, name: CopyTableBarrier, done: 0, blocked: 1, parts count: 1 2025-03-31T16:01:41.879819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 102, done: 0, blocked: 1 2025-03-31T16:01:41.879924Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 102:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 102 Name: CopyTableBarrier }, at tablet# 72057594046678944 2025-03-31T16:01:41.879966Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 240 -> 240 2025-03-31T16:01:41.888433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:01:41.888515Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-03-31T16:01:41.888669Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-31T16:01:41.888716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T16:01:41.888759Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-31T16:01:41.888794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T16:01:41.888859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-03-31T16:01:41.888947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:337:2316] message: TxId: 102 2025-03-31T16:01:41.889011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T16:01:41.889072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-03-31T16:01:41.889110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-03-31T16:01:41.889297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-03-31T16:01:41.889339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-31T16:01:41.891822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-31T16:01:41.891903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter 
[1:440:2401] TestWaitNotification: OK eventTxId 102 2025-03-31T16:01:41.892563Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTableCopy" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:01:41.892890Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTableCopy" took 334us result status StatusSuccess 2025-03-31T16:01:41.893500Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTableCopy" PathDescription { Self { Name: "TTLEnabledTableCopy" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTableCopy" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "ts" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: true IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-false [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:01:10.742303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:01:10.742407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE:
BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:01:10.742457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:01:10.742513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:01:10.742563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:01:10.742594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:01:10.742659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:01:10.742811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:01:10.755981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:01:10.893297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:01:10.893346Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:01:10.900091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:01:10.900246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:01:10.900399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:01:10.907633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:01:10.907875Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:01:10.908583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:10.908807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:01:10.910846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:10.912124Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:10.912192Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:10.912373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:01:10.912430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:10.912465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:01:10.912706Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:01:10.920660Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:01:11.115032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: 
"MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:01:11.115300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:11.115566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:01:11.115903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:01:11.115960Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:11.132275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:11.132458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:01:11.132682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:11.132744Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:01:11.132779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:01:11.132811Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:01:11.138404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:11.138497Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:01:11.138536Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:01:11.144020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:11.144089Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:11.144134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:11.144202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:01:11.148425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:01:11.156099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:01:11.156315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:01:11.157479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:11.157653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:01:11.157703Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:11.158029Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:01:11.158095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:11.158326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:01:11.158433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:01:11.164205Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:11.164295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:11.164493Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:11.164579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:01:11.164818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:11.164863Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:01:11.164953Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:01:11.164990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:11.165026Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:01:11.165057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:11.165094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:01:11.165141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:11.165195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:01:11.165233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:01:11.165294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:01:11.165328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 
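The trace above steps a single schemeshard operation through its numbered states (2 -> 3, 3 -> 128, 128 -> 240) before the publication acks arrive. As a reading aid, a minimal sketch for summarizing those transitions offline; only the "Change state for txid" wording is taken from the log, and the helper name `state_transitions` is hypothetical:

```python
import re
from collections import defaultdict

# Sketch only: the "Change state for txid <tx>:<part> <from> -> <to>" wording
# is copied from the trace above; the rest of this helper is an assumption.
TRANSITION = re.compile(
    r"Change state for txid (?P<tx>\d+):(?P<part>\d+) (?P<src>\d+) -> (?P<dst>\d+)"
)

def state_transitions(log_text: str) -> dict:
    """Group the ordered state transitions by (txid, part)."""
    chains = defaultdict(list)
    for m in TRANSITION.finditer(log_text):
        chains[(m["tx"], m["part"])].append((int(m["src"]), int(m["dst"])))
    return dict(chains)
```

Run over the trace above, the chain for operation 1:0 ends at 240, matching the "TDone opId# 1:0 ProgressState" entries that accompany it.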
2025-03-31T16:01:11.165386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:01:11.171587Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:01:11.171731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:01:11.171773Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... e to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:41.697482Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:01:41.697881Z node 27 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:01:41.706941Z node 27 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [27:122:2148] sender: [27:238:2058] recipient: [27:15:2062] 2025-03-31T16:01:41.719844Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:01:41.720170Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:41.720431Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:01:41.720687Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:01:41.720770Z node 27 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:41.723572Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:41.723737Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:01:41.724012Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:41.724107Z node 27 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:01:41.724184Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:01:41.724241Z node 27 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:01:41.726486Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:41.726567Z node 27 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 
ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:01:41.726639Z node 27 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:01:41.728439Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:41.728509Z node 27 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:41.728587Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:41.728677Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:01:41.728873Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:01:41.730607Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:01:41.730857Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:01:41.731907Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:41.732100Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 115964119147 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:01:41.732176Z node 27 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:41.732512Z node 27 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:01:41.732597Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:41.732889Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:01:41.733016Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:01:41.735163Z node 27 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:41.735237Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:41.735526Z node 27 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:41.735602Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [27:205:2207], at schemeshard: 72057594046678944, txId: 1, 
path id: 1 2025-03-31T16:01:41.735742Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:41.735821Z node 27 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:01:41.736049Z node 27 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:01:41.736120Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:41.736189Z node 27 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:01:41.736251Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:41.736336Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:01:41.736410Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:41.736472Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:01:41.736528Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:01:41.736627Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:01:41.736695Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:01:41.736756Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:01:41.737989Z node 27 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:01:41.738144Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:01:41.738207Z node 27 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-03-31T16:01:41.738279Z node 27 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-03-31T16:01:41.738348Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:01:41.738490Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-03-31T16:01:41.741303Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-03-31T16:01:41.742020Z node 27 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-03-31T16:01:41.743326Z node 27 :TX_PROXY DEBUG: actor# [27:268:2259] Bootstrap 2025-03-31T16:01:41.779523Z node 27 :TX_PROXY DEBUG: actor# [27:268:2259] Become StateWork (SchemeCache [27:273:2264]) 2025-03-31T16:01:41.783063Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: 
"Uint64" } Columns { Name: "modified_at" Type: "DyNumber" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ColumnUnit: UNIT_AUTO } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:01:41.783677Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:01:41.783873Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "DyNumber" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ColumnUnit: UNIT_AUTO } }, at schemeshard: 72057594046678944 2025-03-31T16:01:41.784551Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: To enable TTL on integral type column 'ValueSinceUnixEpochModeSettings' should be specified, at schemeshard: 72057594046678944 2025-03-31T16:01:41.785995Z node 27 :TX_PROXY DEBUG: actor# [27:268:2259] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-31T16:01:41.789380Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "To enable TTL on integral type column \'ValueSinceUnixEpochModeSettings\' should be specified" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:01:41.789678Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: To enable TTL on integral type column 'ValueSinceUnixEpochModeSettings' should be specified, operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2025-03-31T16:01:41.790275Z node 27 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/slow/unittest >> TPQTestSlow::TestWriteVeryBigMessage [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:108:2057] recipient: [1:101:2135] 2025-03-31T16:00:43.055930Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:00:43.056040Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927938 is [1:153:2174] sender: [1:154:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:179:2057] recipient: [1:14:2061] 2025-03-31T16:00:43.136382Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:00:43.182406Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 
LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-03-31T16:00:43.184076Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:185:2198] 2025-03-31T16:00:43.192461Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:185:2198] 2025-03-31T16:00:43.194801Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:186:2199] 2025-03-31T16:00:43.203629Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:186:2199] 2025-03-31T16:00:43.224259Z node 1 :PERSQUEUE INFO: new Cookie default|5fed4083-7e07c742-d51879ee-f21610f6_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-31T16:00:43.246146Z node 1 :PERSQUEUE INFO: new Cookie default|3dc312d4-b0068325-4a19898b-d9704e58_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-31T16:00:43.304459Z node 1 :PERSQUEUE INFO: new Cookie default|1bdd4bb8-a1b07610-42222ed1-e809f192_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:245:2057] recipient: [1:99:2134] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:248:2057] recipient: [1:247:2248] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:249:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [1:250:2249] sender: [1:251:2057] recipient: [1:247:2248] 2025-03-31T16:00:43.528008Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:00:43.528085Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-31T16:00:43.528803Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:299:2290] 2025-03-31T16:00:43.546075Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:300:2291] 2025-03-31T16:00:43.604530Z node 1 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-31T16:00:43.604635Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [1:299:2290] 2025-03-31T16:00:43.617256Z node 1 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
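Across this PERSQUEUE trace, every tablet restart re-bootstraps the partitions with a higher generation ("partition 0 generation 2" before the restart, "generation 3" after). A hedged sketch of the kind of invariant check one might run over such a dump; the "init complete" line shape is copied from the log, while `check_generations` itself is a hypothetical helper, not part of the test harness:

```python
import re

# Assumption: the "init complete for topic '<t>' partition <p> generation <g>"
# shape seen above holds for every bootstrap line in the dump.
INIT_COMPLETE = re.compile(
    r"init complete for topic '(?P<topic>[^']+)' partition (?P<part>\d+) "
    r"generation (?P<gen>\d+)"
)

def check_generations(log_text: str) -> None:
    """Fail if any partition ever comes back with a non-increasing generation."""
    last = {}
    for m in INIT_COMPLETE.finditer(log_text):
        key = (m["topic"], int(m["part"]))
        gen = int(m["gen"])
        assert key not in last or gen > last[key], f"generation regressed for {key}"
        last[key] = gen
```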
2025-03-31T16:00:43.617356Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [1:300:2291] Leader for TabletID 72057594037927937 is [1:250:2249] sender: [1:332:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:103:2057] recipient: [2:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:103:2057] recipient: [2:101:2135] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:108:2057] recipient: [2:101:2135] 2025-03-31T16:00:44.672003Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:00:44.672087Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:149:2057] recipient: [2:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:149:2057] recipient: [2:147:2170] Leader for TabletID 72057594037927938 is [2:153:2174] sender: [2:154:2057] recipient: [2:147:2170] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:179:2057] recipient: [2:14:2061] 2025-03-31T16:00:44.727323Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:00:44.728193Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 2 actor [2:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2025-03-31T16:00:44.728896Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:185:2198] 2025-03-31T16:00:44.733542Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [2:185:2198] 2025-03-31T16:00:44.740195Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [2:186:2199] 2025-03-31T16:00:44.742101Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [2:186:2199] 2025-03-31T16:00:44.756942Z node 2 :PERSQUEUE INFO: new Cookie default|24c21185-6b4f6c4e-a3c42891-57b22cfc_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-31T16:00:44.783268Z node 2 :PERSQUEUE INFO: new Cookie default|1efe9b5f-5c3ead5d-6ad18a0e-6e106f93_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-31T16:00:44.830470Z node 2 :PERSQUEUE INFO: new Cookie default|b2e59bf0-b2a035ee-3458038c-327a012f_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default !Reboot 72057594037927937 (actor [2:107:2139]) on event NKikimr::TEvPersQueue::TEvOffsets ! 
Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:244:2057] recipient: [2:99:2134] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:246:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:248:2057] recipient: [2:247:2247] Leader for TabletID 72057594037927937 is [2:249:2248] sender: [2:250:2057] recipient: [2:247:2247] 2025-03-31T16:00:44.947485Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:00:44.947566Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-31T16:00:44.948453Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:298:2289] 2025-03-31T16:00:44.950845Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [2:299:2290] 2025-03-31T16:00:44.995246Z node 2 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-31T16:00:44.995345Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [2:298:2289] 2025-03-31T16:00:45.008801Z node 2 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-31T16:00:45.008888Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [2:299:2290] !Reboot 72057594037927937 (actor [2:107:2139]) rebooted! !Reboot 72057594037927937 (actor [2:107:2139]) tablet resolver refreshed! new actor is[2:249:2248] Leader for TabletID 72057594037927937 is [2:249:2248] sender: [2:351:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:249:2248] sender: [2:354:2057] recipient: [2:99:2134] Leader for TabletID 72057594037927937 is [2:249:2248] sender: [2:357:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:249:2248] sender: [2:358:2057] recipient: [2:356:2324] Leader for TabletID 72057594037927937 is [2:359:2325] sender: [2:360:2057] recipient: [2:356:2324] 2025-03-31T16:00:46.498326Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:00:46.498416Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-31T16:00:46.499328Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:410:2368] 2025-03-31T16:00:46.502020Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [2:411:2369] 2025-03-31T16:00:46.537663Z node 2 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-31T16:00:46.537769Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 4 [2:410:2368] 2025-03-31T16:00:46.548621Z node 2 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
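The "!Reboot ... on event ... !" markers here show the slow test killing tablet 72057594037927937 on a specific event type and then confirming recovery ("rebooted!", "tablet resolver refreshed!", "new actor is[...]"). A small sketch, assuming only that marker format, for listing where reboots were injected; `reboot_points` is a hypothetical name:

```python
import re

# Assumed marker format, lifted verbatim from the trace:
# "!Reboot <tablet> (actor [<id>]) on event <EventType> !"
REBOOT = re.compile(
    r"!Reboot (?P<tablet>\d+) \(actor \[[^\]]+\]\) on event (?P<event>\S+) !"
)

def reboot_points(log_text: str) -> list:
    """Return (tablet_id, event_type) for every injected reboot."""
    return [(m["tablet"], m["event"]) for m in REBOOT.finditer(log_text)]
```

On the section above this yields ("72057594037927937", "NKikimr::TEvPersQueue::TEvOffsets"), once per injected reboot.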
2025-03-31T16:00:46.548705Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 4 [2:411:2369] Leader for TabletID 72057594037927937 is [2:359:2325] sender: [2:443:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:103:2057] recipient: [3:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:103:2057] recipient: [3:101:2135] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:108:2057] recipie ... n 3 [53:300:2291] Leader for TabletID 72057594037927937 is [53:250:2249] sender: [53:332:2057] recipient: [53:14:2061] !Reboot 72057594037927937 (actor [53:250:2249]) on event NKikimr::TEvPersQueue::TEvOffsets ! Leader for TabletID 72057594037927937 is [53:250:2249] sender: [53:334:2057] recipient: [53:99:2134] Leader for TabletID 72057594037927937 is [53:250:2249] sender: [53:337:2057] recipient: [53:14:2061] Leader for TabletID 72057594037927937 is [53:250:2249] sender: [53:338:2057] recipient: [53:336:2312] Leader for TabletID 72057594037927937 is [53:339:2313] sender: [53:340:2057] recipient: [53:336:2312] 2025-03-31T16:01:38.598035Z node 53 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:01:38.598116Z node 53 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-31T16:01:38.598965Z node 53 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [53:390:2356] 2025-03-31T16:01:38.602258Z node 53 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [53:391:2357] 2025-03-31T16:01:38.650055Z node 53 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-31T16:01:38.650145Z node 53 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 4 [53:390:2356] 2025-03-31T16:01:38.658987Z node 53 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-31T16:01:38.659063Z node 53 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 4 [53:391:2357] !Reboot 72057594037927937 (actor [53:250:2249]) rebooted! !Reboot 72057594037927937 (actor [53:250:2249]) tablet resolver refreshed! 
new actor is[53:339:2313] Leader for TabletID 72057594037927937 is [53:339:2313] sender: [53:440:2057] recipient: [53:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [54:103:2057] recipient: [54:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [54:103:2057] recipient: [54:101:2135] Leader for TabletID 72057594037927937 is [54:107:2139] sender: [54:108:2057] recipient: [54:101:2135] 2025-03-31T16:01:40.520228Z node 54 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:01:40.520329Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [54:149:2057] recipient: [54:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [54:149:2057] recipient: [54:147:2170] Leader for TabletID 72057594037927938 is [54:153:2174] sender: [54:154:2057] recipient: [54:147:2170] Leader for TabletID 72057594037927937 is [54:107:2139] sender: [54:179:2057] recipient: [54:14:2061] 2025-03-31T16:01:40.551826Z node 54 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:01:40.552927Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 54 actor [54:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 54 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 54 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 54 Important: false } 2025-03-31T16:01:40.553557Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [54:185:2198] 2025-03-31T16:01:40.556966Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [54:185:2198] 2025-03-31T16:01:40.559500Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [54:186:2199] 2025-03-31T16:01:40.562212Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [54:186:2199] 2025-03-31T16:01:40.571654Z node 54 :PERSQUEUE INFO: new Cookie default|8c5f7c41-7fd7fb44-3c948b3d-15e3e760_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-31T16:01:40.590132Z node 54 :PERSQUEUE INFO: new Cookie default|dfeaf3f1-4fe718e8-98e9b3ed-2f0eb079_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-31T16:01:40.638983Z node 54 :PERSQUEUE INFO: new Cookie default|ddde914a-57ee672b-cac534eb-e0ec62af_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [54:107:2139] sender: [54:243:2057] recipient: [54:99:2134] Leader for TabletID 72057594037927937 is [54:107:2139] sender: [54:246:2057] recipient: [54:14:2061] Leader for TabletID 72057594037927937 is [54:107:2139] sender: [54:247:2057] recipient: [54:245:2246] Leader for 
TabletID 72057594037927937 is [54:248:2247] sender: [54:249:2057] recipient: [54:245:2246] 2025-03-31T16:01:40.712267Z node 54 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:01:40.712349Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-31T16:01:40.713475Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [54:297:2288] 2025-03-31T16:01:40.717121Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [54:298:2289] 2025-03-31T16:01:40.743346Z node 54 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-31T16:01:40.743445Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [54:297:2288] 2025-03-31T16:01:40.756111Z node 54 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-31T16:01:40.756215Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [54:298:2289] Leader for TabletID 72057594037927937 is [54:248:2247] sender: [54:330:2057] recipient: [54:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [55:103:2057] recipient: [55:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [55:103:2057] recipient: [55:101:2135] Leader for TabletID 72057594037927937 is [55:107:2139] sender: [55:108:2057] recipient: [55:101:2135] 2025-03-31T16:01:41.563779Z node 55 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:01:41.563878Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [55:149:2057] recipient: [55:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [55:149:2057] recipient: [55:147:2170] Leader for TabletID 72057594037927938 is [55:153:2174] sender: [55:154:2057] recipient: [55:147:2170] Leader for TabletID 72057594037927937 is [55:107:2139] sender: [55:179:2057] recipient: [55:14:2061] 2025-03-31T16:01:41.592806Z node 55 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:01:41.593949Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 55 actor [55:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 55 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 55 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 55 Important: false } 2025-03-31T16:01:41.594823Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [55:185:2198] 2025-03-31T16:01:41.598431Z node 55 :PERSQUEUE INFO: 
[PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [55:185:2198] 2025-03-31T16:01:41.601248Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [55:186:2199] 2025-03-31T16:01:41.603021Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [55:186:2199] 2025-03-31T16:01:41.611776Z node 55 :PERSQUEUE INFO: new Cookie default|191e7c4b-30c05040-dfb39e76-7d6d5536_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-31T16:01:41.632476Z node 55 :PERSQUEUE INFO: new Cookie default|f757083e-c4981fe-fc74b734-e2a72ae3_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-31T16:01:41.730942Z node 55 :PERSQUEUE INFO: new Cookie default|9bf1f936-24bcd89a-638352c4-8dbdb35c_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [55:107:2139] sender: [55:245:2057] recipient: [55:99:2134] Leader for TabletID 72057594037927937 is [55:107:2139] sender: [55:247:2057] recipient: [55:14:2061] Leader for TabletID 72057594037927937 is [55:107:2139] sender: [55:249:2057] recipient: [55:248:2248] Leader for TabletID 72057594037927937 is [55:250:2249] sender: [55:251:2057] recipient: [55:248:2248] 2025-03-31T16:01:41.818661Z node 55 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:01:41.818757Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-31T16:01:41.820808Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [55:299:2290] 2025-03-31T16:01:41.823419Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [55:300:2291] 2025-03-31T16:01:41.842964Z node 55 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-31T16:01:41.843048Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [55:299:2290] 2025-03-31T16:01:41.857607Z node 55 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2025-03-31T16:01:41.857708Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [55:300:2291] Leader for TabletID 72057594037927937 is [55:250:2249] sender: [55:332:2057] recipient: [55:14:2061] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TStoragePoolsStatsPersistence::SameAggregatedStatsAfterRestart [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:01:36.713305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:01:36.713409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:01:36.713453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:01:36.713494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:01:36.713554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:01:36.713591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:01:36.713652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:01:36.713853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:01:36.714354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:01:36.820537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:01:36.820607Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:01:36.830236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:01:36.830351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:01:36.830571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:01:36.836299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:01:36.836576Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:01:36.837394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:36.837689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:01:36.840857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:36.842308Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:36.842379Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:36.842545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:01:36.842595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:36.842637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:01:36.842814Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:01:36.856395Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:01:37.006159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:01:37.006464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:37.006685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:01:37.007000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:01:37.007072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:37.010718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:37.010899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:01:37.011146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:37.011222Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:01:37.011263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:01:37.011300Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:01:37.016525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:37.016614Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:01:37.016667Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:01:37.019820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:37.019892Z node 1 
:FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:37.019970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:37.020053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:01:37.024500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:01:37.027514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:01:37.027767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:01:37.029203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:37.029419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:01:37.029497Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:37.029827Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:01:37.029897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:37.030143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:01:37.030284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:01:37.032946Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:37.033034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:37.033301Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:37.033372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:01:37.033645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:37.033699Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:01:37.033840Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is 
done id#1:0 progress is 1/1 2025-03-31T16:01:37.033879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:37.033938Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:01:37.033973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:37.034017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:01:37.034067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:37.034104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:01:37.034155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:01:37.034244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:01:37.034292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:01:37.034354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:01:37.036881Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:01:37.037126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:01:37.037176Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, coun ... 
0s, InflightLimit# 10 2025-03-31T16:01:41.765883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:01:41.765921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:01:41.765957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:01:41.765994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:01:41.766064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:01:41.766143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:01:41.766485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:01:41.782936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:01:41.785476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:01:41.785666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:01:41.785820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:01:41.785863Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:01:41.786124Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:01:41.786966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 2, at schemeshard: 72057594046678944 2025-03-31T16:01:41.787053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: SomeTable, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-31T16:01:41.787155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:01:41.787224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:01:41.787655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:41.787835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-03-31T16:01:41.787948Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-03-31T16:01:41.788162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 2, at schemeshard: 72057594046678944 2025-03-31T16:01:41.788278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:01:41.788375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:41.788413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-31T16:01:41.788537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 
1, at schemeshard: 72057594046678944 2025-03-31T16:01:41.788705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:41.789070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 3, at schemeshard: 72057594046678944 2025-03-31T16:01:41.789389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:01:41.789516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:01:41.789894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:01:41.789986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:01:41.790200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:01:41.790320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:01:41.790404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:01:41.790583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:01:41.790680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:01:41.790838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:01:41.791053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-03-31T16:01:41.791261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-03-31T16:01:41.791326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-03-31T16:01:41.791384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-03-31T16:01:41.803026Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:41.803136Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:41.803830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:01:41.803915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:41.803967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:01:41.810699Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:758:2712] sender: [1:812:2058] recipient: [1:15:2062] 2025-03-31T16:01:41.867362Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:01:41.867609Z node 1 :SCHEMESHARD_DESCRIBE INFO: 
Tablet 72057594046678944 describe path "/MyRoot/SomeTable" took 286us result status StatusSuccess 2025-03-31T16:01:41.868049Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeTable" PathDescription { Self { Name: "SomeTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "SomeTable" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 Family: 1 FamilyName: "alternative" NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 4140 RowCount: 100 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { PoolsUsage { PoolKind: "pool-kind-1" DataSize: 1020 IndexSize: 0 } PoolsUsage { PoolKind: "pool-kind-2" DataSize: 3120 IndexSize: 0 } } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 82488 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 4140 DataSize: 4140 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-1" TotalSize: 1020 DataSize: 1020 IndexSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-2" TotalSize: 3120 DataSize: 3120 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:01:41.870464Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:01:41.870655Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 218us result status StatusSuccess 2025-03-31T16:01:41.879526Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 
UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "SomeTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 4140 DataSize: 4140 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-1" TotalSize: 1020 DataSize: 1020 IndexSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-2" TotalSize: 3120 DataSize: 3120 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-false [GOOD] >> DataShardVolatile::DistributedWriteThenImmediateUpsert >> TSchemeShardTTLTests::CheckCounters [GOOD] >> THiveTest::TestCheckSubHiveMigration [GOOD] >> THiveTest::TestCheckSubHiveMigrationManyTablets ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:01:22.201783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:01:22.201902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:01:22.201938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:01:22.201974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:01:22.202016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:01:22.202062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:01:22.202119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:01:22.202252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 
604800.000000s, IsManualStartup# false 2025-03-31T16:01:22.202588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:01:22.294274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:01:22.294333Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:01:22.299062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:01:22.299187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:01:22.299317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:01:22.304306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:01:22.304521Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:01:22.305169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:22.305349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:01:22.307351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:22.308661Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:22.308725Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:22.308897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:01:22.308945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:22.308985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:01:22.309136Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:01:22.316086Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:01:22.453962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:01:22.454164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:22.454445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:01:22.454742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:01:22.454803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:22.457214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 
PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:22.457357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:01:22.457586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:22.457652Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:01:22.457686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:01:22.457719Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:01:22.459825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:22.459896Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:01:22.459944Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:01:22.461844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:22.461890Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:22.461937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:22.462013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:01:22.465762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:01:22.467698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:01:22.467883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:01:22.468880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:22.469038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:01:22.469085Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:22.469386Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:01:22.469441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:22.469605Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:01:22.469693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:01:22.471822Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:22.471871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:22.472077Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:22.472150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:01:22.472356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:22.472411Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:01:22.472511Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:01:22.472544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:22.472579Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:01:22.472607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:22.472643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:01:22.472689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:22.472721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:01:22.472748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:01:22.472824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:01:22.472864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:01:22.472905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:01:22.474691Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:01:22.474789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:01:22.474825Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
[OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:43.452297Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T16:01:43.452536Z node 18 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:43.452593Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [18:205:2207], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-03-31T16:01:43.452641Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [18:205:2207], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-03-31T16:01:43.453147Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:01:43.453203Z node 18 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 ProgressState at tablet: 72057594046678944 2025-03-31T16:01:43.454260Z node 18 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T16:01:43.454355Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T16:01:43.454389Z node 18 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-03-31T16:01:43.454431Z node 18 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-03-31T16:01:43.454471Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:01:43.465998Z node 18 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T16:01:43.466105Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T16:01:43.466139Z node 18 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-03-31T16:01:43.466172Z node 18 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-03-31T16:01:43.466211Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-31T16:01:43.466310Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2025-03-31T16:01:43.466887Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 11770 } } 2025-03-31T16:01:43.466936Z 
node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-03-31T16:01:43.467065Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 11770 } } 2025-03-31T16:01:43.467189Z node 18 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 11770 } } FAKE_COORDINATOR: Erasing txId 101 2025-03-31T16:01:43.471011Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 306 RawX2: 77309413621 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-03-31T16:01:43.471060Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-03-31T16:01:43.471211Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 306 RawX2: 77309413621 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-03-31T16:01:43.479287Z node 18 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-03-31T16:01:43.479451Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 306 RawX2: 77309413621 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-03-31T16:01:43.479517Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:43.479559Z node 18 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:01:43.479604Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-03-31T16:01:43.479648Z node 18 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2025-03-31T16:01:43.482536Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-31T16:01:43.482896Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-31T16:01:43.491242Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:01:43.491904Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:01:43.492249Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:01:43.492298Z node 18 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-03-31T16:01:43.492401Z node 18 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-31T16:01:43.492440Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T16:01:43.492482Z node 18 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-31T16:01:43.492516Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T16:01:43.492562Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2025-03-31T16:01:43.492639Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [18:334:2313] message: TxId: 101 2025-03-31T16:01:43.492696Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T16:01:43.492743Z node 18 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-03-31T16:01:43.492775Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-03-31T16:01:43.492898Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-31T16:01:43.499948Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-31T16:01:43.500001Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [18:335:2314] TestWaitNotification: OK eventTxId 101 2025-03-31T16:01:43.500505Z node 18 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLTableWithDyNumberColumn_UNIT_NANOSECONDS" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:01:43.500705Z node 18 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLTableWithDyNumberColumn_UNIT_NANOSECONDS" took 242us result status StatusSuccess 2025-03-31T16:01:43.501222Z node 18 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLTableWithDyNumberColumn_UNIT_NANOSECONDS" PathDescription { Self { Name: "TTLTableWithDyNumberColumn_UNIT_NANOSECONDS" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLTableWithDyNumberColumn_UNIT_NANOSECONDS" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "DyNumber" TypeId: 4866 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 ColumnUnit: UNIT_NANOSECONDS Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { 
DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |72.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain |72.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain |72.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain >> KqpJoinOrder::FiveWayJoinWithComplexPreds2+ColumnStore [GOOD] >> SystemView::PDisksFields [GOOD] >> SystemView::GroupsFields ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CheckCounters [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-31T16:01:13.319040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:01:13.319216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:01:13.319257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:01:13.319328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:01:13.319398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:01:13.319427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:01:13.319483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:01:13.319564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 
604800.000000s, IsManualStartup# false 2025-03-31T16:01:13.319946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:01:13.411227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:01:13.411301Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:01:13.429467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:01:13.429809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:01:13.429996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:01:13.436694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:01:13.437018Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:01:13.437700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:13.438048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:01:13.439995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:13.441455Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:13.441522Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:13.441709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:01:13.441755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:13.441798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:01:13.441907Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:01:13.448723Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:238:2058] recipient: [1:15:2062] 2025-03-31T16:01:13.582886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:01:13.583244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:13.583503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:01:13.583803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:01:13.583878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:13.586528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 
PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:13.586673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:01:13.586894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:13.586948Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:01:13.587001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:01:13.587036Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:01:13.589635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:13.589704Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:01:13.589740Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:01:13.591810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:13.591884Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:13.591927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:13.591997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:01:13.595700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:01:13.598023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:01:13.598198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:01:13.599286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:13.599427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:01:13.599477Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:13.599858Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:01:13.599936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:13.600137Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:01:13.600245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:01:13.602589Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:13.602649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:13.602861Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:13.602899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:01:13.602983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:13.603040Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:01:13.603162Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:01:13.603195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:13.603229Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:01:13.603285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:13.603331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:01:13.603375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:13.603408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:01:13.603437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:01:13.603510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:01:13.603547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:01:13.603577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:01:13.612469Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:01:13.612637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:01:13.612675Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, coun ... 
adyToDone TxId: 107 ready parts: 1/1 2025-03-31T16:01:43.541022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 1/1, is published: true 2025-03-31T16:01:43.541120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:340:2319] message: TxId: 107 2025-03-31T16:01:43.541176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-03-31T16:01:43.541221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 107:0 2025-03-31T16:01:43.541286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 107:0 2025-03-31T16:01:43.541469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-03-31T16:01:43.541515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-31T16:01:43.541969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-31T16:01:43.542028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-31T16:01:43.542108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:01:43.552257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2025-03-31T16:01:43.552322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:1352:3253] 2025-03-31T16:01:43.552595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 107 Name: "SchemeShard/NumShardsByTtlLag" Ranges: "0" Ranges: "900" Ranges: "1800" Ranges: "3600" Ranges: "7200" Ranges: "14400" Ranges: "28800" Ranges: "57600" Ranges: "86400" Ranges: "inf" Values: 0 Values: 2 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 2025-03-31T16:01:43.704793Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409548 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2025-03-31T16:01:43.705517Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409549 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2025-03-31T16:01:43.705662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:3 data size 0 row count 0 2025-03-31T16:01:43.705741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409548 maps to shardIdx: 72057594046678944:3 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=TTLEnabledTableMoved, is column=0, is olap=0 2025-03-31T16:01:43.705792Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409548 followerId=0, pathId 4: RowCount 0, DataSize 0 2025-03-31T16:01:43.706616Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:4 data size 0 row count 0 2025-03-31T16:01:43.706676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409549 maps to shardIdx: 72057594046678944:4 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=TTLEnabledTableMoved, is column=0, is olap=0 2025-03-31T16:01:43.706712Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409549 followerId=0, pathId 4: RowCount 0, DataSize 0 Name: "SchemeShard/NumShardsByTtlLag" Ranges: "0" Ranges: "900" Ranges: "1800" Ranges: "3600" Ranges: "7200" Ranges: "14400" Ranges: "28800" Ranges: "57600" Ranges: "86400" Ranges: "inf" Values: 0 Values: 0 Values: 2 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 2025-03-31T16:01:43.819845Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2025-03-31T16:01:43.819973Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-03-31T16:01:43.820101Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-03-31T16:01:43.820287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409549, request: TableId: 4 Expiration { ColumnId: 2 WallClockTimestamp: 1743450163798242 ColumnUnit: UNIT_AUTO } SchemaVersion: 4 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2025-03-31T16:01:43.820386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409548, request: TableId: 4 Expiration { ColumnId: 2 WallClockTimestamp: 1743450163798242 ColumnUnit: UNIT_AUTO } SchemaVersion: 4 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2025-03-31T16:01:43.821186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409549, at schemeshard: 72057594046678944 2025-03-31T16:01:43.822068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409548, at schemeshard: 72057594046678944 2025-03-31T16:01:43.822269Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2025-03-31T16:01:43.822338Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409549, at schemeshard: 72057594046678944 2025-03-31T16:01:43.822972Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2025-03-31T16:01:43.823013Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409548, at schemeshard: 72057594046678944 2025-03-31T16:01:43.826900Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2025-03-31T16:01:43.827358Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-03-31T16:01:43.827445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:4, run at: 2025-03-31T20:42:43.798242Z, at schemeshard: 72057594046678944 2025-03-31T16:01:43.828838Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2025-03-31T16:01:43.828911Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 
72057594046678944 2025-03-31T16:01:43.829000Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-03-31T16:01:43.829037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:4, run at: 2025-03-31T20:42:43.798242Z, at schemeshard: 72057594046678944 2025-03-31T16:01:43.829082Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-03-31T16:01:43.850860Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-03-31T16:01:43.900206Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409548 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2025-03-31T16:01:43.900350Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409549 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2025-03-31T16:01:43.900417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:3 data size 0 row count 0 2025-03-31T16:01:43.900502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409548 maps to shardIdx: 72057594046678944:3 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=TTLEnabledTableMoved, is column=0, is olap=0 2025-03-31T16:01:43.900554Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409548 followerId=0, pathId 4: RowCount 0, DataSize 0 2025-03-31T16:01:43.900844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:4 data size 0 row count 0 2025-03-31T16:01:43.900897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409549 maps to shardIdx: 72057594046678944:4 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=TTLEnabledTableMoved, is column=0, is olap=0 2025-03-31T16:01:43.900929Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409549 followerId=0, pathId 4: RowCount 0, DataSize 0 Name: "SchemeShard/NumShardsByTtlLag" Ranges: "0" Ranges: "900" Ranges: "1800" Ranges: "3600" Ranges: "7200" Ranges: "14400" Ranges: "28800" Ranges: "57600" Ranges: "86400" Ranges: "inf" Values: 2 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 2025-03-31T16:01:43.948787Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-03-31T16:01:44.016249Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409548 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0001 2025-03-31T16:01:44.016392Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409549 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0001 2025-03-31T16:01:44.016470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:3 data size 0 row count 0 2025-03-31T16:01:44.016553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409548 maps to shardIdx: 72057594046678944:3 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=TTLEnabledTableMoved, is column=0, is olap=0 2025-03-31T16:01:44.016604Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409548 followerId=0, pathId 4: RowCount 0, DataSize 0 2025-03-31T16:01:44.016863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:4 data size 0 row count 0 2025-03-31T16:01:44.016910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409549 maps to shardIdx: 72057594046678944:4 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=TTLEnabledTableMoved, is column=0, is olap=0 2025-03-31T16:01:44.016953Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409549 followerId=0, pathId 4: RowCount 0, DataSize 0 Name: "SchemeShard/NumShardsByTtlLag" Ranges: "0" Ranges: "900" Ranges: "1800" Ranges: "3600" Ranges: "7200" Ranges: "14400" Ranges: "28800" Ranges: "57600" Ranges: "86400" Ranges: "inf" Values: 0 Values: 2 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0
>> OlapEstimationRowsCorrectness::TPCH11 [GOOD]
>> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest
>> KqpStreamLookup::ReadTableWithIndexDuringSplit
>> DataStreams::TestReservedResourcesMetering
>> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesTimeout
|72.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_transfer/ydb-core-tx-schemeshard-ut_transfer
|72.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_transfer/ydb-core-tx-schemeshard-ut_transfer
|72.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_transfer/ydb-core-tx-schemeshard-ut_transfer
>> TSchemeshardStatsBatchingTest::ShouldNotBatchWhenDisabled [GOOD]
>> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest [GOOD]
>> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailesOnNotATopic
>> KqpStreamLookup::ReadTableDuringSplit
>> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesTimeout [GOOD]
>> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::SuccessfullyPassesResponsesFromTablets
>> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailesOnNotATopic [GOOD]
>> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly
>> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest
>> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::SuccessfullyPassesResponsesFromTablets [GOOD]
>> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailesOnNotATopic
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::ShouldNotBatchWhenDisabled [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:01:36.135888Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:01:36.135968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:01:36.136002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:01:36.136025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:01:36.136059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:01:36.136088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:01:36.136130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:01:36.136239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:01:36.136518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:01:36.223526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:01:36.223586Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:01:36.232052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:01:36.232146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:01:36.232279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:01:36.237582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:01:36.237749Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:01:36.238266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:36.238499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:01:36.243704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:36.245263Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:36.245358Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:36.245627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:01:36.245698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:36.245766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:01:36.246001Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:01:36.268988Z node 1 :HIVE INFO: 
[72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:01:36.419444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:01:36.419641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:36.419829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:01:36.420084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:01:36.420140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:36.424131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:36.424285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:01:36.424463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:36.424532Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:01:36.424590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:01:36.424622Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:01:36.428208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:36.428284Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:01:36.428320Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:01:36.430623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:36.430674Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:36.430722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:36.430789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:01:36.434623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:01:36.436735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg 
operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:01:36.436896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:01:36.438065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:36.438211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:01:36.438267Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:36.438594Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:01:36.438675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:36.438817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:01:36.439055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:01:36.441226Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:36.441270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:36.441471Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:36.441547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:01:36.441805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:36.441850Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:01:36.441939Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:01:36.441973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:36.442010Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:01:36.442040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:36.442094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:01:36.442135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:36.442176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:01:36.442204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 
1:0 2025-03-31T16:01:36.442265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:01:36.442301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:01:36.442347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:01:36.450860Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:01:36.451013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:01:36.451069Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... s only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: Simple, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-31T16:01:45.853066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:01:45.853156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:01:45.853631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:45.853750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-03-31T16:01:45.853833Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-03-31T16:01:45.854103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 2, at schemeshard: 72057594046678944 2025-03-31T16:01:45.854255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:01:45.854376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:45.854435Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TTxInit for Shards, read: 72057594046678944:1, tabletId: 72075186233409546, PathId: [OwnerId: 72057594046678944, LocalPathId: 2], TabletType: DataShard, at schemeshard: 72057594046678944 2025-03-31T16:01:45.854479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-31T16:01:45.854621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:45.854764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:01:45.854997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 3, at schemeshard: 72057594046678944 2025-03-31T16:01:45.855405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:01:45.855522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 
72057594046678944 2025-03-31T16:01:45.856029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:01:45.856108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:01:45.856400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:01:45.856508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:01:45.856603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:01:45.856863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:01:45.856953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:01:45.857116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:01:45.857378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-03-31T16:01:45.857583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-03-31T16:01:45.857648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-03-31T16:01:45.857699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-03-31T16:01:45.857977Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-03-31T16:01:45.873741Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-31T16:01:45.873966Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [RootDataErasureManager] Stop 2025-03-31T16:01:45.875459Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435082, Sender [1:1013:2957], Recipient [1:1013:2957]: NKikimr::NSchemeShard::TEvPrivate::TEvServerlessStorageBilling 2025-03-31T16:01:45.875515Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvServerlessStorageBilling 2025-03-31T16:01:45.879481Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:45.879584Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:45.880431Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:1013:2957], Recipient [1:1013:2957]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:01:45.880480Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:01:45.880908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:01:45.880984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:45.881037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:01:45.881077Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-31T16:01:45.885265Z node 1 
:FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274399233, Sender [1:1049:2957], Recipient [1:1013:2957]: NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2025-03-31T16:01:45.885355Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxAllocatorClient::TEvAllocateResult 2025-03-31T16:01:45.885397Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:1013:2957] sender: [1:1069:2058] recipient: [1:15:2062] 2025-03-31T16:01:45.934804Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [1:1068:3001], Recipient [1:1013:2957]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2025-03-31T16:01:45.934879Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-03-31T16:01:45.935049Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-31T16:01:45.938076Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Simple" took 2.92ms result status StatusSuccess 2025-03-31T16:01:45.939143Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Simple" PathDescription { Self { Name: "Simple" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1001 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Simple" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 
8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 MaxPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 13984 RowCount: 100 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 82488 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 13984 DataSize: 13984 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-66 [FAIL]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-67
>> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest [GOOD]
>> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest
>> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest
|72.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan
|72.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan
|72.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan
>> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailesOnNotATopic [GOOD]
>> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest [GOOD]
>> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst
>> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest [GOOD]
>> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD]
Test command err:
Assert failed: Check response: { Status: 128 ErrorReason: "path \'Root/PQ\' has unknown/invalid root prefix \'Root\', Marker# PQ14" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'Root/PQ\' describe error, Status# LookupError, Marker# PQ1" ErrorCode: ERROR }
>> TPersQueueTest::WriteAfterAlter [GOOD]
>> TPersQueueTest::WhenTheTopicIsDeletedBeforeDataIsDecompressed_Compressed
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailesOnNotATopic [GOOD]
Test command err:
Assert failed: Check response: { Status: 130 ErrorReason: "Timeout while waiting for response, may be just slow, Marker# PQ16" ErrorCode: ERROR } 2025-03-31T16:01:46.973582Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-03-31T16:01:46.978242Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-03-31T16:01:46.978587Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] doesn't have tx info 2025-03-31T16:01:46.978637Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-31T16:01:46.978673Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-03-31T16:01:46.978757Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0 2025-03-31T16:01:46.978808Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:01:46.978878Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] doesn't have tx writes info 2025-03-31T16:01:46.979652Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:262:2254], now have 1 active actors on pipe 2025-03-31T16:01:46.979719Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-03-31T16:01:46.996046Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Config update version 1(current 0) received from actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-03-31T16:01:46.999221Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: 
"rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-03-31T16:01:46.999440Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:01:47.000309Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] Config applied version 1 actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-03-31T16:01:47.000470Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-03-31T16:01:47.000893Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-31T16:01:47.001189Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] bootstrapping 0 [2:270:2260] 2025-03-31T16:01:47.003468Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Initializing completed. 2025-03-31T16:01:47.003531Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:270:2260] 2025-03-31T16:01:47.003588Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-31T16:01:47.003639Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-03-31T16:01:47.004794Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:273:2262], now have 1 active actors on pipe 2025-03-31T16:01:47.058013Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvInterconnect::TEvNodeInfo 2025-03-31T16:01:47.061939Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-03-31T16:01:47.062307Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] doesn't have tx info 2025-03-31T16:01:47.062363Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-31T16:01:47.062423Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] no config, start with empty partitions and default config 2025-03-31T16:01:47.062464Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Txs.size=0, PlannedTxs.size=0 2025-03-31T16:01:47.062516Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:01:47.062579Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928138] doesn't have tx writes info 2025-03-31T16:01:47.063306Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [2:405:2360], now have 1 active actors on pipe 2025-03-31T16:01:47.063479Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2025-03-31T16:01:47.063712Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Config update version 2(current 0) received from actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-03-31T16:01:47.066364Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-03-31T16:01:47.066508Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:01:47.067334Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928138] Config applied version 2 actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-03-31T16:01:47.067485Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitConfigStep 2025-03-31T16:01:47.067897Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-31T16:01:47.068138Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] bootstrapping 1 [2:413:2366] 2025-03-31T16:01:47.070174Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Initializing completed. 
2025-03-31T16:01:47.070238Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 1 generation 2 [2:413:2366] 2025-03-31T16:01:47.070294Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-31T16:01:47.070342Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateIdle] Process pending events. Count 0 2025-03-31T16:01:47.071225Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [2:416:2368], now have 1 active actors on pipe 2025-03-31T16:01:47.089056Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-03-31T16:01:47.092981Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-03-31T16:01:47.093299Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-03-31T16:01:47.093350Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-31T16:01:47.093383Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] no config, start with empty partitions and default config 2025-03-31T16:01:47.093464Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-03-31T16:01:47.093513Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:01:47.093581Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-03-31T16:01:47.094314Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:465:2405], now have 1 active actors on pipe 2025-03-31T16:01:47.094368Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-03-31T16:01:47.094546Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Config update version 3(current 0) received from actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-03-31T16:01:47.097690Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-03-31T16:01:47.097833Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:01:47.098596Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139] Config applied version 3 actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-03-31T16:01:47.098716Z node 2 :PERSQUEUE DEBUG: 
[rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-03-31T16:01:47.099116Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-31T16:01:47.099345Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [2:473:2411] 2025-03-31T16:01:47.101184Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-03-31T16:01:47.101247Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [2:473:2411] 2025-03-31T16:01:47.101309Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-31T16:01:47.101352Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. Count 0 2025-03-31T16:01:47.102123Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:476:2413], now have 1 active actors on pipe REQUEST MetaRequest { CmdGetReadSessionsInfo { ClientId: "client_id" Topic: "rt3.dc1--topic1" Topic: "rt3.dc1--topic2" } } Ticket: "client_id@builtin" 2025-03-31T16:01:47.110704Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:485:2416], now have 1 active actors on pipe 2025-03-31T16:01:47.111301Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [2:488:2417], now have 1 active actors on pipe 2025-03-31T16:01:47.111603Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:489:2417], now have 1 active actors on pipe 2025-03-31T16:01:47.112253Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server disconnected, pipe [2:485:2416] destroyed 2025-03-31T16:01:47.112787Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server disconnected, pipe [2:488:2417] destroyed 2025-03-31T16:01:47.112869Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server disconnected, pipe [2:489:2417] destroyed RESULT Status: 1 ErrorCode: OK MetaResponse { CmdGetReadSessionsInfoResult { TopicResult { Topic: "rt3.dc1--topic1" PartitionResult { Partition: 0 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 2 ErrorCode: OK } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic2" PartitionResult { Partition: 0 ErrorCode: INITIALIZING ErrorReason: "tablet for partition is not running" } PartitionResult { Partition: 1 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 2 ErrorCode: OK } PartitionResult { Partition: 2 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 2 ErrorCode: OK } ErrorCode: OK } } } Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchSize [GOOD] >> TestYmqHttpProxy::TestSendMessageEmptyQueueUrl [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive >> 
TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnFailedGetAllTopicsRequest >> TSchemeShardTTLTests::RacyAlterTableAndConditionalErase [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchSize [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:01:36.124601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:01:36.124680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:01:36.124708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:01:36.124733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:01:36.124770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:01:36.124788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:01:36.124825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:01:36.124937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:01:36.125203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:01:36.223668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:01:36.223749Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:01:36.231316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:01:36.231403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:01:36.231538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:01:36.243684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:01:36.243901Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:01:36.245186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:36.245477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:01:36.247887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 
72057594046678944 2025-03-31T16:01:36.249016Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:36.249087Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:36.249227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:01:36.249270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:36.249305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:01:36.249427Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:01:36.260107Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:01:36.407483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:01:36.407692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:36.407888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:01:36.408224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:01:36.408282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:36.414732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:36.414899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:01:36.415202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:36.415268Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:01:36.415325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:01:36.415362Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:01:36.419969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:36.420033Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:01:36.420082Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:01:36.424063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:36.424120Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:36.424173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:36.424241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:01:36.428668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:01:36.432684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:01:36.432885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:01:36.433883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:36.434144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:01:36.434193Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:36.434466Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:01:36.434515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:36.434663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:01:36.434777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:01:36.441540Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:36.441581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:36.441794Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:36.441853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:01:36.442056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:36.442096Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 
ProgressState 2025-03-31T16:01:36.442181Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:01:36.442212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:36.442243Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:01:36.442270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:36.442301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:01:36.442348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:36.442384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:01:36.442410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:01:36.442488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:01:36.442546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:01:36.442589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:01:36.444441Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:01:36.444559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:01:36.444617Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
t for Tables, read records: 2, at schemeshard: 72057594046678944 2025-03-31T16:01:48.183266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-03-31T16:01:48.183328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 0 2025-03-31T16:01:48.183389Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-03-31T16:01:48.183623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 4, at schemeshard: 72057594046678944 2025-03-31T16:01:48.183733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:01:48.184233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 2, at schemeshard: 72057594046678944 2025-03-31T16:01:48.184275Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TTxInit for Shards, read: 72057594046678944:1, tabletId: 72075186233409546, PathId: [OwnerId: 72057594046678944, LocalPathId: 2], TabletType: DataShard, at schemeshard: 72057594046678944 2025-03-31T16:01:48.184320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-31T16:01:48.184357Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TTxInit for Shards, read: 72057594046678944:2, tabletId: 72075186233409547, PathId: [OwnerId: 72057594046678944, LocalPathId: 3], TabletType: DataShard, at schemeshard: 72057594046678944 2025-03-31T16:01:48.184374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-03-31T16:01:48.184464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 2, at schemeshard: 72057594046678944 2025-03-31T16:01:48.184573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:01:48.184742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 6, at schemeshard: 72057594046678944 2025-03-31T16:01:48.184964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:01:48.185051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:01:48.185303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:01:48.185369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:01:48.185598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:01:48.185697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:01:48.185788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:01:48.186024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:01:48.186106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:01:48.186277Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:01:48.186499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-03-31T16:01:48.186663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-03-31T16:01:48.186708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-03-31T16:01:48.186759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-03-31T16:01:48.187010Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-03-31T16:01:48.201232Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-31T16:01:48.201404Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [RootDataErasureManager] Stop 2025-03-31T16:01:48.202888Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435082, Sender [1:1137:3069], Recipient [1:1137:3069]: NKikimr::NSchemeShard::TEvPrivate::TEvServerlessStorageBilling 2025-03-31T16:01:48.202941Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvServerlessStorageBilling 2025-03-31T16:01:48.204908Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:48.204993Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:48.206413Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:1137:3069], Recipient [1:1137:3069]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:01:48.206480Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:01:48.206855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:01:48.206923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:48.206991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:01:48.207032Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-31T16:01:48.208312Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274399233, Sender [1:1173:3069], Recipient [1:1137:3069]: NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2025-03-31T16:01:48.208361Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxAllocatorClient::TEvAllocateResult 2025-03-31T16:01:48.208419Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:1137:3069] sender: [1:1193:2058] recipient: [1:15:2062] 2025-03-31T16:01:48.249652Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [1:1192:3113], Recipient [1:1137:3069]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2025-03-31T16:01:48.249717Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-03-31T16:01:48.249829Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-31T16:01:48.250106Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Simple" took 267us result status StatusSuccess 2025-03-31T16:01:48.250873Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Simple" PathDescription { Self { Name: "Simple" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1001 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Simple" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 1 MinPartitionsCount: 20 MaxPartitionsCount: 20 } } TableSchemaVersion: 2 
IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 13984 RowCount: 100 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 22989 Memory: 141368 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 13984 DataSize: 13984 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TestYmqHttpProxy::TestSendMessageFifoQueue >> TestYmqHttpProxy::TestCreateQueueWithSameNameAndSameParams [GOOD] >> TestYmqHttpProxy::TestSendMessage [GOOD] >> TestKinesisHttpProxy::DifferentContentTypes [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnFailedGetAllTopicsRequest [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNoBalancerInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> OlapEstimationRowsCorrectness::TPCH11 [GOOD] Test command err: Trying to start YDB, gRPC: 12446, MsgBus: 22922 2025-03-31T15:59:38.963564Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488003948266098086:2204];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:59:38.963956Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0019c1/r3tmp/tmp0Rw5f7/pdisk_1.dat 2025-03-31T15:59:39.636326Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:59:39.636428Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:59:39.640440Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:59:39.663238Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12446, node 1 2025-03-31T15:59:39.807951Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:59:39.807980Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:59:39.807991Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:59:39.808131Z node 1 :NET_CLASSIFIER ERROR: got bad distributable 
configuration TClient is connected to server localhost:22922 TClient is connected to server localhost:22922 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:59:40.772225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:40.794761Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T15:59:43.151118Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003969740935095:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:43.151278Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:43.151722Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003969740935107:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:43.156436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T15:59:43.171857Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488003969740935109:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T15:59:43.243686Z node 1 :TX_PROXY ERROR: Actor# [1:7488003969740935160:2342] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:59:43.648036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T15:59:43.907312Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488003948266098086:2204];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:59:43.907380Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:59:43.976409Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488003969740935430:2356];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T15:59:43.976409Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488003969740935422:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T15:59:43.976623Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488003969740935422:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T15:59:43.976916Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488003969740935422:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T15:59:43.977087Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488003969740935422:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T15:59:43.977106Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488003969740935430:2356];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T15:59:43.977217Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488003969740935422:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T15:59:43.977297Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488003969740935430:2356];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T15:59:43.977335Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488003969740935422:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T15:59:43.977478Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037892;self_id=[1:7488003969740935430:2356];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T15:59:43.977494Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488003969740935422:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T15:59:43.977598Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488003969740935430:2356];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T15:59:43.977614Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488003969740935422:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T15:59:43.977703Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488003969740935430:2356];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T15:59:43.977715Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488003969740935422:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T15:59:43.977819Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488003969740935430:2356];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T15:59:43.977835Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488003969740935422:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T15:59:43.978028Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488003969740935430:2356];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T15:59:43.978159Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488003969740935430:2356];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T15:59:43.978275Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488003969740935430:2356];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T15:59:43.978402Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488003969740935430:2356];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T15:59:43.978521Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488003969740935430:2356];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T15:59:43.986079Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224037903;self_id=[1:7488003969740935422:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T15:59:43.986282Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488003969740935422:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T15:59:44.046515Z node 1 :T ... ller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:27.162711Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039291;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:27.168179Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039368;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:27.175036Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:27.178095Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:27.183740Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:27.188255Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:27.190481Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:27.194035Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039330;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:27.197377Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:27.201520Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039380;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:27.204114Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:27.211358Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:27.211453Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:27.220126Z node 1 :TX_COLUMNSHARD_TX 
WARN: tablet_id=72075186224039366;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:27.221644Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:27.226582Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:27.233734Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:27.234543Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039378;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:27.240488Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:27.248251Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:27.251738Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:27.258431Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:27.258823Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:27.265810Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039362;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:27.269136Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:27.272482Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039319;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:27.276115Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039329;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:27.279044Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039317;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:27.285443Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 
2025-03-31T16:01:27.288351Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:27.295008Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:27.299544Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:27.301668Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:27.305930Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039265;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:27.309182Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:27.314122Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:27.316973Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:27.326136Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:27.332008Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:27.336973Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:27.348223Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:27.359905Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:27.372958Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:27.373688Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:27.398627Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:27.628286Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpeyb091eqfq3bgtgnptm8a", SessionId: ydb://session/3?node_id=1&id=NzQ3MzBhMzQtYTM4MzM3YWMtNDZlZGMxOGItODY0YmVmYmY=, Slow query, duration: 35.393941s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:01:28.158346Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:01:28.158853Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:01:28.159440Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;self_id=[1:7488004382057855929:12105];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039392;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039094;receive=72075186224038933; 2025-03-31T16:01:28.159797Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> TestKinesisHttpProxy::TestPing [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoinWithComplexPreds2+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 26187, MsgBus: 21192 2025-03-31T15:59:42.424707Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488003965522551164:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:59:42.424770Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0019b8/r3tmp/tmpAfiOfZ/pdisk_1.dat 2025-03-31T15:59:43.120040Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:59:43.120140Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:59:43.123242Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:59:43.200601Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26187, node 1 2025-03-31T15:59:43.470250Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:59:43.470273Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 
2025-03-31T15:59:43.470281Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:59:43.470382Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21192 TClient is connected to server localhost:21192 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:59:44.327420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:44.351138Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T15:59:47.068018Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003986997388303:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:47.068150Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:47.068646Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003986997388315:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:47.077868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T15:59:47.094589Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488003986997388317:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T15:59:47.178420Z node 1 :TX_PROXY ERROR: Actor# [1:7488003986997388368:2343] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:59:47.424812Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488003965522551164:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:59:47.424894Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:59:47.681459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T15:59:48.177039Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488003991292355954:2364];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T15:59:48.177650Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488003986997388649:2359];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T15:59:48.177713Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488003986997388649:2359];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T15:59:48.177970Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488003986997388649:2359];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T15:59:48.177973Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488003991292355954:2364];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T15:59:48.178097Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488003991292355954:2364];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T15:59:48.181678Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488003986997388649:2359];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T15:59:48.183297Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488003991292355954:2364];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T15:59:48.183608Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488003986997388649:2359];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T15:59:48.183824Z node 1 :TX_COLUMNSHARD WARN:
tablet_id=72075186224037895;self_id=[1:7488003991292355954:2364];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T15:59:48.183987Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488003986997388649:2359];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T15:59:48.184131Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488003991292355954:2364];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T15:59:48.184357Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488003986997388649:2359];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T15:59:48.184514Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488003991292355954:2364];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T15:59:48.184672Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488003986997388649:2359];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T15:59:48.184847Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488003991292355954:2364];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T15:59:48.185032Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488003986997388649:2359];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T15:59:48.185203Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488003991292355954:2364];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T15:59:48.185401Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488003986997388649:2359];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T15:59:48.185543Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488003991292355954:2364];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T15:59:48.185693Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488003986997388649:2359];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T15:59:48.185819Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488003991292355954:2364];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T15:59:48.185951Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488003986997388649:2359];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T15:59:48.186050Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488003991292355954:2364];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T15:59:48.235874Z node 1 :T ... ller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:30.057688Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:30.060524Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:30.070338Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:30.071910Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:30.081650Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:30.084203Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039190;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:30.094089Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039192;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:30.109454Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:30.110388Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039245;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:30.253645Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:30.266806Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:30.282247Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:30.297271Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:30.322546Z node 1 
:TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:30.327572Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:30.328174Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039194;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:30.339978Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039212;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:30.346640Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:30.349554Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039189;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:30.356928Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039187;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:30.358800Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039241;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:30.364071Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039198;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:30.367599Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039227;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:30.371644Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039215;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:30.375675Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039217;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:30.379556Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039205;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:30.383594Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039222;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:30.386503Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039202;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:30.391247Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039188;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 
2025-03-31T16:01:30.393695Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039191;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:30.399758Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039200;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:30.400069Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039210;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:30.406601Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:30.407377Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039261;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:30.413450Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039204;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:30.413450Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039208;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:30.420638Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039211;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:30.420697Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:30.427346Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039236;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:30.427346Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039196;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:30.434558Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039207;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:30.434717Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039186;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:30.441444Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039201;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:30.441444Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039214;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:30.542416Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:30.625907Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpeybd43s58eqwxjy4mgxt4", SessionId: ydb://session/3?node_id=1&id=MTc3NWYzMDItMjFjNTUwYjItY2M2MTNjZWEtZmYzNzhlNDE=, Slow query, duration: 37.981367s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:01:30.894550Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:01:30.894954Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:01:30.895719Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;self_id=[1:7488004386429406002:11751];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039392;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039094;receive=72075186224038933; 2025-03-31T16:01:30.895990Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> TestKinesisHttpProxy::UnauthorizedGetShardIteratorRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnEmptyTopicName >> TestYmqHttpProxy::TestReceiveMessage >> TestKinesisHttpProxy::MissingAction [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::RacyAlterTableAndConditionalErase [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:01:25.579653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:01:25.579767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:01:25.579807Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:01:25.579840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:01:25.579880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:01:25.579933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:01:25.579993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:01:25.580167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:01:25.580547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:01:25.714070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:01:25.714135Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:01:25.723525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:01:25.723631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:01:25.723795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:01:25.728984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:01:25.729235Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:01:25.729903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:25.730100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:01:25.736716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:25.737982Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:25.738049Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:25.738194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:01:25.738238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:25.738280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:01:25.738441Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:01:25.745657Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:01:25.914585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 
72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:01:25.914802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:25.915055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:01:25.915385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:01:25.915444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:25.918129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:25.918273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:01:25.918489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:25.918541Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:01:25.918578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:01:25.918611Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:01:25.920729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:25.920788Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:01:25.920827Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:01:25.922764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:25.922812Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:25.922855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:25.922929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:01:25.927064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:01:25.929435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:01:25.929631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 
2025-03-31T16:01:25.930665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:25.930829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:01:25.930914Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:25.931246Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:01:25.931314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:25.931480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:01:25.931558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:01:25.936475Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:25.936524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:25.936711Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:25.936774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:01:25.936976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:25.937021Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:01:25.937122Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:01:25.937158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:25.937194Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:01:25.937225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:25.937259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:01:25.937301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:25.937338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:01:25.937364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:01:25.937457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:01:25.937501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:01:25.937547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 
2025-03-31T16:01:25.939690Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:01:25.939818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:01:25.939858Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 251 } } 2025-03-31T16:01:49.280870Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046678944 2025-03-31T16:01:49.281004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectProposeTransactionResults accept TEvProposeTransactionResult, shard: 72075186233409546, shardIdx: 72057594046678944:1, operationId: 102:0, left await: 0, at schemeshard: 72057594046678944 2025-03-31T16:01:49.281048Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 3 -> 128 2025-03-31T16:01:49.283889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:01:49.284174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:01:49.284247Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId# 102:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:01:49.284355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2025-03-31T16:01:49.284562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:01:49.300153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-03-31T16:01:49.300352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2025-03-31T16:01:49.301182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:49.301336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:01:49.301402Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId# 102:0 HandleReply TEvOperationPlan, operationId: 102:0, stepId: 5000003, at schemeshard: 72057594046678944 2025-03-31T16:01:49.301761Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2025-03-31T16:01:49.301932Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-03-31T16:01:49.335559Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:49.335634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T16:01:49.336088Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:49.336161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-03-31T16:01:49.336670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:01:49.336791Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2025-03-31T16:01:49.337909Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:01:49.338058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:01:49.338111Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-03-31T16:01:49.338181Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2025-03-31T16:01:49.338237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-31T16:01:49.338344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 102 2025-03-31T16:01:49.352433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1717 } } 2025-03-31T16:01:49.352520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-03-31T16:01:49.352716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1717 } } 2025-03-31T16:01:49.352850Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 
TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1717 } } 2025-03-31T16:01:49.354051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 305 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-03-31T16:01:49.354119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-03-31T16:01:49.354297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 305 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-03-31T16:01:49.354360Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-03-31T16:01:49.354513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 305 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-03-31T16:01:49.354591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:49.354640Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:01:49.354689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-03-31T16:01:49.354738Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-03-31T16:01:49.375754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-31T16:01:49.376242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:01:49.376695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:01:49.377117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:01:49.377178Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-03-31T16:01:49.377329Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-31T16:01:49.377379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T16:01:49.377427Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-31T16:01:49.377485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T16:01:49.377533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-03-31T16:01:49.377629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:333:2312] message: TxId: 102 2025-03-31T16:01:49.377694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 
2025-03-31T16:01:49.377765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-03-31T16:01:49.377807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-03-31T16:01:49.377957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-31T16:01:49.390645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-31T16:01:49.390737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:614:2569] TestWaitNotification: OK eventTxId 102 2025-03-31T16:01:49.391340Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2025-03-31T16:01:49.391430Z node 1 :FLAT_TX_SCHEMESHARD ERROR: Unsuccessful conditional erase: tabletId: 72075186233409546, status: SCHEME_ERROR, error: Schema version mismatch: got 1, expected 2, retry after: 300.000000s, at schemeshard: 72057594046678944 2025-03-31T16:01:49.404325Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2025-03-31T16:01:49.404529Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-03-31T16:01:49.404592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:1, run at: 1970-01-01T00:06:00.037500Z, at schemeshard: 72057594046678944 2025-03-31T16:01:49.404678Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 >> TestYmqHttpProxy::TestCreateQueueWithSameNameAndDifferentParams >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKUtf8 >> TestKinesisHttpProxy::GoodRequestPutRecords ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' is not created, Marker# PQ94" ErrorCode: UNKNOWN_TOPIC } 2025-03-31T16:01:48.887376Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-03-31T16:01:48.899327Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-03-31T16:01:48.899751Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] doesn't have tx info 2025-03-31T16:01:48.899827Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-31T16:01:48.899880Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-03-31T16:01:48.899928Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0 2025-03-31T16:01:48.899973Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:01:48.900040Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] doesn't have tx writes info 2025-03-31T16:01:48.906526Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:261:2253], now have 1 active actors on pipe 2025-03-31T16:01:48.906686Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-03-31T16:01:48.940246Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Config update version 1(current 0) received from actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-03-31T16:01:48.949231Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-03-31T16:01:48.949386Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:01:48.956116Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] Config applied version 1 actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-03-31T16:01:48.956352Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-03-31T16:01:48.956810Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-31T16:01:48.957185Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] bootstrapping 0 [2:269:2259] 2025-03-31T16:01:48.959728Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Initializing completed. 
2025-03-31T16:01:48.959838Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:269:2259] 2025-03-31T16:01:48.959898Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-31T16:01:48.959946Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-03-31T16:01:48.960893Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:272:2261], now have 1 active actors on pipe 2025-03-31T16:01:49.089228Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Handle TEvInterconnect::TEvNodeInfo 2025-03-31T16:01:49.106425Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-03-31T16:01:49.106801Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] doesn't have tx info 2025-03-31T16:01:49.106854Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-31T16:01:49.106892Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] no config, start with empty partitions and default config 2025-03-31T16:01:49.106949Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Txs.size=0, PlannedTxs.size=0 2025-03-31T16:01:49.106999Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:01:49.107057Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928137] doesn't have tx writes info 2025-03-31T16:01:49.107759Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] server connected, pipe [2:405:2360], now have 1 active actors on pipe 2025-03-31T16:01:49.107942Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Handle TEvPersQueue::TEvUpdateConfig 2025-03-31T16:01:49.108135Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Config update version 2(current 0) received from actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-03-31T16:01:49.120761Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-03-31T16:01:49.120924Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:01:49.121872Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928137] Config applied version 2 actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-03-31T16:01:49.122015Z node 2 :PERSQUEUE DEBUG: 
[rt3.dc1--topic2:0:Initializer] Start initializing step TInitConfigStep 2025-03-31T16:01:49.122506Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-31T16:01:49.122764Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928137, Partition: 0, State: StateInit] bootstrapping 0 [2:413:2366] 2025-03-31T16:01:49.124922Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:0:Initializer] Initializing completed. 2025-03-31T16:01:49.124999Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928137, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 0 generation 2 [2:413:2366] 2025-03-31T16:01:49.125056Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-31T16:01:49.125131Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-03-31T16:01:49.125987Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] server connected, pipe [2:416:2368], now have 1 active actors on pipe 2025-03-31T16:01:49.148647Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvInterconnect::TEvNodeInfo 2025-03-31T16:01:49.154130Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-03-31T16:01:49.154530Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] doesn't have tx info 2025-03-31T16:01:49.154629Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-31T16:01:49.154667Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] no config, start with empty partitions and default config 2025-03-31T16:01:49.154734Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Txs.size=0, PlannedTxs.size=0 2025-03-31T16:01:49.154789Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:01:49.154854Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928138] doesn't have tx writes info 2025-03-31T16:01:49.155595Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [2:465:2405], now have 1 active actors on pipe 2025-03-31T16:01:49.155724Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2025-03-31T16:01:49.155939Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Config update version 3(current 0) received from actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-03-31T16:01:49.158804Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-03-31T16:01:49.158979Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:01:49.159971Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928138] Config 
applied version 3 actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-03-31T16:01:49.160121Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitConfigStep 2025-03-31T16:01:49.160525Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-31T16:01:49.160762Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] bootstrapping 1 [2:473:2411] 2025-03-31T16:01:49.163036Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Initializing completed. 2025-03-31T16:01:49.163130Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 1 generation 2 [2:473:2411] 2025-03-31T16:01:49.163185Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-31T16:01:49.163244Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateIdle] Proc ... edTxs.size=0 2025-03-31T16:01:50.320286Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:01:50.320344Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-03-31T16:01:50.321089Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:521:2446], now have 1 active actors on pipe 2025-03-31T16:01:50.321154Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-03-31T16:01:50.321352Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Config update version 8(current 0) received from actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 8 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-03-31T16:01:50.326983Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 8 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-03-31T16:01:50.327171Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:01:50.328022Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139] Config applied version 8 actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 8 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-03-31T16:01:50.328151Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] 
Start initializing step TInitConfigStep 2025-03-31T16:01:50.328511Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-31T16:01:50.328727Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [3:529:2452] 2025-03-31T16:01:50.330683Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-03-31T16:01:50.330751Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [3:529:2452] 2025-03-31T16:01:50.330806Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-31T16:01:50.330852Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. Count 0 2025-03-31T16:01:50.331991Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:532:2454], now have 1 active actors on pipe 2025-03-31T16:01:50.333147Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [3:538:2457], now have 1 active actors on pipe 2025-03-31T16:01:50.333292Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [3:539:2458], now have 1 active actors on pipe 2025-03-31T16:01:50.333498Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-03-31T16:01:50.333678Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-03-31T16:01:50.333724Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:540:2458], now have 1 active actors on pipe 2025-03-31T16:01:50.333907Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-03-31T16:01:50.346771Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:548:2465], now have 1 active actors on pipe 2025-03-31T16:01:50.375770Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-03-31T16:01:50.380503Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-03-31T16:01:50.380894Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-03-31T16:01:50.380947Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-31T16:01:50.381093Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-03-31T16:01:50.382245Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:01:50.382304Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-03-31T16:01:50.382435Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-03-31T16:01:50.382906Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-31T16:01:50.383217Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [3:605:2510] 2025-03-31T16:01:50.386733Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2025-03-31T16:01:50.388333Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2025-03-31T16:01:50.388641Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2025-03-31T16:01:50.388946Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2025-03-31T16:01:50.389228Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2025-03-31T16:01:50.389274Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-03-31T16:01:50.389315Z node 3 :PERSQUEUE INFO: [rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-31T16:01:50.389358Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-03-31T16:01:50.389415Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [3:605:2510] 2025-03-31T16:01:50.389472Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-31T16:01:50.391015Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. 
Count 0 2025-03-31T16:01:50.392236Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server disconnected, pipe [3:539:2458] destroyed 2025-03-31T16:01:50.392309Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server disconnected, pipe [3:538:2457] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetPartitionStatusResult { TopicResult { Topic: "rt3.dc1--topic2" PartitionResult { Partition: 1 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 50000000 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 77 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 77 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 50000000 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 } Generation: 2 Cookie: 1 ScaleStatus: NORMAL } PartitionResult { Partition: 2 Status: STATUS_UNKNOWN } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic1" PartitionResult { Partition: 0 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 50000000 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 38 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 38 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 50000000 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 } Generation: 2 Cookie: 1 ScaleStatus: NORMAL } ErrorCode: OK } } } >> KqpJoinOrder::FiveWayJoinWithPreds+ColumnStore [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnEmptyTopicName [GOOD] >> TestKinesisHttpProxy::TestRequestBadJson >> KqpJoinOrder::CanonizedJoinOrderTPCH17 [GOOD] >> TestKinesisHttpProxy::TestRequestWithWrongRegion >> TestKinesisHttpProxy::PutRecordsWithLongExplicitHashKey ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] Test command err: Assert failed: Check response: { Status: 128 
ErrorReason: "no path \'Root/PQ\', Marker# PQ150" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' is not created, Marker# PQ94" ErrorCode: UNKNOWN_TOPIC } 2025-03-31T16:01:48.985063Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-03-31T16:01:49.005371Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-03-31T16:01:49.005794Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] doesn't have tx info 2025-03-31T16:01:49.005866Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-31T16:01:49.005919Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-03-31T16:01:49.005981Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0 2025-03-31T16:01:49.006051Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:01:49.006139Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928037] doesn't have tx writes info 2025-03-31T16:01:49.007028Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [3:260:2252], now have 1 active actors on pipe 2025-03-31T16:01:49.007138Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-03-31T16:01:49.052887Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Config update version 1(current 0) received from actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-03-31T16:01:49.064670Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-03-31T16:01:49.064845Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:01:49.065944Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928037] Config applied version 1 actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-03-31T16:01:49.066159Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-03-31T16:01:49.066701Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-31T16:01:49.067098Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] bootstrapping 0 [3:268:2258] 2025-03-31T16:01:49.070050Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Initializing completed. 
2025-03-31T16:01:49.070140Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [3:268:2258] 2025-03-31T16:01:49.070213Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-31T16:01:49.070278Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-03-31T16:01:49.075572Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [3:271:2260], now have 1 active actors on pipe 2025-03-31T16:01:49.258162Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Handle TEvInterconnect::TEvNodeInfo 2025-03-31T16:01:49.272290Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-03-31T16:01:49.272742Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] doesn't have tx info 2025-03-31T16:01:49.272811Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-31T16:01:49.272873Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] no config, start with empty partitions and default config 2025-03-31T16:01:49.272941Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Txs.size=0, PlannedTxs.size=0 2025-03-31T16:01:49.273010Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:01:49.273091Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928137] doesn't have tx writes info 2025-03-31T16:01:49.273920Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] server connected, pipe [3:402:2357], now have 1 active actors on pipe 2025-03-31T16:01:49.274047Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Handle TEvPersQueue::TEvUpdateConfig 2025-03-31T16:01:49.274284Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Config update version 2(current 0) received from actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-03-31T16:01:49.281170Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-03-31T16:01:49.281354Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:01:49.282378Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928137] Config applied version 2 actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-03-31T16:01:49.282569Z node 3 :PERSQUEUE DEBUG: 
[rt3.dc1--topic2:0:Initializer] Start initializing step TInitConfigStep 2025-03-31T16:01:49.283041Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-31T16:01:49.283334Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928137, Partition: 0, State: StateInit] bootstrapping 0 [3:410:2363] 2025-03-31T16:01:49.285830Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:0:Initializer] Initializing completed. 2025-03-31T16:01:49.285910Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928137, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 0 generation 2 [3:410:2363] 2025-03-31T16:01:49.295321Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-31T16:01:49.295444Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-03-31T16:01:49.296615Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] server connected, pipe [3:413:2365], now have 1 active actors on pipe 2025-03-31T16:01:49.321787Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvInterconnect::TEvNodeInfo 2025-03-31T16:01:49.327190Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-03-31T16:01:49.327653Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] doesn't have tx info 2025-03-31T16:01:49.327721Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-31T16:01:49.327777Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] no config, start with empty partitions and default config 2025-03-31T16:01:49.327850Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Txs.size=0, PlannedTxs.size=0 2025-03-31T16:01:49.327910Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:01:49.327987Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928138] doesn't have tx writes info 2025-03-31T16:01:49.328825Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [3:462:2402], now have 1 active actors on pipe 2025-03-31T16:01:49.328980Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2025-03-31T16:01:49.329204Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Config update version 3(current 0) received from actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-03-31T16:01:49.332342Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-03-31T16:01:49.332516Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:01:49.333366Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928138] Config 
applied version 3 actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-03-31T16:01:49.333499Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitConfigStep 2025-03-31T16:01:49.333898Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-31T16:01:49.334169Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] bootstrapping 1 [3:470:2408] 2025-03-31T16:01:49.336846Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Initializing completed. 2025-03-31T16:01:49.336944Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 1 generation 2 [3:470:2408] 2025-03-31T16:01:49.337038Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 b ... 38] doesn't have tx info 2025-03-31T16:01:50.392214Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-31T16:01:50.392259Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] no config, start with empty partitions and default config 2025-03-31T16:01:50.392309Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Txs.size=0, PlannedTxs.size=0 2025-03-31T16:01:50.392365Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:01:50.392441Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928138] doesn't have tx writes info 2025-03-31T16:01:50.393286Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [4:467:2407], now have 1 active actors on pipe 2025-03-31T16:01:50.393357Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2025-03-31T16:01:50.393568Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Config update version 7(current 0) received from actor [4:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 7 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-03-31T16:01:50.397322Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 7 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-03-31T16:01:50.397483Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:01:50.398283Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928138] Config applied version 7 actor [4:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 
SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 7 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-03-31T16:01:50.398430Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitConfigStep 2025-03-31T16:01:50.398827Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-31T16:01:50.399057Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] bootstrapping 1 [4:475:2413] 2025-03-31T16:01:50.403248Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Initializing completed. 2025-03-31T16:01:50.403354Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 1 generation 2 [4:475:2413] 2025-03-31T16:01:50.403440Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-31T16:01:50.403507Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateIdle] Process pending events. Count 0 2025-03-31T16:01:50.404612Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [4:478:2415], now have 1 active actors on pipe 2025-03-31T16:01:50.431858Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-03-31T16:01:50.437828Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-03-31T16:01:50.438235Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-03-31T16:01:50.438301Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-31T16:01:50.438357Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] no config, start with empty partitions and default config 2025-03-31T16:01:50.438415Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-03-31T16:01:50.438482Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:01:50.438551Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-03-31T16:01:50.439350Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [4:527:2452], now have 1 active actors on pipe 2025-03-31T16:01:50.439475Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-03-31T16:01:50.439703Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Config update version 8(current 0) received from actor [4:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 8 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-03-31T16:01:50.442583Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 8 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 
2025-03-31T16:01:50.442800Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:01:50.443876Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139] Config applied version 8 actor [4:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 8 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-03-31T16:01:50.444060Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-03-31T16:01:50.444460Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-31T16:01:50.444749Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [4:535:2458] 2025-03-31T16:01:50.447413Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-03-31T16:01:50.447553Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [4:535:2458] 2025-03-31T16:01:50.447624Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-31T16:01:50.447697Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. Count 0 2025-03-31T16:01:50.448654Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [4:538:2460], now have 1 active actors on pipe 2025-03-31T16:01:50.450428Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [4:546:2463], now have 1 active actors on pipe 2025-03-31T16:01:50.450551Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [4:547:2464], now have 1 active actors on pipe 2025-03-31T16:01:50.450671Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [4:548:2464], now have 1 active actors on pipe 2025-03-31T16:01:50.462510Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [4:553:2468], now have 1 active actors on pipe 2025-03-31T16:01:50.490085Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-03-31T16:01:50.494028Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-03-31T16:01:50.494426Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-03-31T16:01:50.494488Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-31T16:01:50.494639Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-03-31T16:01:50.495800Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:01:50.495893Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-03-31T16:01:50.496026Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-03-31T16:01:50.496437Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-31T16:01:50.496697Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [4:610:2513] 2025-03-31T16:01:50.498972Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2025-03-31T16:01:50.500520Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2025-03-31T16:01:50.500848Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2025-03-31T16:01:50.501230Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2025-03-31T16:01:50.501486Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2025-03-31T16:01:50.501543Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-03-31T16:01:50.501616Z node 4 :PERSQUEUE INFO: [rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-31T16:01:50.501663Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-03-31T16:01:50.501724Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [4:610:2513] 2025-03-31T16:01:50.501805Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-31T16:01:50.501861Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. 
Count 0 2025-03-31T16:01:50.502852Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server disconnected, pipe [4:547:2464] destroyed 2025-03-31T16:01:50.502934Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server disconnected, pipe [4:546:2463] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetPartitionLocationsResult { TopicResult { Topic: "rt3.dc1--topic1" PartitionLocation { Partition: 0 Host: "::1" HostId: 4 ErrorCode: OK } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic2" PartitionLocation { Partition: 1 Host: "::1" HostId: 4 ErrorCode: OK } PartitionLocation { Partition: 2 ErrorCode: INITIALIZING ErrorReason: "Tablet for that partition is not running" } ErrorCode: OK } } } |72.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut |72.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut |72.2%| [LD] {RESULT} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnEmptyTopicName [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "no path \'/Root/PQ/\', Marker# PQ17" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' has no balancer, Marker# PQ193" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "empty topic in GetReadSessionsInfo request" ErrorCode: BAD_REQUEST } >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNoClientSpecified >> TNodeBrokerTest::TestRandomActions [GOOD] >> DataShardVolatile::DistributedWriteThenImmediateUpsert [GOOD] >> DataShardVolatile::DistributedWriteThenSplit >> TColumnShardTestReadWrite::CompactionInGranule_PKString_Reboot >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNoClientSpecified [GOOD] >> JsonProtoConversion::JsonToProtoArray [GOOD] >> JsonProtoConversion::NlohmannJsonToProtoArray >> JsonProtoConversion::NlohmannJsonToProtoArray [GOOD] |72.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> LabeledDbCounters::OneTablet [GOOD] >> LabeledDbCounters::OneTabletRemoveCounters ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::TestRandomActions [GOOD] Test command err: 2025-03-31T15:59:30.904522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T15:59:30.904604Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:31.017399Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2025-03-31T15:59:31.017947Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2025-03-31T15:59:31.833605Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2025-03-31T15:59:32.290035Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2025-03-31T15:59:32.290450Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2025-03-31T15:59:32.291141Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2025-03-31T15:59:32.756035Z node 1 :NODE_BROKER ERROR: Cannot extend lease for 
node #1024: WRONG_REQUEST: Unknown node 2025-03-31T15:59:32.771191Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-03-31T15:59:32.772075Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-03-31T15:59:32.812687Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2025-03-31T15:59:32.826534Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2025-03-31T15:59:32.884739Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1031: WRONG_REQUEST: Unknown node 2025-03-31T15:59:34.192855Z node 1 :NODE_BROKER ERROR: Cannot register node host5:4: ERROR_TEMP: No free node IDs 2025-03-31T15:59:34.193327Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1028: WRONG_REQUEST: Node has expired 2025-03-31T15:59:34.207263Z node 1 :NODE_BROKER ERROR: Cannot register node host2:1: ERROR_TEMP: No free node IDs 2025-03-31T15:59:34.207909Z node 1 :NODE_BROKER ERROR: Cannot register node host7:6: ERROR_TEMP: No free node IDs 2025-03-31T15:59:34.257088Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1028: WRONG_REQUEST: Node has expired 2025-03-31T15:59:34.273714Z node 1 :NODE_BROKER ERROR: Cannot register node host13:12: ERROR_TEMP: No free node IDs 2025-03-31T15:59:34.311593Z node 1 :NODE_BROKER ERROR: Cannot register node host13:12: ERROR_TEMP: No free node IDs 2025-03-31T15:59:34.363053Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1028: WRONG_REQUEST: Node has expired 2025-03-31T15:59:34.364644Z node 1 :NODE_BROKER ERROR: Cannot register node host12:11: ERROR_TEMP: No free node IDs 2025-03-31T15:59:34.379397Z node 1 :NODE_BROKER ERROR: Cannot register node host5:4: ERROR_TEMP: No free node IDs 2025-03-31T15:59:34.380638Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1028: WRONG_REQUEST: Node has expired 2025-03-31T15:59:35.109768Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2025-03-31T15:59:35.110281Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2025-03-31T15:59:35.718353Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-03-31T15:59:35.719990Z node 1 :NODE_BROKER ERROR: Cannot register node host1:0: ERROR_TEMP: No free node IDs 2025-03-31T15:59:35.720698Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-03-31T15:59:35.721471Z node 1 :NODE_BROKER ERROR: Cannot register node host6:5: ERROR_TEMP: No free node IDs 2025-03-31T15:59:35.738071Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-03-31T15:59:35.738633Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-03-31T15:59:35.739155Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-03-31T15:59:35.739575Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-03-31T15:59:35.742666Z node 1 :NODE_BROKER ERROR: Cannot register node host1:0: ERROR_TEMP: No free node IDs 2025-03-31T15:59:35.743538Z node 1 :NODE_BROKER ERROR: Cannot register node host6:5: ERROR_TEMP: No free node IDs 2025-03-31T15:59:35.744567Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-03-31T15:59:35.745204Z node 1 :NODE_BROKER ERROR: Cannot 
register node host12:11: ERROR_TEMP: No free node IDs 2025-03-31T15:59:35.747612Z node 1 :NODE_BROKER ERROR: Cannot register node host1:0: ERROR_TEMP: No free node IDs 2025-03-31T15:59:35.747995Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-03-31T15:59:35.749105Z node 1 :NODE_BROKER ERROR: Cannot register node host8:7: ERROR_TEMP: No free node IDs 2025-03-31T15:59:35.749621Z node 1 :NODE_BROKER ERROR: Cannot register node host3:2: ERROR_TEMP: No free node IDs 2025-03-31T15:59:35.750253Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-03-31T15:59:35.752090Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-03-31T15:59:35.752578Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-03-31T15:59:35.753273Z node 1 :NODE_BROKER ERROR: Cannot register node host1:0: ERROR_TEMP: No free node IDs 2025-03-31T15:59:35.889946Z node 1 :NODE_BROKER ERROR: Cannot register node host12:11: ERROR_TEMP: No free node IDs 2025-03-31T15:59:36.446365Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-03-31T15:59:36.487898Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-03-31T15:59:36.488953Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-03-31T15:59:36.489477Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-03-31T15:59:36.489936Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-03-31T15:59:36.522545Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-03-31T15:59:36.523171Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-03-31T15:59:36.524842Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-03-31T15:59:37.144336Z node 1 :NODE_BROKER ERROR: Cannot register node host12:11: ERROR_TEMP: No free node IDs 2025-03-31T15:59:37.213777Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Node has expired 2025-03-31T15:59:37.247731Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Node has expired 2025-03-31T15:59:37.248316Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Node has expired 2025-03-31T15:59:38.015845Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2025-03-31T15:59:38.052771Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2025-03-31T15:59:38.232106Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2025-03-31T15:59:38.928728Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2025-03-31T15:59:39.036717Z node 1 :NODE_BROKER ERROR: Cannot register node host4:3: ERROR_TEMP: No free node IDs 2025-03-31T15:59:39.054340Z node 1 :NODE_BROKER ERROR: Cannot register node host15:14: ERROR_TEMP: No free node IDs 2025-03-31T15:59:39.111578Z node 1 :NODE_BROKER ERROR: Cannot register node host9:8: ERROR_TEMP: No free node IDs 2025-03-31T15:59:39.129764Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2025-03-31T15:59:39.130327Z node 1 :NODE_BROKER ERROR: Cannot 
extend lease for node #0: WRONG_REQUEST: Unknown node 2025-03-31T15:59:39.913481Z node 1 :NODE_BROKER ERROR: Cannot register node host3:2: ERROR_TEMP: No free node IDs 2025-03-31T15:59:39.983170Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Node has expired 2025-03-31T15:59:40.028938Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Node has expired 2025-03-31T15:59:40.059716Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Node has expired 2025-03-31T15:59:40.060722Z node 1 :NODE_BROKER ERROR: Cannot register node host15:14: ERROR_TEMP: No free node IDs 2025-03-31T15:59:40.061677Z node 1 :NODE_BROKER ERROR: Cannot register node host9:8: ERROR_TEMP: No free node IDs 2025-03-31T15:59:40.062335Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Node has expired 2025-03-31T15:59:40.099392Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Node has expired 2025-03-31T15:59:40.103320Z node 1 :NODE_BROKER ERROR: Cannot register node host10:9: ERROR_TEMP: No free node IDs 2025-03-31T15:59:40.103995Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Node has expired 2025-03-31T15:59:40.104647Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Node has expired 2025-03-31T15:59:40.107924Z node 1 :NODE_BROKER ERROR: Cannot register node host9:8: ERROR_TEMP: No free node IDs 2025-03-31T15:59:40.108553Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Node has expired 2025-03-31T15:59:40.124896Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Node has expired 2025-03-31T15:59:40.832615Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2025-03-31T15:59:40.861945Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2025-03-31T15:59:40.880997Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2025-03-31T15:59:40.881813Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2025-03-31T15:59:40.882402Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2025-03-31T15:59:40.906505Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2025-03-31T15:59:40.978031Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node 2025-03-31T15:59:40.978637Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node 2025-03-31T15:59:40.981554Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node 2025-03-31T15:59:40.982308Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node 2025-03-31T15:59:41.458669Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2025-03-31T15:59:41.459522Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2025-03-31T15:59:41.484421Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2025-03-31T15:59:41.512961Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2025-03-31T15:59:41.513686Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2025-03-31T15:59:41.514271Z node 1 
:NODE_BROKER ERROR: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2025-03-31T15:59:41.514866Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2025-03-31T15:59:41.517887Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2025-03-31T15:59:41.556975Z node 1 :NODE_BROKER ERROR: ... OKER ERROR: Cannot register node host12:11: ERROR_TEMP: No free node IDs 2025-03-31T16:01:38.655958Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2025-03-31T16:01:38.657951Z node 1 :NODE_BROKER ERROR: Cannot register node host13:12: ERROR_TEMP: No free node IDs 2025-03-31T16:01:38.662651Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2025-03-31T16:01:38.670361Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2025-03-31T16:01:38.672771Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2025-03-31T16:01:38.674991Z node 1 :NODE_BROKER ERROR: Cannot register node host10:9: ERROR_TEMP: No free node IDs 2025-03-31T16:01:38.679967Z node 1 :NODE_BROKER ERROR: Cannot register node host12:11: ERROR_TEMP: No free node IDs 2025-03-31T16:01:38.682425Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2025-03-31T16:01:38.684610Z node 1 :NODE_BROKER ERROR: Cannot register node host15:14: ERROR_TEMP: No free node IDs 2025-03-31T16:01:38.686506Z node 1 :NODE_BROKER ERROR: Cannot register node host4:3: ERROR_TEMP: No free node IDs 2025-03-31T16:01:38.691678Z node 1 :NODE_BROKER ERROR: Cannot register node host7:6: ERROR_TEMP: No free node IDs 2025-03-31T16:01:38.696062Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2025-03-31T16:01:38.698358Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2025-03-31T16:01:38.700983Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2025-03-31T16:01:38.705362Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2025-03-31T16:01:39.493170Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1031: WRONG_REQUEST: Unknown node 2025-03-31T16:01:39.499268Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1031: WRONG_REQUEST: Unknown node 2025-03-31T16:01:39.545990Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1031: WRONG_REQUEST: Unknown node 2025-03-31T16:01:39.555434Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1031: WRONG_REQUEST: Unknown node 2025-03-31T16:01:39.682967Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1031: WRONG_REQUEST: Unknown node 2025-03-31T16:01:39.685901Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1031: WRONG_REQUEST: Unknown node 2025-03-31T16:01:39.788517Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1032: WRONG_REQUEST: Unknown node 2025-03-31T16:01:39.826040Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node 2025-03-31T16:01:39.830976Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node 2025-03-31T16:01:40.914947Z node 1 :NODE_BROKER ERROR: Cannot register node host12:11: ERROR_TEMP: No free node IDs 2025-03-31T16:01:40.931068Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Node has 
expired
2025-03-31T16:01:40.934132Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired
2025-03-31T16:01:40.936814Z node 1 :NODE_BROKER ERROR: Cannot register node host2:1: ERROR_TEMP: No free node IDs
2025-03-31T16:01:40.951075Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired
2025-03-31T16:01:40.956800Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired
2025-03-31T16:01:40.980319Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired
2025-03-31T16:01:40.982615Z node 1 :NODE_BROKER ERROR: Cannot register node host1:0: ERROR_TEMP: No free node IDs
2025-03-31T16:01:40.985441Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired
2025-03-31T16:01:41.529084Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node
2025-03-31T16:01:41.531876Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node
2025-03-31T16:01:41.589925Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node
2025-03-31T16:01:42.127824Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node
2025-03-31T16:01:42.131489Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node
2025-03-31T16:01:42.136760Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node
2025-03-31T16:01:42.140860Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node
2025-03-31T16:01:42.181591Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node
2025-03-31T16:01:42.189417Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node
2025-03-31T16:01:42.466236Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node
2025-03-31T16:01:42.472083Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node
2025-03-31T16:01:42.508498Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1031: WRONG_REQUEST: Unknown node
2025-03-31T16:01:42.511740Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1031: WRONG_REQUEST: Unknown node
2025-03-31T16:01:42.570973Z node 1 :NODE_BROKER ERROR: Cannot register node host1:0: ERROR_TEMP: No free node IDs
2025-03-31T16:01:42.592444Z node 1 :NODE_BROKER ERROR: Cannot register node host9:8: ERROR_TEMP: No free node IDs
2025-03-31T16:01:42.649944Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1033: WRONG_REQUEST: Node has expired
2025-03-31T16:01:42.677733Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1033: WRONG_REQUEST: Node has expired
2025-03-31T16:01:42.823420Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1033: WRONG_REQUEST: Node has expired
2025-03-31T16:01:42.851104Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1033: WRONG_REQUEST: Node has expired
2025-03-31T16:01:42.882810Z node 1 :NODE_BROKER ERROR: Cannot register node host1:0: ERROR_TEMP: No free node IDs
2025-03-31T16:01:42.886142Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1033: WRONG_REQUEST: Node has expired
2025-03-31T16:01:42.907496Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1033: WRONG_REQUEST: Node has expired
2025-03-31T16:01:43.084192Z node 1 :NODE_BROKER ERROR: Cannot register node host4:3: ERROR_TEMP: No free node IDs
2025-03-31T16:01:43.254140Z node 1 :NODE_BROKER ERROR: Cannot register node host3:2: ERROR_TEMP: No free node IDs
2025-03-31T16:01:44.230644Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node
2025-03-31T16:01:44.338556Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node
2025-03-31T16:01:44.379971Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node
2025-03-31T16:01:44.394487Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node
2025-03-31T16:01:45.444015Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node
2025-03-31T16:01:45.946378Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node
2025-03-31T16:01:45.953765Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node
2025-03-31T16:01:45.990246Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node
2025-03-31T16:01:46.033013Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node
2025-03-31T16:01:46.049343Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node
2025-03-31T16:01:46.061122Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node
2025-03-31T16:01:46.087237Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node
2025-03-31T16:01:46.196750Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node
2025-03-31T16:01:46.411535Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1031: WRONG_REQUEST: Unknown node
2025-03-31T16:01:47.161465Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1031: WRONG_REQUEST: Unknown node
2025-03-31T16:01:47.351571Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1032: WRONG_REQUEST: Unknown node
2025-03-31T16:01:47.354510Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1032: WRONG_REQUEST: Unknown node
2025-03-31T16:01:47.968369Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1032: WRONG_REQUEST: Unknown node
2025-03-31T16:01:47.974518Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1032: WRONG_REQUEST: Unknown node
2025-03-31T16:01:47.977032Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1032: WRONG_REQUEST: Unknown node
2025-03-31T16:01:47.981615Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1032: WRONG_REQUEST: Unknown node
2025-03-31T16:01:49.275666Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node
2025-03-31T16:01:49.315688Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node
2025-03-31T16:01:49.326965Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node
2025-03-31T16:01:49.391890Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node
2025-03-31T16:01:49.511808Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node
2025-03-31T16:01:49.521553Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node
2025-03-31T16:01:49.532190Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node
2025-03-31T16:01:49.543836Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node
2025-03-31T16:01:49.547359Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node
2025-03-31T16:01:49.550626Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node
2025-03-31T16:01:49.608526Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1031: WRONG_REQUEST: Unknown node
2025-03-31T16:01:49.632543Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1031: WRONG_REQUEST: Unknown node
2025-03-31T16:01:49.728158Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1032: WRONG_REQUEST: Unknown node
2025-03-31T16:01:49.731739Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1032: WRONG_REQUEST: Unknown node
2025-03-31T16:01:49.811874Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node
2025-03-31T16:01:49.852035Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1029: WRONG_REQUEST: Node has expired
2025-03-31T16:01:50.556833Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node
2025-03-31T16:01:50.565233Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node
2025-03-31T16:01:50.572184Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node
2025-03-31T16:01:50.631969Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node
2025-03-31T16:01:50.657855Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node
2025-03-31T16:01:50.701842Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNoClientSpecified [GOOD]
Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "no path \'Root/PQ\', Marker# PQ150" ErrorCode: UNKNOWN_TOPIC }
Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' is not created, Marker# PQ94" ErrorCode: UNKNOWN_TOPIC }
Assert failed: Check response: { Status: 128 ErrorReason: "No clientId specified in CmdGetReadSessionsInfo" ErrorCode: BAD_REQUEST }
|72.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::JsonToProtoArray [GOOD]
>> JsonProtoConversion::ProtoMapToJson [GOOD]
|72.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::NlohmannJsonToProtoArray [GOOD]
>> JsonProtoConversion::JsonToProtoMap [GOOD]
|72.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut
|72.2%| [LD] {RESULT} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut
|72.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut
|72.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::ProtoMapToJson [GOOD]
|72.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::JsonToProtoMap [GOOD]
>> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain
>> TSchemeShardColumnTableTTL::CreateColumnTable [GOOD]
>> RetryPolicy::TWriteSession_SwitchBackToLocalCluster [GOOD]
>> RetryPolicy::TWriteSession_SeqNoShift
|72.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/cms/ut/ydb-core-cms-ut
|72.2%| [LD] {RESULT} $(B)/ydb/core/cms/ut/ydb-core-cms-ut
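The NODE_BROKER errors above show the two failure modes a dynamic node hits in this test: its lease can no longer be extended (WRONG_REQUEST: Node has expired / Unknown node), and re-registration can fail while the ID pool is exhausted (ERROR_TEMP: No free node IDs). A minimal sketch of the usual client-side recovery loop follows; `TNodeBrokerClient`, `ExtendLease`, and `RegisterNode` are hypothetical stand-ins for the real RPC interface, not the actual YDB API:

```cpp
#include <chrono>
#include <cstdint>
#include <string>
#include <thread>

// Hypothetical status codes mirroring the broker replies seen in the log.
enum class EStatus {
    Ok,
    WrongRequest,  // "Node has expired" / "Unknown node"
    ErrorTemp      // "No free node IDs"
};

// Stand-in client; the real node broker is driven over RPC. The stubs
// below just succeed so the sketch compiles and runs.
struct TNodeBrokerClient {
    EStatus ExtendLease(uint32_t /*nodeId*/) { return EStatus::Ok; }
    EStatus RegisterNode(const std::string& /*host*/, uint16_t /*port*/, uint32_t& nodeId) {
        nodeId = 1025;  // dynamic node IDs in the log start at 1024 + 1
        return EStatus::Ok;
    }
};

// Keep a dynamic node registered: extend the lease while the broker still
// knows the node, and fall back to a fresh registration once it reports
// WRONG_REQUEST. ERROR_TEMP is retried after a delay, since node IDs are
// freed only when expired leases are cleaned up.
void KeepNodeAlive(TNodeBrokerClient& broker, const std::string& host, uint16_t port) {
    uint32_t nodeId = 0;
    bool registered = false;
    while (true) {  // daemon-style loop; never returns in this sketch
        const EStatus st = registered
            ? broker.ExtendLease(nodeId)
            : broker.RegisterNode(host, port, nodeId);
        if (st == EStatus::WrongRequest) {
            registered = false;  // lease is gone for good; re-register next round
            continue;
        }
        if (st == EStatus::Ok) {
            registered = true;
        }
        // Ok -> wait until the next extension; ErrorTemp -> back off and retry.
        std::this_thread::sleep_for(std::chrono::seconds(5));
    }
}
```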
|72.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/ut/ydb-core-cms-ut
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-67 [FAIL]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-68
>> KqpJoinOrder::FourWayJoinLeftFirst+ColumnStore [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoinWithPreds+ColumnStore [GOOD]
Test command err: Trying to start YDB, gRPC: 21195, MsgBus: 12992
2025-03-31T15:59:43.387781Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488003967185003159:2199];send_to=[0:7307199536658146131:7762515];
2025-03-31T15:59:43.388313Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0019b7/r3tmp/tmpuqPUn8/pdisk_1.dat
2025-03-31T15:59:44.217557Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T15:59:44.217642Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T15:59:44.219768Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-31T15:59:44.261214Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 21195, node 1
2025-03-31T15:59:44.317037Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T15:59:44.317058Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T15:59:44.317066Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T15:59:44.317208Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:12992
TClient is connected to server localhost:12992
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-31T15:59:45.108294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T15:59:47.986077Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003984364872871:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:47.986177Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:47.986570Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003984364872883:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:47.993612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T15:59:48.019461Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488003984364872885:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T15:59:48.095841Z node 1 :TX_PROXY ERROR: Actor# [1:7488003988659840232:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:59:48.381902Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488003967185003159:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:59:48.382006Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:59:48.690028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T15:59:49.086908Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488003988659840555:2364];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T15:59:49.087190Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488003988659840555:2364];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T15:59:49.087284Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488003988659840549:2363];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T15:59:49.087325Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488003988659840549:2363];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T15:59:49.087440Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488003988659840555:2364];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T15:59:49.087521Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488003988659840549:2363];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T15:59:49.087564Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488003988659840555:2364];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T15:59:49.087619Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488003988659840549:2363];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T15:59:49.087660Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488003988659840555:2364];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T15:59:49.087706Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;self_id=[1:7488003988659840549:2363];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T15:59:49.087763Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488003988659840555:2364];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T15:59:49.087792Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488003988659840549:2363];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T15:59:49.087871Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488003988659840555:2364];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T15:59:49.087879Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488003988659840549:2363];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T15:59:49.087979Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488003988659840555:2364];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T15:59:49.087997Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488003988659840549:2363];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T15:59:49.088138Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488003988659840555:2364];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T15:59:49.088246Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488003988659840555:2364];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T15:59:49.088344Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488003988659840555:2364];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T15:59:49.088442Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488003988659840555:2364];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T15:59:49.090397Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488003988659840549:2363];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T15:59:49.090570Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488003988659840549:2363];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T15:59:49.090702Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488003988659840549:2363];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T15:59:49.090827Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488003988659840549:2363];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T15:59:49.135767Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7488003988659840512:2358];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.c ... ntroller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:34.860325Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039245;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:34.864647Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039266;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:34.867940Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039242;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:34.874207Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039247;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:34.880477Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039249;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:34.888147Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039269;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:34.894887Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039244;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:34.901247Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039284;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:34.909464Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039230;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:34.916531Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039366;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:34.922757Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039221;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:34.929201Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039329;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:34.936242Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039246;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:34.943913Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039270;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:34.954125Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039285;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:34.961620Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039223;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:34.968410Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039231;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:34.971776Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039267;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:34.975941Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039316;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:34.978527Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039293;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:34.983350Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039287;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:34.985538Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039298;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:34.990652Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039225;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:34.992620Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039306;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:34.998426Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039232;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:34.999180Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039313;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:35.008155Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039233;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:35.008155Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039286;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:35.016210Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039291;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:35.020129Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:35.023511Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039327;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:35.027812Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039273;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:35.031390Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039290;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:35.035894Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039325;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:35.039349Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039261;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:35.044795Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039262;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:35.047000Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039236;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:35.053154Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039276;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:35.058562Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039264;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:35.060961Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039271;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:35.066806Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039323;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:35.068681Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039311;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:35.076549Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039300;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:35.076624Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039256;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:35.084046Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039255;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:35.235345Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpeyd18dg5t4mwcmz5c31j5", SessionId: ydb://session/3?node_id=1&id=Y2I1YTIxMzgtNTQ2N2QxYS05NWMwYmZiLTM3MWRlNDAx, Slow query, duration: 40.922551s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:01:35.534125Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:01:35.534124Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:01:35.534659Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;self_id=[1:7488004358027079335:10567];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039094;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038933;receive=72075186224039392; 2025-03-31T16:01:35.535220Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH17 [GOOD] Test command err: Trying to start YDB, gRPC: 20039, MsgBus: 19401 2025-03-31T15:59:41.488623Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488003959629177056:2095];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:59:41.489960Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0019bf/r3tmp/tmplZzxVR/pdisk_1.dat 2025-03-31T15:59:42.158166Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:42.171465Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:59:42.171544Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:59:42.191858Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20039, node 1 2025-03-31T15:59:42.395641Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken 
or outdated, will use file: (empty maybe) 2025-03-31T15:59:42.395661Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:59:42.395669Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:59:42.395794Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19401 TClient is connected to server localhost:19401 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:59:43.357793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:45.705094Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003976809046871:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:45.705232Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:45.705526Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003976809046883:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:45.709526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T15:59:45.733466Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488003976809046885:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T15:59:45.829865Z node 1 :TX_PROXY ERROR: Actor# [1:7488003976809046936:2340] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:59:46.456030Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T15:59:46.491193Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488003959629177056:2095];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:59:46.491273Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:59:46.852281Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488003981104014496:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T15:59:46.852490Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488003981104014496:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T15:59:46.852796Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488003981104014496:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T15:59:46.852929Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488003981104014496:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T15:59:46.853036Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488003981104014496:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T15:59:46.853182Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488003981104014496:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T15:59:46.853283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488003981104014496:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T15:59:46.853390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488003981104014496:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T15:59:46.853525Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488003981104014496:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T15:59:46.853652Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7488003981104014496:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T15:59:46.853761Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488003981104014496:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T15:59:46.853860Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488003981104014496:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T15:59:46.858270Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488003981104014529:2363];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T15:59:46.858379Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488003981104014529:2363];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T15:59:46.858626Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488003981104014529:2363];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T15:59:46.858732Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488003981104014529:2363];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T15:59:46.858828Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488003981104014529:2363];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T15:59:46.858956Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488003981104014529:2363];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T15:59:46.859334Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488003981104014529:2363];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T15:59:46.859478Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488003981104014529:2363];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T15:59:46.859585Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488003981104014529:2363];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T15:59:46.859688Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488003981104014529:2363];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T15:59:46.859787Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037894;self_id=[1:7488003981104014529:2363];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T15:59:46.859885Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488003981104014529:2363];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T15:59:46.906411Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488003981104014506:2357];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.c ... tablet_id=72075186224039287;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:34.166162Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039245;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:34.171802Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039267;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:34.171839Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039211;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:34.177905Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039235;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:34.177917Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039249;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:34.183707Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039269;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:34.183707Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039265;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:34.189601Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039255;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:34.189602Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039223;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:34.195255Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039233;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:34.195274Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039263;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:34.200301Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039257;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 
2025-03-31T16:01:34.201333Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039290;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:34.206151Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039229;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:34.206494Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039286;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:34.211689Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039237;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:34.211735Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039281;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:34.216231Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039215;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:34.216870Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039282;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:34.221616Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039253;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:34.221665Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039271;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:34.227885Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039219;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:34.227886Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039251;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:34.233524Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039261;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:34.233532Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039221;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:34.239021Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039325;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:34.239399Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039259;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:34.244810Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039239;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:34.244830Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039329;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:34.249703Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039317;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:34.250414Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039307;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:34.254468Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039356;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:34.256416Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039217;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:34.259269Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:34.263379Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039213;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:34.264330Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039327;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:34.269192Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039302;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:34.269326Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039315;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:34.274115Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039295;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:34.278285Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039300;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:34.282295Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039275;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:34.286114Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039285;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:34.286403Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039231;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:34.294317Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039320;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:34.294649Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039323;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:34.316646Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039289;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:34.418473Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpeya1rdacg4j525n49yjme", SessionId: ydb://session/3?node_id=1&id=YzYyNmQ1YzctNjBhN2MxNDQtZmVjNTM2ZTUtM2VmZTU5YTk=, Slow query, duration: 43.161126s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:01:34.793765Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:01:34.793777Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:01:34.794496Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> TTransferTests::Create >> KqpScan::ScanDuringSplit10 >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-true [GOOD] >> KqpScan::RemoteShardScan >> KqpScan::ScanRetryRead ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::CreateColumnTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:01:11.042035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:01:11.042138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:01:11.042173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:01:11.042205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default 
configuration 2025-03-31T16:01:11.042248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:01:11.042292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:01:11.042349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:01:11.042501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:01:11.042868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:01:11.132829Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:01:11.132878Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:01:11.137175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:01:11.137263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:01:11.137439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:01:11.142406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:01:11.142644Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:01:11.143303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:11.143494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:01:11.144941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:11.145812Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:11.145851Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:11.145943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:01:11.145971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:11.145994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:01:11.146101Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:01:11.152920Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:01:11.306860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:01:11.307065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:11.307349Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:01:11.307553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:01:11.307594Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:11.310224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:11.310381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:01:11.310584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:11.310640Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:01:11.310677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:01:11.310710Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:01:11.314410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:11.314486Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:01:11.314526Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:01:11.317011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:11.317064Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:11.317096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:11.317152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:01:11.326226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:01:11.328782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:01:11.329034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:01:11.330108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:11.330276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:01:11.330324Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:11.330609Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:01:11.330656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:11.330835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:01:11.330915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:01:11.336470Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:11.336532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:11.336736Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:11.336792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:01:11.337027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:11.337092Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:01:11.337187Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:01:11.337221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:11.337259Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:01:11.337293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:11.337334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:01:11.337383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:11.337416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:01:11.337446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:01:11.337531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:01:11.337572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:01:11.337617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:01:11.339723Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 
2025-03-31T16:01:11.339856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:01:11.339892Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 44 2025-03-31T16:01:54.500200Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:01:54.500336Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:01:54.500453Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:01:54.500568Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:01:54.500666Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:01:54.500774Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:01:54.500860Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:01:54.505505Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:01:54.505734Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:01:54.506492Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:01:54.506617Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:01:54.506732Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:01:54.507010Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:01:54.507154Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:01:54.507527Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:01:54.508537Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:01:54.508659Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:01:54.508739Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:01:54.508831Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:01:54.508907Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:01:54.509006Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:01:54.509160Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 
2025-03-31T16:01:54.509231Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:01:54.509289Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:01:54.509349Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:01:54.509456Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:01:54.509500Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-03-31T16:01:54.509645Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-31T16:01:54.509686Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T16:01:54.509733Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-31T16:01:54.509768Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T16:01:54.509813Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2025-03-31T16:01:54.509906Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [4:2778:4043] message: TxId: 101 2025-03-31T16:01:54.509980Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T16:01:54.510082Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-03-31T16:01:54.510141Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-03-31T16:01:54.511607Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 66 2025-03-31T16:01:54.515981Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-31T16:01:54.516070Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [4:2779:4044] TestWaitNotification: OK eventTxId 101 2025-03-31T16:01:54.516780Z node 4 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:01:54.517105Z node 4 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 353us result status StatusSuccess 2025-03-31T16:01:54.517846Z node 4 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 ColumnTableTtlSettingsVersion: 1 } ChildrenExist: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 
PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 64 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnTableDescription { Name: "TTLEnabledTable" Schema { Columns { Id: 1 Name: "key" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "modified_at" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "modified_at" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } TtlSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 ColumnUnit: UNIT_SECONDS Tiers { ApplyAfterSeconds: 3600 Delete { } } } Version: 1 } ColumnShardCount: 64 Sharding { ColumnShards: 72075186233409546 ColumnShards: 72075186233409547 ColumnShards: 72075186233409548 ColumnShards: 72075186233409549 ColumnShards: 72075186233409550 ColumnShards: 72075186233409551 ColumnShards: 72075186233409552 ColumnShards: 72075186233409553 ColumnShards: 72075186233409554 ColumnShards: 72075186233409555 ColumnShards: 72075186233409556 ColumnShards: 72075186233409557 ColumnShards: 72075186233409558 ColumnShards: 72075186233409559 ColumnShards: 72075186233409560 ColumnShards: 72075186233409561 ColumnShards: 72075186233409562 ColumnShards: 72075186233409563 ColumnShards: 72075186233409564 ColumnShards: 72075186233409565 ColumnShards: 72075186233409566 ColumnShards: 72075186233409567 ColumnShards: 72075186233409568 ColumnShards: 72075186233409569 ColumnShards: 72075186233409570 ColumnShards: 72075186233409571 ColumnShards: 72075186233409572 ColumnShards: 72075186233409573 ColumnShards: 72075186233409574 ColumnShards: 72075186233409575 ColumnShards: 72075186233409576 ColumnShards: 72075186233409577 ColumnShards: 72075186233409578 ColumnShards: 72075186233409579 ColumnShards: 72075186233409580 ColumnShards: 72075186233409581 ColumnShards: 72075186233409582 ColumnShards: 72075186233409583 ColumnShards: 72075186233409584 ColumnShards: 72075186233409585 ColumnShards: 72075186233409586 ColumnShards: 72075186233409587 ColumnShards: 72075186233409588 ColumnShards: 72075186233409589 ColumnShards: 72075186233409590 ColumnShards: 72075186233409591 ColumnShards: 72075186233409592 ColumnShards: 72075186233409593 ColumnShards: 72075186233409594 ColumnShards: 72075186233409595 ColumnShards: 72075186233409596 ColumnShards: 72075186233409597 ColumnShards: 72075186233409598 ColumnShards: 72075186233409599 ColumnShards: 72075186233409600 ColumnShards: 72075186233409601 ColumnShards: 72075186233409602 ColumnShards: 72075186233409603 ColumnShards: 72075186233409604 
ColumnShards: 72075186233409605 ColumnShards: 72075186233409606 ColumnShards: 72075186233409607 ColumnShards: 72075186233409608 ColumnShards: 72075186233409609 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "modified_at" } } StorageConfig { DataChannelCount: 64 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
|72.3%| [TA] $(B)/ydb/core/mind/ut/test-results/unittest/{meta.json ... results_accumulator.log}
>> DataStreams::TestReservedResourcesMetering [GOOD]
>> DataStreams::TestReservedStorageMetering
>> TTransferTests::Create_Disabled
>> TSchemeShardColumnTableTTL::AlterColumnTable [GOOD]
>> TestYmqHttpProxy::TestSendMessageFifoQueue [GOOD]
>> TSchemeShardExtSubDomainTest::NothingInsideGSS-AlterDatabaseCreateHiveFirst-true
>> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain
>> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain [GOOD]
>> TSchemeShardExtSubDomainTest::AlterCantChangeSetParams-AlterDatabaseCreateHiveFirst-false
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-true [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:01:08.778149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:01:08.778254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:01:08.778300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:01:08.778334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:01:08.778378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:01:08.778421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:01:08.778515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:01:08.778653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:01:08.779034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:01:09.067798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:01:09.067865Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:01:09.084418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:01:09.084524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:01:09.084666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-03-31T16:01:09.099384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:01:09.099647Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:01:09.100306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:09.100511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:01:09.116331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:09.117582Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:09.117647Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:09.117800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:01:09.117844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:09.117883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:01:09.118019Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:01:09.164063Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:01:09.389274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:01:09.389502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:09.389755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:01:09.390028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:01:09.390081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:09.408807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:09.408955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:01:09.409145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:09.409195Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:01:09.409232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState 
no shards to create, do next state 2025-03-31T16:01:09.409267Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:01:09.420011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:09.420083Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:01:09.420123Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:01:09.424354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:09.424414Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:09.424456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:09.424531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:01:09.442995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:01:09.445795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:01:09.445974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:01:09.446950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:09.447137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:01:09.447208Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:09.447512Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:01:09.447568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:09.447741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:01:09.447851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:01:09.455968Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:09.456062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 
72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:09.456277Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:09.456353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:01:09.456575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:09.456617Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:01:09.456713Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:01:09.456747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:09.456787Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:01:09.456817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:09.456853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:01:09.456891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:09.456925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:01:09.456953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:01:09.457025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:01:09.457081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:01:09.457127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:01:09.460586Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:01:09.460764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:01:09.460805Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:56.164240Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:01:56.164634Z node 37 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:01:56.187687Z node 37 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [37:124:2150] sender: [37:239:2058] recipient: [37:15:2062] 2025-03-31T16:01:56.222368Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:01:56.222754Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:56.226844Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:01:56.227333Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:01:56.227447Z node 37 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:56.236495Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:56.236690Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:01:56.237035Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:56.237149Z node 37 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:01:56.237245Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:01:56.237317Z node 37 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:01:56.244070Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:56.244203Z node 37 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:01:56.244289Z node 37 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:01:56.252060Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:56.252151Z node 37 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:56.252264Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:56.252370Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 
ready parts: 1/1 2025-03-31T16:01:56.252653Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:01:56.259011Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:01:56.259418Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:01:56.260635Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:56.260885Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 158913792108 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:01:56.260990Z node 37 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:56.261386Z node 37 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:01:56.261505Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:56.261910Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:01:56.262063Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:01:56.272336Z node 37 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:56.272464Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:56.272811Z node 37 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:56.272909Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [37:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:01:56.273559Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:56.273662Z node 37 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:01:56.273951Z node 37 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:01:56.274046Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:56.274146Z node 37 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:01:56.274233Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 
1/1 2025-03-31T16:01:56.274335Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:01:56.274425Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:56.274518Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:01:56.274588Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:01:56.274725Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:01:56.274810Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:01:56.274880Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:01:56.275941Z node 37 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:01:56.276154Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:01:56.276259Z node 37 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-03-31T16:01:56.276360Z node 37 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-03-31T16:01:56.276455Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:01:56.276645Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-03-31T16:01:56.296198Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-03-31T16:01:56.297172Z node 37 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-03-31T16:01:56.302542Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "pgint8" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ColumnUnit: UNIT_AUTO } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:01:56.311559Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:01:56.311894Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "pgint8" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ColumnUnit: UNIT_AUTO } }, at schemeshard: 72057594046678944 2025-03-31T16:01:56.312965Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: To 
enable TTL on integral PG type column 'ValueSinceUnixEpochModeSettings' should be specified, at schemeshard: 72057594046678944 2025-03-31T16:01:56.319220Z node 37 :TX_PROXY DEBUG: actor# [37:269:2260] Bootstrap 2025-03-31T16:01:56.449545Z node 37 :TX_PROXY DEBUG: actor# [37:269:2260] Become StateWork (SchemeCache [37:274:2265]) 2025-03-31T16:01:56.456222Z node 37 :TX_PROXY DEBUG: actor# [37:269:2260] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-31T16:01:56.468664Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "To enable TTL on integral PG type column \'ValueSinceUnixEpochModeSettings\' should be specified" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:01:56.469023Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: To enable TTL on integral PG type column 'ValueSinceUnixEpochModeSettings' should be specified, operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2025-03-31T16:01:56.469867Z node 37 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 >> TestYmqHttpProxy::TestSendMessageWithAttributes >> TSchemeShardExtSubDomainTest::CreateItemsInsideExtSubdomainAtGSSwithoutTSS >> TestKinesisHttpProxy::TestRequestBadJson [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::AlterColumnTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:01:30.925313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:01:30.925440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:01:30.925484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:01:30.925522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:01:30.925578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:01:30.925615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:01:30.925673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:01:30.925806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:01:30.926265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 
2025-03-31T16:01:31.019634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:01:31.019697Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:01:31.024589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:01:31.024689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:01:31.024842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:01:31.029671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:01:31.029887Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:01:31.030531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:31.030721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:01:31.032588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:31.033715Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:31.033775Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:31.033929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:01:31.033974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:31.034011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:01:31.034165Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:01:31.041148Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:01:31.202608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:01:31.202833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:31.203073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:01:31.203535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:01:31.203635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:31.207183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:31.207382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, 
subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:01:31.207631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:31.207693Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:01:31.207734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:01:31.207770Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:01:31.210416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:31.210488Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:01:31.210529Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:01:31.212940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:31.213010Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:31.213064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:31.213149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:01:31.226456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:01:31.229225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:01:31.229480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:01:31.230585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:31.230772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:01:31.230832Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:31.231164Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:01:31.231223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:31.231421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 
2025-03-31T16:01:31.231504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:01:31.237666Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:31.237711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:31.237879Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:31.238125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:01:31.238300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:31.238340Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:01:31.238444Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:01:31.238473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:31.238506Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:01:31.238532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:31.238564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:01:31.238599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:31.238626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:01:31.238650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:01:31.238721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:01:31.238758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:01:31.238796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:01:31.240991Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:01:31.241141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:01:31.241189Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
d::TEvNotifyTxCompletionResult> complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-31T16:01:56.484467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-31T16:01:56.484571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-31T16:01:56.484667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-31T16:01:56.484807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-31T16:01:56.485349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-31T16:01:56.486610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-31T16:01:56.486765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-31T16:01:56.486884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-31T16:01:56.486978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-31T16:01:56.487073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-31T16:01:56.487189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-31T16:01:56.487283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-31T16:01:56.487369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-31T16:01:56.487485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-31T16:01:56.487534Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-03-31T16:01:56.487666Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-03-31T16:01:56.487707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-31T16:01:56.487786Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-03-31T16:01:56.487824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-31T16:01:56.487863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-03-31T16:01:56.487943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:2802:4067] message: TxId: 103 2025-03-31T16:01:56.487994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-31T16:01:56.488056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-03-31T16:01:56.488088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-03-31T16:01:56.489322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 66 2025-03-31T16:01:56.503053Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-03-31T16:01:56.503150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:3992:5184] TestWaitNotification: OK eventTxId 103 2025-03-31T16:01:56.503931Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:01:56.504201Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 307us result status StatusSuccess 2025-03-31T16:01:56.504786Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 11 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 11 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 ColumnTableVersion: 3 ColumnTableSchemaVersion: 1 ColumnTableTtlSettingsVersion: 3 } ChildrenExist: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 64 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnTableDescription { Name: "TTLEnabledTable" Schema { Columns { Id: 1 Name: "key" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "modified_at" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 3 Name: "saved_at" Type: "Datetime" TypeId: 49 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 4 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "modified_at" NextColumnId: 5 Version: 1 Options { SchemeNeedActualization: false } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } TtlSettings { Disabled { } Version: 3 } ColumnShardCount: 64 Sharding { ColumnShards: 72075186233409546 ColumnShards: 72075186233409547 ColumnShards: 72075186233409548 ColumnShards: 72075186233409549 ColumnShards: 72075186233409550 ColumnShards: 72075186233409551 ColumnShards: 72075186233409552 
ColumnShards: 72075186233409553 ColumnShards: 72075186233409554 ColumnShards: 72075186233409555 ColumnShards: 72075186233409556 ColumnShards: 72075186233409557 ColumnShards: 72075186233409558 ColumnShards: 72075186233409559 ColumnShards: 72075186233409560 ColumnShards: 72075186233409561 ColumnShards: 72075186233409562 ColumnShards: 72075186233409563 ColumnShards: 72075186233409564 ColumnShards: 72075186233409565 ColumnShards: 72075186233409566 ColumnShards: 72075186233409567 ColumnShards: 72075186233409568 ColumnShards: 72075186233409569 ColumnShards: 72075186233409570 ColumnShards: 72075186233409571 ColumnShards: 72075186233409572 ColumnShards: 72075186233409573 ColumnShards: 72075186233409574 ColumnShards: 72075186233409575 ColumnShards: 72075186233409576 ColumnShards: 72075186233409577 ColumnShards: 72075186233409578 ColumnShards: 72075186233409579 ColumnShards: 72075186233409580 ColumnShards: 72075186233409581 ColumnShards: 72075186233409582 ColumnShards: 72075186233409583 ColumnShards: 72075186233409584 ColumnShards: 72075186233409585 ColumnShards: 72075186233409586 ColumnShards: 72075186233409587 ColumnShards: 72075186233409588 ColumnShards: 72075186233409589 ColumnShards: 72075186233409590 ColumnShards: 72075186233409591 ColumnShards: 72075186233409592 ColumnShards: 72075186233409593 ColumnShards: 72075186233409594 ColumnShards: 72075186233409595 ColumnShards: 72075186233409596 ColumnShards: 72075186233409597 ColumnShards: 72075186233409598 ColumnShards: 72075186233409599 ColumnShards: 72075186233409600 ColumnShards: 72075186233409601 ColumnShards: 72075186233409602 ColumnShards: 72075186233409603 ColumnShards: 72075186233409604 ColumnShards: 72075186233409605 ColumnShards: 72075186233409606 ColumnShards: 72075186233409607 ColumnShards: 72075186233409608 ColumnShards: 72075186233409609 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "modified_at" } } StorageConfig { DataChannelCount: 64 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 104 2025-03-31T16:01:56.580132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterColumnTable AlterColumnTable { Name: "TTLEnabledTable" AlterSchema { AlterColumns { Name: "data" DefaultValue: "10" } } } } TxId: 104 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:01:56.580388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterColumnTable Propose, path: /MyRoot/TTLEnabledTable, opId: 104:0, at schemeshard: 72057594046678944 2025-03-31T16:01:56.602108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 104:1, propose status:StatusSchemeError, reason: schema update error: sparsed columns are disabled, at schemeshard: 72057594046678944 2025-03-31T16:01:56.611125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 104, response: Status: StatusSchemeError Reason: "schema update error: sparsed columns are disabled" TxId: 104 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:01:56.611324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 104, database: /MyRoot, subject: , status: StatusSchemeError, reason: schema update error: sparsed columns are disabled, operation: ALTER COLUMN TABLE, path: /MyRoot/TTLEnabledTable TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-03-31T16:01:56.611816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for 
txId 104: send EvNotifyTxCompletion 2025-03-31T16:01:56.611871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-03-31T16:01:56.612532Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-03-31T16:01:56.612677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-03-31T16:01:56.612725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:4327:5518] TestWaitNotification: OK eventTxId 104 >> TTransferTests::Create_Disabled [GOOD] >> TTransferTests::CreateWithoutCredentials >> TestKinesisHttpProxy::TestRequestWithWrongRegion [GOOD] >> TTransferTests::Create [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndWait >> TTransferTests::CreateSequential >> TestYmqHttpProxy::TestCreateQueueWithSameNameAndDifferentParams [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeSetParams-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeSetParams-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain [GOOD] >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-ExternalHive >> TNebiusAccessServiceTest::Authenticate >> TSchemeShardExtSubDomainTest::NothingInsideGSS-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::SysViewProcessorSync-AlterDatabaseCreateHiveFirst-false >> SystemView::GroupsFields [GOOD] >> TNebiusAccessServiceTest::Authenticate [GOOD] >> TestKinesisHttpProxy::TestConsumersEmptyNames >> TestKinesisHttpProxy::PutRecordsWithLongExplicitHashKey [GOOD] >> TestKinesisHttpProxy::GoodRequestPutRecords [GOOD] >> TestKinesisHttpProxy::TestRequestWithIAM >> TestYmqHttpProxy::TestCreateQueueWithWrongBody >> TSchemeShardExtSubDomainTest::CreateAndWait [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-ExternalHive >> TestYmqHttpProxy::TestReceiveMessage [GOOD] >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst |72.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/ydb/sdk_sessions_pool_ut/ydb-services-ydb-sdk_sessions_pool_ut |72.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/sdk_sessions_pool_ut/ydb-services-ydb-sdk_sessions_pool_ut |72.3%| [TA] {RESULT} $(B)/ydb/core/mind/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TestKinesisHttpProxy::PutRecordsWithIncorrectHashKey >> TSchemeShardExtSubDomainTest::CreateItemsInsideExtSubdomainAtGSSwithoutTSS [GOOD] >> TSchemeShardExtSubDomainTest::CreateWithExtraPathSymbolsAllowed-AlterDatabaseCreateHiveFirst-false >> TTransferTests::CreateWithoutCredentials [GOOD] >> TTransferTests::CreateWrongConfig |72.3%| [LD] {RESULT} $(B)/ydb/services/ydb/sdk_sessions_pool_ut/ydb-services-ydb-sdk_sessions_pool_ut ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest >> TNebiusAccessServiceTest::Authenticate [GOOD] Test command err: 2025-03-31T16:01:59.937299Z node 1 :GRPC_CLIENT DEBUG: [517000003208] Connect to grpc://localhost:26467 2025-03-31T16:02:00.021684Z node 1 :GRPC_CLIENT DEBUG: [517000003208] Request AuthenticateRequest { iam_token: "**** (3C4833B6)" } 2025-03-31T16:02:00.047727Z node 1 :GRPC_CLIENT DEBUG: [517000003208] Status 7 Permission Denied 2025-03-31T16:02:00.048377Z node 1 :GRPC_CLIENT DEBUG: [517000003208] Request AuthenticateRequest { iam_token: "**** (86DDB286)" } 2025-03-31T16:02:00.051370Z node 1 :GRPC_CLIENT DEBUG: [517000003208] Response AuthenticateResponse { account { user_account { id: "1234" } } } >> TestKinesisHttpProxy::DoubleCreateStream >> TSchemeShardExtSubDomainTest::AlterCantChangeSetParams-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::AlterRequiresParamCombinations-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-AlterDatabaseCreateHiveFirst-ExternalHive >> TestYmqHttpProxy::TestReceiveMessageWithAttributes >> DataShardVolatile::DistributedWriteThenSplit [GOOD] >> DataShardVolatile::DistributedWriteThenReadIterator >> TTransferTests::CreateSequential [GOOD] >> TTransferTests::CreateInParallel |72.3%| [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst-ExternalHive >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice >> TSchemeShardExtSubDomainTest::AlterRequiresParamCombinations-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterRequiresParamCombinations-AlterDatabaseCreateHiveFirst-true >> TTransferTests::CreateWrongConfig [GOOD] >> TTransferTests::CreateWrongBatchSize ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FourWayJoinLeftFirst+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 19170, MsgBus: 7913 2025-03-31T15:59:42.708535Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488003964858222077:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:59:42.726684Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0019b4/r3tmp/tmpjwQajk/pdisk_1.dat 2025-03-31T15:59:43.445091Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:59:43.445235Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:59:43.448002Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:59:43.482171Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19170, node 1 2025-03-31T15:59:43.647699Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:59:43.647727Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:59:43.647738Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:59:43.647859Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7913 TClient is connected to server localhost:7913 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:59:44.648913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:44.707590Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T15:59:47.339582Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003986333059217:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:47.339704Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:47.340159Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003986333059229:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:47.344408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T15:59:47.359744Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488003986333059231:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T15:59:47.461600Z node 1 :TX_PROXY ERROR: Actor# [1:7488003986333059282:2342] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:59:47.715188Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488003964858222077:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:59:47.715248Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:59:47.955043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T15:59:48.275278Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488003990628026837:2352];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T15:59:48.275506Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488003990628026837:2352];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T15:59:48.275788Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488003990628026837:2352];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T15:59:48.275940Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488003990628026837:2352];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T15:59:48.276069Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488003990628026837:2352];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T15:59:48.276190Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488003990628026837:2352];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T15:59:48.276298Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488003990628026837:2352];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T15:59:48.276406Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488003990628026837:2352];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T15:59:48.276522Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488003990628026837:2352];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T15:59:48.276630Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037902;self_id=[1:7488003990628026837:2352];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T15:59:48.276734Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488003990628026837:2352];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T15:59:48.276846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488003990628026837:2352];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T15:59:48.328041Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488003990628026839:2353];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T15:59:48.328101Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488003990628026839:2353];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T15:59:48.328329Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488003990628026839:2353];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T15:59:48.328441Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488003990628026839:2353];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T15:59:48.328559Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488003990628026839:2353];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T15:59:48.328655Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488003990628026839:2353];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T15:59:48.328769Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488003990628026839:2353];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T15:59:48.328917Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488003990628026839:2353];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T15:59:48.329039Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488003990628026839:2353];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T15:59:48.329210Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488003990628026839:2353];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T15:59:48.329310Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7488003990628026839:2353];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T15:59:48.329418Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488003990628026839:2353];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T15:59:48.332653Z node 1 :TX_C ... ller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:36.584657Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039196;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:36.588244Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039332;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:36.590264Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039364;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:36.594168Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039270;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:36.596010Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039336;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:36.600357Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039260;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:36.601892Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039308;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:36.606252Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039356;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:36.607104Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039191;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:36.612848Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039368;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:36.612925Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:36.619548Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039197;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:36.619579Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039370;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:36.625578Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039306;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:36.625578Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039366;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:36.631963Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039280;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:36.631964Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039189;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:36.637696Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039195;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:36.642406Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039376;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:36.643357Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:36.649190Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039340;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:36.651044Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:36.655158Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039342;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:36.657854Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:36.661244Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039382;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:36.664201Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:36.667490Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039372;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:36.670269Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:36.674049Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039312;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:36.678553Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039193;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:36.680748Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:36.684688Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:36.686419Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:36.692081Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:36.692329Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:36.698182Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:36.698186Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:36.704523Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:36.704540Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:36.710395Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:36.710402Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:36.716533Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:36.719799Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:36.721419Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:36.727702Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:36.897935Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpeyg1b00dvddwgnq6ne3vk", SessionId: ydb://session/3?node_id=1&id=YmZkYWY1OTItZTg0MWYwZjgtYmYzMDM1MmUtN2JmZDE5YzE=, Slow query, duration: 39.509617s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:01:37.177426Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:01:37.177886Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;self_id=[1:7488004359995263793:10144];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039094;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039392;receive=72075186224038933; 2025-03-31T16:01:37.177970Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:01:37.178581Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> TSchemeShardExtSubDomainTest::SysViewProcessorSync-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::SysViewProcessorSync-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::CreateWithExtraPathSymbolsAllowed-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::CreateWithExtraPathSymbolsAllowed-AlterDatabaseCreateHiveFirst-true >> TSchemeShardTTLTests::ShouldSkipDroppedColumn [GOOD] |72.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg |72.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg |72.3%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg >> TPersQueueTest::DirectReadRestartPQRB [GOOD] >> TPersQueueTest::DirectReadRestartTablet >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-ExternalHive >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-ExternalHive |72.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut |72.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut |72.4%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut >> TNebiusAccessServiceTest::Authorize |72.4%| [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest >> TNebiusAccessServiceTest::Authorize [GOOD] >> 
TTransferTests::CreateWrongBatchSize [GOOD] >> TTransferTests::CreateWrongFlushIntervalIsSmall >> TSchemeShardExtSubDomainTest::AlterRequiresParamCombinations-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::AlterNameConflicts-AlterDatabaseCreateHiveFirst-false ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::ShouldSkipDroppedColumn [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-31T16:01:38.416578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:01:38.416686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:01:38.416717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:01:38.416744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:01:38.416780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:01:38.416805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:01:38.416869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:01:38.416936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:01:38.417237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:01:38.515701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:01:38.515793Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:01:38.527840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:01:38.528199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:01:38.528333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:01:38.538026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:01:38.538344Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:01:38.539061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:38.539503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:01:38.542083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:38.543470Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at 
schemeshard: 72057594046678944 2025-03-31T16:01:38.543548Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:38.543659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:01:38.543711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:38.543751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:01:38.544032Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:01:38.551717Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-03-31T16:01:38.706237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:01:38.706518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:38.706784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:01:38.707164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:01:38.707301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:38.710055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:38.710221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:01:38.710471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:38.710545Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:01:38.710585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:01:38.710623Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:01:38.712775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:38.712839Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:01:38.712883Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:01:38.714885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:38.714941Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:38.715031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:38.715121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:01:38.719220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:01:38.721237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:01:38.721490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:01:38.722666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:38.722841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:01:38.722901Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:38.723306Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:01:38.723385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:38.723562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:01:38.723642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:01:38.725846Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:38.725901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:38.726104Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:38.726168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:01:38.726560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:38.726627Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:01:38.726755Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 
2025-03-31T16:01:38.726795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:38.726839Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:01:38.726873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:38.726915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:01:38.726957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:38.726998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:01:38.727036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:01:38.727124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:01:38.727179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:01:38.727237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:01:38.729415Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:01:38.729551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:01:38.729597Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
6678944 2025-03-31T16:01:39.273070Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-03-31T16:01:39.273162Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-03-31T16:01:39.273198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-31T16:01:39.273234Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-03-31T16:01:39.273283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-31T16:01:39.273322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-03-31T16:01:39.273375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:377:2345] message: TxId: 103 2025-03-31T16:01:39.273410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-31T16:01:39.273442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-03-31T16:01:39.273479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-03-31T16:01:39.273562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-31T16:01:39.276012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-03-31T16:01:39.276075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:503:2464] TestWaitNotification: OK eventTxId 103 2025-03-31T16:01:45.162190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:01:45.162264Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:01:47.103975Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0713 2025-03-31T16:01:47.115952Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0887 2025-03-31T16:01:47.163290Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2025-03-31T16:01:47.163472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:2 data size 0 row count 0 2025-03-31T16:01:47.163607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=indexImplTable, is column=0, is olap=0 2025-03-31T16:01:47.163668Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 4: RowCount 0, DataSize 0 2025-03-31T16:01:47.163802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 0 row count 0 2025-03-31T16:01:47.163867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, 
LocalPathId: 2], pathId map=TTLEnabledTable, is column=0, is olap=0 2025-03-31T16:01:47.163892Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409547 followerId=0, pathId 2: RowCount 0, DataSize 0 2025-03-31T16:01:47.174349Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-03-31T16:01:51.051795Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0189 2025-03-31T16:01:51.062484Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0233 2025-03-31T16:01:51.107308Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2025-03-31T16:01:51.107523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:2 data size 0 row count 0 2025-03-31T16:01:51.107587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=indexImplTable, is column=0, is olap=0 2025-03-31T16:01:51.107628Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 4: RowCount 0, DataSize 0 2025-03-31T16:01:51.107744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 0 row count 0 2025-03-31T16:01:51.107822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=TTLEnabledTable, is column=0, is olap=0 2025-03-31T16:01:51.107863Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409547 followerId=0, pathId 2: RowCount 0, DataSize 0 2025-03-31T16:01:51.119347Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-03-31T16:01:54.851761Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0189 2025-03-31T16:01:54.863602Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0233 2025-03-31T16:01:54.911457Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2025-03-31T16:01:54.911675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:2 data size 0 row count 0 2025-03-31T16:01:54.911768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=indexImplTable, is column=0, is olap=0 
2025-03-31T16:01:54.911814Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 4: RowCount 0, DataSize 0 2025-03-31T16:01:54.911914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 0 row count 0 2025-03-31T16:01:54.911959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=TTLEnabledTable, is column=0, is olap=0 2025-03-31T16:01:54.911998Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409547 followerId=0, pathId 2: RowCount 0, DataSize 0 2025-03-31T16:01:54.923389Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-03-31T16:01:58.773497Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0061 2025-03-31T16:01:58.787639Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0071 2025-03-31T16:01:58.832838Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2025-03-31T16:01:58.833069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:2 data size 0 row count 0 2025-03-31T16:01:58.833173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=indexImplTable, is column=0, is olap=0 2025-03-31T16:01:58.833244Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 4: RowCount 0, DataSize 0 2025-03-31T16:01:58.833377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 0 row count 0 2025-03-31T16:01:58.833412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=TTLEnabledTable, is column=0, is olap=0 2025-03-31T16:01:58.833450Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409547 followerId=0, pathId 2: RowCount 0, DataSize 0 2025-03-31T16:01:58.843817Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-03-31T16:02:02.553407Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2025-03-31T16:02:02.553565Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-03-31T16:02:02.553820Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-03-31T16:02:02.554108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409547, request: TableId: 2 Expiration { ColumnId: 2 
WallClockTimestamp: 60025000 ColumnUnit: UNIT_AUTO } SchemaVersion: 3 Indexes { OwnerId: 72057594046678944 PathId: 4 SchemaVersion: 1 KeyMap { IndexColumnId: 1 MainColumnId: 3 } KeyMap { IndexColumnId: 2 MainColumnId: 1 } } Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2025-03-31T16:02:02.554754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409547, at schemeshard: 72057594046678944 2025-03-31T16:02:02.555857Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2025-03-31T16:02:02.555923Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409547, at schemeshard: 72057594046678944 2025-03-31T16:02:02.564716Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2025-03-31T16:02:02.565017Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-03-31T16:02:02.565082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:1, run at: 1970-01-01T01:01:00.025000Z, at schemeshard: 72057594046678944 2025-03-31T16:02:02.565160Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest >> TNebiusAccessServiceTest::Authorize [GOOD] Test command err: 2025-03-31T16:02:03.419660Z node 3 :GRPC_CLIENT DEBUG: [517000005888] Connect to grpc://localhost:8839 2025-03-31T16:02:03.471839Z node 3 :GRPC_CLIENT DEBUG: [517000005888] Request AuthorizeRequest { checks { key: 0 value { permission { name: "perm" } resource_path { path { id: "path_id" } } iam_token: "**** (717F937C)" } } } 2025-03-31T16:02:03.495233Z node 3 :GRPC_CLIENT DEBUG: [517000005888] Response AuthorizeResponse { results { key: 0 value { account { user_account { id: "user_id" } } } } } 2025-03-31T16:02:03.503447Z node 3 :GRPC_CLIENT DEBUG: [517000005888] Request AuthorizeRequest { checks { key: 0 value { permission { name: "perm" } resource_path { path { id: "path_id" } } iam_token: "**** (79225CA9)" } } } 2025-03-31T16:02:03.511790Z node 3 :GRPC_CLIENT DEBUG: [517000005888] Status 7 Permission Denied 2025-03-31T16:02:03.515712Z node 3 :GRPC_CLIENT DEBUG: [517000005888] Request AuthorizeRequest { checks { key: 0 value { permission { name: "denied" } resource_path { path { id: "path_id" } } iam_token: "**** (717F937C)" } } } 2025-03-31T16:02:03.523474Z node 3 :GRPC_CLIENT DEBUG: [517000005888] Status 7 Permission Denied 2025-03-31T16:02:03.525878Z node 3 :GRPC_CLIENT DEBUG: [517000005888] Request AuthorizeRequest { checks { key: 0 value { permission { name: "perm" } resource_path { path { id: "p" } } iam_token: "**** (717F937C)" } } } 2025-03-31T16:02:03.528318Z node 3 :GRPC_CLIENT DEBUG: [517000005888] Status 7 Permission Denied >> SystemView::AuthUsers_TableRange [GOOD] >> SystemView::AuthPermissions_ResultOrder |72.4%| [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest >> TSchemeShardExtSubDomainTest::CreateWithExtraPathSymbolsAllowed-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::CreateNameConflicts-AlterDatabaseCreateHiveFirst-false >> TTransferTests::CreateWrongFlushIntervalIsSmall [GOOD] >> TTransferTests::CreateWrongFlushIntervalIsBig >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-ExternalHive [GOOD] >> 
TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-AlterDatabaseCreateHiveFirst >> TSchemeShardExtSubDomainTest::SysViewProcessorSync-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::StatisticsAggregatorSync-AlterDatabaseCreateHiveFirst-false >> KqpStreamLookup::ReadTableDuringSplit [GOOD] >> TTransferTests::CreateWrongFlushIntervalIsBig [GOOD] >> TTransferTests::CreateInParallel [GOOD] >> TTransferTests::CreateDropRecreate >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-AlterDatabaseCreateHiveFirst-ExternalHive >> TSchemeShardExtSubDomainTest::CreateNameConflicts-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::CreateNameConflicts-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::AlterNameConflicts-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterNameConflicts-AlterDatabaseCreateHiveFirst-true >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-68 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-69 >> TCmsTenatsTest::TestClusterRatioLimit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_transfer/unittest >> TTransferTests::CreateWrongFlushIntervalIsBig [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-31T16:01:58.466158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:01:58.466249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:01:58.466290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:01:58.466344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:01:58.466396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:01:58.466421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:01:58.466484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:01:58.466578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:01:58.466933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:01:58.585884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:01:58.585948Z node 1 
:IMPORT WARN: Table profiles were not loaded 2025-03-31T16:01:58.607596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:01:58.608110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:01:58.608305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:01:58.627327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:01:58.627657Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:01:58.628542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:58.629025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:01:58.631872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:58.633229Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:58.633290Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:58.633377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:01:58.633459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:58.633493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:01:58.633693Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:01:58.641477Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-03-31T16:01:58.807746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:01:58.808025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:58.808300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:01:58.808566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:01:58.808625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:58.811640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:58.811836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:01:58.812083Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:58.812143Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:01:58.812181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:01:58.812216Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:01:58.814991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:58.815058Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:01:58.815117Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:01:58.818369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:58.818442Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:58.818498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:58.818547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:01:58.822399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:01:58.825123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:01:58.825322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:01:58.826390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:58.826557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:01:58.826624Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:58.826963Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:01:58.827019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:58.827205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:01:58.827287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 
72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:01:58.829767Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:58.829823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:58.830032Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:58.830075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:01:58.830458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:58.830508Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:01:58.830600Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:01:58.830630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:58.830666Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:01:58.830693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:58.830732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:01:58.830793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:58.830829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:01:58.830861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:01:58.830935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:01:58.830971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:01:58.831002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:01:58.833047Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:01:58.833159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:01:58.833197Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
7594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:02:04.948635Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:02:04.948837Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:02:04.949632Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:02:04.949771Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 25769805932 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:02:04.949825Z node 6 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:02:04.950069Z node 6 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:02:04.950135Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:02:04.950330Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:02:04.950408Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:02:04.953095Z node 6 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:02:04.953152Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:02:04.953350Z node 6 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:02:04.953396Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [6:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:02:04.953723Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:02:04.953773Z node 6 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:02:04.953888Z node 6 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:02:04.953925Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:02:04.953971Z node 6 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:02:04.954004Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:02:04.954041Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 
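
The traces above (node 1, then node 6) drive a single schemeshard operation through the same lifecycle: the propose is accepted, the fake coordinator plans it at step 5000001, and the operation part advances through the numeric states the log prints as "Change state for txid 1:0 2 -> 3", "3 -> 128" and "128 -> 240" before the result is published. A toy rendering of that progression; the numbers and stage names come from the log (TCreateParts, TConfigureParts, TPropose, TDone), but the enum and switch are illustrative, not the schemeshard sources:

    #include <iostream>

    // State numbers as they appear in the "Change state for txid ..." records.
    enum class EOpState : int {
        CreateParts    = 2,   // create shards (none needed for this operation)
        ConfigureParts = 3,   // configure the created parts
        Propose        = 128, // wait for the coordinator's plan step
        Done           = 240, // operation complete, publish and notify
    };

    EOpState Advance(EOpState s) {
        switch (s) {
            case EOpState::CreateParts:    return EOpState::ConfigureParts;
            case EOpState::ConfigureParts: return EOpState::Propose;
            case EOpState::Propose:        return EOpState::Done; // on TEvOperationPlan
            case EOpState::Done:           return EOpState::Done;
        }
        return s; // unreachable; keeps -Wreturn-type quiet
    }

    int main() {
        for (EOpState s = EOpState::CreateParts; s != EOpState::Done;) {
            EOpState next = Advance(s);
            std::cout << "Change state " << static_cast<int>(s)
                      << " -> " << static_cast<int>(next) << "\n";
            s = next;
        }
    }

The jump from 128 to 240 happens only in the HandleReply for TEvOperationPlan, which is why every accepted propose in these logs is followed by a coordinator round-trip before TDone runs.
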
2025-03-31T16:02:04.954091Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:02:04.954143Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:02:04.954178Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:02:04.954252Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:02:04.954290Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:02:04.954326Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:02:04.954792Z node 6 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:02:04.954888Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:02:04.954930Z node 6 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-03-31T16:02:04.954975Z node 6 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-03-31T16:02:04.955016Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:02:04.955133Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-03-31T16:02:04.958582Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-03-31T16:02:04.959072Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-03-31T16:02:04.959918Z node 6 :TX_PROXY DEBUG: actor# [6:270:2261] Bootstrap 2025-03-31T16:02:04.981360Z node 6 :TX_PROXY DEBUG: actor# [6:270:2261] Become StateWork (SchemeCache [6:275:2266]) 2025-03-31T16:02:04.981789Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [6:274:2265], Recipient [6:123:2149]: {TEvModifySchemeTransaction txid# 101 TabletId# 72057594046678944} 2025-03-31T16:02:04.981841Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-03-31T16:02:04.984050Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTransfer Replication { Name: "Transfer" Config { TransferSpecific { Target { SrcPath: "/MyRoot1/Table" DstPath: "/MyRoot2/Table" } Batching { FlushIntervalMilliSeconds: 86400001 } } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:02:04.984302Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateReplication Propose: opId# 101:0, path# /MyRoot/Transfer 2025-03-31T16:02:04.984389Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusInvalidParameter, reason: Flush interval must be less than or equal to 24 hours, 
at schemeshard: 72057594046678944 2025-03-31T16:02:04.984601Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-03-31T16:02:04.985550Z node 6 :TX_PROXY DEBUG: actor# [6:270:2261] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-31T16:02:04.990935Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusInvalidParameter Reason: "Flush interval must be less than or equal to 24 hours" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:02:04.991154Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Flush interval must be less than or equal to 24 hours, operation: CREATE TRANSFER, path: /MyRoot/Transfer 2025-03-31T16:02:04.991220Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-31T16:02:04.991968Z node 6 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-03-31T16:02:04.992238Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-03-31T16:02:04.992282Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-03-31T16:02:04.992619Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [6:287:2278], Recipient [6:123:2149]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:02:04.992672Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:02:04.992715Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-03-31T16:02:04.992866Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [6:284:2275], Recipient [6:123:2149]: NKikimrScheme.TEvNotifyTxCompletion TxId: 101 2025-03-31T16:02:04.992893Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-03-31T16:02:04.992965Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-03-31T16:02:04.993077Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-31T16:02:04.993130Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [6:285:2276] 2025-03-31T16:02:04.993307Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [6:287:2278], Recipient [6:123:2149]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-03-31T16:02:04.993336Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-03-31T16:02:04.993376Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 2025-03-31T16:02:04.993704Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [6:288:2279], Recipient [6:123:2149]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Transfer" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-03-31T16:02:04.993751Z node 6 :FLAT_TX_SCHEMESHARD 
TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-03-31T16:02:04.993864Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Transfer" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:02:04.994048Z node 6 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Transfer" took 195us result status StatusPathDoesNotExist 2025-03-31T16:02:04.994204Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Transfer\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Transfer" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TCmsTest::RequestReplaceDevices >> THiveTest::TestCreateSubHiveCreateManyTabletsWithReboots [GOOD] >> THiveTest::TestCheckSubHiveMigrationWithReboots >> TestYmqHttpProxy::TestSendMessageWithAttributes [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::AlterNameConflicts-AlterDatabaseCreateHiveFirst-true [GOOD] |72.4%| [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest >> TestKinesisHttpProxy::TestConsumersEmptyNames [GOOD] >> KqpStreamLookup::ReadTableWithIndexDuringSplit [GOOD] >> TSchemeShardExtSubDomainTest::CreateNameConflicts-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::CreateThenDropChangesParent-AlterDatabaseCreateHiveFirst-false >> TPQTest::TestWriteTimeStampEstimate >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst-ExternalHive >> TSchemeShardExtSubDomainTest::StatisticsAggregatorSync-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::StatisticsAggregatorSync-AlterDatabaseCreateHiveFirst-true ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp/unittest >> KqpStreamLookup::ReadTableDuringSplit [GOOD] Test command err: 2025-03-31T16:01:51.018381Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:01:51.018661Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:01:51.018849Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0011fb/r3tmp/tmpSu2hol/pdisk_1.dat 2025-03-31T16:01:51.615381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:01:51.697306Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:01:51.741351Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:01:51.741492Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:01:51.755041Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:01:51.858316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:01:52.303028Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:739:2621], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:01:52.316303Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:748:2626], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:01:52.316436Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:01:52.325781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:01:52.532002Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:753:2629], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:01:52.690215Z node 1 :TX_PROXY ERROR: Actor# [1:827:2672] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:02:02.751454Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqpf05nce9tm6r8wp2q2vka1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTIzNmU4N2MtYmIzOGUwN2MtNjczOWRjNGUtOTQyNzgwZTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:02:02.809828Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpf05nce9tm6r8wp2q2vka1", SessionId: ydb://session/3?node_id=1&id=OTIzNmU4N2MtYmIzOGUwN2MtNjczOWRjNGUtOTQyNzgwZTQ=, Slow query, duration: 10.509060s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "UPSERT INTO `/Root/TestTable` (key, value) VALUES (0, 00), (1, 11), (2, 22), (3, 33), (4, 44), (5, 55), (6, 66), (7, 77), (8, 88), (9, 99), (10, 1010), (11, 1111), (12, 1212), (13, 1313), (14, 1414), (15, 1515), (16, 1616), (17, 1717), (18, 1818), (19, 1919), (20, 2020), (21, 2121), (22, 2222), (23, 2323), (24, 2424), (25, 2525), (26, 2626), (27, 2727), (28, 2828), (29, 2929), (30, 3030), (31, 3131), (32, 3232), (33, 3333), (34, 3434), (35, 3535), (36, 3636), (37, 3737), (38, 3838), (39, 3939), (40, 4040), (41, 4141), (42, 4242), (43, 4343), (44, 4444), (45, 4545), (46, 4646), (47, 4747), (48, 4848), (49, 4949), (50, 5050), (51, 5151), (52, 5252), (53, 5353), (54, 5454), (55, 5555), (56, 5656), (57, 5757), (58, 5858), (59, 5959), (60, 6060), (61, 6161), (62, 6262), (63, 6363), (64, 6464), (65, 6565), (66, 6666), (67, 6767), (68, 6868), (69, 6969), (70, 7070), (71, 7171), (72, 7272), (73, 7373), (74, 7474), (75, 7575), (76, 7676), (77, 7777), (78, 7878), (79, 7979), (80, 8080), (81, 8181), (82, 8282), (83, 8383), (84, 8484), (85, 8585), (86, 8686), (87, 8787), (88, 8888), (89, 8989), (90, 9090), (91, 9191), (92, 9292), (93, 9393), (94, 9494), (95, 9595), (96, 9696), (97, 9797), (98, 9898), (99, 9999), (100, 100100), (101, 101101), (102, 102102), (103, 103103), (104, 104104), (105, 105105), (106, 106106), (107, 107107), (108, 108108), (109, 109109), (110, 110110), (111, 111111), (112, 112112), (113, 113113), (114, 114114), (115, 115115), (116, 116116), (117, 117117), (118, 118118), (119, 119119), (120, 120120), (121, 121121), (122, 122122), (123, 123123), (124, 124124), (125, 125125), (126, 126126), (127, 127127), (128, 128128), (129, 129129), (130, 130130), (131, 131131), (132, 132132), (133, 133133), (134, 134134), (135, 135135), (136, 136136), (137, 137137), (138, 138138), (139, 139139), (140, 140140), (141, 141141), (142, 142142), (143, 143143), (144, 144144), (145, 145145), (146, 146146), (147, 147147), (148, 148148), (149, 149149), (150, 150150), (151, 151151), (152, 152152), (153, 153153), (154, 154154), (155, 155155), (156, 156156), (157, 157157), (158, 158158), (159, 159159), (160, 160160), (161, 161161), (162, 162162), (163, 163163), (164, 164164), (165, 165165), (166, 166166), (167, 167167), (168, 168168), (169, 169169), (170, 170170), (171, 171171), (172, 172172), (173, 173173), (174, 174174), (175, 175175), (176, 176176), (177, 177177), (178, 178178), (179, 179179), (180, 180180), (181, 181181), (182, 182182), (183, 183183), (184, 184184), (185, 185185), 
(186, 186186), (187, 187187), (188, 188188), (189, 189189), (190, 190190), (191, 191191), (192, 192192), (193, 193193), (194, 194194), (195, 195195), (196, 196196), (197, 197197), (198, 198198), (199, 199199), (200, 200200), (201, 201201), (202, 202202), (203, 203203), (204, 204204), (205, 205205), (206, 206206), (207, 207207), (208, 208208), (209, 209209), (210, 210210), (211, 211211), (212, 212212), (213, 213213), (214, 214214), (215, 215215), (216, 216216), (217, 217217), (218, 218218), (219, 219219), (220, 220220), (221, 221221), (222, 222222), (223, 223223), (224, 224224), (225, 225225), (226, 226226), (227, 227227), (228, 228228), (229, 229229), (230, 230230), (231, 231231), (232, 232232), (233, 233233), (234, 234234), (235, 235235), (236, 236236), (237, 237237), (238, 238238), (239, 239239), (240, 240240), (241, 241241), (242, 242242), (243, 243243), (244, 244244), (245, 245245), (246, 246246), (247, 247247), (248, 248248), (249, 249249), (250, 250250), (251, 251251), (252, 252252), (253, 253253), (254, 254254), (255, 255255), (256, 256256), (257, 257257), (258, 258258), (259, 259259), (260, 260260), (261, 261261), (262, 262262), (263, 263263), (264, 264264), (265, 265265), (266, 266266), (267, 267267), (268, 268268), (269, 269269), (270, 270270), (271, 271271), (272, 272272), (273, 273273), (274, 274274), (275, 275275), (276, 276276), (277, 277277), (278, 278278), (279, 279279), (280, 280280), (281, 281281), (282, 282282), (283, 283283), (284, 284284), (285, 285285), (286, 286286), (287, 287287), (288, 288288), (289, 289289), (290, 290290), (291, 291291), (292, 292292), (293, 293293), (294, 294294), (295, 295295), (296, 296296), (297, 297297), (298, 298298), (299, 299299), (300, 300300), (301, 301301), (302, 302302), (303, 303303), (304, 304304), (305, 305305), (306, 306306), (307, 307307), (308, 308308), (309, 309309), (310, 310310), (311, 311311), (312, 312312), (313, 313313), (314, 314314), (315, 315315), (316, 316316), (317, 317317), (318, 318318), (319, 319319), (320, 320320), (321, 321321), (322, 322322), (323, 323323), (324, 324324), (325, 325325), (326, 326326), (327, 327327), (328, 328328), (329, 329329), (330, 330330), (331, 331331), (332, 332332), (333, 333333), (334, 334334), (335, 335335), (336, 336336), (337, 337337), (338, 338338), (339, 339339), (340, 340340), (341, 341341), (342, 342342), (343, 343343), (344, 344344), (345, 345345), (346, 346346), (347, 347347), (348, 348348), (349, 349349), (350, 350350), (351, 351351), (352, 352352), (353, 353353), (354, 354354), (355, 355355), (356, 356356), (357, 357357), (358, 358358), (359, 359359), (360, 360360), (361, 361361), (362, 362362), (363, 363363), (364, 364364), (365, 365365), (366, 366366), (367, 367367), (368, 368368), (369, 369369), (370, 370370), (371, 371371), (372, 372372), (373, 373373), (374, 374374), (375, 375375), (376, 376376), (377, 377377), (378, 378378), (379, 379379), (380, 380380), (381, 381381), (382, 382382), (383, 383383), (384, 384384), (385, 385385), (386, 386386), (387, 387387), (388, 388388), (389, 389389), (390, 390390), (391, 391391), (392, 392392), (393, 393393), (394, 394394), (395, 395395), (396, 396396), (397, 397397), (398, 398398), (399, 399399), (400, 400400), (401, 401401), (402, 402402), (403, 403403), (404, 404404), (405, 405405), (406, 406406), (407, 407407), (408, 408408), (409, 409409), (410, 410410), (411, 411411), (412, 412412), (413, 413413), (414, 414414), (415, 415415), (416, 416416), (417, 417417), (418, 418418), (419, 419419), (420, 420420), (421, 421421), (422, 422422), 
(423, 423423), (424, 424424), (425, 425425), (426, 426426), (427, 427427), (428, 428428), (429, 429429), (430, 430430), (431, 431431), (432, 432432), (433, 433433), (434, 434434), (435, 435435), (436, 436436), (437, 437437), (438, 438438), (439, 439439), (440, 440440), (441, 441441), (442, 442442), (443, 443443), (444, 444444), (445, 445445), (446, 446446), (447, 447447), (448, 448448), (449, 449449), (450, 450450), (451, 451451), (452, 452452), (453, 453453), (454, 454454), (455, 455455), (456, 456456), (457, 457457), (458, 458458), (459, 459459), (460, 460460), (461, 461461), (462, 462462), (463, 463463), (464, 464464), (465, 465465), (466, 466466), (467, 467467), (468, 468468), (469, 469469), (470, 470470), (471, 471471), (472, 472472), (473, 473473), (474, 474474), (475, 475475), (476, 476476), (477, 477477), (478, 478478), (479, 479479), (480, 480480), (481, 481481), (482, 482482), (483, 483483), (484, 484484), (485, 485485), (486, 486486), (487, 487487), (488, 488488), (489, 489489), (490, 490490), (491, 491491), (492, 492492), (493, 493493), (494, 494494), (495, 495495), (496, 496496), (497, 497497), (498, 498498), (499, 499499), (500, 500500), (501, 501501), (502, 502502), (503, 503503), (504, 504504), (505, 505505), (506, 506506), (507, 507507), (508, 508508), (509, 509509), (510, 510510), (511, 511511), (512, 512512), (513, 513513), (514, 514514), (515, 515515), (516, 516516), (517, 517517), (518, 518518), (519, 519519), (520, 520520), (521, 521521), (522, 522522), (523, 523523), (524, 524524), (525, 525525), (526, 526526), (527, 527527), (528, 528528), (529, 529529), (530, 530530), (531, 531531), (532, 532532), (533, 533533), (534, 534534), (535, 535535), (536, 536536), (537, 537537), (538, 538538), (539, 539539), (540, 540540), (541, 541541), (542, 542542), (543, 543543), (544, 544544), (545, 545545), (546, 546546), (547, 547547), (548, 548548), (549, 549549), (550, 550550), (551, 551551), (552, 552552), (553, 553553), (554, 554554), (555, 555555), (556, 556556), (557, 557557), (558, 558558), (559, 559559), (560, 560560), (561, 561561), (562, 562562), (563, 563563), (564, 564564), (565, 565565), (566, 566566), (567, 567567), (568, 568568), (569, 569569), (570, 570570), (571, 571571), (572, 572572), (573, 573573), (574, 574574), (575, 575575), (576, 576576), (577, 577577), (578, 578578), (579, 579579), (580, 580580), (581, 581581), (582, 582582), (583, 583583), (584, 584584), (585, 585585), (586, 586586), (587, 587587), (588, 588588), (589, 589589), (590, 590590), (591, 591591), (592, 592592), (593, 593593), (594, 594594), (595, 595595), (596, 596596), (597, 597597), (598, 598598), (599, 599599), (600, 600600), (601, 601601), (602, 602602), (603, 603603), (604, 604604), (605, 605605), (606, 606606), (607, 607607), (608, 608608), (609, 609609), (610, 610610), (611, 611611), (612, 612612), (613, 613613), (614, 614614), (615, 615615), (616, 616616), (617, 617617), (618, 618618), (619, 619619), (620, 620620), (621, 621621), (622, 622622), (623, 623623), (624, 624624), (625, 625625), (626, 626626), (627, 627627), (628, 628628), (629, 629629), (630, 630630), (631, 631631), (632, 632632), (633, 633633), (634, 634634), (635, 635635), (636, 636636), (637, 637637), (638, 638638), (639, 639639), (640, 640640), (641, 641641), (642, 642642), (643, 643643), (644, 644644), (645, 645645), (646, 646646), (647, 647647), (648, 648648), (649, 649649), (650, 650650), (651, 651651), (652, 652652), (653, 653653), (654, 654654), (655, 655655), (656, 656656), (657, 657657), (658, 658658), (659, 659659), 
(660, 660660), (661, 661661), (662, 662662), (663, 663663), (664, 664664), (665, 665665), (666, 666666), (667, 667667), (668, 668668), (669, 669669), (670, 670670), (671, 671671), (672, 672672), (673, 673673), (674, 674674), (675, 675675), (676, 676676), (677, 677677), (678, 678678), (679, 679679), (680, 680680), (681, 681681), (682, 682682), (683, 683683), (684, 684684), (685, 685685), (686, 686686), (687, 687687), (688, 688688), (689, 689689), (690, 690690), (691, 691691), (692, 692692), (693, 693693), (694, 694694), (695, 695695), (696, 696696), (697, 697697), (698, 698698), (699, 699699), (700, 700700), (701, 701701), (702, 702702), (703, 703703), (704, 704704), (705, 705705), (706, 706706), (707, 707707), (708, 708708), (709, 709709), (710, 710710), (711, 711711), (712, 712712), (713, 713713), (714, 714714), (715, 715715), (716, 716716), (717, 717717), (718, 718718), (719, 719719), (720, 720720), (721, 721721), (722, 722722), (723, 723723), (724, 724724), (725, 725725), (726, 726726), (727, 727727), (728, 728728), (729, 729729), (730, 730730), (731, 731731), (732, 732732), (733, 733733), (734, 734734), (735, 735735), (736, 736736), (737, 737737), (738, 738738), (739, 739739), (740, 740740), (741, 741741), (742, 742742), (743, 743743), (744, 744744), (745, 745745), (746, 746746), (747, 747747), (748, 748748), (749, 749749), (750, 750750), (751, 751751), (752, 752752), (753, 753753), (754, 754754), (755, 755755), (756, 756756), (757, 757757), (758, 758758), (759, 759759), (760, 760760), (761, 761761), (762, 762762), (763, 763763), (764, 764764), (765, 765765), (766, 766766), (767, 767767), (768, 768768), (769, 769769), (770, 770770), (771, 771771), (772, 772772), (773, 773773), (774, 774774), (775, 775775), (776, 776776), (777, 777777), (778, 778778), (779, 779779), (780, 780780), (781, 781781), (782, 782782), (783, 783783), (784, 784784), (785, 785785), (786, 786786), (787, 787787), (788, 788788), (789, 789789), (790, 790790), (791, 791791), (792, 792792), (793, 793793), (794, 794794), (795, 795795), (796, 796796), (797, 797797), (798, 798798), (799, 799799), (800, 800800), (801, 801801), (802, 802802), (803, 803803), (804, 804804), (805, 805805), (806, 806806), (807, 807807), (808, 808808), (809, 809809), (810, 810810), (811, 811811), (812, 812812), (813, 813813), (814, 814814), (815, 815815), (816, 816816), (817, 817817), (818, 818818), (819, 819819), (820, 820820), (821, 821821), (822, 822822), (823, 823823), (824, 824824), (825, 825825), (826, 826826), (827, 827827), (828, 828828), (829, 829829), (830, 830830), (831, 831831), (832, 832832), (833, 833833), (834, 834834), (835, 835835), (836, 836836), (837, 837837), (838, 838838), (839, 839839), (840, 840840), (841, 841841), (842, 842842), (843, 843843), (844, 844844), (845, 845845), (846, 846846), (847, 847847), (848, 848848), (849, 849849), (850, 850850), (851, 851851), (852, 852852), (853, 853853), (854, 854854), (855, 855855), (856, 856856), (857, 857857), (858, 858858), (859, 859859), (860, 860860), (861, 861861), (862, 862862), (863, 863863), (864, 864864), (865, 865865), (866, 866866), (867, 867867), (868, 868868), (869, 869869), (870, 870870), (871, 871871), (872, 872872), (873, 873873), (874, 874874), (875, 875875), (876, 876876), (877, 877877), (878, 878878), (879, 879879), (880, 880880), (881, 881881), (882, 882882), (883, 883883), (884, 884884), (885, 885885), (886, 886886), (887, 887887), (888, 888888), (889, 889889), (890, 890890), (891, 891891), (892, 892892), (893, 893893), (894, 894894), (895, 895895), (896, 896896), 
(897, 897897), (898, 898898), (899, 899899), (900, 900900), (901, 901901), (902, 902902), (903, 903903), (904, 904904), (905, 905905), (906, 906906), (907, 907907), (908, 908908), (909, 909909), (910, 910910), (911, 911911), (912, 912912), (913, 913913), (914, 914914), (915, 915915), (916, 916916), (917, 917917), (918, 918918), (919, 919919), (920, 920920), (921, 921921), (922, 922922), (923, 923923), (924, 924924), (925, 925925), (926, 926926), (927, 927927), (928, 928928), (929, 929929), (930, 930930), (931, 931931), (932, 932932), (933, 933933), (934, 934934), (935, 935935), (936, 936936), (937, 937937), (938, 938938), (939, 939939), (940, 940940), (941, 941941), (942, 942942), (943, 943943), (944, 944944), (945, 945945), (946, 946946), (947, 947947), (948, 948948), (949, 949949), (950, 950950), (951, 951951), (952, 952952), (953, 953953), (954, 954954), (955, 955955), (956, 956956), (957, 957957), (958, 958958), (959, 959959), (960, 960960), (961, 961961), (962, 962962), (963, 963963), (964, 964964), (965, 965965), (966, 966966), (967, 967967), (968, 968968), (969, 969969), (970, 970970), (971, 971971), (972, 972972), (973, 973973), (974, 974974), (975, 975975), (976, 976976), (977, 977977), (978, 978978), (979, 979979), (980, 980980), (981, 981981), (982, 982982), (983, 983983), (984, 984984), (985, 985985), (986, 986986), (987, 987987), (988, 988988), (989, 989989), (990, 990990), (991, 991991), (992, 992992), (993, 993993), (994, 994994), (995, 995995), (996, 996996), (997, 997997), (998, 998998), (999, 999999), (10000, 10000);", parameters: 0b 2025-03-31T16:02:03.429256Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqpf0fxyamn82edptj9apwat, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWQyMWE2MTMtMjZkM2MxYmYtYmZjYmM3OTEtNzlmNmE2Y2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Captured TEvDataShard::TEvRead from KQP_SOURCE_READ_ACTOR to TX_DATASHARD_ACTOR 2025-03-31T16:02:03.453060Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqpf0fxyamn82edptj9apwat, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWQyMWE2MTMtMjZkM2MxYmYtYmZjYmM3OTEtNzlmNmE2Y2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root Captured TEvDataShard::TEvRead from KQP_STREAM_LOOKUP_ACTOR to TX_DATASHARD_ACTOR --- split started --- --- split finished --- Captured TEvDataShard::TEvRead from KQP_STREAM_LOOKUP_ACTOR to TX_DATASHARD_ACTOR Captured TEvDataShard::TEvRead from KQP_STREAM_LOOKUP_ACTOR to TX_DATASHARD_ACTOR >> TestYmqHttpProxy::TestCreateQueueWithWrongBody [GOOD] >> TPQTest::TestMessageNo >> TestYmqHttpProxy::TestSetQueueAttributes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:02:00.081228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:02:00.081359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:02:00.081399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:02:00.081436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:02:00.081481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:02:00.081511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:02:00.081592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:02:00.081728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:02:00.082076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:02:00.271809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:02:00.271874Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:02:00.284314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:02:00.284426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:02:00.284585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:02:00.301708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:02:00.301956Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:02:00.302655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:02:00.302890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 
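
In the KqpStreamLookup trace above, the reads captured on either side of "--- split started --- --- split finished ---" show the stream lookup actor re-issuing TEvRead after the target shard set changed underneath it. A self-contained model of that refresh-and-retry loop; TShardMap and the routing/cluster pair are invented for the demo, while the real actor re-resolves shards through the scheme cache:

    #include <iostream>
    #include <iterator>
    #include <map>

    // Key-range routing: start of range -> owning shard id.
    struct TShardMap {
        std::map<int, int> RangeStart2Shard{{0, 1}};

        int Resolve(int key) const {
            auto it = RangeStart2Shard.upper_bound(key);
            return std::prev(it)->second; // last range starting at or before key
        }
    };

    int main() {
        TShardMap routing;            // the lookup actor's cached view
        TShardMap cluster = routing;  // actual ownership on the cluster

        cluster.RangeStart2Shard[500] = 2; // "--- split started --- --- split finished ---"

        const int key = 700;
        for (int attempt = 1;; ++attempt) {
            int shard = routing.Resolve(key);
            std::cout << "attempt " << attempt << ": TEvRead -> shard " << shard << "\n";
            if (cluster.Resolve(key) == shard) {
                std::cout << "read served by shard " << shard << "\n";
                break;
            }
            routing = cluster; // stale route: refresh the cached view and retry
        }
    }

The retry is invisible to the caller: the second TEvRead lands on the post-split owner, so the query completes as if no split had happened, which is what the [GOOD] verdict checks.
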
2025-03-31T16:02:00.305395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:02:00.306872Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:02:00.306958Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:02:00.307172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:02:00.307221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:02:00.307264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:02:00.307429Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:02:00.315646Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:02:00.451190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:02:00.451374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:02:00.451577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:02:00.451797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:02:00.451838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:02:00.460202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:02:00.460335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:02:00.460538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:02:00.460593Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:02:00.460646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:02:00.460696Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:02:00.462698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:02:00.462752Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:02:00.462790Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 
128 2025-03-31T16:02:00.465276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:02:00.465333Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:02:00.465420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:02:00.465476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:02:00.469415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:02:00.471484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:02:00.471693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:02:00.472742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:02:00.472881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:02:00.472926Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:02:00.473230Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:02:00.473280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:02:00.473465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:02:00.473544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:02:00.475991Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:02:00.476073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:02:00.476267Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:02:00.476304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:02:00.476533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2025-03-31T16:02:00.476580Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:02:00.476744Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:02:00.476779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:02:00.476815Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:02:00.476845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:02:00.476878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:02:00.476921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:02:00.476956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:02:00.476983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:02:00.477049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:02:00.477085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:02:00.477128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:02:00.479033Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:02:00.479192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:02:00.479230Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
TTxPublishToSchemeBoard Send, to populator: [7:204:2206], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-03-31T16:02:06.338459Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-31T16:02:06.338520Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TSyncHive, operationId 103:0, ProgressState, NeedSyncHive: 0 2025-03-31T16:02:06.338562Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 240 -> 240 2025-03-31T16:02:06.339589Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046678944, cookie: 103 2025-03-31T16:02:06.339741Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046678944, cookie: 103 2025-03-31T16:02:06.339796Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-03-31T16:02:06.339842Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 6 2025-03-31T16:02:06.339894Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2025-03-31T16:02:06.339992Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-03-31T16:02:06.342449Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvSyncTenantSchemeShard, at schemeshard: 72057594046678944, msg: DomainSchemeShard: 72057594046678944 DomainPathId: 2 TabletID: 72075186234409546 Generation: 2 EffectiveACLVersion: 0 SubdomainVersion: 4 UserAttributesVersion: 1 TenantHive: 72075186233409546 TenantSysViewProcessor: 18446744073709551615 TenantRootACL: "" TenantStatisticsAggregator: 18446744073709551615 TenantGraphShard: 18446744073709551615 2025-03-31T16:02:06.342552Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoExecute, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-31T16:02:06.342653Z node 7 :FLAT_TX_SCHEMESHARD INFO: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2], Generation: 2, ActorId:[7:396:2364], EffectiveACLVersion: 0, SubdomainVersion: 4, UserAttributesVersion: 1, TenantHive: 72075186233409546, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 4, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 72075186233409546, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 2025-03-31T16:02:06.342738Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186234409546 2025-03-31T16:02:06.342768Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186234409546, txId: 0, path id: [OwnerId: 72075186234409546, LocalPathId: 1] 2025-03-31T16:02:06.342896Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186234409546 2025-03-31T16:02:06.342927Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to 
populator: [7:484:2426], at schemeshard: 72075186234409546, txId: 0, path id: 1 2025-03-31T16:02:06.344053Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186234409546, msg: Owner: 72075186234409546 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72075186234409546, cookie: 0 2025-03-31T16:02:06.344149Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-31T16:02:06.344193Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-03-31T16:02:06.344331Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-03-31T16:02:06.344379Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-31T16:02:06.344431Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-03-31T16:02:06.344470Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-31T16:02:06.344516Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-03-31T16:02:06.344569Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-31T16:02:06.344623Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-03-31T16:02:06.344659Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-03-31T16:02:06.344743Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-03-31T16:02:06.346237Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-03-31T16:02:06.346328Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-03-31T16:02:06.347970Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-03-31T16:02:06.348019Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-03-31T16:02:06.348520Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-03-31T16:02:06.348616Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-03-31T16:02:06.348666Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [7:560:2500] TestWaitNotification: OK eventTxId 103 2025-03-31T16:02:06.349191Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:02:06.349366Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 227us result status StatusSuccess 2025-03-31T16:02:06.349733Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: 
true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409548 SchemeShard: 72075186234409546 Hive: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "hdd" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:02:06.350228Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:02:06.350389Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 175us result status StatusSuccess 2025-03-31T16:02:06.350723Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409548 SchemeShard: 72075186234409546 Hive: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "hdd" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:02:06.351323Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186234409546 2025-03-31T16:02:06.351483Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186234409546 describe path "/MyRoot/USER_0" took 178us result status StatusSuccess 2025-03-31T16:02:06.351819Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, 
result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "MyRoot/USER_0" PathId: 1 SchemeshardId: 72075186234409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 2 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409548 SchemeShard: 72075186234409546 Hive: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "hdd" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot/USER_0" } } } PathId: 1 PathOwnerId: 72075186234409546, at schemeshard: 72075186234409546 >> TestKinesisHttpProxy::TestListStreamConsumers >> SystemView::TopPartitionsByCpuFollowers [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::AlterNameConflicts-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-31T16:01:56.612584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:01:56.612681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:01:56.612724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:01:56.612771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:01:56.612820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:01:56.612878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:01:56.612963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:01:56.613055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:01:56.613454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:01:56.847501Z node 1 
:FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:01:56.847582Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:01:56.864972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:01:56.865444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:01:56.865643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:01:56.919122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:01:56.919494Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:01:56.920222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:56.920676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:01:56.930599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:56.932219Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:56.932305Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:56.932413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:01:56.932465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:56.932505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:01:56.932804Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:01:56.953944Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-03-31T16:01:57.403199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:01:57.403488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:57.403743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:01:57.403984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:01:57.404055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:57.420328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:57.420496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, 
operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:01:57.420734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:57.420791Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:01:57.420855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:01:57.420918Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:01:57.432319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:57.432399Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:01:57.432458Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:01:57.444374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:57.444450Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:57.444509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:57.444558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:01:57.455019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:01:57.464218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:01:57.464525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:01:57.465682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:57.465840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:01:57.465892Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:57.466187Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:01:57.466240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:57.466422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:01:57.466523Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:01:57.483759Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:57.483847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:57.484057Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:57.484097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:01:57.484482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:57.484531Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:01:57.484618Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:01:57.484651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:57.484692Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:01:57.484719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:57.484752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:01:57.484814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:57.484858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:01:57.484885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:01:57.484975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:01:57.485019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:01:57.485048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:01:57.492095Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:01:57.492276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:01:57.492314Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
Z node 7 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [7:125:2151] sender: [7:237:2058] recipient: [7:15:2062] 2025-03-31T16:02:06.499287Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:02:06.499544Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:02:06.499836Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:02:06.500111Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:02:06.500205Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:02:06.503266Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:02:06.503415Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:02:06.503687Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:02:06.503762Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:02:06.503819Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:02:06.503871Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:02:06.506699Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:02:06.506778Z node 7 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:02:06.506836Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:02:06.508967Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:02:06.509042Z node 7 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:02:06.509103Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:02:06.509172Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:02:06.509376Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:02:06.512075Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet 
strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:02:06.512307Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:02:06.513451Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:02:06.513640Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 30064773229 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:02:06.513727Z node 7 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:02:06.514118Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:02:06.514193Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:02:06.514439Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:02:06.514546Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:02:06.517262Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:02:06.517353Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:02:06.517593Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:02:06.517681Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:02:06.518124Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:02:06.518193Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:02:06.518362Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:02:06.518420Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:02:06.518475Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:02:06.518525Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:02:06.518590Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:02:06.518655Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:02:06.518707Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:02:06.518756Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: 
RemoveTx for txid 1:0 2025-03-31T16:02:06.518858Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:02:06.518914Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:02:06.518999Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:02:06.520173Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:02:06.520318Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:02:06.520370Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-03-31T16:02:06.520431Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-03-31T16:02:06.520494Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:02:06.520655Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-03-31T16:02:06.524501Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-03-31T16:02:06.525157Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-03-31T16:02:06.525965Z node 7 :TX_PROXY DEBUG: actor# [7:267:2258] Bootstrap 2025-03-31T16:02:06.554972Z node 7 :TX_PROXY DEBUG: actor# [7:267:2258] Become StateWork (SchemeCache [7:272:2263]) 2025-03-31T16:02:06.558373Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 1 Name: "USER_1" ExternalSchemeShard: true } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:02:06.558596Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 101:0, feature flag EnableAlterDatabaseCreateHiveFirst 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 1 Name: "USER_1" ExternalSchemeShard: true } 2025-03-31T16:02:06.558653Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 101:0, path /MyRoot/USER_1 2025-03-31T16:02:06.558850Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 101:0, explain: Invalid AlterExtSubDomain request: Check failed: path: '/MyRoot/USER_1', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), at schemeshard: 72057594046678944 2025-03-31T16:02:06.558909Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusPathDoesNotExist, reason: Invalid AlterExtSubDomain request: Check failed: path: '/MyRoot/USER_1', error: path 
hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), at schemeshard: 72057594046678944 2025-03-31T16:02:06.560589Z node 7 :TX_PROXY DEBUG: actor# [7:267:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-31T16:02:06.564027Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusPathDoesNotExist Reason: "Invalid AlterExtSubDomain request: Check failed: path: \'/MyRoot/USER_1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:02:06.564224Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusPathDoesNotExist, reason: Invalid AlterExtSubDomain request: Check failed: path: '/MyRoot/USER_1', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), operation: ALTER DATABASE, path: /MyRoot/USER_1 2025-03-31T16:02:06.564941Z node 7 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 >> SystemView::TopPartitionsByTliFields >> TTransferTests::CreateDropRecreate [GOOD] >> TTransferTests::ConsistencyLevel >> TestKinesisHttpProxy::TestRequestWithIAM [GOOD] >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchTimeout [GOOD] >> TPQTest::TestWriteTimeStampEstimate [GOOD] >> TPQTestInternal::TestAsInt [GOOD] >> TPQTestInternal::TestAsIntWide [GOOD] >> TPQTestInternal::StoreKeys [GOOD] >> TPQTestInternal::RestoreKeys [GOOD] >> TestKinesisHttpProxy::PutRecordsWithIncorrectHashKey [GOOD] >> DataStreams::TestReservedStorageMetering [GOOD] >> DataStreams::TestReservedConsumersMetering >> TSchemeShardExtSubDomainTest::CreateThenDropChangesParent-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::CreateThenDropChangesParent-AlterDatabaseCreateHiveFirst-true >> TestYmqHttpProxy::TestCreateQueueWithWrongAttribute >> TCmsTest::RequestReplaceBrokenDevices ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp/unittest >> KqpStreamLookup::ReadTableWithIndexDuringSplit [GOOD] Test command err: 2025-03-31T16:01:49.385968Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:01:49.386253Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:01:49.386451Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00120b/r3tmp/tmpFgT4Dd/pdisk_1.dat 2025-03-31T16:01:49.897126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:01:49.986851Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:01:50.065192Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:01:50.065345Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:01:50.080583Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:01:50.177913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:01:50.587995Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:789:2660], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:01:50.588145Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:800:2665], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:01:50.588287Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:01:50.593171Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:01:50.769161Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:803:2668], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:01:50.913930Z node 1 :TX_PROXY ERROR: Actor# [1:881:2715] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:02:05.886648Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqpf03zs36ffpdn44q2jrbj6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTM1YTYwMTAtNjU3NjFlOWEtNjBiYjc3ODktY2UxMGIyZmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:02:05.958139Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqpf03zs36ffpdn44q2jrbj6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTM1YTYwMTAtNjU3NjFlOWEtNjBiYjc3ODktY2UxMGIyZmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:02:06.157939Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpf03zs36ffpdn44q2jrbj6", SessionId: ydb://session/3?node_id=1&id=OTM1YTYwMTAtNjU3NjFlOWEtNjBiYjc3ODktY2UxMGIyZmQ=, Slow query, duration: 15.571583s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "UPSERT INTO `/Root/TestTable` (key, value) VALUES (0, 00), (1, 11), (2, 22), (3, 33), (4, 44), (5, 55), (6, 66), (7, 77), (8, 88), (9, 99), (10, 1010), (11, 1111), (12, 1212), (13, 1313), (14, 1414), (15, 1515), (16, 1616), (17, 1717), (18, 1818), (19, 1919), (20, 2020), (21, 2121), (22, 2222), (23, 2323), (24, 2424), (25, 2525), (26, 2626), (27, 2727), (28, 2828), (29, 2929), (30, 3030), (31, 3131), (32, 3232), (33, 3333), (34, 3434), (35, 3535), (36, 3636), (37, 3737), (38, 3838), (39, 3939), (40, 4040), (41, 4141), (42, 4242), (43, 4343), (44, 4444), (45, 4545), (46, 4646), (47, 4747), (48, 4848), (49, 4949), (50, 5050), (51, 5151), (52, 5252), (53, 5353), (54, 5454), (55, 5555), (56, 5656), (57, 5757), (58, 5858), (59, 5959), (60, 6060), (61, 6161), (62, 6262), (63, 6363), (64, 6464), (65, 6565), (66, 6666), (67, 6767), (68, 6868), (69, 6969), (70, 7070), (71, 7171), (72, 7272), (73, 7373), (74, 7474), (75, 7575), (76, 7676), (77, 7777), (78, 7878), (79, 7979), (80, 8080), (81, 8181), (82, 8282), (83, 8383), (84, 8484), (85, 8585), (86, 8686), (87, 8787), (88, 8888), (89, 8989), (90, 9090), (91, 9191), (92, 9292), (93, 9393), (94, 9494), (95, 9595), (96, 9696), (97, 9797), (98, 9898), (99, 9999), (100, 100100), (101, 101101), (102, 102102), (103, 103103), (104, 104104), (105, 105105), (106, 106106), (107, 107107), (108, 108108), (109, 109109), (110, 110110), (111, 111111), (112, 112112), (113, 113113), (114, 114114), (115, 115115), (116, 116116), (117, 117117), (118, 118118), (119, 119119), (120, 120120), (121, 121121), (122, 122122), (123, 123123), (124, 124124), (125, 125125), (126, 126126), (127, 127127), (128, 128128), (129, 129129), (130, 130130), (131, 131131), (132, 132132), (133, 133133), (134, 134134), (135, 135135), (136, 136136), (137, 137137), (138, 138138), (139, 139139), (140, 140140), (141, 141141), (142, 142142), (143, 143143), (144, 144144), (145, 145145), (146, 146146), (147, 147147), (148, 148148), (149, 149149), (150, 150150), (151, 151151), (152, 152152), (153, 153153), (154, 154154), (155, 155155), (156, 156156), (157, 157157), (158, 158158), (159, 159159), (160, 160160), (161, 161161), (162, 162162), (163, 163163), 
(164, 164164), (165, 165165), (166, 166166), (167, 167167), (168, 168168), (169, 169169), (170, 170170), (171, 171171), (172, 172172), (173, 173173), (174, 174174), (175, 175175), (176, 176176), (177, 177177), (178, 178178), (179, 179179), (180, 180180), (181, 181181), (182, 182182), (183, 183183), (184, 184184), (185, 185185), (186, 186186), (187, 187187), (188, 188188), (189, 189189), (190, 190190), (191, 191191), (192, 192192), (193, 193193), (194, 194194), (195, 195195), (196, 196196), (197, 197197), (198, 198198), (199, 199199), (200, 200200), (201, 201201), (202, 202202), (203, 203203), (204, 204204), (205, 205205), (206, 206206), (207, 207207), (208, 208208), (209, 209209), (210, 210210), (211, 211211), (212, 212212), (213, 213213), (214, 214214), (215, 215215), (216, 216216), (217, 217217), (218, 218218), (219, 219219), (220, 220220), (221, 221221), (222, 222222), (223, 223223), (224, 224224), (225, 225225), (226, 226226), (227, 227227), (228, 228228), (229, 229229), (230, 230230), (231, 231231), (232, 232232), (233, 233233), (234, 234234), (235, 235235), (236, 236236), (237, 237237), (238, 238238), (239, 239239), (240, 240240), (241, 241241), (242, 242242), (243, 243243), (244, 244244), (245, 245245), (246, 246246), (247, 247247), (248, 248248), (249, 249249), (250, 250250), (251, 251251), (252, 252252), (253, 253253), (254, 254254), (255, 255255), (256, 256256), (257, 257257), (258, 258258), (259, 259259), (260, 260260), (261, 261261), (262, 262262), (263, 263263), (264, 264264), (265, 265265), (266, 266266), (267, 267267), (268, 268268), (269, 269269), (270, 270270), (271, 271271), (272, 272272), (273, 273273), (274, 274274), (275, 275275), (276, 276276), (277, 277277), (278, 278278), (279, 279279), (280, 280280), (281, 281281), (282, 282282), (283, 283283), (284, 284284), (285, 285285), (286, 286286), (287, 287287), (288, 288288), (289, 289289), (290, 290290), (291, 291291), (292, 292292), (293, 293293), (294, 294294), (295, 295295), (296, 296296), (297, 297297), (298, 298298), (299, 299299), (300, 300300), (301, 301301), (302, 302302), (303, 303303), (304, 304304), (305, 305305), (306, 306306), (307, 307307), (308, 308308), (309, 309309), (310, 310310), (311, 311311), (312, 312312), (313, 313313), (314, 314314), (315, 315315), (316, 316316), (317, 317317), (318, 318318), (319, 319319), (320, 320320), (321, 321321), (322, 322322), (323, 323323), (324, 324324), (325, 325325), (326, 326326), (327, 327327), (328, 328328), (329, 329329), (330, 330330), (331, 331331), (332, 332332), (333, 333333), (334, 334334), (335, 335335), (336, 336336), (337, 337337), (338, 338338), (339, 339339), (340, 340340), (341, 341341), (342, 342342), (343, 343343), (344, 344344), (345, 345345), (346, 346346), (347, 347347), (348, 348348), (349, 349349), (350, 350350), (351, 351351), (352, 352352), (353, 353353), (354, 354354), (355, 355355), (356, 356356), (357, 357357), (358, 358358), (359, 359359), (360, 360360), (361, 361361), (362, 362362), (363, 363363), (364, 364364), (365, 365365), (366, 366366), (367, 367367), (368, 368368), (369, 369369), (370, 370370), (371, 371371), (372, 372372), (373, 373373), (374, 374374), (375, 375375), (376, 376376), (377, 377377), (378, 378378), (379, 379379), (380, 380380), (381, 381381), (382, 382382), (383, 383383), (384, 384384), (385, 385385), (386, 386386), (387, 387387), (388, 388388), (389, 389389), (390, 390390), (391, 391391), (392, 392392), (393, 393393), (394, 394394), (395, 395395), (396, 396396), (397, 397397), (398, 398398), (399, 399399), (400, 400400), 
(401, 401401), (402, 402402), (403, 403403), (404, 404404), (405, 405405), (406, 406406), (407, 407407), (408, 408408), (409, 409409), (410, 410410), (411, 411411), (412, 412412), (413, 413413), (414, 414414), (415, 415415), (416, 416416), (417, 417417), (418, 418418), (419, 419419), (420, 420420), (421, 421421), (422, 422422), (423, 423423), (424, 424424), (425, 425425), (426, 426426), (427, 427427), (428, 428428), (429, 429429), (430, 430430), (431, 431431), (432, 432432), (433, 433433), (434, 434434), (435, 435435), (436, 436436), (437, 437437), (438, 438438), (439, 439439), (440, 440440), (441, 441441), (442, 442442), (443, 443443), (444, 444444), (445, 445445), (446, 446446), (447, 447447), (448, 448448), (449, 449449), (450, 450450), (451, 451451), (452, 452452), (453, 453453), (454, 454454), (455, 455455), (456, 456456), (457, 457457), (458, 458458), (459, 459459), (460, 460460), (461, 461461), (462, 462462), (463, 463463), (464, 464464), (465, 465465), (466, 466466), (467, 467467), (468, 468468), (469, 469469), (470, 470470), (471, 471471), (472, 472472), (473, 473473), (474, 474474), (475, 475475), (476, 476476), (477, 477477), (478, 478478), (479, 479479), (480, 480480), (481, 481481), (482, 482482), (483, 483483), (484, 484484), (485, 485485), (486, 486486), (487, 487487), (488, 488488), (489, 489489), (490, 490490), (491, 491491), (492, 492492), (493, 493493), (494, 494494), (495, 495495), (496, 496496), (497, 497497), (498, 498498), (499, 499499), (500, 500500), (501, 501501), (502, 502502), (503, 503503), (504, 504504), (505, 505505), (506, 506506), (507, 507507), (508, 508508), (509, 509509), (510, 510510), (511, 511511), (512, 512512), (513, 513513), (514, 514514), (515, 515515), (516, 516516), (517, 517517), (518, 518518), (519, 519519), (520, 520520), (521, 521521), (522, 522522), (523, 523523), (524, 524524), (525, 525525), (526, 526526), (527, 527527), (528, 528528), (529, 529529), (530, 530530), (531, 531531), (532, 532532), (533, 533533), (534, 534534), (535, 535535), (536, 536536), (537, 537537), (538, 538538), (539, 539539), (540, 540540), (541, 541541), (542, 542542), (543, 543543), (544, 544544), (545, 545545), (546, 546546), (547, 547547), (548, 548548), (549, 549549), (550, 550550), (551, 551551), (552, 552552), (553, 553553), (554, 554554), (555, 555555), (556, 556556), (557, 557557), (558, 558558), (559, 559559), (560, 560560), (561, 561561), (562, 562562), (563, 563563), (564, 564564), (565, 565565), (566, 566566), (567, 567567), (568, 568568), (569, 569569), (570, 570570), (571, 571571), (572, 572572), (573, 573573), (574, 574574), (575, 575575), (576, 576576), (577, 577577), (578, 578578), (579, 579579), (580, 580580), (581, 581581), (582, 582582), (583, 583583), (584, 584584), (585, 585585), (586, 586586), (587, 587587), (588, 588588), (589, 589589), (590, 590590), (591, 591591), (592, 592592), (593, 593593), (594, 594594), (595, 595595), (596, 596596), (597, 597597), (598, 598598), (599, 599599), (600, 600600), (601, 601601), (602, 602602), (603, 603603), (604, 604604), (605, 605605), (606, 606606), (607, 607607), (608, 608608), (609, 609609), (610, 610610), (611, 611611), (612, 612612), (613, 613613), (614, 614614), (615, 615615), (616, 616616), (617, 617617), (618, 618618), (619, 619619), (620, 620620), (621, 621621), (622, 622622), (623, 623623), (624, 624624), (625, 625625), (626, 626626), (627, 627627), (628, 628628), (629, 629629), (630, 630630), (631, 631631), (632, 632632), (633, 633633), (634, 634634), (635, 635635), (636, 636636), (637, 637637), 
(638, 638638), (639, 639639), (640, 640640), (641, 641641), (642, 642642), (643, 643643), (644, 644644), (645, 645645), (646, 646646), (647, 647647), (648, 648648), (649, 649649), (650, 650650), (651, 651651), (652, 652652), (653, 653653), (654, 654654), (655, 655655), (656, 656656), (657, 657657), (658, 658658), (659, 659659), (660, 660660), (661, 661661), (662, 662662), (663, 663663), (664, 664664), (665, 665665), (666, 666666), (667, 667667), (668, 668668), (669, 669669), (670, 670670), (671, 671671), (672, 672672), (673, 673673), (674, 674674), (675, 675675), (676, 676676), (677, 677677), (678, 678678), (679, 679679), (680, 680680), (681, 681681), (682, 682682), (683, 683683), (684, 684684), (685, 685685), (686, 686686), (687, 687687), (688, 688688), (689, 689689), (690, 690690), (691, 691691), (692, 692692), (693, 693693), (694, 694694), (695, 695695), (696, 696696), (697, 697697), (698, 698698), (699, 699699), (700, 700700), (701, 701701), (702, 702702), (703, 703703), (704, 704704), (705, 705705), (706, 706706), (707, 707707), (708, 708708), (709, 709709), (710, 710710), (711, 711711), (712, 712712), (713, 713713), (714, 714714), (715, 715715), (716, 716716), (717, 717717), (718, 718718), (719, 719719), (720, 720720), (721, 721721), (722, 722722), (723, 723723), (724, 724724), (725, 725725), (726, 726726), (727, 727727), (728, 728728), (729, 729729), (730, 730730), (731, 731731), (732, 732732), (733, 733733), (734, 734734), (735, 735735), (736, 736736), (737, 737737), (738, 738738), (739, 739739), (740, 740740), (741, 741741), (742, 742742), (743, 743743), (744, 744744), (745, 745745), (746, 746746), (747, 747747), (748, 748748), (749, 749749), (750, 750750), (751, 751751), (752, 752752), (753, 753753), (754, 754754), (755, 755755), (756, 756756), (757, 757757), (758, 758758), (759, 759759), (760, 760760), (761, 761761), (762, 762762), (763, 763763), (764, 764764), (765, 765765), (766, 766766), (767, 767767), (768, 768768), (769, 769769), (770, 770770), (771, 771771), (772, 772772), (773, 773773), (774, 774774), (775, 775775), (776, 776776), (777, 777777), (778, 778778), (779, 779779), (780, 780780), (781, 781781), (782, 782782), (783, 783783), (784, 784784), (785, 785785), (786, 786786), (787, 787787), (788, 788788), (789, 789789), (790, 790790), (791, 791791), (792, 792792), (793, 793793), (794, 794794), (795, 795795), (796, 796796), (797, 797797), (798, 798798), (799, 799799), (800, 800800), (801, 801801), (802, 802802), (803, 803803), (804, 804804), (805, 805805), (806, 806806), (807, 807807), (808, 808808), (809, 809809), (810, 810810), (811, 811811), (812, 812812), (813, 813813), (814, 814814), (815, 815815), (816, 816816), (817, 817817), (818, 818818), (819, 819819), (820, 820820), (821, 821821), (822, 822822), (823, 823823), (824, 824824), (825, 825825), (826, 826826), (827, 827827), (828, 828828), (829, 829829), (830, 830830), (831, 831831), (832, 832832), (833, 833833), (834, 834834), (835, 835835), (836, 836836), (837, 837837), (838, 838838), (839, 839839), (840, 840840), (841, 841841), (842, 842842), (843, 843843), (844, 844844), (845, 845845), (846, 846846), (847, 847847), (848, 848848), (849, 849849), (850, 850850), (851, 851851), (852, 852852), (853, 853853), (854, 854854), (855, 855855), (856, 856856), (857, 857857), (858, 858858), (859, 859859), (860, 860860), (861, 861861), (862, 862862), (863, 863863), (864, 864864), (865, 865865), (866, 866866), (867, 867867), (868, 868868), (869, 869869), (870, 870870), (871, 871871), (872, 872872), (873, 873873), (874, 874874), 
(875, 875875), (876, 876876), (877, 877877), (878, 878878), (879, 879879), (880, 880880), (881, 881881), (882, 882882), (883, 883883), (884, 884884), (885, 885885), (886, 886886), (887, 887887), (888, 888888), (889, 889889), (890, 890890), (891, 891891), (892, 892892), (893, 893893), (894, 894894), (895, 895895), (896, 896896), (897, 897897), (898, 898898), (899, 899899), (900, 900900), (901, 901901), (902, 902902), (903, 903903), (904, 904904), (905, 905905), (906, 906906), (907, 907907), (908, 908908), (909, 909909), (910, 910910), (911, 911911), (912, 912912), (913, 913913), (914, 914914), (915, 915915), (916, 916916), (917, 917917), (918, 918918), (919, 919919), (920, 920920), (921, 921921), (922, 922922), (923, 923923), (924, 924924), (925, 925925), (926, 926926), (927, 927927), (928, 928928), (929, 929929), (930, 930930), (931, 931931), (932, 932932), (933, 933933), (934, 934934), (935, 935935), (936, 936936), (937, 937937), (938, 938938), (939, 939939), (940, 940940), (941, 941941), (942, 942942), (943, 943943), (944, 944944), (945, 945945), (946, 946946), (947, 947947), (948, 948948), (949, 949949), (950, 950950), (951, 951951), (952, 952952), (953, 953953), (954, 954954), (955, 955955), (956, 956956), (957, 957957), (958, 958958), (959, 959959), (960, 960960), (961, 961961), (962, 962962), (963, 963963), (964, 964964), (965, 965965), (966, 966966), (967, 967967), (968, 968968), (969, 969969), (970, 970970), (971, 971971), (972, 972972), (973, 973973), (974, 974974), (975, 975975), (976, 976976), (977, 977977), (978, 978978), (979, 979979), (980, 980980), (981, 981981), (982, 982982), (983, 983983), (984, 984984), (985, 985985), (986, 986986), (987, 987987), (988, 988988), (989, 989989), (990, 990990), (991, 991991), (992, 992992), (993, 993993), (994, 994994), (995, 995995), (996, 996996), (997, 997997), (998, 998998), (999, 999999), (10000, 10000);", parameters: 0b 2025-03-31T16:02:06.407704Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqpf0k6kdz1y2jyw62qdjj1b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjljZmY4NmQtZDMyYmNhMjEtM2M2NzlmMmEtNjk1ODI1Zg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
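The value dump above follows a fixed pattern: every pair is (k, k*1001) for k running up to 999 (e.g. 875*1001 = 875875, 999*1001 = 999999), with a single irregular final row (10000, 10000). It is apparently the tail of the VALUES list of a bulk insert/upsert statement the test sends through KQP; the statement's head, table, and column names are not visible in the log. A minimal sketch of how such a statement could be generated, with the invented parts marked:

# A minimal sketch, assuming a plain bulk UPSERT. The (k, k*1001) pattern and
# the final (10000, 10000) row are read off the log above; the table and
# column names are invented for illustration and are not from the test.
def build_upsert(table: str = "t", last_regular_key: int = 999) -> str:
    rows = [f"({k}, {k * 1001})" for k in range(1, last_regular_key + 1)]
    rows.append("(10000, 10000)")  # the one row that breaks the pattern
    return f"UPSERT INTO {table} (key, value) VALUES {', '.join(rows)};"

print(build_upsert()[-60:])  # ..., (998, 998998), (999, 999999), (10000, 10000);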
Database not set, use /Root Captured TEvDataShard::TEvRead from KQP_SOURCE_READ_ACTOR to TX_DATASHARD_ACTOR >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTestInternal::RestoreKeys [GOOD] Test command err: 2025-03-31T16:02:07.798903Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:02:07.798990Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-31T16:02:07.819630Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:178:2193], now have 1 active actors on pipe 2025-03-31T16:02:07.819765Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-03-31T16:02:07.841422Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "aaa" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "aaa" Generation: 1 Important: true } 2025-03-31T16:02:07.844801Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "aaa" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "aaa" Generation: 1 Important: true } 2025-03-31T16:02:07.844981Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:02:07.847576Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "aaa" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } 
Partitions { PartitionId: 1 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "aaa" Generation: 1 Important: true } 2025-03-31T16:02:07.847889Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitConfigStep 2025-03-31T16:02:07.847973Z node 1 :PERSQUEUE DEBUG: [topic:1:Initializer] Start initializing step TInitConfigStep 2025-03-31T16:02:07.848747Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-31T16:02:07.849179Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:186:2199] 2025-03-31T16:02:07.850139Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Initializing completed. 2025-03-31T16:02:07.850208Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [1:186:2199] 2025-03-31T16:02:07.850270Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-31T16:02:07.851301Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-03-31T16:02:07.851431Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2025-03-31T16:02:07.851497Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2025-03-31T16:02:07.851543Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user aaa reinit request with generation 1 2025-03-31T16:02:07.851571Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user aaa reinit with generation 1 done 2025-03-31T16:02:07.851740Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-31T16:02:07.851802Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-31T16:02:07.851841Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-31T16:02:07.851895Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-31T16:02:07.851942Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cuser 2025-03-31T16:02:07.851967Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uuser 2025-03-31T16:02:07.851990Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000caaa 2025-03-31T16:02:07.852015Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uaaa 2025-03-31T16:02:07.852064Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-31T16:02:07.852121Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-03-31T16:02:07.852253Z node 1 
:PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-03-31T16:02:07.852296Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user aaa readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-03-31T16:02:07.852503Z node 1 :PERSQUEUE DEBUG: [topic:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-31T16:02:07.852816Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:188:2201] 2025-03-31T16:02:07.853847Z node 1 :PERSQUEUE DEBUG: [topic:1:Initializer] Initializing completed. 2025-03-31T16:02:07.853906Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'topic' partition 1 generation 2 [1:188:2201] 2025-03-31T16:02:07.853964Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateInit] SYNC INIT topic topic partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-31T16:02:07.854758Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Process pending events. Count 0 2025-03-31T16:02:07.854855Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user reinit request with generation 1 2025-03-31T16:02:07.854909Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user reinit with generation 1 done 2025-03-31T16:02:07.854955Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user aaa reinit request with generation 1 2025-03-31T16:02:07.854981Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user aaa reinit with generation 1 done 2025-03-31T16:02:07.855228Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] === DumpKeyValueRequest === 2025-03-31T16:02:07.855264Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- delete ---------------- 2025-03-31T16:02:07.855294Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- write ----------------- 2025-03-31T16:02:07.855325Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] i0000000001 2025-03-31T16:02:07.855367Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001cuser 2025-03-31T16:02:07.855415Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001uuser 2025-03-31T16:02:07.855442Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001caaa 2025-03-31T16:02:07.855465Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001uaaa 2025-03-31T16:02:07.855494Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- rename ---------------- 2025-03-31T16:02:07.855538Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] =========================== 2025-03-31T16:02:07.855631Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 
0 ReadingTimestamp 0 rrg 0 2025-03-31T16:02:07.855681Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user aaa readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-03-31T16:02:07.855830Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-03-31T16:02:07.855967Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-03-31T16:02:07.861246Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-03-31T16:02:07.861625Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-03-31T16:02:07.862075Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:201:2210], now have 1 active actors on pipe 2025-03-31T16:02:07.862814Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:204:2212], now have 1 active actors on pipe 2025-03-31T16:02:07.862957Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-03-31T16:02:07.863016Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-03-31T16:02:07.863115Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message topic: topic partition: 0 SourceId: 'sourceid0' SeqNo: 1 partNo : 0 messageNo: 0 size 7 offset: -1 2025-03-31T16:02:07.863260Z node 1 :PERSQUEUE DEBUG: tablet ... ] Topic 'topic' partition 0 user aaa readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 1 2025-03-31T16:02:07.944182Z node 1 :PERSQUEUE DEBUG: [topic:1:Initializer] Start initializing step TInitDataStep 2025-03-31T16:02:07.944244Z node 1 :PERSQUEUE DEBUG: [topic:1:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-03-31T16:02:07.944288Z node 1 :PERSQUEUE INFO: [topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-31T16:02:07.944335Z node 1 :PERSQUEUE DEBUG: [topic:1:Initializer] Initializing completed. 2025-03-31T16:02:07.944450Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'topic' partition 1 generation 3 [1:294:2285] 2025-03-31T16:02:07.944509Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateInit] SYNC INIT topic topic partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-31T16:02:07.944570Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Process pending events. 
Count 0 2025-03-31T16:02:07.944662Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 1 2025-03-31T16:02:07.944702Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user aaa readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 1 2025-03-31T16:02:07.945087Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 0 Topic 'topic' partition 0 user user offset 0 count 1 size 1024000 endOffset 1 max time lag 0ms effective offset 0 2025-03-31T16:02:07.945158Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 1 2025-03-31T16:02:07.945214Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Reading cookie 0. All data is from uncompacted head. 2025-03-31T16:02:07.945255Z node 1 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-03-31T16:02:07.945400Z node 1 :PERSQUEUE DEBUG: Topic 'topic' partition 0 user user readTimeStamp done, result 1000000 queuesize 1 startOffset 0 2025-03-31T16:02:07.945453Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user aaa readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 1 2025-03-31T16:02:07.945499Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user aaa send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 1 2025-03-31T16:02:07.945774Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 1 Topic 'topic' partition 0 user aaa offset 0 count 1 size 1024000 endOffset 1 max time lag 0ms effective offset 0 2025-03-31T16:02:07.945851Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 1 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 1 2025-03-31T16:02:07.945898Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Reading cookie 1. All data is from uncompacted head. 
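The DumpKeyValueRequest sections above show the KV keys each partition writes on init: i0000000000 (partition info) plus two metadata variants per consumer, m0000000000cuser / m0000000000uuser and m0000000000caaa / m0000000000uaaa, for the consumers user and aaa. A data-blob key in the same scheme appears a few entries below (d0000000000_00000000000000000001_00000_0000000001_00000|). The field widths in the sketch are inferred from these dumped keys, not from any documented format:

# Hypothetical sketch: split a PERSQUEUE data-blob key of the shape dumped in
# this log (d<partition:10>_<offset:20>_<partNo:5>_<count:10>_<suffix:5>[|]).
# Field widths and meanings are an inference from the dumped keys; the
# trailing '|' appearing on the new head blob is likewise an assumption.
def parse_data_key(key: str) -> dict:
    assert key.startswith("d")
    body = key[1:].rstrip("|")
    part, offset, part_no, count, suffix = body.split("_")
    return {
        "partition": int(part),
        "offset": int(offset),
        "part_no": int(part_no),
        "count": int(count),
        "suffix": int(suffix),
        "head": key.endswith("|"),
    }

print(parse_data_key("d0000000000_00000000000000000001_00000_0000000001_00000|"))
# -> {'partition': 0, 'offset': 1, 'part_no': 0, 'count': 1, 'suffix': 0, 'head': True}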
2025-03-31T16:02:07.945940Z node 1 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-03-31T16:02:07.946059Z node 1 :PERSQUEUE DEBUG: Topic 'topic' partition 0 user aaa readTimeStamp done, result 1000000 queuesize 0 startOffset 0 2025-03-31T16:02:07.947050Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:318:2298], now have 1 active actors on pipe 2025-03-31T16:02:07.949196Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-03-31T16:02:07.949258Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-03-31T16:02:07.949744Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-03-31T16:02:07.950159Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:321:2300], now have 1 active actors on pipe 2025-03-31T16:02:07.950251Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-03-31T16:02:07.950321Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-03-31T16:02:07.950417Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message topic: topic partition: 0 SourceId: 'sourceid0' SeqNo: 2 partNo : 0 messageNo: 1 size 7 offset: -1 2025-03-31T16:02:07.950499Z node 1 :PERSQUEUE DEBUG: tablet 72057594037927937 topic 'topic' partition 0 error: new GetOwnership request needed for owner 2025-03-31T16:02:07.950598Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvError Cookie 2, Error new GetOwnership request needed for owner 2025-03-31T16:02:07.950652Z node 1 :PERSQUEUE DEBUG: Answer error topic: 'topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2025-03-31T16:02:07.950982Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:323:2302], now have 1 active actors on pipe 2025-03-31T16:02:07.951139Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-03-31T16:02:07.951194Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-03-31T16:02:07.951292Z node 1 :PERSQUEUE INFO: new Cookie default|38672a8c-a83ff4f-5781b66d-7265b890_0 generated for partition 0 topic 'topic' owner default 2025-03-31T16:02:07.951426Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2025-03-31T16:02:07.951515Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-03-31T16:02:07.951900Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:325:2304], now have 1 active actors on pipe 2025-03-31T16:02:07.951985Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-03-31T16:02:07.952016Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-03-31T16:02:07.952086Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message topic: topic partition: 0 SourceId: 'sourceid0' SeqNo: 2 partNo : 0 messageNo: 0 size 7 offset: -1 2025-03-31T16:02:07.952183Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Send write quota request. Topic: "topic". Partition: 0. Amount: 16. Cookie: 1 2025-03-31T16:02:07.952289Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Got quota. Topic: "topic". 
Partition: 0: Cookie: 1 2025-03-31T16:02:07.952454Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 part blob processing sourceId 'sourceid0' seqNo 2 partNo 0 2025-03-31T16:02:07.963612Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 part blob complete sourceId 'sourceid0' seqNo 2 partNo 0 FormedBlobsCount 0 NewHead: Offset 1 PartNo 0 PackedSize 79 count 1 nextOffset 2 batches 1 2025-03-31T16:02:07.964307Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Add new write blob: topic 'topic' partition 0 compactOffset 1,1 HeadOffset 0 endOffset 1 curOffset 2 d0000000000_00000000000000000001_00000_0000000001_00000| size 65 WTime 2000000 2025-03-31T16:02:07.964464Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-31T16:02:07.964502Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-31T16:02:07.964540Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-03-31T16:02:07.964579Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-31T16:02:07.964618Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psourceid0 2025-03-31T16:02:07.964643Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] d0000000000_00000000000000000001_00000_0000000001_00000| 2025-03-31T16:02:07.964666Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-31T16:02:07.964699Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-31T16:02:07.964770Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-03-31T16:02:07.964902Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-03-31T16:02:07.964986Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 1 partNo 0 count 1 size 65 2025-03-31T16:02:07.967753Z node 1 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 1 count 1 size 65 actorID [1:283:2276] 2025-03-31T16:02:07.967929Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 16 WriteNewSizeFromSupportivePartitions# 0 2025-03-31T16:02:07.968019Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-31T16:02:07.968090Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'sourceid0', Topic: 'topic', Partition: 0, SeqNo: 2, partNo: 0, Offset: 1 is stored on disk 2025-03-31T16:02:07.968321Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] topic 'topicCounters. CacheSize 65 CachedBlobs 1 2025-03-31T16:02:07.968418Z node 1 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. 
Tablet '72057594037927937' partition 0 offset 1 partno 0 count 1 parts 0 size 65 2025-03-31T16:02:07.968509Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-03-31T16:02:07.968929Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:331:2309], now have 1 active actors on pipe 2025-03-31T16:02:07.969052Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-03-31T16:02:07.969098Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-03-31T16:02:07.969197Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-03-31T16:02:07.969547Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:333:2311], now have 1 active actors on pipe 2025-03-31T16:02:07.969647Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-03-31T16:02:07.969689Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-03-31T16:02:07.969791Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-03-31T16:02:07.970103Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:335:2313], now have 1 active actors on pipe 2025-03-31T16:02:07.970193Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-03-31T16:02:07.970238Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-03-31T16:02:07.970371Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 >> TestKinesisHttpProxy::TestRequestNoAuthorization >> TMaintenanceApiTest::ManyActionGroupsWithSingleAction |72.4%| [TA] $(B)/ydb/core/tx/datashard/ut_kqp/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TestKinesisHttpProxy::ListShards >> TCmsTest::RequestRestartServicesRejectSecond ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchTimeout [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:01:36.143675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:01:36.143805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:01:36.143842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:01:36.143872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:01:36.143924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:01:36.143966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:01:36.144020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:01:36.144162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:01:36.144498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:01:36.234209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:01:36.234258Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:01:36.238615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:01:36.238702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:01:36.238856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:01:36.243300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:01:36.243491Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:01:36.244080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:36.244259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:01:36.246394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:36.247460Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:36.247513Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 
2025-03-31T16:01:36.247646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:01:36.247684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:36.247714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:01:36.247861Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:01:36.254465Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:01:36.385776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:01:36.385988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:36.386243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:01:36.386463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:01:36.386516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:36.394082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:36.394219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:01:36.394455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:36.394502Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:01:36.394533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:01:36.394558Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:01:36.396262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:36.396305Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:01:36.396334Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:01:36.398147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:36.398212Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:36.398260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:36.398314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:01:36.402235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:01:36.404818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:01:36.405052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:01:36.406110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:36.406260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:01:36.406324Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:36.406610Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:01:36.406664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:36.406829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:01:36.406932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:01:36.409139Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:36.409191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:36.409390Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:36.409458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:01:36.409681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:36.409719Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:01:36.409810Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:01:36.409842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:36.409879Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:01:36.409912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:36.409948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:01:36.409990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:36.410022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:01:36.410063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:01:36.410130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:01:36.410174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:01:36.410218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:01:36.412180Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:01:36.412308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:01:36.412356Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... ly one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: Simple, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-31T16:02:07.413005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:02:07.413077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:02:07.413472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 1, at schemeshard: 72057594046678944 2025-03-31T16:02:07.413589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-03-31T16:02:07.413687Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-03-31T16:02:07.413932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 2, at schemeshard: 72057594046678944 2025-03-31T16:02:07.414082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:02:07.414202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 1, at schemeshard: 72057594046678944 2025-03-31T16:02:07.414258Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TTxInit for Shards, read: 72057594046678944:1, tabletId: 72075186233409546, PathId: [OwnerId: 72057594046678944, LocalPathId: 2], TabletType: DataShard, at schemeshard: 72057594046678944 2025-03-31T16:02:07.414300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-31T16:02:07.414423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for 
TablePartitions, read records: 1, at schemeshard: 72057594046678944 2025-03-31T16:02:07.414534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:02:07.414752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 3, at schemeshard: 72057594046678944 2025-03-31T16:02:07.415047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:02:07.415536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:02:07.415933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:02:07.416024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:02:07.416225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:02:07.416335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:02:07.416425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:02:07.416588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:02:07.416674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:02:07.416824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:02:07.417044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-03-31T16:02:07.417207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-03-31T16:02:07.417258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-03-31T16:02:07.417309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-03-31T16:02:07.417550Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-03-31T16:02:07.433889Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-31T16:02:07.434087Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [RootDataErasureManager] Stop 2025-03-31T16:02:07.445318Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435082, Sender [1:1747:3670], Recipient [1:1747:3670]: NKikimr::NSchemeShard::TEvPrivate::TEvServerlessStorageBilling 2025-03-31T16:02:07.445409Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvServerlessStorageBilling 2025-03-31T16:02:07.446938Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:02:07.447022Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:02:07.448651Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:1747:3670], Recipient [1:1747:3670]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:02:07.448705Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event 
TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:02:07.448964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:02:07.449051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:02:07.449139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:02:07.449175Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-31T16:02:07.471583Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274399233, Sender [1:1785:3670], Recipient [1:1747:3670]: NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2025-03-31T16:02:07.471687Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxAllocatorClient::TEvAllocateResult 2025-03-31T16:02:07.471729Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:1747:3670] sender: [1:1805:2058] recipient: [1:15:2062] 2025-03-31T16:02:07.537225Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [1:1804:3716], Recipient [1:1747:3670]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2025-03-31T16:02:07.537289Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-03-31T16:02:07.537436Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-31T16:02:07.537733Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Simple" took 297us result status StatusSuccess 2025-03-31T16:02:07.538503Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Simple" PathDescription { Self { Name: "Simple" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1001 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Simple" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: 
"compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 MaxPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 27456 RowCount: 200 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 27487 Memory: 156728 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 27456 DataSize: 27456 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |72.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query >> TSchemeShardExtSubDomainTest::CreateThenDropChangesParent-AlterDatabaseCreateHiveFirst-true [GOOD] >> TCmsTenatsTest::TestClusterRatioLimit [GOOD] >> TCmsTenatsTest::TestClusterRatioLimitForceRestartMode >> TPQTest::TestMessageNo [GOOD] >> TPQTest::TestOwnership |72.4%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp/test-results/unittest/{meta.json ... 
results_accumulator.log} |72.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query |72.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup |72.4%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query |72.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup >> TTransferTests::ConsistencyLevel [GOOD] >> TTransferTests::Alter |72.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:01:58.893853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:01:58.893975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:01:58.894016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:01:58.894052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:01:58.894100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:01:58.894130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:01:58.894263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:01:58.894356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:01:58.894691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:01:59.031333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:01:59.031448Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:01:59.044809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:01:59.044931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:01:59.045148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:01:59.062063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:01:59.062305Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:01:59.063014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 
72057594046678944 2025-03-31T16:01:59.063286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:01:59.065598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:59.066797Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:59.066864Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:59.067069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:01:59.067148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:59.071241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:01:59.071511Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:01:59.096252Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:01:59.326866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:01:59.327165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:59.327394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:01:59.327637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:01:59.327718Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:59.329963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:59.330099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:01:59.330290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:59.330364Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:01:59.330400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:01:59.330438Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:01:59.332512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:59.332574Z node 1 
:FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:01:59.332614Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:01:59.334627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:59.334678Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:59.334756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:59.334828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:01:59.338611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:01:59.340735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:01:59.340920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:01:59.342009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:59.342147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:01:59.342201Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:59.342516Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:01:59.342574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:59.342748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:01:59.342827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:01:59.345126Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:59.345192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:59.345401Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:59.345449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: 
[1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:01:59.345698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:59.345748Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:01:59.345906Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:01:59.345950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:59.345992Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:01:59.346043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:59.346083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:01:59.346130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:59.346168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:01:59.346198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:01:59.346265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:01:59.346339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:01:59.346376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:01:59.348331Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:01:59.348446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:01:59.348503Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
d: 72057594046678944, LocalPathId: 2], type: EPathTypeExtSubDomain, state: EPathStateAlter)" TxId: 104 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:02:08.328859Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 104, database: /MyRoot, subject: , status: StatusMultipleModifications, reason: Invalid AlterExtSubDomain request: Check failed: path: '/MyRoot/USER_0', error: path is under operation (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeExtSubDomain, state: EPathStateAlter), operation: ALTER DATABASE, path: /MyRoot/USER_0 2025-03-31T16:02:08.330558Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-31T16:02:08.330637Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 103:0 ProgressState, operation type: TxAlterExtSubDomain, at tablet# 72057594046678944 2025-03-31T16:02:08.330697Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 103:0 ProgressState no shards to create, do next state 2025-03-31T16:02:08.330759Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 2 -> 3 2025-03-31T16:02:08.332725Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-31T16:02:08.332793Z node 7 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 103:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:02:08.332842Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 3 -> 128 2025-03-31T16:02:08.334691Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-31T16:02:08.334747Z node 7 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-31T16:02:08.334802Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 103:0, at tablet# 72057594046678944 2025-03-31T16:02:08.334860Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 103 ready parts: 1/1 2025-03-31T16:02:08.335028Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 103 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:02:08.336651Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2025-03-31T16:02:08.336808Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000004 2025-03-31T16:02:08.337182Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:02:08.337338Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 30064773228 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:02:08.337399Z node 7 
:FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 103:0, at tablet# 72057594046678944 2025-03-31T16:02:08.337696Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 128 -> 240 2025-03-31T16:02:08.337762Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 103:0, at tablet# 72057594046678944 2025-03-31T16:02:08.337919Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-03-31T16:02:08.338106Z node 7 :FLAT_TX_SCHEMESHARD INFO: Send TEvUpdateTenantSchemeShard, to actor: [7:394:2364], msg: TabletId: 72057594046678944 Generation: 2 StoragePools { Name: "pool-1" Kind: "hdd" } SubdomainVersion: 4, at schemeshard: 72057594046678944 2025-03-31T16:02:08.340205Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateTenantSchemeShard, at schemeshard: 72075186234409546, msg: TabletId: 72057594046678944 Generation: 2 StoragePools { Name: "pool-1" Kind: "hdd" } SubdomainVersion: 4 2025-03-31T16:02:08.340350Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpdateTenant DoExecute, msg: TabletId: 72057594046678944 Generation: 2 StoragePools { Name: "pool-1" Kind: "hdd" } SubdomainVersion: 4, at schemeshard: 72075186234409546 2025-03-31T16:02:08.340558Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Cannot publish paths for unknown operation id#0 FAKE_COORDINATOR: Erasing txId 103 2025-03-31T16:02:08.341063Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:02:08.341123Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T16:02:08.341335Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:02:08.341386Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:205:2207], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-03-31T16:02:08.341832Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-31T16:02:08.341896Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TSyncHive, operationId 103:0, ProgressState, NeedSyncHive: 0 2025-03-31T16:02:08.341954Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 240 -> 240 2025-03-31T16:02:08.342644Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046678944, cookie: 103 2025-03-31T16:02:08.342767Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046678944, cookie: 103 2025-03-31T16:02:08.342813Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-03-31T16:02:08.342864Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 6 2025-03-31T16:02:08.342918Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2025-03-31T16:02:08.343018Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 
103, ready parts: 0/1, is published: true 2025-03-31T16:02:08.346237Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvSyncTenantSchemeShard, at schemeshard: 72057594046678944, msg: DomainSchemeShard: 72057594046678944 DomainPathId: 2 TabletID: 72075186234409546 Generation: 2 EffectiveACLVersion: 0 SubdomainVersion: 4 UserAttributesVersion: 1 TenantHive: 72075186233409546 TenantSysViewProcessor: 18446744073709551615 TenantRootACL: "" TenantStatisticsAggregator: 18446744073709551615 TenantGraphShard: 18446744073709551615 2025-03-31T16:02:08.346350Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoExecute, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-31T16:02:08.346450Z node 7 :FLAT_TX_SCHEMESHARD INFO: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2], Generation: 2, ActorId:[7:394:2364], EffectiveACLVersion: 0, SubdomainVersion: 4, UserAttributesVersion: 1, TenantHive: 72075186233409546, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 4, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 72075186233409546, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 2025-03-31T16:02:08.346564Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186234409546 2025-03-31T16:02:08.346598Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186234409546, txId: 0, path id: [OwnerId: 72075186234409546, LocalPathId: 1] 2025-03-31T16:02:08.346725Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186234409546 2025-03-31T16:02:08.346759Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:486:2428], at schemeshard: 72075186234409546, txId: 0, path id: 1 2025-03-31T16:02:08.347654Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186234409546, msg: Owner: 72075186234409546 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72075186234409546, cookie: 0 2025-03-31T16:02:08.347900Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-03-31T16:02:08.347980Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-31T16:02:08.348104Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-31T16:02:08.348150Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-03-31T16:02:08.348293Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-03-31T16:02:08.348347Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-31T16:02:08.348396Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-03-31T16:02:08.348440Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-31T16:02:08.348488Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-03-31T16:02:08.348543Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-31T16:02:08.348605Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-03-31T16:02:08.348649Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-03-31T16:02:08.348734Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 TestModificationResult got TxId: 103, wait until txId: 104 TestModificationResults wait txId: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 103 2025-03-31T16:02:08.350968Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-03-31T16:02:08.351025Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-03-31T16:02:08.351543Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-03-31T16:02:08.351678Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-03-31T16:02:08.351722Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [7:571:2508] TestWaitNotification: OK eventTxId 103 >> TCmsTest::RequestReplaceDevices [GOOD] >> TCmsTest::RequestReplaceDevicePDisk >> TSchemeShardExtSubDomainTest::StatisticsAggregatorSync-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::SchemeQuotas-AlterDatabaseCreateHiveFirst-false >> YdbSdkSessionsPool::RunSmallPlan >> YdbSdkSessionsPool::StressTestAsync1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::CreateThenDropChangesParent-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:02:00.206753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:02:00.206879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:02:00.206928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:02:00.206962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:02:00.207014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:02:00.207045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:02:00.207120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:02:00.207287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, 
DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:02:00.207667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:02:00.477238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:02:00.477307Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:02:00.493313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:02:00.493422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:02:00.493619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:02:00.510216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:02:00.510443Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:02:00.511242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:02:00.511490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:02:00.517952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:02:00.519278Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:02:00.519357Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:02:00.519543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:02:00.519596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:02:00.519636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:02:00.519812Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:02:00.531178Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:02:00.714526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:02:00.714810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:02:00.715056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:02:00.715359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:02:00.715441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:02:00.719293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose 
Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:02:00.719504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:02:00.719797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:02:00.719869Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:02:00.719907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:02:00.719941Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:02:00.722905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:02:00.722975Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:02:00.723033Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:02:00.725954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:02:00.726019Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:02:00.726090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:02:00.726147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:02:00.730094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:02:00.732560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:02:00.732779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:02:00.733831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:02:00.733981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:02:00.734044Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:02:00.734356Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:02:00.734407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, 
operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:02:00.734608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:02:00.734692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:02:00.737434Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:02:00.737494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:02:00.737731Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:02:00.737791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:02:00.738029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:02:00.738076Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:02:00.738230Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:02:00.738266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:02:00.738304Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:02:00.738331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:02:00.738371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:02:00.738408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:02:00.738440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:02:00.738466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:02:00.738531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:02:00.738573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:02:00.738615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:02:00.748958Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:02:00.749139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:02:00.749176Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
D INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-03-31T16:02:09.083386Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-31T16:02:09.083472Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-03-31T16:02:09.085489Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-03-31T16:02:09.085651Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 2025-03-31T16:02:09.086088Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:02:09.086249Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 30064773228 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:02:09.086316Z node 7 :FLAT_TX_SCHEMESHARD INFO: TDropExtSubdomain TPropose, operationId: 102:0 HandleReply TEvOperationPlan, step: 5000003, at schemeshard: 72057594046678944 2025-03-31T16:02:09.086461Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 2] name: USER_0 type: EPathTypeExtSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 102 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:02:09.086500Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T16:02:09.086539Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 134 2025-03-31T16:02:09.088350Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-31T16:02:09.088914Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2025-03-31T16:02:09.089864Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:02:09.089923Z node 7 :FLAT_TX_SCHEMESHARD INFO: TDropExtSubdomain TDeleteExternalShards, operationId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:02:09.090037Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 134 -> 135 2025-03-31T16:02:09.090271Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:02:09.090332Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-31T16:02:09.091814Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:02:09.091847Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, 
txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:02:09.091947Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T16:02:09.092069Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:02:09.092093Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:205:2207], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-03-31T16:02:09.092115Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:205:2207], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-03-31T16:02:09.092397Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:02:09.092463Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 102:0 ProgressState 2025-03-31T16:02:09.092514Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 135 -> 240 2025-03-31T16:02:09.093425Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:02:09.093513Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:02:09.093544Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-03-31T16:02:09.093574Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-03-31T16:02:09.093605Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:02:09.094225Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:02:09.094303Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:02:09.094330Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-03-31T16:02:09.094359Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-03-31T16:02:09.094389Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-31T16:02:09.094452Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-03-31T16:02:09.107806Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:02:09.107897Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-03-31T16:02:09.108063Z node 7 
:FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-31T16:02:09.108125Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T16:02:09.108185Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-31T16:02:09.108225Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T16:02:09.108322Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-03-31T16:02:09.108416Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T16:02:09.108466Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-03-31T16:02:09.108509Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-03-31T16:02:09.108620Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-31T16:02:09.109377Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-31T16:02:09.109458Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-31T16:02:09.109551Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-31T16:02:09.110033Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-31T16:02:09.110100Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-31T16:02:09.110179Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:02:09.111316Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-31T16:02:09.111851Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-31T16:02:09.114379Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-03-31T16:02:09.114497Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-03-31T16:02:09.114782Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-03-31T16:02:09.114843Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-03-31T16:02:09.115447Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-03-31T16:02:09.115604Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-31T16:02:09.115663Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: 
satisfy waiter [7:340:2331] TestWaitNotification: OK eventTxId 102 2025-03-31T16:02:09.116277Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:02:09.116506Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 271us result status StatusPathDoesNotExist 2025-03-31T16:02:09.116700Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TestKinesisHttpProxy::DoubleCreateStream [GOOD] >> YdbSdkSessionsPool::WaitQueue1 >> DataShardVolatile::DistributedWriteThenReadIterator [GOOD] >> DataShardVolatile::DistributedWriteThenReadIteratorStream >> TestYmqHttpProxy::TestReceiveMessageWithAttributes [GOOD] >> TCmsTest::RequestReplaceBrokenDevices [GOOD] >> TCmsTest::PermissionDuration |72.5%| [TA] $(B)/ydb/core/tx/schemeshard/ut_stats/test-results/unittest/{meta.json ... results_accumulator.log} >> TTransferTests::Alter [GOOD] >> TMaintenanceApiTest::ManyActionGroupsWithSingleAction [GOOD] >> TMaintenanceApiTest::CreateTime >> TestKinesisHttpProxy::GoodRequestGetRecords >> TestYmqHttpProxy::TestReceiveMessageWithAttemptId >> TCmsTest::RequestRestartServicesRejectSecond [GOOD] >> TCmsTest::RequestRestartServicesWrongHost >> TCmsTenatsTest::TestClusterRatioLimitForceRestartMode [GOOD] >> TCmsTenatsTest::TestClusterLimitForceRestartModeScheduled |72.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless |72.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless >> TCmsTest::RequestReplaceDevicePDisk [GOOD] >> TCmsTest::RequestReplaceDevicePDiskByPath >> TPersQueueNewSchemeCacheTest::TestWriteStat1stClassTopicAPI [GOOD] >> KqpScan::RemoteShardScan [GOOD] >> KqpScan::ScanDuringSplit >> TSchemeShardExtSubDomainTest::SchemeQuotas-AlterDatabaseCreateHiveFirst-false [GOOD] >> TPersQueueTest::WhenTheTopicIsDeletedBeforeDataIsDecompressed_Compressed [GOOD] >> TCmsTest::PermissionDuration [GOOD] |72.5%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_stats/test-results/unittest/{meta.json ... 
results_accumulator.log} |72.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless >> TSchemeShardExtSubDomainTest::SchemeQuotas-AlterDatabaseCreateHiveFirst-true >> TPersQueueTest::WhenTheTopicIsDeletedAfterReadingTheData_Compressed >> TCmsTest::RacyStartCollecting ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_transfer/unittest >> TTransferTests::Alter [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:01:57.959440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:01:57.959571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:01:57.959631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:01:57.959676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:01:57.959743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:01:57.959786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:01:57.959855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:01:57.959944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:01:57.960342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:01:58.106511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:01:58.106593Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:01:58.120810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:01:58.120941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:01:58.121126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:01:58.127467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:01:58.127729Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:01:58.128389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:58.128646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:01:58.131126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:58.132423Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at 
schemeshard: 72057594046678944 2025-03-31T16:01:58.132482Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:58.132652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:01:58.132696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:58.132748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:01:58.132895Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:01:58.140795Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:01:58.319296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:01:58.319615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:58.319916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:01:58.320208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:01:58.320269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:58.326198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:58.326376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:01:58.326610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:58.326665Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:01:58.326707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:01:58.326737Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:01:58.336601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:58.336694Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:01:58.336732Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:01:58.339136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:58.339202Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:58.339294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:58.339354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:01:58.344180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:01:58.350272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:01:58.350507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:01:58.351754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:58.351907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:01:58.351957Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:58.352330Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:01:58.352387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:58.352574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:01:58.352651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:01:58.356253Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:58.356318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:58.356509Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:58.356572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:01:58.356927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:58.356979Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:01:58.357078Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 
2025-03-31T16:01:58.357109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:58.357146Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:01:58.357179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:58.357233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:01:58.357290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:58.357329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:01:58.357362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:01:58.357473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:01:58.357510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:01:58.357563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:01:58.360448Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:01:58.360588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:01:58.360626Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
Id, TxId: 103, partId: 0, tablet: 72075186233409546 2025-03-31T16:02:11.060002Z node 6 :REPLICATION_CONTROLLER TRACE: [controller 72075186233409546] Handle NKikimrReplication.TEvAlterReplication PathId { OwnerId: 72057594046678944 LocalId: 2 } OperationId { TxId: 103 PartId: 0 } SwitchState { StandBy { } } Config { SrcConnectionParams { StaticCredentials { User: "user" Password: "***" } } ConsistencySettings { Row { } } TransferSpecific { Target { SrcPath: "/MyRoot1/Table" DstPath: "/MyRoot2/Table" } } } 2025-03-31T16:02:11.060286Z node 6 :REPLICATION_CONTROLLER DEBUG: [controller 72075186233409546][TxAlterReplication] Execute: NKikimrReplication.TEvAlterReplication PathId { OwnerId: 72057594046678944 LocalId: 2 } OperationId { TxId: 103 PartId: 0 } SwitchState { StandBy { } } Config { SrcConnectionParams { StaticCredentials { User: "user" Password: "***" } } ConsistencySettings { Row { } } TransferSpecific { Target { SrcPath: "/MyRoot1/Table" DstPath: "/MyRoot2/Table" } } } 2025-03-31T16:02:11.060461Z node 6 :REPLICATION_CONTROLLER NOTICE: [controller 72075186233409546][TxAlterReplication] Alter replication: rid# 1, pathId# [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T16:02:11.068007Z node 6 :REPLICATION_CONTROLLER DEBUG: [controller 72075186233409546][TxAlterReplication] Complete 2025-03-31T16:02:11.068341Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 276758531, Sender [6:310:2297], Recipient [6:123:2149]: NKikimrReplication.TEvAlterReplicationResult OperationId { TxId: 103 PartId: 0 } Origin: 72075186233409546 Status: SUCCESS 2025-03-31T16:02:11.068393Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NReplication::TEvController::TEvAlterReplicationResult 2025-03-31T16:02:11.068470Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvAlterReplicationResult, at schemeshard: 72057594046678944, message: OperationId { TxId: 103 PartId: 0 } Origin: 72075186233409546 Status: SUCCESS 2025-03-31T16:02:11.068647Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: OperationId { TxId: 103 PartId: 0 } Origin: 72075186233409546 Status: SUCCESS 2025-03-31T16:02:11.068755Z node 6 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TAlterReplication TConfigureParts opId# 103:0 HandleReply NKikimrReplication.TEvAlterReplicationResult OperationId { TxId: 103 PartId: 0 } Origin: 72075186233409546 Status: SUCCESS 2025-03-31T16:02:11.068812Z node 6 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 3 -> 128 2025-03-31T16:02:11.068939Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-03-31T16:02:11.068997Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Ack tablet strongly msg opId: 103:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:2 2025-03-31T16:02:11.071195Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-31T16:02:11.071257Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-31T16:02:11.071317Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 103:0 2025-03-31T16:02:11.071496Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [6:123:2149], Recipient [6:123:2149]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-03-31T16:02:11.071532Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 
2025-03-31T16:02:11.071623Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-31T16:02:11.071665Z node 6 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TAlterReplication TPropose opId# 103:0 ProgressState 2025-03-31T16:02:11.071717Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-03-31T16:02:11.071768Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 103 ready parts: 1/1 2025-03-31T16:02:11.071927Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 103 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:02:11.078848Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-31T16:02:11.078935Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2025-03-31T16:02:11.079061Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000004 2025-03-31T16:02:11.079419Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269287424, Sender [6:133:2156], Recipient [6:258:2249] 2025-03-31T16:02:11.079474Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-31T16:02:11.079592Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:02:11.079755Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 25769805932 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:02:11.079823Z node 6 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TAlterReplication TPropose opId# 103:0 HandleReply TEvOperationPlan: step# 5000004 2025-03-31T16:02:11.079988Z node 6 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 128 -> 240 2025-03-31T16:02:11.080209Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-03-31T16:02:11.080286Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-31T16:02:11.080356Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Ack tablet strongly msg opId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 2025-03-31T16:02:11.082549Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-31T16:02:11.082616Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Ack coordinator stepId#5000004 first txId#103 countTxs#1 2025-03-31T16:02:11.082681Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Ack mediator stepId#5000004 2025-03-31T16:02:11.082727Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 103:0 2025-03-31T16:02:11.082974Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender 
[6:123:2149], Recipient [6:123:2149]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-03-31T16:02:11.083012Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-03-31T16:02:11.083101Z node 6 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:02:11.083149Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T16:02:11.083411Z node 6 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:02:11.083464Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [6:207:2209], at schemeshard: 72057594046678944, txId: 103, path id: 2 FAKE_COORDINATOR: Erasing txId 103 2025-03-31T16:02:11.084056Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-31T16:02:11.084113Z node 6 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-03-31T16:02:11.084233Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-03-31T16:02:11.084279Z node 6 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-03-31T16:02:11.084322Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-31T16:02:11.084369Z node 6 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-03-31T16:02:11.084411Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-31T16:02:11.084462Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2025-03-31T16:02:11.084512Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-31T16:02:11.084561Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-03-31T16:02:11.084599Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-03-31T16:02:11.084743Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-31T16:02:11.084827Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 103, publications: 1, subscribers: 0 2025-03-31T16:02:11.084871Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 2], 4 2025-03-31T16:02:11.085571Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274137603, Sender [6:207:2209], Recipient [6:123:2149]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 4 } 2025-03-31T16:02:11.085624Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2025-03-31T16:02:11.085730Z node 6 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 103 2025-03-31T16:02:11.085840Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 103 
2025-03-31T16:02:11.085890Z node 6 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2025-03-31T16:02:11.085937Z node 6 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2025-03-31T16:02:11.085983Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-31T16:02:11.086078Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2025-03-31T16:02:11.086120Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-03-31T16:02:11.089627Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-31T16:02:11.090259Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-03-31T16:02:11.090315Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944
TestModificationResult got TxId: 103, wait until txId: 103
>> TMaintenanceApiTest::CreateTime [GOOD]
>> TMaintenanceApiTest::LastRefreshTime
>> KqpScan::ScanRetryRead [GOOD]
>> KqpScan::ScanRetryReadRanges
>> TCmsTest::RequestRestartServicesWrongHost [GOOD]
>> TCmsTest::RestartNodeInDownState
>> KqpScan::ScanDuringSplit10 [GOOD]
>> KqpScan::ScanDuringSplitThenMerge
>> TCmsTest::RequestReplaceDevicePDiskByPath [GOOD]
>> TCmsTest::RequestReplaceManyDevicesOnOneNode
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-69 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-70
|72.5%| [TA] $(B)/ydb/core/tx/schemeshard/ut_transfer/test-results/unittest/{meta.json ... results_accumulator.log}
------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueNewSchemeCacheTest::TestWriteStat1stClassTopicAPI [GOOD]
Test command err: 2025-03-31T16:01:07.755137Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004328431411131:2210];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:01:07.755366Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:01:08.393250Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000993/r3tmp/tmpFH3TcS/pdisk_1.dat 2025-03-31T16:01:08.414852Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-31T16:01:08.702223Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:01:08.790311Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:01:09.069344Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:01:09.069428Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:01:09.074292Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:01:09.074394Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:01:09.075951Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:01:09.083049Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:01:09.084430Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:01:09.103639Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11269, node 1 2025-03-31T16:01:09.490252Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/0l5c/000993/r3tmp/yandexZADiKa.tmp 2025-03-31T16:01:09.490284Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/0l5c/000993/r3tmp/yandexZADiKa.tmp 2025-03-31T16:01:09.490457Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/0l5c/000993/r3tmp/yandexZADiKa.tmp 2025-03-31T16:01:09.490639Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:01:09.712844Z INFO: TTestServer started on Port 16247 GrpcPort 11269 TClient is connected to server localhost:16247 PQClient connected to localhost:11269 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:01:10.214897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:01:10.300325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-03-31T16:01:12.736113Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488004328431411131:2210];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:01:12.736232Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:01:14.039885Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004358496183203:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:01:14.055562Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:01:14.063571Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004358496183215:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:01:14.073624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2025-03-31T16:01:14.097212Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004358496183249:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:01:14.097715Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:01:14.115977Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488004358496183217:2347], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-03-31T16:01:14.373646Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 2146435072 Duration# 0.229661s 2025-03-31T16:01:14.373649Z node 1 :TX_PROXY ERROR: Actor# [1:7488004358496183297:2786] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:01:14.373687Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.229725s 2025-03-31T16:01:14.400704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:01:14.507190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:01:14.608928Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488004358496183319:2353], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:01:14.610742Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NzU3YTM4ZGUtYWM3NmJmYmYtNzEzNTJjMGMtZTNjMmFjNWE=, ActorId: [1:7488004358496183200:2341], ActorState: ExecuteState, TraceId: 01jqpez09b95pwrgntevzdd1yz, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:01:14.613010Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T16:01:14.746412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-03-31T16:01:15.159751Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jqpez15pekfh950q3ykkj253, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWNjNTk5ODUtZWVhYjNkOGMtZGI0NWI3YWYtM2MyYmE3Mzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7488004362791151080:3133] === CheckClustersList. 
Ok 2025-03-31T16:01:20.451829Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7488004332726378555:2132], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:01:20.452107Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7488004332726378555:2132], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /Root PathId: Partial: 0 } 2025-03-31T16:01:20.452239Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7488004332726378555:2132], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /Root PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7488004332726378758:2266] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 14 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1743436870294 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-31T16:01:20.452363Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7488004332726378555:2132], cacheItem# { Subscriber: { Subscriber: [1:7488004332726378758:2266] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 14 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: ... 04539168288848:2430] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:02:10.474903Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [7:7488004599297833231:4032], recipient# [7:7488004599297833230:2547], result# { ErrorCount: 1 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:02:10.558901Z node 8 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [8:7488004536546357328:2105], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:02:10.559077Z node 8 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [8:7488004536546357328:2105], cacheItem# { Subscriber: { Subscriber: [8:7488004570906095873:2195] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 
IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:02:10.559193Z node 8 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [8:7488004600970867584:2671], recipient# [8:7488004600970867583:2381], result# { ErrorCount: 1 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:02:10.703749Z node 8 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [8:7488004536546357328:2105], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:02:10.703925Z node 8 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [8:7488004536546357328:2105], cacheItem# { Subscriber: { Subscriber: [8:7488004540841324651:2110] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:02:10.704032Z node 8 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [8:7488004600970867586:2672], recipient# [8:7488004600970867585:2382], result# { ErrorCount: 1 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:02:10.715748Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvResolveKeySet: self# [7:7488004534873321073:2127], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 12] Access: 1 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo From: (Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 },{ TableId: [OwnerId: 72057594046644480, LocalPathId: 10] Access: 1 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo From: (Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-03-31T16:02:10.715882Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TResolve: self# [7:7488004534873321073:2127], cacheItem# { Subscriber: { Subscriber: [7:7488004564938093301:2878] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 16 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 
CreateStep: 1743436923249 PathId: [OwnerId: 72057594046644480, LocalPathId: 12] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { TableId: [OwnerId: 72057594046644480, LocalPathId: 12] Access: 1 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:02:10.715956Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TResolve: self# [7:7488004534873321073:2127], cacheItem# { Subscriber: { Subscriber: [7:7488004564938093199:2816] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 16 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1743436922920 PathId: [OwnerId: 72057594046644480, LocalPathId: 10] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { TableId: [OwnerId: 72057594046644480, LocalPathId: 10] Access: 1 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:02:10.716322Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [7:7488004599297833238:4037], recipient# [7:7488004599297833237:2509], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 12] Access: 1 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 2 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 },{ TableId: [OwnerId: 72057594046644480, LocalPathId: 10] Access: 1 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 2 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-03-31T16:02:10.726173Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [7:7488004534873321073:2127], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:02:10.726396Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [7:7488004534873321073:2127], cacheItem# { Subscriber: { Subscriber: [7:7488004539168288840:2428] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 27 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1743436917089 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, 
props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:02:10.726685Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [7:7488004599297833241:4038], recipient# [7:7488004599297833240:2548], result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 2 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-31T16:02:10.727660Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [7:7488004534873321073:2127], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:02:10.727795Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [7:7488004534873321073:2127], cacheItem# { Subscriber: { Subscriber: [7:7488004564938093102:2759] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:02:10.727882Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [7:7488004599297833243:4039], recipient# [7:7488004599297833242:2549], result# { ErrorCount: 1 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:02:11.075806Z node 7 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:02:11.075854Z node 7 :IMPORT WARN: Table profiles were not loaded
>> TCmsTenatsTest::TestClusterLimitForceRestartModeScheduled [GOOD]
>> TCmsTenatsTest::TestClusterRatioLimitForceRestartModeScheduled
>> TestKinesisHttpProxy::TestListStreamConsumers [GOOD]
>> TestKinesisHttpProxy::TestListStreamConsumersWithMaxResults
>> TCmsTest::RacyStartCollecting [GOOD]
>> TCmsTest::PriorityRange
>> TestYmqHttpProxy::TestCreateQueueWithWrongAttribute [GOOD]
>> TestYmqHttpProxy::TestSetQueueAttributes [GOOD]
>> YdbSdkSessionsPool::RunSmallPlan [GOOD]
>> TMaintenanceApiTest::LastRefreshTime [GOOD]
>> TCmsTest::RestartNodeInDownState [GOOD]
>> TCmsTest::SamePriorityRequest
>> TSchemeShardExtSubDomainTest::SchemeQuotas-AlterDatabaseCreateHiveFirst-true [GOOD]
|72.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TMaintenanceApiTest::LastRefreshTime [GOOD]
>> TestYmqHttpProxy::TestCreateQueueWithTags
------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest >> SystemView::ShowCreateTableTemporary [FAIL]
Test command err: 2025-03-31T16:00:04.274221Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004058596393896:2211];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:00:04.274347Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001a0f/r3tmp/tmpFts7zY/pdisk_1.dat 2025-03-31T16:00:05.196090Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:00:05.279470Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:00:05.279612Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:00:05.282947Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:00:05.335736Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6945, node 1 2025-03-31T16:00:05.707613Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:00:05.707641Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:00:05.707648Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:00:05.707758Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27801 TClient is connected to server localhost:27801 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:00:06.759385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting...
2025-03-31T16:00:09.235378Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488004058596393896:2211];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:00:09.235492Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:00:09.772478Z node 1 :KQP_COMPILE_SERVICE INFO: Subscribed for config changes 2025-03-31T16:00:09.772514Z node 1 :KQP_COMPILE_SERVICE INFO: Updated config 2025-03-31T16:00:09.801080Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004080071231402:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:00:09.801247Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:00:09.801719Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004080071231414:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:00:09.806523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:00:09.869977Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488004080071231416:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:00:09.962366Z node 1 :TX_PROXY ERROR: Actor# [1:7488004080071231504:2758] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:00:09.964051Z node 1 :KQP_COMPILE_SERVICE DEBUG: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: CREATE TABLE test_show_create (\n Key Uint32,\n Value Bool DEFAULT true,\n PRIMARY KEY (Key)\n );\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2025-03-31T16:00:09.964233Z node 1 :KQP_COMPILE_SERVICE DEBUG: Perform request, TraceId.SpanIdPtr: 0x000050F00030D268 2025-03-31T16:00:09.964279Z node 1 :KQP_COMPILE_SERVICE DEBUG: Received compile request, sender: [1:7488004080071231374:2340], queryUid: , queryText: "CREATE TABLE test_show_create (\n Key Uint32,\n Value Bool DEFAULT true,\n PRIMARY KEY (Key)\n );\n ", keepInCache: 1, split: 0{ TraceId: 01jqpex1j4aa8hd0qzh94t4kdh, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTg2NGZlYWMtMWRiZmI4OTctMTAxNjZkM2QtNTI5OTBlNzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default} 2025-03-31T16:00:09.964436Z node 1 :KQP_COMPILE_SERVICE DEBUG: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: CREATE TABLE test_show_create (\n Key Uint32,\n Value Bool DEFAULT true,\n PRIMARY KEY (Key)\n );\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2025-03-31T16:00:09.964500Z node 1 :KQP_COMPILE_SERVICE DEBUG: Added request to queue, sender: [1:7488004080071231374:2340], queueSize: 1 2025-03-31T16:00:09.965249Z node 1 :KQP_COMPILE_SERVICE DEBUG: Created compile actor, sender: [1:7488004080071231374:2340], compileActor: [1:7488004080071231523:2352] 2025-03-31T16:00:10.501665Z node 1 :KQP_YQL INFO: TraceId: 01jqpex1j4aa8hd0qzh94t4kdh, SessionId: CompileActor 2025-03-31 16:00:10.501 INFO ydb-core-sys_view-ut(pid=79827, tid=0x00007F6A53286640) [KQP] kqp_host.cpp:1382: Compiled query: ( (let $1 '('"Key" (AsOptionalType (DataType 'Uint32)) '('columnConstrains '()) '())) (let $2 '('"Value" (AsOptionalType (DataType 'Bool)) '('columnConstrains '('('default (Bool '"true")))) '())) (let $3 '('('mode 'create) '('columns '($1 $2)) '('primarykey '('"Key")))) (return (Write! 
world (DataSink '"kikimr" '"db") (Key '('tablescheme (String '"/Root/test_show_create"))) (Void) $3)) ) 2025-03-31T16:00:10.502795Z node 1 :KQP_YQL TRACE: TraceId: 01jqpex1j4aa8hd0qzh94t4kdh, SessionId: CompileActor 2025-03-31 16:00:10.502 TRACE ydb-core-sys_view-ut(pid=79827, tid=0x00007F6A53286640) [KQP] kqp_transform.cpp:33: YqlTransformer: ( (let $1 '('"Key" (AsOptionalType (DataType 'Uint32)) '('columnConstrains '()) '())) (let $2 '('"Value" (AsOptionalType (DataType 'Bool)) '('columnConstrains '('('default (Bool '"true")))) '())) (let $3 '('('mode 'create) '('columns '($1 $2)) '('primarykey '('"Key")))) (let $4 (Write! world (DataSink '"kikimr" '"db") (Key '('tablescheme (String '"/Root/test_show_create"))) (Void) $3)) (return (Commit! $4 (DataSink '"kikimr" '"db") '('('"mode" '"flush")))) ) 2025-03-31T16:00:10.503154Z node 1 :KQP_YQL DEBUG: TraceId: 01jqpex1j4aa8hd0qzh94t4kdh, SessionId: CompileActor 2025-03-31 16:00:10.503 DEBUG ydb-core-sys_view-ut(pid=79827, tid=0x00007F6A53286640) [perf] yql_expr_optimize.cpp:540: Execution of [ExpandApply] took 240us 2025-03-31T16:00:10.503701Z node 1 :KQP_YQL DEBUG: TraceId: 01jqpex1j4aa8hd0qzh94t4kdh, SessionId: CompileActor 2025-03-31 16:00:10.503 DEBUG ydb-core-sys_view-ut(pid=79827, tid=0x00007F6A53286640) [core eval] yql_eval_expr.cpp:384: EvaluateExpression - start 2025-03-31T16:00:10.505194Z node 1 :KQP_YQL DEBUG: TraceId: 01jqpex1j4aa8hd0qzh94t4kdh, SessionId: CompileActor 2025-03-31 16:00:10.505 DEBUG ydb-core-sys_view-ut(pid=79827, tid=0x00007F6A53286640) [core eval] yql_eval_expr.cpp:1156: EvaluateExpression - finish 2025-03-31T16:00:10.505923Z node 1 :KQP_YQL TRACE: TraceId: 01jqpex1j4aa8hd0qzh94t4kdh, SessionId: CompileActor 2025-03-31 16:00:10.505 TRACE ydb-core-sys_view-ut(pid=79827, tid=0x00007F6A53286640) [KQP] kqp_transform.cpp:33: YqlTransformer: ( (let $1 '('"Key" (AsOptionalType (DataType 'Uint32)) '('columnConstrains '()) '())) (let $2 '('"Value" (AsOptionalType (DataType 'Bool)) '('columnConstrains '('('default (Bool '"true")))) '())) (let $3 '('('mode 'create) '('columns '($1 $2)) '('primarykey '('"Key")))) (let $4 (Write! world (DataSink '"kikimr" '"db") (Key '('tablescheme (String '"/Root/test_show_create"))) (Void) $3)) (return (Commit! $4 (DataSink '"kikimr" '"db") '('('"mode" '"flush")))) ) 2025-03-31T16:00:10.517661Z node 1 :KQP_YQL DEBUG: TraceId: 01jqpex1j4aa8hd0qzh94t4kdh, SessionId: CompileActor 2025-03-31 16:00:10.517 DEBUG ydb-core-sys_view-ut(pid=79827, tid=0x00007F6A53286640) [perf] type_ann_expr.cpp:47: Execution of [TypeAnnotationTransformer::DoTransform] took 4.16ms 2025-03-31T16:00:10.518184Z node 1 :KQP_YQL DEBUG: TraceId: 01jqpex1j4aa8hd0qzh94t4kdh, SessionId: CompileActor 2025-03-31 16:00:10.518 DEBUG ydb-core-sys_view-ut(pid=79827, tid=0x00007F6A53286640) [perf] yql_expr_constraint.cpp:3226: Execution of [ConstraintTransformer::DoTransform] took 293us 2025-03-31T16:00:10.518294Z node 1 :KQP_YQL DEBUG: TraceId: 01jqpex1j4aa8hd0qzh94t4kdh, SessionId: CompileActor 2025-03-31 16:00:10.518 DEBUG ydb-core-sys_view-ut(pid=79827, tid=0x00007F6A53286640) [perf] yql_expr_csee.cpp:620: Execution of [UpdateCompletness] took 54us 2025-03-31T16:00:10.518756Z node 1 :KQP_YQL DEBUG: TraceId: 01jqpex1j4aa8hd0qzh94t4kdh, SessionId: CompileActor 2025-03-31 16:00:10.518 DEBUG ydb-core-sys_view-ut(pid=79827, tid=0x00007F6A53286640) [perf] yql_expr_csee.cpp:633: Execution of [EliminateCommonSubExpressions] took 407us 2025-03-31T16:00:10.5241 ... 
) [core exec] yql_execution.cpp:72: Collect unused nodes for root #43, status: Ok 2025-03-31T16:01:10.455374Z node 21 :KQP_YQL TRACE: TraceId: 01jqpeywr7d26655qme41keze5, SessionId: CompileActor 2025-03-31 16:01:10.455 TRACE ydb-core-sys_view-ut(pid=79827, tid=0x00007F6A46B4E640) [core exec] yql_execution.cpp:387: {0}, callable #43 2025-03-31T16:01:10.455460Z node 21 :KQP_YQL TRACE: TraceId: 01jqpeywr7d26655qme41keze5, SessionId: CompileActor 2025-03-31 16:01:10.455 TRACE ydb-core-sys_view-ut(pid=79827, tid=0x00007F6A46B4E640) [core exec] yql_execution.cpp:387: {1}, callable #42 2025-03-31T16:01:10.455591Z node 21 :KQP_YQL TRACE: TraceId: 01jqpeywr7d26655qme41keze5, SessionId: CompileActor 2025-03-31 16:01:10.455 TRACE ydb-core-sys_view-ut(pid=79827, tid=0x00007F6A46B4E640) [core exec] yql_execution.cpp:387: {1}, callable #42 2025-03-31T16:01:10.455990Z node 21 :KQP_YQL INFO: TraceId: 01jqpeywr7d26655qme41keze5, SessionId: CompileActor 2025-03-31 16:01:10.455 INFO ydb-core-sys_view-ut(pid=79827, tid=0x00007F6A46B4E640) [core exec] yql_execution.cpp:466: Register async execution for node #42 2025-03-31T16:01:10.456115Z node 21 :KQP_YQL INFO: TraceId: 01jqpeywr7d26655qme41keze5, SessionId: CompileActor 2025-03-31 16:01:10.456 INFO ydb-core-sys_view-ut(pid=79827, tid=0x00007F6A46B4E640) [core exec] yql_execution.cpp:87: Finish, output #43, status: Async 2025-03-31T16:01:10.456335Z node 21 :KQP_YQL INFO: TraceId: 01jqpeywr7d26655qme41keze5, SessionId: CompileActor 2025-03-31 16:01:10.456 INFO ydb-core-sys_view-ut(pid=79827, tid=0x00007F6A46B4E640) [core exec] yql_execution.cpp:133: Completed async execution for node #42 2025-03-31T16:01:10.456447Z node 21 :KQP_YQL INFO: TraceId: 01jqpeywr7d26655qme41keze5, SessionId: CompileActor 2025-03-31 16:01:10.456 INFO ydb-core-sys_view-ut(pid=79827, tid=0x00007F6A46B4E640) [core exec] yql_execution.cpp:153: State is ExecutionComplete after apply async changes for node #42 2025-03-31T16:01:10.456526Z node 21 :KQP_YQL INFO: TraceId: 01jqpeywr7d26655qme41keze5, SessionId: CompileActor 2025-03-31 16:01:10.456 INFO ydb-core-sys_view-ut(pid=79827, tid=0x00007F6A46B4E640) [core exec] yql_execution.cpp:59: Begin, root #43 2025-03-31T16:01:10.456588Z node 21 :KQP_YQL INFO: TraceId: 01jqpeywr7d26655qme41keze5, SessionId: CompileActor 2025-03-31 16:01:10.456 INFO ydb-core-sys_view-ut(pid=79827, tid=0x00007F6A46B4E640) [core exec] yql_execution.cpp:72: Collect unused nodes for root #43, status: Ok 2025-03-31T16:01:10.456654Z node 21 :KQP_YQL TRACE: TraceId: 01jqpeywr7d26655qme41keze5, SessionId: CompileActor 2025-03-31 16:01:10.456 TRACE ydb-core-sys_view-ut(pid=79827, tid=0x00007F6A46B4E640) [core exec] yql_execution.cpp:387: {0}, callable #43 2025-03-31T16:01:10.456744Z node 21 :KQP_YQL INFO: TraceId: 01jqpeywr7d26655qme41keze5, SessionId: CompileActor 2025-03-31 16:01:10.456 INFO ydb-core-sys_view-ut(pid=79827, tid=0x00007F6A46B4E640) [core exec] yql_execution.cpp:577: Node #43 finished execution 2025-03-31T16:01:10.456830Z node 21 :KQP_YQL INFO: TraceId: 01jqpeywr7d26655qme41keze5, SessionId: CompileActor 2025-03-31 16:01:10.456 INFO ydb-core-sys_view-ut(pid=79827, tid=0x00007F6A46B4E640) [core exec] yql_execution.cpp:594: Node #43 created 0 trackable nodes: 2025-03-31T16:01:10.456893Z node 21 :KQP_YQL INFO: TraceId: 01jqpeywr7d26655qme41keze5, SessionId: CompileActor 2025-03-31 16:01:10.456 INFO ydb-core-sys_view-ut(pid=79827, tid=0x00007F6A46B4E640) [core exec] yql_execution.cpp:87: Finish, output #43, status: Ok 2025-03-31T16:01:10.456963Z node 21 :KQP_YQL 
INFO: TraceId: 01jqpeywr7d26655qme41keze5, SessionId: CompileActor 2025-03-31 16:01:10.456 INFO ydb-core-sys_view-ut(pid=79827, tid=0x00007F6A46B4E640) [core exec] yql_execution.cpp:93: Creating finalizing transformer, output #43 2025-03-31T16:01:10.460398Z node 21 :KQP_COMPILE_SERVICE DEBUG: Received response, sender: [21:7488004326826082091:2340], status: SUCCESS, compileActor: [21:7488004339710985076:2428] 2025-03-31T16:01:10.460523Z node 21 :KQP_COMPILE_SERVICE DEBUG: Send response, sender: [21:7488004326826082091:2340], queryUid: cbe4c91b-81584d90-1feca83c-a4f885c5, status:SUCCESS 2025-03-31T16:01:10.631837Z node 21 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 21, TabletId: 72075186224037891 not found 2025-03-31T16:01:13.881937Z node 26 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[26:7488004355297905667:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:01:13.881999Z node 26 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001a0f/r3tmp/tmp7kMK8U/pdisk_1.dat 2025-03-31T16:01:14.357747Z node 26 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:01:14.451190Z node 26 :HIVE WARN: HIVE#72057594037968897 Node(26, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:01:14.451328Z node 26 :HIVE WARN: HIVE#72057594037968897 Node(26, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:01:14.469264Z node 26 :HIVE WARN: HIVE#72057594037968897 Node(26, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22675, node 26 2025-03-31T16:01:14.765450Z node 26 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:01:14.765485Z node 26 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:01:14.765502Z node 26 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:01:14.765764Z node 26 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13234 TClient is connected to server localhost:13234 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:01:15.685113Z node 26 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:01:18.889872Z node 26 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[26:7488004355297905667:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:01:18.890004Z node 26 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:01:21.063896Z node 26 :KQP_PROXY ERROR: TraceId: "01jqpez20q38cm3w08aqm2kv7w", Request deadline has expired for 0.277770s seconds 2025-03-31T16:01:21.066371Z node 26 :KQP_COMPILE_SERVICE INFO: Subscribed for config changes 2025-03-31T16:01:21.066417Z node 26 :KQP_COMPILE_SERVICE INFO: Updated config (NYdb::Dev::TContractViolation) Attempt to use result with not successfull status. TCreateSessionResult::GetSession 2025-03-31T16:01:24.672464Z node 31 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[31:7488004402348025176:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:01:24.672608Z node 31 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001a0f/r3tmp/tmpcpH5LN/pdisk_1.dat 2025-03-31T16:01:25.134184Z node 31 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:01:25.218602Z node 31 :HIVE WARN: HIVE#72057594037968897 Node(31, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:01:25.218743Z node 31 :HIVE WARN: HIVE#72057594037968897 Node(31, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:01:25.223922Z node 31 :HIVE WARN: HIVE#72057594037968897 Node(31, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3148, node 31 2025-03-31T16:01:25.814245Z node 31 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:01:25.814288Z node 31 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:01:25.814309Z node 31 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:01:25.814598Z node 31 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28373 TClient is connected to server localhost:28373 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:01:26.904934Z node 31 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:29.679351Z node 31 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[31:7488004402348025176:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:01:29.679461Z node 31 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:01:32.243331Z node 31 :KQP_PROXY ERROR: TraceId: "01jqpezcyw411hndxjt8qhj80c", Request deadline has expired for 0.240205s seconds 2025-03-31T16:01:32.243846Z node 31 :KQP_COMPILE_SERVICE INFO: Subscribed for config changes 2025-03-31T16:01:32.243890Z node 31 :KQP_COMPILE_SERVICE INFO: Updated config (NYdb::Dev::TContractViolation) Attempt to use result with not successfull status. TCreateSessionResult::GetSession
>> TestYmqHttpProxy::TestTagQueue
>> TestKinesisHttpProxy::ListShards [GOOD]
>> TestKinesisHttpProxy::TestRequestNoAuthorization [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::RunSmallPlan [GOOD]
Test command err: 2025-03-31T16:02:10.571150Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004600672445289:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:02:10.571253Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000639/r3tmp/tmpPxm391/pdisk_1.dat 2025-03-31T16:02:11.299899Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 65183, node 1 2025-03-31T16:02:11.434141Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:02:11.434245Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:02:11.467327Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:02:11.516525Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:02:11.516547Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:02:11.516554Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:02:11.516667Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28826 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:02:11.913546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting...
>> DataCleanup::CleanupDataNoTables [GOOD]
>> DataCleanup::CleanupDataNoTablesWithRestart
>> TCmsTenatsTest::TestClusterRatioLimitForceRestartModeScheduled [GOOD]
>> DataCleanup::CleanupDataNoTablesWithRestart [GOOD]
>> DataCleanup::CleanupDataLog [GOOD]
>> DataCleanup::CleanupData
>> TPQTestSlow::TestOnDiskStoredSourceIds [GOOD]
>> DataCleanup::CleanupData [GOOD]
>> DataCleanup::CleanupDataMultipleFamilies
>> DataCleanup::CleanupDataMultipleFamilies [GOOD]
>> DataCleanup::CleanupDataMultipleTables [GOOD]
>> DataCleanup::CleanupDataWithFollowers
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::SchemeQuotas-AlterDatabaseCreateHiveFirst-true [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:01:58.744305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:01:58.744435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:01:58.744479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:01:58.744526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:01:58.744579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:01:58.744620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:01:58.744682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:01:58.744765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10,
DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:01:58.745188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:01:58.891332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:01:58.891421Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:01:58.904399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:01:58.904543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:01:58.904726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:01:58.911649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:01:58.911936Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:01:58.912728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:58.913047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:01:58.915776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:58.917049Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:58.917130Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:58.917317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:01:58.917368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:58.917406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:01:58.917573Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:01:58.925112Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:01:59.050717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:01:59.050985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:59.051300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:01:59.051557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:01:59.051616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:59.055310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose 
Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:59.055485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:01:59.055726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:59.055799Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:01:59.055848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:01:59.055901Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:01:59.059745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:59.059818Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:01:59.059867Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:01:59.062530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:59.062637Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:59.062705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:59.062785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:01:59.066365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:01:59.072080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:01:59.072349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:01:59.073483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:59.073669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:01:59.073721Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:59.074055Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:01:59.074123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, 
operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:59.074410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:01:59.074512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:01:59.080449Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:59.080505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:59.080705Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:59.080750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:01:59.081013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:59.081065Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:01:59.081230Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:01:59.081280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:59.081322Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:01:59.081352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:59.081394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:01:59.081452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:59.081487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:01:59.081516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:01:59.081608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:01:59.081645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:01:59.081676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:01:59.088039Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:01:59.088219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:01:59.088264Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new path created for pathId [OwnerId: 72075186233409546, LocalPathId: 9] was 2 2025-03-31T16:02:16.113704Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 116:0 1 -> 2 2025-03-31T16:02:16.114368Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 116:1, propose status:StatusAccepted, reason: , at schemeshard: 72075186233409546 2025-03-31T16:02:16.114447Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 116:0, at schemeshard: 72075186233409546 2025-03-31T16:02:16.114630Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 12 2025-03-31T16:02:16.114713Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 9] was 3 2025-03-31T16:02:16.118613Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 116, response: Status: StatusAccepted TxId: 116 SchemeshardId: 72075186233409546 PathId: 9, at schemeshard: 72075186233409546 2025-03-31T16:02:16.118858Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 116, database: /MyRoot/USER_0, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /MyRoot/USER_0/Table11 2025-03-31T16:02:16.119250Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-03-31T16:02:16.119331Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 116, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-03-31T16:02:16.119650Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 116, path id: [OwnerId: 72075186233409546, LocalPathId: 9] 2025-03-31T16:02:16.119784Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-03-31T16:02:16.119842Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:724:2626], at schemeshard: 72075186233409546, txId: 116, path id: 1 2025-03-31T16:02:16.119911Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:724:2626], at schemeshard: 72075186233409546, txId: 116, path id: 9 2025-03-31T16:02:16.120649Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 116:0, at schemeshard: 72075186233409546 2025-03-31T16:02:16.120746Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 116:0 ProgressState, operation type: TxCreateTable, at tablet# 72075186233409546 2025-03-31T16:02:16.121092Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 116:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72075186233409546 OwnerIdx: 11 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 2 } ObjectId: 9 BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" } BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" } BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 2 } 2025-03-31T16:02:16.122097Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 3 LocalPathId: 1 Version: 16 PathOwnerId: 72075186233409546, cookie: 116 2025-03-31T16:02:16.122391Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 3 LocalPathId: 1 Version: 16 PathOwnerId: 72075186233409546, cookie: 116 2025-03-31T16:02:16.122456Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 116 2025-03-31T16:02:16.122516Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 116, pathId: [OwnerId: 72075186233409546, LocalPathId: 1], version: 16 2025-03-31T16:02:16.122577Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 13 2025-03-31T16:02:16.124487Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 3 LocalPathId: 9 Version: 1 PathOwnerId: 72075186233409546, cookie: 116 2025-03-31T16:02:16.124578Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 3 LocalPathId: 9 Version: 1 PathOwnerId: 72075186233409546, cookie: 116 2025-03-31T16:02:16.124611Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 116 2025-03-31T16:02:16.124641Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 116, pathId: [OwnerId: 72075186233409546, LocalPathId: 9], version: 1 2025-03-31T16:02:16.124675Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 9] was 4 2025-03-31T16:02:16.124762Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 116, ready parts: 0/1, is published: true 2025-03-31T16:02:16.130861Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 116:0 from tablet: 72075186233409546 to tablet: 72057594037968897 cookie: 72075186233409546:11 msg type: 268697601 2025-03-31T16:02:16.131064Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 116, partId: 0, tablet: 72057594037968897 2025-03-31T16:02:16.131148Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 116, shardIdx: 72075186233409546:11, partId: 0 2025-03-31T16:02:16.131747Z node 7 :HIVE INFO: [72057594037968897] TEvCreateTablet, msg: Owner: 72075186233409546 OwnerIdx: 11 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 2 } ObjectId: 9 BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" } BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" } BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 2 } 2025-03-31T16:02:16.132032Z node 7 :HIVE INFO: [72057594037968897] TEvCreateTablet, Owner 72075186233409546, OwnerIdx 11, type DataShard, boot OK, tablet id 72075186233409556 2025-03-31T16:02:16.132217Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvCreateTabletReply at schemeshard: 72075186233409546 message: Status: OK Owner: 72075186233409546 OwnerIdx: 11 TabletID: 72075186233409556 Origin: 72057594037968897 2025-03-31T16:02:16.132276Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByShardIdx, TxId: 116, shardIdx: 72075186233409546:11, partId: 0 2025-03-31T16:02:16.132473Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 116:0, at schemeshard: 72075186233409546, message: Status: OK Owner: 72075186233409546 OwnerIdx: 11 TabletID: 
72075186233409556 Origin: 72057594037968897 2025-03-31T16:02:16.132542Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 116:0 HandleReply TEvCreateTabletReply, at tabletId: 72075186233409546 2025-03-31T16:02:16.132644Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 116:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72075186233409546 OwnerIdx: 11 TabletID: 72075186233409556 Origin: 72057594037968897 2025-03-31T16:02:16.132775Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 116:0 2 -> 3 2025-03-31T16:02:16.134252Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 116 2025-03-31T16:02:16.136480Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 116 2025-03-31T16:02:16.140187Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 116:0, at schemeshard: 72075186233409546 2025-03-31T16:02:16.140466Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 116:0, at schemeshard: 72075186233409546 2025-03-31T16:02:16.140547Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId# 116:0 ProgressState at tabletId# 72075186233409546 2025-03-31T16:02:16.140661Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable TConfigureParts operationId# 116:0 ProgressState Propose modify scheme on datashard datashardId: 72075186233409556 seqNo: 3:8 2025-03-31T16:02:16.141170Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable TConfigureParts operationId# 116:0 ProgressState Propose modify scheme on datashard datashardId: 72075186233409556 message: TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 673 RawX2: 30064773658 } TxBody: "\n\236\004\n\007Table11\020\t\032\r\n\003key\030\002 \001(\000@\000\032\020\n\005Value\030\200$ \002(\000@\000(\001:\262\003\022\253\003\010\200\200\200\002\020\254\002\030\364\003 \200\200\200\010(\0000\200\200\200 8\200\200\200\010@\2008H\000RX\010\000\020\000\030\010 \010(\200\200\200@0\377\377\377\377\0178\001B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen1P\nX\200\200\001`nh\000p\000Rb\010\001\020\200\200\200\024\030\005 \020(\200\200\200\200\0020\377\377\377\377\0178\000B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen2P\nX\200\200\001`nh\200\200\200\004p\200\200\200\004Rc\010\002\020\200\200\200\310\001\030\005 \020(\200\200\200\200@0\377\377\377\377\0178\000B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen3P\nX\200\200\001`nh\200\200\200(p\200\200\200(X\001`\005j$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionr\017compaction_gen0z\017compaction_gen0\202\001\004scan\210\001\200\200\200\010\220\001\364\003\230\0012\270\001\2008\300\001\006R\002\020\001J\026/MyRoot/USER_0/Table11\242\001\006\001\000\000\000\000\200\252\001\000\260\001\001\270\001\000\210\002\001\222\002\013\t\n\000\220\000\000\020\000\001\020\t:\004\010\003\020\010" TxId: 116 ExecLevel: 0 Flags: 0 SchemeShardId: 72075186233409546 ProcessingParams { Version: 3 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } SubDomainPathId: 1 2025-03-31T16:02:16.149969Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 116:0 from tablet: 72075186233409546 to tablet: 72075186233409556 cookie: 72075186233409546:11 msg type: 269549568 
2025-03-31T16:02:16.150219Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 116, partId: 0, tablet: 72075186233409556 TestModificationResult got TxId: 116, wait until txId: 116 TestModificationResults wait txId: 117 2025-03-31T16:02:16.204380Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_0" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table12" Columns { Name: "key" Type: "Uint32" } Columns { Name: "Value" Type: "Utf8" } KeyColumnNames: "key" } } TxId: 117 TabletId: 72075186233409546 , at schemeshard: 72075186233409546 2025-03-31T16:02:16.224409Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 117, response: Status: StatusQuotaExceeded Reason: "Request exceeded a limit on the number of schema operations, try again later." TxId: 117 SchemeshardId: 72075186233409546, at schemeshard: 72075186233409546 2025-03-31T16:02:16.224689Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 117, database: /MyRoot/USER_0, subject: , status: StatusQuotaExceeded, reason: Request exceeded a limit on the number of schema operations, try again later., operation: CREATE TABLE, path: /MyRoot/USER_0/Table12 TestModificationResult got TxId: 117, wait until txId: 117 >> DataCleanup::CleanupDataWithFollowers [GOOD] >> DataCleanup::CleanupDataMultipleTimes [GOOD] >> DataCleanup::CleanupDataEmptyTable [GOOD] >> DataCleanup::CleanupDataWithRestarts >> TCmsTest::PriorityRange [GOOD] >> DataCleanup::CleanupDataWithRestarts [GOOD] >> DataCleanup::CleanupDataRetryWithNotGreaterGenerations >> TFlatTableExecutor_BackgroundCompactions::TestRunBackgroundSnapshot [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundSnapshotToRegular [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestRunBackgroundCompactionGen1 >> DataCleanup::CleanupDataRetryWithNotGreaterGenerations [GOOD] >> DBase::Select [GOOD] >> DBase::Subsets [GOOD] >> DBase::WideKey >> TestKinesisHttpProxy::ListShardsEmptyFields >> TestKinesisHttpProxy::TestUnauthorizedPutRecords >> TFlatTableExecutor_BackgroundCompactions::TestRunBackgroundCompactionGen1 [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundCompactionToRegular >> DataShardVolatile::DistributedWriteThenReadIteratorStream [GOOD] >> DataShardVolatile::DistributedWriteThenScanQuery >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundCompactionToRegular [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestRunBackgroundCompactionGen2 |72.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTenatsTest::TestClusterRatioLimitForceRestartModeScheduled [GOOD] >> YdbSdkSessionsPool::WaitQueue1 [GOOD] >> TRowVersionRangesTest::SimpleInserts [GOOD] >> TRowVersionRangesTest::MergeHoleOuter [GOOD] >> TRowVersionRangesTest::SteppedCookieAllocatorOrder [GOOD] >> TRowVersionRangesTest::SteppedCookieAllocatorLowerBound [GOOD] >> TS3FIFOCache::Touch [GOOD] >> TS3FIFOCache::Touch_MainQueue [GOOD] >> TS3FIFOCache::EvictNext [GOOD] >> TS3FIFOCache::UpdateLimit [GOOD] >> TS3FIFOCache::Erase [GOOD] >> TS3FIFOCache::Random ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/slow/unittest >> TPQTestSlow::TestOnDiskStoredSourceIds [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] Leader for TabletID 72057594037927937 is [1:107:2139] 
sender: [1:108:2057] recipient: [1:101:2135] 2025-03-31T16:00:41.446762Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:00:41.446874Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927938 is [1:153:2174] sender: [1:154:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:179:2057] recipient: [1:14:2061] 2025-03-31T16:00:41.484588Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:00:41.502458Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 SourceIdMaxCounts: 3 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-03-31T16:00:41.503663Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:185:2198] 2025-03-31T16:00:41.506421Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:185:2198] 2025-03-31T16:00:41.508680Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:186:2199] 2025-03-31T16:00:41.510499Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:186:2199] 2025-03-31T16:00:41.517851Z node 1 :PERSQUEUE INFO: new Cookie default|b3b20692-990b0540-559a5d2a-72a42376_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-31T16:00:41.523458Z node 1 :PERSQUEUE INFO: new Cookie default|28c3e567-6ee216c2-193cb362-44a7d67a_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-31T16:00:41.551669Z node 1 :PERSQUEUE INFO: new Cookie default|6287743c-49c9bd19-e364efbe-e710adc0_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-31T16:00:41.561164Z node 1 :PERSQUEUE INFO: new Cookie default|c2105a7-9e0f21ce-b4b8fc6e-c3621b0c_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-31T16:00:41.569201Z node 1 :PERSQUEUE INFO: new Cookie default|2934db16-40aff72f-6fc0742f-6f9cb81a_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-31T16:00:41.577292Z node 1 :PERSQUEUE INFO: new Cookie default|8e955041-cfac91ef-cee2e1ac-cb579e71_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:103:2057] recipient: [2:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:103:2057] 
recipient: [2:101:2135] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:108:2057] recipient: [2:101:2135] 2025-03-31T16:00:42.458067Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:00:42.458150Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:149:2057] recipient: [2:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:149:2057] recipient: [2:147:2170] Leader for TabletID 72057594037927938 is [2:153:2174] sender: [2:154:2057] recipient: [2:147:2170] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:179:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:107:2139]) on event NKikimr::TEvPersQueue::TEvUpdateConfigBuilder ! Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:181:2057] recipient: [2:99:2134] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:184:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:185:2057] recipient: [2:183:2195] Leader for TabletID 72057594037927937 is [2:186:2196] sender: [2:187:2057] recipient: [2:183:2195] 2025-03-31T16:00:42.584141Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:00:42.584221Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info !Reboot 72057594037927937 (actor [2:107:2139]) rebooted! !Reboot 72057594037927937 (actor [2:107:2139]) tablet resolver refreshed! new actor is[2:186:2196] Leader for TabletID 72057594037927937 is [2:186:2196] sender: [2:260:2057] recipient: [2:14:2061] 2025-03-31T16:00:44.472918Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:00:44.474179Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 2 actor [2:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 SourceIdMaxCounts: 3 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2025-03-31T16:00:44.480433Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:266:2258] 2025-03-31T16:00:44.488892Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [2:266:2258] 2025-03-31T16:00:44.495539Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [2:267:2259] 2025-03-31T16:00:44.497857Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [2:267:2259] 2025-03-31T16:00:44.519586Z node 2 :PERSQUEUE INFO: new Cookie default|81ac0029-c5780865-84b73229-b8d26253_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' 
owner default 2025-03-31T16:00:44.531796Z node 2 :PERSQUEUE INFO: new Cookie default|19dc126d-50626fcd-d627fe63-7111ccfe_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-31T16:00:44.576917Z node 2 :PERSQUEUE INFO: new Cookie default|e78e5378-e5b0c13d-8fd19228-c726ec18_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-31T16:00:44.590342Z node 2 :PERSQUEUE INFO: new Cookie default|f55df682-3f707198-69a25438-7c54a01b_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-31T16:00:44.606844Z node 2 :PERSQUEUE INFO: new Cookie default|ab112de8-a55020c9-a0dbc271-38e1e079_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-31T16:00:44.618552Z node 2 :PERSQUEUE INFO: new Cookie default|3e0d900c-901bbd02-9aefd5bb-d7dc7d48_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:103:2057] recipient: [3:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:103:2057] recipient: [3:101:2135] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:108:2057] recipient: [3:101:2135] 2025-03-31T16:00:45.211821Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:00:45.211902Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:149:2057] recipient: [3:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:149:2057] recipient: [3:147:2170] Leader for TabletID 72057594037927938 is [3:153:2174] sender: [3:154:2057] recipient: [3:147:2170] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:179:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:107:2139]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:181:2057] recipient: [3:99:2134] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:184:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:185:2057] recipient: [3:183:2195] Leader for TabletID 72057594037927937 is [3:186:2196] sender: [3:187:2057] recipient: [3:183:2195] 2025-03-31T16:00:45.289341Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:00:45.289425Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info !Reboot 72057594037927937 (actor [3:107:2139]) rebooted! !Reboot 72057594037927937 (actor [3:107:2139]) tablet resolver refreshed! 
new actor is[3:186:2196] Leader for TabletID 72057594037927937 is [3:186:2196] sender: [3:260:2057] recipient: [3:14:2061] 2025-03-31T16:00:47.108559Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:00:47.109494Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 3 actor [3:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 SourceIdMaxCounts: 3 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 3 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 3 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 3 Important: false } 2025-03-31T16:00:47.110596Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [3:266:2258] 2025-03-31T16:00:47.113392Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [3:266:2258] 2025-03-31T16:00:47.123534Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [3:267:2259] 2025-03-31T16:00:47.125733Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [3:267:2259] 2025-03-31T16:00:47.155458Z node 3 :PERSQUEUE INFO: new Cookie default|9662e911-650fb33d-4980281 ... 
20960Z node 47 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 47 actor [47:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 SourceIdMaxCounts: 3 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 47 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 47 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 47 Important: false } 2025-03-31T16:02:14.021956Z node 47 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [47:183:2196] 2025-03-31T16:02:14.025637Z node 47 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [47:183:2196] 2025-03-31T16:02:14.030742Z node 47 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [47:184:2197] 2025-03-31T16:02:14.032603Z node 47 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [47:184:2197] 2025-03-31T16:02:14.052842Z node 47 :PERSQUEUE INFO: new Cookie default|afdd3f64-4cccafcd-5459a6cc-1b778fa8_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-31T16:02:14.060657Z node 47 :PERSQUEUE INFO: new Cookie default|2f0e48d2-6edf28c1-28801c1-5aa66c34_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-31T16:02:14.096866Z node 47 :PERSQUEUE INFO: new Cookie default|54709b3c-7cc49d91-4b29a13d-4e7af183_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-31T16:02:14.110824Z node 47 :PERSQUEUE INFO: new Cookie default|f43ac05c-da04f21e-bcd95350-4dde2c87_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-31T16:02:14.133437Z node 47 :PERSQUEUE INFO: new Cookie default|dfb49258-bcf66c3e-60161760-768c3869_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-31T16:02:14.150954Z node 47 :PERSQUEUE INFO: new Cookie default|bd8e7130-6b001ac2-5cf346be-3c036c95_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default !Reboot 72057594037927937 (actor [47:107:2139]) on event NKikimr::TEvKeyValue::TEvRequest ! 
Leader for TabletID 72057594037927937 is [47:107:2139] sender: [47:282:2057] recipient: [47:99:2134] Leader for TabletID 72057594037927937 is [47:107:2139] sender: [47:285:2057] recipient: [47:14:2061] Leader for TabletID 72057594037927937 is [47:107:2139] sender: [47:286:2057] recipient: [47:284:2279] Leader for TabletID 72057594037927937 is [47:287:2280] sender: [47:288:2057] recipient: [47:284:2279] 2025-03-31T16:02:14.241068Z node 47 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:02:14.241149Z node 47 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-31T16:02:14.242374Z node 47 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [47:336:2321] 2025-03-31T16:02:14.249835Z node 47 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [47:337:2322] 2025-03-31T16:02:14.268259Z node 47 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-31T16:02:14.268365Z node 47 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [47:337:2322] 2025-03-31T16:02:14.271712Z node 47 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-31T16:02:14.271801Z node 47 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [47:336:2321] !Reboot 72057594037927937 (actor [47:107:2139]) rebooted! !Reboot 72057594037927937 (actor [47:107:2139]) tablet resolver refreshed! 
new actor is[47:287:2280] Leader for TabletID 72057594037927937 is [47:287:2280] sender: [47:384:2057] recipient: [47:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:103:2057] recipient: [48:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:103:2057] recipient: [48:101:2135] Leader for TabletID 72057594037927937 is [48:107:2139] sender: [48:108:2057] recipient: [48:101:2135] 2025-03-31T16:02:16.271706Z node 48 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:02:16.271790Z node 48 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [48:149:2057] recipient: [48:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [48:149:2057] recipient: [48:147:2170] Leader for TabletID 72057594037927938 is [48:153:2174] sender: [48:154:2057] recipient: [48:147:2170] Leader for TabletID 72057594037927937 is [48:107:2139] sender: [48:177:2057] recipient: [48:14:2061] 2025-03-31T16:02:16.295251Z node 48 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:02:16.296167Z node 48 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 48 actor [48:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 SourceIdMaxCounts: 3 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 48 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 48 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 48 Important: false } 2025-03-31T16:02:16.297115Z node 48 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [48:183:2196] 2025-03-31T16:02:16.299865Z node 48 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [48:183:2196] 2025-03-31T16:02:16.302027Z node 48 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [48:184:2197] 2025-03-31T16:02:16.304206Z node 48 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [48:184:2197] 2025-03-31T16:02:16.315969Z node 48 :PERSQUEUE INFO: new Cookie default|6c423c30-68e090ca-3134f641-5755d9b8_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-31T16:02:16.323695Z node 48 :PERSQUEUE INFO: new Cookie default|d9c6619a-30808bbc-4b9d34ff-630e1876_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-31T16:02:16.361247Z node 48 :PERSQUEUE INFO: new Cookie default|955ef68b-6ba016c6-431fb9c1-be49664d_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-31T16:02:16.373136Z node 48 :PERSQUEUE INFO: new Cookie default|3c115a09-90e18602-ce1b6256-d5e87d30_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-31T16:02:16.385535Z node 48 :PERSQUEUE INFO: new Cookie default|e0505db6-f44104bd-1d6b55af-eaa74f05_4 generated 
for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-31T16:02:16.397173Z node 48 :PERSQUEUE INFO: new Cookie default|4809097c-5949ae28-544124d0-8c07ab7c_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [0:0:0] sender: [49:103:2057] recipient: [49:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [49:103:2057] recipient: [49:101:2135] Leader for TabletID 72057594037927937 is [49:107:2139] sender: [49:108:2057] recipient: [49:101:2135] 2025-03-31T16:02:17.172114Z node 49 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:02:17.172200Z node 49 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [49:149:2057] recipient: [49:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [49:149:2057] recipient: [49:147:2170] Leader for TabletID 72057594037927938 is [49:153:2174] sender: [49:154:2057] recipient: [49:147:2170] Leader for TabletID 72057594037927937 is [49:107:2139] sender: [49:179:2057] recipient: [49:14:2061] 2025-03-31T16:02:17.195938Z node 49 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:02:17.196957Z node 49 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 49 actor [49:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 SourceIdMaxCounts: 3 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 49 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 49 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 49 Important: false } 2025-03-31T16:02:17.198023Z node 49 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [49:185:2198] 2025-03-31T16:02:17.201283Z node 49 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [49:185:2198] 2025-03-31T16:02:17.203576Z node 49 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [49:186:2199] 2025-03-31T16:02:17.205936Z node 49 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [49:186:2199] 2025-03-31T16:02:17.217819Z node 49 :PERSQUEUE INFO: new Cookie default|a83100f8-410d2d5a-d799db4e-82abb5b7_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-31T16:02:17.228793Z node 49 :PERSQUEUE INFO: new Cookie default|8d9e6cb-5a545e0c-47eb64d2-866aa3d7_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-31T16:02:17.264678Z node 49 :PERSQUEUE INFO: new Cookie default|9fd9b82c-6ed56978-e154b32-4339f4d3_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-31T16:02:17.276542Z node 49 :PERSQUEUE INFO: new Cookie default|1df44770-5861f457-d7fdc818-620916a6_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 
2025-03-31T16:02:17.287767Z node 49 :PERSQUEUE INFO: new Cookie default|a4b24f77-5c697ce4-99f01988-556e0612_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-31T16:02:17.299181Z node 49 :PERSQUEUE INFO: new Cookie default|6e3f9156-2fd8dbf6-b0bf147-e4577150_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::PriorityRange [GOOD] Test command err: 2025-03-31T16:02:14.112232Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 17:17 2025-03-31T16:02:14.112318Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 18:18 2025-03-31T16:02:14.112343Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 19:19 2025-03-31T16:02:14.112368Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 20:20 2025-03-31T16:02:14.112390Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 21:21 2025-03-31T16:02:14.112413Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 22:22 2025-03-31T16:02:14.112438Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 23:23 2025-03-31T16:02:14.112460Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 24:24 2025-03-31T16:02:14.128979Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 17:17 2025-03-31T16:02:14.129054Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 18:18 2025-03-31T16:02:14.129079Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 19:19 2025-03-31T16:02:14.129103Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 20:20 2025-03-31T16:02:14.129128Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 21:21 2025-03-31T16:02:14.129150Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 22:22 2025-03-31T16:02:14.129192Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 23:23 2025-03-31T16:02:14.129219Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 24:24 2025-03-31T16:02:14.178390Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 17:17 2025-03-31T16:02:14.178457Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 18:18 2025-03-31T16:02:14.178482Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 19:19 2025-03-31T16:02:14.178510Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 20:20 2025-03-31T16:02:14.178534Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 21:21 2025-03-31T16:02:14.178555Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 22:22 2025-03-31T16:02:14.178576Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 23:23 2025-03-31T16:02:14.178596Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 24:24 >> TS3FIFOCache::Random [GOOD] >> TS3FIFOGhostQueue::Basics [GOOD] >> TScheme::Shapshot [GOOD] >> TScheme::Delta [GOOD] >> TScheme::Policy [GOOD] >> TScreen::Cuts [GOOD] >> TScreen::Join [GOOD] >> TScreen::Sequential >> TFlatTableExecutor_BackgroundCompactions::TestRunBackgroundCompactionGen2 [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundSnapshotPriorityByTime >> DBase::WideKey [GOOD] >> DBase::VersionBasics [GOOD] >> DBase::VersionPureMem >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundSnapshotPriorityByTime [GOOD] >> TFlatTableExecutor_CachePressure::TestNotEnoughLocalCache [GOOD] >> TFlatTableExecutor_Cold::ColdBorrowScan >> Bloom::Conf [GOOD] >> Bloom::Hashes >> TFlatTableExecutor_Cold::ColdBorrowScan [GOOD] >> TFlatTableExecutor_ColumnGroups::TestManyRows >> Bloom::Hashes [GOOD] >> Bloom::Rater >> 
TCmsTest::SamePriorityRequest [GOOD] >> DataStreams::TestReservedConsumersMetering [GOOD] >> TestYmqHttpProxy::TestReceiveMessageWithAttemptId [GOOD] >> TCmsTest::RequestReplaceManyDevicesOnOneNode [GOOD] >> Bloom::Rater [GOOD] >> Bloom::Dipping >> TScreen::Sequential [GOOD] >> TScreen::Random ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::WaitQueue1 [GOOD] Test command err: 2025-03-31T16:02:11.320925Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004604478452718:2082];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:02:11.321031Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000629/r3tmp/tmplgUs9a/pdisk_1.dat 2025-03-31T16:02:12.415453Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:02:12.416764Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:02:12.416883Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:02:12.426064Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:02:12.475290Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31481, node 1 2025-03-31T16:02:12.560619Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:02:12.560924Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:02:12.682711Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:02:12.684508Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:02:12.684527Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:02:12.684682Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25083 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:02:13.202865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:02:16.303518Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488004604478452718:2082];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:02:16.303611Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> DBase::VersionPureMem [GOOD] >> DBase::VersionPureParts |72.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::SamePriorityRequest [GOOD] |72.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::RequestReplaceManyDevicesOnOneNode [GOOD] >> KqpPg::TypeCoercionBulkUpsert >> TestYmqHttpProxy::TestListQueues >> KqpPg::InsertFromSelect_Simple+useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestReservedConsumersMetering [GOOD] Test command err: 2025-03-31T16:01:46.617443Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004497590544810:2207];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:01:46.635699Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001a38/r3tmp/tmpSKc5sf/pdisk_1.dat 2025-03-31T16:01:47.348146Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:01:47.349829Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:01:47.349894Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:01:47.366238Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64519, node 1 2025-03-31T16:01:47.530385Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:01:47.530406Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:01:47.530414Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:01:47.530521Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8819 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:01:47.852921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:48.008820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:8819 2025-03-31T16:01:48.315365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:48.332906Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-03-31T16:01:48.929649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710661:0, at schemeshard: 72057594046644480 encryption_type: NONE records { sequence_number: "0" shard_id: "shard-000001" } records { sequence_number: "0" shard_id: "shard-000009" } records { sequence_number: "0" shard_id: "shard-000004" } records { sequence_number: "0" shard_id: "shard-000005" } records { sequence_number: "0" shard_id: "shard-000008" } records { sequence_number: "1" shard_id: "shard-000004" } records { sequence_number: "2" shard_id: "shard-000004" } records { sequence_number: "1" shard_id: "shard-000005" } records { sequence_number: "1" shard_id: "shard-000001" } records { sequence_number: "1" shard_id: "shard-000009" } records { sequence_number: "0" shard_id: "shard-000006" } records { sequence_number: "2" shard_id: "shard-000001" } records { sequence_number: "0" shard_id: "shard-000007" } records { sequence_number: "1" shard_id: "shard-000007" } records { sequence_number: "0" shard_id: "shard-000000" } records { sequence_number: "2" shard_id: "shard-000007" } records { sequence_number: "3" shard_id: "shard-000004" } records { sequence_number: "2" shard_id: "shard-000005" } records { sequence_number: "0" shard_id: "shard-000003" } records { sequence_number: "2" shard_id: "shard-000009" } records { sequence_number: "1" shard_id: "shard-000008" } records { sequence_number: "1" shard_id: "shard-000000" } records { sequence_number: "1" shard_id: "shard-000006" } records { sequence_number: "2" shard_id: "shard-000000" } records { sequence_number: "3" shard_id: "shard-000009" } records { sequence_number: "3" shard_id: "shard-000001" } records { sequence_number: "4" shard_id: "shard-000009" } records { sequence_number: "4" shard_id: "shard-000004" } records { sequence_number: "3" shard_id: "shard-000000" } records { sequence_number: "4" shard_id: "shard-000001" } encryption_type: NONE records { sequence_number: "5" shard_id: "shard-000001" } records { sequence_number: "5" shard_id: "shard-000009" } records { sequence_number: "5" shard_id: "shard-000004" } records { sequence_number: "3" shard_id: "shard-000005" } records { sequence_number: "2" shard_id: "shard-000008" } records { sequence_number: "6" shard_id: "shard-000004" } records { sequence_number: "7" shard_id: "shard-000004" } records { sequence_number: "4" shard_id: "shard-000005" } records { sequence_number: "6" shard_id: "shard-000001" } records { sequence_number: "6" shard_id: "shard-000009" } records { sequence_number: "2" shard_id: 
"shard-000006" } records { sequence_number: "7" shard_id: "shard-000001" } records { sequence_number: "3" shard_id: "shard-000007" } records { sequence_number: "4" shard_id: "shard-000007" } records { sequence_number: "4" shard_id: "shard-000000" } records { sequence_number: "5" shard_id: "shard-000007" } records { sequence_number: "8" shard_id: "shard-000004" } records { sequence_number: "5" shard_id: "shard-000005" } records { sequence_number: "1" shard_id: "shard-000003" } records { sequence_number: "7" shard_id: "shard-000009" } records { sequence_number: "3" shard_id: "shard-000008" } records { sequence_number: "5" shard_id: "shard-000000" } records { sequence_number: "3" shard_id: "shard-000006" } records { sequence_number: "6" shard_id: "shard-000000" } records { sequence_number: "8" shard_id: "shard-000009" } records { sequence_number: "8" shard_id: "shard-000001" } records { sequence_number: "9" shard_id: "shard-000009" } records { sequence_number: "9" shard_id: "shard-000004" } records { sequence_number: "7" shard_id: "shard-000000" } records { sequence_number: "9" shard_id: "shard-000001" } encryption_type: NONE records { sequence_number: "10" shard_id: "shard-000001" } records { sequence_number: "10" shard_id: "shard-000009" } records { sequence_number: "10" shard_id: "shard-000004" } records { sequence_number: "6" shard_id: "shard-000005" } records { sequence_number: "4" shard_id: "shard-000008" } records { sequence_number: "11" shard_id: "shard-000004" } records { sequence_number: "12" shard_id: "shard-000004" } records { sequence_number: "7" shard_id: "shard-000005" } records { sequence_number: "11" shard_id: "shard-000001" } records { sequence_number: "11" shard_id: "shard-000009" } records { sequence_number: "4" shard_id: "shard-000006" } records { sequence_number: "12" shard_id: "shard-000001" } records { sequence_number: "6" shard_id: "shard-000007" } records { sequence_number: "7" shard_id: "shard-000007" } records { sequence_number: "8" shard_id: "shard-000000" } records { sequence_number: "8" shard_id: "shard-000007" } records { sequence_number: "13" shard_id: "shard-000004" } records { sequence_number: "8" shard_id: "shard-000005" } records { sequence_number: "2" shard_id: "shard-000003" } records { sequence_number: "12" shard_id: "shard-000009" } records { sequence_number: "5" shard_id: "shard-000008" } records { sequence_number: "9" shard_id: "shard-000000" } records { sequence_number: "5" shard_id: "shard-000006" } records { sequence_number: "10" shard_id: "shard-000000" } records { sequence_number: "13" shard_id: "shard-000009" } records { sequence_number: "13" shard_id: "shard-000001" } records { sequence_number: "14" shard_id: "shard-000009" } records { sequence_number: "14" shard_id: "shard-000004" } records { sequence_number: "11" shard_id: "shard-000000" } records { sequence_number: "14" shard_id: "shard-000001" } 2025-03-31T16:01:51.583277Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488004497590544810:2207];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:01:51.583342Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; encryption_type: NONE records { sequence_number: "15" shard_id: "shard-000001" } records { sequence_number: "15" shard_id: "shard-000009" } records { sequence_number: "15" shard_id: "shard-000004" } records { sequence_number: "9" shard_id: "shard-000005" } 
records { sequence_number: "6" shard_id: "shard-000008" } records { sequence_number: "16" shard_id: "shard-000004" } records { sequence_number: "17" shard_id: "shard-000004" } records { sequence_number: "10" shard_id: "shard-000005" } records { sequence_number: "16" shard_id: "shard-000001" } records { sequence_number: "16" shard_id: "shard-000009" } records { sequence_number: "6" shard_id: "shard-000006" } records { sequence_number: "17" shard_id: "shard-000001" } records { sequence_number: "9" shard_id: "shard-000007" } records { sequence_number: "10" shard_id: "shard-000007" } records { sequence_number: "12" shard_id: "shard-000000" } records { sequence_number: "11" shard_id: "shard-000007" } records { sequence_number: "18" shard_id: "shard-000004" } records { sequence_number: "11" shard_id: "shard-000005" } records { sequence_number: "3" shard_id: "shard-000003" } records { sequence_number: "17" shard_id: "shard-000009" } records { sequence_number: "7" shard_id: "shard-000008" } records { sequence_number: "13" shard_id: "shard-000000" } records { sequence_number: "7" shard_id: "shard-000006" } records { sequence_number: "14" shard_id: "shard-000000" } records { sequence_number: "18" shard_id: "shard-000009" } records { sequence_number: "18" shard_id: "shard-000001" } records { sequence_number: "19" shard_id: "shard-000009" } records { sequence_number: "19" shard_id: "shard-000004" } records { sequence_number: "15" shard_id: "shard-000000" } records { sequence_number: "19" shard_id: "shard-000001" } encryption_type: NONE records { sequence_number: "20" shard_id: "shard-000001" } records { sequence_number: "20" shard_id: "shard-000009" } records { sequence_number: "20" shard_id: "shard-000004" } records { sequence_number: "1 ... older_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1743436931096-170","schema":"yds.throughput.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":2},"usage":{"quantity":0,"unit":"second","start":1743436931,"finish":1743436931},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1743436931}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1743436931096-171","schema":"yds.storage.reserved.v1","tags":{},"usage":{"quantity":0,"unit":"mbyte*second","start":1743436931,"finish":1743436931},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1743436931}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"used_storage-root-72075186224037888-1743436931096-172","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":0,"unit":"byte*second","start":1743436931,"finish":1743436931},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1743436931}' Got line from metering file data: 
'{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"put_units-root-72075186224037888-1743436931153-173","schema":"yds.events.puts.v1","tags":{},"usage":{"quantity":1,"unit":"put_events","start":1743436931,"finish":1743436932},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1743436932}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1743436931153-174","schema":"yds.throughput.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":2},"usage":{"quantity":1,"unit":"second","start":1743436931,"finish":1743436932},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1743436932}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1743436931153-175","schema":"yds.storage.reserved.v1","tags":{},"usage":{"quantity":56320,"unit":"mbyte*second","start":1743436931,"finish":1743436932},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1743436932}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"used_storage-root-72075186224037888-1743436931153-176","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":1,"unit":"byte*second","start":1743436931,"finish":1743436932},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1743436932}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"put_units-root-72075186224037888-1743436932210-177","schema":"yds.events.puts.v1","tags":{},"usage":{"quantity":1,"unit":"put_events","start":1743436932,"finish":1743436933},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1743436933}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1743436932210-178","schema":"yds.throughput.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":2},"usage":{"quantity":1,"unit":"second","start":1743436932,"finish":1743436933},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1743436933}' Got line from metering file data: 
'{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1743436932210-179","schema":"yds.storage.reserved.v1","tags":{},"usage":{"quantity":56320,"unit":"mbyte*second","start":1743436932,"finish":1743436933},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1743436933}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"used_storage-root-72075186224037888-1743436932210-180","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":1,"unit":"byte*second","start":1743436932,"finish":1743436933},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1743436933}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"put_units-root-72075186224037888-1743436933260-181","schema":"yds.events.puts.v1","tags":{},"usage":{"quantity":1,"unit":"put_events","start":1743436933,"finish":1743436934},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1743436934}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1743436933260-182","schema":"yds.throughput.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":2},"usage":{"quantity":1,"unit":"second","start":1743436933,"finish":1743436934},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1743436934}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1743436933260-183","schema":"yds.storage.reserved.v1","tags":{},"usage":{"quantity":56320,"unit":"mbyte*second","start":1743436933,"finish":1743436934},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1743436934}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"used_storage-root-72075186224037888-1743436933260-184","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":1,"unit":"byte*second","start":1743436933,"finish":1743436934},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1743436934}' Got line from metering file data: 
'{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"put_units-root-72075186224037888-1743436934375-185","schema":"yds.events.puts.v1","tags":{},"usage":{"quantity":1,"unit":"put_events","start":1743436934,"finish":1743436935},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1743436935}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1743436934375-186","schema":"yds.throughput.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":2},"usage":{"quantity":1,"unit":"second","start":1743436934,"finish":1743436935},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1743436935}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1743436934375-187","schema":"yds.storage.reserved.v1","tags":{},"usage":{"quantity":56320,"unit":"mbyte*second","start":1743436934,"finish":1743436935},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1743436935}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"used_storage-root-72075186224037888-1743436934375-188","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":1,"unit":"byte*second","start":1743436934,"finish":1743436935},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1743436935}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"put_units-root-72075186224037888-1743436935447-189","schema":"yds.events.puts.v1","tags":{},"usage":{"quantity":1,"unit":"put_events","start":1743436935,"finish":1743436936},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1743436936}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1743436935447-190","schema":"yds.throughput.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":2},"usage":{"quantity":1,"unit":"second","start":1743436935,"finish":1743436936},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1743436936}' Got line from metering file data: 
'{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1743436935447-191","schema":"yds.storage.reserved.v1","tags":{},"usage":{"quantity":56320,"unit":"mbyte*second","start":1743436935,"finish":1743436936},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1743436936}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"used_storage-root-72075186224037888-1743436935447-192","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":1,"unit":"byte*second","start":1743436935,"finish":1743436936},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1743436936}' >> Bloom::Dipping [GOOD] >> Bloom::Basics [GOOD] >> IncrementalBackup::SimpleBackup >> Bloom::Stairs >> DBase::VersionPureParts [GOOD] >> DBase::VersionCompactedMem >> TScreen::Random [GOOD] >> TScreen::Shrink [GOOD] >> TScreen::Cook [GOOD] >> TSharedPageCache::Limits >> Bloom::Stairs [GOOD] >> BuildStatsBTreeIndex::Single >> BuildStatsBTreeIndex::Single [GOOD] >> BuildStatsBTreeIndex::Single_Slices >> IncrementalBackup::SimpleRestoreBackupCollection+WithIncremental >> BuildStatsBTreeIndex::Single_Slices [GOOD] >> BuildStatsBTreeIndex::Single_History |72.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> BuildStatsBTreeIndex::Single_History [GOOD] >> BuildStatsBTreeIndex::Single_History_Slices >> DBase::VersionCompactedMem [GOOD] >> DBase::VersionCompactedParts >> TSharedPageCache::Limits [GOOD] >> TSharedPageCache::Limits_Config >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-70 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-71 >> TPartBtreeIndexIteration::NoNodes_History >> TestKinesisHttpProxy::GoodRequestGetRecords [GOOD] >> TSchemeShardServerLess::BaseCase-AlterDatabaseCreateHiveFirst-false >> BuildStatsBTreeIndex::Single_History_Slices [GOOD] >> BuildStatsBTreeIndex::Single_Groups >> BuildStatsBTreeIndex::Single_Groups [GOOD] >> BuildStatsBTreeIndex::Single_Groups_Slices >> SystemView::AuthPermissions_ResultOrder [GOOD] >> SystemView::AuthPermissions_Selects >> TestKinesisHttpProxy::TestListStreamConsumersWithMaxResults [GOOD] >> BuildStatsBTreeIndex::Single_Groups_Slices [GOOD] >> BuildStatsBTreeIndex::Single_Groups_History |72.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> DBase::VersionCompactedParts [GOOD] >> DBase::UncommittedChangesVisibility [GOOD] >> DBase::UncommittedChangesCommitWithUpdates [GOOD] >> DBase::ReplayNewTable [GOOD] >> DBase::SnapshotNewTable [GOOD] >> Memtable::Basics [GOOD] >> Memtable::BasicsReverse [GOOD] >> Memtable::Markers [GOOD] >> Memtable::Overlap [GOOD] >> Memtable::Wreck >> TSharedPageCache::Limits_Config [GOOD] >> TSharedPageCache::ThreeLeveledLRU >> TestKinesisHttpProxy::GoodRequestGetRecordsCbor >> BuildStatsBTreeIndex::Single_Groups_History [GOOD] >> BuildStatsBTreeIndex::Single_Groups_History_Slices >> Memtable::Wreck [GOOD] >> Memtable::Erased >> TFlatTableExecutor_ColumnGroups::TestManyRows [GOOD] >> TFlatTableExecutor_CompactionScan::TestCompactionScan >> Memtable::Erased [GOOD] >> NFwd_TBlobs::MemTableTest [GOOD] >> 
NFwd_TBlobs::Lower [GOOD] >> NFwd_TBlobs::Sieve [GOOD] >> NFwd_TBlobs::SieveFiltered [GOOD] >> NFwd_TBlobs::Basics [GOOD] >> NFwd_TBlobs::Simple [GOOD] >> NFwd_TBlobs::Shuffle [GOOD] >> NFwd_TBlobs::Grow [GOOD] >> NFwd_TBlobs::Trace [GOOD] >> NFwd_TBlobs::Filtered [GOOD] >> NFwd_TBTreeIndexCache::Basics [GOOD] >> NFwd_TBTreeIndexCache::IndexPagesLocator [GOOD] >> NFwd_TBTreeIndexCache::GetTwice [GOOD] >> NFwd_TBTreeIndexCache::ForwardTwice [GOOD] >> NFwd_TBTreeIndexCache::Forward_OnlyUsed [GOOD] >> NFwd_TBTreeIndexCache::Skip_Done [GOOD] >> NFwd_TBTreeIndexCache::Skip_Done_None [GOOD] >> NFwd_TBTreeIndexCache::Skip_Keep [GOOD] >> NFwd_TBTreeIndexCache::Skip_Wait [GOOD] >> NFwd_TBTreeIndexCache::Trace_BTree [GOOD] >> NFwd_TBTreeIndexCache::Trace_Data [GOOD] >> NFwd_TBTreeIndexCache::End [GOOD] >> NFwd_TBTreeIndexCache::Slices [GOOD] >> NFwd_TBTreeIndexCache::ManyApplies [GOOD] >> TFlatTableExecutor_CompactionScan::TestCompactionScan [GOOD] >> TFlatTableExecutor_CompressedSelectRows::TestCompressedSelectRows >> BuildStatsBTreeIndex::Single_Groups_History_Slices [GOOD] >> BuildStatsBTreeIndex::Mixed >> TFlatTableExecutor_CompressedSelectRows::TestCompressedSelectRows [GOOD] >> TFlatTableExecutor_Exceptions::TestTabletExecuteExceptionDirect >> TFlatTableExecutor_Exceptions::TestTabletExecuteExceptionDirect [GOOD] >> TFlatTableExecutor_Exceptions::TestTabletExecuteExceptionEnqueue >> BuildStatsBTreeIndex::Mixed [GOOD] >> BuildStatsBTreeIndex::Mixed_Groups >> TestKinesisHttpProxy::TestListStreamConsumersWithToken >> TFlatTableExecutor_Exceptions::TestTabletExecuteExceptionEnqueue [GOOD] >> TFlatTableExecutor_ExecutorTxLimit::TestExecutorTxLimit [GOOD] >> TFlatTableExecutor_Follower::BasicFollowerRead [GOOD] >> TFlatTableExecutor_Follower::FollowerEarlyRebootHoles [GOOD] >> TFlatTableExecutor_Follower::FollowerAttachOnTxQueueScanSnapshot >> TSchemeShardServerLess::TestServerlessComputeResourcesModeValidation >> BuildStatsBTreeIndex::Mixed_Groups [GOOD] >> BuildStatsBTreeIndex::Mixed_Groups_History >> TFlatTableExecutor_Follower::FollowerAttachOnTxQueueScanSnapshot [GOOD] >> TFlatTableExecutor_Follower::FollowerAttachAfterLoan [GOOD] >> TFlatTableExecutor_Gc::TestFailedGcAfterReboot >> TFlatTableExecutor_Gc::TestFailedGcAfterReboot [GOOD] >> TFlatTableExecutor_IndexLoading::CalculateReadSize_FlatIndex >> TFlatTableExecutor_Reboot::TestSchemeGcAfterReassign >> BuildStatsBTreeIndex::Mixed_Groups_History [GOOD] >> BuildStatsFlatIndex::Single >> TFlatTableExecutor_Reboot::TestSchemeGcAfterReassign [GOOD] >> TFlatTableExecutor_RejectProbability::MaxedOutRejectProbability >> TSchemeShardServerLess::BaseCase-AlterDatabaseCreateHiveFirst-false [GOOD] >> TPQTest::TestOwnership [GOOD] >> TPQTest::TestOffsetEstimation [GOOD] >> TPQTest::TestMaxTimeLagRewind >> BuildStatsFlatIndex::Single [GOOD] >> BuildStatsFlatIndex::Single_Slices >> BuildStatsFlatIndex::Single_Slices [GOOD] >> BuildStatsFlatIndex::Single_History >> TSchemeShardServerLess::TestServerlessComputeResourcesModeFeatureFlag >> TestKinesisHttpProxy::TestUnauthorizedPutRecords [GOOD] >> TFlatTableExecutor_IndexLoading::CalculateReadSize_FlatIndex [GOOD] >> TFlatTableExecutor_IndexLoading::CalculateReadSize_BTreeIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> NFwd_TBTreeIndexCache::ManyApplies [GOOD] Test command err: 00000.000 II| FAKE_ENV: Born at 2025-03-31T16:02:17.376525Z 00000.011 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 
262144 00000.012 II| FAKE_ENV: Starting storage for BS group 0 00000.013 II| FAKE_ENV: Starting storage for BS group 1 00000.013 II| FAKE_ENV: Starting storage for BS group 2 00000.013 II| FAKE_ENV: Starting storage for BS group 3 00000.019 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.020 NN| TABLET_SAUSAGECACHE: Poison cache serviced 0 reqs hit {0 0b} miss {0 0b} 00000.020 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.020 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {146b, 4} 00000.020 II| FAKE_ENV: DS.1 gone, left {105b, 3}, put {105b, 3} 00000.020 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.020 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.020 II| FAKE_ENV: All BS storage groups are stopped 00000.021 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.021 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-03-31T16:02:17.402852Z 00000.009 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.010 II| FAKE_ENV: Starting storage for BS group 0 00000.010 II| FAKE_ENV: Starting storage for BS group 1 00000.010 II| FAKE_ENV: Starting storage for BS group 2 00000.010 II| FAKE_ENV: Starting storage for BS group 3 00000.018 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.019 NN| TABLET_SAUSAGECACHE: Poison cache serviced 0 reqs hit {0 0b} miss {0 0b} 00000.019 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.019 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {292b, 8} 00000.019 II| FAKE_ENV: DS.1 gone, left {210b, 6}, put {210b, 6} 00000.019 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.019 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.019 II| FAKE_ENV: All BS storage groups are stopped 00000.019 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00000.019 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-03-31T16:02:17.428453Z 00000.011 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.012 II| FAKE_ENV: Starting storage for BS group 0 00000.012 II| FAKE_ENV: Starting storage for BS group 1 00000.012 II| FAKE_ENV: Starting storage for BS group 2 00000.012 II| FAKE_ENV: Starting storage for BS group 3 00000.052 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.053 NN| TABLET_SAUSAGECACHE: Poison cache serviced 1 reqs hit {1 76b} miss {0 0b} 00000.053 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.053 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {116b, 2} 00000.053 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {1181b, 13} 00000.053 II| FAKE_ENV: DS.1 gone, left {909b, 3}, put {1913b, 12} 00000.053 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {132b, 2} 00000.053 II| FAKE_ENV: All BS storage groups are stopped 00000.053 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.053 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-03-31T16:02:17.487533Z 00000.008 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.008 II| FAKE_ENV: Starting storage for BS group 0 00000.009 II| FAKE_ENV: Starting storage for BS group 1 00000.009 II| FAKE_ENV: Starting storage for BS group 2 00000.009 II| FAKE_ENV: Starting storage for BS group 3 
00000.038 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.039 NN| TABLET_SAUSAGECACHE: Poison cache serviced 1 reqs hit {1 102443b} miss {0 0b} 00000.039 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.039 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {751b, 11} 00000.039 II| FAKE_ENV: DS.1 gone, left {541b, 3}, put {103970b, 10} 00000.039 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.039 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.039 II| FAKE_ENV: All BS storage groups are stopped 00000.039 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.039 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-03-31T16:02:17.533490Z 00000.009 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.009 II| FAKE_ENV: Starting storage for BS group 0 00000.010 II| FAKE_ENV: Starting storage for BS group 1 00000.010 II| FAKE_ENV: Starting storage for BS group 2 00000.010 II| FAKE_ENV: Starting storage for BS group 3 00000.081 II| TABLET_SAUSAGECACHE: Wakeup 1 00000.082 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.083 NN| TABLET_SAUSAGECACHE: Poison cache serviced 11 reqs hit {18 513007b} miss {0 0b} 00000.083 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.083 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {2095b, 23} 00000.083 II| FAKE_ENV: DS.1 gone, left {774b, 4}, put {210604b, 21} 00000.083 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {205178b, 4} 00000.083 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {102690b, 4} 00000.084 II| FAKE_ENV: All BS storage groups are stopped 00000.084 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 15.00s 00000.084 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 16}, stopped 00000.000 II| FAKE_ENV: Born at 2025-03-31T16:02:17.623940Z 00000.009 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.009 II| FAKE_ENV: Starting storage for BS group 0 00000.010 II| FAKE_ENV: Starting storage for BS group 1 00000.010 II| FAKE_ENV: Starting storage for BS group 2 00000.010 II| FAKE_ENV: Starting storage for BS group 3 00000.056 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.057 NN| TABLET_SAUSAGECACHE: Poison cache serviced 3 reqs hit {3 307329b} miss {0 0b} 00000.057 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.057 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {1826b, 23} 00000.057 II| FAKE_ENV: DS.1 gone, left {1247b, 3}, put {311467b, 22} 00000.057 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.057 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.057 II| FAKE_ENV: All BS storage groups are stopped 00000.057 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.057 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-03-31T16:02:17.687788Z 00000.008 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.008 II| FAKE_ENV: Starting storage for BS group 0 00000.009 II| FAKE_ENV: Starting storage for BS group 1 00000.009 II| FAKE_ENV: Starting storage for BS group 2 00000.009 II| FAKE_ENV: Starting storage for BS group 3 00000.039 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 5 actors 00000.040 NN| TABLET_SAUSAGECACHE: Poison cache serviced 4 reqs hit {8 
307836b} miss {0 0b} 00000.040 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.040 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.040 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.040 II| FAKE_ENV: DS.0 gone, left {57b, 2}, put {1423b, 30} 00000.040 II| FAKE_ENV: DS.1 gone, left {629b, 3}, put {310476b, 16} 00000.040 II| FAKE_ENV: All BS storage groups are stopped 00000.040 II| FAKE_ENV: Model stopped, hosted 5 actors, spent 0.000s 00000.041 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-03-31T16:02:17.734246Z 00000.007 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.008 II| FAKE_ENV: Starting storage for BS group 0 00000.008 II| FAKE_ENV: Starting storage for BS group 1 00000.008 II| FAKE_ENV: Starting storage for BS group 2 00000.008 II| FAKE_ENV: Starting storage for BS group 3 00000.052 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.053 NN| TABLET_SAUSAGECACHE: Poison cache serviced 2 reqs hit {2 194646b} miss {0 0b} 00000.053 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.053 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.053 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {1766b, 27} 00000.053 II| FAKE_ENV: DS.1 gone, left {732b, 6}, put {197813b, 24} 00000.053 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.053 II| FAKE_ENV: All BS storage groups are stopped 00000.053 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.054 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-03-31T16:02:17.793514Z 00000.008 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.008 II| FAKE_ENV: Starting storage for BS group 0 00000.008 II| FAKE_ENV: Starting storage for BS group 1 00000.008 II| FAKE_ENV: Starting storage for BS group 2 00000.009 II| FAKE_ENV: Starting storage for BS group 3 00000.014 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.015 NN| TABLET_SAUSAGECACHE: Poison cache serviced 0 reqs hit {0 0b} miss {0 0b} 00000.015 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.015 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {326b, 7} 00000.015 II| FAKE_ENV: DS.1 gone, left {418b, 4}, put {453b, 5} 00000.015 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.015 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.015 II| FAKE_ENV: All BS storage groups are stopped 00000.015 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.015 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-03-31T16:02:17.814372Z 00000.007 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.008 II| FAKE_ENV: Starting storage for BS group 0 00000.008 II| FAKE_ENV: Starting storage for BS group 1 00000.008 II| FAKE_ENV: Starting storage for BS group 2 00000.008 II| FAKE_ENV: Starting storage for BS group 3 00000.081 II| TABLET_SAUSAGECACHE: Wakeup 1 00000.082 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.083 NN| TABLET_SAUSAGECACHE: Poison cache serviced 6 reqs hit {8 410030b} miss {0 0b} 00000.083 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.083 II| FAKE_ENV: DS.1 gone, left {504b, 4}, put {310786b, 20} 00000.083 II| FAKE_ENV: DS.2 
gone, left {0b, 0}, put {0b, 0} 00000.083 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.083 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {1492b, 23} 00000.083 II| FAKE_ENV: All BS storage groups are stopped 00000.083 II| FAKE_ENV: Model stopped, hosted 5 actors, spent 15.00s 00000.083 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 16}, stopped 00000.000 II| FAKE_ENV: Born at 2025-03-31T16:02:17.906082Z 00000.023 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.024 II| FAKE_ENV: Starting storage for BS group 0 00000.024 II| FAKE_ENV: Starting storage for BS group 1 00000.024 II| FAKE_ENV: Starting storage for BS group 2 00000.024 II| FAKE_ENV: Starting storage for BS group 3 00000.053 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.054 NN| TABLET_SAUSAGECACHE: Poison cache serviced 1 reqs hit {1 102443b} miss {0 0b} 00000.054 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.054 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {751b, 11} 00000.054 II| FAKE_ENV: DS.1 gone, left {541b, 3}, put {103970b, 10} 0 ... t32 : 400} | ERowOp 1: {5} {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: {6} {Set 1 Uint32 : 600} | ERowOp 1: {7} {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: {8} {Set 1 Uint32 : 800} | ERowOp 1: {9} {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: {10} {Set 1 Uint32 : 1000} | ERowOp 1: {11} {Set 1 Uint32 : 1100} + Rows{7} Label{74 rev 1, 50b}, [12, +2)row | ERowOp 1: {12} {Set 1 Uint32 : 1200} | ERowOp 1: {13} {Set 1 Uint32 : 1300} + Rows{8} Label{84 rev 1, 50b}, [14, +2)row | ERowOp 1: {14} {Set 1 Uint32 : 1400} | ERowOp 1: {15} {Set 1 Uint32 : 1500} + Rows{9} Label{94 rev 1, 50b}, [16, +2)row | ERowOp 1: {16} {Set 1 Uint32 : 1600} | ERowOp 1: {17} {Set 1 Uint32 : 1700} + Rows{11} Label{114 rev 1, 50b}, [18, +2)row | ERowOp 1: {18} {Set 1 Uint32 : 1800} | ERowOp 1: {19} {Set 1 Uint32 : 1900} + Rows{12} Label{124 rev 1, 50b}, [20, +2)row | ERowOp 1: {20} {Set 1 Uint32 : 2000} | ERowOp 1: {21} {Set 1 Uint32 : 2100} + Rows{13} Label{134 rev 1, 50b}, [22, +2)row | ERowOp 1: {22} {Set 1 Uint32 : 2200} | ERowOp 1: {23} {Set 1 Uint32 : 2300} + Rows{15} Label{154 rev 1, 50b}, [24, +2)row | ERowOp 1: {24} {Set 1 Uint32 : 2400} | ERowOp 1: {25} {Set 1 Uint32 : 2500} + Rows{16} Label{164 rev 1, 50b}, [26, +2)row | ERowOp 1: {26} {Set 1 Uint32 : 2600} | ERowOp 1: {27} {Set 1 Uint32 : 2700} + Rows{17} Label{174 rev 1, 50b}, [28, +2)row | ERowOp 1: {28} {Set 1 Uint32 : 2800} | ERowOp 1: {29} {Set 1 Uint32 : 2900} + Rows{19} Label{194 rev 1, 50b}, [30, +2)row | ERowOp 1: {30} {Set 1 Uint32 : 3000} | ERowOp 1: {31} {Set 1 Uint32 : 3100} + Rows{20} Label{204 rev 1, 50b}, [32, +2)row | ERowOp 1: {32} {Set 1 Uint32 : 3200} | ERowOp 1: {33} {Set 1 Uint32 : 3300} + Rows{21} Label{214 rev 1, 50b}, [34, +2)row | ERowOp 1: {34} {Set 1 Uint32 : 3400} | ERowOp 1: {35} {Set 1 Uint32 : 3500} + Rows{24} Label{244 rev 1, 50b}, [36, +2)row | ERowOp 1: {36} {Set 1 Uint32 : 3600} | ERowOp 1: {37} {Set 1 Uint32 : 3700} + Rows{25} Label{254 rev 1, 50b}, [38, +2)row | ERowOp 1: {38} {Set 1 Uint32 : 3800} | ERowOp 1: {39} {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 2358b + BTreeIndex{PageId: 28 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 98b} | + BTreeIndex{PageId: 23 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 6 
RowCount: 6 DataSize: 150 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 0 RowCount: 2 DataSize: 50 ErasedRowCount: 0 | | | > {2} | | | PageId: 1 RowCount: 4 DataSize: 100 ErasedRowCount: 0 | | | > {4} | | | PageId: 2 RowCount: 6 DataSize: 150 ErasedRowCount: 0 | | > {6} | | + BTreeIndex{PageId: 10 RowCount: 12 DataSize: 300 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 3 RowCount: 8 DataSize: 200 ErasedRowCount: 0 | | | > {8} | | | PageId: 4 RowCount: 10 DataSize: 250 ErasedRowCount: 0 | | | > {10} | | | PageId: 5 RowCount: 12 DataSize: 300 ErasedRowCount: 0 | | > {12} | | + BTreeIndex{PageId: 14 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 7 RowCount: 14 DataSize: 350 ErasedRowCount: 0 | | | > {14} | | | PageId: 8 RowCount: 16 DataSize: 400 ErasedRowCount: 0 | | | > {16} | | | PageId: 9 RowCount: 18 DataSize: 450 ErasedRowCount: 0 | > {18} | + BTreeIndex{PageId: 27 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 18 RowCount: 24 DataSize: 600 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 11 RowCount: 20 DataSize: 500 ErasedRowCount: 0 | | | > {20} | | | PageId: 12 RowCount: 22 DataSize: 550 ErasedRowCount: 0 | | | > {22} | | | PageId: 13 RowCount: 24 DataSize: 600 ErasedRowCount: 0 | | > {24} | | + BTreeIndex{PageId: 22 RowCount: 30 DataSize: 750 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 15 RowCount: 26 DataSize: 650 ErasedRowCount: 0 | | | > {26} | | | PageId: 16 RowCount: 28 DataSize: 700 ErasedRowCount: 0 | | | > {28} | | | PageId: 17 RowCount: 30 DataSize: 750 ErasedRowCount: 0 | | > {30} | | + BTreeIndex{PageId: 26 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 233b} | | | PageId: 19 RowCount: 32 DataSize: 800 ErasedRowCount: 0 | | | > {32} | | | PageId: 20 RowCount: 34 DataSize: 850 ErasedRowCount: 0 | | | > {34} | | | PageId: 21 RowCount: 36 DataSize: 900 ErasedRowCount: 0 | | | > {36} | | | PageId: 24 RowCount: 38 DataSize: 950 ErasedRowCount: 0 | | | > {38} | | | PageId: 25 RowCount: 40 DataSize: 1000 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: {0} {Set 1 Uint32 : 0} | ERowOp 1: {1} {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: {2} {Set 1 Uint32 : 200} | ERowOp 1: {3} {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: {4} {Set 1 Uint32 : 400} | ERowOp 1: {5} {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: {6} {Set 1 Uint32 : 600} | ERowOp 1: {7} {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: {8} {Set 1 Uint32 : 800} | ERowOp 1: {9} {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: {10} {Set 1 Uint32 : 1000} | ERowOp 1: {11} {Set 1 Uint32 : 1100} + Rows{7} Label{74 rev 1, 50b}, [12, +2)row | ERowOp 1: {12} {Set 1 Uint32 : 1200} | ERowOp 1: {13} {Set 1 Uint32 : 1300} + Rows{8} Label{84 rev 1, 50b}, [14, +2)row | ERowOp 1: {14} {Set 1 Uint32 : 1400} | ERowOp 1: {15} {Set 1 Uint32 : 1500} + Rows{9} Label{94 rev 1, 50b}, [16, +2)row | ERowOp 1: {16} {Set 1 Uint32 : 1600} | ERowOp 1: {17} {Set 1 Uint32 : 1700} + Rows{11} Label{114 rev 1, 50b}, [18, +2)row | ERowOp 1: {18} {Set 1 Uint32 : 1800} | ERowOp 1: {19} {Set 1 Uint32 : 1900} + Rows{12} Label{124 rev 1, 50b}, [20, +2)row | ERowOp 1: {20} {Set 1 Uint32 : 2000} | ERowOp 1: {21} {Set 1 Uint32 : 2100} + Rows{13} Label{134 rev 1, 50b}, [22, +2)row | ERowOp 1: {22} {Set 1 Uint32 : 2200} | ERowOp 1: {23} {Set 1 Uint32 : 2300} + Rows{15} 
Label{154 rev 1, 50b}, [24, +2)row | ERowOp 1: {24} {Set 1 Uint32 : 2400} | ERowOp 1: {25} {Set 1 Uint32 : 2500} + Rows{16} Label{164 rev 1, 50b}, [26, +2)row | ERowOp 1: {26} {Set 1 Uint32 : 2600} | ERowOp 1: {27} {Set 1 Uint32 : 2700} + Rows{17} Label{174 rev 1, 50b}, [28, +2)row | ERowOp 1: {28} {Set 1 Uint32 : 2800} | ERowOp 1: {29} {Set 1 Uint32 : 2900} + Rows{19} Label{194 rev 1, 50b}, [30, +2)row | ERowOp 1: {30} {Set 1 Uint32 : 3000} | ERowOp 1: {31} {Set 1 Uint32 : 3100} + Rows{20} Label{204 rev 1, 50b}, [32, +2)row | ERowOp 1: {32} {Set 1 Uint32 : 3200} | ERowOp 1: {33} {Set 1 Uint32 : 3300} + Rows{21} Label{214 rev 1, 50b}, [34, +2)row | ERowOp 1: {34} {Set 1 Uint32 : 3400} | ERowOp 1: {35} {Set 1 Uint32 : 3500} + Rows{24} Label{244 rev 1, 50b}, [36, +2)row | ERowOp 1: {36} {Set 1 Uint32 : 3600} | ERowOp 1: {37} {Set 1 Uint32 : 3700} + Rows{25} Label{254 rev 1, 50b}, [38, +2)row | ERowOp 1: {38} {Set 1 Uint32 : 3800} | ERowOp 1: {39} {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 2358b + BTreeIndex{PageId: 28 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 98b} | + BTreeIndex{PageId: 23 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 6 RowCount: 6 DataSize: 150 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 0 RowCount: 2 DataSize: 50 ErasedRowCount: 0 | | | > {2} | | | PageId: 1 RowCount: 4 DataSize: 100 ErasedRowCount: 0 | | | > {4} | | | PageId: 2 RowCount: 6 DataSize: 150 ErasedRowCount: 0 | | > {6} | | + BTreeIndex{PageId: 10 RowCount: 12 DataSize: 300 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 3 RowCount: 8 DataSize: 200 ErasedRowCount: 0 | | | > {8} | | | PageId: 4 RowCount: 10 DataSize: 250 ErasedRowCount: 0 | | | > {10} | | | PageId: 5 RowCount: 12 DataSize: 300 ErasedRowCount: 0 | | > {12} | | + BTreeIndex{PageId: 14 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 7 RowCount: 14 DataSize: 350 ErasedRowCount: 0 | | | > {14} | | | PageId: 8 RowCount: 16 DataSize: 400 ErasedRowCount: 0 | | | > {16} | | | PageId: 9 RowCount: 18 DataSize: 450 ErasedRowCount: 0 | > {18} | + BTreeIndex{PageId: 27 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 18 RowCount: 24 DataSize: 600 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 11 RowCount: 20 DataSize: 500 ErasedRowCount: 0 | | | > {20} | | | PageId: 12 RowCount: 22 DataSize: 550 ErasedRowCount: 0 | | | > {22} | | | PageId: 13 RowCount: 24 DataSize: 600 ErasedRowCount: 0 | | > {24} | | + BTreeIndex{PageId: 22 RowCount: 30 DataSize: 750 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 15 RowCount: 26 DataSize: 650 ErasedRowCount: 0 | | | > {26} | | | PageId: 16 RowCount: 28 DataSize: 700 ErasedRowCount: 0 | | | > {28} | | | PageId: 17 RowCount: 30 DataSize: 750 ErasedRowCount: 0 | | > {30} | | + BTreeIndex{PageId: 26 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 233b} | | | PageId: 19 RowCount: 32 DataSize: 800 ErasedRowCount: 0 | | | > {32} | | | PageId: 20 RowCount: 34 DataSize: 850 ErasedRowCount: 0 | | | > {34} | | | PageId: 21 RowCount: 36 DataSize: 900 ErasedRowCount: 0 | | | > {36} | | | PageId: 24 RowCount: 38 DataSize: 950 ErasedRowCount: 0 | | | > {38} | | | PageId: 25 RowCount: 40 DataSize: 1000 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: {0} {Set 1 Uint32 : 0} | ERowOp 1: {1} {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: {2} {Set 1 Uint32 : 200} | ERowOp 1: 
{3} {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: {4} {Set 1 Uint32 : 400} | ERowOp 1: {5} {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: {6} {Set 1 Uint32 : 600} | ERowOp 1: {7} {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: {8} {Set 1 Uint32 : 800} | ERowOp 1: {9} {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: {10} {Set 1 Uint32 : 1000} | ERowOp 1: {11} {Set 1 Uint32 : 1100} + Rows{7} Label{74 rev 1, 50b}, [12, +2)row | ERowOp 1: {12} {Set 1 Uint32 : 1200} | ERowOp 1: {13} {Set 1 Uint32 : 1300} + Rows{8} Label{84 rev 1, 50b}, [14, +2)row | ERowOp 1: {14} {Set 1 Uint32 : 1400} | ERowOp 1: {15} {Set 1 Uint32 : 1500} + Rows{9} Label{94 rev 1, 50b}, [16, +2)row | ERowOp 1: {16} {Set 1 Uint32 : 1600} | ERowOp 1: {17} {Set 1 Uint32 : 1700} + Rows{11} Label{114 rev 1, 50b}, [18, +2)row | ERowOp 1: {18} {Set 1 Uint32 : 1800} | ERowOp 1: {19} {Set 1 Uint32 : 1900} + Rows{12} Label{124 rev 1, 50b}, [20, +2)row | ERowOp 1: {20} {Set 1 Uint32 : 2000} | ERowOp 1: {21} {Set 1 Uint32 : 2100} + Rows{13} Label{134 rev 1, 50b}, [22, +2)row | ERowOp 1: {22} {Set 1 Uint32 : 2200} | ERowOp 1: {23} {Set 1 Uint32 : 2300} + Rows{15} Label{154 rev 1, 50b}, [24, +2)row | ERowOp 1: {24} {Set 1 Uint32 : 2400} | ERowOp 1: {25} {Set 1 Uint32 : 2500} + Rows{16} Label{164 rev 1, 50b}, [26, +2)row | ERowOp 1: {26} {Set 1 Uint32 : 2600} | ERowOp 1: {27} {Set 1 Uint32 : 2700} + Rows{17} Label{174 rev 1, 50b}, [28, +2)row | ERowOp 1: {28} {Set 1 Uint32 : 2800} | ERowOp 1: {29} {Set 1 Uint32 : 2900} + Rows{19} Label{194 rev 1, 50b}, [30, +2)row | ERowOp 1: {30} {Set 1 Uint32 : 3000} | ERowOp 1: {31} {Set 1 Uint32 : 3100} + Rows{20} Label{204 rev 1, 50b}, [32, +2)row | ERowOp 1: {32} {Set 1 Uint32 : 3200} | ERowOp 1: {33} {Set 1 Uint32 : 3300} + Rows{21} Label{214 rev 1, 50b}, [34, +2)row | ERowOp 1: {34} {Set 1 Uint32 : 3400} | ERowOp 1: {35} {Set 1 Uint32 : 3500} + Rows{24} Label{244 rev 1, 50b}, [36, +2)row | ERowOp 1: {36} {Set 1 Uint32 : 3600} | ERowOp 1: {37} {Set 1 Uint32 : 3700} + Rows{25} Label{254 rev 1, 50b}, [38, +2)row | ERowOp 1: {38} {Set 1 Uint32 : 3800} | ERowOp 1: {39} {Set 1 Uint32 : 3900} >> TestYmqHttpProxy::TestCreateQueueWithTags [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::BaseCase-AlterDatabaseCreateHiveFirst-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:02:22.877022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:02:22.877126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:02:22.877168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:02:22.877200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 
2025-03-31T16:02:22.877248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:02:22.877275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:02:22.877372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:02:22.877463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:02:22.877811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:02:22.963772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:02:22.963826Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:02:22.968514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:02:22.968624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:02:22.968808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:02:22.973927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:02:22.974105Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:02:22.974577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:02:22.974767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:02:22.976513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:02:22.977553Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:02:22.977605Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:02:22.977728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:02:22.977762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:02:22.977813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:02:22.977916Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:02:22.983869Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:02:23.131620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:02:23.131875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:02:23.132085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:02:23.132312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:02:23.132370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:02:23.135749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:02:23.135908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:02:23.136116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:02:23.136188Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:02:23.136239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:02:23.136271Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:02:23.138697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:02:23.138757Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:02:23.138806Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:02:23.140641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:02:23.140692Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:02:23.140736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:02:23.140786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:02:23.148969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:02:23.151042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:02:23.151270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:02:23.152107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:02:23.152249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, 
message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:02:23.152289Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:02:23.152552Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:02:23.152595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:02:23.152748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:02:23.152815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:02:23.154816Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:02:23.154859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:02:23.155023Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:02:23.155055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:02:23.155293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:02:23.155342Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:02:23.155440Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:02:23.155472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:02:23.155510Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:02:23.155538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:02:23.155565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:02:23.155596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:02:23.155622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:02:23.155662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:02:23.155735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:02:23.155781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:02:23.155812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:02:23.157272Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:02:23.157405Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:02:23.157441Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... ecute, operationId: 106:0, at schemeshard: 72057594046678944 2025-03-31T16:02:23.971629Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 106:0 ProgressState 2025-03-31T16:02:23.971738Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-03-31T16:02:23.971780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-03-31T16:02:23.971819Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-03-31T16:02:23.971923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-03-31T16:02:23.971979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: true 2025-03-31T16:02:23.972022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-03-31T16:02:23.972064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 106:0 2025-03-31T16:02:23.972097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 106:0 2025-03-31T16:02:23.972293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-03-31T16:02:23.973681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-03-31T16:02:23.984251Z node 1 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 5 TabletID: 72075186234409546 2025-03-31T16:02:23.984567Z node 1 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 7 TxId_Deprecated: 7 TabletID: 72075186234409548 2025-03-31T16:02:23.985292Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:02:23.996364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 5 ShardOwnerId: 72057594046678944 ShardLocalIdx: 5, at schemeshard: 72057594046678944 2025-03-31T16:02:23.996744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 Forgetting tablet 72075186234409546 2025-03-31T16:02:23.998411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 7 ShardOwnerId: 72057594046678944 ShardLocalIdx: 7, at schemeshard: 72057594046678944 2025-03-31T16:02:23.998645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-03-31T16:02:24.007716Z node 1 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 6 TabletID: 72075186234409547 Forgetting tablet 72075186234409548 Forgetting tablet 72075186234409547 2025-03-31T16:02:24.009366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 6 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2025-03-31T16:02:24.009618Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-31T16:02:24.010378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-03-31T16:02:24.010712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-31T16:02:24.010759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-03-31T16:02:24.010875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-03-31T16:02:24.019708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-31T16:02:24.019791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-03-31T16:02:24.019883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:02:24.029123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5 2025-03-31T16:02:24.029203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186234409546 2025-03-31T16:02:24.029436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:7 2025-03-31T16:02:24.029464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:7 tabletId 72075186234409548 2025-03-31T16:02:24.029538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6 2025-03-31T16:02:24.029584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186234409547 2025-03-31T16:02:24.031243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-03-31T16:02:24.031354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-03-31T16:02:24.031737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2025-03-31T16:02:24.031794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2025-03-31T16:02:24.032365Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2025-03-31T16:02:24.032485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-03-31T16:02:24.032524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:930:2789] TestWaitNotification: OK eventTxId 106 2025-03-31T16:02:24.033165Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0/dir/table0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 
72057594046678944 2025-03-31T16:02:24.033385Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0/dir/table0" took 243us result status StatusPathDoesNotExist 2025-03-31T16:02:24.033576Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ServerLess0/dir/table0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ServerLess0/dir/table0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-03-31T16:02:24.034093Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:02:24.034307Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0" took 179us result status StatusPathDoesNotExist 2025-03-31T16:02:24.034469Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ServerLess0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ServerLess0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-03-31T16:02:24.034960Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:02:24.042399Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 166us result status StatusSuccess 2025-03-31T16:02:24.044221Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 7 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "SharedDB" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 
ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 wait until 72075186233409550 is deleted wait until 72075186233409551 is deleted wait until 72075186233409552 is deleted wait until 72075186233409553 is deleted 2025-03-31T16:02:24.045106Z node 1 :HIVE INFO: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186233409550 2025-03-31T16:02:24.045229Z node 1 :HIVE INFO: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186233409551 2025-03-31T16:02:24.045281Z node 1 :HIVE INFO: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186233409552 2025-03-31T16:02:24.045392Z node 1 :HIVE INFO: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186233409553 Deleted tabletId 72075186233409550 Deleted tabletId 72075186233409551 Deleted tabletId 72075186233409552 Deleted tabletId 72075186233409553
>> TestYmqHttpProxy::TestTagQueue [GOOD]
>> TFlatTableExecutor_RejectProbability::MaxedOutRejectProbability [GOOD]
>> TFlatTableExecutor_RejectProbability::SomeRejectProbability
>> BuildStatsFlatIndex::Single_History [GOOD]
>> BuildStatsFlatIndex::Single_History_Slices
>> TestKinesisHttpProxy::ListShardsEmptyFields [GOOD]
>> TPartBtreeIndexIteration::NoNodes_History [GOOD]
>> TPartBtreeIndexIteration::OneNode
>> BuildStatsFlatIndex::Single_History_Slices [GOOD]
>> BuildStatsFlatIndex::Single_Groups
>> TIterator::Single
>> TSchemeShardServerLess::TestServerlessComputeResourcesModeValidation [GOOD]
>> TSchemeShardTTLTestsWithReboots::AlterTable [GOOD]
>> TBtreeIndexBuilder::NoNodes [GOOD]
>> TBtreeIndexBuilder::OneNode [GOOD]
>> TBtreeIndexBuilder::FewNodes [GOOD]
>> TBtreeIndexBuilder::SplitBySize [GOOD]
>> TBtreeIndexNode::TIsNullBitmap [GOOD]
>> TBtreeIndexNode::CompareTo [GOOD]
>> TBtreeIndexNode::Basics [GOOD]
>> TBtreeIndexNode::Group [GOOD]
>> TBtreeIndexNode::History [GOOD]
>> TBtreeIndexNode::OneKey
>> TFlatTableExecutor_RejectProbability::SomeRejectProbability [GOOD]
>> TFlatTableExecutor_RejectProbability::ZeroRejectProbability
>> TFlatTableExecutor_IndexLoading::CalculateReadSize_BTreeIndex [GOOD]
>> TFlatTableExecutor_IndexLoading::PrechargeAndSeek_FlatIndex
>> BuildStatsFlatIndex::Single_Groups [GOOD]
>> TBtreeIndexNode::OneKey [GOOD]
>> BuildStatsFlatIndex::Single_Groups_Slices
>> TBtreeIndexNode::Reusable [GOOD]
>> TBtreeIndexNode::CutKeys [GOOD]
>> TBtreeIndexTPart::Conf [GOOD]
>> TBtreeIndexTPart::NoNodes [GOOD]
>> TBtreeIndexTPart::OneNode [GOOD]
>> TBtreeIndexTPart::FewNodes [GOOD]
>> TBtreeIndexTPart::Erases [GOOD]
>> TBtreeIndexTPart::Groups [GOOD]
>> TBtreeIndexTPart::History
>> TSharedPageCache::ThreeLeveledLRU [GOOD]
>> TSharedPageCache::S3FIFO
>> TestKinesisHttpProxy::TestWrongStream
>> TestYmqHttpProxy::TestDeleteMessage
>> TBtreeIndexTPart::History [GOOD]
>> TBtreeIndexTPart::External
>> BuildStatsFlatIndex::Single_Groups_Slices [GOOD]
>> BuildStatsFlatIndex::Single_Groups_History
>> TFlatTableExecutor_RejectProbability::ZeroRejectProbability [GOOD]
>> TFlatTableExecutor_RejectProbability::ZeroRejectProbabilityMultipleTables
>> TBtreeIndexTPart::External [GOOD]
>> TChargeBTreeIndex::NoNodes
>> TestYmqHttpProxy::TestUntagQueue
>> TestKinesisHttpProxy::ListShardsExclusiveStartShardId
>> TChargeBTreeIndex::NoNodes_Groups
>> TChargeBTreeIndex::NoNodes [GOOD]
>> TChargeBTreeIndex::FewNodes
>> BuildStatsFlatIndex::Single_Groups_History [GOOD]
>> BuildStatsFlatIndex::Single_Groups_History_Slices
>> DataShardVolatile::DistributedWriteThenScanQuery [GOOD]
>> DataShardVolatile::DistributedWriteWithAsyncIndex
>> TFlatTableExecutor_RejectProbability::ZeroRejectProbabilityMultipleTables [GOOD]
>> TFlatTableExecutor_Reschedule::TestExecuteReschedule [GOOD]
>> TFlatTableExecutor_ResourceProfile::TestExecutorSetResourceProfile [GOOD]
>> TFlatTableExecutor_ResourceProfile::TestExecutorRequestTxData
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::TestServerlessComputeResourcesModeValidation [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:02:24.418364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:02:24.418470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:02:24.418507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:02:24.418533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:02:24.418577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:02:24.418601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:02:24.418650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:02:24.418710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:02:24.418985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:02:24.511786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:02:24.511853Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:02:24.524650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:02:24.524760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:02:24.524958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized,
schemeshardId: 72057594046678944 2025-03-31T16:02:24.530685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:02:24.530906Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:02:24.531655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:02:24.531909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:02:24.534061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:02:24.535265Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:02:24.535326Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:02:24.535556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:02:24.535603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:02:24.535641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:02:24.535781Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:02:24.543844Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:02:24.680454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:02:24.680716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:02:24.680950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:02:24.681157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:02:24.681215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:02:24.684223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:02:24.684615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:02:24.684860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:02:24.684924Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:02:24.684960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:02:24.684994Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:02:24.687636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:02:24.687701Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:02:24.687756Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:02:24.689863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:02:24.689911Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:02:24.689962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:02:24.690024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:02:24.694021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:02:24.697242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:02:24.697497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:02:24.701745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:02:24.701911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:02:24.701982Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:02:24.702267Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:02:24.702322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:02:24.710188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:02:24.710334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:02:24.713624Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:02:24.713721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard 
DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:02:24.713919Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:02:24.713964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:02:24.714192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:02:24.714236Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:02:24.714337Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:02:24.714375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:02:24.714411Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:02:24.714439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:02:24.714481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:02:24.714526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:02:24.714561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:02:24.714614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:02:24.714709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:02:24.714749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:02:24.714782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:02:24.716888Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:02:24.717014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:02:24.717056Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
State::TPropose ProgressState leave, operationId 104:0, at tablet# 72057594046678944 2025-03-31T16:02:25.185451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 104 ready parts: 1/1 2025-03-31T16:02:25.185574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 104 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:02:25.187634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 104:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:104 msg type: 269090816 2025-03-31T16:02:25.187748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 104, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 104 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 104 at step: 5000005 2025-03-31T16:02:25.188050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:02:25.188191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:02:25.188235Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 104:0, at tablet# 72057594046678944 2025-03-31T16:02:25.188496Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 128 -> 240 2025-03-31T16:02:25.188665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 104:0, at tablet# 72057594046678944 2025-03-31T16:02:25.188784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-03-31T16:02:25.188874Z node 1 :FLAT_TX_SCHEMESHARD INFO: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 3], Generation: 2, ActorId:[1:602:2533], EffectiveACLVersion: 0, SubdomainVersion: 2, UserAttributesVersion: 1, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 2, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 104 2025-03-31T16:02:25.191354Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:02:25.191429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-03-31T16:02:25.191574Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:02:25.191604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at 
schemeshard: 72057594046678944, txId: 104, path id: 3 2025-03-31T16:02:25.191925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-31T16:02:25.191974Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TSyncHive, operationId 104:0, ProgressState, NeedSyncHive: 0 2025-03-31T16:02:25.192003Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 240 -> 240 2025-03-31T16:02:25.192838Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2025-03-31T16:02:25.192945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2025-03-31T16:02:25.192984Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-03-31T16:02:25.193035Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2025-03-31T16:02:25.193076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 6 2025-03-31T16:02:25.193167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-03-31T16:02:25.196044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-31T16:02:25.196100Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2025-03-31T16:02:25.196227Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-03-31T16:02:25.196261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-31T16:02:25.196295Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-03-31T16:02:25.196324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-31T16:02:25.196359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2025-03-31T16:02:25.196400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-31T16:02:25.196453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-03-31T16:02:25.196483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-03-31T16:02:25.196665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-03-31T16:02:25.197125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-03-31T16:02:25.198826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-03-31T16:02:25.198874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-03-31T16:02:25.199348Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 
72057594046678944 2025-03-31T16:02:25.199482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-03-31T16:02:25.199520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:759:2641] TestWaitNotification: OK eventTxId 104 TestModificationResults wait txId: 105 2025-03-31T16:02:25.202426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "SharedDB" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared } } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:02:25.202598Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 105:0, feature flag EnableAlterDatabaseCreateHiveFirst 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "SharedDB" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared } 2025-03-31T16:02:25.202638Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 105:0, path /MyRoot/SharedDB 2025-03-31T16:02:25.202788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 105:0, explain: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ServerlessComputeResourcesMode can be changed only for serverless, at schemeshard: 72057594046678944 2025-03-31T16:02:25.202841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 105:1, propose status:StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ServerlessComputeResourcesMode can be changed only for serverless, at schemeshard: 72057594046678944 2025-03-31T16:02:25.205274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 105, response: Status: StatusInvalidParameter Reason: "Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ServerlessComputeResourcesMode can be changed only for serverless" TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:02:25.205430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 105, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ServerlessComputeResourcesMode can be changed only for serverless, operation: ALTER DATABASE, path: /MyRoot/SharedDB TestModificationResult got TxId: 105, wait until txId: 105 TestModificationResults wait txId: 106 2025-03-31T16:02:25.208381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "ServerLess0" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeUnspecified } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:02:25.208550Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 106:0, feature flag EnableAlterDatabaseCreateHiveFirst 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "ServerLess0" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeUnspecified } 2025-03-31T16:02:25.208589Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 106:0, path /MyRoot/ServerLess0 2025-03-31T16:02:25.208725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TReject 
Propose, opId: 106:0, explain: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: can not set ServerlessComputeResourcesMode to EServerlessComputeResourcesModeUnspecified, at schemeshard: 72057594046678944 2025-03-31T16:02:25.208768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 106:1, propose status:StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: can not set ServerlessComputeResourcesMode to EServerlessComputeResourcesModeUnspecified, at schemeshard: 72057594046678944 2025-03-31T16:02:25.211266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 106, response: Status: StatusInvalidParameter Reason: "Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: can not set ServerlessComputeResourcesMode to EServerlessComputeResourcesModeUnspecified" TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:02:25.211452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 106, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: can not set ServerlessComputeResourcesMode to EServerlessComputeResourcesModeUnspecified, operation: ALTER DATABASE, path: /MyRoot/ServerLess0 TestModificationResult got TxId: 106, wait until txId: 106
>> BuildStatsHistogram::Three_Mixed_Small_2_Levels
>> TFlatTableExecutor_ResourceProfile::TestExecutorRequestTxData [GOOD]
>> TFlatTableExecutor_ResourceProfile::TestExecutorStaticMemoryLimits [GOOD]
>> TFlatTableExecutor_ResourceProfile::TestExecutorReuseStaticMemory
>> BuildStatsFlatIndex::Single_Groups_History_Slices [GOOD]
>> BuildStatsFlatIndex::Mixed
>> KqpScan::ScanDuringSplit [GOOD]
>> KqpScan::ScanAfterSplitSlowMetaRead
>> BuildStatsHistogram::Three_Mixed_Small_2_Levels [GOOD]
>> BuildStatsHistogram::Three_Mixed_Small_2_Levels_3_Buckets
>> TFlatTableExecutor_ResourceProfile::TestExecutorReuseStaticMemory [GOOD]
>> TFlatTableExecutor_ResourceProfile::TestExecutorTxDataLimitExceeded [GOOD]
>> TFlatTableExecutor_ResourceProfile::TestExecutorRequestPages
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTestsWithReboots::AlterTable [GOOD]
Test command err:
==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [1:135:2157] sender: [1:136:2058] recipient: [1:111:2142] 2025-03-31T16:01:14.921512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:01:14.921589Z
node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:01:14.921631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:01:14.921666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:01:14.921711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:01:14.921740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:01:14.921822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:01:14.921902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:01:14.922281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:01:15.030556Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-03-31T16:01:15.030614Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:169:2058] recipient: [1:15:2062] 2025-03-31T16:01:15.057700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:01:15.057836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:01:15.057970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:01:15.062056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:01:15.062344Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:01:15.063416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:15.063632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:01:15.076853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:15.078872Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:15.078972Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:15.079266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:01:15.079333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:15.079392Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:01:15.079584Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:210:2058] recipient: [1:208:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:210:2058] recipient: [1:208:2209] Leader for TabletID 72057594037968897 is [1:214:2213] sender: [1:215:2058] recipient: [1:208:2209] 2025-03-31T16:01:15.086742Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:235:2058] recipient: [1:15:2062] 2025-03-31T16:01:15.278782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:01:15.279010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:15.279273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:01:15.279571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:01:15.279636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:15.288127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:15.288300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:01:15.288528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:15.288594Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:01:15.288630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:01:15.288663Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:01:15.292385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:15.292461Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:01:15.292500Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:01:15.294403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:15.294459Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:15.294506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 
2025-03-31T16:01:15.294582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:01:15.312838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:01:15.315279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:01:15.315468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:135:2157] sender: [1:250:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:01:15.316504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:15.316638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 135 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:01:15.316683Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:15.317018Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:01:15.317070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:15.317238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:01:15.317330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:01:15.321224Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:15.321287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:15.321474Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:15.321516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:202:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:01:15.322224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:15.322283Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:01:15.322390Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:01:15.322434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:15.322474Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025 ... :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:02:25.154371Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1003 Coordinator: 72057594046316545 AckTo { RawX1: 128 RawX2: 219043334249 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:02:25.154431Z node 51 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId# 1003:0 HandleReply TEvOperationPlan, operationId: 1003:0, stepId: 5000004, at schemeshard: 72057594046678944 2025-03-31T16:02:25.154779Z node 51 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 128 -> 129 2025-03-31T16:02:25.154947Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000004 2025-03-31T16:02:25.161743Z node 51 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:02:25.161804Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-03-31T16:02:25.162138Z node 51 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:02:25.162184Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [51:207:2209], at schemeshard: 72057594046678944, txId: 1003, path id: 3 2025-03-31T16:02:25.163496Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-03-31T16:02:25.163573Z node 51 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:0 ProgressState at tablet: 72057594046678944 2025-03-31T16:02:25.164594Z node 51 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2025-03-31T16:02:25.164723Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2025-03-31T16:02:25.164764Z node 51 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2025-03-31T16:02:25.164805Z node 51 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2025-03-31T16:02:25.164852Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-03-31T16:02:25.164963Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 1003 2025-03-31T16:02:25.167881Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 
CpuTimeUsec: 1225 } } 2025-03-31T16:02:25.167960Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 0 2025-03-31T16:02:25.168138Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1225 } } 2025-03-31T16:02:25.168255Z node 51 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1225 } } 2025-03-31T16:02:25.170106Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 334 RawX2: 219043334417 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2025-03-31T16:02:25.170168Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 0 2025-03-31T16:02:25.170315Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Source { RawX1: 334 RawX2: 219043334417 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2025-03-31T16:02:25.170367Z node 51 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-03-31T16:02:25.170459Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 334 RawX2: 219043334417 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2025-03-31T16:02:25.170518Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-31T16:02:25.170557Z node 51 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1003:0, at schemeshard: 72057594046678944 2025-03-31T16:02:25.170601Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-03-31T16:02:25.170648Z node 51 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 129 -> 240 2025-03-31T16:02:25.171984Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-03-31T16:02:25.183316Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2025-03-31T16:02:25.183606Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2025-03-31T16:02:25.184153Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-03-31T16:02:25.184210Z node 51 
:FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:0 ProgressState 2025-03-31T16:02:25.184339Z node 51 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2025-03-31T16:02:25.184381Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-03-31T16:02:25.184431Z node 51 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2025-03-31T16:02:25.184465Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-03-31T16:02:25.184509Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2025-03-31T16:02:25.184557Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-03-31T16:02:25.184600Z node 51 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2025-03-31T16:02:25.184638Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2025-03-31T16:02:25.184776Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2025-03-31T16:02:25.208643Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-03-31T16:02:25.208715Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-03-31T16:02:25.209160Z node 51 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-03-31T16:02:25.209278Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-03-31T16:02:25.209318Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [51:456:2429] TestWaitNotification: OK eventTxId 1003 2025-03-31T16:02:25.209837Z node 51 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:02:25.210088Z node 51 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 294us result status StatusSuccess 2025-03-31T16:02:25.210657Z node 51 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 
3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> BuildStatsHistogram::Three_Mixed_Small_2_Levels_3_Buckets [GOOD] >> BuildStatsHistogram::Three_Mixed_Small_1_Level >> BuildStatsFlatIndex::Mixed [GOOD] >> BuildStatsFlatIndex::Mixed_Groups >> BuildStatsHistogram::Three_Mixed_Small_1_Level [GOOD] >> BuildStatsHistogram::Three_Mixed_Small_0_Levels [GOOD] >> BuildStatsHistogram::Three_Serial_Small_2_Levels >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestPages [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorPageLimitExceeded >> TIterator::Single [GOOD] >> TIterator::SingleReverse |72.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut |72.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut |72.6%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_transfer/test-results/unittest/{meta.json ... 
results_accumulator.log} |72.6%| [LD] {RESULT} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut >> TChargeBTreeIndex::FewNodes [GOOD] >> TChargeBTreeIndex::FewNodes_Groups >> TFlatTableExecutor_ResourceProfile::TestExecutorPageLimitExceeded [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestMemory >> TSchemeShardServerLess::StorageBilling >> TSchemeShardServerLess::Fake [GOOD] >> BuildStatsHistogram::Three_Serial_Small_2_Levels [GOOD] >> BuildStatsHistogram::Three_Serial_Small_2_Levels_3_Buckets [GOOD] >> BuildStatsHistogram::Three_Serial_Small_1_Level >> BuildStatsFlatIndex::Mixed_Groups [GOOD] >> BuildStatsFlatIndex::Mixed_Groups_History >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestMemory [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestMemoryFollower >> KqpScan::ScanRetryReadRanges [GOOD] >> BuildStatsHistogram::Three_Serial_Small_1_Level [GOOD] >> BuildStatsHistogram::Three_Serial_Small_0_Levels [GOOD] >> BuildStatsMixedIndex::Single >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestMemoryFollower [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorMemoryLimitExceeded >> BuildStatsFlatIndex::Mixed_Groups_History [GOOD] >> BuildStatsFlatIndex::Serial >> TSchemeShardServerLess::TestServerlessComputeResourcesModeFeatureFlag [GOOD] >> BuildStatsMixedIndex::Single [GOOD] >> BuildStatsMixedIndex::Single_Slices >> TFlatTableExecutor_ResourceProfile::TestExecutorMemoryLimitExceeded [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorPreserveTxData [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorTxDataGC [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorTxPartialDataHold >> BuildStatsMixedIndex::Single_Slices [GOOD] >> BuildStatsMixedIndex::Single_History >> TPartBtreeIndexIteration::OneNode [GOOD] >> TPartBtreeIndexIteration::OneNode_Groups >> BuildStatsFlatIndex::Serial [GOOD] >> BuildStatsFlatIndex::Serial_Groups >> TFlatTableExecutor_ResourceProfile::TestExecutorTxPartialDataHold [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorTxHoldAndUse [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorTxHoldOnRelease [GOOD] >> TFlatTableExecutor_ResourceProfile::TestUpdateConfig [GOOD] >> TFlatTableExecutor_SliceOverlapScan::TestSliceOverlapScan >> TChargeBTreeIndex::NoNodes_Groups [GOOD] >> TChargeBTreeIndex::NoNodes_History >> TSharedPageCache::S3FIFO [GOOD] >> TSharedPageCache::ClockPro |72.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::Fake [GOOD] >> BuildStatsMixedIndex::Single_History [GOOD] >> BuildStatsMixedIndex::Single_History_Slices >> BuildStatsFlatIndex::Serial_Groups [GOOD] >> BuildStatsFlatIndex::Serial_Groups_History >> TChargeBTreeIndex::NoNodes_History [GOOD] >> TChargeBTreeIndex::NoNodes_Groups_History >> KqpScan::ScanDuringSplitThenMerge [GOOD] >> KqpScan::ScanPg >> BuildStatsMixedIndex::Single_History_Slices [GOOD] >> BuildStatsMixedIndex::Single_Groups ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::TestServerlessComputeResourcesModeFeatureFlag [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:02:25.272127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:02:25.272203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:02:25.272243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:02:25.272273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:02:25.272321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:02:25.272344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:02:25.272398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:02:25.272465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:02:25.272740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:02:25.364687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:02:25.364767Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:02:25.371259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:02:25.371401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:02:25.371627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:02:25.380320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:02:25.380607Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:02:25.381322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:02:25.381615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:02:25.384561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:02:25.385842Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:02:25.385916Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:02:25.386126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:02:25.386172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:02:25.386210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:02:25.386363Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:02:25.399274Z node 1 :HIVE INFO: [72057594037968897] started, primary 
subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:02:25.526017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:02:25.526220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:02:25.526409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:02:25.526792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:02:25.526860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:02:25.532251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:02:25.532445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:02:25.532677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:02:25.532736Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:02:25.532776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:02:25.532812Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:02:25.544654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:02:25.544732Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:02:25.544815Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:02:25.548759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:02:25.548832Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:02:25.548902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:02:25.548977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:02:25.559030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:02:25.564151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 
72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:02:25.564407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:02:25.565582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:02:25.565734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:02:25.565780Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:02:25.566111Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:02:25.566174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:02:25.566388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:02:25.566490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:02:25.576305Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:02:25.576379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:02:25.576611Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:02:25.576653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:02:25.576897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:02:25.576944Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:02:25.577048Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:02:25.577084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:02:25.577127Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:02:25.577155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:02:25.577190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:02:25.577244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:02:25.577277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:02:25.577333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:02:25.577411Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:02:25.577446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:02:25.577482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:02:25.579750Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:02:25.579916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:02:25.579955Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... ly TEvInitTenantSchemeShardResult operationId: 104:0 at schemeshard: 72057594046678944 2025-03-31T16:02:26.824571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TConfigureParts operationId# 104:0 Got OK TEvInitTenantSchemeShardResult from schemeshard tablet: 72075186234409549 shardIdx: 72057594046678944:5 at schemeshard: 72057594046678944 2025-03-31T16:02:26.827861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-31T16:02:26.827937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-03-31T16:02:26.829674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186234409551, partId: 0 2025-03-31T16:02:26.829839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Status: SUCCESS OnTabletId: 72075186234409551 2025-03-31T16:02:26.829887Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 104:0 HandleReply TEvConfigureStatus operationId:104:0 at schemeshard:72057594046678944 2025-03-31T16:02:26.829927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TConfigureParts operationId# 104:0 Got OK TEvConfigureStatus from tablet# 72075186234409551 shardIdx# 72057594046678944:7 at schemeshard# 72057594046678944 2025-03-31T16:02:26.900365Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 3 -> 128 2025-03-31T16:02:26.907911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-31T16:02:26.912818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-31T16:02:26.913031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-31T16:02:26.913094Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-31T16:02:26.913151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 104:0, at tablet# 72057594046678944 2025-03-31T16:02:26.913206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 104 ready parts: 1/1 2025-03-31T16:02:26.913360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 
message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 104 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:02:26.916531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 104:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:104 msg type: 269090816 2025-03-31T16:02:26.916667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 104, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 104 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 104 at step: 5000005 2025-03-31T16:02:26.916988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:02:26.917117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:02:26.917178Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 104:0, at tablet# 72057594046678944 2025-03-31T16:02:26.917489Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 128 -> 240 2025-03-31T16:02:26.917557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 104:0, at tablet# 72057594046678944 2025-03-31T16:02:26.917667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-03-31T16:02:26.917773Z node 1 :FLAT_TX_SCHEMESHARD INFO: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 3], Generation: 2, ActorId:[1:602:2533], EffectiveACLVersion: 0, SubdomainVersion: 2, UserAttributesVersion: 1, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 2, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 104 2025-03-31T16:02:26.920324Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:02:26.920391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-03-31T16:02:26.920570Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:02:26.920607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 104, path id: 3 2025-03-31T16:02:26.920941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-31T16:02:26.920999Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TSyncHive, operationId 104:0, ProgressState, NeedSyncHive: 0 
2025-03-31T16:02:26.921268Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 240 -> 240 2025-03-31T16:02:26.922370Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2025-03-31T16:02:26.922490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2025-03-31T16:02:26.922532Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-03-31T16:02:26.922590Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2025-03-31T16:02:26.922637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 6 2025-03-31T16:02:26.922745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-03-31T16:02:26.926014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-31T16:02:26.926094Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2025-03-31T16:02:26.926234Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-03-31T16:02:26.926274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-31T16:02:26.926316Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-03-31T16:02:26.926530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-31T16:02:26.926577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2025-03-31T16:02:26.926626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-31T16:02:26.926668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-03-31T16:02:26.926711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-03-31T16:02:26.926907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-03-31T16:02:26.927448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-03-31T16:02:26.929369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-03-31T16:02:26.929420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-03-31T16:02:26.929870Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-03-31T16:02:27.004524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-03-31T16:02:27.004618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:759:2641] TestWaitNotification: OK eventTxId 104 
TestModificationResults wait txId: 105 2025-03-31T16:02:27.007975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "ServerLess0" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeExclusive } } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:02:27.008172Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 105:0, feature flag EnableAlterDatabaseCreateHiveFirst 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "ServerLess0" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeExclusive } 2025-03-31T16:02:27.008214Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 105:0, path /MyRoot/ServerLess0 2025-03-31T16:02:27.008388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 105:0, explain: Invalid AlterExtSubDomain request: Unsupported: feature flag EnableServerlessExclusiveDynamicNodes is off, at schemeshard: 72057594046678944 2025-03-31T16:02:27.008438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 105:1, propose status:StatusPreconditionFailed, reason: Invalid AlterExtSubDomain request: Unsupported: feature flag EnableServerlessExclusiveDynamicNodes is off, at schemeshard: 72057594046678944 2025-03-31T16:02:27.011520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 105, response: Status: StatusPreconditionFailed Reason: "Invalid AlterExtSubDomain request: Unsupported: feature flag EnableServerlessExclusiveDynamicNodes is off" TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:02:27.011751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 105, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Invalid AlterExtSubDomain request: Unsupported: feature flag EnableServerlessExclusiveDynamicNodes is off, operation: ALTER DATABASE, path: /MyRoot/ServerLess0 TestModificationResult got TxId: 105, wait until txId: 105 >> TIterator::SingleReverse [GOOD] >> BuildStatsFlatIndex::Serial_Groups_History [GOOD] >> BuildStatsHistogram::Single >> TIterator::Mixed >> BuildStatsMixedIndex::Single_Groups [GOOD] >> BuildStatsMixedIndex::Single_Groups_Slices >> TSchemeShardServerLess::TestServerlessComputeResourcesMode >> BuildStatsMixedIndex::Single_Groups_Slices [GOOD] >> BuildStatsMixedIndex::Single_Groups_History |72.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp_scan/unittest >> KqpScan::ScanRetryReadRanges [GOOD] Test command err: 2025-03-31T16:02:06.863665Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:698:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:02:06.864143Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:02:06.864394Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:02:06.865667Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:695:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:02:06.866056Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:02:06.866247Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001a03/r3tmp/tmpobjo8G/pdisk_1.dat 2025-03-31T16:02:07.495280Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:02:07.715829Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:02:07.834668Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:02:07.834816Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:02:07.844711Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:02:07.844860Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:02:07.865092Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:02:07.865859Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:02:07.866324Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:02:08.186427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:02:08.901235Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1401:2836], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:08.901427Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1412:2841], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:08.901542Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:08.908577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:02:09.496230Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1415:2844], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:02:09.623454Z node 1 :TX_PROXY ERROR: Actor# [1:1543:2913] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:02:11.030910Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqpf0nw0fkgbcxtc2v2wwzy4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGZlN2QxMDItYWI4YzU3MTEtZGE3NzkwYTMtNTdjYWUxMWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root -- nodeId: 2 2025-03-31T16:02:11.867862Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqpf0r049efs480grabhax8p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTkyYjFmNTAtZmFhMzY5MjYtMTI4MDcyYmItODMxMmZiMDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root -- EvScan [1:1622:2965] -> [2:1578:2438] -- EvScanData from [2:1626:2445]: pass 2025-03-31T16:02:12.630073Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqpf0r049efs480grabhax8p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTkyYjFmNTAtZmFhMzY5MjYtMTI4MDcyYmItODMxMmZiMDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root -- EvStreamData: {"ResultSet":{"columns":[{"name":"column0","type":{"optional_type":{"item":{"type_id":4}}}}],"rows":[{"items":[{"uint64_value":596400}]}]},"SeqNo":1,"QueryResultIndex":0,"ChannelId":1} 2025-03-31T16:02:12.633247Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 2000, txId: 281474976715661] shutting down 2025-03-31T16:02:21.828308Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:700:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:02:21.828592Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:02:21.828921Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:02:21.830587Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:697:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:02:21.830965Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:02:21.831374Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001a03/r3tmp/tmpBeCmAI/pdisk_1.dat 2025-03-31T16:02:22.219307Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:02:22.384677Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:02:22.484484Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:02:22.484706Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:02:22.492369Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:02:22.492500Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:02:22.507220Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-03-31T16:02:22.507851Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:02:22.508336Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:02:22.817723Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:02:23.489561Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1404:2839], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:23.489699Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1414:2844], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:23.489803Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:23.498537Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:02:24.044143Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:1418:2847], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking }
2025-03-31T16:02:24.136920Z node 3 :TX_PROXY ERROR: Actor# [3:1546:2916] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-31T16:02:24.979595Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqpf143z54e65jyr2eja7c2f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ODNjNmNjZGYtYzdhYzQ4NTktOTM5MDQ5N2ItMTMwYjgwMGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
-- nodeId: 4
2025-03-31T16:02:25.911410Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqpf15kz9gmbc12rw305nmvj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=OWMzNWE5YzYtNDJjNmRmYTYtOGY0ZWI3NC02NTQyOTNmYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
-- EvScan [3:1626:2969] -> [4:1581:2438]
-- EvScanData from [4:1630:2445]: pass
-- EvStreamData: {"ResultSet":{"columns":[{"name":"key","type":{"optional_type":{"item":{"type_id":2}}}},{"name":"value","type":{"optional_type":{"item":{"type_id":2}}}}],"rows":[{"items":[{"uint32_value":2},{"uint32_value":22}]},{"items":[{"uint32_value":21},{"uint32_value":2121}]},{"items":[{"uint32_value":22},{"uint32_value":2222}]},{"items":[{"uint32_value":23},{"uint32_value":2323}]},{"items":[{"uint32_value":24},{"uint32_value":2424}]},{"items":[{"uint32_value":25},{"uint32_value":2525}]},{"items":[{"uint32_value":26},{"uint32_value":2626}]},{"items":[{"uint32_value":27},{"uint32_value":2727}]},{"items":[{"uint32_value":28},{"uint32_value":2828}]},{"items":[{"uint32_value":29},{"uint32_value":2929}]},{"items":[{"uint32_value":40},{"uint32_value":4040}]},{"items":[{"uint32_value":41},{"uint32_value":4141}]},{"items":[{"uint32_value":42},{"uint32_value":4242}]},{"items":[{"uint32_value":43},{"uint32_value":4343}]},{"items":[{"uint32_value":44},{"uint32_value":4444}]},{"items":[{"uint32_value":45},{"uint32_value":4545}]},{"items":[{"uint32_value":46},{"uint32_value":4646}]},{"items":[{"uint32_value":47},{"uint32_value":4747}]},{"items":[{"uint32_value":48},{"uint32_value":4848}]},{"items":[{"uint32_value":49},{"uint32_value":4949}]},{"items":[{"uint32_value":50},{"uint32_value":5050}]}]},"SeqNo":1,"QueryResultIndex":0,"ChannelId":2}
-- EvStreamData: {"ResultSet":{"columns":[{"name":"key","type":{"optional_type":{"item":{"type_id":2}}}},{"name":"value","type":{"optional_type":{"item":{"type_id":2}}}}]},"SeqNo":2,"QueryResultIndex":0,"ChannelId":2}
2025-03-31T16:02:25.926909Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 2000, txId: 281474976715661] shutting down
>> BuildStatsMixedIndex::Single_Groups_History [GOOD]
>> BuildStatsMixedIndex::Single_Groups_History_Slices
>> BuildStatsMixedIndex::Single_Groups_History_Slices [GOOD]
>> BuildStatsMixedIndex::Mixed
>> IncrementalBackup::SimpleBackup [GOOD]
>> IncrementalBackup::MultiRestore
>> IncrementalBackup::SimpleRestoreBackupCollection+WithIncremental [GOOD]
>> IncrementalBackup::SimpleRestoreBackupCollection-WithIncremental
>> BuildStatsMixedIndex::Mixed [GOOD]
>> BuildStatsMixedIndex::Mixed_Groups
>> TSchemeShardServerLess::BaseCase-AlterDatabaseCreateHiveFirst-true
>> BuildStatsMixedIndex::Mixed_Groups [GOOD]
>> BuildStatsMixedIndex::Mixed_Groups_History
>> TestYmqHttpProxy::TestListQueues [GOOD]
>> TChargeBTreeIndex::FewNodes_Groups [GOOD]
>> TChargeBTreeIndex::FewNodes_History
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-71 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-72
>> BuildStatsMixedIndex::Mixed_Groups_History [GOOD]
>> BuildStatsMixedIndex::Serial
>> IncrementalBackup::BackupRestore
>> BuildStatsMixedIndex::Serial [GOOD]
>> BuildStatsMixedIndex::Serial_Groups
>> TSharedPageCache::ClockPro [GOOD]
>> TSharedPageCache::ReplacementPolicySwitch
>> TSchemeShardServerLess::TestServerlessComputeResourcesMode [GOOD]
>> TestKinesisHttpProxy::GoodRequestGetRecordsCbor [GOOD]
>> BuildStatsMixedIndex::Serial_Groups [GOOD]
>> BuildStatsMixedIndex::Serial_Groups_History
>> TSharedPageCache::ReplacementPolicySwitch [GOOD]
>> TSharedPageCache::BigCache_BTreeIndex
>> TestYmqHttpProxy::TestPurgeQueue
>> TFlatTableExecutor_SliceOverlapScan::TestSliceOverlapScan [GOOD]
>> TFlatTableExecutor_SnapshotWithCommits::SnapshotWithCommits [GOOD]
>> TFlatTableExecutor_StickyPages::TestNonSticky_FlatIndex
>> BuildStatsMixedIndex::Serial_Groups_History [GOOD]
>> BuildStatsMixedIndex::Single_LowResolution
>> TFlatTableExecutor_StickyPages::TestNonSticky_FlatIndex [GOOD]
>> TFlatTableExecutor_StickyPages::TestNonSticky_BTreeIndex
>> TChargeBTreeIndex::NoNodes_Groups_History [GOOD]
>> TChargeBTreeIndex::OneNode
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::TestServerlessComputeResourcesMode [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141]
2025-03-31T16:02:28.922526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-03-31T16:02:28.922607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-31T16:02:28.922644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-03-31T16:02:28.922669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-03-31T16:02:28.922704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-03-31T16:02:28.922737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-03-31T16:02:28.922794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-31T16:02:28.922858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-03-31T16:02:28.923132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-03-31T16:02:29.012696Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-31T16:02:29.012777Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:02:29.017737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-03-31T16:02:29.017835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-03-31T16:02:29.018018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-03-31T16:02:29.023132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-03-31T16:02:29.023390Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-03-31T16:02:29.024018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-03-31T16:02:29.024242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-03-31T16:02:29.026168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-03-31T16:02:29.027373Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-31T16:02:29.027436Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-31T16:02:29.027611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-03-31T16:02:29.027656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-31T16:02:29.027691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-03-31T16:02:29.027829Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-03-31T16:02:29.034645Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062]
2025-03-31T16:02:29.173709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-03-31T16:02:29.173932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:02:29.174134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-03-31T16:02:29.174325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-03-31T16:02:29.174372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:02:29.185008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-03-31T16:02:29.185160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-03-31T16:02:29.185356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:02:29.185422Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-03-31T16:02:29.185454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-03-31T16:02:29.185485Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-03-31T16:02:29.188002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:02:29.188073Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-03-31T16:02:29.188126Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-03-31T16:02:29.191004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:02:29.191062Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:02:29.191141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-03-31T16:02:29.191210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-03-31T16:02:29.195120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-03-31T16:02:29.200156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-03-31T16:02:29.200442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-03-31T16:02:29.201408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-03-31T16:02:29.201546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-03-31T16:02:29.201609Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-03-31T16:02:29.201888Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-03-31T16:02:29.201940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-03-31T16:02:29.202094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-03-31T16:02:29.202171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-03-31T16:02:29.204678Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-31T16:02:29.204739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-31T16:02:29.204934Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-31T16:02:29.204975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1
2025-03-31T16:02:29.205190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:02:29.205231Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-03-31T16:02:29.205321Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-03-31T16:02:29.205354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-31T16:02:29.205389Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-03-31T16:02:29.205418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-31T16:02:29.205449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-03-31T16:02:29.205489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-31T16:02:29.205529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-03-31T16:02:29.205584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-03-31T16:02:29.205656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-03-31T16:02:29.205709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-03-31T16:02:29.205741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-03-31T16:02:29.207691Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-03-31T16:02:29.207818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-03-31T16:02:29.207855Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ...
ator: 18446744073709551615 TenantGraphShard: 18446744073709551615
2025-03-31T16:02:29.768422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoExecute, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944
2025-03-31T16:02:29.768531Z node 1 :FLAT_TX_SCHEMESHARD INFO: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 3], Generation: 2, ActorId:[1:606:2536], EffectiveACLVersion: 0, SubdomainVersion: 4, UserAttributesVersion: 1, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 4, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944
2025-03-31T16:02:29.769025Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186234409549
2025-03-31T16:02:29.769075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186234409549, txId: 0, path id: [OwnerId: 72075186234409549, LocalPathId: 1]
2025-03-31T16:02:29.769256Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186234409549
2025-03-31T16:02:29.769321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:704:2606], at schemeshard: 72075186234409549, txId: 0, path id: 1
2025-03-31T16:02:29.770804Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186234409549, msg: Owner: 72075186234409549 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72075186234409549, cookie: 0
2025-03-31T16:02:29.771416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 106:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:3 msg type: 268697640
2025-03-31T16:02:29.771541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 106, partId: 0, tablet: 72075186233409546
2025-03-31T16:02:29.771706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106
2025-03-31T16:02:29.771769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944
2025-03-31T16:02:29.771962Z node 1 :HIVE INFO: [72075186233409546] TEvUpdateDomain, msg: DomainKey { SchemeShard: 72057594046678944 PathId: 3 } ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared TxId: 106
2025-03-31T16:02:29.772054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Update domain reply, message: Origin: 72075186233409546 TxId: 106, at schemeshard: 72057594046678944
2025-03-31T16:02:29.772097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 106, tablet: 72075186233409546, partId: 0
2025-03-31T16:02:29.772228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 106:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 106
2025-03-31T16:02:29.772279Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TSyncHive, operationId 106:0, HandleReply TEvUpdateDomainReply, from hive: 72075186233409546
2025-03-31T16:02:29.772332Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 106:0 138 -> 240
2025-03-31T16:02:29.775048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 106:0, at schemeshard: 72057594046678944
2025-03-31T16:02:29.775280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944
2025-03-31T16:02:29.775330Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 106:0 ProgressState
2025-03-31T16:02:29.775474Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1
2025-03-31T16:02:29.775519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1
2025-03-31T16:02:29.775578Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1
2025-03-31T16:02:29.775641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1
2025-03-31T16:02:29.775685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: true
2025-03-31T16:02:29.775732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1
2025-03-31T16:02:29.775773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 106:0
2025-03-31T16:02:29.775806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 106:0
2025-03-31T16:02:29.775916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5
TestModificationResult got TxId: 106, wait until txId: 106
TestWaitNotification wait txId: 106
2025-03-31T16:02:29.778512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion
2025-03-31T16:02:29.778594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106
2025-03-31T16:02:29.779201Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944
2025-03-31T16:02:29.779329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult
2025-03-31T16:02:29.779386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:841:2723]
TestWaitNotification: OK eventTxId 106
2025-03-31T16:02:29.780185Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-03-31T16:02:29.780451Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0" took 278us result status StatusSuccess
2025-03-31T16:02:29.780847Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLess0" PathDescription { Self { Name: "ServerLess0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409551 SchemeShard: 72075186234409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SharedHive: 72075186233409546 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
2025-03-31T16:02:29.781546Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186234409549
2025-03-31T16:02:29.781750Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186234409549 describe path "/MyRoot/ServerLess0" took 206us result status StatusSuccess
2025-03-31T16:02:29.782130Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLess0" PathDescription { Self { Name: "MyRoot/ServerLess0" PathId: 1 SchemeshardId: 72075186234409549 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 3 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409551 SchemeShard: 72075186234409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot/ServerLess0" } SharedHive: 72075186233409546 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared } } PathId: 1 PathOwnerId: 72075186234409549, at schemeshard: 72075186234409549
2025-03-31T16:02:29.782801Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-03-31T16:02:29.782990Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0" took 195us result status StatusSuccess
2025-03-31T16:02:29.783370Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLess0" PathDescription { Self { Name: "ServerLess0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409551 SchemeShard: 72075186234409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SharedHive: 72075186233409546 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
2025-03-31T16:02:29.783930Z node 1 :HIVE INFO: [72075186233409546] TEvRequestDomainInfo, 72057594046678944:3
>> IncrementalBackup::SimpleRestore
>> BuildStatsMixedIndex::Single_LowResolution [GOOD]
>> BuildStatsMixedIndex::Single_Slices_LowResolution
>> KqpJoinOrder::TPCDS23+ColumnStore [GOOD]
>> TFlatTableExecutor_StickyPages::TestNonSticky_BTreeIndex [GOOD]
>> TFlatTableExecutor_StickyPages::TestSticky
>> TFlatTableExecutor_StickyPages::TestSticky [GOOD]
>> TFlatTableExecutor_StickyPages::TestNonStickyGroup_FlatIndex
>> BuildStatsMixedIndex::Single_Slices_LowResolution [GOOD]
>> BuildStatsMixedIndex::Single_Groups_LowResolution
>> TestKinesisHttpProxy::TestListStreamConsumersWithToken [GOOD]
>> TChargeBTreeIndex::FewNodes_History [GOOD]
>> TChargeBTreeIndex::FewNodes_Sticky
>> TSharedPageCache::BigCache_BTreeIndex [GOOD]
>> TSharedPageCache::BigCache_FlatIndex
>> TChargeBTreeIndex::OneNode [GOOD]
>> TChargeBTreeIndex::OneNode_Groups
>> TFlatTableExecutor_StickyPages::TestNonStickyGroup_FlatIndex [GOOD]
>> TFlatTableExecutor_StickyPages::TestNonStickyGroup_BTreeIndex
>> BuildStatsMixedIndex::Single_Groups_LowResolution [GOOD]
>> BuildStatsMixedIndex::Single_Groups_Slices_LowResolution
>> TestKinesisHttpProxy::GoodRequestGetRecordsLongStreamName
>> TFlatTableExecutor_StickyPages::TestNonStickyGroup_BTreeIndex [GOOD]
>> TFlatTableExecutor_StickyPages::TestStickyMain
>> KqpQuery::UdfTerminate
>> TFlatTableExecutor_StickyPages::TestStickyMain [GOOD]
>> TFlatTableExecutor_StickyPages::TestStickyAlt_FlatIndex
>> BuildStatsMixedIndex::Single_Groups_Slices_LowResolution [GOOD]
>> BuildStatsMixedIndex::Single_Groups_History_LowResolution
>> TFlatTableExecutor_StickyPages::TestStickyAlt_FlatIndex [GOOD]
>> TFlatTableExecutor_StickyPages::TestStickyAlt_BTreeIndex
>> TSharedPageCache::BigCache_FlatIndex [GOOD]
>> TSharedPageCache::MiddleCache_BTreeIndex
>> TFlatTableExecutor_StickyPages::TestStickyAlt_BTreeIndex [GOOD]
>> TFlatTableExecutor_StickyPages::TestStickyAll
>> TSchemeShardServerLess::BaseCase-AlterDatabaseCreateHiveFirst-true [GOOD]
>> BuildStatsMixedIndex::Single_Groups_History_LowResolution [GOOD]
>> BuildStatsMixedIndex::Single_Groups_History_Slices_LowResolution
>> TFlatTableExecutor_StickyPages::TestStickyAll [GOOD]
>> TFlatTableExecutor_StickyPages::TestAlterAddFamilySticky
>> TChargeBTreeIndex::FewNodes_Sticky [GOOD]
>> TChargeBTreeIndex::FewNodes_Groups_History
>> TFlatTableExecutor_StickyPages::TestAlterAddFamilySticky [GOOD]
>> TFlatTableExecutor_StickyPages::TestAlterAddFamilyPartiallySticky
>> TestKinesisHttpProxy::TestCounters
>> TFlatTableExecutor_StickyPages::TestAlterAddFamilyPartiallySticky [GOOD]
>> TFlatTableExecutor_VersionedLargeBlobs::TestMultiVersionCompactionLargeBlobs
>> BuildStatsMixedIndex::Single_Groups_History_Slices_LowResolution [GOOD]
>> Charge::Lookups [GOOD]
>> Charge::ByKeysBasics
>> TFlatTableExecutor_VersionedLargeBlobs::TestMultiVersionCompactionLargeBlobs [GOOD]
>> TFlatTableExecutor_VersionedRows::TestVersionedRows
>> Charge::ByKeysBasics [GOOD]
>> Charge::ByKeysGroups [GOOD]
>> Charge::ByKeysGroupsLimits [GOOD]
>> Charge::ByKeysLimits [GOOD]
>> Charge::ByKeysReverse [GOOD]
>> Charge::ByKeysHistory [GOOD]
>> Charge::ByKeysIndex [GOOD]
>> Charge::ByRows [GOOD]
>> Charge::ByRowsReverse [GOOD]
>> Charge::ByRowsLimits [GOOD]
>> Charge::ByRowsLimitsReverse
>> Charge::ByRowsLimitsReverse [GOOD]
>> DBase::Basics [GOOD]
>> DBase::Defaults [GOOD]
>> DBase::Garbage [GOOD]
>> DBase::Affects [GOOD]
>> DBase::Annex [GOOD]
>> DBase::AnnexRollbackChanges [GOOD]
>> DBase::Outer [GOOD]
>> DBase::KIKIMR_15506_MissingSnapshotKeys [GOOD]
>> DBase::EraseCacheWithUncommittedChanges [GOOD]
>> DBase::EraseCacheWithUncommittedChangesCompacted [GOOD]
>> DBase::AlterAndUpsertChangesVisibility [GOOD]
>> DBase::DropModifiedTable [GOOD]
>> DBase::KIKIMR_15598_Many_MemTables
>> SystemView::TopPartitionsByTliFields [GOOD]
>> TSharedPageCache::MiddleCache_BTreeIndex [GOOD]
>> TSharedPageCache::MiddleCache_FlatIndex
>> SystemView::TabletsShards
>> TFlatTableExecutor_VersionedRows::TestVersionedRows [GOOD]
>> TFlatTableExecutor_VersionedRows::TestVersionedRowsSmallBlobs
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::BaseCase-AlterDatabaseCreateHiveFirst-true [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141]
2025-03-31T16:02:30.092889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-03-31T16:02:30.093027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-31T16:02:30.093080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-03-31T16:02:30.093131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-03-31T16:02:30.093192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-03-31T16:02:30.093250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-03-31T16:02:30.093355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-31T16:02:30.093479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-03-31T16:02:30.093887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-03-31T16:02:30.178990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-31T16:02:30.179052Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:02:30.184413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-03-31T16:02:30.184543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-03-31T16:02:30.184772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-03-31T16:02:30.193551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-03-31T16:02:30.193885Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-03-31T16:02:30.194698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-03-31T16:02:30.194991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-03-31T16:02:30.198844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-03-31T16:02:30.200295Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-31T16:02:30.200363Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-31T16:02:30.200506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-03-31T16:02:30.200557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-31T16:02:30.200590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-03-31T16:02:30.200716Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-03-31T16:02:30.214293Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062]
2025-03-31T16:02:30.403501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-03-31T16:02:30.403792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:02:30.404048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-03-31T16:02:30.404326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-03-31T16:02:30.404393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:02:30.410838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-03-31T16:02:30.411033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-03-31T16:02:30.411368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:02:30.411468Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-03-31T16:02:30.411511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-03-31T16:02:30.411551Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-03-31T16:02:30.414104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:02:30.414183Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-03-31T16:02:30.414249Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-03-31T16:02:30.417464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:02:30.417531Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:02:30.417601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-03-31T16:02:30.417677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-03-31T16:02:30.421787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-03-31T16:02:30.424436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-03-31T16:02:30.424738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-03-31T16:02:30.425849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-03-31T16:02:30.426005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-03-31T16:02:30.426066Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-03-31T16:02:30.426344Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-03-31T16:02:30.426407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-03-31T16:02:30.426567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-03-31T16:02:30.426630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-03-31T16:02:30.429605Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-31T16:02:30.429693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-31T16:02:30.429959Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-31T16:02:30.430012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1
2025-03-31T16:02:30.430306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:02:30.430363Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-03-31T16:02:30.430482Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-03-31T16:02:30.430528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-31T16:02:30.430574Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-03-31T16:02:30.430623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-31T16:02:30.430672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-03-31T16:02:30.430725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-31T16:02:30.430762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-03-31T16:02:30.430834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-03-31T16:02:30.430942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-03-31T16:02:30.431013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-03-31T16:02:30.431056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-03-31T16:02:30.433546Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-03-31T16:02:30.433751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-03-31T16:02:30.433803Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ...
ecute, operationId: 106:0, at schemeshard: 72057594046678944
2025-03-31T16:02:31.382500Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 106:0 ProgressState
2025-03-31T16:02:31.382630Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1
2025-03-31T16:02:31.382673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1
2025-03-31T16:02:31.382717Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1
2025-03-31T16:02:31.382752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1
2025-03-31T16:02:31.382815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: true
2025-03-31T16:02:31.382886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1
2025-03-31T16:02:31.382932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 106:0
2025-03-31T16:02:31.382970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 106:0
2025-03-31T16:02:31.383210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5
2025-03-31T16:02:31.386061Z node 1 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 5 TabletID: 72075186234409549
2025-03-31T16:02:31.386284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106
2025-03-31T16:02:31.386769Z node 1 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 7 TxId_Deprecated: 7 TabletID: 72075186234409551
2025-03-31T16:02:31.387929Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-03-31T16:02:31.391285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 5 ShardOwnerId: 72057594046678944 ShardLocalIdx: 5, at schemeshard: 72057594046678944
2025-03-31T16:02:31.391719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4
2025-03-31T16:02:31.392830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 7 ShardOwnerId: 72057594046678944 ShardLocalIdx: 7, at schemeshard: 72057594046678944
2025-03-31T16:02:31.393105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3
Forgetting tablet 72075186234409549
2025-03-31T16:02:31.393890Z node 1 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 6 TabletID: 72075186234409550
Forgetting tablet 72075186234409551
Forgetting tablet 72075186234409550
2025-03-31T16:02:31.396486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 6 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944
2025-03-31T16:02:31.396761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2
2025-03-31T16:02:31.397100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106
2025-03-31T16:02:31.398164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944
2025-03-31T16:02:31.398223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944
2025-03-31T16:02:31.398367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1
2025-03-31T16:02:31.399035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944
2025-03-31T16:02:31.399106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944
2025-03-31T16:02:31.399191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-03-31T16:02:31.402579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5
2025-03-31T16:02:31.402655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186234409549
2025-03-31T16:02:31.403346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:7
2025-03-31T16:02:31.403393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:7 tabletId 72075186234409551
2025-03-31T16:02:31.403564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6
2025-03-31T16:02:31.403629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186234409550
2025-03-31T16:02:31.405443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944
2025-03-31T16:02:31.405573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944
TestModificationResult got TxId: 106, wait until txId: 106
TestWaitNotification wait txId: 106
2025-03-31T16:02:31.405962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion
2025-03-31T16:02:31.406011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106
2025-03-31T16:02:31.406576Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944
2025-03-31T16:02:31.406704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult
2025-03-31T16:02:31.406747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:928:2791]
TestWaitNotification: OK eventTxId 106
2025-03-31T16:02:31.407486Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0/dir/table0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-03-31T16:02:31.407782Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0/dir/table0" took 287us result status StatusPathDoesNotExist
2025-03-31T16:02:31.408032Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ServerLess0/dir/table0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ServerLess0/dir/table0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
2025-03-31T16:02:31.408671Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-03-31T16:02:31.408882Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0" took 212us result status StatusPathDoesNotExist
2025-03-31T16:02:31.409034Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ServerLess0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ServerLess0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
2025-03-31T16:02:31.409602Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-03-31T16:02:31.409787Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 203us result status StatusSuccess
2025-03-31T16:02:31.410176Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 7 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "SharedDB" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
wait until 72075186234409549 is deleted
wait until 72075186234409550 is deleted
wait until 72075186234409551 is deleted
wait until 72075186234409552 is deleted
2025-03-31T16:02:31.410857Z node 1 :HIVE INFO: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186234409549
2025-03-31T16:02:31.410943Z node 1 :HIVE INFO: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186234409550
2025-03-31T16:02:31.411019Z node 1 :HIVE INFO: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186234409551
2025-03-31T16:02:31.411220Z node 1 :HIVE INFO: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186234409552
Deleted tabletId 72075186234409549
Deleted tabletId 72075186234409550
Deleted tabletId 72075186234409551
Deleted tabletId 72075186234409552
>> TPartBtreeIndexIteration::OneNode_Groups [GOOD]
>> TPartBtreeIndexIteration::OneNode_History
>> TSharedPageCache::MiddleCache_FlatIndex [GOOD]
>> TSharedPageCache::ZeroCache_BTreeIndex
>> KqpLimits::ComputeActorMemoryAllocationFailureQueryService+useSink
>> TChargeBTreeIndex::OneNode_Groups [GOOD]
>> TChargeBTreeIndex::OneNode_History
>> TestKinesisHttpProxy::TestWrongStream [GOOD]
>> TestYmqHttpProxy::TestDeleteMessage [GOOD]
>> TSchemeShardTTLTestsWithReboots::MoveTable [GOOD]
>> BuildStatsHistogram::Single [GOOD]
>> BuildStatsHistogram::Single_Slices
>> TestKinesisHttpProxy::ListShardsExclusiveStartShardId [GOOD]
>> TestYmqHttpProxy::TestUntagQueue [GOOD]
>> TestYmqHttpProxy::TestDeleteMessageBatch
>> TestKinesisHttpProxy::TestWrongStream2
>> TChargeBTreeIndex::OneNode_History [GOOD]
>> TChargeBTreeIndex::OneNode_Groups_History
>> DataShardVolatile::DistributedWriteWithAsyncIndex [GOOD]
>> DataShardVolatile::DistributedWriteThenLateWriteReadCommit
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTestsWithReboots::MoveTable [GOOD]
Test command err:
==== RunWithTabletReboots
=========== RUN: Trace ===========
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141]
IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141]
Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142]
IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142]
Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:131:2058] recipient: [1:109:2141]
Leader for TabletID 72057594046316545 is [1:135:2157] sender: [1:136:2058] recipient: [1:111:2142]
2025-03-31T16:01:09.600531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-03-31T16:01:09.600627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-31T16:01:09.600665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s
2025-03-31T16:01:09.600697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-03-31T16:01:09.600740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-03-31T16:01:09.600767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-03-31T16:01:09.600839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-31T16:01:09.600923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-03-31T16:01:09.601301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-03-31T16:01:09.683590Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" }
2025-03-31T16:01:09.683640Z node 1 :IMPORT WARN: Table profiles were not loaded
Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:169:2058] recipient: [1:15:2062]
2025-03-31T16:01:09.701345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-03-31T16:01:09.701498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-03-31T16:01:09.701683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-03-31T16:01:09.706202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-03-31T16:01:09.706503Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-03-31T16:01:09.707152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-03-31T16:01:09.707384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-03-31T16:01:09.715043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-03-31T16:01:09.716717Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-31T16:01:09.716786Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-31T16:01:09.716871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-03-31T16:01:09.716921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-31T16:01:09.716960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-03-31T16:01:09.717101Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:210:2058] recipient: [1:208:2209]
IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:210:2058] recipient: [1:208:2209]
Leader for TabletID 72057594037968897 is [1:214:2213] sender: [1:215:2058] recipient: [1:208:2209]
2025-03-31T16:01:09.724202Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:235:2058] recipient: [1:15:2062]
2025-03-31T16:01:09.848155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-03-31T16:01:09.848453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:01:09.848719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-03-31T16:01:09.849004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-03-31T16:01:09.849062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:01:09.852668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-03-31T16:01:09.852824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-03-31T16:01:09.853050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:01:09.853103Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-03-31T16:01:09.853142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-03-31T16:01:09.853175Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-03-31T16:01:09.855357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:01:09.855420Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-03-31T16:01:09.855459Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-03-31T16:01:09.857750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:01:09.857822Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:01:09.857867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-03-31T16:01:09.857927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-03-31T16:01:09.861792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-03-31T16:01:09.864193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-03-31T16:01:09.864399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
Leader for TabletID 72057594046316545 is [1:135:2157] sender: [1:250:2058] recipient: [1:15:2062]
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-03-31T16:01:09.865482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-03-31T16:01:09.865627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 135 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-03-31T16:01:09.865675Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-03-31T16:01:09.865992Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-03-31T16:01:09.866041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-03-31T16:01:09.866217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-03-31T16:01:09.866304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-03-31T16:01:09.868778Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-31T16:01:09.868846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-31T16:01:09.869049Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-31T16:01:09.869089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:202:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1
2025-03-31T16:01:09.869471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:01:09.869520Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-03-31T16:01:09.869622Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-03-31T16:01:09.869653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-31T16:01:09.869689Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025 ...
LAT_TX_SCHEMESHARD INFO: TMoveTable TDeleteTableBarrier operationId: 1003:0 HandleReply TEvPrivate:TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 1003 Name: RenamePathBarrier }, at tablet# 72057594046678944
2025-03-31T16:02:32.827643Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2
2025-03-31T16:02:32.827831Z node 62 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 137 -> 129
2025-03-31T16:02:32.827956Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3
2025-03-31T16:02:32.828019Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1
2025-03-31T16:02:32.828975Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003
2025-03-31T16:02:32.831396Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003
2025-03-31T16:02:32.831681Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944
2025-03-31T16:02:32.831832Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944
2025-03-31T16:02:32.834387Z node 62 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-31T16:02:32.834456Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-31T16:02:32.834678Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 3]
2025-03-31T16:02:32.834865Z node 62 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-31T16:02:32.834917Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [62:205:2207], at schemeshard: 72057594046678944, txId: 1003, path id: 1
2025-03-31T16:02:32.834977Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [62:205:2207], at schemeshard: 72057594046678944, txId: 1003, path id: 3
2025-03-31T16:02:32.835615Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944
2025-03-31T16:02:32.835688Z node 62 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:0 ProgressState at tablet: 72057594046678944
2025-03-31T16:02:32.835796Z node 62 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1003:0, at schemeshard: 72057594046678944
2025-03-31T16:02:32.835848Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:0, datashard: 72075186233409546, at schemeshard: 72057594046678944
2025-03-31T16:02:32.835899Z node 62 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 129 -> 240
2025-03-31T16:02:32.837146Z node 62 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 1003 2025-03-31T16:02:32.837278Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 1003 2025-03-31T16:02:32.837327Z node 62 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2025-03-31T16:02:32.837375Z node 62 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-03-31T16:02:32.837425Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-03-31T16:02:32.839032Z node 62 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-03-31T16:02:32.839172Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-03-31T16:02:32.839214Z node 62 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2025-03-31T16:02:32.839258Z node 62 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-03-31T16:02:32.839315Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-31T16:02:32.839408Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true 2025-03-31T16:02:32.846767Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-03-31T16:02:32.846847Z node 62 :FLAT_TX_SCHEMESHARD INFO: TMoveTable TDone, operationId: 1003:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:02:32.846902Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TMoveTable TDone, operationId: 1003:0 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 3], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-03-31T16:02:32.847017Z node 62 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2025-03-31T16:02:32.847055Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-03-31T16:02:32.847131Z node 62 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2025-03-31T16:02:32.847167Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-03-31T16:02:32.847209Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2025-03-31T16:02:32.847258Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-03-31T16:02:32.847315Z node 62 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 
1003:0 2025-03-31T16:02:32.847355Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2025-03-31T16:02:32.847508Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-03-31T16:02:32.847550Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-03-31T16:02:32.848030Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-31T16:02:32.848077Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-03-31T16:02:32.848139Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-03-31T16:02:32.849083Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-03-31T16:02:32.850860Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-03-31T16:02:32.853740Z node 62 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2025-03-31T16:02:32.854102Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-03-31T16:02:32.854147Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-03-31T16:02:32.854522Z node 62 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-03-31T16:02:32.854622Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-03-31T16:02:32.854654Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [62:473:2446] TestWaitNotification: OK eventTxId 1003 2025-03-31T16:02:32.855131Z node 62 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTableMoved" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:02:32.855366Z node 62 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTableMoved" took 291us result status StatusSuccess 2025-03-31T16:02:32.855906Z node 62 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTableMoved" PathDescription { Self { Name: "TTLEnabledTableMoved" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "TTLEnabledTableMoved" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 
1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TestYmqHttpProxy::TestTagQueueMultipleQueriesInflight
>> TIterator::Mixed [GOOD]
>> TIterator::MixedReverse
>> TestKinesisHttpProxy::ListShardsTimestamp
>> KqpLimits::BigParameter
>> TSharedPageCache::ZeroCache_BTreeIndex [GOOD]
>> TSharedPageCache::ZeroCache_FlatIndex
>> IncrementalBackup::SimpleRestoreBackupCollection-WithIncremental [GOOD]
>> KqpPg::TypeCoercionBulkUpsert [GOOD]
>> KqpPg::TypeCoercionInsert+useSink
>> TSharedPageCache::ZeroCache_FlatIndex [GOOD]
>> TSwitchableCache::Touch [GOOD]
>> TSwitchableCache::Erase [GOOD]
>> TSwitchableCache::EvictNext [GOOD]
>> TSwitchableCache::UpdateLimit [GOOD]
>> TSwitchableCache::Switch_Touch_RotatePages_All [GOOD]
>> TSwitchableCache::Switch_Touch_RotatePages_Parts [GOOD]
>> TSwitchableCache::Switch_RotatePages_Force [GOOD]
>> TSwitchableCache::Switch_RotatePages_Evicts [GOOD]
>> TSwitchableCache::Switch_Touch [GOOD]
>> TSwitchableCache::Switch_Erase [GOOD]
>> TSwitchableCache::Switch_EvictNext [GOOD]
>> TSwitchableCache::Switch_UpdateLimit [GOOD]
>> TVersions::WreckHead
>> IncrementalBackup::MultiRestore [GOOD]
>> IncrementalBackup::E2EBackupCollection
>> DBase::KIKIMR_15598_Many_MemTables [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS23+ColumnStore [GOOD]
Test command err:
Trying to start YDB, gRPC: 20365, MsgBus: 27895
2025-03-31T15:59:42.230468Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488003961991483819:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:59:42.303293Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0019bc/r3tmp/tmpCWpRTC/pdisk_1.dat 2025-03-31T15:59:43.092414Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:43.119642Z
node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:59:43.119757Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:59:43.121710Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20365, node 1 2025-03-31T15:59:43.339648Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:59:43.339672Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:59:43.339689Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:59:43.339788Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27895 TClient is connected to server localhost:27895 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:59:44.427381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:44.449083Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T15:59:46.841061Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003979171353678:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:46.841229Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:46.841603Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003979171353690:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:46.845707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T15:59:46.872743Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488003979171353692:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T15:59:46.943341Z node 1 :TX_PROXY ERROR: Actor# [1:7488003979171353743:2340] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:59:47.230642Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488003961991483819:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:59:47.230693Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:59:47.397915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T15:59:47.916352Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488003983466321249:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T15:59:47.916570Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488003983466321249:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T15:59:47.916801Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488003983466321249:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T15:59:47.916917Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488003983466321249:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T15:59:47.917017Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488003983466321249:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T15:59:47.917157Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488003983466321249:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T15:59:47.917265Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488003983466321249:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T15:59:47.917356Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488003983466321249:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T15:59:47.917456Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488003983466321249:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T15:59:47.917553Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7488003983466321249:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T15:59:47.917653Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488003983466321249:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T15:59:47.917772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488003983466321249:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T15:59:47.927593Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488003983466321287:2357];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T15:59:47.927664Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488003983466321287:2357];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T15:59:47.927882Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488003983466321287:2357];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T15:59:47.928011Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488003983466321287:2357];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T15:59:47.928126Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488003983466321287:2357];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T15:59:47.928307Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488003983466321287:2357];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T15:59:47.928396Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488003983466321287:2357];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T15:59:47.928502Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488003983466321287:2357];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T15:59:47.928596Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488003983466321287:2357];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T15:59:47.928699Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488003983466321287:2357];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T15:59:47.929385Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037901;self_id=[1:7488003983466321287:2357];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T15:59:47.929506Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488003983466321287:2357];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T15:59:48.007379Z node 1 :T ... ller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:31.035721Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:31.039813Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:31.041002Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:31.055628Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:31.059034Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039343;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:31.061502Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:31.064985Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039345;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:31.067113Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:31.070682Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039339;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:31.072378Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:31.076751Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:31.077692Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:31.082819Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:31.082819Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:31.088998Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:31.089340Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:31.095045Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:31.095054Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039333;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:31.101433Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:31.104708Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:31.106861Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:31.111000Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039245;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:31.112610Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:31.116733Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:31.117765Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039341;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:31.123022Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:31.123200Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:31.131401Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:31.134130Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:31.138733Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:31.141238Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:31.146023Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:31.149082Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:31.153200Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:31.157826Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:31.162015Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039306;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:31.164890Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:31.169283Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:31.172118Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039303;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:31.176266Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:31.178846Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:31.183528Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:31.186532Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:31.190241Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:31.194441Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:01:31.354962Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpey99harg4dwn8taak9e1h", SessionId: ydb://session/3?node_id=1&id=ZDMxZmEyNzktOTYzODAzOC1iMzZhYTJlNi03NmM1NzlmZA==, Slow query, duration: 40.873322s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n  id1 Int32 NOT NULL,\n  PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n  id2 Int64 NOT NULL,\n  t1_id1 Int64 NOT NULL,\n  -- random_field2 Int32\n  PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n  id3 Int16 NOT NULL,\n  -- random_field3 Int32\n  PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:01:31.883790Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:01:31.884284Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:01:31.884688Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;self_id=[1:7488004382898338008:11632];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039392;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039094;receive=72075186224038933; 2025-03-31T16:01:31.885071Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716;
>> KqpExplain::SelfJoin3xSameLabels
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_backup/unittest >> IncrementalBackup::SimpleRestoreBackupCollection-WithIncremental [GOOD]
Test command err:
2025-03-31T16:02:25.065864Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:02:25.066186Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:02:25.066404Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001849/r3tmp/tmpirapuI/pdisk_1.dat 2025-03-31T16:02:25.504891Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:596:2520], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:02:25.504988Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:02:25.505042Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-03-31T16:02:25.505227Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:593:2518], Recipient [1:409:2404]: {TEvModifySchemeTransaction txid# 1 TabletId# 72057594046644480} 2025-03-31T16:02:25.505265Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-03-31T16:02:25.712545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2025-03-31T16:02:25.712867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:02:25.713190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-03-31T16:02:25.713483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-31T16:02:25.713609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:02:25.713735Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-31T16:02:25.714638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-03-31T16:02:25.714788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-03-31T16:02:25.714929Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-31T16:02:25.714981Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-03-31T16:02:25.715206Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-03-31T16:02:25.715255Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-03-31T16:02:25.715345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:02:25.715460Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-03-31T16:02:25.715510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:02:25.715553Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:02:25.715697Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-31T16:02:25.716330Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-31T16:02:25.716381Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-03-31T16:02:25.716550Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-03-31T16:02:25.716606Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-03-31T16:02:25.716687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:02:25.716743Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 2025-03-31T16:02:25.716785Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:02:25.716886Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-31T16:02:25.717404Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-31T16:02:25.717446Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-03-31T16:02:25.717576Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-03-31T16:02:25.717610Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-03-31T16:02:25.717674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:02:25.717711Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:02:25.717769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2025-03-31T16:02:25.717803Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-31T16:02:25.717870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:02:25.730397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:02:25.731355Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-31T16:02:25.731441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 
2025-03-31T16:02:25.731651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2025-03-31T16:02:25.733138Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877760, Sender [1:601:2525], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046316545 Status: OK ServerId: [1:603:2526] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-03-31T16:02:25.733202Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-03-31T16:02:25.733269Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Handle TEvClientConnected, tabletId: 72057594046316545, status: OK, at schemeshard: 72057594046644480 2025-03-31T16:02:25.733444Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269091328, Sender [1:405:2400], Recipient [1:409:2404]: NKikimrTx.TEvProposeTransactionStatus Status: 16 StepId: 500 TxId: 1 2025-03-31T16:02:25.733898Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:605:2528], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:02:25.733955Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:02:25.733992Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-03-31T16:02:25.734139Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [1:593:2518], Recipient [1:409:2404]: NKikimrScheme.TEvNotifyTxCompletion TxId: 1 2025-03-31T16:02:25.734196Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-03-31T16:02:25.734300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046644480 2025-03-31T16:02:25.734343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2025-03-31T16:02:25.734386Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046644480 2025-03-31T16:02:25.784406Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 273285138, Sender [1:43:2090], Recipient [1:409:2404]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } } ItemKinds: 26 ItemKinds: 34 ItemKinds: 52 ItemKinds: 54 ItemKinds: 73 Local: true } 2025-03-31T16:02:25.784524Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } 2025-03-31T16:02:25.784610Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:02:25.785019Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [RootDataErasureManager] Stop 2025-03-31T16:02:25.785093Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Send TEvConfigNotificationResponse: SubscriptionId: 0 ConfigId { } 2025-03-31T16:02:25.824288Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:02:25.824474Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:02:25.836592Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:02:25.913133Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269091328, Sender [1:405:2400], Recipient [1:409:2404]: NKikimrTx.TEvProposeTransactionStatus Status: 17 StepId: 
500 TxId: 1 2025-03-31T16:02:25.914001Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:636:2544], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:02:25.914056Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:02:25.914094Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-03-31T16:02:25.914259Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269287424, Sender [1:568:2495], Recipient [1:409:2404]: {TEvPlanStep step# 500 MediatorId# 72057594046382081 TabletID 72057594046644480} 2025-03-31T16:02:25.914318Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-31T16:02:25.914418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-31T16:02:25.914579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 0 RawX2: 0 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemes ... eration: 1 2025-03-31T16:02:34.414232Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976715662:0, shardIdx: 72057594046644480:2, datashard: 72075186224037889, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046644480 2025-03-31T16:02:34.414280Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 281474976715662:0 HandleReply TEvSchemaChanged CollectSchemaChanged: false 2025-03-31T16:02:34.414316Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-31T16:02:34.414544Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:02:34.414568Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-31T16:02:34.414877Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:02:34.414914Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-31T16:02:34.426074Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:02:34.426179Z node 2 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715662 state Ready TxInFly 0 2025-03-31T16:02:34.426264Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:02:34.426364Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:02:34.426700Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [2:1007:2796], Recipient [2:410:2405]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:02:34.426746Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:02:34.426781Z node 2 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-03-31T16:02:34.426981Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269551620, Sender [2:739:2610], Recipient [2:410:2405]: NKikimrTxDataShard.TEvSchemaChanged Source { RawX1: 739 RawX2: 
8589937202 } Origin: 72075186224037888 State: 2 TxId: 281474976715662 Step: 0 Generation: 1 2025-03-31T16:02:34.427018Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChanged 2025-03-31T16:02:34.427110Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046644480, at schemeshard: 72057594046644480, message: Source { RawX1: 739 RawX2: 8589937202 } Origin: 72075186224037888 State: 2 TxId: 281474976715662 Step: 0 Generation: 1 2025-03-31T16:02:34.427151Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715662, tablet: 72075186224037888, partId: 0 2025-03-31T16:02:34.427338Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715662:0, at schemeshard: 72057594046644480, message: Source { RawX1: 739 RawX2: 8589937202 } Origin: 72075186224037888 State: 2 TxId: 281474976715662 Step: 0 Generation: 1 2025-03-31T16:02:34.427394Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715662:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-03-31T16:02:34.427472Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 281474976715662:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 message: Source { RawX1: 739 RawX2: 8589937202 } Origin: 72075186224037888 State: 2 TxId: 281474976715662 Step: 0 Generation: 1 2025-03-31T16:02:34.427531Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976715662:0, shardIdx: 72057594046644480:1, datashard: 72075186224037888, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046644480 2025-03-31T16:02:34.427573Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:02:34.427627Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976715662:0, datashard: 72075186224037889, at schemeshard: 72057594046644480 2025-03-31T16:02:34.427684Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976715662:0, datashard: 72075186224037888, at schemeshard: 72057594046644480 2025-03-31T16:02:34.427719Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715662:0 129 -> 240 2025-03-31T16:02:34.427847Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-31T16:02:34.428420Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:02:34.428460Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-31T16:02:34.428497Z node 2 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 281474976715662:0 2025-03-31T16:02:34.428613Z node 2 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [2:954:2752] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715662 at schemeshard: 72057594046644480 2025-03-31T16:02:34.428680Z node 2 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [2:739:2610] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715662 at schemeshard: 72057594046644480 2025-03-31T16:02:34.428801Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715662 datashard 72075186224037888 state Ready 2025-03-31T16:02:34.428885Z node 2 :TX_DATASHARD 
DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-31T16:02:34.429084Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715662 datashard 72075186224037889 state Ready 2025-03-31T16:02:34.429130Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-03-31T16:02:34.429308Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [2:410:2405], Recipient [2:410:2405]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-03-31T16:02:34.429350Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-03-31T16:02:34.429434Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:02:34.429498Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 281474976715662:0ProgressState, operation type TxCopyTable 2025-03-31T16:02:34.429546Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-31T16:02:34.429596Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 281474976715662:0, name: CopyTableBarrier, done: 0, blocked: 1, parts count: 1 2025-03-31T16:02:34.429645Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715662, done: 0, blocked: 1 2025-03-31T16:02:34.429756Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 281474976715662:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 281474976715662 Name: CopyTableBarrier }, at tablet# 72057594046644480 2025-03-31T16:02:34.429802Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715662:0 240 -> 240 2025-03-31T16:02:34.430441Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-31T16:02:34.430480Z node 2 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 281474976715662:0 2025-03-31T16:02:34.430602Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [2:410:2405], Recipient [2:410:2405]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-03-31T16:02:34.430637Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-03-31T16:02:34.430692Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:02:34.430739Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715662:0 ProgressState 2025-03-31T16:02:34.430872Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-31T16:02:34.430920Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715662:0 progress is 1/1 2025-03-31T16:02:34.430966Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715662 ready parts: 1/1 2025-03-31T16:02:34.431016Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715662:0 progress is 1/1 2025-03-31T16:02:34.435188Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715662 ready parts: 1/1 2025-03-31T16:02:34.435350Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715662, ready parts: 1/1, is published: true 2025-03-31T16:02:34.435464Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send 
TEvNotifyTxCompletionResult to actorId: [2:934:2736] message: TxId: 281474976715662 2025-03-31T16:02:34.435545Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715662 ready parts: 1/1 2025-03-31T16:02:34.435610Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715662:0 2025-03-31T16:02:34.435659Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715662:0 2025-03-31T16:02:34.435878Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 11] was 3 2025-03-31T16:02:34.435933Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046644480, LocalPathId: 6] was 3 2025-03-31T16:02:34.436618Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-31T16:02:34.436730Z node 2 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [2:934:2736] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976715662 at schemeshard: 72057594046644480 2025-03-31T16:02:34.437168Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [2:942:2743], Recipient [2:410:2405]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-03-31T16:02:34.437217Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-03-31T16:02:34.437249Z node 2 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046644480 2025-03-31T16:02:34.580178Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [2:1034:2813], serverId# [2:1035:2814], sessionId# [0:0:0] 2025-03-31T16:02:34.580380Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqpf1etb0bt6cxtbttqhd53x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YmM3NWM2NDktMWEzMGM2ZjAtYmMzZWU3YmItZmYyMjc0ODQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 10 } }, { items { uint32_value: 2 } items { uint32_value: 20 } }, { items { uint32_value: 3 } items { uint32_value: 30 } }, { items { uint32_value: 4 } items { uint32_value: 40 } }, { items { uint32_value: 5 } items { uint32_value: 50 } } >> TPartBtreeIndexIteration::OneNode_History [GOOD] >> TPartBtreeIndexIteration::OneNode_Slices ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest >> SystemView::GroupsFields [GOOD] Test command err: 2025-03-31T16:00:06.956002Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004065548044013:2272];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:00:06.956052Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0019fd/r3tmp/tmpg5o9Xf/pdisk_1.dat 2025-03-31T16:00:08.156115Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:00:08.186341Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:00:08.214065Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:00:08.214155Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:00:08.239053Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19525, node 1 2025-03-31T16:00:08.713603Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:00:08.713630Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:00:08.713637Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:00:08.713759Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27698 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:00:09.196141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
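
A note on reading the schemeshard traces above: the sender printed as Source { RawX1: 739 RawX2: 8589937202 } corresponds to the actor id [2:739:2610] shown for the same actor elsewhere in the trace, which suggests RawX2 packs the node id in its upper 32 bits and the hint in its lower 32 bits, while RawX1 carries the local id. A minimal sketch under that assumption (an inference from this log, not a documented layout):

    # Sketch only: assumes RawX2 = (node_id << 32) | hint and RawX1 = local_id,
    # inferred from matching "RawX1: 739 RawX2: 8589937202" to "[2:739:2610]" above.
    def decode_actor_id(raw_x1: int, raw_x2: int) -> str:
        node_id = raw_x2 >> 32          # upper 32 bits of RawX2
        hint = raw_x2 & 0xFFFFFFFF      # lower 32 bits of RawX2
        return f"[{node_id}:{raw_x1}:{hint}]"

    assert decode_actor_id(739, 8589937202) == "[2:739:2610]"
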
2025-03-31T16:00:09.254213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:00:09.291195Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7488004079585678871:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:00:09.291306Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:00:09.318234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:00:09.376527Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:00:09.376637Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:00:09.386880Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2025-03-31T16:00:09.415699Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:00:09.415801Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:00:09.423943Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-03-31T16:00:09.558611Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:00:09.631539Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:00:10.421665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:00:10.925343Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488004082318496701:2209];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:00:10.925494Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant2/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:00:11.063975Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:00:11.064069Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:00:11.148422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:00:11.211989Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 waiting... 
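
The timestamps throughout this log are ISO-8601 with microsecond precision, so wall-clock gaps (for example, how long the tenant setup above took) can be measured directly with a small helper:

    from datetime import datetime

    # Timestamp format observed throughout this log: 2025-03-31T16:00:09.254213Z
    FMT = "%Y-%m-%dT%H:%M:%S.%fZ"

    def elapsed_seconds(first: str, last: str) -> float:
        return (datetime.strptime(last, FMT) - datetime.strptime(first, FMT)).total_seconds()

    # Two timestamps from the tenant setup above:
    print(elapsed_seconds("2025-03-31T16:00:09.254213Z", "2025-03-31T16:00:11.211989Z"))  # ~1.96
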
2025-03-31T16:00:11.335878Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:00:11.646145Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:00:11.646264Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:00:11.660295Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-03-31T16:00:11.671929Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:00:11.959970Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488004065548044013:2272];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:00:11.960066Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:00:14.285690Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7488004079585678871:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:00:14.285755Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant1/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:00:15.915748Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488004082318496701:2209];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:00:15.915825Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant2/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:00:20.041579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:00:20.635698Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004125677587505:2363], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:00:20.635920Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:00:20.636219Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004125677587516:2366], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:00:20.689123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710663:3, at schemeshard: 72057594046644480 2025-03-31T16:00:20.763296Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488004125677587519:2367], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710663 completed, doublechecking } 2025-03-31T16:00:20.907823Z node 1 :TX_PROXY ERROR: Actor# [1:7488004125677587588:3100] txid# 281474976710664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:00:23.167189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:00:23.167238Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:00:24.107715Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jqpexc3yaa9m2y24906ck8w6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2EwMWZkNzEtMjFkYjBmYWQtYTY1NTEyYWUtNzk4NjQ4ODM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:00:24.236765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:00:25.146756Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jqpexg0e29e6kgp2dv0rxpt8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2EwMWZkNzEtMjFkYjBmYWQtYTY1NTEyYWUtNzk4NjQ4ODM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:00:25.381875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-31T16:00:26.330224Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710669. Ctx: { TraceId: 01jqpexhj6fv52hwc5t2ck92e7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2EwMWZkNzEtMjFkYjBmYWQtYTY1NTEyYWUtNzk4NjQ4ODM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:00:27.200180Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710671. Ctx: { TraceId: 01jqpexhqy46mnt5621drx0423, Database: , DatabaseId: /Root, SessionId: ydb://session/3?nod ... olatileState: Disconnected -> Connecting 2025-03-31T16:01:38.284733Z node 26 :HIVE WARN: HIVE#72057594037968897 Node(26, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22537, node 26 2025-03-31T16:01:38.375790Z node 26 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:01:38.375824Z node 26 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:01:38.375835Z node 26 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:01:38.376018Z node 26 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18873 WaitRootIsUp 'Root'... 
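
The id= parameter in the session URIs above (SessionId: ydb://session/3?node_id=1&id=...) is plain base64; decoding the value from the KQP_EXECUTER lines yields a dashed hex string. Treating that as the session id's readable form is an observation from this log, not a documented contract:

    import base64

    # The id= value from the SessionId URIs in the KQP_EXECUTER lines above.
    sid = "Y2EwMWZkNzEtMjFkYjBmYWQtYTY1NTEyYWUtNzk4NjQ4ODM="
    print(base64.b64decode(sid).decode("ascii"))  # ca01fd71-21db0fad-a65512ae-79864883
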
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:01:38.781879Z node 26 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:43.059238Z node 26 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[26:7488004463444112584:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:01:43.059349Z node 26 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:01:43.565298Z node 26 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [26:7488004484918949703:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:01:43.565702Z node 26 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:01:43.566478Z node 26 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [26:7488004484918949738:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:01:43.573653Z node 26 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:01:43.592233Z node 26 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [26:7488004484918949740:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:01:43.689078Z node 26 :TX_PROXY ERROR: Actor# [26:7488004484918949791:2340] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:01:44.150683Z node 26 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jqpezrgm0n4fqgjddq8xzw0j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=26&id=M2JlMjY5YjEtMjRiNTE0NzctNjRmODM1ZC02NzhmZTU3, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:01:44.156967Z node 26 :SYSTEM_VIEWS INFO: Scan started, actor: [26:7488004489213917129:2346], owner: [26:7488004489213917126:2344], scan id: 0, table id: [72057594046644480:1:0:ds_pdisks] 2025-03-31T16:01:44.175439Z node 26 :SYSTEM_VIEWS INFO: Scan prepared, actor: [26:7488004489213917129:2346], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-03-31T16:01:44.216048Z node 26 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [26:7488004489213917129:2346], row count: 1, finished: 1 2025-03-31T16:01:44.216131Z node 26 :SYSTEM_VIEWS INFO: Scan finished, actor: [26:7488004489213917129:2346], owner: [26:7488004489213917126:2344], scan id: 0, table id: [72057594046644480:1:0:ds_pdisks] 2025-03-31T16:01:44.224152Z node 26 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743436904147, txId: 281474976710660] shutting down 2025-03-31T16:01:46.843135Z node 27 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[27:7488004497126484521:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:01:46.843201Z node 27 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0019fd/r3tmp/tmpMABrDB/pdisk_1.dat 2025-03-31T16:01:47.147117Z node 27 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:01:47.174937Z node 27 :HIVE WARN: HIVE#72057594037968897 Node(27, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:01:47.175061Z node 27 :HIVE WARN: HIVE#72057594037968897 Node(27, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:01:47.184173Z node 27 :HIVE WARN: HIVE#72057594037968897 Node(27, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25227, node 27 2025-03-31T16:01:47.441593Z node 27 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:01:47.441618Z node 27 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:01:47.441630Z node 27 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:01:47.441819Z node 27 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30222 WaitRootIsUp 'Root'... 
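
The WorkloadService lines above trace a create-if-absent race: the pool fetch fails with NOT_FOUND, a create is issued, a concurrent creator's "path exist, request accepts it" outcome is treated as success, and the actor schedules a retry to doublecheck. An illustrative sketch of that flow (fetch_pool and create_pool are hypothetical callables, not YDB APIs):

    import time

    # Illustrative only: mirrors the NOT_FOUND -> create -> "path exist" ->
    # doublecheck sequence traced above; not the actual WorkloadService code.
    def ensure_default_pool(fetch_pool, create_pool, retries=5, backoff=0.1):
        for attempt in range(retries):
            pool = fetch_pool()
            if pool is not None:
                return pool                       # fetched: done
            outcome = create_pool()               # may race with another actor
            if outcome in ("ok", "path exist"):   # ours, or a concurrent create: accept
                continue                          # doublecheck via fetch on next pass
            time.sleep(backoff * (attempt + 1))   # scheduled retry on other errors
        raise RuntimeError("resource pool still unavailable")
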
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:01:47.924684Z node 27 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:51.847263Z node 27 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[27:7488004497126484521:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:01:51.847354Z node 27 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:01:55.590496Z node 27 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [27:7488004535781191085:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:01:55.590673Z node 27 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:01:55.594729Z node 27 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [27:7488004535781191113:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:01:55.601211Z node 27 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:01:55.652581Z node 27 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [27:7488004535781191115:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:01:55.733279Z node 27 :TX_PROXY ERROR: Actor# [27:7488004535781191182:2604] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:01:56.303583Z node 27 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jqpf01et6gx5zcq8gj0m8tse, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=27&id=NDAyNTQ4MWYtMTM1MTlhYi00ZGM3ODNhNy03Zjg2MzliYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:01:56.310963Z node 27 :SYSTEM_VIEWS INFO: Scan started, actor: [27:7488004540076158521:2356], owner: [27:7488004540076158518:2354], scan id: 0, table id: [72057594046644480:1:0:ds_groups] 2025-03-31T16:01:56.326232Z node 27 :SYSTEM_VIEWS INFO: Scan prepared, actor: [27:7488004540076158521:2356], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-03-31T16:01:56.342204Z node 27 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [27:7488004540076158521:2356], row count: 1, finished: 1 2025-03-31T16:01:56.342290Z node 27 :SYSTEM_VIEWS INFO: Scan finished, actor: [27:7488004540076158521:2356], owner: [27:7488004540076158518:2354], scan id: 0, table id: [72057594046644480:1:0:ds_groups] 2025-03-31T16:01:56.362832Z node 27 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743436916301, txId: 281474976710660] shutting down >> IncrementalBackup::SimpleRestore [GOOD] >> IncrementalBackup::SimpleBackupBackupCollection+WithIncremental >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-72 [GOOD] >> TPersQueueTest::WhenTheTopicIsDeletedAfterReadingTheData_Compressed [GOOD] >> TPersQueueTest::WhenTheTopicIsDeletedBeforeDataIsDecompressed_Uncompressed ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> DBase::KIKIMR_15598_Many_MemTables [GOOD] Test command err: 3 parts: [0:0:1:0:0:0:0] 150 rows, 7 pages, 1 levels: (286, 103) (607, 210) (811, 278) (1315, 446) (1540, 521) [0:0:2:0:0:0:0] 197 rows, 9 pages, 2 levels: (253, 92) (577, 200) (742, 255) (1156, 393) (1594, 539) [0:0:3:0:0:0:0] 153 rows, 7 pages, 1 levels: (199, 74) (514, 179) (769, 264) (1291, 438) (1555, 526) Checking BTree: Touched 100% bytes, 5 pages RowCountHistogram: 2% (actual 0%) key = (10, 11) value = 12 (actual 1 - 2% error) 2% (actual 0%) key = (16, 13) value = 24 (actual 2 - 4% error) 5% (actual 11%) key = (199, 74) value = 49 (actual 61 - -2% error) 4% (actual 2%) key = (253, 92) value = 73 (actual 74 - 0% error) 4% (actual 2%) key = (286, 103) value = 97 (actual 84 - 2% error) 4% (actual 8%) key = (418, 147) value = 120 (actual 125 - -1% error) 4% (actual 5%) key = (514, 179) value = 144 (actual 154 - -2% error) 5% (actual 4%) key = (577, 200) value = 169 (actual 174 - -1% error) 4% (actual 1%) key = (607, 210) value = 192 (actual 183 - 1% error) 4% (actual 8%) key = (742, 255) value = 214 (actual 226 - -2% error) 5% (actual 1%) key = (769, 264) value = 239 (actual 235 - 0% error) 4% (actual 2%) key = (811, 278) value = 262 (actual 248 - 2% error) 4% (actual 9%) key = (958, 327) value = 286 (actual 293 - -1% error) 5% (actual 5%) key = 
(1054, 359) value = 311 (actual 322 - -2% error) 4% (actual 2%) key = (1087, 370) value = 334 (actual 332 - 0% error) 4% (actual 4%) key = (1156, 393) value = 358 (actual 354 - 0% error) 4% (actual 8%) key = (1291, 438) value = 381 (actual 394 - -2% error) 4% (actual 1%) key = (1315, 446) value = 404 (actual 401 - 0% error) 4% (actual 3%) key = (1375, 466) value = 426 (actual 419 - 1% error) 4% (actual 10%) key = (1540, 521) value = 449 (actual 469 - -4% error) 3% (actual 1%) key = (1555, 526) value = 465 (actual 474 - -1% error) 3% (actual 2%) key = (1594, 539) value = 482 (actual 484 - 0% error) 1% (actual 2%) key = (1636, 553) value = 491 (actual 497 - -1% error) 1% (actual 0%) key = (1639, 554) value = 496 (actual 498 - 0% error) 0% (actual 0%) DataSizeHistogram: 2% (actual 13%) key = (10, 11) value = 950 (actual 5800 - -11% error) 2% (actual 0%) key = (16, 13) value = 1933 (actual 5800 - -9% error) 4% (actual 0%) key = (199, 74) value = 3866 (actual 5800 - -4% error) 4% (actual 9%) key = (253, 92) value = 5849 (actual 9821 - -9% error) 4% (actual 4%) key = (286, 103) value = 7810 (actual 11827 - -9% error) 4% (actual 4%) key = (418, 147) value = 9825 (actual 13848 - -9% error) 4% (actual 4%) key = (514, 179) value = 11834 (actual 15888 - -9% error) 4% (actual 4%) key = (577, 200) value = 13865 (actual 17883 - -9% error) 4% (actual 4%) key = (607, 210) value = 15865 (actual 19876 - -9% error) 4% (actual 4%) key = (742, 255) value = 17859 (actual 21881 - -9% error) 4% (actual 4%) key = (769, 264) value = 19882 (actual 23918 - -9% error) 4% (actual 0%) key = (811, 278) value = 21897 (actual 23918 - -4% error) 4% (actual 9%) key = (958, 327) value = 23894 (actual 27913 - -9% error) 4% (actual 4%) key = (1054, 359) value = 25915 (actual 29895 - -9% error) 4% (actual 0%) key = (1087, 370) value = 27901 (actual 29895 - -4% error) 4% (actual 4%) key = (1156, 393) value = 29881 (actual 31850 - -4% error) 4% (actual 4%) key = (1291, 438) value = 31821 (actual 33747 - -4% error) 4% (actual 4%) key = (1315, 446) value = 33794 (actual 35739 - -4% error) 4% (actual 9%) key = (1375, 466) value = 35737 (actual 39763 - -9% error) 4% (actual 4%) key = (1540, 521) value = 37749 (actual 41447 - -8% error) 3% (actual 0%) key = (1555, 526) value = 39198 (actual 41447 - -5% error) 3% (actual 1%) key = (1594, 539) value = 40605 (actual 42020 - -3% error) 1% (actual 0%) key = (1636, 553) value = 41344 (actual 42020 - -1% error) 0% (actual 0%) key = (1639, 554) value = 41733 (actual 42020 - 0% error) 0% (actual 0%) Checking Flat: Touched 100% bytes, 3 pages RowCountHistogram: 4% (actual 0%) key = (10, 11) value = 24 (actual 1 - 4% error) 5% (actual 0%) key = (16, 13) value = 49 (actual 2 - 9% error) 5% (actual 11%) key = (199, 74) value = 74 (actual 61 - 2% error) 4% (actual 2%) key = (253, 92) value = 97 (actual 74 - 4% error) 4% (actual 2%) key = (286, 103) value = 120 (actual 84 - 7% error) 4% (actual 8%) key = (418, 147) value = 144 (actual 125 - 3% error) 5% (actual 5%) key = (514, 179) value = 169 (actual 154 - 3% error) 5% (actual 4%) key = (577, 200) value = 194 (actual 174 - 4% error) 4% (actual 1%) key = (607, 210) value = 216 (actual 183 - 6% error) 4% (actual 8%) key = (742, 255) value = 237 (actual 226 - 2% error) 5% (actual 1%) key = (769, 264) value = 262 (actual 235 - 5% error) 5% (actual 2%) key = (811, 278) value = 287 (actual 248 - 7% error) 4% (actual 9%) key = (958, 327) value = 311 (actual 293 - 3% error) 4% (actual 5%) key = (1054, 359) value = 335 (actual 322 - 2% error) 4% (actual 2%) 
key = (1087, 370) value = 358 (actual 332 - 5% error) 4% (actual 4%) key = (1156, 393) value = 382 (actual 354 - 5% error) 4% (actual 8%) key = (1291, 438) value = 404 (actual 394 - 2% error) 4% (actual 1%) key = (1315, 446) value = 426 (actual 401 - 5% error) 4% (actual 3%) key = (1375, 466) value = 448 (actual 419 - 5% error) 4% (actual 10%) key = (1540, 521) value = 472 (actual 469 - 0% error) 2% (actual 1%) key = (1555, 526) value = 483 (actual 474 - 1% error) 2% (actual 2%) key = (1594, 539) value = 493 (actual 484 - 1% error) 1% (actual 3%) DataSizeHistogram: 4% (actual 13%) key = (10, 11) value = 1900 (actual 5800 - -9% error) 4% (actual 0%) key = (16, 13) value = 3867 (actual 5800 - -4% error) 4% (actual 0%) key = (199, 74) value = 5800 (actual 5800 - 0% error) 4% (actual 9%) key = (253, 92) value = 7798 (actual 9821 - -4% error) 4% (actual 4%) key = (286, 103) value = 9821 (actual 11827 - -4% error) 4% (actual 4%) key = (418, 147) value = 11827 (actual 13848 - -4% error) 4% (actual 4%) key = (514, 179) value = 13848 (actual 15888 - -4% error) 4% (actual 4%) key = (577, 200) value = 15888 (actual 17883 - -4% error) 4% (actual 4%) key = (607, 210) value = 17883 (actual 19876 - -4% error) 4% (actual 4%) key = (742, 255) value = 19876 (actual 21881 - -4% error) 4% (actual 4%) key = (769, 264) value = 21881 (actual 23918 - -4% error) 4% (actual 0%) key = (811, 278) value = 23918 (actual 23918 - 0% error) 4% (actual 9%) key = (958, 327) value = 25907 (actual 27913 - -4% error) 4% (actual 4%) key = (1054, 359) value = 27913 (actual 29895 - -4% error) 4% (actual 0%) key = (1087, 370) value = 29895 (actual 29895 - 0% error) 4% (actual 4%) key = (1156, 393) value = 31850 (actual 31850 - 0% error) 4% (actual 4%) key = (1291, 438) value = 33747 (actual 33747 - 0% error) 4% (actual 4%) key = (1315, 446) value = 35739 (actual 35739 - 0% error) 4% (actual 9%) key = (1375, 466) value = 37727 (actual 39763 - -4% error) 4% (actual 4%) key = (1540, 521) value = 39763 (actual 41447 - -4% error) 2% (actual 0%) key = (1555, 526) value = 40669 (actual 41447 - -1% error) 1% (actual 1%) key = (1594, 539) value = 41447 (actual 42020 - -1% error) 1% (actual 0%) Checking Mixed: Touched 100% bytes, 5 pages RowCountHistogram: 14% (actual 12%) key = (199, 74) value = 74 (actual 61 - 2% error) 4% (actual 2%) key = (253, 92) value = 97 (actual 74 - 4% error) 4% (actual 2%) key = (286, 103) value = 120 (actual 84 - 7% error) 4% (actual 8%) key = (418, 147) value = 144 (actual 125 - 3% error) 5% (actual 5%) key = (514, 179) value = 169 (actual 154 - 3% error) 5% (actual 4%) key = (577, 200) value = 194 (actual 174 - 4% error) 4% (actual 1%) key = (607, 210) value = 216 (actual 183 - 6% error) 4% (actual 8%) key = (742, 255) value = 237 (actual 226 - 2% error) 5% (actual 1%) key = (769, 264) value = 262 (actual 235 - 5% error) 5% (actual 2%) key = (811, 278) value = 287 (actual 248 - 7% error) 4% (actual 9%) key = (958, 327) value = 311 (actual 293 - 3% error) 4% (actual 5%) key = (1054, 359) value = 335 (actual 322 - 2% error) 4% (actual 2%) key = (1087, 370) value = 358 (actual 332 - 5% error) 4% (actual 4%) key = (1156, 393) value = 382 (actual 354 - 5% error) 4% (actual 8%) key = (1291, 438) value = 404 (actual 394 - 2% error) 4% (actual 1%) key = (1315, 446) value = 426 (actual 401 - 5% error) 4% (actual 3%) key = (1375, 466) value = 448 (actual 419 - 5% error) 4% (actual 10%) key = (1540, 521) value = 472 (actual 469 - 0% error) 2% (actual 1%) key = (1555, 526) value = 483 (actual 474 - 1% error) 2% (actual 
2%) key = (1594, 539) value = 493 (actual 484 - 1% error) 1% (actual 3%) DataSizeHistogram: 13% (actual 13%) key = (199, 74) value = 5800 (actual 5800 - 0% error) 4% (actual 9%) key = (253, 92) value = 7798 (actual 9821 - -4% error) 4% (actual 4%) key = (286, 103) value = 9821 (actual 11827 - -4% error) 4% (actual 4%) key = (418, 147) value = 11827 (actual 13848 - -4% error) 4% (actual 4%) key = (514, 179) value = 13848 (actual 15888 - -4% error) 4% (actual 4%) key = (577, 200) value = 15888 (actual 17883 - -4% error) 4% (actual 4%) key = (607, 210) value = 17883 (actual 19876 - -4% error) 4% (actual 4%) key = (742, 255) value = 19876 (actual 21881 - -4% error) 4% (actual 4%) key = (769, 264) value = 21881 (actual 23918 - -4% error) 4% (actual 0%) key = (811, 278) value = 23918 (actual 23918 - 0% error) 4% (actual 9%) key = (958, 327) value = 25907 (actual 27913 - -4% error) 4% (actual 4%) key = (1054, 359) value = 27913 (actual 29895 - -4% error) 4% (actual 0%) key = (1087, 370) value = 29895 (actual 29895 - 0% error) 4% (actual 4%) key = (1156, 393) value = 31850 (actual 31850 - 0% error) 4% (actual 4%) key = (1291, 438) value = 33747 (actual 33747 - 0% error) 4% (actual 4%) key = (1315, 446) value = 35739 (actual 35739 - 0% error) 4% (actual 9%) key = (1375, 466) value = 37727 (actual 39763 - -4% error) 4% (actual 4%) key = (1540, 521) value = 39763 (actual 41447 - -4% error) 2% (actual 0%) key = (1555, 526) value = 40669 (actual 41447 - -1% error) 1% (actual 1%) key = (1594, 539) value = 41447 (actual 42020 - -1% error) 1% (actual 0%) 3 parts: [0:0:1:0:0:0:0] 150 rows, 7 pages, 1 levels: (286, 103) (607, 210) (811, 278) (1315, 446) (1540, 521) [0:0:2:0:0:0:0] 197 rows, 9 pages, 2 levels: (253, 92) (577, 200) (742, 255) (1156, 393) (1594, 539) [0:0:3:0:0:0:0] 153 rows, 7 pages, 1 levels: (199, 74) (514, 179) (769, 264) (1291, 438) (1555, 526) Checking BTree: Touched 100% bytes, 5 pages RowCountHistogram: 19% (actual 16%) key = (286, 103) value = 97 (actual 84 - 2% error) 19% (actual 19%) key = (607, 210) value = 192 (actual 183 - 1% error) 18% (actual 22%) key = (958, 327) value = 286 (actual 293 - -1% error) 19% (actual 20%) key = (1291, 438) value = 381 (actual 394 - -2% error) 23% (actual 21%) DataSizeHistogram: 18% (actual 28%) key = (286, 103) value = 7810 (actual 11827 - -9% error) 19% (actual 19%) key = (607, 210) value = 15865 (actual 19876 - -9% error) 19% (actual 19%) key = (958, 327) value ... 
85 - 0% error) 4% (actual 4%) key = (631, 218) value = 15674 (actual 15674 - 0% error) 4% (actual 4%) key = (709, 244) value = 17709 (actual 17709 - 0% error) 4% (actual 4%) key = (766, 263) value = 19664 (actual 19664 - 0% error) 4% (actual 4%) key = (853, 292) value = 21673 (actual 21673 - 0% error) 4% (actual 4%) key = (934, 319) value = 23712 (actual 23712 - 0% error) 4% (actual 4%) key = (1006, 343) value = 25687 (actual 25687 - 0% error) 4% (actual 4%) key = (1087, 370) value = 27765 (actual 27678 - 0% error) 4% (actual 4%) key = (1156, 393) value = 29741 (actual 29741 - 0% error) 4% (actual 4%) key = (1246, 423) value = 31726 (actual 31726 - 0% error) 4% (actual 4%) key = (1324, 449) value = 33698 (actual 33698 - 0% error) 4% (actual 4%) key = (1396, 473) value = 35700 (actual 35700 - 0% error) 4% (actual 4%) key = (1471, 498) value = 37620 (actual 37620 - 0% error) 4% (actual 4%) key = (1543, 522) value = 39641 (actual 39641 - 0% error) 4% (actual 4%) key = (1633, 552) value = 41669 (actual 41669 - 0% error) 0% (actual 0%) Checking Flat: Touched 100% bytes, 3 pages RowCountHistogram: 5% (actual 5%) key = (91, 38) value = 25 (actual 25 - 0% error) 5% (actual 5%) key = (166, 63) value = 50 (actual 50 - 0% error) 4% (actual 4%) key = (253, 92) value = 74 (actual 74 - 0% error) 4% (actual 4%) key = (325, 116) value = 96 (actual 96 - 0% error) 4% (actual 4%) key = (394, 139) value = 119 (actual 119 - 0% error) 5% (actual 5%) key = (481, 168) value = 144 (actual 144 - 0% error) 4% (actual 4%) key = (556, 193) value = 167 (actual 167 - 0% error) 4% (actual 4%) key = (631, 218) value = 191 (actual 191 - 0% error) 4% (actual 4%) key = (709, 244) value = 215 (actual 215 - 0% error) 3% (actual 3%) key = (766, 263) value = 234 (actual 234 - 0% error) 5% (actual 5%) key = (853, 292) value = 261 (actual 261 - 0% error) 4% (actual 4%) key = (934, 319) value = 285 (actual 285 - 0% error) 4% (actual 4%) key = (1006, 343) value = 309 (actual 309 - 0% error) 4% (actual 4%) key = (1087, 370) value = 332 (actual 332 - 0% error) 0% (actual 0%) key = (1090, 371) value = 333 (actual 333 - 0% error) 4% (actual 4%) key = (1156, 393) value = 354 (actual 354 - 0% error) 5% (actual 5%) key = (1246, 423) value = 380 (actual 380 - 0% error) 4% (actual 4%) key = (1324, 449) value = 404 (actual 404 - 0% error) 4% (actual 4%) key = (1396, 473) value = 426 (actual 426 - 0% error) 4% (actual 4%) key = (1471, 498) value = 448 (actual 448 - 0% error) 4% (actual 4%) key = (1543, 522) value = 470 (actual 470 - 0% error) 5% (actual 5%) key = (1633, 552) value = 496 (actual 496 - 0% error) 0% (actual 0%) DataSizeHistogram: 4% (actual 4%) key = (91, 38) value = 1974 (actual 1974 - 0% error) 4% (actual 4%) key = (166, 63) value = 3992 (actual 3992 - 0% error) 4% (actual 4%) key = (253, 92) value = 5889 (actual 5889 - 0% error) 4% (actual 4%) key = (325, 116) value = 7868 (actual 7868 - 0% error) 4% (actual 4%) key = (394, 139) value = 9910 (actual 9910 - 0% error) 4% (actual 4%) key = (481, 168) value = 11938 (actual 11938 - 0% error) 4% (actual 4%) key = (556, 193) value = 13685 (actual 13685 - 0% error) 4% (actual 4%) key = (631, 218) value = 15674 (actual 15674 - 0% error) 4% (actual 4%) key = (709, 244) value = 17709 (actual 17709 - 0% error) 4% (actual 4%) key = (766, 263) value = 19664 (actual 19664 - 0% error) 4% (actual 4%) key = (853, 292) value = 21673 (actual 21673 - 0% error) 4% (actual 4%) key = (934, 319) value = 23712 (actual 23712 - 0% error) 4% (actual 4%) key = (1006, 343) value = 25687 (actual 25687 - 0% 
error) 4% (actual 4%) key = (1087, 370) value = 27678 (actual 27678 - 0% error) 0% (actual 0%) key = (1090, 371) value = 27765 (actual 27765 - 0% error) 4% (actual 4%) key = (1156, 393) value = 29741 (actual 29741 - 0% error) 4% (actual 4%) key = (1246, 423) value = 31726 (actual 31726 - 0% error) 4% (actual 4%) key = (1324, 449) value = 33698 (actual 33698 - 0% error) 4% (actual 4%) key = (1396, 473) value = 35700 (actual 35700 - 0% error) 4% (actual 4%) key = (1471, 498) value = 37620 (actual 37620 - 0% error) 4% (actual 4%) key = (1543, 522) value = 39641 (actual 39641 - 0% error) 4% (actual 4%) key = (1633, 552) value = 41669 (actual 41669 - 0% error) 0% (actual 0%) Checking Mixed: Touched 100% bytes, 3 pages RowCountHistogram: 14% (actual 5%) key = (91, 38) value = 70 (actual 25 - 9% error) 5% (actual 5%) key = (166, 63) value = 95 (actual 50 - 9% error) 4% (actual 4%) key = (253, 92) value = 119 (actual 74 - 9% error) 4% (actual 4%) key = (325, 116) value = 141 (actual 96 - 9% error) 4% (actual 4%) key = (394, 139) value = 164 (actual 119 - 9% error) 5% (actual 5%) key = (481, 168) value = 189 (actual 144 - 9% error) 4% (actual 9%) key = (631, 218) value = 212 (actual 191 - 4% error) 4% (actual 4%) key = (709, 244) value = 236 (actual 215 - 4% error) 3% (actual 3%) key = (766, 263) value = 255 (actual 234 - 4% error) 5% (actual 5%) key = (853, 292) value = 282 (actual 261 - 4% error) 4% (actual 4%) key = (934, 319) value = 306 (actual 285 - 4% error) 4% (actual 4%) key = (1006, 343) value = 330 (actual 309 - 4% error) 4% (actual 4%) key = (1087, 370) value = 353 (actual 332 - 4% error) 0% (actual 4%) key = (1156, 393) value = 354 (actual 354 - 0% error) 5% (actual 5%) key = (1246, 423) value = 380 (actual 380 - 0% error) 4% (actual 4%) key = (1324, 449) value = 404 (actual 404 - 0% error) 4% (actual 4%) key = (1396, 473) value = 426 (actual 426 - 0% error) 4% (actual 4%) key = (1471, 498) value = 448 (actual 448 - 0% error) 4% (actual 4%) key = (1543, 522) value = 470 (actual 470 - 0% error) 5% (actual 5%) key = (1633, 552) value = 496 (actual 496 - 0% error) 0% (actual 0%) DataSizeHistogram: 14% (actual 4%) key = (91, 38) value = 5939 (actual 1974 - 9% error) 4% (actual 4%) key = (166, 63) value = 7957 (actual 3992 - 9% error) 4% (actual 4%) key = (253, 92) value = 9854 (actual 5889 - 9% error) 4% (actual 4%) key = (325, 116) value = 11833 (actual 7868 - 9% error) 4% (actual 4%) key = (394, 139) value = 13875 (actual 9910 - 9% error) 4% (actual 4%) key = (481, 168) value = 15903 (actual 11938 - 9% error) 4% (actual 8%) key = (631, 218) value = 17650 (actual 15674 - 4% error) 4% (actual 4%) key = (709, 244) value = 19685 (actual 17709 - 4% error) 4% (actual 4%) key = (766, 263) value = 21640 (actual 19664 - 4% error) 4% (actual 4%) key = (853, 292) value = 23649 (actual 21673 - 4% error) 4% (actual 4%) key = (934, 319) value = 25688 (actual 23712 - 4% error) 4% (actual 4%) key = (1006, 343) value = 27663 (actual 25687 - 4% error) 4% (actual 4%) key = (1087, 370) value = 29654 (actual 27678 - 4% error) 0% (actual 4%) key = (1156, 393) value = 29741 (actual 29741 - 0% error) 4% (actual 4%) key = (1246, 423) value = 31726 (actual 31726 - 0% error) 4% (actual 4%) key = (1324, 449) value = 33698 (actual 33698 - 0% error) 4% (actual 4%) key = (1396, 473) value = 35700 (actual 35700 - 0% error) 4% (actual 4%) key = (1471, 498) value = 37620 (actual 37620 - 0% error) 4% (actual 4%) key = (1543, 522) value = 39641 (actual 39641 - 0% error) 4% (actual 4%) key = (1633, 552) value = 41669 
(actual 41669 - 0% error) 0% (actual 0%) 3 parts: [0:0:1:0:0:0:0] 167 rows, 1 pages, 0 levels: () () () () () [0:0:2:0:0:0:0] 166 rows, 1 pages, 0 levels: () () () () () [0:0:3:0:0:0:0] 167 rows, 1 pages, 0 levels: () () () () () Checking BTree: Touched 0% bytes, 0 pages RowCountHistogram: 33% (actual 33%) key = (553, 192) value = 167 (actual 166 - 0% error) 33% (actual 33%) key = (1087, 370) value = 333 (actual 332 - 0% error) 33% (actual 33%) DataSizeHistogram: 32% (actual 32%) key = (553, 192) value = 13565 (actual 13565 - 0% error) 33% (actual 33%) key = (1087, 370) value = 27505 (actual 27505 - 0% error) 33% (actual 33%) Checking Flat: Touched 100% bytes, 3 pages RowCountHistogram: 33% (actual 33%) key = (556, 193) value = 167 (actual 167 - 0% error) 33% (actual 33%) key = (1090, 371) value = 333 (actual 333 - 0% error) 33% (actual 33%) DataSizeHistogram: 32% (actual 32%) key = (556, 193) value = 13565 (actual 13565 - 0% error) 33% (actual 33%) key = (1090, 371) value = 27505 (actual 27505 - 0% error) 33% (actual 33%) Checking Mixed: Touched 0% bytes, 0 pages RowCountHistogram: 100% (actual 100%) DataSizeHistogram: 100% (actual 100%) Got : 24000 2106439 49449 38 44 Expected: 24000 2106439 49449 38 44 { [2455, 2599), [2798, 3624), [4540, 4713), [5654, 7161), [8509, 8794), [8936, 9973), [11888, 14280), [14337, 14882), [15507, 16365), [17368, 19451), [19536, 20135), [20790, 21503), [21589, 23243) } Got : 12816 1121048 49449 20 23 Expected: 12816 1121048 49449 20 23 Got : 24000 3547100 81694 64 44 Expected: 24000 3547100 81694 64 44 { [1012, 1475), [1682, 1985), [2727, 3553), [3599, 3992), [5397, 7244), [9181, 9807), [9993, 10178), [12209, 14029), [15089, 15342), [16198, 16984), [17238, 18436), [21087, 21876), [23701, 23794) } Got : 9582 1425198 81694 26 17 Expected: 9582 1425198 81694 26 17 Got : 24000 2460139 23760 42 41 Expected: 24000 2460139 23760 42 41 { [1296, 2520), [3888, 4320), [5040, 6840), [6912, 7272), [10872, 11160), [11520, 12096), [12096, 13824), [15192, 15624), [17064, 17856), [18216, 19296), [19800, 20160), [20736, 21096), [21096, 22104) } Got : 10440 1060798 23760 18 18 Expected: 10440 1060798 23760 18 18 Got : 24000 4054050 46562 68 43 Expected: 24000 4054050 46562 68 43 { [460, 1518), [2300, 2484), [2760, 4002), [4600, 5842), [6302, 9752), [11178, 12328), [14582, 14858), [16790, 18032), [18216, 18446), [18722, 19504), [19504, 19964), [20378, 20470), [21344, 23506) } Got : 13570 2277890 46562 38 24 Expected: 13570 2277890 46562 38 24 Got : 24000 2106459 49449 38 44 Expected: 24000 2106459 49449 38 44 Got : 24000 2460219 23555 41 41 Expected: 24000 2460219 23555 41 41 Got : 24000 4054270 46543 66 43 Expected: 24000 4054270 46543 66 43 Got : 24000 2106479 49555 38 44 Expected: 24000 2106479 49555 38 44 Got : 24000 2460259 23628 41 41 Expected: 24000 2460259 23628 41 41 Got : 24000 4054290 46640 65 43 Expected: 24000 4054290 46640 65 43 Got : 24000 2106439 66674 3 4 Expected: 24000 2106439 66674 3 4 { [2455, 2599), [2798, 3624), [4540, 4713), [5654, 7161), [8509, 8794), [8936, 9973), [11888, 14280), [14337, 14882), [15507, 16365), [17368, 19451), [19536, 20135), [20790, 21503), [21589, 23243) } Got : 12816 1121048 66674 2 2 Expected: 12816 1121048 66674 2 2 Got : 24000 2460139 33541 4 4 Expected: 24000 2460139 33541 4 4 { [1296, 2520), [3888, 4320), [5040, 6840), [6912, 7272), [10872, 11160), [11520, 12096), [12096, 13824), [15192, 15624), [17064, 17856), [18216, 19296), [19800, 20160), [20736, 21096), [21096, 22104) } Got : 10440 1060798 33541 1 1 Expected: 10440 1060798 
33541 1 1 Got : 24000 4054050 64742 7 4 Expected: 24000 4054050 64742 7 4 { [460, 1518), [2300, 2484), [2760, 4002), [4600, 5842), [6302, 9752), [11178, 12328), [14582, 14858), [16790, 18032), [18216, 18446), [18722, 19504), [19504, 19964), [20378, 20470), [21344, 23506) } Got : 13570 2234982 64742 4 2 Expected: 13570 2234982 64742 4 2 >> SystemView::AuthPermissions_Selects [GOOD] >> TPartBtreeIndexIteration::OneNode_Slices [GOOD] >> TPartBtreeIndexIteration::OneNode_Groups_Slices >> KqpExplain::SortStage >> KqpLimits::ComputeActorMemoryAllocationFailureQueryService+useSink [GOOD] >> KqpLimits::ComputeActorMemoryAllocationFailureQueryService-useSink >> TChargeBTreeIndex::FewNodes_Groups_History [GOOD] >> TChargeBTreeIndex::FewNodes_Groups_History_Sticky >> TChargeBTreeIndex::OneNode_Groups_History [GOOD] >> TClockProCache::Touch [GOOD] >> TClockProCache::Lifecycle [GOOD] >> TClockProCache::EvictNext [GOOD] >> TClockProCache::UpdateLimit [GOOD] >> TClockProCache::Erase [GOOD] >> TClockProCache::Random [GOOD] >> TCompaction::OneMemtable [GOOD] >> TCompaction::ManyParts >> KqpQuery::SelectWhereInSubquery >> TestKinesisHttpProxy::GoodRequestGetRecordsLongStreamName [GOOD] >> KqpQuery::UdfTerminate [GOOD] >> KqpQuery::UdfMemoryLimit >> KqpTypes::UnsafeTimestampCastV0 >> TestKinesisHttpProxy::TestCounters [GOOD] >> BuildStatsHistogram::Single_Slices [GOOD] >> BuildStatsHistogram::Single_History >> TCompaction::ManyParts [GOOD] >> TCompaction::BootAbort >> TestYmqHttpProxy::TestPurgeQueue [GOOD] >> IncrementalBackup::BackupRestore [GOOD] >> IncrementalBackup::ComplexRestoreBackupCollection+WithIncremental >> TComputeScheduler::TTotalLimits [GOOD] >> TCompaction::BootAbort [GOOD] >> TCompaction::Defaults [GOOD] >> TCompaction::Merges [GOOD] >> TCompactionMulti::ManyParts >> TestKinesisHttpProxy::ErroneousRequestGetRecords >> TestKinesisHttpProxy::TestEmptyHttpBody >> TestYmqHttpProxy::TestSendMessageBatch |72.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication |72.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication |72.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest >> SystemView::AuthPermissions_Selects [GOOD] Test command err: 2025-03-31T16:00:04.485935Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004059856812952:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:00:04.485986Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001a19/r3tmp/tmpJBeycy/pdisk_1.dat 2025-03-31T16:00:05.530158Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:00:05.653405Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:00:05.653496Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:00:05.669066Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Connecting -> Connected 2025-03-31T16:00:05.719468Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26530, node 1 2025-03-31T16:00:05.791924Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:00:06.003808Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:00:06.003828Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:00:06.003835Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:00:06.003939Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25553 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:00:06.728062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
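
A note on the histogram figures from the tablet_flat DBase test above: the printed "% error" values are consistent with (estimated - actual) taken relative to the whole dataset and truncated toward zero; the three parts hold 150 + 197 + 153 = 500 rows, and the DataSizeHistogram totals about 42020 bytes. A sketch reproducing a few of the printed values under that assumption:

    # Assumed formula, checked against several entries in the DBase output above:
    # "% error" = (estimated - actual) * 100 / total, truncated toward zero.
    def error_pct(estimated: int, actual: int, total: int) -> int:
        return int((estimated - actual) * 100 / total)  # int() truncates toward zero

    assert error_pct(74, 61, 500) == 2         # "value = 74 (actual 61 - 2% error)"
    assert error_pct(49, 61, 500) == -2        # "value = 49 (actual 61 - -2% error)"
    assert error_pct(950, 5800, 42020) == -11  # "value = 950 (actual 5800 - -11% error)"
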
2025-03-31T16:00:07.030874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateExtSubDomain SubDomain { Name: "Tenant1" } } TxId: 281474976710658 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-03-31T16:00:07.031101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateExtSubDomain Propose, path/Root/Tenant1, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:00:07.031162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: Tenant1, child id: [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-03-31T16:00:07.031297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 0 2025-03-31T16:00:07.031435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-03-31T16:00:07.031519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-31T16:00:07.031535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:00:07.031591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-03-31T16:00:07.041910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-03-31T16:00:07.079965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710658, response: Status: StatusAccepted TxId: 281474976710658 SchemeshardId: 72057594046644480 PathId: 2, at schemeshard: 72057594046644480 2025-03-31T16:00:07.080271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE DATABASE, path: /Root/Tenant1 2025-03-31T16:00:07.080462Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-03-31T16:00:07.080476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-03-31T16:00:07.080593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2025-03-31T16:00:07.080664Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-03-31T16:00:07.080679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7488004064151780906:2418], at schemeshard: 72057594046644480, txId: 281474976710658, path id: 1 2025-03-31T16:00:07.080691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7488004064151780906:2418], at schemeshard: 72057594046644480, txId: 281474976710658, path id: 2 2025-03-31T16:00:07.080721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710658:0, at 
schemeshard: 72057594046644480 2025-03-31T16:00:07.080738Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:00:07.080756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710658:0, at tablet# 72057594046644480 2025-03-31T16:00:07.080782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710658 ready parts: 1/1 2025-03-31T16:00:07.091036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710658 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:00:07.101618Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-03-31T16:00:07.101738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-03-31T16:00:07.101750Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-03-31T16:00:07.101770Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-03-31T16:00:07.101785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-03-31T16:00:07.107566Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-03-31T16:00:07.107722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-03-31T16:00:07.107751Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 waiting... 
2025-03-31T16:00:07.107785Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2025-03-31T16:00:07.107824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-03-31T16:00:07.107952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710658, ready parts: 0/1, is published: true 2025-03-31T16:00:07.110053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710658:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710658 msg type: 269090816 2025-03-31T16:00:07.110156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710658, partId: 4294967295, tablet: 72057594046316545 2025-03-31T16:00:07.111725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-31T16:00:07.111760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710658, ready parts: 0/1, is published: true 2025-03-31T16:00:07.111780Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-31T16:00:07.127975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710658 2025-03-31T16:00:07.128053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710658 2025-03-31T16:00:07.128551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743436807154, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-31T16:00:07.128691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743436807154 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-03-31T16:00:07.128732Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710658:0, at tablet# 72057594046644480 2025-03-31T16:00:07.128899Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 240 2025-03-31T16:00:07.128944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710658:0, at tablet# 72057594046644480 2025-03-31T16:00:07.129070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCoun ... 
dmin access: 0 is admin: 1 2025-03-31T16:02:34.011696Z node 31 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:02:34.012210Z node 31 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [{ Sid: user2 },{ Sid: user1 }] Groups: [] } Children [.metadata,Dir1,Table0,Tenant1,Tenant2] }] } 2025-03-31T16:02:34.012274Z node 31 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [31:7488004702261747909:2430], row count: 0, finished: 0 2025-03-31T16:02:34.014475Z node 31 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:02:34.015017Z node 31 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir1 TableId: [72057594046644480:9:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Children [SubDir1,SubDir2] }] } 2025-03-31T16:02:34.015134Z node 31 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [31:7488004702261747909:2430], row count: 0, finished: 0 2025-03-31T16:02:34.015219Z node 31 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir1/SubDir1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:02:34.015547Z node 31 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir1/SubDir1 TableId: [72057594046644480:10:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Children [] }] } 2025-03-31T16:02:34.015626Z node 31 :SYSTEM_VIEWS 
DEBUG: Sending scan batch, actor: [31:7488004702261747909:2430], row count: 2, finished: 0 2025-03-31T16:02:34.015713Z node 31 :SYSTEM_VIEWS INFO: Scan finished, actor: [31:7488004702261747909:2430], owner: [31:7488004702261747906:2428], scan id: 0, table id: [72057594046644480:1:0:auth_permissions] 2025-03-31T16:02:34.019242Z node 31 :SYSTEM_VIEWS TRACE: Collect query stats: service id# [31:7488004655017105512:2144], database# , query hash# 3187945588805523718, cpu time# 202102 2025-03-31T16:02:34.020670Z node 31 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743436954004, txId: 281474976710687] shutting down 2025-03-31T16:02:34.107229Z node 34 :SYSTEM_VIEWS DEBUG: NSysView::TPartitionStatsCollector: TEvProcessOverloaded , top size by CPU # 0, top size by TLI # 0, time# 2025-03-31T16:02:34.106927Z 2025-03-31T16:02:34.317372Z node 31 :KQP_EXECUTER ERROR: TxId: 281474976710690. Ctx: { TraceId: 01jqpf1ef554aj98y8h3kszmtd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=31&id=MzA4MzViM2EtMjM1ODA5NDMtZGFjMWU5ZmMtMWFjMmQ5Y2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:02:34.322651Z node 31 :SYSTEM_VIEWS INFO: Scan started, actor: [31:7488004702261747948:2439], owner: [31:7488004702261747945:2437], scan id: 0, table id: [72057594046644480:1:0:auth_permissions] 2025-03-31T16:02:34.324424Z node 31 :SYSTEM_VIEWS INFO: Scan prepared, actor: [31:7488004702261747948:2439], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-03-31T16:02:34.324479Z node 31 :SYSTEM_VIEWS DEBUG: ProceedToScan, tenant name: /Root tenant owner: root@builtin subject sid: empty require admin access: 0 is admin: 1 2025-03-31T16:02:34.324553Z node 31 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:02:34.324893Z node 31 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [{ Sid: user2 },{ Sid: user1 }] Groups: [] } Children [.metadata,Dir1,Table0,Tenant1,Tenant2] }] } 2025-03-31T16:02:34.324960Z node 31 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [31:7488004702261747948:2439], row count: 0, finished: 0 2025-03-31T16:02:34.325044Z node 31 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:02:34.325348Z node 31 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 
DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir1 TableId: [72057594046644480:9:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Children [SubDir1,SubDir2] }] } 2025-03-31T16:02:34.325418Z node 31 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [31:7488004702261747948:2439], row count: 0, finished: 0 2025-03-31T16:02:34.327198Z node 31 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir1/SubDir1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:02:34.327479Z node 31 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir1/SubDir1 TableId: [72057594046644480:10:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Children [] }] } 2025-03-31T16:02:34.327546Z node 31 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [31:7488004702261747948:2439], row count: 1, finished: 0 2025-03-31T16:02:34.330581Z node 31 :SYSTEM_VIEWS INFO: Scan finished, actor: [31:7488004702261747948:2439], owner: [31:7488004702261747945:2437], scan id: 0, table id: [72057594046644480:1:0:auth_permissions] 2025-03-31T16:02:34.336919Z node 31 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743436954311, txId: 281474976710689] shutting down 2025-03-31T16:02:34.337172Z node 31 :SYSTEM_VIEWS TRACE: Collect query stats: service id# [31:7488004655017105512:2144], database# , query hash# 15123460272068726277, cpu time# 225629 2025-03-31T16:02:34.352607Z node 32 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:02:34.349542Z node 31 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 33 2025-03-31T16:02:34.350006Z node 31 :HIVE WARN: HIVE#72057594037968897 Node(33, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-03-31T16:02:34.350137Z node 31 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 34 2025-03-31T16:02:34.350757Z node 31 :HIVE WARN: HIVE#72057594037968897 Node(34, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-03-31T16:02:34.353392Z node 31 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 35 2025-03-31T16:02:34.354140Z node 31 :HIVE WARN: HIVE#72057594037968897 Node(35, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-03-31T16:02:34.354332Z node 31 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 32 
2025-03-31T16:02:34.354995Z node 31 :HIVE WARN: HIVE#72057594037968897 Node(32, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-03-31T16:02:34.363639Z node 31 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[32:7488004665463287927:2105], Type=268959746 2025-03-31T16:02:34.365432Z node 31 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[35:7488004658001627259:2104], Type=268959746 2025-03-31T16:02:34.365486Z node 31 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[35:7488004658001627259:2104], Type=268959746 2025-03-31T16:02:34.365514Z node 31 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[35:7488004658001627259:2104], Type=268959746 2025-03-31T16:02:34.365540Z node 31 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[35:7488004658001627259:2104], Type=268959746 2025-03-31T16:02:34.365565Z node 31 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[35:7488004658001627259:2104], Type=268959746 2025-03-31T16:02:34.365600Z node 31 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[35:7488004658001627259:2104], Type=268959746 2025-03-31T16:02:34.366175Z node 34 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TComputeScheduler::TTotalLimits [GOOD] Test command err: 1610 1600 1610 1600 >> SystemView::TabletsShards [GOOD] >> TCompactionMulti::ManyParts [GOOD] >> TCompactionMulti::MainPageCollectionEdge >> KqpScan::ScanPg [GOOD] >> TestKinesisHttpProxy::TestWrongStream2 [GOOD] >> TIterator::MixedReverse [GOOD] >> TIterator::Serial >> TPartBtreeIndexIteration::OneNode_Groups_Slices [GOOD] >> TPartBtreeIndexIteration::OneNode_History_Slices >> TestYmqHttpProxy::TestDeleteMessageBatch [GOOD] >> TestKinesisHttpProxy::ListShardsTimestamp [GOOD] >> TKqpScanData::UnboxedValueSize >> TKqpScanData::UnboxedValueSize [GOOD] >> TFlatTableExecutor_IndexLoading::PrechargeAndSeek_FlatIndex [GOOD] >> TFlatTableExecutor_IndexLoading::PrechargeAndSeek_BTreeIndex >> TestKinesisHttpProxy::TestWrongRequest >> TKqpScanData::ArrowToUnboxedValueConverter >> TKqpScanData::ArrowToUnboxedValueConverter [GOOD] >> KqpLimits::BigParameter [GOOD] >> KqpLimits::CancelAfterRoTx >> DataShardVolatile::DistributedWriteThenLateWriteReadCommit [GOOD] >> DataShardVolatile::TwoAppendsMustBeVolatile+UseSink |72.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScanData::UnboxedValueSize [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp_scan/unittest >> KqpScan::ScanPg [GOOD] Test command err: 2025-03-31T16:02:07.161757Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:698:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:02:07.162333Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:02:07.162782Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:02:07.164242Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:695:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:02:07.164693Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:02:07.164942Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0019f6/r3tmp/tmpg6kBPr/pdisk_1.dat 2025-03-31T16:02:07.699480Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:02:07.894169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:02:08.012047Z node 1 :TX_PROXY DEBUG: actor# [1:208:2173] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-31T16:02:08.019629Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:02:08.019783Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:02:08.021229Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-03-31T16:02:08.021926Z node 2 :TX_PROXY DEBUG: actor# [2:238:2129] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-31T16:02:08.023921Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:02:08.024007Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:02:08.030621Z node 2 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976720656 RangeEnd# 281474976725656 txAllocator# 72057594046447617 2025-03-31T16:02:08.048948Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:02:08.049981Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:02:08.050411Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:02:08.391060Z node 1 :TX_PROXY DEBUG: actor# [1:208:2173] Handle TEvProposeTransaction 2025-03-31T16:02:08.391143Z node 1 :TX_PROXY DEBUG: actor# [1:208:2173] TxId# 281474976715657 ProcessProposeTransaction 2025-03-31T16:02:08.391305Z node 1 :TX_PROXY DEBUG: actor# [1:208:2173] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:1234:2747] 2025-03-31T16:02:08.515234Z node 1 :TX_PROXY DEBUG: Actor# [1:1234:2747] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-03-31T16:02:08.515370Z node 1 :TX_PROXY DEBUG: Actor# [1:1234:2747] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T16:02:08.516114Z node 1 :TX_PROXY DEBUG: Actor# 
[1:1234:2747] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-03-31T16:02:08.516207Z node 1 :TX_PROXY DEBUG: Actor# [1:1234:2747] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:02:08.516631Z node 1 :TX_PROXY DEBUG: Actor# [1:1234:2747] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:02:08.516857Z node 1 :TX_PROXY DEBUG: Actor# [1:1234:2747] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T16:02:08.516952Z node 1 :TX_PROXY DEBUG: Actor# [1:1234:2747] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-03-31T16:02:08.517271Z node 1 :TX_PROXY DEBUG: Actor# [1:1234:2747] txid# 281474976715657 HANDLE EvClientConnected 2025-03-31T16:02:08.518905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:02:08.525225Z node 1 :TX_PROXY DEBUG: Actor# [1:1234:2747] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-03-31T16:02:08.525351Z node 1 :TX_PROXY DEBUG: Actor# [1:1234:2747] txid# 281474976715657 SEND to# [1:1141:2687] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-03-31T16:02:08.642146Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:1285:2390] 2025-03-31T16:02:08.642451Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:02:08.702727Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:02:08.703004Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:02:08.704947Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:02:08.705057Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:02:08.705126Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:02:08.705508Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:02:08.705891Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:02:08.706030Z node 2 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [2:1309:2390] in generation 1 2025-03-31T16:02:08.741779Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:02:08.794442Z node 2 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T16:02:08.794607Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:02:08.794716Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [2:1312:2407] 2025-03-31T16:02:08.794747Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:02:08.794777Z node 2 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T16:02:08.794811Z node 
2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:02:08.795289Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T16:02:08.795413Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T16:02:08.795596Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:02:08.795642Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:02:08.795692Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:02:08.795743Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:02:08.852347Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:1268:2777], serverId# [2:1316:2408], sessionId# [0:0:0] 2025-03-31T16:02:08.852886Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:02:08.853182Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T16:02:08.853332Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T16:02:08.856981Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:02:08.873431Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:02:08.873596Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-31T16:02:09.123555Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /Root, board: kqpexch+/Root, with size: 2 2025-03-31T16:02:09.123864Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /Root, board: kqpexch+/Root, with size: 2 2025-03-31T16:02:09.124017Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-03-31T16:02:09.171355Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /Root, board: kqpexch+/Root, with size: 2 2025-03-31T16:02:09.171685Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /Root, board: kqpexch+/Root, with size: 2 2025-03-31T16:02:09.171847Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-03-31T16:02:09.172147Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-03-31T16:02:09.198507Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:1346:2798], serverId# [2:1348:2418], sessionId# [0:0:0] 2025-03-31T16:02:09.211543Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T16:02:09.211690Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:02:09.212143Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:02:09.212191Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 
2025-03-31T16:02:09.212247Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-31T16:02:09.212545Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-31T16:02:09.212691Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025 ... xId: 281474976715664, task: 1. Tasks execution finished 2025-03-31T16:02:26.699073Z node 3 :KQP_COMPUTE DEBUG: SelfId: [3:1656:2984], TxId: 281474976715664, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=3&id=OTRjYzQ2NmYtZTYzNjEyN2YtYjEyNTBjNjAtNzY0Yzk1OWU=. TraceId : 01jqpf15zr9q3nj2q5k479tmtx. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Compute state finished. All channels and sinks finished 2025-03-31T16:02:26.699196Z node 3 :KQP_COMPUTE DEBUG: TxId: 281474976715664, task: 1. pass away 2025-03-31T16:02:26.699291Z node 3 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715664;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-03-31T16:02:26.699458Z node 3 :KQP_RESOURCE_MANAGER DEBUG: TxId: 281474976715664, taskId: 1. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2025-03-31T16:02:26.699658Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1653:2944] TxId: 281474976715664. Ctx: { TraceId: 01jqpf15zr9q3nj2q5k479tmtx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=OTRjYzQ2NmYtZTYzNjEyN2YtYjEyNTBjNjAtNzY0Yzk1OWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [3:1656:2984], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 1986 Tasks { TaskId: 1 CpuTimeUs: 564 FinishTimeMs: 1743436946698 OutputRows: 1 OutputBytes: 6 ResultRows: 1 ResultBytes: 6 ComputeCpuTimeUs: 64 BuildCpuTimeUs: 500 HostName: "ghrun-tuhoqst4xe" NodeId: 3 CreateTimeMs: 1743436946696 } MaxMemoryUsage: 1048576 } 2025-03-31T16:02:26.699713Z node 3 :KQP_EXECUTER INFO: TxId: 281474976715664. Ctx: { TraceId: 01jqpf15zr9q3nj2q5k479tmtx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=OTRjYzQ2NmYtZTYzNjEyN2YtYjEyNTBjNjAtNzY0Yzk1OWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [3:1656:2984] 2025-03-31T16:02:26.700535Z node 3 :KQP_EXECUTER INFO: ActorId: [3:1653:2944] TxId: 281474976715664. Ctx: { TraceId: 01jqpf15zr9q3nj2q5k479tmtx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=OTRjYzQ2NmYtZTYzNjEyN2YtYjEyNTBjNjAtNzY0Yzk1OWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Full stats: { CpuTimeUs: 3687 DurationUs: 1743436944743274 ExecuterCpuTimeUs: 1701 StartTimeMs: 1956 FinishTimeMs: 1743436946699 Stages { StageGuid: "6d9ec58a-73c6ccf5-ece9d034-e64f17b1" Program: "(\n(declare %kqp%tx_result_binding_0_0 (ListType (StructType \'(\'\"column0\" (OptionalType (DataType \'Uint64))))))\n(return (lambda \'() (Iterator %kqp%tx_result_binding_0_0)))\n)\n" ComputeActors { CpuTimeUs: 1986 Tasks { TaskId: 1 CpuTimeUs: 564 FinishTimeMs: 1743436946698 OutputRows: 1 OutputBytes: 6 ResultRows: 1 ResultBytes: 6 ComputeCpuTimeUs: 64 BuildCpuTimeUs: 500 HostName: "ghrun-tuhoqst4xe" NodeId: 3 CreateTimeMs: 1743436946696 } MaxMemoryUsage: 1048576 } BaseTimeMs: 1743436946697 } TxPlansWithStats: "{\"Node Type\":\"Phase\",\"PlanNodeId\":8,\"Plans\":[{\"Node Type\":\"ResultSet_1\",\"PlanNodeId\":7,\"PlanNodeType\":\"ResultSet\",\"Plans\":[{\"CTE Name\":\"precompute_0_0\",\"Node Type\":\"ConstantExpr\",\"Operators\":[{\"Inputs\":[],\"Iterator\":\"precompute_0_0\",\"Name\":\"Iterator\"}],\"PlanNodeId\":6,\"StageGuid\":\"6d9ec58a-73c6ccf5-ece9d034-e64f17b1\",\"Stats\":{\"BaseTimeMs\":1743436946697,\"ComputeNodes\":[{\"CpuTimeUs\":1986,\"Tasks\":[{\"ComputeTimeUs\":64,\"FinishTimeMs\":1743436946698,\"Host\":\"ghrun-tuhoqst4xe\",\"NodeId\":3,\"OutputBytes\":6,\"OutputRows\":1,\"ResultBytes\":6,\"ResultRows\":1,\"TaskId\":1}]}],\"FinishedTasks\":0,\"PhysicalStageId\":0,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"StageGuid\":\"\"}],\"StageGuid\":\"\"}" StatConvertBytes: 685 Extra { type_url: "type.googleapis.com/NKqpProto.TKqpExecutionExtraStats" value: "\022\013\010\302\017\020\302\017\030\302\017 \001" } } 2025-03-31T16:02:26.700594Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1653:2944] TxId: 281474976715664. 
Ctx: { TraceId: 01jqpf15zr9q3nj2q5k479tmtx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=OTRjYzQ2NmYtZTYzNjEyN2YtYjEyNTBjNjAtNzY0Yzk1OWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-03-31T16:02:26.700639Z node 3 :KQP_EXECUTER TRACE: ActorId: [3:1653:2944] TxId: 281474976715664. Ctx: { TraceId: 01jqpf15zr9q3nj2q5k479tmtx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=OTRjYzQ2NmYtZTYzNjEyN2YtYjEyNTBjNjAtNzY0Yzk1OWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Terminate, become ZombieState 2025-03-31T16:02:26.700700Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1653:2944] TxId: 281474976715664. Ctx: { TraceId: 01jqpf15zr9q3nj2q5k479tmtx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=OTRjYzQ2NmYtZTYzNjEyN2YtYjEyNTBjNjAtNzY0Yzk1OWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.001986s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-03-31T16:02:26.701572Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 2000, txId: 281474976715661] shutting down 2025-03-31T16:02:26.701686Z node 3 :TX_PROXY DEBUG: actor# [3:208:2173] Handle TEvProposeTransaction 2025-03-31T16:02:26.701724Z node 3 :TX_PROXY DEBUG: actor# [3:208:2173] TxId# 0 ProcessProposeTransaction 2025-03-31T16:02:26.701859Z node 3 :TX_PROXY DEBUG: actor# [3:208:2173] Cookie# 0 userReqId# "" txid# 0 reqId# [3:1658:2985] SnapshotReq marker# P0 2025-03-31T16:02:26.702867Z node 3 :TX_PROXY DEBUG: Actor# [3:1661:2985] txid# 0 HANDLE EvNavigateKeySetResult TResolveTablesActor marker# P1 ErrorCount# 0 2025-03-31T16:02:26.703061Z node 3 :TX_PROXY DEBUG: Actor# [3:1661:2985] txid# 0 HANDLE EvResolveKeySetResult TResolveTablesActor marker# P2 ErrorCount# 0 2025-03-31T16:02:26.703213Z node 3 :TX_PROXY DEBUG: Actor# [3:1658:2985] SEND TEvDiscardVolatileSnapshotRequest to datashard 72075186224037888 marker# P3 2025-03-31T16:02:35.200036Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:698:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:02:35.200463Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:02:35.200697Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:02:35.202747Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:695:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:02:35.203180Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:02:35.203436Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0019f6/r3tmp/tmpnASU9M/pdisk_1.dat 2025-03-31T16:02:35.594329Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:02:35.793173Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:02:35.902713Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:02:35.902884Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:02:35.907879Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:02:35.907999Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:02:35.927388Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 6 Cookie 6 2025-03-31T16:02:35.928520Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:02:35.929001Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:02:36.228330Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:02:36.966152Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:1401:2836], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:36.966289Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:1412:2841], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:36.966401Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:36.973751Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:02:37.455347Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:1415:2844], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:02:37.542592Z node 5 :TX_PROXY ERROR: Actor# [5:1545:2915] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:02:38.204065Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqpf1h931ztyxrkdfnm4xgy0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=YWMxN2MyMWEtYTExOWQ2ZjAtY2E3ZjMzODEtYzAwM2JhNzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:02:38.951769Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqpf1jh4dxxm4a1c5a6d4q07, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=OTJhOTNkZWYtYzc3ODUyZDAtOTRiNzIwYWYtMjIxMjMxNjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:02:39.700030Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqpf1jh4dxxm4a1c5a6d4q07, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=OTJhOTNkZWYtYzc3ODUyZDAtOTRiNzIwYWYtMjIxMjMxNjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:02:39.704203Z node 5 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 2000, txId: 281474976715661] shutting down >> KqpLimits::ComputeActorMemoryAllocationFailureQueryService-useSink [GOOD] >> KqpLimits::ComputeNodeMemoryLimit |72.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScanData::ArrowToUnboxedValueConverter [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest >> SystemView::TabletsShards [GOOD] Test command err: 2025-03-31T16:00:04.342567Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004057660611067:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:00:04.355781Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001a0a/r3tmp/tmptYTvte/pdisk_1.dat 2025-03-31T16:00:05.198143Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:00:05.201131Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:00:05.207283Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:00:05.213782Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10965, node 1 2025-03-31T16:00:05.477141Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:00:05.477159Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:00:05.477193Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:00:05.477305Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6277 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:00:06.433304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:00:09.301516Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488004057660611067:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:00:09.301691Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:00:10.239704Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004083430415370:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:00:10.239885Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:00:10.240555Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004083430415386:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:00:10.245945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:00:10.276946Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488004083430415389:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:00:10.378598Z node 1 :TX_PROXY ERROR: Actor# [1:7488004083430415442:2345] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:00:12.072677Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jqpewyd854jv68q25rcww7pc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjRlNDI2YTctYTI1MmY1MjMtYzNlZWQ4ZjAtOGQwM2EyMDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:00:12.179579Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7488004092020350082:2349], owner: [1:7488004092020350078:2347], scan id: 0, table id: [72057594046644480:1:0:ds_vslots] 2025-03-31T16:00:12.181274Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7488004092020350082:2349], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-03-31T16:00:12.207067Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7488004092020350082:2349], row count: 2, finished: 1 2025-03-31T16:00:12.214383Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7488004092020350082:2349], owner: [1:7488004092020350078:2347], scan id: 0, table id: [72057594046644480:1:0:ds_vslots] 2025-03-31T16:00:12.225406Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743436812070, txId: 281474976710660] shutting down 2025-03-31T16:00:14.327396Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488004099590663572:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:00:14.408520Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001a0a/r3tmp/tmp2Gg9SE/pdisk_1.dat 2025-03-31T16:00:14.929585Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:00:15.130174Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:00:15.130278Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:00:15.168627Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30138, node 2 2025-03-31T16:00:15.503969Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:00:15.503996Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:00:15.504004Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:00:15.504144Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15259 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:00:16.160743Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:00:16.249917Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:00:16.327374Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7488004107381126233:2291];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:00:16.327522Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:00:16.371783Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7488004108950088557:2210];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:00:16.372046Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; waiting... 
2025-03-31T16:00:16.399796Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:00:16.399900Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:00:16.463282Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 6 Cookie 6 2025-03-31T16:00:16.481614Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:00:16.531597Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:00:16.531688Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:00:16.552705Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2025-03-31T16:00:16.560194Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:00:16.912085Z node 6 :SYSTEM_VIEWS DEBUG: NSysView::TPartitionStatsCollector bootstrapped 2025-03-31T16:00:16.939773Z node 6 :SYSTEM_VIEWS INFO: [72075186224037893] OnActivateExecutor 2025-03-31T16:00:16.939884Z node 6 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxInitSchema::Execute 2025-03-31T16:00:17.032561Z node 6 :SYSTEM_VIEWS DEBUG: Handle TEvSysView::TEvRegisterDbCounters: service id# [6:7488004107381125989:2073], path id# [OwnerId: 72057594046644480, LocalPathId: 2], service# 2 2025-03-31T16:00:17.032855Z node 6 :IMPORT WARN: Table profiles were not loaded ... ount# 0 2025-03-31T16:02:30.000945Z node 26 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessInterval: service id# [26:7488004589134910139:2063], interval end# 2025-03-31T16:02:30.000000Z, event interval end# 2025-03-31T16:02:30.000000Z 2025-03-31T16:02:30.000987Z node 26 :SYSTEM_VIEWS DEBUG: Rotate logs: service id# [26:7488004589134910139:2063], query logs count# 0, processor ids count# 0, processor id to database count# 0 2025-03-31T16:02:30.002255Z node 26 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessInterval: service id# [26:7488004597724844946:2206], interval end# 2025-03-31T16:02:30.000000Z, event interval end# 2025-03-31T16:02:30.000000Z 2025-03-31T16:02:30.002292Z node 26 :SYSTEM_VIEWS DEBUG: Rotate logs: service id# [26:7488004597724844946:2206], query logs count# 0, processor ids count# 1, processor id to database count# 1 2025-03-31T16:02:30.000614Z node 24 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessInterval: service id# [24:7488004599234543577:2078], interval end# 2025-03-31T16:02:30.000000Z, event interval end# 2025-03-31T16:02:30.000000Z 2025-03-31T16:02:30.000616Z node 24 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessInterval: service id# [24:7488004590644608897:2063], interval end# 2025-03-31T16:02:30.000000Z, event interval end# 2025-03-31T16:02:30.000000Z 2025-03-31T16:02:30.000676Z node 24 :SYSTEM_VIEWS DEBUG: Rotate logs: service id# [24:7488004590644608897:2063], query logs count# 0, processor ids count# 0, processor id to database count# 0 2025-03-31T16:02:30.000676Z node 24 :SYSTEM_VIEWS DEBUG: Rotate logs: service id# [24:7488004599234543577:2078], query logs count# 0, processor ids count# 1, processor id to database count# 1 2025-03-31T16:02:30.000843Z node 23 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessInterval: service id# [23:7488004589035456600:2063], interval end# 2025-03-31T16:02:30.000000Z, event interval end# 2025-03-31T16:02:30.000000Z 
2025-03-31T16:02:30.000908Z node 23 :SYSTEM_VIEWS DEBUG: Rotate logs: service id# [23:7488004589035456600:2063], query logs count# 0, processor ids count# 0, processor id to database count# 0 2025-03-31T16:02:30.002753Z node 23 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessInterval: service id# [23:7488004597625391340:2143], interval end# 2025-03-31T16:02:30.000000Z, event interval end# 2025-03-31T16:02:30.000000Z 2025-03-31T16:02:30.002786Z node 23 :SYSTEM_VIEWS DEBUG: Rotate logs: service id# [23:7488004597625391340:2143], query logs count# 0, processor ids count# 1, processor id to database count# 1 2025-03-31T16:02:30.063077Z node 26 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessLabeledCounters: service id# [26:7488004589134910139:2063] 2025-03-31T16:02:30.183180Z node 23 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessCounters: service id# [23:7488004589035456600:2063] 2025-03-31T16:02:30.202338Z node 26 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessCounters: service id# [26:7488004589134910139:2063] 2025-03-31T16:02:30.368208Z node 23 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessLabeledCounters: service id# [23:7488004589035456600:2063] 2025-03-31T16:02:30.436324Z node 25 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessLabeledCounters: service id# [25:7488004591288992886:2063] 2025-03-31T16:02:30.440753Z node 26 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessLabeledCounters: service id# [26:7488004597724844946:2206] 2025-03-31T16:02:30.538749Z node 25 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessCounters: service id# [25:7488004591288992886:2063] 2025-03-31T16:02:30.709347Z node 23 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessLabeledCounters: service id# [23:7488004597625391340:2143] 2025-03-31T16:02:30.910692Z node 25 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessCounters: service id# [25:7488004599878927560:2074] 2025-03-31T16:02:30.911722Z node 25 :SYSTEM_VIEWS DEBUG: Send counters: service id# [25:7488004599878927560:2074], processor id# 72075186224037893, database# /Root/Tenant1, generation# 4396011169429483881, node id# 25, is retrying# 0, is labeled# 0 2025-03-31T16:02:30.950730Z node 25 :SYSTEM_VIEWS WARN: Summary delivery problem: service id# [25:7488004599878927560:2074], processor id# 72075186224037893, database# /Root/Tenant1 2025-03-31T16:02:30.951393Z node 25 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [25:7488004599878927560:2074], database# /Root/Tenant1, processor id# 72075186224037893 2025-03-31T16:02:30.953367Z node 23 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessCounters: service id# [23:7488004597625391340:2143] 2025-03-31T16:02:30.954454Z node 23 :SYSTEM_VIEWS DEBUG: Send counters: service id# [23:7488004597625391340:2143], processor id# 72075186224037899, database# /Root/Tenant2, generation# 8413235337180586906, node id# 23, is retrying# 0, is labeled# 0 2025-03-31T16:02:30.989798Z node 23 :SYSTEM_VIEWS WARN: Summary delivery problem: service id# [23:7488004597625391340:2143], processor id# 72075186224037899, database# /Root/Tenant2 2025-03-31T16:02:30.989932Z node 23 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [23:7488004597625391340:2143], database# /Root/Tenant2, processor id# 72075186224037899 2025-03-31T16:02:30.992722Z node 25 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessLabeledCounters: service id# [25:7488004599878927560:2074] 2025-03-31T16:02:32.574666Z node 27 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[27:7488004693965396586:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:02:32.574751Z node 27 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001a0a/r3tmp/tmpOFxkUJ/pdisk_1.dat 2025-03-31T16:02:32.801791Z node 27 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:02:32.849746Z node 27 :HIVE WARN: HIVE#72057594037968897 Node(27, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:02:32.849892Z node 27 :HIVE WARN: HIVE#72057594037968897 Node(27, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:02:32.852492Z node 27 :HIVE WARN: HIVE#72057594037968897 Node(27, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3746, node 27 2025-03-31T16:02:32.943818Z node 27 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:02:32.943847Z node 27 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:02:32.943854Z node 27 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:02:32.944041Z node 27 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22334 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:02:33.412648Z node 27 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:33.427879Z node 27 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:37.575330Z node 27 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[27:7488004693965396586:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:02:37.575427Z node 27 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:02:39.095777Z node 27 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [27:7488004724030168509:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:39.095933Z node 27 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:39.103262Z node 27 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [27:7488004724030168521:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:39.110272Z node 27 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-31T16:02:39.137831Z node 27 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [27:7488004724030168523:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-31T16:02:39.218140Z node 27 :TX_PROXY ERROR: Actor# [27:7488004724030168574:2468] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:02:39.394961Z node 27 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqpf1e2409kexadgh2gnew4f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=27&id=MjcyNTk0MmUtZTBlMDRmYzQtNjVjN2VhYjgtMTIyMjg0ZTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:02:39.407421Z node 27 :SYSTEM_VIEWS INFO: Scan started, actor: [27:7488004724030168608:2359], owner: [27:7488004724030168604:2357], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-03-31T16:02:39.409310Z node 27 :SYSTEM_VIEWS INFO: Scan prepared, actor: [27:7488004724030168608:2359], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-03-31T16:02:39.412416Z node 27 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [27:7488004724030168608:2359], row count: 3, finished: 1 2025-03-31T16:02:39.412478Z node 27 :SYSTEM_VIEWS INFO: Scan finished, actor: [27:7488004724030168608:2359], owner: [27:7488004724030168604:2357], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-03-31T16:02:39.417590Z node 27 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743436959390, txId: 281474976715661] shutting down >> TestKinesisHttpProxy::ListShardsToken >> KqpScan::ScanAfterSplitSlowMetaRead [GOOD] |72.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/engine/ut/ydb-core-engine-ut |72.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/engine/ut/ydb-core-engine-ut |72.7%| [LD] {RESULT} $(B)/ydb/core/engine/ut/ydb-core-engine-ut >> TCompactionMulti::MainPageCollectionEdge [GOOD] >> TCompactionMulti::MainPageCollectionEdgeMany >> TPartBtreeIndexIteration::OneNode_History_Slices [GOOD] >> TPartBtreeIndexIteration::OneNode_Groups_History_Slices >> KqpExplain::SelfJoin3xSameLabels [GOOD] >> KqpExplain::PureExpr ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestYmqHttpProxy::TestDeleteMessageBatch [GOOD] Test command err: 2025-03-31T16:01:42.378167Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004480827585492:2205];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:01:42.378672Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001064/r3tmp/tmpqn68yG/pdisk_1.dat 2025-03-31T16:01:43.087574Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:01:43.116993Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:01:43.119259Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:01:43.124453Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting 
-> Connected TServer::EnableGrpc on GrpcPort 62548, node 1 2025-03-31T16:01:43.259737Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:01:43.259764Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:01:43.259771Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:01:43.259893Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24422 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:01:43.671364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:43.708238Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 TClient is connected to server localhost:24422 2025-03-31T16:01:44.111528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:44.122394Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-31T16:01:44.128709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:44.151870Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-03-31T16:01:44.161033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:44.483317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:01:44.571721Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-03-31T16:01:44.636155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:44.711371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:44.771264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:44.812024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:44.856727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:44.898964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:44.942945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:46.818710Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004498007455964:2376], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:01:46.818844Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:01:46.819112Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004498007455976:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:01:46.827395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480 2025-03-31T16:01:46.852462Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488004498007455978:2380], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2025-03-31T16:01:46.913329Z node 1 :TX_PROXY ERROR: Actor# [1:7488004498007456029:2918] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 18], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:01:47.222225Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01jqpf009z1v3b5g0n0f4js2xs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Mjk1NjQ0MmQtN2E5MjAxMjEtNzVlYjEzNmYtZmM0OTE5ZWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:01:47.282096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:01:47.330856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:47.355295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-31T16:01:47.368564Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488004480827585492:2205];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:01:47.368622Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:01:47.397827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:01:47.438933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-31T16:01:47.476136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:01:47.519538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-31T16:01:47.594278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:47.666752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:01:47.766728Z node 1 :HTTP INFO: Listening on http://127.0.0.1:23761 2025-03-31T16:01:48.768028Z node 1 :SQS INFO: Start SQS service actor 2025-03-31T16:01:48.768163Z node 1 :SQS DEBUG: SQS service config: { EnableSqs: true YandexCloudMode: true EnableDeadLetterQueues: true } 2025-03-31T16:01:48.769625Z node 1 :SQS DEBUG: Enable scheme board scheme cache 2025-03-31T16:01:48.778706Z node 1 :SQS INFO: Start SQS proxy service actor 2025-03-31T16:01:48.779039Z node 1 :HTTP INFO: Listening on http://[::]:21574 2025-03-31T16:01:48.806208Z node 1 :SQS NOTICE: [Node tracker] schedule describe tables after 0.000000s 2025-03-31T16:01:48.806232Z node 1 :SQS INFO: Request SQS users list 2025-03-31T16:01:48.806253Z node 1 :SQS DEBUG: [Node tracker] bootstrap on node=1 2025-03-31T16:01:48.806262Z node 1 :SQS DEBUG: Request SQS queues list 2025-03-31T16:01:48.810585Z node 1 :SQS DEBUG: Request [] Starting executor actor for query(idx=GET_QUEUES_LIST_ID). Mode: COMPILE 2025-03-31T16:01:48.810728Z node 1 :SQS TRACE: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Compile program: ( ... HandleSqsResponse DeleteMessageBatch { RequestId: "358cde79-93eede3c-f5f163bd-dc55ed30" Entries { Id: "Id-0" } Entries { Id: "Id-1" } } RequestId: "358cde79-93eede3c-f5f163bd-dc55ed30" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false 2025-03-31T16:02:41.732263Z node 7 :SQS TRACE: Sending answer to proxy actor [7:7488004732068678338:2542]: DeleteMessageBatch { RequestId: "358cde79-93eede3c-f5f163bd-dc55ed30" Entries { Id: "Id-0" } Entries { Id: "Id-1" } } RequestId: "358cde79-93eede3c-f5f163bd-dc55ed30" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false 2025-03-31T16:02:41.732317Z node 7 :SQS TRACE: Dec local leader ref for actor [7:7488004732068678339:3800]. Found: 1 2025-03-31T16:02:41.733541Z node 7 :SQS TRACE: Request [358cde79-93eede3c-f5f163bd-dc55ed30] HandleResponse: { DeleteMessageBatch { RequestId: "358cde79-93eede3c-f5f163bd-dc55ed30" Entries { Id: "Id-0" } Entries { Id: "Id-1" } } RequestId: "358cde79-93eede3c-f5f163bd-dc55ed30" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false }, status: OK 2025-03-31T16:02:41.733630Z node 7 :SQS DEBUG: Request [358cde79-93eede3c-f5f163bd-dc55ed30] Sending reply from proxy actor: { DeleteMessageBatch { RequestId: "358cde79-93eede3c-f5f163bd-dc55ed30" Entries { Id: "Id-0" } Entries { Id: "Id-1" } } RequestId: "358cde79-93eede3c-f5f163bd-dc55ed30" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false } 2025-03-31T16:02:41.733882Z node 7 :HTTP_PROXY DEBUG: http request [DeleteMessageBatch] requestId [358cde79-93eede3c-f5f163bd-dc55ed30] Got succesfult GRPC response. 2025-03-31T16:02:41.734018Z node 7 :HTTP_PROXY INFO: http request [DeleteMessageBatch] requestId [358cde79-93eede3c-f5f163bd-dc55ed30] reply ok 2025-03-31T16:02:41.734168Z node 7 :HTTP_PROXY DEBUG: http request [DeleteMessageBatch] requestId [358cde79-93eede3c-f5f163bd-dc55ed30] Send metering event. 
HttpStatusCode: 200 IsFifo: 0 FolderId: folder4 RequestSizeInBytes: 716 ResponseSizeInBytes: 222 SourceAddress: 9837:5f00:6050:0:8037:5f00:6050:0 ResourceId: 000000000000000101v0 Action: DeleteMessageBatch 2025-03-31T16:02:41.734297Z node 7 :HTTP DEBUG: (#37,[::1]:50118) <- (200 ) 2025-03-31T16:02:41.734394Z node 7 :HTTP DEBUG: (#37,[::1]:50118) connection closed Http output full {"Successful":[{"Id":"Id-0"},{"Id":"Id-1"}]} 2025-03-31T16:02:41.735295Z node 7 :HTTP DEBUG: (#37,[::1]:50130) incoming connection opened 2025-03-31T16:02:41.735362Z node 7 :HTTP DEBUG: (#37,[::1]:50130) -> (POST /Root) 2025-03-31T16:02:41.735486Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [184a:2301:6050:0:4a:2301:6050:0] request [ReceiveMessage] url [/Root] database [/Root] requestId: 230bd09f-fd782ba8-7753a28d-d698c09c 2025-03-31T16:02:41.735835Z node 7 :HTTP_PROXY INFO: http request [ReceiveMessage] requestId [230bd09f-fd782ba8-7753a28d-d698c09c] got new request from [184a:2301:6050:0:4a:2301:6050:0] 2025-03-31T16:02:41.736208Z node 7 :HTTP_PROXY DEBUG: http request [ReceiveMessage] requestId [230bd09f-fd782ba8-7753a28d-d698c09c] Got cloud auth response. FolderId: folder4 CloudId: cloud4 UserSid: fake_user_sid@as 2025-03-31T16:02:41.736234Z node 7 :HTTP_PROXY INFO: http request [ReceiveMessage] requestId [230bd09f-fd782ba8-7753a28d-d698c09c] sending grpc request to '' database: '/Root' iam token size: 0 2025-03-31T16:02:41.736460Z node 7 :SQS TRACE: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] HandleResponse { Status: 48 TxId: 281474976715712 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "messages" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Offset" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "SentTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } } } Value { Struct { Optional { } } } } } 2025-03-31T16:02:41.736490Z node 7 :SQS DEBUG: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] Attempt 1 execution duration: 4ms 2025-03-31T16:02:41.736643Z node 7 :SQS TRACE: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] Sending mkql execution result: { Status: 48 TxId: 281474976715712 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "messages" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Offset" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "SentTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } } } Value { Struct { Optional { } } } } } 2025-03-31T16:02:41.736686Z node 7 :SQS TRACE: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] Minikql data response: {"messages": []} 2025-03-31T16:02:41.736781Z node 7 :SQS DEBUG: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] execution duration: 5ms 2025-03-31T16:02:41.736882Z node 7 :SQS DEBUG: Request [] Sending executed reply 2025-03-31T16:02:41.736976Z node 7 :SQS DEBUG: Handle oldest timestamp 
metrics for [cloud4/000000000000000101v0/3] 2025-03-31T16:02:41.742178Z node 7 :SQS DEBUG: Got new request in YMQ proxy. FolderId: folder4, CloudId: cloud4, UserSid: fake_user_sid@as, RequestId: 230bd09f-fd782ba8-7753a28d-d698c09c 2025-03-31T16:02:41.742325Z node 7 :SQS DEBUG: Request [230bd09f-fd782ba8-7753a28d-d698c09c] Proxy actor: used user_name='cloud4', queue_name='000000000000000101v0', folder_id='folder4' 2025-03-31T16:02:41.742333Z node 7 :SQS DEBUG: Request [230bd09f-fd782ba8-7753a28d-d698c09c] Request proxy started 2025-03-31T16:02:41.742446Z node 7 :SQS DEBUG: Request [230bd09f-fd782ba8-7753a28d-d698c09c] Answer configuration for queue [cloud4/000000000000000101v0] without leader 2025-03-31T16:02:41.742507Z node 7 :SQS DEBUG: Request [230bd09f-fd782ba8-7753a28d-d698c09c] Get configuration duration: 0ms 2025-03-31T16:02:41.742591Z node 7 :SQS DEBUG: Request [230bd09f-fd782ba8-7753a28d-d698c09c] Send get leader node request to sqs service for cloud4/000000000000000101v0 2025-03-31T16:02:41.742613Z node 7 :SQS DEBUG: Request [230bd09f-fd782ba8-7753a28d-d698c09c] Leader node for queue [cloud4/000000000000000101v0] is 7 2025-03-31T16:02:41.742638Z node 7 :SQS DEBUG: Request [230bd09f-fd782ba8-7753a28d-d698c09c] Got leader node for queue response. Node id: 7. Status: 0 2025-03-31T16:02:41.742733Z node 7 :SQS TRACE: Request [230bd09f-fd782ba8-7753a28d-d698c09c] Sending request from proxy to leader node 7: ReceiveMessage { Auth { UserName: "cloud4" FolderId: "folder4" UserSID: "fake_user_sid@as" } QueueName: "000000000000000101v0" } RequestId: "230bd09f-fd782ba8-7753a28d-d698c09c" 2025-03-31T16:02:41.742807Z node 7 :SQS DEBUG: Request [230bd09f-fd782ba8-7753a28d-d698c09c] Received Sqs Request: ReceiveMessage { Auth { UserName: "cloud4" FolderId: "folder4" UserSID: "fake_user_sid@as" } QueueName: "000000000000000101v0" } RequestId: "230bd09f-fd782ba8-7753a28d-d698c09c" 2025-03-31T16:02:41.742859Z node 7 :SQS DEBUG: Request [230bd09f-fd782ba8-7753a28d-d698c09c] Request started. Actor: [7:7488004732068678372:3828] 2025-03-31T16:02:41.742891Z node 7 :SQS TRACE: Inc local leader ref for actor [7:7488004732068678372:3828] 2025-03-31T16:02:41.742908Z node 7 :SQS DEBUG: Request [230bd09f-fd782ba8-7753a28d-d698c09c] Forward configuration request to queue [cloud4/000000000000000101v0] leader 2025-03-31T16:02:41.742942Z node 7 :SQS DEBUG: Request [230bd09f-fd782ba8-7753a28d-d698c09c] Get configuration duration: 0ms 2025-03-31T16:02:41.742957Z node 7 :SQS TRACE: Request [230bd09f-fd782ba8-7753a28d-d698c09c] Got configuration. Root url: http://ghrun-tuhoqst4xe.auto.internal:8771, Shards: 4, Fail: 0 Http output full {} 2025-03-31T16:02:41.742981Z node 7 :SQS TRACE: Request [230bd09f-fd782ba8-7753a28d-d698c09c] Got configuration. Attributes: { ContentBasedDeduplication: 0 DelaySeconds: 0.000000s FifoQueue: 0 MaximumMessageSize: 262144 MessageRetentionPeriod: 345600.000000s ReceiveMessageWaitTime: 0.000000s VisibilityTimeout: 30.000000s } 2025-03-31T16:02:41.742993Z node 7 :SQS TRACE: Request [230bd09f-fd782ba8-7753a28d-d698c09c] DoRoutine 2025-03-31T16:02:41.743039Z node 7 :SQS TRACE: Increment active message requests for [cloud4/000000000000000101v0/0]. ActiveMessageRequests: 1 2025-03-31T16:02:41.743054Z node 7 :SQS DEBUG: Request [230bd09f-fd782ba8-7753a28d-d698c09c] Received empty result from shard 0 infly. Infly capacity: 0. Messages count: 0 2025-03-31T16:02:41.743065Z node 7 :SQS DEBUG: Request [230bd09f-fd782ba8-7753a28d-d698c09c] No known messages in this shard. 
Skip attempt to add messages to infly 2025-03-31T16:02:41.743073Z node 7 :SQS DEBUG: Request [230bd09f-fd782ba8-7753a28d-d698c09c] Already tried to add messages to infly 2025-03-31T16:02:41.743137Z node 7 :SQS TRACE: Decrement active message requests for [[cloud4/000000000000000101v0/0]. ActiveMessageRequests: 0 2025-03-31T16:02:41.743219Z node 7 :SQS TRACE: Request [230bd09f-fd782ba8-7753a28d-d698c09c] SendReplyAndDie from action actor { ReceiveMessage { RequestId: "230bd09f-fd782ba8-7753a28d-d698c09c" } } 2025-03-31T16:02:41.743317Z node 7 :SQS TRACE: Request [230bd09f-fd782ba8-7753a28d-d698c09c] Sending sqs response: { ReceiveMessage { RequestId: "230bd09f-fd782ba8-7753a28d-d698c09c" } RequestId: "230bd09f-fd782ba8-7753a28d-d698c09c" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false } 2025-03-31T16:02:41.743387Z node 7 :SQS TRACE: Dec local leader ref for actor [7:7488004732068678372:3828]. Found: 1 2025-03-31T16:02:41.743468Z node 7 :SQS DEBUG: Request ReceiveMessage working duration: 6ms 2025-03-31T16:02:41.743534Z node 7 :SQS TRACE: HandleSqsResponse ReceiveMessage { RequestId: "230bd09f-fd782ba8-7753a28d-d698c09c" } RequestId: "230bd09f-fd782ba8-7753a28d-d698c09c" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false 2025-03-31T16:02:41.743581Z node 7 :SQS TRACE: Sending answer to proxy actor [7:7488004732068678371:2546]: ReceiveMessage { RequestId: "230bd09f-fd782ba8-7753a28d-d698c09c" } RequestId: "230bd09f-fd782ba8-7753a28d-d698c09c" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false 2025-03-31T16:02:41.743728Z node 7 :SQS TRACE: Request [230bd09f-fd782ba8-7753a28d-d698c09c] HandleResponse: { ReceiveMessage { RequestId: "230bd09f-fd782ba8-7753a28d-d698c09c" } RequestId: "230bd09f-fd782ba8-7753a28d-d698c09c" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false }, status: OK 2025-03-31T16:02:41.743807Z node 7 :SQS DEBUG: Request [230bd09f-fd782ba8-7753a28d-d698c09c] Sending reply from proxy actor: { ReceiveMessage { RequestId: "230bd09f-fd782ba8-7753a28d-d698c09c" } RequestId: "230bd09f-fd782ba8-7753a28d-d698c09c" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false } 2025-03-31T16:02:41.744017Z node 7 :HTTP_PROXY DEBUG: http request [ReceiveMessage] requestId [230bd09f-fd782ba8-7753a28d-d698c09c] Got succesfult GRPC response. 2025-03-31T16:02:41.744058Z node 7 :HTTP_PROXY INFO: http request [ReceiveMessage] requestId [230bd09f-fd782ba8-7753a28d-d698c09c] reply ok 2025-03-31T16:02:41.744166Z node 7 :HTTP_PROXY DEBUG: http request [ReceiveMessage] requestId [230bd09f-fd782ba8-7753a28d-d698c09c] Send metering event. 
HttpStatusCode: 200 IsFifo: 0 FolderId: folder4 RequestSizeInBytes: 526 ResponseSizeInBytes: 179 SourceAddress: 184a:2301:6050:0:4a:2301:6050:0 ResourceId: 000000000000000101v0 Action: ReceiveMessage 2025-03-31T16:02:41.744267Z node 7 :HTTP DEBUG: (#37,[::1]:50130) <- (200 ) 2025-03-31T16:02:41.744350Z node 7 :HTTP DEBUG: (#37,[::1]:50130) connection closed >> TestYmqHttpProxy::TestTagQueueMultipleQueriesInflight [GOOD] >> TKqpScanData::FailOnUnsupportedPgType >> TComputeScheduler::ResourceWeight [GOOD] >> TKqpScanData::FailOnUnsupportedPgType [GOOD] >> TIterator::Serial [GOOD] >> TIterator::SerialReverse >> TComputeScheduler::QueryLimits [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp_scan/unittest >> KqpScan::ScanAfterSplitSlowMetaRead [GOOD] Test command err: 2025-03-31T16:02:03.906569Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:698:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:02:03.907154Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:02:03.907396Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:02:03.908656Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:695:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:02:03.909051Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:02:03.909225Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0019f5/r3tmp/tmpcLKPRU/pdisk_1.dat 2025-03-31T16:02:04.457045Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:02:04.704193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:02:04.809526Z node 1 :TX_PROXY DEBUG: actor# [1:208:2173] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-31T16:02:04.811970Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:02:04.812113Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:02:04.813196Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-03-31T16:02:04.813773Z node 2 :TX_PROXY DEBUG: actor# [2:238:2129] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-31T16:02:04.819695Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:02:04.819791Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:02:04.821777Z node 2 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976720656 RangeEnd# 281474976725656 txAllocator# 72057594046447617 2025-03-31T16:02:04.838131Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:02:04.838872Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:02:04.839346Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:02:05.155267Z node 1 :TX_PROXY DEBUG: actor# [1:208:2173] Handle TEvProposeTransaction 2025-03-31T16:02:05.155341Z node 1 :TX_PROXY DEBUG: actor# [1:208:2173] TxId# 281474976715657 ProcessProposeTransaction 2025-03-31T16:02:05.155524Z node 1 :TX_PROXY DEBUG: actor# [1:208:2173] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:1231:2744] 2025-03-31T16:02:05.417047Z node 1 :TX_PROXY DEBUG: Actor# [1:1231:2744] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 7 } } } ExecTimeoutPeriod: 18446744073709551615 2025-03-31T16:02:05.417170Z node 1 :TX_PROXY DEBUG: Actor# [1:1231:2744] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T16:02:05.417924Z node 1 :TX_PROXY DEBUG: Actor# 
[1:1231:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-03-31T16:02:05.418035Z node 1 :TX_PROXY DEBUG: Actor# [1:1231:2744] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:02:05.418443Z node 1 :TX_PROXY DEBUG: Actor# [1:1231:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:02:05.418640Z node 1 :TX_PROXY DEBUG: Actor# [1:1231:2744] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T16:02:05.418747Z node 1 :TX_PROXY DEBUG: Actor# [1:1231:2744] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-03-31T16:02:05.419186Z node 1 :TX_PROXY DEBUG: Actor# [1:1231:2744] txid# 281474976715657 HANDLE EvClientConnected 2025-03-31T16:02:05.421155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:02:05.428521Z node 1 :TX_PROXY DEBUG: Actor# [1:1231:2744] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-03-31T16:02:05.428613Z node 1 :TX_PROXY DEBUG: Actor# [1:1231:2744] txid# 281474976715657 SEND to# [1:1141:2687] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-03-31T16:02:05.590720Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:1309:2802] 2025-03-31T16:02:05.591034Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:02:05.666066Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037894 actor [1:1312:2804] 2025-03-31T16:02:05.666344Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:02:05.683578Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037892 actor [1:1314:2806] 2025-03-31T16:02:05.683823Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:02:05.693437Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:02:05.694062Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:02:05.695440Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-03-31T16:02:05.695525Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-03-31T16:02:05.695579Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-03-31T16:02:05.695990Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:02:05.696481Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:02:05.696558Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037889 persisting started state actor id [1:1409:2802] in generation 1 2025-03-31T16:02:05.700503Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:02:05.701275Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:02:05.702817Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037894 2025-03-31T16:02:05.702876Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 
72075186224037894 2025-03-31T16:02:05.702940Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037894 2025-03-31T16:02:05.703283Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:02:05.703861Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:02:05.703933Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037894 persisting started state actor id [1:1419:2804] in generation 1 2025-03-31T16:02:05.716047Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:02:05.716155Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:02:05.717668Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037892 2025-03-31T16:02:05.717748Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037892 2025-03-31T16:02:05.717798Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037892 2025-03-31T16:02:05.718209Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:02:05.718374Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:02:05.718472Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037892 persisting started state actor id [1:1434:2806] in generation 1 2025-03-31T16:02:05.736630Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:1406:2399] 2025-03-31T16:02:05.736886Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:02:05.782906Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037891 actor [2:1416:2400] 2025-03-31T16:02:05.783175Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:02:05.826358Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037893 actor [2:1422:2402] 2025-03-31T16:02:05.826600Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:02:05.841283Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:02:05.841453Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:02:05.843164Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:02:05.843260Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:02:05.843342Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:02:05.843737Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:02:05.843879Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:02:05.843947Z node 2 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [2:1492:2399] in generation 1 2025-03-31T16:02:05.846995Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [2:1424:2403] 2025-03-31T16:02:05.847247Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:02:05.856505Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:02:05.856793Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:02:05.858327Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037891 2025-03-31T16:02:05.858390Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037891 2025-03-31T16:02:05.858439Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037891 2025-03-31T16:02:05.858725Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:02:05.858922Z node 
2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:02:05.858979Z node 2 :TX_DATASHARD DEBUG: DataShard 72075186224037891 persisting started state actor id [2:1501:2400] in generation 1 2025-03-31T16:02:05.862156Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:02:05.862456Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:02:05.863868Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: Q ... SessionId : ydb://session/3?node_id=5&id=ZWEwNTJiNjQtNDljZGYxYzYtZWQ0OTcyYzQtNzRiZjdkYjA=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Poll inputs 2025-03-31T16:02:41.800299Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1982:3163], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01jqpf1jytd8178nhr20fqswfm. SessionId : ydb://session/3?node_id=5&id=ZWEwNTJiNjQtNDljZGYxYzYtZWQ0OTcyYzQtNzRiZjdkYjA=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Poll sources 2025-03-31T16:02:41.800366Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1982:3163], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01jqpf1jytd8178nhr20fqswfm. SessionId : ydb://session/3?node_id=5&id=ZWEwNTJiNjQtNDljZGYxYzYtZWQ0OTcyYzQtNzRiZjdkYjA=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Resume execution, run status: Finished 2025-03-31T16:02:41.800439Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1982:3163], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01jqpf1jytd8178nhr20fqswfm. SessionId : ydb://session/3?node_id=5&id=ZWEwNTJiNjQtNDljZGYxYzYtZWQ0OTcyYzQtNzRiZjdkYjA=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. ProcessOutputsState.Inflight: 0 2025-03-31T16:02:41.800493Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1982:3163], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01jqpf1jytd8178nhr20fqswfm. SessionId : ydb://session/3?node_id=5&id=ZWEwNTJiNjQtNDljZGYxYzYtZWQ0OTcyYzQtNzRiZjdkYjA=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Do not drain channelId: 1, finished 2025-03-31T16:02:41.800567Z node 5 :KQP_COMPUTE DEBUG: TxId: 281474976715667, task: 1. Tasks execution finished, waiting for chunk delivery in output channelId: 1, seqNo: [1] 2025-03-31T16:02:41.800922Z node 5 :KQP_EXECUTER DEBUG: ActorId: [5:1979:2974] TxId: 281474976715667. Ctx: { TraceId: 01jqpf1jytd8178nhr20fqswfm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=ZWEwNTJiNjQtNDljZGYxYzYtZWQ0OTcyYzQtNzRiZjdkYjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Send TEvStreamData to [5:1648:2974], seqNo: 1, nRows: 1 2025-03-31T16:02:41.801199Z node 5 :KQP_EXECUTER DEBUG: ActorId: [5:1979:2974] TxId: 281474976715667. Ctx: { TraceId: 01jqpf1jytd8178nhr20fqswfm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=ZWEwNTJiNjQtNDljZGYxYzYtZWQ0OTcyYzQtNzRiZjdkYjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [5:1982:3163], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { CpuTimeUs: 592 Tasks { TaskId: 1 CpuTimeUs: 241 FinishTimeMs: 1743436961799 OutputRows: 1 OutputBytes: 6 ResultRows: 1 ResultBytes: 6 ComputeCpuTimeUs: 63 BuildCpuTimeUs: 178 HostName: "ghrun-tuhoqst4xe" NodeId: 5 CreateTimeMs: 1743436961798 } MaxMemoryUsage: 1048576 } 2025-03-31T16:02:41.801332Z node 5 :KQP_EXECUTER DEBUG: ActorId: [5:1979:2974] TxId: 281474976715667. 
Ctx: { TraceId: 01jqpf1jytd8178nhr20fqswfm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=ZWEwNTJiNjQtNDljZGYxYzYtZWQ0OTcyYzQtNzRiZjdkYjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [5:1982:3163], ... response 271646822 NKikimr::NKqp::TEvKqpExecuter::TEvStreamData NKikimrKqp.TEvExecuterStreamData ResultSet { columns { name: "column0" type { optional_type { item { type_id: UINT64 } } } } rows { items { uint64_value: 596400 } } } SeqNo: 1 QueryResultIndex: 0 ChannelId: 1 2025-03-31T16:02:41.801752Z node 5 :KQP_EXECUTER DEBUG: TxId: 281474976715667, send ack to channelId: 1, seqNo: 1, enough: 0, freeSpace: 100, to: [5:1983:3163] 2025-03-31T16:02:41.801880Z node 5 :KQP_COMPUTE TRACE: TxId: 281474976715667, task: 1. Received channel data ack for channelId: 1, seqNo: 1, lastSentSeqNo: 1, freeSpace: 100, early finish: 0 2025-03-31T16:02:41.801936Z node 5 :KQP_COMPUTE TRACE: TxId: 281474976715667, task: 1. PeerState, peerState:(freeSpace:100;inFlightBytes:0;inFlightCount:0;), sentSeqNo: 1, ackSeqNo: 1 2025-03-31T16:02:41.801967Z node 5 :KQP_COMPUTE TRACE: TxId: 281474976715667, task: 1. Resume compute actor 2025-03-31T16:02:41.802050Z node 5 :KQP_COMPUTE DEBUG: SelfId: [5:1982:3163], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01jqpf1jytd8178nhr20fqswfm. SessionId : ydb://session/3?node_id=5&id=ZWEwNTJiNjQtNDljZGYxYzYtZWQ0OTcyYzQtNzRiZjdkYjA=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-03-31T16:02:41.802133Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1982:3163], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01jqpf1jytd8178nhr20fqswfm. SessionId : ydb://session/3?node_id=5&id=ZWEwNTJiNjQtNDljZGYxYzYtZWQ0OTcyYzQtNzRiZjdkYjA=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Poll inputs 2025-03-31T16:02:41.802173Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1982:3163], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01jqpf1jytd8178nhr20fqswfm. SessionId : ydb://session/3?node_id=5&id=ZWEwNTJiNjQtNDljZGYxYzYtZWQ0OTcyYzQtNzRiZjdkYjA=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Poll sources 2025-03-31T16:02:41.802213Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1982:3163], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01jqpf1jytd8178nhr20fqswfm. SessionId : ydb://session/3?node_id=5&id=ZWEwNTJiNjQtNDljZGYxYzYtZWQ0OTcyYzQtNzRiZjdkYjA=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Resume execution, run status: Finished 2025-03-31T16:02:41.802246Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1982:3163], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01jqpf1jytd8178nhr20fqswfm. SessionId : ydb://session/3?node_id=5&id=ZWEwNTJiNjQtNDljZGYxYzYtZWQ0OTcyYzQtNzRiZjdkYjA=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. ProcessOutputsState.Inflight: 0 2025-03-31T16:02:41.802292Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1982:3163], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01jqpf1jytd8178nhr20fqswfm. SessionId : ydb://session/3?node_id=5&id=ZWEwNTJiNjQtNDljZGYxYzYtZWQ0OTcyYzQtNzRiZjdkYjA=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Do not drain channelId: 1, finished 2025-03-31T16:02:41.802335Z node 5 :KQP_COMPUTE DEBUG: TxId: 281474976715667, task: 1. 
Tasks execution finished 2025-03-31T16:02:41.802370Z node 5 :KQP_COMPUTE DEBUG: SelfId: [5:1982:3163], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01jqpf1jytd8178nhr20fqswfm. SessionId : ydb://session/3?node_id=5&id=ZWEwNTJiNjQtNDljZGYxYzYtZWQ0OTcyYzQtNzRiZjdkYjA=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Compute state finished. All channels and sinks finished 2025-03-31T16:02:41.802484Z node 5 :KQP_COMPUTE DEBUG: TxId: 281474976715667, task: 1. pass away 2025-03-31T16:02:41.802588Z node 5 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715667;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-03-31T16:02:41.802767Z node 5 :KQP_RESOURCE_MANAGER DEBUG: TxId: 281474976715667, taskId: 1. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2025-03-31T16:02:41.802992Z node 5 :KQP_EXECUTER DEBUG: ActorId: [5:1979:2974] TxId: 281474976715667. Ctx: { TraceId: 01jqpf1jytd8178nhr20fqswfm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=ZWEwNTJiNjQtNDljZGYxYzYtZWQ0OTcyYzQtNzRiZjdkYjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [5:1982:3163], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 2598 Tasks { TaskId: 1 CpuTimeUs: 247 FinishTimeMs: 1743436961802 OutputRows: 1 OutputBytes: 6 ResultRows: 1 ResultBytes: 6 ComputeCpuTimeUs: 69 BuildCpuTimeUs: 178 HostName: "ghrun-tuhoqst4xe" NodeId: 5 CreateTimeMs: 1743436961798 } MaxMemoryUsage: 1048576 } 2025-03-31T16:02:41.803053Z node 5 :KQP_EXECUTER INFO: TxId: 281474976715667. Ctx: { TraceId: 01jqpf1jytd8178nhr20fqswfm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=ZWEwNTJiNjQtNDljZGYxYzYtZWQ0OTcyYzQtNzRiZjdkYjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [5:1982:3163] 2025-03-31T16:02:41.804083Z node 5 :KQP_EXECUTER INFO: ActorId: [5:1979:2974] TxId: 281474976715667. Ctx: { TraceId: 01jqpf1jytd8178nhr20fqswfm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=ZWEwNTJiNjQtNDljZGYxYzYtZWQ0OTcyYzQtNzRiZjdkYjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Full stats: { CpuTimeUs: 5216 DurationUs: 1743436958269155 ExecuterCpuTimeUs: 2618 StartTimeMs: 3534 FinishTimeMs: 1743436961803 Stages { StageGuid: "f9a584e6-334f35f6-bb7fc484-4e27438a" Program: "(\n(declare %kqp%tx_result_binding_0_0 (ListType (StructType \'(\'\"column0\" (OptionalType (DataType \'Uint64))))))\n(return (lambda \'() (Iterator %kqp%tx_result_binding_0_0)))\n)\n" ComputeActors { CpuTimeUs: 2598 Tasks { TaskId: 1 CpuTimeUs: 247 FinishTimeMs: 1743436961802 OutputRows: 1 OutputBytes: 6 ResultRows: 1 ResultBytes: 6 ComputeCpuTimeUs: 69 BuildCpuTimeUs: 178 HostName: "ghrun-tuhoqst4xe" NodeId: 5 CreateTimeMs: 1743436961798 } MaxMemoryUsage: 1048576 } BaseTimeMs: 1743436961799 } TxPlansWithStats: "{\"Node Type\":\"Phase\",\"PlanNodeId\":8,\"Plans\":[{\"Node Type\":\"ResultSet_1\",\"PlanNodeId\":7,\"PlanNodeType\":\"ResultSet\",\"Plans\":[{\"CTE Name\":\"precompute_0_0\",\"Node Type\":\"ConstantExpr\",\"Operators\":[{\"Inputs\":[],\"Iterator\":\"precompute_0_0\",\"Name\":\"Iterator\"}],\"PlanNodeId\":6,\"StageGuid\":\"f9a584e6-334f35f6-bb7fc484-4e27438a\",\"Stats\":{\"BaseTimeMs\":1743436961799,\"ComputeNodes\":[{\"CpuTimeUs\":2598,\"Tasks\":[{\"ComputeTimeUs\":69,\"FinishTimeMs\":1743436961802,\"Host\":\"ghrun-tuhoqst4xe\",\"NodeId\":5,\"OutputBytes\":6,\"OutputRows\":1,\"ResultBytes\":6,\"ResultRows\":1,\"TaskId\":1}]}],\"FinishedTasks\":0,\"PhysicalStageId\":0,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"StageGuid\":\"\"}],\"StageGuid\":\"\"}" StatConvertBytes: 685 Extra { type_url: "type.googleapis.com/NKqpProto.TKqpExecutionExtraStats" value: "\022\013\010\246\024\020\246\024\030\246\024 \001" } } 2025-03-31T16:02:41.804219Z node 5 :KQP_EXECUTER DEBUG: ActorId: [5:1979:2974] TxId: 281474976715667. 
Ctx: { TraceId: 01jqpf1jytd8178nhr20fqswfm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=ZWEwNTJiNjQtNDljZGYxYzYtZWQ0OTcyYzQtNzRiZjdkYjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-03-31T16:02:41.804269Z node 5 :KQP_EXECUTER TRACE: ActorId: [5:1979:2974] TxId: 281474976715667. Ctx: { TraceId: 01jqpf1jytd8178nhr20fqswfm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=ZWEwNTJiNjQtNDljZGYxYzYtZWQ0OTcyYzQtNzRiZjdkYjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Terminate, become ZombieState 2025-03-31T16:02:41.804317Z node 5 :KQP_EXECUTER DEBUG: ActorId: [5:1979:2974] TxId: 281474976715667. Ctx: { TraceId: 01jqpf1jytd8178nhr20fqswfm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=ZWEwNTJiNjQtNDljZGYxYzYtZWQ0OTcyYzQtNzRiZjdkYjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.002598s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 ... response 271646721 NKikimr::NKqp::NPrivateEvents::TEvQueryResponse NKikimrKqp.TEvQueryResponse Response { TxMeta { } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 833 >> TKqpScanData::EmptyColumns >> TKqpScanData::EmptyColumns [GOOD] >> IncrementalBackup::E2EBackupCollection [GOOD] >> TCompactionMulti::MainPageCollectionEdgeMany [GOOD] >> TCompactionMulti::MainPageCollectionOverflow |72.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScanData::FailOnUnsupportedPgType [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TComputeScheduler::ResourceWeight [GOOD] Test command err: 510 500 1510 1500 990 1000 1000 1000 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TComputeScheduler::QueryLimits [GOOD] Test command err: 800 800 800 800 >> TCompactionMulti::MainPageCollectionOverflow [GOOD] >> TCompactionMulti::MainPageCollectionOverflowSmallRefs >> KqpExplain::SortStage [GOOD] >> KqpExplain::SqlIn >> IncrementalBackup::SimpleBackupBackupCollection+WithIncremental [GOOD] >> IncrementalBackup::SimpleBackupBackupCollection-WithIncremental >> TCompactionMulti::MainPageCollectionOverflowSmallRefs [GOOD] >> TCompactionMulti::MainPageCollectionOverflowLargeRefs [GOOD] >> TExecutorDb::RandomOps |72.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScanData::EmptyColumns [GOOD] >> KqpQuery::UdfMemoryLimit [GOOD] >> KqpQuery::TryToUpdateNonExistentColumn >> KqpQuery::SelectWhereInSubquery [GOOD] >> KqpQuery::TableSink_ReplaceDataShardDataQuery+UseSink >> TKqpScanData::DifferentNumberOfInputAndResultColumns >> KqpTypes::UnsafeTimestampCastV0 [GOOD] >> KqpTypes::UnsafeTimestampCastV1 >> TKqpScanData::DifferentNumberOfInputAndResultColumns [GOOD] >> TChargeBTreeIndex::FewNodes_Groups_History_Sticky [GOOD] >> NFwd_TFlatIndexCache::Basics [GOOD] >> NFwd_TFlatIndexCache::IndexPagesLocator [GOOD] >> NFwd_TFlatIndexCache::GetTwice [GOOD] >> NFwd_TFlatIndexCache::ForwardTwice [GOOD] >> NFwd_TFlatIndexCache::Skip_Done [GOOD] >> NFwd_TFlatIndexCache::Skip_Done_None [GOOD] >> NFwd_TFlatIndexCache::Skip_Keep [GOOD] >> NFwd_TFlatIndexCache::Skip_Wait [GOOD] >> NFwd_TFlatIndexCache::Trace [GOOD] >> NFwd_TFlatIndexCache::End [GOOD] >> NFwd_TFlatIndexCache::Slices [GOOD] >> NFwd_TLoadedPagesCircularBuffer::Basics [GOOD] >> NOther::Blocks [GOOD] >> NPage::Encoded [GOOD] >> NPage::ABI_002 ------- [TM] {asan, 
default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_backup/unittest >> IncrementalBackup::E2EBackupCollection [GOOD] Test command err: 2025-03-31T16:02:23.822746Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:02:23.823016Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:02:23.823228Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00185c/r3tmp/tmpTrhp4D/pdisk_1.dat 2025-03-31T16:02:24.226368Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:596:2520], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:02:24.226467Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:02:24.226504Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-03-31T16:02:24.226673Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:593:2518], Recipient [1:409:2404]: {TEvModifySchemeTransaction txid# 1 TabletId# 72057594046644480} 2025-03-31T16:02:24.226706Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-03-31T16:02:24.354725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2025-03-31T16:02:24.355004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:02:24.355289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-03-31T16:02:24.355559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-31T16:02:24.355642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:02:24.355768Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-31T16:02:24.356538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-03-31T16:02:24.359710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-03-31T16:02:24.359871Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-31T16:02:24.359913Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-03-31T16:02:24.360219Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-03-31T16:02:24.360265Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-03-31T16:02:24.360348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:02:24.360477Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-03-31T16:02:24.360539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:02:24.360580Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:02:24.360744Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-31T16:02:24.361353Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-31T16:02:24.361402Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-03-31T16:02:24.361590Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-03-31T16:02:24.361631Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-03-31T16:02:24.361704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:02:24.361758Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 2025-03-31T16:02:24.361803Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:02:24.361890Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-31T16:02:24.362362Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-31T16:02:24.362396Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-03-31T16:02:24.362521Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-03-31T16:02:24.362553Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-03-31T16:02:24.362611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:02:24.362651Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:02:24.362707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2025-03-31T16:02:24.362741Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-31T16:02:24.362778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:02:24.367000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:02:24.367806Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-31T16:02:24.367860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 
2025-03-31T16:02:24.368040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2025-03-31T16:02:24.369377Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877760, Sender [1:601:2525], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046316545 Status: OK ServerId: [1:603:2526] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-03-31T16:02:24.369433Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-03-31T16:02:24.369497Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Handle TEvClientConnected, tabletId: 72057594046316545, status: OK, at schemeshard: 72057594046644480 2025-03-31T16:02:24.369646Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269091328, Sender [1:405:2400], Recipient [1:409:2404]: NKikimrTx.TEvProposeTransactionStatus Status: 16 StepId: 500 TxId: 1 2025-03-31T16:02:24.370092Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:605:2528], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:02:24.370150Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:02:24.370185Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-03-31T16:02:24.370350Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [1:593:2518], Recipient [1:409:2404]: NKikimrScheme.TEvNotifyTxCompletion TxId: 1 2025-03-31T16:02:24.370387Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-03-31T16:02:24.370485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046644480 2025-03-31T16:02:24.370527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2025-03-31T16:02:24.370565Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046644480 2025-03-31T16:02:24.421032Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 273285138, Sender [1:43:2090], Recipient [1:409:2404]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } } ItemKinds: 26 ItemKinds: 34 ItemKinds: 52 ItemKinds: 54 ItemKinds: 73 Local: true } 2025-03-31T16:02:24.421163Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } 2025-03-31T16:02:24.421207Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:02:24.421682Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [RootDataErasureManager] Stop 2025-03-31T16:02:24.421756Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Send TEvConfigNotificationResponse: SubscriptionId: 0 ConfigId { } 2025-03-31T16:02:24.461441Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:02:24.461594Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:02:24.473249Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:02:24.556116Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269091328, Sender [1:405:2400], Recipient [1:409:2404]: NKikimrTx.TEvProposeTransactionStatus Status: 17 StepId: 
500 TxId: 1 2025-03-31T16:02:24.556997Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:636:2544], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:02:24.557045Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:02:24.557075Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-03-31T16:02:24.557237Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269287424, Sender [1:568:2495], Recipient [1:409:2404]: {TEvPlanStep step# 500 MediatorId# 72057594046382081 TabletID 72057594046644480} 2025-03-31T16:02:24.557268Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-31T16:02:24.557350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-31T16:02:24.557502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 0 RawX2: 0 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemes ... ode 3 :TX_DATASHARD INFO: 72075186224037892 Sending notify to schemeshard 72057594046644480 txId 281474976715668 state Ready TxInFly 0 2025-03-31T16:02:43.841685Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-03-31T16:02:43.841999Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [3:1566:3213], Recipient [3:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:02:43.842043Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:02:43.842073Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-03-31T16:02:43.842256Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269551620, Sender [3:1224:2934], Recipient [3:409:2404]: NKikimrTxDataShard.TEvSchemaChanged Source { RawX1: 1224 RawX2: 12884904822 } Origin: 72075186224037892 State: 2 TxId: 281474976715668 Step: 0 Generation: 1 2025-03-31T16:02:43.842291Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChanged 2025-03-31T16:02:43.842358Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046644480, at schemeshard: 72057594046644480, message: Source { RawX1: 1224 RawX2: 12884904822 } Origin: 72075186224037892 State: 2 TxId: 281474976715668 Step: 0 Generation: 1 2025-03-31T16:02:43.842399Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715668, tablet: 72075186224037892, partId: 1 2025-03-31T16:02:43.842522Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715668:1, at schemeshard: 72057594046644480, message: Source { RawX1: 1224 RawX2: 12884904822 } Origin: 72075186224037892 State: 2 TxId: 281474976715668 Step: 0 Generation: 1 2025-03-31T16:02:43.842563Z node 3 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715668:1 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-03-31T16:02:43.842632Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 281474976715668:1 HandleReply TEvSchemaChanged at tablet: 72057594046644480 message: Source { RawX1: 1224 RawX2: 12884904822 } Origin: 72075186224037892 State: 2 TxId: 281474976715668 Step: 0 Generation: 1 
2025-03-31T16:02:43.842690Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976715668:1, shardIdx: 72057594046644480:5, datashard: 72075186224037892, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046644480 2025-03-31T16:02:43.842723Z node 3 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715668:1, at schemeshard: 72057594046644480 2025-03-31T16:02:43.842756Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976715668:1, datashard: 72075186224037892, at schemeshard: 72057594046644480 2025-03-31T16:02:43.842791Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715668:1 129 -> 240 2025-03-31T16:02:43.843014Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TRestoreMultipleIncrementalBackups TDone, operationId: 281474976715668:1 Constructed op# SrcTablePaths: "/Root/.backups/collections/MyCollection/19700101000002Z_incremental/Table" DstTablePath: "/Root/Table" SrcPathIds { OwnerId: 72057594046644480 LocalId: 15 } 2025-03-31T16:02:43.843214Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-31T16:02:43.843777Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715668:1, at schemeshard: 72057594046644480 2025-03-31T16:02:43.843814Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-31T16:02:43.843847Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 281474976715668:1 2025-03-31T16:02:43.843916Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [3:1224:2934] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715668 at schemeshard: 72057594046644480 2025-03-31T16:02:43.844029Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715668 datashard 72075186224037892 state Ready 2025-03-31T16:02:43.844084Z node 3 :TX_DATASHARD DEBUG: 72075186224037892 Got TEvSchemaChangedResult from SS at 72075186224037892 2025-03-31T16:02:43.844275Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [3:409:2404], Recipient [3:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-03-31T16:02:43.844312Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-03-31T16:02:43.844361Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715668:1, at schemeshard: 72057594046644480 2025-03-31T16:02:43.844416Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TRestoreMultipleIncrementalBackups TDone, operationId: 281474976715668:1 ProgressState 2025-03-31T16:02:43.844561Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-31T16:02:43.844603Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715668:1 progress is 1/2 2025-03-31T16:02:43.844651Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715668 ready parts: 1/2 2025-03-31T16:02:43.844698Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715668, done: 1, blocked: 1 2025-03-31T16:02:43.844820Z node 3 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 281474976715668:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 281474976715668 Name: CopyTableBarrier }, 
at tablet# 72057594046644480 2025-03-31T16:02:43.844988Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715668:0 240 -> 240 2025-03-31T16:02:43.845211Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715668:1 progress is 1/2 2025-03-31T16:02:43.845260Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715668 ready parts: 1/2 2025-03-31T16:02:43.845311Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715668, ready parts: 1/2, is published: true 2025-03-31T16:02:43.845751Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-31T16:02:43.845789Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 281474976715668:0 2025-03-31T16:02:43.845932Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [3:409:2404], Recipient [3:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-03-31T16:02:43.845968Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-03-31T16:02:43.846037Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715668:0, at schemeshard: 72057594046644480 2025-03-31T16:02:43.846083Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715668:0 ProgressState 2025-03-31T16:02:43.846236Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-31T16:02:43.846267Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715668:0 progress is 2/2 2025-03-31T16:02:43.846296Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715668 ready parts: 2/2 2025-03-31T16:02:43.846329Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715668:0 progress is 2/2 2025-03-31T16:02:43.846356Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715668 ready parts: 2/2 2025-03-31T16:02:43.846386Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715668, ready parts: 2/2, is published: true 2025-03-31T16:02:43.846456Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:1442:3110] message: TxId: 281474976715668 2025-03-31T16:02:43.846516Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715668 ready parts: 2/2 2025-03-31T16:02:43.846574Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715668:0 2025-03-31T16:02:43.846621Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715668:0 2025-03-31T16:02:43.846789Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 16] was 4 2025-03-31T16:02:43.846837Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046644480, LocalPathId: 12] was 3 2025-03-31T16:02:43.846882Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715668:1 2025-03-31T16:02:43.846905Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715668:1 2025-03-31T16:02:43.846949Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 15] was 3 2025-03-31T16:02:43.846973Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason 
remove txstate source path for pathId [OwnerId: 72057594046644480, LocalPathId: 16] was 3 2025-03-31T16:02:43.847550Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-31T16:02:43.847654Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [3:1442:3110] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976715668 at schemeshard: 72057594046644480 2025-03-31T16:02:43.848057Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [3:1449:3116], Recipient [3:409:2404]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-03-31T16:02:43.848093Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-03-31T16:02:43.848125Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046644480 2025-03-31T16:02:43.862893Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [3:1566:3213], Recipient [3:409:2404]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-03-31T16:02:43.862989Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-03-31T16:02:43.863045Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046644480 2025-03-31T16:02:43.961816Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:409:2404]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:02:43.961893Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:02:43.961977Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:409:2404], Recipient [3:409:2404]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:02:43.962006Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:02:44.282154Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jqpf1r8a3kb5wtdmx7yvz8jt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=N2QyNDVhN2ItYWM1MTdlMjQtNTc1Yzk0ZjctNjQ3Y2ZlMjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 2 } items { uint32_value: 200 } }, { items { uint32_value: 3 } items { uint32_value: 30 } } |72.8%| [TA] $(B)/ydb/core/tx/datashard/ut_kqp_scan/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TKqpScanData::EmptyColumnsAndNonEmptyArrowBatch >> NPage::ABI_002 [GOOD] >> NPage::GroupIdEncoding [GOOD] >> NPageCollection::Align [GOOD] >> NPageCollection::Meta >> TKqpScanData::EmptyColumnsAndNonEmptyArrowBatch [GOOD] |72.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScanData::DifferentNumberOfInputAndResultColumns [GOOD] |72.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> NPageCollection::Meta [GOOD] >> NPageCollection::PagesToBlobsConverter [GOOD] >> NPageCollection::Grow [GOOD] >> NPageCollection::Groups [GOOD] >> NPageCollection::Chop [GOOD] >> NPageCollection::CookieAllocator [GOOD] >> NProto::LargeGlobId [GOOD] >> Redo::ABI_008 [GOOD] >> Self::Literals [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestYmqHttpProxy::TestTagQueueMultipleQueriesInflight [GOOD] Test command err: 2025-03-31T16:01:41.096460Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004476443370916:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:01:41.096535Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0010bc/r3tmp/tmpq3FmBb/pdisk_1.dat 2025-03-31T16:01:41.669864Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:01:41.669959Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:01:41.671721Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:01:41.802630Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1677, node 1 2025-03-31T16:01:41.993579Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:01:41.993601Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:01:41.993606Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:01:41.993710Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9095 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:01:42.591517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:42.624286Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 TClient is connected to server localhost:9095 2025-03-31T16:01:42.925807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:42.940058Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-31T16:01:42.948831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:42.969003Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-03-31T16:01:42.975411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:43.158981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:01:43.243552Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710663, at schemeshard: 72057594046644480 2025-03-31T16:01:43.253164Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:01:43.343412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:01:43.417101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:01:43.525284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:43.590821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:43.667760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:43.745670Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:01:43.821317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:45.901791Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004493623241537:2376], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:01:45.901918Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:01:45.902336Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004493623241549:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:01:45.906428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480 2025-03-31T16:01:45.921789Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710673, at schemeshard: 72057594046644480 2025-03-31T16:01:45.922058Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488004493623241551:2380], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2025-03-31T16:01:45.986264Z node 1 :TX_PROXY ERROR: Actor# [1:7488004493623241602:2919] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 18], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:01:46.094850Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488004476443370916:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:01:46.094929Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:01:46.332091Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01jqpezzd80e7eswrnpb8qb6mr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWY5NGMwNzMtMjViN2NmZDktNWU1OWI3OTMtOTIwOGEzYjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root waiting... 2025-03-31T16:01:46.403984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-31T16:01:46.464397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:46.544203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:46.597632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:46.644606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:46.687514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:46.742972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:46.788942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:46.828965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:01:46.929518Z node 1 :HTTP INFO: Listening on http://127.0.0.1:15010 2025-03-31T16:01:47.952658Z node 1 :SQS INFO: Start SQS proxy service actor 2025-03-31T16:01:47.953420Z node 1 :HTTP INFO: Listening on http://[::]:3360 2025-03-31T16:01:47.953919Z node 1 :SQS INFO: Start SQS service actor 2025-03-31T16:01:47.954043Z node 1 :SQS DEBUG: SQS service config: { EnableSqs: true YandexCloudMode: false EnableDeadLetterQueues: true } 2025-03-31T16:01:47.955300Z node 1 :SQS DEBUG: Enable scheme board scheme cache 2025-03-31T16:01:47.987292Z node 1 :SQS INFO: Request SQS users list 2025-03-31T16:01:47.987337Z node 1 :SQS DEBUG: Request SQS queues list 2025-03-31T16:01:47.992030Z node 1 :SQS DEBUG: Request [] Starting executor actor for query(idx=GET_QUEUES_LIST_ID). Mode ... request [ListQueueTags] url [/Root] database [/Root] requestId: 40fb1a24-7fd3ee0a-9aebbac0-2267b04f 2025-03-31T16:02:44.293513Z node 7 :HTTP_PROXY INFO: http request [ListQueueTags] requestId [40fb1a24-7fd3ee0a-9aebbac0-2267b04f] got new request from [d868:4e00:6050:0:c068:4e00:6050:0] 2025-03-31T16:02:44.293970Z node 7 :HTTP_PROXY DEBUG: http request [ListQueueTags] requestId [40fb1a24-7fd3ee0a-9aebbac0-2267b04f] Got cloud auth response. FolderId: folder4 CloudId: cloud4 UserSid: fake_user_sid@as 2025-03-31T16:02:44.293989Z node 7 :HTTP_PROXY INFO: http request [ListQueueTags] requestId [40fb1a24-7fd3ee0a-9aebbac0-2267b04f] sending grpc request to '' database: '/Root' iam token size: 0 2025-03-31T16:02:44.294098Z node 7 :SQS DEBUG: Got new request in YMQ proxy. FolderId: folder4, CloudId: cloud4, UserSid: fake_user_sid@as, RequestId: 40fb1a24-7fd3ee0a-9aebbac0-2267b04f 2025-03-31T16:02:44.294198Z node 7 :SQS DEBUG: Request [40fb1a24-7fd3ee0a-9aebbac0-2267b04f] Proxy actor: used user_name='cloud4', queue_name='000000000000000301v0', folder_id='folder4' 2025-03-31T16:02:44.294209Z node 7 :SQS DEBUG: Request [40fb1a24-7fd3ee0a-9aebbac0-2267b04f] Request proxy started 2025-03-31T16:02:44.294295Z node 7 :SQS DEBUG: Request [40fb1a24-7fd3ee0a-9aebbac0-2267b04f] Answer configuration for queue [cloud4/000000000000000301v0] without leader 2025-03-31T16:02:44.295011Z node 7 :SQS DEBUG: Request [40fb1a24-7fd3ee0a-9aebbac0-2267b04f] Get configuration duration: 0ms 2025-03-31T16:02:44.295151Z node 7 :SQS DEBUG: Request [40fb1a24-7fd3ee0a-9aebbac0-2267b04f] Send get leader node request to sqs service for cloud4/000000000000000301v0 2025-03-31T16:02:44.295211Z node 7 :SQS DEBUG: Request [40fb1a24-7fd3ee0a-9aebbac0-2267b04f] Leader node for queue [cloud4/000000000000000301v0] is 7 2025-03-31T16:02:44.295246Z node 7 :SQS DEBUG: Request [40fb1a24-7fd3ee0a-9aebbac0-2267b04f] Got leader node for queue response. Node id: 7. Status: 0 2025-03-31T16:02:44.295351Z node 7 :SQS TRACE: Request [40fb1a24-7fd3ee0a-9aebbac0-2267b04f] Sending request from proxy to leader node 7: ListQueueTags { Auth { UserName: "cloud4" FolderId: "folder4" UserSID: "fake_user_sid@as" } QueueName: "000000000000000301v0" } RequestId: "40fb1a24-7fd3ee0a-9aebbac0-2267b04f" 2025-03-31T16:02:44.295426Z node 7 :SQS DEBUG: Request [40fb1a24-7fd3ee0a-9aebbac0-2267b04f] Received Sqs Request: ListQueueTags { Auth { UserName: "cloud4" FolderId: "folder4" UserSID: "fake_user_sid@as" } QueueName: "000000000000000301v0" } RequestId: "40fb1a24-7fd3ee0a-9aebbac0-2267b04f" 2025-03-31T16:02:44.295471Z node 7 :SQS DEBUG: Request [40fb1a24-7fd3ee0a-9aebbac0-2267b04f] Request started. 
Actor: [7:7488004744635655701:5526] 2025-03-31T16:02:44.295516Z node 7 :SQS TRACE: Inc local leader ref for actor [7:7488004744635655701:5526] 2025-03-31T16:02:44.295533Z node 7 :SQS DEBUG: Request [40fb1a24-7fd3ee0a-9aebbac0-2267b04f] Forward configuration request to queue [cloud4/000000000000000301v0] leader Http output full {} 2025-03-31T16:02:44.299975Z node 7 :SQS TRACE: Request [e8d2c30e-53a21534-f8588f55-53ca79a7] Query(idx=INTERNAL_GET_QUEUE_ATTRIBUTES_ID) Queue [cloud4/000000000000000301v0] HandleResponse { Status: 48 TxId: 281474976715925 Step: 1743436964339 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "attrs" Type { Kind: Optional Optional { Item { Kind: Optional Optional { Item { Kind: Struct Struct { Member { Name: "ContentBasedDeduplication" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "DelaySeconds" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "DlqArn" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "MaxReceiveCount" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "MaximumMessageSize" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "MessageRetentionPeriod" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "ReceiveMessageWaitTime" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "ShowDetailedCountersDeadline" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "VisibilityTimeout" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "queueExists" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "tags" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } Value { Struct { Optional { Optional { Struct { Optional { Bool: false } } Struct { Optional { Uint64: 0 } } Struct { Optional { Text: "" } } Struct { Optional { Text: "" } } Struct { Optional { Bool: true } } Struct { Optional { Uint64: 0 } } Struct { Optional { Uint64: 262144 } } Struct { Optional { Uint64: 345600000 } } Struct { Optional { Uint64: 0 } } Struct { } Struct { Optional { Uint64: 30000 } } } } } Struct { Optional { Bool: true } } Struct { Optional { Text: "{}" } } } } } 2025-03-31T16:02:44.300060Z node 7 :SQS DEBUG: Request [e8d2c30e-53a21534-f8588f55-53ca79a7] Query(idx=INTERNAL_GET_QUEUE_ATTRIBUTES_ID) Queue [cloud4/000000000000000301v0] Attempt 1 execution duration: 10ms 2025-03-31T16:02:44.300598Z node 7 :SQS TRACE: Request [e8d2c30e-53a21534-f8588f55-53ca79a7] Query(idx=INTERNAL_GET_QUEUE_ATTRIBUTES_ID) Queue [cloud4/000000000000000301v0] Sending mkql execution result: { Status: 48 TxId: 281474976715925 Step: 1743436964339 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "attrs" Type { Kind: Optional Optional { Item { Kind: Optional Optional { Item { Kind: Struct Struct { Member { Name: 
"ContentBasedDeduplication" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "DelaySeconds" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "DlqArn" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "MaxReceiveCount" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "MaximumMessageSize" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "MessageRetentionPeriod" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "ReceiveMessageWaitTime" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "ShowDetailedCountersDeadline" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "VisibilityTimeout" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "queueExists" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "tags" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } Value { Struct { Optional { Optional { Struct { Optional { Bool: false } } Struct { Optional { Uint64: 0 } } Struct { Optional { Text: "" } } Struct { Optional { Text: "" } } Struct { Optional { Bool: true } } Struct { Optional { Uint64: 0 } } Struct { Optional { Uint64: 262144 } } Struct { Optional { Uint64: 345600000 } } Struct { Optional { Uint64: 0 } } Struct { } Struct { Optional { Uint64: 30000 } } } } } Struct { Optional { Bool: true } } Struct { Optional { Text: "{}" } } } } } 2025-03-31T16:02:44.300705Z node 7 :SQS TRACE: Request [e8d2c30e-53a21534-f8588f55-53ca79a7] Query(idx=INTERNAL_GET_QUEUE_ATTRIBUTES_ID) Queue [cloud4/000000000000000301v0] Minikql data response: {"attrs": {"ContentBasedDeduplication": false, "DelaySeconds": 0, "DlqArn": "", "DlqName": "", "FifoQueue": true, "MaxReceiveCount": 0, "MaximumMessageSize": 262144, "MessageRetentionPeriod": 345600000, "ReceiveMessageWaitTime": 0, "ShowDetailedCountersDeadline": null, "VisibilityTimeout": 30000}, "queueExists": true, "tags": "{}"} 2025-03-31T16:02:44.300872Z node 7 :SQS DEBUG: Request [e8d2c30e-53a21534-f8588f55-53ca79a7] Query(idx=INTERNAL_GET_QUEUE_ATTRIBUTES_ID) Queue [cloud4/000000000000000301v0] execution duration: 13ms 2025-03-31T16:02:44.301020Z node 7 :SQS DEBUG: Request [e8d2c30e-53a21534-f8588f55-53ca79a7] Sending executed reply 2025-03-31T16:02:44.301331Z node 7 :SQS DEBUG: Request [40fb1a24-7fd3ee0a-9aebbac0-2267b04f] Get configuration duration: 5ms 2025-03-31T16:02:44.301351Z node 7 :SQS TRACE: Request [40fb1a24-7fd3ee0a-9aebbac0-2267b04f] Got configuration. 
Root url: http://ghrun-tuhoqst4xe.auto.internal:8771, Shards: 1, Fail: 0 2025-03-31T16:02:44.301371Z node 7 :SQS TRACE: Request [40fb1a24-7fd3ee0a-9aebbac0-2267b04f] DoRoutine 2025-03-31T16:02:44.301433Z node 7 :SQS TRACE: Request [40fb1a24-7fd3ee0a-9aebbac0-2267b04f] SendReplyAndDie from action actor { ListQueueTags { RequestId: "40fb1a24-7fd3ee0a-9aebbac0-2267b04f" } } 2025-03-31T16:02:44.301503Z node 7 :SQS TRACE: Request [40fb1a24-7fd3ee0a-9aebbac0-2267b04f] Sending sqs response: { ListQueueTags { RequestId: "40fb1a24-7fd3ee0a-9aebbac0-2267b04f" } RequestId: "40fb1a24-7fd3ee0a-9aebbac0-2267b04f" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true } 2025-03-31T16:02:44.301573Z node 7 :SQS TRACE: Dec local leader ref for actor [7:7488004744635655701:5526]. Found: 1 2025-03-31T16:02:44.301631Z node 7 :SQS TRACE: HandleSqsResponse ListQueueTags { RequestId: "40fb1a24-7fd3ee0a-9aebbac0-2267b04f" } RequestId: "40fb1a24-7fd3ee0a-9aebbac0-2267b04f" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true 2025-03-31T16:02:44.301680Z node 7 :SQS TRACE: Sending answer to proxy actor [7:7488004744635655699:2775]: ListQueueTags { RequestId: "40fb1a24-7fd3ee0a-9aebbac0-2267b04f" } RequestId: "40fb1a24-7fd3ee0a-9aebbac0-2267b04f" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true 2025-03-31T16:02:44.301972Z node 7 :SQS TRACE: Request [40fb1a24-7fd3ee0a-9aebbac0-2267b04f] HandleResponse: { ListQueueTags { RequestId: "40fb1a24-7fd3ee0a-9aebbac0-2267b04f" } RequestId: "40fb1a24-7fd3ee0a-9aebbac0-2267b04f" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true }, status: OK 2025-03-31T16:02:44.302032Z node 7 :SQS DEBUG: Request [40fb1a24-7fd3ee0a-9aebbac0-2267b04f] Sending reply from proxy actor: { ListQueueTags { RequestId: "40fb1a24-7fd3ee0a-9aebbac0-2267b04f" } RequestId: "40fb1a24-7fd3ee0a-9aebbac0-2267b04f" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true } 2025-03-31T16:02:44.302237Z node 7 :HTTP_PROXY DEBUG: http request [ListQueueTags] requestId [40fb1a24-7fd3ee0a-9aebbac0-2267b04f] Got successful GRPC response. 2025-03-31T16:02:44.302282Z node 7 :HTTP_PROXY INFO: http request [ListQueueTags] requestId [40fb1a24-7fd3ee0a-9aebbac0-2267b04f] reply ok 2025-03-31T16:02:44.302372Z node 7 :HTTP_PROXY DEBUG: http request [ListQueueTags] requestId [40fb1a24-7fd3ee0a-9aebbac0-2267b04f] Send metering event. 
HttpStatusCode: 200 IsFifo: 1 FolderId: folder4 RequestSizeInBytes: 530 ResponseSizeInBytes: 179 SourceAddress: d868:4e00:6050:0:c068:4e00:6050:0 ResourceId: 000000000000000301v0 Action: ListQueueTags 2025-03-31T16:02:44.302473Z node 7 :HTTP DEBUG: (#37,[::1]:45250) <- (200 ) 2025-03-31T16:02:44.302567Z node 7 :HTTP DEBUG: (#37,[::1]:45250) connection closed >> THiveTest::TestCheckSubHiveMigrationManyTablets [GOOD] >> THiveTest::PipeAlivenessOfDeadTablet >> KqpStats::OneShardLocalExec-UseSink |72.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScanData::EmptyColumnsAndNonEmptyArrowBatch [GOOD] >> TReplicationTests::Create >> TReplicationTests::CreateSequential >> BuildStatsHistogram::Single_History [GOOD] >> BuildStatsHistogram::Single_History_Slices >> TIterator::SerialReverse [GOOD] >> TIterator::GetKey >> TIterator::GetKey [GOOD] >> TIterator::GetKeyWithEraseCache [GOOD] >> TIterator::GetKeyWithVersionSkips [GOOD] >> TLegacy::IndexIter >> TOlap::StoreStatsQuota [GOOD] >> TestKinesisHttpProxy::TestEmptyHttpBody [GOOD] >> TestKinesisHttpProxy::ErroneousRequestGetRecords [GOOD] |72.8%| [TA] $(B)/ydb/core/kqp/runtime/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TLegacy::IndexIter [GOOD] >> TLegacy::ScreenedIndexIter [GOOD] >> TLegacy::StatsIter >> TestYmqHttpProxy::TestSendMessageBatch [GOOD] >> TLegacy::StatsIter [GOOD] >> TPageHandleTest::Uninitialized [GOOD] >> TPageHandleTest::NormalUse [GOOD] >> TPageHandleTest::HandleRef [GOOD] >> TPageHandleTest::PinnedRef [GOOD] >> TPageHandleTest::PinnedRefPure [GOOD] >> TPart::State [GOOD] >> TPart::Trivials [GOOD] >> TPart::Basics [GOOD] >> TPart::BasicColumnGroups [GOOD] >> TPart::CellDefaults [GOOD] >> TPart::Matter [GOOD] >> TPart::External [GOOD] >> TPart::Outer [GOOD] >> TPart::MassCheck >> KqpStats::JoinNoStatsScan ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> Self::Literals [GOOD] Test command err: + BTreeIndex{PageId: 0 RowCount: 1155 DataSize: 11055 GroupDataSize: 22055 ErasedRowCount: 385, 13 rev 1, 683b} | PageId: 10000 RowCount: 100 DataSize: 1000 GroupDataSize: 2000 ErasedRowCount: 30 | > {0, a, false, 0} | PageId: 10001 RowCount: 201 DataSize: 2001 GroupDataSize: 4001 ErasedRowCount: 61 | > {1, b, true, 10} | PageId: 10002 RowCount: 303 DataSize: 3003 GroupDataSize: 6003 ErasedRowCount: 93 | > {2, c, false, 20} | PageId: 10003 RowCount: 406 DataSize: 4006 GroupDataSize: 8006 ErasedRowCount: 126 | > {3, d, true, 30} | PageId: 10004 RowCount: 510 DataSize: 5010 GroupDataSize: 10010 ErasedRowCount: 160 | > {4, e, false, 40} | PageId: 10005 RowCount: 615 DataSize: 6015 GroupDataSize: 12015 ErasedRowCount: 195 | > {5, f, true, 50} | PageId: 10006 RowCount: 721 DataSize: 7021 GroupDataSize: 14021 ErasedRowCount: 231 | > {6, g, false, 60} | PageId: 10007 RowCount: 828 DataSize: 8028 GroupDataSize: 16028 ErasedRowCount: 268 | > {7, h, true, 70} | PageId: 10008 RowCount: 936 DataSize: 9036 GroupDataSize: 18036 ErasedRowCount: 306 | > {8, i, false, 80} | PageId: 10009 RowCount: 1045 DataSize: 10045 GroupDataSize: 20045 ErasedRowCount: 345 | > {9, j, true, 90} | PageId: 10010 RowCount: 1155 DataSize: 11055 GroupDataSize: 22055 ErasedRowCount: 385 + BTreeIndex{PageId: 9 RowCount: 2310 DataSize: 21210 GroupDataSize: 42210 ErasedRowCount: 840, 13 rev 1, 116b} | + BTreeIndex{PageId: 5 RowCount: 936 DataSize: 9036 GroupDataSize: 18036 ErasedRowCount: 306, 13 rev 1, 179b} | | + BTreeIndex{PageId: 0 RowCount: 303 DataSize: 3003 GroupDataSize: 6003 
ErasedRowCount: 93, 13 rev 1, 179b} | | | PageId: 10000 RowCount: 100 DataSize: 1000 GroupDataSize: 2000 ErasedRowCount: 30 | | | > {0, a, false, 0} | | | PageId: 10001 RowCount: 201 DataSize: 2001 GroupDataSize: 4001 ErasedRowCount: 61 | | | > {1, b, true, 10} | | | PageId: 10002 RowCount: 303 DataSize: 3003 GroupDataSize: 6003 ErasedRowCount: 93 | | > {2, c, false, 20} | | + BTreeIndex{PageId: 1 RowCount: 615 DataSize: 6015 GroupDataSize: 12015 ErasedRowCount: 195, 13 rev 1, 179b} | | | PageId: 10003 RowCount: 406 DataSize: 4006 GroupDataSize: 8006 ErasedRowCount: 126 | | | > {3, d, true, 30} | | | PageId: 10004 RowCount: 510 DataSize: 5010 GroupDataSize: 10010 ErasedRowCount: 160 | | | > {4, e, false, 40} | | | PageId: 10005 RowCount: 615 DataSize: 6015 GroupDataSize: 12015 ErasedRowCount: 195 | | > {5, f, true, 50} | | + BTreeIndex{PageId: 2 RowCount: 936 DataSize: 9036 GroupDataSize: 18036 ErasedRowCount: 306, 13 rev 1, 179b} | | | PageId: 10006 RowCount: 721 DataSize: 7021 GroupDataSize: 14021 ErasedRowCount: 231 | | | > {6, g, false, 60} | | | PageId: 10007 RowCount: 828 DataSize: 8028 GroupDataSize: 16028 ErasedRowCount: 268 | | | > {7, h, true, 70} | | | PageId: 10008 RowCount: 936 DataSize: 9036 GroupDataSize: 18036 ErasedRowCount: 306 | > {8, i, false, 80} | + BTreeIndex{PageId: 8 RowCount: 2310 DataSize: 21210 GroupDataSize: 42210 ErasedRowCount: 840, 13 rev 1, 242b} | | + BTreeIndex{PageId: 3 RowCount: 1266 DataSize: 12066 GroupDataSize: 24066 ErasedRowCount: 426, 13 rev 1, 179b} | | | PageId: 10009 RowCount: 1045 DataSize: 10045 GroupDataSize: 20045 ErasedRowCount: 345 | | | > {9, j, true, 90} | | | PageId: 10010 RowCount: 1155 DataSize: 11055 GroupDataSize: 22055 ErasedRowCount: 385 | | | > {10, k, false, 100} | | | PageId: 10011 RowCount: 1266 DataSize: 12066 GroupDataSize: 24066 ErasedRowCount: 426 | | > {11, l, true, 110} | | + BTreeIndex{PageId: 4 RowCount: 1605 DataSize: 15105 GroupDataSize: 30105 ErasedRowCount: 555, 13 rev 1, 179b} | | | PageId: 10012 RowCount: 1378 DataSize: 13078 GroupDataSize: 26078 ErasedRowCount: 468 | | | > {12, m, false, 120} | | | PageId: 10013 RowCount: 1491 DataSize: 14091 GroupDataSize: 28091 ErasedRowCount: 511 | | | > {13, n, true, 130} | | | PageId: 10014 RowCount: 1605 DataSize: 15105 GroupDataSize: 30105 ErasedRowCount: 555 | | > {14, o, false, 140} | | + BTreeIndex{PageId: 6 RowCount: 1953 DataSize: 18153 GroupDataSize: 36153 ErasedRowCount: 693, 13 rev 1, 179b} | | | PageId: 10015 RowCount: 1720 DataSize: 16120 GroupDataSize: 32120 ErasedRowCount: 600 | | | > {15, p, true, 150} | | | PageId: 10016 RowCount: 1836 DataSize: 17136 GroupDataSize: 34136 ErasedRowCount: 646 | | | > {16, q, false, 160} | | | PageId: 10017 RowCount: 1953 DataSize: 18153 GroupDataSize: 36153 ErasedRowCount: 693 | | > {17, r, true, 170} | | + BTreeIndex{PageId: 7 RowCount: 2310 DataSize: 21210 GroupDataSize: 42210 ErasedRowCount: 840, 13 rev 1, 179b} | | | PageId: 10018 RowCount: 2071 DataSize: 19171 GroupDataSize: 38171 ErasedRowCount: 741 | | | > {18, s, false, 180} | | | PageId: 10019 RowCount: 2190 DataSize: 20190 GroupDataSize: 40190 ErasedRowCount: 790 | | | > {19, t, true, 190} | | | PageId: 10020 RowCount: 2310 DataSize: 21210 GroupDataSize: 42210 ErasedRowCount: 840 + BTreeIndex{PageId: 15 RowCount: 15150 DataSize: 106050 GroupDataSize: 207050 ErasedRowCount: 8080, 13 rev 1, 174b} | + BTreeIndex{PageId: 12 RowCount: 9078 DataSize: 70278 GroupDataSize: 138278 ErasedRowCount: 4318, 13 rev 1, 690b} | | + BTreeIndex{PageId: 0 RowCount: 1266 DataSize: 
12066 GroupDataSize: 24066 ErasedRowCount: 426, 13 rev 1, 702b} | | | PageId: 10000 RowCount: 100 DataSize: 1000 GroupDataSize: 2000 ErasedRowCount: 30 | | | > {0, x, NULL, NULL} | | | PageId: 10001 RowCount: 201 DataSize: 2001 GroupDataSize: 4001 ErasedRowCount: 61 | | | > {1, xx, NULL, NULL} | | | PageId: 10002 RowCount: 303 DataSize: 3003 GroupDataSize: 6003 ErasedRowCount: 93 | | | > {2, xxx, NULL, NULL} | | | PageId: 10003 RowCount: 406 DataSize: 4006 GroupDataSize: 8006 ErasedRowCount: 126 | | | > {3, xxxx, NULL, NULL} | | | PageId: 10004 RowCount: 510 DataSize: 5010 GroupDataSize: 10010 ErasedRowCount: 160 | | | > {4, xxxxx, NULL, NULL} | | | PageId: 10005 RowCount: 615 DataSize: 6015 GroupDataSize: 12015 ErasedRowCount: 195 | | | > {5, xxxxxx, NULL, NULL} | | | PageId: 10006 RowCount: 721 DataSize: 7021 GroupDataSize: 14021 ErasedRowCount: 231 | | | > {6, xxxxxxx, NULL, NULL} | | | PageId: 10007 RowCount: 828 DataSize: 8028 GroupDataSize: 16028 ErasedRowCount: 268 | | | > {7, xxxxxxxx, NULL, NULL} | | | PageId: 10008 RowCount: 936 DataSize: 9036 GroupDataSize: 18036 ErasedRowCount: 306 | | | > {8, xxxxxxxxx, NULL, NULL} | | | PageId: 10009 RowCount: 1045 DataSize: 10045 GroupDataSize: 20045 ErasedRowCount: 345 | | | > {9, xxxxxxxxxx, NULL, NULL} | | | PageId: 10010 RowCount: 1155 DataSize: 11055 GroupDataSize: 22055 ErasedRowCount: 385 | | | > {10, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10011 RowCount: 1266 DataSize: 12066 GroupDataSize: 24066 ErasedRowCount: 426 | | > {11, xxxxxxxxxx.., NULL, NULL} | | + BTreeIndex{PageId: 1 RowCount: 2431 DataSize: 22231 GroupDataSize: 44231 ErasedRowCount: 891, 13 rev 1, 683b} | | | PageId: 10012 RowCount: 1378 DataSize: 13078 GroupDataSize: 26078 ErasedRowCount: 468 | | | > {12, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10013 RowCount: 1491 DataSize: 14091 GroupDataSize: 28091 ErasedRowCount: 511 | | | > {13, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10014 RowCount: 1605 DataSize: 15105 GroupDataSize: 30105 ErasedRowCount: 555 | | | > {14, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10015 RowCount: 1720 DataSize: 16120 GroupDataSize: 32120 ErasedRowCount: 600 | | | > {15, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10016 RowCount: 1836 DataSize: 17136 GroupDataSize: 34136 ErasedRowCount: 646 | | | > {16, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10017 RowCount: 1953 DataSize: 18153 GroupDataSize: 36153 ErasedRowCount: 693 | | | > {17, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10018 RowCount: 2071 DataSize: 19171 GroupDataSize: 38171 ErasedRowCount: 741 | | | > {18, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10019 RowCount: 2190 DataSize: 20190 GroupDataSize: 40190 ErasedRowCount: 790 | | | > {19, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10020 RowCount: 2310 DataSize: 21210 GroupDataSize: 42210 ErasedRowCount: 840 | | | > {20, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10021 RowCount: 2431 DataSize: 22231 GroupDataSize: 44231 ErasedRowCount: 891 | | > {21, xxxxxxxxxx.., NULL, NULL} | | + BTreeIndex{PageId: 2 RowCount: 3565 DataSize: 31465 GroupDataSize: 62465 ErasedRowCount: 1395, 13 rev 1, 689b} | | | PageId: 10022 RowCount: 2553 DataSize: 23253 GroupDataSize: 46253 ErasedRowCount: 943 | | | > {22, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10023 RowCount: 2676 DataSize: 24276 GroupDataSize: 48276 ErasedRowCount: 996 | | | > {23, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10024 RowCount: 2800 DataSize: 25300 GroupDataSize: 50300 ErasedRowCount: 1050 | | | > {24, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10025 RowCount: 2925 DataSize: 26325 GroupDataSize: 52325 
ErasedRowCount: 1105 | | | > {25, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10026 RowCount: 3051 DataSize: 27351 GroupDataSize: 54351 ErasedRowCount: 1161 | | | > {26, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10027 RowCount: 3178 DataSize: 28378 GroupDataSize: 56378 ErasedRowCount: 1218 | | | > {27, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10028 RowCount: 3306 DataSize: 29406 GroupDataSize: 58406 ErasedRowCount: 1276 | | | > {28, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10029 RowCount: 3435 DataSize: 30435 GroupDataSize: 60435 ErasedRowCount: 1335 | | | > {29, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10030 RowCount: 3565 DataSize: 31465 GroupDataSize: 62465 ErasedRowCount: 1395 | | > {30, xxxxxxxxxx.., NULL, NULL} | | + BTreeIndex{PageId: 3 RowCount: 4641 DataSize: 39741 GroupDataSize: 78741 ErasedRowCount: 1911, 13 rev 1, 669b} | | | PageId: 10031 RowCount: 3696 DataSize: 32496 GroupDataSize: 64496 ErasedRowCount: 1456 | | | > {31, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10032 RowCount: 3828 DataSize: 33528 GroupDataSize: 66528 ErasedRowCount: 1518 | | | > {32, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10033 RowCount: 3961 DataSize: 34561 GroupDataSize: 68561 ErasedRowCount: 1581 | | | > {33, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10034 RowCount: 4095 DataSize: 35595 GroupDataSize: 70595 ErasedRowCount: 1645 | | | > {34, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10035 RowCount: 4230 DataSize: 36630 GroupDataSize: 72630 ErasedRowCount: 1710 | | | > {35, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10036 RowCount: 4366 DataSize: 37666 GroupDataSize: 74666 ErasedRowCount: 1776 | | | > {36, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10037 RowCount: 4503 DataSize: 38703 GroupDataSize: 76703 ErasedRowCount: 1843 | | | > {37, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10038 RowCount: 4641 DataSize: 39741 GroupDataSize: 78741 ErasedRowCount: 1911 | | > {38, xxxxxxxxxx.., NULL, NULL} | | + BTreeIndex{PageId: 4 RowCount: 5781 DataSize: 48081 GroupDataSize: 95081 ErasedRowCount: 2491, 13 rev 1, 725b} | | | PageId: 10039 RowCount: 4780 DataSize: 40780 GroupDataSize: 80780 ErasedRowCount: 1980 | | | > {39, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10040 RowCount: 4920 DataSize: 41820 GroupDataSize: 82820 ErasedRowCount: 2050 | | | > {40, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10041 RowCount: 5061 DataSize: 42861 GroupDataSize: 84861 ErasedRowCount: 2121 | | | > {41, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10042 RowCount: 5203 DataSize: 43903 GroupDataSize: 86903 ErasedRowCount: 2193 | | | > {42, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10043 RowCount: 5346 DataSize: 44946 GroupDataSize: 88946 ErasedRowCount: 2266 | | | > {43, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10044 RowCount: 5490 DataSize: 45990 GroupDataSize: 90990 ErasedRowCount: 2340 | | | > {44, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10045 RowCount: 5635 DataSize: 47035 GroupDataSize: 93035 ErasedRowCount: 2415 | | | > {45, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10046 RowCount: 5781 DataSize: 48081 GroupDataSize: 95081 ErasedRowCount: 2491 | | > {46, xxxxxxxxxx.., NULL, NULL} | | + BTreeIndex{PageId: 5 RowCount: 6831 DataSize: 55431 GroupDataSize: 109431 ErasedRowCount: 3051, 13 ... 
3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: {6} {Set 1 Uint32 : 600} | ERowOp 1: {7} {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: {8} {Set 1 Uint32 : 800} | ERowOp 1: {9} {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: {10} {Set 1 Uint32 : 1000} | ERowOp 1: {11} {Set 1 Uint32 : 1100} + Rows{6} Label{64 rev 1, 50b}, [12, +2)row | ERowOp 1: {12} {Set 1 Uint32 : 1200} | ERowOp 1: {13} {Set 1 Uint32 : 1300} + Rows{7} Label{74 rev 1, 50b}, [14, +2)row | ERowOp 1: {14} {Set 1 Uint32 : 1400} | ERowOp 1: {15} {Set 1 Uint32 : 1500} + Rows{8} Label{84 rev 1, 50b}, [16, +2)row | ERowOp 1: {16} {Set 1 Uint32 : 1600} | ERowOp 1: {17} {Set 1 Uint32 : 1700} + Rows{9} Label{94 rev 1, 50b}, [18, +2)row | ERowOp 1: {18} {Set 1 Uint32 : 1800} | ERowOp 1: {19} {Set 1 Uint32 : 1900} + Rows{10} Label{104 rev 1, 50b}, [20, +2)row | ERowOp 1: {20} {Set 1 Uint32 : 2000} | ERowOp 1: {21} {Set 1 Uint32 : 2100} + Rows{11} Label{114 rev 1, 50b}, [22, +2)row | ERowOp 1: {22} {Set 1 Uint32 : 2200} | ERowOp 1: {23} {Set 1 Uint32 : 2300} + Rows{12} Label{124 rev 1, 50b}, [24, +2)row | ERowOp 1: {24} {Set 1 Uint32 : 2400} | ERowOp 1: {25} {Set 1 Uint32 : 2500} + Rows{13} Label{134 rev 1, 50b}, [26, +2)row | ERowOp 1: {26} {Set 1 Uint32 : 2600} | ERowOp 1: {27} {Set 1 Uint32 : 2700} + Rows{14} Label{144 rev 1, 50b}, [28, +2)row | ERowOp 1: {28} {Set 1 Uint32 : 2800} | ERowOp 1: {29} {Set 1 Uint32 : 2900} + Rows{15} Label{154 rev 1, 50b}, [30, +2)row | ERowOp 1: {30} {Set 1 Uint32 : 3000} | ERowOp 1: {31} {Set 1 Uint32 : 3100} + Rows{16} Label{164 rev 1, 50b}, [32, +2)row | ERowOp 1: {32} {Set 1 Uint32 : 3200} | ERowOp 1: {33} {Set 1 Uint32 : 3300} + Rows{17} Label{174 rev 1, 50b}, [34, +2)row | ERowOp 1: {34} {Set 1 Uint32 : 3400} | ERowOp 1: {35} {Set 1 Uint32 : 3500} + Rows{18} Label{184 rev 1, 50b}, [36, +2)row | ERowOp 1: {36} {Set 1 Uint32 : 3600} | ERowOp 1: {37} {Set 1 Uint32 : 3700} + Rows{19} Label{194 rev 1, 50b}, [38, +2)row | ERowOp 1: {38} {Set 1 Uint32 : 3800} | ERowOp 1: {39} {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 1479b + FlatIndex{20} Label{3 rev 3, 453b} 21 rec | Page Row Bytes (Uint32) | 0 0 50b {0} | 1 2 50b {2} | 2 4 50b {4} | 3 6 50b {6} | 4 8 50b {8} | 5 10 50b {10} | 6 12 50b {12} | 7 14 50b {14} | 8 16 50b {16} | 9 18 50b {18} | 10 20 50b {20} | 11 22 50b {22} | 12 24 50b {24} | 13 26 50b {26} | 14 28 50b {28} | 15 30 50b {30} | 16 32 50b {32} | 17 34 50b {34} | 18 36 50b {36} | 19 38 50b {38} | 19 39 50b {39} + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: {0} {Set 1 Uint32 : 0} | ERowOp 1: {1} {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: {2} {Set 1 Uint32 : 200} | ERowOp 1: {3} {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: {4} {Set 1 Uint32 : 400} | ERowOp 1: {5} {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: {6} {Set 1 Uint32 : 600} | ERowOp 1: {7} {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: {8} {Set 1 Uint32 : 800} | ERowOp 1: {9} {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: {10} {Set 1 Uint32 : 1000} | ERowOp 1: {11} {Set 1 Uint32 : 1100} + Rows{6} Label{64 rev 1, 50b}, [12, +2)row | ERowOp 1: {12} {Set 1 Uint32 : 1200} | ERowOp 1: {13} {Set 1 Uint32 : 1300} + Rows{7} Label{74 rev 1, 50b}, [14, +2)row | ERowOp 1: {14} {Set 1 Uint32 : 1400} | ERowOp 1: {15} {Set 1 Uint32 : 1500} + Rows{8} Label{84 rev 1, 50b}, [16, +2)row | ERowOp 1: {16} {Set 1 
Uint32 : 1600} | ERowOp 1: {17} {Set 1 Uint32 : 1700} + Rows{9} Label{94 rev 1, 50b}, [18, +2)row | ERowOp 1: {18} {Set 1 Uint32 : 1800} | ERowOp 1: {19} {Set 1 Uint32 : 1900} + Rows{10} Label{104 rev 1, 50b}, [20, +2)row | ERowOp 1: {20} {Set 1 Uint32 : 2000} | ERowOp 1: {21} {Set 1 Uint32 : 2100} + Rows{11} Label{114 rev 1, 50b}, [22, +2)row | ERowOp 1: {22} {Set 1 Uint32 : 2200} | ERowOp 1: {23} {Set 1 Uint32 : 2300} + Rows{12} Label{124 rev 1, 50b}, [24, +2)row | ERowOp 1: {24} {Set 1 Uint32 : 2400} | ERowOp 1: {25} {Set 1 Uint32 : 2500} + Rows{13} Label{134 rev 1, 50b}, [26, +2)row | ERowOp 1: {26} {Set 1 Uint32 : 2600} | ERowOp 1: {27} {Set 1 Uint32 : 2700} + Rows{14} Label{144 rev 1, 50b}, [28, +2)row | ERowOp 1: {28} {Set 1 Uint32 : 2800} | ERowOp 1: {29} {Set 1 Uint32 : 2900} + Rows{15} Label{154 rev 1, 50b}, [30, +2)row | ERowOp 1: {30} {Set 1 Uint32 : 3000} | ERowOp 1: {31} {Set 1 Uint32 : 3100} + Rows{16} Label{164 rev 1, 50b}, [32, +2)row | ERowOp 1: {32} {Set 1 Uint32 : 3200} | ERowOp 1: {33} {Set 1 Uint32 : 3300} + Rows{17} Label{174 rev 1, 50b}, [34, +2)row | ERowOp 1: {34} {Set 1 Uint32 : 3400} | ERowOp 1: {35} {Set 1 Uint32 : 3500} + Rows{18} Label{184 rev 1, 50b}, [36, +2)row | ERowOp 1: {36} {Set 1 Uint32 : 3600} | ERowOp 1: {37} {Set 1 Uint32 : 3700} + Rows{19} Label{194 rev 1, 50b}, [38, +2)row | ERowOp 1: {38} {Set 1 Uint32 : 3800} | ERowOp 1: {39} {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 1479b + FlatIndex{20} Label{3 rev 3, 453b} 21 rec | Page Row Bytes (Uint32) | 0 0 50b {0} | 1 2 50b {2} | 2 4 50b {4} | 3 6 50b {6} | 4 8 50b {8} | 5 10 50b {10} | 6 12 50b {12} | 7 14 50b {14} | 8 16 50b {16} | 9 18 50b {18} | 10 20 50b {20} | 11 22 50b {22} | 12 24 50b {24} | 13 26 50b {26} | 14 28 50b {28} | 15 30 50b {30} | 16 32 50b {32} | 17 34 50b {34} | 18 36 50b {36} | 19 38 50b {38} | 19 39 50b {39} + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: {0} {Set 1 Uint32 : 0} | ERowOp 1: {1} {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: {2} {Set 1 Uint32 : 200} | ERowOp 1: {3} {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: {4} {Set 1 Uint32 : 400} | ERowOp 1: {5} {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: {6} {Set 1 Uint32 : 600} | ERowOp 1: {7} {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: {8} {Set 1 Uint32 : 800} | ERowOp 1: {9} {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: {10} {Set 1 Uint32 : 1000} | ERowOp 1: {11} {Set 1 Uint32 : 1100} + Rows{6} Label{64 rev 1, 50b}, [12, +2)row | ERowOp 1: {12} {Set 1 Uint32 : 1200} | ERowOp 1: {13} {Set 1 Uint32 : 1300} + Rows{7} Label{74 rev 1, 50b}, [14, +2)row | ERowOp 1: {14} {Set 1 Uint32 : 1400} | ERowOp 1: {15} {Set 1 Uint32 : 1500} + Rows{8} Label{84 rev 1, 50b}, [16, +2)row | ERowOp 1: {16} {Set 1 Uint32 : 1600} | ERowOp 1: {17} {Set 1 Uint32 : 1700} + Rows{9} Label{94 rev 1, 50b}, [18, +2)row | ERowOp 1: {18} {Set 1 Uint32 : 1800} | ERowOp 1: {19} {Set 1 Uint32 : 1900} + Rows{10} Label{104 rev 1, 50b}, [20, +2)row | ERowOp 1: {20} {Set 1 Uint32 : 2000} | ERowOp 1: {21} {Set 1 Uint32 : 2100} + Rows{11} Label{114 rev 1, 50b}, [22, +2)row | ERowOp 1: {22} {Set 1 Uint32 : 2200} | ERowOp 1: {23} {Set 1 Uint32 : 2300} + Rows{12} Label{124 rev 1, 50b}, [24, +2)row | ERowOp 1: {24} {Set 1 Uint32 : 2400} | ERowOp 1: {25} {Set 1 Uint32 : 2500} + Rows{13} Label{134 rev 1, 50b}, [26, +2)row | ERowOp 1: {26} {Set 1 Uint32 : 2600} | ERowOp 1: {27} {Set 1 Uint32 
: 2700} + Rows{14} Label{144 rev 1, 50b}, [28, +2)row | ERowOp 1: {28} {Set 1 Uint32 : 2800} | ERowOp 1: {29} {Set 1 Uint32 : 2900} + Rows{15} Label{154 rev 1, 50b}, [30, +2)row | ERowOp 1: {30} {Set 1 Uint32 : 3000} | ERowOp 1: {31} {Set 1 Uint32 : 3100} + Rows{16} Label{164 rev 1, 50b}, [32, +2)row | ERowOp 1: {32} {Set 1 Uint32 : 3200} | ERowOp 1: {33} {Set 1 Uint32 : 3300} + Rows{17} Label{174 rev 1, 50b}, [34, +2)row | ERowOp 1: {34} {Set 1 Uint32 : 3400} | ERowOp 1: {35} {Set 1 Uint32 : 3500} + Rows{18} Label{184 rev 1, 50b}, [36, +2)row | ERowOp 1: {36} {Set 1 Uint32 : 3600} | ERowOp 1: {37} {Set 1 Uint32 : 3700} + Rows{19} Label{194 rev 1, 50b}, [38, +2)row | ERowOp 1: {38} {Set 1 Uint32 : 3800} | ERowOp 1: {39} {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 1479b + FlatIndex{20} Label{3 rev 3, 453b} 21 rec | Page Row Bytes (Uint32) | 0 0 50b {0} | 1 2 50b {2} | 2 4 50b {4} | 3 6 50b {6} | 4 8 50b {8} | 5 10 50b {10} | 6 12 50b {12} | 7 14 50b {14} | 8 16 50b {16} | 9 18 50b {18} | 10 20 50b {20} | 11 22 50b {22} | 12 24 50b {24} | 13 26 50b {26} | 14 28 50b {28} | 15 30 50b {30} | 16 32 50b {32} | 17 34 50b {34} | 18 36 50b {36} | 19 38 50b {38} | 19 39 50b {39} + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: {0} {Set 1 Uint32 : 0} | ERowOp 1: {1} {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: {2} {Set 1 Uint32 : 200} | ERowOp 1: {3} {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: {4} {Set 1 Uint32 : 400} | ERowOp 1: {5} {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: {6} {Set 1 Uint32 : 600} | ERowOp 1: {7} {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: {8} {Set 1 Uint32 : 800} | ERowOp 1: {9} {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: {10} {Set 1 Uint32 : 1000} | ERowOp 1: {11} {Set 1 Uint32 : 1100} + Rows{6} Label{64 rev 1, 50b}, [12, +2)row | ERowOp 1: {12} {Set 1 Uint32 : 1200} | ERowOp 1: {13} {Set 1 Uint32 : 1300} + Rows{7} Label{74 rev 1, 50b}, [14, +2)row | ERowOp 1: {14} {Set 1 Uint32 : 1400} | ERowOp 1: {15} {Set 1 Uint32 : 1500} + Rows{8} Label{84 rev 1, 50b}, [16, +2)row | ERowOp 1: {16} {Set 1 Uint32 : 1600} | ERowOp 1: {17} {Set 1 Uint32 : 1700} + Rows{9} Label{94 rev 1, 50b}, [18, +2)row | ERowOp 1: {18} {Set 1 Uint32 : 1800} | ERowOp 1: {19} {Set 1 Uint32 : 1900} + Rows{10} Label{104 rev 1, 50b}, [20, +2)row | ERowOp 1: {20} {Set 1 Uint32 : 2000} | ERowOp 1: {21} {Set 1 Uint32 : 2100} + Rows{11} Label{114 rev 1, 50b}, [22, +2)row | ERowOp 1: {22} {Set 1 Uint32 : 2200} | ERowOp 1: {23} {Set 1 Uint32 : 2300} + Rows{12} Label{124 rev 1, 50b}, [24, +2)row | ERowOp 1: {24} {Set 1 Uint32 : 2400} | ERowOp 1: {25} {Set 1 Uint32 : 2500} + Rows{13} Label{134 rev 1, 50b}, [26, +2)row | ERowOp 1: {26} {Set 1 Uint32 : 2600} | ERowOp 1: {27} {Set 1 Uint32 : 2700} + Rows{14} Label{144 rev 1, 50b}, [28, +2)row | ERowOp 1: {28} {Set 1 Uint32 : 2800} | ERowOp 1: {29} {Set 1 Uint32 : 2900} + Rows{15} Label{154 rev 1, 50b}, [30, +2)row | ERowOp 1: {30} {Set 1 Uint32 : 3000} | ERowOp 1: {31} {Set 1 Uint32 : 3100} + Rows{16} Label{164 rev 1, 50b}, [32, +2)row | ERowOp 1: {32} {Set 1 Uint32 : 3200} | ERowOp 1: {33} {Set 1 Uint32 : 3300} + Rows{17} Label{174 rev 1, 50b}, [34, +2)row | ERowOp 1: {34} {Set 1 Uint32 : 3400} | ERowOp 1: {35} {Set 1 Uint32 : 3500} + Rows{18} Label{184 rev 1, 50b}, [36, +2)row | ERowOp 1: {36} {Set 1 Uint32 : 3600} | ERowOp 1: {37} {Set 1 Uint32 : 3700} + Rows{19} Label{194 rev 1, 50b}, 
[38, +2)row | ERowOp 1: {38} {Set 1 Uint32 : 3800} | ERowOp 1: {39} {Set 1 Uint32 : 3900}
>> TestKinesisHttpProxy::GoodRequestCreateStream
>> TPart::MassCheck [GOOD]
>> TPart::WreckPart
>> YdbProxy::ReadTopic [GOOD]
>> YdbProxy::ReadNonExistentTopic
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::StoreStatsQuota [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:00:30.168311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:00:30.168419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:00:30.168464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:00:30.168509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:00:30.168571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:00:30.168601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:00:30.168675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:00:30.168756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:00:30.169071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:00:30.260056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:00:30.260119Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:00:30.267653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:00:30.267769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:00:30.267923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:00:30.274022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:00:30.274291Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:00:30.275002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:00:30.275285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:00:30.279034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:00:30.280318Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:00:30.280385Z node 1
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:00:30.280547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:00:30.280607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:00:30.280665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:00:30.280831Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:00:30.292385Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:00:30.503004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:00:30.503268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:00:30.503471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:00:30.503702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:00:30.503763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:00:30.506288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:00:30.506444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:00:30.506641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:00:30.506727Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:00:30.506763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:00:30.506800Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:00:30.509979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:00:30.510051Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:00:30.510086Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:00:30.512702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:00:30.512762Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 
72057594046678944 2025-03-31T16:00:30.512815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:00:30.512883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:00:30.516571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:00:30.518998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:00:30.519231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:00:30.520370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:00:30.520530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:00:30.520611Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:00:30.520935Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:00:30.520992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:00:30.521171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:00:30.521261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:00:30.523882Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:00:30.523949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:00:30.524153Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:00:30.524200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:00:30.524492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:00:30.524544Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:00:30.524662Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:00:30.524701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:00:30.524740Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:00:30.524775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:00:30.524829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:00:30.524876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:00:30.524908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:00:30.524937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:00:30.525014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:00:30.525052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:00:30.525087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:00:30.527442Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:00:30.527591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:00:30.527639Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 416:2385];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-31T16:02:46.133208Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:416:2385];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-31T16:02:46.133264Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:416:2385];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-31T16:02:46.133362Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:416:2385];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-31T16:02:46.260091Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;parent=[2:416:2385];fline=actor.cpp:33;event=skip_flush_writing; 2025-03-31T16:02:46.448976Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;parent=[2:416:2385];fline=actor.cpp:33;event=skip_flush_writing; 2025-03-31T16:02:46.462639Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:416:2385];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186233409546; 2025-03-31T16:02:46.462783Z node 2 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186233409546;self_id=[2:416:2385];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546; 2025-03-31T16:02:46.462836Z node 2 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-03-31T16:02:46.462886Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:416:2385];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-31T16:02:46.462965Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:416:2385];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:02:46.463038Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:416:2385];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=1; 2025-03-31T16:02:46.463135Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:416:2385];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=4700006;tx_id=18446744073709551615;;current_snapshot_ts=5000006; 2025-03-31T16:02:46.463197Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:416:2385];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-31T16:02:46.463252Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:416:2385];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-31T16:02:46.463292Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:416:2385];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-31T16:02:46.463391Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:416:2385];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-31T16:02:46.601501Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;parent=[2:416:2385];fline=actor.cpp:33;event=skip_flush_writing; 2025-03-31T16:02:46.779593Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;parent=[2:416:2385];fline=actor.cpp:33;event=skip_flush_writing; 2025-03-31T16:02:46.793026Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:416:2385];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186233409546; 2025-03-31T16:02:46.793194Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:416:2385];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546; 2025-03-31T16:02:46.793254Z node 2 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-03-31T16:02:46.793335Z node 2 :TX_COLUMNSHARD DEBUG: There are stats for 1 tables 2025-03-31T16:02:46.793448Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:416:2385];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-31T16:02:46.793535Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:416:2385];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:02:46.793629Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:416:2385];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=1; 2025-03-31T16:02:46.793757Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:416:2385];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=4700006;tx_id=18446744073709551615;;current_snapshot_ts=5000006; 2025-03-31T16:02:46.793810Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:416:2385];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-31T16:02:46.793872Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:416:2385];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-31T16:02:46.793915Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:416:2385];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-31T16:02:46.794021Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:416:2385];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-31T16:02:46.794263Z node 2 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 3] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2025-03-31T16:02:46.795112Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72057594046678944:1 data size 0 row count 0 2025-03-31T16:02:46.795287Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], pathId map=OlapStore, is column=0, is olap=1 2025-03-31T16:02:46.795346Z node 2 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 3: RowCount 0, DataSize 0 2025-03-31T16:02:46.795435Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: OLAP store contains 1 tables. 
2025-03-31T16:02:46.795616Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Aggregated stats for pathId 3: RowCount 0, DataSize 0 2025-03-31T16:02:46.796084Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:02:46.796137Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T16:02:46.796477Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:02:46.800858Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:02:46.800941Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:338:2314], at schemeshard: 72057594046678944, txId: 0, path id: 2 2025-03-31T16:02:46.801040Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SomeDatabase" took 234us result status StatusSuccess 2025-03-31T16:02:46.801658Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "SomeDatabase" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SubDomainStateVersion: 2 SecurityStateVersion: 0 } } Children { Name: "OlapStore" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnStore CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-1" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 1000000 data_size_soft_quota: 900000 } SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:02:46.802623Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 0 2025-03-31T16:02:46.803271Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:416:2385];ev=NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated;fline=columnshard_subdomain_path_id.cpp:90;notify_subdomain=[OwnerId: 72057594046678944, LocalPathId: 2];
>> TReplicationTests::Create [GOOD]
>> TReplicationTests::ConsistencyLevel
>> KqpQuery::QueryClientTimeoutPrecompiled
>> KqpParams::ImplicitParameterTypes
>> KqpTypes::QuerySpecialTypes
>> TReplicationTests::CreateSequential [GOOD]
>> TReplicationTests::CreateInParallel
>> THiveTest::PipeAlivenessOfDeadTablet [GOOD]
|72.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut
|72.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut
>> KqpPg::InsertFromSelect_Simple+useSink [GOOD]
>> TFlatTableExecutor_VersionedRows::TestVersionedRowsSmallBlobs [GOOD]
>> TMiniKQLProtoTestYdb::TestExportOptionalTypeYdb
>> KqpPg::InsertFromSelect_Simple-useSink
>> TMiniKQLProtoTestYdb::TestExportOptionalTypeYdb [GOOD]
>> TFlatTableExecutor_VersionedRows::TestVersionedRowsLargeBlobs
>> TestKinesisHttpProxy::TestWrongRequest [GOOD]
>> TMiniKQLProtoTestYdb::TestExportListTypeYdb
>> TMiniKQLProtoTestYdb::TestExportListTypeYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportIntegralYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportEmptyOptionalYdb
>> TMiniKQLProtoTestYdb::TestExportEmptyOptionalYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportMultipleOptionalNotEmptyYdb
>> TMiniKQLProtoTestYdb::TestExportMultipleOptionalNotEmptyYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportOptionalYdb
>> TMiniKQLProtoTestYdb::TestExportOptionalYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportListYdb
>> TMiniKQLProtoTestYdb::TestExportListYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportMultipleOptionalVariantNotNullYdb
>> SystemView::PartitionStatsTtlFields [GOOD]
>> SystemView::QueryStatsAllTables
>> TMiniKQLProtoTestYdb::TestExportMultipleOptionalVariantNotNullYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportOptionalVariantOptionalNullYdb
>> TPart::WreckPart [GOOD]
>> TPart::PageFailEnv
|72.8%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_scan/test-results/unittest/{meta.json ... results_accumulator.log}
|72.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut
|72.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut
|72.8%| [LD] {RESULT} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut
|72.8%| [LD] {RESULT} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut
|72.8%| [TA] {RESULT} $(B)/ydb/core/kqp/runtime/ut/test-results/unittest/{meta.json ... results_accumulator.log}
------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestKinesisHttpProxy::TestEmptyHttpBody [GOOD]
Test command err:
2025-03-31T16:01:43.295746Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004481823099178:2078];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:01:43.302977Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00102c/r3tmp/tmpyue3pT/pdisk_1.dat 2025-03-31T16:01:44.061622Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:01:44.071210Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:01:44.071289Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:01:44.080502Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21609, node 1 2025-03-31T16:01:44.326134Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:01:44.326164Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:01:44.326170Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:01:44.326283Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28191 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:01:44.745483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:28191 2025-03-31T16:01:45.056998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting...
2025-03-31T16:01:45.066971Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-31T16:01:45.069154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:45.086785Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-03-31T16:01:45.092781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:45.273015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:01:45.330387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:01:45.382437Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710665, at schemeshard: 72057594046644480 2025-03-31T16:01:45.421302Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:45.464435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:45.513576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:45.568389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:45.617440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:45.662576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:45.745569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:47.375804Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004499002969782:2376], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:01:47.375950Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:01:47.376343Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004499002969794:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:01:47.380452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480 2025-03-31T16:01:47.394356Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488004499002969796:2380], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2025-03-31T16:01:47.485732Z node 1 :TX_PROXY ERROR: Actor# [1:7488004499002969847:2921] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 18], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:01:47.930465Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01jqpf00vd707pgg7hzkz64tas, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2Y5OGM3NzYtYTEwMDkyZDctMzllY2RlYTMtNzQyN2U0NDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:01:48.033858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:48.103986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:48.151391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:48.235715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:48.285825Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488004481823099178:2078];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:01:48.285920Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:01:48.336357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:48.389993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:48.459497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:48.512207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:48.638001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:01:48.771351Z node 1 :HTTP INFO: Listening on http://127.0.0.1:28731 2025-03-31T16:01:49.790848Z node 1 :SQS INFO: Start SQS service actor 2025-03-31T16:01:49.790974Z node 1 :SQS DEBUG: SQS service config: { EnableSqs: true YandexCloudMode: true EnableDeadLetterQueues: true } 2025-03-31T16:01:49.792337Z node 1 :SQS DEBUG: Enable scheme board scheme cache 2025-03-31T16:01:49.795194Z node 1 :SQS INFO: Start SQS proxy service actor 2025-03-31T16:01:49.819877Z node 1 :HTTP INFO: Listening on http://[::]:27305 2025-03-31T16:01:49.841231Z node 1 :SQS INFO: Request SQS users list 2025-03-31T16:01:49.841263Z node 1 :SQS DEBUG: Request SQS queues list 2025-03-31T16:01:49.841333Z node 1 :SQS NOTICE: [Node tracker] schedule describe tables after 0.000000s 2025-03-31T16:01:49.841358Z node 1 :SQS DEBUG: [Node tracker] bootstrap on node=1 2025-03-31T16:01:49.846572Z node 1 :SQS DEBUG: Request [] Starting executor actor for query(idx=GET_USER_SETTINGS_ID). Mode: COMPILE 2025-03-31T16:01:49.846673Z node 1 :SQS TRACE: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Compile program: ... e" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-03-31T16:02:46.381569Z node 8 :SQS DEBUG: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Attempt 1 execution duration: 32ms 2025-03-31T16:02:46.381845Z node 8 :SQS TRACE: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976710685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-03-31T16:02:46.381880Z node 8 :SQS TRACE: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Minikql data response: {"settings": [], "truncated": false} 2025-03-31T16:02:46.382008Z node 8 :SQS DEBUG: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] execution duration: 32ms 2025-03-31T16:02:46.382388Z node 8 :SQS TRACE: Handle user settings: { Status: 48 TxId: 281474976710685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type 
{ Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-03-31T16:02:46.396019Z node 8 :SQS TRACE: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] HandleResponse { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-03-31T16:02:46.396060Z node 8 :SQS DEBUG: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Attempt 1 execution duration: 20ms 2025-03-31T16:02:46.396521Z node 8 :SQS TRACE: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } 
} Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-03-31T16:02:46.396560Z node 8 :SQS TRACE: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Minikql data response: {"queues": [], "truncated": false} 2025-03-31T16:02:46.396725Z node 8 :SQS DEBUG: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] execution duration: 22ms 2025-03-31T16:02:46.397296Z node 8 :SQS TRACE: Handle queues list: { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-03-31T16:02:46.562660Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [8:7488004755656041532:2444]: Pool not found 2025-03-31T16:02:46.562907Z node 8 :SQS DEBUG: [monitoring] Report deletion queue data lag: 0.000000s, count: 0 2025-03-31T16:02:46.986062Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [8:7488004755656041528:2441]: Pool not found 2025-03-31T16:02:46.986671Z node 8 :SQS DEBUG: [cleanup removed queues] getting queues... 2025-03-31T16:02:46.995703Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7488004755656041671:2461], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:46.995846Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TDatabaseFetcherActor] ActorId: [8:7488004755656041672:2462], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2025-03-31T16:02:46.995923Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:47.323945Z node 8 :HTTP DEBUG: (#37,[::1]:51952) incoming connection opened 2025-03-31T16:02:47.324017Z node 8 :HTTP DEBUG: (#37,[::1]:51952) -> (POST /Root) 2025-03-31T16:02:47.324193Z node 8 :HTTP_PROXY INFO: proxy service: incoming request from [783f:e300:6050:0:603f:e300:6050:0] request [CreateStream] url [/Root] database [/Root] requestId: 67b0a857-2c0d7463-5a3f1300-76a1df4b 2025-03-31T16:02:47.324848Z node 8 :HTTP_PROXY INFO: http request [CreateStream] requestId [67b0a857-2c0d7463-5a3f1300-76a1df4b] reply with status: BAD_REQUEST message: ydb/core/http_proxy/json_proto_conversion.h:395: Top level of json value is not a map 2025-03-31T16:02:47.325126Z node 8 :HTTP DEBUG: (#37,[::1]:51952) <- (400 MissingParameter) 2025-03-31T16:02:47.325178Z node 8 :HTTP DEBUG: (#37,[::1]:51952) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: kinesisApi.CreateStream X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked 4 null 0 2025-03-31T16:02:47.325214Z node 8 :HTTP DEBUG: (#37,[::1]:51952) Response: HTTP/1.1 400 MissingParameter Connection: close x-amzn-requestid: 67b0a857-2c0d7463-5a3f1300-76a1df4b x-amz-crc32: 851558042 Content-Type: application/x-amz-json-1.1 Content-Length: 127 {"__type":"MissingParameter","message":"ydb/core/http_proxy/json_proto_conversion.h:395: Top level of json value is not a map"} 2025-03-31T16:02:47.325294Z node 8 :HTTP DEBUG: (#37,[::1]:51952) connection closed Http output full {"__type":"MissingParameter","message":"ydb/core/http_proxy/json_proto_conversion.h:395: Top level of json value is not a map"} 2025-03-31T16:02:47.363010Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [8:7488004755656041669:2460]: Pool not found 2025-03-31T16:02:47.367500Z node 8 :SQS DEBUG: [cleanup removed queues] there are no queues to delete >> TMiniKQLProtoTestYdb::TestExportOptionalVariantOptionalNullYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportMultipleOptionalVariantOptionalNullYdb >> TPartBtreeIndexIteration::OneNode_Groups_History_Slices [GOOD] >> TPartGroupBtreeIndexIter::NoNodes >> TMiniKQLProtoTestYdb::TestExportMultipleOptionalVariantOptionalNullYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportMultipleOptionalVariantOptionalNotNullYdb >> TPartGroupBtreeIndexIter::NoNodes [GOOD] >> TPartGroupBtreeIndexIter::OneNode >> TMiniKQLProtoTestYdb::TestExportMultipleOptionalVariantOptionalNotNullYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportOptionalVariantOptionalYdbType >> TMiniKQLEngineFlatTest::TestPureProgram >> TPartGroupBtreeIndexIter::OneNode [GOOD] >> TPartGroupBtreeIndexIter::FewNodes >> TMiniKQLProtoTestYdb::TestExportOptionalVariantOptionalYdbType [GOOD] >> KqpExplain::PureExpr [GOOD] >> KqpExplain::ReadTableRangesFullScan >> TMiniKQLEngineFlatTest::TestPureProgram [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFullExists >> TPartGroupBtreeIndexIter::FewNodes [GOOD] >> TPartMulti::Basics [GOOD] >> TPartMulti::BasicsReverse [GOOD] >> TPartSlice::TrivialMerge [GOOD] >> TPartSlice::SimpleMerge [GOOD] >> TPartSlice::ComplexMerge [GOOD] >> TPartSlice::LongTailMerge [GOOD] >> TPartSlice::CutSingle [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestYmqHttpProxy::TestSendMessageBatch [GOOD] Test command err: 2025-03-31T16:01:41.555130Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004473587975519:2197];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:01:41.555750Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0010a2/r3tmp/tmpPo0QBV/pdisk_1.dat 2025-03-31T16:01:42.254707Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:01:42.261573Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:01:42.263273Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:01:42.271672Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26684, node 1 2025-03-31T16:01:42.591772Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:01:42.591795Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:01:42.591802Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:01:42.591958Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10269 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:01:43.310568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:43.339405Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 TClient is connected to server localhost:10269 2025-03-31T16:01:43.702915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:43.727765Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-31T16:01:43.729605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:01:43.757142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:43.947461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:01:44.042027Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710663, at schemeshard: 72057594046644480 2025-03-31T16:01:44.054512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:01:44.113862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:44.153550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:44.191811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:44.281118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:44.363355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:44.453775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:44.533501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:46.520436Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488004473587975519:2197];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:01:46.529226Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:01:46.621173Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004495062813319:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:01:46.621317Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:01:46.621793Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004495062813331:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:01:46.626085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480 2025-03-31T16:01:46.657320Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488004495062813333:2381], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2025-03-31T16:01:46.725864Z node 1 :TX_PROXY ERROR: Actor# [1:7488004495062813384:2924] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 18], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:01:47.086353Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01jqpf003tb7ktbhr2dtdsb5yj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGEwNWNjNjEtYjVjZWNjZTEtNGMyMzQyZDktYTRkOTVmN2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:01:47.132770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:47.185396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:47.263871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:47.340224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:47.390352Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:47.454730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:47.495166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:47.534318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:47.582966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 waiting... 
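The sequence just above — NOT_FOUND for /Root/.metadata/workload_manager/pools/default, an ESchemeOpCreateResourcePool suboperation, a "Transaction ... completed, doublechecking" retry, and the TX_PROXY "path exist, request accepts it" message — reads as a lazy, racy bootstrap of the default workload-manager resource pool: each session that fails to fetch the pool tries to create it, and a session that loses the race treats "path exist" as success. A minimal sketch of that create-if-missing pattern, using a hypothetical client object and exception names (not the real YDB API):

    import time

    class PathExistsError(Exception):
        pass

    class TransientError(Exception):
        pass

    def ensure_default_pool(client,
                            path="/Root/.metadata/workload_manager/pools/default",
                            retries=5, backoff=0.2):
        # client.fetch_pool / client.create_pool are assumed helpers that mirror
        # the fetch -> NOT_FOUND -> create -> "path exist" sequence visible in
        # the log; they are illustrative, not a YDB SDK surface.
        for attempt in range(retries):
            if client.fetch_pool(path) is not None:
                return                        # pool became visible
            try:
                client.create_pool(path)      # like ESchemeOpCreateResourcePool
                return
            except PathExistsError:
                return                        # a concurrent creator won the race
            except TransientError:
                time.sleep(backoff * (attempt + 1))  # retry, as in "doublechecking"
        raise TimeoutError(f"pool {path} not available after {retries} attempts")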
2025-03-31T16:01:47.691378Z node 1 :HTTP INFO: Listening on http://127.0.0.1:24963 2025-03-31T16:01:48.702025Z node 1 :SQS INFO: Start SQS service actor 2025-03-31T16:01:48.702142Z node 1 :SQS DEBUG: SQS service config: { EnableSqs: true YandexCloudMode: true EnableDeadLetterQueues: true } 2025-03-31T16:01:48.703755Z node 1 :SQS INFO: Start SQS proxy service actor 2025-03-31T16:01:48.704021Z node 1 :HTTP INFO: Listening on http://[::]:27163 2025-03-31T16:01:48.704187Z node 1 :SQS DEBUG: Enable scheme board scheme cache 2025-03-31T16:01:48.746938Z node 1 :SQS INFO: Request SQS users list 2025-03-31T16:01:48.746975Z node 1 :SQS DEBUG: Request SQS queues list 2025-03-31T16:01:48.747060Z node 1 :SQS NOTICE: [Node tracker] schedule describe tables after 0.000000s 2025-03-31T16:01:48.747139Z node 1 :SQS DEBUG: [Node tracker] bootstrap on node=1 2025-03-31T16:01:48.772765Z node 1 :SQS DEBUG: Request [] Starting executor actor for query(idx=GET_QUEUES_LIST_ID). Mode: COMPILE 2025-03-31T16:01:48.773781Z node 1 :SQS TRACE: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Compile program: ( ... 3-31T16:02:47.660637Z node 7 :SQS TRACE: Request [acac2b26-ce3a17a3-3584bbc-d9adc958] Sending sqs response: { SendMessageBatch { RequestId: "acac2b26-ce3a17a3-3584bbc-d9adc958" Entries { MD5OfMessageAttributes: "3d778967e1fa431d626ffb890c486385" MD5OfMessageBody: "94a29778a1f1f41bf68142847b2e6106" MessageId: "35a1571-e2c0bfdb-60edf946-6c5e86c2" SequenceNumber: 1 Id: "Id-0" } Entries { MD5OfMessageBody: "3bf7e6d806a0b8062135ae945eca30bf" MessageId: "ce0f44ee-ec321e95-a11bdb48-88cc1963" SequenceNumber: 2 Id: "Id-1" } Entries { Error { Status: 400 Message: "No MessageGroupId parameter." ErrorCode: "MissingParameter" } Id: "Id-2" } } RequestId: "acac2b26-ce3a17a3-3584bbc-d9adc958" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: true } 2025-03-31T16:02:47.660856Z node 7 :SQS DEBUG: Request SendMessageBatch working duration: 110ms 2025-03-31T16:02:47.660917Z node 7 :SQS TRACE: Dec local leader ref for actor [7:7488004757613675563:3628]. Found: 1 2025-03-31T16:02:47.660940Z node 7 :SQS TRACE: HandleSqsResponse SendMessageBatch { RequestId: "acac2b26-ce3a17a3-3584bbc-d9adc958" Entries { MD5OfMessageAttributes: "3d778967e1fa431d626ffb890c486385" MD5OfMessageBody: "94a29778a1f1f41bf68142847b2e6106" MessageId: "35a1571-e2c0bfdb-60edf946-6c5e86c2" SequenceNumber: 1 Id: "Id-0" } Entries { MD5OfMessageBody: "3bf7e6d806a0b8062135ae945eca30bf" MessageId: "ce0f44ee-ec321e95-a11bdb48-88cc1963" SequenceNumber: 2 Id: "Id-1" } Entries { Error { Status: 400 Message: "No MessageGroupId parameter." ErrorCode: "MissingParameter" } Id: "Id-2" } } RequestId: "acac2b26-ce3a17a3-3584bbc-d9adc958" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: true 2025-03-31T16:02:47.661063Z node 7 :SQS TRACE: Sending answer to proxy actor [7:7488004757613675559:2503]: SendMessageBatch { RequestId: "acac2b26-ce3a17a3-3584bbc-d9adc958" Entries { MD5OfMessageAttributes: "3d778967e1fa431d626ffb890c486385" MD5OfMessageBody: "94a29778a1f1f41bf68142847b2e6106" MessageId: "35a1571-e2c0bfdb-60edf946-6c5e86c2" SequenceNumber: 1 Id: "Id-0" } Entries { MD5OfMessageBody: "3bf7e6d806a0b8062135ae945eca30bf" MessageId: "ce0f44ee-ec321e95-a11bdb48-88cc1963" SequenceNumber: 2 Id: "Id-1" } Entries { Error { Status: 400 Message: "No MessageGroupId parameter." 
ErrorCode: "MissingParameter" } Id: "Id-2" } } RequestId: "acac2b26-ce3a17a3-3584bbc-d9adc958" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: true 2025-03-31T16:02:47.661312Z node 7 :SQS TRACE: Request [acac2b26-ce3a17a3-3584bbc-d9adc958] HandleResponse: { SendMessageBatch { RequestId: "acac2b26-ce3a17a3-3584bbc-d9adc958" Entries { MD5OfMessageAttributes: "3d778967e1fa431d626ffb890c486385" MD5OfMessageBody: "94a29778a1f1f41bf68142847b2e6106" MessageId: "35a1571-e2c0bfdb-60edf946-6c5e86c2" SequenceNumber: 1 Id: "Id-0" } Entries { MD5OfMessageBody: "3bf7e6d806a0b8062135ae945eca30bf" MessageId: "ce0f44ee-ec321e95-a11bdb48-88cc1963" SequenceNumber: 2 Id: "Id-1" } Entries { Error { Status: 400 Message: "No MessageGroupId parameter." ErrorCode: "MissingParameter" } Id: "Id-2" } } RequestId: "acac2b26-ce3a17a3-3584bbc-d9adc958" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: true }, status: OK 2025-03-31T16:02:47.661439Z node 7 :SQS DEBUG: Request [acac2b26-ce3a17a3-3584bbc-d9adc958] Sending reply from proxy actor: { SendMessageBatch { RequestId: "acac2b26-ce3a17a3-3584bbc-d9adc958" Entries { MD5OfMessageAttributes: "3d778967e1fa431d626ffb890c486385" MD5OfMessageBody: "94a29778a1f1f41bf68142847b2e6106" MessageId: "35a1571-e2c0bfdb-60edf946-6c5e86c2" SequenceNumber: 1 Id: "Id-0" } Entries { MD5OfMessageBody: "3bf7e6d806a0b8062135ae945eca30bf" MessageId: "ce0f44ee-ec321e95-a11bdb48-88cc1963" SequenceNumber: 2 Id: "Id-1" } Entries { Error { Status: 400 Message: "No MessageGroupId parameter." ErrorCode: "MissingParameter" } Id: "Id-2" } } RequestId: "acac2b26-ce3a17a3-3584bbc-d9adc958" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: true } 2025-03-31T16:02:47.661950Z node 7 :HTTP_PROXY DEBUG: http request [SendMessageBatch] requestId [acac2b26-ce3a17a3-3584bbc-d9adc958] Got succesfult GRPC response. 2025-03-31T16:02:47.662158Z node 7 :HTTP_PROXY INFO: http request [SendMessageBatch] requestId [acac2b26-ce3a17a3-3584bbc-d9adc958] reply ok 2025-03-31T16:02:47.662343Z node 7 :HTTP_PROXY DEBUG: http request [SendMessageBatch] requestId [acac2b26-ce3a17a3-3584bbc-d9adc958] Send metering event. 
HttpStatusCode: 200 IsFifo: 1 FolderId: folder4 RequestSizeInBytes: 1063 ResponseSizeInBytes: 642 SourceAddress: f82e:ec00:6050:0:e02e:ec00:6050:0 ResourceId: 000000000000000101v0 Action: SendMessageBatch 2025-03-31T16:02:47.662531Z node 7 :HTTP DEBUG: (#40,[::1]:57232) <- (200 ) 2025-03-31T16:02:47.662630Z node 7 :HTTP DEBUG: (#40,[::1]:57232) connection closed Http output full {"Successful":[{"SequenceNumber":"1","Id":"Id-0","MD5OfMessageBody":"94a29778a1f1f41bf68142847b2e6106","MD5OfMessageAttributes":"3d778967e1fa431d626ffb890c486385","MessageId":"35a1571-e2c0bfdb-60edf946-6c5e86c2"},{"SequenceNumber":"2","Id":"Id-1","MD5OfMessageBody":"3bf7e6d806a0b8062135ae945eca30bf","MessageId":"ce0f44ee-ec321e95-a11bdb48-88cc1963"}],"Failed":[{"Message":"No MessageGroupId parameter.","Id":"Id-2","Code":"MissingParameter","SenderFault":true}]} 2025-03-31T16:02:47.663046Z node 7 :SQS TRACE: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] Compile program response: { Status: 48 MiniKQLCompileResults { CompiledProgram: "\037\016\nFlags\010Name\010Args\016Payload\022Parameter\014Offset\032SentTimestamp\006\002\206\202\t\211\004\202\203\005@\206\205\004\207\203\010\207\203\010\026\032$SetResult\000\003?\002\020messages\t\211\004?\016\205\004?\016\203\014\020List$Truncated\203\004\030Member\000\t\211\026?\026\203\005\004\200\205\004\203\004\203\004\026\032\213\010\203\010\203\010\203\010\203\010\213\010?$?&\203\010\203\010\203\004\203\010\203\010\203\004\206\203\014\203\014,SelectRange\000\003?\034 \000\001\205\000\000\000\000\001\032\000\000\000\000\000\000\000?\014\005?\"\003?\036\010\003? \006\003\013?,\t\351\000?$\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?R\003?T(QUEUE_ID_NUMBER_HASH\003\022\000\t\351\000?&\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?h\003?j\036QUEUE_ID_NUMBER\003\022\000\t\351\000?(\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?~\003?\200\022TIME_FROM\003\022\000\003?*\000\010\013?2?`?v\003?.\177\377\377\377\377\377\377\377\377\003?0\177\377\377\377\377\377\377\377\377\014\003?4\000\003?6\002\003?8\000\003?:\000\006\010?>\003\203\014\000\003\203\014\000\003\203\014\000\003\203\014\000\017\003?@\000\377\007\003?\030\000\002\001\000/" } } 2025-03-31T16:02:47.663078Z node 7 :SQS DEBUG: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] compilation duration: 2ms 2025-03-31T16:02:47.663105Z node 7 :SQS DEBUG: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) has been prepared 2025-03-31T16:02:47.663121Z node 7 :SQS DEBUG: Request [] Executing compiled query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) 2025-03-31T16:02:47.663230Z node 7 :SQS DEBUG: Request [] Starting executor actor for query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID). 
Mode: COMPILE_AND_EXEC 2025-03-31T16:02:47.663319Z node 7 :SQS TRACE: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] Serializing params: {"QUEUE_ID_NUMBER": 2, "QUEUE_ID_NUMBER_HASH": 17472595041006102391, "SHARD": 0, "QUEUE_ID_NUMBER_AND_SHARD_HASH": 12311263855443095412, "TIME_FROM": 0} 2025-03-31T16:02:47.663657Z node 7 :SQS TRACE: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] Execute program: { Transaction { MiniKQLTransaction { Mode: COMPILE_AND_EXEC Program { Bin: "\037\016\nFlags\010Name\010Args\016Payload\022Parameter\014Offset\032SentTimestamp\006\002\206\202\t\211\004\202\203\005@\206\205\004\207\203\010\207\203\010\026\032$SetResult\000\003?\002\020messages\t\211\004?\016\205\004?\016\203\014\020List$Truncated\203\004\030Member\000\t\211\026?\026\203\005\004\200\205\004\203\004\203\004\026\032\213\010\203\010\203\010\203\010\203\010\213\010?$?&\203\010\203\010\203\004\203\010\203\010\203\004\206\203\014\203\014,SelectRange\000\003?\034 \000\001\205\000\000\000\000\001\032\000\000\000\000\000\000\000?\014\005?\"\003?\036\010\003? \006\003\013?,\t\351\000?$\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?R\003?T(QUEUE_ID_NUMBER_HASH\003\022\000\t\351\000?&\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?h\003?j\036QUEUE_ID_NUMBER\003\022\000\t\351\000?(\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?~\003?\200\022TIME_FROM\003\022\000\003?*\000\010\013?2?`?v\003?.\177\377\377\377\377\377\377\377\377\003?0\177\377\377\377\377\377\377\377\377\014\003?4\000\003?6\002\003?8\000\003?:\000\006\010?>\003\203\014\000\003\203\014\000\003\203\014\000\003\203\014\000\017\003?@\000\377\007\003?\030\000\002\001\000/" } Params { Bin: "\037\000\005\205\n\203\010\203\010\203\010\203\004\203\010> TPartSlice::CutMulti [GOOD] >> TPartSlice::LookupBasics [GOOD] >> TPartSlice::LookupFull [GOOD] >> TPartSlice::EqualByRowId [GOOD] >> TPartSlice::SupersetByRowId [GOOD] >> TPartSlice::Subtract [GOOD] >> TPartSlice::ParallelCompactions [GOOD] >> TPartSlice::UnsplitBorrow [GOOD] >> TPartSliceLoader::RestoreMissingSlice >> TReplicationTests::ConsistencyLevel [GOOD] >> TReplicationTests::Alter >> TestKinesisHttpProxy::ListShardsToken [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFullExists [GOOD] >> TMiniKQLProgramBuilderTest::TestEraseRowStaticKey >> TMiniKQLEngineFlatTest::TestSelectRangeFromInclusive >> TMiniKQLProgramBuilderTest::TestEraseRowStaticKey [GOOD] >> TMiniKQLProgramBuilderTest::TestEraseRowPartialDynamicKey >> TMiniKQLEngineFlatTest::TestSelectRangeFromInclusive [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFromExclusive >> TMiniKQLProgramBuilderTest::TestEraseRowPartialDynamicKey [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectRow >> TMiniKQLEngineFlatTest::TestSelectRangeFromExclusive [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeBothIncFromIncTo >> TPartSliceLoader::RestoreMissingSlice [GOOD] >> TPartSliceLoader::RestoreOneSlice [GOOD] >> TPartSliceLoader::RestoreMissingSliceFullScreen >> TMiniKQLProgramBuilderTest::TestSelectRow [GOOD] >> TMiniKQLProgramBuilderTest::TestUpdateRowDynamicKey >> TMiniKQLEngineFlatTest::TestSelectRangeBothIncFromIncTo [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeBothExcFromIncTo >> TPartSliceLoader::RestoreMissingSliceFullScreen [GOOD] >> TPartSliceLoader::RestoreFromScreenIndexKeys [GOOD] >> TPartSliceLoader::RestoreFromScreenDataKeys [GOOD] >> 
TRowVersionRangesTest::MergeFailLeft [GOOD] >> TRowVersionRangesTest::MergeFailRight [GOOD] >> TRowVersionRangesTest::MergeFailOuter [GOOD] >> TRowVersionRangesTest::MergeFailInner [GOOD] >> TRowVersionRangesTest::MergeExtendLeft [GOOD] >> TRowVersionRangesTest::MergeExtendLeftInner [GOOD] >> TRowVersionRangesTest::MergeExtendLeftComplete [GOOD] >> TRowVersionRangesTest::MergeExtendRight [GOOD] >> TRowVersionRangesTest::MergeExtendRightInner [GOOD] >> TRowVersionRangesTest::MergeExtendRightComplete [GOOD] >> TRowVersionRangesTest::MergeExtendBoth [GOOD] >> TRowVersionRangesTest::MergeHoleExact [GOOD] >> TRowVersionRangesTest::MergeHoleInner [GOOD] >> TRowVersionRangesTest::MergeAllOuter [GOOD] >> TRowVersionRangesTest::MergeAllInner [GOOD] >> TRowVersionRangesTest::MergeAllEdges [GOOD] >> TRowVersionRangesTest::ContainsEmpty [GOOD] >> TRowVersionRangesTest::ContainsNonEmpty [GOOD] >> TRowVersionRangesTest::ContainsInvalid [GOOD] >> TRowVersionRangesTest::AdjustDown [GOOD] >> TRowVersionRangesTest::AdjustDownSnapshot [GOOD] >> TMiniKQLProgramBuilderTest::TestUpdateRowDynamicKey [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectFromInclusiveRange >> TMiniKQLEngineFlatTest::TestSelectRangeBothExcFromIncTo [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeBothIncFromExcTo >> TMiniKQLProgramBuilderTest::TestSelectFromInclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectFromExclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectToInclusiveRange >> TReplicationTests::CreateInParallel [GOOD] >> TReplicationTests::CreateDropRecreate >> TMiniKQLEngineFlatTest::TestSelectRangeBothIncFromExcTo [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeBothExcFromExcTo |72.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLProtoTestYdb::TestExportOptionalVariantOptionalYdbType [GOOD] |72.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller |72.9%| [LD] {RESULT} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller |72.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller >> TMiniKQLProgramBuilderTest::TestSelectToInclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectToExclusiveRange >> TMiniKQLEngineFlatTest::TestSelectRangeBothExcFromExcTo [GOOD] >> TMiniKQLEngineFlatTest::TestMapsPushdown >> TMiniKQLEngineFlatTest::TestMapsPushdown [GOOD] >> TMiniKQLEngineFlatTest::TestNoOrderedTakePushdown >> TMiniKQLProgramBuilderTest::TestSelectToExclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectBothFromInclusiveToInclusiveRange >> TPart::PageFailEnv [GOOD] >> TPart::ForwardEnv >> TMiniKQLEngineFlatTest::TestNoOrderedTakePushdown [GOOD] >> TMiniKQLEngineFlatTest::TestNoAggregatedPushdown >> TMiniKQLProgramBuilderTest::TestSelectBothFromInclusiveToInclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectBothFromExclusiveToExclusiveRange >> TMiniKQLEngineFlatTest::TestNoAggregatedPushdown [GOOD] >> TMiniKQLEngineFlatTest::TestNoPartialSortPushdown >> TMiniKQLProgramBuilderTest::TestSelectBothFromExclusiveToExclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestInvalidParameterName >> TMiniKQLEngineFlatTest::TestNoPartialSortPushdown [GOOD] >> TMiniKQLEngineFlatTest::TestMultiRSPerDestination >> TMiniKQLProgramBuilderTest::TestInvalidParameterName [GOOD] >> TMiniKQLProgramBuilderTest::TestInvalidParameterType >> 
TMiniKQLEngineFlatTest::TestMultiRSPerDestination [GOOD] >> TMiniKQLProgramBuilderTest::TestInvalidParameterType [GOOD] >> TPart::ForwardEnv [GOOD] >> TPart::WreckPartColumnGroups >> IncrementalBackup::SimpleBackupBackupCollection-WithIncremental [GOOD] >> TReplicationTests::Alter [GOOD] >> TReplicationTests::CannotAddReplicationConfig ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestKinesisHttpProxy::TestWrongRequest [GOOD] Test command err: 2025-03-31T16:01:42.868454Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004476817696777:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:01:42.868489Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00104a/r3tmp/tmpZ36gO7/pdisk_1.dat 2025-03-31T16:01:43.717527Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:01:43.721657Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:01:43.721721Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:01:43.732464Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20428, node 1 2025-03-31T16:01:43.995995Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:01:43.996023Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:01:43.996031Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:01:43.996153Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26451 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:01:44.480019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:01:44.512041Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 TClient is connected to server localhost:26451 2025-03-31T16:01:44.836341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:44.842748Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-31T16:01:44.844779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:44.871828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:45.029993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:01:45.143996Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710663, at schemeshard: 72057594046644480 2025-03-31T16:01:45.152947Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:01:45.231744Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710665, at schemeshard: 72057594046644480 2025-03-31T16:01:45.239904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:45.307886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:45.390895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:01:45.459872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-03-31T16:01:45.510054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:45.562601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:45.600135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:01:47.464193Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004498292534720:2376], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:01:47.464319Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:01:47.465662Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004498292534732:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:01:47.470018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480 2025-03-31T16:01:47.482901Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488004498292534734:2380], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2025-03-31T16:01:47.537011Z node 1 :TX_PROXY ERROR: Actor# [1:7488004498292534785:2923] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 18], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:01:47.871203Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488004476817696777:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:01:47.871277Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:01:48.048879Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01jqpf00y557gxvhqkxhxq75pb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTBhZjZmMTYtNzY0ZDhhMi05NTVkNTZlMS04ZThlYjQ2MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:01:48.113824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:48.166641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:48.227230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:48.274274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:48.319026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:48.380073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:48.452266Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:48.498574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:48.561241Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:01:48.682480Z node 1 :HTTP INFO: Listening on http://127.0.0.1:11923 2025-03-31T16:01:49.687958Z node 1 :SQS INFO: Start SQS service actor 2025-03-31T16:01:49.691193Z node 1 :SQS INFO: Start SQS proxy service actor 2025-03-31T16:01:49.691515Z node 1 :HTTP INFO: Listening on http://[::]:27995 2025-03-31T16:01:49.692013Z node 1 :SQS DEBUG: SQS service config: { EnableSqs: true YandexCloudMode: true EnableDeadLetterQueues: true } 2025-03-31T16:01:49.693090Z node 1 :SQS DEBUG: Enable scheme board scheme cache 2025-03-31T16:01:49.727050Z node 1 :SQS INFO: Request SQS users list 2025-03-31T16:01:49.727115Z node 1 :SQS DEBUG: Request SQS queues list 2025-03-31T16:01:49.731211Z node 1 :SQS NOTICE: [Node tracker] schedule describe tables after 0.000000s 2025-03-31T16:01:49.731243Z node 1 :SQS DEBUG: [Node tracker] bootstrap on node=1 2025-03-31T16:01:49.748461Z node 1 :SQS DEBUG: Request [] Starting executor actor for query( ... SER_SETTINGS_ID) Queue [] Attempt 1 execution duration: 14ms 2025-03-31T16:02:48.444544Z node 8 :SQS TRACE: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976710685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-03-31T16:02:48.444582Z node 8 :SQS TRACE: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Minikql data response: {"settings": [], "truncated": false} 2025-03-31T16:02:48.444680Z node 8 :SQS DEBUG: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] execution duration: 15ms 2025-03-31T16:02:48.445217Z node 8 :SQS TRACE: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] HandleResponse { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { 
Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-03-31T16:02:48.445235Z node 8 :SQS DEBUG: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Attempt 1 execution duration: 14ms 2025-03-31T16:02:48.445597Z node 8 :SQS TRACE: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-03-31T16:02:48.445622Z node 8 :SQS TRACE: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Minikql data response: {"queues": [], "truncated": false} 2025-03-31T16:02:48.445730Z node 8 :SQS DEBUG: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] execution duration: 15ms 2025-03-31T16:02:48.446011Z node 8 :SQS TRACE: Handle user settings: { Status: 48 TxId: 281474976710685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } 
Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-03-31T16:02:48.446333Z node 8 :SQS TRACE: Handle queues list: { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-03-31T16:02:48.654055Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [8:7488004761951250621:2442]: Pool not found 2025-03-31T16:02:48.654806Z node 8 :SQS DEBUG: [monitoring] Report deletion queue data lag: 0.000000s, count: 0 2025-03-31T16:02:48.967660Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [8:7488004761951250619:2441]: Pool not found 2025-03-31T16:02:48.968801Z node 8 :SQS DEBUG: [cleanup removed queues] getting queues... 2025-03-31T16:02:48.971853Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7488004761951250778:2461], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:48.971932Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TDatabaseFetcherActor] ActorId: [8:7488004761951250779:2462], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2025-03-31T16:02:48.971986Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:02:49.341729Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [8:7488004761951250776:2460]: Pool not found
2025-03-31T16:02:49.342884Z node 8 :SQS DEBUG: [cleanup removed queues] there are no queues to delete
2025-03-31T16:02:49.355479Z node 8 :HTTP DEBUG: (#37,[::1]:33214) incoming connection opened
2025-03-31T16:02:49.355584Z node 8 :HTTP DEBUG: (#37,[::1]:33214) -> (POST /)
Http output full {"__type":"InvalidArgumentException","message":"ydb/core/http_proxy/json_proto_conversion.h:400: Unexpected json key: WrongStreamName"}
400 {"__type":"InvalidArgumentException","message":"ydb/core/http_proxy/json_proto_conversion.h:400: Unexpected json key: WrongStreamName"}
2025-03-31T16:02:49.355710Z node 8 :HTTP_PROXY INFO: proxy service: incoming request from [3898:1000:6050:0:2098:1000:6050:0] request [CreateStream] url [/] database [] requestId: fa1a1309-52da11a4-c191ed15-b2c73d85
2025-03-31T16:02:49.356305Z node 8 :HTTP_PROXY WARN: http request [CreateStream] requestId [fa1a1309-52da11a4-c191ed15-b2c73d85] got new request with incorrect json from [3898:1000:6050:0:2098:1000:6050:0] database ''
2025-03-31T16:02:49.356496Z node 8 :HTTP_PROXY INFO: http request [CreateStream] requestId [fa1a1309-52da11a4-c191ed15-b2c73d85] reply with status: BAD_REQUEST message: ydb/core/http_proxy/json_proto_conversion.h:400: Unexpected json key: WrongStreamName
2025-03-31T16:02:49.356797Z node 8 :HTTP DEBUG: (#37,[::1]:33214) <- (400 InvalidArgumentException)
2025-03-31T16:02:49.356850Z node 8 :HTTP DEBUG: (#37,[::1]:33214) Request: POST / HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: kinesisApi.CreateStream X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked 57 { "ShardCount":5, "StreamName":"testtopic", "WrongStreamName":"WrongStreamName" } 0
2025-03-31T16:02:49.356883Z node 8 :HTTP DEBUG: (#37,[::1]:33214) Response: HTTP/1.1 400 InvalidArgumentException Connection: close x-amzn-requestid: fa1a1309-52da11a4-c191ed15-b2c73d85 x-amz-crc32: 3053902336 Content-Type: application/x-amz-json-1.1 Content-Length: 135 {"__type":"InvalidArgumentException","message":"ydb/core/http_proxy/json_proto_conversion.h:400: Unexpected json key: WrongStreamName"}
2025-03-31T16:02:49.356999Z node 8 :HTTP DEBUG: (#37,[::1]:33214) connection closed
>> KqpExplain::SqlIn [GOOD]
>> KqpExplain::SsaProgramInJsonPlan
>> TMiniKQLEngineFlatTest::TestSelectRangeFullWithoutColumnsNotExists
>> KqpTypes::UnsafeTimestampCastV1 [GOOD]
>> KqpTypes::Time64Columns-EnableTableDatetime64
>> TMiniKQLEngineFlatTest::TestSelectRangeFullWithoutColumnsNotExists [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeFullWithoutColumnsNotExistsNullKey
>> TMiniKQLEngineFlatTest::TestSelectRangeFullWithoutColumnsNotExistsNullKey [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeFullExistsTruncatedByItems [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeFullExistsTruncatedByItemsFromNull
>> TReplicationTests::CreateDropRecreate [GOOD]
>> TReplicationTests::CreateWithoutCredentials
------- [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::TestMultiRSPerDestination [GOOD]
Test command err: PrepareShardPrograms (491): too many shard readsets (2 > 1), src tables: [200:301:0], dst tables: [200:301:0]
Type { Kind: Struct }
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >>
TRowVersionRangesTest::AdjustDownSnapshot [GOOD] Test command err: Slices{ [0, 39] } Part{[1:2:3:0:0:0:0] eph 0, 5129b 40r} data 5373b + FlatIndex{3} Label{3 rev 3, 64b} 2 rec | Page Row Bytes (Uint32, Uint32) | 1 0 2466b {0, 1} | 1 39 2466b {5, 7} + BTreeIndex{Empty, PageId: 1 RowCount: 40 DataSize: 2466 GroupDataSize: 2663 ErasedRowCount: 0 LevelCount: 0 IndexSize: 0 + Rows{1} Label{14 rev 1, 2466b}, [0, +40)row | ERowOp 1: {0, 1} {Set 2 Uint32 : 0}, {Set 3 Uint64 : 0}, {Set 4 String : xxxxxxxxxx_0} | ERowOp 1: {0, 3} {Set 2 Uint32 : 1}, {Set 3 Uint64 : 1}, {Set 4 String : xxxxxxxxxx_1} | ERowOp 1: {0, 4} {Set 2 Uint32 : 2}, {Set 3 Uint64 : 2}, {Set 4 String : xxxxxxxxxx_2} | ERowOp 1: {0, 6} {Set 2 Uint32 : 3}, {Set 3 Uint64 : 3}, {Set 4 String : xxxxxxxxxx_3} | ERowOp 1: {0, 7} {Set 2 Uint32 : 4}, {Set 3 Uint64 : 4}, {Set 4 String : xxxxxxxxxx_4} | ERowOp 1: {0, 8} {Set 2 Uint32 : 5}, {Set 3 Uint64 : 5}, {Set 4 String : xxxxxxxxxx_5} | ERowOp 1: {0, 10} {Set 2 Uint32 : 6}, {Set 3 Uint64 : 6}, {Set 4 String : xxxxxxxxxx_6} | ERowOp 1: {1, 1} {Set 2 Uint32 : 7}, {Set 3 Uint64 : 7}, {Set 4 String : xxxxxxxxxx_7} | ERowOp 1: {1, 3} {Set 2 Uint32 : 8}, {Set 3 Uint64 : 8}, {Set 4 String : xxxxxxxxxx_8} | ERowOp 1: {1, 4} {Set 2 Uint32 : 9}, {Set 3 Uint64 : 9}, {Set 4 String : xxxxxxxxxx_9} | ERowOp 1: {1, 6} {Set 2 Uint32 : 10}, {Set 3 Uint64 : 10}, {Set 4 String : xxxxxxxxxx_10} | ERowOp 1: {1, 7} {Set 2 Uint32 : 11}, {Set 3 Uint64 : 11}, {Set 4 String : xxxxxxxxxx_11} | ERowOp 1: {1, 8} {Set 2 Uint32 : 12}, {Set 3 Uint64 : 12}, {Set 4 String : xxxxxxxxxx_12} | ERowOp 1: {1, 10} {Set 2 Uint32 : 13}, {Set 3 Uint64 : 13}, {Set 4 String : xxxxxxxxxx_13} | ERowOp 1: {2, 1} {Set 2 Uint32 : 14}, {Set 3 Uint64 : 14}, {Set 4 String : xxxxxxxxxx_14} | ERowOp 1: {2, 3} {Set 2 Uint32 : 15}, {Set 3 Uint64 : 15}, {Set 4 String : xxxxxxxxxx_15} | ERowOp 1: {2, 4} {Set 2 Uint32 : 16}, {Set 3 Uint64 : 16}, {Set 4 String : xxxxxxxxxx_16} | ERowOp 1: {2, 6} {Set 2 Uint32 : 17}, {Set 3 Uint64 : 17}, {Set 4 String : xxxxxxxxxx_17} | ERowOp 1: {2, 7} {Set 2 Uint32 : 18}, {Set 3 Uint64 : 18}, {Set 4 String : xxxxxxxxxx_18} | ERowOp 1: {2, 8} {Set 2 Uint32 : 19}, {Set 3 Uint64 : 19}, {Set 4 String : xxxxxxxxxx_19} | ERowOp 1: {2, 10} {Set 2 Uint32 : 20}, {Set 3 Uint64 : 20}, {Set 4 String : xxxxxxxxxx_20} | ERowOp 1: {3, 1} {Set 2 Uint32 : 21}, {Set 3 Uint64 : 21}, {Set 4 String : xxxxxxxxxx_21} | ERowOp 1: {3, 3} {Set 2 Uint32 : 22}, {Set 3 Uint64 : 22}, {Set 4 String : xxxxxxxxxx_22} | ERowOp 1: {3, 4} {Set 2 Uint32 : 23}, {Set 3 Uint64 : 23}, {Set 4 String : xxxxxxxxxx_23} | ERowOp 1: {3, 6} {Set 2 Uint32 : 24}, {Set 3 Uint64 : 24}, {Set 4 String : xxxxxxxxxx_24} | ERowOp 1: {3, 7} {Set 2 Uint32 : 25}, {Set 3 Uint64 : 25}, {Set 4 String : xxxxxxxxxx_25} | ERowOp 1: {3, 8} {Set 2 Uint32 : 26}, {Set 3 Uint64 : 26}, {Set 4 String : xxxxxxxxxx_26} | ERowOp 1: {3, 10} {Set 2 Uint32 : 27}, {Set 3 Uint64 : 27}, {Set 4 String : xxxxxxxxxx_27} | ERowOp 1: {4, 1} {Set 2 Uint32 : 28}, {Set 3 Uint64 : 28}, {Set 4 String : xxxxxxxxxx_28} | ERowOp 1: {4, 3} {Set 2 Uint32 : 29}, {Set 3 Uint64 : 29}, {Set 4 String : xxxxxxxxxx_29} | ERowOp 1: {4, 4} {Set 2 Uint32 : 30}, {Set 3 Uint64 : 30}, {Set 4 String : xxxxxxxxxx_30} | ERowOp 1: {4, 6} {Set 2 Uint32 : 31}, {Set 3 Uint64 : 31}, {Set 4 String : xxxxxxxxxx_31} | ERowOp 1: {4, 7} {Set 2 Uint32 : 32}, {Set 3 Uint64 : 32}, {Set 4 String : xxxxxxxxxx_32} | ERowOp 1: {4, 8} {Set 2 Uint32 : 33}, {Set 3 Uint64 : 33}, {Set 4 String : xxxxxxxxxx_33} | ERowOp 1: {4, 10} {Set 2 Uint32 
: 34}, {Set 3 Uint64 : 34}, {Set 4 String : xxxxxxxxxx_34} | ERowOp 1: {5, 1} {Set 2 Uint32 : 35}, {Set 3 Uint64 : 35}, {Set 4 String : xxxxxxxxxx_35} | ERowOp 1: {5, 3} {Set 2 Uint32 : 36}, {Set 3 Uint64 : 36}, {Set 4 String : xxxxxxxxxx_36} | ERowOp 1: {5, 4} {Set 2 Uint32 : 37}, {Set 3 Uint64 : 37}, {Set 4 String : xxxxxxxxxx_37} | ERowOp 1: {5, 6} {Set 2 Uint32 : 38}, {Set 3 Uint64 : 38}, {Set 4 String : xxxxxxxxxx_38} | ERowOp 1: {5, 7} {Set 2 Uint32 : 39}, {Set 3 Uint64 : 39}, {Set 4 String : xxxxxxxxxx_39} Slices{ [0, 39] } Part{[1:2:3:0:0:0:0] eph 0, 2430b 40r} data 4017b + FlatIndex{20} Label{3 rev 3, 558b} 21 rec | Page Row Bytes (Uint32, Uint32) | 0 0 120b {0, 1} | 1 2 120b {0, 4} | 2 4 120b {0, 7} | 3 6 120b {0, 10} | 4 8 120b {1, 3} | 5 10 122b {1, 6} | 6 12 122b {1, 8} | 7 14 122b {2, NULL} | 8 16 122b {2, 4} | 9 18 122b {2, 7} | 10 20 122b {2, 10} | 11 22 122b {3, 3} | 12 24 122b {3, 6} | 13 26 122b {3, 8} | 14 28 122b {4, NULL} | 15 30 122b {4, 4} | 16 32 122b {4, 7} | 17 34 122b {4, 10} | 18 36 122b {5, 3} | 19 38 122b {5, 6} | 19 39 122b {5, 7} + BTreeIndex{PageId: 21 RowCount: 40 DataSize: 2430 ErasedRowCount: 0} Label{13 rev 1, 976b} | PageId: 0 RowCount: 2 DataSize: 120 ErasedRowCount: 0 | > {0, 4} | PageId: 1 RowCount: 4 DataSize: 240 ErasedRowCount: 0 | > {0, 7} | PageId: 2 RowCount: 6 DataSize: 360 ErasedRowCount: 0 | > {0, 10} | PageId: 3 RowCount: 8 DataSize: 480 ErasedRowCount: 0 | > {1, 3} | PageId: 4 RowCount: 10 DataSize: 600 ErasedRowCount: 0 | > {1, 6} | PageId: 5 RowCount: 12 DataSize: 722 ErasedRowCount: 0 | > {1, 8} | PageId: 6 RowCount: 14 DataSize: 844 ErasedRowCount: 0 | > {2, NULL} | PageId: 7 RowCount: 16 DataSize: 966 ErasedRowCount: 0 | > {2, 4} | PageId: 8 RowCount: 18 DataSize: 1088 ErasedRowCount: 0 | > {2, 7} | PageId: 9 RowCount: 20 DataSize: 1210 ErasedRowCount: 0 | > {2, 10} | PageId: 10 RowCount: 22 DataSize: 1332 ErasedRowCount: 0 | > {3, 3} | PageId: 11 RowCount: 24 DataSize: 1454 ErasedRowCount: 0 | > {3, 6} | PageId: 12 RowCount: 26 DataSize: 1576 ErasedRowCount: 0 | > {3, 8} | PageId: 13 RowCount: 28 DataSize: 1698 ErasedRowCount: 0 | > {4, NULL} | PageId: 14 RowCount: 30 DataSize: 1820 ErasedRowCount: 0 | > {4, 4} | PageId: 15 RowCount: 32 DataSize: 1942 ErasedRowCount: 0 | > {4, 7} | PageId: 16 RowCount: 34 DataSize: 2064 ErasedRowCount: 0 | > {4, 10} | PageId: 17 RowCount: 36 DataSize: 2186 ErasedRowCount: 0 | > {5, 3} | PageId: 18 RowCount: 38 DataSize: 2308 ErasedRowCount: 0 | > {5, 6} | PageId: 19 RowCount: 40 DataSize: 2430 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 120b}, [0, +2)row | ERowOp 1: {0, 1} {Set 2 Uint32 : 0}, {Set 3 Uint64 : 0}, {Set 4 String : xxxxxxxxxx_0} | ERowOp 1: {0, 3} {Set 2 Uint32 : 1}, {Set 3 Uint64 : 1}, {Set 4 String : xxxxxxxxxx_1} + Rows{1} Label{14 rev 1, 120b}, [2, +2)row | ERowOp 1: {0, 4} {Set 2 Uint32 : 2}, {Set 3 Uint64 : 2}, {Set 4 String : xxxxxxxxxx_2} | ERowOp 1: {0, 6} {Set 2 Uint32 : 3}, {Set 3 Uint64 : 3}, {Set 4 String : xxxxxxxxxx_3} + Rows{2} Label{24 rev 1, 120b}, [4, +2)row | ERowOp 1: {0, 7} {Set 2 Uint32 : 4}, {Set 3 Uint64 : 4}, {Set 4 String : xxxxxxxxxx_4} | ERowOp 1: {0, 8} {Set 2 Uint32 : 5}, {Set 3 Uint64 : 5}, {Set 4 String : xxxxxxxxxx_5} + Rows{3} Label{34 rev 1, 120b}, [6, +2)row | ERowOp 1: {0, 10} {Set 2 Uint32 : 6}, {Set 3 Uint64 : 6}, {Set 4 String : xxxxxxxxxx_6} | ERowOp 1: {1, 1} {Set 2 Uint32 : 7}, {Set 3 Uint64 : 7}, {Set 4 String : xxxxxxxxxx_7} + Rows{4} Label{44 rev 1, 120b}, [8, +2)row | ERowOp 1: {1, 3} {Set 2 Uint32 : 8}, {Set 3 Uint64 : 8}, {Set 4 String 
: xxxxxxxxxx_8} | ERowOp 1: {1, 4} {Set 2 Uint32 : 9}, {Set 3 Uint64 : 9}, {Set 4 String : xxxxxxxxxx_9} + Rows{5} Label{54 rev 1, 122b}, [10, +2)row | ERowOp 1: {1, 6} {Set 2 Uint32 : 10}, {Set 3 Uint64 : 10}, {Set 4 String : xxxxxxxxxx_10} | ERowOp 1: {1, 7} {Set 2 Uint32 : 11}, {Set 3 Uint64 : 11}, {Set 4 String : xxxxxxxxxx_11} + Rows{6} Label{64 rev 1, 122b}, [12, +2)row | ERowOp 1: {1, 8} {Set 2 Uint32 : 12}, {Set 3 Uint64 : 12}, {Set 4 String : xxxxxxxxxx_12} | ERowOp 1: {1, 10} {Set 2 Uint32 : 13}, {Set 3 Uint64 : 13}, {Set 4 String : xxxxxxxxxx_13} + Rows{7} Label{74 rev 1, 122b}, [14, +2)row | ERowOp 1: {2, 1} {Set 2 Uint32 : 14}, {Set 3 Uint64 : 14}, {Set 4 String : xxxxxxxxxx_14} | ERowOp 1: {2, 3} {Set 2 Uint32 : 15}, {Set 3 Uint64 : 15}, {Set 4 String : xxxxxxxxxx_15} + Rows{8} Label{84 rev 1, 122b}, [16, +2)row | ERowOp 1: {2, 4} {Set 2 Uint32 : 16}, {Set 3 Uint64 : 16}, {Set 4 String : xxxxxxxxxx_16} | ERowOp 1: {2, 6} {Set 2 Uint32 : 17}, {Set 3 Uint64 : 17}, {Set 4 String : xxxxxxxxxx_17} + Rows{9} Label{94 rev 1, 122b}, [18, +2)row | ERowOp 1: {2, 7} {Set 2 Uint32 : 18}, {Set 3 Uint64 : 18}, {Set 4 String : xxxxxxxxxx_18} | ERowOp 1: {2, 8} {Set 2 Uint32 : 19}, {Set 3 Uint64 : 19}, {Set 4 String : xxxxxxxxxx_19} + Rows{10} Label{104 rev 1, 122b}, [20, +2)row | ERowOp 1: {2, 10} {Set 2 Uint32 : 20}, {Set 3 Uint64 : 20}, {Set 4 String : xxxxxxxxxx_20} | ERowOp 1: {3, 1} {Set 2 Uint32 : 21}, {Set 3 Uint64 : 21}, {Set 4 String : xxxxxxxxxx_21} + Rows{11} Label{114 rev 1, 122b}, [22, +2)row | ERowOp 1: {3, 3} {Set 2 Uint32 : 22}, {Set 3 Uint64 : 22}, {Set 4 String : xxxxxxxxxx_22} | ERowOp 1: {3, 4} {Set 2 Uint32 : 23}, {Set 3 Uint64 : 23}, {Set 4 String : xxxxxxxxxx_23} + Rows{12} Label{124 rev 1, 122b}, [24, +2)row | ERowOp 1: {3, 6} {Set 2 Uint32 : 24}, {Set 3 Uint64 : 24}, {Set 4 String : xxxxxxxxxx_24} | ERowOp 1: {3, 7} {Set 2 Uint32 : 25}, {Set 3 Uint64 : 25}, {Set 4 String : xxxxxxxxxx_25} + Rows{13} Label{134 rev 1, 122b}, [26, +2)row | ERowOp 1: {3, 8} {Set 2 Uint32 : 26}, {Set 3 Uint64 : 26}, {Set 4 String : xxxxxxxxxx_26} | ERowOp 1: {3, 10} {Set 2 Uint32 : 27}, {Set 3 Uint64 : 27}, {Set 4 String : xxxxxxxxxx_27} + Rows{14} Label{144 rev 1, 122b}, [28, +2)row | ERowOp 1: {4, 1} {Set 2 Uint32 : 28}, {Set 3 Uint64 : 28}, {Set 4 String : xxxxxxxxxx_28} | ERowOp 1: {4, 3} {Set 2 Uint32 : 29}, {Set 3 Uint64 : 29}, {Set 4 String : xxxxxxxxxx_29} + Rows{15} Label{154 rev 1, 122b}, [30, +2)row | ERowOp 1: {4, 4} {Set 2 Uint32 : 30}, {Set 3 Uint64 : 30}, {Set 4 String : xxxxxxxxxx_30} | ERowOp 1: {4, 6} {Set 2 Uint32 : 31}, {Set 3 Uint64 : 31}, {Set 4 String : xxxxxxxxxx_31} + Rows{16} Label{164 rev 1, 122b}, [32, +2)row | ERowOp 1: {4, 7} {Set 2 Uint32 : 32}, {Set 3 Uint64 : 32}, {Set 4 String : xxxxxxxxxx_32} | ERowOp 1: {4, 8} {Set 2 Uint32 : 33}, {Set 3 Uint64 : 33}, {Set 4 String : xxxxxxxxxx_33} + Rows{17} Label{174 rev 1, 122b}, [34, +2)row | ERowOp 1: {4, 10} {Set 2 Uint32 : 34}, {Set 3 Uint64 : 34}, {Set 4 String : xxxxxxxxxx_34} | ERowOp 1: {5, 1} {Set 2 Uint32 : 35}, {Set 3 Uint64 : 35}, {Set 4 String : xxxxxxxxxx_35} + Rows{18} Label{184 rev 1, 122b}, [36, +2)row | ERowOp 1: {5, 3} {Set 2 Uint32 : 36}, {Set 3 Uint64 : 36}, {Set 4 String : xxxxxxxxxx_36} | ERowOp 1: {5, 4} {Set 2 Uint32 : 37}, {Set 3 Uint64 : 37}, {Set 4 String : xxxxxxxxxx_37} + Rows{19} Label{194 rev 1, 122b}, [38, +2)row | ERowOp 1: {5, 6} {Set 2 Uint32 : 38}, {Set 3 Uint64 : 38}, {Set 4 String : xxxxxxxxxx_38} | ERowOp 1: {5, 7} {Set 2 Uint32 : 39}, {Set 3 Uint64 : 39}, {Set 4 String : 
xxxxxxxxxx_39} Slices{ [0, 39] } Part{[1:2:3:0:0:0:0] eph 0, 4910b 40r} data 6206b + FlatIndex{26} Label{3 rev 3, 558b} 21 rec | Page Row Bytes (Uint32, Uint32) | 0 0 50b {0, 1} | 1 2 50b {0, 4} | 2 4 50b {0, 7} | 3 6 50b {0, 10} | 4 8 50b {1, 3} | 5 10 50b {1, 6} | 6 12 50b {1, 8} | 7 14 50b {2, NULL} | 8 16 50b {2, 4} | 10 18 50b {2, 7} | 11 20 50b {2, 10} | 12 22 50b {3, 3} | 13 24 50b {3, 6} | 15 26 50b {3, 8} | 16 28 50b {4, NULL} | 17 30 50b {4, 4} | 18 32 50b {4, 7} | 19 34 50b {4, 10} | 21 36 50b {5, 3} | 22 38 50b {5, 6} | 22 39 50b {5, 7} + BTreeIndex ... xxxxxxxxx_4} | ERowOp 1: {0, 8} {Set 2 Uint32 : 5}, {Set 3 Uint64 : 5}, {Set 4 String : xxxxxxxxxx_5} + Rows{3} Label{34 rev 1, 120b}, [6, +2)row | ERowOp 1: {0, 10} {Set 2 Uint32 : 6}, {Set 3 Uint64 : 6}, {Set 4 String : xxxxxxxxxx_6} | ERowOp 1: {1, 1} {Set 2 Uint32 : 7}, {Set 3 Uint64 : 7}, {Set 4 String : xxxxxxxxxx_7} + Rows{4} Label{44 rev 1, 120b}, [8, +2)row | ERowOp 1: {1, 3} {Set 2 Uint32 : 8}, {Set 3 Uint64 : 8}, {Set 4 String : xxxxxxxxxx_8} | ERowOp 1: {1, 4} {Set 2 Uint32 : 9}, {Set 3 Uint64 : 9}, {Set 4 String : xxxxxxxxxx_9} + Rows{5} Label{54 rev 1, 122b}, [10, +2)row | ERowOp 1: {1, 6} {Set 2 Uint32 : 10}, {Set 3 Uint64 : 10}, {Set 4 String : xxxxxxxxxx_10} | ERowOp 1: {1, 7} {Set 2 Uint32 : 11}, {Set 3 Uint64 : 11}, {Set 4 String : xxxxxxxxxx_11} + Rows{6} Label{64 rev 1, 122b}, [12, +2)row | ERowOp 1: {1, 8} {Set 2 Uint32 : 12}, {Set 3 Uint64 : 12}, {Set 4 String : xxxxxxxxxx_12} | ERowOp 1: {1, 10} {Set 2 Uint32 : 13}, {Set 3 Uint64 : 13}, {Set 4 String : xxxxxxxxxx_13} + Rows{7} Label{74 rev 1, 122b}, [14, +2)row | ERowOp 1: {2, 1} {Set 2 Uint32 : 14}, {Set 3 Uint64 : 14}, {Set 4 String : xxxxxxxxxx_14} | ERowOp 1: {2, 3} {Set 2 Uint32 : 15}, {Set 3 Uint64 : 15}, {Set 4 String : xxxxxxxxxx_15} + Rows{8} Label{84 rev 1, 122b}, [16, +2)row | ERowOp 1: {2, 4} {Set 2 Uint32 : 16}, {Set 3 Uint64 : 16}, {Set 4 String : xxxxxxxxxx_16} | ERowOp 1: {2, 6} {Set 2 Uint32 : 17}, {Set 3 Uint64 : 17}, {Set 4 String : xxxxxxxxxx_17} + Rows{9} Label{94 rev 1, 122b}, [18, +2)row | ERowOp 1: {2, 7} {Set 2 Uint32 : 18}, {Set 3 Uint64 : 18}, {Set 4 String : xxxxxxxxxx_18} | ERowOp 1: {2, 8} {Set 2 Uint32 : 19}, {Set 3 Uint64 : 19}, {Set 4 String : xxxxxxxxxx_19} + Rows{10} Label{104 rev 1, 122b}, [20, +2)row | ERowOp 1: {2, 10} {Set 2 Uint32 : 20}, {Set 3 Uint64 : 20}, {Set 4 String : xxxxxxxxxx_20} | ERowOp 1: {3, 1} {Set 2 Uint32 : 21}, {Set 3 Uint64 : 21}, {Set 4 String : xxxxxxxxxx_21} + Rows{11} Label{114 rev 1, 122b}, [22, +2)row | ERowOp 1: {3, 3} {Set 2 Uint32 : 22}, {Set 3 Uint64 : 22}, {Set 4 String : xxxxxxxxxx_22} | ERowOp 1: {3, 4} {Set 2 Uint32 : 23}, {Set 3 Uint64 : 23}, {Set 4 String : xxxxxxxxxx_23} + Rows{12} Label{124 rev 1, 122b}, [24, +2)row | ERowOp 1: {3, 6} {Set 2 Uint32 : 24}, {Set 3 Uint64 : 24}, {Set 4 String : xxxxxxxxxx_24} | ERowOp 1: {3, 7} {Set 2 Uint32 : 25}, {Set 3 Uint64 : 25}, {Set 4 String : xxxxxxxxxx_25} + Rows{13} Label{134 rev 1, 122b}, [26, +2)row | ERowOp 1: {3, 8} {Set 2 Uint32 : 26}, {Set 3 Uint64 : 26}, {Set 4 String : xxxxxxxxxx_26} | ERowOp 1: {3, 10} {Set 2 Uint32 : 27}, {Set 3 Uint64 : 27}, {Set 4 String : xxxxxxxxxx_27} + Rows{14} Label{144 rev 1, 122b}, [28, +2)row | ERowOp 1: {4, 1} {Set 2 Uint32 : 28}, {Set 3 Uint64 : 28}, {Set 4 String : xxxxxxxxxx_28} | ERowOp 1: {4, 3} {Set 2 Uint32 : 29}, {Set 3 Uint64 : 29}, {Set 4 String : xxxxxxxxxx_29} + Rows{15} Label{154 rev 1, 122b}, [30, +2)row | ERowOp 1: {4, 4} {Set 2 Uint32 : 30}, {Set 3 Uint64 : 30}, {Set 4 String : 
xxxxxxxxxx_30} | ERowOp 1: {4, 6} {Set 2 Uint32 : 31}, {Set 3 Uint64 : 31}, {Set 4 String : xxxxxxxxxx_31} + Rows{16} Label{164 rev 1, 122b}, [32, +2)row | ERowOp 1: {4, 7} {Set 2 Uint32 : 32}, {Set 3 Uint64 : 32}, {Set 4 String : xxxxxxxxxx_32} | ERowOp 1: {4, 8} {Set 2 Uint32 : 33}, {Set 3 Uint64 : 33}, {Set 4 String : xxxxxxxxxx_33} + Rows{17} Label{174 rev 1, 122b}, [34, +2)row | ERowOp 1: {4, 10} {Set 2 Uint32 : 34}, {Set 3 Uint64 : 34}, {Set 4 String : xxxxxxxxxx_34} | ERowOp 1: {5, 1} {Set 2 Uint32 : 35}, {Set 3 Uint64 : 35}, {Set 4 String : xxxxxxxxxx_35} + Rows{18} Label{184 rev 1, 122b}, [36, +2)row | ERowOp 1: {5, 3} {Set 2 Uint32 : 36}, {Set 3 Uint64 : 36}, {Set 4 String : xxxxxxxxxx_36} | ERowOp 1: {5, 4} {Set 2 Uint32 : 37}, {Set 3 Uint64 : 37}, {Set 4 String : xxxxxxxxxx_37} + Rows{19} Label{194 rev 1, 122b}, [38, +2)row | ERowOp 1: {5, 6} {Set 2 Uint32 : 38}, {Set 3 Uint64 : 38}, {Set 4 String : xxxxxxxxxx_38} | ERowOp 1: {5, 7} {Set 2 Uint32 : 39}, {Set 3 Uint64 : 39}, {Set 4 String : xxxxxxxxxx_39} Slices{ [0, 39] } Part{[1:2:3:0:0:0:0] eph 0, 2430b 40r} data 4441b + FlatIndex{26} Label{3 rev 3, 558b} 21 rec | Page Row Bytes (Uint32, Uint32) | 0 0 120b {0, 1} | 1 2 120b {0, 4} | 2 4 120b {0, 7} | 3 6 120b {0, 10} | 4 8 120b {1, 3} | 5 10 122b {1, 6} | 7 12 122b {1, 8} | 8 14 122b {2, NULL} | 9 16 122b {2, 4} | 11 18 122b {2, 7} | 12 20 122b {2, 10} | 13 22 122b {3, 3} | 15 24 122b {3, 6} | 16 26 122b {3, 8} | 17 28 122b {4, NULL} | 19 30 122b {4, 4} | 20 32 122b {4, 7} | 21 34 122b {4, 10} | 24 36 122b {5, 3} | 25 38 122b {5, 6} | 25 39 122b {5, 7} + BTreeIndex{PageId: 29 RowCount: 40 DataSize: 2430 ErasedRowCount: 0} Label{13 rev 1, 102b} | + BTreeIndex{PageId: 23 RowCount: 18 DataSize: 1088 ErasedRowCount: 0} Label{13 rev 1, 151b} | | + BTreeIndex{PageId: 6 RowCount: 6 DataSize: 360 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 0 RowCount: 2 DataSize: 120 ErasedRowCount: 0 | | | > {0, 4} | | | PageId: 1 RowCount: 4 DataSize: 240 ErasedRowCount: 0 | | | > {0, 7} | | | PageId: 2 RowCount: 6 DataSize: 360 ErasedRowCount: 0 | | > {0, 10} | | + BTreeIndex{PageId: 10 RowCount: 12 DataSize: 722 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 3 RowCount: 8 DataSize: 480 ErasedRowCount: 0 | | | > {1, 3} | | | PageId: 4 RowCount: 10 DataSize: 600 ErasedRowCount: 0 | | | > {1, 6} | | | PageId: 5 RowCount: 12 DataSize: 722 ErasedRowCount: 0 | | > {1, 8} | | + BTreeIndex{PageId: 14 RowCount: 18 DataSize: 1088 ErasedRowCount: 0} Label{13 rev 1, 147b} | | | PageId: 7 RowCount: 14 DataSize: 844 ErasedRowCount: 0 | | | > {2, NULL} | | | PageId: 8 RowCount: 16 DataSize: 966 ErasedRowCount: 0 | | | > {2, 4} | | | PageId: 9 RowCount: 18 DataSize: 1088 ErasedRowCount: 0 | > {2, 7} | + BTreeIndex{PageId: 28 RowCount: 40 DataSize: 2430 ErasedRowCount: 0} Label{13 rev 1, 151b} | | + BTreeIndex{PageId: 18 RowCount: 24 DataSize: 1454 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 11 RowCount: 20 DataSize: 1210 ErasedRowCount: 0 | | | > {2, 10} | | | PageId: 12 RowCount: 22 DataSize: 1332 ErasedRowCount: 0 | | | > {3, 3} | | | PageId: 13 RowCount: 24 DataSize: 1454 ErasedRowCount: 0 | | > {3, 6} | | + BTreeIndex{PageId: 22 RowCount: 30 DataSize: 1820 ErasedRowCount: 0} Label{13 rev 1, 147b} | | | PageId: 15 RowCount: 26 DataSize: 1576 ErasedRowCount: 0 | | | > {3, 8} | | | PageId: 16 RowCount: 28 DataSize: 1698 ErasedRowCount: 0 | | | > {4, NULL} | | | PageId: 17 RowCount: 30 DataSize: 1820 ErasedRowCount: 0 | | > {4, 4} | | + BTreeIndex{PageId: 27 RowCount: 40 DataSize: 
2430 ErasedRowCount: 0} Label{13 rev 1, 249b} | | | PageId: 19 RowCount: 32 DataSize: 1942 ErasedRowCount: 0 | | | > {4, 7} | | | PageId: 20 RowCount: 34 DataSize: 2064 ErasedRowCount: 0 | | | > {4, 10} | | | PageId: 21 RowCount: 36 DataSize: 2186 ErasedRowCount: 0 | | | > {5, 3} | | | PageId: 24 RowCount: 38 DataSize: 2308 ErasedRowCount: 0 | | | > {5, 6} | | | PageId: 25 RowCount: 40 DataSize: 2430 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 120b}, [0, +2)row | ERowOp 1: {0, 1} {Set 2 Uint32 : 0}, {Set 3 Uint64 : 0}, {Set 4 String : xxxxxxxxxx_0} | ERowOp 1: {0, 3} {Set 2 Uint32 : 1}, {Set 3 Uint64 : 1}, {Set 4 String : xxxxxxxxxx_1} + Rows{1} Label{14 rev 1, 120b}, [2, +2)row | ERowOp 1: {0, 4} {Set 2 Uint32 : 2}, {Set 3 Uint64 : 2}, {Set 4 String : xxxxxxxxxx_2} | ERowOp 1: {0, 6} {Set 2 Uint32 : 3}, {Set 3 Uint64 : 3}, {Set 4 String : xxxxxxxxxx_3} + Rows{2} Label{24 rev 1, 120b}, [4, +2)row | ERowOp 1: {0, 7} {Set 2 Uint32 : 4}, {Set 3 Uint64 : 4}, {Set 4 String : xxxxxxxxxx_4} | ERowOp 1: {0, 8} {Set 2 Uint32 : 5}, {Set 3 Uint64 : 5}, {Set 4 String : xxxxxxxxxx_5} + Rows{3} Label{34 rev 1, 120b}, [6, +2)row | ERowOp 1: {0, 10} {Set 2 Uint32 : 6}, {Set 3 Uint64 : 6}, {Set 4 String : xxxxxxxxxx_6} | ERowOp 1: {1, 1} {Set 2 Uint32 : 7}, {Set 3 Uint64 : 7}, {Set 4 String : xxxxxxxxxx_7} + Rows{4} Label{44 rev 1, 120b}, [8, +2)row | ERowOp 1: {1, 3} {Set 2 Uint32 : 8}, {Set 3 Uint64 : 8}, {Set 4 String : xxxxxxxxxx_8} | ERowOp 1: {1, 4} {Set 2 Uint32 : 9}, {Set 3 Uint64 : 9}, {Set 4 String : xxxxxxxxxx_9} + Rows{5} Label{54 rev 1, 122b}, [10, +2)row | ERowOp 1: {1, 6} {Set 2 Uint32 : 10}, {Set 3 Uint64 : 10}, {Set 4 String : xxxxxxxxxx_10} | ERowOp 1: {1, 7} {Set 2 Uint32 : 11}, {Set 3 Uint64 : 11}, {Set 4 String : xxxxxxxxxx_11} + Rows{7} Label{74 rev 1, 122b}, [12, +2)row | ERowOp 1: {1, 8} {Set 2 Uint32 : 12}, {Set 3 Uint64 : 12}, {Set 4 String : xxxxxxxxxx_12} | ERowOp 1: {1, 10} {Set 2 Uint32 : 13}, {Set 3 Uint64 : 13}, {Set 4 String : xxxxxxxxxx_13} + Rows{8} Label{84 rev 1, 122b}, [14, +2)row | ERowOp 1: {2, 1} {Set 2 Uint32 : 14}, {Set 3 Uint64 : 14}, {Set 4 String : xxxxxxxxxx_14} | ERowOp 1: {2, 3} {Set 2 Uint32 : 15}, {Set 3 Uint64 : 15}, {Set 4 String : xxxxxxxxxx_15} + Rows{9} Label{94 rev 1, 122b}, [16, +2)row | ERowOp 1: {2, 4} {Set 2 Uint32 : 16}, {Set 3 Uint64 : 16}, {Set 4 String : xxxxxxxxxx_16} | ERowOp 1: {2, 6} {Set 2 Uint32 : 17}, {Set 3 Uint64 : 17}, {Set 4 String : xxxxxxxxxx_17} + Rows{11} Label{114 rev 1, 122b}, [18, +2)row | ERowOp 1: {2, 7} {Set 2 Uint32 : 18}, {Set 3 Uint64 : 18}, {Set 4 String : xxxxxxxxxx_18} | ERowOp 1: {2, 8} {Set 2 Uint32 : 19}, {Set 3 Uint64 : 19}, {Set 4 String : xxxxxxxxxx_19} + Rows{12} Label{124 rev 1, 122b}, [20, +2)row | ERowOp 1: {2, 10} {Set 2 Uint32 : 20}, {Set 3 Uint64 : 20}, {Set 4 String : xxxxxxxxxx_20} | ERowOp 1: {3, 1} {Set 2 Uint32 : 21}, {Set 3 Uint64 : 21}, {Set 4 String : xxxxxxxxxx_21} + Rows{13} Label{134 rev 1, 122b}, [22, +2)row | ERowOp 1: {3, 3} {Set 2 Uint32 : 22}, {Set 3 Uint64 : 22}, {Set 4 String : xxxxxxxxxx_22} | ERowOp 1: {3, 4} {Set 2 Uint32 : 23}, {Set 3 Uint64 : 23}, {Set 4 String : xxxxxxxxxx_23} + Rows{15} Label{154 rev 1, 122b}, [24, +2)row | ERowOp 1: {3, 6} {Set 2 Uint32 : 24}, {Set 3 Uint64 : 24}, {Set 4 String : xxxxxxxxxx_24} | ERowOp 1: {3, 7} {Set 2 Uint32 : 25}, {Set 3 Uint64 : 25}, {Set 4 String : xxxxxxxxxx_25} + Rows{16} Label{164 rev 1, 122b}, [26, +2)row | ERowOp 1: {3, 8} {Set 2 Uint32 : 26}, {Set 3 Uint64 : 26}, {Set 4 String : xxxxxxxxxx_26} | ERowOp 1: {3, 10} {Set 2 Uint32 : 27}, 
{Set 3 Uint64 : 27}, {Set 4 String : xxxxxxxxxx_27} + Rows{17} Label{174 rev 1, 122b}, [28, +2)row | ERowOp 1: {4, 1} {Set 2 Uint32 : 28}, {Set 3 Uint64 : 28}, {Set 4 String : xxxxxxxxxx_28} | ERowOp 1: {4, 3} {Set 2 Uint32 : 29}, {Set 3 Uint64 : 29}, {Set 4 String : xxxxxxxxxx_29} + Rows{19} Label{194 rev 1, 122b}, [30, +2)row | ERowOp 1: {4, 4} {Set 2 Uint32 : 30}, {Set 3 Uint64 : 30}, {Set 4 String : xxxxxxxxxx_30} | ERowOp 1: {4, 6} {Set 2 Uint32 : 31}, {Set 3 Uint64 : 31}, {Set 4 String : xxxxxxxxxx_31} + Rows{20} Label{204 rev 1, 122b}, [32, +2)row | ERowOp 1: {4, 7} {Set 2 Uint32 : 32}, {Set 3 Uint64 : 32}, {Set 4 String : xxxxxxxxxx_32} | ERowOp 1: {4, 8} {Set 2 Uint32 : 33}, {Set 3 Uint64 : 33}, {Set 4 String : xxxxxxxxxx_33} + Rows{21} Label{214 rev 1, 122b}, [34, +2)row | ERowOp 1: {4, 10} {Set 2 Uint32 : 34}, {Set 3 Uint64 : 34}, {Set 4 String : xxxxxxxxxx_34} | ERowOp 1: {5, 1} {Set 2 Uint32 : 35}, {Set 3 Uint64 : 35}, {Set 4 String : xxxxxxxxxx_35} + Rows{24} Label{244 rev 1, 122b}, [36, +2)row | ERowOp 1: {5, 3} {Set 2 Uint32 : 36}, {Set 3 Uint64 : 36}, {Set 4 String : xxxxxxxxxx_36} | ERowOp 1: {5, 4} {Set 2 Uint32 : 37}, {Set 3 Uint64 : 37}, {Set 4 String : xxxxxxxxxx_37} + Rows{25} Label{254 rev 1, 122b}, [38, +2)row | ERowOp 1: {5, 6} {Set 2 Uint32 : 38}, {Set 3 Uint64 : 38}, {Set 4 String : xxxxxxxxxx_38} | ERowOp 1: {5, 7} {Set 2 Uint32 : 39}, {Set 3 Uint64 : 39}, {Set 4 String : xxxxxxxxxx_39}
>> TMiniKQLEngineFlatTest::TestSelectRangeFullExistsTruncatedByItemsFromNull [GOOD]
>> KqpQuery::TryToUpdateNonExistentColumn [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeFullExistsTruncatedByBytes
>> TMiniKQLEngineFlatTest::TestSelectRangeFullExistsTruncatedByBytes [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeNullNull
|72.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLProgramBuilderTest::TestInvalidParameterType [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeNullNull [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeToExclusive [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeNoShards
>> TMiniKQLEngineFlatTest::TestSelectRangeNoShards [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitions
>> TMiniKQLEngineFlatTest::TestSelectRowWithoutColumnsNotExists
>> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitions [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitionsTruncatedByItems1
>> TMiniKQLEngineFlatTest::TestSelectRowWithoutColumnsNotExists [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRowWithoutColumnsExists
>> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitionsTruncatedByItems1 [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitionsTruncatedByItems2
>> TMiniKQLEngineFlatTest::TestSelectRowWithoutColumnsExists [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRowPayload [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRowPayloadNullKey
>> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitionsTruncatedByItems2 [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitionsTruncatedByItems3 [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeNoColumns
>> TMiniKQLEngineFlatTest::TestSelectRowPayloadNullKey [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeToInclusive
>> TReplicationTests::CannotAddReplicationConfig [GOOD]
>> TReplicationTests::CannotSetAsyncReplicaAttribute
>> TMiniKQLEngineFlatTest::TestSelectRangeNoColumns [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_backup/unittest >>
IncrementalBackup::SimpleBackupBackupCollection-WithIncremental [GOOD] Test command err: 2025-03-31T16:02:34.012899Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:02:34.013195Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:02:34.013404Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00181e/r3tmp/tmpz7P4CG/pdisk_1.dat 2025-03-31T16:02:34.436454Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:596:2520], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:02:34.436540Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:02:34.436597Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-03-31T16:02:34.436735Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:593:2518], Recipient [1:409:2404]: {TEvModifySchemeTransaction txid# 1 TabletId# 72057594046644480} 2025-03-31T16:02:34.436774Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-03-31T16:02:34.558743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2025-03-31T16:02:34.559026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:02:34.559337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-03-31T16:02:34.559638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-31T16:02:34.559749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:02:34.559862Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-31T16:02:34.560638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-03-31T16:02:34.560821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-03-31T16:02:34.560933Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-31T16:02:34.560977Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-03-31T16:02:34.561212Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-03-31T16:02:34.561263Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-03-31T16:02:34.561346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:02:34.561430Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-03-31T16:02:34.561476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:02:34.561515Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:02:34.561641Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-31T16:02:34.562164Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-31T16:02:34.562210Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-03-31T16:02:34.562369Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-03-31T16:02:34.562408Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-03-31T16:02:34.562477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:02:34.562522Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 2025-03-31T16:02:34.562581Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:02:34.562669Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-31T16:02:34.563137Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-31T16:02:34.563171Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-03-31T16:02:34.563272Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-03-31T16:02:34.563321Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-03-31T16:02:34.563380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:02:34.563417Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:02:34.563462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2025-03-31T16:02:34.563494Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-31T16:02:34.563535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:02:34.567359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:02:34.568094Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-31T16:02:34.568152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 
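The TRACE/DEBUG entries around this point show the generic scheme-transaction handshake: the schemeshard answers a proposal with StatusAccepted, registers the transaction with the coordinator (TEvProposeTransactionStatus Status: 16), and only executes once the coordinator plans it into a step (Status: 17, followed by TEvPlanStep). A minimal sketch of that accept/plan/execute sequence in plain Python; Coordinator, SchemeShard and the step arithmetic are invented for illustration, not YDB's actual classes:

# Toy model of the propose/plan handshake traced above.
# Every name here is illustrative -- this is not YDB's API.

class Coordinator:
    """Hands out globally ordered plan steps."""
    def __init__(self):
        self.step = 0

    def plan(self, tx_id):
        self.step += 500          # the log shows steps advancing in units of 500
        return self.step

class SchemeShard:
    def __init__(self, coordinator):
        self.coordinator = coordinator
        self.state = {}

    def propose(self, tx_id):
        # Phase 1: validate and persist the operation, answer StatusAccepted.
        self.state[tx_id] = "accepted"
        return "StatusAccepted"

    def plan_and_execute(self, tx_id):
        # Phase 2: the coordinator assigns a plan step; the operation
        # executes only at that step (TEvPlanStep in the log).
        step = self.coordinator.plan(tx_id)
        self.state[tx_id] = f"executed@{step}"
        return step

ss = SchemeShard(Coordinator())
assert ss.propose(1) == "StatusAccepted"
print("tx 1 planned at step", ss.plan_and_execute(1))   # -> step 500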
2025-03-31T16:02:34.568319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2025-03-31T16:02:34.569329Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877760, Sender [1:601:2525], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046316545 Status: OK ServerId: [1:603:2526] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-03-31T16:02:34.569366Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-03-31T16:02:34.569413Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Handle TEvClientConnected, tabletId: 72057594046316545, status: OK, at schemeshard: 72057594046644480 2025-03-31T16:02:34.569510Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269091328, Sender [1:405:2400], Recipient [1:409:2404]: NKikimrTx.TEvProposeTransactionStatus Status: 16 StepId: 500 TxId: 1 2025-03-31T16:02:34.569813Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:605:2528], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:02:34.569849Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:02:34.569877Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-03-31T16:02:34.569972Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [1:593:2518], Recipient [1:409:2404]: NKikimrScheme.TEvNotifyTxCompletion TxId: 1 2025-03-31T16:02:34.570000Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-03-31T16:02:34.570068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046644480 2025-03-31T16:02:34.570144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2025-03-31T16:02:34.570181Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046644480 2025-03-31T16:02:34.603377Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 273285138, Sender [1:43:2090], Recipient [1:409:2404]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } } ItemKinds: 26 ItemKinds: 34 ItemKinds: 52 ItemKinds: 54 ItemKinds: 73 Local: true } 2025-03-31T16:02:34.603492Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } 2025-03-31T16:02:34.603535Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:02:34.604071Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [RootDataErasureManager] Stop 2025-03-31T16:02:34.604163Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Send TEvConfigNotificationResponse: SubscriptionId: 0 ConfigId { } 2025-03-31T16:02:34.653165Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:02:34.653306Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:02:34.664979Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:02:34.739937Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269091328, Sender [1:405:2400], Recipient [1:409:2404]: NKikimrTx.TEvProposeTransactionStatus Status: 17 StepId: 
500 TxId: 1 2025-03-31T16:02:34.740575Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:636:2544], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:02:34.740608Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:02:34.740633Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-03-31T16:02:34.740732Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269287424, Sender [1:568:2495], Recipient [1:409:2404]: {TEvPlanStep step# 500 MediatorId# 72057594046382081 TabletID 72057594046644480} 2025-03-31T16:02:34.740758Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-31T16:02:34.740847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-31T16:02:34.740969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 0 RawX2: 0 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemes ... 76715662:1, at schemeshard: 72057594046644480 2025-03-31T16:02:50.813261Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-31T16:02:50.813690Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715662:1, at schemeshard: 72057594046644480 2025-03-31T16:02:50.813736Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-31T16:02:50.825273Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:02:50.825363Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715662 state Ready TxInFly 0 2025-03-31T16:02:50.825441Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:02:50.825521Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:02:50.825822Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [3:997:2793], Recipient [3:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:02:50.825863Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:02:50.825893Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-03-31T16:02:50.826055Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269551620, Sender [3:666:2570], Recipient [3:409:2404]: NKikimrTxDataShard.TEvSchemaChanged Source { RawX1: 666 RawX2: 12884904458 } Origin: 72075186224037888 State: 2 TxId: 281474976715662 Step: 0 Generation: 1 2025-03-31T16:02:50.826086Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChanged 2025-03-31T16:02:50.826146Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046644480, at schemeshard: 72057594046644480, message: Source { RawX1: 666 RawX2: 12884904458 } Origin: 72075186224037888 State: 2 TxId: 281474976715662 Step: 0 Generation: 1 2025-03-31T16:02:50.826182Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715662, tablet: 72075186224037888, partId: 1 2025-03-31T16:02:50.826332Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 
281474976715662:1, at schemeshard: 72057594046644480, message: Source { RawX1: 666 RawX2: 12884904458 } Origin: 72075186224037888 State: 2 TxId: 281474976715662 Step: 0 Generation: 1 2025-03-31T16:02:50.826380Z node 3 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715662:1 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-03-31T16:02:50.826445Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 281474976715662:1 HandleReply TEvSchemaChanged at tablet: 72057594046644480 message: Source { RawX1: 666 RawX2: 12884904458 } Origin: 72075186224037888 State: 2 TxId: 281474976715662 Step: 0 Generation: 1 2025-03-31T16:02:50.826492Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976715662:1, shardIdx: 72057594046644480:1, datashard: 72075186224037888, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046644480 2025-03-31T16:02:50.826529Z node 3 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715662:1, at schemeshard: 72057594046644480 2025-03-31T16:02:50.826576Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976715662:1, datashard: 72075186224037889, at schemeshard: 72057594046644480 2025-03-31T16:02:50.826614Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976715662:1, datashard: 72075186224037888, at schemeshard: 72057594046644480 2025-03-31T16:02:50.826651Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715662:1 129 -> 240 2025-03-31T16:02:50.826790Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-31T16:02:50.829772Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715662:1, at schemeshard: 72057594046644480 2025-03-31T16:02:50.829824Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-31T16:02:50.829860Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 281474976715662:1 2025-03-31T16:02:50.829996Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [3:938:2743] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715662 at schemeshard: 72057594046644480 2025-03-31T16:02:50.830051Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [3:666:2570] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715662 at schemeshard: 72057594046644480 2025-03-31T16:02:50.830173Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715662 datashard 72075186224037888 state Ready 2025-03-31T16:02:50.830261Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-31T16:02:50.830466Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715662 datashard 72075186224037889 state Ready 2025-03-31T16:02:50.830506Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-03-31T16:02:50.830646Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [3:409:2404], Recipient [3:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-03-31T16:02:50.830679Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-03-31T16:02:50.830741Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 281474976715662:1, at schemeshard: 72057594046644480 2025-03-31T16:02:50.830797Z node 3 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 281474976715662:1ProgressState, operation type TxCopyTable 2025-03-31T16:02:50.830843Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-31T16:02:50.830886Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 281474976715662:1, name: CopyTableBarrier, done: 1, blocked: 1, parts count: 2 2025-03-31T16:02:50.830928Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715662, done: 1, blocked: 1 2025-03-31T16:02:50.831016Z node 3 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 281474976715662:1 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 281474976715662 Name: CopyTableBarrier }, at tablet# 72057594046644480 2025-03-31T16:02:50.831066Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715662:1 240 -> 240 2025-03-31T16:02:50.831705Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-31T16:02:50.831743Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 281474976715662:1 2025-03-31T16:02:50.831865Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [3:409:2404], Recipient [3:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-03-31T16:02:50.831898Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-03-31T16:02:50.831958Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715662:1, at schemeshard: 72057594046644480 2025-03-31T16:02:50.832005Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715662:1 ProgressState 2025-03-31T16:02:50.832128Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-31T16:02:50.832162Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715662:1 progress is 2/2 2025-03-31T16:02:50.832198Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715662 ready parts: 2/2 2025-03-31T16:02:50.832248Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715662:1 progress is 2/2 2025-03-31T16:02:50.832289Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715662 ready parts: 2/2 2025-03-31T16:02:50.832334Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715662, ready parts: 2/2, is published: true 2025-03-31T16:02:50.832421Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:909:2723] message: TxId: 281474976715662 2025-03-31T16:02:50.832478Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715662 ready parts: 2/2 2025-03-31T16:02:50.832524Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715662:0 2025-03-31T16:02:50.833882Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715662:0 2025-03-31T16:02:50.834022Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 10] was 2 2025-03-31T16:02:50.834064Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715662:1 
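The barrier lines just above ("Set barrier ... done: 1, blocked: 1", "All parts have reached barrier") describe a copy-table operation whose parts must all check in before the operation as a whole may complete ("ready parts: 2/2"). A rough analogy of that rendezvous pattern in plain Python using a thread barrier; this is an illustration only, not schemeshard code:

import threading

PARTS = 2  # "ready parts: 2/2" in the log above
barrier = threading.Barrier(PARTS)
completed = []

def part(part_id):
    # ... each part does its own work first (copy data, alter scheme) ...
    barrier.wait()              # "All parts have reached barrier"
    completed.append(part_id)   # only now may the part report done

threads = [threading.Thread(target=part, args=(i,)) for i in range(PARTS)]
for t in threads:
    t.start()
for t in threads:
    t.join()
print(f"operation done, parts: {len(completed)}/{PARTS}")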
2025-03-31T16:02:50.834093Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715662:1 2025-03-31T16:02:50.834220Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 11] was 3 2025-03-31T16:02:50.834260Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-03-31T16:02:50.834904Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-31T16:02:50.835023Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [3:909:2723] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976715662 at schemeshard: 72057594046644480 2025-03-31T16:02:50.835505Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [3:923:2730], Recipient [3:409:2404]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-03-31T16:02:50.835547Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-03-31T16:02:50.835578Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046644480 2025-03-31T16:02:51.026693Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [3:1024:2810], serverId# [3:1025:2811], sessionId# [0:0:0] 2025-03-31T16:02:51.026897Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqpf1yts1yqbreb1zja1azvc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NzU3OWY5NWYtZGU0OTAzZTMtMzU1NzM1NTMtZThlNGRmNTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 10 } }, { items { uint32_value: 2 } items { uint32_value: 20 } }, { items { uint32_value: 3 } items { uint32_value: 30 } } 2025-03-31T16:02:51.271984Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jqpf1z18fg5fvkn96z8tmcs1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=M2EyNmM4MDAtOWI0ZmExOWUtOTRmYTk1NGMtNTVlYjM5YzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 10 } }, { items { uint32_value: 2 } items { uint32_value: 20 } }, { items { uint32_value: 3 } items { uint32_value: 30 } }
>> TMiniKQLEngineFlatTest::TestSelectRangeToInclusive [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRowManyShards
>> TMiniKQLEngineFlatHostTest::ShardId [GOOD]
>> TMiniKQLEngineFlatHostTest::Basic [GOOD]
>> TMiniKQLEngineFlatTest::TestAbort
>> YdbProxy::ReadNonExistentTopic [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRowManyShards [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRowNoShards
>> TMiniKQLEngineFlatTest::TestAbort [GOOD]
>> TMiniKQLEngineFlatTest::TestCASBoth2Fail1
>> TMiniKQLEngineFlatTest::TestSelectRowNoShards [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitions [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitionsTruncatedByItems
>> TReplicationTests::CreateWithoutCredentials [GOOD]
>> TReplicationTests::Describe
>> KqpQuery::TableSink_ReplaceDataShardDataQuery+UseSink [GOOD]
>> KqpQuery::TableSink_ReplaceDataShardDataQuery-UseSink
>> TMiniKQLEngineFlatTest::TestCASBoth2Fail1 [GOOD]
>> TMiniKQLEngineFlatTest::TestCASBoth2Fail2
>> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitionsTruncatedByItems [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitionsTruncatedByBytes
------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestKinesisHttpProxy::ListShardsToken [GOOD]
Test command err: 2025-03-31T16:01:43.039242Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004483982504415:2125];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:01:43.039286Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001040/r3tmp/tmp3AL8x1/pdisk_1.dat
2025-03-31T16:01:43.770246Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:01:43.770333Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:01:43.828950Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-31T16:01:43.843383Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 4377, node 1
2025-03-31T16:01:44.100339Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T16:01:44.100360Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T16:01:44.100369Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T16:01:44.100472Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:11759
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:01:44.621521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:11759 2025-03-31T16:01:44.943950Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:44.952463Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-31T16:01:44.964282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:44.982233Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-03-31T16:01:44.989144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:45.184763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:01:45.294206Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710663, at schemeshard: 72057594046644480 2025-03-31T16:01:45.299560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:01:45.359352Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710665, at schemeshard: 72057594046644480 2025-03-31T16:01:45.365589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:45.451020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:01:45.531688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:45.576465Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:45.643338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:45.733157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:45.794813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:47.473057Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004501162375009:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:01:47.473059Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004501162374997:2376], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:01:47.473151Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:01:47.476353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480 2025-03-31T16:01:47.490132Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488004501162375011:2380], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2025-03-31T16:01:47.581506Z node 1 :TX_PROXY ERROR: Actor# [1:7488004501162375062:2923] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 18], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:01:48.011475Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01jqpf00ye9qrapvh96a5mfmqa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzQ1YmQ3MDUtYjM2Zjc0ZmQtYzA5OWYyZjItY2FlYWQyYmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:01:48.041528Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488004483982504415:2125];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:01:48.041994Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:01:48.064517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:48.139010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:48.176549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:48.225344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:48.286333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:48.335598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:48.419270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:48.487491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:48.577424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 waiting... 
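The resource-pool bootstrap visible here is a create-if-missing race: the fetch reports NOT_FOUND, a create is issued, a concurrent creator gets "path exist, request accepts it", and the creator schedules a retry to doublecheck. A generic sketch of that pattern; the statuses and helpers below are stand-ins, not YDB's actor implementation:

    #include <chrono>
    #include <iostream>
    #include <thread>

    enum class Status { Ok, NotFound, AlreadyExists, Retryable };

    // Stubs simulating the race seen in the log: the pool becomes visible
    // only on the third fetch, and our create loses to a concurrent one.
    Status FetchPool() { static int calls = 0; return ++calls < 3 ? Status::NotFound : Status::Ok; }
    Status CreatePool() { return Status::AlreadyExists; }

    bool EnsureDefaultPool(int maxAttempts = 5) {
        for (int attempt = 1; attempt <= maxAttempts; ++attempt) {
            if (FetchPool() == Status::Ok) return true;
            Status st = CreatePool();
            // "path exist, request accepts it" and plain success both mean:
            // back off, then verify again (the scheduled "doublechecking").
            if (st == Status::Ok || st == Status::AlreadyExists) {
                std::this_thread::sleep_for(std::chrono::milliseconds(50 * attempt));
                continue;
            }
            return false;
        }
        return FetchPool() == Status::Ok;
    }

    int main() { std::cout << (EnsureDefaultPool() ? "pool ready\n" : "gave up\n"); }

Treating AlreadyExists as success and verifying afterwards is what lets the sequence above converge even when two sessions race to create the same pool.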
2025-03-31T16:01:48.715502Z node 1 :HTTP INFO: Listening on http://127.0.0.1:4704 2025-03-31T16:01:49.723273Z node 1 :SQS INFO: Start SQS proxy service actor 2025-03-31T16:01:49.723641Z node 1 :HTTP INFO: Listening on http://[::]:5819 2025-03-31T16:01:49.727186Z node 1 :SQS INFO: Start SQS service actor 2025-03-31T16:01:49.727322Z node 1 :SQS DEBUG: SQS service config: { EnableSqs: true YandexCloudMode: true EnableDeadLetterQueues: true } 2025-03-31T16:01:49.745038Z node 1 :SQS DEBUG: Enable scheme board scheme cache 2025-03-31T16:01:49.786842Z node 1 :SQS INFO: Request SQS users list 2025-03-31T16:01:49.786850Z node 1 :SQS NOTICE: [Node tracker] schedule describe tables after 0.000000s 2025-03-31T16:01:49.786883Z node 1 :SQS DEBUG: [Node tracker] bootstrap on node=1 2025-03-31T16:01:49.788013Z node 1 :SQS DEBUG: Request SQS queues list 2025-03-31T16:01:49.804093Z node 1 :SQS DEBUG: Request [] Starting executor actor for query(idx ... 6224037907] TPersQueue::SendEvReadSetAckToSenders 2025-03-31T16:02:50.523163Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037910] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-03-31T16:02:50.523172Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] TxId 281474976710690, NewState WAIT_RS_ACKS 2025-03-31T16:02:50.523179Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037910] Try execute txs with state DELETING 2025-03-31T16:02:50.523185Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] TxId 281474976710690 moved from EXECUTED to WAIT_RS_ACKS 2025-03-31T16:02:50.523190Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037910] TxId 281474976710690, State DELETING 2025-03-31T16:02:50.523202Z node 8 :HTTP DEBUG: (#37,[::1]:49566) <- (200 ) 2025-03-31T16:02:50.523206Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037910] delete TxId 281474976710690 2025-03-31T16:02:50.523207Z node 8 :PERSQUEUE DEBUG: [TxId: 281474976710690] PredicateAcks: 0/0 2025-03-31T16:02:50.523226Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] HaveAllRecipientsReceive 1, AllSupportivePartitionsHaveBeenDeleted 1 2025-03-31T16:02:50.523236Z node 8 :PERSQUEUE DEBUG: [TxId: 281474976710690] PredicateAcks: 0/0 2025-03-31T16:02:50.523250Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] add an TxId 281474976710690 to the list for deletion 2025-03-31T16:02:50.523266Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] TxId 281474976710690, NewState DELETING 2025-03-31T16:02:50.523286Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] delete key for TxId 281474976710690 2025-03-31T16:02:50.523295Z node 8 :HTTP DEBUG: (#37,[::1]:49566) connection closed 2025-03-31T16:02:50.523313Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037908] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-03-31T16:02:50.523324Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037908] Try execute txs with state DELETING 2025-03-31T16:02:50.523327Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-03-31T16:02:50.523334Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037908] TxId 281474976710690, State DELETING 2025-03-31T16:02:50.523344Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037908] delete TxId 281474976710690 2025-03-31T16:02:50.523365Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] Registered with mediator time cast 2025-03-31T16:02:50.523414Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] Registered with mediator time cast 2025-03-31T16:02:50.523429Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037909] Registered with mediator time cast 2025-03-31T16:02:50.524730Z node 8 :PERSQUEUE DEBUG: [PQ: 
72075186224037907] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-03-31T16:02:50.524747Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] Try execute txs with state DELETING 2025-03-31T16:02:50.524781Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] TxId 281474976710690, State DELETING 2025-03-31T16:02:50.524798Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] delete TxId 281474976710690 2025-03-31T16:02:50.524848Z node 8 :HTTP DEBUG: (#37,[::1]:49568) incoming connection opened 2025-03-31T16:02:50.524910Z node 8 :HTTP DEBUG: (#37,[::1]:49568) -> (POST /Root) 2025-03-31T16:02:50.525031Z node 8 :HTTP_PROXY INFO: proxy service: incoming request from [380e:e500:6050:0:200e:e500:6050:0] request [ListShards] url [/Root] database [/Root] requestId: fbf12783-75a52314-ac741d1e-9b586576 2025-03-31T16:02:50.525443Z node 8 :HTTP_PROXY INFO: http request [ListShards] requestId [fbf12783-75a52314-ac741d1e-9b586576] got new request from [380e:e500:6050:0:200e:e500:6050:0] database '/Root' stream 'teststream' E0000 00:00:1743436970.526151 127417 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2025-03-31T16:02:50.525987Z node 8 :HTTP_PROXY DEBUG: http request [ListShards] requestId [fbf12783-75a52314-ac741d1e-9b586576] [auth] Authorized successfully 2025-03-31T16:02:50.526085Z node 8 :HTTP_PROXY INFO: http request [ListShards] requestId [fbf12783-75a52314-ac741d1e-9b586576] sending grpc request to '' database: '/Root' iam token size: 0 Http output full {"NextToken":"CKX0w+feMhACGAIiCnRlc3RzdHJlYW0=","Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}}]} 200 {"NextToken":"CKX0w+feMhACGAIiCnRlc3RzdHJlYW0=","Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}}]} 2025-03-31T16:02:50.532909Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server connected, pipe [8:7488004769930824088:2523], now have 1 active actors on pipe 2025-03-31T16:02:50.532961Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server connected, pipe [8:7488004769930824089:2524], now have 1 active actors on pipe 2025-03-31T16:02:50.534039Z node 8 :HTTP_PROXY INFO: http request [ListShards] requestId [fbf12783-75a52314-ac741d1e-9b586576] reply ok 2025-03-31T16:02:50.534401Z node 8 :HTTP DEBUG: (#37,[::1]:49568) <- (200 ) 2025-03-31T16:02:50.534514Z node 8 :HTTP DEBUG: (#37,[::1]:49568) connection closed 2025-03-31T16:02:50.535186Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server disconnected, pipe [8:7488004769930824088:2523] destroyed 2025-03-31T16:02:50.535222Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server disconnected, pipe [8:7488004769930824089:2524] destroyed 2025-03-31T16:02:50.536441Z node 8 :HTTP DEBUG: (#37,[::1]:49580) incoming connection opened 2025-03-31T16:02:50.536506Z node 8 :HTTP DEBUG: 
(#37,[::1]:49580) -> (POST /Root) 2025-03-31T16:02:50.536677Z node 8 :HTTP_PROXY INFO: proxy service: incoming request from [b8c8:101:6050:0:a0c8:101:6050:0] request [ListShards] url [/Root] database [/Root] requestId: f9bca4bf-dc4afd0f-5ba24024-3b26524e 2025-03-31T16:02:50.537114Z node 8 :HTTP_PROXY INFO: http request [ListShards] requestId [f9bca4bf-dc4afd0f-5ba24024-3b26524e] got new request from [b8c8:101:6050:0:a0c8:101:6050:0] database '/Root' stream 'teststream' 2025-03-31T16:02:50.547046Z node 8 :HTTP_PROXY DEBUG: http request [ListShards] requestId [f9bca4bf-dc4afd0f-5ba24024-3b26524e] [auth] Authorized successfully 2025-03-31T16:02:50.547232Z node 8 :HTTP_PROXY INFO: http request [ListShards] requestId [f9bca4bf-dc4afd0f-5ba24024-3b26524e] sending grpc request to '' database: '/Root' iam token size: 0 E0000 00:00:1743436970.547322 127417 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2025-03-31T16:02:50.551790Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server connected, pipe [8:7488004769930824100:2528], now have 1 active actors on pipe 2025-03-31T16:02:50.551879Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server connected, pipe [8:7488004769930824101:2529], now have 1 active actors on pipe 2025-03-31T16:02:50.552662Z node 8 :HTTP_PROXY INFO: http request [ListShards] requestId [f9bca4bf-dc4afd0f-5ba24024-3b26524e] reply ok 2025-03-31T16:02:50.552910Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server disconnected, pipe [8:7488004769930824100:2528] destroyed 2025-03-31T16:02:50.552943Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server disconnected, pipe [8:7488004769930824101:2529] destroyed 2025-03-31T16:02:50.553042Z node 8 :HTTP DEBUG: (#37,[::1]:49580) <- (200 ) 2025-03-31T16:02:50.553162Z node 8 :HTTP DEBUG: (#37,[::1]:49580) connection closed Http output full {"NextToken":"CLj0w+feMhACGAIiCnRlc3RzdHJlYW0=","Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}}]} 200 {"NextToken":"CLj0w+feMhACGAIiCnRlc3RzdHJlYW0=","Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}}]} 2025-03-31T16:02:50.554913Z node 8 :HTTP DEBUG: (#40,[::1]:49584) incoming connection opened 2025-03-31T16:02:50.554991Z node 8 :HTTP DEBUG: (#40,[::1]:49584) -> (POST /Root) 2025-03-31T16:02:50.555148Z node 8 :HTTP_PROXY INFO: proxy service: incoming request from [380c:3f00:6050:0:200c:3f00:6050:0] request [ListShards] url [/Root] database [/Root] requestId: 867f376-b9564581-4741b89f-c1019b68 2025-03-31T16:02:50.555604Z node 8 :HTTP_PROXY INFO: http request [ListShards] requestId [867f376-b9564581-4741b89f-c1019b68] got new request from [380c:3f00:6050:0:200c:3f00:6050:0] database '/Root' stream 'teststream' 2025-03-31T16:02:50.556026Z node 8 :HTTP_PROXY DEBUG: http request [ListShards] 
requestId [867f376-b9564581-4741b89f-c1019b68] [auth] Authorized successfully 2025-03-31T16:02:50.556128Z node 8 :HTTP_PROXY INFO: http request [ListShards] requestId [867f376-b9564581-4741b89f-c1019b68] sending grpc request to '' database: '/Root' iam token size: 0 E0000 00:00:1743436970.556212 127416 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2025-03-31T16:02:50.557166Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server connected, pipe [8:7488004769930824114:2534], now have 1 active actors on pipe 2025-03-31T16:02:50.557243Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server connected, pipe [8:7488004769930824115:2535], now have 1 active actors on pipe 2025-03-31T16:02:50.557922Z node 8 :HTTP_PROXY INFO: http request [ListShards] requestId [867f376-b9564581-4741b89f-c1019b68] reply ok 2025-03-31T16:02:50.558110Z node 8 :HTTP DEBUG: (#40,[::1]:49584) <- (200 ) 2025-03-31T16:02:50.558225Z node 8 :HTTP DEBUG: (#40,[::1]:49584) connection closed Http output full {"NextToken":"CL30w+feMhACGAIiCnRlc3RzdHJlYW0=","Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}}]} 200 {"NextToken":"CL30w+feMhACGAIiCnRlc3RzdHJlYW0=","Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}}]} 2025-03-31T16:02:50.560141Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server disconnected, pipe [8:7488004769930824114:2534] destroyed 2025-03-31T16:02:50.560184Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server disconnected, pipe [8:7488004769930824115:2535] destroyed >> TMiniKQLEngineFlatTest::TestCASBoth2Fail2 [GOOD] >> TMiniKQLEngineFlatTest::TestCASBoth2Fail12 [GOOD] >> TMiniKQLEngineFlatTest::TestBug998 >> RetryPolicy::TWriteSession_SeqNoShift [GOOD] >> RetryPolicy::RetryWithBatching >> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitionsTruncatedByBytes [GOOD] >> TMiniKQLEngineFlatTest::TestSomePushDown >> TMiniKQLEngineFlatTest::TestBug998 [GOOD] >> TMiniKQLEngineFlatTest::TestAcquireLocks |72.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order |72.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order |72.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order >> TMiniKQLEngineFlatTest::TestSomePushDown [GOOD] >> TMiniKQLEngineFlatTest::TestTakePushdown >> TMiniKQLEngineFlatTest::TestTakePushdown [GOOD] >> TMiniKQLEngineFlatTest::TestTopSortNonImmediatePushdown ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::TryToUpdateNonExistentColumn [GOOD] Test command err: Trying to start YDB, gRPC: 61536, MsgBus: 64833 2025-03-31T16:02:31.648981Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004690394302552:2193];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:02:31.649050Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ad1/r3tmp/tmpR0TW1M/pdisk_1.dat 2025-03-31T16:02:32.152585Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:02:32.176501Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:02:32.176591Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:02:32.180527Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61536, node 1 2025-03-31T16:02:32.315188Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:02:32.315228Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:02:32.315242Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:02:32.315406Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64833 TClient is connected to server localhost:64833 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:02:32.893776Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:32.922761Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:02:32.933202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:33.107180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
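Every entry in this log carries a microsecond UTC timestamp such as 2025-03-31T16:02:31.648981Z, which makes it easy to measure how long a phase took. A sketch that parses two of them and prints the gap; it assumes exactly this format, and timegm() is the POSIX/glibc extension available in the Linux CI environment these runs use:

    #include <cstdio>
    #include <ctime>
    #include <string>

    // Parse "2025-03-31T16:02:31.648981Z" into seconds since the epoch (UTC).
    double ParseLogTime(const std::string& ts) {
        std::tm tm{};
        unsigned micros = 0;
        if (sscanf(ts.c_str(), "%d-%d-%dT%d:%d:%d.%6uZ",
                   &tm.tm_year, &tm.tm_mon, &tm.tm_mday,
                   &tm.tm_hour, &tm.tm_min, &tm.tm_sec, &micros) != 7)
            return -1.0;
        tm.tm_year -= 1900;
        tm.tm_mon -= 1;
        return static_cast<double>(timegm(&tm)) + micros / 1e6;
    }

    int main() {
        double a = ParseLogTime("2025-03-31T16:02:31.648981Z");
        double b = ParseLogTime("2025-03-31T16:02:36.649548Z");
        std::printf("elapsed: %.6f s\n", b - a);  // 5.000567 s
    }

Applied to the node-1 METADATA_PROVIDER events in this run, the gap between the undelivered warning and the matching timeout error is almost exactly the five-second existence-check deadline.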
2025-03-31T16:02:33.281540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:33.363365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:35.491179Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004707574173389:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:35.491443Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:36.138921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:02:36.170042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:02:36.233335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:02:36.263841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:02:36.297348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:02:36.374114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:02:36.467346Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004711869141208:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:36.467456Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:36.471944Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004711869141213:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:36.476223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:02:36.489097Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488004711869141215:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:02:36.552613Z node 1 :TX_PROXY ERROR: Actor# [1:7488004711869141269:3457] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:02:36.649548Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488004690394302552:2193];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:02:36.649631Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:02:38.238856Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488004720459076240:2508], TxId: 281474976710671, task: 1. Ctx: { TraceId : 01jqpf1j1091zwaf4qzdmr8wjx. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=MjZjMTdiNS1mODI5MTI1ZS01ZjQxZGY0MS03OTcxZWRlNA==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED DEFAULT_ERROR: {
: Error: Terminate was called, reason(17): Bad filter value. }. 2025-03-31T16:02:38.240046Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488004720459076241:2509], TxId: 281474976710671, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=MjZjMTdiNS1mODI5MTI1ZS01ZjQxZGY0MS03OTcxZWRlNA==. TraceId : 01jqpf1j1091zwaf4qzdmr8wjx. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7488004720459076236:2500], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-03-31T16:02:38.241766Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjZjMTdiNS1mODI5MTI1ZS01ZjQxZGY0MS03OTcxZWRlNA==, ActorId: [1:7488004716164108865:2500], ActorState: ExecuteState, TraceId: 01jqpf1j1091zwaf4qzdmr8wjx, Create QueryResponse for error on request, msg:
: Error: Terminate was called, reason(17): Bad filter value. Trying to start YDB, gRPC: 3313, MsgBus: 1898 2025-03-31T16:02:39.200381Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488004725593666543:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:02:39.200471Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ad1/r3tmp/tmpwYYV8K/pdisk_1.dat 2025-03-31T16:02:39.321842Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:02:39.341288Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:02:39.341375Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 3313, node 2 2025-03-31T16:02:39.345528Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:02:39.463628Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:02:39.463661Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:02:39.463668Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:02:39.463835Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1898 TClient is connected to server localhost:1898 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true Create ... 
Table, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:02:42.809196Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:02:42.845449Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:02:42.920235Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:02:42.952115Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:02:43.020735Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:02:43.123158Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488004742773538023:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:43.123259Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:43.125017Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488004742773538028:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:43.129708Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:02:43.144912Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488004742773538030:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:02:43.230383Z node 2 :TX_PROXY ERROR: Actor# [2:7488004742773538085:3449] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:02:44.201044Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488004725593666543:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:02:44.201106Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 1601, MsgBus: 64314 2025-03-31T16:02:45.520920Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488004750777069546:2133];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:02:45.522003Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ad1/r3tmp/tmpogusX1/pdisk_1.dat 2025-03-31T16:02:45.675481Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:02:45.697358Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:02:45.697467Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:02:45.709591Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1601, node 3 2025-03-31T16:02:45.766252Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:02:45.766274Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:02:45.766283Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:02:45.766435Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64314 TClient is connected to server localhost:64314 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
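The "WaitRootIsUp 'Root'..." / "WaitRootIsUp 'Root' success." pair printed by the test client is a readiness poll. A generic, deadline-bounded version of the same idea; the rootIsUp lambda below is a stub standing in for the real Ls("Root") check, and none of this is the actual TClient code:

    #include <chrono>
    #include <functional>
    #include <iostream>
    #include <thread>

    // Poll check() until it returns true or the deadline passes.
    bool WaitUntil(const std::function<bool()>& check,
                   std::chrono::milliseconds deadline,
                   std::chrono::milliseconds step = std::chrono::milliseconds(100)) {
        auto start = std::chrono::steady_clock::now();
        while (std::chrono::steady_clock::now() - start < deadline) {
            if (check()) return true;
            std::this_thread::sleep_for(step);
        }
        return check();  // one last try at the deadline
    }

    int main() {
        int polls = 0;
        auto rootIsUp = [&polls] { return ++polls >= 3; };  // stub for Ls("Root")
        std::cout << "WaitRootIsUp 'Root'...\n";
        bool ok = WaitUntil(rootIsUp, std::chrono::seconds(10));
        std::cout << (ok ? "WaitRootIsUp 'Root' success.\n"
                         : "WaitRootIsUp timed out\n");
    }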
2025-03-31T16:02:46.248271Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:46.262968Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:46.339722Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:46.502660Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:46.606575Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:49.486222Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488004767956940427:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:49.486280Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:49.542007Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:02:49.613525Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:02:49.657395Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:02:49.706477Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:02:49.792216Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:02:49.849987Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:02:49.962535Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488004767956940949:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:49.962610Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:49.962972Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488004767956940954:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:49.971707Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:02:49.995363Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488004767956940956:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:02:50.073961Z node 3 :TX_PROXY ERROR: Actor# [3:7488004772251908307:3453] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:02:50.525168Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488004750777069546:2133];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:02:50.525240Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:02:51.497212Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7488004776546875909:2503], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:3:84: Error: At function: KiUpdateTable!
:3:84: Error: Column 'NonExistentColumn' does not exist in table '/Root/KeyValue'., code: 2017 2025-03-31T16:02:51.499488Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=N2FlY2ZmYjQtYTJkMDExMWMtOGM1MjQ2MjktMTI0NDU5ZDk=, ActorId: [3:7488004776546875899:2497], ActorState: ExecuteState, TraceId: 01jqpf1ze21yg217mxh0nfrewm, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: >> TMiniKQLEngineFlatTest::TestAcquireLocks [GOOD] >> TMiniKQLEngineFlatTest::NoMapPushdownMultipleConsumers >> TVersions::WreckHead [GOOD] >> TVersions::WreckHeadReverse >> TMiniKQLEngineFlatTest::TestTopSortNonImmediatePushdown [GOOD] >> TMiniKQLEngineFlatTest::NoMapPushdownMultipleConsumers [GOOD] >> TMiniKQLEngineFlatTest::NoMapPushdownNonPureLambda [GOOD] >> TMiniKQLEngineFlatTest::NoOrderedMapPushdown |72.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::TestSelectRangeNoColumns [GOOD] >> TMiniKQLEngineFlatTest::NoOrderedMapPushdown [GOOD] >> TMiniKQLEngineFlatTest::NoMapPushdownWriteToTable >> TMiniKQLEngineFlatTest::TestEmptyProgram >> TMiniKQLEngineFlatTest::NoMapPushdownWriteToTable [GOOD] |72.9%| [TA] $(B)/ydb/core/tx/schemeshard/ut_olap/test-results/unittest/{meta.json ... results_accumulator.log} >> TMiniKQLEngineFlatTest::NoMapPushdownArgClosure >> TMiniKQLEngineFlatTest::TestEmptyProgram [GOOD] >> TMiniKQLEngineFlatTest::TestEraseRow >> TReplicationTests::CannotSetAsyncReplicaAttribute [GOOD] >> TReplicationTests::AlterReplicatedTable >> TMiniKQLEngineFlatTest::NoMapPushdownArgClosure [GOOD] >> TMiniKQLEngineFlatTest::TestEraseRow [GOOD] >> TMiniKQLEngineFlatTest::TestEraseRowNullKey [GOOD] >> TMiniKQLEngineFlatTest::TestEraseRowManyShards >> TMiniKQLEngineFlatTest::TestEraseRowManyShards [GOOD] >> TMiniKQLEngineFlatTest::TestCASBoth2Success >> TPart::WreckPartColumnGroups [GOOD] >> TPart::PageFailEnvColumnGroups >> TMiniKQLEngineFlatTest::TestCASBoth2Success [GOOD] >> TMiniKQLEngineFlatTest::TestEraseRowNoShards >> TMiniKQLProtoTestYdb::TestExportVoidTypeYdb >> TMiniKQLProgramBuilderTest::TestUpdateRowStaticKey >> TMiniKQLEngineFlatTest::TestEraseRowNoShards [GOOD] >> TMiniKQLEngineFlatTest::TestDiagnostics >> TMiniKQLProtoTestYdb::TestExportVoidTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportUuidTypeYdb >> TMiniKQLProgramBuilderTest::TestUpdateRowStaticKey [GOOD] >> TMiniKQLProtoTestYdb::TestExportDataTypeYdb >> TMiniKQLEngineFlatTest::TestDiagnostics [GOOD] >> TMiniKQLEngineFlatTest::TestCombineByKeyPushdown >> TMiniKQLProtoTestYdb::TestExportUuidTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportTupleTypeYdb >> TMiniKQLProtoTestYdb::TestExportDataTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDecimalTypeYdb >> KqpStats::OneShardLocalExec-UseSink [GOOD] >> KqpStats::OneShardNonLocalExec+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::ReadNonExistentTopic [GOOD] Test command err: 2025-03-31T16:00:14.395618Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004099924439570:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:00:14.395962Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001579/r3tmp/tmpokYsQV/pdisk_1.dat 2025-03-31T16:00:15.107989Z node 1 :IMPORT WARN: Table 
profiles were not loaded 2025-03-31T16:00:15.130279Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:00:15.130415Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:00:15.138754Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13535 TServer::EnableGrpc on GrpcPort 20522, node 1 2025-03-31T16:00:15.763684Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:00:15.763708Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:00:15.763715Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:00:15.763836Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13535 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:00:16.324834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:00:16.823845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-03-31T16:00:19.387314Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488004099924439570:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:00:19.387400Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:00:19.953686Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004121399276818:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:00:19.955494Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:00:19.962610Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004121399276856:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:00:19.966622Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004121399276857:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:00:19.976593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:2, at schemeshard: 72057594046644480 2025-03-31T16:00:19.985757Z node 1 :TX_PROXY ERROR: Actor# [1:7488004121399276882:2461] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-31T16:00:20.012703Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-03-31T16:00:20.014215Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488004121399276861:2375], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-03-31T16:00:20.014290Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488004121399276881:2376], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-03-31T16:00:20.070196Z node 1 :TX_PROXY ERROR: Actor# [1:7488004125694244206:2486] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:00:20.110785Z node 1 :TX_PROXY ERROR: Actor# [1:7488004125694244224:2494] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:00:22.710937Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-03-31T16:00:23.529080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T16:00:24.305814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-31T16:00:25.026335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-31T16:00:26.175008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-03-31T16:00:30.090295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:00:30.090342Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:02:48.551952Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488004761185126204:2069];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:02:48.553366Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001579/r3tmp/tmp1ZwYHD/pdisk_1.dat 2025-03-31T16:02:48.805998Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:02:48.891933Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:02:48.892053Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:02:48.907526Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3608 TServer::EnableGrpc on GrpcPort 31824, node 2 2025-03-31T16:02:49.398252Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:02:49.398276Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:02:49.398283Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:02:49.398419Z node 2 
:NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3608 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:02:49.867740Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:49.876461Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 |73.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::TestTopSortNonImmediatePushdown [GOOD] >> TMiniKQLEngineFlatTest::TestCombineByKeyPushdown [GOOD] >> TMiniKQLEngineFlatTest::TestCombineByKeyNoPushdown >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistWithoutColumns >> TReplicationTests::Describe [GOOD] >> TReplicationTests::CreateReplicatedTable >> TMiniKQLProtoTestYdb::TestExportTupleTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportStructTypeYdb >> TMiniKQLProtoTestYdb::TestExportDecimalTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDictTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportBoolYdb >> TMiniKQLEngineFlatTest::TestCombineByKeyNoPushdown [GOOD] >> TMiniKQLEngineFlatTest::TestLengthPushdown [GOOD] >> TMiniKQLEngineFlatTest::TestInternalResult >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistWithoutColumns [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistSetPayload >> TMiniKQLProtoTestYdb::TestExportStructTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportVariantTupleTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportVariantStructTypeYdb >> TMiniKQLProtoTestYdb::TestExportBoolYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDoubleYdb >> TMiniKQLEngineFlatTest::TestInternalResult [GOOD] >> TMiniKQLEngineFlatTest::TestIndependentSelects >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistSetPayload [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistSetPayloadNullValue >> TMiniKQLProtoTestYdb::TestExportVariantStructTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportVoidYdb >> TMiniKQLProtoTestYdb::TestExportDoubleYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDecimalYdb >> TMiniKQLEngineFlatTest::TestIndependentSelects [GOOD] >> TMiniKQLEngineFlatTest::TestCrossTableRs |73.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::NoMapPushdownArgClosure [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistSetPayloadNullValue [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistErasePayload >> TMiniKQLProtoTestYdb::TestExportVoidYdb [GOOD] >> 
TMiniKQLProtoTestYdb::TestExportDecimalYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDecimalNegativeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDecimalHugeYdb >> TMiniKQLProtoTestYdb::TestExportStringYdb >> TMiniKQLEngineFlatTest::TestCrossTableRs [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistErasePayload [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowExistChangePayload [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowExistErasePayload >> TMiniKQLProtoTestYdb::TestExportStringYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDecimalHugeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportUuidYdb >> TMiniKQLProtoTestYdb::TestExportEmptyOptionalOptionalYdb >> TMiniKQLEngineFlatTest::TestUpdateRowExistErasePayload [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowManyShards >> KqpStats::StreamLookupStats+StreamLookupJoin >> TMiniKQLProtoTestYdb::TestExportUuidYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportEmptyOptionalOptionalYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportTupleYdb >> TMiniKQLProtoTestYdb::TestExportDictYdb >> TMiniKQLEngineFlatTest::TestUpdateRowManyShards [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowNoShards >> TMiniKQLProtoTestYdb::TestExportTupleYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportStructYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportVariantYdb >> TMiniKQLProtoTestYdb::TestExportVariantYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDictYdb [GOOD] >> TMiniKQLProtoTestYdb::TestCellsFromTuple [GOOD] >> TReplicationTests::AlterReplicatedTable [GOOD] >> TReplicationTests::AlterReplicatedIndexTable >> TMiniKQLEngineFlatTest::TestUpdateRowNoShards [GOOD] >> TMiniKQLEngineFlatTest::TestTopSortPushdownPk >> TMiniKQLEngineFlatTest::TestTopSortPushdownPk [GOOD] >> TMiniKQLEngineFlatTest::TestTopSortPushdown >> KqpQuery::PreparedQueryInvalidate >> OperationMapping::IndexBuildCanceled [GOOD] >> TMiniKQLEngineFlatTest::TestTopSortPushdown [GOOD] >> TMiniKQLProgramBuilderTest::TestEraseRowDynamicKey >> BuildStatsHistogram::Single_History_Slices [GOOD] >> BuildStatsHistogram::Ten_Mixed >> TMiniKQLProgramBuilderTest::TestEraseRowDynamicKey [GOOD] >> TMiniKQLProgramBuilderTest::TestAcquireLocks ------- [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::TestCrossTableRs [GOOD] Test command err: SetProgram (370): ydb/core/engine/mkql_engine_flat.cpp:183: ExtractResultType(): requirement !label.StartsWith(TxInternalResultPrefix) failed. 
Label can't be used in SetResult as it's reserved for internal purposes: __cantuse PrepareShardPrograms (491): too many shard readsets (1 > 0), src tables: [200:301:0], dst tables: [200:302:0] Type { Kind: Struct } >> KqpQuery::OlapCreateAsSelect_Simple >> TMiniKQLProgramBuilderTest::TestAcquireLocks [GOOD] >> TMiniKQLProgramBuilderTest::TestDiagnostics >> TMiniKQLProgramBuilderTest::TestDiagnostics [GOOD] |73.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> OperationMapping::IndexBuildCanceled [GOOD] |73.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLProtoTestYdb::TestCellsFromTuple [GOOD] >> SplitPathTests::WithDatabaseShouldFail [GOOD] >> KqpParams::ImplicitParameterTypes [GOOD] >> KqpParams::ExplicitSameParameterTypesQueryCacheCheck |73.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLProtoTestYdb::TestExportVariantYdb [GOOD] >> KqpTypes::QuerySpecialTypes [GOOD] >> KqpTypes::SelectNull >> TestKinesisHttpProxy::GoodRequestCreateStream [GOOD] |73.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest |73.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> SplitPathTests::WithDatabaseShouldFail [GOOD] >> TPart::PageFailEnvColumnGroups [GOOD] >> TPart::ForwardEnvColumnGroups >> BsControllerConfig::AddDriveSerial >> KqpTypes::Time64Columns-EnableTableDatetime64 [GOOD] >> TReplicationTests::AlterReplicatedIndexTable [GOOD] >> TReplicationTests::CopyReplicatedTable |73.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLProgramBuilderTest::TestDiagnostics [GOOD] >> TPart::ForwardEnvColumnGroups [GOOD] >> TPart::Versions [GOOD] >> TPart::ManyVersions [GOOD] >> TPart::ManyDeltas [GOOD] >> TPart::CutKeys_Lz4 [GOOD] >> TPart::CutKeys_Seek >> TPart::CutKeys_Seek [GOOD] >> TPart::CutKeys_SeekPages [GOOD] >> TPart::CutKeys_SeekSlices [GOOD] >> TPart::CutKeys_CutString [GOOD] >> TPart::CutKeys_CutUtf8String [GOOD] >> TPartBtreeIndexIteration::NoNodes >> SlowTopicAutopartitioning::CDC_Write [GOOD] >> KqpExplain::ReadTableRangesFullScan [GOOD] >> TReplicationTests::CreateReplicatedTable [GOOD] >> TReplicationTests::DropReplicationWithInvalidCredentials >> KqpExplain::ReadTableRanges |73.0%| [TA] $(B)/ydb/core/engine/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> BsControllerConfig::OverlayMap >> BsControllerConfig::OverlayMapCrossReferences >> BsControllerConfig::Basic >> BsControllerConfig::ReassignGroupDisk >> BsControllerConfig::PDiskCreate >> DataShardVolatile::TwoAppendsMustBeVolatile+UseSink [GOOD] >> DataShardVolatile::TwoAppendsMustBeVolatile-UseSink >> TPQTest::TestMaxTimeLagRewind [GOOD] >> TPQTest::TestManyConsumers >> TPersQueueTest::WhenTheTopicIsDeletedBeforeDataIsDecompressed_Uncompressed [GOOD] >> TPersQueueTest::WhenTheTopicIsDeletedAfterDecompressingTheData_Uncompressed >> KqpStats::JoinNoStatsScan [GOOD] >> KqpStats::DeferredEffects+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpTypes::Time64Columns-EnableTableDatetime64 [GOOD] Test command err: Trying to start YDB, gRPC: 5601, MsgBus: 13710 2025-03-31T16:02:39.320565Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004723522412282:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:02:39.320621Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ac0/r3tmp/tmps21eSR/pdisk_1.dat 2025-03-31T16:02:39.855493Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:02:39.855552Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:02:39.860668Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:02:39.860939Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5601, node 1 2025-03-31T16:02:40.005630Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:02:40.005656Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:02:40.005664Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:02:40.005785Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13710 TClient is connected to server localhost:13710 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success.
2025-03-31T16:02:40.715982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:40.751511Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:02:40.759862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:40.935262Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:41.125477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:41.223526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:42.922596Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004736407315924:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:42.922722Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:43.369373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:02:43.407605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:02:43.446182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:02:43.484732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:02:43.520899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:02:43.575077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:02:43.660028Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004740702283739:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:43.660124Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:43.660270Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004740702283744:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:43.668243Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:02:43.682448Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488004740702283746:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:02:43.734853Z node 1 :TX_PROXY ERROR: Actor# [1:7488004740702283798:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:02:44.321474Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488004723522412282:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:02:44.321551Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:02:44.705261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
: Warning: Optimization, code: 1070
:3:29: Warning: Unsafe conversion integral value to Timestamp, consider using date types, code: 1102 Trying to start YDB, gRPC: 17560, MsgBus: 28508 2025-03-31T16:02:45.805838Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488004749684469498:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:02:45.805905Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ac0/r3tmp/tmpflrlh6/pdisk_1.dat 2025-03-31T16:02:45.935406Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:02:45.950306Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:02:45.950384Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:02:45.953577Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17560, node 2 2025-03-31T16:02:45.999246Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:02:45.999270Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:02:45.999277Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:02:45.999388Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28508 TClient is connected to server localhost:28508 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-31T16:02:46.492669Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:02:46.515356Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:46.624737Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:02:46.778051Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:46.875811Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:49.632084Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488004766864340465:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:49.632184Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:49.696327Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:02:49.772641Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:02:49.831567Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:02:49.882322Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:02:49.937604Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:02:50.015831Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:02:50.125824Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488004771159308285:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:50.125925Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:50.126247Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488004771159308290:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:50.130986Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:02:50.157130Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488004771159308292:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:02:50.236176Z node 2 :TX_PROXY ERROR: Actor# [2:7488004771159308347:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:02:50.806633Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488004749684469498:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:02:50.806697Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:02:51.371276Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:02:51.513873Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7488004775454276035:2508], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:6:25: Error: At function: AsList
:6:46: Error: At function: AsStruct
:3:29: Error: At function: Just, At function: UnsafeTimestampCast
:3:29: Error: Unsafe timestamp cast restricted from SQL v1. 2025-03-31T16:02:51.515393Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YTU1YjVkNjAtNGE4Zjk4NzgtOGJjOTkwMmItZjEyYzZjODY=, ActorId: [2:7488004775454275938:2497], ActorState: ExecuteState, TraceId: 01jqpf1zew6ve5pdztmc7nmkh4, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Type annotation, code: 1030
:6:25: Error: At function: AsList
:6:46: Error: At function: AsStruct
:3:29: Error: At function: Just, At function: UnsafeTimestampCast
:3:29: Error: Unsafe timestamp cast restricted from SQL v1. Trying to start YDB, gRPC: 24350, MsgBus: 65487 2025-03-31T16:02:52.429991Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488004781442173888:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:02:52.430054Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ac0/r3tmp/tmpmSuLPi/pdisk_1.dat 2025-03-31T16:02:52.596081Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:02:52.610824Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:02:52.610924Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:02:52.613503Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24350, node 3 2025-03-31T16:02:52.672105Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:02:52.672132Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:02:52.672142Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:02:52.672288Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65487 TClient is connected to server localhost:65487 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:02:53.187707Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:56.530818Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488004798622043732:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:56.531321Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:56.553897Z node 3 :TX_PROXY ERROR: Actor# [3:7488004798622043753:2307] txid# 281474976715658, issues: { message: "Type \'Datetime64\' specified for column \'DatetimePK\', but support for new date/time 64 types is disabled (EnableTableDatetime64 feature flag is off)" severity: 1 } >> TReplicationTests::CopyReplicatedTable [GOOD] >> BsControllerConfig::OverlayMap [GOOD] |73.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/ydb-public-sdk-cpp-src-client-federated_topic-ut |73.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/ydb-public-sdk-cpp-src-client-federated_topic-ut >> KqpQuery::QueryClientTimeoutPrecompiled [GOOD] >> KqpQuery::QueryExplain >> TPartBtreeIndexIteration::NoNodes [GOOD] >> TPartBtreeIndexIteration::NoNodes_Groups >> DataShardTxOrder::RandomPoints_DelayRS_Reboot |73.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut |73.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut |73.0%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_olap/test-results/unittest/{meta.json ... results_accumulator.log} |73.0%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/ydb-public-sdk-cpp-src-client-federated_topic-ut |73.0%| [LD] {RESULT} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut |73.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::OverlayMap [GOOD] |73.0%| [TA] {RESULT} $(B)/ydb/core/engine/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TReplicationTests::DropReplicationWithInvalidCredentials [GOOD] >> TReplicationTests::DropReplicationWithUnknownSecret ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestKinesisHttpProxy::GoodRequestCreateStream [GOOD] Test command err: 2025-03-31T16:01:41.867336Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004476341922510:2145];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:01:41.879442Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001078/r3tmp/tmpZRtjqd/pdisk_1.dat 2025-03-31T16:01:42.551326Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:01:42.553724Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:01:42.553808Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:01:42.562789Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1318, node 1 2025-03-31T16:01:42.875997Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:01:42.876048Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:01:42.876055Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:01:42.876171Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 
TClient is connected to server localhost:7267 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:01:43.330351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:43.353940Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 TClient is connected to server localhost:7267 2025-03-31T16:01:43.653287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:43.675594Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-31T16:01:43.681500Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:43.696942Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-03-31T16:01:43.704684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:44.024443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:01:44.100789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:01:44.160416Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710665, at schemeshard: 72057594046644480 2025-03-31T16:01:44.165695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:01:44.253275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:44.298307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:44.383022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:44.438761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:44.519562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:44.596641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:46.448592Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004497816760361:2376], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:01:46.448735Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:01:46.451179Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004497816760369:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:01:46.455212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480 2025-03-31T16:01:46.465634Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488004497816760375:2380], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2025-03-31T16:01:46.528859Z node 1 :TX_PROXY ERROR: Actor# [1:7488004497816760426:2926] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 18], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:01:46.865150Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488004476341922510:2145];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:01:46.865909Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:01:46.866925Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01jqpezzxkb2gjtncj7ym30tkr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmY2MTFmZGQtODk3MDcwYmUtZmZiODY1OTYtZTBmYWJlOTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:01:46.959263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:47.002656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:47.044763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:47.083986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:47.140416Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:47.215750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:01:47.267513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:01:47.304853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-31T16:01:47.341881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:01:47.402349Z node 1 :HTTP INFO: Listening on http://127.0.0.1:64646 2025-03-31T16:01:48.449780Z node 1 :SQS INFO: Start SQS service actor 2025-03-31T16:01:48.449912Z node 1 :SQS DEBUG: SQS service config: { EnableSqs: true YandexCloudMode: true EnableDeadLetterQueues: true } 2025-03-31T16:01:48.451167Z node 1 :SQS INFO: Start SQS proxy service actor 2025-03-31T16:01:48.451208Z node 1 :SQS DEBUG: Enable scheme board scheme cache 2025-03-31T16:01:48.451445Z node 1 :HTTP INFO: Listening on http://[::]:10352 2025-03-31T16:01:48.518745Z node 1 :SQS INFO: Request SQS users list 2025-03-31T16:01:48.518793Z node 1 :SQS DEBUG: Request SQS queues list 2025-03-31T16:01:48.519207Z node 1 :SQS NOTICE: [Node tracker] schedule describe tables after 0.000000s 2025-03-31T16:01:48.519233Z node 1 :SQS DEBUG: [Node tracker] bootstrap on node=1 2025-03-31T16:01:48.555932Z node 1 :SQS DEBUG: Request [] Starting executor actor for query(idx ... ed" retryable:0 2025-03-31T16:02:56.614049Z node 8 :TICKET_PARSER DEBUG: Ticket **** (C9049D91) () has now valid token of Service1_id@as 2025-03-31T16:02:56.614175Z node 8 :HTTP_PROXY DEBUG: http request [DescribeStream] requestId [ae53c374-12d0ffc9-74f734dd-8ee124ef] [auth] Authorized successfully 2025-03-31T16:02:56.614342Z node 8 :HTTP_PROXY INFO: http request [DescribeStream] requestId [ae53c374-12d0ffc9-74f734dd-8ee124ef] sending grpc request to '' database: '/Root' iam token size: 0 2025-03-31T16:02:56.618221Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server connected, pipe [8:7488004796472942848:2526], now have 1 active actors on pipe 2025-03-31T16:02:56.618326Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037908] server connected, pipe [8:7488004796472942849:2527], now have 1 active actors on pipe 2025-03-31T16:02:56.618381Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037909] server connected, pipe [8:7488004796472942850:2528], now have 1 active actors on pipe 2025-03-31T16:02:56.618419Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037910] server connected, pipe [8:7488004796472942851:2529], now have 1 active actors on pipe 2025-03-31T16:02:56.618457Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server connected, pipe [8:7488004796472942852:2530], now have 1 active actors on pipe Http output full 
{"StreamDescription":{"RetentionPeriodHours":24,"WriteQuotaKbPerSec":1024,"StreamModeDetails":{"StreamMode":"ON_DEMAND"},"StreamArn":"testtopic","PartitioningSettings":{"MinActivePartitions":5,"AutoPartitioningSettings":{"Strategy":"AUTO_PARTITIONING_STRATEGY_DISABLED","PartitionWriteSpeed":{"StabilizationWindow":{"Nanos":0,"Seconds":300},"DownUtilizationPercent":30,"UpUtilizationPercent":90}},"MaxActivePartitions":5},"Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}},{"ShardId":"shard-000002","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"204169420152563078078024764459060926872","StartingHashKey":"136112946768375385385349842972707284582"}},{"ShardId":"shard-000003","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"272225893536750770770699685945414569163","StartingHashKey":"204169420152563078078024764459060926873"}},{"ShardId":"shard-000004","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"340282366920938463463374607431768211455","StartingHashKey":"272225893536750770770699685945414569164"}}],"KeyId":"","Owner":"Service1_id@as","StreamStatus":"ACTIVE","HasMoreShards":false,"EncryptionType":"ENCRYPTION_UNDEFINED","StreamCreationTimestamp":1743436977,"StorageLimitMb":0,"StreamName":"testtopic"}} 2025-03-31T16:02:56.619729Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037908] server disconnected, pipe [8:7488004796472942849:2527] destroyed 2025-03-31T16:02:56.619763Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037909] server disconnected, pipe [8:7488004796472942850:2528] destroyed 2025-03-31T16:02:56.619785Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037910] server disconnected, pipe [8:7488004796472942851:2529] destroyed 2025-03-31T16:02:56.619805Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server disconnected, pipe [8:7488004796472942852:2530] destroyed 2025-03-31T16:02:56.619814Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server disconnected, pipe [8:7488004796472942848:2526] destroyed 2025-03-31T16:02:56.620953Z node 8 :HTTP_PROXY INFO: http request [DescribeStream] requestId [ae53c374-12d0ffc9-74f734dd-8ee124ef] reply ok 200 
{"StreamDescription":{"RetentionPeriodHours":24,"WriteQuotaKbPerSec":1024,"StreamModeDetails":{"StreamMode":"ON_DEMAND"},"StreamArn":"testtopic","PartitioningSettings":{"MinActivePartitions":5,"AutoPartitioningSettings":{"Strategy":"AUTO_PARTITIONING_STRATEGY_DISABLED","PartitionWriteSpeed":{"StabilizationWindow":{"Nanos":0,"Seconds":300},"DownUtilizationPercent":30,"UpUtilizationPercent":90}},"MaxActivePartitions":5},"Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}},{"ShardId":"shard-000002","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"204169420152563078078024764459060926872","StartingHashKey":"136112946768375385385349842972707284582"}},{"ShardId":"shard-000003","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"272225893536750770770699685945414569163","StartingHashKey":"204169420152563078078024764459060926873"}},{"ShardId":"shard-000004","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"340282366920938463463374607431768211455","StartingHashKey":"272225893536750770770699685945414569164"}}],"KeyId":"","Owner":"Service1_id@as","StreamStatus":"ACTIVE","HasMoreShards":false,"EncryptionType":"ENCRYPTION_UNDEFINED","StreamCreationTimestamp":1743436977,"StorageLimitMb":0,"StreamName":"testtopic"}} 2025-03-31T16:02:56.621565Z node 8 :HTTP DEBUG: (#40,[::1]:37386) <- (200 ) 2025-03-31T16:02:56.621695Z node 8 :HTTP DEBUG: (#40,[::1]:37386) connection closed 2025-03-31T16:02:56.623189Z node 8 :HTTP DEBUG: (#37,[::1]:37398) incoming connection opened 2025-03-31T16:02:56.623275Z node 8 :HTTP DEBUG: (#37,[::1]:37398) -> (POST /Root) 2025-03-31T16:02:56.623404Z node 8 :HTTP_PROXY INFO: proxy service: incoming request from [7869:800:6050:0:6069:800:6050:0] request [DescribeStreamSummary] url [/Root] database [/Root] requestId: bf7a425a-da0c05f6-15df6f72-64416368 2025-03-31T16:02:56.623899Z node 8 :HTTP_PROXY INFO: http request [DescribeStreamSummary] requestId [bf7a425a-da0c05f6-15df6f72-64416368] got new request from [7869:800:6050:0:6069:800:6050:0] database '/Root' stream 'testtopic' 2025-03-31T16:02:56.630205Z node 8 :HTTP_PROXY DEBUG: http request [DescribeStreamSummary] requestId [bf7a425a-da0c05f6-15df6f72-64416368] [auth] Authorized successfully 2025-03-31T16:02:56.630401Z node 8 :HTTP_PROXY INFO: http request [DescribeStreamSummary] requestId [bf7a425a-da0c05f6-15df6f72-64416368] sending grpc request to '' database: '/Root' iam token size: 0 2025-03-31T16:02:56.632080Z node 8 :HTTP_PROXY INFO: http request [DescribeStreamSummary] requestId [bf7a425a-da0c05f6-15df6f72-64416368] reply ok 2025-03-31T16:02:56.632423Z node 8 :HTTP DEBUG: (#37,[::1]:37398) <- (200 ) 2025-03-31T16:02:56.632516Z node 8 :HTTP DEBUG: (#37,[::1]:37398) connection closed Http output full {"StreamDescriptionSummary":{"RetentionPeriodHours":24,"OpenShardCount":5,"StreamArn":"testtopic","ConsumerCount":0,"KeyId":"","StreamStatus":"ACTIVE","EncryptionType":"NONE","StreamCreationTimestamp":1743436.976,"StreamName":"testtopic"}} 200 
{"StreamDescriptionSummary":{"RetentionPeriodHours":24,"OpenShardCount":5,"StreamArn":"testtopic","ConsumerCount":0,"KeyId":"","StreamStatus":"ACTIVE","EncryptionType":"NONE","StreamCreationTimestamp":1743436.976,"StreamName":"testtopic"}} 2025-03-31T16:02:56.633840Z node 8 :HTTP DEBUG: (#40,[::1]:37410) incoming connection opened 2025-03-31T16:02:56.633907Z node 8 :HTTP DEBUG: (#40,[::1]:37410) -> (POST /Root) 2025-03-31T16:02:56.634028Z node 8 :HTTP_PROXY INFO: proxy service: incoming request from [d899:2000:6050:0:c099:2000:6050:0] request [DescribeStream] url [/Root] database [/Root] requestId: 3f2a9721-2de19cee-983040c-176cad6e 2025-03-31T16:02:56.634380Z node 8 :HTTP_PROXY INFO: http request [DescribeStream] requestId [3f2a9721-2de19cee-983040c-176cad6e] got new request from [d899:2000:6050:0:c099:2000:6050:0] database '/Root' stream 'testtopic' 2025-03-31T16:02:56.634922Z node 8 :HTTP_PROXY DEBUG: http request [DescribeStream] requestId [3f2a9721-2de19cee-983040c-176cad6e] [auth] Authorized successfully 2025-03-31T16:02:56.635035Z node 8 :HTTP_PROXY INFO: http request [DescribeStream] requestId [3f2a9721-2de19cee-983040c-176cad6e] sending grpc request to '' database: '/Root' iam token size: 0 2025-03-31T16:02:56.640435Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037908] server connected, pipe [8:7488004796472942878:2539], now have 1 active actors on pipe 2025-03-31T16:02:56.640482Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037909] server connected, pipe [8:7488004796472942879:2540], now have 1 active actors on pipe 2025-03-31T16:02:56.640518Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037910] server connected, pipe [8:7488004796472942880:2541], now have 1 active actors on pipe 2025-03-31T16:02:56.640556Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server connected, pipe [8:7488004796472942881:2542], now have 1 active actors on pipe 2025-03-31T16:02:56.640591Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server connected, pipe [8:7488004796472942877:2538], now have 1 active actors on pipe Http output full 
{"StreamDescription":{"RetentionPeriodHours":24,"WriteQuotaKbPerSec":1024,"StreamModeDetails":{"StreamMode":"ON_DEMAND"},"StreamArn":"testtopic","PartitioningSettings":{"MinActivePartitions":5,"AutoPartitioningSettings":{"Strategy":"AUTO_PARTITIONING_STRATEGY_DISABLED","PartitionWriteSpeed":{"StabilizationWindow":{"Nanos":0,"Seconds":300},"DownUtilizationPercent":30,"UpUtilizationPercent":90}},"MaxActivePartitions":5},"Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}},{"ShardId":"shard-000002","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"204169420152563078078024764459060926872","StartingHashKey":"136112946768375385385349842972707284582"}},{"ShardId":"shard-000003","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"272225893536750770770699685945414569163","StartingHashKey":"204169420152563078078024764459060926873"}},{"ShardId":"shard-000004","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"340282366920938463463374607431768211455","StartingHashKey":"272225893536750770770699685945414569164"}}],"KeyId":"","Owner":"Service1_id@as","StreamStatus":"ACTIVE","HasMoreShards":false,"EncryptionType":"ENCRYPTION_UNDEFINED","StreamCreationTimestamp":1743436977,"StorageLimitMb":0,"StreamName":"testtopic"}} 2025-03-31T16:02:56.642577Z node 8 :HTTP_PROXY INFO: http request [DescribeStream] requestId [3f2a9721-2de19cee-983040c-176cad6e] reply ok 2025-03-31T16:02:56.643170Z node 8 :HTTP DEBUG: (#40,[::1]:37410) <- (200 ) 2025-03-31T16:02:56.643257Z node 8 :HTTP DEBUG: (#40,[::1]:37410) connection closed 2025-03-31T16:02:56.644892Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037909] server disconnected, pipe [8:7488004796472942879:2540] destroyed 2025-03-31T16:02:56.644938Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037910] server disconnected, pipe [8:7488004796472942880:2541] destroyed 2025-03-31T16:02:56.644963Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server disconnected, pipe [8:7488004796472942881:2542] destroyed 2025-03-31T16:02:56.644985Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server disconnected, pipe [8:7488004796472942877:2538] destroyed 2025-03-31T16:02:56.645008Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037908] server disconnected, pipe [8:7488004796472942878:2539] destroyed >> DataShardTxOrder::RandomPoints_DelayRS_Reboot_Dirty |73.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql |73.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql |73.1%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_replication/unittest >> TReplicationTests::CopyReplicatedTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-31T16:02:47.546867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 
600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:02:47.546961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:02:47.546998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:02:47.547044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:02:47.547113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:02:47.547154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:02:47.547211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:02:47.547303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:02:47.547634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:02:47.641928Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:02:47.641993Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:02:47.650616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:02:47.651018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:02:47.651238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:02:47.658821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:02:47.659119Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:02:47.659780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:02:47.660090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:02:47.662345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:02:47.663698Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:02:47.663764Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:02:47.663865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:02:47.663909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:02:47.663948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:02:47.664137Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:02:47.675728Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 
is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-03-31T16:02:47.825598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:02:47.825822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:02:47.826062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:02:47.826285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:02:47.826338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:02:47.828874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:02:47.829012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:02:47.829250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:02:47.829310Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:02:47.829350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:02:47.829385Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:02:47.831552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:02:47.831626Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:02:47.831667Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:02:47.839860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:02:47.839939Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:02:47.839996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:02:47.840066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:02:47.844006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:02:47.846307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 
msg type: 269090816 2025-03-31T16:02:47.846493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:02:47.847603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:02:47.847760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:02:47.847828Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:02:47.848106Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:02:47.848160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:02:47.848314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:02:47.848384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:02:47.850597Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:02:47.850655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:02:47.850843Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:02:47.850884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:02:47.851331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:02:47.851403Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:02:47.851509Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:02:47.851547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:02:47.851587Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:02:47.851620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:02:47.851658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:02:47.851722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:02:47.851781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:02:47.851811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:02:47.851885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason 
remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:02:47.851923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:02:47.851959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:02:47.853917Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:02:47.854038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:02:47.854089Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... BUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-03-31T16:02:58.721779Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-03-31T16:02:58.754445Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-31T16:02:58.754872Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-31T16:02:58.758381Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 3 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 2057 } } 2025-03-31T16:02:58.758453Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-03-31T16:02:58.758629Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 3 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 2057 } } 2025-03-31T16:02:58.758779Z node 8 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 3 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 2057 } } FAKE_COORDINATOR: Erasing txId 102 2025-03-31T16:02:58.759769Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 407 RawX2: 34359740744 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-03-31T16:02:58.759841Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-03-31T16:02:58.760037Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 407 RawX2: 34359740744 } 
Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-03-31T16:02:58.760113Z node 8 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-03-31T16:02:58.760240Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 407 RawX2: 34359740744 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-03-31T16:02:58.760323Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-31T16:02:58.760387Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged CollectSchemaChanged: false 2025-03-31T16:02:58.775970Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:02:58.776509Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:02:58.793690Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 34359740659 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-03-31T16:02:58.793751Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-03-31T16:02:58.793904Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 34359740659 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-03-31T16:02:58.793948Z node 8 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-03-31T16:02:58.794025Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 304 RawX2: 34359740659 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-03-31T16:02:58.794084Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-31T16:02:58.794126Z node 8 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:02:58.794183Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-03-31T16:02:58.794240Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-03-31T16:02:58.794302Z node 8 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-03-31T16:02:58.796544Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 
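The records above show the gating pattern for distributed schema changes: NTableState::TProposedWaitParts collects one TEvSchemaChanged per datashard, the "left await" counter drops from 1 to 0, and only then does txid 102:0 advance 129 -> 240. A minimal sketch of that countdown follows; TProposedWaitParts here is a hypothetical stand-in with illustrative names, not the actual schemeshard class.

// Sketch of the "wait for all parts" countdown seen in the log above.
#include <cstdint>
#include <set>

struct TProposedWaitParts {
    std::set<uint64_t> PendingShards;  // datashard tablet ids still awaited

    // One call per TEvSchemaChanged; returns true at the "left await: 0"
    // moment, after which the operation state advances 129 -> 240.
    bool HandleSchemaChanged(uint64_t shardTabletId) {
        PendingShards.erase(shardTabletId);
        return PendingShards.empty();
    }
};

int main() {
    // The two datashards acknowledging txId 102 in the trace above.
    TProposedWaitParts op{{72075186233409546ULL, 72075186233409547ULL}};
    op.HandleSchemaChanged(72075186233409547ULL);                  // "left await: 1"
    return op.HandleSchemaChanged(72075186233409546ULL) ? 0 : 1;   // "left await: 0"
}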
2025-03-31T16:02:58.797014Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:02:58.797092Z node 8 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 102:0ProgressState, operation type TxCopyTable 2025-03-31T16:02:58.797161Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 102:0, name: CopyTableBarrier, done: 0, blocked: 1, parts count: 1 2025-03-31T16:02:58.797227Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 102, done: 0, blocked: 1 2025-03-31T16:02:58.797372Z node 8 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 102:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 102 Name: CopyTableBarrier }, at tablet# 72057594046678944 2025-03-31T16:02:58.797440Z node 8 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 240 -> 240 2025-03-31T16:02:58.803914Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:02:58.804030Z node 8 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-03-31T16:02:58.804204Z node 8 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-31T16:02:58.804263Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T16:02:58.804325Z node 8 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-31T16:02:58.804386Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T16:02:58.804457Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-03-31T16:02:58.804578Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [8:332:2311] message: TxId: 102 2025-03-31T16:02:58.804657Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T16:02:58.804732Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-03-31T16:02:58.804788Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-03-31T16:02:58.805007Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-03-31T16:02:58.805071Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-31T16:02:58.812107Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-31T16:02:58.812211Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [8:437:2398] TestWaitNotification: OK eventTxId 102 2025-03-31T16:02:58.812960Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/CopyTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:02:58.813249Z node 8 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/CopyTable" took 323us result status StatusSuccess 2025-03-31T16:02:58.813728Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: 
StatusSuccess Path: "/MyRoot/CopyTable" PathDescription { Self { Name: "CopyTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "CopyTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> LabeledDbCounters::OneTabletRemoveCounters [GOOD] >> LabeledDbCounters::OneTabletRestart >> BsControllerConfig::AddDriveSerial [GOOD] >> BsControllerConfig::AddDriveSerialMassive |73.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity |73.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity |73.1%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity >> TFlatTableExecutor_IndexLoading::PrechargeAndSeek_BTreeIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_FlatIndex >> TReplicationTests::DropReplicationWithUnknownSecret [GOOD] >> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop+UseSink >> BuildStatsHistogram::Ten_Mixed [GOOD] >> BuildStatsHistogram::Ten_Serial |73.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant |73.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant |73.1%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant >> TTxAllocatorClientTest::ZeroRange [GOOD] >> DataShardTxOrder::RandomPoints_ReproducerDelayData1 >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLock-EvWrite >> DataShardTxOrder::ReadWriteReorder ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_replication/unittest >> TReplicationTests::DropReplicationWithUnknownSecret [GOOD] Test command 
err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:02:47.570289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:02:47.570363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:02:47.570425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:02:47.570460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:02:47.570499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:02:47.570543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:02:47.570605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:02:47.570687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:02:47.571036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:02:47.681760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:02:47.681850Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:02:47.688377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:02:47.688669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:02:47.688849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:02:47.703282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:02:47.703627Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:02:47.704443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:02:47.704712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:02:47.712084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:02:47.713585Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:02:47.713666Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:02:47.713859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:02:47.713913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: 
[OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:02:47.713956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:02:47.714171Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:02:47.736420Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:02:47.910905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:02:47.913345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:02:47.913658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:02:47.913929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:02:47.913998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:02:47.928198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:02:47.928352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:02:47.928609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:02:47.928680Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:02:47.928721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:02:47.928763Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:02:47.936032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:02:47.936117Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:02:47.936162Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:02:47.950662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:02:47.950744Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:02:47.950796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:02:47.950871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:02:47.957238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose 
to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:02:47.961248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:02:47.961486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:02:47.962721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:02:47.962896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:02:47.962962Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:02:47.963336Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:02:47.963400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:02:47.963616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:02:47.963707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:02:47.967278Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:02:47.967342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:02:47.967558Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:02:47.967615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:02:47.967940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:02:47.967993Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:02:47.968106Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:02:47.968145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:02:47.968190Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:02:47.968228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:02:47.968267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: 
false 2025-03-31T16:02:47.968332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:02:47.968372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:02:47.968414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:02:47.968517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:02:47.968559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:02:47.968593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:02:47.970855Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:02:47.970991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:02:47.971034Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... E: StateWork, received event# 2146435072, Sender [9:125:2151], Recipient [9:125:2151]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-03-31T16:03:01.076608Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation FAKE_COORDINATOR: Erasing txId 102 2025-03-31T16:03:01.076724Z node 9 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:01.076761Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:01.076911Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T16:03:01.077111Z node 9 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:01.077150Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [9:207:2209], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-03-31T16:03:01.077194Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [9:207:2209], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-03-31T16:03:01.077621Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:03:01.077673Z node 9 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-03-31T16:03:01.077827Z node 9 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-03-31T16:03:01.077873Z node 9 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-31T16:03:01.077926Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T16:03:01.077993Z node 9 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-31T16:03:01.078053Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T16:03:01.078112Z node 9 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-03-31T16:03:01.078170Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T16:03:01.078225Z node 9 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-03-31T16:03:01.078273Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-03-31T16:03:01.078470Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-31T16:03:01.078533Z node 9 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-03-31T16:03:01.078581Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-03-31T16:03:01.078630Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-03-31T16:03:01.080080Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274137603, Sender [9:207:2209], Recipient [9:125:2151]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 7 } 2025-03-31T16:03:01.080137Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2025-03-31T16:03:01.080239Z node 9 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:03:01.080326Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:03:01.080363Z node 9 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-03-31T16:03:01.080439Z node 9 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-03-31T16:03:01.080497Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:03:01.080611Z node 9 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-03-31T16:03:01.081976Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274137603, Sender [9:207:2209], Recipient [9:125:2151]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 } 2025-03-31T16:03:01.082030Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2025-03-31T16:03:01.082118Z node 9 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:03:01.082204Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 
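Note the two ack versions above: the surviving path is acked at Version: 7, while the dropped path (LocalPathId: 2, removed by TTxCleanDroppedPaths further down) is acked at Version: 18446744073709551615, which is 2^64 - 1; the maximum version apparently serves as the tombstone value for scheme-board subscribers. A one-line helper to make the magic number readable (hypothetical, not a YDB API):

#include <cstdint>
#include <limits>

// 18446744073709551615 == 2^64 - 1, the Version published for the dropped
// path in the TEvUpdateAck records above.
constexpr uint64_t DroppedPathVersion = std::numeric_limits<uint64_t>::max();

inline bool IsDroppedPublication(uint64_t version) {
    return version == DroppedPathVersion;
}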
2025-03-31T16:03:01.082238Z node 9 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-03-31T16:03:01.082271Z node 9 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-03-31T16:03:01.082305Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-31T16:03:01.082411Z node 9 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-03-31T16:03:01.082465Z node 9 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-03-31T16:03:01.083561Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435083, Sender [9:125:2151], Recipient [9:125:2151]: NKikimr::NSchemeShard::TEvPrivate::TEvCleanDroppedPaths 2025-03-31T16:03:01.083609Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvCleanDroppedPaths 2025-03-31T16:03:01.083691Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-31T16:03:01.083748Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-31T16:03:01.083849Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:03:01.086083Z node 9 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-31T16:03:01.086763Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-31T16:03:01.086803Z node 9 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-31T16:03:01.088656Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-31T16:03:01.088698Z node 9 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-31T16:03:01.088819Z node 9 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-03-31T16:03:01.089108Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-03-31T16:03:01.089173Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-03-31T16:03:01.089670Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [9:449:2404], Recipient [9:125:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:03:01.089739Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:03:01.089789Z node 9 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-03-31T16:03:01.089979Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [9:365:2344], Recipient [9:125:2151]: NKikimrScheme.TEvNotifyTxCompletion TxId: 102 2025-03-31T16:03:01.090024Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, 
processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-03-31T16:03:01.090134Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-03-31T16:03:01.090270Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-31T16:03:01.090327Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [9:447:2402] 2025-03-31T16:03:01.090570Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [9:449:2404], Recipient [9:125:2151]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-03-31T16:03:01.090611Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-03-31T16:03:01.090663Z node 9 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 2025-03-31T16:03:01.091216Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [9:450:2405], Recipient [9:125:2151]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Replication" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-03-31T16:03:01.091293Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-03-31T16:03:01.091430Z node 9 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Replication" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:03:01.091659Z node 9 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Replication" took 244us result status StatusPathDoesNotExist 2025-03-31T16:03:01.091857Z node 9 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Replication\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Replication" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> BsControllerConfig::PDiskCreate [GOOD] |73.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost |73.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost |73.1%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost >> BsControllerConfig::Basic [GOOD] >> BsControllerConfig::DeleteStoragePool >> KqpQuery::TableSink_ReplaceDataShardDataQuery-UseSink [GOOD] >> TPartBtreeIndexIteration::NoNodes_Groups [GOOD] >> TPartBtreeIndexIteration::FewNodes >> TPersQueueTest::DirectReadRestartTablet [GOOD] >> TPersQueueTest::EachMessageGetsExactlyOneAcknowledgementInCorrectOrder |73.1%| [TA] $(B)/ydb/core/tx/schemeshard/ut_replication/test-results/unittest/{meta.json ... 
results_accumulator.log}
>> BsControllerConfig::ReassignGroupDisk [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest >> TTxAllocatorClientTest::ZeroRange [GOOD]
Test command err:
2025-03-31T16:01:02.920902Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32
2025-03-31T16:01:02.921544Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19
2025-03-31T16:01:02.922428Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01
2025-03-31T16:01:02.924379Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0}
2025-03-31T16:01:02.924967Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor
2025-03-31T16:01:02.939683Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0}
2025-03-31T16:01:02.939913Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0}
2025-03-31T16:01:02.940010Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0}
2025-03-31T16:01:02.942508Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28
2025-03-31T16:01:02.942797Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0}
2025-03-31T16:01:02.942982Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete
2025-03-31T16:01:02.943231Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active!
Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2025-03-31T16:01:02.944033Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:70:2105] requested range size#5000 2025-03-31T16:01:02.944622Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:01:02.944712Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:01:02.944830Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 5000 2025-03-31T16:01:02.944879Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:70:2105] TEvAllocateResult from# 0 to# 5000 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::PDiskCreate [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:214:2066] recipient: [1:194:2076] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:214:2066] recipient: [1:194:2076] Leader for TabletID 72057594037932033 is [1:216:2078] sender: [1:217:2066] recipient: [1:194:2076] 2025-03-31T16:02:59.047231Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-03-31T16:02:59.064607Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-03-31T16:02:59.065152Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-03-31T16:02:59.067363Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:02:59.067918Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-03-31T16:02:59.068661Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-03-31T16:02:59.068722Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:497} Handle TEvInterconnect::TEvNodesInfo 2025-03-31T16:02:59.069013Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-03-31T16:02:59.096785Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-03-31T16:02:59.097047Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-03-31T16:02:59.097296Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-03-31T16:02:59.097474Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-03-31T16:02:59.097601Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-03-31T16:02:59.097696Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:216:2078] sender: [1:242:2066] recipient: [1:20:2067] 2025-03-31T16:02:59.111868Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-03-31T16:02:59.112058Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-03-31T16:02:59.127773Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-03-31T16:02:59.127964Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-03-31T16:02:59.128085Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-03-31T16:02:59.128194Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-03-31T16:02:59.128342Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-03-31T16:02:59.128439Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-03-31T16:02:59.128502Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-03-31T16:02:59.128564Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-03-31T16:02:59.143809Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-03-31T16:02:59.144018Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-03-31T16:02:59.155566Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-03-31T16:02:59.155768Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-03-31T16:02:59.157393Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:546} TTxLoadEverything Complete 2025-03-31T16:02:59.157467Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2167} LoadFinished 2025-03-31T16:02:59.157774Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-03-31T16:02:59.157851Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:551} TTxLoadEverything InitQueue processed 2025-03-31T16:02:59.188257Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" SharedWithOs: true } Drive { Path: "/dev/disk3" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "test box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } Host { 
Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } } } Command { QueryBaseConfig { } } } 2025-03-31T16:02:59.189120Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 1:1000 Path# /dev/disk1 2025-03-31T16:02:59.189194Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 1:1001 Path# /dev/disk2 2025-03-31T16:02:59.189233Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 1:1002 Path# /dev/disk3 2025-03-31T16:02:59.189262Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 2:1000 Path# /dev/disk1 2025-03-31T16:02:59.189310Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 2:1001 Path# /dev/disk2 2025-03-31T16:02:59.189343Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 2:1002 Path# /dev/disk3 2025-03-31T16:02:59.189369Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 3:1000 Path# /dev/disk1 2025-03-31T16:02:59.189397Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 3:1001 Path# /dev/disk2 2025-03-31T16:02:59.189426Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 3:1002 Path# /dev/disk3 2025-03-31T16:02:59.189454Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 4:1000 Path# /dev/disk1 2025-03-31T16:02:59.189484Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 4:1001 Path# /dev/disk2 2025-03-31T16:02:59.189513Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 4:1002 Path# /dev/disk3 2025-03-31T16:02:59.189565Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 5:1000 Path# /dev/disk1 2025-03-31T16:02:59.189600Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 5:1001 Path# /dev/disk2 2025-03-31T16:02:59.189651Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 5:1002 Path# /dev/disk3 2025-03-31T16:02:59.189693Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 6:1000 Path# /dev/disk1 2025-03-31T16:02:59.189733Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 6:1001 Path# /dev/disk2 2025-03-31T16:02:59.189767Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 6:1002 Path# /dev/disk3 2025-03-31T16:02:59.189796Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 7:1000 Path# /dev/disk1 2025-03-31T16:02:59.189822Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 7:1001 Path# /dev/disk2 2025-03-31T16:02:59.189873Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 7:1002 Path# /dev/disk3 2025-03-31T16:02:59.189921Z node 1 :BS_CONTROLLER NOTICE: 
{BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 8:1000 Path# /dev/disk1 2025-03-31T16:02:59.189950Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 8:1001 Path# /dev/disk2 2025-03-31T16:02:59.189980Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 8:1002 Path# /dev/disk3 2025-03-31T16:02:59.190022Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 9:1000 Path# /dev/disk1 2025-03-31T16:02:59.190057Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 9:1001 Path# /dev/disk2 2025-03-31T16:02:59.190087Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 9:1002 Path# /dev/disk3 2025-03-31T16:02:59.190114Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 10:1000 Path# /dev/disk1 2025-03-31T16:02:59.190145Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 10:1001 Path# /dev/disk2 2025-03-31T16:02:59.190219Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 10:1002 Path# /dev/disk3 Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:214:2066] recipient: [11:193:2076] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:214:2066] recipient: [11:193:2076] Leader for TabletID 72057594037932033 is [11:216:2078] sender: [11:217:2066] recipient: [11:193:2076] 2025-03-31T16:03:00.909852Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-03-31T16:03:00.911012Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-03-31T16:03:00.911276Z node 11 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-03-31T16:03:00.912981Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:03:00.913457Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-03-31T16:03:00.914123Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-03-31T16:03:00.914161Z node 11 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:497} Handle TEvInterconnect::TEvNodesInfo 2025-03-31T16:03:00.914467Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-03-31T16:03:00.938229Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-03-31T16:03:00.938395Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-03-31T16:03:00.938551Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-03-31T16:03:00.938677Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-03-31T16:03:00.938816Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-03-31T16:03:00.938891Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [11:216:2078] sender: [11:242:2066] recipient: [11:20:2067] 2025-03-31T16:03:00.951048Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-03-31T16:03:00.951275Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-03-31T16:03:00.963781Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-03-31T16:03:00.963948Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-03-31T16:03:00.964045Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-03-31T16:03:00.964149Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-03-31T16:03:00.964644Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-03-31T16:03:00.964752Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-03-31T16:03:00.964840Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-03-31T16:03:00.964918Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-03-31T16:03:00.976658Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-03-31T16:03:00.976830Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-03-31T16:03:00.991732Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-03-31T16:03:00.991896Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-03-31T16:03:00.993315Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:546} TTxLoadEverything Complete 2025-03-31T16:03:00.993390Z node 11 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2167} LoadFinished 2025-03-31T16:03:00.993617Z node 11 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-03-31T16:03:00.993691Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:551} TTxLoadEverything InitQueue processed 2025-03-31T16:03:00.994598Z node 11 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 2 Drive { Path: "/dev/disk1" } Drive { 
Path: "/dev/disk2" SharedWithOs: true } Drive { Path: "/dev/disk3" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "test box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 2 } } } Command { QueryBaseConfig { } } } 2025-03-31T16:03:00.995221Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 11:1000 Path# /dev/disk1 2025-03-31T16:03:00.995277Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 11:1001 Path# /dev/disk2 2025-03-31T16:03:00.995313Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 11:1002 Path# /dev/disk3 2025-03-31T16:03:00.995347Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 12:1000 Path# /dev/disk1 2025-03-31T16:03:00.995436Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 12:1001 Path# /dev/disk2 2025-03-31T16:03:00.995477Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 12:1002 Path# /dev/disk3 2025-03-31T16:03:00.995513Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 13:1000 Path# /dev/disk1 2025-03-31T16:03:00.995540Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 13:1001 Path# /dev/disk2 2025-03-31T16:03:00.995566Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 13:1002 Path# /dev/disk3 2025-03-31T16:03:00.995591Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 14:1000 Path# /dev/disk1 2025-03-31T16:03:00.995617Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 14:1001 Path# /dev/disk2 2025-03-31T16:03:00.995644Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 14:1002 Path# /dev/disk3 2025-03-31T16:03:00.995670Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 15:1000 Path# /dev/disk1 2025-03-31T16:03:00.995696Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 15:1001 Path# /dev/disk2 2025-03-31T16:03:00.995721Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 15:1002 Path# /dev/disk3 2025-03-31T16:03:00.995767Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 16:1000 Path# /dev/disk1 2025-03-31T16:03:00.995796Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 16:1001 Path# /dev/disk2 2025-03-31T16:03:00.995877Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 16:1002 Path# /dev/disk3 2025-03-31T16:03:00.995920Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 17:1000 Path# /dev/disk1 
2025-03-31T16:03:00.995959Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 17:1001 Path# /dev/disk2 2025-03-31T16:03:00.995990Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 17:1002 Path# /dev/disk3 2025-03-31T16:03:00.996027Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 18:1000 Path# /dev/disk1 2025-03-31T16:03:00.996058Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 18:1001 Path# /dev/disk2 2025-03-31T16:03:00.996084Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 18:1002 Path# /dev/disk3 2025-03-31T16:03:00.996108Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 19:1000 Path# /dev/disk1 2025-03-31T16:03:00.996131Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 19:1001 Path# /dev/disk2 2025-03-31T16:03:00.996189Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 19:1002 Path# /dev/disk3 2025-03-31T16:03:00.996220Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 20:1000 Path# /dev/disk1 2025-03-31T16:03:00.996245Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 20:1001 Path# /dev/disk2 2025-03-31T16:03:00.996275Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 20:1002 Path# /dev/disk3 >> TFlatTableExecutor_IndexLoading::Scan_FlatIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_BTreeIndex |73.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |73.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |73.1%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_replication/test-results/unittest/{meta.json ... 
results_accumulator.log} |73.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup >> BsControllerConfig::OverlayMapCrossReferences [GOOD] >> KqpStats::StreamLookupStats+StreamLookupJoin [GOOD] >> KqpStats::StreamLookupStats-StreamLookupJoin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::ReassignGroupDisk [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:278:2068] recipient: [1:260:2078] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:278:2068] recipient: [1:260:2078] Leader for TabletID 72057594037932033 is [1:280:2080] sender: [1:281:2068] recipient: [1:260:2078] 2025-03-31T16:02:59.036732Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-03-31T16:02:59.046954Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-03-31T16:02:59.047423Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-03-31T16:02:59.108750Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:02:59.109278Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-03-31T16:02:59.110029Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-03-31T16:02:59.110072Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:497} Handle TEvInterconnect::TEvNodesInfo 2025-03-31T16:02:59.110344Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-03-31T16:02:59.145211Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-03-31T16:02:59.145352Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-03-31T16:02:59.145563Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-03-31T16:02:59.145688Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-03-31T16:02:59.145825Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-03-31T16:02:59.145913Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:280:2080] sender: [1:306:2068] recipient: [1:22:2069] 2025-03-31T16:02:59.167783Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-03-31T16:02:59.167988Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-03-31T16:02:59.178892Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-03-31T16:02:59.179064Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-03-31T16:02:59.179178Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-03-31T16:02:59.179313Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-03-31T16:02:59.179437Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-03-31T16:02:59.179490Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-03-31T16:02:59.179548Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-03-31T16:02:59.179649Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-03-31T16:02:59.191435Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-03-31T16:02:59.191590Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-03-31T16:02:59.203100Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-03-31T16:02:59.203306Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-03-31T16:02:59.204645Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:546} TTxLoadEverything Complete 2025-03-31T16:02:59.204718Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2167} LoadFinished 2025-03-31T16:02:59.204972Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-03-31T16:02:59.205036Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:551} TTxLoadEverything InitQueue processed 2025-03-31T16:02:59.253654Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/dev/disk" } } } Command { DefineBox { BoxId: 1 Name: "box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 1 } } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 8 
PDiskFilter { Property { Type: ROT } } } } } 2025-03-31T16:02:59.254361Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 1:1000 Path# /dev/disk 2025-03-31T16:02:59.254417Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 2:1000 Path# /dev/disk 2025-03-31T16:02:59.254463Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 3:1000 Path# /dev/disk 2025-03-31T16:02:59.254492Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 4:1000 Path# /dev/disk 2025-03-31T16:02:59.254528Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 5:1000 Path# /dev/disk 2025-03-31T16:02:59.254556Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 6:1000 Path# /dev/disk 2025-03-31T16:02:59.254581Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 7:1000 Path# /dev/disk 2025-03-31T16:02:59.254605Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 8:1000 Path# /dev/disk 2025-03-31T16:02:59.254631Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 9:1000 Path# /dev/disk 2025-03-31T16:02:59.254657Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 10:1000 Path# /dev/disk 2025-03-31T16:02:59.254694Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 11:1000 Path# /dev/disk 2025-03-31T16:02:59.254731Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 12:1000 Path# /dev/disk Response# Status { Success: true } Status { Success: true } Status { Success: true } Success: true ConfigTxSeqNo: 1 2025-03-31T16:02:59.280509Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { UpdateDriveStatus { HostKey { NodeId: 1 } Path: "/dev/disk" Status: INACTIVE } } } Response# Status { Success: true } Success: true ConfigTxSeqNo: 2 Leader for TabletID 72057594037932033 is [0:0:0] sender: [13:278:2068] recipient: [13:260:2078] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [13:278:2068] recipient: [13:260:2078] Leader for TabletID 72057594037932033 is [13:280:2080] sender: [13:281:2068] recipient: [13:260:2078] 2025-03-31T16:03:01.414783Z node 13 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-03-31T16:03:01.415864Z node 13 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-03-31T16:03:01.416107Z node 13 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-03-31T16:03:01.417593Z node 13 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:03:01.418025Z node 13 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-03-31T16:03:01.418717Z node 13 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-03-31T16:03:01.418755Z node 13 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:497} Handle TEvInterconnect::TEvNodesInfo 2025-03-31T16:03:01.418984Z node 13 :BS_CONTROLLER DEBUG: 
{BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-03-31T16:03:01.444673Z node 13 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-03-31T16:03:01.444816Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-03-31T16:03:01.444931Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-03-31T16:03:01.445035Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-03-31T16:03:01.445198Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-03-31T16:03:01.445290Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [13:280:2080] sender: [13:306:2068] recipient: [13:22:2069] 2025-03-31T16:03:01.457978Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-03-31T16:03:01.458157Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-03-31T16:03:01.470236Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-03-31T16:03:01.470391Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-03-31T16:03:01.470499Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-03-31T16:03:01.470579Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-03-31T16:03:01.470693Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-03-31T16:03:01.470744Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-03-31T16:03:01.470785Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-03-31T16:03:01.470854Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-03-31T16:03:01.481705Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-03-31T16:03:01.481862Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-03-31T16:03:01.496123Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-03-31T16:03:01.496267Z node 13 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-03-31T16:03:01.497574Z node 13 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:546} TTxLoadEverything Complete 2025-03-31T16:03:01.497634Z node 13 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2167} LoadFinished 2025-03-31T16:03:01.497837Z node 13 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-03-31T16:03:01.497877Z node 13 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:551} TTxLoadEverything InitQueue processed 2025-03-31T16:03:01.498737Z node 13 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 2 Drive { Path: "/dev/disk" } } } Command { DefineBox { BoxId: 1 Name: "box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 2 } } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 8 PDiskFilter { Property { Type: ROT } } } } } 2025-03-31T16:03:01.499283Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 13:1000 Path# /dev/disk 2025-03-31T16:03:01.499329Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 14:1000 Path# /dev/disk 2025-03-31T16:03:01.499364Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 15:1000 Path# /dev/disk 2025-03-31T16:03:01.499400Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 16:1000 Path# /dev/disk 2025-03-31T16:03:01.499438Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 17:1000 Path# /dev/disk 2025-03-31T16:03:01.499470Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 18:1000 Path# /dev/disk 2025-03-31T16:03:01.499494Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 19:1000 Path# /dev/disk 2025-03-31T16:03:01.499522Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 20:1000 Path# /dev/disk 2025-03-31T16:03:01.499561Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 21:1000 Path# /dev/disk 2025-03-31T16:03:01.499606Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 22:1000 Path# /dev/disk 2025-03-31T16:03:01.499634Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 23:1000 Path# /dev/disk 2025-03-31T16:03:01.499658Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 24:1000 Path# 
/dev/disk Response# Status { Success: true } Status { Success: true } Status { Success: true } Success: true ConfigTxSeqNo: 1 2025-03-31T16:03:01.522878Z node 13 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { UpdateDriveStatus { HostKey { NodeId: 1 } Path: "/dev/disk" Status: INACTIVE } } } Response# Status { ErrorDescription: "Host not found NodeId# 1 HostKey# NodeId: 1\n incorrect" FailReason: kHostNotFound FailParam { NodeId: 1 } } ErrorDescription: "Host not found NodeId# 1 HostKey# NodeId: 1\n incorrect" |73.2%| [TA] $(B)/ydb/core/tx/tx_allocator_client/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpQuery::PreparedQueryInvalidate [GOOD] >> KqpQuery::QueryCache >> KqpPg::TypeCoercionInsert+useSink [GOOD] >> KqpPg::TableSelect+useSink >> KqpTypes::SelectNull [GOOD] >> KqpTypes::Time64Columns+EnableTableDatetime64 >> KqpExplain::SsaProgramInJsonPlan [GOOD] >> KqpLimits::AffectedShardsLimit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::TableSink_ReplaceDataShardDataQuery-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 31279, MsgBus: 6193 2025-03-31T16:02:38.708522Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004719105627503:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:02:38.708599Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ac2/r3tmp/tmpklsldd/pdisk_1.dat 2025-03-31T16:02:39.152470Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:02:39.203104Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:02:39.203252Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:02:39.206321Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31279, node 1 2025-03-31T16:02:39.302240Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:02:39.302273Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:02:39.302288Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:02:39.302488Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6193 TClient is connected to server localhost:6193 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:02:39.996876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:02:40.031105Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:02:40.183114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:40.389424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:40.479248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:42.602861Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004736285498469:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:42.603046Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:42.933562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:02:42.964431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:02:43.001641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:02:43.050959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:02:43.111340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:02:43.191216Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:02:43.277567Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004740580466285:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:43.277669Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:43.278037Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004740580466290:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:43.282857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:02:43.299102Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488004740580466292:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:02:43.383150Z node 1 :TX_PROXY ERROR: Actor# [1:7488004740580466347:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:02:43.706863Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488004719105627503:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:02:43.706930Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 14081, MsgBus: 1538 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ac2/r3tmp/tmpcwI4qA/pdisk_1.dat 2025-03-31T16:02:45.733235Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:02:45.812968Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:02:45.831345Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:02:45.831418Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:02:45.833544Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14081, node 2 2025-03-31T16:02:45.903539Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:02:45.903563Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:02:45.903569Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:02:45.903656Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1538 TClient is connected to server localhost:1538 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:02:46.367580Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:02:46.374525Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:02:49.524370Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488004766616346170:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:49.524508Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:49.563748Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:02:49.835723Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-31T16:02:50.148855Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488004770911314848:2441], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:50.148919Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488004770911314853:2444], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:50.148966Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:50.152718Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-03-31T16:02:50.164379Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488004770911314855:2445], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-03-31T16:02:50.232855Z node 2 :TX_PROXY ERROR: Actor# [2:7488004770911314921:3252] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 27810, MsgBus: 18733 2025-03-31T16:02:53.695912Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488004784351725789:2151];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ac2/r3tmp/tmpSDjo5k/pdisk_1.dat 2025-03-31T16:02:53.827831Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:02:53.851984Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:02:53.866805Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:02:53.866895Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:02:53.871275Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27810, node 3 2025-03-31T16:02:53.978698Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:02:53.978735Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:02:53.978746Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:02:53.978906Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18733 TClient is connected to server localhost:18733 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:02:54.677000Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:58.422003Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488004805826562812:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:58.422073Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:58.513580Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:02:58.704492Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488004784351725789:2151];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:02:58.715234Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:02:58.802719Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-31T16:02:59.147450Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488004810121531479:2442], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:59.147634Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:59.150495Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488004810121531488:2445], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:59.155823Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-03-31T16:02:59.168978Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488004810121531494:2446], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-03-31T16:02:59.258437Z node 3 :TX_PROXY ERROR: Actor# [3:7488004810121531560:3257] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpParams::ExplicitSameParameterTypesQueryCacheCheck [GOOD] >> KqpParams::ImplicitDifferentParameterTypesQueryCacheCheck >> BsControllerConfig::AddDriveSerialMassive [GOOD] |73.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::OverlayMapCrossReferences [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCDS64 [GOOD] >> TxOrderInternals::OperationOrder [GOOD] >> TPartBtreeIndexIteration::FewNodes [GOOD] >> TPartBtreeIndexIteration::FewNodes_Groups >> TFlatTableExecutor_IndexLoading::Scan_BTreeIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_History_FlatIndex >> DataShardTxOrder::ReadWriteReorder [GOOD] >> DataShardTxOrder::ImmediateBetweenOnline_oo8_dirty |73.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes |73.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes |73.2%| [TA] {RESULT} $(B)/ydb/core/tx/tx_allocator_client/ut/test-results/unittest/{meta.json ... results_accumulator.log} |73.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THiveTest::PipeAlivenessOfDeadTablet [GOOD] Test command err: 2025-03-31T15:54:07.678197Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-03-31T15:54:07.730722Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-03-31T15:54:07.731896Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:196} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-03-31T15:54:07.735389Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-03-31T15:54:07.746958Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:265} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2025-03-31T15:54:07.759350Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-03-31T15:54:07.768581Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:48:2075] ControllerId# 72057594037932033 2025-03-31T15:54:07.768628Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-03-31T15:54:07.769399Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} 
StartInvalidGroupProxy GroupId# 4294967295 2025-03-31T15:54:07.773323Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler 2025-03-31T15:54:08.146773Z node 1 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-03-31T15:54:08.146837Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-03-31T15:54:08.148827Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:47:2074] Create Queue# [1:56:2080] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:08.148962Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:47:2074] Create Queue# [1:57:2081] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:08.149060Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:47:2074] Create Queue# [1:58:2082] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:08.149775Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:47:2074] Create Queue# [1:59:2083] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:08.150379Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:47:2074] Create Queue# [1:60:2084] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:08.150761Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:47:2074] Create Queue# [1:61:2085] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:08.155806Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:47:2074] Create Queue# [1:62:2086] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:08.155844Z node 1 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-03-31T15:54:08.156237Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [1:48:2075] 2025-03-31T15:54:08.156415Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [1:48:2075] 2025-03-31T15:54:08.156759Z node 1 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-03-31T15:54:08.157136Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-03-31T15:54:08.165063Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-03-31T15:54:08.165649Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-03-31T15:54:08.180355Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-03-31T15:54:08.180477Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-03-31T15:54:08.188054Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [2:71:2073] ControllerId# 72057594037932033 2025-03-31T15:54:08.188101Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-03-31T15:54:08.188434Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295 2025-03-31T15:54:08.188711Z node 2 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler 2025-03-31T15:54:08.194789Z node 2 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-03-31T15:54:08.194863Z node 2 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 
0 fullIfPossible# 0 Marker# DSP58 2025-03-31T15:54:08.196760Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:70:2072] Create Queue# [2:77:2077] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:08.196942Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:70:2072] Create Queue# [2:78:2078] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:08.197090Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:70:2072] Create Queue# [2:79:2079] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:08.197209Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:70:2072] Create Queue# [2:80:2080] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:08.197382Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:70:2072] Create Queue# [2:81:2081] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:08.197534Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:70:2072] Create Queue# [2:82:2082] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:08.197675Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:70:2072] Create Queue# [2:83:2083] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:08.197697Z node 2 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-03-31T15:54:08.197759Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [2:71:2073] 2025-03-31T15:54:08.197786Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [2:71:2073] 2025-03-31T15:54:08.197829Z node 2 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-03-31T15:54:08.197872Z node 2 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-03-31T15:54:08.198210Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-03-31T15:54:08.198686Z node 2 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-03-31T15:54:08.198855Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [2:71:2073] 2025-03-31T15:54:08.198895Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-03-31T15:54:08.198946Z node 2 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-03-31T15:54:08.199074Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-03-31T15:54:08.631430Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:48:2075] 2025-03-31T15:54:08.631482Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-03-31T15:54:08.631508Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-03-31T15:54:08.646156Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-03-31T15:54:08.646206Z node 2 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-03-31T15:54:08.689528Z node 2 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-03-31T15:54:08.694214Z node 2 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-03-31T15:54:08.742200Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:48:2075] 2025-03-31T15:54:08.742253Z node 1 
:BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-03-31T15:54:08.742438Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-03-31T15:54:08.742874Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-03-31T15:54:08.759986Z node 2 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2025-03-31T15:54:08.762096Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-03-31T15:54:08.762377Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-03-31T15:54:08.762495Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2025-03-31T15:54:08.763048Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2025-03-31T15:54:08.763808Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] ::Bootstrap [1:52:2064] 2025-03-31T15:54:08.763834Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] lookup [1:52:2064] 2025-03-31T15:54:08.763888Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2025-03-31T15:54:08.764387Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037936129] ::Bootstrap [2:75:2064] 2025-03-31T15:54:08.764404Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037936129] lookup [2:75:2064] 2025-03-31T15:54:08.764427Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 268639248 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2025-03-31T15:54:08.764595Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [2:94:2087] 2025-03-31T15:54:08.764747Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [2:94:2087] 2025-03-31T15:54:08.764777Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2025-03-31T15:54:08.765126Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-03-31T15:54:08.765149Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2025-03-31T15:54:08.765472Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 720575940379320 ... 
6:02:48.408726Z node 14 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72057594037936131 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72057594037936131 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2025-03-31T16:02:48.408793Z node 14 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72057594037936131 followers: 0 2025-03-31T16:02:48.408895Z node 14 :PIPE_CLIENT DEBUG: TClient[72057594037936131] forward result error, check reconnect [14:143:2159] 2025-03-31T16:02:48.408945Z node 14 :PIPE_CLIENT DEBUG: TClient[72057594037936131] schedule retry [14:143:2159] 2025-03-31T16:02:48.420359Z node 14 :BS_PROXY_PUT INFO: [848c0e06882585e9] bootstrap ActorId# [14:412:2367] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:9:0:0:199:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-03-31T16:02:48.420572Z node 14 :BS_PROXY_PUT DEBUG: [848c0e06882585e9] Id# [72057594037927937:2:9:0:0:199:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-03-31T16:02:48.420673Z node 14 :BS_PROXY_PUT DEBUG: [848c0e06882585e9] restore Id# [72057594037927937:2:9:0:0:199:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-03-31T16:02:48.420780Z node 14 :BS_PROXY_PUT DEBUG: [848c0e06882585e9] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:9:0:0:199:1] Marker# BPG33 2025-03-31T16:02:48.420852Z node 14 :BS_PROXY_PUT DEBUG: [848c0e06882585e9] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:9:0:0:199:1] Marker# BPG32 2025-03-31T16:02:48.421126Z node 14 :BS_PROXY DEBUG: Send to queueActorId# [14:37:2080] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:9:0:0:199:1] FDS# 199 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-31T16:02:48.437956Z node 14 :BS_PROXY_PUT DEBUG: [848c0e06882585e9] received {EvVPutResult Status# OK ID# [72057594037927937:2:9:0:0:199:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 23 } Cost# 81566 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 24 }}}} from# [0:1:0:0:0] Marker# BPP01 2025-03-31T16:02:48.438171Z node 14 :BS_PROXY_PUT DEBUG: [848c0e06882585e9] Result# TEvPutResult {Id# [72057594037927937:2:9:0:0:199:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2025-03-31T16:02:48.438329Z node 14 :BS_PROXY_PUT INFO: [848c0e06882585e9] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:9:0:0:199:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-03-31T16:02:48.438597Z node 14 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 0 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 1.133 sample PartId# [72057594037927937:2:9:0:0:199:1] QueryCount# 1 VDiskId# [0:1:0:0:0] NodeId# 14 } TEvVPutResult{ TimestampMs# 17.992 VDiskId# [0:1:0:0:0] NodeId# 14 Status# OK } ] } 2025-03-31T16:02:48.438894Z node 14 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037927937:2:9:0:0:199:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-03-31T16:02:48.439178Z node 14 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} commited cookie 1 for 
step 9 2025-03-31T16:02:48.476095Z node 14 :PIPE_CLIENT DEBUG: TClient[72075186224037888] client retry [14:407:2365] 2025-03-31T16:02:48.476169Z node 14 :PIPE_CLIENT DEBUG: TClient[72075186224037888] lookup [14:407:2365] 2025-03-31T16:02:48.476285Z node 14 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037888 entry.State: StInit ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 1:2:0} 2025-03-31T16:02:48.476465Z node 14 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2025-03-31T16:02:48.476662Z node 14 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2025-03-31T16:02:48.476749Z node 14 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2025-03-31T16:02:48.476790Z node 14 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2025-03-31T16:02:48.476845Z node 14 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2025-03-31T16:02:48.476904Z node 14 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2025-03-31T16:02:48.476937Z node 14 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2025-03-31T16:02:48.477054Z node 14 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037888 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72075186224037888 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2025-03-31T16:02:48.477115Z node 14 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72075186224037888 followers: 0 2025-03-31T16:02:48.477208Z node 14 :PIPE_CLIENT DEBUG: TClient[72075186224037888] forward result error, check reconnect [14:407:2365] 2025-03-31T16:02:48.477251Z node 14 :PIPE_CLIENT DEBUG: TClient[72075186224037888] schedule retry [14:407:2365] 2025-03-31T16:02:48.500228Z node 14 :PIPE_CLIENT DEBUG: TClient[72075186224037888] client retry [14:407:2365] 2025-03-31T16:02:48.500323Z node 14 :PIPE_CLIENT DEBUG: TClient[72075186224037888] lookup [14:407:2365] 2025-03-31T16:02:48.500455Z node 14 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037888 entry.State: StInit ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 1:2:0} 2025-03-31T16:02:48.500661Z node 14 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2025-03-31T16:02:48.500867Z node 14 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2025-03-31T16:02:48.501107Z node 14 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2025-03-31T16:02:48.501183Z node 14 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2025-03-31T16:02:48.501250Z node 14 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2025-03-31T16:02:48.501328Z node 14 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2025-03-31T16:02:48.501370Z node 14 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2025-03-31T16:02:48.501525Z node 14 
:TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037888 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72075186224037888 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2025-03-31T16:02:48.501592Z node 14 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72075186224037888 followers: 0 2025-03-31T16:02:48.501700Z node 14 :PIPE_CLIENT DEBUG: TClient[72075186224037888] forward result error, check reconnect [14:407:2365] 2025-03-31T16:02:48.501776Z node 14 :PIPE_CLIENT DEBUG: TClient[72075186224037888] connect failed, check aliveness [14:407:2365] 2025-03-31T16:02:48.559394Z node 14 :PIPE_CLIENT DEBUG: TClient[72057594037936131] client retry [14:143:2159] 2025-03-31T16:02:48.559500Z node 14 :PIPE_CLIENT DEBUG: TClient[72057594037936131] lookup [14:143:2159] 2025-03-31T16:02:48.559633Z node 14 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037936131 entry.State: StInit ev: {EvForward TabletID: 72057594037936131 Ev: nullptr Flags: 1:2:0} 2025-03-31T16:02:48.559812Z node 14 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2025-03-31T16:02:48.559996Z node 14 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2025-03-31T16:02:48.560073Z node 14 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2025-03-31T16:02:48.560135Z node 14 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2025-03-31T16:02:48.560216Z node 14 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131} 2025-03-31T16:02:48.560296Z node 14 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131} 2025-03-31T16:02:48.560345Z node 14 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131} 2025-03-31T16:02:48.560464Z node 14 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72057594037936131 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72057594037936131 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2025-03-31T16:02:48.560524Z node 14 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72057594037936131 followers: 0 2025-03-31T16:02:48.560614Z node 14 :PIPE_CLIENT DEBUG: TClient[72057594037936131] forward result error, check reconnect [14:143:2159] 2025-03-31T16:02:48.560656Z node 14 :PIPE_CLIENT DEBUG: TClient[72057594037936131] schedule retry [14:143:2159] 2025-03-31T16:02:48.571564Z node 14 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [14:416:2368] 2025-03-31T16:02:48.571633Z node 14 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [14:416:2368] 2025-03-31T16:02:48.571726Z node 14 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-03-31T16:02:48.571811Z node 14 :TABLET_RESOLVER DEBUG: SelectForward node 14 selfDC leaderDC 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [14:271:2262] 2025-03-31T16:02:48.571927Z node 14 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send 
[14:416:2368] 2025-03-31T16:02:48.571988Z node 14 :PIPE_CLIENT DEBUG: TClient[72057594037927937] forward result local node, try to connect [14:416:2368] 2025-03-31T16:02:48.572080Z node 14 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [14:416:2368] 2025-03-31T16:02:48.572240Z node 14 :PIPE_SERVER DEBUG: [72057594037927937] Accept Connect Originator# [14:416:2368] 2025-03-31T16:02:48.572465Z node 14 :PIPE_CLIENT DEBUG: TClient[72057594037927937] connected with status OK role: Leader [14:416:2368] 2025-03-31T16:02:48.572560Z node 14 :PIPE_CLIENT DEBUG: TClient[72057594037927937] send queued [14:416:2368] 2025-03-31T16:02:48.572632Z node 14 :PIPE_CLIENT DEBUG: TClient[72057594037927937] push event to server [14:416:2368] 2025-03-31T16:02:48.572742Z node 14 :PIPE_SERVER DEBUG: [72057594037927937] HandleSend Sender# [14:407:2365] EventType# 268697616 2025-03-31T16:02:48.572931Z node 14 :HIVE WARN: HIVE#72057594037927937 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037888) 2025-03-31T16:02:48.573063Z node 14 :PIPE_CLIENT DEBUG: TClient[72057594037927937] received poison pill [14:416:2368] 2025-03-31T16:02:48.573153Z node 14 :PIPE_CLIENT DEBUG: TClient[72057594037927937] notify reset [14:416:2368] 2025-03-31T16:02:48.573232Z node 14 :PIPE_SERVER DEBUG: [72057594037927937] Got PeerClosed from# [14:416:2368] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::AddDriveSerialMassive [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:214:2066] recipient: [1:194:2076] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:214:2066] recipient: [1:194:2076] Leader for TabletID 72057594037932033 is [1:216:2078] sender: [1:217:2066] recipient: [1:194:2076] 2025-03-31T16:02:57.544401Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-03-31T16:02:57.551287Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-03-31T16:02:57.551731Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-03-31T16:02:57.553762Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:02:57.554274Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-03-31T16:02:57.554947Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-03-31T16:02:57.554985Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:497} Handle TEvInterconnect::TEvNodesInfo 2025-03-31T16:02:57.555386Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-03-31T16:02:57.566473Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-03-31T16:02:57.566610Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-03-31T16:02:57.566820Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-03-31T16:02:57.566950Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-03-31T16:02:57.567182Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-03-31T16:02:57.567285Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:216:2078] sender: [1:239:2066] recipient: [1:20:2067] 2025-03-31T16:02:57.579853Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-03-31T16:02:57.580018Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-03-31T16:02:57.595711Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-03-31T16:02:57.595866Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-03-31T16:02:57.595969Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-03-31T16:02:57.596062Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-03-31T16:02:57.596188Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-03-31T16:02:57.596247Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-03-31T16:02:57.596290Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-03-31T16:02:57.596373Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-03-31T16:02:57.607168Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-03-31T16:02:57.607335Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-03-31T16:02:57.619685Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-03-31T16:02:57.619834Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-03-31T16:02:57.621157Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:546} TTxLoadEverything Complete 2025-03-31T16:02:57.621205Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2167} LoadFinished 2025-03-31T16:02:57.621404Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-03-31T16:02:57.621463Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:551} TTxLoadEverything InitQueue processed 2025-03-31T16:02:57.651181Z node 1 
:BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_123" BoxId: 1 } } } 2025-03-31T16:02:57.652649Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_123" BoxId: 1 } } } 2025-03-31T16:02:57.653245Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_123" BoxId: 1 } } } Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:214:2066] recipient: [11:193:2076] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:214:2066] recipient: [11:193:2076] Leader for TabletID 72057594037932033 is [11:216:2078] sender: [11:217:2066] recipient: [11:193:2076] 2025-03-31T16:02:59.463894Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-03-31T16:02:59.464808Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-03-31T16:02:59.465055Z node 11 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-03-31T16:02:59.466383Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:02:59.466802Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-03-31T16:02:59.467476Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-03-31T16:02:59.467508Z node 11 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:497} Handle TEvInterconnect::TEvNodesInfo 2025-03-31T16:02:59.467737Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-03-31T16:02:59.481101Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-03-31T16:02:59.481223Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-03-31T16:02:59.481375Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-03-31T16:02:59.481470Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-03-31T16:02:59.481546Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-03-31T16:02:59.481590Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [11:216:2078] sender: [11:239:2066] recipient: [11:20:2067] 2025-03-31T16:02:59.494034Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-03-31T16:02:59.494211Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-03-31T16:02:59.507673Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 
2025-03-31T16:02:59.507822Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-03-31T16:02:59.507907Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-03-31T16:02:59.507979Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-03-31T16:02:59.508120Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-03-31T16:02:59.508219Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-03-31T16:02:59.508269Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-03-31T16:02:59.508326Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-03-31T16:02:59.519640Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-03-31T16:02:59.519785Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-03-31T16:02:59.531236Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-03-31T16:02:59.531419Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-03-31T16:02:59.532727Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:546} TTxLoadEverything Complete 2025-03-31T16:02:59.532790Z node 11 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2167} LoadFinished 2025-03-31T16:02:59.532994Z node 11 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-03-31T16:02:59.533038Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:551} TTxLoadEverything InitQueue processed 2025-03-31T16:02:59.533677Z node 11 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_123" BoxId: 1 } } } 2025-03-31T16:02:59.535071Z node 11 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# ... 
ommand { AddDriveSerial { Serial: "SN_5" BoxId: 1 } } } 2025-03-31T16:03:01.219022Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_6" BoxId: 1 } } } 2025-03-31T16:03:01.219852Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_7" BoxId: 1 } } } 2025-03-31T16:03:01.220573Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_8" BoxId: 1 } } } 2025-03-31T16:03:01.221214Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_9" BoxId: 1 } } } 2025-03-31T16:03:01.222127Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_0" } } } 2025-03-31T16:03:01.222987Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_1" } } } 2025-03-31T16:03:01.223871Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_2" } } } 2025-03-31T16:03:01.224674Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_3" } } } 2025-03-31T16:03:01.225411Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_4" } } } 2025-03-31T16:03:01.226146Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_5" } } } 2025-03-31T16:03:01.227046Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_6" } } } 2025-03-31T16:03:01.227974Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_7" } } } 2025-03-31T16:03:01.228776Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_8" } } } 2025-03-31T16:03:01.229601Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_9" } } } Leader for TabletID 72057594037932033 is [0:0:0] sender: [31:214:2066] recipient: [31:195:2076] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [31:214:2066] recipient: [31:195:2076] Leader for TabletID 72057594037932033 is [31:216:2078] sender: [31:217:2066] recipient: [31:195:2076] 2025-03-31T16:03:03.127964Z node 31 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-03-31T16:03:03.128919Z node 31 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-03-31T16:03:03.129143Z node 31 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-03-31T16:03:03.130782Z node 31 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 
2025-03-31T16:03:03.131286Z node 31 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-03-31T16:03:03.131947Z node 31 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-03-31T16:03:03.131980Z node 31 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:497} Handle TEvInterconnect::TEvNodesInfo 2025-03-31T16:03:03.132219Z node 31 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-03-31T16:03:03.142117Z node 31 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-03-31T16:03:03.142322Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-03-31T16:03:03.142452Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-03-31T16:03:03.142544Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-03-31T16:03:03.142676Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-03-31T16:03:03.142744Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [31:216:2078] sender: [31:239:2066] recipient: [31:20:2067] 2025-03-31T16:03:03.155031Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-03-31T16:03:03.155245Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-03-31T16:03:03.166075Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-03-31T16:03:03.166309Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-03-31T16:03:03.166392Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-03-31T16:03:03.166495Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-03-31T16:03:03.166636Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-03-31T16:03:03.166716Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-03-31T16:03:03.166767Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-03-31T16:03:03.166817Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-03-31T16:03:03.179663Z node 31 
:BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-03-31T16:03:03.179824Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-03-31T16:03:03.191623Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-03-31T16:03:03.191754Z node 31 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-03-31T16:03:03.192990Z node 31 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:546} TTxLoadEverything Complete 2025-03-31T16:03:03.193051Z node 31 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2167} LoadFinished 2025-03-31T16:03:03.193303Z node 31 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-03-31T16:03:03.193347Z node 31 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:551} TTxLoadEverything InitQueue processed 2025-03-31T16:03:03.194004Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_0" BoxId: 1 } } } 2025-03-31T16:03:03.195226Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_1" BoxId: 1 } } } 2025-03-31T16:03:03.195919Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_2" BoxId: 1 } } } 2025-03-31T16:03:03.196533Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_3" BoxId: 1 } } } 2025-03-31T16:03:03.197126Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_4" BoxId: 1 } } } 2025-03-31T16:03:03.197707Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_5" BoxId: 1 } } } 2025-03-31T16:03:03.198300Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_6" BoxId: 1 } } } 2025-03-31T16:03:03.199058Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_7" BoxId: 1 } } } 2025-03-31T16:03:03.206761Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_8" BoxId: 1 } } } 2025-03-31T16:03:03.207612Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_9" BoxId: 1 } } } 2025-03-31T16:03:03.208369Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_0" } } } 2025-03-31T16:03:03.209114Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_1" } } } 2025-03-31T16:03:03.209786Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest 
Request# {Command { RemoveDriveSerial { Serial: "SN_2" } } } 2025-03-31T16:03:03.210681Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_3" } } } 2025-03-31T16:03:03.219838Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_4" } } } 2025-03-31T16:03:03.221320Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_5" } } } 2025-03-31T16:03:03.222523Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_6" } } } 2025-03-31T16:03:03.226214Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_7" } } } 2025-03-31T16:03:03.227347Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_8" } } } 2025-03-31T16:03:03.228246Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_9" } } } |73.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> TxOrderInternals::OperationOrder [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ReadWriteReorder [GOOD] Test command err: 2025-03-31T16:03:03.513403Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:03:03.514801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:03:03.514867Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:03.528373Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:03:03.528885Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:131:2154] 2025-03-31T16:03:03.529204Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:03:03.552807Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:03:03.636022Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:03:03.636491Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:03:03.638154Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-03-31T16:03:03.638239Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-03-31T16:03:03.638287Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-03-31T16:03:03.638751Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:03:03.639654Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:03:03.639743Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:197:2154] in generation 2 2025-03-31T16:03:03.739896Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:03:03.792086Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 
2025-03-31T16:03:03.792394Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:03:03.792583Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-03-31T16:03:03.792653Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-03-31T16:03:03.792709Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-03-31T16:03:03.792802Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:03:03.793172Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:03:03.793247Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:03:03.793672Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-03-31T16:03:03.793794Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-03-31T16:03:03.793875Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-31T16:03:03.793933Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:03:03.793987Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-03-31T16:03:03.794040Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-31T16:03:03.794094Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-31T16:03:03.794142Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-03-31T16:03:03.794206Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:03:03.794336Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:03:03.794405Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:03:03.794480Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-03-31T16:03:03.797589Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:131:2154]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\n\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-03-31T16:03:03.797672Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T16:03:03.797800Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-31T16:03:03.798023Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-03-31T16:03:03.798090Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-03-31T16:03:03.798168Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-03-31T16:03:03.798228Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 
2025-03-31T16:03:03.798309Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-03-31T16:03:03.798362Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-03-31T16:03:03.798422Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-03-31T16:03:03.798836Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-03-31T16:03:03.798901Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-03-31T16:03:03.798969Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-03-31T16:03:03.799012Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-03-31T16:03:03.799063Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-03-31T16:03:03.799234Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-03-31T16:03:03.799288Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-03-31T16:03:03.799327Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-03-31T16:03:03.799356Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-03-31T16:03:03.815903Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-03-31T16:03:03.815991Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-03-31T16:03:03.816028Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-03-31T16:03:03.816101Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-03-31T16:03:03.816195Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-03-31T16:03:03.816833Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:03:03.816901Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:03:03.816961Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-03-31T16:03:03.817135Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:131:2154]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-03-31T16:03:03.817177Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-31T16:03:03.817331Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-03-31T16:03:03.817378Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-31T16:03:03.817423Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-03-31T16:03:03.817461Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-03-31T16:03:03.825507Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 
} 2025-03-31T16:03:03.825619Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:03:03.825910Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:03:03.825951Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:03:03.826023Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-31T16:03:03.826069Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:03:03.826106Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-31T16:03:03.826153Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-03-31T16:03:03.826194Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-03-31T16:03:03.826245Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-31T16:03:03.826299Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-03-31T16:03:03.826340Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-03-31T16:03:03.826379Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-03-31T16:03:03.826595Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-03-31T16:03:03.826638Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-31T16:03:03.826675Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-03-31T16:03:03.826701Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-03-31T16:03:03.826723Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-03-31T16:03:03.826826Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-31T16:03:03.826852Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-03-31T16:03:03.826889Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-31T16:03:03.826922Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-03-31T16:03:03.827009Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-03-31T16:03:03.827048Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-03-31T16:03:03.827103Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-03-31T16:03:03.827157Z node 1 :TX_DATASH ... 
WaitInRS 2025-03-31T16:03:05.343562Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437185 is Executed 2025-03-31T16:03:05.343589Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437185 executing on unit LoadAndWaitInRS 2025-03-31T16:03:05.343612Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437185 to execution unit ExecuteDataTx 2025-03-31T16:03:05.343650Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437185 on unit ExecuteDataTx 2025-03-31T16:03:05.344124Z node 1 :TX_DATASHARD TRACE: Executed operation [1000005:12] at tablet 9437185 with status COMPLETE 2025-03-31T16:03:05.344189Z node 1 :TX_DATASHARD TRACE: Datashard execution counters for [1000005:12] at 9437185: {NSelectRow: 1, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 1, SelectRowBytes: 8, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-03-31T16:03:05.344245Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437185 is Executed 2025-03-31T16:03:05.344269Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437185 executing on unit ExecuteDataTx 2025-03-31T16:03:05.344295Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437185 to execution unit CompleteOperation 2025-03-31T16:03:05.344322Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437185 on unit CompleteOperation 2025-03-31T16:03:05.344519Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437185 is DelayComplete 2025-03-31T16:03:05.344553Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437185 executing on unit CompleteOperation 2025-03-31T16:03:05.344612Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437185 to execution unit CompletedOperations 2025-03-31T16:03:05.344647Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437185 on unit CompletedOperations 2025-03-31T16:03:05.344686Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437185 is Executed 2025-03-31T16:03:05.344710Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437185 executing on unit CompletedOperations 2025-03-31T16:03:05.344768Z node 1 :TX_DATASHARD TRACE: Execution plan for [1000005:12] at 9437185 has finished 2025-03-31T16:03:05.344821Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:03:05.344858Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437185 2025-03-31T16:03:05.344905Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437185 has no attached operations 2025-03-31T16:03:05.344961Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437185 2025-03-31T16:03:05.345212Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:234:2227], Recipient [1:234:2227]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:03:05.345259Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:03:05.345314Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-31T16:03:05.345343Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:03:05.345367Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-31T16:03:05.345395Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000005:12] in 
PlanQueue unit at 9437184 2025-03-31T16:03:05.345435Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437184 on unit PlanQueue 2025-03-31T16:03:05.345464Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is Executed 2025-03-31T16:03:05.345487Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit PlanQueue 2025-03-31T16:03:05.345513Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437184 to execution unit LoadTxDetails 2025-03-31T16:03:05.345535Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437184 on unit LoadTxDetails 2025-03-31T16:03:05.346312Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000005:12 keys extracted: 3 2025-03-31T16:03:05.346354Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is Executed 2025-03-31T16:03:05.346382Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit LoadTxDetails 2025-03-31T16:03:05.346406Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437184 to execution unit FinalizeDataTxPlan 2025-03-31T16:03:05.346429Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437184 on unit FinalizeDataTxPlan 2025-03-31T16:03:05.346463Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is Executed 2025-03-31T16:03:05.346482Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit FinalizeDataTxPlan 2025-03-31T16:03:05.346518Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-31T16:03:05.346543Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437184 on unit BuildAndWaitDependencies 2025-03-31T16:03:05.346595Z node 1 :TX_DATASHARD TRACE: Operation [1000005:12] is the new logically complete end at 9437184 2025-03-31T16:03:05.346628Z node 1 :TX_DATASHARD TRACE: Operation [1000005:12] is the new logically incomplete end at 9437184 2025-03-31T16:03:05.346653Z node 1 :TX_DATASHARD TRACE: Activated operation [1000005:12] at 9437184 2025-03-31T16:03:05.346688Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is Executed 2025-03-31T16:03:05.346708Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit BuildAndWaitDependencies 2025-03-31T16:03:05.346745Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437184 to execution unit BuildDataTxOutRS 2025-03-31T16:03:05.346766Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437184 on unit BuildDataTxOutRS 2025-03-31T16:03:05.346808Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is Executed 2025-03-31T16:03:05.346841Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit BuildDataTxOutRS 2025-03-31T16:03:05.346860Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437184 to execution unit StoreAndSendOutRS 2025-03-31T16:03:05.346877Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437184 on unit StoreAndSendOutRS 2025-03-31T16:03:05.346938Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is Executed 2025-03-31T16:03:05.346961Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit StoreAndSendOutRS 2025-03-31T16:03:05.346981Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437184 to execution unit PrepareDataTxInRS 2025-03-31T16:03:05.347002Z node 1 :TX_DATASHARD TRACE: 
Trying to execute [1000005:12] at 9437184 on unit PrepareDataTxInRS 2025-03-31T16:03:05.347029Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is Executed 2025-03-31T16:03:05.347048Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit PrepareDataTxInRS 2025-03-31T16:03:05.347067Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437184 to execution unit LoadAndWaitInRS 2025-03-31T16:03:05.347222Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437184 on unit LoadAndWaitInRS 2025-03-31T16:03:05.347247Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is Executed 2025-03-31T16:03:05.347274Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit LoadAndWaitInRS 2025-03-31T16:03:05.347300Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437184 to execution unit ExecuteDataTx 2025-03-31T16:03:05.347326Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437184 on unit ExecuteDataTx 2025-03-31T16:03:05.347838Z node 1 :TX_DATASHARD TRACE: Executed operation [1000005:12] at tablet 9437184 with status COMPLETE 2025-03-31T16:03:05.347895Z node 1 :TX_DATASHARD TRACE: Datashard execution counters for [1000005:12] at 9437184: {NSelectRow: 3, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 3, SelectRowBytes: 24, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-03-31T16:03:05.347955Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is Executed 2025-03-31T16:03:05.347984Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit ExecuteDataTx 2025-03-31T16:03:05.348010Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437184 to execution unit CompleteOperation 2025-03-31T16:03:05.348032Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437184 on unit CompleteOperation 2025-03-31T16:03:05.348240Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is DelayComplete 2025-03-31T16:03:05.348276Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit CompleteOperation 2025-03-31T16:03:05.348304Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437184 to execution unit CompletedOperations 2025-03-31T16:03:05.348342Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437184 on unit CompletedOperations 2025-03-31T16:03:05.348379Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is Executed 2025-03-31T16:03:05.348402Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit CompletedOperations 2025-03-31T16:03:05.348426Z node 1 :TX_DATASHARD TRACE: Execution plan for [1000005:12] at 9437184 has finished 2025-03-31T16:03:05.348455Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:03:05.348494Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-31T16:03:05.348522Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-31T16:03:05.348562Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-31T16:03:05.368495Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437185 step# 1000005 txid# 12} 2025-03-31T16:03:05.368575Z node 1 :TX_DATASHARD DEBUG: Sending 
'{TEvPlanStepAccepted TabletId# 9437185 step# 1000005} 2025-03-31T16:03:05.368664Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437185 2025-03-31T16:03:05.368712Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:12] at 9437185 on unit CompleteOperation 2025-03-31T16:03:05.368820Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 12] from 9437185 at tablet 9437185 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:03:05.368878Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 2025-03-31T16:03:05.377825Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000005 txid# 12} 2025-03-31T16:03:05.377898Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 1000005} 2025-03-31T16:03:05.377953Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:03:05.377990Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:12] at 9437184 on unit CompleteOperation 2025-03-31T16:03:05.378046Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 12] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-03-31T16:03:05.378089Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 >> DataShardOutOfOrder::TestShardSnapshotReadNoEarlyReply >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites+EvWrite >> TFlatTableExecutor_VersionedRows::TestVersionedRowsLargeBlobs [GOOD] >> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2NoRestart >> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2NoRestart [GOOD] >> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2 [GOOD] >> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2ToSchema1 >> DataShardTxOrder::ForceOnlineBetweenOnline >> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2ToSchema1 [GOOD] >> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2ToSchema1ToSchema2 >> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2ToSchema1ToSchema2 [GOOD] >> TGenCompaction::OverloadFactorDuringForceCompaction >> KqpQuery::QueryExplain [GOOD] >> KqpQuery::QueryFromSqs |73.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber |73.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber |73.2%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber >> KqpStats::OneShardNonLocalExec+UseSink [GOOD] >> KqpStats::OneShardNonLocalExec-UseSink >> KqpQuery::OlapCreateAsSelect_Simple [GOOD] >> KqpQuery::OltpCreateAsSelect_Simple >> DataShardTxOrder::RandomDotRanges_DelayRS >> TGroupMapperTest::Mirror3dc >> BuildStatsHistogram::Ten_Serial [GOOD] >> BuildStatsHistogram::Ten_Crossed >> KqpExplain::ReadTableRanges [GOOD] |73.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/slow/unittest >> SlowTopicAutopartitioning::CDC_Write [GOOD] Test command err: 2025-03-31T16:00:42.276634Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004220630084120:2081];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:00:42.286898Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000faa/r3tmp/tmpWkiu9b/pdisk_1.dat 2025-03-31T16:00:42.567490Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-31T16:00:42.778447Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:00:42.778528Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:00:42.816859Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28920, node 1 2025-03-31T16:00:42.862194Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:00:43.055773Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/0l5c/000faa/r3tmp/yandexf55pik.tmp 2025-03-31T16:00:43.055810Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/0l5c/000faa/r3tmp/yandexf55pik.tmp 2025-03-31T16:00:43.055978Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/0l5c/000faa/r3tmp/yandexf55pik.tmp 2025-03-31T16:00:43.056111Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:00:43.179183Z INFO: TTestServer started on Port 15149 GrpcPort 28920 TClient is connected to server localhost:15149 PQClient connected to localhost:28920 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:00:43.558518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:00:43.593958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-03-31T16:00:47.080121Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004242104921380:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:00:47.080414Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:00:47.080974Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004242104921400:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:00:47.085921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-03-31T16:00:47.140034Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710662, at schemeshard: 72057594046644480 2025-03-31T16:00:47.140934Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488004242104921402:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-03-31T16:00:47.282643Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488004220630084120:2081];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:00:47.282744Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:00:47.528594Z node 1 :TX_PROXY ERROR: Actor# [1:7488004242104921466:2455] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:00:47.590770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:00:47.682599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:00:47.725485Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488004242104921476:2351], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:00:47.731479Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzUwMDYwNTYtODU3YzhmY2YtN2Q2OGJhMS05NDI5YTMzNQ==, ActorId: [1:7488004242104921360:2336], ActorState: ExecuteState, TraceId: 01jqpey5xt32f7dt2xtvqzjmea, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:00:47.733801Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T16:00:47.880273Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7488004246399889070:2642] === CheckClustersList. Ok 2025-03-31T16:00:54.014120Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-03-31T16:00:54.119684Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7488004272169693131:2796], Recipient [1:7488004220630084517:2200]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:00:54.119734Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:00:54.119757Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-03-31T16:00:54.119802Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7488004272169693126:2792], Recipient [1:7488004220630084517:2200]: {TEvModifySchemeTransaction txid# 281474976710672 TabletId# 72057594046644480} 2025-03-31T16:00:54.119825Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-03-31T16:00:54.180296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "origin" Columns { Name: "id" Type: "Uint64" NotNull: false } Columns { Name: "order" Type: "Uint64" NotNull: false } Columns { Name: "value" Type: "Utf8" NotNull: false } KeyColumnNames: "id" KeyColumnNames: "order" UniformPartitionsCount: 64 PartitionConfig { PartitioningPolicy { MinPartitionsCount: 64 MaxPartitionsCount: 64 } } Temporary: false } } TxId: 281474976710672 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-03-31T16:00:54.180640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/origin, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:00:54.180747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TCreateTable Propose, path: /Root/origin, opId: 281474976710672:0, schema: Name: "origin" Columns { Name: "id" Type: "Uint64" NotNull: false } Columns { Name: "order" Type: "Uint64" NotNull: false } Columns { Name: "value" Type: "Utf8" NotNull: false } KeyColumnNames: "id" KeyColumnNames: "order" UniformPartitionsCount: 64 PartitionConfig { PartitioningPolicy { MinPartitionsCount: 64 MaxPartitionsCount: 64 } } Temporary: false, at schemeshard: 72057594046644480 2025-03-31T16:00:54.181172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: origin, child id: [OwnerId: 72057594046644480, LocalPathId: 13], at schemeshard: 72057594046644480 2025-03-31T16:00:54.181207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 0 2025-03-31T16:00:54.181239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 1 2025-03-31T16:00:54.181255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 2 2025-03-31T16:00:54.181265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 3 2025-03-31T16:00:54.181274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 4 2025-03-31T16:00:54.181287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 5 2025-03-31T16:00:54.181295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPat ... 3-31T16:01:36.266098Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Answering for message sourceid: '\00072075186224037952', Topic: 'origin/feed/streamImpl', Partition: 1, SeqNo: 140, partNo: 0, Offset: 29335 is stored on disk 2025-03-31T16:01:36.266120Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] TPartition::ReplyWrite. Partition: 1 2025-03-31T16:01:36.266136Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Answering for message sourceid: '\00072075186224037952', Topic: 'origin/feed/streamImpl', Partition: 1, SeqNo: 143, partNo: 0, Offset: 29336 is stored on disk 2025-03-31T16:01:36.266152Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] TPartition::ReplyWrite. Partition: 1 2025-03-31T16:01:36.266164Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Answering for message sourceid: '\00072075186224037952', Topic: 'origin/feed/streamImpl', Partition: 1, SeqNo: 145, partNo: 0, Offset: 29337 is stored on disk 2025-03-31T16:01:36.266178Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] TPartition::ReplyWrite. Partition: 1 2025-03-31T16:01:36.266193Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Answering for message sourceid: '\00072075186224037952', Topic: 'origin/feed/streamImpl', Partition: 1, SeqNo: 146, partNo: 0, Offset: 29338 is stored on disk 2025-03-31T16:01:36.266208Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] TPartition::ReplyWrite. 
Partition: 1 2025-03-31T16:01:36.266262Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Answering for message sourceid: '\00072075186224037952', Topic: 'origin/feed/streamImpl', Partition: 1, SeqNo: 147, partNo: 0, Offset: 29339 is stored on disk 2025-03-31T16:01:36.266295Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] TPartition::ReplyWrite. Partition: 1 2025-03-31T16:01:36.266317Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Answering for message sourceid: '\00072075186224037952', Topic: 'origin/feed/streamImpl', Partition: 1, SeqNo: 149, partNo: 0, Offset: 29340 is stored on disk 2025-03-31T16:01:36.266338Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] TPartition::ReplyWrite. Partition: 1 2025-03-31T16:01:36.266354Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Answering for message sourceid: '\00072075186224037952', Topic: 'origin/feed/streamImpl', Partition: 1, SeqNo: 151, partNo: 0, Offset: 29341 is stored on disk 2025-03-31T16:01:36.266371Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] TPartition::ReplyWrite. Partition: 1 2025-03-31T16:01:36.266389Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Answering for message sourceid: '\00072075186224037952', Topic: 'origin/feed/streamImpl', Partition: 1, SeqNo: 152, partNo: 0, Offset: 29342 is stored on disk 2025-03-31T16:01:36.266407Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] TPartition::ReplyWrite. Partition: 1 2025-03-31T16:01:36.266448Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Answering for message sourceid: '\00072075186224037952', Topic: 'origin/feed/streamImpl', Partition: 1, SeqNo: 155, partNo: 0, Offset: 29343 is stored on disk 2025-03-31T16:01:36.266474Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] TPartition::ReplyWrite. Partition: 1 2025-03-31T16:01:36.266496Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Answering for message sourceid: '\00072075186224037952', Topic: 'origin/feed/streamImpl', Partition: 1, SeqNo: 156, partNo: 0, Offset: 29344 is stored on disk 2025-03-31T16:01:36.266516Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] TPartition::ReplyWrite. Partition: 1 2025-03-31T16:01:36.266539Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Answering for message sourceid: '\00072075186224037952', Topic: 'origin/feed/streamImpl', Partition: 1, SeqNo: 160, partNo: 0, Offset: 29345 is stored on disk 2025-03-31T16:01:36.266584Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] TPartition::ReplyWrite. Partition: 1 2025-03-31T16:01:36.266604Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Answering for message sourceid: '\00072075186224037952', Topic: 'origin/feed/streamImpl', Partition: 1, SeqNo: 161, partNo: 0, Offset: 29346 is stored on disk 2025-03-31T16:01:36.266634Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] TPartition::ReplyWrite. 
Partition: 1 2025-03-31T16:01:36.266655Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Answering for message sourceid: '\00072075186224037952', Topic: 'origin/feed/streamImpl', Partition: 1, SeqNo: 162, partNo: 0, Offset: 29347 is stored on disk 2025-03-31T16:01:36.266676Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] TPartition::ReplyWrite. Partition: 1 2025-03-31T16:01:36.266694Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Answering for message sourceid: '\00072075186224037952', Topic: 'origin/feed/streamImpl', Partition: 1, SeqNo: 164, partNo: 0, Offset: 29348 is stored on disk 2025-03-31T16:01:36.266713Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] TPartition::ReplyWrite. Partition: 1 2025-03-31T16:01:36.266730Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Answering for message sourceid: '\00072075186224037952', Topic: 'origin/feed/streamImpl', Partition: 1, SeqNo: 168, partNo: 0, Offset: 29349 is stored on disk 2025-03-31T16:01:36.266749Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] TPartition::ReplyWrite. Partition: 1 2025-03-31T16:01:36.266768Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Answering for message sourceid: '\00072075186224037952', Topic: 'origin/feed/streamImpl', Partition: 1, SeqNo: 169, partNo: 0, Offset: 29350 is stored on disk 2025-03-31T16:01:36.266785Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] TPartition::ReplyWrite. Partition: 1 2025-03-31T16:01:36.266806Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Answering for message sourceid: '\00072075186224037952', Topic: 'origin/feed/streamImpl', Partition: 1, SeqNo: 170, partNo: 0, Offset: 29351 is stored on disk 2025-03-31T16:01:36.266824Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] TPartition::ReplyWrite. Partition: 1 2025-03-31T16:01:36.266841Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Answering for message sourceid: '\00072075186224037952', Topic: 'origin/feed/streamImpl', Partition: 1, SeqNo: 172, partNo: 0, Offset: 29352 is stored on disk 2025-03-31T16:01:36.266859Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] TPartition::ReplyWrite. Partition: 1 2025-03-31T16:01:36.266952Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Answering for message sourceid: '\00072075186224037952', Topic: 'origin/feed/streamImpl', Partition: 1, SeqNo: 173, partNo: 0, Offset: 29353 is stored on disk 2025-03-31T16:01:36.266978Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] TPartition::ReplyWrite. Partition: 1 2025-03-31T16:01:36.266996Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Answering for message sourceid: '\00072075186224037952', Topic: 'origin/feed/streamImpl', Partition: 1, SeqNo: 174, partNo: 0, Offset: 29354 is stored on disk 2025-03-31T16:01:36.267014Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] TPartition::ReplyWrite. 
Partition: 1 2025-03-31T16:01:36.267036Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Answering for message sourceid: '\00072075186224037952', Topic: 'origin/feed/streamImpl', Partition: 1, SeqNo: 175, partNo: 0, Offset: 29355 is stored on disk 2025-03-31T16:01:36.267061Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] TPartition::ReplyWrite. Partition: 1 2025-03-31T16:01:36.267118Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Answering for message sourceid: '\00072075186224037952', Topic: 'origin/feed/streamImpl', Partition: 1, SeqNo: 176, partNo: 0, Offset: 29356 is stored on disk 2025-03-31T16:01:36.267142Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] TPartition::ReplyWrite. Partition: 1 2025-03-31T16:01:36.267184Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Answering for message sourceid: '\00072075186224037952', Topic: 'origin/feed/streamImpl', Partition: 1, SeqNo: 179, partNo: 0, Offset: 29357 is stored on disk 2025-03-31T16:01:36.267208Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] TPartition::ReplyWrite. Partition: 1 2025-03-31T16:01:36.267225Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Answering for message sourceid: '\00072075186224037952', Topic: 'origin/feed/streamImpl', Partition: 1, SeqNo: 182, partNo: 0, Offset: 29358 is stored on disk 2025-03-31T16:01:36.267244Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] TPartition::ReplyWrite. Partition: 1 2025-03-31T16:01:36.267261Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Answering for message sourceid: '\00072075186224037952', Topic: 'origin/feed/streamImpl', Partition: 1, SeqNo: 183, partNo: 0, Offset: 29359 is stored on disk 2025-03-31T16:01:36.267280Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] TPartition::ReplyWrite. Partition: 1 2025-03-31T16:01:36.267301Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Answering for message sourceid: '\00072075186224037952', Topic: 'origin/feed/streamImpl', Partition: 1, SeqNo: 187, partNo: 0, Offset: 29360 is stored on disk 2025-03-31T16:01:36.267319Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] TPartition::ReplyWrite. Partition: 1 2025-03-31T16:01:36.267338Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Answering for message sourceid: '\00072075186224037952', Topic: 'origin/feed/streamImpl', Partition: 1, SeqNo: 190, partNo: 0, Offset: 29361 is stored on disk 2025-03-31T16:01:36.267358Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] TPartition::ReplyWrite. Partition: 1 2025-03-31T16:01:36.267373Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Answering for message sourceid: '\00072075186224037952', Topic: 'origin/feed/streamImpl', Partition: 1, SeqNo: 191, partNo: 0, Offset: 29362 is stored on disk 2025-03-31T16:01:36.267409Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] TPartition::ReplyWrite. 
Partition: 1 2025-03-31T16:01:36.267433Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Answering for message sourceid: '\00072075186224037952', Topic: 'origin/feed/streamImpl', Partition: 1, SeqNo: 192, partNo: 0, Offset: 29363 is stored on disk 2025-03-31T16:01:36.267454Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 1, State: StateIdle] TPartition::ReplyWrite. Partition: 1 >> DataShardOutOfOrder::UncommittedReadSetAck >> TGenCompaction::OverloadFactorDuringForceCompaction [GOOD] >> TGenCompaction::ForcedCompactionNoGenerations [GOOD] >> TGenCompaction::ForcedCompactionWithGenerations [GOOD] >> TGenCompaction::ForcedCompactionWithFinalParts [GOOD] >> TGenCompaction::ForcedCompactionByDeletedRows [GOOD] >> TGenCompaction::ForcedCompactionByUnreachableMvccData [GOOD] >> TGenCompaction::ForcedCompactionByUnreachableMvccDataRestart [GOOD] >> TGenCompaction::ForcedCompactionByUnreachableMvccDataBorrowed [GOOD] >> TIterator::Basics >> TIterator::Basics [GOOD] >> TIterator::External [GOOD] >> KqpStats::DeferredEffects+UseSink [GOOD] >> KqpPragma::Auth >> KqpStats::DeferredEffects-UseSink >> TGroupMapperTest::Mirror3dc [GOOD] >> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop+UseSink [GOOD] >> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::ReadTableRanges [GOOD] Test command err: Trying to start YDB, gRPC: 10376, MsgBus: 8653 2025-03-31T16:02:36.469731Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004711759875846:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:02:36.469766Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ac7/r3tmp/tmpKrbOnq/pdisk_1.dat 2025-03-31T16:02:36.868861Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:02:36.868959Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:02:36.870836Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10376, node 1 2025-03-31T16:02:36.899889Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:02:36.905984Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:02:36.974808Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:02:37.011613Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:02:37.011643Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:02:37.011651Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:02:37.011762Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8653 TClient is connected to server localhost:8653 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:02:37.574853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:37.599008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:37.786039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:37.978312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:38.071026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:40.057042Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004728939746815:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:40.057150Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:40.411246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:02:40.498532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:02:40.571755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:02:40.628456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:02:40.667790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:02:40.745193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:02:40.834856Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004728939747335:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:40.834962Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:40.835286Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004728939747340:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:40.845927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:02:40.871978Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488004728939747342:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:02:40.936727Z node 1 :TX_PROXY ERROR: Actor# [1:7488004728939747396:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:02:41.470692Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488004711759875846:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:02:41.470770Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":17,"Plans":[{"PlanNodeId":16,"Plans":[{"PlanNodeId":15,"Plans":[{"PlanNodeId":14,"Plans":[{"PlanNodeId":13,"Plans":[{"PlanNodeId":12,"Plans":[{"PlanNodeId":11,"Plans":[{"PlanNodeId":10,"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":8,"Subplan Name":"CTE Stage_11","Plans":[{"Tables":["KeyValue"],"PlanNodeId":7,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/KeyValue","ReadRangesPointPrefixLen":"0","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Key"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"Stage","Parent Relationship":"InitPlan"}],"Node Type":"Map","PlanNodeType":"Connection"},{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Node Type":"UnionAll","PlanNodeType":"Connection","CTE Name":"Stage_11"}],"Node Type":"Stage"}],"Node Type":"Broadcast","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"GroupBy":"item.t1.Key","Aggregation":"state","Name":"Aggregate","Phase":"Intermediate"},{"Inputs":[{"InternalOperatorId":2},{"ExternalPlanNodeId":6}],"E-Rows":"No estimate","Condition":"t1.Key = t2.Key","Name":"InnerJoin (MapJoin)","E-Size":"No estimate","E-Cost":"No estimate"},{"Inputs":[{"ExternalPlanNodeId":9}],"E-Rows":"No estimate","Predicate":"Exist(item.Key)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Aggregate-InnerJoin (MapJoin)-Filter"}],"Node Type":"HashShuffle","KeyColumns":["t1.Key"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":11}],"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Node Type":"Map","PlanNodeType":"Connection"},{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":1,"Node Type":"UnionAll","PlanNodeType":"Connection","CTE Name":"Stage_11"}],"Node Type":"Stage"}],"Node Type":"Broadcast","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"SortBy":"row.Key","Name":"Sort"},{"Inputs":[{"InternalOperatorId":2},{"ExternalPlanNodeId":3}],"E-Rows":"No estimate","Condition":"Foo.t1.Key = t1.Key","Name":"InnerJoin (MapJoin)","E-Size":"No estimate","E-Cost":"No estimate"},{"Inputs":[{"ExternalPlanNodeId":13}],"E-Rows":"No estimate","Predicate":"Exist(item.t1.Key)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Sort-InnerJoin (MapJoin)-Filter"}],"Node Type":"Merge","SortColumns":["Key (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node 
Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/KeyValue","reads":[{"columns":["Key"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":8,"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":10,"Plans":[{"PlanNodeId":11,"Plans":[{"PlanNodeId":12,"Plans":[{"PlanNodeId":15,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Path":"\/Root\/KeyValue","ReadRangesPointPrefixLen":"0","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Key"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"Exist(item.Key)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"},{"PlanNodeId":19,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Path":"\/Root\/KeyValue","ReadRangesPointPrefixLen":"0","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Key"],"E-Cost":"No estimate"}],"Node Type": ... an":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["TwoKeys"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"E-Rows":"No estimate","Predicate":"item.Key2 \u003E 101","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key1 (-∞, +∞)","Key2 (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/TwoKeys","E-Rows":"No estimate","Table":"TwoKeys","ReadColumns":["Key1","Key2","Value"],"E-Cost":"No estimate"}],"Node Type":"Filter-TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/TwoKeys","reads":[{"columns":["Key1","Key2","Value"],"scan_by":["Key1 (-∞, +∞)","Key2 (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key1 (-∞, +∞)","Key2 (-∞, +∞)"],"Name":"TableFullScan","Path":"\/Root\/TwoKeys","E-Rows":"No estimate","Table":"TwoKeys","ReadColumns":["Key1","Key2","Value"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"item.Key2 \u003E 101","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 16559, MsgBus: 27061 2025-03-31T16:02:58.271843Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488004805269791118:2216];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ac7/r3tmp/tmpv1FpbS/pdisk_1.dat 2025-03-31T16:02:58.424708Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:02:58.513064Z node 4 :IMPORT WARN: 
Table profiles were not loaded 2025-03-31T16:02:58.534338Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:02:58.534428Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:02:58.540296Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16559, node 4 2025-03-31T16:02:58.739742Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:02:58.739765Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:02:58.739775Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:02:58.739907Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27061 TClient is connected to server localhost:27061 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:02:59.609006Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:59.636138Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:02:59.650810Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:59.775965Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:00.082134Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:00.190428Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:03:03.232904Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7488004805269791118:2216];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:03.232988Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:03:03.623311Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488004826744629214:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:03.623423Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:03.684401Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:03:03.739858Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:03:03.823416Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:03:03.890740Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:03:04.030923Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:03:04.101834Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:03:04.209545Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488004831039597032:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:04.209648Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:04.209899Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488004831039597037:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:04.214740Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:03:04.250871Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7488004831039597039:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:03:04.339188Z node 4 :TX_PROXY ERROR: Actor# [4:7488004831039597097:3464] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } waiting... 2025-03-31T16:03:06.419006Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:03:06.932494Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T16:03:07.030302Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["KeyValue"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, 100)","Key [2000, +∞)"],"Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/KeyValue","E-Rows":"No estimate","Table":"KeyValue","ReadRangesKeys":["Key"],"ReadColumns":["Key","Value"],"E-Cost":"No estimate","ReadRangesExpectedSize":2}],"Node Type":"TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/KeyValue","reads":[{"columns":["Key","Value"],"scan_by":["Key (-∞, 100)","Key [2000, +∞)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, 100)","Key [2000, +∞)"],"Name":"TableRangeScan","Path":"\/Root\/KeyValue","E-Rows":"No estimate","Table":"KeyValue","ReadRangesKeys":["Key"],"ReadColumns":["Key","Value"],"E-Cost":"No estimate","ReadRangesExpectedSize":2}],"Node Type":"TableRangeScan"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} |73.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence |73.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence |73.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence |73.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::Mirror3dc [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> TIterator::External [GOOD] Test command err: 00000.000 II| FAKE_ENV: Born at 2025-03-31T16:02:24.158398Z 00000.008 DD| RESOURCE_BROKER: TResourceBrokerActor bootstrap 00000.012 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.012 II| FAKE_ENV: Starting storage for BS group 0 00000.013 II| FAKE_ENV: Starting storage for BS group 1 00000.013 II| FAKE_ENV: Starting 
storage for BS group 2 00000.013 II| FAKE_ENV: Starting storage for BS group 3 00000.018 II| TABLET_FLATBOOT: Leader{1:2:-} booting Deps{0:0 entries 0} {nil} 00000.018 DD| TABLET_FLATBOOT: Leader{1:2:-} fired stage 1, has 1 jobs, Boot{ 2 que, 2 refs } 00000.018 II| TABLET_FLATBOOT: Leader{1:2:-} loading { Alter 0, Turns 0, Loans 0, GCExt 0 } 00000.018 DD| TABLET_FLATBOOT: Leader{1:2:-} fired stage 2, has 4 jobs, Boot{ 5 que, 2 refs } 00000.018 DD| TABLET_FLATBOOT: Leader{1:2:-} fired stage 3, has 1 jobs, Boot{ 2 que, 2 refs } 00000.018 II| TABLET_FLATBOOT: Leader{1:2:-} redo log has 0 records, last before 0:0 00000.018 DD| TABLET_FLATBOOT: Leader{1:2:-} fired stage 4, has 1 jobs, Boot{ 2 que, 2 refs } 00000.018 II| TABLET_FLATBOOT: Leader{1:2:-} result: db change {1 -> 1} snap on 0 00000.018 DD| TABLET_FLATBOOT: Leader{1:2:-} fired stage 5, has 0 jobs, Boot{ 1 que, 2 refs } 00000.019 II| TABLET_FLATBOOT: Leader{1:2:-} booting completed, took 0.000s 00000.078 II| TABLET_FLATBOOT: Leader{1:3:-} booting Deps{2:1 entries 252} {nil} 00000.078 DD| TABLET_FLATBOOT: Leader{1:3:-} fired stage 1, has 1 jobs, Boot{ 2 que, 2 refs } 00000.078 DD| TABLET_FLATBOOT: Leader{1:3:-} snap in deps on 2:1, TLargeGlobId{[1:2:1:1:28672:35:0] ~35b, grp 1} 00000.078 DD| TABLET_FLATBOOT: Leader{1:3:-} process snap gc entry, + [ ], - [ ] 00000.078 DD| TABLET_FLATBOOT: Leader{1:3:-} Loading TLargeGlobId{[1:2:1:1:28672:35:0] ~35b, grp 1} 00000.078 II| TABLET_FLATBOOT: Leader{1:3:-} snap on 2:1 change 1, 25b, ABI 28 of [1, 28], GC{ +0 -0 } 00000.078 DD| TABLET_FLATBOOT: Leader{1:3:-} process gc snapshot, + [ ], - [ ] 00000.078 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:2:1:8192:209:0] ], - [ ] 00000.078 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:3:1:24576:74:0] ], - [ ] 00000.078 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:4:1:24576:79:0] ], - [ ] 00000.078 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:5:1:24576:81:0] ], - [ ] 00000.078 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:6:1:24576:81:0] ], - [ ] 00000.078 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:7:1:24576:81:0] ], - [ ] 00000.078 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:8:1:24576:79:0] ], - [ ] 00000.078 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:9:1:24576:79:0] ], - [ ] 00000.078 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:10:1:24576:81:0] ], - [ ] 00000.078 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:11:1:24576:81:0] ], - [ ] 00000.078 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:12:1:24576:81:0] ], - [ ] 00000.079 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:13:1:24576:81:0] ], - [ ] 00000.079 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:14:1:24576:81:0] ], - [ ] 00000.079 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:15:1:24576:81:0] ], - [ ] 00000.079 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:16:1:24576:81:0] ], - [ ] 00000.079 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:17:1:24576:79:0] ], - [ ] 00000.079 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:18:1:24576:79:0] ], - [ ] 00000.079 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:19:1:24576:79:0] ], - [ ] 00000.079 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:20:1:24576:81:0] ], - [ ] 00000.079 
DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:21:1:24576:81:0] ], - [ ] 00000.079 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:22:1:24576:81:0] ], - [ ] 00000.079 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:23:1:24576:81:0] ], - [ ] 00000.079 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:24:1:24576:81:0] ], - [ ] 00000.079 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:25:1:24576:81:0] ], - [ ] 00000.079 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:26:1:24576:81:0] ], - [ ] 00000.079 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:27:1:24576:81:0] ], - [ ] 00000.079 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:28:1:24576:79:0] ], - [ ] 00000.079 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:29:1:24576:79:0] ], - [ ] 00000.079 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:30:1:24576:79:0] ], - [ ] 00000.079 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:31:1:24576:81:0] ], - [ ] 00000.079 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:32:1:24576:79:0] ], - [ ] 00000.079 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:33:1:24576:83:0] ], - [ ] 00000.079 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:34:1:24576:82:0] ], - [ ] 00000.079 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:35:1:24576:81:0] ], - [ ] 00000.079 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:36:1:24576:81:0] ], - [ ] 00000.079 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:37:1:24576:81:0] ], - [ ] 00000.079 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:38:1:24576:81:0] ], - [ ] 00000.079 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:39:1:24576:81:0] ], - [ ] 00000.079 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:40:1:24576:81:0] ], - [ ] 00000.079 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:41:1:24576:81:0] ], - [ ] 00000.079 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:42:1:24576:81:0] ], - [ ] 00000.079 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:43:1:24576:81:0] ], - [ ] 00000.079 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:44:1:24576:81:0] ], - [ ] 00000.079 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:45:1:24576:81:0] ], - [ ] 00000.079 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:46:1:24576:81:0] ], - [ ] 00000.079 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:47:1:24576:81:0] ], - [ ] 00000.079 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:48:1:24576:79:0] ], - [ ] 00000.079 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:49:1:24576:79:0] ], - [ ] 00000.079 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:50:1:24576:82:0] ], - [ ] 00000.079 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:51:1:24576:81:0] ], - [ ] 00000.079 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:52:1:24576:81:0] ], - [ ] 00000.079 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:53:1:24576:81:0] ], - [ ] 00000.079 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:54:1:24576:81:0] ], - [ ] 00000.079 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:55:1:24576:81:0] ], 
- [ ] 00000.079 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:56:1:24576:81:0] ], - [ ] 00000.079 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:57:1:24576:81:0] ], - [ ] 00000.079 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:58:1:24576:81:0] ], - [ ] 00000.079 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:59:1:24576:81:0] ], - [ ] 00000.079 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:60:1:24576:81:0] ], - [ ] 00000.079 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:61:1:24576:81:0] ], - [ ] 00000.079 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:62:1:24576:81:0] ], - [ ] 00000.079 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:63:1:24576:81:0] ], - [ ] 00000.079 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:64:1:24576:81:0] ], - [ ] 00000.079 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:65:1:24576:81:0] ], - [ ] 00000.079 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:66:1:24576:81:0] ], - [ ] 00000.079 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:67:1:24576:81:0] ], - [ ] 00000.079 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:68:1:24576:81:0] ], - [ ] 00000.079 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:69:1:24576:81:0] ], - [ ] 00000.079 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:70:1:24576:81:0] ], - [ ] 00000.079 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:71:1:24576:81:0] ], - [ ] 00000.079 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:72:1:24576:81:0] ], - [ ] 00000.080 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:73:1:24576:81:0] ], - [ ] 00000.080 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:74:1:24576:81:0] ], - [ ] 00000.080 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:75:1:24576:81:0] ], - [ ] 00000.080 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:76:1:24576:81:0] ], - [ ] 00000.080 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:77:1:24576:81:0] ], - [ ] 00000.080 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:78:1:24576:81:0] ], - [ ] 00000.080 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:79:1:24576:81:0] ], - [ ] 00000.080 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:80:1:24576:81:0] ], - [ ] 00000.080 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:81:1:24576:81:0] ], - [ ] 00000.080 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:82:1:24576:81:0] ], - [ ] 00000.080 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:83:1:24576:81:0] ], - [ ] 00000.080 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:84:1:24576:81:0] ], - [ ] 00000.080 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:85:1:24576:81:0] ], - [ ] 00000.080 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:86:1:24576:81:0] ], - [ ] 00000.080 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:87:1:24576:81:0] ], - [ ] 00000.080 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:88:1:24576:81:0] ], - [ ] 00000.080 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:89:1:24576:81:0] ], - [ ] 00000.080 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ 
[1:2:90:1:24576:81:0] ], - [ ] 00000.080 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:91:1:24576:81:0] ], - [ ] 00000.080 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:92:1:24576:81:0] ], - [ ] 00000.080 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:93:1:24576:81:0] ], - [ ] 00000.080 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:94:1:24576:81:0] ], - [ ] 00000.080 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:95:1:24576:81:0] ], - [ ] 00000.080 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:96:1:24576:81:0] ], - [ ] 00000.080 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:97:1:24576:81:0] ], - [ ] 00000.080 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:98:1:24576:81:0] ], - [ ] 00000.080 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:99:1:24576:81:0] ], - ... 24576:100:0], [1:2:19:1:24576:97:0], [1:2:20:1:24576:96:0], [1:2:21:1:24576:97:0], [1:2:22:1:24576:97:0], [1:2:23:1:24576:97:0], [1:2:24:1:24576:97:0], [1:2:25:1:24576:97:0], [1:2:26:1:24576:97:0], [1:2:27:1:24576:97:0], [1:2:28:1:24576:96:0], [1:2:29:1:24576:100:0], [1:2:30:1:24576:97:0], [1:2:31:1:24576:96:0], [1:2:32:1:24576:96:0], [1:2:33:1:24576:104:0], [1:2:34:1:24576:97:0], [1:2:35:1:24576:99:0], [1:2:36:1:24576:97:0], [1:2:37:1:24576:97:0], [1:2:38:1:24576:97:0], [1:2:39:1:24576:97:0], [1:2:40:1:24576:97:0], [1:2:41:1:24576:97:0], [1:2:42:1:24576:97:0], [1:2:43:1:24576:97:0], [1:2:44:1:24576:97:0], [1:2:45:1:24576:97:0], [1:2:46:1:24576:97:0], [1:2:47:1:24576:97:0], [1:2:48:1:24576:97:0], [1:2:49:1:24576:97:0], [1:2:50:1:24576:97:0], [1:2:51:1:24576:97:0], [1:2:52:1:24576:97:0], [1:2:53:1:24576:97:0], [1:2:54:1:24576:97:0], [1:2:55:1:24576:97:0], [1:2:56:1:24576:97:0], [1:2:57:1:24576:97:0], [1:2:58:1:24576:97:0], [1:2:59:1:24576:97:0], [1:2:60:1:24576:97:0], [1:2:61:1:24576:97:0], [1:2:62:1:24576:97:0], [1:2:63:1:24576:97:0], [1:2:64:1:24576:97:0], [1:2:65:1:24576:97:0], [1:2:66:1:24576:97:0], [1:2:67:1:24576:97:0], [1:2:68:1:24576:97:0], [1:2:69:1:24576:97:0], [1:2:70:1:24576:97:0], [1:2:71:1:24576:97:0], [1:2:72:1:24576:97:0], [1:2:73:1:24576:101:0], [1:2:74:1:24576:102:0], [1:2:75:1:24576:101:0], [1:2:76:1:24576:102:0], [1:2:77:1:24576:104:0], [1:2:78:1:24576:104:0], [1:2:79:1:24576:104:0], [1:2:80:1:24576:104:0], [1:2:81:1:24576:103:0], [1:2:82:1:24576:101:0], [1:2:83:1:24576:104:0], [1:2:84:1:24576:104:0], [1:2:85:1:24576:104:0], [1:2:86:1:24576:104:0], [1:2:87:1:24576:104:0], [1:2:88:1:24576:104:0], [1:2:89:1:24576:104:0], [1:2:90:1:24576:101:0], [1:2:91:1:24576:104:0], [1:2:92:1:24576:104:0], [1:2:93:1:24576:98:0], [1:2:94:1:24576:104:0], [1:2:95:1:24576:104:0], [1:2:96:1:24576:104:0], [1:2:97:1:24576:104:0], [1:2:98:1:24576:104:0], [1:2:99:1:24576:104:0], [1:2:100:1:24576:104:0], [1:2:101:1:24576:97:0], [1:2:102:1:24576:100:0], [1:2:103:1:24576:104:0], [1:2:104:1:24576:104:0], [1:2:105:1:24576:104:0], [1:2:106:1:24576:104:0], [1:2:107:1:24576:104:0], [1:2:108:1:24576:104:0], [1:2:109:1:24576:104:0], [1:2:110:1:24576:104:0], [1:2:111:1:24576:104:0], [1:2:112:1:24576:104:0], [1:2:113:1:24576:104:0], [1:2:114:1:24576:104:0], [1:2:115:1:24576:104:0], [1:2:116:1:24576:104:0], [1:2:117:1:24576:104:0], [1:2:118:1:24576:104:0], [1:2:119:1:24576:104:0], [1:2:120:1:24576:104:0], [1:2:121:1:24576:104:0], [1:2:122:1:24576:104:0], [1:2:123:1:24576:104:0], [1:2:124:1:24576:104:0], [1:2:125:1:24576:104:0], [1:2:126:1:24576:104:0], [1:2:127:1:24576:104:0], 
[1:2:128:1:24576:104:0], [1:2:129:1:24576:104:0], [1:2:130:1:24576:104:0], [1:2:131:1:24576:104:0], [1:2:132:1:24576:104:0], [1:2:133:1:24576:104:0], [1:2:134:1:24576:104:0], [1:2:135:1:24576:104:0], [1:2:136:1:24576:104:0], [1:2:137:1:24576:104:0], [1:2:138:1:24576:104:0], [1:2:139:1:24576:104:0], [1:2:140:1:24576:104:0], [1:2:141:1:24576:104:0], [1:2:142:1:24576:104:0], [1:2:145:1:24576:60:0], [1:2:146:1:24576:60:0] } 00000.123 TT| TABLET_SAUSAGECACHE: Request page collection [1:2:143:1:12288:758:0] owner [35:212:2237] class Online from cache [ ] already requested [ ] to request [ 22 23 24 25 ] 00000.123 TT| TABLET_SAUSAGECACHE: Loaded page collection [1:2:143:1:12288:758:0] status OK pages [ 22 23 24 25 ] 00000.124 II| TABLET_EXECUTOR: Leader{1:3:0} activating executor 00000.124 II| TABLET_EXECUTOR: LSnap{1:3, on 3:1, 1880b, wait} done, Waste{2:0, 141856b +(140, 14018b), 146 trc} 00000.125 DD| TABLET_SAUSAGECACHE: Attach page collection [1:2:143:1:12288:758:0] owner [35:212:2237] 00000.125 TT| TABLET_SAUSAGECACHE: Request page collection [1:2:143:1:12288:758:0] owner [35:212:2237] class AsyncLoad from cache [ ] already requested [ 22 23 24 25 ] to request [ 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 ] 00000.125 TT| TABLET_SAUSAGECACHE: Request page collection [1:2:143:1:12288:758:0] async queue pages [ 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 ] 00000.126 TT| TABLET_SAUSAGECACHE: Loaded page collection [1:2:143:1:12288:758:0] status OK pages [ 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 ] 00000.126 DD| TABLET_EXECUTOR: Leader{1:3:2} got result TEvResult{26 pages [1:2:143:1:12288:758:0] ok OK}, category 2 00000.126 TT| TABLET_SAUSAGECACHE: Touch page collection [1:2:143:1:12288:758:0] owner [35:212:2237] pages [ 22 23 24 25 ] 00000.127 DD| TABLET_EXECUTOR: Leader{1:3:2} commited cookie 2 for step 1 00000.127 TT| TABLET_SAUSAGECACHE: Touch page collection [1:2:143:1:12288:758:0] owner [35:212:2237] pages [ 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 ] 00000.128 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_StickyPages::TTxFullScan} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_StickyPages::TTxFullScan 00000.128 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_StickyPages::TTxFullScan} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.129 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_StickyPages::TTxFullScan} hope 1 -> done Change{145, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.129 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_StickyPages::TTxFullScan} release 4194304b of static, Memory{0 dyn 0} 00000.129 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.129 II| TABLET_EXECUTOR: Leader{1:3:2} suiciding, Waste{2:0, 141856b +(0, 0b), 1 trc, -14018b acc} 00000.129 DD| TABLET_SAUSAGECACHE: Unregister owner [35:212:2237] 00000.130 NN| TABLET_SAUSAGECACHE: Poison cache serviced 3 reqs hit {6 1077b} miss {50 281387b} 00000.130 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.130 II| FAKE_ENV: DS.1 gone, left {143736b, 8}, put {157893b, 150} 00000.130 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.130 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.130 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {14354b, 149} 00000.130 II| FAKE_ENV: All 
BS storage groups are stopped 00000.130 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00000.130 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 782}, stopped 00000.000 II| FAKE_ENV: Born at 2025-03-31T16:02:32.078362Z 00000.008 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.008 II| FAKE_ENV: Starting storage for BS group 0 00000.009 II| FAKE_ENV: Starting storage for BS group 1 00000.009 II| FAKE_ENV: Starting storage for BS group 2 00000.009 II| FAKE_ENV: Starting storage for BS group 3 00000.036 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.037 NN| TABLET_SAUSAGECACHE: Poison cache serviced 3 reqs hit {3 512b} miss {0 0b} 00000.037 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.037 II| FAKE_ENV: DS.0 gone, left {1356b, 12}, put {1376b, 13} 00000.037 II| FAKE_ENV: DS.1 gone, left {6814b, 23}, put {6814b, 23} 00000.037 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.037 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.037 II| FAKE_ENV: All BS storage groups are stopped 00000.037 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.037 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-03-31T16:02:32.122099Z 00000.007 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.007 II| FAKE_ENV: Starting storage for BS group 0 00000.008 II| FAKE_ENV: Starting storage for BS group 1 00000.008 II| FAKE_ENV: Starting storage for BS group 2 00000.008 II| FAKE_ENV: Starting storage for BS group 3 00000.227 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 2 actors 00000.227 NN| TABLET_SAUSAGECACHE: Poison cache serviced 10 reqs hit {860 5551893b} miss {0 0b} 00000.227 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.227 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.227 II| FAKE_ENV: DS.0 gone, left {1201b, 13}, put {1221b, 14} 00000.227 II| FAKE_ENV: DS.1 gone, left {6751256b, 17}, put {6751256b, 17} 00000.230 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.230 II| FAKE_ENV: All BS storage groups are stopped 00000.230 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.230 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-03-31T16:02:32.367872Z 00000.006 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.007 II| FAKE_ENV: Starting storage for BS group 0 00000.008 II| FAKE_ENV: Starting storage for BS group 1 00000.008 II| FAKE_ENV: Starting storage for BS group 2 00000.008 II| FAKE_ENV: Starting storage for BS group 3 00016.851 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 2 actors 00016.851 NN| TABLET_SAUSAGECACHE: Poison cache serviced 4109 reqs hit {2091 2366986b} miss {6144 6340608b} 00016.852 II| FAKE_ENV: Shut order, stopping 4 BS groups 00016.852 II| FAKE_ENV: DS.0 gone, left {1761b, 14}, put {1781b, 15} 00016.852 II| FAKE_ENV: DS.1 gone, left {6927727b, 27}, put {6927727b, 27} 00016.855 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00016.855 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00016.855 II| FAKE_ENV: All BS storage groups are stopped 00016.855 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00016.855 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 
15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-03-31T16:02:49.254241Z 00000.008 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.009 II| FAKE_ENV: Starting storage for BS group 0 00000.009 II| FAKE_ENV: Starting storage for BS group 1 00000.009 II| FAKE_ENV: Starting storage for BS group 2 00000.009 II| FAKE_ENV: Starting storage for BS group 3 00018.047 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 2 actors 00018.047 NN| TABLET_SAUSAGECACHE: Poison cache serviced 4106 reqs hit {43 253450b} miss {4096 4227072b} 00018.048 II| FAKE_ENV: Shut order, stopping 4 BS groups 00018.048 II| FAKE_ENV: DS.0 gone, left {44744b, 2}, put {164747b, 16} 00018.049 II| FAKE_ENV: DS.1 gone, left {2764621b, 2068}, put {2764621b, 2068} 00018.056 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00018.056 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00018.056 II| FAKE_ENV: All BS storage groups are stopped 00018.056 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00018.056 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-03-31T16:03:07.329415Z 00000.009 II| FAKE_ENV: Starting storage for BS group 0 00000.010 II| FAKE_ENV: Starting storage for BS group 1 00000.010 II| FAKE_ENV: Starting storage for BS group 2 00000.010 II| FAKE_ENV: Starting storage for BS group 3 00000.000 II| FAKE_ENV: Born at 2025-03-31T16:03:07.369957Z 00000.010 II| FAKE_ENV: Starting storage for BS group 0 00000.010 II| FAKE_ENV: Starting storage for BS group 1 00000.010 II| FAKE_ENV: Starting storage for BS group 2 00000.010 II| FAKE_ENV: Starting storage for BS group 3 00000.000 II| FAKE_ENV: Born at 2025-03-31T16:03:07.430992Z 00000.009 II| FAKE_ENV: Starting storage for BS group 0 00000.009 II| FAKE_ENV: Starting storage for BS group 1 00000.010 II| FAKE_ENV: Starting storage for BS group 2 00000.010 II| FAKE_ENV: Starting storage for BS group 3 00000.000 II| FAKE_ENV: Born at 2025-03-31T16:03:07.518425Z 00000.018 II| FAKE_ENV: Starting storage for BS group 0 00000.018 II| FAKE_ENV: Starting storage for BS group 1 00000.019 II| FAKE_ENV: Starting storage for BS group 2 00000.019 II| FAKE_ENV: Starting storage for BS group 3 >> TExtSubDomainTest::DeclareAndDrop >> TFlatTableExecutor_IndexLoading::Scan_History_FlatIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_History_BTreeIndex >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLock-EvWrite [GOOD] >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLockOutOfOrder >> KqpStats::StreamLookupStats-StreamLookupJoin [GOOD] >> KqpStats::SysViewCancelled ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCDS64 [GOOD] Test command err: Trying to start YDB, gRPC: 30588, MsgBus: 16332 2025-03-31T15:58:48.945855Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488003732992745111:2195];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:58:48.949731Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0019e8/r3tmp/tmpz2W8ih/pdisk_1.dat 2025-03-31T15:58:49.804450Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2025-03-31T15:58:49.804567Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:58:49.811065Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30588, node 1 2025-03-31T15:58:49.841800Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T15:58:49.842279Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T15:58:49.971096Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:58:49.971124Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:58:49.971134Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:58:49.971249Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T15:58:50.000730Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:58:50.003286Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TClient is connected to server localhost:16332 TClient is connected to server localhost:16332 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:58:50.829379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:58:50.876674Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T15:58:53.919858Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488003732992745111:2195];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:58:53.919913Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:58:54.161047Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003758762549422:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:54.161202Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:54.161633Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003758762549434:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:54.170071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T15:58:54.184448Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488003758762549436:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T15:58:54.271597Z node 1 :TX_PROXY ERROR: Actor# [1:7488003758762549487:2342] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:58:54.659024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T15:58:55.059479Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488003758762549793:2364];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T15:58:55.059740Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488003758762549793:2364];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T15:58:55.060029Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488003758762549793:2364];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T15:58:55.060177Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488003758762549793:2364];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T15:58:55.060333Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488003758762549793:2364];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T15:58:55.060454Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488003758762549793:2364];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T15:58:55.060551Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488003758762549793:2364];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T15:58:55.060684Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488003758762549793:2364];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T15:58:55.060809Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488003758762549793:2364];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T15:58:55.060918Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488003758762549793:2364];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T15:58:55.061034Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;self_id=[1:7488003758762549793:2364];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T15:58:55.061133Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488003758762549793:2364];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T15:58:55.067143Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488003758762549781:2358];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T15:58:55.067207Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488003758762549781:2358];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T15:58:55.067440Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488003758762549781:2358];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T15:58:55.067541Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488003758762549781:2358];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T15:58:55.067649Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488003758762549781:2358];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T15:58:55.067738Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488003758762549781:2358];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T15:58:55.067839Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488003758762549781:2358];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T15:58:55.067942Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488003758762549781:2358];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T15:58:55.068055Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488003758762549781:2358];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T15:58:55.068160Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488003758762549781:2358];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T15:58:55.068247Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488003758762549781:2358];tablet_id=72075186224037895;process=TTxInitSchema::Execute; ... 
00:22.526283Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039345;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:22.546155Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039217;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:22.557199Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039275;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:22.560106Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039235;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:22.581093Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039186;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:22.588330Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:22.602438Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039315;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:22.603241Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:22.616289Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039301;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:22.633464Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:22.646315Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:22.652242Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039283;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:22.664154Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:22.680555Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:22.693928Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039307;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:22.701828Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039317;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:22.716049Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039285;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:22.722194Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:22.734949Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039232;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:22.745246Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:22.991173Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039344;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:23.008821Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039380;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:23.020212Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:23.034326Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039191;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:00:23.235767Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpew2bf97jzej2nh9q26mae", SessionId: ydb://session/3?node_id=1&id=YzllZjNjY2UtOWM5ODc1ODctYmUzNmExODgtNTdjOTBmZDc=, Slow query, duration: 45.395366s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:00:24.253734Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:00:24.254190Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:00:24.255263Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039094;self_id=[1:7488004007870699256:9213];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039094;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039392;receive=72075186224038933; 2025-03-31T16:00:24.255675Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:02:50.580737Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpezsx29vn2ce0nas8zc14g", SessionId: ydb://session/3?node_id=1&id=YzllZjNjY2UtOWM5ODc1ODctYmUzNmExODgtNTdjOTBmZDc=, Slow query, duration: 70.314397s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "PRAGMA TablePathPrefix='/Root/test/ds';\n\n-- NB: Subquerys\n\n$cs_ui =\n\n (select catalog_sales.cs_item_sk cs_item_sk\n\n ,sum(cs_ext_list_price) as sale,sum(cr_refunded_cash+cr_reversed_charge+cr_store_credit) as refund\n\n from catalog_sales as catalog_sales\n\n cross join catalog_returns as catalog_returns\n\n where cs_item_sk = cr_item_sk\n\n and cs_order_number = cr_order_number\n\n group by catalog_sales.cs_item_sk\n\n having sum(cs_ext_list_price)>2*sum(cr_refunded_cash+cr_reversed_charge+cr_store_credit));\n\n$cross_sales =\n\n (select item.i_product_name product_name\n\n ,item.i_item_sk item_sk\n\n ,store.s_store_name store_name\n\n ,store.s_zip store_zip\n\n ,ad1.ca_street_number b_street_number\n\n ,ad1.ca_street_name b_street_name\n\n ,ad1.ca_city b_city\n\n ,ad1.ca_zip b_zip\n\n ,ad2.ca_street_number c_street_number\n\n ,ad2.ca_street_name c_street_name\n\n ,ad2.ca_city c_city\n\n ,ad2.ca_zip c_zip\n\n ,d1.d_year as syear\n\n ,d2.d_year as fsyear\n\n ,d3.d_year s2year\n\n ,count(*) cnt\n\n ,sum(ss_wholesale_cost) s1\n\n ,sum(ss_list_price) s2\n\n ,sum(ss_coupon_amt) s3\n\n FROM store_sales as store_sales\n\n cross join store_returns as store_returns\n\n cross join $cs_ui cs_ui\n\n cross join date_dim d1\n\n cross join date_dim d2\n\n cross join date_dim d3\n\n cross join store as store\n\n cross join customer as customer\n\n cross join customer_demographics cd1\n\n cross join customer_demographics cd2\n\n cross join promotion as promotion\n\n cross join household_demographics hd1\n\n cross join household_demographics hd2\n\n cross join customer_address ad1\n\n cross join customer_address ad2\n\n cross join income_band ib1\n\n cross join income_band ib2\n\n cross join item as item\n\n WHERE ss_store_sk = s_store_sk AND\n\n ss_sold_date_sk = d1.d_date_sk AND\n\n ss_customer_sk = c_customer_sk AND\n\n ss_cdemo_sk= cd1.cd_demo_sk AND\n\n ss_hdemo_sk = hd1.hd_demo_sk AND\n\n ss_addr_sk = ad1.ca_address_sk and\n\n ss_item_sk = i_item_sk and\n\n ss_item_sk = sr_item_sk and\n\n ss_ticket_number = sr_ticket_number and\n\n ss_item_sk = cs_ui.cs_item_sk and\n\n c_current_cdemo_sk = cd2.cd_demo_sk AND\n\n c_current_hdemo_sk = hd2.hd_demo_sk AND\n\n c_current_addr_sk = ad2.ca_address_sk and\n\n c_first_sales_date_sk = d2.d_date_sk and\n\n c_first_shipto_date_sk = d3.d_date_sk and\n\n ss_promo_sk = p_promo_sk and\n\n hd1.hd_income_band_sk = ib1.ib_income_band_sk and\n\n hd2.hd_income_band_sk = ib2.ib_income_band_sk and\n\n cd1.cd_marital_status <> cd2.cd_marital_status and\n\n i_color in ('azure','gainsboro','misty','blush','hot','lemon') and\n\n i_current_price between 80 and 80 + 10 and\n\n i_current_price between 80 + 1 and 80 + 15\n\ngroup by item.i_product_name\n\n ,item.i_item_sk\n\n 
,store.s_store_name\n\n ,store.s_zip\n\n ,ad1.ca_street_number\n\n ,ad1.ca_street_name\n\n ,ad1.ca_city\n\n ,ad1.ca_zip\n\n ,ad2.ca_street_number\n\n ,ad2.ca_street_name\n\n ,ad2.ca_city\n\n ,ad2.ca_zip\n\n ,d1.d_year\n\n ,d2.d_year\n\n ,d3.d_year\n\n);\n\n-- start query 1 in stream 0 using template query64.tpl and seed 1220860970\n\nselect cs1.product_name\n\n ,cs1.store_name\n\n ,cs1.store_zip\n\n ,cs1.b_street_number\n\n ,cs1.b_street_name\n\n ,cs1.b_city\n\n ,cs1.b_zip\n\n ,cs1.c_street_number\n\n ,cs1.c_street_name\n\n ,cs1.c_city\n\n ,cs1.c_zip\n\n ,cs1.syear\n\n ,cs1.cnt\n\n ,cs1.s1 as s11\n\n ,cs1.s2 as s21\n\n ,cs1.s3 as s31\n\n ,cs2.s1 as s12\n\n ,cs2.s2 as s22\n\n ,cs2.s3 as s32\n\n ,cs2.syear\n\n ,cs2.cnt\n\nfrom $cross_sales cs1 cross join $cross_sales cs2\n\nwhere cs1.item_sk=cs2.item_sk and\n\n cs1.syear = 1999 and\n\n cs2.syear = 1999 + 1 and\n\n cs2.cnt <= cs1.cnt and\n\n cs1.store_name = cs2.store_name and\n\n cs1.store_zip = cs2.store_zip\n\norder by cs1.product_name\n\n ,cs1.store_name\n\n ,cs2.cnt\n\n ,s11\n\n ,s21\n\n ,s22;\n\n\n\n-- end query 1 in stream 0 using template query64.tpl\n", parameters: 0b >> TPartBtreeIndexIteration::FewNodes_Groups [GOOD] >> TPartBtreeIndexIteration::FewNodes_History >> TVersions::WreckHeadReverse [GOOD] >> TVersions::Wreck2 |73.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |73.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpTypes::Time64Columns+EnableTableDatetime64 [GOOD] >> THiveTest::TestCheckSubHiveMigrationWithReboots [GOOD] >> THiveTest::TestCreateAndDeleteTabletWithStoragePoolsReboots >> KqpQuery::QueryCache [GOOD] >> KqpQuery::Pure >> TExecutorDb::RandomOps [GOOD] >> TExecutorDb::FullScan |73.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table |73.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table |73.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpTypes::Time64Columns+EnableTableDatetime64 [GOOD] Test command err: Trying to start YDB, gRPC: 18228, MsgBus: 65394 2025-03-31T16:02:48.868054Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004762517981409:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:02:48.868098Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ab9/r3tmp/tmpeOVV18/pdisk_1.dat 2025-03-31T16:02:49.463043Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:02:49.484187Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:02:49.484261Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:02:49.502213Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18228, node 1 2025-03-31T16:02:49.725852Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:02:49.725874Z node 1 :NET_CLASSIFIER 
WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:02:49.725880Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:02:49.726000Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65394 TClient is connected to server localhost:65394 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:02:50.638459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:50.663752Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:02:50.691791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:50.981351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:51.260349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:51.366245Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:53.533335Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004783992819668:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:53.533455Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:53.871208Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488004762517981409:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:02:53.871261Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:02:53.892158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:02:53.972670Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:02:54.024594Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:02:54.061317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:02:54.115499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:02:54.198677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:02:54.268764Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004788287787486:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:54.268908Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:54.269336Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004788287787491:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:54.273668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:02:54.287214Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488004788287787493:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:02:54.389673Z node 1 :TX_PROXY ERROR: Actor# [1:7488004788287787549:3458] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 27978, MsgBus: 2508 2025-03-31T16:02:56.914517Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488004798556433766:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:02:56.914573Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ab9/r3tmp/tmp2A44R9/pdisk_1.dat 2025-03-31T16:02:57.214049Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:02:57.236387Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:02:57.236468Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:02:57.240640Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27978, node 2 2025-03-31T16:02:57.407645Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:02:57.407668Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:02:57.407676Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:02:57.407794Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2508 TClient is connected to server localhost:2508 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:02:58.272753Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:02:58.283859Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:02:58.309777Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:58.416057Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:58.625793Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:58.805909Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:01.665165Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488004820031272012:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:01.665299Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:01.726285Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:03:01.775966Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:03:01.828614Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:03:01.876610Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:03:01.919242Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488004798556433766:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:01.919342Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:03:01.977835Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:03:02.057647Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:03:02.155447Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488004824326239825:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:02.155680Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:02.163257Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488004824326239830:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:02.171643Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:03:02.190040Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488004824326239832:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:03:02.242709Z node 2 :TX_PROXY ERROR: Actor# [2:7488004824326239887:3453] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 16289, MsgBus: 24784 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ab9/r3tmp/tmpFNZsNo/pdisk_1.dat 2025-03-31T16:03:05.269476Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:03:05.269624Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:03:05.269692Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:03:05.276548Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:03:05.291549Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16289, node 3 2025-03-31T16:03:05.508031Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:03:05.508053Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:03:05.508064Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:03:05.508200Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24784 TClient is connected to server localhost:24784 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:03:06.560645Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:06.574172Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:03:10.855792Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488004858648514791:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:10.855880Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:10.891162Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:03:10.996493Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488004858648514892:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:10.996624Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:10.998095Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488004858648514897:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:11.002460Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-31T16:03:11.014240Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488004858648514899:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-31T16:03:11.125288Z node 3 :TX_PROXY ERROR: Actor# [3:7488004862943482246:2403] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpQuery::OltpCreateAsSelect_Simple [GOOD] >> KqpQuery::OltpCreateAsSelect_Disable >> KqpParams::ImplicitDifferentParameterTypesQueryCacheCheck [GOOD] >> KqpParams::DefaultParameterValue |73.3%| [TA] $(B)/ydb/core/persqueue/ut/slow/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardUserAttrsTest::UserConditionsAtCreateDropOps >> DataShardOutOfOrder::TestShardSnapshotReadNoEarlyReply [GOOD] >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLock+EvWrite >> TSchemeShardUserAttrsTest::SetAttrs >> TPQTest::TestManyConsumers [GOOD] >> DataShardTxOrder::ImmediateBetweenOnline_oo8_dirty [GOOD] >> BuildStatsHistogram::Ten_Crossed [GOOD] >> BuildStatsHistogram::Ten_Mixed_Log >> TExtSubDomainTest::DeclareAndDrop [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ImmediateBetweenOnline_oo8_dirty [GOOD] Test command err: 2025-03-31T16:03:06.613642Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:03:06.615874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:03:06.615946Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:06.628494Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:03:06.629017Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:131:2154] 2025-03-31T16:03:06.629321Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:03:06.649383Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:03:06.706433Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:03:06.706798Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:03:06.708705Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-03-31T16:03:06.708792Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-03-31T16:03:06.708846Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-03-31T16:03:06.709229Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:03:06.709846Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:03:06.709937Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:197:2154] in generation 2 2025-03-31T16:03:06.804726Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:03:06.861500Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-03-31T16:03:06.861703Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:03:06.861831Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-03-31T16:03:06.861873Z node 1 :TX_DATASHARD 
DEBUG: Trying to activate change sender: at tablet: 9437184 2025-03-31T16:03:06.861917Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-03-31T16:03:06.861965Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:03:06.862198Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:03:06.862251Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:03:06.862550Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-03-31T16:03:06.862662Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-03-31T16:03:06.862716Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-31T16:03:06.862756Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:03:06.862794Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-03-31T16:03:06.862829Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-31T16:03:06.862867Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-31T16:03:06.862926Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-03-31T16:03:06.862965Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:03:06.863175Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:03:06.863243Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:03:06.863302Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-03-31T16:03:06.866125Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:131:2154]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\001J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-03-31T16:03:06.866182Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T16:03:06.866278Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-31T16:03:06.866421Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-03-31T16:03:06.866467Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-03-31T16:03:06.866530Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-03-31T16:03:06.866573Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-31T16:03:06.866618Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-03-31T16:03:06.866668Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-03-31T16:03:06.866709Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit 
StoreSchemeTx 2025-03-31T16:03:06.867045Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-03-31T16:03:06.867156Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-03-31T16:03:06.867220Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-03-31T16:03:06.867261Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-03-31T16:03:06.867306Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-03-31T16:03:06.867349Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-03-31T16:03:06.867396Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-03-31T16:03:06.867432Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-03-31T16:03:06.867459Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-03-31T16:03:06.880403Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-03-31T16:03:06.880481Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-03-31T16:03:06.880516Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-03-31T16:03:06.880571Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-03-31T16:03:06.880683Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-03-31T16:03:06.881213Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:03:06.881272Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:03:06.881335Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-03-31T16:03:06.881471Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:131:2154]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-03-31T16:03:06.881505Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-31T16:03:06.881645Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-03-31T16:03:06.881688Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-31T16:03:06.881728Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-03-31T16:03:06.881771Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-03-31T16:03:06.894014Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-03-31T16:03:06.894099Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:03:06.894331Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
2025-03-31T16:03:06.894376Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:03:06.894442Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-31T16:03:06.894484Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:03:06.894520Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-31T16:03:06.894564Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-03-31T16:03:06.894601Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-03-31T16:03:06.894650Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-31T16:03:06.894687Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-03-31T16:03:06.894728Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-03-31T16:03:06.894769Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-03-31T16:03:06.894994Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-03-31T16:03:06.895042Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-31T16:03:06.898779Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-03-31T16:03:06.898847Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-03-31T16:03:06.898889Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-03-31T16:03:06.899023Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-31T16:03:06.899060Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-03-31T16:03:06.899128Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-31T16:03:06.899195Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-03-31T16:03:06.899274Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-03-31T16:03:06.899316Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-03-31T16:03:06.899353Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-03-31T16:03:06.899405Z node 1 :TX_DATA ... 
2025-03-31T16:03:14.845848Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:03:14.845888Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 113 2025-03-31T16:03:14.845978Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 116 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 38} 2025-03-31T16:03:14.846002Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:03:14.846024Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 116 2025-03-31T16:03:14.846108Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 119 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 39} 2025-03-31T16:03:14.846132Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:03:14.846157Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 119 2025-03-31T16:03:14.846221Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2025-03-31T16:03:14.846250Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:03:14.846293Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 140 2025-03-31T16:03:14.846415Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2025-03-31T16:03:14.846445Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:03:14.846482Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 143 2025-03-31T16:03:14.846583Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 122 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 40} 2025-03-31T16:03:14.846618Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:03:14.846642Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 122 2025-03-31T16:03:14.846719Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2025-03-31T16:03:14.846743Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:03:14.846765Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 146 2025-03-31T16:03:14.846841Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 
9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2025-03-31T16:03:14.846865Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:03:14.846905Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2025-03-31T16:03:14.846997Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-03-31T16:03:14.847025Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:03:14.847046Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 2025-03-31T16:03:14.855376Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 125 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 41} 2025-03-31T16:03:14.855469Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:03:14.855518Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 125 2025-03-31T16:03:14.855758Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 128 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 42} 2025-03-31T16:03:14.855790Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:03:14.855816Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 128 2025-03-31T16:03:14.855902Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 131 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 43} 2025-03-31T16:03:14.855928Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:03:14.855969Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 131 2025-03-31T16:03:14.856068Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2025-03-31T16:03:14.856109Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:03:14.856138Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 134 2025-03-31T16:03:14.856224Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2025-03-31T16:03:14.856271Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:03:14.856295Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 137 2025-03-31T16:03:14.856416Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:03:14.856468Z node 1 :TX_DATASHARD TRACE: 
Complete execution for [1000005:149] at 9437184 on unit CompleteOperation 2025-03-31T16:03:14.856537Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 149] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 2 ms, propose latency: 3 ms 2025-03-31T16:03:14.856615Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2025-03-31T16:03:14.856668Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:03:14.856860Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:03:14.856891Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:151] at 9437184 on unit CompleteOperation 2025-03-31T16:03:14.856931Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 151] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 2 ms, propose latency: 3 ms 2025-03-31T16:03:14.856974Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2025-03-31T16:03:14.857000Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:03:14.857100Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:03:14.857127Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:152] at 9437184 on unit CompleteOperation 2025-03-31T16:03:14.857197Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 152] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 2 ms, propose latency: 3 ms 2025-03-31T16:03:14.857269Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2025-03-31T16:03:14.857301Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:03:14.857404Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:03:14.857450Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:154] at 9437184 on unit CompleteOperation 2025-03-31T16:03:14.857488Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 154] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 2 ms, propose latency: 3 ms 2025-03-31T16:03:14.857536Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2025-03-31T16:03:14.857560Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:03:14.857819Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:234:2227], Recipient [1:345:2313]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2025-03-31T16:03:14.857857Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:03:14.857890Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 149 2025-03-31T16:03:14.857996Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:234:2227], Recipient [1:345:2313]: {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 
TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2025-03-31T16:03:14.858032Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:03:14.858132Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 151 2025-03-31T16:03:14.858229Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:234:2227], Recipient [1:345:2313]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2025-03-31T16:03:14.858257Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:03:14.858310Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2025-03-31T16:03:14.858387Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:234:2227], Recipient [1:345:2313]: {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2025-03-31T16:03:14.858421Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:03:14.858461Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 |73.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut >> TPartBtreeIndexIteration::FewNodes_History [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_History_BTreeIndex [GOOD] >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites+EvWrite [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_Groups_FlatIndex >> TPartBtreeIndexIteration::FewNodes_Sticky >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites-EvWrite |73.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut >> TSchemeShardUserAttrsTest::UserConditionsAtCreateDropOps [GOOD] >> TSchemeShardUserAttrsTest::SetAttrs [GOOD] >> TBackupTests::BackupUuidColumn[Zstd] >> TSchemeShardUserAttrsTest::VariousUse >> TSchemeShardServerLess::StorageBilling [GOOD] >> KqpLimits::AffectedShardsLimit [GOOD] >> TBackupTests::ShouldSucceedOnSingleShardTable[Raw] >> KqpPragma::Auth [GOOD] >> TBackupTests::ShouldSucceedOnLargeData_MinWriteBatch >> DataShardVolatile::TwoAppendsMustBeVolatile-UseSink [GOOD] >> TBackupTests::BackupUuidColumn[Zstd] [GOOD] >> DataShardTxOrder::ForceOnlineBetweenOnline [GOOD] >> KqpPragma::MatchRecognizeWithTimeOrderRecoverer >> DataShardVolatile::VolatileCommitOnBlobStorageFailure+UseSink >> KqpQuery::QueryFromSqs [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_Groups_FlatIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_Groups_BTreeIndex >> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop-UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::SetAttrs [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:03:16.125609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 
172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:03:16.125714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:03:16.125756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:03:16.125796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:03:16.125851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:03:16.125880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:03:16.125966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:03:16.126060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:03:16.126573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:03:16.279823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:03:16.279904Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:16.291118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:03:16.291240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:03:16.291420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:03:16.302407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:03:16.302689Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:03:16.303465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:16.303736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:03:16.307584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:16.309064Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:16.309161Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:16.309385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:03:16.309442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:16.309491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:03:16.309659Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:03:16.319029Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:03:16.505257Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:03:16.505492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:16.505683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:03:16.505938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:03:16.512882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:16.536089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:16.536261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:03:16.536474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:16.536556Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:03:16.536599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:03:16.536636Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:03:16.539235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:16.539305Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:03:16.539367Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:03:16.541655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:16.541716Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:16.541773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:16.541847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:03:16.546001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:03:16.562899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:03:16.563194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:03:16.564416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:16.564603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:03:16.564678Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:16.565002Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:03:16.565081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:16.565271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:03:16.565356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:03:16.580518Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:16.580606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:16.580845Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:16.580903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:03:16.581165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:16.581223Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:03:16.581362Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:03:16.581401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:16.581443Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:03:16.581477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:16.581520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:03:16.581566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:16.581609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:03:16.581667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:03:16.581759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] 
was 2 2025-03-31T16:03:16.581801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:03:16.581836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:03:16.584411Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:03:16.584593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:03:16.584644Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... ationSubscriber for txId 102: satisfy waiter [1:318:2309] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 2025-03-31T16:03:16.793733Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:03:16.793955Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 271us result status StatusSuccess 2025-03-31T16:03:16.794372Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 2 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrA1" Value: "ValA1" } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 2025-03-31T16:03:16.797566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "" OperationType: ESchemeOpAlterUserAttributes AlterUserAttributes { PathName: "MyRoot" UserAttributes { Key: "AttrRoot" Value: "ValRoot" } } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:03:16.798006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: /MyRoot, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-31T16:03:16.798117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:03:16.798257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose 
status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:03:16.798304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 103:0, at schemeshard: 72057594046678944 2025-03-31T16:03:16.801375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusAccepted TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:03:16.801770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: MyRoot 2025-03-31T16:03:16.802013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-31T16:03:16.802058Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 103:0, at schemeshard: 72057594046678944 2025-03-31T16:03:16.802117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 103 ready parts: 1/1 2025-03-31T16:03:16.802294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 103 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:03:16.805028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2025-03-31T16:03:16.805205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000004 2025-03-31T16:03:16.805627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:16.805780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:03:16.805838Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 103:0, stepId:5000004, at schemeshard: 72057594046678944 2025-03-31T16:03:16.806069Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-03-31T16:03:16.806135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-31T16:03:16.806183Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-03-31T16:03:16.806223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-31T16:03:16.806285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:03:16.806343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2025-03-31T16:03:16.806405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at 
schemeshard: 72057594046678944 2025-03-31T16:03:16.806440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-31T16:03:16.806482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-03-31T16:03:16.806516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-03-31T16:03:16.806580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-03-31T16:03:16.806718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 103, publications: 1, subscribers: 0 2025-03-31T16:03:16.806822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2025-03-31T16:03:16.810010Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:16.810084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:16.810327Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:16.810396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 103, path id: 1 FAKE_COORDINATOR: Erasing txId 103 2025-03-31T16:03:16.811251Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 103 2025-03-31T16:03:16.811381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 103 2025-03-31T16:03:16.811440Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2025-03-31T16:03:16.811495Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2025-03-31T16:03:16.811536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:03:16.811641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2025-03-31T16:03:16.814271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-03-31T16:03:16.814690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-03-31T16:03:16.814752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-03-31T16:03:16.819962Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-03-31T16:03:16.820118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-03-31T16:03:16.820166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy 
waiter [1:337:2328] TestWaitNotification: OK eventTxId 103 2025-03-31T16:03:16.826746Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:03:16.827164Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 6.3ms result status StatusSuccess 2025-03-31T16:03:16.827718Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 2 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } UserAttributes { Key: "AttrRoot" Value: "ValRoot" } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |73.3%| [TA] {RESULT} $(B)/ydb/core/persqueue/ut/slow/test-results/unittest/{meta.json ... 
results_accumulator.log} |73.3%| [LD] {RESULT} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::DeclareAndDrop [GOOD] Test command err: 2025-03-31T16:03:11.355888Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004860626599506:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:11.355961Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001b6c/r3tmp/tmpVpjgVP/pdisk_1.dat 2025-03-31T16:03:12.042763Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:03:12.042886Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:03:12.056318Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:03:12.116654Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:10880 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-03-31T16:03:12.506179Z node 1 :TX_PROXY DEBUG: actor# [1:7488004860626599758:2103] Handle TEvNavigate describe path dc-1 2025-03-31T16:03:12.506271Z node 1 :TX_PROXY DEBUG: Actor# [1:7488004864921567351:2265] HANDLE EvNavigateScheme dc-1 2025-03-31T16:03:12.506452Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7488004860626599837:2137], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:03:12.506579Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488004864921567334:2260][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7488004860626599837:2137], cookie# 1 2025-03-31T16:03:12.509080Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488004864921567338:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488004864921567335:2260], cookie# 1 2025-03-31T16:03:12.509161Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488004864921567339:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488004864921567336:2260], cookie# 1 2025-03-31T16:03:12.509199Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488004864921567340:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488004864921567337:2260], cookie# 1 2025-03-31T16:03:12.509237Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488004860626599454:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488004864921567338:2260], cookie# 1 2025-03-31T16:03:12.509261Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488004860626599457:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488004864921567339:2260], cookie# 1 2025-03-31T16:03:12.509276Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488004860626599460:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# 
[1:7488004864921567340:2260], cookie# 1 2025-03-31T16:03:12.509324Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488004864921567338:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488004860626599454:2049], cookie# 1 2025-03-31T16:03:12.509341Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488004864921567339:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488004860626599457:2052], cookie# 1 2025-03-31T16:03:12.509354Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488004864921567340:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488004860626599460:2055], cookie# 1 2025-03-31T16:03:12.509392Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488004864921567334:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488004864921567335:2260], cookie# 1 2025-03-31T16:03:12.509419Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488004864921567334:2260][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-03-31T16:03:12.509446Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488004864921567334:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488004864921567336:2260], cookie# 1 2025-03-31T16:03:12.509467Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488004864921567334:2260][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-03-31T16:03:12.509495Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488004864921567334:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488004864921567337:2260], cookie# 1 2025-03-31T16:03:12.509507Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488004864921567334:2260][/dc-1] Unexpected sync response: sender# [1:7488004864921567337:2260], cookie# 1 2025-03-31T16:03:12.509576Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7488004860626599837:2137], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-03-31T16:03:12.520313Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7488004860626599837:2137], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7488004864921567334:2260] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-31T16:03:12.520454Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7488004860626599837:2137], cacheItem# { Subscriber: { Subscriber: [1:7488004864921567334:2260] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: 
true Partial: 0 } 2025-03-31T16:03:12.524191Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7488004864921567353:2267], recipient# [1:7488004864921567351:2265], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-31T16:03:12.524289Z node 1 :TX_PROXY DEBUG: Actor# [1:7488004864921567351:2265] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-31T16:03:12.562570Z node 1 :TX_PROXY DEBUG: Actor# [1:7488004864921567351:2265] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-03-31T16:03:12.566118Z node 1 :TX_PROXY DEBUG: Actor# [1:7488004864921567351:2265] Handle TEvDescribeSchemeResult Forward to# [1:7488004864921567350:2264] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
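
The subscriber/replica exchange in the trace above shows how a scheme board sync is decided: the subscriber fans one TEvSyncVersionRequest out per replica and declares the sync done as soon as a strict majority has answered ("size# 3, half# 1", done at "successes# 2"); the late third reply is then logged as "Unexpected sync response" and dropped. Below is a minimal sketch of that counting rule, assuming only what the log shows — the type and member names are illustrative stand-ins, not the real NKikimr ones.

    #include <cstddef>
    #include <cstdio>

    // Illustrative stand-in for the subscriber's per-cookie sync state.
    struct TSyncState {
        std::size_t Size;           // replicas asked, "size# 3"
        std::size_t Successes = 0;  // replies received so far
        std::size_t Half() const { return Size / 2; }  // "half# 1" for size 3

        // Returns true when this reply completes the sync; replies arriving
        // after that point are the log's "Unexpected sync response".
        bool OnSyncResponse() {
            if (Successes > Half())
                return false;           // sync already done, ignore late reply
            ++Successes;
            return Successes > Half();  // done at 2 of 3, as in the log
        }
    };

    int main() {
        TSyncState sync{3};
        std::printf("%d %d %d\n",
                    sync.OnSyncResponse(),   // 0: "Sync is in progress", successes# 1
                    sync.OnSyncResponse(),   // 1: "Sync is done", successes# 2
                    sync.OnSyncResponse());  // 0: "Unexpected sync response"
    }
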
2025-03-31T16:03:12.632158Z node 1 :TX_PROXY DEBUG: actor# [1:7488004860626599758:2103] Handle TEvProposeTransaction 2025-03-31T16:03:12.632205Z node 1 :TX_PROXY DEBUG: actor# [1:7488004860626599758:2103] TxId# 281474976710657 ProcessProposeTransaction 2025-03-31T16:03:12.632316Z node 1 :TX_PROXY DEBUG: actor# [1:7488004860626599758:2103] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7488004864921567358:2271] 2025-03-31T16:03:12.738221Z node 1 :TX_PROXY DEBUG: Actor# [1:7488004864921567358:2271] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" PeerName: "" 2025-03-31T16:03:12.738306Z node 1 :TX_PROXY DEBUG: Actor# [1:7488004864921567358:2271] txid# 281474976710657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T16:03:12.738406Z node 1 :TX_PROXY DEBUG: Actor# [1:7488004864921567358:2271] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:03:12.738532Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7488004860626599837:2137], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: true SyncVersion: true Statu ... 6:03:12.834968Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 18446744073709551615 2025-03-31T16:03:12.834991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-03-31T16:03:12.835140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710659, subscribers: 1 2025-03-31T16:03:12.835178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [1:7488004864921567412:2298] 2025-03-31T16:03:12.835525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710659 2025-03-31T16:03:12.835663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-03-31T16:03:12.835704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-03-31T16:03:12.835747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-03-31T16:03:12.835896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 TClient::Ls request: /dc-1 2025-03-31T16:03:12.835913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-03-31T16:03:12.835970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-03-31T16:03:12.836406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710659 2025-03-31T16:03:12.836484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-03-31T16:03:12.836540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-03-31T16:03:12.837401Z node 1 :TX_PROXY DEBUG: actor# [1:7488004860626599758:2103] Handle TEvNavigate describe path /dc-1 2025-03-31T16:03:12.837494Z node 1 :TX_PROXY DEBUG: Actor# [1:7488004864921567428:2325] HANDLE EvNavigateScheme /dc-1 2025-03-31T16:03:12.837657Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7488004860626599837:2137], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:03:12.837772Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488004864921567334:2260][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7488004860626599837:2137], cookie# 4 2025-03-31T16:03:12.837839Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488004864921567338:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488004864921567335:2260], cookie# 4 2025-03-31T16:03:12.837870Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488004864921567339:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488004864921567336:2260], cookie# 4 2025-03-31T16:03:12.837885Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488004864921567340:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488004864921567337:2260], cookie# 4 2025-03-31T16:03:12.837912Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488004860626599454:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488004864921567338:2260], cookie# 4 2025-03-31T16:03:12.837962Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488004860626599457:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488004864921567339:2260], cookie# 4 2025-03-31T16:03:12.838002Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488004860626599460:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488004864921567340:2260], cookie# 4 2025-03-31T16:03:12.838051Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488004864921567338:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 7 Partial: 0 }: sender# [1:7488004860626599454:2049], cookie# 4 2025-03-31T16:03:12.838080Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488004864921567339:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 7 Partial: 0 }: sender# [1:7488004860626599457:2052], cookie# 4 2025-03-31T16:03:12.838129Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488004864921567340:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 7 Partial: 0 }: sender# [1:7488004860626599460:2055], cookie# 4 2025-03-31T16:03:12.838163Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488004864921567334:2260][/dc-1] Handle 
NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 7 Partial: 0 }: sender# [1:7488004864921567335:2260], cookie# 4 2025-03-31T16:03:12.838189Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488004864921567334:2260][/dc-1] Sync is in progress: cookie# 4, size# 3, half# 1, successes# 1, faulires# 0 2025-03-31T16:03:12.838235Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488004864921567334:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 7 Partial: 0 }: sender# [1:7488004864921567336:2260], cookie# 4 2025-03-31T16:03:12.838260Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488004864921567334:2260][/dc-1] Sync is done: cookie# 4, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-03-31T16:03:12.838280Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488004864921567334:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 7 Partial: 0 }: sender# [1:7488004864921567337:2260], cookie# 4 2025-03-31T16:03:12.838297Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488004864921567334:2260][/dc-1] Unexpected sync response: sender# [1:7488004864921567337:2260], cookie# 4 2025-03-31T16:03:12.838344Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7488004860626599837:2137], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-03-31T16:03:12.838439Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7488004860626599837:2137], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7488004864921567334:2260] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1743436992801 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-31T16:03:12.838538Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7488004860626599837:2137], cacheItem# { Subscriber: { Subscriber: [1:7488004864921567334:2260] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1743436992801 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 4 IsSync: true Partial: 0 } 2025-03-31T16:03:12.838722Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7488004864921567429:2326], recipient# [1:7488004864921567428:2325], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-31T16:03:12.838779Z node 1 :TX_PROXY DEBUG: Actor# 
[1:7488004864921567428:2325] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743436992801 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version... (TRUNCATED) 2025-03-31T16:03:12.838866Z node 1 :TX_PROXY DEBUG: Actor# [1:7488004864921567428:2325] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1" Options { ShowPrivateTable: true } 2025-03-31T16:03:12.839515Z node 1 :TX_PROXY DEBUG: Actor# [1:7488004864921567428:2325] Handle TEvDescribeSchemeResult Forward to# [1:7488004864921567427:2324] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743436992801 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::StorageBilling [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-31T16:02:27.583254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:02:27.583375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 
15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:02:27.583447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:02:27.583516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:02:27.583572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:02:27.583621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:02:27.583687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:02:27.583804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:02:27.584182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:02:27.695921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:02:27.696033Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:02:27.707227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:02:27.707757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:02:27.707971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:02:27.725306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:02:27.725711Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:02:27.726526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:02:27.726985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:02:27.730643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:02:27.732473Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:02:27.732557Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:02:27.732716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:02:27.732764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:02:27.732804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:02:27.733039Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:02:27.750486Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-03-31T16:02:27.899226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: 
"pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:02:27.899530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:02:27.899814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:02:27.900120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:02:27.900190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:02:27.909321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:02:27.909501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:02:27.909768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:02:27.909855Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:02:27.909900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:02:27.909935Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:02:27.914533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:02:27.914610Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:02:27.914666Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:02:27.920331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:02:27.920452Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:02:27.920517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:02:27.920600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:02:27.924772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:02:27.927446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:02:27.927689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:02:27.928910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:02:27.929081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:02:27.929145Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:02:27.929469Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:02:27.929523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:02:27.929700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:02:27.929787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:02:27.932812Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:02:27.932885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:02:27.933088Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:02:27.933131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:02:27.933673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:02:27.933731Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:02:27.933830Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:02:27.933888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:02:27.933932Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:02:27.934110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:02:27.934156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:02:27.934200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:02:27.934238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:02:27.934285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:02:27.934372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:02:27.934413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:02:27.934445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:02:27.936704Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:02:27.936848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:02:27.936885Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... :0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2025-03-31T16:03:16.321787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 107, done: 0, blocked: 1 2025-03-31T16:03:16.321869Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 107:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 107 Name: RenamePathBarrier }, at tablet# 72075186233409549 2025-03-31T16:03:16.322014Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 137 -> 129 2025-03-31T16:03:16.322182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409549, LocalPathId: 1] was 4 2025-03-31T16:03:16.322278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409549, LocalPathId: 2] was 3 2025-03-31T16:03:16.325702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72075186233409549 2025-03-31T16:03:16.327201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72075186233409549 2025-03-31T16:03:16.327547Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409549 2025-03-31T16:03:16.327628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409549, txId: 107, path id: [OwnerId: 72075186233409549, LocalPathId: 1] 2025-03-31T16:03:16.327820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409549, txId: 107, path id: [OwnerId: 72075186233409549, LocalPathId: 2] 2025-03-31T16:03:16.328033Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409549 2025-03-31T16:03:16.328105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:661:2573], at schemeshard: 72075186233409549, txId: 107, path id: 1 2025-03-31T16:03:16.328155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:661:2573], at schemeshard: 72075186233409549, txId: 107, path id: 2 2025-03-31T16:03:16.328691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72075186233409549 2025-03-31T16:03:16.328771Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 107:0 ProgressState at tablet: 72075186233409549 2025-03-31T16:03:16.328881Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 107:0, at schemeshard: 72075186233409549 2025-03-31T16:03:16.328923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 107:0, datashard: 72075186233409552, at schemeshard: 72075186233409549 2025-03-31T16:03:16.328983Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 129 -> 240 2025-03-31T16:03:16.330002Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409549, msg: Owner: 72075186233409549 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72075186233409549, cookie: 107 2025-03-31T16:03:16.330111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409549, msg: Owner: 72075186233409549 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72075186233409549, cookie: 107 2025-03-31T16:03:16.330157Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409549, txId: 107 2025-03-31T16:03:16.330203Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409549, txId: 107, pathId: [OwnerId: 72075186233409549, LocalPathId: 1], version: 9 2025-03-31T16:03:16.330256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409549, LocalPathId: 1] was 5 2025-03-31T16:03:16.331368Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409549, msg: Owner: 72075186233409549 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409549, cookie: 107 2025-03-31T16:03:16.331487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409549, msg: Owner: 72075186233409549 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409549, cookie: 107 2025-03-31T16:03:16.331525Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409549, txId: 107 2025-03-31T16:03:16.331560Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409549, txId: 107, pathId: [OwnerId: 72075186233409549, LocalPathId: 2], version: 18446744073709551615 2025-03-31T16:03:16.331596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409549, LocalPathId: 2] was 4 2025-03-31T16:03:16.331679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 0/1, is published: true 2025-03-31T16:03:16.335551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72075186233409549 2025-03-31T16:03:16.335627Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 107:0 ProgressState, at schemeshard: 72075186233409549 2025-03-31T16:03:16.336189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72075186233409549, LocalPathId: 2] was 3 2025-03-31T16:03:16.336368Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#107:0 progress is 1/1 2025-03-31T16:03:16.336412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-03-31T16:03:16.336457Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#107:0 progress is 1/1 2025-03-31T16:03:16.336495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-03-31T16:03:16.336541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 1/1, is published: true 2025-03-31T16:03:16.336620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:802:2683] message: TxId: 107 2025-03-31T16:03:16.336671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 
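
The TEvUpdateAck handling above ("Publication still in progress", then AckPublish for a path/version pair, then "Publication complete, notify & remove") is effectively reference counting of the scheme-board publications still outstanding for a transaction. A condensed sketch of that bookkeeping follows; the names are hypothetical stand-ins for what the schemeshard's TTxAckPublishToSchemeBoard does per the log.

    #include <algorithm>
    #include <cstdint>
    #include <map>
    #include <vector>

    using TTxId   = std::uint64_t;
    using TPathId = std::uint64_t;  // simplified; the log keys by (OwnerId, LocalPathId)

    struct TPublicationTracker {
        // txId -> paths whose new version has not been acked by the populator yet.
        std::map<TTxId, std::vector<TPathId>> InFlight;

        // "Publication still in progress, tx: ..., publications: N, subscribers: 0"
        void StartPublishing(TTxId txId, std::vector<TPathId> paths) {
            InFlight[txId] = std::move(paths);
        }

        // "AckPublish, ... txId: ..., pathId: ...": returns true when the last
        // ack arrives -> "Publication complete, notify & remove".
        bool OnUpdateAck(TTxId txId, TPathId pathId) {
            auto it = InFlight.find(txId);
            if (it == InFlight.end())
                return false;  // ack for an already-completed tx
            auto& paths = it->second;
            paths.erase(std::remove(paths.begin(), paths.end(), pathId), paths.end());
            if (!paths.empty())
                return false;  // still "Publication in-flight"
            InFlight.erase(it);
            return true;       // notify waiters, drop the tx record
        }
    };
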
2025-03-31T16:03:16.336718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 107:0 2025-03-31T16:03:16.336753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 107:0 2025-03-31T16:03:16.336910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 2] was 2 2025-03-31T16:03:16.339631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409549, cookie: 107 2025-03-31T16:03:16.340295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409549, cookie: 107 2025-03-31T16:03:16.341402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2025-03-31T16:03:16.341463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:2212:4058] TestWaitNotification: OK eventTxId 107 2025-03-31T16:03:16.361571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72075186233409549, message: Source { RawX1: 776 RawX2: 4294969961 } TabletId: 72075186233409552 State: 4 2025-03-31T16:03:16.361692Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409552, state: Offline, at schemeshard: 72075186233409549 2025-03-31T16:03:16.364150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72075186233409549:4 hive 72057594037968897 at ss 72075186233409549 2025-03-31T16:03:16.365075Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72075186233409549 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409552 2025-03-31T16:03:16.368368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72075186233409549 ShardLocalIdx: 4, at schemeshard: 72075186233409549 2025-03-31T16:03:16.368829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72075186233409549, LocalPathId: 2] was 1 2025-03-31T16:03:16.369932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72075186233409549 2025-03-31T16:03:16.370000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72075186233409549, LocalPathId: 2], at schemeshard: 72075186233409549 2025-03-31T16:03:16.370080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72075186233409549, LocalPathId: 1] was 4 2025-03-31T16:03:16.373363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72075186233409549:4 2025-03-31T16:03:16.373452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72075186233409549:4 tabletId 72075186233409552 2025-03-31T16:03:16.374136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72075186233409549 2025-03-31T16:03:16.522410Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409549 2025-03-31T16:03:16.522529Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409549 2025-03-31T16:03:16.522606Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409549 2025-03-31T16:03:16.522681Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409546 2025-03-31T16:03:16.522778Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409546 2025-03-31T16:03:16.522835Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409546 2025-03-31T16:03:16.522884Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2025-03-31T16:03:16.522918Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-03-31T16:03:16.522964Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-03-31T16:03:16.599697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:03:16.600099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling: make a bill, record: '{"usage":{"start":1600452180,"quantity":59,"finish":1600452239,"type":"delta","unit":"byte*second"},"tags":{"ydb_size":0},"id":"72057594046678944-3-1600452180-1600452239-0","cloud_id":"CLOUD_ID_VAL","source_wt":1600452240,"source_id":"sless-docapi-ydb-storage","resource_id":"DATABASE_ID_VAL","schema":"ydb.serverless.v1","folder_id":"FOLDER_ID_VAL","version":"1.0.0"} ', schemeshardId: 72075186233409549, domainId: [OwnerId: 72057594046678944, LocalPathId: 3], now: 2020-09-18T18:04:00.027000Z, LastBillTime: 2020-09-18T18:02:00.000000Z, lastBilled: 2020-09-18T18:02:00.000000Z--2020-09-18T18:02:59.000000Z, toBill: 2020-09-18T18:03:00.000000Z--2020-09-18T18:03:59.000000Z, next retry at: 2020-09-18T18:05:00.000000Z 2025-03-31T16:03:16.605851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete grabMeteringMessage has happened 2025-03-31T16:03:16.606148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TFakeMetering got TEvMetering::TEvWriteMeteringJson ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::UserConditionsAtCreateDropOps [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:03:15.977812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:03:15.977932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:03:15.977975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:03:15.978016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:03:15.978064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:03:15.978100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:03:15.978219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 
15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:03:15.978363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:03:15.978734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:03:16.115442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:03:16.115515Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:16.128767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:03:16.128870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:03:16.129041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:03:16.152045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:03:16.152384Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:03:16.153097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:16.153384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:03:16.156263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:16.157502Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:16.157567Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:16.157746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:03:16.157802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:16.157842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:03:16.158003Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:03:16.184112Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:03:16.387914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:03:16.388225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:16.388480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:03:16.388740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:03:16.388801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, 
but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:16.392090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:16.392287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:03:16.392528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:16.392595Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:03:16.392637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:03:16.392680Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:03:16.395729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:16.395812Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:03:16.395866Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:03:16.398648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:16.398736Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:16.398808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:16.398897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:03:16.409661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:03:16.412407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:03:16.412676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:03:16.413913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:16.414077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:03:16.414147Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 
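
The propose/plan handshake above has a fixed shape: the schemeshard hands the coordinator a transaction with an allowed step window (MinStep: 0, MaxStep: 18446744073709551615) plus the affected tablet set, and the coordinator replies with a concrete Step that never moves backwards ("advance: minStep5000001 State->FrontStep: 0"). In these unit tests the FAKE_COORDINATOR appears to hand out consecutive steps above a 5000000 base. A guess at that selection rule, inferred purely from the numbers in this log rather than from the coordinator sources:

    #include <algorithm>
    #include <cstdint>

    using TStep = std::uint64_t;

    // Sketch of the step assignment the FAKE_COORDINATOR traces suggest.
    struct TFakeCoordinatorSketch {
        TStep FrontStep = 5000000;  // test base; the first tx lands at 5000001

        // Pick a plan step inside [minStep, maxStep]; 0 means the window
        // was missed and the tx could not be planned.
        TStep PlanStep(TStep minStep, TStep maxStep) {
            TStep step = std::max<TStep>(minStep, FrontStep + 1);
            if (step > maxStep)
                return 0;
            FrontStep = step;  // the front step only ever advances
            return step;
        }
    };
    // With FrontStep at 5000003, a tx with minStep 0 is planned at 5000004,
    // matching "Add transaction: 103 at step: 5000004" earlier in the log.
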
2025-03-31T16:03:16.414453Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:03:16.414507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:16.414711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:03:16.414820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:03:16.418009Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:16.418093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:16.418294Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:16.418354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:03:16.418592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:16.418638Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:03:16.418747Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:03:16.418778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:16.418848Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:03:16.418887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:16.418925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:03:16.418965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:16.419004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:03:16.419053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:03:16.419152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:03:16.419195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:03:16.419241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:03:16.421644Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:03:16.421793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:03:16.421835Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
5 msg type: 269090816 2025-03-31T16:03:16.658799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 105, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 105 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 105 at step: 5000005 2025-03-31T16:03:16.662854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:16.663044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 105 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:03:16.663142Z node 1 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 105:0, step: 5000005, at schemeshard: 72057594046678944 2025-03-31T16:03:16.663286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 105:0, at schemeshard: 72057594046678944 2025-03-31T16:03:16.663360Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-03-31T16:03:16.663411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-03-31T16:03:16.663458Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-03-31T16:03:16.664657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-03-31T16:03:16.664758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-03-31T16:03:16.664858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-03-31T16:03:16.664903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2025-03-31T16:03:16.664998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-03-31T16:03:16.665075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2025-03-31T16:03:16.665122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2025-03-31T16:03:16.665214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-03-31T16:03:16.665268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 105, publications: 2, subscribers: 0 2025-03-31T16:03:16.665311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2025-03-31T16:03:16.665346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-03-31T16:03:16.672780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-03-31T16:03:16.673992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-03-31T16:03:16.676391Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:16.676471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at 
schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:16.678914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-03-31T16:03:16.679191Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:16.679236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 105, path id: 1 2025-03-31T16:03:16.679472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 105, path id: 4 FAKE_COORDINATOR: Erasing txId 105 2025-03-31T16:03:16.680271Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 105 2025-03-31T16:03:16.680377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 105 2025-03-31T16:03:16.680425Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 105 2025-03-31T16:03:16.680501Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-03-31T16:03:16.680554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-03-31T16:03:16.681169Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-03-31T16:03:16.685406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-03-31T16:03:16.685481Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2025-03-31T16:03:16.685519Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-03-31T16:03:16.685578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-03-31T16:03:16.685727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2025-03-31T16:03:16.686188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-31T16:03:16.686241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-03-31T16:03:16.686343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-03-31T16:03:16.689545Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-03-31T16:03:16.696261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-03-31T16:03:16.696432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-03-31T16:03:16.696944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-03-31T16:03:16.697000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-03-31T16:03:16.697602Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-03-31T16:03:16.697771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-03-31T16:03:16.697819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [1:399:2390] TestWaitNotification: OK eventTxId 105 2025-03-31T16:03:16.698681Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirC" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:03:16.698942Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirC" took 272us result status StatusPathDoesNotExist 2025-03-31T16:03:16.699172Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/DirC\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/DirC" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-03-31T16:03:16.699883Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:03:16.700131Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 249us result status StatusSuccess 2025-03-31T16:03:16.700616Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 11 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 11 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 9 SubDomainVersion: 
1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "DirB" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-72 [GOOD] Test command err: Starting YDB, grpc: 14106, msgbus: 22633 2025-03-31T15:58:38.735472Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488003686468410233:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:58:38.735523Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0015df/r3tmp/tmpean0Co/pdisk_1.dat 2025-03-31T15:58:39.602225Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:58:39.675569Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:58:39.675686Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:58:39.694172Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14106, node 1 2025-03-31T15:58:40.059767Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T15:58:40.059791Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T15:58:40.059800Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T15:58:40.059904Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22633 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-03-31T15:58:40.547560Z node 1 :TX_PROXY DEBUG: actor# [1:7488003686468410480:2125] Handle TEvNavigate describe path dc-1 2025-03-31T15:58:40.547615Z node 1 :TX_PROXY DEBUG: Actor# [1:7488003695058345583:2447] HANDLE EvNavigateScheme dc-1 2025-03-31T15:58:40.551582Z node 1 :TX_PROXY DEBUG: Actor# [1:7488003695058345583:2447] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-31T15:58:40.641016Z node 1 :TX_PROXY DEBUG: Actor# [1:7488003695058345583:2447] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-03-31T15:58:40.701968Z node 1 :TX_PROXY DEBUG: Actor# [1:7488003695058345583:2447] Handle TEvDescribeSchemeResult Forward to# [1:7488003695058345582:2446] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-03-31T15:58:40.746984Z node 1 :TX_PROXY DEBUG: actor# [1:7488003686468410480:2125] Handle TEvProposeTransaction 2025-03-31T15:58:40.747023Z node 1 :TX_PROXY DEBUG: actor# [1:7488003686468410480:2125] TxId# 281474976715657 ProcessProposeTransaction 2025-03-31T15:58:40.747153Z node 1 :TX_PROXY DEBUG: actor# [1:7488003686468410480:2125] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7488003695058345591:2454] 2025-03-31T15:58:40.931005Z node 1 :TX_PROXY DEBUG: Actor# [1:7488003695058345591:2454] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-31T15:58:40.943172Z node 1 :TX_PROXY DEBUG: Actor# [1:7488003695058345591:2454] txid# 281474976715657 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-31T15:58:40.943216Z node 1 :TX_PROXY DEBUG: Actor# [1:7488003695058345591:2454] txid# 281474976715657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T15:58:40.943298Z node 1 :TX_PROXY DEBUG: Actor# [1:7488003695058345591:2454] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-03-31T15:58:40.943644Z node 1 :TX_PROXY DEBUG: Actor# [1:7488003695058345591:2454] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T15:58:40.943795Z node 1 :TX_PROXY DEBUG: Actor# [1:7488003695058345591:2454] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-03-31T15:58:40.943842Z node 1 :TX_PROXY DEBUG: Actor# [1:7488003695058345591:2454] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-03-31T15:58:40.943956Z node 1 :TX_PROXY DEBUG: Actor# [1:7488003695058345591:2454] txid# 281474976715657 HANDLE EvClientConnected 2025-03-31T15:58:40.947646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T15:58:40.964722Z node 1 :TX_PROXY DEBUG: Actor# [1:7488003695058345591:2454] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-03-31T15:58:40.964778Z node 1 :TX_PROXY DEBUG: Actor# [1:7488003695058345591:2454] txid# 281474976715657 SEND to# [1:7488003695058345590:2453] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} waiting... 
2025-03-31T15:58:41.017034Z node 1 :TX_PROXY DEBUG: actor# [1:7488003686468410480:2125] Handle TEvProposeTransaction 2025-03-31T15:58:41.017063Z node 1 :TX_PROXY DEBUG: actor# [1:7488003686468410480:2125] TxId# 281474976715658 ProcessProposeTransaction 2025-03-31T15:58:41.017096Z node 1 :TX_PROXY DEBUG: actor# [1:7488003686468410480:2125] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [1:7488003699353312933:2492] 2025-03-31T15:58:41.019360Z node 1 :TX_PROXY DEBUG: Actor# [1:7488003699353312933:2492] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-31T15:58:41.019417Z node 1 :TX_PROXY DEBUG: Actor# [1:7488003699353312933:2492] txid# 281474976715658 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-31T15:58:41.019456Z node 1 :TX_PROXY DEBUG: Actor# [1:7488003699353312933:2492] txid# 281474976715658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T15:58:41.019518Z node 1 :TX_PROXY DEBUG: Actor# [1:7488003699353312933:2492] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2025-03-31T15:58:41.019781Z node 1 :TX_PROXY DEBUG: Actor# [1:7488003699353312933:2492] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T15:58:41.019854Z node 1 :TX_PROXY DEBUG: Actor# [1:7488003699353312933:2492] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T15:58:41.019889Z node 1 :TX_PROXY DEBUG: Actor# [1:7488003699353312933:2492] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2025-03-31T15:58:41.020005Z node 1 :TX_PROXY DEBUG: Actor# [1:7488003699353312933:2492] txid# 281474976715658 HANDLE EvClientConnected 2025-03-31T15:58:41.020421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T15:58:41.028242Z node 1 :TX_PROXY DEBUG: Actor# [1:7488003699353312933:2492] txid# 281474976715658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715658} 2025-03-31T15:58:41.028287Z node 1 :TX_PROXY DEBUG: Actor# [1:7488003699353312933:2492] txid# 281474976715658 SEND to# [1:7488003699353312932:2491] Source {TEvProposeTransactionStatus txid# 281474976715658 Status# 48} 2025-03-31T15:58:43.288887Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003707943247624:2340], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:43.288994Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488003707943247616:2337], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:43.289175Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:58:43.289604Z node 1 :TX_PROXY DEBUG: actor# [1:7488003686468410480:2125] Handle TEvProposeTransaction 2025-03-31T15:58:43.289650Z node 1 :TX_PROXY DEBUG: actor# [1:7488003686468410480:2125] TxId# 281474976715659 ProcessProposeTransaction 2025-03-31 ... wd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:43392" 2025-03-31T16:02:35.466440Z node 59 :TX_PROXY DEBUG: Actor# [59:7488004705855851278:2601] txid# 281474976715661 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-31T16:02:35.466462Z node 59 :TX_PROXY DEBUG: Actor# [59:7488004705855851278:2601] txid# 281474976715661 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T16:02:35.466513Z node 59 :TX_PROXY DEBUG: Actor# [59:7488004705855851278:2601] txid# 281474976715661 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:02:35.466825Z node 59 :TX_PROXY DEBUG: Actor# [59:7488004705855851278:2601] txid# 281474976715661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:02:35.466949Z node 59 :TX_PROXY DEBUG: Actor# [59:7488004705855851278:2601] HANDLE EvNavigateKeySetResult, txid# 281474976715661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T16:02:35.467022Z node 59 :TX_PROXY DEBUG: Actor# [59:7488004705855851278:2601] txid# 281474976715661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715661 TabletId# 72057594046644480} 2025-03-31T16:02:35.467194Z node 59 :TX_PROXY DEBUG: Actor# [59:7488004705855851278:2601] txid# 281474976715661 HANDLE EvClientConnected 2025-03-31T16:02:35.478602Z node 59 :TX_PROXY DEBUG: Actor# [59:7488004705855851278:2601] txid# 281474976715661 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715661} 2025-03-31T16:02:35.478662Z node 59 :TX_PROXY DEBUG: Actor# [59:7488004705855851278:2601] txid# 281474976715661 SEND to# [59:7488004705855851277:2334] Source {TEvProposeTransactionStatus txid# 281474976715661 Status# 48} 2025-03-31T16:02:35.624531Z node 59 :TX_PROXY DEBUG: actor# [59:7488004684381013899:2112] Handle TEvProposeTransaction 2025-03-31T16:02:35.624565Z node 59 :TX_PROXY DEBUG: actor# [59:7488004684381013899:2112] TxId# 281474976715662 ProcessProposeTransaction 2025-03-31T16:02:35.624614Z node 59 :TX_PROXY DEBUG: actor# [59:7488004684381013899:2112] Cookie# 0 userReqId# "" txid# 281474976715662 SEND to# [59:7488004705855851298:2615] 2025-03-31T16:02:35.627440Z node 59 :TX_PROXY DEBUG: Actor# [59:7488004705855851298:2615] txid# 281474976715662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\022\010\001\022\016\032\014ordinaryuser\n\032\010\000\022\026\010\001\020\200\200\002\032\014ordinaryuser \000" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:43404" 2025-03-31T16:02:35.627522Z node 59 
:TX_PROXY DEBUG: Actor# [59:7488004705855851298:2615] txid# 281474976715662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-31T16:02:35.627546Z node 59 :TX_PROXY DEBUG: Actor# [59:7488004705855851298:2615] txid# 281474976715662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T16:02:35.627602Z node 59 :TX_PROXY DEBUG: Actor# [59:7488004705855851298:2615] txid# 281474976715662 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:02:35.627925Z node 59 :TX_PROXY DEBUG: Actor# [59:7488004705855851298:2615] txid# 281474976715662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:02:35.628037Z node 59 :TX_PROXY DEBUG: Actor# [59:7488004705855851298:2615] HANDLE EvNavigateKeySetResult, txid# 281474976715662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T16:02:35.628100Z node 59 :TX_PROXY DEBUG: Actor# [59:7488004705855851298:2615] txid# 281474976715662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715662 TabletId# 72057594046644480} 2025-03-31T16:02:35.628294Z node 59 :TX_PROXY DEBUG: Actor# [59:7488004705855851298:2615] txid# 281474976715662 HANDLE EvClientConnected 2025-03-31T16:02:35.628829Z node 59 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:02:35.632893Z node 59 :TX_PROXY DEBUG: Actor# [59:7488004705855851298:2615] txid# 281474976715662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715662} 2025-03-31T16:02:35.632973Z node 59 :TX_PROXY DEBUG: Actor# [59:7488004705855851298:2615] txid# 281474976715662 SEND to# [59:7488004705855851297:2348] Source {TEvProposeTransactionStatus txid# 281474976715662 Status# 48} 2025-03-31T16:02:35.702454Z node 59 :TX_PROXY DEBUG: actor# [59:7488004684381013899:2112] Handle TEvProposeTransaction 2025-03-31T16:02:35.702516Z node 59 :TX_PROXY DEBUG: actor# [59:7488004684381013899:2112] TxId# 281474976715663 ProcessProposeTransaction 2025-03-31T16:02:35.702584Z node 59 :TX_PROXY DEBUG: actor# [59:7488004684381013899:2112] Cookie# 0 userReqId# "" txid# 281474976715663 SEND to# [59:7488004705855851331:2634] 2025-03-31T16:02:35.705620Z node 59 :TX_PROXY DEBUG: Actor# [59:7488004705855851331:2634] txid# 281474976715663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:43412" 2025-03-31T16:02:35.705720Z node 59 :TX_PROXY DEBUG: Actor# [59:7488004705855851331:2634] txid# 281474976715663 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-31T16:02:35.705746Z node 59 :TX_PROXY DEBUG: Actor# [59:7488004705855851331:2634] txid# 281474976715663 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T16:02:35.705804Z node 59 :TX_PROXY DEBUG: Actor# [59:7488004705855851331:2634] txid# 281474976715663 
TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:02:35.706427Z node 59 :TX_PROXY DEBUG: Actor# [59:7488004705855851331:2634] txid# 281474976715663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:02:35.706559Z node 59 :TX_PROXY DEBUG: Actor# [59:7488004705855851331:2634] HANDLE EvNavigateKeySetResult, txid# 281474976715663 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T16:02:35.706620Z node 59 :TX_PROXY DEBUG: Actor# [59:7488004705855851331:2634] txid# 281474976715663 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715663 TabletId# 72057594046644480} 2025-03-31T16:02:35.706788Z node 59 :TX_PROXY DEBUG: Actor# [59:7488004705855851331:2634] txid# 281474976715663 HANDLE EvClientConnected 2025-03-31T16:02:35.716565Z node 59 :TX_PROXY DEBUG: Actor# [59:7488004705855851331:2634] txid# 281474976715663 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715663} 2025-03-31T16:02:35.716634Z node 59 :TX_PROXY DEBUG: Actor# [59:7488004705855851331:2634] txid# 281474976715663 SEND to# [59:7488004705855851330:2350] Source {TEvProposeTransactionStatus txid# 281474976715663 Status# 48} 2025-03-31T16:02:35.787577Z node 59 :TX_PROXY DEBUG: actor# [59:7488004684381013899:2112] Handle TEvProposeTransaction 2025-03-31T16:02:35.787625Z node 59 :TX_PROXY DEBUG: actor# [59:7488004684381013899:2112] TxId# 281474976715664 ProcessProposeTransaction 2025-03-31T16:02:35.787707Z node 59 :TX_PROXY DEBUG: actor# [59:7488004684381013899:2112] Cookie# 0 userReqId# "" txid# 281474976715664 SEND to# [59:7488004705855851359:2646] 2025-03-31T16:02:35.791179Z node 59 :TX_PROXY DEBUG: Actor# [59:7488004705855851359:2646] txid# 281474976715664 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\014ordinaryuser\022\030\022\026\n\024all-users@well-known\032\334\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzQ4MDE1NSwiaWF0IjoxNzQzNDM2OTU1LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.eWmJJU_Tsq7iNc5IMAOP-2bf82FHneDRguKAsQNEs4vOIt6BVl27duhqoClQNQum-lIco-3W_ap1fTgWu5Nmo2HhuJLONhImkxP1uOt3LvkiGcZSp4H2xRlvb_Z0i7_EN3Ds-m00NavQrYjVuLxMJBd4PyA3_lcJKN7AB9Tl2K9bnXbTXAap2C9_27J_0nkXucrBAQdUWDu05X-J59ab-OkIE-mzv9dLzp5WVVTpfC5eB-sdgnwE3akQkehWhc8S6zQwO_TjKHGr_zA30aElfWqdoLgmVHFlw3_OUnepdJwj_j70BYy5sCpSIlo_IJbeQakLzqW8jlhmM7hRBW1ddQ\"\005Login*\210\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzQ4MDE1NSwiaWF0IjoxNzQzNDM2OTU1LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.**" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:43442" 2025-03-31T16:02:35.791354Z node 59 :TX_PROXY DEBUG: Actor# [59:7488004705855851359:2646] txid# 281474976715664 Bootstrap, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-31T16:02:35.791385Z node 59 :TX_PROXY DEBUG: Actor# [59:7488004705855851359:2646] txid# 281474976715664 Bootstrap, UserSID: ordinaryuser IsClusterAdministrator: 0 2025-03-31T16:02:35.791588Z node 59 :TX_PROXY DEBUG: Actor# [59:7488004705855851359:2646] txid# 281474976715664 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-03-31T16:02:35.791619Z node 59 :TX_PROXY DEBUG: Actor# 
[59:7488004705855851359:2646] txid# 281474976715664 HandleResolveDatabase, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-03-31T16:02:35.791691Z node 59 :TX_PROXY DEBUG: Actor# [59:7488004705855851359:2646] txid# 281474976715664 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:02:35.792111Z node 59 :TX_PROXY DEBUG: Actor# [59:7488004705855851359:2646] txid# 281474976715664 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:02:35.792165Z node 59 :TX_PROXY ERROR: Actor# [59:7488004705855851359:2646] txid# 281474976715664, Access denied for ordinaryuser, attempt to manage user 2025-03-31T16:02:35.792336Z node 59 :TX_PROXY ERROR: Actor# [59:7488004705855851359:2646] txid# 281474976715664, issues: { message: "Access denied for ordinaryuser" issue_code: 200000 severity: 1 } 2025-03-31T16:02:35.792393Z node 59 :TX_PROXY DEBUG: Actor# [59:7488004705855851359:2646] txid# 281474976715664 SEND to# [59:7488004705855851358:2362] Source {TEvProposeTransactionStatus Status# 5} 2025-03-31T16:02:35.792843Z node 59 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=59&id=ZWYxN2NmYzEtY2FiMGJlNzYtMmY3YTcyNjYtN2I3MDU2ZmE=, ActorId: [59:7488004705855851349:2362], ActorState: ExecuteState, TraceId: 01jqpf1g3kczzz9dnd7nx0nhte, Create QueryResponse for error on request, msg: 2025-03-31T16:02:35.793158Z node 59 :TX_PROXY DEBUG: actor# [59:7488004684381013899:2112] Handle TEvExecuteKqpTransaction 2025-03-31T16:02:35.793192Z node 59 :TX_PROXY DEBUG: actor# [59:7488004684381013899:2112] TxId# 281474976715665 ProcessProposeKqpTransaction ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::TestManyConsumers [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:108:2057] recipient: [1:101:2135] 2025-03-31T16:02:08.118691Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:02:08.118789Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927938 is [1:153:2174] sender: [1:154:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:179:2057] recipient: [1:14:2061] 2025-03-31T16:02:08.159760Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:02:08.184907Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY 
AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-03-31T16:02:08.186343Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:185:2198] 2025-03-31T16:02:08.189259Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:185:2198] 2025-03-31T16:02:08.192108Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:186:2199] 2025-03-31T16:02:08.194143Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:186:2199] 2025-03-31T16:02:08.201673Z node 1 :PERSQUEUE INFO: new Cookie default|e0757f70-5244f96a-cf729f16-e6cddabd_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:103:2057] recipient: [2:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:103:2057] recipient: [2:101:2135] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:108:2057] recipient: [2:101:2135] 2025-03-31T16:02:08.758002Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:02:08.758080Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:149:2057] recipient: [2:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:149:2057] recipient: [2:147:2170] Leader for TabletID 72057594037927938 is [2:153:2174] sender: [2:154:2057] recipient: [2:147:2170] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:179:2057] recipient: [2:14:2061] 2025-03-31T16:02:08.776092Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:02:08.776969Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 2 actor [2:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2025-03-31T16:02:08.777599Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:185:2198] 2025-03-31T16:02:08.780357Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [2:185:2198] 2025-03-31T16:02:08.782617Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [2:186:2199] 2025-03-31T16:02:08.784956Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [2:186:2199] 2025-03-31T16:02:08.792091Z node 
2 :PERSQUEUE INFO: new Cookie default|65021d98-7b825d1e-8ec26131-f3987b3f_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:103:2057] recipient: [3:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:103:2057] recipient: [3:101:2135] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:108:2057] recipient: [3:101:2135] 2025-03-31T16:02:09.214197Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:02:09.214286Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:149:2057] recipient: [3:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:149:2057] recipient: [3:147:2170] Leader for TabletID 72057594037927938 is [3:153:2174] sender: [3:154:2057] recipient: [3:147:2170] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:179:2057] recipient: [3:14:2061] 2025-03-31T16:02:09.230285Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:02:09.230984Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 3 actor [3:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 3 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 3 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 3 Important: false } 2025-03-31T16:02:09.231553Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [3:185:2198] 2025-03-31T16:02:09.233238Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [3:185:2198] 2025-03-31T16:02:09.234376Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [3:186:2199] 2025-03-31T16:02:09.236015Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [3:186:2199] 2025-03-31T16:02:09.242200Z node 3 :PERSQUEUE INFO: new Cookie default|564923a6-88fb8b3b-c2d79be6-5ab1ba45_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:103:2057] recipient: [4:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:103:2057] recipient: [4:101:2135] Leader for TabletID 72057594037927937 is [4:107:2139] sender: [4:108:2057] recipient: [4:101:2135] 2025-03-31T16:02:09.877876Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:02:09.877990Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [4:149:2057] recipient: [4:147:2170] IGNORE Leader for TabletID 72057594037927938 is 
[0:0:0] sender: [4:149:2057] recipient: [4:147:2170] Leader for TabletID 72057594037927938 is [4:153:2174] sender: [4:154:2057] recipient: [4:147:2170] Leader for TabletID 72057594037927937 is [4:107:2139] sender: [4:179:2057] recipient: [4:14:2061] 2025-03-31T16:02:09.928484Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:02:09.929513Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 4 actor [4:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 10 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 4 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 4 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 4 Important: false } 2025-03-31T16:02:09.930285Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [4:185:2198] 2025-03-31T16:02:09.933335Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [4:185:2198] 2025-03-31T16:02:09.946140Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [4:186:2199] 2025-03-31T16:02:09.948390Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [4:186:2199] 2025-03-31T16:02:09.977987Z node 4 :PERSQUEUE INFO: new Cookie default|e45e6c67-76d7cd5a-1312be03-b8dd0f61_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-31T16:02:09.978626Z node 4 :PERSQUEUE INFO: new Cookie default|562bfbdd-25963403-3e38f6ba-66ee248b_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:103:2057] recipient: [5:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:103:2057] recipient: [5:101:2135] Leader for TabletID 72057594037927937 is [5:107:2139] sender: [5:108:2057] recipient: [5:101:2135] 2025-03-31T16:02:10.608778Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:02:10.608874Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [5:149:2057] recipient: [5:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [5:149:2057] recipient: [5:147:2170] Leader for TabletID 72057594037927938 is [5:153:2174] sender: [5:154:2057] recipient: [5:147:2170] Leader for TabletID 72057594037927 ... 
6 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-03-31T16:03:11.851671Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-03-31T16:03:11.901747Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [40:998:2994], now have 1 active actors on pipe 2025-03-31T16:03:11.904077Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-03-31T16:03:11.925742Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-03-31T16:03:11.969490Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [40:1001:2997], now have 1 active actors on pipe 2025-03-31T16:03:11.971357Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-03-31T16:03:11.988944Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-03-31T16:03:12.059905Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [40:1004:3000], now have 1 active actors on pipe 2025-03-31T16:03:12.062179Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-03-31T16:03:12.088438Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-03-31T16:03:12.139449Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [40:1007:3003], now have 1 active actors on pipe 2025-03-31T16:03:12.141682Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 
MaxWriteInflightSize: 90000000 } 2025-03-31T16:03:12.161926Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-03-31T16:03:12.211002Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [40:1010:3006], now have 1 active actors on pipe 2025-03-31T16:03:12.213281Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-03-31T16:03:12.233415Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-03-31T16:03:12.279317Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [40:1013:3009], now have 1 active actors on pipe 2025-03-31T16:03:12.281559Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-03-31T16:03:12.298545Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-03-31T16:03:12.374183Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [40:1016:3012], now have 1 active actors on pipe 2025-03-31T16:03:12.376474Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-03-31T16:03:12.395719Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-03-31T16:03:12.443636Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [40:1019:3015], now have 1 active actors on pipe 2025-03-31T16:03:12.445880Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 
2025-03-31T16:03:12.465743Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-03-31T16:03:12.509440Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [40:1022:3018], now have 1 active actors on pipe 2025-03-31T16:03:12.511705Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-03-31T16:03:12.530368Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-03-31T16:03:12.574593Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [40:1025:3021], now have 1 active actors on pipe 2025-03-31T16:03:12.576907Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-03-31T16:03:12.595907Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-03-31T16:03:12.649323Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [40:1028:3024], now have 1 active actors on pipe 2025-03-31T16:03:12.651349Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-03-31T16:03:12.702706Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-03-31T16:03:12.773289Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [40:1031:3027], now have 1 active actors on pipe 2025-03-31T16:03:12.775399Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-03-31T16:03:12.794458Z node 40 
:PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-03-31T16:03:12.921600Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [40:1034:3030], now have 1 active actors on pipe 2025-03-31T16:03:12.931912Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-03-31T16:03:12.984221Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-03-31T16:03:13.057359Z node 40 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [40:1037:3033] connected; active server actors: 1 >> TSchemeShardUserAttrsTest::VariousUse [GOOD] >> TSchemeShardUserAttrsTest::UserConditionsAtAlter |73.4%| [TA] $(B)/ydb/core/tx/schemeshard/ut_serverless/test-results/unittest/{meta.json ... results_accumulator.log} >> TBackupTests::ShouldSucceedOnSingleShardTable[Raw] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::AffectedShardsLimit [GOOD] Test command err: Trying to start YDB, gRPC: 1189, MsgBus: 24650 2025-03-31T16:02:38.431538Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004721229440260:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:02:38.432483Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ac5/r3tmp/tmpc1IlKO/pdisk_1.dat 2025-03-31T16:02:38.940617Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:02:38.940751Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:02:38.946002Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:02:38.954738Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1189, node 1 2025-03-31T16:02:39.069991Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:02:39.070028Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:02:39.070040Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:02:39.070181Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24650 TClient is connected to server localhost:24650 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:02:39.872928Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:39.923416Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:40.082262Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:40.283827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:40.367661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:42.274284Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004738409311188:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:42.274425Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:42.682248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:02:42.730774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:02:42.775465Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:02:42.849572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:02:42.879825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:02:42.966671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:02:43.025767Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004742704279004:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:43.025861Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:43.026071Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004742704279009:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:43.030430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:02:43.045909Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488004742704279011:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:02:43.146658Z node 1 :TX_PROXY ERROR: Actor# [1:7488004742704279067:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:02:43.435226Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488004721229440260:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:02:43.435285Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["EightShard"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"SortBy":"row.Text","Name":"Sort"},{"Scan":"Parallel","ReadRange":["Key [150, 266]"],"E-Size":"No estimate","Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/EightShard","E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data","Key","Text"],"E-Cost":"No estimate"}],"Node Type":"Sort-TableRangeScan"}],"Node Type":"Merge","SortColumns":["Text (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/EightShard","reads":[{"columns":["Data","Key","Text"],"scan_by":["Key [150, 266]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","ReadRange":["Key [150, 266]"],"E-Size":"No estimate","Name":"TableRangeScan","Path":"\/Root\/EightShard","E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data","Key","Text"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Operators":[{"SortBy":"row.Text","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 26839, MsgBus: 21393 2025-03-31T16:02:45.113531Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488004748125041520:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:02:45.113589Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ac5/r3tmp/tmpCYj0dy/pdisk_1.dat 2025-03-31T16:02:45.303596Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:02:45.329525Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:02:45.329612Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:02:45.331063Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26839, node 2 2025-03-31T16:02:45.398581Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty
maybe) 2025-03-31T16:02:45.398608Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:02:45.398618Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:02:45.398742Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21393 TClient is connected to server localhost:21393 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: ... ratorId":1}],"E-Rows":"No estimate","Predicate":"Value \u003E 0","Pushdown":"True","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/OlapTable","E-Rows":"No estimate","Table":"OlapTable","ReadColumns":["Key","Value"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Constant":{"Int32":0},"Column":{"Id":3}}},{"Assign":{"Function":{"YqlOperationId":15,"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":2},{"Id":3}]},"Column":{"Id":4}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":5}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":4},{"Id":5}]},"Column":{"Id":6}}},{"Filter":{"Predicate":{"Id":6}}},{"Projection":{"Columns":[{"Id":1},{"Id":2}]}}]},"E-Cost":"No estimate"}],"Node Type":"Filter-TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/OlapTable","reads":[{"columns":["Key","Value"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Path":"\/Root\/OlapTable","E-Rows":"No estimate","Table":"OlapTable","ReadColumns":["Key","Value"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Constant":{"Int32":0},"Column":{"Id":3}}},{"Assign":{"Function":{"YqlOperationId":15,"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":2},{"Id":3}]},"Column":{"Id":4}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":5}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":4},{"Id":5}]},"Column":{"Id":6}}},{"Filter":{"Predicate":{"Id":6}}},{"Projection":{"Columns":[{"Id":1},{"Id":2}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"Value \u003E 0","Pushdown":"True","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node 
Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 20625, MsgBus: 28712 2025-03-31T16:03:04.836064Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488004831895605965:2197];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:04.836140Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ac5/r3tmp/tmpJqBuG2/pdisk_1.dat 2025-03-31T16:03:04.985707Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:05.014114Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:03:05.014233Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:03:05.016481Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20625, node 4 2025-03-31T16:03:05.159871Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:03:05.159901Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:03:05.159913Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:03:05.160075Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28712 TClient is connected to server localhost:28712 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:03:06.149192Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:06.157611Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:03:06.171241Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:06.353264Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:03:06.600062Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:06.714738Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:09.839221Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7488004831895605965:2197];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:09.916304Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:03:11.417893Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488004861960378690:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:11.418029Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:11.511035Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:03:11.574224Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:03:11.658067Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:03:11.711451Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:03:11.837570Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:03:11.905540Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:03:12.002832Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488004866255346519:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:12.003000Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:12.003689Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488004866255346524:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:12.015455Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:03:12.037964Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2025-03-31T16:03:12.040293Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7488004866255346526:2466], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:03:12.126038Z node 4 :TX_PROXY ERROR: Actor# [4:7488004866255346594:3456] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:03:13.499137Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:14.310489Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:16.647793Z node 4 :KQP_EXECUTER WARN: ActorId: [4:7488004883435218200:2633] TxId: 281474976715674. Ctx: { TraceId: 01jqpf2qgd4pa3hyt5g2wje7bb, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NDIzOTdlYmYtY2M0MDM2ZmUtZGQwZmVlN2ItMTFjYTZkMWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Too many affected shards: datashardTasks=21, limit: 20 2025-03-31T16:03:16.648114Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NDIzOTdlYmYtY2M0MDM2ZmUtZGQwZmVlN2ItMTFjYTZkMWE=, ActorId: [4:7488004879140250552:2633], ActorState: ExecuteState, TraceId: 01jqpf2qgd4pa3hyt5g2wje7bb, Create QueryResponse for error on request, msg:
<main>: Error: Affected too many shards: 0, code: 2029 >> THiveTest::TestCreateAndDeleteTabletWithStoragePoolsReboots [GOOD] >> THiveTest::TestCreateAndDeleteTabletWithStoragePools >> DataShardTxOrder::RandomPoints_ReproducerDelayData1 [GOOD] >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLockOutOfOrder [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ForceOnlineBetweenOnline [GOOD] Test command err: 2025-03-31T16:03:08.284054Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:03:08.285443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:03:08.285518Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:08.296309Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:03:08.296818Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:131:2154] 2025-03-31T16:03:08.297097Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:03:08.307658Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:03:08.352832Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:03:08.353161Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:03:08.354753Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-03-31T16:03:08.354834Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-03-31T16:03:08.354904Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-03-31T16:03:08.355296Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:03:08.355919Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:03:08.356016Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:197:2154] in generation 2 2025-03-31T16:03:08.434371Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:03:08.480917Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-03-31T16:03:08.481110Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:03:08.481227Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-03-31T16:03:08.481262Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-03-31T16:03:08.481296Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-03-31T16:03:08.481348Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:03:08.481576Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:03:08.481621Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:03:08.481899Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-03-31T16:03:08.482002Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-03-31T16:03:08.482058Z
node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-31T16:03:08.482099Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:03:08.482132Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-03-31T16:03:08.482163Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-31T16:03:08.482193Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-31T16:03:08.482222Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-03-31T16:03:08.482256Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:03:08.482350Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:03:08.482398Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:03:08.482456Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-03-31T16:03:08.493564Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:131:2154]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-03-31T16:03:08.493628Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T16:03:08.493724Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-31T16:03:08.493896Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-03-31T16:03:08.493935Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-03-31T16:03:08.493992Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-03-31T16:03:08.494037Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-31T16:03:08.494077Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-03-31T16:03:08.494124Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-03-31T16:03:08.494171Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-03-31T16:03:08.494464Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-03-31T16:03:08.494517Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-03-31T16:03:08.494551Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-03-31T16:03:08.494588Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-03-31T16:03:08.494640Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-03-31T16:03:08.494672Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-03-31T16:03:08.494707Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-03-31T16:03:08.494736Z 
node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-03-31T16:03:08.494762Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-03-31T16:03:08.512153Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-03-31T16:03:08.512222Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-03-31T16:03:08.512258Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-03-31T16:03:08.512316Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-03-31T16:03:08.512397Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-03-31T16:03:08.512951Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:03:08.513014Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:03:08.513060Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-03-31T16:03:08.513196Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:131:2154]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-03-31T16:03:08.513231Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-31T16:03:08.513391Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-03-31T16:03:08.513434Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-31T16:03:08.513469Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-03-31T16:03:08.513520Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-03-31T16:03:08.542476Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-03-31T16:03:08.542558Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:03:08.542848Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:03:08.542910Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:03:08.542979Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-31T16:03:08.543023Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:03:08.543060Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-31T16:03:08.551225Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-03-31T16:03:08.551309Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-03-31T16:03:08.551364Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-31T16:03:08.551406Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] 
at 9437184 executing on unit PlanQueue 2025-03-31T16:03:08.551446Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-03-31T16:03:08.551489Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-03-31T16:03:08.551702Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-03-31T16:03:08.551743Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-31T16:03:08.551778Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-03-31T16:03:08.551801Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-03-31T16:03:08.551821Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-03-31T16:03:08.551900Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-31T16:03:08.551925Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-03-31T16:03:08.551957Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-31T16:03:08.551983Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-03-31T16:03:08.552046Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-03-31T16:03:08.552082Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-03-31T16:03:08.552113Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-03-31T16:03:08.552151Z node 1 :TX_DATA ... 
1000005:149] at 9437186 on unit CompleteOperation 2025-03-31T16:03:18.040053Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 149] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:03:18.040076Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-03-31T16:03:18.040156Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-03-31T16:03:18.040486Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 131 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 43} 2025-03-31T16:03:18.040555Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:03:18.040611Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 131 2025-03-31T16:03:18.040775Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:234:2227], Recipient [1:345:2313]: {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2025-03-31T16:03:18.040812Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:03:18.040853Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 151 2025-03-31T16:03:18.040933Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [1:234:2227], Recipient [1:457:2399]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletProducer# 9437184 ReadSet.Size()# 7 Seqno# 50 Flags# 0} 2025-03-31T16:03:18.040963Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-03-31T16:03:18.040993Z node 1 :TX_DATASHARD DEBUG: Receive RS at 9437186 source 9437184 dest 9437186 producer 9437184 txId 152 2025-03-31T16:03:18.041061Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 9437186 got read set: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletProducer# 9437184 ReadSet.Size()# 7 Seqno# 50 Flags# 0} 2025-03-31T16:03:18.041101Z node 1 :TX_DATASHARD TRACE: Filled readset for [1000005:152] from=9437184 to=9437186origin=9437184 2025-03-31T16:03:18.041169Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437186 2025-03-31T16:03:18.041287Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 128 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 42} 2025-03-31T16:03:18.041314Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:03:18.041349Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 128 2025-03-31T16:03:18.041439Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2025-03-31T16:03:18.041463Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:03:18.041484Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 134 2025-03-31T16:03:18.041534Z node 1 :TX_DATASHARD TRACE: 
StateWork, received event# 269287938, Sender [1:234:2227], Recipient [1:345:2313]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2025-03-31T16:03:18.041566Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:03:18.041607Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2025-03-31T16:03:18.041679Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:457:2399], Recipient [1:457:2399]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:03:18.041706Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:03:18.041765Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437186 2025-03-31T16:03:18.041801Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437186 active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:03:18.041835Z node 1 :TX_DATASHARD DEBUG: Found ready candidate operation [1000005:152] at 9437186 for LoadAndWaitInRS 2025-03-31T16:03:18.041863Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:152] at 9437186 on unit LoadAndWaitInRS 2025-03-31T16:03:18.041906Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:152] at 9437186 is Executed 2025-03-31T16:03:18.041940Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:152] at 9437186 executing on unit LoadAndWaitInRS 2025-03-31T16:03:18.041986Z node 1 :TX_DATASHARD TRACE: Add [1000005:152] at 9437186 to execution unit ExecuteDataTx 2025-03-31T16:03:18.042010Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:152] at 9437186 on unit ExecuteDataTx 2025-03-31T16:03:18.042481Z node 1 :TX_DATASHARD TRACE: Executed operation [1000005:152] at tablet 9437186 with status COMPLETE 2025-03-31T16:03:18.042560Z node 1 :TX_DATASHARD TRACE: Datashard execution counters for [1000005:152] at 9437186: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 5, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-03-31T16:03:18.042604Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:152] at 9437186 is ExecutedNoMoreRestarts 2025-03-31T16:03:18.042628Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:152] at 9437186 executing on unit ExecuteDataTx 2025-03-31T16:03:18.042654Z node 1 :TX_DATASHARD TRACE: Add [1000005:152] at 9437186 to execution unit CompleteOperation 2025-03-31T16:03:18.042682Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:152] at 9437186 on unit CompleteOperation 2025-03-31T16:03:18.042894Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:152] at 9437186 is DelayComplete 2025-03-31T16:03:18.042927Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:152] at 9437186 executing on unit CompleteOperation 2025-03-31T16:03:18.042954Z node 1 :TX_DATASHARD TRACE: Add [1000005:152] at 9437186 to execution unit CompletedOperations 2025-03-31T16:03:18.042979Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:152] at 9437186 on unit CompletedOperations 2025-03-31T16:03:18.043010Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:152] at 9437186 is Executed 2025-03-31T16:03:18.043031Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:152] at 9437186 executing on unit CompletedOperations 2025-03-31T16:03:18.043077Z 
node 1 :TX_DATASHARD TRACE: Execution plan for [1000005:152] at 9437186 has finished 2025-03-31T16:03:18.043132Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437186 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:03:18.043171Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437186 2025-03-31T16:03:18.043200Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437186 has no attached operations 2025-03-31T16:03:18.043252Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437186 2025-03-31T16:03:18.043513Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:234:2227], Recipient [1:345:2313]: {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2025-03-31T16:03:18.043558Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:03:18.043600Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 2025-03-31T16:03:18.043704Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2025-03-31T16:03:18.043729Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:03:18.043753Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 137 2025-03-31T16:03:18.043838Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2025-03-31T16:03:18.043862Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:03:18.044336Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 140 2025-03-31T16:03:18.044500Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2025-03-31T16:03:18.044536Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:03:18.044559Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 143 2025-03-31T16:03:18.044640Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2025-03-31T16:03:18.044683Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:03:18.044706Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 146 2025-03-31T16:03:18.044826Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2025-03-31T16:03:18.044856Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:03:18.044898Z node 1 :TX_DATASHARD DEBUG: 
Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2025-03-31T16:03:18.063127Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-03-31T16:03:18.063203Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:152] at 9437186 on unit CompleteOperation 2025-03-31T16:03:18.063281Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 1 ms, propose latency: 3 ms 2025-03-31T16:03:18.063354Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-03-31T16:03:18.063391Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-03-31T16:03:18.063670Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-03-31T16:03:18.063710Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:03:18.063745Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::VariousUse [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-31T16:03:18.179804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:03:18.179904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:03:18.179941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:03:18.179980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:03:18.180041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:03:18.180094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:03:18.180164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:03:18.180239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:03:18.180595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:03:18.323534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:03:18.323624Z node 1 :IMPORT WARN: Table profiles were not loaded 
2025-03-31T16:03:18.333155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:03:18.333538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:03:18.333697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:03:18.341661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:03:18.341927Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:03:18.342650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:18.343180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:03:18.345524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:18.346911Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:18.346968Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:18.347060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:03:18.347132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:18.347188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:03:18.347443Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:03:18.380279Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-03-31T16:03:18.602638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:03:18.602905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:18.603144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:03:18.603429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:03:18.603487Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:18.607891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:18.608066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:03:18.608719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:18.608824Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:03:18.608887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:03:18.608928Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:03:18.616150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:18.616237Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:03:18.616304Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:03:18.624293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:18.624374Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:18.624433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:18.624496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:03:18.628524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:03:18.637006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:03:18.637241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:03:18.638627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:18.638803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:03:18.638873Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:18.639294Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:03:18.639347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:18.639521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:03:18.639596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 2025-03-31T16:03:18.642944Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:18.643022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:18.643236Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:18.643275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:03:18.643647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:18.643695Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:03:18.643788Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:03:18.643818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:18.643854Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:03:18.643883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:18.643935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:03:18.644023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:18.644059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:03:18.644101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:03:18.644173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:03:18.644212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:03:18.644251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:03:18.647214Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:03:18.647372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:03:18.647420Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
1/1 2025-03-31T16:03:18.974701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2025-03-31T16:03:18.974781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:03:18.974845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-03-31T16:03:18.974912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-03-31T16:03:18.974946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 112, ready parts: 1/1, is published: false 2025-03-31T16:03:18.974995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2025-03-31T16:03:18.975032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 112:0 2025-03-31T16:03:18.975063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 112:0 2025-03-31T16:03:18.975125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-03-31T16:03:18.975168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 112, publications: 3, subscribers: 0 2025-03-31T16:03:18.975253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 112, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2025-03-31T16:03:18.975284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 112, [OwnerId: 72057594046678944, LocalPathId: 3], 7 2025-03-31T16:03:18.975317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 112, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-03-31T16:03:18.976267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-03-31T16:03:18.978413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-03-31T16:03:18.979689Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:18.979731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:18.979918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-03-31T16:03:18.980024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-03-31T16:03:18.980174Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:18.980215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 112, path id: 1 2025-03-31T16:03:18.980257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 112, path id: 3 2025-03-31T16:03:18.980281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 112, path id: 4 
FAKE_COORDINATOR: Erasing txId 112 2025-03-31T16:03:18.981008Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 112 2025-03-31T16:03:18.981087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 112 2025-03-31T16:03:18.981118Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 112 2025-03-31T16:03:18.981164Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-03-31T16:03:18.981200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-03-31T16:03:18.981695Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 112 2025-03-31T16:03:18.981764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 112 2025-03-31T16:03:18.981811Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 112 2025-03-31T16:03:18.981839Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 7 2025-03-31T16:03:18.981872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-31T16:03:18.982878Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 112 2025-03-31T16:03:18.982976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 112 2025-03-31T16:03:18.987049Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 112 2025-03-31T16:03:18.987149Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-03-31T16:03:18.987197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-03-31T16:03:18.987337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 112, subscribers: 0 2025-03-31T16:03:18.987666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-31T16:03:18.987746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 
2025-03-31T16:03:18.987834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-03-31T16:03:18.990837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-03-31T16:03:18.991896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-03-31T16:03:18.993300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-03-31T16:03:18.993428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 112, wait until txId: 112 TestWaitNotification wait txId: 112 2025-03-31T16:03:18.993897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 112: send EvNotifyTxCompletion 2025-03-31T16:03:18.993940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 112 2025-03-31T16:03:18.994655Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 112, at schemeshard: 72057594046678944 2025-03-31T16:03:18.994791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 112: got EvNotifyTxCompletionResult 2025-03-31T16:03:18.994829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 112: satisfy waiter [1:499:2490] TestWaitNotification: OK eventTxId 112 2025-03-31T16:03:18.995698Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:03:18.995948Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirB" took 244us result status StatusSuccess 2025-03-31T16:03:19.063879Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirB" PathDescription { Self { Name: "DirB" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 109 CreateStep: 5000008 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrB1" Value: "ValB1" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 113 2025-03-31T16:03:19.067502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: 
ESchemeOpRmDir Drop { Name: "DirB" } ApplyIf { PathId: 2 PathVersion: 8 } ApplyIf { PathId: 3 PathVersion: 7 } ApplyIf { PathId: 4 PathVersion: 3 } } TxId: 113 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:03:19.067722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TRmDir Propose, path: /MyRoot/DirB, pathId: 0, opId: 113:0, at schemeshard: 72057594046678944 2025-03-31T16:03:19.067861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 113:1, propose status:StatusPreconditionFailed, reason: fail user constraint: ApplyIf section: no path with id [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-03-31T16:03:19.076269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 113, response: Status: StatusPreconditionFailed Reason: "fail user constraint: ApplyIf section: no path with id [OwnerId: 72057594046678944, LocalPathId: 4]" TxId: 113 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:03:19.076463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 113, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: fail user constraint: ApplyIf section: no path with id [OwnerId: 72057594046678944, LocalPathId: 4], operation: DROP DIRECTORY, path: /MyRoot/DirB TestModificationResult got TxId: 113, wait until txId: 113 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::QueryFromSqs [GOOD] Test command err: Trying to start YDB, gRPC: 17472, MsgBus: 3660 2025-03-31T16:02:48.832479Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004764461469568:2205];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:02:48.832887Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ab4/r3tmp/tmpBVxPB7/pdisk_1.dat 2025-03-31T16:02:49.353545Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:02:49.353636Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:02:49.356990Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:02:49.390426Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17472, node 1 2025-03-31T16:02:49.603960Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:02:49.603994Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:02:49.604020Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:02:49.604127Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3660 TClient is connected to server localhost:3660 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:02:50.468349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:50.496113Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:02:50.510598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:50.743001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:50.983507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:51.069070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:53.275221Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004785936307664:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:53.275363Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:53.595721Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:02:53.637297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:02:53.668064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:02:53.726820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:02:53.770361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:02:53.814090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:02:53.832159Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488004764461469568:2205];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:02:53.832262Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:02:53.872692Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004785936308175:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:53.872768Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:53.873004Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004785936308180:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:53.876300Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:02:53.886987Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488004785936308182:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:02:53.984375Z node 1 :TX_PROXY ERROR: Actor# [1:7488004785936308240:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:02:55.010377Z node 1 :GRPC_SERVER DEBUG: [0x51b0000d8280] received request Name# PrepareDataQuery ok# true data# session_id: "ydb://session/3?node_id=1&id=YTBlNDA5MmQtNzQ1OGQ2ZmItNDIzZmZkMS0zN2QyN2JhZA==" yql_text: "\n SELECT * FROM `/Root/TwoShard`;\n " operation_params { } peer# ipv6:%5B::1%5D:60918 2025-03-31T16:02:55.010504Z node 1 :GRPC_SERVER DEBUG: [0x51b000311580] created request Name# PrepareDataQuery 2025-03-31T16:02:55.010999Z node 1 :GRPC_SERVER DEBUG: [0x51b0000d8280] received request without user token Name# PrepareDataQuery data# session_id: "ydb://session/3?node_id=1&id=YTBlNDA5MmQtNzQ1OGQ2ZmItNDIzZmZkMS0zN2QyN2JhZA==" yql_text: "\n SELECT * FROM `/Root/TwoShard`;\n " operation_params { } peer# ipv6:%5B::1%5D:60918 database# /Root 2025-03-31T16:02:55.011391Z node 1 :GRPC_SERVER DEBUG: Got grpc request# PrepareDataQueryRequest, traceId# 01jqpf22x3ab05ahmzcrjevnzz, sdkBuildInfo# ydb-cpp-sdk/dev, state# AS_NOT_PERFORMED, database# /Root, peer# ipv6:[::1]:60918, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-03-31T16:02:55.172237Z node 1 :GRPC_SERVER DEBUG: [0x51b0000d8280] issuing response Name# PrepareDataQuery data# operation { ready: true status: SUCCESS result { type_url: "type.googleapis.com/Ydb.Table.PrepareQueryResult" value: "\n>ydb://preparedqueryid/4?id=a7477a47-b6c623e8-704b58ac-a9c39a50" } } peer# ipv6:%5B::1%5D:60918 2025-03-31T16:02:55.175248Z node 1 :GRPC_SERVER DEBUG: [0x51b0000d8280] finished request Name# PrepareDataQuery ok# true peer# ipv6:%5B::1%5D:60918 2025-03-31T16:02:55.181933Z node 1 :GRPC_SERVER DEBUG: [0x51b00021fd80] received request Name# ExecuteDataQuery ok# true data# session_id: "ydb://session/3?node_id=1&id=YTBlNDA5MmQtNzQ1OGQ2ZmItNDIzZmZkMS0zN2QyN2JhZA==" tx_control { begin_tx { serializable_read_write { } } commit_tx: true } query { id: "ydb://preparedqueryid/4?id=a7477a47-b6c623e8-704b58ac-a9c39a50" } query_cache_policy { keep_in_cache: true } operation_params { } peer# ipv6:%5B::1%5D:60922 2025-03-31T16:02:55.181997Z node 1 :GRPC_SERVER DEBUG: [0x51b000220b80] created request Name# ExecuteDataQuery 2025-03-31T16:02:55.182113Z node 1 :GRPC_SERVER DEBUG: [0x51b00021fd80] received request without user token Name# ExecuteDataQuery data# session_id: "ydb://session/3?node_id=1&id=YTBlNDA5MmQtNzQ1OGQ2ZmItNDIzZmZkMS0zN2QyN2JhZA==" tx_control { begin_tx { serializable_read_write { } } commit_tx: true } query { id: "ydb://preparedqueryid/4?id=a7477a47-b6c623e8-704b58ac-a9c39a50" } query_cache_policy { keep_in_cache: true } operation_params { } peer# ipv6:%5B::1%5D:60922 database# /Root 2025-03-31T16:02:55.182497Z node 1 :GRPC_SERVER DEBUG: Got grpc request# ExecuteDataQueryRequest, traceId# 01jqpf232edd801rk33fasmegr, sdkBuildInfo# ydb-cpp-sdk/dev, state# AS_NOT_PERFORMED, database# /Root, peer# ipv6:[::1]:60922, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# 2.998610s 2025-03-31T16:02:58.180716Z node 1 :GRPC_SERVER DEBUG: [0x51b00021fd80] issuing response Name# ExecuteDataQuery data# operation { ready: true status: INTERNAL_ERROR issues { message: "Closing Grpc 
request, client should not see this message." severity: 1 } } peer# ipv6:%5B::1%5D:60922 2025-03-31T16:02:58.181378Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7488004794526243159:2500] TxId: 281474976710671. Ctx: { TraceId: 01jqpf232edd801rk33fasmegr, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTBlNDA5MmQtNzQ1OGQ2ZmItNDIzZmZkMS0zN2QyN2JhZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2025-03-31T16:02:58.181999Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488004794526243166:2510], TxId: 281474976710671, task: 1. Ctx: { TraceId : 01jqpf232edd801rk33fasmegr. CustomerSuppliedId : . SessionId : ... hecking } 2025-03-31T16:03:04.940853Z node 2 :TX_PROXY ERROR: Actor# [2:7488004829653628229:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } AST: ( (let $1 (KqpTable '"/Root/Test" '"72057594046644480:9" '"" '1)) (let $2 '('"Amount" '"Comment" '"Group" '"Name")) (let $3 (Uint64 '"1001")) (let $4 (Uint32 '1)) (let $5 (KqpRowsSourceSettings $1 $2 '('('"ItemsLimit" $3) '('"Sequential" '1)) '((KqlKeyExc $4 (String '"Name")) (KqlKeyInc $4)))) (let $6 (OptionalType (DataType 'String))) (let $7 (StructType '('"Amount" (OptionalType (DataType 'Uint64))) '('"Comment" $6) '('"Group" (OptionalType (DataType 'Uint32))) '('"Name" $6))) (let $8 '('('"_logical_id" '710) '('"_id" '"aa9717b9-aa2ac669-4823104d-d606752c") '('"_wide_channels" $7))) (let $9 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $5)) (lambda '($13) (block '( (let $14 (lambda '($15) (Member $15 '"Amount") (Member $15 '"Comment") (Member $15 '"Group") (Member $15 '"Name"))) (return (FromFlow (ExpandMap (Take (ToFlow $13) $3) $14))) ))) $8)) (let $10 (DqCnUnionAll (TDqOutput $9 '"0"))) (let $11 (DqPhyStage '($10) (lambda '($16) (FromFlow (NarrowMap (Take (ToFlow $16) $3) (lambda '($17 $18 $19 $20) (AsStruct '('"Amount" $17) '('"Comment" $18) '('"Group" $19) '('"Name" $20)))))) '('('"_logical_id" '723) '('"_id" '"f0a794ec-7aa603b-6d54e6a7-fc98ed8d")))) (let $12 (DqCnResult (TDqOutput $11 '"0") '())) (return (KqpPhysicalQuery '((KqpPhysicalTx '($9 $11) '($12) '() '('('"type" '"data")))) '((KqpTxResultBinding (ListType $7) '"0" '"0")) '('('"type" '"data_query")))) ) Plan: {"Plan":{"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["Test"],"PlanNodeId":1,"Operators":[{"Scan":"Sequential","ReadRange":["Group (1)","Name (Name, +∞)"],"E-Size":"No estimate","ReadLimit":"1001","Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/Test","E-Rows":"No estimate","Table":"Test","ReadColumns":["Amount","Comment","Group","Name"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":1}],"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":3}],"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/Test","reads":[{"lookup_by":["Group (1)"],"columns":["Amount","Comment","Group","Name"],"scan_by":["Name (Name, +∞)"],"limit":"1001","type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Sequential","ReadRange":["Group (1)","Name (Name, +∞)"],"E-Size":"No estimate","ReadLimit":"1001","Name":"TableRangeScan","Path":"\/Root\/Test","E-Rows":"No estimate","Table":"Test","ReadColumns":["Amount","Comment","Group","Name"],"E-Cost":"No estimate"}],"Node 
Type":"TableRangeScan"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 29004, MsgBus: 63861 2025-03-31T16:03:07.911766Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488004843189884558:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:07.911821Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ab4/r3tmp/tmp9xf5IN/pdisk_1.dat 2025-03-31T16:03:08.186388Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:08.215716Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:03:08.215804Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:03:08.217658Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29004, node 3 2025-03-31T16:03:08.375734Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:03:08.375758Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:03:08.375766Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:03:08.375917Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63861 TClient is connected to server localhost:63861 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:03:09.172624Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:09.183173Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:03:09.200232Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:03:09.304389Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:09.541525Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:09.686911Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:12.915636Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488004843189884558:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:12.915715Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:03:13.608391Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488004868959690108:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:13.608499Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:13.699438Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:03:13.834395Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:03:13.873180Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:03:13.949783Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:03:14.006379Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:03:14.087930Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:03:14.207071Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488004873254657925:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:14.207188Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:14.207649Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488004873254657930:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:14.213188Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:03:14.253325Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488004873254657932:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:03:14.342026Z node 3 :TX_PROXY ERROR: Actor# [3:7488004873254657993:3465] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:03:16.101931Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:1, at schemeshard: 72057594046644480 |73.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |73.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |73.4%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_serverless/test-results/unittest/{meta.json ... results_accumulator.log} |73.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop-UseSink [GOOD] Test command err: 2025-03-31T16:03:06.019435Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:03:06.019727Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:03:06.019927Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0011ca/r3tmp/tmplvPuyl/pdisk_1.dat 2025-03-31T16:03:06.480054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:03:06.578771Z node 1 :KQP_RESOURCE_MANAGER INFO: Updated table service config: ComputeActorsCount: 10000 ChannelBufferSize: 8388608 MkqlLightProgramMemoryLimit: 1048576 MkqlHeavyProgramMemoryLimit: 31457280 QueryMemoryLimit: 32212254720 PublishStatisticsIntervalSec: 2 MaxTotalChannelBuffersSize: 2147483648 MinChannelBufferSize: 2048 2025-03-31T16:03:06.578886Z node 1 :KQP_PROXY DEBUG: Updated table service config. 2025-03-31T16:03:06.578961Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 7 2025-03-31T16:03:06.579124Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Updated table service config. 2025-03-31T16:03:06.579297Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:06.651866Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-31T16:03:06.652895Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-03-31T16:03:06.653123Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:03:06.653247Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:03:06.668346Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:03:06.762061Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-03-31T16:03:06.762138Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-03-31T16:03:06.762313Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:642:2550] 2025-03-31T16:03:07.025475Z node 1 :TX_PROXY DEBUG: Actor# [1:642:2550] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value1" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value2" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-03-31T16:03:07.025609Z node 1 :TX_PROXY DEBUG: Actor# [1:642:2550] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T16:03:07.026257Z node 1 :TX_PROXY DEBUG: Actor# [1:642:2550] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-03-31T16:03:07.026359Z node 1 :TX_PROXY 
DEBUG: Actor# [1:642:2550] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:03:07.026657Z node 1 :TX_PROXY DEBUG: Actor# [1:642:2550] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:03:07.026927Z node 1 :TX_PROXY DEBUG: Actor# [1:642:2550] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T16:03:07.027033Z node 1 :TX_PROXY DEBUG: Actor# [1:642:2550] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-03-31T16:03:07.028918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:03:07.029426Z node 1 :TX_PROXY DEBUG: Actor# [1:642:2550] txid# 281474976715657 HANDLE EvClientConnected 2025-03-31T16:03:07.030133Z node 1 :TX_PROXY DEBUG: Actor# [1:642:2550] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-03-31T16:03:07.030208Z node 1 :TX_PROXY DEBUG: Actor# [1:642:2550] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-03-31T16:03:07.070738Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:658:2565], Recipient [1:666:2571]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:03:07.072141Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:658:2565], Recipient [1:666:2571]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:03:07.072652Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2571] 2025-03-31T16:03:07.072936Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:03:07.173081Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:658:2565], Recipient [1:666:2571]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:03:07.173930Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:03:07.174063Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:03:07.177669Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:03:07.177794Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:03:07.177869Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:03:07.178300Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:03:07.178472Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:03:07.178573Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2571] in generation 1 2025-03-31T16:03:07.192163Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:03:07.222434Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T16:03:07.222662Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:03:07.222796Z node 1 
:TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2581] 2025-03-31T16:03:07.222861Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:03:07.222922Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T16:03:07.222963Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:03:07.223222Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2571], Recipient [1:666:2571]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:03:07.223287Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:03:07.223653Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T16:03:07.223761Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T16:03:07.223835Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:03:07.223882Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:03:07.223933Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-31T16:03:07.223989Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-31T16:03:07.224033Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-31T16:03:07.224068Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:03:07.224137Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:03:07.224623Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2575], Recipient [1:666:2571]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:03:07.224669Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:03:07.224724Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:665:2570], serverId# [1:673:2575], sessionId# [0:0:0] 2025-03-31T16:03:07.224808Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2575] 2025-03-31T16:03:07.224852Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T16:03:07.224991Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:03:07.225246Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-31T16:03:07.225306Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T16:03:07.225409Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T16:03:07.225479Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-31T16:03:07.225532Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-31T16:03:07.225589Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-03-31T16:03:07.225638Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-31T16:03:07.225942Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-31T16:03:07.225990Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-31T16:03:07.226031Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-31T16:03:07.226072Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-31T16:03:07.226140Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-31T16:03:07.226184Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-31T16:03:07.226226Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-31T16:03:07.226264Z node 1 :TX_DATASH ... ocessing event TEvDataShard::TEvSchemaChangedResult 2025-03-31T16:03:18.491975Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715663 datashard 72075186224037888 state Ready 2025-03-31T16:03:18.492044Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 --- resending captured proposals --- waiting for result 2025-03-31T16:03:18.493390Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553206, Sender [2:890:2720], Recipient [2:668:2572]: NKikimrTxDataShard.TEvKqpScan TxId: 281474976715662 ScanId: 2 LocalPathId: 2 TablePath: "/Root/table-1" SchemaVersion: 1 ColumnTags: 3 ColumnTypes: 2 Ranges { From: "\001\000\000\000\000\200" To: "" FromInclusive: true ToInclusive: false } Snapshot { Step: 2000 TxId: 281474976715661 } Generation: 1 ItemsLimit: 0 Reverse: false DataFormat: FORMAT_CELLVEC StatsMode: DQ_STATS_MODE_NONE ColumnTypeInfos { } LockNodeId: 0 2025-03-31T16:03:18.493528Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715662. Table '/Root/table-1' schema version changed at 72075186224037888 2025-03-31T16:03:18.493678Z node 2 :KQP_COMPUTE WARN: SelfId: [2:890:2720]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/table-1' scheme changed., code: 2028 , tablet id: 72075186224037888, actor_id: [2:668:2572] 2025-03-31T16:03:18.493728Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:890:2720]. Enqueue for resolve 72075186224037888 2025-03-31T16:03:18.493782Z node 2 :KQP_COMPUTE DEBUG: fline=kqp_scan_compute_manager.h:359;event=scanner_finished;tablet_id=72075186224037888;stop_shard=1; 2025-03-31T16:03:18.493836Z node 2 :KQP_COMPUTE DEBUG: fline=kqp_scan_compute_manager.h:93;event=stop_scanner;actor_id=NO_VALUE_OPTIONAL;message=;final_flag=1; 2025-03-31T16:03:18.493943Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:890:2720]. Sending TEvResolveKeySet update for table '/Root/table-1', range: [(Uint32 : NULL) ; ()), attempt #1 2025-03-31T16:03:18.494170Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:890:2720]. Received TEvResolveKeySetResult update for table '/Root/table-1' 2025-03-31T16:03:18.494226Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:890:2720]. Resolve request failed for table '/Root/table-1', ErrorCount# 1 2025-03-31T16:03:18.494326Z node 2 :KQP_COMPUTE DEBUG: kqp_scan_compute_actor.cpp:167 :TEvTerminateFromFetcher: [2:890:2720]/[2:888:2718] 2025-03-31T16:03:18.494433Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:888:2718], TxId: 281474976715662, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=Y2VhOTU1MDItNTY1YTU5ZGYtOGQ0MzE3MGUtZGI2M2FmZmY=. TraceId : 01jqpf2raf9q64m14587pnjcvr. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. InternalError: SCHEME_ERROR KIKIMR_SCHEME_MISMATCH: {
: Error: Table '/Root/table-1' scheme changed., code: 2028 }. 2025-03-31T16:03:18.494602Z node 2 :KQP_COMPUTE DEBUG: TxId: 281474976715662, task: 1. pass away 2025-03-31T16:03:18.494695Z node 2 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715662;task_id=1;success=0;message={
: Error: COMPUTE_STATE_FAILURE }; 2025-03-31T16:03:18.497170Z node 2 :KQP_RESOURCE_MANAGER DEBUG: TxId: 281474976715662, taskId: 1. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2025-03-31T16:03:18.497330Z node 2 :KQP_COMPUTE DEBUG: fline=kqp_scan_fetcher_actor.cpp:100;event=TEvTerminateFromCompute;sender=[2:888:2718];info={
: Error: COMPUTE_STATE_FAILURE }; 2025-03-31T16:03:18.497405Z node 2 :KQP_COMPUTE DEBUG: fline=kqp_scan_compute_manager.h:289;event=abort_all_scanners;error_message=Send abort execution from compute actor, message: {
: Error: COMPUTE_STATE_FAILURE }; 2025-03-31T16:03:18.497584Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Schedule publish at 1970-01-01T00:00:04.000000Z, after 1.550000s 2025-03-31T16:03:18.497816Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:883:2692] TxId: 281474976715662. Ctx: { TraceId: 01jqpf2raf9q64m14587pnjcvr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Y2VhOTU1MDItNTY1YTU5ZGYtOGQ0MzE3MGUtZGI2M2FmZmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:888:2718], task: 1, state: COMPUTE_STATE_FAILURE, stats: { CpuTimeUs: 250552 Tasks { TaskId: 1 CpuTimeUs: 245318 Tables { TablePath: "/Root/table-1" } ComputeCpuTimeUs: 13 BuildCpuTimeUs: 245305 Sources { IngressName: "CS" Ingress { } } HostName: "ghrun-tuhoqst4xe" NodeId: 2 CreateTimeMs: 1743436997676 } MaxMemoryUsage: 1048576 } 2025-03-31T16:03:18.497914Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715662. Ctx: { TraceId: 01jqpf2raf9q64m14587pnjcvr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Y2VhOTU1MDItNTY1YTU5ZGYtOGQ0MzE3MGUtZGI2M2FmZmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:888:2718] 2025-03-31T16:03:18.498018Z node 2 :KQP_EXECUTER INFO: ActorId: [2:883:2692] TxId: 281474976715662. Ctx: { TraceId: 01jqpf2raf9q64m14587pnjcvr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Y2VhOTU1MDItNTY1YTU5ZGYtOGQ0MzE3MGUtZGI2M2FmZmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. task: 1, does not have the CA id yet or is already complete 2025-03-31T16:03:18.498109Z node 2 :KQP_EXECUTER INFO: ActorId: [2:883:2692] TxId: 281474976715662. Ctx: { TraceId: 01jqpf2raf9q64m14587pnjcvr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Y2VhOTU1MDItNTY1YTU5ZGYtOGQ0MzE3MGUtZGI2M2FmZmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. aborting compute actor execution, message: {
: Error: Terminate execution }, compute actor: [2:889:2719], task: 2 2025-03-31T16:03:18.498264Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:883:2692] TxId: 281474976715662. Ctx: { TraceId: 01jqpf2raf9q64m14587pnjcvr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Y2VhOTU1MDItNTY1YTU5ZGYtOGQ0MzE3MGUtZGI2M2FmZmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-03-31T16:03:18.498479Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Y2VhOTU1MDItNTY1YTU5ZGYtOGQ0MzE3MGUtZGI2M2FmZmY=, ActorId: [2:857:2692], ActorState: ExecuteState, TraceId: 01jqpf2raf9q64m14587pnjcvr, Create QueryResponse for error on request, msg: 2025-03-31T16:03:18.498896Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:889:2719], TxId: 281474976715662, task: 2. Ctx: { SessionId : ydb://session/3?node_id=2&id=Y2VhOTU1MDItNTY1YTU5ZGYtOGQ0MzE3MGUtZGI2M2FmZmY=. CustomerSuppliedId : . TraceId : 01jqpf2raf9q64m14587pnjcvr. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646735 2025-03-31T16:03:18.498995Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:889:2719], TxId: 281474976715662, task: 2. Ctx: { SessionId : ydb://session/3?node_id=2&id=Y2VhOTU1MDItNTY1YTU5ZGYtOGQ0MzE3MGUtZGI2M2FmZmY=. CustomerSuppliedId : . TraceId : 01jqpf2raf9q64m14587pnjcvr. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Handle abort execution event from: [2:883:2692], status: SCHEME_ERROR, reason: {
: Error: Terminate execution } 2025-03-31T16:03:18.500958Z node 2 :KQP_COMPUTE DEBUG: TxId: 281474976715662, task: 2. pass away 2025-03-31T16:03:18.501079Z node 2 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715662;task_id=2;success=0;message={
: Error: COMPUTE_STATE_FAILURE }; 2025-03-31T16:03:18.506731Z node 2 :KQP_RESOURCE_MANAGER DEBUG: TxId: 281474976715662, taskId: 2. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2025-03-31T16:03:18.506992Z node 2 :TX_PROXY DEBUG: actor# [2:59:2106] Handle TEvExecuteKqpTransaction 2025-03-31T16:03:18.507049Z node 2 :TX_PROXY DEBUG: actor# [2:59:2106] TxId# 281474976715664 ProcessProposeKqpTransaction 2025-03-31T16:03:18.507666Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 2000, txId: 281474976715661] shutting down 2025-03-31T16:03:18.507763Z node 2 :TX_PROXY DEBUG: actor# [2:59:2106] Handle TEvProposeTransaction 2025-03-31T16:03:18.507809Z node 2 :TX_PROXY DEBUG: actor# [2:59:2106] TxId# 0 ProcessProposeTransaction 2025-03-31T16:03:18.507914Z node 2 :TX_PROXY DEBUG: actor# [2:59:2106] Cookie# 0 userReqId# "" txid# 0 reqId# [2:925:2751] SnapshotReq marker# P0 2025-03-31T16:03:18.508468Z node 2 :TX_PROXY DEBUG: Actor# [2:927:2751] txid# 0 HANDLE EvNavigateKeySetResult TResolveTablesActor marker# P1 ErrorCount# 0 2025-03-31T16:03:18.508707Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715664. Resolved key sets: 0 2025-03-31T16:03:18.508799Z node 2 :TX_PROXY DEBUG: Actor# [2:927:2751] txid# 0 HANDLE EvResolveKeySetResult TResolveTablesActor marker# P2 ErrorCount# 0 2025-03-31T16:03:18.508923Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jqpf2raf9q64m14587pnjcvr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Y2VhOTU1MDItNTY1YTU5ZGYtOGQ0MzE3MGUtZGI2M2FmZmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:03:18.508979Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715664. Ctx: { TraceId: 01jqpf2raf9q64m14587pnjcvr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Y2VhOTU1MDItNTY1YTU5ZGYtOGQ0MzE3MGUtZGI2M2FmZmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, snapshot: {0, 0} 2025-03-31T16:03:18.509042Z node 2 :KQP_EXECUTER INFO: ActorId: [2:924:2692] TxId: 281474976715664. Ctx: { TraceId: 01jqpf2raf9q64m14587pnjcvr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Y2VhOTU1MDItNTY1YTU5ZGYtOGQ0MzE3MGUtZGI2M2FmZmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-03-31T16:03:18.509156Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:924:2692] TxId: 281474976715664. Ctx: { TraceId: 01jqpf2raf9q64m14587pnjcvr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Y2VhOTU1MDItNTY1YTU5ZGYtOGQ0MzE3MGUtZGI2M2FmZmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-03-31T16:03:18.509242Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:924:2692] TxId: 281474976715664. Ctx: { TraceId: 01jqpf2raf9q64m14587pnjcvr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Y2VhOTU1MDItNTY1YTU5ZGYtOGQ0MzE3MGUtZGI2M2FmZmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-03-31T16:03:18.509326Z node 2 :TX_PROXY DEBUG: Actor# [2:925:2751] SEND TEvDiscardVolatileSnapshotRequest to datashard 72075186224037888 marker# P3 2025-03-31T16:03:18.509645Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 3, sender: [2:595:2520], selfId: [2:57:2104], source: [2:857:2692] 2025-03-31T16:03:18.509886Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:925:2751], Recipient [2:668:2572]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715661 2025-03-31T16:03:18.510742Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=Y2VhOTU1MDItNTY1YTU5ZGYtOGQ0MzE3MGUtZGI2M2FmZmY=, workerId: [2:857:2692], local sessions count: 0
Response { QueryIssues { message: "Table \'/Root/table-1\' scheme changed." issue_code: 2028 severity: 1 } QueryIssues { message: "Query invalidated on scheme/internal error during Scan execution" issue_code: 2019 severity: 1 } TxMeta { } }
YdbStatus: ABORTED
ConsumedRu: 569
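The ABORTED status with issue codes 2028 ("Table '/Root/table-1' scheme changed.") and 2019 ("Query invalidated on scheme/internal error during Scan execution") is the outcome this scenario drives: a concurrent schema change invalidates the in-flight scan, and the query is aborted rather than returning results against a stale schema. Application code is expected to retry such statuses. A minimal sketch of that retry pattern with the public YDB Python SDK; the endpoint, database, and query here are placeholders, not values taken from this test:

```python
import ydb

# Placeholders: this test runs against an in-process cluster, not a real endpoint.
driver_config = ydb.DriverConfig(endpoint="grpc://localhost:2136", database="/Root")

with ydb.Driver(driver_config) as driver:
    driver.wait(timeout=5)
    with ydb.SessionPool(driver) as pool:

        def callee(session):
            # Re-executed on retryable statuses such as ABORTED (e.g. issue 2028,
            # "scheme changed"); each attempt gets a fresh session from the pool.
            return session.transaction().execute(
                "SELECT COUNT(*) FROM `table-1`;",
                commit_tx=True,
            )

        result_sets = pool.retry_operation_sync(callee)
```

The SDK's default retry policy treats ABORTED as retryable, so a scheme-change abort like the one above reaches the application only once retries are exhausted.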
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::BackupUuidColumn[Zstd] [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-31T16:03:17.604632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:03:17.604760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:03:17.604801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:03:17.604839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:03:17.604884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:03:17.604912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:03:17.604978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:03:17.605078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:03:17.605478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:03:17.733859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:03:17.733927Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:17.745826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:03:17.746315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:03:17.746510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:03:17.775742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:03:17.776099Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:03:17.776898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:17.777337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:03:17.784225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:17.785687Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:17.785785Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:17.785892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:03:17.785937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:17.785973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:03:17.786212Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:03:17.804529Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-03-31T16:03:17.957191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:03:17.957480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:17.957720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:03:17.958041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:03:17.958121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:17.964856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:17.965031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:03:17.965299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:17.965382Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState,
operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:03:17.965421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:03:17.965488Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:03:17.968544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:17.968616Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:03:17.968656Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:03:17.970932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:17.971000Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:17.971077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:17.971157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:03:17.974719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:03:17.978457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:03:17.978687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:03:17.979862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:17.980037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:03:17.980130Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:17.980480Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:03:17.980548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:17.980738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:03:17.980839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:03:17.983480Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 
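A reading aid for the `Change state for txid N:M A -> B` entries above: the numeric codes line up with the execution-unit names logged next to them (2 with `TCreateParts`, 3 with `TConfigureParts`, 128 with `TPropose`, 129 with `TProposedWaitParts`, 240 with `TDone`). A small helper for annotating such traces; the mapping is inferred from the entries in this log, not taken from the schemeshard sources:

```python
import re

# Phase names inferred from the unit names that appear alongside these
# codes in this trace; not taken from the schemeshard sources.
TX_STATES = {
    "2": "CreateParts",
    "3": "ConfigureParts",
    "128": "Propose",
    "129": "ProposedWaitParts",
    "240": "Done",
}

STATE_RE = re.compile(r"Change state for txid \S+ (\d+) -> (\d+)")

def annotate(line: str) -> str:
    """Append human-readable phase names to state-transition log entries."""
    m = STATE_RE.search(line)
    if not m:
        return line
    src, dst = (TX_STATES.get(g, "?") for g in m.groups())
    return f"{line}  [{src} -> {dst}]"
```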
2025-03-31T16:03:17.983547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:17.983772Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:17.983824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:03:17.984278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:17.984331Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:03:17.984453Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:03:17.984488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:17.984530Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:03:17.984561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:17.984595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:03:17.984641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:17.984677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:03:17.984715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:03:17.984810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:03:17.984855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:03:17.984898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:03:17.987072Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:03:17.987246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:03:17.987317Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
INFO: TBackup TPropose, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:03:18.356619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2025-03-31T16:03:18.356791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:03:18.358797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-03-31T16:03:18.358944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2025-03-31T16:03:18.359675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:18.359802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:03:18.359881Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000003, at schemeshard: 72057594046678944 2025-03-31T16:03:18.360039Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2025-03-31T16:03:18.360177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-31T16:03:18.403950Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Bootstrap: self# [1:416:2387], attempt# 0 2025-03-31T16:03:18.440347Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:416:2387], sender# [1:415:2385] FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-03-31T16:03:18.447941Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:18.447997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T16:03:18.448238Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:18.448279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-03-31T16:03:18.448845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:03:18.448902Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 2025-03-31T16:03:18.449561Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle 
TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:03:18.449660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:03:18.449691Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-03-31T16:03:18.449727Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-03-31T16:03:18.449768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-31T16:03:18.449844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-03-31T16:03:18.453035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:3828 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 993E5FE5-BB95-43E0-80DE-07293EB7FBEB amz-sdk-request: attempt=1 content-length: 61 content-md5: 5ZuHSMjV1bVKZhThhMGD5g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 61 2025-03-31T16:03:18.454872Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:416:2387], result# PutObjectResult { ETag: e59b8748c8d5d5b54a6614e184c183e6 } REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:3828 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: EED4875C-572F-410B-88FD-F0997721B3B3 amz-sdk-request: attempt=1 content-length: 357 content-md5: IxJB3qM/y2xlsv8qcwTF7g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2025-03-31T16:03:18.464742Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:416:2387], result# PutObjectResult { ETag: 231241dea33fcb6c65b2ff2a7304c5ee } 2025-03-31T16:03:18.464847Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:415:2385] 2025-03-31T16:03:18.465077Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:416:2387], sender# [1:415:2385], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } REQUEST: PUT /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:3828 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: B2834E67-064D-42AF-A492-F148AD767CF1 amz-sdk-request: attempt=1 content-length: 40 content-md5: LXbLDYru8NmFsYXNSXjnpQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv.zst / / 40 2025-03-31T16:03:18.479350Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData 
TEvExternalStorage::TEvPutObjectResponse: self# [1:416:2387], result# PutObjectResult { ETag: 2d76cb0d8aeef0d985b185cd4978e7a5 } 2025-03-31T16:03:18.479440Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:416:2387], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-03-31T16:03:18.479633Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:415:2385], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-03-31T16:03:18.489924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 309 RawX2: 4294969592 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2025-03-31T16:03:18.490008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-03-31T16:03:18.490192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 309 RawX2: 4294969592 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2025-03-31T16:03:18.490295Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 309 RawX2: 4294969592 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2025-03-31T16:03:18.490363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:18.490423Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:03:18.490464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-03-31T16:03:18.490516Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-03-31T16:03:18.490693Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:18.492928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:03:18.493332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:03:18.493400Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-03-31T16:03:18.493507Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-31T16:03:18.493606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T16:03:18.493659Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 
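The three PUT requests above show the object layout a single-shard table export produces on the S3 side: `metadata.json`, `scheme.pb`, and one `data_00.csv.zst` chunk, each acknowledged with an ETag by the in-process S3 mock. A quick way to inspect such a backup with any S3-compatible client; a sketch using boto3, where the endpoint, bucket name, and credentials are placeholders rather than values from the test:

```python
import boto3

# Placeholders: the unit test talks to an in-process S3 mock (here on
# localhost:3828); bucket and credentials are illustrative only.
s3 = boto3.client(
    "s3",
    endpoint_url="http://localhost:3828",
    aws_access_key_id="test",
    aws_secret_access_key="test",
)

resp = s3.list_objects_v2(Bucket="export-bucket")
for obj in resp.get("Contents", []):
    # Expected keys: metadata.json, scheme.pb, data_00.csv.zst
    print(obj["Key"], obj["Size"], obj["ETag"])
```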
2025-03-31T16:03:18.493695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T16:03:18.493732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-03-31T16:03:18.493801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:337:2316] message: TxId: 102 2025-03-31T16:03:18.493851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T16:03:18.493890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-03-31T16:03:18.493926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-03-31T16:03:18.494048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-31T16:03:18.496188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-31T16:03:18.496239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:399:2371]
TestWaitNotification: OK eventTxId 102
>> TPartBtreeIndexIteration::FewNodes_Sticky [GOOD]
>> TPartBtreeIndexIteration::FewNodes_Slices
>> KqpQuery::OltpCreateAsSelect_Disable [GOOD]
>> BsControllerConfig::DeleteStoragePool [GOOD]
>> TSubscriberTest::SyncWithOutdatedReplica
>> TSubscriberTest::ReconnectOnFailure
>> KqpStats::DeferredEffects-UseSink [GOOD]
>> TSubscriberCombinationsTest::MigratedPathRecreation
>> TFlatTableExecutor_IndexLoading::Scan_Groups_BTreeIndex [GOOD]
>> TSchemeShardUserAttrsTest::UserConditionsAtAlter [GOOD]
>> TFlatTableExecutor_IndexLoading::Scan_Groups_BTreeIndex_Empty
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_ReproducerDelayData1 [GOOD]
Test command err:
2025-03-31T16:03:02.967288Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:03:02.968471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:03:02.968534Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:02.978190Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:03:02.978702Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:131:2154] 2025-03-31T16:03:02.978988Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:03:02.989765Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:03:03.037418Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:03:03.037739Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:03:03.039436Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-03-31T16:03:03.039552Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-03-31T16:03:03.039695Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-03-31T16:03:03.040082Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:03:03.040855Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:03:03.040949Z
node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:197:2154] in generation 2 2025-03-31T16:03:03.117172Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:03:03.164781Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-03-31T16:03:03.164978Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:03:03.165089Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-03-31T16:03:03.165125Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-03-31T16:03:03.165161Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-03-31T16:03:03.165198Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:03:03.165444Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:03:03.165484Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:03:03.165761Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-03-31T16:03:03.165850Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-03-31T16:03:03.165898Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-31T16:03:03.165932Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:03:03.165968Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-03-31T16:03:03.166001Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-31T16:03:03.166042Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-31T16:03:03.166071Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-03-31T16:03:03.166108Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:03:03.166200Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:03:03.166249Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:03:03.166305Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-03-31T16:03:03.173297Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:131:2154]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nK\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\n \000Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-03-31T16:03:03.173387Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T16:03:03.173485Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-31T16:03:03.173649Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-03-31T16:03:03.173691Z node 1 :TX_DATASHARD DEBUG: 
Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-03-31T16:03:03.173743Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-03-31T16:03:03.173784Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-31T16:03:03.173831Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-03-31T16:03:03.173875Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-03-31T16:03:03.173924Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-03-31T16:03:03.174234Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-03-31T16:03:03.174304Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-03-31T16:03:03.174342Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-03-31T16:03:03.174372Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-03-31T16:03:03.174411Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-03-31T16:03:03.174439Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-03-31T16:03:03.174543Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-03-31T16:03:03.174578Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-03-31T16:03:03.174604Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-03-31T16:03:03.195723Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-03-31T16:03:03.195815Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-03-31T16:03:03.195852Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-03-31T16:03:03.195912Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-03-31T16:03:03.195989Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-03-31T16:03:03.196500Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:03:03.196563Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:03:03.196608Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-03-31T16:03:03.196738Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:131:2154]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-03-31T16:03:03.196767Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-31T16:03:03.196894Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-03-31T16:03:03.196937Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-31T16:03:03.196972Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-03-31T16:03:03.197003Z node 1 
:TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-03-31T16:03:03.207115Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-03-31T16:03:03.207215Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:03:03.207475Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:03:03.207518Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:03:03.207575Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-31T16:03:03.207620Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:03:03.207657Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-31T16:03:03.207699Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-03-31T16:03:03.207735Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-03-31T16:03:03.207778Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-31T16:03:03.207820Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-03-31T16:03:03.207857Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-03-31T16:03:03.207905Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-03-31T16:03:03.208117Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-03-31T16:03:03.208162Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-31T16:03:03.208214Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-03-31T16:03:03.208244Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-03-31T16:03:03.208269Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-03-31T16:03:03.208343Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-31T16:03:03.208370Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-03-31T16:03:03.208400Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-31T16:03:03.208438Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-03-31T16:03:03.208507Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-03-31T16:03:03.208544Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-03-31T16:03:03.208577Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-03-31T16:03:03.208614Z node 1 :TX_D ... 
eady operations at 9437184 2025-03-31T16:03:19.271561Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:03:19.271655Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:506] at 9437184 on unit CompleteOperation 2025-03-31T16:03:19.271750Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 506] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 2 ms, propose latency: 4 ms 2025-03-31T16:03:19.271842Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 506 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 1} 2025-03-31T16:03:19.271889Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:03:19.272091Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-03-31T16:03:19.272154Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:03:19.272185Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:507] at 9437184 on unit CompleteOperation 2025-03-31T16:03:19.272226Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 507] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:03:19.272271Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 507 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2025-03-31T16:03:19.272297Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:03:19.272413Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-03-31T16:03:19.272437Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-03-31T16:03:19.272456Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:03:19.272479Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:508] at 9437184 on unit CompleteOperation 2025-03-31T16:03:19.272525Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 508] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:03:19.272572Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 508 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 3} 2025-03-31T16:03:19.272596Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:03:19.272698Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-03-31T16:03:19.272719Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-03-31T16:03:19.272737Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-03-31T16:03:19.272757Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:03:19.272781Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:509] at 9437184 on unit CompleteOperation 2025-03-31T16:03:19.272831Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 509] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:03:19.272882Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 509 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2025-03-31T16:03:19.272910Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 
2025-03-31T16:03:19.273020Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-03-31T16:03:19.273043Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:03:19.273093Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:510] at 9437184 on unit CompleteOperation 2025-03-31T16:03:19.273129Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 510] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:03:19.273190Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 510 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2025-03-31T16:03:19.273216Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:03:19.273310Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:03:19.273333Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:511] at 9437184 on unit CompleteOperation 2025-03-31T16:03:19.273421Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 511] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:03:19.273464Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 511 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2025-03-31T16:03:19.273492Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:03:19.273599Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:03:19.273623Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:512] at 9437184 on unit CompleteOperation 2025-03-31T16:03:19.273655Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 512] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:03:19.273691Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 512 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2025-03-31T16:03:19.273738Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:03:19.273838Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:03:19.273883Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:516] at 9437184 on unit FinishPropose 2025-03-31T16:03:19.273927Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 516 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: COMPLETE 2025-03-31T16:03:19.274002Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:03:19.274136Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:03:19.274178Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:514] at 9437184 on unit CompleteOperation 2025-03-31T16:03:19.274215Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 514] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 2 ms, propose latency: 4 ms 2025-03-31T16:03:19.274255Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 514 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2025-03-31T16:03:19.274286Z node 1 :TX_DATASHARD DEBUG: 
[CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:03:19.274392Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:03:19.274413Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:515] at 9437184 on unit CompleteOperation 2025-03-31T16:03:19.274442Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 515] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:03:19.274465Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:03:19.274657Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:234:2227], Recipient [1:345:2313]: {TEvReadSet step# 1000005 txid# 506 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 1} 2025-03-31T16:03:19.274700Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:03:19.274792Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 506 2025-03-31T16:03:19.275866Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:234:2227], Recipient [1:345:2313]: {TEvReadSet step# 1000005 txid# 507 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2025-03-31T16:03:19.275918Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:03:19.275946Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 507 2025-03-31T16:03:19.276151Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:234:2227], Recipient [1:345:2313]: {TEvReadSet step# 1000005 txid# 508 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 3} 2025-03-31T16:03:19.276197Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:03:19.276226Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 508 2025-03-31T16:03:19.276354Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:234:2227], Recipient [1:345:2313]: {TEvReadSet step# 1000005 txid# 509 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2025-03-31T16:03:19.276384Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:03:19.276406Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 509 2025-03-31T16:03:19.276547Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:234:2227], Recipient [1:345:2313]: {TEvReadSet step# 1000005 txid# 510 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2025-03-31T16:03:19.276581Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:03:19.276608Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 510 2025-03-31T16:03:19.276754Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:234:2227], Recipient [1:345:2313]: {TEvReadSet step# 1000005 txid# 511 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2025-03-31T16:03:19.276782Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 
2025-03-31T16:03:19.276805Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 511 2025-03-31T16:03:19.276906Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:234:2227], Recipient [1:345:2313]: {TEvReadSet step# 1000005 txid# 512 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2025-03-31T16:03:19.276932Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:03:19.276955Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 512 2025-03-31T16:03:19.277077Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:234:2227], Recipient [1:345:2313]: {TEvReadSet step# 1000005 txid# 514 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2025-03-31T16:03:19.277103Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:03:19.277126Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 514
expect 5 6 - 6 6 7 - - - - - - - - - - - - - - - - - - - - - - - - - -
actual 5 6 - 6 6 7 - - - - - - - - - - - - - - - - - - - - - - - - - -
interm 5 6 - 6 6 - - - - - - - - - - - - - - - - - - - - - - - - - - -
>> TFlatTableExecutor_IndexLoading::Scan_Groups_BTreeIndex_Empty [GOOD]
>> TFlatTableExecutor_KeepEraseMarkers::TestKeepEraseMarkers [GOOD]
>> TFlatTableExecutor_LongTx::MemTableLongTx [GOOD]
>> TFlatTableExecutor_LongTx::CompactUncommittedLongTx
>> TSequence::CreateSequenceParallel
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnSingleShardTable[Raw] [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:03:18.520285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:03:18.520414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:03:18.520458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:03:18.520492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:03:18.520537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:03:18.520567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:03:18.520627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:03:18.520705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s,
CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:03:18.521104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:03:18.700483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:03:18.700564Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:18.717278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:03:18.717420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:03:18.717663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:03:18.726458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:03:18.726750Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:03:18.727740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:18.728050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:03:18.730576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:18.732209Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:18.732309Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:18.732544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:03:18.732604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:18.732648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:03:18.732855Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:03:18.751506Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:03:18.905546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:03:18.905847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:18.906087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:03:18.906360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:03:18.906430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:18.909611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 
72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:18.909809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:03:18.910040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:18.910109Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:03:18.910151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:03:18.910220Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:03:18.912842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:18.912914Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:03:18.912956Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:03:18.915500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:18.915557Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:18.915765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:18.915828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:03:18.920684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:03:18.923326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:03:18.923612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:03:18.926402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:18.926583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:03:18.926652Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:18.927025Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:03:18.927106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:18.927336Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:03:18.927438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:03:18.930942Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:18.931025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:18.931257Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:18.931302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:03:18.931556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:18.931612Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:03:18.931727Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:03:18.931762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:18.931814Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:03:18.931854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:18.931898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:03:18.931948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:18.931990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:03:18.932024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:03:18.932107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:03:18.932148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:03:18.932180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:03:18.934773Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:03:18.934956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:03:18.934995Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
SHARD INFO: TBackup TPropose, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:03:19.457767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2025-03-31T16:03:19.457969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:03:19.468048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-03-31T16:03:19.468227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2025-03-31T16:03:19.468722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:19.468865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:03:19.468924Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000003, at schemeshard: 72057594046678944 2025-03-31T16:03:19.469069Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2025-03-31T16:03:19.469209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-31T16:03:19.475635Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Bootstrap: self# [1:414:2385], attempt# 0 2025-03-31T16:03:19.616530Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:414:2385], sender# [1:413:2382] REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:11854 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: B5C76CBB-69B9-4841-BBEB-ECE2743E08BC amz-sdk-request: attempt=1 content-length: 61 content-md5: 5ZuHSMjV1bVKZhThhMGD5g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 61 2025-03-31T16:03:19.635924Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:414:2385], result# PutObjectResult { ETag: e59b8748c8d5d5b54a6614e184c183e6 } FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-03-31T16:03:19.638345Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:19.638433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, 
LocalPathId: 2] 2025-03-31T16:03:19.638792Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:19.638852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-03-31T16:03:19.639581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:03:19.639646Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 2025-03-31T16:03:19.640973Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:03:19.641081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:03:19.641118Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-03-31T16:03:19.641183Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-03-31T16:03:19.641232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-31T16:03:19.641352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:11854 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: A4A708D9-B12A-4F6D-88F8-AB5020269CB6 amz-sdk-request: attempt=1 content-length: 357 content-md5: csvC5nqNTZsSLy4ymlp0/Q== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2025-03-31T16:03:19.645837Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:414:2385], result# PutObjectResult { ETag: 72cbc2e67a8d4d9b122f2e329a5a74fd } 2025-03-31T16:03:19.646533Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:413:2382] 2025-03-31T16:03:19.646659Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:414:2385], sender# [1:413:2382], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } REQUEST: PUT /data_00.csv HTTP/1.1 HEADERS: Host: localhost:11854 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 0321621D-D814-448C-BF3A-EE14C2962827 amz-sdk-request: attempt=1 content-length: 11 content-md5: bj4KQf2rit2DOGLxvSlUww== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv / / 11 2025-03-31T16:03:19.655426Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:414:2385], result# PutObjectResult { ETag: 6e3e0a41fdab8add833862f1bd2954c3 } 
2025-03-31T16:03:19.655510Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:414:2385], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-03-31T16:03:19.655927Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:413:2382], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-03-31T16:03:19.676122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-31T16:03:19.699222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 305 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-03-31T16:03:19.699303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-03-31T16:03:19.699489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 305 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-03-31T16:03:19.699596Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 305 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-03-31T16:03:19.699659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:19.699694Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:03:19.699746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-03-31T16:03:19.699786Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-03-31T16:03:19.699974Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:19.708734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:03:19.708957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:03:19.709021Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-03-31T16:03:19.709136Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-31T16:03:19.709171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T16:03:19.709237Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 
progress is 1/1 2025-03-31T16:03:19.709280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T16:03:19.709315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-03-31T16:03:19.709420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:333:2312] message: TxId: 102 2025-03-31T16:03:19.709484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T16:03:19.709522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-03-31T16:03:19.709552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-03-31T16:03:19.709691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-31T16:03:19.713001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-31T16:03:19.713065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:397:2369] TestWaitNotification: OK eventTxId 102 >> TFlatTableExecutor_LongTx::CompactUncommittedLongTx [GOOD] >> TFlatTableExecutor_LongTx::CompactCommittedLongTx [GOOD] >> TFlatTableExecutor_LongTx::CompactedLongTxRestart >> YdbSdkSessionsPool::StressTestAsync1 [GOOD] >> TSubscriberTest::SyncWithOutdatedReplica [GOOD] >> TSubscriberTest::ReconnectOnFailure [GOOD] >> TSequence::CreateSequence >> TFlatTableExecutor_LongTx::CompactedLongTxRestart [GOOD] >> TFlatTableExecutor_LongTx::CompactMultipleChanges [GOOD] >> TFlatTableExecutor_LongTx::LongTxBorrow >> TSubscriberTest::NotifyDelete >> TFlatTableExecutor_LongTx::LongTxBorrow [GOOD] >> TFlatTableExecutor_LongTx::MemTableLongTxRead >> THiveTest::TestCreateAndDeleteTabletWithStoragePools [GOOD] >> THiveTest::TestCreateAndReassignTabletWithStoragePools >> TSubscriberCombinationsTest::MigratedPathRecreation [GOOD] >> TSubscriberTest::Boot ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLockOutOfOrder [GOOD] Test command err: 2025-03-31T16:03:06.894043Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-03-31T16:03:06.894307Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-31T16:03:06.894495Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0011c1/r3tmp/tmprW2Leg/pdisk_1.dat
2025-03-31T16:03:07.328723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480
2025-03-31T16:03:07.397165Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:03:07.440842Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:03:07.440996Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:03:07.456434Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-31T16:03:07.561089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480
2025-03-31T16:03:07.912798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
2025-03-31T16:03:08.254148Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:830:2682], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:03:08.254519Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:841:2687], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:03:08.254621Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:03:08.260715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480
2025-03-31T16:03:08.464266Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:844:2690], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking }
2025-03-31T16:03:08.582233Z node 1 :TX_PROXY ERROR: Actor# [1:904:2731] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-31T16:03:09.193692Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqpf2ftn23vd79tt18mppbn3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWE4ZGE0N2QtNGE5MTQyNTktNmQzYzgyYTktNDQ3ZGJjZjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:03:09.335971Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqpf2gs41f696k0s4gqt46b2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjkzMWZmZDYtMTg1ZjRlM2UtZmUzZGNmYzAtYmI4MzgxM2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:03:10.129363Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqpf2h5ece9grkfccxxgd7h7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmY5NDAwOS1iZTExOGZhMi03OGQ2NzAwZS01YWQyYjU4Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
{ items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } }
2025-03-31T16:03:10.580254Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jqpf2hz41p0e2b913dpqgbag, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmM5YjI0ZmItZjY3MTRjZmYtNWRiNmU1ZGMtNmYxNzFiMDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:03:10.710576Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jqpf2j43b2zkzxzn6pvb99v6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmY5NDAwOS1iZTExOGZhMi03OGQ2NzAwZS01YWQyYjU4Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:03:10.872368Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jqpf2j8c6xcz55497tdbjyws, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmY5NDAwOS1iZTExOGZhMi03OGQ2NzAwZS01YWQyYjU4Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:03:10.956343Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NmY5NDAwOS1iZTExOGZhMi03OGQ2NzAwZS01YWQyYjU4Yw==, ActorId: [1:967:2781], ActorState: ExecuteState, TraceId: 01jqpf2jcydm2mrsyk73p1zxcr, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken
2025-03-31T16:03:10.971768Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jqpf2jcydm2mrsyk73p1zxcr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmY5NDAwOS1iZTExOGZhMi03OGQ2NzAwZS01YWQyYjU4Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:03:15.460326Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:101:2147], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-03-31T16:03:15.460701Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
2025-03-31T16:03:15.460802Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0011c1/r3tmp/tmpVcnSQU/pdisk_1.dat
2025-03-31T16:03:15.819908Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480
2025-03-31T16:03:15.874816Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:03:15.924595Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:03:15.924737Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:03:15.936285Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-31T16:03:16.028897Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480
2025-03-31T16:03:16.331904Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
2025-03-31T16:03:16.638716Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:830:2682], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:03:16.638894Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:841:2687], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:03:16.639001Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:03:16.644709Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480
2025-03-31T16:03:16.804714Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:844:2690], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking }
2025-03-31T16:03:16.841993Z node 2 :TX_PROXY ERROR: Actor# [2:904:2731] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-31T16:03:16.903853Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqpf2r0x0wyeqcw9cr0kwmtx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NThjN2FjOTMtYmI4YjlkMWYtMzQxYmUzNTEtZGI2YzI0Mzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:03:17.011400Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqpf2ra1ayqasrjthtwbycj0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NmU5MzUxMjItOWZhOTRhMjktYjJmYWE3YTMtNGQwZmEwMGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
... performing the first select
2025-03-31T16:03:17.869761Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqpf2rkzc67j7fxmqxk8bb5e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjA2ZmM0Zi1kN2MxYjMyNi04ZmJlZWNkMy00MzliNGMwZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
{ items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } }
2025-03-31T16:03:18.331869Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jqpf2s96avh1t1be2p3vjtz7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Njc1YTg2ZDktNTZlNGM3N2MtODEwYTE0NWYtOGQ5ZjBkZmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
{ items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } }
... waiting for commit read sets
2025-03-31T16:03:18.448926Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jqpf2spae2g49gy5zt38k9tw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Njc1YTg2ZDktNTZlNGM3N2MtODEwYTE0NWYtOGQ5ZjBkZmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
... captured readset
... captured readset
... performing an upsert
2025-03-31T16:03:18.997333Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jqpf2t4ncqhw6jetzf17fbxa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDA3YzZkYWUtMWRhNzAyYjktY2I1OTY2MDUtN2QxOGUzYWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
... performing the second select
2025-03-31T16:03:19.160315Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jqpf2tbe4jqek0c23sgzma1e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjA2ZmM0Zi1kN2MxYjMyNi04ZmJlZWNkMy00MzliNGMwZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
... performing the third select
2025-03-31T16:03:19.258344Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jqpf2tfx40raf6heegb0cn0q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjA2ZmM0Zi1kN2MxYjMyNi04ZmJlZWNkMy00MzliNGMwZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
... performing the last upsert and commit
2025-03-31T16:03:19.322229Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MjA2ZmM0Zi1kN2MxYjMyNi04ZmJlZWNkMy00MzliNGMwZQ==, ActorId: [2:976:2780], ActorState: ExecuteState, TraceId: 01jqpf2tjy35nxmk0yrapjkfkz, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken
------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::DeleteStoragePool [GOOD]
Test command err:
Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:214:2066] recipient: [1:194:2076]
IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:214:2066] recipient: [1:194:2076]
Leader for TabletID 72057594037932033 is [1:216:2078] sender: [1:217:2066] recipient: [1:194:2076]
2025-03-31T16:02:59.046914Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot
2025-03-31T16:02:59.056112Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored
2025-03-31T16:02:59.056502Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started
2025-03-31T16:02:59.058763Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete
2025-03-31T16:02:59.059297Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig
2025-03-31T16:02:59.059949Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo
2025-03-31T16:02:59.059997Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:497} Handle TEvInterconnect::TEvNodesInfo
2025-03-31T16:02:59.060259Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute
2025-03-31T16:02:59.073915Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete
2025-03-31T16:02:59.074081Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx
2025-03-31T16:02:59.074279Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false
2025-03-31T16:02:59.074394Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots
2025-03-31T16:02:59.074506Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots
2025-03-31T16:02:59.074585Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion
Leader for TabletID 72057594037932033 is [1:216:2078] sender: [1:239:2066] recipient: [1:20:2067]
2025-03-31T16:02:59.091812Z node 1
:BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-03-31T16:02:59.103935Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-03-31T16:02:59.104049Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-03-31T16:02:59.104264Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-03-31T16:02:59.104327Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-03-31T16:02:59.104370Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-03-31T16:02:59.104419Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-03-31T16:02:59.115621Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-03-31T16:02:59.115768Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-03-31T16:02:59.127690Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-03-31T16:02:59.127843Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-03-31T16:02:59.129181Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:546} TTxLoadEverything Complete 2025-03-31T16:02:59.129243Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2167} LoadFinished 2025-03-31T16:02:59.129449Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-03-31T16:02:59.129504Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:551} TTxLoadEverything InitQueue processed 2025-03-31T16:02:59.160643Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {} Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:214:2066] recipient: [11:193:2076] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:214:2066] recipient: [11:193:2076] Leader for TabletID 72057594037932033 is [11:216:2078] sender: [11:217:2066] recipient: [11:193:2076] 2025-03-31T16:03:01.097979Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-03-31T16:03:01.098904Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-03-31T16:03:01.099191Z node 11 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-03-31T16:03:01.100464Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 
2025-03-31T16:03:01.100893Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-03-31T16:03:01.101457Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-03-31T16:03:01.101485Z node 11 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:497} Handle TEvInterconnect::TEvNodesInfo 2025-03-31T16:03:01.101701Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-03-31T16:03:01.135076Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-03-31T16:03:01.135258Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-03-31T16:03:01.135391Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-03-31T16:03:01.135490Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-03-31T16:03:01.135589Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-03-31T16:03:01.135660Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [11:216:2078] sender: [11:239:2066] recipient: [11:20:2067] 2025-03-31T16:03:01.147715Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-03-31T16:03:01.147864Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-03-31T16:03:01.159672Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-03-31T16:03:01.159820Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-03-31T16:03:01.159916Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-03-31T16:03:01.160012Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-03-31T16:03:01.160123Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-03-31T16:03:01.160173Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-03-31T16:03:01.160216Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-03-31T16:03:01.160270Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-03-31T16:03:01.171685Z node 11 
:BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-03-31T16:03:01.171852Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-03-31T16:03:01.183648Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-03-31T16:03:01.183796Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-03-31T16:03:01.185208Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:546} TTxLoadEverything Complete 2025-03-31T16:03:01.185277Z node 11 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2167} LoadFinished 2025-03-31T16:03:01.185519Z node 11 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-03-31T16:03:01.185573Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:551} TTxLoadEverything InitQueue processed 2025-03-31T16:03:01.186161Z node 11 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {} Leader for TabletID 72057594037932033 is [0:0:0] sender: [21:3014:2106] recipient: [21:2915:2116] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [21:3014:2106] recipient: [21:2915:2116] Leader for TabletID 72057594037932033 is [21:3016:2118] sender: [21:3017:2106] recipient: [21:2915:2116] 2025-03-31T16:03:03.950240Z node 21 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-03-31T16:03:03.951707Z node 21 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-03-31T16:03:03.951951Z n ... 
71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 96:1002 Path# /dev/disk3 2025-03-31T16:03:13.029921Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 97:1000 Path# /dev/disk1 2025-03-31T16:03:13.029945Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 97:1001 Path# /dev/disk2 2025-03-31T16:03:13.029969Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 97:1002 Path# /dev/disk3 2025-03-31T16:03:13.029991Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 98:1000 Path# /dev/disk1 2025-03-31T16:03:13.030013Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 98:1001 Path# /dev/disk2 2025-03-31T16:03:13.030036Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 98:1002 Path# /dev/disk3 2025-03-31T16:03:13.030060Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 99:1000 Path# /dev/disk1 2025-03-31T16:03:13.030082Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 99:1001 Path# /dev/disk2 2025-03-31T16:03:13.030105Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 99:1002 Path# /dev/disk3 2025-03-31T16:03:13.030128Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 100:1000 Path# /dev/disk1 2025-03-31T16:03:13.030151Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 100:1001 Path# /dev/disk2 2025-03-31T16:03:13.030175Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 100:1002 Path# /dev/disk3 2025-03-31T16:03:13.030199Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 101:1000 Path# /dev/disk1 2025-03-31T16:03:13.030227Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 101:1001 Path# /dev/disk2 2025-03-31T16:03:13.030251Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 101:1002 Path# /dev/disk3 2025-03-31T16:03:13.030273Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 102:1000 Path# /dev/disk1 2025-03-31T16:03:13.030298Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 102:1001 Path# /dev/disk2 2025-03-31T16:03:13.030321Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 102:1002 Path# /dev/disk3 2025-03-31T16:03:13.030345Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 103:1000 Path# /dev/disk1 2025-03-31T16:03:13.030368Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 103:1001 Path# /dev/disk2 2025-03-31T16:03:13.030390Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 103:1002 Path# /dev/disk3 2025-03-31T16:03:13.030412Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 104:1000 Path# /dev/disk1 2025-03-31T16:03:13.030435Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 104:1001 Path# /dev/disk2 2025-03-31T16:03:13.030458Z node 71 :BS_CONTROLLER NOTICE: 
{BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 104:1002 Path# /dev/disk3 2025-03-31T16:03:13.030481Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 105:1000 Path# /dev/disk1 2025-03-31T16:03:13.030504Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 105:1001 Path# /dev/disk2 2025-03-31T16:03:13.030526Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 105:1002 Path# /dev/disk3 2025-03-31T16:03:13.030549Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 106:1000 Path# /dev/disk1 2025-03-31T16:03:13.030572Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 106:1001 Path# /dev/disk2 2025-03-31T16:03:13.030594Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 106:1002 Path# /dev/disk3 2025-03-31T16:03:13.030619Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 107:1000 Path# /dev/disk1 2025-03-31T16:03:13.030641Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 107:1001 Path# /dev/disk2 2025-03-31T16:03:13.030665Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 107:1002 Path# /dev/disk3 2025-03-31T16:03:13.030693Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 108:1000 Path# /dev/disk1 2025-03-31T16:03:13.030717Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 108:1001 Path# /dev/disk2 2025-03-31T16:03:13.030743Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 108:1002 Path# /dev/disk3 2025-03-31T16:03:13.030764Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 109:1000 Path# /dev/disk1 2025-03-31T16:03:13.030786Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 109:1001 Path# /dev/disk2 2025-03-31T16:03:13.030808Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 109:1002 Path# /dev/disk3 2025-03-31T16:03:13.030853Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 110:1000 Path# /dev/disk1 2025-03-31T16:03:13.030879Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 110:1001 Path# /dev/disk2 2025-03-31T16:03:13.030905Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 110:1002 Path# /dev/disk3 2025-03-31T16:03:13.030929Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 111:1000 Path# /dev/disk1 2025-03-31T16:03:13.030954Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 111:1001 Path# /dev/disk2 2025-03-31T16:03:13.030979Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 111:1002 Path# /dev/disk3 2025-03-31T16:03:13.031002Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 112:1000 Path# /dev/disk1 2025-03-31T16:03:13.031026Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 112:1001 Path# /dev/disk2 2025-03-31T16:03:13.031050Z node 71 :BS_CONTROLLER NOTICE: 
{BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 112:1002 Path# /dev/disk3 2025-03-31T16:03:13.031072Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 113:1000 Path# /dev/disk1 2025-03-31T16:03:13.031378Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 113:1001 Path# /dev/disk2 2025-03-31T16:03:13.031446Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 113:1002 Path# /dev/disk3 2025-03-31T16:03:13.031480Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 114:1000 Path# /dev/disk1 2025-03-31T16:03:13.031509Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 114:1001 Path# /dev/disk2 2025-03-31T16:03:13.031542Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 114:1002 Path# /dev/disk3 2025-03-31T16:03:13.031570Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 115:1000 Path# /dev/disk1 2025-03-31T16:03:13.031600Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 115:1001 Path# /dev/disk2 2025-03-31T16:03:13.031625Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 115:1002 Path# /dev/disk3 2025-03-31T16:03:13.031664Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 116:1000 Path# /dev/disk1 2025-03-31T16:03:13.031690Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 116:1001 Path# /dev/disk2 2025-03-31T16:03:13.031719Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 116:1002 Path# /dev/disk3 2025-03-31T16:03:13.031756Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 117:1000 Path# /dev/disk1 2025-03-31T16:03:13.031795Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 117:1001 Path# /dev/disk2 2025-03-31T16:03:13.031832Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 117:1002 Path# /dev/disk3 2025-03-31T16:03:13.031862Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 118:1000 Path# /dev/disk1 2025-03-31T16:03:13.031887Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 118:1001 Path# /dev/disk2 2025-03-31T16:03:13.031912Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 118:1002 Path# /dev/disk3 2025-03-31T16:03:13.031940Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 119:1000 Path# /dev/disk1 2025-03-31T16:03:13.031972Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 119:1001 Path# /dev/disk2 2025-03-31T16:03:13.032004Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 119:1002 Path# /dev/disk3 2025-03-31T16:03:13.032037Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 120:1000 Path# /dev/disk1 2025-03-31T16:03:13.032064Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 120:1001 Path# /dev/disk2 2025-03-31T16:03:13.032089Z node 71 :BS_CONTROLLER NOTICE: 
{BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 120:1002 Path# /dev/disk3 2025-03-31T16:03:13.050964Z node 71 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "storage pool 1" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 50 PDiskFilter { Property { Type: ROT } } } } } 2025-03-31T16:03:13.222430Z node 71 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.171583s 2025-03-31T16:03:13.222554Z node 71 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.171732s 2025-03-31T16:03:13.257345Z node 71 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineStoragePool { BoxId: 1 StoragePoolId: 2 Name: "storage pool 2" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 50 PDiskFilter { Property { Type: SSD } } } } Command { DeleteStoragePool { BoxId: 1 StoragePoolId: 2 ItemConfigGeneration: 1 } } } 2025-03-31T16:03:13.350838Z node 71 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DeleteStoragePool { BoxId: 1 StoragePoolId: 1 ItemConfigGeneration: 1 } } Command { QueryBaseConfig { } } } >> TFlatTableExecutor_LongTx::MemTableLongTxRead [GOOD] >> TFlatTableExecutor_LongTx::CompactedTxIdReuse >> TSubscriberTest::InvalidNotification >> TFlatTableExecutor_LongTx::CompactedTxIdReuse [GOOD] >> TFlatTableExecutor_LongTx::MergeSkewedCommitted >> TSubscriberTest::SyncPartial >> TFlatTableExecutor_LongTx::MergeSkewedCommitted [GOOD] >> TFlatTableExecutor_LongTxAndBlobs::SmallValues ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::UserConditionsAtAlter [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:03:20.639030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:03:20.639231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:03:20.639276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:03:20.639313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:03:20.639366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:03:20.639391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:03:20.639477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:03:20.639592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
[RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:03:20.639948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:03:20.735573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:03:20.735650Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:20.744860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:03:20.744967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:03:20.745100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:03:20.754642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:03:20.754901Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:03:20.755979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:20.756253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:03:20.760081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:20.761242Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:20.761295Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:20.761438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:03:20.761489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:20.761523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:03:20.761642Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:03:20.769393Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:03:20.987480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:03:20.987762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:20.987994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:03:20.988243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:03:20.988303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 
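The only ERROR-level records in the BS_CONTROLLER block above are the two "event processing took too much time" entries, each carrying a Type# and a Duration# field. A minimal triage sketch for pulling those out of a dump like this one (it assumes only the line layout shown above; the script, its name, and the 100 ms threshold are illustrative and not part of the YDB tooling):

    # slow_events.py -- hypothetical helper, not shipped with YDB
    import re
    import sys

    # Matches records such as:
    #   2025-03-31T16:03:13.222430Z node 71 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160}
    #   ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.171583s
    SLOW = re.compile(
        r"(?P<ts>\d{4}-\d{2}-\d{2}T[\d:.]+Z) node (?P<node>\d+) "
        r":(?P<comp>\w+) ERROR: \{[^}]+\} (?P<what>\S+) event processing "
        r"took too much time Type# (?P<type>\d+) Duration# (?P<dur>[\d.]+)s"
    )

    THRESHOLD_S = 0.1  # illustrative cut-off, not a YDB default

    for m in SLOW.finditer(sys.stdin.read()):
        if float(m.group("dur")) >= THRESHOLD_S:
            print(f"{m.group('ts')} node {m.group('node')} {m.group('comp')} "
                  f"{m.group('what')} type={m.group('type')} took {m.group('dur')}s")

Fed the text above on stdin, both records (ProcessControllerEvent at 0.171583s and StateWork at 0.171732s) clear the threshold and are printed.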
2025-03-31T16:03:21.006176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:21.006333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:03:21.006571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:21.006648Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:03:21.006696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:03:21.006734Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:03:21.021930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:21.022039Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:03:21.022103Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:03:21.051955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:21.052050Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:21.052136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:21.052222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:03:21.056514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:03:21.065197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:03:21.065477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:03:21.066607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:21.066776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:03:21.066862Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:21.067211Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:03:21.067276Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:21.067485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:03:21.067593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:03:21.079823Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:21.079952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:21.080157Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:21.080204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:03:21.080480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:21.080533Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:03:21.080652Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:03:21.080692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:21.080737Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:03:21.080783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:21.080824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:03:21.080866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:21.080905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:03:21.080953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:03:21.081032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:03:21.081084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:03:21.081124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:03:21.084167Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:03:21.084372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:03:21.084423Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
G: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-03-31T16:03:21.225537Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-03-31T16:03:21.225651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-31T16:03:21.225696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:328:2319] TestWaitNotification: OK eventTxId 102 2025-03-31T16:03:21.226271Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:03:21.226559Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 231us result status StatusSuccess 2025-03-31T16:03:21.226978Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 2 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 2025-03-31T16:03:21.229985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterUserAttributes AlterUserAttributes { PathName: "DirA" UserAttributes { Key: "AttrA2" Value: "ValA2" } } ApplyIf { PathId: 2 PathVersion: 4 } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:03:21.230216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: /MyRoot/DirA, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-31T16:03:21.230346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-03-31T16:03:21.230485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:03:21.230550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 103:0, at schemeshard: 72057594046678944 2025-03-31T16:03:21.233184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusAccepted TxId: 103 SchemeshardId: 72057594046678944, 
at schemeshard: 72057594046678944 2025-03-31T16:03:21.233365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: /MyRoot/DirA 2025-03-31T16:03:21.233580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-31T16:03:21.233625Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 103:0, at schemeshard: 72057594046678944 2025-03-31T16:03:21.233682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 103 ready parts: 1/1 2025-03-31T16:03:21.233832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 103 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:03:21.236476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2025-03-31T16:03:21.236614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000004 2025-03-31T16:03:21.237089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:21.237212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:03:21.237266Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 103:0, stepId:5000004, at schemeshard: 72057594046678944 2025-03-31T16:03:21.237471Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-03-31T16:03:21.237512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-31T16:03:21.237560Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-03-31T16:03:21.237598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-31T16:03:21.237685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-31T16:03:21.237753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2025-03-31T16:03:21.237814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-31T16:03:21.237867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-31T16:03:21.237909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-03-31T16:03:21.237948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-03-31T16:03:21.238002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason 
remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-31T16:03:21.238046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 103, publications: 1, subscribers: 0 2025-03-31T16:03:21.238101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2025-03-31T16:03:21.240635Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:21.240703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T16:03:21.240913Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:21.240966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 103, path id: 2 FAKE_COORDINATOR: Erasing txId 103 2025-03-31T16:03:21.241918Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2025-03-31T16:03:21.242038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2025-03-31T16:03:21.242088Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2025-03-31T16:03:21.242129Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-03-31T16:03:21.242192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-31T16:03:21.242308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2025-03-31T16:03:21.244553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-03-31T16:03:21.244854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-03-31T16:03:21.244899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-03-31T16:03:21.245406Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-03-31T16:03:21.245507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-03-31T16:03:21.245584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:345:2336] TestWaitNotification: OK eventTxId 103 2025-03-31T16:03:21.246222Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:03:21.246448Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe 
path "/MyRoot/DirA" took 223us result status StatusSuccess 2025-03-31T16:03:21.246814Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 3 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrA2" Value: "ValA2" } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TFlatTableExecutor_LongTxAndBlobs::SmallValues [GOOD] >> TFlatTableExecutor_LongTxAndBlobs::OuterBlobValues >> KqpQuery::Pure [GOOD] >> TSubscriberTest::NotifyDelete [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::ReconnectOnFailure [GOOD] Test command err: 2025-03-31T16:03:21.436790Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:34:2065][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-03-31T16:03:21.440162Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:38:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2025-03-31T16:03:21.440381Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:35:2065] 2025-03-31T16:03:21.440607Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:39:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2025-03-31T16:03:21.440709Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:40:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:9:2056] 2025-03-31T16:03:21.440823Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:36:2065] 2025-03-31T16:03:21.440888Z node 2 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][2:34:2065][path] Set up state: owner# [2:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-31T16:03:21.440966Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:37:2065] 2025-03-31T16:03:21.441007Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: [main][2:34:2065][path] Ignore empty state: owner# [2:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-31T16:03:21.441627Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { 
Path: path Version: 0 }: sender# [2:35:2065] 2025-03-31T16:03:21.441710Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: [main][2:34:2065][path] Ignore empty state: owner# [2:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-31T16:03:21.441793Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:36:2065] 2025-03-31T16:03:21.441866Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: [main][2:34:2065][path] Ignore empty state: owner# [2:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-31T16:03:21.441959Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:37:2065] 2025-03-31T16:03:21.442020Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: [main][2:34:2065][path] Ignore empty state: owner# [2:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-31T16:03:21.453631Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:45:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2025-03-31T16:03:21.453829Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:35:2065] 2025-03-31T16:03:21.453901Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: [main][2:34:2065][path] Ignore empty state: owner# [2:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-31T16:03:21.453983Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:46:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2025-03-31T16:03:21.454129Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:36:2065] 2025-03-31T16:03:21.454169Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: [main][2:34:2065][path] Ignore empty state: owner# [2:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-31T16:03:21.454223Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:47:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:9:2056] 2025-03-31T16:03:21.454310Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:37:2065] 2025-03-31T16:03:21.454349Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: [main][2:34:2065][path] Ignore empty state: owner# [2:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-31T16:03:21.455154Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:45:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path 
PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:3:2050] 2025-03-31T16:03:21.455310Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [2:35:2065] 2025-03-31T16:03:21.455430Z node 2 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][2:34:2065][path] Update to strong state: owner# [2:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::SyncWithOutdatedReplica [GOOD] Test command err: 2025-03-31T16:03:21.458335Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-03-31T16:03:21.461168Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 2] Version: 2 }: sender# [1:3:2050] 2025-03-31T16:03:21.461338Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 2, LocalPathId: 2] Version: 1 }: sender# [1:6:2053] 2025-03-31T16:03:21.461382Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 2, LocalPathId: 2] Version: 1 }: sender# [1:9:2056] 2025-03-31T16:03:21.461453Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 2] Version: 2 }: sender# [1:36:2066] 2025-03-31T16:03:21.461513Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 2, LocalPathId: 2] Version: 1 }: sender# [1:37:2066] 2025-03-31T16:03:21.461573Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:35:2066][path] Set up state: owner# [1:33:2064], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 2, LocalPathId: 2], Version: 1) DomainId: [OwnerId: 2, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-03-31T16:03:21.461715Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 2, LocalPathId: 2] Version: 1 }: sender# [1:38:2066] 2025-03-31T16:03:21.461825Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:35:2066][path] Path was already updated: owner# [1:33:2064], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 2, LocalPathId: 2], Version: 1) DomainId: [OwnerId: 2, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 2, LocalPathId: 2], Version: 1) DomainId: [OwnerId: 2, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-03-31T16:03:21.461974Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:33:2064], cookie# 1 2025-03-31T16:03:21.462070Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:36:2066], cookie# 1 2025-03-31T16:03:21.462124Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2066][path] Handle 
NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:37:2066], cookie# 1 2025-03-31T16:03:21.462179Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:38:2066], cookie# 1 2025-03-31T16:03:21.462234Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:3:2050], cookie# 1 2025-03-31T16:03:21.462278Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:6:2053], cookie# 1 2025-03-31T16:03:21.462306Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:9:2056], cookie# 1 2025-03-31T16:03:21.462353Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:36:2066], cookie# 1 2025-03-31T16:03:21.462391Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-03-31T16:03:21.462435Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:37:2066], cookie# 1 2025-03-31T16:03:21.462472Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-03-31T16:03:21.462544Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:38:2066], cookie# 1 2025-03-31T16:03:21.462571Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Unexpected sync response: sender# [1:38:2066], cookie# 1 >> TSubscriberTest::Boot [GOOD] >> TFlatTableExecutor_LongTxAndBlobs::OuterBlobValues [GOOD] >> TFlatTableExecutor_LongTxAndBlobs::ExternalBlobValues >> TSubscriberTest::InvalidNotification [GOOD] >> TFlatTableExecutor_LongTxAndBlobs::ExternalBlobValues [GOOD] >> TFlatTableExecutor_LowPriorityTxs::TestEnqueueCancel [GOOD] >> TFlatTableExecutor_LowPriorityTxs::TestLowPriority >> TPartBtreeIndexIteration::FewNodes_Slices [GOOD] >> TPartBtreeIndexIteration::FewNodes_Groups_Slices >> TFlatTableExecutor_LowPriorityTxs::TestLowPriority [GOOD] |73.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut |73.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut >> TFlatTableExecutor_LowPriorityTxs::TestLowPriorityCancel [GOOD] |73.5%| [LD] {RESULT} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut >> TFlatTableExecutor_LowPriorityTxs::TestLowPriorityAllocatingCancel |73.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut |73.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut |73.5%| [LD] {RESULT} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut >> TSubscriberTest::SyncPartial [GOOD] >> TExternalTableTest::ReplaceExternalTableIfNotExists >> TFlatTableExecutor_LowPriorityTxs::TestLowPriorityAllocatingCancel [GOOD] >> TFlatTableExecutor_MoveTableData::TestMoveSnapshot ------- [TM] {asan, default-linux-x86_64, 
release} ydb/core/kqp/ut/query/unittest >> KqpQuery::OltpCreateAsSelect_Disable [GOOD] Test command err: Trying to start YDB, gRPC: 19145, MsgBus: 3746 2025-03-31T16:02:56.421011Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004796258867845:2276];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:02:56.421147Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000aab/r3tmp/tmpxl4qOW/pdisk_1.dat 2025-03-31T16:02:57.091590Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:02:57.093199Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:02:57.093277Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:02:57.103008Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19145, node 1 2025-03-31T16:02:57.444048Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:02:57.444071Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:02:57.444079Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:02:57.444209Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3746 TClient is connected to server localhost:3746 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:02:58.195147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:00.852542Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004813438737444:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:00.852721Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:00.859078Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004813438737471:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:00.865146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:03:00.883376Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488004813438737473:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:03:00.982011Z node 1 :TX_PROXY ERROR: Actor# [1:7488004813438737524:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:03:01.400605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:03:01.421331Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488004796258867845:2276];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:01.421428Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:03:01.564965Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037897;self_id=[1:7488004817733704956:2343];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:03:01.568222Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037896;self_id=[1:7488004817733704962:2346];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:03:01.637326Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037897;self_id=[1:7488004817733704956:2343];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:03:01.637562Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037897 2025-03-31T16:03:01.639528Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037896;self_id=[1:7488004817733704962:2346];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:03:01.639662Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037896 2025-03-31T16:03:01.655633Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7488004817733704956:2343];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:03:01.655844Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7488004817733704956:2343];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:03:01.656171Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7488004817733704956:2343];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:03:01.656297Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7488004817733704956:2343];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:03:01.656908Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488004817733704962:2346];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:03:01.656946Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037896;self_id=[1:7488004817733704962:2346];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:03:01.657095Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488004817733704962:2346];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:03:01.657191Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488004817733704962:2346];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:03:01.657300Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488004817733704962:2346];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:03:01.657438Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488004817733704962:2346];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:03:01.657547Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488004817733704962:2346];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:03:01.657654Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488004817733704962:2346];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:03:01.657791Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488004817733704962:2346];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:03:01.657908Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488004817733704962:2346];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:03:01.658014Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488004817733704962:2346];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:03:01.658127Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488004817733704962:2346];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:03:01.661527Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7488004817733704956:2343];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:03:01.661735Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7488004817733704956:2343];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:03:01.661836Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037897;self_id=[1:7488004817733704956:2343];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:03:01.661945Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7488004817733704956:2343];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:03:01.662054Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7488004817733704956:2343];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-0 ... :07.953012Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000aab/r3tmp/tmp844Det/pdisk_1.dat 2025-03-31T16:03:08.145956Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:08.159989Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:03:08.160095Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:03:08.162729Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25946, node 2 2025-03-31T16:03:08.331551Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:03:08.331574Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:03:08.331581Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:03:08.331702Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18875 TClient is connected to server localhost:18875 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:03:09.076051Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
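Most of the TX_COLUMNSHARD WARN volume above is normalizer registration, one record per CLASS_NAME per tablet. A companion sketch in the same spirit as the snippet earlier (same assumption: only the semicolon-delimited layout shown above; the grouping script is illustrative) that tallies which normalizers each tablet registered:

    # normalizer_tally.py -- hypothetical helper, not shipped with YDB
    import re
    import sys
    from collections import defaultdict

    # Matches the key=value fields of records such as:
    #   ... :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;...;fline=abstract.cpp:11;
    #   event=normalizer_register;description=CLASS_NAME=Granules;
    REG = re.compile(
        r"tablet_id=(?P<tablet>\d+);\S*?"
        r"event=normalizer_register;description=CLASS_NAME=(?P<name>\w+);"
    )

    registered = defaultdict(list)
    for m in REG.finditer(sys.stdin.read()):
        registered[m.group("tablet")].append(m.group("name"))

    for tablet, names in sorted(registered.items()):
        print(f"{tablet}: {len(names)} normalizers ({', '.join(names)})")

On the fragment above this reports, for tablets 72075186224037896 and 72075186224037897, registration chains starting with Granules, Chunks, and TablesCleaner (the second chain is cut short by the elision in the dump).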
2025-03-31T16:03:09.087765Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:03:09.090363Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:03:09.135259Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-31T16:03:12.250712Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488004864024938999:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:12.250804Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488004864024938989:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:12.251169Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:12.254679Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-03-31T16:03:12.276132Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488004864024939005:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-03-31T16:03:12.355641Z node 2 :TX_PROXY ERROR: Actor# [2:7488004864024939056:2351] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:03:12.385042Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:03:12.662750Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:1, at schemeshard: 72057594046644480 2025-03-31T16:03:12.952617Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488004842550101828:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:12.952689Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 5441, MsgBus: 20941 2025-03-31T16:03:14.572041Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488004875344930232:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:14.572091Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000aab/r3tmp/tmpBGUMZ6/pdisk_1.dat 2025-03-31T16:03:14.906537Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:14.940878Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:03:14.940984Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:03:14.948122Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5441, node 3 2025-03-31T16:03:15.155778Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:03:15.155801Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:03:15.155811Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:03:15.155943Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20941 TClient is connected to server localhost:20941 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:03:15.796836Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:15.807966Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:03:19.575277Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488004875344930232:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:19.575367Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:03:19.735465Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488004896819767377:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:19.735574Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:19.736152Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488004896819767389:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:19.741586Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:03:19.823376Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488004896819767391:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:03:19.915480Z node 3 :TX_PROXY ERROR: Actor# [3:7488004896819767442:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:03:19.990846Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:03:20.318789Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7488004901114734880:2357], status: GENERIC_ERROR, issues:
: Error: Pre type annotation, code: 1020
:5:49: Error: Creating table with data is not supported. 2025-03-31T16:03:20.319339Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YjU0ZGZiZC1mNzI3ZWM4NS1iOWM3OGEwZC01MTAxY2M2Mw==, ActorId: [3:7488004901114734878:2356], ActorState: ExecuteState, TraceId: 01jqpf2vk2d0cv4e6540cbssy7, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: >> TFlatTableExecutor_MoveTableData::TestMoveSnapshot [GOOD] >> TFlatTableExecutor_MoveTableData::TestMoveSnapshotFollower >> TExternalTableTest::DropTableTwice >> BindQueue::Basic >> TFlatTableExecutor_MoveTableData::TestMoveSnapshotFollower [GOOD] >> TFlatTableExecutor_PostponedScan::TestPostponedScan |73.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader |73.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader |73.5%| [LD] {RESULT} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::NotifyDelete [GOOD] Test command err: 2025-03-31T16:03:22.121378Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-03-31T16:03:22.136873Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:3:2050] 2025-03-31T16:03:22.137091Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:6:2053] 2025-03-31T16:03:22.137148Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:9:2056] 2025-03-31T16:03:22.137237Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:36:2066] 2025-03-31T16:03:22.137306Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:37:2066] 2025-03-31T16:03:22.137376Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:35:2066][path] Set up state: owner# [1:33:2064], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-31T16:03:22.137561Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:38:2066] 2025-03-31T16:03:22.137649Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:35:2066][path] Path was already updated: owner# [1:33:2064], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-31T16:03:22.138140Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 
18446744073709551615 }: sender# [1:3:2050] 2025-03-31T16:03:22.138242Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 18446744073709551615 }: sender# [1:6:2053] 2025-03-31T16:03:22.138318Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 18446744073709551615 }: sender# [1:9:2056] 2025-03-31T16:03:22.138607Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 18446744073709551615 }: sender# [1:36:2066] 2025-03-31T16:03:22.138702Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:35:2066][path] Path was updated to new version: owner# [1:33:2064], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 1 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 18446744073709551615) DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-31T16:03:22.138768Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 18446744073709551615 }: sender# [1:37:2066] 2025-03-31T16:03:22.138845Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:35:2066][path] Path was already updated: owner# [1:33:2064], state# { Deleted: 1 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 18446744073709551615) DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 18446744073709551615) DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-31T16:03:22.138916Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 18446744073709551615 }: sender# [1:38:2066] 2025-03-31T16:03:22.138960Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:35:2066][path] Path was already updated: owner# [1:33:2064], state# { Deleted: 1 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 18446744073709551615) DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 18446744073709551615) DomainId: AbandonedSchemeShards: there are 0 elements } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::DeferredEffects-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 9370, MsgBus: 11602 2025-03-31T16:02:48.418116Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004762911293380:2057];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:02:48.418162Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000abb/r3tmp/tmpVGvxFW/pdisk_1.dat 2025-03-31T16:02:49.020556Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:02:49.020669Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Disconnected -> Connecting 2025-03-31T16:02:49.022176Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:02:49.068731Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9370, node 1 2025-03-31T16:02:49.243622Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:02:49.243646Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:02:49.243652Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:02:49.243736Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11602 TClient is connected to server localhost:11602 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:02:50.030347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:50.054513Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:02:50.066888Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:50.268580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:50.480607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:50.573820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:52.734291Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004780091164351:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:52.734452Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:53.121740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:02:53.199970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:02:53.262102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:02:53.291212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:02:53.319286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:02:53.352084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:02:53.405481Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004784386132161:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:53.405591Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:53.406053Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004784386132166:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:53.410712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:02:53.418175Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488004762911293380:2057];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:02:53.424098Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:02:53.435205Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488004784386132168:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:02:53.521515Z node 1 :TX_PROXY ERROR: Actor# [1:7488004784386132226:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:02:58.205190Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743436975308, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 29679, MsgBus: 2931 2025-03-31T16:02:59.208822Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488004808850482542:2084];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:02:59.209700Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000abb/r3tmp/tmp1zDOgB/pdisk_1.dat 2025-03-31T16:02:59.415521Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:02:59.446932Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:02:59.447052Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 29679, node 2 2025-03-31T16:02:59.449685Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:02:59.570400Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:02:59.570436Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:02:59.570444Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:02:59.570546Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2931 TClient is connected to server localhost:2931 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:03:00.372751Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
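[Aside on the compile failure quoted earlier in this output (":5:49: Error: Creating table with data is not supported", status GENERIC_ERROR, node 3): KQP rejects DDL that also inserts rows. The actual statement is not shown in the log, so the sketch below is purely a hypothetical illustration of one statement shape that can trigger this class of error; the paths and columns are invented.]

-- Hypothetical YQL statement shape (not the one from the log): DDL that both
-- creates a table and populates it in a single statement.
CREATE TABLE `/Root/CopyTable` (
    PRIMARY KEY (Key)
) AS SELECT Key, Value FROM `/Root/KeyValue`;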
2025-03-31T16:03:00.386449Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:03:00.406755Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:00.575838Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:03:00.842344Z node 2 :FLAT_TX_SCHEME ... rt proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:03:04.099859Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:03:04.153374Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:03:04.211225Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488004808850482542:2084];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:04.215191Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:03:04.245695Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:03:04.343192Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:03:04.495306Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488004830325321257:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:04.495375Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:04.495526Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488004830325321262:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:04.499334Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:03:04.517615Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488004830325321264:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:03:04.610058Z node 2 :TX_PROXY ERROR: Actor# [2:7488004830325321318:3456] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
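[Aside: the sequence just above — TPoolFetcherActor reporting NOT_FOUND for pool "default", TPoolCreatorActor scheduling a retry, and TX_PROXY then answering "path exist, request accepts it" — is the workload manager lazily creating /Root/.metadata/workload_manager/pools/default, with the second creator reconciling against the path the first already made. For reference, a minimal YQL sketch of creating a resource pool explicitly; the pool name and limits below are illustrative assumptions, not values taken from this run.]

-- Hypothetical sketch: explicit resource pool creation (name and limits invented).
-- A pre-created pool may avoid the first-query NOT_FOUND warnings seen above.
CREATE RESOURCE POOL sample_pool WITH (
    CONCURRENT_QUERY_LIMIT = 10,
    QUEUE_SIZE = 100
);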
: Warning: Type annotation, code: 1030
:3:46: Warning: At function: Coalesce
:3:58: Warning: At function: SqlIn
:3:58: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 Trying to start YDB, gRPC: 3624, MsgBus: 7055 2025-03-31T16:03:09.296091Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488004854200062862:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:09.296142Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000abb/r3tmp/tmpoaiNxn/pdisk_1.dat 2025-03-31T16:03:09.714000Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:09.783646Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:03:09.783757Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:03:09.786436Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3624, node 3 2025-03-31T16:03:09.975839Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:03:09.975874Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:03:09.975887Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:03:09.976056Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7055 TClient is connected to server localhost:7055 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:03:10.724764Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:10.745515Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:03:10.759231Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
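[Aside on the code 1108 warning above: the compiler suggests its own fix. Below is a minimal YQL sketch applying the quoted pragma; only the pragma name comes from the warning text, while the table and values are hypothetical.]

PRAGMA AnsiInForEmptyOrNullableItemsCollections;
-- With the pragma enabled, IN over a collection containing NULL follows ANSI
-- three-valued logic rather than the legacy behaviour the warning flags.
SELECT * FROM `/Root/KeyValue`  -- hypothetical table
WHERE Key IN (1, NULL, 3);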
2025-03-31T16:03:10.873628Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:11.117367Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:11.231329Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:14.296851Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488004854200062862:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:14.296940Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:03:15.385544Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488004879969868456:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:15.385653Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:15.513915Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:03:15.603399Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:03:15.690319Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:03:15.776243Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:03:15.921276Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:03:16.010471Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:03:16.139752Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488004884264836286:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:16.139892Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:16.140404Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488004884264836291:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:16.146120Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:03:16.179363Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488004884264836293:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:03:16.238692Z node 3 :TX_PROXY ERROR: Actor# [3:7488004884264836349:3465] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
: Warning: Type annotation, code: 1030
:3:46: Warning: At function: Coalesce
:3:58: Warning: At function: SqlIn
:3:58: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 >> RetryPolicy::RetryWithBatching [GOOD] >> BuildStatsHistogram::Ten_Mixed_Log [GOOD] >> BuildStatsHistogram::Ten_Serial_Log >> TBlobStorageWardenTest::ObtainTenantKeySamePin [GOOD] >> TBlobStorageWardenTest::ObtainTenantKeyDifferentPin [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::SyncPartial [GOOD] Test command err: 2025-03-31T16:03:22.441566Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-03-31T16:03:22.443690Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2025-03-31T16:03:22.443821Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2025-03-31T16:03:22.443869Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:9:2056] 2025-03-31T16:03:22.443947Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:35:2065] 2025-03-31T16:03:22.444022Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:36:2065] 2025-03-31T16:03:22.444062Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:34:2065][path] Set up state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-31T16:03:22.444109Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:37:2065] 2025-03-31T16:03:22.444168Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:34:2065][path] Ignore empty state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-31T16:03:22.444402Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:33:2064], cookie# 1 2025-03-31T16:03:22.444531Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:35:2065], cookie# 1 2025-03-31T16:03:22.444634Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:36:2065], cookie# 1 2025-03-31T16:03:22.444686Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:37:2065], cookie# 1 2025-03-31T16:03:22.444757Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 }: sender# [1:6:2053], cookie# 1 2025-03-31T16:03:22.444799Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 }: sender# [1:9:2056], cookie# 1 2025-03-31T16:03:22.444855Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] 
Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 }: sender# [1:35:2065], cookie# 1 2025-03-31T16:03:22.444891Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 0, faulires# 1 2025-03-31T16:03:22.444931Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:35:2065] 2025-03-31T16:03:22.444975Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:34:2065][path] Ignore empty state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-31T16:03:22.445015Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 }: sender# [1:36:2065], cookie# 1 2025-03-31T16:03:22.445036Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 1 2025-03-31T16:03:22.445064Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 }: sender# [1:37:2065], cookie# 1 2025-03-31T16:03:22.445094Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 1, partial# 0 2025-03-31T16:03:22.445230Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:33:2064], cookie# 2 2025-03-31T16:03:22.445324Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 }: sender# [1:35:2065], cookie# 2 2025-03-31T16:03:22.445348Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Sync is in progress: cookie# 2, size# 3, half# 1, successes# 0, faulires# 1 2025-03-31T16:03:22.445391Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:36:2065], cookie# 2 2025-03-31T16:03:22.445452Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:37:2065], cookie# 2 2025-03-31T16:03:22.445509Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 }: sender# [1:9:2056], cookie# 2 2025-03-31T16:03:22.445591Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 }: sender# [1:36:2065], cookie# 2 2025-03-31T16:03:22.445619Z node 1 :SCHEME_BOARD_SUBSCRIBER WARN: [main][1:34:2065][path] Sync is done: cookie# 2, size# 3, half# 1, successes# 0, faulires# 2, partial# 1 2025-03-31T16:03:22.445682Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:36:2065] 2025-03-31T16:03:22.445743Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:34:2065][path] Ignore empty state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-31T16:03:22.445792Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 }: sender# [1:37:2065], cookie# 2 2025-03-31T16:03:22.445816Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Unexpected sync response: sender# [1:37:2065], cookie# 2 2025-03-31T16:03:22.445892Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:33:2064], cookie# 3 2025-03-31T16:03:22.445969Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 }: sender# [1:35:2065], cookie# 3 2025-03-31T16:03:22.445999Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Sync is in progress: cookie# 3, size# 3, half# 1, successes# 0, faulires# 1 2025-03-31T16:03:22.446024Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 }: sender# [1:36:2065], cookie# 3 2025-03-31T16:03:22.446047Z node 1 :SCHEME_BOARD_SUBSCRIBER WARN: [main][1:34:2065][path] Sync is done: cookie# 3, size# 3, half# 1, successes# 0, faulires# 2, partial# 1 2025-03-31T16:03:22.446076Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:37:2065], cookie# 3 2025-03-31T16:03:22.446174Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 }: sender# [1:37:2065], cookie# 3 2025-03-31T16:03:22.446213Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Unexpected sync response: sender# [1:37:2065], cookie# 3 2025-03-31T16:03:22.446267Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:37:2065] 2025-03-31T16:03:22.446309Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:34:2065][path] Ignore empty state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::StressTestAsync1 [GOOD] Test command err: 2025-03-31T16:02:10.799588Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004598579821216:2234];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:02:10.799739Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000658/r3tmp/tmpLXr7cR/pdisk_1.dat 2025-03-31T16:02:11.967244Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:02:12.011306Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:02:12.050266Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:02:12.050354Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:02:12.059200Z node 1 
:HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18631, node 1 2025-03-31T16:02:12.295406Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:02:12.295445Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:02:12.295453Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:02:12.295605Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5807 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:02:12.682198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:02:15.796744Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488004598579821216:2234];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:02:15.796854Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:02:26.956293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:02:26.956332Z node 1 :IMPORT WARN: Table profiles were not loaded >> TFlatTableExecutor_PostponedScan::TestPostponedScan [GOOD] >> TFlatTableExecutor_PostponedScan::TestCancelFinishedScan >> TExecutorDb::FullScan [GOOD] >> TExecutorDb::CoordinatorSimulation >> TSequence::CreateSequenceParallel [GOOD] >> TSequence::CreateSequenceSequential >> TFlatTableExecutor_PostponedScan::TestCancelFinishedScan [GOOD] >> TFlatTableExecutor_PostponedScan::TestCancelRunningPostponedScan >> TSequence::CreateSequence [GOOD] >> TSequence::CreateDropRecreate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::InvalidNotification [GOOD] Test command err: 2025-03-31T16:03:22.318165Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-03-31T16:03:22.320679Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2025-03-31T16:03:22.320825Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2025-03-31T16:03:22.320879Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:9:2056] 2025-03-31T16:03:22.320970Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:35:2065] 2025-03-31T16:03:22.321064Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:36:2065] 2025-03-31T16:03:22.321108Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:34:2065][path] Set up state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-31T16:03:22.321161Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:37:2065] 2025-03-31T16:03:22.321199Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:34:2065][path] Ignore empty state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-31T16:03:22.321524Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { PathId: [OwnerId: 1, LocalPathId: 1] Version: 0 }: sender# [1:33:2064] 2025-03-31T16:03:22.321587Z node 1 :SCHEME_BOARD_SUBSCRIBER ERROR: [main][1:34:2065][path] Suspicious NKikimrSchemeBoard.TEvNotify { PathId: [OwnerId: 1, LocalPathId: 1] Version: 0 }: sender# [1:33:2064] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::Boot [GOOD] Test command err: 2025-03-31T16:03:21.613114Z node 1 
:SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:34:2065] 2025-03-31T16:03:21.613200Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:3:2050] Successful handshake: owner# 800, generation# 1 2025-03-31T16:03:21.613455Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:34:2065] 2025-03-31T16:03:21.613495Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:3:2050] Commit generation: owner# 800, generation# 1 2025-03-31T16:03:21.613556Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 900 Generation: 1 }: sender# [1:35:2066] 2025-03-31T16:03:21.613590Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Successful handshake: owner# 900, generation# 1 2025-03-31T16:03:21.613867Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 900 Generation: 1 }: sender# [1:35:2066] 2025-03-31T16:03:21.613913Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Commit generation: owner# 900, generation# 1 2025-03-31T16:03:21.614040Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:37:2068][/root/db/dir_inside] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-03-31T16:03:21.614479Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/db/dir_inside DomainOwnerId: 1 }: sender# [1:41:2068] 2025-03-31T16:03:21.614515Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:3:2050] Upsert description: path# /root/db/dir_inside 2025-03-31T16:03:21.614663Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:3:2050] Subscribe: subscriber# [1:41:2068], path# /root/db/dir_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2025-03-31T16:03:21.614937Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/db/dir_inside DomainOwnerId: 1 }: sender# [1:42:2068] 2025-03-31T16:03:21.614978Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# /root/db/dir_inside 2025-03-31T16:03:21.615053Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Subscribe: subscriber# [1:42:2068], path# /root/db/dir_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2025-03-31T16:03:21.615212Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:9:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/db/dir_inside DomainOwnerId: 1 }: sender# [1:43:2068] 2025-03-31T16:03:21.615244Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:9:2056] Upsert description: path# /root/db/dir_inside 2025-03-31T16:03:21.615323Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:9:2056] Subscribe: subscriber# [1:43:2068], path# /root/db/dir_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2025-03-31T16:03:21.615445Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:41:2068][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside Version: 0 }: sender# [1:3:2050] 2025-03-31T16:03:21.615537Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:41:2068] 2025-03-31T16:03:21.615591Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:42:2068][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside Version: 0 }: sender# [1:6:2053] 2025-03-31T16:03:21.615627Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:42:2068] 2025-03-31T16:03:21.615678Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[replica][1:43:2068][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside Version: 0 }: sender# [1:9:2056] 2025-03-31T16:03:21.615730Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:9:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:43:2068] 2025-03-31T16:03:21.615816Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:37:2068][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside Version: 0 }: sender# [1:38:2068] 2025-03-31T16:03:21.615911Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:37:2068][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside Version: 0 }: sender# [1:39:2068] 2025-03-31T16:03:21.615961Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:37:2068][/root/db/dir_inside] Set up state: owner# [1:36:2067], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-31T16:03:21.616019Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:37:2068][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside Version: 0 }: sender# [1:40:2068] 2025-03-31T16:03:21.616056Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:37:2068][/root/db/dir_inside] Ignore empty state: owner# [1:36:2067], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } =========== !argsLeft.IsDeletion 2025-03-31T16:03:21.616350Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:34:2065], cookie# 0, event size# 118 2025-03-31T16:03:21.616394Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:3:2050] Update description: path# /root/db/dir_inside, pathId# [OwnerId: 800, LocalPathId: 1111], deletion# false 2025-03-31T16:03:21.622998Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:3:2050] Upsert description: path# /root/db/dir_inside, pathId# [OwnerId: 800, LocalPathId: 1111], pathDescription# {Status StatusSuccess, Path /root/db/dir_inside, PathId [OwnerId: 800, LocalPathId: 1111], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 1], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 67} 2025-03-31T16:03:21.623308Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:41:2068][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside PathId: [OwnerId: 800, LocalPathId: 1111] Version: 1 }: sender# [1:3:2050] 2025-03-31T16:03:21.623424Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 1 }: sender# [1:41:2068] 2025-03-31T16:03:21.623513Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:37:2068][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside PathId: [OwnerId: 800, LocalPathId: 1111] Version: 1 }: sender# [1:38:2068] 2025-03-31T16:03:21.623592Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:37:2068][/root/db/dir_inside] Update to strong state: owner# [1:36:2067], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 800, LocalPathId: 1111], Version: 1) DomainId: [OwnerId: 800, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } =========== argsLeft.GetSuperId() < argsRight.GetSuperId() =========== !argsRight.IsDeletion 2025-03-31T16:03:21.623896Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle 
NKikimrSchemeBoard.TEvUpdate { Owner: 900 Generation: 1 }: sender# [1:35:2066], cookie# 0, event size# 117
2025-03-31T16:03:21.623947Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Update description: path# /root/db/dir_inside, pathId# [OwnerId: 900, LocalPathId: 11], deletion# false
2025-03-31T16:03:21.624027Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# /root/db/dir_inside, pathId# [OwnerId: 900, LocalPathId: 11], pathDescription# {Status StatusSuccess, Path /root/db/dir_inside, PathId [OwnerId: 900, LocalPathId: 11], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 1], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 67}
2025-03-31T16:03:21.624190Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:42:2068][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside PathId: [OwnerId: 900, LocalPathId: 11] Version: 1 }: sender# [1:6:2053]
2025-03-31T16:03:21.624255Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 1 }: sender# [1:42:2068]
2025-03-31T16:03:21.624318Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:37:2068][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside PathId: [OwnerId: 900, LocalPathId: 11] Version: 1 }: sender# [1:39:2068]
2025-03-31T16:03:21.624418Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:37:2068][/root/db/dir_inside] Path was updated to new version: owner# [1:36:2067], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 800, LocalPathId: 1111], Version: 1) DomainId: [OwnerId: 800, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 900, LocalPathId: 11], Version: 1) DomainId: [OwnerId: 800, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }
2025-03-31T16:03:22.198978Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:34:2065][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList
2025-03-31T16:03:22.201515Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:38:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:3:2050]
2025-03-31T16:03:22.201601Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:39:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:6:2053]
2025-03-31T16:03:22.201671Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:40:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:9:2056]
2025-03-31T16:03:22.201745Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:35:2065]
2025-03-31T16:03:22.201814Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:36:2065]
2025-03-31T16:03:22.201858Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][3:34:2065][path] Set up state: owner# [3:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }
2025-03-31T16:03:22.201905Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:37:2065]
2025-03-31T16:03:22.201954Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: [main][3:34:2065][path] Ignore empty state: owner# [3:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }
>> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLock+EvWrite [GOOD]
>> TBlobStorageWardenTest::TestSendToInvalidGroupId
>> TFlatTableExecutor_PostponedScan::TestCancelRunningPostponedScan [GOOD]
>> TFlatTableExecutor_PostponedScan::TestPostponedScanSnapshotMVCC
>> THiveTest::TestCreateAndReassignTabletWithStoragePools [GOOD]
>> THiveTest::TestCreateAndReassignTabletWhileStarting
>> TBlobStorageWardenTest::TestFilterBadSerials [GOOD]
>> TBlobStorageWardenTest::TestGivenPDiskFormatedWithGuid1AndCreatedWithGuid2WhenYardInitThenError
>> TFlatTableExecutor_PostponedScan::TestPostponedScanSnapshotMVCC [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::Pure [GOOD]
Test command err:
Trying to start YDB, gRPC: 21442, MsgBus: 9862
2025-03-31T16:02:56.092003Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004798713486017:2069];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:02:56.092112Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000aaf/r3tmp/tmpLDGToF/pdisk_1.dat
2025-03-31T16:02:56.743383Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:02:56.766206Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:02:56.766336Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:02:56.780467Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 21442, node 1
2025-03-31T16:02:57.116315Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T16:02:57.116339Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T16:02:57.116349Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T16:02:57.116460Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:9862
TClient is connected to server localhost:9862
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-31T16:02:58.132651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:02:58.172188Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-03-31T16:02:58.187971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:02:58.420215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:02:58.639678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
waiting...
2025-03-31T16:02:58.832744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
2025-03-31T16:03:01.117346Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488004798713486017:2069];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:03:01.119560Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-31T16:03:01.181148Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004820188324265:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:03:01.181291Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:03:01.635341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-31T16:03:01.682512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-31T16:03:01.727326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-31T16:03:01.789884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-31T16:03:01.830711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-31T16:03:01.876677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-31T16:03:02.003708Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004820188324779:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:03:02.003788Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:03:02.004756Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004824483292080:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:03:02.009118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-31T16:03:02.035889Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488004824483292082:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-03-31T16:03:02.138005Z node 1 :TX_PROXY ERROR: Actor# [1:7488004824483292140:3458] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-31T16:03:03.339434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
Trying to start YDB, gRPC: 1130, MsgBus: 16450
2025-03-31T16:03:04.805222Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488004832547236075:2058];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:03:04.805267Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000aaf/r3tmp/tmpwjlLBy/pdisk_1.dat
2025-03-31T16:03:05.116958Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:03:05.167145Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:03:05.167240Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:03:05.176334Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 1130, node 2
2025-03-31T16:03:05.370992Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T16:03:05.371015Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T16:03:05.371021Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T16:03:05.371546Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:16450
TClient is connected to server localhost:16450
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-31T16:03:06.417059Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:03:06.451826Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-03-31T16:03:06.466145Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:03:06.615695Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16 ... fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488004832547236075:2058];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:03:09.807598Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-31T16:03:09.907613Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488004854022074335:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:03:09.907702Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:03:09.974848Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-31T16:03:10.025276Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-31T16:03:10.069311Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-31T16:03:10.119862Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-31T16:03:10.175918Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-31T16:03:10.265994Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-31T16:03:10.348356Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488004858317042144:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:03:10.348465Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:03:10.348729Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488004858317042149:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:03:10.352973Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-31T16:03:10.395421Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488004858317042151:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-03-31T16:03:10.489749Z node 2 :TX_PROXY ERROR: Actor# [2:7488004858317042207:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
Trying to start YDB, gRPC: 63179, MsgBus: 2801
2025-03-31T16:03:13.099943Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488004868817176705:2058];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:03:13.099998Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000aaf/r3tmp/tmprJ38Tm/pdisk_1.dat
2025-03-31T16:03:13.488086Z node 3 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:03:13.537799Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:03:13.537913Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:03:13.544343Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 63179, node 3
2025-03-31T16:03:13.723801Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T16:03:13.723832Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T16:03:13.723842Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T16:03:13.723984Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:2801
TClient is connected to server localhost:2801
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-31T16:03:14.781272Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:03:14.813831Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:03:14.921236Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:03:15.213190Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:03:15.331746Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:03:18.101064Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488004868817176705:2058];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:03:18.101147Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-31T16:03:18.561141Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488004890292014952:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:03:18.561239Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:03:18.622376Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-31T16:03:18.709068Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-31T16:03:18.774035Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-31T16:03:18.887672Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-31T16:03:18.994909Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-31T16:03:19.209867Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-31T16:03:19.339590Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488004894586982774:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:03:19.339737Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:03:19.340270Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488004894586982779:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:03:19.345957Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-31T16:03:19.375316Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488004894586982781:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-03-31T16:03:19.454271Z node 3 :TX_PROXY ERROR: Actor# [3:7488004894586982837:3459] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
|73.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::ObtainTenantKeyDifferentPin [GOOD]
>> TPersQueueTest::WhenTheTopicIsDeletedAfterDecompressingTheData_Uncompressed [GOOD]
>> TPersQueueTest::WhenTheTopicIsDeletedAfterReadingTheData_Uncompressed
>> KqpParams::DefaultParameterValue [GOOD]
>> TBlobStorageWardenTest::TestHttpMonPage
>> TBlobStorageWardenTest::TestDeleteStoragePool
>> TExternalTableTest::DropTableTwice [GOOD]
>> TExternalTableTest::ReplaceExternalTableIfNotExists [GOOD]
>> TBlobStorageWardenTest::TestUnmonitoredEventsThenNoMonitorings
>> TBlobStorageWardenTest::TestReceivedPDiskRestartNotAllowed
>> TBlobStorageWardenTest::TestReceivedPDiskRestartNotAllowed [GOOD]
>> TSequence::CreateDropRecreate [GOOD]
>> TSequence::CreateSequenceInsideSequenceNotAllowed
>> TBlobStorageWardenTest::TestLimitedKeylessGroupThenNoMonitoring
>> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites-EvWrite [GOOD]
>> TBlobStorageWardenTest::TestSendToInvalidGroupId [GOOD]
>> TBlobStorageWardenTest::TestSendUsefulMonitoring
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLock+EvWrite [GOOD]
Test command err:
2025-03-31T16:03:10.634553Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-03-31T16:03:10.634817Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-31T16:03:10.635040Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0011b7/r3tmp/tmp32OLiX/pdisk_1.dat
2025-03-31T16:03:11.068654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480
2025-03-31T16:03:11.130120Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:03:11.175890Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:03:11.176062Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:03:11.187691Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-31T16:03:11.282703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480
2025-03-31T16:03:11.677864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
2025-03-31T16:03:11.996148Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:830:2682], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:03:11.996275Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:841:2687], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:03:11.996398Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:03:12.003499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480
2025-03-31T16:03:12.192604Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:844:2690], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking }
2025-03-31T16:03:12.279736Z node 1 :TX_PROXY ERROR: Actor# [1:904:2731] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-31T16:03:12.637938Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqpf2kfsf49jg2cs37kpvz0d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODU4YjM2ZGEtOGU3NTFmYzgtZDgwMThlZjQtYjZjZjQ4Yjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:03:12.782962Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqpf2m4y1x942dwn8t19r6q7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWVhY2I4NTgtMTZmNzFlNTUtM2M3Y2JlOGUtYTZlOTE5YmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
... waiting for at least 2 blocked commits
... blocked commit for tablet 72075186224037888
... blocked commit for tablet 72075186224037889
2025-03-31T16:03:13.879951Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jqpf2n463df5hnehz69haz3w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGRkODRjMDEtOTM4MmUzMGYtNTE3YWNlZDgtZjVkYjg4ZmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:03:13.977870Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jqpf2naz4py49g2rs0xgrna8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjVkMTYwMjAtZDM1M2E0Y2UtMTY1ODVkMmEtYjRmYmIxN2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
... shards are ready for read-only immediate transactions
... waiting for at least 2 blocked commits
... blocked commit for tablet 72075186224037888
... blocked commit for tablet 72075186224037889
2025-03-31T16:03:19.570537Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:101:2147], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-03-31T16:03:19.570950Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
2025-03-31T16:03:19.571052Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0011b7/r3tmp/tmpKwRtYW/pdisk_1.dat
2025-03-31T16:03:20.005799Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480
2025-03-31T16:03:20.064547Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:03:20.105247Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:03:20.105393Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:03:20.121639Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-31T16:03:20.217652Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480
2025-03-31T16:03:20.590860Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
2025-03-31T16:03:20.890091Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:830:2682], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:03:20.890198Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:841:2687], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:03:20.890267Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:03:20.897807Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480
2025-03-31T16:03:21.101936Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:844:2690], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking }
2025-03-31T16:03:21.147258Z node 2 :TX_PROXY ERROR: Actor# [2:904:2731] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-31T16:03:21.241137Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqpf2w5rd4xzkm2as4bsn0pb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGIyOGQ2Yi1mYmFiZTY3Zi1kZGVhOWNmOC1jMjRmMTkwMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:03:21.370099Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqpf2whdcy74qzq755yn1tgs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=N2Q0MDQ3MWItN2M4MTBhMzItNTBkMGFjOTUtNzg5NzE1NTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:03:22.110845Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqpf2wxb37fc7ffgaygx7389, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGE4M2Y0OTUtOWYwMWM5MDAtNjgwOWZjZjEtZmU1Zjk1Yzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
{ items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } }
2025-03-31T16:03:22.582941Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jqpf2xpp5jfasc3gr0a7xaxf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTk4YmRkYTItM2Y5NTRjYTMtYTQ4MWNmOGEtMWNmNTEwYjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:03:22.710996Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jqpf2xvccv82asf049qxsg9a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGE4M2Y0OTUtOWYwMWM5MDAtNjgwOWZjZjEtZmU1Zjk1Yzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:03:22.829187Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jqpf2xyy6a4q2b981bq2rapd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGE4M2Y0OTUtOWYwMWM5MDAtNjgwOWZjZjEtZmU1Zjk1Yzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:03:23.027443Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OGE4M2Y0OTUtOWYwMWM5MDAtNjgwOWZjZjEtZmU1Zjk1Yzg=, ActorId: [2:976:2780], ActorState: ExecuteState, TraceId: 01jqpf2y2hb9nv7kr45pch7kzs, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken
>> TSequence::CreateSequenceSequential [GOOD]
>> TSequence::CreateSequenceInsideTableThenDropSequence
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::DropTableTwice [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142]
Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:111:2142]
2025-03-31T16:03:23.803924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-03-31T16:03:23.804032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-31T16:03:23.804077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-03-31T16:03:23.804125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-03-31T16:03:23.804174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-03-31T16:03:23.804245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-03-31T16:03:23.804336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-31T16:03:23.804460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-03-31T16:03:23.804831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-03-31T16:03:23.904204Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" }
2025-03-31T16:03:23.904271Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:03:23.910299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-03-31T16:03:23.910572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-03-31T16:03:23.910756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-03-31T16:03:23.916782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-03-31T16:03:23.916995Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-03-31T16:03:23.917759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-03-31T16:03:23.917991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-03-31T16:03:23.920159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-03-31T16:03:23.921279Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-31T16:03:23.921341Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-31T16:03:23.921611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-03-31T16:03:23.921663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-31T16:03:23.921709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-03-31T16:03:23.921883Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-03-31T16:03:23.929786Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:238:2058] recipient: [1:15:2062]
2025-03-31T16:03:24.078796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-03-31T16:03:24.079316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:03:24.079606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-03-31T16:03:24.079868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-03-31T16:03:24.079930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:03:24.085948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-03-31T16:03:24.086134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-03-31T16:03:24.086368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:03:24.086441Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-03-31T16:03:24.086489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-03-31T16:03:24.086532Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-03-31T16:03:24.092219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:03:24.092358Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-03-31T16:03:24.092413Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-03-31T16:03:24.094870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:03:24.094936Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:03:24.094986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-03-31T16:03:24.095059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-03-31T16:03:24.103321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-03-31T16:03:24.111967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-03-31T16:03:24.112224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-03-31T16:03:24.113338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-03-31T16:03:24.113491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969446 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-03-31T16:03:24.113546Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-03-31T16:03:24.113857Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-03-31T16:03:24.113924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-03-31T16:03:24.114135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-03-31T16:03:24.114242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-03-31T16:03:24.124289Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-31T16:03:24.124377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-31T16:03:24.124668Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-31T16:03:24.124719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1
FAKE_COORDINATOR: Erasing txId 1
2025-03-31T16:03:24.125032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:03:24.125086Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-03-31T16:03:24.125187Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-03-31T16:03:24.125242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-31T16:03:24.125305Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-03-31T16:03:24.125342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-31T16:03:24.125382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-03-31T16:03:24.125444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-31T16:03:24.125484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-03-31T16:03:24.125517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-03-31T16:03:24.125621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-03-31T16:03:24.125661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-03-31T16:03:24.125696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-03-31T16:03:24.128010Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575940466789 ...
TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 103 at step: 5000004
FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000004
2025-03-31T16:03:24.255259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944
2025-03-31T16:03:24.255381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969446 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-03-31T16:03:24.255450Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropExternalTable TPropose opId# 103:0 HandleReply TEvOperationPlan: step# 5000004
2025-03-31T16:03:24.255647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2
2025-03-31T16:03:24.255755Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 128 -> 240
2025-03-31T16:03:24.255942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-03-31T16:03:24.256009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1
2025-03-31T16:03:24.256053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2
2025-03-31T16:03:24.256705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103
2025-03-31T16:03:24.257819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103
2025-03-31T16:03:24.259487Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-31T16:03:24.259533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-31T16:03:24.259660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3]
2025-03-31T16:03:24.259767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2]
2025-03-31T16:03:24.259910Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-31T16:03:24.259975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 103, path id: 1
2025-03-31T16:03:24.260044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 103, path id: 3
2025-03-31T16:03:24.260072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 103, path id: 2
FAKE_COORDINATOR: Erasing txId 103
2025-03-31T16:03:24.260314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944
2025-03-31T16:03:24.260360Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState
2025-03-31T16:03:24.260467Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1
2025-03-31T16:03:24.260504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1
2025-03-31T16:03:24.260543Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1
2025-03-31T16:03:24.260576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1
2025-03-31T16:03:24.260619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false
2025-03-31T16:03:24.260676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1
2025-03-31T16:03:24.260723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0
2025-03-31T16:03:24.260759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0
2025-03-31T16:03:24.260836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2
2025-03-31T16:03:24.260882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3
2025-03-31T16:03:24.260930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 103, publications: 3, subscribers: 0
2025-03-31T16:03:24.260973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 1], 9
2025-03-31T16:03:24.261002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 2], 2
2025-03-31T16:03:24.261028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615
2025-03-31T16:03:24.261489Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103
2025-03-31T16:03:24.261596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103
2025-03-31T16:03:24.261641Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 103
2025-03-31T16:03:24.261686Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615
2025-03-31T16:03:24.261752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1
2025-03-31T16:03:24.262197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944
2025-03-31T16:03:24.262251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944
2025-03-31T16:03:24.262318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3
2025-03-31T16:03:24.262968Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 103
2025-03-31T16:03:24.263046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 103
2025-03-31T16:03:24.263075Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103
2025-03-31T16:03:24.263547Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9
2025-03-31T16:03:24.263582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-03-31T16:03:24.264867Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 103
2025-03-31T16:03:24.264948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 103
2025-03-31T16:03:24.264979Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103
2025-03-31T16:03:24.265009Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2
2025-03-31T16:03:24.265038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2
2025-03-31T16:03:24.265127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0
2025-03-31T16:03:24.268288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103
2025-03-31T16:03:24.268566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944
2025-03-31T16:03:24.269117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103
2025-03-31T16:03:24.269587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103
TestModificationResult got TxId: 103, wait until txId: 103
TestWaitNotification wait txId: 103
2025-03-31T16:03:24.269928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion
2025-03-31T16:03:24.269980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103
2025-03-31T16:03:24.270411Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944
2025-03-31T16:03:24.270503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult
2025-03-31T16:03:24.270555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:366:2357]
TestWaitNotification: OK eventTxId 103
2025-03-31T16:03:24.271065Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-03-31T16:03:24.271332Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 236us result status StatusPathDoesNotExist
2025-03-31T16:03:24.271554Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ExternalTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ReplaceExternalTableIfNotExists [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142]
Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:111:2142]
2025-03-31T16:03:23.756707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-03-31T16:03:23.756844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-31T16:03:23.756882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-03-31T16:03:23.756921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-03-31T16:03:23.756963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-03-31T16:03:23.757019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-03-31T16:03:23.757090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-31T16:03:23.757184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-03-31T16:03:23.757506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-03-31T16:03:23.891439Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" }
2025-03-31T16:03:23.891502Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:03:23.912253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-03-31T16:03:23.912446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-03-31T16:03:23.912577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-03-31T16:03:23.960102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-03-31T16:03:23.960312Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-03-31T16:03:23.961014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-03-31T16:03:23.961218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-03-31T16:03:23.972752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-03-31T16:03:23.973839Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-31T16:03:23.973902Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-31T16:03:23.974165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-03-31T16:03:23.974229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-31T16:03:23.974275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-03-31T16:03:23.974477Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-03-31T16:03:24.002824Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:238:2058] recipient: [1:15:2062]
2025-03-31T16:03:24.189850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-03-31T16:03:24.190106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:03:24.190341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-03-31T16:03:24.190580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-03-31T16:03:24.190634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:03:24.194128Z
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:24.194272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:03:24.194463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:24.194518Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:03:24.194553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:03:24.194584Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:03:24.196822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:24.196881Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:03:24.196917Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:03:24.199263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:24.199333Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:24.199381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:24.199434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:03:24.210292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:03:24.213799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:03:24.213994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:03:24.215152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:24.215303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969446 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:03:24.215358Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:24.215624Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:03:24.215684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:24.215863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:03:24.215942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:03:24.218600Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:24.218661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:24.218830Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:24.218889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:03:24.219201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:24.219270Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:03:24.219381Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:03:24.219423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:24.219461Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:03:24.219492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:24.219532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:03:24.219588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:24.219642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:03:24.219670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:03:24.219726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:03:24.219761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:03:24.219791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:03:24.222313Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575940466789 ... 
04, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2025-03-31T16:03:24.384490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-03-31T16:03:24.385271Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2025-03-31T16:03:24.385409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2025-03-31T16:03:24.385435Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-03-31T16:03:24.385461Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2025-03-31T16:03:24.385486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-03-31T16:03:24.385546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-03-31T16:03:24.387844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 104:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:104 msg type: 269090816 2025-03-31T16:03:24.387977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 104, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 104 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 104 at step: 5000005 2025-03-31T16:03:24.388886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:24.388997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969446 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:03:24.389046Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TAlterExternalTable TPropose, operationId: 104:0 HandleReply TEvOperationPlan: step# 5000005 2025-03-31T16:03:24.389162Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 128 -> 240 2025-03-31T16:03:24.389366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:03:24.389438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-31T16:03:24.390645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-03-31T16:03:24.390955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 FAKE_COORDINATOR: Erasing txId 104 2025-03-31T16:03:24.392193Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:24.392228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:24.392368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-03-31T16:03:24.392441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-03-31T16:03:24.392546Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:24.392592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 104, path id: 1 2025-03-31T16:03:24.392633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 104, path id: 3 2025-03-31T16:03:24.392671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 104, path id: 3 2025-03-31T16:03:24.392842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-31T16:03:24.392880Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2025-03-31T16:03:24.392993Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-03-31T16:03:24.393096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-31T16:03:24.393139Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-03-31T16:03:24.393173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-31T16:03:24.393210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-03-31T16:03:24.393251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-31T16:03:24.393307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-03-31T16:03:24.393342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-03-31T16:03:24.393444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-03-31T16:03:24.393481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-31T16:03:24.393517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2025-03-31T16:03:24.393550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2025-03-31T16:03:24.393580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 3], 4 2025-03-31T16:03:24.394659Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 104 2025-03-31T16:03:24.394763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 
72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 104 2025-03-31T16:03:24.394800Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2025-03-31T16:03:24.394880Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-03-31T16:03:24.394916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-03-31T16:03:24.395982Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2025-03-31T16:03:24.396059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2025-03-31T16:03:24.396085Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-03-31T16:03:24.396123Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2025-03-31T16:03:24.396155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-31T16:03:24.396249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2025-03-31T16:03:24.399429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-03-31T16:03:24.400832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-03-31T16:03:24.401086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-03-31T16:03:24.401124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-03-31T16:03:24.401708Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-03-31T16:03:24.401806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-03-31T16:03:24.401850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:388:2379] TestWaitNotification: OK eventTxId 104 2025-03-31T16:03:24.402445Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:03:24.402635Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 203us result status StatusSuccess 2025-03-31T16:03:24.402952Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 3 
SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 3 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 3 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/other_location" Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |73.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestReceivedPDiskRestartNotAllowed [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestSendToInvalidGroupId [GOOD] Test command err: 2025-03-31T16:03:24.142402Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-31T16:03:24.142533Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:3:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-31T16:03:24.144109Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-31T16:03:24.144702Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-31T16:03:24.144792Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-31T16:03:24.144858Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-31T16:03:24.144936Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:3:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-31T16:03:24.147533Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-31T16:03:24.148396Z node 1 :BS_SYNCLOG 
WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001031/r3tmp/tmpqhIEhL/pdisk_1.dat 2025-03-31T16:03:24.867255Z node 1 :BS_PROXY_PUT INFO: [084d0c3a19bee089] bootstrap ActorId# [1:484:2466] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:8:0:0:1291:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-03-31T16:03:24.867416Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1291:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-03-31T16:03:24.867462Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1291:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-03-31T16:03:24.867488Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1291:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-03-31T16:03:24.867513Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1291:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-03-31T16:03:24.867536Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1291:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-03-31T16:03:24.867560Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1291:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-03-31T16:03:24.867595Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] restore Id# [72057594037932033:2:8:0:0:1291:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2025-03-31T16:03:24.867664Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:8:0:0:1291:1] Marker# BPG33 2025-03-31T16:03:24.867705Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:8:0:0:1291:1] Marker# BPG32 2025-03-31T16:03:24.867745Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:8:0:0:1291:2] Marker# BPG33 2025-03-31T16:03:24.867769Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:8:0:0:1291:2] Marker# BPG32 2025-03-31T16:03:24.867789Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:8:0:0:1291:3] Marker# BPG33 2025-03-31T16:03:24.867804Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:8:0:0:1291:3] Marker# BPG32 2025-03-31T16:03:24.867937Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:46:2090] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1291:3] FDS# 1291 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-31T16:03:24.867988Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:39:2083] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1291:2] FDS# 1291 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-31T16:03:24.868030Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:60:2104] NKikimr::TEvBlobStorage::TEvVPut# {ID# 
[72057594037932033:2:8:0:0:1291:1] FDS# 1291 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-31T16:03:24.876288Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1291:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 8 } Cost# 90165 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 9 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2025-03-31T16:03:24.876518Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1291:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 90165 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:1:0] Marker# BPP01 2025-03-31T16:03:24.876605Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1291:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 90165 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2025-03-31T16:03:24.876688Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Result# TEvPutResult {Id# [72057594037932033:2:8:0:0:1291:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12 2025-03-31T16:03:24.876767Z node 1 :BS_PROXY_PUT INFO: [084d0c3a19bee089] SendReply putResult# TEvPutResult {Id# [72057594037932033:2:8:0:0:1291:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-03-31T16:03:24.876951Z node 1 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 33554432 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.975 sample PartId# [72057594037932033:2:8:0:0:1291:3] QueryCount# 1 VDiskId# [2000000:1:0:1:0] NodeId# 1 } TEvVPut{ TimestampMs# 0.976 sample PartId# [72057594037932033:2:8:0:0:1291:2] QueryCount# 1 VDiskId# [2000000:1:0:0:0] NodeId# 1 } TEvVPut{ TimestampMs# 0.976 sample PartId# [72057594037932033:2:8:0:0:1291:1] QueryCount# 1 VDiskId# [2000000:1:0:3:0] NodeId# 1 } TEvVPutResult{ TimestampMs# 9.288 VDiskId# [2000000:1:0:3:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 9.469 VDiskId# [2000000:1:0:1:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 9.556 VDiskId# [2000000:1:0:0:0] NodeId# 1 Status# OK } ] } 2025-03-31T16:03:24.910077Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 4294967295 IsLimitedKeyless# 0 fullIfPossible# 1 Marker# DSP58 2025-03-31T16:03:24.912575Z node 1 :BS_PROXY CRIT: The request was sent for an invalid groupID Group# 4294967295 HandleError ev# TEvBlock {TabletId# 1234 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} Response# TEvBlockResult {Status# ERROR ErrorReason# "Created as unconfigured in error state (DSPE11). 
It happens when the request was sent for an invalid groupID"} Marker# DSP31 Sending TEvPut 2025-03-31T16:03:24.912972Z node 1 :BS_PROXY DEBUG: The request was sent for an invalid groupID Group# 4294967295 HandleError ev# TEvPut {Id# [1234:1:0:0:0:5:0] Size# 5 Deadline# 18446744073709551 HandleClass# TabletLog Tactic# Default} Response# TEvPutResult {Id# [1234:1:0:0:0:5:0] Status# ERROR StatusFlags# { } ErrorReason# "Created as unconfigured in error state (DSPE11). It happens when the request was sent for an invalid groupID" ApproximateFreeSpaceShare# 0} Marker# DSP31 2025-03-31T16:03:24.913140Z node 1 :BS_PROXY DEBUG: The request was sent for an invalid groupID Group# 4294967295 HandleError ev# TEvCollectGarbage {TabletId# 1234 RecordGeneration# 4294967295 PerGenerationCounter# 4294967295 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 4294967295 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 1 IsMonitored# 1} Response# TEvCollectGarbageResult {TabletId# 1234 RecordGeneration# 4294967295 PerGenerationCounter# 4294967295 Channel# 0 Status# ERROR ErrorReason# "Created as unconfigured in error state (DSPE11). It happens when the request was sent for an invalid groupID"} Marker# DSP31 >> TSequence::CreateSequenceInsideSequenceNotAllowed [GOOD] >> TSequence::CreateSequenceInsideIndexTableNotAllowed >> TBlobStorageWardenTest::TestUnmonitoredEventsThenNoMonitorings [GOOD] >> THiveTest::TestCreateAndReassignTabletWhileStarting [GOOD] >> THiveTest::TestCreateTabletAndReassignGroups ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpParams::DefaultParameterValue [GOOD] Test command err: Trying to start YDB, gRPC: 4571, MsgBus: 30660 2025-03-31T16:02:48.915740Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004760598045938:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:02:48.919190Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ab2/r3tmp/tmphZox2m/pdisk_1.dat 2025-03-31T16:02:49.583233Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:02:49.583317Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:02:49.594889Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:02:49.595642Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4571, node 1 2025-03-31T16:02:49.755390Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:02:49.755414Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:02:49.755512Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:02:49.755622Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30660 TClient is connected to server localhost:30660 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:02:50.589622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:50.654145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:50.888881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:51.145659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:51.240888Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:53.281125Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004782072884203:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:53.281266Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:53.629484Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:02:53.703736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:02:53.775425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:02:53.830282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:02:53.869089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:02:53.917657Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488004760598045938:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:02:53.917742Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:02:53.922153Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:02:54.012478Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004786367852017:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:54.012574Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:54.012910Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004786367852022:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:54.017939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:02:54.032146Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488004786367852024:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:02:54.089787Z node 1 :TX_PROXY ERROR: Actor# [1:7488004786367852079:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 15200, MsgBus: 26583 2025-03-31T16:02:56.788399Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488004794832524105:2202];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:02:56.867372Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ab2/r3tmp/tmpM7u7P2/pdisk_1.dat 2025-03-31T16:02:57.048305Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:02:57.092262Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:02:57.092351Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:02:57.096292Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15200, node 2 2025-03-31T16:02:57.263721Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:02:57.263750Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:02:57.263758Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:02:57.263886Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26583 TClient is connected to server localhost:26583 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:02:57.889480Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:02:57.905147Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:02:57.922098Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:58.077426Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:58.274839Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:58.400963Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreat ... osed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:03:07.248632Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:09.945657Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488004831331322903:2125];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:10.022235Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:03:10.474166Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488004857101128394:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:10.474338Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:10.580772Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:03:10.640320Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:03:10.695587Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:03:10.751945Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:03:10.800942Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:03:10.852663Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:03:10.923462Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488004857101128907:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:10.923615Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:10.923910Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488004857101128912:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:10.932210Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:03:10.960762Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488004857101128914:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:03:11.068500Z node 3 :TX_PROXY ERROR: Actor# [3:7488004861396096268:3457] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 61558, MsgBus: 4272 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ab2/r3tmp/tmphnxF78/pdisk_1.dat 2025-03-31T16:03:14.842454Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:03:14.993536Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:15.044593Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:03:15.044688Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:03:15.052446Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61558, node 4 2025-03-31T16:03:15.239799Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:03:15.239829Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:03:15.239839Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:03:15.239973Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4272 TClient is connected to server localhost:4272 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:03:16.100430Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:16.119846Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:03:16.141682Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:03:16.305358Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:16.584104Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:16.785673Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:20.674775Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488004897736074834:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:20.674935Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:20.727222Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:03:20.784266Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:03:20.834056Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:03:20.924297Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:03:21.026461Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:03:21.147350Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:03:21.295707Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488004902031042657:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:21.295840Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:21.303313Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488004902031042664:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:21.313042Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:03:21.340098Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7488004902031042666:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:03:21.435338Z node 4 :TX_PROXY ERROR: Actor# [4:7488004902031042722:3465] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpStats::OneShardNonLocalExec-UseSink [GOOD] >> DataShardOutOfOrder::UncommittedReadSetAck [GOOD] >> TExportToS3Tests::ShouldSucceedOnSingleShardTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites-EvWrite [GOOD] Test command err: 2025-03-31T16:03:12.053433Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:03:12.053651Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:03:12.053802Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0011b3/r3tmp/tmpafVfpu/pdisk_1.dat 2025-03-31T16:03:12.452846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:03:12.507277Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:12.561984Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:03:12.562134Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:03:12.576265Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:03:12.674758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:03:12.718336Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:03:12.719540Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:03:12.720070Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-31T16:03:12.720452Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:03:12.766558Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:03:12.767395Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:03:12.767559Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:03:12.769212Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:03:12.769318Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:03:12.769392Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:03:12.769963Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:03:12.770126Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:03:12.770223Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-31T16:03:12.782615Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:03:12.810588Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T16:03:12.810842Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:03:12.811009Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-31T16:03:12.811055Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:03:12.811119Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T16:03:12.811163Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:03:12.811591Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:03:12.811657Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:03:12.811995Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T16:03:12.812109Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T16:03:12.812244Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:03:12.812317Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:03:12.812376Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-31T16:03:12.812428Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-31T16:03:12.812469Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-31T16:03:12.812511Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:03:12.812562Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:03:12.812659Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:03:12.812707Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:03:12.812755Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-31T16:03:12.812870Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-03-31T16:03:12.812992Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T16:03:12.813113Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:03:12.813386Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-31T16:03:12.813456Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T16:03:12.813618Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T16:03:12.813676Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-31T16:03:12.813717Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-31T16:03:12.813760Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-03-31T16:03:12.813800Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-31T16:03:12.814118Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-31T16:03:12.814168Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-31T16:03:12.814205Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-31T16:03:12.814231Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-31T16:03:12.814273Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-31T16:03:12.814300Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-31T16:03:12.814341Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-31T16:03:12.814381Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-31T16:03:12.814402Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-31T16:03:12.816109Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-31T16:03:12.816161Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:03:12.828154Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:03:12.828244Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-31T16:03:12.828277Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-31T16:03:12.828322Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-03-31T16:03:12.828393Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-31T16:03:12.993797Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:03:12.993879Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:03:12.993924Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-31T16:03:12.994392Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:568:2495], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-03-31T16:03:12.994438Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-31T16:03:12.994585Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-31T16:03:12.994643Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-03-31T16:03:12.994693Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-03-31T16:03:12.994732Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-03-31T16:03:13.036432Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T16:03:13.036536Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:03:13.037376Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:03:13.037422Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:03:13.037501Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:03:1 ... 2025-03-31T16:03:24.544579Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1077:2858] TxId: 281474976715667. Ctx: { TraceId: 01jqpf2z99219j5j0rpqk0wr78, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YWQ0ZWUzZTYtNWNmNDg4NmMtYjU5YmI3NzItZmRhZTI1ZmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [2:1088:2880], CA [2:1089:2881], 2025-03-31T16:03:24.544799Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1077:2858] TxId: 281474976715667. Ctx: { TraceId: 01jqpf2z99219j5j0rpqk0wr78, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YWQ0ZWUzZTYtNWNmNDg4NmMtYjU5YmI3NzItZmRhZTI1ZmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1088:2880], task: 6, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 434 Tasks { TaskId: 6 StageId: 5 CpuTimeUs: 193 FinishTimeMs: 1743437004543 InputRows: 2 InputBytes: 7 OutputRows: 2 OutputBytes: 7 ComputeCpuTimeUs: 129 BuildCpuTimeUs: 64 HostName: "ghrun-tuhoqst4xe" NodeId: 2 CreateTimeMs: 1743437004532 } MaxMemoryUsage: 1048576 } 2025-03-31T16:03:24.544852Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715667. Ctx: { TraceId: 01jqpf2z99219j5j0rpqk0wr78, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YWQ0ZWUzZTYtNWNmNDg4NmMtYjU5YmI3NzItZmRhZTI1ZmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1088:2880] 2025-03-31T16:03:24.544945Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1077:2858] TxId: 281474976715667. Ctx: { TraceId: 01jqpf2z99219j5j0rpqk0wr78, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YWQ0ZWUzZTYtNWNmNDg4NmMtYjU5YmI3NzItZmRhZTI1ZmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1089:2881], 2025-03-31T16:03:24.544982Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1077:2858] TxId: 281474976715667. Ctx: { TraceId: 01jqpf2z99219j5j0rpqk0wr78, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YWQ0ZWUzZTYtNWNmNDg4NmMtYjU5YmI3NzItZmRhZTI1ZmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1089:2881], 2025-03-31T16:03:24.545255Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1077:2858] TxId: 281474976715667. Ctx: { TraceId: 01jqpf2z99219j5j0rpqk0wr78, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YWQ0ZWUzZTYtNWNmNDg4NmMtYjU5YmI3NzItZmRhZTI1ZmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1089:2881], task: 7, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 414 DurationUs: 2000 Tasks { TaskId: 7 StageId: 6 CpuTimeUs: 185 FinishTimeMs: 1743437004545 InputRows: 2 InputBytes: 7 OutputRows: 2 OutputBytes: 7 ResultRows: 2 ResultBytes: 7 ComputeCpuTimeUs: 144 BuildCpuTimeUs: 41 HostName: "ghrun-tuhoqst4xe" NodeId: 2 StartTimeMs: 1743437004543 CreateTimeMs: 1743437004533 } MaxMemoryUsage: 1048576 } 2025-03-31T16:03:24.545314Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715667. Ctx: { TraceId: 01jqpf2z99219j5j0rpqk0wr78, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YWQ0ZWUzZTYtNWNmNDg4NmMtYjU5YmI3NzItZmRhZTI1ZmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1089:2881] 2025-03-31T16:03:24.548849Z node 2 :KQP_EXECUTER INFO: ActorId: [2:1077:2858] TxId: 281474976715667. Ctx: { TraceId: 01jqpf2z99219j5j0rpqk0wr78, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YWQ0ZWUzZTYtNWNmNDg4NmMtYjU5YmI3NzItZmRhZTI1ZmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Full stats: { CpuTimeUs: 13159 DurationUs: 1743437002503602 Tables { TablePath: "/Root/table-1" ReadRows: 1 ReadBytes: 8 AffectedPartitions: 1 } Tables { TablePath: "/Root/table-2" ReadRows: 1 ReadBytes: 8 AffectedPartitions: 1 } ExecuterCpuTimeUs: 9067 StartTimeMs: 2041 FinishTimeMs: 1743437004545 Stages { StageId: 5 StageGuid: "5bf1baac-c8f5e541-e649a859-6a7a96d" Program: "(\n(return (lambda \'($1) (FromFlow (Take (ToFlow $1) (Uint64 \'\"1001\")))))\n)\n" ComputeActors { CpuTimeUs: 434 Tasks { TaskId: 6 StageId: 5 CpuTimeUs: 193 FinishTimeMs: 1743437004543 InputRows: 2 InputBytes: 7 OutputRows: 2 OutputBytes: 7 ComputeCpuTimeUs: 129 BuildCpuTimeUs: 64 HostName: "ghrun-tuhoqst4xe" NodeId: 2 CreateTimeMs: 1743437004532 } MaxMemoryUsage: 1048576 } BaseTimeMs: 1743437004539 } Stages { StageGuid: "da10310d-9fde0e42-4dffde2c-5ab8aa00" Program: "(\n(return (lambda \'($1) (block \'(\n (let $2 (lambda \'($3) (Member $3 \'\"key\") (Member $3 \'\"value\")))\n (return (FromFlow (ExpandMap (Take (ToFlow $1) (Uint64 \'\"1001\")) $2)))\n))))\n)\n" BaseTimeMs: 1743437004539 } Stages { StageId: 3 StageGuid: "fd211659-c73f62f-6601ad70-59328447" Program: "(\n(return (lambda \'($1) (FromFlow (WideTop (ToFlow $1) (Uint64 \'\"1001\") \'(\'(\'0 (Bool \'true)))))))\n)\n" BaseTimeMs: 1743437004539 } Stages { StageId: 2 StageGuid: "5c2f3c19-a0bcbff2-652a2847-8aa8b887" Program: "(\n(return (lambda \'($1) (block \'(\n (let $2 (lambda \'($3) (Member $3 \'\"key\") (Member $3 \'\"value\")))\n (return (FromFlow (ExpandMap (Take (ToFlow $1) (Uint64 \'\"1001\")) $2)))\n))))\n)\n" BaseTimeMs: 1743437004539 } Stages { StageId: 4 StageGuid: "61c6e454-10813268-de87ed4e-c01586dc" Program: "(\n(return (lambda \'($1 $2) (block \'(\n (let $3 (lambda \'($6 $7) (AsStruct \'(\'\"key\" $6) \'(\'\"value\" $7))))\n (let $4 (Sort (Extend (NarrowMap (ToFlow $1) $3) (NarrowMap (ToFlow $2) $3)) (Bool \'true) (lambda \'($8) (Member $8 \'\"key\"))))\n (let $5 (lambda \'($9) 
(Member $9 \'\"key\") (Member $9 \'\"value\")))\n (return (FromFlow (ExpandMap $4 $5)))\n))))\n)\n" BaseTimeMs: 1743437004539 } Stages { StageId: 6 StageGuid: "7dfbc4bd-7c9e2f62-8a8f9ad6-994b1b87" Program: "(\n(return (lambda \'($1) (FromFlow (NarrowMap (Take (ToFlow $1) (Uint64 \'\"1001\")) (lambda \'($2 $3) (AsStruct \'(\'\"key\" $2) \'(\'\"value\" $3)))))))\n)\n" BaseTimeMs: 1743437004539 } Stages { StageId: 1 StageGuid: "28664260-f1dec76a-f6271233-b620fba0" Program: "(\n(return (lambda \'($1) (FromFlow (WideTop (ToFlow $1) (Uint64 \'\"1001\") \'(\'(\'0 (Bool \'true)))))))\n)\n" BaseTimeMs: 1743437004539 } TxPlansWithStats: "{\"Node Type\":\"Phase\",\"PlanNodeId\":17,\"Plans\":[{\"Node Type\":\"ResultSet\",\"PlanNodeId\":16,\"PlanNodeType\":\"ResultSet\",\"Plans\":[{\"Node Type\":\"Limit\",\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":14}],\"Limit\":\"1001\",\"Name\":\"Limit\"}],\"PlanNodeId\":15,\"Plans\":[{\"Node Type\":\"UnionAll\",\"PlanNodeId\":14,\"PlanNodeType\":\"Connection\",\"Plans\":[{\"Node Type\":\"Limit\",\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":12}],\"Limit\":\"1001\",\"Name\":\"Limit\"}],\"PlanNodeId\":13,\"Plans\":[{\"Node Type\":\"Merge\",\"PlanNodeId\":12,\"PlanNodeType\":\"Connection\",\"Plans\":[{\"Node Type\":\"Sort-Union\",\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Name\":\"Sort\",\"SortBy\":\"row.key\"},{\"Inputs\":[{\"ExternalPlanNodeId\":10},{\"ExternalPlanNodeId\":5}],\"Name\":\"Union\"}],\"PlanNodeId\":11,\"Plans\":[{\"Node Type\":\"UnionAll\",\"PlanNodeId\":10,\"PlanNodeType\":\"Connection\",\"Plans\":[{\"Node Type\":\"Top\",\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":8}],\"Limit\":\"1001\",\"Name\":\"Top\",\"TopBy\":\"row.key\"}],\"PlanNodeId\":9,\"Plans\":[{\"Node Type\":\"UnionAll\",\"PlanNodeId\":8,\"PlanNodeType\":\"Connection\",\"Plans\":[{\"Node Type\":\"Limit\",\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":6}],\"Limit\":\"1001\",\"Name\":\"Limit\"}],\"PlanNodeId\":7,\"Plans\":[{\"Node Type\":\"TablePointLookup\",\"Operators\":[{\"E-Cost\":\"No estimate\",\"E-Rows\":\"No estimate\",\"E-Size\":\"No estimate\",\"Inputs\":[],\"Name\":\"TablePointLookup\",\"Path\":\"\\/Root\\/table-1\",\"ReadColumns\":[\"key\",\"value\"],\"ReadLimit\":\"1001\",\"ReadRange\":[\"key (3)\"],\"Scan\":\"Sequential\",\"Table\":\"table-1\"}],\"PlanNodeId\":6,\"StageGuid\":\"\",\"Tables\":[\"table-1\"]}],\"StageGuid\":\"da10310d-9fde0e42-4dffde2c-5ab8aa00\",\"Stats\":{\"BaseTimeMs\":1743437004539,\"FinishedTasks\":0,\"PhysicalStageId\":0,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"StageGuid\":\"\"}],\"StageGuid\":\"28664260-f1dec76a-f6271233-b620fba0\",\"Stats\":{\"BaseTimeMs\":1743437004539,\"FinishedTasks\":0,\"PhysicalStageId\":1,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"StageGuid\":\"\"},{\"Node Type\":\"UnionAll\",\"PlanNodeId\":5,\"PlanNodeType\":\"Connection\",\"Plans\":[{\"Node Type\":\"Top\",\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":3}],\"Limit\":\"1001\",\"Name\":\"Top\",\"TopBy\":\"row.key\"}],\"PlanNodeId\":4,\"Plans\":[{\"Node Type\":\"UnionAll\",\"PlanNodeId\":3,\"PlanNodeType\":\"Connection\",\"Plans\":[{\"Node Type\":\"Limit\",\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":1}],\"Limit\":\"1001\",\"Name\":\"Limit\"}],\"PlanNodeId\":2,\"Plans\":[{\"Node Type\":\"TablePointLookup\",\"Operators\":[{\"E-Cost\":\"No estimate\",\"E-Rows\":\"No estimate\",\"E-Size\":\"No 
estimate\",\"Inputs\":[],\"Name\":\"TablePointLookup\",\"Path\":\"\\/Root\\/table-2\",\"ReadColumns\":[\"key\",\"value\"],\"ReadLimit\":\"1001\",\"ReadRange\":[\"key (4)\"],\"Scan\":\"Sequential\",\"Table\":\"table-2\"}],\"PlanNodeId\":1,\"StageGuid\":\"\",\"Tables\":[\"table-2\"]}],\"StageGuid\":\"5c2f3c19-a0bcbff2-652a2847-8aa8b887\",\"Stats\":{\"BaseTimeMs\":1743437004539,\"FinishedTasks\":0,\"PhysicalStageId\":2,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"StageGuid\":\"\"}],\"StageGuid\":\"fd211659-c73f62f-6601ad70-59328447\",\"Stats\":{\"BaseTimeMs\":1743437004539,\"FinishedTasks\":0,\"PhysicalStageId\":3,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"StageGuid\":\"\"}],\"StageGuid\":\"61c6e454-10813268-de87ed4e-c01586dc\",\"Stats\":{\"BaseTimeMs\":1743437004539,\"FinishedTasks\":0,\"PhysicalStageId\":4,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"SortColumns\":[\"key (Asc)\"],\"StageGuid\":\"\"}],\"StageGuid\":\"5bf1baac-c8f5e541-e649a859-6a7a96d\",\"Stats\":{\"BaseTimeMs\":1743437004539,\"ComputeNodes\":[{\"CpuTimeUs\":434,\"Tasks\":[{\"ComputeTimeUs\":129,\"FinishTimeMs\":1743437004543,\"Host\":\"ghrun-tuhoqst4xe\",\"InputBytes\":7,\"InputRows\":2,\"NodeId\":2,\"OutputBytes\":7,\"OutputRows\":2,\"TaskId\":6}]}],\"FinishedTasks\":0,\"PhysicalStageId\":5,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"StageGuid\":\"\"}],\"StageGuid\":\"7dfbc4bd-7c9e2f62-8a8f9ad6-994b1b87\",\"Stats\":{\"BaseTimeMs\":1743437004539,\"FinishedTasks\":0,\"PhysicalStageId\":6,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"StageGuid\":\"\"}],\"StageGuid\":\"\"}" StatConvertBytes: 3892 Extra { type_url: "type.googleapis.com/NKqpProto.TKqpExecutionExtraStats" value: "\010\002\022\013\010\215\003\020\270\t\030\374\037 \007" } } 2025-03-31T16:03:24.548978Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1077:2858] TxId: 281474976715667. Ctx: { TraceId: 01jqpf2z99219j5j0rpqk0wr78, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YWQ0ZWUzZTYtNWNmNDg4NmMtYjU5YmI3NzItZmRhZTI1ZmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-03-31T16:03:24.549056Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1077:2858] TxId: 281474976715667. Ctx: { TraceId: 01jqpf2z99219j5j0rpqk0wr78, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YWQ0ZWUzZTYtNWNmNDg4NmMtYjU5YmI3NzItZmRhZTI1ZmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.004092s ReadRows: 2 ReadBytes: 16 ru: 2 rate limiter was not found force flag: 1 { items { uint32_value: 3 } items { uint32_value: 2 } }, { items { uint32_value: 4 } items { uint32_value: 2 } } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> TFlatTableExecutor_PostponedScan::TestPostponedScanSnapshotMVCC [GOOD] Test command err: 00000.000 II| FAKE_ENV: Born at 2025-03-31T16:02:17.909643Z 00000.009 DD| RESOURCE_BROKER: TResourceBrokerActor bootstrap 00000.011 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.012 II| FAKE_ENV: Starting storage for BS group 0 00000.012 II| FAKE_ENV: Starting storage for BS group 1 00000.012 II| FAKE_ENV: Starting storage for BS group 2 00000.012 II| FAKE_ENV: Starting storage for BS group 3 00000.024 DD| RESOURCE_BROKER: Submitted new background_compaction_gen0 task gen0-table-101-tablet-1 (1 by [1:30:2062]) priority=200 resources={1, 0} 00000.024 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (1 by [1:30:2062]) to queue queue_background_compaction 00000.024 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task gen0-table-101-tablet-1 (1 by [1:30:2062]) from queue queue_background_compaction 00000.024 DD| RESOURCE_BROKER: Assigning in-fly task gen0-table-101-tablet-1 (1 by [1:30:2062]) to queue queue_background_compaction 00000.024 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_background_compaction from 0.000000 to 50.000000 (insert task gen0-table-101-tablet-1 (1 by [1:30:2062])) 00000.028 DD| RESOURCE_BROKER: Finish task gen0-table-101-tablet-1 (1 by [1:30:2062]) (release resources {1, 0}) 00000.028 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_background_compaction from 50.000000 to 0.000000 (remove task gen0-table-101-tablet-1 (1 by [1:30:2062])) 00000.030 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 2 actors 00000.030 NN| TABLET_SAUSAGECACHE: Poison cache serviced 0 reqs hit {0 0b} miss {0 0b} 00000.030 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.030 II| FAKE_ENV: DS.0 gone, left {771b, 9}, put {791b, 10} 00000.030 II| FAKE_ENV: DS.1 gone, left {1347b, 10}, put {1347b, 10} 00000.030 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.030 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.030 II| FAKE_ENV: All BS storage groups are stopped 00000.030 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.030 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 23}, stopped 00000.000 II| FAKE_ENV: Born at 2025-03-31T16:02:17.962364Z 00000.008 DD| RESOURCE_BROKER: TResourceBrokerActor bootstrap 00000.010 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.010 II| FAKE_ENV: Starting storage for BS group 0 00000.011 II| FAKE_ENV: Starting storage for BS group 1 00000.011 II| FAKE_ENV: Starting storage for BS group 2 00000.011 II| FAKE_ENV: Starting storage for BS group 3 00000.014 DD| RESOURCE_BROKER: Submitted new background_compaction task bckg-block (987987987987 by [2:8:2055]) priority=0 resources={1, 0} 00000.014 DD| RESOURCE_BROKER: Assigning waiting task bckg-block (987987987987 by [2:8:2055]) to queue queue_background_compaction 00000.014 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task bckg-block (987987987987 by [2:8:2055]) from queue queue_background_compaction 00000.014 DD| 
RESOURCE_BROKER: Assigning in-fly task bckg-block (987987987987 by [2:8:2055]) to queue queue_background_compaction 00000.014 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_background_compaction from 0.000000 to 300.000000 (insert task bckg-block (987987987987 by [2:8:2055])) 00000.016 DD| RESOURCE_BROKER: Submitted new background_compaction_gen0 task gen0-table-101-tablet-1 (1 by [2:30:2062]) priority=200 resources={1, 0} 00000.016 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (1 by [2:30:2062]) to queue queue_background_compaction 00000.016 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.018 DD| RESOURCE_BROKER: Update task gen0-table-101-tablet-1 (1 by [2:30:2062]) (priority=5 type=compaction_gen0 resources={1, 0} resubmit=0) 00000.018 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (1 by [2:30:2062]) to queue queue_compaction_gen0 00000.018 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task gen0-table-101-tablet-1 (1 by [2:30:2062]) from queue queue_compaction_gen0 00000.018 DD| RESOURCE_BROKER: Assigning in-fly task gen0-table-101-tablet-1 (1 by [2:30:2062]) to queue queue_compaction_gen0 00000.018 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 0.000000 to 5.000000 (insert task gen0-table-101-tablet-1 (1 by [2:30:2062])) 00000.022 DD| RESOURCE_BROKER: Finish task gen0-table-101-tablet-1 (1 by [2:30:2062]) (release resources {1, 0}) 00000.022 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 5.000000 to 0.000000 (remove task gen0-table-101-tablet-1 (1 by [2:30:2062])) 00000.023 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 2 actors 00000.023 NN| TABLET_SAUSAGECACHE: Poison cache serviced 0 reqs hit {0 0b} miss {0 0b} 00000.023 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.023 II| FAKE_ENV: DS.0 gone, left {1262b, 14}, put {1282b, 15} 00000.024 II| FAKE_ENV: DS.1 gone, left {1890b, 15}, put {1890b, 15} 00000.024 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.024 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.024 II| FAKE_ENV: All BS storage groups are stopped 00000.024 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.024 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 31}, stopped 00000.000 II| FAKE_ENV: Born at 2025-03-31T16:02:17.992272Z 00000.007 DD| RESOURCE_BROKER: TResourceBrokerActor bootstrap 00000.008 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.009 II| FAKE_ENV: Starting storage for BS group 0 00000.009 II| FAKE_ENV: Starting storage for BS group 1 00000.009 II| FAKE_ENV: Starting storage for BS group 2 00000.009 II| FAKE_ENV: Starting storage for BS group 3 00000.013 DD| RESOURCE_BROKER: Submitted new background_compaction task bckg-block (987987987987 by [3:8:2055]) priority=0 resources={1, 0} 00000.013 DD| RESOURCE_BROKER: Assigning waiting task bckg-block (987987987987 by [3:8:2055]) to queue queue_background_compaction 00000.013 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task bckg-block (987987987987 by [3:8:2055]) from queue queue_background_compaction 00000.013 DD| RESOURCE_BROKER: Assigning in-fly task bckg-block (987987987987 by [3:8:2055]) to queue queue_background_compaction 00000.013 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_background_compaction from 0.000000 to 300.000000 (insert task 
bckg-block (987987987987 by [3:8:2055])) 00000.016 DD| RESOURCE_BROKER: Submitted new background_compaction_gen0 task gen0-table-101-tablet-1 (1 by [3:30:2062]) priority=200 resources={1, 0} 00000.016 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (1 by [3:30:2062]) to queue queue_background_compaction 00000.016 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.017 DD| RESOURCE_BROKER: Update task gen0-table-101-tablet-1 (1 by [3:30:2062]) (priority=5 type=compaction_gen0 resources={1, 0} resubmit=0) 00000.017 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (1 by [3:30:2062]) to queue queue_compaction_gen0 00000.017 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task gen0-table-101-tablet-1 (1 by [3:30:2062]) from queue queue_compaction_gen0 00000.017 DD| RESOURCE_BROKER: Assigning in-fly task gen0-table-101-tablet-1 (1 by [3:30:2062]) to queue queue_compaction_gen0 00000.018 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 0.000000 to 5.000000 (insert task gen0-table-101-tablet-1 (1 by [3:30:2062])) 00000.020 DD| RESOURCE_BROKER: Finish task gen0-table-101-tablet-1 (1 by [3:30:2062]) (release resources {1, 0}) 00000.021 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 5.000000 to 0.000000 (remove task gen0-table-101-tablet-1 (1 by [3:30:2062])) 00000.022 DD| RESOURCE_BROKER: Submitted new background_compaction_gen0 task gen0-table-101-tablet-1 (2 by [3:30:2062]) priority=200 resources={1, 0} 00000.022 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (2 by [3:30:2062]) to queue queue_background_compaction 00000.022 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.027 DD| RESOURCE_BROKER: Update task gen0-table-101-tablet-1 (2 by [3:30:2062]) (priority=5 type=compaction_gen0 resources={1, 0} resubmit=0) 00000.027 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (2 by [3:30:2062]) to queue queue_compaction_gen0 00000.027 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task gen0-table-101-tablet-1 (2 by [3:30:2062]) from queue queue_compaction_gen0 00000.027 DD| RESOURCE_BROKER: Assigning in-fly task gen0-table-101-tablet-1 (2 by [3:30:2062]) to queue queue_compaction_gen0 00000.027 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 0.000000 to 4.750000 (insert task gen0-table-101-tablet-1 (2 by [3:30:2062])) 00000.073 DD| RESOURCE_BROKER: Finish task gen0-table-101-tablet-1 (2 by [3:30:2062]) (release resources {1, 0}) 00000.073 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 4.750000 to 0.000000 (remove task gen0-table-101-tablet-1 (2 by [3:30:2062])) 00000.075 DD| RESOURCE_BROKER: Submitted new background_compaction_gen0 task gen0-table-101-tablet-1 (3 by [3:30:2062]) priority=200 resources={1, 0} 00000.075 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (3 by [3:30:2062]) to queue queue_background_compaction 00000.075 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.076 DD| RESOURCE_BROKER: Update task gen0-table-101-tablet-1 (3 by [3:30:2062]) (priority=5 type=compaction_gen0 resources={1, 0} resubmit=0) 00000.076 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (3 by [3:30:2062]) to queue queue_compaction_gen0 00000.076 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task 
gen0-table-101-tablet-1 (3 by [3:30:2062]) from queue queue_compaction_gen0 00000.076 DD| RESOURCE_BROKER: Assigning in-fly task gen0-table-101-tablet-1 (3 by [3:30:2062]) to queue queue_compaction_gen0 00000.077 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 0.000000 to 4.500000 (insert task gen0-table-101-tablet-1 (3 by [3:30:2062])) 00000.079 DD| RESOURCE_BROKER: Finish task gen0-table-101-tablet-1 (3 by [3:30:2062]) (release resources {1, 0}) 00000.079 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 4.500000 to 0.000000 (remove task gen0-table-101-tablet-1 (3 by [3:30:2062])) 00000.081 DD| RESOURCE_BROKER: Submitted new background_compaction_gen0 task gen0-table-101-tablet-1 (4 by [3:30:2062]) priority=200 resources={1, 0} 00000.081 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (4 by [3:30:2062]) to queue queue_background_compaction 00000.081 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.082 DD| RESOURCE_BROKER: Update task gen0-table-101-tablet-1 (4 by [3:30:2062]) (priority=5 type=compaction_gen0 resources={1, 0} resubmit=0) 00000.082 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (4 by [3:30:2062]) to queue queue_compaction_gen0 00000.082 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task gen0-table-101-tablet-1 (4 by [3:30:2062]) from queue queue_compaction_gen0 00000.082 DD| RESOURCE_BROKER: Assigning in-fly task gen0-table-101-tablet-1 (4 by [3:30:2062]) to queue queue_compaction_gen0 00000.082 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 0.000000 to 4.250000 (insert task gen0-table-101-tablet-1 (4 by [3:30:2062])) 00000.085 DD| R ... 
eader{1:2:97} starting compaction 00000.094 II| TABLET_EXECUTOR: Leader{1:2:98} starting Scan{15 on 101, Compact{1.2.97, eph 8}} 00000.094 II| TABLET_EXECUTOR: Leader{1:2:98} started compaction 15 00000.094 II| TABLET_OPS_HOST: Scan{15 on 101, Compact{1.2.97, eph 8}} begin on TSubset{head 9, 1m 1p 0c} 00000.100 II| TABLET_OPS_HOST: Scan{15 on 101, Compact{1.2.97, eph 8}} end=0, 80r seen, TFwd{fetch=1.93KiB,saved=1.93KiB,usage=1.93KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=2}, trace 0 of 0 ~2p 00000.101 II| TABLET_EXECUTOR: Leader{1:2:98} Compact 15 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 97, product {1 parts epoch 9} done 00000.109 II| TABLET_EXECUTOR: Leader{1:2:109} starting compaction 00000.109 II| TABLET_EXECUTOR: Leader{1:2:110} starting Scan{17 on 101, Compact{1.2.109, eph 9}} 00000.109 II| TABLET_EXECUTOR: Leader{1:2:110} started compaction 17 00000.109 II| TABLET_OPS_HOST: Scan{17 on 101, Compact{1.2.109, eph 9}} begin on TSubset{head 10, 1m 1p 0c} 00000.112 II| TABLET_OPS_HOST: Scan{17 on 101, Compact{1.2.109, eph 9}} end=0, 90r seen, TFwd{fetch=2.21KiB,saved=2.21KiB,usage=2.21KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=2}, trace 0 of 0 ~2p 00000.113 II| TABLET_EXECUTOR: Leader{1:2:110} Compact 17 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 109, product {1 parts epoch 10} done 00000.117 II| TABLET_EXECUTOR: Leader{1:2:121} starting compaction 00000.117 II| TABLET_EXECUTOR: Leader{1:2:122} starting Scan{19 on 101, Compact{1.2.121, eph 10}} 00000.117 II| TABLET_EXECUTOR: Leader{1:2:122} started compaction 19 00000.117 II| TABLET_OPS_HOST: Scan{19 on 101, Compact{1.2.121, eph 10}} begin on TSubset{head 11, 1m 1p 0c} 00000.128 II| TABLET_OPS_HOST: Scan{19 on 101, Compact{1.2.121, eph 10}} end=0, 100r seen, TFwd{fetch=2.48KiB,saved=2.48KiB,usage=2.48KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=2}, trace 0 of 0 ~2p 00000.138 II| TABLET_EXECUTOR: Leader{1:2:123} Compact 19 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 121, product {1 parts epoch 11} done 00000.141 II| TABLET_EXECUTOR: Leader{1:2:133} starting compaction 00000.141 II| TABLET_EXECUTOR: Leader{1:2:134} starting Scan{21 on 101, Compact{1.2.133, eph 11}} 00000.141 II| TABLET_EXECUTOR: Leader{1:2:134} started compaction 21 00000.141 II| TABLET_OPS_HOST: Scan{21 on 101, Compact{1.2.133, eph 11}} begin on TSubset{head 12, 1m 1p 0c} 00000.143 II| TABLET_OPS_HOST: Scan{21 on 101, Compact{1.2.133, eph 11}} end=0, 110r seen, TFwd{fetch=2.75KiB,saved=2.75KiB,usage=2.75KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=2}, trace 0 of 0 ~2p 00000.145 II| TABLET_EXECUTOR: Leader{1:2:134} Compact 21 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 133, product {1 parts epoch 12} done 00000.147 II| TABLET_EXECUTOR: Leader{1:2:137} starting Scan{24 on 101, DummyScan} 00000.147 II| TABLET_OPS_HOST: Scan{24 on 101, DummyScan} begin on TSubset{head 12, 1m 1p 0c} 00000.150 II| TABLET_EXECUTOR: Leader{1:2:146} starting compaction 00000.150 II| TABLET_EXECUTOR: Leader{1:2:147} starting Scan{25 on 101, Compact{1.2.146, eph 12}} 00000.150 II| TABLET_EXECUTOR: Leader{1:2:147} started compaction 25 00000.150 II| TABLET_OPS_HOST: Scan{25 on 101, Compact{1.2.146, eph 12}} begin on TSubset{head 13, 1m 1p 0c} 00000.153 II| TABLET_OPS_HOST: Scan{25 on 101, Compact{1.2.146, eph 12}} end=0, 120r seen, TFwd{fetch=3.03KiB,saved=3.03KiB,usage=3.03KiB,after=0B,before=0B}, bio 
Spent{time=0.000s,wait=0.000s,interrupts=2}, trace 0 of 0 ~2p 00000.154 II| TABLET_EXECUTOR: Leader{1:2:147} Compact 25 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 146, product {1 parts epoch 13} done 00000.158 II| TABLET_EXECUTOR: Leader{1:2:158} starting compaction 00000.158 II| TABLET_EXECUTOR: Leader{1:2:159} starting Scan{27 on 101, Compact{1.2.158, eph 13}} 00000.158 II| TABLET_EXECUTOR: Leader{1:2:159} started compaction 27 00000.158 II| TABLET_OPS_HOST: Scan{27 on 101, Compact{1.2.158, eph 13}} begin on TSubset{head 14, 1m 1p 0c} 00000.160 II| TABLET_OPS_HOST: Scan{27 on 101, Compact{1.2.158, eph 13}} end=0, 130r seen, TFwd{fetch=3.44KiB,saved=3.44KiB,usage=3.44KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=2}, trace 0 of 0 ~2p 00000.162 II| TABLET_EXECUTOR: Leader{1:2:160} Compact 27 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 158, product {1 parts epoch 14} done 00000.165 II| TABLET_EXECUTOR: Leader{1:2:170} starting compaction 00000.165 II| TABLET_EXECUTOR: Leader{1:2:171} starting Scan{29 on 101, Compact{1.2.170, eph 14}} 00000.165 II| TABLET_EXECUTOR: Leader{1:2:171} started compaction 29 00000.165 II| TABLET_OPS_HOST: Scan{29 on 101, Compact{1.2.170, eph 14}} begin on TSubset{head 15, 1m 1p 0c} 00000.167 II| TABLET_OPS_HOST: Scan{29 on 101, Compact{1.2.170, eph 14}} end=0, 140r seen, TFwd{fetch=3.87KiB,saved=3.87KiB,usage=3.87KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=2}, trace 0 of 0 ~2p 00000.172 II| TABLET_EXECUTOR: Leader{1:2:172} Compact 29 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 170, product {1 parts epoch 15} done 00000.176 II| TABLET_EXECUTOR: Leader{1:2:182} starting compaction 00000.177 II| TABLET_EXECUTOR: Leader{1:2:183} starting Scan{31 on 101, Compact{1.2.182, eph 15}} 00000.177 II| TABLET_EXECUTOR: Leader{1:2:183} started compaction 31 00000.177 II| TABLET_OPS_HOST: Scan{31 on 101, Compact{1.2.182, eph 15}} begin on TSubset{head 16, 1m 1p 0c} 00000.180 II| TABLET_OPS_HOST: Scan{31 on 101, Compact{1.2.182, eph 15}} end=0, 150r seen, TFwd{fetch=4.3KiB,saved=4.3KiB,usage=4.3KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=2}, trace 0 of 0 ~2p 00000.181 II| TABLET_EXECUTOR: Leader{1:2:184} Compact 31 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 182, product {1 parts epoch 16} done 00000.184 II| TABLET_EXECUTOR: Leader{1:2:194} starting compaction 00000.184 II| TABLET_EXECUTOR: Leader{1:2:195} starting Scan{33 on 101, Compact{1.2.194, eph 16}} 00000.185 II| TABLET_EXECUTOR: Leader{1:2:195} started compaction 33 00000.185 II| TABLET_OPS_HOST: Scan{33 on 101, Compact{1.2.194, eph 16}} begin on TSubset{head 17, 1m 1p 0c} 00000.187 II| TABLET_OPS_HOST: Scan{33 on 101, Compact{1.2.194, eph 16}} end=0, 160r seen, TFwd{fetch=4.73KiB,saved=4.73KiB,usage=4.73KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=2}, trace 0 of 0 ~2p 00000.190 II| TABLET_EXECUTOR: Leader{1:2:196} Compact 33 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 194, product {1 parts epoch 17} done 00000.193 II| TABLET_EXECUTOR: Leader{1:2:206} starting compaction 00000.194 II| TABLET_EXECUTOR: Leader{1:2:207} starting Scan{35 on 101, Compact{1.2.206, eph 17}} 00000.194 II| TABLET_EXECUTOR: Leader{1:2:207} started compaction 35 00000.194 II| TABLET_OPS_HOST: Scan{35 on 101, Compact{1.2.206, eph 17}} begin on TSubset{head 18, 1m 1p 0c} 00000.216 II| TABLET_OPS_HOST: Scan{35 on 101, Compact{1.2.206, eph 17}} end=0, 170r seen, 
TFwd{fetch=5.16KiB,saved=5.16KiB,usage=5.16KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=2}, trace 0 of 0 ~2p 00000.218 II| TABLET_EXECUTOR: Leader{1:2:208} Compact 35 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 206, product {1 parts epoch 18} done 00000.221 II| TABLET_EXECUTOR: Leader{1:2:218} starting compaction 00000.221 II| TABLET_EXECUTOR: Leader{1:2:219} starting Scan{37 on 101, Compact{1.2.218, eph 18}} 00000.221 II| TABLET_EXECUTOR: Leader{1:2:219} started compaction 37 00000.221 II| TABLET_OPS_HOST: Scan{37 on 101, Compact{1.2.218, eph 18}} begin on TSubset{head 19, 1m 1p 0c} 00000.228 II| TABLET_OPS_HOST: Scan{37 on 101, Compact{1.2.218, eph 18}} end=0, 180r seen, TFwd{fetch=5.59KiB,saved=5.59KiB,usage=5.59KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=2}, trace 0 of 0 ~2p 00000.235 II| TABLET_EXECUTOR: Leader{1:2:219} Compact 37 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 218, product {1 parts epoch 19} done 00000.238 II| TABLET_EXECUTOR: Leader{1:2:230} starting compaction 00000.238 II| TABLET_EXECUTOR: Leader{1:2:231} starting Scan{39 on 101, Compact{1.2.230, eph 19}} 00000.238 II| TABLET_EXECUTOR: Leader{1:2:231} started compaction 39 00000.238 II| TABLET_OPS_HOST: Scan{39 on 101, Compact{1.2.230, eph 19}} begin on TSubset{head 20, 1m 1p 0c} 00000.241 II| TABLET_OPS_HOST: Scan{39 on 101, Compact{1.2.230, eph 19}} end=0, 190r seen, TFwd{fetch=6.02KiB,saved=6.02KiB,usage=6.02KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=2}, trace 0 of 0 ~2p 00000.242 II| TABLET_EXECUTOR: Leader{1:2:231} Compact 39 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 230, product {1 parts epoch 20} done 00000.246 II| TABLET_EXECUTOR: Leader{1:2:242} starting compaction 00000.246 II| TABLET_EXECUTOR: Leader{1:2:243} starting Scan{41 on 101, Compact{1.2.242, eph 20}} 00000.246 II| TABLET_EXECUTOR: Leader{1:2:243} started compaction 41 00000.246 II| TABLET_OPS_HOST: Scan{41 on 101, Compact{1.2.242, eph 20}} begin on TSubset{head 21, 1m 1p 0c} 00000.251 II| TABLET_OPS_HOST: Scan{41 on 101, Compact{1.2.242, eph 20}} end=0, 200r seen, TFwd{fetch=6.45KiB,saved=6.45KiB,usage=6.45KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=2}, trace 0 of 0 ~2p 00000.262 II| TABLET_EXECUTOR: Leader{1:2:244} Compact 41 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 242, product {1 parts epoch 21} done 00000.265 II| TABLET_EXECUTOR: Leader{1:2:254} starting compaction 00000.265 II| TABLET_EXECUTOR: Leader{1:2:255} starting Scan{43 on 101, Compact{1.2.254, eph 21}} 00000.265 II| TABLET_EXECUTOR: Leader{1:2:255} started compaction 43 00000.266 II| TABLET_OPS_HOST: Scan{43 on 101, Compact{1.2.254, eph 21}} begin on TSubset{head 22, 1m 1p 0c} 00000.281 II| TABLET_OPS_HOST: Scan{43 on 101, Compact{1.2.254, eph 21}} end=0, 210r seen, TFwd{fetch=6.88KiB,saved=6.88KiB,usage=6.88KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=2}, trace 0 of 0 ~2p 00000.283 II| TABLET_EXECUTOR: Leader{1:2:255} Compact 43 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 254, product {1 parts epoch 22} done 00000.287 II| TABLET_EXECUTOR: Leader{1:2:266} starting compaction 00000.287 II| TABLET_EXECUTOR: Leader{1:2:267} starting Scan{45 on 101, Compact{1.2.266, eph 22}} 00000.287 II| TABLET_EXECUTOR: Leader{1:2:267} started compaction 45 00000.287 II| TABLET_OPS_HOST: Scan{45 on 101, Compact{1.2.266, eph 22}} begin on TSubset{head 23, 1m 1p 0c} 00000.289 II| 
TABLET_OPS_HOST: Scan{45 on 101, Compact{1.2.266, eph 22}} end=0, 220r seen, TFwd{fetch=7.43KiB,saved=7.43KiB,usage=7.43KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=3}, trace 0 of 0 ~2p 00000.295 II| TABLET_EXECUTOR: Leader{1:2:268} Compact 45 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 266, product {1 parts epoch 23} done 00000.297 II| TABLET_OPS_HOST: Scan{24 on 101, DummyScan} end=0, 111r seen, TFwd{fetch=3.03KiB,saved=3.03KiB,usage=3.03KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=1} 00000.298 II| TABLET_EXECUTOR: Leader{1:2:270} suiciding, Waste{2:0, 8879b +(262, 99769b), 269 trc, -99769b acc} 00000.301 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 2 actors 00000.302 NN| TABLET_SAUSAGECACHE: Poison cache serviced 23 reqs hit {24 76962b} miss {0 0b} 00000.302 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.302 II| FAKE_ENV: DS.0 gone, left {27025b, 269}, put {27045b, 270} 00000.303 II| FAKE_ENV: DS.1 gone, left {111147b, 290}, put {111147b, 290} 00000.304 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.304 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.304 II| FAKE_ENV: All BS storage groups are stopped 00000.304 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.304 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 153}, stopped ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestUnmonitoredEventsThenNoMonitorings [GOOD] Test command err: 2025-03-31T16:03:25.046253Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000fbf/r3tmp/tmp6KzzNL/pdisk_1.dat 2025-03-31T16:03:25.120009Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-31T16:03:25.120140Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-31T16:03:25.123071Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-31T16:03:25.123194Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-31T16:03:25.130473Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-31T16:03:25.795106Z node 1 :BS_PROXY_PUT INFO: [abc2fc901918ac71] bootstrap ActorId# [1:482:2460] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:8:0:0:1296:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-03-31T16:03:25.795274Z node 1 :BS_PROXY_PUT DEBUG: [abc2fc901918ac71] Id# [72057594037932033:2:8:0:0:1296:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-03-31T16:03:25.795321Z node 1 :BS_PROXY_PUT DEBUG: [abc2fc901918ac71] Id# 
[72057594037932033:2:8:0:0:1296:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-03-31T16:03:25.795349Z node 1 :BS_PROXY_PUT DEBUG: [abc2fc901918ac71] Id# [72057594037932033:2:8:0:0:1296:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-03-31T16:03:25.795377Z node 1 :BS_PROXY_PUT DEBUG: [abc2fc901918ac71] Id# [72057594037932033:2:8:0:0:1296:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-03-31T16:03:25.795409Z node 1 :BS_PROXY_PUT DEBUG: [abc2fc901918ac71] Id# [72057594037932033:2:8:0:0:1296:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-03-31T16:03:25.795437Z node 1 :BS_PROXY_PUT DEBUG: [abc2fc901918ac71] Id# [72057594037932033:2:8:0:0:1296:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-03-31T16:03:25.795475Z node 1 :BS_PROXY_PUT DEBUG: [abc2fc901918ac71] restore Id# [72057594037932033:2:8:0:0:1296:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2025-03-31T16:03:25.795555Z node 1 :BS_PROXY_PUT DEBUG: [abc2fc901918ac71] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:8:0:0:1296:1] Marker# BPG33 2025-03-31T16:03:25.795600Z node 1 :BS_PROXY_PUT DEBUG: [abc2fc901918ac71] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:8:0:0:1296:1] Marker# BPG32 2025-03-31T16:03:25.795644Z node 1 :BS_PROXY_PUT DEBUG: [abc2fc901918ac71] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:8:0:0:1296:2] Marker# BPG33 2025-03-31T16:03:25.795675Z node 1 :BS_PROXY_PUT DEBUG: [abc2fc901918ac71] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:8:0:0:1296:2] Marker# BPG32 2025-03-31T16:03:25.795706Z node 1 :BS_PROXY_PUT DEBUG: [abc2fc901918ac71] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:8:0:0:1296:3] Marker# BPG33 2025-03-31T16:03:25.795731Z node 1 :BS_PROXY_PUT DEBUG: [abc2fc901918ac71] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:8:0:0:1296:3] Marker# BPG32 2025-03-31T16:03:25.795895Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:46:2090] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1296:3] FDS# 1296 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-31T16:03:25.795965Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:39:2083] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1296:2] FDS# 1296 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-31T16:03:25.796011Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:60:2104] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1296:1] FDS# 1296 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-31T16:03:25.804255Z node 1 :BS_PROXY_PUT DEBUG: [abc2fc901918ac71] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1296:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 90204 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2025-03-31T16:03:25.804395Z node 1 :BS_PROXY_PUT DEBUG: [abc2fc901918ac71] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1296:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 90204 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# 
Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:1:0] Marker# BPP01 2025-03-31T16:03:25.804445Z node 1 :BS_PROXY_PUT DEBUG: [abc2fc901918ac71] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1296:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 8 } Cost# 90204 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 9 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2025-03-31T16:03:25.804496Z node 1 :BS_PROXY_PUT DEBUG: [abc2fc901918ac71] Result# TEvPutResult {Id# [72057594037932033:2:8:0:0:1296:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12 2025-03-31T16:03:25.804539Z node 1 :BS_PROXY_PUT INFO: [abc2fc901918ac71] SendReply putResult# TEvPutResult {Id# [72057594037932033:2:8:0:0:1296:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-03-31T16:03:25.804715Z node 1 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 33554432 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 1.116 sample PartId# [72057594037932033:2:8:0:0:1296:3] QueryCount# 1 VDiskId# [2000000:1:0:1:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.117 sample PartId# [72057594037932033:2:8:0:0:1296:2] QueryCount# 1 VDiskId# [2000000:1:0:0:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.117 sample PartId# [72057594037932033:2:8:0:0:1296:1] QueryCount# 1 VDiskId# [2000000:1:0:3:0] NodeId# 1 } TEvVPutResult{ TimestampMs# 9.385 VDiskId# [2000000:1:0:0:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 9.49 VDiskId# [2000000:1:0:1:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 9.537 VDiskId# [2000000:1:0:3:0] NodeId# 1 Status# OK } ] } 2025-03-31T16:03:25.854145Z node 1 :BS_PROXY_PUT INFO: [1a43693427d0a82b] bootstrap ActorId# [1:527:2497] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:9:0:0:224:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-03-31T16:03:25.854310Z node 1 :BS_PROXY_PUT DEBUG: [1a43693427d0a82b] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-03-31T16:03:25.854360Z node 1 :BS_PROXY_PUT DEBUG: [1a43693427d0a82b] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-03-31T16:03:25.854394Z node 1 :BS_PROXY_PUT DEBUG: [1a43693427d0a82b] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-03-31T16:03:25.854424Z node 1 :BS_PROXY_PUT DEBUG: [1a43693427d0a82b] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-03-31T16:03:25.854452Z node 1 :BS_PROXY_PUT DEBUG: [1a43693427d0a82b] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-03-31T16:03:25.854480Z node 1 :BS_PROXY_PUT DEBUG: [1a43693427d0a82b] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-03-31T16:03:25.854522Z node 1 :BS_PROXY_PUT DEBUG: [1a43693427d0a82b] restore Id# [72057594037932033:2:9:0:0:224:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2025-03-31T16:03:25.854588Z node 1 :BS_PROXY_PUT DEBUG: [1a43693427d0a82b] partPlacement record partSituation# 
ESituation::Unknown to# 0 blob Id# [72057594037932033:2:9:0:0:224:1] Marker# BPG33 2025-03-31T16:03:25.854634Z node 1 :BS_PROXY_PUT DEBUG: [1a43693427d0a82b] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:9:0:0:224:1] Marker# BPG32 2025-03-31T16:03:25.854677Z node 1 :BS_PROXY_PUT DEBUG: [1a43693427d0a82b] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:9:0:0:224:2] Marker# BPG33 2025-03-31T16:03:25.854706Z node 1 :BS_PROXY_PUT DEBUG: [1a43693427d0a82b] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:9:0:0:224:2] Marker# BPG32 2025-03-31T16:03:25.854737Z node 1 :BS_PROXY_PUT DEBUG: [1a43693427d0a82b] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:9:0:0:224:3] Marker# BPG33 2025-03-31T16:03:25.854760Z node 1 :BS_PROXY_PUT DEBUG: [1a43693427d0a82b] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:9:0:0:224:3] Marker# BPG32 2025-03-31T16:03:25.854927Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:39:2083] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:224:3] FDS# 224 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-31T16:03:25.855011Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:60:2104] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:224:2] FDS# 224 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-31T16:03:25.855061Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:53:2097] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:224:1] FDS# 224 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-31T16:03:25.857121Z node 1 :BS_PROXY_PUT DEBUG: [1a43693427d0a82b] received {EvVPutResult Status# OK ID# [72057594037932033:2:9:0:0:224:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 81763 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:2:0] Marker# BPP01 2025-03-31T16:03:25.857422Z node 1 :BS_PROXY_PUT DEBUG: [1a43693427d0a82b] received {EvVPutResult Status# OK ID# [72057594037932033:2:9:0:0:224:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 81763 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2025-03-31T16:03:25.857520Z node 1 :BS_PROXY_PUT DEBUG: [1a43693427d0a82b] received {EvVPutResult Status# OK ID# [72057594037932033:2:9:0:0:224:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 81763 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000 ... 
PROXY_PUT DEBUG: [bba3bffd2e286f4b] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:10:0:0:238:1] Marker# BPG32 2025-03-31T16:03:25.895808Z node 1 :BS_PROXY_PUT DEBUG: [bba3bffd2e286f4b] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:10:0:0:238:2] Marker# BPG33 2025-03-31T16:03:25.895834Z node 1 :BS_PROXY_PUT DEBUG: [bba3bffd2e286f4b] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:10:0:0:238:2] Marker# BPG32 2025-03-31T16:03:25.895868Z node 1 :BS_PROXY_PUT DEBUG: [bba3bffd2e286f4b] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:10:0:0:238:3] Marker# BPG33 2025-03-31T16:03:25.895892Z node 1 :BS_PROXY_PUT DEBUG: [bba3bffd2e286f4b] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:10:0:0:238:3] Marker# BPG32 2025-03-31T16:03:25.896043Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:60:2104] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:10:0:0:238:3] FDS# 238 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-31T16:03:25.896106Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:53:2097] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:10:0:0:238:2] FDS# 238 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-31T16:03:25.896152Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:46:2090] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:10:0:0:238:1] FDS# 238 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-31T16:03:25.897823Z node 1 :BS_PROXY_PUT DEBUG: [bba3bffd2e286f4b] received {EvVPutResult Status# OK ID# [72057594037932033:2:10:0:0:238:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 11 } Cost# 81874 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 12 }}}} from# [2000000:1:0:1:0] Marker# BPP01 2025-03-31T16:03:25.898073Z node 1 :BS_PROXY_PUT DEBUG: [bba3bffd2e286f4b] received {EvVPutResult Status# OK ID# [72057594037932033:2:10:0:0:238:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 81874 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:2:0] Marker# BPP01 2025-03-31T16:03:25.898189Z node 1 :BS_PROXY_PUT DEBUG: [bba3bffd2e286f4b] received {EvVPutResult Status# OK ID# [72057594037932033:2:10:0:0:238:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 81874 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2025-03-31T16:03:25.898289Z node 1 :BS_PROXY_PUT DEBUG: [bba3bffd2e286f4b] Result# TEvPutResult {Id# [72057594037932033:2:10:0:0:238:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12 2025-03-31T16:03:25.898354Z node 1 :BS_PROXY_PUT INFO: [bba3bffd2e286f4b] SendReply putResult# TEvPutResult {Id# [72057594037932033:2:10:0:0:238:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-03-31T16:03:25.898540Z node 1 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 33554432 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ 
TEvVPut{ TimestampMs# 1.03 sample PartId# [72057594037932033:2:10:0:0:238:3] QueryCount# 1 VDiskId# [2000000:1:0:3:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.031 sample PartId# [72057594037932033:2:10:0:0:238:2] QueryCount# 1 VDiskId# [2000000:1:0:2:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.031 sample PartId# [72057594037932033:2:10:0:0:238:1] QueryCount# 1 VDiskId# [2000000:1:0:1:0] NodeId# 1 } TEvVPutResult{ TimestampMs# 2.739 VDiskId# [2000000:1:0:1:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 2.957 VDiskId# [2000000:1:0:2:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 3.09 VDiskId# [2000000:1:0:3:0] NodeId# 1 Status# OK } ] } 2025-03-31T16:03:25.900041Z node 1 :BS_PROXY INFO: Group# 2181038082 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-03-31T16:03:25.900094Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 2181038082 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-03-31T16:03:25.901983Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:528:2498] Create Queue# [1:532:2501] targetNodeId# 1 Marker# DSP01 2025-03-31T16:03:25.902119Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:528:2498] Create Queue# [1:533:2502] targetNodeId# 1 Marker# DSP01 2025-03-31T16:03:25.902237Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:528:2498] Create Queue# [1:534:2503] targetNodeId# 1 Marker# DSP01 2025-03-31T16:03:25.902350Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:528:2498] Create Queue# [1:535:2504] targetNodeId# 1 Marker# DSP01 2025-03-31T16:03:25.902468Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:528:2498] Create Queue# [1:536:2505] targetNodeId# 1 Marker# DSP01 2025-03-31T16:03:25.902580Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:528:2498] Create Queue# [1:537:2506] targetNodeId# 1 Marker# DSP01 2025-03-31T16:03:25.902697Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:528:2498] Create Queue# [1:538:2507] targetNodeId# 1 Marker# DSP01 2025-03-31T16:03:25.902722Z node 1 :BS_PROXY INFO: Group# 2181038082 SetStateEstablishingSessions Marker# DSP03 2025-03-31T16:03:25.903436Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 1 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-03-31T16:03:25.903563Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 2 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-03-31T16:03:25.903660Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 3 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-03-31T16:03:25.903726Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 4 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 
MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-03-31T16:03:25.903788Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 5 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-03-31T16:03:25.903852Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 6 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-03-31T16:03:25.903897Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 7 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-03-31T16:03:25.903923Z node 1 :BS_PROXY INFO: Group# 2181038082 -> StateWork Marker# DSP11 2025-03-31T16:03:25.903960Z node 1 :BS_PROXY INFO: Group# 2181038082 SetStateWork Marker# DSP15 2025-03-31T16:03:25.904106Z node 1 :BS_PROXY_BLOCK DEBUG: [f913878b3da83702] bootstrap ActorId# [1:539:2508] Group# 2181038082 TabletId# 1234 Generation# 1 Deadline# 586524-01-19T08:01:49.551615Z RestartCounter# 0 Marker# DSPB05 2025-03-31T16:03:25.904153Z node 1 :BS_PROXY_BLOCK DEBUG: [f913878b3da83702] Sending TEvVBlock Tablet# 1234 Generation# 1 vdiskId# [82000002:1:0:0:0] node# 1 Marker# DSPB03 2025-03-31T16:03:25.904381Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:532:2501] NKikimr::TEvBlobStorage::TEvVBlock# NKikimrBlobStorage.TEvVBlock TabletId: 1234 Generation: 1 VDiskID { GroupID: 2181038082 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } IssuerGuid: 13812539193262231709 MsgQoS { ExtQueueId: PutTabletLog } cookie# 0 2025-03-31T16:03:25.905432Z node 1 :BS_PROXY_BLOCK DEBUG: [f913878b3da83702] Handle TEvVBlockResult status# OK From# [82000002:1:0:0:0] NodeId# 1 Marker# DSPB01 2025-03-31T16:03:25.905492Z node 1 :BS_PROXY_BLOCK DEBUG: [f913878b3da83702] Result# TEvBlockResult {Status# OK} Marker# DSPB04 2025-03-31T16:03:25.905816Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:532:2501] NKikimr::TEvBlobStorage::TEvVCollectGarbage# {TEvVCollectGarbage for [tablet:gen:cnt:channel]=[1234:4294967295:4294967295:0] collect=[4294967295:4294967295] cookie# 0 2025-03-31T16:03:25.907938Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 2181038082 IsLimitedKeyless# 0 Marker# DSP57 initialize full monitoring 2025-03-31T16:03:25.908690Z node 1 :BS_PROXY_BLOCK DEBUG: [91379e686f748e92] bootstrap ActorId# [1:541:2510] Group# 2181038082 TabletId# 1234 Generation# 3 Deadline# 586524-01-19T08:01:49.551615Z RestartCounter# 0 Marker# DSPB05 2025-03-31T16:03:25.908752Z node 1 :BS_PROXY_BLOCK DEBUG: [91379e686f748e92] Sending TEvVBlock Tablet# 1234 Generation# 3 vdiskId# [82000002:1:0:0:0] node# 1 Marker# DSPB03 2025-03-31T16:03:25.908928Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:532:2501] NKikimr::TEvBlobStorage::TEvVBlock# NKikimrBlobStorage.TEvVBlock TabletId: 1234 Generation: 3 VDiskID { GroupID: 2181038082 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } IssuerGuid: 17731750147387654901 MsgQoS { 
ExtQueueId: PutTabletLog } cookie# 0 2025-03-31T16:03:25.909875Z node 1 :BS_PROXY_BLOCK DEBUG: [91379e686f748e92] Handle TEvVBlockResult status# OK From# [82000002:1:0:0:0] NodeId# 1 Marker# DSPB01 2025-03-31T16:03:25.909933Z node 1 :BS_PROXY_BLOCK DEBUG: [91379e686f748e92] Result# TEvBlockResult {Status# OK} Marker# DSPB04 2025-03-31T16:03:25.910346Z node 1 :BS_PROXY_BLOCK DEBUG: [cd65997ea3b51537] bootstrap ActorId# [1:542:2511] Group# 2181038082 TabletId# 1234 Generation# 4 Deadline# 586524-01-19T08:01:49.551615Z RestartCounter# 0 Marker# DSPB05 2025-03-31T16:03:25.910403Z node 1 :BS_PROXY_BLOCK DEBUG: [cd65997ea3b51537] Sending TEvVBlock Tablet# 1234 Generation# 4 vdiskId# [82000002:1:0:0:0] node# 1 Marker# DSPB03 2025-03-31T16:03:25.910554Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:532:2501] NKikimr::TEvBlobStorage::TEvVBlock# NKikimrBlobStorage.TEvVBlock TabletId: 1234 Generation: 4 VDiskID { GroupID: 2181038082 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } IssuerGuid: 1553493503860896779 MsgQoS { ExtQueueId: PutTabletLog } cookie# 0 2025-03-31T16:03:25.911903Z node 1 :BS_PROXY_BLOCK DEBUG: [cd65997ea3b51537] Handle TEvVBlockResult status# OK From# [82000002:1:0:0:0] NodeId# 1 Marker# DSPB01 2025-03-31T16:03:25.911965Z node 1 :BS_PROXY_BLOCK DEBUG: [cd65997ea3b51537] Result# TEvBlockResult {Status# OK} Marker# DSPB04
>> TPartBtreeIndexIteration::FewNodes_Groups_Slices [GOOD]
>> TPartBtreeIndexIteration::FewNodes_History_Slices
>> TSequence::CreateSequenceInsideTableThenDropSequence [GOOD]
>> TSequence::CreateSequenceInsideTableThenDropTable
>> TExportToS3Tests::UidAsIdempotencyKey
>> TBlobStorageWardenTest::TestLimitedKeylessGroupThenNoMonitoring [GOOD]
>> TPersQueueTest::EachMessageGetsExactlyOneAcknowledgementInCorrectOrder [GOOD]
>> TPersQueueTest::Delete
>> TBlobStorageWardenTest::TestDeleteStoragePool [GOOD]
>> TBlobStorageWardenTest::TestBlockEncriptedGroup
>> TExportToS3Tests::CancelUponCreatingExportDirShouldSucceed
>> TBlobStorageWardenTest::TestGivenPDiskFormatedWithGuid1AndCreatedWithGuid2WhenYardInitThenError [GOOD]
>> KqpRm::DisonnectNodes
>> TExportToS3Tests::DropSourceTableBeforeTransferring
>> TExportToS3Tests::CancelUponTransferringSingleShardTableShouldSucceed
>> TSequence::CreateSequenceInsideIndexTableNotAllowed [GOOD]
>> TSequence::CopyTableWithSequence
>> KqpPragma::MatchRecognizeWithTimeOrderRecoverer [GOOD]
|73.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest
>> TBlobStorageWardenTest::TestSendUsefulMonitoring [GOOD]
>> TExportToS3Tests::CheckItemProgress
------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestLimitedKeylessGroupThenNoMonitoring [GOOD]
Test command err: 2025-03-31T16:03:25.394241Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-31T16:03:25.404118Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-31T16:03:25.404266Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-31T16:03:25.409080Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]:
Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-31T16:03:25.409228Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-31T16:03:25.409327Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000fd5/r3tmp/tmpp9hLgN/pdisk_1.dat 2025-03-31T16:03:26.127711Z node 1 :BS_PROXY_PUT INFO: [5a9a1d6240d04444] bootstrap ActorId# [1:544:2462] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:8:0:0:1294:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-03-31T16:03:26.127978Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Id# [72057594037932033:2:8:0:0:1294:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-03-31T16:03:26.128027Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Id# [72057594037932033:2:8:0:0:1294:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-03-31T16:03:26.128058Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Id# [72057594037932033:2:8:0:0:1294:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-03-31T16:03:26.128085Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Id# [72057594037932033:2:8:0:0:1294:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-03-31T16:03:26.128114Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Id# [72057594037932033:2:8:0:0:1294:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-03-31T16:03:26.128144Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Id# [72057594037932033:2:8:0:0:1294:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-03-31T16:03:26.128194Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] restore Id# [72057594037932033:2:8:0:0:1294:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2025-03-31T16:03:26.128279Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:8:0:0:1294:1] Marker# BPG33 2025-03-31T16:03:26.128330Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:8:0:0:1294:1] Marker# BPG32 2025-03-31T16:03:26.132195Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:8:0:0:1294:2] Marker# BPG33 2025-03-31T16:03:26.132268Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:8:0:0:1294:2] Marker# BPG32 2025-03-31T16:03:26.132302Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:8:0:0:1294:3] Marker# BPG33 2025-03-31T16:03:26.132331Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:8:0:0:1294:3] Marker# BPG32 2025-03-31T16:03:26.132756Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:65:2091] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1294:3] FDS# 1294 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} 
DataSize# 0 Data# } cookie# 0 2025-03-31T16:03:26.132844Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:58:2084] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1294:2] FDS# 1294 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-31T16:03:26.132901Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:79:2105] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1294:1] FDS# 1294 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-31T16:03:26.145746Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1294:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 90188 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2025-03-31T16:03:26.146074Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1294:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 90188 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:1:0] Marker# BPP01 2025-03-31T16:03:26.146218Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1294:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 8 } Cost# 90188 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 9 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2025-03-31T16:03:26.146319Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Result# TEvPutResult {Id# [72057594037932033:2:8:0:0:1294:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12 2025-03-31T16:03:26.146397Z node 1 :BS_PROXY_PUT INFO: [5a9a1d6240d04444] SendReply putResult# TEvPutResult {Id# [72057594037932033:2:8:0:0:1294:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-03-31T16:03:26.146617Z node 1 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 33554432 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 6.709 sample PartId# [72057594037932033:2:8:0:0:1294:3] QueryCount# 1 VDiskId# [2000000:1:0:1:0] NodeId# 1 } TEvVPut{ TimestampMs# 6.71 sample PartId# [72057594037932033:2:8:0:0:1294:2] QueryCount# 1 VDiskId# [2000000:1:0:0:0] NodeId# 1 } TEvVPut{ TimestampMs# 6.711 sample PartId# [72057594037932033:2:8:0:0:1294:1] QueryCount# 1 VDiskId# [2000000:1:0:3:0] NodeId# 1 } TEvVPutResult{ TimestampMs# 19.621 VDiskId# [2000000:1:0:0:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 19.888 VDiskId# [2000000:1:0:1:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 20.022 VDiskId# [2000000:1:0:3:0] NodeId# 1 Status# OK } ] } 2025-03-31T16:03:26.226442Z node 1 :BS_PROXY_PUT INFO: [8d27cf9df52bfb78] bootstrap ActorId# [1:589:2499] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:9:0:0:229:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-03-31T16:03:26.226619Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Id# [72057594037932033:2:9:0:0:229:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# 
BPG51 2025-03-31T16:03:26.226666Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Id# [72057594037932033:2:9:0:0:229:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-03-31T16:03:26.226694Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Id# [72057594037932033:2:9:0:0:229:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-03-31T16:03:26.226723Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Id# [72057594037932033:2:9:0:0:229:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-03-31T16:03:26.226749Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Id# [72057594037932033:2:9:0:0:229:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-03-31T16:03:26.226776Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Id# [72057594037932033:2:9:0:0:229:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-03-31T16:03:26.226825Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] restore Id# [72057594037932033:2:9:0:0:229:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2025-03-31T16:03:26.226928Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:9:0:0:229:1] Marker# BPG33 2025-03-31T16:03:26.226985Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:9:0:0:229:1] Marker# BPG32 2025-03-31T16:03:26.227037Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:9:0:0:229:2] Marker# BPG33 2025-03-31T16:03:26.227070Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:9:0:0:229:2] Marker# BPG32 2025-03-31T16:03:26.227131Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:9:0:0:229:3] Marker# BPG33 2025-03-31T16:03:26.227160Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:9:0:0:229:3] Marker# BPG32 2025-03-31T16:03:26.227331Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:58:2084] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:229:3] FDS# 229 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-31T16:03:26.227409Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:79:2105] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:229:2] FDS# 229 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-31T16:03:26.227465Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:72:2098] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:229:1] FDS# 229 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-31T16:03:26.231490Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] received {EvVPutResult Status# OK ID# [72057594037932033:2:9:0:0:229:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 81803 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2025-03-31T16:03:26.231838Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] received {EvVPutResult Status# OK ID# [72057594037932033:2:9:0:0:229:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 81803 
ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:2:0] Marker# BPP01 2025-03-31T16:03:26.231989Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] received {EvVPutResult Status# OK ID# [72057594037932033:2:9:0:0:229:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 81803 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2 ... ituation# ESituation::Unknown Marker# BPG51 2025-03-31T16:03:26.284858Z node 1 :BS_PROXY_PUT DEBUG: [f913878b3da83702] restore Id# [1234:2:0:0:0:5:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-03-31T16:03:26.284924Z node 1 :BS_PROXY_PUT DEBUG: [f913878b3da83702] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [1234:2:0:0:0:5:1] Marker# BPG33 2025-03-31T16:03:26.284969Z node 1 :BS_PROXY_PUT DEBUG: [f913878b3da83702] Sending missing VPut part# 0 to# 0 blob Id# [1234:2:0:0:0:5:1] Marker# BPG32 2025-03-31T16:03:26.285094Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:594:2503] NKikimr::TEvBlobStorage::TEvVPut# {ID# [1234:2:0:0:0:5:1] FDS# 5 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-31T16:03:26.288328Z node 1 :BS_PROXY_PUT DEBUG: [f913878b3da83702] received {EvVPutResult Status# OK ID# [1234:2:0:0:0:5:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 2 } Cost# 80039 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 3 }}}} from# [82000002:1:0:0:0] Marker# BPP01 2025-03-31T16:03:26.288480Z node 1 :BS_PROXY_PUT DEBUG: [f913878b3da83702] Result# TEvPutResult {Id# [1234:2:0:0:0:5:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 2181038082 Marker# BPP12 2025-03-31T16:03:26.288541Z node 1 :BS_PROXY_PUT INFO: [f913878b3da83702] SendReply putResult# TEvPutResult {Id# [1234:2:0:0:0:5:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-03-31T16:03:26.288683Z node 1 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 2181038082 HandleClass# TabletLog Tactic# Default History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.578 sample PartId# [1234:2:0:0:0:5:1] QueryCount# 1 VDiskId# [82000002:1:0:0:0] NodeId# 1 } TEvVPutResult{ TimestampMs# 3.863 VDiskId# [82000002:1:0:0:0] NodeId# 1 Status# OK } ] } 2025-03-31T16:03:26.289193Z node 2 :BS_PROXY INFO: Group# 2181038082 TEvConfigureProxy received GroupGeneration# IsLimitedKeyless# false Marker# DSP02 2025-03-31T16:03:26.289234Z node 2 :BS_PROXY INFO: Group# 2181038082 SetStateUnconfigured Marker# DSP07 2025-03-31T16:03:26.289323Z node 2 :BS_PROXY DEBUG: Group# 2181038082 HandleEnqueue# TEvBlock {TabletId# 1234 Generation# 3 Deadline# 18446744073709551 IsMonitored# 1} Marker# DSP17 2025-03-31T16:03:26.289929Z node 2 :BS_NODE ERROR: {NW19@node_warden_group.cpp:211} error while parsing group GroupId# 2181038082 Err# LifeCyclePhase# KEY_NOT_LOADED Key.Id# "" Key.Version# 0 MainKey.Id# "/home/runner/.ya/build/build_root/0l5c/000fd5/r3tmp/tmpp9hLgN//key.txt" MainKey.Version# 1 GroupKeyNonce# 2181038082 2025-03-31T16:03:26.290856Z node 2 :BS_PROXY INFO: Group# 2181038082 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# true Marker# DSP02 
2025-03-31T16:03:26.290921Z node 2 :BS_PROXY NOTICE: EnsureMonitoring Group# 2181038082 IsLimitedKeyless# 1 fullIfPossible# 0 Marker# DSP58 2025-03-31T16:03:26.293114Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:605:2105] Create Queue# [2:607:2106] targetNodeId# 1 Marker# DSP01 2025-03-31T16:03:26.293255Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:605:2105] Create Queue# [2:608:2107] targetNodeId# 1 Marker# DSP01 2025-03-31T16:03:26.293364Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:605:2105] Create Queue# [2:609:2108] targetNodeId# 1 Marker# DSP01 2025-03-31T16:03:26.293474Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:605:2105] Create Queue# [2:610:2109] targetNodeId# 1 Marker# DSP01 2025-03-31T16:03:26.293583Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:605:2105] Create Queue# [2:611:2110] targetNodeId# 1 Marker# DSP01 2025-03-31T16:03:26.293690Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:605:2105] Create Queue# [2:612:2111] targetNodeId# 1 Marker# DSP01 2025-03-31T16:03:26.293798Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:605:2105] Create Queue# [2:613:2112] targetNodeId# 1 Marker# DSP01 2025-03-31T16:03:26.293825Z node 2 :BS_PROXY INFO: Group# 2181038082 SetStateEstablishingSessions Marker# DSP03 2025-03-31T16:03:26.295001Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 1 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-03-31T16:03:26.301196Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 2 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-03-31T16:03:26.301329Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 3 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-03-31T16:03:26.301519Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 4 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-03-31T16:03:26.301574Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 5 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-03-31T16:03:26.301628Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 6 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 
0.000000s Marker# DSP04 2025-03-31T16:03:26.301721Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 7 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-03-31T16:03:26.301752Z node 2 :BS_PROXY INFO: Group# 2181038082 -> StateWork Marker# DSP11 2025-03-31T16:03:26.301794Z node 2 :BS_PROXY INFO: Group# 2181038082 SetStateWork Marker# DSP15 2025-03-31T16:03:26.301975Z node 2 :BS_PROXY_BLOCK DEBUG: [d70ef3c23a1a2346] bootstrap ActorId# [2:614:2113] Group# 2181038082 TabletId# 1234 Generation# 3 Deadline# 586524-01-19T08:01:49.551615Z RestartCounter# 0 Marker# DSPB05 2025-03-31T16:03:26.302039Z node 2 :BS_PROXY_BLOCK DEBUG: [d70ef3c23a1a2346] Sending TEvVBlock Tablet# 1234 Generation# 3 vdiskId# [82000002:1:0:0:0] node# 1 Marker# DSPB03 2025-03-31T16:03:26.302223Z node 2 :BS_PROXY DEBUG: Send to queueActorId# [2:607:2106] NKikimr::TEvBlobStorage::TEvVBlock# NKikimrBlobStorage.TEvVBlock TabletId: 1234 Generation: 3 VDiskID { GroupID: 2181038082 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } IssuerGuid: 14473822642158764293 MsgQoS { ExtQueueId: PutTabletLog } cookie# 0 2025-03-31T16:03:26.303561Z node 2 :BS_PROXY_BLOCK DEBUG: [d70ef3c23a1a2346] Handle TEvVBlockResult status# OK From# [82000002:1:0:0:0] NodeId# 1 Marker# DSPB01 2025-03-31T16:03:26.303630Z node 2 :BS_PROXY_BLOCK DEBUG: [d70ef3c23a1a2346] Result# TEvBlockResult {Status# OK} Marker# DSPB04 Sending TEvPut 2025-03-31T16:03:26.303958Z node 2 :BS_PROXY INFO: Group# 2181038082 HandleError ev# TEvPut {Id# [1234:3:0:0:0:10:0] Size# 10 Deadline# 18446744073709551 HandleClass# TabletLog Tactic# Default} Response# TEvPutResult {Id# [1234:3:0:0:0:10:0] Status# ERROR StatusFlags# { } ErrorReason# "Created as LIMITED without keys. It happens when tenant keys are missing on the node." ApproximateFreeSpaceShare# 0} Marker# DSP31 Sending TEvPut 2025-03-31T16:03:26.304158Z node 2 :BS_PROXY DEBUG: Group# 2181038082 HandleError ev# TEvPut {Id# [1234:4:0:0:0:10:0] Size# 10 Deadline# 18446744073709551 HandleClass# TabletLog Tactic# Default} Response# TEvPutResult {Id# [1234:4:0:0:0:10:0] Status# ERROR StatusFlags# { } ErrorReason# "Created as LIMITED without keys. It happens when tenant keys are missing on the node." 
ApproximateFreeSpaceShare# 0} Marker# DSP31 Sending TEvPut 2025-03-31T16:03:26.304498Z node 1 :BS_PROXY_PUT INFO: [91379e686f748e92] bootstrap ActorId# [1:615:2513] Group# 2181038082 BlobCount# 1 BlobIDs# [[1234:2:0:0:0:11:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13 2025-03-31T16:03:26.304640Z node 1 :BS_PROXY_PUT DEBUG: [91379e686f748e92] Id# [1234:2:0:0:0:11:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-03-31T16:03:26.304700Z node 1 :BS_PROXY_PUT DEBUG: [91379e686f748e92] restore Id# [1234:2:0:0:0:11:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-03-31T16:03:26.304756Z node 1 :BS_PROXY_PUT DEBUG: [91379e686f748e92] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [1234:2:0:0:0:11:1] Marker# BPG33 2025-03-31T16:03:26.304800Z node 1 :BS_PROXY_PUT DEBUG: [91379e686f748e92] Sending missing VPut part# 0 to# 0 blob Id# [1234:2:0:0:0:11:1] Marker# BPG32 2025-03-31T16:03:26.304925Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:594:2503] NKikimr::TEvBlobStorage::TEvVPut# {ID# [1234:2:0:0:0:11:1] FDS# 11 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-31T16:03:26.305133Z node 1 :BS_VDISK_PUT ERROR: VDISK[82000002:_:0:0:0]: TEvVPut: failed to pass the Hull check; id# [1234:2:0:0:0:11:1] status# {Status# BLOCKED} Marker# BSVS03 2025-03-31T16:03:26.305392Z node 1 :BS_PROXY_PUT INFO: [91379e686f748e92] received {EvVPutResult Status# BLOCKED ErrorReason# "blocked" ID# [1234:2:0:0:0:11:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 3 } Cost# 80086 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 4 }}}} from# [82000002:1:0:0:0] Marker# BPP01 2025-03-31T16:03:26.305475Z node 1 :BS_PROXY_PUT ERROR: [91379e686f748e92] Result# TEvPutResult {Id# [1234:2:0:0:0:11:0] Status# BLOCKED StatusFlags# { } ErrorReason# "Got VPutResult status# BLOCKED from VDiskId# [82000002:1:0:0:0]" ApproximateFreeSpaceShare# 0} GroupId# 2181038082 Marker# BPP12 2025-03-31T16:03:26.305532Z node 1 :BS_PROXY_PUT NOTICE: [91379e686f748e92] SendReply putResult# TEvPutResult {Id# [1234:2:0:0:0:11:0] Status# BLOCKED StatusFlags# { } ErrorReason# "Got VPutResult status# BLOCKED from VDiskId# [82000002:1:0:0:0]" ApproximateFreeSpaceShare# 0} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-03-31T16:03:26.305632Z node 1 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 2181038082 HandleClass# TabletLog Tactic# Default History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.591 sample PartId# [1234:2:0:0:0:11:1] QueryCount# 1 VDiskId# [82000002:1:0:0:0] NodeId# 1 } ] } 2025-03-31T16:03:26.305999Z node 2 :BS_PROXY DEBUG: Send to queueActorId# [2:607:2106] NKikimr::TEvBlobStorage::TEvVCollectGarbage# {TEvVCollectGarbage for [tablet:gen:cnt:channel]=[1234:4294967295:4294967295:0] collect=[4294967295:4294967295] cookie# 0
------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> RetryPolicy::RetryWithBatching [GOOD]
Test command err: 2025-03-31T15:56:34.736641Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:56:34.736669Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:56:34.736693Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s
2025-03-31T15:56:34.751422Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-03-31T15:56:34.751491Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:56:34.751531Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:56:34.752796Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.007395s 2025-03-31T15:56:34.755013Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-03-31T15:56:34.755060Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:56:34.755106Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:56:34.755162Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.008680s 2025-03-31T15:56:34.756901Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-03-31T15:56:34.756942Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:56:34.756970Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T15:56:34.757029Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.008135s 2025-03-31T15:56:34.823166Z :TWriteSession_TestPolicy INFO: Random seed for debugging is 1743436594823132 2025-03-31T15:56:35.343540Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488003159748881233:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:56:35.343636Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T15:56:35.712781Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-31T15:56:35.717627Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0011fc/r3tmp/tmpWFibxz/pdisk_1.dat 2025-03-31T15:56:35.796788Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:56:36.282355Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:56:36.421556Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:56:36.437387Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:56:36.437487Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:56:36.451378Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:56:36.451458Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:56:36.469727Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T15:56:36.469870Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 
2025-03-31T15:56:36.472246Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13332, node 1 2025-03-31T15:56:37.654209Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/0l5c/0011fc/r3tmp/yandexahpcPo.tmp 2025-03-31T15:56:37.654235Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/0l5c/0011fc/r3tmp/yandexahpcPo.tmp 2025-03-31T15:56:37.655373Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/0l5c/0011fc/r3tmp/yandexahpcPo.tmp 2025-03-31T15:56:37.656121Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T15:56:40.237860Z INFO: TTestServer started on Port 8036 GrpcPort 13332 2025-03-31T15:56:43.839160Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488003159748881233:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:56:43.839306Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:8036 PQClient connected to localhost:13332 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:56:47.452990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-03-31T15:56:49.572088Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:56:51.227877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T15:56:51.228054Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:56:56.177833Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488003249943194787:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:56:56.178624Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488003249943194781:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:56:56.180803Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:56:56.414069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480 2025-03-31T15:56:56.848302Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488003249943194803:2323], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2025-03-31T15:56:57.091915Z node 2 :TX_PROXY ERROR: Actor# [2:7488003249943194831:2140] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:56:57.690104Z node 1 :KQP_PROXY ERROR: TraceId: "01jqpepy7z4wmtjvn3p1g4qavn", Request deadline has expired for 2.914992s seconds 2025-03-31T15:57:00.062058Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7488003254238162142:2327], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T15:57:00.077275Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NjMzOTA3NjItMmMwMDE4YjItYTgxNGM4ZWYtZTM4Yjg3MDA=, ActorId: [2:7488003245648227467:2318], ActorState: ExecuteState, TraceId: 01jqpeq4dw4tbaejerr95e953f, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T15:57:00.111743Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T15:57:00.156208Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488003255265566442:2357], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T15:57:00.157497Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODM0ODk0ZjYtMWVhNTQ2MTEtYTQ5MDdlZGEtZWU4ZTcwZDk=, ActorId: [1:7488003250970599025:2348], ActorState: ExecuteState, TraceId: 01jqpeq5yscvgrcvvs3cz543xw, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T15:57:00.159398Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T15:57:00.211706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T15:57:01.318953Z node 1 :KQP_CO ... ------------- 2025-03-31T16:03:20.048451Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] m0000000000ptest-message-group-id 2025-03-31T16:03:20.048469Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] d0000000000_00000000000000000000_00000_0000000010_00000| 2025-03-31T16:03:20.048488Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] i0000000000 2025-03-31T16:03:20.048521Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-31T16:03:20.048560Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] =========================== 2025-03-31T16:03:20.048755Z node 17 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-03-31T16:03:20.048902Z node 17 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 0 partNo 0 count 10 size 1208 2025-03-31T16:03:20.075559Z node 17 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 0 count 10 size 1208 actorID [17:7488004891067992962:2599] 2025-03-31T16:03:20.075750Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 1230 WriteNewSizeFromSupportivePartitions# 0 2025-03-31T16:03:20.075751Z node 17 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. Tablet '72075186224037892' partition 0 offset 0 partno 0 count 10 parts 0 size 1208 2025-03-31T16:03:20.075828Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-31T16:03:20.075910Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 1, partNo: 0, Offset: 0 is stored on disk 2025-03-31T16:03:20.075955Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-03-31T16:03:20.075990Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 2, partNo: 0, Offset: 1 is stored on disk 2025-03-31T16:03:20.076015Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-31T16:03:20.076051Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 3, partNo: 0, Offset: 2 is stored on disk 2025-03-31T16:03:20.076078Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-31T16:03:20.076113Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 4, partNo: 0, Offset: 3 is stored on disk 2025-03-31T16:03:20.076135Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-31T16:03:20.076170Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 5, partNo: 0, Offset: 4 is stored on disk 2025-03-31T16:03:20.076197Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-31T16:03:20.076231Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 6, partNo: 0, Offset: 5 is stored on disk 2025-03-31T16:03:20.076253Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-31T16:03:20.076284Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 7, partNo: 0, Offset: 6 is stored on disk 2025-03-31T16:03:20.076308Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-31T16:03:20.076344Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 8, partNo: 0, Offset: 7 is stored on disk 2025-03-31T16:03:20.076366Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-31T16:03:20.076400Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 9, partNo: 0, Offset: 8 is stored on disk 2025-03-31T16:03:20.076425Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-03-31T16:03:20.076459Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 10, partNo: 0, Offset: 9 is stored on disk 2025-03-31T16:03:20.076720Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-03-31T16:03:20.076772Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2025-03-31T16:03:20.077002Z node 17 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 1 requestId: cookie: 1 2025-03-31T16:03:20.077156Z node 17 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-03-31T16:03:20.077768Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 0 Topic 'rt3.dc1--test-topic' partition 0 user user offset 0 count 1 size 1024000 endOffset 10 max time lag 0ms effective offset 0 2025-03-31T16:03:20.077823Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 10 2025-03-31T16:03:20.078070Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Reading cookie 0. All data is from uncompacted head. 2025-03-31T16:03:20.078101Z node 17 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-03-31T16:03:20.078202Z node 17 :PERSQUEUE DEBUG: Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp done, result 1743436999957 queuesize 0 startOffset 0 2025-03-31T16:03:20.087450Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|4a176dca-e00c22c8-2c8ffa0f-4c23d91_0] Write session got write response: sequence_numbers: 1 sequence_numbers: 2 sequence_numbers: 3 sequence_numbers: 4 sequence_numbers: 5 sequence_numbers: 6 sequence_numbers: 7 sequence_numbers: 8 sequence_numbers: 9 sequence_numbers: 10 offsets: 0 offsets: 1 offsets: 2 offsets: 3 offsets: 4 offsets: 5 offsets: 6 offsets: 7 offsets: 8 offsets: 9 already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false write_statistics { persist_duration_ms: 30 queued_in_partition_duration_ms: 84 } 2025-03-31T16:03:20.087533Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|4a176dca-e00c22c8-2c8ffa0f-4c23d91_0] Write session: acknoledged message 1 2025-03-31T16:03:20.087589Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|4a176dca-e00c22c8-2c8ffa0f-4c23d91_0] Write session: acknoledged message 2 2025-03-31T16:03:20.087620Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|4a176dca-e00c22c8-2c8ffa0f-4c23d91_0] Write session: acknoledged message 3 2025-03-31T16:03:20.087647Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|4a176dca-e00c22c8-2c8ffa0f-4c23d91_0] Write session: acknoledged message 4 2025-03-31T16:03:20.087682Z :DEBUG: 
[/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|4a176dca-e00c22c8-2c8ffa0f-4c23d91_0] Write session: acknoledged message 5 2025-03-31T16:03:20.087715Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|4a176dca-e00c22c8-2c8ffa0f-4c23d91_0] Write session: acknoledged message 6 2025-03-31T16:03:20.087743Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|4a176dca-e00c22c8-2c8ffa0f-4c23d91_0] Write session: acknoledged message 7 2025-03-31T16:03:20.087786Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|4a176dca-e00c22c8-2c8ffa0f-4c23d91_0] Write session: acknoledged message 8 2025-03-31T16:03:20.087824Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|4a176dca-e00c22c8-2c8ffa0f-4c23d91_0] Write session: acknoledged message 9 2025-03-31T16:03:20.087856Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|4a176dca-e00c22c8-2c8ffa0f-4c23d91_0] Write session: acknoledged message 10 2025-03-31T16:03:20.091433Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|4a176dca-e00c22c8-2c8ffa0f-4c23d91_0] Write session: close. Timeout = 0 ms 2025-03-31T16:03:20.091507Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|4a176dca-e00c22c8-2c8ffa0f-4c23d91_0] Write session will now close 2025-03-31T16:03:20.091578Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|4a176dca-e00c22c8-2c8ffa0f-4c23d91_0] Write session: aborting 2025-03-31T16:03:20.092199Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|4a176dca-e00c22c8-2c8ffa0f-4c23d91_0] Write session: gracefully shut down, all writes complete 2025-03-31T16:03:20.092249Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|4a176dca-e00c22c8-2c8ffa0f-4c23d91_0] Write session: destroy 2025-03-31T16:03:20.099204Z node 17 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 5 sessionId: test-message-group-id|4a176dca-e00c22c8-2c8ffa0f-4c23d91_0 grpc read done: success: 0 data: 2025-03-31T16:03:20.099255Z node 17 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: test-message-group-id|4a176dca-e00c22c8-2c8ffa0f-4c23d91_0 grpc read failed 2025-03-31T16:03:20.099305Z node 17 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: test-message-group-id|4a176dca-e00c22c8-2c8ffa0f-4c23d91_0 grpc closed 2025-03-31T16:03:20.099336Z node 17 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: test-message-group-id|4a176dca-e00c22c8-2c8ffa0f-4c23d91_0 is DEAD 2025-03-31T16:03:20.100456Z node 17 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-03-31T16:03:20.108109Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [17:7488004895362960559:2625] destroyed 2025-03-31T16:03:20.108218Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 
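The write-session trace above ends with a single acknowledgement record: ten messages with sequence_numbers 1..10 are stored at offsets 0..9, and write_statistics reports 30 ms of persist time and 84 ms queued in the partition. The response is a flattened protobuf text dump, so the seqno-to-offset mapping can be sanity-checked mechanically. A minimal sketch (plain Python, standard library only; the excerpt string is abbreviated from the record above, not a YDB API call):

import re

# Abbreviated excerpt of the "Write session got write response" record above.
ack_line = (
    "Write session got write response: "
    "sequence_numbers: 1 sequence_numbers: 2 sequence_numbers: 3 "
    "offsets: 0 offsets: 1 offsets: 2 "
    "write_statistics { persist_duration_ms: 30 queued_in_partition_duration_ms: 84 }"
)

# Repeated protobuf fields appear as repeated "name: value" pairs,
# so one findall per field name recovers each list in order.
seqnos = [int(v) for v in re.findall(r"sequence_numbers: (\d+)", ack_line)]
offsets = [int(v) for v in re.findall(r"offsets: (\d+)", ack_line)]
persist_ms = int(re.search(r"persist_duration_ms: (\d+)", ack_line).group(1))

assert len(seqnos) == len(offsets), "every acked seqno should carry an offset"
# In a fresh partition the first seqno lands at offset 0 and both advance by one,
# which is exactly what the trace shows for SeqNo 1..10 -> Offset 0..9.
assert all(off == seq - 1 for seq, off in zip(seqnos, offsets))
print(f"{len(seqnos)} acks, persist took {persist_ms} ms")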
>> TExportToS3Tests::RebootDuringAbortion >> LocalPartitionReader::Booting >> TExportToS3Tests::DropCopiesBeforeTransferring1 >> TBlobStorageWardenTest::TestHttpMonPage [GOOD] >> LocalPartitionReader::Booting [GOOD] >> THiveTest::TestCreateTabletAndReassignGroups [GOOD] >> THiveTest::TestCreateTabletAndReassignGroups3 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestGivenPDiskFormatedWithGuid1AndCreatedWithGuid2WhenYardInitThenError [GOOD] Test command err: 2025-03-31T16:03:24.819591Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-31T16:03:24.820546Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-31T16:03:24.821962Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:3:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-31T16:03:24.823164Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-31T16:03:24.823281Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-31T16:03:24.825520Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-31T16:03:24.828422Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001012/r3tmp/tmpunfaBx/pdisk_1.dat Formatting pdisk Creating PDisk Creating pdisk Verify that PDisk returns ERROR 2025-03-31T16:03:25.891965Z node 1 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/0l5c/001012/r3tmp/tmpKfq1LH//new_pdisk.dat": no such file. PDiskId# 1001 2025-03-31T16:03:25.892523Z node 1 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1001 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/0l5c/001012/r3tmp/tmpKfq1LH//new_pdisk.dat": no such file. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/0l5c/001012/r3tmp/tmpKfq1LH//new_pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 13614323105611543803 PDiskId# 1001 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 HashedMainKey[0]# 0x221976E60BD392C7 StartOwnerRound# 10 SectorMap# false EnableSectorEncryption # 1 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# Enable WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1001 2025-03-31T16:03:25.942685Z node 1 :BS_PROXY_PUT INFO: [e2e5f1b9c917f854] bootstrap ActorId# [1:545:2463] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:8:0:0:349:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-03-31T16:03:25.942856Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] Id# [72057594037932033:2:8:0:0:349:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-03-31T16:03:25.942924Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] Id# [72057594037932033:2:8:0:0:349:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-03-31T16:03:25.942952Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] Id# [72057594037932033:2:8:0:0:349:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-03-31T16:03:25.942978Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] Id# [72057594037932033:2:8:0:0:349:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-03-31T16:03:25.943003Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] Id# [72057594037932033:2:8:0:0:349:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-03-31T16:03:25.943027Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] Id# [72057594037932033:2:8:0:0:349:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-03-31T16:03:25.943065Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] restore Id# [72057594037932033:2:8:0:0:349:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2025-03-31T16:03:25.943156Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:8:0:0:349:1] Marker# BPG33 2025-03-31T16:03:25.943203Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:8:0:0:349:1] Marker# BPG32 2025-03-31T16:03:25.943239Z node 1 :BS_PROXY_PUT DEBUG: 
[e2e5f1b9c917f854] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:8:0:0:349:2] Marker# BPG33 2025-03-31T16:03:25.943264Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:8:0:0:349:2] Marker# BPG32 2025-03-31T16:03:25.943292Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:8:0:0:349:3] Marker# BPG33 2025-03-31T16:03:25.943317Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:8:0:0:349:3] Marker# BPG32 2025-03-31T16:03:25.943485Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:65:2091] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:349:3] FDS# 349 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-31T16:03:25.943554Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:58:2084] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:349:2] FDS# 349 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-31T16:03:25.943599Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:79:2105] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:349:1] FDS# 349 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-31T16:03:25.952707Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:349:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 82748 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2025-03-31T16:03:25.952990Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:349:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 82748 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:1:0] Marker# BPP01 2025-03-31T16:03:25.953097Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:349:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 8 } Cost# 82748 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 9 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2025-03-31T16:03:25.953170Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] Result# TEvPutResult {Id# [72057594037932033:2:8:0:0:349:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12 2025-03-31T16:03:25.953232Z node 1 :BS_PROXY_PUT INFO: [e2e5f1b9c917f854] SendReply putResult# TEvPutResult {Id# [72057594037932033:2:8:0:0:349:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-03-31T16:03:25.953436Z node 1 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 33554432 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 1.1 sample PartId# [72057594037932033:2:8:0:0:349:3] QueryCount# 1 VDiskId# [2000000:1:0:1:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.102 sample PartId# 
[72057594037932033:2:8:0:0:349:2] QueryCount# 1 VDiskId# [2000000:1:0:0:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.102 sample PartId# [72057594037932033:2:8:0:0:349:1] QueryCount# 1 VDiskId# [2000000:1:0:3:0] NodeId# 1 } TEvVPutResult{ TimestampMs# 10.267 VDiskId# [2000000:1:0:0:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 10.501 VDiskId# [2000000:1:0:1:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 10.602 VDiskId# [2000000:1:0:3:0] NodeId# 1 Status# OK } ] } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::UncommittedReadSetAck [GOOD] Test command err: 2025-03-31T16:03:18.279314Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:698:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:03:18.279816Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:03:18.280029Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:03:18.281251Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:695:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:03:18.281667Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:03:18.281860Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0011ab/r3tmp/tmpkY0h7z/pdisk_1.dat 2025-03-31T16:03:18.880142Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:19.140530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:03:19.281672Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:03:19.281828Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:03:19.291993Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:03:19.292104Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:03:19.314307Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:03:19.315137Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:03:19.315580Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:03:19.670026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:03:19.885999Z node 2 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [2:1256:2378], Recipient [2:1282:2390]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:03:19.904215Z node 2 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [2:1256:2378], Recipient [2:1282:2390]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:03:19.904802Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:1282:2390] 2025-03-31T16:03:19.905112Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:03:20.056669Z node 2 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [2:1256:2378], Recipient [2:1282:2390]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:03:20.087530Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:03:20.087804Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:03:20.089603Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:03:20.089717Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:03:20.089801Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:03:20.090220Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:03:20.090668Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:03:20.090765Z node 2 :TX_DATASHARD DEBUG: 
DataShard 72075186224037888 persisting started state actor id [2:1306:2390] in generation 1 2025-03-31T16:03:20.094504Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:03:20.194637Z node 2 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T16:03:20.194895Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:03:20.195053Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [2:1310:2407] 2025-03-31T16:03:20.195117Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:03:20.195161Z node 2 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T16:03:20.195198Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:03:20.195437Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:1282:2390], Recipient [2:1282:2390]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:03:20.195487Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:03:20.195742Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T16:03:20.195837Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T16:03:20.196019Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:03:20.196069Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:03:20.196121Z node 2 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-31T16:03:20.196159Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-31T16:03:20.196191Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-31T16:03:20.196242Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:03:20.196319Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:03:20.260176Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [2:1314:2408], Recipient [2:1282:2390]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:03:20.260244Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:03:20.260297Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:1266:2774], serverId# [2:1314:2408], sessionId# [0:0:0] 2025-03-31T16:03:20.260687Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:838:2465], Recipient [2:1314:2408] 2025-03-31T16:03:20.260735Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T16:03:20.260862Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:03:20.261090Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-31T16:03:20.261167Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T16:03:20.261277Z node 2 :TX_DATASHARD DEBUG: 
Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T16:03:20.261343Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-31T16:03:20.261386Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-31T16:03:20.261437Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-03-31T16:03:20.261476Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-31T16:03:20.261756Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-31T16:03:20.261812Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-31T16:03:20.261854Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-31T16:03:20.261890Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-31T16:03:20.261938Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-31T16:03:20.261966Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-31T16:03:20.262007Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-31T16:03:20.262085Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-31T16:03:20.262111Z node 2 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-31T16:03:20.266528Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:03:20.266598Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-31T16:03:20.266658Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-31T16:03:20.266710Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-03-31T16:03:20.266791Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-31T16:03:20.267030Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [2:1315:2409], Recipient [2:1282:2390]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-31T16:03:20.267104Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:03:20.566735Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [2:1347:2418], Recipient [2:1282:2390]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:03:20.566817Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:03:20.566858Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:1345:2797], serverId# [2:1347:2418], sessionId# [0:0:0] 2025-03-31T16:03:20.571873Z 
node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:1061:2620], Recipient [2:1347:2418] 2025-03-31T16:03:20.571998Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-31T16:03:20.572209Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-31T16:03:20.572252Z node 2 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] ... RD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:03:24.945899Z node 2 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037890 source 72075186224037890 dest 72075186224037889 consumer 72075186224037889 txId 281474976715669 2025-03-31T16:03:24.973061Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [2:2346:2587], Recipient [2:2214:2546]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:03:24.973108Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:03:24.973135Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [1:2208:3334], serverId# [2:2346:2587], sessionId# [0:0:0] 2025-03-31T16:03:24.973797Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [2:2347:2588], Recipient [2:2214:2546]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:03:24.973839Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:03:24.973871Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [2:2206:2544], serverId# [2:2347:2588], sessionId# [0:0:0] 2025-03-31T16:03:24.974035Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [2:2090:2515], Recipient [2:2214:2546]: {TEvReadSet step# 2541 txid# 281474976715668 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-03-31T16:03:24.974064Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-03-31T16:03:24.974095Z node 2 :TX_DATASHARD DEBUG: Receive RS at 72075186224037890 source 72075186224037888 dest 72075186224037890 producer 72075186224037888 txId 281474976715668 2025-03-31T16:03:24.974179Z node 2 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037890 got read set: {TEvReadSet step# 2541 txid# 281474976715668 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-03-31T16:03:24.974571Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-03-31T16:03:24.975056Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [2:2090:2515], Recipient [2:2214:2546]: {TEvReadSet step# 2690 txid# 281474976715669 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037888 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2025-03-31T16:03:24.976193Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-03-31T16:03:24.976247Z node 2 :TX_DATASHARD DEBUG: Receive RS at 72075186224037890 source 72075186224037888 dest 72075186224037890 producer 72075186224037888 txId 281474976715669 2025-03-31T16:03:24.976315Z node 2 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037890 got read set: {TEvReadSet step# 2690 txid# 281474976715669 TabletSource# 72075186224037888 TabletDest# 72075186224037890 
SetTabletProducer# 72075186224037888 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2025-03-31T16:03:24.976576Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [2:2090:2515], Recipient [2:2214:2546]: {TEvReadSet step# 2690 txid# 281474976715669 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 3 Flags# 0} 2025-03-31T16:03:24.976610Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-03-31T16:03:24.976650Z node 2 :TX_DATASHARD DEBUG: Receive RS at 72075186224037890 source 72075186224037888 dest 72075186224037890 producer 72075186224037888 txId 281474976715669 2025-03-31T16:03:24.976707Z node 2 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037890 got read set: {TEvReadSet step# 2690 txid# 281474976715669 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 3 Flags# 0} 2025-03-31T16:03:24.977013Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [1:2149:3289], Recipient [2:2346:2587] 2025-03-31T16:03:24.977049Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-03-31T16:03:24.977086Z node 2 :TX_DATASHARD DEBUG: Receive RS at 72075186224037890 source 72075186224037889 dest 72075186224037890 producer 72075186224037889 txId 281474976715667 2025-03-31T16:03:24.977136Z node 2 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037890 got read set: {TEvReadSet step# 2540 txid# 281474976715667 TabletSource# 72075186224037889 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037889 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-03-31T16:03:24.977847Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [1:2149:3289], Recipient [2:2346:2587] 2025-03-31T16:03:24.977881Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-03-31T16:03:24.977913Z node 2 :TX_DATASHARD DEBUG: Receive RS at 72075186224037890 source 72075186224037889 dest 72075186224037890 producer 72075186224037889 txId 281474976715669 2025-03-31T16:03:24.978179Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-03-31T16:03:24.978413Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [1:2149:3289], Recipient [2:2346:2587] 2025-03-31T16:03:24.978441Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-03-31T16:03:24.978474Z node 2 :TX_DATASHARD DEBUG: Receive RS at 72075186224037890 source 72075186224037889 dest 72075186224037890 producer 72075186224037889 txId 281474976715669 2025-03-31T16:03:24.978935Z node 2 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037890 got read set: {TEvReadSet step# 2690 txid# 281474976715669 TabletSource# 72075186224037889 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037889 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2025-03-31T16:03:24.979568Z node 2 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037890 got read set: {TEvReadSet step# 2690 txid# 281474976715669 TabletSource# 72075186224037889 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037889 ReadSet.Size()# 2 Seqno# 3 Flags# 0} 2025-03-31T16:03:24.979701Z node 2 :TX_DATASHARD DEBUG: Complete volatile write [2690 : 281474976715669] from 72075186224037890 at tablet 72075186224037890 send result to client [1:2297:3353] 2025-03-31T16:03:24.980136Z node 2 :TX_DATASHARD DEBUG: 
[CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-03-31T16:03:24.984215Z node 2 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037890 2025-03-31T16:03:24.984931Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [2:2214:2546], Recipient [2:2090:2515]: {TEvReadSet step# 2541 txid# 281474976715668 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletConsumer# 72075186224037890 Flags# 0 Seqno# 1} 2025-03-31T16:03:24.984983Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:03:24.985021Z node 2 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037890 consumer 72075186224037890 txId 281474976715668 2025-03-31T16:03:24.987689Z node 2 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037890 2025-03-31T16:03:24.987780Z node 2 :TX_DATASHARD DEBUG: Send RS Reply at 72075186224037890 {TEvReadSet step# 2690 txid# 281474976715669 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037888 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2025-03-31T16:03:24.987846Z node 2 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037890 2025-03-31T16:03:24.988150Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [2:2214:2546], Recipient [2:2090:2515]: {TEvReadSet step# 2690 txid# 281474976715669 TabletSource# 72075186224037890 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037890 ReadSet.Size()# 0 Seqno# 0 Flags# 3} 2025-03-31T16:03:24.988191Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-03-31T16:03:24.988225Z node 2 :TX_DATASHARD DEBUG: Receive RS at 72075186224037888 source 72075186224037890 dest 72075186224037888 producer 72075186224037890 txId 281474976715669 2025-03-31T16:03:24.988295Z node 2 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037888 got read set: {TEvReadSet step# 2690 txid# 281474976715669 TabletSource# 72075186224037890 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037890 ReadSet.Size()# 0 Seqno# 0 Flags# 3} 2025-03-31T16:03:24.988363Z node 2 :TX_DATASHARD NOTICE: Outdated readset for 2690:281474976715669 at 72075186224037888 2025-03-31T16:03:24.988439Z node 2 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037888 2025-03-31T16:03:24.988549Z node 2 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037890 2025-03-31T16:03:24.988796Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [2:2214:2546], Recipient [1:2149:3289] 2025-03-31T16:03:24.988825Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:03:24.988866Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037890 consumer 72075186224037890 txId 281474976715667 2025-03-31T16:03:24.989783Z node 2 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037890 2025-03-31T16:03:24.989837Z node 2 :TX_DATASHARD DEBUG: Send RS Reply at 72075186224037890 {TEvReadSet step# 2690 txid# 281474976715669 TabletSource# 72075186224037889 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037889 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2025-03-31T16:03:24.989999Z node 2 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037890 2025-03-31T16:03:24.990119Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [2:2214:2546], Recipient [2:2090:2515]: {TEvReadSet step# 2690 txid# 
281474976715669 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletConsumer# 72075186224037890 Flags# 0 Seqno# 3} 2025-03-31T16:03:24.990149Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:03:24.990177Z node 2 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037890 consumer 72075186224037890 txId 281474976715669 2025-03-31T16:03:24.990369Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [2:2214:2546], Recipient [1:2149:3289] 2025-03-31T16:03:24.990407Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-03-31T16:03:24.990445Z node 1 :TX_DATASHARD DEBUG: Receive RS at 72075186224037889 source 72075186224037890 dest 72075186224037889 producer 72075186224037890 txId 281474976715669 2025-03-31T16:03:24.990512Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 2690 txid# 281474976715669 TabletSource# 72075186224037890 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037890 ReadSet.Size()# 0 Seqno# 0 Flags# 3} 2025-03-31T16:03:24.990547Z node 1 :TX_DATASHARD NOTICE: Outdated readset for 2690:281474976715669 at 72075186224037889 2025-03-31T16:03:24.990590Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037889 2025-03-31T16:03:24.990728Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [2:2214:2546], Recipient [1:2149:3289] 2025-03-31T16:03:24.990761Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:03:24.990796Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037890 consumer 72075186224037890 txId 281474976715669 |73.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::OneShardNonLocalExec-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 16702, MsgBus: 22936 2025-03-31T16:02:47.122661Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004760057483193:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:02:47.123255Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000abe/r3tmp/tmplaza5y/pdisk_1.dat 2025-03-31T16:02:47.538470Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:02:47.561365Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:02:47.561467Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:02:47.563068Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16702, node 1 2025-03-31T16:02:47.663173Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:02:47.663194Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:02:47.663208Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:02:47.663318Z node 1 
:NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22936 TClient is connected to server localhost:22936 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:02:48.400159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:48.443412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:48.687523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:48.898210Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:48.986499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:51.485428Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004777237354029:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:51.485589Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:51.823306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:02:51.877842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:02:51.963127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:02:51.996114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:02:52.024323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:02:52.100135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:02:52.100956Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488004760057483193:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:02:52.101018Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:02:52.170257Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004781532321843:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:52.170366Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:52.170498Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004781532321848:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:52.175732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:02:52.189251Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488004781532321850:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:02:52.287981Z node 1 :TX_PROXY ERROR: Actor# [1:7488004781532321907:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 27332, MsgBus: 64600 2025-03-31T16:02:55.301780Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488004790503511870:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:02:55.301843Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:02:55.343946Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488004794437950886:2091];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:02:55.368314Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000abe/r3tmp/tmp39z1L0/pdisk_1.dat 2025-03-31T16:02:55.722582Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:02:55.788876Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:02:55.789426Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:02:55.791754Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:02:55.791829Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:02:55.800943Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-03-31T16:02:55.801116Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:02:55.804403Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27332, node 2 2025-03-31T16:02:55.874742Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:02:55.874774Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:02:55.874784Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:02:55.874931Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64600 TClient is connected to server localhost:64600 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:02:56.420550Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:56.481125Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part propos ... istence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:03:00.400110Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:03:00.504398Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:03:00.592053Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:03:00.734784Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488004811978351242:2416], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:00.734886Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:00.735362Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488004811978351247:2419], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:00.741540Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:03:00.785138Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488004811978351249:2420], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:03:00.872894Z node 2 :TX_PROXY ERROR: Actor# [2:7488004811978351330:4248] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 8003, MsgBus: 1348 2025-03-31T16:03:08.241942Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488004849956465868:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:08.242020Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:03:08.271308Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7488004849278795651:2209];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:08.281930Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000abe/r3tmp/tmpLSreve/pdisk_1.dat 2025-03-31T16:03:08.778261Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:08.826623Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:03:08.826734Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:03:08.828825Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:03:08.828907Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:03:08.834116Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2025-03-31T16:03:08.834273Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:03:08.835665Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8003, node 4 2025-03-31T16:03:09.017438Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:03:09.017460Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:03:09.017469Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:03:09.017611Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1348 TClient is connected to server localhost:1348 WaitRootIsUp 'Root'... 
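Most of the noise in this KqpStats block is one benign bootstrap sequence repeated per node: the default resource pool is reported NOT_FOUND, a retry is scheduled, the pool is created, and the doublecheck then finds "path exist, request accepts it". A hypothetical triage helper (plain Python; the pattern names are mine, not YDB terms) that counts these known-benign records in a run log so genuine failures stand out:

import re
from collections import Counter

# Known-benign record patterns from the bootstrap sequence above.
BENIGN_PATTERNS = {
    "pool_not_found": r"Resource pool default not found",
    "pool_retry": r"Scheduled retry for error",
    "undo_unsafe_propose": r"propose itself is undo unsafe",
}

def triage(log_text: str) -> Counter:
    # Count occurrences of each benign pattern in the given log text.
    return Counter({name: len(re.findall(pat, log_text))
                    for name, pat in BENIGN_PATTERNS.items()})

# Example against a fragment of the records above:
fragment = ("WARN: [WorkloadService] [Service] Failed to fetch pool default, "
            "DatabaseId: /Root, status: NOT_FOUND, issues: { : Error: Resource "
            "pool default not found or you don't have access permissions }")
print(triage(fragment))  # pool_not_found: 1, the other counters: 0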
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:03:10.061494Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:10.123046Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:10.602932Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:11.096038Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:11.377721Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:13.242227Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7488004849956465868:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:13.242309Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:03:13.275222Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7488004849278795651:2209];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:13.275311Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:03:15.930715Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488004880021239187:2382], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:15.930829Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:16.129486Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480 2025-03-31T16:03:16.253519Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480 2025-03-31T16:03:16.428235Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720664:0, at schemeshard: 72057594046644480 2025-03-31T16:03:16.576082Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720665:0, at schemeshard: 72057594046644480 2025-03-31T16:03:16.744931Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720666:0, at schemeshard: 72057594046644480 2025-03-31T16:03:16.943444Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720667:0, at schemeshard: 72057594046644480 2025-03-31T16:03:17.076016Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488004888611174527:2423], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:17.076134Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:17.076534Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488004888611174534:2426], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:17.081491Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720668:3, at schemeshard: 72057594046644480 2025-03-31T16:03:17.133280Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7488004888611174536:2427], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720668 completed, doublechecking } 2025-03-31T16:03:17.193055Z node 4 :TX_PROXY ERROR: Actor# [4:7488004888611174606:4296] txid# 281474976720669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:03:23.690358Z node 4 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:03:23.690388Z node 4 :IMPORT WARN: Table profiles were not loaded >> BindQueue::Basic [GOOD] >> TBlobStorageWardenTest::ObtainPDiskKeySamePin [GOOD] >> TSequence::CreateSequenceInsideTableThenDropTable [GOOD] >> TSequence::CreateSequencesWithIndexedTable >> LocalPartitionReader::FeedSlowly ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestSendUsefulMonitoring [GOOD] Test command err: 2025-03-31T16:03:25.668665Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-31T16:03:25.674908Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-31T16:03:25.675033Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-31T16:03:25.678478Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-31T16:03:25.678618Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-31T16:03:25.678711Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000fac/r3tmp/tmp5F41QH/pdisk_1.dat 2025-03-31T16:03:26.851919Z node 1 :BS_PROXY_PUT INFO: [5a9a1d6240d04444] bootstrap ActorId# [1:544:2462] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:8:0:0:1282:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-03-31T16:03:26.852100Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Id# [72057594037932033:2:8:0:0:1282:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-03-31T16:03:26.852143Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Id# [72057594037932033:2:8:0:0:1282:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-03-31T16:03:26.852185Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Id# [72057594037932033:2:8:0:0:1282:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-03-31T16:03:26.852211Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Id# [72057594037932033:2:8:0:0:1282:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-03-31T16:03:26.852232Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Id# [72057594037932033:2:8:0:0:1282:0] 
restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-03-31T16:03:26.852257Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Id# [72057594037932033:2:8:0:0:1282:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-03-31T16:03:26.852298Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] restore Id# [72057594037932033:2:8:0:0:1282:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2025-03-31T16:03:26.852371Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:8:0:0:1282:1] Marker# BPG33 2025-03-31T16:03:26.852414Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:8:0:0:1282:1] Marker# BPG32 2025-03-31T16:03:26.852455Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:8:0:0:1282:2] Marker# BPG33 2025-03-31T16:03:26.852478Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:8:0:0:1282:2] Marker# BPG32 2025-03-31T16:03:26.852504Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:8:0:0:1282:3] Marker# BPG33 2025-03-31T16:03:26.852533Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:8:0:0:1282:3] Marker# BPG32 2025-03-31T16:03:26.852740Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:65:2091] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1282:3] FDS# 1282 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-31T16:03:26.852820Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:58:2084] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1282:2] FDS# 1282 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-31T16:03:26.852871Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:79:2105] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1282:1] FDS# 1282 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-31T16:03:26.873345Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1282:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 90094 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2025-03-31T16:03:26.873593Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1282:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 90094 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:1:0] Marker# BPP01 2025-03-31T16:03:26.873674Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1282:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 8 } Cost# 90094 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 9 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2025-03-31T16:03:26.873748Z node 1 
:BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Result# TEvPutResult {Id# [72057594037932033:2:8:0:0:1282:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12 2025-03-31T16:03:26.873812Z node 1 :BS_PROXY_PUT INFO: [5a9a1d6240d04444] SendReply putResult# TEvPutResult {Id# [72057594037932033:2:8:0:0:1282:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-03-31T16:03:26.874008Z node 1 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 33554432 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 1.151 sample PartId# [72057594037932033:2:8:0:0:1282:3] QueryCount# 1 VDiskId# [2000000:1:0:1:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.152 sample PartId# [72057594037932033:2:8:0:0:1282:2] QueryCount# 1 VDiskId# [2000000:1:0:0:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.152 sample PartId# [72057594037932033:2:8:0:0:1282:1] QueryCount# 1 VDiskId# [2000000:1:0:3:0] NodeId# 1 } TEvVPutResult{ TimestampMs# 21.69 VDiskId# [2000000:1:0:0:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 21.873 VDiskId# [2000000:1:0:1:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 21.952 VDiskId# [2000000:1:0:3:0] NodeId# 1 Status# OK } ] } 2025-03-31T16:03:26.943846Z node 1 :BS_PROXY_PUT INFO: [8d27cf9df52bfb78] bootstrap ActorId# [1:589:2499] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:9:0:0:229:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-03-31T16:03:26.944016Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Id# [72057594037932033:2:9:0:0:229:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-03-31T16:03:26.944061Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Id# [72057594037932033:2:9:0:0:229:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-03-31T16:03:26.944089Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Id# [72057594037932033:2:9:0:0:229:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-03-31T16:03:26.944115Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Id# [72057594037932033:2:9:0:0:229:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-03-31T16:03:26.944144Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Id# [72057594037932033:2:9:0:0:229:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-03-31T16:03:26.944169Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Id# [72057594037932033:2:9:0:0:229:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-03-31T16:03:26.944220Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] restore Id# [72057594037932033:2:9:0:0:229:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2025-03-31T16:03:26.944296Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:9:0:0:229:1] Marker# BPG33 2025-03-31T16:03:26.944356Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:9:0:0:229:1] Marker# BPG32 2025-03-31T16:03:26.944395Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:9:0:0:229:2] Marker# BPG33 2025-03-31T16:03:26.944423Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Sending missing VPut part# 1 to# 1 blob Id# 
[72057594037932033:2:9:0:0:229:2] Marker# BPG32 2025-03-31T16:03:26.944451Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:9:0:0:229:3] Marker# BPG33 2025-03-31T16:03:26.944475Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:9:0:0:229:3] Marker# BPG32 2025-03-31T16:03:26.944634Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:58:2084] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:229:3] FDS# 229 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-31T16:03:26.944702Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:79:2105] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:229:2] FDS# 229 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-31T16:03:26.944751Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:72:2098] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:229:1] FDS# 229 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-31T16:03:26.960070Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] received {EvVPutResult Status# OK ID# [72057594037932033:2:9:0:0:229:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 81803 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2025-03-31T16:03:26.960398Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] received {EvVPutResult Status# OK ID# [72057594037932033:2:9:0:0:229:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 81803 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:2:0] Marker# BPP01 2025-03-31T16:03:26.960546Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] received {EvVPutResult Status# OK ID# [72057594037932033:2:9:0:0:229:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 81803 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2 ... 
] Create Queue# [1:595:2504] targetNodeId# 1 Marker# DSP01 2025-03-31T16:03:26.995683Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:590:2500] Create Queue# [1:596:2505] targetNodeId# 1 Marker# DSP01 2025-03-31T16:03:26.995793Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:590:2500] Create Queue# [1:597:2506] targetNodeId# 1 Marker# DSP01 2025-03-31T16:03:26.995903Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:590:2500] Create Queue# [1:598:2507] targetNodeId# 1 Marker# DSP01 2025-03-31T16:03:26.996025Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:590:2500] Create Queue# [1:599:2508] targetNodeId# 1 Marker# DSP01 2025-03-31T16:03:26.996145Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:590:2500] Create Queue# [1:600:2509] targetNodeId# 1 Marker# DSP01 2025-03-31T16:03:26.996174Z node 1 :BS_PROXY INFO: Group# 2181038082 SetStateEstablishingSessions Marker# DSP03 2025-03-31T16:03:26.996863Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 1 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-03-31T16:03:26.996983Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 2 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-03-31T16:03:26.997056Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 3 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-03-31T16:03:26.997133Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 4 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-03-31T16:03:26.997187Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 5 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-03-31T16:03:26.997241Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 6 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-03-31T16:03:26.997295Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 7 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} 
Duration# 0.000000s Marker# DSP04 2025-03-31T16:03:26.997320Z node 1 :BS_PROXY INFO: Group# 2181038082 -> StateWork Marker# DSP11 2025-03-31T16:03:26.997354Z node 1 :BS_PROXY INFO: Group# 2181038082 SetStateWork Marker# DSP15 2025-03-31T16:03:26.997398Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 2181038082 IsLimitedKeyless# 0 Marker# DSP57 initialize full monitoring 2025-03-31T16:03:26.998223Z node 1 :BS_PROXY_PUT INFO: [1a43693427d0a82b] bootstrap ActorId# [1:601:2510] Group# 2181038082 BlobCount# 1 BlobIDs# [[1234:2:0:0:0:5:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13 2025-03-31T16:03:26.998377Z node 1 :BS_PROXY_PUT DEBUG: [1a43693427d0a82b] Id# [1234:2:0:0:0:5:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-03-31T16:03:26.998418Z node 1 :BS_PROXY_PUT DEBUG: [1a43693427d0a82b] restore Id# [1234:2:0:0:0:5:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-03-31T16:03:26.998469Z node 1 :BS_PROXY_PUT DEBUG: [1a43693427d0a82b] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [1234:2:0:0:0:5:1] Marker# BPG33 2025-03-31T16:03:26.998498Z node 1 :BS_PROXY_PUT DEBUG: [1a43693427d0a82b] Sending missing VPut part# 0 to# 0 blob Id# [1234:2:0:0:0:5:1] Marker# BPG32 2025-03-31T16:03:26.998617Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:594:2503] NKikimr::TEvBlobStorage::TEvVPut# {ID# [1234:2:0:0:0:5:1] FDS# 5 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-31T16:03:27.001951Z node 1 :BS_PROXY_PUT DEBUG: [1a43693427d0a82b] received {EvVPutResult Status# OK ID# [1234:2:0:0:0:5:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 } Cost# 80039 ExtQueueId# PutTabletLog IntQueueId# IntPutLog CostSettings# { SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257} Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 1 }}}} from# [82000002:1:0:0:0] Marker# BPP01 2025-03-31T16:03:27.002063Z node 1 :BS_PROXY_PUT DEBUG: [1a43693427d0a82b] Result# TEvPutResult {Id# [1234:2:0:0:0:5:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 2181038082 Marker# BPP12 2025-03-31T16:03:27.002112Z node 1 :BS_PROXY_PUT INFO: [1a43693427d0a82b] SendReply putResult# TEvPutResult {Id# [1234:2:0:0:0:5:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-03-31T16:03:27.002218Z node 1 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 2181038082 HandleClass# TabletLog Tactic# Default History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.546 sample PartId# [1234:2:0:0:0:5:1] QueryCount# 1 VDiskId# [82000002:1:0:0:0] NodeId# 1 } TEvVPutResult{ TimestampMs# 3.903 VDiskId# [82000002:1:0:0:0] NodeId# 1 Status# OK } ] } 2025-03-31T16:03:27.002768Z node 2 :BS_PROXY INFO: Group# 2181038082 TEvConfigureProxy received GroupGeneration# IsLimitedKeyless# false Marker# DSP02 2025-03-31T16:03:27.002814Z node 2 :BS_PROXY INFO: Group# 2181038082 SetStateUnconfigured Marker# DSP07 2025-03-31T16:03:27.002920Z node 2 :BS_PROXY DEBUG: Group# 2181038082 HandleEnqueue# TEvCollectGarbage {TabletId# 1234 RecordGeneration# 4294967295 PerGenerationCounter# 4294967295 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 4294967295 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 1 IsMonitored# 1} Marker# 
DSP17 2025-03-31T16:03:27.003585Z node 2 :BS_NODE ERROR: {NW19@node_warden_group.cpp:211} error while parsing group GroupId# 2181038082 Err# LifeCyclePhase# KEY_NOT_LOADED Key.Id# "" Key.Version# 0 MainKey.Id# "/home/runner/.ya/build/build_root/0l5c/000fac/r3tmp/tmp5F41QH//key.txt" MainKey.Version# 1 GroupKeyNonce# 2181038082 2025-03-31T16:03:27.004505Z node 2 :BS_PROXY INFO: Group# 2181038082 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# true Marker# DSP02 2025-03-31T16:03:27.004555Z node 2 :BS_PROXY NOTICE: EnsureMonitoring Group# 2181038082 IsLimitedKeyless# 1 fullIfPossible# 0 Marker# DSP58 2025-03-31T16:03:27.006457Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:603:2105] Create Queue# [2:605:2106] targetNodeId# 1 Marker# DSP01 2025-03-31T16:03:27.006610Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:603:2105] Create Queue# [2:606:2107] targetNodeId# 1 Marker# DSP01 2025-03-31T16:03:27.006724Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:603:2105] Create Queue# [2:607:2108] targetNodeId# 1 Marker# DSP01 2025-03-31T16:03:27.006833Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:603:2105] Create Queue# [2:608:2109] targetNodeId# 1 Marker# DSP01 2025-03-31T16:03:27.006959Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:603:2105] Create Queue# [2:609:2110] targetNodeId# 1 Marker# DSP01 2025-03-31T16:03:27.007059Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:603:2105] Create Queue# [2:610:2111] targetNodeId# 1 Marker# DSP01 2025-03-31T16:03:27.007195Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:603:2105] Create Queue# [2:611:2112] targetNodeId# 1 Marker# DSP01 2025-03-31T16:03:27.007224Z node 2 :BS_PROXY INFO: Group# 2181038082 SetStateEstablishingSessions Marker# DSP03 2025-03-31T16:03:27.008427Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 1 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-03-31T16:03:27.008725Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 2 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-03-31T16:03:27.008789Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 3 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-03-31T16:03:27.008935Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 4 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-03-31T16:03:27.009041Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 5 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 
127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-03-31T16:03:27.009143Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 6 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-03-31T16:03:27.009208Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 7 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-03-31T16:03:27.009232Z node 2 :BS_PROXY INFO: Group# 2181038082 -> StateWork Marker# DSP11 2025-03-31T16:03:27.009267Z node 2 :BS_PROXY INFO: Group# 2181038082 SetStateWork Marker# DSP15 2025-03-31T16:03:27.009479Z node 2 :BS_PROXY DEBUG: Send to queueActorId# [2:605:2106] NKikimr::TEvBlobStorage::TEvVCollectGarbage# {TEvVCollectGarbage for [tablet:gen:cnt:channel]=[1234:4294967295:4294967295:0] collect=[4294967295:4294967295] cookie# 0 >> LocalPartitionReader::FeedSlowly [GOOD] |73.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest >> LocalPartitionReader::Booting [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestHttpMonPage [GOOD] Test command err: 2025-03-31T16:03:25.093349Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:3:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-31T16:03:25.096775Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-31T16:03:25.096903Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-31T16:03:25.100214Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-31T16:03:25.102283Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:3:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-31T16:03:25.102654Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-31T16:03:25.102829Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00101c/r3tmp/tmpGJrJj2/pdisk_1.dat 2025-03-31T16:03:26.990923Z node 2 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# 
[2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-31T16:03:26.991734Z node 2 :BS_SYNCLOG WARN: VDISK[2000000:_:0:3:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-31T16:03:26.992959Z node 2 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-31T16:03:26.993465Z node 2 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-31T16:03:26.994291Z node 2 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-31T16:03:26.995476Z node 2 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00101c/r3tmp/tmpxWHDat/pdisk_1.dat >> LocalPartitionReader::Simple |73.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test |73.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test |73.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test |73.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest |73.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest >> LocalPartitionReader::Simple [GOOD] >> TBlobStorageWardenTest::TestBlockEncriptedGroup [GOOD] >> TExportToS3Tests::CancelUponCreatingExportDirShouldSucceed [GOOD] >> TExportToS3Tests::CancelUponCopyingTablesShouldSucceed >> TExportToS3Tests::ShouldSucceedOnSingleShardTable [GOOD] >> TExportToS3Tests::ShouldSucceedOnMultiShardTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpPragma::MatchRecognizeWithTimeOrderRecoverer [GOOD] Test command err: Trying to start YDB, gRPC: 8889, MsgBus: 1908 2025-03-31T16:03:09.365434Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004853740721308:2270];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:09.365480Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000bfe/r3tmp/tmpP0CXZz/pdisk_1.dat 2025-03-31T16:03:10.048551Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:03:10.048648Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:03:10.050613Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:03:10.055500Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8889, node 1 2025-03-31T16:03:10.157859Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:03:10.157887Z node 1 
:NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:03:10.157895Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:03:10.158040Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1908 TClient is connected to server localhost:1908 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:03:10.895647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:10.919993Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:03:10.938258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:11.127808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:11.410495Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:11.505763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:14.371261Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488004853740721308:2270];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:14.371355Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:03:14.560355Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004875215559343:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:14.560517Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:15.257167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:03:15.365772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:03:15.464576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:03:15.590855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:03:15.673993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:03:15.768225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:03:15.901181Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004879510527168:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:15.901284Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:15.901800Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004879510527173:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:15.906587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:03:15.939395Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488004879510527175:2466], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:03:16.017437Z node 1 :TX_PROXY ERROR: Actor# [1:7488004883805494527:3464] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:03:17.256603Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488004888100462146:2508], status: GENERIC_ERROR, issues:
: Error: Pre type annotation, code: 1020
:2:34: Error: Pragma auth not supported inside Kikimr query., code: 2016 2025-03-31T16:03:17.257425Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTM3Y2E1OWEtMzFkYTM0MmMtOWM1M2I0MzUtYWUwNWJjYWY=, ActorId: [1:7488004888100462138:2503], ActorState: ExecuteState, TraceId: 01jqpf2rjmff2qvfh4z0c2xtgb, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 18778, MsgBus: 10382 2025-03-31T16:03:18.298281Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488004892438378608:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:18.299830Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000bfe/r3tmp/tmpmXkus4/pdisk_1.dat 2025-03-31T16:03:18.501940Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:18.520813Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:03:18.520909Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:03:18.523695Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18778, node 2 2025-03-31T16:03:18.624576Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:03:18.624599Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:03:18.624609Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:03:18.624734Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10382 TClient is connected to server localhost:10382 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:03:19.821101Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:19.857742Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:03:20.035556Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:20.344928Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:20.433677Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:23.302733Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488004892438378608:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:23.302836Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:03:23.461477Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488004913913216838:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:23.461567Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:23.543459Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:03:23.626812Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:03:23.675597Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:03:23.726605Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:03:23.779411Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:03:23.836924Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:03:23.909488Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488004913913217348:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:23.909602Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:23.910026Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488004913913217353:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:23.915903Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:03:23.933856Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488004913913217355:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:03:24.022234Z node 2 :TX_PROXY ERROR: Actor# [2:7488004918208184707:3454] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:03:25.600771Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:03:26.521067Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437006549, txId: 281474976715673] shutting down >> TExportToS3Tests::ShouldRestartOnScanErrors |73.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest >> LocalPartitionReader::FeedSlowly [GOOD] >> DataShardVolatile::VolatileCommitOnBlobStorageFailure+UseSink [GOOD] >> DataShardVolatile::VolatileCommitOnBlobStorageFailure-UseSink >> TSequence::CopyTableWithSequence [GOOD] >> TSequence::AlterSequence >> TExportToS3Tests::UidAsIdempotencyKey [GOOD] >> TExportToS3Tests::UserSID >> BuildStatsHistogram::Ten_Serial_Log [GOOD] >> BuildStatsHistogram::Ten_Crossed_Log >> TExportToS3Tests::DropSourceTableBeforeTransferring [GOOD] >> TExportToS3Tests::DropCopiesBeforeTransferring2 |73.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest >> LocalPartitionReader::Simple [GOOD] |73.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestBlockEncriptedGroup [GOOD] Test command err: 2025-03-31T16:03:24.993151Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-31T16:03:24.993284Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:3:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-31T16:03:24.994708Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-31T16:03:24.995484Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-31T16:03:24.995602Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-31T16:03:24.995699Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-31T16:03:24.995791Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:3:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-31T16:03:25.002761Z node 1 
:BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-31T16:03:25.003868Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ff4/r3tmp/tmp1w6SEe/pdisk_1.dat 2025-03-31T16:03:25.760837Z node 1 :BS_PROXY_PUT INFO: [084d0c3a19bee089] bootstrap ActorId# [1:484:2466] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:8:0:0:1296:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-03-31T16:03:25.761037Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1296:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-03-31T16:03:25.761089Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1296:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-03-31T16:03:25.761120Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1296:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-03-31T16:03:25.761150Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1296:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-03-31T16:03:25.761184Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1296:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-03-31T16:03:25.761216Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1296:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-03-31T16:03:25.761255Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] restore Id# [72057594037932033:2:8:0:0:1296:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2025-03-31T16:03:25.761338Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:8:0:0:1296:1] Marker# BPG33 2025-03-31T16:03:25.761392Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:8:0:0:1296:1] Marker# BPG32 2025-03-31T16:03:25.761440Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:8:0:0:1296:2] Marker# BPG33 2025-03-31T16:03:25.761471Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:8:0:0:1296:2] Marker# BPG32 2025-03-31T16:03:25.761504Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:8:0:0:1296:3] Marker# BPG33 2025-03-31T16:03:25.761532Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:8:0:0:1296:3] Marker# BPG32 2025-03-31T16:03:25.761724Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:46:2090] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1296:3] FDS# 1296 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-31T16:03:25.761801Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:39:2083] NKikimr::TEvBlobStorage::TEvVPut# {ID# 
[72057594037932033:2:8:0:0:1296:2] FDS# 1296 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-31T16:03:25.761854Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:60:2104] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1296:1] FDS# 1296 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-31T16:03:25.773157Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1296:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 90204 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2025-03-31T16:03:25.773469Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1296:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 90204 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:1:0] Marker# BPP01 2025-03-31T16:03:25.773596Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1296:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 8 } Cost# 90204 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 9 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2025-03-31T16:03:25.773689Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Result# TEvPutResult {Id# [72057594037932033:2:8:0:0:1296:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12 2025-03-31T16:03:25.773762Z node 1 :BS_PROXY_PUT INFO: [084d0c3a19bee089] SendReply putResult# TEvPutResult {Id# [72057594037932033:2:8:0:0:1296:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-03-31T16:03:25.774002Z node 1 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 33554432 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 1.278 sample PartId# [72057594037932033:2:8:0:0:1296:3] QueryCount# 1 VDiskId# [2000000:1:0:1:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.279 sample PartId# [72057594037932033:2:8:0:0:1296:2] QueryCount# 1 VDiskId# [2000000:1:0:0:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.28 sample PartId# [72057594037932033:2:8:0:0:1296:1] QueryCount# 1 VDiskId# [2000000:1:0:3:0] NodeId# 1 } TEvVPutResult{ TimestampMs# 12.654 VDiskId# [2000000:1:0:0:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 12.908 VDiskId# [2000000:1:0:1:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 13.026 VDiskId# [2000000:1:0:3:0] NodeId# 1 Status# OK } ] } 2025-03-31T16:03:25.855920Z node 1 :BS_PROXY_PUT INFO: [b6b2c6548553d7a5] bootstrap ActorId# [1:529:2503] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:9:0:0:224:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-03-31T16:03:25.856109Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-03-31T16:03:25.856157Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 1 part# 1 situation# 
ESituation::Unknown Marker# BPG51 2025-03-31T16:03:25.856191Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-03-31T16:03:25.856221Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-03-31T16:03:25.856251Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-03-31T16:03:25.856282Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-03-31T16:03:25.856327Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] restore Id# [72057594037932033:2:9:0:0:224:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2025-03-31T16:03:25.856407Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:9:0:0:224:1] Marker# BPG33 2025-03-31T16:03:25.856462Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:9:0:0:224:1] Marker# BPG32 2025-03-31T16:03:25.856510Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:9:0:0:224:2] Marker# BPG33 2025-03-31T16:03:25.856539Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:9:0:0:224:2] Marker# BPG32 2025-03-31T16:03:25.856573Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:9:0:0:224:3] Marker# BPG33 2025-03-31T16:03:25.856601Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:9:0:0:224:3] Marker# BPG32 2025-03-31T16:03:25.856781Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:39:2083] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:224:3] FDS# 224 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-31T16:03:25.856868Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:60:2104] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:224:2] FDS# 224 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-31T16:03:25.856930Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:53:2097] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:224:1] FDS# 224 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-31T16:03:25.871909Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] received {EvVPutResult Status# OK ID# [72057594037932033:2:9:0:0:224:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 81763 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:2:0] Marker# BPP01 2025-03-31T16:03:25.872178Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] received {EvVPutResult Status# ... 
ituation# ESituation::Unknown Marker# BPG51 2025-03-31T16:03:28.213828Z node 2 :BS_PROXY_PUT DEBUG: [f913878b3da83702] restore Id# [1234:2:0:0:0:5:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-03-31T16:03:28.213891Z node 2 :BS_PROXY_PUT DEBUG: [f913878b3da83702] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [1234:2:0:0:0:5:1] Marker# BPG33 2025-03-31T16:03:28.213941Z node 2 :BS_PROXY_PUT DEBUG: [f913878b3da83702] Sending missing VPut part# 0 to# 0 blob Id# [1234:2:0:0:0:5:1] Marker# BPG32 2025-03-31T16:03:28.214071Z node 2 :BS_PROXY DEBUG: Send to queueActorId# [2:594:2503] NKikimr::TEvBlobStorage::TEvVPut# {ID# [1234:2:0:0:0:5:1] FDS# 5 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-31T16:03:28.219788Z node 2 :BS_PROXY_PUT DEBUG: [f913878b3da83702] received {EvVPutResult Status# OK ID# [1234:2:0:0:0:5:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 2 } Cost# 80039 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 3 }}}} from# [82000002:1:0:0:0] Marker# BPP01 2025-03-31T16:03:28.219942Z node 2 :BS_PROXY_PUT DEBUG: [f913878b3da83702] Result# TEvPutResult {Id# [1234:2:0:0:0:5:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 2181038082 Marker# BPP12 2025-03-31T16:03:28.220022Z node 2 :BS_PROXY_PUT INFO: [f913878b3da83702] SendReply putResult# TEvPutResult {Id# [1234:2:0:0:0:5:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-03-31T16:03:28.220171Z node 2 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 2181038082 HandleClass# TabletLog Tactic# Default History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.593 sample PartId# [1234:2:0:0:0:5:1] QueryCount# 1 VDiskId# [82000002:1:0:0:0] NodeId# 2 } TEvVPutResult{ TimestampMs# 6.361 VDiskId# [82000002:1:0:0:0] NodeId# 2 Status# OK } ] } 2025-03-31T16:03:28.220794Z node 3 :BS_PROXY INFO: Group# 2181038082 TEvConfigureProxy received GroupGeneration# IsLimitedKeyless# false Marker# DSP02 2025-03-31T16:03:28.220841Z node 3 :BS_PROXY INFO: Group# 2181038082 SetStateUnconfigured Marker# DSP07 2025-03-31T16:03:28.220973Z node 3 :BS_PROXY DEBUG: Group# 2181038082 HandleEnqueue# TEvBlock {TabletId# 1234 Generation# 3 Deadline# 18446744073709551 IsMonitored# 1} Marker# DSP17 2025-03-31T16:03:28.221659Z node 3 :BS_NODE ERROR: {NW19@node_warden_group.cpp:211} error while parsing group GroupId# 2181038082 Err# LifeCyclePhase# KEY_NOT_LOADED Key.Id# "" Key.Version# 0 MainKey.Id# "/home/runner/.ya/build/build_root/0l5c/000ff4/r3tmp/tmpurJf8a//key.txt" MainKey.Version# 1 GroupKeyNonce# 2181038082 2025-03-31T16:03:28.222685Z node 3 :BS_PROXY INFO: Group# 2181038082 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# true Marker# DSP02 2025-03-31T16:03:28.222735Z node 3 :BS_PROXY NOTICE: EnsureMonitoring Group# 2181038082 IsLimitedKeyless# 1 fullIfPossible# 0 Marker# DSP58 2025-03-31T16:03:28.228290Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Actor# [3:605:2105] Create Queue# [3:607:2106] targetNodeId# 2 Marker# DSP01 2025-03-31T16:03:28.228457Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Actor# [3:605:2105] Create Queue# [3:608:2107] targetNodeId# 2 Marker# DSP01 2025-03-31T16:03:28.228578Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Actor# [3:605:2105] Create Queue# [3:609:2108] targetNodeId# 2 Marker# DSP01 
2025-03-31T16:03:28.228705Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Actor# [3:605:2105] Create Queue# [3:610:2109] targetNodeId# 2 Marker# DSP01 2025-03-31T16:03:28.228829Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Actor# [3:605:2105] Create Queue# [3:611:2110] targetNodeId# 2 Marker# DSP01 2025-03-31T16:03:28.228942Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Actor# [3:605:2105] Create Queue# [3:612:2111] targetNodeId# 2 Marker# DSP01 2025-03-31T16:03:28.229058Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Actor# [3:605:2105] Create Queue# [3:613:2112] targetNodeId# 2 Marker# DSP01 2025-03-31T16:03:28.229092Z node 3 :BS_PROXY INFO: Group# 2181038082 SetStateEstablishingSessions Marker# DSP03 2025-03-31T16:03:28.230433Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 1 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-03-31T16:03:28.230704Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 2 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-03-31T16:03:28.230775Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 3 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-03-31T16:03:28.230969Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 4 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-03-31T16:03:28.231062Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 5 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-03-31T16:03:28.231168Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 6 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-03-31T16:03:28.231229Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 7 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-03-31T16:03:28.231260Z node 3 :BS_PROXY INFO: Group# 2181038082 -> StateWork Marker# DSP11 2025-03-31T16:03:28.231301Z node 3 :BS_PROXY INFO: Group# 2181038082 
SetStateWork Marker# DSP15 2025-03-31T16:03:28.231477Z node 3 :BS_PROXY_BLOCK DEBUG: [d70ef3c23a1a2346] bootstrap ActorId# [3:614:2113] Group# 2181038082 TabletId# 1234 Generation# 3 Deadline# 586524-01-19T08:01:49.551615Z RestartCounter# 0 Marker# DSPB05 2025-03-31T16:03:28.231533Z node 3 :BS_PROXY_BLOCK DEBUG: [d70ef3c23a1a2346] Sending TEvVBlock Tablet# 1234 Generation# 3 vdiskId# [82000002:1:0:0:0] node# 2 Marker# DSPB03 2025-03-31T16:03:28.231739Z node 3 :BS_PROXY DEBUG: Send to queueActorId# [3:607:2106] NKikimr::TEvBlobStorage::TEvVBlock# NKikimrBlobStorage.TEvVBlock TabletId: 1234 Generation: 3 VDiskID { GroupID: 2181038082 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } IssuerGuid: 12104228579147306105 MsgQoS { ExtQueueId: PutTabletLog } cookie# 0 2025-03-31T16:03:28.233334Z node 3 :BS_PROXY_BLOCK DEBUG: [d70ef3c23a1a2346] Handle TEvVBlockResult status# OK From# [82000002:1:0:0:0] NodeId# 2 Marker# DSPB01 2025-03-31T16:03:28.233411Z node 3 :BS_PROXY_BLOCK DEBUG: [d70ef3c23a1a2346] Result# TEvBlockResult {Status# OK} Marker# DSPB04 Sending TEvPut 2025-03-31T16:03:28.233869Z node 3 :BS_PROXY INFO: Group# 2181038082 HandleError ev# TEvPut {Id# [1234:3:0:0:0:10:0] Size# 10 Deadline# 18446744073709551 HandleClass# TabletLog Tactic# Default} Response# TEvPutResult {Id# [1234:3:0:0:0:10:0] Status# ERROR StatusFlags# { } ErrorReason# "Created as LIMITED without keys. It happens when tenant keys are missing on the node." ApproximateFreeSpaceShare# 0} Marker# DSP31 Sending TEvPut 2025-03-31T16:03:28.234099Z node 3 :BS_PROXY DEBUG: Group# 2181038082 HandleError ev# TEvPut {Id# [1234:4:0:0:0:10:0] Size# 10 Deadline# 18446744073709551 HandleClass# TabletLog Tactic# Default} Response# TEvPutResult {Id# [1234:4:0:0:0:10:0] Status# ERROR StatusFlags# { } ErrorReason# "Created as LIMITED without keys. It happens when tenant keys are missing on the node." 
ApproximateFreeSpaceShare# 0} Marker# DSP31 Sending TEvPut 2025-03-31T16:03:28.234497Z node 2 :BS_PROXY_PUT INFO: [91379e686f748e92] bootstrap ActorId# [2:615:2513] Group# 2181038082 BlobCount# 1 BlobIDs# [[1234:2:0:0:0:11:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13 2025-03-31T16:03:28.234668Z node 2 :BS_PROXY_PUT DEBUG: [91379e686f748e92] Id# [1234:2:0:0:0:11:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-03-31T16:03:28.234731Z node 2 :BS_PROXY_PUT DEBUG: [91379e686f748e92] restore Id# [1234:2:0:0:0:11:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-03-31T16:03:28.234802Z node 2 :BS_PROXY_PUT DEBUG: [91379e686f748e92] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [1234:2:0:0:0:11:1] Marker# BPG33 2025-03-31T16:03:28.234851Z node 2 :BS_PROXY_PUT DEBUG: [91379e686f748e92] Sending missing VPut part# 0 to# 0 blob Id# [1234:2:0:0:0:11:1] Marker# BPG32 2025-03-31T16:03:28.235043Z node 2 :BS_PROXY DEBUG: Send to queueActorId# [2:594:2503] NKikimr::TEvBlobStorage::TEvVPut# {ID# [1234:2:0:0:0:11:1] FDS# 11 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-31T16:03:28.235361Z node 2 :BS_VDISK_PUT ERROR: VDISK[82000002:_:0:0:0]: TEvVPut: failed to pass the Hull check; id# [1234:2:0:0:0:11:1] status# {Status# BLOCKED} Marker# BSVS03 2025-03-31T16:03:28.235691Z node 2 :BS_PROXY_PUT INFO: [91379e686f748e92] received {EvVPutResult Status# BLOCKED ErrorReason# "blocked" ID# [1234:2:0:0:0:11:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 3 } Cost# 80086 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 4 }}}} from# [82000002:1:0:0:0] Marker# BPP01 2025-03-31T16:03:28.235795Z node 2 :BS_PROXY_PUT ERROR: [91379e686f748e92] Result# TEvPutResult {Id# [1234:2:0:0:0:11:0] Status# BLOCKED StatusFlags# { } ErrorReason# "Got VPutResult status# BLOCKED from VDiskId# [82000002:1:0:0:0]" ApproximateFreeSpaceShare# 0} GroupId# 2181038082 Marker# BPP12 2025-03-31T16:03:28.235857Z node 2 :BS_PROXY_PUT NOTICE: [91379e686f748e92] SendReply putResult# TEvPutResult {Id# [1234:2:0:0:0:11:0] Status# BLOCKED StatusFlags# { } ErrorReason# "Got VPutResult status# BLOCKED from VDiskId# [82000002:1:0:0:0]" ApproximateFreeSpaceShare# 0} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-03-31T16:03:28.235978Z node 2 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 2181038082 HandleClass# TabletLog Tactic# Default History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.774 sample PartId# [1234:2:0:0:0:11:1] QueryCount# 1 VDiskId# [82000002:1:0:0:0] NodeId# 2 } ] } 2025-03-31T16:03:28.236409Z node 3 :BS_PROXY DEBUG: Send to queueActorId# [3:607:2106] NKikimr::TEvBlobStorage::TEvVCollectGarbage# {TEvVCollectGarbage for [tablet:gen:cnt:channel]=[1234:4294967295:4294967295:0] collect=[4294967295:4294967295] cookie# 0 >> TExportToS3Tests::CheckItemProgress [GOOD] >> TExportToS3Tests::CancelledExportEndTime |73.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest >> TExportToS3Tests::DropCopiesBeforeTransferring1 [GOOD] >> TExportToS3Tests::CorruptedDyNumber >> THiveTest::TestCreateTabletAndReassignGroups3 [GOOD] >> THiveTest::TestCreateTabletAndMixedReassignGroups3 |73.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest >> TVersions::Wreck2 [GOOD] 
>> TVersions::Wreck2Reverse >> KqpRm::SnapshotSharingByExchanger >> KqpRm::NotEnoughExecutionUnits >> KqpRm::SingleTask ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::ObtainPDiskKeySamePin [GOOD] Test command err: Disable nodeId# 53 Pick Enable nodeId# 53 Pick Pick Disable nodeId# 37 Pick Pick Disable nodeId# 8 Delete nodeId# 19 Pick Add nodeId# 101 Disable nodeId# 53 Disable nodeId# 32 Disable nodeId# 2 Delete nodeId# 25 Add nodeId# 102 Delete nodeId# 33 Disable nodeId# 54 Disable nodeId# 45 Pick Enable nodeId# 32 Add nodeId# 103 Enable nodeId# 2 Disable nodeId# 91 Delete nodeId# 81 Enable nodeId# 53 Delete nodeId# 11 Disable nodeId# 23 Enable nodeId# 91 Delete nodeId# 97 Add nodeId# 104 Enable nodeId# 45 Enable nodeId# 23 Add nodeId# 105 Disable nodeId# 78 Disable nodeId# 47 Disable nodeId# 58 Disable nodeId# 26 Add nodeId# 106 Delete nodeId# 87 Disable nodeId# 95 Disable nodeId# 44 Add nodeId# 107 Enable nodeId# 58 Disable nodeId# 80 Disable nodeId# 64 Disable nodeId# 77 Pick Enable nodeId# 47 Disable nodeId# 45 Pick Delete nodeId# 66 Add nodeId# 108 Pick Disable nodeId# 23 Enable nodeId# 54 Enable nodeId# 64 Delete nodeId# 64 Delete nodeId# 10 Delete nodeId# 5 Disable nodeId# 51 Disable nodeId# 47 Pick Add nodeId# 109 Enable nodeId# 77 Disable nodeId# 22 Disable nodeId# 55 Delete nodeId# 18 Delete nodeId# 89 Add nodeId# 110 Disable nodeId# 40 Pick Add nodeId# 111 Enable nodeId# 37 Disable nodeId# 46 Disable nodeId# 43 Pick Pick Add nodeId# 112 Disable nodeId# 63 Disable nodeId# 30 Delete nodeId# 6 Disable nodeId# 85 Disable nodeId# 77 Disable nodeId# 79 Delete nodeId# 75 Enable nodeId# 30 Delete nodeId# 46 Delete nodeId# 13 Disable nodeId# 9 Delete nodeId# 37 Add nodeId# 113 Enable nodeId# 45 Delete nodeId# 74 Delete nodeId# 110 Disable nodeId# 68 Pick Pick Disable nodeId# 42 Delete nodeId# 1 Disable nodeId# 61 Disable nodeId# 2 Delete nodeId# 70 Add nodeId# 114 Enable nodeId# 43 Pick Pick Add nodeId# 115 Disable nodeId# 30 Delete nodeId# 50 Add nodeId# 116 Delete nodeId# 85 Pick Pick Add nodeId# 117 Disable nodeId# 20 Delete nodeId# 106 Disable nodeId# 71 Add nodeId# 118 Enable nodeId# 77 Enable nodeId# 61 Disable nodeId# 28 Delete nodeId# 114 Disable nodeId# 31 Pick Enable nodeId# 28 Enable nodeId# 78 Delete nodeId# 93 Pick Pick Delete nodeId# 71 Pick Enable nodeId# 47 Disable nodeId# 35 Pick Add nodeId# 119 Delete nodeId# 92 Enable nodeId# 8 Pick Add nodeId# 120 Disable nodeId# 60 Add nodeId# 121 Pick Delete nodeId# 90 Add nodeId# 122 Disable nodeId# 122 Enable nodeId# 95 Pick Enable nodeId# 22 Disable nodeId# 88 Delete nodeId# 14 Enable nodeId# 88 Enable nodeId# 9 Pick Enable nodeId# 23 Delete nodeId# 24 Pick Delete nodeId# 72 Enable nodeId# 30 Disable nodeId# 54 Add nodeId# 123 Add nodeId# 124 Enable nodeId# 80 Enable nodeId# 63 Enable nodeId# 2 Disable nodeId# 76 Add nodeId# 125 Disable nodeId# 95 Add nodeId# 126 Disable nodeId# 119 Delete nodeId# 26 Enable nodeId# 55 Delete nodeId# 20 Enable nodeId# 35 Delete nodeId# 23 Disable nodeId# 108 Add nodeId# 127 Delete nodeId# 109 Add nodeId# 128 Add nodeId# 129 Add nodeId# 130 Enable nodeId# 44 Disable nodeId# 127 Delete nodeId# 82 Add nodeId# 131 Enable nodeId# 108 Delete nodeId# 8 Disable nodeId# 91 Pick Delete nodeId# 130 Disable nodeId# 123 Pick Enable nodeId# 91 Disable nodeId# 12 Pick Enable nodeId# 79 Disable nodeId# 47 Delete nodeId# 124 Disable nodeId# 108 Pick Delete nodeId# 73 Disable nodeId# 111 Disable nodeId# 36 Delete nodeId# 21 Disable nodeId# 
100 Add nodeId# 132 Add nodeId# 133 Delete nodeId# 43 Delete nodeId# 126 Add nodeId# 134 Pick Add nodeId# 135 Delete nodeId# 51 Enable nodeId# 42 Enable nodeId# 54 Delete nodeId# 59 Delete nodeId# 111 Pick Delete nodeId# 38 Delete nodeId# 99 Enable nodeId# 36 Add nodeId# 136 Delete nodeId# 55 Delete nodeId# 118 Delete nodeId# 132 Enable nodeId# 127 Enable nodeId# 12 Delete nodeId# 121 Delete nodeId# 127 Enable nodeId# 122 Delete nodeId# 131 Enable nodeId# 123 Pick Pick Enable nodeId# 47 Disable nodeId# 4 Add nodeId# 137 Add nodeId# 138 Disable nodeId# 116 Enable nodeId# 60 Add nodeId# 139 Disable nodeId# 44 Delete nodeId# 96 Add nodeId# 140 Add nodeId# 141 Delete nodeId# 105 Add nodeId# 142 Enable nodeId# 31 Pick Enable nodeId# 44 Add nodeId# 143 Enable nodeId# 116 Pick Enable nodeId# 68 Disable nodeId# 12 Enable nodeId# 95 Pick Add nodeId# 144 Enable nodeId# 76 Pick Delete nodeId# 98 Pick Disable nodeId# 15 Pick Add nodeId# 145 Enable nodeId# 100 Pick Disable nodeId# 137 Delete nodeId# 119 Enable nodeId# 4 Add nodeId# 146 Enable nodeId# 137 Disable nodeId# 16 Disable nodeId# 134 Delete nodeId# 44 Delete nodeId# 91 Pick Enable nodeId# 40 Enable nodeId# 134 Add nodeId# 147 Enable nodeId# 108 Disable nodeId# 29 Add nodeId# 148 Delete nodeId# 7 Pick Pick Enable nodeId# 16 Delete nodeId# 48 Pick Pick Disable nodeId# 84 Pick Add nodeId# 149 Enable nodeId# 15 Delete nodeId# 84 Enable nodeId# 12 Delete nodeId# 68 Add nodeId# 150 Disable nodeId# 107 Delete nodeId# 148 Pick Disable nodeId# 103 Disable nodeId# 65 Disable nodeId# 40 Pick Enable nodeId# 65 Add nodeId# 151 Add nodeId# 152 Add nodeId# 153 Pick Pick Delete nodeId# 61 Add nodeId# 154 Disable nodeId# 34 Enable nodeId# 103 Add nodeId# 155 Delete nodeId# 30 Delete nodeId# 108 Delete nodeId# 52 Disable nodeId# 28 Pick Pick Enable nodeId# 29 Enable nodeId# 28 Enable nodeId# 107 Add nodeId# 156 Delete nodeId# 17 Delete nodeId# 58 Delete nodeId# 34 Enable nodeId# 40 Add nodeId# 157 Delete nodeId# 154 Disable nodeId# 79 Enable nodeId# 79 Disable nodeId# 144 Pick Enable nodeId# 144 Pick Disable nodeId# 149 Delete nodeId# 3 Enable nodeId# 149 Add nodeId# 158 Disable nodeId# 29 Pick Pick Disable nodeId# 36 Enable nodeId# 36 Pick Delete nodeId# 69 Delete nodeId# 103 Disable nodeId# 101 Enable nodeId# 29 Add nodeId# 159 Add nodeId# 160 Disable nodeId# 40 Pick Enable nodeId# 40 Delete nodeId# 133 Delete nodeId# 15 Pick Delete nodeId# 22 Delete nodeId# 135 Enable nodeId# 101 Add nodeId# 161 Pick Pick Delete nodeId# 39 Delete nodeId# 149 Pick Delete nodeId# 78 Disable nodeId# 65 Enable nodeId# 65 Delete nodeId# 143 Disable nodeId# 100 Disable nodeId# 79 Disable nodeId# 129 Delete nodeId# 77 Enable nodeId# 100 Add nodeId# 162 Enable nodeId# 129 Enable nodeId# 79 Delete nodeId# 83 Pick Disable nodeId# 161 Enable nodeId# 161 Delete nodeId# 9 Add nodeId# 163 Add nodeId# 164 Pick Add nodeId# 165 Disable nodeId# 49 Disable nodeId# 147 Pick Disable nodeId# 100 Delete nodeId# 136 Enable nodeId# 49 Delete nodeId# 156 Enable nodeId# 147 Delete nodeId# 146 Add nodeId# 166 Pick Pick Disable nodeId# 56 Add nodeId# 167 Enable nodeId# 56 Enable nodeId# 100 Delete nodeId# 95 Disable nodeId# 151 Delete nodeId# 2 Add nodeId# 168 Enable nodeId# 151 Pick Add nodeId# 169 Add nodeId# 170 Add nodeId# 171 Disable nodeId# 16 Disable nodeId# 94 Disable nodeId# 4 Pick Add nodeId# 172 Add nodeId# 173 Add nodeId# 174 Disable nodeId# 76 Pick Pick Pick Delete nodeId# 27 Pick Add nodeId# 175 Disable nodeId# 42 Pick Disable nodeId# 29 Pick Enable nodeId# 94 Pick Enable nodeId# 76 Enable 
nodeId# 4 Add nodeId# 176 Enable nodeId# 16 Add nodeId# 177 Pick Enable nodeId# 29 Enable nodeId# 42 Disable nodeId# 151 Pick Pick Pick Disable nodeId# 155 Disable nodeId# 4 Enable nodeId# 4 Delete nodeId# 123 Enable nodeId# 151 Disable nodeId# 113 Disable nodeId# 169 Enable nodeId# 155 Enable nodeId# 113 Add nodeId# 178 Delete nodeId# 163 Enable nodeId# 169 Delete nodeId# 60 Delete nodeId# 164 Disable nodeId# 145 Enable nodeId# 145 Disable nodeId# 113 Add nodeId# 179 Delete nodeId# 129 Pick Delete nodeId# 159 Add nodeId# 180 Disable nodeId# 142 Enable nodeId# 113 Add nodeId# 181 Disable nodeId# 86 Delete nodeId# 28 Pick Add nodeId# 182 Pick Pick Add nodeId# 183 Enable nodeId# 86 Disable nodeId# 100 Enable nodeId# 142 Delete nodeId# 100 Pick Add nodeId# 184 Add nodeId# 185 Delete nodeId# 184 Add nodeId# 186 Add nodeId# 187 Pick Add nodeId# 188 Delete nodeId# 185 Pick Pick Pick Delete nodeId# 158 Disable nodeId# 102 Pick Add nodeId# 189 Delete nodeId# 128 Enable nodeId# 102 Delete nodeId# 169 Pick Add nodeId# 190 Pick Delete nodeId# 88 Pick Add nodeId# 191 Add nodeId# 192 Disable nodeId# 137 Add nodeId# 193 Enable nodeId# 137 Add nodeId# 194 Pick Delete nodeId# 147 Pick Pick Disable nodeId# 179 Disable nodeId# 181 Add nodeId# 195 Pick Disable nodeId# 168 Enable nodeId# 168 Pick Delete nodeId# 160 Add nodeId# 196 Enable nodeId# 181 Disable nodeId# 35 Add nodeId# 197 Enable nodeId# 179 Pick Enable nodeId# 35 Add nodeId# 198 Pick Add nodeId# 199 Disable nodeId# 183 Disable nodeId# 182 Enable nodeId# 183 Delete nodeId# 153 Disable nodeId# 42 Enable nodeId# 42 Enable nodeId# 182 Add nodeId# 200 Disable nodeId# 180 Disable nodeId# 36 Disable nodeId# 137 Disable nodeId# 112 Delete nodeId# 102 Pick Delete nodeId# 115 Delete nodeId# 76 Add nodeId# 201 Delete nodeId# 176 Disable nodeId# 54 Add nodeId# 202 Enable nodeId# 180 Enable nodeId# 54 Delete nodeId# 170 Enable nodeId# 36 Delete nodeId# 200 Enable nodeId# 112 Enable nodeId# 137 Disable nodeId# 12 Pick Pick Delete nodeId# 29 Delete nodeId# 16 Delete nodeId# 41 Disable nodeId# 35 Pick Add nodeId# 203 Add nodeId# 204 Add nodeId# 205 Enable nodeId# 35 Add nodeId# 206 Add nodeId# 207 Delete nodeId# 104 Delete nodeId# 150 Pick Delete nodeId# 139 Delete nodeId# 40 Disable nodeId# 137 Disable nodeId# 116 Add nodeId# 208 Disable nodeId# 56 Enable nodeId# 56 Enable nodeId# 116 Enable nodeId# 137 Delete nodeId# 192 Add nodeId# 209 Disable nodeId# 42 Add nodeId# 210 Delete nodeId# 197 Pick Pick Delete nodeId# 204 Add nodeId# 211 Pick Add nodeId# 212 Add nodeId# 213 Pick Add nodeId# 214 Disable nodeId# 113 Disable nodeId# 177 Enable nodeId# 113 Disable nodeId# 145 Pick Enable nodeId# 145 Add nodeId# 215 Disable nodeId# 186 Delete nodeId# 206 Enable nodeId# 177 Disable nodeId# 166 Enable nodeId# 186 Enable nodeId# 166 Add nodeId# 216 Enable nodeId# 12 Disable nodeId# 54 Enable nodeId# 42 Delete nodeId# 12 Add nodeId# 217 Disable nodeId# 190 Add nodeId# 218 Enable nodeId# 190 Add nodeId# 219 Enable nodeId# 54 Pick Disable nodeId# 31 Enable nodeId# 31 Pick Pick Add nodeId# 220 Delete nodeId# 151 Pick Delete nodeId# 172 Add nodeId# 221 Pick Pick Add nodeId# 222 Delete nodeId# 65 Delete nodeId# 53 Pick Add nodeId# 223 Delete nodeId# 207 Add nodeId# 224 Delete nodeId# 125 Add nodeId# 225 Pick Add nodeId# 226 Disable nodeId# 107 Add nodeId# 227 Disable nodeId# 56 Disable nodeId# 208 Delete nodeId# 152 Add nodeId# 228 Pick Add nodeId# 229 Disable nodeId# 221 Add nodeId# 230 Pick Add nodeId# 231 Delete nodeId# 171 Add nodeId# 232 Disable nodeId# 134 Delete nodeId# 
213 Add nodeId# 233 Disable nodeId# 45 Add nodeId# 234 Pick Delete nodeId# 183 Disable nodeId# 196 Enable nodeId# 134 Pick Pick Disable nodeId# 141 Enable nodeId# 45 Disable nodeId# 142 Pick Delete nodeId# 180 Delete nodeId# 166 Add nodeId# 235 Enable nodeId# 208 Enable nodeId# 141 Disable nodeId# 168 Enable nodeId# 168 Add nodeId# 236 Disable nodeId# 173 Enable nodeId# 56 Pick Enable nodeId# 142 Disable nodeId# 42 Enable nodeId# 221 Add nodeId# 237 Add nodeId# 238 Add nodeId# 239 Add nodeId# 240 Disable nodeId# 201 Delete nodeId# 112 Delete nodeId# 162 Enable nodeId# 196 Delete nodeId# 155 Disable nodeId# 219 Disable nodeId# 230 Add nodeId# 241 Enable nodeId# 219 Enable nodeId# 107 Delete nodeId# 193 Delete nodeId# 4 Disable nodeId# 189 Disable nodeId# 47 Pick Disable nodeId# 165 Delete nodeId# 231 Add nodeId# 242 Delete nodeId# 194 Disable nodeId# 188 Delete nodeId# 221 Disable nodeId# 211 Disable nodeId# 49 Add nodeId# 243 Disable nodeId# 80 Disable nodeId# 120 Enable nodeId# 80 Add nodeId# 244 Enable nodeId# 42 Disable nodeId# 101 Pick Add nodeId# 245 Delete nodeId# 236 Add nodeId# 246 Enable nodeId# 120 Disable nodeId# 80 Enable nodeId# 49 Add nodeId# 247 Disable nodeId# 45 Pick Disable nodeId# 63 Disable nodeId# 190 Pick Add nodeId# 248 Enable nodeId# 80 Pick Add nodeId# 249 Disable nodeId# 144 Add nodeId# 250 Delete nodeId# 198 Add nodeId# 251 Add nodeId# 252 Delete nodeId# 216 Delete nodeId# 215 Pick Pick Delete nodeId# 86 Pick Add nodeId# 253 Disable nodeId# 250 Delete nodeId# 137 Add nodeId# 254 Add nodeId# 255 Enable nodeId# 47 Pick Disable nodeId# 202 Delete nodeId# 238 Disable nodeId# 178 ... elete nodeId# 20266 Disable nodeId# 20305 Disable nodeId# 20292 Enable nodeId# 20292 Enable nodeId# 20305 Delete nodeId# 20302 Add nodeId# 20316 Add nodeId# 20317 Disable nodeId# 20312 Enable nodeId# 20312 Disable nodeId# 20292 Delete nodeId# 20298 Delete nodeId# 20296 Pick Pick Pick Enable nodeId# 20292 Disable nodeId# 20270 Delete nodeId# 20290 Pick Disable nodeId# 20300 Delete nodeId# 20297 Delete nodeId# 20287 Disable nodeId# 20280 Disable nodeId# 20204 Delete nodeId# 20284 Delete nodeId# 20312 Enable nodeId# 20280 Delete nodeId# 20280 Pick Pick Enable nodeId# 20204 Pick Delete nodeId# 20309 Disable nodeId# 20304 Enable nodeId# 20300 Add nodeId# 20318 Enable nodeId# 20270 Pick Disable nodeId# 20317 Pick Enable nodeId# 20304 Add nodeId# 20319 Add nodeId# 20320 Add nodeId# 20321 Delete nodeId# 20307 Add nodeId# 20322 Enable nodeId# 20317 Delete nodeId# 20281 Disable nodeId# 20318 Disable nodeId# 20276 Delete nodeId# 20316 Enable nodeId# 20276 Enable nodeId# 20318 Pick Delete nodeId# 20268 Disable nodeId# 20299 Enable nodeId# 20299 Add nodeId# 20323 Pick Disable nodeId# 20270 Pick Pick Delete nodeId# 20323 Add nodeId# 20324 Add nodeId# 20325 Add nodeId# 20326 Pick Disable nodeId# 20322 Delete nodeId# 20305 Delete nodeId# 20311 Add nodeId# 20327 Delete nodeId# 20320 Delete nodeId# 20294 Add nodeId# 20328 Add nodeId# 20329 Pick Disable nodeId# 20327 Enable nodeId# 20327 Pick Add nodeId# 20330 Add nodeId# 20331 Delete nodeId# 20330 Pick Delete nodeId# 20329 Add nodeId# 20332 Disable nodeId# 20324 Add nodeId# 20333 Pick Disable nodeId# 20300 Disable nodeId# 20328 Add nodeId# 20334 Disable nodeId# 20327 Add nodeId# 20335 Delete nodeId# 20333 Enable nodeId# 20270 Disable nodeId# 20254 Delete nodeId# 20332 Disable nodeId# 20334 Disable nodeId# 20321 Pick Pick Disable nodeId# 20295 Disable nodeId# 20209 Pick Add nodeId# 20336 Pick Delete nodeId# 20292 Disable nodeId# 20315 Delete nodeId# 20291 
Add nodeId# 20337 Add nodeId# 20338 Pick Delete nodeId# 20254 Pick Delete nodeId# 20336 Pick Enable nodeId# 20209 Disable nodeId# 20314 Add nodeId# 20339 Pick Enable nodeId# 20314 Pick Add nodeId# 20340 Disable nodeId# 20319 Pick Pick Delete nodeId# 20317 Pick Delete nodeId# 20295 Delete nodeId# 20321 Pick Enable nodeId# 20324 Add nodeId# 20341 Add nodeId# 20342 Add nodeId# 20343 Add nodeId# 20344 Enable nodeId# 20315 Pick Add nodeId# 20345 Delete nodeId# 20334 Pick Add nodeId# 20346 Disable nodeId# 20326 Pick Add nodeId# 20347 Enable nodeId# 20326 Pick Add nodeId# 20348 Add nodeId# 20349 Delete nodeId# 20209 Add nodeId# 20350 Pick Delete nodeId# 20306 Add nodeId# 20351 Add nodeId# 20352 Disable nodeId# 20343 Disable nodeId# 20335 Enable nodeId# 20300 Add nodeId# 20353 Pick Delete nodeId# 20315 Enable nodeId# 20327 Delete nodeId# 20340 Disable nodeId# 20300 Disable nodeId# 20352 Disable nodeId# 20353 Disable nodeId# 20348 Enable nodeId# 20352 Delete nodeId# 20204 Delete nodeId# 20337 Add nodeId# 20354 Delete nodeId# 20353 Enable nodeId# 20335 Add nodeId# 20355 Delete nodeId# 20319 Pick Enable nodeId# 20348 Pick Delete nodeId# 20351 Enable nodeId# 20322 Disable nodeId# 20349 Add nodeId# 20356 Enable nodeId# 20349 Disable nodeId# 20348 Add nodeId# 20357 Add nodeId# 20358 Delete nodeId# 20299 Enable nodeId# 20343 Pick Enable nodeId# 20328 Delete nodeId# 20304 Delete nodeId# 20356 Disable nodeId# 20318 Disable nodeId# 20339 Add nodeId# 20359 Delete nodeId# 20327 Add nodeId# 20360 Pick Disable nodeId# 20326 Enable nodeId# 20326 Enable nodeId# 20348 Delete nodeId# 20355 Enable nodeId# 20339 Enable nodeId# 20300 Enable nodeId# 20318 Disable nodeId# 20314 Pick Disable nodeId# 20339 Pick Add nodeId# 20361 Pick Disable nodeId# 20331 Pick Delete nodeId# 20324 Delete nodeId# 20350 Disable nodeId# 20345 Disable nodeId# 20358 Add nodeId# 20362 Add nodeId# 20363 Enable nodeId# 20339 Pick Pick Add nodeId# 20364 Pick Enable nodeId# 20314 Disable nodeId# 20347 Enable nodeId# 20331 Add nodeId# 20365 Delete nodeId# 20362 Enable nodeId# 20358 Pick Enable nodeId# 20345 Add nodeId# 20366 Delete nodeId# 20335 Enable nodeId# 20347 Add nodeId# 20367 Pick Disable nodeId# 20357 Delete nodeId# 20300 Pick Add nodeId# 20368 Add nodeId# 20369 Add nodeId# 20370 Add nodeId# 20371 Enable nodeId# 20357 Add nodeId# 20372 Pick Add nodeId# 20373 Pick Disable nodeId# 20270 Add nodeId# 20374 Add nodeId# 20375 Disable nodeId# 20367 Pick Pick Delete nodeId# 20361 Add nodeId# 20376 Pick Pick Delete nodeId# 20376 Add nodeId# 20377 Delete nodeId# 20374 Pick Disable nodeId# 20342 Add nodeId# 20378 Disable nodeId# 20347 Delete nodeId# 20358 Add nodeId# 20379 Enable nodeId# 20342 Disable nodeId# 20370 Pick Delete nodeId# 20326 Add nodeId# 20380 Enable nodeId# 20270 Add nodeId# 20381 Delete nodeId# 20380 Enable nodeId# 20370 Enable nodeId# 20367 Add nodeId# 20382 Delete nodeId# 20369 Disable nodeId# 20354 Disable nodeId# 20325 Enable nodeId# 20347 Delete nodeId# 20379 Add nodeId# 20383 Add nodeId# 20384 Delete nodeId# 20322 Pick Enable nodeId# 20354 Delete nodeId# 20325 Delete nodeId# 20377 Disable nodeId# 20349 Disable nodeId# 20367 Delete nodeId# 20331 Pick Pick Disable nodeId# 20378 Add nodeId# 20385 Delete nodeId# 20371 Pick Disable nodeId# 20359 Delete nodeId# 20375 Add nodeId# 20386 Disable nodeId# 20385 Enable nodeId# 20367 Disable nodeId# 20382 Pick Disable nodeId# 20313 Disable nodeId# 20348 Pick Disable nodeId# 20342 Enable nodeId# 20378 Delete nodeId# 20313 Delete nodeId# 20348 Add nodeId# 20387 Add nodeId# 20388 Enable nodeId# 
20359 Disable nodeId# 20318 Enable nodeId# 20318 Add nodeId# 20389 Disable nodeId# 20270 Add nodeId# 20390 Disable nodeId# 20381 Enable nodeId# 20381 Enable nodeId# 20385 Add nodeId# 20391 Delete nodeId# 20383 Delete nodeId# 20276 Add nodeId# 20392 Enable nodeId# 20270 Enable nodeId# 20349 Add nodeId# 20393 Disable nodeId# 20357 Pick Enable nodeId# 20357 Delete nodeId# 20381 Enable nodeId# 20342 Pick Delete nodeId# 20372 Delete nodeId# 20328 Enable nodeId# 20382 Add nodeId# 20394 Add nodeId# 20395 Delete nodeId# 20366 Pick Add nodeId# 20396 Delete nodeId# 20344 Delete nodeId# 20345 Delete nodeId# 20367 Add nodeId# 20397 Add nodeId# 20398 Disable nodeId# 20388 Delete nodeId# 20363 Add nodeId# 20399 Add nodeId# 20400 Enable nodeId# 20388 Add nodeId# 20401 Add nodeId# 20402 Add nodeId# 20403 Delete nodeId# 20398 Disable nodeId# 20346 Disable nodeId# 20399 Enable nodeId# 20346 Enable nodeId# 20399 Delete nodeId# 20338 Disable nodeId# 20394 Add nodeId# 20404 Disable nodeId# 20370 Add nodeId# 20405 Delete nodeId# 20347 Disable nodeId# 20365 Disable nodeId# 20385 Disable nodeId# 20368 Disable nodeId# 20373 Disable nodeId# 20342 Add nodeId# 20406 Pick Disable nodeId# 20405 Add nodeId# 20407 Add nodeId# 20408 Delete nodeId# 20359 Add nodeId# 20409 Delete nodeId# 20368 Add nodeId# 20410 Disable nodeId# 20354 Disable nodeId# 20389 Pick Disable nodeId# 20314 Disable nodeId# 20399 Add nodeId# 20411 Disable nodeId# 20407 Enable nodeId# 20370 Add nodeId# 20412 Disable nodeId# 20390 Add nodeId# 20413 Enable nodeId# 20405 Delete nodeId# 20349 Pick Add nodeId# 20414 Delete nodeId# 20373 Delete nodeId# 20392 Enable nodeId# 20389 Pick Pick Pick Disable nodeId# 20412 Pick Add nodeId# 20415 Disable nodeId# 20387 Pick Add nodeId# 20416 Delete nodeId# 20402 Disable nodeId# 20414 Add nodeId# 20417 Add nodeId# 20418 Enable nodeId# 20385 Add nodeId# 20419 Disable nodeId# 20413 Add nodeId# 20420 Pick Disable nodeId# 20388 Delete nodeId# 20412 Enable nodeId# 20399 Add nodeId# 20421 Enable nodeId# 20354 Delete nodeId# 20418 Pick Enable nodeId# 20388 Enable nodeId# 20342 Add nodeId# 20422 Pick Add nodeId# 20423 Delete nodeId# 20341 Delete nodeId# 20410 Disable nodeId# 20403 Pick Delete nodeId# 20365 Delete nodeId# 20423 Enable nodeId# 20413 Enable nodeId# 20407 Delete nodeId# 20342 Disable nodeId# 20419 Delete nodeId# 20397 Add nodeId# 20424 Delete nodeId# 20399 Disable nodeId# 20393 Add nodeId# 20425 Enable nodeId# 20387 Pick Disable nodeId# 20422 Delete nodeId# 20401 Disable nodeId# 20357 Add nodeId# 20426 Pick Pick Enable nodeId# 20393 Delete nodeId# 20405 Add nodeId# 20427 Delete nodeId# 20384 Delete nodeId# 20407 Pick Enable nodeId# 20403 Enable nodeId# 20422 Pick Add nodeId# 20428 Add nodeId# 20429 Disable nodeId# 20378 Pick Delete nodeId# 20408 Delete nodeId# 20364 Add nodeId# 20430 Add nodeId# 20431 Disable nodeId# 20427 Enable nodeId# 20394 Enable nodeId# 20314 Add nodeId# 20432 Enable nodeId# 20378 Pick Add nodeId# 20433 Delete nodeId# 20360 Delete nodeId# 20428 Enable nodeId# 20427 Delete nodeId# 20339 Delete nodeId# 20387 Disable nodeId# 20415 Pick Delete nodeId# 20389 Delete nodeId# 20314 Add nodeId# 20434 Disable nodeId# 20386 Add nodeId# 20435 Disable nodeId# 20433 Add nodeId# 20436 Delete nodeId# 20346 Add nodeId# 20437 Disable nodeId# 20420 Add nodeId# 20438 Pick Add nodeId# 20439 Add nodeId# 20440 Pick Enable nodeId# 20414 Add nodeId# 20441 Pick Disable nodeId# 20431 Delete nodeId# 20391 Disable nodeId# 20406 Add nodeId# 20442 Delete nodeId# 20393 Enable nodeId# 20415 Delete nodeId# 20427 Disable 
nodeId# 20440 Add nodeId# 20443 Pick Pick Add nodeId# 20444 Add nodeId# 20445 Add nodeId# 20446 Disable nodeId# 20435 Delete nodeId# 20431 Add nodeId# 20447 Delete nodeId# 20394 Add nodeId# 20448 Add nodeId# 20449 Pick Pick Disable nodeId# 20415 Disable nodeId# 20400 Disable nodeId# 20411 Add nodeId# 20450 Delete nodeId# 20425 Delete nodeId# 20424 Delete nodeId# 20414 Delete nodeId# 20400 Add nodeId# 20451 Delete nodeId# 20417 Enable nodeId# 20415 Add nodeId# 20452 Enable nodeId# 20386 Delete nodeId# 20270 Enable nodeId# 20419 Enable nodeId# 20433 Pick Delete nodeId# 20438 Delete nodeId# 20385 Pick Add nodeId# 20453 Add nodeId# 20454 Disable nodeId# 20452 Delete nodeId# 20406 Delete nodeId# 20435 Delete nodeId# 20409 Pick Delete nodeId# 20419 Add nodeId# 20455 Pick Add nodeId# 20456 Add nodeId# 20457 Pick Add nodeId# 20458 Delete nodeId# 20404 Pick Disable nodeId# 20318 Delete nodeId# 20439 Add nodeId# 20459 Disable nodeId# 20444 Delete nodeId# 20457 Delete nodeId# 20436 Disable nodeId# 20426 Delete nodeId# 20447 Enable nodeId# 20420 Disable nodeId# 20420 Pick Disable nodeId# 20458 Delete nodeId# 20421 Disable nodeId# 20456 Enable nodeId# 20440 Add nodeId# 20460 Add nodeId# 20461 Disable nodeId# 20413 Disable nodeId# 20388 Add nodeId# 20462 Enable nodeId# 20458 Disable nodeId# 20443 Add nodeId# 20463 Disable nodeId# 20440 Enable nodeId# 20420 Enable nodeId# 20411 Disable nodeId# 20448 Disable nodeId# 20453 Disable nodeId# 20461 Add nodeId# 20464 Disable nodeId# 20382 Disable nodeId# 20462 Pick Pick Add nodeId# 20465 Pick Add nodeId# 20466 Pick Enable nodeId# 20443 Pick Delete nodeId# 20437 Add nodeId# 20467 Delete nodeId# 20465 Add nodeId# 20468 Delete nodeId# 20430 Enable nodeId# 20426 Add nodeId# 20469 Pick Disable nodeId# 20451 Delete nodeId# 20318 Delete nodeId# 20388 Delete nodeId# 20456 Pick Pick Enable nodeId# 20413 Add nodeId# 20470 Add nodeId# 20471 Pick Pick Delete nodeId# 20459 Pick Delete nodeId# 20426 Delete nodeId# 20403 Enable nodeId# 20452 Enable nodeId# 20440 Delete nodeId# 20448 Disable nodeId# 20468 Delete nodeId# 20382 Enable nodeId# 20357 Add nodeId# 20472 Add nodeId# 20473 Disable nodeId# 20454 Pick Pick Pick Delete nodeId# 20440 Disable nodeId# 20352 Add nodeId# 20474 Delete nodeId# 20396 Pick Enable nodeId# 20454 Pick Add nodeId# 20475 Delete nodeId# 20446 Add nodeId# 20476 Add nodeId# 20477 Disable nodeId# 20452 Enable nodeId# 20462 Disable nodeId# 20433 Pick Disable nodeId# 20411 Enable nodeId# 20468 Disable nodeId# 20416 Disable nodeId# 20460 Enable nodeId# 20416 Disable nodeId# 20471 Delete nodeId# 20343 Disable nodeId# 20395 Pick Enable nodeId# 20451 Enable nodeId# 20444 Disable nodeId# 20472 Add nodeId# 20478 Disable nodeId# 20416 Enable nodeId# 20390 Add nodeId# 20479 Delete nodeId# 20478 Enable nodeId# 20453 Delete nodeId# 20474 Disable nodeId# 20453 Disable nodeId# 20432 Delete nodeId# 20458 Enable nodeId# 20453 Enable nodeId# 20416 Pick Disable nodeId# 20390 Pick Pick Enable nodeId# 20460 >> KqpRm::ManyTasks >> TSequence::CreateSequencesWithIndexedTable [GOOD] >> TSequence::CreateTableWithDefaultFromSequence >> TExportToS3Tests::RebootDuringAbortion [GOOD] >> TExportToS3Tests::ExportStartTime >> KqpRm::Reduce >> TPartBtreeIndexIteration::FewNodes_History_Slices [GOOD] >> TPartBtreeIndexIteration::FewNodes_Groups_History_Slices |73.7%| [TA] $(B)/ydb/core/backup/impl/ut_local_partition_reader/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TExportToS3Tests::UserSID [GOOD] >> TExportToS3Tests::TablePermissions >> KqpRm::NodesMembershipByExchanger >> KqpRm::ResourceBrokerNotEnoughResources >> KqpRm::DisonnectNodes [GOOD] >> TExportToS3Tests::ShouldSucceedOnMultiShardTable [GOOD] >> TExportToS3Tests::ShouldSucceedOnManyTables >> KqpRm::NotEnoughMemory >> TExportToS3Tests::DropCopiesBeforeTransferring2 [GOOD] >> TExportToS3Tests::EnableChecksumsPersistance >> KqpRm::SingleSnapshotByExchanger ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::DisonnectNodes [GOOD] Test command err: 2025-03-31T16:03:28.090539Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-03-31T16:03:28.091103Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/0l5c/000c5d/r3tmp/tmpi3kGlB/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-03-31T16:03:28.094078Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/0l5c/000c5d/r3tmp/tmpi3kGlB/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/0l5c/000c5d/r3tmp/tmpi3kGlB/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 714339473488350040 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-03-31T16:03:28.141096Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-03-31T16:03:28.141404Z node 2 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-03-31T16:03:28.162081Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [2:457:2100] with ResourceBroker at [2:428:2099] 2025-03-31T16:03:28.162240Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [2:458:2101] 2025-03-31T16:03:28.162477Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [1:456:2334] with ResourceBroker at [1:427:2315] 2025-03-31T16:03:28.162549Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start 
KqpResourceInfoExchangerActor at [1:459:2335] 2025-03-31T16:03:28.162682Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-03-31T16:03:28.162730Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2025-03-31T16:03:28.162764Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-03-31T16:03:28.162787Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2025-03-31T16:03:28.162936Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-03-31T16:03:28.181686Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1743437008 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-03-31T16:03:28.182003Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-03-31T16:03:28.182063Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1743437008 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-03-31T16:03:28.182179Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-03-31T16:03:28.182205Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-03-31T16:03:28.182285Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1743437008 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-03-31T16:03:28.182437Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-03-31T16:03:28.182572Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-03-31T16:03:28.182641Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-03-31T16:03:28.182659Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-03-31T16:03:28.182719Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1743437008 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-03-31T16:03:28.183350Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2025-03-31T16:03:28.183401Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-03-31T16:03:28.183564Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-03-31T16:03:28.183967Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-03-31T16:03:28.184223Z node 2 
:KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2025-03-31T16:03:28.184373Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2025-03-31T16:03:28.184476Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-03-31T16:03:28.184591Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-03-31T16:03:29.241765Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request 2025-03-31T16:03:29.241900Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request 2025-03-31T16:03:29.242181Z node 1 :PIPE_SERVER ERROR: [72057594046447617] NodeDisconnected NodeId# 2 2025-03-31T16:03:29.242385Z node 1 :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 2 2025-03-31T16:03:29.242868Z node 1 :PIPE_SERVER ERROR: [72057594046578946] NodeDisconnected NodeId# 2 2025-03-31T16:03:29.243638Z node 2 :TX_PROXY WARN: actor# [2:145:2087] HANDLE TEvClientDestroyed from tablet# 72057594046447617 2025-03-31T16:03:29.243812Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2025-03-31T16:03:29.244040Z node 2 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:58:2073] ServerId# [1:349:2267] TabletId# 72057594037932033 PipeClientId# [2:58:2073] 2025-03-31T16:03:29.244268Z node 2 :KQP_RESOURCE_MANAGER INFO: Subcriber is not available for info exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-03-31T16:03:29.244332Z node 2 :KQP_RESOURCE_MANAGER INFO: Kill previous info exchanger subscriber for 'kqpexch+/dc-1' at [2:463:2103], reason: tenant updated 2025-03-31T16:03:29.244604Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-03-31T16:03:29.248491Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-03-31T16:03:29.248953Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-03-31T16:03:30.071836Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request >> KqpRm::NotEnoughExecutionUnits [GOOD] >> KqpRm::SingleTask [GOOD] >> TSequence::AlterSequence [GOOD] >> TSequence::AlterTableSetDefaultFromSequence >> KqpRm::ManyTasks [GOOD] >> TExportToS3Tests::CorruptedDyNumber [GOOD] >> TExportToS3Tests::CompletedExportEndTime >> TExternalTableTest::SchemeErrors >> TExportToS3Tests::CancelledExportEndTime [GOOD] >> TExportToS3Tests::Checksums >> TExternalTableTest::ReplaceExternalTableIfNotExistsShouldFailIfFeatureFlagIsNotSet >> TExternalTableTest::CreateExternalTable >> THiveTest::TestCreateTabletAndMixedReassignGroups3 [GOOD] >> THiveTest::TestCreateExternalTablet >> TExportToS3Tests::ExportStartTime [GOOD] >> TExportToS3Tests::ExportPartitioningSettings >> KqpRm::Reduce [GOOD] >> TExternalTableTest::ParallelCreateSameExternalTable >> TSequence::CreateTableWithDefaultFromSequence [GOOD] >> TSequence::CreateTableWithDefaultFromSequenceAndIndex >> KqpRm::ResourceBrokerNotEnoughResources [GOOD] >> TExportToS3Tests::TablePermissions [GOOD] >> TExternalTableTest::ParallelCreateExternalTable >> TExportToS3Tests::ShouldRestartOnScanErrors [GOOD] >> TExportToS3Tests::ShouldSucceedOnConcurrentExport ------- [TM] {asan, default-linux-x86_64, 
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::NotEnoughExecutionUnits [GOOD]
Test command err:
2025-03-31T16:03:30.600667Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000
2025-03-31T16:03:30.601211Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/0l5c/000c4a/r3tmp/tmp2D6AqZ/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000
2025-03-31T16:03:30.601751Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/0l5c/000c4a/r3tmp/tmp2D6AqZ/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/0l5c/000c4a/r3tmp/tmp2D6AqZ/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 4830868441028977280 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000
2025-03-31T16:03:30.641302Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap
2025-03-31T16:03:30.641609Z node 2 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap
2025-03-31T16:03:30.662606Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [2:463:2100] with ResourceBroker at [2:434:2099]
2025-03-31T16:03:30.662748Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [2:464:2101]
2025-03-31T16:03:30.662900Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [1:462:2340] with ResourceBroker at [1:433:2321]
2025-03-31T16:03:30.662978Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [1:465:2341]
2025-03-31T16:03:30.668888Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher
2025-03-31T16:03:30.668961Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher.
2025-03-31T16:03:30.669006Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher
2025-03-31T16:03:30.669031Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher.
2025-03-31T16:03:30.669096Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources
2025-03-31T16:03:30.682076Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1743437010 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100
2025-03-31T16:03:30.682438Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources
2025-03-31T16:03:30.682525Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1743437010 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100
2025-03-31T16:03:30.682743Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1
2025-03-31T16:03:30.682914Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1
2025-03-31T16:03:30.682984Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1
2025-03-31T16:03:30.683070Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources
2025-03-31T16:03:30.683240Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1743437010 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100
2025-03-31T16:03:30.683463Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1
2025-03-31T16:03:30.683489Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources
2025-03-31T16:03:30.683552Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1743437010 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100
2025-03-31T16:03:30.684240Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0
2025-03-31T16:03:30.684451Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1
2025-03-31T16:03:30.684734Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1
2025-03-31T16:03:30.685188Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1
2025-03-31T16:03:30.685331Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1
2025-03-31T16:03:30.685410Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1
2025-03-31T16:03:30.685503Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1
2025-03-31T16:03:30.685770Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2
2025-03-31T16:03:30.685912Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2
2025-03-31T16:03:30.686020Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::SingleTask [GOOD]
Test command err:
2025-03-31T16:03:30.673388Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000
2025-03-31T16:03:30.673810Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/0l5c/000c40/r3tmp/tmp3M196y/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000
2025-03-31T16:03:30.674327Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/0l5c/000c40/r3tmp/tmp3M196y/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/0l5c/000c40/r3tmp/tmp3M196y/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 15574454137194336094 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000
2025-03-31T16:03:30.713417Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap
2025-03-31T16:03:30.713720Z node 2 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap
2025-03-31T16:03:30.726870Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [2:461:2100] with ResourceBroker at [2:432:2099]
2025-03-31T16:03:30.727009Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [2:462:2101]
2025-03-31T16:03:30.727288Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [1:460:2338] with ResourceBroker at [1:431:2319]
2025-03-31T16:03:30.727355Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [1:463:2339]
2025-03-31T16:03:30.727537Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher
2025-03-31T16:03:30.727589Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher.
2025-03-31T16:03:30.727609Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher
2025-03-31T16:03:30.727623Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher.
2025-03-31T16:03:30.727760Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources
2025-03-31T16:03:30.741606Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1743437010 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100
2025-03-31T16:03:30.741771Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources
2025-03-31T16:03:30.741852Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1743437010 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100
2025-03-31T16:03:30.742124Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1
2025-03-31T16:03:30.742267Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1
2025-03-31T16:03:30.742340Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1
2025-03-31T16:03:30.742361Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources
2025-03-31T16:03:30.742434Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1743437010 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100
2025-03-31T16:03:30.742551Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1
2025-03-31T16:03:30.742575Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources
2025-03-31T16:03:30.742624Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1743437010 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100
2025-03-31T16:03:30.743131Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0
2025-03-31T16:03:30.743242Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1
2025-03-31T16:03:30.743696Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1
2025-03-31T16:03:30.744105Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1
2025-03-31T16:03:30.744250Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2
2025-03-31T16:03:30.744284Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1
2025-03-31T16:03:30.744422Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2
2025-03-31T16:03:30.744587Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2
2025-03-31T16:03:30.744653Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1
2025-03-31T16:03:30.747127Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-2-1 (1 by [1:460:2338]) priority=0 resources={0, 100}
2025-03-31T16:03:30.747197Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-2-1 (1 by [1:460:2338]) to queue queue_kqp_resource_manager
2025-03-31T16:03:30.747236Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-2-1 (1 by [1:460:2338]) from queue queue_kqp_resource_manager
2025-03-31T16:03:30.747262Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-2-1 (1 by [1:460:2338]) to queue queue_kqp_resource_manager
2025-03-31T16:03:30.747305Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.250000 (insert task kqp-1-2-1 (1 by [1:460:2338]))
2025-03-31T16:03:30.747480Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 2. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 }
2025-03-31T16:03:30.747666Z node 1 :RESOURCE_BROKER DEBUG: Finish task kqp-1-2-1 (1 by [1:460:2338]) (release resources {0, 100})
2025-03-31T16:03:30.747701Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.250000 to 0.000000 (remove task kqp-1-2-1 (1 by [1:460:2338]))
2025-03-31T16:03:30.747729Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 2. Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 0.
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::ManyTasks [GOOD]
Test command err:
2025-03-31T16:03:30.759689Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000
2025-03-31T16:03:30.760274Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/0l5c/000c39/r3tmp/tmptHBl9S/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000
2025-03-31T16:03:30.760961Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/0l5c/000c39/r3tmp/tmptHBl9S/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/0l5c/000c39/r3tmp/tmptHBl9S/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 6757353381175170494 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000
2025-03-31T16:03:30.818614Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap
2025-03-31T16:03:30.818959Z node 2 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap
2025-03-31T16:03:30.842159Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [2:461:2100] with ResourceBroker at [2:432:2099]
2025-03-31T16:03:30.842303Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [2:462:2101]
2025-03-31T16:03:30.842528Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [1:460:2338] with ResourceBroker at [1:431:2319]
2025-03-31T16:03:30.842598Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [1:463:2339]
2025-03-31T16:03:30.842775Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher
2025-03-31T16:03:30.842823Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher.
2025-03-31T16:03:30.842858Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher
2025-03-31T16:03:30.842880Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher.
2025-03-31T16:03:30.843104Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources
2025-03-31T16:03:30.861117Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1743437010 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100
2025-03-31T16:03:30.861345Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources
2025-03-31T16:03:30.861447Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1743437010 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100
2025-03-31T16:03:30.861833Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1
2025-03-31T16:03:30.861972Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1
2025-03-31T16:03:30.862083Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1
2025-03-31T16:03:30.862116Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources
2025-03-31T16:03:30.862226Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1743437010 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100
2025-03-31T16:03:30.862410Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1
2025-03-31T16:03:30.862446Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources
2025-03-31T16:03:30.862526Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1743437010 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100
2025-03-31T16:03:30.863248Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0
2025-03-31T16:03:30.863343Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1
2025-03-31T16:03:30.863800Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1
2025-03-31T16:03:30.864247Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1
2025-03-31T16:03:30.864463Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2
2025-03-31T16:03:30.864516Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1
2025-03-31T16:03:30.864734Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2
2025-03-31T16:03:30.864919Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2
2025-03-31T16:03:30.865033Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1
2025-03-31T16:03:30.868135Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-1-1 (1 by [1:460:2338]) priority=0 resources={0, 100}
2025-03-31T16:03:30.868206Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-1-1 (1 by [1:460:2338]) to queue queue_kqp_resource_manager
2025-03-31T16:03:30.868259Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-1-1 (1 by [1:460:2338]) from queue queue_kqp_resource_manager
2025-03-31T16:03:30.868297Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-1-1 (1 by [1:460:2338]) to queue queue_kqp_resource_manager
2025-03-31T16:03:30.868364Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.250000 (insert task kqp-1-1-1 (1 by [1:460:2338]))
2025-03-31T16:03:30.868558Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 1. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 }
2025-03-31T16:03:30.868779Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-2-2 (2 by [1:460:2338]) priority=0 resources={0, 100}
2025-03-31T16:03:30.868811Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-2-2 (2 by [1:460:2338]) to queue queue_kqp_resource_manager
2025-03-31T16:03:30.868863Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-2-2 (2 by [1:460:2338]) from queue queue_kqp_resource_manager
2025-03-31T16:03:30.868890Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-2-2 (2 by [1:460:2338]) to queue queue_kqp_resource_manager
2025-03-31T16:03:30.868932Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.250000 to 0.500000 (insert task kqp-1-2-2 (2 by [1:460:2338]))
2025-03-31T16:03:30.868965Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 2. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 }
2025-03-31T16:03:30.869071Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-3-3 (3 by [1:460:2338]) priority=0 resources={0, 100}
2025-03-31T16:03:30.869096Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-3-3 (3 by [1:460:2338]) to queue queue_kqp_resource_manager
2025-03-31T16:03:30.869121Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-3-3 (3 by [1:460:2338]) from queue queue_kqp_resource_manager
2025-03-31T16:03:30.869144Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-3-3 (3 by [1:460:2338]) to queue queue_kqp_resource_manager
2025-03-31T16:03:30.869166Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.500000 to 0.750000 (insert task kqp-1-3-3 (3 by [1:460:2338]))
2025-03-31T16:03:30.869193Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 3. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 }
2025-03-31T16:03:30.869294Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-4-4 (4 by [1:460:2338]) priority=0 resources={0, 100}
2025-03-31T16:03:30.869323Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-4-4 (4 by [1:460:2338]) to queue queue_kqp_resource_manager
2025-03-31T16:03:30.869357Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-4-4 (4 by [1:460:2338]) from queue queue_kqp_resource_manager
2025-03-31T16:03:30.869396Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-4-4 (4 by [1:460:2338]) to queue queue_kqp_resource_manager
2025-03-31T16:03:30.869424Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.750000 to 1.000000 (insert task kqp-1-4-4 (4 by [1:460:2338]))
2025-03-31T16:03:30.869452Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 4. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 }
2025-03-31T16:03:30.869557Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-5-5 (5 by [1:460:2338]) priority=0 resources={0, 100}
2025-03-31T16:03:30.869587Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-5-5 (5 by [1:460:2338]) to queue queue_kqp_resource_manager
2025-03-31T16:03:30.869612Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-5-5 (5 by [1:460:2338]) from queue queue_kqp_resource_manager
2025-03-31T16:03:30.869634Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-5-5 (5 by [1:460:2338]) to queue queue_kqp_resource_manager
2025-03-31T16:03:30.869657Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 1.000000 to 1.250000 (insert task kqp-1-5-5 (5 by [1:460:2338]))
2025-03-31T16:03:30.869692Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 5. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 }
2025-03-31T16:03:30.869782Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-6-6 (6 by [1:460:2338]) priority=0 resources={0, 100}
2025-03-31T16:03:30.869805Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-6-6 (6 by [1:460:2338]) to queue queue_kqp_resource_manager
2025-03-31T16:03:30.869821Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-6-6 (6 by [1:460:2338]) from queue queue_kqp_resource_manager
2025-03-31T16:03:30.869836Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-6-6 (6 by [1:460:2338]) to queue queue_kqp_resource_manager
2025-03-31T16:03:30.869851Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 1.250000 to 1.500000 (insert task kqp-1-6-6 (6 by [1:460:2338]))
2025-03-31T16:03:30.869865Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 6. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 }
2025-03-31T16:03:30.869927Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-7-7 (7 by [1:460:2338]) priority=0 resources={0, 100}
2025-03-31T16:03:30.869973Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-7-7 (7 by [1:460:2338]) to queue queue_kqp_resource_manager
2025-03-31T16:03:30.869999Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-7-7 (7 by [1:460:2338]) from queue queue_kqp_resource_manager
2025-03-31T16:03:30.870012Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-7-7 (7 by [1:460:2338]) to queue queue_kqp_resource_manager
2025-03-31T16:03:30.870030Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 1.500000 to 1.750000 (insert task kqp-1-7-7 (7 by [1:460:2338]))
2025-03-31T16:03:30.870050Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 7. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 }
2025-03-31T16:03:30.870155Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-8-8 (8 by [1:460:2338]) priority=0 resources={0, 100}
2025-03-31T16:03:30.870203Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-8-8 (8 by [1:460:2338]) to queue queue_kqp_resource_manager
2025-03-31T16:03:30.870239Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-8-8 (8 by [1:460:2338]) from queue queue_kqp_resource_manager
2025-03-31T16:03:30.870272Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-8-8 (8 by [1:460:2338]) to queue queue_kqp_resource_manager
2025-03-31T16:03:30.870301Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 1.750000 to 2.000000 (insert task kqp-1-8-8 (8 by [1:460:2338]))
2025-03-31T16:03:30.870325Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 8. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 }
2025-03-31T16:03:30.870422Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-9-9 (9 by [1:460:2338]) priority=0 resources={0, 100}
2025-03-31T16:03:30.870445Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-9-9 (9 by [1:460:2338]) to queue queue_kqp_resource_manager
2025-03-31T16:03:30.870485Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-9-9 (9 by [1:460:2338]) from queue queue_kqp_resource_manager
2025-03-31T16:03:30.870514Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-9-9 (9 by [1:460:2338]) to queue queue_kqp_resource_manager
2025-03-31T16:03:30.870548Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 2.000000 to 2.250000 (insert task kqp-1-9-9 (9 by [1:460:2338]))
2025-03-31T16:03:30.870582Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 9. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 }
2025-03-31T16:03:30.870706Z node 1 :RESOURCE_BROKER DEBUG: Finish task kqp-1-1-1 (1 by [1:460:2338]) (release resources {0, 100})
2025-03-31T16:03:30.870750Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 2.250000 to 2.000000 (remove task kqp-1-1-1 (1 by [1:460:2338]))
2025-03-31T16:03:30.870792Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 1. Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 0.
>> KqpRm::NotEnoughMemory [GOOD]
>> TExternalTableTest::ReadOnlyMode
>> TExportToS3Tests::ShouldOmitNonStrictStorageSettings
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::Reduce [GOOD]
Test command err:
2025-03-31T16:03:31.041325Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000
2025-03-31T16:03:31.041896Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/0l5c/000c38/r3tmp/tmpraIVMb/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000
2025-03-31T16:03:31.042649Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/0l5c/000c38/r3tmp/tmpraIVMb/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/0l5c/000c38/r3tmp/tmpraIVMb/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 1374246811705706180 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000
2025-03-31T16:03:31.084514Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap
2025-03-31T16:03:31.084806Z node 2 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap
2025-03-31T16:03:31.104341Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [2:461:2100] with ResourceBroker at [2:432:2099]
2025-03-31T16:03:31.104466Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [2:462:2101]
2025-03-31T16:03:31.104646Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [1:460:2338] with ResourceBroker at [1:431:2319]
2025-03-31T16:03:31.104701Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [1:463:2339]
2025-03-31T16:03:31.104835Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher
2025-03-31T16:03:31.104874Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher.
2025-03-31T16:03:31.104901Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher
2025-03-31T16:03:31.104919Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher.
2025-03-31T16:03:31.105051Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources
2025-03-31T16:03:31.119747Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1743437011 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100
2025-03-31T16:03:31.119954Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources
2025-03-31T16:03:31.120043Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1743437011 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100
2025-03-31T16:03:31.120393Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1
2025-03-31T16:03:31.120522Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1
2025-03-31T16:03:31.120627Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1
2025-03-31T16:03:31.120663Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources
2025-03-31T16:03:31.120784Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1743437011 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100
2025-03-31T16:03:31.120953Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1
2025-03-31T16:03:31.120981Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources
2025-03-31T16:03:31.121033Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1743437011 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100
2025-03-31T16:03:31.121517Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0
2025-03-31T16:03:31.121583Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1
2025-03-31T16:03:31.121917Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1
2025-03-31T16:03:31.122207Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1
2025-03-31T16:03:31.122375Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2
2025-03-31T16:03:31.122410Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1
2025-03-31T16:03:31.122579Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2
2025-03-31T16:03:31.122689Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2
2025-03-31T16:03:31.122773Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1
2025-03-31T16:03:31.126077Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-1-1 (1 by [1:460:2338]) priority=0 resources={0, 100}
2025-03-31T16:03:31.126142Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-1-1 (1 by [1:460:2338]) to queue queue_kqp_resource_manager
2025-03-31T16:03:31.126183Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-1-1 (1 by [1:460:2338]) from queue queue_kqp_resource_manager
2025-03-31T16:03:31.126214Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-1-1 (1 by [1:460:2338]) to queue queue_kqp_resource_manager
2025-03-31T16:03:31.126266Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.250000 (insert task kqp-1-1-1 (1 by [1:460:2338]))
2025-03-31T16:03:31.126441Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 1. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 }
2025-03-31T16:03:31.126656Z node 1 :RESOURCE_BROKER DEBUG: Update task kqp-1-1-1 (1 by [1:460:2338]) (priority=0 type=kqp_query resources={0, 30} resubmit=0)
2025-03-31T16:03:31.126695Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-1-1 (1 by [1:460:2338]) to queue queue_kqp_resource_manager
2025-03-31T16:03:31.126735Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.075000 (insert task kqp-1-1-1 (1 by [1:460:2338]))
2025-03-31T16:03:31.126782Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 1. Released resources, Memory: 70, Free Tier: 0, ExecutionUnits: 0.
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::ResourceBrokerNotEnoughResources [GOOD]
>> TExportToS3Tests::EnableChecksumsPersistance [GOOD]
Test command err:
2025-03-31T16:03:31.147157Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000
2025-03-31T16:03:31.147758Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/0l5c/000c2c/r3tmp/tmpj3TocU/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000
2025-03-31T16:03:31.148355Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/0l5c/000c2c/r3tmp/tmpj3TocU/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/0l5c/000c2c/r3tmp/tmpj3TocU/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 2681832344398388311 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000
2025-03-31T16:03:31.190168Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap
2025-03-31T16:03:31.190475Z node 2 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap
2025-03-31T16:03:31.206561Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [2:461:2100] with ResourceBroker at [2:432:2099]
2025-03-31T16:03:31.206727Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [2:462:2101]
2025-03-31T16:03:31.206912Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [1:460:2338] with ResourceBroker at [1:431:2319]
2025-03-31T16:03:31.206997Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [1:463:2339]
2025-03-31T16:03:31.207201Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher
2025-03-31T16:03:31.207252Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher.
2025-03-31T16:03:31.207287Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher
2025-03-31T16:03:31.207310Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher.
2025-03-31T16:03:31.207449Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources
2025-03-31T16:03:31.223662Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1743437011 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100
2025-03-31T16:03:31.223854Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources
2025-03-31T16:03:31.224003Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1743437011 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 100000000 Memory { Pool: 1 Available: 100000000 } ExecutionUnits: 100
2025-03-31T16:03:31.224352Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1
2025-03-31T16:03:31.224480Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1
2025-03-31T16:03:31.224597Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1
2025-03-31T16:03:31.224649Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources
2025-03-31T16:03:31.224750Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1743437011 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100
2025-03-31T16:03:31.225040Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1
2025-03-31T16:03:31.225080Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources
2025-03-31T16:03:31.225168Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1743437011 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 100000000 Memory { Pool: 1 Available: 100000000 } ExecutionUnits: 100
2025-03-31T16:03:31.225836Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0
2025-03-31T16:03:31.225958Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1
2025-03-31T16:03:31.226370Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1
2025-03-31T16:03:31.226869Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1
2025-03-31T16:03:31.227157Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2
2025-03-31T16:03:31.227231Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1
2025-03-31T16:03:31.227455Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2
2025-03-31T16:03:31.227651Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2
2025-03-31T16:03:31.227778Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1
2025-03-31T16:03:31.230895Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-2-1 (1 by [1:460:2338]) priority=0 resources={0, 1000}
2025-03-31T16:03:31.230995Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-2-1 (1 by [1:460:2338]) to queue queue_kqp_resource_manager
2025-03-31T16:03:31.231054Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 1000} for task kqp-1-2-1 (1 by [1:460:2338]) from queue queue_kqp_resource_manager
2025-03-31T16:03:31.231138Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-2-1 (1 by [1:460:2338]) to queue queue_kqp_resource_manager
2025-03-31T16:03:31.231216Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 2.500000 (insert task kqp-1-2-1 (1 by [1:460:2338]))
2025-03-31T16:03:31.231450Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 2. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 1000ExternalMemory: 0 }
2025-03-31T16:03:31.231534Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-2-2 (2 by [1:460:2338]) priority=0 resources={0, 100000}
2025-03-31T16:03:31.231574Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-2-2 (2 by [1:460:2338]) to queue queue_kqp_resource_manager
2025-03-31T16:03:31.231619Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task kqp-1-2-2 (2 by [1:460:2338])
2025-03-31T16:03:31.231661Z node 1 :RESOURCE_BROKER DEBUG: Removing task kqp-1-2-2 (2 by [1:460:2338])
2025-03-31T16:03:31.231745Z node 1 :KQP_RESOURCE_MANAGER NOTICE: TxId: 1, taskId: 2. Not enough memory for query, requested: 100000. TxResourcesInfo { TxId: 1, Database: , tx initially granted memory: 0B, tx total memory allocations: 1000B, tx largest successful memory allocation: 1000B, tx last failed memory allocation: 0B, tx total execution units: 0, started at: 2025-03-31T16:03:31.230798Z }
>> TExportToS3Tests::EncryptedExport
>> TExportToS3Tests::RebootDuringCompletion
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::NotEnoughMemory [GOOD]
Test command err:
2025-03-31T16:03:31.311746Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000
2025-03-31T16:03:31.312409Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/0l5c/000c24/r3tmp/tmphS0pCQ/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000
2025-03-31T16:03:31.313048Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/0l5c/000c24/r3tmp/tmphS0pCQ/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/0l5c/000c24/r3tmp/tmphS0pCQ/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 7899593668834476313 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000
2025-03-31T16:03:31.350322Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap
2025-03-31T16:03:31.350561Z node 2 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap
2025-03-31T16:03:31.363202Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [2:461:2100] with ResourceBroker at [2:432:2099]
2025-03-31T16:03:31.363308Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [2:462:2101]
2025-03-31T16:03:31.363461Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [1:460:2338] with ResourceBroker at [1:431:2319]
2025-03-31T16:03:31.363503Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [1:463:2339]
2025-03-31T16:03:31.363630Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher
2025-03-31T16:03:31.363667Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher.
2025-03-31T16:03:31.363688Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher
2025-03-31T16:03:31.363703Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher.
2025-03-31T16:03:31.363815Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources
2025-03-31T16:03:31.374219Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1743437011 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100
2025-03-31T16:03:31.374396Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources
2025-03-31T16:03:31.374465Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1743437011 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100
2025-03-31T16:03:31.374806Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1
2025-03-31T16:03:31.374933Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1
2025-03-31T16:03:31.375030Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1
2025-03-31T16:03:31.375055Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources
2025-03-31T16:03:31.375166Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1743437011 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100
2025-03-31T16:03:31.375289Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1
2025-03-31T16:03:31.375315Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources
2025-03-31T16:03:31.375413Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1743437011 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100
2025-03-31T16:03:31.375926Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0
2025-03-31T16:03:31.376001Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1
2025-03-31T16:03:31.376305Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1
2025-03-31T16:03:31.376642Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1
2025-03-31T16:03:31.376796Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2
2025-03-31T16:03:31.376834Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1
2025-03-31T16:03:31.377037Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2
2025-03-31T16:03:31.377173Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2
2025-03-31T16:03:31.377244Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1
>> THiveTest::TestCreateExternalTablet [GOOD]
>> TExternalTableTest::SchemeErrors [GOOD]
>> TExportToS3Tests::ShouldSucceedOnManyTables [GOOD]
>> TExportToS3Tests::ShouldSucceedOnConcurrentTxs
>> TExternalTableTest::ReplaceExternalTableIfNotExistsShouldFailIfFeatureFlagIsNotSet [GOOD]
>> TExternalTableTest::CreateExternalTable [GOOD]
>> TExternalTableTest::CreateExternalTableShouldFailIfSuchEntityAlreadyExists
>> TExternalTableTest::ParallelCreateSameExternalTable [GOOD]
>> TExportToS3Tests::Checksums [GOOD]
>> TExportToS3Tests::Changefeeds
>> TSequence::CreateTableWithDefaultFromSequenceAndIndex [GOOD]
>> TExportToS3Tests::ExportPartitioningSettings [GOOD]
>> TExportToS3Tests::ExportIndexTablePartitioningSettings
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::TablePermissions [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141]
2025-03-31T16:03:27.692887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-03-31T16:03:27.692989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-31T16:03:27.693040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-03-31T16:03:27.693075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-03-31T16:03:27.693120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-03-31T16:03:27.693182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-03-31T16:03:27.693250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-31T16:03:27.693321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-03-31T16:03:27.693694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-03-31T16:03:27.782503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-31T16:03:27.782594Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:03:27.788676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-03-31T16:03:27.788839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-03-31T16:03:27.789040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-03-31T16:03:27.795361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-03-31T16:03:27.795625Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-03-31T16:03:27.796467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-03-31T16:03:27.796752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-03-31T16:03:27.799312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-03-31T16:03:27.800757Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-31T16:03:27.800836Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-31T16:03:27.801032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-03-31T16:03:27.801096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-31T16:03:27.801140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-03-31T16:03:27.801338Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-03-31T16:03:27.809751Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062]
2025-03-31T16:03:27.942930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-03-31T16:03:27.943234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:03:27.943515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-03-31T16:03:27.943806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-03-31T16:03:27.943873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:03:27.947380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-03-31T16:03:27.947565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-03-31T16:03:27.947805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:03:27.947869Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-03-31T16:03:27.947904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-03-31T16:03:27.947938Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-03-31T16:03:27.954166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:03:27.954242Z node
1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:03:27.954280Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:03:27.956867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:27.956932Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:27.956968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:27.957034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:03:27.968423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:03:27.971639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:03:27.971897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:03:27.973003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:27.973182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:03:27.973241Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:27.973519Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:03:27.973574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:27.973747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:03:27.973837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:03:27.976309Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:27.976371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:27.976605Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:27.976649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: 
[1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:03:27.976884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:27.976929Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:03:27.977051Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:03:27.977089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:27.977150Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:03:27.977183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:27.977223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:03:27.977264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:27.977332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:03:27.977374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:03:27.977464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:03:27.977504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:03:27.977533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:03:27.979770Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:03:27.979907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:03:27.979952Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
HEMESHARD INFO: Change state for txid 281474976710759:0 3 -> 128 2025-03-31T16:03:31.286542Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-03-31T16:03:31.286711Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-03-31T16:03:31.286768Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710759:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:03:31.286840Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710759 ready parts: 1/1 2025-03-31T16:03:31.286970Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409547 Flags: 2 } ExecLevel: 0 TxId: 281474976710759 MinStep: 5000005 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:03:31.288756Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710759:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710759 msg type: 269090816 2025-03-31T16:03:31.288863Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710759, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710759 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710759 at step: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72075186233409547 for txId: 281474976710759 at step: 5000005 2025-03-31T16:03:31.289313Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:31.289415Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710759 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 12884904044 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:03:31.289467Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710759:0 HandleReply TEvOperationPlan, stepId: 5000005, at schemeshard: 72057594046678944 2025-03-31T16:03:31.289573Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 128 -> 129 2025-03-31T16:03:31.289668Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-03-31T16:03:31.326848Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:31.326912Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710759, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-03-31T16:03:31.327246Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:31.327299Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:203:2205], at schemeshard: 72057594046678944, txId: 281474976710759, path id: 4 REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:18831 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c 
HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: E7D52700-526D-4378-875A-B52E42A2676F amz-sdk-request: attempt=1 content-length: 73 content-md5: q/ySd5GvS6I/qOVxS/4Thg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /metadata.json / / 73 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000005 2025-03-31T16:03:31.327958Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-03-31T16:03:31.328014Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 ProgressState, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 281474976710759 2025-03-31T16:03:31.330714Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2025-03-31T16:03:31.330813Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2025-03-31T16:03:31.330887Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710759 2025-03-31T16:03:31.330925Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710759, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 3 2025-03-31T16:03:31.330985Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-03-31T16:03:31.331167Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 0/1, is published: true REQUEST: PUT /permissions.pb HTTP/1.1 HEADERS: Host: localhost:18831 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: DF451699-EBE7-4BAC-9D98-ADAA745741C2 amz-sdk-request: attempt=1 content-length: 137 content-md5: WeIr3D5bqIjvqMGEjx2JrA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /permissions.pb / / 137 2025-03-31T16:03:31.334814Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710759 REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:18831 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 8273D75E-F375-4847-A840-BD42772FCF76 amz-sdk-request: attempt=1 content-length: 355 content-md5: 4DhJNWgTpoG3PVvZ0uCHUA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /scheme.pb / / 355 REQUEST: PUT /data_00.csv HTTP/1.1 HEADERS: Host: localhost:18831 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 5858D0E3-88B1-4B1A-A48C-D6DF0C5088B5 amz-sdk-request: attempt=1 content-length: 0 content-md5: 1B2M2Y8AsgTpgAmY7PhCfg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv / / 0 2025-03-31T16:03:31.351221Z node 3 
:FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 446 RawX2: 12884904303 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-03-31T16:03:31.351267Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976710759, tablet: 72075186233409547, partId: 0 2025-03-31T16:03:31.351415Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944, message: Source { RawX1: 446 RawX2: 12884904303 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-03-31T16:03:31.351539Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 446 RawX2: 12884904303 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-03-31T16:03:31.351607Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710759:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:31.351659Z node 3 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-03-31T16:03:31.351711Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976710759:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-03-31T16:03:31.351753Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 129 -> 240 2025-03-31T16:03:31.351916Z node 3 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 281474976710759:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:31.358084Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-03-31T16:03:31.358476Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-03-31T16:03:31.358527Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710759:0 ProgressState 2025-03-31T16:03:31.358649Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 1/1 2025-03-31T16:03:31.358682Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-03-31T16:03:31.358717Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 1/1 2025-03-31T16:03:31.358749Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-03-31T16:03:31.358780Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 1/1, is published: true 2025-03-31T16:03:31.358846Z node 3 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:125:2151] message: TxId: 281474976710759 2025-03-31T16:03:31.358886Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-03-31T16:03:31.358922Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710759:0 2025-03-31T16:03:31.358967Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710759:0 2025-03-31T16:03:31.359103Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-03-31T16:03:31.362808Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710759 2025-03-31T16:03:31.362882Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710759 2025-03-31T16:03:31.368170Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-03-31T16:03:31.368252Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [3:475:2436] TestWaitNotification: OK eventTxId 103 >> TExternalTableTest::ParallelCreateExternalTable [GOOD] >> YdbIndexTable::MultiShardTableOneUniqIndex >> TExportToS3Tests::CompletedExportEndTime [GOOD] >> TExportToS3Tests::ChecksumsWithCompression >> KqpRm::NodesMembershipByExchanger [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::SchemeErrors [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:111:2142] 2025-03-31T16:03:31.902309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:03:31.902423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:03:31.902465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:03:31.902501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:03:31.902548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:03:31.902592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:03:31.902676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:03:31.902773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:03:31.903116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:03:31.990292Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new 
config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-03-31T16:03:31.990378Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:31.995055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:03:31.995307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:03:31.995501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:03:32.001258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:03:32.001518Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:03:32.002269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:32.002534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:03:32.005002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:32.006113Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:32.006179Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:32.006486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:03:32.006545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:32.006591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:03:32.006785Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:03:32.014617Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:238:2058] recipient: [1:15:2062] 2025-03-31T16:03:32.167729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:03:32.167991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:32.168239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:03:32.168483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:03:32.168541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation 
type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:32.170793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:32.170942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:03:32.171157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:32.171226Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:03:32.171262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:03:32.171296Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:03:32.173262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:32.173327Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:03:32.173368Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:03:32.175468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:32.175525Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:32.175563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:32.175631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:03:32.179490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:03:32.183981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:03:32.184199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:03:32.185264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:32.185404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969446 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:03:32.185450Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:32.185793Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
Change state for txid 1:0 128 -> 240 2025-03-31T16:03:32.185853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:32.186056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:03:32.186155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:03:32.190433Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:32.190504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:32.190702Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:32.190752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:03:32.191056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:32.191135Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:03:32.191237Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:03:32.191300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:32.191356Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:03:32.191392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:32.191429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:03:32.191498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:32.191542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:03:32.191573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:03:32.191658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:03:32.191702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:03:32.191737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:03:32.193789Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575940466789 ... 
0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "ExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 126 2025-03-31T16:03:32.265490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "BlaBlaType" } } } TxId: 126 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:03:32.265764Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 126:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "BlaBlaType" } } 2025-03-31T16:03:32.265831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalTable Propose: opId# 126:0, path# /MyRoot/DirA/Table2 2025-03-31T16:03:32.266102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 126:1, propose status:StatusSchemeError, reason: Type 'BlaBlaType' specified for column 'RowId' is not supported by storage, at schemeshard: 72057594046678944 2025-03-31T16:03:32.268310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 126, response: Status: StatusSchemeError Reason: "Type \'BlaBlaType\' specified for column \'RowId\' is not supported by storage" TxId: 126 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:03:32.268473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 126, database: /MyRoot, subject: , status: StatusSchemeError, reason: Type 'BlaBlaType' specified for column 'RowId' is not supported by storage, operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 126, wait until txId: 126 TestModificationResults wait txId: 127 2025-03-31T16:03:32.270766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "" Type: "Uint64" } } } TxId: 127 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:03:32.271124Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 127:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "" Type: "Uint64" } } 2025-03-31T16:03:32.271235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalTable Propose: opId# 127:0, path# /MyRoot/DirA/Table2 2025-03-31T16:03:32.271358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 127:1, propose status:StatusSchemeError, reason: Columns cannot 
have an empty name, at schemeshard: 72057594046678944 2025-03-31T16:03:32.273426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 127, response: Status: StatusSchemeError Reason: "Columns cannot have an empty name" TxId: 127 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:03:32.273604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 127, database: /MyRoot, subject: , status: StatusSchemeError, reason: Columns cannot have an empty name, operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 127, wait until txId: 127 TestModificationResults wait txId: 128 2025-03-31T16:03:32.275945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" TypeId: 27 } } } TxId: 128 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:03:32.276246Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 128:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" TypeId: 27 } } 2025-03-31T16:03:32.276312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalTable Propose: opId# 128:0, path# /MyRoot/DirA/Table2 2025-03-31T16:03:32.276453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 128:1, propose status:StatusSchemeError, reason: Cannot set TypeId for column 'RowId', use Type, at schemeshard: 72057594046678944 2025-03-31T16:03:32.278582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 128, response: Status: StatusSchemeError Reason: "Cannot set TypeId for column \'RowId\', use Type" TxId: 128 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:03:32.278746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 128, database: /MyRoot, subject: , status: StatusSchemeError, reason: Cannot set TypeId for column 'RowId', use Type, operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 128, wait until txId: 128 TestModificationResults wait txId: 129 2025-03-31T16:03:32.281103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" } } } TxId: 129 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:03:32.281414Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 129:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" } } 2025-03-31T16:03:32.281499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalTable Propose: opId# 129:0, path# /MyRoot/DirA/Table2 2025-03-31T16:03:32.281613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
IgniteOperation, opId: 129:1, propose status:StatusSchemeError, reason: Missing Type for column 'RowId', at schemeshard: 72057594046678944 2025-03-31T16:03:32.283705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 129, response: Status: StatusSchemeError Reason: "Missing Type for column \'RowId\'" TxId: 129 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:03:32.283873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 129, database: /MyRoot, subject: , status: StatusSchemeError, reason: Missing Type for column 'RowId', operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 129, wait until txId: 129 TestModificationResults wait txId: 130 2025-03-31T16:03:32.286149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "Uint64" Id: 2 } Columns { Name: "RowId2" Type: "Uint64" Id: 2 } } } TxId: 130 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:03:32.286454Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 130:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "Uint64" Id: 2 } Columns { Name: "RowId2" Type: "Uint64" Id: 2 } } 2025-03-31T16:03:32.286533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalTable Propose: opId# 130:0, path# /MyRoot/DirA/Table2 2025-03-31T16:03:32.286699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 130:1, propose status:StatusSchemeError, reason: Duplicate column id: 2, at schemeshard: 72057594046678944 2025-03-31T16:03:32.288801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 130, response: Status: StatusSchemeError Reason: "Duplicate column id: 2" TxId: 130 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:03:32.288947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 130, database: /MyRoot, subject: , status: StatusSchemeError, reason: Duplicate column id: 2, operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 130, wait until txId: 130 TestModificationResults wait txId: 131 2025-03-31T16:03:32.291529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource1" Location: "/" Columns { Name: "RowId" Type: "Uint64" } Columns { Name: "Value" Type: "Utf8" } } } TxId: 131 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:03:32.291857Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 131:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource1" Location: "/" Columns { Name: "RowId" Type: "Uint64" } Columns { Name: "Value" Type: "Utf8" 
} } 2025-03-31T16:03:32.291937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalTable Propose: opId# 131:0, path# /MyRoot/DirA/Table2 2025-03-31T16:03:32.292054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 131:1, propose status:StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/ExternalDataSource1', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), at schemeshard: 72057594046678944 2025-03-31T16:03:32.294085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 131, response: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalDataSource1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" TxId: 131 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:03:32.294233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 131, database: /MyRoot, subject: , status: StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/ExternalDataSource1', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 131, wait until txId: 131 >> TExternalTableTest::CreateExternalTableShouldFailIfSuchEntityAlreadyExists [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ReplaceExternalTableIfNotExistsShouldFailIfFeatureFlagIsNotSet [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:111:2142] 2025-03-31T16:03:31.876272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:03:31.876345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:03:31.876374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:03:31.876404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:03:31.876435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:03:31.876468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:03:31.876520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:03:31.876586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:03:31.876819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:03:31.955738Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-03-31T16:03:31.955787Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:31.961731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:03:31.961946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:03:31.962090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:03:31.967349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:03:31.967531Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:03:31.968102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:31.968274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:03:31.969956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:31.970757Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:31.970808Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:31.971010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:03:31.971057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:31.971117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:03:31.971230Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:03:31.979106Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:238:2058] recipient: [1:15:2062] 2025-03-31T16:03:32.098238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:03:32.098475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:32.098710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:03:32.098991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:03:32.099047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose 
itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:32.101753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:32.101921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:03:32.102140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:32.102198Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:03:32.102236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:03:32.102271Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:03:32.104774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:32.104846Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:03:32.104880Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:03:32.108265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:32.108329Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:32.108371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:32.108418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:03:32.112275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:03:32.114704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:03:32.114910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:03:32.116058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:32.116199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969446 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:03:32.116267Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:32.116519Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:03:32.116569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:32.116773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:03:32.116884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:03:32.119545Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:32.119606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:32.119790Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:32.119831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:03:32.120101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:32.120150Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:03:32.120236Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:03:32.120264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:32.120296Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:03:32.120336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:32.120371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:03:32.120433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:32.120481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:03:32.120510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:03:32.120565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:03:32.120600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:03:32.120633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:03:32.122857Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575940466789 ... 
d: 2] 2025-03-31T16:03:32.172316Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:32.172345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-03-31T16:03:32.172371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-03-31T16:03:32.172393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-03-31T16:03:32.172442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:03:32.172476Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-03-31T16:03:32.172574Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-31T16:03:32.172602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T16:03:32.172645Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-31T16:03:32.172683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T16:03:32.172722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-03-31T16:03:32.172770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T16:03:32.172809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-03-31T16:03:32.172839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-03-31T16:03:32.172903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-31T16:03:32.172981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-03-31T16:03:32.173018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-03-31T16:03:32.173051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-03-31T16:03:32.174161Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T16:03:32.174239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T16:03:32.174264Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-03-31T16:03:32.174302Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-03-31T16:03:32.174346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:03:32.176765Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: 
Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T16:03:32.176846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T16:03:32.176880Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-03-31T16:03:32.176915Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-03-31T16:03:32.176949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-31T16:03:32.177037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-03-31T16:03:32.180812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-31T16:03:32.182348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-03-31T16:03:32.182627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-03-31T16:03:32.182684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-03-31T16:03:32.183166Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-03-31T16:03:32.183269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-31T16:03:32.183306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:302:2293] TestWaitNotification: OK eventTxId 101 2025-03-31T16:03:32.183794Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:03:32.184044Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalDataSource" took 245us result status StatusSuccess 2025-03-31T16:03:32.184405Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalDataSource" PathDescription { Self { Name: "ExternalDataSource" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 
ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "ExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 102 2025-03-31T16:03:32.187929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "ExternalTable" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } ReplaceIfExists: true } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:03:32.188279Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 102:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "ExternalTable" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } ReplaceIfExists: true } 2025-03-31T16:03:32.188366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 102:0, explain: Invalid TCreateExternalTable request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, at schemeshard: 72057594046678944 2025-03-31T16:03:32.188416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusPreconditionFailed, reason: Invalid TCreateExternalTable request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, at schemeshard: 72057594046678944 2025-03-31T16:03:32.190861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusPreconditionFailed Reason: "Invalid TCreateExternalTable request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:03:32.191068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Invalid TCreateExternalTable request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, operation: CREATE EXTERNAL TABLE, path: /MyRoot/ExternalTable TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-03-31T16:03:32.191416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-03-31T16:03:32.191457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-03-31T16:03:32.191961Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-03-31T16:03:32.192072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-31T16:03:32.192110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:310:2301] 
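
The block above amounts to ordinary DDL: an external data source is created (txId 101) and a subsequent attempt to create an external table with ReplaceIfExists: true (txId 102) is rejected at propose time because the EnableReplaceIfExistsForExternalEntities feature flag is off. A minimal YQL sketch of the same two steps follows; it is an approximation, not the test code (the test drives schemeshard with raw scheme transactions), and the OR REPLACE spelling plus the absolute-path names are assumptions inferred from the protobuf fields in the log:

CREATE EXTERNAL DATA SOURCE `/MyRoot/ExternalDataSource` WITH (
    SOURCE_TYPE = "ObjectStorage",                -- SourceType: "ObjectStorage" in the describe result
    LOCATION = "https://s3.cloud.net/my_bucket",  -- Location in the describe result
    AUTH_METHOD = "NONE"                          -- Auth { None { } }
);

-- Assumption: OR REPLACE is what sets ReplaceIfExists: true in the request.
-- With the feature flag off, IgniteOperation answers StatusPreconditionFailed,
-- so /MyRoot/ExternalTable is never created and the describe that follows
-- returns StatusPathDoesNotExist.
CREATE OR REPLACE EXTERNAL TABLE `/MyRoot/ExternalTable` (
    key Uint64
) WITH (
    DATA_SOURCE = "/MyRoot/ExternalDataSource",
    LOCATION = "/"
);

Note that the rejection happens entirely in the propose phase: no operation parts are registered, so there is nothing to abort, which is why the log shows a single AUDIT line and an immediate TestModificationResult for txId 102.
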
TestWaitNotification: OK eventTxId 102 2025-03-31T16:03:32.192625Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:03:32.192840Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 198us result status StatusPathDoesNotExist 2025-03-31T16:03:32.193045Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ExternalTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ParallelCreateSameExternalTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:111:2142] 2025-03-31T16:03:32.061672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:03:32.061801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:03:32.061846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:03:32.061905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:03:32.061952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:03:32.062007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:03:32.062088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:03:32.062179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:03:32.062499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:03:32.149624Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: 
"ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-03-31T16:03:32.149682Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:32.155049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:03:32.155324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:03:32.155529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:03:32.161977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:03:32.162236Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:03:32.163068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:32.163325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:03:32.165613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:32.166768Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:32.166831Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:32.167152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:03:32.167215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:32.167274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:03:32.167452Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:03:32.175616Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:238:2058] recipient: [1:15:2062] 2025-03-31T16:03:32.296676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:03:32.296907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:32.297103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:03:32.297309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:03:32.297368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:32.299915Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:32.300104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:03:32.300327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:32.300389Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:03:32.300446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:03:32.300497Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:03:32.303261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:32.303338Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:03:32.303390Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:03:32.309625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:32.309720Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:32.309767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:32.309821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:03:32.314063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:03:32.316659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:03:32.316842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:03:32.317961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:32.318114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969446 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:03:32.318167Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:32.318464Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:03:32.318528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:32.318836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:03:32.318919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:03:32.322239Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:32.322297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:32.322475Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:32.322514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:03:32.322774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:32.322833Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:03:32.322941Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:03:32.322995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:32.323039Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:03:32.323070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:32.323132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:03:32.323197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:32.323236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:03:32.323264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:03:32.323337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:03:32.323374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:03:32.323405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:03:32.325582Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575940466789 ... 
t reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-31T16:03:32.437504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 125, subscribers: 0 2025-03-31T16:03:32.440278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 125 2025-03-31T16:03:32.443578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 125 2025-03-31T16:03:32.447370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 125 TestModificationResult got TxId: 125, wait until txId: 125 TestModificationResults wait txId: 126 TestModificationResult got TxId: 126, wait until txId: 126 TestModificationResults wait txId: 127 TestModificationResult got TxId: 127, wait until txId: 127 2025-03-31T16:03:32.448141Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/NilNoviSubLuna" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:03:32.448419Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/NilNoviSubLuna" took 272us result status StatusSuccess 2025-03-31T16:03:32.448788Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/NilNoviSubLuna" PathDescription { Self { Name: "NilNoviSubLuna" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 125 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "NilNoviSubLuna" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:03:32.449508Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/NilNoviSubLuna" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:03:32.449677Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/NilNoviSubLuna" took 171us result status StatusSuccess 2025-03-31T16:03:32.449989Z 
node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/NilNoviSubLuna" PathDescription { Self { Name: "NilNoviSubLuna" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 125 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "NilNoviSubLuna" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 125 2025-03-31T16:03:32.450314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 125: send EvNotifyTxCompletion 2025-03-31T16:03:32.450360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 125 TestWaitNotification wait txId: 126 2025-03-31T16:03:32.450448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 126: send EvNotifyTxCompletion 2025-03-31T16:03:32.450487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 126 TestWaitNotification wait txId: 127 2025-03-31T16:03:32.450558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 127: send EvNotifyTxCompletion 2025-03-31T16:03:32.450578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 127 2025-03-31T16:03:32.451220Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 125, at schemeshard: 72057594046678944 2025-03-31T16:03:32.451382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 125: got EvNotifyTxCompletionResult 2025-03-31T16:03:32.451436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 125: satisfy waiter [1:340:2331] 2025-03-31T16:03:32.451557Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 126, at schemeshard: 72057594046678944 2025-03-31T16:03:32.451749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 126: got EvNotifyTxCompletionResult 2025-03-31T16:03:32.451775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 126: satisfy waiter [1:340:2331] 2025-03-31T16:03:32.451885Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 127, at schemeshard: 72057594046678944 2025-03-31T16:03:32.451955Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 127: got EvNotifyTxCompletionResult 2025-03-31T16:03:32.451980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 127: satisfy waiter [1:340:2331] TestWaitNotification: OK eventTxId 125 TestWaitNotification: OK eventTxId 126 TestWaitNotification: OK eventTxId 127 2025-03-31T16:03:32.452584Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/NilNoviSubLuna" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:03:32.452827Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/NilNoviSubLuna" took 215us result status StatusSuccess 2025-03-31T16:03:32.453148Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/NilNoviSubLuna" PathDescription { Self { Name: "NilNoviSubLuna" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 125 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "NilNoviSubLuna" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 128 2025-03-31T16:03:32.456613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "NilNoviSubLuna" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } Columns { Name: "value" Type: "Uint64" } } } TxId: 128 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:03:32.457011Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 128:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "NilNoviSubLuna" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } Columns { Name: "value" Type: "Uint64" } } 2025-03-31T16:03:32.457107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalTable 
Propose: opId# 128:0, path# /MyRoot/NilNoviSubLuna 2025-03-31T16:03:32.457283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 128:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/NilNoviSubLuna', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeExternalTable, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2025-03-31T16:03:32.460844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 128, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/NilNoviSubLuna\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeExternalTable, state: EPathStateNoChanges)" TxId: 128 SchemeshardId: 72057594046678944 PathId: 3 PathCreateTxId: 125, at schemeshard: 72057594046678944 2025-03-31T16:03:32.461031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 128, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/NilNoviSubLuna', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeExternalTable, state: EPathStateNoChanges), operation: CREATE EXTERNAL TABLE, path: /MyRoot/NilNoviSubLuna TestModificationResult got TxId: 128, wait until txId: 128 >> KqpRm::SnapshotSharingByExchanger [GOOD] >> TExternalTableTest::ReadOnlyMode [GOOD] >> TExportToS3Tests::ShouldSucceedOnConcurrentExport [GOOD] >> TExportToS3Tests::ShouldSucceedOnConcurrentImport >> TExportToS3Tests::CancelUponTransferringSingleShardTableShouldSucceed [GOOD] >> TExportToS3Tests::CancelUponTransferringMultiShardTableShouldSucceed >> TSequence::AlterTableSetDefaultFromSequence [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_sequence/unittest >> TSequence::CreateTableWithDefaultFromSequenceAndIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:03:22.423716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:03:22.423859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:03:22.423915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:03:22.423963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:03:22.424013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:03:22.424059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:03:22.424141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:03:22.424240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 
10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:03:22.424617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:03:22.523301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:03:22.523373Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:22.529514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:03:22.529641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:03:22.529822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:03:22.535928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:03:22.536234Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:03:22.537019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:22.537291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:03:22.539720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:22.541046Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:22.541120Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:22.541300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:03:22.541349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:22.541406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:03:22.541600Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:03:22.549967Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:03:22.723786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:03:22.724068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:22.724316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:03:22.724551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:03:22.724683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:22.727602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:22.727801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:03:22.728094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:22.728182Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:03:22.728246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:03:22.728290Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:03:22.731022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:22.731134Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:03:22.731187Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:03:22.733560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:22.733624Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:22.733689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:22.733763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:03:22.738160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:03:22.742789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:03:22.744042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:03:22.746123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:22.746310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:03:22.746413Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:22.746793Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:03:22.746879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply 
TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:22.747106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:03:22.747219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:03:22.750071Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:22.750133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:22.750343Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:22.750391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:03:22.750653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:22.750709Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:03:22.750911Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:03:22.750961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:22.751011Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:03:22.751045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:22.751135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:03:22.751190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:22.751247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:03:22.751285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:03:22.751380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:03:22.751425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:03:22.751465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:03:22.753878Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:03:22.754022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:03:22.754066Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
peration in-flight, at schemeshard: 72057594046678944, txId: 102 2025-03-31T16:03:32.542190Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-03-31T16:03:32.542226Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-03-31T16:03:32.542323Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 2/4, is published: true 2025-03-31T16:03:32.542355Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-03-31T16:03:32.543213Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-31T16:03:32.543249Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 102:2 2025-03-31T16:03:32.543347Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:332:2312] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 102 at schemeshard: 72057594046678944 2025-03-31T16:03:32.543722Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [7:124:2150], Recipient [7:124:2150]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-03-31T16:03:32.543752Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-03-31T16:03:32.543795Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:2, at schemeshard: 72057594046678944 2025-03-31T16:03:32.543844Z node 7 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 102:2 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:03:32.544138Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-03-31T16:03:32.544302Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-03-31T16:03:32.544346Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:2 progress is 3/4 2025-03-31T16:03:32.544400Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 3/4 2025-03-31T16:03:32.544459Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:2 progress is 3/4 2025-03-31T16:03:32.544504Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 3/4 2025-03-31T16:03:32.544562Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 3/4, is published: true 2025-03-31T16:03:32.545153Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-31T16:03:32.545194Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 102:0 2025-03-31T16:03:32.545266Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:340:2317] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 102 at schemeshard: 72057594046678944 2025-03-31T16:03:32.545409Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-31T16:03:32.545435Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-31T16:03:32.545905Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [7:124:2150], Recipient [7:124:2150]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-03-31T16:03:32.545935Z node 7 
:FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-03-31T16:03:32.545976Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:03:32.546007Z node 7 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:03:32.546250Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-03-31T16:03:32.546361Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-03-31T16:03:32.546398Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 4/4 2025-03-31T16:03:32.546429Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 4/4 2025-03-31T16:03:32.546470Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 4/4 2025-03-31T16:03:32.546500Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 4/4 2025-03-31T16:03:32.546536Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 4/4, is published: true 2025-03-31T16:03:32.546621Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [7:409:2367] message: TxId: 102 2025-03-31T16:03:32.546691Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 4/4 2025-03-31T16:03:32.546748Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-03-31T16:03:32.546788Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-03-31T16:03:32.546887Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-31T16:03:32.546951Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:1 2025-03-31T16:03:32.547045Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:1 2025-03-31T16:03:32.547114Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-31T16:03:32.547147Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:2 2025-03-31T16:03:32.547171Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:2 2025-03-31T16:03:32.547223Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-03-31T16:03:32.547252Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:3 2025-03-31T16:03:32.547277Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:3 2025-03-31T16:03:32.547326Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-03-31T16:03:32.547841Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435083, Sender [7:124:2150], Recipient [7:124:2150]: NKikimr::NSchemeShard::TEvPrivate::TEvCleanDroppedPaths 2025-03-31T16:03:32.547896Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvCleanDroppedPaths 2025-03-31T16:03:32.547985Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate 
queue, at schemeshard: 72057594046678944 2025-03-31T16:03:32.548052Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-03-31T16:03:32.548138Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-31T16:03:32.549937Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-31T16:03:32.549985Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-31T16:03:32.550116Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-31T16:03:32.550145Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-31T16:03:32.550194Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-31T16:03:32.550247Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-31T16:03:32.550307Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-31T16:03:32.550334Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-31T16:03:32.552618Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-31T16:03:32.552666Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-31T16:03:32.552796Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-31T16:03:32.553238Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-31T16:03:32.553356Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:409:2367] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 102 at schemeshard: 72057594046678944 2025-03-31T16:03:32.553791Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-31T16:03:32.553865Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [7:515:2466] 2025-03-31T16:03:32.555104Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:517:2468], Recipient [7:124:2150]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-03-31T16:03:32.555158Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-03-31T16:03:32.555194Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 2025-03-31T16:03:32.555773Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 2025-03-31T16:03:32.556364Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [7:594:2545], Recipient [7:124:2150]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-03-31T16:03:32.556434Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, 
processing event TEvSchemeShard::TEvDescribeScheme 2025-03-31T16:03:32.556572Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:03:32.556834Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 262us result status StatusPathDoesNotExist 2025-03-31T16:03:32.557028Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeTable, state: EPathStateNotExist), drop stepId: 5000003, drop txId: 102" Path: "/MyRoot/Table" PathId: 2 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ParallelCreateExternalTable [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:111:2142] 2025-03-31T16:03:32.094600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:03:32.094693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:03:32.094732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:03:32.094767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:03:32.094830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:03:32.094879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:03:32.094998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:03:32.095108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:03:32.095433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:03:32.187723Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources:
"ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-03-31T16:03:32.187801Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:32.196338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:03:32.196558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:03:32.196720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:03:32.205137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:03:32.205396Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:03:32.206204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:32.206489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:03:32.209244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:32.210450Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:32.210521Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:32.210893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:03:32.210972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:32.211027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:03:32.211249Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:03:32.219733Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:238:2058] recipient: [1:15:2062] 2025-03-31T16:03:32.390912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:03:32.391201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:32.391447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:03:32.391722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:03:32.391786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:32.394977Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:32.395190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:03:32.395417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:32.395494Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:03:32.395550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:03:32.395608Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:03:32.398342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:32.398420Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:03:32.398466Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:03:32.400896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:32.400981Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:32.401028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:32.401084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:03:32.405951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:03:32.408639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:03:32.408857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:03:32.410063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:32.410224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969446 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:03:32.410287Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:32.410609Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:03:32.410683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:32.410898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:03:32.411007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:03:32.413853Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:32.413924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:32.414114Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:32.414188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:03:32.414500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:32.414575Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:03:32.414686Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:03:32.414727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:32.414771Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:03:32.414812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:32.414850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:03:32.414925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:32.414988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:03:32.415024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:03:32.415128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:03:32.415173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:03:32.415208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:03:32.417566Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575940466789 ... 
125: got EvNotifyTxCompletionResult 2025-03-31T16:03:32.581187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 125: satisfy waiter [1:372:2363] 2025-03-31T16:03:32.581626Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 127, at schemeshard: 72057594046678944 2025-03-31T16:03:32.581677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 126: got EvNotifyTxCompletionResult 2025-03-31T16:03:32.581695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 126: satisfy waiter [1:372:2363] 2025-03-31T16:03:32.581830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 127: got EvNotifyTxCompletionResult 2025-03-31T16:03:32.581845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 127: satisfy waiter [1:372:2363] TestWaitNotification: OK eventTxId 125 TestWaitNotification: OK eventTxId 126 TestWaitNotification: OK eventTxId 127 2025-03-31T16:03:32.582229Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/ExternalTable1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:03:32.582748Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/ExternalTable1" took 480us result status StatusSuccess 2025-03-31T16:03:32.583178Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/ExternalTable1" PathDescription { Self { Name: "ExternalTable1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 126 CreateStep: 5000005 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable1" PathId { OwnerId: 72057594046678944 LocalId: 4 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false } Content: "" } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:03:32.584019Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/ExternalTable2" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:03:32.584249Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path 
"/MyRoot/DirA/ExternalTable2" took 235us result status StatusSuccess 2025-03-31T16:03:32.584547Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/ExternalTable2" PathDescription { Self { Name: "ExternalTable2" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 127 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable2" PathId { OwnerId: 72057594046678944 LocalId: 5 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key1" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false } Columns { Name: "key2" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false } Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 4 NotNull: false } Content: "" } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:03:32.585388Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:03:32.585558Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 190us result status StatusSuccess 2025-03-31T16:03:32.585953Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 125 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } ChildrenExist: true } Children { Name: "ExternalTable1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 126 CreateStep: 5000005 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "ExternalTable2" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 127 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 
Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:03:32.586405Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/ExternalTable1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:03:32.586546Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/ExternalTable1" took 146us result status StatusSuccess 2025-03-31T16:03:32.586727Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/ExternalTable1" PathDescription { Self { Name: "ExternalTable1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 126 CreateStep: 5000005 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable1" PathId { OwnerId: 72057594046678944 LocalId: 4 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false } Content: "" } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:03:32.587218Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/ExternalTable2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:03:32.587416Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/ExternalTable2" took 255us result status StatusSuccess 2025-03-31T16:03:32.587620Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/ExternalTable2" PathDescription { Self { Name: "ExternalTable2" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 127 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" 
EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable2" PathId { OwnerId: 72057594046678944 LocalId: 5 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key1" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false } Columns { Name: "key2" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false } Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 4 NotNull: false } Content: "" } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> YdbIndexTable::MultiShardTableUniqAndNonUniqIndex >> YdbIndexTable::MultiShardTableOneIndexIndexOverlapDataColumn >> YdbIndexTable::OnlineBuild ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::NodesMembershipByExchanger [GOOD] Test command err: 2025-03-31T16:03:31.221187Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-03-31T16:03:31.221814Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/0l5c/000c33/r3tmp/tmpx6hsxa/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-03-31T16:03:31.222451Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/0l5c/000c33/r3tmp/tmpx6hsxa/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/0l5c/000c33/r3tmp/tmpx6hsxa/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 10896417685007062563 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-03-31T16:03:31.265290Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-03-31T16:03:31.265596Z node 2 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-03-31T16:03:31.284814Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [2:461:2100] with ResourceBroker at [2:432:2099] 2025-03-31T16:03:31.284953Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [2:462:2101] 2025-03-31T16:03:31.285154Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [1:460:2338] with ResourceBroker at [1:431:2319] 2025-03-31T16:03:31.285216Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [1:463:2339] 2025-03-31T16:03:31.285367Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-03-31T16:03:31.285411Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2025-03-31T16:03:31.285441Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-03-31T16:03:31.285462Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 
2025-03-31T16:03:31.285609Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-03-31T16:03:31.298834Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1743437011 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-03-31T16:03:31.299098Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-03-31T16:03:31.299200Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1743437011 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-03-31T16:03:31.299627Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-03-31T16:03:31.299767Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-03-31T16:03:31.299888Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-03-31T16:03:31.299925Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-03-31T16:03:31.300041Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1743437011 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-03-31T16:03:31.300250Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-03-31T16:03:31.300287Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-03-31T16:03:31.300365Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1743437011 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-03-31T16:03:31.300993Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2025-03-31T16:03:31.301117Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-03-31T16:03:31.301535Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-03-31T16:03:31.302013Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-03-31T16:03:31.302218Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2025-03-31T16:03:31.302275Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-03-31T16:03:31.302504Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-03-31T16:03:31.302688Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-03-31T16:03:31.302803Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get 
resources info from node: 1 2025-03-31T16:03:32.351503Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request 2025-03-31T16:03:32.351606Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request 2025-03-31T16:03:32.352493Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-03-31T16:03:32.619134Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request
|73.8%| [TA] $(B)/ydb/core/blobstorage/nodewarden/ut/test-results/unittest/{meta.json ... results_accumulator.log}
>> KqpRm::SingleSnapshotByExchanger [GOOD]
>> TExportToS3Tests::ShouldSucceedOnConcurrentTxs [GOOD]
>> YdbIndexTable::MultiShardTableOneIndex
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::SnapshotSharingByExchanger [GOOD]
Test command err: 2025-03-31T16:03:30.626892Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-03-31T16:03:30.627528Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/0l5c/000c51/r3tmp/tmpLIH9xH/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-03-31T16:03:30.628106Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/0l5c/000c51/r3tmp/tmpLIH9xH/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/0l5c/000c51/r3tmp/tmpLIH9xH/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 13632284723898790812 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-03-31T16:03:30.673205Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-03-31T16:03:30.673513Z node 2 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-03-31T16:03:30.692564Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [2:461:2100] with ResourceBroker at [2:432:2099] 2025-03-31T16:03:30.692674Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start
KqpResourceInfoExchangerActor at [2:462:2101] 2025-03-31T16:03:30.692867Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [1:460:2338] with ResourceBroker at [1:431:2319] 2025-03-31T16:03:30.692937Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [1:463:2339] 2025-03-31T16:03:30.693111Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-03-31T16:03:30.693156Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2025-03-31T16:03:30.693189Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-03-31T16:03:30.693211Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2025-03-31T16:03:30.693358Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-03-31T16:03:30.707765Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1743437010 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-03-31T16:03:30.707973Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-03-31T16:03:30.708064Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1743437010 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-03-31T16:03:30.708411Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-03-31T16:03:30.708549Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-03-31T16:03:30.708680Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-03-31T16:03:30.708717Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-03-31T16:03:30.708838Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1743437010 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-03-31T16:03:30.709032Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-03-31T16:03:30.709065Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-03-31T16:03:30.709132Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1743437010 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-03-31T16:03:30.709750Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2025-03-31T16:03:30.709844Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-03-31T16:03:30.710242Z node 
1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-03-31T16:03:30.710645Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-03-31T16:03:30.710847Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2025-03-31T16:03:30.710905Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-03-31T16:03:30.711259Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-03-31T16:03:30.711492Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-03-31T16:03:30.711644Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-03-31T16:03:31.743395Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request 2025-03-31T16:03:31.743518Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request 2025-03-31T16:03:31.743692Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-1-1 (1 by [1:460:2338]) priority=0 resources={0, 100} 2025-03-31T16:03:31.743752Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-1-1 (1 by [1:460:2338]) to queue queue_kqp_resource_manager 2025-03-31T16:03:31.743825Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-1-1 (1 by [1:460:2338]) from queue queue_kqp_resource_manager 2025-03-31T16:03:31.743868Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-1-1 (1 by [1:460:2338]) to queue queue_kqp_resource_manager 2025-03-31T16:03:31.743914Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.250000 (insert task kqp-1-1-1 (1 by [1:460:2338])) 2025-03-31T16:03:31.744127Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 1. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-03-31T16:03:31.744208Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-2-1-2 (2 by [1:460:2338]) priority=0 resources={0, 100} 2025-03-31T16:03:31.744256Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-2-1-2 (2 by [1:460:2338]) to queue queue_kqp_resource_manager 2025-03-31T16:03:31.744288Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-2-1-2 (2 by [1:460:2338]) from queue queue_kqp_resource_manager 2025-03-31T16:03:31.744332Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-2-1-2 (2 by [1:460:2338]) to queue queue_kqp_resource_manager 2025-03-31T16:03:31.744363Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.250000 to 0.500000 (insert task kqp-2-1-2 (2 by [1:460:2338])) 2025-03-31T16:03:31.744423Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 2, taskId: 1. 
Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-03-31T16:03:31.744475Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-03-31T16:03:31.744588Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: alloc, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1743437011 AvailableComputeActors: 80 UsedMemory: 200 TotalMemory: 1000 Memory { Pool: 1 Available: 800 } ExecutionUnits: 80 2025-03-31T16:03:31.744832Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-03-31T16:03:32.010997Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request 2025-03-31T16:03:32.011176Z node 2 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-1-1 (1 by [2:461:2100]) priority=0 resources={0, 100} 2025-03-31T16:03:32.011223Z node 2 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-1-1 (1 by [2:461:2100]) to queue queue_kqp_resource_manager 2025-03-31T16:03:32.011277Z node 2 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-1-1 (1 by [2:461:2100]) from queue queue_kqp_resource_manager 2025-03-31T16:03:32.011314Z node 2 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-1-1 (1 by [2:461:2100]) to queue queue_kqp_resource_manager 2025-03-31T16:03:32.011380Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.250000 (insert task kqp-1-1-1 (1 by [2:461:2100])) 2025-03-31T16:03:32.011504Z node 2 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 1. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-03-31T16:03:32.011589Z node 2 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-2-2-2 (2 by [2:461:2100]) priority=0 resources={0, 100} 2025-03-31T16:03:32.011626Z node 2 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-2-2-2 (2 by [2:461:2100]) to queue queue_kqp_resource_manager 2025-03-31T16:03:32.011663Z node 2 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-2-2-2 (2 by [2:461:2100]) from queue queue_kqp_resource_manager 2025-03-31T16:03:32.011703Z node 2 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-2-2-2 (2 by [2:461:2100]) to queue queue_kqp_resource_manager 2025-03-31T16:03:32.011745Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.250000 to 0.500000 (insert task kqp-2-2-2 (2 by [2:461:2100])) 2025-03-31T16:03:32.011820Z node 2 :KQP_RESOURCE_MANAGER DEBUG: TxId: 2, taskId: 2. 
Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-03-31T16:03:32.011961Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-03-31T16:03:32.012110Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: alloc, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1743437012 AvailableComputeActors: 80 UsedMemory: 200 TotalMemory: 1000 Memory { Pool: 1 Available: 800 } ExecutionUnits: 80 2025-03-31T16:03:32.012514Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-03-31T16:03:32.282385Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request 2025-03-31T16:03:32.282561Z node 1 :RESOURCE_BROKER DEBUG: Finish task kqp-1-1-1 (1 by [1:460:2338]) (release resources {0, 100}) 2025-03-31T16:03:32.282681Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.500000 to 0.350200 (remove task kqp-1-1-1 (1 by [1:460:2338])) 2025-03-31T16:03:32.282779Z node 1 :RESOURCE_BROKER DEBUG: Updated real resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.200400 2025-03-31T16:03:32.282831Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 1. Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 10. 2025-03-31T16:03:32.282882Z node 1 :RESOURCE_BROKER DEBUG: Finish task kqp-2-1-2 (2 by [1:460:2338]) (release resources {0, 100}) 2025-03-31T16:03:32.282919Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.350200 to 0.200400 (remove task kqp-2-1-2 (2 by [1:460:2338])) 2025-03-31T16:03:32.282989Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 2, taskId: 1. Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 10. 2025-03-31T16:03:32.283065Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-03-31T16:03:32.283219Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: alloc, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1743437013 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-03-31T16:03:32.283556Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-03-31T16:03:32.550334Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request 2025-03-31T16:03:32.550472Z node 2 :RESOURCE_BROKER DEBUG: Finish task kqp-1-1-1 (1 by [2:461:2100]) (release resources {0, 100}) 2025-03-31T16:03:32.550557Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.500000 to 0.350200 (remove task kqp-1-1-1 (1 by [2:461:2100])) 2025-03-31T16:03:32.550606Z node 2 :RESOURCE_BROKER DEBUG: Updated real resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.200400 2025-03-31T16:03:32.550666Z node 2 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 1. Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 10. 2025-03-31T16:03:32.550729Z node 2 :RESOURCE_BROKER DEBUG: Finish task kqp-2-2-2 (2 by [2:461:2100]) (release resources {0, 100}) 2025-03-31T16:03:32.550778Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.350200 to 0.200400 (remove task kqp-2-2-2 (2 by [2:461:2100])) 2025-03-31T16:03:32.550830Z node 2 :KQP_RESOURCE_MANAGER DEBUG: TxId: 2, taskId: 2. 
Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 10. 2025-03-31T16:03:32.550915Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-03-31T16:03:32.551147Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: alloc, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1743437014 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-03-31T16:03:32.551597Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-03-31T16:03:32.829462Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::CreateExternalTableShouldFailIfSuchEntityAlreadyExists [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:111:2142] 2025-03-31T16:03:31.941231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:03:31.941357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:03:31.941400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:03:31.941459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:03:31.941513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:03:31.941552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:03:31.941628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:03:31.941742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:03:31.942055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:03:32.032873Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-03-31T16:03:32.032942Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:32.038579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:03:32.038795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG:
TTxUpgradeSchema.Execute 2025-03-31T16:03:32.038934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:03:32.044840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:03:32.045054Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:03:32.045702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:32.045899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:03:32.048052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:32.049054Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:32.049109Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:32.049375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:03:32.049427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:32.049471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:03:32.049648Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:03:32.056806Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:238:2058] recipient: [1:15:2062] 2025-03-31T16:03:32.230237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:03:32.230468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:32.230676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:03:32.230882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:03:32.230943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:32.233387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:32.233525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:03:32.233694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:32.233749Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, 
operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:03:32.233783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:03:32.233813Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:03:32.236119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:32.236179Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:03:32.236219Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:03:32.238191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:32.238242Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:32.238283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:32.238330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:03:32.242353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:03:32.244513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:03:32.244688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:03:32.245683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:32.245810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969446 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:03:32.245859Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:32.246143Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:03:32.246194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:32.246379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:03:32.246457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:03:32.250133Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 
2025-03-31T16:03:32.250196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:32.250379Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:32.250420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:03:32.250693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:32.250742Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:03:32.250841Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:03:32.250876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:32.250920Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:03:32.250951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:32.251004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:03:32.251067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:32.251137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:03:32.251167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:03:32.251237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:03:32.251276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:03:32.251310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:03:32.253491Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575940466789 ... 
Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-03-31T16:03:32.972832Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-03-31T16:03:32.972851Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-03-31T16:03:32.974370Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:03:32.974462Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:03:32.974492Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 102 2025-03-31T16:03:32.974535Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-03-31T16:03:32.974577Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-03-31T16:03:32.975644Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:03:32.975719Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:03:32.975748Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-03-31T16:03:32.975773Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-03-31T16:03:32.975800Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-31T16:03:32.976961Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:03:32.977049Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:03:32.977075Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-03-31T16:03:32.977102Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-03-31T16:03:32.977130Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-31T16:03:32.977194Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-03-31T16:03:32.978444Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard 
Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-31T16:03:32.980166Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-31T16:03:32.980248Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-03-31T16:03:32.980443Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-03-31T16:03:32.980483Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-03-31T16:03:32.980919Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-03-31T16:03:32.981018Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-31T16:03:32.981056Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:334:2325] TestWaitNotification: OK eventTxId 102 2025-03-31T16:03:32.981560Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:03:32.981758Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 230us result status StatusSuccess 2025-03-31T16:03:32.982045Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 2025-03-31T16:03:32.985206Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "ExternalTable" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: 
"/new_location" Columns { Name: "key" Type: "Uint64" } Columns { Name: "value" Type: "Uint64" } } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:03:32.985519Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 103:0, feature flag EnableReplaceIfExistsForExternalEntities 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "ExternalTable" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/new_location" Columns { Name: "key" Type: "Uint64" } Columns { Name: "value" Type: "Uint64" } } 2025-03-31T16:03:32.985597Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalTable Propose: opId# 103:0, path# /MyRoot/ExternalTable 2025-03-31T16:03:32.985733Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/ExternalTable', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeExternalTable, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2025-03-31T16:03:32.988400Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/ExternalTable\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeExternalTable, state: EPathStateNoChanges)" TxId: 103 SchemeshardId: 72057594046678944 PathId: 3 PathCreateTxId: 102, at schemeshard: 72057594046678944 2025-03-31T16:03:32.988550Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/ExternalTable', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeExternalTable, state: EPathStateNoChanges), operation: CREATE EXTERNAL TABLE, path: /MyRoot/ExternalTable TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-03-31T16:03:32.988892Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-03-31T16:03:32.988939Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-03-31T16:03:32.989373Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-03-31T16:03:32.989484Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-03-31T16:03:32.989519Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:342:2333] TestWaitNotification: OK eventTxId 103 2025-03-31T16:03:32.990023Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:03:32.990182Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 180us result status StatusSuccess 2025-03-31T16:03:32.990445Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 3 
SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TExportToS3Tests::ShouldOmitNonStrictStorageSettings [GOOD]
>> TExportToS3Tests::ShouldPreserveIncrBackupFlag
>> TExternalTableTest::ReplaceExternalTableShouldFailIfEntityOfAnotherTypeWithSameNameExists
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ReadOnlyMode [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:111:2142] 2025-03-31T16:03:32.346387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:03:32.346477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:03:32.346536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:03:32.346575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:03:32.346626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:03:32.346670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:03:32.346759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:03:32.346862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:03:32.347222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG:
TxInitSchema.Execute 2025-03-31T16:03:32.427510Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-03-31T16:03:32.427572Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:32.433140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:03:32.433373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:03:32.433554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:03:32.439490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:03:32.439725Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:03:32.440503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:32.440734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:03:32.443323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:32.444469Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:32.444537Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:32.444824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:03:32.444877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:32.444931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:03:32.445110Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:03:32.455948Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:238:2058] recipient: [1:15:2062] 2025-03-31T16:03:32.571060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:03:32.571358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:32.571621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:03:32.571917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:03:32.572006Z node 1 
:FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:32.576179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:32.576375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:03:32.576571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:32.576641Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:03:32.576682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:03:32.576724Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:03:32.582746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:32.582836Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:03:32.582880Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:03:32.588650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:32.588724Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:32.588781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:32.588861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:03:32.593212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:03:32.597888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:03:32.598126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:03:32.599374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:32.599541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969446 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:03:32.599598Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, 
operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:32.599903Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:03:32.599971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:32.600182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:03:32.600274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:03:32.603732Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:32.603798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:32.603996Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:32.604049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:03:32.604326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:32.604381Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:03:32.604489Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:03:32.604552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:32.604614Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:03:32.604657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:32.604705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:03:32.604779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:32.604829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:03:32.604865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:03:32.604967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:03:32.605020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:03:32.605060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:03:32.614760Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575940466789 ... 
HEMESHARD NOTICE: IgniteOperation, opId: 129:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:03:33.043824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-03-31T16:03:33.043913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-03-31T16:03:33.048224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 129, response: Status: StatusAccepted TxId: 129 SchemeshardId: 72057594046678944 PathId: 5, at schemeshard: 72057594046678944 2025-03-31T16:03:33.048450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 129, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /MyRoot/SubDirBBBB 2025-03-31T16:03:33.048842Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:33.048911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:33.049108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-03-31T16:03:33.049204Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:33.049242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:485:2443], at schemeshard: 72057594046678944, txId: 129, path id: 1 2025-03-31T16:03:33.049281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:485:2443], at schemeshard: 72057594046678944, txId: 129, path id: 5 2025-03-31T16:03:33.049355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 129:0, at schemeshard: 72057594046678944 2025-03-31T16:03:33.049398Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId# 129:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:03:33.049454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 129 ready parts: 1/1 2025-03-31T16:03:33.049581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 129 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:03:33.050797Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 129 2025-03-31T16:03:33.050937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 129 2025-03-31T16:03:33.050993Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 129 2025-03-31T16:03:33.051034Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2025-03-31T16:03:33.051071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for 
pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2025-03-31T16:03:33.052223Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 129 2025-03-31T16:03:33.052349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 129 2025-03-31T16:03:33.052388Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 129 2025-03-31T16:03:33.052420Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 2 2025-03-31T16:03:33.052456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-03-31T16:03:33.052546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 129, ready parts: 0/1, is published: true 2025-03-31T16:03:33.056837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 129:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:129 msg type: 269090816 2025-03-31T16:03:33.057065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 129, partId: 4294967295, tablet: 72057594046316545 2025-03-31T16:03:33.059383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 FAKE_COORDINATOR: Add transaction: 129 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 129 at step: 5000005 2025-03-31T16:03:33.059969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2025-03-31T16:03:33.060290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:33.060425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 129 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969446 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:03:33.060487Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId# 129:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000005, at schemeshard: 72057594046678944 2025-03-31T16:03:33.060679Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 129:0 128 -> 240 2025-03-31T16:03:33.060861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-03-31T16:03:33.060925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-03-31T16:03:33.063257Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:33.063322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:33.063538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard 
DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-03-31T16:03:33.063645Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:33.063679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:485:2443], at schemeshard: 72057594046678944, txId: 129, path id: 1 2025-03-31T16:03:33.063731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:485:2443], at schemeshard: 72057594046678944, txId: 129, path id: 5 FAKE_COORDINATOR: Erasing txId 129 2025-03-31T16:03:33.064008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 129:0, at schemeshard: 72057594046678944 2025-03-31T16:03:33.064059Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 129:0 ProgressState 2025-03-31T16:03:33.064163Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#129:0 progress is 1/1 2025-03-31T16:03:33.064201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2025-03-31T16:03:33.064251Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#129:0 progress is 1/1 2025-03-31T16:03:33.064305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2025-03-31T16:03:33.064348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 129, ready parts: 1/1, is published: false 2025-03-31T16:03:33.064390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2025-03-31T16:03:33.064434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 129:0 2025-03-31T16:03:33.064468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 129:0 2025-03-31T16:03:33.064559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-03-31T16:03:33.064607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 129, publications: 2, subscribers: 0 2025-03-31T16:03:33.064643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 129, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2025-03-31T16:03:33.064675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 129, [OwnerId: 72057594046678944, LocalPathId: 5], 3 2025-03-31T16:03:33.065422Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 129 2025-03-31T16:03:33.065526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 129 2025-03-31T16:03:33.065564Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 129 2025-03-31T16:03:33.065603Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-03-31T16:03:33.065642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2025-03-31T16:03:33.066462Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 
4 LocalPathId: 5 Version: 3 PathOwnerId: 72057594046678944, cookie: 129 2025-03-31T16:03:33.066552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 5 Version: 3 PathOwnerId: 72057594046678944, cookie: 129 2025-03-31T16:03:33.066599Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 129 2025-03-31T16:03:33.066632Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 3 2025-03-31T16:03:33.066661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-03-31T16:03:33.066732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 129, subscribers: 0 2025-03-31T16:03:33.070398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2025-03-31T16:03:33.070506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 TestModificationResult got TxId: 129, wait until txId: 129
>> TExportToS3Tests::CancelUponCopyingTablesShouldSucceed [GOOD]
>> TExportToS3Tests::AuditCompletedExport
>> TExportToS3Tests::ExportIndexTablePartitioningSettings [GOOD]
>> TExportToS3Tests::ChecksumsWithCompression [GOOD]
>> TExternalTableTest::DropExternalTable
>> KqpPg::InsertFromSelect_Simple-useSink [GOOD]
>> KqpPg::InsertFromSelect_NoReorder-useSink
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_sequence/unittest >> TSequence::AlterTableSetDefaultFromSequence [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:03:22.797745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:03:22.797872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:03:22.797910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:03:22.797943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:03:22.797981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:03:22.798039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:03:22.798101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:03:22.798172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s,
DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:03:22.798512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:03:22.911430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:03:22.911505Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:22.924434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:03:22.924548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:03:22.924690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:03:22.946040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:03:22.946326Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:03:22.947052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:22.947338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:03:22.954374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:22.955704Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:22.955780Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:22.955954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:03:22.955999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:22.956063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:03:22.956230Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:03:22.977266Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:03:23.301044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:03:23.301283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:23.301502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:03:23.301707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:03:23.301755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:23.312203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: 
StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:23.312423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:03:23.312717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:23.312800Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:03:23.312864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:03:23.312907Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:03:23.316331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:23.316421Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:03:23.316463Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:03:23.324054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:23.324121Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:23.324200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:23.324270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:03:23.328094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:03:23.332110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:03:23.332373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:03:23.333511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:23.333685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:03:23.333750Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:23.334106Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:03:23.334162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 
72057594046678944 2025-03-31T16:03:23.334323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:03:23.334452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:03:23.344638Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:23.344696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:23.344908Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:23.344950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:03:23.345174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:23.345224Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:03:23.345401Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:03:23.345438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:23.345474Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:03:23.345504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:23.345541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:03:23.345583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:23.345633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:03:23.345668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:03:23.345761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:03:23.345810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:03:23.345842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:03:23.352465Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:03:23.352627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:03:23.352667Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
d [OwnerId: 72057594046678944, LocalPathId: 6] was 4 2025-03-31T16:03:33.227476Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 114, ready parts: 0/1, is published: true 2025-03-31T16:03:33.227535Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 FAKE_COORDINATOR: Erasing txId 114 2025-03-31T16:03:33.228409Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269550080, Sender [7:984:2930], Recipient [7:124:2150]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 114 Step: 5000014 OrderId: 114 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1542 } } 2025-03-31T16:03:33.228449Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransactionResult 2025-03-31T16:03:33.228551Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 114 Step: 5000014 OrderId: 114 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1542 } } 2025-03-31T16:03:33.228586Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 114, tablet: 72075186233409549, partId: 0 2025-03-31T16:03:33.228722Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 114:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 114 Step: 5000014 OrderId: 114 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1542 } } 2025-03-31T16:03:33.228844Z node 7 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 114 Step: 5000014 OrderId: 114 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1542 } } 2025-03-31T16:03:33.228902Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-03-31T16:03:33.229988Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [7:1046:2984], Recipient [7:124:2150]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:03:33.230040Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:03:33.230078Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-03-31T16:03:33.230911Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269551620, Sender [7:984:2930], Recipient [7:124:2150]: NKikimrTxDataShard.TEvSchemaChanged Source { RawX1: 984 RawX2: 30064774002 } Origin: 72075186233409549 State: 2 TxId: 114 Step: 0 Generation: 2 2025-03-31T16:03:33.230957Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChanged 2025-03-31T16:03:33.231324Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 984 RawX2: 30064774002 } Origin: 72075186233409549 State: 2 TxId: 114 Step: 0 Generation: 2 2025-03-31T16:03:33.231411Z node 7 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 114, tablet: 72075186233409549, partId: 0 2025-03-31T16:03:33.231604Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 114:0, at schemeshard: 72057594046678944, message: Source { RawX1: 984 RawX2: 30064774002 } Origin: 72075186233409549 State: 2 TxId: 114 Step: 0 Generation: 2 2025-03-31T16:03:33.231685Z node 7 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 114:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-03-31T16:03:33.231820Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 114:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 984 RawX2: 30064774002 } Origin: 72075186233409549 State: 2 TxId: 114 Step: 0 Generation: 2 2025-03-31T16:03:33.231914Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 114:0, shardIdx: 72057594046678944:4, datashard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:33.231961Z node 7 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 114:0, at schemeshard: 72057594046678944 2025-03-31T16:03:33.232017Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 114:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2025-03-31T16:03:33.232074Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 114:0 129 -> 240 2025-03-31T16:03:33.232275Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-03-31T16:03:33.234423Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-31T16:03:33.238461Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 114 2025-03-31T16:03:33.238556Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-31T16:03:33.238698Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 114 2025-03-31T16:03:33.238726Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-31T16:03:33.238838Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 114:0, at schemeshard: 72057594046678944 2025-03-31T16:03:33.238870Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-31T16:03:33.239037Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 114:0, at schemeshard: 72057594046678944 2025-03-31T16:03:33.239133Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-31T16:03:33.239190Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 114:0 2025-03-31T16:03:33.239345Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:984:2930] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 114 at schemeshard: 72057594046678944 2025-03-31T16:03:33.239799Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [7:124:2150], Recipient [7:124:2150]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-03-31T16:03:33.239854Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-03-31T16:03:33.239924Z node 
7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 114:0, at schemeshard: 72057594046678944 2025-03-31T16:03:33.239977Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 114:0 ProgressState 2025-03-31T16:03:33.240140Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-03-31T16:03:33.240191Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#114:0 progress is 1/1 2025-03-31T16:03:33.240245Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 114 ready parts: 1/1 2025-03-31T16:03:33.240333Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#114:0 progress is 1/1 2025-03-31T16:03:33.240393Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 114 ready parts: 1/1 2025-03-31T16:03:33.240453Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 114, ready parts: 1/1, is published: true 2025-03-31T16:03:33.240559Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [7:389:2357] message: TxId: 114 2025-03-31T16:03:33.240636Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 114 ready parts: 1/1 2025-03-31T16:03:33.240715Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 114:0 2025-03-31T16:03:33.240759Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 114:0 2025-03-31T16:03:33.240935Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-03-31T16:03:33.244549Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-31T16:03:33.244751Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:389:2357] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 114 at schemeshard: 72057594046678944 2025-03-31T16:03:33.245029Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 114: got EvNotifyTxCompletionResult 2025-03-31T16:03:33.245117Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 114: satisfy waiter [7:1014:2952] 2025-03-31T16:03:33.245425Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:1016:2954], Recipient [7:124:2150]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-03-31T16:03:33.245472Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-03-31T16:03:33.245504Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 114 TestModificationResults wait txId: 115 2025-03-31T16:03:33.246626Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [7:1055:2993], Recipient [7:124:2150]: {TEvModifySchemeTransaction txid# 115 TabletId# 72057594046678944} 2025-03-31T16:03:33.246687Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-03-31T16:03:33.249510Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterTable AlterTable { Name: "Table3" Columns { Name: "value" DefaultFromSequence: "/MyRoot/seq1" } } } TxId: 115 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:03:33.249878Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTable Propose, path: /MyRoot/Table3, pathId: , opId: 115:0, at 
schemeshard: 72057594046678944 2025-03-31T16:03:33.250271Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 115:1, propose status:StatusInvalidParameter, reason: Column 'value' is of type Bool but default expression is of type Int64, at schemeshard: 72057594046678944 2025-03-31T16:03:33.250518Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-03-31T16:03:33.253705Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 115, response: Status: StatusInvalidParameter Reason: "Column \'value\' is of type Bool but default expression is of type Int64" TxId: 115 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:03:33.253879Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 115, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Column 'value' is of type Bool but default expression is of type Int64, operation: ALTER TABLE, path: /MyRoot/Table3 2025-03-31T16:03:33.253991Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944
TestModificationResult got TxId: 115, wait until txId: 115
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::SingleSnapshotByExchanger [GOOD]
Test command err:
2025-03-31T16:03:31.746805Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-03-31T16:03:31.750296Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/0l5c/000c15/r3tmp/tmp2mtaaq/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-03-31T16:03:31.750906Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/0l5c/000c15/r3tmp/tmp2mtaaq/pdisk_1.dat": unknown reason, errno# 0.
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/0l5c/000c15/r3tmp/tmp2mtaaq/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 7509788434526844015 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-03-31T16:03:31.796973Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-03-31T16:03:31.797205Z node 2 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-03-31T16:03:31.813592Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [2:461:2100] with ResourceBroker at [2:432:2099] 2025-03-31T16:03:31.813738Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [2:462:2101] 2025-03-31T16:03:31.813932Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [1:460:2338] with ResourceBroker at [1:431:2319] 2025-03-31T16:03:31.814000Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [1:463:2339] 2025-03-31T16:03:31.814158Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-03-31T16:03:31.814218Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2025-03-31T16:03:31.814248Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-03-31T16:03:31.814270Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 
2025-03-31T16:03:31.814421Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-03-31T16:03:31.825425Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1743437011 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-03-31T16:03:31.825642Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-03-31T16:03:31.825735Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1743437011 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-03-31T16:03:31.826078Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-03-31T16:03:31.826204Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-03-31T16:03:31.826323Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-03-31T16:03:31.826356Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-03-31T16:03:31.826478Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1743437011 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-03-31T16:03:31.826675Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-03-31T16:03:31.826711Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-03-31T16:03:31.826791Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1743437011 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-03-31T16:03:31.827564Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2025-03-31T16:03:31.827675Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-03-31T16:03:31.828117Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-03-31T16:03:31.828573Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-03-31T16:03:31.828801Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2025-03-31T16:03:31.828894Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-03-31T16:03:31.829159Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-03-31T16:03:31.829345Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-03-31T16:03:31.829543Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get 
resources info from node: 1 2025-03-31T16:03:31.833016Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-2-1 (1 by [1:460:2338]) priority=0 resources={0, 100} 2025-03-31T16:03:31.833099Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-2-1 (1 by [1:460:2338]) to queue queue_kqp_resource_manager 2025-03-31T16:03:31.833161Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-2-1 (1 by [1:460:2338]) from queue queue_kqp_resource_manager 2025-03-31T16:03:31.833218Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-2-1 (1 by [1:460:2338]) to queue queue_kqp_resource_manager 2025-03-31T16:03:31.833291Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.250000 (insert task kqp-1-2-1 (1 by [1:460:2338])) 2025-03-31T16:03:31.833517Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 2. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-03-31T16:03:31.833617Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-2-1-2 (2 by [1:460:2338]) priority=0 resources={0, 100} 2025-03-31T16:03:31.833661Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-2-1-2 (2 by [1:460:2338]) to queue queue_kqp_resource_manager 2025-03-31T16:03:31.833714Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-2-1-2 (2 by [1:460:2338]) from queue queue_kqp_resource_manager 2025-03-31T16:03:31.833754Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-2-1-2 (2 by [1:460:2338]) to queue queue_kqp_resource_manager 2025-03-31T16:03:31.833815Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.250000 to 0.500000 (insert task kqp-2-1-2 (2 by [1:460:2338])) 2025-03-31T16:03:31.833885Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 2, taskId: 1. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-03-31T16:03:31.834127Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-03-31T16:03:31.834283Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: alloc, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1743437011 AvailableComputeActors: 80 UsedMemory: 200 TotalMemory: 1000 Memory { Pool: 1 Available: 800 } ExecutionUnits: 80 2025-03-31T16:03:31.834561Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-03-31T16:03:32.996211Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request 2025-03-31T16:03:32.996359Z node 1 :RESOURCE_BROKER DEBUG: Finish task kqp-1-2-1 (1 by [1:460:2338]) (release resources {0, 100}) 2025-03-31T16:03:32.996415Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.500000 to 0.300000 (remove task kqp-1-2-1 (1 by [1:460:2338])) 2025-03-31T16:03:32.996449Z node 1 :RESOURCE_BROKER DEBUG: Updated real resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.100000 2025-03-31T16:03:32.996497Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 2. Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 10. 
2025-03-31T16:03:32.996556Z node 1 :RESOURCE_BROKER DEBUG: Finish task kqp-2-1-2 (2 by [1:460:2338]) (release resources {0, 100}) 2025-03-31T16:03:32.996595Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.300000 to 0.100000 (remove task kqp-2-1-2 (2 by [1:460:2338])) 2025-03-31T16:03:32.996634Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 2, taskId: 1. Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 10. 2025-03-31T16:03:32.996800Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-03-31T16:03:32.996972Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: alloc, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1743437012 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-03-31T16:03:32.997303Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-03-31T16:03:33.384526Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request
>> TExportToS3Tests::RebootDuringCompletion [GOOD]
>> TExportToS3Tests::SchemaMapping
>> TSubscriberTest::Sync
>> TExportToS3Tests::EncryptedExport [GOOD]
>> TSubscriberCombinationsTest::CombinationsRootDomain
>> TSubscriberTest::StrongNotificationAfterCommit
>> BuildStatsHistogram::Ten_Crossed_Log [GOOD]
>> BuildStatsHistogram::Five_Five_Mixed
>> TSubscriberTest::NotifyUpdate
>> TExportToS3Tests::Changefeeds [GOOD]
|73.8%| [TA] $(B)/ydb/core/kqp/rm_service/ut/test-results/unittest/{meta.json ... results_accumulator.log}
>> TSubscriberTest::Sync [GOOD]
>> TSchemeShardUserAttrsTest::MkDir
>> TSubscriberTest::StrongNotificationAfterCommit [GOOD]
>> TSchemeShardUserAttrsTest::Boot
>> TExternalTableTest::ReplaceExternalTableShouldFailIfEntityOfAnotherTypeWithSameNameExists [GOOD]
>> TSubscriberTest::NotifyUpdate [GOOD]
>> TExportToS3Tests::ShouldPreserveIncrBackupFlag [GOOD]
>> TExportToS3Tests::ShouldExcludeBackupTableFromStats
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ShouldSucceedOnConcurrentTxs [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:03:27.422817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:03:27.422950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:03:27.422993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:03:27.423029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:03:27.423167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:03:27.423196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-03-31T16:03:27.423256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:03:27.423365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:03:27.423719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:03:27.527427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:03:27.527528Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:27.537778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:03:27.537939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:03:27.538161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:03:27.564464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:03:27.564734Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:03:27.565571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:27.565801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:03:27.568675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:27.570125Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:27.570238Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:27.570433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:03:27.570489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:27.570535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:03:27.570745Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:03:27.584851Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:03:27.733136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:03:27.733437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:27.733685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:03:27.733965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 
72057594046678944 2025-03-31T16:03:27.734037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:27.736784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:27.736951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:03:27.737190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:27.737288Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:03:27.737332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:03:27.737373Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:03:27.740448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:27.740518Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:03:27.740556Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:03:27.743113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:27.743185Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:27.743228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:27.743275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:03:27.747156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:03:27.750831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:03:27.751177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:03:27.752368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:27.752535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:03:27.752589Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:27.752904Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:03:27.752956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:27.753159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:03:27.753271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:03:27.755894Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:27.755951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:27.756173Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:27.756215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:03:27.756504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:27.756548Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:03:27.756664Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:03:27.756703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:27.756761Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:03:27.756800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:27.756838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:03:27.756887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:27.756934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:03:27.756965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:03:27.757037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:03:27.757075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:03:27.757105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:03:27.759169Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:03:27.759312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:03:27.759424Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
Publication in-flight, co ... ardId: 72075186233409547 CpuTimeUsec: 257 } } 2025-03-31T16:03:33.573746Z node 4 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046678944 2025-03-31T16:03:33.573869Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: CollectProposeTransactionResults accept TEvProposeTransactionResult, shard: 72075186233409547, shardIdx: 72057594046678944:2, operationId: 281474976710760:0, left await: 0, at schemeshard: 72057594046678944 2025-03-31T16:03:33.573928Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710760:0 3 -> 128 2025-03-31T16:03:33.577202Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2025-03-31T16:03:33.577377Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2025-03-31T16:03:33.577444Z node 4 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710760:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:03:33.577538Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710760 ready parts: 1/1 2025-03-31T16:03:33.577699Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409547 Flags: 2 } ExecLevel: 0 TxId: 281474976710760 MinStep: 5000006 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:03:33.579667Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710760:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710760 msg type: 269090816 2025-03-31T16:03:33.579795Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710760, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710760 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710760 at step: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72075186233409547 for txId: 281474976710760 at step: 5000006 2025-03-31T16:03:33.580106Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000006, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:33.580227Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710760 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 17179871340 } } Step: 5000006 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:03:33.580290Z node 4 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710760:0 HandleReply TEvOperationPlan, stepId: 5000006, at schemeshard: 72057594046678944 2025-03-31T16:03:33.580463Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710760:0 128 -> 129 2025-03-31T16:03:33.580879Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000006 2025-03-31T16:03:33.630284Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:33.630343Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard 
DescribePath, at schemeshard: 72057594046678944, txId: 281474976710760, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-03-31T16:03:33.630645Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:33.630688Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:207:2209], at schemeshard: 72057594046678944, txId: 281474976710760, path id: 4 2025-03-31T16:03:33.631267Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2025-03-31T16:03:33.631334Z node 4 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710760:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:03:33.632306Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710760 2025-03-31T16:03:33.632402Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710760 2025-03-31T16:03:33.632434Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710760 2025-03-31T16:03:33.632471Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710760, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 3 2025-03-31T16:03:33.632507Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-03-31T16:03:33.632587Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 281474976710760 REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:1767 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 34AD0A71-B5BD-4C2B-8150-99B33E400917 amz-sdk-request: attempt=1 content-length: 73 content-md5: fOPVvXe4lXvxEe0jvYDDBA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /metadata.json / / 73 2025-03-31T16:03:33.635906Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710760 REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:1767 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 117DE472-1571-47FA-95DD-954FA915D579 amz-sdk-request: attempt=1 content-length: 465 content-md5: I8OyVo4ze5n8ehz5iBQr/Q== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /scheme.pb / / 465 REQUEST: PUT /data_00.csv HTTP/1.1 HEADERS: Host: localhost:1767 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 042BFE34-87AB-4023-9C5C-2982301884B7 amz-sdk-request: attempt=1 content-length: 0 content-md5: 1B2M2Y8AsgTpgAmY7PhCfg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv / / 0 
2025-03-31T16:03:33.664257Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 492 RawX2: 17179871645 } Origin: 72075186233409547 State: 2 TxId: 281474976710760 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-03-31T16:03:33.664321Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976710760, tablet: 72075186233409547, partId: 0 2025-03-31T16:03:33.664465Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944, message: Source { RawX1: 492 RawX2: 17179871645 } Origin: 72075186233409547 State: 2 TxId: 281474976710760 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-03-31T16:03:33.664595Z node 4 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710760:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 492 RawX2: 17179871645 } Origin: 72075186233409547 State: 2 TxId: 281474976710760 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-03-31T16:03:33.664673Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710760:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:33.664727Z node 4 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2025-03-31T16:03:33.664771Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976710760:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-03-31T16:03:33.664823Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710760:0 129 -> 240 2025-03-31T16:03:33.665018Z node 4 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 281474976710760:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:33.666843Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2025-03-31T16:03:33.666999Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2025-03-31T16:03:33.667042Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710760:0 ProgressState 2025-03-31T16:03:33.667201Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1 2025-03-31T16:03:33.667235Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-03-31T16:03:33.667274Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1 2025-03-31T16:03:33.667303Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-03-31T16:03:33.667340Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 1/1, is published: true 
2025-03-31T16:03:33.667405Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [4:123:2149] message: TxId: 281474976710760 2025-03-31T16:03:33.667446Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-03-31T16:03:33.667483Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710760:0 2025-03-31T16:03:33.667515Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710760:0 2025-03-31T16:03:33.667633Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-03-31T16:03:33.669612Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2025-03-31T16:03:33.669677Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710760 2025-03-31T16:03:33.671648Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-31T16:03:33.671714Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [4:521:2482]
TestWaitNotification: OK eventTxId 102
|73.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest
|73.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest
>> TExternalTableTest::DropExternalTable [GOOD]
>> TExternalTableTest::Decimal
>> TExportToS3Tests::ShouldSucceedOnConcurrentImport [GOOD]
>> TExportToS3Tests::ShouldRetryAtFinalStage
>> TSchemeShardUserAttrsTest::SpecialAttributes
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::Sync [GOOD]
Test command err:
2025-03-31T16:03:34.872259Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-03-31T16:03:34.875316Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:3:2050] 2025-03-31T16:03:34.875460Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:6:2053] 2025-03-31T16:03:34.875508Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:9:2056] 2025-03-31T16:03:34.875584Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:36:2066] 2025-03-31T16:03:34.875645Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:37:2066] 2025-03-31T16:03:34.875703Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:35:2066][path] Set up state: owner# [1:33:2064], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-31T16:03:34.875848Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:38:2066]
2025-03-31T16:03:34.875918Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:35:2066][path] Path was already updated: owner# [1:33:2064], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-31T16:03:34.876070Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:33:2064], cookie# 1 2025-03-31T16:03:34.876191Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:36:2066], cookie# 1 2025-03-31T16:03:34.876276Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:37:2066], cookie# 1 2025-03-31T16:03:34.876325Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:38:2066], cookie# 1 2025-03-31T16:03:34.876375Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:3:2050], cookie# 1 2025-03-31T16:03:34.876420Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:6:2053], cookie# 1 2025-03-31T16:03:34.876449Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:9:2056], cookie# 1 2025-03-31T16:03:34.876510Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:36:2066], cookie# 1 2025-03-31T16:03:34.876553Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-03-31T16:03:34.876601Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:37:2066], cookie# 1 2025-03-31T16:03:34.876646Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-03-31T16:03:34.876711Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:38:2066], cookie# 1 2025-03-31T16:03:34.876744Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Unexpected sync response: sender# [1:38:2066], cookie# 1
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ExportIndexTablePartitioningSettings [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:03:28.289707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval#
172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:03:28.289819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:03:28.289874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:03:28.289918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:03:28.289965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:03:28.289996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:03:28.290088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:03:28.290167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:03:28.290539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:03:28.381370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:03:28.381439Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:28.388193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:03:28.388340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:03:28.388484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:03:28.394679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:03:28.394933Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:03:28.395854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:28.396126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:03:28.399567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:28.400945Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:28.401015Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:28.401215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:03:28.401273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:28.401315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:03:28.401534Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:03:28.412202Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:03:28.558632Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:03:28.558849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:28.559073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:03:28.559352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:03:28.559416Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:28.562501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:28.562663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:03:28.562919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:28.563002Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:03:28.563065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:03:28.563128Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:03:28.574338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:28.574429Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:03:28.574476Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:03:28.588780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:28.588854Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:28.588895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:28.588964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:03:28.592846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:03:28.599902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:03:28.600150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:03:28.601299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:28.601468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:03:28.601524Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:28.601820Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:03:28.601879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:28.602068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:03:28.602183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:03:28.606634Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:28.606704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:28.606956Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:28.607010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:03:28.607290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:28.607343Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:03:28.607467Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:03:28.607512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:28.607568Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:03:28.607604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:28.607651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:03:28.607711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:28.607765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:03:28.607796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:03:28.607867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] 
was 2 2025-03-31T16:03:28.607904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:03:28.607939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:03:28.610096Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:03:28.610250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:03:28.610293Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... Id: 72075186233409548 CpuTimeUsec: 362 } } 2025-03-31T16:03:33.899681Z node 4 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046678944 2025-03-31T16:03:33.899798Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: CollectProposeTransactionResults accept TEvProposeTransactionResult, shard: 72075186233409548, shardIdx: 72057594046678944:3, operationId: 281474976710759:0, left await: 0, at schemeshard: 72057594046678944 2025-03-31T16:03:33.899851Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 3 -> 128 2025-03-31T16:03:33.912121Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-03-31T16:03:33.912342Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-03-31T16:03:33.912400Z node 4 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710759:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:03:33.912487Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710759 ready parts: 1/1 2025-03-31T16:03:33.912642Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409548 Flags: 2 } ExecLevel: 0 TxId: 281474976710759 MinStep: 5000005 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:03:33.918060Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710759:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710759 msg type: 269090816 2025-03-31T16:03:33.918181Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710759, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710759 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710759 at step: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72075186233409548 for txId: 281474976710759 at step: 5000005 2025-03-31T16:03:33.918455Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:33.918529Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710759 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 17179871340 } } Step: 5000005 MediatorID: 0 
TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:03:33.918577Z node 4 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710759:0 HandleReply TEvOperationPlan, stepId: 5000005, at schemeshard: 72057594046678944 2025-03-31T16:03:33.918694Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 128 -> 129 2025-03-31T16:03:33.918803Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:19444 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 6D68C4D4-94D2-4B70-ACDF-8EFC7A331C87 amz-sdk-request: attempt=1 content-length: 73 content-md5: q/ySd5GvS6I/qOVxS/4Thg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /metadata.json / / 73 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000005 2025-03-31T16:03:33.986615Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:33.986672Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710759, path id: [OwnerId: 72057594046678944, LocalPathId: 6] 2025-03-31T16:03:33.986944Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:33.987007Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:207:2209], at schemeshard: 72057594046678944, txId: 281474976710759, path id: 6 2025-03-31T16:03:33.987574Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-03-31T16:03:33.987658Z node 4 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 ProgressState, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 281474976710759 2025-03-31T16:03:33.988665Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2025-03-31T16:03:33.988766Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2025-03-31T16:03:33.988798Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710759 2025-03-31T16:03:33.988834Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710759, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 3 2025-03-31T16:03:33.988872Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 4 2025-03-31T16:03:33.988978Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 0/1, is published: true REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:19444 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 6AAE78B5-5D31-437B-B952-66D6C76BD055 amz-sdk-request: attempt=1 content-length: 602 
content-md5: GgrERoUcI3sF1n0Je2MTCQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /scheme.pb / / 602 2025-03-31T16:03:33.992990Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710759 REQUEST: PUT /data_00.csv HTTP/1.1 HEADERS: Host: localhost:19444 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 36D54422-792D-41AF-B694-7BE9D598F4D0 amz-sdk-request: attempt=1 content-length: 0 content-md5: 1B2M2Y8AsgTpgAmY7PhCfg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv / / 0 2025-03-31T16:03:34.015758Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 507 RawX2: 17179871649 } Origin: 72075186233409548 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-03-31T16:03:34.015812Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976710759, tablet: 72075186233409548, partId: 0 2025-03-31T16:03:34.015987Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944, message: Source { RawX1: 507 RawX2: 17179871649 } Origin: 72075186233409548 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-03-31T16:03:34.016093Z node 4 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 507 RawX2: 17179871649 } Origin: 72075186233409548 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-03-31T16:03:34.016164Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710759:0, shardIdx: 72057594046678944:3, datashard: 72075186233409548, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:34.016199Z node 4 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-03-31T16:03:34.016241Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976710759:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2025-03-31T16:03:34.016288Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 129 -> 240 2025-03-31T16:03:34.016449Z node 4 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 281474976710759:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:34.018377Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-03-31T16:03:34.018722Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at 
schemeshard: 72057594046678944 2025-03-31T16:03:34.018763Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710759:0 ProgressState 2025-03-31T16:03:34.018869Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 1/1 2025-03-31T16:03:34.018895Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-03-31T16:03:34.018940Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 1/1 2025-03-31T16:03:34.018968Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-03-31T16:03:34.019020Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 1/1, is published: true 2025-03-31T16:03:34.019113Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [4:123:2149] message: TxId: 281474976710759 2025-03-31T16:03:34.019157Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-03-31T16:03:34.019187Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710759:0 2025-03-31T16:03:34.019209Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710759:0 2025-03-31T16:03:34.019320Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-03-31T16:03:34.022205Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710759 2025-03-31T16:03:34.022276Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710759 2025-03-31T16:03:34.024317Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-31T16:03:34.024378Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [4:538:2488] TestWaitNotification: OK eventTxId 102
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::StrongNotificationAfterCommit [GOOD]
Test command err: 2025-03-31T16:03:35.092304Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-03-31T16:03:35.094610Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2025-03-31T16:03:35.094730Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2025-03-31T16:03:35.094779Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:9:2056] 2025-03-31T16:03:35.094897Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:35:2065] 2025-03-31T16:03:35.094989Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:36:2065] 2025-03-31T16:03:35.095046Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:34:2065][path] Set up state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-31T16:03:35.095116Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG:
[main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:37:2065] 2025-03-31T16:03:35.095153Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:34:2065][path] Ignore empty state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-31T16:03:35.095596Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2025-03-31T16:03:35.095669Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:35:2065] 2025-03-31T16:03:35.095736Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:34:2065][path] Update to strong state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-31T16:03:35.095903Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2025-03-31T16:03:35.095957Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:36:2065] 2025-03-31T16:03:35.095994Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:34:2065][path] Ignore empty state: owner# [1:33:2064], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::NotifyUpdate [GOOD]
Test command err: 2025-03-31T16:03:35.167609Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-03-31T16:03:35.169859Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2025-03-31T16:03:35.170001Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2025-03-31T16:03:35.170050Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:9:2056] 2025-03-31T16:03:35.170129Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:35:2065] 2025-03-31T16:03:35.170217Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:36:2065] 2025-03-31T16:03:35.170276Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:34:2065][path] Set up state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-31T16:03:35.170332Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:37:2065] 2025-03-31T16:03:35.170369Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:34:2065][path] Ignore empty state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId:
AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-31T16:03:35.171705Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:3:2050] 2025-03-31T16:03:35.171816Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:35:2065] 2025-03-31T16:03:35.171883Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:34:2065][path] Update to strong state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements }
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ChecksumsWithCompression [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:03:28.339672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:03:28.339759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:03:28.339809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:03:28.339843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:03:28.339883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:03:28.339913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:03:28.339967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:03:28.340036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:03:28.340342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:03:28.424406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:03:28.424462Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:28.429419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:03:28.429517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:03:28.429642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:03:28.433449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-03-31T16:03:28.433621Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:03:28.434261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:28.434449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:03:28.436318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:28.437241Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:28.437286Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:28.437407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:03:28.437453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:28.437484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:03:28.437629Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:03:28.445554Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:03:28.611584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:03:28.611815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:28.612015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:03:28.612261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:03:28.612334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:28.620097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:28.620249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:03:28.620518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:28.620582Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:03:28.620620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:03:28.620668Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:03:28.631861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:28.631937Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:03:28.631976Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:03:28.634020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:28.634072Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:28.634121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:28.634203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:03:28.638218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:03:28.640363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:03:28.640571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:03:28.641743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:28.641911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:03:28.641961Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:28.642241Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:03:28.642295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:28.642463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:03:28.642565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:03:28.644860Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:28.644902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 
2025-03-31T16:03:28.645045Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:28.645073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:03:28.645253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:28.645288Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:03:28.645366Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:03:28.645413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:28.645469Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:03:28.645511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:28.645562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:03:28.645614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:28.645660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:03:28.645695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:03:28.645756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:03:28.645791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:03:28.645824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:03:28.647511Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:03:28.647629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:03:28.647671Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
el: 0 TxId: 281474976710759 MinStep: 5000005 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:03:33.970797Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710759:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710759 msg type: 269090816 2025-03-31T16:03:33.970880Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710759, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710759 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710759 at step: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72075186233409547 for txId: 281474976710759 at step: 5000005 2025-03-31T16:03:33.971177Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:33.971265Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710759 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 17179871340 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:03:33.971315Z node 4 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710759:0 HandleReply TEvOperationPlan, stepId: 5000005, at schemeshard: 72057594046678944 2025-03-31T16:03:33.971494Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 128 -> 129 2025-03-31T16:03:33.971612Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-03-31T16:03:34.033100Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:34.033153Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710759, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-03-31T16:03:34.033383Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:34.033439Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:207:2209], at schemeshard: 72057594046678944, txId: 281474976710759, path id: 4 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000005 REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:29313 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: B285E33E-085F-4F32-9210-A90842717C4C amz-sdk-request: attempt=1 content-length: 73 content-md5: a9Su4FHJt26Hhw4HV0+Ocg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /metadata.json / / 73 2025-03-31T16:03:34.035298Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-03-31T16:03:34.035368Z node 4 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:03:34.036321Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 
72057594046678944, cookie: 281474976710759 2025-03-31T16:03:34.036407Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2025-03-31T16:03:34.036436Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710759 2025-03-31T16:03:34.036469Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710759, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 3 2025-03-31T16:03:34.036502Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-03-31T16:03:34.036576Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 281474976710759 REQUEST: PUT /metadata.json.sha256 HTTP/1.1 HEADERS: Host: localhost:29313 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 82C659A8-A118-42F2-AD2B-8A0E121580CC amz-sdk-request: attempt=1 content-length: 78 content-md5: 5v+lOCwt7SV92xRPjSiuqQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /metadata.json.sha256 / / 78 REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:29313 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: FFCA9006-39A4-4605-9936-B8070C42801D amz-sdk-request: attempt=1 content-length: 355 content-md5: 4DhJNWgTpoG3PVvZ0uCHUA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /scheme.pb / / 355 2025-03-31T16:03:34.047952Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710759 REQUEST: PUT /scheme.pb.sha256 HTTP/1.1 HEADERS: Host: localhost:29313 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 6BF74687-2203-4B22-857A-BD569A4787F8 amz-sdk-request: attempt=1 content-length: 74 content-md5: NWNhlq1fHKxcSj+x5Xq9NQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /scheme.pb.sha256 / / 74 REQUEST: PUT /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:29313 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 7106846B-F724-459B-8A55-B60B8933C2BD amz-sdk-request: attempt=1 content-length: 27 content-md5: CTqKvdXJPw0OgRdlsoR71Q== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv.zst / / 27 REQUEST: PUT /data_00.csv.sha256 HTTP/1.1 HEADERS: Host: localhost:29313 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: A19FA339-9F25-473D-AC13-FAAD87351D30 amz-sdk-request: attempt=1 content-length: 76 content-md5: gmOXObjloPe2DGxtDsgfpg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv.sha256 / 
/ 76 2025-03-31T16:03:34.069763Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 448 RawX2: 17179871601 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 8 RowsProcessed: 1 } 2025-03-31T16:03:34.069812Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976710759, tablet: 72075186233409547, partId: 0 2025-03-31T16:03:34.069938Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944, message: Source { RawX1: 448 RawX2: 17179871601 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 8 RowsProcessed: 1 } 2025-03-31T16:03:34.070040Z node 4 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 448 RawX2: 17179871601 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 8 RowsProcessed: 1 } 2025-03-31T16:03:34.070154Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710759:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:34.070195Z node 4 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-03-31T16:03:34.070237Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976710759:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-03-31T16:03:34.070279Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 129 -> 240 2025-03-31T16:03:34.070429Z node 4 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 281474976710759:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:34.072193Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-03-31T16:03:34.072336Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-03-31T16:03:34.072377Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710759:0 ProgressState 2025-03-31T16:03:34.072496Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 1/1 2025-03-31T16:03:34.072524Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-03-31T16:03:34.072566Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 1/1 2025-03-31T16:03:34.072608Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-03-31T16:03:34.072644Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 1/1, is published: true 
2025-03-31T16:03:34.072714Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [4:123:2149] message: TxId: 281474976710759 2025-03-31T16:03:34.072774Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-03-31T16:03:34.072803Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710759:0 2025-03-31T16:03:34.072830Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710759:0 2025-03-31T16:03:34.072926Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-03-31T16:03:34.074777Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710759 2025-03-31T16:03:34.074831Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710759 2025-03-31T16:03:34.076584Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-31T16:03:34.076665Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [4:477:2438] TestWaitNotification: OK eventTxId 102
|73.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ReplaceExternalTableShouldFailIfEntityOfAnotherTypeWithSameNameExists [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-03-31T16:03:34.772723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:03:34.772822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:03:34.772865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:03:34.772904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:03:34.772969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:03:34.773001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:03:34.773080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:03:34.773176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:03:34.773543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:03:34.864697Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig {
AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-03-31T16:03:34.864769Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:34.888810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:03:34.889155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:03:34.889361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:03:34.899142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:03:34.899390Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:03:34.900137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:34.900894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:03:34.907923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:34.909523Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:34.909623Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:34.909744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:03:34.909799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:34.909845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:03:34.910095Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:03:34.919546Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:240:2058] recipient: [1:15:2062] 2025-03-31T16:03:35.056772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:03:35.057079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:35.057347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:03:35.057588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:03:35.057650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:35.060546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:35.060708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:03:35.060927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:35.060987Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:03:35.061025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:03:35.061080Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:03:35.063557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:35.063635Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:03:35.063679Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:03:35.066074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:35.066166Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:35.066237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:35.066312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:03:35.070399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:03:35.075668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:03:35.075905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:03:35.077015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:35.077165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:03:35.077225Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:35.077543Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change 
state for txid 1:0 128 -> 240 2025-03-31T16:03:35.077619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:35.077825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:03:35.077909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:03:35.081829Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:35.081896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:35.082099Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:35.082146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:03:35.082617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:35.082671Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:03:35.082783Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:03:35.082839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:35.082888Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:03:35.082923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:35.082962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:03:35.083027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:35.083066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:03:35.083118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:03:35.083205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:03:35.083242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:03:35.083276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:03:35.085377Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575940466789 ... 
LocalPathId: 3] was 2 2025-03-31T16:03:35.176548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-31T16:03:35.177612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-31T16:03:35.178707Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:35.178746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:35.178915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-03-31T16:03:35.179019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-03-31T16:03:35.179142Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:35.179178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-03-31T16:03:35.179211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 102, path id: 3 2025-03-31T16:03:35.179239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 102, path id: 3 FAKE_COORDINATOR: Erasing txId 102 2025-03-31T16:03:35.179565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:03:35.179611Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-03-31T16:03:35.179710Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-31T16:03:35.179743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T16:03:35.179800Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-31T16:03:35.179832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T16:03:35.179890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-03-31T16:03:35.179974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T16:03:35.180008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-03-31T16:03:35.180038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-03-31T16:03:35.180107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-03-31T16:03:35.180144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-03-31T16:03:35.180176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2025-03-31T16:03:35.180204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-03-31T16:03:35.181002Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:03:35.181090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:03:35.181122Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-03-31T16:03:35.181167Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2025-03-31T16:03:35.181210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-03-31T16:03:35.182165Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:03:35.182240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:03:35.182269Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-03-31T16:03:35.182295Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-03-31T16:03:35.182323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-31T16:03:35.182384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-03-31T16:03:35.187400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-31T16:03:35.188505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-03-31T16:03:35.188778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-03-31T16:03:35.188827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-03-31T16:03:35.189306Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-03-31T16:03:35.189418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-31T16:03:35.189456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:329:2320] TestWaitNotification: OK eventTxId 102 2025-03-31T16:03:35.190159Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:03:35.190367Z 
node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalDataSource" took 229us result status StatusSuccess 2025-03-31T16:03:35.190881Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalDataSource" PathDescription { Self { Name: "ExternalDataSource" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "ExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 2025-03-31T16:03:35.194779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "UniqueName" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } ReplaceIfExists: true } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:03:35.195189Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 103:0, feature flag EnableReplaceIfExistsForExternalEntities 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "UniqueName" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } ReplaceIfExists: true } 2025-03-31T16:03:35.195307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TAlterExternalTable Propose: opId# 103:0, path# /MyRoot/UniqueName, ReplaceIfExists: 1 2025-03-31T16:03:35.195450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusNameConflict, reason: Check failed: path: '/MyRoot/UniqueName', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalTable, at schemeshard: 72057594046678944 2025-03-31T16:03:35.199466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusNameConflict Reason: "Check failed: path: \'/MyRoot/UniqueName\', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalTable" TxId: 103 
SchemeshardId: 72057594046678944 PathId: 2 PathCreateTxId: 101, at schemeshard: 72057594046678944 2025-03-31T16:03:35.199847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusNameConflict, reason: Check failed: path: '/MyRoot/UniqueName', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalTable, operation: CREATE EXTERNAL TABLE, path: /MyRoot/UniqueName TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-03-31T16:03:35.200244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-03-31T16:03:35.200285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-03-31T16:03:35.200798Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-03-31T16:03:35.200894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-03-31T16:03:35.200943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:337:2328] TestWaitNotification: OK eventTxId 103 >> TExportToS3Tests::AuditCompletedExport [GOOD] >> TExportToS3Tests::AuditCancelledExport ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::EncryptedExport [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:03:27.867326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:03:27.867419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:03:27.867471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:03:27.867505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:03:27.867553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:03:27.867582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:03:27.867641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:03:27.867713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:03:27.868030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:03:27.957856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 
2025-03-31T16:03:27.957923Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:27.962539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:03:27.962675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:03:27.962856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:03:27.967810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:03:27.968013Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:03:27.968631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:27.968846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:03:27.970757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:27.971940Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:27.972001Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:27.972190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:03:27.972247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:27.972287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:03:27.972463Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:03:27.979686Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:03:28.133936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:03:28.134133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:28.134312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:03:28.134573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:03:28.134651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:28.137264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:28.137375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 
2025-03-31T16:03:28.137537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:28.137578Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:03:28.137605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:03:28.137635Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:03:28.139695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:28.139778Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:03:28.139814Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:03:28.141733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:28.141796Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:28.141842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:28.141913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:03:28.145700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:03:28.147834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:03:28.148040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:03:28.149077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:28.149214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:03:28.149264Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:28.149533Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:03:28.149593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:28.149775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:03:28.149864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no 
IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:03:28.152765Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:28.152830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:28.153030Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:28.153072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:03:28.153304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:28.153350Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:03:28.153442Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:03:28.153503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:28.153559Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:03:28.153606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:28.153644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:03:28.153683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:28.153748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:03:28.153784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:03:28.153856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:03:28.153897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:03:28.153929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:03:28.155972Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:03:28.156215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:03:28.156252Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
4046678944 2025-03-31T16:03:34.384160Z node 4 :FLAT_TX_SCHEMESHARD INFO: TRmDir ProgressState, opId: 281474976710763:0, at schemeshard: 72057594046678944 2025-03-31T16:03:34.384271Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710763 ready parts: 1/1 2025-03-31T16:03:34.384581Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 281474976710763 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:03:34.385511Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-03-31T16:03:34.385633Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-03-31T16:03:34.385672Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710763 2025-03-31T16:03:34.385721Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 12 2025-03-31T16:03:34.385766Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-03-31T16:03:34.386864Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-03-31T16:03:34.386955Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-03-31T16:03:34.387001Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710763 2025-03-31T16:03:34.387036Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 11 2025-03-31T16:03:34.387073Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-03-31T16:03:34.387271Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 0/1, is published: true 2025-03-31T16:03:34.390596Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-03-31T16:03:34.392732Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710763, at schemeshard: 72057594046678944 2025-03-31T16:03:34.392815Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 0/1, is published: true 2025-03-31T16:03:34.392876Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710763, at schemeshard: 72057594046678944 2025-03-31T16:03:34.393890Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710763:4294967295 from tablet: 72057594046678944 to tablet: 
72057594046316545 cookie: 0:281474976710763 msg type: 269090816 2025-03-31T16:03:34.394046Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710763, partId: 4294967295, tablet: 72057594046316545 2025-03-31T16:03:34.394580Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 FAKE_COORDINATOR: Add transaction: 281474976710763 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710763 at step: 5000010 2025-03-31T16:03:34.395227Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000010, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:34.395373Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710763 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 17179871340 } } Step: 5000010 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:03:34.395439Z node 4 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710763:0, step: 5000010, at schemeshard: 72057594046678944 2025-03-31T16:03:34.395607Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710763:0, at schemeshard: 72057594046678944 2025-03-31T16:03:34.395691Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710763:0 progress is 1/1 2025-03-31T16:03:34.395745Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-03-31T16:03:34.395798Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710763:0 progress is 1/1 2025-03-31T16:03:34.395848Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-03-31T16:03:34.395948Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-03-31T16:03:34.396047Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-03-31T16:03:34.396098Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 1/1, is published: false 2025-03-31T16:03:34.396173Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-03-31T16:03:34.396228Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710763:0 2025-03-31T16:03:34.396271Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710763:0 2025-03-31T16:03:34.396355Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-03-31T16:03:34.396406Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710763, publications: 2, subscribers: 1 2025-03-31T16:03:34.396457Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710763, [OwnerId: 72057594046678944, LocalPathId: 1], 13 2025-03-31T16:03:34.396514Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710763, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-03-31T16:03:34.398186Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 
72057594046678944, cookie: 281474976710763 2025-03-31T16:03:34.401605Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:34.401668Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:34.401864Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-03-31T16:03:34.402029Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:34.402075Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:207:2209], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 1 2025-03-31T16:03:34.402117Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:207:2209], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 4 FAKE_COORDINATOR: Erasing txId 281474976710763 2025-03-31T16:03:34.403071Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-03-31T16:03:34.403461Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-03-31T16:03:34.403510Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710763 2025-03-31T16:03:34.403567Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2025-03-31T16:03:34.403640Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-03-31T16:03:34.404203Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-03-31T16:03:34.404305Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-03-31T16:03:34.404349Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710763 2025-03-31T16:03:34.404381Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-03-31T16:03:34.404411Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-03-31T16:03:34.404511Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710763, subscribers: 1 2025-03-31T16:03:34.404578Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send 
TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [4:123:2149] 2025-03-31T16:03:34.409793Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-03-31T16:03:34.410138Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-03-31T16:03:34.410232Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710763 2025-03-31T16:03:34.410328Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710763 2025-03-31T16:03:34.410387Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-03-31T16:03:34.410433Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710763 2025-03-31T16:03:34.410487Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710763, id# 103, itemIdx# 4294967295 2025-03-31T16:03:34.412992Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-03-31T16:03:34.413127Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-03-31T16:03:34.413191Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [4:1132:3010] TestWaitNotification: OK eventTxId 103 >> TExportToS3Tests::SchemaMapping [GOOD] >> TExportToS3Tests::SchemaMappingEncryption >> TSchemeShardUserAttrsTest::Boot [GOOD] >> TExternalTableTest::Decimal [GOOD] |73.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive |73.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive |73.9%| [TA] {RESULT} $(B)/ydb/core/backup/impl/ut_local_partition_reader/test-results/unittest/{meta.json ... results_accumulator.log} |73.9%| [TA] {RESULT} $(B)/ydb/core/blobstorage/nodewarden/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardUserAttrsTest::MkDir [GOOD] |73.9%| [TA] {RESULT} $(B)/ydb/core/kqp/rm_service/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |73.9%| [LD] {RESULT} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive >> TSchemeShardUserAttrsTest::SpecialAttributes [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::Changefeeds [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:03:28.191904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:03:28.192023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:03:28.192094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:03:28.192147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:03:28.192228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:03:28.192257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:03:28.192345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:03:28.192438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:03:28.192817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:03:28.285123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:03:28.285194Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:28.290390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:03:28.290529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:03:28.290692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:03:28.297667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:03:28.297945Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:03:28.298876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:28.299214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:03:28.302013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:28.303510Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:28.303593Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 
72057594046678944 2025-03-31T16:03:28.303771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:03:28.303812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:28.303846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:03:28.304013Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:03:28.324429Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:03:28.494482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:03:28.494809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:28.495170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:03:28.495460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:03:28.495522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:28.498506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:28.498689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:03:28.498940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:28.499010Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:03:28.499061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:03:28.499127Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:03:28.501474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:28.501537Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:03:28.501578Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:03:28.503613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:28.503677Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:28.503723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:28.503794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:03:28.507987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:03:28.510565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:03:28.510853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:03:28.512151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:28.512309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:03:28.512374Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:28.512701Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:03:28.512761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:28.512975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:03:28.513090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:03:28.517773Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:28.517855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:28.518078Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:28.518159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:03:28.518462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:28.518526Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:03:28.518650Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:03:28.518702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:28.518787Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:03:28.518832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:28.518876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:03:28.518948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:28.519023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:03:28.519067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:03:28.520206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:03:28.520295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:03:28.520344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:03:28.522772Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:03:28.522927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:03:28.522988Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 7594046678944 2025-03-31T16:03:34.797968Z node 4 :FLAT_TX_SCHEMESHARD INFO: TRmDir ProgressState, opId: 281474976710761:0, at schemeshard: 72057594046678944 2025-03-31T16:03:34.798023Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710761 ready parts: 1/1 2025-03-31T16:03:34.798172Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 281474976710761 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:03:34.798757Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-03-31T16:03:34.798852Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-03-31T16:03:34.798880Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710761 2025-03-31T16:03:34.798910Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-03-31T16:03:34.798940Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-03-31T16:03:34.799617Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 9 Version: 7 PathOwnerId: 72057594046678944, cookie: 
281474976710761 2025-03-31T16:03:34.799698Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 9 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-03-31T16:03:34.799738Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710761 2025-03-31T16:03:34.799767Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 9], version: 7 2025-03-31T16:03:34.799793Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 3 2025-03-31T16:03:34.799849Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2025-03-31T16:03:34.803116Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-03-31T16:03:34.803847Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710761, at schemeshard: 72057594046678944 2025-03-31T16:03:34.803901Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2025-03-31T16:03:34.803949Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710761, at schemeshard: 72057594046678944 2025-03-31T16:03:34.804076Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710761:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710761 msg type: 269090816 2025-03-31T16:03:34.804183Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710761, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710761 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710761 at step: 5000010 2025-03-31T16:03:34.804669Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000010, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:34.804776Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710761 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 17179871340 } } Step: 5000010 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:03:34.804825Z node 4 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710761:0, step: 5000010, at schemeshard: 72057594046678944 2025-03-31T16:03:34.804953Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710761:0, at schemeshard: 72057594046678944 2025-03-31T16:03:34.805047Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2025-03-31T16:03:34.805085Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-03-31T16:03:34.805128Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2025-03-31T16:03:34.805187Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-03-31T16:03:34.805249Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:03:34.805319Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 2 2025-03-31T16:03:34.805353Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 1/1, is published: false 2025-03-31T16:03:34.805430Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-03-31T16:03:34.805471Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710761:0 2025-03-31T16:03:34.805507Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710761:0 2025-03-31T16:03:34.805570Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 3 2025-03-31T16:03:34.805612Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710761, publications: 2, subscribers: 1 2025-03-31T16:03:34.805647Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 1], 12 2025-03-31T16:03:34.805679Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 9], 18446744073709551615 2025-03-31T16:03:34.806310Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-03-31T16:03:34.806411Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-03-31T16:03:34.809476Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:34.809517Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:34.809710Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 9] 2025-03-31T16:03:34.809830Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:34.809860Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:207:2209], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 1 2025-03-31T16:03:34.809912Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:207:2209], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 9 FAKE_COORDINATOR: Erasing txId 281474976710761 2025-03-31T16:03:34.810693Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-03-31T16:03:34.810789Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-03-31T16:03:34.810826Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710761 2025-03-31T16:03:34.810899Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at 
schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 12 2025-03-31T16:03:34.810958Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-03-31T16:03:34.811579Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 9 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-03-31T16:03:34.811651Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 9 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-03-31T16:03:34.811679Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710761 2025-03-31T16:03:34.811705Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 9], version: 18446744073709551615 2025-03-31T16:03:34.811731Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 2 2025-03-31T16:03:34.811804Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710761, subscribers: 1 2025-03-31T16:03:34.811875Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [4:123:2149] 2025-03-31T16:03:34.816008Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-03-31T16:03:34.820048Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-03-31T16:03:34.820146Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2025-03-31T16:03:34.820199Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710761 2025-03-31T16:03:34.820243Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-03-31T16:03:34.820280Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761 2025-03-31T16:03:34.820308Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761, id# 105, itemIdx# 4294967295 2025-03-31T16:03:34.822092Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-03-31T16:03:34.822176Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-03-31T16:03:34.822223Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [4:1390:3180] TestWaitNotification: OK eventTxId 105 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::Boot [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:03:36.170217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:03:36.170322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:03:36.170366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:03:36.170400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:03:36.170440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:03:36.170466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:03:36.170539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:03:36.170607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:03:36.170923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:03:36.251576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:03:36.251646Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:36.257108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:03:36.257225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:03:36.257389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:03:36.262947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:03:36.263265Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:03:36.263943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:36.264216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:03:36.266621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:36.267881Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:36.267953Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:36.268139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:03:36.268202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:36.268244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:03:36.268405Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:03:36.275948Z node 1 :HIVE INFO: [72057594037968897] started, primary 
subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:03:36.405154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:03:36.405422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:36.405635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:03:36.405906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:03:36.405962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:36.409275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:36.409420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:03:36.409632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:36.409700Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:03:36.409732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:03:36.409770Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:03:36.416977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:36.417073Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:03:36.417152Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:03:36.419791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:36.419847Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:36.419885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:36.419943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:03:36.422351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:03:36.424149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 
72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:03:36.424301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:03:36.425025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:36.425114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:03:36.425158Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:36.425342Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:03:36.425376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:36.425487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:03:36.425539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:03:36.427111Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:36.427176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:36.427330Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:36.427366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:03:36.427526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:36.427557Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:03:36.427636Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:03:36.427659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:36.427687Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:03:36.427716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:36.427744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:03:36.427777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:36.427801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:03:36.427833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:03:36.427878Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:03:36.427907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:03:36.427939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:03:36.429408Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:03:36.429480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:03:36.429515Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-03-31T16:03:36.429566Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-03-31T16:03:36.429595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:03:36.429667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-03-31T16:03:36.431986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-03-31T16:03:36.432470Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::Decimal [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:111:2142] 2025-03-31T16:03:34.996577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:03:34.996680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:03:34.996722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:03:34.996754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:03:34.996808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:03:34.996842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:03:34.996918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:03:34.997018Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:03:34.997318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:03:35.088232Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-03-31T16:03:35.088306Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:35.094276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:03:35.094488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:03:35.094685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:03:35.106914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:03:35.107206Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:03:35.107878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:35.108094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:03:35.113019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:35.113935Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:35.113992Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:35.114205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:03:35.114244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:35.114279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:03:35.114468Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:03:35.123554Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:238:2058] recipient: [1:15:2062] 2025-03-31T16:03:35.337535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:03:35.337810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:35.338047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction 
target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:03:35.338344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:03:35.338456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:35.353689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:35.353854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:03:35.354083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:35.354158Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:03:35.354192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:03:35.354228Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:03:35.360511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:35.360585Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:03:35.360624Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:03:35.362916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:35.362979Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:35.363046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:35.364663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:03:35.368809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:03:35.376326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:03:35.376561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:03:35.377727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:35.377872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 
72057594046316545 AckTo { RawX1: 124 RawX2: 4294969446 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:03:35.377927Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:35.378231Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:03:35.378285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:35.378553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:03:35.378645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:03:35.381271Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:35.381327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:35.381529Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:35.381567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:03:35.381838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:35.381887Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:03:35.381988Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:03:35.382038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:35.382094Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:03:35.382127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:35.382165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:03:35.382394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:35.382432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:03:35.382464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:03:35.382533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:03:35.382573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:03:35.382606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:03:35.398320Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575940466789 ... 
TOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000003 2025-03-31T16:03:36.361821Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:36.361929Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 8589936748 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:03:36.361989Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateExternalTable TPropose, operationId: 101:0 HandleReply TEvOperationPlan: step# 5000003 2025-03-31T16:03:36.362100Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2025-03-31T16:03:36.362257Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:03:36.362312Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-31T16:03:36.362351Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-31T16:03:36.363282Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-31T16:03:36.364952Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 FAKE_COORDINATOR: Erasing txId 101 2025-03-31T16:03:36.366106Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:36.366144Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:36.366252Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-03-31T16:03:36.366313Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-03-31T16:03:36.366353Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T16:03:36.366425Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:36.366446Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-03-31T16:03:36.366470Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 3 2025-03-31T16:03:36.366486Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 3 2025-03-31T16:03:36.366501Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-03-31T16:03:36.366740Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at 
schemeshard: 72057594046678944 2025-03-31T16:03:36.366769Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-03-31T16:03:36.366856Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-31T16:03:36.366892Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T16:03:36.366928Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-31T16:03:36.366956Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T16:03:36.366985Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-03-31T16:03:36.367035Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T16:03:36.367064Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-03-31T16:03:36.367112Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-03-31T16:03:36.367176Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-03-31T16:03:36.367208Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-31T16:03:36.367234Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 3, subscribers: 0 2025-03-31T16:03:36.367278Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-03-31T16:03:36.367307Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-03-31T16:03:36.367328Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-03-31T16:03:36.368194Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T16:03:36.368265Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T16:03:36.368293Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 101 2025-03-31T16:03:36.368348Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-03-31T16:03:36.368381Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-03-31T16:03:36.369450Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T16:03:36.369503Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T16:03:36.369522Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 
72057594046678944, txId: 101 2025-03-31T16:03:36.369554Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-03-31T16:03:36.369578Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-31T16:03:36.370099Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T16:03:36.370151Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T16:03:36.370175Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-03-31T16:03:36.370197Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-03-31T16:03:36.370215Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-31T16:03:36.370267Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-03-31T16:03:36.372336Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-31T16:03:36.372439Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-31T16:03:36.373291Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-03-31T16:03:36.373436Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-03-31T16:03:36.373486Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-03-31T16:03:36.373778Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-03-31T16:03:36.373844Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-31T16:03:36.373870Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:335:2326] TestWaitNotification: OK eventTxId 101 2025-03-31T16:03:36.374168Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:03:36.374310Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 160us result status StatusSuccess 2025-03-31T16:03:36.374523Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable 
CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Decimal(35,9)" TypeId: 4865 Id: 1 NotNull: false TypeInfo { DecimalPrecision: 35 DecimalScale: 9 } } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::MkDir [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:03:35.982780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:03:35.982882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:03:35.982924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:03:35.982958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:03:35.983002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:03:35.983052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:03:35.984293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:03:35.984389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:03:35.984749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:03:36.093473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:03:36.093558Z node 1 :IMPORT WARN: Table profiles were not loaded 
2025-03-31T16:03:36.112678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:03:36.112810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:03:36.112995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:03:36.140046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:03:36.140360Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:03:36.141224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:36.141530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:03:36.150643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:36.152215Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:36.152301Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:36.152506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:03:36.152562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:36.152610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:03:36.152779Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:03:36.163049Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:03:36.346968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:03:36.347322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:36.347580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:03:36.347840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:03:36.347903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:36.352497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:36.352657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:03:36.352889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:36.352966Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:03:36.353006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:03:36.353047Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:03:36.355825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:36.355919Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:03:36.355997Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:03:36.360171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:36.360265Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:36.360341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:36.360404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:03:36.368039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:03:36.372565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:03:36.372835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:03:36.373962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:36.374134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:03:36.374210Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:36.374520Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:03:36.374575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:36.374765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:03:36.374858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:03:36.379854Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:36.379935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:36.380157Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:36.380209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:03:36.380454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:36.380504Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:03:36.380615Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:03:36.380668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:36.380710Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:03:36.380741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:36.380784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:03:36.380826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:36.380866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:03:36.380917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:03:36.381019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:03:36.381061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:03:36.381097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:03:36.383446Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:03:36.383597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:03:36.383650Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
.525738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-03-31T16:03:36.526285Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-03-31T16:03:36.526438Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-03-31T16:03:36.526473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-03-31T16:03:36.526516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:378:2369] 2025-03-31T16:03:36.526701Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-03-31T16:03:36.526752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-31T16:03:36.526777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:378:2369] 2025-03-31T16:03:36.526881Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-03-31T16:03:36.526908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-31T16:03:36.526922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:378:2369] 2025-03-31T16:03:36.526991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-03-31T16:03:36.527006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:378:2369] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 2025-03-31T16:03:36.527676Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:03:36.527908Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 259us result status StatusSuccess 2025-03-31T16:03:36.528443Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: "DirB" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { 
SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:03:36.528925Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:03:36.529082Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 140us result status StatusSuccess 2025-03-31T16:03:36.529320Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 } ChildrenExist: true } Children { Name: "SubDirA" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrA1" Value: "ValA1" } UserAttributes { Key: "AttrA2" Value: "ValA2" } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:03:36.529745Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:03:36.529858Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirB" took 115us result status StatusSuccess 2025-03-31T16:03:36.530067Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirB" PathDescription { Self { Name: "DirB" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 
5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrB1" Value: "ValB1" } UserAttributes { Key: "AttrB2" Value: "ValB2" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
2025-03-31T16:03:36.530497Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/SubDirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-03-31T16:03:36.530598Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/SubDirA" took 110us result status StatusSuccess
2025-03-31T16:03:36.530830Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/SubDirA" PathDescription { Self { Name: "SubDirA" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: true } Children { Name: "DirB" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 103 CreateStep: 5000005 ParentPathId: 4 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrAA1" Value: "ValAA1" } UserAttributes { Key: "AttrAA2" Value: "ValAA2" } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
2025-03-31T16:03:36.531479Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/SubDirA/DirB" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-03-31T16:03:36.531634Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/SubDirA/DirB" took 155us result status StatusSuccess
2025-03-31T16:03:36.531844Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/SubDirA/DirB" PathDescription { Self { Name: "DirB" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 103 CreateStep: 5000005 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrAB1" Value: "ValAB1" } UserAttributes { Key: "AttrAB2" Value: "ValAB2" } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::SpecialAttributes [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141]
2025-03-31T16:03:36.393410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-03-31T16:03:36.393519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-31T16:03:36.393568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-03-31T16:03:36.393610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-03-31T16:03:36.393657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-03-31T16:03:36.393719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-03-31T16:03:36.393800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-31T16:03:36.393901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-03-31T16:03:36.394280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-03-31T16:03:36.490175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-31T16:03:36.490254Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:03:36.496634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-03-31T16:03:36.496734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-03-31T16:03:36.496896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-03-31T16:03:36.507124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-03-31T16:03:36.507472Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-03-31T16:03:36.508143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-03-31T16:03:36.508348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-03-31T16:03:36.510161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-03-31T16:03:36.511177Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-31T16:03:36.511242Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-31T16:03:36.511433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-03-31T16:03:36.511486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-31T16:03:36.511531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-03-31T16:03:36.511685Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-03-31T16:03:36.518271Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062]
2025-03-31T16:03:36.629012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-03-31T16:03:36.629254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:03:36.629472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-03-31T16:03:36.629770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-03-31T16:03:36.629841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:03:36.632382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-03-31T16:03:36.632500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-03-31T16:03:36.632657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:03:36.632715Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-03-31T16:03:36.632747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-03-31T16:03:36.632774Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-03-31T16:03:36.634666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:03:36.634711Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-03-31T16:03:36.634748Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-03-31T16:03:36.636498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:03:36.636548Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:03:36.636598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-03-31T16:03:36.636660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-03-31T16:03:36.639486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-03-31T16:03:36.641227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-03-31T16:03:36.641366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-03-31T16:03:36.642178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-03-31T16:03:36.642315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-03-31T16:03:36.642373Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-03-31T16:03:36.642687Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-03-31T16:03:36.642743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-03-31T16:03:36.642913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-03-31T16:03:36.642986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-03-31T16:03:36.645125Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-31T16:03:36.645172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-31T16:03:36.645335Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-31T16:03:36.645370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1
2025-03-31T16:03:36.645610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:03:36.645650Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-03-31T16:03:36.645751Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-03-31T16:03:36.645778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-31T16:03:36.645804Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-03-31T16:03:36.645825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-31T16:03:36.645852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-03-31T16:03:36.645880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-31T16:03:36.645911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-03-31T16:03:36.645946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-03-31T16:03:36.646002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-03-31T16:03:36.646029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-03-31T16:03:36.646063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-03-31T16:03:36.647843Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-03-31T16:03:36.647980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-03-31T16:03:36.648027Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ...
3-31T16:03:36.686679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 1
2025-03-31T16:03:36.686711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2
2025-03-31T16:03:36.686875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944
2025-03-31T16:03:36.686907Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId# 102:0 ProgressState, at schemeshard: 72057594046678944
2025-03-31T16:03:36.686950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1
2025-03-31T16:03:36.687075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-03-31T16:03:36.687705Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 102
2025-03-31T16:03:36.687810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 102
2025-03-31T16:03:36.687833Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102
2025-03-31T16:03:36.687878Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4
2025-03-31T16:03:36.687925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-03-31T16:03:36.688566Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 102
2025-03-31T16:03:36.688616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 102
2025-03-31T16:03:36.688632Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102
2025-03-31T16:03:36.688650Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2
2025-03-31T16:03:36.688667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2
2025-03-31T16:03:36.688712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true
2025-03-31T16:03:36.691209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816
2025-03-31T16:03:36.691354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 102 at step: 5000002
FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000002
2025-03-31T16:03:36.691719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944
2025-03-31T16:03:36.691848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-03-31T16:03:36.691900Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId# 102:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000002, at schemeshard: 72057594046678944
2025-03-31T16:03:36.692044Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 240
2025-03-31T16:03:36.692224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-03-31T16:03:36.692284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1
2025-03-31T16:03:36.693400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102
2025-03-31T16:03:36.693655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102
FAKE_COORDINATOR: Erasing txId 102
2025-03-31T16:03:36.694566Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-31T16:03:36.694600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-31T16:03:36.694719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2]
2025-03-31T16:03:36.694801Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-31T16:03:36.694828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 1
2025-03-31T16:03:36.694880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2
2025-03-31T16:03:36.695005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944
2025-03-31T16:03:36.695051Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState
2025-03-31T16:03:36.695158Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1
2025-03-31T16:03:36.695187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1
2025-03-31T16:03:36.695214Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1
2025-03-31T16:03:36.695235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1
2025-03-31T16:03:36.695261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false
2025-03-31T16:03:36.695299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1
2025-03-31T16:03:36.695341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0
2025-03-31T16:03:36.695366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0
2025-03-31T16:03:36.695421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2
2025-03-31T16:03:36.695449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0
2025-03-31T16:03:36.695472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 5
2025-03-31T16:03:36.695496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 3
2025-03-31T16:03:36.696125Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 102
2025-03-31T16:03:36.696195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 102
2025-03-31T16:03:36.696221Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102
2025-03-31T16:03:36.696247Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5
2025-03-31T16:03:36.696277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-03-31T16:03:36.696850Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102
2025-03-31T16:03:36.696914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102
2025-03-31T16:03:36.696934Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102
2025-03-31T16:03:36.696951Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3
2025-03-31T16:03:36.696972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1
2025-03-31T16:03:36.697012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0
2025-03-31T16:03:36.699257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102
2025-03-31T16:03:36.700282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102
TestModificationResult got TxId: 102, wait until txId: 102
TestModificationResults wait txId: 103
2025-03-31T16:03:36.702452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: "DirD" } AlterUserAttributes { UserAttributes { Key: "__extra_path_symbols_allowed" Value: "./_" } } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-03-31T16:03:36.702664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /MyRoot/DirD, operationId: 103:0, at schemeshard: 72057594046678944
2025-03-31T16:03:36.702748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusInvalidParameter, reason: UserAttributes: attribute '__extra_path_symbols_allowed' has invalid value './_', forbidden symbols are found, at schemeshard: 72057594046678944
2025-03-31T16:03:36.708160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusInvalidParameter Reason: "UserAttributes: attribute \'__extra_path_symbols_allowed\' has invalid value \'./_\', forbidden symbols are found" TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944
2025-03-31T16:03:36.708382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: UserAttributes: attribute '__extra_path_symbols_allowed' has invalid value './_', forbidden symbols are found, operation: CREATE DIRECTORY, path: /MyRoot/DirD
TestModificationResult got TxId: 103, wait until txId: 103
>> TBackupTests::ShouldSucceedOnMultiShardTable[Raw]
|73.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest
|73.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest
|74.0%| [TA] $(B)/ydb/core/tx/schemeshard/ut_sequence/test-results/unittest/{meta.json ... results_accumulator.log}
|74.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest
>> TExportToS3Tests::AuditCancelledExport [GOOD]
|74.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest
>> KqpCost::IndexLookupJoin+StreamLookupJoin
|74.0%| [TA] $(B)/ydb/core/tx/schemeshard/ut_user_attributes/test-results/unittest/{meta.json ... results_accumulator.log}
|74.0%| [TA] $(B)/ydb/core/tx/schemeshard/ut_external_table/test-results/unittest/{meta.json ... results_accumulator.log}
>> TBackupTests::ShouldSucceedOnLargeData[Zstd]
>> KqpCost::OlapPointLookup
>> DataShardVolatile::VolatileCommitOnBlobStorageFailure-UseSink [GOOD]
>> DataShardVolatile::VolatileTxAbortedOnSplit
|74.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging
|74.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging
>> TBackupTests::ShouldSucceedOnMultiShardTable[Zstd]
>> TExportToS3Tests::SchemaMappingEncryption [GOOD]
>> TExportToS3Tests::SchemaMappingEncryptionIncorrectKey
>> TExecutorDb::CoordinatorSimulation [GOOD]
>> TBackupTests::ShouldSucceedOnLargeData[Raw]
>> TBackupTests::BackupUuidColumn[Raw]
|74.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/tpc/medium/ydb-tests-functional-tpc-medium
>> TExecutorDb::RandomCoordinatorSimulation
|74.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest
|74.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/tpc/medium/ydb-tests-functional-tpc-medium
|74.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest
>> TBackupTests::ShouldSucceedOnSingleShardTable[Zstd]
>> KqpCost::PointLookup
>> TExportToS3Tests::CancelUponTransferringMultiShardTableShouldSucceed [GOOD]
|74.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest
>> KqpCost::OltpWriteRow-isSink
>> TExportToS3Tests::CancelUponTransferringSingleTableShouldSucceed [GOOD]
>> TBackupTests::ShouldSucceedOnMultiShardTable[Raw] [GOOD]
>> TExportToS3Tests::CancelUponTransferringManyTablesShouldSucceed
>> TPartBtreeIndexIteration::FewNodes_Groups_History_Slices [GOOD]
>> KqpCost::IndexLookupAndTake-useSink
>> KqpCost::ScanQueryRangeFullScan-SourceRead
>> TPartBtreeIndexIteration::FewNodes_Groups_History_Slices_Sticky
|74.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest
|74.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest
|74.0%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sequence/test-results/unittest/{meta.json ... results_accumulator.log}
|74.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest
|74.0%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/test-results/unittest/{meta.json ... results_accumulator.log}
|74.0%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging
|74.0%| [LD] {RESULT} $(B)/ydb/tests/functional/tpc/medium/ydb-tests-functional-tpc-medium
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::AuditCancelledExport [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142]
Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:111:2142]
2025-03-31T16:03:27.747342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-03-31T16:03:27.747454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-31T16:03:27.747500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-03-31T16:03:27.747538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-03-31T16:03:27.747589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-03-31T16:03:27.747636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-03-31T16:03:27.747730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-31T16:03:27.747828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-03-31T16:03:27.748168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-03-31T16:03:27.833103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-31T16:03:27.833163Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:03:27.853425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-03-31T16:03:27.853934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-03-31T16:03:27.854116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-03-31T16:03:27.890577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-03-31T16:03:27.890923Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-03-31T16:03:27.891675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-03-31T16:03:27.891939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-03-31T16:03:27.904848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-03-31T16:03:27.906471Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-31T16:03:27.906549Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-31T16:03:27.906685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-03-31T16:03:27.906734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-31T16:03:27.906784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-03-31T16:03:27.907031Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-03-31T16:03:27.925287Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:240:2058] recipient: [1:15:2062]
2025-03-31T16:03:28.091455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-03-31T16:03:28.091759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:03:28.092010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-03-31T16:03:28.092271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-03-31T16:03:28.092334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:03:28.095501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-03-31T16:03:28.095692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-03-31T16:03:28.096027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:03:28.096097Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-03-31T16:03:28.096132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-03-31T16:03:28.096173Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-03-31T16:03:28.098503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:03:28.098567Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-03-31T16:03:28.098607Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-03-31T16:03:28.100910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:03:28.100969Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:03:28.101015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-03-31T16:03:28.101101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-03-31T16:03:28.104989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-03-31T16:03:28.107252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-03-31T16:03:28.107519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-03-31T16:03:28.108694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-03-31T16:03:28.108865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969446 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-03-31T16:03:28.108937Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-03-31T16:03:28.109246Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-03-31T16:03:28.109322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-03-31T16:03:28.109524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-03-31T16:03:28.109608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-03-31T16:03:28.112108Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-31T16:03:28.112191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-31T16:03:28.112435Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-31T16:03:28.112496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1
FAKE_COORDINATOR: Erasing txId 1
2025-03-31T16:03:28.113209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:03:28.113289Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-03-31T16:03:28.113397Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-03-31T16:03:28.113434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-31T16:03:28.113475Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-03-31T16:03:28.113512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-31T16:03:28.113553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-03-31T16:03:28.113620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-31T16:03:28.113660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-03-31T16:03:28.113693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-03-31T16:03:28.113765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-03-31T16:03:28.113830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-03-31T16:03:28.113884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-03-31T16:03:28.115987Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-03-31T16:03:28.116140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-03-31T16:03:28.116185Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ...
es: 0 S3Settings { Endpoint: "localhost:5872" Scheme: HTTP Bucket: "" ObjectKeyPattern: "" AccessKey: "" SecretKey: "" StorageClass: STORAGE_CLASS_UNSPECIFIED UseVirtualAddressing: true } Table { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } NeedToBill: true SnapshotStep: 0 SnapshotTxId: 0 EnableChecksums: false EnablePermissions: false } Internal: true } TxId: 281474976710759 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-03-31T16:03:37.754816Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TBackup Propose, path: /MyRoot/export-102/0, opId: 281474976710759:0, at schemeshard: 72057594046678944
2025-03-31T16:03:37.754978Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2
2025-03-31T16:03:37.755482Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710759:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-03-31T16:03:37.755556Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpBackup, opId: 281474976710759:0, at schemeshard: 72057594046678944
2025-03-31T16:03:37.756881Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion export in-flight, txId: 102, at schemeshard: 72057594046678944
2025-03-31T16:03:37.756948Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 102, at schemeshard: 72057594046678944
2025-03-31T16:03:37.758902Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710759, response: Status: StatusAccepted TxId: 281474976710759 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944
2025-03-31T16:03:37.759296Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710759, database: /MyRoot, subject: , status: StatusAccepted, operation: BACKUP TABLE, path: /MyRoot/export-102/0
2025-03-31T16:03:37.759593Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvModifySchemeTransactionResult: txId# 281474976710759, status# StatusAccepted
2025-03-31T16:03:37.759686Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: Status: StatusAccepted TxId: 281474976710759 SchemeshardId: 72057594046678944
2025-03-31T16:03:37.760126Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944
2025-03-31T16:03:37.760262Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710759:0 ProgressState, operation type: TxBackup, at tablet# 72057594046678944
2025-03-31T16:03:37.760331Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710759:0 ProgressState no shards to create, do next state
2025-03-31T16:03:37.760375Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 2 -> 3
2025-03-31T16:03:37.762910Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProposeCancelTx Execute, at schemeshard: 72057594046678944, message: TargetTxId: 281474976710759 TxId: 102
2025-03-31T16:03:37.762981Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Execute cancel tx: opId# 102:0, target opId# 281474976710759:0
2025-03-31T16:03:37.763363Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944
2025-03-31T16:03:37.763419Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TBackup TConfigurePart ProgressState, opId: 281474976710759:0, at schemeshard: 72057594046678944
2025-03-31T16:03:37.763614Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Propose backup to datashard 72075186233409547 txid 281474976710759:0 at schemeshard 72057594046678944
2025-03-31T16:03:37.768129Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProposeCancelTx Complete, at schemeshard: 72057594046678944
2025-03-31T16:03:37.768354Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944
2025-03-31T16:03:37.768405Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TBackup TConfigurePart ProgressState, opId: 281474976710759:0, at schemeshard: 72057594046678944
2025-03-31T16:03:37.768554Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Propose backup to datashard 72075186233409547 txid 281474976710759:0 at schemeshard 72057594046678944
2025-03-31T16:03:37.769118Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvCancelTxResult: Cookie: 102, at schemeshard: 72057594046678944
2025-03-31T16:03:37.769240Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: Status: StatusAccepted Result: "Cancelled at SchemeShard" TargetTxId: 281474976710759 TxId: 102
2025-03-31T16:03:37.769947Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710759:0 from tablet: 72057594046678944 to tablet: 72075186233409547 cookie: 72057594046678944:2 msg type: 269549568
2025-03-31T16:03:37.770148Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710759, partId: 0, tablet: 72075186233409547
2025-03-31T16:03:37.774927Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710759:0 from tablet: 72057594046678944 to tablet: 72075186233409547 cookie: 72057594046678944:2 msg type: 269549568
2025-03-31T16:03:37.776152Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult
2025-03-31T16:03:37.776273Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [4:565:2524]
TestWaitNotification: OK eventTxId 102
AUDIT LOG buffer(7):
2025-03-31T16:03:36.906104Z: component=schemeshard, tx_id=1, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=ALTER DATABASE, paths=[//MyRoot], status=SUCCESS, detailed_status=StatusAccepted
2025-03-31T16:03:36.962736Z: component=schemeshard, tx_id=101, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=CREATE TABLE, paths=[/MyRoot/Table], status=SUCCESS, detailed_status=StatusAccepted
2025-03-31T16:03:37.360863Z: component=schemeshard, id=102, uid=foo, remote_address=127.0.0.1, subject=user@builtin, sanitized_token={none}, database=/MyRoot, operation=EXPORT START, status=SUCCESS, detailed_status=SUCCESS, export_type=s3, export_item_count=1, export_s3_bucket={none}, export_s3_prefix={none}
2025-03-31T16:03:37.367777Z: component=schemeshard, tx_id=281474976710757, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=CREATE DIRECTORY, paths=[/MyRoot/export-102], status=SUCCESS, detailed_status=StatusAccepted
2025-03-31T16:03:37.410779Z: component=schemeshard, tx_id=281474976710758, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=CREATE TABLE COPY FROM, paths=[/MyRoot/export-102/0], status=SUCCESS, detailed_status=StatusAccepted
2025-03-31T16:03:37.759184Z: component=schemeshard, tx_id=281474976710759, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=BACKUP TABLE, paths=[/MyRoot/export-102/0], status=SUCCESS, detailed_status=StatusAccepted
2025-03-31T16:03:37.769537Z: component=schemeshard, id=102, uid=foo, remote_address=127.0.0.1, subject=user@builtin, sanitized_token={none}, database=/MyRoot, operation=EXPORT END, status=ERROR, detailed_status=CANCELLED, reason=Cancelled manually, export_type=s3, export_item_count=1, export_s3_bucket={none}, export_s3_prefix={none}, start_time=2025-03-31T16:03:36.956156Z, end_time=2025-03-31T16:04:07.005156Z
AUDIT LOG checked line:
2025-03-31T16:03:37.769537Z: component=schemeshard, id=102, uid=foo, remote_address=127.0.0.1, subject=user@builtin, sanitized_token={none}, database=/MyRoot, operation=EXPORT END, status=ERROR, detailed_status=CANCELLED, reason=Cancelled manually, export_type=s3, export_item_count=1, export_s3_bucket={none}, export_s3_prefix={none}, start_time=2025-03-31T16:03:36.956156Z, end_time=2025-03-31T16:04:07.005156Z
>> TExportToS3Tests::SchemaMappingEncryptionIncorrectKey [GOOD]
>> TBackupTests::ShouldSucceedOnMultiShardTable[Zstd] [GOOD]
>> TBackupTests::BackupUuidColumn[Raw] [GOOD]
|74.0%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_table/test-results/unittest/{meta.json ... results_accumulator.log}
>> TBackupTests::ShouldSucceedOnSingleShardTable[Zstd] [GOOD]
|74.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects
|74.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects
|74.1%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnMultiShardTable[Raw] [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141]
2025-03-31T16:03:38.125745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-03-31T16:03:38.125862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-31T16:03:38.125903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-03-31T16:03:38.125938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-03-31T16:03:38.125985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-03-31T16:03:38.126015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-03-31T16:03:38.126074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-31T16:03:38.126185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-03-31T16:03:38.126689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-03-31T16:03:38.225867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-31T16:03:38.225951Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:03:38.231807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-03-31T16:03:38.231925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-03-31T16:03:38.232103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-03-31T16:03:38.238840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-03-31T16:03:38.239112Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-03-31T16:03:38.239801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-03-31T16:03:38.240053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-03-31T16:03:38.243507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-03-31T16:03:38.244772Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-31T16:03:38.244856Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-31T16:03:38.245050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-03-31T16:03:38.245100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-31T16:03:38.245141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-03-31T16:03:38.245339Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-03-31T16:03:38.253954Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062]
2025-03-31T16:03:38.391310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-03-31T16:03:38.391589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:03:38.391864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-03-31T16:03:38.392147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-03-31T16:03:38.392210Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:03:38.395497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-03-31T16:03:38.395682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-03-31T16:03:38.396017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:03:38.396091Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-03-31T16:03:38.396139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-03-31T16:03:38.396203Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-03-31T16:03:38.398895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:03:38.398974Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-03-31T16:03:38.399016Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-03-31T16:03:38.402430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:03:38.402497Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:38.402570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:38.402658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:03:38.407900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:03:38.411408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:03:38.411742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:03:38.413000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:38.413199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:03:38.413275Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:38.413581Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:03:38.413636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:38.413854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:03:38.413953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:03:38.424482Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:38.424568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:38.424808Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:38.424856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:03:38.425144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:38.425209Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:03:38.425349Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:03:38.425393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:38.425427Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:03:38.425455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:38.425487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:03:38.425529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:38.425592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:03:38.425628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:03:38.425717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:03:38.425765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:03:38.425801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:03:38.428287Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:03:38.428426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:03:38.428466Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
: 638 content-md5: Myp3UygaBNGp6+7AMgyRnQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 638 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-03-31T16:03:39.010250Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:39.010306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T16:03:39.010628Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:39.010686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-03-31T16:03:39.011341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:03:39.011403Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 REQUEST: PUT /data_01.csv HTTP/1.1 HEADERS: Host: localhost:28105 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 5796E5EB-16DE-401A-AE33-0C81EBD108E1 amz-sdk-request: attempt=1 content-length: 11 content-md5: jsMhyzH+cyrvZpBm0dQVGQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_01.csv / / 11 2025-03-31T16:03:39.014774Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:475:2434], result# PutObjectResult { ETag: 332a7753281a04d1a9ebeec0320c919d } 2025-03-31T16:03:39.014871Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:482:2439], result# PutObjectResult { ETag: 8ec321cb31fe732aef669066d1d41519 } 2025-03-31T16:03:39.014914Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:482:2439], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-03-31T16:03:39.015023Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:474:2433] 2025-03-31T16:03:39.015172Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:475:2434], sender# [1:474:2433], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } 2025-03-31T16:03:39.015317Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:481:2438], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-03-31T16:03:39.022787Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:03:39.022994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:03:39.023043Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-03-31T16:03:39.023158Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-03-31T16:03:39.023216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-03-31T16:03:39.023336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 FAKE_COORDINATOR: Erasing txId 102 REQUEST: PUT /data_00.csv HTTP/1.1 HEADERS: Host: localhost:28105 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 12D48E5C-9A0D-4152-BEDB-9752DB8657ED amz-sdk-request: attempt=1 content-length: 11 content-md5: bj4KQf2rit2DOGLxvSlUww== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv / / 11 2025-03-31T16:03:39.030298Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:475:2434], result# PutObjectResult { ETag: 6e3e0a41fdab8add833862f1bd2954c3 } 2025-03-31T16:03:39.030359Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:475:2434], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-03-31T16:03:39.030561Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:474:2433], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-03-31T16:03:39.039254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-31T16:03:39.066208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 313 RawX2: 4294969593 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-03-31T16:03:39.066304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-03-31T16:03:39.066498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 313 RawX2: 4294969593 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-03-31T16:03:39.066631Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 313 RawX2: 4294969593 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-03-31T16:03:39.066706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:39.066858Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 
72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:39.067729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 322 RawX2: 4294969599 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-03-31T16:03:39.067793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-03-31T16:03:39.067955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 322 RawX2: 4294969599 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-03-31T16:03:39.068037Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 322 RawX2: 4294969599 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-03-31T16:03:39.068105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:39.068147Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:03:39.068189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-03-31T16:03:39.068229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-03-31T16:03:39.068255Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-03-31T16:03:39.068385Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:39.074053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:03:39.077531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:03:39.078092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:03:39.078149Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-03-31T16:03:39.078266Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-31T16:03:39.078337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T16:03:39.078380Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-31T16:03:39.078413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T16:03:39.078453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-03-31T16:03:39.078544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:371:2338] message: TxId: 102 2025-03-31T16:03:39.078598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T16:03:39.078639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-03-31T16:03:39.078671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-03-31T16:03:39.078808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-31T16:03:39.081208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-31T16:03:39.081269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:454:2414] TestWaitNotification: OK eventTxId 102 >> KqpPg::TableSelect+useSink [GOOD] >> KqpPg::TableSelect-useSink |74.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> BuildStatsHistogram::Five_Five_Mixed [GOOD] >> BuildStatsHistogram::Five_Five_Serial |74.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |74.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpPg::InsertFromSelect_NoReorder-useSink [GOOD] >> KqpPg::InsertFromSelect_Serial+useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnMultiShardTable[Zstd] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:03:39.201585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:03:39.201689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:03:39.201728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:03:39.201760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:03:39.201801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:03:39.201831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:03:39.201888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:03:39.201987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, 
CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:03:39.202371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:03:39.302878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:03:39.302960Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:39.308964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:03:39.309074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:03:39.309238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:03:39.316922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:03:39.317132Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:03:39.317776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:39.318007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:03:39.320629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:39.321759Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:39.321832Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:39.322026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:03:39.322077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:39.322114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:03:39.322249Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:03:39.329262Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:03:39.464900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:03:39.465137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:39.465355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:03:39.465556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:03:39.465609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:39.469742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 
72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:39.469904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:03:39.470136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:39.470204Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:03:39.470245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:03:39.470319Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:03:39.476140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:39.476218Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:03:39.476261Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:03:39.482120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:39.482203Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:39.482273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:39.482330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:03:39.486070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:03:39.488206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:03:39.488446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:03:39.489484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:39.489616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:03:39.489671Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:39.489943Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:03:39.489993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:39.490173Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:03:39.490250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:03:39.492415Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:39.492458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:39.492601Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:39.492634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:03:39.492860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:39.492917Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:03:39.493035Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:03:39.493073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:39.493111Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:03:39.493139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:39.493176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:03:39.493213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:39.493267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:03:39.493299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:03:39.493365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:03:39.493402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:03:39.493426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:03:39.495425Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:03:39.495529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:03:39.495564Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
d DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T16:03:40.043785Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:40.043842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:22887 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 58952904-4B99-4529-814B-393515249F08 amz-sdk-request: attempt=1 content-length: 638 content-md5: Myp3UygaBNGp6+7AMgyRnQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 638 2025-03-31T16:03:40.044430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:03:40.044482Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 REQUEST: PUT /data_01.csv.zst HTTP/1.1 HEADERS: Host: localhost:22887 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 4655C20C-1BCC-4F7C-B937-23562174C08E amz-sdk-request: attempt=1 content-length: 20 content-md5: 2025-03-31T16:03:40.048018Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:475:2434], result# PutObjectResult { ETag: 332a7753281a04d1a9ebeec0320c919d } 8NOHH1ycwPXC5K+v+37u8g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_01.csv.zst / / 20 2025-03-31T16:03:40.048153Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:474:2433] 2025-03-31T16:03:40.048266Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:475:2434], sender# [1:474:2433], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } 2025-03-31T16:03:40.048443Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:03:40.048575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:03:40.048636Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-03-31T16:03:40.048682Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-03-31T16:03:40.048718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-03-31T16:03:40.048822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-03-31T16:03:40.049028Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:482:2439], result# 
PutObjectResult { ETag: f0d3871f5c9cc0f5c2e4afaffb7eeef2 } 2025-03-31T16:03:40.049134Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:482:2439], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-03-31T16:03:40.049406Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:481:2438], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } REQUEST: PUT /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:22887 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 84A2AAAD-EB42-4ECC-A15C-117032C1E580 amz-sdk-request: attempt=1 content-length: 20 content-md5: 2qFn9G0TW8wfvJ9C+A5Jbw== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv.zst / / 20 2025-03-31T16:03:40.057514Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:475:2434], result# PutObjectResult { ETag: daa167f46d135bcc1fbc9f42f80e496f } 2025-03-31T16:03:40.057566Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:475:2434], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-03-31T16:03:40.057920Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:474:2433], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 FAKE_COORDINATOR: Erasing txId 102 2025-03-31T16:03:40.068983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-31T16:03:40.094341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 313 RawX2: 4294969593 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-03-31T16:03:40.094415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-03-31T16:03:40.094596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 313 RawX2: 4294969593 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-03-31T16:03:40.094702Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 313 RawX2: 4294969593 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-03-31T16:03:40.094763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:40.094971Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 
72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:40.095523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 322 RawX2: 4294969599 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-03-31T16:03:40.095578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-03-31T16:03:40.095765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 322 RawX2: 4294969599 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-03-31T16:03:40.095838Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 322 RawX2: 4294969599 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-03-31T16:03:40.095877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:40.095909Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:03:40.095943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-03-31T16:03:40.095978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-03-31T16:03:40.096010Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-03-31T16:03:40.096147Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:40.099662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:03:40.100102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:03:40.100481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:03:40.100547Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-03-31T16:03:40.100664Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-31T16:03:40.100718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T16:03:40.100782Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-31T16:03:40.100835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T16:03:40.100876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-03-31T16:03:40.100944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:371:2338] message: TxId: 102 2025-03-31T16:03:40.100991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T16:03:40.101033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-03-31T16:03:40.101073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-03-31T16:03:40.101209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-31T16:03:40.102937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-31T16:03:40.102980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:454:2414] TestWaitNotification: OK eventTxId 102 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::BackupUuidColumn[Raw] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:03:39.409908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:03:39.410008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:03:39.410048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:03:39.410084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:03:39.410126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:03:39.410156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:03:39.410210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:03:39.410279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:03:39.410671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:03:39.489958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:03:39.490006Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:39.494576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:03:39.494669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 
2025-03-31T16:03:39.494829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:03:39.499435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:03:39.499643Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:03:39.500333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:39.500571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:03:39.503394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:39.504441Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:39.504516Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:39.504725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:03:39.504775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:39.504814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:03:39.504945Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:03:39.514544Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:03:39.653789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:03:39.654026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:39.654229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:03:39.654473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:03:39.654531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:39.660290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:39.660458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:03:39.660661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:39.660721Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: 
TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:03:39.660755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:03:39.660813Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:03:39.662896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:39.662969Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:03:39.663017Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:03:39.665126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:39.665190Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:39.665259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:39.665308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:03:39.669064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:03:39.671408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:03:39.671621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:03:39.672727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:39.672905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:03:39.672971Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:39.673267Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:03:39.673327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:39.673500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:03:39.673580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:03:39.676983Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 
72057594046678944 2025-03-31T16:03:39.677047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:39.677219Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:39.677260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:03:39.677432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:39.677479Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:03:39.677580Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:03:39.677616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:39.677643Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:03:39.677665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:39.677697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:03:39.677741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:39.677771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:03:39.677792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:03:39.677850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:03:39.677876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:03:39.677898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:03:39.679650Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:03:39.679741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:03:39.679777Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
SHARD INFO: TBackup TPropose, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:03:39.979170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2025-03-31T16:03:39.979357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:03:39.981770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-03-31T16:03:39.981910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2025-03-31T16:03:39.982331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:39.982460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:03:39.982528Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000003, at schemeshard: 72057594046678944 2025-03-31T16:03:39.982680Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2025-03-31T16:03:39.982815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-31T16:03:40.028539Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Bootstrap: self# [1:414:2385], attempt# 0 2025-03-31T16:03:40.046412Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:414:2385], sender# [1:413:2382] FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-03-31T16:03:40.049953Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:40.050010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T16:03:40.050294Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:40.050404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-03-31T16:03:40.050871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:03:40.050924Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 2025-03-31T16:03:40.052040Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle 
TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:03:40.052149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:03:40.052190Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-03-31T16:03:40.052249Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-03-31T16:03:40.052296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-31T16:03:40.052382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:11663 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: AF219B97-C38A-40B9-9A33-EFE82ED6C36C amz-sdk-request: attempt=1 content-length: 61 content-md5: 5ZuHSMjV1bVKZhThhMGD5g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 61 2025-03-31T16:03:40.056261Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:414:2385], result# PutObjectResult { ETag: e59b8748c8d5d5b54a6614e184c183e6 } 2025-03-31T16:03:40.058047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:11663 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: F476AB3F-58C5-4E18-BAE5-7740287D944E amz-sdk-request: attempt=1 content-length: 357 content-md5: IxJB3qM/y2xlsv8qcwTF7g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2025-03-31T16:03:40.060829Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:414:2385], result# PutObjectResult { ETag: 231241dea33fcb6c65b2ff2a7304c5ee } 2025-03-31T16:03:40.060920Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:413:2382] 2025-03-31T16:03:40.061012Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:414:2385], sender# [1:413:2382], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } REQUEST: PUT /data_00.csv HTTP/1.1 HEADERS: Host: localhost:11663 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 9DC95A7B-086E-4E43-A8CC-D28D5D774C5D amz-sdk-request: attempt=1 content-length: 39 content-md5: GLX1nc5/cKhlAfxBHlykQA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv / / 39 2025-03-31T16:03:40.063484Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData 
TEvExternalStorage::TEvPutObjectResponse: self# [1:414:2385], result# PutObjectResult { ETag: 18b5f59dce7f70a86501fc411e5ca440 } 2025-03-31T16:03:40.063532Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:414:2385], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-03-31T16:03:40.063660Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:413:2382], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-03-31T16:03:40.082171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 305 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2025-03-31T16:03:40.082237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-03-31T16:03:40.082413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 305 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2025-03-31T16:03:40.082524Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 305 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2025-03-31T16:03:40.082590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:40.082625Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:03:40.082662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-03-31T16:03:40.082699Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-03-31T16:03:40.082867Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:40.085153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:03:40.085581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:03:40.085656Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-03-31T16:03:40.085756Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-31T16:03:40.085792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T16:03:40.085834Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 
2025-03-31T16:03:40.085885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T16:03:40.085930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-03-31T16:03:40.086003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:333:2312] message: TxId: 102 2025-03-31T16:03:40.086063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T16:03:40.086107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-03-31T16:03:40.086153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-03-31T16:03:40.086267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-31T16:03:40.088508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-31T16:03:40.088570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:397:2369] TestWaitNotification: OK eventTxId 102 |74.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::OltpWriteRow+isSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::SchemaMappingEncryptionIncorrectKey [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:03:32.821282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:03:32.821378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:03:32.821493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:03:32.821534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:03:32.821579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:03:32.821609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:03:32.821684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:03:32.821758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:03:32.822116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:03:32.925014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:03:32.925102Z node 1 :IMPORT WARN: Table profiles were 
not loaded 2025-03-31T16:03:32.930408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:03:32.930532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:03:32.930708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:03:32.940062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:03:32.940298Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:03:32.940999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:32.941267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:03:32.943585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:32.944835Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:32.944899Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:32.945080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:03:32.945141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:32.945189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:03:32.945374Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:03:32.952512Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:03:33.107961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:03:33.108218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:33.108431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:03:33.108675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:03:33.108744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:33.116228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:33.116385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:03:33.116624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:33.116697Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:03:33.116732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:03:33.116766Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:03:33.120036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:33.120132Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:03:33.120178Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:03:33.124351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:33.124406Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:33.124454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:33.124522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:03:33.128488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:03:33.130927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:03:33.131232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:03:33.132430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:33.132617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:03:33.132673Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:33.132984Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:03:33.133042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:33.133235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:03:33.133345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:03:33.136671Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:33.136745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:33.136931Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:33.136971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:03:33.137203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:33.137247Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:03:33.137337Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:03:33.137370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:33.137441Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:03:33.137504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:33.137546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:03:33.137583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:33.137633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:03:33.137666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:03:33.137730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:03:33.137770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:03:33.137804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:03:33.139590Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:03:33.139694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:03:33.139731Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
85Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710758 2025-03-31T16:03:39.749048Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2025-03-31T16:03:39.749110Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-03-31T16:03:39.750543Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-03-31T16:03:39.750651Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-03-31T16:03:39.750687Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710758 2025-03-31T16:03:39.750724Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 3 2025-03-31T16:03:39.750763Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-03-31T16:03:39.750859Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 0/1, is published: true 2025-03-31T16:03:39.761955Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-03-31T16:03:39.762125Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710758, at schemeshard: 72057594046678944 2025-03-31T16:03:39.762180Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 0/1, is published: true 2025-03-31T16:03:39.762261Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710758, at schemeshard: 72057594046678944 2025-03-31T16:03:39.763010Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710758:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710758 msg type: 269090816 2025-03-31T16:03:39.763220Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710758, partId: 4294967295, tablet: 72057594046316545 2025-03-31T16:03:39.763609Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 FAKE_COORDINATOR: Add transaction: 281474976710758 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710758 at step: 5000005 2025-03-31T16:03:39.764062Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:39.764221Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710758 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 17179871340 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:03:39.764285Z node 4 :FLAT_TX_SCHEMESHARD INFO: 
TRmDir HandleReply TEvOperationPlan, opId: 281474976710758:0, step: 5000005, at schemeshard: 72057594046678944 2025-03-31T16:03:39.764419Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710758:0, at schemeshard: 72057594046678944 2025-03-31T16:03:39.764492Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710758:0 progress is 1/1 2025-03-31T16:03:39.764555Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-03-31T16:03:39.764612Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710758:0 progress is 1/1 2025-03-31T16:03:39.764658Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-03-31T16:03:39.764724Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-03-31T16:03:39.764815Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-03-31T16:03:39.764857Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 1/1, is published: false 2025-03-31T16:03:39.764919Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-03-31T16:03:39.764966Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710758:0 2025-03-31T16:03:39.765005Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710758:0 2025-03-31T16:03:39.765081Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-03-31T16:03:39.765131Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710758, publications: 2, subscribers: 1 2025-03-31T16:03:39.765181Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710758, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2025-03-31T16:03:39.765223Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710758, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-03-31T16:03:39.771974Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 FAKE_COORDINATOR: Erasing txId 281474976710758 2025-03-31T16:03:39.777724Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:39.777774Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:39.777947Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-03-31T16:03:39.778122Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:39.778164Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:207:2209], at schemeshard: 72057594046678944, txId: 281474976710758, path id: 1 2025-03-31T16:03:39.778211Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:207:2209], at schemeshard: 72057594046678944, txId: 281474976710758, path id: 4 2025-03-31T16:03:39.779003Z 
node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-03-31T16:03:39.779149Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-03-31T16:03:39.779195Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710758 2025-03-31T16:03:39.779253Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-03-31T16:03:39.779315Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-03-31T16:03:39.780264Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-03-31T16:03:39.780361Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-03-31T16:03:39.780395Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710758 2025-03-31T16:03:39.780428Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-03-31T16:03:39.780460Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-03-31T16:03:39.780550Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710758, subscribers: 1 2025-03-31T16:03:39.780608Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [4:123:2149] 2025-03-31T16:03:39.781047Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-31T16:03:39.781103Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-03-31T16:03:39.781184Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-03-31T16:03:39.788959Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-03-31T16:03:39.795147Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-03-31T16:03:39.795329Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710758 2025-03-31T16:03:39.795432Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710758 2025-03-31T16:03:39.795514Z node 4 :EXPORT DEBUG: 
TExport::TTxProgress: DoExecute 2025-03-31T16:03:39.795562Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710758 2025-03-31T16:03:39.795608Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710758, id# 103, itemIdx# 4294967295 2025-03-31T16:03:39.796005Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-03-31T16:03:39.802714Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete TestWaitNotification wait txId: 103 2025-03-31T16:03:39.803096Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-03-31T16:03:39.803158Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-03-31T16:03:39.803685Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-03-31T16:03:39.803813Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-03-31T16:03:39.803865Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [4:554:2513] TestWaitNotification: OK eventTxId 103 >> KqpCost::IndexLookup-useSink >> KqpCost::IndexLookup+useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnSingleShardTable[Zstd] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:03:39.875775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:03:39.875899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:03:39.875946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:03:39.875987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:03:39.876039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:03:39.876071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:03:39.876135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:03:39.876220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:03:39.876649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:03:39.970671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:03:39.970935Z 
node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:39.979362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:03:39.980431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:03:39.980767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:03:39.988446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:03:39.988790Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:03:39.990074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:39.990450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:03:39.993643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:39.995561Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:39.995671Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:39.995895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:03:39.995978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:39.996054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:03:39.996396Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:03:40.016091Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:03:40.159774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:03:40.159997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:40.160205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:03:40.160485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:03:40.160590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:40.167346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:40.167571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:03:40.167824Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:40.167908Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:03:40.167954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:03:40.168022Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:03:40.172385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:40.172484Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:03:40.172527Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:03:40.174916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:40.174981Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:40.175059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:40.175174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:03:40.179261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:03:40.181571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:03:40.181820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:03:40.183027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:40.183328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:03:40.183399Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:40.183751Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:03:40.183818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:40.184034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:03:40.184122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : 
[OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:03:40.186648Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:40.186701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:40.186934Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:40.186976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:03:40.187220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:40.187272Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:03:40.187390Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:03:40.187453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:40.187510Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:03:40.187545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:40.187598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:03:40.187642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:40.187677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:03:40.187708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:03:40.187784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:03:40.187826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:03:40.187860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:03:40.190402Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:03:40.190543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:03:40.190589Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
NFO: TBackup TPropose, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:03:40.537899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2025-03-31T16:03:40.538041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:03:40.540540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-03-31T16:03:40.540672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2025-03-31T16:03:40.541121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:40.541244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:03:40.541300Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000003, at schemeshard: 72057594046678944 2025-03-31T16:03:40.541434Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2025-03-31T16:03:40.541563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-31T16:03:40.553377Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Bootstrap: self# [1:410:2381], attempt# 0 2025-03-31T16:03:40.585046Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:410:2381], sender# [1:409:2380] REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:15542 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: C3950FDA-9B92-4EFC-85B0-BA5F202FC34D amz-sdk-request: attempt=1 content-length: 61 content-md5: 5ZuHSMjV1bVKZhThhMGD5g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 61 2025-03-31T16:03:40.593297Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:410:2381], result# PutObjectResult { ETag: e59b8748c8d5d5b54a6614e184c183e6 } FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-03-31T16:03:40.597209Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:40.597313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 
2025-03-31T16:03:40.597662Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:40.597712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-03-31T16:03:40.598305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:03:40.598381Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 2025-03-31T16:03:40.599766Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:03:40.599891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:03:40.599937Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-03-31T16:03:40.600006Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-03-31T16:03:40.600064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-31T16:03:40.600179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:15542 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 6EB20605-37F9-4411-9F72-194954B6D8E4 amz-sdk-request: attempt=1 content-length: 357 content-md5: csvC5nqNTZsSLy4ymlp0/Q== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2025-03-31T16:03:40.609654Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:410:2381], result# PutObjectResult { ETag: 72cbc2e67a8d4d9b122f2e329a5a74fd } 2025-03-31T16:03:40.609831Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:409:2380] 2025-03-31T16:03:40.610030Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:410:2381], sender# [1:409:2380], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } REQUEST: PUT /data_00.csv.zst HTTP/1.1 2025-03-31T16:03:40.614209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 HEADERS: Host: localhost:15542 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 4888831C-80F7-43F5-A304-41A867080C7C amz-sdk-request: attempt=1 content-length: 20 content-md5: 2qFn9G0TW8wfvJ9C+A5Jbw== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv.zst / / 20 2025-03-31T16:03:40.618896Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData
TEvExternalStorage::TEvPutObjectResponse: self# [1:410:2381], result# PutObjectResult { ETag: daa167f46d135bcc1fbc9f42f80e496f } 2025-03-31T16:03:40.618978Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:410:2381], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-03-31T16:03:40.619195Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:409:2380], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-03-31T16:03:40.632568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 305 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-03-31T16:03:40.632647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-03-31T16:03:40.632843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 305 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-03-31T16:03:40.633050Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 305 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-03-31T16:03:40.633126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:40.633166Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:03:40.633210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-03-31T16:03:40.633259Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-03-31T16:03:40.633424Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:40.636179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:03:40.636637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:03:40.636701Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-03-31T16:03:40.636828Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-31T16:03:40.636872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T16:03:40.636931Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 
2025-03-31T16:03:40.636978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T16:03:40.637018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-03-31T16:03:40.637096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:331:2310] message: TxId: 102 2025-03-31T16:03:40.637150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T16:03:40.637192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-03-31T16:03:40.637229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-03-31T16:03:40.637363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-31T16:03:40.639774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-31T16:03:40.639850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:395:2367] TestWaitNotification: OK eventTxId 102 |74.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |74.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpLimits::ComputeNodeMemoryLimit [GOOD] >> KqpLimits::DataShardReplySizeExceeded >> KqpCost::ScanQueryRangeFullScan+SourceRead >> TSchemeShardTTLTestsWithReboots::CopyTable [GOOD] >> TVersions::Wreck2Reverse [GOOD] >> TVersions::Wreck1 >> KqpCost::IndexLookupAndTake+useSink |74.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |74.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |74.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::QuerySeviceRangeFullScan |74.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTestsWithReboots::CopyTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [1:135:2157] sender: [1:136:2058] recipient: [1:111:2142] 2025-03-31T16:01:25.620266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:01:25.620362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 
15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:01:25.620406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:01:25.620439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:01:25.620481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:01:25.620510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:01:25.620567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:01:25.620647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:01:25.621047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:01:25.722528Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-03-31T16:01:25.722610Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:169:2058] recipient: [1:15:2062] 2025-03-31T16:01:25.741465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:01:25.741635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:01:25.741883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:01:25.754544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:01:25.754801Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:01:25.755499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:25.755769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:01:25.767001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:25.768644Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:25.768718Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:25.768799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:01:25.768843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:25.768886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 
2025-03-31T16:01:25.769022Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:210:2058] recipient: [1:208:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:210:2058] recipient: [1:208:2209] Leader for TabletID 72057594037968897 is [1:214:2213] sender: [1:215:2058] recipient: [1:208:2209] 2025-03-31T16:01:25.788130Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:235:2058] recipient: [1:15:2062] 2025-03-31T16:01:25.946205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:01:25.946458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:25.946705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:01:25.946993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:01:25.947131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:25.952707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:25.952967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:01:25.953177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:25.953233Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:01:25.953269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:01:25.953307Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:01:25.956237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:25.956318Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:01:25.956378Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:01:25.959045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:25.959141Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:25.959201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:25.959275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:01:25.963067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:01:25.965820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:01:25.966010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:135:2157] sender: [1:250:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:01:25.967368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:01:25.967522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 135 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:01:25.967576Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:25.967898Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:01:25.967953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:01:25.968139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:01:25.968222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:01:25.992616Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:01:25.992691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:01:25.992879Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:01:25.992919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:202:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:01:25.993294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:01:25.993357Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:01:25.993486Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:01:25.993539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:01:25.993576Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 
1/1 2025 ... 5596Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 0 2025-03-31T16:03:43.525716Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Source { RawX1: 330 RawX2: 416611830029 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2025-03-31T16:03:43.525760Z node 97 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-03-31T16:03:43.525835Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 330 RawX2: 416611830029 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2025-03-31T16:03:43.525888Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:43.525924Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged CollectSchemaChanged: false 2025-03-31T16:03:43.527239Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-03-31T16:03:43.527349Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-03-31T16:03:43.534119Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2025-03-31T16:03:43.534688Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 3 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1375 } } 2025-03-31T16:03:43.534741Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409547, partId: 0 2025-03-31T16:03:43.534892Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 3 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1375 } } 2025-03-31T16:03:43.535000Z node 97 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 3 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1375 } } 2025-03-31T16:03:43.536089Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 436 RawX2: 416611830118 } Origin: 72075186233409547 State: 2 TxId: 1003 Step: 0 Generation: 2 2025-03-31T16:03:43.536146Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409547, partId: 0 2025-03-31T16:03:43.536287Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Source { RawX1: 436 RawX2: 416611830118 } Origin: 72075186233409547 State: 2 TxId: 1003 Step: 0 Generation: 2 2025-03-31T16:03:43.536333Z node 97 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-03-31T16:03:43.536412Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 436 RawX2: 416611830118 } Origin: 72075186233409547 State: 2 TxId: 1003 Step: 0 Generation: 2 2025-03-31T16:03:43.536459Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:43.536491Z node 97 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1003:0, at schemeshard: 72057594046678944 2025-03-31T16:03:43.536527Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-03-31T16:03:43.536571Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-03-31T16:03:43.536601Z node 97 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 129 -> 240 2025-03-31T16:03:43.539533Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2025-03-31T16:03:43.540985Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2025-03-31T16:03:43.541614Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-03-31T16:03:43.541678Z node 97 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 1003:0ProgressState, operation type TxCopyTable 2025-03-31T16:03:43.541746Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 1003:0, name: CopyTableBarrier, done: 0, blocked: 1, parts count: 1 2025-03-31T16:03:43.541785Z node 97 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 1003, done: 0, blocked: 1 2025-03-31T16:03:43.541858Z node 97 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 1003:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 1003 Name: CopyTableBarrier }, at tablet# 72057594046678944 2025-03-31T16:03:43.541898Z node 97 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 240 -> 240 2025-03-31T16:03:43.547179Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-03-31T16:03:43.547245Z node 97 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:0 ProgressState 2025-03-31T16:03:43.547352Z node 97 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2025-03-31T16:03:43.547387Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-03-31T16:03:43.547425Z node 97 
:FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2025-03-31T16:03:43.547455Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-03-31T16:03:43.547491Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2025-03-31T16:03:43.547536Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-03-31T16:03:43.547578Z node 97 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2025-03-31T16:03:43.547610Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2025-03-31T16:03:43.547766Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-03-31T16:03:43.547807Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2025-03-31T16:03:43.550522Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-03-31T16:03:43.550604Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-03-31T16:03:43.551008Z node 97 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-03-31T16:03:43.551139Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-03-31T16:03:43.551177Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [97:532:2493] TestWaitNotification: OK eventTxId 1003 2025-03-31T16:03:43.551685Z node 97 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTableCopy" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:03:43.551896Z node 97 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTableCopy" took 250us result status StatusSuccess 2025-03-31T16:03:43.552363Z node 97 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTableCopy" PathDescription { Self { Name: "TTLEnabledTableCopy" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTableCopy" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 
0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> KqpCost::ScanScriptingRangeFullScan-SourceRead
>> TExportToS3Tests::CancelUponTransferringManyTablesShouldSucceed [GOOD]
>> KqpInplaceUpdate::SingleRowStr+UseSink
>> KqpImmediateEffects::ForceImmediateEffectsExecution+UseSink
>> KqpEffects::InsertAbort_Select_Duplicates+UseSink
>> KqpWrite::Insert
------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THiveTest::TestCreateExternalTablet [GOOD]
Test command err:
2025-03-31T15:54:09.191482Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap
2025-03-31T15:54:09.246800Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 }
2025-03-31T15:54:09.247711Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:196} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false
2025-03-31T15:54:09.251607Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false
2025-03-31T15:54:09.257903Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:265} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1
2025-03-31T15:54:09.258112Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0
2025-03-31T15:54:09.262946Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:30:2075] ControllerId# 72057594037932033
2025-03-31T15:54:09.263131Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode
2025-03-31T15:54:09.263612Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295
2025-03-31T15:54:09.269607Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler
2025-03-31T15:54:09.469580Z node 1 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02
2025-03-31T15:54:09.469827Z node 1 :BS_PROXY
NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-03-31T15:54:09.498571Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:37:2080] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:09.498790Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:38:2081] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:09.498950Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:39:2082] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:09.499113Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:40:2083] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:09.499269Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:41:2084] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:09.499383Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:42:2085] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:09.499502Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:43:2086] targetNodeId# 1 Marker# DSP01 2025-03-31T15:54:09.499529Z node 1 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-03-31T15:54:09.499609Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [1:30:2075] 2025-03-31T15:54:09.499637Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [1:30:2075] 2025-03-31T15:54:09.499684Z node 1 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-03-31T15:54:09.500406Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-03-31T15:54:09.511376Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-03-31T15:54:09.512485Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-03-31T15:54:09.550875Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] ::Bootstrap [1:20:2063] 2025-03-31T15:54:09.550919Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] lookup [1:20:2063] 2025-03-31T15:54:09.827633Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:30:2075] 2025-03-31T15:54:09.827834Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-03-31T15:54:09.828203Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-03-31T15:54:09.828561Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2025-03-31T15:54:09.840874Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037936129 entry.State: StInit ev: {EvForward TabletID: 72057594037936129 Ev: nullptr Flags: 1:2:0} 2025-03-31T15:54:09.843803Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] queue send [1:20:2063] 2025-03-31T15:54:09.844001Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:30:2075] 2025-03-31T15:54:09.844052Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-03-31T15:54:09.844085Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-03-31T15:54:09.860560Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-03-31T15:54:09.861356Z node 1 
:STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2025-03-31T15:54:09.861609Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-03-31T15:54:09.861989Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2025-03-31T15:54:09.863553Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-03-31T15:54:09.864833Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-03-31T15:54:09.865341Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 268639248 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2025-03-31T15:54:09.865766Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-03-31T15:54:09.866397Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [1:50:2091] 2025-03-31T15:54:09.866433Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [1:50:2091] 2025-03-31T15:54:09.868646Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2025-03-31T15:54:09.868710Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [1:50:2091] 2025-03-31T15:54:09.868778Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-03-31T15:54:09.869113Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StInit ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-03-31T15:54:09.869211Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-03-31T15:54:09.870123Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72057594037932033 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72057594037932033 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2025-03-31T15:54:09.870399Z node 1 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72057594037932033 followers: 0 2025-03-31T15:54:09.871238Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] queue send [1:20:2063] 2025-03-31T15:54:09.871304Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] forward result error, check reconnect [1:30:2075] 2025-03-31T15:54:09.871904Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] schedule retry [1:30:2075] 2025-03-31T15:54:09.881853Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037936129 Cookie: 0 ProxyOptions: SigNone} 2025-03-31T15:54:09.881962Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 2} 2025-03-31T15:54:09.883059Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: 
{EvReplicaLookup TabletID: 72057594037936129 Cookie: 0} 2025-03-31T15:54:09.883174Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 1} 2025-03-31T15:54:09.883323Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2025-03-31T15:54:09.884117Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:30:2075] 2025-03-31T15:54:09.884584Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037927937 Cookie: 0 ProxyOptions: SigNone} 2025-03-31T15:54:09.884667Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 2} 2025-03-31T15:54:09.884704Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2025-03-31T15:54:09.889145Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 0} 2025-03-31T15:54:09.889218Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 1} 2025-03-31T15:54:09.889479Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2025-03-31T15:54:09.889586Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72057594037936129 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72057594037936129 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2025-03-31T15:54:09.889625Z node 1 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72057594037936129 followers: 0 2025-03-31T15:54:09.889695Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] forward result error, check reconnect [1:20:2063] 2025-03-31T15:54:09.889730Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] connect failed [1:20:2063] 2025-03-31T15:54:09.889828Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037927937} 2025-03-31T15:54:09.889935Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 2 2025-03-31T15:54:09.889959Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-03-31T15:54:09.890084Z node 1 :BS_NODE DEBUG: {NWDC18@distconf_binding.cpp:322} UpdateBound RefererNodeId# 1 NodeId# ::1:12001/1 Meta# {Fingerprint: "\371$\224\316I\335\243.)W\014\261m\013\346Osy\0160" } 2025-03-31T15:54:09.890152Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037927937} 2025-03-31T15:54:09.890522Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForI ... 
72057594037927937 entry.State: StInitResolve success: true ev: {EvInfo Status: 0 TabletID: 72057594037927937 Cookie: 0 CurrentLeader: [153:270:2261] CurrentLeaderTablet: [153:277:2265] CurrentGeneration: 2 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 0}} 2025-03-31T16:03:32.169405Z node 153 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 72057594037927937 followers: 0 2025-03-31T16:03:32.169522Z node 153 :TABLET_RESOLVER DEBUG: SelectForward node 153 selfDC leaderDC 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [153:270:2261] 2025-03-31T16:03:32.169658Z node 153 :PIPE_CLIENT DEBUG: TClient[72057594037927937] forward result local node, try to connect [153:267:2260] 2025-03-31T16:03:32.169751Z node 153 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [153:267:2260] 2025-03-31T16:03:32.169931Z node 153 :PIPE_SERVER DEBUG: [72057594037927937] Accept Connect Originator# [153:267:2260] 2025-03-31T16:03:32.170155Z node 153 :PIPE_CLIENT DEBUG: TClient[72057594037927937] connected with status OK role: Leader [153:267:2260] 2025-03-31T16:03:32.170246Z node 153 :PIPE_CLIENT DEBUG: TClient[72057594037927937] send queued [153:267:2260] 2025-03-31T16:03:32.170322Z node 153 :PIPE_CLIENT DEBUG: TClient[72057594037927937] push event to server [153:267:2260] 2025-03-31T16:03:32.170421Z node 153 :PIPE_CLIENT DEBUG: TClient[72057594037927937] shutdown pipe due to pending shutdown request [153:267:2260] 2025-03-31T16:03:32.170491Z node 153 :PIPE_CLIENT DEBUG: TClient[72057594037927937] notify reset [153:267:2260] 2025-03-31T16:03:32.170633Z node 153 :PIPE_SERVER DEBUG: [72057594037927937] HandleSend Sender# [153:266:2259] EventType# 268697601 2025-03-31T16:03:32.170935Z node 153 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:4} Tx{4, NKikimr::NHive::TTxCreateTablet} queued, type NKikimr::NHive::TTxCreateTablet 2025-03-31T16:03:32.171055Z node 153 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:4} Tx{4, NKikimr::NHive::TTxCreateTablet} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-31T16:03:32.172035Z node 153 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:4} Tx{4, NKikimr::NHive::TTxCreateTablet} hope 1 -> done Change{4, redo 1157b alter 0b annex 0, ~{ 14, 0, 1, 2 } -{ }, 0 gb} 2025-03-31T16:03:32.172159Z node 153 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:4} Tx{4, NKikimr::NHive::TTxCreateTablet} release 4194304b of static, Memory{0 dyn 0} 2025-03-31T16:03:32.172348Z node 153 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [153:309:2286] 2025-03-31T16:03:32.172394Z node 153 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [153:309:2286] 2025-03-31T16:03:32.172465Z node 153 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [153:309:2286] 2025-03-31T16:03:32.172538Z node 153 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StNormal ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-03-31T16:03:32.172614Z node 153 :TABLET_RESOLVER DEBUG: SelectForward node 153 selfDC leaderDC 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037932033 followers: 0 countLeader 1 allowFollowers 0 winner: [153:94:2122] 2025-03-31T16:03:32.172763Z node 153 :PIPE_CLIENT DEBUG: TClient[72057594037932033] forward result local node, try to connect [153:309:2286] 2025-03-31T16:03:32.172811Z node 153 :PIPE_CLIENT DEBUG: TClient[72057594037932033]::SendEvent [153:309:2286] 
2025-03-31T16:03:32.172912Z node 153 :PIPE_SERVER DEBUG: [72057594037932033] Accept Connect Originator# [153:309:2286] 2025-03-31T16:03:32.173050Z node 153 :PIPE_CLIENT DEBUG: TClient[72057594037932033] connected with status OK role: Leader [153:309:2286] 2025-03-31T16:03:32.173095Z node 153 :PIPE_CLIENT DEBUG: TClient[72057594037932033] send queued [153:309:2286] 2025-03-31T16:03:32.173130Z node 153 :PIPE_CLIENT DEBUG: TClient[72057594037932033] push event to server [153:309:2286] 2025-03-31T16:03:32.173188Z node 153 :PIPE_SERVER DEBUG: [72057594037932033] HandleSend Sender# [153:277:2265] EventType# 268637702 2025-03-31T16:03:32.173483Z node 153 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{28, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} queued, type NKikimr::NBsController::TBlobStorageController::TTxSelectGroups 2025-03-31T16:03:32.173605Z node 153 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{28, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-31T16:03:32.173854Z node 153 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{28, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} hope 1 -> done Change{20, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-03-31T16:03:32.173972Z node 153 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{28, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} release 4194304b of static, Memory{0 dyn 0} 2025-03-31T16:03:32.174335Z node 153 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxUpdateTabletGroups} queued, type NKikimr::NHive::TTxUpdateTabletGroups 2025-03-31T16:03:32.174443Z node 153 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxUpdateTabletGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-31T16:03:32.174940Z node 153 :HIVE NOTICE: HIVE#72057594037927937 THive::TTxUpdateTabletGroups{88923003449280}(72075186224037888)::Execute - TryToBoot was not successfull 2025-03-31T16:03:32.175135Z node 153 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxUpdateTabletGroups} hope 1 -> done Change{5, redo 698b alter 0b annex 0, ~{ 2, 1, 3 } -{ }, 0 gb} 2025-03-31T16:03:32.175255Z node 153 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxUpdateTabletGroups} release 4194304b of static, Memory{0 dyn 0} 2025-03-31T16:03:32.187066Z node 153 :BS_PROXY_PUT INFO: [185eac4b9c06d110] bootstrap ActorId# [153:312:2289] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:4:0:0:698:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-03-31T16:03:32.187302Z node 153 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Id# [72057594037927937:2:4:0:0:698:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-03-31T16:03:32.187368Z node 153 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] restore Id# [72057594037927937:2:4:0:0:698:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-03-31T16:03:32.187441Z node 153 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:4:0:0:698:1] Marker# BPG33 2025-03-31T16:03:32.187489Z node 153 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:4:0:0:698:1] Marker# BPG32 2025-03-31T16:03:32.187651Z node 153 :BS_PROXY DEBUG: Send to queueActorId# [153:35:2079] 
NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:4:0:0:698:1] FDS# 698 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-31T16:03:32.192022Z node 153 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] received {EvVPutResult Status# OK ID# [72057594037927937:2:4:0:0:698:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 18 } Cost# 85496 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 19 }}}} from# [0:1:0:0:0] Marker# BPP01 2025-03-31T16:03:32.192184Z node 153 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Result# TEvPutResult {Id# [72057594037927937:2:4:0:0:698:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2025-03-31T16:03:32.192253Z node 153 :BS_PROXY_PUT INFO: [185eac4b9c06d110] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:4:0:0:698:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-03-31T16:03:32.192401Z node 153 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 0 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.763 sample PartId# [72057594037927937:2:4:0:0:698:1] QueryCount# 1 VDiskId# [0:1:0:0:0] NodeId# 153 } TEvVPutResult{ TimestampMs# 5.185 VDiskId# [0:1:0:0:0] NodeId# 153 Status# OK } ] } 2025-03-31T16:03:32.192587Z node 153 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037927937:2:4:0:0:698:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-03-31T16:03:32.192727Z node 153 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} commited cookie 1 for step 4 2025-03-31T16:03:32.193152Z node 153 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2025-03-31T16:03:32.193276Z node 153 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2025-03-31T16:03:32.193336Z node 153 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2025-03-31T16:03:32.193381Z node 153 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2025-03-31T16:03:32.193429Z node 153 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2025-03-31T16:03:32.193498Z node 153 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2025-03-31T16:03:32.193540Z node 153 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2025-03-31T16:03:32.193998Z node 153 :PIPE_CLIENT DEBUG: TClient[72075186224037888] ::Bootstrap [153:316:2292] 2025-03-31T16:03:32.194075Z node 153 :PIPE_CLIENT DEBUG: TClient[72075186224037888] lookup [153:316:2292] 2025-03-31T16:03:32.194227Z node 153 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037888 entry.State: StInit ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 1:2:0} 2025-03-31T16:03:32.194456Z node 153 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2025-03-31T16:03:32.194642Z node 153 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2025-03-31T16:03:32.194741Z node 153 :STATESTORAGE DEBUG: 
Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1}
2025-03-31T16:03:32.194788Z node 153 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2}
2025-03-31T16:03:32.194852Z node 153 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888}
2025-03-31T16:03:32.194930Z node 153 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888}
2025-03-31T16:03:32.194993Z node 153 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888}
2025-03-31T16:03:32.195194Z node 153 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037888 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72075186224037888 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}}
2025-03-31T16:03:32.195294Z node 153 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72075186224037888 followers: 0
2025-03-31T16:03:32.195470Z node 153 :PIPE_CLIENT DEBUG: TClient[72075186224037888] forward result error, check reconnect [153:316:2292]
2025-03-31T16:03:32.195548Z node 153 :PIPE_CLIENT DEBUG: TClient[72075186224037888] connect failed [153:316:2292]
>> KqpCost::IndexLookupJoin+StreamLookupJoin [GOOD]
>> TPersQueueTest::Delete [GOOD]
>> TPersQueueTest::FetchRequest
|74.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut
|74.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut
|74.2%| [LD] {RESULT} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut
>> KqpCost::OlapPointLookup [GOOD]
>> BuildStatsHistogram::Five_Five_Serial [GOOD]
>> BuildStatsHistogram::Five_Five_Crossed
>> KqpCost::PointLookup [GOOD]
>> KqpImmediateEffects::InsertDuplicates+UseSink
>> KqpImmediateEffects::Upsert
>> DataShardVolatile::VolatileTxAbortedOnSplit [GOOD]
>> DataShardVolatile::VolatileTxAbortedOnDrop
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::CancelUponTransferringManyTablesShouldSucceed [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142]
Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:111:2142]
2025-03-31T16:03:27.924305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-03-31T16:03:27.924441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-31T16:03:27.924474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-03-31T16:03:27.924503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-03-31T16:03:27.924535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type
TxMergeTablePartition, limit 10000 2025-03-31T16:03:27.924557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:03:27.924605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:03:27.924700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:03:27.925043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:03:28.022158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:03:28.022221Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:28.035810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:03:28.036121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:03:28.036269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:03:28.042593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:03:28.042807Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:03:28.043467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:28.043676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:03:28.045788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:28.047230Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:28.047298Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:28.047413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:03:28.047467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:28.047520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:03:28.047752Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:03:28.055032Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:240:2058] recipient: [1:15:2062] 2025-03-31T16:03:28.245738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:03:28.246054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:28.246315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, 
LocalPathId: 1] was 0 2025-03-31T16:03:28.246576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:03:28.246638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:28.248906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:28.249119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:03:28.249410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:28.249468Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:03:28.249506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:03:28.249549Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:03:28.251566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:28.251644Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:03:28.251688Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:03:28.253871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:28.253935Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:28.253984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:28.254051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:03:28.258192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:03:28.260483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:03:28.260729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:03:28.261738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:28.261869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 
4294969446 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:03:28.261933Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:28.262203Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:03:28.262245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:28.262408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:03:28.262487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:03:28.265365Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:28.265432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:28.265643Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:28.265708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:03:28.266132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:28.266182Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:03:28.266301Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:03:28.266339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:28.266381Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:03:28.266416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:28.266458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:03:28.266521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:28.266573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:03:28.266607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:03:28.266695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:03:28.266735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:03:28.266785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:03:28.268952Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:03:28.269092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: 
Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:03:28.269135Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 94046678944 2025-03-31T16:03:45.618349Z node 3 :FLAT_TX_SCHEMESHARD INFO: TRmDir ProgressState, opId: 281474976710763:0, at schemeshard: 72057594046678944 2025-03-31T16:03:45.618417Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710763 ready parts: 1/1 2025-03-31T16:03:45.618606Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 281474976710763 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:03:45.619170Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-03-31T16:03:45.619307Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-03-31T16:03:45.619339Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710763 2025-03-31T16:03:45.619385Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 12 2025-03-31T16:03:45.619433Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-03-31T16:03:45.620446Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-03-31T16:03:45.620548Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-03-31T16:03:45.620587Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710763 2025-03-31T16:03:45.620618Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 11 2025-03-31T16:03:45.620665Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-03-31T16:03:45.620762Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 0/1, is published: true 2025-03-31T16:03:45.623059Z node 3 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-03-31T16:03:45.623213Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710763, at schemeshard: 72057594046678944 2025-03-31T16:03:45.623266Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 0/1, is published: true 2025-03-31T16:03:45.623306Z node 3 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710763, at 
schemeshard: 72057594046678944 2025-03-31T16:03:45.624970Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710763:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710763 msg type: 269090816 2025-03-31T16:03:45.625101Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710763, partId: 4294967295, tablet: 72057594046316545 2025-03-31T16:03:45.625534Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 FAKE_COORDINATOR: Add transaction: 281474976710763 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710763 at step: 5000009 2025-03-31T16:03:45.625968Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000009, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:45.626095Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710763 Coordinator: 72057594046316545 AckTo { RawX1: 135 RawX2: 12884904046 } } Step: 5000009 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:03:45.626144Z node 3 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710763:0, step: 5000009, at schemeshard: 72057594046678944 2025-03-31T16:03:45.626284Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710763:0, at schemeshard: 72057594046678944 2025-03-31T16:03:45.626351Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710763:0 progress is 1/1 2025-03-31T16:03:45.626388Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-03-31T16:03:45.626435Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710763:0 progress is 1/1 2025-03-31T16:03:45.626495Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-03-31T16:03:45.626551Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-03-31T16:03:45.626629Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-03-31T16:03:45.626676Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 1/1, is published: false 2025-03-31T16:03:45.626729Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-03-31T16:03:45.626768Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710763:0 2025-03-31T16:03:45.626799Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710763:0 2025-03-31T16:03:45.626868Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-03-31T16:03:45.626904Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710763, publications: 2, subscribers: 1 2025-03-31T16:03:45.626940Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710763, [OwnerId: 72057594046678944, LocalPathId: 1], 13 2025-03-31T16:03:45.626971Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 
281474976710763, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-03-31T16:03:45.628304Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-03-31T16:03:45.630007Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:45.630050Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:45.630233Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-03-31T16:03:45.630379Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:45.630433Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:207:2209], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 1 2025-03-31T16:03:45.630472Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:207:2209], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 4 FAKE_COORDINATOR: Erasing txId 281474976710763 2025-03-31T16:03:45.631276Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-03-31T16:03:45.631376Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-03-31T16:03:45.631412Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710763 2025-03-31T16:03:45.631455Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2025-03-31T16:03:45.631503Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-03-31T16:03:45.632193Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-03-31T16:03:45.632293Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-03-31T16:03:45.632323Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710763 2025-03-31T16:03:45.632362Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-03-31T16:03:45.632396Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-03-31T16:03:45.632466Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication 
complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710763, subscribers: 1 2025-03-31T16:03:45.632509Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [3:126:2152] 2025-03-31T16:03:45.635197Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-03-31T16:03:45.635615Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-03-31T16:03:45.635710Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710763 2025-03-31T16:03:45.635770Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710763 2025-03-31T16:03:45.635813Z node 3 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-03-31T16:03:45.635842Z node 3 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710763 2025-03-31T16:03:45.635877Z node 3 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710763, id# 103, itemIdx# 4294967295 2025-03-31T16:03:45.637409Z node 3 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-03-31T16:03:45.637501Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-03-31T16:03:45.637549Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [3:775:2710] TestWaitNotification: OK eventTxId 103 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupJoin+StreamLookupJoin [GOOD] Test command err: Trying to start YDB, gRPC: 8376, MsgBus: 64183 2025-03-31T16:03:38.405351Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004976739076034:2091];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:38.405450Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000be6/r3tmp/tmpsECiJb/pdisk_1.dat 2025-03-31T16:03:38.881666Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:03:38.881787Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:03:38.885212Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:03:38.915696Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8376, node 1 2025-03-31T16:03:39.019359Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:03:39.019391Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:03:39.019398Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:03:39.019527Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64183 TClient is connected to server localhost:64183 WaitRootIsUp 'Root'... 
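
The schemeshard trace above follows one operation through its whole lifecycle: the propose is accepted, the (fake) coordinator assigns a plan step, the operation advances through internal state codes (2 -> 3 -> 128 -> 240), the new path version is published to the scheme board, and TEvUpdateAck messages retire the outstanding publications before the waiter is notified. The following is a minimal, illustrative C++ sketch of that staged pattern only — the state codes and names are borrowed from the log for readability, but none of this is YDB's actual schemeshard code:

#include <cstdint>
#include <iostream>
#include <set>

// Toy staged transaction, loosely modeled on the lifecycle in the trace.
enum class EState : uint32_t { CreateParts = 2, ConfigureParts = 3, Propose = 128, Done = 240 };

struct TOperation {
    uint64_t TxId;
    EState State = EState::CreateParts;
    std::set<uint64_t> PendingPublications; // path ids awaiting an ack
};

void Advance(TOperation& op) {
    switch (op.State) {
        case EState::CreateParts:    op.State = EState::ConfigureParts; break; // 2 -> 3
        case EState::ConfigureParts: op.State = EState::Propose;        break; // 3 -> 128
        case EState::Propose:        op.State = EState::Done;           break; // 128 -> 240 (plan step arrived)
        case EState::Done: break;
    }
}

// An ack retires one publication; the operation is fully complete only when
// it is Done *and* every published path version has been acknowledged.
bool HandleAck(TOperation& op, uint64_t pathId) {
    op.PendingPublications.erase(pathId);
    return op.State == EState::Done && op.PendingPublications.empty();
}

int main() {
    TOperation op{/*TxId=*/1};
    op.PendingPublications = {1};                 // one path published
    while (op.State != EState::Done) Advance(op); // walk 2 -> 3 -> 128 -> 240
    std::cout << "complete: " << HandleAck(op, 1) << "\n"; // prints 1
}
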
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:03:39.663443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:39.685623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:39.862750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:40.105221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:40.202332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:42.213795Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004993918946975:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:42.213902Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:42.574455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:03:42.618329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:03:42.674159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:03:42.723456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:03:42.769156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:03:42.851936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:03:43.022781Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004998213914794:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:43.022874Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:43.023190Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004998213914799:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:43.028939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:03:43.049975Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488004998213914801:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:03:43.150502Z node 1 :TX_PROXY ERROR: Actor# [1:7488004998213914857:3468] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:03:43.407454Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488004976739076034:2091];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:43.407516Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:03:44.438574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:03:44.477387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:03:44.519204Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 /Root/Join1_2 1 19 /Root/Join1_1 8 136 >> KqpImmediateEffects::ConflictingKeyW1WR2 >> TPartBtreeIndexIteration::FewNodes_Groups_History_Slices_Sticky [GOOD] >> KqpCost::ScanQueryRangeFullScan-SourceRead [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::OlapPointLookup [GOOD] Test command err: Trying to start YDB, gRPC: 7113, MsgBus: 27889 2025-03-31T16:03:38.595351Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004975823093900:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:38.597675Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000be1/r3tmp/tmpX8wkf1/pdisk_1.dat 2025-03-31T16:03:39.050693Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:39.054593Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:03:39.054686Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:03:39.058423Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7113, node 1 2025-03-31T16:03:39.146278Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:03:39.146310Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:03:39.146316Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:03:39.146461Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27889 TClient is connected to server localhost:27889 WaitRootIsUp 'Root'... 
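
The KQP_WORKLOAD_SERVICE warnings and the TX_PROXY "path exist, request accepts it" error above are a benign bootstrap race: several sessions discover that the default resource pool is missing and all try to create it; one CREATE wins, the winner re-checks once its transaction completes ("doublechecking"), and the losers treat "path exists" as success. A hedged sketch of that create-if-absent pattern, assuming a toy in-memory catalog in place of the schemeshard (none of these names are YDB's):

#include <cstdio>
#include <mutex>
#include <set>
#include <string>
#include <thread>
#include <vector>

struct TCatalog {
    std::mutex Lock;
    std::set<std::string> Paths;

    // Returns true if this caller created the path, false if it already existed.
    bool CreateIfAbsent(const std::string& path) {
        std::lock_guard<std::mutex> g(Lock);
        return Paths.insert(path).second;
    }
    bool Exists(const std::string& path) {
        std::lock_guard<std::mutex> g(Lock);
        return Paths.count(path) > 0;
    }
};

int main() {
    TCatalog catalog;
    const std::string pool = "/Root/.metadata/workload_manager/pools/default";
    std::vector<std::thread> sessions;
    for (int i = 0; i < 4; ++i) {
        sessions.emplace_back([&, i] {
            if (catalog.CreateIfAbsent(pool)) {
                // Winner: re-check after the create transaction completes.
                std::printf("session %d: created, doublechecking: %d\n", i, (int)catalog.Exists(pool));
            } else {
                // Concurrent creator: "path exist, request accepts it".
                std::printf("session %d: already exists, ok\n", i);
            }
        });
    }
    for (auto& t : sessions) t.join();
}
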
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:03:39.793711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:39.836399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:40.042283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:40.318724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:40.405031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:42.744895Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004993002964713:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:42.745010Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:43.175856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:03:43.242573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:03:43.276630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:03:43.309162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:03:43.355603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:03:43.422342Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:03:43.487738Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004997297932521:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:43.487834Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:43.488121Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004997297932526:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:43.492795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:03:43.514428Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488004997297932528:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:03:43.590783Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488004975823093900:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:43.591449Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:03:43.608087Z node 1 :TX_PROXY ERROR: Actor# [1:7488004997297932583:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:03:44.833421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:03:45.022451Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[1:7488005001592900306:2503];tablet_id=72075186224037925;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:03:45.022695Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[1:7488005001592900306:2503];tablet_id=72075186224037925;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:03:45.023005Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[1:7488005001592900306:2503];tablet_id=72075186224037925;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:03:45.023184Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[1:7488005001592900306:2503];tablet_id=72075186224037925;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:03:45.023311Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[1:7488005001592900306:2503];tablet_id=72075186224037925;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:03:45.023424Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[1:7488005001592900306:2503];tablet_id=72075186224037925;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:03:45.023555Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[1:7488005001592900306:2503];tablet_id=72075186224037925;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:03:45.023683Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[1:7488005001592900306:2503];tablet_id=72075186224037925;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:03:45.023799Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[1:7488005001592900306:2503];tablet_id=72075186224037925;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:03:45.023918Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037925;self_id=[1:7488005001592900306:2503];tablet_id=72075186224037925;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:03:45.024022Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[1:7488005001592900306:2503];tablet_id=72075186224037925;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:03:45.024132Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[1:7488005001592900306:2503];tablet_id=72075186224037925;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:03:45.032850Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[1:7488005001592900301:2502];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:03:45.032915Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[1:7488005001592900301:2502];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:03:45.033199Z node 1 :TX_COLUM ... :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:03:45.214259Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:03:45.214624Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:03:45.214659Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:03:45.214787Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:03:45.214809Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:03:45.214908Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:03:45.214957Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:03:45.215169Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:03:45.215197Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:03:45.215305Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T16:03:45.215333Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T16:03:45.216523Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:03:45.216571Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:03:45.216645Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:03:45.216668Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:03:45.216793Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:03:45.216815Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:03:45.216883Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:03:45.216907Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:03:45.216955Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:03:45.216975Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:03:45.217006Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:03:45.217028Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:03:45.217399Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:03:45.217432Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:03:45.217572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:03:45.218141Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:03:45.218272Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:03:45.218299Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:03:45.218427Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:03:45.218448Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:03:45.218541Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T16:03:45.218571Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T16:03:45.224207Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:03:45.224266Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:03:45.224436Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:03:45.224462Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:03:45.224557Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T16:03:45.224579Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T16:03:45.282720Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037925;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-31T16:03:45.282772Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224037926;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-31T16:03:45.287422Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-31T16:03:45.289848Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037924;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-31T16:03:45.292380Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037919;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-31T16:03:45.296377Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-31T16:03:45.297662Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037920;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-31T16:03:45.300974Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037922;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-31T16:03:45.303408Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037921;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-31T16:03:45.305623Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-31T16:03:45.439930Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=281474976710673;tx_id=281474976710673;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710673; 2025-03-31T16:03:45.440359Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=281474976710673;tx_id=281474976710673;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710673; 2025-03-31T16:03:45.440957Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;self_id=[1:7488005001592900299:2501];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037927;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037923;receive=72075186224037928; 2025-03-31T16:03:45.441288Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=281474976710673;tx_id=281474976710673;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710673; 2 >> KqpCost::IndexLookupAndTake-useSink [GOOD] >> KqpCost::OltpWriteRow-isSink [GOOD] >> KqpPg::InsertFromSelect_Serial+useSink [GOOD] >> KqpPg::InsertFromSelect_Serial-useSink >> TPersQueueTest::WhenTheTopicIsDeletedAfterReadingTheData_Uncompressed [GOOD] >> TTopicYqlTest::CreateAndAlterTopicYql ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::PointLookup [GOOD] Test command err: Trying to start YDB, gRPC: 12583, MsgBus: 28289 2025-03-31T16:03:39.477462Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004982593717811:2136];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:39.486281Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000bd2/r3tmp/tmpT7ZnnJ/pdisk_1.dat 2025-03-31T16:03:40.048104Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:40.053968Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:03:40.054082Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:03:40.059771Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12583, node 1 2025-03-31T16:03:40.325870Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:03:40.325900Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:03:40.325907Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:03:40.326034Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28289 TClient is connected to server localhost:28289 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:03:41.120151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:41.139607Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:03:41.216711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:41.512863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
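
The TX_COLUMNSHARD WARN blocks earlier in this output show each column shard running a fixed chain of "normalizers" at startup (Granules -> Chunks -> TablesCleaner -> CleanGranuleId -> ... -> RestoreV2Chunks), logging a normalizer_finished / normalizer_switched pair per step until normalization_finished. Conceptually these are ordered, idempotent startup migrations. A minimal sketch of such a chain, with step names copied from the log but a driver that is hypothetical, not YDB's:

#include <cstdio>
#include <functional>
#include <string>
#include <vector>

struct TNormalizer {
    std::string Name;
    std::function<void()> Run; // idempotent migration body
};

void RunNormalizationChain(const std::vector<TNormalizer>& chain) {
    for (size_t i = 0; i < chain.size(); ++i) {
        chain[i].Run();
        std::printf("normalizer_finished: %s\n", chain[i].Name.c_str());
        if (i + 1 < chain.size()) {
            std::printf("normalizer_switched: %s\n", chain[i + 1].Name.c_str());
        }
    }
    std::printf("normalization_finished\n");
}

int main() {
    RunNormalizationChain({
        {"Granules", [] {}},       {"Chunks", [] {}},             {"TablesCleaner", [] {}},
        {"CleanGranuleId", [] {}}, {"RestoreV1Chunks_V2", [] {}}, {"RestoreV2Chunks", [] {}},
    });
}
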
2025-03-31T16:03:41.799920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:41.901292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:44.069069Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005004068555989:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:44.069287Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:44.401735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:03:44.480903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:03:44.483261Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488004982593717811:2136];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:44.489906Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:03:44.532734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:03:44.567321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:03:44.641685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:03:44.688625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:03:44.785780Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005004068556514:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:44.785860Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:44.786183Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005004068556519:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:44.790620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:03:44.805889Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005004068556521:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:03:44.898264Z node 1 :TX_PROXY ERROR: Actor# [1:7488005004068556578:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupAndTake-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 62710, MsgBus: 64730 2025-03-31T16:03:40.113927Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004984469803566:2273];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:40.113997Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000bbd/r3tmp/tmp7NnVEs/pdisk_1.dat 2025-03-31T16:03:40.591945Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:40.594859Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:03:40.594944Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 62710, node 1 2025-03-31T16:03:40.601283Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:03:40.688185Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:03:40.688205Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:03:40.688213Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:03:40.688328Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64730 TClient is connected to server localhost:64730 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:03:41.397327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:03:41.421983Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:03:41.429688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:41.589366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:41.745774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:41.834535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:43.918058Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004997354707004:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:43.918168Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:44.329169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:03:44.374063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:03:44.418702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:03:44.505369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:03:44.544325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:03:44.595210Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:03:44.691576Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005001649674821:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:44.691669Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:44.692014Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005001649674826:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:44.700113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:03:44.728646Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005001649674828:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:03:44.799618Z node 1 :TX_PROXY ERROR: Actor# [1:7488005001649674884:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:03:45.113210Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488004984469803566:2273];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:45.113293Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:03:46.127547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 /Root/SecondaryKeys/Index/indexImplTable 2 16 /Root/SecondaryKeys 1 8 >> KqpCost::OltpWriteRow+isSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::ScanQueryRangeFullScan-SourceRead [GOOD] Test command err: Trying to start YDB, gRPC: 5043, MsgBus: 22645 2025-03-31T16:03:40.018338Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004985676251007:2142];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:40.018933Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000bc4/r3tmp/tmpCzTaaR/pdisk_1.dat 2025-03-31T16:03:40.482550Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:40.485516Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:03:40.485621Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:03:40.491612Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5043, node 1 2025-03-31T16:03:40.727275Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:03:40.727299Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:03:40.727306Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:03:40.727423Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22645 TClient is connected to server localhost:22645 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:03:41.501246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:41.514313Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:03:41.529994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:41.712228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:41.933905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:42.085848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:44.721506Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005002856121898:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:44.721726Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:45.023241Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488004985676251007:2142];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:45.023492Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:03:45.248880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:03:45.318522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:03:45.357917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:03:45.395453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:03:45.449887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:03:45.501486Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:03:45.591525Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005007151089714:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:45.591627Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:45.592455Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005007151089719:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:45.597115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:03:45.616246Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005007151089721:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:03:45.767308Z node 1 :TX_PROXY ERROR: Actor# [1:7488005007151089779:3460] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:03:47.212871Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: kqp_proxy, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1743437027 AvailableComputeActors: 10000 UsedMemory: 0 TotalMemory: 10737418240 Memory { Pool: 1 Available: 10737418240 } ExecutionUnits: 10000 KqpProxyNodeResources { NodeId: 1 DataCenterNumId: 49 ActiveWorkersCount: 0 DataCenterId: "1" } 2025-03-31T16:03:47.215636Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T16:03:47.273616Z node 1 :KQP_GATEWAY DEBUG: Load table metadata from cache by path, request Path: /Root/Test 2025-03-31T16:03:47.559917Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpSnapshotManager at [1:7488005015741024711:2494] 2025-03-31T16:03:47.559963Z node 1 :KQP_RESOURCE_MANAGER DEBUG: KqpSnapshotManager: got snapshot request from [1:7488005015741024649:2494] 2025-03-31T16:03:47.580397Z node 1 :KQP_RESOURCE_MANAGER DEBUG: KqpSnapshotManager: snapshot 1743437027619:281474976710671 created 2025-03-31T16:03:47.580845Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7488005015741024722:2494] TxId: 281474976710672. Ctx: { TraceId: 01jqpf3nw9b0wzgmkmhc1a92kr, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGUxODRhNjgtNGQ3ODU5NjMtNjdmMzc2ZTItNTczZDZmYmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Executing physical tx, type: 3, stages: 2 2025-03-31T16:03:47.581004Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2025-03-31T16:03:47.581031Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,1], InputsCount: 1, OutputsCount: 1 2025-03-31T16:03:47.581319Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710672. Resolved key sets: 1 2025-03-31T16:03:47.581472Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 9] Access: 1 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL, String : NULL) IncFrom: 1 To: () IncTo: 0 } 2025-03-31T16:03:47.581539Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7488005015741024722:2494] TxId: 281474976710672. Ctx: { TraceId: 01jqpf3nw9b0wzgmkmhc1a92kr, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGUxODRhNjgtNGQ3ODU5NjMtNjdmMzc2ZTItNTczZDZmYmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Start resolving tablets nodes... (1) 2025-03-31T16:03:47.581740Z node 1 :KQP_EXECUTER DEBUG: [ShardsResolver] TxId: 281474976710672. 
Shard resolve complete, resolved shards: 1 2025-03-31T16:03:47.581772Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7488005015741024722:2494] TxId: 281474976710672. Ctx: { TraceId: 01jqpf3nw9b0wzgmkmhc1a92kr, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGUxODRhNjgtNGQ3ODU5NjMtNjdmMzc2ZTItNTczZDZmYmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolved, success: 1, failed: 0 2025-03-31T16:03:47.581805Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7488005015741024722:2494] TxId: 281474976710672. Ctx: { TraceId: 01jqpf3nw9b0wzgmkmhc1a92kr, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGUxODRhNjgtNGQ3ODU5NjMtNjdmMzc2ZTItNTczZDZmYmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards on nodes: node 1: [72075186224037914] 2025-03-31T16:03:47.581821Z node 1 ... CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-03-31T16:03:47.609160Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7488005015741024727:2502], TxId: 281474976710672, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=ZGUxODRhNjgtNGQ3ODU5NjMtNjdmMzc2ZTItNTczZDZmYmY=. TraceId : 01jqpf3nw9b0wzgmkmhc1a92kr. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-03-31T16:03:47.609211Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 1. Tasks execution finished 2025-03-31T16:03:47.609226Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7488005015741024726:2501], TxId: 281474976710672, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=ZGUxODRhNjgtNGQ3ODU5NjMtNjdmMzc2ZTItNTczZDZmYmY=. TraceId : 01jqpf3nw9b0wzgmkmhc1a92kr. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Compute state finished. All channels and sinks finished 2025-03-31T16:03:47.609331Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 1. pass away 2025-03-31T16:03:47.609406Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976710672;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-03-31T16:03:47.609686Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 281474976710672, taskId: 1. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2025-03-31T16:03:47.609854Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7488005015741024722:2494] TxId: 281474976710672. Ctx: { TraceId: 01jqpf3nw9b0wzgmkmhc1a92kr, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGUxODRhNjgtNGQ3ODU5NjMtNjdmMzc2ZTItNTczZDZmYmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Send TEvStreamData to [1:7488005015741024649:2494], seqNo: 1, nRows: 1 2025-03-31T16:03:47.610020Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7488005015741024722:2494] TxId: 281474976710672. Ctx: { TraceId: 01jqpf3nw9b0wzgmkmhc1a92kr, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGUxODRhNjgtNGQ3ODU5NjMtNjdmMzc2ZTItNTczZDZmYmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [1:7488005015741024726:2501], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 2222 Tasks { TaskId: 1 CpuTimeUs: 817 FinishTimeMs: 1743437027609 OutputRows: 1 OutputBytes: 19 Tables { TablePath: "/Root/Test" ReadRows: 3 ReadBytes: 96 } ComputeCpuTimeUs: 128 BuildCpuTimeUs: 689 Sources { IngressName: "CS" Ingress { } } HostName: "ghrun-tuhoqst4xe" NodeId: 1 CreateTimeMs: 1743437027584 } MaxMemoryUsage: 1048576 } 2025-03-31T16:03:47.610070Z node 1 :KQP_EXECUTER INFO: TxId: 281474976710672. Ctx: { TraceId: 01jqpf3nw9b0wzgmkmhc1a92kr, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGUxODRhNjgtNGQ3ODU5NjMtNjdmMzc2ZTItNTczZDZmYmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [1:7488005015741024726:2501] 2025-03-31T16:03:47.610152Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7488005015741024722:2494] TxId: 281474976710672. Ctx: { TraceId: 01jqpf3nw9b0wzgmkmhc1a92kr, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGUxODRhNjgtNGQ3ODU5NjMtNjdmMzc2ZTItNTczZDZmYmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [1:7488005015741024727:2502], 2025-03-31T16:03:47.610214Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule publish at 2025-03-31T16:03:49.211657Z, after 1.603636s 2025-03-31T16:03:47.613482Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710672, send ack to channelId: 2, seqNo: 1, enough: 0, freeSpace: 8388488, to: [1:7488005015741024730:2502] 2025-03-31T16:03:47.613542Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7488005015741024727:2502], TxId: 281474976710672, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=ZGUxODRhNjgtNGQ3ODU5NjMtNjdmMzc2ZTItNTczZDZmYmY=. TraceId : 01jqpf3nw9b0wzgmkmhc1a92kr. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-03-31T16:03:47.613591Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2025-03-31T16:03:47.613601Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 2. Tasks execution finished 2025-03-31T16:03:47.613617Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7488005015741024727:2502], TxId: 281474976710672, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=ZGUxODRhNjgtNGQ3ODU5NjMtNjdmMzc2ZTItNTczZDZmYmY=. TraceId : 01jqpf3nw9b0wzgmkmhc1a92kr. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. 
Database : /Root. PoolId : default. }. Compute state finished. All channels and sinks finished 2025-03-31T16:03:47.613696Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 2. pass away 2025-03-31T16:03:47.613782Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976710672;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-03-31T16:03:47.613972Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 281474976710672, taskId: 2. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2025-03-31T16:03:47.614116Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7488005015741024722:2494] TxId: 281474976710672. Ctx: { TraceId: 01jqpf3nw9b0wzgmkmhc1a92kr, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGUxODRhNjgtNGQ3ODU5NjMtNjdmMzc2ZTItNTczZDZmYmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [1:7488005015741024727:2502], task: 2, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 19180 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 810 FinishTimeMs: 1743437027613 InputRows: 1 InputBytes: 19 OutputRows: 1 OutputBytes: 19 ResultRows: 1 ResultBytes: 19 ComputeCpuTimeUs: 179 BuildCpuTimeUs: 631 HostName: "ghrun-tuhoqst4xe" NodeId: 1 CreateTimeMs: 1743437027587 } MaxMemoryUsage: 1048576 } 2025-03-31T16:03:47.614154Z node 1 :KQP_EXECUTER INFO: TxId: 281474976710672. Ctx: { TraceId: 01jqpf3nw9b0wzgmkmhc1a92kr, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGUxODRhNjgtNGQ3ODU5NjMtNjdmMzc2ZTItNTczZDZmYmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [1:7488005015741024727:2502] 2025-03-31T16:03:47.616087Z node 1 :KQP_EXECUTER INFO: ActorId: [1:7488005015741024722:2494] TxId: 281474976710672. Ctx: { TraceId: 01jqpf3nw9b0wzgmkmhc1a92kr, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGUxODRhNjgtNGQ3ODU5NjMtNjdmMzc2ZTItNTczZDZmYmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Full stats: { CpuTimeUs: 42004 DurationUs: 33244 Tables { TablePath: "/Root/Test" ReadRows: 3 ReadBytes: 96 } ExecuterCpuTimeUs: 20602 StartTimeMs: 1743437027580 FinishTimeMs: 1743437027614 Stages { StageGuid: "8aace415-a0d0b933-2e843fd3-2e406bcd" Program: "(\n(return (lambda \'() (block \'(\n (let $1 (KqpTable \'\"/Root/Test\" \'\"72057594046644480:9\" \'\"\" \'1))\n (let $2 \'(\'\"Amount\" \'\"Comment\" \'\"Group\" \'\"Name\"))\n (let $3 (KqpWideReadTableRanges $1 (Void) $2 \'() \'()))\n (return (FromFlow (WideFilter $3 (lambda \'($4 $5 $6 $7) (Coalesce (< $4 (Uint64 \'\"5000\")) (Bool \'false))) (Uint64 \'1))))\n))))\n)\n" ComputeActors { CpuTimeUs: 2222 Tasks { TaskId: 1 CpuTimeUs: 817 FinishTimeMs: 1743437027609 OutputRows: 1 OutputBytes: 19 Tables { TablePath: "/Root/Test" ReadRows: 3 ReadBytes: 96 } ComputeCpuTimeUs: 128 BuildCpuTimeUs: 689 Sources { IngressName: "CS" Ingress { } } HostName: "ghrun-tuhoqst4xe" NodeId: 1 CreateTimeMs: 1743437027584 } MaxMemoryUsage: 1048576 } BaseTimeMs: 1743437027609 } Stages { StageId: 1 StageGuid: "2ce4f92d-305b3471-33758d25-12a00624" Program: "(\n(return (lambda \'($1) (FromFlow (NarrowMap (Take (ToFlow $1) (Uint64 \'1)) (lambda \'($2 $3 $4 $5) (AsStruct \'(\'\"Amount\" $2) \'(\'\"Comment\" $3) \'(\'\"Group\" $4) \'(\'\"Name\" $5)))))))\n)\n" ComputeActors { CpuTimeUs: 19180 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 810 FinishTimeMs: 1743437027613 InputRows: 1 InputBytes: 19 OutputRows: 1 OutputBytes: 19 ResultRows: 1 ResultBytes: 19 ComputeCpuTimeUs: 179 BuildCpuTimeUs: 631 HostName: "ghrun-tuhoqst4xe" NodeId: 1 CreateTimeMs: 1743437027587 } MaxMemoryUsage: 1048576 } BaseTimeMs: 1743437027609 } TxPlansWithStats: "{\"Node Type\":\"Phase\",\"PlanNodeId\":5,\"Plans\":[{\"Node 
Type\":\"ResultSet\",\"PlanNodeId\":4,\"PlanNodeType\":\"ResultSet\",\"Plans\":[{\"Node Type\":\"Limit\",\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":2}],\"Limit\":\"1\",\"Name\":\"Limit\"}],\"PlanNodeId\":3,\"Plans\":[{\"Node Type\":\"Merge\",\"PlanNodeId\":2,\"PlanNodeType\":\"Connection\",\"Plans\":[{\"Node Type\":\"Limit-Filter-TableFullScan\",\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Limit\":\"1\",\"Name\":\"Limit\"},{\"E-Cost\":\"No estimate\",\"E-Rows\":\"No estimate\",\"E-Size\":\"No estimate\",\"Inputs\":[{\"InternalOperatorId\":2}],\"Name\":\"Filter\",\"Predicate\":\"item.Amount \\u003C 5000\"},{\"E-Cost\":\"No estimate\",\"E-Rows\":\"No estimate\",\"E-Size\":\"No estimate\",\"Inputs\":[],\"Name\":\"TableFullScan\",\"Path\":\"\\/Root\\/Test\",\"ReadColumns\":[\"Amount\",\"Comment\",\"Group\",\"Name\"],\"ReadRanges\":[\"Group (-∞, +∞)\",\"Name (-∞, +∞)\"],\"Scan\":\"Parallel\",\"Table\":\"Test\"}],\"PlanNodeId\":1,\"StageGuid\":\"8aace415-a0d0b933-2e843fd3-2e406bcd\",\"Stats\":{\"BaseTimeMs\":1743437027609,\"ComputeNodes\":[{\"CpuTimeUs\":2222,\"Tasks\":[{\"ComputeTimeUs\":128,\"FinishTimeMs\":1743437027609,\"Host\":\"ghrun-tuhoqst4xe\",\"NodeId\":1,\"OutputBytes\":19,\"OutputRows\":1,\"TaskId\":1}]}],\"FinishedTasks\":0,\"PhysicalStageId\":0,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"},\"Tables\":[\"Test\"]}],\"SortColumns\":[\"Group (Asc)\"],\"StageGuid\":\"\"}],\"StageGuid\":\"2ce4f92d-305b3471-33758d25-12a00624\",\"Stats\":{\"BaseTimeMs\":1743437027609,\"ComputeNodes\":[{\"CpuTimeUs\":19180,\"Tasks\":[{\"ComputeTimeUs\":179,\"FinishTimeMs\":1743437027613,\"Host\":\"ghrun-tuhoqst4xe\",\"InputBytes\":19,\"InputRows\":1,\"NodeId\":1,\"OutputBytes\":19,\"OutputRows\":1,\"ResultBytes\":19,\"ResultRows\":1,\"TaskId\":2}]}],\"FinishedTasks\":0,\"PhysicalStageId\":1,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"StageGuid\":\"\"}],\"StageGuid\":\"\"}" StatConvertBytes: 1754 Extra { type_url: "type.googleapis.com/NKqpProto.TKqpExecutionExtraStats" value: "\010\001\022\r\010\256\021\020\354\225\001\030\232\247\001 \002" } } 2025-03-31T16:03:47.616133Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7488005015741024722:2494] TxId: 281474976710672. Ctx: { TraceId: 01jqpf3nw9b0wzgmkmhc1a92kr, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGUxODRhNjgtNGQ3ODU5NjMtNjdmMzc2ZTItNTczZDZmYmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-03-31T16:03:47.616175Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7488005015741024722:2494] TxId: 281474976710672. Ctx: { TraceId: 01jqpf3nw9b0wzgmkmhc1a92kr, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGUxODRhNjgtNGQ3ODU5NjMtNjdmMzc2ZTItNTczZDZmYmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.021402s ReadRows: 3 ReadBytes: 96 ru: 14 rate limiter was not found force flag: 1 2025-03-31T16:03:47.617097Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437027619, txId: 281474976710671] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::OltpWriteRow-isSink [GOOD] Test command err: Trying to start YDB, gRPC: 30151, MsgBus: 21272 2025-03-31T16:03:39.617460Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004980526412169:2134];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:39.617501Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000bd0/r3tmp/tmp2pDNw6/pdisk_1.dat 2025-03-31T16:03:40.134494Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:40.145714Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:03:40.145806Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:03:40.147952Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30151, node 1 2025-03-31T16:03:40.255766Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:03:40.255794Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:03:40.255838Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:03:40.255998Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21272 TClient is connected to server localhost:21272 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:03:40.920926Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:03:40.947836Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:03:40.962880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:41.177254Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:41.378064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:41.532524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:43.769220Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004997706283062:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:43.769344Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:44.118931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:03:44.161331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:03:44.197726Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:03:44.248255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:03:44.288819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:03:44.341282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:03:44.400167Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005002001250870:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:44.400263Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:44.400551Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005002001250875:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:44.404414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:03:44.419207Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005002001250877:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:03:44.478921Z node 1 :TX_PROXY ERROR: Actor# [1:7488005002001250930:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:03:44.619373Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488004980526412169:2134];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:44.619428Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:03:45.740991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 query_phases { duration_us: 764 cpu_time_us: 764 } query_phases { duration_us: 5716 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 1589 affected_shards: 1 } compilation { duration_us: 78340 cpu_time_us: 74300 } process_cpu_time_us: 1210 total_duration_us: 89268 total_cpu_time_us: 77863 query_phases { duration_us: 540 cpu_time_us: 540 } query_phases { duration_us: 7666 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 1159 affected_shards: 1 } compilation { duration_us: 91323 cpu_time_us: 81595 } process_cpu_time_us: 901 total_duration_us: 103517 total_cpu_time_us: 84195 2025-03-31T16:03:46.488332Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488005010591186053:2530], TxId: 281474976710676, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=MWI2MjJjZTItZDViN2NkNGYtMWVlMTgwMzEtNWRmN2ZkMDY=. TraceId : 01jqpf3mtreqczgdk7mt9zwd5r. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-03-31T16:03:46.488822Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488005010591186054:2531], TxId: 281474976710676, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=MWI2MjJjZTItZDViN2NkNGYtMWVlMTgwMzEtNWRmN2ZkMDY=. TraceId : 01jqpf3mtreqczgdk7mt9zwd5r. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7488005010591186050:2491], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-03-31T16:03:46.489224Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWI2MjJjZTItZDViN2NkNGYtMWVlMTgwMzEtNWRmN2ZkMDY=, ActorId: [1:7488005006296218491:2491], ActorState: ExecuteState, TraceId: 01jqpf3mtreqczgdk7mt9zwd5r, Create QueryResponse for error on request, msg: query_phases { duration_us: 759 cpu_time_us: 759 } query_phases { duration_us: 8210 table_access { name: "/Root/TestTable" reads { rows: 1 bytes: 8 } partitions_count: 1 } cpu_time_us: 6327 affected_shards: 1 } query_phases { duration_us: 34366 cpu_time_us: 34385 } compilation { duration_us: 288517 cpu_time_us: 283660 } process_cpu_time_us: 1677 total_duration_us: 352293 total_cpu_time_us: 326808 query_phases { duration_us: 738 cpu_time_us: 738 } query_phases { duration_us: 4860 table_access { name: "/Root/TestTable" partitions_count: 1 } cpu_time_us: 3942 affected_shards: 1 } query_phases { duration_us: 2190 cpu_time_us: 1863 } query_phases { duration_us: 11223 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 1224 affected_shards: 1 } compilation { duration_us: 271987 cpu_time_us: 266028 } process_cpu_time_us: 1612 total_duration_us: 312846 total_cpu_time_us: 275407 query_phases { duration_us: 661 cpu_time_us: 661 } query_phases { duration_us: 9845 table_access { name: "/Root/TestTable" partitions_count: 1 } cpu_time_us: 8525 affected_shards: 1 } query_phases { duration_us: 1241 cpu_time_us: 564 affected_shards: 1 } compilation { duration_us: 368232 cpu_time_us: 362947 } process_cpu_time_us: 1353 total_duration_us: 389191 total_cpu_time_us: 374050 query_phases { duration_us: 636 cpu_time_us: 636 } query_phases { duration_us: 7203 table_access { name: "/Root/TestTable" reads { rows: 1 bytes: 8 } partitions_count: 1 } cpu_time_us: 6071 affected_shards: 1 } query_phases { duration_us: 4237 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 1199 affected_shards: 1 } compilation { duration_us: 344154 cpu_time_us: 337328 } process_cpu_time_us: 1351 total_duration_us: 364388 total_cpu_time_us: 346585 query_phases { duration_us: 870 cpu_time_us: 870 } query_phases { duration_us: 3575 table_access { name: "/Root/TestTable" deletes { rows: 1 } partitions_count: 1 } cpu_time_us: 1185 affected_shards: 1 } compilation { duration_us: 82947 cpu_time_us: 78417 } process_cpu_time_us: 1272 total_duration_us: 91457 total_cpu_time_us: 81744 query_phases { duration_us: 717 cpu_time_us: 717 } query_phases { duration_us: 3504 table_access { name: "/Root/TestTable" deletes { rows: 1 } partitions_count: 1 } cpu_time_us: 1005 affected_shards: 1 } compilation { duration_us: 79853 cpu_time_us: 68176 } process_cpu_time_us: 1160 total_duration_us: 89978 total_cpu_time_us: 71058 >> KqpImmediateEffects::Delete ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> TPartBtreeIndexIteration::FewNodes_Groups_History_Slices_Sticky [GOOD] Test command err: ======= CUT ======= Part{[1:2:3:0:0:0:0] eph 0, 346b 12r} data 755b + FlatIndex{4} Label{3 rev 3, 172b} 5 rec | Page Row Bytes (Uint32, String) | 0 0 86b {1, aaa} | 1 3 88b {1, b} | 2 6 86b {2, NULL} | 3 9 86b {2, ccx} | 3 11 86b {2, cxz} + BTreeIndex{PageId: 5 RowCount: 12 DataSize: 346 ErasedRowCount: 0} Label{13 rev 1, 208b} | PageId: 0 RowCount: 3 DataSize: 86 ErasedRowCount: 0 | > {1, b} | PageId: 1 RowCount: 6 DataSize: 174 ErasedRowCount: 0 | > {2, NULL} | PageId: 2 RowCount: 9 DataSize: 260 ErasedRowCount: 0 
| > {2, ccx} | PageId: 3 RowCount: 12 DataSize: 346 ErasedRowCount: 0 ======= FULL ======= Part{[1:2:3:0:0:0:0] eph 0, 346b 12r} data 777b + FlatIndex{4} Label{3 rev 3, 179b} 5 rec | Page Row Bytes (Uint32, String) | 0 0 86b {1, aaa} | 1 3 88b {1, baaaa} | 2 6 86b {2, aaa} | 3 9 86b {2, ccx} | 3 11 86b {2, cxz} + BTreeIndex{PageId: 5 RowCount: 12 DataSize: 346 ErasedRowCount: 0} Label{13 rev 1, 223b} | PageId: 0 RowCount: 3 DataSize: 86 ErasedRowCount: 0 | > {1, baaaa} | PageId: 1 RowCount: 6 DataSize: 174 ErasedRowCount: 0 | > {2, aaa} | PageId: 2 RowCount: 9 DataSize: 260 ErasedRowCount: 0 | > {2, ccx} | PageId: 3 RowCount: 12 DataSize: 346 ErasedRowCount: 0 ======= CUT ======= Part{[1:2:3:0:0:0:0] eph 0, 420b 10r} data 1347b + FlatIndex{10} Label{3 rev 3, 362b} 11 rec | Page Row Bytes (Uint32, String) | 0 0 42b {1, aaa} | 1 1 42b {1, ab} | 2 2 42b {1, ac} | 3 3 42b {1, b} | 4 4 42b {1, bb} | 5 5 42b {2, NULL} | 6 6 42b {2, ab} | 7 7 42b {2, ac} | 8 8 42b {2, b} | 9 9 42b {2, bb} | 9 9 42b {2, bba} + BTreeIndex{PageId: 11 RowCount: 10 DataSize: 420 ErasedRowCount: 0} Label{13 rev 1, 536b} | PageId: 0 RowCount: 1 DataSize: 42 ErasedRowCount: 0 | > {1, ab} | PageId: 1 RowCount: 2 DataSize: 84 ErasedRowCount: 0 | > {1, ac} | PageId: 2 RowCount: 3 DataSize: 126 ErasedRowCount: 0 | > {1, b} | PageId: 3 RowCount: 4 DataSize: 168 ErasedRowCount: 0 | > {1, bb} | PageId: 4 RowCount: 5 DataSize: 210 ErasedRowCount: 0 | > {2, NULL} | PageId: 5 RowCount: 6 DataSize: 252 ErasedRowCount: 0 | > {2, ab} | PageId: 6 RowCount: 7 DataSize: 294 ErasedRowCount: 0 | > {2, ac} | PageId: 7 RowCount: 8 DataSize: 336 ErasedRowCount: 0 | > {2, b} | PageId: 8 RowCount: 9 DataSize: 378 ErasedRowCount: 0 | > {2, bb} | PageId: 9 RowCount: 10 DataSize: 420 ErasedRowCount: 0 ======= FULL ======= Part{[1:2:3:0:0:0:0] eph 0, 420b 10r} data 1381b + FlatIndex{10} Label{3 rev 3, 375b} 11 rec | Page Row Bytes (Uint32, String) | 0 0 42b {1, aaa} | 1 1 42b {1, aba} | 2 2 42b {1, aca} | 3 3 42b {1, baa} | 4 4 42b {1, bba} | 5 5 42b {2, aaa} | 6 6 42b {2, aba} | 7 7 42b {2, aca} | 8 8 42b {2, baa} | 9 9 42b {2, bba} | 9 9 42b {2, bba} + BTreeIndex{PageId: 11 RowCount: 10 DataSize: 420 ErasedRowCount: 0} Label{13 rev 1, 557b} | PageId: 0 RowCount: 1 DataSize: 42 ErasedRowCount: 0 | > {1, aba} | PageId: 1 RowCount: 2 DataSize: 84 ErasedRowCount: 0 | > {1, aca} | PageId: 2 RowCount: 3 DataSize: 126 ErasedRowCount: 0 | > {1, baa} | PageId: 3 RowCount: 4 DataSize: 168 ErasedRowCount: 0 | > {1, bba} | PageId: 4 RowCount: 5 DataSize: 210 ErasedRowCount: 0 | > {2, aaa} | PageId: 5 RowCount: 6 DataSize: 252 ErasedRowCount: 0 | > {2, aba} | PageId: 6 RowCount: 7 DataSize: 294 ErasedRowCount: 0 | > {2, aca} | PageId: 7 RowCount: 8 DataSize: 336 ErasedRowCount: 0 | > {2, baa} | PageId: 8 RowCount: 9 DataSize: 378 ErasedRowCount: 0 | > {2, bba} | PageId: 9 RowCount: 10 DataSize: 420 ErasedRowCount: 0 ======= SLICES ======= { [0, 2), [2, 3), [3, 5), [5, 6), [6, 7), [7, 8), [8, 9] } ======= CUT ======= Part{[1:2:3:0:0:0:0] eph 0, 420b 10r} data 1347b + FlatIndex{10} Label{3 rev 3, 362b} 11 rec | Page Row Bytes (Uint32, String) | 0 0 42b {1, aaa} | 1 1 42b {1, ab} | 2 2 42b {1, ac} | 3 3 42b {1, b} | 4 4 42b {1, bb} | 5 5 42b {2, NULL} | 6 6 42b {2, ab} | 7 7 42b {2, ac} | 8 8 42b {2, b} | 9 9 42b {2, bb} | 9 9 42b {2, bba} + BTreeIndex{PageId: 11 RowCount: 10 DataSize: 420 ErasedRowCount: 0} Label{13 rev 1, 536b} | PageId: 0 RowCount: 1 DataSize: 42 ErasedRowCount: 0 | > {1, ab} | PageId: 1 RowCount: 2 DataSize: 84 ErasedRowCount: 0 | > {1, ac} 
| PageId: 2 RowCount: 3 DataSize: 126 ErasedRowCount: 0 | > {1, b} | PageId: 3 RowCount: 4 DataSize: 168 ErasedRowCount: 0 | > {1, bb} | PageId: 4 RowCount: 5 DataSize: 210 ErasedRowCount: 0 | > {2, NULL} | PageId: 5 RowCount: 6 DataSize: 252 ErasedRowCount: 0 | > {2, ab} | PageId: 6 RowCount: 7 DataSize: 294 ErasedRowCount: 0 | > {2, ac} | PageId: 7 RowCount: 8 DataSize: 336 ErasedRowCount: 0 | > {2, b} | PageId: 8 RowCount: 9 DataSize: 378 ErasedRowCount: 0 | > {2, bb} | PageId: 9 RowCount: 10 DataSize: 420 ErasedRowCount: 0 ======= FULL ======= Part{[1:2:3:0:0:0:0] eph 0, 420b 10r} data 1381b + FlatIndex{10} Label{3 rev 3, 375b} 11 rec | Page Row Bytes (Uint32, String) | 0 0 42b {1, aaa} | 1 1 42b {1, aba} | 2 2 42b {1, aca} | 3 3 42b {1, baa} | 4 4 42b {1, bba} | 5 5 42b {2, aaa} | 6 6 42b {2, aba} | 7 7 42b {2, aca} | 8 8 42b {2, baa} | 9 9 42b {2, bba} | 9 9 42b {2, bba} + BTreeIndex{PageId: 11 RowCount: 10 DataSize: 420 ErasedRowCount: 0} Label{13 rev 1, 557b} | PageId: 0 RowCount: 1 DataSize: 42 ErasedRowCount: 0 | > {1, aba} | PageId: 1 RowCount: 2 DataSize: 84 ErasedRowCount: 0 | > {1, aca} | PageId: 2 RowCount: 3 DataSize: 126 ErasedRowCount: 0 | > {1, baa} | PageId: 3 RowCount: 4 DataSize: 168 ErasedRowCount: 0 | > {1, bba} | PageId: 4 RowCount: 5 DataSize: 210 ErasedRowCount: 0 | > {2, aaa} | PageId: 5 RowCount: 6 DataSize: 252 ErasedRowCount: 0 | > {2, aba} | PageId: 6 RowCount: 7 DataSize: 294 ErasedRowCount: 0 | > {2, aca} | PageId: 7 RowCount: 8 DataSize: 336 ErasedRowCount: 0 | > {2, baa} | PageId: 8 RowCount: 9 DataSize: 378 ErasedRowCount: 0 | > {2, bba} | PageId: 9 RowCount: 10 DataSize: 420 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 81b 2r} data 316b + FlatIndex{2} Label{3 rev 3, 107b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 41b {ccccccd} | 1 1 41b {ccccccd} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 81 ErasedRowCount: 0} Label{13 rev 1, 109b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {ccccccd} | PageId: 1 RowCount: 2 DataSize: 81 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 83b 2r} data 320b + FlatIndex{2} Label{3 rev 3, 109b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 43b {ccccccd} | 1 1 43b {ccccccddd} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 83 ErasedRowCount: 0} Label{13 rev 1, 109b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {ccccccd} | PageId: 1 RowCount: 2 DataSize: 83 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 80b 2r} data 312b + FlatIndex{2} Label{3 rev 3, 105b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 40b {cccccd} | 1 1 40b {cccccd} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 80 ErasedRowCount: 0} Label{13 rev 1, 108b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {cccccd} | PageId: 1 RowCount: 2 DataSize: 80 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 82b 2r} data 316b + FlatIndex{2} Label{3 rev 3, 107b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 42b {cccccd} | 1 1 42b {cccccddd} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 82 ErasedRowCount: 0} Label{13 rev 1, 108b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {cccccd} | PageId: 1 RowCount: 2 DataSize: 82 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 79b 2r} data 308b + FlatIndex{2} Label{3 rev 3, 103b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 39b {ccccd} | 1 1 39b {ccccd} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 79 ErasedRowCount: 0} Label{13 rev 1, 107b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {ccccd} | PageId: 1 RowCount: 2 
DataSize: 79 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 81b 2r} data 312b + FlatIndex{2} Label{3 rev 3, 105b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 41b {ccccd} | 1 1 41b {ccccddd} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 81 ErasedRowCount: 0} Label{13 rev 1, 107b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {ccccd} | PageId: 1 RowCount: 2 DataSize: 81 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 78b 2r} data 304b + FlatIndex{2} Label{3 rev 3, 101b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 38b {cccd} | 1 1 38b {cccd} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 78 ErasedRowCount: 0} Label{13 rev 1, 106b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {cccd} | PageId: 1 RowCount: 2 DataSize: 78 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 80b 2r} data 308b + FlatIndex{2} Label{3 rev 3, 103b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 40b {cccd} | 1 1 40b {cccddd} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 80 ErasedRowCount: 0} Label{13 rev 1, 106b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {cccd} | PageId: 1 RowCount: 2 DataSize: 80 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 75b 2r} data 292b + FlatIndex{2} Label{3 rev 3, 95b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 35b {d} | 1 1 35b {d} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 75 ErasedRowCount: 0} Label{13 rev 1, 103b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {d} | PageId: 1 RowCount: 2 DataSize: 75 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 77b 2r} data 296b + FlatIndex{2} Label{3 rev 3, 97b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 37b {d} | 1 1 37b {ddd} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 77 ErasedRowCount: 0} Label{13 rev 1, 103b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {d} | PageId: 1 RowCount: 2 DataSize: 77 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 69b 2r} data 280b + FlatIndex{2} Label{3 rev 3, 89b} 3 rec | Page Row Bytes (String) | 0 0 34b {} | 1 1 35b {d} | 1 1 35b {d} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 69 ErasedRowCount: 0} Label{13 rev 1, 103b} | PageId: 0 RowCount: 1 DataSize: 34 ErasedRowCount: 0 | > {d} | PageId: 1 RowCount: 2 DataSize: 69 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 71b 2r} data 284b + FlatIndex{2} Label{3 rev 3, 91b} 3 rec | Page Row Bytes (String) | 0 0 34b {} | 1 1 37b {d} | 1 1 37b {ddd} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 71 ErasedRowCount: 0} Label{13 rev 1, 10 ... 
{Set 3 Uint64 : 34}, {Set 4 String : xxxxxxxxxx_34} | ERowOp 1: {5, 1} {Set 2 Uint32 : 35}, {Set 3 Uint64 : 35}, {Set 4 String : xxxxxxxxxx_35} + Rows{48} Label{484 rev 1, 138b}, [36, +2)row | ERowOp 1: {5, 3} {Set 2 Uint32 : 36}, {Set 3 Uint64 : 36}, {Set 4 String : xxxxxxxxxx_36} | ERowOp 1: {5, 4} {Set 2 Uint32 : 37}, {Set 3 Uint64 : 37}, {Set 4 String : xxxxxxxxxx_37} + Rows{51} Label{514 rev 1, 138b}, [38, +2)row | ERowOp 1: {5, 6} {Set 2 Uint32 : 38}, {Set 3 Uint64 : 38}, {Set 4 String : xxxxxxxxxx_38} | ERowOp 1: {5, 7} {Set 2 Uint32 : 39}, {Set 3 Uint64 : 39}, {Set 4 String : xxxxxxxxxx_39} Slices{ [0, 4), [6, 8), [8, 12), [14, 16), [16, 18), [20, 28), [32, 34), [34, 38), [38, 39] } Part{[1:2:3:0:0:0:0] eph 0, 10774b 40r} data 15576b + FlatIndex{95} Label{3 rev 3, 558b} 21 rec | Page Row Bytes (Uint32, Uint32) | 0 0 66b {0, 1} | 2 2 66b {0, 4} | 4 4 82b {0, 7} | 8 6 66b {0, 10} | 11 8 66b {1, 3} | 14 10 82b {1, 6} | 20 12 66b {1, 8} | 23 14 66b {2, NULL} | 26 16 82b {2, 4} | 36 18 66b {2, 7} | 39 20 66b {2, 10} | 42 22 82b {3, 3} | 48 24 66b {3, 6} | 53 26 66b {3, 8} | 58 28 82b {4, NULL} | 64 30 66b {4, 4} | 67 32 66b {4, 7} | 70 34 82b {4, 10} | 82 36 66b {5, 3} | 87 38 66b {5, 6} | 87 39 66b {5, 7} + BTreeIndex{PageId: 98 RowCount: 40 DataSize: 1416 GroupDataSize: 9358 ErasedRowCount: 0} Label{13 rev 1, 102b} | + BTreeIndex{PageId: 72 RowCount: 18 DataSize: 642 GroupDataSize: 3891 ErasedRowCount: 0} Label{13 rev 1, 151b} | | + BTreeIndex{PageId: 15 RowCount: 6 DataSize: 214 GroupDataSize: 1052 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 0 RowCount: 2 DataSize: 66 GroupDataSize: 76 ErasedRowCount: 0 | | | > {0, 4} | | | PageId: 2 RowCount: 4 DataSize: 132 GroupDataSize: 526 ErasedRowCount: 0 | | | > {0, 7} | | | PageId: 4 RowCount: 6 DataSize: 214 GroupDataSize: 1052 ErasedRowCount: 0 | | > {0, 10} | | + BTreeIndex{PageId: 27 RowCount: 12 DataSize: 428 GroupDataSize: 2467 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 8 RowCount: 8 DataSize: 280 GroupDataSize: 1488 ErasedRowCount: 0 | | | > {1, 3} | | | PageId: 11 RowCount: 10 DataSize: 346 GroupDataSize: 1938 ErasedRowCount: 0 | | | > {1, 6} | | | PageId: 14 RowCount: 12 DataSize: 428 GroupDataSize: 2467 ErasedRowCount: 0 | | > {1, 8} | | + BTreeIndex{PageId: 43 RowCount: 18 DataSize: 642 GroupDataSize: 3891 ErasedRowCount: 0} Label{13 rev 1, 147b} | | | PageId: 20 RowCount: 14 DataSize: 494 GroupDataSize: 2906 ErasedRowCount: 0 | | | > {2, NULL} | | | PageId: 23 RowCount: 16 DataSize: 560 GroupDataSize: 3360 ErasedRowCount: 0 | | | > {2, 4} | | | PageId: 26 RowCount: 18 DataSize: 642 GroupDataSize: 3891 ErasedRowCount: 0 | > {2, 7} | + BTreeIndex{PageId: 97 RowCount: 40 DataSize: 1416 GroupDataSize: 9358 ErasedRowCount: 0} Label{13 rev 1, 151b} | | + BTreeIndex{PageId: 59 RowCount: 24 DataSize: 856 GroupDataSize: 5315 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 36 RowCount: 20 DataSize: 708 GroupDataSize: 4330 ErasedRowCount: 0 | | | > {2, 10} | | | PageId: 39 RowCount: 22 DataSize: 774 GroupDataSize: 4784 ErasedRowCount: 0 | | | > {3, 3} | | | PageId: 42 RowCount: 24 DataSize: 856 GroupDataSize: 5315 ErasedRowCount: 0 | | > {3, 6} | | + BTreeIndex{PageId: 71 RowCount: 30 DataSize: 1070 GroupDataSize: 6739 ErasedRowCount: 0} Label{13 rev 1, 147b} | | | PageId: 48 RowCount: 26 DataSize: 922 GroupDataSize: 5754 ErasedRowCount: 0 | | | > {3, 8} | | | PageId: 53 RowCount: 28 DataSize: 988 GroupDataSize: 6208 ErasedRowCount: 0 | | | > {4, NULL} | | | PageId: 58 RowCount: 30 DataSize: 1070 GroupDataSize: 
6739 ErasedRowCount: 0 | | > {4, 4} | | + BTreeIndex{PageId: 96 RowCount: 40 DataSize: 1416 GroupDataSize: 9358 ErasedRowCount: 0} Label{13 rev 1, 249b} | | | PageId: 64 RowCount: 32 DataSize: 1136 GroupDataSize: 7178 ErasedRowCount: 0 | | | > {4, 7} | | | PageId: 67 RowCount: 34 DataSize: 1202 GroupDataSize: 7632 ErasedRowCount: 0 | | | > {4, 10} | | | PageId: 70 RowCount: 36 DataSize: 1284 GroupDataSize: 8163 ErasedRowCount: 0 | | | > {5, 3} | | | PageId: 82 RowCount: 38 DataSize: 1350 GroupDataSize: 8602 ErasedRowCount: 0 | | | > {5, 6} | | | PageId: 87 RowCount: 40 DataSize: 1416 GroupDataSize: 9358 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 66b}, [0, +2)row | ERowOp 1: {0, 1} | ERowOp 1: {0, 3} + Rows{2} Label{24 rev 1, 66b}, [2, +2)row | ERowOp 1: {0, 4} | ERowOp 1: {0, 6} + Rows{4} Label{44 rev 1, 82b}, [4, +2)row | ERowOp 1: {0, 7} | ERowOp 1: {0, 8} + Rows{8} Label{84 rev 1, 66b}, [6, +2)row | ERowOp 1: {0, 10} | ERowOp 1: {1, 1} + Rows{11} Label{114 rev 1, 66b}, [8, +2)row | ERowOp 1: {1, 3} | ERowOp 1: {1, 4} + Rows{14} Label{144 rev 1, 82b}, [10, +2)row | ERowOp 1: {1, 6} | ERowOp 1: {1, 7} + Rows{20} Label{204 rev 1, 66b}, [12, +2)row | ERowOp 1: {1, 8} | ERowOp 1: {1, 10} + Rows{23} Label{234 rev 1, 66b}, [14, +2)row | ERowOp 1: {2, 1} | ERowOp 1: {2, 3} + Rows{26} Label{264 rev 1, 82b}, [16, +2)row | ERowOp 1: {2, 4} | ERowOp 1: {2, 6} + Rows{36} Label{364 rev 1, 66b}, [18, +2)row | ERowOp 1: {2, 7} | ERowOp 1: {2, 8} + Rows{39} Label{394 rev 1, 66b}, [20, +2)row | ERowOp 1: {2, 10} | ERowOp 1: {3, 1} + Rows{42} Label{424 rev 1, 82b}, [22, +2)row | ERowOp 1: {3, 3} | ERowOp 1: {3, 4} + Rows{48} Label{484 rev 1, 66b}, [24, +2)row | ERowOp 1: {3, 6} | ERowOp 1: {3, 7} + Rows{53} Label{534 rev 1, 66b}, [26, +2)row | ERowOp 1: {3, 8} | ERowOp 1: {3, 10} + Rows{58} Label{584 rev 1, 82b}, [28, +2)row | ERowOp 1: {4, 1} | ERowOp 1: {4, 3} + Rows{64} Label{644 rev 1, 66b}, [30, +2)row | ERowOp 1: {4, 4} | ERowOp 1: {4, 6} + Rows{67} Label{674 rev 1, 66b}, [32, +2)row | ERowOp 1: {4, 7} | ERowOp 1: {4, 8} + Rows{70} Label{704 rev 1, 82b}, [34, +2)row | ERowOp 1: {4, 10} | ERowOp 1: {5, 1} + Rows{82} Label{824 rev 1, 66b}, [36, +2)row | ERowOp 1: {5, 3} | ERowOp 1: {5, 4} + Rows{87} Label{874 rev 1, 66b}, [38, +2)row | ERowOp 1: {5, 6} | ERowOp 1: {5, 7} Slices{ [0, 4), [6, 8), [8, 12), [14, 16), [16, 18), [20, 28), [32, 34), [34, 38), [38, 39] } Part{[1:2:3:0:0:0:0] eph 0, 10774b 40r} data 15576b + FlatIndex{95} Label{3 rev 3, 558b} 21 rec | Page Row Bytes (Uint32, Uint32) | 0 0 66b {0, 1} | 2 2 66b {0, 4} | 4 4 82b {0, 7} | 8 6 66b {0, 10} | 11 8 66b {1, 3} | 14 10 82b {1, 6} | 20 12 66b {1, 8} | 23 14 66b {2, NULL} | 26 16 82b {2, 4} | 36 18 66b {2, 7} | 39 20 66b {2, 10} | 42 22 82b {3, 3} | 48 24 66b {3, 6} | 53 26 66b {3, 8} | 58 28 82b {4, NULL} | 64 30 66b {4, 4} | 67 32 66b {4, 7} | 70 34 82b {4, 10} | 82 36 66b {5, 3} | 87 38 66b {5, 6} | 87 39 66b {5, 7} + BTreeIndex{PageId: 98 RowCount: 40 DataSize: 1416 GroupDataSize: 9358 ErasedRowCount: 0} Label{13 rev 1, 102b} | + BTreeIndex{PageId: 72 RowCount: 18 DataSize: 642 GroupDataSize: 3891 ErasedRowCount: 0} Label{13 rev 1, 151b} | | + BTreeIndex{PageId: 15 RowCount: 6 DataSize: 214 GroupDataSize: 1052 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 0 RowCount: 2 DataSize: 66 GroupDataSize: 76 ErasedRowCount: 0 | | | > {0, 4} | | | PageId: 2 RowCount: 4 DataSize: 132 GroupDataSize: 526 ErasedRowCount: 0 | | | > {0, 7} | | | PageId: 4 RowCount: 6 DataSize: 214 GroupDataSize: 1052 ErasedRowCount: 0 | | > {0, 10} | | + 
>> KqpImmediateEffects::UpdateAfterInsert >> KqpCost::IndexLookup+useSink [GOOD] >> KqpCost::IndexLookup-useSink [GOOD] >> KqpEffects::InsertAbort_Select_Success ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::OltpWriteRow+isSink [GOOD] Test command err: Trying to start YDB, gRPC: 26503, MsgBus: 9214 2025-03-31T16:03:42.324155Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004994651939923:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:42.324724Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000b97/r3tmp/tmpXYvsGW/pdisk_1.dat 2025-03-31T16:03:42.840446Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:42.843826Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:03:42.843931Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:03:42.845745Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26503, node 1 2025-03-31T16:03:43.079714Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:03:43.079744Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:03:43.079754Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:03:43.079887Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9214 TClient is connected to server localhost:9214 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:03:43.737610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting...
waiting... 2025-03-31T16:03:43.764675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:03:43.898334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:44.083326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:03:44.169013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:03:46.078218Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005011831810758:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:46.078410Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:46.458923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:03:46.499642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:03:46.549354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:03:46.598960Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:03:46.687069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:03:46.776961Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:03:46.863506Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005011831811281:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:46.863628Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:46.863952Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005011831811286:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:46.868279Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:03:46.881512Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005011831811288:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:03:46.952629Z node 1 :TX_PROXY ERROR: Actor# [1:7488005011831811343:3460] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:03:47.309445Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488004994651939923:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:47.309514Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:03:48.438025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 query_phases { duration_us: 5495 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 1705 affected_shards: 1 } compilation { duration_us: 54497 cpu_time_us: 51245 } process_cpu_time_us: 454 total_duration_us: 63840 total_cpu_time_us: 53404 query_phases { duration_us: 3387 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 1357 affected_shards: 1 } compilation { duration_us: 54357 cpu_time_us: 47610 } process_cpu_time_us: 394 total_duration_us: 62771 total_cpu_time_us: 49361 2025-03-31T16:03:48.910687Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=5; 2025-03-31T16:03:48.937313Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 5 at tablet 72075186224037919 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-03-31T16:03:48.937499Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 5 at tablet 72075186224037919 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-03-31T16:03:48.937787Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488005020421746450:2499], Table: `/Root/TestTable` ([72057594046644480:16:1]), SessionActorId: [1:7488005020421746233:2499]Got CONSTRAINT VIOLATION for table `/Root/TestTable`. ShardID=72075186224037919, Sink=[1:7488005020421746450:2499].{
: Error: Duplicate keys have been found., code: 2012 } 2025-03-31T16:03:48.938377Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488005020421746443:2499], SessionActorId: [1:7488005020421746233:2499], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TestTable`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[1:7488005020421746233:2499]. isRollback=0 2025-03-31T16:03:48.938622Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Y2M3MzAwYTMtYWZjMDQzZTUtOTQyNzQ3OTctNWFjNGYyMTk=, ActorId: [1:7488005020421746233:2499], ActorState: ExecuteState, TraceId: 01jqpf3qdv11ne3c2e5n2793vn, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7488005020421746444:2499] from: [1:7488005020421746443:2499] 2025-03-31T16:03:48.938711Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7488005020421746444:2499] TxId: 281474976710675. Ctx: { TraceId: 01jqpf3qdv11ne3c2e5n2793vn, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2M3MzAwYTMtYWZjMDQzZTUtOTQyNzQ3OTctNWFjNGYyMTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/TestTable`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-03-31T16:03:48.939758Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Y2M3MzAwYTMtYWZjMDQzZTUtOTQyNzQ3OTctNWFjNGYyMTk=, ActorId: [1:7488005020421746233:2499], ActorState: ExecuteState, TraceId: 01jqpf3qdv11ne3c2e5n2793vn, Create QueryResponse for error on request, msg: query_phases { duration_us: 38654 cpu_time_us: 2307 affected_shards: 1 } compilation { duration_us: 101664 cpu_time_us: 98957 } process_cpu_time_us: 551 total_duration_us: 143499 total_cpu_time_us: 101815 query_phases { duration_us: 6976 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 1527 affected_shards: 1 } compilation { duration_us: 72518 cpu_time_us: 68901 } process_cpu_time_us: 485 total_duration_us: 83385 total_cpu_time_us: 70913 query_phases { duration_us: 3133 cpu_time_us: 1610 affected_shards: 1 } compilation { duration_us: 101112 cpu_time_us: 97966 } process_cpu_time_us: 523 total_duration_us: 108295 total_cpu_time_us: 100099 query_phases { duration_us: 4746 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 1653 affected_shards: 1 } compilation { duration_us: 96013 cpu_time_us: 92946 } process_cpu_time_us: 566 total_duration_us: 104400 total_cpu_time_us: 95165 query_phases { duration_us: 3514 table_access { name: "/Root/TestTable" deletes { rows: 1 } partitions_count: 1 } cpu_time_us: 1411 affected_shards: 1 } compilation { duration_us: 54538 cpu_time_us: 52249 } process_cpu_time_us: 426 total_duration_us: 60598 total_cpu_time_us: 54086 query_phases { duration_us: 7648 table_access { name: "/Root/TestTable" deletes { rows: 1 } partitions_count: 1 } cpu_time_us: 1400 affected_shards: 1 } compilation { duration_us: 67236 cpu_time_us: 64607 } process_cpu_time_us: 468 total_duration_us: 77850 total_cpu_time_us: 66475 >> KqpCost::ScanQueryRangeFullScan+SourceRead [GOOD] >> KqpEffects::InsertRevert_Literal_Success |74.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move |74.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move |74.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move >> KqpCost::QuerySeviceRangeFullScan [GOOD] >> KqpInplaceUpdate::SingleRowArithm+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookup+useSink [GOOD] Test command err: Trying to start YDB, gRPC: 20523, MsgBus: 3464 2025-03-31T16:03:42.641535Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004993265563827:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:42.641832Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000b91/r3tmp/tmposQ3Ag/pdisk_1.dat 2025-03-31T16:03:43.162882Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:43.168163Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:03:43.168261Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:03:43.170931Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20523, node 1 2025-03-31T16:03:43.301795Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:03:43.301824Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:03:43.301832Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:03:43.301965Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3464 TClient is connected to server localhost:3464 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:03:43.919411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:43.933414Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:03:43.946937Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:44.090729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:44.343462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:44.434135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:46.857403Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005010445434654:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:46.857564Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:47.265456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:03:47.319520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:03:47.353428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:03:47.464690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:03:47.505581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:03:47.563738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:03:47.630724Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488004993265563827:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:47.630779Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:03:47.634462Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005014740402462:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:47.634539Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:47.634831Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005014740402467:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:47.639990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:03:47.655725Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005014740402469:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:03:47.720564Z node 1 :TX_PROXY ERROR: Actor# [1:7488005014740402525:3456] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:03:49.059923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 /Root/SecondaryKeys/Index/indexImplTable 1 8 /Root/SecondaryKeys 1 8 >> KqpInplaceUpdate::Negative_BatchUpdate+UseSink >> KqpCost::IndexLookupAndTake+useSink [GOOD] >> KqpWrite::UpsertNullKey ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookup-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 19043, MsgBus: 31336 2025-03-31T16:03:42.426082Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004995890447214:2110];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:42.426135Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000b88/r3tmp/tmppDuRd2/pdisk_1.dat 2025-03-31T16:03:43.039389Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:03:43.039495Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:03:43.041361Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19043, node 1 2025-03-31T16:03:43.110217Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:03:43.112008Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:43.116594Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:03:43.251583Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:03:43.251618Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:03:43.251627Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:03:43.251762Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31336 TClient is connected to server localhost:31336 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:03:44.085673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:44.107237Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:03:44.121111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:44.350353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:44.592825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:44.687938Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:46.941922Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005013070318127:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:46.942054Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:47.267469Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:03:47.340855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:03:47.406428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:03:47.426172Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488004995890447214:2110];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:47.426245Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:03:47.479121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:03:47.522785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:03:47.567952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:03:47.659641Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005017365285951:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:47.659715Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:47.659927Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005017365285956:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:47.664249Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:03:47.680079Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005017365285958:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:03:47.763662Z node 1 :TX_PROXY ERROR: Actor# [1:7488005017365286014:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:03:49.043842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 /Root/SecondaryKeys/Index/indexImplTable 1 8 /Root/SecondaryKeys 1 8 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::ScanQueryRangeFullScan+SourceRead [GOOD] Test command err: Trying to start YDB, gRPC: 64764, MsgBus: 12712 2025-03-31T16:03:43.686493Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004998043493107:2170];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:43.699568Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000b86/r3tmp/tmpfRwXui/pdisk_1.dat 2025-03-31T16:03:44.122537Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:44.127461Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:03:44.127569Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:03:44.130321Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64764, node 1 2025-03-31T16:03:44.251654Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:03:44.251677Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:03:44.251684Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:03:44.251805Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12712 TClient is connected to server localhost:12712 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:03:44.962142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:44.982012Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:44.995516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:03:45.172294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:03:45.366753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:03:45.454738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:47.772967Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005015223363941:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:47.773113Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:48.233397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:03:48.276309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:03:48.332077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:03:48.422124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:03:48.507846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:03:48.612260Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:03:48.683623Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488004998043493107:2170];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:48.691421Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:03:48.755549Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005019518331765:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:48.755699Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:48.763293Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005019518331771:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:48.768763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:03:48.786249Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005019518331774:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:03:48.880412Z node 1 :TX_PROXY ERROR: Actor# [1:7488005019518331833:3462] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:03:50.248153Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: kqp_proxy, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1743437030 AvailableComputeActors: 10000 UsedMemory: 0 TotalMemory: 10737418240 Memory { Pool: 1 Available: 10737418240 } ExecutionUnits: 10000 KqpProxyNodeResources { NodeId: 1 DataCenterNumId: 49 ActiveWorkersCount: 0 DataCenterId: "1" } 2025-03-31T16:03:50.248245Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T16:03:50.372234Z node 1 :KQP_GATEWAY DEBUG: Load table metadata from cache by path, request Path: /Root/Test 2025-03-31T16:03:50.759415Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpSnapshotManager at [1:7488005028108266780:2492] 2025-03-31T16:03:50.759452Z node 1 :KQP_RESOURCE_MANAGER DEBUG: KqpSnapshotManager: got snapshot request from [1:7488005028108266684:2492] 2025-03-31T16:03:50.766801Z node 1 :KQP_RESOURCE_MANAGER DEBUG: KqpSnapshotManager: snapshot 1743437030811:281474976710671 created 2025-03-31T16:03:50.767125Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7488005028108266793:2492] TxId: 281474976710672. Ctx: { TraceId: 01jqpf3rv7913qrmezq58b4dyd, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDVmZTBkNGYtYTk4MTQzNWYtZjJkMDMxNDktMWQzZGVmYmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Executing physical tx, type: 3, stages: 2 2025-03-31T16:03:50.767337Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 1, OutputsCount: 1 2025-03-31T16:03:50.767355Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,1], InputsCount: 1, OutputsCount: 1 2025-03-31T16:03:50.767564Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710672. Resolved key sets: 1 2025-03-31T16:03:50.767740Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 9] Access: 1 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL, String : NULL) IncFrom: 1 To: () IncTo: 0 } 2025-03-31T16:03:50.767810Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7488005028108266793:2492] TxId: 281474976710672. Ctx: { TraceId: 01jqpf3rv7913qrmezq58b4dyd, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDVmZTBkNGYtYTk4MTQzNWYtZjJkMDMxNDktMWQzZGVmYmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Start resolving tablets nodes... (1) 2025-03-31T16:03:50.767928Z node 1 :KQP_EXECUTER DEBUG: [ShardsResolver] TxId: 281474976710672. 
Shard resolve complete, resolved shards: 1 2025-03-31T16:03:50.767969Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7488005028108266793:2492] TxId: 281474976710672. Ctx: { TraceId: 01jqpf3rv7913qrmezq58b4dyd, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDVmZTBkNGYtYTk4MTQzNWYtZjJkMDMxNDktMWQzZGVmYmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolved, success: 1, failed: 0 2025-03-31T16:03:50.768012Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7488005028108266793:2492] TxId: 281474976710672. Ctx: { TraceId: 01jqpf3rv7913qrmezq58b4dyd, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDVmZTBkNGYtYTk4MTQzNWYtZjJkMDMxNDktMWQzZGVmYmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards on nodes: node 1: [72075186224037914] 2025-03-31T16:03:50.768029Z node ... SuppliedId: , PoolId: default}. Waiting for: CA [1:7488005028108266797:2502], CA [1:7488005028108266798:2503], 2025-03-31T16:03:50.786310Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7488005028108266793:2492] TxId: 281474976710672. Ctx: { TraceId: 01jqpf3rv7913qrmezq58b4dyd, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDVmZTBkNGYtYTk4MTQzNWYtZjJkMDMxNDktMWQzZGVmYmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Send TEvStreamData to [1:7488005028108266684:2492], seqNo: 1, nRows: 1 2025-03-31T16:03:50.786359Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7488005028108266797:2502], TxId: 281474976710672, task: 1. Ctx: { TraceId : 01jqpf3rv7913qrmezq58b4dyd. SessionId : ydb://session/3?node_id=1&id=NDVmZTBkNGYtYTk4MTQzNWYtZjJkMDMxNDktMWQzZGVmYmQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646927 2025-03-31T16:03:50.786396Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7488005028108266797:2502], TxId: 281474976710672, task: 1. Ctx: { TraceId : 01jqpf3rv7913qrmezq58b4dyd. SessionId : ydb://session/3?node_id=1&id=NDVmZTBkNGYtYTk4MTQzNWYtZjJkMDMxNDktMWQzZGVmYmQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646922 2025-03-31T16:03:50.786419Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 1. Tasks execution finished 2025-03-31T16:03:50.786444Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7488005028108266797:2502], TxId: 281474976710672, task: 1. Ctx: { TraceId : 01jqpf3rv7913qrmezq58b4dyd. SessionId : ydb://session/3?node_id=1&id=NDVmZTBkNGYtYTk4MTQzNWYtZjJkMDMxNDktMWQzZGVmYmQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Compute state finished. All channels and sinks finished 2025-03-31T16:03:50.786570Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 1. pass away 2025-03-31T16:03:50.786720Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976710672;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-03-31T16:03:50.786901Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 281474976710672, taskId: 1. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2025-03-31T16:03:50.787004Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule publish at 2025-03-31T16:03:52.243851Z, after 1.460879s 2025-03-31T16:03:50.789468Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7488005028108266793:2492] TxId: 281474976710672. Ctx: { TraceId: 01jqpf3rv7913qrmezq58b4dyd, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDVmZTBkNGYtYTk4MTQzNWYtZjJkMDMxNDktMWQzZGVmYmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [1:7488005028108266797:2502], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 14694 DurationUs: 2000 Tasks { TaskId: 1 CpuTimeUs: 1129 FinishTimeMs: 1743437030786 OutputRows: 1 OutputBytes: 19 Tables { TablePath: "/Root/Test" ReadRows: 1 ReadBytes: 20 AffectedPartitions: 1 } IngressRows: 3 ComputeCpuTimeUs: 147 BuildCpuTimeUs: 982 HostName: "ghrun-tuhoqst4xe" NodeId: 1 StartTimeMs: 1743437030784 CreateTimeMs: 1743437030769 } MaxMemoryUsage: 1048576 } 2025-03-31T16:03:50.789552Z node 1 :KQP_EXECUTER INFO: TxId: 281474976710672. Ctx: { TraceId: 01jqpf3rv7913qrmezq58b4dyd, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDVmZTBkNGYtYTk4MTQzNWYtZjJkMDMxNDktMWQzZGVmYmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [1:7488005028108266797:2502] 2025-03-31T16:03:50.789617Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7488005028108266793:2492] TxId: 281474976710672. Ctx: { TraceId: 01jqpf3rv7913qrmezq58b4dyd, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDVmZTBkNGYtYTk4MTQzNWYtZjJkMDMxNDktMWQzZGVmYmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [1:7488005028108266798:2503], 2025-03-31T16:03:50.789802Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710672, send ack to channelId: 2, seqNo: 1, enough: 0, freeSpace: 8388488, to: [1:7488005028108266801:2503] 2025-03-31T16:03:50.789854Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7488005028108266798:2503], TxId: 281474976710672, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=NDVmZTBkNGYtYTk4MTQzNWYtZjJkMDMxNDktMWQzZGVmYmQ=. CustomerSuppliedId : . TraceId : 01jqpf3rv7913qrmezq58b4dyd. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646922 2025-03-31T16:03:50.789917Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2025-03-31T16:03:50.789929Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 2. Tasks execution finished 2025-03-31T16:03:50.789948Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7488005028108266798:2503], TxId: 281474976710672, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=NDVmZTBkNGYtYTk4MTQzNWYtZjJkMDMxNDktMWQzZGVmYmQ=. CustomerSuppliedId : . TraceId : 01jqpf3rv7913qrmezq58b4dyd. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Compute state finished. All channels and sinks finished 2025-03-31T16:03:50.790015Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 2. pass away 2025-03-31T16:03:50.790113Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976710672;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-03-31T16:03:50.790302Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 281474976710672, taskId: 2. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2025-03-31T16:03:50.790453Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7488005028108266793:2492] TxId: 281474976710672. Ctx: { TraceId: 01jqpf3rv7913qrmezq58b4dyd, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDVmZTBkNGYtYTk4MTQzNWYtZjJkMDMxNDktMWQzZGVmYmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [1:7488005028108266798:2503], task: 2, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 5205 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 785 FinishTimeMs: 1743437030789 InputRows: 1 InputBytes: 19 OutputRows: 1 OutputBytes: 19 ResultRows: 1 ResultBytes: 19 ComputeCpuTimeUs: 143 BuildCpuTimeUs: 642 HostName: "ghrun-tuhoqst4xe" NodeId: 1 CreateTimeMs: 1743437030781 } MaxMemoryUsage: 1048576 } 2025-03-31T16:03:50.790486Z node 1 :KQP_EXECUTER INFO: TxId: 281474976710672. Ctx: { TraceId: 01jqpf3rv7913qrmezq58b4dyd, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDVmZTBkNGYtYTk4MTQzNWYtZjJkMDMxNDktMWQzZGVmYmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [1:7488005028108266798:2503] 2025-03-31T16:03:50.792483Z node 1 :KQP_EXECUTER INFO: ActorId: [1:7488005028108266793:2492] TxId: 281474976710672. Ctx: { TraceId: 01jqpf3rv7913qrmezq58b4dyd, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDVmZTBkNGYtYTk4MTQzNWYtZjJkMDMxNDktMWQzZGVmYmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Full stats: { CpuTimeUs: 32143 DurationUs: 23266 Tables { TablePath: "/Root/Test" ReadRows: 1 ReadBytes: 20 AffectedPartitions: 1 } ExecuterCpuTimeUs: 12244 StartTimeMs: 1743437030767 FinishTimeMs: 1743437030790 Stages { StageId: 1 StageGuid: "64bf6da8-6541546-5913d58e-fbc5fc65" Program: "(\n(return (lambda \'($1) (FromFlow (NarrowMap (Take (ToFlow $1) (Uint64 \'1)) (lambda \'($2 $3 $4 $5) (AsStruct \'(\'\"Amount\" $2) \'(\'\"Comment\" $3) \'(\'\"Group\" $4) \'(\'\"Name\" $5)))))))\n)\n" ComputeActors { CpuTimeUs: 5205 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 785 FinishTimeMs: 1743437030789 InputRows: 1 InputBytes: 19 OutputRows: 1 OutputBytes: 19 ResultRows: 1 ResultBytes: 19 ComputeCpuTimeUs: 143 BuildCpuTimeUs: 642 HostName: "ghrun-tuhoqst4xe" NodeId: 1 CreateTimeMs: 1743437030781 } MaxMemoryUsage: 1048576 } BaseTimeMs: 1743437030784 } Stages { StageGuid: "5048cae8-bf3397e0-757f80a5-80c966bc" Program: "(\n(return (lambda \'($1) (block \'(\n (let $2 (lambda \'($5) (block \'(\n (let $6 (Member $5 \'\"Amount\"))\n (return $6 (Member $5 \'\"Comment\") (Member $5 \'\"Group\") (Member $5 \'\"Name\") (Coalesce (< $6 (Uint64 \'\"5000\")) (Bool \'false)))\n ))))\n (let $3 (WideFilter (ExpandMap (ToFlow $1) $2) (lambda \'($7 $8 $9 $10 $11) $11) (Uint64 \'1)))\n (let $4 (lambda \'($12 $13 $14 $15 $16) $12 $13 $14 $15))\n (return (FromFlow (WideMap $3 $4)))\n))))\n)\n" BaseTimeMs: 1743437030784 } TxPlansWithStats: "{\"Node Type\":\"Phase\",\"PlanNodeId\":6,\"Plans\":[{\"Node Type\":\"ResultSet\",\"PlanNodeId\":5,\"PlanNodeType\":\"ResultSet\",\"Plans\":[{\"Node Type\":\"Limit\",\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":3}],\"Limit\":\"1\",\"Name\":\"Limit\"}],\"PlanNodeId\":4,\"Plans\":[{\"Node Type\":\"Merge\",\"PlanNodeId\":3,\"PlanNodeType\":\"Connection\",\"Plans\":[{\"Node 
Type\":\"Limit-Filter\",\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Limit\":\"1\",\"Name\":\"Limit\"},{\"E-Cost\":\"No estimate\",\"E-Rows\":\"No estimate\",\"E-Size\":\"No estimate\",\"Inputs\":[{\"ExternalPlanNodeId\":1}],\"Name\":\"Filter\",\"Predicate\":\"item.Amount \\u003C 5000\"}],\"PlanNodeId\":2,\"Plans\":[{\"Node Type\":\"TableFullScan\",\"Operators\":[{\"E-Cost\":\"No estimate\",\"E-Rows\":\"No estimate\",\"E-Size\":\"No estimate\",\"Inputs\":[],\"Name\":\"TableFullScan\",\"Path\":\"\\/Root\\/Test\",\"ReadColumns\":[\"Amount\",\"Comment\",\"Group\",\"Name\"],\"ReadRanges\":[\"Group (-∞, +∞)\",\"Name (-∞, +∞)\"],\"ReadRangesPointPrefixLen\":\"0\",\"Scan\":\"Parallel\",\"Table\":\"Test\"}],\"PlanNodeId\":1,\"StageGuid\":\"\",\"Tables\":[\"Test\"]}],\"StageGuid\":\"5048cae8-bf3397e0-757f80a5-80c966bc\",\"Stats\":{\"BaseTimeMs\":1743437030784,\"FinishedTasks\":0,\"PhysicalStageId\":0,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"SortColumns\":[\"Group (Asc)\"],\"StageGuid\":\"\"}],\"StageGuid\":\"64bf6da8-6541546-5913d58e-fbc5fc65\",\"Stats\":{\"BaseTimeMs\":1743437030784,\"ComputeNodes\":[{\"CpuTimeUs\":5205,\"Tasks\":[{\"ComputeTimeUs\":143,\"FinishTimeMs\":1743437030789,\"Host\":\"ghrun-tuhoqst4xe\",\"InputBytes\":19,\"InputRows\":1,\"NodeId\":1,\"OutputBytes\":19,\"OutputRows\":1,\"ResultBytes\":19,\"ResultRows\":1,\"TaskId\":2}]}],\"FinishedTasks\":0,\"PhysicalStageId\":1,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"StageGuid\":\"\"}],\"StageGuid\":\"\"}" StatConvertBytes: 1676 Extra { type_url: "type.googleapis.com/NKqpProto.TKqpExecutionExtraStats" value: "\010\001\022\014\010\325(\020\346r\030\273\233\001 \002" } } 2025-03-31T16:03:50.792541Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7488005028108266793:2492] TxId: 281474976710672. Ctx: { TraceId: 01jqpf3rv7913qrmezq58b4dyd, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDVmZTBkNGYtYTk4MTQzNWYtZjJkMDMxNDktMWQzZGVmYmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-03-31T16:03:50.792590Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7488005028108266793:2492] TxId: 281474976710672. Ctx: { TraceId: 01jqpf3rv7913qrmezq58b4dyd, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDVmZTBkNGYtYTk4MTQzNWYtZjJkMDMxNDktMWQzZGVmYmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.019899s ReadRows: 1 ReadBytes: 20 ru: 13 rate limiter was not found force flag: 1 2025-03-31T16:03:50.793541Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437030811, txId: 281474976710671] shutting down >> KqpWrite::ProjectReplace+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::QuerySeviceRangeFullScan [GOOD] Test command err: Trying to start YDB, gRPC: 20359, MsgBus: 14337 2025-03-31T16:03:44.486012Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005001807238865:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:44.486050Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000b65/r3tmp/tmpfYlqUs/pdisk_1.dat 2025-03-31T16:03:45.030916Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:45.033934Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:03:45.034032Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:03:45.039057Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20359, node 1 2025-03-31T16:03:45.211681Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:03:45.211703Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:03:45.211710Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:03:45.211843Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14337 TClient is connected to server localhost:14337 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:03:45.998801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:03:46.024951Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:03:46.046904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:46.207516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:46.432973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:46.563148Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:48.986606Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005018987109811:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:48.986737Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:49.336013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:03:49.391367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:03:49.440723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:03:49.487449Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488005001807238865:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:49.487500Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:03:49.509232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:03:49.555308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:03:49.640120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:03:49.707428Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005023282077626:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:49.707512Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:49.707893Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005023282077631:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:49.712257Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:03:49.726571Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005023282077633:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:03:49.811723Z node 1 :TX_PROXY ERROR: Actor# [1:7488005023282077689:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpImmediateEffects::ForceImmediateEffectsExecution+UseSink [GOOD] >> KqpImmediateEffects::ForceImmediateEffectsExecution-UseSink >> KqpImmediateEffects::UpsertAfterInsertWithIndex >> TExecutorDb::RandomCoordinatorSimulation [GOOD] >> TExecutorDb::MultiPage ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupAndTake+useSink [GOOD] Test command err: Trying to start YDB, gRPC: 21963, MsgBus: 11462 2025-03-31T16:03:44.184378Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005003310800226:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:44.184419Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000b78/r3tmp/tmpDoyikJ/pdisk_1.dat 2025-03-31T16:03:44.745897Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:44.766746Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:03:44.766855Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:03:44.768948Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21963, node 1 2025-03-31T16:03:45.027964Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:03:45.027993Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:03:45.028015Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:03:45.028166Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11462 TClient is connected to server localhost:11462 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:03:45.766326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:45.803987Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:03:45.823549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:46.017760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:46.227975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:46.319595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:48.706678Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005020490671179:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:48.706768Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:49.099144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:03:49.145825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:03:49.185417Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488005003310800226:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:49.185527Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:03:49.210099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:03:49.281367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:03:49.322053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:03:49.403804Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:03:49.470731Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005024785638994:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:49.470879Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:49.473554Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005024785639000:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:49.477764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:03:49.496472Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005024785639002:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:03:49.558748Z node 1 :TX_PROXY ERROR: Actor# [1:7488005024785639055:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:03:50.855734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 /Root/SecondaryKeys/Index/indexImplTable 2 16 /Root/SecondaryKeys 1 8 >> KqpInplaceUpdate::SingleRowStr+UseSink [GOOD] >> KqpInplaceUpdate::SingleRowStr-UseSink >> KqpCost::ScanScriptingRangeFullScan-SourceRead [GOOD] >> BuildStatsHistogram::Five_Five_Crossed [GOOD] >> BuildStatsHistogram::Single_Small_2_Levels >> KqpWrite::CastValues >> QueryStats::Ranges [GOOD] >> BuildStatsHistogram::Single_Small_2_Levels [GOOD] >> BuildStatsHistogram::Single_Small_2_Levels_3_Buckets [GOOD] >> BuildStatsHistogram::Single_Small_1_Level |74.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest >> BuildStatsHistogram::Single_Small_1_Level [GOOD] >> BuildStatsHistogram::Single_Small_0_Levels [GOOD] >> BuildStatsHistogram::Mixed_Groups_History >> TExecutorDb::MultiPage [GOOD] >> TExecutorDb::EncodedPage >> KqpEffects::InsertAbort_Select_Duplicates+UseSink [GOOD] >> KqpEffects::InsertAbort_Select_Conflict-UseSink >> KqpImmediateEffects::InsertDuplicates+UseSink [GOOD] >> KqpImmediateEffects::InsertDuplicates-UseSink >> KqpWrite::Insert [GOOD] >> KqpWrite::InsertRevert |74.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest >> QueryStats::Ranges [GOOD] >> TExecutorDb::EncodedPage [GOOD] >> TFlatCxxDatabaseTest::BasicSchemaTest |74.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest |74.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest >> KqpLimits::DataShardReplySizeExceeded [GOOD] >> TFlatCxxDatabaseTest::BasicSchemaTest [GOOD] >> TFlatCxxDatabaseTest::RenameColumnSchemaTest [GOOD] >> TFlatCxxDatabaseTest::SchemaFillerTest [GOOD] >> TFlatDatabaseDecimal::UpdateRead [GOOD] >> TFlatEraseCacheTest::BasicUsage [GOOD] >> TFlatEraseCacheTest::BasicUsageReverse [GOOD] >> TFlatEraseCacheTest::CacheEviction >> TFlatEraseCacheTest::CacheEviction [GOOD] >> TFlatEraseCacheTest::StressGarbageCollection [GOOD] >> TFlatEraseCacheTest::StressGarbageCollectionWithStrings >> KqpPg::InsertFromSelect_Serial-useSink [GOOD] >> KqpPg::InsertNoTargetColumns_ColumnOrder+useSink >> TFlatEraseCacheTest::StressGarbageCollectionWithStrings [GOOD] >> TFlatExecutorLeases::Basics ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::ScanScriptingRangeFullScan-SourceRead [GOOD] Test command err: Trying to start YDB, gRPC: 25024, MsgBus: 10784 2025-03-31T16:03:46.180498Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005009749869061:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:46.180969Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/0l5c/000b52/r3tmp/tmphvL92n/pdisk_1.dat 2025-03-31T16:03:47.037802Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:47.048516Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:03:47.048595Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:03:47.052844Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25024, node 1 2025-03-31T16:03:47.361898Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:03:47.361917Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:03:47.361943Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:03:47.362043Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10784 TClient is connected to server localhost:10784 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:03:48.333433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:48.352579Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:03:48.361080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:48.688385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:48.971336Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:49.126333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:03:51.229054Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488005009749869061:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:51.229109Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:03:51.602346Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005031224707199:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:51.607184Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:52.094843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:03:52.168090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:03:52.238077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:03:52.276546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:03:52.372305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:03:52.460398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:03:52.531246Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005035519675024:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:52.531342Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:52.532848Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005035519675029:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:52.537165Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:03:52.562896Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-31T16:03:52.563223Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005035519675031:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:03:52.631533Z node 1 :TX_PROXY ERROR: Actor# [1:7488005035519675086:3463] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:03:54.394243Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437034381, txId: 281474976710671] shutting down >> KqpImmediateEffects::Upsert [GOOD] >> KqpImmediateEffects::UpsertAfterInsert >> BuildStatsHistogram::Mixed_Groups_History [GOOD] >> BuildStatsHistogram::Serial_Groups_History |74.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest >> KqpImmediateEffects::ConflictingKeyW1WR2 [GOOD] >> KqpImmediateEffects::ConflictingKeyW1WRR2 >> DataShardVolatile::VolatileTxAbortedOnDrop [GOOD] >> DataShardVolatile::UpsertNoLocksArbiter+UseSink >> BuildStatsHistogram::Serial_Groups_History [GOOD] >> BuildStatsHistogram::Benchmark |74.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest |74.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest |74.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest |74.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest >> TFlatExecutorLeases::Basics [GOOD] >> TFlatExecutorLeases::BasicsLeaseTimeout >> KqpEffects::InsertRevert_Literal_Success [GOOD] >> KqpEffects::UpdateOn_Literal >> BuildStatsHistogram::Benchmark [GOOD] >> BuildStatsHistogram::Many_Mixed >> KqpImmediateEffects::Delete [GOOD] >> KqpImmediateEffects::DeleteAfterInsert >> KqpEffects::InsertAbort_Select_Success [GOOD] >> KqpEffects::InsertAbort_Select_Duplicates-UseSink >> KqpImmediateEffects::UpdateAfterInsert [GOOD] >> KqpImmediateEffects::UnobservedUncommittedChangeConflict >> TFlatExecutorLeases::BasicsLeaseTimeout [GOOD] >> TFlatExecutorLeases::BasicsInitialLease ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::DataShardReplySizeExceeded [GOOD] Test command err: Trying to start YDB, gRPC: 21152, MsgBus: 5781 2025-03-31T16:02:33.167317Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004699586181408:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:02:33.167367Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000acd/r3tmp/tmplG0xaT/pdisk_1.dat 2025-03-31T16:02:33.677103Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:02:33.677238Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:02:33.681693Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:02:33.712243Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21152, node 1 2025-03-31T16:02:33.788914Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2025-03-31T16:02:33.788947Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:02:33.788954Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:02:33.789093Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5781 TClient is connected to server localhost:5781 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:02:34.383038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:34.421812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:36.788681Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004712471084324:2355], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:36.789087Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:36.789602Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004712471084336:2358], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:36.794049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-31T16:02:36.806534Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488004712471084338:2359], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-31T16:02:36.869785Z node 1 :TX_PROXY ERROR: Actor# [1:7488004712471084389:2606] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:02:37.445584Z node 1 :KQP_COMPUTE WARN: fline=kqp_compute_actor_factory.cpp:40;problem=cannot_allocate_memory;tx_id=281474976710661;task_id=3;memory=1048576; 2025-03-31T16:02:37.445628Z node 1 :KQP_COMPUTE WARN: TxId: 281474976710661, task: 3. [Mem] memory 1048576 NOT granted 2025-03-31T16:02:37.458460Z node 1 :KQP_COMPUTE WARN: fline=kqp_compute_actor_factory.cpp:40;problem=cannot_allocate_memory;tx_id=281474976710661;task_id=2;memory=1048576; 2025-03-31T16:02:37.458485Z node 1 :KQP_COMPUTE WARN: TxId: 281474976710661, task: 2. [Mem] memory 1048576 NOT granted 2025-03-31T16:02:37.475301Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488004716766051746:2370], TxId: 281474976710661, task: 3. Ctx: { SessionId : ydb://session/3?node_id=1&id=YzljOTViMGUtZDdlNjVjMjgtMmFhOGRkNzEtN2UzNGFlODk=. TraceId : 01jqpf1f24a5ha2s64j3cyp57x. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: OVERLOADED KIKIMR_PRECONDITION_FAILED: {
: Error: Mkql memory limit exceeded, allocated by task 3: 10, host: ghrun-tuhoqst4xe, canAllocateExtraMemory: 1, memory manager details for current node: TxResourcesInfo { TxId: 281474976710661, Database: /Root, PoolId: default, MemoryPoolPercent: 100.00, tx initially granted memory: 50B, tx total memory allocations: 1MiB, tx largest successful memory allocation: 1MiB, tx last failed memory allocation: 1MiB, tx total execution units: 5, started at: 2025-03-31T16:02:37.417461Z }, code: 2029 }. 2025-03-31T16:02:37.478368Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488004716766051745:2369], TxId: 281474976710661, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=YzljOTViMGUtZDdlNjVjMjgtMmFhOGRkNzEtN2UzNGFlODk=. TraceId : 01jqpf1f24a5ha2s64j3cyp57x. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: OVERLOADED KIKIMR_PRECONDITION_FAILED: {
: Error: Mkql memory limit exceeded, allocated by task 2: 10, host: ghrun-tuhoqst4xe, canAllocateExtraMemory: 1, memory manager details for current node: TxResourcesInfo { TxId: 281474976710661, Database: /Root, PoolId: default, MemoryPoolPercent: 100.00, tx initially granted memory: 50B, tx total memory allocations: 1MiB, tx largest successful memory allocation: 1MiB, tx last failed memory allocation: 1MiB, tx total execution units: 5, started at: 2025-03-31T16:02:37.417461Z }, code: 2029 }. 2025-03-31T16:02:37.478848Z node 1 :KQP_COMPUTE WARN: fline=kqp_compute_actor_factory.cpp:40;problem=cannot_allocate_memory;tx_id=281474976710661;task_id=4;memory=1048576; 2025-03-31T16:02:37.478885Z node 1 :KQP_COMPUTE WARN: TxId: 281474976710661, task: 4. [Mem] memory 1048576 NOT granted 2025-03-31T16:02:37.479157Z node 1 :KQP_COMPUTE WARN: fline=kqp_compute_actor_factory.cpp:40;problem=cannot_allocate_memory;tx_id=281474976710661;task_id=5;memory=1048576; 2025-03-31T16:02:37.479190Z node 1 :KQP_COMPUTE WARN: TxId: 281474976710661, task: 5. [Mem] memory 1048576 NOT granted 2025-03-31T16:02:37.479290Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488004716766051747:2371], TxId: 281474976710661, task: 4. Ctx: { SessionId : ydb://session/3?node_id=1&id=YzljOTViMGUtZDdlNjVjMjgtMmFhOGRkNzEtN2UzNGFlODk=. CustomerSuppliedId : . TraceId : 01jqpf1f24a5ha2s64j3cyp57x. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: OVERLOADED KIKIMR_PRECONDITION_FAILED: {
: Error: Mkql memory limit exceeded, allocated by task 4: 10, host: ghrun-tuhoqst4xe, canAllocateExtraMemory: 1, memory manager details for current node: TxResourcesInfo { TxId: 281474976710661, Database: /Root, PoolId: default, MemoryPoolPercent: 100.00, tx initially granted memory: 30B, tx total memory allocations: 1MiB, tx largest successful memory allocation: 1MiB, tx last failed memory allocation: 1MiB, tx total execution units: 3, started at: 2025-03-31T16:02:37.417461Z }, code: 2029 }. 2025-03-31T16:02:37.479607Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488004716766051748:2372], TxId: 281474976710661, task: 5. Ctx: { SessionId : ydb://session/3?node_id=1&id=YzljOTViMGUtZDdlNjVjMjgtMmFhOGRkNzEtN2UzNGFlODk=. CustomerSuppliedId : . TraceId : 01jqpf1f24a5ha2s64j3cyp57x. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: OVERLOADED KIKIMR_PRECONDITION_FAILED: {
: Error: Mkql memory limit exceeded, allocated by task 5: 10, host: ghrun-tuhoqst4xe, canAllocateExtraMemory: 1, memory manager details for current node: TxResourcesInfo { TxId: 281474976710661, Database: /Root, PoolId: default, MemoryPoolPercent: 100.00, tx initially granted memory: 20B, tx total memory allocations: 1MiB, tx largest successful memory allocation: 1MiB, tx last failed memory allocation: 1MiB, tx total execution units: 2, started at: 2025-03-31T16:02:37.417461Z }, code: 2029 }. 2025-03-31T16:02:37.484118Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488004716766051744:2368], TxId: 281474976710661, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=YzljOTViMGUtZDdlNjVjMjgtMmFhOGRkNzEtN2UzNGFlODk=. TraceId : 01jqpf1f24a5ha2s64j3cyp57x. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7488004716766051718:2354], status: OVERLOADED, reason: {
: Error: Terminate execution } 2025-03-31T16:02:37.491939Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YzljOTViMGUtZDdlNjVjMjgtMmFhOGRkNzEtN2UzNGFlODk=, ActorId: [1:7488004712471084318:2354], ActorState: ExecuteState, TraceId: 01jqpf1f24a5ha2s64j3cyp57x, Create QueryResponse for error on request, msg:
: Error: Mkql memory limit exceeded, allocated by task 3: 10, host: ghrun-tuhoqst4xe, canAllocateExtraMemory: 1, memory manager details for current node: TxResourcesInfo { TxId: 281474976710661, Database: /Root, PoolId: default, MemoryPoolPercent: 100.00, tx initially granted memory: 50B, tx total memory allocations: 1MiB, tx largest successful memory allocation: 1MiB, tx last failed memory allocation: 1MiB, tx total execution units: 5, started at: 2025-03-31T16:02:37.417461Z } , code: 2029 query_phases { duration_us: 86303 table_access { name: "/Root/LargeTable" partitions_count: 2 } cpu_time_us: 143862 affected_shards: 8 } compilation { duration_us: 526724 cpu_time_us: 521941 } process_cpu_time_us: 589 query_plan: "{\"Plan\":{\"Plans\":[{\"PlanNodeId\":5,\"Plans\":[{\"PlanNodeId\":4,\"Plans\":[{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"Tables\":[\"LargeTable\"],\"PlanNodeId\":1,\"Operators\":[{\"Scan\":\"Parallel\",\"E-Size\":\"No estimate\",\"ReadRanges\":[\"Key (-\342\210\236, +\342\210\236)\",\"KeyText (-\342\210\236, +\342\210\236)\"],\"Name\":\"TableFullScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/LargeTable\",\"ReadRangesPointPrefixLen\":\"0\",\"E-Rows\":\"No estimate\",\"Table\":\"LargeTable\",\"ReadColumns\":[\"Data\",\"DataText\",\"Key\",\"KeyText\"],\"E-Cost\":\"No estimate ... t have access permissions } 2025-03-31T16:02:47.180790Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488004756690436163:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:47.183977Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:02:47.206435Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488004756690436165:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:02:47.296008Z node 3 :TX_PROXY ERROR: Actor# [3:7488004756690436221:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:02:47.892856Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488004735215597400:2057];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:02:47.892922Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:02:58.088361Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:02:58.088404Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:42.413951Z node 3 :KQP_EXECUTER WARN: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqpf1wdv1jh8xbeez8em31zz, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=N2I3YWU2ZjMtNzhmMTA0ZmEtZDBiYTYwZDUtZjQzNWViOGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, memory limit exceeded. 2025-03-31T16:03:42.415299Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=N2I3YWU2ZjMtNzhmMTA0ZmEtZDBiYTYwZDUtZjQzNWViOGI=, ActorId: [3:7488004760985403812:2497], ActorState: ExecuteState, TraceId: 01jqpf1wdv1jh8xbeez8em31zz, Create QueryResponse for error on request, msg: 2025-03-31T16:03:42.415500Z node 3 :KQP_SLOW_LOG WARN: TraceId: "01jqpf1wdv1jh8xbeez8em31zz", SessionId: ydb://session/3?node_id=3&id=N2I3YWU2ZjMtNzhmMTA0ZmEtZDBiYTYwZDUtZjQzNWViOGI=, Slow query, duration: 54.035466s, status: PRECONDITION_FAILED, user: UNAUTHENTICATED, results: 0b, text: "\n SELECT ToDict(\n ListMap(\n ListFromRange(0ul, 5000000ul),\n ($x) -> { RETURN AsTuple($x, $x + 1); }\n )\n );\n ", parameters: 0b
: Warning: Type annotation, code: 1030
:2:13: Warning: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject, At function: SqlProjectItem
:2:20: Warning: At function: ToDict
:5:38: Warning: At function: OrderedMap
:5:53: Warning: At function: +
:5:53: Warning: Integral type implicit bitcast: Uint64 and Int32, code: 1107
: Error: Memory limit exceeded, code: 2029 Trying to start YDB, gRPC: 26604, MsgBus: 22457 2025-03-31T16:03:43.683437Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488004999700840233:2207];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:43.749447Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000acd/r3tmp/tmpIWQyl1/pdisk_1.dat 2025-03-31T16:03:43.927926Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:43.933194Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:03:43.933300Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:03:43.936932Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26604, node 4 2025-03-31T16:03:44.089046Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:03:44.089077Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:03:44.089086Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:03:44.089222Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22457 TClient is connected to server localhost:22457 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:03:44.768901Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:44.790030Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:44.896036Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:45.127975Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:03:45.230588Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:48.368771Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488005021175678340:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:48.368872Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:48.523952Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:03:48.583163Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:03:48.645015Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:03:48.682642Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7488004999700840233:2207];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:48.682823Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:03:48.716032Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:03:48.798068Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:03:48.893715Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:03:49.008265Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488005025470646159:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:49.008372Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:49.008826Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488005025470646164:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:49.014844Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:03:49.051420Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7488005025470646166:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:03:49.153124Z node 4 :TX_PROXY ERROR: Actor# [4:7488005025470646225:3467] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:03:50.733095Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:03:55.914255Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=YjBlNmRkNy0yZTlhODE2Ni04ZDE2Y2YzNS03MWEwZjZhZg==, ActorId: [4:7488005029765613836:2500], ActorState: ExecuteState, TraceId: 01jqpf3xt3byen463khffydf24, Create QueryResponse for error on request, msg: >> KqpInplaceUpdate::SingleRowArithm+UseSink [GOOD] >> KqpInplaceUpdate::SingleRowArithm-UseSink >> KqpWrite::ProjectReplace+UseSink [GOOD] >> KqpWrite::ProjectReplace-UseSink >> KqpWrite::UpsertNullKey [GOOD] >> KqpImmediateEffects::ForceImmediateEffectsExecution-UseSink [GOOD] >> KqpInplaceUpdate::SingleRowStr-UseSink [GOOD] >> KqpInplaceUpdate::Negative_BatchUpdate+UseSink [GOOD] >> KqpInplaceUpdate::Negative_BatchUpdate-UseSink >> TFlatExecutorLeases::BasicsInitialLease [GOOD] >> TFlatExecutorLeases::BasicsInitialLeaseTimeout ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpWrite::UpsertNullKey [GOOD] Test command err: Trying to start YDB, gRPC: 29542, MsgBus: 26088 2025-03-31T16:03:53.482007Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005043123442288:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:53.482492Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0011cf/r3tmp/tmpuL5Rbt/pdisk_1.dat 2025-03-31T16:03:54.122727Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:03:54.122835Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:03:54.129917Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:03:54.169786Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29542, node 1 2025-03-31T16:03:54.605464Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:03:54.605485Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:03:54.605492Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:03:54.605624Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26088 TClient is connected to server localhost:26088 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:03:55.678549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:55.734714Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:03:55.754518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:56.078351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:56.339482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:56.446447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:58.441974Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488005043123442288:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:58.500438Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:03:58.664360Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005064598280399:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:58.664497Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:59.349113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:03:59.418872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:03:59.473290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:03:59.513688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:03:59.585361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:03:59.663761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:03:59.746156Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005068893248221:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:59.746232Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:59.746481Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005068893248226:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:59.750545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:03:59.769640Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005068893248228:2466], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:03:59.856069Z node 1 :TX_PROXY ERROR: Actor# [1:7488005068893248285:3458] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpWrite::CastValues [GOOD] >> KqpWrite::CastValuesOptional ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ForceImmediateEffectsExecution-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 19491, MsgBus: 2455 2025-03-31T16:03:46.343664Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005010416453957:2125];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:46.343711Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0011e3/r3tmp/tmpUQ8zPj/pdisk_1.dat 2025-03-31T16:03:47.093301Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:03:47.093416Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:03:47.096380Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:03:47.099979Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19491, node 1 2025-03-31T16:03:47.268163Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:03:47.268189Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:03:47.268208Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:03:47.268343Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2455 TClient is connected to server localhost:2455 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:03:47.990647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:03:48.012405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:48.274060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:48.532284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:48.631282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:50.464635Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005027596324843:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:50.464767Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:50.797798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:03:50.847157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:03:50.896121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:03:50.953660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:03:51.046037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:03:51.150168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:03:51.251709Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005031891292662:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:51.251828Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:51.252285Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005031891292667:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:51.257034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:03:51.286476Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005031891292669:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:03:51.348183Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488005010416453957:2125];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:51.348283Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:03:51.388058Z node 1 :TX_PROXY ERROR: Actor# [1:7488005031891292725:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:03:52.656175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 21093, MsgBus: 62752 2025-03-31T16:03:54.194958Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488005047747973906:2210];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:54.211428Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0011e3/r3tmp/tmpAdJYAU/pdisk_1.dat 2025-03-31T16:03:54.583565Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:54.614548Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:03:54.614869Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:03:54.616192Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21093, node 2 2025-03-31T16:03:54.763239Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:03:54.763274Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:03:54.763281Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:03:54.763382Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62752 TClient is connected to server localhost:62752 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:03:55.376701Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:55.388306Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:03:55.401997Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:55.488893Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:55.670569Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:55.757222Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:58.885461Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005064927844697:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:58.885556Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:58.936208Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:03:59.028601Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:03:59.118271Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:03:59.196698Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:03:59.200230Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488005047747973906:2210];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:59.200372Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:03:59.291446Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:03:59.398330Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:03:59.473404Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005069222812517:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:59.473495Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:59.474403Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005069222812522:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:59.478193Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:03:59.492372Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488005069222812524:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:03:59.574530Z node 2 :TX_PROXY ERROR: Actor# [2:7488005069222812579:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:04:00.805330Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::SingleRowStr-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 6822, MsgBus: 28149 2025-03-31T16:03:46.241973Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005009983111642:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:46.243137Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0011e9/r3tmp/tmp4nyoAD/pdisk_1.dat 2025-03-31T16:03:47.091821Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:47.096489Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:03:47.096599Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:03:47.102708Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6822, node 1 2025-03-31T16:03:47.257220Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:03:47.257245Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:03:47.257253Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:03:47.257424Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28149 TClient is connected to server localhost:28149 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:03:47.999860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:48.047710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:48.266670Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:48.555059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:48.676666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:51.227731Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488005009983111642:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:51.227788Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:03:51.230284Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005031457949893:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:51.230413Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:51.976488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:03:52.048068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:03:52.135987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:03:52.196150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:03:52.243326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:03:52.371577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:03:52.451242Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005035752917719:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:52.451316Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:52.451538Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005035752917724:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:52.457404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:03:52.474798Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005035752917726:2466], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:03:52.533701Z node 1 :TX_PROXY ERROR: Actor# [1:7488005035752917781:3462] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:03:53.592787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 15874, MsgBus: 22302 2025-03-31T16:03:55.668082Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488005048380166762:2205];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:55.668857Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0011e9/r3tmp/tmpR2ZmtE/pdisk_1.dat 2025-03-31T16:03:55.792528Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:55.805042Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:03:55.805144Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:03:55.806744Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15874, node 2 2025-03-31T16:03:55.972293Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:03:55.972317Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:03:55.972324Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:03:55.972892Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22302 TClient is connected to server localhost:22302 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:03:56.475351Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:03:56.488307Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:56.600085Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:56.859356Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:56.965524Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:59.673556Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005065560037553:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:59.673664Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:59.728153Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:03:59.772978Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:03:59.813701Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:03:59.853495Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:03:59.888627Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:03:59.930952Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:04:00.036968Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005069855005368:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:00.037048Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:00.037420Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005069855005373:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:00.041644Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:04:00.053511Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488005069855005375:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:04:00.109583Z node 2 :TX_PROXY ERROR: Actor# [2:7488005069855005430:3446] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:04:00.623201Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488005048380166762:2205];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:00.623287Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:01.364121Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 >> KqpWrite::InsertRevert [GOOD] >> TFlatExecutorLeases::BasicsInitialLeaseTimeout [GOOD] >> TFlatTableDatetime::TestDate [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundCompactionPriorityByTime >> KqpImmediateEffects::InsertDuplicates-UseSink [GOOD] >> KqpEffects::InsertAbort_Select_Conflict-UseSink [GOOD] >> KqpImmediateEffects::UpsertAfterInsert [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundCompactionPriorityByTime [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_Default >> KqpPg::InsertNoTargetColumns_ColumnOrder+useSink [GOOD] >> KqpPg::InsertNoTargetColumns_ColumnOrder-useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpWrite::InsertRevert [GOOD] Test command err: Trying to start YDB, gRPC: 18115, MsgBus: 15485 2025-03-31T16:03:46.845325Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005012885253082:2082];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:47.095640Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0011de/r3tmp/tmpbpjtwA/pdisk_1.dat 2025-03-31T16:03:47.584624Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:47.595022Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:03:47.599345Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:03:47.603113Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18115, node 1 2025-03-31T16:03:47.800817Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:03:47.807436Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:03:47.807461Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:03:47.807626Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15485 TClient is connected to server localhost:15485 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:03:48.836670Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:48.891595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:49.133467Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:49.392178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:49.510973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:51.853892Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488005012885253082:2082];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:51.853957Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:03:52.204143Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005038655058611:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:52.204260Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:52.653513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:03:52.739698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:03:52.797170Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:03:52.879294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:03:52.917135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:03:52.996578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:03:53.085581Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005042950026432:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:53.085650Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:53.086048Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005042950026437:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:53.091042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:03:53.112139Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-31T16:03:53.114047Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005042950026439:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:03:53.179600Z node 1 :TX_PROXY ERROR: Actor# [1:7488005042950026497:3460] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:03:54.657917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:03:55.028813Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=281474976710672; 2025-03-31T16:03:55.040886Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488005051539961549:2503], Table: `/Root/KeyValue` ([72057594046644480:6:1]), SessionActorId: [1:7488005047244994101:2503]Got CONSTRAINT VIOLATION for table `/Root/KeyValue`. ShardID=72075186224037911, Sink=[1:7488005051539961549:2503].{
<main>: Error: Duplicate keys have been found., code: 2012 } 2025-03-31T16:03:55.041462Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488005051539961539:2503], SessionActorId: [1:7488005047244994101:2503], statusCode=PRECONDITION_FAILED. Issue=
<main>: Error: Constraint violated. Table: `/Root/KeyValue`., code: 2012
<main>: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[1:7488005047244994101:2503]. isRollback=0 2025-03-31T16:03:55.041689Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODc3YmUwYTQtYjU5ZGE3MDgtMmE2ZjNlOGItNWJmMjM1NmQ=, ActorId: [1:7488005047244994101:2503], ActorState: ExecuteState, TraceId: 01jqpf3x966dz133ymbjq1nmbz, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7488005051539961540:2503] from: [1:7488005051539961539:2503] 2025-03-31T16:03:55.041789Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7488005051539961540:2503] TxId: 281474976710672. Ctx: { TraceId: 01jqpf3x966dz133ymbjq1nmbz, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODc3YmUwYTQtYjU5ZGE3MDgtMmE2ZjNlOGItNWJmMjM1NmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
<main>: Error: Constraint violated. Table: `/Root/KeyValue`., code: 2012 subissue: {
<main>: Error: Duplicate keys have been found., code: 2012 } } 2025-03-31T16:03:55.043100Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODc3YmUwYTQtYjU5ZGE3MDgtMmE2ZjNlOGItNWJmMjM1NmQ=, ActorId: [1:7488005047244994101:2503], ActorState: ExecuteState, TraceId: 01jqpf3x966dz133ymbjq1nmbz, Create QueryResponse for error on request, msg:
<main>: Error: Constraint violated. Table: `/Root/KeyValue`., code: 2012
: Error: Duplicate keys have been found., code: 2012 2025-03-31T16:03:55.049618Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_ABORTED;details=Distributed transaction aborted due to commit failure;tx_id=281474976710672; 2025-03-31T16:03:55.049796Z node 1 :TX_DATASHARD ERROR: Complete volatile write [1743437035074 : 281474976710672] from 72075186224037919 at tablet 72075186224037919, error: Status: STATUS_ABORTED Issues: { message: "Distributed transaction aborted due to commit failure" issue_code: 2011 severity: 1 } 2025-03-31T16:03:55.196698Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=281474976710673; 2025-03-31T16:03:55.204936Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488005051539961597:2503], Table: `/Root/KeyValue` ([72057594046644480:6:1]), SessionActorId: [1:7488005047244994101:2503]Got CONSTRAINT VIOLATION for table `/Root/KeyValue`. ShardID=72075186224037911, Sink=[1:7488005051539961597:2503].{
<main>: Error: Duplicate keys have been found., code: 2012 } 2025-03-31T16:03:55.205075Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488005051539961589:2503], SessionActorId: [1:7488005047244994101:2503], statusCode=PRECONDITION_FAILED. Issue=
<main>: Error: Constraint violated. Table: `/Root/KeyValue`., code: 2012
<main>: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[1:7488005047244994101:2503]. isRollback=0 2025-03-31T16:03:55.205385Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODc3YmUwYTQtYjU5ZGE3MDgtMmE2ZjNlOGItNWJmMjM1NmQ=, ActorId: [1:7488005047244994101:2503], ActorState: ExecuteState, TraceId: 01jqpf3xhj2cr582973tkp7hy4, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7488005051539961590:2503] from: [1:7488005051539961589:2503] 2025-03-31T16:03:55.205462Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7488005051539961590:2503] TxId: 281474976710673. Ctx: { TraceId: 01jqpf3xhj2cr582973tkp7hy4, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODc3YmUwYTQtYjU5ZGE3MDgtMmE2ZjNlOGItNWJmMjM1NmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
<main>: Error: Constraint violated. Table: `/Root/KeyValue`., code: 2012 subissue: {
<main>: Error: Duplicate keys have been found., code: 2012 } } 2025-03-31T16:03:55.206614Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODc3YmUwYTQtYjU5ZGE3MDgtMmE2ZjNlOGItNWJmMjM1NmQ=, ActorId: [1:7488005047244994101:2503], ActorState: ExecuteState, TraceId: 01jqpf3xhj2cr582973tkp7hy4, Create QueryResponse for error on request, msg: 2025-03-31T16:03:55.212586Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_ABORTED;details=Distributed transaction aborted due to commit failure;tx_id=281474976710673; 2025-03-31T16:03:55.212774Z node 1 :TX_DATASHARD ERROR: Complete volatile write [1743437035242 : 281474976710673] from 72075186224037919 at tablet 72075186224037919, error: Status: STATUS_ABORTED Issues: { message: "Distributed transaction aborted due to commit failure" issue_code: 2011 severity: 1 }
<main>: Error: Constraint violated. Table: `/Root/KeyValue`., code: 2012
: Error: Duplicate keys have been found., code: 2012 Trying to start YDB, gRPC: 24821, MsgBus: 21829 2025-03-31T16:03:56.620963Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488005052819749479:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:56.621023Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0011de/r3tmp/tmpGQ2aht/pdisk_1.dat 2025-03-31T16:03:56.875987Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:56.889061Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:03:56.889153Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:03:56.899922Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24821, node 2 2025-03-31T16:03:57.027679Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:03:57.027703Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:03:57.027711Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:03:57.027831Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21829 TClient is connected to server localhost:21829 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:03:57.641892Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:57.649957Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:03:57.656860Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:57.743363Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:03:57.966037Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:58.067445Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:00.524550Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005069999620449:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:00.524663Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:00.590559Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:04:00.659739Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:04:00.743658Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:04:00.791129Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:04:00.848745Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:04:00.897611Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:04:00.969615Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005069999620964:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:00.969723Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:00.969871Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005069999620969:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:00.973001Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:04:00.983226Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488005069999620971:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:04:01.077219Z node 2 :TX_PROXY ERROR: Actor# [2:7488005074294588323:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:04:01.621448Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488005052819749479:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:01.621521Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:02.104736Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 >> KqpImmediateEffects::ConflictingKeyW1WRR2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::UpsertAfterInsert [GOOD] Test command err: Trying to start YDB, gRPC: 22142, MsgBus: 64633 2025-03-31T16:03:48.008723Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005017375065498:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:48.008782Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0011db/r3tmp/tmpJ95gqt/pdisk_1.dat 2025-03-31T16:03:48.828112Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22142, node 1 2025-03-31T16:03:48.889916Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:03:48.890019Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:03:48.891434Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:03:49.095603Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:03:49.095626Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:03:49.095638Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:03:49.095735Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64633 TClient is connected to server localhost:64633 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:03:50.125739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:50.157314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:50.382744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:50.693864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:03:50.821902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:03:52.993906Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005038849903766:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:52.994032Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:53.009229Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488005017375065498:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:53.009291Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:03:53.322198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:03:53.401480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:03:53.443939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:03:53.484498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:03:53.531814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:03:53.603645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:03:53.678177Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005043144871578:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:53.678257Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:53.678411Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005043144871583:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:53.683012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:03:53.698438Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005043144871585:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:03:53.797435Z node 1 :TX_PROXY ERROR: Actor# [1:7488005043144871641:3458] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:03:55.446333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 3819, MsgBus: 9077 2025-03-31T16:03:57.635574Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488005059926960934:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:57.635624Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0011db/r3tmp/tmp3CpfO2/pdisk_1.dat 2025-03-31T16:03:57.874709Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:57.890294Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:03:57.890369Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:03:57.892968Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3819, node 2 2025-03-31T16:03:58.063593Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:03:58.063619Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:03:58.063629Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:03:58.063734Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9077 TClient is connected to server localhost:9077 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:03:59.021181Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
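The recurring "Resource pool default not found or you don't have access permissions" warnings, followed by "Scheduled retry" and a TX_PROXY issue of "path exist, request accepts it", are the workload manager lazily bootstrapping /Root/.metadata/workload_manager/pools/default on first query: the fetch runs before the pool exists, a creator actor races to create it, and a concurrent create is accepted as a no-op. As a hedged sketch only (syntax assumed from YDB's workload-management YQL support, not taken from this log; the option values are illustrative), the pool can also be created explicitly up front, which avoids the NOT_FOUND window:

    -- Assumed YQL; pool options here are hypothetical examples.
    CREATE RESOURCE POOL default WITH (
        CONCURRENT_QUERY_LIMIT = 10,
        QUEUE_SIZE = 100
    );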
2025-03-31T16:03:59.027992Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:03:59.044817Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:59.179734Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:59.478680Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:59.570114Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:02.148064Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005081401799176:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:02.148171Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:02.236991Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:04:02.304602Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:04:02.346063Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:04:02.399276Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:04:02.456866Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:04:02.501584Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:04:02.608925Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005081401799694:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:02.609043Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:02.609582Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005081401799699:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:02.613658Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:04:02.625578Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488005081401799701:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:04:02.635673Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488005059926960934:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:02.635774Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:02.681738Z node 2 :TX_PROXY ERROR: Actor# [2:7488005081401799755:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:04:03.680442Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Select_Conflict-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 5345, MsgBus: 14758 2025-03-31T16:03:46.742592Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005012592805678:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:46.744942Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0011e1/r3tmp/tmpe7RQrc/pdisk_1.dat 2025-03-31T16:03:47.667688Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:03:47.667789Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:03:47.668052Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:47.672475Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5345, node 1 2025-03-31T16:03:47.888656Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:03:47.888675Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:03:47.888683Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:03:47.888792Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14758 TClient is connected to server localhost:14758 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:03:48.833747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:48.859075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:49.080062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:49.303820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:49.414382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:51.817342Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488005012592805678:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:51.817660Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:03:52.067401Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005038362611090:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:52.067519Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:52.501106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:03:52.546578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:03:52.586528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:03:52.638175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:03:52.709875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:03:52.785279Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:03:52.908358Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005038362611613:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:52.908511Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:52.908901Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005038362611618:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:52.913169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:03:52.950094Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005038362611620:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:03:53.026302Z node 1 :TX_PROXY ERROR: Actor# [1:7488005042657578971:3459] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:03:54.456887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:03:55.130257Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=281474976710673; 2025-03-31T16:03:55.164231Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488005051247514085:2502], Table: `/Root/TwoShard` ([72057594046644480:2:1]), SessionActorId: [1:7488005046952546575:2502]Got CONSTRAINT VIOLATION for table `/Root/TwoShard`. ShardID=72075186224037888, Sink=[1:7488005051247514085:2502].{
: Error: Duplicate keys have been found., code: 2012 } 2025-03-31T16:03:55.164757Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488005051247514074:2502], SessionActorId: [1:7488005046952546575:2502], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[1:7488005046952546575:2502]. isRollback=0 2025-03-31T16:03:55.165012Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YzdjYTkwNzctN2QyZWRlOGUtNzZiOTc1MDEtYzczMDRjY2I=, ActorId: [1:7488005046952546575:2502], ActorState: ExecuteState, TraceId: 01jqpf3x7b1yp8mkckb18acm5w, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7488005051247514075:2502] from: [1:7488005051247514074:2502] 2025-03-31T16:03:55.165107Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7488005051247514075:2502] TxId: 281474976710673. Ctx: { TraceId: 01jqpf3x7b1yp8mkckb18acm5w, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzdjYTkwNzctN2QyZWRlOGUtNzZiOTc1MDEtYzczMDRjY2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-03-31T16:03:55.165324Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YzdjYTkwNzctN2QyZWRlOGUtNzZiOTc1MDEtYzczMDRjY2I=, ActorId: [1:7488005046952546575:2502], ActorState: ExecuteState, TraceId: 01jqpf3x7b1yp8mkckb18acm5w, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 16579, MsgBus: 7186 2025-03-31T16:03:56.589208Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488005053796109995:2195];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:56.589254Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0011e1/r3tmp/tmpoQR9Ho/pdisk_1.dat 2025-03-31T16:03:56.834464Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:56.860739Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:03:56.860824Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:03:56.864811Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16579, node 2 2025-03-31T16:03:57.043629Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:03:57.043652Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:03:57.043661Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:03:57.043779Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7186 TClient is connected to server localhost:7186 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:03:57.845845Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:57.857374Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:03:57.868347Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
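The PRECONDITION_FAILED trace above is the KIKIMR_CONSTRAINT_VIOLATION path: the datashard rejects a write batch that supplies the same primary key more than once (the GLOBAL WARN with details=Duplicate keys have been found), and the KQP session surfaces it as "Constraint violated" with the "Duplicate keys have been found., code: 2012" subissue. A minimal YQL sketch of the failing shape, assuming a table like the /Root/TwoShard named in the trace (the schema below is hypothetical):

    -- Hypothetical schema modeled on the table name in the trace.
    CREATE TABLE TwoShard (
        Key Uint32,
        Value1 String,
        PRIMARY KEY (Key)
    );

    -- Supplying the same key twice in one statement fails with
    -- PRECONDITION_FAILED / "Duplicate keys have been found., code: 2012".
    INSERT INTO TwoShard (Key, Value1) VALUES
        (1u, "one"),
        (1u, "one again");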
2025-03-31T16:03:57.981218Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:58.296975Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:58.447888Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:01.317980Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005075270948102:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:01.318074Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:01.383466Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:04:01.427268Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:04:01.470607Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:04:01.514449Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:04:01.561777Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:04:01.590296Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488005053796109995:2195];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:01.590361Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:01.644424Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:04:01.711871Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005075270948621:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:01.711957Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:01.712197Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005075270948626:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:01.718160Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:04:01.738534Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2025-03-31T16:04:01.738932Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488005075270948628:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:04:01.837132Z node 2 :TX_PROXY ERROR: Actor# [2:7488005075270948685:3452] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:04:03.095101Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:04:03.706673Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7488005083860883778:2521], TxId: 281474976715675, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZmFjZTFmMS1mOTY2NTk2NC05ZWU0ODE0MC04NTU1MGQ3Ng==. CustomerSuppliedId : . TraceId : 01jqpf45hnbrevp9x6vyr9kktj. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-03-31T16:04:03.707922Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7488005083860883779:2522], TxId: 281474976715675, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jqpf45hnbrevp9x6vyr9kktj. SessionId : ydb://session/3?node_id=2&id=ZmFjZTFmMS1mOTY2NTk2NC05ZWU0ODE0MC04NTU1MGQ3Ng==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7488005083860883775:2491], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-03-31T16:04:03.708400Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZmFjZTFmMS1mOTY2NTk2NC05ZWU0ODE0MC04NTU1MGQ3Ng==, ActorId: [2:7488005083860883540:2491], ActorState: ExecuteState, TraceId: 01jqpf45hnbrevp9x6vyr9kktj, Create QueryResponse for error on request, msg: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::InsertDuplicates-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 1797, MsgBus: 3724 2025-03-31T16:03:47.785599Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005016472919018:2271];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:47.785653Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0011dc/r3tmp/tmpdTcWsd/pdisk_1.dat 2025-03-31T16:03:48.516078Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:48.521668Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:03:48.521765Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:03:48.525051Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1797, node 1 2025-03-31T16:03:48.799552Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:03:48.799578Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:03:48.799585Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:03:48.799681Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3724 TClient is connected to server localhost:3724 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:03:49.722570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:03:49.751939Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:03:49.776957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:49.966434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:50.199430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:50.319703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:52.715896Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005037947757052:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:52.716006Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:52.791189Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488005016472919018:2271];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:52.791298Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:03:53.154439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:03:53.215429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:03:53.256801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:03:53.292858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:03:53.337530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:03:53.393852Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:03:53.491165Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005042242724866:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:53.491260Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:53.491676Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005042242724871:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:53.496056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:03:53.517089Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005042242724873:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:03:53.591685Z node 1 :TX_PROXY ERROR: Actor# [1:7488005042242724929:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:03:54.916414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:03:55.545842Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=4; 2025-03-31T16:03:55.575066Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 4 at tablet 72075186224037919 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-03-31T16:03:55.575241Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 4 at tablet 72075186224037919 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-03-31T16:03:55.575467Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488005050832660072:2502], Table: `/Root/TestImmediateEffects` ([72057594046644480:16:1]), SessionActorId: [1:7488005046537692548:2502]Got CONSTRAINT VIOLATION for table `/Root/TestImmediateEffects`. ShardID=72075186224037919, Sink=[1:7488005050832660072:2502].{
: Error: Duplicate keys have been found., code: 2012 } 2025-03-31T16:03:55.575992Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488005050832660033:2502], SessionActorId: [1:7488005046537692548:2502], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TestImmediateEffects`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[1:7488005046537692548:2502]. isRollback=0 2025-03-31T16:03:55.576265Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmFlNGIzNTktMzQ5Yjg0MzgtZDYzMzViNDAtODgxZDA1MGQ=, ActorId: [1:7488005046537692548:2502], ActorState: ExecuteState, TraceId: 01jqpf3xjh4kaq9vq5y8x2v02j, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7488005050832660047:2502] from: [1:7488005050832660033:2502] 2025-03-31T16:03:55.576363Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7488005050832660047:2502] TxId: 281474976710674. Ctx: { TraceId: 01jqpf3xjh4kaq9vq5y8x2v02j, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmFlNGIzNTktMzQ5Yjg0MzgtZDYzMzViNDAtODgxZDA1MGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/TestImmediateEffects`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-03-31T16:03:55.577195Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmFlNGIzNTktMzQ5Yjg0MzgtZDYzMzViNDAtODgxZDA1MGQ=, ActorId: [1:7488005046537692548:2502], ActorState: ExecuteState, TraceId: 01jqpf3xjh4kaq9vq5y8x2v02j, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 2423, MsgBus: 19302 2025-03-31T16:03:56.651551Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488005055844300447:2234];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:56.651729Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0011dc/r3tmp/tmpIXj6xu/pdisk_1.dat 2025-03-31T16:03:56.837497Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:56.865346Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:03:56.865425Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:03:56.866616Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2423, node 2 2025-03-31T16:03:57.062548Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:03:57.062572Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:03:57.062579Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:03:57.062679Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19302 TClient is connected to server localhost:19302 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:03:57.907776Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:57.930047Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:03:58.029380Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:58.290977Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:58.396414Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:01.395843Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005077319138484:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:01.395924Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:01.451049Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:04:01.498335Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:04:01.539914Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:04:01.586069Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:04:01.613385Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488005055844300447:2234];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:01.613821Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:01.627499Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:04:01.681484Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:04:01.741917Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005077319138999:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:01.742018Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:01.742231Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005077319139004:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:01.748123Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:04:01.759775Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488005077319139006:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:04:01.827156Z node 2 :TX_PROXY ERROR: Actor# [2:7488005077319139059:3444] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:04:03.057049Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:04:04.059646Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7488005090204041507:2524], TxId: 281474976710676, task: 1. Ctx: { TraceId : 01jqpf45p501tdgj2epmmxrzxy. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ZmI3OGNjMTgtNjA2ZGNlZTgtNjRhZGQyZDctZTdkMDdmZDE=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Duplicated keys found., code: 2012 }. 2025-03-31T16:04:04.060147Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7488005090204041509:2525], TxId: 281474976710676, task: 2. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZmI3OGNjMTgtNjA2ZGNlZTgtNjRhZGQyZDctZTdkMDdmZDE=. TraceId : 01jqpf45p501tdgj2epmmxrzxy. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7488005090204041504:2491], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-03-31T16:04:04.060626Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZmI3OGNjMTgtNjA2ZGNlZTgtNjRhZGQyZDctZTdkMDdmZDE=, ActorId: [2:7488005085909073916:2491], ActorState: ExecuteState, TraceId: 01jqpf45p501tdgj2epmmxrzxy, Create QueryResponse for error on request, msg: >> KqpEffects::UpdateOn_Literal [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyW1WRR2 [GOOD] Test command err: Trying to start YDB, gRPC: 11938, MsgBus: 64324 2025-03-31T16:03:48.778968Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005021162682270:2132];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:48.784205Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0011d9/r3tmp/tmpTzH9Sr/pdisk_1.dat 2025-03-31T16:03:49.389238Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:03:49.389359Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:03:49.390971Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:03:49.419027Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11938, node 1 2025-03-31T16:03:49.623708Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:03:49.623734Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:03:49.623740Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:03:49.623850Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64324 TClient is connected to server localhost:64324 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:03:50.502713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:50.543824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:03:50.748306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:51.023524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:51.171624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:53.780181Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488005021162682270:2132];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:53.780243Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:03:53.820413Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005042637520481:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:53.820565Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:54.423906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:03:54.491071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:03:54.537594Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:03:54.574844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:03:54.639775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:03:54.795371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:03:54.897974Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005046932488311:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:54.898065Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:54.898328Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005046932488317:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:54.902527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:03:54.920051Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005046932488319:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:03:55.010057Z node 1 :TX_PROXY ERROR: Actor# [1:7488005051227455672:3472] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:03:56.500142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 21075, MsgBus: 3907 2025-03-31T16:03:58.414283Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488005061019726561:2194];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:58.414589Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0011d9/r3tmp/tmpa6CPxX/pdisk_1.dat 2025-03-31T16:03:58.760455Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:03:58.760542Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:03:58.767461Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:58.779966Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21075, node 2 2025-03-31T16:03:59.031704Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:03:59.031732Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:03:59.031743Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:03:59.031873Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3907 TClient is connected to server localhost:3907 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:03:59.838613Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:03:59.849428Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:03:59.865465Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:59.985689Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:00.217269Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:00.302521Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:02.772607Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005078199597384:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:02.772717Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:02.827869Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:04:02.906645Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:04:02.956151Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:04:03.000852Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:04:03.062781Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:04:03.108766Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:04:03.195708Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005082494565192:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:03.195817Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:03.199630Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005082494565197:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:03.205526Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:04:03.226187Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488005082494565199:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:04:03.329831Z node 2 :TX_PROXY ERROR: Actor# [2:7488005082494565255:3451] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:04:03.415224Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488005061019726561:2194];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:03.415298Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:04.549986Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:04:05.122927Z node 2 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=7; 2025-03-31T16:04:05.123254Z node 2 :TX_DATASHARD ERROR: Prepare transaction failed. txid 7 at tablet 72075186224037919 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-03-31T16:04:05.124578Z node 2 :TX_DATASHARD ERROR: Errors while proposing transaction txid 7 at tablet 72075186224037919 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-03-31T16:04:05.124794Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7488005086789533142:2500], Table: `/Root/TestImmediateEffects` ([72057594046644480:16:1]), SessionActorId: [2:7488005086789532851:2500]Got LOCKS BROKEN for table `/Root/TestImmediateEffects`. ShardID=72075186224037919, Sink=[2:7488005086789533142:2500].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-03-31T16:04:05.125364Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7488005086789533135:2500], SessionActorId: [2:7488005086789532851:2500], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/TestImmediateEffects`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[2:7488005086789532851:2500]. isRollback=0 2025-03-31T16:04:05.125644Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZmRiZDAzNmEtYTVhYmE1ZjYtMjRmNDlkYzktYzk1MTMwY2Q=, ActorId: [2:7488005086789532851:2500], ActorState: ExecuteState, TraceId: 01jqpf478jc2ncss25m89ytx9m, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [2:7488005091084500497:2500] from: [2:7488005086789533135:2500] 2025-03-31T16:04:05.125745Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7488005091084500497:2500] TxId: 281474976715677. Ctx: { TraceId: 01jqpf478jc2ncss25m89ytx9m, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmRiZDAzNmEtYTVhYmE1ZjYtMjRmNDlkYzktYzk1MTMwY2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/TestImmediateEffects`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-03-31T16:04:05.125950Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZmRiZDAzNmEtYTVhYmE1ZjYtMjRmNDlkYzktYzk1MTMwY2Q=, ActorId: [2:7488005086789532851:2500], ActorState: ExecuteState, TraceId: 01jqpf478jc2ncss25m89ytx9m, Create QueryResponse for error on request, msg: >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_Default [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True >> KqpImmediateEffects::DeleteAfterInsert [GOOD] |74.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut |74.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut |74.4%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut >> KqpImmediateEffects::UpsertAfterInsertWithIndex [GOOD] >> KqpImmediateEffects::UpsertConflictInteractiveTxAborted >> DataShardVolatile::UpsertNoLocksArbiter+UseSink [GOOD] >> DataShardVolatile::UpsertNoLocksArbiter-UseSink >> TTopicYqlTest::CreateAndAlterTopicYql [GOOD] >> TTopicYqlTest::BadRequests >> KqpImmediateEffects::UnobservedUncommittedChangeConflict [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::UpdateOn_Literal [GOOD] Test command err: Trying to start YDB, gRPC: 30976, MsgBus: 23445 2025-03-31T16:03:52.358928Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005035343170241:2130];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:52.358976Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0011d2/r3tmp/tmppNeUqz/pdisk_1.dat 2025-03-31T16:03:52.878793Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:03:52.896849Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:03:52.897045Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:52.902621Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30976, node 1 2025-03-31T16:03:53.056199Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:03:53.056235Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:03:53.056248Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:03:53.056421Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23445 TClient is connected to server localhost:23445 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:03:53.883507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:53.919668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:54.175661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:54.457310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:54.640642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:56.848875Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005052523041115:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:56.849007Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:57.177474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:03:57.265641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:03:57.347674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:03:57.376345Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488005035343170241:2130];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:57.376454Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:03:57.409240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:03:57.457091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:03:57.517265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:03:57.609613Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005056818008931:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:57.609735Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:57.609986Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005056818008936:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:57.614705Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:03:57.640445Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005056818008938:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:03:57.735054Z node 1 :TX_PROXY ERROR: Actor# [1:7488005056818008993:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 21190, MsgBus: 17486 2025-03-31T16:04:00.427775Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488005073091432337:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:00.427828Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0011d2/r3tmp/tmpiRL0mP/pdisk_1.dat 2025-03-31T16:04:00.636203Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:00.639799Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:00.639895Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:00.642317Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21190, node 2 2025-03-31T16:04:00.855156Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:00.855182Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:00.855191Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:00.855323Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17486 TClient is connected to server localhost:17486 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:01.416768Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:04:01.439181Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:04:01.448980Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:04:01.530917Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-31T16:04:01.716530Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:01.790945Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:04.279230Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005090271303300:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:04.279359Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:04.331309Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:04:04.372342Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:04:04.425936Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:04:04.483517Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:04:04.563819Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:04:04.647219Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:04:04.711392Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005090271303824:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:04.711484Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:04.711845Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005090271303829:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:04.717696Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:04:04.728792Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488005090271303831:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:04:04.805285Z node 2 :TX_PROXY ERROR: Actor# [2:7488005090271303886:3454] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:04:05.428267Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488005073091432337:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:05.428328Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |74.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest >> KqpImmediateEffects::UpsertDuplicates >> KqpEffects::InsertAbort_Literal_Duplicates+UseSink >> KqpEffects::InsertAbort_Select_Duplicates-UseSink [GOOD] >> KqpWrite::ProjectReplace-UseSink [GOOD] |74.4%| [TA] $(B)/ydb/core/sys_view/query_stats/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::DeleteAfterInsert [GOOD] Test command err: Trying to start YDB, gRPC: 7551, MsgBus: 25313 2025-03-31T16:03:50.596436Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005029513539824:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:50.596464Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0011d8/r3tmp/tmp9bq82W/pdisk_1.dat 2025-03-31T16:03:51.315051Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:03:51.315168Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:03:51.336490Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:03:51.441722Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7551, node 1 2025-03-31T16:03:51.503946Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:03:51.791494Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:03:51.791515Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:03:51.791521Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:03:51.791618Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25313 TClient is connected to server localhost:25313 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:03:52.731792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:52.755600Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:03:52.766729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:53.018793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:03:53.231470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:03:53.356291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:55.599321Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488005029513539824:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:55.599424Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:03:56.096086Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005055283345374:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:56.096203Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:56.658927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:03:56.712340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:03:56.774945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:03:56.804455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:03:56.867633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:03:56.949823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:03:57.075555Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005059578313198:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:57.075642Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:57.075969Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005059578313203:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:57.080812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:03:57.098850Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005059578313205:2466], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:03:57.184320Z node 1 :TX_PROXY ERROR: Actor# [1:7488005059578313260:3461] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:03:58.558507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 24735, MsgBus: 4109 2025-03-31T16:04:00.668300Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488005070960148440:2084];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:00.669136Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0011d8/r3tmp/tmp5mrGZ8/pdisk_1.dat 2025-03-31T16:04:00.881549Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:00.912521Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:00.912599Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:00.916080Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24735, node 2 2025-03-31T16:04:01.002363Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:01.002391Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:01.002400Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:01.002532Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4109 TClient is connected to server localhost:4109 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:01.526226Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:04:01.539669Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:04:01.559894Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:01.658581Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:01.854022Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:01.946864Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:04.609528Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005088140019363:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:04.609644Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:04.660627Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:04:04.699996Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:04:04.746858Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:04:04.795739Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:04:04.846486Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:04:04.933107Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:04:05.027380Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005092434987183:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:05.027483Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:05.031368Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005092434987188:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:05.034825Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:04:05.050261Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488005092434987190:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:04:05.138884Z node 2 :TX_PROXY ERROR: Actor# [2:7488005092434987246:3456] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:04:05.668735Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488005070960148440:2084];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:05.668815Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:06.130352Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 >> KqpImmediateEffects::AlreadyBrokenImmediateEffects >> KqpInplaceUpdate::SingleRowArithm-UseSink [GOOD] >> KqpStats::SysViewCancelled [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::UnobservedUncommittedChangeConflict [GOOD] Test command err: Trying to start YDB, gRPC: 17435, MsgBus: 24734 2025-03-31T16:03:51.395050Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005032615835850:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:51.395833Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0011d6/r3tmp/tmphOal7N/pdisk_1.dat 2025-03-31T16:03:52.239898Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:03:52.239999Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:03:52.254116Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:03:52.342866Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17435, node 1 2025-03-31T16:03:52.383814Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:03:52.383917Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:03:52.492171Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:03:52.599632Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:03:52.599656Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:03:52.599662Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:03:52.599764Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24734 TClient is connected to server localhost:24734 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:03:53.510375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:53.534829Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:03:53.557245Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:53.844817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:54.113513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:54.296601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:56.382995Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488005032615835850:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:56.383120Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:03:56.921246Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005054090673999:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:03:56.921349Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:57.557297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:03:57.632572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:03:57.697134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:03:57.773300Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:03:57.880539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:03:57.957987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:03:58.064820Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005062680609115:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:58.064910Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:58.065162Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005062680609120:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:58.070052Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:03:58.094038Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005062680609122:2466], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:03:58.193395Z node 1 :TX_PROXY ERROR: Actor# [1:7488005062680609177:3458] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:03:59.473711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 7050, MsgBus: 6597 2025-03-31T16:04:01.184344Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488005075397772168:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:01.184486Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0011d6/r3tmp/tmpGfW0T0/pdisk_1.dat 2025-03-31T16:04:01.352146Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:01.414860Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:01.414963Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:01.416951Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7050, node 2 2025-03-31T16:04:01.511777Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:01.511818Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:01.511826Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:01.511954Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6597 TClient is connected to server localhost:6597 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:02.121010Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:04:02.127010Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:02.145796Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:04:02.258734Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:02.477916Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:02.603236Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:05.240347Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005092577643135:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:05.240437Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:05.304440Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:04:05.388340Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:04:05.428546Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:04:05.458366Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:04:05.520160Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:04:05.556969Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:04:05.605437Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005092577643646:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:05.605525Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:05.605833Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005092577643651:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:05.609052Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:04:05.618269Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488005092577643653:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:04:05.672223Z node 2 :TX_PROXY ERROR: Actor# [2:7488005092577643705:3444] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:04:06.179665Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488005075397772168:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:06.179731Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:06.626561Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:04:07.161874Z node 2 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976710675; 2025-03-31T16:04:07.175661Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7488005101167578916:2525], Table: `/Root/TestImmediateEffects` ([72057594046644480:16:1]), SessionActorId: [2:7488005096872611540:2525]Got LOCKS BROKEN for table `/Root/TestImmediateEffects`. ShardID=72075186224037919, Sink=[2:7488005101167578916:2525].{
: Error: Operation is aborting because locks are not valid, code: 2001 }
2025-03-31T16:04:07.176151Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7488005101167578909:2525], SessionActorId: [2:7488005096872611540:2525], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/TestImmediateEffects`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[2:7488005096872611540:2525]. isRollback=0
2025-03-31T16:04:07.176454Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YjUyYTYyNDktNTYyYzhjZS1hYmIxOTJkOS1hZDI2NDRlYw==, ActorId: [2:7488005096872611540:2525], ActorState: ExecuteState, TraceId: 01jqpf49bm7yvg4bcmq8vpbxg9, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [2:7488005101167578910:2525] from: [2:7488005101167578909:2525]
2025-03-31T16:04:07.176594Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7488005101167578910:2525] TxId: 281474976710675. Ctx: { TraceId: 01jqpf49bm7yvg4bcmq8vpbxg9, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjUyYTYyNDktNTYyYzhjZS1hYmIxOTJkOS1hZDI2NDRlYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/TestImmediateEffects`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-03-31T16:04:07.177702Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YjUyYTYyNDktNTYyYzhjZS1hYmIxOTJkOS1hZDI2NDRlYw==, ActorId: [2:7488005096872611540:2525], ActorState: ExecuteState, TraceId: 01jqpf49bm7yvg4bcmq8vpbxg9, Create QueryResponse for error on request, msg: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Select_Duplicates-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 32603, MsgBus: 28973 2025-03-31T16:03:51.576008Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005031689028308:2137];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:51.577193Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0011d4/r3tmp/tmpvjXlHt/pdisk_1.dat 2025-03-31T16:03:52.394409Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:52.410982Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:03:52.411159Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:03:52.412925Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32603, node 1 2025-03-31T16:03:52.647953Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:03:52.647971Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:03:52.647978Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:03:52.648083Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28973 TClient is connected to server localhost:28973 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:03:53.622948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:03:53.656491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:53.855038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:54.120587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:54.220130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:56.579344Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488005031689028308:2137];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:56.579413Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:03:56.928578Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005053163866480:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:56.928711Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:57.488734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:03:57.550607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:03:57.617383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:03:57.667870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:03:57.715444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:03:57.799371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:03:57.917981Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005057458834299:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:57.918072Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:57.918542Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005057458834304:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:57.923808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:03:57.936387Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005057458834306:2466], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:03:58.027385Z node 1 :TX_PROXY ERROR: Actor# [1:7488005061753801657:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:03:59.346998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 24262, MsgBus: 61371 2025-03-31T16:04:01.073498Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488005074071991624:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:01.073553Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0011d4/r3tmp/tmpKJBtsp/pdisk_1.dat 2025-03-31T16:04:01.271241Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:01.301585Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:01.301667Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:01.302751Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24262, node 2 2025-03-31T16:04:01.361774Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:01.361797Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:01.361805Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:01.361916Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61371 TClient is connected to server localhost:61371 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-03-31T16:04:01.935959Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:04:01.954555Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:02.056072Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:02.249813Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:02.358112Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:04.995730Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005086956895284:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:04.995862Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:05.042227Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:04:05.079555Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:04:05.152543Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:04:05.203354Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:04:05.235459Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:04:05.284840Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:04:05.372585Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005091251863101:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:05.372682Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:05.372857Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005091251863106:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:05.378580Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:04:05.403728Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488005091251863108:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:04:05.493507Z node 2 :TX_PROXY ERROR: Actor# [2:7488005091251863164:3449] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:04:06.075998Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488005074071991624:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:06.076134Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:06.531998Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:04:07.167043Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7488005099841798227:2519], TxId: 281474976715675, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MTczZDgxYTgtNzk2NTRkMGMtMjY3NmNjNTgtYWJmNzM1ZWE=. TraceId : 01jqpf48ys477qm8srqg6t2nsf. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Duplicated keys found., code: 2012 }.
2025-03-31T16:04:07.168494Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7488005099841798228:2520], TxId: 281474976715675, task: 2. Ctx: { TraceId : 01jqpf48ys477qm8srqg6t2nsf. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MTczZDgxYTgtNzk2NTRkMGMtMjY3NmNjNTgtYWJmNzM1ZWE=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7488005099841798224:2489], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-03-31T16:04:07.169000Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MTczZDgxYTgtNzk2NTRkMGMtMjY3NmNjNTgtYWJmNzM1ZWE=, ActorId: [2:7488005095546830718:2489], ActorState: ExecuteState, TraceId: 01jqpf48ys477qm8srqg6t2nsf, Create QueryResponse for error on request, msg: >> TBackupTests::ShouldSucceedOnLargeData_MinWriteBatch [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_False ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpWrite::ProjectReplace-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 3448, MsgBus: 22065 2025-03-31T16:03:53.770662Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005040273178821:2209];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:53.771620Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0011cc/r3tmp/tmpKKLLS4/pdisk_1.dat 2025-03-31T16:03:54.640081Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:54.706634Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:03:54.791272Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:03:54.815293Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3448, node 1 2025-03-31T16:03:55.028592Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:03:55.028615Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:03:55.028630Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:03:55.028744Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22065 TClient is connected to server localhost:22065 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:03:56.054162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:03:56.070345Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:03:56.079251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:56.331760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:56.664642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:56.836634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:58.768075Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488005040273178821:2209];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:58.768140Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:03:59.361024Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005066042984213:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:59.361124Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:59.780926Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:03:59.852384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:03:59.906916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:03:59.948731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:04:00.012584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:04:00.084399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:04:00.173084Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005070337952035:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:00.173186Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:00.173378Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005070337952041:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:00.177561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:04:00.187424Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005070337952043:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:04:00.282321Z node 1 :TX_PROXY ERROR: Actor# [1:7488005070337952098:3459] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 26580, MsgBus: 17843 2025-03-31T16:04:02.373086Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488005078066018421:2194];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:02.373121Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0011cc/r3tmp/tmp5Stdmw/pdisk_1.dat 2025-03-31T16:04:02.591998Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:02.626787Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:02.626868Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:02.629144Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26580, node 2 2025-03-31T16:04:02.739617Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:02.739639Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:02.739648Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:02.739755Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17843 TClient is connected to server localhost:17843 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:03.210598Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:04:03.222762Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:04:03.230930Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:03.324152Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:03.540889Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:03.612243Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:05.904221Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005090950921922:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:05.904367Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:05.944574Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:04:05.975712Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:04:06.019147Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:04:06.056173Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:04:06.099359Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:04:06.180020Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:04:06.265510Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005095245889740:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:06.265706Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:06.266950Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005095245889745:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:06.271464Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:04:06.286827Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488005095245889747:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:04:06.384910Z node 2 :TX_PROXY ERROR: Actor# [2:7488005095245889802:3441] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:04:07.373246Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488005078066018421:2194];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:07.373332Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpInplaceUpdate::Negative_BatchUpdate-UseSink [GOOD] >> KqpWrite::CastValuesOptional [GOOD] >> KqpImmediateEffects::ConflictingKeyW1RR2 >> KqpEffects::UpdateOn_Params >> TSchemeShardMoveTest::MoveIndex >> TSchemeShardMoveTest::Reject >> TSchemeShardMoveTest::MoveIndexSameDst >> TSchemeShardMoveTest::TwoTables >> TSchemeShardMoveTest::ResetCachedPath >> TSchemeShardMoveTest::MoveMigratedTable >> TSchemeShardMoveTest::Chain >> TSchemeShardMoveTest::MoveTableForBackup >> TSchemeShardMoveTest::Boot >> TSchemeShardMoveTest::Replace ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::SingleRowArithm-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 4421, MsgBus: 17383 2025-03-31T16:03:53.031588Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005039366883523:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:53.032214Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0011d0/r3tmp/tmpwuPNDz/pdisk_1.dat 2025-03-31T16:03:53.875651Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:53.886925Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:03:53.887117Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:03:53.900595Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4421, node 1 2025-03-31T16:03:54.219559Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:03:54.219579Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:03:54.219586Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:03:54.219688Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17383 TClient is connected to server localhost:17383 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:03:55.513292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:55.553334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:55.779040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:56.074971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:56.207138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:58.021528Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488005039366883523:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:58.021596Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:03:58.491951Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005065136688943:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:58.492062Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:59.009761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:03:59.055108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:03:59.097946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:03:59.144535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:03:59.213071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:03:59.257166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:03:59.376825Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005069431656761:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:59.376940Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:59.377433Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005069431656766:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:59.381238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:03:59.412769Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005069431656768:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:03:59.477132Z node 1 :TX_PROXY ERROR: Actor# [1:7488005069431656826:3462] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:04:00.658477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 5753, MsgBus: 8272 2025-03-31T16:04:02.256999Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488005079704265633:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:02.258389Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0011d0/r3tmp/tmp9Gi1KZ/pdisk_1.dat 2025-03-31T16:04:02.499224Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:02.502547Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:02.502632Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:02.504887Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5753, node 2 2025-03-31T16:04:02.585484Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:02.585504Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:02.585513Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:02.585639Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8272 TClient is connected to server localhost:8272 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:03.077589Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:04:03.087208Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:04:03.097209Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:03.205657Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:03.352598Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:03.510059Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:06.065521Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005096884136579:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:06.065613Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:06.120889Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:04:06.161325Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:04:06.214833Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:04:06.254668Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:04:06.312682Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:04:06.369790Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:04:06.434977Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005096884137093:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:06.435049Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:06.435447Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005096884137098:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:06.439872Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:04:06.459553Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-31T16:04:06.460390Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488005096884137100:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:04:06.545567Z node 2 :TX_PROXY ERROR: Actor# [2:7488005096884137156:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:04:07.253953Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488005079704265633:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:07.254033Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:07.727881Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::SysViewCancelled [GOOD] Test command err: Trying to start YDB, gRPC: 2309, MsgBus: 25076 2025-03-31T16:02:55.768781Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004792977726633:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:02:55.768831Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ab1/r3tmp/tmpjCqiPt/pdisk_1.dat 2025-03-31T16:02:56.387819Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:02:56.396822Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:02:56.398626Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:02:56.402638Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2309, node 1 2025-03-31T16:02:56.540179Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:02:56.540206Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:02:56.540213Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:02:56.540323Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25076 TClient is connected to server localhost:25076 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:02:57.610557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:57.628846Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:02:57.654453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:57.846328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:58.039215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:58.115599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:00.353204Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004814452564915:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:00.353327Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:00.673157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:03:00.721645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:03:00.791592Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488004792977726633:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:00.791696Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:03:00.828672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:03:00.881619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:03:00.945857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:03:01.031568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:03:01.135458Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004818747532732:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:01.135548Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:01.135921Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004818747532737:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:01.140504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:03:01.158836Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488004818747532739:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:03:01.256591Z node 1 :TX_PROXY ERROR: Actor# [1:7488004818747532797:3462] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } {"Plan":{"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":8,"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":6,"Plans":[{"E-Size":"No estimate","PlanNodeId":5,"LookupKeyColumns":["Key"],"Node Type":"TableLookup","Path":"\/Root\/TwoShard","Columns":["Key","Value1","Value2"],"E-Rows":"No estimate","Table":"TwoShard","Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["KeyValue"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/KeyValue","ReadRangesPointPrefixLen":"0","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Key"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"Stage","Stats":{"UseLlvm":"undefined","Output":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"LastMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"FirstMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"Bytes":{"Count":1,"Sum":5,"Max":5,"Min":5,"History":[4,5]}},"Name":"4","Push":{"WaitTimeUs":{"Count":1,"Sum":2708,"Max":2708,"Min":2708,"History":[4,2708]},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"Chunks":{"Count":1,"Sum":2,"Max":2,"Min":2},"ResumeMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"LastMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"FirstMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3}}}],"DurationUs":{"Count":1,"Sum":1000,"Max":1000,"Min":1000},"MaxMemoryUsage":{"Count":1,"Sum":1048576,"Max":1048576,"Min":1048576,"History":[4,1048576]},"Tasks":1,"OutputRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"FinishedTasks":1,"IngressRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"PhysicalStageId":0,"StageDurationUs":1000,"Table":[{"Path":"\/Root\/KeyValue","ReadRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"ReadBytes":{"Count":1,"Sum":16,"Max":16,"Min":16}}],"BaseTimeMs":1743436983213,"OutputBytes":{"Count":1,"Sum":5,"Max":5,"Min":5},"CpuTimeUs":{"Count":1,"Sum":980,"Max":980,"Min":980,"History":[4,980]},"Ingress":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"LastMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"FirstMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"Bytes":{"Count":1,"Sum":32,"Max":32,"Min":32,"History":[4,32]}},"External":{},"Name":"KqpReadRangesSource","Ingress":{},"Push":{"LastMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"ResumeMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"FirstMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"Bytes":{"Count":1,"Sum":32,"Max":32,"Min":32,"History":[4,32]},"WaitTimeUs":{"Count":1,"Sum":2800,"Max":2800,"Min":2800,"History":[4,2800]},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1}}}]}}],"Node Type":"HashShuffle","KeyColumns":["Key"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":3}],"Name":"PartitionByKey","Input":"NarrowMap"}],"Node 
Type":"Aggregate","Stats":{"UseLlvm":"undefined","Output":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"LastMessageMs":{"Count":1,"Sum":4,"Max":4,"Min":4},"FirstMessageMs":{"Count":1,"Sum":4,"Max":4,"Min":4},"Bytes":{"Count":1,"Sum":5,"Max":5,"Min":5,"History":[5,5]}},"Name":"RESULT","Push":{"LastMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"Chunks":{"Count":1,"Sum":2,"Max":2,"Min":2},"ResumeMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"FirstMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"PauseMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"WaitTimeUs":{"Count":1,"Sum":515,"Max":515,"Min":515,"History":[5,515]},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1}}}],"MaxMemoryUsage":{"Count":1,"Sum":1048576,"Max":1048576 ... 0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"Compilation":{"FromCache":false,"DurationUs":313986,"CpuTimeUs":306323},"ProcessCpuTimeUs":595,"TotalDurationUs":350750,"ResourcePoolId":"default","QueuedTimeUs":750},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":6,"Operators":[{"E-Rows":"No estimate","Columns":["Key","Value1","Value2"],"E-Size":"No estimate","E-Cost":"No estimate","Name":"TableLookup","Table":"TwoShard","LookupKeyColumns":["Key"]}],"Node Type":"TableLookup","PlanNodeType":"Connection"}],"Operators":[{"E-Rows":"No estimate","Predicate":"Exist(item.Key)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Operators":[{"A-Rows":2,"A-SelfCpu":0.898,"A-Cpu":0.898,"A-Size":18,"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Operators":[{"A-Rows":2,"A-SelfCpu":0.795,"A-Cpu":1.693,"A-Size":18,"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","PlanNodeType":"Query"}} Trying to start YDB, gRPC: 11639, MsgBus: 30888 2025-03-31T16:03:11.704802Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488004859844418844:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:11.826730Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ab1/r3tmp/tmpKtJbxF/pdisk_1.dat 2025-03-31T16:03:11.979728Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:11.992331Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:03:11.992425Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:03:11.994999Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11639, node 3 2025-03-31T16:03:12.057448Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:03:12.057472Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:03:12.057480Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:03:12.057612Z node 3 :NET_CLASSIFIER ERROR: 
got bad distributable configuration TClient is connected to server localhost:30888 TClient is connected to server localhost:30888 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:03:12.588086Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:12.594478Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:03:12.608210Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:12.687579Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:12.981808Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:13.108434Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:16.666752Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488004859844418844:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:16.667543Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:03:16.734222Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488004881319257117:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:16.734325Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:16.771551Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:03:16.809785Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:03:16.898767Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:03:16.979458Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:03:17.062728Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:03:17.176579Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:03:17.282851Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488004885614224936:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:17.282943Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:17.283200Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488004885614224941:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:17.287737Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:03:17.301486Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488004885614224943:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:03:17.404340Z node 3 :TX_PROXY ERROR: Actor# [3:7488004885614225000:3459] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:03:18.844752Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:26.967169Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:03:26.967206Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:06.723556Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437046705, txId: 281474976710672] shutting down 2025-03-31T16:04:07.474876Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7488005100362591771:2809] TxId: 281474976710674. Ctx: { TraceId: 01jqpf49jeddhhczr4w8305mft, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=Mjk5NjAxLTFkOGRlZjI2LTdlNmZlNTYxLTZlZWFkMzdi, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. CANCELLED: [ {
: Error: Request canceled after 100ms } {
: Error: Cancelling after 100ms during execution } ] 2025-03-31T16:04:07.475716Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7488005100362591800:2837], TxId: 281474976710674, task: 7. Ctx: { SessionId : ydb://session/3?node_id=3&id=Mjk5NjAxLTFkOGRlZjI2LTdlNmZlNTYxLTZlZWFkMzdi. CustomerSuppliedId : . TraceId : 01jqpf49jeddhhczr4w8305mft. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [3:7488005100362591771:2809], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-03-31T16:04:07.476455Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7488005100362591802:2839], TxId: 281474976710674, task: 9. Ctx: { SessionId : ydb://session/3?node_id=3&id=Mjk5NjAxLTFkOGRlZjI2LTdlNmZlNTYxLTZlZWFkMzdi. TraceId : 01jqpf49jeddhhczr4w8305mft. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [3:7488005100362591771:2809], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-03-31T16:04:07.477072Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=Mjk5NjAxLTFkOGRlZjI2LTdlNmZlNTYxLTZlZWFkMzdi, ActorId: [3:7488005096067624416:2809], ActorState: ExecuteState, TraceId: 01jqpf49jeddhhczr4w8305mft, Create QueryResponse for error on request, msg:
: Error: Request canceled after 100ms
: Error: Cancelling after 100ms during execution 2025-03-31T16:04:07.709397Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437047701, txId: 281474976710676] shutting down >> KqpImmediateEffects::TxWithReadAtTheEnd+UseSink >> KqpInplaceUpdate::Negative_SingleRowWithKeyCast+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpWrite::CastValuesOptional [GOOD] Test command err: Trying to start YDB, gRPC: 20413, MsgBus: 18531 2025-03-31T16:03:56.043820Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005054470048710:2231];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:56.044208Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0011c9/r3tmp/tmpCJ5ChE/pdisk_1.dat 2025-03-31T16:03:56.863248Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:03:56.863350Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:03:56.872239Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:03:56.902011Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20413, node 1 2025-03-31T16:03:57.211557Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:03:57.211580Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:03:57.211591Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:03:57.211695Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18531 TClient is connected to server localhost:18531 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:03:58.175735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:03:58.199814Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:03:58.213965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:03:58.427473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-31T16:03:58.661531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:58.784873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:01.035596Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005075944886776:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:01.035695Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488005054470048710:2231];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:01.035758Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:01.035807Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:01.393089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:04:01.454572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:04:01.482499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:04:01.552568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:04:01.589321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:04:01.634440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:04:01.688443Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005075944887292:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:01.688511Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:01.689139Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005075944887297:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:01.693854Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:04:01.706168Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005075944887299:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:04:01.808539Z node 1 :TX_PROXY ERROR: Actor# [1:7488005075944887355:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 28332, MsgBus: 7751 2025-03-31T16:04:03.996402Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488005084609582791:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:03.996591Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0011c9/r3tmp/tmpeGUskH/pdisk_1.dat 2025-03-31T16:04:04.219047Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:04.221527Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:04.221804Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:04.246829Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28332, node 2 2025-03-31T16:04:04.299598Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:04.299620Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:04.299627Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:04.299730Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7751 TClient is connected to server localhost:7751 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:04.767571Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:04:04.779410Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:04:04.792027Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:04.858972Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:05.053361Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:05.166689Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:07.749668Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005101789453744:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:07.749766Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:07.784515Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:04:07.823284Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:04:07.867595Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:04:07.903479Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:04:07.945217Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:04:07.983600Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:04:08.047151Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005106084421552:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:08.047226Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:08.047375Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005106084421557:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:08.051810Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:04:08.064020Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488005106084421559:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:04:08.117515Z node 2 :TX_PROXY ERROR: Actor# [2:7488005106084421612:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:04:08.992515Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488005084609582791:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:08.992585Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::Negative_BatchUpdate-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 3059, MsgBus: 11701 2025-03-31T16:03:53.408715Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005041164110177:2195];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:53.408768Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0011cd/r3tmp/tmpdW6Loc/pdisk_1.dat 2025-03-31T16:03:54.166922Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:54.207557Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:03:54.207687Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:03:54.222195Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3059, node 1 2025-03-31T16:03:54.655491Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:03:54.655512Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:03:54.655518Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:03:54.655624Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11701 TClient is connected to server localhost:11701 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:03:55.817137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:55.878472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:56.108098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:56.435210Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:56.536855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:58.414915Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488005041164110177:2195];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:58.415004Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:03:59.249240Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005066933915603:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:59.249350Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:59.634798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:03:59.681293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:03:59.726495Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:03:59.763043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:03:59.846818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:03:59.905556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:03:59.980637Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005066933916121:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:59.980711Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:59.981072Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005066933916126:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:59.985161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:04:00.000472Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005066933916128:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:04:00.092173Z node 1 :TX_PROXY ERROR: Actor# [1:7488005071228883480:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:04:01.157375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 6462, MsgBus: 8808 2025-03-31T16:04:03.058364Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488005083532520917:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:03.058449Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0011cd/r3tmp/tmpVTofH8/pdisk_1.dat 2025-03-31T16:04:03.294124Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:03.296624Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:03.296727Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:03.298000Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6462, node 2 2025-03-31T16:04:03.366347Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:03.366367Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:03.366374Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:03.366479Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8808 TClient is connected to server localhost:8808 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:04.060679Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:04:04.072084Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:04:04.086559Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:04.170234Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:04.416189Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:04.515665Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:06.947767Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005096417424587:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:06.947850Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:06.999436Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:04:07.075161Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:04:07.116570Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:04:07.155033Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:04:07.193252Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:04:07.263665Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:04:07.343069Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005100712392405:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:07.343198Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:07.343231Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005100712392410:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:07.346382Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:04:07.355709Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488005100712392412:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:04:07.435815Z node 2 :TX_PROXY ERROR: Actor# [2:7488005100712392467:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:04:08.054144Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488005083532520917:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:08.054206Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:08.460472Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
>> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_False [GOOD]
>> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_EnableLocalDBFlatIndex_False
>> KqpEffects::InsertAbort_Params_Success
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnLargeData_MinWriteBatch [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:03:18.838959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:03:18.839072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:03:18.839226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:03:18.839266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:03:18.839319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:03:18.839351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:03:18.839431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:03:18.839543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:03:18.839984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:03:19.019366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:03:19.019466Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:19.036397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 
2025-03-31T16:03:19.036517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:03:19.036698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:03:19.063958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:03:19.064253Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:03:19.065173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:19.065445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:03:19.068236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:19.069641Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:19.069743Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:19.069998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:03:19.070059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:19.070111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:03:19.070292Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:03:19.078682Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:03:19.247239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:03:19.247533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:19.247797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:03:19.248211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:03:19.248290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:19.250909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:19.251074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:03:19.251322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:19.251400Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:03:19.251439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:03:19.251506Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:03:19.253865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:19.253936Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:03:19.253993Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:03:19.256225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:19.256288Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:19.256366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:19.256421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:03:19.264701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:03:19.272078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:03:19.272374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:03:19.273607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:19.273796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:03:19.273863Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:19.274202Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:03:19.274273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:19.274503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:03:19.274592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:03:19.281964Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:19.282038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:19.282269Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:19.282322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:03:19.282582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:19.282646Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:03:19.282803Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:03:19.282844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:19.282888Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:03:19.282926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:19.282968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:03:19.283030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:19.283099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:03:19.283137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:03:19.283229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:03:19.283286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:03:19.283322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:03:19.285571Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:03:19.285709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:03:19.285756Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
02:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:04:09.161132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2025-03-31T16:04:09.161301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:04:09.170277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-03-31T16:04:09.170493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2025-03-31T16:04:09.172187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:04:09.172363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:04:09.172433Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000003, at schemeshard: 72057594046678944 2025-03-31T16:04:09.172554Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2025-03-31T16:04:09.172706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-31T16:04:09.192752Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Bootstrap: self# [1:3458:5422], attempt# 0 2025-03-31T16:04:09.220435Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:3458:5422], sender# [1:3457:5421] REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:7377 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: A2EFAA83-7359-4A83-BE6B-D2E77AD051D7 amz-sdk-request: attempt=1 content-length: 61 content-md5: 5ZuHSMjV1bVKZhThhMGD5g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 61 2025-03-31T16:04:09.241013Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:3458:5422], result# PutObjectResult { ETag: e59b8748c8d5d5b54a6614e184c183e6 } REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:7377 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: A2D35043-9348-47DA-86A1-FACFE750AC2F amz-sdk-request: attempt=1 content-length: 357 content-md5: csvC5nqNTZsSLy4ymlp0/Q== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 
x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 357 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-03-31T16:04:09.247485Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:04:09.247551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T16:04:09.247839Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:04:09.247900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-03-31T16:04:09.248532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:04:09.248598Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:04:09.249004Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:3458:5422], result# PutObjectResult { ETag: 72cbc2e67a8d4d9b122f2e329a5a74fd } 2025-03-31T16:04:09.249663Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:3457:5421] 2025-03-31T16:04:09.250222Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3458:5422], sender# [1:3457:5421], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } 2025-03-31T16:04:09.251258Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:04:09.251398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:04:09.251464Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-03-31T16:04:09.251514Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-03-31T16:04:09.251572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-31T16:04:09.251688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 102 REQUEST: PUT /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:7377 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: AFECAF58-8060-43E4-8B30-1360BE438450 amz-sdk-request: attempt=1 content-length: 740 content-md5: P/a/uWmNWYxyRT1pAtAE7A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv.zst / / 740 2025-03-31T16:04:09.256584Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:3458:5422], result# PutObjectResult { ETag: 3ff6bfb9698d598c72453d6902d004ec } 2025-03-31T16:04:09.256676Z node 1 
:DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:3458:5422], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-03-31T16:04:09.257477Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:3457:5421], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-03-31T16:04:09.271451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-31T16:04:09.302168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 305 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-03-31T16:04:09.302267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-03-31T16:04:09.302514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 305 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-03-31T16:04:09.302662Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 305 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-03-31T16:04:09.302732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-31T16:04:09.302792Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:04:09.302839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-03-31T16:04:09.302891Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-03-31T16:04:09.303055Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:04:09.311958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:04:09.312524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:04:09.312591Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-03-31T16:04:09.312764Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-31T16:04:09.312811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T16:04:09.312856Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 
1/1 2025-03-31T16:04:09.312917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T16:04:09.312973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-03-31T16:04:09.313063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:333:2312] message: TxId: 102 2025-03-31T16:04:09.313127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T16:04:09.313173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T16:04:09.313173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-03-31T16:04:09.313221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-03-31T16:04:09.313328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-31T16:04:09.318690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-31T16:04:09.318762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:3443:5408] TestWaitNotification: OK eventTxId 102
>> KqpImmediateEffects::Replace
>> TSchemeShardMoveTest::Boot [GOOD]
>> TSchemeShardMoveTest::AsyncIndexWithSyncInFly
>> TSchemeShardMoveTest::MoveIndexSameDst [GOOD]
>> TSchemeShardMoveTest::MoveIntoBuildingIndex
>> KqpImmediateEffects::InteractiveTxWithReadAtTheEnd-UseSink
>> TSchemeShardMoveTest::TwoTables [GOOD]
|74.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut
|74.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut
|74.4%| [TA] {RESULT} $(B)/ydb/core/sys_view/query_stats/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|74.4%| [LD] {RESULT} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut
>> TSchemeShardMoveTest::MoveTableForBackup [GOOD]
>> TSchemeShardMoveTest::MoveTableWithSequence
>> TSchemeShardMoveTest::MoveIndex [GOOD]
>> TSchemeShardMoveTest::MoveIndexDoesNonExisted
>> TPersQueueTest::FetchRequest [GOOD]
>> TPersQueueTest::EventBatching
>> KqpInplaceUpdate::Negative_SingleRowListFromRange+UseSink
>> TSchemeShardMoveTest::Chain [GOOD]
>> TSchemeShardMoveTest::Index
>> KqpPg::InsertNoTargetColumns_ColumnOrder-useSink [GOOD]
>> KqpPg::InsertNoTargetColumns_NotOneSize+useSink
>> TSchemeShardMoveTest::ResetCachedPath [GOOD]
>> TSchemeShardMoveTest::Reject [GOOD]
>> TSchemeShardMoveTest::OneTable
>> TSchemeShardMoveTest::MoveMigratedTable [GOOD]
>> TSchemeShardMoveTest::MoveOldTableWithIndex
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::TwoTables [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:04:11.106211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:04:11.106329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:04:11.106373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:04:11.106412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:04:11.106453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:04:11.106479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:04:11.106533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:04:11.106678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:04:11.110546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:04:11.263404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:04:11.263484Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:11.283539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:04:11.283808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:04:11.294116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:04:11.331582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:04:11.338462Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:04:11.347689Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:04:11.355519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:04:11.388437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:04:11.396651Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:04:11.396740Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:04:11.396953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:04:11.397005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:04:11.397066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:04:11.411326Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.447988Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:04:11.678311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:04:11.678602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.678843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:04:11.679223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:04:11.679299Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.682147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:04:11.682258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:04:11.682402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.682467Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:04:11.682499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:04:11.682526Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:04:11.684849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.684943Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:04:11.684998Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:04:11.686947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.687000Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.687048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:04:11.687153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:04:11.691186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:04:11.703979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:04:11.704224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:04:11.705347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:04:11.705515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:04:11.705582Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:04:11.705906Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:04:11.705967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:04:11.706139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:04:11.706221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:04:11.711836Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:04:11.711893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:04:11.712113Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 
2025-03-31T16:04:11.712167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:04:11.712447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.712505Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:04:11.712611Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:04:11.712649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:04:11.712693Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:04:11.712726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:04:11.712767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:04:11.712841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:04:11.712885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:04:11.712915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:04:11.712996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:04:11.713035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:04:11.713070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:04:11.715237Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:04:11.715384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:04:11.715430Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
and all the parts is done, operation id: 103:1 2025-03-31T16:04:12.477475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:1 2025-03-31T16:04:12.477527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-03-31T16:04:12.477550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-03-31T16:04:12.477877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 2 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-31T16:04:12.477931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-03-31T16:04:12.477995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-03-31T16:04:12.478037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-31T16:04:12.478069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-03-31T16:04:12.482925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-03-31T16:04:12.482980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:505:2465] 2025-03-31T16:04:12.483062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 103 2025-03-31T16:04:12.483726Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:04:12.484014Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table1" took 260us result status StatusPathDoesNotExist 2025-03-31T16:04:12.484216Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Table1" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-03-31T16:04:12.484778Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:04:12.485003Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 
72057594046678944 describe path "/MyRoot/TableMove1" took 240us result status StatusSuccess 2025-03-31T16:04:12.485418Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove1" PathDescription { Self { Name: "TableMove1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "TableMove1" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:04:12.486182Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:04:12.486345Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table2" took 165us result status StatusPathDoesNotExist 2025-03-31T16:04:12.486477Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table2\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Table2" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-03-31T16:04:12.486984Z node 1 
:SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:04:12.488186Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TableMove2" took 1.16ms result status StatusSuccess 2025-03-31T16:04:12.488595Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove2" PathDescription { Self { Name: "TableMove2" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "TableMove2" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:04:12.489449Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:04:12.489622Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 182us result status StatusSuccess 2025-03-31T16:04:12.490054Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 15 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 15 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 13 
SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "TableMove1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "TableMove2" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> KqpImmediateEffects::TxWithWriteAtTheEnd+UseSink
>> TSchemeShardMoveTest::AsyncIndexWithSyncInFly [GOOD]
>> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_EnableLocalDBFlatIndex_False [GOOD]
>> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_False_EnableLocalDBFlatIndex_False
>> TExportToS3Tests::ShouldExcludeBackupTableFromStats [GOOD]
>> TExportToS3Tests::ShouldCheckQuotas
>> KqpImmediateEffects::InsertExistingKey+UseSink
|74.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::ResetCachedPath [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:04:11.108228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:04:11.108317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:04:11.108359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:04:11.108394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:04:11.108432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:04:11.108461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:04:11.108518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, 
InflightLimit# 10 2025-03-31T16:04:11.108656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:04:11.110517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:04:11.264120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:04:11.264181Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:11.283550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:04:11.283836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:04:11.294109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:04:11.338902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:04:11.339122Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:04:11.348083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:04:11.355885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:04:11.384338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:04:11.394898Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:04:11.395008Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:04:11.395589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:04:11.395652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:04:11.395700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:04:11.409492Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.439696Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:04:11.606810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:04:11.630625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.630924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:04:11.631273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:04:11.631383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation 
type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.634282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:04:11.639187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:04:11.639605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.639696Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:04:11.639740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:04:11.639786Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:04:11.648613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.648718Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:04:11.648766Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:04:11.651359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.651420Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.651489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:04:11.651562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:04:11.656048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:04:11.658274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:04:11.658492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:04:11.662284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:04:11.662453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:04:11.662524Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:04:11.662890Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
Change state for txid 1:0 128 -> 240 2025-03-31T16:04:11.662961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:04:11.663143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:04:11.663274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:04:11.669215Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:04:11.669278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:04:11.669501Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:04:11.669570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:04:11.669822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.669885Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:04:11.670003Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:04:11.670042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:04:11.670083Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:04:11.670117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:04:11.670158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:04:11.670202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:04:11.670243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:04:11.670274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:04:11.670340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:04:11.670380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:04:11.670414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:04:11.672523Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:04:11.672674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:04:11.672722Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
RelatedPartByTabletId, TxId: 105, tablet: 72075186233409549, partId: 0 2025-03-31T16:04:12.946472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: PREPARED TxId: 105 MinStep: 1 MaxStep: 18446744073709551615 PrepareArriveTime: 155500 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 314 } } 2025-03-31T16:04:12.946584Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TConfigureParts operationId# 105:0 HandleReply TEvProposeTransactionResult, at schemeshard: 72057594046678944 message# TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: PREPARED TxId: 105 MinStep: 1 MaxStep: 18446744073709551615 PrepareArriveTime: 155500 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 314 } } 2025-03-31T16:04:12.946622Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046678944 2025-03-31T16:04:12.946737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectProposeTransactionResults accept TEvProposeTransactionResult, shard: 72075186233409549, shardIdx: 72057594046678944:4, operationId: 105:0, left await: 0, at schemeshard: 72057594046678944 2025-03-31T16:04:12.946785Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:0 3 -> 128 2025-03-31T16:04:12.955052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2025-03-31T16:04:12.955292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2025-03-31T16:04:12.955346Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId# 105:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:04:12.955426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 105 ready parts: 1/1 2025-03-31T16:04:12.955606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409549 Flags: 2 } ExecLevel: 0 TxId: 105 MinStep: 1 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:04:12.957572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 105:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:105 msg type: 269090816 2025-03-31T16:04:12.957702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 105, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 105 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 105 at step: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72075186233409549 for txId: 105 at step: 5000004 2025-03-31T16:04:12.958155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:04:12.958287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 105 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 
2025-03-31T16:04:12.958348Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId# 105:0 HandleReply TEvOperationPlan, operationId: 105:0, stepId: 5000004, at schemeshard: 72057594046678944 2025-03-31T16:04:12.958645Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:0 128 -> 129 2025-03-31T16:04:12.958796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000004 2025-03-31T16:04:12.971241Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:04:12.971307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-03-31T16:04:12.971581Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:04:12.971626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 105, path id: 3 2025-03-31T16:04:12.972377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2025-03-31T16:04:12.972440Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 105:0 ProgressState at tablet: 72057594046678944 FAKE_COORDINATOR: Erasing txId 105 2025-03-31T16:04:12.976018Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 8 PathOwnerId: 72057594046678944, cookie: 105 2025-03-31T16:04:12.976174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 8 PathOwnerId: 72057594046678944, cookie: 105 2025-03-31T16:04:12.976218Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2025-03-31T16:04:12.976262Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 8 2025-03-31T16:04:12.976304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-03-31T16:04:12.976412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 0/1, is published: true 2025-03-31T16:04:12.977464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 105 Step: 5000004 OrderId: 105 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1373 } } 2025-03-31T16:04:12.977513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409549, partId: 0 2025-03-31T16:04:12.977657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 105 Step: 5000004 OrderId: 105 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 
CpuTimeUsec: 1373 } } 2025-03-31T16:04:12.977770Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 105 Step: 5000004 OrderId: 105 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1373 } } 2025-03-31T16:04:12.978367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 675 RawX2: 4294969909 } Origin: 72075186233409549 State: 2 TxId: 105 Step: 0 Generation: 2 2025-03-31T16:04:12.978428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409549, partId: 0 2025-03-31T16:04:12.978570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: Source { RawX1: 675 RawX2: 4294969909 } Origin: 72075186233409549 State: 2 TxId: 105 Step: 0 Generation: 2 2025-03-31T16:04:12.978622Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 105:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-03-31T16:04:12.978700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 105:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 675 RawX2: 4294969909 } Origin: 72075186233409549 State: 2 TxId: 105 Step: 0 Generation: 2 2025-03-31T16:04:12.978768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 105:0, shardIdx: 72057594046678944:4, datashard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-31T16:04:12.978811Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 105:0, at schemeshard: 72057594046678944 2025-03-31T16:04:12.978851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 105:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2025-03-31T16:04:12.978885Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:0 129 -> 240 2025-03-31T16:04:12.982232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-03-31T16:04:12.989093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2025-03-31T16:04:12.989335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2025-03-31T16:04:12.989702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2025-03-31T16:04:12.989756Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 105:0 ProgressState 2025-03-31T16:04:12.989870Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-03-31T16:04:12.989917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-03-31T16:04:12.989958Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-03-31T16:04:12.990022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-03-31T16:04:12.990072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: true 2025-03-31T16:04:12.990140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:333:2312] message: TxId: 105 2025-03-31T16:04:12.990211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-03-31T16:04:12.990280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2025-03-31T16:04:12.990315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2025-03-31T16:04:12.990457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-03-31T16:04:12.993128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-03-31T16:04:12.993230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [1:837:2756] TestWaitNotification: OK eventTxId 105 >> TSchemeShardMoveTest::MoveIndexDoesNonExisted [GOOD] >> TSchemeShardMoveTest::Replace [GOOD] >> TSchemeShardMoveTest::MoveIntoBuildingIndex [GOOD] |74.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange |74.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange |74.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange >> TSchemeShardMoveTest::MoveTableWithSequence [GOOD] >> TSchemeShardMoveTest::Index [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::AsyncIndexWithSyncInFly [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:04:11.108526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:04:11.108613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:04:11.108654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:04:11.108687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:04:11.108728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:04:11.108758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:04:11.108813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:04:11.108949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, 
CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:04:11.110652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:04:11.260404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:04:11.260476Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:11.283569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:04:11.283783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:04:11.293918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:04:11.350449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:04:11.350707Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:04:11.351420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:04:11.355583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:04:11.387855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:04:11.395143Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:04:11.395217Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:04:11.395434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:04:11.395504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:04:11.395548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:04:11.409442Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.443052Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:04:11.649914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:04:11.650255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.650543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:04:11.650826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:04:11.650898Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.655352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 
72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:04:11.655575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:04:11.655824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.655900Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:04:11.655959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:04:11.655995Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:04:11.666979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.667050Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:04:11.667114Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:04:11.674964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.675026Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.675074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:04:11.675151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:04:11.679287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:04:11.682695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:04:11.682931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:04:11.684065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:04:11.684226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:04:11.684298Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:04:11.684592Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:04:11.684654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:04:11.684806Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:04:11.684900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:04:11.687007Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:04:11.687050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:04:11.687260Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:04:11.687323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:04:11.687610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.687658Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:04:11.687750Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:04:11.687784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:04:11.687822Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:04:11.687856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:04:11.687890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:04:11.687933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:04:11.687968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:04:11.687999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:04:11.688059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:04:11.688118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:04:11.688171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:04:11.690236Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:04:11.690377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:04:11.690437Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
ly execute, operationId: 103:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 103 Step: 5000004 OrderId: 103 ExecLatency: 0 ProposeLatency: 11 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1240 } } 2025-03-31T16:04:13.298831Z node 2 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 103 Step: 5000004 OrderId: 103 ExecLatency: 0 ProposeLatency: 11 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1240 } } FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000004 FAKE_COORDINATOR: Erasing txId 103 2025-03-31T16:04:13.300678Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 318 RawX2: 8589936894 } Origin: 72075186233409546 State: 2 TxId: 103 Step: 0 Generation: 2 2025-03-31T16:04:13.300740Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409546, partId: 2 2025-03-31T16:04:13.300914Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:2, at schemeshard: 72057594046678944, message: Source { RawX1: 318 RawX2: 8589936894 } Origin: 72075186233409546 State: 2 TxId: 103 Step: 0 Generation: 2 2025-03-31T16:04:13.300975Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 103:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-03-31T16:04:13.301095Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 103:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 318 RawX2: 8589936894 } Origin: 72075186233409546 State: 2 TxId: 103 Step: 0 Generation: 2 2025-03-31T16:04:13.301165Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 103:2, shardIdx: 72057594046678944:2, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-31T16:04:13.301206Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 103:2, at schemeshard: 72057594046678944 2025-03-31T16:04:13.301259Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 103:2, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-03-31T16:04:13.301308Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:2 129 -> 240 2025-03-31T16:04:13.302500Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 327 RawX2: 8589936901 } Origin: 72075186233409547 State: 2 TxId: 103 Step: 0 Generation: 2 2025-03-31T16:04:13.302554Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409547, partId: 0 2025-03-31T16:04:13.302679Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: Source { RawX1: 327 RawX2: 8589936901 } Origin: 72075186233409547 State: 2 TxId: 103 Step: 0 Generation: 2 2025-03-31T16:04:13.302723Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 103:0 HandleReply 
TEvSchemaChanged at tablet: 72057594046678944 2025-03-31T16:04:13.302793Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 103:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 327 RawX2: 8589936901 } Origin: 72075186233409547 State: 2 TxId: 103 Step: 0 Generation: 2 2025-03-31T16:04:13.302844Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 103:0, shardIdx: 72057594046678944:1, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-31T16:04:13.302876Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-31T16:04:13.302908Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 103:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-03-31T16:04:13.302943Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 129 -> 240 2025-03-31T16:04:13.309846Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:2, at schemeshard: 72057594046678944 2025-03-31T16:04:13.311512Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-31T16:04:13.314261Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:2, at schemeshard: 72057594046678944 2025-03-31T16:04:13.314613Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:2, at schemeshard: 72057594046678944 2025-03-31T16:04:13.314672Z node 2 :FLAT_TX_SCHEMESHARD INFO: TMoveTable TDone, operationId: 103:2 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:04:13.314737Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TMoveTable TDone, operationId: 103:2 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 4], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2025-03-31T16:04:13.314877Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:2 progress is 2/3 2025-03-31T16:04:13.314925Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 2/3 2025-03-31T16:04:13.314970Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:2 progress is 2/3 2025-03-31T16:04:13.315011Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 2/3 2025-03-31T16:04:13.315052Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 2/3, is published: true 2025-03-31T16:04:13.316695Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-31T16:04:13.317134Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-31T16:04:13.317190Z node 2 :FLAT_TX_SCHEMESHARD INFO: TMoveTable TDone, operationId: 103:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:04:13.317233Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TMoveTable TDone, operationId: 103:0 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 2], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-03-31T16:04:13.317324Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 3/3 2025-03-31T16:04:13.317361Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 3/3 2025-03-31T16:04:13.317402Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 3/3 2025-03-31T16:04:13.317432Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 3/3 2025-03-31T16:04:13.317467Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 3/3, is published: true 2025-03-31T16:04:13.317508Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 3/3 2025-03-31T16:04:13.317566Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-03-31T16:04:13.317613Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-03-31T16:04:13.317760Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-03-31T16:04:13.317837Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-31T16:04:13.317910Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:1 2025-03-31T16:04:13.317936Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:1 2025-03-31T16:04:13.317975Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-03-31T16:04:13.318006Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-31T16:04:13.318029Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:2 2025-03-31T16:04:13.318051Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:2 2025-03-31T16:04:13.318104Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2025-03-31T16:04:13.318129Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-03-31T16:04:13.318549Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-31T16:04:13.318596Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-03-31T16:04:13.318673Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-03-31T16:04:13.318718Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-03-31T16:04:13.318747Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-31T16:04:13.318772Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-31T16:04:13.318799Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:04:13.326406Z 
node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 3 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-03-31T16:04:13.332376Z node 2 :TX_PROXY DEBUG: actor# [2:269:2260] Handle TEvGetProxyServicesRequest TestWaitNotification wait txId: 103 2025-03-31T16:04:13.408062Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-03-31T16:04:13.408146Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-03-31T16:04:13.408740Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-03-31T16:04:13.408856Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-03-31T16:04:13.408909Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:673:2558] TestWaitNotification: OK eventTxId 103 >> KqpImmediateEffects::UpsertConflictInteractiveTxAborted [GOOD] |74.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview |74.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview |74.5%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview >> TSchemeShardMoveTest::MoveOldTableWithIndex [GOOD] >> KqpEffects::InsertAbort_Literal_Duplicates+UseSink [GOOD] >> KqpEffects::InsertAbort_Literal_Conflict-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::MoveIndexDoesNonExisted [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:04:11.104945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:04:11.105030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:04:11.105067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:04:11.105105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:04:11.105145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:04:11.105175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:04:11.105227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:04:11.105365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:04:11.111553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TxInitSchema.Execute 2025-03-31T16:04:11.260437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:04:11.260513Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:11.285011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:04:11.285255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:04:11.294423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:04:11.338430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:04:11.338637Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:04:11.347742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:04:11.355481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:04:11.387274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:04:11.396554Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:04:11.396629Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:04:11.396809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:04:11.396858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:04:11.396933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:04:11.413839Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.441600Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:04:11.626443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:04:11.626894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.627334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:04:11.627683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:04:11.627761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.632933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:04:11.639203Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:04:11.639603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.639690Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:04:11.639763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:04:11.639806Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:04:11.645043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.645121Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:04:11.645160Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:04:11.648736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.648791Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.648831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:04:11.648886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:04:11.653798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:04:11.656312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:04:11.656512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:04:11.665041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:04:11.665220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:04:11.665286Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:04:11.665574Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:04:11.665634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:04:11.665803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, 
LocalPathId: 1] was 1 2025-03-31T16:04:11.671294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:04:11.684034Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:04:11.684086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:04:11.684312Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:04:11.684377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:04:11.684615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.684668Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:04:11.684766Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:04:11.684804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:04:11.684866Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:04:11.684896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:04:11.684932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:04:11.684987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:04:11.685034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:04:11.685063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:04:11.685119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:04:11.685158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:04:11.685211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:04:11.687290Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:04:11.687416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:04:11.687501Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:04:13.986589Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:04:13.986845Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 3.73ms result status StatusSuccess 2025-03-31T16:04:13.987329Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:04:13.988055Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Sync" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-31T16:04:13.988332Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Sync" took 296us result status StatusSuccess 2025-03-31T16:04:13.989114Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Sync" PathDescription { Self { Name: "Sync" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { 
GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "Sync" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value0" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value0" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 
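The CompactionPolicy in the describe result above gives every generation threshold as a raw byte count. A small sketch, with values copied verbatim from the dump, renders them in binary units so the gen0/gen1/gen2 tiering is easier to scan; this is plain arithmetic, and nothing about the policy semantics is assumed.

def human(num_bytes):
    # Convert a raw byte count to binary units (KiB/MiB/GiB/TiB).
    n = float(num_bytes)
    for unit in ("B", "KiB", "MiB", "GiB", "TiB"):
        if n < 1024 or unit == "TiB":
            return f"{n:g} {unit}"
        n /= 1024

# Values copied from the CompactionPolicy / PartitioningPolicy dump above.
thresholds = {
    "gen0 ForceSizeToCompact": 134217728,          # 128 MiB
    "gen1 SizeToCompact": 41943040,                # 40 MiB
    "gen1 ForceSizeToCompact": 536870912,          # 512 MiB
    "gen2 SizeToCompact": 419430400,               # 400 MiB
    "gen2 ForceSizeToCompact": 17179869184,        # 16 GiB
    "PartitioningPolicy SizeToSplit": 2147483648,  # 2 GiB
}
for name, value in thresholds.items():
    print(f"{name}: {human(value)}")

(CompactionBrokerQueue: 4294967295 equals 2**32 - 1; reading it as an "unset" sentinel is an inference from the value, not something the dump states.)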
2025-03-31T16:04:13.989907Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Async" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-31T16:04:13.990155Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Async" took 261us result status StatusSuccess 2025-03-31T16:04:13.990885Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Async" PathDescription { Self { Name: "Async" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 5 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "Async" LocalPathId: 5 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "value1" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value1" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { 
Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::MoveIntoBuildingIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:04:11.106034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:04:11.106113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:04:11.106148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:04:11.106181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:04:11.106237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:04:11.106265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:04:11.106317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:04:11.106451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:04:11.110855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:04:11.266668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:04:11.266724Z node 1 :IMPORT WARN: Table profiles were 
not loaded 2025-03-31T16:04:11.283717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:04:11.285507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:04:11.294555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:04:11.337257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:04:11.339203Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:04:11.347790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:04:11.355881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:04:11.387275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:04:11.395225Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:04:11.395313Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:04:11.395534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:04:11.395599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:04:11.395647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:04:11.410768Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.436924Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:04:11.599734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:04:11.624368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.624581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:04:11.624826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:04:11.624893Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.632272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:04:11.639241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:04:11.639624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.639700Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:04:11.639742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:04:11.639778Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:04:11.651879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.651948Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:04:11.651985Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:04:11.659076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.659158Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.659199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:04:11.659262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:04:11.680685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:04:11.683978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:04:11.684154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:04:11.685076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:04:11.685216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:04:11.685270Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:04:11.685658Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:04:11.685710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:04:11.685853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:04:11.685937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:04:11.691328Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:04:11.691386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:04:11.691601Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:04:11.691673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:04:11.691936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.691987Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:04:11.692079Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:04:11.692113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:04:11.692177Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:04:11.692208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:04:11.692243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:04:11.692285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:04:11.692348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:04:11.692381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:04:11.692439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:04:11.692479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:04:11.692530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:04:11.694431Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:04:11.694555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:04:11.694596Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
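Within the block above the operation walks a numeric state machine, logged as "Change state for txid 1:0 2 -> 3", then "3 -> 128", then "128 -> 240". A sketch that extracts such transitions so the path can be read at a glance; the numeric codes are internal to schemeshard and no meaning is assigned to them here:

import re

# Example records copied from the log above.
log = """\
2025-03-31T16:04:11.639778Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-03-31T16:04:11.651985Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-03-31T16:04:11.685658Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
"""
for op, src, dst in re.findall(r"Change state for txid (\S+) (\d+) -> (\d+)", log):
    print(f"{op}: {src} -> {dst}")  # 1:0: 2 -> 3, then 3 -> 128, then 128 -> 240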
thId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: Sync, IndexColumn: value0, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:448:2409], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-31T16:04:14.109578Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710760:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710760 msg type: 269090816 2025-03-31T16:04:14.109705Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710760, partId: 4294967295, tablet: 72057594046316545 2025-03-31T16:04:14.109860Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710760, at schemeshard: 72057594046678944 2025-03-31T16:04:14.109896Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 0/1, is published: true 2025-03-31T16:04:14.109929Z node 2 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710760, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 281474976710760 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710760 at step: 5000006 2025-03-31T16:04:14.110207Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000006, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:04:14.110316Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710760 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 8589936749 } } Step: 5000006 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:04:14.110368Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 HandleReply TEvOperationPlan: step# 5000006 2025-03-31T16:04:14.110407Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710760:0 128 -> 240 2025-03-31T16:04:14.116516Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2025-03-31T16:04:14.116581Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710760:0 ProgressState 2025-03-31T16:04:14.116664Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1 2025-03-31T16:04:14.116692Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-03-31T16:04:14.116725Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1 2025-03-31T16:04:14.116767Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-03-31T16:04:14.116795Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 1/1, is published: true 2025-03-31T16:04:14.116852Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:123:2149] message: TxId: 281474976710760 2025-03-31T16:04:14.116887Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-03-31T16:04:14.116915Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710760:0 2025-03-31T16:04:14.116941Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710760:0 2025-03-31T16:04:14.117018Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 FAKE_COORDINATOR: Erasing txId 281474976710760 2025-03-31T16:04:14.119386Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2025-03-31T16:04:14.119454Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710760 2025-03-31T16:04:14.119540Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfoId: 102 2025-03-31T16:04:14.119615Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: Sync, IndexColumn: value0, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:448:2409], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-31T16:04:14.121980Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-03-31T16:04:14.122088Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: Sync, IndexColumn: value0, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:448:2409], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 
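The TBuildInfo dumps above carry a fixed set of per-stage flags: the earlier dump still has UnlockTxDone: 0, the Resume after TEvNotifyTxCompletionResult shows UnlockTxDone: 1, and the state then moves from Unlocking to Done. A sketch that checks those flags; the Lock -> Initiate -> Apply -> Unlock ordering is inferred from the dumps, not a documented contract:

# Flag fragment copied from the final TBuildInfo dump above.
dump = "LockTxDone: 1, InitiateTxDone: 1, ApplyTxDone: 1, UnlockTxDone: 1"
flags = dict(part.split(": ") for part in dump.split(", "))
stages = ["Lock", "Initiate", "Apply", "Unlock"]  # inferred ordering
completed = [s for s in stages if flags[f"{s}TxDone"] == "1"]
print("completed stages:", completed)  # all four done -> build may move to Done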
2025-03-31T16:04:14.122148Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-03-31T16:04:14.124868Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-03-31T16:04:14.124959Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: Sync, IndexColumn: value0, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:448:2409], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-31T16:04:14.125003Z node 2 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2025-03-31T16:04:14.125172Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-31T16:04:14.125235Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:629:2578] TestWaitNotification: OK eventTxId 102 2025-03-31T16:04:14.125850Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:04:14.126099Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 294us result status StatusSuccess 2025-03-31T16:04:14.126569Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "SomeIndex" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value1" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableIndexes { Name: "Sync" LocalPathId: 5 Type: EIndexTypeGlobal State: EIndexStateReady 
KeyColumnNames: "value0" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 3 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::MoveTableWithSequence [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:04:11.108516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:04:11.108642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:04:11.108683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:04:11.108723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:04:11.108766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:04:11.108796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:04:11.108854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:04:11.109007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:04:11.110426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:04:11.260461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:04:11.260531Z node 1 :IMPORT WARN: Table profiles were not 
loaded 2025-03-31T16:04:11.285932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:04:11.286102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:04:11.294704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:04:11.334629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:04:11.338450Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:04:11.347008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:04:11.355479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:04:11.387735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:04:11.395488Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:04:11.395555Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:04:11.395746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:04:11.395798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:04:11.395835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:04:11.410526Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.447409Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:04:11.617556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:04:11.626559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.626795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:04:11.627069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:04:11.627177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.636294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:04:11.636470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:04:11.636781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.636859Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:04:11.636934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:04:11.636973Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:04:11.642250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.642340Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:04:11.642386Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:04:11.648522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.648587Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.648630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:04:11.648688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:04:11.652661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:04:11.654455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:04:11.654657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:04:11.661956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:04:11.662161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:04:11.662239Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:04:11.662572Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:04:11.662629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:04:11.662816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:04:11.662911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:04:11.668138Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:04:11.668205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:04:11.668447Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:04:11.668509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:04:11.668758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.668811Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:04:11.668904Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:04:11.668938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:04:11.668982Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:04:11.669013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:04:11.669050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:04:11.669097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:04:11.669133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:04:11.669163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:04:11.669224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:04:11.669267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:04:11.669322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:04:11.672302Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:04:11.672474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:04:11.672535Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
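Two sentinel-looking constants recur in the propose messages above: MaxStep: 18446744073709551615 and the partId in operationId 1:4294967295. They are the uint64 and uint32 maxima; reading them as "no upper bound" and "whole operation" markers is an inference from the values, not something the log states.

# Check the two maxima seen in the log above.
assert 18446744073709551615 == 2**64 - 1  # MaxStep in the coordinator propose
assert 4294967295 == 2**32 - 1            # partId in operationId 1:4294967295
print("both values are unsigned-integer maxima")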
SHARD DEBUG: TMoveSequence TDropParts HandleReply TEvDropSequenceResult shardId# 72075186233409546 status# SUCCESS operationId# 102:1 at tablet 72057594046678944 2025-03-31T16:04:14.217278Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-31T16:04:14.217332Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:1 4 -> 240 2025-03-31T16:04:14.219186Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:1, at schemeshard: 72057594046678944 2025-03-31T16:04:14.219358Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:1, at schemeshard: 72057594046678944 2025-03-31T16:04:14.219403Z node 2 :FLAT_TX_SCHEMESHARD INFO: TMoveSequence TDone, operationId: 102:1 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:04:14.219450Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TMoveSequence TDone, operationId: 102:1 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 3], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-03-31T16:04:14.219577Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:1 progress is 2/2 2025-03-31T16:04:14.219609Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 2/2 2025-03-31T16:04:14.219645Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:1 progress is 2/2 2025-03-31T16:04:14.219672Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 2/2 2025-03-31T16:04:14.219701Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 2/2, is published: true 2025-03-31T16:04:14.219772Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:376:2345] message: TxId: 102 2025-03-31T16:04:14.219823Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 2/2 2025-03-31T16:04:14.219871Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-03-31T16:04:14.219910Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-03-31T16:04:14.220034Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-03-31T16:04:14.220097Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-31T16:04:14.220148Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:1 2025-03-31T16:04:14.220170Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:1 2025-03-31T16:04:14.220218Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-03-31T16:04:14.220243Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-03-31T16:04:14.220636Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-31T16:04:14.220690Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-03-31T16:04:14.220755Z node 2 :FLAT_TX_SCHEMESHARD 
DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-31T16:04:14.220795Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-31T16:04:14.220827Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-03-31T16:04:14.224187Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-31T16:04:14.224243Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:474:2430] 2025-03-31T16:04:14.224746Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 2025-03-31T16:04:14.228282Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/myseq" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:04:14.228473Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/myseq" took 227us result status StatusPathDoesNotExist 2025-03-31T16:04:14.228624Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/myseq\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Table/myseq" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-03-31T16:04:14.229017Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:04:14.229187Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 171us result status StatusPathDoesNotExist 2025-03-31T16:04:14.229329Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Table" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 
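The describe-path records above report latency inline as "took 3.73ms" or "took 227us". A tiny normalizer to microseconds for comparing them; only the two unit suffixes that actually appear in this log are handled:

import re

def to_us(s):
    # Accepts the "<number>us" / "<number>ms" forms seen in the log above.
    m = re.fullmatch(r"([\d.]+)(us|ms)", s)
    value, unit = float(m.group(1)), m.group(2)
    return value * (1000.0 if unit == "ms" else 1.0)

for sample in ("3.73ms", "296us", "227us", "171us"):
    print(sample, "->", to_us(sample), "us")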
2025-03-31T16:04:14.229725Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:04:14.229917Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TableMove" took 207us result status StatusSuccess 2025-03-31T16:04:14.230310Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove" PathDescription { Self { Name: "TableMove" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: true } Table { Name: "TableMove" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 DefaultFromSequence: "myseq" NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 IsBackup: false Sequences { Name: "myseq" PathId { OwnerId: 72057594046678944 LocalId: 5 } Version: 2 SequenceShard: 72075186233409546 MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 Cache: 1 Increment: 1 Cycle: false DataType: "Int64" } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:04:14.230818Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove/myseq" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-31T16:04:14.230980Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TableMove/myseq" took 199us result status StatusSuccess 2025-03-31T16:04:14.231263Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove/myseq" PathDescription { Self { Name: "myseq" PathId: 5 SchemeshardId: 
72057594046678944 PathType: EPathTypeSequence CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SequenceVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } SequenceDescription { Name: "myseq" PathId { OwnerId: 72057594046678944 LocalId: 5 } Version: 2 SequenceShard: 72075186233409546 MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 Cache: 1 Increment: 1 Cycle: false DataType: "Int64" } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::Index [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:04:11.106010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:04:11.106097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:04:11.106143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:04:11.106182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:04:11.106227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:04:11.106257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:04:11.106316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:04:11.106466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:04:11.110433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:04:11.265141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:04:11.265200Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:11.283570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 
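The SequenceDescription dumped above fully parameterizes myseq: StartValue 1, Increment 1, MinValue 1, MaxValue 9223372036854775807 (int64 max), Cache 1, Cycle false. A toy generator that replays those fields for illustration; it is not YDB's sequence-shard logic, and Cache is ignored here:

def sequence(start=1, increment=1, min_value=1, max_value=2**63 - 1):
    # Cycle: false in the dump, so iteration simply stops at the bound.
    v = start
    while min_value <= v <= max_value:
        yield v
        v += increment

gen = sequence()
print([next(gen) for _ in range(3)])     # [1, 2, 3]
assert 9223372036854775807 == 2**63 - 1  # MaxValue in the dump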
2025-03-31T16:04:11.283823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:04:11.294452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:04:11.334466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:04:11.338676Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:04:11.348900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:04:11.355633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:04:11.386837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:04:11.394987Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:04:11.395098Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:04:11.395310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:04:11.395375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:04:11.395421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:04:11.407406Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.438902Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:04:11.671531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:04:11.671822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.672027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:04:11.672267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:04:11.672324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.676770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:04:11.676945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:04:11.677198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.677330Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:04:11.677376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:04:11.677413Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:04:11.683336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.683412Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:04:11.683470Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:04:11.687866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.687927Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.687978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:04:11.688044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:04:11.692823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:04:11.695183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:04:11.695405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:04:11.696547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:04:11.696724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:04:11.696795Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:04:11.697108Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:04:11.697170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:04:11.697355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:04:11.697467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:04:11.699980Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:04:11.700101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:04:11.700324Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:04:11.700392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:04:11.700694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.700747Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:04:11.700860Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:04:11.700901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:04:11.700942Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:04:11.700974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:04:11.701017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:04:11.701089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:04:11.701160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:04:11.701194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:04:11.701268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:04:11.701308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:04:11.701362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:04:11.703734Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:04:11.703892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:04:11.703936Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:04:14.364593Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:04:14.364769Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 202us result status StatusSuccess 2025-03-31T16:04:14.365152Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 12 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 12 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 10 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "TableMove" PathId: 7 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:04:14.365705Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove/Sync" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-31T16:04:14.365956Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TableMove/Sync" took 266us result status StatusSuccess 2025-03-31T16:04:14.366760Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove/Sync" PathDescription { Self { Name: "Sync" PathId: 10 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 7 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 2 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 11 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 10 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 5 
ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "Sync" LocalPathId: 10 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value0" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value0" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 10 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 
2025-03-31T16:04:14.367522Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove/Async" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-31T16:04:14.367766Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TableMove/Async" took 278us result status StatusSuccess 2025-03-31T16:04:14.368482Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove/Async" PathDescription { Self { Name: "Async" PathId: 8 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 7 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 2 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 9 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 8 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "Async" LocalPathId: 8 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "value1" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value1" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false 
BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 8 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::Replace [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:04:11.105285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:04:11.105505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:04:11.105566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:04:11.105617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:04:11.105669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:04:11.105712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:04:11.105774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:04:11.105926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:04:11.110361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:04:11.271541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:04:11.271626Z node 1 :IMPORT WARN: Table 
profiles were not loaded 2025-03-31T16:04:11.284767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:04:11.284939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:04:11.294518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:04:11.350444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:04:11.350701Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:04:11.351445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:04:11.356188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:04:11.392165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:04:11.395260Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:04:11.395351Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:04:11.395605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:04:11.395679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:04:11.395730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:04:11.410168Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.441201Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:04:11.640450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:04:11.640715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.640905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:04:11.641146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:04:11.641218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.649359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:04:11.649552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:04:11.649736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.649822Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:04:11.649867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:04:11.649906Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:04:11.654215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.654303Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:04:11.654344Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:04:11.657301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.657362Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.657409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:04:11.657497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:04:11.668744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:04:11.676003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:04:11.676249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:04:11.677427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:04:11.677586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:04:11.677654Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:04:11.677980Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:04:11.678042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:04:11.678221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:04:11.678306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, 
LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:04:11.695191Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:04:11.695250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:04:11.695454Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:04:11.695540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:04:11.695819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.695870Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:04:11.695977Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:04:11.696023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:04:11.696064Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:04:11.696096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:04:11.696138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:04:11.696202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:04:11.696250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:04:11.696285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:04:11.696354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:04:11.696396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:04:11.696431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:04:11.698636Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:04:11.698789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:04:11.698831Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
HARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-03-31T16:04:13.955348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 16] was 1 2025-03-31T16:04:13.955727Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409548 2025-03-31T16:04:13.957523Z node 1 :FLAT_TX_SCHEMESHARD INFO: Transaction 105 reset current state at schemeshard 72057594046678944 because pipe to tablet 72075186233409546 disconnected Forgetting tablet 72075186233409547 2025-03-31T16:04:13.959649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-03-31T16:04:13.959918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 12] was 3 Forgetting tablet 72075186233409548 2025-03-31T16:04:13.960363Z node 1 :FLAT_TX_SCHEMESHARD INFO: Transaction 105 reset current state at schemeshard 72057594046678944 because pipe to tablet 72075186233409547 disconnected 2025-03-31T16:04:13.960771Z node 1 :FLAT_TX_SCHEMESHARD INFO: Transaction 105 reset current state at schemeshard 72057594046678944 because pipe to tablet 72075186233409548 disconnected 2025-03-31T16:04:13.961665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-03-31T16:04:13.961724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [1:1416:3178] 2025-03-31T16:04:13.961923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 5 paths, skipped 0, left 2 candidates, at schemeshard: 72057594046678944 2025-03-31T16:04:13.962045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 2 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-31T16:04:13.962092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 16], at schemeshard: 72057594046678944 2025-03-31T16:04:13.962174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 15] was 1 2025-03-31T16:04:13.962216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 15], at schemeshard: 72057594046678944 2025-03-31T16:04:13.962252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 12] was 2 2025-03-31T16:04:13.962278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 14], at schemeshard: 72057594046678944 2025-03-31T16:04:13.962308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 1 2025-03-31T16:04:13.962332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 13], at schemeshard: 72057594046678944 2025-03-31T16:04:13.962359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 12] was 1 2025-03-31T16:04:13.962385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 12], at schemeshard: 72057594046678944 2025-03-31T16:04:13.962418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:04:13.966141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-03-31T16:04:13.966218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409546 2025-03-31T16:04:13.966384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-03-31T16:04:13.966413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-03-31T16:04:13.966500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-03-31T16:04:13.966536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409548 2025-03-31T16:04:13.966960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 5 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 105 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted 2025-03-31T16:04:13.967453Z node 1 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2025-03-31T16:04:13.967549Z node 1 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2025-03-31T16:04:13.967610Z node 1 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 2025-03-31T16:04:13.968248Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Src" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:04:13.968448Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Src" took 247us result status StatusPathDoesNotExist 2025-03-31T16:04:13.968593Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Src\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Src" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-03-31T16:04:13.969156Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Dst" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 
2025-03-31T16:04:13.969370Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Dst" took 231us result status StatusSuccess 2025-03-31T16:04:13.969800Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Dst" PathDescription { Self { Name: "Dst" PathId: 22 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 5000006 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "Async" LocalPathId: 23 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "value1" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableIndexes { Name: "Sync" LocalPathId: 25 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value0" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 2 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 22 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:04:13.970678Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:04:13.970827Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 168us result status StatusSuccess 2025-03-31T16:04:13.971220Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 
1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 28 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 28 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 26 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 22 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 5000006 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardMoveTest::OneTable [GOOD] >> KqpImmediateEffects::UpsertDuplicates [GOOD] >> KqpImmediateEffects::UpsertExistingKey >> TestProgram::YqlKernel >> TestProgram::YqlKernel [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_False_EnableLocalDBFlatIndex_False [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_TurnOff ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::MoveOldTableWithIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:04:11.104286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:04:11.104403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:04:11.104448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:04:11.104496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:04:11.104548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:04:11.104603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:04:11.104671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:04:11.104853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, 
DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:04:11.110114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:04:11.256505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:04:11.256556Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:11.284386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:04:11.284562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:04:11.294923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:04:11.340426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:04:11.340645Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:04:11.348621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:04:11.356483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:04:11.392218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:04:11.397075Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:04:11.397157Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:04:11.397351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:04:11.397414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:04:11.397460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:04:11.410752Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.436952Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:04:11.623231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:04:11.623588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.623826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:04:11.624114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:04:11.624181Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.633052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: 
StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:04:11.639251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:04:11.639629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.639711Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:04:11.639757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:04:11.639797Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:04:11.642546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.642633Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:04:11.642675Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:04:11.646507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.646572Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.646646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:04:11.646732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:04:11.651162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:04:11.664288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:04:11.664564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:04:11.665668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:04:11.665829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:04:11.665901Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:04:11.666198Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:04:11.666261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 
72057594046678944 2025-03-31T16:04:11.666427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:04:11.666525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:04:11.676669Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:04:11.676773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:04:11.677003Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:04:11.677060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:04:11.677349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.677404Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:04:11.677520Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:04:11.677563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:04:11.677608Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:04:11.677643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:04:11.677685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:04:11.677732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:04:11.677772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:04:11.677806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:04:11.677877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:04:11.677917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:04:11.677971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:04:11.680289Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:04:11.680455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:04:11.680497Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
p: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 11 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 789 } } 2025-03-31T16:04:14.790368Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-03-31T16:04:14.790481Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 11 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 789 } } 2025-03-31T16:04:14.790561Z node 2 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 11 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 789 } } FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 FAKE_COORDINATOR: Erasing txId 102 2025-03-31T16:04:14.791766Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 322 RawX2: 8589936898 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-03-31T16:04:14.791824Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 2 2025-03-31T16:04:14.791969Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:2, at schemeshard: 72057594046678944, message: Source { RawX1: 322 RawX2: 8589936898 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-03-31T16:04:14.792021Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-03-31T16:04:14.792115Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 322 RawX2: 8589936898 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-03-31T16:04:14.792190Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:2, shardIdx: 72057594046678944:2, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-31T16:04:14.792234Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:2, at schemeshard: 72057594046678944 2025-03-31T16:04:14.792273Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:2, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-03-31T16:04:14.792316Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:2 129 -> 240 2025-03-31T16:04:14.793219Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 327 RawX2: 8589936902 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-03-31T16:04:14.793261Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 
2025-03-31T16:04:14.793360Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 327 RawX2: 8589936902 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-03-31T16:04:14.793397Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-03-31T16:04:14.793462Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 327 RawX2: 8589936902 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-03-31T16:04:14.793507Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-31T16:04:14.793538Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:04:14.793567Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-03-31T16:04:14.793601Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-03-31T16:04:14.807909Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:2, at schemeshard: 72057594046678944 2025-03-31T16:04:14.809245Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:04:14.810940Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:2, at schemeshard: 72057594046678944 2025-03-31T16:04:14.811391Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:04:14.811652Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:2, at schemeshard: 72057594046678944 2025-03-31T16:04:14.811715Z node 2 :FLAT_TX_SCHEMESHARD INFO: TMoveTable TDone, operationId: 102:2 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:04:14.811771Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TMoveTable TDone, operationId: 102:2 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 4], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2025-03-31T16:04:14.811897Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:2 progress is 2/3 2025-03-31T16:04:14.811938Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 2/3 2025-03-31T16:04:14.811983Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:2 progress is 2/3 2025-03-31T16:04:14.812024Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 2/3 2025-03-31T16:04:14.812068Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 2/3, is published: true 2025-03-31T16:04:14.812287Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:04:14.812324Z node 2 :FLAT_TX_SCHEMESHARD INFO: TMoveTable TDone, operationId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:04:14.812357Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: TMoveTable TDone, operationId: 102:0 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 2], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-03-31T16:04:14.812416Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 3/3 2025-03-31T16:04:14.812441Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 3/3 2025-03-31T16:04:14.812472Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 3/3 2025-03-31T16:04:14.812495Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 3/3 2025-03-31T16:04:14.812519Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 3/3, is published: true 2025-03-31T16:04:14.812595Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:378:2346] message: TxId: 102 2025-03-31T16:04:14.812645Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 3/3 2025-03-31T16:04:14.812712Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-03-31T16:04:14.812748Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-03-31T16:04:14.812884Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-03-31T16:04:14.812921Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-31T16:04:14.812968Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:1 2025-03-31T16:04:14.812994Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:1 2025-03-31T16:04:14.813028Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-03-31T16:04:14.813052Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-31T16:04:14.813076Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:2 2025-03-31T16:04:14.813099Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:2 2025-03-31T16:04:14.813145Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2025-03-31T16:04:14.813173Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-03-31T16:04:14.813686Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-31T16:04:14.813748Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-03-31T16:04:14.813815Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-03-31T16:04:14.813856Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 
2025-03-31T16:04:14.813893Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-31T16:04:14.813924Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-31T16:04:14.813957Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:04:14.825926Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-31T16:04:14.826010Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:477:2438] 2025-03-31T16:04:14.826115Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 3 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944
TestWaitNotification: OK eventTxId 102
|74.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest
|74.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest
>> KqpImmediateEffects::UpsertConflictInteractiveTxAborted [GOOD]
Test command err: Trying to start YDB, gRPC: 7040, MsgBus: 17531 2025-03-31T16:03:54.625132Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005047572390312:2271];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:54.625181Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0011cb/r3tmp/tmpUufRkM/pdisk_1.dat 2025-03-31T16:03:55.520453Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:55.615058Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:03:55.620798Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:03:55.622399Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7040, node 1 2025-03-31T16:03:55.861692Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:03:55.861710Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:03:55.861716Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:03:55.861817Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17531 TClient is connected to server localhost:17531 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:03:57.124029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:57.175943Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:03:57.193931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:57.466794Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:57.716408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:57.811233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:59.626108Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488005047572390312:2271];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:59.626199Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:03:59.868374Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005069047228352:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:59.868491Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:00.238176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:04:00.277289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:04:00.342825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:04:00.376819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:04:00.436668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:04:00.476201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:04:00.587554Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005073342196163:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:00.587632Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:00.587991Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005073342196168:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:00.592085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:04:00.607496Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005073342196170:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:04:00.675599Z node 1 :TX_PROXY ERROR: Actor# [1:7488005073342196229:3462] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:04:01.739454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:04:01.803133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:04:01.879776Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 61299, MsgBus: 20057 2025-03-31T16:04:07.807323Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488005101401182440:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:07.807441Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0011cb/r3tmp/tmpW4thyG/pdisk_1.dat 2025-03-31T16:04:07.945912Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:07.976409Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:07.976516Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:07.977894Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61299, node 2 2025-03-31T16:04:08.023648Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:08.023691Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:08.023704Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:08.023876Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20057 TClient is connected to server localhost:20057 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:08.528893Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:08.536067Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:04:08.549541Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:08.638815Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:08.863536Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:08.980903Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:11.458758Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005118581053392:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:11.458867Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:11.503608Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:04:11.540985Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:04:11.619486Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:04:11.667416Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:04:11.743240Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:04:11.784185Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:04:11.848183Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005118581053912:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:11.848299Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:11.848549Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005118581053917:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:11.852879Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:04:11.864639Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488005118581053919:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:04:11.949482Z node 2 :TX_PROXY ERROR: Actor# [2:7488005118581053972:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:04:12.808130Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488005101401182440:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:12.808208Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:13.014095Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest
>> TestProgram::YqlKernel [GOOD]
Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 3 } Arguments { Id: 4 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\002\213\002?\000\001\235?\002\001\235?\004\001\002\000\t\211\002?\n\235?\000\001\002\000\t\251\000?\020\014Arg\000\000\t\211\002?\014?\020\002\000\t\211\006?\020\203\005@?\020?\020$BlockFunc\000\003?\034\006Add?\026?\026\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 3 } Arguments { Id: 4 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\002\213\002?\000\001\235?\002\001\235?\004\001\002\000\t\211\002?\n\235?\000\001\002\000\t\251\000?\020\014Arg\000\000\t\211\002?\014?\020\002\000\t\211\006?\020\203\005@?\020?\020$BlockFunc\000\003?\034\006Add?\026?\026\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:154;graph_constructed=digraph program {N0[shape=box, label="N3(28):{\"i\":\"3,4\",\"o\":\"15\",\"t\":\"Calculation\"}\nREMOVE:3,4"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N1(9):{\"i\":\"3\",\"p\":{\"address\":{\"name\":\"sum\",\"id\":3}},\"o\":\"3\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N2(9):{\"i\":\"4\",\"p\":{\"address\":{\"name\":\"vat\",\"id\":4}},\"o\":\"4\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N4(28):{\"i\":\"15\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N5[label="1"]; N6[shape=box, label="N0(4):{\"p\":{\"data\":[{\"name\":\"sum\",\"id\":3},{\"name\":\"vat\",\"id\":4}]},\"o\":\"3,4\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N6->N2->N4->N0->N5[color=red]; }; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":6}]},{"owner_id":4,"inputs":[{"from":6}]},{"owner_id":5,"inputs":[{"from":0}]},{"owner_id":6,"inputs":[]}],"nodes":{"2":{"p":{"i":"3","p":{"address":{"name":"sum","id":3}},"o":"3","t":"AssembleOriginalData"},"w":9,"id":2},"6":{"p":{"p":{"data":[{"name":"sum","id":3},{"name":"vat","id":4}]},"o":"3,4","t":"FetchOriginalData"},"w":4,"id":6},"5":{"p":{"i":"15","t":"Projection"},"w":28,"id":5},"4":{"p":{"i":"4","p":{"address":{"name":"vat","id":4}},"o":"4","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"3,4","o":"15","t":"Calculation"},"w":28,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9Int32TypeE;
>> TExportToS3Tests::ShouldRetryAtFinalStage [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest
>> TSchemeShardMoveTest::OneTable [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:04:11.109785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:04:11.109849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:04:11.109879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:04:11.109905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:04:11.109933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:04:11.109956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:04:11.109991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:04:11.110109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:04:11.110360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:04:11.264665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to 
console configs 2025-03-31T16:04:11.264755Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:11.284110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:04:11.284286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:04:11.294346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:04:11.339571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:04:11.339845Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:04:11.350251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:04:11.356029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:04:11.388351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:04:11.395132Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:04:11.395226Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:04:11.395406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:04:11.395499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:04:11.395541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:04:11.410159Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.438533Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:04:11.638932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:04:11.639258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.639499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:04:11.639790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:04:11.639851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.644992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:04:11.645193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 
2025-03-31T16:04:11.645490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.645570Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:04:11.645616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:04:11.645660Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:04:11.658093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.658205Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:04:11.658253Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:04:11.664676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.664775Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.664832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:04:11.664916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:04:11.671512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:04:11.678398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:04:11.678606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:04:11.679550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:04:11.679702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:04:11.679762Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:04:11.680031Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:04:11.680080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:04:11.680228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:04:11.680312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no 
IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:04:11.682798Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:04:11.682851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:04:11.683109Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:04:11.683187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:04:11.683500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:04:11.683561Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:04:11.683672Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:04:11.683739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:04:11.683788Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:04:11.683820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:04:11.683862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:04:11.683914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:04:11.683954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:04:11.683988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:04:11.684061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:04:11.684105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:04:11.684168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:04:11.686340Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:04:11.686502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:04:11.686556Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
1T16:04:15.298552Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 108:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2025-03-31T16:04:15.298595Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 108, done: 0, blocked: 1 2025-03-31T16:04:15.298685Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 108:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 108 Name: RenamePathBarrier }, at tablet# 72057594046678944 2025-03-31T16:04:15.298824Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 108:0 137 -> 129 2025-03-31T16:04:15.298952Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:04:15.299021Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-03-31T16:04:15.307256Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 108:0, at schemeshard: 72057594046678944 2025-03-31T16:04:15.307474Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 108:0, at schemeshard: 72057594046678944 2025-03-31T16:04:15.313206Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:04:15.313286Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 108, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:04:15.313474Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 108, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-03-31T16:04:15.313645Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:04:15.313692Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 108, path id: 1 2025-03-31T16:04:15.313741Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 108, path id: 4 2025-03-31T16:04:15.314157Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2025-03-31T16:04:15.314220Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 108:0 ProgressState at tablet: 72057594046678944 2025-03-31T16:04:15.314328Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 108:0, at schemeshard: 72057594046678944 2025-03-31T16:04:15.314374Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 108:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-03-31T16:04:15.314415Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 108:0 129 -> 240 2025-03-31T16:04:15.315818Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 23 PathOwnerId: 72057594046678944, cookie: 108 2025-03-31T16:04:15.315944Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 23 PathOwnerId: 72057594046678944, cookie: 108 
2025-03-31T16:04:15.315986Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 108 2025-03-31T16:04:15.316028Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 23 2025-03-31T16:04:15.316070Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:04:15.318112Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 108 2025-03-31T16:04:15.318224Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 108 2025-03-31T16:04:15.318253Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 108 2025-03-31T16:04:15.318290Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-03-31T16:04:15.318321Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-03-31T16:04:15.318411Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 108, ready parts: 0/1, is published: true 2025-03-31T16:04:15.325407Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2025-03-31T16:04:15.325491Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 108:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:04:15.325815Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-03-31T16:04:15.325965Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#108:0 progress is 1/1 2025-03-31T16:04:15.326015Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2025-03-31T16:04:15.326065Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#108:0 progress is 1/1 2025-03-31T16:04:15.326109Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2025-03-31T16:04:15.326152Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 108, ready parts: 1/1, is published: true 2025-03-31T16:04:15.326249Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:335:2314] message: TxId: 108 2025-03-31T16:04:15.326299Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2025-03-31T16:04:15.326352Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 108:0 2025-03-31T16:04:15.326394Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 108:0 2025-03-31T16:04:15.326507Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-03-31T16:04:15.329574Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 
2025-03-31T16:04:15.330298Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 2025-03-31T16:04:15.330858Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2025-03-31T16:04:15.330916Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [2:821:2779] TestWaitNotification: OK eventTxId 108 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2025-03-31T16:04:15.331896Z node 2 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2025-03-31T16:04:15.331969Z node 2 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409547 2025-03-31T16:04:15.356765Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 8589936886 } TabletId: 72075186233409546 State: 4 2025-03-31T16:04:15.356871Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409546, state: Offline, at schemeshard: 72057594046678944 2025-03-31T16:04:15.358518Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-03-31T16:04:15.358991Z node 2 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2025-03-31T16:04:15.361485Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-03-31T16:04:15.361827Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-03-31T16:04:15.362531Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-31T16:04:15.362584Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-03-31T16:04:15.362664Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:04:15.365173Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-03-31T16:04:15.365264Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-03-31T16:04:15.365389Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 Deleted tabletId 72075186233409546 2025-03-31T16:04:15.366369Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:04:15.366535Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 204us result status StatusSuccess 2025-03-31T16:04:15.366831Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess 
Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 23 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 23 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 21 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TestProgram::YqlKernelEndsWith >> DataShardVolatile::UpsertNoLocksArbiter-UseSink [GOOD] >> DataShardVolatile::UpsertBrokenLockArbiter+UseSink >> KqpImmediateEffects::AlreadyBrokenImmediateEffects [GOOD] >> KqpImmediateEffects::ConflictingKeyR1RWR2 >> TestProgram::YqlKernelEndsWith [GOOD] >> TestProgram::YqlKernelContains >> KqpImmediateEffects::ConflictingKeyW1RR2 [GOOD] >> KqpImmediateEffects::ConflictingKeyW1RWR2 >> TColumnEngineTestLogs::IndexWriteOverload |74.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |74.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelContains [GOOD] >> KqpEffects::UpdateOn_Params [GOOD] >> KqpEffects::UpdateOn_Select >> TestProgram::CountWithNulls >> TestProgram::CountWithNulls [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelEndsWith [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 7 } Arguments { Id: 9 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\001H\213\002\203\014\001\235?\002\001\235?\006\001\002\000\t\211\002?\014\235?\000\001\002\000\t\251\000?\022\014Arg\000\000\t\211\002?\016\235?\004\001\002\000\t\211\006?\034\203\005@?\022?\022$BlockFunc\000\003? \020EndsWith?\030?\030\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 7 } Arguments { Id: 9 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\001H\213\002\203\014\001\235?\002\001\235?\006\001\002\000\t\211\002?\014\235?\000\001\002\000\t\251\000?\022\014Arg\000\000\t\211\002?\016\235?\004\001\002\000\t\211\006?\034\203\005@?\022?\022$BlockFunc\000\003? 
\020EndsWith?\030?\030\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:154;graph_constructed=digraph program {N0[shape=box, label="N3(28):{\"i\":\"7,9\",\"o\":\"15\",\"t\":\"Calculation\"}\nREMOVE:7,9"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N1(9):{\"i\":\"7\",\"p\":{\"address\":{\"name\":\"string\",\"id\":7}},\"o\":\"7\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N2(9):{\"i\":\"9\",\"p\":{\"address\":{\"name\":\"substring\",\"id\":9}},\"o\":\"9\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N4(28):{\"i\":\"15\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N5[label="1"]; N6[shape=box, label="N0(4):{\"p\":{\"data\":[{\"name\":\"string\",\"id\":7},{\"name\":\"substring\",\"id\":9}]},\"o\":\"7,9\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N6->N2->N4->N0->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":6}]},{"owner_id":4,"inputs":[{"from":6}]},{"owner_id":5,"inputs":[{"from":0}]},{"owner_id":6,"inputs":[]}],"nodes":{"2":{"p":{"i":"7","p":{"address":{"name":"string","id":7}},"o":"7","t":"AssembleOriginalData"},"w":9,"id":2},"6":{"p":{"p":{"data":[{"name":"string","id":7},{"name":"substring","id":9}]},"o":"7,9","t":"FetchOriginalData"},"w":4,"id":6},"5":{"p":{"i":"15","t":"Projection"},"w":28,"id":5},"4":{"p":{"i":"9","p":{"address":{"name":"substring","id":9}},"o":"9","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"7,9","o":"15","t":"Calculation"},"w":28,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9UInt8TypeE; >> TExportToS3Tests::ShouldCheckQuotas [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_TurnOff [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_Generations >> TestProgram::SimpleFunction [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelContains [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 7 } Arguments { Id: 9 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\005@\213\002\203\014\001\235?\002\001\235?\006\001\002\000\t\211\002?\014\235?\000\001\002\000\t\251\000?\022\014Arg\000\000\t\211\002?\016\235?\004\001\002\000\t\211\006?\034\203\005@?\022?\022$BlockFunc\000\003? 
\034StringContains?\030?\030\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 7 } Arguments { Id: 9 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\005@\213\002\203\014\001\235?\002\001\235?\006\001\002\000\t\211\002?\014\235?\000\001\002\000\t\251\000?\022\014Arg\000\000\t\211\002?\016\235?\004\001\002\000\t\211\006?\034\203\005@?\022?\022$BlockFunc\000\003? \034StringContains?\030?\030\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:154;graph_constructed=digraph program {N0[shape=box, label="N3(28):{\"i\":\"7,9\",\"o\":\"15\",\"t\":\"Calculation\"}\nREMOVE:7,9"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N1(9):{\"i\":\"7\",\"p\":{\"address\":{\"name\":\"string\",\"id\":7}},\"o\":\"7\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N2(9):{\"i\":\"9\",\"p\":{\"address\":{\"name\":\"substring\",\"id\":9}},\"o\":\"9\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N4(28):{\"i\":\"15\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N5[label="1"]; N6[shape=box, label="N0(4):{\"p\":{\"data\":[{\"name\":\"string\",\"id\":7},{\"name\":\"substring\",\"id\":9}]},\"o\":\"7,9\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N6->N2->N4->N0->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":6}]},{"owner_id":4,"inputs":[{"from":6}]},{"owner_id":5,"inputs":[{"from":0}]},{"owner_id":6,"inputs":[]}],"nodes":{"2":{"p":{"i":"7","p":{"address":{"name":"string","id":7}},"o":"7","t":"AssembleOriginalData"},"w":9,"id":2},"6":{"p":{"p":{"data":[{"name":"string","id":7},{"name":"substring","id":9}]},"o":"7,9","t":"FetchOriginalData"},"w":4,"id":6},"5":{"p":{"i":"15","t":"Projection"},"w":28,"id":5},"4":{"p":{"i":"9","p":{"address":{"name":"substring","id":9}},"o":"9","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"7,9","o":"15","t":"Calculation"},"w":28,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9UInt8TypeE; 
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9UInt8TypeE; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::CountWithNulls [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { GroupBy { Aggregates { Column { Id: 10001 } Function { Id: 2 Arguments { Id: 2 } } } } } Command { Projection { Columns { Id: 10001 } } } ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { GroupBy { Aggregates { Column { Id: 10001 } Function { Id: 2 Arguments { Id: 2 } } } } } Command { Projection { Columns { Id: 10001 } } } ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:154;graph_constructed=digraph program {N0[shape=box, label="N2(17):{\"i\":\"2\",\"o\":\"10001\",\"t\":\"Calculation\"}\nREMOVE:2"]; N2 -> N0[label="1"]; N1[shape=box, label="N0(2):{\"p\":{\"data\":[{\"name\":\"uid\",\"id\":2}]},\"o\":\"2\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N2[shape=box, label="N1(7):{\"i\":\"2\",\"p\":{\"address\":{\"name\":\"uid\",\"id\":2}},\"o\":\"2\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N1 -> N2[label="1"]; N3[shape=box, label="N3(17):{\"i\":\"10001\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N3[label="1"]; N1->N2->N0->N3[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[{"from":0}]}],"nodes":{"1":{"p":{"p":{"data":[{"name":"uid","id":2}]},"o":"2","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"i":"10001","t":"Projection"},"w":17,"id":3},"2":{"p":{"i":"2","p":{"address":{"name":"uid","id":2}},"o":"2","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"2","o":"10001","t":"Calculation"},"w":17,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10UInt64TypeE; >> KqpImmediateEffects::TxWithReadAtTheEnd+UseSink [GOOD] >> KqpImmediateEffects::TxWithReadAtTheEnd-UseSink >> BackupRestoreS3::RestoreViewQueryText ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::SimpleFunction [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Function { Id: 8 Arguments { Id: 2 } } } } Command { Projection { Columns { Id: 15 } } } ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Function { Id: 8 Arguments { Id: 2 } } } } Command { Projection { Columns { Id: 15 } } } ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:154;graph_constructed=digraph program {N0[shape=box, label="N2(17):{\"i\":\"2\",\"o\":\"15\",\"t\":\"Calculation\"}\nREMOVE:2"]; N2 -> N0[label="1"]; N1[shape=box, 
label="N0(2):{\"p\":{\"data\":[{\"name\":\"uid\",\"id\":2}]},\"o\":\"2\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N2[shape=box, label="N1(7):{\"i\":\"2\",\"p\":{\"address\":{\"name\":\"uid\",\"id\":2}},\"o\":\"2\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N1 -> N2[label="1"]; N3[shape=box, label="N3(17):{\"i\":\"15\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N3[label="1"]; N1->N2->N0->N3[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[{"from":0}]}],"nodes":{"1":{"p":{"p":{"data":[{"name":"uid","id":2}]},"o":"2","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"i":"15","t":"Projection"},"w":17,"id":3},"2":{"p":{"i":"2","p":{"address":{"name":"uid","id":2}},"o":"2","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"2","o":"15","t":"Calculation"},"w":17,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10UInt64TypeE; >> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWoIndexes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ShouldRetryAtFinalStage [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:03:29.499966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:03:29.500050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:03:29.500101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:03:29.500134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:03:29.500171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:03:29.500196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:03:29.500249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:03:29.500317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, 
Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:03:29.500617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:03:29.658812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:03:29.658888Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:29.665614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:03:29.665739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:03:29.665895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:03:29.681919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:03:29.682128Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:03:29.683063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:29.683344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:03:29.689332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:29.690905Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:29.691000Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:29.691200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:03:29.691240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:29.691277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:03:29.691433Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:03:29.697689Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:03:30.031404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:03:30.031590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:30.031786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:03:30.032046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:03:30.032109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:30.034188Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:30.034342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:03:30.034516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:30.034559Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:03:30.034613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:03:30.034639Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:03:30.037022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:30.037085Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:03:30.037129Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:03:30.038919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:30.038993Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:30.039038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:30.039120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:03:30.043030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:03:30.044918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:03:30.045129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:03:30.046258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:30.046395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:03:30.046465Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:30.046719Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:03:30.046776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose 
HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:30.047005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:03:30.047120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:03:30.052435Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:30.052520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:30.052736Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:30.052788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:03:30.053027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:30.053076Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:03:30.053177Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:03:30.053223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:30.053281Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:03:30.053312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:30.053363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:03:30.053425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:30.053488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:03:30.053521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:03:30.053595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:03:30.053633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:03:30.053663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:03:30.056396Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:03:30.056564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:03:30.056611Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Table, is column=0, is olap=0 2025-03-31T16:04:05.548334Z node 4 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 2: RowCount 2, DataSize 70 2025-03-31T16:04:05.548440Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:2 data size 0 row count 0 2025-03-31T16:04:05.548476Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=0, is column=0, is olap=0 2025-03-31T16:04:05.548503Z node 4 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409547 followerId=0, pathId 4: RowCount 0, DataSize 0, with borrowed parts 2025-03-31T16:04:05.560685Z node 4 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-03-31T16:04:09.316002Z node 4 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 70 rowCount 2 cpuUsage 0.0026 2025-03-31T16:04:09.337869Z node 4 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.002 2025-03-31T16:04:09.369161Z node 4 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2025-03-31T16:04:09.369339Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 70 row count 2 2025-03-31T16:04:09.369427Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Table, is column=0, is olap=0 2025-03-31T16:04:09.369482Z node 4 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 2: RowCount 2, DataSize 70 2025-03-31T16:04:09.369610Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:2 data size 0 row count 0 2025-03-31T16:04:09.369659Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=0, is column=0, is olap=0 2025-03-31T16:04:09.369692Z node 4 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409547 followerId=0, pathId 4: RowCount 0, DataSize 0, with borrowed parts 2025-03-31T16:04:09.380579Z node 4 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-03-31T16:04:13.143124Z node 4 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 70 rowCount 2 cpuUsage 0.0019 2025-03-31T16:04:13.165312Z node 4 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 
72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0017 2025-03-31T16:04:13.210683Z node 4 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2025-03-31T16:04:13.210890Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 70 row count 2 2025-03-31T16:04:13.210973Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Table, is column=0, is olap=0 2025-03-31T16:04:13.211028Z node 4 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 2: RowCount 2, DataSize 70 2025-03-31T16:04:13.211187Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:2 data size 0 row count 0 2025-03-31T16:04:13.211259Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=0, is column=0, is olap=0 2025-03-31T16:04:13.211307Z node 4 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409547 followerId=0, pathId 4: RowCount 0, DataSize 0, with borrowed parts 2025-03-31T16:04:13.223408Z node 4 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-03-31T16:04:16.267455Z node 4 :DATASHARD_BACKUP DEBUG: [Export] [s3] Bootstrap: self# [4:575:2533], attempt# 1 2025-03-31T16:04:16.307456Z node 4 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvReset: self# [4:574:2532] 2025-03-31T16:04:16.323413Z node 4 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvReady: self# [4:575:2533], sender# [4:574:2532] 2025-03-31T16:04:16.323551Z node 4 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [4:574:2532] 2025-03-31T16:04:16.323742Z node 4 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [4:575:2533], sender# [4:574:2532], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 0 Checksum: } 2025-03-31T16:04:16.323979Z node 4 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvDataShard::TEvS3Upload: self# [4:575:2533], upload# { Id: 1 Status: Complete Error: (empty maybe) Parts: [6e3e0a41fdab8add833862f1bd2954c3,1d8dd09e584ce6a47582a31b591900e2,d41d8cd98f00b204e9800998ecf8427e] } REQUEST: POST /data_00.csv?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:6095 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 493D6830-E330-48EB-8F4A-335398C7C6E1 amz-sdk-request: attempt=1 content-length: 459 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /data_00.csv / uploadId=1 2025-03-31T16:04:16.335433Z node 4 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvCompleteMultipartUploadResponse: self# [4:575:2533], result# 2025-03-31T16:04:16.335734Z node 4 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [4:574:2532], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 
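The export block above streams table data to the S3 mock and finishes with a CompleteMultipartUpload (the POST /data_00.csv?uploadId=1 request, acknowledged as TEvCompleteMultipartUploadResponse). A sketch of the same three-step multipart protocol using boto3 — the endpoint, credentials, and bucket name are placeholders; only the port 6095 and the key data_00.csv mirror the log:

    import boto3

    # Assumption: the test's S3 mock listened on localhost:6095; "export" is an
    # illustrative bucket name, not taken from the log.
    s3 = boto3.client("s3", endpoint_url="http://localhost:6095",
                      aws_access_key_id="test", aws_secret_access_key="test")

    upload = s3.create_multipart_upload(Bucket="export", Key="data_00.csv")
    part = s3.upload_part(Bucket="export", Key="data_00.csv", PartNumber=1,
                          UploadId=upload["UploadId"], Body=b"key,value\n1,10\n")

    # The step the log records as the final POST: submit the collected part
    # ETags to assemble the object.
    s3.complete_multipart_upload(
        Bucket="export", Key="data_00.csv", UploadId=upload["UploadId"],
        MultipartUpload={"Parts": [{"ETag": part["ETag"], "PartNumber": 1}]},
    )
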
2025-03-31T16:04:16.347872Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 448 RawX2: 17179871601 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-03-31T16:04:16.347949Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976710759, tablet: 72075186233409547, partId: 0 2025-03-31T16:04:16.348138Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944, message: Source { RawX1: 448 RawX2: 17179871601 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-03-31T16:04:16.348290Z node 4 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 448 RawX2: 17179871601 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-03-31T16:04:16.348396Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710759:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-31T16:04:16.348448Z node 4 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-03-31T16:04:16.348495Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976710759:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-03-31T16:04:16.348547Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 129 -> 240 2025-03-31T16:04:16.348746Z node 4 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 281474976710759:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:04:16.356047Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-03-31T16:04:16.356657Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-03-31T16:04:16.356730Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710759:0 ProgressState 2025-03-31T16:04:16.356890Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 1/1 2025-03-31T16:04:16.356932Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-03-31T16:04:16.356988Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 1/1 2025-03-31T16:04:16.357029Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-03-31T16:04:16.357074Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 1/1, is published: true 
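The records above and below show the operation-completion bookkeeping: each shard reports TEvSchemaChanged, the txState moves 129 -> 240, "progress is 1/1" is logged per part, and notification fires only once all parts are done and the publication is acknowledged. A toy model of that readiness counting — an illustration only, not the NKikimr implementation:

    from dataclasses import dataclass, field

    @dataclass
    class Operation:
        """Toy model of the 'ready parts m/n' counting seen in the log."""
        parts: int
        done: set = field(default_factory=set)
        published: bool = False

        def part_done(self, part_id: int) -> None:
            self.done.add(part_id)
            print(f"progress is {len(self.done)}/{self.parts}")

        def is_ready_to_notify(self) -> bool:
            # Mirrors 'IsReadyToNotify ... ready parts: m/n, is published: ...'
            return len(self.done) == self.parts and self.published

    op = Operation(parts=1)
    op.part_done(0)       # progress is 1/1
    op.published = True   # publication acked (TTxAckPublishToSchemeBoard)
    assert op.is_ready_to_notify()
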
2025-03-31T16:04:16.357185Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [4:123:2149] message: TxId: 281474976710759 2025-03-31T16:04:16.357268Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-03-31T16:04:16.357315Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710759:0 2025-03-31T16:04:16.357353Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710759:0 2025-03-31T16:04:16.357495Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-03-31T16:04:16.364034Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710759 2025-03-31T16:04:16.364151Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710759 2025-03-31T16:04:16.369729Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-31T16:04:16.369801Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [4:596:2550] TestWaitNotification: OK eventTxId 102 >> BackupRestore::RestoreViewQueryText >> KqpInplaceUpdate::Negative_SingleRowWithKeyCast+UseSink [GOOD] >> KqpInplaceUpdate::Negative_SingleRowWithKeyCast-UseSink >> KqpSysColV1::StreamSelectRowById >> KqpEffects::InsertAbort_Params_Success [GOOD] >> KqpEffects::InsertAbort_Select_Conflict+UseSink |74.6%| [TA] $(B)/ydb/core/tx/schemeshard/ut_move/test-results/unittest/{meta.json ... results_accumulator.log} >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeTable >> KqpSystemView::NodesRange2 >> KqpSysColV0::InnerJoinSelect >> KqpImmediateEffects::Replace [GOOD] >> KqpImmediateEffects::MultipleEffectsWithIndex >> KqpSysColV1::StreamInnerJoinSelect >> KqpSystemView::QueryStatsSimple ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ShouldCheckQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:03:32.549942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:03:32.550047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:03:32.550085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:03:32.550120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:03:32.550170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:03:32.550200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:03:32.550280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, 
WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:03:32.550367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:03:32.550732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:03:32.643808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:03:32.643882Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:32.651140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:03:32.651269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:03:32.651479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:03:32.657400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:03:32.657648Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:03:32.658364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:32.658638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:03:32.661093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:32.662711Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:32.662777Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:32.662996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:03:32.663048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:32.663115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:03:32.663305Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:03:32.685778Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:03:32.823630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:03:32.823916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:32.824158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:03:32.824353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:03:32.824408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:32.827833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:32.827971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:03:32.828174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:32.828232Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:03:32.828284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:03:32.828319Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:03:32.833372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:32.833432Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:03:32.833469Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:03:32.836661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:32.836720Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:32.836753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:32.836788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:03:32.840251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:03:32.842449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:03:32.842698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:03:32.843830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:32.843987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:03:32.844057Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:32.844356Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:03:32.844428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:32.844598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:03:32.844668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:03:32.846745Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:32.846808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:32.846988Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:32.847037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:03:32.847278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:32.847355Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:03:32.847467Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:03:32.847512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:32.847557Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:03:32.847594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:32.847639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:03:32.847685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:32.847742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:03:32.847779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:03:32.847854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:03:32.847896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:03:32.847934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:03:32.849719Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:03:32.849830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:03:32.849933Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
57594046678944 2025-03-31T16:04:17.546542Z node 5 :FLAT_TX_SCHEMESHARD INFO: TRmDir ProgressState, opId: 281474976720762:0, at schemeshard: 72057594046678944 2025-03-31T16:04:17.546626Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976720762 ready parts: 1/1 2025-03-31T16:04:17.546785Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 281474976720762 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:04:17.550446Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 281474976720762 2025-03-31T16:04:17.550579Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 281474976720762 2025-03-31T16:04:17.550619Z node 5 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976720762 2025-03-31T16:04:17.550681Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976720762, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2025-03-31T16:04:17.550727Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-03-31T16:04:17.551898Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976720762 2025-03-31T16:04:17.552027Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976720762 2025-03-31T16:04:17.552074Z node 5 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976720762 2025-03-31T16:04:17.552114Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976720762, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 7 2025-03-31T16:04:17.552155Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-03-31T16:04:17.552247Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720762, ready parts: 0/1, is published: true 2025-03-31T16:04:17.555280Z node 5 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-03-31T16:04:17.556233Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976720762, at schemeshard: 72057594046678944 2025-03-31T16:04:17.556322Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720762, ready parts: 0/1, is published: true 2025-03-31T16:04:17.556395Z node 5 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976720762, at schemeshard: 72057594046678944 2025-03-31T16:04:17.557412Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976720762:4294967295 from tablet: 72057594046678944 to tablet: 
72057594046316545 cookie: 0:281474976720762 msg type: 269090816
2025-03-31T16:04:17.557574Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976720762, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 281474976720762 at step: 5000007
FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976720762 at step: 5000007
2025-03-31T16:04:17.558184Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976720762
2025-03-31T16:04:17.559047Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000007, transactions count in step: 1, at schemeshard: 72057594046678944
2025-03-31T16:04:17.559210Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976720762 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 21474838637 } } Step: 5000007 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-03-31T16:04:17.559285Z node 5 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976720762:0, step: 5000007, at schemeshard: 72057594046678944
2025-03-31T16:04:17.559439Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976720762:0, at schemeshard: 72057594046678944
2025-03-31T16:04:17.559539Z node 5 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976720762:0 progress is 1/1
2025-03-31T16:04:17.559601Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720762 ready parts: 1/1
2025-03-31T16:04:17.559674Z node 5 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976720762:0 progress is 1/1
2025-03-31T16:04:17.559721Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720762 ready parts: 1/1
2025-03-31T16:04:17.559828Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-03-31T16:04:17.559932Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2
2025-03-31T16:04:17.559983Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720762, ready parts: 1/1, is published: false
2025-03-31T16:04:17.560056Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720762 ready parts: 1/1
2025-03-31T16:04:17.560112Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976720762:0
2025-03-31T16:04:17.560159Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976720762:0
2025-03-31T16:04:17.560246Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3
2025-03-31T16:04:17.560303Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976720762, publications: 2, subscribers: 1
2025-03-31T16:04:17.560353Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976720762, [OwnerId: 72057594046678944, LocalPathId: 1], 11
2025-03-31T16:04:17.560393Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976720762, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615
2025-03-31T16:04:17.561261Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976720762
2025-03-31T16:04:17.563953Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-31T16:04:17.564025Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976720762, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-31T16:04:17.564188Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976720762, path id: [OwnerId: 72057594046678944, LocalPathId: 3]
2025-03-31T16:04:17.564331Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-31T16:04:17.564374Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:335:2311], at schemeshard: 72057594046678944, txId: 281474976720762, path id: 1
2025-03-31T16:04:17.564415Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:335:2311], at schemeshard: 72057594046678944, txId: 281474976720762, path id: 3
FAKE_COORDINATOR: Erasing txId 281474976720762
2025-03-31T16:04:17.565400Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976720762
2025-03-31T16:04:17.565525Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976720762
2025-03-31T16:04:17.565573Z node 5 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976720762
2025-03-31T16:04:17.565638Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976720762, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11
2025-03-31T16:04:17.565700Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3
2025-03-31T16:04:17.566454Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976720762
2025-03-31T16:04:17.566561Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976720762
2025-03-31T16:04:17.566599Z node 5 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976720762
2025-03-31T16:04:17.566633Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976720762, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615
2025-03-31T16:04:17.566670Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2
2025-03-31T16:04:17.566765Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976720762, subscribers: 1
2025-03-31T16:04:17.566823Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [5:289:2276]
2025-03-31T16:04:17.574158Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976720762
2025-03-31T16:04:17.574606Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976720762
2025-03-31T16:04:17.574733Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976720762
2025-03-31T16:04:17.574808Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976720762
2025-03-31T16:04:17.574892Z node 5 :EXPORT DEBUG: TExport::TTxProgress: DoExecute
2025-03-31T16:04:17.574929Z node 5 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976720762
2025-03-31T16:04:17.574971Z node 5 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976720762, id# 102, itemIdx# 4294967295
2025-03-31T16:04:17.577355Z node 5 :EXPORT DEBUG: TExport::TTxProgress: DoComplete
2025-03-31T16:04:17.577473Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult
2025-03-31T16:04:17.577554Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [5:706:2646]
TestWaitNotification: OK eventTxId 102
|74.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::ReadSuccess
|74.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit
|74.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit
>> KqpImmediateEffects::TxWithWriteAtTheEnd+UseSink [GOOD]
>> KqpImmediateEffects::TxWithWriteAtTheEnd-UseSink
>> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_Generations [GOOD]
>> TFlatTableExecutorGC::TestGCVectorDeduplicaton [GOOD]
>> KqpPg::TableSelect-useSink [GOOD]
>> KqpPg::TableInsert+useSink
>> KqpInplaceUpdate::Negative_SingleRowListFromRange+UseSink [GOOD]
>> KqpSystemView::PartitionStatsRange3
>> KqpInplaceUpdate::Negative_SingleRowListFromRange-UseSink
>> KqpImmediateEffects::InteractiveTxWithReadAtTheEnd-UseSink [GOOD]
>> KqpImmediateEffects::InsertExistingKey+UseSink [GOOD]
>> KqpImmediateEffects::InteractiveTxWithWriteAtTheEnd
>> KqpImmediateEffects::InsertExistingKey-UseSink
>> TVersions::Wreck1 [GOOD]
>> KqpEffects::InsertAbort_Literal_Conflict-UseSink [GOOD]
>> TVersions::Wreck1Reverse
|74.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest
|74.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest
|74.6%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_move/test-results/unittest/{meta.json ...
results_accumulator.log} |74.6%| [LD] {RESULT} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit >> KqpImmediateEffects::UpsertExistingKey [GOOD] >> TSubscriberCombinationsTest::CombinationsRootDomain [GOOD] >> TSubscriberCombinationsTest::CombinationsMigratedPath ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> TFlatTableExecutorGC::TestGCVectorDeduplicaton [GOOD] Test command err: Slices{ [0, 39] } Part{[1:2:3:0:0:0:0] eph 0, 3750b 40r} data 2915b + FlatIndex{7} Label{3 rev 3, 64b} 2 rec | Page Row Bytes (Uint32, Uint32) | 3 0 620b {0, 1} | 3 39 620b {5, 7} + BTreeIndex{Empty, PageId: 3 RowCount: 40 DataSize: 620 GroupDataSize: 3130 ErasedRowCount: 0 LevelCount: 0 IndexSize: 0 + Rows{3} Label{34 rev 1, 620b}, [0, +40)row | ERowOp 1: {0, 1} | ERowOp 1: {0, 3} | ERowOp 1: {0, 4} | ERowOp 1: {0, 6} | ERowOp 1: {0, 7} | ERowOp 1: {0, 8} | ERowOp 1: {0, 10} | ERowOp 1: {1, 1} | ERowOp 1: {1, 3} | ERowOp 1: {1, 4} | ERowOp 1: {1, 6} | ERowOp 1: {1, 7} | ERowOp 1: {1, 8} | ERowOp 1: {1, 10} | ERowOp 1: {2, 1} | ERowOp 1: {2, 3} | ERowOp 1: {2, 4} | ERowOp 1: {2, 6} | ERowOp 1: {2, 7} | ERowOp 1: {2, 8} | ERowOp 1: {2, 10} | ERowOp 1: {3, 1} | ERowOp 1: {3, 3} | ERowOp 1: {3, 4} | ERowOp 1: {3, 6} | ERowOp 1: {3, 7} | ERowOp 1: {3, 8} | ERowOp 1: {3, 10} | ERowOp 1: {4, 1} | ERowOp 1: {4, 3} | ERowOp 1: {4, 4} | ERowOp 1: {4, 6} | ERowOp 1: {4, 7} | ERowOp 1: {4, 8} | ERowOp 1: {4, 10} | ERowOp 1: {5, 1} | ERowOp 1: {5, 3} | ERowOp 1: {5, 4} | ERowOp 1: {5, 6} | ERowOp 1: {5, 7} Slices{ [0, 39] } Part{[1:2:3:0:0:0:0] eph 0, 5129b 40r} data 5373b + FlatIndex{3} Label{3 rev 3, 64b} 2 rec | Page Row Bytes (Uint32, Uint32) | 1 0 2466b {0, 1} | 1 39 2466b {5, 7} + BTreeIndex{Empty, PageId: 1 RowCount: 40 DataSize: 2466 GroupDataSize: 2663 ErasedRowCount: 0 LevelCount: 0 IndexSize: 0 + Rows{1} Label{14 rev 1, 2466b}, [0, +40)row | ERowOp 1: {0, 1} {Set 2 Uint32 : 0}, {Set 3 Uint64 : 0}, {Set 4 String : xxxxxxxxxx_0} | ERowOp 1: {0, 3} {Set 2 Uint32 : 1}, {Set 3 Uint64 : 1}, {Set 4 String : xxxxxxxxxx_1} | ERowOp 1: {0, 4} {Set 2 Uint32 : 2}, {Set 3 Uint64 : 2}, {Set 4 String : xxxxxxxxxx_2} | ERowOp 1: {0, 6} {Set 2 Uint32 : 3}, {Set 3 Uint64 : 3}, {Set 4 String : xxxxxxxxxx_3} | ERowOp 1: {0, 7} {Set 2 Uint32 : 4}, {Set 3 Uint64 : 4}, {Set 4 String : xxxxxxxxxx_4} | ERowOp 1: {0, 8} {Set 2 Uint32 : 5}, {Set 3 Uint64 : 5}, {Set 4 String : xxxxxxxxxx_5} | ERowOp 1: {0, 10} {Set 2 Uint32 : 6}, {Set 3 Uint64 : 6}, {Set 4 String : xxxxxxxxxx_6} | ERowOp 1: {1, 1} {Set 2 Uint32 : 7}, {Set 3 Uint64 : 7}, {Set 4 String : xxxxxxxxxx_7} | ERowOp 1: {1, 3} {Set 2 Uint32 : 8}, {Set 3 Uint64 : 8}, {Set 4 String : xxxxxxxxxx_8} | ERowOp 1: {1, 4} {Set 2 Uint32 : 9}, {Set 3 Uint64 : 9}, {Set 4 String : xxxxxxxxxx_9} | ERowOp 1: {1, 6} {Set 2 Uint32 : 10}, {Set 3 Uint64 : 10}, {Set 4 String : xxxxxxxxxx_10} | ERowOp 1: {1, 7} {Set 2 Uint32 : 11}, {Set 3 Uint64 : 11}, {Set 4 String : xxxxxxxxxx_11} | ERowOp 1: {1, 8} {Set 2 Uint32 : 12}, {Set 3 Uint64 : 12}, {Set 4 String : xxxxxxxxxx_12} | ERowOp 1: {1, 10} {Set 2 Uint32 : 13}, {Set 3 Uint64 : 13}, {Set 4 String : xxxxxxxxxx_13} | ERowOp 1: {2, 1} {Set 2 Uint32 : 14}, {Set 3 Uint64 : 14}, {Set 4 String : xxxxxxxxxx_14} | ERowOp 1: {2, 3} {Set 2 Uint32 : 15}, {Set 3 Uint64 : 15}, {Set 4 String : xxxxxxxxxx_15} | ERowOp 1: {2, 4} {Set 2 Uint32 : 16}, {Set 3 Uint64 : 16}, {Set 4 String : xxxxxxxxxx_16} | ERowOp 1: {2, 6} {Set 2 Uint32 : 17}, {Set 3 Uint64 : 17}, {Set 4 String : xxxxxxxxxx_17} | ERowOp 1: {2, 7} {Set 2 
Uint32 : 18}, {Set 3 Uint64 : 18}, {Set 4 String : xxxxxxxxxx_18} | ERowOp 1: {2, 8} {Set 2 Uint32 : 19}, {Set 3 Uint64 : 19}, {Set 4 String : xxxxxxxxxx_19} | ERowOp 1: {2, 10} {Set 2 Uint32 : 20}, {Set 3 Uint64 : 20}, {Set 4 String : xxxxxxxxxx_20} | ERowOp 1: {3, 1} {Set 2 Uint32 : 21}, {Set 3 Uint64 : 21}, {Set 4 String : xxxxxxxxxx_21} | ERowOp 1: {3, 3} {Set 2 Uint32 : 22}, {Set 3 Uint64 : 22}, {Set 4 String : xxxxxxxxxx_22} | ERowOp 1: {3, 4} {Set 2 Uint32 : 23}, {Set 3 Uint64 : 23}, {Set 4 String : xxxxxxxxxx_23} | ERowOp 1: {3, 6} {Set 2 Uint32 : 24}, {Set 3 Uint64 : 24}, {Set 4 String : xxxxxxxxxx_24} | ERowOp 1: {3, 7} {Set 2 Uint32 : 25}, {Set 3 Uint64 : 25}, {Set 4 String : xxxxxxxxxx_25} | ERowOp 1: {3, 8} {Set 2 Uint32 : 26}, {Set 3 Uint64 : 26}, {Set 4 String : xxxxxxxxxx_26} | ERowOp 1: {3, 10} {Set 2 Uint32 : 27}, {Set 3 Uint64 : 27}, {Set 4 String : xxxxxxxxxx_27} | ERowOp 1: {4, 1} {Set 2 Uint32 : 28}, {Set 3 Uint64 : 28}, {Set 4 String : xxxxxxxxxx_28} | ERowOp 1: {4, 3} {Set 2 Uint32 : 29}, {Set 3 Uint64 : 29}, {Set 4 String : xxxxxxxxxx_29} | ERowOp 1: {4, 4} {Set 2 Uint32 : 30}, {Set 3 Uint64 : 30}, {Set 4 String : xxxxxxxxxx_30} | ERowOp 1: {4, 6} {Set 2 Uint32 : 31}, {Set 3 Uint64 : 31}, {Set 4 String : xxxxxxxxxx_31} | ERowOp 1: {4, 7} {Set 2 Uint32 : 32}, {Set 3 Uint64 : 32}, {Set 4 String : xxxxxxxxxx_32} | ERowOp 1: {4, 8} {Set 2 Uint32 : 33}, {Set 3 Uint64 : 33}, {Set 4 String : xxxxxxxxxx_33} | ERowOp 1: {4, 10} {Set 2 Uint32 : 34}, {Set 3 Uint64 : 34}, {Set 4 String : xxxxxxxxxx_34} | ERowOp 1: {5, 1} {Set 2 Uint32 : 35}, {Set 3 Uint64 : 35}, {Set 4 String : xxxxxxxxxx_35} | ERowOp 1: {5, 3} {Set 2 Uint32 : 36}, {Set 3 Uint64 : 36}, {Set 4 String : xxxxxxxxxx_36} | ERowOp 1: {5, 4} {Set 2 Uint32 : 37}, {Set 3 Uint64 : 37}, {Set 4 String : xxxxxxxxxx_37} | ERowOp 1: {5, 6} {Set 2 Uint32 : 38}, {Set 3 Uint64 : 38}, {Set 4 String : xxxxxxxxxx_38} | ERowOp 1: {5, 7} {Set 2 Uint32 : 39}, {Set 3 Uint64 : 39}, {Set 4 String : xxxxxxxxxx_39} Slices{ [0, 39] } Part{[1:2:3:0:0:0:0] eph 0, 8474b 40r} data 6832b + FlatIndex{15} Label{3 rev 3, 64b} 2 rec | Page Row Bytes (Uint32, Uint32) | 7 0 1036b {0, 1} | 7 39 1036b {5, 7} + BTreeIndex{Empty, PageId: 7 RowCount: 40 DataSize: 1036 GroupDataSize: 7438 ErasedRowCount: 0 LevelCount: 0 IndexSize: 0 + Rows{7} Label{74 rev 1, 1036b}, [0, +40)row | ERowOp 1: {0, 1} | ERowOp 1: {0, 3} | ERowOp 1: {0, 4} | ERowOp 1: {0, 6} | ERowOp 1: {0, 7} | ERowOp 1: {0, 8} | ERowOp 1: {0, 10} | ERowOp 1: {1, 1} | ERowOp 1: {1, 3} | ERowOp 1: {1, 4} | ERowOp 1: {1, 6} | ERowOp 1: {1, 7} | ERowOp 1: {1, 8} | ERowOp 1: {1, 10} | ERowOp 1: {2, 1} | ERowOp 1: {2, 3} | ERowOp 1: {2, 4} | ERowOp 1: {2, 6} | ERowOp 1: {2, 7} | ERowOp 1: {2, 8} | ERowOp 1: {2, 10} | ERowOp 1: {3, 1} | ERowOp 1: {3, 3} | ERowOp 1: {3, 4} | ERowOp 1: {3, 6} | ERowOp 1: {3, 7} | ERowOp 1: {3, 8} | ERowOp 1: {3, 10} | ERowOp 1: {4, 1} | ERowOp 1: {4, 3} | ERowOp 1: {4, 4} | ERowOp 1: {4, 6} | ERowOp 1: {4, 7} | ERowOp 1: {4, 8} | ERowOp 1: {4, 10} | ERowOp 1: {5, 1} | ERowOp 1: {5, 3} | ERowOp 1: {5, 4} | ERowOp 1: {5, 6} | ERowOp 1: {5, 7} Slices{ [0, 39] } Part{[1:2:3:0:0:0:0] eph 0, 2430b 40r} data 4017b + FlatIndex{20} Label{3 rev 3, 558b} 21 rec | Page Row Bytes (Uint32, Uint32) | 0 0 120b {0, 1} | 1 2 120b {0, 4} | 2 4 120b {0, 7} | 3 6 120b {0, 10} | 4 8 120b {1, 3} | 5 10 122b {1, 6} | 6 12 122b {1, 8} | 7 14 122b {2, NULL} | 8 16 122b {2, 4} | 9 18 122b {2, 7} | 10 20 122b {2, 10} | 11 22 122b {3, 3} | 12 24 122b {3, 6} | 13 26 122b {3, 8} | 14 28 122b {4, 
NULL} | 15 30 122b {4, 4} | 16 32 122b {4, 7} | 17 34 122b {4, 10} | 18 36 122b {5, 3} | 19 38 122b {5, 6} | 19 39 122b {5, 7} + BTreeIndex{PageId: 21 RowCount: 40 DataSize: 2430 ErasedRowCount: 0} Label{13 rev 1, 976b} | PageId: 0 RowCount: 2 DataSize: 120 ErasedRowCount: 0 | > {0, 4} | PageId: 1 RowCount: 4 DataSize: 240 ErasedRowCount: 0 | > {0, 7} | PageId: 2 RowCount: 6 DataSize: 360 ErasedRowCount: 0 | > {0, 10} | PageId: 3 RowCount: 8 DataSize: 480 ErasedRowCount: 0 | > {1, 3} | PageId: 4 RowCount: 10 DataSize: 600 ErasedRowCount: 0 | > {1, 6} | PageId: 5 RowCount: 12 DataSize: 722 ErasedRowCount: 0 | > {1, 8} | PageId: 6 RowCount: 14 DataSize: 844 ErasedRowCount: 0 | > {2, NULL} | PageId: 7 RowCount: 16 DataSize: 966 ErasedRowCount: 0 | > {2, 4} | PageId: 8 RowCount: 18 DataSize: 1088 ErasedRowCount: 0 | > {2, 7} | PageId: 9 RowCount: 20 DataSize: 1210 ErasedRowCount: 0 | > {2, 10} | PageId: 10 RowCount: 22 DataSize: 1332 ErasedRowCount: 0 | > {3, 3} | PageId: 11 RowCount: 24 DataSize: 1454 ErasedRowCount: 0 | > {3, 6} | PageId: 12 RowCount: 26 DataSize: 1576 ErasedRowCount: 0 | > {3, 8} | PageId: 13 RowCount: 28 DataSize: 1698 ErasedRowCount: 0 | > {4, NULL} | PageId: 14 RowCount: 30 DataSize: 1820 ErasedRowCount: 0 | > {4, 4} | PageId: 15 RowCount: 32 DataSize: 1942 ErasedRowCount: 0 | > {4, 7} | PageId: 16 RowCount: 34 DataSize: 2064 ErasedRowCount: 0 | > {4, 10} | PageId: 17 RowCount: 36 DataSize: 2186 ErasedRowCount: 0 | > {5, 3} | PageId: 18 RowCount: 38 DataSize: 2308 ErasedRowCount: 0 | > {5, 6} | PageId: 19 RowCount: 40 DataSize: 2430 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 120b}, [0, +2)row | ERowOp 1: {0, 1} {Set 2 Uint32 : 0}, {Set 3 Uint64 : 0}, {Set 4 String : xxxxxxxxxx_0} | ERowOp 1: {0, 3} {Set 2 Uint32 : 1}, {Set 3 Uint64 : 1}, {Set 4 String : xxxxxxxxxx_1} + Rows{1} Label{14 rev 1, 120b}, [2, +2)row | ERowOp 1: {0, 4} {Set 2 Uint32 : 2}, {Set 3 Uint64 : 2}, {Set 4 String : xxxxxxxxxx_2} | ERowOp 1: {0, 6} {Set 2 Uint32 : 3}, {Set 3 Uint64 : 3}, {Set 4 String : xxxxxxxxxx_3} + Rows{2} Label{24 rev 1, 120b}, [4, +2)row | ERowOp 1: {0, 7} {Set 2 Uint32 : 4}, {Set 3 Uint64 : 4}, {Set 4 String : xxxxxxxxxx_4} | ERowOp 1: {0, 8} {Set 2 Uint32 : 5}, {Set 3 Uint64 : 5}, {Set 4 String : xxxxxxxxxx_5} + Rows{3} Label{34 rev 1, 120b}, [6, +2)row | ERowOp 1: {0, 10} {Set 2 Uint32 : 6}, {Set 3 Uint64 : 6}, {Set 4 String : xxxxxxxxxx_6} | ERowOp 1: {1, 1} {Set 2 Uint32 : 7}, {Set 3 Uint64 : 7}, {Set 4 String : xxxxxxxxxx_7} + Rows{4} Label{44 rev 1, 120b}, [8, +2)row | ERowOp 1: {1, 3} {Set 2 Uint32 : 8}, {Set 3 Uint64 : 8}, {Set 4 String : xxxxxxxxxx_8} | ERowOp 1: {1, 4} {Set 2 Uint32 : 9}, {Set 3 Uint64 : 9}, {Set 4 String : xxxxxxxxxx_9} + Rows{5} Label{54 rev 1, 122b}, [10, +2)row | ERowOp 1: {1, 6} {Set 2 Uint32 : 10}, {Set 3 Uint64 : 10}, {Set 4 String : xxxxxxxxxx_10} | ERowOp 1: {1, 7} {Set 2 Uint32 : 11}, {Set 3 Uint64 : 11}, {Set 4 String : xxxxxxxxxx_11} + Rows{6} Label{64 rev 1, 122b}, [12, +2)row | ERowOp 1: {1, 8} {Set 2 Uint32 : 12}, {Set 3 Uint64 : 12}, {Set 4 String : xxxxxxxxxx_12} | ERowOp 1: {1, 10} {Set 2 Uint32 : 13}, {Set 3 Uint64 : 13}, {Set 4 String : xxxxxxxxxx_13} + Rows{7} Label{74 rev 1, 122b}, [14, +2)row | ERowOp 1: {2, 1} {Set 2 Uint32 : 14}, {Set 3 Uint64 : 14}, {Set 4 String : xxxxxxxxxx_14} | ERowOp 1: {2, 3} {Set 2 Uint32 : 15}, {Set 3 Uint64 : 15}, {Set 4 String : xxxxxxxxxx_15} + Rows{8} Label{84 rev 1, 122b}, [16, +2)row | ERowOp 1: {2, 4} {Set 2 Uint32 : 16}, {Set 3 Uint64 : 16}, {Set 4 String : xxxxxxxxxx_16} | ERowOp 1: {2, 6} 
{Set 2 Uint32 : 17}, {Set 3 Uint64 : 17}, {Set 4 String : xxxxxxxxxx_17} + Rows{9} Label{94 rev 1, 122b}, [18, +2)row | ERowOp 1: {2, 7} {Set 2 Uint32 : 18}, {Set 3 Uint64 : 18}, {Set 4 String : xxxxxxxxxx_18} | ERowOp 1: {2, 8} {Set 2 Uint32 : 19}, {Set 3 Uint64 : 19}, {Set 4 String : xxxxxxxxxx_19} + Rows{10} Label{104 rev 1, 122b}, [20, +2)row | ERowOp 1: {2, 10} {Set 2 Uint32 : 20}, {Set 3 Uint64 : 20}, {Set 4 String : xxxxxxxxxx_20} | ERowOp 1: {3, 1} {Set 2 Uint32 : 21}, {Set 3 Uint64 : 21}, {Set 4 String : xxxxxxxxxx_21} + Rows{11} Label{114 rev 1, 122b}, [22, +2)row | ERowOp 1: {3, 3} {Set 2 Uint32 : 22}, {Set 3 Uint64 : 22}, {Set 4 String : xxxxxxxxxx_22} | ERowOp 1: {3, 4} {Set 2 Uint32 : 23}, {Set 3 Uint64 : 23}, {Set 4 String : xxxxxxxxxx_23} + Rows{12} Label{124 rev 1, 122b}, [24, +2)row | ERowOp 1: {3, 6} {Set 2 Uint32 : 24}, {Set 3 Uint64 : 24}, {Set 4 String : xxxxxxxxxx_24} | ERowOp 1: {3, 7} {Set 2 Uint32 : 25}, {Set 3 Uint64 : 25}, {Set 4 String : xxxxxxxxxx_25} + Rows{13} Label{134 rev 1, 122b}, [26, +2)row | ERowOp 1: {3, 8} {Set 2 Uint32 : 26}, {Set 3 Uint64 : 26}, {Set 4 String : xxxxxxxxxx_2 ... 6:30:2062]) to queue queue_background_compaction 00000.590 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_background_compaction from 300.000000 to 600.000000 (insert task gen2-table-101-tablet-1 (56 by [16:30:2062])) 00000.590 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.604 DD| RESOURCE_BROKER: Finish task gen2-table-101-tablet-1 (56 by [16:30:2062]) (release resources {1, 0}) 00000.604 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_background_compaction from 600.000000 to 300.000000 (remove task gen2-table-101-tablet-1 (56 by [16:30:2062])) 00000.604 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task bckg-block (987987987988 by [16:8:2055]) from queue queue_background_compaction 00000.604 DD| RESOURCE_BROKER: Assigning in-fly task bckg-block (987987987988 by [16:8:2055]) to queue queue_background_compaction 00000.604 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_background_compaction from 300.000000 to 600.000000 (insert task bckg-block (987987987988 by [16:8:2055])) 00000.604 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.607 DD| RESOURCE_BROKER: Submitted new background_compaction_gen0 task gen0-table-101-tablet-1 (63 by [16:30:2062]) priority=200 resources={1, 0} 00000.607 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (63 by [16:30:2062]) to queue queue_background_compaction 00000.607 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.608 DD| RESOURCE_BROKER: Update task gen0-table-101-tablet-1 (63 by [16:30:2062]) (priority=5 type=compaction_gen0 resources={1, 0} resubmit=0) 00000.608 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (63 by [16:30:2062]) to queue queue_compaction_gen0 00000.609 DD| RESOURCE_BROKER: Updated real resource usage for queue queue_compaction_gen0 from 0.000000 to 300.000000 00000.609 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task gen0-table-101-tablet-1 (63 by [16:30:2062]) from queue queue_compaction_gen0 00000.609 DD| RESOURCE_BROKER: Assigning in-fly task gen0-table-101-tablet-1 (63 by [16:30:2062]) to queue queue_compaction_gen0 00000.609 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.615 DD| RESOURCE_BROKER: Finish task gen0-table-101-tablet-1 (63 by 
[16:30:2062]) (release resources {1, 0}) 00000.615 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.626 II| TABLET_SAUSAGECACHE: Wakeup 1 00000.637 II| TABLET_SAUSAGECACHE: Wakeup 1 00000.648 II| TABLET_SAUSAGECACHE: Wakeup 1 00000.660 II| TABLET_SAUSAGECACHE: Wakeup 1 00000.660 DD| RESOURCE_BROKER: Update task gen1-table-101-tablet-1 (62 by [16:30:2062]) (priority=166 type=background_compaction_gen1 resources={1, 0} resubmit=0) 00000.660 DD| RESOURCE_BROKER: Assigning waiting task gen1-table-101-tablet-1 (62 by [16:30:2062]) to queue queue_background_compaction 00000.660 DD| RESOURCE_BROKER: Updated real resource usage for queue queue_background_compaction from 300.000000 to 600.000000 (in-fly consumption {1, 0}) 00000.660 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.663 DD| RESOURCE_BROKER: Removing task gen1-table-101-tablet-1 (62 by [16:30:2062]) 00000.663 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 2 actors 00000.663 NN| TABLET_SAUSAGECACHE: Poison cache serviced 55 reqs hit {55 29100b} miss {0 0b} 00000.664 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.664 II| FAKE_ENV: DS.0 gone, left {9696b, 90}, put {69349b, 689} 00000.664 II| FAKE_ENV: DS.1 gone, left {49679b, 125}, put {120827b, 750} 00000.665 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.665 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.665 II| FAKE_ENV: All BS storage groups are stopped 00000.665 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 2.000m 00000.665 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 659}, stopped 00000.000 II| FAKE_ENV: Born at 2025-03-31T16:04:05.458269Z 00000.015 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.016 II| FAKE_ENV: Starting storage for BS group 0 00000.017 II| FAKE_ENV: Starting storage for BS group 1 00000.017 II| FAKE_ENV: Starting storage for BS group 2 00000.017 II| FAKE_ENV: Starting storage for BS group 3 00002.123 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00002.124 NN| TABLET_SAUSAGECACHE: Poison cache serviced 353 reqs hit {1164 6970614b} miss {0 0b} 00002.125 II| FAKE_ENV: Shut order, stopping 4 BS groups 00002.125 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {199551b, 2023} 00002.125 II| FAKE_ENV: DS.1 gone, left {2023431b, 4}, put {7269821b, 2026} 00002.125 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00002.125 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00002.125 II| FAKE_ENV: All BS storage groups are stopped 00002.125 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00002.125 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-03-31T16:04:07.598700Z 00000.009 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.010 II| FAKE_ENV: Starting storage for BS group 0 00000.011 II| FAKE_ENV: Starting storage for BS group 1 00000.011 II| FAKE_ENV: Starting storage for BS group 2 00000.011 II| FAKE_ENV: Starting storage for BS group 3 00002.208 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00002.211 NN| TABLET_SAUSAGECACHE: Poison cache serviced 353 reqs hit {1164 6970614b} miss {0 0b} 00002.212 II| FAKE_ENV: Shut order, stopping 4 BS groups 00002.212 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {199551b, 2023} 00002.212 II| FAKE_ENV: DS.1 gone, left 
{2023431b, 4}, put {7269821b, 2026} 00002.212 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00002.212 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00002.212 II| FAKE_ENV: All BS storage groups are stopped 00002.212 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00002.213 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-03-31T16:04:09.854459Z 00000.012 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.013 II| FAKE_ENV: Starting storage for BS group 0 00000.013 II| FAKE_ENV: Starting storage for BS group 1 00000.013 II| FAKE_ENV: Starting storage for BS group 2 00000.013 II| FAKE_ENV: Starting storage for BS group 3 00001.810 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00001.811 NN| TABLET_SAUSAGECACHE: Poison cache serviced 299 reqs hit {1012 6947830b} miss {0 0b} 00001.811 II| FAKE_ENV: Shut order, stopping 4 BS groups 00001.811 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {199548b, 2023} 00001.811 II| FAKE_ENV: DS.1 gone, left {2007005b, 4}, put {7211282b, 2026} 00001.812 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00001.812 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00001.812 II| FAKE_ENV: All BS storage groups are stopped 00001.812 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00001.812 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-03-31T16:04:11.683039Z 00000.009 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.010 II| FAKE_ENV: Starting storage for BS group 0 00000.010 II| FAKE_ENV: Starting storage for BS group 1 00000.010 II| FAKE_ENV: Starting storage for BS group 2 00000.010 II| FAKE_ENV: Starting storage for BS group 3 00002.044 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00002.045 NN| TABLET_SAUSAGECACHE: Poison cache serviced 353 reqs hit {1164 6970614b} miss {0 0b} 00002.046 II| FAKE_ENV: Shut order, stopping 4 BS groups 00002.046 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {199545b, 2023} 00002.046 II| FAKE_ENV: DS.1 gone, left {2013604b, 4}, put {7237874b, 2026} 00002.046 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00002.046 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00002.047 II| FAKE_ENV: All BS storage groups are stopped 00002.047 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00002.047 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-03-31T16:04:13.779504Z 00000.009 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.010 II| FAKE_ENV: Starting storage for BS group 0 00000.010 II| FAKE_ENV: Starting storage for BS group 1 00000.011 II| FAKE_ENV: Starting storage for BS group 2 00000.011 II| FAKE_ENV: Starting storage for BS group 3 00002.245 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00002.246 NN| TABLET_SAUSAGECACHE: Poison cache serviced 299 reqs hit {1012 6947830b} miss {0 0b} 00002.247 II| FAKE_ENV: Shut order, stopping 4 BS groups 00002.247 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {199557b, 2023} 00002.247 II| FAKE_ENV: DS.1 gone, left {2007005b, 4}, put {7211282b, 2026} 00002.248 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00002.248 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 
00002.248 II| FAKE_ENV: All BS storage groups are stopped 00002.248 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00002.248 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-03-31T16:04:16.072813Z 00000.017 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.018 II| FAKE_ENV: Starting storage for BS group 0 00000.018 II| FAKE_ENV: Starting storage for BS group 1 00000.018 II| FAKE_ENV: Starting storage for BS group 2 00000.026 II| FAKE_ENV: Starting storage for BS group 3 00001.984 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00001.986 NN| TABLET_SAUSAGECACHE: Poison cache serviced 309 reqs hit {1118 6955338b} miss {2 9773b} 00001.986 II| FAKE_ENV: Shut order, stopping 4 BS groups 00001.986 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {199551b, 2023} 00001.986 II| FAKE_ENV: DS.1 gone, left {2023431b, 4}, put {7269804b, 2026} 00001.987 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00001.987 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00001.987 II| FAKE_ENV: All BS storage groups are stopped 00001.987 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00001.987 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-03-31T16:04:18.073038Z 00000.008 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.009 II| FAKE_ENV: Starting storage for BS group 0 00000.009 II| FAKE_ENV: Starting storage for BS group 1 00000.009 II| FAKE_ENV: Starting storage for BS group 2 00000.009 II| FAKE_ENV: Starting storage for BS group 3 00003.375 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00003.376 NN| TABLET_SAUSAGECACHE: Poison cache serviced 651 reqs hit {780 4008302b} miss {1002 6916040b} 00003.377 II| FAKE_ENV: Shut order, stopping 4 BS groups 00003.377 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {200304b, 2033} 00003.377 II| FAKE_ENV: DS.1 gone, left {2023570b, 4}, put {9254530b, 2039} 00003.377 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00003.378 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00003.378 II| FAKE_ENV: All BS storage groups are stopped 00003.378 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00003.378 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped >> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWoIndexes [GOOD] >> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWithSyncIndex >> KqpPg::InsertNoTargetColumns_NotOneSize+useSink [GOOD] >> KqpPg::InsertNoTargetColumns_NotOneSize-useSink |74.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut |74.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut |74.6%| [LD] {RESULT} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Literal_Conflict-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 22185, MsgBus: 13934 2025-03-31T16:04:08.427699Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005107047267507:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:08.427779Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0011c5/r3tmp/tmp9KcPaI/pdisk_1.dat 2025-03-31T16:04:08.856064Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:08.870290Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:08.870413Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 22185, node 1 2025-03-31T16:04:08.874082Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:04:08.955633Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:08.955656Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:08.955661Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:08.955752Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13934 TClient is connected to server localhost:13934 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:09.540547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:09.583485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:09.864426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:04:10.084835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:04:10.176882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:04:12.041953Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005124227138498:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:12.042075Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:12.448673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:04:12.487713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:04:12.529001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:04:12.608669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:04:12.646147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:04:12.701455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:04:12.766126Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005124227139011:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:12.766247Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:12.766466Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005124227139017:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:12.775316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:04:12.799322Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005124227139019:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:04:12.895615Z node 1 :TX_PROXY ERROR: Actor# [1:7488005124227139075:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:04:13.431238Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488005107047267507:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:13.431328Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:14.227454Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=3; 2025-03-31T16:04:14.257742Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 3 at tablet 72075186224037888 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-03-31T16:04:14.257954Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 3 at tablet 72075186224037888 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-03-31T16:04:14.258181Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488005132817074005:2498], Table: `/Root/TwoShard` ([72057594046644480:2:1]), SessionActorId: [1:7488005132817073964:2498]Got CONSTRAINT VIOLATION for table `/Root/TwoShard`. ShardID=72075186224037888, Sink=[1:7488005132817074005:2498].{
: Error: Duplicate keys have been found., code: 2012 } 2025-03-31T16:04:14.258707Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488005132817073998:2498], SessionActorId: [1:7488005132817073964:2498], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[1:7488005132817073964:2498]. isRollback=0 2025-03-31T16:04:14.258910Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODIyMzU3YmItZWRkYTIwMzktM2E0YzkyNDAtMjRhYWJjNzc=, ActorId: [1:7488005132817073964:2498], ActorState: ExecuteState, TraceId: 01jqpf4g5s8fzd350afx9bamjj, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7488005132817073999:2498] from: [1:7488005132817073998:2498] 2025-03-31T16:04:14.258988Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7488005132817073999:2498] TxId: 281474976710671. Ctx: { TraceId: 01jqpf4g5s8fzd350afx9bamjj, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODIyMzU3YmItZWRkYTIwMzktM2E0YzkyNDAtMjRhYWJjNzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-03-31T16:04:14.260097Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODIyMzU3YmItZWRkYTIwMzktM2E0YzkyNDAtMjRhYWJjNzc=, ActorId: [1:7488005132817073964:2498], ActorState: ExecuteState, TraceId: 01jqpf4g5s8fzd350afx9bamjj, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 28034, MsgBus: 19130 2025-03-31T16:04:15.397925Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488005136445851031:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:15.397979Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0011c5/r3tmp/tmpWETNKj/pdisk_1.dat 2025-03-31T16:04:15.524865Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28034, node 2 2025-03-31T16:04:15.562090Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:15.562179Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:15.600363Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:04:15.730275Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:15.730298Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:15.730306Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:15.730419Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19130 TClient is connected to server localhost:19130 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:16.343284Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:16.397644Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:04:16.560536Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:16.774949Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:16.911453Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:19.582830Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005153625721975:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:19.582943Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:19.640553Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:04:19.719254Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:04:19.811779Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:04:19.899868Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:04:19.947752Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:04:20.030381Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:04:20.123438Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005157920689796:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:20.123560Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:20.125444Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005157920689801:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:20.129848Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:04:20.150033Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488005157920689803:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:04:20.256920Z node 2 :TX_PROXY ERROR: Actor# [2:7488005157920689859:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:04:20.403202Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488005136445851031:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:20.403351Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:21.967168Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7488005162215657504:2505], TxId: 281474976715672, task: 1. Ctx: { TraceId : 01jqpf4qhv133egeg7292ewxzw. SessionId : ydb://session/3?node_id=2&id=YTVlMTNjZTYtNjgxNTY4OGYtZTAzNTNjZC04NTBlMGQwNg==. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }.
2025-03-31T16:04:21.967870Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7488005162215657505:2506], TxId: 281474976715672, task: 2. Ctx: { SessionId : ydb://session/3?node_id=2&id=YTVlMTNjZTYtNjgxNTY4OGYtZTAzNTNjZC04NTBlMGQwNg==. CustomerSuppliedId : . TraceId : 01jqpf4qhv133egeg7292ewxzw. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7488005162215657501:2492], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution }
2025-03-31T16:04:21.968180Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YTVlMTNjZTYtNjgxNTY4OGYtZTAzNTNjZC04NTBlMGQwNg==, ActorId: [2:7488005162215657420:2492], ActorState: ExecuteState, TraceId: 01jqpf4qhv133egeg7292ewxzw, Create QueryResponse for error on request, msg:
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::UpsertExistingKey [GOOD]
Test command err:
Trying to start YDB, gRPC: 24332, MsgBus: 14240
2025-03-31T16:04:08.495031Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005104586715255:2064];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:04:08.495109Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0011c7/r3tmp/tmpvQPbuk/pdisk_1.dat
2025-03-31T16:04:08.955932Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:04:08.980559Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:04:08.980645Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:04:08.983668Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 24332, node 1
2025-03-31T16:04:09.119722Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T16:04:09.119753Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T16:04:09.119761Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T16:04:09.119937Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:14240
TClient is connected to server localhost:14240
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-31T16:04:09.787627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:04:09.803744Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:04:09.818476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:10.021919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:10.203320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:10.289035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:12.279260Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005121766586235:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:12.279372Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:12.652183Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:04:12.735397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:04:12.820319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:04:12.868491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:04:12.925417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:04:13.006964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:04:13.092173Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005126061554053:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:13.092288Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:13.092522Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005126061554058:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:13.096840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:04:13.131346Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005126061554060:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:04:13.210050Z node 1 :TX_PROXY ERROR: Actor# [1:7488005126061554114:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:04:13.495260Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488005104586715255:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:13.495355Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:14.438417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 19647, MsgBus: 61651 2025-03-31T16:04:15.909232Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488005135552915922:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:15.909501Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0011c7/r3tmp/tmpiRiDeU/pdisk_1.dat 2025-03-31T16:04:16.041272Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:16.071845Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:16.071937Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:16.072958Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19647, node 2 2025-03-31T16:04:16.143786Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:16.143815Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:16.143824Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:16.143959Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61651 TClient is connected to server localhost:61651 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:16.738242Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:16.750584Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:16.865391Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:17.066745Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:17.175763Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:20.081507Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005157027754129:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:20.081613Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:20.115061Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:04:20.189430Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:04:20.233720Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:04:20.274428Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:04:20.322580Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:04:20.415387Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:04:20.512656Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005157027754648:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:20.512764Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:20.513022Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005157027754653:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:20.517153Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:04:20.547272Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-31T16:04:20.547578Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488005157027754655:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:04:20.623319Z node 2 :TX_PROXY ERROR: Actor# [2:7488005157027754710:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:04:20.913280Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488005135552915922:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:20.913417Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:21.801061Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 >> KqpImmediateEffects::ConflictingKeyR1RWR2 [GOOD] |74.7%| [TA] $(B)/ydb/core/tx/schemeshard/ut_export/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpEffects::UpdateOn_Select [GOOD] >> KqpSysColV0::SelectRange >> KqpSysColV0::SelectRowAsterisk >> KqpImmediateEffects::ConflictingKeyW1RWR2 [GOOD] >> BackupRestore::RestoreViewQueryText [GOOD] >> BackupRestore::RestoreViewReferenceTable >> KqpSystemView::FailNavigate >> KqpInplaceUpdate::Negative_SingleRowWithKeyCast-UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyR1RWR2 [GOOD] Test command err: Trying to start YDB, gRPC: 5552, MsgBus: 2380 2025-03-31T16:04:09.248920Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005110185107839:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:09.249037Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0011c3/r3tmp/tmpE5vET7/pdisk_1.dat 2025-03-31T16:04:09.619430Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5552, node 1 2025-03-31T16:04:09.664726Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:09.664849Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:09.668828Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:04:09.759518Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:09.759553Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:09.759561Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:09.759740Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2380 TClient is connected to server localhost:2380 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:10.504080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:10.521167Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:04:10.539374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:04:10.717625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-31T16:04:10.903365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:10.996435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:13.110554Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005127364978786:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:13.110649Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:13.564550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:04:13.638879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:04:13.713515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:04:13.790581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:04:13.863957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:04:13.954177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:04:14.035160Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005131659946603:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:14.035284Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:14.035627Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005131659946608:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:14.043058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:04:14.061377Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005131659946610:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:04:14.127822Z node 1 :TX_PROXY ERROR: Actor# [1:7488005131659946666:3457] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:04:14.249562Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488005110185107839:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:14.249662Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:15.330232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:04:16.066067Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZmExOWNlODgtNTdkYmY1Yy04YzAwZTBjZS1lZDYxZWQ0Mg==, ActorId: [1:7488005135954914537:2528], ActorState: ExecuteState, TraceId: 01jqpf4j0cfjnnbcc3rnreg411, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken 2025-03-31T16:04:16.075741Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZmExOWNlODgtNTdkYmY1Yy04YzAwZTBjZS1lZDYxZWQ0Mg==, ActorId: [1:7488005135954914537:2528], ActorState: ReadyState, TraceId: 01jqpf4j2bfhd70fyqqz3zgtzn, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 30800, MsgBus: 16532 2025-03-31T16:04:17.293379Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488005143147070972:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:17.293453Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0011c3/r3tmp/tmpE5GtYp/pdisk_1.dat 2025-03-31T16:04:17.495968Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:17.522912Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:17.523003Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:17.532280Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30800, node 2 2025-03-31T16:04:17.639726Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:17.639750Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:17.639757Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:17.639880Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16532 TClient is connected to server localhost:16532 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:18.376481Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:18.387693Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:04:18.406580Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:18.503400Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:18.661731Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:18.758389Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:22.188185Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005164621909210:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:22.188308Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:22.276597Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:04:22.297782Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488005143147070972:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:22.297851Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:22.365075Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:04:22.414300Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:04:22.504383Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:04:22.581072Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:04:22.630576Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:04:22.726639Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005164621909736:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:22.726773Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:22.727250Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005164621909741:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:22.733788Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:04:22.754939Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488005164621909743:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:04:22.830425Z node 2 :TX_PROXY ERROR: Actor# [2:7488005164621909799:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:04:24.023803Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 >> KqpEffects::InsertAbort_Select_Conflict+UseSink [GOOD] >> BuildStatsHistogram::Many_Mixed [GOOD] >> BuildStatsHistogram::Many_Serial ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::UpdateOn_Select [GOOD] Test command err: Trying to start YDB, gRPC: 29753, MsgBus: 65340 2025-03-31T16:04:10.460338Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005114958081769:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:10.460408Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0011ba/r3tmp/tmpilU25O/pdisk_1.dat 2025-03-31T16:04:10.921979Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:10.926662Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:10.926766Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:10.930088Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29753, node 1 2025-03-31T16:04:11.081998Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:11.082041Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:11.082065Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:11.082241Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65340 TClient is connected to server localhost:65340 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:04:11.787030Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:11.811219Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:04:11.820316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:04:12.028585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-31T16:04:12.252812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:12.364593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:14.651464Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005132137952712:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:14.651644Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:15.002602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:04:15.039987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:04:15.083803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:04:15.131562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:04:15.169371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:04:15.226984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:04:15.333552Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005136432920526:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:15.333650Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:15.334001Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005136432920531:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:15.339280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:04:15.354226Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005136432920533:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:04:15.449662Z node 1 :TX_PROXY ERROR: Actor# [1:7488005136432920588:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:04:15.462108Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488005114958081769:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:15.462203Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 2320, MsgBus: 16058 2025-03-31T16:04:17.992341Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488005143847432307:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:18.003701Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0011ba/r3tmp/tmpTgKuaz/pdisk_1.dat 2025-03-31T16:04:18.184016Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2320, node 2 2025-03-31T16:04:18.271483Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:18.271584Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:18.284186Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:04:18.481247Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:18.481271Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:18.481278Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:18.481451Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16058 TClient is connected to server localhost:16058 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:04:19.119244Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:19.128102Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:04:19.142127Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:19.257597Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:19.576820Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:19.666763Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:22.484349Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005165322270526:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:22.484457Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:22.546500Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:04:22.598562Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:04:22.648811Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:04:22.696623Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:04:22.770066Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:04:22.811018Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:04:22.921522Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005165322271045:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:22.921610Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:22.921976Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005165322271050:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:22.962139Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488005143847432307:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:22.962488Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:22.982097Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:04:22.999517Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488005165322271052:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:04:23.099353Z node 2 :TX_PROXY ERROR: Actor# [2:7488005169617238409:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpSystemView::NodesRange1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyW1RWR2 [GOOD] Test command err: Trying to start YDB, gRPC: 19307, MsgBus: 19796 2025-03-31T16:04:10.373554Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005116403197449:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:10.373603Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0011bf/r3tmp/tmpjmHtPw/pdisk_1.dat 2025-03-31T16:04:10.866840Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:10.876634Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:10.876770Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:10.878229Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19307, node 1 2025-03-31T16:04:11.062010Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:11.062042Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:11.062051Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:11.062208Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19796 TClient is connected to server localhost:19796 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:11.758907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:04:11.774455Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:04:11.790877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:04:11.941217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-31T16:04:12.092747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:04:12.185021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:04:14.064002Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005133583068431:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:14.064165Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:14.379542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:04:14.457372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:04:14.490484Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:04:14.552560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:04:14.606929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:04:14.711954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:04:14.801002Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005133583068955:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:14.801076Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:14.801419Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005133583068960:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:14.808265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:04:14.830335Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005133583068962:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:04:14.921333Z node 1 :TX_PROXY ERROR: Actor# [1:7488005133583069017:3457] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:04:15.375448Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488005116403197449:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:15.395369Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:15.978584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 64723, MsgBus: 7044 2025-03-31T16:04:17.649600Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488005145443765243:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:17.649638Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0011bf/r3tmp/tmpy8WEKZ/pdisk_1.dat 2025-03-31T16:04:17.958035Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:18.005539Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:18.005628Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:18.012401Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64723, node 2 2025-03-31T16:04:18.171287Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:18.171312Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:18.171322Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:18.171446Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7044 TClient is connected to server localhost:7044 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:18.894713Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:18.907469Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:04:18.929677Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:19.011959Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:19.265433Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:19.408676Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:22.650218Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488005145443765243:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:22.650278Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:22.959602Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005166918603473:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:22.959686Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:23.015498Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:04:23.069395Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:04:23.119640Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:04:23.196395Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:04:23.239822Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:04:23.304222Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:04:23.406599Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005171213571284:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:23.406724Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:23.407173Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005171213571289:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:23.412933Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:04:23.450752Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488005171213571291:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:04:23.534859Z node 2 :TX_PROXY ERROR: Actor# [2:7488005171213571347:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:04:24.934347Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:04:25.756136Z node 2 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because it cannot acquire locks;tx_id=5; 2025-03-31T16:04:25.756360Z node 2 :TX_DATASHARD ERROR: Prepare transaction failed. txid 5 at tablet 72075186224037919 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because it cannot acquire locks" issue_code: 2001 severity: 1 } 2025-03-31T16:04:25.756535Z node 2 :TX_DATASHARD ERROR: Errors while proposing transaction txid 5 at tablet 72075186224037919 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because it cannot acquire locks" issue_code: 2001 severity: 1 } 2025-03-31T16:04:25.756737Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7488005179803506628:2502], Table: `/Root/TestImmediateEffects` ([72057594046644480:16:1]), SessionActorId: [2:7488005175508538963:2502]Got LOCKS BROKEN for table `/Root/TestImmediateEffects`. ShardID=72075186224037919, Sink=[2:7488005179803506628:2502].{
: Error: Operation is aborting because it cannot acquire locks, code: 2001 } 2025-03-31T16:04:25.757389Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7488005179803506569:2502], SessionActorId: [2:7488005175508538963:2502], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/TestImmediateEffects`., code: 2001
: Error: Operation is aborting because it cannot acquire locks, code: 2001 . sessionActorId=[2:7488005175508538963:2502]. isRollback=0 2025-03-31T16:04:25.757745Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YWJjMDc3N2UtOTVhMjJlOGUtZjU4NGZkNDUtZjBkOTE2OWE=, ActorId: [2:7488005175508538963:2502], ActorState: ExecuteState, TraceId: 01jqpf4vde9cqb37wcy03zte54, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [2:7488005179803506622:2502] from: [2:7488005179803506569:2502] 2025-03-31T16:04:25.757869Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7488005179803506622:2502] TxId: 281474976710675. Ctx: { TraceId: 01jqpf4vde9cqb37wcy03zte54, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YWJjMDc3N2UtOTVhMjJlOGUtZjU4NGZkNDUtZjBkOTE2OWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/TestImmediateEffects`., code: 2001 subissue: {
: Error: Operation is aborting because it cannot acquire locks, code: 2001 } } 2025-03-31T16:04:25.758943Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YWJjMDc3N2UtOTVhMjJlOGUtZjU4NGZkNDUtZjBkOTE2OWE=, ActorId: [2:7488005175508538963:2502], ActorState: ExecuteState, TraceId: 01jqpf4vde9cqb37wcy03zte54, Create QueryResponse for error on request, msg: >> KqpImmediateEffects::TxWithReadAtTheEnd-UseSink [GOOD] >> KqpSysColV1::StreamSelectRowById [GOOD] >> DataShardVolatile::UpsertBrokenLockArbiter+UseSink [GOOD] >> DataShardVolatile::UpsertBrokenLockArbiter-UseSink >> TTopicYqlTest::BadRequests [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::Negative_SingleRowWithKeyCast-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 13646, MsgBus: 24438 2025-03-31T16:04:11.660429Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005118964817587:2209];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:11.668394Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0011b6/r3tmp/tmpzVLPen/pdisk_1.dat 2025-03-31T16:04:12.135692Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:12.168483Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:12.168610Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:12.171675Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13646, node 1 2025-03-31T16:04:12.281201Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:12.281228Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:12.281239Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:12.281381Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24438 TClient is connected to server localhost:24438 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:04:13.084082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:13.140195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:13.301827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:13.489133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:13.590824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:15.549003Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005136144688402:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:15.549181Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:15.946890Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:04:16.000159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:04:16.090577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:04:16.132927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:04:16.177002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:04:16.251062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:04:16.342431Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005140439656215:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:16.342519Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:16.342760Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005140439656220:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:16.347271Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:04:16.365000Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005140439656222:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:04:16.456475Z node 1 :TX_PROXY ERROR: Actor# [1:7488005140439656276:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:04:16.662499Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488005118964817587:2209];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:16.662597Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:17.659612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 61287, MsgBus: 18051 2025-03-31T16:04:19.459519Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488005155299676305:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:19.459636Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0011b6/r3tmp/tmpBBdx4G/pdisk_1.dat 2025-03-31T16:04:19.675834Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:19.705249Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:19.705341Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:19.711386Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61287, node 2 2025-03-31T16:04:19.888896Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:19.888926Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:19.888935Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:19.889068Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18051 TClient is connected to server localhost:18051 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:20.491573Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:20.509462Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:04:20.525197Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:20.628661Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:20.883875Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:21.008850Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:23.220456Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005172479547256:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:23.220539Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:23.314349Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:04:23.372633Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:04:23.434378Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:04:23.543167Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:04:23.626630Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:04:23.701107Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:04:23.795459Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005172479547772:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:23.795555Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:23.797295Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005172479547777:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:23.801858Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:04:23.826176Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488005172479547779:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:04:23.898280Z node 2 :TX_PROXY ERROR: Actor# [2:7488005172479547834:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:04:24.459738Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488005155299676305:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:24.459817Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:25.472299Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 >> KqpSysColV0::SelectRowById >> KqpImmediateEffects::TxWithWriteAtTheEnd-UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Select_Conflict+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 12695, MsgBus: 6105 2025-03-31T16:04:11.916170Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005118267435820:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:11.923522Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0011b0/r3tmp/tmp5Cy3Ks/pdisk_1.dat 2025-03-31T16:04:12.527152Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:12.532094Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:12.532186Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:12.534815Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12695, node 1 2025-03-31T16:04:12.700882Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:12.700907Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:12.700913Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:12.701026Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6105 TClient is connected to server localhost:6105 WaitRootIsUp 'Root'... 
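Note on the repeated "Resource pool default not found or you don't have access permissions" warnings: together with the ESchemeOpCreateResourcePool operation and the TX_PROXY message "path exist, request accepts it" for `/Root/.metadata/workload_manager/pools/default`, they trace the on-demand creation of the default workload-manager resource pool. The first queries against a fresh database race with that creation, the workload service schedules retries ("Transaction ... completed, doublechecking"), and the warnings stop once the pool exists — in these test runs they are transient, not fatal. A sketch of a client-side warm-up that tolerates this window; the helper and its parameters are hypothetical, not an SDK API:

```python
import time
import ydb

def wait_for_metadata(pool, attempts=10, delay=1.0):
    """Hypothetical warm-up helper: run a trivial query until server-side
    metadata (including the default resource pool) is initialized."""
    for _ in range(attempts):
        try:
            pool.retry_operation_sync(
                lambda session: session.transaction().execute(
                    "SELECT 1;", commit_tx=True
                )
            )
            return  # a query succeeded, so the metadata is usable
        except ydb.issues.Error:
            # Covers NOT_FOUND-style initialization races like the ones
            # logged above; back off and try again.
            time.sleep(delay)
    raise TimeoutError("server metadata did not initialize in time")
```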
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:13.583032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:13.612865Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:04:13.625636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:13.789773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:14.035320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:14.163791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:16.362717Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005139742274049:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:16.362848Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:16.701305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:04:16.745296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:04:16.834174Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:04:16.879503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:04:16.919791Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488005118267435820:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:16.919880Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:16.935782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:04:17.026376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:04:17.132775Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005144037241874:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:17.132881Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:17.133154Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005144037241879:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:17.137610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:04:17.166162Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005144037241881:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:04:17.265872Z node 1 :TX_PROXY ERROR: Actor# [1:7488005144037241937:3461] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 9456, MsgBus: 3635 2025-03-31T16:04:19.804258Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488005154506180819:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:19.804307Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0011b0/r3tmp/tmp5Z5l9H/pdisk_1.dat 2025-03-31T16:04:19.998813Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:20.039330Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:20.039434Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:20.041198Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9456, node 2 2025-03-31T16:04:20.139856Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:20.139894Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:20.139905Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:20.140041Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3635 TClient is connected to server localhost:3635 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:20.668578Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:04:20.683537Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:04:20.696715Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:20.776817Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:21.021269Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:21.111302Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:23.999285Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005171686051768:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:23.999400Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:24.067616Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:04:24.139570Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:04:24.175892Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:04:24.224846Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:04:24.268822Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:04:24.358600Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:04:24.432767Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005175981019577:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:24.432854Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:24.433053Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005175981019582:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:24.436987Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:04:24.451334Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488005175981019584:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:04:24.510092Z node 2 :TX_PROXY ERROR: Actor# [2:7488005175981019637:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:04:24.805316Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488005154506180819:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:24.805376Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:25.647510Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:04:25.936484Z node 2 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=281474976715673; 2025-03-31T16:04:25.950364Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7488005180275987393:2497], Table: `/Root/TwoShard` ([72057594046644480:2:1]), SessionActorId: [2:7488005180275987230:2497]Got CONSTRAINT VIOLATION for table `/Root/TwoShard`. ShardID=72075186224037888, Sink=[2:7488005180275987393:2497].{
: Error: Duplicate keys have been found., code: 2012 } 2025-03-31T16:04:25.950960Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7488005180275987382:2497], SessionActorId: [2:7488005180275987230:2497], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[2:7488005180275987230:2497]. isRollback=0 2025-03-31T16:04:25.951298Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YzRlODlkNGUtZDI1MzFkMS00OTkzNDQ1Yy1mZTJiZTQyZg==, ActorId: [2:7488005180275987230:2497], ActorState: ExecuteState, TraceId: 01jqpf4vjr7vc1kmdf6gzyhn07, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [2:7488005180275987383:2497] from: [2:7488005180275987382:2497] 2025-03-31T16:04:25.951397Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7488005180275987383:2497] TxId: 281474976715673. Ctx: { TraceId: 01jqpf4vjr7vc1kmdf6gzyhn07, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzRlODlkNGUtZDI1MzFkMS00OTkzNDQ1Yy1mZTJiZTQyZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-03-31T16:04:25.951667Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YzRlODlkNGUtZDI1MzFkMS00OTkzNDQ1Yy1mZTJiZTQyZg==, ActorId: [2:7488005180275987230:2497], ActorState: ExecuteState, TraceId: 01jqpf4vjr7vc1kmdf6gzyhn07, Create QueryResponse for error on request, msg: >> BackupRestoreS3::RestoreViewQueryText [GOOD] >> BackupRestoreS3::RestoreViewReferenceTable >> KqpSysColV0::InnerJoinSelect [GOOD] >> KqpSysColV0::UpdateAndDelete >> KqpSystemView::ReadSuccess [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamSelectRowById [GOOD] Test command err: Trying to start YDB, gRPC: 18310, MsgBus: 2800 2025-03-31T16:04:19.614266Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005151728157469:2267];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:19.614331Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001879/r3tmp/tmpzpASTu/pdisk_1.dat 2025-03-31T16:04:20.458263Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:20.460286Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:20.460389Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:20.466028Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18310, node 1 2025-03-31T16:04:20.775813Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:20.775835Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:20.775841Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:20.775950Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2800 TClient is connected to server localhost:2800 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:21.953598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
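Note on the PRECONDITION_FAILED tree above: code 2012 ("Constraint violated" with subissue "Duplicate keys have been found") is what a YQL `INSERT` yields when a row with the same primary key already exists in `/Root/TwoShard`. Unlike the ABORTED case earlier, retrying the identical statement is pointless — it hits the same keys; where overwrite semantics are acceptable, `UPSERT` (insert-or-replace) avoids the error. A small sketch under the same assumed connection as the earlier example; the table name comes from the log, the key and values are placeholders:

```python
import ydb

def insert_then_upsert(session):
    try:
        # INSERT enforces key uniqueness: if Key 42 already exists this
        # fails with PRECONDITION_FAILED, code 2012 ("Duplicate keys
        # have been found"), exactly as in the log above.
        session.transaction(ydb.SerializableReadWrite()).execute(
            'INSERT INTO TwoShard (Key, Value1) VALUES (42u, "first");',
            commit_tx=True,
        )
    except ydb.issues.PreconditionFailed:
        # UPSERT is insert-or-replace, so it succeeds whether or not
        # the key is already present and is safe to re-run.
        session.transaction(ydb.SerializableReadWrite()).execute(
            'UPSERT INTO TwoShard (Key, Value1) VALUES (42u, "first");',
            commit_tx=True,
        )
```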
2025-03-31T16:04:21.972962Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:04:21.987189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:22.189898Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:22.481518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:22.623588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:24.619244Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488005151728157469:2267];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:24.619327Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:25.189746Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005177497962832:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:25.189890Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:25.490605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:04:25.548766Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:04:25.616774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:04:25.678207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:04:25.739718Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:04:25.830433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:04:25.907258Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005177497963353:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:25.907366Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:25.908404Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005177497963358:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:25.912929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:04:25.924234Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005177497963360:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:04:26.025519Z node 1 :TX_PROXY ERROR: Actor# [1:7488005181792930713:3458] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:04:27.633235Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437067659, txId: 281474976710671] shutting down |74.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/client/ut/ydb-core-client-ut |74.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/ut/ydb-core-client-ut >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeTable [GOOD] >> KqpSysColV1::StreamInnerJoinSelect [GOOD] >> SystemView::QueryStatsAllTables [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeSubDomain [GOOD] |74.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/ydb/sdk_sessions_ut/ydb-services-ydb-sdk_sessions_ut >> SystemView::QueryStatsRetries >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeRtmrVolume [GOOD] >> KqpSysColV1::InnerJoinSelectAsterisk >> KqpSystemView::PartitionStatsRange3 [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeSolomonVolume [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeTableIndex >> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWithSyncIndex [GOOD] >> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWithAsyncIndex |74.7%| [LD] {RESULT} $(B)/ydb/core/client/ut/ydb-core-client-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::TxWithReadAtTheEnd-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 26589, MsgBus: 8989 2025-03-31T16:04:11.137381Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005117207170190:2135];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:11.141265Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0011b8/r3tmp/tmpBDEccT/pdisk_1.dat 2025-03-31T16:04:11.641041Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:11.663330Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:11.663431Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:11.665479Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26589, node 1 2025-03-31T16:04:11.835612Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:11.835637Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:11.835644Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:11.835749Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8989 TClient is connected to server localhost:8989 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:12.597963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:12.613932Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:04:12.625027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:12.785860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:13.028352Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:13.170832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:15.096834Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005134387041068:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:15.096939Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:15.676762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:04:15.738453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:04:15.829506Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:04:15.883537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:04:15.953803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:04:16.004525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:04:16.070670Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005138682008880:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:16.070762Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:16.071128Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005138682008885:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:16.077793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:04:16.091317Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005138682008887:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:04:16.136828Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488005117207170190:2135];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:16.136952Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:16.178336Z node 1 :TX_PROXY ERROR: Actor# [1:7488005138682008942:3458] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:04:17.488971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 23603, MsgBus: 32623 2025-03-31T16:04:19.164133Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488005153887591898:2283];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:19.164197Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0011b8/r3tmp/tmpD9l0vX/pdisk_1.dat 2025-03-31T16:04:19.400222Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:19.400302Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:19.401556Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23603, node 2 2025-03-31T16:04:19.413714Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:19.566451Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:19.566474Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:19.566479Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:19.566551Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32623 TClient is connected to server localhost:32623 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:20.504202Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:20.520903Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:20.639510Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:21.008786Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:21.111102Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:24.139226Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488005153887591898:2283];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:24.148202Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:24.443849Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005175362429911:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:24.443920Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:24.500305Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:04:24.540755Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:04:24.620426Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:04:24.676199Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:04:24.728889Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:04:24.805577Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:04:24.932216Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005175362430435:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:24.932341Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:24.932888Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005175362430440:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:24.938047Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:04:24.959693Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488005175362430442:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:04:25.017707Z node 2 :TX_PROXY ERROR: Actor# [2:7488005179657397793:3463] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:04:26.442208Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 |74.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/sdk_sessions_ut/ydb-services-ydb-sdk_sessions_ut |74.7%| [LD] {RESULT} $(B)/ydb/services/ydb/sdk_sessions_ut/ydb-services-ydb-sdk_sessions_ut >> KqpImmediateEffects::InteractiveTxWithWriteAtTheEnd [GOOD] >> KqpSysColV1::InnerJoinSelect |74.7%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpSystemView::PartitionStatsRanges ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::TxWithWriteAtTheEnd-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 19683, MsgBus: 21371 2025-03-31T16:04:13.949898Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005126286784279:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:13.959338Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0011a5/r3tmp/tmpWQRoiY/pdisk_1.dat 2025-03-31T16:04:14.475893Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:14.479537Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:14.479677Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:14.504892Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19683, node 1 2025-03-31T16:04:14.593371Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:14.593397Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:14.593409Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:14.593542Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21371 TClient is connected to server localhost:21371 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:15.290243Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:15.315995Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:04:15.339101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:15.511148Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:15.695034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:15.772472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:17.644171Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005143466655226:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:17.644295Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:18.158276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:04:18.222711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:04:18.284645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:04:18.402847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:04:18.483142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:04:18.574434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:04:18.665602Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005147761623046:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:18.665684Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:18.667347Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005147761623051:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:18.673485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:04:18.686310Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005147761623053:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:04:18.756471Z node 1 :TX_PROXY ERROR: Actor# [1:7488005147761623108:3457] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:04:18.957588Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488005126286784279:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:18.957739Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:19.880108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 10568, MsgBus: 19962 2025-03-31T16:04:21.542007Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488005161399819706:2056];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:21.542072Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0011a5/r3tmp/tmpWU00zu/pdisk_1.dat 2025-03-31T16:04:21.714001Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:21.745416Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:21.745502Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:21.754713Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10568, node 2 2025-03-31T16:04:21.867231Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:21.867251Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:21.867259Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:21.867364Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19962 TClient is connected to server localhost:19962 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:22.425590Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:22.432103Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:04:22.442868Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:04:22.541717Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-31T16:04:22.704740Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:22.789567Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:25.672866Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005178579690652:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:25.672949Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:25.720176Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:04:25.762537Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:04:25.814704Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:04:25.889479Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:04:25.940647Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:04:26.004340Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:04:26.139261Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005182874658466:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:26.139382Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:26.139910Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005182874658471:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:26.145080Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:04:26.188350Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488005182874658473:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:04:26.254442Z node 2 :TX_PROXY ERROR: Actor# [2:7488005182874658529:3449] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:04:26.542841Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488005161399819706:2056];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:26.542941Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:27.742361Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::InnerJoinSelect [GOOD] Test command err: Trying to start YDB, gRPC: 20823, MsgBus: 23947 2025-03-31T16:04:20.359890Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005158161980111:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:20.359952Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001856/r3tmp/tmp445aG2/pdisk_1.dat 2025-03-31T16:04:20.939969Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:20.940081Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:20.941515Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20823, node 1 2025-03-31T16:04:21.017556Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:21.242981Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:21.243010Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:21.243023Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:21.243173Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23947 TClient is connected to server localhost:23947 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:22.401341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:22.442516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:22.718067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:23.172016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:23.261007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:25.359343Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488005158161980111:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:25.359405Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:26.036141Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005183931785664:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:26.036249Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:26.435218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:04:26.545653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:04:26.597761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:04:26.644419Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:04:26.738673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:04:26.826223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:04:26.936731Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005183931786197:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:26.936881Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:26.937033Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005183931786202:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:26.940694Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:04:26.963327Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005183931786204:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:04:27.023329Z node 1 :TX_PROXY ERROR: Actor# [1:7488005188226753554:3462] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpImmediateEffects::InsertExistingKey-UseSink [GOOD] >> KqpInplaceUpdate::Negative_SingleRowListFromRange-UseSink [GOOD] >> KqpLimits::CancelAfterRoTx [GOOD] >> KqpLimits::CancelAfterRoTxWithFollowerStreamLookup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamInnerJoinSelect [GOOD] Test command err: Trying to start YDB, gRPC: 2435, MsgBus: 15962 2025-03-31T16:04:20.882001Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005156167049316:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:20.882033Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001852/r3tmp/tmpnm88fR/pdisk_1.dat 2025-03-31T16:04:21.900609Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:21.930473Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:21.930560Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:21.930928Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:04:21.934702Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2435, node 1 2025-03-31T16:04:22.091927Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:22.091955Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:22.091969Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:22.092068Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15962 TClient is connected to server localhost:15962 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:23.029390Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:23.079807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:23.441713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:23.705499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:23.842333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:25.886179Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488005156167049316:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:25.886269Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:26.437922Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005181936854868:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:26.438046Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:26.873196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:04:26.962192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:04:27.029260Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:04:27.091354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:04:27.187533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:04:27.303270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:04:27.374565Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005186231822691:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:27.374676Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:27.379247Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005186231822696:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:27.387965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:04:27.409051Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005186231822698:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:04:27.499944Z node 1 :TX_PROXY ERROR: Actor# [1:7488005186231822756:3463] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:04:29.587068Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437069570, txId: 281474976710671] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::ReadSuccess [GOOD] Test command err: Trying to start YDB, gRPC: 11605, MsgBus: 6828 2025-03-31T16:04:21.566552Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005161194113787:2208];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:21.567229Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001846/r3tmp/tmpq6qjiV/pdisk_1.dat 2025-03-31T16:04:22.186927Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:22.189116Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:22.189239Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:22.198707Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11605, node 1 2025-03-31T16:04:22.430839Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:22.430859Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:22.430869Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:22.430984Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6828 TClient is connected to server localhost:6828 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:23.534259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:04:23.564397Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:04:23.579515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:23.847900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:24.119579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:24.237213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:26.331202Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005182668951887:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:26.331312Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:26.517756Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488005161194113787:2208];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:26.517836Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:26.879894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:04:26.932802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:04:27.011491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:04:27.054008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:04:27.103145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:04:27.191233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:04:27.303574Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005186963919712:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:27.303686Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:27.303984Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005186963919717:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:27.309642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:04:27.332390Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005186963919719:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:04:27.419710Z node 1 :TX_PROXY ERROR: Actor# [1:7488005186963919774:3459] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:04:28.586574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:04:28.813016Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710673. Ctx: { TraceId: 01jqpf4yb72mj18rzmdajtdsnt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODc3ODFlNGMtOTM1YTJmZDYtNDZhNDhhNDktOWI1MzUyMmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:04:28.826366Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437068811, txId: 281474976710672] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsRange3 [GOOD] Test command err: Trying to start YDB, gRPC: 6654, MsgBus: 31939 2025-03-31T16:04:21.939793Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005161393121492:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:21.940377Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001841/r3tmp/tmptaFebk/pdisk_1.dat 2025-03-31T16:04:22.766114Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:22.807268Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:22.807373Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:22.813011Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6654, node 1 2025-03-31T16:04:23.027624Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:23.027645Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:23.027658Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:23.027769Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31939 TClient is connected to server localhost:31939 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:24.119263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:24.177715Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:04:24.193983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:24.459879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:24.788832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:24.925597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:26.913831Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488005161393121492:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:26.913881Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:27.315942Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005187162926908:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:27.316046Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:27.781709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:04:27.822646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:04:27.897749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:04:27.949147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:04:27.999717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:04:28.057684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:04:28.156216Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005191457894724:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:28.156290Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:28.156696Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005191457894729:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:28.160966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:04:28.188747Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005191457894732:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:04:28.255702Z node 1 :TX_PROXY ERROR: Actor# [1:7488005191457894788:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:04:29.772592Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437069759, txId: 281474976710671] shutting down >> KqpSysColV0::InnerJoinSelectAsterisk ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::InteractiveTxWithWriteAtTheEnd [GOOD] Test command err: Trying to start YDB, gRPC: 27899, MsgBus: 25275 2025-03-31T16:04:12.920055Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005124130991947:2069];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:12.936096Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0011aa/r3tmp/tmpDFtDS5/pdisk_1.dat 2025-03-31T16:04:13.549159Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:13.555675Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:13.555812Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:13.562281Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27899, node 1 2025-03-31T16:04:13.807591Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:13.807615Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:13.807624Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:13.807715Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25275 TClient is connected to server localhost:25275 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:04:14.620552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:04:14.673073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:04:14.923474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:15.146663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:15.242234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:17.384449Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005145605830211:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:17.384601Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:17.828069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:04:17.889182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:04:17.933354Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488005124130991947:2069];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:17.933451Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:17.980661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:04:18.040514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:04:18.097570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:04:18.165631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:04:18.274407Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005149900798030:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:18.274504Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:18.275005Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005149900798035:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:18.292251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:04:18.346793Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005149900798037:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:04:18.421122Z node 1 :TX_PROXY ERROR: Actor# [1:7488005149900798096:3459] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:04:19.936812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 26757, MsgBus: 26163 2025-03-31T16:04:22.424856Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488005165472381370:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:22.425253Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0011aa/r3tmp/tmp4dWaiR/pdisk_1.dat 2025-03-31T16:04:22.678428Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:22.804762Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:22.804853Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:22.808977Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26757, node 2 2025-03-31T16:04:22.966234Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:22.966255Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:22.966267Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:22.966385Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26163 TClient is connected to server localhost:26163 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:23.736601Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
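The repeated KQP_WORKLOAD_SERVICE NOT_FOUND warnings above are a bootstrap race rather than a failure: on a fresh database the `default` resource pool under /Root/.metadata/workload_manager/pools is created lazily, so pool fetchers that race the creating transaction report NOT_FOUND, TPoolCreatorActor schedules a doublechecking retry, and the later TX_PROXY "path exist, request accepts it" message is that retry landing on the already-created pool. For reference, a pool can also be declared explicitly; a minimal YQL sketch, assuming YDB's documented workload-manager DDL (the pool name and limits are illustrative, not from the log):

    CREATE RESOURCE POOL demo_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10,  -- queries allowed to run concurrently
        QUEUE_SIZE = 100              -- queued queries before new ones are rejected
    );
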
2025-03-31T16:04:23.756098Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:23.855495Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:24.053760Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:24.205917Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:27.127226Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005186947219472:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:27.127318Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:27.199274Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:04:27.314512Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:04:27.365450Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:04:27.412448Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:04:27.422111Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488005165472381370:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:27.426074Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:27.458960Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:04:27.555050Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:04:27.645368Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005186947219996:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:27.645457Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:27.645858Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005186947220001:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:27.650336Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:04:27.677118Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-31T16:04:27.678422Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488005186947220003:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:04:27.780826Z node 2 :TX_PROXY ERROR: Actor# [2:7488005186947220058:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:04:28.968704Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 >> KqpSystemView::Join ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest >> TTopicYqlTest::BadRequests [GOOD] Test command err: 2025-03-31T15:59:47.113960Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488003983998047395:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:59:47.114001Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T15:59:47.277887Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488003986246921066:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:59:47.277957Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T15:59:47.689011Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-31T15:59:47.696032Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0009f0/r3tmp/tmprjqP6j/pdisk_1.dat 2025-03-31T15:59:48.259184Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:59:48.303715Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T15:59:48.464911Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:48.482138Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:59:48.482223Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:59:48.491442Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:59:48.491495Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:59:48.513890Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T15:59:48.526743Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:59:48.527002Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8429, node 1 2025-03-31T15:59:48.942933Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: 
/home/runner/.ya/build/build_root/0l5c/0009f0/r3tmp/yandex6MHJ7r.tmp 2025-03-31T15:59:48.942971Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/0l5c/0009f0/r3tmp/yandex6MHJ7r.tmp 2025-03-31T15:59:48.943170Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/0l5c/0009f0/r3tmp/yandex6MHJ7r.tmp 2025-03-31T15:59:48.943507Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T15:59:49.122777Z INFO: TTestServer started on Port 28829 GrpcPort 8429 TClient is connected to server localhost:28829 PQClient connected to localhost:8429 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:59:49.530646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T15:59:49.628279Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T15:59:50.024156Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 2025-03-31T15:59:52.115427Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488003983998047395:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:59:52.115535Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:59:52.276902Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488003986246921066:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:59:52.276984Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T15:59:53.040062Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488004007721757938:2315], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:53.040166Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488004012016725244:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:53.040226Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:53.047746Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004009767852276:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:53.048058Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:53.048886Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004009767852304:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T15:59:53.053910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-03-31T15:59:53.079904Z node 2 :TX_PROXY ERROR: Actor# [2:7488004012016725251:2174] txid# 281474976720657, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-31T15:59:53.120128Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488004009767852306:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-03-31T15:59:53.123351Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488004012016725250:2319], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-03-31T15:59:53.184615Z node 1 :TX_PROXY ERROR: Actor# [1:7488004009767852398:2776] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:59:53.224854Z node 2 :TX_PROXY ERROR: Actor# [2:7488004012016725277:2180] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T15:59:53.461142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T15:59:53.479599Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488004009767852408:2352], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T15:59:53.483886Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7488004012016725284:2323], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T15:59:53.480385Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODk3NWE2MjMtZDM1YzQzMWItMzJmODJjZDAtYjAyZTAxZjg=, ActorId: [1:7488004009767852274:2339], ActorState: ExecuteState, TraceId: 01jqpewh655rkcx301yzntsa67, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T15:59:53.485458Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=N2YwNjAzNDctZWUyZmRmZDAtMTQyZDNmNWUtYzczZGY2YjU=, ActorId: [2:7488004007721757936:2314], ActorState: ExecuteState, TraceId: 01jqpewh54fky279xzbcwtfr6s, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T15:59:53.485854Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR Consu ... ctor { RawX1: 7488005107285246886 RawX2: 107374184607 } Partitions { Partition { PartitionId: 0 } } 2025-03-31T16:04:23.223490Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-03-31T16:04:23.231006Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-03-31T16:04:23.231066Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Try execute txs with state CALCULATED 2025-03-31T16:04:23.231111Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710678, State CALCULATED 2025-03-31T16:04:23.231142Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710678 State CALCULATED FrontTxId 281474976710678 2025-03-31T16:04:23.231166Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710678, NewState WAIT_RS 2025-03-31T16:04:23.231203Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710678 moved from CALCULATED to WAIT_RS 2025-03-31T16:04:23.231272Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Send TEvTxProcessing::TEvReadSet to 0 receivers. Wait TEvTxProcessing::TEvReadSet from 0 senders. 
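The SCHEME_ERROR in this test's output comes from the PERSQUEUE cluster tracker querying the legacy V2 config table, which does not exist on a freshly bootstrapped test database. The compiled statement itself is not printed, but any read of that path fails the same way; a YQL sketch using the table path from the log:

    SELECT * FROM `/Root/PQ/Config/V2/Cluster`;
    -- -> SCHEME_ERROR: "Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' ...", issue code 2003
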
2025-03-31T16:04:23.231313Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] HaveParticipantsDecision 1 2025-03-31T16:04:23.231392Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710678, NewState EXECUTING 2025-03-31T16:04:23.231419Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710678 moved from WAIT_RS to EXECUTING 2025-03-31T16:04:23.231439Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Received 0, Expected 1 2025-03-31T16:04:23.231523Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1743437063235, TxId 281474976710678 2025-03-31T16:04:23.232246Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-31T16:04:23.232295Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-31T16:04:23.232326Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-31T16:04:23.232357Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] i0000000000 2025-03-31T16:04:23.232372Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] I0000000000 2025-03-31T16:04:23.232388Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] m0000000000cc1 2025-03-31T16:04:23.232412Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] m0000000000uc1 2025-03-31T16:04:23.232429Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] m0000000000cc2 2025-03-31T16:04:23.232444Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] m0000000000uc2 2025-03-31T16:04:23.232459Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] _config_0 2025-03-31T16:04:23.232493Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-31T16:04:23.232530Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] =========================== 2025-03-31T16:04:23.232588Z node 26 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2025-03-31T16:04:23.241389Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-03-31T16:04:23.241604Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvTxCommitDone Step 1743437063235, TxId 281474976710678, Partition 0 2025-03-31T16:04:23.241631Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Try execute txs with state EXECUTING 2025-03-31T16:04:23.241653Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710678, State EXECUTING 2025-03-31T16:04:23.241677Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710678 State EXECUTING FrontTxId 281474976710678 2025-03-31T16:04:23.241694Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Received 1, Expected 1 2025-03-31T16:04:23.241717Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId: 281474976710678 send TEvPersQueue::TEvProposeTransactionResult(COMPLETE) 2025-03-31T16:04:23.241747Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] complete TxId 281474976710678 2025-03-31T16:04:23.242206Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Apply new config PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "rt3.dc1--legacy--topic1" Version: 0 LocalDC: true RequireAuthWrite: true RequireAuthRead: true Producer: "legacy" Ident: "legacy" Topic: "topic1" DC: "dc1" FormatVersion: 0 Codecs { } TopicPath: "/Root/PQ/rt3.dc1--legacy--topic1" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } ReadRuleGenerations: 0 ReadRuleGenerations: 0 AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } Consumers { Name: "c1" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 } Consumers { Name: "c2" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 } 2025-03-31T16:04:23.242376Z node 26 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:04:23.242519Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] delete partitions for TxId 281474976710678 2025-03-31T16:04:23.242547Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710678, NewState EXECUTED 2025-03-31T16:04:23.242586Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710678 moved from EXECUTING to EXECUTED 2025-03-31T16:04:23.243173Z node 26 :PERSQUEUE DEBUG: [TxId: 281474976710678] save tx TxId: 281474976710678 State: EXECUTED MinStep: 1743437063000 MaxStep: 18446744073709551615 Step: 1743437063235 Predicate: true Kind: KIND_CONFIG TabletConfig { PartitionConfig { 
MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "rt3.dc1--legacy--topic1" Version: 0 LocalDC: true RequireAuthWrite: true RequireAuthRead: true Producer: "legacy" Ident: "legacy" Topic: "topic1" DC: "dc1" FormatVersion: 0 Codecs { } TopicPath: "/Root/PQ/rt3.dc1--legacy--topic1" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } ReadRuleGenerations: 0 ReadRuleGenerations: 0 AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } Consumers { Name: "c1" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 } Consumers { Name: "c2" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 } } BootstrapConfig { } SourceActor { RawX1: 7488005107285246886 RawX2: 107374184607 } Partitions { Partition { PartitionId: 0 } } 2025-03-31T16:04:23.243509Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-03-31T16:04:23.256325Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-03-31T16:04:23.256378Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Try execute txs with state EXECUTED 2025-03-31T16:04:23.256408Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710678, State EXECUTED 2025-03-31T16:04:23.256440Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710678 State EXECUTED FrontTxId 281474976710678 2025-03-31T16:04:23.256467Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TPersQueue::SendEvReadSetAckToSenders 2025-03-31T16:04:23.256495Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710678, NewState WAIT_RS_ACKS 2025-03-31T16:04:23.256529Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710678 moved from EXECUTED to WAIT_RS_ACKS 2025-03-31T16:04:23.256563Z node 26 :PERSQUEUE DEBUG: [TxId: 281474976710678] PredicateAcks: 0/0 2025-03-31T16:04:23.256574Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] HaveAllRecipientsReceive 1, AllSupportivePartitionsHaveBeenDeleted 1 2025-03-31T16:04:23.256595Z node 26 :PERSQUEUE DEBUG: [TxId: 281474976710678] PredicateAcks: 0/0 2025-03-31T16:04:23.256622Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] add an TxId 281474976710678 to the list for deletion 2025-03-31T16:04:23.256656Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710678, NewState DELETING 2025-03-31T16:04:23.256697Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] delete key for TxId 281474976710678 2025-03-31T16:04:23.256783Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-03-31T16:04:23.264974Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle 
TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-03-31T16:04:23.265037Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Try execute txs with state DELETING 2025-03-31T16:04:23.265066Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710678, State DELETING 2025-03-31T16:04:23.265115Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] delete TxId 281474976710678 2025-03-31T16:04:23.839245Z node 25 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:04:23.839291Z node 25 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:25.149426Z node 25 :KQP_EXECUTER ERROR: ActorId: [25:7488005180299693062:2534] TxId: 281474976710683. Ctx: { TraceId: 01jqpf4txjd04sytca5a0t9wsv, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=25&id=OTczODNhMzItY2U3ZDhlOGYtMjNjYmFhNGItMWNiNjkwZTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 26 2025-03-31T16:04:25.149591Z node 25 :KQP_COMPUTE ERROR: SelfId: [25:7488005180299693066:2534], TxId: 281474976710683, task: 2. Ctx: { TraceId : 01jqpf4txjd04sytca5a0t9wsv. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=25&id=OTczODNhMzItY2U3ZDhlOGYtMjNjYmFhNGItMWNiNjkwZTM=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [25:7488005180299693062:2534], status: UNAVAILABLE, reason: {
: Error: Terminate execution } |74.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut |74.8%| [LD] {RESULT} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut |74.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut |74.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut |74.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut |74.8%| [LD] {RESULT} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::InsertExistingKey-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 8069, MsgBus: 9179 2025-03-31T16:04:14.148802Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005130195732484:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:14.148897Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0011a4/r3tmp/tmpqfvFWQ/pdisk_1.dat 2025-03-31T16:04:14.843263Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:14.847268Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:14.847379Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:14.851638Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8069, node 1 2025-03-31T16:04:14.971662Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:14.971706Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:14.971720Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:14.971829Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9179 TClient is connected to server localhost:9179 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
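The PERSQUEUE trace earlier shows a single-participant KIND_CONFIG transaction (281474976710678) walking the tablet's transaction states CALCULATED -> WAIT_RS -> EXECUTING -> EXECUTED -> WAIT_RS_ACKS -> DELETING, persisting each transition via TEvKeyValue requests; with 0 readset senders and receivers, WAIT_RS is satisfied immediately. The applied config adds consumers c1 and c2 to rt3.dc1--legacy--topic1, which corresponds to topic DDL of roughly this shape (the original statement is not in the log, and the YQL path for the legacy rt3 topic is an assumption):

    ALTER TOPIC `/Root/PQ/rt3.dc1--legacy--topic1`
        ADD CONSUMER c1,
        ADD CONSUMER c2;
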
2025-03-31T16:04:15.700103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:15.731207Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:04:15.751327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:04:15.955066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-31T16:04:16.199411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:16.314857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:18.606101Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005147375603439:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:18.606268Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:19.048616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:04:19.107349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:04:19.152482Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488005130195732484:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:19.152548Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:19.187315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:04:19.259516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:04:19.309605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:04:19.397189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:04:19.492818Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005151670571259:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:19.492893Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:19.493186Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005151670571264:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:19.497080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:04:19.514699Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-31T16:04:19.515355Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005151670571266:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:04:19.603786Z node 1 :TX_PROXY ERROR: Actor# [1:7488005151670571324:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:04:20.922942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:04:21.516798Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=4; 2025-03-31T16:04:21.531805Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 4 at tablet 72075186224037919 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-03-31T16:04:21.531983Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 4 at tablet 72075186224037919 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-03-31T16:04:21.532259Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488005160260506438:2500], Table: `/Root/TestImmediateEffects` ([72057594046644480:16:1]), SessionActorId: [1:7488005155965538917:2500]Got CONSTRAINT VIOLATION for table `/Root/TestImmediateEffects`. ShardID=72075186224037919, Sink=[1:7488005160260506438:2500].{
: Error: Duplicate keys have been found., code: 2012 } 2025-03-31T16:04:21.532732Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488005160260506417:2500], SessionActorId: [1:7488005155965538917:2500], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TestImmediateEffects`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[1:7488005155965538917:2500]. isRollback=0 2025-03-31T16:04:21.532997Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjI5Yjk1OGEtYTEzNzdjMjMtYTBkZGRmZmMtNTBhODU2ZjU=, ActorId: [1:7488005155965538917:2500], ActorState: ExecuteState, TraceId: 01jqpf4pzy80zft7tv7ev7h637, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7488005160260506427:2500] from: [1:7488005160260506417:2500] 2025-03-31T16:04:21.533075Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7488005160260506427:2500] TxId: 281474976710674. Ctx: { TraceId: 01jqpf4pzy80zft7tv7ev7h637, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjI5Yjk1OGEtYTEzNzdjMjMtYTBkZGRmZmMtNTBhODU2ZjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/TestImmediateEffects`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-03-31T16:04:21.535014Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjI5Yjk1OGEtYTEzNzdjMjMtYTBkZGRmZmMtNTBhODU2ZjU=, ActorId: [1:7488005155965538917:2500], ActorState: ExecuteState, TraceId: 01jqpf4pzy80zft7tv7ev7h637, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 17873, MsgBus: 2297 2025-03-31T16:04:22.601664Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488005166711505967:2198];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:22.601711Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0011a4/r3tmp/tmpAxUdkx/pdisk_1.dat 2025-03-31T16:04:23.072107Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:23.129280Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:23.129379Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:23.132255Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17873, node 2 2025-03-31T16:04:23.314941Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:23.314961Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:23.314968Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:23.315069Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2297 TClient is connected to server localhost:2297 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:24.071528Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:24.080236Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:04:24.096830Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
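The PRECONDITION_FAILED sequence earlier is exactly the scenario KqpImmediateEffects::InsertExistingKey exercises: an INSERT targets a primary key that already exists, the datashard rejects the prepare with STATUS_CONSTRAINT_VIOLATION ("Duplicate keys have been found.", issue code 2012), and KQP surfaces it as "Constraint violated. Table: `/Root/TestImmediateEffects`". A minimal repro sketch (table path from the log; the column set and types are assumed):

    -- assumed schema:
    -- CREATE TABLE `/Root/TestImmediateEffects` (Key Uint64, Value Utf8, PRIMARY KEY (Key));
    INSERT INTO `/Root/TestImmediateEffects` (Key, Value) VALUES (1u, "a");
    INSERT INTO `/Root/TestImmediateEffects` (Key, Value) VALUES (1u, "b");
    -- second INSERT fails with code 2012; UPSERT would overwrite instead of failing
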
2025-03-31T16:04:24.211365Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:24.517047Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:24.657589Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:27.604982Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488005166711505967:2198];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:27.605068Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:27.860093Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005188186344082:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:27.860174Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:27.902052Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:04:27.955975Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:04:28.006431Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:04:28.053773Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:04:28.126067Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:04:28.185442Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:04:28.267525Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005192481311892:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:28.267612Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:28.268076Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005192481311897:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:28.272326Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:04:28.293675Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488005192481311899:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:04:28.372787Z node 2 :TX_PROXY ERROR: Actor# [2:7488005192481311955:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:04:29.591036Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:04:30.683809Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7488005201071247110:2526], TxId: 281474976710676, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=OTE4MTEwMmQtYmQ1MGI5ZGUtZWRjZjQ4MzctNjdhZTZmNWE=. CustomerSuppliedId : . TraceId : 01jqpf4zte4r8k1djy6xewxg2v. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-03-31T16:04:30.694706Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7488005201071247111:2527], TxId: 281474976710676, task: 2. Ctx: { TraceId : 01jqpf4zte4r8k1djy6xewxg2v. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=OTE4MTEwMmQtYmQ1MGI5ZGUtZWRjZjQ4MzctNjdhZTZmNWE=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7488005201071247107:2491], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-03-31T16:04:30.695326Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OTE4MTEwMmQtYmQ1MGI5ZGUtZWRjZjQ4MzctNjdhZTZmNWE=, ActorId: [2:7488005196776279521:2491], ActorState: ExecuteState, TraceId: 01jqpf4zte4r8k1djy6xewxg2v, Create QueryResponse for error on request, msg: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::Negative_SingleRowListFromRange-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 64647, MsgBus: 6583 2025-03-31T16:04:13.176935Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005129113962553:2245];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:13.177149Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0011a9/r3tmp/tmpsD4rRz/pdisk_1.dat 2025-03-31T16:04:13.763153Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:13.763263Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:13.851057Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:04:13.867499Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64647, node 1 2025-03-31T16:04:14.075667Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:14.075689Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:14.075695Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:14.075811Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6583 TClient is connected to server localhost:6583 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:14.982708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:15.026069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
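In the second run of that test (node 2) the duplicate write is caught inside the KQP compute task instead, surfacing as KIKIMR_CONSTRAINT_VIOLATION ("Conflict with existing key.", code 2012) before the transaction commits. Either way, the non-failing alternative on the application side is blind-write semantics (sketch, reusing the illustrative table above):

    UPSERT INTO `/Root/TestImmediateEffects` (Key, Value) VALUES (1u, "b");  -- overwrites, no error
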
2025-03-31T16:04:15.278610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:04:15.523715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:04:15.612999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:17.813038Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005146293833341:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:17.813153Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:18.171394Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488005129113962553:2245];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:18.171477Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:18.247379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:04:18.303569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:04:18.364403Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:04:18.445613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:04:18.495548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:04:18.554996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:04:18.616055Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005150588801158:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:18.616148Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:18.616541Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005150588801163:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:18.620826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:04:18.635182Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005150588801165:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:04:18.723834Z node 1 :TX_PROXY ERROR: Actor# [1:7488005150588801221:3458] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:04:20.191243Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 16605, MsgBus: 18590 2025-03-31T16:04:22.060795Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488005166127272929:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:22.060828Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0011a9/r3tmp/tmp3H0XLh/pdisk_1.dat 2025-03-31T16:04:22.306484Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:22.330346Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:22.330428Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:22.335890Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16605, node 2 2025-03-31T16:04:22.574675Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:22.574698Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:22.574706Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:22.574813Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18590 TClient is connected to server localhost:18590 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:23.728669Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
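[Editor's note] The recurring NOT_FOUND warnings for PoolId: default, followed by an ESchemeOpCreateResourcePool suboperation and a TPoolCreatorActor "doublechecking" retry, show the workload manager lazily creating /Root/.metadata/workload_manager/pools/default on first query. A hedged sketch of declaring a pool explicitly instead, via a scheme query from the Python SDK — the CREATE RESOURCE POOL parameter names here are assumptions, not confirmed by this log:

import ydb

driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")  # hypothetical
driver.wait(timeout=5)
pool = ydb.SessionPool(driver)

# Assumed parameter names; consult the YDB workload-manager docs for the
# exact option set supported by your server version.
create_pool = """
CREATE RESOURCE POOL my_pool WITH (
    CONCURRENT_QUERY_LIMIT = 10,
    QUEUE_SIZE = 100
);
"""
pool.retry_operation_sync(lambda session: session.execute_scheme(create_pool))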
2025-03-31T16:04:23.766582Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:23.952673Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:24.221883Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:24.350399Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:27.063308Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488005166127272929:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:27.063376Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:27.564031Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005187602111189:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:27.564138Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:27.636308Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:04:27.674185Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:04:27.712293Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:04:27.759216Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:04:27.834610Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:04:27.923001Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:04:28.008143Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005191897079007:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:28.008280Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:28.008560Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005191897079013:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:28.012619Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:04:28.031238Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488005191897079015:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:04:28.130216Z node 2 :TX_PROXY ERROR: Actor# [2:7488005191897079074:3457] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:04:29.790640Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 >> TBackupTests::ShouldSucceedOnLargeData[Zstd] [GOOD] |74.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut |74.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut |74.8%| [LD] {RESULT} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut |74.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::SelectRange [GOOD] >> YdbIndexTable::MultiShardTableOneIndex [GOOD] >> YdbIndexTable::MultiShardTableOneIndexDataColumn >> KqpSysColV1::StreamInnerJoinSelectAsterisk >> YdbYqlClient::DiscoveryLocationOverride |74.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut |74.8%| [LD] {RESULT} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut |74.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut >> YdbImport::Simple >> KqpSysColV0::SelectRowAsterisk [GOOD] >> TGRpcYdbTest::RemoveNotExistedDirectory |74.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors |74.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors |74.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors >> TGRpcNewCoordinationClient::SessionSemaphoreInfiniteTimeout >> YdbIndexTable::MultiShardTableOneIndexIndexOverlapDataColumn [GOOD] >> YdbIndexTable::MultiShardTableOneIndexPkOverlap >> KqpSystemView::FailNavigate [GOOD] >> KqpPg::InsertNoTargetColumns_NotOneSize-useSink [GOOD] >> KqpPg::InsertNoTargetColumns_Alter+useSink >> TPersQueueTest::EventBatching [GOOD] >> TPersQueueTest::DisableWrongSettings >> KqpImmediateEffects::MultipleEffectsWithIndex [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::SelectRange [GOOD] Test command err: Trying to start YDB, gRPC: 27800, MsgBus: 6292 2025-03-31T16:04:26.575607Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005183306928946:2129];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:26.575664Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00182f/r3tmp/tmpsTnjvL/pdisk_1.dat 2025-03-31T16:04:27.225284Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:27.226670Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:27.226771Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
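[Editor's note] Every "Test command err" block follows the same bring-up visible here: Hive reports the node moving Unknown -> Disconnected -> Connecting -> Connected, gRPC is enabled on a randomly chosen port, and the test client polls until WaitRootIsUp 'Root' succeeds. A hedged equivalent of that readiness wait with the YDB Python SDK, using the gRPC port this block reports (everything else assumed):

import ydb

# Port 27800 is the one this block's TServer::EnableGrpc line reports;
# /Root is the test database root.
driver = ydb.Driver(endpoint="grpc://localhost:27800", database="/Root")
try:
    # Rough equivalent of WaitRootIsUp: block until discovery succeeds
    # or the timeout expires.
    driver.wait(timeout=10)
    print("Root is up")
finally:
    driver.stop()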
2025-03-31T16:04:27.230895Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27800, node 1 2025-03-31T16:04:27.370946Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:27.370976Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:27.370986Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:27.371131Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6292 TClient is connected to server localhost:6292 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:28.149084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:28.192098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:28.353097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:28.522536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:28.607467Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:30.989662Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005200486799847:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:30.989784Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:31.575961Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488005183306928946:2129];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:31.576018Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:31.588089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:04:31.658535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:04:31.716973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:04:31.753803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:04:31.789449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:04:31.859964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:04:31.921580Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005204781767662:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:31.921673Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:31.921965Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005204781767667:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:31.926717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:04:31.946103Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005204781767669:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:04:32.041744Z node 1 :TX_PROXY ERROR: Actor# [1:7488005209076735023:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> YdbOlapStore::LogLast50 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnLargeData[Zstd] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:03:39.004568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:03:39.004671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:03:39.004714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:03:39.004752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:03:39.004814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:03:39.004847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:03:39.004900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:03:39.004976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:03:39.005668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:03:39.095026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:03:39.095131Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:39.100382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:03:39.100507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:03:39.100707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:03:39.105987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:03:39.106240Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:03:39.106901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:39.107212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:03:39.109426Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:39.110629Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:39.110702Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:39.110935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:03:39.110994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:39.111037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:03:39.111246Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:03:39.118908Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:03:39.251169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:03:39.251379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:39.251554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:03:39.251748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:03:39.251796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:39.254584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:39.254752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:03:39.254978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:39.255050Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:03:39.255129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:03:39.255199Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:03:39.261017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:39.261096Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:03:39.261138Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:03:39.267190Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:39.267269Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:39.267354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:39.267415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:03:39.271317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:03:39.273923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:03:39.274162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:03:39.275499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:39.275670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:03:39.275749Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:39.276084Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:03:39.276152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:39.276352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:03:39.276434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:03:39.279235Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:39.279295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:39.279504Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:39.279543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:03:39.279784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:39.279854Z node 1 
:FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:03:39.279974Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:03:39.280010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:39.280048Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:03:39.280078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:39.280119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:03:39.280262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:39.280306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:03:39.280338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:03:39.280426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:03:39.280482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:03:39.280517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:03:39.282660Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:03:39.282865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:03:39.282903Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
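[Editor's note] The REQUEST lines that follow trace the datashard backup export writing data_00.csv.zst to the test's in-process S3 mock as a multipart upload: one PUT per part carrying partNumber and uploadId (parts 99-101 are visible below; earlier parts precede this excerpt), then a closing POST ?uploadId=1 carrying the collected part ETags. The same protocol, sketched with boto3 against a hypothetical local endpoint and bucket:

import boto3

# Hypothetical endpoint/credentials/bucket; the test runs an S3 mock on a
# random port (23788 in this block).
s3 = boto3.client("s3", endpoint_url="http://localhost:23788",
                  aws_access_key_id="test", aws_secret_access_key="test")

mpu = s3.create_multipart_upload(Bucket="backup", Key="data_00.csv.zst")
parts = []
# A mock accepts tiny parts; real S3 enforces a 5 MiB minimum for every
# part except the last.
for number, chunk in enumerate((b"part-one", b"part-two"), start=1):
    resp = s3.upload_part(Bucket="backup", Key="data_00.csv.zst",
                          PartNumber=number, UploadId=mpu["UploadId"],
                          Body=chunk)
    parts.append({"ETag": resp["ETag"], "PartNumber": number})

# This completion call is the POST ?uploadId=... request seen below,
# with the per-part ETags echoed back in the body.
s3.complete_multipart_upload(Bucket="backup", Key="data_00.csv.zst",
                             UploadId=mpu["UploadId"],
                             MultipartUpload={"Parts": parts})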
: } REQUEST: PUT /data_00.csv.zst?partNumber=99&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:23788 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: A7D8A4E0-AF16-4F96-A983-654139A797B0 amz-sdk-request: attempt=1 content-length: 55 content-md5: Ry5TonSXZhxkAEEJCUBcTg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv.zst / partNumber=99&uploadId=1 / 55 2025-03-31T16:04:32.997272Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3458:5422], result# UploadPartResult { ETag: 472e53a27497661c6400410909405c4e } 2025-03-31T16:04:32.997519Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:3457:5421] 2025-03-31T16:04:32.997711Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3458:5422], sender# [1:3457:5421], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 0 Checksum: } REQUEST: PUT /data_00.csv.zst?partNumber=100&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:23788 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 69140F22-D2BE-46AD-B921-93695067E880 amz-sdk-request: attempt=1 content-length: 55 content-md5: B5SOCmjwb1RI3tHamcoRHA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv.zst / partNumber=100&uploadId=1 / 55 2025-03-31T16:04:33.005144Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3458:5422], result# UploadPartResult { ETag: 07948e0a68f06f5448ded1da99ca111c } 2025-03-31T16:04:33.005585Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:3457:5421] 2025-03-31T16:04:33.005738Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3458:5422], sender# [1:3457:5421], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } REQUEST: PUT /data_00.csv.zst?partNumber=101&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:23788 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: CD89D9A0-2486-4408-A149-E1E76E310C8A amz-sdk-request: attempt=1 content-length: 0 content-md5: 1B2M2Y8AsgTpgAmY7PhCfg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv.zst / partNumber=101&uploadId=1 / 0 2025-03-31T16:04:33.013207Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3458:5422], result# UploadPartResult { ETag: d41d8cd98f00b204e9800998ecf8427e } 2025-03-31T16:04:33.013289Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:3458:5422], success# 1, error# , multipart# 1, uploadId# 1 2025-03-31T16:04:33.028783Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvDataShard::TEvS3Upload: self# [1:3458:5422], upload# { Id: 1 Status: Complete Error: (empty maybe) Parts: 
[f8f51a1e4a70db44fa91cc2ab9680824,9eba675fd7f187274786dff2f47292df,921325fb6b8811df3d06a44dbe1f8523,4eeb6b90e8e61075275bd8a42f56bd69,2840a487abe8cb9502b3d9c8a8e1c942,607d8f6e3b235a360d63796efd3a51c2,ed22e08df7fb8840f7cabc779cc86885,efeff2c7731061edd9a39059cc078045,4af01cb3455932f28e3bba713dcd57c9,dc94d36ecf3b36d183d75c84b9b2fac6,e2ce425dd2bb582abcc13d0d714c3554,b71e46686939d2cdf046520dd2774281,ab731a82a161e5e044b24e895a1713d6,1df51aaec89711e13a6f95c13113e36c,b6066b2ed343831b1b0ee0076179981e,332d34d77adc2b024a33d87e07d4233f,cf0093cc99590a0e8f9c199ed6deca07,8cc923ec76224e69263ac93b7bfabd30,690d66897e0780f2dfe3614e5a659a22,7502aae0ec253663b1cbfdc8ede92ab9,7d2c6f728ee0c12097dfe5441970b946,5fc7b9b675e0a125eea67cf05f82627f,fc8c5faa99cc7f4ce7ca320f8e7adb58,8e305c5aca758683ff25407a7bbd9220,181bce9c6393e22a0ac359a7b45d8187,639677548f0a8b776a6db92f44d96505,390ff8f57cfa4c04bfbed0d7a63c90e8,3dd76756e6558fd6c8c918210f7dc136,a3f5254fdad3ded54edef910e704c151,e9186373f80dbaa55dd04d07621de277,8898b965060a431b499261ec0cd3cee3,3ed51c736e64defe04980ce328b17aa4,bb0e45971888796588c12ea1c1bec162,e2b3defa84005d3892986ca6894b811f,656c7c809c8c8485f6e91892591cd284,779c6827126f255bde25ae242bf4c8ff,8883fc9b073e683558f1231c5f2142d0,19390a0e3340bcb6ccfe866a790f05cb,305182d3e9745fba3aad1973bb1bfc93,002819d72a6dc7954ecc1bcd2bd20254,325c6bc3cdd6fd83083cf0126c606218,b86932903843b9626e80bd9ccb5d0571,b5054116537a7c467bdb488c9d67dee7,fc3a45bd17a00b147e4f9c55bc2493da,1118e2f41e8839211163250796a65dce,b403ff17c2c269a79201a03ce439dc2a,88f2692ee439cfadef1cd21d58aac8d3,e5bef12f89b101af84d52299a5867d99,ed613335180c53f69d450ef8b176a4d5,150fd7dcdc86eb38c7f821ff4698d8bc,a0c18bf08acc6ebecac04a2520efee9b,e8463d7ce8f502d1575a433c1b30a9af,f123e0fc879e2fdc2c3e2f698fc4176d,d7ab79d73e4648e0a2bf8dec3a19c019,4e74b82f6a8ea7fad8790ee7dfcdb76e,f72bb1d8aa0f5c9265bae10a3784d8e8,924b317371d16363a37962b17a2ae4bb,7214b458c7e25c791e54bd430b835a6e,e79dba1b56122372af3fe7b06ea91bda,6aae345b94d78fc7c1ed0b8697cf5e62,fd3636ed699facb5f0c12f81741cabc5,2c4a198408c3eb9577fcd339ca62c539,59fbf761f9b7574b65fa6877b167bb8c,14f9f5cfdf3a6c33c577a54429b19cb6,c6d078b3be9cd7943e8145fd982baeef,198f55ae25539fbd54a4a6075beac2d1,939123b44e362c76a151a85af0247fb7,0147f8bd741be7780cbc900b6f4b0899,43453200aeaf201420737354cd73cfe4,de26d1339779fe0c538d01d5963fd423,5c903650e719f959dc9f37ea360c6319,23607b3f36e0a2abae7f1ed8e38596f3,0db9af920c6d1cf868e470bf7a349747,aed6ac19c60d08500582eea9dadcdfee,3f4e37ddd3e2e56a725323fad4d85cf6,942b269af420b4277d025cea489dcb25,89eddc25ba615b6cf09b9cd9a11a16bb,1d8e7f0613dc1919ee90133c468380bd,8bf1e4c1266d8437c1bd85e0fca6640a,e9eabcf5b61cf257f530b156dbd77a88,411f1661ae7650d2144e8c6f8a33b28f,6706ec5b8771e555779d5cbeca41aa75,b3a33ef21a8224ddc78a52e8d7ca8357,58749d344f42c192e572eda4ee66fb01,381aeb5ee3014e2c0fd9b85bd59ce005,9aed2297cd10dce10d68de3ff1830b42,be88e095fc3a13708b714db03b1f2744,5628e81ee17fb22fc828ed1b2169578b,a1cfb563fa4af884fe02ced05c26c881,fc602b8ee2e9746fb52823f8fd1f0f28,a1de256e94c7baa9b8ab905c892d1a14,6bff895b0b5f3552ad4bdc61b0d24148,fcba1d258a8651d831767b42e010e439,bef6e3d7088e671809fe584531f96971,f0b489242271d11200dbdbc78e4ce715,372d2d6877fff7c04433e492ad4dbd45,32191cf1972dcccd59c0b5a8b53d4f23,25928b7997b97ac58f18fbbe589573e8,472e53a27497661c6400410909405c4e,07948e0a68f06f5448ded1da99ca111c,d41d8cd98f00b204e9800998ecf8427e] } REQUEST: POST /data_00.csv.zst?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:23788 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 
36C27866-4E75-4A74-9912-017E8F43CA56 amz-sdk-request: attempt=1 content-length: 11529 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /data_00.csv.zst / uploadId=1 2025-03-31T16:04:33.083293Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvCompleteMultipartUploadResponse: self# [1:3458:5422], result# CompleteMultipartUploadResult { Bucket: Key: data_00.csv.zst ETag: c902b621cdd1ee89b9f1c4e6c36e6e45 } 2025-03-31T16:04:33.083769Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:3457:5421], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-03-31T16:04:33.144265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 305 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-03-31T16:04:33.144347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-03-31T16:04:33.144523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 305 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-03-31T16:04:33.144635Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 305 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-03-31T16:04:33.144702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-31T16:04:33.144748Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:04:33.144789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-03-31T16:04:33.144831Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-03-31T16:04:33.145034Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:04:33.158068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:04:33.158752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:04:33.158816Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-03-31T16:04:33.158929Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: Part operation is done id#102:0 progress is 1/1 2025-03-31T16:04:33.158966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T16:04:33.159011Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-31T16:04:33.159043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T16:04:33.159108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-03-31T16:04:33.159212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:333:2312] message: TxId: 102 2025-03-31T16:04:33.159266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T16:04:33.159308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-03-31T16:04:33.159347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-03-31T16:04:33.159469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-31T16:04:33.169674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-31T16:04:33.169750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:3443:5408] TestWaitNotification: OK eventTxId 102 >> YdbQueryService::TestCreateAndAttachSession >> TBackupTests::ShouldSucceedOnLargeData[Raw] [GOOD] >> YdbYqlClient::BuildInfo >> YdbS3Internal::TestS3Listing >> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWithAsyncIndex [GOOD] >> AsyncIndexChangeExchange::SenderShouldShakeHandsOnce ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::SelectRowAsterisk [GOOD] Test command err: Trying to start YDB, gRPC: 12923, MsgBus: 6133 2025-03-31T16:04:26.746988Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005183390059471:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:26.747543Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001833/r3tmp/tmpiMicVN/pdisk_1.dat 2025-03-31T16:04:27.405376Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:27.405504Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:27.408701Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:04:27.438532Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12923, node 1 2025-03-31T16:04:27.679812Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:27.679839Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:27.679850Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:27.679982Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6133 TClient is 
connected to server localhost:6133 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:28.740305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:28.808253Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:04:28.816822Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:29.173762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:29.605610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:04:29.715464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:04:31.747201Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488005183390059471:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:31.795767Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:32.067378Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005209159865038:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:32.067499Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:32.665508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:04:32.723962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:04:32.804143Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:04:32.851742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:04:32.895287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:04:32.948521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:04:33.035364Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005213454832854:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:33.035441Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:33.035707Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005213454832859:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:33.043595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:04:33.058951Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005213454832861:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:04:33.145260Z node 1 :TX_PROXY ERROR: Actor# [1:7488005213454832920:3464] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpSysColV0::SelectRowById [GOOD] >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncQueryClient >> BackupRestore::RestoreViewReferenceTable [GOOD] >> BackupRestore::RestoreViewToDifferentDatabase ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::FailNavigate [GOOD] Test command err: Trying to start YDB, gRPC: 19140, MsgBus: 2949 2025-03-31T16:04:27.545783Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005186261215646:2208];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:27.546305Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001827/r3tmp/tmpaxioum/pdisk_1.dat 2025-03-31T16:04:28.210157Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:28.210282Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:28.213468Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:04:28.227449Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19140, node 1 2025-03-31T16:04:28.478568Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:28.478608Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:28.478617Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:28.478748Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2949 TClient is connected to server localhost:2949 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
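[Editor's note] A pattern repeated in every block above: TPoolCreatorActor schedules a retry after "Transaction ... completed, doublechecking", and the retried create comes back from TX_PROXY as "path exist, request accepts it" — the creation is treated as idempotent, so a race between two creators resolves cleanly. A generic sketch of that create-or-accept-existing pattern in plain Python; all names here are illustrative, not YDB APIs:

import time

class AlreadyExists(Exception):
    """Stand-in for the 'path exist, request accepts it' outcome."""

def create_idempotent(create, retries=3, delay=0.1):
    """Run a create operation, treating 'already exists' as success."""
    for attempt in range(retries):
        try:
            return create()
        except AlreadyExists:
            # Another actor won the race; the object exists, which is
            # all we wanted -- accept and stop retrying.
            return None
        except TimeoutError:
            # Outcome unknown (the "doublechecking" case): back off and
            # let the next attempt observe success or AlreadyExists.
            time.sleep(delay * (attempt + 1))
    raise RuntimeError("create did not converge")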
2025-03-31T16:04:29.407473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:29.460835Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:04:29.464922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:04:29.472912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:29.672835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:29.870636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:30.017155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:32.206215Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005207736053755:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:32.206351Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:32.534889Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488005186261215646:2208];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:32.534964Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:32.693622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:04:32.768636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:04:32.816580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:04:32.861669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:04:32.918101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:04:33.003287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-31T16:04:33.121057Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005212031021579:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:33.121155Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:33.121434Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005212031021584:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:33.125950Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-03-31T16:04:33.148000Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710669, at schemeshard: 72057594046644480 2025-03-31T16:04:33.151499Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005212031021586:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-03-31T16:04:33.231972Z node 1 :TX_PROXY ERROR: Actor# [1:7488005212031021644:3463] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:04:34.737453Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:7488005216325989252:3673], for# user0@builtin, access# DescribeSchema 2025-03-31T16:04:34.737489Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:7488005216325989252:3673], for# user0@builtin, access# DescribeSchema 2025-03-31T16:04:34.807305Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488005216325989249:2507], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: KiReadTable!
:2:13: Error: Cannot find table 'db.[/Root/.sys/partition_stats]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:04:34.808568Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjI1Zjg4MGEtZThlNmQ1OGItOGJhN2MwNC1mOGQzMTI5OA==, ActorId: [1:7488005216325989242:2503], ActorState: ExecuteState, TraceId: 01jqpf546kff7gew3fqycxna3a, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::MultipleEffectsWithIndex [GOOD] Test command err: Trying to start YDB, gRPC: 12451, MsgBus: 24309 2025-03-31T16:04:12.126634Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005123348167276:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:12.126676Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0011ae/r3tmp/tmpWbaDfq/pdisk_1.dat 2025-03-31T16:04:12.867035Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:12.869883Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:12.876971Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:04:12.906478Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12451, node 1 2025-03-31T16:04:13.065026Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:13.065048Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:13.065055Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:13.065179Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24309 TClient is connected to server localhost:24309 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:13.870378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
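The FailNavigate failure above is the expected outcome: user0@builtin lacks DescribeSchema on /Root, so compiling a SELECT against the partition_stats system view fails with SCHEME_ERROR before execution ever starts. A sketch of the client side, assuming the ydb Python SDK; endpoint, database and token are placeholders, and the exact error subclass raised may differ:

import ydb

def read_partition_stats(endpoint: str, database: str, token: str):
    """Select from the partition_stats system view; without DescribeSchema
    rights on the path this is rejected at compile time, as in the
    KqpSystemView::FailNavigate log above."""
    driver = ydb.Driver(endpoint=endpoint, database=database,
                        credentials=ydb.AccessTokenCredentials(token))
    driver.wait(timeout=5)
    pool = ydb.SessionPool(driver)
    try:
        return pool.retry_operation_sync(
            lambda s: s.transaction().execute(
                'SELECT * FROM `.sys/partition_stats` LIMIT 10;',
                commit_tx=True))
    except ydb.Error as e:   # SCHEME_ERROR surfaces as a ydb.Error subclass
        print("query rejected:", e)
    finally:
        pool.stop()
        driver.stop()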
2025-03-31T16:04:13.922653Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:04:13.941278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:14.150638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:14.413949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:14.507565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:16.676085Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005140528038255:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:16.676191Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:17.072431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:04:17.104370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:04:17.128052Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488005123348167276:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:17.128242Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:17.143759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:04:17.189680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:04:17.230435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:04:17.279928Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:04:17.380404Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005144823006073:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:17.380468Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:17.380929Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005144823006078:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:17.384812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:04:17.398295Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005144823006080:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:04:17.488653Z node 1 :TX_PROXY ERROR: Actor# [1:7488005144823006136:3460] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:04:18.609004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 17484, MsgBus: 25500 2025-03-31T16:04:20.538655Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488005156216941976:2208];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0011ae/r3tmp/tmpoMcTCB/pdisk_1.dat 2025-03-31T16:04:20.623625Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:04:20.788397Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:20.905784Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:20.905869Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:20.908103Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17484, node 2 2025-03-31T16:04:21.123597Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:21.123622Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:21.123628Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:21.123733Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25500 TClient is connected to server localhost:25500 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:22.183911Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
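Each harness run above repeats TClient::Ls against the root until "WaitRootIsUp 'Root' success." appears. A readiness probe in that spirit, with describe_path standing in for the real Ls call (an assumed callable returning the parsed status):

import time

def wait_root_is_up(describe_path, path="/Root", timeout_s=30.0):
    """Poll the scheme root until it answers, like the repeated
    TClient::Ls request/response pairs in the harness output."""
    deadline = time.monotonic() + timeout_s
    while time.monotonic() < deadline:
        reply = describe_path(path)      # assumed: returns a status dict
        if reply.get("StatusCode") == "SUCCESS":
            return reply
        time.sleep(0.2)
    raise TimeoutError(f"{path} did not come up within {timeout_s}s")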
2025-03-31T16:04:22.190641Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:04:22.199154Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:22.304056Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:22.673462Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:22.801602Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:25.445540Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488005156216941976:2208];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:25.445694Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:25.475268Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005177691780052:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:25.475366Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:25.518625Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:04:25.610670Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:04:25.652863Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:04:25.709265Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:04:25.755932Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:04:25.811554Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:04:25.890400Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005177691780570:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:25.890495Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:25.890651Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005177691780575:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:25.894065Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:04:25.906611Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488005177691780577:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:04:25.998981Z node 2 :TX_PROXY ERROR: Actor# [2:7488005177691780634:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:04:27.728738Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:04:27.814089Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:04:27.914036Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 >> TColumnEngineTestLogs::IndexWriteOverload [GOOD] |74.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/security/ut/ydb-core-security-ut |74.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/security/ut/ydb-core-security-ut |74.9%| [LD] {RESULT} $(B)/ydb/core/security/ut/ydb-core-security-ut |74.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr |74.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr |74.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::SelectRowById [GOOD] Test command err: Trying to start YDB, gRPC: 13646, MsgBus: 23283 2025-03-31T16:04:29.182596Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005195233471854:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:29.182909Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001820/r3tmp/tmpv3H26V/pdisk_1.dat 2025-03-31T16:04:29.877734Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:29.883559Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:29.886443Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:04:29.931539Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13646, node 1 2025-03-31T16:04:30.109014Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:30.109046Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:30.109055Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:30.109174Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23283 TClient is connected to server localhost:23283 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:31.085805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:31.167793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:31.446270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:31.731754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:31.867476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:34.005415Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005212413342711:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:34.005552Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:34.179815Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488005195233471854:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:34.189932Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:34.511115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:04:34.614530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:04:34.679844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:04:34.733082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:04:34.772277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:04:34.828122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:04:34.901549Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005216708310524:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:34.901651Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:34.901990Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005216708310530:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:34.905742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:04:34.922529Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005216708310532:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:04:35.020559Z node 1 :TX_PROXY ERROR: Actor# [1:7488005221003277886:3461] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeTableIndex [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeSequence >> YdbSdkSessions::TestActiveSessionCountAfterBadSession ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnLargeData[Raw] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:03:39.523353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:03:39.523474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:03:39.523522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:03:39.523569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:03:39.523623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:03:39.523678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:03:39.523831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:03:39.523932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:03:39.524405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:03:39.638253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:03:39.638352Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:39.646516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:03:39.646653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:03:39.646864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:03:39.663450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:03:39.663753Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:03:39.664597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:39.664882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, 
path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:03:39.668000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:39.669931Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:39.670016Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:39.670266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:03:39.670907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:39.670999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:03:39.671243Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:03:39.679800Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:03:39.841318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:03:39.841638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:39.841921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:03:39.842198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:03:39.842274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:39.845813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:39.846026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:03:39.846249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:39.846329Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:03:39.846368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:03:39.846434Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:03:39.849351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:39.849429Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 
72057594046678944 2025-03-31T16:03:39.849492Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:03:39.851920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:39.851983Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:39.852062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:39.852122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:03:39.864646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:03:39.867555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:03:39.867816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:03:39.869195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:03:39.869355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:03:39.869435Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:39.869753Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:03:39.869821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:03:39.870027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:03:39.870116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:03:39.873238Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:03:39.873302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:03:39.873536Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:03:39.873591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:03:39.873846Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:03:39.873909Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:03:39.874050Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:03:39.874095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:39.874140Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:03:39.874174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:39.874217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:03:39.874272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:03:39.874324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:03:39.874357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:03:39.874455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:03:39.874505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:03:39.874554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:03:39.879469Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:03:39.879634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:03:39.879677Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
EvBuffer { Last: 0 Checksum: } REQUEST: PUT /data_00.csv?partNumber=99&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:2418 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: EC71A420-80CE-48DC-9C86-EAF9C9904E0C amz-sdk-request: attempt=1 content-length: 130 content-md5: rsyfbQ5vVOk4oQ1A/altew== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv / partNumber=99&uploadId=1 / 130 2025-03-31T16:04:35.605980Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3458:5422], result# UploadPartResult { ETag: aecc9f6d0e6f54e938a10d40fda96d7b } 2025-03-31T16:04:35.606311Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:3457:5421] 2025-03-31T16:04:35.606532Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3458:5422], sender# [1:3457:5421], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 0 Checksum: } REQUEST: PUT /data_00.csv?partNumber=100&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:2418 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: DECDEEBE-9982-454D-ABC4-4AD679DFE528 amz-sdk-request: attempt=1 content-length: 130 content-md5: Wyd1w7MZYbbZucaVvuRDAw== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv / partNumber=100&uploadId=1 / 130 2025-03-31T16:04:35.616590Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3458:5422], result# UploadPartResult { ETag: 5b2775c3b31961b6d9b9c695bee44303 } 2025-03-31T16:04:35.616903Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:3457:5421] 2025-03-31T16:04:35.617029Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3458:5422], sender# [1:3457:5421], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } REQUEST: PUT /data_00.csv?partNumber=101&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:2418 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 2459F0EE-4CE8-45EE-A8BE-B122903D538C amz-sdk-request: attempt=1 content-length: 0 content-md5: 1B2M2Y8AsgTpgAmY7PhCfg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv / partNumber=101&uploadId=1 / 0 2025-03-31T16:04:35.631843Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3458:5422], result# UploadPartResult { ETag: d41d8cd98f00b204e9800998ecf8427e } 2025-03-31T16:04:35.631942Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:3458:5422], success# 1, error# , multipart# 1, uploadId# 1 2025-03-31T16:04:35.649600Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvDataShard::TEvS3Upload: self# [1:3458:5422], upload# { Id: 1 Status: Complete Error: (empty maybe) Parts: 
[a59dd9a97cf3685e69093fb2d96653c6,bdbb215613239cb3a835fee1fe7e7ca3,cb38dbc776d5763f1926dfb22d508c87,3c430d66d07a0a4b1fa889f321fce197,43baf91083f286b60bf15e7786459cd9,90b5581bef612fa3bf9b38b336af405f,fd4869c26a12d22ee79256d778954d04,a9459bc28198b0b6bd67732c492fd740,697a3f8386ea1ff4e327de943224cb1a,614da0b4ec9464e69cd0c59909e80fbb,9b94eb3f67aa4c8a0bcbf546833ed966,fd45c3afacec641ad19e59d2b31aeba4,fd69678aecbc149601f58cf13c64d33e,90c09ab4923bc9f97f825d36e32bf362,c1586416a281a4cca2b2b4e333d9b079,f31908576272623f9f0a19bf774cde8e,6fe3b42388304d2af07c629aeb683581,7bc90eec21ca5bb3648e6a48e83c5730,8e1dda26de1af89bdffe2eefdcebea1d,14dc42d90caa1575bbfffa9dc8f21d66,92efb2368eecb32d4075c09294fde0b7,98efff5f7c7ecb42e7af65142ce05af9,6206c81807b3b9283b0173ee2c682100,616b431b91aedc9de4593321eb42ba96,9ae4762563ffdec596cc9ca4cb8913e1,946ebf2d95b4796ea2faee21f017be79,45834a9948bb4ab8b62d1894156d13ed,6ad3fe7286856927c1e00422bc8da697,ef89464d20eae46829e1bf557e4d04ce,f128e5de32097d205453080b01c94ac3,c13e650ee2cfcecfdf4f578a2e5b1c2d,fc26314711b25d20fc654cf59301b806,56f6f2c574fba86496a87a7dd5fab46c,c7951eace72cfe0f14f808173e07bc64,3d9ad3340e58b973eaf8d4f14ba3b0f9,fc41d6fdfb52389dda8b26d7a0a3a889,9974b6ae96ffd0b756acb67088e890f9,cde8a5604010abe8fccfa9492144036f,0364e048eaac35c26d48b0c5072b5255,aac5a84927124d6ae4931e2650c80d9f,eab068fe4ca35c2f3e35890bd727eb4f,bc3646bdbcbc7f97dcddf2202ea9421f,6d3f63d672eda4a4617c9e7589a68bfc,0401bade6c3031b5be872238520b993a,1c6405688f86423480173e3e316a20bd,52395f68e877cbb8d7115a247331b0a7,4b0673ac18058554d2c53bf9f99b34b2,87bc1b9e650b31e81a9ad2531e3ef9da,b29053c8cd093c8b92ad3954c42cb7be,faf1084f6b33b00e2e822d1d3c3f0083,eedec03ee8d7eda4654db7206ad0889e,be4469dd028d5519a67098055f25513f,a7afa9827ec27c565cff1ed505a06f4b,91fe8109d2ad934c4364d90c29aaba71,73b81ea00e11db12d66497d30eb48446,cce69ef69777afeab34eefa515abc7f4,4e4ac1a421353964356400b8be8e21da,32cd6083b12660bcd4062af08d89eb05,71957b9db37811c7680638b82dc6384b,a8787e692c423a2dfa07dd261e72790a,283838ab16206b27738ea6653110f833,88bf084fb3029f0d5c0705eece930d70,1ed2f9f7221f1718b81fdf2d846347dd,406706cfbc454922dcad50b9c534b8d1,dbb606c993d798974ed4f5c9ebf195ca,1a4a3868dc6fa26c6b019d237f9ea6f4,82660a3c6b576a1b3fea925f3c179a2e,d393db2749ae42e854e85eeec2ea3592,b42c92ad14ee0e5351fec7e5a045a91b,2c7af27f9dc77efbcbe71c2d7997d6e9,278aba62ab1d9e3ff16df2d82ac5f5c7,6b8380404a7e7ec95ad5f3941d5d404c,c9813b9fc1d6b5087e64849076edd0f8,160785e4dac02a91c43a497ee59eea06,db529a9ba22f60f404031cfe85e966e9,9b70af168e2d3769bd8bc4dffa3202ea,9ac39c3843b6621ace44acf430a59e06,4603ff564a46e93951f246ed18926071,66b85f35ee76a7f71f50e9aad56758de,1665c284ad04d6b893b69372bf8fc6b9,8c1c27ec88fb52f06de6e7516a392672,0a5f992db51277a05ec12f0d6459ef21,8debe3a6023155561cb0890fc05bd7fb,938ece258b7596f8eea7e82bc2b8f88c,767ca0dcf0b154fa3c818044bbfc58fd,914cc7165d994bb05824332ac120446f,ab0ece250f5959a510170ee07aa21b5d,8bf4b44d67f062026b0010a8a0b39cc0,e0aa13fa8246e68c18905d3abadfc44d,27b021b75b6a95f63ea27f7ec238c05f,673e661e4cfea1e431678dd9881c2a8c,f101b34943f1831ae8c0b46ffcb1c2d6,562b32a8142b29c1a88e507ab1981a6b,fdea4c6fc2befb44614992ca8bf34b21,b7c8ec6acc45b037978482996e910b75,aec72fbd2e171b798900b22897d00941,710ef5b5e8eba750b6acc9b32dff42a3,821c7e22ef9c22098171e7f837dcfcc8,aecc9f6d0e6f54e938a10d40fda96d7b,5b2775c3b31961b6d9b9c695bee44303,d41d8cd98f00b204e9800998ecf8427e] } REQUEST: POST /data_00.csv?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:2418 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 
E1AF74FA-286F-4F53-896A-9CC708BE680A amz-sdk-request: attempt=1 content-length: 11529 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /data_00.csv / uploadId=1 2025-03-31T16:04:35.673888Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvCompleteMultipartUploadResponse: self# [1:3458:5422], result# CompleteMultipartUploadResult { Bucket: Key: data_00.csv ETag: 5d8c28efc812b445ddd02900ff3ee599 } 2025-03-31T16:04:35.674469Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:3457:5421], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-03-31T16:04:35.714986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 305 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-03-31T16:04:35.715077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-03-31T16:04:35.715380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 305 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-03-31T16:04:35.715518Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 305 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-03-31T16:04:35.715591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-31T16:04:35.715642Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:04:35.715728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-03-31T16:04:35.715783Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-03-31T16:04:35.715982Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:04:35.721856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:04:35.722736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:04:35.722800Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-03-31T16:04:35.722958Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
Part operation is done id#102:0 progress is 1/1 2025-03-31T16:04:35.723000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T16:04:35.723045Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-31T16:04:35.723110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T16:04:35.723156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-03-31T16:04:35.723250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:333:2312] message: TxId: 102 2025-03-31T16:04:35.723315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T16:04:35.723369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-03-31T16:04:35.723410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-03-31T16:04:35.723549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-31T16:04:35.727994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-31T16:04:35.728072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:3443:5408] TestWaitNotification: OK eventTxId 102 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TColumnEngineTestLogs::IndexWriteOverload [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine.h:144;event=RegisterTable;path_id=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=38120;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:49;event=insert_to_cache;key=uint64::0;records=1;size=8; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=38120;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=6840;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=6840;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=6840;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; 
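The backup export above streams data_00.csv to S3 as a multipart upload: one PUT per partNumber under a shared uploadId, then a completing POST that lists every part's ETag. The same flow with boto3 against an S3-compatible endpoint; bucket, key and endpoint are placeholders, and note that real S3, unlike the mock in this log, requires at least 5 MiB for every part except the last:

import boto3

def multipart_backup(data: bytes, bucket: str, key: str,
                     endpoint: str, part_size: int = 5 * 1024 * 1024):
    """Chunked upload in the shape of the export above: one PUT per part,
    then a completing POST carrying every part's ETag."""
    s3 = boto3.client("s3", endpoint_url=endpoint)
    upload = s3.create_multipart_upload(Bucket=bucket, Key=key)
    parts = []
    for n, off in enumerate(range(0, len(data), part_size), start=1):
        resp = s3.upload_part(Bucket=bucket, Key=key, PartNumber=n,
                              UploadId=upload["UploadId"],
                              Body=data[off:off + part_size])
        parts.append({"ETag": resp["ETag"], "PartNumber": n})
    return s3.complete_multipart_upload(
        Bucket=bucket, Key=key, UploadId=upload["UploadId"],
        MultipartUpload={"Parts": parts})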
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:1;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:33376;index_size:0;meta:((produced=INSERTED;)););path_id=1;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5;
FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4192;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8232;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8008;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8008;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8008;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:2;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36912;index_size:0;meta:((produced=INSERTED;)););path_id=1;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5;
FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4192;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8240;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8016;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8016;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8016;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:3;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36944;index_size:0;meta:((produced=INSERTED;)););path_id=1;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5;
FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4200;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8240;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8024;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8024;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8024;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:4;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36976;index_size:0;meta:((produced=INSERTED;)););path_id=1;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5;
FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4200;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8240;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8040;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8040;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8040;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:5;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37024;index_size:0;meta:((produced=INSERTED;)););path_id=1;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5;
FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4200;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8240;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8056;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8056;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8056;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4;
FALLBACK_ACTOR_LOGGING; ...
onent=2100;fline=native.cpp:71;event=parsing;size=448;columns=4;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4;
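The serialize/parsing pairs in this block come from the column shard's Arrow-based codec (native.cpp), which logs the payload size and column count each time a batch is encoded and then decoded. A rough equivalent of that round-trip using Apache Arrow's public C++ IPC API, with an invented single-column schema; this illustrates the pattern only and is not the codec's actual implementation:

    // Sketch: serialize an Arrow RecordBatch and read it back, mirroring the
    // event=serialize / event=parsing log pairs. Schema and data are invented.
    #include <arrow/api.h>
    #include <arrow/io/api.h>
    #include <arrow/ipc/api.h>
    #include <iostream>

    arrow::Status RoundTrip() {
        // Build a one-column batch ("timestamp" is just an illustrative name).
        arrow::Int64Builder builder;
        ARROW_RETURN_NOT_OK(builder.AppendValues({1, 2, 3}));
        std::shared_ptr<arrow::Array> column;
        ARROW_RETURN_NOT_OK(builder.Finish(&column));
        auto schema = arrow::schema({arrow::field("timestamp", arrow::int64())});
        auto batch = arrow::RecordBatch::Make(schema, column->length(), {column});

        // "serialize": encode the batch into a single IPC buffer.
        ARROW_ASSIGN_OR_RAISE(auto buffer, arrow::ipc::SerializeRecordBatch(
            *batch, arrow::ipc::IpcWriteOptions::Defaults()));
        std::cout << "serialize;size=" << buffer->size()
                  << ";columns=" << batch->num_columns() << "\n";

        // "parsing": decode the batch back out of the buffer.
        arrow::io::BufferReader reader(buffer);
        ARROW_ASSIGN_OR_RAISE(auto read_back, arrow::ipc::ReadRecordBatch(
            batch->schema(), /*dictionary_memo=*/nullptr,
            arrow::ipc::IpcReadOptions::Defaults(), &reader));
        std::cout << "parsing;size=" << buffer->size()
                  << ";columns=" << read_back->num_columns() << "\n";
        return arrow::Status::OK();
    }

    int main() { return RoundTrip().ok() ? 0 : 1; }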
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=432;columns=4;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4;
>> YdbSdkSessions::SessionsServerLimit
>> KqpSysColV1::InnerJoinSelectAsterisk [GOOD]
|74.9%| [TA] $(B)/ydb/core/tx/schemeshard/ut_backup/test-results/unittest/{meta.json ... results_accumulator.log}
>> YdbSdkSessions::TestSessionPool
|74.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/cms/console/ut/ydb-core-cms-console-ut
|74.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/console/ut/ydb-core-cms-console-ut
|74.9%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup/test-results/unittest/{meta.json ... results_accumulator.log}
|75.0%| [LD] {RESULT} $(B)/ydb/core/cms/console/ut/ydb-core-cms-console-ut
>> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryServiceStreamCall
>> DataShardVolatile::UpsertBrokenLockArbiter-UseSink [GOOD]
>> DataShardVolatile::UpsertNoLocksArbiterRestart+UseSink
>> KqpSystemView::PartitionStatsRanges [GOOD]
|75.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage
|75.0%| [LD] {RESULT} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage
|75.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::InnerJoinSelectAsterisk [GOOD]
Test command err:
Trying to start YDB, gRPC: 8360, MsgBus: 64603
2025-03-31T16:04:30.873012Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005198692093891:2196];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:04:30.873095Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00181a/r3tmp/tmpjopPmv/pdisk_1.dat
2025-03-31T16:04:31.633396Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:04:31.643819Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:04:31.643908Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:04:31.656920Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 8360, node 1
2025-03-31T16:04:31.917683Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T16:04:31.917708Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T16:04:31.917716Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T16:04:31.917818Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:64603
TClient is connected to server localhost:64603
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
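The bootstrap lines above follow the standard pattern for these unit tests: a single-node server starts, exposes gRPC on a per-test port (8360 here), and a test client connects and waits for the root scheme entry. A minimal sketch of an equivalent external connection through the YDB C++ SDK; the endpoint and database mirror the log's pattern, and the include paths vary between SDK versions, so treat both as assumptions:

    // Sketch: connect to a local single-node YDB over gRPC and open a session,
    // roughly what "TClient is connected to server localhost:64603" reflects.
    // Include paths are from the in-tree SDK layout and may differ elsewhere.
    #include <ydb/public/sdk/cpp/client/ydb_driver/driver.h>
    #include <ydb/public/sdk/cpp/client/ydb_table/table.h>

    int main() {
        auto config = NYdb::TDriverConfig()
            .SetEndpoint("localhost:8360")  // GrpcPort printed by the test server
            .SetDatabase("/Root");
        NYdb::TDriver driver(config);

        NYdb::NTable::TTableClient client(driver);
        auto session = client.CreateSession().GetValueSync();
        const bool ok = session.IsSuccess();

        driver.Stop(true);
        return ok ? 0 : 1;
    }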
2025-03-31T16:04:33.137651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:04:33.218111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:04:33.468608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:04:33.778595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:04:33.906986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:04:35.843318Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488005198692093891:2196];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:04:35.843387Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-31T16:04:36.177261Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005224461899306:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:04:36.177394Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:04:36.724913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-31T16:04:36.773049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-31T16:04:36.862370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-31T16:04:36.931305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-31T16:04:36.979116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-31T16:04:37.122537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-31T16:04:37.239115Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005228756867134:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:04:37.239215Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:04:37.239765Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005228756867139:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:04:37.248176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-31T16:04:37.265676Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005228756867141:2466], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-03-31T16:04:37.364307Z node 1 :TX_PROXY ERROR: Actor# [1:7488005228756867198:3464] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
>> YdbSdkSessions::CloseSessionAfterDriverDtorWithoutSessionPool
>> YdbSdkSessions::CloseSessionWithSessionPoolExplicitDriverStopOnly
>> KqpSysColV0::InnerJoinSelectAsterisk [GOOD]
>> BasicStatistics::TwoServerlessTwoSharedDbs
|75.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut
|75.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut
|75.0%| [LD] {RESULT} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut
>> YdbYqlClient::DiscoveryLocationOverride [GOOD]
>> YdbYqlClient::DeleteTableWithDeletedIndex
>> BackupRestoreS3::RestoreViewReferenceTable [GOOD]
>> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeBackupCollection [GOOD]
>> BackupRestoreS3::TestAllIndexTypes-EIndexTypeInvalid [GOOD]
>> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobal
>> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryService
>> YdbQueryService::TestCreateAndAttachSession [GOOD]
>> YdbQueryService::TestForbidExecuteWithoutAttach
>> KqpSysColV0::UpdateAndDelete [GOOD]
>> KqpSysColV1::InnerJoinSelect [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsRanges [GOOD]
Test command err:
Trying to start YDB, gRPC: 8280, MsgBus: 63910
2025-03-31T16:04:31.667585Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005203006471676:2199];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:04:31.668173Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001810/r3tmp/tmp685OFp/pdisk_1.dat
2025-03-31T16:04:32.458562Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:04:32.497683Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:04:32.497789Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:04:32.500105Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 8280, node 1
2025-03-31T16:04:32.679632Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T16:04:32.679654Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T16:04:32.679662Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T16:04:32.679813Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:63910
TClient is connected to server localhost:63910
WaitRootIsUp 'Root'...
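The recurring WARN/ERROR sequence above is the workload service bootstrapping its default resource pool: pool fetches fail with NOT_FOUND until the ESchemeOpCreateResourcePool operation lands, a concurrent creation attempt then finds the path already present ("path exist, request accepts it"), and the "doublechecking" retry resolves the race. Pools can also be created explicitly; a sketch via a scheme query follows, where the pool name and the CREATE RESOURCE POOL option names are assumptions based on the workload-manager feature, not anything this log confirms:

    // Sketch: explicitly creating a resource pool through the C++ SDK.
    // Pool name and WITH options below are assumed/illustrative.
    #include <ydb/public/sdk/cpp/client/ydb_driver/driver.h>
    #include <ydb/public/sdk/cpp/client/ydb_table/table.h>

    int main() {
        NYdb::TDriver driver(NYdb::TDriverConfig()
                                 .SetEndpoint("localhost:2136")  // placeholder
                                 .SetDatabase("/Root"));
        NYdb::NTable::TTableClient client(driver);
        auto session = client.CreateSession().GetValueSync().GetSession();

        // Creating the pool up front avoids the NOT_FOUND bootstrap warnings.
        auto status = session.ExecuteSchemeQuery(R"(
            CREATE RESOURCE POOL demo_pool WITH (
                CONCURRENT_QUERY_LIMIT = 10,  -- assumed option name
                QUEUE_SIZE = 100              -- assumed option name
            );
        )").GetValueSync();

        driver.Stop(true);
        return status.IsSuccess() ? 0 : 1;
    }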
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-31T16:04:33.860960Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:04:33.898199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:04:34.165909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:04:34.356069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:04:34.472684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:04:36.655471Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488005203006471676:2199];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:04:36.662298Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-31T16:04:37.397556Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005228776277094:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:04:37.397678Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:04:37.931020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-31T16:04:37.998987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-31T16:04:38.092090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-31T16:04:38.171824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-31T16:04:38.216130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-31T16:04:38.255330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-31T16:04:38.306297Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005233071244910:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:04:38.306385Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:04:38.306591Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005233071244915:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:04:38.310836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-31T16:04:38.329228Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005233071244917:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-03-31T16:04:38.416799Z node 1 :TX_PROXY ERROR: Actor# [1:7488005233071244973:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-31T16:04:40.259034Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437080221, txId: 281474976710671] shutting down
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::InnerJoinSelectAsterisk [GOOD]
Test command err:
Trying to start YDB, gRPC: 18352, MsgBus: 29846
2025-03-31T16:04:32.755133Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005207190080077:2069];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:04:32.755195Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00180f/r3tmp/tmpnhuKYl/pdisk_1.dat
2025-03-31T16:04:33.279571Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:04:33.279699Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:04:33.289954Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-31T16:04:33.378378Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 18352, node 1
2025-03-31T16:04:33.579287Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T16:04:33.579311Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T16:04:33.579318Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T16:04:33.579436Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:29846
TClient is connected to server localhost:29846
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
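KqpSystemView::PartitionStatsRanges, reported [GOOD] above, exercises range-restricted reads over the partition_stats system view before its snapshot is discarded at shutdown. For reference, this is the kind of query such a test issues; the exact column set of `.sys/partition_stats` is an assumption taken from YDB's documented system views, not from this log:

    // Sketch: query per-partition statistics from the .sys/partition_stats
    // system view. Column names are assumed from the public system-view docs.
    #include <ydb/public/sdk/cpp/client/ydb_driver/driver.h>
    #include <ydb/public/sdk/cpp/client/ydb_table/table.h>
    #include <iostream>

    int main() {
        NYdb::TDriver driver(NYdb::TDriverConfig()
                                 .SetEndpoint("localhost:2136")  // placeholder
                                 .SetDatabase("/Root"));
        NYdb::NTable::TTableClient client(driver);
        auto session = client.CreateSession().GetValueSync().GetSession();

        // A read-only transaction over a key range of partition indexes.
        auto result = session.ExecuteDataQuery(R"(
            SELECT Path, PartIdx, RowCount, DataSize
            FROM `/Root/.sys/partition_stats`
            WHERE PartIdx BETWEEN 0 AND 10;
        )", NYdb::NTable::TTxControl::BeginTx(
                NYdb::NTable::TTxSettings::OnlineRO()).CommitTx())
            .GetValueSync();

        std::cout << (result.IsSuccess() ? "ok" : "failed") << "\n";
        driver.Stop(true);
        return result.IsSuccess() ? 0 : 1;
    }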
2025-03-31T16:04:34.323806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:04:34.361781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:04:34.541499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
waiting...
2025-03-31T16:04:34.765814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
2025-03-31T16:04:34.873411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:04:37.481864Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005228664918319:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:04:37.482012Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:04:37.751028Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488005207190080077:2069];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:04:37.751526Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-31T16:04:37.866794Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-31T16:04:37.957044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-31T16:04:38.007856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-31T16:04:38.053440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-31T16:04:38.106691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-31T16:04:38.228925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-31T16:04:38.348874Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005232959886144:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:04:38.348950Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:04:38.349571Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005232959886149:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:04:38.353745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-31T16:04:38.370016Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005232959886152:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-03-31T16:04:38.447383Z node 1 :TX_PROXY ERROR: Actor# [1:7488005232959886206:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
|75.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms
>> TGRpcYdbTest::RemoveNotExistedDirectory [GOOD]
>> TGRpcYdbTest::SdkUuid
|75.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms
|75.0%| [LD] {RESULT} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms
>> AsyncIndexChangeExchange::SenderShouldShakeHandsOnce [GOOD]
>> AsyncIndexChangeExchange::SenderShouldShakeHandsTwice
>> TGRpcNewCoordinationClient::SessionSemaphoreInfiniteTimeout [GOOD]
>> TGRpcNewCoordinationClientAuth::OwnersAndPermissions
>> YdbYqlClient::BuildInfo [GOOD]
>> YdbYqlClient::AlterTableAddIndexWithDataColumn
>> KqpPg::InsertNoTargetColumns_Alter+useSink [GOOD]
>> KqpPg::InsertNoTargetColumns_Alter-useSink
>> ColumnStatistics::CountMinSketchStatistics
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::UpdateAndDelete [GOOD]
Test command err:
Trying to start YDB, gRPC: 18740, MsgBus: 22275
2025-03-31T16:04:30.079067Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005201099359390:2060];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:04:30.083738Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00181d/r3tmp/tmpDCRZLC/pdisk_1.dat
2025-03-31T16:04:30.808667Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:04:30.808843Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:04:30.810335Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 18740, node 1
2025-03-31T16:04:30.917142Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0
2025-03-31T16:04:30.917166Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0
2025-03-31T16:04:30.921420Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:04:31.100919Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T16:04:31.115221Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T16:04:31.115245Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T16:04:31.115456Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:22275
TClient is connected to server localhost:22275
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-31T16:04:32.110178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:04:32.167515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:04:32.418727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:04:32.778885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:04:32.919602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:04:35.079401Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488005201099359390:2060];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:04:35.079484Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-31T16:04:35.933932Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005222574197669:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:04:35.934054Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:04:36.598365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-31T16:04:36.665750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-31T16:04:36.732241Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-31T16:04:36.801605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-31T16:04:36.895187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-31T16:04:36.995019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-31T16:04:37.102004Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005231164132789:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:04:37.102128Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:04:37.102523Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005231164132795:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:04:37.106715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-31T16:04:37.138347Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005231164132797:2467], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-03-31T16:04:37.207047Z node 1 :TX_PROXY ERROR: Actor# [1:7488005231164132854:3465] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::InnerJoinSelect [GOOD]
Test command err:
Trying to start YDB, gRPC: 2895, MsgBus: 4127
2025-03-31T16:04:31.432165Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005204643556694:2145];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:04:31.433757Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001814/r3tmp/tmpw5FUxD/pdisk_1.dat
2025-03-31T16:04:32.252968Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:04:32.299347Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:04:32.299483Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:04:32.303256Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 2895, node 1
2025-03-31T16:04:32.595643Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T16:04:32.595664Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T16:04:32.595702Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T16:04:32.595813Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:4127
TClient is connected to server localhost:4127
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-31T16:04:33.902680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:04:33.985594Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:34.301835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:34.615364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:34.762963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:36.432475Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488005204643556694:2145];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:36.432538Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:37.929000Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005230413362145:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:37.929097Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:38.556833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:04:38.653018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:04:38.705442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:04:38.774479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:04:38.884364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:04:38.965812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:04:39.049383Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005239003297269:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:39.049464Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:39.049792Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005239003297274:2467], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:39.053733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:04:39.096937Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005239003297276:2468], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:04:39.180163Z node 1 :TX_PROXY ERROR: Actor# [1:7488005239003297333:3464] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> SystemView::QueryStatsRetries [GOOD] >> YdbImport::Simple [GOOD] >> YdbImport::EmptyData |75.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl |75.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl |75.0%| [LD] {RESULT} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl >> KqpSysColV1::StreamInnerJoinSelectAsterisk [GOOD] |75.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas |75.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas |75.1%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas |75.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename |75.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename |75.1%| [LD] {RESULT} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename >> KqpErrors::ProposeResultLost_RwTx+UseSink >> YdbQueryService::TestForbidExecuteWithoutAttach [GOOD] >> YdbQueryService::TestCreateDropAttachSession |75.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut |75.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut |75.3%| [LD] {RESULT} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamInnerJoinSelectAsterisk [GOOD] Test command err: Trying to start YDB, gRPC: 17284, MsgBus: 12201 2025-03-31T16:04:34.767384Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005217345859321:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:34.767449Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001804/r3tmp/tmpeDxoNT/pdisk_1.dat 2025-03-31T16:04:35.820368Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:35.855629Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:04:35.856663Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:35.856748Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:35.858838Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17284, node 1 2025-03-31T16:04:36.155746Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:36.155770Z node 1 :NET_CLASSIFIER 
WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:36.155776Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:36.155899Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12201 TClient is connected to server localhost:12201 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:37.799570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:37.831267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:38.012783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:38.388643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:38.580128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:39.729770Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488005217345859321:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:39.729830Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:41.483300Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005247410632063:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:41.483401Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:41.905871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:04:41.947151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:04:41.995986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:04:42.044456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:04:42.105484Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:04:42.168901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:04:42.277125Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005251705599877:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:42.277189Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:42.277351Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005251705599882:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:42.281626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:04:42.296972Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005251705599884:2467], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:04:42.387346Z node 1 :TX_PROXY ERROR: Actor# [1:7488005251705599940:3458] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:04:45.194291Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437085159, txId: 281474976710671] shutting down >> KqpErrors::ProposeError >> YdbS3Internal::TestS3Listing [GOOD] >> YdbS3Internal::TestAccessCheck ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest >> SystemView::QueryStatsRetries [GOOD] Test command err: 2025-03-31T16:00:05.387298Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004061703900224:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:00:05.387390Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001a27/r3tmp/tmpwsZzSr/pdisk_1.dat 2025-03-31T16:00:06.497969Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:00:06.511300Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:00:06.528064Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:00:06.528175Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:00:06.577385Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5167, node 1 2025-03-31T16:00:06.947647Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:00:06.947670Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:00:06.947680Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:00:06.947816Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15067 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:00:07.654897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:00:07.751619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:00:08.197416Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:00:10.383441Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488004061703900224:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:00:10.383522Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:00:12.798059Z node 1 :KQP_COMPILE_SERVICE INFO: Subscribed for config changes 2025-03-31T16:00:12.798109Z node 1 :KQP_COMPILE_SERVICE INFO: Updated config 2025-03-31T16:00:12.852842Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004091768672521:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:00:12.852989Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:00:12.856189Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004091768672533:2354], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:00:12.860612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-03-31T16:00:12.917245Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488004091768672535:2355], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-03-31T16:00:12.987834Z node 1 :TX_PROXY ERROR: Actor# [1:7488004091768672610:2875] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:00:12.990213Z node 1 :KQP_COMPILE_SERVICE DEBUG: Try to find query by queryId, queryId: {Cluster: db, Database: , DatabaseId: /Root, UserSid: , Text: \n SELECT schemaname, tablename, tableowner, tablespace, hasindexes, hasrules, hastriggers, rowsecurity FROM `Root/.sys/pg_tables` WHERE tablename = PgName(\"Table0\") OR tablename = PgName(\"Table1\") ORDER BY tablename;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_DML}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"" }, "settings": { "ydb_user":"" }, "rollback_settings": { } } }} 2025-03-31T16:00:12.990337Z node 1 :KQP_COMPILE_SERVICE DEBUG: Perform request, TraceId.SpanIdPtr: 0x000050F0001333E8 2025-03-31T16:00:12.990381Z node 1 :KQP_COMPILE_SERVICE DEBUG: Received compile request, sender: [1:7488004091768672503:2349], queryUid: , queryText: "\n SELECT schemaname, tablename, tableowner, tablespace, hasindexes, hasrules, hastriggers, rowsecurity FROM `Root/.sys/pg_tables` WHERE tablename = PgName(\"Table0\") OR tablename = PgName(\"Table1\") ORDER BY tablename;\n ", keepInCache: 0, split: 0{ TraceId: 01jqpex4hf36g1mhgt38s9tawz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmE0YTc3ZjYtZjg4MmFkMTktNDY1MjEzNjUtNmE5Y2M4YmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default} 2025-03-31T16:00:12.990521Z node 1 :KQP_COMPILE_SERVICE DEBUG: Try to find query by queryId, queryId: {Cluster: db, Database: , DatabaseId: /Root, UserSid: , Text: \n SELECT schemaname, tablename, tableowner, tablespace, hasindexes, hasrules, hastriggers, rowsecurity FROM `Root/.sys/pg_tables` WHERE tablename = PgName(\"Table0\") OR tablename = PgName(\"Table1\") ORDER BY tablename;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_DML}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"" }, "settings": { "ydb_user":"" }, "rollback_settings": { } } }} 2025-03-31T16:00:12.990581Z node 1 :KQP_COMPILE_SERVICE DEBUG: Added request to queue, sender: [1:7488004091768672503:2349], queueSize: 1 2025-03-31T16:00:12.991162Z node 1 :KQP_COMPILE_SERVICE DEBUG: Created compile actor, sender: [1:7488004091768672503:2349], compileActor: [1:7488004091768672621:2359] 2025-03-31T16:00:13.558523Z node 1 :KQP_YQL INFO: TraceId: 01jqpex4hf36g1mhgt38s9tawz, SessionId: CompileActor 2025-03-31 16:00:13.557 INFO ydb-core-sys_view-ut(pid=79968, tid=0x00007F4CB8F3F640) [KQP] kqp_host.cpp:1382: Compiled query: ( (let $1 (Read! world (DataSource '"kikimr" '"db") (Key '('table (String '"Root/.sys/pg_tables"))) (Void) '())) (let $2 (DataSink 'result)) (let $3 (Filter (Right! 
$1) (lambda '($18) (Coalesce (Or (== (Member $18 '"tablename") (PgConst '"Table0" (PgType 'name))) (== (Member $18 '"tablename") (PgConst '"Table1" (PgType 'name)))) (Bool 'false))))) (let $4 (TypeOf $3)) (let $5 (SqlProjectItem $4 '"schemaname" (lambda '($19) (Member $19 '"schemaname")))) (let $6 (SqlProjectItem $4 '"tablename" (lambda '($20) (Member $20 '"tablename")))) (let $7 (SqlProjectItem $4 '"tableowner" (lambda '($21) (Member $21 '"tableowner")))) (let $8 (SqlProjectItem $4 '"tablespace" (lambda '($22) (Member $22 '"tablespace")))) (let $9 (SqlProjectItem $4 '"hasindexes" (lambda '($23) (Member $23 '"hasindexes")))) (let $10 (SqlProjectItem $4 '"hasrules" (lambda '($24) (Member $24 '"hasrules")))) (let $11 (SqlProjectItem $4 '"hastriggers" (lambda '($25) (Member $25 '"hastriggers")))) (let $12 (SqlProjectItem $4 '"rowsecurity" (lambda '($26) (Member $26 '"rowsecurity")))) (let $13 '($5 $6 $7 $8 $9 $10 $11 $12)) (let $14 (Sort (PersistableRepr (SqlProject $3 $13)) (Bool 'true) (lambda '($27) (PersistableRepr (Member $27 '"tablename"))))) (let $15 '('"schemaname" '"tablename" '"tableowner" '"tablespace" '"hasindexes" '"hasrules" '"hastriggers" '"rowsecurity")) (let $16 '('('type) '('autoref) '('columns $15))) (let $17 (Write! (Left! $1) $2 (Key) $14 $16)) (return (Commit! $17 $2)) ) 2025-03-31T16:00:13.560211Z node 1 :KQP_YQL TRACE: TraceId: 01jqpex4hf36g1mhgt38s9tawz, SessionId: CompileActor 2025-03-31 16:00:13.559 TRACE ydb-core-sys_view-ut(pid=79968, tid=0x00007F4CB8F3F640) [KQP] kqp_transform.cpp:33: YqlTransformer: ( (let $1 (Read! world (DataSource '"kikimr" '"db") (Key '('table (String '"Root/.sys/pg_tables"))) (Void) '())) (let $2 (DataSink 'result)) (let $3 (Filter (Right! $1) (lambda '($18) (Coalesce (Or (== (Member $18 '"tablename") (PgConst '"Table0" (PgType 'name))) (== (Member $18 '"tablename") (PgConst '"Table1" (PgType 'name)))) (Bool 'false))))) (let $4 (TypeOf $3)) (let $5 (SqlProjectItem $4 '"schemaname" (lambda '($19) (Member $19 '"schemaname")))) (let $6 (SqlProjectItem $4 '"tablename" (lambda '($20) (Member $20 '"tablename")))) (let $7 (SqlProjectItem $4 '"tableowner" (lambda '($21) (Member $21 '"tableowner")))) (let $8 (SqlProjectItem $4 '"tablespace" (lambda '($22) (Member $22 '"tablespace")))) (let $9 (SqlProjectItem $4 '"hasindexes" (lambda '($23) (Member $23 '"hasindexes")))) (let $10 (SqlProjectItem $4 '"hasrules" (lambda '($24) (Member $24 '"hasrules")))) (let $11 (SqlProjectItem $4 '"hastriggers" (lambda '($25) (Member $25 '"hastriggers")))) (let $12 (SqlProjectItem $4 '"rowsecurity" (lambda '($26) (Member $26 '"rowsecurity")))) (let $13 '($5 $6 $7 $8 $9 $10 $11 $12)) (let $14 (Sort (PersistableRepr (SqlProject $3 $13)) (Bool 'true) (lambda '($27) (PersistableRepr (Member $27 '"tablename"))))) (let $15 '('"schemaname" '"tablename" '"tableowner" '"tablespace" '"hasindexes" '"hasrules" '"hastriggers" '"rowsecurity")) (let $16 '('('type) '('autoref) '('columns $15))) (let $17 (Write! (Left! $1) $2 (Key) $14 $16)) (return (Commit! (Commit! $17 $2) (DataSink '"kikimr" '"db") '('('"mode" '"flush")))) ) 2025-03- ... UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:18.429161Z node 61 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:18.535850Z node 61 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:21.587272Z node 61 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[61:7488005140753100585:2080];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:21.587382Z node 61 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:26.957833Z node 61 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [61:7488005183702774622:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:26.957990Z node 61 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:26.958565Z node 61 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [61:7488005183702774637:2355], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:26.973129Z node 61 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-31T16:04:27.022425Z node 61 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [61:7488005183702774639:2356], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-31T16:04:27.095927Z node 61 :TX_PROXY ERROR: Actor# [61:7488005187997742010:2740] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:04:27.352622Z node 61 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jqpf4wp82qxgrfqjc3n3d1mq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=61&id=OGRiNjViMDQtNzYyOTdiYTktY2M2ZWE0YzEtMjc2MGM4MzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:04:27.593784Z node 61 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01jqpf4x3j98txtb2cpwvcfh8n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=61&id=ZDAzZjMwOTItZWJlMWExZWEtM2U4NzRjOTAtMjU5OWMyODQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:04:27.598239Z node 61 :SYSTEM_VIEWS INFO: Scan started, actor: [61:7488005187997742089:2374], owner: [61:7488005187997742086:2372], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_request_units_one_hour] 2025-03-31T16:04:27.605761Z node 61 :SYSTEM_VIEWS INFO: Scan prepared, actor: [61:7488005187997742089:2374], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-03-31T16:04:27.606540Z node 61 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [61:7488005187997742089:2374], row count: 1, finished: 1 2025-03-31T16:04:27.606594Z node 61 :SYSTEM_VIEWS INFO: Scan finished, actor: [61:7488005187997742089:2374], owner: [61:7488005187997742086:2372], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_request_units_one_hour] 2025-03-31T16:04:27.614030Z node 61 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437067587, txId: 281474976710662] shutting down 2025-03-31T16:04:31.651764Z node 66 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[66:7488005204779313375:2077];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:31.651873Z node 66 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001a27/r3tmp/tmpIEH3ZZ/pdisk_1.dat 2025-03-31T16:04:32.374066Z node 66 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:32.446075Z node 66 :HIVE WARN: HIVE#72057594037968897 Node(66, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:32.446237Z node 66 :HIVE WARN: HIVE#72057594037968897 Node(66, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:32.471992Z node 66 :HIVE WARN: HIVE#72057594037968897 Node(66, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15007, node 66 2025-03-31T16:04:32.732077Z node 66 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:32.732108Z node 66 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:32.732123Z node 66 :NET_CLASSIFIER WARN: failed to 
initialize from file: (empty maybe) 2025-03-31T16:04:32.732347Z node 66 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23409 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:33.605154Z node 66 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:33.651611Z node 66 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:36.655475Z node 66 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[66:7488005204779313375:2077];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:36.655601Z node 66 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:41.960119Z node 66 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [66:7488005247728987413:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:41.960288Z node 66 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:41.960384Z node 66 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [66:7488005247728987426:2355], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:41.985348Z node 66 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-31T16:04:42.077048Z node 66 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [66:7488005247728987429:2356], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-31T16:04:42.169501Z node 66 :TX_PROXY ERROR: Actor# [66:7488005252023954805:2738] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:04:42.694367Z node 66 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jqpf5bb0dkjdt7wyfw4rkgg9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=66&id=NDU2MWNkY2UtNjE4MDM5OTktYzVlNWZjM2MtZmU0N2QwNDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:04:42.957368Z node 66 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01jqpf5c387j4nfz4v0xsprhm8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=66&id=MWE0NGQxYjgtMTM3NDFiZjgtYjFkYjgwOC0zMGYyYjhiYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:04:43.068532Z node 66 :SYSTEM_VIEWS INFO: Scan started, actor: [66:7488005256318922195:2377], owner: [66:7488005256318922191:2375], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_read_bytes_one_minute] 2025-03-31T16:04:43.075866Z node 66 :SYSTEM_VIEWS INFO: Scan prepared, actor: [66:7488005256318922195:2377], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-03-31T16:04:43.076714Z node 66 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [66:7488005256318922195:2377], row count: 1, finished: 1 2025-03-31T16:04:43.076774Z node 66 :SYSTEM_VIEWS INFO: Scan finished, actor: [66:7488005256318922195:2377], owner: [66:7488005256318922191:2375], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_read_bytes_one_minute] 2025-03-31T16:04:43.089551Z node 66 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437082951, txId: 281474976710662] shutting down >> KqpErrors::ResolveTableError >> YdbYqlClient::DeleteTableWithDeletedIndex [GOOD] >> YdbYqlClient::CreateTableWithUniformPartitions >> YdbSdkSessions::TestActiveSessionCountAfterBadSession [GOOD] >> YdbSdkSessions::TestActiveSessionCountAfterTransportError >> YdbSdkSessions::SessionsServerLimit [GOOD] >> YdbSdkSessions::SessionsServerLimitWithSessionPool |75.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless |75.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless |75.4%| [LD] {RESULT} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless |75.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeSequence [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeResourcePool [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeTransfer [GOOD] >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryServiceStreamCall [FAIL] |75.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> YdbSdkSessions::CloseSessionAfterDriverDtorWithoutSessionPool [GOOD] >> YdbSdkSessions::CloseSessionWithSessionPoolExplicit >> DataShardTxOrder::RandomPoints_DelayRS_Reboot [GOOD] |75.4%| [LD] {default-linux-x86_64, 
release, asan} $(B)/yql/essentials/tools/sql2yql/sql2yql |75.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/yql/essentials/tools/sql2yql/sql2yql |75.4%| [LD] {RESULT} $(B)/yql/essentials/tools/sql2yql/sql2yql |75.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut |75.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut |75.4%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut >> BuildStatsHistogram::Many_Serial [GOOD] >> KqpSystemView::NodesRange2 [GOOD] >> AsyncIndexChangeExchange::SenderShouldShakeHandsTwice [GOOD] >> AsyncIndexChangeExchange::SenderShouldShakeHandsAfterAddingIndex >> DataShardTxOrder::RandomPoints_DelayRS_Reboot_Dirty [GOOD] >> KqpSystemView::Join [GOOD] >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryService [GOOD] >> TGRpcNewCoordinationClientAuth::OwnersAndPermissions [GOOD] >> TGRpcYdbTest::AlterTableAddIndexBadRequest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeTransfer [GOOD] Test command err: 2025-03-31T16:04:20.620256Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005159491998820:2279];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:20.620304Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0010a7/r3tmp/tmpV2AcDJ/pdisk_1.dat 2025-03-31T16:04:21.428497Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:21.428582Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:21.445434Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:04:21.482608Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21686, node 1 2025-03-31T16:04:21.625268Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:04:21.659302Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:04:21.662228Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:04:21.731676Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:21.731703Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:21.731710Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:21.731844Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25149 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:22.093995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:25.621780Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488005159491998820:2279];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:25.621864Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:25.881327Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005180966836172:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:25.881442Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:26.205968Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:04:26.498671Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005185261803639:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:26.498746Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:26.499067Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005185261803644:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:26.505473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-31T16:04:26.555220Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005185261803646:2353], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-31T16:04:26.620450Z node 1 :TX_PROXY ERROR: Actor# [1:7488005185261803719:2833] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:04:26.793002Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jqpf4w811zfkeww27155phag, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODg5NjQwOGMtMTA2OWFiZDMtNWUzOTQyMjgtMWZiNWZhOTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:04:27.076503Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jqpf4wjqcxj7z839vtacge6h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODg5NjQwOGMtMTA2OWFiZDMtNWUzOTQyMjgtMWZiNWZhOTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Backup "/Root" to "/home/runner/.ya/build/build_root/0l5c/0010a7/r3tmp/tmp82xFGj/"Create temporary directory "/Root/~backup_20250331T160427" in databaseProcess "/home/runner/.ya/build/build_root/0l5c/0010a7/r3tmp/tmp82xFGj/table"Copy tables: { src: "/Root/table", dst: "/Root/~backup_20250331T160427/table" }Backup table "/Root/~backup_20250331T160427/table" to "/home/runner/.ya/build/build_root/0l5c/0010a7/r3tmp/tmp82xFGj/table"Describe table "/Root/~backup_20250331T160427/table"Write scheme into "/home/runner/.ya/build/build_root/0l5c/0010a7/r3tmp/tmp82xFGj/table/scheme.pb"Describe table "/Root/table"Write ACL into "/home/runner/.ya/build/build_root/0l5c/0010a7/r3tmp/tmp82xFGj/table/permissions.pb"Read table "/Root/~backup_20250331T160427/table"Write data into "/home/runner/.ya/build/build_root/0l5c/0010a7/r3tmp/tmp82xFGj/table/data_00.csv"Drop table "/Root/~backup_20250331T160427/table"2025-03-31T16:04:27.770381Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found Remove temporary directory "/Root/~backup_20250331T160427" in database2025-03-31T16:04:27.800295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710668:0, at schemeshard: 72057594046644480 Backup completed successfully2025-03-31T16:04:27.926407Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found Restore "/home/runner/.ya/build/build_root/0l5c/0010a7/r3tmp/tmp82xFGj/" to "/Root"Resolved db base path: "/Root"Restore folder "/home/runner/.ya/build/build_root/0l5c/0010a7/r3tmp/tmp82xFGj/" to "/Root"Process "/home/runner/.ya/build/build_root/0l5c/0010a7/r3tmp/tmp82xFGj/table"Read scheme from "/home/runner/.ya/build/build_root/0l5c/0010a7/r3tmp/tmp82xFGj/table/scheme.pb"Restore table "/home/runner/.ya/build/build_root/0l5c/0010a7/r3tmp/tmp82xFGj/table" to "/Root/table"2025-03-31T16:04:27.989451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 Created "/Root/table"Read data from "/home/runner/.ya/build/build_root/0l5c/0010a7/r3tmp/tmp82xFGj/table/data_00.csv"2025-03-31T16:04:28.163239Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710671. 
Ctx: { TraceId: 01jqpf4xsg6qt0rsb5pj626hk2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODU1NjVhNDktYjEwOTA4NS00Y2RkZmRjYi04NWY5ODE0Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Restore ACL "/home/runner/.ya/build/build_root/0l5c/0010a7/r3tmp/tmp82xFGj/table" to "/Root/table"Read ACL from "/home/runner/.ya/build/build_root/0l5c/0010a7/r3tmp/tmp82xFGj/table/permissions.pb"2025-03-31T16:04:28.196414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710672:0, at schemeshard: 72057594046644480 Restore completed successfully2025-03-31T16:04:28.432156Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710673. Ctx: { TraceId: 01jqpf4xydev22179ddadetqcx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODg5NjQwOGMtMTA2OWFiZDMtNWUzOTQyMjgtMWZiNWZhOTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:04:30.799649Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488005199321087897:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:30.799786Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0010a7/r3tmp/tmpwIqVrc/pdisk_1.dat 2025-03-31T16:04:31.325675Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:31.420735Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:31.420836Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:31.425518Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27400, node 4 2025-03-31T16:04:31.764759Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:31.764793Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:31.764806Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:31.765026Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11491 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDe ... 
0010a7/r3tmp/tmpqsZVGy/table" to "/Root/table"2025-03-31T16:04:37.266804Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 Created "/Root/table"Read data from "/home/runner/.ya/build/build_root/0l5c/0010a7/r3tmp/tmpqsZVGy/table/data_00.csv"Restore index "byValue" on "/Root/table"2025-03-31T16:04:37.451430Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-03-31T16:04:37.593116Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 Restore ACL "/home/runner/.ya/build/build_root/0l5c/0010a7/r3tmp/tmpqsZVGy/table" to "/Root/table"Read ACL from "/home/runner/.ya/build/build_root/0l5c/0010a7/r3tmp/tmpqsZVGy/table/permissions.pb"2025-03-31T16:04:37.820907Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715669:0, at schemeshard: 72057594046644480 Restore completed successfully 2025-03-31T16:04:39.789186Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7488005237154510185:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:39.789285Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0010a7/r3tmp/tmpnwx2Lt/pdisk_1.dat 2025-03-31T16:04:40.168895Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:40.221468Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:40.221579Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:40.224456Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26266, node 7 2025-03-31T16:04:40.479942Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:40.479971Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:40.479981Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:40.483183Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14142 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:40.855987Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:44.784087Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7488005237154510185:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:44.784155Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:44.871129Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488005258629347738:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:44.871324Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:44.880398Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:04:45.312877Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488005262924315251:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:45.312993Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:45.313614Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488005262924315256:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:45.318650Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-31T16:04:45.419185Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7488005262924315258:2354], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-31T16:04:45.572844Z node 7 :TX_PROXY ERROR: Actor# [7:7488005262924315341:2849] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:04:46.064129Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqpf5ekz34wz3qn5rkbpnnzw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=YWZhZWEzMmUtOWMyMTVkMmEtZTIwODVkYjktZGFiNTk4MmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:04:46.443400Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqpf5fcq0adv2nzq1swqa6ee, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=YWZhZWEzMmUtOWMyMTVkMmEtZTIwODVkYjktZGFiNTk4MmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Backup "/Root" to "/home/runner/.ya/build/build_root/0l5c/0010a7/r3tmp/tmpge5Cae/"Create temporary directory "/Root/~backup_20250331T160446" in databaseProcess "/home/runner/.ya/build/build_root/0l5c/0010a7/r3tmp/tmpge5Cae/table"Copy tables: { src: "/Root/table", dst: "/Root/~backup_20250331T160446/table" }2025-03-31T16:04:46.594758Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSequence, opId: 281474976715664:1, at schemeshard: 72057594046644480 Backup table "/Root/~backup_20250331T160446/table" to "/home/runner/.ya/build/build_root/0l5c/0010a7/r3tmp/tmpge5Cae/table"Describe table "/Root/~backup_20250331T160446/table"Write scheme into "/home/runner/.ya/build/build_root/0l5c/0010a7/r3tmp/tmpge5Cae/table/scheme.pb"Describe table "/Root/table"Write ACL into "/home/runner/.ya/build/build_root/0l5c/0010a7/r3tmp/tmpge5Cae/table/permissions.pb"Read table "/Root/~backup_20250331T160446/table"Write data into "/home/runner/.ya/build/build_root/0l5c/0010a7/r3tmp/tmpge5Cae/table/data_00.csv"Drop table "/Root/~backup_20250331T160446/table"2025-03-31T16:04:47.175656Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropSequence, opId: 281474976715667:1, at schemeshard: 72057594046644480 Remove temporary directory "/Root/~backup_20250331T160446" in database2025-03-31T16:04:47.308085Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037890 not found 2025-03-31T16:04:47.336179Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715668:0, at schemeshard: 72057594046644480 Backup completed successfully2025-03-31T16:04:47.390728Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropSequence, opId: 281474976715669:1, at schemeshard: 72057594046644480 Restore "/home/runner/.ya/build/build_root/0l5c/0010a7/r3tmp/tmpge5Cae/" to "/Root"2025-03-31T16:04:47.591620Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037889 not found Resolved db base path: "/Root"Restore folder "/home/runner/.ya/build/build_root/0l5c/0010a7/r3tmp/tmpge5Cae/" to "/Root"Process 
"/home/runner/.ya/build/build_root/0l5c/0010a7/r3tmp/tmpge5Cae/table"Read scheme from "/home/runner/.ya/build/build_root/0l5c/0010a7/r3tmp/tmpge5Cae/table/scheme.pb"Restore table "/home/runner/.ya/build/build_root/0l5c/0010a7/r3tmp/tmpge5Cae/table" to "/Root/table"2025-03-31T16:04:47.657459Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 Created "/Root/table"Read data from "/home/runner/.ya/build/build_root/0l5c/0010a7/r3tmp/tmpge5Cae/table/data_00.csv"2025-03-31T16:04:48.045399Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01jqpf5h3e3kjxd2w4b41pt4a5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=N2ZkMGE0NDktOWFiMWRhNTUtMzMwZmMxYTMtYzJmOWEyNzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Restore ACL "/home/runner/.ya/build/build_root/0l5c/0010a7/r3tmp/tmpge5Cae/table" to "/Root/table"Read ACL from "/home/runner/.ya/build/build_root/0l5c/0010a7/r3tmp/tmpge5Cae/table/permissions.pb"2025-03-31T16:04:48.085777Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715672:0, at schemeshard: 72057594046644480 Restore completed successfully2025-03-31T16:04:48.311750Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01jqpf5hc2f6kcdwgqgw4t57x9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=YWZhZWEzMmUtOWMyMTVkMmEtZTIwODVkYjktZGFiNTk4MmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> YdbSdkSessions::TestSessionPool [GOOD] >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncQueryClient [FAIL] >> YdbSdkSessions::MultiThreadMultipleRequestsOnSharedSessionsTableClient >> BackupRestore::RestoreViewToDifferentDatabase [GOOD] >> BackupRestore::RestoreViewDependentOnAnotherView |75.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::NodesRange2 [GOOD] Test command err: Trying to start YDB, gRPC: 25652, MsgBus: 20845 2025-03-31T16:04:21.160869Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005161611827563:2183];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:21.161467Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:04:21.495320Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488005161914914086:2201];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:22.482273Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7488005167202526625:2293];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:22.491406Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:04:22.611022Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect 
path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001859/r3tmp/tmpKaVDYU/pdisk_1.dat 2025-03-31T16:04:23.106727Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:04:23.139234Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:04:23.291560Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:04:23.620867Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:04:23.730106Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:04:24.124795Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:24.124892Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:24.231534Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:24.231632Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:24.352646Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:24.352742Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:24.352920Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:24.352976Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:24.353072Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:24.353104Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:24.353969Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:04:24.371911Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:04:24.371752Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:04:24.373545Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-03-31T16:04:24.373590Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-03-31T16:04:24.378251Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle 
TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2025-03-31T16:04:24.378298Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:04:24.400944Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:04:24.471722Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:04:24.471946Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:04:24.472068Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:04:24.473182Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:04:24.680064Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:04:24.650727Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:24.731555Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 25652, node 1 2025-03-31T16:04:25.907960Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:25.907985Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:25.907992Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:25.908109Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:04:26.052744Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488005161611827563:2183];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:26.052808Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:26.356091Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488005161914914086:2201];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:26.359765Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:27.482932Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7488005167202526625:2293];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:27.482991Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:20845 TClient is connected to server localhost:20845 WaitRootIsUp 'Root'... 
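The TClient::Ls request/response pairs throughout this log are the test harness describing scheme entries (the database root and its .sys child, with the PathDescription fields printed inline). A minimal sketch of the equivalent call through the public C++ SDK scheme client — header paths and field names are assumptions and vary between SDK versions; the endpoint is a placeholder, whereas the tests above talk to an in-process server on a random gRPC port:

    #include <ydb/public/sdk/cpp/client/ydb_driver/driver.h>
    #include <ydb/public/sdk/cpp/client/ydb_scheme/scheme.h>
    #include <iostream>

    int main() {
        auto driver = NYdb::TDriver(NYdb::TDriverConfig()
                                        .SetEndpoint("localhost:2136")  // placeholder
                                        .SetDatabase("/Root"));
        NYdb::NScheme::TSchemeClient scheme(driver);

        auto result = scheme.DescribePath("/Root").GetValueSync();
        if (result.IsSuccess()) {
            const auto& entry = result.GetEntry();
            // Name/Owner mirror the PathDescription fields printed by Ls above.
            std::cout << entry.Name << " owner=" << entry.Owner << std::endl;
        }
        driver.Stop(true);
    }
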
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:29.775159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:29.977875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:31.524652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:33.576286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:34.440081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:39.575503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:04:39.575531Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:40.117195Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005243216208142:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:40.117339Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:40.692153Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:04:40.924542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:04:41.102594Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:04:41.208057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:04:41.359198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:04:41.492892Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:04:41.735385Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005247511176124:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:41.735447Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:41.735978Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005247511176129:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:41.741459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:04:41.840612Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005247511176131:2412], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:04:41.932549Z node 1 :TX_PROXY ERROR: Actor# [1:7488005247511176207:4128] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:04:44.617104Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437084554, txId: 281474976710671] shutting down 2025-03-31T16:04:44.963185Z node 5 :BS_PROXY_PUT ERROR: [d9b551d1937cff3e] Result# TEvPutResult {Id# [72075186224037899:1:20:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037899:1:20:0:0:41:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "BS_QUEUE: event undelivered", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2025-03-31T16:04:44.964862Z node 4 :BS_PROXY_PUT ERROR: [f3e27ce81c54d9a7] Result# TEvPutResult {Id# [72075186224037913:1:20:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037913:1:20:0:0:41:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "BS_QUEUE: event undelivered", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2025-03-31T16:04:44.990235Z node 2 :BS_PROXY_PUT ERROR: [25d18d85f3fb8437] Result# TEvPutResult {Id# [72075186224037915:1:20:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037915:1:20:0:0:41:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "BS_QUEUE: event undelivered", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2025-03-31T16:04:45.023677Z node 3 :BS_PROXY_PUT ERROR: [84b02d1a0b6d1cda] Result# TEvPutResult {Id# [72075186224037912:1:20:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037912:1:20:0:0:41:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "BS_QUEUE: event undelivered", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 |75.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/ttl_tiering/ydb-tests-olap-ttl_tiering |75.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/ttl_tiering/ydb-tests-olap-ttl_tiering |75.4%| [LD] {RESULT} $(B)/ydb/tests/olap/ttl_tiering/ydb-tests-olap-ttl_tiering |75.4%| [TA] $(B)/ydb/core/mind/hive/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryService [GOOD] Test command err: 2025-03-31T16:04:43.101863Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005257862888479:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:43.110662Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d60/r3tmp/tmphi1o7M/pdisk_1.dat 2025-03-31T16:04:44.320120Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:04:44.341377Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:44.417126Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:44.417223Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:44.436237Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11083, node 1 2025-03-31T16:04:44.851801Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:44.851825Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:44.851832Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:44.851947Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26571 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:45.450727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
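A pattern that recurs in almost every test in this log: the first query against a fresh database races with automatic creation of the `default` resource pool, so TPoolFetcherActor reports NOT_FOUND, TPoolCreatorActor schedules a retry ("Transaction ... completed, doublechecking"), and the second CreateResourcePool proposal is accepted as "path exist, request accepts it". These WARN/ERROR lines are expected bootstrap noise, not test failures. A hypothetical sketch of that retry shape — names, statuses, and timings are invented for illustration only:

    #include <chrono>
    #include <functional>
    #include <thread>

    enum class EStatus { Success, NotFound };

    // Retry an operation that can observe NOT_FOUND while a concurrent
    // transaction is still creating the object it depends on.
    EStatus RunWithDoublecheck(const std::function<EStatus()>& op,
                               int attempts = 5) {
        using namespace std::chrono_literals;
        auto delay = 50ms;
        for (int i = 0; i < attempts; ++i) {
            if (op() == EStatus::Success) {
                return EStatus::Success;
            }
            std::this_thread::sleep_for(delay);  // "Scheduled retry for error"
            delay *= 2;                          // back off between doublechecks
        }
        return EStatus::NotFound;
    }
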
2025-03-31T16:04:48.039427Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488005257862888479:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:48.039508Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:49.271229Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005283632693326:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:49.271332Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:49.743519Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 2146435072 Duration# 0.108767s 2025-03-31T16:04:49.743585Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.108880s 2025-03-31T16:04:49.773748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:04:50.051459Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005287927660815:2358], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:50.051544Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:50.051680Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005287927660821:2361], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:50.055054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-31T16:04:50.098271Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005287927660823:2362], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-31T16:04:50.205001Z node 1 :TX_PROXY ERROR: Actor# [1:7488005287927660896:2811] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:04:50.373823Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jqpf5k809x9s7n31g0zmq5b1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDBhZWYzOS0xMTY0YzVlNy03OTVhNjc5LWNjN2ZjNjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:04:50.461819Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jqpf5kmw32s6hnb8b5vgj6x2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzIxMWEzM2UtMzExOTA2ODYtOWY5NmQwNzQtYmU1NTJmOTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root |75.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_DelayRS_Reboot [GOOD] Test command err: 2025-03-31T16:03:00.605121Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:03:00.606588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:03:00.606656Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:00.620670Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:03:00.623997Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:131:2154] 2025-03-31T16:03:00.624361Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:03:00.636168Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:03:00.696546Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:03:00.696961Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:03:00.698659Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-03-31T16:03:00.698753Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-03-31T16:03:00.698801Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-03-31T16:03:00.703833Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:03:00.705180Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:03:00.705311Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:197:2154] in generation 2 2025-03-31T16:03:00.837672Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:03:00.917197Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-03-31T16:03:00.917466Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:03:00.917615Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-03-31T16:03:00.917665Z node 1 
:TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-03-31T16:03:00.917717Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-03-31T16:03:00.917788Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:03:00.918072Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:03:00.918135Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:03:00.918498Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-03-31T16:03:00.918613Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-03-31T16:03:00.918672Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-31T16:03:00.918714Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:03:00.918763Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-03-31T16:03:00.918809Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-31T16:03:00.918857Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-31T16:03:00.918896Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-03-31T16:03:00.918938Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:03:00.923213Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:03:00.923348Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:03:00.923443Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-03-31T16:03:00.926521Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:131:2154]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-03-31T16:03:00.926616Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T16:03:00.926756Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-31T16:03:00.926962Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-03-31T16:03:00.927037Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-03-31T16:03:00.928813Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-03-31T16:03:00.928911Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-31T16:03:00.928987Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-03-31T16:03:00.929053Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-03-31T16:03:00.929091Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 
9437184 on unit StoreSchemeTx 2025-03-31T16:03:00.929505Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-03-31T16:03:00.929568Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-03-31T16:03:00.929609Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-03-31T16:03:00.932161Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-03-31T16:03:00.932256Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-03-31T16:03:00.932313Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-03-31T16:03:00.932356Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-03-31T16:03:00.932391Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-03-31T16:03:00.932417Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-03-31T16:03:00.945238Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-03-31T16:03:00.945332Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-03-31T16:03:00.945365Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-03-31T16:03:00.945442Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-03-31T16:03:00.945514Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-03-31T16:03:00.946116Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:03:00.946170Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:03:00.946231Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-03-31T16:03:00.946383Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:131:2154]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-03-31T16:03:00.946419Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-31T16:03:00.946624Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-03-31T16:03:00.946675Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-31T16:03:00.946719Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-03-31T16:03:00.946765Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-03-31T16:03:00.950691Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-03-31T16:03:00.950778Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:03:00.955444Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
2025-03-31T16:03:00.955527Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:03:00.955614Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-31T16:03:00.955681Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:03:00.955721Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-31T16:03:00.955768Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-03-31T16:03:00.955815Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-03-31T16:03:00.955867Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-31T16:03:00.955928Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-03-31T16:03:00.955968Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-03-31T16:03:00.956017Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-03-31T16:03:00.956272Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-03-31T16:03:00.956320Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-31T16:03:00.956359Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-03-31T16:03:00.956385Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-03-31T16:03:00.956407Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-03-31T16:03:00.956492Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-31T16:03:00.956516Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-03-31T16:03:00.956548Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-31T16:03:00.956579Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-03-31T16:03:00.956659Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-03-31T16:03:00.956698Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-03-31T16:03:00.956743Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-03-31T16:03:00.956787Z node 1 :TX_DATA ... 
DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:04:50.270271Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:04:50.270307Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:22] at 9437184 on unit CompleteOperation 2025-03-31T16:04:50.270355Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 22] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:04:50.270391Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:04:50.270646Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:04:50.270684Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:23] at 9437184 on unit CompleteOperation 2025-03-31T16:04:50.270731Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 23] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:04:50.270770Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:04:50.270944Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:04:50.270975Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:24] at 9437184 on unit CompleteOperation 2025-03-31T16:04:50.271022Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 24] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:04:50.271059Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:04:50.283582Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:04:50.283723Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:25] at 9437184 on unit CompleteOperation 2025-03-31T16:04:50.283815Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 25] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:04:50.283904Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:04:50.284179Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:04:50.284227Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:26] at 9437184 on unit CompleteOperation 2025-03-31T16:04:50.284304Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 26] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:04:50.284350Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:04:50.284565Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:04:50.284604Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:27] at 9437184 on unit CompleteOperation 2025-03-31T16:04:50.284657Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 27] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:04:50.284697Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:04:50.284931Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:04:50.284970Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:28] at 9437184 on unit CompleteOperation 2025-03-31T16:04:50.285023Z node 32 
:TX_DATASHARD DEBUG: Complete [1000004 : 28] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:04:50.285060Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:04:50.285269Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:04:50.285308Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:29] at 9437184 on unit CompleteOperation 2025-03-31T16:04:50.285359Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 29] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:04:50.285394Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:04:50.285589Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:04:50.285625Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:30] at 9437184 on unit CompleteOperation 2025-03-31T16:04:50.285672Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 30] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:04:50.285706Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:04:50.285969Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:04:50.286006Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:31] at 9437184 on unit CompleteOperation 2025-03-31T16:04:50.286055Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 31] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:04:50.286093Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:04:50.286304Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:04:50.286340Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:32] at 9437184 on unit CompleteOperation 2025-03-31T16:04:50.286395Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 32] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:04:50.286435Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:04:50.286665Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:04:50.286701Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:33] at 9437184 on unit CompleteOperation 2025-03-31T16:04:50.286749Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 33] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:04:50.286785Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:04:50.287013Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:04:50.287056Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:34] at 9437184 on unit CompleteOperation 2025-03-31T16:04:50.291721Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 34] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:04:50.291874Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:04:50.292303Z node 32 :TX_DATASHARD DEBUG: 
TTxProgressTransaction::Complete at 9437184 2025-03-31T16:04:50.292365Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:35] at 9437184 on unit CompleteOperation 2025-03-31T16:04:50.292431Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 35] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:04:50.292478Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:04:50.292682Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:04:50.292724Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:36] at 9437184 on unit CompleteOperation 2025-03-31T16:04:50.292776Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 36] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:04:50.292820Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:04:50.293080Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:04:50.293127Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:37] at 9437184 on unit CompleteOperation 2025-03-31T16:04:50.293181Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 37] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:04:50.293225Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:04:50.293565Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:800:2726], Recipient [32:343:2311]: {TEvReadSet step# 1000004 txid# 5 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2025-03-31T16:04:50.293624Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:04:50.293675Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 5 2025-03-31T16:04:50.293809Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:800:2726], Recipient [32:343:2311]: {TEvReadSet step# 1000004 txid# 7 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2025-03-31T16:04:50.293851Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:04:50.293889Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 7 2025-03-31T16:04:50.294033Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:800:2726], Recipient [32:343:2311]: {TEvReadSet step# 1000004 txid# 9 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2025-03-31T16:04:50.294076Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:04:50.294113Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 9 2025-03-31T16:04:50.294227Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:800:2726], Recipient [32:343:2311]: {TEvReadSet step# 1000004 txid# 10 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2025-03-31T16:04:50.294269Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:04:50.294304Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 
9437185 source 9437185 dest 9437184 consumer 9437184 txId 10 2025-03-31T16:04:50.294407Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:800:2726], Recipient [32:343:2311]: {TEvReadSet step# 1000004 txid# 12 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2025-03-31T16:04:50.294448Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:04:50.294485Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 12 2025-03-31T16:04:50.294587Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:800:2726], Recipient [32:343:2311]: {TEvReadSet step# 1000004 txid# 13 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 9} 2025-03-31T16:04:50.294624Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:04:50.294660Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 13 expect 27 30 29 29 24 16 28 31 30 31 31 28 29 26 31 30 28 29 27 31 31 27 31 31 31 29 - - - - 17 - actual 27 30 29 29 24 16 28 31 30 31 31 28 29 26 31 30 28 29 27 31 31 27 31 31 31 29 - - - - 17 - interm - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - >> YdbQueryService::TestCreateDropAttachSession [GOOD] >> YdbQueryService::TestCreateAttachAndDropAttachedSession >> TGRpcYdbTest::SdkUuid [GOOD] >> TGRpcYdbTest::SdkUuidViaParams ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest >> YdbSdkSessions::TestSessionPool [GOOD] Test command err: 2025-03-31T16:04:41.586405Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005249558620999:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:41.586469Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d7c/r3tmp/tmpXk0qaK/pdisk_1.dat 2025-03-31T16:04:42.769058Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:42.786782Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:04:42.813670Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:42.813774Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:42.843614Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28535, node 1 2025-03-31T16:04:43.119708Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:43.119737Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:43.119744Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:43.119897Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28455 WaitRootIsUp 'Root'... 
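The TX_DATASHARD TRACE lines above walk an operation through the datashard's execution-unit pipeline: each unit (CheckSchemeTx, StoreSchemeTx, FinishPropose, WaitForPlan, PlanQueue, LoadTxDetails, BuildAndWaitDependencies, CompleteOperation, ...) returns a status such as Executed, DelayComplete, or "not ready", which decides whether the operation advances to the next unit, defers side effects to the tablet's Complete phase, or parks until an event like TEvPlanStep re-activates it. An illustrative model of that control flow — not YDB's actual classes:

    #include <functional>
    #include <iostream>
    #include <string>
    #include <vector>

    // Statuses seen in the trace: DelayComplete defers side effects to the
    // tablet's Complete() phase; NotReady parks the operation until an event
    // (e.g. TEvPlanStep) resumes it.
    enum class EStatus { Executed, DelayComplete, NotReady };

    struct TUnit {
        std::string Name;
        std::function<EStatus()> Execute;
    };

    int main() {
        std::vector<TUnit> pipeline = {
            {"CheckSchemeTx", [] { return EStatus::Executed; }},
            {"StoreSchemeTx", [] { return EStatus::DelayComplete; }},
            {"FinishPropose", [] { return EStatus::DelayComplete; }},
            {"WaitForPlan",   [] { return EStatus::NotReady; }},
        };
        for (const auto& unit : pipeline) {
            const EStatus st = unit.Execute();
            std::cout << "unit " << unit.Name << " -> "
                      << static_cast<int>(st) << "\n";
            if (st == EStatus::NotReady) {
                break;  // operation waits; the plan step resumes it later
            }
        }
    }
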
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:43.850699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:43.885720Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:04:46.587272Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488005249558620999:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:46.587346Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:48.502548Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005279623393235:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:48.502640Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005279623393226:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:48.502780Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:48.508036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:04:48.543357Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005279623393240:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:04:48.648405Z node 1 :TX_PROXY ERROR: Actor# [1:7488005279623393317:2711] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::Join [GOOD] Test command err: Trying to start YDB, gRPC: 28325, MsgBus: 3806 2025-03-31T16:04:33.189768Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005212289665273:2190];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:33.189826Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00180c/r3tmp/tmpiL2ifT/pdisk_1.dat 2025-03-31T16:04:33.996699Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:33.996846Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:34.007940Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:34.018200Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28325, node 1 2025-03-31T16:04:34.261592Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:34.261626Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:34.261635Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:34.261776Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3806 TClient is connected to server localhost:3806 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:35.190715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:04:35.253973Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:04:35.267975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:35.520995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:35.808441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:35.978334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:38.191052Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488005212289665273:2190];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:38.191129Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:38.581703Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005233764503422:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:38.581857Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:39.052879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:04:39.109918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:04:39.188004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:04:39.229080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:04:39.276405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:04:39.351297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:04:39.462626Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005238059471244:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:39.462713Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:39.463171Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005238059471249:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:39.467612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:04:39.480336Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005238059471251:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:04:39.560453Z node 1 :TX_PROXY ERROR: Actor# [1:7488005238059471307:3464] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:04:41.432590Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437081412, txId: 281474976710671] shutting down waiting... 2025-03-31T16:04:42.828921Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437082816, txId: 281474976710673] shutting down waiting... 2025-03-31T16:04:44.423171Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437084295, txId: 281474976710675] shutting down waiting... 2025-03-31T16:04:45.767030Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437085739, txId: 281474976710677] shutting down waiting... 2025-03-31T16:04:47.048133Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437087027, txId: 281474976710679] shutting down waiting... 2025-03-31T16:04:48.536070Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437088486, txId: 281474976710681] shutting down 2025-03-31T16:04:48.967368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:04:48.967399Z node 1 :IMPORT WARN: Table profiles were not loaded waiting... 2025-03-31T16:04:50.163600Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437090131, txId: 281474976710683] shutting down 2025-03-31T16:04:50.961934Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437090925, txId: 281474976710685] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> BuildStatsHistogram::Many_Serial [GOOD] Test command err: Got : 24000 2106439 49449 9 9 Expected: 24000 2106439 49449 9 9 { [2455, 2599), [2798, 3624), [4540, 4713), [5654, 7161), [8509, 8794), [8936, 9973), [11888, 14280), [14337, 14882), [15507, 16365), [17368, 19451), [19536, 20135), [20790, 21503), [21589, 23243) } Got : 12816 1121048 49449 9 9 Expected: 12816 1121048 49449 9 9 Got : 24000 3547100 81694 9 9 Expected: 24000 3547100 81694 9 9 { [1012, 1475), [1682, 1985), [2727, 3553), [3599, 3992), [5397, 7244), [9181, 9807), [9993, 10178), [12209, 14029), [15089, 15342), [16198, 16984), [17238, 18436), [21087, 21876), [23701, 23794) } Got : 9582 1425282 81694 9 9 Expected: 9582 1425282 81694 9 9 Got : 24000 2460139 23760 9 9 Expected: 24000 2460139 23760 9 9 { [1296, 2520), [3888, 4320), [5040, 6840), [6912, 7272), [10872, 11160), [11520, 12096), [12096, 13824), [15192, 15624), [17064, 17856), [18216, 19296), [19800, 20160), [20736, 21096), [21096, 22104) } Got : 10440 1060767 23760 9 9 Expected: 10440 1060767 23760 9 9 Got : 24000 4054050 46562 9 9 Expected: 24000 4054050 46562 9 9 { [460, 1518), [2300, 2484), [2760, 4002), [4600, 5842), [6302, 9752), [11178, 12328), [14582, 14858), [16790, 18032), [18216, 18446), [18722, 19504), [19504, 19964), [20378, 20470), [21344, 23506) } Got : 13570 2273213 46562 9 9 Expected: 13570 2273213 46562 9 9 Got : 24000 
2106459 49449 9 9 Expected: 24000 2106459 49449 9 9 Got : 24000 2460219 23555 9 9 Expected: 24000 2460219 23555 9 9 Got : 24000 4054270 46543 9 9 Expected: 24000 4054270 46543 9 9 Got : 24000 2106439 25272 38 44 Expected: 24000 2106439 25272 38 44 { [2455, 2599), [2798, 3624), [4540, 4713), [5654, 7161), [8509, 8794), [8936, 9973), [11888, 14280), [14337, 14882), [15507, 16365), [17368, 19451), [19536, 20135), [20790, 21503), [21589, 23243) } Got : 12816 1121048 25272 20 23 Expected: 12816 1121048 25272 20 23 Got : 24000 3547100 49916 64 44 Expected: 24000 3547100 49916 64 44 { [1012, 1475), [1682, 1985), [2727, 3553), [3599, 3992), [5397, 7244), [9181, 9807), [9993, 10178), [12209, 14029), [15089, 15342), [16198, 16984), [17238, 18436), [21087, 21876), [23701, 23794) } Got : 9582 1425198 49916 26 17 Expected: 9582 1425198 49916 26 17 Got : 24000 2460139 13170 42 41 Expected: 24000 2460139 13170 42 41 { [1296, 2520), [3888, 4320), [5040, 6840), [6912, 7272), [10872, 11160), [11520, 12096), [12096, 13824), [15192, 15624), [17064, 17856), [18216, 19296), [19800, 20160), [20736, 21096), [21096, 22104) } Got : 10440 1060798 13170 18 18 Expected: 10440 1060798 13170 18 18 Got : 24000 4054050 29361 68 43 Expected: 24000 4054050 29361 68 43 { [460, 1518), [2300, 2484), [2760, 4002), [4600, 5842), [6302, 9752), [11178, 12328), [14582, 14858), [16790, 18032), [18216, 18446), [18722, 19504), [19504, 19964), [20378, 20470), [21344, 23506) } Got : 13570 2277890 29361 38 24 Expected: 13570 2277890 29361 38 24 Got : 24000 2106459 25428 38 44 Expected: 24000 2106459 25428 38 44 Got : 24000 2460219 13482 41 41 Expected: 24000 2460219 13482 41 41 Got : 24000 4054270 29970 67 43 Expected: 24000 4054270 29970 67 43 Got : 24000 2106479 25458 38 44 Expected: 24000 2106479 25458 38 44 Got : 24000 2460259 13528 42 41 Expected: 24000 2460259 13528 42 41 Got : 24000 4054290 30013 67 43 Expected: 24000 4054290 30013 67 43 1 parts: [0:0:1:0:0:0:0] 240000 rows, 10181 pages, 7 levels: (159964, 53329) (319996, 106673) (479902, 159975) (639565, 213196) (799303, 266442) Checking BTree: Touched 0% bytes, 4 pages RowCountHistogram: 10% (actual 10%) key = (80152, 26725) value = 24033 (actual 24079 - 0% error) 10% (actual 10%) key = (160300, 53441) value = 48088 (actual 48136 - 0% error) 10% (actual 10%) key = (241096, 80373) value = 72280 (actual 72327 - 0% error) 10% (actual 10%) key = (321454, 107159) value = 96428 (actual 96478 - 0% error) 10% (actual 10%) key = (402202, 134075) value = 120604 (actual 120651 - 0% error) 10% (actual 10%) key = (482362, 160795) value = 144727 (actual 144775 - 0% error) 10% (actual 10%) key = (562825, 187616) value = 168893 (actual 168936 - 0% error) 10% (actual 10%) key = (642871, 214298) value = 192974 (actual 193024 - 0% error) 5% (actual 5%) key = (683260, 227761) value = 205073 (actual 205115 - 0% error) 14% (actual 14%) DataSizeHistogram: 10% (actual 10%) key = (80152, 26725) value = 2048715 (actual 2052707 - 0% error) 10% (actual 10%) key = (160300, 53441) value = 4098370 (actual 4102393 - 0% error) 10% (actual 10%) key = (241096, 80373) value = 6145924 (actual 6149966 - 0% error) 10% (actual 10%) key = (321454, 107159) value = 8194622 (actual 8198636 - 0% error) 10% (actual 10%) key = (402202, 134075) value = 10244365 (actual 10248317 - 0% error) 10% (actual 10%) key = (482362, 160795) value = 12292389 (actual 12296360 - 0% error) 10% (actual 10%) key = (562825, 187616) value = 14344066 (actual 14348128 - 0% error) 10% (actual 10%) key = (642871, 214298) value = 16393002 (actual 
16396983 - 0% error) 5% (actual 5%) key = (683260, 227761) value = 17416844 (actual 17420850 - 0% error) 14% (actual 14%) Checking Flat: Touched 100% bytes, 1 pages RowCountHistogram: 10% (actual 10%) key = (80065, 26696) value = 24008 (actual 24056 - 0% error) 10% (actual 10%) key = (160045, 53356) value = 48012 (actual 48061 - 0% error) 10% (actual 10%) key = (240238, 80087) value = 72016 (actual 72061 - 0% error) 10% (actual 10%) key = (320152, 106725) value = 96035 (actual 96085 - 0% error) 10% (actual 10%) key = (400354, 133459) value = 120047 (actual 120093 - 0% error) 10% (actual 10%) key = (480133, 160052) value = 144053 (actual 144100 - 0% error) 10% (actual 10%) key = (560080, 186701) value = 168060 (actual 168102 - 0% error) 10% (actual 10%) key = (639892, 213305) value = 192073 (actual 192119 - 0% error) 10% (actual 10%) key = (719776, 239933) value = 216090 (actual 216137 - 0% error) 9% (actual 9%) DataSizeHistogram: 10% (actual 10%) key = (79732, 26585) value = 2038706 (actual 2042645 - 0% error) 10% (actual 10%) key = (159427, 53150) value = 4076220 (actual 4080259 - 0% error) 10% (actual 10%) key = (239872, 79965) value = 6113940 (actual 6117932 - 0% error) 10% (actual 10%) key = (319834, 106619) value = 8152983 (actual 8156951 - 0% error) 10% (actual 10%) key = (400105, 133376) value = 10190566 (actual 10194584 - 0% error) 10% (actual 10%) key = (479833, 159952) value = 12228261 (actual 12232212 - 0% error) 10% (actual 10%) key = (559774, 186599) value = 14265925 (actual 14269984 - 0% error) 10% (actual 10%) key = (639385, 213136) value = 16304923 (actual 16308915 - 0% error) 10% (actual 10%) key = (719437, 239820) value = 18342658 (actual 18346641 - 0% error) 9% (actual 9%) Checking Mixed: Touched 1% bytes, 51 pages RowCountHistogram: 10% (actual 10%) key = (80152, 26725) value = 24033 (actual 24079 - 0% error) 10% (actual 10%) key = (160300, 53441) value = 48088 (actual 48136 - 0% error) 10% (actual 10%) key = (241096, 80373) value = 72280 (actual 72327 - 0% error) 10% (actual 10%) key = (321454, 107159) value = 96428 (actual 96478 - 0% error) 10% (actual 10%) key = (402202, 134075) value = 120604 (actual 120651 - 0% error) 10% (actual 10%) key = (482362, 160795) value = 144727 (actual 144775 - 0% error) 10% (actual 10%) key = (562825, 187616) value = 168893 (actual 168936 - 0% error) 10% (actual 10%) key = (642871, 214298) value = 192974 (actual 193024 - 0% error) 10% (actual 10%) key = (723403, 241142) value = 217180 (actual 217228 - 0% error) 9% (actual 9%) DataSizeHistogram: 10% (actual 10%) key = (80152, 26725) value = 2048715 (actual 2052707 - 0% error) 10% (actual 10%) key = (160300, 53441) value = 4098370 (actual 4102393 - 0% error) 10% (actual 10%) key = (241096, 80373) value = 6145924 (actual 6149966 - 0% error) 10% (actual 10%) key = (321454, 107159) value = 8194622 (actual 8198636 - 0% error) 10% (actual 10%) key = (402202, 134075) value = 10244365 (actual 10248317 - 0% error) 10% (actual 10%) key = (482362, 160795) value = 12292389 (actual 12296360 - 0% error) 10% (actual 10%) key = (562825, 187616) value = 14344066 (actual 14348128 - 0% error) 10% (actual 10%) key = (642871, 214298) value = 16393002 (actual 16396983 - 0% error) 10% (actual 10%) key = (723403, 241142) value = 18443184 (actual 18447186 - 0% error) 9% (actual 9%) { [12965, 17271), [20685, 27602), [31405, 43682), [58051, 73731), [81074, 85635), [86559, 89297), [92588, 112654), [134937, 148111), [152568, 158136), [169526, 171272), [181381, 184364), [188301, 199001), [201179, 227534) } 1 parts: 
[0:0:1:0:0:0:0] 240000 rows, 10181 pages, 7 levels: (159964, 53329) (319996, 106673) (479902, 159975) (639565, 213196) (799303, 266442) Checking BTree: Touched 3% bytes, 111 pages RowCountHistogram: 6% (actual 6%) key = (80152, 26725) value = 7654 (actual 7700 - 0% error) 11% (actual 11%) key = (140245, 46756) value = 21908 (actual 21959 - 0% error) 12% (actual 12%) key = (241096, 80373) value = 37729 (actual 37776 - 0% error) 5% (actual 5%) key = (291388, 97137) value = 44561 (actual 44610 - 0% error) 14% (actual 14%) key = (361831, 120618) value = 62406 (actual 62455 - 0% error) 6% (actual 6%) key = (462178, 154067) value = 70269 (actual 70314 - 0% error) 10% (actual 10%) key = (522574, 174199) value = 83950 (actual 83996 - 0% error) 9% (actual 9%) key = (647905, 215976) value = 96207 (actual 96256 - 0% error) 11% (actual 11%) key = (703270, 234431) value = 110645 (actual 110694 - 0% error) 12% (actual 12%) DataSizeHistogram: 6% (actual 6%) key = (80152, 26725) value = 650681 (actual 654673 - 0% error) 11% (actual 11%) key = (140245, 46756) value = 1862907 (actual 1866988 - 0% error) 12% (actual 12%) key = (241096, 80373) value = 3200081 (actual 3204123 - 0% error) 5% (actual 5%) key = (291388, 97137) value = 3780473 (actual 3784554 - 0% error) 14% (actual 14%) key = (361831, 120618) value = 5294670 (actual 5298760 - 0% error) 6% (actual 6%) key = (462178, 154067) value = 5965285 (actual 5969310 - 0% error) 10% (actual 10%) key = (522574, 174199) value = 7125413 (actual 7129406 - 0% error) 9% (actual 9%) key = (647905, 215976) value = 8166922 (actual 8170966 - 0% error) 11% (actual 11%) key = (703270, 234431) value = 9391370 (actual 9395383 - 0% error) 12% (actual 12%) { [12965, 17271), [20685, 27602), [31405, 43682), [58051, 73731), [81074, 85635), [86559, 89297), [92588, 112654), [134937, 148111), [152568, 158136), [169526, 171272), [181381, 184364), [188301, 199001), [201179, 227534) } Checking Flat: Touched 100% bytes, 1 pages RowCountHistogram: 10% (actual 10%) key = (109672, 36565) value = 12716 (actual 12760 - 0% error) 10% (actual 10%) key = (200011, 66678) value = 25439 (actual 25485 - 0% error) 10% (actual 10%) key = (242497, 80840) value = 38151 (actual 38197 - 0% error) 10% (actual 10%) key = (323278, 107767) value = 50861 (actual 50910 - 0% error) 9% (actual 9%) key = (365755, 121926) value = 63568 (actual 63614 - 0% error) 10% (actual 10%) key = (482191, 160738) value = 76283 (actual 76335 - 0% error) 10% (actual 9%) key = (610882, 203635) value = 88992 (actual 89039 - 0% error) 10% (actual 10%) key = (673702, 224575) value = 101722 (actual 101768 - 0% error) 10% (actual 10%) key = (715753, 238592) value = 114435 (actual 114484 - 0% error) 9% (actual 9%) DataSizeHistogram: 10% (actual 10%) ... 
140, NULL) (311209, NULL) (311281, NULL) (311344, NULL) (311416, NULL) [0:0:935:0:0:0:0] 100 rows, 100 pages, 4 levels: (311479, NULL) (311542, NULL) (311614, NULL) (311683, NULL) (311755, NULL) [0:0:936:0:0:0:0] 100 rows, 100 pages, 4 levels: (311821, NULL) (311890, NULL) (311956, NULL) (312034, NULL) (312100, NULL) [0:0:937:0:0:0:0] 100 rows, 100 pages, 4 levels: (312172, NULL) (312232, NULL) (312301, NULL) (312370, NULL) (312439, NULL) [0:0:938:0:0:0:0] 100 rows, 100 pages, 4 levels: (312508, NULL) (312571, NULL) (312637, NULL) (312700, NULL) (312760, NULL) [0:0:939:0:0:0:0] 100 rows, 100 pages, 4 levels: (312835, NULL) (312904, NULL) (312970, NULL) (313030, NULL) (313102, NULL) [0:0:940:0:0:0:0] 100 rows, 100 pages, 4 levels: (313174, NULL) (313240, NULL) (313300, NULL) (313366, NULL) (313429, NULL) [0:0:941:0:0:0:0] 100 rows, 100 pages, 4 levels: (313498, NULL) (313573, NULL) (313639, NULL) (313699, NULL) (313768, NULL) [0:0:942:0:0:0:0] 100 rows, 100 pages, 4 levels: (313828, NULL) (313891, NULL) (313957, NULL) (314023, NULL) (314086, NULL) [0:0:943:0:0:0:0] 100 rows, 100 pages, 4 levels: (314149, NULL) (314212, NULL) (314275, NULL) (314338, NULL) (314401, NULL) [0:0:944:0:0:0:0] 100 rows, 100 pages, 4 levels: (314464, NULL) (314530, NULL) (314590, NULL) (314656, NULL) (314719, NULL) [0:0:945:0:0:0:0] 100 rows, 100 pages, 4 levels: (314788, NULL) (314854, NULL) (314920, NULL) (314983, NULL) (315046, NULL) [0:0:946:0:0:0:0] 100 rows, 100 pages, 4 levels: (315109, NULL) (315178, NULL) (315238, NULL) (315304, NULL) (315370, NULL) [0:0:947:0:0:0:0] 100 rows, 100 pages, 4 levels: (315433, NULL) (315496, NULL) (315565, NULL) (315631, NULL) (315697, NULL) [0:0:948:0:0:0:0] 100 rows, 100 pages, 4 levels: (315766, NULL) (315826, NULL) (315889, NULL) (315952, NULL) (316024, NULL) [0:0:949:0:0:0:0] 100 rows, 100 pages, 4 levels: (316087, NULL) (316156, NULL) (316222, NULL) (316288, NULL) (316357, NULL) [0:0:950:0:0:0:0] 100 rows, 100 pages, 4 levels: (316432, NULL) (316498, NULL) (316564, NULL) (316636, NULL) (316705, NULL) [0:0:951:0:0:0:0] 100 rows, 100 pages, 4 levels: (316768, NULL) (316831, NULL) (316891, NULL) (316951, NULL) (317011, NULL) [0:0:952:0:0:0:0] 100 rows, 100 pages, 4 levels: (317080, NULL) (317143, NULL) (317218, NULL) (317287, NULL) (317356, NULL) [0:0:953:0:0:0:0] 100 rows, 100 pages, 4 levels: (317422, NULL) (317497, NULL) (317563, NULL) (317632, NULL) (317701, NULL) [0:0:954:0:0:0:0] 100 rows, 100 pages, 4 levels: (317764, NULL) (317824, NULL) (317887, NULL) (317953, NULL) (318019, NULL) [0:0:955:0:0:0:0] 100 rows, 100 pages, 4 levels: (318088, NULL) (318166, NULL) (318235, NULL) (318304, NULL) (318370, NULL) [0:0:956:0:0:0:0] 100 rows, 100 pages, 4 levels: (318442, NULL) (318511, NULL) (318574, NULL) (318640, NULL) (318703, NULL) [0:0:957:0:0:0:0] 100 rows, 100 pages, 4 levels: (318772, NULL) (318838, NULL) (318898, NULL) (318970, NULL) (319036, NULL) [0:0:958:0:0:0:0] 100 rows, 100 pages, 4 levels: (319099, NULL) (319162, NULL) (319225, NULL) (319294, NULL) (319360, NULL) [0:0:959:0:0:0:0] 100 rows, 100 pages, 4 levels: (319423, NULL) (319492, NULL) (319555, NULL) (319621, NULL) (319687, NULL) [0:0:960:0:0:0:0] 100 rows, 100 pages, 4 levels: (319753, NULL) (319828, NULL) (319900, NULL) (319963, NULL) (320035, NULL) [0:0:961:0:0:0:0] 100 rows, 100 pages, 4 levels: (320104, NULL) (320164, NULL) (320233, NULL) (320299, NULL) (320365, NULL) [0:0:962:0:0:0:0] 100 rows, 100 pages, 4 levels: (320428, NULL) (320500, NULL) (320569, NULL) (320629, NULL) (320698, NULL) 
[0:0:963:0:0:0:0] 100 rows, 100 pages, 4 levels: (320764, NULL) (320833, NULL) (320893, NULL) (320959, NULL) (321019, NULL) [0:0:964:0:0:0:0] 100 rows, 100 pages, 4 levels: (321085, NULL) (321151, NULL) (321214, NULL) (321277, NULL) (321352, NULL) [0:0:965:0:0:0:0] 100 rows, 100 pages, 4 levels: (321421, NULL) (321493, NULL) (321562, NULL) (321631, NULL) (321691, NULL) [0:0:966:0:0:0:0] 100 rows, 100 pages, 4 levels: (321757, NULL) (321823, NULL) (321886, NULL) (321949, NULL) (322009, NULL) [0:0:967:0:0:0:0] 100 rows, 100 pages, 4 levels: (322081, NULL) (322159, NULL) (322225, NULL) (322294, NULL) (322363, NULL) [0:0:968:0:0:0:0] 100 rows, 100 pages, 4 levels: (322429, NULL) (322498, NULL) (322564, NULL) (322642, NULL) (322711, NULL) [0:0:969:0:0:0:0] 100 rows, 100 pages, 4 levels: (322783, NULL) (322846, NULL) (322915, NULL) (322978, NULL) (323041, NULL) [0:0:970:0:0:0:0] 100 rows, 100 pages, 4 levels: (323104, NULL) (323164, NULL) (323230, NULL) (323305, NULL) (323368, NULL) [0:0:971:0:0:0:0] 100 rows, 100 pages, 4 levels: (323434, NULL) (323506, NULL) (323569, NULL) (323632, NULL) (323707, NULL) [0:0:972:0:0:0:0] 100 rows, 100 pages, 4 levels: (323776, NULL) (323851, NULL) (323917, NULL) (323986, NULL) (324052, NULL) [0:0:973:0:0:0:0] 100 rows, 100 pages, 4 levels: (324115, NULL) (324184, NULL) (324256, NULL) (324316, NULL) (324379, NULL) [0:0:974:0:0:0:0] 100 rows, 100 pages, 4 levels: (324442, NULL) (324502, NULL) (324568, NULL) (324631, NULL) (324703, NULL) [0:0:975:0:0:0:0] 100 rows, 100 pages, 4 levels: (324769, NULL) (324838, NULL) (324904, NULL) (324973, NULL) (325033, NULL) [0:0:976:0:0:0:0] 100 rows, 100 pages, 4 levels: (325105, NULL) (325174, NULL) (325234, NULL) (325297, NULL) (325363, NULL) [0:0:977:0:0:0:0] 100 rows, 100 pages, 4 levels: (325438, NULL) (325504, NULL) (325570, NULL) (325630, NULL) (325699, NULL) [0:0:978:0:0:0:0] 100 rows, 100 pages, 4 levels: (325771, NULL) (325834, NULL) (325900, NULL) (325966, NULL) (326032, NULL) [0:0:979:0:0:0:0] 100 rows, 100 pages, 4 levels: (326101, NULL) (326170, NULL) (326233, NULL) (326296, NULL) (326359, NULL) [0:0:980:0:0:0:0] 100 rows, 100 pages, 4 levels: (326434, NULL) (326497, NULL) (326563, NULL) (326632, NULL) (326701, NULL) [0:0:981:0:0:0:0] 100 rows, 100 pages, 4 levels: (326773, NULL) (326836, NULL) (326905, NULL) (326965, NULL) (327025, NULL) [0:0:982:0:0:0:0] 100 rows, 100 pages, 4 levels: (327097, NULL) (327169, NULL) (327232, NULL) (327301, NULL) (327364, NULL) [0:0:983:0:0:0:0] 100 rows, 100 pages, 4 levels: (327430, NULL) (327496, NULL) (327559, NULL) (327622, NULL) (327682, NULL) [0:0:984:0:0:0:0] 100 rows, 100 pages, 4 levels: (327742, NULL) (327811, NULL) (327871, NULL) (327934, NULL) (327997, NULL) [0:0:985:0:0:0:0] 100 rows, 100 pages, 4 levels: (328072, NULL) (328138, NULL) (328222, NULL) (328291, NULL) (328363, NULL) [0:0:986:0:0:0:0] 100 rows, 100 pages, 4 levels: (328432, NULL) (328501, NULL) (328573, NULL) (328648, NULL) (328717, NULL) [0:0:987:0:0:0:0] 100 rows, 100 pages, 4 levels: (328783, NULL) (328849, NULL) (328915, NULL) (328978, NULL) (329044, NULL) [0:0:988:0:0:0:0] 100 rows, 100 pages, 4 levels: (329119, NULL) (329185, NULL) (329248, NULL) (329317, NULL) (329383, NULL) [0:0:989:0:0:0:0] 100 rows, 100 pages, 4 levels: (329455, NULL) (329518, NULL) (329590, NULL) (329662, NULL) (329722, NULL) [0:0:990:0:0:0:0] 100 rows, 100 pages, 4 levels: (329782, NULL) (329854, NULL) (329917, NULL) (329983, NULL) (330049, NULL) [0:0:991:0:0:0:0] 100 rows, 100 pages, 4 levels: (330118, NULL) (330187, NULL) 
(330253, NULL) (330322, NULL) (330382, NULL) [0:0:992:0:0:0:0] 100 rows, 100 pages, 4 levels: (330454, NULL) (330520, NULL) (330595, NULL) (330673, NULL) (330739, NULL) [0:0:993:0:0:0:0] 100 rows, 100 pages, 4 levels: (330808, NULL) (330874, NULL) (330940, NULL) (331003, NULL) (331072, NULL) [0:0:994:0:0:0:0] 100 rows, 100 pages, 4 levels: (331132, NULL) (331204, NULL) (331276, NULL) (331342, NULL) (331405, NULL) [0:0:995:0:0:0:0] 100 rows, 100 pages, 4 levels: (331465, NULL) (331540, NULL) (331615, NULL) (331684, NULL) (331753, NULL) [0:0:996:0:0:0:0] 100 rows, 100 pages, 4 levels: (331816, NULL) (331891, NULL) (331960, NULL) (332026, NULL) (332086, NULL) [0:0:997:0:0:0:0] 100 rows, 100 pages, 4 levels: (332152, NULL) (332215, NULL) (332284, NULL) (332350, NULL) (332419, NULL) [0:0:998:0:0:0:0] 100 rows, 100 pages, 4 levels: (332491, NULL) (332557, NULL) (332623, NULL) (332686, NULL) (332752, NULL) [0:0:999:0:0:0:0] 100 rows, 100 pages, 4 levels: (332818, NULL) (332884, NULL) (332944, NULL) (333013, NULL) (333073, NULL) [0:0:1000:0:0:0:0] 100 rows, 100 pages, 4 levels: (333148, NULL) (333214, NULL) (333274, NULL) (333340, NULL) (333403, NULL) Checking BTree: Touched 0% bytes, 0 pages RowCountHistogram: 5% (actual 6%) key = (16984, 5669) value = 5100 (actual 6998 - -1% error) 10% (actual 9%) key = (50416, 16813) value = 15100 (actual 16798 - -1% error) 10% (actual 9%) key = (83701, 27908) value = 25100 (actual 26598 - -1% error) 10% (actual 9%) key = (116986, 39003) value = 35100 (actual 36398 - -1% error) 10% (actual 9%) key = (150319, 50114) value = 45100 (actual 46198 - -1% error) 10% (actual 9%) key = (183700, 61241) value = 55100 (actual 55998 - 0% error) 10% (actual 9%) key = (217081, 72368) value = 65100 (actual 65798 - 0% error) 10% (actual 9%) key = (250486, 83503) value = 75100 (actual 75598 - 0% error) 10% (actual 9%) key = (283771, 94598) value = 85100 (actual 85398 - 0% error) 14% (actual 14%) DataSizeHistogram: 5% (actual 6%) key = (16648, 5557) value = 524891 (actual 723287 - -1% error) 10% (actual 9%) key = (50086, 16703) value = 1569936 (actual 1747238 - -1% error) 9% (actual 9%) key = (83356, 27793) value = 2610698 (actual 2767306 - -1% error) 10% (actual 9%) key = (116647, 38890) value = 3652143 (actual 3787394 - -1% error) 9% (actual 9%) key = (149656, 49893) value = 4685435 (actual 4800597 - -1% error) 10% (actual 9%) key = (183040, 61021) value = 5728420 (actual 5822785 - 0% error) 10% (actual 9%) key = (216727, 72250) value = 6776444 (actual 6848929 - 0% error) 9% (actual 9%) key = (250144, 83389) value = 7813547 (actual 7865227 - 0% error) 9% (actual 9%) key = (283444, 94489) value = 8853697 (actual 8884838 - 0% error) 14% (actual 14%) Checking Flat: Touched 100% bytes, 1000 pages RowCountHistogram: 10% (actual 11%) key = (33379, 11134) value = 10000 (actual 11800 - -1% error) 10% (actual 9%) key = (66721, 22248) value = 20000 (actual 21600 - -1% error) 10% (actual 9%) key = (100015, 33346) value = 30000 (actual 31400 - -1% error) 10% (actual 9%) key = (133258, 44427) value = 40000 (actual 41200 - -1% error) 10% (actual 9%) key = (166621, 55548) value = 50000 (actual 51000 - -1% error) 10% (actual 9%) key = (200041, 66688) value = 60000 (actual 60800 - 0% error) 10% (actual 9%) key = (233449, 77824) value = 70000 (actual 70600 - 0% error) 10% (actual 9%) key = (266824, 88949) value = 80000 (actual 80400 - 0% error) 10% (actual 9%) key = (300073, 100032) value = 90000 (actual 90200 - 0% error) 10% (actual 9%) DataSizeHistogram: 10% (actual 11%) key = (33187, NULL) 
value = 1041247 (actual 1229534 - -1% error) 10% (actual 9%) key = (66517, NULL) value = 2082456 (actual 2249844 - -1% error) 10% (actual 9%) key = (99709, NULL) value = 3123684 (actual 3270138 - -1% error) 10% (actual 9%) key = (132925, NULL) value = 4164886 (actual 4290603 - -1% error) 10% (actual 9%) key = (166246, NULL) value = 5206111 (actual 5311117 - -1% error) 10% (actual 9%) key = (199678, NULL) value = 6247321 (actual 6331068 - 0% error) 10% (actual 9%) key = (233290, NULL) value = 7288529 (actual 7350869 - 0% error) 10% (actual 9%) key = (266701, NULL) value = 8329759 (actual 8371441 - 0% error) 10% (actual 9%) key = (300052, NULL) value = 9371030 (actual 9392083 - 0% error) 9% (actual 9%) Checking Mixed: Touched 0% bytes, 0 pages RowCountHistogram: 100% (actual 100%) DataSizeHistogram: 100% (actual 100%) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_DelayRS_Reboot_Dirty [GOOD] Test command err: 2025-03-31T16:03:00.770713Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:03:00.772283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:03:00.772361Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:00.781471Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:03:00.781995Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:131:2154] 2025-03-31T16:03:00.782283Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:03:00.793690Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:03:00.867913Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:03:00.868331Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:03:00.870045Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-03-31T16:03:00.870133Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-03-31T16:03:00.870183Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-03-31T16:03:00.870629Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:03:00.875462Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:03:00.875588Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:197:2154] in generation 2 2025-03-31T16:03:01.032063Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:03:01.073481Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-03-31T16:03:01.073695Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:03:01.073811Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-03-31T16:03:01.073846Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-03-31T16:03:01.073881Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-03-31T16:03:01.073919Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:03:01.074156Z node 1 :TX_DATASHARD 
TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:03:01.074200Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:03:01.074479Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-03-31T16:03:01.074576Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-03-31T16:03:01.074641Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-31T16:03:01.074680Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:03:01.074720Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-03-31T16:03:01.074758Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-31T16:03:01.074796Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-31T16:03:01.074835Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-03-31T16:03:01.074873Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:03:01.074992Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:03:01.075062Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:03:01.075410Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-03-31T16:03:01.078251Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:131:2154]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\001J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-03-31T16:03:01.078338Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T16:03:01.078441Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-31T16:03:01.078603Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-03-31T16:03:01.078645Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-03-31T16:03:01.078700Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-03-31T16:03:01.078773Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-31T16:03:01.078827Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-03-31T16:03:01.078867Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-03-31T16:03:01.078898Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-03-31T16:03:01.079363Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-03-31T16:03:01.079425Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-03-31T16:03:01.079463Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 
9437184 to execution unit FinishPropose 2025-03-31T16:03:01.079496Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-03-31T16:03:01.079539Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-03-31T16:03:01.079569Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-03-31T16:03:01.079623Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-03-31T16:03:01.079658Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-03-31T16:03:01.079685Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-03-31T16:03:01.093444Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-03-31T16:03:01.093524Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-03-31T16:03:01.093557Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-03-31T16:03:01.093615Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-03-31T16:03:01.093692Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-03-31T16:03:01.094233Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:03:01.094290Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:03:01.094335Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-03-31T16:03:01.094466Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:131:2154]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-03-31T16:03:01.094493Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-31T16:03:01.094646Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-03-31T16:03:01.094690Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-31T16:03:01.094725Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-03-31T16:03:01.094761Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-03-31T16:03:01.098641Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-03-31T16:03:01.098725Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:03:01.098993Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:03:01.099033Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:03:01.103256Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-31T16:03:01.103359Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 
2025-03-31T16:03:01.103400Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-31T16:03:01.103452Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-03-31T16:03:01.103498Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-03-31T16:03:01.103555Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-31T16:03:01.103606Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-03-31T16:03:01.103657Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-03-31T16:03:01.103699Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-03-31T16:03:01.103915Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-03-31T16:03:01.103968Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-31T16:03:01.104006Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-03-31T16:03:01.104034Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-03-31T16:03:01.104057Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-03-31T16:03:01.104136Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-31T16:03:01.104159Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-03-31T16:03:01.104211Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-31T16:03:01.104245Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-03-31T16:03:01.104313Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-03-31T16:03:01.104350Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-03-31T16:03:01.104382Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-03-31T16:03:01.104421Z node 1 :TX_DATA ... 
Emit heartbeats: at tablet# 9437184 2025-03-31T16:04:50.857160Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:04:50.857195Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:20] at 9437184 on unit CompleteOperation 2025-03-31T16:04:50.857243Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 20] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:04:50.857281Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:04:50.857513Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:04:50.857548Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:21] at 9437184 on unit CompleteOperation 2025-03-31T16:04:50.857598Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 21] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:04:50.857635Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:04:50.857796Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:04:50.857833Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:22] at 9437184 on unit CompleteOperation 2025-03-31T16:04:50.857880Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 22] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:04:50.857919Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:04:50.858095Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:04:50.858131Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:23] at 9437184 on unit CompleteOperation 2025-03-31T16:04:50.858180Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 23] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:04:50.858216Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:04:50.858417Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:04:50.858455Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:24] at 9437184 on unit CompleteOperation 2025-03-31T16:04:50.858505Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 24] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:04:50.858542Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:04:50.858730Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:04:50.858768Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:25] at 9437184 on unit CompleteOperation 2025-03-31T16:04:50.858817Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 25] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:04:50.858856Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:04:50.859033Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:04:50.859069Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:26] at 9437184 on unit CompleteOperation 2025-03-31T16:04:50.863406Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 
26] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:04:50.863498Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:04:50.863853Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:04:50.863908Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:27] at 9437184 on unit CompleteOperation 2025-03-31T16:04:50.863969Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 27] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:04:50.864018Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:04:50.864243Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:04:50.864285Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:28] at 9437184 on unit CompleteOperation 2025-03-31T16:04:50.864342Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 28] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:04:50.864380Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:04:50.864561Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:04:50.864599Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:29] at 9437184 on unit CompleteOperation 2025-03-31T16:04:50.864652Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 29] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:04:50.864690Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:04:50.864834Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:04:50.864871Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:30] at 9437184 on unit CompleteOperation 2025-03-31T16:04:50.864918Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 30] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:04:50.864957Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:04:50.865203Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:04:50.865239Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:31] at 9437184 on unit CompleteOperation 2025-03-31T16:04:50.865289Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 31] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:04:50.865325Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:04:50.865481Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:04:50.865521Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:32] at 9437184 on unit CompleteOperation 2025-03-31T16:04:50.865571Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 32] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:04:50.865608Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:04:50.865824Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 
2025-03-31T16:04:50.865859Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:33] at 9437184 on unit CompleteOperation 2025-03-31T16:04:50.865909Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 33] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:04:50.865947Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:04:50.866114Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:04:50.866148Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:34] at 9437184 on unit CompleteOperation 2025-03-31T16:04:50.866195Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 34] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:04:50.866232Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:04:50.866427Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:04:50.866466Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:35] at 9437184 on unit CompleteOperation 2025-03-31T16:04:50.866513Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 35] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:04:50.866551Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:04:50.866802Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:04:50.866842Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:36] at 9437184 on unit CompleteOperation 2025-03-31T16:04:50.866891Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 36] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:04:50.866928Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:04:50.871342Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:04:50.871434Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:37] at 9437184 on unit CompleteOperation 2025-03-31T16:04:50.871509Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 37] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:04:50.871569Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:04:50.872020Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:801:2727], Recipient [32:342:2310]: {TEvReadSet step# 1000004 txid# 5 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2025-03-31T16:04:50.872080Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:04:50.872131Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 5 2025-03-31T16:04:50.872294Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:801:2727], Recipient [32:342:2310]: {TEvReadSet step# 1000004 txid# 9 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2025-03-31T16:04:50.872335Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:04:50.872372Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 
source 9437185 dest 9437184 consumer 9437184 txId 9 2025-03-31T16:04:50.872469Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:801:2727], Recipient [32:342:2310]: {TEvReadSet step# 1000004 txid# 10 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2025-03-31T16:04:50.872505Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:04:50.872540Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 10 2025-03-31T16:04:50.872627Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:801:2727], Recipient [32:342:2310]: {TEvReadSet step# 1000004 txid# 12 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2025-03-31T16:04:50.872665Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:04:50.872699Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 12
expect 31 31 29 27 25 20 28 30 27 31 25 12 31 31 25 27 15 30 30 25 15 30 31 31 31 31 31 8 - - 8 -
actual 31 31 29 27 25 20 28 30 27 31 25 12 31 31 25 27 15 30 30 25 15 30 31 31 31 31 31 8 - - 8 -
interm - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
>> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobal [GOOD]
>> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobalAsync
>> YdbYqlClient::AlterTableAddIndexWithDataColumn [GOOD]
>> YdbYqlClient::CheckDefaultTableSettings1
|75.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest
>> YdbImport::EmptyData [GOOD]
>> YdbImport::ImportFromS3ToExistingTable
>> DataShardVolatile::UpsertNoLocksArbiterRestart+UseSink [GOOD]
>> DataShardVolatile::UpsertNoLocksArbiterRestart-UseSink
>> YdbYqlClient::CreateTableWithUniformPartitions [GOOD]
>> YdbYqlClient::CreateTableWithUniformPartitionsAndAutoPartitioning
>> TConfigsCacheTests::TestNoNotificationIfConfigIsCached
>> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionCreate
>> TModificationsValidatorTests::TestIsValidationRequired_NONE [GOOD]
>> TModificationsValidatorTests::TestIsValidationRequired_DOMAIN [GOOD]
>> TModificationsValidatorTests::TestIsValidationRequired_TENANTS [GOOD]
>> TModificationsValidatorTests::TestIsValidationRequired_TENANTS_AND_NODE_TYPES [GOOD]
>> TModificationsValidatorTests::TestIndexAndModificationsShrink_RemoveItems_NONE [GOOD]
>> TModificationsValidatorTests::TestIndexAndModificationsShrink_RemoveItems_DOMAIN [GOOD]
>> TModificationsValidatorTests::TestIndexAndModificationsShrink_RemoveItems_TENANTS [GOOD]
>> TModificationsValidatorTests::TestIndexAndModificationsShrink_RemoveItems_TENANTS_AND_NODE_TYPES [GOOD]
>> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsSameScope_NONE [GOOD]
>> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsSameScope_DOMAIN [GOOD]
>> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsSameScope_TENANTS [GOOD]
>> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsSameScope_TENANTS_AND_NODE_TYPES [GOOD]
>> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsNarrowScope_NONE [GOOD]
>> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsNarrowScope_TENANTS [GOOD]
>> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsNarrowScope_TENANTS_AND_NODE_TYPES [GOOD]
>> TNetClassifierUpdaterTest::TestGetUpdatesFromHttpServer
>> TConsoleConfigSubscriptionTests::TestAddConfigSubscription
>> TConsoleConfigTests::TestModifyConfigItem
>> KqpSystemView::NodesRange1 [GOOD]
|75.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest
>> GroupWriteTest::ByTableName
>> TModificationsValidatorTests::TestIndexAndModificationsShrink_AddItems_NONE [GOOD]
>> TModificationsValidatorTests::TestIndexAndModificationsShrink_AddItems_DOMAIN [GOOD]
>> TModificationsValidatorTests::TestIndexAndModificationsShrink_AddItems_TENANTS [GOOD]
>> TModificationsValidatorTests::TestIndexAndModificationsShrink_AddItems_TENANTS_AND_NODE_TYPES [GOOD]
>> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsExpandScope_NONE [GOOD]
>> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsExpandScope_DOMAIN
>> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsExpandScope_DOMAIN [GOOD]
>> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsExpandScope_TENANTS [GOOD]
>> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsExpandScope_TENANTS_AND_NODE_TYPES [GOOD]
>> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsNarrowScope_DOMAIN [GOOD]
>> TModificationsValidatorTests::TestComputeAffectedConfigs_DomainAffected_DOMAIN [GOOD]
>> TModificationsValidatorTests::TestComputeAffectedConfigs_DomainAffected_TENANTS [GOOD]
>> TModificationsValidatorTests::TestComputeAffectedConfigs_DomainAffected_TENANTS_AND_NODE_TYPES [GOOD]
>> TModificationsValidatorTests::TestComputeAffectedConfigs_DomainUnaffected_TENANTS [GOOD]
>> TModificationsValidatorTests::TestComputeAffectedConfigs_DomainUnaffected_TENANTS_AND_NODE_TYPES [GOOD]
>> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainAffected_DOMAIN [GOOD]
>> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainAffected_TENANTS [GOOD]
>> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainAffected_TENANTS_AND_NODE_TYPES [GOOD]
>> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainUnaffected_TENANTS [GOOD]
>> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainUnaffected_TENANTS_AND_NODE_TYPES [GOOD]
>> TConfigsCacheTests::TestNoNotificationIfConfigIsCached [GOOD]
>> TConfigsCacheTests::TestFullConfigurationRestore
|75.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest
>> DataShardTxOrder::RandomDotRanges_DelayRS [GOOD]
>> AsyncIndexChangeExchange::SenderShouldShakeHandsAfterAddingIndex [GOOD]
>> AsyncIndexChangeExchange::ShouldDeliverChangesOnFreshTable
>> KqpPg::InsertNoTargetColumns_Alter-useSink [GOOD]
>> KqpPg::InsertNoTargetColumns_Serial+useSink
>> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionCreate [GOOD]
>> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClient
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::NodesRange1 [GOOD]
Test command err:
Trying to start YDB, gRPC: 24112, MsgBus: 63456 2025-03-31T16:04:29.378126Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005194384730951:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:29.378180Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
2025-03-31T16:04:29.615861Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488005194691495277:2093];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:29.695506Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488005195695198030:2138];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:29.695589Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:04:29.616857Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:04:29.884154Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7488005195643407845:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:29.884200Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:04:30.971555Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488005194209104083:2277];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:30.971699Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:04:31.061137Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:04:31.113712Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001821/r3tmp/tmpBVEv46/pdisk_1.dat 2025-03-31T16:04:31.152416Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:04:31.208490Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:04:32.066730Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:04:32.114579Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:04:32.183205Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:04:32.203552Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:04:32.277660Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:04:33.087276Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:04:33.119286Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:04:33.120379Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:33.159696Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:33.159821Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:33.175584Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:33.175693Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:33.187884Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:04:33.199714Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:33.199808Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:33.199958Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:33.200007Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:33.200171Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:33.200218Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:33.223526Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:04:33.234961Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2025-03-31T16:04:33.235013Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:04:33.237575Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-03-31T16:04:33.237608Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-03-31T16:04:33.237740Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:04:33.238306Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:04:33.247973Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:04:33.248322Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:04:33.248473Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24112, node 1 2025-03-31T16:04:33.851903Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:33.851951Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:33.851962Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:33.852146Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:04:34.394729Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488005194384730951:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:34.394819Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:34.571218Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488005194209104083:2277];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:34.571289Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:34.620117Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488005194691495277:2093];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:34.620192Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:34.703050Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7488005195695198030:2138];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:34.703176Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:63456 2025-03-31T16:04:34.891229Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7488005195643407845:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:34.891332Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:63456 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:37.107743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:37.359419Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:38.274912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:39.433836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:40.148489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:46.260123Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005267399177065:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:46.260281Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:47.513906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:04:47.657714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:04:47.882024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:04:48.060231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:04:48.094493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:04:48.094525Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:48.341181Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:04:48.525460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:04:48.921174Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005275989112357:2412], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:48.921325Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:48.927258Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005275989112362:2415], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:48.934805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:04:49.000023Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005275989112365:2416], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:04:49.074734Z node 1 :TX_PROXY ERROR: Actor# [1:7488005280284079731:4129] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:04:51.312872Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437091266, txId: 281474976715671] shutting down 2025-03-31T16:04:51.640792Z node 4 :BS_PROXY_PUT ERROR: [6ff05a3c1911b122] Result# TEvPutResult {Id# [72075186224037889:1:20:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037889:1:20:0:0:41:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "BS_QUEUE: event undelivered", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2025-03-31T16:04:51.647114Z node 3 :BS_PROXY_PUT ERROR: [09bb989292ed37f8] Result# TEvPutResult {Id# [72075186224037894:1:20:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037894:1:20:0:0:41:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "BS_QUEUE: event undelivered", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2025-03-31T16:04:51.647138Z node 5 :BS_PROXY_PUT ERROR: [ad0b46da4d212bdf] Result# TEvPutResult {Id# [72075186224037888:1:20:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037888:1:20:0:0:41:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "BS_QUEUE: event undelivered", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2025-03-31T16:04:51.648072Z node 2 :BS_PROXY_PUT ERROR: [8777eda304453337] Result# TEvPutResult {Id# [72075186224037895:1:20:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037895:1:20:0:0:41:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "BS_QUEUE: event undelivered", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12
|75.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut/unittest
>> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainUnaffected_TENANTS_AND_NODE_TYPES [GOOD]
>> TConsoleConfigTests::TestModifyConfigItem [GOOD]
>> TConsoleConfigTests::TestRemoveConfigItem
>> YdbSdkSessions::CloseSessionWithSessionPoolExplicitDriverStopOnly [GOOD]
>> YdbSdkSessions::CloseSessionWithSessionPoolFromDtors
|75.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest
>> TConsoleConfigSubscriptionTests::TestAddConfigSubscription [GOOD]
>> TConsoleConfigSubscriptionTests::TestRemoveConfigSubscription
>> TConfigsCacheTests::TestFullConfigurationRestore [GOOD] >>
TConfigsCacheTests::TestConfigurationSaveOnNotification >> TVersions::Wreck1Reverse [GOOD] >> TVersions::Wreck0 >> YdbSdkSessions::SessionsServerLimitWithSessionPool [GOOD] >> TGRpcYdbTest::AlterTableAddIndexBadRequest [GOOD] |75.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut |75.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut >> YdbSdkSessions::TestActiveSessionCountAfterTransportError [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClient [GOOD] >> TGRpcYdbTest::CreateAlterCopyAndDropTable >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientManyUpdates >> TConfigsCacheTests::TestConfigurationSaveOnNotification [GOOD] >> TConfigsCacheTests::TestOverwrittenConfigurationDoesntCauseNotification >> YdbS3Internal::TestAccessCheck [GOOD] >> TConsoleConfigTests::TestRemoveConfigItem [GOOD] >> YdbQueryService::TestCreateAttachAndDropAttachedSession [GOOD] >> TConsoleConfigSubscriptionTests::TestRemoveConfigSubscription [GOOD] >> TConsoleConfigTests::TestRemoveConfigItems >> YdbScripting::BasicV0 >> YdbS3Internal::BadRequests >> TConsoleConfigSubscriptionTests::TestRemoveConfigSubscriptions ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryServiceStreamCall [FAIL] Test command err: 2025-03-31T16:04:41.653672Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005247612370811:2277];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:41.653720Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d6d/r3tmp/tmpKC9Wcb/pdisk_1.dat 2025-03-31T16:04:42.507304Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:42.507424Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:42.526462Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:04:42.570890Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23954, node 1 2025-03-31T16:04:42.772466Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:04:42.782196Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:04:42.786623Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:04:42.864340Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:42.864357Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:42.864367Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:42.864475Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6527 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:43.414625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:46.663326Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488005247612370811:2277];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:46.663412Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:46.904008Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005269087208146:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:46.904154Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:47.287690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:04:47.695385Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005273382175626:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:47.695541Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:47.696052Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005273382175631:2359], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:47.701187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-31T16:04:47.802080Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005273382175633:2360], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-31T16:04:47.884805Z node 1 :TX_PROXY ERROR: Actor# [1:7488005273382175716:2818] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:04:48.309247Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jqpf5gy98s102e6g2997em5h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTNhN2Q5NjctZmQ0YWQ3MWMtODEzOTM2MDQtYjVhMTZhOTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root assertion failed at ydb/services/ydb/sdk_sessions_ut/sdk_sessions_ut.cpp:253, virtual void NTestSuiteYdbSdkSessions::TTestCaseTestSdkFreeSessionAfterBadSessionQueryServiceStreamCall::Execute_(NUnitTest::TTestContext &): (session.GetId() == sessionId) failed: ("ydb://session/3?node_id=1&id=N2JhYTcyYy1hMjI2MjlkNC1lYjJjZDAyYi04ZTRkYmM4Yg==" != "ydb://session/3?node_id=1&id=ZTNhN2Q5NjctZmQ0YWQ3MWMtODEzOTM2MDQtYjVhMTZhOTI=") , with diff: "ydb://session/3?node_id=1&id=(|ZTNh)N2(Jh|Q5NjctZmQ0)Y(|WQ3MWMtODEzO)T(cyYy1h|)M(jI|)2M(jlkNC1l|DQt)Yj(Jj|VhMT)Z(DAyYi04Z|hO)T(RkYmM4Yg|I)=(=|)" TBackTrace::Capture()+28 (0x18BFAE4C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x190C2FB0) NTestSuiteYdbSdkSessions::TTestCaseTestSdkFreeSessionAfterBadSessionQueryServiceStreamCall::Execute_(NUnitTest::TTestContext&)+7545 (0x187BBE19) std::__y1::__function::__func, void ()>::operator()()+280 (0x18830FE8) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x190F9FF6) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x190C9B29) NTestSuiteYdbSdkSessions::TCurrentTest::Execute()+1204 (0x1882FE94) NUnitTest::TTestFactory::Execute()+2438 (0x190CB3F6) NUnitTest::RunMain(int, char**)+5213 (0x190F456D) ??+0 (0x7F1AF853DD90) __libc_start_main+128 (0x7F1AF853DE40) _start+41 (0x1612E029) >> TEvLocalSyncDataTests::SqueezeBlocks3 [GOOD] >> TEvLocalSyncDataTests::SqueezeBlocks1 [GOOD] >> TQuorumTrackerTests::ErasureNoneNeverHasQuorum_4_1 [GOOD] >> TSyncBrokerTests::ShouldProcessAfterRelease |75.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut >> TSyncBrokerTests::ShouldEnqueue >> TConfigsCacheTests::TestOverwrittenConfigurationDoesntCauseNotification [GOOD] >> TConsoleConfigSubscriptionTests::TestRemoveConfigSubscriptions [GOOD] >> TEvLocalSyncDataTests::SqueezeBlocks2 [GOOD] >> TQuorumTrackerTests::Erasure4Plus2BlockIncludingMyFailDomain_8_2 [GOOD] >> TSyncBrokerTests::ShouldEnqueue [GOOD] >> TConfigsCacheTests::TestConfigurationChangeSensor >> TQuorumTrackerTests::ErasureMirror3IncludingMyFailDomain_5_2 [GOOD] >> TSyncBrokerTests::ShouldProcessAfterRelease [GOOD] >> TSyncBrokerTests::ShouldEnqueueWithSameVDiskId [GOOD] >> Viewer::Cluster10000Tablets >> TPersQueueTest::DisableWrongSettings [GOOD] >> KqpErrors::ResolveTableError [GOOD] >> TGRpcYdbTest::SdkUuidViaParams [GOOD] >> TConsoleConfigTests::TestRemoveConfigItems [GOOD] >> TSyncBrokerTests::ShouldReleaseInQueue >> TConsoleConfigSubscriptionTests::TestListConfigSubscriptions >> TGRpcYdbTest::ReadTablePg >> TSyncBrokerTests::ShouldReleaseInQueue [GOOD] >> TConsoleConfigTests::TestConfigureOrderConflicts >> 
TPersQueueTest::DisableDeduplication
|75.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut
|75.5%| [LD] {RESULT} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomDotRanges_DelayRS [GOOD]
Test command err:
2025-03-31T16:03:09.242255Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:03:09.245312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:03:09.245380Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:09.264852Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:03:09.265387Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:131:2154] 2025-03-31T16:03:09.265668Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:03:09.278703Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:03:09.364459Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:03:09.364896Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:03:09.366582Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-03-31T16:03:09.366671Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-03-31T16:03:09.366721Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-03-31T16:03:09.370930Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:03:09.377764Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:03:09.377891Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:197:2154] in generation 2 2025-03-31T16:03:09.457237Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:03:09.535279Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-03-31T16:03:09.535943Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:03:09.536385Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-03-31T16:03:09.536567Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-03-31T16:03:09.536751Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-03-31T16:03:09.537098Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:03:09.537839Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:03:09.538044Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:03:09.538896Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-03-31T16:03:09.539178Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-03-31T16:03:09.539399Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184
2025-03-31T16:03:09.539566Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:03:09.539714Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-03-31T16:03:09.539864Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-31T16:03:09.540043Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-31T16:03:09.540181Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-03-31T16:03:09.540311Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:03:09.540604Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:03:09.540716Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:03:09.540898Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-03-31T16:03:09.545397Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:131:2154]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-03-31T16:03:09.545551Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T16:03:09.545738Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-31T16:03:09.546104Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-03-31T16:03:09.546232Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-03-31T16:03:09.546329Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-03-31T16:03:09.546512Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-31T16:03:09.546603Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-03-31T16:03:09.546801Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-03-31T16:03:09.546916Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-03-31T16:03:09.547497Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-03-31T16:03:09.547578Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-03-31T16:03:09.547693Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-03-31T16:03:09.547759Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-03-31T16:03:09.547826Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-03-31T16:03:09.547928Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-03-31T16:03:09.548002Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-03-31T16:03:09.548060Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit 
WaitForPlan 2025-03-31T16:03:09.548097Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-03-31T16:03:09.569012Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-03-31T16:03:09.569143Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-03-31T16:03:09.569190Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-03-31T16:03:09.569332Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-03-31T16:03:09.569498Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-03-31T16:03:09.570384Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:03:09.570453Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:03:09.570543Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-03-31T16:03:09.570740Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:131:2154]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-03-31T16:03:09.570781Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-31T16:03:09.571003Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-03-31T16:03:09.571068Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-31T16:03:09.571129Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-03-31T16:03:09.571200Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-03-31T16:03:09.582466Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-03-31T16:03:09.582574Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:03:09.582959Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:03:09.583018Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:03:09.583116Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-31T16:03:09.583174Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:03:09.583227Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-31T16:03:09.583275Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-03-31T16:03:09.583338Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-03-31T16:03:09.583400Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-31T16:03:09.583448Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-03-31T16:03:09.583502Z node 
1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-03-31T16:03:09.583567Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-03-31T16:03:09.583825Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-03-31T16:03:09.583892Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-31T16:03:09.583936Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-03-31T16:03:09.583964Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-03-31T16:03:09.583990Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-03-31T16:03:09.584082Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-31T16:03:09.584108Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-03-31T16:03:09.584157Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-31T16:03:09.584200Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-03-31T16:03:09.584283Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-03-31T16:03:09.584330Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-03-31T16:03:09.584372Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-03-31T16:03:09.584444Z node 1 :TX_DATA ... 6Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:04:58.033468Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:04:58.033506Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:23] at 9437184 on unit CompleteOperation 2025-03-31T16:04:58.033553Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 23] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:04:58.033593Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:04:58.033832Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:04:58.033870Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:24] at 9437184 on unit CompleteOperation 2025-03-31T16:04:58.033923Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 24] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:04:58.033962Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:04:58.034162Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:04:58.034198Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:25] at 9437184 on unit CompleteOperation 2025-03-31T16:04:58.034249Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 25] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:04:58.034286Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:04:58.034522Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 
2025-03-31T16:04:58.034558Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:26] at 9437184 on unit CompleteOperation 2025-03-31T16:04:58.034610Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 26] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:04:58.034647Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:04:58.034823Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:04:58.034863Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:27] at 9437184 on unit CompleteOperation 2025-03-31T16:04:58.034914Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 27] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:04:58.034956Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:04:58.035343Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:04:58.035388Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:28] at 9437184 on unit CompleteOperation 2025-03-31T16:04:58.035447Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 28] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:04:58.035482Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:04:58.035641Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:04:58.035667Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:29] at 9437184 on unit CompleteOperation 2025-03-31T16:04:58.035702Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 29] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:04:58.035744Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:04:58.035913Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:04:58.035941Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:30] at 9437184 on unit CompleteOperation 2025-03-31T16:04:58.035980Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 30] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:04:58.036010Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:04:58.036187Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:04:58.036209Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:31] at 9437184 on unit CompleteOperation 2025-03-31T16:04:58.036249Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 31] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:04:58.036275Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:04:58.036451Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:04:58.036489Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:32] at 9437184 on unit CompleteOperation 2025-03-31T16:04:58.036539Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 32] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 
2025-03-31T16:04:58.036576Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:04:58.036750Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:04:58.036782Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:33] at 9437184 on unit CompleteOperation 2025-03-31T16:04:58.036832Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 33] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:04:58.036869Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:04:58.037110Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:04:58.037145Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:34] at 9437184 on unit CompleteOperation 2025-03-31T16:04:58.037190Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 34] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:04:58.037226Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:04:58.037446Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:04:58.037483Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:35] at 9437184 on unit CompleteOperation 2025-03-31T16:04:58.037531Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 35] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:04:58.037570Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:04:58.037780Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:04:58.037836Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:36] at 9437184 on unit CompleteOperation 2025-03-31T16:04:58.037887Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 36] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:04:58.037926Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:04:58.038110Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:04:58.038159Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:37] at 9437184 on unit CompleteOperation 2025-03-31T16:04:58.038205Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 37] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:04:58.038241Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:04:58.038528Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:232:2225], Recipient [32:343:2311]: {TEvReadSet step# 1000004 txid# 36 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 32} 2025-03-31T16:04:58.038579Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:04:58.038622Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 36 2025-03-31T16:04:58.038865Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:232:2225], Recipient [32:343:2311]: {TEvReadSet step# 1000004 txid# 5 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 
Flags# 0 Seqno# 2} 2025-03-31T16:04:58.038913Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:04:58.038948Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 5 2025-03-31T16:04:58.039049Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:232:2225], Recipient [32:343:2311]: {TEvReadSet step# 1000004 txid# 6 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 3} 2025-03-31T16:04:58.039099Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:04:58.039133Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 6 2025-03-31T16:04:58.039222Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:232:2225], Recipient [32:343:2311]: {TEvReadSet step# 1000004 txid# 8 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2025-03-31T16:04:58.039256Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:04:58.039288Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 8 2025-03-31T16:04:58.039378Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:232:2225], Recipient [32:343:2311]: {TEvReadSet step# 1000004 txid# 10 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2025-03-31T16:04:58.039413Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:04:58.039446Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 10 2025-03-31T16:04:58.039530Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:232:2225], Recipient [32:343:2311]: {TEvReadSet step# 1000004 txid# 12 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2025-03-31T16:04:58.039564Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:04:58.039594Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 12 2025-03-31T16:04:58.039678Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:232:2225], Recipient [32:343:2311]: {TEvReadSet step# 1000004 txid# 13 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 9} 2025-03-31T16:04:58.039711Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:04:58.039743Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 13 expect 31 29 26 28 22 25 29 18 25 30 31 31 30 13 31 30 28 20 28 30 20 30 30 30 31 30 30 30 - - - - actual 31 29 26 28 22 25 29 18 25 30 31 31 30 13 31 30 28 20 28 30 20 30 30 30 31 30 30 30 - - - - interm - 6 2 0 1 2 1 6 - - 2 - 2 - 1 - 0 0 - - - - - - - - - - - - - - ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest >> YdbSdkSessions::SessionsServerLimitWithSessionPool [GOOD] Test command err: 2025-03-31T16:04:40.760226Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005242704670491:2279];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:40.760276Z node 1 :METADATA_PROVIDER 
ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d8e/r3tmp/tmpVAGEW3/pdisk_1.dat 2025-03-31T16:04:41.775036Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:41.795377Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:04:41.798133Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:41.798226Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:41.815982Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2973, node 1 2025-03-31T16:04:42.176261Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:42.176282Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:42.176290Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:42.176415Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63029 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:42.822192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:45.766122Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488005242704670491:2279];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:45.766426Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:47.285351Z node 1 :KQP_PROXY WARN: TraceId: "01jqpf5ghm1066qvf5jszkrd1q", Active sessions limit exceeded, maximum allowed: 2
: Error: Active sessions limit exceeded, maximum allowed: 2 2025-03-31T16:04:47.309724Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005272769442499:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:47.309881Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:47.310352Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005272769442511:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:47.315336Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:04:47.366286Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005272769442513:2349], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:04:47.456508Z node 1 :TX_PROXY ERROR: Actor# [1:7488005272769442582:2698] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:04:48.478057Z node 1 :KQP_PROXY WARN: TraceId: "01jqpf5hpx5dgc6mvqk9qn6yn0", Active sessions limit exceeded, maximum allowed: 2
: Error: Active sessions limit exceeded, maximum allowed: 2 2025-03-31T16:04:51.475874Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488005288826644065:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:51.475933Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d8e/r3tmp/tmppJGWzL/pdisk_1.dat 2025-03-31T16:04:51.928093Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:52.016836Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:52.016928Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:52.025599Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21797, node 4 2025-03-31T16:04:52.387936Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:52.387965Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:52.387971Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:52.388109Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22651 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:52.868031Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
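SessionsServerLimitWithSessionPool drives the server-side session cap visible in these KQP_PROXY warnings: with the limit set to 2, every session-create attempt past the second active session is refused with "Active sessions limit exceeded, maximum allowed: 2" until a pooled session is returned. The toy below mirrors only that observable accept/reject behavior; it is a sketch, not YDB's actual KQP proxy logic.

    #include <cstddef>
    #include <iostream>
    #include <optional>
    #include <string>

    class SessionLimiter {
    public:
        explicit SessionLimiter(std::size_t maxActive) : MaxActive(maxActive) {}

        // Returns a session id, or nullopt when the cap is reached.
        std::optional<std::string> TryCreate() {
            if (Active >= MaxActive)
                return std::nullopt;
            ++Active;
            return "session-" + std::to_string(++Counter);
        }

        void Release() { if (Active) --Active; }

    private:
        std::size_t MaxActive;
        std::size_t Active = 0;
        std::size_t Counter = 0;
    };

    int main() {
        SessionLimiter limiter(2);  // matches "maximum allowed: 2" in the log
        for (int i = 0; i < 3; ++i) {
            if (auto s = limiter.TryCreate())
                std::cout << "created " << *s << '\n';
            else  // third attempt: the same refusal the KQP proxy logs above
                std::cout << "Active sessions limit exceeded, maximum allowed: 2\n";
        }
    }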
2025-03-31T16:04:56.476052Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7488005288826644065:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:56.476126Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:57.267834Z node 4 :KQP_PROXY WARN: TraceId: "01jqpf5t9k65kdb50km7wsvbft", Active sessions limit exceeded, maximum allowed: 2 2025-03-31T16:04:57.286128Z node 4 :KQP_PROXY WARN: TraceId: "01jqpf5ta598d7yp1ngyks1zeh", Active sessions limit exceeded, maximum allowed: 2 2025-03-31T16:04:57.295688Z node 4 :KQP_PROXY WARN: TraceId: "01jqpf5tafaatzvkh66qr1j4r3", Active sessions limit exceeded, maximum allowed: 2 2025-03-31T16:04:57.305223Z node 4 :KQP_PROXY WARN: TraceId: "01jqpf5tarbt4j3yzz0a5jc17m", Active sessions limit exceeded, maximum allowed: 2 2025-03-31T16:04:57.315621Z node 4 :KQP_PROXY WARN: TraceId: "01jqpf5tb347tfcn8xatks9dzy", Active sessions limit exceeded, maximum allowed: 2 2025-03-31T16:04:57.332575Z node 4 :KQP_PROXY WARN: TraceId: "01jqpf5tbmftea7k3hqbgp8tdw", Active sessions limit exceeded, maximum allowed: 2 2025-03-31T16:04:57.354492Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488005314596448923:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:57.354599Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:57.355253Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488005314596448935:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:57.364230Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:04:57.403094Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7488005314596448937:2352], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:04:57.489655Z node 4 :TX_PROXY ERROR: Actor# [4:7488005314596449028:2694] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:04:57.592324Z node 4 :KQP_PROXY WARN: TraceId: "01jqpf5tkqdedq11pbjfpzmaxd", Active sessions limit exceeded, maximum allowed: 2 2025-03-31T16:04:57.603808Z node 4 :KQP_PROXY WARN: TraceId: "01jqpf5tm32q004jbtffcs8mwm", Active sessions limit exceeded, maximum allowed: 2 2025-03-31T16:04:57.616650Z node 4 :KQP_PROXY WARN: TraceId: "01jqpf5tmg3h9qbkq122yeeq7h", Active sessions limit exceeded, maximum allowed: 2 2025-03-31T16:04:57.626136Z node 4 :KQP_PROXY WARN: TraceId: "01jqpf5tms93zzybfc3pthwxts", Active sessions limit exceeded, maximum allowed: 2 2025-03-31T16:04:57.643490Z node 4 :KQP_PROXY WARN: TraceId: "01jqpf5tnb9n9at0w48q1v794n", Active sessions limit exceeded, maximum allowed: 2 2025-03-31T16:04:57.656619Z node 4 :KQP_PROXY WARN: TraceId: "01jqpf5tnrdz9tw3mb4nkhyadk", Active sessions limit exceeded, maximum allowed: 2 2025-03-31T16:04:57.666899Z node 4 :KQP_PROXY WARN: TraceId: "01jqpf5tp24x3cw9byeamkfhkk", Active sessions limit exceeded, maximum allowed: 2 2025-03-31T16:04:57.676790Z node 4 :KQP_PROXY WARN: TraceId: "01jqpf5tpc3m906shnfh9nvxbp", Active sessions limit exceeded, maximum allowed: 2 2025-03-31T16:04:57.696454Z node 4 :KQP_PROXY WARN: TraceId: "01jqpf5tq008e5mbzzr6f60d5s", Active sessions limit exceeded, maximum allowed: 2 2025-03-31T16:04:57.716438Z node 4 :KQP_PROXY WARN: TraceId: "01jqpf5tqm42trc6nfz2q3mqxf", Active sessions limit exceeded, maximum allowed: 2 2025-03-31T16:04:57.726572Z node 4 :KQP_PROXY WARN: TraceId: "01jqpf5tqy2hhxatx2vj1yg6ce", Active sessions limit exceeded, maximum allowed: 2 2025-03-31T16:04:57.736569Z node 4 :KQP_PROXY WARN: TraceId: "01jqpf5tr8b8mga3xft53krcqg", Active sessions limit exceeded, maximum allowed: 2 |75.5%| [LD] {RESULT} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut |75.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TEvLocalSyncDataTests::SqueezeBlocks2 [GOOD] |75.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TQuorumTrackerTests::Erasure4Plus2BlockIncludingMyFailDomain_8_2 [GOOD] |75.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TQuorumTrackerTests::ErasureMirror3IncludingMyFailDomain_5_2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TSyncBrokerTests::ShouldReleaseInQueue [GOOD] Test command err: 2025-03-31T16:05:02.359182Z node 1 :BS_SYNCER DEBUG: TEvQuerySyncToken, VDisk actor id: [0:1:1], actor id: [1:5:2052], token sent, active: 1, waiting: 0 2025-03-31T16:05:02.359367Z node 1 :BS_SYNCER DEBUG: TEvQuerySyncToken, VDisk actor id: [0:1:2], actor id: [1:6:2053], enqueued, active: 1, waiting: 1 2025-03-31T16:05:02.359435Z node 1 :BS_SYNCER DEBUG: TEvReleaseSyncToken, VDisk actor id: [0:1:1], actor id: [1:5:2052], token released, active: 1, waiting: 1 2025-03-31T16:05:02.359508Z node 1 :BS_SYNCER DEBUG: ProcessQueue(), VDisk actor id: [0:1:2], actor id: [1:6:2053], token sent, active: 0, waiting: 1 2025-03-31T16:05:02.538690Z node 2 :BS_SYNCER DEBUG: 
TEvQuerySyncToken, VDisk actor id: [0:1:1], actor id: [2:5:2052], token sent, active: 1, waiting: 0 2025-03-31T16:05:02.538957Z node 2 :BS_SYNCER DEBUG: TEvQuerySyncToken, VDisk actor id: [0:1:2], actor id: [2:6:2053], enqueued, active: 1, waiting: 1 2025-03-31T16:05:02.539015Z node 2 :BS_SYNCER DEBUG: TEvReleaseSyncToken, VDisk actor id: [0:1:2], actor id: [2:6:2053], removed from queue, active: 1, waiting: 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TSyncBrokerTests::ShouldEnqueueWithSameVDiskId [GOOD] Test command err: 2025-03-31T16:05:02.439994Z node 1 :BS_SYNCER DEBUG: TEvQuerySyncToken, VDisk actor id: [0:1:1], actor id: [1:5:2052], token sent, active: 1, waiting: 0 2025-03-31T16:05:02.440167Z node 1 :BS_SYNCER DEBUG: TEvQuerySyncToken, VDisk actor id: [0:1:2], actor id: [1:6:2053], enqueued, active: 1, waiting: 1 2025-03-31T16:05:02.543448Z node 2 :BS_SYNCER DEBUG: TEvQuerySyncToken, VDisk actor id: [0:1:1], actor id: [2:5:2052], token sent, active: 1, waiting: 0 2025-03-31T16:05:02.543571Z node 2 :BS_SYNCER DEBUG: TEvQuerySyncToken, VDisk actor id: [0:1:2], actor id: [2:6:2053], enqueued, active: 1, waiting: 1 2025-03-31T16:05:02.543634Z node 2 :BS_SYNCER DEBUG: TEvQuerySyncToken, VDisk actor id: [0:1:2], actor id: [2:7:2054], enqueued, active: 1, waiting: 1 >> YdbYqlClient::CheckDefaultTableSettings1 [GOOD] >> TConsoleConfigSubscriptionTests::TestListConfigSubscriptions [GOOD] >> TConsoleConfigSubscriptionTests::TestReplaceConfigSubscriptions >> BackupRestore::RestoreViewDependentOnAnotherView [GOOD] >> TConfigsCacheTests::TestConfigurationChangeSensor [GOOD] >> YdbYqlClient::CheckDefaultTableSettings2 >> TConfigsDispatcherTests::TestSubscriptionNotification >> TQuorumTrackerTests::Erasure4Plus2BlockNotIncludingMyFailDomain_8_2 [GOOD] >> TQuorumTrackerTests::ErasureMirror3IncludingMyFailDomain_4_2 [GOOD] >> BackupRestore::TestAllIndexTypes-EIndexTypeGlobal >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientManyUpdates [GOOD] >> TConsoleConfigTests::TestConfigureOrderConflicts [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientManyUpdatesAddRemove >> TConsoleConfigTests::TestGetItems >> YdbImport::ImportFromS3ToExistingTable [GOOD] >> TYqlDecimalTests::SimpleUpsertSelect >> TConsoleConfigSubscriptionTests::TestReplaceConfigSubscriptions [GOOD] >> TNetClassifierUpdaterTest::TestGetUpdatesFromHttpServer [GOOD] >> TConfigsDispatcherTests::TestSubscriptionNotification [GOOD] >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobalAsync [GOOD] >> TSyncBrokerTests::ShouldReturnToken >> TConsoleConfigTests::TestGetItems [GOOD] >> AsyncIndexChangeExchange::ShouldDeliverChangesOnFreshTable [GOOD] >> TSyncBrokerTests::ShouldReturnTokensWithSameVDiskId >> KqpErrors::ProposeResultLost_RwTx+UseSink [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientManyUpdatesAddRemove [GOOD] >> YdbYqlClient::CreateTableWithUniformPartitionsAndAutoPartitioning [GOOD] >> KqpErrors::ProposeError [GOOD] >> KqpErrors::ProposeErrorEvWrite >> YdbSdkSessions::CloseSessionWithSessionPoolExplicit [GOOD] >> AsyncIndexChangeExchange::ShouldDeliverChangesOnAlteredTable |75.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut >> TNetClassifierUpdaterTest::TestFiltrationByNetboxCustomFieldsAndTags >> TSyncNeighborsTests::SerDes3 [GOOD] |75.5%| [LD] {default-linux-x86_64, release, asan} 
$(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base |75.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/apps/ydbd/ydbd |75.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant |75.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base |75.6%| [LD] {RESULT} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut >> TConsoleConfigSubscriptionTests::TestNotificationForNewSubscription >> TSyncBrokerTests::ShouldReturnTokensWithSameVDiskId [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientDeadCausesSubscriptionDeregistration >> KqpErrors::ProposeResultLost_RwTx-UseSink >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobalUnique [GOOD] >> TSyncBrokerTests::ShouldReturnToken [GOOD] >> TConsoleConfigTests::TestGetNodeItems >> TConfigsDispatcherTests::TestSubscriptionNotificationForNewSubscriberAfterUpdate >> TSyncNeighborsTests::SerDes1 [GOOD] |75.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/ydbd/ydbd |75.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base |75.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut |75.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TQuorumTrackerTests::ErasureMirror3IncludingMyFailDomain_4_2 [GOOD] |75.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant |75.6%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant |75.6%| [LD] {RESULT} $(B)/ydb/apps/ydbd/ydbd |75.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TSyncNeighborsTests::SerDes3 [GOOD] >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobalVectorKmeansTree >> TSyncBrokerTests::ShouldReleaseToken >> TConsoleConfigTests::TestGetNodeItems [GOOD] >> TConfigsDispatcherTests::TestSubscriptionNotificationForNewSubscriberAfterUpdate [GOOD] >> YdbSdkSessions::MultiThreadMultipleRequestsOnSharedSessionsTableClient [GOOD] >> TConsoleConfigTests::TestGetNodeConfig >> TSyncBrokerTests::ShouldReleaseToken [GOOD] >> TGRpcYdbTest::CreateAlterCopyAndDropTable [GOOD] >> TConfigsDispatcherTests::TestSubscriptionNotificationForNewSubscriberDuringUpdate >> TGRpcYdbTest::BeginTxRequestError >> TConfigsDispatcherTests::TestSubscriptionNotificationForNewSubscriberDuringUpdate [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest >> YdbSdkSessions::CloseSessionWithSessionPoolExplicit [GOOD] Test command err: 2025-03-31T16:04:42.157679Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005251634318333:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:42.157751Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d5e/r3tmp/tmpVLcMoy/pdisk_1.dat 2025-03-31T16:04:42.846686Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20679, node 1 2025-03-31T16:04:43.053271Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:43.053375Z node 1 :HIVE 
WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:43.078635Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:04:43.253551Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:43.253577Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:43.253585Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:43.253721Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4994 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:43.657476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:04:43.686528Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:04:47.161184Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488005251634318333:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:47.161241Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d5e/r3tmp/tmp2EPOP3/pdisk_1.dat 2025-03-31T16:04:51.929764Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488005291939465361:2078];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:51.929979Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:04:52.254760Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:52.262860Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:52.263001Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:52.279397Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13366, node 4 2025-03-31T16:04:52.720324Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:52.720349Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:52.720357Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:52.720550Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15261 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:53.128899Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
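Most of the captured stderr in these sdk_sessions_ut runs is a small set of messages repeated many times over ("Resource pool default not found or you don't have access permissions", METADATA_PROVIDER timeouts, TX_PROXY "path exist" complaints) while the tests themselves still end [GOOD]. Collapsing exact repeats first makes the genuinely unique records stand out when triaging; a throwaway filter along these lines works, assuming the leading timestamp is the first space-delimited token.

    #include <iostream>
    #include <map>
    #include <string>

    int main() {
        std::map<std::string, int> counts;
        std::string line;
        while (std::getline(std::cin, line)) {
            // Strip the leading timestamp token so identical messages
            // emitted at different times collapse into one bucket.
            auto pos = line.find(' ');
            counts[pos == std::string::npos ? line : line.substr(pos + 1)]++;
        }
        for (const auto& [msg, n] : counts)
            if (n > 1)
                std::cout << n << "x " << msg << '\n';
    }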
2025-03-31T16:04:56.634443Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7488005291939465361:2078];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:56.634507Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TSyncNeighborsTests::SerDes1 [GOOD] Test command err: 2025-03-31T16:05:06.308840Z node 1 :BS_SYNCER DEBUG: TEvQuerySyncToken, VDisk actor id: [0:1:1], actor id: [1:5:2052], token sent, active: 1, waiting: 0 2025-03-31T16:05:06.309011Z node 1 :BS_SYNCER DEBUG: TEvQuerySyncToken, VDisk actor id: [0:1:1], actor id: [1:6:2053], token sent, active: 1, waiting: 0 >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientDeadCausesSubscriptionDeregistration [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientReconnectsOnConnectionLoose >> TConfigsDispatcherTests::TestRemoveSubscription >> TConsoleConfigSubscriptionTests::TestNotificationForNewSubscription [GOOD] >> TConfigsDispatcherTests::TestRemoveSubscription [GOOD] >> TConfigsDispatcherTests::TestRemoveSubscriptionWhileUpdateInProcess >> TConsoleConfigSubscriptionTests::TestNotificationForNewConfigItem ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest >> YdbSdkSessions::TestActiveSessionCountAfterTransportError [GOOD] Test command err: 2025-03-31T16:04:40.379453Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005243043981328:2141];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:40.379505Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d80/r3tmp/tmp1IIESy/pdisk_1.dat 2025-03-31T16:04:41.515225Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:04:41.659783Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:41.672214Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:41.672337Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:41.677857Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23887, node 1 2025-03-31T16:04:42.031787Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:42.031808Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:42.031816Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:42.031945Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31818 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:42.698784Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:45.383381Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488005243043981328:2141];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:45.383457Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:47.589431Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005273108753480:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:47.589568Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005273108753492:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:47.590178Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:47.591168Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005273108753498:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:47.595219Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005273108753505:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:47.595277Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005273108753508:2360], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:47.595326Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:47.607478Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005273108753553:2366], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:47.607548Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:47.608701Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005273108753556:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:47.615492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:04:47.624141Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005273108753572:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:47.624214Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005273108753574:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:47.624262Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:47.645403Z node 1 :TX_PROXY ERROR: Actor# [1:7488005273108753510:2662] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-31T16:04:47.645520Z node 1 :TX_PROXY ERROR: Actor# [1:7488005273108753543:2674] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-31T16:04:47.646623Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005273108753591:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:47.646681Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005273108753594:2382], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:47.646732Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:47.672705Z node 1 :TX_PROXY ERROR: Actor# [1:7488005273108753559:2681] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-31T16:04:47.675396Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005273108753616:2390], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:47.675459Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005273108753618:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:47.675503Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:47.688888Z node 1 :TX_PROXY ERROR: Actor# [1:7488005273108753583:2697] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-31T16:04:47.696160Z node 1 :TX_PROXY ERROR: Actor# [1:7488005273108753604:2707] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-31T16:04:47.697469Z node 1 :TX_PROXY ERROR: Actor# [1:7488005273108753636:2730] txid# 281474976710664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-31T16:04:47.742293Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005273108753635:2394], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:04:47.742360Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005273108753503:2356], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:04:47.742397Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005273108753603:2386], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:04:47.742426Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005273108753580:2374], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:04:47.742462Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005273108753542:2361], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:04:47.742588Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005273108753558:2369], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:04:47.742625Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005273108753506:2358], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:04:47.837544Z node 1 :TX_PROXY ERROR: Actor# [1:7488005273108753707:2774] txid# 281474976710665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:04:47.837699Z node 1 :TX_PROXY ERROR: Actor# [1:7488005273108753709:2776] txid# 281474976710666, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:04:47.841532Z node 1 :TX_PROXY ERROR: Actor# [1:7488005273108753725:2789] txid# 281474976710668, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:04:47.841707Z node 1 :TX_PROXY ERROR: Actor# [1:7488005273108753714:2780] txid# 281474976710667, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:04:47.841953Z node 1 :TX_PROXY ERROR: Actor# [1:7488005273108753726:2790] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:04:47.842081Z node 1 :TX_PROXY ERROR: Actor# [1:7488005273108753740:2800] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:04:47.856433Z node 1 :TX_PROXY ERROR: Actor# [1:7488005273108753758:2815] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:04:50.863762Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488005287414943802:2211];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:50.864125Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d80/r3tmp/tmpnS0ml4/pdisk_1.dat 2025-03-31T16:04:51.285964Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:51.365833Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:51.365947Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:51.380896Z node 4 :HIVE WARN: HIVE#72057594037968897 
Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4261, node 4 2025-03-31T16:04:51.559899Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:51.559928Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:51.559935Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:51.560096Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2807 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:52.137620Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:55.867270Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7488005287414943802:2211];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:55.867353Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:56.060623Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488005308889781216:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:56.060706Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:56.111044Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:04:57.763910Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488005317479716265:2528], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:57.764010Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:57.764495Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488005317479716270:2531], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:57.769017Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-31T16:04:57.821529Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7488005317479716272:2532], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-31T16:04:57.898242Z node 4 :TX_PROXY ERROR: Actor# [4:7488005317479716380:2833] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:04:58.139696Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=N2M3OWUzOTItZGFhM2E2M2EtYzIyOWY0MDItMWM2MWU2ZGI=, ActorId: [4:7488005321774683742:2589], ActorState: ExecuteState, TraceId: 01jqpf5v4s9b69j73ng8a5q561, Create QueryResponse for error on request, msg: 2025-03-31T16:04:58.150380Z node 4 :KQP_SESSION ERROR: SessionId: ydb://session/3?node_id=4&id=N2M3OWUzOTItZGFhM2E2M2EtYzIyOWY0MDItMWM2MWU2ZGI=, ActorId: [4:7488005321774683742:2589], ActorState: ReadyState, Internal error, message: TKqpSessionActor in state ReadyState received unexpected event NKikimr::NGRpcService::TEvClientLost(0x108c0001) sender: [4:8320808721877066593:7169396] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TSyncBrokerTests::ShouldReleaseToken [GOOD] Test command err: 2025-03-31T16:05:06.176556Z node 1 :BS_SYNCER DEBUG: TEvQuerySyncToken, VDisk actor id: [0:1:1], actor id: [1:5:2052], token sent, active: 1, waiting: 0 2025-03-31T16:05:06.315323Z node 2 :BS_SYNCER DEBUG: TEvQuerySyncToken, VDisk actor id: [0:1:1], actor id: [2:5:2052], token sent, active: 1, waiting: 0 2025-03-31T16:05:06.315436Z node 2 :BS_SYNCER DEBUG: TEvReleaseSyncToken, VDisk actor id: [0:1:1], actor id: [2:5:2052], token released, active: 1, waiting: 0 >> TConsoleConfigTests::TestGetNodeConfig [GOOD] >> TConsoleConfigTests::TestAutoOrder >> DataShardVolatile::UpsertNoLocksArbiterRestart-UseSink [GOOD] >> TConfigsDispatcherTests::TestRemoveSubscriptionWhileUpdateInProcess [GOOD] >> DataShardVolatile::UpsertBrokenLockArbiterRestart+UseSink >> TConfigsDispatcherTests::TestEmptyChangeCausesNoNotification >> YdbScripting::BasicV0 [GOOD] >> YdbScripting::BasicV1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp_errors/unittest >> KqpErrors::ResolveTableError [GOOD] Test command err: 2025-03-31T16:04:57.430343Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:04:57.440385Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001c6d/r3tmp/tmp59QBlh/pdisk_1.dat 2025-03-31T16:04:58.456632Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:58.774068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:04:58.930217Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:58.930346Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:58.960660Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
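The BS_SYNCER traces in the TSyncBrokerTests output above describe a single-token broker: the first VDisk actor to ask gets the sync token ("token sent"), later askers are queued ("enqueued, active: 1, waiting: 1"), and a release either hands the token to the next in line or, when the releaser was still queued, simply drops its entry ("removed from queue", as in ShouldReleaseInQueue). A minimal FIFO model of that protocol follows; it assumes nothing about the real TSyncBroker internals.

    #include <algorithm>
    #include <deque>
    #include <iostream>
    #include <string>

    class TokenBroker {
    public:
        void Query(const std::string& actor) {
            if (Holder.empty()) {
                Holder = actor;
                std::cout << actor << ": token sent, waiting: " << Waiters.size() << '\n';
            } else {
                Waiters.push_back(actor);
                std::cout << actor << ": enqueued, waiting: " << Waiters.size() << '\n';
            }
        }

        void Release(const std::string& actor) {
            if (Holder == actor) {
                std::cout << actor << ": token released\n";
                Holder.clear();
                if (!Waiters.empty()) {  // hand the token to the next in line
                    Holder = Waiters.front();
                    Waiters.pop_front();
                    std::cout << Holder << ": token sent from queue\n";
                }
            } else {  // releasing before the token arrived: drop the queue entry
                Waiters.erase(std::remove(Waiters.begin(), Waiters.end(), actor),
                              Waiters.end());
                std::cout << actor << ": removed from queue\n";
            }
        }

    private:
        std::string Holder;
        std::deque<std::string> Waiters;
    };

    int main() {
        TokenBroker broker;
        broker.Query("[0:1:1]");    // token sent
        broker.Query("[0:1:2]");    // enqueued
        broker.Release("[0:1:2]");  // removed from queue, as in ShouldReleaseInQueue
        broker.Release("[0:1:1]");  // token released
    }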
2025-03-31T16:04:58.960800Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:58.977783Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:04:58.979276Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:04:58.979737Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:04:59.395643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:05:01.554811Z node 1 :KQP_EXECUTER TRACE: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqpf5y1599005efceqq8tjcd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGI1ZTZhZTMtOTQ3NWRjZjItZGRjODdhYTQtYmM3ZDA3NTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Bootstrap done, become ReadyState 2025-03-31T16:05:01.568100Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1559:2949] TxId: 281474976715658. Ctx: { TraceId: 01jqpf5y1599005efceqq8tjcd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGI1ZTZhZTMtOTQ3NWRjZjItZGRjODdhYTQtYmM3ZDA3NTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Executing physical tx, type: 2, stages: 1 2025-03-31T16:05:01.568351Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1559:2949] TxId: 281474976715658. Ctx: { TraceId: 01jqpf5y1599005efceqq8tjcd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGI1ZTZhZTMtOTQ3NWRjZjItZGRjODdhYTQtYmM3ZDA3NTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Got request, become WaitResolveState 2025-03-31T16:05:01.568633Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2025-03-31T16:05:01.568920Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715658. Resolved key sets: 1 2025-03-31T16:05:01.569105Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715658. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 4 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2025-03-31T16:05:01.569261Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1559:2949] TxId: 281474976715658. Ctx: { TraceId: 01jqpf5y1599005efceqq8tjcd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGI1ZTZhZTMtOTQ3NWRjZjItZGRjODdhYTQtYmM3ZDA3NTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Stage [0,0] AST: ( (return (lambda '() (block '( (let $1 (Just (Uint32 '1))) (let $2 (Just (Uint32 '2))) (let $3 (Just (Uint32 '3))) (return (Iterator (AsList (AsStruct '('"key" $1) '('"value" $1)) (AsStruct '('"key" $2) '('"value" $2)) (AsStruct '('"key" $3) '('"value" $3))))) )))) ) 2025-03-31T16:05:01.569433Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1559:2949] TxId: 281474976715658. 
Ctx: { TraceId: 01jqpf5y1599005efceqq8tjcd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGI1ZTZhZTMtOTQ3NWRjZjItZGRjODdhYTQtYmM3ZDA3NTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Stage [0,0] create compute task: 1 2025-03-31T16:05:01.569623Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715658. Ctx: { TraceId: 01jqpf5y1599005efceqq8tjcd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGI1ZTZhZTMtOTQ3NWRjZjItZGRjODdhYTQtYmM3ZDA3NTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-03-31T16:05:01.569686Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715658. Ctx: { TraceId: 01jqpf5y1599005efceqq8tjcd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGI1ZTZhZTMtOTQ3NWRjZjItZGRjODdhYTQtYmM3ZDA3NTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, snapshot: {0, 0} 2025-03-31T16:05:01.570154Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715658. Ctx: { TraceId: 01jqpf5y1599005efceqq8tjcd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGI1ZTZhZTMtOTQ3NWRjZjItZGRjODdhYTQtYmM3ZDA3NTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Collect channels updates for task: 1 at actor [1:1562:2949] 2025-03-31T16:05:01.570234Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715658. Ctx: { TraceId: 01jqpf5y1599005efceqq8tjcd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGI1ZTZhZTMtOTQ3NWRjZjItZGRjODdhYTQtYmM3ZDA3NTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Sending channels info to compute actor: [1:1562:2949], channels: 0 2025-03-31T16:05:01.570319Z node 1 :KQP_EXECUTER INFO: ActorId: [1:1559:2949] TxId: 281474976715658. Ctx: { TraceId: 01jqpf5y1599005efceqq8tjcd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGI1ZTZhZTMtOTQ3NWRjZjItZGRjODdhYTQtYmM3ZDA3NTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Total tasks: 1, readonly: 0, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-03-31T16:05:01.570374Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1559:2949] TxId: 281474976715658. Ctx: { TraceId: 01jqpf5y1599005efceqq8tjcd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGI1ZTZhZTMtOTQ3NWRjZjItZGRjODdhYTQtYmM3ZDA3NTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Updating channels after the creation of compute actors 2025-03-31T16:05:01.570414Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715658. Ctx: { TraceId: 01jqpf5y1599005efceqq8tjcd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGI1ZTZhZTMtOTQ3NWRjZjItZGRjODdhYTQtYmM3ZDA3NTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Collect channels updates for task: 1 at actor [1:1562:2949] 2025-03-31T16:05:01.570459Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715658. Ctx: { TraceId: 01jqpf5y1599005efceqq8tjcd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGI1ZTZhZTMtOTQ3NWRjZjItZGRjODdhYTQtYmM3ZDA3NTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Sending channels info to compute actor: [1:1562:2949], channels: 0 2025-03-31T16:05:01.570536Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1559:2949] TxId: 281474976715658. 
Ctx: { TraceId: 01jqpf5y1599005efceqq8tjcd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGI1ZTZhZTMtOTQ3NWRjZjItZGRjODdhYTQtYmM3ZDA3NTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Waiting for: CA [1:1562:2949], 2025-03-31T16:05:01.570595Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1559:2949] TxId: 281474976715658. Ctx: { TraceId: 01jqpf5y1599005efceqq8tjcd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGI1ZTZhZTMtOTQ3NWRjZjItZGRjODdhYTQtYmM3ZDA3NTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. ActorState: WaitResolveState, waiting for 1 compute actor(s) and 0 datashard(s): CA [1:1562:2949], 2025-03-31T16:05:01.570663Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1559:2949] TxId: 281474976715658. Ctx: { TraceId: 01jqpf5y1599005efceqq8tjcd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGI1ZTZhZTMtOTQ3NWRjZjItZGRjODdhYTQtYmM3ZDA3NTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. ActorState: WaitResolveState, immediate tx, become ExecuteState 2025-03-31T16:05:01.637215Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1559:2949] TxId: 281474976715658. Ctx: { TraceId: 01jqpf5y1599005efceqq8tjcd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGI1ZTZhZTMtOTQ3NWRjZjItZGRjODdhYTQtYmM3ZDA3NTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. ActorState: ExecuteState, got execution state from compute actor: [1:1562:2949], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { } 2025-03-31T16:05:01.637352Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1559:2949] TxId: 281474976715658. Ctx: { TraceId: 01jqpf5y1599005efceqq8tjcd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGI1ZTZhZTMtOTQ3NWRjZjItZGRjODdhYTQtYmM3ZDA3NTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Waiting for: CA [1:1562:2949], 2025-03-31T16:05:01.637414Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1559:2949] TxId: 281474976715658. Ctx: { TraceId: 01jqpf5y1599005efceqq8tjcd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGI1ZTZhZTMtOTQ3NWRjZjItZGRjODdhYTQtYmM3ZDA3NTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [1:1562:2949], 2025-03-31T16:05:01.656031Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1559:2949] TxId: 281474976715658. Ctx: { TraceId: 01jqpf5y1599005efceqq8tjcd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGI1ZTZhZTMtOTQ3NWRjZjItZGRjODdhYTQtYmM3ZDA3NTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. ActorState: ExecuteState, got execution state from compute actor: [1:1562:2949], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 8662 Tasks { TaskId: 1 CpuTimeUs: 1150 FinishTimeMs: 1743437101638 EgressBytes: 30 EgressRows: 3 ComputeCpuTimeUs: 51 BuildCpuTimeUs: 1099 HostName: "ghrun-tuhoqst4xe" NodeId: 1 CreateTimeMs: 1743437101571 } MaxMemoryUsage: 1048576 } 2025-03-31T16:05:01.656217Z node 1 :KQP_EXECUTER INFO: TxId: 281474976715658. Ctx: { TraceId: 01jqpf5y1599005efceqq8tjcd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGI1ZTZhZTMtOTQ3NWRjZjItZGRjODdhYTQtYmM3ZDA3NTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Compute actor has finished execution: [1:1562:2949] 2025-03-31T16:05:01.656377Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1559:2949] TxId: 281474976715658. 
Ctx: { TraceId: 01jqpf5y1599005efceqq8tjcd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGI1ZTZhZTMtOTQ3NWRjZjItZGRjODdhYTQtYmM3ZDA3NTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Send Commit to BufferActor=[1:1558:2949] 2025-03-31T16:05:01.656472Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1559:2949] TxId: 281474976715658. Ctx: { TraceId: 01jqpf5y1599005efceqq8tjcd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGI1ZTZhZTMtOTQ3NWRjZjItZGRjODdhYTQtYmM3ZDA3NTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Resource usage for last stat interval: ComputeTime: 0.008662s ReadRows: 0 ReadBytes: 0 ru: 5 rate limiter was not found force flag: 1 2025-03-31T16:05:01.739896Z node 1 :KQP_EXECUTER INFO: ActorId: [1:1559:2949] TxId: 281474976715658. Ctx: { TraceId: 01jqpf5y1599005efceqq8tjcd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGI1ZTZhZTMtOTQ3NWRjZjItZGRjODdhYTQtYmM3ZDA3NTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Full stats: { CpuTimeUs: 99361 DurationUs: 1743437100771794 Tables { TablePath: "/Root/table-1" WriteRows: 3 WriteBytes: 24 AffectedPartitions: 1 } ExecuterCpuTimeUs: 90699 StartTimeMs: 966 FinishTimeMs: 1743437101737 Stages { StageGuid: "7be10978-bdeb201f-7812caaa-84e89618" Program: "(\n(return (lambda \'() (block \'(\n (let $1 (Just (Uint32 \'1)))\n (let $2 (Just (Uint32 \'2)))\n (let $3 (Just (Uint32 \'3)))\n (return (Iterator (AsList (AsStruct \'(\'\"key\" $1) \'(\'\"value\" $1)) (AsStruct \'(\'\"key\" $2) \'(\'\"value\" $2)) (AsStruct \'(\'\"key\" $3) \'(\'\"value\" $3)))))\n))))\n)\n" ComputeActors { CpuTimeUs: 8662 Tasks { TaskId: 1 CpuTimeUs: 1150 FinishTimeMs: 1743437101638 EgressBytes: 30 EgressRows: 3 ComputeCpuTimeUs: 51 BuildCpuTimeUs: 1099 HostName: "ghrun-tuhoqst4xe" NodeId: 1 CreateTimeMs: 1743437101571 } MaxMemoryUsage: 1048576 } BaseTimeMs: 1743437101638 } TxPlansWithStats: "{\"Node Type\":\"Phase\",\"PlanNodeId\":3,\"Plans\":[{\"Node Type\":\"Sink\",\"PlanNodeId\":2,\"Plans\":[{\"Node Type\":\"ConstantExpr-Sink\",\"Operators\":[{\"Inputs\":[],\"Iterator\":\"[{key: 1,value: 1},{key: 2,value: 2},{key: 3,value: 3}]\",\"Name\":\"Iterator\"},{\"Inputs\":[],\"Name\":\"Upsert\",\"Path\":\"\\/Root\\/table-1\",\"SinkType\":\"KqpTableSink\",\"Table\":\"table-1\"}],\"PlanNodeId\":1,\"StageGuid\":\"7be10978-bdeb201f-7812caaa-84e89618\",\"Stats\":{\"BaseTimeMs\":1743437101638,\"ComputeNodes\":[{\"CpuTimeUs\":8662,\"Tasks\":[{\"ComputeTimeUs\":51,\"EgressBytes\":30,\"EgressRows\":3,\"FinishTimeMs\":1743437101638,\"Host\":\"ghrun-tuhoqst4xe\",\"NodeId\":1,\"TaskId\":1}]}],\"FinishedTasks\":0,\"PhysicalStageId\":0,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"},\"Tables\":[\"table-1\"]}],\"StageGuid\":\"\"}],\"StageGuid\":\"\"}" StatConvertBytes: 759 Extra { type_url: "type.googleapis.com/NKqpProto.TKqpExecutionExtraStats" value: "\010\001\022\013\010\326C\020\326C\030\326C \001" } } 2025-03-31T16:05:01.740434Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1559:2949] TxId: 281474976715658. Ctx: { TraceId: 01jqpf5y1599005efceqq8tjcd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGI1ZTZhZTMtOTQ3NWRjZjItZGRjODdhYTQtYmM3ZDA3NTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. terminate execution. 2025-03-31T16:05:01.763381Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1559:2949] TxId: 281474976715658. 
Ctx: { TraceId: 01jqpf5y1599005efceqq8tjcd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGI1ZTZhZTMtOTQ3NWRjZjItZGRjODdhYTQtYmM3ZDA3NTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Terminate, become ZombieState 2025-03-31T16:05:02.075907Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:1580:2968], status: UNAVAILABLE, issues:
: Error: Table metadata loading, code: 1050
:1:1: Error: Failed to load metadata for table: db.[/Root/table-1]
: Error: LookupError, code: 2005 2025-03-31T16:05:02.077949Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTM0ODU5MGYtZGUxOWU1YWQtZTYwY2Y0NDAtYzMwZjE3M2I=, ActorId: [1:1578:2966], ActorState: ExecuteState, TraceId: 01jqpf5yq43fermh3bg4nd0w1r, ReplyQueryCompileError, status UNAVAILABLE remove tx with tx_id: ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::CreateTableWithUniformPartitionsAndAutoPartitioning [GOOD] Test command err: 2025-03-31T16:04:35.720823Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005223272862814:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:35.720874Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000df4/r3tmp/tmpGkIVfC/pdisk_1.dat 2025-03-31T16:04:36.851904Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:36.915568Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:04:36.934803Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:36.955258Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:36.989578Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 65084, node 1 2025-03-31T16:04:37.503175Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:37.503204Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:37.503212Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:37.503331Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14617 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:38.021768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
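The compile failure above (status UNAVAILABLE, "Failed to load metadata for table") is a retryable condition: the table exists, but its metadata could not be loaded at that moment. A minimal sketch of driving such a query through the Python SDK's session pool, which retries UNAVAILABLE with backoff; the endpoint and database are placeholders, and the table name is taken from the test output rather than a prescribed setup:

import ydb

driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")  # placeholder connection
driver.wait(timeout=5)
pool = ydb.SessionPool(driver)

def upsert_rows(session):
    # Transient statuses such as UNAVAILABLE are retried by
    # retry_operation_sync with exponential backoff.
    session.transaction(ydb.SerializableReadWrite()).execute(
        "UPSERT INTO `table-1` (key, value) VALUES (1, 1), (2, 2), (3, 3);",
        commit_tx=True,
    )

pool.retry_operation_sync(upsert_rows)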
2025-03-31T16:04:40.710773Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488005223272862814:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:40.710873Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:42.668956Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488005251669506728:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:42.669238Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000df4/r3tmp/tmplsgxSg/pdisk_1.dat 2025-03-31T16:04:43.089977Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:43.126802Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:43.127045Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:43.132713Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63510, node 4 2025-03-31T16:04:43.268208Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:43.268237Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:43.268245Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:43.268411Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21255 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:43.666600Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:04:46.258384Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:04:46.600599Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037889 not found 2025-03-31T16:04:46.744599Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037888 not found 2025-03-31T16:04:49.128140Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7488005281036261676:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:49.128208Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000df4/r3tmp/tmpFFVAV7/pdisk_1.dat 2025-03-31T16:04:49.445747Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:49.521145Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:49.521239Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:49.524729Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4522, node 7 2025-03-31T16:04:49.733150Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:49.733173Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:49.733185Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:49.733310Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28760 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:50.036671Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:04:53.905800Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:04:54.135277Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7488005281036261676:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:54.135362Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:56.665112Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7488005310641663249:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:56.665743Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000df4/r3tmp/tmpTafNaa/pdisk_1.dat 2025-03-31T16:04:57.269846Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:57.333920Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:57.334028Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:57.338299Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20653, node 10 2025-03-31T16:04:57.659783Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:57.659811Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:57.659822Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:57.659997Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1446 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:57.946496Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
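The repeated METADATA_PROVIDER errors above ("cannot detect path existence ... error=incorrect path status: LookupError" and "error=timeout") come from the provider polling for //Root/.metadata/initialization/migrations before bootstrap has created it, so they are expected noise in these tests. A sketch of the same probe done client-side with the SDK's scheme client, assuming a recent ydb-python-sdk; the endpoint and database are placeholders:

import ydb

driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")  # placeholder connection
driver.wait(timeout=5)

def path_exists(driver, path):
    # describe_path raises SchemeError for an absent entry, the client-side
    # analogue of the LookupError path status seen in the log.
    try:
        driver.scheme_client.describe_path(path)
        return True
    except ydb.SchemeError:
        return False

print(path_exists(driver, "/Root/.metadata/initialization/migrations"))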
2025-03-31T16:05:01.628189Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7488005310641663249:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:05:01.628276Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:05:03.627135Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 >> YdbS3Internal::BadRequests [GOOD] >> KqpPg::InsertNoTargetColumns_Serial+useSink [GOOD] >> TConfigsDispatcherTests::TestEmptyChangeCausesNoNotification [GOOD] >> YdbYqlClient::CheckDefaultTableSettings2 [GOOD] >> TConfigsDispatcherTests::TestYamlAndNonYamlCoexist >> YdbYqlClient::CheckDefaultTableSettings3 |75.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientReconnectsOnConnectionLoose [GOOD] >> TConsoleConfigTests::TestAutoOrder [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApplyWithKnownConfig >> TConsoleConfigTests::TestAutoSplit >> TGRpcYdbTest::ReadTablePg [GOOD] >> TConsoleTests::TestRestartConsoleAndPoolsExtSubdomain >> TConsoleConfigTests::TestAutoSplit [GOOD] >> GroupWriteTest::WithRead >> YdbOlapStore::LogLast50 [GOOD] >> TConfigsDispatcherTests::TestYamlAndNonYamlCoexist [GOOD] |75.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable |75.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest |75.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator |75.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable |75.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator |75.7%| [LD] {RESULT} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable |75.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest >> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApplyWithKnownConfig [GOOD] >> TNetClassifierTest::TestInitFromRemoteSource >> YdbSdkSessions::CloseSessionWithSessionPoolFromDtors [GOOD] >> TYqlDecimalTests::SimpleUpsertSelect [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForNewConfigItem [GOOD] >> BackupRestore::TestAllIndexTypes-EIndexTypeGlobal [GOOD] >> AsyncIndexChangeExchange::ShouldDeliverChangesOnAlteredTable [GOOD] >> IncrementalBackup::ComplexRestoreBackupCollection+WithIncremental [FAIL] >> BackupRestore::TestAllIndexTypes-EIndexTypeGlobalAsync >> test_sql_streaming.py::test[suites-GroupByHop-default.txt] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowTimeExtractorUnusedColumns-default.txt] >> test_sql_streaming.py::test[suites-ReadTopicWithMetadataWithFilter-default.txt] >> TGRpcYdbTest::BeginTxRequestError [GOOD] >> YdbScripting::BasicV1 [GOOD] >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobalVectorKmeansTree [GOOD] >> KqpPg::TableInsert+useSink [GOOD] >> YdbScripting::MultiResults >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_AuthNotRequired >> YdbYqlClient::CheckDefaultTableSettings3 [GOOD] >> 
KqpErrors::ProposeResultLost_RwTx-UseSink [GOOD] >> TSubscriberCombinationsTest::CombinationsMigratedPath [GOOD] >> KqpErrors::ProposeErrorEvWrite [GOOD] >> DataShardVolatile::UpsertBrokenLockArbiterRestart+UseSink [GOOD] >> Viewer::Cluster10000Tablets [GOOD] >> TPersQueueTest::DisableDeduplication [GOOD] >> YdbOlapStore::LogLast50ByResource >> TConfigsDispatcherTests::TestYamlEndToEnd >> TYqlDecimalTests::NegativeValues >> TConsoleTests::TestAlterTenantModifyStorageResourcesForPending >> TConsoleConfigSubscriptionTests::TestNotificationForModifiedConfigItem >> AsyncIndexChangeExchange::ShouldRemoveRecordsAfterDroppingIndex >> TConsoleTests::TestRestartConsoleAndPoolsExtSubdomain [GOOD] >> IncrementalBackup::ComplexRestoreBackupCollection-WithIncremental >> TConsoleTests::TestSetDefaultStorageUnitsQuota >> KqpPg::TableInsert-useSink >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_AuthNotRequired [GOOD] >> Viewer::FuzzySearcherLimit1OutOf4 [GOOD] >> Viewer::FuzzySearcherLimit2OutOf4 [GOOD] >> Viewer::ExecuteQueryDoesntExecuteSchemeOperationsInsideTransation >> DataShardVolatile::UpsertBrokenLockArbiterRestart-UseSink >> TConsoleConfigTests::TestValidation >> TConfigsDispatcherTests::TestYamlEndToEnd [GOOD] >> TConsoleConfigHelpersTests::TestConfigCourier >> GroupWriteTest::WithRead [GOOD] >> TNetClassifierTest::TestInitFromRemoteSource [GOOD] >> TYqlDecimalTests::NegativeValues [GOOD] >> TConsoleTests::TestAlterTenantModifyStorageResourcesForPending [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForModifiedConfigItem [GOOD] >> BackupRestore::TestAllIndexTypes-EIndexTypeGlobalAsync [GOOD] >> AsyncIndexChangeExchange::ShouldRemoveRecordsAfterDroppingIndex [GOOD] >> IncrementalBackup::ComplexRestoreBackupCollection-WithIncremental [GOOD] >> YdbScripting::MultiResults [GOOD] >> Viewer::ExecuteQueryDoesntExecuteSchemeOperationsInsideTransation [GOOD] >> Viewer::FloatPointJsonQuery >> TConsoleConfigHelpersTests::TestConfigCourier [GOOD] >> TConsoleConfigHelpersTests::TestConfigSubscriber >> BackupRestore::TestAllIndexTypes-EIndexTypeGlobalUnique [GOOD] >> TConsoleTests::TestAlterTenantModifyStorageResourcesForPendingExtSubdomain >> BackupRestore::TestAllIndexTypes-EIndexTypeGlobalVectorKmeansTree >> TConsoleTests::TestSetDefaultStorageUnitsQuota [GOOD] |75.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest |75.7%| [TA] {RESULT} $(B)/ydb/core/mind/hive/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |75.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest |75.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest >> YdbSdkSessions::MultiThreadMultipleRequestsOnSharedSessionsTableClient [GOOD] Test command err: 2025-03-31T16:04:38.627570Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005234644286975:2208];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:38.627790Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d8f/r3tmp/tmpUKZZbI/pdisk_1.dat 2025-03-31T16:04:39.733987Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:39.755222Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:04:39.765776Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:39.765904Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:39.788507Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3859, node 1 2025-03-31T16:04:40.314769Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:40.314804Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:40.314811Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:40.314929Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22033 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:41.002596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:04:43.676356Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488005234644286975:2208];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:43.676423Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:47.791783Z node 1 :KQP_PROXY ERROR: TraceId: "01jqpf5b4v2ngyk654amfnvftr", Request deadline has expired for 1.698654s seconds 2025-03-31T16:04:47.791885Z node 1 :KQP_PROXY ERROR: TraceId: "01jqpf5b6b3y8wnp7sa0vrkbda", Request deadline has expired for 1.708795s seconds 2025-03-31T16:04:47.791926Z node 1 :KQP_PROXY ERROR: TraceId: "01jqpf5bgq88xfb7tnf9ragg72", Request deadline has expired for 1.598834s seconds 2025-03-31T16:04:47.791979Z node 1 :KQP_PROXY ERROR: TraceId: "01jqpf5bgw6036wtvhg77mgmaw", Request deadline has expired for 1.568883s seconds 2025-03-31T16:04:47.792008Z node 1 :KQP_PROXY ERROR: TraceId: "01jqpf5bgw0navpfk37e48npba", Request deadline has expired for 1.708920s seconds 2025-03-31T16:04:47.792045Z node 1 :KQP_PROXY ERROR: TraceId: "01jqpf5bgwbtrp7hd6m562f3yn", Request deadline has expired for 1.598955s seconds 2025-03-31T16:04:47.792095Z node 1 :KQP_PROXY ERROR: TraceId: "01jqpf5bgx7xrv7xjwhvvzx3qv", Request deadline has expired for 2.109007s seconds 2025-03-31T16:04:47.792123Z node 1 :KQP_PROXY ERROR: TraceId: "01jqpf5bj5bwyzttksf88rd8vz", Request deadline has expired for 1.849033s seconds 2025-03-31T16:04:47.792145Z node 1 :KQP_PROXY ERROR: TraceId: "01jqpf5bjv6h4gjjsnjz8k80fy", Request deadline has expired for 2.049058s seconds 2025-03-31T16:04:47.792175Z node 1 :KQP_PROXY ERROR: TraceId: "01jqpf5bk74rgq8rxpd0bywxy6", Request deadline has expired for 1.849086s seconds 2025-03-31T16:04:47.792206Z node 1 :KQP_PROXY ERROR: TraceId: "01jqpf5bk78y6mjmj0hxr023jj", Request deadline has expired for 1.849119s seconds 2025-03-31T16:04:47.792253Z node 1 :KQP_PROXY ERROR: TraceId: "01jqpf5bk7fe76z28epxnkrp7m", Request deadline has expired for 1.699162s seconds 2025-03-31T16:04:47.792276Z node 1 :KQP_PROXY ERROR: TraceId: "01jqpf5bk789y4pjrekwffh5zj", Request deadline has expired for 1.849189s seconds 2025-03-31T16:04:47.792317Z node 1 :KQP_PROXY ERROR: TraceId: "01jqpf5bk7871h0egcwkf65pf2", Request deadline has expired for 2.049227s seconds 2025-03-31T16:04:47.792355Z node 1 :KQP_PROXY ERROR: TraceId: "01jqpf5bk85nm7qheqhqcamarw", Request deadline has expired for 1.379267s seconds 2025-03-31T16:04:47.792387Z node 1 :KQP_PROXY ERROR: TraceId: "01jqpf5bk83fa966x4v4xyv6bw", Request deadline has expired for 1.379293s seconds 2025-03-31T16:04:47.792409Z node 1 :KQP_PROXY ERROR: TraceId: "01jqpf5bk80y2004gf1bh39per", Request deadline has expired for 2.109322s seconds 2025-03-31T16:04:47.792437Z node 1 :KQP_PROXY ERROR: TraceId: "01jqpf5bk878btd6hw8nm3n1rw", Request deadline has expired for 2.109349s seconds 2025-03-31T16:04:47.792465Z node 1 :KQP_PROXY ERROR: TraceId: "01jqpf5bk8anrbme1qfhq0zvnv", Request deadline has expired for 2.029377s seconds 2025-03-31T16:04:47.792494Z node 1 :KQP_PROXY ERROR: TraceId: "01jqpf5bk89rrwvtc8nyaskem2", Request deadline has expired for 2.109404s seconds 2025-03-31T16:04:47.792517Z node 1 :KQP_PROXY ERROR: TraceId: "01jqpf5bk77na3rpnebtd29xry", Request deadline has expired for 1.569430s seconds 2025-03-31T16:04:47.792541Z node 1 :KQP_PROXY ERROR: TraceId: "01jqpf5bk7f760tj9tw9xcef4x", Request deadline has expired for 1.849454s seconds 
2025-03-31T16:04:47.792577Z node 1 :KQP_PROXY ERROR: TraceId: "01jqpf5bk7eghef9jfw4dwbcej", Request deadline has expired for 1.849490s seconds 2025-03-31T16:04:47.792608Z node 1 :KQP_PROXY ERROR: TraceId: "01jqpf5bk724dk385aq58c2smh", Request deadline has expired for 1.699517s seconds 2025-03-31T16:04:47.792633Z node 1 :KQP_PROXY ERROR: TraceId: "01jqpf5bk784y6xedvk2pd3qys", Request deadline has expired for 1.849546s seconds 2025-03-31T16:04:47.792660Z node 1 :KQP_PROXY ERROR: TraceId: "01jqpf5bk75rf382zmn5j5ymn8", Request deadline has expired for 1.699571s seconds 2025-03-31T16:04:47.792690Z node 1 :KQP_PROXY ERROR: TraceId: "01jqpf5bk78n63ycn6xcxgcc32", Request deadline has expired for 2.049601s seconds 2025-03-31T16:04:47.792723Z node 1 :KQP_PROXY ERROR: TraceId: "01jqpf5bk7es5wkjgfcvhvkyvd", Request deadline has expired for 1.709631s seconds 2025-03-31T16:04:47.792760Z node 1 :KQP_PROXY ERROR: TraceId: "01jqpf5bk702dymhmp65wj27rn", Request deadline has expired for 1.849672s seconds 2025-03-31T16:04:47.792789Z node 1 :KQP_PROXY ERROR: TraceId: "01jqpf5bk8etywehpmq5vkyvw6", Request deadline has expired for 1.849701s seconds 2025-03-31T16:04:47.792851Z node 1 :KQP_PROXY ERROR: TraceId: "01jqpf5bk8dtwcznb88rw9ga6w", Request deadline has expired for 1.569761s seconds 2025-03-31T16:04:47.792881Z node 1 :KQP_PROXY ERROR: TraceId: "01jqpf5bmraryfjrnnhz8ytres", Request deadline has expired for 1.301791s seconds 2025-03-31T16:04:47.792907Z node 1 :KQP_PROXY ERROR: TraceId: "01jqpf5bn7227gytkchc4e9ev9", Request deadline has expired for 1.361819s seconds 2025-03-31T16:04:47.792937Z node 1 :KQP_PROXY ERROR: TraceId: "01jqpf5bn727kxzwyaz77rk16d", Request deadline has expired for 1.361848s seconds 2025-03-31T16:04:47.792966Z node 1 :KQP_PROXY ERROR: TraceId: "01jqpf5bn74y5ycykcvbjpvdy6", Request deadline has expired for 1.361878s seconds 2025-03-31T16:04:47.792994Z node 1 :KQP_PROXY ERROR: TraceId: "01jqpf5bn76vwjw154fd67nn27", Request deadline has expired for 1.621902s seconds 2025-03-31T16:04:47.793021Z node 1 :KQP_PROXY ERROR: TraceId: "01jqpf5bn7azyz7apz5jecv39n", Request deadline has expired for 1.361932s seconds 2025-03-31T16:04:47.793050Z node 1 :KQP_PROXY ERROR: TraceId: "01jqpf5bn7dx4b8jh8n67ffy3e", Request deadline has expired for 0.821958s seconds 2025-03-31T16:04:47.793081Z node 1 :KQP_PROXY ERROR: TraceId: "01jqpf5bn72ah6b2yd3q5787qg", Request deadline has expired for 1.361992s seconds 2025-03-31T16:04:47.793115Z node 1 :KQP_PROXY ERROR: TraceId: "01jqpf5bnb7qgvj4q6dpyk9xtw", Request deadline has expired for 1.362023s seconds 2025-03-31T16:04:47.793142Z node 1 :KQP_PROXY ERROR: TraceId: "01jqpf5bqz5s5nq223q444n3x7", Request deadline has expired for 0.772053s seconds 2025-03-31T16:04:47.793168Z node 1 :KQP_PROXY ERROR: TraceId: "01jqpf5bqzermm38ey7r7me9zc", Request deadline has expired for 0.882080s seconds 2025-03-31T16:04:47.793197Z node 1 :KQP_PROXY ERROR: TraceId: "01jqpf5bqz96xyf1x14bvhe61d", Request deadline has expired for 0.681109s seconds 2025-03-31T16:04:47.793223Z node 1 :KQP_PROXY ERROR: TraceId: "01jqpf5bqv92msf54dbrqpmfjv", Request deadline has expired for 0.772133s seconds 2025-03-31T16:04:47.793247Z node 1 :KQP_PROXY ERROR: TraceId: "01jqpf5bs37svm9xhft4bmc7a6", Request deadline has expired for 0.644160s seconds 2025-03-31T16:04:48.475968Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005277593961189:2543], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:48.476086Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005277593961290:2596], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:48.476188Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005277593961311:2603], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:48.476224Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005277593961308:2600], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:48.476262Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005277593961313:2605], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:48.476379Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005277593961289:2595], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:48.476404Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005277593961309:2601], DatabaseId: /Root, PoolId: default, Failed to fetch pool info ... uestId: 430 2025-03-31T16:05:06.468773Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=YWQ3ODFmM2MtZTI1Nzg4ODMtNWRkYTdkMDItYTc1YmNiYTM=, ActorId: [4:7488005328814860683:2360], ActorState: ExecuteState, TraceId: 01jqpf636t8e8dg2mhzcvzrh43, Reply query error, msg: Pending previous query completion proxyRequestId: 447 2025-03-31T16:05:06.468796Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=YWQ3ODFmM2MtZTI1Nzg4ODMtNWRkYTdkMDItYTc1YmNiYTM=, ActorId: [4:7488005328814860683:2360], ActorState: ExecuteState, TraceId: 01jqpf636t8e8dg2mhzcvzrh43, Reply query error, msg: Pending previous query completion proxyRequestId: 464 2025-03-31T16:05:06.468822Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OGY5MjdkZjItYjlmYmM4YjEtYmUzNTgyMmUtNmM0ZjBhZGQ=, ActorId: [4:7488005328814860674:2352], ActorState: ExecuteState, TraceId: 01jqpf636g18qed51taqtxd1k7, Reply query error, msg: Pending previous query completion proxyRequestId: 444 2025-03-31T16:05:06.468885Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OGY5MjdkZjItYjlmYmM4YjEtYmUzNTgyMmUtNmM0ZjBhZGQ=, ActorId: [4:7488005328814860674:2352], ActorState: ExecuteState, TraceId: 01jqpf636g18qed51taqtxd1k7, Reply query error, msg: Pending previous query completion proxyRequestId: 450 2025-03-31T16:05:06.468975Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OGY5MjdkZjItYjlmYmM4YjEtYmUzNTgyMmUtNmM0ZjBhZGQ=, ActorId: [4:7488005328814860674:2352], ActorState: ExecuteState, TraceId: 01jqpf636g18qed51taqtxd1k7, Reply query error, msg: Pending previous query completion proxyRequestId: 467 2025-03-31T16:05:06.469010Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OGY5MjdkZjItYjlmYmM4YjEtYmUzNTgyMmUtNmM0ZjBhZGQ=, ActorId: [4:7488005328814860674:2352], ActorState: ExecuteState, TraceId: 01jqpf636g18qed51taqtxd1k7, Reply query error, msg: Pending previous query completion proxyRequestId: 471 2025-03-31T16:05:06.469063Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OGY5MjdkZjItYjlmYmM4YjEtYmUzNTgyMmUtNmM0ZjBhZGQ=, ActorId: [4:7488005328814860674:2352], ActorState: ExecuteState, TraceId: 01jqpf636g18qed51taqtxd1k7, Reply query error, msg: Pending previous query completion proxyRequestId: 475 2025-03-31T16:05:06.469094Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OGY5MjdkZjItYjlmYmM4YjEtYmUzNTgyMmUtNmM0ZjBhZGQ=, ActorId: [4:7488005328814860674:2352], ActorState: ExecuteState, TraceId: 01jqpf636g18qed51taqtxd1k7, Reply query error, msg: Pending previous query completion proxyRequestId: 476 2025-03-31T16:05:06.469172Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OGY5MjdkZjItYjlmYmM4YjEtYmUzNTgyMmUtNmM0ZjBhZGQ=, ActorId: [4:7488005328814860674:2352], ActorState: ExecuteState, TraceId: 01jqpf636g18qed51taqtxd1k7, Reply query error, msg: Pending previous query completion proxyRequestId: 480 2025-03-31T16:05:06.469364Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NTQyNDJiNTctYjk3YWU0N2EtZjYyZmU0ZGYtMjdiN2EzMTc=, ActorId: [4:7488005328814860677:2355], ActorState: ExecuteState, TraceId: 01jqpf63755d95h3q9kt0mma0b, Reply query error, msg: Pending previous query completion proxyRequestId: 425 
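The WorkloadService warnings above ("Resource pool default not found or you don't have access permissions") fire when a query arrives before the built-in pool at .metadata/workload_manager/pools/default has been created; the earlier TX_PROXY message ("path exist, request accepts it") shows the racing creation succeeding. A sketch of declaring an explicit pool instead, assuming the CREATE RESOURCE POOL DDL of YDB workload management and the query-service API of a recent ydb-python-sdk; the pool name and limits are illustrative, not values from this run:

import ydb

driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")  # placeholder connection
driver.wait(timeout=5)
pool = ydb.QuerySessionPool(driver)

# Pool name and limits below are assumptions for illustration only.
pool.execute_with_retries(
    """
    CREATE RESOURCE POOL test_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10,
        QUEUE_SIZE = 100
    );
    """
)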
2025-03-31T16:05:06.474626Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NTQyNDJiNTctYjk3YWU0N2EtZjYyZmU0ZGYtMjdiN2EzMTc=, ActorId: [4:7488005328814860677:2355], ActorState: ExecuteState, TraceId: 01jqpf63755d95h3q9kt0mma0b, Reply query error, msg: Pending previous query completion proxyRequestId: 488 2025-03-31T16:05:06.474757Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ODNmMjYyYjItNjQyOWRlMWEtMTY0ODEzMjEtZmM2YWI4MDU=, ActorId: [4:7488005328814860682:2359], ActorState: ExecuteState, TraceId: 01jqpf63730davs8js80dhe1ba, Reply query error, msg: Pending previous query completion proxyRequestId: 489 2025-03-31T16:05:06.474792Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ODNmMjYyYjItNjQyOWRlMWEtMTY0ODEzMjEtZmM2YWI4MDU=, ActorId: [4:7488005328814860682:2359], ActorState: ExecuteState, TraceId: 01jqpf63730davs8js80dhe1ba, Reply query error, msg: Pending previous query completion proxyRequestId: 498 2025-03-31T16:05:06.474838Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=YWQ3ODFmM2MtZTI1Nzg4ODMtNWRkYTdkMDItYTc1YmNiYTM=, ActorId: [4:7488005328814860683:2360], ActorState: ExecuteState, TraceId: 01jqpf636t8e8dg2mhzcvzrh43, Reply query error, msg: Pending previous query completion proxyRequestId: 490 2025-03-31T16:05:06.474875Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=YWQ3ODFmM2MtZTI1Nzg4ODMtNWRkYTdkMDItYTc1YmNiYTM=, ActorId: [4:7488005328814860683:2360], ActorState: ExecuteState, TraceId: 01jqpf636t8e8dg2mhzcvzrh43, Reply query error, msg: Pending previous query completion proxyRequestId: 495 2025-03-31T16:05:06.474907Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=YWQ3ODFmM2MtZTI1Nzg4ODMtNWRkYTdkMDItYTc1YmNiYTM=, ActorId: [4:7488005328814860683:2360], ActorState: ExecuteState, TraceId: 01jqpf636t8e8dg2mhzcvzrh43, Reply query error, msg: Pending previous query completion proxyRequestId: 505 2025-03-31T16:05:06.474995Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=YWQ3ODFmM2MtZTI1Nzg4ODMtNWRkYTdkMDItYTc1YmNiYTM=, ActorId: [4:7488005328814860683:2360], ActorState: ExecuteState, TraceId: 01jqpf636t8e8dg2mhzcvzrh43, Reply query error, msg: Pending previous query completion proxyRequestId: 507 2025-03-31T16:05:06.475035Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OGY5MjdkZjItYjlmYmM4YjEtYmUzNTgyMmUtNmM0ZjBhZGQ=, ActorId: [4:7488005328814860674:2352], ActorState: ExecuteState, TraceId: 01jqpf636g18qed51taqtxd1k7, Reply query error, msg: Pending previous query completion proxyRequestId: 499 2025-03-31T16:05:06.475067Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=YmNlOGQ1YjItZmE1Y2U0YjMtZmMwZDg1NDEtOWM0Y2ViYTg=, ActorId: [4:7488005328814860679:2357], ActorState: ExecuteState, TraceId: 01jqpf637003bdb7fydr4era6y, Reply query error, msg: Pending previous query completion proxyRequestId: 491 2025-03-31T16:05:06.475113Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OGY5MjdkZjItYjlmYmM4YjEtYmUzNTgyMmUtNmM0ZjBhZGQ=, ActorId: [4:7488005328814860674:2352], ActorState: ExecuteState, TraceId: 01jqpf636g18qed51taqtxd1k7, Reply query error, msg: Pending previous query completion proxyRequestId: 501 2025-03-31T16:05:06.475147Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OGY5MjdkZjItYjlmYmM4YjEtYmUzNTgyMmUtNmM0ZjBhZGQ=, ActorId: [4:7488005328814860674:2352], ActorState: ExecuteState, TraceId: 01jqpf636g18qed51taqtxd1k7, Reply query error, msg: Pending previous query completion 
proxyRequestId: 502 2025-03-31T16:05:06.475327Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=YmNlOGQ1YjItZmE1Y2U0YjMtZmMwZDg1NDEtOWM0Y2ViYTg=, ActorId: [4:7488005328814860679:2357], ActorState: ExecuteState, TraceId: 01jqpf637003bdb7fydr4era6y, Reply query error, msg: Pending previous query completion proxyRequestId: 493 2025-03-31T16:05:06.475370Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=YmNlOGQ1YjItZmE1Y2U0YjMtZmMwZDg1NDEtOWM0Y2ViYTg=, ActorId: [4:7488005328814860679:2357], ActorState: ExecuteState, TraceId: 01jqpf637003bdb7fydr4era6y, Reply query error, msg: Pending previous query completion proxyRequestId: 503 2025-03-31T16:05:06.476389Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NDZlZjUyMzktYTI3YTVlODItZDUzZjJkMzUtY2JjMmFhNmY=, ActorId: [4:7488005328814860646:2351], ActorState: ExecuteState, TraceId: 01jqpf637344v2pk1vgqx7tjjt, Reply query error, msg: Pending previous query completion proxyRequestId: 494 2025-03-31T16:05:06.476447Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NDZlZjUyMzktYTI3YTVlODItZDUzZjJkMzUtY2JjMmFhNmY=, ActorId: [4:7488005328814860646:2351], ActorState: ExecuteState, TraceId: 01jqpf637344v2pk1vgqx7tjjt, Reply query error, msg: Pending previous query completion proxyRequestId: 500 2025-03-31T16:05:06.477808Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZTkxMjliOGQtZTRjOWJjZTMtYmY3ZmEwNi1lZDgxYzk4Nw==, ActorId: [4:7488005328814860645:2350], ActorState: ExecuteState, TraceId: 01jqpf637442r4bw3ervzp7baw, Reply query error, msg: Pending previous query completion proxyRequestId: 485 2025-03-31T16:05:06.477859Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZTkxMjliOGQtZTRjOWJjZTMtYmY3ZmEwNi1lZDgxYzk4Nw==, ActorId: [4:7488005328814860645:2350], ActorState: ExecuteState, TraceId: 01jqpf637442r4bw3ervzp7baw, Reply query error, msg: Pending previous query completion proxyRequestId: 497 2025-03-31T16:05:06.477940Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NDY4OWY5OGMtMmYyYzAxZmMtNWEzYjNjNDEtM2FjMjU0MzE=, ActorId: [4:7488005328814860676:2354], ActorState: ExecuteState, TraceId: 01jqpf636p33qs7qxj6ps0p5ks, Reply query error, msg: Pending previous query completion proxyRequestId: 486 2025-03-31T16:05:06.477982Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NDY4OWY5OGMtMmYyYzAxZmMtNWEzYjNjNDEtM2FjMjU0MzE=, ActorId: [4:7488005328814860676:2354], ActorState: ExecuteState, TraceId: 01jqpf636p33qs7qxj6ps0p5ks, Reply query error, msg: Pending previous query completion proxyRequestId: 492 2025-03-31T16:05:06.478015Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NDY4OWY5OGMtMmYyYzAxZmMtNWEzYjNjNDEtM2FjMjU0MzE=, ActorId: [4:7488005328814860676:2354], ActorState: ExecuteState, TraceId: 01jqpf636p33qs7qxj6ps0p5ks, Reply query error, msg: Pending previous query completion proxyRequestId: 504 2025-03-31T16:05:06.478048Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NDY4OWY5OGMtMmYyYzAxZmMtNWEzYjNjNDEtM2FjMjU0MzE=, ActorId: [4:7488005328814860676:2354], ActorState: ExecuteState, TraceId: 01jqpf636p33qs7qxj6ps0p5ks, Reply query error, msg: Pending previous query completion proxyRequestId: 509 2025-03-31T16:05:06.478488Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZjIyNjBhZDUtZGI1NGQ0ZDQtNzI1ZDQ4YTUtNDlhNGU1ZDQ=, ActorId: [4:7488005328814860684:2361], ActorState: ExecuteState, TraceId: 01jqpf636ebd0m01xvg9fw6pky, Reply query error, msg: Pending previous query 
completion proxyRequestId: 487 2025-03-31T16:05:06.478527Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZjIyNjBhZDUtZGI1NGQ0ZDQtNzI1ZDQ4YTUtNDlhNGU1ZDQ=, ActorId: [4:7488005328814860684:2361], ActorState: ExecuteState, TraceId: 01jqpf636ebd0m01xvg9fw6pky, Reply query error, msg: Pending previous query completion proxyRequestId: 496 2025-03-31T16:05:06.478560Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZjIyNjBhZDUtZGI1NGQ0ZDQtNzI1ZDQ4YTUtNDlhNGU1ZDQ=, ActorId: [4:7488005328814860684:2361], ActorState: ExecuteState, TraceId: 01jqpf636ebd0m01xvg9fw6pky, Reply query error, msg: Pending previous query completion proxyRequestId: 506 2025-03-31T16:05:06.478615Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZjIyNjBhZDUtZGI1NGQ0ZDQtNzI1ZDQ4YTUtNDlhNGU1ZDQ=, ActorId: [4:7488005328814860684:2361], ActorState: ExecuteState, TraceId: 01jqpf636ebd0m01xvg9fw6pky, Reply query error, msg: Pending previous query completion proxyRequestId: 508 2025-03-31T16:05:06.478644Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZjIyNjBhZDUtZGI1NGQ0ZDQtNzI1ZDQ4YTUtNDlhNGU1ZDQ=, ActorId: [4:7488005328814860684:2361], ActorState: ExecuteState, TraceId: 01jqpf636ebd0m01xvg9fw6pky, Reply query error, msg: Pending previous query completion proxyRequestId: 510 2025-03-31T16:05:06.478673Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZjIyNjBhZDUtZGI1NGQ0ZDQtNzI1ZDQ4YTUtNDlhNGU1ZDQ=, ActorId: [4:7488005328814860684:2361], ActorState: ExecuteState, TraceId: 01jqpf636ebd0m01xvg9fw6pky, Reply query error, msg: Pending previous query completion proxyRequestId: 511 >> TConsoleConfigTests::TestValidation [GOOD] >> TConsoleConfigTests::TestCheckConfigUpdates >> TConsoleConfigHelpersTests::TestConfigSubscriber [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForModifiedConfigItemScope >> AsyncIndexChangeExchange::ShouldRemoveRecordsAfterCancelIndexBuild >> TConsoleConfigTests::TestCheckConfigUpdates [GOOD] >> TConsoleTests::TestAlterTenantModifyStorageResourcesForPendingExtSubdomain [GOOD] >> TConsoleTests::TestSetDefaultComputationalUnitsQuota >> TConsoleTests::TestSetDefaultComputationalUnitsQuota [GOOD] >> TConsoleTests::TestTenantConfigConsistency >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantTenant >> AsyncIndexChangeExchange::ShouldRemoveRecordsAfterCancelIndexBuild [GOOD] >> TConsoleConfigTests::TestManageValidators >> TConsoleTests::TestAlterTenantModifyStorageResourcesForRunning >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantTenant [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForModifiedConfigItemScope [GOOD] >> AsyncIndexChangeExchange::ShouldDeliverChangesOnSplitMerge >> TConsoleConfigTests::TestManageValidators [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest >> YdbSdkSessions::CloseSessionWithSessionPoolFromDtors [GOOD] Test command err: 2025-03-31T16:04:42.423521Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005253761901076:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:42.423571Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d72/r3tmp/tmpl2VT9P/pdisk_1.dat 2025-03-31T16:04:43.471970Z node 1 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:04:43.583666Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:43.668123Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:43.668246Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:43.691253Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21460, node 1 2025-03-31T16:04:44.223751Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:44.223773Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:44.223797Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:44.223913Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20856 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:45.122470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
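The long run of "Pending previous query completion" warnings earlier in this output is the expected response to issuing a second request on a table-service session that is still executing: one session handles one query at a time. A sketch of the usual fix, letting a session pool hand each worker its own session; the connection details and pool size are placeholders:

import threading
import ydb

driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")  # placeholder connection
driver.wait(timeout=5)
pool = ydb.SessionPool(driver, size=10)

def worker(i):
    # Each retry_operation_sync call checks a session out of the pool,
    # so concurrent workers never share one busy session.
    pool.retry_operation_sync(
        lambda session: session.transaction().execute(
            "SELECT %d;" % i, commit_tx=True
        )
    )

threads = [threading.Thread(target=worker, args=(i,)) for i in range(20)]
for t in threads:
    t.start()
for t in threads:
    t.join()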
2025-03-31T16:04:47.427356Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488005253761901076:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:47.427440Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:59.560673Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488005323745127066:2184];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:59.560728Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d72/r3tmp/tmpyPTxBO/pdisk_1.dat 2025-03-31T16:04:59.816437Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:59.862454Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:59.862694Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:59.874019Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13080, node 4 2025-03-31T16:05:00.188021Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:05:00.188045Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:05:00.188053Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:05:00.188203Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24174 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:05:00.760720Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
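The TSubscriberCombinationsTest trace below walks the scheme-board subscription protocol: the path owner performs TEvHandshake and TEvCommitGeneration with each replica, the subscriber resolves the replica list and sends TEvSubscribe per replica, each replica upserts the path and answers TEvNotify (Version: 0 while no description exists), and the main subscriber sets up its state once a majority of its three per-replica actors have reported, ignoring the remaining empty notify. A toy model of that aggregation, using illustrative names rather than the NKikimr event types:

// Hypothetical sketch: three replicas answer a subscription for a path that
// has no description yet (Version 0); the subscriber acts on a majority.
#include <cstdint>
#include <iostream>
#include <string>
#include <vector>

struct Notify { std::string path; uint64_t version; };

struct Replica {
    // Returns the stored version for the path, or 0 if nothing is described yet.
    Notify Subscribe(const std::string& path) { return {path, 0}; }
};

int main() {
    std::vector<Replica> replicas(3);
    const std::string path = "/root/tenant";
    int answered = 0;
    for (auto& r : replicas) {
        Notify n = r.Subscribe(path);
        ++answered;
        if (answered == 2) {  // majority of 3: "Set up state"
            std::cout << "Set up state: version " << n.version << "\n";
        } else if (answered > 2) {
            std::cout << "Ignore empty state\n";  // third empty notify adds nothing
        }
    }
}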
2025-03-31T16:05:04.564074Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7488005323745127066:2184];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:05:04.564160Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberCombinationsTest::CombinationsMigratedPath [GOOD] Test command err: =========== Path: "/root/tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 =========== Path: "/root/tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 2025-03-31T16:03:34.953598Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:34:2065] 2025-03-31T16:03:34.953678Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:3:2050] Successful handshake: owner# 800, generation# 1 2025-03-31T16:03:34.953855Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:34:2065] 2025-03-31T16:03:34.953889Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:3:2050] Commit generation: owner# 800, generation# 1 2025-03-31T16:03:34.953977Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:35:2066] 2025-03-31T16:03:34.954016Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Successful handshake: owner# 800, generation# 1 2025-03-31T16:03:34.954288Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:35:2066] 2025-03-31T16:03:34.954322Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Commit generation: owner# 800, generation# 1 2025-03-31T16:03:34.954417Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:37:2068][/root/tenant] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-03-31T16:03:34.954851Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [1:41:2068] 2025-03-31T16:03:34.954891Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:3:2050] Upsert description: path# /root/tenant 2025-03-31T16:03:34.955012Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:3:2050] Subscribe: subscriber# [1:41:2068], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2025-03-31T16:03:34.955291Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [1:42:2068] 2025-03-31T16:03:34.955335Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# /root/tenant 2025-03-31T16:03:34.955408Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Subscribe: subscriber# [1:42:2068], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2025-03-31T16:03:34.955551Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:9:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [1:43:2068] 2025-03-31T16:03:34.955576Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:9:2056] Upsert description: path# /root/tenant 2025-03-31T16:03:34.955612Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:9:2056] Subscribe: 
subscriber# [1:43:2068], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2025-03-31T16:03:34.955673Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:41:2068][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:3:2050] 2025-03-31T16:03:34.955722Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:41:2068] 2025-03-31T16:03:34.955764Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:42:2068][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:6:2053] 2025-03-31T16:03:34.955799Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:42:2068] 2025-03-31T16:03:34.955849Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:43:2068][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:9:2056] 2025-03-31T16:03:34.955882Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:9:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:43:2068] 2025-03-31T16:03:34.955949Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:37:2068][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:38:2068] 2025-03-31T16:03:34.956033Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:37:2068][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:39:2068] 2025-03-31T16:03:34.956074Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:37:2068][/root/tenant] Set up state: owner# [1:36:2067], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-31T16:03:34.956123Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:37:2068][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:40:2068] 2025-03-31T16:03:34.956177Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:37:2068][/root/tenant] Ignore empty state: owner# [1:36:2067], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } =========== !argsLeft.IsDeletion 2025-03-31T16:03:34.956400Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:34:2065], cookie# 0, event size# 103 2025-03-31T16:03:34.956445Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:3:2050] Update description: path# /root/tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false 2025-03-31T16:03:34.956503Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:3:2050] Upsert description: path# /root/tenant, pathId# [OwnerId: 800, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path /root/tenant, PathId [OwnerId: 800, LocalPathId: 2], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2025-03-31T16:03:34.956672Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:41:2068][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant PathId: [OwnerId: 800, LocalPathId: 2] Version: 1 }: sender# [1:3:2050] 2025-03-31T16:03:34.956731Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 1 }: sender# [1:41:2068] 2025-03-31T16:03:34.956783Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:37:2068][/root/tenant] Handle 
NKikimrSchemeBoard.TEvNotify { Path: /root/tenant PathId: [OwnerId: 800, LocalPathId: 2] Version: 1 }: sender# [1:38:2068] 2025-03-31T16:03:34.956831Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:37:2068][/root/tenant] Update to strong state: owner# [1:36:2067], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 800, LocalPathId: 2], Version: 1) DomainId: [OwnerId: 800, LocalPathId: 2] AbandonedSchemeShards: there are 0 elements } =========== argsLeft.GetSuperId() >= argsRight.GetSuperId() =========== Path: "/root/tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 =========== Path: "/root/tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 1 PathOwnerId: 900 2025-03-31T16:03:35.448820Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:3:2050] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [3:34:2065] 2025-03-31T16:03:35.448882Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:3:2050] Successful handshake: owner# 800, generation# 1 2025-03-31T16:03:35.449147Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 900 Generation: 1 }: sender# [3:35:2066] 2025-03-31T16:03:35.449178Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Successful handshake: owner# 900, generation# 1 2025-03-31T16:03:35.449391Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:3:2050] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [3:34:2065] 2025-03-31T16:03:35.449447Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:3:2050] Commit generation: owner# 800, generation# 1 2025-03-31T16:03:35.449839Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 900 Generation: 1 }: sender# [3:35:2066] 2025-03-31T16:03:35.449870Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Commit generation: owner# 900, generation# 1 2025-03-31T16:03:35.449947Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:37:2068][/root/tenant] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-03-31T16:03:35.450313Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:3:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [3:41:2068] 2025-03-31T16:03:35.450343Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:3:2050] Upsert description: path# /root/tenant 2025-03-31T16:03:35.450424Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:3:2050] Subscribe: subscriber# [3:41:2068], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2025-03-31T16:03:35.450588Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [3:42:2068] 2025-03-31T16:03:35.450623Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Upsert description: path# /root/tenant 2025-03-31T16:03:35.450675Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Subscribe: subscriber# [3:42:2068], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2025-03-31T16:03:35.450796Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:9:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [3:43:2068] 2025-03-31T16:03:35.450817Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:9:2056] Upsert description: path# /root/tenant 2025-03-31T16:03:35.450852Z node 3 :SCHEME_BOARD_REPLICA INFO: 
[3:9:2056] Subscribe: subscriber# [3:43:2068], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2025-03-31T16:03:35.450903Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:41:2068][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [3:3:2050] 2025-03-31T16:03:35.450946Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:41:2068] 2025-03-31T16:03:35.450997Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:42:2068][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [3:6:2053] 2025-03-31T16:03:35.451054Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:42:2068] 2025-03-31T16:03:35.451111Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:43:2068][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [3:9:2056] 2025-03-31T16:03:35.451148Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:9:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:43:2068] 2025-03-31T16:03:35.451230Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:37:2068][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [3:38:2068] 2025-03-31T16:03:35.451297Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:37:2068][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [3:39:2068] 2025-03-31T16:03:35.451375Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][3:37:2068][/root/tenant] Set up state: owner# [3:36:2067], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-31T16:03:35.451430Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:37:2068][/ ... 
omainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } =========== argsLeft.GetSuperId() >= argsRight.GetSuperId() =========== Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 2 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 2025-03-31T16:05:18.143160Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:3:2050] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [397:34:2065] 2025-03-31T16:05:18.143234Z node 397 :SCHEME_BOARD_REPLICA NOTICE: [397:3:2050] Successful handshake: owner# 910, generation# 1 2025-03-31T16:05:18.143380Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:3:2050] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [397:34:2065] 2025-03-31T16:05:18.143414Z node 397 :SCHEME_BOARD_REPLICA NOTICE: [397:3:2050] Commit generation: owner# 910, generation# 1 2025-03-31T16:05:18.143460Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [397:35:2066] 2025-03-31T16:05:18.143491Z node 397 :SCHEME_BOARD_REPLICA NOTICE: [397:6:2053] Successful handshake: owner# 910, generation# 1 2025-03-31T16:05:18.143689Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [397:35:2066] 2025-03-31T16:05:18.143719Z node 397 :SCHEME_BOARD_REPLICA NOTICE: [397:6:2053] Commit generation: owner# 910, generation# 1 2025-03-31T16:05:18.143809Z node 397 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][397:37:2068][/Root/Tenant/table_inside] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-03-31T16:05:18.144258Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:3:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [397:41:2068] 2025-03-31T16:05:18.144298Z node 397 :SCHEME_BOARD_REPLICA INFO: [397:3:2050] Upsert description: path# /Root/Tenant/table_inside 2025-03-31T16:05:18.144381Z node 397 :SCHEME_BOARD_REPLICA INFO: [397:3:2050] Subscribe: subscriber# [397:41:2068], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2025-03-31T16:05:18.144542Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [397:42:2068] 2025-03-31T16:05:18.144572Z node 397 :SCHEME_BOARD_REPLICA INFO: [397:6:2053] Upsert description: path# /Root/Tenant/table_inside 2025-03-31T16:05:18.144617Z node 397 :SCHEME_BOARD_REPLICA INFO: [397:6:2053] Subscribe: subscriber# [397:42:2068], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2025-03-31T16:05:18.144776Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:9:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [397:43:2068] 2025-03-31T16:05:18.144802Z node 397 :SCHEME_BOARD_REPLICA INFO: [397:9:2056] Upsert description: path# /Root/Tenant/table_inside 2025-03-31T16:05:18.144845Z node 397 :SCHEME_BOARD_REPLICA INFO: [397:9:2056] Subscribe: subscriber# [397:43:2068], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: 
true 2025-03-31T16:05:18.144917Z node 397 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][397:41:2068][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [397:3:2050] 2025-03-31T16:05:18.144971Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [397:41:2068] 2025-03-31T16:05:18.145020Z node 397 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][397:42:2068][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [397:6:2053] 2025-03-31T16:05:18.145062Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [397:42:2068] 2025-03-31T16:05:18.145111Z node 397 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][397:43:2068][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [397:9:2056] 2025-03-31T16:05:18.145153Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:9:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [397:43:2068] 2025-03-31T16:05:18.145231Z node 397 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][397:37:2068][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [397:38:2068] 2025-03-31T16:05:18.145302Z node 397 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][397:37:2068][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [397:39:2068] 2025-03-31T16:05:18.145351Z node 397 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][397:37:2068][/Root/Tenant/table_inside] Set up state: owner# [397:36:2067], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-31T16:05:18.145401Z node 397 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][397:37:2068][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [397:40:2068] 2025-03-31T16:05:18.145445Z node 397 :SCHEME_BOARD_SUBSCRIBER INFO: [main][397:37:2068][/Root/Tenant/table_inside] Ignore empty state: owner# [397:36:2067], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } =========== argsLeft.GetSuperId() >= argsRight.GetSuperId() =========== Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 2025-03-31T16:05:18.617379Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:3:2050] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [399:34:2065] 2025-03-31T16:05:18.617438Z node 399 :SCHEME_BOARD_REPLICA NOTICE: [399:3:2050] Successful handshake: owner# 910, generation# 1 2025-03-31T16:05:18.617561Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:3:2050] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [399:34:2065] 2025-03-31T16:05:18.617592Z node 399 :SCHEME_BOARD_REPLICA NOTICE: [399:3:2050] Commit generation: owner# 910, generation# 1 2025-03-31T16:05:18.617636Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:6:2053] 
Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [399:35:2066] 2025-03-31T16:05:18.617668Z node 399 :SCHEME_BOARD_REPLICA NOTICE: [399:6:2053] Successful handshake: owner# 910, generation# 1 2025-03-31T16:05:18.617860Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [399:35:2066] 2025-03-31T16:05:18.617889Z node 399 :SCHEME_BOARD_REPLICA NOTICE: [399:6:2053] Commit generation: owner# 910, generation# 1 2025-03-31T16:05:18.617970Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][399:37:2068][/Root/Tenant/table_inside] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-03-31T16:05:18.618404Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:3:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [399:41:2068] 2025-03-31T16:05:18.618439Z node 399 :SCHEME_BOARD_REPLICA INFO: [399:3:2050] Upsert description: path# /Root/Tenant/table_inside 2025-03-31T16:05:18.618522Z node 399 :SCHEME_BOARD_REPLICA INFO: [399:3:2050] Subscribe: subscriber# [399:41:2068], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2025-03-31T16:05:18.618684Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [399:42:2068] 2025-03-31T16:05:18.618709Z node 399 :SCHEME_BOARD_REPLICA INFO: [399:6:2053] Upsert description: path# /Root/Tenant/table_inside 2025-03-31T16:05:18.618748Z node 399 :SCHEME_BOARD_REPLICA INFO: [399:6:2053] Subscribe: subscriber# [399:42:2068], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2025-03-31T16:05:18.618877Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:9:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [399:43:2068] 2025-03-31T16:05:18.618901Z node 399 :SCHEME_BOARD_REPLICA INFO: [399:9:2056] Upsert description: path# /Root/Tenant/table_inside 2025-03-31T16:05:18.618939Z node 399 :SCHEME_BOARD_REPLICA INFO: [399:9:2056] Subscribe: subscriber# [399:43:2068], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2025-03-31T16:05:18.619002Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][399:41:2068][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:3:2050] 2025-03-31T16:05:18.619055Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [399:41:2068] 2025-03-31T16:05:18.619150Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][399:42:2068][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:6:2053] 2025-03-31T16:05:18.619192Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [399:42:2068] 2025-03-31T16:05:18.619232Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][399:43:2068][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:9:2056] 2025-03-31T16:05:18.619269Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:9:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [399:43:2068] 2025-03-31T16:05:18.619342Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][399:37:2068][/Root/Tenant/table_inside] Handle 
NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:38:2068] 2025-03-31T16:05:18.619408Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][399:37:2068][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:39:2068] 2025-03-31T16:05:18.619452Z node 399 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][399:37:2068][/Root/Tenant/table_inside] Set up state: owner# [399:36:2067], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-31T16:05:18.619500Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][399:37:2068][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:40:2068] 2025-03-31T16:05:18.619539Z node 399 :SCHEME_BOARD_SUBSCRIBER INFO: [main][399:37:2068][/Root/Tenant/table_inside] Ignore empty state: owner# [399:36:2067], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } =========== argsLeft.GetSuperId() >= argsRight.GetSuperId() ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp_errors/unittest >> KqpErrors::ProposeErrorEvWrite [GOOD] Test command err: 2025-03-31T16:04:57.711516Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:698:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:04:57.712111Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:04:57.712379Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:04:57.713880Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:695:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:04:57.714352Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:04:57.714559Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001c81/r3tmp/tmpzDhlTP/pdisk_1.dat 2025-03-31T16:04:58.220519Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:58.460857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:04:58.581757Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:58.582130Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:58.594083Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:58.594206Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:58.610634Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:04:58.611356Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:04:58.611919Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:04:58.955299Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:05:00.566230Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1596:2961], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:05:00.566397Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1606:2966], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:05:00.566506Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:05:00.584454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:05:01.464222Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1610:2969], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:05:01.756363Z node 1 :TX_PROXY ERROR: Actor# [1:1764:3056] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:05:02.685589Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Begin literal execution. Operation timeout: 0.000000s, cancelAfter: (empty maybe) 2025-03-31T16:05:02.685679Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Begin literal execution, txs: 1 2025-03-31T16:05:02.685771Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2025-03-31T16:05:02.685831Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Stage [0,0] AST: ( (return (lambda '() (block '( (let $1 (Just (Uint32 '1))) (let $2 (Just (Uint32 '2))) (let $3 (Just (Uint32 '3))) (return (ToStream (Just (AsList (AsStruct '('"key" $1) '('"value" $1)) (AsStruct '('"key" $2) '('"value" $2)) (AsStruct '('"key" $3) '('"value" $3)))))) )))) ) 2025-03-31T16:05:02.685893Z node 1 :KQP_EXECUTER DEBUG: Create result channelId: 1 from task: 1 with index: 0 2025-03-31T16:05:02.689414Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Execution is complete, results: 1 2025-03-31T16:05:02.758400Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqpf5xgj299qyhzegmby8n06, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWVmNzhhYTYtOWQ0MmUwZTctZTBiZmNhMzktMzJhNmEyNzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Begin literal execution. Operation timeout: 299.410228s, cancelAfter: (empty maybe) 2025-03-31T16:05:02.758494Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqpf5xgj299qyhzegmby8n06, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWVmNzhhYTYtOWQ0MmUwZTctZTBiZmNhMzktMzJhNmEyNzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Begin literal execution, txs: 1 2025-03-31T16:05:02.758564Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2025-03-31T16:05:02.758624Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqpf5xgj299qyhzegmby8n06, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWVmNzhhYTYtOWQ0MmUwZTctZTBiZmNhMzktMzJhNmEyNzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Stage [0,0] AST: ( (return (lambda '() (block '( (let $1 (Just (Uint32 '1))) (let $2 (Just (Uint32 '2))) (let $3 (Just (Uint32 '3))) (return (ToStream (Just (AsList (AsStruct '('"key" $1) '('"value" $1)) (AsStruct '('"key" $2) '('"value" $2)) (AsStruct '('"key" $3) '('"value" $3)))))) )))) ) 2025-03-31T16:05:02.758692Z node 1 :KQP_EXECUTER DEBUG: Create result channelId: 1 from task: 1 with index: 0 2025-03-31T16:05:02.759557Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. 
Ctx: { TraceId: 01jqpf5xgj299qyhzegmby8n06, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWVmNzhhYTYtOWQ0MmUwZTctZTBiZmNhMzktMzJhNmEyNzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Execution is complete, results: 1 2025-03-31T16:05:02.760416Z node 1 :KQP_EXECUTER TRACE: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqpf5xgj299qyhzegmby8n06, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWVmNzhhYTYtOWQ0MmUwZTctZTBiZmNhMzktMzJhNmEyNzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Bootstrap done, become ReadyState 2025-03-31T16:05:02.760794Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1790:2959] TxId: 281474976715660. Ctx: { TraceId: 01jqpf5xgj299qyhzegmby8n06, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWVmNzhhYTYtOWQ0MmUwZTctZTBiZmNhMzktMzJhNmEyNzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Executing physical tx, type: 2, stages: 1 2025-03-31T16:05:02.760991Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1790:2959] TxId: 281474976715660. Ctx: { TraceId: 01jqpf5xgj299qyhzegmby8n06, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWVmNzhhYTYtOWQ0MmUwZTctZTBiZmNhMzktMzJhNmEyNzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Got request, become WaitResolveState 2025-03-31T16:05:02.761109Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2025-03-31T16:05:02.761390Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715660. Resolved key sets: 1 2025-03-31T16:05:02.761591Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715660. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 4 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2025-03-31T16:05:02.761735Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1790:2959] TxId: 281474976715660. Ctx: { TraceId: 01jqpf5xgj299qyhzegmby8n06, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWVmNzhhYTYtOWQ0MmUwZTctZTBiZmNhMzktMzJhNmEyNzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Stage [0,0] AST: ( (declare %kqp%tx_result_binding_0_0 (ListType (StructType '('"key" (OptionalType (DataType 'Uint32))) '('"value" (OptionalType (DataType 'Uint32)))))) (return (lambda '() (block '( (let $1 (KqpTable '"/Root/table-1" '"72057594046644480:2" '"" '1)) (let $2 (OptionalType (DataType 'Uint32))) (return (KqpEffects (KqpUpsertRows $1 (Iterator %kqp%tx_result_binding_0_0) '('"key" '"value") '('('"Mode" '"upsert"))))) )))) ) 2025-03-31T16:05:02.762064Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1790:2959] TxId: 281474976715660. Ctx: { TraceId: 01jqpf5xgj299qyhzegmby8n06, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWVmNzhhYTYtOWQ0MmUwZTctZTBiZmNhMzktMzJhNmEyNzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Stage [0,0] will be executed on 1 shards. 2025-03-31T16:05:02.762191Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1790:2959] TxId: 281474976715660. 
Ctx: { TraceId: 01jqpf5xgj299qyhzegmby8n06, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWVmNzhhYTYtOWQ0MmUwZTctZTBiZmNhMzktMzJhNmEyNzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, stage: [0,0] create datashard task: 1, shard: 72075186224037888, meta: TTaskMeta{ ShardId: 72075186224037888, Reads: { none }, Writes: { ranges: TShardKeyRanges{ (Uint32 : 1), (Uint32 : 2), (Uint32 : 3), } } } 2025-03-31T16:05:02.762659Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqpf5xgj299qyhzegmby8n06, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWVmNzhhYTYtOWQ0MmUwZTctZTBiZmNhMzktMzJhNmEyNzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:05:02.7 ... db://session/3?node_id=3&id=NWVjZjVlZjQtMjRjYTY5ZWUtZDg1ZTY0NTQtNDY5YTExZWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Updating channels after the creation of compute actors 2025-03-31T16:05:17.516495Z node 3 :KQP_EXECUTER DEBUG: TxId: 281474976715672. Ctx: { TraceId: 01jqpf6e0j09cbpdc5ab41s0f7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NWVjZjVlZjQtMjRjYTY5ZWUtZDg1ZTY0NTQtNDY5YTExZWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Collect channels updates for task: 1 at actor [3:2055:3204] 2025-03-31T16:05:17.516562Z node 3 :KQP_EXECUTER DEBUG: TxId: 281474976715672. Ctx: { TraceId: 01jqpf6e0j09cbpdc5ab41s0f7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NWVjZjVlZjQtMjRjYTY5ZWUtZDg1ZTY0NTQtNDY5YTExZWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Sending channels info to compute actor: [3:2055:3204], channels: 0 2025-03-31T16:05:17.516652Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:2052:3204] TxId: 281474976715672. Ctx: { TraceId: 01jqpf6e0j09cbpdc5ab41s0f7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NWVjZjVlZjQtMjRjYTY5ZWUtZDg1ZTY0NTQtNDY5YTExZWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [3:2055:3204], 2025-03-31T16:05:17.516728Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:2052:3204] TxId: 281474976715672. Ctx: { TraceId: 01jqpf6e0j09cbpdc5ab41s0f7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NWVjZjVlZjQtMjRjYTY5ZWUtZDg1ZTY0NTQtNDY5YTExZWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, waiting for 1 compute actor(s) and 0 datashard(s): CA [3:2055:3204], 2025-03-31T16:05:17.516788Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:2052:3204] TxId: 281474976715672. Ctx: { TraceId: 01jqpf6e0j09cbpdc5ab41s0f7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NWVjZjVlZjQtMjRjYTY5ZWUtZDg1ZTY0NTQtNDY5YTExZWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, immediate tx, become ExecuteState 2025-03-31T16:05:17.517815Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:2052:3204] TxId: 281474976715672. Ctx: { TraceId: 01jqpf6e0j09cbpdc5ab41s0f7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NWVjZjVlZjQtMjRjYTY5ZWUtZDg1ZTY0NTQtNDY5YTExZWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [3:2055:3204], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { } 2025-03-31T16:05:17.517888Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:2052:3204] TxId: 281474976715672. 
Ctx: { TraceId: 01jqpf6e0j09cbpdc5ab41s0f7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NWVjZjVlZjQtMjRjYTY5ZWUtZDg1ZTY0NTQtNDY5YTExZWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [3:2055:3204], 2025-03-31T16:05:17.517967Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:2052:3204] TxId: 281474976715672. Ctx: { TraceId: 01jqpf6e0j09cbpdc5ab41s0f7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NWVjZjVlZjQtMjRjYTY5ZWUtZDg1ZTY0NTQtNDY5YTExZWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [3:2055:3204], 2025-03-31T16:05:17.518980Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:2052:3204] TxId: 281474976715672. Ctx: { TraceId: 01jqpf6e0j09cbpdc5ab41s0f7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NWVjZjVlZjQtMjRjYTY5ZWUtZDg1ZTY0NTQtNDY5YTExZWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [3:2055:3204], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 727 Tasks { TaskId: 1 CpuTimeUs: 125 FinishTimeMs: 1743437117518 EgressBytes: 10 EgressRows: 1 ComputeCpuTimeUs: 23 BuildCpuTimeUs: 102 HostName: "ghrun-tuhoqst4xe" NodeId: 3 CreateTimeMs: 1743437117517 } MaxMemoryUsage: 1048576 } 2025-03-31T16:05:17.519119Z node 3 :KQP_EXECUTER INFO: TxId: 281474976715672. Ctx: { TraceId: 01jqpf6e0j09cbpdc5ab41s0f7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NWVjZjVlZjQtMjRjYTY5ZWUtZDg1ZTY0NTQtNDY5YTExZWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [3:2055:3204] 2025-03-31T16:05:17.519243Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:2052:3204] TxId: 281474976715672. Ctx: { TraceId: 01jqpf6e0j09cbpdc5ab41s0f7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NWVjZjVlZjQtMjRjYTY5ZWUtZDg1ZTY0NTQtNDY5YTExZWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Send Commit to BufferActor=[3:2051:3204] 2025-03-31T16:05:17.519320Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:2052:3204] TxId: 281474976715672. Ctx: { TraceId: 01jqpf6e0j09cbpdc5ab41s0f7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NWVjZjVlZjQtMjRjYTY5ZWUtZDg1ZTY0NTQtNDY5YTExZWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.000727s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-03-31T16:05:17.536941Z node 3 :KQP_COMPUTE WARN: SelfId: [3:2058:3204], Table: `/Root/table-1` ([72057594046644480:2:1]), SessionActorId: [3:2042:3204]Got OUT_OF_SPACE for table `/Root/table-1`. ShardID=72075186224037888, Sink=[3:2058:3204]. Ignored this error. 2025-03-31T16:05:17.537059Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:2051:3204], SessionActorId: [3:2042:3204], statusCode=OVERLOADED. Issue=
: Error: Tablet 72075186224037888 is out of space. Table `/Root/table-1`., code: 2006 . sessionActorId=[3:2042:3204]. isRollback=0 2025-03-31T16:05:17.537293Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NWVjZjVlZjQtMjRjYTY5ZWUtZDg1ZTY0NTQtNDY5YTExZWE=, ActorId: [3:2042:3204], ActorState: ExecuteState, TraceId: 01jqpf6e0j09cbpdc5ab41s0f7, got TEvKqpBuffer::TEvError in ExecuteState, status: OVERLOADED send to: [3:2052:3204] from: [3:2051:3204] 2025-03-31T16:05:17.537473Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:2052:3204] TxId: 281474976715672. Ctx: { TraceId: 01jqpf6e0j09cbpdc5ab41s0f7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NWVjZjVlZjQtMjRjYTY5ZWUtZDg1ZTY0NTQtNDY5YTExZWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Got EvAbortExecution, status: OVERLOADED, message: {
: Error: Tablet 72075186224037888 is out of space. Table `/Root/table-1`., code: 2006 } 2025-03-31T16:05:17.537562Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:2052:3204] TxId: 281474976715672. Ctx: { TraceId: 01jqpf6e0j09cbpdc5ab41s0f7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NWVjZjVlZjQtMjRjYTY5ZWUtZDg1ZTY0NTQtNDY5YTExZWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. OVERLOADED: {
: Error: Tablet 72075186224037888 is out of space. Table `/Root/table-1`., code: 2006 } 2025-03-31T16:05:17.537631Z node 3 :KQP_EXECUTER INFO: ActorId: [3:2052:3204] TxId: 281474976715672. Ctx: { TraceId: 01jqpf6e0j09cbpdc5ab41s0f7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NWVjZjVlZjQtMjRjYTY5ZWUtZDg1ZTY0NTQtNDY5YTExZWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. task: 1, does not have the CA id yet or is already complete 2025-03-31T16:05:17.537877Z node 3 :KQP_EXECUTER TRACE: ActorId: [3:2052:3204] TxId: 281474976715672. Ctx: { TraceId: 01jqpf6e0j09cbpdc5ab41s0f7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NWVjZjVlZjQtMjRjYTY5ZWUtZDg1ZTY0NTQtNDY5YTExZWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ReplyErrorAndDie. Response: Status: OVERLOADED Issues { message: "Tablet 72075186224037888 is out of space. Table `/Root/table-1`." issue_code: 2006 severity: 1 } Result { Stats { CpuTimeUs: 727 Stages { StageGuid: "64f690e3-b242d054-da19bfce-5de864be" Program: "(\n(return (lambda \'() (block \'(\n (let $1 (Just (Uint32 \'5)))\n (return (Iterator (AsList (AsStruct \'(\'\"key\" $1) \'(\'\"value\" $1)))))\n))))\n)\n" ComputeActors { CpuTimeUs: 727 Tasks { TaskId: 1 CpuTimeUs: 125 FinishTimeMs: 1743437117518 EgressBytes: 10 EgressRows: 1 ComputeCpuTimeUs: 23 BuildCpuTimeUs: 102 HostName: "ghrun-tuhoqst4xe" NodeId: 3 CreateTimeMs: 1743437117517 } MaxMemoryUsage: 1048576 } } } } , to ActorId: [3:2042:3204] 2025-03-31T16:05:17.537924Z node 3 :KQP_EXECUTER INFO: ActorId: [3:2052:3204] TxId: 281474976715672. Ctx: { TraceId: 01jqpf6e0j09cbpdc5ab41s0f7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NWVjZjVlZjQtMjRjYTY5ZWUtZDg1ZTY0NTQtNDY5YTExZWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shutdown immediately - nothing to wait 2025-03-31T16:05:17.538780Z node 3 :KQP_EXECUTER INFO: ActorId: [3:2052:3204] TxId: 281474976715672. Ctx: { TraceId: 01jqpf6e0j09cbpdc5ab41s0f7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NWVjZjVlZjQtMjRjYTY5ZWUtZDg1ZTY0NTQtNDY5YTExZWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Full stats: { CpuTimeUs: 3652 DurationUs: 1743437115971767 ExecuterCpuTimeUs: 2925 StartTimeMs: 1566 FinishTimeMs: 1743437117537 Stages { StageGuid: "64f690e3-b242d054-da19bfce-5de864be" Program: "(\n(return (lambda \'() (block \'(\n (let $1 (Just (Uint32 \'5)))\n (return (Iterator (AsList (AsStruct \'(\'\"key\" $1) \'(\'\"value\" $1)))))\n))))\n)\n" ComputeActors { CpuTimeUs: 727 Tasks { TaskId: 1 CpuTimeUs: 125 FinishTimeMs: 1743437117518 EgressBytes: 10 EgressRows: 1 ComputeCpuTimeUs: 23 BuildCpuTimeUs: 102 HostName: "ghrun-tuhoqst4xe" NodeId: 3 CreateTimeMs: 1743437117517 } MaxMemoryUsage: 1048576 } BaseTimeMs: 1743437117518 } TxPlansWithStats: "{\"Node Type\":\"Phase\",\"PlanNodeId\":3,\"Plans\":[{\"Node Type\":\"Sink\",\"PlanNodeId\":2,\"Plans\":[{\"Node Type\":\"ConstantExpr-Sink\",\"Operators\":[{\"Inputs\":[],\"Iterator\":\"[{key: 5,value: 5}]\",\"Name\":\"Iterator\"},{\"Inputs\":[],\"Name\":\"Upsert\",\"Path\":\"\\/Root\\/table-1\",\"SinkType\":\"KqpTableSink\",\"Table\":\"table-1\"}],\"PlanNodeId\":1,\"StageGuid\":\"64f690e3-b242d054-da19bfce-5de864be\",\"Stats\":{\"BaseTimeMs\":1743437117518,\"ComputeNodes\":[{\"CpuTimeUs\":727,\"Tasks\":[{\"ComputeTimeUs\":23,\"EgressBytes\":10,\"EgressRows\":1,\"FinishTimeMs\":1743437117518,\"Host\":\"ghrun-tuhoqst4xe\",\"NodeId\":3,\"TaskId\":1}]}],\"FinishedTasks\":0,\"PhysicalStageId\":0,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"},\"Tables\":[\"table-1\"]}],\"StageGuid\":\"\"}],\"StageGuid\":\"\"}" StatConvertBytes: 722 Extra { type_url: "type.googleapis.com/NKqpProto.TKqpExecutionExtraStats" value: "\010\001\022\013\010\327\005\020\327\005\030\327\005 \001" } } 2025-03-31T16:05:17.538856Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:2052:3204] TxId: 281474976715672. Ctx: { TraceId: 01jqpf6e0j09cbpdc5ab41s0f7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NWVjZjVlZjQtMjRjYTY5ZWUtZDg1ZTY0NTQtNDY5YTExZWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-03-31T16:05:17.538901Z node 3 :KQP_EXECUTER TRACE: ActorId: [3:2052:3204] TxId: 281474976715672. Ctx: { TraceId: 01jqpf6e0j09cbpdc5ab41s0f7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NWVjZjVlZjQtMjRjYTY5ZWUtZDg1ZTY0NTQtNDY5YTExZWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Terminate, become ZombieState 2025-03-31T16:05:17.539141Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NWVjZjVlZjQtMjRjYTY5ZWUtZDg1ZTY0NTQtNDY5YTExZWE=, ActorId: [3:2042:3204], ActorState: ExecuteState, TraceId: 01jqpf6e0j09cbpdc5ab41s0f7, Create QueryResponse for error on request, msg: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest >> GroupWriteTest::WithRead [GOOD] Test command err: RandomSeed# 8897266280100836265 2025-03-31T16:05:11.991730Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 3 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 3 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2025-03-31T16:05:12.016385Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 3 Generation# 1 recieved TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2025-03-31T16:05:12.016454Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 3 Generation# 1 going to send TEvBlock {TabletId# 3 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2025-03-31T16:05:12.018935Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 3 Generation# 1 recieved TEvBlockResult {Status# OK} 2025-03-31T16:05:12.036110Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 3 Generation# 2 going to send TEvCollectGarbage {TabletId# 3 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-03-31T16:05:12.039221Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 3 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 3 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2025-03-31T16:05:19.362104Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2025-03-31T16:05:19.362219Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: TabletId# 3 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 3 RecordGeneration# 2 PerGenerationCounter# 12 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-03-31T16:05:19.362284Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2025-03-31T16:05:19.362325Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: TabletId# 3 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 3 RecordGeneration# 2 PerGenerationCounter# 13 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-03-31T16:05:19.446750Z 1 00h01m10.010512s :BS_LOAD_TEST INFO: TabletId# 3 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 3 RecordGeneration# 2 PerGenerationCounter# 12 Channel# 0 Status# OK} 2025-03-31T16:05:19.446868Z 1 00h01m10.010512s :BS_LOAD_TEST INFO: TabletId# 3 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 3 RecordGeneration# 2 PerGenerationCounter# 13 Channel# 0 Status# OK} ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_AuthNotRequired [GOOD] Test command err: 2025-03-31T16:04:36.680836Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005227122203374:2288];send_to=[0:7307199536658146131:7762515]; 
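The GroupWriteTest::WithRead trace above follows the standard tablet bootstrap-and-shutdown sequence against distributed storage: TEvDiscover to learn the last state (NODATA here, so the tablet starts fresh), TEvBlock to fence the previous generation, a hard TEvCollectGarbage at CollectStep# 0 when the new generation starts, and a final hard collect at CollectStep# 4294967295 on shutdown. A compact sketch of that lifecycle over a toy storage facade (hypothetical interface; the real test drives DS proxy events):

#include <cstdint>
#include <iostream>

// Toy facade over the three storage operations the trace exercises.
struct Storage {
    uint32_t blockedGeneration = 0;
    void Block(uint32_t gen) { blockedGeneration = gen; }  // fence older generations
    void CollectGarbage(uint32_t gen, uint32_t step, bool hard) {
        std::cout << "collect gen=" << gen << " step=" << step
                  << (hard ? " hard" : "") << "\n";
    }
};

int main() {
    Storage ds;
    uint32_t gen = 1;            // TEvDiscoverResult was NODATA: start at generation 1
    ds.Block(gen);               // TEvBlock: no older generation may write anymore
    ++gen;                       // the tablet now runs as generation 2
    ds.CollectGarbage(gen, /*step=*/0, /*hard=*/true);           // on start
    // ... write/read load runs here ...
    ds.CollectGarbage(gen, /*step=*/0xFFFFFFFFu, /*hard=*/true); // on shutdown
}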
2025-03-31T16:04:36.680963Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000df2/r3tmp/tmpbbB7kr/pdisk_1.dat 2025-03-31T16:04:37.622015Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:37.622119Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:37.633052Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:04:37.637941Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:37.726470Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 25390, node 1 2025-03-31T16:04:37.979672Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:37.979697Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:37.979704Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:37.979893Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5878 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:38.795310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
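KqpErrors::ProposeErrorEvWrite above ends with the executer replying OVERLOADED ("Tablet 72075186224037888 is out of space", issue code 2006). OVERLOADED is conventionally treated as a retryable status on the client side, typically with exponential backoff so the overloaded shard is not hammered. A minimal retry loop under that assumption (the Status enum and the executeUpsert callable are hypothetical stand-ins for the real query call):

#include <chrono>
#include <functional>
#include <thread>

enum class Status { Success, Overloaded, Error };

Status ExecuteWithRetry(const std::function<Status()>& op,
                        int maxAttempts = 5) {
    auto delay = std::chrono::milliseconds(50);
    for (int attempt = 1; attempt <= maxAttempts; ++attempt) {
        Status st = op();
        if (st != Status::Overloaded) {
            return st;  // Success or a non-retryable error
        }
        std::this_thread::sleep_for(delay);
        delay *= 2;  // back off before hitting the overloaded shard again
    }
    return Status::Overloaded;  // still overloaded after all attempts
}

int main() {
    // executeUpsert stands in for the real query call.
    auto executeUpsert = [] { return Status::Overloaded; };
    (void)ExecuteWithRetry(executeUpsert);
}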
2025-03-31T16:04:38.829608Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:04:38.902583Z node 1 :TX_PROXY ERROR: Actor# [1:7488005235712138713:2619] txid# 281474976715658, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } 2025-03-31T16:04:44.739875Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488005260984665924:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:44.739920Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000df2/r3tmp/tmpqF5H9B/pdisk_1.dat 2025-03-31T16:04:45.307314Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:45.420576Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:45.420664Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:45.430188Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62644, node 4 2025-03-31T16:04:45.799539Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:45.799559Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:45.799565Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:45.799706Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20794 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:46.410103Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:04:49.748756Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7488005260984665924:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:49.748846Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:51.334690Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488005291049438139:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:51.334795Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:51.335400Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488005291049438151:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:51.340376Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:04:51.371393Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7488005291049438153:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:04:51.466302Z node 4 :TX_PROXY ERROR: Actor# [4:7488005291049438222:2695] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:04:54.551846Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7488005302392433292:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:54.555565Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000df2/r3tmp/tmpw85v6y/pdisk_1.dat 2025-03-31T16:04:54.912313Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:54.983861Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:54.983992Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:55.000922Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4655, node 7 2025-03-31T16:04:55.164597Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:55.164626Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:55.164633Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:55.164798Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16853 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:55.537871Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:04:59.547765Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7488005302392433292:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:59.547861Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:05:01.451339Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488005332457205513:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: ... G: Can't update SecurityState for /Root - no PublicKeys 2025-03-31T16:05:08.743257Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-03-31T16:05:08.743298Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-03-31T16:05:08.775659Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-03-31T16:05:08.775775Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-03-31T16:05:08.775784Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-03-31T16:05:08.775819Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-03-31T16:05:08.778088Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7488005365087975016:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-31T16:05:08.843613Z node 10 :TX_PROXY ERROR: Actor# [10:7488005365087975088:2805] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:05:09.006827Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7488005347908104588:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:05:09.006911Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:05:09.177581Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jqpf65fh0542jvnkvqxfyjar, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NWI3NjQzOTMtNmM2YmRhYzUtZjdkNWFkY2ItNjg2YjQ3MjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:05:09.207929Z node 10 :GRPC_SERVER DEBUG: Got grpc request# ReadTableRequest, traceId# 01jqpf65yqc9dc3ekjja0qs1e8, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:52988, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-03-31T16:05:09.208529Z node 10 :READ_TABLE_API NOTICE: [10:7488005369382942450:2353] Finish grpc stream, status: 400010 2025-03-31T16:05:09.211578Z node 10 :GRPC_SERVER DEBUG: Got grpc request# ReadTableRequest, traceId# 01jqpf65yvekbbd8c5c2jr65qv, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:52988, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-03-31T16:05:09.259457Z node 10 :READ_TABLE_API DEBUG: [10:7488005369382942451:2354] Adding quota request to queue ShardId: 0, TxId: 281474976710662 2025-03-31T16:05:09.259519Z node 10 :READ_TABLE_API DEBUG: [10:7488005369382942451:2354] Assign stream quota to Shard 0, Quota 5, TxId 281474976710662 Reserved: 5 of 25, Queued: 0 2025-03-31T16:05:09.260970Z node 10 :READ_TABLE_API DEBUG: [10:7488005369382942451:2354] got stream part, size: 246, RU required: 128 rate limiter absent 2025-03-31T16:05:09.261377Z node 10 :READ_TABLE_API DEBUG: [10:7488005369382942451:2354] Starting inactivity timer for 600.000000s with tag 3 2025-03-31T16:05:09.267475Z node 10 :READ_TABLE_API NOTICE: [10:7488005369382942451:2354] Finish grpc stream, status: 400000 2025-03-31T16:05:09.273549Z node 10 :GRPC_SERVER DEBUG: Got grpc request# ReadTableRequest, traceId# 01jqpf660se1zdvwykdmp0mfr4, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:52988, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-03-31T16:05:09.300880Z node 10 :READ_TABLE_API DEBUG: [10:7488005369382942475:2356] Adding quota request to queue ShardId: 0, TxId: 281474976710664 2025-03-31T16:05:09.300929Z node 10 :READ_TABLE_API DEBUG: [10:7488005369382942475:2356] Assign stream quota to Shard 0, Quota 5, TxId 281474976710664 Reserved: 5 of 25, Queued: 0 2025-03-31T16:05:09.302213Z node 10 :READ_TABLE_API DEBUG: [10:7488005369382942475:2356] got stream part, size: 84, RU required: 128 rate limiter absent 2025-03-31T16:05:09.302605Z node 10 :READ_TABLE_API DEBUG: [10:7488005369382942475:2356] Starting inactivity timer for 600.000000s with tag 3 
2025-03-31T16:05:09.373329Z node 10 :READ_TABLE_API NOTICE: [10:7488005369382942475:2356] Finish grpc stream, status: 400000 2025-03-31T16:05:09.376317Z node 10 :GRPC_SERVER DEBUG: Got grpc request# ReadTableRequest, traceId# 01jqpf66400kma7dsz79qvd8rd, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:52988, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-03-31T16:05:09.394554Z node 10 :READ_TABLE_API DEBUG: [10:7488005369382942498:2358] Adding quota request to queue ShardId: 0, TxId: 281474976710666 2025-03-31T16:05:09.394596Z node 10 :READ_TABLE_API DEBUG: [10:7488005369382942498:2358] Assign stream quota to Shard 0, Quota 5, TxId 281474976710666 Reserved: 5 of 25, Queued: 0 2025-03-31T16:05:09.395826Z node 10 :READ_TABLE_API DEBUG: [10:7488005369382942498:2358] got stream part, size: 210, RU required: 128 rate limiter absent 2025-03-31T16:05:09.396248Z node 10 :READ_TABLE_API DEBUG: [10:7488005369382942498:2358] Starting inactivity timer for 600.000000s with tag 3 2025-03-31T16:05:09.435351Z node 10 :READ_TABLE_API NOTICE: [10:7488005369382942498:2358] Finish grpc stream, status: 400000 2025-03-31T16:05:09.438677Z node 10 :GRPC_SERVER DEBUG: [0x51a000044480] received request Name# SchemeOperation ok# false data# peer# current inflight# 0 2025-03-31T16:05:09.438987Z node 10 :GRPC_SERVER DEBUG: [0x51a000008a80] received request Name# SchemeOperationStatus ok# false data# peer# current inflight# 0 2025-03-31T16:05:09.439226Z node 10 :GRPC_SERVER DEBUG: [0x51a0000ef480] received request Name# SchemeDescribe ok# false data# peer# current inflight# 0 2025-03-31T16:05:09.439504Z node 10 :GRPC_SERVER DEBUG: [0x51a000007e80] received request Name# ChooseProxy ok# false data# peer# current inflight# 0 2025-03-31T16:05:09.439735Z node 10 :GRPC_SERVER DEBUG: [0x51a00006fc80] received request Name# PersQueueRequest ok# false data# peer# current inflight# 0 2025-03-31T16:05:09.439954Z node 10 :GRPC_SERVER DEBUG: [0x51a00006f680] received request Name# SchemeInitRoot ok# false data# peer# current inflight# 0 2025-03-31T16:05:09.440182Z node 10 :GRPC_SERVER DEBUG: [0x51a0000abc80] received request Name# ResolveNode ok# false data# peer# current inflight# 0 2025-03-31T16:05:09.440390Z node 10 :GRPC_SERVER DEBUG: [0x51a000004e80] received request Name# FillNode ok# false data# peer# current inflight# 0 2025-03-31T16:05:09.440593Z node 10 :GRPC_SERVER DEBUG: [0x51a000004280] received request Name# DrainNode ok# false data# peer# current inflight# 0 2025-03-31T16:05:09.440873Z node 10 :GRPC_SERVER DEBUG: [0x51a00003d280] received request Name# BlobStorageConfig ok# false data# peer# current inflight# 0 2025-03-31T16:05:09.441107Z node 10 :GRPC_SERVER DEBUG: [0x51a00003d880] received request Name# HiveCreateTablet ok# false data# peer# current inflight# 0 2025-03-31T16:05:09.441310Z node 10 :GRPC_SERVER DEBUG: [0x51a0000ace80] received request Name# TestShardControl ok# false data# peer# current inflight# 0 2025-03-31T16:05:09.441540Z node 10 :GRPC_SERVER DEBUG: [0x51a0000d6880] received request Name# RegisterNode ok# false data# peer# current inflight# 0 2025-03-31T16:05:09.441768Z node 10 :GRPC_SERVER DEBUG: [0x51a000091e80] received request Name# CmsRequest ok# false data# peer# current inflight# 0 2025-03-31T16:05:09.441986Z node 10 :GRPC_SERVER DEBUG: [0x51a000048c80] received request Name# ConsoleRequest ok# false data# peer# current inflight# 0 2025-03-31T16:05:09.442193Z node 10 :GRPC_SERVER DEBUG: [0x51a000002a80] received request Name# 
InterconnectDebug ok# false data# peer# current inflight# 0 2025-03-31T16:05:09.442423Z node 10 :GRPC_SERVER DEBUG: [0x51a00003de80] received request Name# TabletStateRequest ok# false data# peer# current inflight# 0 2025-03-31T16:05:11.933785Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7488005377292133570:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:05:11.933880Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000df2/r3tmp/tmp4un3z4/pdisk_1.dat 2025-03-31T16:05:12.228864Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:05:12.274705Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:05:12.274825Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:05:12.279477Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10150, node 13 2025-03-31T16:05:12.476669Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:05:12.476705Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:05:12.476717Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:05:12.476909Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1063 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:05:12.882234Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:05:13.067203Z node 13 :TICKET_PARSER DEBUG: Ticket A76545E297896BE668FFFCBBE3E86751F895EB313A793EF53EE423F5DFB0CBB3 (ipv6:[::1]:50692) has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2025-03-31T16:05:13.284945Z node 13 :TICKET_PARSER ERROR: Ticket **** (717F937C): Unknown token 2025-03-31T16:05:13.350035Z node 13 :TICKET_PARSER DEBUG: Ticket 5E61A1B9BE61371AACEBCD29542060902AE2B4ACD4483F68D4EDB79B1552F3DB (ipv6:[::1]:50724) has now permanent error message 'Cannot create token from certificate. 
Client certificate failed verification' 2025-03-31T16:05:13.350861Z node 13 :TICKET_PARSER ERROR: Ticket 5E61A1B9BE61371AACEBCD29542060902AE2B4ACD4483F68D4EDB79B1552F3DB: Cannot create token from certificate. Client certificate failed verification ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest >> TNetClassifierTest::TestInitFromRemoteSource [GOOD] Test command err: 2025-03-31T16:05:11.971621Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005374566243945:2193];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:05:11.971674Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a7c/r3tmp/tmp351oWw/pdisk_1.dat 2025-03-31T16:05:12.465596Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:05:12.474296Z node 1 :HTTP ERROR: (#18,[::1]:31714) connection closed with error: Connection refused 2025-03-31T16:05:12.480610Z node 1 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-03-31T16:05:12.482796Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:05:12.482885Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:05:12.488272Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:05:12.520038Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:05:12.520055Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:05:12.520068Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:05:12.520186Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbScripting::MultiResults [GOOD] Test command err: 2025-03-31T16:04:37.787804Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005229017639230:2272];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:37.787970Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000de4/r3tmp/tmp3kBCvt/pdisk_1.dat 2025-03-31T16:04:38.826979Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:04:38.827040Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:38.838153Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:38.838241Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:38.852916Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12150, node 1 2025-03-31T16:04:39.158521Z node 1 :NET_CLASSIFIER WARN: 
distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:39.158541Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:39.158553Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:39.164757Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3942 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:39.691194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:42.795300Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488005229017639230:2272];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:42.795365Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:44.075052Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:04:45.288793Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005263377380499:2447], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:45.288887Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005263377380491:2444], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:45.289236Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:45.293239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-31T16:04:45.352888Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005263377380505:2448], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-31T16:04:45.429278Z node 1 :TX_PROXY ERROR: Actor# [1:7488005263377380603:4162] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:04:46.162028Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jqpf5ek66ttkrkpd7fja7cpk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzA1YWZjNjgtNzk3YWQwMTMtZTNlMzM0MjItYTAzNDdmMjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS 2025-03-31T16:04:49.209160Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488005282473724836:2211];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:49.209316Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000de4/r3tmp/tmpEROKAW/pdisk_1.dat 2025-03-31T16:04:49.643767Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:49.725684Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:49.725765Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:49.734083Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11027, node 4 2025-03-31T16:04:50.036031Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:50.036076Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:50.036084Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:50.036218Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22635 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:50.595706Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:04:54.150831Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7488005282473724836:2211];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:54.150905Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:55.123197Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:04:56.305253Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488005312538498848:2445], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:56.305324Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488005312538498858:2448], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:56.305428Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:56.309262Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-31T16:04:56.363010Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7488005312538498862:2449], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-31T16:04:56.434508Z node 4 :TX_PROXY ERROR: Actor# [4:7488005312538498947:4170] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:04:56.745474Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqpf5sbf5mz3j94wtzmjmesh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=YmZjYTE2ZjYtNmRjYjM0YjItOGMzZGExZTktMTRiZDQ5ODQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:04:56.950588Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:22635 2025-03-31T16:05:01.923714Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7488005333828974959:2090];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:05:01.923838Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot de ... IFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:05:10.312076Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:05:10.312085Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:05:10.312222Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26727 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:05:10.507715Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:05:13.657628Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7488005386201694792:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:05:13.657741Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:05:13.746535Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:05:13.864962Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7488005386201694957:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:05:13.865190Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7488005386201694962:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:05:13.867137Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:05:13.870153Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-31T16:05:13.894243Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7488005386201694964:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-31T16:05:13.972555Z node 10 :TX_PROXY ERROR: Actor# [10:7488005386201695045:2797] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:05:14.019857Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqpf67b9dg556rb6s4g9amrs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ODQ3NWNjNzgtYmY5ZTFhY2QtN2FkM2UxMmUtYjVlMGRlYjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:05:14.100590Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqpf6anebenps5qrfc305caf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NWY1MjQwYmYtNmFmMDk3MWQtOTNjZWM2NmItNjFhNTM2ZWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:05:14.106911Z node 10 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437114139, txId: 281474976715662] shutting down 2025-03-31T16:05:15.577564Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7488005391664690674:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:05:15.577604Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000de4/r3tmp/tmpZr6Xdl/pdisk_1.dat 2025-03-31T16:05:15.693195Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:05:15.729371Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:05:15.729467Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:05:15.732590Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17302, node 13 2025-03-31T16:05:15.788729Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:05:15.788753Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:05:15.788761Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:05:15.788883Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26080 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:05:16.096915Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:05:18.942501Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7488005404549593592:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:05:18.942621Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:05:19.245866Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:05:19.336288Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7488005408844561066:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:05:19.336384Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:05:19.336701Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7488005408844561071:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:05:19.340018Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-31T16:05:19.368074Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7488005408844561073:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-31T16:05:19.447347Z node 13 :TX_PROXY ERROR: Actor# [13:7488005408844561150:2810] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:05:19.498024Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqpf6crg5hrj51se14awccqj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=ZmNlNzk3ZjYtM2Q1YWVhODctYjJlOGNlOTAtOGIxZmUwYzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:05:19.639691Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqpf6crg5hrj51se14awccqj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=YTgyOWY2ZmYtNTY0NTc4ZjItZGM3ZWIzNWEtMTBlMTA4MWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:05:19.700139Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqpf6crg5hrj51se14awccqj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=NzVjMjcxYTUtZDU3ZmVlMDItMTM2YWM2MjAtYmE0NDhjZGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:05:19.861741Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jqpf6crg5hrj51se14awccqj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=OTg0NzQ2NDItZjZmYzAzZjctNWU5NmMzMGEtOTMxZTRhNmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::CheckDefaultTableSettings3 [GOOD] Test command err: test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000de9/r3tmp/tmpqrLipC/pdisk_1.dat TServer::EnableGrpc on GrpcPort 10204, node 1 TClient is connected to server localhost:20545 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-03-31T16:04:45.829710Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488005266445475850:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:45.831238Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000de9/r3tmp/tmpCSn2NW/pdisk_1.dat 2025-03-31T16:04:46.244087Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:46.285020Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:46.285107Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:46.308029Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 65102, node 4 2025-03-31T16:04:46.458368Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:46.458389Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:46.458397Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:46.458526Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31742 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:46.949000Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:50.815249Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7488005266445475850:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:50.815344Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:51.533517Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488005292215280711:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:51.533639Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:52.217106Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:04:52.602303Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488005296510248201:2354], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:52.602422Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:52.603180Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488005296510248206:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:52.609094Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-31T16:04:52.679267Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7488005296510248208:2358], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-31T16:04:52.737999Z node 4 :TX_PROXY ERROR: Actor# [4:7488005296510248296:2821] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:04:52.868737Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jqpf5nqsa47k78b48xg6ptm7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NDM2YzY4Y2QtYzc3MTI3MDgtMmYzOWYxMjItMmY5ZDQ5M2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:04:52.986926Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-03-31T16:04:53.173118Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-03-31T16:04:53.518590Z node 4 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:04:55.868902Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7488005305697636106:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:55.871536Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000de9/r3tmp/tmpIwf72F/pdisk_1.dat 2025-03-31T16:04:56.355598Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:56.409099Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:56.409193Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:56.415031Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12522, node 7 2025-03-31T16:04:56.599848Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:56.599875Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:56.599906Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:56.600092Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14113 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:57.240255Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:05:00.836249Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7488005305697636106:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:05:00.836330Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:05:01.463724Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:05:04.740617Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7488005345994090142:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:05:04.758740Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000de9/r3tmp/tmpqPpy9z/pdisk_1.dat 2025-03-31T16:05:05.004692Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10999, node 10 2025-03-31T16:05:05.074925Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:05:05.075066Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:05:05.100199Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:05:05.119843Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:05:05.119867Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:05:05.119879Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:05:05.120087Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4030 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:05:05.913722Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:05:05.945243Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:05:08.869271Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000de9/r3tmp/tmpRH4ebl/pdisk_1.dat 2025-03-31T16:05:10.862509Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:05:10.939514Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19747, node 13 2025-03-31T16:05:11.064746Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:05:11.064860Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:05:11.164585Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:05:11.198221Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:05:11.198249Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:05:11.198263Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:05:11.198442Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7752 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-31T16:05:11.571844Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:05:14.463613Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
>> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantMultipleTenants
>> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantMultipleTenants [GOOD]
>> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantDomain
>> TConsoleConfigSubscriptionTests::TestNotificationForRemovedConfigItem
>> TConsoleConfigSubscriptionTests::TestNotificationForRemovedConfigItem [GOOD]
>> TConsoleConfigSubscriptionTests::TestNotificationForRestartedClient
>> TConsoleConfigTests::TestDryRun
>> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantDomain [GOOD]
>> TConsoleConfigTests::TestDryRun [GOOD]
>> TConsoleInMemoryConfigSubscriptionTests::TestNoYamlWithoutFlag
>> TConsoleConfigHelpersTests::TestConfigSubscriptionEraser
>> TConsoleConfigHelpersTests::TestConfigSubscriptionEraser [GOOD]
>> FeatureFlagsConfiguratorTest::TestFeatureFlagsUpdates
>> FeatureFlagsConfiguratorTest::TestFeatureFlagsUpdates [GOOD]
>> TConsoleInMemoryConfigSubscriptionTests::TestNoYamlWithoutFlag [GOOD]
>> TConsoleInMemoryConfigSubscriptionTests::TestConsoleRestart
>> TConsoleConfigSubscriptionTests::TestNotificationForRestartedClient [GOOD]
>> TConsoleConfigSubscriptionTests::TestNotificationForTimeoutedNotificationResponse
>> TConsoleInMemoryConfigSubscriptionTests::TestConsoleRestart [GOOD]
>> TConsoleInMemoryConfigSubscriptionTests::TestConsoleRestartSimplified
>> TConsoleInMemoryConfigSubscriptionTests::TestConsoleRestartSimplified [GOOD]
>> TConsoleInMemoryConfigSubscriptionTests::TestComplexYamlConfigChanges
>> TConsoleInMemoryConfigSubscriptionTests::TestComplexYamlConfigChanges [GOOD]
>> TConsoleInMemoryConfigSubscriptionTests::TestNoYamlResend
>> TConsoleInMemoryConfigSubscriptionTests::TestNoYamlResend [GOOD]
>> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApply
>> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApply [GOOD]
>> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApplyWithDb
>> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApplyWithDb [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >>
BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobalVectorKmeansTree [GOOD] Test command err:
2025-03-31T16:04:19.720248Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005153967205428:2148];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:04:19.721320Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0010b4/r3tmp/tmpBcYuHC/pdisk_1.dat
2025-03-31T16:04:20.664638Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:04:20.664715Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:04:20.699640Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-31T16:04:20.723359Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:04:20.731360Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
TServer::EnableGrpc on GrpcPort 24030, node 1
2025-03-31T16:04:21.050285Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T16:04:21.050309Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T16:04:21.050316Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T16:04:21.055828Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:7591
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-31T16:04:21.791784Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:04:24.719160Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488005153967205428:2148];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:24.719223Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:25.233776Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005179737010197:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:25.233902Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:25.234156Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005179737010209:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:25.234445Z node 1 :TX_PROXY DEBUG: actor# [1:7488005153967205575:2141] Handle TEvProposeTransaction 2025-03-31T16:04:25.234477Z node 1 :TX_PROXY DEBUG: actor# [1:7488005153967205575:2141] TxId# 281474976710658 ProcessProposeTransaction 2025-03-31T16:04:25.234518Z node 1 :TX_PROXY DEBUG: actor# [1:7488005153967205575:2141] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7488005179737010212:2641] 2025-03-31T16:04:25.336876Z node 1 :TX_PROXY DEBUG: Actor# [1:7488005179737010212:2641] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/Root" 2025-03-31T16:04:25.337118Z node 1 :TX_PROXY DEBUG: Actor# [1:7488005179737010212:2641] txid# 281474976710658 Bootstrap, UserSID: metadata@system CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T16:04:25.337140Z node 1 :TX_PROXY DEBUG: Actor# [1:7488005179737010212:2641] txid# 281474976710658 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 1 2025-03-31T16:04:25.339271Z node 1 :TX_PROXY DEBUG: Actor# [1:7488005179737010212:2641] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-03-31T16:04:25.339368Z node 1 :TX_PROXY DEBUG: Actor# [1:7488005179737010212:2641] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:04:25.339570Z node 1 :TX_PROXY DEBUG: Actor# [1:7488005179737010212:2641] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:04:25.339743Z node 1 :TX_PROXY DEBUG: Actor# [1:7488005179737010212:2641] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T16:04:25.339792Z node 1 :TX_PROXY DEBUG: Actor# [1:7488005179737010212:2641] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-03-31T16:04:25.340268Z node 1 :TX_PROXY DEBUG: Actor# [1:7488005179737010212:2641] txid# 281474976710658 HANDLE EvClientConnected 2025-03-31T16:04:25.341672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:04:25.348477Z node 1 :TX_PROXY DEBUG: Actor# [1:7488005179737010212:2641] txid# 
281474976710658 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710658} 2025-03-31T16:04:25.348529Z node 1 :TX_PROXY DEBUG: Actor# [1:7488005179737010212:2641] txid# 281474976710658 SEND to# [1:7488005179737010211:2346] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 53} 2025-03-31T16:04:25.368579Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005179737010211:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:04:25.449615Z node 1 :TX_PROXY DEBUG: actor# [1:7488005153967205575:2141] Handle TEvProposeTransaction 2025-03-31T16:04:25.449644Z node 1 :TX_PROXY DEBUG: actor# [1:7488005153967205575:2141] TxId# 281474976710659 ProcessProposeTransaction 2025-03-31T16:04:25.449716Z node 1 :TX_PROXY DEBUG: actor# [1:7488005153967205575:2141] Cookie# 0 userReqId# "" txid# 281474976710659 SEND to# [1:7488005179737010286:2695] 2025-03-31T16:04:25.452193Z node 1 :TX_PROXY DEBUG: Actor# [1:7488005179737010286:2695] txid# 281474976710659 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/Root" 2025-03-31T16:04:25.452247Z node 1 :TX_PROXY DEBUG: Actor# [1:7488005179737010286:2695] txid# 281474976710659 Bootstrap, UserSID: metadata@system CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T16:04:25.452260Z node 1 :TX_PROXY DEBUG: Actor# [1:7488005179737010286:2695] txid# 281474976710659 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 1 2025-03-31T16:04:25.452793Z node 1 :TX_PROXY DEBUG: Actor# [1:7488005179737010286:2695] txid# 281474976710659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-03-31T16:04:25.452853Z node 1 :TX_PROXY DEBUG: Actor# [1:7488005179737010286:2695] txid# 281474976710659 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:04:25.453019Z node 1 :TX_PROXY DEBUG: Actor# [1:7488005179737010286:2695] txid# 281474976710659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:04:25.453130Z node 1 :TX_PROXY DEBUG: Actor# [1:7488005179737010286:2695] HANDLE EvNavigateKeySetResult, txid# 281474976710659 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T16:04:25.453167Z node 1 :TX_PROXY DEBUG: Actor# [1:7488005179737010286:2695] txid# 281474976710659 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710659 TabletId# 72057594046644480} 2025-03-31T16:04:25.453285Z node 1 :TX_PROXY DEBUG: Actor# [1:7488005179737010286:2695] txid# 281474976710659 HANDLE EvClientConnected 2025-03-31T16:04:25.456932Z node 1 :TX_PROXY DEBUG: Actor# [1:7488005179737010286:2695] txid# 281474976710659 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710659 Reason# Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts 
it (id: [OwnerId: 7205759404664448 ... node 13 :IMPORT DEBUG: TImport::TTxProgress: OnNotifyResult: txId# 281474976715762 2025-03-31T16:05:14.097030Z node 13 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-03-31T16:05:14.574875Z node 13 :TX_PROXY DEBUG: [GetImport] [13:7488005388805809834:2428] [0] Resolve database: name# /Root 2025-03-31T16:05:14.575384Z node 13 :TX_PROXY DEBUG: [GetImport] [13:7488005388805809834:2428] [0] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-31T16:05:14.575410Z node 13 :TX_PROXY DEBUG: [GetImport] [13:7488005388805809834:2428] [0] Send request: schemeShardId# 72057594046644480 2025-03-31T16:05:14.576184Z node 13 :TX_PROXY DEBUG: [GetImport] [13:7488005388805809834:2428] [0] Handle TEvImport::TEvGetImportResponse: record# Entry { Id: 281474976710661 Status: SUCCESS Progress: PROGRESS_DONE ImportFromS3Settings { endpoint: "localhost:64481" scheme: HTTP bucket: "test_bucket" items { source_prefix: "table" destination_path: "/Root/table" } } StartTime { seconds: 1743437113 } EndTime { seconds: 1743437114 } } 2025-03-31T16:05:14.590398Z node 13 :TX_PROXY DEBUG: actor# [13:7488005358741035244:2110] Handle TEvNavigate describe path /Root/table 2025-03-31T16:05:14.590459Z node 13 :TX_PROXY DEBUG: Actor# [13:7488005388805809840:4617] HANDLE EvNavigateScheme /Root/table 2025-03-31T16:05:14.590815Z node 13 :TX_PROXY DEBUG: Actor# [13:7488005388805809840:4617] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-31T16:05:14.590935Z node 13 :TX_PROXY DEBUG: Actor# [13:7488005388805809840:4617] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/Root/table" Options { ShowPrivateTable: false } 2025-03-31T16:05:14.592803Z node 13 :TX_PROXY DEBUG: Actor# [13:7488005388805809840:4617] Handle TEvDescribeSchemeResult Forward to# [13:7488005388805809838:2429] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/table" PathDescription { Self { Name: "table" PathId: 8 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715760 CreateStep: 1743437113467 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "table" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Group" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "String" TypeId: 4097 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 
InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 0 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } } TableIndexes { Name: "value_idx" LocalPathId: 9 Type: EIndexTypeGlobalVectorKmeansTree State: EIndexStateReady KeyColumnNames: "Value" SchemaVersion: 2 PathOwnerId: 72057594046644480 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 
10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } } } IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } } } 
VectorIndexKmeansTreeDescription { Settings { settings { metric: SIMILARITY_INNER_PRODUCT vector_type: VECTOR_TYPE_FLOAT vector_dimension: 768 } clusters: 80 levels: 2 } } } TableSchemaVersion: 3 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 8 PathOwnerId: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut/unittest >> FeatureFlagsConfiguratorTest::TestFeatureFlagsUpdates [GOOD] Test command err: 2025-03-31T16:04:56.669527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:04:56.669615Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:56.770560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-31T16:04:58.289818Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:04:58.289906Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:58.338452Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-31T16:04:59.589042Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:04:59.589119Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:59.642908Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-31T16:05:01.344290Z node 4 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:05:01.344367Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:05:01.404522Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-31T16:05:02.983833Z node 5 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:05:02.983907Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:05:03.036654Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 
2025-03-31T16:05:13.338239Z node 15 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-31T16:05:13.338322Z node 15 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:05:13.403818Z node 15 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-03-31T16:05:14.578920Z node 16 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-31T16:05:14.579007Z node 16 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:05:14.639310Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-03-31T16:05:19.046911Z node 21 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-31T16:05:19.046999Z node 21 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:05:19.095906Z node 21 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-03-31T16:05:20.232986Z node 22 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-31T16:05:20.233081Z node 22 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:05:20.280635Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-03-31T16:05:21.538153Z node 23 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-31T16:05:21.538247Z node 23 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:05:21.582683Z node 23 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-03-31T16:05:22.662585Z node 24 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-31T16:05:22.662658Z node 24 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:05:22.706871Z node 24 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
>> TConsoleTests::TestAlterTenantModifyStorageResourcesForRunning [GOOD]
>> TConsoleTests::TestAlterTenantModifyStorageResourcesForRunningExtSubdomain
------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut/unittest >> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApplyWithDb [GOOD] Test command err:
2025-03-31T16:04:57.374474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-31T16:04:57.374555Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:04:57.446167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-03-31T16:04:59.676729Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-31T16:04:59.676795Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:04:59.746767Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-03-31T16:05:01.728131Z node 3 :FLAT_TX_SCHEMESHARD
WARN: Cannot subscribe to console configs 2025-03-31T16:05:01.728214Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:05:01.820908Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-31T16:05:03.266802Z node 4 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:05:03.266880Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:05:03.314873Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-31T16:05:04.908194Z node 5 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:05:04.908285Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:05:04.960490Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-31T16:05:06.572205Z node 6 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:05:06.572285Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:05:06.653751Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-31T16:05:07.968564Z node 7 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:05:07.968623Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:05:08.026998Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-31T16:05:09.432915Z node 8 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:05:09.432997Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:05:09.503272Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-31T16:05:10.707519Z node 9 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:05:10.707600Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:05:10.750522Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-31T16:05:11.905687Z node 10 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:05:11.905776Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:05:11.966719Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-31T16:05:13.303953Z node 11 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:05:13.304054Z node 11 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:05:13.355754Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-31T16:05:14.457087Z node 12 :FLAT_TX_SCHEMESHARD WARN: Cannot 
subscribe to console configs 2025-03-31T16:05:14.457166Z node 12 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:05:14.501674Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-31T16:05:15.735620Z node 13 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:05:15.735702Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:05:15.783167Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-31T16:05:16.911624Z node 14 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:05:16.911700Z node 14 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:05:16.974170Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-31T16:05:18.445631Z node 16 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:05:18.445726Z node 16 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:05:18.500243Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-31T16:05:20.031564Z node 18 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:05:20.031645Z node 18 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:05:20.096747Z node 18 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-31T16:05:21.572860Z node 20 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:05:21.572959Z node 20 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:05:21.613324Z node 20 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-31T16:05:23.058073Z node 22 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:05:23.058139Z node 22 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:05:23.098713Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-31T16:05:24.227713Z node 23 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:05:24.227843Z node 23 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:05:24.273456Z node 23 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-31T16:05:24.751846Z node 23 :BS_CONTROLLER ERROR: {BSC26@console_interaction.cpp:113} failed to parse config obtained from Console ErrorReason# ydb/library/yaml_config/yaml_config_parser.cpp:1268: Condition violated: `config.HasDomainsConfig()' Yaml# --- metadata: kind: MainConfig cluster: "" version: 1 config: log_config: cluster_name: cluster1 allowed_labels: test: type: enum values: ? 
true selector_config: [] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp_errors/unittest >> KqpErrors::ProposeResultLost_RwTx-UseSink [GOOD] Test command err: 2025-03-31T16:04:58.074248Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:698:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:04:58.074708Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:04:58.074916Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:04:58.084901Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:695:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:04:58.085345Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:04:58.085510Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001c96/r3tmp/tmpT1Ovo9/pdisk_1.dat 2025-03-31T16:04:59.422150Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:59.760944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:04:59.906447Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:59.906591Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:59.923263Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:59.923386Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:59.942401Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:04:59.943115Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:04:59.943601Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:05:00.309602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:05:01.874536Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1598:2960], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:05:01.874680Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1608:2965], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:05:01.874782Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:05:01.922400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:05:02.675873Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1612:2968], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:05:02.913646Z node 1 :TX_PROXY ERROR: Actor# [1:1766:3055] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:05:03.345758Z node 1 :KQP_EXECUTER TRACE: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqpf5yrt50rqh3qnwnk9db83, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjAyNmJmMzUtNjBjOWM0MGItOTkyNjk3MTYtYzM5OGI2Y2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Bootstrap done, become ReadyState 2025-03-31T16:05:03.346150Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1792:2958] TxId: 281474976715660. Ctx: { TraceId: 01jqpf5yrt50rqh3qnwnk9db83, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjAyNmJmMzUtNjBjOWM0MGItOTkyNjk3MTYtYzM5OGI2Y2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Executing physical tx, type: 2, stages: 1 2025-03-31T16:05:03.346389Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1792:2958] TxId: 281474976715660. Ctx: { TraceId: 01jqpf5yrt50rqh3qnwnk9db83, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjAyNmJmMzUtNjBjOWM0MGItOTkyNjk3MTYtYzM5OGI2Y2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Got request, become WaitResolveState 2025-03-31T16:05:03.346623Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2025-03-31T16:05:03.346889Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715660. Resolved key sets: 1 2025-03-31T16:05:03.347073Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715660. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 4 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2025-03-31T16:05:03.347245Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1792:2958] TxId: 281474976715660. Ctx: { TraceId: 01jqpf5yrt50rqh3qnwnk9db83, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjAyNmJmMzUtNjBjOWM0MGItOTkyNjk3MTYtYzM5OGI2Y2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Stage [0,0] AST: ( (return (lambda '() (block '( (let $1 (Just (Uint32 '1))) (let $2 (Just (Uint32 '2))) (let $3 (Just (Uint32 '3))) (return (Iterator (AsList (AsStruct '('"key" $1) '('"value" $1)) (AsStruct '('"key" $2) '('"value" $2)) (AsStruct '('"key" $3) '('"value" $3))))) )))) ) 2025-03-31T16:05:03.347400Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1792:2958] TxId: 281474976715660. Ctx: { TraceId: 01jqpf5yrt50rqh3qnwnk9db83, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjAyNmJmMzUtNjBjOWM0MGItOTkyNjk3MTYtYzM5OGI2Y2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Stage [0,0] create compute task: 1 2025-03-31T16:05:03.347548Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. 
Ctx: { TraceId: 01jqpf5yrt50rqh3qnwnk9db83, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjAyNmJmMzUtNjBjOWM0MGItOTkyNjk3MTYtYzM5OGI2Y2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:05:03.347599Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715660. Ctx: { TraceId: 01jqpf5yrt50rqh3qnwnk9db83, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjAyNmJmMzUtNjBjOWM0MGItOTkyNjk3MTYtYzM5OGI2Y2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, snapshot: {0, 0} 2025-03-31T16:05:03.348105Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715660. Ctx: { TraceId: 01jqpf5yrt50rqh3qnwnk9db83, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjAyNmJmMzUtNjBjOWM0MGItOTkyNjk3MTYtYzM5OGI2Y2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Collect channels updates for task: 1 at actor [1:1795:2958] 2025-03-31T16:05:03.348181Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715660. Ctx: { TraceId: 01jqpf5yrt50rqh3qnwnk9db83, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjAyNmJmMzUtNjBjOWM0MGItOTkyNjk3MTYtYzM5OGI2Y2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Sending channels info to compute actor: [1:1795:2958], channels: 0 2025-03-31T16:05:03.348257Z node 1 :KQP_EXECUTER INFO: ActorId: [1:1792:2958] TxId: 281474976715660. Ctx: { TraceId: 01jqpf5yrt50rqh3qnwnk9db83, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjAyNmJmMzUtNjBjOWM0MGItOTkyNjk3MTYtYzM5OGI2Y2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 1, readonly: 0, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-03-31T16:05:03.348302Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1792:2958] TxId: 281474976715660. Ctx: { TraceId: 01jqpf5yrt50rqh3qnwnk9db83, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjAyNmJmMzUtNjBjOWM0MGItOTkyNjk3MTYtYzM5OGI2Y2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Updating channels after the creation of compute actors 2025-03-31T16:05:03.348339Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715660. Ctx: { TraceId: 01jqpf5yrt50rqh3qnwnk9db83, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjAyNmJmMzUtNjBjOWM0MGItOTkyNjk3MTYtYzM5OGI2Y2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Collect channels updates for task: 1 at actor [1:1795:2958] 2025-03-31T16:05:03.348381Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715660. Ctx: { TraceId: 01jqpf5yrt50rqh3qnwnk9db83, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjAyNmJmMzUtNjBjOWM0MGItOTkyNjk3MTYtYzM5OGI2Y2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Sending channels info to compute actor: [1:1795:2958], channels: 0 2025-03-31T16:05:03.348475Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1792:2958] TxId: 281474976715660. Ctx: { TraceId: 01jqpf5yrt50rqh3qnwnk9db83, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjAyNmJmMzUtNjBjOWM0MGItOTkyNjk3MTYtYzM5OGI2Y2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [1:1795:2958], 2025-03-31T16:05:03.348538Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1792:2958] TxId: 281474976715660. 
Ctx: { TraceId: 01jqpf5yrt50rqh3qnwnk9db83, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjAyNmJmMzUtNjBjOWM0MGItOTkyNjk3MTYtYzM5OGI2Y2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, waiting for 1 compute actor(s) and 0 datashard(s): CA [1:1795:2958], 2025-03-31T16:05:03.348602Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1792:2958] TxId: 281474976715660. Ctx: { TraceId: 01jqpf5yrt50rqh3qnwnk9db83, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjAyNmJmMzUtNjBjOWM0MGItOTkyNjk3MTYtYzM5OGI2Y2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, immediate tx, become ExecuteState 2025-03-31T16:05:03.360287Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1792:2958] TxId: 281474976715660. Ctx: { TraceId: 01jqpf5yrt50rqh3qnwnk9db83, Database: , DatabaseId: /Root, SessionId: ydb:// ... d: ydb://session/3?node_id=3&id=YjBkZjEzNTMtMTZlNDM4ZDktMzkyMjQ4NjQtNTI5YjM1ZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [4:1855:2478], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { } 2025-03-31T16:05:16.748234Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1845:3109] TxId: 281474976715663. Ctx: { TraceId: 01jqpf6d662dnn5817a05xc5rg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YjBkZjEzNTMtMTZlNDM4ZDktMzkyMjQ4NjQtNTI5YjM1ZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [4:1855:2478], CA [3:1853:3109], 2025-03-31T16:05:16.748291Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1845:3109] TxId: 281474976715663. Ctx: { TraceId: 01jqpf6d662dnn5817a05xc5rg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YjBkZjEzNTMtMTZlNDM4ZDktMzkyMjQ4NjQtNTI5YjM1ZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [4:1855:2478], CA [3:1853:3109], 2025-03-31T16:05:16.748772Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1845:3109] TxId: 281474976715663. Ctx: { TraceId: 01jqpf6d662dnn5817a05xc5rg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YjBkZjEzNTMtMTZlNDM4ZDktMzkyMjQ4NjQtNTI5YjM1ZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [4:1855:2478], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { CpuTimeUs: 721 Tasks { TaskId: 1 CpuTimeUs: 439 ComputeCpuTimeUs: 14 BuildCpuTimeUs: 425 HostName: "ghrun-tuhoqst4xe" NodeId: 4 CreateTimeMs: 1743437116745 } MaxMemoryUsage: 1048576 } 2025-03-31T16:05:16.748868Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1845:3109] TxId: 281474976715663. Ctx: { TraceId: 01jqpf6d662dnn5817a05xc5rg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YjBkZjEzNTMtMTZlNDM4ZDktMzkyMjQ4NjQtNTI5YjM1ZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [4:1855:2478], CA [3:1853:3109], 2025-03-31T16:05:16.748903Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1845:3109] TxId: 281474976715663. Ctx: { TraceId: 01jqpf6d662dnn5817a05xc5rg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YjBkZjEzNTMtMTZlNDM4ZDktMzkyMjQ4NjQtNTI5YjM1ZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [4:1855:2478], CA [3:1853:3109], 2025-03-31T16:05:16.756596Z node 3 :KQP_EXECUTER TRACE: ActorId: [3:1845:3109] TxId: 281474976715663. Ctx: { TraceId: 01jqpf6d662dnn5817a05xc5rg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YjBkZjEzNTMtMTZlNDM4ZDktMzkyMjQ4NjQtNTI5YjM1ZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Got result, channelId: 2, shardId: 0, inputIndex: 0, from: [3:1854:3109], finished: 0 2025-03-31T16:05:16.756714Z node 3 :KQP_EXECUTER TRACE: ActorId: [3:1845:3109] TxId: 281474976715663. Ctx: { TraceId: 01jqpf6d662dnn5817a05xc5rg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YjBkZjEzNTMtMTZlNDM4ZDktMzkyMjQ4NjQtNTI5YjM1ZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Send ack to channelId: 2, seqNo: 1, to: [3:1854:3109] 2025-03-31T16:05:16.763643Z node 3 :KQP_EXECUTER TRACE: ActorId: [3:1845:3109] TxId: 281474976715663. Ctx: { TraceId: 01jqpf6d662dnn5817a05xc5rg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YjBkZjEzNTMtMTZlNDM4ZDktMzkyMjQ4NjQtNTI5YjM1ZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Got result, channelId: 2, shardId: 0, inputIndex: 0, from: [3:1854:3109], finished: 1 2025-03-31T16:05:16.763714Z node 3 :KQP_EXECUTER TRACE: ActorId: [3:1845:3109] TxId: 281474976715663. Ctx: { TraceId: 01jqpf6d662dnn5817a05xc5rg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YjBkZjEzNTMtMTZlNDM4ZDktMzkyMjQ4NjQtNTI5YjM1ZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Send ack to channelId: 2, seqNo: 2, to: [3:1854:3109] 2025-03-31T16:05:16.764733Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1845:3109] TxId: 281474976715663. Ctx: { TraceId: 01jqpf6d662dnn5817a05xc5rg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YjBkZjEzNTMtMTZlNDM4ZDktMzkyMjQ4NjQtNTI5YjM1ZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [3:1853:3109], task: 2, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 1529 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 863 FinishTimeMs: 1743437116764 InputRows: 3 InputBytes: 12 OutputRows: 3 OutputBytes: 12 ResultRows: 3 ResultBytes: 12 ComputeCpuTimeUs: 225 BuildCpuTimeUs: 638 HostName: "ghrun-tuhoqst4xe" NodeId: 3 CreateTimeMs: 1743437116744 } MaxMemoryUsage: 1048576 } 2025-03-31T16:05:16.764845Z node 3 :KQP_EXECUTER INFO: TxId: 281474976715663. Ctx: { TraceId: 01jqpf6d662dnn5817a05xc5rg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YjBkZjEzNTMtMTZlNDM4ZDktMzkyMjQ4NjQtNTI5YjM1ZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [3:1853:3109] 2025-03-31T16:05:16.764950Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1845:3109] TxId: 281474976715663. Ctx: { TraceId: 01jqpf6d662dnn5817a05xc5rg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YjBkZjEzNTMtMTZlNDM4ZDktMzkyMjQ4NjQtNTI5YjM1ZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [4:1855:2478], 2025-03-31T16:05:16.764997Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1845:3109] TxId: 281474976715663. Ctx: { TraceId: 01jqpf6d662dnn5817a05xc5rg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YjBkZjEzNTMtMTZlNDM4ZDktMzkyMjQ4NjQtNTI5YjM1ZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [4:1855:2478], 2025-03-31T16:05:16.765357Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1845:3109] TxId: 281474976715663. Ctx: { TraceId: 01jqpf6d662dnn5817a05xc5rg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YjBkZjEzNTMtMTZlNDM4ZDktMzkyMjQ4NjQtNTI5YjM1ZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [4:1855:2478], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 1822 DurationUs: 9000 Tasks { TaskId: 1 CpuTimeUs: 609 FinishTimeMs: 1743437116763 OutputRows: 3 OutputBytes: 12 Tables { TablePath: "/Root/table-1" ReadRows: 3 ReadBytes: 24 AffectedPartitions: 4 } IngressRows: 3 ComputeCpuTimeUs: 184 BuildCpuTimeUs: 425 WaitInputTimeUs: 8129 HostName: "ghrun-tuhoqst4xe" NodeId: 4 StartTimeMs: 1743437116754 CreateTimeMs: 1743437116745 } MaxMemoryUsage: 1048576 } 2025-03-31T16:05:16.765440Z node 3 :KQP_EXECUTER INFO: TxId: 281474976715663. Ctx: { TraceId: 01jqpf6d662dnn5817a05xc5rg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YjBkZjEzNTMtMTZlNDM4ZDktMzkyMjQ4NjQtNTI5YjM1ZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [4:1855:2478] 2025-03-31T16:05:16.767107Z node 3 :KQP_EXECUTER INFO: ActorId: [3:1845:3109] TxId: 281474976715663. Ctx: { TraceId: 01jqpf6d662dnn5817a05xc5rg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YjBkZjEzNTMtMTZlNDM4ZDktMzkyMjQ4NjQtNTI5YjM1ZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Full stats: { CpuTimeUs: 8077 DurationUs: 1743437115248152 Tables { TablePath: "/Root/table-1" ReadRows: 3 ReadBytes: 24 AffectedPartitions: 4 } ExecuterCpuTimeUs: 4726 StartTimeMs: 1517 FinishTimeMs: 1743437116765 Stages { StageId: 1 StageGuid: "37ab20bd-931afa2f-3388e93-f4d07214" Program: "(\n(return (lambda \'($1) (FromFlow (NarrowMap (Take (ToFlow $1) (Uint64 \'\"1001\")) (lambda \'($2 $3) (AsStruct \'(\'\"key\" $2) \'(\'\"value\" $3)))))))\n)\n" ComputeActors { CpuTimeUs: 1529 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 863 FinishTimeMs: 1743437116764 InputRows: 3 InputBytes: 12 OutputRows: 3 OutputBytes: 12 ResultRows: 3 ResultBytes: 12 ComputeCpuTimeUs: 225 BuildCpuTimeUs: 638 HostName: "ghrun-tuhoqst4xe" NodeId: 3 CreateTimeMs: 1743437116744 } MaxMemoryUsage: 1048576 } BaseTimeMs: 1743437116754 } Stages { StageGuid: "bc99769f-5481fb18-1b1ad5e5-ee696fb1" Program: "(\n(return (lambda \'($1) (block \'(\n (let $2 (lambda \'($3) (Member $3 \'\"key\") (Member $3 \'\"value\")))\n (return (FromFlow (ExpandMap (Take (ToFlow $1) (Uint64 \'\"1001\")) $2)))\n))))\n)\n" BaseTimeMs: 1743437116754 } TxPlansWithStats: "{\"Node Type\":\"Phase\",\"PlanNodeId\":6,\"Plans\":[{\"Node Type\":\"ResultSet\",\"PlanNodeId\":5,\"PlanNodeType\":\"ResultSet\",\"Plans\":[{\"Node Type\":\"Limit\",\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":3}],\"Limit\":\"1001\",\"Name\":\"Limit\"}],\"PlanNodeId\":4,\"Plans\":[{\"Node Type\":\"Merge\",\"PlanNodeId\":3,\"PlanNodeType\":\"Connection\",\"Plans\":[{\"Node Type\":\"Limit\",\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":1}],\"Limit\":\"1001\",\"Name\":\"Limit\"}],\"PlanNodeId\":2,\"Plans\":[{\"Node Type\":\"TableFullScan\",\"Operators\":[{\"E-Cost\":\"No estimate\",\"E-Rows\":\"No estimate\",\"E-Size\":\"No 
estimate\",\"Inputs\":[],\"Name\":\"TableFullScan\",\"Path\":\"\\/Root\\/table-1\",\"ReadColumns\":[\"key\",\"value\"],\"ReadLimit\":\"1001\",\"ReadRanges\":[\"key (-∞, +∞)\"],\"ReadRangesPointPrefixLen\":\"0\",\"Scan\":\"Sequential\",\"Table\":\"table-1\"}],\"PlanNodeId\":1,\"StageGuid\":\"\",\"Tables\":[\"table-1\"]}],\"StageGuid\":\"bc99769f-5481fb18-1b1ad5e5-ee696fb1\",\"Stats\":{\"BaseTimeMs\":1743437116754,\"FinishedTasks\":0,\"PhysicalStageId\":0,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"SortColumns\":[\"key (Asc)\"],\"StageGuid\":\"\"}],\"StageGuid\":\"37ab20bd-931afa2f-3388e93-f4d07214\",\"Stats\":{\"BaseTimeMs\":1743437116754,\"ComputeNodes\":[{\"CpuTimeUs\":1529,\"Tasks\":[{\"ComputeTimeUs\":225,\"FinishTimeMs\":1743437116764,\"Host\":\"ghrun-tuhoqst4xe\",\"InputBytes\":12,\"InputRows\":3,\"NodeId\":3,\"OutputBytes\":12,\"OutputRows\":3,\"ResultBytes\":12,\"ResultRows\":3,\"TaskId\":2}]}],\"FinishedTasks\":0,\"PhysicalStageId\":1,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"StageGuid\":\"\"}],\"StageGuid\":\"\"}" StatConvertBytes: 1500 Extra { type_url: "type.googleapis.com/NKqpProto.TKqpExecutionExtraStats" value: "\010\004\022\013\010\371\013\020\236\016\030\227\032 \002" } } 2025-03-31T16:05:16.767200Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1845:3109] TxId: 281474976715663. Ctx: { TraceId: 01jqpf6d662dnn5817a05xc5rg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YjBkZjEzNTMtMTZlNDM4ZDktMzkyMjQ4NjQtNTI5YjM1ZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-03-31T16:05:16.767264Z node 3 :KQP_EXECUTER TRACE: ActorId: [3:1845:3109] TxId: 281474976715663. Ctx: { TraceId: 01jqpf6d662dnn5817a05xc5rg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YjBkZjEzNTMtMTZlNDM4ZDktMzkyMjQ4NjQtNTI5YjM1ZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Terminate, become ZombieState 2025-03-31T16:05:16.767329Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1845:3109] TxId: 281474976715663. Ctx: { TraceId: 01jqpf6d662dnn5817a05xc5rg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YjBkZjEzNTMtMTZlNDM4ZDktMzkyMjQ4NjQtNTI5YjM1ZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.003351s ReadRows: 3 ReadBytes: 24 ru: 3 rate limiter was not found force flag: 1 { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } }, { items { uint32_value: 3 } items { uint32_value: 3 } } ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TYqlDecimalTests::NegativeValues [GOOD] Test command err: 2025-03-31T16:04:36.038204Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005225446187602:2210];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:36.058654Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dee/r3tmp/tmpCB31l2/pdisk_1.dat 2025-03-31T16:04:37.121284Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:04:37.274038Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:37.274134Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:37.287701Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:04:37.368606Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22141, node 1 2025-03-31T16:04:37.527550Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:04:37.547112Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:04:37.799855Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:37.799875Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:37.799882Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:37.799992Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2920 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:38.539003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:04:41.047217Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488005225446187602:2210];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:41.047293Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:42.222159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 SUCCESS 3 rows in 0.026242s 2025-03-31T16:04:42.521809Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005251215992477:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:42.521898Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:42.522479Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005251215992489:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:42.525999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-31T16:04:42.574400Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005251215992491:2352], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-31T16:04:42.655229Z node 1 :TX_PROXY ERROR: Actor# [1:7488005251215992578:2824] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:04:43.848649Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jqpf5bwq42zs69qr91egh92r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODdjMzc3ODEtN2EyYTUzMzAtNDEzYjFjMTYtZmE5MGNlOTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 3 rows 2025-03-31T16:04:47.131289Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488005275235583878:2075];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dee/r3tmp/tmpthCi2M/pdisk_1.dat 2025-03-31T16:04:47.304871Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:04:47.568411Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:47.680324Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:47.680406Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:47.735382Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25005, node 4 2025-03-31T16:04:48.104074Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:48.104094Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:48.104101Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:48.104241Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17140 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:48.478180Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:04:52.031661Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7488005275235583878:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:52.031736Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:53.653774Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:04:56.345884Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7488005311180279441:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:56.345939Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dee/r3tmp/tmp8Q9tHY/pdisk_1.dat 2025-03-31T16:04:56.843411Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:56.937181Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:56.937258Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:56.961859Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23354, node 7 2025-03-31T16:04:57.375863Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:57.375900Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:57.375907Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:57.376040Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22756 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 184467440737095516 ... alhost:3463 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:05:06.361911Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:05:09.461125Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:05:09.625097Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7488005368070115342:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:05:09.625207Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:05:09.626061Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7488005368070115356:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:05:09.630053Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-31T16:05:09.670258Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7488005368070115358:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-31T16:05:09.769898Z node 10 :TX_PROXY ERROR: Actor# [10:7488005368070115433:2801] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:05:09.978705Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqpf66bp3167nd959hqa1erg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NWQ1N2E5ZjYtNTFjMWQ4NzMtNGU4OWYyYWEtNGFhODI3OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:05:10.283699Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7488005350890244938:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:05:10.283942Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:05:10.334589Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqpf66s48wfr18s830x5cemf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NWQ1N2E5ZjYtNTFjMWQ4NzMtNGU4OWYyYWEtNGFhODI3OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:05:10.583504Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqpf672kcvtjg4vnecb69309, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NWQ1N2E5ZjYtNTFjMWQ4NzMtNGU4OWYyYWEtNGFhODI3OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:05:10.720159Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jqpf67a3dwkqd0f16194fwsc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NWQ1N2E5ZjYtNTFjMWQ4NzMtNGU4OWYyYWEtNGFhODI3OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:05:10.879205Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jqpf67e48x4y2ggwbg7jrysj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NWQ1N2E5ZjYtNTFjMWQ4NzMtNGU4OWYyYWEtNGFhODI3OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-31T16:05:12.651194Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7488005382525055022:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:05:12.651265Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dee/r3tmp/tmpTyDk9b/pdisk_1.dat 2025-03-31T16:05:12.822618Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:05:12.869486Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:05:12.869599Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:05:12.875238Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20646, node 13 2025-03-31T16:05:12.970021Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:05:12.970058Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:05:12.970066Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:05:12.970219Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20805 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:05:13.315838Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:05:16.057969Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:05:16.164899Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7488005399704925410:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:05:16.164915Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7488005399704925418:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:05:16.165047Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:05:16.168508Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-31T16:05:16.190286Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7488005399704925424:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-31T16:05:16.287585Z node 13 :TX_PROXY ERROR: Actor# [13:7488005399704925499:2795] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:05:16.382721Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqpf6cr3390pdeerp6grjxyc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=ZGQ5MjE0NmEtZWQ0NjYxNTktOTg3YTA4NC0zMzIxYTA4Zg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:05:16.494812Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqpf6cz61yby42ktm183vj77, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=ZGQ5MjE0NmEtZWQ0NjYxNTktOTg3YTA4NC0zMzIxYTA4Zg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:05:16.654225Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqpf6d2k5sfcngnpdeeva215, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=ZGQ5MjE0NmEtZWQ0NjYxNTktOTg3YTA4NC0zMzIxYTA4Zg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:05:16.782183Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jqpf6d7n6v6vcgn9vvnpw44h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=ZGQ5MjE0NmEtZWQ0NjYxNTktOTg3YTA4NC0zMzIxYTA4Zg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:05:16.948883Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jqpf6dbjfzgr6fn0s5fyb8nx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=ZGQ5MjE0NmEtZWQ0NjYxNTktOTg3YTA4NC0zMzIxYTA4Zg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::InsertNoTargetColumns_Serial+useSink [GOOD] Test command err: Trying to start YDB, gRPC: 5986, MsgBus: 13891 2025-03-31T16:02:20.830127Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004641632728357:2207];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:02:20.830289Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012e8/r3tmp/tmpdqBbCm/pdisk_1.dat 2025-03-31T16:02:21.224140Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:02:21.232482Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:02:21.232619Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:02:21.235557Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5986, node 1 2025-03-31T16:02:21.334411Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:02:21.334444Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:02:21.334452Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:02:21.334569Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13891 TClient is connected to server localhost:13891 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:02:21.941744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:02:21.959385Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 16 2025-03-31T16:02:24.062155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:02:24.221332Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:02:24.229246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:02:24.312281Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:02:24.350204Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004658812598231:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:24.350300Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:24.360104Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004658812598243:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:24.365161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-03-31T16:02:24.378803Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488004658812598245:2353], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-03-31T16:02:24.477582Z node 1 :TX_PROXY ERROR: Actor# [1:7488004658812598298:2453] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:02:25.825333Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488004641632728357:2207];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:02:25.825402Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; f f t t 18 2025-03-31T16:02:26.366277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:02:26.460575Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:02:26.468243Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-31T16:02:26.528628Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 21 2025-03-31T16:02:26.902426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:02:26.983471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T16:02:27.039402Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 23 2025-03-31T16:02:27.375331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-31T16:02:27.489340Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:02:27.496001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-31T16:02:27.547198Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 20 2025-03-31T16:02:27.930198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-31T16:02:28.009553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-03-31T16:02:28.086811Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 700 2025-03-31T16:02:28.495387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-03-31T16:02:28.582310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 0.5 0.5 1.5 1.5 2.5 2.5 3.5 3.5 4.5 4.5 5.5 5.5 6.5 6.5 7.5 7.5 8.5 8.5 9.5 9.5 701 2025-03-31T16:02:29.052829Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710692:0, at schemeshard: 72057594046644480 2025-03-31T16:02:29.111952Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:02:29.118668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710694:0, at schemeshard: 72057594046644480 0.5 0.5 1.5 1.5 2.5 2.5 3.5 3.5 4.5 4.5 5.5 5.5 6.5 6.5 7.5 7.5 8.5 8.5 9.5 9.5 25 2025-03-31T16:02:29.568508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710697:0, at schemeshard: 72057594046644480 2025-03-31T16:02:29.682255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710698:0, at schemeshard: 72057594046644480 2025-03-31T16:02:29.742882Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill text 0 text 0 text 1 text 1 text 2 text 2 text 3 text 3 text 4 text 4 text 5 text 5 text 6 text 6 text 7 text 7 text 8 text 8 text 9 text 9 1042 2025-03-31T16:02:30.130731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710702:0, at schemeshard: 72057594046644480 2025-03-31T16:02:30.191481Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:02:30.198104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710704:0, at schemeshard: 72057594046644480 bpchar 0 bpchar 0 bpchar 1 bpchar 1 bpchar 2 bpchar 2 bpchar 3 bpchar 3 bpchar 4 bpchar 4 bpchar 5 bpchar 5 bpchar 6 bpchar 6 bpchar 7 bpchar 7 bpchar 8 bpchar 8 bpchar 9 bpchar 9 1043 2025-03-31T16:02:30.721424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710707:0, at schemeshard: 72057594046644480 2025-03-31T16:02:30.825961Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:02:30.841323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710709:0, at schemeshard: 72057594046644480 2025-03-31T16:02:30.926060Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill varchar 0 varchar 0 varchar 1 varchar 1 varchar 2 varchar 2 varchar 3 varchar 3 varchar 4 varchar 4 varchar 5 varchar 5 varchar 6 varchar 6 varchar 7 varchar 7 varchar 8 varchar 8 varchar 9 varchar 9 19 2025-03-31T16:02:31.283 ... [TPoolCreatorActor] ActorId: [10:7488005251625536677:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:04:42.617166Z node 10 :TX_PROXY ERROR: Actor# [10:7488005251625536728:2347] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:04:42.651444Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:04:43.271245Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 23042, MsgBus: 5859 2025-03-31T16:04:45.228666Z node 11 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[11:7488005265544672515:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:45.300301Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012e8/r3tmp/tmpR3VKBP/pdisk_1.dat 2025-03-31T16:04:45.651932Z node 11 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:45.769630Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:45.769799Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:45.776239Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23042, node 11 2025-03-31T16:04:46.140172Z node 11 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:46.140209Z node 11 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:46.140227Z node 11 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:46.140453Z node 11 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5859 TClient is connected to server localhost:5859 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:04:48.313147Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:50.227246Z node 11 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7488005265544672515:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:50.227384Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:55.558975Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7488005308494346146:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:55.559124Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:55.559603Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7488005308494346165:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:55.565691Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:04:55.584191Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7488005308494346167:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:04:55.671275Z node 11 :TX_PROXY ERROR: Actor# [11:7488005308494346218:2359] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:04:55.715041Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:04:56.530975Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 20039, MsgBus: 18940 2025-03-31T16:04:58.974108Z node 12 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[12:7488005320638437163:2148];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012e8/r3tmp/tmpNb9kg1/pdisk_1.dat 2025-03-31T16:04:59.144166Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:04:59.435135Z node 12 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:59.539843Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:59.540009Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:59.542538Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20039, node 12 2025-03-31T16:05:00.156924Z node 12 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:05:00.156964Z node 12 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:05:00.156982Z node 12 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:05:00.157221Z node 12 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18940 TClient is connected to server localhost:18940 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:05:02.490334Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:05:03.972914Z node 12 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[12:7488005320638437163:2148];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:05:03.973027Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:05:07.853178Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7488005359293143405:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:05:07.853308Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:05:07.853311Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7488005359293143416:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:05:07.859313Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:05:07.877483Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7488005359293143419:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:05:07.946468Z node 12 :TX_PROXY ERROR: Actor# [12:7488005359293143471:2353] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:05:07.999214Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbS3Internal::BadRequests [GOOD] Test command err: 2025-03-31T16:04:37.287519Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005228953554214:2206];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:37.293523Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000de5/r3tmp/tmpDzWLoz/pdisk_1.dat 2025-03-31T16:04:38.134997Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:38.135125Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:38.156980Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:04:38.287387Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:38.313617Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 12779, node 1 2025-03-31T16:04:38.407511Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:04:38.407900Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:04:38.578497Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:38.578524Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:38.578530Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:38.578688Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16524 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:38.893179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:41.388805Z node 1 :KQP_PROXY WARN: Failed to parse session id: unknownSesson 2025-03-31T16:04:43.009358Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488005255734271212:2142];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:43.009430Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000de5/r3tmp/tmpv0ESIs/pdisk_1.dat 2025-03-31T16:04:43.308603Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:43.372930Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:43.373020Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:43.386692Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2963, node 4 2025-03-31T16:04:43.616174Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:43.616197Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:43.616205Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:43.616344Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8795 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:43.971174Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:48.794517Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7488005278364153733:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:48.794598Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000de5/r3tmp/tmpadFbef/pdisk_1.dat 2025-03-31T16:04:49.228573Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:49.289285Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:49.289730Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:49.294656Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29306, node 7 2025-03-31T16:04:49.603656Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:49.603678Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:49.603685Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:49.603841Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29159 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:49.916614Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:49.940508Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:04:54.427957Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7488005305522801646:2077];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:54.428066Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000de5/r3tmp/tmpeBUBNJ/pdisk_1.dat 2025-03-31T16:04:54.945126Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:54.997422Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:54.997511Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:55.005810Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19434, node 10 2025-03-31T16:04:55.148219Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:55.148246Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:55.148254Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:55.148396Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7123 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:55.441038Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:55.469007Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:04:59.210168Z node 10 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-03-31T16:04:59.219816Z node 10 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /Root, empty 2025-03-31T16:04:59.251583Z node 10 :KQP_PROXY DEBUG: TraceId: "01jqpf5rk4ahed7c139eaqp28x", Request has 18445000636610.300079s seconds to be completed 2025-03-31T16:04:59.254343Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=MTQxYzAyZTYtY2M1Njk5MmUtN2UwYzRkNzAtMjQxMGU3NDU=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id MTQxYzAyZTYtY2M1Njk5MmUtN2UwYzRkNzAtMjQxMGU3NDU= 2025-03-31T16:04:59.254452Z node 10 :KQP_PROXY DEBUG: TraceId: "01jqpf5rk4ahed7c139eaqp28x", Created new session, sessionId: ydb://session/3?node_id=10&id=MTQxYzAyZTYtY2M1Njk5MmUtN2UwYzRkNzAtMjQxMGU3NDU=, workerId: [10:7488005326997639200:2333], database: , longSession: 1, local sessions count: 1 2025-03-31T16:04:59.254506Z node 10 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /Root, empty 2025-03-31T16:04:59.254692Z node 10 :KQP_PROXY DEBUG: Received create session request, trace_id: 01jqpf5rk4ahed7c139eaqp28x 2025-03-31T16:04:59.255841Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=MTQxYzAyZTYtY2M1Njk5MmUtN2UwYzRkNzAtMjQxMGU3NDU=, ActorId: [10:7488005326997639200:2333], ActorState: unknown state, session actor bootstrapped 2025-03-31T16:04:59.256531Z node 10 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-03-31T16:04:59.256565Z node 10 :KQP_PROXY DEBUG: Updated table service config. 
2025-03-31T16:04:59.256601Z node 10 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-03-31T16:04:59.256664Z node 10 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /Root, empty 2025-03-31T16:04:59.256746Z node 10 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T16:04:59.256802Z node 10 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T16:04:59.256868Z node 10 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T16:04:59.262323Z node 10 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T16:04:59.262414Z node 10 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T16:04:59.287539Z node 10 :KQP_PROXY DEBUG: Received ping session request, has local session: ydb://session/3?node_id=10&id=MTQxYzAyZTYtY2M1Njk5MmUtN2UwYzRkNzAtMjQxMGU3NDU=, rpc ctrl: [10:7488005326997639216:2334], sameNode: 1, trace_id: 2025-03-31T16:04:59.287604Z node 10 :KQP_PROXY TRACE: Attach local session: [10:7488005326997639200:2333] to rpc: [10:7488005326997639216:2334] on same node 2025-03-31T16:04:59.303023Z node 10 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=10&id=MTQxYzAyZTYtY2M1Njk5MmUtN2UwYzRkNzAtMjQxMGU3NDU=, ActorId: [10:7488005326997639200:2333], ActorState: ReadyState, Session closed due to explicit close event 2025-03-31T16:04:59.303101Z node 10 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=10&id=MTQxYzAyZTYtY2M1Njk5MmUtN2UwYzRkNzAtMjQxMGU3NDU=, ActorId: [10:7488005326997639200:2333], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-03-31T16:04:59.303134Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=MTQxYzAyZTYtY2M1Njk5MmUtN2UwYzRkNzAtMjQxMGU3NDU=, ActorId: [10:7488005326997639200:2333], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-03-31T16:04:59.303173Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=MTQxYzAyZTYtY2M1Njk5MmUtN2UwYzRkNzAtMjQxMGU3NDU=, ActorId: [10:7488005326997639200:2333], ActorState: unknown state, Cleanup temp tables: 0 2025-03-31T16:04:59.303269Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=MTQxYzAyZTYtY2M1Njk5MmUtN2UwYzRkNzAtMjQxMGU3NDU=, ActorId: [10:7488005326997639200:2333], ActorState: unknown state, Session actor destroyed 2025-03-31T16:04:59.303607Z node 10 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=10&id=MTQxYzAyZTYtY2M1Njk5MmUtN2UwYzRkNzAtMjQxMGU3NDU=, workerId: [10:7488005326997639200:2333], local sessions count: 0 2025-03-31T16:04:59.325806Z node 10 :KQP_PROXY DEBUG: Received ping session request, request_id: 3, sender: [10:7488005326997639219:2336], trace_id: 2025-03-31T16:04:59.325988Z node 10 :KQP_PROXY NOTICE: Session not found: ydb://session/3?node_id=10&id=MTQxYzAyZTYtY2M1Njk5MmUtN2UwYzRkNzAtMjQxMGU3NDU= 2025-03-31T16:04:59.326112Z node 10 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 3, sender: [10:7488005326997639219:2336], selfId: [10:7488005305522801865:2281], source: [10:7488005305522801865:2281] 2025-03-31T16:04:59.422311Z node 10 :METADATA_PROVIDER ERROR: 
fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7488005305522801646:2077];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:59.422440Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:05:01.685670Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7488005331867551689:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:05:01.687644Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000de5/r3tmp/tmpuebGJq/pdisk_1.dat 2025-03-31T16:05:02.206276Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:05:02.272261Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:05:02.272363Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:05:02.276198Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 65226, node 13 2025-03-31T16:05:02.604120Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:05:02.604150Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:05:02.604160Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:05:02.604324Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7815 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:05:03.016967Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:05:06.658212Z node 13 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7488005331867551689:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:05:06.658313Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:05:06.678371Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:05:07.461788Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7488005357637358449:2438], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:05:07.461894Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:05:07.461921Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7488005357637358457:2441], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:05:07.467520Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-31T16:05:07.506023Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7488005357637358463:2442], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-31T16:05:07.630582Z node 13 :TX_PROXY ERROR: Actor# [13:7488005357637358575:4212] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:05:08.193051Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jqpf64834ej2dkpz3tpzhpsh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=OTE4MzUzYjktYWRmY2Y2YmUtZjBmNmVkODktNWQ2ODFlZjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcYdbTest::BeginTxRequestError [GOOD] Test command err: 2025-03-31T16:04:36.889343Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005225160030279:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:36.889392Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ded/r3tmp/tmps4VvqX/pdisk_1.dat 2025-03-31T16:04:38.068480Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:04:38.282866Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:38.282947Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:38.292969Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:04:38.385362Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4441, node 1 2025-03-31T16:04:38.464092Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:04:38.501869Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:04:38.809982Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:38.810006Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:38.810014Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:38.810107Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10080 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:39.283865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:39.450129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:04:45.240850Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488005263306659438:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:45.240997Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ded/r3tmp/tmpSCmuZ4/pdisk_1.dat 2025-03-31T16:04:45.639569Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:45.757716Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:45.757803Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:45.766266Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7281, node 4 2025-03-31T16:04:46.206337Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:46.206360Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:46.206367Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:46.206508Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9245 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:46.628897Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:46.649088Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:04:46.821554Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:04:46.968322Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-31T16:04:47.145358Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:04:47.256150Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:04:47.461170Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:04:48.269639Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:04:48.362936Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:04:48.439278Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:04:52.881886Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7488005294385918472:2079];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:52.881958Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: 
SetPath # /home/runner/.ya/build/build_root/0l5c/000ded/r3tmp/tmprOG0lQ/pdisk_1.dat 2025-03-31T16:04:53.310199Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:53.356609Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:53.356707Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:53.364884Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28312, node 7 2025-03-31T16:04:53.727975Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:53.728013Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:53.728022Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:53.728192Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15366 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:54.379429Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:04:54.555908Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:05:00.831040Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7488005331231986748:2206];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:05:00.831129Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ded/r3tmp/tmppdXdKq/pdisk_1.dat 2025-03-31T16:05:01.612134Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:05:01.689606Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:05:01.689720Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:05:01.695439Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15727, node 10 2025-03-31T16:05:02.048079Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:05:02.048115Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:05:02.048127Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:05:02.048405Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11169 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:05:02.795193Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:05:02.910694Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:05:03.076958Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-31T16:05:03.464077Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 10, TabletId: 72075186224037888 not found 2025-03-31T16:05:08.829543Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7488005361731310626:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:05:08.829680Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ded/r3tmp/tmpbnhVLN/pdisk_1.dat 2025-03-31T16:05:09.278305Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:05:09.329797Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:05:09.329901Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:05:09.341194Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25245, node 13 2025-03-31T16:05:09.473483Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:05:09.473511Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:05:09.473521Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:05:09.473696Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20871 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:05:09.887392Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:05:12.967949Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7488005378911180887:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:05:12.968092Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:05:12.968435Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7488005378911180899:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:05:12.973514Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:05:12.997876Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7488005378911180901:2339], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:05:13.096113Z node 13 :TX_PROXY ERROR: Actor# [13:7488005383206148268:2679] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:05:13.097627Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=NTY4ZGYxMC0xOTRmMWZiLTIzYzJmZjFjLWMyYjVlOGI2, ActorId: [13:7488005378911180884:2333], ActorState: ExecuteState, TraceId: 01jqpf69m6cesc6m6qj5tgszmg, ReplyQueryCompileError, status NOT_FOUND remove tx with tx_id: 2025-03-31T16:05:13.106205Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=NTY4ZGYxMC0xOTRmMWZiLTIzYzJmZjFjLWMyYjVlOGI2, ActorId: [13:7488005378911180884:2333], ActorState: ExecuteState, TraceId: 01jqpf69rcdej2s1fzzaayzp9g, ReplyQueryCompileError, status NOT_FOUND remove tx with tx_id: 2025-03-31T16:05:13.109355Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=NTY4ZGYxMC0xOTRmMWZiLTIzYzJmZjFjLWMyYjVlOGI2, ActorId: [13:7488005378911180884:2333], ActorState: ExecuteState, TraceId: 01jqpf69rm5p44ya9n9a2zktpp, ReplyQueryCompileError, status NOT_FOUND remove tx with tx_id:
>> BackupRestore::TestAllIndexTypes-EIndexTypeGlobalVectorKmeansTree [GOOD]
|75.8%| [TA] $(B)/ydb/core/tx/datashard/ut_kqp_errors/test-results/unittest/{meta.json ... results_accumulator.log}
|75.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/locks/ut_range_treap/ydb-core-tx-locks-ut_range_treap
|75.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/locks/ut_range_treap/ydb-core-tx-locks-ut_range_treap
>> TConsoleTests::TestTenantConfigConsistency [GOOD]
>> TConsoleTests::TestSetConfig
|75.8%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_errors/test-results/unittest/{meta.json ... results_accumulator.log}
|75.8%| [LD] {RESULT} $(B)/ydb/core/tx/locks/ut_range_treap/ydb-core-tx-locks-ut_range_treap
|75.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest
|75.9%| [TA] $(B)/ydb/core/tx/scheme_board/ut_subscriber/test-results/unittest/{meta.json ... results_accumulator.log}
>> DataShardVolatile::UpsertBrokenLockArbiterRestart-UseSink [GOOD]
>> DataShardVolatile::UpsertDependenciesShardsRestart+UseSink
|75.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest
|75.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut
|75.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut
|75.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest
|75.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_local_kmeans/ydb-core-tx-datashard-ut_local_kmeans
|75.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_local_kmeans/ydb-core-tx-datashard-ut_local_kmeans
|75.9%| [LD] {RESULT} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut
|75.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_local_kmeans/ydb-core-tx-datashard-ut_local_kmeans
|75.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats
>> DataShardReadIterator::ShouldReverseReadMultipleRangesOneByOneWithAcks
>> DataShardReadIterator::ShouldReceiveErrorAfterSplit
|75.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats
|75.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats
|75.9%| [TA] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_subscriber/test-results/unittest/{meta.json ... results_accumulator.log}
>> DataShardReadIterator::ShouldReadRangeCellVec
>> DataShardReadIterator::ShouldRangeReadReverseLeftInclusive
>> DataShardReadIterator::ShouldHandleReadAck
------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestore::TestAllIndexTypes-EIndexTypeGlobalVectorKmeansTree [GOOD] Test command err: 2025-03-31T16:04:20.069939Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005156725930085:2077];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:20.070007Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00109a/r3tmp/tmpcqEgVo/pdisk_1.dat 2025-03-31T16:04:21.125222Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:21.135537Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:04:21.163825Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:21.163935Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:21.179650Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17026, node 1 2025-03-31T16:04:21.380626Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:21.380657Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:21.380665Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 
2025-03-31T16:04:21.380804Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21438 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:21.734168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:24.658243Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005173905800328:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:24.658401Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:24.658870Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005173905800340:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:24.665791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:04:24.704583Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005173905800342:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:04:24.802715Z node 1 :TX_PROXY ERROR: Actor# [1:7488005173905800418:2704] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:04:25.071222Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488005156725930085:2077];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:25.071310Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Backup "/Root" to "/home/runner/.ya/build/build_root/0l5c/00109a/r3tmp/tmpdmtefR/"Create temporary directory "/Root/~backup_20250331T160425" in databaseProcess "/home/runner/.ya/build/build_root/0l5c/00109a/r3tmp/tmpdmtefR/view"Backup view "/Root/view" to "/home/runner/.ya/build/build_root/0l5c/00109a/r3tmp/tmpdmtefR/view"Write view into "/home/runner/.ya/build/build_root/0l5c/00109a/r3tmp/tmpdmtefR/view/create_view.sql"Write ACL into "/home/runner/.ya/build/build_root/0l5c/00109a/r3tmp/tmpdmtefR/view/permissions.pb"Remove temporary directory "/Root/~backup_20250331T160425" in database2025-03-31T16:04:25.538651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710662:0, at schemeshard: 72057594046644480 Backup completed successfullyRestore "/home/runner/.ya/build/build_root/0l5c/00109a/r3tmp/tmpdmtefR/" to "/Root"Resolved db base path: "/Root"Restore folder "/home/runner/.ya/build/build_root/0l5c/00109a/r3tmp/tmpdmtefR/" to "/Root"Process "/home/runner/.ya/build/build_root/0l5c/00109a/r3tmp/tmpdmtefR/view"Restore view "/home/runner/.ya/build/build_root/0l5c/00109a/r3tmp/tmpdmtefR/view" to "/Root/view"Read view from "/home/runner/.ya/build/build_root/0l5c/00109a/r3tmp/tmpdmtefR/view/create_view.sql"Created "/Root/view"Restore ACL "/home/runner/.ya/build/build_root/0l5c/00109a/r3tmp/tmpdmtefR/view" to "/Root/view"Read ACL from "/home/runner/.ya/build/build_root/0l5c/00109a/r3tmp/tmpdmtefR/view/permissions.pb"2025-03-31T16:04:25.760958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72057594046644480 Restore completed successfully 2025-03-31T16:04:27.646134Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488005189087073324:2127];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:27.654587Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00109a/r3tmp/tmpKZcbxE/pdisk_1.dat 2025-03-31T16:04:27.939444Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:27.970788Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:27.970902Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:27.988885Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 26476, node 4 2025-03-31T16:04:28.351430Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:28.351455Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:28.351464Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:28.351603Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7922 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:29.032439Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:32.622580Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7488005189087073324:2127];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:32.622655Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:33.181693Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488005214856878163:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:33.181761Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488005214856878174:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:33.181817Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:33.186109Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:04:33.240019Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7488005214856878177:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:04:33.313086Z node 4 :TX_PROXY ERROR: Actor# [4:7488005214856878272:2704] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: ... cal::TEvTabletStatus from node 16, TabletId: 72075186224037889 not found Resolved db base path: "/Root"Restore folder "/home/runner/.ya/build/build_root/0l5c/00109a/r3tmp/tmpxxJjXa/" to "/Root"Process "/home/runner/.ya/build/build_root/0l5c/00109a/r3tmp/tmpxxJjXa/table"Read scheme from "/home/runner/.ya/build/build_root/0l5c/00109a/r3tmp/tmpxxJjXa/table/scheme.pb"Restore table "/home/runner/.ya/build/build_root/0l5c/00109a/r3tmp/tmpxxJjXa/table" to "/Root/table"2025-03-31T16:05:18.356267Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 Created "/Root/table"Read data from "/home/runner/.ya/build/build_root/0l5c/00109a/r3tmp/tmpxxJjXa/table/data_00.csv"Restore index "byValue" on "/Root/table"2025-03-31T16:05:18.457237Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-03-31T16:05:18.485668Z node 16 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[16:7488005385199595516:2211];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:05:18.485758Z node 16 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:05:18.517045Z node 16 :CHANGE_EXCHANGE WARN: [AsyncIndexChangeSenderMain][72075186224037892:1][16:7488005406674434284:2424] Failed entry at 'ResolveTargetTable': entry# { Path: TableId: [72057594046644480:11:0] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2025-03-31T16:05:18.537271Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 Restore ACL "/home/runner/.ya/build/build_root/0l5c/00109a/r3tmp/tmpxxJjXa/table" to "/Root/table"Read ACL from "/home/runner/.ya/build/build_root/0l5c/00109a/r3tmp/tmpxxJjXa/table/permissions.pb"2025-03-31T16:05:18.782802Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715669:0, at schemeshard: 72057594046644480 Restore completed successfully 2025-03-31T16:05:20.509254Z node 19 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[19:7488005413665822863:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:05:20.509358Z node 19 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00109a/r3tmp/tmp9SkXbo/pdisk_1.dat 2025-03-31T16:05:20.697786Z node 19 :IMPORT WARN: Table profiles were not loaded 
2025-03-31T16:05:20.753074Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:05:20.753192Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:05:20.756162Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1283, node 19 2025-03-31T16:05:20.802169Z node 19 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:05:20.802199Z node 19 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:05:20.802213Z node 19 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:05:20.802382Z node 19 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30130 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:05:21.035114Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:05:24.641467Z node 19 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [19:7488005430845693130:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:05:24.641592Z node 19 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:05:24.668343Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
Backup "/Root" to "/home/runner/.ya/build/build_root/0l5c/00109a/r3tmp/tmpsJf7Mc/"
Create temporary directory "/Root/~backup_20250331T160524" in database
Process "/home/runner/.ya/build/build_root/0l5c/00109a/r3tmp/tmpsJf7Mc/table"
Copy tables: { src: "/Root/table", dst: "/Root/~backup_20250331T160524/table" }
Backup table "/Root/~backup_20250331T160524/table" to "/home/runner/.ya/build/build_root/0l5c/00109a/r3tmp/tmpsJf7Mc/table"
Describe table "/Root/~backup_20250331T160524/table"
Write scheme into "/home/runner/.ya/build/build_root/0l5c/00109a/r3tmp/tmpsJf7Mc/table/scheme.pb"
Describe table "/Root/table"
Write ACL into "/home/runner/.ya/build/build_root/0l5c/00109a/r3tmp/tmpsJf7Mc/table/permissions.pb"
Read table "/Root/~backup_20250331T160524/table"
Write data into "/home/runner/.ya/build/build_root/0l5c/00109a/r3tmp/tmpsJf7Mc/table/data_00.csv"
Drop table "/Root/~backup_20250331T160524/table"
Remove temporary directory "/Root/~backup_20250331T160524" in database
2025-03-31T16:05:25.222444Z node 19 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 19, TabletId: 72075186224037891 not found
2025-03-31T16:05:25.222507Z node 19 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 19, TabletId: 72075186224037892 not found
2025-03-31T16:05:25.224884Z node 19 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 19, TabletId: 72075186224037893 not found
2025-03-31T16:05:25.235950Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710664:0, at schemeshard: 72057594046644480
Backup completed successfully
2025-03-31T16:05:25.254085Z node 19 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [19:7488005435140661514:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:05:25.254199Z node 19 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } Restore "/home/runner/.ya/build/build_root/0l5c/00109a/r3tmp/tmpsJf7Mc/" to "/Root"2025-03-31T16:05:25.314220Z node 19 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 19, TabletId: 72075186224037889 not found 2025-03-31T16:05:25.314267Z node 19 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 19, TabletId: 72075186224037890 not found 2025-03-31T16:05:25.314303Z node 19 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 19, TabletId: 72075186224037888 not found Resolved db base path: "/Root"Restore folder "/home/runner/.ya/build/build_root/0l5c/00109a/r3tmp/tmpsJf7Mc/" to "/Root"Process "/home/runner/.ya/build/build_root/0l5c/00109a/r3tmp/tmpsJf7Mc/table"Read scheme from "/home/runner/.ya/build/build_root/0l5c/00109a/r3tmp/tmpsJf7Mc/table/scheme.pb"Restore table "/home/runner/.ya/build/build_root/0l5c/00109a/r3tmp/tmpsJf7Mc/table" to "/Root/table"2025-03-31T16:05:25.345414Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 Created "/Root/table"Read data from "/home/runner/.ya/build/build_root/0l5c/00109a/r3tmp/tmpsJf7Mc/table/data_00.csv"Restore index "byValue" on "/Root/table"2025-03-31T16:05:25.452644Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-03-31T16:05:25.509306Z node 19 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[19:7488005413665822863:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:05:25.509402Z node 19 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:05:25.596203Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-03-31T16:05:25.652391Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715760:0, at schemeshard: 72057594046644480 2025-03-31T16:05:25.837543Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715761:0, at schemeshard: 72057594046644480 2025-03-31T16:05:25.903734Z node 19 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 19, TabletId: 72075186224037898 not found 2025-03-31T16:05:25.903794Z node 19 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 19, TabletId: 72075186224037897 not found Restore ACL "/home/runner/.ya/build/build_root/0l5c/00109a/r3tmp/tmpsJf7Mc/table" to "/Root/table"Read ACL from "/home/runner/.ya/build/build_root/0l5c/00109a/r3tmp/tmpsJf7Mc/table/permissions.pb"2025-03-31T16:05:26.221631Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710669:0, at schemeshard: 72057594046644480 Restore completed successfully >> 
DataShardReadIterator::ShouldReverseReadMultipleKeys >> DataShardReadIterator::ShouldReadKeyCellVec >> DataShardReadIteratorBatchMode::SelectingColumns >> DataShardReadIterator::ShouldReadRangeInclusiveEndsCellVec >> TConsoleConfigSubscriptionTests::TestNotificationForTimeoutedNotificationResponse [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForRestartedServer >> TSyncNeighborsTests::SerDes2 [GOOD] >> TNetClassifierUpdaterTest::TestFiltrationByNetboxCustomFieldsAndTags [GOOD] >> TNetClassifierUpdaterTest::TestFiltrationByNetboxCustomFieldsOnly |76.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TSyncNeighborsTests::SerDes2 [GOOD] |76.0%| [TA] $(B)/ydb/core/blobstorage/vdisk/syncer/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> DataShardReadIteratorSysTables::ShouldRead >> Viewer::FloatPointJsonQuery [GOOD] >> Viewer::AuthorizeYdbTokenWithDatabaseAttributes >> GroupWriteTest::ByTableName [GOOD] >> TConsoleTests::TestSetConfig [GOOD] >> TConsoleTests::TestTenantGeneration >> YdbOlapStore::LogLast50ByResource [GOOD] >> YdbOlapStore::LogGrepNonExisting >> TConsoleTests::TestAlterTenantModifyStorageResourcesForRunningExtSubdomain [GOOD] >> TConsoleTests::TestAlterUnknownTenant ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest >> GroupWriteTest::ByTableName [GOOD] Test command err: RandomSeed# 1734180442788409924 2025-03-31T16:04:59.230364Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058428954028033 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 72058428954028033 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2025-03-31T16:04:59.273795Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058428954028033 Generation# 1 recieved TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2025-03-31T16:04:59.273880Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058428954028033 Generation# 1 going to send TEvBlock {TabletId# 72058428954028033 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2025-03-31T16:04:59.277283Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058428954028033 Generation# 1 recieved TEvBlockResult {Status# OK} 2025-03-31T16:04:59.297886Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058428954028033 Generation# 2 going to send TEvCollectGarbage {TabletId# 72058428954028033 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-03-31T16:04:59.301450Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058428954028033 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058428954028033 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2025-03-31T16:05:13.801360Z 3 00h01m11.710512s :BS_LOGCUTTER ERROR: VDISK[82000000:_:0:2:0]: KEEPER: RetryCutLogEvent: limit exceeded; FreeUpToLsn# 1101 2025-03-31T16:05:32.519839Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2025-03-31T16:05:32.519970Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058428954028033 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058428954028033 RecordGeneration# 2 PerGenerationCounter# 32 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true 
IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-03-31T16:05:32.520033Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2025-03-31T16:05:32.520078Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058428954028033 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058428954028033 RecordGeneration# 2 PerGenerationCounter# 33 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-03-31T16:05:32.588750Z 1 00h01m30.010512s :BS_LOAD_TEST INFO: TabletId# 72058428954028033 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058428954028033 RecordGeneration# 2 PerGenerationCounter# 32 Channel# 0 Status# OK} 2025-03-31T16:05:32.588880Z 1 00h01m30.010512s :BS_LOAD_TEST INFO: TabletId# 72058428954028033 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058428954028033 RecordGeneration# 2 PerGenerationCounter# 33 Channel# 0 Status# OK} >> YdbIndexTable::MultiShardTableOneIndexDataColumn [GOOD] >> YdbIndexTable::MultiShardTableOneIndexIndexOverlap >> DataShardReadIterator::ShouldReverseReadMultipleRangesOneByOneWithAcks [GOOD] >> DataShardReadIterator::ShouldStopWhenNodeDisconnected >> DataShardReadIterator::ShouldReadRangeCellVec [GOOD] >> DataShardReadIterator::ShouldReadRangeArrow >> DataShardReadIterator::ShouldRangeReadReverseLeftInclusive [GOOD] >> DataShardReadIterator::ShouldNotReadAfterCancel >> DataShardReadIterator::ShouldReceiveErrorAfterSplit [GOOD] >> DataShardReadIterator::ShouldReceiveErrorAfterSplitWhenExhausted >> DataShardReadIterator::ShouldReadRangeInclusiveEndsCellVec [GOOD] >> DataShardReadIterator::ShouldReadRangeInclusiveEndsArrow >> test_sql_streaming.py::test[suites-GroupByHopWithDataWatermarks-default.txt] >> YdbIndexTable::MultiShardTableOneIndexPkOverlap [GOOD] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_sqs_action_counters >> DataShardReadIterator::ShouldReverseReadMultipleKeys [GOOD] >> DataShardReadIterator::ShouldReverseReadMultipleKeysOneByOne >> DataShardReadIterator::ShouldHandleReadAck [GOOD] >> DataShardReadIterator::ShouldHandleOutOfOrderReadAck >> AsyncIndexChangeExchange::ShouldDeliverChangesOnSplitMerge [GOOD] >> AsyncIndexChangeExchange::ShouldRejectChangesOnQueueOverflowByCount >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_fifo_groups_with_dlq_in_cloud[tables_format_v1] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-fifo] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v1-fifo] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_ymq_send_read_delete >> TVersions::Wreck0 [GOOD] >> TVersions::Wreck0Reverse >> DataShardReadIteratorBatchMode::SelectingColumns [GOOD] >> DataShardReadIteratorBatchMode::ShouldHandleReadAck >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v0-fifo] >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v0-fifo] >> DataShardReadIterator::ShouldReadKeyCellVec [GOOD] >> DataShardReadIterator::ShouldReadKeyArrow >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v1-fifo] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v1-fifo] >> 
test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v0] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v0-fifo] >> TPersQueueTest::ReadFromSeveralPartitionsMigrated >> TConsoleTests::TestAlterUnknownTenant [GOOD] >> TConsoleTests::TestAlterUnknownTenantExtSubdomain >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-fifo] >> TConsoleTests::TestTenantGeneration [GOOD] >> TConsoleTests::TestTenantGenerationExtSubdomain ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/idx_test/unittest >> YdbIndexTable::MultiShardTableOneIndexPkOverlap [GOOD] Test command err: Trying to start YDB, gRPC: 32251, MsgBus: 22748 2025-03-31T16:03:33.813776Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004954108454122:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:33.814655Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000e10/r3tmp/tmpJnT6pm/pdisk_1.dat 2025-03-31T16:03:34.452628Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:34.484687Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:03:34.484786Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:03:34.487952Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32251, node 1 2025-03-31T16:03:34.595767Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:03:34.595820Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:03:34.595833Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:03:34.595987Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22748 TClient is connected to server localhost:22748 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:03:35.235350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:03:35.254379Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:03:35.269282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:35.470006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:35.657417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:35.743831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:37.545038Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004971288325065:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:03:37.545212Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:03:37.855599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-31T16:03:37.884039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-31T16:03:37.909306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-31T16:03:37.937687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-31T16:03:37.972801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-31T16:03:38.025184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-31T16:03:38.068485Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004975583292871:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:03:38.068603Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:03:38.068815Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004975583292876:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:03:38.072344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-31T16:03:38.100735Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488004975583292878:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:03:38.179634Z node 1 :TX_PROXY ERROR: Actor# [1:7488004975583292932:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:03:38.813944Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488004954108454122:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:38.814021Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:03:39.132374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:03:40.108398Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710673. Ctx: { TraceId: 01jqpf3exdenp7sz0ftjssntyx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmMwNTBhODItYjllZjg2MWEtNmQ3OTBlMTktNjdmZTEzMTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:03:40.111967Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710676. Ctx: { TraceId: 01jqpf3exd7f5chndcw911x2kg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTc0ZjdmZGMtYzhhMmExNGEtN2JjNTFhZWItOTZlY2UzNGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:03:40.112843Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710672. Ctx: { TraceId: 01jqpf3exd9eyp027txh6mwbp1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTQwOWE4YTgtMzU0ZjllNTUtYjQ3N2M1ODYtNDgyZTk2ODU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:03:40.114138Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710674. Ctx: { TraceId: 01jqpf3exc9d92tekeps6tt89w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGM2NDk1YzgtMTZmYTJlYTUtMjA2MjRjZmEtYjMwNDk4OWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:03:40.116472Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01jqpf3exd8v1cktz1vbhq788r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTcyYzM4NDgtYzJlYmZlMzgtMWY3Yjc1NDUtODA0MjM1NmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:03:40.165521Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710677. Ctx: { TraceId: 01jqpf3exn3350mgtxgzds3mh9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjM5YmIyNWEtOTg3MTFmN2UtMjI0NWM4ZGQtZjBkYjZkMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:03:40.168681Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710678. Ctx: { TraceId: 01jqpf3exmeaz9pmgkzta70rb5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjUyNzE1ZGQtN2NkMTViN2YtNDQ4YWYzZmQtZGJiZmRjOTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:03:40.169558Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710679. 
Ctx: { TraceId: 01jqpf3exd7f5chndcw911x2kg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTc0ZjdmZGMtYzhhMmExNGEtN2JjNTFhZWItOTZlY2UzNGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:03:40.185430Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710680. Ctx: { TraceId: 01jqpf3exdc5q1j8437ghg75kv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2ZiNTA1YzctOWU1MWE4MjItZmU1MzU2My0zNDE4ODdhMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:03:40.197469Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710681. Ctx: { TraceId: 01jqpf3exd9eyp027txh6mwbp1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTQwOWE4YTgtMzU0ZjllNTUtYjQ3N2M1ODYtNDgyZTk2ODU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:03:40.198105Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710682. Ctx: { TraceId: 01jqpf3exd8v1cktz1vbhq788r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTcyYzM4NDgtYzJlYmZlMzgtMWY3Yjc1NDUtODA0MjM1NmU=, CurrentExecutionId: , CustomerSuppl ... sion/3?node_id=2&id=ZmViYjcxMGEtYTRlYWZjODQtN2FjNTQwN2ItYzdiZTgxNGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:05:31.532816Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976714496. Ctx: { TraceId: 01jqpf6vqw6axn6mph1sycsj7k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YWViNjAwNTYtNzk0Y2Q1ZTItNzNmOGU5MzQtYTU3NWE2YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:05:31.547756Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976714497. Ctx: { TraceId: 01jqpf6vr97kp7yqm4hmcvmfp0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=N2MxNDViMTEtNTEzNTRlOGItYzhlZWVlZTUtODNmZWYxMzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:05:31.550443Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976714498. Ctx: { TraceId: 01jqpf6vrg0mkfme70p5n6sc04, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzExZDUzNTQtMzMyNjRkNTYtOTBkMWUxYzktMjZlYTFhM2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:05:31.560953Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976714499. Ctx: { TraceId: 01jqpf6vrg0mkfme70p5n6sc04, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzExZDUzNTQtMzMyNjRkNTYtOTBkMWUxYzktMjZlYTFhM2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:05:31.610108Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976714500. Ctx: { TraceId: 01jqpf6vtdfargwdps9v6dap23, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NmM4OGE3MTItY2FlMTU3MGEtM2NhOWM4ZDEtZDRiYjExZTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:05:31.611964Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976714502. Ctx: { TraceId: 01jqpf6vr97kp7yqm4hmcvmfp0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=N2MxNDViMTEtNTEzNTRlOGItYzhlZWVlZTUtODNmZWYxMzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:05:31.614842Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976714501. 
Ctx: { TraceId: 01jqpf6vtd2qm6ktsshn46fakc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmViYjcxMGEtYTRlYWZjODQtN2FjNTQwN2ItYzdiZTgxNGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:05:31.628827Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976714503. Ctx: { TraceId: 01jqpf6vtd1jbp7rpt04kp8b16, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzIzNWEzNTgtMThmZmIwZDAtZWQyZTM0Y2ItN2M0NjNlZmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:05:31.639361Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976714504. Ctx: { TraceId: 01jqpf6vv67zv0z29rbw5b7yq3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YWViNjAwNTYtNzk0Y2Q1ZTItNzNmOGU5MzQtYTU3NWE2YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:05:31.651815Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976714505. Ctx: { TraceId: 01jqpf6vtd1jbp7rpt04kp8b16, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzIzNWEzNTgtMThmZmIwZDAtZWQyZTM0Y2ItN2M0NjNlZmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:05:31.677974Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976714506. Ctx: { TraceId: 01jqpf6vwkcpc5c1k7g70ev10c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzExZDUzNTQtMzMyNjRkNTYtOTBkMWUxYzktMjZlYTFhM2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:05:31.679956Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976714507. Ctx: { TraceId: 01jqpf6vwk2wszy09m05px1h3s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NmM4OGE3MTItY2FlMTU3MGEtM2NhOWM4ZDEtZDRiYjExZTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:05:31.680933Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976714508. Ctx: { TraceId: 01jqpf6vwmevr3ce2bkwwfya6v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmViYjcxMGEtYTRlYWZjODQtN2FjNTQwN2ItYzdiZTgxNGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:05:31.696084Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976714509. Ctx: { TraceId: 01jqpf6vwkcpc5c1k7g70ev10c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzExZDUzNTQtMzMyNjRkNTYtOTBkMWUxYzktMjZlYTFhM2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:05:31.697197Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976714510. Ctx: { TraceId: 01jqpf6vwk2wszy09m05px1h3s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NmM4OGE3MTItY2FlMTU3MGEtM2NhOWM4ZDEtZDRiYjExZTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:05:31.698851Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976714512. Ctx: { TraceId: 01jqpf6vwmevr3ce2bkwwfya6v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmViYjcxMGEtYTRlYWZjODQtN2FjNTQwN2ItYzdiZTgxNGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:05:31.703324Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976714511. 
Ctx: { TraceId: 01jqpf6vx4fzp0zmzgzk1enh8r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=N2MxNDViMTEtNTEzNTRlOGItYzhlZWVlZTUtODNmZWYxMzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:05:31.706098Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976714514. Ctx: { TraceId: 01jqpf6vwmevr3ce2bkwwfya6v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmViYjcxMGEtYTRlYWZjODQtN2FjNTQwN2ItYzdiZTgxNGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:05:31.709092Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976714513. Ctx: { TraceId: 01jqpf6vwkcpc5c1k7g70ev10c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzExZDUzNTQtMzMyNjRkNTYtOTBkMWUxYzktMjZlYTFhM2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:05:31.718415Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976714515. Ctx: { TraceId: 01jqpf6vx49zqkkyq8bh6fa5wr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YWViNjAwNTYtNzk0Y2Q1ZTItNzNmOGU5MzQtYTU3NWE2YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:05:31.735844Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976714516. Ctx: { TraceId: 01jqpf6vx49zqkkyq8bh6fa5wr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YWViNjAwNTYtNzk0Y2Q1ZTItNzNmOGU5MzQtYTU3NWE2YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:05:31.755631Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976714517. Ctx: { TraceId: 01jqpf6vz3exg72k5r4rv1x8rr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzIzNWEzNTgtMThmZmIwZDAtZWQyZTM0Y2ItN2M0NjNlZmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:05:31.766163Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976714518. Ctx: { TraceId: 01jqpf6vz3exg72k5r4rv1x8rr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzIzNWEzNTgtMThmZmIwZDAtZWQyZTM0Y2ItN2M0NjNlZmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:05:31.772382Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976714519. Ctx: { TraceId: 01jqpf6vzf0xe250njbtsc2s98, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=N2MxNDViMTEtNTEzNTRlOGItYzhlZWVlZTUtODNmZWYxMzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:05:31.777925Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976714520. Ctx: { TraceId: 01jqpf6vzjc3gkftgfbk18gmh2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NmM4OGE3MTItY2FlMTU3MGEtM2NhOWM4ZDEtZDRiYjExZTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:05:31.783633Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976714521. Ctx: { TraceId: 01jqpf6vzv66g4kpb1hj6776kz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmViYjcxMGEtYTRlYWZjODQtN2FjNTQwN2ItYzdiZTgxNGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:05:31.788870Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976714522. 
Ctx: { TraceId: 01jqpf6vzf0xe250njbtsc2s98, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=N2MxNDViMTEtNTEzNTRlOGItYzhlZWVlZTUtODNmZWYxMzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:05:31.797058Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976714524. Ctx: { TraceId: 01jqpf6vzjc3gkftgfbk18gmh2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NmM4OGE3MTItY2FlMTU3MGEtM2NhOWM4ZDEtZDRiYjExZTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS 2025-03-31T16:05:31.798962Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976714523. Ctx: { TraceId: 01jqpf6vzyb1g84eph3024d7e4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzExZDUzNTQtMzMyNjRkNTYtOTBkMWUxYzktMjZlYTFhM2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:05:31.809022Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976714525. Ctx: { TraceId: 01jqpf6vzyb1g84eph3024d7e4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzExZDUzNTQtMzMyNjRkNTYtOTBkMWUxYzktMjZlYTFhM2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS 2025-03-31T16:05:31.820499Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976714526. Ctx: { TraceId: 01jqpf6w1479qjb2048q38w9ms, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWM2Njc1NmYtMmU1OTFlNmMtZWIwNzA2NGMtOGFjM2MxOWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:05:31.836485Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976714527. Ctx: { TraceId: 01jqpf6w1479qjb2048q38w9ms, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWM2Njc1NmYtMmU1OTFlNmMtZWIwNzA2NGMtOGFjM2MxOWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS 2025-03-31T16:05:31.839163Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976714528. Ctx: { TraceId: 01jqpf6w1c25g3e87w8ngpjmx6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzIzNWEzNTgtMThmZmIwZDAtZWQyZTM0Y2ItN2M0NjNlZmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:05:31.850049Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976714529. Ctx: { TraceId: 01jqpf6w1c25g3e87w8ngpjmx6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzIzNWEzNTgtMThmZmIwZDAtZWQyZTM0Y2ItN2M0NjNlZmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS 2025-03-31T16:05:31.885346Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976714530. Ctx: { TraceId: 01jqpf6w1479qjb2048q38w9ms, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWM2Njc1NmYtMmU1OTFlNmMtZWIwNzA2NGMtOGFjM2MxOWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root finished with status: SUCCESS >> TConsoleConfigSubscriptionTests::TestNotificationForRestartedServer [GOOD] >> TConsoleConfigSubscriptionTests::TestAddSubscriptionIdempotency >> DataShardReadIteratorSysTables::ShouldRead [GOOD] >> DataShardReadIteratorSysTables::ShouldNotReadUserTableUsingLocalTid >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-fifo] >> DataShardVolatile::UpsertDependenciesShardsRestart+UseSink [GOOD] >> DataShardVolatile::UpsertDependenciesShardsRestart-UseSink >> TConsoleTests::TestAlterUnknownTenantExtSubdomain [GOOD] >> TConsoleTests::TestAlterBorrowedStorage >> DataShardReadIterator::ShouldReadRangeArrow [GOOD] >> DataShardReadIterator::ShouldReadNoColumnsKeysRequestCellVec >> TConsoleConfigSubscriptionTests::TestAddSubscriptionIdempotency [GOOD] >> TConsoleConfigSubscriptionTests::TestConfigNotificationRetries >> DataShardReadIterator::ShouldReceiveErrorAfterSplitWhenExhausted [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKey+EvWrite >> TConsoleTests::TestTenantGenerationExtSubdomain [GOOD] >> TConsoleTests::TestSchemeShardErrorForwarding >> DataShardReadIterator::ShouldNotReadAfterCancel [GOOD] >> DataShardReadIterator::ShouldLimitReadRangeChunk1Limit100 >> DataShardReadIterator::ShouldReadRangeInclusiveEndsArrow [GOOD] >> DataShardReadIterator::ShouldReadRangeReverse >> Viewer::AuthorizeYdbTokenWithDatabaseAttributes [GOOD] >> DataShardReadIterator::ShouldReadKeyArrow [GOOD] >> DataShardReadIterator::ShouldReadKeyOnlyValueColumn >> KqpSystemView::QueryStatsSimple [GOOD] >> DataShardReadIterator::ShouldHandleOutOfOrderReadAck [GOOD] >> DataShardReadIterator::ShouldHandleReadAckWhenExhaustedRangeRead >> DataShardReadIteratorBatchMode::ShouldHandleReadAck [GOOD] >> DataShardReadIteratorConsistency::LocalSnapshotReadWithPlanQueueRace >> DataShardReadIterator::ShouldReverseReadMultipleKeysOneByOne [GOOD] >> DataShardReadIterator::ShouldReverseReadMultipleRanges |76.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut |76.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut |76.0%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/test-results/unittest/{meta.json ... results_accumulator.log} |76.0%| [LD] {RESULT} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut >> YdbIndexTable::MultiShardTableOneUniqIndex [GOOD] >> YdbIndexTable::MultiShardTableOneUniqIndexDataColumn >> DataShardReadIteratorSysTables::ShouldNotReadUserTableUsingLocalTid [GOOD] >> DataShardReadIteratorSysTables::ShouldNotAllowArrow >> AsyncIndexChangeExchange::ShouldRejectChangesOnQueueOverflowByCount [GOOD] >> AsyncIndexChangeExchange::ShouldRejectChangesOnQueueOverflowBySize ------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::AuthorizeYdbTokenWithDatabaseAttributes [GOOD] Test command err: 2025-03-31T16:05:15.344086Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:726:2426], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-03-31T16:05:15.344266Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-31T16:05:15.344332Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
2025-03-31T16:05:15.346331Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:723:2368], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-03-31T16:05:15.346691Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-31T16:05:15.346840Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
test_client.cpp: SetPath # SectorMap:test-client[:2000]
2025-03-31T16:05:15.778142Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:05:16.008246Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt
2025-03-31T16:05:16.027757Z node 1 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000
2025-03-31T16:05:16.513411Z node 1 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000
TServer::EnableGrpc on GrpcPort 2310, node 1
TClient is connected to server localhost:15449
2025-03-31T16:05:16.875814Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T16:05:16.875881Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T16:05:16.875920Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T16:05:16.876730Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
2025-03-31T16:05:20.310429Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488005416282890165:2061];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:05:20.310535Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # SectorMap:test-client[:2000]
2025-03-31T16:05:20.512786Z node 3 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:05:20.533591Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:05:20.533714Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:05:20.542451Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 5207, node 3
2025-03-31T16:05:20.610983Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T16:05:20.611025Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T16:05:20.611037Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T16:05:20.611262Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:19934
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:05:20.965184Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:05:20.990243Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-31T16:05:20.992780Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:05:23.175850Z node 3 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-31T16:05:23.175910Z node 3 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-31T16:05:23.267042Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488005429167792746:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:05:23.267041Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488005429167792754:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:05:23.267184Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:05:23.271648Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480
2025-03-31T16:05:23.281141Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488005429167792760:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking }
2025-03-31T16:05:23.348874Z node 3 :TX_PROXY ERROR: Actor# [3:7488005429167792813:2356] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-31T16:05:23.854080Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YWQzYWEwNWYtYWE1MWIyOTMtY2MxMWNhOTgtYmNkOGJjNTQ=, ActorId: [3:7488005429167792744:2339], ActorState: ExecuteState, TraceId: 01jqpf6kp19sc7f9a9sh8s9ndx, Create QueryResponse for error on request, msg: Scheme operations cannot be executed inside transaction
2025-03-31T16:05:26.137041Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488005440561787981:2060];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:05:26.137100Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # SectorMap:test-client[:2000]
2025-03-31T16:05:26.219045Z node 4 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:05:26.263569Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:05:26.263662Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:05:26.265310Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 9503, node 4
2025-03-31T16:05:26.312869Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T16:05:26.312893Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T16:05:26.312900Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T16:05:26.313038Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:26378
2025-03-31T16:05:26.712448Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480
2025-03-31T16:05:26.714612Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:05:29.231565Z node 4 :GRPC_SERVER DEBUG: Got grpc request# request auth and check internal request, traceId# undef, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# /Root, peer# , grpcInfo# undef, timeout# 9.999909s
2025-03-31T16:05:29.231699Z node 4 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1
2025-03-31T16:05:29.231725Z node 4 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success
2025-03-31T16:05:29.994893Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488005453446690550:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:05:29.995002Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:05:30.156167Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488005457741657860:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:05:30.156292Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:05:30.156712Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488005457741657865:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found ... false data# peer# 2025-03-31T16:05:40.495667Z node 5 :GRPC_SERVER DEBUG: [0x51b0004cf980] received request Name# Coordination/CreateNode ok# false data# peer# 2025-03-31T16:05:40.495721Z node 5 :GRPC_SERVER DEBUG: [0x51b0004cf280] received request Name# Coordination/AlterNode ok# false data# peer# 2025-03-31T16:05:40.495924Z node 5 :GRPC_SERVER DEBUG: [0x51b0004ceb80] received request Name# Coordination/DropNode ok# false data# peer# 2025-03-31T16:05:40.496005Z node 5 :GRPC_SERVER DEBUG: [0x51b0004ce480] received request Name# Coordination/DescribeNode ok# false data# peer# 2025-03-31T16:05:40.496207Z node 5 :GRPC_SERVER DEBUG: [0x51b0004cdd80] received request Name# CreateDatabase ok# false data# peer# 2025-03-31T16:05:40.496315Z node 5 :GRPC_SERVER DEBUG: [0x51b0004ccf80] received request Name# GetDatabaseStatus ok# false data# peer# 2025-03-31T16:05:40.496475Z node 5 :GRPC_SERVER DEBUG: [0x51b0004cd680] received request Name# AlterDatabase ok# false data# peer# 2025-03-31T16:05:40.496577Z node 5 :GRPC_SERVER DEBUG: [0x51b0004cc880] received request Name# ListDatabases ok# false data# peer# 2025-03-31T16:05:40.496739Z node 5 :GRPC_SERVER DEBUG: [0x51b0004cc180] received request Name# RemoveDatabase ok# false data# peer# 2025-03-31T16:05:40.496877Z node 5 :GRPC_SERVER DEBUG: [0x51b0004cba80] received request Name# DescribeDatabaseOptions ok# false data# peer# 2025-03-31T16:05:40.497017Z node 5 :GRPC_SERVER DEBUG: [0x51b0004cb380] received request Name# GetScaleRecommendation ok# false data# peer# 2025-03-31T16:05:40.497132Z node 5 :GRPC_SERVER DEBUG: [0x51b0004a9880] received request Name# ListEndpoints ok# false data# peer# 2025-03-31T16:05:40.497267Z node 5 :GRPC_SERVER DEBUG: [0x51b0004cac80] received request Name# WhoAmI ok# false data# peer# 2025-03-31T16:05:40.497386Z node 5 :GRPC_SERVER DEBUG: [0x51b0004a9f80] received request Name# NodeRegistration ok# false data# peer# 2025-03-31T16:05:40.497519Z node 5 :GRPC_SERVER DEBUG: [0x51b0004a9180] received request Name# Scan ok# false data# peer# 2025-03-31T16:05:40.497627Z node 5 :GRPC_SERVER DEBUG: [0x51b0004a8a80] received request Name# GetShardLocations ok# false data# peer# 2025-03-31T16:05:40.497774Z node 5 :GRPC_SERVER DEBUG: [0x51b0004a8380] received request Name# DescribeTable ok# false data# peer# 2025-03-31T16:05:40.497915Z node 5 :GRPC_SERVER DEBUG: [0x51b0004a7c80] received request Name# CreateSnapshot ok# false data# peer# 2025-03-31T16:05:40.498050Z node 5 :GRPC_SERVER DEBUG: [0x51b0004a7580] received request Name# RefreshSnapshot ok# false data# peer# 2025-03-31T16:05:40.498183Z node 5 :GRPC_SERVER DEBUG: [0x51b0004a6e80] received request Name# DiscardSnapshot ok# false data# peer# 2025-03-31T16:05:40.498312Z node 5 :GRPC_SERVER DEBUG: [0x51b0004a6080] received request Name# List ok# false data# peer# 2025-03-31T16:05:40.498441Z node 5 :GRPC_SERVER DEBUG: [0x51b0004a5980] received request Name# RateLimiter/CreateResource ok# false data# peer# 2025-03-31T16:05:40.498576Z node 5 :GRPC_SERVER DEBUG: [0x51b0004a5280] received request Name# RateLimiter/AlterResource ok# false data# peer# 2025-03-31T16:05:40.498711Z node 5 :GRPC_SERVER DEBUG: [0x51b0004a4b80] received request Name# RateLimiter/DropResource ok# false data# peer# 2025-03-31T16:05:40.498839Z node 5 :GRPC_SERVER DEBUG: [0x51b0004a4480] received request Name# RateLimiter/ListResources ok# false data# peer# 2025-03-31T16:05:40.507874Z node 5 :GRPC_SERVER DEBUG: [0x51b0004a3d80] received request Name# 
RateLimiter/DescribeResource ok# false data# peer# 2025-03-31T16:05:40.508326Z node 5 :GRPC_SERVER DEBUG: [0x51b0004a3680] received request Name# RateLimiter/AcquireResource ok# false data# peer# 2025-03-31T16:05:40.508617Z node 5 :GRPC_SERVER DEBUG: [0x51b0004a6780] received request Name# CreateStream ok# false data# peer# 2025-03-31T16:05:40.508890Z node 5 :GRPC_SERVER DEBUG: [0x51b0004a2880] received request Name# ListStreams ok# false data# peer# 2025-03-31T16:05:40.509173Z node 5 :GRPC_SERVER DEBUG: [0x51b0004a2180] received request Name# DeleteStream ok# false data# peer# 2025-03-31T16:05:40.509422Z node 5 :GRPC_SERVER DEBUG: [0x51b0004a2f80] received request Name# DescribeStream ok# false data# peer# 2025-03-31T16:05:40.509671Z node 5 :GRPC_SERVER DEBUG: [0x51b0004a1a80] received request Name# ListShards ok# false data# peer# 2025-03-31T16:05:40.509934Z node 5 :GRPC_SERVER DEBUG: [0x51b000494880] received request Name# SetWriteQuota ok# false data# peer# 2025-03-31T16:05:40.510193Z node 5 :GRPC_SERVER DEBUG: [0x51b000496480] received request Name# UpdateStream ok# false data# peer# 2025-03-31T16:05:40.510463Z node 5 :GRPC_SERVER DEBUG: [0x51b0004a1380] received request Name# PutRecord ok# false data# peer# 2025-03-31T16:05:40.510709Z node 5 :GRPC_SERVER DEBUG: [0x51b0004a0c80] received request Name# PutRecords ok# false data# peer# 2025-03-31T16:05:40.511253Z node 5 :GRPC_SERVER DEBUG: [0x51b00049fe80] received request Name# GetShardIterator ok# false data# peer# 2025-03-31T16:05:40.511581Z node 5 :GRPC_SERVER DEBUG: [0x51b00049f780] received request Name# SubscribeToShard ok# false data# peer# 2025-03-31T16:05:40.511712Z node 5 :GRPC_SERVER DEBUG: [0x51b0004a0580] received request Name# GetRecords ok# false data# peer# 2025-03-31T16:05:40.511869Z node 5 :GRPC_SERVER DEBUG: [0x51b00049e980] received request Name# DescribeLimits ok# false data# peer# 2025-03-31T16:05:40.512021Z node 5 :GRPC_SERVER DEBUG: [0x51b00049e280] received request Name# DescribeStreamSummary ok# false data# peer# 2025-03-31T16:05:40.512145Z node 5 :GRPC_SERVER DEBUG: [0x51b00049db80] received request Name# DecreaseStreamRetentionPeriod ok# false data# peer# 2025-03-31T16:05:40.512298Z node 5 :GRPC_SERVER DEBUG: [0x51b00049d480] received request Name# IncreaseStreamRetentionPeriod ok# false data# peer# 2025-03-31T16:05:40.512422Z node 5 :GRPC_SERVER DEBUG: [0x51b00049cd80] received request Name# UpdateShardCount ok# false data# peer# 2025-03-31T16:05:40.512541Z node 5 :GRPC_SERVER DEBUG: [0x51b00049c680] received request Name# UpdateStreamMode ok# false data# peer# 2025-03-31T16:05:40.512695Z node 5 :GRPC_SERVER DEBUG: [0x51b00049bf80] received request Name# RegisterStreamConsumer ok# false data# peer# 2025-03-31T16:05:40.512793Z node 5 :GRPC_SERVER DEBUG: [0x51b00049b880] received request Name# DeregisterStreamConsumer ok# false data# peer# 2025-03-31T16:05:40.512963Z node 5 :GRPC_SERVER DEBUG: [0x51b00049b180] received request Name# DescribeStreamConsumer ok# false data# peer# 2025-03-31T16:05:40.513041Z node 5 :GRPC_SERVER DEBUG: [0x51b00049aa80] received request Name# ListStreamConsumers ok# false data# peer# 2025-03-31T16:05:40.513228Z node 5 :GRPC_SERVER DEBUG: [0x51b00049a380] received request Name# AddTagsToStream ok# false data# peer# 2025-03-31T16:05:40.513291Z node 5 :GRPC_SERVER DEBUG: [0x51b000499c80] received request Name# DisableEnhancedMonitoring ok# false data# peer# 2025-03-31T16:05:40.513483Z node 5 :GRPC_SERVER DEBUG: [0x51b000499580] received request Name# EnableEnhancedMonitoring ok# false 
data# peer# 2025-03-31T16:05:40.513535Z node 5 :GRPC_SERVER DEBUG: [0x51b000498e80] received request Name# ListTagsForStream ok# false data# peer# 2025-03-31T16:05:40.513731Z node 5 :GRPC_SERVER DEBUG: [0x51b000498780] received request Name# MergeShards ok# false data# peer# 2025-03-31T16:05:40.513779Z node 5 :GRPC_SERVER DEBUG: [0x51b000498080] received request Name# RemoveTagsFromStream ok# false data# peer# 2025-03-31T16:05:40.514028Z node 5 :GRPC_SERVER DEBUG: [0x51b000497280] received request Name# StartStreamEncryption ok# false data# peer# 2025-03-31T16:05:40.514032Z node 5 :GRPC_SERVER DEBUG: [0x51b000497980] received request Name# SplitShard ok# false data# peer# 2025-03-31T16:05:40.514255Z node 5 :GRPC_SERVER DEBUG: [0x51b000496b80] received request Name# StopStreamEncryption ok# false data# peer# 2025-03-31T16:05:40.514273Z node 5 :GRPC_SERVER DEBUG: [0x51b000495d80] received request Name# SelfCheck ok# false data# peer# 2025-03-31T16:05:40.514507Z node 5 :GRPC_SERVER DEBUG: [0x51b000495680] received request Name# NodeCheck ok# false data# peer# 2025-03-31T16:05:40.514524Z node 5 :GRPC_SERVER DEBUG: [0x51b000492c80] received request Name# CreateSession ok# false data# peer# 2025-03-31T16:05:40.514755Z node 5 :GRPC_SERVER DEBUG: [0x51b000492580] received request Name# DeleteSession ok# false data# peer# 2025-03-31T16:05:40.514769Z node 5 :GRPC_SERVER DEBUG: [0x51b000491e80] received request Name# AttachSession ok# false data# peer# 2025-03-31T16:05:40.515004Z node 5 :GRPC_SERVER DEBUG: [0x51b000491080] received request Name# BeginTransaction ok# false data# peer# 2025-03-31T16:05:40.515037Z node 5 :GRPC_SERVER DEBUG: [0x51b000490980] received request Name# CommitTransaction ok# false data# peer# 2025-03-31T16:05:40.520225Z node 5 :GRPC_SERVER DEBUG: [0x51b000490280] received request Name# RollbackTransaction ok# false data# peer# 2025-03-31T16:05:40.520564Z node 5 :GRPC_SERVER DEBUG: [0x51b000494f80] received request Name# ExecuteQuery ok# false data# peer# 2025-03-31T16:05:40.520889Z node 5 :GRPC_SERVER DEBUG: [0x51b000493a80] received request Name# ExecuteScript ok# false data# peer# 2025-03-31T16:05:40.521170Z node 5 :GRPC_SERVER DEBUG: [0x51b000493380] received request Name# FetchScriptResults ok# false data# peer# 2025-03-31T16:05:40.521443Z node 5 :GRPC_SERVER DEBUG: [0x51b00048fb80] received request Name# ExecuteTabletMiniKQL ok# false data# peer# 2025-03-31T16:05:40.521710Z node 5 :GRPC_SERVER DEBUG: [0x51b00048f480] received request Name# ChangeTabletSchema ok# false data# peer# 2025-03-31T16:05:40.526142Z node 5 :GRPC_SERVER DEBUG: [0x51b00048ed80] received request Name# RestartTablet ok# false data# peer# 2025-03-31T16:05:40.526477Z node 5 :GRPC_SERVER DEBUG: [0x51b0004c5880] received request Name# CreateLogStore ok# false data# peer# 2025-03-31T16:05:40.526798Z node 5 :GRPC_SERVER DEBUG: [0x51b0004c5180] received request Name# DescribeLogStore ok# false data# peer# 2025-03-31T16:05:40.527072Z node 5 :GRPC_SERVER DEBUG: [0x51b0004c4380] received request Name# DropLogStore ok# false data# peer# 2025-03-31T16:05:40.527380Z node 5 :GRPC_SERVER DEBUG: [0x51b0004c3c80] received request Name# AlterLogStore ok# false data# peer# 2025-03-31T16:05:40.527647Z node 5 :GRPC_SERVER DEBUG: [0x51b0004c2e80] received request Name# CreateLogTable ok# false data# peer# 2025-03-31T16:05:40.527915Z node 5 :GRPC_SERVER DEBUG: [0x51b0004c2780] received request Name# DescribeLogTable ok# false data# peer# 2025-03-31T16:05:40.528197Z node 5 :GRPC_SERVER DEBUG: [0x51b0004c1980] received 
request Name# DropLogTable ok# false data# peer# 2025-03-31T16:05:40.528462Z node 5 :GRPC_SERVER DEBUG: [0x51b0004c1280] received request Name# AlterLogTable ok# false data# peer# 2025-03-31T16:05:40.528699Z node 5 :GRPC_SERVER DEBUG: [0x51b0004c0b80] received request Name# Login ok# false data# peer# 2025-03-31T16:05:40.528948Z node 5 :GRPC_SERVER DEBUG: [0x51b0004bfd80] received request Name# DescribeReplication ok# false data# peer# 2025-03-31T16:05:40.529218Z node 5 :GRPC_SERVER DEBUG: [0x51b0004be180] received request Name# DescribeView ok# false data# peer#
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::QueryStatsSimple [GOOD]
Test command err: Trying to start YDB, gRPC: 10307, MsgBus: 21488 2025-03-31T16:04:21.343840Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005162244756086:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:21.343898Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:04:21.561448Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488005163362742673:2274];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:21.604279Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:04:21.939254Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488005161452985400:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:21.939795Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00184d/r3tmp/tmpwWMZUg/pdisk_1.dat 2025-03-31T16:04:22.491290Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:04:22.619858Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:04:22.823962Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:22.873157Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:22.873249Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:22.880089Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:22.880161Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:22.880355Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:22.880405Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:22.896034Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle
TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:04:22.896080Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-03-31T16:04:22.896735Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:04:22.911071Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:04:22.911365Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10307, node 1 2025-03-31T16:04:23.393657Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:23.393683Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:23.393689Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:23.393848Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21488 TClient is connected to server localhost:21488 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:25.080557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:25.201638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:25.834969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:26.338878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:04:26.344416Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488005162244756086:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:26.344475Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; waiting... 
2025-03-31T16:04:26.519890Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488005163362742673:2274];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:26.519945Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:26.525247Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488005161452985400:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:26.525295Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:04:26.588399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:04:28.808089Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005192309529182:2370], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:28.808236Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:29.213843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:04:29.314260Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:04:29.433112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:04:29.570720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:04:29.656029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:04:29.727550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:04:29.838533Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005196604497177:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:29.838620Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:29.839046Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005196604497182:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:04:29.844932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:04:29.891317Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005196604497184:2412], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:04:29.991025Z node 1 :TX_PROXY ERROR: Actor# [1:7488005196604497259:4119] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:04:32.146562Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437072115, txId: 281474976710673] shutting down 2025-03-31T16:04:32.443565Z node 2 :BS_PROXY_PUT ERROR: [368d4c65eebefa1e] Result# TEvPutResult {Id# [72075186224037891:1:22:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037891:1:22:0:0:41:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "BS_QUEUE: event undelivered", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2025-03-31T16:04:32.475343Z node 3 :BS_PROXY_PUT ERROR: [f74e18160bc3cb12] Result# TEvPutResult {Id# [72075186224037889:1:22:0:0:41:0] Status# E ... us: 65330 2025-03-31T16:05:28.772994Z node 16 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[16:7488005449437393523:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:05:28.773076Z node 16 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:05:28.789652Z node 17 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[17:7488005447903348004:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:05:28.789770Z node 17 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:05:28.795661Z node 18 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[18:7488005450564905118:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:05:28.796941Z node 18 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00184d/r3tmp/tmpYP2pn0/pdisk_1.dat 2025-03-31T16:05:29.110099Z node 16 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:05:29.152121Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:05:29.152244Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:05:29.154284Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:05:29.154392Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:05:29.154558Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:05:29.154621Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: 
Disconnected -> Connecting 2025-03-31T16:05:29.156564Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:05:29.156917Z node 16 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 17 Cookie 17 2025-03-31T16:05:29.156961Z node 16 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 18 Cookie 18 2025-03-31T16:05:29.157542Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:05:29.157770Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5860, node 16 2025-03-31T16:05:29.222681Z node 16 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:05:29.222704Z node 16 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:05:29.222714Z node 16 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:05:29.222865Z node 16 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65330 TClient is connected to server localhost:65330 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:05:29.900062Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:05:29.937168Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:05:30.047359Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:05:30.221558Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:05:30.332805Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:05:33.775370Z node 16 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[16:7488005449437393523:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:05:33.775463Z node 16 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:05:33.790913Z node 17 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[17:7488005447903348004:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:05:33.791008Z node 17 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:05:33.803255Z node 18 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[18:7488005450564905118:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:05:33.803343Z node 18 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:05:34.858124Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7488005475207199366:2367], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:05:34.858263Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:05:34.982602Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:05:35.113546Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:05:35.289426Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:05:35.493466Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:05:35.648733Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:05:35.876261Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:05:36.065772Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7488005483797134671:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:05:36.065909Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:05:36.066235Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7488005483797134676:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:05:36.077278Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:05:36.171501Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [16:7488005483797134678:2409], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:05:36.236632Z node 16 :TX_PROXY ERROR: Actor# [16:7488005483797134758:4163] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:05:39.680002Z node 16 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437139655, txId: 281474976710673] shutting down 2025-03-31T16:05:39.841718Z node 18 :BS_PROXY_PUT ERROR: [c6ccaab3c57e6ea8] Result# TEvPutResult {Id# [72075186224037891:1:22:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037891:1:22:0:0:41:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 16 ErrorReasons# [ "BS_QUEUE: event undelivered", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2025-03-31T16:05:39.843636Z node 17 :BS_PROXY_PUT ERROR: [46890ceb3c7483a4] Result# TEvPutResult {Id# [72075186224037889:1:22:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037889:1:22:0:0:41:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 16 ErrorReasons# [ "BS_QUEUE: event undelivered", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12
>> TConsoleTests::TestSchemeShardErrorForwarding [GOOD]
>> TConsoleTests::TestScaleRecommenderPolicies
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[std-tables_format_v0]
>> TConsoleTests::TestAlterBorrowedStorage [GOOD]
>> TConsoleTests::TestAlterStorageUnitsOfSharedTenant
|76.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load
|76.0%| [LD] {RESULT} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load
|76.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load
>> DataShardReadIterator::ShouldHandleReadAckWhenExhaustedRangeRead [GOOD]
>> DataShardReadIterator::ShouldHandleReadAckWhenExhaustedRangeReadReverse
>> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKey+EvWrite [GOOD]
>> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKey-EvWrite
|76.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence
|76.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence
|76.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence
>> DataShardReadIterator::ShouldReadRangeReverse [GOOD]
>> DataShardReadIterator::ShouldReadRangeInclusiveEndsMissingLeftRight
|76.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut
|76.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut
|76.1%| [LD] {RESULT} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut
|76.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut
|76.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut
|76.1%| [LD] {RESULT} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut
>> DataShardReadIterator::ShouldLimitReadRangeChunk1Limit100 [GOOD]
>> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit98
>> DataShardReadIteratorConsistency::LocalSnapshotReadWithPlanQueueRace [GOOD]
>> DataShardReadIteratorConsistency::LocalSnapshotReadHasRequiredDependencies
|76.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_data_cleanup/ydb-core-tx-datashard-ut_data_cleanup
|76.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_data_cleanup/ydb-core-tx-datashard-ut_data_cleanup
|76.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_data_cleanup/ydb-core-tx-datashard-ut_data_cleanup
>> DataShardReadIterator::ShouldReverseReadMultipleRanges [GOOD]
>> DataShardReadIterator::ShouldReturnMvccSnapshotFromFuture
>> DataShardReadIterator::ShouldReadNoColumnsKeysRequestCellVec [GOOD]
>> DataShardReadIterator::ShouldReadNoColumnsKeysRequestArrow
>> DataShardReadIterator::ShouldReadKeyOnlyValueColumn [GOOD]
>> DataShardReadIterator::ShouldReadKeyValueColumnAndSomeKeyColumn
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v0]
|76.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence
|76.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence
|76.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-fifo]
>> DataShardReadIteratorSysTables::ShouldNotAllowArrow [GOOD]
>> ReadIteratorExternalBlobs::ExtBlobs
>> DataShardReadIterator::ShouldStopWhenNodeDisconnected [GOOD]
>> DataShardReadIterator::TryCommitLocksPrepared-Volatile-BreakLocks
>> TConsoleTests::TestScaleRecommenderPolicies [GOOD]
>> TConsoleTests::TestScaleRecommenderPoliciesValidation
|76.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers
|76.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers
|76.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers
|76.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/graph/ut/ydb-core-graph-ut
|76.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/graph/ut/ydb-core-graph-ut
|76.8%| [LD] {RESULT} $(B)/ydb/core/graph/ut/ydb-core-graph-ut
>> TConsoleTests::TestAlterStorageUnitsOfSharedTenant [GOOD]
>> TConsoleTests::TestAlterServerlessTenant
|76.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/oltp_workload/tests/ydb-tests-stress-oltp_workload-tests
|77.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/oltp_workload/tests/ydb-tests-stress-oltp_workload-tests
|77.0%| [LD] {RESULT} $(B)/ydb/tests/stress/oltp_workload/tests/ydb-tests-stress-oltp_workload-tests
>> DataShardReadIterator::ShouldHandleReadAckWhenExhaustedRangeReadReverse [GOOD]
>> DataShardReadIterator::ShouldForbidDuplicatedReadId
>> AsyncIndexChangeExchange::ShouldRejectChangesOnQueueOverflowBySize [GOOD]
>> AsyncIndexChangeExchange::ShouldNotReorderChangesOnRace
>> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit98 [GOOD]
>> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit99
>> TConsoleTests::TestScaleRecommenderPoliciesValidation [GOOD]
>> TConsoleTxProcessorTests::TestTxProcessorSingle
>> DataShardVolatile::UpsertDependenciesShardsRestart-UseSink [GOOD]
>> DataShardVolatile::NotCachingAbortingDeletes+UseSink
>> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKey-EvWrite [GOOD]
>> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRange+EvWrite
>> DataShardReadIterator::ShouldReadRangeInclusiveEndsMissingLeftRight [GOOD]
>> DataShardReadIterator::ShouldReadRangeNonInclusiveEnds
|78.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/sql/ydb-tests-sql
|78.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/sql/ydb-tests-sql
|78.7%| [LD] {RESULT} $(B)/ydb/tests/sql/ydb-tests-sql
>> DataShardReadIteratorConsistency::LocalSnapshotReadHasRequiredDependencies [GOOD]
>> DataShardReadIteratorConsistency::LocalSnapshotReadNoUnnecessaryDependencies
|78.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts
|78.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts
|78.7%| [LD] {RESULT} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts
>> DataShardReadIterator::TryCommitLocksPrepared-Volatile-BreakLocks [GOOD]
>> DataShardReadIterator::TryCommitLocksPrepared+Volatile-BreakLocks
>> DataShardReadIterator::ShouldReturnMvccSnapshotFromFuture [GOOD]
>> DataShardReadIterator::ShouldReturnBrokenLockWhenWriteInSeparateTransactions+EvWrite
>> TConsoleTxProcessorTests::TestTxProcessorSingle [GOOD]
>> TConsoleTxProcessorTests::TestTxProcessorSubProcessor
>> TPersQueueTest::ReadFromSeveralPartitionsMigrated [GOOD]
>> TPersQueueTest::SchemeshardRestart
>> DataShardReadIterator::ShouldReadKeyValueColumnAndSomeKeyColumn [GOOD]
>> DataShardReadIterator::ShouldRangeReadReverseLeftNonInclusive
>> DataShardReadIterator::ShouldReadNoColumnsKeysRequestArrow [GOOD]
>> DataShardReadIterator::ShouldReadNoColumnsRangeRequestCellVec
|78.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut
|78.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut
|78.8%| [LD] {RESULT} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut
>> LabeledDbCounters::OneTabletRestart [GOOD]
>> LabeledDbCounters::TwoTablets
>> TConsoleTests::TestAlterServerlessTenant [GOOD]
>> TConsoleTests::TestAttributes
>> DataShardReadIterator::ShouldForbidDuplicatedReadId [GOOD]
>> DataShardReadIterator::ShouldFailUknownColumns
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v1-fifo] [GOOD]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v1-std]
>> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit99 [GOOD]
>> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit100
>> TConsoleTxProcessorTests::TestTxProcessorSubProcessor [GOOD]
>> TConsoleTxProcessorTests::TestTxProcessorTemporary
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_backup/unittest >> IncrementalBackup::ComplexRestoreBackupCollection-WithIncremental [GOOD]
Test command err: 2025-03-31T16:02:33.232327Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:02:33.232578Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:02:33.232756Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001822/r3tmp/tmpq2zFeT/pdisk_1.dat 2025-03-31T16:02:33.653262Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:596:2520], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:02:33.653349Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:02:33.653386Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-03-31T16:02:33.653541Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:593:2518], Recipient [1:409:2404]: {TEvModifySchemeTransaction txid# 1 TabletId# 72057594046644480} 2025-03-31T16:02:33.653575Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-03-31T16:02:33.809710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2025-03-31T16:02:33.809925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:02:33.810167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-03-31T16:02:33.810435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-31T16:02:33.810503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:02:33.810601Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-31T16:02:33.811928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-03-31T16:02:33.812081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-03-31T16:02:33.812184Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-31T16:02:33.812223Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-03-31T16:02:33.812431Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-03-31T16:02:33.812471Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-03-31T16:02:33.812540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:02:33.812616Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-03-31T16:02:33.812653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:02:33.812692Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:02:33.812821Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-31T16:02:33.813290Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-31T16:02:33.813333Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-03-31T16:02:33.813467Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-03-31T16:02:33.813506Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-03-31T16:02:33.813575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:02:33.813616Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 2025-03-31T16:02:33.813653Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:02:33.813735Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-31T16:02:33.814129Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-31T16:02:33.814170Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-03-31T16:02:33.814274Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-03-31T16:02:33.814303Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-03-31T16:02:33.814352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:02:33.814387Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:02:33.814432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2025-03-31T16:02:33.814461Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-31T16:02:33.814496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:02:33.818502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:02:33.819187Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-31T16:02:33.819234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 
2025-03-31T16:02:33.819426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2025-03-31T16:02:33.820780Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877760, Sender [1:601:2525], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046316545 Status: OK ServerId: [1:603:2526] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-03-31T16:02:33.820832Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-03-31T16:02:33.820898Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Handle TEvClientConnected, tabletId: 72057594046316545, status: OK, at schemeshard: 72057594046644480 2025-03-31T16:02:33.821038Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269091328, Sender [1:405:2400], Recipient [1:409:2404]: NKikimrTx.TEvProposeTransactionStatus Status: 16 StepId: 500 TxId: 1 2025-03-31T16:02:33.821547Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:605:2528], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:02:33.821621Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:02:33.821661Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-03-31T16:02:33.821799Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [1:593:2518], Recipient [1:409:2404]: NKikimrScheme.TEvNotifyTxCompletion TxId: 1 2025-03-31T16:02:33.821837Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-03-31T16:02:33.821947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046644480 2025-03-31T16:02:33.821989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2025-03-31T16:02:33.822026Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046644480 2025-03-31T16:02:33.877214Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 273285138, Sender [1:43:2090], Recipient [1:409:2404]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } } ItemKinds: 26 ItemKinds: 34 ItemKinds: 52 ItemKinds: 54 ItemKinds: 73 Local: true } 2025-03-31T16:02:33.877337Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } 2025-03-31T16:02:33.877375Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:02:33.877868Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [RootDataErasureManager] Stop 2025-03-31T16:02:33.877943Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Send TEvConfigNotificationResponse: SubscriptionId: 0 ConfigId { } 2025-03-31T16:02:33.921476Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:02:33.921632Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:02:33.933811Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:02:34.011611Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269091328, Sender [1:405:2400], Recipient [1:409:2404]: NKikimrTx.TEvProposeTransactionStatus Status: 17 StepId: 
500 TxId: 1 2025-03-31T16:02:34.012399Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:636:2544], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:02:34.012442Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:02:34.012471Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-03-31T16:02:34.012602Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269287424, Sender [1:568:2495], Recipient [1:409:2404]: {TEvPlanStep step# 500 MediatorId# 72057594046382081 TabletID 72057594046644480} 2025-03-31T16:02:34.012638Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-31T16:02:34.012726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-31T16:02:34.012863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 0 RawX2: 0 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemes ... 4046644480 2025-03-31T16:05:20.114252Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715668:3 ProgressState 2025-03-31T16:05:20.114319Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-31T16:05:20.114348Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715668:3 progress is 5/7 2025-03-31T16:05:20.114374Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715668 ready parts: 5/7 2025-03-31T16:05:20.114403Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715668:3 progress is 5/7 2025-03-31T16:05:20.114426Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715668 ready parts: 5/7 2025-03-31T16:05:20.114452Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715668, ready parts: 5/7, is published: true 2025-03-31T16:05:20.114725Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [3:409:2404], Recipient [3:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-03-31T16:05:20.114759Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-03-31T16:05:20.114821Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715668:6, at schemeshard: 72057594046644480 2025-03-31T16:05:20.114859Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715668:6 ProgressState 2025-03-31T16:05:20.114924Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-31T16:05:20.114954Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715668:6 progress is 6/7 2025-03-31T16:05:20.114978Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715668 ready parts: 6/7 2025-03-31T16:05:20.115006Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715668:6 progress is 6/7 2025-03-31T16:05:20.115030Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715668 ready parts: 6/7 2025-03-31T16:05:20.115055Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715668, ready parts: 6/7, is published: true 2025-03-31T16:05:20.115430Z node 3 :FLAT_TX_SCHEMESHARD TRACE: 
StateWork, received event# 2146435072, Sender [3:409:2404], Recipient [3:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-03-31T16:05:20.115466Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-03-31T16:05:20.115511Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715668:4, at schemeshard: 72057594046644480 2025-03-31T16:05:20.115543Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715668:4 ProgressState 2025-03-31T16:05:20.115620Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-31T16:05:20.115654Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715668:4 progress is 7/7 2025-03-31T16:05:20.115680Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715668 ready parts: 7/7 2025-03-31T16:05:20.115732Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715668:4 progress is 7/7 2025-03-31T16:05:20.115755Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715668 ready parts: 7/7 2025-03-31T16:05:20.115780Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715668, ready parts: 7/7, is published: true 2025-03-31T16:05:20.115867Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:1254:2962] message: TxId: 281474976715668 2025-03-31T16:05:20.115932Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715668 ready parts: 7/7 2025-03-31T16:05:20.116002Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715668:0 2025-03-31T16:05:20.116072Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715668:0 2025-03-31T16:05:20.116163Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 17] was 2 2025-03-31T16:05:20.116224Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715668:1 2025-03-31T16:05:20.116248Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715668:1 2025-03-31T16:05:20.116283Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 18] was 2 2025-03-31T16:05:20.116306Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715668:2 2025-03-31T16:05:20.116328Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715668:2 2025-03-31T16:05:20.116354Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 19] was 3 2025-03-31T16:05:20.116376Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715668:3 2025-03-31T16:05:20.116411Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715668:3 2025-03-31T16:05:20.116516Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 20] was 3 2025-03-31T16:05:20.116555Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046644480, LocalPathId: 6] was 3 2025-03-31T16:05:20.116602Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the 
parts is done, operation id: 281474976715668:4 2025-03-31T16:05:20.116624Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715668:4 2025-03-31T16:05:20.116681Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 21] was 3 2025-03-31T16:05:20.116705Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046644480, LocalPathId: 12] was 3 2025-03-31T16:05:20.116730Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715668:5 2025-03-31T16:05:20.116751Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715668:5 2025-03-31T16:05:20.116802Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 22] was 3 2025-03-31T16:05:20.116825Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 3 2025-03-31T16:05:20.116854Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715668:6 2025-03-31T16:05:20.116873Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715668:6 2025-03-31T16:05:20.116924Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 23] was 3 2025-03-31T16:05:20.116948Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046644480, LocalPathId: 16] was 3 2025-03-31T16:05:20.117589Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-31T16:05:20.117851Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-31T16:05:20.117982Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-31T16:05:20.118085Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-31T16:05:20.118196Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [3:1254:2962] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976715668 at schemeshard: 72057594046644480 2025-03-31T16:05:20.118846Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [3:1261:2968], Recipient [3:409:2404]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-03-31T16:05:20.118898Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-03-31T16:05:20.118929Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046644480 2025-03-31T16:05:20.277059Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037893, clientId# [3:1556:3203], serverId# [3:1557:3204], sessionId# [0:0:0] 2025-03-31T16:05:20.277277Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jqpf6gkxd1z38jj13mftmdga, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=OTI4OTg5OTctOTQzZWU1MDgtMjVmYjA1MjctYTAxZmIwMzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 10 } }, { items { uint32_value: 2 } items { uint32_value: 20 } }, { items { uint32_value: 3 } items { uint32_value: 30 } }, { items { uint32_value: 4 } items { uint32_value: 40 } }, { items { uint32_value: 5 } items { uint32_value: 50 } } 2025-03-31T16:05:20.444324Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037895, clientId# [3:1585:3220], serverId# [3:1586:3221], sessionId# [0:0:0] 2025-03-31T16:05:20.444568Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01jqpf6gs286fhda1d4t0y3yn5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MWUzOGM1MGItNjA5MWUwMS1kZGZmNWI5Ni0yNDVlZjliOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 11 } items { uint32_value: 101 } }, { items { uint32_value: 21 } items { uint32_value: 201 } }, { items { uint32_value: 31 } items { uint32_value: 301 } }, { items { uint32_value: 41 } items { uint32_value: 401 } }, { items { uint32_value: 51 } items { uint32_value: 501 } } 2025-03-31T16:05:20.585101Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037892, clientId# [3:1614:3237], serverId# [3:1615:3238], sessionId# [0:0:0] 2025-03-31T16:05:20.585280Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01jqpf6gy83jkhmw854nq2e78z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=M2ZkMWZhYzctZTBiMjc3Y2QtODBkODJmMWYtMjE2MTcwOGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 12 } items { uint32_value: 102 } }, { items { uint32_value: 22 } items { uint32_value: 202 } }, { items { uint32_value: 32 } items { uint32_value: 302 } }, { items { uint32_value: 42 } items { uint32_value: 402 } }, { items { uint32_value: 52 } items { uint32_value: 502 } } 2025-03-31T16:05:20.773079Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037894, clientId# [3:1643:3254], serverId# [3:1644:3255], sessionId# [0:0:0] 2025-03-31T16:05:20.773302Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jqpf6h3c67mznm3609gzjq25, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=M2U2ODg1MGEtNTAxNTA2NzktOTFiYTEwY2UtZWQ1YTRhNjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root { items { uint32_value: 13 } items { uint32_value: 103 } }, { items { uint32_value: 23 } items { uint32_value: 203 } }, { items { uint32_value: 33 } items { uint32_value: 303 } }, { items { uint32_value: 43 } items { uint32_value: 403 } }, { items { uint32_value: 53 } items { uint32_value: 503 } } >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_ymq_send_read_delete [GOOD] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_sqs_action_counters [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v1-std] [GOOD] >> DataShardReadIterator::ShouldReadRangeNonInclusiveEnds [GOOD] >> DataShardReadIterator::ShouldReadRangeLeftInclusive >> AsyncIndexChangeExchange::ShouldNotReorderChangesOnRace [GOOD] >> Cdc::AreJsonsEqualReturnsTrueOnEqual [GOOD] >> Cdc::AreJsonsEqualReturnsFalseOnDifferent [GOOD] >> Cdc::AreJsonsEqualFailsOnWildcardInArray [GOOD] >> Cdc::AlterViaTopicService >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRange+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefix+EvWrite >> DataShardReadIteratorConsistency::LocalSnapshotReadNoUnnecessaryDependencies [GOOD] >> DataShardReadIteratorConsistency::LocalSnapshotReadWithConcurrentWrites >> TConsoleTxProcessorTests::TestTxProcessorTemporary [GOOD] >> TConsoleTxProcessorTests::TestTxProcessorRandom >> DataShardReadIterator::ShouldReturnBrokenLockWhenWriteInSeparateTransactions+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenWriteInSeparateTransactions-EvWrite >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_fifo_groups_with_dlq_in_cloud[tables_format_v1] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_clouds >> DataShardReadIterator::ShouldReadNoColumnsRangeRequestCellVec [GOOD] >> DataShardReadIterator::ShouldReadNoColumnsRangeRequestArrow >> DataShardReadIterator::ShouldFailUknownColumns [GOOD] >> DataShardReadIterator::ShouldFailWrongSchema >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_clouds [GOOD] |78.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats |78.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats |78.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats >> DataShardReadIterator::ShouldRangeReadReverseLeftNonInclusive [GOOD] >> DataShardReadIterator::ShouldReadKeyPrefix1 >> TConsoleTests::TestAttributes [GOOD] >> TConsoleTests::TestAlterTenantTooManyStorageResourcesForRunning >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v0-fifo] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-std] >> DataShardReadIterator::TryCommitLocksPrepared+Volatile-BreakLocks [GOOD] >> DataShardReadIterator::TryCommitLocksPrepared-Volatile+BreakLocks |78.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut |78.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut |78.9%| [LD] {RESULT} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit100 [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit101 >> 
TNetClassifierUpdaterTest::TestFiltrationByNetboxCustomFieldsOnly [GOOD] >> TNetClassifierUpdaterTest::TestFiltrationByNetboxTags >> Cdc::AlterViaTopicService [GOOD] >> Cdc::Alter >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v0-fifo] [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v0-std] >> YdbOlapStore::LogGrepNonExisting [GOOD] >> YdbOlapStore::LogGrepExisting >> DataShardVolatile::NotCachingAbortingDeletes+UseSink [GOOD] >> DataShardVolatile::NotCachingAbortingDeletes-UseSink >> DataShardReadIterator::ShouldReadRangeLeftInclusive [GOOD] >> DataShardReadIterator::ShouldReadRangeRightInclusive |78.9%| [TA] $(B)/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/{meta.json ... results_accumulator.log} >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefix+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefix-EvWrite ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest >> TPersQueueTest::DisableDeduplication [GOOD] Test command err: === Server->StartServer(false); 2025-03-31T15:59:47.072857Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488003986646095904:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-31T15:59:47.073768Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T15:59:47.633559Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a3a/r3tmp/tmploxJ17/pdisk_1.dat 2025-03-31T15:59:48.102604Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T15:59:48.115644Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T15:59:48.115745Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T15:59:48.123664Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T15:59:48.123829Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 12374, node 1 2025-03-31T15:59:48.399819Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/0l5c/000a3a/r3tmp/yandexmVdcuv.tmp 2025-03-31T15:59:48.399842Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/0l5c/000a3a/r3tmp/yandexmVdcuv.tmp 2025-03-31T15:59:48.399984Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/0l5c/000a3a/r3tmp/yandexmVdcuv.tmp 2025-03-31T15:59:48.400084Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T15:59:48.459427Z INFO: TTestServer started on Port 7730 GrpcPort 12374 TClient is connected to server localhost:7730 PQClient connected to localhost:12374 === TenantModeEnabled() = 0 === Init PQ - start server on port 12374 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T15:59:49.097403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-03-31T15:59:49.097608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T15:59:49.097793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-03-31T15:59:49.097992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-31T15:59:49.098030Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T15:59:49.098681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-03-31T15:59:49.098788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-03-31T15:59:49.098929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T15:59:49.098960Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-03-31T15:59:49.098973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-03-31T15:59:49.098984Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-03-31T15:59:49.103449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T15:59:49.103493Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-03-31T15:59:49.103514Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-03-31T15:59:49.104360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T15:59:49.104401Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T15:59:49.104431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-31T15:59:49.104573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-03-31T15:59:49.119029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T15:59:49.119574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T15:59:49.119590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2025-03-31T15:59:49.119625Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T15:59:49.119686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-03-31T15:59:49.119823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-03-31T15:59:49.121025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743436789164, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-31T15:59:49.121154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743436789164 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-03-31T15:59:49.121178Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-31T15:59:49.121426Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-03-31T15:59:49.121456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-31T15:59:49.121587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-03-31T15:59:49.121668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, 
LocalPathId: 1], at schemeshard: 72057594046644480 2025-03-31T15:59:49.122400Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-03-31T15:59:49.122423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-03-31T15:59:49.122582Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-03-31T15:59:49.122598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7488003990941063710:2250], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1 2025-03-31T15:59:49.122636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T15:59:49.122661Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-03-31T15:59:49.134254Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-03-31T15:59:49.134274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-03-31T15:59:49.134294Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-03-31T15:59:49.134318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-03-31T15:59:49.134335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 1/1, is published: false 2025-03-31T15:59:49.134362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-03-31T15:59:49.134403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-03-31T15:59:49.134414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710657:0 2025-03-31T15:59:49.134478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-03-31T15:59:49.134491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-03-31T15:59:49.134502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710657, [OwnerId: 72057594046644480, LocalPathId: 1], 3 2025-03-31T15:59:49.139387Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-03-31T15:59:49.139494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644 ... 
rpc_core::ClientChannel *> /-S/contrib/libs/cxxsupp/libcxx/include/__memory/unique_ptr.h:621:26 #2 0x1a517b60 in grpc_core::ClientChannel::CreateResolverLocked() /-S/contrib/libs/grpc/src/core/ext/filters/client_channel/client_channel.cc:1514:25 #3 0x1a546fcb in TryToConnectLocked /-S/contrib/libs/grpc/src/core/ext/filters/client_channel/client_channel.cc:1764:5 #4 0x1a546fcb in operator() /-S/contrib/libs/grpc/src/core/ext/filters/client_channel/client_channel.cc:1779:52 #5 0x1a546fcb in __invoke<(lambda at /-S/contrib/libs/grpc/src/core/ext/filters/client_channel/client_channel.cc:1778:27) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25 #6 0x1a546fcb in __call<(lambda at /-S/contrib/libs/grpc/src/core/ext/filters/client_channel/client_channel.cc:1778:27) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5 #7 0x1a546fcb in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12 #8 0x1a546fcb in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10 #9 0x1a41d2bd in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12 #10 0x1a41d2bd in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10 #11 0x1a41d2bd in DrainQueueOwned /-S/contrib/libs/grpc/src/core/lib/gprpp/work_serializer.cc:221:5 #12 0x1a41d2bd in grpc_core::WorkSerializer::WorkSerializerImpl::Run(std::__y1::function, grpc_core::DebugLocation const&) /-S/contrib/libs/grpc/src/core/lib/gprpp/work_serializer.cc:109:5 #13 0x1a41e538 in grpc_core::WorkSerializer::Run(std::__y1::function, grpc_core::DebugLocation const&) /-S/contrib/libs/grpc/src/core/lib/gprpp/work_serializer.cc:237:10 #14 0x1a5082c1 in grpc_core::ClientChannel::FilterBasedCallData::StartTransportStreamOpBatch(grpc_call_element*, grpc_transport_stream_op_batch*) /-S/contrib/libs/grpc/src/core/ext/filters/client_channel/client_channel.cc:2089:32 #15 0x1a13f8be in exec_ctx_run /-S/contrib/libs/grpc/src/core/lib/iomgr/exec_ctx.cc:45:3 #16 0x1a13f8be in grpc_core::ExecCtx::Flush() /-S/contrib/libs/grpc/src/core/lib/iomgr/exec_ctx.cc:72:9 #17 0x1a194027 in ~ExecCtx /-S/contrib/libs/grpc/src/core/lib/iomgr/exec_ctx.h:117:5 #18 0x1a194027 in grpc_call_start_batch /-S/contrib/libs/grpc/src/core/lib/surface/call.cc:3543:3 #19 0x1b675aea in grpc::internal::CallOpSet, grpc::internal::CallNoOp<3>, grpc::internal::CallNoOp<4>, grpc::internal::CallNoOp<5>, grpc::internal::CallNoOp<6>>::ContinueFillOpsAfterInterception() /-S/contrib/libs/grpc/include/grpcpp/impl/call_op_set.h:974:9 #20 0x1b689b3c in PerformOps /-S/contrib/libs/grpc/include/grpcpp/impl/call.h:67:17 #21 0x1b689b3c in grpc::ClientReaderWriter::ClientReaderWriter(grpc::ChannelInterface*, grpc::internal::RpcMethod const&, grpc::ClientContext*) /-S/contrib/libs/grpc/include/grpcpp/support/sync_stream.h:565:13 #22 0x1b661d9a in Create /-S/contrib/libs/grpc/include/grpcpp/support/sync_stream.h:439:16 #23 0x1b661d9a in Ydb::Topic::V1::TopicService::Stub::StreamReadRaw(grpc::ClientContext*) /-B/ydb/public/api/grpc/ydb_topic_v1.grpc.pb.cc:78:10 #24 0x18a5b3d6 in StreamRead /-B/ydb/public/api/grpc/ydb_topic_v1.grpc.pb.h:395:148 #25 0x18a5b3d6 in NKikimr::NPersQueueTests::NTestSuiteTPersQueueTest::TDirectReadTestSetup::InitControlSession(TBasicString> const&) /-S/ydb/services/persqueue_v1/persqueue_ut.cpp:836:35 #26 0x18a7f828 in 
NKikimr::NPersQueueTests::NTestSuiteTPersQueueTest::TTestCaseDirectReadCleanCache::Execute_(NUnitTest::TTestContext&) /-S/ydb/services/persqueue_v1/persqueue_ut.cpp:1327:15 #27 0x18d3c767 in operator() /-S/ydb/services/persqueue_v1/persqueue_ut.cpp:134:1 #28 0x18d3c767 in __invoke<(lambda at /-S/ydb/services/persqueue_v1/persqueue_ut.cpp:134:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25 #29 0x18d3c767 in __call<(lambda at /-S/ydb/services/persqueue_v1/persqueue_ut.cpp:134:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5 #30 0x18d3c767 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12 #31 0x18d3c767 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10 #32 0x197dba95 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12 #33 0x197dba95 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10 #34 0x197dba95 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #35 0x197ab5e8 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18 #36 0x18d3b713 in NKikimr::NPersQueueTests::NTestSuiteTPersQueueTest::TCurrentTest::Execute() /-S/ydb/services/persqueue_v1/persqueue_ut.cpp:134:1 #37 0x197aceb5 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:495:19 #38 0x197d600c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44 #39 0x7f2f8da50d8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) Indirect leak of 16 byte(s) in 1 object(s) allocated from: #0 0x1901a65d in operator new(unsigned long) /-S/contrib/libs/clang18-rt/lib/asan/asan_new_delete.cpp:86:3 #1 0x1a22fabb in __libcpp_operator_new /-S/contrib/libs/cxxsupp/libcxx/include/new:265:10 #2 0x1a22fabb in __libcpp_allocate /-S/contrib/libs/cxxsupp/libcxx/include/new:289:10 #3 0x1a22fabb in allocate /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator.h:118:32 #4 0x1a22fabb in __value_func<(lambda at /-S/contrib/libs/grpc/src/core/lib/surface/channel.cc:99:33), std::__y1::allocator<(lambda at /-S/contrib/libs/grpc/src/core/lib/surface/channel.cc:99:33)> > /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:364:45 #5 0x1a22fabb in __value_func<(lambda at /-S/contrib/libs/grpc/src/core/lib/surface/channel.cc:99:33), 0> /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:372:60 #6 0x1a22fabb in function<(lambda at /-S/contrib/libs/grpc/src/core/lib/surface/channel.cc:99:33), void> /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:946:50 #7 0x1a22fabb in operator=<(lambda at /-S/contrib/libs/grpc/src/core/lib/surface/channel.cc:99:33), void> /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:975:3 #8 0x1a22fabb in grpc_core::Channel::Channel(bool, bool, TBasicString>, grpc_core::ChannelArgs const&, grpc_compression_options, grpc_core::RefCountedPtr) /-S/contrib/libs/grpc/src/core/lib/surface/channel.cc:99:31 #9 0x1a230b29 in grpc_core::Channel::CreateWithBuilder(grpc_core::ChannelStackBuilder*) /-S/contrib/libs/grpc/src/core/lib/surface/channel.cc:153:37 #10 0x1a231b57 in grpc_core::Channel::Create(char const*, grpc_core::ChannelArgs, grpc_channel_stack_type, grpc_transport*) 
/-S/contrib/libs/grpc/src/core/lib/surface/channel.cc:230:10 #11 0x1a5b8091 in CreateChannel /-S/contrib/libs/grpc/src/core/ext/transport/chttp2/client/chttp2_connector.cc:323:10 #12 0x1a5b8091 in grpc_channel_create /-S/contrib/libs/grpc/src/core/ext/transport/chttp2/client/chttp2_connector.cc:365:14 #13 0x1ae1bc60 in grpc::(anonymous namespace)::InsecureChannelCredentialsImpl::CreateChannelWithInterceptors(TBasicString> const&, grpc::ChannelArguments const&, std::__y1::vector>, std::__y1::allocator>>>) /-S/contrib/libs/grpc/src/cpp/client/insecure_credentials.cc:55:13 #14 0x1ae1ba3b in grpc::(anonymous namespace)::InsecureChannelCredentialsImpl::CreateChannelImpl(TBasicString> const&, grpc::ChannelArguments const&) /-S/contrib/libs/grpc/src/cpp/client/insecure_credentials.cc:40:12 #15 0x1ae141f4 in grpc::CreateCustomChannel(TBasicString> const&, std::__y1::shared_ptr const&, grpc::ChannelArguments const&) /-S/contrib/libs/grpc/src/cpp/client/create_channel.cc:50:25 #16 0x18d5daa6 in NKikimr::NPersQueueTests::NTestSuiteTPersQueueTest::TDirectReadTestSetup::Connect(NKikimr::NPersQueueTests::TPersQueueV1TestServer&) /-S/ydb/services/persqueue_v1/persqueue_ut.cpp:824:23 #17 0x18a59c72 in NKikimr::NPersQueueTests::NTestSuiteTPersQueueTest::TDirectReadTestSetup::TDirectReadTestSetup(NKikimr::NPersQueueTests::TPersQueueV1TestServer&) /-S/ydb/services/persqueue_v1/persqueue_ut.cpp:806:13 #18 0x18a88a7a in NKikimr::NPersQueueTests::NTestSuiteTPersQueueTest::TTestCaseDirectReadRestartPQRB::Execute_(NUnitTest::TTestContext&) /-S/ydb/services/persqueue_v1/persqueue_ut.cpp:1361:30 #19 0x18d3c767 in operator() /-S/ydb/services/persqueue_v1/persqueue_ut.cpp:134:1 #20 0x18d3c767 in __invoke<(lambda at /-S/ydb/services/persqueue_v1/persqueue_ut.cpp:134:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25 #21 0x18d3c767 in __call<(lambda at /-S/ydb/services/persqueue_v1/persqueue_ut.cpp:134:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5 #22 0x18d3c767 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12 #23 0x18d3c767 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10 #24 0x197dba95 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12 #25 0x197dba95 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10 #26 0x197dba95 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #27 0x197ab5e8 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18 #28 0x18d3b713 in NKikimr::NPersQueueTests::NTestSuiteTPersQueueTest::TCurrentTest::Execute() /-S/ydb/services/persqueue_v1/persqueue_ut.cpp:134:1 #29 0x197aceb5 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:495:19 #30 0x197d600c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44 #31 0x7f2f8da50d8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) SUMMARY: AddressSanitizer: 6494997 byte(s) leaked in 1606 allocation(s). 
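The leaked allocations reported above are all rooted in grpc_core channel construction on the test client path (grpc_core::Channel::Channel, grpc_core::ClientChannel::CreateResolverLocked). If such teardown leaks are accepted as benign for this test binary rather than treated as product bugs, LeakSanitizer can be told to skip them via its default-suppressions hook. A minimal sketch follows, assuming the frame names copied from the backtraces above are the right ones to match; the exact suppression list is illustrative, not a vetted set:

// lsan_suppressions.cpp -- compile and link into the test binary.
// LSan invokes this weak hook at startup and ignores any leak whose
// backtrace contains one of the listed frames ("leak:<pattern>" syntax).
extern "C" const char* __lsan_default_suppressions() {
  return
      "leak:grpc_core::Channel::Channel\n"                      // frame seen in the report above
      "leak:grpc_core::ClientChannel::CreateResolverLocked\n";  // likewise
}

Equivalently, the same patterns can go into a plain-text file passed at run time via LSAN_OPTIONS=suppressions=/path/to/lsan.supp, which avoids rebuilding the binary.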
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v1-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v0-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v1-std] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v0-std] >> TPersQueueTest::SchemeshardRestart [GOOD] >> TPersQueueTest::SameOffset >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-fifo] [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-std] >> YdbIndexTable::OnlineBuild [GOOD] >> YdbIndexTable::OnlineBuildWithDataColumn >> DataShardReadIterator::ShouldReadNoColumnsRangeRequestArrow [GOOD] >> DataShardReadIterator::ShouldReadNonExistingKey >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v1] >> TVersions::Wreck0Reverse [GOOD] >> test_sql_streaming.py::test[suites-GroupByHop-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHopByStringKey-default.txt] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowTimeExtractorUnusedColumns-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHoppingWithDataWatermarks-default.txt] >> Cdc::Alter [GOOD] >> Cdc::AddColumn >> DataShardReadIterator::ShouldReadKeyPrefix1 [GOOD] >> DataShardReadIterator::ShouldReadKeyPrefix2 >> test_sql_streaming.py::test[suites-GroupByHopWithDataWatermarks-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHoppingWindow-default.txt] >> DataShardReadIterator::ShouldFailWrongSchema [GOOD] >> DataShardReadIterator::ShouldFailReadNextAfterSchemeChange >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit101 [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit198 >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[std-tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[std-tables_format_v1] >> DataShardReadIterator::ShouldReturnBrokenLockWhenWriteInSeparateTransactions-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRange-EvWrite >> DataShardReadIterator::TryCommitLocksPrepared-Volatile+BreakLocks [GOOD] >> DataShardReadIterator::TryCommitLocksPrepared+Volatile+BreakLocks >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v1] [GOOD] >> test_sql_streaming.py::test[suites-ReadTopicWithMetadataWithFilter-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-ReadTopicWithSchema-default.txt] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> TVersions::Wreck0Reverse [GOOD] Test command err: SmallQueue: MainQueue: {11 0f 1b}, {14 1f 1b}, {15 2f 1b}, {18 0f 1b}, {19 0f 1b}, {23 0f 1b}, {27 0f 1b} GhostQueue: 9, 12, 13, 16, 17, 20, 21, 24, 25, 28 0.29266 00000.000 II| FAKE_ENV: Born at 2025-03-31T16:02:21.177887Z 00000.008 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.009 II| FAKE_ENV: Starting storage for BS group 0 00000.010 II| FAKE_ENV: Starting storage for BS group 1 00000.010 II| FAKE_ENV: Starting storage for BS group 2 
00000.010 II| FAKE_ENV: Starting storage for BS group 3 00000.015 II| TABLET_EXECUTOR: Leader{1:2:0} activating executor 00000.015 II| TABLET_EXECUTOR: LSnap{1:2, on 2:1, 35b, wait} done, Waste{2:0, 0b +(0, 0b), 0 trc} 00000.016 DD| TABLET_EXECUTOR: Leader{1:2:2} commited cookie 2 for step 1 00000.016 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxInitSchema} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxInitSchema 00000.016 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxInitSchema} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.017 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxInitSchema} hope 1 -> done Change{2, redo 0b alter 209b annex 0, ~{ } -{ }, 0 gb} 00000.017 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxInitSchema} release 4194304b of static, Memory{0 dyn 0} 00000.017 DD| TABLET_EXECUTOR: Leader{1:2:3} commited cookie 1 for step 2 00000.017 NN| TABLET_SAUSAGECACHE: Update config MemoryLimit: 8388608 ReplacementPolicy: ThreeLeveledLRU 00000.017 NN| TABLET_SAUSAGECACHE: Switch replacement policy from S3FIFO to ThreeLeveledLRU 00000.017 NN| TABLET_SAUSAGECACHE: Switch replacement policy done from S3FIFO to ThreeLeveledLRU 00000.018 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.018 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.019 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{2, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.019 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.019 DD| TABLET_EXECUTOR: Leader{1:2:4} commited cookie 1 for step 3 00000.020 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.020 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.020 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{3, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.020 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.042 DD| TABLET_EXECUTOR: Leader{1:2:5} commited cookie 1 for step 4 00000.042 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{4, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.042 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{4, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.043 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{4, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{4, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.043 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{4, 
NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.043 DD| TABLET_EXECUTOR: Leader{1:2:6} commited cookie 1 for step 5 00000.044 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{5, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.044 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{5, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.044 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{5, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{5, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.044 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{5, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.045 DD| TABLET_EXECUTOR: Leader{1:2:7} commited cookie 1 for step 6 00000.045 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.045 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.045 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{6, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.045 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.046 DD| TABLET_EXECUTOR: Leader{1:2:8} commited cookie 1 for step 7 00000.046 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{7, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.046 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{7, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.047 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{7, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{7, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.047 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{7, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.047 DD| TABLET_EXECUTOR: Leader{1:2:9} commited cookie 1 for step 8 00000.047 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{8, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.047 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{8, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.048 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{8, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{8, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.048 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{8, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.048 DD| TABLET_EXECUTOR: Leader{1:2:10} commited cookie 1 for step 9 00000.049 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{9, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.049 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{9, 
NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.049 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{9, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{9, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.049 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{9, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.049 DD| TABLET_EXECUTOR: Leader{1:2:11} commited cookie 1 for step 10 00000.050 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{10, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.050 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{10, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.050 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{10, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{10, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.050 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{10, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.051 DD| TABLET_EXECUTOR: Leader{1:2:12} commited cookie 1 for step 11 00000.051 DD| TABLET_EXECUTOR: Leader{1:2:12} Tx{11, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.051 DD| TABLET_EXECUTOR: Leader{1:2:12} Tx{11, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.052 DD| TABLET_EXECUTOR: Leader{1:2:12} Tx{11, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{11, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.052 DD| TABLET_EXECUTOR: Leader{1:2:12} Tx{11, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.052 DD| TABLET_EXECUTOR: Leader{1:2:13} commited cookie 1 for step 12 00000.052 DD| TABLET_EXECUTOR: Leader{1:2:13} Tx{12, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.052 DD| TABLET_EXECUTOR: Leader{1:2:13} Tx{12, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.053 DD| TABLET_EXECUTOR: Leader{1:2:13} Tx{12, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{12, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.053 DD| TABLET_EXECUTOR: Leader{1:2:13} Tx{12, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.053 DD| TABLET_EXECUTOR: Leader{1:2:14} commited cookie 1 for step 13 00000.053 DD| TABLET_EXECUTOR: Leader{1:2:14} Tx{13, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.054 DD| TABLET_EXECUTOR: Leader{1:2:14} Tx{13, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.054 DD| TABLET_EXECUTOR: Leader{1:2:14} Tx{13, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{13, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.054 DD| TABLET_EXECUTOR: Leader{1:2:14} Tx{13, 
NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.054 DD| TABLET_EXECUTOR: Leader{1:2:15} commited cookie 1 for step 14 00000.055 DD| TABLET_EXECUTOR: Leader{1:2:15} Tx{14, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.055 DD| TABLET_EXECUTOR: Leader{1:2:15} Tx{14, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.055 DD| TABLET_EXECUTOR: Leader{1:2:15} Tx{14, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{14, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.055 DD| TABLET_EXECUTOR: Leader{1:2:15} Tx{14, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxW ... CHE: Dropping page collection [1:2:103:1:12288:2546:0] pages [ 5 ] owner [12:659:2684] 00000.823 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{96, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow 00000.823 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{96, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.823 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{96, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} hope 1 -> retry Change{103, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.823 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{96, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} touch new 0b, 102443b lo load (102443b in total), 0b requested for data (4194304b in total) 00000.823 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{96, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} took 8388608b of static mem, Memory{8388608 dyn 0} 00000.823 D3| TABLET_EXECUTOR: Leader{1:3:2} requests PageCollection [1:2:103:1:12288:2546:0] 102443 bytes, 1 pages: [4 4] 00000.823 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{96, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} postponed, 102443b, pages {1 wait, 1 load}, freshly touched 2 pages 00000.823 TT| TABLET_SAUSAGECACHE: Request page collection [1:2:103:1:12288:2546:0] owner [12:659:2684] class Online from cache [ ] already requested [ ] to request [ 4 ] 00000.823 TT| TABLET_SAUSAGECACHE: Loaded page collection [1:2:103:1:12288:2546:0] status OK pages [ 4 ] 00000.823 DD| TABLET_EXECUTOR: Leader{1:3:2} got result TEvResult{1 pages [1:2:103:1:12288:2546:0] ok OK}, category 1 00000.823 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{96, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} hope 2 -> done Change{103, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.823 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{96, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} release 8388608b of static, Memory{0 dyn 0} 00000.823 TT| TABLET_SAUSAGECACHE: Touch page collection [1:2:103:1:12288:2546:0] owner [12:659:2684] pages [ 100 4 ] 00000.823 DD| TABLET_SAUSAGECACHE: Dropping page collection [1:2:103:1:12288:2546:0] pages [ 4 ] owner [12:659:2684] 00000.824 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{97, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow 00000.824 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{97, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.824 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{97, 
NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} hope 1 -> retry Change{103, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.824 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{97, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} touch new 0b, 102443b lo load (102443b in total), 0b requested for data (4194304b in total) 00000.824 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{97, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} took 8388608b of static mem, Memory{8388608 dyn 0} 00000.824 D3| TABLET_EXECUTOR: Leader{1:3:2} requests PageCollection [1:2:103:1:12288:2546:0] 102443 bytes, 1 pages: [3 4] 00000.824 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{97, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} postponed, 102443b, pages {1 wait, 1 load}, freshly touched 2 pages 00000.824 TT| TABLET_SAUSAGECACHE: Request page collection [1:2:103:1:12288:2546:0] owner [12:659:2684] class Online from cache [ ] already requested [ ] to request [ 3 ] 00000.825 TT| TABLET_SAUSAGECACHE: Loaded page collection [1:2:103:1:12288:2546:0] status OK pages [ 3 ] 00000.825 DD| TABLET_EXECUTOR: Leader{1:3:2} got result TEvResult{1 pages [1:2:103:1:12288:2546:0] ok OK}, category 1 00000.825 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{97, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} hope 2 -> done Change{103, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.825 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{97, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} release 8388608b of static, Memory{0 dyn 0} 00000.825 TT| TABLET_SAUSAGECACHE: Touch page collection [1:2:103:1:12288:2546:0] owner [12:659:2684] pages [ 100 3 ] 00000.825 DD| TABLET_SAUSAGECACHE: Dropping page collection [1:2:103:1:12288:2546:0] pages [ 3 ] owner [12:659:2684] 00000.825 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{98, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow 00000.825 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{98, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.826 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{98, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} hope 1 -> retry Change{103, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.826 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{98, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} touch new 0b, 102443b lo load (102443b in total), 0b requested for data (4194304b in total) 00000.826 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{98, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} took 8388608b of static mem, Memory{8388608 dyn 0} 00000.826 D3| TABLET_EXECUTOR: Leader{1:3:2} requests PageCollection [1:2:103:1:12288:2546:0] 102443 bytes, 1 pages: [2 4] 00000.826 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{98, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} postponed, 102443b, pages {1 wait, 1 load}, freshly touched 2 pages 00000.826 TT| TABLET_SAUSAGECACHE: Request page collection [1:2:103:1:12288:2546:0] owner [12:659:2684] class Online from cache [ ] already requested [ ] to request [ 2 ] 00000.826 TT| TABLET_SAUSAGECACHE: Loaded page collection [1:2:103:1:12288:2546:0] status OK pages [ 2 ] 00000.826 DD| TABLET_EXECUTOR: Leader{1:3:2} got result TEvResult{1 pages [1:2:103:1:12288:2546:0] ok OK}, category 1 00000.826 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{98, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} hope 2 -> done Change{103, redo 0b 
alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.826 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{98, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} release 8388608b of static, Memory{0 dyn 0} 00000.826 TT| TABLET_SAUSAGECACHE: Touch page collection [1:2:103:1:12288:2546:0] owner [12:659:2684] pages [ 100 2 ] 00000.826 DD| TABLET_SAUSAGECACHE: Dropping page collection [1:2:103:1:12288:2546:0] pages [ 2 ] owner [12:659:2684] 00000.827 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{99, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow 00000.827 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{99, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.827 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{99, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} hope 1 -> retry Change{103, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.827 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{99, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} touch new 0b, 102443b lo load (102443b in total), 0b requested for data (4194304b in total) 00000.827 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{99, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} took 8388608b of static mem, Memory{8388608 dyn 0} 00000.827 D3| TABLET_EXECUTOR: Leader{1:3:2} requests PageCollection [1:2:103:1:12288:2546:0] 102443 bytes, 1 pages: [1 4] 00000.827 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{99, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} postponed, 102443b, pages {1 wait, 1 load}, freshly touched 2 pages 00000.827 TT| TABLET_SAUSAGECACHE: Request page collection [1:2:103:1:12288:2546:0] owner [12:659:2684] class Online from cache [ ] already requested [ ] to request [ 1 ] 00000.827 TT| TABLET_SAUSAGECACHE: Loaded page collection [1:2:103:1:12288:2546:0] status OK pages [ 1 ] 00000.827 DD| TABLET_EXECUTOR: Leader{1:3:2} got result TEvResult{1 pages [1:2:103:1:12288:2546:0] ok OK}, category 1 00000.827 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{99, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} hope 2 -> done Change{103, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.828 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{99, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} release 8388608b of static, Memory{0 dyn 0} 00000.828 TT| TABLET_SAUSAGECACHE: Touch page collection [1:2:103:1:12288:2546:0] owner [12:659:2684] pages [ 1 100 ] 00000.828 DD| TABLET_SAUSAGECACHE: Dropping page collection [1:2:103:1:12288:2546:0] pages [ 1 ] owner [12:659:2684] 00000.828 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{100, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow 00000.828 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{100, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.828 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{100, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} hope 1 -> retry Change{103, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.828 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{100, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} touch new 0b, 102443b lo load (102443b in total), 0b requested for data (4194304b in total) 00000.828 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{100, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} took 8388608b of static mem, Memory{8388608 dyn 0} 00000.828 
D3| TABLET_EXECUTOR: Leader{1:3:2} requests PageCollection [1:2:103:1:12288:2546:0] 102443 bytes, 1 pages: [0 4] 00000.828 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{100, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} postponed, 102443b, pages {1 wait, 1 load}, freshly touched 2 pages 00000.828 TT| TABLET_SAUSAGECACHE: Request page collection [1:2:103:1:12288:2546:0] owner [12:659:2684] class Online from cache [ ] already requested [ ] to request [ 0 ] 00000.829 TT| TABLET_SAUSAGECACHE: Loaded page collection [1:2:103:1:12288:2546:0] status OK pages [ 0 ] 00000.829 DD| TABLET_EXECUTOR: Leader{1:3:2} got result TEvResult{1 pages [1:2:103:1:12288:2546:0] ok OK}, category 1 00000.829 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{100, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} hope 2 -> done Change{103, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.829 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{100, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} release 8388608b of static, Memory{0 dyn 0} 00000.829 TT| TABLET_SAUSAGECACHE: Touch page collection [1:2:103:1:12288:2546:0] owner [12:659:2684] pages [ 0 100 ] 00000.829 DD| TABLET_SAUSAGECACHE: Dropping page collection [1:2:103:1:12288:2546:0] pages [ 0 ] owner [12:659:2684] Counters: Active:0/0, Passive:2772, MemLimit:-1 00000.829 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.830 II| TABLET_EXECUTOR: Leader{1:3:2} suiciding, Waste{2:0, 10249619b +(0, 0b), 1 trc, -48685b acc} 00000.831 DD| TABLET_SAUSAGECACHE: Unregister owner [12:659:2684] 00000.832 NN| TABLET_SAUSAGECACHE: Poison cache serviced 201 reqs hit {0 0b} miss {202 20491164b} 00000.832 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.832 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {10191b, 107} 00000.832 II| FAKE_ENV: DS.1 gone, left {10250914b, 5}, put {10299737b, 107} 00000.838 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.838 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.839 II| FAKE_ENV: All BS storage groups are stopped 00000.839 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00000.839 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 3357}, stopped >> KqpLimits::CancelAfterRoTxWithFollowerStreamLookup [GOOD] >> KqpLimits::CancelAfterRoTxWithFollowerStreamLookupDepededRead >> DataShardReadIteratorConsistency::LocalSnapshotReadWithConcurrentWrites [GOOD] >> DataShardReadIteratorConsistency::Bug_7674_IteratorDuplicateRows |78.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator |78.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[std-tables_format_v1] [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefix-EvWrite [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v0-fifo] [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixLeftBorder+EvWrite >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v0-std] >> TRtmrTest::CreateWithoutTimeCastBuckets >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[fifo-tables_format_v0] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-std] [GOOD] >> 
test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-fifo] [GOOD] |78.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut |78.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut >> DataShardReadIterator::ShouldReadRangeRightInclusive [GOOD] >> DataShardReadIterator::ShouldReadRangeOneByOne >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-std] |78.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator |78.9%| [LD] {RESULT} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut >> DataShardReadIterator::ShouldReadNonExistingKey [GOOD] >> DataShardReadIterator::ShouldReadMultipleKeys >> TRtmrTest::CreateWithoutTimeCastBuckets [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-std] [GOOD] >> DataShardReadIterator::ShouldReadKeyPrefix2 [GOOD] >> DataShardReadIterator::ShouldReadKeyPrefix3 |78.9%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest >> TRtmrTest::CreateWithoutTimeCastBuckets [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:06:30.282263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:06:30.284299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:06:30.284401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:06:30.284461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:06:30.284551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:06:30.284590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:06:30.284691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:06:30.284837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:06:30.289365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:06:30.490638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:06:30.490716Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:06:30.515777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 
2025-03-31T16:06:30.515916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:06:30.519576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:06:30.534564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:06:30.537994Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:06:30.544132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:06:30.553918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:06:30.560251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:06:30.587371Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:06:30.587503Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:06:30.591711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:06:30.591820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:06:30.591908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:06:30.597290Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:06:30.618635Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:06:30.897742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:06:30.898588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:06:30.901273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:06:30.901613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:06:30.901685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:06:30.909670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:06:30.912833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:06:30.913263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:06:30.913344Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:06:30.914534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:06:30.914606Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:06:30.918141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:06:30.918219Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:06:30.918287Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:06:30.920970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:06:30.921036Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:06:30.921897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:06:30.921960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:06:30.926403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:06:30.936228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:06:30.936665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:06:30.937981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:06:30.938164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:06:30.938235Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:06:30.939376Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:06:30.939513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:06:30.939739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:06:30.939833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:06:30.944062Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:06:30.944149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:06:30.944398Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:06:30.944474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:06:30.944741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:06:30.944797Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:06:30.947160Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:06:30.947265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:06:30.947317Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:06:30.947354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:06:30.947395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:06:30.947471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:06:30.947522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:06:30.947559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:06:30.947665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:06:30.947738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:06:30.947777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:06:30.950185Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:06:30.950340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:06:30.950379Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
NFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046678944, cookie: 100 2025-03-31T16:06:31.086398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046678944, cookie: 100 2025-03-31T16:06:31.086423Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 100 2025-03-31T16:06:31.086452Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 1 2025-03-31T16:06:31.086483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-31T16:06:31.086583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 0/1, is published: true 2025-03-31T16:06:31.090658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-03-31T16:06:31.090731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateRTMR TConfigureParts ProgressState operationId# 100:0 at tablet72057594046678944 2025-03-31T16:06:31.090777Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 3 -> 128 2025-03-31T16:06:31.095213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-03-31T16:06:31.095368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-03-31T16:06:31.098167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-03-31T16:06:31.098236Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateRTMR TPropose, operationId: 100:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:06:31.098305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 100 ready parts: 1/1 2025-03-31T16:06:31.098438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 100 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:06:31.100320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2025-03-31T16:06:31.100460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002 2025-03-31T16:06:31.100747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:06:31.100856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 
2025-03-31T16:06:31.100910Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateRTMR TPropose, operationId: 100:0 HandleReply TEvOperationPlan, at schemeshard: 72057594046678944 2025-03-31T16:06:31.101024Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 128 -> 240 2025-03-31T16:06:31.101172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:06:31.101230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-31T16:06:31.103314Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:06:31.103389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:06:31.103593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T16:06:31.103719Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:06:31.103761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 100, path id: 1 2025-03-31T16:06:31.103805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 100, path id: 2 FAKE_COORDINATOR: Erasing txId 100 2025-03-31T16:06:31.104074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-03-31T16:06:31.104125Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 100:0 ProgressState 2025-03-31T16:06:31.104250Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-03-31T16:06:31.104288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-03-31T16:06:31.104333Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-03-31T16:06:31.104391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-03-31T16:06:31.104461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2025-03-31T16:06:31.104523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-03-31T16:06:31.104570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 100:0 2025-03-31T16:06:31.104606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 100:0 2025-03-31T16:06:31.104691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-31T16:06:31.104738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2025-03-31T16:06:31.104780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-03-31T16:06:31.104814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-03-31T16:06:31.105722Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, 
msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-03-31T16:06:31.105798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-03-31T16:06:31.105822Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2025-03-31T16:06:31.105856Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-03-31T16:06:31.105899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:06:31.106793Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 100 2025-03-31T16:06:31.106854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 100 2025-03-31T16:06:31.106872Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2025-03-31T16:06:31.106894Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-03-31T16:06:31.106914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-31T16:06:31.106989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2025-03-31T16:06:31.110056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-03-31T16:06:31.111146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2025-03-31T16:06:31.111354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-03-31T16:06:31.117470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 2025-03-31T16:06:31.118266Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-03-31T16:06:31.118432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-03-31T16:06:31.118479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:307:2298] TestWaitNotification: OK eventTxId 100 2025-03-31T16:06:31.120704Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/rtmr1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:06:31.120958Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/rtmr1" took 276us 
result status StatusSuccess 2025-03-31T16:06:31.121292Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/rtmr1" PathDescription { Self { Name: "rtmr1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeRtmrVolume CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 RTMRVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } RtmrVolumeDescription { Name: "rtmr1" PathId: 2 PartitionsCount: 0 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit198 [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit900 |79.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_ymq_send_read_delete [GOOD] |79.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_sqs_action_counters [GOOD] |79.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_clouds [GOOD] >> DataShardReadIterator::TryCommitLocksPrepared+Volatile+BreakLocks [GOOD] >> DataShardReadIterator::ShouldRollbackLocksWhenWrite >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-std] [GOOD] >> DataShardReadIteratorConsistency::Bug_7674_IteratorDuplicateRows [GOOD] >> DataShardReadIteratorConsistency::LeaseConfirmationNotOutOfOrder >> DataShardReadIterator::ShouldFailReadNextAfterSchemeChange [GOOD] >> DataShardReadIterator::ShouldFailReadNextAfterSchemeChangeExhausted >> DataShardVolatile::NotCachingAbortingDeletes-UseSink [GOOD] >> DataShardVolatile::GracefulShardRestartNoEarlyReadSetAck |79.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v1-std] [GOOD] |79.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/ydb-core-fq-libs-row_dispatcher-format_handler-ut |79.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/ydb-core-fq-libs-row_dispatcher-format_handler-ut |79.1%| [LD] {RESULT} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/ydb-core-fq-libs-row_dispatcher-format_handler-ut >> TPartitionWriterCacheActorTests::WriteReplyOrder |79.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest >> Cdc::AddColumn [GOOD] >> Cdc::AddColumn_TopicAutoPartitioning >> 
DataShardReadIterator::ShouldReturnBrokenLockWhenReadRange-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips+EvWrite >> TPartitionWriterCacheActorTests::WriteReplyOrder [GOOD] >> TPartitionWriterCacheActorTests::DropOldWriter >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixLeftBorder+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixLeftBorder-EvWrite |79.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut |79.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut |79.3%| [LD] {RESULT} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v1-std] [GOOD] >> DataShardReadIterator::ShouldReadRangeOneByOne [GOOD] >> DataShardReadIterator::ShouldReadRangeChunk5 >> TPartitionWriterCacheActorTests::DropOldWriter [GOOD] >> TPersQueueCommonTest::Auth_CreateGrpcStreamWithInvalidTokenInInitialMetadata_SessionClosedWithUnauthenticatedError >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queue_counters_are_in_folder[tables_format_v0] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v0-std] [GOOD] >> DataShardReadIterator::ShouldReadMultipleKeys [GOOD] >> DataShardReadIterator::ShouldReadMultipleKeysOneByOne >> DataShardReadIterator::ShouldReadKeyPrefix3 [GOOD] >> DataShardReadIterator::ShouldReadFromFollower >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v0-std] [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit900 [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk100Limit900 >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v0-fifo] >> TPersQueueTest::SameOffset [GOOD] >> TPersQueueTest::SchemeOperationsTest >> DataShardReadIterator::ShouldFailReadNextAfterSchemeChangeExhausted [GOOD] >> DataShardReadIterator::NoErrorOnFinalACK >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-fifo] >> DataShardReadIterator::ShouldRollbackLocksWhenWrite [GOOD] >> DataShardReadIterator::TryWriteManyRows+Commit |79.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> test_sql_streaming.py::test[suites-GroupByHoppingWindow-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowByStringKey-default.txt] >> test_sql_streaming.py::test[suites-GroupByHoppingWithDataWatermarks-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-ReadTopic-default.txt] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips-EvWrite >> DataShardReadIteratorConsistency::LeaseConfirmationNotOutOfOrder [GOOD] >> DataShardReadIteratorFastCancel::ShouldProcessFastCancel >> test_sql_streaming.py::test[suites-ReadTopicWithSchema-default.txt] [FAIL] >> KqpPg::TableInsert-useSink [GOOD] >> KqpPg::TempTablesSessionsIsolation >> test_sql_streaming.py::test[suites-ReadTwoTopics-default.txt] |79.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest >> test_sql_streaming.py::test[suites-GroupByHopByStringKey-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHopExprKey-default.txt] >> 
DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixLeftBorder-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixRightBorder+EvWrite >> Cdc::AddColumn_TopicAutoPartitioning [GOOD] >> Cdc::AddIndex >> DataShardReadIterator::ShouldReadRangeChunk5 [GOOD] >> DataShardReadIterator::ShouldReadRangeChunk7 >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-std] >> DataShardReadIterator::ShouldReadMultipleKeysOneByOne [GOOD] >> DataShardReadIterator::ShouldReadNotExistingRange |79.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-std] [GOOD] >> DataShardReadIterator::ShouldReadFromFollower [GOOD] >> DataShardReadIterator::ShouldReadHeadFromFollower >> DataShardVolatile::GracefulShardRestartNoEarlyReadSetAck [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk100Limit900 [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk100Limit1000 |79.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v1] [GOOD] |79.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut |79.3%| [LD] {RESULT} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut |79.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut >> TPersQueueTest::UpdatePartitionLocation >> DataShardReadIterator::NoErrorOnFinalACK [GOOD] >> DataShardReadIterator::ShouldCancelMvccSnapshotFromFuture >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v0-std] [GOOD] |79.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[std-tables_format_v1] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-fifo] |79.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest >> YdbIndexTable::MultiShardTableUniqAndNonUniqIndex [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v1-fifo] [GOOD] >> YdbIndexTable::MultiShardTableTwoIndexes >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v1-std] >> DataShardReadIteratorFastCancel::ShouldProcessFastCancel [GOOD] >> DataShardReadIteratorLatency::ReadSplitLatency |79.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/actor/ut/ydb-core-ymq-actor-ut |79.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/actor/ut/ydb-core-ymq-actor-ut |79.4%| [LD] {RESULT} $(B)/ydb/core/ymq/actor/ut/ydb-core-ymq-actor-ut |79.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest >> TPersQueueCommonTest::Auth_CreateGrpcStreamWithInvalidTokenInInitialMetadata_SessionClosedWithUnauthenticatedError [GOOD] >> TPersQueueCommonTest::Auth_MultipleUpdateTokenRequestIterationsWithValidToken_GotUpdateTokenResponseForEachRequest >> 
DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips2+EvWrite |79.4%| [TA] $(B)/ydb/core/tablet_flat/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> DataShardReadIterator::ShouldReadRangeChunk7 [GOOD] >> DataShardReadIterator::ShouldReadRangePrefix1 |79.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-std] [GOOD] >> KqpPg::TempTablesSessionsIsolation [FAIL] >> KqpPg::TempTablesDrop >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[fifo-tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[fifo-tables_format_v1] >> DataShardReadIterator::ShouldReadNotExistingRange [GOOD] >> DataShardReadIterator::ShouldReadRangeChunk1_100 |79.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut |79.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut >> DataShardReadIterator::ShouldReadHeadFromFollower [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixRightBorder+EvWrite [GOOD] |79.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixRightBorder-EvWrite >> DataShardReadIterator::ShouldReadFromHead >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[fifo-tables_format_v1] [GOOD] >> YdbIndexTable::MultiShardTableOneIndexIndexOverlap [GOOD] >> TPersQueueTest::ReadFromSeveralPartitions |79.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v0-std] [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk100Limit1000 [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk100Limit1001 |79.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-std] [GOOD] >> Cdc::AddIndex [GOOD] >> Cdc::AddStream >> TKeyValueTracingTest::WriteSmall >> ReadIteratorExternalBlobs::ExtBlobs [GOOD] |79.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut |79.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut >> DataShardReadIteratorLatency::ReadSplitLatency [GOOD] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowByStringKey-default.txt] [FAIL] >> DataShardReadIterator::TryWriteManyRows+Commit [GOOD] >> DataShardReadIterator::TryWriteManyRows-Commit >> DataShardReadIterator::ShouldCancelMvccSnapshotFromFuture [GOOD] >> test_sql_streaming.py::test[suites-ReadTopic-default.txt] [FAIL] >> DataShardReadIterator::ShouldCommitLocksWhenReadWriteInOneTransaction >> test_sql_streaming.py::test[suites-GroupByHoppingWindowExprKey-default.txt] >> ReadIteratorExternalBlobs::ExtBlobsWithSpecificKeys >> DataShardReadIteratorPageFaults::CancelPageFaultedReadThenDropTable >> test_sql_streaming.py::test[suites-ReadTopicGroupWriteToSolomon-default.txt] |79.4%| [LD] {RESULT} 
$(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut |79.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v1-std] [GOOD] |79.5%| [LD] {RESULT} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/idx_test/unittest >> YdbIndexTable::MultiShardTableOneIndexIndexOverlap [GOOD] Test command err: Trying to start YDB, gRPC: 20557, MsgBus: 2659 2025-03-31T16:03:34.162385Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004959537154387:2204];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:34.169731Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000df9/r3tmp/tmpooGh99/pdisk_1.dat 2025-03-31T16:03:34.632193Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:03:34.632313Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:03:34.653695Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:34.656463Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20557, node 1 2025-03-31T16:03:34.801402Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:03:34.801427Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:03:34.801438Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:03:34.801640Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2659 TClient is connected to server localhost:2659 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:03:35.446532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:03:35.471704Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:03:35.493868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:35.641656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:03:35.801748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:03:35.885460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:37.774434Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004972422057931:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:37.774562Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:38.123874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:03:38.164574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:03:38.196314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:03:38.225214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:03:38.264370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:03:38.352017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:03:38.442515Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004976717025749:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:38.442598Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:38.442856Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004976717025754:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:38.447234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:03:38.460177Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488004976717025756:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:03:38.557603Z node 1 :TX_PROXY ERROR: Actor# [1:7488004976717025811:3458] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:03:39.158335Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488004959537154387:2204];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:39.158527Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:03:39.530326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:03:40.578211Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710672. Ctx: { TraceId: 01jqpf3fc2a14hrpwcdpjebcxn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODkwMTJkNGEtYjI4NTk4ZjctNDM2ZjBiMWEtNjIzZWU3Mzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:03:40.582066Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710673. Ctx: { TraceId: 01jqpf3fc33n8bqpnj2fchsgb4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGIxYjNmZS04NjAxMGM4NS1mY2Q4YjI1Yi05MjgxZmE5Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:03:40.594434Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710674. Ctx: { TraceId: 01jqpf3fc322r5a2d8e2pcxbvb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWQzNmVkNDQtODllMTQ1NGUtNWQ5OTAyYjgtZmZmNjE3ODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:03:40.596404Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01jqpf3fc9493ghcwjfjgadmbk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTI2OGQxNTMtMzQ5ODJhNmYtNzBiMzZkZDQtZTViNTcwNGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:03:40.601914Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710676. Ctx: { TraceId: 01jqpf3fc9f9nt6ehydsaf8s3a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjZhOWI4MDgtYWMxYzk3ZDItMzQ2NWJkMS01NzhkYWRlNw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:03:40.614472Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710677. Ctx: { TraceId: 01jqpf3fc2a14hrpwcdpjebcxn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODkwMTJkNGEtYjI4NTk4ZjctNDM2ZjBiMWEtNjIzZWU3Mzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:03:40.617714Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710678. Ctx: { TraceId: 01jqpf3fc33n8bqpnj2fchsgb4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGIxYjNmZS04NjAxMGM4NS1mY2Q4YjI1Yi05MjgxZmE5Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:03:40.627937Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710679. 
Ctx: { TraceId: 01jqpf3fc322r5a2d8e2pcxbvb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWQzNmVkNDQtODllMTQ1NGUtNWQ5OTAyYjgtZmZmNjE3ODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:03:40.628597Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710680. Ctx: { TraceId: 01jqpf3fcyfnkyb84z3ddf8jg7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTNhNjU3ZmUtODU0YmFhZGMtOWMzNjgzZTUtYzkwMDMyOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:03:40.629526Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710681. Ctx: { TraceId: 01jqpf3fcz9y3nyxajvd7p9m5r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDBmMGMxODYtMTYyOTI0MzAtYTNkZjA5NWYtMzJmMDUyY2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:03:40.630251Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710682. Ctx: { TraceId: 01jqpf3fc9493ghcwjfjgadmbk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTI2OGQxNTMtMzQ5ODJhNmYtNzBiMzZkZDQtZTViNTcwNGU=, CurrentExecutionId: , CustomerSupplied ... sion/3?node_id=3&id=MTc2N2IyMi0xYmVmMjdkZS1lZGFjMTgyZi1iYjliMmIwNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:06:51.822899Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976714520. Ctx: { TraceId: 01jqpf9a54632kce6kx70azvbz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YWE4M2M0OC1iNjExZWJkZS02YjRkMWQ5ZS1hZmE0NDRmNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:06:51.832003Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976714522. Ctx: { TraceId: 01jqpf9a54632kce6kx70azvbz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YWE4M2M0OC1iNjExZWJkZS02YjRkMWQ5ZS1hZmE0NDRmNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:06:51.833252Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976714523. Ctx: { TraceId: 01jqpf9a543darpt8kyasdwps8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTc2N2IyMi0xYmVmMjdkZS1lZGFjMTgyZi1iYjliMmIwNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:06:51.845174Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976714524. Ctx: { TraceId: 01jqpf9a5bag6fkj23vcknphrx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MWRjYzVjMjktNGFhNTRiOWEtYTUyN2VhMzctZGNiYWE1OTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:06:51.854578Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976714525. Ctx: { TraceId: 01jqpf9a5k6cmcwmrn92na4byw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YjE4YTQyNTUtNmY5N2M4NjctYjBlMmQzMmYtZTcyZjExYmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:06:51.865114Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976714526. Ctx: { TraceId: 01jqpf9a5b1jh3teqwxaewh2r6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NTIxNzJkYmQtMWE3MzBiYzgtYTQwMTQxOWEtNzE4YmY2NWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:06:51.870527Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976714527. 
Ctx: { TraceId: 01jqpf9a5k6cmcwmrn92na4byw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YjE4YTQyNTUtNmY5N2M4NjctYjBlMmQzMmYtZTcyZjExYmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:06:51.893574Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976714528. Ctx: { TraceId: 01jqpf9a5b1jh3teqwxaewh2r6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NTIxNzJkYmQtMWE3MzBiYzgtYTQwMTQxOWEtNzE4YmY2NWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:06:51.913120Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976714529. Ctx: { TraceId: 01jqpf9a7vbd99jt2fmgzpc8na, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NDhkMjYwNGItOTMyOTdjMGItNjVhZDVjNWUtZTY2OTNiNTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:06:51.916577Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976714530. Ctx: { TraceId: 01jqpf9a7x4qpg07j09wba0xbj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MWRjYzVjMjktNGFhNTRiOWEtYTUyN2VhMzctZGNiYWE1OTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:06:51.925430Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976714531. Ctx: { TraceId: 01jqpf9a7x2avrba344mzvgnfr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTc2N2IyMi0xYmVmMjdkZS1lZGFjMTgyZi1iYjliMmIwNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:06:51.926827Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976714532. Ctx: { TraceId: 01jqpf9a7vbd99jt2fmgzpc8na, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NDhkMjYwNGItOTMyOTdjMGItNjVhZDVjNWUtZTY2OTNiNTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:06:51.942515Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976714533. Ctx: { TraceId: 01jqpf9a7x2avrba344mzvgnfr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTc2N2IyMi0xYmVmMjdkZS1lZGFjMTgyZi1iYjliMmIwNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:06:51.943961Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976714534. Ctx: { TraceId: 01jqpf9a8jb8kew9k4er56axnh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YWE4M2M0OC1iNjExZWJkZS02YjRkMWQ5ZS1hZmE0NDRmNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:06:51.949165Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976714535. Ctx: { TraceId: 01jqpf9a7x2avrba344mzvgnfr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTc2N2IyMi0xYmVmMjdkZS1lZGFjMTgyZi1iYjliMmIwNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:06:51.956693Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976714536. Ctx: { TraceId: 01jqpf9a7x4qpg07j09wba0xbj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MWRjYzVjMjktNGFhNTRiOWEtYTUyN2VhMzctZGNiYWE1OTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:06:51.958684Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976714537. 
Ctx: { TraceId: 01jqpf9a90edq3c9mt1vdhqwje, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YjE4YTQyNTUtNmY5N2M4NjctYjBlMmQzMmYtZTcyZjExYmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:06:51.959890Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976714538. Ctx: { TraceId: 01jqpf9a8jb8kew9k4er56axnh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YWE4M2M0OC1iNjExZWJkZS02YjRkMWQ5ZS1hZmE0NDRmNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:06:51.973633Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976714539. Ctx: { TraceId: 01jqpf9a7x4qpg07j09wba0xbj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MWRjYzVjMjktNGFhNTRiOWEtYTUyN2VhMzctZGNiYWE1OTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:06:51.984203Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976714540. Ctx: { TraceId: 01jqpf9a90edq3c9mt1vdhqwje, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YjE4YTQyNTUtNmY5N2M4NjctYjBlMmQzMmYtZTcyZjExYmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:06:51.996619Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976714541. Ctx: { TraceId: 01jqpf9a9q2fjkvn4nbvb88xyq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NTIxNzJkYmQtMWE3MzBiYzgtYTQwMTQxOWEtNzE4YmY2NWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:06:52.011791Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976714542. Ctx: { TraceId: 01jqpf9ab4f3g28feba3qv9x7m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NDhkMjYwNGItOTMyOTdjMGItNjVhZDVjNWUtZTY2OTNiNTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:06:52.025669Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976714543. Ctx: { TraceId: 01jqpf9abkf022m79p4y6c48bn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTc2N2IyMi0xYmVmMjdkZS1lZGFjMTgyZi1iYjliMmIwNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:06:52.034047Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976714544. Ctx: { TraceId: 01jqpf9abv5cdbhz5zkxz2jvc0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YjE4YTQyNTUtNmY5N2M4NjctYjBlMmQzMmYtZTcyZjExYmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:06:52.050214Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976714545. Ctx: { TraceId: 01jqpf9ac4152kfhazaa1ejk46, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YWE4M2M0OC1iNjExZWJkZS02YjRkMWQ5ZS1hZmE0NDRmNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:06:52.056527Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976714546. Ctx: { TraceId: 01jqpf9acg8pjqd5s9tzr4hnqh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NDhkMjYwNGItOTMyOTdjMGItNjVhZDVjNWUtZTY2OTNiNTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:06:52.057949Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976714547. 
Ctx: { TraceId: 01jqpf9acc5j227s6zgn7rw12q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MWRjYzVjMjktNGFhNTRiOWEtYTUyN2VhMzctZGNiYWE1OTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:06:52.061131Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976714548. Ctx: { TraceId: 01jqpf9ac4152kfhazaa1ejk46, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YWE4M2M0OC1iNjExZWJkZS02YjRkMWQ5ZS1hZmE0NDRmNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS 2025-03-31T16:06:52.070254Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976714549. Ctx: { TraceId: 01jqpf9acc5j227s6zgn7rw12q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MWRjYzVjMjktNGFhNTRiOWEtYTUyN2VhMzctZGNiYWE1OTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:06:52.071955Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976714550. Ctx: { TraceId: 01jqpf9acg8pjqd5s9tzr4hnqh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NDhkMjYwNGItOTMyOTdjMGItNjVhZDVjNWUtZTY2OTNiNTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:06:52.079783Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976714551. Ctx: { TraceId: 01jqpf9acc5j227s6zgn7rw12q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MWRjYzVjMjktNGFhNTRiOWEtYTUyN2VhMzctZGNiYWE1OTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:06:52.082624Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976714552. Ctx: { TraceId: 01jqpf9acwae0nqbc7bcz67f9s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NTIxNzJkYmQtMWE3MzBiYzgtYTQwMTQxOWEtNzE4YmY2NWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:06:52.087729Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976714553. Ctx: { TraceId: 01jqpf9adbbjp13zj4tadnvwmt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTc2N2IyMi0xYmVmMjdkZS1lZGFjMTgyZi1iYjliMmIwNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS 2025-03-31T16:06:52.100845Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976714554. Ctx: { TraceId: 01jqpf9acwae0nqbc7bcz67f9s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NTIxNzJkYmQtMWE3MzBiYzgtYTQwMTQxOWEtNzE4YmY2NWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:06:52.102012Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976714555. Ctx: { TraceId: 01jqpf9adbbjp13zj4tadnvwmt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTc2N2IyMi0xYmVmMjdkZS1lZGFjMTgyZi1iYjliMmIwNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root finished with status: SUCCESS finished with status: SUCCESS finished with status: SUCCESS >> DataShardReadIterator::ShouldReadRangePrefix1 [GOOD] >> DataShardReadIterator::ShouldReadRangePrefix2 >> KqpPg::TempTablesDrop [FAIL] >> TKeyValueTracingTest::WriteSmall [FAIL] >> KqpPg::TempTablesWithCache >> TTicketParserTest::AuthorizationRetryError >> DataShardReadIterator::ShouldReadFromHead [GOOD] >> DataShardReadIterator::ShouldReadFromHeadWithConflict+UseSink >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips2+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips2-EvWrite >> test_sql_streaming.py::test[suites-ReadTwoTopics-default.txt] [FAIL] |79.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view |79.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view >> test_sql_streaming.py::test[suites-ReadWriteSameTopic-default.txt] |79.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys |79.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys |79.5%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view |79.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys >> test_sql_streaming.py::test[suites-GroupByHopExprKey-default.txt] [FAIL] >> YdbOlapStore::LogGrepExisting [GOOD] >> YdbOlapStore::LogExistingUserId >> TTicketParserTest::NebiusAuthenticationUnavailable |79.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queue_counters_are_in_folder[tables_format_v0] [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixRightBorder-EvWrite [GOOD] >> DataShardReadIterator::ShouldReadRangeChunk1_100 [GOOD] >> test_sql_streaming.py::test[suites-GroupByHopListKey-default.txt] >> TTicketParserTest::AccessServiceAuthenticationOk >> TPersQueueTest::SchemeOperationsTest [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk100Limit1001 [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v0] [FAIL] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queue_counters_are_in_folder[tables_format_v1] >> DataShardReadIterator::ShouldNotReadFutureMvccFromFollower >> TPersQueueTest::SchemeOperationFirstClassCitizen >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queue_counters_are_in_folder[tables_format_v1] [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinue+EvWrite >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v1] >> DataShardReadIterator::ShouldReadRangeChunk1 |79.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut |79.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut |79.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut |79.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v0-std] [GOOD] |79.5%| [LD] {RESULT} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut |79.5%| [LD] {RESULT} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut |79.5%| 
[TA] {RESULT} $(B)/ydb/core/tablet_flat/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TPersQueueTest::UpdatePartitionLocation [GOOD] >> TPersQueueTest::TopicServiceCommitOffset >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v1] [GOOD] >> TConsoleTxProcessorTests::TestTxProcessorRandom [GOOD] >> TImmediateControlsConfiguratorTests::TestControlsInitialization >> TPersQueueCommonTest::Auth_MultipleUpdateTokenRequestIterationsWithValidToken_GotUpdateTokenResponseForEachRequest [GOOD] >> TPersQueueCommonTest::Auth_WriteSessionWithValidTokenAndACEAndThenRemoveACEAndSendWriteRequest_SessionClosedWithUnauthorizedErrorAfterSuccessfullWriteResponse >> TNetClassifierUpdaterTest::TestFiltrationByNetboxTags [GOOD] >> TTicketParserTest::AccessServiceAuthenticationOk [GOOD] >> TTicketParserTest::AccessServiceAuthenticationApiKeyOk >> TTicketParserTest::NebiusAuthenticationUnavailable [GOOD] >> TTicketParserTest::NebiusAuthorizationRetryError >> Cdc::AddStream [GOOD] >> Cdc::AwsRegion |79.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign |79.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign |79.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign |79.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops |79.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops |79.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops |79.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v0-std] [GOOD] >> DataShardReadIterator::ShouldCommitLocksWhenReadWriteInOneTransaction [GOOD] >> DataShardReadIterator::ShouldCommitLocksWhenReadWriteInSeparateTransactions |79.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> DataShardReadIteratorPageFaults::CancelPageFaultedReadThenDropTable [GOOD] >> DataShardReadIteratorPageFaults::LocksNotLostOnPageFault >> TImmediateControlsConfiguratorTests::TestControlsInitialization [GOOD] >> TImmediateControlsConfiguratorTests::TestModifiedControls ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut/unittest >> TNetClassifierUpdaterTest::TestFiltrationByNetboxTags [GOOD] Test command err: 2025-03-31T16:04:56.404234Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005311552326079:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:56.404322Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a64/r3tmp/tmpfSrtxH/pdisk_1.dat 2025-03-31T16:04:57.027539Z node 1 :HTTP ERROR: (#26,[::1]:28946) connection closed with error: Connection refused 2025-03-31T16:04:57.034954Z node 1 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-03-31T16:04:57.066240Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:57.066340Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:57.070133Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:04:57.163319Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:05:01.156081Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488005335678824515:2087];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:05:01.156258Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a64/r3tmp/tmpTYiAsE/pdisk_1.dat 2025-03-31T16:05:01.573314Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:05:01.574547Z node 2 :HTTP ERROR: (#28,[::1]:19354) connection closed with error: Connection refused 2025-03-31T16:05:01.576045Z node 2 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-03-31T16:05:01.578123Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:05:01.578221Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:05:01.586537Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:05:05.961936Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488005349727195412:2213];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:05:06.026659Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a64/r3tmp/tmpuGnnz2/pdisk_1.dat 2025-03-31T16:05:06.092484Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:05:06.123049Z node 3 :HTTP ERROR: (#26,[::1]:3793) connection closed with error: Connection refused 2025-03-31T16:05:06.123379Z node 3 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-03-31T16:05:06.129289Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:05:06.129380Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:05:06.132283Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:05:09.715301Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488005367156851539:2126];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:05:09.732663Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a64/r3tmp/tmppdOHgs/pdisk_1.dat 2025-03-31T16:05:09.914825Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:05:09.935991Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:05:09.936077Z node 4 :HIVE WARN: HIVE#72057594037968897 
Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:05:09.936498Z node 4 :HTTP ERROR: (#28,[::1]:8302) connection closed with error: Connection refused 2025-03-31T16:05:09.939785Z node 4 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-03-31T16:05:09.944960Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:05:13.735869Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7488005385290184490:2147];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:05:13.739779Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a64/r3tmp/tmpLTfHbr/pdisk_1.dat 2025-03-31T16:05:13.871722Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:05:13.882756Z node 5 :HTTP ERROR: (#30,[::1]:10258) connection closed with error: Connection refused 2025-03-31T16:05:13.882977Z node 5 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-03-31T16:05:13.897397Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:05:13.897486Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:05:13.898660Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:05:16.888689Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7488005396421190478:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:05:16.888783Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a64/r3tmp/tmprv6Hz5/pdisk_1.dat 2025-03-31T16:05:17.032361Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:05:17.061858Z node 6 :HTTP ERROR: (#32,[::1]:20673) connection closed with error: Connection refused 2025-03-31T16:05:17.062462Z node 6 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-03-31T16:05:17.062870Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:05:17.062937Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:05:17.064933Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:05:20.306015Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7488005415668727578:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:05:20.306064Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a64/r3tmp/tmpzRc7TG/pdisk_1.dat 2025-03-31T16:05:20.444329Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:05:20.451906Z node 7 :HTTP ERROR: (#34,[::1]:24099) 
connection closed with error: Connection refused 2025-03-31T16:05:20.452321Z node 7 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-03-31T16:05:20.454294Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:05:20.454391Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:05:20.456287Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:05:24.130387Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7488005432497534211:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:05:24.130474Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a64/r3tmp/tmpp0yrPT/pdisk_1.dat 2025-03-31T16:05:24.257118Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:05:24.284575Z node 8 :HTTP ERROR: (#36,[::1]:31477) connection closed with error: Connection refused 2025-03-31T16:05:24.284798Z node 8 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-03-31T16:05:24.288101Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:05:24.288199Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:05:24.290168Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:05:27.621315Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7488005444737086678:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:05:27.621495Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a64/r3tmp/tmp31xdFa/pdisk_1.dat 2025-03-31T16:05:27.760819Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:05:27.791542Z node 9 :HTTP ERROR: (#38,[::1]:30232) connection closed with error: Connection refused 2025-03-31T16:05:27.795630Z node 9 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-03-31T16:05:27.796980Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:05:27.797089Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:05:27.799864Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:05:31.829491Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7488005463116997545:2195];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:05:31.856394Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a64/r3tmp/tmp55dmIc/pdisk_1.dat 
2025-03-31T16:05:32.060361Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:05:32.068903Z node 10 :HTTP ERROR: (#26,[::1]:64859) connection closed with error: Connection refused 2025-03-31T16:05:32.071428Z node 10 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-03-31T16:05:32.074282Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileStat ... 6131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a64/r3tmp/tmpPH87dk/pdisk_1.dat 2025-03-31T16:05:58.114633Z node 14 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:05:58.273765Z node 14 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:05:58.307880Z node 14 :HTTP ERROR: (#34,[::1]:65207) connection closed with error: Connection refused 2025-03-31T16:05:58.308419Z node 14 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-03-31T16:05:58.308803Z node 14 :HIVE WARN: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:05:58.308900Z node 14 :HIVE WARN: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:05:58.312502Z node 14 :HIVE WARN: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Connecting -> Connected test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a64/r3tmp/tmpgJlnJg/pdisk_1.dat 2025-03-31T16:06:04.183447Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:06:04.187689Z node 15 :HTTP ERROR: (#36,[::1]:18431) connection closed with error: Connection refused 2025-03-31T16:06:04.189924Z node 15 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-03-31T16:06:04.190274Z node 15 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:06:04.222582Z node 15 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:06:04.223034Z node 15 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:06:04.224905Z node 15 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:06:09.260311Z node 16 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[16:7488005623589009091:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:06:09.261254Z node 16 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a64/r3tmp/tmp84758o/pdisk_1.dat 2025-03-31T16:06:09.550408Z node 16 :HTTP ERROR: (#38,[::1]:64350) connection closed with error: Connection refused 2025-03-31T16:06:09.555659Z node 16 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-03-31T16:06:09.557498Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:06:09.557596Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2025-03-31T16:06:09.559680Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:06:09.587502Z node 16 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:06:18.831379Z node 17 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[17:7488005666356729516:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:06:18.831508Z node 17 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a64/r3tmp/tmpA4IX6f/pdisk_1.dat 2025-03-31T16:06:19.470223Z node 17 :HTTP ERROR: (#26,[::1]:23061) connection closed with error: Connection refused 2025-03-31T16:06:19.507147Z node 17 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-03-31T16:06:19.507689Z node 17 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:06:19.510818Z node 17 :HIVE WARN: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:06:19.510931Z node 17 :HIVE WARN: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:06:19.520596Z node 17 :HIVE WARN: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:06:27.857745Z node 18 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[18:7488005702632972927:2080];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:06:27.858901Z node 18 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a64/r3tmp/tmpcX91rF/pdisk_1.dat 2025-03-31T16:06:28.271271Z node 18 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:06:28.301306Z node 18 :HTTP ERROR: (#28,[::1]:61408) connection closed with error: Connection refused 2025-03-31T16:06:28.301615Z node 18 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-03-31T16:06:28.302412Z node 18 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:06:28.302525Z node 18 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:06:28.305944Z node 18 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:06:35.964407Z node 19 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[19:7488005735728465861:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:06:35.964452Z node 19 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a64/r3tmp/tmpvY6CQV/pdisk_1.dat 2025-03-31T16:06:36.282688Z node 19 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:06:36.331062Z node 19 :HTTP ERROR: (#30,[::1]:16401) connection closed with error: Connection refused 2025-03-31T16:06:36.331870Z node 19 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 
2025-03-31T16:06:36.335810Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:06:36.335943Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:06:36.341203Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:06:42.783024Z node 20 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[20:7488005769190629754:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:06:42.783068Z node 20 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a64/r3tmp/tmps3FxTQ/pdisk_1.dat 2025-03-31T16:06:43.157052Z node 20 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:06:43.187242Z node 20 :HTTP ERROR: (#32,[::1]:24825) connection closed with error: Connection refused 2025-03-31T16:06:43.188037Z node 20 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-03-31T16:06:43.268968Z node 20 :HIVE WARN: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:06:43.269113Z node 20 :HIVE WARN: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:06:43.271072Z node 20 :HIVE WARN: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:06:49.481342Z node 21 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[21:7488005798417875349:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:06:49.481531Z node 21 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a64/r3tmp/tmpuPNO5L/pdisk_1.dat 2025-03-31T16:06:49.829176Z node 21 :HTTP ERROR: (#34,[::1]:6481) connection closed with error: Connection refused 2025-03-31T16:06:49.829786Z node 21 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-03-31T16:06:49.831280Z node 21 :HIVE WARN: HIVE#72057594037968897 Node(21, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:06:49.831398Z node 21 :HIVE WARN: HIVE#72057594037968897 Node(21, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:06:49.836111Z node 21 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:06:49.861398Z node 21 :HIVE WARN: HIVE#72057594037968897 Node(21, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:06:55.890726Z node 22 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[22:7488005823272924366:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:06:55.890781Z node 22 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a64/r3tmp/tmpfSvxdV/pdisk_1.dat 2025-03-31T16:06:56.063908Z node 22 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:06:56.091367Z node 22 :HTTP ERROR: (#36,[::1]:25348) 
connection closed with error: Connection refused 2025-03-31T16:06:56.094485Z node 22 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-03-31T16:06:56.096299Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:06:56.096434Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:06:56.098968Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:07:02.040112Z node 23 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[23:7488005851942195616:2057];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:07:02.040181Z node 23 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a64/r3tmp/tmplX4v3U/pdisk_1.dat 2025-03-31T16:07:02.316188Z node 23 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:07:02.348228Z node 23 :HIVE WARN: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:07:02.348495Z node 23 :HIVE WARN: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:07:02.350437Z node 23 :HTTP ERROR: (#38,[::1]:28801) connection closed with error: Connection refused 2025-03-31T16:07:02.359157Z node 23 :HIVE WARN: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:07:02.359717Z node 23 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused >> DataShardReadIterator::ShouldReadRangePrefix2 [GOOD] >> DataShardReadIterator::ShouldReadRangePrefix3 >> KqpPg::TempTablesWithCache [FAIL] >> KqpPg::TableDeleteWhere+useSink |79.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut |79.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut |79.7%| [LD] {RESULT} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut >> DataShardReadIterator::ShouldReadFromHeadWithConflict+UseSink [GOOD] >> DataShardReadIterator::ShouldReadFromHeadWithConflict-UseSink >> TTicketParserTest::TicketFromCertificateWithValidationGood >> TImmediateControlsConfiguratorTests::TestModifiedControls [GOOD] >> TImmediateControlsConfiguratorTests::TestResetToDefault |79.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_sample_k/ydb-core-tx-datashard-ut_sample_k |79.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_sample_k/ydb-core-tx-datashard-ut_sample_k |79.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_sample_k/ydb-core-tx-datashard-ut_sample_k >> LabeledDbCounters::TwoTablets [GOOD] >> LabeledDbCounters::TwoTabletsKillOneTablet |79.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut |79.7%| [LD] {RESULT} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut |79.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut |79.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init >> TTicketParserTest::BulkAuthorizationRetryError |79.7%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init |79.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init >> DataShardReadIterator::ShouldReadRangeChunk1 [GOOD] >> DataShardReadIterator::ShouldReadRangeChunk2 >> TTicketParserTest::AccessServiceAuthenticationApiKeyOk [GOOD] >> TTicketParserTest::AuthenticationUnavailable >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips2-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeLeftBorder+EvWrite >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinue+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinue-EvWrite >> TImmediateControlsConfiguratorTests::TestResetToDefault [GOOD] >> TImmediateControlsConfiguratorTests::TestMaxLimit >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-fifo] [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-std] >> KqpJoinOrder::ShuffleEliminationDifferentJoinPredicateKeyTypeCorrectness1 >> KqpJoinOrder::ShuffleEliminationOneJoin >> DataShardReadIterator::ShouldNotReadFutureMvccFromFollower [GOOD] >> DataShardReadIterator::ShouldProperlyOrderConflictingTransactionsMvcc+UseSink >> TImmediateControlsConfiguratorTests::TestMaxLimit [GOOD] >> TImmediateControlsConfiguratorTests::TestDynamicMap >> DataShardReadIterator::TryWriteManyRows-Commit [GOOD] >> DataShardReadIteratorBatchMode::RangeFull >> TTicketParserTest::TicketFromCertificateWithValidationGood [GOOD] >> TTicketParserTest::TicketFromCertificateWithValidationDifferentIssuersGood >> TPersQueueTest::ReadFromSeveralPartitions [GOOD] >> TPersQueueTest::Init |81.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest >> TImmediateControlsConfiguratorTests::TestDynamicMap [GOOD] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowExprKey-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowListKey-default.txt] >> TTicketParserTest::AuthenticationUnavailable [GOOD] >> TTicketParserTest::AuthenticationRetryError |81.0%| [TA] $(B)/ydb/core/tx/schemeshard/ut_rtmr/test-results/unittest/{meta.json ... results_accumulator.log} >> test_sql_streaming.py::test[suites-ReadTopicGroupWriteToSolomon-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-ReadTopicWithMetadata-default.txt] |80.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std |80.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std |81.0%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_rtmr/test-results/unittest/{meta.json ... 
results_accumulator.log} |81.0%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std |81.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql |81.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql |81.0%| [LD] {RESULT} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql |81.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test |81.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test |81.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/olap_workload/tests/ydb-tests-stress-olap_workload-tests |81.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/olap_workload/tests/ydb-tests-stress-olap_workload-tests |81.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe |81.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe |81.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/example/ydb-tests-example |81.0%| [LD] {RESULT} $(B)/ydb/tests/stress/olap_workload/tests/ydb-tests-stress-olap_workload-tests |81.0%| [LD] {RESULT} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test |81.0%| [LD] {RESULT} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe |81.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/example/ydb-tests-example |81.1%| [LD] {RESULT} $(B)/ydb/tests/example/ydb-tests-example |81.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption |81.1%| [LD] {RESULT} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption |81.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption >> DataShardReadIterator::ShouldCommitLocksWhenReadWriteInSeparateTransactions [GOOD] >> DataShardReadIterator::HandlePersistentSnapshotGoneInContinue [GOOD] >> DataShardReadIterator::HandleMvccGoneInContinue [GOOD] >> TTicketParserTest::AuthorizationRetryError [GOOD] >> TTicketParserTest::AuthorizationRetryErrorImmediately >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-std] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-std] |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[fifo-tables_format_v1] [GOOD] >> KqpIndexLookupJoin::InnerJoinOnlyLeftColumn+StreamLookup >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v0-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v0-std] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut/unittest >> TImmediateControlsConfiguratorTests::TestDynamicMap [GOOD] Test command err: 2025-03-31T16:05:11.607496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:05:11.607591Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:05:11.692079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose 
itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-31T16:05:12.936645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:1, at schemeshard: 72057594046578944 2025-03-31T16:05:13.095763Z node 9 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-03-31T16:05:13.096299Z node 9 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/0l5c/0009ff/r3tmp/tmpmPv0ao/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-03-31T16:05:13.097036Z node 9 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/0l5c/0009ff/r3tmp/tmpmPv0ao/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/0l5c/0009ff/r3tmp/tmpmPv0ao/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 11685763256073546241 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-03-31T16:05:13.147170Z node 8 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-03-31T16:05:13.147693Z node 8 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/0l5c/0009ff/r3tmp/tmpmPv0ao/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-03-31T16:05:13.147949Z node 8 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/0l5c/0009ff/r3tmp/tmpmPv0ao/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/0l5c/0009ff/r3tmp/tmpmPv0ao/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 7696408821674328592 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-03-31T16:05:13.180916Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-03-31T16:05:13.181458Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/0l5c/0009ff/r3tmp/tmpmPv0ao/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-03-31T16:05:13.181688Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/0l5c/0009ff/r3tmp/tmpmPv0ao/pdisk_1.dat": unknown reason, errno# 0. 
Cannot be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/0l5c/0009ff/r3tmp/tmpmPv0ao/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 12996129350302149671 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption# 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWriteBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000
2025-03-31T16:05:13.185431Z node 2 :BS_LOCALRECOVERY CRIT: VDISK[80000000:_:0:0:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip# 0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/0l5c/0009ff/r3tmp/tmpmPv0ao/pdisk_1.dat": unknown reason, errno# 0. Cannot be initialized" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR
2025-03-31T16:05:13.254772Z node 7 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000
2025-03-31T16:05:13.263001Z node 7 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/0l5c/0009ff/r3tmp/tmpmPv0ao/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000
2025-03-31T16:05:13.263491Z node 7 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/0l5c/0009ff/r3tmp/tmpmPv0ao/pdisk_1.dat": unknown reason, errno# 0. Cannot be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/0l5c/0009ff/r3tmp/tmpmPv0ao/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 12233698680520573338 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption# 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWriteBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000
2025-03-31T16:05:13.299784Z node 6 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000
... istered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: the following immediate controls were likewise registered before TImmediateControlsConfigurator creation, so a default value may have been used before each was configured: TxLimitControls.PerRequestDataSizeLimit, TxLimitControls.PerShardReadSizeLimit, TxLimitControls.PerShardIncomingReadSetSizeLimit, TxLimitControls.DefaultTimeoutMs, CoordinatorControls.EnableLeaderLeases, CoordinatorControls.MinLeaderLeaseDurationUs, CoordinatorControls.VolatilePlanLeaseMs, CoordinatorControls.PlanAheadTimeShiftMs, CoordinatorControls.MinPlanResolutionMs, SchemeShardControls.ForceShardSplitDataSize, SchemeShardControls.DisableForceShardSplit, TCMallocControls.ProfileSamplingRate, TCMallocControls.GuardedSamplingRate, TCMallocControls.PageCacheTargetSize, TCMallocControls.PageCacheReleaseRate, VDiskControls.EnableLocalSyncLogDataCutting, VDiskControls.EnableSyncLogChunkCompressionHDD, VDiskControls.EnableSyncLogChunkCompressionSSD, VDiskControls.MaxSyncLogChunksInFlightHDD, VDiskControls.MaxSyncLogChunksInFlightSSD, VDiskControls.BurstThresholdNsHDD, VDiskControls.BurstThresholdNsSSD, VDiskControls.BurstThresholdNsNVME, VDiskControls.DiskTimeAvailableScaleHDD, VDiskControls.DiskTimeAvailableScaleSSD, VDiskControls.DiskTimeAvailableScaleNVME, VDiskControls.DefaultHugeGarbagePerMille, VDiskControls.HugeDefragFreeSpaceBorderPerMille, VDiskControls.MaxChunksToDefragInflight, VDiskControls.ThrottlingDryRun, VDiskControls.ThrottlingMinLevel0SstCount, VDiskControls.ThrottlingMaxLevel0SstCount, VDiskControls.ThrottlingMinInplacedSizeHDD, VDiskControls.ThrottlingMaxInplacedSizeHDD, VDiskControls.ThrottlingMinInplacedSizeSSD, VDiskControls.ThrottlingMaxInplacedSizeSSD, VDiskControls.ThrottlingMinOccupancyPerMille, VDiskControls.ThrottlingMaxOccupancyPerMille, VDiskControls.ThrottlingMinLogChunkCount, VDiskControls.ThrottlingMaxLogChunkCount, VDiskControls.MaxInProgressSyncCount, TabletControls.MaxCommitRedoMB, DSProxyControls.SlowDiskThreshold, DSProxyControls.PredictedDelayMultiplier, DSProxyControls.LongRequestThresholdMs, DSProxyControls.MaxNumOfSlowDisks, DSProxyControls.SlowDiskThresholdHDD, DSProxyControls.PredictedDelayMultiplierHDD, DSProxyControls.MaxNumOfSlowDisksHDD, DSProxyControls.SlowDiskThresholdSSD, DSProxyControls.PredictedDelayMultiplierSSD, DSProxyControls.MaxNumOfSlowDisksSSD, DSProxyControls.RequestReportingSettings.BucketSize, DSProxyControls.RequestReportingSettings.LeakDurationMs, DSProxyControls.RequestReportingSettings.LeakRate, PDiskControls.MaxCommonLogChunksHDD, PDiskControls.MaxCommonLogChunksSSD, PDiskControls.UseNoopSchedulerHDD, PDiskControls.UseNoopSchedulerSSD, BlobStorageControllerControls.EnableSelfHealWithDegraded, TableServiceControls.EnableMergeDatashardReads, TestShardControls.DisableWrites.
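Collapsed like this, the registration warnings are easy to audit. When working from the raw log, a small helper can perform the same reduction; a minimal sketch, assuming the log text is available as a plain file (the path ya_log.txt below is hypothetical):

```python
import re
from collections import Counter

# Collapse repeated "immediate control" registration warnings to one
# count per control name, using the exact message format seen above.
PATTERN = re.compile(
    r"WARNING: immediate control (\S+) was registered before "
    r"TImmediateControlsConfigurator creation"
)

def summarize_control_warnings(log_path: str) -> Counter:
    counts: Counter = Counter()
    with open(log_path, encoding="utf-8", errors="replace") as log:
        for line in log:
            for name in PATTERN.findall(line):
                counts[name] += 1
    return counts

if __name__ == "__main__":
    for name, count in summarize_control_warnings("ya_log.txt").most_common():
        print(f"{count:4d}  {name}")
```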
>> DataShardReadIterator::ShouldReadRangePrefix3 [GOOD]
>> DataShardReadIterator::ShouldReadRangePrefix4
>> DataShardReadIteratorPageFaults::LocksNotLostOnPageFault [GOOD]
>> DataShardReadIteratorState::ShouldCalculateQuota [GOOD]
>> DataShardReadIteratorSysTables::ShouldForbidSchemaVersion
>> DataShardReadIterator::ShouldReadFromHeadWithConflict-UseSink [GOOD]
>> DataShardReadIterator::ShouldReadFromHeadToMvccWithConflict+UseSink
>> TTicketParserTest::TicketFromCertificateWithValidationDifferentIssuersGood [GOOD]
>> TTicketParserTest::TicketFromCertificateWithValidationDifferentIssuersBad
>> TPersQueueTest::SchemeOperationFirstClassCitizen [GOOD]
>> TPersQueueTest::SchemeOperationsCheckPropValues
>> Cdc::AwsRegion [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIterator::HandleMvccGoneInContinue [GOOD]
Test command err:
2025-03-31T16:05:32.811556Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:05:32.811746Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:05:32.811865Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000c55/r3tmp/tmpypKxyN/pdisk_1.dat 2025-03-31T16:05:33.218907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:05:33.272464Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:05:33.318487Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:05:33.318647Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:05:33.330163Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:05:33.420290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:05:33.474666Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:05:33.475933Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:05:33.476494Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-31T16:05:33.476768Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:05:33.529531Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:05:33.530320Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:05:33.530446Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:05:33.532472Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:05:33.532575Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:05:33.532673Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:05:33.533103Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:05:33.533256Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:05:33.533341Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-31T16:05:33.547681Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:05:33.571912Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T16:05:33.572107Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:05:33.572268Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-31T16:05:33.572317Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:05:33.572354Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T16:05:33.572408Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:05:33.572623Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:05:33.572679Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:05:33.572965Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T16:05:33.573089Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T16:05:33.573212Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:05:33.573275Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:05:33.573333Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-31T16:05:33.573378Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-31T16:05:33.573423Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-31T16:05:33.573466Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:05:33.573515Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:05:33.573618Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:05:33.573668Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:05:33.573728Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-31T16:05:33.573834Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-03-31T16:05:33.573871Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T16:05:33.573989Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:05:33.574238Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-31T16:05:33.574312Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T16:05:33.574401Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T16:05:33.574448Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-31T16:05:33.574530Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-31T16:05:33.574572Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
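The propose-path trace above repeats one pattern per step: the datashard tries an operation on an execution unit, the unit reports a status (Executed, DelayComplete, or "not ready"), and the operation is advanced to the next unit (here CheckSchemeTx, StoreSchemeTx, FinishPropose, then WaitForPlan). A minimal sketch of that execution-unit style follows; the unit behaviors are made up for illustration and this is not YDB's actual implementation:

```python
from enum import Enum, auto

class EStatus(Enum):
    EXECUTED = auto()        # unit finished, advance immediately
    DELAY_COMPLETE = auto()  # completion is deferred, but planning advances
    NOT_READY = auto()       # park the operation (e.g. until a plan step arrives)

def run_pipeline(units, op):
    # Mirrors the Trying/Execution status/Advance trace lines above.
    for name, execute in units:
        print(f"Trying to execute {op} on unit {name}")
        status = execute(op)
        print(f"Execution status for {op} is {status.name}")
        if status is EStatus.NOT_READY:
            print(f"Operation {op} is not ready to execute on unit {name}")
            return  # re-triggered later by an external event
        print(f"Advance execution plan for {op} executing on unit {name}")

units = [
    ("CheckSchemeTx", lambda op: EStatus.EXECUTED),
    ("StoreSchemeTx", lambda op: EStatus.DELAY_COMPLETE),
    ("FinishPropose", lambda op: EStatus.DELAY_COMPLETE),
    ("WaitForPlan",   lambda op: EStatus.NOT_READY),
]
run_pipeline(units, "[0:281474976715657]")
```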
2025-03-31T16:05:33.574621Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-31T16:05:33.574932Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-31T16:05:33.574974Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-31T16:05:33.575011Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-31T16:05:33.575046Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-31T16:05:33.575130Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-31T16:05:33.575180Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-31T16:05:33.575221Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-31T16:05:33.575253Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-31T16:05:33.575276Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-31T16:05:33.577151Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-31T16:05:33.577217Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:05:33.591654Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:05:33.591737Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-31T16:05:33.591779Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-31T16:05:33.591843Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-03-31T16:05:33.591924Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-31T16:05:33.750262Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:05:33.750332Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:05:33.750373Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-31T16:05:33.750769Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:568:2495], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-03-31T16:05:33.750814Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-31T16:05:33.750926Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-31T16:05:33.750965Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-03-31T16:05:33.751025Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-03-31T16:05:33.751068Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-03-31T16:05:33.755671Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T16:05:33.755775Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:05:33.756494Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:05:33.756542Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:05:33.756613Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:05:3 ... 474976715665] at 72075186224037889 executing on unit WaitForPlan 2025-03-31T16:07:18.544143Z node 13 :TX_DATASHARD TRACE: Add [3500:281474976715665] at 72075186224037889 to execution unit PlanQueue 2025-03-31T16:07:18.544353Z node 13 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715665 at step 3500 at tablet 72075186224037889 { Transactions { TxId: 281474976715665 AckTo { RawX1: 0 RawX2: 0 } } Step: 3500 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2025-03-31T16:07:18.544408Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-31T16:07:18.544734Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [13:884:2711], Recipient [13:884:2711]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:07:18.544779Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:07:18.544833Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-31T16:07:18.545293Z node 13 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:07:18.545356Z node 13 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2025-03-31T16:07:18.545405Z node 13 :TX_DATASHARD DEBUG: Found ready operation [3500:281474976715665] in PlanQueue unit at 72075186224037889 2025-03-31T16:07:18.545461Z node 13 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715665] at 72075186224037889 on unit PlanQueue 2025-03-31T16:07:18.545505Z node 13 :TX_DATASHARD TRACE: Execution status for [3500:281474976715665] at 72075186224037889 is Executed 2025-03-31T16:07:18.545545Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715665] at 72075186224037889 executing on unit PlanQueue 2025-03-31T16:07:18.545584Z node 13 :TX_DATASHARD TRACE: Add [3500:281474976715665] at 72075186224037889 to execution unit LoadTxDetails 2025-03-31T16:07:18.545624Z node 13 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715665] at 72075186224037889 on unit LoadTxDetails 2025-03-31T16:07:18.545793Z node 13 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 3500:281474976715665 keys extracted: 0 2025-03-31T16:07:18.545848Z node 13 :TX_DATASHARD TRACE: 
Execution status for [3500:281474976715665] at 72075186224037889 is Executed 2025-03-31T16:07:18.545880Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715665] at 72075186224037889 executing on unit LoadTxDetails 2025-03-31T16:07:18.545908Z node 13 :TX_DATASHARD TRACE: Add [3500:281474976715665] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-03-31T16:07:18.545939Z node 13 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715665] at 72075186224037889 on unit BuildAndWaitDependencies 2025-03-31T16:07:18.545990Z node 13 :TX_DATASHARD TRACE: Operation [3500:281474976715665] is the new logically complete end at 72075186224037889 2025-03-31T16:07:18.546036Z node 13 :TX_DATASHARD TRACE: Operation [3500:281474976715665] is the new logically incomplete end at 72075186224037889 2025-03-31T16:07:18.546081Z node 13 :TX_DATASHARD TRACE: Activated operation [3500:281474976715665] at 72075186224037889 2025-03-31T16:07:18.546135Z node 13 :TX_DATASHARD TRACE: Execution status for [3500:281474976715665] at 72075186224037889 is Executed 2025-03-31T16:07:18.546163Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715665] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-03-31T16:07:18.546192Z node 13 :TX_DATASHARD TRACE: Add [3500:281474976715665] at 72075186224037889 to execution unit CreateVolatileSnapshot 2025-03-31T16:07:18.546225Z node 13 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715665] at 72075186224037889 on unit CreateVolatileSnapshot 2025-03-31T16:07:18.546341Z node 13 :TX_DATASHARD TRACE: Execution status for [3500:281474976715665] at 72075186224037889 is ExecutedNoMoreRestarts 2025-03-31T16:07:18.546377Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715665] at 72075186224037889 executing on unit CreateVolatileSnapshot 2025-03-31T16:07:18.546426Z node 13 :TX_DATASHARD TRACE: Add [3500:281474976715665] at 72075186224037889 to execution unit DropVolatileSnapshot 2025-03-31T16:07:18.546468Z node 13 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715665] at 72075186224037889 on unit DropVolatileSnapshot 2025-03-31T16:07:18.546502Z node 13 :TX_DATASHARD TRACE: Execution status for [3500:281474976715665] at 72075186224037889 is Executed 2025-03-31T16:07:18.546531Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715665] at 72075186224037889 executing on unit DropVolatileSnapshot 2025-03-31T16:07:18.546557Z node 13 :TX_DATASHARD TRACE: Add [3500:281474976715665] at 72075186224037889 to execution unit CompleteOperation 2025-03-31T16:07:18.546589Z node 13 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715665] at 72075186224037889 on unit CompleteOperation 2025-03-31T16:07:18.546768Z node 13 :TX_DATASHARD TRACE: Execution status for [3500:281474976715665] at 72075186224037889 is DelayComplete 2025-03-31T16:07:18.546808Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715665] at 72075186224037889 executing on unit CompleteOperation 2025-03-31T16:07:18.546851Z node 13 :TX_DATASHARD TRACE: Add [3500:281474976715665] at 72075186224037889 to execution unit CompletedOperations 2025-03-31T16:07:18.546891Z node 13 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715665] at 72075186224037889 on unit CompletedOperations 2025-03-31T16:07:18.546930Z node 13 :TX_DATASHARD TRACE: Execution status for [3500:281474976715665] at 72075186224037889 is Executed 2025-03-31T16:07:18.546960Z node 13 :TX_DATASHARD TRACE: Advance execution plan 
for [3500:281474976715665] at 72075186224037889 executing on unit CompletedOperations 2025-03-31T16:07:18.546989Z node 13 :TX_DATASHARD TRACE: Execution plan for [3500:281474976715665] at 72075186224037889 has finished 2025-03-31T16:07:18.547032Z node 13 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:07:18.547101Z node 13 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2025-03-31T16:07:18.547147Z node 13 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037889 has no attached operations 2025-03-31T16:07:18.547184Z node 13 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037889 2025-03-31T16:07:18.559071Z node 13 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 3500} 2025-03-31T16:07:18.559272Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:07:18.559367Z node 13 :TX_DATASHARD TRACE: Complete execution for [3500:281474976715665] at 72075186224037888 on unit CompleteOperation 2025-03-31T16:07:18.559496Z node 13 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715665] from 72075186224037888 at tablet 72075186224037888 send result to client [13:1081:2866], exec latency: 0 ms, propose latency: 0 ms 2025-03-31T16:07:18.559615Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:07:18.560111Z node 13 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 3500} 2025-03-31T16:07:18.560179Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-03-31T16:07:18.560218Z node 13 :TX_DATASHARD TRACE: Complete execution for [3500:281474976715665] at 72075186224037889 on unit CompleteOperation 2025-03-31T16:07:18.560270Z node 13 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715665] from 72075186224037889 at tablet 72075186224037889 send result to client [13:1081:2866], exec latency: 0 ms, propose latency: 0 ms 2025-03-31T16:07:18.560324Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-31T16:07:18.562712Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [13:593:2518], Recipient [13:666:2570]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3500 TxId: 281474976715665 } ResultFormat: FORMAT_ARROW KeysSize: 1 2025-03-31T16:07:18.562923Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-03-31T16:07:18.563036Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit CheckRead 2025-03-31T16:07:18.568444Z node 13 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 2025-03-31T16:07:18.568568Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit CheckRead 2025-03-31T16:07:18.568643Z node 13 :TX_DATASHARD TRACE: Add [0:8] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-03-31T16:07:18.568707Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit BuildAndWaitDependencies 2025-03-31T16:07:18.568764Z node 13 :TX_DATASHARD TRACE: Activated operation [0:8] at 72075186224037888 2025-03-31T16:07:18.568838Z node 13 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 
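The read-iterator lines just below report "sends rowCount# 1, bytes# 16, quota rows left# 18446744073709551614, quota bytes left# 18446744073709551599". Those numbers are consistent with quotas that start at the unsigned 64-bit maximum (2^64 − 1, effectively unlimited) and are decremented by what has already been sent: 2^64 − 1 − 1 = 18446744073709551614 rows and 2^64 − 1 − 16 = 18446744073709551599 bytes. A small sketch of that bookkeeping (an illustration of the arithmetic, not YDB's code):

```python
U64_MAX = 2**64 - 1  # iterator quota starts out as "unlimited"

def remaining_quota(rows_sent: int, bytes_sent: int,
                    row_quota: int = U64_MAX, byte_quota: int = U64_MAX):
    # Saturating subtraction: unsigned counters cannot go negative.
    return max(row_quota - rows_sent, 0), max(byte_quota - bytes_sent, 0)

rows_left, bytes_left = remaining_quota(rows_sent=1, bytes_sent=16)
assert rows_left == 18446744073709551614
assert bytes_left == 18446744073709551599
```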
2025-03-31T16:07:18.568874Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit BuildAndWaitDependencies
2025-03-31T16:07:18.568903Z node 13 :TX_DATASHARD TRACE: Add [0:8] at 72075186224037888 to execution unit ExecuteRead
2025-03-31T16:07:18.568930Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit ExecuteRead
2025-03-31T16:07:18.569130Z node 13 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3500 TxId: 281474976715665 } ResultFormat: FORMAT_ARROW }
2025-03-31T16:07:18.569621Z node 13 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3500/281474976715665
2025-03-31T16:07:18.569715Z node 13 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[13:593:2518], 1} after executionsCount# 1
2025-03-31T16:07:18.569807Z node 13 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[13:593:2518], 1} sends rowCount# 1, bytes# 16, quota rows left# 18446744073709551614, quota bytes left# 18446744073709551599, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0
2025-03-31T16:07:18.570074Z node 13 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[13:593:2518], 1} finished in read
2025-03-31T16:07:18.570202Z node 13 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed
2025-03-31T16:07:18.570234Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit ExecuteRead
2025-03-31T16:07:18.570265Z node 13 :TX_DATASHARD TRACE: Add [0:8] at 72075186224037888 to execution unit CompletedOperations
2025-03-31T16:07:18.570296Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit CompletedOperations
2025-03-31T16:07:18.570363Z node 13 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed
2025-03-31T16:07:18.570388Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit CompletedOperations
2025-03-31T16:07:18.570441Z node 13 :TX_DATASHARD TRACE: Execution plan for [0:8] at 72075186224037888 has finished
2025-03-31T16:07:18.570501Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888
2025-03-31T16:07:18.570697Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888
>> test_sql_streaming.py::test[suites-GroupByHopListKey-default.txt] [FAIL]
>> test_sql_streaming.py::test[suites-GroupByHopNoKey-default.txt]
|81.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/column_family/compression/ydb-tests-olap-column_family-compression
|81.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/column_family/compression/ydb-tests-olap-column_family-compression
|81.3%| [LD] {RESULT} $(B)/ydb/tests/olap/column_family/compression/ydb-tests-olap-column_family-compression
>> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeLeftBorder+EvWrite [GOOD]
>> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeLeftBorder-EvWrite
>> TTicketParserTest::LoginBad
>> TTicketParserTest::TicketFromCertificateCheckIssuerGood
>> DataShardReadIterator::ShouldReadRangeChunk2 [GOOD]
>> DataShardReadIterator::ShouldReadRangeChunk3
>> TTicketParserTest::NebiusAuthorizationRetryError [GOOD]
>> TTicketParserTest::NebiusAuthorizationRetryErrorImmediately
|81.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/simple_queue/tests/ydb-tests-stress-simple_queue-tests
|81.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/simple_queue/tests/ydb-tests-stress-simple_queue-tests
|81.3%| [LD] {RESULT} $(B)/ydb/tests/stress/simple_queue/tests/ydb-tests-stress-simple_queue-tests
>> test_sql_streaming.py::test[suites-ReadWriteSameTopic-default.txt] [FAIL]
>> test_sql_streaming.py::test[suites-ReadWriteTopic-default.txt]
|81.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut
|81.3%| [LD] {RESULT} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut
|81.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut
>> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v0-fifo] [GOOD]
>> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v0-std]
>> TTicketParserTest::AuthorizationRetryErrorImmediately [GOOD]
>> TTicketParserTest::AuthorizationWithRequiredPermissions
>> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinue-EvWrite [GOOD]
>> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinueInvisibleRowSkips+EvWrite
>> TPersQueueTest::TopicServiceCommitOffset [GOOD]
>> TPersQueueTest::TopicServiceCommitOffsetBadOffsets
|81.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queue_counters_are_in_folder[tables_format_v1] [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_exchange/unittest >> Cdc::AwsRegion [GOOD]
Test command err:
2025-03-31T16:04:23.260626Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:04:23.260865Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:04:23.261000Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001952/r3tmp/tmpa35DLe/pdisk_1.dat 2025-03-31T16:04:23.756687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:04:23.835304Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:23.904842Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:23.904993Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:23.916604Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:04:24.020707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:04:24.098057Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-31T16:04:24.098399Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:04:24.161663Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:04:24.161819Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:04:24.163901Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:04:24.164016Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:04:24.164096Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:04:24.164559Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:04:24.164775Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:04:24.164916Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-31T16:04:24.175986Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:04:24.199285Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T16:04:24.199547Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:04:24.199711Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-31T16:04:24.199755Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:04:24.199788Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T16:04:24.199819Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:04:24.200210Z node 1 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T16:04:24.200305Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T16:04:24.200418Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:04:24.200468Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:04:24.200504Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:04:24.200545Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:04:24.200611Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-31T16:04:24.200765Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:04:24.200998Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T16:04:24.201111Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T16:04:24.202865Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:04:24.213860Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:04:24.213976Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-31T16:04:24.387768Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-31T16:04:24.393950Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T16:04:24.394059Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:04:24.394796Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:04:24.394875Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:04:24.394982Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-31T16:04:24.400893Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-31T16:04:24.401156Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:04:24.402001Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:04:24.402157Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-31T16:04:24.404630Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-31T16:04:24.405205Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
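Flattened trace runs like the ones above and below are easiest to scan mechanically. One low-tech triage trick is to compute the gap between consecutive record timestamps to spot stalls; a small sketch of a hypothetical helper, matching the timestamp format seen in these logs:

```python
import re
from datetime import datetime

# Timestamps in these logs look like 2025-03-31T16:04:24.200305Z.
TS = re.compile(r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z")

def record_gaps(text: str):
    """Yield (gap_seconds, timestamp) between consecutive trace records,
    a quick way to find where a flattened unittest log stalled."""
    stamps = [datetime.strptime(m.group(0), "%Y-%m-%dT%H:%M:%S.%fZ")
              for m in TS.finditer(text)]
    for prev, cur in zip(stamps, stamps[1:]):
        yield (cur - prev).total_seconds(), cur.isoformat()

# Usage: print the ten largest gaps in a captured log.
# with open("ya_log.txt", encoding="utf-8", errors="replace") as f:
#     for gap, ts in sorted(record_gaps(f.read()), reverse=True)[:10]:
#         print(f"{gap:8.3f}s before {ts}")
```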
2025-03-31T16:04:24.409556Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-03-31T16:04:24.409647Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:04:24.411072Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-03-31T16:04:24.411220Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:04:24.412277Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:04:24.412322Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:04:24.412385Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-31T16:04:24.412451Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-31T16:04:24.412507Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-31T16:04:24.412610Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:04:24.414097Z node 1 :CHANGE_EXCHANGE DEBUG: [ChangeSender][72075186224037888:1][1:684:2580][Inactive] Handle NKikimrChangeExchange.TEvActivateSender 2025-03-31T16:04:24.421210Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:04:24.423683Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-03-31T16:04:24.423978Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-03-31T16:04:24.424059Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-31T16:04:29.229085Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:101:2147], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:04:29.229609Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:04:29.229718Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001952/r3tmp/tmp9PreAM/pdisk_1.dat 2025-03-31T16:04:29.591010Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:04:29.635422Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:29.677118Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:29.677264Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:29.692529Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:04:29.790341Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:04:29.821887Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:678:2579] 2025-03-31T16:04:29.822168Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:04:29.872248Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:04:29.872539Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:04:29.874703Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:04:29.874817Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:04:29.874890Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:04:29.889319Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:04:29.890019Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075 ... -31T16:07:20.202741Z node 21 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-03-31T16:07:20.202952Z node 21 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. 
Partition 0 offset 0 partNo 0 count 1 size 427
2025-03-31T16:07:20.203137Z node 21 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId:
2025-03-31T16:07:20.203188Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891] got client message batch for topic 'Table/Stream2/streamImpl' partition 0
2025-03-31T16:07:20.203247Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891] got client message topic: Table/Stream2/streamImpl partition: 0 SourceId: '\00072075186224037888' SeqNo: 2 partNo : 0 messageNo: 1 size 323 offset: -1
2025-03-31T16:07:20.203755Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] Topic 'Table/Stream2/streamImpl' partition 0 part blob processing sourceId '\00072075186224037888' seqNo 2 partNo 0
2025-03-31T16:07:20.204717Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] Topic 'Table/Stream2/streamImpl' partition 0 part blob complete sourceId '\00072075186224037888' seqNo 2 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 438 count 1 nextOffset 1 batches 1
2025-03-31T16:07:20.205263Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] Add new write blob: topic 'Table/Stream2/streamImpl' partition 0 compactOffset 0,1 HeadOffset 0 endOffset 0 curOffset 1 d0000000000_00000000000000000000_00000_0000000001_00000| size 426 WTime 2508
2025-03-31T16:07:20.205439Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] === DumpKeyValueRequest ===
2025-03-31T16:07:20.205475Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] --- delete ----------------
2025-03-31T16:07:20.205507Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] [x0000000000, x0000000001)
2025-03-31T16:07:20.205539Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] --- write -----------------
2025-03-31T16:07:20.205571Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] m0000000000p72075186224037888
2025-03-31T16:07:20.205602Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] d0000000000_00000000000000000000_00000_0000000001_00000|
2025-03-31T16:07:20.205629Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] i0000000000
2025-03-31T16:07:20.205656Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] --- rename ----------------
2025-03-31T16:07:20.205687Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] ===========================
2025-03-31T16:07:20.205837Z node 21 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV
2025-03-31T16:07:20.205918Z node 21 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 0 partNo 0 count 1 size 426
2025-03-31T16:07:20.207981Z node 21 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 0 count 1 size 427 actorID [21:794:2659]
2025-03-31T16:07:20.208317Z node 21 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. Tablet '72075186224037889' partition 0 offset 0 partno 0 count 1 parts 0 size 427
2025-03-31T16:07:20.208648Z node 21 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 0 count 1 size 426 actorID [21:972:2768]
2025-03-31T16:07:20.208774Z node 21 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. Tablet '72075186224037891' partition 0 offset 0 partno 0 count 1 parts 0 size 426
>>>>> GetRecords path=/Root/Table/Stream1 partitionId=0
2025-03-31T16:07:20.210314Z node 21 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId:
2025-03-31T16:07:20.210439Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream1/streamImpl' partition 0
2025-03-31T16:07:20.211451Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 0 Topic 'Table/Stream1/streamImpl' partition 0 user $without_consumer offset 0 count 10000 size 26214400 endOffset 0 max time lag 0ms effective offset 0
2025-03-31T16:07:20.211572Z node 21 :PERSQUEUE DEBUG: waiting read cookie 0 partition 0 user $without_consumer offset 0 count 10000 size 26214400 timeout 0
2025-03-31T16:07:20.211794Z node 21 :PERSQUEUE DEBUG: FormAnswer for 0 blobs
2025-03-31T16:07:20.211930Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] waiting read cookie 0 partition 0 read timeout for $without_consumer offset 0
2025-03-31T16:07:20.212086Z node 21 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0
2025-03-31T16:07:20.223288Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 342 WriteNewSizeFromSupportivePartitions# 0
2025-03-31T16:07:20.223504Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0
2025-03-31T16:07:20.223684Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Answering for message sourceid: '\00072075186224037888', Topic: 'Table/Stream1/streamImpl', Partition: 0, SeqNo: 1, partNo: 0, Offset: 0 is stored on disk
2025-03-31T16:07:20.224134Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream1/streamImpl' partition 0 user $without_consumer readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 1
2025-03-31T16:07:20.224273Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream1/streamImpl' partition 0 user $without_consumer send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 1
2025-03-31T16:07:20.224482Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 341 WriteNewSizeFromSupportivePartitions# 0
2025-03-31T16:07:20.224546Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0
2025-03-31T16:07:20.224610Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] Answering for message sourceid: '\00072075186224037888', Topic: 'Table/Stream2/streamImpl', Partition: 0, SeqNo: 2, partNo: 0, Offset: 0 is stored on disk
2025-03-31T16:07:20.224846Z node 21 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 1 requestId: cookie: 1
2025-03-31T16:07:20.225164Z node 21 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 1 requestId: cookie: 1
2025-03-31T16:07:20.225301Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 1 Topic 'Table/Stream1/streamImpl' partition 0 user $without_consumer offset 0 count 1 size 1024000 endOffset 1 max time lag 0ms effective offset 0
2025-03-31T16:07:20.225440Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 1 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 1
2025-03-31T16:07:20.225610Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Reading cookie 1. All data is from uncompacted head.
2025-03-31T16:07:20.225715Z node 21 :PERSQUEUE DEBUG: FormAnswer for 0 blobs
2025-03-31T16:07:20.226176Z node 21 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderPartition][72075186224037888:1][0][72075186224037889][21:1172:2714] Handle NKikimrClient.TResponse { SessionId: TxId: Success { Response: Status: 1 ErrorCode: OK PartitionResponse { CmdWriteResult { AlreadyWritten: false SourceId: "\00072075186224037888" SeqNo: 1 Offset: 0 WriteTimestampMS: 2508 PartitionQuotedTimeMs: 0 TotalTimeInPartitionQueueMs: 0 WriteTimeMs: 0 TopicQuotedTimeMs: 0 WrittenInTx: false } Cookie: 1 } } }
2025-03-31T16:07:20.226376Z node 21 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderPartition][72075186224037888:1][0][72075186224037891][21:1175:2812] Handle NKikimrClient.TResponse { SessionId: TxId: Success { Response: Status: 1 ErrorCode: OK PartitionResponse { CmdWriteResult { AlreadyWritten: false SourceId: "\00072075186224037888" SeqNo: 2 Offset: 0 WriteTimestampMS: 2508 PartitionQuotedTimeMs: 0 TotalTimeInPartitionQueueMs: 0 WriteTimeMs: 0 TopicQuotedTimeMs: 0 WrittenInTx: false } Cookie: 1 } } }
2025-03-31T16:07:20.226467Z node 21 :PERSQUEUE DEBUG: Topic 'Table/Stream1/streamImpl' partition 0 user $without_consumer readTimeStamp done, result 2508 queuesize 0 startOffset 0
2025-03-31T16:07:20.226700Z node 21 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderMain][72075186224037888:1][21:876:2714] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 0 }
2025-03-31T16:07:20.226841Z node 21 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderMain][72075186224037888:1][21:1030:2812] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 0 }
2025-03-31T16:07:20.227049Z node 21 :TX_DATASHARD INFO: TTxRemoveChangeRecords Execute: records# 2, at tablet# 72075186224037888
2025-03-31T16:07:20.230540Z node 21 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 1, at tablet: 72075186224037888
2025-03-31T16:07:20.230869Z node 21 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 2, at tablet: 72075186224037888
2025-03-31T16:07:20.244130Z node 21 :TX_DATASHARD INFO: TTxRemoveChangeRecords Complete: removed# 2, left# 0, at tablet# 72075186224037888
>>>>> GetRecords path=/Root/Table/Stream1 partitionId=0
2025-03-31T16:07:20.596352Z node 21 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId:
2025-03-31T16:07:20.596447Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream1/streamImpl' partition 0
2025-03-31T16:07:20.596618Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 2 Topic 'Table/Stream1/streamImpl' partition 0 user $without_consumer offset 0 count 10000 size 26214400 endOffset 1 max time lag 0ms effective offset 0
2025-03-31T16:07:20.596713Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 2 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 1
2025-03-31T16:07:20.596894Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Reading cookie 2. All data is from uncompacted head.
2025-03-31T16:07:20.596987Z node 21 :PERSQUEUE DEBUG: FormAnswer for 0 blobs
2025-03-31T16:07:20.597815Z node 21 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0
>>>>> GetRecords path=/Root/Table/Stream2 partitionId=0
2025-03-31T16:07:20.608183Z node 21 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId:
2025-03-31T16:07:20.608366Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891] got client message batch for topic 'Table/Stream2/streamImpl' partition 0
2025-03-31T16:07:20.609516Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] read cookie 0 Topic 'Table/Stream2/streamImpl' partition 0 user $without_consumer offset 0 count 10000 size 26214400 endOffset 1 max time lag 0ms effective offset 0
2025-03-31T16:07:20.609653Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 1
2025-03-31T16:07:20.609798Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] Reading cookie 0. All data is from uncompacted head.
2025-03-31T16:07:20.609899Z node 21 :PERSQUEUE DEBUG: FormAnswer for 0 blobs
2025-03-31T16:07:20.610711Z node 21 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-std] [GOOD]
|81.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable
|81.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable
|81.3%| [LD] {RESULT} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable
>> DataShardReadIteratorBatchMode::RangeFull [GOOD]
>> DataShardReadIteratorBatchMode::RangeFromInclusive
>> TTicketParserTest::BulkAuthorizationRetryError [GOOD]
>> TTicketParserTest::BulkAuthorizationRetryErrorImmediately
>> DataShardReadIterator::ShouldProperlyOrderConflictingTransactionsMvcc+UseSink [GOOD]
>> DataShardReadIterator::ShouldProperlyOrderConflictingTransactionsMvcc-UseSink
>> TPersQueueCommonTest::Auth_WriteSessionWithValidTokenAndACEAndThenRemoveACEAndSendWriteRequest_SessionClosedWithUnauthorizedErrorAfterSuccessfullWriteResponse [GOOD]
>> TPersQueueCommonTest::Auth_MultipleInflightWriteUpdateTokenRequestWithDifferentValidToken_SessionClosedWithOverloadedError
|81.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/viewer/tests/ydb-core-viewer-tests
|81.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/viewer/tests/ydb-core-viewer-tests
|81.3%| [LD] {RESULT} $(B)/ydb/core/viewer/tests/ydb-core-viewer-tests
|81.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v1] [GOOD]
|81.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_reshuffle_kmeans/ydb-core-tx-datashard-ut_reshuffle_kmeans
|81.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_reshuffle_kmeans/ydb-core-tx-datashard-ut_reshuffle_kmeans
|81.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_reshuffle_kmeans/ydb-core-tx-datashard-ut_reshuffle_kmeans
>> TTicketParserTest::TicketFromCertificateWithValidationDifferentIssuersBad [GOOD]
>> TTicketParserTest::TicketFromCertificateWithValidationDefaultGroupGood
|81.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens
|81.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens
|81.4%| [LD] {RESULT} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens
>> TTicketParserTest::NebiusAuthorizationRetryErrorImmediately [GOOD]
>> TTicketParserTest::LoginBad [GOOD]
>> TTicketParserTest::NebiusAuthorization
>> TTicketParserTest::BulkAuthorizationWithRequiredPermissions
>> TTicketParserTest::AuthorizationWithRequiredPermissions [GOOD]
>> TTicketParserTest::AuthorizationWithUserAccount
>> TTicketParserTest::TicketFromCertificateCheckIssuerGood [GOOD]
>> TTicketParserTest::TicketFromCertificateCheckIssuerBad
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_retryable_iam_error[tables_format_v0]
|81.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest
>> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-std] [GOOD]
>> DataShardReadIterator::ShouldReadRangePrefix4 [GOOD]
>> DataShardReadIterator::ShouldReadRangePrefix5
>> DataShardReadIteratorSysTables::ShouldForbidSchemaVersion [GOOD]
|81.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests
|81.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests
|81.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/library/ut/ydb-tests-library-ut
|81.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/library/ut/ydb-tests-library-ut
|81.9%| [LD] {RESULT} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests
|81.9%| [LD] {RESULT} $(B)/ydb/tests/library/ut/ydb-tests-library-ut
|81.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/scenario/ydb-tests-olap-scenario
|82.0%| [LD] {RESULT} $(B)/ydb/tests/olap/scenario/ydb-tests-olap-scenario
|82.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/scenario/ydb-tests-olap-scenario
>> TTicketParserTest::BulkAuthorizationRetryErrorImmediately [GOOD]
>> TTicketParserTest::BulkAuthorization
>> TTicketParserTest::AuthenticationRetryError [GOOD]
>> TTicketParserTest::AuthenticationRetryErrorImmediately
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v1-std] [GOOD]
|82.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut
|82.0%| [LD] {RESULT} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut
|82.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut
>> DataShardReadIterator::ShouldReadFromHeadToMvccWithConflict+UseSink [GOOD]
>> DataShardReadIterator::ShouldReadFromHeadToMvccWithConflict-UseSink
>> KqpJoin::RightTableValuePredicate
>> DataShardReadIterator::ShouldReadRangeChunk3 [GOOD]
>> DataShardReadIterator::ShouldReadRangeChunk100
>> TTicketParserTest::BulkAuthorizationWithRequiredPermissions [GOOD]
>> TTicketParserTest::BulkAuthorizationWithUserAccount
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIteratorSysTables::ShouldForbidSchemaVersion [GOOD]
Test command err:
2025-03-31T16:05:32.849595Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-03-31T16:05:32.849929Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-31T16:05:32.850157Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000c44/r3tmp/tmpNW0UkY/pdisk_1.dat
2025-03-31T16:05:33.274922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480
2025-03-31T16:05:33.337623Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:05:33.380700Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:05:33.380851Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:05:33.392734Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-31T16:05:33.483078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480
2025-03-31T16:05:33.539930Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot
2025-03-31T16:05:33.541239Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored
2025-03-31T16:05:33.541759Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570]
2025-03-31T16:05:33.542041Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute
2025-03-31T16:05:33.708022Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete
2025-03-31T16:05:33.708590Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete
2025-03-31T16:05:33.708692Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute
2025-03-31T16:05:33.710528Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888
2025-03-31T16:05:33.710637Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888
2025-03-31T16:05:33.710714Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888
2025-03-31T16:05:33.711547Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete
2025-03-31T16:05:33.711769Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute
2025-03-31T16:05:33.711859Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1
2025-03-31T16:05:33.724144Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete
2025-03-31T16:05:33.790871Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888
2025-03-31T16:05:33.791146Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params
2025-03-31T16:05:33.791343Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580]
2025-03-31T16:05:33.791399Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888
2025-03-31T16:05:33.791447Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme
2025-03-31T16:05:33.791505Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-03-31T16:05:33.791774Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction
2025-03-31T16:05:33.791833Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction
2025-03-31T16:05:33.792216Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888
2025-03-31T16:05:33.792331Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888
2025-03-31T16:05:33.792468Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888
2025-03-31T16:05:33.792522Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0
2025-03-31T16:05:33.792612Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888
2025-03-31T16:05:33.792660Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations
2025-03-31T16:05:33.792698Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888
2025-03-31T16:05:33.792734Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0
2025-03-31T16:05:33.792783Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888
2025-03-31T16:05:33.792897Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected
2025-03-31T16:05:33.792938Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected
2025-03-31T16:05:33.793009Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0]
2025-03-31T16:05:33.793162Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574]
2025-03-31T16:05:33.793211Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction
2025-03-31T16:05:33.793335Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888
2025-03-31T16:05:33.793574Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx
2025-03-31T16:05:33.793693Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1
2025-03-31T16:05:33.793802Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888
2025-03-31T16:05:33.793869Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts
2025-03-31T16:05:33.793921Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx
2025-03-31T16:05:33.793973Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx
2025-03-31T16:05:33.794015Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx
2025-03-31T16:05:33.794353Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts
2025-03-31T16:05:33.794406Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx
2025-03-31T16:05:33.794451Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose
2025-03-31T16:05:33.794499Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose
2025-03-31T16:05:33.794574Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete
2025-03-31T16:05:33.794621Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose
2025-03-31T16:05:33.794666Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan
2025-03-31T16:05:33.794702Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan
2025-03-31T16:05:33.794733Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan
2025-03-31T16:05:33.796789Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated
2025-03-31T16:05:33.796862Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888
2025-03-31T16:05:33.807882Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888
2025-03-31T16:05:33.807968Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx
2025-03-31T16:05:33.808013Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose
2025-03-31T16:05:33.808095Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED
2025-03-31T16:05:33.808212Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme
2025-03-31T16:05:33.973925Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected
2025-03-31T16:05:33.973998Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected
2025-03-31T16:05:33.974042Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0]
2025-03-31T16:05:33.974499Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:568:2495], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888}
2025-03-31T16:05:33.974548Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep
2025-03-31T16:05:33.974674Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan
2025-03-31T16:05:33.974726Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed
2025-03-31T16:05:33.974796Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan
2025-03-31T16:05:33.974841Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue
2025-03-31T16:05:33.983011Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 }
2025-03-31T16:05:33.990496Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-03-31T16:05:33.991433Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction
2025-03-31T16:05:33.991488Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction
2025-03-31T16:05:33.991547Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888
2025-03-31T16:05:3 ... -03-31T16:07:30.115578Z node 13 :TX_DATASHARD TRACE: Execution status for [2500:281474976715663] at 72075186224037890 is Executed
2025-03-31T16:07:30.115606Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [2500:281474976715663] at 72075186224037890 executing on unit DropIndexNotice
2025-03-31T16:07:30.115635Z node 13 :TX_DATASHARD TRACE: Add [2500:281474976715663] at 72075186224037890 to execution unit MoveTable
2025-03-31T16:07:30.115663Z node 13 :TX_DATASHARD TRACE: Trying to execute [2500:281474976715663] at 72075186224037890 on unit MoveTable
2025-03-31T16:07:30.115692Z node 13 :TX_DATASHARD TRACE: Execution status for [2500:281474976715663] at 72075186224037890 is Executed
2025-03-31T16:07:30.115722Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [2500:281474976715663] at 72075186224037890 executing on unit MoveTable
2025-03-31T16:07:30.115748Z node 13 :TX_DATASHARD TRACE: Add [2500:281474976715663] at 72075186224037890 to execution unit MoveIndex
2025-03-31T16:07:30.115775Z node 13 :TX_DATASHARD TRACE: Trying to execute [2500:281474976715663] at 72075186224037890 on unit MoveIndex
2025-03-31T16:07:30.115814Z node 13 :TX_DATASHARD TRACE: Execution status for [2500:281474976715663] at 72075186224037890 is Executed
2025-03-31T16:07:30.115849Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [2500:281474976715663] at 72075186224037890 executing on unit MoveIndex
2025-03-31T16:07:30.115884Z node 13 :TX_DATASHARD TRACE: Add [2500:281474976715663] at 72075186224037890 to execution unit CreateCdcStream
2025-03-31T16:07:30.115919Z node 13 :TX_DATASHARD TRACE: Trying to execute [2500:281474976715663] at 72075186224037890 on unit CreateCdcStream
2025-03-31T16:07:30.115956Z node 13 :TX_DATASHARD TRACE: Execution status for [2500:281474976715663] at 72075186224037890 is Executed
2025-03-31T16:07:30.115994Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [2500:281474976715663] at 72075186224037890 executing on unit CreateCdcStream
2025-03-31T16:07:30.116030Z node 13 :TX_DATASHARD TRACE: Add [2500:281474976715663] at 72075186224037890 to execution unit AlterCdcStream
2025-03-31T16:07:30.116068Z node 13 :TX_DATASHARD TRACE: Trying to execute [2500:281474976715663] at 72075186224037890 on unit AlterCdcStream
2025-03-31T16:07:30.116121Z node 13 :TX_DATASHARD TRACE: Execution status for [2500:281474976715663] at 72075186224037890 is Executed
2025-03-31T16:07:30.116158Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [2500:281474976715663] at 72075186224037890 executing on unit AlterCdcStream
2025-03-31T16:07:30.116193Z node 13 :TX_DATASHARD TRACE: Add [2500:281474976715663] at 72075186224037890 to execution unit DropCdcStream
2025-03-31T16:07:30.116223Z node 13 :TX_DATASHARD TRACE: Trying to execute [2500:281474976715663] at 72075186224037890 on unit DropCdcStream
2025-03-31T16:07:30.116256Z node 13 :TX_DATASHARD TRACE: Execution status for [2500:281474976715663] at 72075186224037890 is Executed
2025-03-31T16:07:30.116284Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [2500:281474976715663] at 72075186224037890 executing on unit DropCdcStream
2025-03-31T16:07:30.116311Z node 13 :TX_DATASHARD TRACE: Add [2500:281474976715663] at 72075186224037890 to execution unit CreateIncrementalRestoreSrc
2025-03-31T16:07:30.116349Z node 13 :TX_DATASHARD TRACE: Trying to execute [2500:281474976715663] at 72075186224037890 on unit CreateIncrementalRestoreSrc
2025-03-31T16:07:30.116383Z node 13 :TX_DATASHARD TRACE: Execution status for [2500:281474976715663] at 72075186224037890 is Executed
2025-03-31T16:07:30.116412Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [2500:281474976715663] at 72075186224037890 executing on unit CreateIncrementalRestoreSrc
2025-03-31T16:07:30.116458Z node 13 :TX_DATASHARD TRACE: Add [2500:281474976715663] at 72075186224037890 to execution unit CompleteOperation
2025-03-31T16:07:30.116492Z node 13 :TX_DATASHARD TRACE: Trying to execute [2500:281474976715663] at 72075186224037890 on unit CompleteOperation
2025-03-31T16:07:30.117071Z node 13 :TX_DATASHARD TRACE: Execution status for [2500:281474976715663] at 72075186224037890 is DelayComplete
2025-03-31T16:07:30.117157Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [2500:281474976715663] at 72075186224037890 executing on unit CompleteOperation
2025-03-31T16:07:30.117241Z node 13 :TX_DATASHARD TRACE: Add [2500:281474976715663] at 72075186224037890 to execution unit CompletedOperations
2025-03-31T16:07:30.117316Z node 13 :TX_DATASHARD TRACE: Trying to execute [2500:281474976715663] at 72075186224037890 on unit CompletedOperations
2025-03-31T16:07:30.117370Z node 13 :TX_DATASHARD TRACE: Execution status for [2500:281474976715663] at 72075186224037890 is Executed
2025-03-31T16:07:30.117402Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [2500:281474976715663] at 72075186224037890 executing on unit CompletedOperations
2025-03-31T16:07:30.117449Z node 13 :TX_DATASHARD TRACE: Execution plan for [2500:281474976715663] at 72075186224037890 has finished
2025-03-31T16:07:30.117525Z node 13 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0
2025-03-31T16:07:30.117603Z node 13 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037890
2025-03-31T16:07:30.117679Z node 13 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037890 has no attached operations
2025-03-31T16:07:30.117750Z node 13 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890
2025-03-31T16:07:30.122093Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 270270976, Sender [13:24:2071], Recipient [13:987:2787]: {TEvRegisterTabletResult TabletId# 72075186224037890 Entry# 2000}
2025-03-31T16:07:30.122187Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult
2025-03-31T16:07:30.122268Z node 13 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037890 time 2000
2025-03-31T16:07:30.122343Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890
2025-03-31T16:07:30.123497Z node 13 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037890 step# 2500}
2025-03-31T16:07:30.123667Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890
2025-03-31T16:07:30.126157Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890
2025-03-31T16:07:30.126230Z node 13 :TX_DATASHARD TRACE: Complete execution for [2500:281474976715663] at 72075186224037890 on unit CreateTable
2025-03-31T16:07:30.126306Z node 13 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890
2025-03-31T16:07:30.126400Z node 13 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037890
2025-03-31T16:07:30.126456Z node 13 :TX_DATASHARD TRACE: Complete execution for [2500:281474976715663] at 72075186224037890 on unit CompleteOperation
2025-03-31T16:07:30.126539Z node 13 :TX_DATASHARD DEBUG: Complete [2500 : 281474976715663] from 72075186224037890 at tablet 72075186224037890 send result to client [13:409:2404], exec latency: 0 ms, propose latency: 0 ms
2025-03-31T16:07:30.126642Z node 13 :TX_DATASHARD INFO: 72075186224037890 Sending notify to schemeshard 72057594046644480 txId 281474976715663 state Ready TxInFly 0
2025-03-31T16:07:30.126820Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890
2025-03-31T16:07:30.131753Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [13:685:2581], Recipient [13:884:2711]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated
2025-03-31T16:07:30.131837Z node 13 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889
2025-03-31T16:07:30.132481Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [13:685:2581], Recipient [13:987:2787]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated
2025-03-31T16:07:30.132531Z node 13 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890
2025-03-31T16:07:30.132871Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [13:685:2581], Recipient [13:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated
2025-03-31T16:07:30.132915Z node 13 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888
2025-03-31T16:07:30.133274Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269877760, Sender [13:1028:2820], Recipient [13:987:2787]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046644480 Status: OK ServerId: [13:1031:2823] Leader: 1 Dead: 0 Generation: 2 VersionInfo: }
2025-03-31T16:07:30.133331Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected
2025-03-31T16:07:30.134832Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 270270978, Sender [13:24:2071], Recipient [13:987:2787]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 2500 ReadStep# 2500 }
2025-03-31T16:07:30.134894Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult
2025-03-31T16:07:30.134983Z node 13 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037890 coordinator 72057594046316545 last step 0 next step 2500
2025-03-31T16:07:30.143835Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269552132, Sender [13:409:2404], Recipient [13:987:2787]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715663
2025-03-31T16:07:30.143925Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChangedResult
2025-03-31T16:07:30.144006Z node 13 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715663 datashard 72075186224037890 state Ready
2025-03-31T16:07:30.144130Z node 13 :TX_DATASHARD DEBUG: 72075186224037890 Got TEvSchemaChangedResult from SS at 72075186224037890
2025-03-31T16:07:30.153877Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [13:1046:2832], Recipient [13:987:2787]: NKikimr::TEvTabletPipe::TEvServerConnected
2025-03-31T16:07:30.154002Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected
2025-03-31T16:07:30.154083Z node 13 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [13:1045:2831], serverId# [13:1046:2832], sessionId# [0:0:0]
2025-03-31T16:07:30.154319Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269553169, Sender [13:1044:2830], Recipient [13:987:2787]: NKikimrTxDataShard.TEvGetInfoRequest
2025-03-31T16:07:30.161569Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [13:593:2518], Recipient [13:666:2570]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72075186224037888 TableId: 2 SchemaVersion: 1111 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1
2025-03-31T16:07:30.162045Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0
2025-03-31T16:07:30.162231Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888
2025-03-31T16:07:30.162531Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [13:1048:2834], Recipient [13:987:2787]: NKikimr::TEvTabletPipe::TEvServerConnected
2025-03-31T16:07:30.162585Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected
2025-03-31T16:07:30.162687Z node 13 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [13:1047:2833], serverId# [13:1048:2834], sessionId# [0:0:0]
>> KqpIndexLookupJoin::InnerJoinOnlyLeftColumn+StreamLookup [GOOD]
>> KqpIndexLookupJoin::InnerJoinOnlyLeftColumn-StreamLookup
>> TTicketParserTest::AuthorizationWithUserAccount [GOOD]
>> TTicketParserTest::AuthorizationUnavailable
>> TTicketParserTest::NebiusAuthorization [GOOD]
>> TTicketParserTest::NebiusAuthorizationModify
>> TTicketParserTest::TicketFromCertificateWithValidationDefaultGroupGood [GOOD]
>> TTicketParserTest::TicketFromCertificateWithValidationCheckIssuerBad
>> TTicketParserTest::TicketFromCertificateCheckIssuerBad [GOOD]
>> TTicketParserTest::TicketFromCertificateWithValidationBad
>> DataShardReadIteratorBatchMode::RangeFromInclusive [GOOD]
>> DataShardReadIteratorBatchMode::RangeFromNonInclusive
>> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeLeftBorder-EvWrite [GOOD]
>> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeRightBorder+EvWrite
>> TPersQueueTest::Init [GOOD]
>> TPersQueueTest::NoDecompressionMemoryLeaks
|82.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut
|82.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut
|82.1%| [LD] {RESULT} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut
------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::WriteSmall [FAIL]
Test command err:
assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:103, void TestOneWrite(TString, TVector &&): (env.WilsonUploader->Traces.size() == 1) failed: (2 != 1)
TBackTrace::Capture()+28 (0x1013BF9C)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x105F7E30)
TestOneWrite(TBasicString>, TVector>, std::__y1::allocator>>>&&)+4253 (0xFD7EA7D)
NTestSuiteTKeyValueTracingTest::TTestCaseWriteSmall::Execute_(NUnitTest::TTestContext&)+216 (0xFD8A5A8)
std::__y1::__function::__func, void ()>::operator()()+280 (0xFD9E7F8)
TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x10625D96)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x105FE9A9)
NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()+1204 (0xFD9D6A4)
NUnitTest::TTestFactory::Execute()+2438 (0x10600276)
NUnitTest::RunMain(int, char**)+5213 (0x1062030D)
??+0 (0x7F45CBD40D90)
__libc_start_main+128 (0x7F45CBD40E40)
_start+41 (0xD71A029)
>> TTicketParserTest::BulkAuthorization [GOOD]
>> TTicketParserTest::AuthorizationWithUserAccount2
>> TTicketParserTest::LoginGood
>> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinueInvisibleRowSkips+EvWrite [GOOD]
>> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinueInvisibleRowSkips-EvWrite
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_volatile/unittest >> DataShardVolatile::GracefulShardRestartNoEarlyReadSetAck [GOOD]
Test command err:
2025-03-31T16:01:48.123851Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-03-31T16:01:48.124105Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-31T16:01:48.124275Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d78/r3tmp/tmpQ7GUJb/pdisk_1.dat
2025-03-31T16:01:48.535009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480
2025-03-31T16:01:48.607390Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:01:48.655894Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617
2025-03-31T16:01:48.657065Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:01:48.657214Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:01:48.657410Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617
2025-03-31T16:01:48.672435Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-31T16:01:48.873354Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction
2025-03-31T16:01:48.873424Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction
2025-03-31T16:01:48.873585Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:645:2553]
2025-03-31T16:01:49.183145Z node 1 :TX_PROXY DEBUG: Actor# [1:645:2553] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value2" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615
2025-03-31T16:01:49.183251Z node 1 :TX_PROXY DEBUG: Actor# [1:645:2553] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0
2025-03-31T16:01:49.183924Z node 1 :TX_PROXY DEBUG: Actor# [1:645:2553] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6
2025-03-31T16:01:49.184039Z node 1 :TX_PROXY DEBUG: Actor# [1:645:2553] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache
2025-03-31T16:01:49.184503Z node 1 :TX_PROXY DEBUG: Actor# [1:645:2553] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0
2025-03-31T16:01:49.184694Z node 1 :TX_PROXY DEBUG: Actor# [1:645:2553] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 1000 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true
2025-03-31T16:01:49.184787Z node 1 :TX_PROXY DEBUG: Actor# [1:645:2553] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480}
2025-03-31T16:01:49.186555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480
2025-03-31T16:01:49.187049Z node 1 :TX_PROXY DEBUG: Actor# [1:645:2553] txid# 281474976715657 HANDLE EvClientConnected
2025-03-31T16:01:49.187661Z node 1 :TX_PROXY DEBUG: Actor# [1:645:2553] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657}
2025-03-31T16:01:49.187735Z node 1 :TX_PROXY DEBUG: Actor# [1:645:2553] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53}
2025-03-31T16:01:49.219506Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:661:2568], Recipient [1:670:2574]: NKikimr::TEvTablet::TEvBoot
2025-03-31T16:01:49.220669Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:661:2568], Recipient [1:670:2574]: NKikimr::TEvTablet::TEvRestored
2025-03-31T16:01:49.221123Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:670:2574]
2025-03-31T16:01:49.221364Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute
2025-03-31T16:01:49.233588Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:661:2568], Recipient [1:670:2574]: NKikimr::TEvTablet::TEvFollowerSyncComplete
2025-03-31T16:01:49.282816Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete
2025-03-31T16:01:49.282963Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute
2025-03-31T16:01:49.284784Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888
2025-03-31T16:01:49.284876Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888
2025-03-31T16:01:49.284938Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888
2025-03-31T16:01:49.285386Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete
2025-03-31T16:01:49.285535Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute
2025-03-31T16:01:49.285645Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:686:2574] in generation 1
2025-03-31T16:01:49.298197Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete
2025-03-31T16:01:49.404208Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888
2025-03-31T16:01:49.404459Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params
2025-03-31T16:01:49.404608Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:688:2584]
2025-03-31T16:01:49.404689Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888
2025-03-31T16:01:49.404743Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme
2025-03-31T16:01:49.404784Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-03-31T16:01:49.404977Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:670:2574], Recipient [1:670:2574]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction
2025-03-31T16:01:49.405036Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction
2025-03-31T16:01:49.405376Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888
2025-03-31T16:01:49.405486Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888
2025-03-31T16:01:49.405646Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888
2025-03-31T16:01:49.405699Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0
2025-03-31T16:01:49.405779Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888
2025-03-31T16:01:49.405851Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations
2025-03-31T16:01:49.405910Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888
2025-03-31T16:01:49.405948Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0
2025-03-31T16:01:49.406001Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888
2025-03-31T16:01:49.406584Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:675:2576], Recipient [1:670:2574]: NKikimr::TEvTabletPipe::TEvServerConnected
2025-03-31T16:01:49.406661Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected
2025-03-31T16:01:49.406725Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2571], serverId# [1:675:2576], sessionId# [0:0:0]
2025-03-31T16:01:49.406812Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:675:2576]
2025-03-31T16:01:49.406853Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction
2025-03-31T16:01:49.406960Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888
2025-03-31T16:01:49.407185Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx
2025-03-31T16:01:49.407256Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1
2025-03-31T16:01:49.407377Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888
2025-03-31T16:01:49.407439Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts
2025-03-31T16:01:49.407494Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx
2025-03-31T16:01:49.407546Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx
2025-03-31T16:01:49.407585Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx
2025-03-31T16:01:49.407983Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts
2025-03-31T16:01:49.408046Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx
2025-03-31T16:01:49.408090Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose
2025-03-31T16:01:49.408140Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose
2025-03-31T16:01:49.408212Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete
2025-03-31T16:01:49.408254Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose
2025-03-31T16:01:49.408288Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan
2025-03-31T16:01:49.408319Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan
2025-03-31T16:01:49.408343Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan
2025-03-31T16:01:49.409877Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:689:2585], Recipient [1:670:2574]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated
2025-03-31T16:01:49.409938Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888
2025-03-31T16:01 ... ::Confirm(unsigned int, NActors::TActorContext const&, NActors::TActorId const&) /-S/ydb/core/tablet_flat/flat_executor_txloglogic.cpp:298:13
#15 0x20653bd3 in NKikimr::NTabletFlatExecutor::TExecutor::Handle(TAutoPtr, TDelete>&, NActors::TActorContext const&) /-S/ydb/core/tablet_flat/flat_executor.cpp:3121:59
#16 0x205f0954 in NKikimr::NTabletFlatExecutor::TExecutor::StateWork(TAutoPtr&) /-S/ydb/core/tablet_flat/flat_executor.cpp:4156:9
#17 0x1a54e5cc in NActors::IActor::Receive(TAutoPtr&) /-S/ydb/library/actors/core/actor.cpp:280:13
#18 0x36c883a4 in NActors::TTestActorRuntimeBase::SendInternal(TAutoPtr, unsigned int, bool) /-S/ydb/library/actors/testlib/test_runtime.cpp:1702:33
#19 0x36c80c19 in NActors::TTestActorRuntimeBase::DispatchEventsInternal(NActors::TDispatchOptions const&, TInstant) /-S/ydb/library/actors/testlib/test_runtime.cpp:1295:45
#20 0x36c8af93 in NActors::TTestActorRuntimeBase::WaitForEdgeEvents(std::__y1::function&)>, TSet, std::__y1::allocator> const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.cpp:1554:22
#21 0x36e535b3 in NActors::TEvents::TEvWakeup::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEventIf(TSet, std::__y1::allocator> const&, std::__y1::function const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.h:477:13
#22 0x36e52b52 in GrabEdgeEvent /-S/ydb/library/actors/testlib/test_runtime.h:526:20
#23 0x36e52b52 in NActors::TEvents::TEvWakeup::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEvent(NActors::TActorId const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.h:532:20
#24 0x36e4adb2 in NActors::TEvents::TEvWakeup::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEventRethrow(NActors::TActorId const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.h:577:24
#25 0x36e4a98a in NActors::TTestActorRuntime::SimulateSleep(TDuration) /-S/ydb/core/testlib/actors/test_runtime.cpp:296:9
#26 0x18dd8b37 in NKikimr::NTestSuiteDataShardVolatile::TTestCaseGracefulShardRestartNoEarlyReadSetAck::Execute_(NUnitTest::TTestContext&) /-S/ydb/core/tx/datashard/datashard_ut_volatile.cpp:3956:17
#27 0x18de9c37 in operator() /-S/ydb/core/tx/datashard/datashard_ut_volatile.cpp:17:1
#28 0x18de9c37 in __invoke<(lambda at /-S/ydb/core/tx/datashard/datashard_ut_volatile.cpp:17:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25
#29 0x18de9c37 in __call<(lambda at /-S/ydb/core/tx/datashard/datashard_ut_volatile.cpp:17:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5
#30 0x18de9c37 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12
#31 0x18de9c37 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10
#32 0x198001f5 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12
#33 0x198001f5 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10
#34 0x198001f5 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20
#35 0x197cfd48 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18
#36 0x18de8ae3 in NKikimr::NTestSuiteDataShardVolatile::TCurrentTest::Execute() /-S/ydb/core/tx/datashard/datashard_ut_volatile.cpp:17:1
#37 0x197d1615 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:495:19
#38 0x197fa76c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44
#39 0x7f4a15a5ad8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e)
Indirect leak of 8 byte(s) in 1 object(s) allocated from:
#0 0x1903ffcd in operator new(unsigned long) /-S/contrib/libs/clang18-rt/lib/asan/asan_new_delete.cpp:86:3
#1 0x19c7030e in CreateString > > /-S/contrib/libs/protobuf/src/google/protobuf/arenastring.cc:102:20
#2 0x19c7030e in void google::protobuf::internal::ArenaStringPtr::Set<>(TBasicString> const&, google::protobuf::Arena*) /-S/contrib/libs/protobuf/src/google/protobuf/arenastring.cc:150:38
#3 0x2ac770e5 in set_name > &> /-B/ydb/core/protos/query_stats.pb.h:1740:16
#4 0x2ac770e5 in SetName /-B/ydb/core/protos/query_stats.pb.h:696:56
#5 0x2ac770e5 in NKikimr::NDataShard::KqpFillTxStats(NKikimr::NDataShard::TDataShard&, NKikimr::NMiniKQL::TEngineHostCounters const&, NKikimrQueryStats::TTxStats&) /-S/ydb/core/tx/datashard/datashard_kqp.cpp:917:34
#6 0x2b086ab4 in NKikimr::NDataShard::TExecuteWriteUnit::Execute(TIntrusivePtr>, NKikimr::NTabletFlatExecutor::TTransactionContext&, NActors::TActorContext const&) /-S/ydb/core/tx/datashard/execute_write_unit.cpp:436:13
#7 0x2aedef1c in NKikimr::NDataShard::TPipeline::RunExecutionPlan(TIntrusivePtr>, TVector>&, NKikimr::NTabletFlatExecutor::TTransactionContext&, NActors::TActorContext const&) /-S/ydb/core/tx/datashard/datashard_pipeline.cpp:1851:28
#8 0x2ab9fc94 in NKikimr::NDataShard::TDataShard::TTxWrite::Execute(NKikimr::NTabletFlatExecutor::TTransactionContext&, NActors::TActorContext const&) /-S/ydb/core/tx/datashard/datashard__write.cpp:100:38
#9 0x2062a8d4 in NKikimr::NTabletFlatExecutor::TExecutor::ExecuteTransaction(NKikimr::NTabletFlatExecutor::TSeat*) /-S/ydb/core/tablet_flat/flat_executor.cpp:1910:35
#10 0x20626274 in NKikimr::NTabletFlatExecutor::TExecutor::DoExecute(TAutoPtr, NKikimr::NTabletFlatExecutor::TExecutor::ETxMode) /-S/ydb/core/tablet_flat/flat_executor.cpp:1814:13
#11 0x2062d46e in NKikimr::NTabletFlatExecutor::TExecutor::Execute(TAutoPtr, NActors::TActorContext const&) /-S/ydb/core/tablet_flat/flat_executor.cpp:1828:5
#12 0x205c3ada in Execute /-S/ydb/core/tablet_flat/tablet_flat_executed.cpp:62:46
#13 0x205c3ada in NKikimr::NTabletFlatExecutor::TTabletExecutedFlat::Execute(TAutoPtr, NActors::TActorContext const&) /-S/ydb/core/tablet_flat/tablet_flat_executed.cpp:57:5
#14 0x21c60941 in NKikimr::NDataShard::TDataShard::Handle(TAutoPtr, TDelete>&, NActors::TActorContext const&) /-S/ydb/core/tx/datashard/datashard.cpp:3415:21
#15 0x21c0932b in NKikimr::NDataShard::TDataShard::StateWork(TAutoPtr&) /-S/ydb/core/tx/datashard/datashard_impl.h:3157:13
#16 0x1a54e5cc in NActors::IActor::Receive(TAutoPtr&) /-S/ydb/library/actors/core/actor.cpp:280:13
#17 0x36c883a4 in NActors::TTestActorRuntimeBase::SendInternal(TAutoPtr, unsigned int, bool) /-S/ydb/library/actors/testlib/test_runtime.cpp:1702:33
#18 0x36c80c19 in NActors::TTestActorRuntimeBase::DispatchEventsInternal(NActors::TDispatchOptions const&, TInstant) /-S/ydb/library/actors/testlib/test_runtime.cpp:1295:45
#19 0x36c8af93 in NActors::TTestActorRuntimeBase::WaitForEdgeEvents(std::__y1::function&)>, TSet, std::__y1::allocator> const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.cpp:1554:22
#20 0x36e535b3 in NActors::TEvents::TEvWakeup::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEventIf(TSet, std::__y1::allocator> const&, std::__y1::function const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.h:477:13
#21 0x36e52b52 in GrabEdgeEvent /-S/ydb/library/actors/testlib/test_runtime.h:526:20
#22 0x36e52b52 in NActors::TEvents::TEvWakeup::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEvent(NActors::TActorId const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.h:532:20
#23 0x36e4adb2 in NActors::TEvents::TEvWakeup::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEventRethrow(NActors::TActorId const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.h:577:24
#24 0x36e4a98a in NActors::TTestActorRuntime::SimulateSleep(TDuration) /-S/ydb/core/testlib/actors/test_runtime.cpp:296:9
#25 0x18dd4393 in NKikimr::NTestSuiteDataShardVolatile::TTestCaseGracefulShardRestartNoEarlyReadSetAck::Execute_(NUnitTest::TTestContext&) /-S/ydb/core/tx/datashard/datashard_ut_volatile.cpp:3909:17
#26 0x18de9c37 in operator() /-S/ydb/core/tx/datashard/datashard_ut_volatile.cpp:17:1
#27 0x18de9c37 in __invoke<(lambda at /-S/ydb/core/tx/datashard/datashard_ut_volatile.cpp:17:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25
#28 0x18de9c37 in __call<(lambda at /-S/ydb/core/tx/datashard/datashard_ut_volatile.cpp:17:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5
#29 0x18de9c37 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12
#30 0x18de9c37 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10
#31 0x198001f5 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12
#32 0x198001f5 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10
#33 0x198001f5 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20
#34 0x197cfd48 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18
#35 0x18de8ae3 in NKikimr::NTestSuiteDataShardVolatile::TCurrentTest::Execute() /-S/ydb/core/tx/datashard/datashard_ut_volatile.cpp:17:1
#36 0x197d1615 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:495:19
#37 0x197fa76c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44
#38 0x7f4a15a5ad8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e)
SUMMARY: AddressSanitizer: 664 byte(s) leaked in 11 allocation(s).
>> DataShardReadIterator::ShouldProperlyOrderConflictingTransactionsMvcc-UseSink [GOOD]
>> TReplicaTest::Handshake
>> test_sql_streaming.py::test[suites-GroupByHoppingWindowListKey-default.txt] [FAIL]
>> test_sql_streaming.py::test[suites-GroupByHoppingWindowNoKey-default.txt]
|82.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-std] [GOOD]
>> TTicketParserTest::AuthorizationUnavailable [GOOD]
>> TReplicaTest::Handshake [GOOD]
>> TReplicaTest::DoubleUnsubscribe
>> TTicketParserTest::BulkAuthorizationWithUserAccount [GOOD]
>> TTicketParserTest::BulkAuthorizationWithUserAccount2
>> TTicketParserTest::AuthenticationRetryErrorImmediately [GOOD]
>> TTicketParserTest::LoginRefreshGroupsWithError
>> TReplicaTest::DoubleUnsubscribe [GOOD]
>> TReplicaTest::DoubleDelete
>> TReplicaTest::DoubleDelete [GOOD]
>> KqpJoin::IdxLookupPartialLeftPredicate
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIterator::ShouldProperlyOrderConflictingTransactionsMvcc-UseSink [GOOD]
Test command err:
2025-03-31T16:05:32.478196Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-03-31T16:05:32.478489Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-31T16:05:32.478701Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000c7b/r3tmp/tmpyfOtPT/pdisk_1.dat
2025-03-31T16:05:32.895891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480
2025-03-31T16:05:32.967250Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:05:33.020941Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:05:33.021079Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:05:33.033311Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-31T16:05:33.123035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480
2025-03-31T16:05:33.197589Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot
2025-03-31T16:05:33.198763Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored
2025-03-31T16:05:33.199277Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570]
2025-03-31T16:05:33.199549Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute
2025-03-31T16:05:33.271509Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete
2025-03-31T16:05:33.272250Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete
2025-03-31T16:05:33.272450Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute
2025-03-31T16:05:33.274160Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888
2025-03-31T16:05:33.274242Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888
2025-03-31T16:05:33.274305Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888
2025-03-31T16:05:33.274708Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete
2025-03-31T16:05:33.274860Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute
2025-03-31T16:05:33.274947Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1
2025-03-31T16:05:33.287665Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete
2025-03-31T16:05:33.344205Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888
2025-03-31T16:05:33.344402Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params
2025-03-31T16:05:33.344558Z node 1 :TX_DATASHARD DEBUG:
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-31T16:05:33.344602Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:05:33.344638Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T16:05:33.344689Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:05:33.344925Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:05:33.344972Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:05:33.345268Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T16:05:33.345397Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T16:05:33.345530Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:05:33.345598Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:05:33.345667Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-31T16:05:33.345706Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-31T16:05:33.345741Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-31T16:05:33.345779Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:05:33.345838Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:05:33.345944Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:05:33.345982Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:05:33.346043Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-31T16:05:33.346140Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-03-31T16:05:33.346176Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T16:05:33.346295Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:05:33.346535Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-31T16:05:33.346592Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T16:05:33.346695Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T16:05:33.346741Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-31T16:05:33.346809Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-31T16:05:33.346861Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
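
An aside on the LeakSanitizer report at the top of this excerpt: the 8-byte indirect leak is allocated by `operator new` inside protobuf's `ArenaStringPtr::Set`, reached from `KqpFillTxStats` via the generated `SetName` in query_stats.pb.h. "Indirect" means the block is only reachable through another object that is itself leaked. A minimal stand-alone reproduction of that direct/indirect classification, with illustrative types that are not YDB's:

```cpp
// Build with -fsanitize=address and run: LSan reports one direct leak
// (the TTxStats object) and one indirect leak (the string it owns),
// the same shape as the report above. Types here are illustrative.
#include <string>

struct TTxStats {
    std::string* Name = nullptr;                       // owned, never freed
    void SetName(const std::string& s) { Name = new std::string(s); }
};

int main() {
    auto* stats = new TTxStats();  // direct leak: no live pointer at exit
    stats->SetName("table");       // indirect leak: reachable only via *stats
    return 0;
}
```

The distinction matters for triage: fixing the direct leak (freeing the owning object) usually makes the indirect entries disappear with it.
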
2025-03-31T16:05:33.346894Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-31T16:05:33.347179Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-31T16:05:33.347221Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-31T16:05:33.347258Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-31T16:05:33.347293Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-31T16:05:33.347356Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-31T16:05:33.347403Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-31T16:05:33.347449Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-31T16:05:33.347481Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-31T16:05:33.347506Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-31T16:05:33.349391Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-31T16:05:33.349452Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:05:33.361890Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:05:33.361987Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-31T16:05:33.362034Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-31T16:05:33.362105Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-03-31T16:05:33.362203Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-31T16:05:33.539147Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:05:33.539246Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:05:33.539295Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-31T16:05:33.539795Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:568:2495], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-03-31T16:05:33.539842Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-31T16:05:33.539989Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-31T16:05:33.540043Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-03-31T16:05:33.540113Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-03-31T16:05:33.540176Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-03-31T16:05:33.545380Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T16:05:33.545494Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:05:33.546250Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:05:33.546305Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:05:33.546365Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:05:3 ... 888 has finished 2025-03-31T16:07:37.203977Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-03-31T16:07:37.204035Z node 15 :TX_DATASHARD DEBUG: Found ready candidate operation [0:8] at 72075186224037888 for ExecuteRead 2025-03-31T16:07:37.204376Z node 15 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=15&id=YmU5MWY5MDAtMmQ3MTg0YzUtNGZjMjgyNmUtYzM0OGFlMjc=, workerId: [15:1135:2910], local sessions count: 1 2025-03-31T16:07:37.204843Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [15:61:2108], Recipient [15:1063:2855]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715666 LockNode: 15 Status: STATUS_NOT_FOUND 2025-03-31T16:07:37.205006Z node 15 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 6, sender: [15:594:2519], selfId: [15:57:2104], source: [15:1163:2930] 2025-03-31T16:07:37.205129Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [15:667:2571], Recipient [15:667:2571]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:07:37.205155Z node 15 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:07:37.205194Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:07:37.205222Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-03-31T16:07:37.205254Z node 15 :TX_DATASHARD DEBUG: Return cached ready operation [0:8] at 72075186224037888 2025-03-31T16:07:37.205291Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit ExecuteRead 2025-03-31T16:07:37.205411Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 2, request: { ReadId: 3 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3001 TxId: 18446744073709551615 } ResultFormat: FORMAT_ARROW } 2025-03-31T16:07:37.205792Z node 15 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3001/18446744073709551615 2025-03-31T16:07:37.205838Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[15:594:2519], 3} after 
executionsCount# 2 2025-03-31T16:07:37.205885Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:594:2519], 3} sends rowCount# 1, bytes# 16, quota rows left# 18446744073709551614, quota bytes left# 18446744073709551599, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-03-31T16:07:37.206061Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:594:2519], 3} finished in read 2025-03-31T16:07:37.206136Z node 15 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 2025-03-31T16:07:37.206170Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit ExecuteRead 2025-03-31T16:07:37.206195Z node 15 :TX_DATASHARD TRACE: Add [0:8] at 72075186224037888 to execution unit CompletedOperations 2025-03-31T16:07:37.206217Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit CompletedOperations 2025-03-31T16:07:37.206258Z node 15 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 2025-03-31T16:07:37.206276Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit CompletedOperations 2025-03-31T16:07:37.206293Z node 15 :TX_DATASHARD TRACE: Execution plan for [0:8] at 72075186224037888 has finished 2025-03-31T16:07:37.206315Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:07:37.206352Z node 15 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-03-31T16:07:37.206415Z node 15 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-31T16:07:37.206476Z node 15 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-31T16:07:37.207167Z node 15 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=15&id=ZTAwMTczZDAtMjkzZDM5ZGYtMWFlNmI2ODYtYzg2N2U5OGI=, workerId: [15:1163:2930], local sessions count: 0 2025-03-31T16:07:37.208692Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [15:594:2519], Recipient [15:667:2571]: NKikimrTxDataShard.TEvRead ReadId: 4 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3001 TxId: 18446744073709551615 } ResultFormat: FORMAT_ARROW KeysSize: 1 2025-03-31T16:07:37.208900Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-03-31T16:07:37.209033Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:9] at 72075186224037888 on unit CheckRead 2025-03-31T16:07:37.209184Z node 15 :TX_DATASHARD TRACE: Execution status for [0:9] at 72075186224037888 is Executed 2025-03-31T16:07:37.209268Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:9] at 72075186224037888 executing on unit CheckRead 2025-03-31T16:07:37.209348Z node 15 :TX_DATASHARD TRACE: Add [0:9] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-03-31T16:07:37.209412Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:9] at 72075186224037888 on unit BuildAndWaitDependencies 2025-03-31T16:07:37.209481Z node 15 :TX_DATASHARD TRACE: Activated operation [0:9] at 72075186224037888 2025-03-31T16:07:37.209561Z node 15 :TX_DATASHARD TRACE: Execution status for [0:9] at 72075186224037888 is Executed 2025-03-31T16:07:37.209595Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:9] at 72075186224037888 executing on unit BuildAndWaitDependencies 
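
Throughout these traces each operation advances through named execution units (CheckRead, BuildAndWaitDependencies, ExecuteRead, CompletedOperations), with an "Execution status ... is Executed" line between "Trying to execute" and "Advance execution plan". A toy rendering of that plan-driven loop; only the unit names come from the log, the status handling is a simplified guess:

```cpp
// Sketch of a unit pipeline as suggested by the trace lines above.
#include <functional>
#include <iostream>
#include <vector>

enum class EStatus { Executed, Restart };

struct TUnit {
    const char* Name;
    std::function<EStatus()> Execute;
};

int main() {
    std::vector<TUnit> plan = {
        {"CheckRead",                [] { return EStatus::Executed; }},
        {"BuildAndWaitDependencies", [] { return EStatus::Executed; }},
        {"ExecuteRead",              [] { return EStatus::Executed; }},
        {"CompletedOperations",      [] { return EStatus::Executed; }},
    };
    for (const auto& unit : plan) {
        std::cout << "Trying to execute on unit " << unit.Name << "\n";
        if (unit.Execute() != EStatus::Executed)
            break;  // a real unit may also ask to wait or restart
        std::cout << "Advance execution plan, executing on unit "
                  << unit.Name << "\n";
    }
}
```
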
2025-03-31T16:07:37.209625Z node 15 :TX_DATASHARD TRACE: Add [0:9] at 72075186224037888 to execution unit ExecuteRead 2025-03-31T16:07:37.209654Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:9] at 72075186224037888 on unit ExecuteRead 2025-03-31T16:07:37.209843Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 4 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3001 TxId: 18446744073709551615 } ResultFormat: FORMAT_ARROW } 2025-03-31T16:07:37.210255Z node 15 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3001/18446744073709551615 2025-03-31T16:07:37.210352Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[15:594:2519], 4} after executionsCount# 1 2025-03-31T16:07:37.210447Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:594:2519], 4} sends rowCount# 1, bytes# 16, quota rows left# 18446744073709551614, quota bytes left# 18446744073709551599, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-03-31T16:07:37.210716Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:594:2519], 4} finished in read 2025-03-31T16:07:37.210831Z node 15 :TX_DATASHARD TRACE: Execution status for [0:9] at 72075186224037888 is Executed 2025-03-31T16:07:37.210867Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:9] at 72075186224037888 executing on unit ExecuteRead 2025-03-31T16:07:37.210898Z node 15 :TX_DATASHARD TRACE: Add [0:9] at 72075186224037888 to execution unit CompletedOperations 2025-03-31T16:07:37.210931Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:9] at 72075186224037888 on unit CompletedOperations 2025-03-31T16:07:37.210994Z node 15 :TX_DATASHARD TRACE: Execution status for [0:9] at 72075186224037888 is Executed 2025-03-31T16:07:37.211023Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:9] at 72075186224037888 executing on unit CompletedOperations 2025-03-31T16:07:37.211063Z node 15 :TX_DATASHARD TRACE: Execution plan for [0:9] at 72075186224037888 has finished 2025-03-31T16:07:37.211243Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-03-31T16:07:37.212520Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [15:594:2519], Recipient [15:667:2571]: NKikimrTxDataShard.TEvRead ReadId: 5 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3000 TxId: 18446744073709551615 } ResultFormat: FORMAT_ARROW KeysSize: 1 2025-03-31T16:07:37.212760Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-03-31T16:07:37.212884Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:10] at 72075186224037888 on unit CheckRead 2025-03-31T16:07:37.213004Z node 15 :TX_DATASHARD TRACE: Execution status for [0:10] at 72075186224037888 is Executed 2025-03-31T16:07:37.213067Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:10] at 72075186224037888 executing on unit CheckRead 2025-03-31T16:07:37.213122Z node 15 :TX_DATASHARD TRACE: Add [0:10] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-03-31T16:07:37.213181Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:10] at 72075186224037888 on unit BuildAndWaitDependencies 2025-03-31T16:07:37.213237Z node 15 :TX_DATASHARD TRACE: Activated operation [0:10] at 72075186224037888 
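
The "PromoteImmediatePostExecuteEdges ... promoting UnprotectedReadEdge to v3001/18446744073709551615" lines print versions as Step/TxId pairs and show an edge that only moves forward: the later promotion to v3000 (after v3001) must be a no-op. A sketch of that bookkeeping under the assumption the edge is a simple monotonic maximum; the write-side check is our illustration, not YDB code:

```cpp
// Toy model of the read-edge promotion visible in the trace.
#include <cstdint>

struct TRowVersion {
    uint64_t Step = 0;
    uint64_t TxId = 0;
    bool operator<(const TRowVersion& rhs) const {
        return Step != rhs.Step ? Step < rhs.Step : TxId < rhs.TxId;
    }
};

struct TMvccEdges {
    TRowVersion UnprotectedReadEdge;

    // "PromoteImmediatePostExecuteEdges ... promoting UnprotectedReadEdge"
    void PromoteAfterRead(TRowVersion snapshot) {
        if (UnprotectedReadEdge < snapshot)
            UnprotectedReadEdge = snapshot;  // promoting to v3000 after v3001: no-op
    }

    // A later immediate write must land strictly above every promoted edge,
    // otherwise it could change data an earlier reader already observed.
    bool WriteVersionIsSafe(TRowVersion v) const {
        return UnprotectedReadEdge < v;
    }
};
```

This is what lets the test read the same key at snapshots v3001 and then v3000 and still see consistent, properly ordered results.
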
2025-03-31T16:07:37.213299Z node 15 :TX_DATASHARD TRACE: Execution status for [0:10] at 72075186224037888 is Executed 2025-03-31T16:07:37.213331Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:10] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-03-31T16:07:37.213359Z node 15 :TX_DATASHARD TRACE: Add [0:10] at 72075186224037888 to execution unit ExecuteRead 2025-03-31T16:07:37.213388Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:10] at 72075186224037888 on unit ExecuteRead 2025-03-31T16:07:37.213551Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 5 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3000 TxId: 18446744073709551615 } ResultFormat: FORMAT_ARROW } 2025-03-31T16:07:37.213960Z node 15 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3000/18446744073709551615 2025-03-31T16:07:37.214049Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[15:594:2519], 5} after executionsCount# 1 2025-03-31T16:07:37.214147Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:594:2519], 5} sends rowCount# 1, bytes# 16, quota rows left# 18446744073709551614, quota bytes left# 18446744073709551599, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-03-31T16:07:37.214385Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:594:2519], 5} finished in read 2025-03-31T16:07:37.214474Z node 15 :TX_DATASHARD TRACE: Execution status for [0:10] at 72075186224037888 is Executed 2025-03-31T16:07:37.214501Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:10] at 72075186224037888 executing on unit ExecuteRead 2025-03-31T16:07:37.214524Z node 15 :TX_DATASHARD TRACE: Add [0:10] at 72075186224037888 to execution unit CompletedOperations 2025-03-31T16:07:37.214548Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:10] at 72075186224037888 on unit CompletedOperations 2025-03-31T16:07:37.214596Z node 15 :TX_DATASHARD TRACE: Execution status for [0:10] at 72075186224037888 is Executed 2025-03-31T16:07:37.214614Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:10] at 72075186224037888 executing on unit CompletedOperations 2025-03-31T16:07:37.214649Z node 15 :TX_DATASHARD TRACE: Execution plan for [0:10] at 72075186224037888 has finished 2025-03-31T16:07:37.214707Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::AuthorizationUnavailable [GOOD] Test command err: 2025-03-31T16:07:03.977621Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005857278192988:2264];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:07:03.977896Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d92/r3tmp/tmpS426P4/pdisk_1.dat 2025-03-31T16:07:05.155700Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:07:05.164222Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 
2025-03-31T16:07:05.175611Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:07:05.175745Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:07:05.181907Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:07:05.367468Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.144041s 2025-03-31T16:07:05.367594Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.144162s TServer::EnableGrpc on GrpcPort 4810, node 1 2025-03-31T16:07:05.577121Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:07:05.577148Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:07:05.577166Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:07:05.580044Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12585 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:07:06.435813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
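
The HIVE warnings at the start of every test walk a node through the same connection states: Unknown -> Disconnected -> Connecting -> Connected. Schematically (the transition triggers are assumptions, only the state names come from the log):

```cpp
// Minimal rendering of the node state progression in the HIVE log lines.
#include <initializer_list>
#include <iostream>

enum class EVolatileState { Unknown, Disconnected, Connecting, Connected };

const char* Name(EVolatileState s) {
    switch (s) {
        case EVolatileState::Unknown:      return "Unknown";
        case EVolatileState::Disconnected: return "Disconnected";
        case EVolatileState::Connecting:   return "Connecting";
        case EVolatileState::Connected:    return "Connected";
    }
    return "?";
}

int main() {
    EVolatileState s = EVolatileState::Unknown;
    for (EVolatileState next : {EVolatileState::Disconnected,
                                EVolatileState::Connecting,
                                EVolatileState::Connected}) {
        std::cout << "VolatileState: " << Name(s) << " -> " << Name(next) << "\n";
        s = next;
    }
}
```
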
2025-03-31T16:07:06.508424Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthorization(something.read) 2025-03-31T16:07:06.511875Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Connect to grpc://localhost:13060 2025-03-31T16:07:06.536218Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Request AuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-03-31T16:07:06.676514Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Status 14 Service Unavailable 2025-03-31T16:07:06.685278Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a permanent error "Service Unavailable" retryable:1 2025-03-31T16:07:06.685349Z node 1 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-03-31T16:07:06.685383Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthorization(something.read) 2025-03-31T16:07:06.685755Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Request AuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-03-31T16:07:06.693062Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Status 14 Service Unavailable 2025-03-31T16:07:06.693673Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a permanent error "Service Unavailable" retryable:1 2025-03-31T16:07:06.693744Z node 1 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-03-31T16:07:07.975558Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket AKIA****MPLE (B3EDC139) 2025-03-31T16:07:07.975616Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthorization(something.read) 2025-03-31T16:07:07.983021Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Request AuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-03-31T16:07:07.986476Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Status 14 Service Unavailable 2025-03-31T16:07:07.986846Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a permanent error "Service Unavailable" retryable:1 2025-03-31T16:07:07.986892Z node 1 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-03-31T16:07:08.924090Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488005857278192988:2264];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:07:08.924190Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:07:08.981642Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket AKIA****MPLE (B3EDC139) 2025-03-31T16:07:08.981705Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthorization(something.read) 2025-03-31T16:07:08.982269Z node 1 
:GRPC_CLIENT DEBUG: [517000010088] Request AuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-03-31T16:07:08.985864Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-03-31T16:07:08.986831Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a valid subject "user1@as" 2025-03-31T16:07:08.986986Z node 1 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now valid token of user1@as 2025-03-31T16:07:19.584140Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488005927070849873:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:07:19.584227Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d92/r3tmp/tmpubaCee/pdisk_1.dat 2025-03-31T16:07:19.723745Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:07:19.728609Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:07:19.728673Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:07:19.730150Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5899, node 2 2025-03-31T16:07:19.819758Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:07:19.819782Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:07:19.819795Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:07:19.819916Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12295 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
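
These ticket-parser traces mark gRPC status 14 (Service Unavailable) and status 1 (CANCELLED) as a "retryable error" - the ticket is re-resolved on the next refresh - while status 16 (Access Denied, later in this dump) becomes a "permanent error". A sketch of that split, assuming it keys on the status code alone:

```cpp
// Classification suggested by the "retryable:1" / "retryable:0" flags above.
#include <grpcpp/grpcpp.h>

bool IsRetryableAuthError(const grpc::Status& status) {
    switch (status.error_code()) {
        case grpc::StatusCode::UNAVAILABLE:        // "Status 14 Service Unavailable"
        case grpc::StatusCode::CANCELLED:          // "Status 1 CANCELLED"
            return true;                           // -> "has now retryable error"
        case grpc::StatusCode::PERMISSION_DENIED:  // "Status 16 Access Denied"
        default:
            return false;                          // -> "has now permanent error"
    }
}
```

The payoff is visible in the trace: after three retryable failures the fourth AuthorizeRequest succeeds and the ticket "has now valid token of user1@as", whereas a permanent error is cached without further retries.
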
2025-03-31T16:07:20.184532Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:07:20.205221Z node 2 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthorization(something.read) 2025-03-31T16:07:20.205313Z node 2 :GRPC_CLIENT DEBUG: [517000043708] Connect to grpc://localhost:6141 2025-03-31T16:07:20.206196Z node 2 :GRPC_CLIENT DEBUG: [517000043708] Request AuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-03-31T16:07:20.224054Z node 2 :GRPC_CLIENT DEBUG: [517000043708] Status 14 Service Unavailable 2025-03-31T16:07:20.224662Z node 2 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a permanent error "Service Unavailable" retryable:1 2025-03-31T16:07:20.224716Z node 2 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-03-31T16:07:20.224745Z node 2 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthorization(something.read) 2025-03-31T16:07:20.225081Z node 2 :GRPC_CLIENT DEBUG: [517000043708] Request AuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_pat ... 4073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:07:29.929404Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:07:29.944729Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:07:29.956801Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-03-31T16:07:29.956837Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:07:29.956847Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-31T16:07:29.956884Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2025-03-31T16:07:29.956974Z node 4 :GRPC_CLIENT DEBUG: [517000031108] Connect to grpc://localhost:5726 2025-03-31T16:07:29.957899Z node 4 :GRPC_CLIENT DEBUG: [517000031108] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-03-31T16:07:29.967334Z node 4 :GRPC_CLIENT DEBUG: [517000031108] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-03-31T16:07:29.971357Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2025-03-31T16:07:29.971397Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for UserAccount(user1@as) 2025-03-31T16:07:29.995247Z node 4 :GRPC_CLIENT DEBUG: [517000031808] Connect to grpc://localhost:18172 2025-03-31T16:07:29.996377Z node 4 :GRPC_CLIENT DEBUG: [517000031808] Request GetUserAccountRequest { user_account_id: "user1" } 2025-03-31T16:07:30.016949Z node 4 :GRPC_CLIENT DEBUG: [517000031808] Response UserAccount { yandex_passport_user_account { login: "login1" } } 2025-03-31T16:07:30.019528Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of login1@passport 2025-03-31T16:07:30.023891Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-03-31T16:07:30.023922Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:07:30.023933Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-31T16:07:30.023972Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.write) 2025-03-31T16:07:30.024202Z node 4 :GRPC_CLIENT DEBUG: [517000031108] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-03-31T16:07:30.032532Z node 4 :GRPC_CLIENT DEBUG: [517000031108] Status 16 Access Denied 2025-03-31T16:07:30.032899Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a permanent error "Access Denied" retryable:0 2025-03-31T16:07:30.032937Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now permanent error message 'Access Denied' 2025-03-31T16:07:30.033788Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-03-31T16:07:30.033809Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:07:30.033819Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-31T16:07:30.033844Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 
2025-03-31T16:07:30.033895Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.write) 2025-03-31T16:07:30.034063Z node 4 :GRPC_CLIENT DEBUG: [517000031108] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-03-31T16:07:30.034664Z node 4 :GRPC_CLIENT DEBUG: [517000031108] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-03-31T16:07:30.037671Z node 4 :GRPC_CLIENT DEBUG: [517000031108] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-03-31T16:07:30.037798Z node 4 :GRPC_CLIENT DEBUG: [517000031108] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-03-31T16:07:30.038034Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2025-03-31T16:07:30.038094Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a valid subject "user1@as" 2025-03-31T16:07:30.038122Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for UserAccount(user1@as) 2025-03-31T16:07:30.038266Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of login1@passport 2025-03-31T16:07:34.578567Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7488005988618306986:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:07:34.578902Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d92/r3tmp/tmps4PAhW/pdisk_1.dat 2025-03-31T16:07:34.804223Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:07:34.836891Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:07:34.837000Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:07:34.839548Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18084, node 5 2025-03-31T16:07:34.891612Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:07:34.891646Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:07:34.891656Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:07:34.891802Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31600 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:07:35.233038Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:07:35.244124Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:07:35.248208Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-03-31T16:07:35.248243Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:07:35.248253Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-31T16:07:35.248294Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2025-03-31T16:07:35.248352Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.write) 2025-03-31T16:07:35.248444Z node 5 :GRPC_CLIENT DEBUG: [5170000bd488] Connect to grpc://localhost:13023 2025-03-31T16:07:35.249459Z node 5 :GRPC_CLIENT DEBUG: [5170000bd488] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-03-31T16:07:35.249825Z node 5 :GRPC_CLIENT DEBUG: [5170000bd488] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-03-31T16:07:35.265057Z node 5 :GRPC_CLIENT DEBUG: [5170000bd488] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-03-31T16:07:35.265246Z node 5 :GRPC_CLIENT DEBUG: [5170000bd488] Status 14 Service Unavailable 2025-03-31T16:07:35.267214Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2025-03-31T16:07:35.267294Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a permanent error "Service Unavailable" retryable:1 2025-03-31T16:07:35.267339Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-03-31T16:07:35.267375Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2025-03-31T16:07:35.267428Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.write) 
2025-03-31T16:07:35.267655Z node 5 :GRPC_CLIENT DEBUG: [5170000bd488] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-03-31T16:07:35.268342Z node 5 :GRPC_CLIENT DEBUG: [5170000bd488] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-03-31T16:07:35.272038Z node 5 :GRPC_CLIENT DEBUG: [5170000bd488] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-03-31T16:07:35.272167Z node 5 :GRPC_CLIENT DEBUG: [5170000bd488] Status 14 Service Unavailable 2025-03-31T16:07:35.273434Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2025-03-31T16:07:35.273504Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a permanent error "Service Unavailable" retryable:1 2025-03-31T16:07:35.273531Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::DoubleDelete [GOOD] Test command err: 2025-03-31T16:07:38.916749Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:7:2054] 2025-03-31T16:07:38.916838Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Successful handshake: owner# 1, generation# 1 2025-03-31T16:07:39.412868Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:7:2054] 2025-03-31T16:07:39.412943Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Successful handshake: owner# 1, generation# 1 2025-03-31T16:07:39.422023Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:7:2054], cookie# 0, event size# 72 2025-03-31T16:07:39.422132Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-03-31T16:07:39.441310Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-03-31T16:07:39.441504Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [2:7:2054] 2025-03-31T16:07:39.450677Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Subscribe: subscriber# [2:7:2054], path# path, domainOwnerId# 0, capabilities# 2025-03-31T16:07:39.451837Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [2:7:2054] 2025-03-31T16:07:39.453690Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Unsubscribe: subscriber# [2:7:2054], path# path 2025-03-31T16:07:39.453807Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [2:7:2054] 2025-03-31T16:07:39.859326Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [3:7:2054] 
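
The TReplicaTest::DoubleDelete trace continuing below sends two consecutive TEvUpdate messages with deletion# true for the same pathId; the first removes the description, the second must find nothing and do no harm. A minimal sketch of that idempotent upsert/delete, with an illustrative container standing in for the replica's real state:

```cpp
// Idempotent upsert/delete as exercised by the DoubleDelete scenario.
#include <map>
#include <optional>
#include <string>

struct TReplica {
    std::map<std::string, std::string> Descriptions;  // path -> description

    void Update(const std::string& path, std::optional<std::string> desc) {
        if (desc) {
            Descriptions[path] = *desc;  // "Upsert description: path# ..."
        } else {
            Descriptions.erase(path);    // "Delete description: path# ...";
        }                                // erasing a missing key is a no-op
    }
};
```
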
2025-03-31T16:07:39.859394Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Successful handshake: owner# 1, generation# 1 2025-03-31T16:07:39.859497Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:8:2055] 2025-03-31T16:07:39.859535Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Upsert description: path# path 2025-03-31T16:07:39.859627Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Subscribe: subscriber# [3:8:2055], path# path, domainOwnerId# 0, capabilities# 2025-03-31T16:07:39.859769Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 72 2025-03-31T16:07:39.859810Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-03-31T16:07:39.859886Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-03-31T16:07:39.861931Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 40 2025-03-31T16:07:39.861997Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2025-03-31T16:07:39.862071Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2025-03-31T16:07:39.862351Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:9:2056] 2025-03-31T16:07:39.862415Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Subscribe: subscriber# [3:9:2056], path# path, domainOwnerId# 0, capabilities# 2025-03-31T16:07:39.862546Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 40 2025-03-31T16:07:39.862581Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true >> TTicketParserTest::NebiusAuthorizationModify [GOOD] |82.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/common/ydb-tests-fq-common |82.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/common/ydb-tests-fq-common |82.2%| [LD] {RESULT} $(B)/ydb/tests/fq/common/ydb-tests-fq-common ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::AuthenticationRetryErrorImmediately [GOOD] Test command err: 2025-03-31T16:07:05.186765Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005864579744772:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:07:05.186812Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d84/r3tmp/tmpf344aH/pdisk_1.dat 2025-03-31T16:07:05.896837Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:07:05.935811Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Unknown -> Disconnected 2025-03-31T16:07:05.935900Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:07:05.941429Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13169, node 1 2025-03-31T16:07:06.117710Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:07:06.117732Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:07:06.117738Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:07:06.117854Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21205 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:07:06.526378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:07:06.567135Z node 1 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-03-31T16:07:06.567232Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Connect to grpc://localhost:23823 2025-03-31T16:07:06.577424Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Request AuthenticateRequest { iam_token: "**** (8E120919)" } 2025-03-31T16:07:06.667706Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Response AuthenticateResponse { subject { user_account { id: "user1" } } } 2025-03-31T16:07:06.682948Z node 1 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-03-31T16:07:09.695782Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488005883760330485:2057];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:07:09.695840Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d84/r3tmp/tmpFlRmow/pdisk_1.dat 2025-03-31T16:07:10.070060Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:07:10.107205Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:07:10.107289Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:07:10.112365Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20278, node 2 2025-03-31T16:07:10.268687Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:07:10.268715Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:07:10.268723Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:07:10.268880Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12836 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:07:10.563953Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:07:10.580228Z node 2 :TICKET_PARSER TRACE: Ticket ApiK****alid (AB5B5EA8) asking for AccessServiceAuthentication 2025-03-31T16:07:10.580288Z node 2 :GRPC_CLIENT DEBUG: [517000031108] Connect to grpc://localhost:8751 2025-03-31T16:07:10.581358Z node 2 :GRPC_CLIENT DEBUG: [517000031108] Request AuthenticateRequest { api_key: "ApiK****alid (AB5B5EA8)" } 2025-03-31T16:07:10.594152Z node 2 :GRPC_CLIENT DEBUG: [517000031108] Response AuthenticateResponse { subject { user_account { id: "ApiKey-value-valid" } } } 2025-03-31T16:07:10.595407Z node 2 :TICKET_PARSER DEBUG: Ticket ApiK****alid (AB5B5EA8) () has now valid token of ApiKey-value-valid@as 2025-03-31T16:07:13.824999Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488005900055152620:2082];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:07:13.842455Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d84/r3tmp/tmpx0vHyD/pdisk_1.dat 2025-03-31T16:07:13.974962Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:07:13.997313Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:07:13.997402Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:07:14.002834Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3141, node 3 2025-03-31T16:07:14.099317Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:07:14.099346Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:07:14.099354Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:07:14.099509Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26926 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:07:14.377131Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
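Taken together, the AuthenticateRequest dumps in these traces carry three credential shapes: an iam_token, an api_key, and an AWS-style signature with an access_key_id. An illustrative sketch of that dispatch, with made-up types rather than the real access-service protobufs:

    #include <iostream>
    #include <string>
    #include <variant>

    // Illustrative credential shapes; the real requests are protobuf messages.
    struct TIamToken  { std::string Token; };
    struct TApiKey    { std::string Key; };
    struct TSignature { std::string AccessKeyId; };

    using TCredentials = std::variant<TIamToken, TApiKey, TSignature>;

    std::string DescribeRequest(const TCredentials& c) {
        struct {
            std::string operator()(const TIamToken& t) const {
                return "AuthenticateRequest { iam_token: \"" + t.Token + "\" }";
            }
            std::string operator()(const TApiKey& k) const {
                return "AuthenticateRequest { api_key: \"" + k.Key + "\" }";
            }
            std::string operator()(const TSignature& s) const {
                return "AuthenticateRequest { signature { access_key_id: \"" + s.AccessKeyId + "\" } }";
            }
        } visitor;
        return std::visit(visitor, c);
    }

    int main() {
        std::cout << DescribeRequest(TApiKey{"ApiKey-value-valid"}) << "\n";
        std::cout << DescribeRequest(TSignature{"AKIAIOSFODNN7EXAMPLE"}) << "\n";
    }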
2025-03-31T16:07:14.386415Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-03-31T16:07:14.386451Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:07:14.386462Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-31T16:07:14.386497Z node 3 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-03-31T16:07:14.386545Z node 3 :GRPC_CLIENT DEBUG: [5170000f0408] Connect to grpc://localhost:9713 2025-03-31T16:07:14.387419Z node 3 :GRPC_CLIENT DEBUG: [5170000f0408] Request AuthenticateRequest { iam_token: "**** (8E120919)" } 2025-03-31T16:07:14.394689Z node 3 :GRPC_CLIENT DEBUG: [5170000f0408] Status 14 Service Unavailable 2025-03-31T16:07:14.394866Z node 3 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-03-31T16:07:14.394902Z node 3 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-03-31T16:07:14.395061Z node 3 :GRPC_CLIENT DEBUG: [5170000f0408] Request AuthenticateRequest { iam_token: "**** (8E120919)" } 2025-03-31T16:07:14.397956Z node 3 :GRPC_CLIENT DEBUG: [5170000f0408] Status 1 CANCELLED 2025-03-31T16:07:14.398084Z node 3 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'CANCELLED' 2025-03-31T16:07:18.586671Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488005922525837283:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:07:18.591906Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d84/r3tmp/tmp0XIB97/pdisk_1.dat 2025-03-31T16:07:18.923405Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:07:18.945639Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:07:18.945720Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:07:18.952288Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7208, node 4 2025-03-31T16:07:19.106345Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:07:19.106375Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:07:19.106384Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:07:19.106516Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30606 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:07:19.484119Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:07:19.496239Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:07:19.499504Z node 4 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthentication 2025-03-31T16:07:19.499590Z node 4 :GRPC_CLIENT DEBUG: [517000110308] Connect to grpc://localhost:29162 2025-03-31T16:07:19.500935Z node 4 :GRPC_CLIENT DEBUG: [517000110308] Request AuthenticateRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } } 2025-03-31T16:07:19.510938Z node 4 :GRPC_CLIENT DEBUG: [517000110308] Status 14 Service Unavailable 2025-03-31T16:07:19.511622Z node 4 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-03-31T16:07:19.511652Z node 4 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthentication 2025-03-31T16:07:19.511842Z node 4 :GRPC_CLIENT DEBUG: [517000110308] Request AuthenticateRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } } 2025-03-31T16:07:19.517578Z node 4 :GRPC_CLIENT DEBUG: [517000110308] Status 14 Service Unavailable 2025-03-31T16:07:19.518025Z node 4 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-03-31T16:07:20.675872Z node 4 :TICKET_PARSER DEBUG: Refreshing ticket AKIA****MPLE (B3EDC139) 2025-03-31T16:07:20.675919Z node 4 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthentication 2025-03-31T16:07:20.676306Z node 4 :GRPC_CLIENT DEBUG: [517000110308] Request AuthenticateRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } } 2025-03-31T16:07:20.687966Z node 4 :GRPC_CLIENT DEBUG: [517000110308] Status 14 Service Unavailable 2025-03-31T16:07:20.688440Z node 4 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-03-31T16:07:21.676041Z node 4 :TICKET_PARSER DEBUG: Refreshing ticket AKIA****MPLE (B3EDC139) 2025-03-31T16:07:21.676084Z node 4 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthentication 2025-03-31T16:07:21.676450Z node 4 :GRPC_CLIENT DEBUG: [517000110308] Request AuthenticateRequest { signature { access_key_id: 
"AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } } 2025-03-31T16:07:21.679240Z node 4 :GRPC_CLIENT DEBUG: [517000110308] Response AuthenticateResponse { subject { user_account { id: "user1" } } } 2025-03-31T16:07:21.679554Z node 4 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now valid token of user1@as 2025-03-31T16:07:23.591394Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7488005922525837283:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:07:23.591479Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d84/r3tmp/tmptdLzwr/pdisk_1.dat 2025-03-31T16:07:32.719324Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:07:32.751654Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:07:32.752518Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:07:32.752601Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:07:32.764754Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25970, node 5 2025-03-31T16:07:32.915741Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:07:32.915769Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:07:32.915777Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:07:32.915906Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16708 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:07:33.409038Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:07:33.431771Z node 5 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthentication 2025-03-31T16:07:33.431884Z node 5 :GRPC_CLIENT DEBUG: [5170000bb508] Connect to grpc://localhost:21692 2025-03-31T16:07:33.433051Z node 5 :GRPC_CLIENT DEBUG: [5170000bb508] Request AuthenticateRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } } 2025-03-31T16:07:33.450036Z node 5 :GRPC_CLIENT DEBUG: [5170000bb508] Status 14 Service Unavailable 2025-03-31T16:07:33.450444Z node 5 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-03-31T16:07:33.450484Z node 5 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthentication 2025-03-31T16:07:33.450666Z node 5 :GRPC_CLIENT DEBUG: [5170000bb508] Request AuthenticateRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } } 2025-03-31T16:07:33.452700Z node 5 :GRPC_CLIENT DEBUG: [5170000bb508] Response AuthenticateResponse { subject { user_account { id: "user1" } } } 2025-03-31T16:07:33.453144Z node 5 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now valid token of user1@as >> TTicketParserTest::TicketFromCertificateWithValidationBad [GOOD] >> TTicketParserTest::NebiusAuthorizationWithRequiredPermissions >> test_sql_streaming.py::test[suites-ReadTopicWithMetadata-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-ReadTopicWithMetadataInsideFilter-default.txt] >> TTicketParserTest::AuthorizationWithUserAccount2 [GOOD] >> TTicketParserTest::BulkAuthorizationModify >> test_sql_streaming.py::test[suites-GroupByHopNoKey-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHopPercentile-default.txt] |82.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> TPersQueueTest::SchemeOperationsCheckPropValues [GOOD] >> TPersQueueTest::ReadRuleServiceType >> TTicketParserTest::LoginGood [GOOD] >> TTicketParserTest::LoginGoodWithGroups ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::NebiusAuthorizationModify [GOOD] Test command err: 2025-03-31T16:07:05.045526Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005866947832517:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:07:05.045587Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d83/r3tmp/tmpU00e1Q/pdisk_1.dat 2025-03-31T16:07:05.763811Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:07:05.763912Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:07:05.770307Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:07:05.810824Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2008, node 1 2025-03-31T16:07:05.944122Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:07:05.944155Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:07:05.944184Z node 1 
:NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:07:05.944324Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18440 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:07:06.453815Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:07:06.509346Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-03-31T16:07:06.509415Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:07:06.509428Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-31T16:07:06.509862Z node 1 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-03-31T16:07:06.509963Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Connect to grpc://localhost:3425 2025-03-31T16:07:06.534771Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" NebiusAccessService::Authenticate response 14: "Service Unavailable" 2025-03-31T16:07:06.677712Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Status 14 Service Unavailable 2025-03-31T16:07:06.678159Z node 1 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-03-31T16:07:06.678196Z node 1 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-03-31T16:07:06.678332Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Request AuthenticateRequest { iam_token: "**** (8E120919)" } 2025-03-31T16:07:06.682767Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Status 1 CANCELLED 2025-03-31T16:07:06.685991Z node 1 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'CANCELLED' 2025-03-31T16:07:09.685570Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488005884382578277:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:07:09.685631Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d83/r3tmp/tmpWQ20kU/pdisk_1.dat 2025-03-31T16:07:10.035210Z node 2 :IMPORT WARN: Table profiles were not loaded 
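The NET_CLASSIFIER warnings always come as the same cascade: distributable config rejected, file fallback attempted and failed, then the terminal "got bad distributable configuration". A sketch of that fallback chain, with illustrative function names and an assumed config path:

    #include <fstream>
    #include <iostream>
    #include <iterator>
    #include <optional>
    #include <string>

    // "distributable config is empty, broken or outdated, will use file"
    std::optional<std::string> LoadDistributableConfig() {
        return std::nullopt; // stubbed: nothing distributed in this sketch
    }

    // "will try to initialize from file" / "failed to initialize from file"
    std::optional<std::string> LoadFromFile(const std::string& path) {
        std::ifstream in(path);
        if (!in) return std::nullopt;
        return std::string(std::istreambuf_iterator<char>(in), {});
    }

    int main() {
        if (auto cfg = LoadDistributableConfig()) {
            std::cout << "using distributable config (" << cfg->size() << " bytes)\n";
        } else if (auto cfg = LoadFromFile("/etc/net_classifier.conf")) { // hypothetical path
            std::cout << "initialized from file (" << cfg->size() << " bytes)\n";
        } else {
            std::cerr << "got bad distributable configuration\n"; // terminal error in the log
        }
    }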
2025-03-31T16:07:10.052373Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:07:10.052468Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:07:10.060274Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11288, node 2 2025-03-31T16:07:10.243715Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:07:10.243738Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:07:10.243745Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:07:10.243857Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18508 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:07:10.613246Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
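Each test boots a server and then polls until "WaitRootIsUp 'Root' success."; the repeated TClient::Ls request/response pairs above are those probes. A sketch of the poll loop, with Ls() stubbed out (the real TClient call is not shown in this log):

    #include <chrono>
    #include <iostream>
    #include <string>
    #include <thread>

    // Stand-in for TClient::Ls: pretend the root becomes visible on the third probe.
    bool Ls(const std::string& path) {
        static int calls = 0;
        std::cout << "TClient::Ls request: " << path << "\n";
        return ++calls >= 3;
    }

    bool WaitRootIsUp(const std::string& root) {
        for (int attempt = 0; attempt < 30; ++attempt) {
            if (Ls(root)) return true; // "WaitRootIsUp 'Root' success."
            std::this_thread::sleep_for(std::chrono::milliseconds(100));
        }
        return false; // assumed timeout handling; the log never reaches this case
    }

    int main() {
        std::cout << (WaitRootIsUp("Root") ? "success" : "timeout") << "\n";
    }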
2025-03-31T16:07:10.624907Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-03-31T16:07:10.624937Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:07:10.624946Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-31T16:07:10.625018Z node 2 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read) 2025-03-31T16:07:10.625081Z node 2 :GRPC_CLIENT DEBUG: [517000051008] Connect to grpc://localhost:7824 2025-03-31T16:07:10.632033Z node 2 :GRPC_CLIENT DEBUG: [517000051008] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response 14: "Service Unavailable" 2025-03-31T16:07:10.664657Z node 2 :GRPC_CLIENT DEBUG: [517000051008] Status 14 Service Unavailable 2025-03-31T16:07:10.664826Z node 2 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2025-03-31T16:07:10.665037Z node 2 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-03-31T16:07:10.665106Z node 2 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read) 2025-03-31T16:07:10.665398Z node 2 :GRPC_CLIENT DEBUG: [517000051008] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response 14: "Service Unavailable" 2025-03-31T16:07:10.674563Z node 2 :GRPC_CLIENT DEBUG: [517000051008] Status 14 Service Unavailable 2025-03-31T16:07:10.683632Z node 2 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2025-03-31T16:07:10.683671Z node 2 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-03-31T16:07:11.726689Z node 2 :TICKET_PARSER DEBUG: Refreshing ticket **** (8E120919) 2025-03-31T16:07:11.726774Z node 2 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read) 2025-03-31T16:07:11.727026Z node 2 :GRPC_CLIENT DEBUG: [517000051008] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response 14: "Service Unavailable" 2025-03-31T16:07:11.731189Z node 2 :GRPC_CLIENT DEBUG: [517000051008] Status 14 Service Unavailable 2025-03-31T16:07:11.731303Z node 2 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a retryable error "Service 
Unavailable" retryable: 1 2025-03-31T16:07:11.731325Z node 2 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-03-31T16:07:13.727792Z node 2 :TICKET_PARSER DEBUG: Refreshing ticket **** (8E120919) 2025-03-31T16:07:13.727854Z node 2 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read) 2025-03-31T16:07:13.728074Z node 2 :GRPC_CLIENT DEBUG: [517000051008] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { account { ... 4Z node 4 :TICKET_PARSER TRACE: Ticket **** (BE2EA0D0) asking for AccessServiceAuthorization( something.read) 2025-03-31T16:07:29.811113Z node 4 :GRPC_CLIENT DEBUG: [517000065208] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (BE2EA0D0)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "invalid-token1" } } NebiusAccessService::Authorize response results { key: 0 value { resultCode: PERMISSION_DENIED } } 0: "OK" 2025-03-31T16:07:29.813921Z node 4 :GRPC_CLIENT DEBUG: [517000065208] Response AuthorizeResponse { results { key: 0 value { resultCode: PERMISSION_DENIED } } } 2025-03-31T16:07:29.815954Z node 4 :TICKET_PARSER TRACE: Ticket **** (BE2EA0D0) permission something.read access denied for subject "" 2025-03-31T16:07:29.815996Z node 4 :TICKET_PARSER DEBUG: Ticket **** (BE2EA0D0) () has now permanent error message 'Access Denied' 2025-03-31T16:07:29.816669Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-03-31T16:07:29.816701Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:07:29.816710Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-31T16:07:29.816765Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read) 2025-03-31T16:07:29.817001Z node 4 :GRPC_CLIENT DEBUG: [517000065208] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "XXXXXXXX" resource_path { path { id: "XXXXXXXX" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "XXXXXXXX" resource_path { path { id: "XXXXXXXX" } } iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { resultCode: PERMISSION_DENIED } } 0: "OK" 2025-03-31T16:07:29.819768Z node 4 :GRPC_CLIENT DEBUG: [517000065208] Response AuthorizeResponse { results { key: 0 value { resultCode: PERMISSION_DENIED } } } 2025-03-31T16:07:29.823101Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read access denied for subject "" 2025-03-31T16:07:29.823147Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now permanent error message 'Access Denied' 2025-03-31T16:07:29.823904Z node 4 :TICKET_PARSER TRACE: 
CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-03-31T16:07:29.823938Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:07:29.823951Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-31T16:07:29.824015Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read) 2025-03-31T16:07:29.824268Z node 4 :GRPC_CLIENT DEBUG: [517000065208] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "XXXXXXXX" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "XXXXXXXX" } } iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { account { user_account { id: "user1" } } } } 0: "OK" 2025-03-31T16:07:29.826672Z node 4 :GRPC_CLIENT DEBUG: [517000065208] Response AuthorizeResponse { results { key: 0 value { account { user_account { id: "user1" } } } } } 2025-03-31T16:07:29.826949Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-03-31T16:07:29.827786Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-03-31T16:07:29.827809Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:07:29.827817Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-31T16:07:29.827865Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read) 2025-03-31T16:07:29.828054Z node 4 :GRPC_CLIENT DEBUG: [517000065208] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "XXXXXXXX" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "XXXXXXXX" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { account { user_account { id: "user1" } } } } 0: "OK" 2025-03-31T16:07:29.829913Z node 4 :GRPC_CLIENT DEBUG: [517000065208] Response AuthorizeResponse { results { key: 0 value { account { user_account { id: "user1" } } } } } 2025-03-31T16:07:29.830517Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-03-31T16:07:35.313316Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7488005994587447743:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:07:35.326845Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d83/r3tmp/tmpG3EZgW/pdisk_1.dat 2025-03-31T16:07:35.745026Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:07:35.812410Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:07:35.812528Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:07:35.820748Z node 5 :HIVE WARN: 
HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1822, node 5 2025-03-31T16:07:35.959429Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:07:35.959456Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:07:35.959464Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:07:35.959618Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20004 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:07:36.479446Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
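The AuthorizeRequest dumps above batch several permission checks into a single call, keyed 0, 1, ..., and the response maps each key back to either an account or PERMISSION_DENIED. A sketch of that shape, using plain structs in place of the real protobufs and a stand-in allow/deny rule:

    #include <iostream>
    #include <map>
    #include <string>

    // Illustrative mirror of one entry in "checks { key: N value { ... } }".
    struct TCheck {
        std::string Permission;   // e.g. "something.read"
        std::string ContainerId;  // e.g. "aaaa1234"
        std::string ResourcePath; // e.g. "bbbb4554"
    };

    struct TResult {
        bool Allowed = false;
        std::string Subject; // filled on success, e.g. "user1"
    };

    std::map<int, TResult> Authorize(const std::map<int, TCheck>& checks) {
        std::map<int, TResult> results;
        for (const auto& [key, check] : checks) {
            // Stand-in decision rule: allow reads, deny everything else.
            results[key] = check.Permission == "something.read"
                               ? TResult{true, "user1"}
                               : TResult{false, ""};
        }
        return results;
    }

    int main() {
        std::map<int, TCheck> checks{
            {0, {"something.read",  "aaaa1234", "bbbb4554"}},
            {1, {"something.write", "aaaa1234", "bbbb4554"}},
        };
        for (const auto& [key, r] : Authorize(checks)) {
            std::cout << "check " << key << ": "
                      << (r.Allowed ? r.Subject + "@as" : "PERMISSION_DENIED") << "\n";
        }
    }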
2025-03-31T16:07:36.493565Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-03-31T16:07:36.493594Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:07:36.493608Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-31T16:07:36.493672Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read) 2025-03-31T16:07:36.493714Z node 5 :GRPC_CLIENT DEBUG: [51700000e808] Connect to grpc://localhost:8222 2025-03-31T16:07:36.494740Z node 5 :GRPC_CLIENT DEBUG: [51700000e808] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { account { user_account { id: "user1" } } } } 0: "OK" 2025-03-31T16:07:36.511457Z node 5 :GRPC_CLIENT DEBUG: [51700000e808] Response AuthorizeResponse { results { key: 0 value { account { user_account { id: "user1" } } } } } 2025-03-31T16:07:36.511861Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-03-31T16:07:36.512504Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-03-31T16:07:36.512544Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:07:36.512558Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-31T16:07:36.512628Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read something.write) 2025-03-31T16:07:36.512909Z node 5 :GRPC_CLIENT DEBUG: [51700000e808] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } checks { key: 1 value { permission { name: "something.write" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } checks { key: 1 value { permission { name: "something.write" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { account { user_account { id: "user1" } } } } results { key: 1 value { account { user_account { id: "user1" } } } } 0: "OK" 2025-03-31T16:07:36.515813Z node 5 :GRPC_CLIENT DEBUG: [51700000e808] Response AuthorizeResponse { results { key: 0 value { account { user_account { id: "user1" } } } } results { key: 1 value { account { user_account { id: "user1" } } } } } 2025-03-31T16:07:36.516133Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as >> TTicketParserTest::TicketFromCertificateWithValidationCheckIssuerBad [GOOD] |82.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> KqpJoin::JoinMismatchDictKeyTypes |82.2%| [LD] {default-linux-x86_64, release, asan} 
$(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel |82.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel |82.2%| [LD] {RESULT} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel >> KqpIndexLookupJoin::InnerJoinOnlyLeftColumn-StreamLookup [GOOD] |82.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_build_index/ydb-core-tx-datashard-ut_build_index |82.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_build_index/ydb-core-tx-datashard-ut_build_index |82.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_build_index/ydb-core-tx-datashard-ut_build_index |82.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction |82.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction |82.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction >> DataShardReadIterator::ShouldReadRangePrefix5 [GOOD] >> KqpJoin::RightTableValuePredicate [GOOD] >> TTicketParserTest::BulkAuthorizationWithUserAccount2 [GOOD] >> TTicketParserTest::BulkAuthorizationUnavailable >> DataShardReadIterator::ShouldReadFromHeadToMvccWithConflict-UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::TicketFromCertificateWithValidationCheckIssuerBad [GOOD] Test command err: 2025-03-31T16:07:12.819605Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005897727396969:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:07:12.826963Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d74/r3tmp/tmpgpeong/pdisk_1.dat 2025-03-31T16:07:13.259903Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:07:13.315273Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:07:13.315413Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:07:13.322653Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1367, node 1 2025-03-31T16:07:13.434293Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:07:13.434336Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:07:13.434351Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:07:13.434507Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29797 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:07:13.833415Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:07:13.868379Z node 1 :TICKET_PARSER DEBUG: Ticket 35AD891EA857912D80C22451A6C222DFFBE024E293D8BC035A6AC614B6428446 () has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2025-03-31T16:07:17.119532Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488005917837896643:2269];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:07:17.119579Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d74/r3tmp/tmpeD7xb5/pdisk_1.dat 2025-03-31T16:07:17.389684Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:07:17.415633Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:07:17.415710Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 32173, node 2 2025-03-31T16:07:17.427054Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:07:17.475543Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:07:17.475565Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:07:17.475573Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:07:17.475692Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12261 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:07:17.764788Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:07:17.773833Z node 2 :TICKET_PARSER DEBUG: Ticket A33703A932D3C4E5F0CB5302CAB880150B9B3858A7FC371BA12B1B278ED426C0 () has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2025-03-31T16:07:22.283349Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488005938418773063:2204];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:07:22.373160Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d74/r3tmp/tmpjT0SIe/pdisk_1.dat 2025-03-31T16:07:22.621075Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:07:22.641844Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:07:22.641940Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:07:22.652705Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2803, node 3 2025-03-31T16:07:22.867960Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:07:22.867989Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:07:22.867996Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:07:22.868152Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26277 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:07:23.494690Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:07:23.519474Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:07:23.522917Z node 3 :TICKET_PARSER DEBUG: Ticket 26870F17BEEC5C0DB50A4F76C7C523D78F027E149450C5A00714C4ACA24B3916 () has now permanent error message 'Cannot create token from certificate. Client certificate failed verification' 2025-03-31T16:07:23.523586Z node 3 :TICKET_PARSER ERROR: Ticket 26870F17BEEC5C0DB50A4F76C7C523D78F027E149450C5A00714C4ACA24B3916: Cannot create token from certificate. Client certificate failed verification 2025-03-31T16:07:28.807814Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488005963001759242:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:07:28.807857Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d74/r3tmp/tmpz4OOPJ/pdisk_1.dat 2025-03-31T16:07:29.349330Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:07:29.398908Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:07:29.399038Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:07:29.401400Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3447, node 4 2025-03-31T16:07:29.549709Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:07:29.549746Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:07:29.549756Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:07:29.549922Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14171 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:07:30.012800Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:07:30.022795Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:07:30.027160Z node 4 :TICKET_PARSER DEBUG: Ticket AD5DBE2811FCE4FD15285FC55E6D3BCD9149BCCF1AED7AA351FBFABC100D474B () has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2025-03-31T16:07:36.372463Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7488005999104255875:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:07:36.372526Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d74/r3tmp/tmpn9xWNG/pdisk_1.dat 2025-03-31T16:07:36.796402Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:07:36.848768Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:07:36.848867Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:07:36.855518Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17266, node 5 2025-03-31T16:07:37.046949Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:07:37.046977Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:07:37.046983Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:07:37.047139Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62934 WaitRootIsUp 'Root'... 
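The certificate tests above end in one of two outcomes: a verified client certificate yields a token of the form "<subject DN>@cert", and a failed verification becomes the permanent (non-retryable) error "Cannot create token from certificate. Client certificate failed verification". A sketch of that mapping, with verification itself stubbed out; the real code would validate the chain and issuer:

    #include <iostream>
    #include <stdexcept>
    #include <string>

    // Illustrative only: derives the token the log shows for a verified cert.
    std::string TokenFromCertificate(const std::string& subjectDn, bool verified) {
        if (!verified) {
            throw std::runtime_error(
                "Cannot create token from certificate. Client certificate failed verification");
        }
        return subjectDn + "@cert"; // e.g. "C=RU,ST=MSK,...,CN=localhost@cert"
    }

    int main() {
        std::cout << TokenFromCertificate("C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost", true)
                  << "\n";
        try {
            TokenFromCertificate("C=RU,CN=localhost", false);
        } catch (const std::exception& e) {
            std::cerr << e.what() << "\n"; // permanent error, as in the log
        }
    }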
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:07:37.440938Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:07:37.453627Z node 5 :TICKET_PARSER DEBUG: Ticket E32C3E55803AC3A0F56BFADB5A52CFF6E89FC6FEA32DBD9F2F5B52D2367D2298 () has now permanent error message 'Cannot create token from certificate. Client certificate failed verification' 2025-03-31T16:07:37.454282Z node 5 :TICKET_PARSER ERROR: Ticket E32C3E55803AC3A0F56BFADB5A52CFF6E89FC6FEA32DBD9F2F5B52D2367D2298: Cannot create token from certificate. Client certificate failed verification >> DataShardReadIterator::ShouldReadRangeChunk100 [GOOD] >> DataShardReadIteratorBatchMode::RangeFromNonInclusive [GOOD] >> DataShardReadIteratorBatchMode::RangeToInclusive >> OlapEstimationRowsCorrectness::TPCDS96 >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v0-std] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::RightTableValuePredicate [GOOD] Test command err: Trying to start YDB, gRPC: 64071, MsgBus: 20342 2025-03-31T16:07:33.759888Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005985235621984:2209];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:07:33.760539Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0019aa/r3tmp/tmpFSHhk8/pdisk_1.dat 2025-03-31T16:07:34.686051Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:07:34.693169Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:07:34.693282Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:07:34.699554Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64071, node 1 2025-03-31T16:07:34.887653Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:07:34.887676Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:07:34.887682Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:07:34.887796Z node 1 :NET_CLASSIFIER 
ERROR: got bad distributable configuration TClient is connected to server localhost:20342 TClient is connected to server localhost:20342 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:07:35.697343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:07:35.713920Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:07:35.722663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:07:35.958051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:07:36.215468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:07:36.310027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:07:38.730525Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488005985235621984:2209];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:07:38.745333Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:07:38.776091Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488006006710460053:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:07:38.776231Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:07:39.241581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:07:39.317596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:07:39.402954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:07:39.472809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:07:39.823663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:07:39.930908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:07:40.132198Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488006015300395182:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:07:40.132273Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:07:40.132622Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488006015300395187:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:07:40.136953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:07:40.166507Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488006015300395189:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:07:40.230977Z node 1 :TX_PROXY ERROR: Actor# [1:7488006015300395245:3464] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:07:42.267665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 >> TTicketParserTest::AuthenticationWithUserAccount ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::InnerJoinOnlyLeftColumn-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 14126, MsgBus: 2548 2025-03-31T16:07:20.239799Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005928857008791:2144];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:07:20.266599Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0019ab/r3tmp/tmpkfTJap/pdisk_1.dat 2025-03-31T16:07:21.041338Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:07:21.062905Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:07:21.070353Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:07:21.076097Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14126, node 1 2025-03-31T16:07:21.319633Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:07:21.319655Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:07:21.319661Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:07:21.319783Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2548 TClient is connected to server localhost:2548 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success.
2025-03-31T16:07:22.258291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:07:22.348801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:07:22.615684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:07:22.891870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:07:23.038938Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:07:25.226853Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488005928857008791:2144];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:07:25.226919Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:07:26.185557Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005954626814257:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:07:26.185693Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:07:26.601196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:07:26.710266Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:07:26.767126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:07:26.821163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:07:26.861994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:07:26.954467Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:07:27.047723Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005958921782075:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:07:27.047799Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:07:27.048004Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005958921782080:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:07:27.055802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:07:27.073973Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005958921782082:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:07:27.148380Z node 1 :TX_PROXY ERROR: Actor# [1:7488005958921782136:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:07:30.226250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:07:30.292250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:07:30.343320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T16:07:30.437709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T16:07:30.499979Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-31T16:07:30.556397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 15808, MsgBus: 17707 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0019ab/r3tmp/tmp7gpKcJ/pdisk_1.dat 2025-03-31T16:07:34.683862Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:07:34.781839Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:07:34.793603Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:07:34.793695Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:07:34.800095Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15808, node 2 2025-03-31T16:07:34.983566Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:07:34.983593Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:07:34.983601Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:07:34.983706Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17707 TClient is connected to server localhost:17707 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:07:35.608687Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:07:35.617546Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:07:35.631129Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:07:35.726969Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:07:35.946533Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:07:36.049767Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:07:38.767671Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488006008886869509:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:07:38.767763Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:07:38.857599Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:07:38.931192Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:07:38.986063Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:07:39.060823Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:07:39.121162Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:07:39.225255Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:07:39.368215Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488006013181837322:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:07:39.368308Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:07:39.368713Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488006013181837327:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:07:39.373443Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:07:39.442782Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488006013181837329:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:07:39.542049Z node 2 :TX_PROXY ERROR: Actor# [2:7488006013181837388:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:07:41.926178Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:07:42.004432Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:07:42.087309Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T16:07:42.153975Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T16:07:42.231243Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-31T16:07:42.291353Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 >> TTicketParserTest::BulkAuthorizationModify [GOOD] |82.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/s3_import/ydb-tests-olap-s3_import |82.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/s3_import/ydb-tests-olap-s3_import |82.5%| [LD] {RESULT} $(B)/ydb/tests/olap/s3_import/ydb-tests-olap-s3_import >> test_sql_streaming.py::test[suites-ReadWriteTopic-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-ReadWriteTopicWithSchema-default.txt] >> ColumnStatistics::CountMinSketchStatistics [GOOD] >> TPersQueueCommonTest::Auth_MultipleInflightWriteUpdateTokenRequestWithDifferentValidToken_SessionClosedWithOverloadedError [GOOD] >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithInvalidToken_SessionClosedWithUnauthenticatedError ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIterator::ShouldReadFromHeadToMvccWithConflict-UseSink [GOOD] Test command err: 2025-03-31T16:05:32.434950Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:05:32.435263Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:05:32.435488Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000c30/r3tmp/tmpRxDKPs/pdisk_1.dat 2025-03-31T16:05:32.835012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:05:32.885581Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:05:32.928143Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:05:32.928297Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:05:32.940954Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:05:33.032767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:05:33.079362Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:05:33.080381Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:05:33.080829Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-31T16:05:33.081074Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:05:33.141807Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:05:33.142525Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:05:33.142628Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:05:33.144509Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:05:33.144604Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:05:33.144682Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:05:33.145124Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:05:33.145286Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:05:33.145382Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-31T16:05:33.156453Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:05:33.199719Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T16:05:33.199928Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:05:33.200050Z node 1 :TX_DATASHARD DEBUG:
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-31T16:05:33.200093Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:05:33.200165Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T16:05:33.200234Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:05:33.200482Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:05:33.200533Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:05:33.200821Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T16:05:33.200943Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T16:05:33.201075Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:05:33.201135Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:05:33.201186Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-31T16:05:33.201272Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-31T16:05:33.201310Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-31T16:05:33.201343Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:05:33.201393Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:05:33.201503Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:05:33.201547Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:05:33.201618Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-31T16:05:33.201744Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-03-31T16:05:33.201789Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T16:05:33.201910Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:05:33.202147Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-31T16:05:33.202214Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T16:05:33.202309Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T16:05:33.202360Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-31T16:05:33.202420Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-31T16:05:33.202469Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-03-31T16:05:33.202545Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-31T16:05:33.202940Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-31T16:05:33.203002Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-31T16:05:33.203041Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-31T16:05:33.203096Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-31T16:05:33.203179Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-31T16:05:33.203237Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-31T16:05:33.203283Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-31T16:05:33.203318Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-31T16:05:33.203343Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-31T16:05:33.205205Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-31T16:05:33.205272Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:05:33.216195Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:05:33.216308Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-31T16:05:33.216361Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-31T16:05:33.216429Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-03-31T16:05:33.216514Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-31T16:05:33.393742Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:05:33.393817Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:05:33.393862Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-31T16:05:33.394350Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:568:2495], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-03-31T16:05:33.394420Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-31T16:05:33.394568Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-31T16:05:33.394625Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-03-31T16:05:33.394689Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-03-31T16:05:33.394741Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-03-31T16:05:33.401250Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T16:05:33.401360Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:05:33.402196Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:05:33.402253Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:05:33.402318Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:05:3 ... teState, waiting for 0 compute actor(s) and 1 datashard(s): DS 72075186224037888 (Executing), 2025-03-31T16:07:44.526127Z node 15 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 3001} 2025-03-31T16:07:44.526196Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:07:44.526235Z node 15 :TX_DATASHARD TRACE: Complete execution for [3001:281474976715667] at 72075186224037888 on unit CompleteOperation 2025-03-31T16:07:44.526290Z node 15 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:07:44.533484Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [15:1168:2935], Recipient [15:667:2571]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:07:44.533563Z node 15 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:07:44.533612Z node 15 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [15:1167:2934], serverId# [15:1168:2935], sessionId# [0:0:0] 2025-03-31T16:07:44.534552Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [15:594:2519], Recipient [15:667:2571]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 ResultFormat: FORMAT_ARROW MaxRowsInResult: 2 KeysSize: 6 2025-03-31T16:07:44.534794Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-03-31T16:07:44.534895Z node 15 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v3001/281474976715667 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1500/18446744073709551615 ImmediateWriteEdgeReplied# v1500/18446744073709551615 2025-03-31T16:07:44.534978Z node 15 :TX_DATASHARD TRACE: 72075186224037888 changed HEAD read to non-repeatable v3001/18446744073709551615 2025-03-31T16:07:44.535129Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit CheckRead 2025-03-31T16:07:44.535540Z node 15 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-03-31T16:07:44.535631Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 
executing on unit CheckRead 2025-03-31T16:07:44.535713Z node 15 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-03-31T16:07:44.535789Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit BuildAndWaitDependencies 2025-03-31T16:07:44.535876Z node 15 :TX_DATASHARD TRACE: Activated operation [0:4] at 72075186224037888 2025-03-31T16:07:44.535954Z node 15 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-03-31T16:07:44.535993Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-03-31T16:07:44.536023Z node 15 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit ExecuteRead 2025-03-31T16:07:44.536060Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit ExecuteRead 2025-03-31T16:07:44.536222Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 ResultFormat: FORMAT_ARROW MaxRowsInResult: 2 } 2025-03-31T16:07:44.536802Z node 15 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Continue 2025-03-31T16:07:44.536864Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Continue at tablet# 72075186224037888 2025-03-31T16:07:44.536990Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-03-31T16:07:44.579174Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [15:1063:2855], Recipient [15:667:2571]: {TEvReadSet step# 3001 txid# 281474976715667 TabletSource# 72075186224037891 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037891 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-03-31T16:07:44.579316Z node 15 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-03-31T16:07:44.579397Z node 15 :TX_DATASHARD DEBUG: Receive RS at 72075186224037888 source 72075186224037891 dest 72075186224037888 producer 72075186224037891 txId 281474976715667 2025-03-31T16:07:44.579593Z node 15 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037888 got read set: {TEvReadSet step# 3001 txid# 281474976715667 TabletSource# 72075186224037891 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037891 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-03-31T16:07:44.579864Z node 15 :TX_DATASHARD DEBUG: Complete [3001 : 281474976715667] from 72075186224037888 at tablet 72075186224037888 send result to client [15:1157:2910], exec latency: 0 ms, propose latency: 0 ms 2025-03-31T16:07:44.580012Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-03-31T16:07:44.580107Z node 15 :TX_DATASHARD DEBUG: Found ready candidate operation [0:4] at 72075186224037888 for ExecuteRead 2025-03-31T16:07:44.580601Z node 15 :KQP_EXECUTER DEBUG: ActorId: [15:1157:2910] TxId: 281474976715667. Ctx: { TraceId: 01jqpfaxawbv7ty5e60eq0rt7t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=15&id=NWI4MDg5YTAtMWFiMTA4NTEtZjFkNmRmOTAtYzhhNWU4ODg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Got propose result, shard: 72075186224037888, status: COMPLETE, error: 2025-03-31T16:07:44.580878Z node 15 :KQP_EXECUTER DEBUG: ActorId: [15:1157:2910] TxId: 281474976715667. 
Ctx: { TraceId: 01jqpfaxawbv7ty5e60eq0rt7t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=15&id=NWI4MDg5YTAtMWFiMTA4NTEtZjFkNmRmOTAtYzhhNWU4ODg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-03-31T16:07:44.580995Z node 15 :KQP_EXECUTER DEBUG: ActorId: [15:1157:2910] TxId: 281474976715667. Ctx: { TraceId: 01jqpfaxawbv7ty5e60eq0rt7t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=15&id=NWI4MDg5YTAtMWFiMTA4NTEtZjFkNmRmOTAtYzhhNWU4ODg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-03-31T16:07:44.581206Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [15:667:2571], Recipient [15:667:2571]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:07:44.581299Z node 15 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:07:44.588049Z node 15 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:07:44.588677Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:07:44.588773Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-03-31T16:07:44.588854Z node 15 :TX_DATASHARD DEBUG: Return cached ready operation [0:4] at 72075186224037888 2025-03-31T16:07:44.588924Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit ExecuteRead 2025-03-31T16:07:44.589138Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 2, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 ResultFormat: FORMAT_ARROW MaxRowsInResult: 2 } 2025-03-31T16:07:44.589763Z node 15 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3001/18446744073709551615 2025-03-31T16:07:44.589868Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[15:594:2519], 1} after executionsCount# 2 2025-03-31T16:07:44.589979Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:594:2519], 1} sends rowCount# 2, bytes# 32, quota rows left# 18446744073709551613, quota bytes left# 18446744073709551583, hasUnreadQueries# 1, total queries# 6, firstUnprocessed# 0 2025-03-31T16:07:44.590332Z node 15 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-03-31T16:07:44.590430Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit ExecuteRead 2025-03-31T16:07:44.590504Z node 15 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit CompletedOperations 2025-03-31T16:07:44.590572Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit CompletedOperations 2025-03-31T16:07:44.590641Z node 15 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-03-31T16:07:44.590671Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit CompletedOperations 2025-03-31T16:07:44.590712Z node 15 :TX_DATASHARD TRACE: Execution plan for [0:4] at 72075186224037888 has finished 2025-03-31T16:07:44.590792Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 
planned 0 2025-03-31T16:07:44.590863Z node 15 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-03-31T16:07:44.590938Z node 15 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-31T16:07:44.591018Z node 15 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-31T16:07:44.596564Z node 15 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 5, sender: [15:594:2519], selfId: [15:57:2104], source: [15:1135:2910] 2025-03-31T16:07:44.596890Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:667:2571], Recipient [15:667:2571]: NKikimr::TEvDataShard::TEvReadContinue 2025-03-31T16:07:44.597015Z node 15 :TX_DATASHARD TRACE: 72075186224037888 ReadContinue for iterator# {[15:594:2519], 1}, firstUnprocessedQuery# 2 2025-03-31T16:07:44.597331Z node 15 :TX_DATASHARD TRACE: 72075186224037888 ReadContinue: iterator# {[15:594:2519], 1}, FirstUnprocessedQuery# 2 2025-03-31T16:07:44.603310Z node 15 :TX_DATASHARD TRACE: 72075186224037888 readContinue iterator# {[15:594:2519], 1} sends rowCount# 2, bytes# 32, quota rows left# 18446744073709551611, quota bytes left# 18446744073709551551, hasUnreadQueries# 1, total queries# 6, firstUnprocessed# 2 2025-03-31T16:07:44.603938Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:667:2571], Recipient [15:667:2571]: NKikimr::TEvDataShard::TEvReadContinue 2025-03-31T16:07:44.604032Z node 15 :TX_DATASHARD TRACE: 72075186224037888 ReadContinue for iterator# {[15:594:2519], 1}, firstUnprocessedQuery# 4 2025-03-31T16:07:44.604223Z node 15 :TX_DATASHARD TRACE: 72075186224037888 ReadContinue: iterator# {[15:594:2519], 1}, FirstUnprocessedQuery# 4 2025-03-31T16:07:44.604375Z node 15 :TX_DATASHARD TRACE: 72075186224037888 readContinue iterator# {[15:594:2519], 1} sends rowCount# 2, bytes# 32, quota rows left# 18446744073709551609, quota bytes left# 18446744073709551519, hasUnreadQueries# 0, total queries# 6, firstUnprocessed# 4 2025-03-31T16:07:44.604535Z node 15 :TX_DATASHARD DEBUG: 72075186224037888 read iterator# {[15:594:2519], 1} finished in ReadContinue 2025-03-31T16:07:44.605799Z node 15 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=15&id=NWI4MDg5YTAtMWFiMTA4NTEtZjFkNmRmOTAtYzhhNWU4ODg=, workerId: [15:1135:2910], local sessions count: 0 2025-03-31T16:07:44.606106Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [15:61:2108], Recipient [15:1063:2855]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715666 LockNode: 15 Status: STATUS_NOT_FOUND >> TPersQueueTest::TopicServiceCommitOffsetBadOffsets [GOOD] >> TPersQueueTest::TopicServiceReadBudget ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIterator::ShouldReadRangePrefix5 [GOOD] Test command err: 2025-03-31T16:05:32.931070Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:05:32.931374Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:05:32.931557Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000c41/r3tmp/tmpZ6wtp5/pdisk_1.dat 2025-03-31T16:05:33.337176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:05:33.381265Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:05:33.432879Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:05:33.433055Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:05:33.448352Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:05:33.544533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:05:33.601484Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:05:33.602319Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:05:33.602671Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-31T16:05:33.602884Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:05:33.691521Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:05:33.692452Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:05:33.692647Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:05:33.694687Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:05:33.694786Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:05:33.694877Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:05:33.695319Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:05:33.695497Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:05:33.695588Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-31T16:05:33.706395Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:05:33.731405Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T16:05:33.731625Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:05:33.731767Z node 1 :TX_DATASHARD DEBUG:
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-31T16:05:33.731822Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:05:33.731860Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T16:05:33.731910Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:05:33.732185Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:05:33.732238Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:05:33.732557Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T16:05:33.732663Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T16:05:33.732790Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:05:33.732842Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:05:33.732896Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-31T16:05:33.732932Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-31T16:05:33.732964Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-31T16:05:33.732995Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:05:33.733037Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:05:33.733137Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:05:33.733169Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:05:33.733232Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-31T16:05:33.733327Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-03-31T16:05:33.733361Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T16:05:33.733499Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:05:33.733759Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-31T16:05:33.733832Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T16:05:33.733917Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T16:05:33.733966Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-31T16:05:33.734012Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-31T16:05:33.734082Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-03-31T16:05:33.734123Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-31T16:05:33.734434Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-31T16:05:33.734474Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-31T16:05:33.734509Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-31T16:05:33.734539Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-31T16:05:33.734597Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-31T16:05:33.734636Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-31T16:05:33.734673Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-31T16:05:33.734705Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-31T16:05:33.734746Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-31T16:05:33.736609Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-31T16:05:33.736671Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:05:33.747514Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:05:33.747600Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-31T16:05:33.747642Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-31T16:05:33.747707Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-03-31T16:05:33.747786Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-31T16:05:33.909948Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:05:33.910052Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:05:33.910079Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-31T16:05:33.910400Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:568:2495], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-03-31T16:05:33.910423Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-31T16:05:33.910518Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-31T16:05:33.910553Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-03-31T16:05:33.910650Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-03-31T16:05:33.910683Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-03-31T16:05:33.921469Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T16:05:33.921565Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:05:33.922362Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:05:33.922431Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:05:33.922491Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:05:3 ... 15 :TX_DATASHARD TRACE: Execution plan for [3000:281474976715664] at 72075186224037888 has finished 2025-03-31T16:07:44.417587Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:07:44.417651Z node 15 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-03-31T16:07:44.417718Z node 15 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-31T16:07:44.417781Z node 15 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-31T16:07:44.418178Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [15:884:2711], Recipient [15:884:2711]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:07:44.418231Z node 15 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:07:44.418300Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-31T16:07:44.418343Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:07:44.418382Z node 15 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2025-03-31T16:07:44.418426Z node 15 :TX_DATASHARD DEBUG: Found ready operation [3000:281474976715664] in PlanQueue unit at 72075186224037889 2025-03-31T16:07:44.418462Z node 15 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit PlanQueue 2025-03-31T16:07:44.418500Z node 15 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2025-03-31T16:07:44.418536Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit PlanQueue 2025-03-31T16:07:44.418572Z node 15 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit LoadTxDetails 2025-03-31T16:07:44.418612Z node 15 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit LoadTxDetails 2025-03-31T16:07:44.418804Z node 15 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 3000:281474976715664 keys extracted: 0 2025-03-31T16:07:44.418866Z node 15 :TX_DATASHARD TRACE: 
Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2025-03-31T16:07:44.418900Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit LoadTxDetails 2025-03-31T16:07:44.418934Z node 15 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-03-31T16:07:44.418969Z node 15 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit BuildAndWaitDependencies 2025-03-31T16:07:44.419019Z node 15 :TX_DATASHARD TRACE: Operation [3000:281474976715664] is the new logically complete end at 72075186224037889 2025-03-31T16:07:44.419065Z node 15 :TX_DATASHARD TRACE: Operation [3000:281474976715664] is the new logically incomplete end at 72075186224037889 2025-03-31T16:07:44.419123Z node 15 :TX_DATASHARD TRACE: Activated operation [3000:281474976715664] at 72075186224037889 2025-03-31T16:07:44.419174Z node 15 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2025-03-31T16:07:44.419204Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-03-31T16:07:44.419232Z node 15 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit CreateVolatileSnapshot 2025-03-31T16:07:44.419265Z node 15 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit CreateVolatileSnapshot 2025-03-31T16:07:44.419432Z node 15 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is ExecutedNoMoreRestarts 2025-03-31T16:07:44.419475Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit CreateVolatileSnapshot 2025-03-31T16:07:44.419518Z node 15 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit DropVolatileSnapshot 2025-03-31T16:07:44.419566Z node 15 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit DropVolatileSnapshot 2025-03-31T16:07:44.419598Z node 15 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2025-03-31T16:07:44.419628Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit DropVolatileSnapshot 2025-03-31T16:07:44.419657Z node 15 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit CompleteOperation 2025-03-31T16:07:44.419690Z node 15 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit CompleteOperation 2025-03-31T16:07:44.419864Z node 15 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is DelayComplete 2025-03-31T16:07:44.419898Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit CompleteOperation 2025-03-31T16:07:44.419938Z node 15 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit CompletedOperations 2025-03-31T16:07:44.419981Z node 15 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit CompletedOperations 2025-03-31T16:07:44.420016Z node 15 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2025-03-31T16:07:44.420044Z node 15 :TX_DATASHARD TRACE: Advance execution plan 
for [3000:281474976715664] at 72075186224037889 executing on unit CompletedOperations 2025-03-31T16:07:44.420073Z node 15 :TX_DATASHARD TRACE: Execution plan for [3000:281474976715664] at 72075186224037889 has finished 2025-03-31T16:07:44.420120Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:07:44.420160Z node 15 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2025-03-31T16:07:44.420200Z node 15 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037889 has no attached operations 2025-03-31T16:07:44.420242Z node 15 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037889 2025-03-31T16:07:44.442452Z node 15 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 3000} 2025-03-31T16:07:44.442627Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:07:44.442710Z node 15 :TX_DATASHARD TRACE: Complete execution for [3000:281474976715664] at 72075186224037888 on unit CompleteOperation 2025-03-31T16:07:44.442828Z node 15 :TX_DATASHARD DEBUG: Complete [3000 : 281474976715664] from 72075186224037888 at tablet 72075186224037888 send result to client [15:1048:2835], exec latency: 0 ms, propose latency: 0 ms 2025-03-31T16:07:44.442925Z node 15 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:07:44.443140Z node 15 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 3000} 2025-03-31T16:07:44.443202Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-03-31T16:07:44.443235Z node 15 :TX_DATASHARD TRACE: Complete execution for [3000:281474976715664] at 72075186224037889 on unit CompleteOperation 2025-03-31T16:07:44.443283Z node 15 :TX_DATASHARD DEBUG: Complete [3000 : 281474976715664] from 72075186224037889 at tablet 72075186224037889 send result to client [15:1048:2835], exec latency: 0 ms, propose latency: 0 ms 2025-03-31T16:07:44.443326Z node 15 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-31T16:07:44.445443Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [15:594:2519], Recipient [15:667:2571]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3000 TxId: 281474976715664 } ResultFormat: FORMAT_ARROW RangesSize: 1 2025-03-31T16:07:44.445659Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-03-31T16:07:44.445785Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit CheckRead 2025-03-31T16:07:44.445959Z node 15 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-03-31T16:07:44.446038Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit CheckRead 2025-03-31T16:07:44.446107Z node 15 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-03-31T16:07:44.446178Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit BuildAndWaitDependencies 2025-03-31T16:07:44.446227Z node 15 :TX_DATASHARD TRACE: Activated operation [0:4] at 72075186224037888 2025-03-31T16:07:44.446283Z node 15 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 
2025-03-31T16:07:44.446319Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-03-31T16:07:44.446353Z node 15 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit ExecuteRead 2025-03-31T16:07:44.446385Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit ExecuteRead 2025-03-31T16:07:44.446558Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3000 TxId: 281474976715664 } ResultFormat: FORMAT_ARROW } 2025-03-31T16:07:44.447072Z node 15 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3000/281474976715664 2025-03-31T16:07:44.450630Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[15:594:2519], 1} after executionsCount# 1 2025-03-31T16:07:44.450762Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:594:2519], 1} sends rowCount# 2, bytes# 32, quota rows left# 18446744073709551613, quota bytes left# 18446744073709551583, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-03-31T16:07:44.451241Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:594:2519], 1} finished in read 2025-03-31T16:07:44.454668Z node 15 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-03-31T16:07:44.454738Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit ExecuteRead 2025-03-31T16:07:44.454775Z node 15 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit CompletedOperations 2025-03-31T16:07:44.454811Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit CompletedOperations 2025-03-31T16:07:44.454875Z node 15 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-03-31T16:07:44.454903Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit CompletedOperations 2025-03-31T16:07:44.454953Z node 15 :TX_DATASHARD TRACE: Execution plan for [0:4] at 72075186224037888 has finished 2025-03-31T16:07:44.455030Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-03-31T16:07:44.455307Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeRightBorder+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeRightBorder-EvWrite ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::BulkAuthorizationModify [GOOD] Test command err: 2025-03-31T16:07:13.464268Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005900479651579:2108];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:07:13.479060Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d73/r3tmp/tmpNqCqlL/pdisk_1.dat 2025-03-31T16:07:13.818531Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:07:13.822361Z node 1 :HIVE WARN: HIVE#72057594037968897 
Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:07:13.822460Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:07:13.826500Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3576, node 1 2025-03-31T16:07:13.931711Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:07:13.931739Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:07:13.931745Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:07:13.931866Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25967 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:07:14.270119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
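The TICKET_PARSER entries that follow show the retry policy in action: each gRPC status 14 (Service Unavailable) marks the cached ticket entry with a retryable error ("retryable: 1") and a refresh is scheduled roughly a second later, repeating until a BulkAuthorizeResponse finally yields a valid token; "Access Denied" responses, by contrast, are recorded as permanent ("retryable:0"). A small sketch of that classification, with hypothetical names and status codes as they appear in this log:

#include <iostream>

bool IsRetryable(int grpcStatus) {
    // 14 = UNAVAILABLE; treated as transient in the traces above and below.
    return grpcStatus == 14;
}

int main() {
    // Three unavailable responses, then success, mirroring the node-1 run.
    for (int status : {14, 14, 14, 0}) {
        if (status == 0) {
            std::cout << "has now valid token\n";
            break;
        }
        std::cout << (IsRetryable(status)
                          ? "retryable error, scheduling refresh\n"
                          : "permanent error, caching failure\n");
    }
}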
2025-03-31T16:07:14.295616Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceBulkAuthorization( something.read) 2025-03-31T16:07:14.303023Z node 1 :GRPC_CLIENT DEBUG: [517000010408] Connect to grpc://localhost:18617 2025-03-31T16:07:14.308839Z node 1 :GRPC_CLIENT DEBUG: [517000010408] Request BulkAuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2025-03-31T16:07:14.336227Z node 1 :GRPC_CLIENT DEBUG: [517000010408] Status 14 Service Unavailable 2025-03-31T16:07:14.336382Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2025-03-31T16:07:14.336435Z node 1 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-03-31T16:07:14.336509Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceBulkAuthorization( something.read) 2025-03-31T16:07:14.336846Z node 1 :GRPC_CLIENT DEBUG: [517000010408] Request BulkAuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2025-03-31T16:07:14.343208Z node 1 :GRPC_CLIENT DEBUG: [517000010408] Status 14 Service Unavailable 2025-03-31T16:07:14.343488Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2025-03-31T16:07:14.343516Z node 1 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-03-31T16:07:15.480231Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket AKIA****MPLE (B3EDC139) 2025-03-31T16:07:15.480395Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceBulkAuthorization( something.read) 2025-03-31T16:07:15.480871Z node 1 :GRPC_CLIENT DEBUG: [517000010408] Request BulkAuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2025-03-31T16:07:15.487189Z node 1 :GRPC_CLIENT DEBUG: [517000010408] Status 14 Service Unavailable 2025-03-31T16:07:15.487328Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2025-03-31T16:07:15.487356Z node 1 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-03-31T16:07:16.483404Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket AKIA****MPLE (B3EDC139) 2025-03-31T16:07:16.483504Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceBulkAuthorization( something.read) 2025-03-31T16:07:16.484106Z node 1 :GRPC_CLIENT DEBUG: [517000010408] Request BulkAuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } actions { items { resource_path { id: "bbbb4554" type: "ydb.database" 
} resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2025-03-31T16:07:16.489355Z node 1 :GRPC_CLIENT DEBUG: [517000010408] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2025-03-31T16:07:16.489613Z node 1 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now valid token of user1@as 2025-03-31T16:07:18.459339Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488005900479651579:2108];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:07:18.459434Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:07:27.132276Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488005962662259899:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:07:27.132307Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d73/r3tmp/tmpcLCYPN/pdisk_1.dat 2025-03-31T16:07:27.459437Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:07:27.474475Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:07:27.474562Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:07:27.486825Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31503, node 2 2025-03-31T16:07:27.687841Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:07:27.687871Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:07:27.687879Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:07:27.688004Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10543 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:07:27.957128Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
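Throughout these traces the access key AKIAIOSFODNN7EXAMPLE is never logged verbatim: it appears as AKIA****MPLE, i.e. the first four and last four characters with the middle elided, followed by a short parenthesized digest (B3EDC139) whose derivation is not visible in the log. A hypothetical helper that reproduces the visible masking format (not YDB's actual implementation):

#include <iostream>
#include <string>

std::string MaskTicket(const std::string& ticket) {
    if (ticket.size() <= 8) {
        return "****";  // too short to reveal any part safely
    }
    return ticket.substr(0, 4) + "****" + ticket.substr(ticket.size() - 4);
}

int main() {
    std::cout << MaskTicket("AKIAIOSFODNN7EXAMPLE") << "\n";  // AKIA****MPLE
}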
2025-03-31T16:07:27.971854Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:07:27.974972Z node 2 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceBulkAuthorization( something.read) 2025-03-31T16:07:27.975044Z node 2 :GRPC_CLIENT DEBUG: [517000083508] Connect to grpc://localhost:26586 2025-03-31T16:07:27.976222Z node 2 :GRPC_CLIENT DEBUG: [517000083508] Request BulkAuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2025-03-31T16:07:27.987297Z node 2 :GRPC_CLIENT DEBUG: [517000083508] Status 14 Service Unavailable 2025-03-31T16:07:27.987720Z node 2 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2025-03-31T16:07:27.987751Z node 2 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-03-31T16:07:27.987819Z node 2 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceBulkAuthorization( something.read) 2025-03-31T16:07:27.988077Z node 2 :GRPC_CLIENT DEBUG: [517000083508] Request BulkAuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2025-03-31T16:07:27.990514Z node 2 :GRPC_CLIENT DEBUG: [517000083508] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } ... RN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:07:37.068968Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4328, node 4 2025-03-31T16:07:37.140358Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:07:37.140377Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:07:37.140384Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:07:37.140546Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29766 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-31T16:07:37.419298Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:07:37.431195Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-03-31T16:07:37.431227Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:07:37.431236Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-31T16:07:37.431274Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2025-03-31T16:07:37.431352Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(somewhere.sleep) 2025-03-31T16:07:37.431374Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.list) 2025-03-31T16:07:37.431396Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.write) 2025-03-31T16:07:37.431418Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.eat) 2025-03-31T16:07:37.431487Z node 4 :GRPC_CLIENT DEBUG: [517000094288] Connect to grpc://localhost:16874 2025-03-31T16:07:37.434843Z node 4 :GRPC_CLIENT DEBUG: [517000094288] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-03-31T16:07:37.438080Z node 4 :GRPC_CLIENT DEBUG: [517000094288] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "somewhere.sleep" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-03-31T16:07:37.438297Z node 4 :GRPC_CLIENT DEBUG: [517000094288] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.list" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-03-31T16:07:37.438423Z node 4 :GRPC_CLIENT DEBUG: [517000094288] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-03-31T16:07:37.438598Z node 4 :GRPC_CLIENT DEBUG: [517000094288] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.eat" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-03-31T16:07:37.446822Z node 4 :GRPC_CLIENT DEBUG: [517000094288] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-03-31T16:07:37.447239Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a valid subject "user1@as" 2025-03-31T16:07:37.448859Z node 4 :GRPC_CLIENT DEBUG: [517000094288] Status 16 Access Denied 2025-03-31T16:07:37.448994Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission somewhere.sleep now has a permanent error "Access Denied" retryable:0 2025-03-31T16:07:37.451701Z node 4 :GRPC_CLIENT DEBUG: [517000094288] Status 16 Access Denied 2025-03-31T16:07:37.451928Z node 4 :GRPC_CLIENT DEBUG: [517000094288] Status 16 Access Denied 2025-03-31T16:07:37.452216Z node 4 
:TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a permanent error "Access Denied" retryable:0 2025-03-31T16:07:37.452253Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.eat now has a permanent error "Access Denied" retryable:0 2025-03-31T16:07:37.453195Z node 4 :GRPC_CLIENT DEBUG: [517000094288] Status 16 Access Denied 2025-03-31T16:07:37.453444Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.list now has a permanent error "Access Denied" retryable:0 2025-03-31T16:07:37.453480Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for UserAccount(user1@as) 2025-03-31T16:07:37.454957Z node 4 :GRPC_CLIENT DEBUG: [517000094988] Connect to grpc://localhost:26060 2025-03-31T16:07:37.455952Z node 4 :GRPC_CLIENT DEBUG: [517000094988] Request GetUserAccountRequest { user_account_id: "user1" } 2025-03-31T16:07:37.468060Z node 4 :GRPC_CLIENT DEBUG: [517000094988] Response UserAccount { yandex_passport_user_account { login: "login1" } } 2025-03-31T16:07:37.469316Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of login1@passport 2025-03-31T16:07:43.122681Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7488006030719879301:2093];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:07:43.125709Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d73/r3tmp/tmpyQmjjd/pdisk_1.dat 2025-03-31T16:07:43.319573Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:07:43.348782Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:07:43.348888Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:07:43.351648Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17878, node 5 2025-03-31T16:07:43.471441Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:07:43.471466Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:07:43.471473Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:07:43.471620Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13648 WaitRootIsUp 'Root'... 
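Two authorization shapes are visible around here: node 4 above issues five separate AuthorizeRequest calls, one per permission, and aggregates the per-permission verdicts (only something.write succeeded), while node 5 below packs everything into a single BulkAuthorizeRequest with result_filter: ALL_FAILED, so only failing items ever need to be returned. A rough sketch of the difference in RPC count, assumed shape only, with the outcome table copied from the node-4 trace:

#include <iostream>
#include <map>
#include <string>
#include <vector>

int main() {
    std::vector<std::string> perms = {"something.read", "somewhere.sleep",
                                      "something.list", "something.write",
                                      "something.eat"};
    // Verdicts as seen in the node-4 trace: only something.write succeeded.
    std::map<std::string, bool> ok = {
        {"something.read", false}, {"somewhere.sleep", false},
        {"something.list", false}, {"something.write", true},
        {"something.eat", false}};

    std::cout << "per-permission: " << perms.size() << " RPCs\n";
    std::cout << "bulk (ALL_FAILED): 1 RPC, failures returned:";
    for (const auto& p : perms)
        if (!ok[p]) std::cout << " " << p;
    std::cout << "\n";
}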
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:07:43.845518Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:07:43.859465Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:07:43.861828Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-03-31T16:07:43.861859Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:07:43.861871Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-31T16:07:43.861969Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read) 2025-03-31T16:07:43.862028Z node 5 :GRPC_CLIENT DEBUG: [51700000e808] Connect to grpc://localhost:12373 2025-03-31T16:07:43.863105Z node 5 :GRPC_CLIENT DEBUG: [51700000e808] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2025-03-31T16:07:43.875205Z node 5 :GRPC_CLIENT DEBUG: [51700000e808] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2025-03-31T16:07:43.879317Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-03-31T16:07:43.883942Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-03-31T16:07:43.883976Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:07:43.883996Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-31T16:07:43.884096Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read something.write) 2025-03-31T16:07:43.884393Z node 5 :GRPC_CLIENT DEBUG: [51700000e808] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.write" } } result_filter: ALL_FAILED } 2025-03-31T16:07:43.888807Z node 5 :GRPC_CLIENT DEBUG: 
[51700000e808] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2025-03-31T16:07:43.890974Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as >> TTicketParserTest::NebiusAuthorizationWithRequiredPermissions [GOOD] >> TTicketParserTest::NebiusAuthorizationUnavailable >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinueInvisibleRowSkips-EvWrite [GOOD] |82.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-std] [GOOD] |82.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-std] [GOOD] >> TReplicaTest::CommitWithoutHandshake >> TReplicaTest::CommitWithoutHandshake [GOOD] >> TReplicaTest::CommitWithStaleGeneration >> TTicketParserTest::LoginGoodWithGroups [GOOD] >> TTicketParserTest::LoginRefreshGroupsGood >> TReplicaTest::CommitWithStaleGeneration [GOOD] >> TReplicaTest::Delete >> TReplicaTest::Delete [GOOD] |82.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIterator::ShouldReadRangeChunk100 [GOOD] Test command err: 2025-03-31T16:05:32.294890Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:05:32.295149Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:05:32.295330Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000c6a/r3tmp/tmpXaiSwl/pdisk_1.dat 2025-03-31T16:05:32.691308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:05:32.747574Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:05:32.788574Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:05:32.788715Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:05:32.801017Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:05:32.892738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:05:32.954224Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:05:32.955633Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:05:32.956233Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-31T16:05:32.956531Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:05:33.013045Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:05:33.013931Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:05:33.014080Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:05:33.016228Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:05:33.016321Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:05:33.016405Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:05:33.016882Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:05:33.017074Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:05:33.017167Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-31T16:05:33.028134Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:05:33.067989Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T16:05:33.068231Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:05:33.068396Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-31T16:05:33.068460Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:05:33.068502Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T16:05:33.068561Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:05:33.068823Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:05:33.068877Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:05:33.069198Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T16:05:33.069313Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T16:05:33.069418Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:05:33.069468Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:05:33.069510Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-31T16:05:33.069545Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-31T16:05:33.069573Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-31T16:05:33.069601Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:05:33.069634Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:05:33.069717Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:05:33.069751Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:05:33.069808Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-31T16:05:33.069886Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-03-31T16:05:33.069922Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T16:05:33.070020Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:05:33.070242Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-31T16:05:33.070295Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T16:05:33.070363Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T16:05:33.070413Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-31T16:05:33.070458Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-31T16:05:33.070489Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-03-31T16:05:33.070516Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-31T16:05:33.070731Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-31T16:05:33.070762Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-31T16:05:33.070794Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-31T16:05:33.070833Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-31T16:05:33.070883Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-31T16:05:33.070914Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-31T16:05:33.070947Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-31T16:05:33.070973Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-31T16:05:33.070993Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-31T16:05:33.072421Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-31T16:05:33.072483Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:05:33.083945Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:05:33.084032Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-31T16:05:33.084079Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-31T16:05:33.084174Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-03-31T16:05:33.084257Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-31T16:05:33.265585Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:05:33.265668Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:05:33.265716Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-31T16:05:33.266201Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:568:2495], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-03-31T16:05:33.266256Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-31T16:05:33.266399Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-31T16:05:33.266455Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-03-31T16:05:33.266519Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-03-31T16:05:33.266565Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-03-31T16:05:33.277369Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T16:05:33.277481Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:05:33.278270Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:05:33.278336Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:05:33.278404Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:05:3 ... :2519], 1}, firstUnprocessedQuery# 0 2025-03-31T16:07:45.241607Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:594:2519], 1}, FirstUnprocessedQuery# 0 2025-03-31T16:07:45.242008Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:594:2519], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709543002, quota bytes left# 18446744073709000383, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-03-31T16:07:45.242159Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:985:2787], Recipient [15:985:2787]: NKikimr::TEvDataShard::TEvReadContinue 2025-03-31T16:07:45.242214Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:594:2519], 1}, firstUnprocessedQuery# 0 2025-03-31T16:07:45.242263Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:594:2519], 1}, FirstUnprocessedQuery# 0 2025-03-31T16:07:45.245380Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:594:2519], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709542903, quota bytes left# 18446744073708994047, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-03-31T16:07:45.245702Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:985:2787], Recipient [15:985:2787]: NKikimr::TEvDataShard::TEvReadContinue 2025-03-31T16:07:45.245781Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:594:2519], 1}, firstUnprocessedQuery# 0 2025-03-31T16:07:45.245841Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:594:2519], 1}, FirstUnprocessedQuery# 0 2025-03-31T16:07:45.246281Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:594:2519], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709542804, quota bytes left# 18446744073708987711, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-03-31T16:07:45.246504Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:985:2787], Recipient [15:985:2787]: NKikimr::TEvDataShard::TEvReadContinue 2025-03-31T16:07:45.246566Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:594:2519], 1}, firstUnprocessedQuery# 0 2025-03-31T16:07:45.246616Z 
node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:594:2519], 1}, FirstUnprocessedQuery# 0 2025-03-31T16:07:45.247113Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:594:2519], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709542705, quota bytes left# 18446744073708981375, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-03-31T16:07:45.247328Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:985:2787], Recipient [15:985:2787]: NKikimr::TEvDataShard::TEvReadContinue 2025-03-31T16:07:45.247391Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:594:2519], 1}, firstUnprocessedQuery# 0 2025-03-31T16:07:45.247440Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:594:2519], 1}, FirstUnprocessedQuery# 0 2025-03-31T16:07:45.247903Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:594:2519], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709542606, quota bytes left# 18446744073708975039, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-03-31T16:07:45.248095Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:985:2787], Recipient [15:985:2787]: NKikimr::TEvDataShard::TEvReadContinue 2025-03-31T16:07:45.248156Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:594:2519], 1}, firstUnprocessedQuery# 0 2025-03-31T16:07:45.248210Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:594:2519], 1}, FirstUnprocessedQuery# 0 2025-03-31T16:07:45.248708Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:594:2519], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709542507, quota bytes left# 18446744073708968703, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-03-31T16:07:45.248915Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:985:2787], Recipient [15:985:2787]: NKikimr::TEvDataShard::TEvReadContinue 2025-03-31T16:07:45.248977Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:594:2519], 1}, firstUnprocessedQuery# 0 2025-03-31T16:07:45.249023Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:594:2519], 1}, FirstUnprocessedQuery# 0 2025-03-31T16:07:45.249474Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:594:2519], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709542408, quota bytes left# 18446744073708962367, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-03-31T16:07:45.249639Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:985:2787], Recipient [15:985:2787]: NKikimr::TEvDataShard::TEvReadContinue 2025-03-31T16:07:45.249699Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:594:2519], 1}, firstUnprocessedQuery# 0 2025-03-31T16:07:45.249747Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:594:2519], 1}, FirstUnprocessedQuery# 0 2025-03-31T16:07:45.250239Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:594:2519], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709542309, quota bytes left# 18446744073708956031, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-03-31T16:07:45.250458Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender 
[15:985:2787], Recipient [15:985:2787]: NKikimr::TEvDataShard::TEvReadContinue 2025-03-31T16:07:45.250517Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:594:2519], 1}, firstUnprocessedQuery# 0 2025-03-31T16:07:45.250567Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:594:2519], 1}, FirstUnprocessedQuery# 0 2025-03-31T16:07:45.251039Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:594:2519], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709542210, quota bytes left# 18446744073708949695, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-03-31T16:07:45.253044Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:985:2787], Recipient [15:985:2787]: NKikimr::TEvDataShard::TEvReadContinue 2025-03-31T16:07:45.253138Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:594:2519], 1}, firstUnprocessedQuery# 0 2025-03-31T16:07:45.253194Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:594:2519], 1}, FirstUnprocessedQuery# 0 2025-03-31T16:07:45.253690Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:594:2519], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709542111, quota bytes left# 18446744073708943359, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-03-31T16:07:45.253981Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:985:2787], Recipient [15:985:2787]: NKikimr::TEvDataShard::TEvReadContinue 2025-03-31T16:07:45.254046Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:594:2519], 1}, firstUnprocessedQuery# 0 2025-03-31T16:07:45.254098Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:594:2519], 1}, FirstUnprocessedQuery# 0 2025-03-31T16:07:45.254540Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:594:2519], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709542012, quota bytes left# 18446744073708937023, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-03-31T16:07:45.254752Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:985:2787], Recipient [15:985:2787]: NKikimr::TEvDataShard::TEvReadContinue 2025-03-31T16:07:45.254806Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:594:2519], 1}, firstUnprocessedQuery# 0 2025-03-31T16:07:45.254851Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:594:2519], 1}, FirstUnprocessedQuery# 0 2025-03-31T16:07:45.255342Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:594:2519], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709541913, quota bytes left# 18446744073708930687, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-03-31T16:07:45.255495Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:985:2787], Recipient [15:985:2787]: NKikimr::TEvDataShard::TEvReadContinue 2025-03-31T16:07:45.255548Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:594:2519], 1}, firstUnprocessedQuery# 0 2025-03-31T16:07:45.255600Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:594:2519], 1}, FirstUnprocessedQuery# 0 2025-03-31T16:07:45.256039Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:594:2519], 1} sends rowCount# 99, bytes# 
6336, quota rows left# 18446744073709541814, quota bytes left# 18446744073708924351, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-03-31T16:07:45.256222Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:985:2787], Recipient [15:985:2787]: NKikimr::TEvDataShard::TEvReadContinue 2025-03-31T16:07:45.256279Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:594:2519], 1}, firstUnprocessedQuery# 0 2025-03-31T16:07:45.256323Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:594:2519], 1}, FirstUnprocessedQuery# 0 2025-03-31T16:07:45.256813Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:594:2519], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709541715, quota bytes left# 18446744073708918015, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-03-31T16:07:45.256997Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:985:2787], Recipient [15:985:2787]: NKikimr::TEvDataShard::TEvReadContinue 2025-03-31T16:07:45.257052Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:594:2519], 1}, firstUnprocessedQuery# 0 2025-03-31T16:07:45.257099Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:594:2519], 1}, FirstUnprocessedQuery# 0 2025-03-31T16:07:45.257515Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:594:2519], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709541616, quota bytes left# 18446744073708911679, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-03-31T16:07:45.257706Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:985:2787], Recipient [15:985:2787]: NKikimr::TEvDataShard::TEvReadContinue 2025-03-31T16:07:45.257763Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:594:2519], 1}, firstUnprocessedQuery# 0 2025-03-31T16:07:45.258395Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:594:2519], 1}, FirstUnprocessedQuery# 0 2025-03-31T16:07:45.258586Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:594:2519], 1} sends rowCount# 1, bytes# 64, quota rows left# 18446744073709541615, quota bytes left# 18446744073708911615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-03-31T16:07:45.258671Z node 15 :TX_DATASHARD DEBUG: 72075186224037890 read iterator# {[15:594:2519], 1} finished in ReadContinue |82.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/kv/tests/ydb-tests-stress-kv-tests |82.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/kv/tests/ydb-tests-stress-kv-tests |82.6%| [LD] {RESULT} $(B)/ydb/tests/stress/kv/tests/ydb-tests-stress-kv-tests >> KqpJoin::IdxLookupPartialLeftPredicate [GOOD] |82.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut |82.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut |82.6%| [LD] {RESULT} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::Delete [GOOD] Test command err: 2025-03-31T16:07:50.324799Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 1 Generation: 1 }: sender# [1:7:2054] 2025-03-31T16:07:50.324900Z node 
1 :SCHEME_BOARD_REPLICA ERROR: [1:6:2053] Reject commit from unknown populator: sender# [1:7:2054], owner# 1, generation# 1 2025-03-31T16:07:50.325002Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:7:2054] 2025-03-31T16:07:50.325045Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Successful handshake: owner# 1, generation# 1 2025-03-31T16:07:50.729222Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 0 }: sender# [2:7:2054] 2025-03-31T16:07:50.729311Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Successful handshake: owner# 1, generation# 0 2025-03-31T16:07:50.729408Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:8:2055] 2025-03-31T16:07:50.729467Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Successful handshake: owner# 1, generation# 1 2025-03-31T16:07:50.729620Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 1 Generation: 1 }: sender# [2:8:2055] 2025-03-31T16:07:50.729675Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Commit generation: owner# 1, generation# 1 2025-03-31T16:07:50.729753Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 1 Generation: 0 }: sender# [2:7:2054] 2025-03-31T16:07:50.729813Z node 2 :SCHEME_BOARD_REPLICA ERROR: [2:6:2053] Reject commit from stale populator: sender# [2:7:2054], owner# 1, generation# 0, pending generation# 1 2025-03-31T16:07:50.729870Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 2 }: sender# [2:7:2054] 2025-03-31T16:07:50.729909Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Successful handshake: owner# 1, generation# 2 2025-03-31T16:07:50.893466Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [3:7:2054] 2025-03-31T16:07:50.893554Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Successful handshake: owner# 1, generation# 1 2025-03-31T16:07:50.893768Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 72 2025-03-31T16:07:50.893823Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description: path# path, pathId# [OwnerId: 42, LocalPathId: 1], deletion# false 2025-03-31T16:07:50.901993Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Upsert description: path# path, pathId# [OwnerId: 42, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 42, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-03-31T16:07:50.902272Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:8:2055] 2025-03-31T16:07:50.902388Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Subscribe: subscriber# [3:8:2055], path# path, domainOwnerId# 0, capabilities# 2025-03-31T16:07:50.902577Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 42, LocalPathId: 1] DomainOwnerId: 0 }: sender# [3:9:2056] 2025-03-31T16:07:50.902673Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Subscribe: subscriber# [3:9:2056], path# [OwnerId: 42, LocalPathId: 1], domainOwnerId# 0, 
capabilities# 2025-03-31T16:07:50.902856Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 40 2025-03-31T16:07:50.902902Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description: path# path, pathId# [OwnerId: 42, LocalPathId: 1], deletion# true 2025-03-31T16:07:50.902940Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Delete description: path# path, pathId# [OwnerId: 42, LocalPathId: 1] 2025-03-31T16:07:50.903135Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:10:2057] 2025-03-31T16:07:50.903196Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Subscribe: subscriber# [3:10:2057], path# path, domainOwnerId# 0, capabilities# 2025-03-31T16:07:50.903338Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 42, LocalPathId: 1] DomainOwnerId: 0 }: sender# [3:11:2058] 2025-03-31T16:07:50.903388Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Subscribe: subscriber# [3:11:2058], path# [OwnerId: 42, LocalPathId: 1], domainOwnerId# 0, capabilities# 2025-03-31T16:07:50.903555Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:12:2059] 2025-03-31T16:07:50.903593Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Subscribe: subscriber# [3:12:2059], path# path, domainOwnerId# 0, capabilities# >> KqpJoinOrder::CanonizedJoinOrderTPCH19 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinueInvisibleRowSkips-EvWrite [GOOD] Test command err: 2025-03-31T16:05:32.052600Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:05:32.052882Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:05:32.053065Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000c8a/r3tmp/tmpTryVG9/pdisk_1.dat 2025-03-31T16:05:32.471587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:05:32.527698Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:05:32.573017Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:05:32.573187Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:05:32.585811Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:05:32.682822Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:05:32.750323Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:05:32.751575Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:05:32.752167Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-31T16:05:32.752465Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:05:32.803142Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:05:32.803999Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:05:32.804147Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:05:32.806141Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:05:32.806237Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:05:32.806296Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:05:32.806776Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:05:32.806955Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:05:32.807068Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-31T16:05:32.818418Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:05:32.846153Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T16:05:32.846379Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:05:32.846559Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-31T16:05:32.846614Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:05:32.846655Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T16:05:32.846702Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:05:32.846979Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:05:32.847050Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:05:32.851894Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T16:05:32.852056Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T16:05:32.852221Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:05:32.852279Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:05:32.852357Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-31T16:05:32.852409Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-31T16:05:32.852451Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-31T16:05:32.852490Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:05:32.852548Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:05:32.852665Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:05:32.852712Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:05:32.852763Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-31T16:05:32.852883Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-03-31T16:05:32.852938Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T16:05:32.853065Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:05:32.853330Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-31T16:05:32.853428Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T16:05:32.853523Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T16:05:32.853578Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-31T16:05:32.853626Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-31T16:05:32.853673Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-03-31T16:05:32.853712Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-31T16:05:32.854031Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-31T16:05:32.854098Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-31T16:05:32.854146Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-31T16:05:32.854187Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-31T16:05:32.854248Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-31T16:05:32.854284Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-31T16:05:32.854323Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-31T16:05:32.854374Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-31T16:05:32.854404Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-31T16:05:32.856718Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-31T16:05:32.856790Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:05:32.867747Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:05:32.867850Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-31T16:05:32.867897Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-31T16:05:32.867965Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-03-31T16:05:32.868046Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-31T16:05:33.028835Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:05:33.028925Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:05:33.028977Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-31T16:05:33.029515Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:568:2495], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-03-31T16:05:33.029604Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-31T16:05:33.029746Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-31T16:05:33.029795Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-03-31T16:05:33.029857Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-03-31T16:05:33.029898Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-03-31T16:05:33.035001Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T16:05:33.035106Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:05:33.035854Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:05:33.035914Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:05:33.035990Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:05:3 ... ed 0 immediate 0 planned 0 2025-03-31T16:07:48.961316Z node 15 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-03-31T16:07:48.961384Z node 15 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-31T16:07:48.961453Z node 15 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-31T16:07:48.961769Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [15:884:2711], Recipient [15:884:2711]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:07:48.961810Z node 15 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:07:48.961870Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-31T16:07:48.961910Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:07:48.961953Z node 15 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2025-03-31T16:07:48.961996Z node 15 :TX_DATASHARD DEBUG: Found ready operation [3500:281474976715666] in PlanQueue unit at 72075186224037889 2025-03-31T16:07:48.962053Z node 15 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit PlanQueue 2025-03-31T16:07:48.962093Z node 15 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2025-03-31T16:07:48.962127Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit PlanQueue 2025-03-31T16:07:48.962161Z node 15 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit LoadTxDetails 2025-03-31T16:07:48.962193Z node 15 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit LoadTxDetails 2025-03-31T16:07:48.962353Z node 15 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 3500:281474976715666 keys extracted: 0 2025-03-31T16:07:48.962408Z node 15 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2025-03-31T16:07:48.962437Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit 
LoadTxDetails 2025-03-31T16:07:48.962465Z node 15 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-03-31T16:07:48.962509Z node 15 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit BuildAndWaitDependencies 2025-03-31T16:07:48.962561Z node 15 :TX_DATASHARD TRACE: Operation [3500:281474976715666] is the new logically complete end at 72075186224037889 2025-03-31T16:07:48.962602Z node 15 :TX_DATASHARD TRACE: Operation [3500:281474976715666] is the new logically incomplete end at 72075186224037889 2025-03-31T16:07:48.962645Z node 15 :TX_DATASHARD TRACE: Activated operation [3500:281474976715666] at 72075186224037889 2025-03-31T16:07:48.962702Z node 15 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2025-03-31T16:07:48.962740Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-03-31T16:07:48.962769Z node 15 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit CreateVolatileSnapshot 2025-03-31T16:07:48.962800Z node 15 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit CreateVolatileSnapshot 2025-03-31T16:07:48.962912Z node 15 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is ExecutedNoMoreRestarts 2025-03-31T16:07:48.962956Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit CreateVolatileSnapshot 2025-03-31T16:07:48.963000Z node 15 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit DropVolatileSnapshot 2025-03-31T16:07:48.963040Z node 15 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit DropVolatileSnapshot 2025-03-31T16:07:48.963069Z node 15 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2025-03-31T16:07:48.965013Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit DropVolatileSnapshot 2025-03-31T16:07:48.965067Z node 15 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit CompleteOperation 2025-03-31T16:07:48.965104Z node 15 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit CompleteOperation 2025-03-31T16:07:48.965333Z node 15 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is DelayComplete 2025-03-31T16:07:48.965375Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit CompleteOperation 2025-03-31T16:07:48.965425Z node 15 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit CompletedOperations 2025-03-31T16:07:48.965480Z node 15 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit CompletedOperations 2025-03-31T16:07:48.965538Z node 15 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2025-03-31T16:07:48.965570Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit CompletedOperations 2025-03-31T16:07:48.965602Z node 15 :TX_DATASHARD TRACE: Execution plan for [3500:281474976715666] at 72075186224037889 has finished 
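For readers following the trace above: the datashard moves the operation through a fixed sequence of execution units (PlanQueue, LoadTxDetails, BuildAndWaitDependencies, CreateVolatileSnapshot, DropVolatileSnapshot, CompleteOperation, CompletedOperations), and each unit reports a status such as Executed or DelayComplete. The sketch below is illustrative only, not YDB source code: it simply replays the unit order and statuses recorded for operation [3500:281474976715666] at 72075186224037889, with invented type names.

#include <cstdio>
#include <string>
#include <vector>

enum class EStatus { Executed, DelayComplete }; // statuses seen in the trace

struct TUnit {
    std::string Name;
    EStatus Status;
};

int main() {
    // Unit order taken verbatim from the [3500:281474976715666] trace above.
    const std::vector<TUnit> plan = {
        {"PlanQueue",                EStatus::Executed},
        {"LoadTxDetails",            EStatus::Executed},
        {"BuildAndWaitDependencies", EStatus::Executed},
        {"CreateVolatileSnapshot",   EStatus::Executed},
        {"DropVolatileSnapshot",     EStatus::Executed},
        {"CompleteOperation",        EStatus::DelayComplete}, // completion deferred, replayed later as "Complete execution"
        {"CompletedOperations",      EStatus::Executed},
    };
    for (const auto& unit : plan) {
        std::printf("executing on unit %s -> %s\n", unit.Name.c_str(),
                    unit.Status == EStatus::Executed ? "Executed" : "DelayComplete");
    }
    std::printf("execution plan has finished\n");
    return 0;
}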
2025-03-31T16:07:48.965664Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:07:48.965728Z node 15 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2025-03-31T16:07:48.965775Z node 15 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037889 has no attached operations 2025-03-31T16:07:48.965819Z node 15 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037889 2025-03-31T16:07:48.980133Z node 15 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 3500} 2025-03-31T16:07:48.980305Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:07:48.980397Z node 15 :TX_DATASHARD TRACE: Complete execution for [3500:281474976715666] at 72075186224037888 on unit CompleteOperation 2025-03-31T16:07:48.980541Z node 15 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715666] from 72075186224037888 at tablet 72075186224037888 send result to client [15:1089:2867], exec latency: 0 ms, propose latency: 0 ms 2025-03-31T16:07:48.980667Z node 15 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:07:48.981061Z node 15 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 3500} 2025-03-31T16:07:48.981117Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-03-31T16:07:48.981149Z node 15 :TX_DATASHARD TRACE: Complete execution for [3500:281474976715666] at 72075186224037889 on unit CompleteOperation 2025-03-31T16:07:48.981220Z node 15 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715666] from 72075186224037889 at tablet 72075186224037889 send result to client [15:1089:2867], exec latency: 0 ms, propose latency: 0 ms 2025-03-31T16:07:48.981270Z node 15 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-31T16:07:48.983464Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [15:594:2519], Recipient [15:667:2571]: NKikimrTxDataShard.TEvRead ReadId: 10 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3500 TxId: 281474976715666 } LockTxId: 1011121314 ResultFormat: FORMAT_ARROW KeysSize: 1 2025-03-31T16:07:48.983718Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-03-31T16:07:48.983835Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CheckRead 2025-03-31T16:07:48.983992Z node 15 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-03-31T16:07:48.984072Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckRead 2025-03-31T16:07:48.984243Z node 15 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-03-31T16:07:48.984310Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies 2025-03-31T16:07:48.984367Z node 15 :TX_DATASHARD TRACE: Activated operation [0:7] at 72075186224037888 2025-03-31T16:07:48.984457Z node 15 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-03-31T16:07:49.126943Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-03-31T16:07:49.127010Z node 15 
:TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit ExecuteRead 2025-03-31T16:07:49.127046Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit ExecuteRead 2025-03-31T16:07:49.127546Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 10 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3500 TxId: 281474976715666 } LockTxId: 1011121314 ResultFormat: FORMAT_ARROW } 2025-03-31T16:07:49.128094Z node 15 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 1011121314, counter# 18446744073709551615 for [OwnerId: 72057594046644480, LocalPathId: 2] 2025-03-31T16:07:49.128188Z node 15 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3500/281474976715666 2025-03-31T16:07:49.128272Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[15:594:2519], 10} after executionsCount# 1 2025-03-31T16:07:49.128368Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:594:2519], 10} sends rowCount# 1, bytes# 16, quota rows left# 18446744073709551614, quota bytes left# 18446744073709551599, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-03-31T16:07:49.128800Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:594:2519], 10} finished in read 2025-03-31T16:07:49.128952Z node 15 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-03-31T16:07:49.128988Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteRead 2025-03-31T16:07:49.129030Z node 15 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 2025-03-31T16:07:49.129063Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CompletedOperations 2025-03-31T16:07:49.129121Z node 15 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-03-31T16:07:49.129144Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2025-03-31T16:07:49.129183Z node 15 :TX_DATASHARD TRACE: Execution plan for [0:7] at 72075186224037888 has finished 2025-03-31T16:07:49.129268Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-03-31T16:07:49.129658Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 |82.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut |82.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut |82.6%| [LD] {RESULT} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> ColumnStatistics::CountMinSketchStatistics [GOOD] Test command err: 2025-03-31T16:04:50.808917Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:04:50.809199Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:04:50.809281Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d36/r3tmp/tmpDakWw3/pdisk_1.dat 2025-03-31T16:04:51.576784Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11827, node 1 2025-03-31T16:04:51.974189Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:51.974252Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:51.974283Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:51.976973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:04:51.977476Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:04:52.085676Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:52.085829Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:52.110842Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27790 2025-03-31T16:04:52.923796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:04:59.027689Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-31T16:04:59.112858Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:59.112972Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:59.150981Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:04:59.153366Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:04:59.574475Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:04:59.575207Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:04:59.575828Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:04:59.576015Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:04:59.576278Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:04:59.576405Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:04:59.576491Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:04:59.576602Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:04:59.576710Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:04:59.980797Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:59.980932Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:59.997391Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:05:00.304714Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:05:00.434870Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-31T16:05:00.434994Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-31T16:05:00.507403Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-31T16:05:00.509844Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-31T16:05:00.510089Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-31T16:05:00.510150Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-31T16:05:00.510198Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-31T16:05:00.510251Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-31T16:05:00.510303Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-31T16:05:00.510361Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-31T16:05:00.511951Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-31T16:05:00.560168Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-31T16:05:00.560295Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1877:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-31T16:05:00.593007Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1888:2606] 2025-03-31T16:05:00.600925Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1918:2621] 2025-03-31T16:05:00.616213Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1918:2621], schemeshard id = 72075186224037897 2025-03-31T16:05:00.658187Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-31T16:05:00.724180Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-31T16:05:00.724245Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-31T16:05:00.724318Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-31T16:05:00.777855Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-31T16:05:00.800878Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-31T16:05:00.801079Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-31T16:05:01.019071Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-31T16:05:01.352487Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-31T16:05:01.431988Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-31T16:05:02.833481Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2240:3075], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:05:02.833659Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:05:02.893945Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-31T16:05:03.196422Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:05:03.196684Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:05:03.197006Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:05:03.197175Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:05:03.197297Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:05:03.197424Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:05:03.197542Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:05:03.197692Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:05:03.197836Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:05:03.197959Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:05:03.198109Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:05:03.198236Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:05:03.266925Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:05:03.267048Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;desc ... ARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715661:0, at schemeshard: 72075186224037897 2025-03-31T16:05:06.177303Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; waiting actualization: 0/0.000022s 2025-03-31T16:05:17.609963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:05:17.610049Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:05:18.780858Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:05:18.780918Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:07:36.110287Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-03-31T16:07:36.110404Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-03-31T16:07:36.110447Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-03-31T16:07:36.110486Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-03-31T16:07:38.105702Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 2, at schemeshard: 72075186224037897 2025-03-31T16:07:38.105802Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 206.000000s, at schemeshard: 72075186224037897 2025-03-31T16:07:38.106369Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037897, stats size# 51 2025-03-31T16:07:38.135613Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-03-31T16:07:39.573246Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:07:39.573342Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-31T16:07:39.573403Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table. 2025-03-31T16:07:39.573450Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-03-31T16:07:39.573895Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-31T16:07:39.667240Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-03-31T16:07:39.809982Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6971:5158], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:07:39.810114Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6982:5163], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:07:39.810728Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:07:39.868748Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897 2025-03-31T16:07:40.168656Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:6985:5166], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-03-31T16:07:40.499986Z node 2 :TX_PROXY ERROR: Actor# [2:7081:5212] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:07:40.977301Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:7110:5227]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:07:40.977587Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-31T16:07:40.977679Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:7112:5229] 2025-03-31T16:07:40.977762Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:7112:5229] 2025-03-31T16:07:40.978102Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7113:5230] 2025-03-31T16:07:40.978294Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7112:5229], server id = [2:7113:5230], tablet id = 72075186224037894, status = OK 2025-03-31T16:07:40.978358Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:7113:5230], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-03-31T16:07:40.978584Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-03-31T16:07:40.978709Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-31T16:07:40.978801Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:7110:5227], StatRequests.size() = 1 2025-03-31T16:07:42.467828Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NDc4NmE4ZDItZmFlYmQ2ZjktNGQwYWQzNS00Nzg5Yzg4Yg==, TxId: 2025-03-31T16:07:42.467931Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NDc4NmE4ZDItZmFlYmQ2ZjktNGQwYWQzNS00Nzg5Yzg4Yg==, TxId: 2025-03-31T16:07:42.495995Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-31T16:07:42.520569Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-03-31T16:07:42.520665Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-03-31T16:07:42.567627Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-03-31T16:07:42.567710Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-03-31T16:07:42.666376Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:7112:5229], schemeshard count = 1 2025-03-31T16:07:45.383816Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:07:45.383891Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-31T16:07:45.383930Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 
2025-03-31T16:07:45.383973Z node 2 :STATISTICS DEBUG: [72075186224037894] Start schedule traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-31T16:07:45.387196Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-31T16:07:45.416484Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-31T16:07:45.417069Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-31T16:07:45.417155Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-31T16:07:45.421390Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-03-31T16:07:45.448659Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-03-31T16:07:45.448914Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-03-31T16:07:45.453641Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7229:5296], server id = [2:7230:5297], tablet id = 72075186224037899, status = OK 2025-03-31T16:07:45.454220Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7229:5296], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T16:07:45.511754Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-03-31T16:07:45.511887Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-03-31T16:07:45.512174Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-03-31T16:07:45.512390Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-03-31T16:07:45.512667Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7229:5296], server id = [2:7230:5297], tablet id = 72075186224037899 2025-03-31T16:07:45.512708Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:07:45.512908Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-03-31T16:07:45.517805Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-03-31T16:07:45.629230Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7250:5316]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:07:45.629474Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-31T16:07:45.629544Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7250:5316], StatRequests.size() = 1 2025-03-31T16:07:45.996362Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZDU0NmQxMDgtMTk3NzUxNWUtNWM0M2E0NmYtM2I2MDZkZjY=, TxId: 2025-03-31T16:07:45.996457Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZDU0NmQxMDgtMTk3NzUxNWUtNWM0M2E0NmYtM2I2MDZkZjY=, TxId: 2025-03-31T16:07:45.997342Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-31T16:07:46.007723Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:7263:5452]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-03-31T16:07:46.008187Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-31T16:07:46.008251Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-03-31T16:07:46.010702Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-31T16:07:46.010771Z node 1 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-03-31T16:07:46.010820Z node 1 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-03-31T16:07:46.091850Z node 1 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::IdxLookupPartialLeftPredicate [GOOD] Test command err: Trying to start YDB, gRPC: 16424, MsgBus: 5345 2025-03-31T16:07:41.207672Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488006019311675538:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:07:41.208125Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0019a9/r3tmp/tmp70ySWr/pdisk_1.dat 2025-03-31T16:07:41.933431Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:07:41.952969Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:07:41.953059Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:07:41.960492Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16424, node 1 2025-03-31T16:07:42.229952Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:07:42.229976Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:07:42.229988Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:07:42.230100Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5345 TClient is connected to server localhost:5345 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:07:43.435271Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:07:43.464654Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:07:43.484882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:07:43.678120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:07:43.879879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-03-31T16:07:43.977738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:07:46.191214Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488006019311675538:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:07:46.191294Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:07:46.825299Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488006040786513680:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:07:46.825427Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:07:47.204872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:07:47.269359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:07:47.321311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:07:47.393328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:07:47.457857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:07:47.574079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:07:47.679570Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488006045081481499:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:07:47.679701Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:07:47.680001Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488006045081481505:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:07:47.684499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:07:47.702836Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488006045081481507:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:07:47.771605Z node 1 :TX_PROXY ERROR: Actor# [1:7488006045081481561:3465] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:07:49.174237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:07:49.214023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:07:49.254808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 query_phases { duration_us: 6451 table_access { name: "/Root/Join1_1" reads { rows: 8 bytes: 136 } partitions_count: 1 } cpu_time_us: 4590 affected_shards: 1 } query_phases { duration_us: 20020 table_access { name: "/Root/Join1_2" reads { rows: 3 bytes: 57 } partitions_count: 1 } cpu_time_us: 17507 affected_shards: 1 } compilation { duration_us: 686146 cpu_time_us: 679803 } process_cpu_time_us: 588 query_plan: "{\"Plan\":{\"Plans\":[{\"PlanNodeId\":12,\"Plans\":[{\"PlanNodeId\":11,\"Plans\":[{\"PlanNodeId\":10,\"Plans\":[{\"PlanNodeId\":9,\"Plans\":[{\"E-Size\":\"No estimate\",\"PlanNodeId\":8,\"LookupKeyColumns\":[\"Key1\"],\"Node Type\":\"TableLookup\",\"Path\":\"\\/Root\\/Join1_2\",\"Columns\":[\"Fk3\",\"Key1\",\"Key2\",\"Value\"],\"E-Rows\":\"No estimate\",\"Table\":\"Join1_2\",\"Plans\":[{\"PlanNodeId\":7,\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Iterator\":\"PartitionByKey\",\"Name\":\"Iterator\"},{\"Inputs\":[],\"Name\":\"PartitionByKey\",\"Input\":\"precompute_0_0\"}],\"Node Type\":\"ConstantExpr-Aggregate\",\"Stats\":{\"ComputeNodes\":[{\"Tasks\":[{\"FinishTimeMs\":1743437270355,\"Host\":\"ghrun-tuhoqst4xe\",\"ResultRows\":2,\"ResultBytes\":7,\"OutputRows\":2,\"ComputeTimeUs\":63,\"NodeId\":1,\"OutputChannels\":[{\"ChannelId\":1,\"Rows\":2,\"DstStageId\":0,\"Bytes\":7}],\"TaskId\":1,\"OutputBytes\":7}],\"PeakMemoryUsageBytes\":2031616,\"CpuTimeUs\":1741}],\"UseLlvm\":\"undefined\",\"Output\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Bytes\":{\"Count\":1,\"Sum\":7,\"Max\":7,\"Min\":7,\"History\":[3,7]}},\"Name\":\"RESULT\",\"Push\":{\"Chunks\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Rows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2}}}],\"MaxMemoryUsage\":{\"Count\":1,\"Sum\":1048576,\"Max\":1048576,\"Min\":1048576,\"History\":[3,1048576]},\"ResultRows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Tasks\":1,\"ResultBytes\":{\"Count\":1,\"Sum\":7,\"Max\":7,\"Min\":7},\"OutputRows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"FinishedTasks\":1,\"PhysicalStageId\":0,\"StageDurationUs\":0,\"BaseTimeMs\":1743437270352,\"CpuTimeUs\":{\"Count\":1,\"Sum\":1186,\"Max\":1186,\"Min\":1186,\"History\":[3,1186]},\"OutputBytes\":{\"Count\":1,\"Sum\":7,\"Max\":7,\"Min\":7}},\"CTE Name\":\"precompute_0_0\"}],\"PlanNodeType\":\"Connection\",\"E-Cost\":\"No 
estimate\"}],\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Name\":\"TopSort\",\"Limit\":\"1001\",\"TopSortBy\":\"row.t2.Value\"},{\"Inputs\":[{\"InternalOperatorId\":3},{\"InternalOperatorId\":2}],\"E-Rows\":\"No estimate\",\"Condition\":\"t2.Key1 = t1.Fk21\",\"Name\":\"InnerJoin (MapJoin)\",\"E-Size\":\"No estimate\",\"E-Cost\":\"No estimate\"},{\"Inputs\":[],\"ToFlow\":\"precompute_0_0\",\"Name\":\"ToFlow\"},{\"Inputs\":[{\"ExternalPlanNodeId\":8}],\"E-Rows\":\"No estimate\",\"Predicate\":\"Exist(item.Key1)\",\"Name\":\"Filter\",\"E-Size\":\"No estimate\",\"E-Cost\":\"No estimate\"}],\"Node Type\":\"TopSort-InnerJoin (MapJoin)-ConstantExpr-Filter\",\"Stats\":{\"ComputeNodes\" ... ,\"Sum\":2,\"Max\":2,\"Min\":2},\"LastMessageMs\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"FirstMessageMs\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"Bytes\":{\"Count\":1,\"Sum\":34,\"Max\":34,\"Min\":34,\"History\":[4,34]}},\"Name\":\"2\",\"Push\":{\"LastMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Rows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"FirstMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Bytes\":{\"Count\":1,\"Sum\":34,\"Max\":34,\"Min\":34,\"History\":[4,34]},\"PauseMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"WaitTimeUs\":{\"Count\":1,\"Sum\":1476,\"Max\":1476,\"Min\":1476,\"History\":[4,1476]},\"WaitPeriods\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"WaitMessageMs\":{\"Count\":1,\"Max\":2,\"Min\":1}}}],\"InputRows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Tasks\":1}}],\"Node Type\":\"Precompute_0\",\"Parent Relationship\":\"InitPlan\",\"PlanNodeType\":\"Materialize\"}],\"Node Type\":\"Query\",\"Stats\":{\"Compilation\":{\"FromCache\":false,\"DurationUs\":686146,\"CpuTimeUs\":679803},\"ProcessCpuTimeUs\":588,\"TotalDurationUs\":735783,\"ResourcePoolId\":\"default\",\"QueuedTimeUs\":0},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"PlanNodeId\":4,\"Plans\":[{\"PlanNodeId\":5,\"Plans\":[{\"PlanNodeId\":6,\"Plans\":[{\"PlanNodeId\":7,\"Operators\":[{\"E-Rows\":\"No estimate\",\"Columns\":[\"Fk3\",\"Key1\",\"Key2\",\"Value\"],\"E-Size\":\"No estimate\",\"E-Cost\":\"No estimate\",\"Name\":\"TableLookup\",\"Table\":\"Join1_2\",\"LookupKeyColumns\":[\"Key1\"]}],\"Node Type\":\"TableLookup\",\"PlanNodeType\":\"Connection\"}],\"Operators\":[{\"E-Rows\":\"No estimate\",\"Predicate\":\"Exist(item.Key1)\",\"Name\":\"Filter\",\"E-Size\":\"No estimate\",\"E-Cost\":\"No estimate\"}],\"Node Type\":\"Filter\"},{\"PlanNodeId\":12,\"Plans\":[{\"PlanNodeId\":13,\"Operators\":[{\"Scan\":\"Parallel\",\"E-Size\":\"No estimate\",\"ReadRanges\":[\"Key (-\342\210\236, +\342\210\236)\"],\"Name\":\"TableFullScan\",\"Path\":\"\\/Root\\/Join1_1\",\"ReadRangesPointPrefixLen\":\"0\",\"E-Rows\":\"No estimate\",\"Table\":\"Join1_1\",\"ReadColumns\":[\"Fk21\",\"Fk22\",\"Key\",\"Value\"],\"E-Cost\":\"No estimate\"}],\"Node Type\":\"TableFullScan\"}],\"Operators\":[{\"E-Size\":\"No estimate\",\"A-SelfCpu\":1.105,\"Name\":\"Filter\",\"Predicate\":\"Exist(item.Fk21) AND item.Value == \\\"Value3\\\"\",\"A-Rows\":2,\"E-Rows\":\"No estimate\",\"A-Cpu\":1.105,\"E-Cost\":\"No estimate\",\"A-Size\":34}],\"Node Type\":\"Filter\"}],\"Operators\":[{\"E-Rows\":\"No estimate\",\"Condition\":\"t2.Key1 = 
t1.Fk21\",\"Name\":\"InnerJoin (MapJoin)\",\"E-Size\":\"No estimate\",\"E-Cost\":\"No estimate\"}],\"Node Type\":\"InnerJoin (MapJoin)\"}],\"Operators\":[{\"A-Rows\":3,\"A-SelfCpu\":2.396,\"A-Cpu\":3.501,\"A-Size\":108,\"Name\":\"TopSort\",\"Limit\":\"1001\",\"TopSortBy\":\"row.t2.Value\"}],\"Node Type\":\"TopSort\"}],\"Operators\":[{\"A-Rows\":3,\"A-SelfCpu\":0.718,\"A-Cpu\":4.219,\"A-Size\":108,\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"ResultSet_1\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"PlanNodeType\":\"Query\"}}" query_ast: "(\n(declare %kqp%tx_result_binding_0_0 (ListType (StructType \'(\'\"Fk21\" (OptionalType (DataType \'Int32))) \'(\'\"Fk22\" (OptionalType (DataType \'String))) \'(\'\"Key\" (OptionalType (DataType \'Int32))) \'(\'\"Value\" (OptionalType (DataType \'String))))))\n(let $1 (KqpTable \'\"/Root/Join1_1\" \'\"72057594046644480:16\" \'\"\" \'1))\n(let $2 \'(\'\"Fk21\" \'\"Fk22\" \'\"Key\" \'\"Value\"))\n(let $3 (KqpRowsSourceSettings $1 $2 \'() (Void) \'()))\n(let $4 (DqPhyStage \'((DqSource (DataSource \'\"KqpReadRangesSource\") $3)) (lambda \'($32) (FromFlow (Filter (ToFlow $32) (lambda \'($33) (And (Exists (Member $33 \'\"Fk21\")) (Coalesce (== (Member $33 \'\"Value\") (String \'\"Value3\")) (Bool \'false))))))) \'(\'(\'\"_logical_id\" \'1035) \'(\'\"_id\" \'\"ea769d7-e91a6662-62537863-99036195\"))))\n(let $5 (DqCnUnionAll (TDqOutput $4 \'0)))\n(let $6 (DqPhyStage \'($5) (lambda \'($34) $34) \'(\'(\'\"_logical_id\" \'1413) \'(\'\"_id\" \'\"99132fd7-e745a377-10d50c86-a14dc0ae\"))))\n(let $7 (DqCnResult (TDqOutput $6 \'0) \'()))\n(let $8 \'(\'(\'\"type\" \'\"data\")))\n(let $9 (KqpPhysicalTx \'($4 $6) \'($7) \'() $8))\n(let $10 \'\"%kqp%tx_result_binding_0_0\")\n(let $11 (DataType \'Int32))\n(let $12 (OptionalType $11))\n(let $13 (OptionalType (DataType \'String)))\n(let $14 (StructType \'(\'\"Fk21\" $12) \'(\'\"Fk22\" $13) \'(\'\"Key\" $12) \'(\'\"Value\" $13)))\n(let $15 (ListType $14))\n(let $16 %kqp%tx_result_binding_0_0)\n(let $17 \'(\'(\'\"_logical_id\" \'1078) \'(\'\"_id\" \'\"21438d63-29a50e3f-9dec492f-6a2c4af3\") \'(\'\"_partition_mode\" \'\"single\")))\n(let $18 (DqPhyStage \'() (lambda \'() (Iterator (PartitionByKey $16 (lambda \'($35) (Member $35 \'\"Fk21\")) (Void) (Void) (lambda \'($36) (Map (Filter (FlatMap $36 (lambda \'($37) (Map (Take (Nth $37 \'1) (Uint64 \'1)) (lambda \'($38) (AsStruct \'(\'\"Fk21\" (Member $38 \'\"Fk21\"))))))) (lambda \'($39) (Exists (Member $39 \'\"Fk21\")))) (lambda \'($40) (AsStruct \'(\'\"Key1\" (Member $40 \'\"Fk21\"))))))))) $17))\n(let $19 (KqpTable \'\"/Root/Join1_2\" \'\"72057594046644480:17\" \'\"\" \'1))\n(let $20 \'(\'\"Fk3\" \'\"Key1\" \'\"Key2\" \'\"Value\"))\n(let $21 (KqpCnStreamLookup (TDqOutput $18 \'0) $19 $20 (ListType (StructType \'(\'\"Key1\" $12))) \'(\'(\'\"Strategy\" \'\"LookupRows\"))))\n(let $22 (Uint64 \'\"1001\"))\n(let $23 (StructType \'(\'\"t1.Fk21\" $12) \'(\'\"t1.Fk22\" $13) \'(\'\"t1.Key\" $12) \'(\'\"t1.Value\" $13) \'(\'\"t2.Fk3\" $13) \'(\'\"t2.Key1\" $12) \'(\'\"t2.Key2\" $13) \'(\'\"t2.Value\" $13)))\n(let $24 \'(\'(\'\"_logical_id\" \'1298) \'(\'\"_id\" \'\"88c2f573-99471085-6d8026f-748e466c\") \'(\'\"_wide_channels\" $23)))\n(let $25 (DqPhyStage \'($21) (lambda \'($41) (block \'(\n (let $42 \'(\'Many \'Hashed \'Compact))\n (let $43 (SqueezeToDict (FlatMap (ToFlow $16) (lambda \'($46) (block \'(\n (let $47 (Member $46 \'\"Fk21\"))\n (let $48 (Nothing (OptionalType (TupleType $11 $14))))\n (let $49 (IfPresent $47 (lambda \'($50) (Just 
\'($50 $46))) $48))\n (return (If (Exists $47) $49 $48))\n )))) (lambda \'($51) (Nth $51 \'0)) (lambda \'($52) (Nth $52 \'1)) $42))\n (let $44 (TopSort (FlatMap $43 (lambda \'($53) (block \'(\n (let $54 \'(\'\"Fk3\" \'\"t2.Fk3\" \'\"Key1\" \'\"t2.Key1\" \'\"Key2\" \'\"t2.Key2\" \'\"Value\" \'\"t2.Value\"))\n (let $55 \'(\'\"Fk21\" \'\"t1.Fk21\" \'\"Fk22\" \'\"t1.Fk22\" \'\"Key\" \'\"t1.Key\" \'\"Value\" \'\"t1.Value\"))\n (return (MapJoinCore (OrderedFilter (ToFlow $41) (lambda \'($56) (Exists (Member $56 \'\"Key1\")))) $53 \'\"Inner\" \'(\'\"Key1\") \'(\'\"Fk21\") $54 $55 \'(\'\"t2.Key1\") \'(\'\"t1.Fk21\")))\n )))) $22 (Bool \'true) (lambda \'($57) (Member $57 \'\"t2.Value\"))))\n (let $45 (lambda \'($58) (Member $58 \'\"t1.Fk21\") (Member $58 \'\"t1.Fk22\") (Member $58 \'\"t1.Key\") (Member $58 \'\"t1.Value\") (Member $58 \'\"t2.Fk3\") (Member $58 \'\"t2.Key1\") (Member $58 \'\"t2.Key2\") (Member $58 \'\"t2.Value\")))\n (return (FromFlow (ExpandMap $44 $45)))\n))) $24))\n(let $26 (DqCnMerge (TDqOutput $25 \'0) \'(\'(\'\"7\" \'\"Asc\"))))\n(let $27 (DqPhyStage \'($26) (lambda \'($59) (FromFlow (NarrowMap (Take (ToFlow $59) $22) (lambda \'($60 $61 $62 $63 $64 $65 $66 $67) (AsStruct \'(\'\"t1.Fk21\" $60) \'(\'\"t1.Fk22\" $61) \'(\'\"t1.Key\" $62) \'(\'\"t1.Value\" $63) \'(\'\"t2.Fk3\" $64) \'(\'\"t2.Key1\" $65) \'(\'\"t2.Key2\" $66) \'(\'\"t2.Value\" $67)))))) \'(\'(\'\"_logical_id\" \'1311) \'(\'\"_id\" \'\"a3d72972-7117f234-921f482b-3f54f517\"))))\n(let $28 \'($18 $25 $27))\n(let $29 (DqCnResult (TDqOutput $27 \'0) \'()))\n(let $30 (KqpTxResultBinding $15 \'0 \'0))\n(let $31 (KqpPhysicalTx $28 \'($29) \'(\'($10 $30)) $8))\n(return (KqpPhysicalQuery \'($9 $31) \'((KqpTxResultBinding (ListType $23) \'1 \'0)) \'(\'(\'\"type\" \'\"data_query\"))))\n)\n" total_duration_us: 735783 total_cpu_time_us: 702488 query_meta: 
"{\"query_database\":\"/Root\",\"query_parameter_types\":{},\"table_metadata\":[\"{\\\"DoesExist\\\":true,\\\"Cluster\\\":\\\"db\\\",\\\"Name\\\":\\\"/Root/Join1_2\\\",\\\"SysView\\\":\\\"\\\",\\\"PathId\\\":{\\\"OwnerId\\\":72057594046644480,\\\"TableId\\\":17},\\\"SchemaVersion\\\":1,\\\"Kind\\\":1,\\\"Columns\\\":[{\\\"Name\\\":\\\"Fk3\\\",\\\"Id\\\":3,\\\"Type\\\":\\\"String\\\",\\\"TypeId\\\":4097,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"Key1\\\",\\\"Id\\\":1,\\\"Type\\\":\\\"Int32\\\",\\\"TypeId\\\":1,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"Key2\\\",\\\"Id\\\":2,\\\"Type\\\":\\\"String\\\",\\\"TypeId\\\":4097,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"Value\\\",\\\"Id\\\":4,\\\"Type\\\":\\\"String\\\",\\\"TypeId\\\":4097,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}}],\\\"KeyColunmNames\\\":[\\\"Key1\\\",\\\"Key2\\\"],\\\"RecordsCount\\\":0,\\\"DataSize\\\":0,\\\"StatsLoaded\\\":false}\",\"{\\\"DoesExist\\\":true,\\\"Cluster\\\":\\\"db\\\",\\\"Name\\\":\\\"/Root/Join1_1\\\",\\\"SysView\\\":\\\"\\\",\\\"PathId\\\":{\\\"OwnerId\\\":72057594046644480,\\\"TableId\\\":16},\\\"SchemaVersion\\\":1,\\\"Kind\\\":1,\\\"Columns\\\":[{\\\"Name\\\":\\\"Fk21\\\",\\\"Id\\\":2,\\\"Type\\\":\\\"Int32\\\",\\\"TypeId\\\":1,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"Fk22\\\",\\\"Id\\\":3,\\\"Type\\\":\\\"String\\\",\\\"TypeId\\\":4097,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"Key\\\",\\\"Id\\\":1,\\\"Type\\\":\\\"Int32\\\",\\\"TypeId\\\":1,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"Value\\\",\\\"Id\\\":4,\\\"Type\\\":\\\"String\\\",\\\"TypeId\\\":4097,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}}],\\\"KeyColunmNames\\\":[\\\"Key\\\"],\\\"RecordsCount\\\":0,\\\"DataSize\\\":0,\\\"StatsLoaded\\\":false}\"],\"table_meta_serialization_type\":2,\"cr
eated_at\":\"1743437270\",\"query_type\":\"QUERY_TYPE_SQL_DML\",\"query_syntax\":\"1\",\"query_cluster\":\"db\",\"query_id\":\"26d365c8-7daa7d00-54207d74-5c76d7e3\",\"version\":\"1.0\"}" >> KqpJoinOrder::TPCDS87+ColumnStore >> TTicketParserTest::LoginRefreshGroupsWithError [GOOD] >> TTicketParserTest::NebiusAccessServiceAuthenticationOk >> TTicketParserTest::BulkAuthorizationUnavailable [GOOD] |82.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |82.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication |82.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication |82.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication >> KqpJoinOrder::CanonizedJoinOrderTPCH10 >> TTicketParserTest::AuthenticationWithUserAccount [GOOD] >> TTicketParserTest::AuthenticationUnsupported >> KqpJoin::JoinMismatchDictKeyTypes [GOOD] |82.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests |82.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests |82.7%| [LD] {RESULT} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests >> KqpJoinOrder::TPCDS16+ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::BulkAuthorizationUnavailable [GOOD] Test command err: 2025-03-31T16:07:23.243425Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005942040588082:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:07:23.267302Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d64/r3tmp/tmpIVGAav/pdisk_1.dat 2025-03-31T16:07:24.154620Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:07:24.159540Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:07:24.164760Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:07:24.257063Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10997, node 1 2025-03-31T16:07:24.489849Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:07:24.489915Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:07:24.489963Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:07:24.527739Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:07:24.527765Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:07:24.527770Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:07:24.527874Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7214 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:07:25.083938Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:07:25.237286Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-31T16:07:25.237616Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-03-31T16:07:25.237648Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:07:25.238261Z node 1 :TICKET_PARSER DEBUG: Ticket **** (5DAB89DE) () has now permanent error message 'Token is not in correct format' 2025-03-31T16:07:25.238279Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A2 error Token is not in correct format 2025-03-31T16:07:25.238314Z node 1 :TICKET_PARSER ERROR: Ticket **** (5DAB89DE): Token is not in correct format 2025-03-31T16:07:28.783401Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488005967010981301:2203];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d64/r3tmp/tmpmmH5CA/pdisk_1.dat 2025-03-31T16:07:28.843680Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:07:29.053981Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:07:29.090124Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:07:29.090258Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:07:29.094040Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30053, node 2 2025-03-31T16:07:29.275687Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:07:29.275712Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:07:29.275720Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:07:29.275826Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11702 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-31T16:07:29.704497Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:07:29.741828Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-03-31T16:07:29.741868Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:07:29.741881Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-31T16:07:29.742033Z node 2 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read something.write) 2025-03-31T16:07:29.742103Z node 2 :GRPC_CLIENT DEBUG: [517000031b88] Connect to grpc://localhost:26153 2025-03-31T16:07:29.789865Z node 2 :GRPC_CLIENT DEBUG: [517000031b88] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.write" } } result_filter: ALL_FAILED } 2025-03-31T16:07:29.821410Z node 2 :GRPC_CLIENT DEBUG: [517000031b88] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } results { items { permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission_denied_error { message: "Access Denied" } } } } 2025-03-31T16:07:29.826511Z node 2 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write access denied for subject "user1@as" 2025-03-31T16:07:29.826639Z node 2 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-03-31T16:07:29.827568Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-03-31T16:07:29.827590Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:07:29.827598Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-31T16:07:29.827663Z node 2 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read something.write) 2025-03-31T16:07:29.827879Z node 2 :GRPC_CLIENT DEBUG: [517000031b88] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: 
"bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.write" } } result_filter: ALL_FAILED } 2025-03-31T16:07:29.830102Z node 2 :GRPC_CLIENT DEBUG: [517000031b88] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } results { items { permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission_denied_error { message: "Access Denied" } } } } 2025-03-31T16:07:29.835258Z node 2 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write access denied for subject "user1@as" 2025-03-31T16:07:29.835344Z node 2 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now permanent error message 'something.write for folder_id aaaa1234 - Access Denied' 2025-03-31T16:07:34.080508Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488005989135258294:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:07:34.080576Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d64/r3tmp/tmp4VMY0q/pdisk_1.dat 2025-03-31T16:07:34.574557Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:07:34.617529Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:07:34.617623Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:07:34.619046Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1710, node 3 2025-03-31T16:07:34.771757Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:07:34.771789Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:07:34.771816Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:07:34.771965Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25484 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 Sche ... 
3Z node 3 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of login1@passport 2025-03-31T16:07:39.820349Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488006012101602520:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:07:39.820401Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d64/r3tmp/tmpP09411/pdisk_1.dat 2025-03-31T16:07:40.624546Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:07:40.720560Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:07:40.733764Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:07:40.744307Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8616, node 4 2025-03-31T16:07:41.087810Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:07:41.087836Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:07:41.087851Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:07:41.088002Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2623 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:07:41.636950Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:07:41.651898Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:07:41.659466Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-03-31T16:07:41.659506Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:07:41.659517Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-31T16:07:41.659650Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read somewhere.sleep something.list something.write something.eat) 2025-03-31T16:07:41.659715Z node 4 :GRPC_CLIENT DEBUG: [51700003d508] Connect to grpc://localhost:27109 2025-03-31T16:07:41.660898Z node 4 :GRPC_CLIENT DEBUG: [51700003d508] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "somewhere.sleep" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.list" ...(truncated) } 2025-03-31T16:07:41.687791Z node 4 :GRPC_CLIENT DEBUG: [51700003d508] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } results { items { permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission_denied_error { message: "Access Denied" } } items { permission: "somewhere.sleep" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission_denied_error { message: "Access Denied" } } items { permission: "something.list" r...(truncated) } 2025-03-31T16:07:41.689077Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read access denied for subject "user1@as" 2025-03-31T16:07:41.689100Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission somewhere.sleep access denied for subject "user1@as" 2025-03-31T16:07:41.689126Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.list access denied for subject "user1@as" 2025-03-31T16:07:41.689142Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.eat access denied for subject "user1@as" 2025-03-31T16:07:41.689163Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for UserAccount(user1@as) 2025-03-31T16:07:41.689369Z node 4 :GRPC_CLIENT DEBUG: [51700003d888] Connect to grpc://localhost:30142 2025-03-31T16:07:41.690368Z node 4 :GRPC_CLIENT DEBUG: [51700003d888] Request GetUserAccountRequest { user_account_id: "user1" } 2025-03-31T16:07:41.707117Z node 4 :GRPC_CLIENT DEBUG: [51700003d888] Response UserAccount { yandex_passport_user_account { login: "login1" } } 2025-03-31T16:07:41.711525Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of login1@passport 2025-03-31T16:07:46.813908Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7488006044280341618:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:07:46.814053Z node 5 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d64/r3tmp/tmpq9bh88/pdisk_1.dat 2025-03-31T16:07:47.259719Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:07:47.321567Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:07:47.321692Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:07:47.334925Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7975, node 5 2025-03-31T16:07:47.541702Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:07:47.541731Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:07:47.541740Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:07:47.541879Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19010 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:07:47.988837Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:07:48.013782Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-03-31T16:07:48.013818Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:07:48.013828Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-31T16:07:48.013930Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read something.write) 2025-03-31T16:07:48.013984Z node 5 :GRPC_CLIENT DEBUG: [517000053d88] Connect to grpc://localhost:30557 2025-03-31T16:07:48.015000Z node 5 :GRPC_CLIENT DEBUG: [517000053d88] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.write" } } result_filter: ALL_FAILED } 2025-03-31T16:07:48.024847Z node 5 :GRPC_CLIENT DEBUG: [517000053d88] Status 14 Service Unavailable 2025-03-31T16:07:48.025373Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2025-03-31T16:07:48.025395Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a retryable error "Service Unavailable" retryable: 1 2025-03-31T16:07:48.025431Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-03-31T16:07:48.025538Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read something.write) 2025-03-31T16:07:48.025833Z node 5 :GRPC_CLIENT DEBUG: [517000053d88] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.write" } } result_filter: ALL_FAILED } 2025-03-31T16:07:48.033380Z node 5 :GRPC_CLIENT DEBUG: [517000053d88] Status 14 Service Unavailable 2025-03-31T16:07:48.033593Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2025-03-31T16:07:48.033611Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a retryable error "Service Unavailable" retryable: 1 2025-03-31T16:07:48.033634Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' >> TTicketParserTest::NebiusAuthorizationUnavailable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::JoinMismatchDictKeyTypes [GOOD] Test command err: Trying to start YDB, gRPC: 25884, MsgBus: 18619 2025-03-31T16:07:44.383985Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488006034753755475:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:07:44.384780Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0019a5/r3tmp/tmpkpUmZG/pdisk_1.dat 2025-03-31T16:07:45.291321Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:07:45.350357Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:07:45.350456Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:07:45.356165Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25884, node 1 2025-03-31T16:07:45.627650Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:07:45.627688Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:07:45.627698Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:07:45.627824Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18619 TClient is connected to server localhost:18619 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:07:46.903668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:07:46.982661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:07:47.355800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:07:47.926767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:07:48.156350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
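The KQP_WORKLOAD_SERVICE warnings repeated below ("Resource pool default not found or you don't have access permissions") show the workload manager racing to auto-create the default pool: the fetch fails with NOT_FOUND, an ESchemeOpCreateResourcePool operation is proposed, and the later "path exist, request accepts it" message confirms the pool already landed at .metadata/workload_manager/pools/default. For reference, an explicitly created pool would look roughly like the YQL below; the statement form follows YDB's resource pool DDL, and the pool name and settings are illustrative, not those of the auto-created default pool:

    CREATE RESOURCE POOL my_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10,  -- illustrative value
        QUEUE_SIZE = 100              -- illustrative value
    );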
2025-03-31T16:07:49.368455Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488006034753755475:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:07:49.368499Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:07:51.453657Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488006064818528213:2412], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:07:51.453806Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:07:51.777220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:07:51.841013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:07:51.881657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:07:51.920611Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:07:51.962494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:07:52.017102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:07:52.090464Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488006069113496022:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:07:52.090542Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:07:52.090894Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488006069113496027:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:07:52.095958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:07:52.114113Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-31T16:07:52.114393Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488006069113496029:2467], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:07:52.203674Z node 1 :TX_PROXY ERROR: Actor# [1:7488006069113496086:3460] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:07:53.316154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v1-std] [GOOD] |82.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api |82.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api |82.7%| [LD] {RESULT} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api >> DataShardReadIteratorBatchMode::RangeToInclusive [GOOD] >> DataShardReadIteratorBatchMode::RangeToNonInclusive >> YdbOlapStore::LogExistingUserId [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::NebiusAuthorizationUnavailable [GOOD] Test command err: 2025-03-31T16:07:23.975057Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005944434684952:2200];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:07:23.978830Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d66/r3tmp/tmprvp7gh/pdisk_1.dat 2025-03-31T16:07:24.758255Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:07:24.758345Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:07:24.762447Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:07:24.818769Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19761, node 1 2025-03-31T16:07:25.101594Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:07:25.101617Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:07:25.101624Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:07:25.101750Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4964 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:07:25.566451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:07:25.595102Z node 1 :TICKET_PARSER DEBUG: Ticket 0DAE6848D50297B475CF58B13D5F425F692B8625B821B7F083AA9E4B8EF2A56E () has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2025-03-31T16:07:30.290098Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488005972397395554:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:07:30.291105Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d66/r3tmp/tmpdiWixS/pdisk_1.dat 2025-03-31T16:07:30.610315Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:07:30.622034Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:07:30.622125Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:07:30.628481Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25676, node 2 2025-03-31T16:07:30.888931Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:07:30.888953Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:07:30.888960Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:07:30.889075Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30610 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-31T16:07:31.482559Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:07:31.503282Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:07:31.509582Z node 2 :TICKET_PARSER DEBUG: Ticket 453618DBFC360DF67D7F638947D940423728B633320277BAB907BB488CBC79AD () has now permanent error message 'Cannot create token from certificate. Client`s certificate and server`s certificate have different issuers' 2025-03-31T16:07:31.510179Z node 2 :TICKET_PARSER ERROR: Ticket 453618DBFC360DF67D7F638947D940423728B633320277BAB907BB488CBC79AD: Cannot create token from certificate. Client`s certificate and server`s certificate have different issuers 2025-03-31T16:07:36.614960Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488006001163698324:2268];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:07:36.615221Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d66/r3tmp/tmplPAn3b/pdisk_1.dat 2025-03-31T16:07:36.879230Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:07:36.928329Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:07:36.928453Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:07:36.936153Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 65337, node 3 2025-03-31T16:07:37.045792Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:07:37.045816Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:07:37.045834Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:07:37.045970Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6904 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:07:37.400630Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:07:37.417750Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:07:37.422029Z node 3 :TICKET_PARSER DEBUG: Ticket 318E2D219130D78070906FD96948FFB41B209EF110F2402AF00BEE3312D6C5D1 () has now permanent error message 'Cannot create token from certificate. Client certificate failed verification' 2025-03-31T16:07:37.422622Z node 3 :TICKET_PARSER ERROR: Ticket 318E2D219130D78070906FD96948FFB41B209EF110F2402AF00BEE3312D6C5D1: Cannot create token from certificate. Client certificate failed verification 2025-03-31T16:07:42.410573Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488006026230668728:2086];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:07:42.516727Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d66/r3tmp/tmp0duJRf/pdisk_1.dat 2025-03-31T16:07:42.757101Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:07:42.763032Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:07:42.764516Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:07:42.768864Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29975, node 4 2025-03-31T16:07:42.931912Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:07:42.931940Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:07:42.931947Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:07:42.932116Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16937 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:07:43.420305Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:07:43.436669Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:07:43.444007Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-03-31T16:07:43.444071Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:07:43.444083Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-31T16:07:43.444629Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read something.write) 2025-03-31T16:07:43.444723Z node 4 :GRPC_CLIENT DEBUG: [51700009dc88] Connect to grpc://localhost:1371 2025-03-31T16:07:43.449877Z node 4 :GRPC_CLIENT DEBUG: [51700009dc88] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } checks { key: 1 value { permission { name: "something.write" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } checks { key: 1 value { permission { name: "something.write" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { account { user_account { id: "user1" } } } } results { key: 1 value { resultCode: PERMISSION_DENIED } } 0: "OK" 2025-03-31T16:07:43.463530Z node 4 :GRPC_CLIENT DEBUG: [51700009dc88] Response AuthorizeResponse { results { key: 0 value { account { user_account { id: "user1" } } } } results { key: 1 value { resultCode: PERMISSION_DENIED } } } 2025-03-31T16:07:43.465409Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write access denied for subject "user1@as" 2025-03-31T16:07:43.465592Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-03-31T16:07:43.472733Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-03-31T16:07:43.472763Z node 4 :TICKET_PARSER TRACE: 
CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:07:43.472772Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-31T16:07:43.472844Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read something.write) 2025-03-31T16:07:43.473179Z node 4 :GRPC_CLIENT DEBUG: [51700009dc88] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } checks { key: 1 value { permission { name: "something.write" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } checks { key: 1 value { permission { name: "something.write" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { account { user_account { id: "user1" } } } } results { key: 1 value { resultCode: PERMISSION_DENIED } } 0: "OK" 2025-03-31T16:07:43.475932Z node 4 :GRPC_CLIENT DEBUG: [51700009dc88] Response AuthorizeResponse { results { key: 0 value { account { user_account { id: "user1" } } } } results { key: 1 value { resultCode: PERMISSION_DENIED } } } 2025-03-31T16:07:43.476240Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write access denied for subject "user1@as" 2025-03-31T16:07:43.476317Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now permanent error message 'something.write for aaaa1234 bbbb4554 - PERMISSION_DENIED' 2025-03-31T16:07:50.048167Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7488006057765489465:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:07:50.056682Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d66/r3tmp/tmpQUcf3g/pdisk_1.dat 2025-03-31T16:07:50.395963Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:07:50.426479Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:07:50.426586Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:07:50.432541Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24409, node 5 2025-03-31T16:07:50.519910Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:07:50.519939Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:07:50.519947Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:07:50.520109Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26755 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:07:50.919268Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:07:50.932189Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-03-31T16:07:50.932226Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:07:50.932236Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-31T16:07:50.932306Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read something.write) 2025-03-31T16:07:50.932356Z node 5 :GRPC_CLIENT DEBUG: [5170000d4e88] Connect to grpc://localhost:29081 2025-03-31T16:07:50.933417Z node 5 :GRPC_CLIENT DEBUG: [5170000d4e88] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } checks { key: 1 value { permission { name: "something.write" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } checks { key: 1 value { permission { name: "something.write" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { account { user_account { id: "user1" } } } } 14: "Service Unavailable" 2025-03-31T16:07:50.959962Z node 5 :GRPC_CLIENT DEBUG: [5170000d4e88] Status 14 Service Unavailable 2025-03-31T16:07:50.960558Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2025-03-31T16:07:50.960588Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a retryable error "Service Unavailable" retryable: 1 2025-03-31T16:07:50.960628Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-03-31T16:07:50.960717Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read something.write) 2025-03-31T16:07:50.961093Z node 5 :GRPC_CLIENT DEBUG: [5170000d4e88] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } 
container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } checks { key: 1 value { permission { name: "something.write" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } 2025-03-31T16:07:50.966844Z node 5 :GRPC_CLIENT DEBUG: [5170000d4e88] Status 1 CANCELLED 2025-03-31T16:07:50.968483Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a retryable error "CANCELLED" retryable: 1 2025-03-31T16:07:50.968510Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a retryable error "CANCELLED" retryable: 1 2025-03-31T16:07:50.968544Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'CANCELLED' |82.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction |82.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction |82.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction |82.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache |82.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache |82.8%| [LD] {RESULT} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache >> TTicketParserTest::NebiusAccessServiceAuthenticationOk [GOOD] >> TTicketParserTest::NebiusAuthenticationRetryError |82.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut |82.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut |82.8%| [LD] {RESULT} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut >> TTicketParserTest::AuthenticationUnsupported [GOOD] >> TTicketParserTest::AuthenticationUnknown >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeRightBorder-EvWrite [GOOD] >> TPersQueueTest::NoDecompressionMemoryLeaks [GOOD] >> TPersQueueTest::PreferredCluster_TwoEnabledClustersAndWriteSessionsWithDifferentPreferredCluster_SessionWithMismatchedClusterDiesAndOthersAlive |82.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/log/tests/ydb-tests-stress-log-tests |82.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/log/tests/ydb-tests-stress-log-tests |82.8%| [LD] {RESULT} $(B)/ydb/tests/stress/log/tests/ydb-tests-stress-log-tests |82.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/kqprun/kqprun |82.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/kqprun/kqprun |82.8%| [LD] {RESULT} $(B)/ydb/tests/tools/kqprun/kqprun |82.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc |82.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc |82.8%| [LD] {RESULT} $(B)/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc >> test_sql_streaming.py::test[suites-GroupByHoppingWindowNoKey-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowPercentile-default.txt] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeRightBorder-EvWrite [GOOD] Test command err: 2025-03-31T16:05:32.590478Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:05:32.590758Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:05:32.590948Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000c66/r3tmp/tmp4JQIrJ/pdisk_1.dat 2025-03-31T16:05:32.982836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:05:33.038348Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:05:33.084956Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:05:33.085124Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:05:33.100394Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:05:33.200387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:05:33.248270Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:05:33.249568Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:05:33.250100Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-31T16:05:33.250394Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:05:33.305526Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:05:33.306341Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:05:33.306467Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:05:33.308426Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:05:33.308522Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:05:33.308603Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:05:33.309020Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:05:33.309192Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:05:33.309286Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-31T16:05:33.321200Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:05:33.368597Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T16:05:33.368829Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:05:33.368982Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-31T16:05:33.369026Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:05:33.369065Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T16:05:33.369115Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:05:33.369369Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:05:33.369436Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:05:33.369780Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T16:05:33.369902Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T16:05:33.370028Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:05:33.370084Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:05:33.370139Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-31T16:05:33.370179Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-31T16:05:33.370216Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-31T16:05:33.370255Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:05:33.370301Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:05:33.370405Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:05:33.370442Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:05:33.370508Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-31T16:05:33.370614Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-03-31T16:05:33.370659Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T16:05:33.370794Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:05:33.371054Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-31T16:05:33.371158Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T16:05:33.371263Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T16:05:33.371323Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-31T16:05:33.371374Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-31T16:05:33.371418Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-03-31T16:05:33.371458Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-31T16:05:33.371769Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-31T16:05:33.371807Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-31T16:05:33.371845Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-31T16:05:33.371883Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-31T16:05:33.371950Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-31T16:05:33.371992Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-31T16:05:33.372047Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-31T16:05:33.372088Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-31T16:05:33.372139Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-31T16:05:33.374005Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-31T16:05:33.374066Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:05:33.385038Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:05:33.385130Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-31T16:05:33.385172Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-31T16:05:33.385233Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-03-31T16:05:33.385339Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-31T16:05:33.556297Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:05:33.556377Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:05:33.556427Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-31T16:05:33.556988Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:568:2495], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-03-31T16:05:33.557035Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-31T16:05:33.557170Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-31T16:05:33.557234Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-03-31T16:05:33.557296Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-03-31T16:05:33.557342Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-03-31T16:05:33.571396Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T16:05:33.571502Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:05:33.572381Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:05:33.572435Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:05:33.572503Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:05:3 ... planned 0 immediate 0 planned 0 2025-03-31T16:07:59.341412Z node 15 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-03-31T16:07:59.341482Z node 15 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-31T16:07:59.341543Z node 15 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-31T16:07:59.341814Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [15:884:2711], Recipient [15:884:2711]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:07:59.341853Z node 15 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:07:59.341907Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-31T16:07:59.341951Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:07:59.341986Z node 15 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2025-03-31T16:07:59.342027Z node 15 :TX_DATASHARD DEBUG: Found ready operation [3500:281474976715666] in PlanQueue unit at 72075186224037889 2025-03-31T16:07:59.342068Z node 15 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit PlanQueue 2025-03-31T16:07:59.342120Z node 15 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2025-03-31T16:07:59.342156Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit PlanQueue 2025-03-31T16:07:59.342189Z node 15 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit LoadTxDetails 2025-03-31T16:07:59.342222Z node 15 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit LoadTxDetails 2025-03-31T16:07:59.342346Z node 15 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 3500:281474976715666 keys extracted: 0 2025-03-31T16:07:59.342385Z node 15 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2025-03-31T16:07:59.342410Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit 
LoadTxDetails 2025-03-31T16:07:59.342434Z node 15 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-03-31T16:07:59.342460Z node 15 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit BuildAndWaitDependencies 2025-03-31T16:07:59.342503Z node 15 :TX_DATASHARD TRACE: Operation [3500:281474976715666] is the new logically complete end at 72075186224037889 2025-03-31T16:07:59.342545Z node 15 :TX_DATASHARD TRACE: Operation [3500:281474976715666] is the new logically incomplete end at 72075186224037889 2025-03-31T16:07:59.342588Z node 15 :TX_DATASHARD TRACE: Activated operation [3500:281474976715666] at 72075186224037889 2025-03-31T16:07:59.342633Z node 15 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2025-03-31T16:07:59.342657Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-03-31T16:07:59.342682Z node 15 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit CreateVolatileSnapshot 2025-03-31T16:07:59.342710Z node 15 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit CreateVolatileSnapshot 2025-03-31T16:07:59.342818Z node 15 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is ExecutedNoMoreRestarts 2025-03-31T16:07:59.342848Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit CreateVolatileSnapshot 2025-03-31T16:07:59.342898Z node 15 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit DropVolatileSnapshot 2025-03-31T16:07:59.342942Z node 15 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit DropVolatileSnapshot 2025-03-31T16:07:59.342970Z node 15 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2025-03-31T16:07:59.342992Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit DropVolatileSnapshot 2025-03-31T16:07:59.343018Z node 15 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit CompleteOperation 2025-03-31T16:07:59.343045Z node 15 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit CompleteOperation 2025-03-31T16:07:59.343321Z node 15 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is DelayComplete 2025-03-31T16:07:59.343379Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit CompleteOperation 2025-03-31T16:07:59.343422Z node 15 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit CompletedOperations 2025-03-31T16:07:59.343462Z node 15 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit CompletedOperations 2025-03-31T16:07:59.343497Z node 15 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2025-03-31T16:07:59.343545Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit CompletedOperations 2025-03-31T16:07:59.343581Z node 15 :TX_DATASHARD TRACE: Execution plan for [3500:281474976715666] at 72075186224037889 has finished 
2025-03-31T16:07:59.343630Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:07:59.343694Z node 15 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2025-03-31T16:07:59.343743Z node 15 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037889 has no attached operations 2025-03-31T16:07:59.343788Z node 15 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037889 2025-03-31T16:07:59.355160Z node 15 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 3500} 2025-03-31T16:07:59.355332Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:07:59.355452Z node 15 :TX_DATASHARD TRACE: Complete execution for [3500:281474976715666] at 72075186224037888 on unit CompleteOperation 2025-03-31T16:07:59.355580Z node 15 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715666] from 72075186224037888 at tablet 72075186224037888 send result to client [15:1089:2867], exec latency: 0 ms, propose latency: 0 ms 2025-03-31T16:07:59.355691Z node 15 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:07:59.355888Z node 15 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 3500} 2025-03-31T16:07:59.355934Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-03-31T16:07:59.355964Z node 15 :TX_DATASHARD TRACE: Complete execution for [3500:281474976715666] at 72075186224037889 on unit CompleteOperation 2025-03-31T16:07:59.356004Z node 15 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715666] from 72075186224037889 at tablet 72075186224037889 send result to client [15:1089:2867], exec latency: 0 ms, propose latency: 0 ms 2025-03-31T16:07:59.356040Z node 15 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-31T16:07:59.358192Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [15:594:2519], Recipient [15:667:2571]: NKikimrTxDataShard.TEvRead ReadId: 3 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3500 TxId: 281474976715666 } LockTxId: 1011121314 ResultFormat: FORMAT_ARROW KeysSize: 1 2025-03-31T16:07:59.358448Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-03-31T16:07:59.358582Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CheckRead 2025-03-31T16:07:59.359819Z node 15 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-03-31T16:07:59.359916Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckRead 2025-03-31T16:07:59.359988Z node 15 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-03-31T16:07:59.360051Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies 2025-03-31T16:07:59.360105Z node 15 :TX_DATASHARD TRACE: Activated operation [0:7] at 72075186224037888 2025-03-31T16:07:59.360175Z node 15 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-03-31T16:07:59.360205Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-03-31T16:07:59.360250Z node 15 
:TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit ExecuteRead 2025-03-31T16:07:59.360278Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit ExecuteRead 2025-03-31T16:07:59.360521Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 3 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3500 TxId: 281474976715666 } LockTxId: 1011121314 ResultFormat: FORMAT_ARROW } 2025-03-31T16:07:59.361030Z node 15 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 1011121314, counter# 18446744073709551615 for [OwnerId: 72057594046644480, LocalPathId: 2] 2025-03-31T16:07:59.361124Z node 15 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3500/281474976715666 2025-03-31T16:07:59.361213Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[15:594:2519], 3} after executionsCount# 1 2025-03-31T16:07:59.361331Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:594:2519], 3} sends rowCount# 1, bytes# 16, quota rows left# 18446744073709551614, quota bytes left# 18446744073709551599, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-03-31T16:07:59.361627Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:594:2519], 3} finished in read 2025-03-31T16:07:59.361738Z node 15 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-03-31T16:07:59.361782Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteRead 2025-03-31T16:07:59.361810Z node 15 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 2025-03-31T16:07:59.361840Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CompletedOperations 2025-03-31T16:07:59.361899Z node 15 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-03-31T16:07:59.361919Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2025-03-31T16:07:59.361953Z node 15 :TX_DATASHARD TRACE: Execution plan for [0:7] at 72075186224037888 has finished 2025-03-31T16:07:59.362028Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-03-31T16:07:59.362250Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 >> test_sql_streaming.py::test[suites-GroupByHopPercentile-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHopTimeExtractorUnusedColumns-default.txt] |82.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |82.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |82.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |82.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |82.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |82.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |82.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |82.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |82.9%| [TM] {asan, default-linux-x86_64, release} 
ydb/services/metadata/secret/ut/unittest |82.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::ValidationQueryService |82.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |82.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> TConsoleTests::TestAlterTenantTooManyStorageResourcesForRunning [GOOD] >> TConsoleTests::TestAlterTenantTooManyStorageResourcesForRunningExtSubdomain >> test_sql_streaming.py::test[suites-ReadTopicWithMetadataInsideFilter-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-ReadTopicWithMetadataNestedDeep-default.txt] >> TTicketParserTest::AuthenticationUnknown [GOOD] >> TTicketParserTest::Authorization >> TReplicaTest::HandshakeWithStaleGeneration >> TReplicaTest::HandshakeWithStaleGeneration [GOOD] >> TReplicaTest::IdempotencyUpdatesAliveSubscriber >> TPersQueueTest::ReadRuleServiceType [GOOD] >> TPersQueueTest::ReadRuleServiceTypeLimit >> KqpIndexLookupJoin::CheckCastUtf8ToString-StreamLookupJoin-NotNull >> TReplicaTest::IdempotencyUpdatesAliveSubscriber [GOOD] >> TReplicaTest::IdempotencyUpdatesVariant2 >> ColumnBuildTest::ValidDefaultValue >> TReplicaTest::IdempotencyUpdatesVariant2 [GOOD] |82.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut |82.9%| [LD] {RESULT} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut |82.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut |82.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> DataShardReadIteratorBatchMode::RangeToNonInclusive [GOOD] >> DataShardReadIteratorBatchMode::MultipleRanges |82.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |82.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::IdempotencyUpdatesVariant2 [GOOD] Test command err: 2025-03-31T16:08:05.056713Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 2 }: sender# [1:7:2054] 2025-03-31T16:08:05.056791Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Successful handshake: owner# 1, generation# 2 2025-03-31T16:08:05.056931Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:7:2054] 2025-03-31T16:08:05.056966Z node 1 :SCHEME_BOARD_REPLICA ERROR: [1:6:2053] Reject handshake from stale populator: sender# [1:7:2054], owner# 1, generation# 1, pending generation# 2 2025-03-31T16:08:05.352151Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:7:2054] 2025-03-31T16:08:05.352241Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Successful handshake: owner# 1, generation# 1 2025-03-31T16:08:05.352373Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 1] DomainOwnerId: 0 }: sender# [2:8:2055] 2025-03-31T16:08:05.352444Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Upsert description: path# [OwnerId: 1, LocalPathId: 1] 2025-03-31T16:08:05.352606Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Subscribe: subscriber# [2:8:2055], path# [OwnerId: 1, LocalPathId: 1], domainOwnerId# 0, capabilities# 2025-03-31T16:08:05.352816Z node 2 :SCHEME_BOARD_REPLICA 
DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:7:2054], cookie# 0, event size# 72 2025-03-31T16:08:05.352857Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-03-31T16:08:05.360798Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-03-31T16:08:05.361066Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:7:2054], cookie# 0, event size# 40 2025-03-31T16:08:05.361108Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2025-03-31T16:08:05.361141Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2025-03-31T16:08:05.361245Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:7:2054], cookie# 0, event size# 72 2025-03-31T16:08:05.361287Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-03-31T16:08:05.361330Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Path was explicitly deleted, ignoring: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2025-03-31T16:08:05.361411Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:7:2054], cookie# 0, event size# 72 2025-03-31T16:08:05.361472Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], deletion# false 2025-03-31T16:08:05.361541Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 2], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-03-31T16:08:05.361682Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 2] DomainOwnerId: 0 }: sender# [2:9:2056] 2025-03-31T16:08:05.361747Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Subscribe: subscriber# [2:9:2056], path# [OwnerId: 1, LocalPathId: 2], domainOwnerId# 0, capabilities# 2025-03-31T16:08:05.655640Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [3:7:2054] 2025-03-31T16:08:05.655721Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Successful handshake: owner# 1, generation# 1 2025-03-31T16:08:05.655851Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 72 2025-03-31T16:08:05.655892Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-03-31T16:08:05.655966Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, 
LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-03-31T16:08:05.656062Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 72 2025-03-31T16:08:05.656106Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], deletion# false 2025-03-31T16:08:05.656156Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2025-03-31T16:08:05.656207Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 2], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-03-31T16:08:05.656267Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 40 2025-03-31T16:08:05.656296Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], deletion# true 2025-03-31T16:08:05.656328Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 2] 2025-03-31T16:08:05.656413Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 72 2025-03-31T16:08:05.656447Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-03-31T16:08:05.656484Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Path was explicitly deleted, ignoring: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2025-03-31T16:08:05.656545Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 72 2025-03-31T16:08:05.656572Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], deletion# false 2025-03-31T16:08:05.656600Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Path was explicitly deleted, ignoring: path# path, pathId# [OwnerId: 1, LocalPathId: 2] >> KqpJoin::TwoJoinsWithQueryService |83.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v0-std] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbOlapStore::LogExistingUserId [GOOD] Test command err: 2025-03-31T16:04:36.994326Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005226060166354:2205];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:04:36.994461Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dea/r3tmp/tmpJLR5Tn/pdisk_1.dat 2025-03-31T16:04:38.213398Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:04:38.363367Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:38.411267Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:38.411369Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:38.427487Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18314, node 1 2025-03-31T16:04:38.765219Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:38.765244Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:38.765250Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:38.765365Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20357 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:04:39.628522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:20357 2025-03-31T16:04:40.131770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:04:40.341943Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488005243240036546:2329];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:04:40.342147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488005243240036546:2329];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:04:40.342429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488005243240036546:2329];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:04:40.342594Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488005243240036546:2329];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:04:40.342714Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488005243240036546:2329];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:04:40.342825Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488005243240036546:2329];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:04:40.342915Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488005243240036546:2329];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:04:40.343021Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488005243240036546:2329];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:04:40.343225Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488005243240036546:2329];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:04:40.343386Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488005243240036546:2329];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:04:40.343526Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488005243240036546:2329];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:04:40.343645Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488005243240036546:2329];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:04:40.440160Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488005243240036540:2328];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:04:40.440224Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7488005243240036540:2328];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:04:40.440482Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488005243240036540:2328];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:04:40.440601Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488005243240036540:2328];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:04:40.440752Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488005243240036540:2328];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:04:40.440873Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488005243240036540:2328];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:04:40.440977Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488005243240036540:2328];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:04:40.446244Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488005243240036540:2328];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:04:40.446412Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488005243240036540:2328];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:04:40.446534Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488005243240036540:2328];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:04:40.446663Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488005243240036540:2328];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:04:40.446794Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488005243240036540:2328];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:04:40.531777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488005243240036536:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:04:40.531846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488005243240036536:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:04:40.532085Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7488005243240036536:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:04:40.532229Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488005243240036536:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:04:40.532340Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488005243240036536:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:04:40.532479Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488005243240036536:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:04:40.532588Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488005243240036536:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:04:40.532692Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488005243240036536:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:04:40.532826Z no ... elivery in input channelId: 63, seqNo: [1] 2025-03-31T16:07:54.183755Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715670, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 64, seqNo: [1] 2025-03-31T16:07:54.183782Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715670, task: 65. Tasks execution finished 2025-03-31T16:07:54.183818Z node 28 :KQP_COMPUTE DEBUG: SelfId: [28:7488006070050806823:3620], TxId: 281474976715670, task: 65. Ctx: { TraceId : 01jqpfb4239ag4svmk2bex8p6a. SessionId : ydb://session/3?node_id=28&id=ZDBiOWY2MjktOTZmMGMzMTctOTE4MDA3NzMtMmNiZmE1ZTk=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Compute state finished. All channels and sinks finished 2025-03-31T16:07:54.184052Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715670, task: 65. pass away 2025-03-31T16:07:54.184302Z node 28 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715670;task_id=65;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-03-31T16:07:54.185252Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7488006070050806747:3547] TxId: 281474976715670. Ctx: { TraceId: 01jqpfb4239ag4svmk2bex8p6a, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=ZDBiOWY2MjktOTZmMGMzMTctOTE4MDA3NzMtMmNiZmE1ZTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [28:7488006070050806823:3620], task: 65, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 16782 Tasks { TaskId: 65 StageId: 1 CpuTimeUs: 841 FinishTimeMs: 1743437274182 InputRows: 1 InputBytes: 310 OutputRows: 1 OutputBytes: 310 ResultRows: 1 ResultBytes: 310 ComputeCpuTimeUs: 370 BuildCpuTimeUs: 471 HostName: "ghrun-tuhoqst4xe" NodeId: 28 CreateTimeMs: 1743437272172 } MaxMemoryUsage: 1048576 } 2025-03-31T16:07:54.185336Z node 28 :KQP_EXECUTER INFO: TxId: 281474976715670. Ctx: { TraceId: 01jqpfb4239ag4svmk2bex8p6a, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=ZDBiOWY2MjktOTZmMGMzMTctOTE4MDA3NzMtMmNiZmE1ZTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [28:7488006070050806823:3620] 2025-03-31T16:07:54.190250Z node 28 :KQP_EXECUTER INFO: ActorId: [28:7488006070050806747:3547] TxId: 281474976715670. Ctx: { TraceId: 01jqpfb4239ag4svmk2bex8p6a, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=ZDBiOWY2MjktOTZmMGMzMTctOTE4MDA3NzMtMmNiZmE1ZTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Full stats: { CpuTimeUs: 1160364 DurationUs: 2179350 Tables { TablePath: "/Root/OlapStore/log1" ReadRows: 50 ReadBytes: 16000 } ExecuterCpuTimeUs: 618579 StartTimeMs: 1743437272006 FinishTimeMs: 1743437274185 Stages { StageGuid: "ac1bd63e-cde47766-3f9022ba-2c21ef97" Program: "(\n(return (lambda \'() (block \'(\n (let $1 (KqpTable \'\"/Root/OlapStore/log1\" \'\"72057594046644480:3\" \'\"\" \'1))\n (let $2 \'(\'\"json_payload\" \'\"level\" \'\"message\" \'\"resource_id\" \'\"resource_type\" \'\"timestamp\" \'\"uid\"))\n (let $3 (KqpWideReadOlapTableRanges $1 (Void) $2 \'() \'() (lambda \'($6) $6)))\n (let $4 (Bool \'false))\n (let $5 \'(\'(\'5 $4) \'(\'4 $4) \'(\'3 $4) \'(\'6 $4)))\n (return (FromFlow (WideTopSort (WideFilter $3 (lambda \'($7 $8 $9 $10 $11 $12 $13) (block \'(\n (let $14 (DataType \'Utf8))\n (let $15 (OptionalType $14))\n (let $16 \'((ResourceType \'\"JsonPath\")))\n (let $17 (DictType $14 (ResourceType \'\"JsonNode\")))\n (let $18 (CallableType \'() \'((VariantType (TupleType (TupleType (DataType \'Uint8) (DataType \'String)) $15))) \'((OptionalType (DataType \'JsonDocument))) $16 \'($17)))\n (let $19 (Udf \'\"Json2.JsonDocumentSqlValueConvertToUtf8\" (Void) (VoidType) \'\"\" $18 (VoidType) \'\"\" \'(\'(\'\"strict\"))))\n (let $20 (CallableType \'() $16 \'($14)))\n (let $21 (Udf \'\"Json2.CompilePath\" (Void) (VoidType) \'\"\" $20 (VoidType) \'\"\" \'()))\n (let $22 (Apply $19 $7 (Apply $21 (Utf8 \'\"$.auth.user.id\")) (Dict $17)))\n (let $23 (Visit $22 \'0 (lambda \'($24) (Nothing $15)) \'1 (lambda \'($25) $25)))\n (return (Coalesce (== $23 (String \'\"1000042\")) $4))\n )))) (Uint64 \'50) $5)))\n))))\n)\n" ComputeActors { CpuTimeUs: 7382 Tasks { TaskId: 55 CpuTimeUs: 3044 FinishTimeMs: 1743437274120 Tables { TablePath: "/Root/OlapStore/log1" } ComputeCpuTimeUs: 211 BuildCpuTimeUs: 2833 Sources { IngressName: "CS" Ingress { } } HostName: "ghrun-tuhoqst4xe" NodeId: 28 CreateTimeMs: 1743437272136 } MaxMemoryUsage: 1048576 
} BaseTimeMs: 1743437273992 } Stages { StageId: 1 StageGuid: "1fb22a6b-a8403598-db69f132-d8582db7" Program: "(\n(return (lambda \'($1) (FromFlow (NarrowMap (Take (ToFlow $1) (Uint64 \'50)) (lambda \'($2 $3 $4 $5 $6 $7 $8) (AsStruct \'(\'\"json_payload\" $2) \'(\'\"level\" $3) \'(\'\"message\" $4) \'(\'\"resource_id\" $5) \'(\'\"resource_type\" $6) \'(\'\"timestamp\" $7) \'(\'\"uid\" $8)))))))\n)\n" ComputeActors { CpuTimeUs: 16782 Tasks { TaskId: 65 StageId: 1 CpuTimeUs: 841 FinishTimeMs: 1743437274182 InputRows: 1 InputBytes: 310 OutputRows: 1 OutputBytes: 310 ResultRows: 1 ResultBytes: 310 ComputeCpuTimeUs: 370 BuildCpuTimeUs: 471 HostName: "ghrun-tuhoqst4xe" NodeId: 28 CreateTimeMs: 1743437272172 } MaxMemoryUsage: 1048576 } BaseTimeMs: 1743437273992 } TxPlansWithStats: "{\"Node Type\":\"Phase\",\"PlanNodeId\":5,\"Plans\":[{\"Node Type\":\"ResultSet\",\"PlanNodeId\":4,\"PlanNodeType\":\"ResultSet\",\"Plans\":[{\"Node Type\":\"Limit\",\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":2}],\"Limit\":\"50\",\"Name\":\"Limit\"}],\"PlanNodeId\":3,\"Plans\":[{\"Node Type\":\"Merge\",\"PlanNodeId\":2,\"PlanNodeType\":\"Connection\",\"Plans\":[{\"Node Type\":\"TopSort-Filter-TableFullScan\",\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Limit\":\"50\",\"Name\":\"TopSort\",\"TopSortBy\":\"[row.timestamp,row.resource_type,row.resource_id,row.uid]\"},{\"E-Cost\":\"No estimate\",\"E-Rows\":\"No estimate\",\"E-Size\":\"No estimate\",\"Inputs\":[{\"InternalOperatorId\":2}],\"Name\":\"Filter\",\"Predicate\":\"Visit == \\\"1000042\\\"\"},{\"E-Cost\":\"No estimate\",\"E-Rows\":\"No estimate\",\"E-Size\":\"No estimate\",\"Inputs\":[],\"Name\":\"TableFullScan\",\"Path\":\"\\/Root\\/OlapStore\\/log1\",\"ReadColumns\":[\"json_payload\",\"level\",\"message\",\"resource_id\",\"resource_type\",\"timestamp\",\"uid\"],\"ReadRanges\":[\"timestamp (-∞, +∞)\",\"resource_type (-∞, +∞)\",\"resource_id (-∞, +∞)\",\"uid (-∞, +∞)\"],\"Scan\":\"Parallel\",\"SsaProgram\":{\"Command\":[{\"Projection\":{\"Columns\":[{\"Id\":2},{\"Id\":7},{\"Id\":1},{\"Id\":3},{\"Id\":6},{\"Id\":5},{\"Id\":4}]}}],\"Version\":5},\"Table\":\"OlapStore\\/log1\"}],\"PlanNodeId\":1,\"StageGuid\":\"ac1bd63e-cde47766-3f9022ba-2c21ef97\",\"Stats\":{\"BaseTimeMs\":1743437273992,\"ComputeNodes\":[{\"CpuTimeUs\":7382,\"Tasks\":[{\"ComputeTimeUs\":211,\"FinishTimeMs\":1743437274120,\"Host\":\"ghrun-tuhoqst4xe\",\"NodeId\":28,\"TaskId\":55}]}],\"FinishedTasks\":0,\"PhysicalStageId\":0,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"},\"Tables\":[\"OlapStore\\/log1\"]}],\"SortColumns\":[\"timestamp (Desc)\",\"resource_type (Desc)\",\"resource_id (Desc)\",\"uid (Desc)\"],\"StageGuid\":\"\"}],\"StageGuid\":\"1fb22a6b-a8403598-db69f132-d8582db7\",\"Stats\":{\"BaseTimeMs\":1743437273992,\"ComputeNodes\":[{\"CpuTimeUs\":16782,\"Tasks\":[{\"ComputeTimeUs\":370,\"FinishTimeMs\":1743437274182,\"Host\":\"ghrun-tuhoqst4xe\",\"InputBytes\":310,\"InputRows\":1,\"NodeId\":28,\"OutputBytes\":310,\"OutputRows\":1,\"ResultBytes\":310,\"ResultRows\":1,\"TaskId\":65}]}],\"FinishedTasks\":0,\"PhysicalStageId\":1,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"StageGuid\":\"\"}],\"StageGuid\":\"\"}" StatConvertBytes: 2136 Extra { type_url: "type.googleapis.com/NKqpProto.TKqpExecutionExtraStats" value: "\010\002\022\r\010\256#\020\321\213\004\030\331\210! A" } } 2025-03-31T16:07:54.190330Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7488006070050806747:3547] TxId: 281474976715670. 
Ctx: { TraceId: 01jqpfb4239ag4svmk2bex8p6a, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=ZDBiOWY2MjktOTZmMGMzMTctOTE4MDA3NzMtMmNiZmE1ZTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-03-31T16:07:54.190421Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7488006070050806747:3547] TxId: 281474976715670. Ctx: { TraceId: 01jqpfb4239ag4svmk2bex8p6a, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=ZDBiOWY2MjktOTZmMGMzMTctOTE4MDA3NzMtMmNiZmE1ZTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.126688s ReadRows: 30 ReadBytes: 9600 ru: 84 rate limiter was not found force flag: 1 2025-03-31T16:07:54.190568Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=ZDBiOWY2MjktOTZmMGMzMTctOTE4MDA3NzMtMmNiZmE1ZTk=, ActorId: [28:7488006065755839404:3547], ActorState: ExecuteState, TraceId: 01jqpfb4239ag4svmk2bex8p6a, TEvTxResponse, CurrentTx: 1/1 response.status: SUCCESS 2025-03-31T16:07:54.191153Z node 28 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=28&id=ZDBiOWY2MjktOTZmMGMzMTctOTE4MDA3NzMtMmNiZmE1ZTk=, ActorId: [28:7488006065755839404:3547], ActorState: ExecuteState, TraceId: 01jqpfb4239ag4svmk2bex8p6a, txInfo Status: Active Kind: ReadOnly TotalDuration: 0 ServerDuration: 2191.485 QueriesCount: 1 2025-03-31T16:07:54.191257Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=ZDBiOWY2MjktOTZmMGMzMTctOTE4MDA3NzMtMmNiZmE1ZTk=, ActorId: [28:7488006065755839404:3547], ActorState: ExecuteState, TraceId: 01jqpfb4239ag4svmk2bex8p6a, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-03-31T16:07:54.191398Z node 28 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=28&id=ZDBiOWY2MjktOTZmMGMzMTctOTE4MDA3NzMtMmNiZmE1ZTk=, ActorId: [28:7488006065755839404:3547], ActorState: ExecuteState, TraceId: 01jqpfb4239ag4svmk2bex8p6a, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-03-31T16:07:54.191459Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=ZDBiOWY2MjktOTZmMGMzMTctOTE4MDA3NzMtMmNiZmE1ZTk=, ActorId: [28:7488006065755839404:3547], ActorState: ExecuteState, TraceId: 01jqpfb4239ag4svmk2bex8p6a, EndCleanup, isFinal: 1 2025-03-31T16:07:54.191554Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=ZDBiOWY2MjktOTZmMGMzMTctOTE4MDA3NzMtMmNiZmE1ZTk=, ActorId: [28:7488006065755839404:3547], ActorState: ExecuteState, TraceId: 01jqpfb4239ag4svmk2bex8p6a, Sent query response back to proxy, proxyRequestId: 5, proxyId: [28:7488005966971587094:2280] 2025-03-31T16:07:54.191609Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=ZDBiOWY2MjktOTZmMGMzMTctOTE4MDA3NzMtMmNiZmE1ZTk=, ActorId: [28:7488006065755839404:3547], ActorState: unknown state, TraceId: 01jqpfb4239ag4svmk2bex8p6a, Cleanup temp tables: 0 2025-03-31T16:07:54.196038Z node 28 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437272003, txId: 18446744073709551615] shutting down 2025-03-31T16:07:54.196229Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=ZDBiOWY2MjktOTZmMGMzMTctOTE4MDA3NzMtMmNiZmE1ZTk=, ActorId: [28:7488006065755839404:3547], ActorState: unknown state, TraceId: 01jqpfb4239ag4svmk2bex8p6a, Session actor destroyed RESULT: 
[[42000u;"nginx";"resource_6";"19";[2];["message"];["{\"auth\":{\"org_id\":7704,\"service\":{\"internal\":\"false\",\"ip\":\"258.258.258.258\"},\"type\":\"token\",\"user\":{\"id\":1000042,\"ip\":\"257.257.257.257\",\"is_cloud\":\"false\"}}}"]]] --------------------- STATS: total CPU: 1201 duration: 2179350 usec cpu: 1160364 usec { name: "/Root/OlapStore/log1" reads { rows: 50 bytes: 16000 } } >> test_sql_streaming.py::test[suites-ReadWriteTopicWithSchema-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-WriteTwoTopics-default.txt] >> KqpPg::TableDeleteWhere+useSink [GOOD] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_sending_duplicates >> KqpPg::TableDeleteWhere-useSink >> ttl_unavailable_s3.py::TestUnavailableS3::test >> Secret::DeactivatedQueryService |83.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change >> TTicketParserTest::Authorization [GOOD] >> TTicketParserTest::AuthorizationModify >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v0-std] [GOOD] |83.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> KqpIndexLookupJoin::InnerJoinLeftFilter+StreamLookup |83.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |83.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/providers/solomon/actors/ut/ydb-library-yql-providers-solomon-actors-ut |83.0%| [LD] {RESULT} $(B)/ydb/library/yql/providers/solomon/actors/ut/ydb-library-yql-providers-solomon-actors-ut |83.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/solomon/actors/ut/ydb-library-yql-providers-solomon-actors-ut |83.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/ttl_tiering/py3test >> data_correctness.py::TestDataCorrectness::test >> TPersQueueTest::TopicServiceReadBudget [GOOD] >> TPersQueueTest::TopicServiceSimpleHappyWrites >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_auth_header |83.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/config/ut/ydb-services-config-ut |83.0%| [LD] {RESULT} $(B)/ydb/services/config/ut/ydb-services-config-ut >> unstable_connection.py::TestUnstableConnection::test |83.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/config/ut/ydb-services-config-ut >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v0-fifo] >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithInvalidToken_SessionClosedWithUnauthenticatedError [GOOD] >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithValidTokenButWithoutACL_SessionClosedWithUnauthorizedError >> TTicketParserTest::LoginRefreshGroupsGood [GOOD] >> TTicketParserTest::LoginCheckRemovedUser >> TTicketParserTest::NebiusAuthenticationRetryError [GOOD] >> TTicketParserTest::NebiusAuthenticationRetryErrorImmediately |83.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/load/ydb-tests-olap-load |83.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/load/ydb-tests-olap-load |83.0%| [LD] {RESULT} $(B)/ydb/tests/olap/load/ydb-tests-olap-load >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_count_queues[tables_format_v0] >> TTicketParserTest::AuthorizationModify [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v1-fifo] >> DataShardReadIteratorBatchMode::MultipleRanges [GOOD] ------- [TM] {asan, 
default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::AuthorizationModify [GOOD] Test command err: 2025-03-31T16:07:48.829096Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488006051675560608:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:07:48.829672Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d42/r3tmp/tmpaHiITY/pdisk_1.dat 2025-03-31T16:07:49.871959Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:07:49.873736Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:07:49.873850Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:07:49.888171Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:07:49.888448Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 27345, node 1 2025-03-31T16:07:50.167743Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:07:50.167777Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:07:50.167785Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:07:50.167904Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26518 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:07:50.914719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:07:50.950033Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-03-31T16:07:50.950084Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:07:50.950094Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-31T16:07:50.950501Z node 1 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-03-31T16:07:50.950583Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Connect to grpc://localhost:14968 2025-03-31T16:07:50.954605Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Request AuthenticateRequest { iam_token: "**** (8E120919)" } 2025-03-31T16:07:50.983927Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Response AuthenticateResponse { subject { user_account { id: "user1" } } } 2025-03-31T16:07:50.984341Z node 1 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for UserAccount(user1@as) 2025-03-31T16:07:50.999131Z node 1 :GRPC_CLIENT DEBUG: [517000010788] Connect to grpc://localhost:1709 2025-03-31T16:07:51.000275Z node 1 :GRPC_CLIENT DEBUG: [517000010788] Request GetUserAccountRequest { user_account_id: "user1" } 2025-03-31T16:07:51.011289Z node 1 :GRPC_CLIENT DEBUG: [517000010788] Response UserAccount { yandex_passport_user_account { login: "login1" } } 2025-03-31T16:07:51.011907Z node 1 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of login1@passport test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d42/r3tmp/tmpyQJCBk/pdisk_1.dat 2025-03-31T16:07:55.206436Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:07:55.231457Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:07:55.259187Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:07:55.259269Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:07:55.260691Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28921, node 2 2025-03-31T16:07:55.404357Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:07:55.404395Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:07:55.404418Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:07:55.404536Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14985 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:07:55.732650Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:07:55.739802Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:07:55.744730Z node 2 :TICKET_PARSER ERROR: Ticket **** (8E120919): Token is not supported 2025-03-31T16:07:59.858645Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488006098202395434:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:07:59.858690Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d42/r3tmp/tmpXhpRSM/pdisk_1.dat 2025-03-31T16:08:00.039556Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:08:00.049381Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:08:00.049471Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:08:00.051329Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10309, node 3 2025-03-31T16:08:00.118537Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:08:00.118567Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:08:00.118574Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:08:00.118706Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4417 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:08:00.377367Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:08:00.392246Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:08:00.399257Z node 3 :TICKET_PARSER ERROR: Ticket **** (8E120919): Unknown token 2025-03-31T16:08:04.320858Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488006120096826288:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:08:04.320948Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d42/r3tmp/tmpBuN8NB/pdisk_1.dat 2025-03-31T16:08:04.526672Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:08:04.557095Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:08:04.557190Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:08:04.559596Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15080, node 4 2025-03-31T16:08:04.695976Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:08:04.695997Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:08:04.696005Z node 4 :NET_CLASSIFIER WARN: failed to initi ... 
Denied" retryable:0 2025-03-31T16:08:05.143001Z node 4 :TICKET_PARSER DEBUG: Ticket **** (E2D1584C) () has now permanent error message 'Access Denied' 2025-03-31T16:08:05.143685Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-03-31T16:08:05.143716Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:08:05.143727Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-31T16:08:05.143754Z node 4 :TICKET_PARSER TRACE: Ticket **** (BE2EA0D0) asking for AccessServiceAuthorization(something.read) 2025-03-31T16:08:05.143939Z node 4 :GRPC_CLIENT DEBUG: [5170000a0a08] Request AuthorizeRequest { iam_token: "**** (BE2EA0D0)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-03-31T16:08:05.146345Z node 4 :GRPC_CLIENT DEBUG: [5170000a0a08] Status 16 Access Denied 2025-03-31T16:08:05.146678Z node 4 :TICKET_PARSER TRACE: Ticket **** (BE2EA0D0) permission something.read now has a permanent error "Access Denied" retryable:0 2025-03-31T16:08:05.146707Z node 4 :TICKET_PARSER DEBUG: Ticket **** (BE2EA0D0) () has now permanent error message 'Access Denied' 2025-03-31T16:08:05.147353Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-03-31T16:08:05.147381Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:08:05.147392Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-31T16:08:05.147431Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2025-03-31T16:08:05.147633Z node 4 :GRPC_CLIENT DEBUG: [5170000a0a08] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "XXXXXXXX" type: "ydb.database" } resource_path { id: "XXXXXXXX" type: "resource-manager.folder" } } 2025-03-31T16:08:05.149920Z node 4 :GRPC_CLIENT DEBUG: [5170000a0a08] Status 16 Access Denied 2025-03-31T16:08:05.150278Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a permanent error "Access Denied" retryable:0 2025-03-31T16:08:05.150329Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now permanent error message 'Access Denied' 2025-03-31T16:08:05.151038Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-03-31T16:08:05.151064Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:08:05.151076Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-31T16:08:05.151122Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2025-03-31T16:08:05.151318Z node 4 :GRPC_CLIENT DEBUG: [5170000a0a08] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "XXXXXXXX" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-03-31T16:08:05.153122Z node 4 :GRPC_CLIENT DEBUG: [5170000a0a08] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-03-31T16:08:05.154107Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2025-03-31T16:08:05.154203Z node 4 :TICKET_PARSER DEBUG: Ticket **** 
(8E120919) () has now valid token of user1@as 2025-03-31T16:08:05.154760Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-03-31T16:08:05.154790Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:08:05.154801Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-31T16:08:05.154834Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2025-03-31T16:08:05.155034Z node 4 :GRPC_CLIENT DEBUG: [5170000a0a08] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "XXXXXXXX" type: "resource-manager.folder" } } 2025-03-31T16:08:05.160359Z node 4 :GRPC_CLIENT DEBUG: [5170000a0a08] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-03-31T16:08:05.161044Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2025-03-31T16:08:05.161145Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-03-31T16:08:05.161954Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-03-31T16:08:05.161979Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:08:05.161990Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-31T16:08:05.162016Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(monitoring.view) 2025-03-31T16:08:05.162204Z node 4 :GRPC_CLIENT DEBUG: [5170000a0a08] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "monitoring.view" resource_path { id: "gizmo" type: "iam.gizmo" } } 2025-03-31T16:08:05.166799Z node 4 :GRPC_CLIENT DEBUG: [5170000a0a08] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-03-31T16:08:05.167520Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission monitoring.view now has a valid subject "user1@as" 2025-03-31T16:08:05.167624Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-03-31T16:08:09.406639Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7488006141889912543:2082];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:08:09.421303Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d42/r3tmp/tmp2TJnm9/pdisk_1.dat 2025-03-31T16:08:09.779596Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:08:09.804829Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:08:09.804960Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:08:09.806578Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18959, node 5 2025-03-31T16:08:09.913364Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:08:09.913392Z node 5 :NET_CLASSIFIER WARN: will try to initialize from 
file: (empty maybe) 2025-03-31T16:08:09.913402Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:08:09.913554Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7645 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:08:10.425484Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:08:10.447579Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:08:10.455156Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-03-31T16:08:10.455196Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:08:10.455210Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-31T16:08:10.455249Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2025-03-31T16:08:10.455337Z node 5 :GRPC_CLIENT DEBUG: [5170000bd108] Connect to grpc://localhost:9070 2025-03-31T16:08:10.456247Z node 5 :GRPC_CLIENT DEBUG: [5170000bd108] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-03-31T16:08:10.471380Z node 5 :GRPC_CLIENT DEBUG: [5170000bd108] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-03-31T16:08:10.471744Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2025-03-31T16:08:10.471854Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-03-31T16:08:10.477474Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-03-31T16:08:10.477520Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:08:10.477532Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-31T16:08:10.477577Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2025-03-31T16:08:10.477624Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.write) 2025-03-31T16:08:10.477808Z node 5 :GRPC_CLIENT DEBUG: [5170000bd108] 
Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-03-31T16:08:10.478489Z node 5 :GRPC_CLIENT DEBUG: [5170000bd108] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-03-31T16:08:10.492066Z node 5 :GRPC_CLIENT DEBUG: [5170000bd108] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-03-31T16:08:10.492270Z node 5 :GRPC_CLIENT DEBUG: [5170000bd108] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-03-31T16:08:10.494210Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2025-03-31T16:08:10.494283Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a valid subject "user1@as" 2025-03-31T16:08:10.494386Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as |83.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/postgres_integrations/go-libpq/ydb-tests-postgres_integrations-go-libpq |83.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/postgres_integrations/go-libpq/ydb-tests-postgres_integrations-go-libpq |83.0%| [LD] {RESULT} $(B)/ydb/tests/postgres_integrations/go-libpq/ydb-tests-postgres_integrations-go-libpq >> KqpJoin::TwoJoinsWithQueryService [GOOD] >> KqpIndexLookupJoin::CheckCastUtf8ToString-StreamLookupJoin-NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastUtf8ToString-StreamLookupJoin+NotNull >> data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v0] >> ColumnBuildTest::ValidDefaultValue [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::TwoJoinsWithQueryService [GOOD] Test command err: Trying to start YDB, gRPC: 2755, MsgBus: 62718 2025-03-31T16:08:07.021472Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488006130671987189:2136];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:08:07.025386Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00199b/r3tmp/tmpm4tUyc/pdisk_1.dat 2025-03-31T16:08:07.995840Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:08:08.072632Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:08:08.072725Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:08:08.073717Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:08:08.076108Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2755, node 1 2025-03-31T16:08:08.367541Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2025-03-31T16:08:08.367571Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:08:08.367581Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:08:08.367675Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62718 TClient is connected to server localhost:62718 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:08:09.397976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:08:09.416369Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:08:12.023485Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488006130671987189:2136];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:08:12.023565Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:08:13.242161Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488006156441791569:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:08:13.242236Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:08:13.826107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:08:14.071467Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488006160736758973:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:08:14.071548Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:08:14.090137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-31T16:08:14.186079Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488006160736759057:2362], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:08:14.186160Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:08:14.201293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:08:14.284812Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488006160736759135:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:08:14.284920Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:08:14.285247Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488006160736759141:2376], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:08:14.289745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-03-31T16:08:14.304193Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488006160736759143:2377], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-03-31T16:08:14.381931Z node 1 :TX_PROXY ERROR: Actor# [1:7488006160736759195:2509] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIteratorBatchMode::MultipleRanges [GOOD] Test command err: 2025-03-31T16:05:31.912777Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:05:31.913043Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:05:31.913217Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000c99/r3tmp/tmpAO8xPU/pdisk_1.dat 2025-03-31T16:05:32.318292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:05:32.371163Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:05:32.417352Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:05:32.417503Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:05:32.429229Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:05:32.525948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:05:32.589654Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:05:32.591011Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:05:32.592328Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-31T16:05:32.592672Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:05:32.641741Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:05:32.642647Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:05:32.642792Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:05:32.644947Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:05:32.645060Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:05:32.645153Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:05:32.645627Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:05:32.645810Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:05:32.646004Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-31T16:05:32.658378Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:05:32.684417Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T16:05:32.684650Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:05:32.684839Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-31T16:05:32.684884Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:05:32.684928Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T16:05:32.684986Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:05:32.685263Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:05:32.685321Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:05:32.685633Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T16:05:32.685750Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T16:05:32.685878Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:05:32.685948Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:05:32.686009Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-31T16:05:32.686056Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-31T16:05:32.686091Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-31T16:05:32.686127Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:05:32.686182Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:05:32.686304Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:05:32.686342Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:05:32.686410Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-31T16:05:32.686513Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-03-31T16:05:32.686560Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T16:05:32.686679Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:05:32.686946Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-31T16:05:32.687024Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T16:05:32.687137Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T16:05:32.687221Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-31T16:05:32.687291Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-31T16:05:32.687334Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-03-31T16:05:32.687371Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-31T16:05:32.687691Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-31T16:05:32.687731Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-31T16:05:32.687770Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-31T16:05:32.687809Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-31T16:05:32.687876Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-31T16:05:32.687917Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-31T16:05:32.687963Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-31T16:05:32.687999Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-31T16:05:32.688027Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-31T16:05:32.689972Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-31T16:05:32.690034Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:05:32.701478Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:05:32.701570Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-31T16:05:32.701610Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-31T16:05:32.701679Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-03-31T16:05:32.701767Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-31T16:05:32.858977Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:05:32.859075Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:05:32.859146Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-31T16:05:32.859617Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:568:2495], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-03-31T16:05:32.859660Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-31T16:05:32.859804Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-31T16:05:32.859857Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-03-31T16:05:32.859922Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-03-31T16:05:32.859972Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-03-31T16:05:32.872500Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T16:05:32.872600Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:05:32.873251Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:05:32.873287Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:05:32.873336Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:05:3 ... ssTransaction::Execute at 72075186224037889 2025-03-31T16:08:15.055826Z node 16 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:08:15.055871Z node 16 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2025-03-31T16:08:15.055914Z node 16 :TX_DATASHARD DEBUG: Found ready operation [3000:281474976715664] in PlanQueue unit at 72075186224037889 2025-03-31T16:08:15.055952Z node 16 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit PlanQueue 2025-03-31T16:08:15.055992Z node 16 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2025-03-31T16:08:15.056031Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit PlanQueue 2025-03-31T16:08:15.056071Z node 16 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit LoadTxDetails 2025-03-31T16:08:15.056119Z node 16 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit LoadTxDetails 2025-03-31T16:08:15.056268Z node 16 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 3000:281474976715664 keys extracted: 0 2025-03-31T16:08:15.056321Z node 16 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2025-03-31T16:08:15.056354Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit LoadTxDetails 2025-03-31T16:08:15.056402Z node 16 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-03-31T16:08:15.056437Z node 16 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit BuildAndWaitDependencies 2025-03-31T16:08:15.056490Z node 16 :TX_DATASHARD TRACE: Operation [3000:281474976715664] is the new logically complete end at 72075186224037889 2025-03-31T16:08:15.056539Z node 16 :TX_DATASHARD TRACE: Operation [3000:281474976715664] is the new logically incomplete end at 72075186224037889 2025-03-31T16:08:15.056594Z node 16 :TX_DATASHARD TRACE: Activated operation [3000:281474976715664] at 72075186224037889 2025-03-31T16:08:15.056653Z node 16 
:TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2025-03-31T16:08:15.056679Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-03-31T16:08:15.056706Z node 16 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit CreateVolatileSnapshot 2025-03-31T16:08:15.056735Z node 16 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit CreateVolatileSnapshot 2025-03-31T16:08:15.056862Z node 16 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is ExecutedNoMoreRestarts 2025-03-31T16:08:15.056896Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit CreateVolatileSnapshot 2025-03-31T16:08:15.056948Z node 16 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit DropVolatileSnapshot 2025-03-31T16:08:15.056998Z node 16 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit DropVolatileSnapshot 2025-03-31T16:08:15.057027Z node 16 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2025-03-31T16:08:15.057053Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit DropVolatileSnapshot 2025-03-31T16:08:15.057080Z node 16 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit CompleteOperation 2025-03-31T16:08:15.057111Z node 16 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit CompleteOperation 2025-03-31T16:08:15.057290Z node 16 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is DelayComplete 2025-03-31T16:08:15.057329Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit CompleteOperation 2025-03-31T16:08:15.057376Z node 16 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit CompletedOperations 2025-03-31T16:08:15.057421Z node 16 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit CompletedOperations 2025-03-31T16:08:15.057460Z node 16 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2025-03-31T16:08:15.057487Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit CompletedOperations 2025-03-31T16:08:15.057518Z node 16 :TX_DATASHARD TRACE: Execution plan for [3000:281474976715664] at 72075186224037889 has finished 2025-03-31T16:08:15.057560Z node 16 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:08:15.057606Z node 16 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2025-03-31T16:08:15.057647Z node 16 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037889 has no attached operations 2025-03-31T16:08:15.057692Z node 16 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037889 2025-03-31T16:08:15.082142Z node 16 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 3000} 2025-03-31T16:08:15.082324Z node 16 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-03-31T16:08:15.082409Z 
node 16 :TX_DATASHARD TRACE: Complete execution for [3000:281474976715664] at 72075186224037889 on unit CompleteOperation 2025-03-31T16:08:15.082519Z node 16 :TX_DATASHARD DEBUG: Complete [3000 : 281474976715664] from 72075186224037889 at tablet 72075186224037889 send result to client [16:1047:2835], exec latency: 0 ms, propose latency: 0 ms 2025-03-31T16:08:15.082648Z node 16 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-31T16:08:15.083126Z node 16 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 3000} 2025-03-31T16:08:15.083175Z node 16 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:08:15.083202Z node 16 :TX_DATASHARD TRACE: Complete execution for [3000:281474976715664] at 72075186224037888 on unit CompleteOperation 2025-03-31T16:08:15.083245Z node 16 :TX_DATASHARD DEBUG: Complete [3000 : 281474976715664] from 72075186224037888 at tablet 72075186224037888 send result to client [16:1047:2835], exec latency: 0 ms, propose latency: 0 ms 2025-03-31T16:08:15.083284Z node 16 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:08:15.084946Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [16:593:2518], Recipient [16:666:2570]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3000 TxId: 281474976715664 } ResultFormat: FORMAT_ARROW Hints: 1 RangesSize: 3 2025-03-31T16:08:15.085164Z node 16 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-03-31T16:08:15.085285Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit CheckRead 2025-03-31T16:08:15.085501Z node 16 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-03-31T16:08:15.085566Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit CheckRead 2025-03-31T16:08:15.085635Z node 16 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-03-31T16:08:15.085697Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit BuildAndWaitDependencies 2025-03-31T16:08:15.085742Z node 16 :TX_DATASHARD TRACE: Activated operation [0:4] at 72075186224037888 2025-03-31T16:08:15.085801Z node 16 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-03-31T16:08:15.085830Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-03-31T16:08:15.085855Z node 16 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit ExecuteRead 2025-03-31T16:08:15.085886Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit ExecuteRead 2025-03-31T16:08:15.086051Z node 16 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3000 TxId: 281474976715664 } ResultFormat: FORMAT_ARROW Hints: 1 } 2025-03-31T16:08:15.086138Z node 16 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3000/281474976715664 2025-03-31T16:08:15.086487Z node 16 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is 
Executed 2025-03-31T16:08:15.086513Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit ExecuteRead 2025-03-31T16:08:15.086535Z node 16 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit CompletedOperations 2025-03-31T16:08:15.086559Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit CompletedOperations 2025-03-31T16:08:15.086608Z node 16 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-03-31T16:08:15.086631Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit CompletedOperations 2025-03-31T16:08:15.086671Z node 16 :TX_DATASHARD TRACE: Execution plan for [0:4] at 72075186224037888 has finished 2025-03-31T16:08:15.086734Z node 16 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-03-31T16:08:15.086895Z node 16 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-03-31T16:08:15.088356Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269553236, Sender [16:1067:2853], Recipient [16:666:2570]: NKikimr::TEvDataShard::TEvReadScanStarted 2025-03-31T16:08:15.088562Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269553237, Sender [16:1067:2853], Recipient [16:666:2570]: NKikimr::TEvDataShard::TEvReadScanFinished 2025-03-31T16:08:15.088788Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [16:666:2570], Recipient [16:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:08:15.088835Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:08:15.088944Z node 16 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:08:15.089016Z node 16 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:08:15.089083Z node 16 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-03-31T16:08:15.089154Z node 16 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-31T16:08:15.089209Z node 16 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-31T16:08:15.089678Z node 16 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:08:15.089778Z node 16 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::ValidDefaultValue [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:08:08.991682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:08:08.991948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:08:08.992020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:08:08.992066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:08:08.998299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:08:08.998385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:08:08.998517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:08:08.998610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:08:09.006707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:08:09.215839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:08:09.215909Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:08:09.256526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:08:09.256660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:08:09.256837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:08:09.310458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:08:09.310955Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:08:09.355281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:08:09.367370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:08:09.421724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:08:09.495905Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:08:09.496015Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:08:09.496267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:08:09.496347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:08:09.496422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:08:09.496592Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:08:09.521897Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:08:09.771307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 
72057594046678944 2025-03-31T16:08:09.790410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:08:09.805225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:08:09.815291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:08:09.815415Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:08:09.828424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:08:09.828574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:08:09.828796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:08:09.828883Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:08:09.828924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:08:09.828957Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:08:09.831017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:08:09.831109Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:08:09.831167Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:08:09.840030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:08:09.840115Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:08:09.840204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:08:09.840269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:08:09.858218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:08:09.867768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:08:09.879228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:08:09.880618Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:08:09.880781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:08:09.880851Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:08:09.891511Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:08:09.891610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:08:09.900007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:08:09.900188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:08:09.922137Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:08:09.922223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:08:09.922428Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:08:09.922480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:08:09.922781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:08:09.922916Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:08:09.923056Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:08:09.923116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:08:09.923169Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:08:09.923201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:08:09.923237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:08:09.923277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:08:09.923315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:08:09.923357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:08:09.923431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:08:09.923467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:08:09.923501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:08:09.925755Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:08:09.925882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:08:09.925930Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1141:3012], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }}, record: Status: StatusAccepted TxId: 281474976725761 SchemeshardId: 72075186233409549 PathId: 2 2025-03-31T16:08:17.589415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976725761:0, at schemeshard: 72075186233409549 2025-03-31T16:08:17.589461Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72075186233409549] TDropLock TPropose opId# 281474976725761:0 ProgressState 2025-03-31T16:08:17.589512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976725761 ready parts: 1/1 2025-03-31T16:08:17.589606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72075186233409550 message:Transaction { AffectedSet { TabletId: 72075186233409549 Flags: 2 } ExecLevel: 0 TxId: 281474976725761 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72075186233409550 2025-03-31T16:08:17.601732Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 106 2025-03-31T16:08:17.601856Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1141:3012], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }} 2025-03-31T16:08:17.602099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976725761:4294967295 from tablet: 72075186233409549 to tablet: 72075186233409550 cookie: 0:281474976725761 msg type: 
269090816 2025-03-31T16:08:17.602210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976725761, partId: 4294967295, tablet: 72075186233409550 2025-03-31T16:08:17.602390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976725761, at schemeshard: 72075186233409549 2025-03-31T16:08:17.602420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976725761, ready parts: 0/1, is published: true 2025-03-31T16:08:17.602454Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976725761, at schemeshard: 72075186233409549 2025-03-31T16:08:17.624473Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [1:1812:3675], Recipient [1:747:2636]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72075186233409549 ClientId: [1:1812:3675] ServerId: [1:1814:3677] } 2025-03-31T16:08:17.624563Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-03-31T16:08:17.698824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 650, transactions count in step: 1, at schemeshard: 72075186233409549 2025-03-31T16:08:17.698948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976725761 AckTo { RawX1: 0 RawX2: 0 } } Step: 650 MediatorID: 72075186233409551 TabletID: 72075186233409549, at schemeshard: 72075186233409549 2025-03-31T16:08:17.699004Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72075186233409549] TDropLock TPropose opId# 281474976725761:0 HandleReply TEvOperationPlan: step# 650 2025-03-31T16:08:17.699045Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976725761:0 128 -> 240 2025-03-31T16:08:17.708368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976725761:0, at schemeshard: 72075186233409549 2025-03-31T16:08:17.708462Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72075186233409549] TDone opId# 281474976725761:0 ProgressState 2025-03-31T16:08:17.708662Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976725761:0 progress is 1/1 2025-03-31T16:08:17.708705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 1/1 2025-03-31T16:08:17.708738Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976725761:0 progress is 1/1 2025-03-31T16:08:17.708763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 1/1 2025-03-31T16:08:17.708816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976725761, ready parts: 1/1, is published: true 2025-03-31T16:08:17.708881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:561:2499] message: TxId: 281474976725761 2025-03-31T16:08:17.708916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 1/1 2025-03-31T16:08:17.708942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976725761:0 2025-03-31T16:08:17.708967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976725761:0 2025-03-31T16:08:17.709044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 2] was 3 2025-03-31T16:08:17.717847Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976725761 
2025-03-31T16:08:17.717937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976725761 2025-03-31T16:08:17.718008Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976725761, buildInfoId: 106 2025-03-31T16:08:17.718083Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976725761, buildInfo: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1141:3012], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }} 2025-03-31T16:08:17.728206Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 106 2025-03-31T16:08:17.728328Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1141:3012], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }} 2025-03-31T16:08:17.728410Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-03-31T16:08:17.734731Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 106 2025-03-31T16:08:17.734864Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1141:3012], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 
450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }} 2025-03-31T16:08:17.734906Z node 1 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 106, subscribers count# 1 2025-03-31T16:08:17.735102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-03-31T16:08:17.735158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:1159:3030] TestWaitNotification: OK eventTxId 106 2025-03-31T16:08:17.749232Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 106 2025-03-31T16:08:17.749590Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 106 State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 column_build_operation { column { ColumnName: "ColumnValue" default_from_literal { type { type_id: UINT64 } value { uint64_value: 1111 } } } } } Progress: 100 } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 106 State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 column_build_operation { column { ColumnName: "ColumnValue" default_from_literal { type { type_id: UINT64 } value { uint64_value: 1111 } } } } } Progress: 100 } >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-fifo] >> TTicketParserTest::NebiusAuthenticationRetryErrorImmediately [GOOD] >> TTicketParserTest::NebiusAccessKeySignatureUnsupported >> KqpIndexLookupJoin::InnerJoinLeftFilter+StreamLookup [GOOD] >> KqpIndexLookupJoin::InnerJoinLeftFilter-StreamLookup >> TPersQueueTest::PreferredCluster_TwoEnabledClustersAndWriteSessionsWithDifferentPreferredCluster_SessionWithMismatchedClusterDiesAndOthersAlive [GOOD] >> TPersQueueTest::PreferredCluster_DisabledRemoteClusterAndWriteSessionsWithDifferentPreferredClusterAndLaterRemoteClusterEnabled_SessionWithMismatchedClusterDiesAfterPreferredClusterEnabledAndOtherSessionsAlive |83.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |83.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut |83.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut |83.1%| [LD] {RESULT} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut >> KqpJoinOrder::ShuffleEliminationDifferentJoinPredicateKeyTypeCorrectness1 [GOOD] >> test_sql_streaming.py::test[suites-ReadTopicWithMetadataNestedDeep-default.txt] [FAIL] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[std-tables_format_v0] |83.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/streaming_optimize/py3test >> test_sql_streaming.py::test[suites-ReadTopicWithMetadataNestedDeep-default.txt] [FAIL] ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::ShuffleEliminationDifferentJoinPredicateKeyTypeCorrectness1 [GOOD] Test command err: Trying to start YDB, gRPC: 26994, MsgBus: 14090 2025-03-31T16:07:15.698929Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005909252159183:2132];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:07:15.699006Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0019ae/r3tmp/tmpCPSZX9/pdisk_1.dat 2025-03-31T16:07:16.768731Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:07:16.858708Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:07:16.858809Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:07:16.860311Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:07:16.883168Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26994, node 1 2025-03-31T16:07:17.503591Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:07:17.503611Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:07:17.503622Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:07:17.503709Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14090 TClient is connected to server localhost:14090 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:07:19.712265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:07:20.576716Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005930726996261:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:07:20.576840Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:07:20.577055Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005930726996269:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:07:20.584843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:07:20.600958Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005930726996275:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:07:20.703433Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488005909252159183:2132];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:07:20.704715Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:07:20.708894Z node 1 :TX_PROXY ERROR: Actor# [1:7488005930726996326:2342] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:07:22.544103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:07:22.764659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:07:22.806180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:07:22.841765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:07:22.894382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:07:23.216841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:07:23.268852Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:07:23.318887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:07:23.367115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-31T16:07:23.430579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-03-31T16:07:23.484826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-03-31T16:07:23.535744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 
2025-03-31T16:07:23.606212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:07:25.030869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-03-31T16:07:25.119006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T16:07:25.176475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-31T16:07:25.242808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-31T16:07:25.282750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-31T16:07:25.326094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-31T16:07:25.401924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-31T16:07:25.460089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-31T16:07:25.524097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-03-31T16:07:25.574573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-31T16:07:25.649405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-31T16:07:25.691306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-03-31T16:07:25.752223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-03-31T16:07:25.789726Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-03-31T16:07:25.829797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-03-31T16:07:25.867589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-03-31T16:07:25.925397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but prop ... oller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:08:08.233532Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038550;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:08:08.243839Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038551;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:08:08.245229Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038524;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:08:08.249546Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038646;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:08:08.250703Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038561;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:08:08.256185Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038506;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:08:08.256283Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038559;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:08:08.261805Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038556;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:08:08.268608Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038549;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:08:08.270766Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038513;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:08:08.274482Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038552;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:08:08.280897Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038563;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:08:08.287745Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038555;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:08:08.294728Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038608;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:08:08.301471Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038630;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:08:08.307920Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038535;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:08:08.313023Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038514;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:08:08.314751Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038644;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:08:08.318701Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038529;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:08:08.326545Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038515;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:08:08.328289Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038530;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:08:08.334414Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038537;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:08:08.337037Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038648;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:08:08.341579Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038606;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:08:08.342576Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038525;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:08:08.346623Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038642;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:08:08.352117Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038518;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:08:08.352117Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038528;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:08:08.358166Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038522;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:08:08.358816Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038584;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:08:08.364505Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038531;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:08:08.365437Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038548;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:08:08.370921Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038507;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:08:08.372369Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038611;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:08:08.377169Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038538;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:08:08.377910Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038558;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:08:08.384580Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038545;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:08:08.390839Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038640;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:08:08.396952Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038614;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:08:08.404713Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038635;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:08:08.410037Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038588;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:08:08.416062Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038633;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:08:08.442844Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038512;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:08:08.472112Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038534;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:08:08.497242Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038527;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:08:08.627454Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpfaddm87r4vhmsmmsetqqc", SessionId: ydb://session/3?node_id=1&id=YTAyOThjMjQtOGI5NGM0ODctM2ZkOTNiZmEtZDNiYzk3NzM=, Slow query, duration: 40.702775s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:08:09.152126Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:08:09.152604Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:08:09.153257Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;self_id=[1:7488005977971643412:2996];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038170;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038331;receive=72075186224038629; 2025-03-31T16:08:09.153603Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> TTicketParserTest::LoginCheckRemovedUser [GOOD] >> TTicketParserTest::LoginEmptyTicketBad >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_purge_queue_counters >> TTicketParserTest::NebiusAccessKeySignatureUnsupported [GOOD] >> KqpTx::RollbackManyTx >> test_sql_streaming.py::test[suites-GroupByHoppingWindowPercentile-default.txt] [FAIL] >> KqpIndexLookupJoin::CheckCastUtf8ToString-StreamLookupJoin+NotNull [GOOD] >> test_sql_streaming.py::test[suites-WriteTwoTopics-default.txt] [FAIL] >> TColumnShardTestReadWrite::CompactionInGranule_PKString_Reboot [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v1-fifo] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::NebiusAccessKeySignatureUnsupported [GOOD] Test command err: 2025-03-31T16:07:41.111111Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488006018673600968:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:07:41.111528Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d52/r3tmp/tmp4gcG3F/pdisk_1.dat 2025-03-31T16:07:41.869067Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:07:41.877649Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Unknown -> Disconnected 2025-03-31T16:07:41.877780Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:07:41.884485Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21494, node 1 2025-03-31T16:07:42.090058Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:07:42.103150Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:07:42.103175Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:07:42.103359Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28760 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:07:42.578279Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:07:42.598911Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:07:42.695168Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 2025-03-31T16:07:42.722383Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-31T16:07:42.722420Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:07:42.723148Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****SReg (8F8A3D12) () has now retryable error message 'Security state is empty' 2025-03-31T16:07:42.723366Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-31T16:07:42.723384Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:07:42.723566Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****SReg (8F8A3D12) () has now retryable error message 'Security state is empty' 2025-03-31T16:07:42.723576Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A2 error Security state is empty 2025-03-31T16:07:42.723589Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A2 error Security state is empty 2025-03-31T16:07:42.723614Z node 1 :TICKET_PARSER ERROR: Ticket eyJh****SReg (8F8A3D12): Security state is empty 2025-03-31T16:07:45.054669Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****SReg (8F8A3D12) 2025-03-31T16:07:45.054975Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-31T16:07:45.055003Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:07:45.055235Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****SReg (8F8A3D12) () has now retryable error message 'Security state is empty' 2025-03-31T16:07:45.055246Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A2 error Security state is empty 2025-03-31T16:07:45.731333Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-31T16:07:46.071382Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488006018673600968:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:07:46.071454Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:07:48.060292Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****SReg (8F8A3D12) 2025-03-31T16:07:48.060597Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-31T16:07:48.060615Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:07:48.061476Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****SReg (8F8A3D12) () has now valid token of user1 2025-03-31T16:07:48.061490Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A4 success 2025-03-31T16:07:52.071919Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****SReg (8F8A3D12) 2025-03-31T16:07:52.072396Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****SReg (8F8A3D12) () has now valid token of user1 2025-03-31T16:07:53.697102Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488006073748361886:2208];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/0l5c/000d52/r3tmp/tmpmpnHeF/pdisk_1.dat 2025-03-31T16:07:53.784681Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:07:53.895508Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:07:53.923051Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:07:53.927281Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:07:53.931516Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3155, node 2 2025-03-31T16:07:54.151847Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:07:54.151883Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:07:54.151893Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:07:54.152007Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65498 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:07:54.660510Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:07:54.677755Z node 2 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-03-31T16:07:54.677885Z node 2 :GRPC_CLIENT DEBUG: [51700001cf08] Connect to grpc://localhost:4466 2025-03-31T16:07:54.680917Z node 2 :GRPC_CLIENT DEBUG: [51700001cf08] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" NebiusAccessService::Authenticate response account { user_account { id: "user1" } } 0: "" 2025-03-31T16:07:54.711313Z node 2 :GRPC_CLIENT DEBUG: [51700001cf08] Response AuthenticateResponse { account { user_account { id: "user1" } } } 2025-03-31T16:07:54.719300Z node 2 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-03-31T16:07:59.169090Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488006099489082454:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:07:59.169889Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d52/r3tmp/tmp2eZ5k8/pdisk_1.dat 2025-03-31T16:07:59.609503Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:07:59.613120Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:07:59.613242Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:07:59.614914Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15224, node 3 2025-03-31T16:07:59.791764Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:07:59.791800Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:07:59.791808Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:07:59.791941Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20028 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 Sub ... thTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:08:00.062049Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:08:00.072261Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:08:00.074317Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-03-31T16:08:00.074346Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:08:00.074357Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-31T16:08:00.074385Z node 3 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-03-31T16:08:00.074427Z node 3 :GRPC_CLIENT DEBUG: [5170000f0088] Connect to grpc://localhost:23657 2025-03-31T16:08:00.075233Z node 3 :GRPC_CLIENT DEBUG: [5170000f0088] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" 2025-03-31T16:08:00.082098Z node 3 :GRPC_CLIENT DEBUG: [5170000f0088] Status 14 Service Unavailable 2025-03-31T16:08:00.082278Z node 3 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-03-31T16:08:00.082305Z node 3 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-03-31T16:08:00.082440Z node 3 :GRPC_CLIENT DEBUG: [5170000f0088] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" 2025-03-31T16:08:00.084071Z node 3 :GRPC_CLIENT DEBUG: [5170000f0088] Status 14 Service Unavailable 2025-03-31T16:08:00.084252Z node 3 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-03-31T16:08:01.309118Z node 3 :TICKET_PARSER DEBUG: Refreshing ticket **** (8E120919) 2025-03-31T16:08:01.309161Z node 3 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-03-31T16:08:01.309276Z node 3 :GRPC_CLIENT DEBUG: [5170000f0088] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" 2025-03-31T16:08:01.311688Z node 3 :GRPC_CLIENT DEBUG: [5170000f0088] Status 14 Service Unavailable 2025-03-31T16:08:01.315855Z node 3 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-03-31T16:08:02.315492Z node 3 :TICKET_PARSER DEBUG: Refreshing ticket **** (8E120919) 2025-03-31T16:08:02.315532Z node 3 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication NebiusAccessService::Authenticate request 2025-03-31T16:08:02.315663Z node 3 :GRPC_CLIENT DEBUG: [5170000f0088] Request AuthenticateRequest { iam_token: "**** (8E120919)" } iam_token: "user1" NebiusAccessService::Authenticate response account { user_account { id: "user1" } } 0: "" 2025-03-31T16:08:02.325427Z node 3 :GRPC_CLIENT DEBUG: [5170000f0088] Response AuthenticateResponse { account { user_account { id: "user1" } } } 2025-03-31T16:08:02.325978Z node 3 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-03-31T16:08:04.176939Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488006099489082454:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:08:04.177029Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:08:13.213038Z node 4 :METADATA_PROVIDER 
WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488006158124288360:2125];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:08:13.213090Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d52/r3tmp/tmparYIeY/pdisk_1.dat 2025-03-31T16:08:13.608009Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:08:13.608116Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:08:13.609669Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:08:13.612080Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29663, node 4 2025-03-31T16:08:13.819704Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:08:13.819728Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:08:13.819738Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:08:13.819859Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1780 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:08:14.466095Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:08:14.483546Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:08:14.486484Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-03-31T16:08:14.486519Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:08:14.486531Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-31T16:08:14.486563Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-03-31T16:08:14.486619Z node 4 :GRPC_CLIENT DEBUG: [517000052c08] Connect to grpc://localhost:19973 2025-03-31T16:08:14.487599Z node 4 :GRPC_CLIENT DEBUG: [517000052c08] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" NebiusAccessService::Authenticate response 14: "Service Unavailable" 2025-03-31T16:08:14.508449Z node 4 :GRPC_CLIENT DEBUG: [517000052c08] Status 14 Service Unavailable 2025-03-31T16:08:14.508945Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-03-31T16:08:14.508975Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-03-31T16:08:14.509138Z node 4 :GRPC_CLIENT DEBUG: [517000052c08] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" NebiusAccessService::Authenticate response account { user_account { id: "user1" } } 0: "" 2025-03-31T16:08:14.511886Z node 4 :GRPC_CLIENT DEBUG: [517000052c08] Response AuthenticateResponse { account { user_account { id: "user1" } } } 2025-03-31T16:08:14.512445Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-03-31T16:08:20.975657Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7488006190360804110:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:08:20.976724Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d52/r3tmp/tmpFcF1BV/pdisk_1.dat 2025-03-31T16:08:21.287179Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:08:21.290658Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:08:21.290756Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:08:21.334806Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62653, node 5 2025-03-31T16:08:21.511879Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:08:21.511904Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:08:21.511914Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:08:21.512043Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17064 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:08:21.979211Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:08:21.988561Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:08:21.997259Z node 5 :TICKET_PARSER ERROR: Ticket AKIA****MPLE (B3EDC139): Access key signature is not supported >> test_sql_streaming.py::test[suites-GroupByHopTimeExtractorUnusedColumns-default.txt] [FAIL] >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v0-fifo] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::CheckCastUtf8ToString-StreamLookupJoin+NotNull [GOOD] Test command err: Trying to start YDB, gRPC: 21504, MsgBus: 11599 2025-03-31T16:08:05.901697Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488006125141717637:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:08:05.902111Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00199c/r3tmp/tmpd7CPMn/pdisk_1.dat 2025-03-31T16:08:06.964496Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:08:06.964599Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:08:06.977078Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:08:06.977482Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:08:07.074020Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21504, node 1 2025-03-31T16:08:07.355280Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:08:07.355308Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:08:07.355316Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:08:07.355461Z node 1 :NET_CLASSIFIER ERROR: got bad distributable 
configuration TClient is connected to server localhost:11599 TClient is connected to server localhost:11599 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:08:08.748102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:08:08.772600Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:08:08.800678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:08:09.258480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:08:09.669536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:08:09.784676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:08:10.899459Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488006125141717637:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:08:10.899538Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:08:12.636197Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488006155206490359:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:08:12.636352Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:08:13.235732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:08:13.293794Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:08:13.374619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:08:13.448830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:08:13.521396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:08:13.610141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:08:13.735414Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488006159501458180:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:08:13.735493Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:08:13.735744Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488006159501458185:2467], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:08:13.739934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:08:13.772597Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488006159501458187:2468], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:08:13.866720Z node 1 :TX_PROXY ERROR: Actor# [1:7488006159501458244:3465] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:08:15.562253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:08:15.666035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 61707, MsgBus: 12329 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00199c/r3tmp/tmptvL1TK/pdisk_1.dat 2025-03-31T16:08:18.471325Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:08:18.520675Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:08:18.556991Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:08:18.557073Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:08:18.564512Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61707, node 2 2025-03-31T16:08:18.790232Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:08:18.790255Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:08:18.790262Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:08:18.790381Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12329 TClient is connected to server localhost:12329 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:08:19.702008Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:08:19.719146Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:08:19.738069Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:08:19.817809Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:08:20.112114Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:08:20.247835Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:08:23.372361Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488006199583625183:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:08:23.372475Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:08:23.431105Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-31T16:08:23.492243Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-31T16:08:23.543753Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-31T16:08:23.593999Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-31T16:08:23.673924Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-31T16:08:23.754759Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-31T16:08:23.826256Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488006199583625705:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:08:23.826351Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:08:23.826605Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488006199583625710:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:08:23.830201Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-31T16:08:23.857941Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488006199583625712:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-03-31T16:08:23.937946Z node 2 :TX_PROXY ERROR: Actor# [2:7488006199583625768:3460] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-31T16:08:25.288951Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-03-31T16:08:25.384886Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
>> TPersQueueTest::ReadRuleServiceTypeLimit [GOOD]
>> TPersQueueTest::ReadRuleDisallowDefaultServiceType
>> YdbIndexTable::MultiShardTableTwoIndexes [GOOD]
>> KqpIndexLookupJoin::InnerJoinLeftFilter-StreamLookup [GOOD]
|83.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/streaming_optimize/py3test >> test_sql_streaming.py::test[suites-GroupByHoppingWindowPercentile-default.txt] [FAIL]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_ymq_expiring_counters
|83.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/streaming_optimize/py3test >> test_sql_streaming.py::test[suites-WriteTwoTopics-default.txt] [FAIL]
|83.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/streaming_optimize/py3test >> test_sql_streaming.py::test[suites-GroupByHopTimeExtractorUnusedColumns-default.txt] [FAIL]
>> Secret::DeactivatedQueryService [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::InnerJoinLeftFilter-StreamLookup [GOOD]
Test command err:
Trying to start YDB, gRPC: 4581, MsgBus: 5673
2025-03-31T16:08:09.750112Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488006141172157109:2137];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:08:09.797748Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00199a/r3tmp/tmp4rPOqV/pdisk_1.dat
2025-03-31T16:08:10.379447Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:08:10.384948Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:08:10.385037Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:08:10.387327Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 4581, node 1
2025-03-31T16:08:10.527707Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T16:08:10.527734Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T16:08:10.527743Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T16:08:10.527856Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:5673
TClient is connected to server localhost:5673
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-31T16:08:11.974096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:08:11.986734Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-03-31T16:08:12.008124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:08:12.244936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:08:12.472494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:08:12.592278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:08:14.751403Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488006141172157109:2137];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:08:14.751489Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-31T16:08:15.317514Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488006166941962574:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:08:15.317637Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:08:16.081710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-31T16:08:16.174336Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-31T16:08:16.230571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-31T16:08:16.332645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-31T16:08:16.382557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-31T16:08:16.470154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-31T16:08:16.581994Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488006171236930401:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:08:16.582090Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:08:16.582556Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488006171236930406:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:08:16.589671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-31T16:08:16.612205Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488006171236930408:2466], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-03-31T16:08:16.702379Z node 1 :TX_PROXY ERROR: Actor# [1:7488006171236930463:3458] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-31T16:08:18.606848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-03-31T16:08:18.686148Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
2025-03-31T16:08:18.724902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480
2025-03-31T16:08:18.768996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480
2025-03-31T16:08:18.813507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480
2025-03-31T16:08:18.907237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480
Trying to start YDB, gRPC: 3485, MsgBus: 16689
2025-03-31T16:08:21.057067Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488006190929278843:2199];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:08:21.057222Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00199a/r3tmp/tmpJ8yl0M/pdisk_1.dat
2025-03-31T16:08:21.322079Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:08:21.324498Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:08:21.324630Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:08:21.325950Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 3485, node 2
2025-03-31T16:08:21.427707Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T16:08:21.427737Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T16:08:21.427745Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T16:08:21.427874Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:16689
TClient is connected to server localhost:16689
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
waiting...
2025-03-31T16:08:21.952956Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:08:21.970836Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
2025-03-31T16:08:22.149495Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:08:22.794016Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:08:22.882786Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:08:25.315226Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488006208109149656:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:08:25.315334Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:08:25.401424Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-31T16:08:25.459923Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-31T16:08:25.545221Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-31T16:08:25.591384Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-31T16:08:25.644622Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-31T16:08:25.711997Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-31T16:08:25.810755Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488006208109150172:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:08:25.810828Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:08:25.811303Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488006208109150177:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:08:25.815693Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-31T16:08:25.837471Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488006208109150179:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-03-31T16:08:25.915367Z node 2 :TX_PROXY ERROR: Actor# [2:7488006208109150233:3443] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-31T16:08:26.058918Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488006190929278843:2199];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:08:26.059009Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-31T16:08:27.438724Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-03-31T16:08:27.533991Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
2025-03-31T16:08:27.582518Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480
2025-03-31T16:08:27.642852Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480
2025-03-31T16:08:27.676585Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480
2025-03-31T16:08:27.710450Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480
>> TTicketParserTest::LoginEmptyTicketBad [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::DeactivatedQueryService [GOOD]
Test command err:
2025-03-31T16:08:14.769354Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:98:2144], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000e48/r3tmp/tmpQHZhNo/pdisk_1.dat
TServer::EnableGrpc on GrpcPort 21959, node 1
TClient is connected to server localhost:20091
2025-03-31T16:08:15.834686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480
2025-03-31T16:08:15.962888Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:08:15.966851Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T16:08:15.966915Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T16:08:15.966951Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T16:08:15.967409Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
2025-03-31T16:08:16.009066Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:08:16.009238Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:08:16.021707Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
Initialization finished
REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1
2025-03-31T16:08:28.292178Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:694:2580], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:08:28.292369Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:683:2575], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:08:28.300616Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:08:28.382865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480
2025-03-31T16:08:28.426881Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:697:2583], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking }
2025-03-31T16:08:28.535549Z node 1 :TX_PROXY ERROR: Actor# [1:748:2615] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-31T16:08:30.323594Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:757:2623], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:1:50: Error: Executing CREATE OBJECT SECRET
: Error: metadata provider service is disabled
2025-03-31T16:08:30.325415Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YzcxZDE2YjUtOTkxYTljZTctZDQ2MGVlZWItODU0NjY2ZmM=, ActorId: [1:681:2573], ActorState: ExecuteState, TraceId: 01jqpfc83n1vksbf1v11gm06e3, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
: Error: Execution, code: 1060
:1:50: Error: Executing CREATE OBJECT SECRET
: Error: metadata provider service is disabled
;EXPECTATION=0
FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1
>> TPersQueueTest::TopicServiceSimpleHappyWrites [GOOD]
>> TPersQueueTest::WhenDisableNodeAndCreateTopic_ThenAllPartitionsAreOnOtherNode
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queues_count_over_limit[tables_format_v0]
|83.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v0-std] [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/idx_test/unittest >> YdbIndexTable::MultiShardTableTwoIndexes [GOOD]
Test command err:
Trying to start YDB, gRPC: 25732, MsgBus: 9704
2025-03-31T16:03:33.903054Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004954446881196:2114];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:03:33.903125Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000e07/r3tmp/tmptZ11h4/pdisk_1.dat
2025-03-31T16:03:34.561765Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:03:34.563068Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:03:34.563167Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:03:34.569425Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 25732, node 1
2025-03-31T16:03:34.749474Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T16:03:34.749497Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T16:03:34.749508Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T16:03:34.749637Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:9704
TClient is connected to server localhost:9704
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-31T16:03:35.438209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:03:35.475812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:03:35.609587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:03:35.776161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:03:35.860003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:03:37.503166Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004971626752121:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:03:37.503281Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:03:37.826197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-31T16:03:37.853461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-31T16:03:37.880939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-31T16:03:37.913685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-31T16:03:37.943345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-31T16:03:37.984107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-31T16:03:38.037955Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004975921719925:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:03:38.038029Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:03:38.038214Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004975921719930:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:03:38.041975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-31T16:03:38.059510Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488004975921719932:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-03-31T16:03:38.131602Z node 1 :TX_PROXY ERROR: Actor# [1:7488004975921719988:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-31T16:03:38.903228Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488004954446881196:2114];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:03:38.946399Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-31T16:03:39.171041Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-03-31T16:03:40.989868Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710672. Ctx: { TraceId: 01jqpf3frr023ncq2dbmbk2826, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2IwYjcxZmItZmMyODE1M2YtY2U4N2IxZmUtZGJhZWE3Y2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:03:41.014827Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710673. Ctx: { TraceId: 01jqpf3frr6vp0f82f34j7xck3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWYyMTA4MzUtNjg3NDJhMC00YmI2MDA1OC03ZTEwMmI3NQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:03:41.036468Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710674. Ctx: { TraceId: 01jqpf3fs0abe28t2shfdqctaz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTkwMzE5OGItODhhNzY4ZDctODgzYWVhNGMtZmJkOGYwMjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:03:41.076361Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710676. Ctx: { TraceId: 01jqpf3ftmcw2qkpbpvd2ac5dc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Zjc2YWRiYmYtN2EzMzE0YmYtMTQ1YmRmMWYtZjRkY2M3ZTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:03:41.090519Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710677. Ctx: { TraceId: 01jqpf3ftm8fk8zm3qgjcff0cr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTEyZDc4NzgtMTdhZDljYjItZGY5MzYwYjMtMzA2YWMzNw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:03:41.092005Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01jqpf3frr023ncq2dbmbk2826, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2IwYjcxZmItZmMyODE1M2YtY2U4N2IxZmUtZGJhZWE3Y2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:03:41.093190Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710681. Ctx: { TraceId: 01jqpf3ftr9v2y9g5bxh9cgqzx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWQ4NjA1Ni0xMjA0NjNjMS02MDk5ZjFlLWEyNjk5YWQ5, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:03:41.094550Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710678. Ctx: { TraceId: 01jqpf3frr6vp0f82f34j7xck3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWYyMTA4MzUtNjg3NDJhMC00YmI2MDA1OC03ZTEwMmI3NQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:03:41.113545Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710679. Ctx: { TraceId: 01jqpf3ftr1cmw0s99mzbbgh7g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODUzNjVmMTktYmEzZDMzMjctYTA2ZTcwODItM2FmZGU0OTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:03:41.114669Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710680. Ctx: { TraceId: 01jqpf3ftr5dnvwv92ppjyfb57, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDAxZmVmYTUtZTMzMDY4YWQtYTEwZjdiZDQtM2JhOTNlNDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:03:41.115681Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710682. Ctx: { TraceId: 01jqpf3ftr9vbz79n17pf6d285, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTA1OWU0NjQtNmFhYWNkZGEtNTI3OWRiYWEtNmVkMGMxOGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:03:41.137631Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710683. Ctx: { TraceId: 01jqpf3ftr7r436k7 ... sion/3?node_id=2&id=ODE5NTYzM2UtNDc1OWU0ZjctM2JkMDUyMmItYjRiZGZiZDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:08:21.830172Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719474. Ctx: { TraceId: 01jqpfc2132y9egq3kx8fhcm3w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWVkYTM0NjEtOTMzNjNjYmYtZDVlZDEwNzAtNjY4YzY0NDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:08:21.832962Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719475. Ctx: { TraceId: 01jqpfc2195yfjzgb4xvte37ve, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWIxZDZmNGMtYTdkNGVmN2YtOTVjYmU5MWItNDI3MjY3MDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:08:21.833207Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719477. Ctx: { TraceId: 01jqpfc1yqaqvk4e2r903y38et, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzQ2YWY1NTgtMTRhZDkwYzktNTY2OTc5MzItZjNkZmY0ZmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:08:21.834812Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719476. Ctx: { TraceId: 01jqpfc20q1qqyd8807qr7hrdz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODE5NTYzM2UtNDc1OWU0ZjctM2JkMDUyMmItYjRiZGZiZDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:08:21.851976Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719478. Ctx: { TraceId: 01jqpfc1yqaqvk4e2r903y38et, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzQ2YWY1NTgtMTRhZDkwYzktNTY2OTc5MzItZjNkZmY0ZmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:08:21.858219Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719479. Ctx: { TraceId: 01jqpfc2132y9egq3kx8fhcm3w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWVkYTM0NjEtOTMzNjNjYmYtZDVlZDEwNzAtNjY4YzY0NDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:08:21.862455Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719480. Ctx: { TraceId: 01jqpfc2195yfjzgb4xvte37ve, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWIxZDZmNGMtYTdkNGVmN2YtOTVjYmU5MWItNDI3MjY3MDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:08:21.873438Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719481. Ctx: { TraceId: 01jqpfc22h937bk70n71emvwm5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OThkZmJkZjktYWVjMmFiMi1iYTkzMmQ2YS05MTgxODAwOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:08:21.898732Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719482. Ctx: { TraceId: 01jqpfc22h937bk70n71emvwm5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OThkZmJkZjktYWVjMmFiMi1iYTkzMmQ2YS05MTgxODAwOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:08:21.918420Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719484. Ctx: { TraceId: 01jqpfc24m1pfaz2mh07peg0gj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODE5NTYzM2UtNDc1OWU0ZjctM2JkMDUyMmItYjRiZGZiZDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:08:21.919188Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719483. Ctx: { TraceId: 01jqpfc24kc0tp03cgvf59rd4f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGM2OGY1OTMtMWZkMjFmNDgtZGZiNTgzMzctNTQwNWEyNmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:08:21.936399Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719485. Ctx: { TraceId: 01jqpfc24r74z4etb8tn3hwd9r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzQ2YWY1NTgtMTRhZDkwYzktNTY2OTc5MzItZjNkZmY0ZmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:08:21.936910Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719486. Ctx: { TraceId: 01jqpfc24m1pfaz2mh07peg0gj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODE5NTYzM2UtNDc1OWU0ZjctM2JkMDUyMmItYjRiZGZiZDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:08:21.938675Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719487. Ctx: { TraceId: 01jqpfc24kc0tp03cgvf59rd4f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGM2OGY1OTMtMWZkMjFmNDgtZGZiNTgzMzctNTQwNWEyNmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:08:21.942840Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719488. Ctx: { TraceId: 01jqpfc24ze2xkzykk32krye0w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWVkYTM0NjEtOTMzNjNjYmYtZDVlZDEwNzAtNjY4YzY0NDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:08:21.951748Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719489. Ctx: { TraceId: 01jqpfc24m1pfaz2mh07peg0gj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODE5NTYzM2UtNDc1OWU0ZjctM2JkMDUyMmItYjRiZGZiZDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:08:21.976208Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719490. Ctx: { TraceId: 01jqpfc24r74z4etb8tn3hwd9r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzQ2YWY1NTgtMTRhZDkwYzktNTY2OTc5MzItZjNkZmY0ZmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:08:22.000656Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719491. Ctx: { TraceId: 01jqpfc24ze2xkzykk32krye0w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWVkYTM0NjEtOTMzNjNjYmYtZDVlZDEwNzAtNjY4YzY0NDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:08:22.030999Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719493. Ctx: { TraceId: 01jqpfc26g9vwm58b7y851g6k2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWIxZDZmNGMtYTdkNGVmN2YtOTVjYmU5MWItNDI3MjY3MDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:08:22.032636Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719492. Ctx: { TraceId: 01jqpfc24r74z4etb8tn3hwd9r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzQ2YWY1NTgtMTRhZDkwYzktNTY2OTc5MzItZjNkZmY0ZmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:08:22.100407Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719494. Ctx: { TraceId: 01jqpfc29bef89ebhjsqhgvgrv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OThkZmJkZjktYWVjMmFiMi1iYTkzMmQ2YS05MTgxODAwOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:08:22.111562Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719495. Ctx: { TraceId: 01jqpfc26g9vwm58b7y851g6k2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWIxZDZmNGMtYTdkNGVmN2YtOTVjYmU5MWItNDI3MjY3MDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:08:22.138045Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719496. Ctx: { TraceId: 01jqpfc26g9vwm58b7y851g6k2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWIxZDZmNGMtYTdkNGVmN2YtOTVjYmU5MWItNDI3MjY3MDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:08:22.144723Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719497. Ctx: { TraceId: 01jqpfc29r88kzz965kc526wvd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGM2OGY1OTMtMWZkMjFmNDgtZGZiNTgzMzctNTQwNWEyNmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:08:22.152600Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719498. Ctx: { TraceId: 01jqpfc29bef89ebhjsqhgvgrv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OThkZmJkZjktYWVjMmFiMi1iYTkzMmQ2YS05MTgxODAwOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:08:22.181975Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719499. Ctx: { TraceId: 01jqpfc29r88kzz965kc526wvd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGM2OGY1OTMtMWZkMjFmNDgtZGZiNTgzMzctNTQwNWEyNmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:08:22.209604Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719500. Ctx: { TraceId: 01jqpfc2bb41habg7a8025n9ws, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODE5NTYzM2UtNDc1OWU0ZjctM2JkMDUyMmItYjRiZGZiZDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:08:22.270111Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719501. Ctx: { TraceId: 01jqpfc2db7w9snd04f1yyjntr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWVkYTM0NjEtOTMzNjNjYmYtZDVlZDEwNzAtNjY4YzY0NDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:08:22.309033Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719502. Ctx: { TraceId: 01jqpfc2db7w9snd04f1yyjntr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWVkYTM0NjEtOTMzNjNjYmYtZDVlZDEwNzAtNjY4YzY0NDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:08:22.309764Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719503. Ctx: { TraceId: 01jqpfc2f884n46gx6gzwkfgxa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzQ2YWY1NTgtMTRhZDkwYzktNTY2OTc5MzItZjNkZmY0ZmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:08:22.313738Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719505. Ctx: { TraceId: 01jqpfc2bb41habg7a8025n9ws, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODE5NTYzM2UtNDc1OWU0ZjctM2JkMDUyMmItYjRiZGZiZDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:08:22.323596Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719504. Ctx: { TraceId: 01jqpfc2gc3gm86xe0z7aw0s59, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OThkZmJkZjktYWVjMmFiMi1iYTkzMmQ2YS05MTgxODAwOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:08:22.344595Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719506. Ctx: { TraceId: 01jqpfc2fxan723x3atmtg7588, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWIxZDZmNGMtYTdkNGVmN2YtOTVjYmU5MWItNDI3MjY3MDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
finished with status: SUCCESS
2025-03-31T16:08:22.354486Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719507. Ctx: { TraceId: 01jqpfc2f884n46gx6gzwkfgxa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzQ2YWY1NTgtMTRhZDkwYzktNTY2OTc5MzItZjNkZmY0ZmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
finished with status: SUCCESS
2025-03-31T16:08:22.376819Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976719508. Ctx: { TraceId: 01jqpfc2fxan723x3atmtg7588, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWIxZDZmNGMtYTdkNGVmN2YtOTVjYmU5MWItNDI3MjY3MDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
finished with status: SUCCESS
finished with status: SUCCESS
finished with status: SUCCESS
>> KqpJoinOrder::CanonizedJoinOrderTPCH6
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKString_Reboot [GOOD]
Test command err:
2025-03-31T16:01:53.013813Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0
2025-03-31T16:01:53.305868Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor;
2025-03-31T16:01:53.343371Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished;
2025-03-31T16:01:53.343716Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184
2025-03-31T16:01:53.357816Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0;
2025-03-31T16:01:53.358082Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules;
2025-03-31T16:01:53.358343Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks;
2025-03-31T16:01:53.358465Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner;
2025-03-31T16:01:53.358596Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId;
2025-03-31T16:01:53.358732Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup;
2025-03-31T16:01:53.358869Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer;
2025-03-31T16:01:53.359011Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks;
2025-03-31T16:01:53.361279Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks;
2025-03-31T16:01:53.361511Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks;
2025-03-31T16:01:53.361653Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2;
2025-03-31T16:01:53.361765Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks;
2025-03-31T16:01:53.411499Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184
2025-03-31T16:01:53.411679Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules;
2025-03-31T16:01:53.411737Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules;
2025-03-31T16:01:53.411970Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found;
2025-03-31T16:01:53.412158Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1;
2025-03-31T16:01:53.412237Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks;
2025-03-31T16:01:53.412286Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks;
2025-03-31T16:01:53.412398Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found;
2025-03-31T16:01:53.412466Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2;
2025-03-31T16:01:53.412510Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner;
2025-03-31T16:01:53.412600Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner;
2025-03-31T16:01:53.412796Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found;
2025-03-31T16:01:53.412889Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4;
2025-03-31T16:01:53.412950Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId;
2025-03-31T16:01:53.412987Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId;
2025-03-31T16:01:53.413100Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found;
2025-03-31T16:01:53.413170Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6;
2025-03-31T16:01:53.413225Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup;
2025-03-31T16:01:53.413274Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup;
2025-03-31T16:01:53.413368Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8;
2025-03-31T16:01:53.413415Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer;
2025-03-31T16:01:53.413448Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer;
2025-03-31T16:01:53.413514Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9;
2025-03-31T16:01:53.413556Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks;
2025-03-31T16:01:53.413586Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks;
2025-03-31T16:01:53.414119Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=58;
2025-03-31T16:01:53.414217Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=41;
2025-03-31T16:01:53.414305Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=39;
2025-03-31T16:01:53.414398Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=43;
2025-03-31T16:01:53.414609Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10;
2025-03-31T16:01:53.414691Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks;
2025-03-31T16:01:53.414732Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks;
2025-03-31T16:01:53.414975Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11;
2025-03-31T16:01:53.415037Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks;
2025-03-31T16:01:53.415074Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks;
2025-03-31T16:01:53.423998Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13;
2025-03-31T16:01:53.424112Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2;
2025-03-31T16:01:53.424174Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2;
2025-03-31T16:01:53.424442Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15;
2025-03-31T16:01:53.424537Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks;
2025-03-31T16:01:53.424583Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks;
2025-03-31T16:01:53.424728Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16;
2025-03-31T16:01:53.424789Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished;
2025-03-31T16:01:53.424854Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ...
B:0:2688];;column_id:8;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:29;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:30;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:32;blob_range:[NO_BLOB:0:2672];;column_id:8;chunk_idx:33;blob_range:[NO_BLOB:0:2664];;column_id:8;chunk_idx:34;blob_range:[NO_BLOB:0:8352];;column_id:8;chunk_idx:35;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:36;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:37;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:55;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:56;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:57;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:58;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:64;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:65;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:66;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:67;blob_range:[NO_BLOB:0:2672];;column_id:8;chunk_idx:68;blob_range:[NO_BLOB:0:2664];;column_id:8;chunk_idx:69;blob_range:[NO_BLOB:0:8336];;column_id:9;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:8;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:9;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:10;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:11;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:12;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:13;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:14;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:15;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:20;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:21;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:22;blob_range:[NO_BLOB
:0:2688];;column_id:9;chunk_idx:23;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:29;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:30;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:32;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:33;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:34;blob_range:[NO_BLOB:0:8352];;column_id:9;chunk_idx:35;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:36;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:37;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:55;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:56;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:57;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:58;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:64;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:65;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:66;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:67;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:68;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:69;blob_range:[NO_BLOB:0:8336];;column_id:7;chunk_idx:0;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:1;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:2;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:3;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:4;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:5;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:6;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:7;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:8;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:9;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:10;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:11;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:12;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:13;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:14;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:15;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:16;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:17;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:18;blob_range:[NO_BLOB:
0:2752];;column_id:7;chunk_idx:19;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:20;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:21;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:22;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:23;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:24;blob_range:[NO_BLOB:0:10208];;column_id:7;chunk_idx:25;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:26;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:27;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:28;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:29;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:30;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:31;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:32;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:33;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:34;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:35;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:36;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:37;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:38;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:39;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:40;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:41;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:42;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:43;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:44;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:45;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:46;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:47;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:48;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:49;blob_range:[NO_BLOB:0:10208];;column_id:5;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:7;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:8;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:9;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:10;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:11;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:12;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:13;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:14;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:15;blob_range:[NO_BLOB:0:9400];;column_id:5;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:23;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:28;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:29;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:30;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:31;blob_range:[NO_BLOB:0:9392];;;;switched=(portion_id:60;path_id:1;records_count:23698;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2507632;index_
size:20;meta:((produced=SPLIT_COMPACTED;)););(portion_id:56;path_id:1;records_count:23698;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2109896;index_size:20;meta:((produced=INSERTED;)););; 2025-03-31T16:08:25.585132Z node 1 :TX_COLUMNSHARD INFO: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;tablet_id=9437184;parent_id=[1:11572:13199];fline=general_compaction.cpp:135;event=blobs_created;appended=1;switched=2; 2025-03-31T16:08:25.587136Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:11572:13199];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::LoginEmptyTicketBad [GOOD] Test command err: 2025-03-31T16:07:37.223589Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488006002396348023:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:07:37.228268Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d54/r3tmp/tmpxf9wr4/pdisk_1.dat 2025-03-31T16:07:38.197506Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:07:38.218807Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:07:38.218933Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:07:38.227611Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5968, node 1 2025-03-31T16:07:38.381622Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:07:38.381876Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:07:38.381885Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:07:38.382016Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18114 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:07:38.855530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:07:38.987198Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-31T16:07:39.066842Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-31T16:07:39.066963Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:07:39.068910Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****0I0A (B8244317) () has now valid token of user1 2025-03-31T16:07:39.068940Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A4 success 2025-03-31T16:07:43.736185Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488006031254472082:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:07:43.757015Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d54/r3tmp/tmp0IC3QF/pdisk_1.dat 2025-03-31T16:07:44.103940Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:07:44.152962Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:07:44.153073Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:07:44.156516Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7384, node 2 2025-03-31T16:07:44.367151Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:07:44.367172Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:07:44.367179Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:07:44.367293Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9952 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:07:44.838407Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:07:44.851924Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:07:44.977494Z node 2 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-31T16:07:45.034105Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-31T16:07:45.034138Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:07:45.034903Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****J6-Q (C6AF168F) () has now valid token of user1 2025-03-31T16:07:45.034920Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A4 success 2025-03-31T16:07:50.783688Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488006057556135890:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:07:50.783736Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d54/r3tmp/tmpjejoZN/pdisk_1.dat 2025-03-31T16:07:51.007855Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:07:51.058893Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:07:51.058993Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:07:51.064576Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2418, node 3 2025-03-31T16:07:51.247905Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:07:51.247929Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:07:51.247938Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:07:51.248089Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22382 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:07:51.700961Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:07:51.716683Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:07:51.835119Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-31T16:07:51.880106Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-31T16:07:51.880146Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:07:51.880993Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****Y6RQ (E5413B2F) () has now valid token of user1 2025-03-31T16:07:51.881008Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A4 success 2025-03-31T16:07:51.882438Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-31T16:07:54.797514Z node 3 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****Y6RQ (E5413B2F) 2025-03-31T16:07:54.797991Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****Y6RQ (E5413B2F) () has now valid token of user1 2025-03-31T16:07:55.794351Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488006057556135890:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:07:55.794448Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:07:57.803674Z node 3 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****Y6RQ (E5413B2F) 2025-03-31T16:07:57.804082Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****Y6RQ (E5413B2F) () has now valid token of user1 2025-03-31T16:08:00.812104Z node 3 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****Y6RQ (E5413B2F) 2025-03-31T16:08:00.812450Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****Y6RQ (E5413B2F) () has now valid token of user1 2025-03-31T16:08:01.886104Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-31T16:08:04.823241Z node 3 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****Y6RQ (E5413B2F) 2025-03-31T16:08:04.823607Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****Y6RQ (E5413B2F) () has now valid token of user1 2025-03-31T16:08:05.982307Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:08:05.982339Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:08:09.831362Z node 3 :TICKET_PARSER 
DEBUG: Refreshing ticket eyJh****Y6RQ (E5413B2F) 2025-03-31T16:08:09.831771Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****Y6RQ (E5413B2F) () has now valid token of user1 2025-03-31T16:08:13.124340Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488006159951916281:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:08:13.124398Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d54/r3tmp/tmpngT20J/pdisk_1.dat 2025-03-31T16:08:13.619654Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:08:13.632124Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:08:13.632259Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:08:13.639027Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20279, node 4 2025-03-31T16:08:13.842651Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:08:13.842689Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:08:13.842697Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:08:13.842844Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16204 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:08:14.528853Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:08:14.683049Z node 4 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-31T16:08:14.706681Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-31T16:08:14.706726Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:08:14.707630Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****InlA (DAF6D09D) () has now valid token of user1 2025-03-31T16:08:14.707652Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A4 success 2025-03-31T16:08:14.708425Z node 4 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-31T16:08:18.128939Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7488006159951916281:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:08:18.146352Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:08:20.213616Z node 4 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****InlA (DAF6D09D) 2025-03-31T16:08:20.213936Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****InlA (DAF6D09D) () has now permanent error message 'User not found' 2025-03-31T16:08:24.219986Z node 4 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****InlA (DAF6D09D) 2025-03-31T16:08:26.027235Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7488006211364012341:2144];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d54/r3tmp/tmpe3tKS8/pdisk_1.dat 2025-03-31T16:08:26.254625Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:08:26.305690Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:08:26.344515Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:08:26.344620Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:08:26.353908Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25786, node 5 2025-03-31T16:08:26.571871Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:08:26.571893Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:08:26.571903Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:08:26.572056Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23238 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:08:27.309608Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:08:27.319976Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:08:27.507855Z node 5 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-31T16:08:27.522559Z node 5 :TICKET_PARSER ERROR: Ticket **** (00000000): Ticket is empty |83.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> YdbIndexTable::MultiShardTableOneUniqIndexDataColumn [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v1-fifo] |83.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/sql/large/ydb-tests-sql-large |83.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/sql/large/ydb-tests-sql-large |83.1%| [LD] {RESULT} $(B)/ydb/tests/sql/large/ydb-tests-sql-large >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_sending_duplicates [GOOD] |83.1%| [TA] $(B)/ydb/core/security/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_access_key_id[tables_format_v0] >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithValidTokenButWithoutACL_SessionClosedWithUnauthorizedError [GOOD] >> TPersQueueCommonTest::TestWriteWithRateLimiterWithBlobsRateLimit [GOOD] >> TPersQueueCommonTest::TestWriteWithRateLimiterWithUserPayloadRateLimit |83.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/idx_test/unittest >> YdbIndexTable::MultiShardTableOneUniqIndexDataColumn [GOOD] Test command err: Trying to start YDB, gRPC: 6805, MsgBus: 62777 2025-03-31T16:03:33.135326Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004956715056216:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:33.143700Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000e22/r3tmp/tmpuIYJKO/pdisk_1.dat 2025-03-31T16:03:33.504679Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:03:33.521817Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:03:33.521967Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:03:33.525915Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6805, node 1 2025-03-31T16:03:33.613981Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:03:33.614000Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:03:33.614012Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:03:33.614157Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62777 TClient is connected to server localhost:62777 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:03:34.190280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:03:34.230532Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:03:34.247328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:34.403470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:34.597651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:34.669795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:03:36.516213Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004969599959852:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:36.516372Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:36.822191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:03:36.858570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:03:36.907172Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:03:36.943589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:03:36.981513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:03:37.018884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:03:37.072553Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004973894927659:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:37.072634Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:37.072892Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004973894927664:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:03:37.076917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:03:37.087215Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488004973894927666:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:03:37.167426Z node 1 :TX_PROXY ERROR: Actor# [1:7488004973894927719:3443] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:03:38.118205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:03:38.128648Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488004956715056216:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:03:38.128720Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:03:39.285828Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710672. Ctx: { TraceId: 01jqpf3e444cgdjkyqs4bqk7xg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWEyYzdkZTQtODg3MDNjYmQtNzY1NjllNzAtYmJjZTIxZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:03:39.293436Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710676. Ctx: { TraceId: 01jqpf3e3ybrw4kdndxa0w0r0m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjgxN2RlYmItOGMzYmVlZGUtMzJkN2YwZWItOTQ3MTQ1ODY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:03:39.293975Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01jqpf3e3y4gkt0gyy4f45pcmw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWJlY2Q2OTAtZDlhOWVkMDQtYmQ0NzU1ZTAtMzM3NzZjYWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:03:39.294570Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710673. Ctx: { TraceId: 01jqpf3e44ch2ss6sv9dbw2d16, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODdhYzNhMjgtNjUzOTQ4MjktNmU0ZDFiM2MtODgwNDFkNjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:03:39.295352Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710674. Ctx: { TraceId: 01jqpf3e476nnztqc2e5s6cwpn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWZlZjI5MmItNDliNDYzZjgtM2RhMTkwYjMtOWRiN2Y5OGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:03:39.295720Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710679. Ctx: { TraceId: 01jqpf3e4d3zpep47h53vsrh9y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Njc5Y2I5NjktYjhhNzhlMDQtZDA0ZTU5MTAtNWQ5MjkzZWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:03:39.297844Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710677. Ctx: { TraceId: 01jqpf3e4c7whhh44m33qrwv9v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTA3ODkwYWItZjVmMzRkN2QtYTE4YjM1MWMtOTgxMzgxNWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:03:39.298774Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710678. 
Ctx: { TraceId: 01jqpf3e4de3jqw7r25x6fhs84, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTI1MGY0ZGYtZmQ3NmRlOWMtOTU0NWJhMy00NDc0MWYxYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:03:39.300293Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710680. Ctx: { TraceId: 01jqpf3e4d45rdn2n3xxs423fe, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWZiMWQwNmUtYmMxMmU4Ni0yNDZjNDE3OC01N2ZlYzg4Yg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:03:39.300953Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710681. Ctx: { TraceId: 01jqpf3e4dfyzz2yxz9m6h1qac, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmJlYjg3MDgtOTBiNjk2YTgtNWVjNWZjYS05Zjg4YzM4Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:03:39.314188Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710682. Ctx: { TraceId: 01jqpf3e3ybrw4kdndxa0w0r0m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjgxN2RlYmItOGMzYmVlZGUtMzJkN2YwZWItOTQ3MTQ1ODY=, CurrentExecutionId: , CustomerSupplie ... sion/3?node_id=2&id=NmZlNmZhMDYtYzA5YWI3MzEtM2M1YTQ3NTItNzUzYjNjODE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:08:26.760019Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976726935. Ctx: { TraceId: 01jqpfc6tnatznbyz6rxavk8ze, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjJhOTQ2MDAtYjJiZTgyNmEtM2FmZTU5MGMtODM5ZTkyNjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:08:26.764342Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976726936. Ctx: { TraceId: 01jqpfc6v090jkx10bnbv1rxyv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NmZlNmZhMDYtYzA5YWI3MzEtM2M1YTQ3NTItNzUzYjNjODE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:08:26.781087Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976726937. Ctx: { TraceId: 01jqpfc6v58a3hkwy3q661cb0d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MzNkZTYyOWUtZmEzZDliZjMtNDAwOTc4YmMtZWFmYWNkY2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:08:26.785360Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976726938. Ctx: { TraceId: 01jqpfc6v090jkx10bnbv1rxyv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NmZlNmZhMDYtYzA5YWI3MzEtM2M1YTQ3NTItNzUzYjNjODE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:08:26.797609Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976726939. Ctx: { TraceId: 01jqpfc6v58a3hkwy3q661cb0d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MzNkZTYyOWUtZmEzZDliZjMtNDAwOTc4YmMtZWFmYWNkY2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:08:26.797772Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976726940. Ctx: { TraceId: 01jqpfc6wm9v0z8fnn3wprbrzj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGFlNmIxMi1kY2FiZTFhZS00MDUxNmI3NS1jY2MzN2VlMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:08:26.814994Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976726941. 
Ctx: { TraceId: 01jqpfc6v58a3hkwy3q661cb0d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MzNkZTYyOWUtZmEzZDliZjMtNDAwOTc4YmMtZWFmYWNkY2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:08:26.820667Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976726942. Ctx: { TraceId: 01jqpfc6xadmb1kgp66b06xda7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MWQ4MzRiNTMtMmIyZmUxNTEtOGM3ZDJhNmQtNTYzZGVjNmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:08:26.824641Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976726943. Ctx: { TraceId: 01jqpfc6wm9v0z8fnn3wprbrzj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGFlNmIxMi1kY2FiZTFhZS00MDUxNmI3NS1jY2MzN2VlMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:08:26.833497Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976726944. Ctx: { TraceId: 01jqpfc6xadmb1kgp66b06xda7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MWQ4MzRiNTMtMmIyZmUxNTEtOGM3ZDJhNmQtNTYzZGVjNmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:08:26.840808Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976726945. Ctx: { TraceId: 01jqpfc6wm9v0z8fnn3wprbrzj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGFlNmIxMi1kY2FiZTFhZS00MDUxNmI3NS1jY2MzN2VlMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:08:26.844612Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976726946. Ctx: { TraceId: 01jqpfc6y9b9bmnrhrwqd27hx9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjJhOTQ2MDAtYjJiZTgyNmEtM2FmZTU5MGMtODM5ZTkyNjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:08:26.862024Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976726947. Ctx: { TraceId: 01jqpfc6yqbmy98j4ztdfx9m1z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTU2MzhkMmItNmVjN2JhMGItYjMzYjY2NjMtYThjYzU2NDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:08:26.864070Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976726948. Ctx: { TraceId: 01jqpfc6xadmb1kgp66b06xda7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MWQ4MzRiNTMtMmIyZmUxNTEtOGM3ZDJhNmQtNTYzZGVjNmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:08:26.868350Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976726949. Ctx: { TraceId: 01jqpfc6y9b9bmnrhrwqd27hx9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjJhOTQ2MDAtYjJiZTgyNmEtM2FmZTU5MGMtODM5ZTkyNjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:08:26.883249Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976726950. Ctx: { TraceId: 01jqpfc6y9b9bmnrhrwqd27hx9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjJhOTQ2MDAtYjJiZTgyNmEtM2FmZTU5MGMtODM5ZTkyNjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:08:26.894757Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976726951. Ctx: { TraceId: 01jqpfc706557t6gpvkbefa6ek, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NmZlNmZhMDYtYzA5YWI3MzEtM2M1YTQ3NTItNzUzYjNjODE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:08:26.906910Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976726952. Ctx: { TraceId: 01jqpfc706557t6gpvkbefa6ek, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NmZlNmZhMDYtYzA5YWI3MzEtM2M1YTQ3NTItNzUzYjNjODE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:08:26.914376Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976726953. Ctx: { TraceId: 01jqpfc70j1hg7p6vgfsy9eak5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MzNkZTYyOWUtZmEzZDliZjMtNDAwOTc4YmMtZWFmYWNkY2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:08:26.914714Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976726954. Ctx: { TraceId: 01jqpfc70h65qp2js5h5hefd6f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGFlNmIxMi1kY2FiZTFhZS00MDUxNmI3NS1jY2MzN2VlMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:08:26.929840Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976726955. Ctx: { TraceId: 01jqpfc70j1hg7p6vgfsy9eak5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MzNkZTYyOWUtZmEzZDliZjMtNDAwOTc4YmMtZWFmYWNkY2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:08:26.940511Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976726956. Ctx: { TraceId: 01jqpfc712354w6rxrzxnm3f64, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTU2MzhkMmItNmVjN2JhMGItYjMzYjY2NjMtYThjYzU2NDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:08:26.943196Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976726957. Ctx: { TraceId: 01jqpfc70h65qp2js5h5hefd6f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGFlNmIxMi1kY2FiZTFhZS00MDUxNmI3NS1jY2MzN2VlMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:08:26.945840Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976726958. Ctx: { TraceId: 01jqpfc70j1hg7p6vgfsy9eak5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MzNkZTYyOWUtZmEzZDliZjMtNDAwOTc4YmMtZWFmYWNkY2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:08:26.973054Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976726960. Ctx: { TraceId: 01jqpfc70h65qp2js5h5hefd6f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGFlNmIxMi1kY2FiZTFhZS00MDUxNmI3NS1jY2MzN2VlMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:08:26.974189Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976726959. Ctx: { TraceId: 01jqpfc71ze6mfykk77mttxmr1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MWQ4MzRiNTMtMmIyZmUxNTEtOGM3ZDJhNmQtNTYzZGVjNmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:08:26.976975Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976726962. Ctx: { TraceId: 01jqpfc7249gbe8t6f74fptedk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjJhOTQ2MDAtYjJiZTgyNmEtM2FmZTU5MGMtODM5ZTkyNjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:08:26.977513Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976726961. Ctx: { TraceId: 01jqpfc712354w6rxrzxnm3f64, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTU2MzhkMmItNmVjN2JhMGItYjMzYjY2NjMtYThjYzU2NDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:08:26.999352Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976726963. Ctx: { TraceId: 01jqpfc712354w6rxrzxnm3f64, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTU2MzhkMmItNmVjN2JhMGItYjMzYjY2NjMtYThjYzU2NDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:08:27.002686Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976726964. Ctx: { TraceId: 01jqpfc7249gbe8t6f74fptedk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjJhOTQ2MDAtYjJiZTgyNmEtM2FmZTU5MGMtODM5ZTkyNjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
finished with status: SUCCESS
2025-03-31T16:08:27.065546Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976726965. Ctx: { TraceId: 01jqpfc7440zy1c03qbt2ae8w5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NmZlNmZhMDYtYzA5YWI3MzEtM2M1YTQ3NTItNzUzYjNjODE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
finished with status: SUCCESS
finished with status: SUCCESS
2025-03-31T16:08:27.084056Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976726966. Ctx: { TraceId: 01jqpfc75xdb54csr0530200dh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MWQ4MzRiNTMtMmIyZmUxNTEtOGM3ZDJhNmQtNTYzZGVjNmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:08:27.103935Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976726967. Ctx: { TraceId: 01jqpfc7249gbe8t6f74fptedk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjJhOTQ2MDAtYjJiZTgyNmEtM2FmZTU5MGMtODM5ZTkyNjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:08:27.106662Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976726968. Ctx: { TraceId: 01jqpfc75xdb54csr0530200dh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MWQ4MzRiNTMtMmIyZmUxNTEtOGM3ZDJhNmQtNTYzZGVjNmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:08:27.120223Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976726969. Ctx: { TraceId: 01jqpfc75xdb54csr0530200dh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MWQ4MzRiNTMtMmIyZmUxNTEtOGM3ZDJhNmQtNTYzZGVjNmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
finished with status: SUCCESS
finished with status: SUCCESS
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-fifo]
|83.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs
|83.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs
|83.2%| [TA] {RESULT} $(B)/ydb/core/security/ut/test-results/unittest/{meta.json ...
results_accumulator.log} |83.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v1-fifo] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_auth_header [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_fifo_groups_with_dlq_in_cloud[tables_format_v0] |83.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |83.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> KqpLimits::CancelAfterRoTxWithFollowerStreamLookupDepededRead [GOOD] |83.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v0] [GOOD] |83.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v1] |83.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::CancelAfterRoTxWithFollowerStreamLookupDepededRead [GOOD] Test command err: Trying to start YDB, gRPC: 63436, MsgBus: 26139 2025-03-31T16:02:34.852806Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004700862848541:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:02:34.854256Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ac9/r3tmp/tmpG9YueA/pdisk_1.dat 2025-03-31T16:02:35.371810Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:02:35.398420Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:02:35.398506Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:02:35.400823Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63436, node 1 2025-03-31T16:02:35.493543Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:02:35.493567Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:02:35.493582Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:02:35.493730Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26139 TClient is connected to server localhost:26139 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:02:36.140930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:36.169780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:36.280795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:36.462928Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:02:36.532230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:02:38.496240Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004718042719490:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:38.496422Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:38.853740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:02:38.888193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:02:38.920740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:02:38.956454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:02:38.989623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:02:39.025704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:02:39.121402Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004722337687304:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:39.121489Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:39.121902Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004722337687309:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:02:39.126195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:02:39.140545Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488004722337687311:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:02:39.237321Z node 1 :TX_PROXY ERROR: Actor# [1:7488004722337687366:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:02:39.853270Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488004700862848541:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:02:39.853333Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:02:40.227379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 20193, MsgBus: 4317 2025-03-31T16:02:42.518242Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488004738119920587:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:02:42.518516Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ac9/r3tmp/tmpX6lZ96/pdisk_1.dat 2025-03-31T16:02:42.688321Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:02:42.712542Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:02:42.712639Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:02:42.718699Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20193, node 2 2025-03-31T16:02:42.782689Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:02:42.782723Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:02:42.782730Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:02:42.782859Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4317 TClient is connected to server localhost:4317 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-31T16:02:43.324754Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:02:43.350343Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:43.449685Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:43.629517Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:43.709780Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but ... 
Tg4ZWRmMTItNTRjZDM2ZWItNjY4MWEwMDAtNzMxNzAxOTk=, ActorId: [4:7488005742877525693:2507], ActorState: ExecuteState, TraceId: 01jqpfbwgr4r6vaaeqn6d0pbpt, Create QueryResponse for error on request, msg: 2025-03-31T16:08:17.059488Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OTg4ZWRmMTItNTRjZDM2ZWItNjY4MWEwMDAtNzMxNzAxOTk=, ActorId: [4:7488005742877525693:2507], ActorState: ExecuteState, TraceId: 01jqpfbwytfebrcsnmatyk34qe, Create QueryResponse for error on request, msg: 2025-03-31T16:08:17.751411Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OTg4ZWRmMTItNTRjZDM2ZWItNjY4MWEwMDAtNzMxNzAxOTk=, ActorId: [4:7488005742877525693:2507], ActorState: ExecuteState, TraceId: 01jqpfbxmsfs8sdwk6ngs61ap1, Create QueryResponse for error on request, msg: 2025-03-31T16:08:18.219463Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OTg4ZWRmMTItNTRjZDM2ZWItNjY4MWEwMDAtNzMxNzAxOTk=, ActorId: [4:7488005742877525693:2507], ActorState: ExecuteState, TraceId: 01jqpfby36btxx8mskepvhk17s, Create QueryResponse for error on request, msg: 2025-03-31T16:08:18.869413Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OTg4ZWRmMTItNTRjZDM2ZWItNjY4MWEwMDAtNzMxNzAxOTk=, ActorId: [4:7488005742877525693:2507], ActorState: ExecuteState, TraceId: 01jqpfbyj00kp1x6n1rw3tfsys, Create QueryResponse for error on request, msg: 2025-03-31T16:08:19.744277Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OTg4ZWRmMTItNTRjZDM2ZWItNjY4MWEwMDAtNzMxNzAxOTk=, ActorId: [4:7488005742877525693:2507], ActorState: ExecuteState, TraceId: 01jqpfbzk0etq3ep1rvdt6bdmk, Create QueryResponse for error on request, msg: 2025-03-31T16:08:20.223452Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OTg4ZWRmMTItNTRjZDM2ZWItNjY4MWEwMDAtNzMxNzAxOTk=, ActorId: [4:7488005742877525693:2507], ActorState: ExecuteState, TraceId: 01jqpfc01vce35qzdngy4v98ej, Create QueryResponse for error on request, msg: 2025-03-31T16:08:20.696823Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OTg4ZWRmMTItNTRjZDM2ZWItNjY4MWEwMDAtNzMxNzAxOTk=, ActorId: [4:7488005742877525693:2507], ActorState: ExecuteState, TraceId: 01jqpfc0gb0480nejxnr1nahfx, Create QueryResponse for error on request, msg: 2025-03-31T16:08:21.625895Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OTg4ZWRmMTItNTRjZDM2ZWItNjY4MWEwMDAtNzMxNzAxOTk=, ActorId: [4:7488005742877525693:2507], ActorState: ExecuteState, TraceId: 01jqpfc1dg1jpran6t1zc8kjbr, Create QueryResponse for error on request, msg: 2025-03-31T16:08:22.105342Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OTg4ZWRmMTItNTRjZDM2ZWItNjY4MWEwMDAtNzMxNzAxOTk=, ActorId: [4:7488005742877525693:2507], ActorState: ExecuteState, TraceId: 01jqpfc1wf7kxecq2f76xcfne4, Create QueryResponse for error on request, msg: 2025-03-31T16:08:22.935629Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OTg4ZWRmMTItNTRjZDM2ZWItNjY4MWEwMDAtNzMxNzAxOTk=, ActorId: [4:7488005742877525693:2507], ActorState: ExecuteState, TraceId: 01jqpfc2pb40m75zrgrr1gfq02, Create QueryResponse for error on request, msg: 2025-03-31T16:08:23.417441Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OTg4ZWRmMTItNTRjZDM2ZWItNjY4MWEwMDAtNzMxNzAxOTk=, ActorId: [4:7488005742877525693:2507], ActorState: ExecuteState, TraceId: 01jqpfc35ffnv5twkhas67fxv6, Create QueryResponse for error on request, msg: 2025-03-31T16:08:24.292138Z node 4 :KQP_SESSION WARN: SessionId: 
ydb://session/3?node_id=4&id=OTg4ZWRmMTItNTRjZDM2ZWItNjY4MWEwMDAtNzMxNzAxOTk=, ActorId: [4:7488005742877525693:2507], ActorState: ExecuteState, TraceId: 01jqpfc40r1hmn1ff33y0js09j, Create QueryResponse for error on request, msg: 2025-03-31T16:08:24.771473Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OTg4ZWRmMTItNTRjZDM2ZWItNjY4MWEwMDAtNzMxNzAxOTk=, ActorId: [4:7488005742877525693:2507], ActorState: ExecuteState, TraceId: 01jqpfc4fk5qefs5n8wrvwka7f, Create QueryResponse for error on request, msg: 2025-03-31T16:08:25.387555Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OTg4ZWRmMTItNTRjZDM2ZWItNjY4MWEwMDAtNzMxNzAxOTk=, ActorId: [4:7488005742877525693:2507], ActorState: ExecuteState, TraceId: 01jqpfc52ve0bmq3zcwkw96599, Create QueryResponse for error on request, msg: 2025-03-31T16:08:25.859762Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OTg4ZWRmMTItNTRjZDM2ZWItNjY4MWEwMDAtNzMxNzAxOTk=, ActorId: [4:7488005742877525693:2507], ActorState: ExecuteState, TraceId: 01jqpfc5hp6z0rmnz5nf09981m, Create QueryResponse for error on request, msg: 2025-03-31T16:08:26.471350Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OTg4ZWRmMTItNTRjZDM2ZWItNjY4MWEwMDAtNzMxNzAxOTk=, ActorId: [4:7488005742877525693:2507], ActorState: ExecuteState, TraceId: 01jqpfc64eay4e9vkgezbdg1wf, Create QueryResponse for error on request, msg: 2025-03-31T16:08:26.947275Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OTg4ZWRmMTItNTRjZDM2ZWItNjY4MWEwMDAtNzMxNzAxOTk=, ActorId: [4:7488005742877525693:2507], ActorState: ExecuteState, TraceId: 01jqpfc6kgbp71s3tg1wzebw8k, Create QueryResponse for error on request, msg: 2025-03-31T16:08:27.752202Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OTg4ZWRmMTItNTRjZDM2ZWItNjY4MWEwMDAtNzMxNzAxOTk=, ActorId: [4:7488005742877525693:2507], ActorState: ExecuteState, TraceId: 01jqpfc7cc64kne8f6vnxb3m4c, Create QueryResponse for error on request, msg: 2025-03-31T16:08:28.236268Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OTg4ZWRmMTItNTRjZDM2ZWItNjY4MWEwMDAtNzMxNzAxOTk=, ActorId: [4:7488005742877525693:2507], ActorState: ExecuteState, TraceId: 01jqpfc7vp054960g2kxwf9s3z, Create QueryResponse for error on request, msg: 2025-03-31T16:08:29.170735Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OTg4ZWRmMTItNTRjZDM2ZWItNjY4MWEwMDAtNzMxNzAxOTk=, ActorId: [4:7488005742877525693:2507], ActorState: ExecuteState, TraceId: 01jqpfc8rtfy33p9tv6bpsm57n, Create QueryResponse for error on request, msg: 2025-03-31T16:08:29.661145Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OTg4ZWRmMTItNTRjZDM2ZWItNjY4MWEwMDAtNzMxNzAxOTk=, ActorId: [4:7488005742877525693:2507], ActorState: ExecuteState, TraceId: 01jqpfc983bp0qhmytv7kv6q4m, Create QueryResponse for error on request, msg: 2025-03-31T16:08:30.567933Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OTg4ZWRmMTItNTRjZDM2ZWItNjY4MWEwMDAtNzMxNzAxOTk=, ActorId: [4:7488005742877525693:2507], ActorState: ExecuteState, TraceId: 01jqpfca3t1msmmjd7yx00nkhq, Create QueryResponse for error on request, msg: 2025-03-31T16:08:31.059440Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OTg4ZWRmMTItNTRjZDM2ZWItNjY4MWEwMDAtNzMxNzAxOTk=, ActorId: [4:7488005742877525693:2507], ActorState: ExecuteState, TraceId: 01jqpfcakqdxm8fknf2gpnpz4m, Create QueryResponse for error on request, msg: 2025-03-31T16:08:31.878327Z node 4 :KQP_SESSION WARN: SessionId: 
ydb://session/3?node_id=4&id=OTg4ZWRmMTItNTRjZDM2ZWItNjY4MWEwMDAtNzMxNzAxOTk=, ActorId: [4:7488005742877525693:2507], ActorState: ExecuteState, TraceId: 01jqpfcbdc48bxxdze4856wk4p, Create QueryResponse for error on request, msg: 2025-03-31T16:08:32.859977Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OTg4ZWRmMTItNTRjZDM2ZWItNjY4MWEwMDAtNzMxNzAxOTk=, ActorId: [4:7488005742877525693:2507], ActorState: ExecuteState, TraceId: 01jqpfccbv53g53am53rznkgsj, Create QueryResponse for error on request, msg: 2025-03-31T16:08:33.358206Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OTg4ZWRmMTItNTRjZDM2ZWItNjY4MWEwMDAtNzMxNzAxOTk=, ActorId: [4:7488005742877525693:2507], ActorState: ExecuteState, TraceId: 01jqpfccvccskhfjeb3m63ae59, Create QueryResponse for error on request, msg: 2025-03-31T16:08:34.192848Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OTg4ZWRmMTItNTRjZDM2ZWItNjY4MWEwMDAtNzMxNzAxOTk=, ActorId: [4:7488005742877525693:2507], ActorState: ExecuteState, TraceId: 01jqpfcdn7cfsd2e67xb0ps7aj, Create QueryResponse for error on request, msg: 2025-03-31T16:08:34.694848Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OTg4ZWRmMTItNTRjZDM2ZWItNjY4MWEwMDAtNzMxNzAxOTk=, ActorId: [4:7488005742877525693:2507], ActorState: ExecuteState, TraceId: 01jqpfce4zb9p1ntf5c1jgknzg, Create QueryResponse for error on request, msg: 2025-03-31T16:08:35.479592Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OTg4ZWRmMTItNTRjZDM2ZWItNjY4MWEwMDAtNzMxNzAxOTk=, ActorId: [4:7488005742877525693:2507], ActorState: ExecuteState, TraceId: 01jqpfcewnaar0366tehfewc4b, Create QueryResponse for error on request, msg: 2025-03-31T16:08:35.991065Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OTg4ZWRmMTItNTRjZDM2ZWItNjY4MWEwMDAtNzMxNzAxOTk=, ActorId: [4:7488005742877525693:2507], ActorState: ExecuteState, TraceId: 01jqpfcfd98w85kfkzdy6eb2z7, Create QueryResponse for error on request, msg: 2025-03-31T16:08:36.871615Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OTg4ZWRmMTItNTRjZDM2ZWItNjY4MWEwMDAtNzMxNzAxOTk=, ActorId: [4:7488005742877525693:2507], ActorState: ExecuteState, TraceId: 01jqpfcg8vasxh57padqgssazj, Create QueryResponse for error on request, msg: 2025-03-31T16:08:37.381331Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OTg4ZWRmMTItNTRjZDM2ZWItNjY4MWEwMDAtNzMxNzAxOTk=, ActorId: [4:7488005742877525693:2507], ActorState: ExecuteState, TraceId: 01jqpfcgrv1mtj3m8v8dbcw2jz, Create QueryResponse for error on request, msg: 2025-03-31T16:08:38.202111Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OTg4ZWRmMTItNTRjZDM2ZWItNjY4MWEwMDAtNzMxNzAxOTk=, ActorId: [4:7488005742877525693:2507], ActorState: ExecuteState, TraceId: 01jqpfchj3c2706vevknk69bve, Create QueryResponse for error on request, msg: 2025-03-31T16:08:38.731966Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OTg4ZWRmMTItNTRjZDM2ZWItNjY4MWEwMDAtNzMxNzAxOTk=, ActorId: [4:7488005742877525693:2507], ActorState: ExecuteState, TraceId: 01jqpfcj255hyszbde73grj6aj, Create QueryResponse for error on request, msg: 2025-03-31T16:08:39.245344Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OTg4ZWRmMTItNTRjZDM2ZWItNjY4MWEwMDAtNzMxNzAxOTk=, ActorId: [4:7488005742877525693:2507], ActorState: ExecuteState, TraceId: 01jqpfcjjy2f6y1vx6xf3faghh, Create QueryResponse for error on request, msg: 2025-03-31T16:08:39.842444Z node 4 :KQP_SESSION WARN: SessionId: 
ydb://session/3?node_id=4&id=OTg4ZWRmMTItNTRjZDM2ZWItNjY4MWEwMDAtNzMxNzAxOTk=, ActorId: [4:7488005742877525693:2507], ActorState: ExecuteState, TraceId: 01jqpfck5g13scec06bykggn9h, Create QueryResponse for error on request, msg: 2025-03-31T16:08:40.366818Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OTg4ZWRmMTItNTRjZDM2ZWItNjY4MWEwMDAtNzMxNzAxOTk=, ActorId: [4:7488005742877525693:2507], ActorState: ExecuteState, TraceId: 01jqpfckny7650t9gw52fkcypb, Create QueryResponse for error on request, msg: 2025-03-31T16:08:40.977745Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OTg4ZWRmMTItNTRjZDM2ZWItNjY4MWEwMDAtNzMxNzAxOTk=, ActorId: [4:7488005742877525693:2507], ActorState: ExecuteState, TraceId: 01jqpfcm8xaq2ant86phh2keyv, Create QueryResponse for error on request, msg: 2025-03-31T16:08:41.492164Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OTg4ZWRmMTItNTRjZDM2ZWItNjY4MWEwMDAtNzMxNzAxOTk=, ActorId: [4:7488005742877525693:2507], ActorState: ExecuteState, TraceId: 01jqpfcmrwemggb4zja0j8a1ks, Create QueryResponse for error on request, msg: 2025-03-31T16:08:42.011379Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OTg4ZWRmMTItNTRjZDM2ZWItNjY4MWEwMDAtNzMxNzAxOTk=, ActorId: [4:7488005742877525693:2507], ActorState: ExecuteState, TraceId: 01jqpfcn97ed3eytrfw2vpwk0b, Create QueryResponse for error on request, msg: >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v1] [GOOD] >> TPersQueueTest::PreferredCluster_DisabledRemoteClusterAndWriteSessionsWithDifferentPreferredClusterAndLaterRemoteClusterEnabled_SessionWithMismatchedClusterDiesAfterPreferredClusterEnabledAndOtherSessionsAlive [GOOD] >> TPersQueueTest::PreferredCluster_EnabledRemotePreferredClusterAndCloseClientSessionWithEnabledRemotePreferredClusterDelaySec_SessionDiesOnlyAfterDelay >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v0-fifo] [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v0-std] >> KqpTx::RollbackManyTx [GOOD] >> KqpTx::RollbackRoTx |83.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/s3/ydb-tests-fq-s3 |83.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/s3/ydb-tests-fq-s3 |83.2%| [LD] {RESULT} $(B)/ydb/tests/fq/s3/ydb-tests-fq-s3 >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v1-fifo] [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v1-std] >> TPersQueueTest::ReadRuleDisallowDefaultServiceType [GOOD] >> TPersQueueTest::ReadRuleServiceTypeMigration |83.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut |83.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut |83.2%| [LD] {RESULT} $(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut |83.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_fifo_groups_with_dlq_in_cloud[tables_format_v0] [GOOD] |83.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-fifo] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-std] >> TPersQueueTest::WhenDisableNodeAndCreateTopic_ThenAllPartitionsAreOnOtherNode [GOOD] >> 
TPersQueueTest::WhenTheTopicIsDeletedAfterDecompressingTheData_Compressed >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v1-fifo] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v1-std] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v0-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[std-tables_format_v0] [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsWithSpecificKeys [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsWithDeletesInTheBeginning >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v0-std] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[std-tables_format_v1] |83.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |83.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_sending_duplicates [GOOD] |83.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v1-std] [GOOD] |83.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_count_queues[tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_count_queues[tables_format_v1] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_access_key_id[tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_access_key_id[tables_format_v1] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[std-tables_format_v1] [GOOD] |83.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_prefix_kmeans/ydb-core-tx-datashard-ut_prefix_kmeans |83.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_prefix_kmeans/ydb-core-tx-datashard-ut_prefix_kmeans |83.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_prefix_kmeans/ydb-core-tx-datashard-ut_prefix_kmeans >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v0-std] [GOOD] >> test_retry.py::TestRetry::test_fail_first[kikimr0] |83.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/plans/ydb-tests-fq-plans |83.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/plans/ydb-tests-fq-plans |83.3%| [LD] {RESULT} $(B)/ydb/tests/fq/plans/ydb-tests-fq-plans >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_access_key_id[tables_format_v1] [GOOD] |83.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots |83.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots |83.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_purge_queue_counters [GOOD] |83.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut |83.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots |83.3%| [LD] 
{BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut |83.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots |83.3%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut |83.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_not_create_kesus |83.3%| [TA] $(B)/ydb/tests/fq/streaming_optimize/test-results/py3test/{meta.json ... results_accumulator.log} >> KqpTx::RollbackRoTx [GOOD] >> test_retry_high_rate.py::TestRetry::test_high_rate[kikimr0] |83.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots |83.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots |83.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots |83.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots |83.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots |83.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |83.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots |83.4%| [TA] {RESULT} $(B)/ydb/tests/fq/streaming_optimize/test-results/py3test/{meta.json ... results_accumulator.log} |83.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_data_erasure_reboots/ydb-core-tx-schemeshard-ut_data_erasure_reboots |83.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_data_erasure_reboots/ydb-core-tx-schemeshard-ut_data_erasure_reboots |83.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_data_erasure_reboots/ydb-core-tx-schemeshard-ut_data_erasure_reboots |83.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots |83.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut |83.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots |83.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut |83.4%| [LD] {RESULT} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut |83.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots |83.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/tenants/ydb-tests-functional-tenants |83.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/tenants/ydb-tests-functional-tenants |83.4%| [LD] {RESULT} $(B)/ydb/tests/functional/tenants/ydb-tests-functional-tenants >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v0] |83.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots |83.4%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots |83.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::RollbackRoTx [GOOD] Test command err: Trying to start YDB, gRPC: 6290, MsgBus: 22739 2025-03-31T16:08:27.808167Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488006220131520273:2197];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:08:27.808217Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001296/r3tmp/tmpwmfw0S/pdisk_1.dat 2025-03-31T16:08:29.095429Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:08:29.360302Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:08:29.360417Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:08:29.364699Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:08:29.461987Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6290, node 1 2025-03-31T16:08:29.580909Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:08:29.580937Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:08:30.469599Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:08:30.469619Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:08:30.469638Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:08:30.469782Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22739 TClient is connected to server localhost:22739 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
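For readers skimming this block: the KqpTx::RollbackRoTx output above records a test that rolls back a read-only transaction through the KQP layer. A rough client-side analogue of that operation, as a minimal sketch assuming the public YDB Python SDK (ydb package) and a placeholder endpoint/database rather than anything taken from this log:

    # Hedged sketch only: a client-side analogue of a read-only transaction
    # rollback, not the actual C++ unit test. Endpoint/database are placeholders.
    import ydb

    driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")
    driver.wait(timeout=5)                      # wait for discovery to finish

    session = driver.table_client.session().create()
    tx = session.transaction(ydb.OnlineReadOnly()).begin()
    tx.execute("SELECT 1;")                     # a read inside the transaction
    tx.rollback()                               # discard instead of committing

    driver.stop()
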
2025-03-31T16:08:32.808842Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488006220131520273:2197];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:08:32.835539Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:08:32.876979Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:08:33.029819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:08:33.408201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-31T16:08:33.614368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:08:33.722829Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:08:34.333202Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488006250196292938:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:08:34.333319Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:08:36.198791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:08:36.244797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:08:36.275882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:08:36.319327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:08:36.395213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:08:36.492137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:08:36.634044Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488006258786228119:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:08:36.634122Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:08:36.634333Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488006258786228124:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:08:36.640756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:08:36.661382Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488006258786228126:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:08:36.761121Z node 1 :TX_PROXY ERROR: Actor# [1:7488006258786228183:3475] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:08:44.374078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:08:44.374104Z node 1 :IMPORT WARN: Table profiles were not loaded Trying to start YDB, gRPC: 23728, MsgBus: 20700 2025-03-31T16:08:52.020004Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488006321657598742:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:08:52.020047Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001296/r3tmp/tmppWdxkh/pdisk_1.dat 2025-03-31T16:08:52.354238Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:08:52.376776Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:08:52.376878Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:08:52.380100Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23728, node 2 2025-03-31T16:08:52.614026Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:08:52.614057Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:08:52.614064Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:08:52.614196Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20700 TClient is connected to server localhost:20700 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:08:54.221210Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
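The sequence just above — TPoolFetcherActor reporting NOT_FOUND for the default pool, TPoolCreatorActor creating it, and the "doublechecking" retry ending in "Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it" — is a create-if-missing race, not a failure. A minimal sketch of the same idempotent pattern from a client, assuming YDB's CREATE RESOURCE POOL DDL and the public Python SDK; the option names are assumptions based on the workload manager, not confirmed by this log:

    # Hedged sketch of create-if-missing for a resource pool. CREATE RESOURCE
    # POOL syntax and the option names below are assumptions, not taken from
    # this log.
    import ydb

    driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")
    driver.wait(timeout=5)
    pool = ydb.QuerySessionPool(driver)
    try:
        pool.execute_with_retries(
            "CREATE RESOURCE POOL default WITH ("
            " CONCURRENT_QUERY_LIMIT = 100,"
            " QUEUE_SIZE = 1000"
            ")"
        )
    except ydb.Error as exc:
        # Losing the creation race (the "doublechecking" retry seen above)
        # surfaces as an already-exists style error; accepting it is fine.
        if "exist" not in str(exc).lower():
            raise
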
2025-03-31T16:08:54.266930Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:08:54.394925Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:08:54.729883Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:08:54.900902Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:08:57.024157Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488006321657598742:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:08:57.073637Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:08:59.432890Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488006356017338878:2413], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:08:59.432980Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:08:59.476528Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:08:59.539912Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:08:59.619267Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:08:59.702935Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:08:59.810688Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:08:59.906781Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:09:00.007277Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488006356017339402:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:09:00.007412Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:09:00.013376Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488006360312306703:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:09:00.018095Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:09:00.070978Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488006360312306705:2467], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:09:00.142067Z node 2 :TX_PROXY ERROR: Actor# [2:7488006360312306764:3461] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:09:05.053032Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OWU1NmFhYWYtMzc0NTNlNzktM2VkZDg3My03Yjc4ODFm, ActorId: [2:7488006373197208976:2513], ActorState: ReadyState, TraceId: 01jqpfdc7060je14mw1fawfm84, Create QueryResponse for error on request, msg: |83.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/functional-sqs-merge_split_common_table-fifo |83.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/functional-sqs-merge_split_common_table-fifo |83.4%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/functional-sqs-merge_split_common_table-fifo >> test_quoting.py::TestSqsQuotingWithKesus::test_creates_quoter >> KqpPg::TableDeleteWhere-useSink [GOOD] >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-std] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_count_queues[tables_format_v1] [GOOD] >> TPersQueueCommonTest::TestWriteWithRateLimiterWithUserPayloadRateLimit [GOOD] >> TPersQueueCommonTest::TestLimiterLimitsWithBlobsRateLimit >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v0-std] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-std] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v1-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_ymq_expiring_counters [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v1-std] >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v1-std] [GOOD] |83.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v1] [GOOD] |83.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |83.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v1-std] [GOOD] |83.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_fifo_groups_with_dlq_in_cloud[tables_format_v0] [GOOD] >> LabeledDbCounters::TwoTabletsKillOneTablet [GOOD] >> SystemView::AuthEffectivePermissions >> test_dispatch.py::TestMapping::test_mapping >> TPersQueueTest::PreferredCluster_EnabledRemotePreferredClusterAndCloseClientSessionWithEnabledRemotePreferredClusterDelaySec_SessionDiesOnlyAfterDelay [GOOD] >> TPersQueueTest::PreferredCluster_NonExistentPreferredCluster_SessionDiesOnlyAfterDelay >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v1-fifo] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v1-std] [GOOD] >> 
test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v1-std] |83.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |83.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_purge_queue_counters [GOOD] |83.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test |83.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test |83.5%| [LD] {RESULT} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test >> test_cp_ic.py::TestCpIc::test_discovery |83.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[std-tables_format_v1] [GOOD] |83.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots |83.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots |83.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::TableDeleteWhere-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 20933, MsgBus: 63381 2025-03-31T16:02:20.574997Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004640039413389:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:02:20.575071Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012f5/r3tmp/tmpcGBp9E/pdisk_1.dat 2025-03-31T16:02:21.043133Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:02:21.062845Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:02:21.062975Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:02:21.065546Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20933, node 1 2025-03-31T16:02:21.203673Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:02:21.203695Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:02:21.203702Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:02:21.203806Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63381 TClient is connected to server localhost:63381 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:02:21.866380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:02:21.881195Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:02:24.008566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce_pgbpchar_17472595041006102391_17823623939509273229' Unable to coerce value for pgbpchar: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) 2025-03-31T16:02:24.224550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce__pgbpchar_17472595041006102391_5352544928909966465' Unable to coerce value for _pgbpchar: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) 2025-03-31T16:02:24.309062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 abcd 2025-03-31T16:02:24.452678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 {abcd,abcd} 2025-03-31T16:02:24.620500Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 abcd 2025-03-31T16:02:24.811358Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 {"abcd ","abcd "} 2025-03-31T16:02:25.070910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce_pgvarchar_17472595041006102391_17823623939509273229' Unable to coerce value for pgvarchar: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character varying(2) 2025-03-31T16:02:25.175536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce__pgvarchar_17472595041006102391_5352544928909966465' Unable to coerce value for _pgvarchar: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character varying(2) 2025-03-31T16:02:25.267857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 abcd 2025-03-31T16:02:25.404955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 {abcd,abcd} 2025-03-31T16:02:25.571710Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488004640039413389:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:02:25.571756Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:02:25.590956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 abcd 2025-03-31T16:02:25.730487Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 {abcd,abcd} 2025-03-31T16:02:25.828517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce_pgbit_17472595041006102391_5866627432374416336' Unable to coerce value for pgbit: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: bit string length 4 does not match type bit(2) 2025-03-31T16:02:25.947982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce__pgbit_17472595041006102391_11087201080355820517' Unable to coerce value for _pgbit: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: bit string length 4 does not match type bit(2) 2025-03-31T16:02:26.064787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 1111 2025-03-31T16:02:26.208111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710691:0, at schemeshard: 72057594046644480 {1111,1111} 2025-03-31T16:02:26.323774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710694:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce_pgbit_10103374131519304989_5866627432374416336' Unable to coerce value for pgbit: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: bit string length 4 does not match type bit(6) 2025-03-31T16:02:26.395211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710695:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce__pgbit_10103374131519304989_11087201080355820517' Unable to coerce value for _pgbit: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: bit string length 4 does not match type bit(6) 2025-03-31T16:02:26.505770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710696:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce_pgvarbit_17472595041006102391_5866627432374416336' Unable to coerce value for pgvarbit: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: bit string too long for type bit varying(2) 2025-03-31T16:02:26.564667Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710697:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce__pgvarbit_17472595041006102391_11087201080355820517' Unable to coerce value for _pgvarbit: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: bit string too long for type bit varying(2) 2025-03-31T16:02:26.647425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710698:0, at schemeshard: 72057594046644480 1111 2025-03-31T16:02:26.811892Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710701:0, at schemeshard: 72057594046644480 {1111,1111} 2025-03-31T16:02:26.960691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710704:0, at schemeshard: 72057594046644480 1111 2025-03-31T16:02:27.102605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710707:0, at schemeshard: 72057594046644480 {1111,1111} 2025-03-31T16:02:27.208434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 2814749767107 ... 6644480 2025-03-31T16:08:58.438472Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710804:0, at schemeshard: 72057594046644480 1562 2025-03-31T16:08:58.664967Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:08:58.702321Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710806:0, at schemeshard: 72057594046644480 2025-03-31T16:08:58.978370Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710807:0, at schemeshard: 72057594046644480 600 2025-03-31T16:08:59.296180Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:08:59.345462Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710809:0, at schemeshard: 72057594046644480 2025-03-31T16:08:59.517558Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:08:59.561673Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710811:0, at schemeshard: 72057594046644480 628 2025-03-31T16:08:59.722350Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:08:59.775238Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710813:0, at schemeshard: 72057594046644480 2025-03-31T16:09:00.006393Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710814:0, at schemeshard: 72057594046644480 601 2025-03-31T16:09:00.197398Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:09:00.290610Z node 11 
:FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710816:0, at schemeshard: 72057594046644480 2025-03-31T16:09:00.485758Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710817:0, at schemeshard: 72057594046644480 603 2025-03-31T16:09:00.698798Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710818:0, at schemeshard: 72057594046644480 2025-03-31T16:09:00.873053Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710819:0, at schemeshard: 72057594046644480 602 2025-03-31T16:09:01.061822Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:09:01.089599Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710821:0, at schemeshard: 72057594046644480 2025-03-31T16:09:01.373554Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710822:0, at schemeshard: 72057594046644480 604 2025-03-31T16:09:02.005769Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710823:0, at schemeshard: 72057594046644480 2025-03-31T16:09:02.166858Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:09:02.200219Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710825:0, at schemeshard: 72057594046644480 718 2025-03-31T16:09:02.349253Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710826:0, at schemeshard: 72057594046644480 2025-03-31T16:09:02.557950Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710827:0, at schemeshard: 72057594046644480 869 2025-03-31T16:09:02.819666Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:09:02.877046Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710829:0, at schemeshard: 72057594046644480 2025-03-31T16:09:03.176880Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710830:0, at schemeshard: 72057594046644480 650 2025-03-31T16:09:03.428319Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:09:03.481877Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710832:0, at schemeshard: 72057594046644480 2025-03-31T16:09:03.633867Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710833:0, at schemeshard: 72057594046644480 
2025-03-31T16:09:03.882002Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 829 2025-03-31T16:09:03.935888Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710835:0, at schemeshard: 72057594046644480 2025-03-31T16:09:04.087464Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:09:04.120537Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710837:0, at schemeshard: 72057594046644480 774 2025-03-31T16:09:04.273633Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:09:04.307774Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710839:0, at schemeshard: 72057594046644480 2025-03-31T16:09:04.536409Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710840:0, at schemeshard: 72057594046644480 2950 2025-03-31T16:09:04.755427Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:09:04.780580Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710842:0, at schemeshard: 72057594046644480 2025-03-31T16:09:05.003171Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:09:05.065092Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710845:0, at schemeshard: 72057594046644480 114 2025-03-31T16:09:05.242316Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:09:05.285630Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710847:0, at schemeshard: 72057594046644480 2025-03-31T16:09:05.493130Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:09:05.557939Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710849:0, at schemeshard: 72057594046644480 3802 2025-03-31T16:09:05.741764Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:09:05.781174Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710851:0, at schemeshard: 72057594046644480 2025-03-31T16:09:05.939142Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:09:05.972459Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710853:0, at schemeshard: 72057594046644480 4072 2025-03-31T16:09:06.133401Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:09:06.168280Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710855:0, at schemeshard: 72057594046644480 2025-03-31T16:09:06.301172Z node 11 
:READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:09:06.332167Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710857:0, at schemeshard: 72057594046644480 2025-03-31T16:09:06.451844Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 142 2025-03-31T16:09:06.494235Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710859:0, at schemeshard: 72057594046644480 2025-03-31T16:09:06.754233Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:09:06.787833Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710861:0, at schemeshard: 72057594046644480 3615 2025-03-31T16:09:06.953869Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710862:0, at schemeshard: 72057594046644480 2025-03-31T16:09:07.101509Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:09:07.133047Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710864:0, at schemeshard: 72057594046644480 3614 2025-03-31T16:09:07.327280Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:09:07.363515Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710866:0, at schemeshard: 72057594046644480 2025-03-31T16:09:07.534520Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710867:0, at schemeshard: 72057594046644480 2025-03-31T16:09:07.666145Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 22 2025-03-31T16:09:07.708972Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710869:0, at schemeshard: 72057594046644480 2025-03-31T16:09:08.088358Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710870:0, at schemeshard: 72057594046644480 2025-03-31T16:09:08.267367Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill |83.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_not_create_kesus [GOOD] >> TPersQueueTest::WhenTheTopicIsDeletedAfterDecompressingTheData_Compressed [GOOD] >> TPersQueueTest::TestWriteStat |83.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v0-std] [GOOD] |83.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/docs/generator/generator |83.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/docs/generator/generator |83.5%| [LD] {RESULT} $(B)/ydb/tests/olap/docs/generator/generator |83.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test 
|83.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_access_key_id[tables_format_v1] [GOOD] |83.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_reading_from_empty_queue |83.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_count_queues[tables_format_v1] [GOOD] |83.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |83.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |83.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-fifo] >> TPersQueueTest::ReadRuleServiceTypeMigration [GOOD] >> TPersQueueTest::ReadRuleServiceTypeMigrationWithDisallowDefault >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-std] |83.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots |83.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots |83.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots |83.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v0] [GOOD] |83.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots |83.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots |83.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v1-std] [GOOD] |83.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |83.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_not_create_kesus [GOOD] |83.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme |83.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme |83.6%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme |83.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tools/stress_tool/ydb_stress_tool |83.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/stress_tool/ydb_stress_tool |83.6%| [LD] {RESULT} $(B)/ydb/tools/stress_tool/ydb_stress_tool >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-fifo] >> test_quoting.py::TestSqsQuotingWithKesus::test_creates_quoter [GOOD] |83.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v0-std] [GOOD] |83.6%| [TM] {asan, default-linux-x86_64, 
release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-std] [GOOD] |83.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/canonical/ydb-tests-functional-canonical |83.6%| [LD] {RESULT} $(B)/ydb/tests/functional/canonical/ydb-tests-functional-canonical |83.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/canonical/ydb-tests-functional-canonical >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-std] >> test_retry.py::TestRetry::test_fail_first[kikimr0] [FAIL] >> KqpJoinOrder::ShuffleEliminationOneJoin [GOOD] |83.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tools/query_replay_yt/query_replay_yt |83.6%| [LD] {RESULT} $(B)/ydb/tools/query_replay_yt/query_replay_yt |83.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/query_replay_yt/query_replay_yt |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v1-std] [GOOD] |83.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan |83.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan |83.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |83.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/generic/analytics/ydb-tests-fq-generic-analytics |83.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/generic/analytics/ydb-tests-fq-generic-analytics |83.7%| [LD] {RESULT} $(B)/ydb/tests/fq/generic/analytics/ydb-tests-fq-generic-analytics >> test_cp_ic.py::TestCpIc::test_discovery [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v1-std] [GOOD] |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_retry.py::TestRetry::test_low_rate[kikimr0] |83.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/multinode/ydb-tests-functional-sqs-multinode |83.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/multinode/ydb-tests-functional-sqs-multinode |83.7%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/multinode/ydb-tests-functional-sqs-multinode |83.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/autoconfig/ydb-tests-functional-autoconfig |83.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/autoconfig/ydb-tests-functional-autoconfig |83.7%| [LD] {RESULT} $(B)/ydb/tests/functional/autoconfig/ydb-tests-functional-autoconfig >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v0] >> BasicStatistics::TwoServerlessTwoSharedDbs [GOOD] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-std] |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |83.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/ydb_cli/ydb-tests-functional-ydb_cli |83.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/ydb_cli/ydb-tests-functional-ydb_cli |83.7%| [LD] {RESULT} 
$(B)/ydb/tests/functional/ydb_cli/ydb-tests-functional-ydb_cli |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::ShuffleEliminationOneJoin [GOOD] Test command err: Trying to start YDB, gRPC: 8287, MsgBus: 12176 2025-03-31T16:07:15.700586Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005907679042803:2197];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:07:15.700635Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0019ad/r3tmp/tmpNAW2q9/pdisk_1.dat 2025-03-31T16:07:16.795294Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:07:16.855317Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:07:16.857455Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:07:16.913356Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:07:16.915627Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8287, node 1 2025-03-31T16:07:17.503203Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:07:17.503223Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:07:17.503230Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:07:17.503366Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12176 TClient is connected to server localhost:12176 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:07:19.712264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:07:19.776262Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:07:20.576694Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005929153879814:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:07:20.576834Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:07:20.577062Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488005929153879825:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:07:20.584850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:07:20.600927Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488005929153879828:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:07:20.688929Z node 1 :TX_PROXY ERROR: Actor# [1:7488005929153879880:2342] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:07:20.703584Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488005907679042803:2197];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:07:20.703732Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:07:22.566270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:07:23.100671Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488005937743814697:2355];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:07:23.100991Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488005937743814697:2355];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:07:23.101247Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488005937743814697:2355];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:07:23.101339Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488005937743814697:2355];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:07:23.101442Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488005937743814697:2355];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:07:23.101567Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488005937743814697:2355];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:07:23.101682Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488005937743814697:2355];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:07:23.101832Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488005937743814697:2355];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:07:23.101952Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488005937743814697:2355];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:07:23.102052Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7488005937743814697:2355];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:07:23.102138Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488005937743814697:2355];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:07:23.102234Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488005937743814697:2355];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:07:23.103354Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488005937743814742:2369];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:07:23.103397Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488005937743814742:2369];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:07:23.103597Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488005937743814742:2369];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:07:23.103710Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488005937743814742:2369];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:07:23.103801Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488005937743814742:2369];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:07:23.103931Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488005937743814742:2369];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:07:23.104033Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488005937743814742:2369];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:07:23.104155Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488005937743814742:2369];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:07:23.104266Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488005937743814742:2369];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:07:23.104370Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488005937743814742:2369];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:07:23.104487Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[1:7488005937743814742:2369];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:07:23.104585Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900; ... ller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:07.388175Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039367;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:07.398801Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:07.411717Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:07.423392Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:07.426753Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:07.446838Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:07.449883Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:07.474360Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039368;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:07.478361Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:07.490814Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:07.496779Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039327;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:07.508991Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:07.512556Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:07.519659Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:07.526800Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039295;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:07.534329Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:07.538481Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:07.544573Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:07.552997Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:07.554131Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:07.567507Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:07.572798Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:07.577944Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:07.582760Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039291;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:07.593741Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:07.597734Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:07.608555Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:07.611855Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:07.622369Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:07.623876Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:07.634759Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:07.641323Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:07.645634Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:07.647999Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:07.657613Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:07.666122Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:07.672775Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:07.681349Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:07.689926Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:07.690884Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:07.798752Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039218;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:07.929452Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039186;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:08.051044Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039229;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:08.136233Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:08.146360Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039372;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:08.339720Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpfc0tb8r7x0mmesaej93da", SessionId: ydb://session/3?node_id=1&id=YzJhNmRhM2EtODc1NjA4ZGQtM2ZkMDQzNjktMzRkNDI1ODY=, Slow query, duration: 47.783595s, 
status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:09:09.458987Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:09:09.459555Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:09:09.467815Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;self_id=[1:7488006315700994914:11610];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039392;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038933;receive=72075186224039094; 2025-03-31T16:09:09.468419Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> TPersQueueCommonTest::TestLimiterLimitsWithBlobsRateLimit [GOOD] >> TPersQueueCommonTest::TestLimiterLimitsWithUserPayloadRateLimit |83.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge |83.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge |83.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge |83.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/generic/streaming/ydb-tests-fq-generic-streaming >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-std] [GOOD] |83.8%| [LD] {RESULT} $(B)/ydb/tests/fq/generic/streaming/ydb-tests-fq-generic-streaming |83.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/generic/streaming/ydb-tests-fq-generic-streaming >> SystemView::AuthEffectivePermissions [GOOD] |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::TwoServerlessTwoSharedDbs [GOOD] Test command err: 2025-03-31T16:04:48.076317Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:526:2414], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:04:48.076812Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:04:48.076987Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d39/r3tmp/tmptxOsik/pdisk_1.dat 2025-03-31T16:04:48.797651Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63349, node 1 2025-03-31T16:04:49.438533Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:04:49.438601Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:04:49.438641Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:04:49.443932Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:04:49.455485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:04:49.636545Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:49.636713Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:49.668932Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26268 2025-03-31T16:04:50.586810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:04:55.628879Z node 3 :STATISTICS INFO: Subscribed for config changes on node 3 2025-03-31T16:04:55.723146Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:55.723306Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:55.777158Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-03-31T16:04:55.788375Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:04:56.186313Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:04:56.187037Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:04:56.187470Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:04:56.188024Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:04:56.188354Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:04:56.188445Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:04:56.188552Z node 3 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:04:56.188670Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:04:56.188764Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:04:56.413039Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:04:56.413166Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:04:56.448514Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:04:56.925854Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:56.974467Z node 3 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-31T16:04:56.974610Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-31T16:04:57.014370Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-31T16:04:57.016186Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-31T16:04:57.016432Z node 3 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-31T16:04:57.016502Z node 3 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-31T16:04:57.016567Z node 3 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-31T16:04:57.016633Z node 3 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-31T16:04:57.016700Z node 3 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-31T16:04:57.016761Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-31T16:04:57.017573Z node 3 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-31T16:04:57.073371Z node 3 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-31T16:04:57.073535Z node 3 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [3:1951:2599], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-31T16:04:57.099855Z node 3 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [3:1964:2610] 2025-03-31T16:04:57.110419Z node 3 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [3:1982:2620] 2025-03-31T16:04:57.110652Z node 3 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [3:1982:2620], schemeshard id = 72075186224037897 2025-03-31T16:04:57.174768Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared1 2025-03-31T16:04:57.249453Z node 3 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-31T16:04:57.249538Z node 3 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-31T16:04:57.249617Z node 3 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared1/.metadata/_statistics 2025-03-31T16:04:57.282120Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-31T16:04:57.309958Z node 3 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-31T16:04:57.310179Z node 3 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-31T16:04:57.700530Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-31T16:04:58.144964Z node 3 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-31T16:04:58.230648Z node 3 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-31T16:04:59.352780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-31T16:05:05.312593Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-31T16:05:05.396943Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:05:05.397088Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:05:05.509595Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:05:05.513106Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:05:05.791707Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:05:05.792384Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:05:05.792936Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:05:05.793121Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:05:05.799058Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:05:05.799243Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:05:05.799446Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:05:05.799611Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:05:05.799714Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:05:05.948804Z node 2 :HIVE WARN: HIVE#72075186224038889 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:05:05.948971Z node 2 :HIVE WARN: HIVE#72075186224038889 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:05:05.965415Z node 2 :HIVE WARN: HIVE#72075186224038889 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:05:06.288346Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:05:06.391257Z node 2 :STATISTICS INFO: [72075186224038895] OnActivateExecutor 2025-03-31T16:05:06.391403Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxInitSchema::Execute 2025-03-31T16:05:06.503708Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxInitSchema::Complete 2025-03-31T16:05:06.504374Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxInit::Execute 2025-03-31T16:05:06.504610Z node 2 :STATISTICS DEBUG: [72075186224038895] Loaded BaseStatistics: schemeshard 
count# 0 2025-03-31T16:05:06.504678Z node 2 :STATISTICS DEBUG: [72075186224038895] Loaded ColumnStatistics: column count# 0 2025-03-31T16:05:06.504749Z node 2 :STATISTICS DEBUG: [72075186224038895] Loaded ScheduleTraversals: table count# 0 2025-03-31T16:05:06.504801Z node 2 :STATISTICS DEBUG: [72075186224038895] Loaded ForceTraversalOperations: table count# 0 2025-03-31T16:05:06.504859Z node 2 :STATISTICS DEBUG: [72075186224038895] Loaded ForceTraversalTables: table count# 0 2025-03-31T16:05:06.504916Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxInit::Complete 2025-03-31T16:05:06.505894Z node 2 :STATISTICS INFO: [72075186224038895] Subscribed for config changes 2025-03-31T16:05:06.563009Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224038895, at schemeshard: 72075186224038898 2025-03-31T16:05:06.563187Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:3223:2598], at schemeshard: 72075186224038898, StatisticsAggregatorId: 72075186224038895, at schemeshard: 72075186224038898 2025-03-31T16:05:06.571581Z node 2 :STATISTICS DEBUG: [72075186224038895] EvServerConnected, pipe server id = [2:3235:2607] 2025-03-31T16:05:06.581369Z node 2 :STATISTICS DEBUG: [72075186224038895] EvServerConnected, pipe server id = [2:3266:2621] 2 ... ble [OwnerId: 72075186224038898, LocalPathId: 3] 2025-03-31T16:09:31.981626Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared2 2025-03-31T16:09:31.992085Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-03-31T16:09:32.002385Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:12546:5250], DatabaseId: /Root/Shared2, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:09:32.002510Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:12556:5255], DatabaseId: /Root/Shared2, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:09:32.002597Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Shared2, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:09:32.039652Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976730658:2, at schemeshard: 72075186224038898 2025-03-31T16:09:32.154845Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:12560:5258], DatabaseId: /Root/Shared2, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976730658 completed, doublechecking } 2025-03-31T16:09:32.394201Z node 2 :TX_PROXY ERROR: Actor# [2:12653:5307] txid# 281474976730659, issues: { message: "Check failed: path: \'/Root/Shared2/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:09:32.431928Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:12682:5322]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:09:32.432446Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-31T16:09:32.432577Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:12684:5324] 2025-03-31T16:09:32.432657Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:12684:5324] 2025-03-31T16:09:32.436547Z node 2 :STATISTICS DEBUG: [72075186224038895] EvServerConnected, pipe server id = [2:12685:5325] 2025-03-31T16:09:32.437300Z node 2 :STATISTICS DEBUG: [72075186224038895] EvConnectNode, pipe server id = [2:12685:5325], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-03-31T16:09:32.437404Z node 2 :STATISTICS DEBUG: [72075186224038895] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-03-31T16:09:32.437762Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:12684:5324], server id = [2:12685:5325], tablet id = 72075186224038895, status = OK 2025-03-31T16:09:32.437910Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-31T16:09:32.437990Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:12682:5322], StatRequests.size() = 1 2025-03-31T16:09:32.575399Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=OTY0YjliNjAtNjJlZTRhYzYtOTQxOWRiMi02MWNmNjdmZQ==, TxId: 2025-03-31T16:09:32.575472Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OTY0YjliNjAtNjJlZTRhYzYtOTQxOWRiMi02MWNmNjdmZQ==, TxId: 2025-03-31T16:09:32.576386Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxFinishTraversal::Execute 2025-03-31T16:09:32.593664Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224038898, LocalPathId: 3] 2025-03-31T16:09:32.593730Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxFinishTraversal::Complete. No ActorId to send reply.
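[Annotation] The NOT_FOUND retries above are the workload manager bootstrapping the default resource pool at /Root/Shared2/.metadata/workload_manager/pools/default: the first fetches miss, TPoolCreatorActor creates the pool, and the follow-up txid 281474976730659 fails benignly because the path already exists. Such a pool can also be declared explicitly; a minimal sketch, assuming the YQL resource-pool DDL of current YDB versions — the pool name and limits are illustrative, not taken from this run:

    CREATE RESOURCE POOL example_pool WITH (
        CONCURRENT_QUERY_LIMIT = 20,  -- illustrative limit, not from this log
        QUEUE_SIZE = 1000             -- illustrative queue depth, not from this log
    );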
2025-03-31T16:09:32.644698Z node 2 :STATISTICS DEBUG: [72075186224038895] EvFastPropagateCheck 2025-03-31T16:09:32.644777Z node 2 :STATISTICS DEBUG: [72075186224038895] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-03-31T16:09:32.732352Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:12684:5324], schemeshard count = 1 2025-03-31T16:09:33.280441Z node 3 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037899 2025-03-31T16:09:33.280518Z node 3 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 191.000000s, at schemeshard: 72075186224037899 2025-03-31T16:09:33.280903Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037899, stats size# 26 2025-03-31T16:09:33.301864Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-03-31T16:09:33.644481Z node 3 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-31T16:09:33.660772Z node 3 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:09:33.660835Z node 3 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-31T16:09:33.660892Z node 3 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 2] is data table. 2025-03-31T16:09:33.660934Z node 3 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037899, LocalPathId: 2] 2025-03-31T16:09:33.661229Z node 3 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared1 2025-03-31T16:09:33.665023Z node 3 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-03-31T16:09:33.688905Z node 3 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=3&id=N2IwN2UxOWMtZTM0MGMyOTUtYjZkMDZkY2UtNmQ5NzQxMw==, TxId: 2025-03-31T16:09:33.688977Z node 3 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=3&id=N2IwN2UxOWMtZTM0MGMyOTUtYjZkMDZkY2UtNmQ5NzQxMw==, TxId: 2025-03-31T16:09:33.690888Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-31T16:09:33.721464Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037899, LocalPathId: 2] 2025-03-31T16:09:33.721532Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 
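[Annotation] For reference, the KQP_SLOW_LOG record near the top of this section (TraceId 01jqpfc0tb8r7x0mmesaej93da, duration 47.783595s) carries its query text as a single escaped string; expanded for readability, it reads:

    CREATE TABLE t1 (
        id1 Int32 NOT NULL,
        PRIMARY KEY (id1)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t2 (
        id2 Int64 NOT NULL,
        t1_id1 Int64 NOT NULL,
        -- random_field2 Int32
        PRIMARY KEY (id2)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t3 (
        id3 Int16 NOT NULL,
        -- random_field3 Int32
        PRIMARY KEY (id3)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

With STORE = COLUMN and AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240, each statement brings up on the order of 240 column shards, consistent with the long runs of per-tablet finished_tx records for tx_id 281474976710714 that open this section.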
2025-03-31T16:09:33.840091Z node 3 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 119 ], ReplyToActorId[ [3:12787:5655]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:09:33.840582Z node 3 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 119 ] 2025-03-31T16:09:33.840643Z node 3 :STATISTICS DEBUG: ReplySuccess(), request id = 119, ReplyToActorId = [3:12787:5655], StatRequests.size() = 1 2025-03-31T16:09:36.232098Z node 3 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 120 ], ReplyToActorId[ [3:12870:5689]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:09:36.232525Z node 3 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 120 ] 2025-03-31T16:09:36.232576Z node 3 :STATISTICS DEBUG: ReplySuccess(), request id = 120, ReplyToActorId = [3:12870:5689], StatRequests.size() = 1 2025-03-31T16:09:36.920780Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224038900 2025-03-31T16:09:36.920853Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 207.000000s, at schemeshard: 72075186224038900 2025-03-31T16:09:36.921365Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxSchemeShardStats::Execute: schemeshard id# 72075186224038900, stats size# 26 2025-03-31T16:09:36.945285Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxSchemeShardStats::Complete 2025-03-31T16:09:37.232546Z node 2 :STATISTICS DEBUG: [72075186224038895] ScheduleNextTraversal 2025-03-31T16:09:37.232610Z node 2 :STATISTICS DEBUG: [72075186224038895] ScheduleNextTraversal. No force traversals. 2025-03-31T16:09:37.232655Z node 2 :STATISTICS DEBUG: [72075186224038895] IsColumnTable. Path [OwnerId: 72075186224038900, LocalPathId: 2] is data table. 2025-03-31T16:09:37.232691Z node 2 :STATISTICS DEBUG: [72075186224038895] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224038900, LocalPathId: 2] 2025-03-31T16:09:37.233161Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared2 2025-03-31T16:09:37.236866Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-03-31T16:09:37.260052Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=Y2JlMDAzYWEtZjJmZGEyMTctZDFmZmMwLThkNDc5ZTBk, TxId: 2025-03-31T16:09:37.260119Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=Y2JlMDAzYWEtZjJmZGEyMTctZDFmZmMwLThkNDc5ZTBk, TxId: 2025-03-31T16:09:37.260777Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxFinishTraversal::Execute 2025-03-31T16:09:37.278652Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224038900, LocalPathId: 2] 2025-03-31T16:09:37.278718Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxFinishTraversal::Complete. No ActorId to send reply. 
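[Annotation] The [TQueryBase] RunDataQuery entries interleaved above (16:09:31.992085, 16:09:33.665023, 16:09:37.236866) all execute the same parameterized cleanup against the shared databases' statistics table; the flattened statement, reflowed:

    DECLARE $owner_id AS Uint64;
    DECLARE $local_path_id AS Uint64;
    DELETE FROM `.metadata/_statistics`
    WHERE owner_id = $owner_id AND local_path_id = $local_path_id;

Each run is followed by TTxFinishTraversal::Execute/Complete, which schedules the next traversal for the path just cleaned.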
2025-03-31T16:09:38.404547Z node 3 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 3 2025-03-31T16:09:38.404833Z node 3 :STATISTICS DEBUG: EvPropagateStatistics, node id = 3 2025-03-31T16:09:38.405645Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-31T16:09:38.418164Z node 3 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:09:38.418231Z node 3 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-31T16:09:38.564210Z node 3 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 121 ], ReplyToActorId[ [3:12972:5709]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:09:38.565115Z node 3 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 121 ] 2025-03-31T16:09:38.565176Z node 3 :STATISTICS DEBUG: ReplySuccess(), request id = 121, ReplyToActorId = [3:12972:5709], StatRequests.size() = 1 2025-03-31T16:09:38.567666Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:12974:5423]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:09:38.575556Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-31T16:09:38.576031Z node 2 :STATISTICS DEBUG: [72075186224038895] EvRequestStats, node id = 2, schemeshard count = 1, urgent = 0 2025-03-31T16:09:38.576108Z node 2 :STATISTICS DEBUG: [72075186224038895] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-03-31T16:09:38.576586Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-31T16:09:38.576693Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:12974:5423], StatRequests.size() = 1 |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v0] |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-fifo] [GOOD] |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_creates_quoter [GOOD] |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |83.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/common/ydb-tests-functional-sqs-common |83.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/common/ydb-tests-functional-sqs-common |83.8%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/common/ydb-tests-functional-sqs-common >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-std] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest >> SystemView::AuthEffectivePermissions [GOOD] Test command err: 2025-03-31T16:00:07.134451Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004071901481889:2213];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:00:07.134645Z node 1 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001a02/r3tmp/tmpVPCBD8/pdisk_1.dat 2025-03-31T16:00:08.017214Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:00:08.039600Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:00:08.039696Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:00:08.048404Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31076, node 1 2025-03-31T16:00:08.445397Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:00:08.445423Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:00:08.445429Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:00:08.445551Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63457 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:00:09.322712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:00:09.417285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:00:09.435429Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:00:09.620299Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:00:09.623867Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:00:09.675788Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-03-31T16:00:09.681113Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:00:10.036604Z node 3 :SYSTEM_VIEWS INFO: [72075186224037893] OnActivateExecutor 2025-03-31T16:00:10.036649Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxInitSchema::Execute 2025-03-31T16:00:10.108974Z node 3 :SYSTEM_VIEWS DEBUG: Handle TEvSysView::TEvRegisterDbCounters: service id# [3:7488004077903081769:2074], path id# [OwnerId: 72057594046644480, LocalPathId: 2], service# 2 2025-03-31T16:00:10.124024Z node 3 :SYSTEM_VIEWS DEBUG: NSysView::TPartitionStatsCollector bootstrapped 2025-03-31T16:00:10.124180Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:00:10.124489Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxInitSchema::Complete 2025-03-31T16:00:10.124538Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxInit::Execute 2025-03-31T16:00:10.125042Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading interval summaries: query count# 0, node ids count# 0, total count# 0 2025-03-31T16:00:10.125073Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading interval metrics: query count# 0 2025-03-31T16:00:10.125109Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading interval query tops: total query count# 0 2025-03-31T16:00:10.125134Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading nodes to request: nodes count# 0, hashes count# 0 2025-03-31T16:00:10.125172Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 6, result count# 0 2025-03-31T16:00:10.125203Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 7, result count# 0 2025-03-31T16:00:10.125223Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 8, result count# 0 2025-03-31T16:00:10.125249Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 9, result count# 0 2025-03-31T16:00:10.125275Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 10, result count# 0 2025-03-31T16:00:10.125300Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 11, result count# 0 2025-03-31T16:00:10.125328Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 12, result count# 0 2025-03-31T16:00:10.125351Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 13, result count# 0 2025-03-31T16:00:10.125410Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 14, result count# 0 2025-03-31T16:00:10.125437Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 15, result count# 0 2025-03-31T16:00:10.125465Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 16, partCount count# 0 2025-03-31T16:00:10.125494Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 19, partCount count# 0 2025-03-31T16:00:10.125529Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 17, result count# 0 2025-03-31T16:00:10.125551Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 18, result count# 0 2025-03-31T16:00:10.125594Z node 3 :SYSTEM_VIEWS 
DEBUG: [72075186224037893] Loading results: table# 20, result count# 0 2025-03-31T16:00:10.125623Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 21, result count# 0 2025-03-31T16:00:10.125718Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Reset: interval end# 2025-03-31T16:00:10.000000Z 2025-03-31T16:00:10.125962Z node 3 :SYSTEM_VIEWS INFO: Navigate by path id succeeded: service id# [3:7488004077903081769:2074], path id# [OwnerId: 72057594046644480, LocalPathId: 2], database# /Root/Database1 2025-03-31T16:00:10.147826Z node 3 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [3:7488004077903081769:2074], database# /Root/Database1, no sysview processor 2025-03-31T16:00:10.161521Z node 3 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessLabeledCounters: service id# [3:7488004077903081769:2074] 2025-03-31T16:00:10.163007Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxInit::Complete 2025-03-31T16:00:10.187656Z node 2 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessCounters: service id# [2:7488004065296758621:2061] 2025-03-31T16:00:10.190928Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxConfigure::Execute: database# /Root/Database1 2025-03-31T16:00:10.195151Z node 3 :SYSTEM_VIEWS INFO: NSysView::TPartitionStatsCollector initialized: domain key# [OwnerId: 72057594046644480, LocalPathId: 2], sysview processor id# 72075186224037893 2025-03-31T16:00:10.216458Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxConfigure::Complete 2025-03-31T16:00:10.219663Z node 3 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessLabeledCounters: service id# [3:7488004069313147099:2061] 2025-03-31T16:00:10.288189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:00:10.369429Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488004082476628036:2222];send_to=[0:7307199536658146131:7762515]; waiting... 
2025-03-31T16:00:10.463444Z node 1 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessLabeledCounters: service id# [1:7488004067606514333:2070] 2025-03-31T16:00:10.468782Z node 3 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessCounters: service id# [3:7488004077903081769:2074] 2025-03-31T16:00:10.479399Z node 3 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [3:7488004077903081769:2074], database# /Root/Database1, processor id# 72075186224037893 2025-03-31T16:00:10.523958Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:00:10.524046Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:00:10.546360Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:00:10.547313Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxAggregate::Execute 2025-03-31T16:00:10.547368Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryResults: interval end# 2025-03-31T16:00:10.000000Z, query count# 0 2025-03-31T16:00:10.547400Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryTopResults: table id# 8, interval end# 2025-03-31T16:00:10.000000Z, query count# 0, persisted# 0 2025-03-31T16:00:10.547426Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryTopResults: table id# 10, interval end# 2025-03-31T16:00:10.000000Z, query count# 0, persisted# 0 2025-03-31T16:00:10.547447Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryTopResults: table id# 12, interval end# 2025-03-31T16:00:10.000000Z, query count# 0, persisted# 0 2025-03-31T16:00:10.547468Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryTopResults: table id# 14, interval end# 2025-03-31T16:00:10.000000Z, query count# 0, persisted# 0 2025-03-31T16:00:10.547499Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryTopResults: table id# 9, interval end# 2025-03-31T17:00:00.000000Z, query count# 0, persisted# 0 2025-03-31T16:00:10.547514Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryTopResults: table id# 11, interval end# 2025-03-31T17:00:00.000000Z, query count# 0, persisted# 0 2025-03-31T16:00:10.547530Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryTopResults: table id# 13, interval end# 2025-03-31T17:00:00.000000Z, query count# 0, persisted# 0 2025-03-31T16:00:10.547546Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryTopResults: table id# 15, interval end# 2025-03-31T17:00:00.000000Z, query count# 0, persisted# 0 2025-03-31T16:00:10.555506Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxAggregate::Complete 2025-03-31T16:00:10.563611Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:00:10.599199Z node 1 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessCounters: service id# [1:7488004067606514333:2070] 2025-03-31T16:00:10.601964Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_ ... 
[18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:09:42.078559Z node 19 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/pools/default TableId: [72057594046644480:8:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindResourcePool DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-31T16:09:42.078696Z node 19 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [19:7488006534777053598:2424], row count: 5, finished: 0 2025-03-31T16:09:42.085494Z node 19 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:09:42.091392Z node 19 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir1 TableId: [72057594046644480:9:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Children [] }] } 2025-03-31T16:09:42.091485Z node 19 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [19:7488006534777053598:2424], row count: 1, finished: 0 2025-03-31T16:09:42.111201Z node 19 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:09:42.119579Z node 19 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table0 TableId: [72057594046644480:4:1] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-31T16:09:42.119656Z node 19 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [19:7488006534777053598:2424], row count: 1, finished: 0 2025-03-31T16:09:42.120598Z node 19 :SYSTEM_VIEWS INFO: Scan finished, actor: [19:7488006534777053598:2424], owner: [19:7488006534777053588:2422], scan id: 0, table 
id: [72057594046644480:1:0:auth_effective_permissions] 2025-03-31T16:09:42.146298Z node 19 :SYSTEM_VIEWS TRACE: Collect query stats: service id# [19:7488006448877705585:2146], database# , query hash# 11342553055430868283, cpu time# 887993 2025-03-31T16:09:42.147692Z node 19 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437381899, txId: 281474976715676] shutting down 2025-03-31T16:09:42.199778Z node 23 :SYSTEM_VIEWS DEBUG: NSysView::TPartitionStatsCollector: TEvProcessOverloaded , top size by CPU # 0, top size by TLI # 0, time# 2025-03-31T16:09:42.199237Z 2025-03-31T16:09:42.431820Z node 19 :KQP_EXECUTER ERROR: TxId: 281474976715679. Ctx: { TraceId: 01jqpfeghx9qjvw0st79q8yaj9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=19&id=NmE4NjBkNDYtMjkxZDI3NDItNmEyZWUwZDctZjAyNDhlZTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:09:42.433710Z node 19 :SYSTEM_VIEWS INFO: Scan started, actor: [19:7488006539072020947:2435], owner: [19:7488006539072020943:2433], scan id: 0, table id: [72075186224037888:1:0:auth_effective_permissions] 2025-03-31T16:09:42.434717Z node 19 :SYSTEM_VIEWS INFO: Scan prepared, actor: [19:7488006539072020947:2435], schemeshard id: 72075186224037888, hive id: 72057594037968897, database: /Root/Tenant1, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 2], database node count: 2 2025-03-31T16:09:42.434750Z node 19 :SYSTEM_VIEWS DEBUG: ProceedToScan, tenant name: /Root/Tenant1 tenant owner: root@builtin subject sid: empty require admin access: 0 is admin: 1 2025-03-31T16:09:42.434841Z node 19 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Tenant1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:09:42.435317Z node 19 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Tenant1 TableId: [72075186224037888:1:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 2 PlanResolution: 50 Coordinators: 72075186224037889 Coordinators: 72075186224037890 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037891 Mediators: 72075186224037892 SchemeShard: 72075186224037888 SysViewProcessor: 72075186224037893 } ServerlessComputeResourcesMode: (empty maybe) Users: [{ Sid: user2 }] Groups: [] } Children [Dir2,Table1] }] } 2025-03-31T16:09:42.435384Z node 19 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [19:7488006539072020947:2435], row count: 1, finished: 0 2025-03-31T16:09:42.435554Z node 19 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Tenant1/Dir2 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:09:42.436204Z node 19 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Tenant1/Dir2 TableId: 
[72075186224037888:3:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 2 PlanResolution: 50 Coordinators: 72075186224037889 Coordinators: 72075186224037890 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037891 Mediators: 72075186224037892 SchemeShard: 72075186224037888 SysViewProcessor: 72075186224037893 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Children [] }] } 2025-03-31T16:09:42.436290Z node 19 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [19:7488006539072020947:2435], row count: 2, finished: 0 2025-03-31T16:09:42.444846Z node 19 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Tenant1/Table1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:09:42.446399Z node 19 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Tenant1/Table1 TableId: [72075186224037888:2:1] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 2 PlanResolution: 50 Coordinators: 72075186224037889 Coordinators: 72075186224037890 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037891 Mediators: 72075186224037892 SchemeShard: 72075186224037888 SysViewProcessor: 72075186224037893 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-31T16:09:42.446462Z node 19 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [19:7488006539072020947:2435], row count: 1, finished: 0 2025-03-31T16:09:42.448874Z node 19 :SYSTEM_VIEWS INFO: Scan finished, actor: [19:7488006539072020947:2435], owner: [19:7488006539072020943:2433], scan id: 0, table id: [72075186224037888:1:0:auth_effective_permissions] 2025-03-31T16:09:42.453410Z node 19 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437382429, txId: 281474976715678] shutting down 2025-03-31T16:09:42.453572Z node 19 :SYSTEM_VIEWS TRACE: Collect query stats: service id# [19:7488006448877705585:2146], database# , query hash# 17325808444334437222, cpu time# 217582 2025-03-31T16:09:42.467177Z node 21 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:09:42.466685Z node 19 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 21 2025-03-31T16:09:42.467701Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(21, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-03-31T16:09:42.467960Z node 19 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 23 2025-03-31T16:09:42.468697Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-03-31T16:09:42.468822Z node 19 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 20 2025-03-31T16:09:42.469004Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Connected -> 
Disconnected 2025-03-31T16:09:42.470515Z node 19 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 22 2025-03-31T16:09:42.471445Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-03-31T16:09:42.469857Z node 23 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:09:42.509685Z node 19 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[22:7488006464861719077:2107], Type=268959746 2025-03-31T16:09:42.509807Z node 19 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[22:7488006464861719077:2107], Type=268959746 2025-03-31T16:09:42.509853Z node 19 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[22:7488006464861719077:2107], Type=268959746 2025-03-31T16:09:42.509885Z node 19 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[22:7488006464861719077:2107], Type=268959746 2025-03-31T16:09:42.509917Z node 19 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[22:7488006464861719077:2107], Type=268959746 2025-03-31T16:09:42.509956Z node 19 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[22:7488006464861719077:2107], Type=268959746 |83.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/ut_large/ydb-core-sys_view-ut_large |83.8%| [LD] {RESULT} $(B)/ydb/core/sys_view/ut_large/ydb-core-sys_view-ut_large |83.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/ut_large/ydb-core-sys_view-ut_large >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v1] |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |83.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore |83.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore |83.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore |83.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/multi_plane/py3test >> test_cp_ic.py::TestCpIc::test_discovery [GOOD] |83.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tools/query_replay/ydb_query_replay |83.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/query_replay/ydb_query_replay |83.9%| [LD] {RESULT} $(B)/ydb/tools/query_replay/ydb_query_replay >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v1] >> ReadIteratorExternalBlobs::ExtBlobsWithDeletesInTheBeginning [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsWithDeletesInTheEnd |83.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-fifo] [GOOD] |83.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage |83.9%| [LD] {RESULT} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage |83.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage |83.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v0] [GOOD] >> TPersQueueTest::ReadRuleServiceTypeMigrationWithDisallowDefault [GOOD] >> 
TPersQueueTest::ReadWithoutConsumerFederation |83.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol |83.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol |83.9%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol >> TPersQueueTest::PreferredCluster_NonExistentPreferredCluster_SessionDiesOnlyAfterDelay [GOOD] >> TPersQueueTest::PreferredCluster_EnabledRemotePreferredClusterAndRemoteClusterEnabledDelaySec_SessionDiesOnlyAfterDelay |83.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> TConsoleTests::TestAlterTenantTooManyStorageResourcesForRunningExtSubdomain [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-fifo] [GOOD] |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-std] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-fifo] |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |84.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/large/ydb-tests-functional-sqs-large |84.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/large/ydb-tests-functional-sqs-large |84.0%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/large/ydb-tests-functional-sqs-large >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_reading_from_empty_queue [GOOD] |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut/unittest >> TConsoleTests::TestAlterTenantTooManyStorageResourcesForRunningExtSubdomain [GOOD] Test command err: 2025-03-31T16:04:57.152937Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:04:57.153015Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:57.221211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-31T16:04:59.272227Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:04:59.272316Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:04:59.332613Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-31T16:05:01.112113Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:05:01.112231Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:05:01.160941Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-31T16:05:04.816830Z node 4 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:05:04.816915Z node 4 
:IMPORT WARN: Table profiles were not loaded 2025-03-31T16:05:04.870339Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-31T16:05:07.741484Z node 5 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:05:07.741567Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:05:07.808801Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-31T16:05:08.996845Z node 6 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:05:08.996919Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:05:09.072524Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-31T16:05:09.827437Z node 6 :PIPE_SERVER ERROR: [72057594046447617] NodeDisconnected NodeId# 7 2025-03-31T16:05:09.827826Z node 6 :PIPE_SERVER ERROR: [72057594046578946] NodeDisconnected NodeId# 7 2025-03-31T16:05:09.827897Z node 6 :PIPE_SERVER ERROR: [72057594037936131] NodeDisconnected NodeId# 7 2025-03-31T16:05:09.828201Z node 7 :TX_PROXY WARN: actor# [7:342:2087] HANDLE TEvClientDestroyed from tablet# 72057594046447617 2025-03-31T16:05:10.741049Z node 8 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:05:10.741122Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:05:10.789132Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-31T16:05:11.354852Z node 8 :BS_CONTROLLER ERROR: {BSC26@console_interaction.cpp:113} failed to parse config obtained from Console ErrorReason# ydb/library/yaml_config/yaml_config_parser.cpp:1268: Condition violated: `config.HasDomainsConfig()' Yaml# --- metadata: kind: MainConfig cluster: "" version: 1 config: log_config: cluster_name: cluster1 allowed_labels: test: type: enum values: ? true selector_config: [] 2025-03-31T16:05:12.263785Z node 9 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:05:12.263863Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:05:12.326413Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-31T16:05:13.491393Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:1, at schemeshard: 72057594046578944 2025-03-31T16:05:13.690791Z node 13 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-03-31T16:05:13.700343Z node 13 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/0l5c/000a81/r3tmp/tmpptSMEy/pdisk_1.dat": unknown reason, errno# 0. 
PDiskId# 1000 2025-03-31T16:05:13.701006Z node 13 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/0l5c/000a81/r3tmp/tmpptSMEy/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/0l5c/000a81/r3tmp/tmpptSMEy/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 13961981168091287279 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-03-31T16:05:13.707634Z node 13 :BS_LOCALRECOVERY CRIT: VDISK[80000003:_:0:0:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 
RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/0l5c/000a81/r3tmp/tmpptSMEy/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-03-31T16:05:13.782727Z node 17 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-03-31T16:05:13.790004Z node 17 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/0l5c/000a81/r3tmp/tmpptSMEy/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-03-31T16:05:13.790258Z node 17 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/0l5c/000a81/r3tmp/tmpptSMEy/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/0l5c/000a81/r3tmp/tmpptSMEy/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 8170593874804973339 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-03-31T16:05:13.863023Z node 10 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-03-31T16:05:13.863517Z node 10 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/0l5c/000a81/r3tmp/tmpptSMEy/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-03-31T16:05:13.863748Z node 10 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/0l5c/000a81/r3tmp/tmpptSMEy/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/0l5c/000a81/r3tmp/tmpptSMEy/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 907270012151644842 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 Secto ... up options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-03-31T16:08:51.051558Z node 108 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-1) cannot create pool '/dc-1/users/tenant-1:hdd-1' (0): Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-03-31T16:08:51.051966Z node 108 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-1 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-03-31T16:08:57.106266Z node 108 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-1) cannot create pool '/dc-1/users/tenant-1:hdd-1' (0): Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-03-31T16:08:57.111302Z node 108 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-1 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-03-31T16:09:02.950776Z node 108 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-1) cannot create pool '/dc-1/users/tenant-1:hdd-1' (0): Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-03-31T16:09:02.951162Z node 108 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-1 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-03-31T16:09:03.148426Z node 108 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# 
{[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]}
2025-03-31T16:09:03.148927Z node 108 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]}
2025-03-31T16:09:08.641634Z node 108 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]}
2025-03-31T16:09:08.642233Z node 108 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]}
2025-03-31T16:09:14.346019Z node 108 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]}
2025-03-31T16:09:14.346287Z node 108 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]}
2025-03-31T16:09:20.691018Z node 108 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]}
2025-03-31T16:09:20.692127Z node 108 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]}
2025-03-31T16:09:26.797247Z node 108 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]}
2025-03-31T16:09:26.797652Z node 108 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]}
2025-03-31T16:09:32.966224Z node 108 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.109479s
2025-03-31T16:09:32.966381Z node 108 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.109671s
2025-03-31T16:09:32.981194Z node 108 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]}
2025-03-31T16:09:32.981483Z node 108 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]}
2025-03-31T16:09:39.294711Z node 108 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]}
2025-03-31T16:09:39.295109Z node 108 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]}
2025-03-31T16:09:45.149269Z node 108 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.100538s
2025-03-31T16:09:45.149361Z node 108 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.100657s
2025-03-31T16:09:45.149452Z node 108 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]}
2025-03-31T16:09:45.149793Z node 108 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]}
2025-03-31T16:09:51.195216Z node 108 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]}
2025-03-31T16:09:51.195611Z node 108 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]}
2025-03-31T16:09:57.187228Z node 108 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]}
2025-03-31T16:09:57.187747Z node 108 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]}
|84.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test
>> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v1-std] [GOOD]
>> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-fifo]
|84.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test
>> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v0] [GOOD]
|84.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test
>> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-std] [GOOD]
|84.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots
|84.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots
|84.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots
|84.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots
|84.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots
|84.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots
>> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-std] [GOOD]
|84.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/tpc/large/ydb-tests-functional-tpc-large
|84.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/tpc/large/ydb-tests-functional-tpc-large
|84.0%| [LD] {RESULT} $(B)/ydb/tests/functional/tpc/large/ydb-tests-functional-tpc-large
|84.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test
>> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-std] [GOOD]
>> TStorageTenantTest::DeclareAndDefine
>> TStorageTenantTest::RemoveStoragePoolAndCreateOneMore
>> TStorageTenantTest::CreateTableInsideSubDomain
>> TStorageTenantTest::CreateTableInsideSubDomain2
>> TStorageTenantTest::CreateDummyTabletsInDifferentDomains
>> TStorageTenantTest::Boot
|84.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots
|84.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots
|84.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots
>> TStorageTenantTest::GenericCases
>> TStorageTenantTest::LsLs
>> TStorageTenantTest::CreateSolomonInsideSubDomain
>> TStorageTenantTest::RemoveStoragePoolBeforeDroppingTablet
|84.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test
>> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v1]
|84.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tablet_flat/benchmark/core_tablet_flat_benchmark
|84.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet_flat/benchmark/core_tablet_flat_benchmark
|84.1%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/benchmark/core_tablet_flat_benchmark
>> YdbIndexTable::OnlineBuildWithDataColumn [GOOD]
|84.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test
>> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-fifo] [GOOD]
>> TPersQueueCommonTest::TestLimiterLimitsWithUserPayloadRateLimit [GOOD]
>> TPersQueueTest::AllEqual [GOOD]
>> TPersQueueTest::BadSids
|84.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/compatibility/ydb-tests-functional-compatibility
|84.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/compatibility/ydb-tests-functional-compatibility
|84.1%| [LD] {RESULT} $(B)/ydb/tests/functional/compatibility/ydb-tests-functional-compatibility
|84.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/yds/ydb-tests-fq-yds
|84.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/yds/ydb-tests-fq-yds
|84.1%| [LD] {RESULT} $(B)/ydb/tests/fq/yds/ydb-tests-fq-yds
>> TConsoleConfigSubscriptionTests::TestConfigNotificationRetries [GOOD]
>> TConsoleConfigSubscriptionTests::TestConfigSubscriptionsCleanup
|84.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots
|84.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots
|84.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots
|84.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ydb-public-sdk-cpp-src-client-topic-ut
|84.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ydb-public-sdk-cpp-src-client-topic-ut
|84.1%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ydb-public-sdk-cpp-src-client-topic-ut
|84.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test
>> TStorageTenantTest::CreateTableInsideSubDomain [GOOD]
>> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-fifo] [GOOD]
>> TStorageTenantTest::Boot [GOOD]
>> TStorageTenantTest::CopyTableAndConcurrentSplit
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/idx_test/unittest >> YdbIndexTable::OnlineBuildWithDataColumn [GOOD]
Test command err:
Trying to start YDB, gRPC: 25068, MsgBus: 23166
2025-03-31T16:03:33.873616Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488004956747098605:2112];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:03:33.873670Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000e19/r3tmp/tmpGsXjeX/pdisk_1.dat
2025-03-31T16:03:34.453061Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:03:34.453172Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:03:34.460659Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 25068, node 1
2025-03-31T16:03:34.532004Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0
2025-03-31T16:03:34.532035Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0
2025-03-31T16:03:34.615306Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:03:34.643812Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T16:03:34.643841Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T16:03:34.643854Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T16:03:34.643979Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:23166
TClient is connected to server localhost:23166
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-31T16:03:35.306472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:03:35.329134Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-03-31T16:03:35.340409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:03:35.540986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:03:35.696824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
waiting...
2025-03-31T16:03:35.768901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
2025-03-31T16:03:37.580161Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004973926969521:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:03:37.580270Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:03:37.902728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-31T16:03:37.929004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-31T16:03:37.962924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-31T16:03:37.998342Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-31T16:03:38.030377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-31T16:03:38.069905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-31T16:03:38.169364Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004978221937331:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:03:38.169451Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:03:38.169772Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488004978221937336:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:03:38.174288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-31T16:03:38.189128Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488004978221937338:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-03-31T16:03:38.273678Z node 1 :TX_PROXY ERROR: Actor# [1:7488004978221937393:3457] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-31T16:03:38.875236Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488004956747098605:2112];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:03:38.875305Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-31T16:03:39.250372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-03-31T16:03:40.020149Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710672. Ctx: { TraceId: 01jqpf3ev91xprvc8gnnamsw9b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzM4ZTkyNzMtMzdiMDQ2YTQtZDA4YThhYTgtMzJlOTIzNDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:03:40.034234Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710673. Ctx: { TraceId: 01jqpf3ev91xprvc8gnnamsw9b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzM4ZTkyNzMtMzdiMDQ2YTQtZDA4YThhYTgtMzJlOTIzNDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:03:40.112356Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710674. Ctx: { TraceId: 01jqpf3ey43mhbhh31v75j368y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzE4NWFkYjctMzhhMzM5ZWYtNmFmODhlM2MtNTg1M2FiNmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:03:40.124638Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01jqpf3ey43mhbhh31v75j368y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzE4NWFkYjctMzhhMzM5ZWYtNmFmODhlM2MtNTg1M2FiNmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:03:40.168995Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710676. Ctx: { TraceId: 01jqpf3f00c7pjhrfz366qbbtd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzM4ZTkyNzMtMzdiMDQ2YTQtZDA4YThhYTgtMzJlOTIzNDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:03:40.183041Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710677. Ctx: { TraceId: 01jqpf3f00c7pjhrfz366qbbtd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzM4ZTkyNzMtMzdiMDQ2YTQtZDA4YThhYTgtMzJlOTIzNDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:03:40.271062Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710678. Ctx: { TraceId: 01jqpf3f36fb6z696n9gbqycw0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzE4NWFkYjctMzhhMzM5ZWYtNmFmODhlM2MtNTg1M2FiNmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:03:40.287173Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710679. Ctx: { TraceId: 01jqpf3f36fb6z696n9gbqycw0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzE4NWFkYjctMzhhMzM5ZWYtNmFmODhlM2MtNTg1M2FiNmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:03:40.324590Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710680. Ctx: { TraceId: 01jqpf3f4x4mjqe0xmn7jn09g6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzM4ZTkyNzMtMzdiMDQ2YTQtZDA4YThhYTgtMzJlOTIzNDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:03:40.335814Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710681. Ctx: { TraceId: 01jqpf3f4x4mjqe0xmn7jn09g6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzM4ZTkyNzMtMzdiMDQ2YTQtZDA4YThhYTgtMzJlOTIzNDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:03:40.372514Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710682. Ctx: { TraceId: 01jqpf3f6g ... : TxId: 281474976720634. Ctx: { TraceId: 01jqpffaz6bcb8nmx3z6h8fc4f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Zjk3ODRhZTYtNGM2MGY4M2EtMTUxNTE2ODQtOTFjYjA5MDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:10:09.322582Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720635. Ctx: { TraceId: 01jqpffaz6bcb8nmx3z6h8fc4f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Zjk3ODRhZTYtNGM2MGY4M2EtMTUxNTE2ODQtOTFjYjA5MDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:10:09.390099Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720636. Ctx: { TraceId: 01jqpffb367ccrhvezwh8hpd3r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzI4ZTY5MmMtODdkZmZjODQtYWQyNmViZTAtYWFiYTkxYWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:10:09.413694Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720637. Ctx: { TraceId: 01jqpffb367ccrhvezwh8hpd3r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzI4ZTY5MmMtODdkZmZjODQtYWQyNmViZTAtYWFiYTkxYWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:10:09.474977Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720638. Ctx: { TraceId: 01jqpffb5wb9rnxfd4gryed403, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Zjk3ODRhZTYtNGM2MGY4M2EtMTUxNTE2ODQtOTFjYjA5MDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:10:09.483642Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720639. Ctx: { TraceId: 01jqpffb5wb9rnxfd4gryed403, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Zjk3ODRhZTYtNGM2MGY4M2EtMTUxNTE2ODQtOTFjYjA5MDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:10:09.540233Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720640. Ctx: { TraceId: 01jqpffb7z7e73aywtww7fbe27, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzI4ZTY5MmMtODdkZmZjODQtYWQyNmViZTAtYWFiYTkxYWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:10:09.549970Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720641. Ctx: { TraceId: 01jqpffb7z7e73aywtww7fbe27, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzI4ZTY5MmMtODdkZmZjODQtYWQyNmViZTAtYWFiYTkxYWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:10:09.587311Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720642. Ctx: { TraceId: 01jqpffb9e7qw076vp56jhqvd7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Zjk3ODRhZTYtNGM2MGY4M2EtMTUxNTE2ODQtOTFjYjA5MDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:10:09.598822Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720643. Ctx: { TraceId: 01jqpffb9e7qw076vp56jhqvd7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Zjk3ODRhZTYtNGM2MGY4M2EtMTUxNTE2ODQtOTFjYjA5MDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:10:09.666519Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720644. Ctx: { TraceId: 01jqpffbbvetztnsfe3bsxejxz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzI4ZTY5MmMtODdkZmZjODQtYWQyNmViZTAtYWFiYTkxYWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:10:09.680188Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720645. Ctx: { TraceId: 01jqpffbbvetztnsfe3bsxejxz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzI4ZTY5MmMtODdkZmZjODQtYWQyNmViZTAtYWFiYTkxYWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:10:09.780897Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720646. Ctx: { TraceId: 01jqpffbf85wj7hdh6x66zts2m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Zjk3ODRhZTYtNGM2MGY4M2EtMTUxNTE2ODQtOTFjYjA5MDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:10:09.833560Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720647. Ctx: { TraceId: 01jqpffbf85wj7hdh6x66zts2m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Zjk3ODRhZTYtNGM2MGY4M2EtMTUxNTE2ODQtOTFjYjA5MDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:10:09.925982Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720648. Ctx: { TraceId: 01jqpffbkyaj8p1zzwfr7zbm8t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzI4ZTY5MmMtODdkZmZjODQtYWQyNmViZTAtYWFiYTkxYWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:10:09.939694Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720649. Ctx: { TraceId: 01jqpffbkyaj8p1zzwfr7zbm8t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzI4ZTY5MmMtODdkZmZjODQtYWQyNmViZTAtYWFiYTkxYWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:10:10.022389Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720650. Ctx: { TraceId: 01jqpffbpwc3ykg9bypnkdf3r4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Zjk3ODRhZTYtNGM2MGY4M2EtMTUxNTE2ODQtOTFjYjA5MDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:10:10.250116Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720651. Ctx: { TraceId: 01jqpffbpwc3ykg9bypnkdf3r4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Zjk3ODRhZTYtNGM2MGY4M2EtMTUxNTE2ODQtOTFjYjA5MDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:10:10.375583Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720652. Ctx: { TraceId: 01jqpffc1y098nhf3crbpwjq3z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzI4ZTY5MmMtODdkZmZjODQtYWQyNmViZTAtYWFiYTkxYWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:10:10.392750Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720653. Ctx: { TraceId: 01jqpffc1y098nhf3crbpwjq3z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzI4ZTY5MmMtODdkZmZjODQtYWQyNmViZTAtYWFiYTkxYWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:10:10.465963Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720654. Ctx: { TraceId: 01jqpffc4qdcpz2632rmzgz5rw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Zjk3ODRhZTYtNGM2MGY4M2EtMTUxNTE2ODQtOTFjYjA5MDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:10:10.480160Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720655. Ctx: { TraceId: 01jqpffc4qdcpz2632rmzgz5rw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Zjk3ODRhZTYtNGM2MGY4M2EtMTUxNTE2ODQtOTFjYjA5MDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:10:10.576206Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720656. Ctx: { TraceId: 01jqpffc88fxc2mdakfg3p4m77, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzI4ZTY5MmMtODdkZmZjODQtYWQyNmViZTAtYWFiYTkxYWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:10:10.591484Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720657. Ctx: { TraceId: 01jqpffc88fxc2mdakfg3p4m77, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzI4ZTY5MmMtODdkZmZjODQtYWQyNmViZTAtYWFiYTkxYWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:10:10.722886Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720658. Ctx: { TraceId: 01jqpffccq9g39xhn6sf7t8gne, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Zjk3ODRhZTYtNGM2MGY4M2EtMTUxNTE2ODQtOTFjYjA5MDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:10:10.753217Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720659. Ctx: { TraceId: 01jqpffccq9g39xhn6sf7t8gne, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Zjk3ODRhZTYtNGM2MGY4M2EtMTUxNTE2ODQtOTFjYjA5MDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:10:10.854315Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720660. Ctx: { TraceId: 01jqpffcgv6gppjmk41gc99yza, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzI4ZTY5MmMtODdkZmZjODQtYWQyNmViZTAtYWFiYTkxYWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:10:10.903799Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720661. Ctx: { TraceId: 01jqpffcgv6gppjmk41gc99yza, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzI4ZTY5MmMtODdkZmZjODQtYWQyNmViZTAtYWFiYTkxYWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:10:10.988115Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720662. Ctx: { TraceId: 01jqpffcn32aknrz8rdx9dy4j3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Zjk3ODRhZTYtNGM2MGY4M2EtMTUxNTE2ODQtOTFjYjA5MDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:10:10.999611Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720663. Ctx: { TraceId: 01jqpffcn32aknrz8rdx9dy4j3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Zjk3ODRhZTYtNGM2MGY4M2EtMTUxNTE2ODQtOTFjYjA5MDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:10:11.098883Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720664. Ctx: { TraceId: 01jqpffcrm9zsg7vptqnw348gz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzI4ZTY5MmMtODdkZmZjODQtYWQyNmViZTAtYWFiYTkxYWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:10:11.119322Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720665. Ctx: { TraceId: 01jqpffcrm9zsg7vptqnw348gz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzI4ZTY5MmMtODdkZmZjODQtYWQyNmViZTAtYWFiYTkxYWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:10:11.232125Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720666. Ctx: { TraceId: 01jqpffcwk4md8xc324rrd2wp4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Zjk3ODRhZTYtNGM2MGY4M2EtMTUxNTE2ODQtOTFjYjA5MDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:10:11.251185Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720667. Ctx: { TraceId: 01jqpffcwk4md8xc324rrd2wp4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Zjk3ODRhZTYtNGM2MGY4M2EtMTUxNTE2ODQtOTFjYjA5MDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:10:11.326229Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720668. Ctx: { TraceId: 01jqpffczp6wrv14wwcasenaa2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzI4ZTY5MmMtODdkZmZjODQtYWQyNmViZTAtYWFiYTkxYWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:10:11.337079Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976720669. Ctx: { TraceId: 01jqpffczp6wrv14wwcasenaa2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzI4ZTY5MmMtODdkZmZjODQtYWQyNmViZTAtYWFiYTkxYWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
finished with status: SUCCESS
>> TStorageTenantTest::DeclareAndDefine [GOOD]
|84.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test
>> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-fifo] [GOOD]
|84.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_login_large/ydb-core-tx-schemeshard-ut_login_large
|84.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_login_large/ydb-core-tx-schemeshard-ut_login_large
|84.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_login_large/ydb-core-tx-schemeshard-ut_login_large
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::CreateTableInsideSubDomain [GOOD]
Test command err:
2025-03-31T16:10:11.811204Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488006662903029348:2279];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:10:11.811265Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ccc/r3tmp/tmpa9Wbzi/pdisk_1.dat
2025-03-31T16:10:12.558148Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:10:12.586581Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:10:12.586977Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:10:12.594068Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:7154
WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1 2025-03-31T16:10:12.882906Z node 1 :TX_PROXY DEBUG: actor# [1:7488006662903029351:2114] Handle TEvNavigate describe path dc-1 2025-03-31T16:10:12.882991Z node 1 :TX_PROXY DEBUG: Actor# [1:7488006667197997140:2438] HANDLE EvNavigateScheme dc-1 2025-03-31T16:10:12.883108Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7488006667197996675:2129], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:12.883185Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488006667197997123:2433][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7488006667197996675:2129], cookie# 1 2025-03-31T16:10:12.884364Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488006667197997127:2433][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488006667197997124:2433], cookie# 1 2025-03-31T16:10:12.884418Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488006667197997128:2433][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488006667197997125:2433], cookie# 1 2025-03-31T16:10:12.884434Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488006667197997129:2433][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488006667197997126:2433], cookie# 1 2025-03-31T16:10:12.884491Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488006662903029036:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488006667197997127:2433], cookie# 1 2025-03-31T16:10:12.884520Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488006662903029039:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488006667197997128:2433], cookie# 1 2025-03-31T16:10:12.884535Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488006662903029042:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488006667197997129:2433], cookie# 1 2025-03-31T16:10:12.884559Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488006667197997127:2433][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488006662903029036:2051], cookie# 1 2025-03-31T16:10:12.884572Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488006667197997128:2433][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488006662903029039:2054], cookie# 1 2025-03-31T16:10:12.884601Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488006667197997129:2433][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488006662903029042:2057], cookie# 1 2025-03-31T16:10:12.884629Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488006667197997123:2433][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488006667197997124:2433], cookie# 1 2025-03-31T16:10:12.884650Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488006667197997123:2433][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-03-31T16:10:12.884675Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488006667197997123:2433][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# 
[1:7488006667197997125:2433], cookie# 1 2025-03-31T16:10:12.884691Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488006667197997123:2433][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-03-31T16:10:12.884711Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488006667197997123:2433][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488006667197997126:2433], cookie# 1 2025-03-31T16:10:12.884719Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488006667197997123:2433][/dc-1] Unexpected sync response: sender# [1:7488006667197997126:2433], cookie# 1 2025-03-31T16:10:12.884764Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7488006667197996675:2129], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-03-31T16:10:12.893168Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7488006667197996675:2129], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7488006667197997123:2433] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-31T16:10:12.893288Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7488006667197996675:2129], cacheItem# { Subscriber: { Subscriber: [1:7488006667197997123:2433] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-03-31T16:10:12.895799Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7488006667197997141:2439], recipient# [1:7488006667197997140:2438], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-31T16:10:12.895883Z node 1 :TX_PROXY DEBUG: Actor# [1:7488006667197997140:2438] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-31T16:10:12.941987Z node 1 :TX_PROXY DEBUG: Actor# [1:7488006667197997140:2438] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-03-31T16:10:12.945264Z node 1 :TX_PROXY DEBUG: Actor# [1:7488006667197997140:2438] Handle TEvDescribeSchemeResult Forward to# [1:7488006667197997139:2437] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult 
PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-03-31T16:10:13.003325Z node 1 :TX_PROXY DEBUG: actor# [1:7488006662903029351:2114] Handle TEvProposeTransaction 2025-03-31T16:10:13.003353Z node 1 :TX_PROXY DEBUG: actor# [1:7488006662903029351:2114] TxId# 281474976710657 ProcessProposeTransaction 2025-03-31T16:10:13.003509Z node 1 :TX_PROXY DEBUG: actor# [1:7488006662903029351:2114] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7488006671492964447:2448] 2025-03-31T16:10:13.079870Z node 1 :TX_PROXY DEBUG: Actor# [1:7488006671492964447:2448] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "storage-pool-number-2" } StoragePools { Name: "" Kind: "storage-pool-number-1" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" PeerName: "" 2025-03-31T16:10:13.079920Z node 1 :TX_PROXY DEBUG: Actor# [1:7488006671492964447:2448] txid# 281474976710657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T16:10:13.080003Z node 1 :TX_PROXY DEBUG: Actor# [1:7488006671492964447:2448] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:10:13.080112Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7488006667197996675:2129], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615 ... Table" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710660 CreateStep: 1743437415100 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "SimpleTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 
ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } FollowerCount: 2 PartitioningPolicy { MinPartitionsCount: 2 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186224037888 Coordinators: 72075186224037889 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037890 Mediators: 72075186224037891 } DomainKey { SchemeShard: 72057594046644480 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "SimpleTable" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710660 CreateStep: 1743437415100 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "SimpleTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "k... 
(TRUNCATED) 2025-03-31T16:10:15.324397Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2025-03-31T16:10:15.325290Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-03-31T16:10:15.325689Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488006662903029036:2051] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [3:7488006672353957624:2106] 2025-03-31T16:10:15.325734Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7488006662903029036:2051] Unsubscribe: subscriber# [3:7488006672353957624:2106], path# /dc-1/USER_0 2025-03-31T16:10:15.325778Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488006662903029039:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [3:7488006672353957625:2106] 2025-03-31T16:10:15.325789Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7488006662903029039:2054] Unsubscribe: subscriber# [3:7488006672353957625:2106], path# /dc-1/USER_0 2025-03-31T16:10:15.325840Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488006662903029042:2057] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [3:7488006672353957626:2106] 2025-03-31T16:10:15.325859Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7488006662903029042:2057] Unsubscribe: subscriber# [3:7488006672353957626:2106], path# /dc-1/USER_0 2025-03-31T16:10:15.884702Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7488006676648924926:2108], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:15.884825Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7488006676648924926:2108], cacheItem# { Subscriber: { Subscriber: [3:7488006676648925124:2227] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:10:15.884915Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7488006680943892627:2348], recipient# [3:7488006680943892626:2325], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:16.891429Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7488006676648924926:2108], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false 
Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:16.891553Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7488006676648924926:2108], cacheItem# { Subscriber: { Subscriber: [3:7488006676648925124:2227] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:10:16.891631Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7488006685238859925:2349], recipient# [3:7488006685238859924:2326], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:17.895524Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7488006676648924926:2108], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:17.895656Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7488006676648924926:2108], cacheItem# { Subscriber: { Subscriber: [3:7488006676648925124:2227] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:10:17.895750Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7488006689533827224:2351], recipient# [3:7488006689533827223:2327], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::DeclareAndDefine [GOOD] Test command err: 2025-03-31T16:10:11.602873Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488006666980462485:2205];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:10:11.602944Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000cea/r3tmp/tmpbdjz5O/pdisk_1.dat 2025-03-31T16:10:12.449233Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:10:12.509156Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:10:12.509261Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:10:12.546720Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9253 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-03-31T16:10:12.855558Z node 1 :TX_PROXY DEBUG: actor# [1:7488006666980462584:2116] Handle TEvNavigate describe path dc-1 2025-03-31T16:10:12.855603Z node 1 :TX_PROXY DEBUG: Actor# [1:7488006671275430378:2449] HANDLE EvNavigateScheme dc-1 2025-03-31T16:10:12.855743Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7488006666980462607:2129], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:12.855842Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488006671275430300:2395][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7488006666980462607:2129], cookie# 1 2025-03-31T16:10:12.857350Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488006671275430304:2395][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488006671275430301:2395], cookie# 1 2025-03-31T16:10:12.857389Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488006671275430305:2395][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488006671275430302:2395], cookie# 1 2025-03-31T16:10:12.857404Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488006671275430306:2395][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488006671275430303:2395], cookie# 1 2025-03-31T16:10:12.857438Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488006666980462260:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488006671275430304:2395], cookie# 1 2025-03-31T16:10:12.857461Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488006666980462263:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488006671275430305:2395], cookie# 1 2025-03-31T16:10:12.857489Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488006666980462266:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488006671275430306:2395], cookie# 1 2025-03-31T16:10:12.857531Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488006671275430304:2395][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488006666980462260:2050], cookie# 1 2025-03-31T16:10:12.857568Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488006671275430305:2395][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488006666980462263:2053], cookie# 1 2025-03-31T16:10:12.857586Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488006671275430306:2395][/dc-1] Handle 
NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488006666980462266:2056], cookie# 1 2025-03-31T16:10:12.857633Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488006671275430300:2395][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488006671275430301:2395], cookie# 1 2025-03-31T16:10:12.857661Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488006671275430300:2395][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-03-31T16:10:12.857678Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488006671275430300:2395][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488006671275430302:2395], cookie# 1 2025-03-31T16:10:12.857695Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488006671275430300:2395][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-03-31T16:10:12.857736Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488006671275430300:2395][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488006671275430303:2395], cookie# 1 2025-03-31T16:10:12.857751Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488006671275430300:2395][/dc-1] Unexpected sync response: sender# [1:7488006671275430303:2395], cookie# 1 2025-03-31T16:10:12.857805Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7488006666980462607:2129], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-03-31T16:10:12.886780Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7488006666980462607:2129], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7488006671275430300:2395] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-31T16:10:12.886909Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7488006666980462607:2129], cacheItem# { Subscriber: { Subscriber: [1:7488006671275430300:2395] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-03-31T16:10:12.889131Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7488006671275430379:2450], recipient# [1:7488006671275430378:2449], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 
TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-31T16:10:12.889201Z node 1 :TX_PROXY DEBUG: Actor# [1:7488006671275430378:2449] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-31T16:10:12.952651Z node 1 :TX_PROXY DEBUG: Actor# [1:7488006671275430378:2449] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-03-31T16:10:12.956961Z node 1 :TX_PROXY DEBUG: Actor# [1:7488006671275430378:2449] Handle TEvDescribeSchemeResult Forward to# [1:7488006671275430377:2448] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-03-31T16:10:12.980761Z node 1 :TX_PROXY DEBUG: actor# [1:7488006666980462584:2116] Handle TEvProposeTransaction 2025-03-31T16:10:12.980786Z node 1 :TX_PROXY DEBUG: actor# [1:7488006666980462584:2116] TxId# 281474976715657 ProcessProposeTransaction 2025-03-31T16:10:12.980901Z node 1 :TX_PROXY DEBUG: actor# [1:7488006666980462584:2116] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7488006671275430387:2457] 2025-03-31T16:10:13.112835Z node 1 :TX_PROXY DEBUG: Actor# [1:7488006671275430387:2457] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "storage-pool-number-2" } StoragePools { Name: "" Kind: "storage-pool-number-1" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" PeerName: "" 2025-03-31T16:10:13.112904Z node 1 :TX_PROXY DEBUG: Actor# [1:7488006671275430387:2457] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T16:10:13.113044Z node 1 :TX_PROXY DEBUG: Actor# [1:7488006671275430387:2457] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:10:13.113151Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7488006666980462607:2129], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615 ... pathId# nullptr 2025-03-31T16:10:17.940951Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7488006666980462607:2129], cacheItem# { Subscriber: { Subscriber: [1:7488006692750267675:3035] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:10:17.941014Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7488006692750267682:3036], recipient# [1:7488006692750267674:2339], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:18.615403Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7488006666980462607:2129], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:18.615537Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7488006666980462607:2129], cacheItem# { Subscriber: { Subscriber: [1:7488006671275430309:2398] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 
Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:10:18.615636Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7488006697045234995:3040], recipient# [1:7488006697045234994:2340], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:18.684017Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7488006666980462607:2129], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:18.684124Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7488006666980462607:2129], cacheItem# { Subscriber: { Subscriber: [1:7488006671275430309:2398] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:10:18.684208Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7488006697045235000:3041], recipient# [1:7488006697045234999:2341], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:18.943569Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7488006666980462607:2129], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:18.943695Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7488006666980462607:2129], cacheItem# { Subscriber: { Subscriber: [1:7488006692750267675:3035] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: 
IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:10:18.943770Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7488006697045235002:3042], recipient# [1:7488006697045235001:2342], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:19.619560Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7488006666980462607:2129], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:19.619666Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7488006666980462607:2129], cacheItem# { Subscriber: { Subscriber: [1:7488006671275430309:2398] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:10:19.619744Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7488006701340202315:3046], recipient# [1:7488006701340202314:2343], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:19.686682Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7488006666980462607:2129], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:19.686818Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7488006666980462607:2129], cacheItem# { Subscriber: { Subscriber: [1:7488006671275430309:2398] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: 
dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:10:19.686928Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7488006701340202317:3047], recipient# [1:7488006701340202316:2344], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:19.947692Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7488006666980462607:2129], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:19.947880Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7488006666980462607:2129], cacheItem# { Subscriber: { Subscriber: [1:7488006692750267675:3035] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:10:19.948037Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7488006701340202322:3048], recipient# [1:7488006701340202321:2345], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-std] [GOOD] |84.2%| [TA] $(B)/ydb/core/kqp/ut/idx_test/test-results/unittest/{meta.json ... results_accumulator.log} >> test_retry_high_rate.py::TestRetry::test_high_rate[kikimr0] [GOOD] >> TPersQueueTest::ReadWithoutConsumerFederation [GOOD] >> TPersQueueTest::ReadWithoutConsumerFirstClassCitizen |84.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots |84.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots |84.2%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/idx_test/test-results/unittest/{meta.json ... 
results_accumulator.log} |84.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v1] [GOOD] >> TStorageTenantTest::CreateTableInsideSubDomain2 [GOOD] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v0] [GOOD] |84.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test |84.2%| [LD] {RESULT} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test |84.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test >> TStorageTenantTest::LsLs [GOOD] |84.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/ydb-tests-olap |84.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/ydb-tests-olap |84.3%| [LD] {RESULT} $(B)/ydb/tests/olap/ydb-tests-olap |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> TStorageTenantTest::CreateSolomonInsideSubDomain [GOOD] >> TStorageTenantTest::GenericCases [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::LsLs [GOOD] Test command err: 2025-03-31T16:10:12.658943Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488006669348064581:2212];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:10:12.659109Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:10:12.787540Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488006670946241424:2156];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000cb8/r3tmp/tmpAKtuob/pdisk_1.dat 2025-03-31T16:10:13.724741Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:10:13.983576Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:10:14.920426Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:10:14.956180Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:10:15.212460Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:10:15.355853Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:10:15.355955Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:10:15.356804Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:10:15.356866Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) 
VolatileState: Disconnected -> Connecting 2025-03-31T16:10:15.504558Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:10:15.504746Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:10:15.523541Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:10:15.921170Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.119143s 2025-03-31T16:10:15.921241Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.119234s TClient is connected to server localhost:6942 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-03-31T16:10:16.594989Z node 1 :TX_PROXY DEBUG: actor# [1:7488006669348064672:2142] Handle TEvNavigate describe path dc-1 2025-03-31T16:10:16.595035Z node 1 :TX_PROXY DEBUG: Actor# [1:7488006686527934354:2475] HANDLE EvNavigateScheme dc-1 2025-03-31T16:10:16.595163Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7488006673643031991:2155], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:16.595237Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488006673643032098:2206][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7488006673643031991:2155], cookie# 1 2025-03-31T16:10:16.596903Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488006673643032115:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488006673643032112:2206], cookie# 1 2025-03-31T16:10:16.596946Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488006673643032116:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488006673643032113:2206], cookie# 1 2025-03-31T16:10:16.596961Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488006673643032117:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488006673643032114:2206], cookie# 1 2025-03-31T16:10:16.597001Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488006669348064309:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488006673643032115:2206], cookie# 1 2025-03-31T16:10:16.597037Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488006669348064312:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488006673643032116:2206], cookie# 1 2025-03-31T16:10:16.597059Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488006669348064315:2058] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488006673643032117:2206], cookie# 1 2025-03-31T16:10:16.597086Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488006673643032115:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488006669348064309:2052], cookie# 1 2025-03-31T16:10:16.597102Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488006673643032116:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488006669348064312:2055], cookie# 1 
2025-03-31T16:10:16.597119Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488006673643032117:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488006669348064315:2058], cookie# 1 2025-03-31T16:10:16.597158Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488006673643032098:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488006673643032112:2206], cookie# 1 2025-03-31T16:10:16.597198Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488006673643032098:2206][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-03-31T16:10:16.597230Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488006673643032098:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488006673643032113:2206], cookie# 1 2025-03-31T16:10:16.597248Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488006673643032098:2206][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-03-31T16:10:16.597269Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488006673643032098:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488006673643032114:2206], cookie# 1 2025-03-31T16:10:16.597281Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488006673643032098:2206][/dc-1] Unexpected sync response: sender# [1:7488006673643032114:2206], cookie# 1 2025-03-31T16:10:16.597337Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7488006673643031991:2155], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-03-31T16:10:16.603373Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7488006673643031991:2155], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7488006673643032098:2206] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-31T16:10:16.603645Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7488006673643031991:2155], cacheItem# { Subscriber: { Subscriber: [1:7488006673643032098:2206] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-03-31T16:10:16.605988Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7488006686527934355:2476], recipient# [1:7488006686527934354:2475], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: 
[OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-31T16:10:16.606041Z node 1 :TX_PROXY DEBUG: Actor# [1:7488006686527934354:2475] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-31T16:10:16.679484Z node 1 :TX_PROXY DEBUG: Actor# [1:7488006686527934354:2475] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-03-31T16:10:16.695686Z node 1 :TX_PROXY DEBUG: Actor# [1:7488006686527934354:2475] Handle TEvDescribeSchemeResult Forward to# [1:7488006686527934353:2474] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenEx ... 
{ ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:24.493237Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7488006675241208876:2107], cacheItem# { Subscriber: { Subscriber: [2:7488006718190881922:2129] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:10:24.493297Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7488006675241208876:2107], cacheItem# { Subscriber: { Subscriber: [2:7488006718190881923:2130] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:10:24.493391Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7488006722485849240:2137], recipient# [2:7488006718190881921:2318], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:24.494181Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7488006718190881921:2318], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:10:24.520553Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7488006675241208876:2107], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:24.520692Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7488006675241208876:2107], cacheItem# { Subscriber: { Subscriber: [2:7488006718190881904:2127] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:10:24.520768Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7488006722485849242:2138], recipient# [2:7488006722485849241:2321], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:24.521490Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:10:24.821463Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7488006675241208876:2107], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:24.821588Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7488006675241208876:2107], cacheItem# { Subscriber: { Subscriber: [2:7488006683831143493:2113] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:10:24.821668Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7488006722485849244:2139], recipient# [2:7488006722485849243:2322], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet 
[{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:24.987732Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7488006675241208876:2107], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:24.987866Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7488006675241208876:2107], cacheItem# { Subscriber: { Subscriber: [2:7488006683831143493:2113] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:10:24.987951Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7488006722485849246:2140], recipient# [2:7488006722485849245:2323], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:25.367500Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7488006675241208876:2107], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:25.367634Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7488006675241208876:2107], cacheItem# { Subscriber: { Subscriber: [2:7488006718190881922:2129] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:10:25.367683Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# 
[2:7488006675241208876:2107], cacheItem# { Subscriber: { Subscriber: [2:7488006718190881923:2130] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:10:25.367773Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7488006726780816543:2141], recipient# [2:7488006718190881921:2318], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:25.367942Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7488006718190881921:2318], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
|84.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_reading_from_empty_queue [GOOD]
>> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v1] [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::CreateTableInsideSubDomain2 [GOOD]
Test command err:
2025-03-31T16:10:12.293012Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488006667983984075:2292];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:10:12.295812Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000cc7/r3tmp/tmpV21RaA/pdisk_1.dat 2025-03-31T16:10:13.396316Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:10:13.601701Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:10:13.601811Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:10:13.615943Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:10:13.618275Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13581 WaitRootIsUp 'dc-1'...
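Aside: the LookupError entries above show the lookup-retry pattern that recurs throughout these tests. A navigate request for a not-yet-created .metadata path fails with LookupError, the caller logs "Scheduled retry for error" and repeats the lookup until the path resolves or the time budget runs out (the "event=timeout" outcome seen later in this trace). The following is a minimal, self-contained sketch of that pattern under assumed semantics; all names (NavigatePath, ResolveWithRetry, EStatus) are hypothetical and this is not YDB source.

#include <chrono>
#include <iostream>
#include <thread>

enum class EStatus { Ok, LookupError };

// Stand-in for a TEvNavigateKeySet round trip: here the path only becomes
// resolvable on the third attempt, as if schema initialization finished meanwhile.
EStatus NavigatePath(int attempt) {
    return attempt < 3 ? EStatus::LookupError : EStatus::Ok;
}

// Retry a failed lookup on a fixed cadence until it succeeds or a time budget
// is exhausted (the "error=timeout" outcome reported in the trace).
bool ResolveWithRetry(std::chrono::milliseconds delay,
                      std::chrono::milliseconds budget) {
    const auto deadline = std::chrono::steady_clock::now() + budget;
    for (int attempt = 1;; ++attempt) {
        if (NavigatePath(attempt) == EStatus::Ok) {
            return true;                      // path exists; stop retrying
        }
        if (std::chrono::steady_clock::now() + delay > deadline) {
            return false;                     // budget exhausted: report timeout
        }
        std::cerr << "Scheduled retry for error: LookupError\n";
        std::this_thread::sleep_for(delay);
    }
}

int main() {
    bool ok = ResolveWithRetry(std::chrono::milliseconds(10),
                               std::chrono::milliseconds(100));
    std::cout << (ok ? "resolved" : "timeout") << '\n';
}

Treating LookupError as transient rather than fatal is what lets a test (or a tenant) start issuing metadata lookups before schema initialization has finished, at the cost of the repeated WARN/ERROR noise visible in these traces.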
TClient::Ls request: dc-1 2025-03-31T16:10:14.402868Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7488006667983984076:2140], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:14.402931Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7488006667983984076:2140], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480 2025-03-31T16:10:14.403142Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488006676573919127:2453][/dc-1/.metadata/initialization/migrations] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-03-31T16:10:14.405276Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488006663689016417:2051] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [1:7488006676573919131:2453] 2025-03-31T16:10:14.405315Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7488006663689016417:2051] Upsert description: path# /dc-1/.metadata/initialization/migrations 2025-03-31T16:10:14.405386Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7488006663689016417:2051] Subscribe: subscriber# [1:7488006676573919131:2453], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-31T16:10:14.405455Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488006663689016420:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [1:7488006676573919132:2453] 2025-03-31T16:10:14.405463Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7488006663689016420:2054] Upsert description: path# /dc-1/.metadata/initialization/migrations 2025-03-31T16:10:14.405492Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7488006663689016420:2054] Subscribe: subscriber# [1:7488006676573919132:2453], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-31T16:10:14.405524Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488006663689016423:2057] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [1:7488006676573919133:2453] 2025-03-31T16:10:14.405531Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7488006663689016423:2057] Upsert description: path# /dc-1/.metadata/initialization/migrations 2025-03-31T16:10:14.405551Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7488006663689016423:2057] Subscribe: subscriber# [1:7488006676573919133:2453], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-31T16:10:14.405585Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488006676573919131:2453][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [1:7488006663689016417:2051] 2025-03-31T16:10:14.405604Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488006676573919132:2453][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [1:7488006663689016420:2054] 
2025-03-31T16:10:14.405624Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488006676573919133:2453][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [1:7488006663689016423:2057] 2025-03-31T16:10:14.405691Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488006676573919127:2453][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [1:7488006676573919128:2453] 2025-03-31T16:10:14.405726Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488006676573919127:2453][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [1:7488006676573919129:2453] 2025-03-31T16:10:14.405755Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7488006676573919127:2453][/dc-1/.metadata/initialization/migrations] Set up state: owner# [1:7488006667983984076:2140], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-31T16:10:14.405773Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488006676573919127:2453][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [1:7488006676573919130:2453] 2025-03-31T16:10:14.405797Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7488006676573919127:2453][/dc-1/.metadata/initialization/migrations] Ignore empty state: owner# [1:7488006667983984076:2140], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-31T16:10:14.405864Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7488006667983984076:2140], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/initialization/migrations PathId: Strong: 1 } 2025-03-31T16:10:14.405957Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7488006667983984076:2140], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/initialization/migrations PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [1:7488006676573919127:2453] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-31T16:10:14.406900Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488006663689016417:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7488006676573919131:2453] 2025-03-31T16:10:14.423310Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488006663689016420:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7488006676573919132:2453] 2025-03-31T16:10:14.423380Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488006663689016423:2057] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7488006676573919133:2453] 2025-03-31T16:10:14.432634Z node 1 :TX_PROXY DEBUG: actor# [1:7488006667983983998:2115] Handle TEvNavigate describe path dc-1 2025-03-31T16:10:14.432697Z node 1 :TX_PROXY DEBUG: Actor# [1:7488006676573919137:2457] HANDLE EvNavigateScheme dc-1 2025-03-31T16:10:14.554499Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7488006667983984076:2140], cacheItem# { Subscriber: { Subscriber: 
[1:7488006676573919127:2453] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:10:14.554636Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7488006676573919138:2458], recipient# [1:7488006676573919126:2300], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:14.554698Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7488006667983984076:2140], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:14.554783Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488006672278951636:2299][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7488006667983984076:2140], cookie# 1 2025-03-31T16:10:14.554867Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488006672278951643:2299][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488006672278951640:2299], cookie# 1 2025-03-31T16:10:14.554888Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488006672278951644:2299][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488006672278951641:2299], cookie# 1 2025-03-31T16:10:14.554901Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488006672278951645:2299][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488006672278951642:2299], cookie# 1 2025-03-31T16:10:14.554936Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488006663689016417:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488006672278951643:2299], cookie# 1 2025-03-31T16:10:14.554960Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488006663689016420:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488006672278951644:2299], cookie# 1 2025-03-31T16:10:14.554975Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488006663689016423:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488006672278951645:2299], cookie# 1 2025-03-31T16:10:14.555006Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488006672278951643:2299][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488006663689016417:2051], cookie# 1 2025-03-31T16:10:14.555039Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488006672278951644:2299][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488006663689016420:2054], cookie# ... 
1 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } FollowerCount: 2 PartitioningPolicy { MinPartitionsCount: 2 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186224037888 Coordinators: 72075186224037889 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037890 Mediators: 72075186224037891 } DomainKey { SchemeShard: 72057594046644480 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "SimpleTable" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715660 CreateStep: 1743437418400 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "SimpleTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "k... 
(TRUNCATED) 2025-03-31T16:10:18.734988Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2025-03-31T16:10:18.735957Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-03-31T16:10:18.737268Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488006663689016417:2051] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [3:7488006685624261119:2106] 2025-03-31T16:10:18.737299Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7488006663689016417:2051] Unsubscribe: subscriber# [3:7488006685624261119:2106], path# /dc-1/USER_0 2025-03-31T16:10:18.737328Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488006663689016420:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [3:7488006685624261120:2106] 2025-03-31T16:10:18.737338Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7488006663689016420:2054] Unsubscribe: subscriber# [3:7488006685624261120:2106], path# /dc-1/USER_0 2025-03-31T16:10:18.737354Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488006663689016423:2057] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [3:7488006685624261121:2106] 2025-03-31T16:10:18.737363Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7488006663689016423:2057] Unsubscribe: subscriber# [3:7488006685624261121:2106], path# /dc-1/USER_0 2025-03-31T16:10:19.559659Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7488006685624261141:2108], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:19.559769Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7488006685624261141:2108], cacheItem# { Subscriber: { Subscriber: [3:7488006689919228623:2227] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:10:19.559850Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7488006698509163463:2375], recipient# [3:7488006698509163462:2326], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:20.565235Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7488006685624261141:2108], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false 
Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:20.565370Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7488006685624261141:2108], cacheItem# { Subscriber: { Subscriber: [3:7488006689919228623:2227] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:10:20.565451Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7488006702804130761:2376], recipient# [3:7488006702804130760:2327], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:21.393793Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488006685624260894:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:10:21.393879Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/USER_0/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:10:21.569201Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7488006685624261141:2108], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:21.569353Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7488006685624261141:2108], cacheItem# { Subscriber: { Subscriber: [3:7488006689919228623:2227] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:10:21.569469Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7488006707099098059:2377], recipient# [3:7488006707099098058:2328], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:22.396051Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: 
self# [3:7488006685624261141:2108], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:22.396185Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7488006685624261141:2108], cacheItem# { Subscriber: { Subscriber: [3:7488006689919228623:2227] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:10:22.396278Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7488006711394065358:2378], recipient# [3:7488006711394065357:2329], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] }
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::CreateSolomonInsideSubDomain [GOOD]
Test command err:
2025-03-31T16:10:12.777765Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488006667424607646:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:10:12.777822Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d13/r3tmp/tmpETDD0t/pdisk_1.dat 2025-03-31T16:10:14.031134Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:10:14.034254Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:10:14.081038Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:10:14.081632Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:10:14.108597Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22213 WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1 2025-03-31T16:10:14.979655Z node 1 :TX_PROXY DEBUG: actor# [1:7488006667424607822:2133] Handle TEvNavigate describe path dc-1 2025-03-31T16:10:14.979748Z node 1 :TX_PROXY DEBUG: Actor# [1:7488006676014542941:2454] HANDLE EvNavigateScheme dc-1 2025-03-31T16:10:14.979956Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7488006671719575272:2186], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:14.980246Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488006671719575508:2349][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7488006671719575272:2186], cookie# 1 2025-03-31T16:10:14.985775Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488006671719575523:2349][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488006671719575520:2349], cookie# 1 2025-03-31T16:10:14.985830Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488006671719575524:2349][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488006671719575521:2349], cookie# 1 2025-03-31T16:10:14.985849Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488006671719575525:2349][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488006671719575522:2349], cookie# 1 2025-03-31T16:10:14.985913Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488006667424607523:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488006671719575523:2349], cookie# 1 2025-03-31T16:10:14.985957Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488006667424607526:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488006671719575524:2349], cookie# 1 2025-03-31T16:10:14.985981Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488006667424607529:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488006671719575525:2349], cookie# 1 2025-03-31T16:10:14.986012Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488006671719575523:2349][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488006667424607523:2051], cookie# 1 2025-03-31T16:10:14.986027Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488006671719575524:2349][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488006667424607526:2054], cookie# 1 2025-03-31T16:10:14.986039Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488006671719575525:2349][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488006667424607529:2057], cookie# 1 2025-03-31T16:10:14.986077Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488006671719575508:2349][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488006671719575520:2349], cookie# 1 2025-03-31T16:10:14.986121Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488006671719575508:2349][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-03-31T16:10:14.986146Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488006671719575508:2349][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# 
[1:7488006671719575521:2349], cookie# 1 2025-03-31T16:10:14.986167Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488006671719575508:2349][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-03-31T16:10:14.986196Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488006671719575508:2349][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488006671719575522:2349], cookie# 1 2025-03-31T16:10:14.986215Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488006671719575508:2349][/dc-1] Unexpected sync response: sender# [1:7488006671719575522:2349], cookie# 1 2025-03-31T16:10:14.986301Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7488006671719575272:2186], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-03-31T16:10:15.014252Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7488006671719575272:2186], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7488006671719575508:2349] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-31T16:10:15.022240Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7488006671719575272:2186], cacheItem# { Subscriber: { Subscriber: [1:7488006671719575508:2349] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-03-31T16:10:15.024526Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7488006680309510238:2455], recipient# [1:7488006676014542941:2454], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-31T16:10:15.024623Z node 1 :TX_PROXY DEBUG: Actor# [1:7488006676014542941:2454] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-31T16:10:15.034989Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7488006671719575272:2186], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false 
Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:15.035059Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7488006671719575272:2186], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480 2025-03-31T16:10:15.035277Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488006680309510240:2456][/dc-1/.metadata/initialization/migrations] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-03-31T16:10:15.035779Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488006667424607523:2051] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [1:7488006680309510244:2456] 2025-03-31T16:10:15.035800Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7488006667424607523:2051] Upsert description: path# /dc-1/.metadata/initialization/migrations 2025-03-31T16:10:15.035875Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7488006667424607523:2051] Subscribe: subscriber# [1:7488006680309510244:2456], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-31T16:10:15.035925Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488006667424607526:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [1:7488006680309510245:2456] 2025-03-31T16:10:15.035932Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7488006667424607526:2054] Upsert description: path# /dc-1/.metadata/initialization/migrations 2025-03-31T16:10:15.035958Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7488006667424607526:2054] Subscribe: subscriber# [1:7488006680309510245:2456], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-31T16:10:15.035974Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488006667424607529:2057] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [1:7488006680309510246:2456] 2025-03-31T16:10:15.035980Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7488006667424607529:2057] Upsert description: path# /dc-1/.metadata/initialization/migrations 2025-03-31T16:10:15.035996Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7488006667424607529:2057] Subscribe: subscriber# [1:7488006680309510246:2456], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-31T16:10:15.036056Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488006680309510244:2456][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [1:7488006667424607523:2051] 2025-03-31T16:10:15.036080Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488006680309510245:2456][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [1:7488006667424607526:2054] 2025-03-31T16:10:15.036096Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488006680309510246:2456][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [1:7488006667424607529:2057] 2025-03-31T16:10:15.036128Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488006680309510240:2456][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: 
/dc-1/.metadata/initializatio ... : 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:10:19.499827Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7488006697826071796:2305], recipient# [3:7488006697826071795:2320], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:20.511525Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7488006684941169581:2107], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:20.511625Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7488006684941169581:2107], cacheItem# { Subscriber: { Subscriber: [3:7488006689236137010:2191] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:10:20.511689Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7488006702121039094:2306], recipient# [3:7488006702121039093:2321], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:21.487302Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488006684941169336:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:10:21.487375Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/USER_0/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:10:21.513424Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7488006684941169581:2107], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo 
}] } 2025-03-31T16:10:21.513593Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7488006684941169581:2107], cacheItem# { Subscriber: { Subscriber: [3:7488006689236137010:2191] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:10:21.513702Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7488006706416006392:2307], recipient# [3:7488006706416006391:2322], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:22.491228Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7488006684941169581:2107], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:22.491372Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7488006684941169581:2107], cacheItem# { Subscriber: { Subscriber: [3:7488006689236137010:2191] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:10:22.491466Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7488006710710973690:2308], recipient# [3:7488006710710973689:2323], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:22.514587Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7488006684941169581:2107], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:22.514706Z node 3 :TX_PROXY_SCHEME_CACHE 
DEBUG: FillEntry for TNavigate: self# [3:7488006684941169581:2107], cacheItem# { Subscriber: { Subscriber: [3:7488006689236137010:2191] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:10:22.514784Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7488006710710973692:2309], recipient# [3:7488006710710973691:2324], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:23.495503Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7488006684941169581:2107], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:23.495618Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7488006684941169581:2107], cacheItem# { Subscriber: { Subscriber: [3:7488006689236137010:2191] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:10:23.495692Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7488006715005940991:2310], recipient# [3:7488006715005940990:2325], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:23.523248Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7488006684941169581:2107], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:23.523374Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# 
[3:7488006684941169581:2107], cacheItem# { Subscriber: { Subscriber: [3:7488006689236137010:2191] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:10:23.523451Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7488006715005940993:2311], recipient# [3:7488006715005940992:2326], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] }
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::GenericCases [GOOD]
Test command err:
2025-03-31T16:10:12.823713Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488006670345369559:2279];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:10:12.823774Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d00/r3tmp/tmphoR8i1/pdisk_1.dat 2025-03-31T16:10:14.045958Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:10:15.214661Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:10:15.229605Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:10:15.261118Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:10:15.261216Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:10:15.359362Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:10:15.605100Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.113747s 2025-03-31T16:10:15.605176Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.113848s TClient is connected to server localhost:13423 WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1 2025-03-31T16:10:16.275522Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7488006674640336892:2130], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:16.275580Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7488006674640336892:2130], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480 2025-03-31T16:10:16.275771Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488006687525239261:2451][/dc-1/.metadata/initialization/migrations] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-03-31T16:10:16.277680Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488006670345369250:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [1:7488006687525239265:2451] 2025-03-31T16:10:16.277703Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7488006670345369250:2050] Upsert description: path# /dc-1/.metadata/initialization/migrations 2025-03-31T16:10:16.277803Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7488006670345369250:2050] Subscribe: subscriber# [1:7488006687525239265:2451], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-31T16:10:16.277858Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488006670345369253:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [1:7488006687525239266:2451] 2025-03-31T16:10:16.277869Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7488006670345369253:2053] Upsert description: path# /dc-1/.metadata/initialization/migrations 2025-03-31T16:10:16.277892Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7488006670345369253:2053] Subscribe: subscriber# [1:7488006687525239266:2451], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-31T16:10:16.277912Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488006670345369256:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [1:7488006687525239267:2451] 2025-03-31T16:10:16.277919Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7488006670345369256:2056] Upsert description: path# /dc-1/.metadata/initialization/migrations 2025-03-31T16:10:16.277941Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7488006670345369256:2056] Subscribe: subscriber# [1:7488006687525239267:2451], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-31T16:10:16.278021Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488006687525239265:2451][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [1:7488006670345369250:2050] 2025-03-31T16:10:16.278045Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488006687525239266:2451][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [1:7488006670345369253:2053] 
2025-03-31T16:10:16.278060Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488006687525239267:2451][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [1:7488006670345369256:2056] 2025-03-31T16:10:16.278113Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488006687525239261:2451][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [1:7488006687525239262:2451] 2025-03-31T16:10:16.278169Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488006687525239261:2451][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [1:7488006687525239263:2451] 2025-03-31T16:10:16.278201Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7488006687525239261:2451][/dc-1/.metadata/initialization/migrations] Set up state: owner# [1:7488006674640336892:2130], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-31T16:10:16.278221Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488006687525239261:2451][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [1:7488006687525239264:2451] 2025-03-31T16:10:16.278258Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7488006687525239261:2451][/dc-1/.metadata/initialization/migrations] Ignore empty state: owner# [1:7488006674640336892:2130], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-31T16:10:16.278286Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488006670345369250:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7488006687525239265:2451] 2025-03-31T16:10:16.278301Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488006670345369253:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7488006687525239266:2451] 2025-03-31T16:10:16.278312Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488006670345369256:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7488006687525239267:2451] 2025-03-31T16:10:16.278378Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7488006674640336892:2130], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/initialization/migrations PathId: Strong: 1 } 2025-03-31T16:10:16.278460Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7488006674640336892:2130], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/initialization/migrations PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [1:7488006687525239261:2451] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-31T16:10:16.303521Z node 1 :TX_PROXY DEBUG: actor# [1:7488006670345369572:2116] Handle TEvNavigate describe path dc-1 2025-03-31T16:10:16.303569Z node 1 :TX_PROXY DEBUG: Actor# [1:7488006687525239269:2453] HANDLE EvNavigateScheme dc-1 2025-03-31T16:10:16.402831Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7488006674640336892:2130], cacheItem# { Subscriber: { Subscriber: 
[1:7488006687525239261:2451] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:10:16.403002Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7488006687525239272:2456], recipient# [1:7488006687525239260:2301], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:16.427151Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7488006674640336892:2130], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:16.430133Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488006674640337018:2197][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7488006674640336892:2130], cookie# 1 2025-03-31T16:10:16.430238Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488006674640337035:2197][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488006674640337032:2197], cookie# 1 2025-03-31T16:10:16.430256Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488006674640337036:2197][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488006674640337033:2197], cookie# 1 2025-03-31T16:10:16.430300Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488006674640337037:2197][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488006674640337034:2197], cookie# 1 2025-03-31T16:10:16.443148Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488006670345369250:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488006674640337035:2197], cookie# 1 2025-03-31T16:10:16.443259Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488006674640337035:2197][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488006670345369250:2050], cookie# 1 2025-03-31T16:10:16.443293Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:748800667464033 ... 
ncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:23.863844Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7488006674640336892:2130], cacheItem# { Subscriber: { Subscriber: [1:7488006687525239261:2451] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:10:23.863911Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7488006717590011391:3177], recipient# [1:7488006717590011390:2353], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:24.450395Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7488006674640336892:2130], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:24.450510Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7488006674640336892:2130], cacheItem# { Subscriber: { Subscriber: [1:7488006687525239261:2451] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:10:24.450581Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7488006721884978706:3183], recipient# [1:7488006721884978705:2354], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:24.623589Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7488006674640336892:2130], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:24.623704Z node 1 
:TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7488006674640336892:2130], cacheItem# { Subscriber: { Subscriber: [1:7488006713295044041:3163] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:10:24.623783Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7488006721884978708:3184], recipient# [1:7488006721884978707:2355], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:24.866392Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7488006674640336892:2130], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:24.866507Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7488006674640336892:2130], cacheItem# { Subscriber: { Subscriber: [1:7488006687525239261:2451] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:10:24.866602Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7488006721884978713:3185], recipient# [1:7488006721884978712:2356], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:25.453726Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7488006674640336892:2130], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:25.453853Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# 
[1:7488006674640336892:2130], cacheItem# { Subscriber: { Subscriber: [1:7488006687525239261:2451] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:10:25.453930Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7488006726179946026:3189], recipient# [1:7488006726179946025:2357], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:25.627551Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7488006674640336892:2130], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:25.627674Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7488006674640336892:2130], cacheItem# { Subscriber: { Subscriber: [1:7488006713295044041:3163] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:10:25.627794Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7488006726179946028:3190], recipient# [1:7488006726179946027:2358], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:25.871357Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7488006674640336892:2130], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:25.871476Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7488006674640336892:2130], cacheItem# { 
Subscriber: { Subscriber: [1:7488006687525239261:2451] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:10:25.871582Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7488006726179946034:3192], recipient# [1:7488006726179946033:2359], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> TStorageTenantTest::RemoveStoragePoolBeforeDroppingTablet [GOOD] >> TStorageTenantTest::RemoveStoragePoolAndCreateOneMore [GOOD] |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v0] [GOOD] |84.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 |84.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 |84.3%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::RemoveStoragePoolAndCreateOneMore [GOOD] Test command err: 2025-03-31T16:10:11.679520Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488006666027916593:2082];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:10:11.686205Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ca7/r3tmp/tmpHyK0St/pdisk_1.dat 2025-03-31T16:10:12.522457Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:10:12.584779Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:10:12.584907Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:10:12.596689Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17068 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-03-31T16:10:12.836436Z node 1 :TX_PROXY DEBUG: actor# [1:7488006666027916813:2117] Handle TEvNavigate describe path dc-1 2025-03-31T16:10:12.836504Z node 1 :TX_PROXY DEBUG: Actor# [1:7488006670322884595:2438] HANDLE EvNavigateScheme dc-1 2025-03-31T16:10:12.837850Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7488006670322884134:2131], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:12.837999Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488006670322884573:2428][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7488006670322884134:2131], cookie# 1 2025-03-31T16:10:12.839477Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488006670322884578:2428][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488006670322884575:2428], cookie# 1 2025-03-31T16:10:12.839533Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488006670322884579:2428][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488006670322884576:2428], cookie# 1 2025-03-31T16:10:12.839551Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488006670322884580:2428][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488006670322884577:2428], cookie# 1 2025-03-31T16:10:12.839602Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488006666027916488:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488006670322884578:2428], cookie# 1 2025-03-31T16:10:12.839647Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488006666027916491:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488006670322884579:2428], cookie# 1 2025-03-31T16:10:12.839670Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488006666027916494:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488006670322884580:2428], cookie# 1 2025-03-31T16:10:12.839722Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488006670322884578:2428][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488006666027916488:2051], cookie# 1 2025-03-31T16:10:12.839747Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488006670322884579:2428][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488006666027916491:2054], cookie# 1 2025-03-31T16:10:12.839758Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488006670322884580:2428][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488006666027916494:2057], cookie# 1 2025-03-31T16:10:12.839792Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488006670322884573:2428][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488006670322884575:2428], cookie# 1 2025-03-31T16:10:12.839832Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488006670322884573:2428][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-03-31T16:10:12.839847Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488006670322884573:2428][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# 
[1:7488006670322884576:2428], cookie# 1 2025-03-31T16:10:12.839865Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488006670322884573:2428][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-03-31T16:10:12.839884Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488006670322884573:2428][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488006670322884577:2428], cookie# 1 2025-03-31T16:10:12.839895Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488006670322884573:2428][/dc-1] Unexpected sync response: sender# [1:7488006670322884577:2428], cookie# 1 2025-03-31T16:10:12.839959Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7488006670322884134:2131], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-03-31T16:10:12.846900Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7488006670322884134:2131], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7488006670322884573:2428] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-31T16:10:12.847026Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7488006670322884134:2131], cacheItem# { Subscriber: { Subscriber: [1:7488006670322884573:2428] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-03-31T16:10:12.848709Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7488006670322884596:2439], recipient# [1:7488006670322884595:2438], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-31T16:10:12.848783Z node 1 :TX_PROXY DEBUG: Actor# [1:7488006670322884595:2438] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-31T16:10:12.939380Z node 1 :TX_PROXY DEBUG: Actor# [1:7488006670322884595:2438] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } TClient::Ls response: 2025-03-31T16:10:12.945718Z node 1 :TX_PROXY DEBUG: Actor# [1:7488006670322884595:2438] Handle TEvDescribeSchemeResult Forward to# [1:7488006670322884594:2437] Cookie: 0 TEvDescribeSchemeResult: 
NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-03-31T16:10:12.967034Z node 1 :TX_PROXY DEBUG: actor# [1:7488006666027916813:2117] Handle TEvProposeTransaction 2025-03-31T16:10:12.967070Z node 1 :TX_PROXY DEBUG: actor# [1:7488006666027916813:2117] TxId# 281474976710657 ProcessProposeTransaction 2025-03-31T16:10:12.968426Z node 1 :TX_PROXY DEBUG: actor# [1:7488006666027916813:2117] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7488006670322884606:2448] 2025-03-31T16:10:13.059744Z node 1 :TX_PROXY DEBUG: Actor# [1:7488006670322884606:2448] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "storage-pool-number-2" } StoragePools { Name: "" Kind: "storage-pool-number-1" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" PeerName: "" 2025-03-31T16:10:13.059791Z node 1 :TX_PROXY DEBUG: Actor# [1:7488006670322884606:2448] txid# 281474976710657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T16:10:13.059903Z node 1 :TX_PROXY DEBUG: Actor# [1:7488006670322884606:2448] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:10:13.060004Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7488006670322884134:2131], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:1844674407370955161 ... criber: [3:7488006703154563358:2292] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:10:27.775831Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7488006733219341954:4819], recipient# [3:7488006733219341953:4228], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:27.831890Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7488006673089792172:2231], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:27.831997Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7488006673089792172:2231], cacheItem# { Subscriber: { Subscriber: [3:7488006703154563358:2292] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: 
dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:10:27.832052Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7488006673089792172:2231], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:27.832102Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7488006673089792172:2231], cacheItem# { Subscriber: { Subscriber: [3:7488006703154563392:2304] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:10:27.832138Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7488006733219341957:4820], recipient# [3:7488006733219341955:4229], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:27.832181Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7488006733219341958:4821], recipient# [3:7488006733219341956:4230], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:27.917306Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7488006710667874489:2229], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:27.917424Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7488006710667874489:2229], cacheItem# { Subscriber: { Subscriber: [2:7488006727847743803:2301] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 
0 }, entry# { Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:10:27.917501Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7488006732142711212:2343], recipient# [2:7488006732142711211:2572], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:27.975918Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7488006710667874489:2229], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:27.976073Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7488006710667874489:2229], cacheItem# { Subscriber: { Subscriber: [2:7488006727847743804:2302] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:10:27.976158Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7488006710667874489:2229], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:27.976235Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7488006710667874489:2229], cacheItem# { Subscriber: { Subscriber: [2:7488006727847743803:2301] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:10:27.976302Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7488006732142711215:2344], recipient# [2:7488006732142711213:2573], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 
DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:27.976363Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7488006732142711216:2345], recipient# [2:7488006732142711214:2574], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:28.221072Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7488006710667874489:2229], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:28.221187Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7488006710667874489:2229], cacheItem# { Subscriber: { Subscriber: [2:7488006727847743803:2301] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:10:28.221262Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7488006736437678514:2346], recipient# [2:7488006736437678513:2575], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> KqpJoinOrder::CanonizedJoinOrderTPCH19 [GOOD] |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::RemoveStoragePoolBeforeDroppingTablet [GOOD] Test command err: 2025-03-31T16:10:12.817802Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488006669649367726:2280];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:10:12.817853Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000c9b/r3tmp/tmpxMI6g1/pdisk_1.dat 2025-03-31T16:10:14.201606Z node 1 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:10:14.590938Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:10:14.591047Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:10:14.613992Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:10:14.709473Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:9404 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-03-31T16:10:15.538902Z node 1 :TX_PROXY DEBUG: actor# [1:7488006669649367760:2135] Handle TEvNavigate describe path dc-1 2025-03-31T16:10:15.538948Z node 1 :TX_PROXY DEBUG: Actor# [1:7488006682534270141:2463] HANDLE EvNavigateScheme dc-1 2025-03-31T16:10:15.539134Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7488006673944335079:2148], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:15.539304Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488006678239302586:2276][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7488006673944335079:2148], cookie# 1 2025-03-31T16:10:15.544721Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488006678239302593:2276][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488006678239302590:2276], cookie# 1 2025-03-31T16:10:15.544774Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488006678239302594:2276][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488006678239302591:2276], cookie# 1 2025-03-31T16:10:15.544793Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488006678239302595:2276][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488006678239302592:2276], cookie# 1 2025-03-31T16:10:15.544839Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488006669649367404:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488006678239302593:2276], cookie# 1 2025-03-31T16:10:15.544869Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488006669649367407:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488006678239302594:2276], cookie# 1 2025-03-31T16:10:15.544889Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488006669649367410:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488006678239302595:2276], cookie# 1 2025-03-31T16:10:15.544919Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488006678239302593:2276][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488006669649367404:2051], cookie# 1 2025-03-31T16:10:15.544936Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488006678239302594:2276][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488006669649367407:2054], cookie# 1 2025-03-31T16:10:15.544949Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488006678239302595:2276][/dc-1] Handle 
NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488006669649367410:2057], cookie# 1 2025-03-31T16:10:15.544987Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488006678239302586:2276][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488006678239302590:2276], cookie# 1 2025-03-31T16:10:15.545027Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488006678239302586:2276][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-03-31T16:10:15.545069Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488006678239302586:2276][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488006678239302591:2276], cookie# 1 2025-03-31T16:10:15.545095Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488006678239302586:2276][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-03-31T16:10:15.545134Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488006678239302586:2276][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488006678239302592:2276], cookie# 1 2025-03-31T16:10:15.545154Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488006678239302586:2276][/dc-1] Unexpected sync response: sender# [1:7488006678239302592:2276], cookie# 1 2025-03-31T16:10:15.545238Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7488006673944335079:2148], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-03-31T16:10:15.570248Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7488006673944335079:2148], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7488006678239302586:2276] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-31T16:10:15.578556Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7488006673944335079:2148], cacheItem# { Subscriber: { Subscriber: [1:7488006678239302586:2276] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-03-31T16:10:15.580721Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7488006682534270142:2464], recipient# [1:7488006682534270141:2463], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 
TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-31T16:10:15.580848Z node 1 :TX_PROXY DEBUG: Actor# [1:7488006682534270141:2463] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-31T16:10:15.717863Z node 1 :TX_PROXY DEBUG: Actor# [1:7488006682534270141:2463] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-03-31T16:10:15.721945Z node 1 :TX_PROXY DEBUG: Actor# [1:7488006682534270141:2463] Handle TEvDescribeSchemeResult Forward to# [1:7488006682534270140:2462] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-03-31T16:10:15.891615Z node 1 :TX_PROXY DEBUG: actor# [1:7488006669649367760:2135] Handle TEvProposeTransaction 2025-03-31T16:10:15.891645Z node 1 :TX_PROXY DEBUG: actor# [1:7488006669649367760:2135] TxId# 281474976710657 ProcessProposeTransaction 2025-03-31T16:10:15.891751Z node 1 :TX_PROXY DEBUG: actor# [1:7488006669649367760:2135] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7488006682534270152:2470] 2025-03-31T16:10:16.109618Z node 1 :TX_PROXY DEBUG: Actor# [1:7488006682534270152:2470] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "storage-pool-number-2" } StoragePools { Name: "" Kind: "storage-pool-number-1" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" PeerName: "" 2025-03-31T16:10:16.109671Z node 1 :TX_PROXY DEBUG: Actor# [1:7488006682534270152:2470] txid# 281474976710657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T16:10:16.109837Z node 1 :TX_PROXY DEBUG: Actor# [1:7488006682534270152:2470] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:10:16.109946Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handl ... tify { Path: /dc-1/USER_0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] Version: 18446744073709551615 }: sender# [3:7488006692771643907:2216] 2025-03-31T16:10:17.695803Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: [main][3:7488006692771643904:2216][/dc-1/USER_0] Path was already updated: owner# [3:7488006688476676396:2105], state# { Deleted: 1 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 2], Version: 18446744073709551615) DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 2], Version: 18446744073709551615) DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-31T16:10:17.695939Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [3:7488006688476676396:2105], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] Strong: 1 } 2025-03-31T16:10:17.696142Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [3:7488006688476676396:2105], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7488006692771643904:2216] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 8 TableKind: 0 Created: 1 CreateStep: 1743437416525 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# { Subscriber: { Subscriber: [3:7488006692771643904:2216] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 8 TableKind: 0 Created: 1 CreateStep: 1743437416525 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 } 2025-03-31T16:10:17.739176Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7488006692771643908:2216][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] Version: 18446744073709551615 }: sender# [1:7488006669649367404:2051] 
2025-03-31T16:10:17.770133Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488006669649367404:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 18446744073709551615 }: sender# [3:7488006692771643908:2216] 2025-03-31T16:10:17.814190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2025-03-31T16:10:17.814233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-03-31T16:10:17.814243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:1 hive 72057594037968897 at ss 72057594046644480 2025-03-31T16:10:17.814254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-03-31T16:10:17.816711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710660 2025-03-31T16:10:17.816779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710660 2025-03-31T16:10:17.808369Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7488006692771643904:2216][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] Version: 18446744073709551615 }: sender# [3:7488006692771643905:2216] 2025-03-31T16:10:17.827622Z node 1 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037888) 2025-03-31T16:10:17.832950Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: [main][3:7488006692771643904:2216][/dc-1/USER_0] Path was already updated: owner# [3:7488006688476676396:2105], state# { Deleted: 1 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 2], Version: 18446744073709551615) DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 2], Version: 18446744073709551615) DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-31T16:10:17.861563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-03-31T16:10:17.861926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 5 2025-03-31T16:10:17.862173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-03-31T16:10:17.862307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 4 2025-03-31T16:10:17.862420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2025-03-31T16:10:17.862546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-03-31T16:10:17.862651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-03-31T16:10:17.862740Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-03-31T16:10:17.862859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-03-31T16:10:17.862874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-03-31T16:10:17.862981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-03-31T16:10:17.863410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-03-31T16:10:17.863430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-03-31T16:10:17.863474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-03-31T16:10:17.866324Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488006669649367726:2280];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:10:17.866397Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:10:17.893957Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxBlockStorageResult Complete status was NO_GROUP for TabletId 72075186224037889 2025-03-31T16:10:17.894055Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxBlockStorageResult Complete status was NO_GROUP for TabletId 72075186224037891 2025-03-31T16:10:17.894091Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxBlockStorageResult Complete status was NO_GROUP for TabletId 72075186224037888 2025-03-31T16:10:17.894146Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxBlockStorageResult Complete status was NO_GROUP for TabletId 72075186224037890 2025-03-31T16:10:17.928834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2 2025-03-31T16:10:17.935241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-03-31T16:10:17.943468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2025-03-31T16:10:17.943494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-03-31T16:10:17.943518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:1 2025-03-31T16:10:17.943527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2025-03-31T16:10:17.943544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-03-31T16:10:17.943559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-03-31T16:10:17.943597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-03-31T16:10:17.943629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 
72057594046644480 TabletID: 72075186224037888 Status: OK Info { TabletID: 72075186224037888 Channels { Channel: 0 ChannelType: 0 History { FromGeneration: 0 GroupID: 2181038081 } StoragePool: "name_USER_0_kind_storage-pool-number-2" } Channels { Channel: 1 ChannelType: 0 History { FromGeneration: 0 GroupID: 2181038081 } StoragePool: "name_USER_0_kind_storage-pool-number-2" } Channels { Channel: 2 ChannelType: 0 History { FromGeneration: 0 GroupID: 2181038081 } StoragePool: "name_USER_0_kind_storage-pool-number-2" } TabletType: Coordinator Version: 1 TenantIdOwner: 72057594046644480 TenantIdLocalId: 2 } 2025-03-31T16:10:18.001933Z node 1 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037888) 2025-03-31T16:10:18.211503Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7488006673944335079:2148], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:18.211639Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7488006673944335079:2148], cacheItem# { Subscriber: { Subscriber: [1:7488006682534270130:2458] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:10:18.211719Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7488006695419172629:2904], recipient# [1:7488006695419172628:2337], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-std] [GOOD] >> TStorageTenantTest::CopyTableAndConcurrentSplit [GOOD] >> OlapEstimationRowsCorrectness::TPCDS96 [GOOD] |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots |84.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots |84.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-std] [GOOD] |84.4%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::CopyTableAndConcurrentSplit [GOOD] Test command err: 2025-03-31T16:10:11.940041Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488006665430074319:2142];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:10:11.940866Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000cbe/r3tmp/tmp14cYTG/pdisk_1.dat 2025-03-31T16:10:13.077123Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:10:13.373763Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:10:13.423051Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:10:13.423174Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:10:13.435117Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16084 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-03-31T16:10:14.312746Z node 1 :TX_PROXY DEBUG: actor# [1:7488006669725041795:2120] Handle TEvNavigate describe path dc-1 2025-03-31T16:10:14.312784Z node 1 :TX_PROXY DEBUG: Actor# [1:7488006678314976887:2455] HANDLE EvNavigateScheme dc-1 2025-03-31T16:10:14.312889Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7488006669725041877:2158], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:14.312967Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488006669725042019:2260][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7488006669725041877:2158], cookie# 1 2025-03-31T16:10:14.314176Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488006674020009328:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488006674020009325:2260], cookie# 1 2025-03-31T16:10:14.314199Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488006674020009329:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488006674020009326:2260], cookie# 1 2025-03-31T16:10:14.314212Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488006674020009330:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488006674020009327:2260], cookie# 1 2025-03-31T16:10:14.314243Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488006665430074169:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488006674020009328:2260], cookie# 1 2025-03-31T16:10:14.314264Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488006665430074172:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488006674020009329:2260], cookie# 1 2025-03-31T16:10:14.314278Z node 
1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488006665430074175:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488006674020009330:2260], cookie# 1 2025-03-31T16:10:14.314306Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488006674020009328:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488006665430074169:2049], cookie# 1 2025-03-31T16:10:14.314322Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488006674020009329:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488006665430074172:2052], cookie# 1 2025-03-31T16:10:14.314334Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488006674020009330:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488006665430074175:2055], cookie# 1 2025-03-31T16:10:14.314363Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488006669725042019:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488006674020009325:2260], cookie# 1 2025-03-31T16:10:14.314407Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488006669725042019:2260][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-03-31T16:10:14.314420Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488006669725042019:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488006674020009326:2260], cookie# 1 2025-03-31T16:10:14.314437Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488006669725042019:2260][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-03-31T16:10:14.314454Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488006669725042019:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488006674020009327:2260], cookie# 1 2025-03-31T16:10:14.314470Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488006669725042019:2260][/dc-1] Unexpected sync response: sender# [1:7488006674020009327:2260], cookie# 1 2025-03-31T16:10:14.314511Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7488006669725041877:2158], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-03-31T16:10:14.344771Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7488006669725041877:2158], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7488006669725042019:2260] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-31T16:10:14.344883Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7488006669725041877:2158], cacheItem# { Subscriber: { Subscriber: [1:7488006669725042019:2260] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath 
RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-03-31T16:10:14.354295Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7488006678314976890:2458], recipient# [1:7488006678314976887:2455], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-31T16:10:14.354361Z node 1 :TX_PROXY DEBUG: Actor# [1:7488006678314976887:2455] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-31T16:10:14.429747Z node 1 :TX_PROXY DEBUG: Actor# [1:7488006678314976887:2455] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-03-31T16:10:14.433312Z node 1 :TX_PROXY DEBUG: Actor# [1:7488006678314976887:2455] Handle TEvDescribeSchemeResult Forward to# [1:7488006678314976886:2454] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... 
(TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:10:14.559888Z node 1 :TX_PROXY DEBUG: actor# [1:7488006669725041795:2120] Handle TEvProposeTransaction 2025-03-31T16:10:14.559915Z node 1 :TX_PROXY DEBUG: actor# [1:7488006669725041795:2120] TxId# 281474976710657 ProcessProposeTransaction 2025-03-31T16:10:14.560041Z node 1 :TX_PROXY DEBUG: actor# [1:7488006669725041795:2120] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7488006678314976895:2462] 2025-03-31T16:10:14.818714Z node 1 :TX_PROXY DEBUG: Actor# [1:7488006678314976895:2462] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "storage-pool-number-2" } StoragePools { Name: "" Kind: "storage-pool-number-1" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" PeerName: "" 2025-03-31T16:10:14.821780Z node 1 :TX_PROXY DEBUG: Actor# [1:7488006678314976895:2462] txid# 281474976710657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T16:10:14.821883Z node 1 :TX_PROXY DEBUG: Actor# [1:7488006678314976895:2462] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:10:14.821979Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Hand ... nknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:10:31.510682Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7488006749680568617:2817], recipient# [4:7488006749680568594:2379], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:31.510750Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7488006749680568618:2818], recipient# [4:7488006749680568595:2380], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:31.512238Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:7488006749680568594:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:10:31.512418Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:10:31.587446Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7488006715320828967:2107], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:31.587649Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7488006715320828967:2107], cacheItem# { Subscriber: { Subscriber: [4:7488006749680568596:2814] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:10:31.587701Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7488006715320828967:2107], cacheItem# { Subscriber: { Subscriber: [4:7488006749680568597:2815] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:10:31.587810Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7488006749680568619:2819], recipient# [4:7488006749680568594:2379], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:31.588242Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:7488006749680568594:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:10:31.720309Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7488006715320828967:2107], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:31.720437Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7488006715320828967:2107], cacheItem# { Subscriber: { Subscriber: [4:7488006749680568596:2814] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:10:31.720486Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7488006715320828967:2107], cacheItem# { Subscriber: { Subscriber: [4:7488006749680568597:2815] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:10:31.720611Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7488006749680568620:2820], recipient# [4:7488006749680568594:2379], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:31.723353Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:7488006749680568594:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:10:31.995636Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7488006715320828967:2107], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:31.995758Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7488006715320828967:2107], cacheItem# { Subscriber: { Subscriber: [4:7488006749680568596:2814] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:10:31.995813Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7488006715320828967:2107], cacheItem# { Subscriber: { Subscriber: [4:7488006749680568597:2815] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:10:31.995914Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7488006749680568621:2821], recipient# [4:7488006749680568594:2379], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:31.996182Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:7488006749680568594:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-fifo] [GOOD] |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH19 [GOOD] Test command err: Trying to start YDB, gRPC: 11669, MsgBus: 31200 2025-03-31T16:07:52.423594Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488006069459851857:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:07:52.436906Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0019a3/r3tmp/tmpI13vXJ/pdisk_1.dat 2025-03-31T16:07:53.205574Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:07:53.207912Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:07:53.211371Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:07:53.217778Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11669, node 1 2025-03-31T16:07:53.453542Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:07:53.453577Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:07:53.453587Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:07:53.453703Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31200 TClient is connected to server localhost:31200 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
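The KQP_WORKLOAD_SERVICE warnings earlier in this output repeat "Scheduled retry for error: Retry LookupError" at 16:10:31.512, .588, .723, and .996, i.e. with a pause that roughly doubles while the tenant's .metadata/workload_manager tables are still unresolved. The generic shape of that pattern is sketched below; this is an illustration of retry-with-backoff driven by the timings in the log, not YDB's actual cleanup-actor code.

    import time

    class SchemeLookupError(Exception):
        """Stand-in for the scheme cache's LookupError status seen in the entries above."""

    def resolve_with_backoff(resolve, attempts=5, first_delay=0.08):
        """Call `resolve` until it stops raising, doubling the pause each time.

        The 0.08s starting delay mirrors the ~80ms/~140ms/~270ms spacing of
        the 'Scheduled retry' warnings; both values here are illustrative.
        """
        delay = first_delay
        for attempt in range(1, attempts + 1):
            try:
                return resolve()
            except SchemeLookupError as err:
                if attempt == attempts:
                    raise
                print(f"Scheduled retry for error: {err} (attempt {attempt})")
                time.sleep(delay)
                delay *= 2

In a healthy run the next resolve() succeeds once the subdomain's metadata tables finish initializing, and the warnings stop on their own rather than failing the test.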
2025-03-31T16:07:54.614441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:07:54.648126Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:07:57.427274Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488006069459851857:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:07:57.427354Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:07:58.049321Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488006095229656304:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:07:58.049474Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:07:58.051223Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488006095229656316:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:07:58.056891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:07:58.072078Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488006095229656318:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:07:58.166867Z node 1 :TX_PROXY ERROR: Actor# [1:7488006095229656369:2344] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:07:58.581023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:07:58.826704Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488006095229656629:2353];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:07:58.826924Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488006095229656629:2353];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:07:58.828247Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488006095229656638:2357];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:07:58.828290Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488006095229656638:2357];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:07:58.828581Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488006095229656638:2357];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:07:58.828714Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488006095229656638:2357];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:07:58.828849Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488006095229656638:2357];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:07:58.828963Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488006095229656638:2357];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:07:58.829066Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488006095229656638:2357];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:07:58.829262Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488006095229656638:2357];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:07:58.829389Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037902;self_id=[1:7488006095229656638:2357];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:07:58.829495Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488006095229656638:2357];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:07:58.829596Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488006095229656638:2357];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:07:58.829728Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488006095229656638:2357];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:07:58.839376Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488006095229656629:2353];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:07:58.839664Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488006095229656629:2353];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:07:58.839805Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488006095229656629:2353];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:07:58.839939Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488006095229656629:2353];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:07:58.840052Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488006095229656629:2353];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:07:58.840183Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488006095229656629:2353];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:07:58.840341Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488006095229656629:2353];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:07:58.840476Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488006095229656629:2353];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:07:58.840621Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488006095229656629:2353];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:07:58.840728Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7488006095229656629:2353];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:07:58.871780Z node 1 :T ... ller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:03.513975Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039237;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:03.521200Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039257;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:03.521200Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039269;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:03.527690Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039283;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:03.531336Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039259;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:03.535693Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039279;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:03.538332Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039261;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:03.544903Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039187;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:03.547493Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039241;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:03.560952Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039265;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:03.564194Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039277;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:03.566794Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039235;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:03.574762Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039243;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:03.584730Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039205;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:03.586287Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039191;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:03.593735Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:03.599877Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:03.607187Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:03.613476Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:03.619858Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:03.626166Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:03.634191Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:03.640506Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:03.649491Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:03.665125Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:03.672306Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:03.678680Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:03.684891Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039304;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:03.691489Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:03.697958Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:03.704820Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:03.710845Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:03.717454Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:03.723603Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:03.729745Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039255;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:03.736582Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:03.767734Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:03.772689Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:03.782457Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:03.785779Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:03.798384Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:03.800783Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:03.814214Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:03.820437Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:03.824372Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:04.386065Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpfd3kafbqkz012sxm3wssz", SessionId: ydb://session/3?node_id=1&id=NmQ5NDYyZTYtYjFhYzI0Y2QtYTZlNTU4YWEtM2IwODljMjM=, Slow query, duration: 68.210930s, 
status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:10:05.114868Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:10:05.115009Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:10:05.115915Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;self_id=[1:7488006576266056276:12651];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039392;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038933;receive=72075186224039094; 2025-03-31T16:10:05.116419Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> OlapEstimationRowsCorrectness::TPCDS96 [GOOD] Test command err: Trying to start YDB, gRPC: 30013, MsgBus: 29282 2025-03-31T16:07:48.495834Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488006049009273881:2195];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:07:48.503432Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0019a4/r3tmp/tmpLDAdbS/pdisk_1.dat 2025-03-31T16:07:49.534154Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:07:49.620866Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:07:49.620957Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:07:49.622300Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:07:49.636142Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30013, node 1 2025-03-31T16:07:49.895700Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:07:49.895744Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:07:49.895751Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:07:49.895870Z node 1 
:NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29282 TClient is connected to server localhost:29282 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:07:51.026362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:07:53.487300Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488006049009273881:2195];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:07:53.487384Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:07:53.964710Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488006070484110902:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:07:53.964929Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:07:53.965984Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488006070484110915:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:07:53.971283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:07:53.987873Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488006070484110917:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:07:54.095645Z node 1 :TX_PROXY ERROR: Actor# [1:7488006074779078264:2344] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:07:54.585969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:07:54.981131Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488006074779078484:2355];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:07:54.981366Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488006074779078484:2355];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:07:54.981642Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488006074779078484:2355];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:07:54.981751Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488006074779078484:2355];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:07:54.981854Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488006074779078484:2355];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:07:54.981959Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488006074779078484:2355];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:07:54.982058Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488006074779078484:2355];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:07:54.982164Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488006074779078484:2355];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:07:54.982268Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488006074779078484:2355];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:07:54.982369Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488006074779078484:2355];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:07:54.982480Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037896;self_id=[1:7488006074779078484:2355];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:07:54.982633Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488006074779078484:2355];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:07:55.043679Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7488006074779078493:2359];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:07:55.043749Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7488006074779078493:2359];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:07:55.043966Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7488006074779078493:2359];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:07:55.044069Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7488006074779078493:2359];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:07:55.044171Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7488006074779078493:2359];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:07:55.044263Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7488006074779078493:2359];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:07:55.044356Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7488006074779078493:2359];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:07:55.044471Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7488006074779078493:2359];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:07:55.044570Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7488006074779078493:2359];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:07:55.044684Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7488006074779078493:2359];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:07:55.044812Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7488006074779078493:2359];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:07:55.044910Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037893;self_id=[1:7488006074779078493:2359];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CL ... ller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:00.591947Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039281;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:00.619689Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039215;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:00.628532Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039325;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:00.653990Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:00.655939Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:00.678597Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039301;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:00.686289Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039207;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:00.699026Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039225;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:00.701823Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:00.709725Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039279;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:00.724602Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039227;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:00.742898Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039211;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:00.746301Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039249;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:00.754845Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039199;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:00.769519Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039257;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:00.772139Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039307;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:00.777216Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039259;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:00.795539Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039261;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:00.802326Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039213;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:00.809028Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039285;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:00.810165Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:00.829841Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039255;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:00.838195Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039333;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:00.846234Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039287;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:00.852939Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:00.860701Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039309;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:00.867324Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039321;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:00.867324Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039275;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:00.875826Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039223;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:00.878970Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039273;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:00.882322Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039335;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:00.885107Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039317;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:00.889767Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039315;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:00.892121Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039319;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:00.896555Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039267;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:00.898479Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039221;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:00.902759Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039337;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:00.904695Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039291;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:00.909407Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039339;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:00.910904Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039293;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:00.916574Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039303;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:00.917421Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039313;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:00.924349Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039341;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:00.924769Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:00.934153Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039352;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:01.167619Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpfd04g73mvhne15ta1mmq7", SessionId: ydb://session/3?node_id=1&id=N2FkYjA0Y2ItM2EzYmM5NmUtZmFlYjE4My1lNDQxNWFiZA==, Slow query, duration: 68.542673s, 
status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:10:01.833526Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:10:01.834042Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:10:01.834673Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;self_id=[1:7488006568700377656:12220];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039392;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039094;receive=72075186224038933; 2025-03-31T16:10:01.835073Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_retryable_iam_error[tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_retryable_iam_error[tables_format_v1] >> TStorageTenantTest::CreateDummyTabletsInDifferentDomains [GOOD] >> test_auditlog.py::test_single_dml_query_logged[insert] |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> TPersQueueTest::PreferredCluster_EnabledRemotePreferredClusterAndRemoteClusterEnabledDelaySec_SessionDiesOnlyAfterDelay [GOOD] >> TPersQueueTest::PreferredCluster_RemotePreferredClusterEnabledWhileSessionInitializing_SessionDiesOnlyAfterInitializationAndDelay |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots |84.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots |84.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_good_dynconfig] |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::CreateDummyTabletsInDifferentDomains [GOOD] Test command err: 2025-03-31T16:10:12.437899Z node 1 
:METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488006667557761301:2084];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:10:12.437961Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000cd4/r3tmp/tmpTHCW0d/pdisk_1.dat 2025-03-31T16:10:13.884296Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:10:14.022826Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:10:14.692398Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:10:14.805137Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:10:14.819765Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:10:14.820757Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:10:14.820811Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:10:14.844203Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:10:14.844346Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:10:14.851864Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:10:14.988811Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.122135s 2025-03-31T16:10:14.988886Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.122228s TClient is connected to server localhost:10261 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-03-31T16:10:15.653468Z node 1 :TX_PROXY DEBUG: actor# [1:7488006667557761514:2119] Handle TEvNavigate describe path dc-1 2025-03-31T16:10:15.653553Z node 1 :TX_PROXY DEBUG: Actor# [1:7488006680442663906:2454] HANDLE EvNavigateScheme dc-1 2025-03-31T16:10:15.653686Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7488006671852728838:2133], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:15.653784Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488006676147696293:2222][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7488006671852728838:2133], cookie# 1 2025-03-31T16:10:15.731946Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488006676147696298:2222][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488006676147696295:2222], cookie# 1 2025-03-31T16:10:15.732047Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488006663262793889:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488006676147696298:2222], cookie# 1 2025-03-31T16:10:15.732083Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488006676147696299:2222][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488006676147696296:2222], cookie# 1 2025-03-31T16:10:15.732096Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488006676147696300:2222][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488006676147696297:2222], cookie# 1 2025-03-31T16:10:15.732129Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488006676147696298:2222][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488006663262793889:2052], cookie# 1 2025-03-31T16:10:15.732183Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488006676147696293:2222][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488006676147696295:2222], cookie# 1 2025-03-31T16:10:15.732263Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488006676147696293:2222][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-03-31T16:10:15.732285Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488006663262793895:2058] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488006676147696300:2222], cookie# 1 2025-03-31T16:10:15.732311Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488006676147696300:2222][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488006663262793895:2058], cookie# 1 2025-03-31T16:10:15.732327Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488006676147696293:2222][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488006676147696297:2222], cookie# 1 2025-03-31T16:10:15.732347Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488006676147696293:2222][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-03-31T16:10:15.732428Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7488006671852728838:2133], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 
2025-03-31T16:10:15.740045Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488006663262793892:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488006676147696299:2222], cookie# 1 2025-03-31T16:10:15.740130Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488006676147696299:2222][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488006663262793892:2055], cookie# 1 2025-03-31T16:10:15.740157Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488006676147696293:2222][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488006676147696296:2222], cookie# 1 2025-03-31T16:10:15.740181Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488006676147696293:2222][/dc-1] Unexpected sync response: sender# [1:7488006676147696296:2222], cookie# 1 2025-03-31T16:10:15.747560Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7488006671852728838:2133], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7488006676147696293:2222] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-31T16:10:15.747691Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7488006671852728838:2133], cacheItem# { Subscriber: { Subscriber: [1:7488006676147696293:2222] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-03-31T16:10:15.764370Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7488006680442663912:2457], recipient# [1:7488006680442663906:2454], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-31T16:10:15.764449Z node 1 :TX_PROXY DEBUG: Actor# [1:7488006680442663906:2454] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-31T16:10:15.945751Z node 1 :TX_PROXY DEBUG: Actor# [1:7488006680442663906:2454] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-03-31T16:10:15.949225Z node 1 :TX_PROXY DEBUG: Actor# [1:7488006680442663906:2454] Handle TEvDescribeSchemeResult Forward to# [1:7488006680442663905:2453] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult 
PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: 2025-03-31T16:10:16.049038Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7488006671852728838:2133], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:16.049162Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7488006671852728838:2133], cacheItem# { Subscriber: { Subscriber: [1:7488006680442663858:2423] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:10:16.0 ... 
446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:35.544075Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7488006735908057398:2197], cacheItem# { Subscriber: { Subscriber: [3:7488006740203024728:2214] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:10:35.544159Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7488006765972828610:2254], recipient# [3:7488006765972828609:2584], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:35.698448Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7488006735908057398:2197], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:35.698568Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7488006735908057398:2197], cacheItem# { Subscriber: { Subscriber: [3:7488006765972828588:2247] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:10:35.698611Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7488006735908057398:2197], cacheItem# { Subscriber: { Subscriber: [3:7488006765972828589:2248] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false 
Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:10:35.698704Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7488006765972828611:2255], recipient# [3:7488006765972828585:2581], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:35.699656Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:7488006765972828585:2581], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:10:35.901145Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7488006671470086698:2107], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:35.901271Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7488006671470086698:2107], cacheItem# { Subscriber: { Subscriber: [2:7488006723009694323:2125] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:10:35.901346Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7488006765959367353:2149], recipient# [2:7488006765959367352:2335], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:36.165466Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7488006671470086698:2107], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:36.165587Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7488006671470086698:2107], cacheItem# { Subscriber: { Subscriber: [2:7488006675765054025:2113] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:10:36.165671Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7488006770254334651:2150], recipient# [2:7488006770254334650:2336], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown 
Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:36.904032Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7488006671470086698:2107], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:36.904194Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7488006671470086698:2107], cacheItem# { Subscriber: { Subscriber: [2:7488006723009694323:2125] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:10:36.904297Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7488006770254334653:2151], recipient# [2:7488006770254334652:2337], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:37.167215Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7488006671470086698:2107], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:10:37.167379Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7488006671470086698:2107], cacheItem# { Subscriber: { Subscriber: [2:7488006675765054025:2113] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:10:37.167479Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7488006774549301951:2152], recipient# [2:7488006774549301950:2338], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } |84.5%| [TM] 
{asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots |84.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots |84.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots |84.5%| [TA] $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/test-results/unittest/{meta.json ... results_accumulator.log} >> test_dispatch.py::TestMapping::test_mapping [GOOD] |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v1] [GOOD] |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-std] [GOOD] |84.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots |84.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots |84.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots |84.6%| [TA] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/test-results/unittest/{meta.json ... 
results_accumulator.log} |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/multi_plane/py3test >> test_retry_high_rate.py::TestRetry::test_high_rate[kikimr0] [GOOD] |84.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots |84.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots |84.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-fifo] [GOOD] |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v1] [GOOD] >> test_auditlog.py::test_dml_requests_arent_logged_when_anonymous >> test_auditlog.py::test_single_dml_query_logged[update] |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> TPersQueueTest::BadSids [GOOD] |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> KqpJoinOrder::CanonizedJoinOrderTPCH10 [GOOD] |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots |84.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots |84.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots >> test_auditlog.py::test_single_dml_query_logged[upsert] |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v1] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queues_count_over_limit[tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queues_count_over_limit[tables_format_v1] |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v0] [GOOD] >> TPersQueueTest::ReadWithoutConsumerFirstClassCitizen [GOOD] |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> 
test_auditlog.py::test_single_dml_query_logged[replace] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_bad_dynconfig] |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_bad_dynconfig] |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH10 [GOOD] Test command err: Trying to start YDB, gRPC: 11026, MsgBus: 26670 2025-03-31T16:07:54.831545Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488006078608019045:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:07:54.833007Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0019a1/r3tmp/tmpxBQttD/pdisk_1.dat 2025-03-31T16:07:55.499492Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:07:55.527016Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:07:55.527407Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:07:55.536166Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11026, node 1 2025-03-31T16:07:55.665843Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:07:55.665863Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:07:55.665873Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:07:55.665981Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26670 TClient is connected to server localhost:26670 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:07:56.658492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:07:56.711478Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:07:59.831643Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488006078608019045:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:07:59.831738Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:08:00.012223Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488006104377823486:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:08:00.012350Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:08:00.012619Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488006104377823498:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:08:00.016928Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:08:00.032430Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488006104377823500:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:08:00.123280Z node 1 :TX_PROXY ERROR: Actor# [1:7488006104377823551:2343] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:08:00.536698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:08:00.804660Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488006104377823806:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:08:00.804903Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488006104377823806:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:08:00.805243Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488006104377823806:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:08:00.805392Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488006104377823806:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:08:00.805542Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488006104377823806:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:08:00.805674Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488006104377823806:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:08:00.805751Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488006104377823806:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:08:00.805847Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488006104377823806:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:08:00.805977Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488006104377823806:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:08:00.806059Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488006104377823806:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:08:00.806183Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037903;self_id=[1:7488006104377823806:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:08:00.806306Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488006104377823806:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:08:00.824121Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488006104377823810:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:08:00.824243Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488006104377823810:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:08:00.824541Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488006104377823810:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:08:00.824672Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488006104377823810:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:08:00.824818Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488006104377823810:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:08:00.824953Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488006104377823810:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:08:00.825092Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488006104377823810:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:08:00.825243Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488006104377823810:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:08:00.825394Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488006104377823810:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:08:00.825525Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488006104377823810:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:08:00.825737Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488006104377823810:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:08:00.825884Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037895;self_id=[1:7488006104377823810:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:08:00.838879Z node 1 :T ... oller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:15.716583Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:15.724357Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039370;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:15.734586Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039358;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:15.735819Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039366;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:15.750469Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:15.753331Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:15.777345Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:15.792573Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:15.808255Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:15.815335Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039218;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:15.826383Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039277;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:15.841433Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:15.855427Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039322;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:15.859394Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:15.874808Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039330;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:15.876384Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:15.894786Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:15.903633Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:15.918641Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039348;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:15.918655Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:15.941545Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039352;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:15.958646Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:15.968816Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039320;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:15.989854Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:16.000600Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039200;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:16.018857Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:16.020975Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039191;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:16.055469Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039217;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:16.063496Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039327;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:16.454075Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039368;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:16.493684Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039336;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:16.557043Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:16.572947Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:16.592079Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039378;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:16.699628Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039252;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:16.760789Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:16.765473Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:16.776250Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:16.789345Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:16.801971Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:16.816433Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:16.818818Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039186;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:16.842035Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039319;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:16.855885Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039382;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:17.074459Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039376;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:17.321684Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpfd6ze3dxt12ggsf8vb1gj", SessionId: ydb://session/3?node_id=1&id=NWFkNDhjY2EtMzRhMGJjZTYtZDljODZkNzMtYWMwMzY2YWY=, Slow query, duration: 77.691195s, 
status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:10:18.180839Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:10:18.181293Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:10:18.217213Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;self_id=[1:7488006383550735139:8021];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038933;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039392;receive=72075186224039094; 2025-03-31T16:10:18.217686Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut |84.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut |84.7%| [LD] {RESULT} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest >> TPersQueueTest::ReadWithoutConsumerFirstClassCitizen [GOOD] Test command err: 2025-03-31T16:05:37.867657Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005488266858921:2205];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:05:37.867956Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:05:38.002844Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488005488358371487:2206];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:05:38.039630Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:05:38.468984Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-31T16:05:38.474122Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0009e9/r3tmp/tmplI7t9R/pdisk_1.dat 2025-03-31T16:05:38.963234Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:05:39.055338Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:05:39.189565Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:05:39.225267Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:05:39.225399Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:05:39.227823Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:05:39.227901Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:05:39.248665Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:05:39.248813Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:05:39.249524Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6186, node 1 2025-03-31T16:05:39.862361Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/0l5c/0009e9/r3tmp/yandex95oiWO.tmp 2025-03-31T16:05:39.862390Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/0l5c/0009e9/r3tmp/yandex95oiWO.tmp 2025-03-31T16:05:39.871619Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/0l5c/0009e9/r3tmp/yandex95oiWO.tmp 2025-03-31T16:05:39.871772Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:05:40.021366Z INFO: TTestServer started on Port 28012 GrpcPort 6186 TClient is connected to server localhost:28012 PQClient connected to localhost:6186 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:05:41.132943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 
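For readability, the DDL quoted in the KQP_SLOW_LOG entry of the preceding KqpJoinOrder::CanonizedJoinOrderTPCH10 output, with its \n escapes expanded (statements verbatim from the log):

CREATE TABLE t1 (
    id1 Int32 NOT NULL,
    PRIMARY KEY (id1)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t2 (
    id2 Int64 NOT NULL,
    t1_id1 Int64 NOT NULL,
    -- random_field2 Int32
    PRIMARY KEY (id2)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t3 (
    id3 Int16 NOT NULL,
    -- random_field3 Int32
    PRIMARY KEY (id3)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

The 77.691195s duration reported for this script is what tripped the KQP_SLOW_LOG warning.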
2025-03-31T16:05:41.279348Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-03-31T16:05:41.310676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:05:42.275475Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:05:42.867288Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488005488266858921:2205];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:05:42.867381Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:05:43.007349Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488005488358371487:2206];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:05:43.011212Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:05:47.184161Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005531308044706:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:05:47.184340Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:05:47.189591Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005531308044733:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:05:47.203163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710657:3, at schemeshard: 72057594046644480 2025-03-31T16:05:47.244113Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488005531308044735:2326], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710657 completed, doublechecking } 2025-03-31T16:05:47.339689Z node 2 :TX_PROXY ERROR: Actor# [2:7488005531308044764:2189] txid# 281474976710658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:05:48.470493Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7488005531308044771:2331], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:05:48.474360Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YTgwNjhmZDgtOGE0MTVkYmEtNjBjNTI3MGMtOTcxY2JhOGI=, ActorId: [2:7488005531308044704:2321], ActorState: ExecuteState, TraceId: 01jqpf7b158br6ccdncq2w2ppx, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:05:48.476404Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T16:05:48.490868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:05:48.495661Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488005531216533013:2354], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:05:48.497700Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NWZmOTMwMWItY2E0YTRiYjYtYzA2MGY2NDgtNzk2YjQ3MzY=, ActorId: [1:7488005531216532963:2346], ActorState: ExecuteState, TraceId: 01jqpf7b554p9vy3h7b9m4kedh, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:05:48.498072Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T16:05:48.818999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:05:49.225186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-03-31T16:05:49.765028Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jqpf7d769ergqrbjy4kpv8gs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmM0Yzc5NC03NjlkNzUwMy0zYzI1MTQwMS04ZDA0Yjg4MA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:748800553980 ... 
r disconnected, pipe [24:7488006810308530307:2581] destroyed 2025-03-31T16:10:45.650158Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _24_1_6877292893870029940_v1 2025-03-31T16:10:45.650182Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [24:7488006810308530305:2579] destroyed 2025-03-31T16:10:45.650200Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _24_1_6877292893870029940_v1 2025-03-31T16:10:45.650220Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [24:7488006810308530306:2580] destroyed 2025-03-31T16:10:45.650385Z node 25 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--topic1' requestId: 2025-03-31T16:10:45.650433Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--topic1' partition 1 2025-03-31T16:10:45.650563Z node 25 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _24_1_6877292893870029940_v1 2025-03-31T16:10:45.650587Z node 25 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _24_1_6877292893870029940_v1 2025-03-31T16:10:45.650610Z node 25 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _24_1_6877292893870029940_v1 2025-03-31T16:10:45.650630Z node 25 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _24_1_6877292893870029940_v1 2025-03-31T16:10:45.650650Z node 25 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _24_1_6877292893870029940_v1 2025-03-31T16:10:45.650761Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 1, State: StateIdle] read cookie 33 Topic 'rt3.dc1--topic1' partition 1 user $without_consumer offset 35 count 6 size 292 endOffset 40 max time lag 0ms effective offset 35 2025-03-31T16:10:45.650805Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 1, State: StateIdle] read cookie 33 added 0 blobs, size 0 count 0 last offset 35, current partition end offset: 40 2025-03-31T16:10:45.650900Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 1, State: StateIdle] Reading cookie 33. All data is from uncompacted head. 2025-03-31T16:10:45.650933Z node 25 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-03-31T16:10:45.651140Z node 25 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 1 messageNo: 0 requestId: cookie: 35 2025-03-31T16:10:45.655159Z node 24 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _24_2_1491106710520174429_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1(assignId:2) initDone 1 event { CmdReadResult { MaxOffset: 40 Result { Offset: 35 Data: "... 94 bytes ..." SourceId: "\000source" SeqNo: 37 WriteTimestampMS: 1743437445170 CreateTimestampMS: 1743437445161 UncompressedSize: 6 PartitionKey: "" ExplicitHash: "" } Result { Offset: 36 Data: "... 94 bytes ..." SourceId: "\000source" SeqNo: 38 WriteTimestampMS: 1743437445273 CreateTimestampMS: 1743437445267 UncompressedSize: 6 PartitionKey: "" ExplicitHash: "" } Result { Offset: 37 Data: "... 94 bytes ..." 
SourceId: "\000source" SeqNo: 39 WriteTimestampMS: 1743437445303 CreateTimestampMS: 1743437445285 UncompressedSize: 6 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 372 RealReadOffset: 37 WaitQuotaTimeMs: 0 EndOffset: 40 StartOffset: 0 } Cookie: 35 } 2025-03-31T16:10:45.655490Z node 24 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _24_2_1491106710520174429_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1(assignId:2) ready for read with readOffset 38 endOffset 40 2025-03-31T16:10:45.655550Z node 24 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _24_2_1491106710520174429_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1(assignId:2) EndOffset 40 ReadOffset 38 ReadGuid cc0e00e6-981ff9a-6275e9d4-abc93686 has messages 1 Bytes readed: 519 Offset: 35 from session 2 Offset: 36 from session 2 Offset: 37 from session 2 2025-03-31T16:10:45.655660Z node 24 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _24_2_1491106710520174429_v1 partition ready for read: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1(assignId:2), readOffset# 38, endOffset# 40, WTime# 1743437445303, sizeLag# 372 2025-03-31T16:10:45.655689Z node 24 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _24_2_1491106710520174429_v1TEvPartitionReady. Aval parts: 0 2025-03-31T16:10:45.655748Z node 24 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _24_2_1491106710520174429_v1 read done: guid# cc0e00e6-981ff9a-6275e9d4-abc93686, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1(assignId:2), size# 519 2025-03-31T16:10:45.655785Z node 24 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _24_2_1491106710520174429_v1 response to read: guid# cc0e00e6-981ff9a-6275e9d4-abc93686 2025-03-31T16:10:45.656106Z node 24 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _24_2_1491106710520174429_v1 Process answer. 
Aval parts: 1 2025-03-31T16:10:45.661648Z node 24 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _24_2_1491106710520174429_v1 grpc read done: success# 1, data# { read_request { bytes_size: 400 } } 2025-03-31T16:10:45.661884Z node 24 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _24_2_1491106710520174429_v1 got read request: guid# 8450b09f-ae656fba-b6a0660e-aab5c7e5 2025-03-31T16:10:45.661953Z node 24 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _24_2_1491106710520174429_v1 performing read request: guid# d4423465-4e32303b-6000f920-c5bab825, from# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1(assignId:2), count# 2, size# 173, partitionsAsked# 1, maxTimeLag# 0ms 2025-03-31T16:10:45.662811Z node 24 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _24_2_1491106710520174429_v1 READ FROM TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1(assignId:2)maxCount 2 maxSize 173 maxTimeLagMs 0 readTimestampMs 0 readOffset 38 EndOffset 40 ClientCommitOffset 0 committedOffset 0 Guid d4423465-4e32303b-6000f920-c5bab825 2025-03-31T16:10:45.666866Z node 25 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--topic1' requestId: 2025-03-31T16:10:45.666958Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--topic1' partition 1 2025-03-31T16:10:45.667171Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 1, State: StateIdle] read cookie 34 Topic 'rt3.dc1--topic1' partition 1 user $without_consumer offset 38 count 2 size 173 endOffset 40 max time lag 0ms effective offset 38 2025-03-31T16:10:45.667221Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 1, State: StateIdle] read cookie 34 added 0 blobs, size 0 count 0 last offset 38, current partition end offset: 40 2025-03-31T16:10:45.667304Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 1, State: StateIdle] Reading cookie 34. All data is from uncompacted head. 2025-03-31T16:10:45.667339Z node 25 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-03-31T16:10:45.667510Z node 25 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 1 messageNo: 0 requestId: cookie: 38 2025-03-31T16:10:45.668773Z node 24 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _24_2_1491106710520174429_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1(assignId:2) initDone 1 event { CmdReadResult { MaxOffset: 40 Result { Offset: 38 Data: "... 94 bytes ..." SourceId: "\000source" SeqNo: 40 WriteTimestampMS: 1743437445339 CreateTimestampMS: 1743437445332 UncompressedSize: 6 PartitionKey: "" ExplicitHash: "" } Result { Offset: 39 Data: "... 94 bytes ..." 
SourceId: "\000source" SeqNo: 41 WriteTimestampMS: 1743437445360 CreateTimestampMS: 1743437445352 UncompressedSize: 6 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 40 RealReadOffset: 39 WaitQuotaTimeMs: 0 EndOffset: 40 StartOffset: 0 } Cookie: 38 } 2025-03-31T16:10:45.669098Z node 24 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _24_2_1491106710520174429_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1(assignId:2) wait data in partition inited, cookie 1 from offset40 2025-03-31T16:10:45.669171Z node 24 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _24_2_1491106710520174429_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1(assignId:2) EndOffset 40 ReadOffset 40 ReadGuid d4423465-4e32303b-6000f920-c5bab825 has messages 1 2025-03-31T16:10:45.669368Z node 24 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _24_2_1491106710520174429_v1 read done: guid# d4423465-4e32303b-6000f920-c5bab825, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1(assignId:2), size# 352 2025-03-31T16:10:45.669427Z node 24 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _24_2_1491106710520174429_v1 response to read: guid# d4423465-4e32303b-6000f920-c5bab825 2025-03-31T16:10:45.669731Z node 24 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _24_2_1491106710520174429_v1 Process answer. Aval parts: 0 Bytes readed: 352 Offset: 38 from session 2 Offset: 39 from session 2 2025-03-31T16:10:45.670881Z node 24 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _24_2_1491106710520174429_v1 grpc read done: success# 1, data# { commit_offset_request { commit_offsets { partition_session_id: 2 offsets { end: 39 } } } } 2025-03-31T16:10:45.670928Z node 24 :PQ_READ_PROXY INFO: session cookie 2 consumer session _24_2_1491106710520174429_v1 closed with error: reason# can't commit when reading without a consumer 2025-03-31T16:10:45.671173Z node 24 :PQ_READ_PROXY INFO: session cookie 2 consumer session _24_2_1491106710520174429_v1 is DEAD 2025-03-31T16:10:45.672838Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _24_2_1491106710520174429_v1 2025-03-31T16:10:45.672895Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [24:7488006810308530321:2589] destroyed 2025-03-31T16:10:45.672922Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _24_2_1491106710520174429_v1 2025-03-31T16:10:45.672941Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [24:7488006810308530320:2588] destroyed 2025-03-31T16:10:45.672958Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _24_2_1491106710520174429_v1 2025-03-31T16:10:45.672976Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [24:7488006810308530319:2587] destroyed 2025-03-31T16:10:45.672993Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _24_2_1491106710520174429_v1 2025-03-31T16:10:45.673012Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [24:7488006810308530323:2591] destroyed 2025-03-31T16:10:45.673027Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _24_2_1491106710520174429_v1 2025-03-31T16:10:45.673045Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [24:7488006810308530322:2590] destroyed 2025-03-31T16:10:45.673100Z node 25 :PQ_READ_PROXY DEBUG: 
Direct read cache: server session deregistered: _24_2_1491106710520174429_v1 2025-03-31T16:10:45.673128Z node 25 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _24_2_1491106710520174429_v1 2025-03-31T16:10:45.673150Z node 25 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _24_2_1491106710520174429_v1 2025-03-31T16:10:45.673168Z node 25 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _24_2_1491106710520174429_v1 2025-03-31T16:10:45.673185Z node 25 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _24_2_1491106710520174429_v1 |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest >> TPersQueueTest::BadSids [GOOD] Test command err: 2025-03-31T16:06:36.931401Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:06:36.931497Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-31T16:06:37.644258Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:06:37.644334Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info === Server->StartServer(false); 2025-03-31T16:06:38.261323Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488005749873341405:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:06:38.262623Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:06:38.344122Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488005751814910596:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:06:38.344175Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:06:38.654283Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0009cc/r3tmp/tmpsSRnbU/pdisk_1.dat 2025-03-31T16:06:38.695400Z node 3 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-31T16:06:39.196607Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:06:39.196717Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:06:39.198627Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:06:39.198713Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:06:39.205122Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-03-31T16:06:39.205271Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:06:39.240682Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:06:39.398242Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:06:39.398668Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 24442, node 3 2025-03-31T16:06:39.606085Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:06:39.738168Z node 3 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:06:39.738235Z node 3 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:06:39.738800Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/0l5c/0009cc/r3tmp/yandexZUZzfn.tmp 2025-03-31T16:06:39.738814Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/0l5c/0009cc/r3tmp/yandexZUZzfn.tmp 2025-03-31T16:06:39.738980Z node 3 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/0l5c/0009cc/r3tmp/yandexZUZzfn.tmp 2025-03-31T16:06:39.739125Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:06:39.792346Z INFO: TTestServer started on Port 25004 GrpcPort 24442 TClient is connected to server localhost:25004 PQClient connected to localhost:24442 === TenantModeEnabled() = 0 === Init PQ - start server on port 24442 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
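The "=== Init DC" step quoted in the ReadWithoutConsumerFirstClassCitizen output above seeds the cluster registry that the earlier "Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]'" compile errors refer to. The statement, reflowed for readability and otherwise verbatim from the log:

UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight)
VALUES ("dc1", "localhost", true, true, 1000),
       ("dc2", "dc2.logbroker.yandex.net", false, true, 1000);

Those SCHEME_ERROR entries appear to be part of normal startup rather than failures: the trace shows the harness probing the table before it exists, then creating it and seeding it with this UPSERT.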
2025-03-31T16:06:40.442591Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-03-31T16:06:40.443261Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:06:40.443477Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-03-31T16:06:40.444246Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-31T16:06:40.444285Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:06:40.447998Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-03-31T16:06:40.448158Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-03-31T16:06:40.448331Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:06:40.448385Z node 3 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-03-31T16:06:40.448401Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-03-31T16:06:40.448412Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-03-31T16:06:40.452050Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480
2025-03-31T16:06:40.452092Z node 3 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480
2025-03-31T16:06:40.452112Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128
2025-03-31T16:06:40.453767Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480
2025-03-31T16:06:40.453798Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true
2025-03-31T16:06:40.453812Z node 3 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480
2025-03-31T16:06:40.460496Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480
2025-03-31T16:06:40.460554Z node 3 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480
2025-03-31T16:06:40.460610Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480
2025-03-31T16:06:40.460636Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1
2025-03-31T16:06:40.477795Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-03-31T16:06:40.483953Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816
2025-03-31T16:06:40.484089Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545
2025-03-31T16:06:40.487107Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743437200533, transactions count in step: 1, at schemeshard: 72057594046644480
2025-03-31T16:06:40.487236Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743437200533 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480
2025-03-31T16:06:40.487281Z node 3 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480
2025-03-31T16:06:40.487548Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240
2025-03-31T16:06:40.487587Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480
2025-03-31T16:06:40.487750Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1
2025-03-31T16:06:40.487822Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480
2025-03-31T16:06:40.494954Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480
2025-03-31T16:06:40.495013Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1]
2025-03-31T16:06:40.495249Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480
2025-03-31T16:06:40.495264Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:7488005754168309441:2444], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1
2025-03-31T16:06:40.495323Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480
2025-03-31T16:06:40.495356Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState
2025-03-31T16:06:40.495443Z nod ... :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [21:7488006796772324750:2569] (SourceId=base64:aa, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS
2025-03-31T16:10:42.811359Z node 21 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [21:7488006796772324750:2569] (SourceId=base64:aa, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL)
2025-03-31T16:10:42.811393Z node 21 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [21:7488006796772324750:2569] (SourceId=base64:aa, PreferedPartition=(NULL)) Start idle
2025-03-31T16:10:42.811452Z node 21 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 5 sessionId: partition: 0 expectedGeneration: (NULL)
2025-03-31T16:10:42.814087Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [21:7488006796772324785:2569], now have 1 active actors on pipe
2025-03-31T16:10:42.814451Z node 21 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 22, Generation: 1
2025-03-31T16:10:42.816054Z node 21 :PQ_WRITE_PROXY INFO: session inited cookie: 5 partition: 0 MaxSeqNo: 0 sessionId: base64:aa|72e612b5-6a3606e6-c4d8fca5-dbb37529_0
2025-03-31T16:10:42.814692Z node 22 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--topic1' requestId:
2025-03-31T16:10:42.814727Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--topic1' partition 0
2025-03-31T16:10:42.814829Z node 22 :PERSQUEUE INFO: new Cookie base64:aa|72e612b5-6a3606e6-c4d8fca5-dbb37529_0 generated for partition 0 topic 'rt3.dc1--topic1' owner base64:aa
2025-03-31T16:10:42.814958Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0
2025-03-31T16:10:42.815035Z node 22 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 0 messageNo: 0 requestId: cookie: 0
2025-03-31T16:10:42.815582Z node 22 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--topic1' requestId:
2025-03-31T16:10:42.815603Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--topic1' partition 0
2025-03-31T16:10:42.815719Z node 22 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 0 messageNo: 0 requestId: cookie: 0
2025-03-31T16:10:42.819776Z :INFO: [] MessageGroupId [base64:aa] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1743437442819 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 }
2025-03-31T16:10:42.819999Z :INFO: [] MessageGroupId [base64:aa] SessionId [] Write session established. Init response: session_id: "base64:aa|72e612b5-6a3606e6-c4d8fca5-dbb37529_0" topic: "topic1" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP
2025-03-31T16:10:42.823251Z :DEBUG: [] MessageGroupId [base64:aa] SessionId [base64:aa|72e612b5-6a3606e6-c4d8fca5-dbb37529_0] Write 1 messages with Id from 1 to 1
2025-03-31T16:10:42.823440Z :INFO: [] MessageGroupId [base64:aa] SessionId [base64:aa|72e612b5-6a3606e6-c4d8fca5-dbb37529_0] Write session: close. Timeout = 18446744073709551 ms
2025-03-31T16:10:42.824111Z :DEBUG: [] MessageGroupId [base64:aa] SessionId [base64:aa|72e612b5-6a3606e6-c4d8fca5-dbb37529_0] Write session: try to update token
2025-03-31T16:10:42.824197Z :DEBUG: [] MessageGroupId [base64:aa] SessionId [base64:aa|72e612b5-6a3606e6-c4d8fca5-dbb37529_0] Send 1 message(s) (0 left), first sequence number is 1
2025-03-31T16:10:42.831834Z node 21 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 5 sessionId: base64:aa|72e612b5-6a3606e6-c4d8fca5-dbb37529_0 grpc read done: success: 1 data: write_request[data omitted]
2025-03-31T16:10:42.832370Z node 21 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::NPQ::TEvPartitionWriter::TEvWriteRequest
2025-03-31T16:10:42.834319Z node 22 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--topic1' requestId:
2025-03-31T16:10:42.834370Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--topic1' partition 0
2025-03-31T16:10:42.834507Z node 22 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 0 messageNo: 0 requestId: cookie: 1
2025-03-31T16:10:42.836095Z node 21 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::IEventHandle
2025-03-31T16:10:42.836852Z node 22 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--topic1' requestId:
2025-03-31T16:10:42.836901Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--topic1' partition 0
2025-03-31T16:10:42.837366Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message topic: rt3.dc1--topic1 partition: 0 SourceId: '\0base64:aa' SeqNo: 1 partNo : 0 messageNo: 1 size 92 offset: -1
2025-03-31T16:10:42.837697Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--topic1' partition 0 part blob processing sourceId '\0base64:aa' seqNo 1 partNo 0
2025-03-31T16:10:42.877628Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--topic1' partition 0 part blob complete sourceId '\0base64:aa' seqNo 1 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 169 count 1 nextOffset 1 batches 1
2025-03-31T16:10:42.878746Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Add new write blob: topic 'rt3.dc1--topic1' partition 0 compactOffset 0,1 HeadOffset 0 endOffset 0 curOffset 1 d0000000000_00000000000000000000_00000_0000000001_00000| size 157 WTime 1743437442875
2025-03-31T16:10:42.878968Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] === DumpKeyValueRequest ===
2025-03-31T16:10:42.879025Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- delete ----------------
2025-03-31T16:10:42.879073Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] [x0000000000, x0000000001)
2025-03-31T16:10:42.879187Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- write -----------------
2025-03-31T16:10:42.879243Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] m0000000000pbase64:aa
2025-03-31T16:10:42.879269Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] d0000000000_00000000000000000000_00000_0000000001_00000|
2025-03-31T16:10:42.879289Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] i0000000000
2025-03-31T16:10:42.879334Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- rename ----------------
2025-03-31T16:10:42.879393Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] ===========================
2025-03-31T16:10:42.879462Z node 22 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV
2025-03-31T16:10:42.879568Z node 22 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 0 partNo 0 count 1 size 157
2025-03-31T16:10:42.886415Z node 22 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 0 count 1 size 157 actorID [22:7488006793882805343:2441]
2025-03-31T16:10:42.886595Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 102 WriteNewSizeFromSupportivePartitions# 0
2025-03-31T16:10:42.886677Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0
2025-03-31T16:10:42.886748Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0base64:aa', Topic: 'rt3.dc1--topic1', Partition: 0, SeqNo: 1, partNo: 0, Offset: 0 is stored on disk
2025-03-31T16:10:42.887008Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--topic1' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0
2025-03-31T16:10:42.887049Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--topic1' partition 0 user user send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0
2025-03-31T16:10:42.887191Z node 22 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 0 messageNo: 1 requestId: cookie: 1
2025-03-31T16:10:42.887359Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 0 Topic 'rt3.dc1--topic1' partition 0 user user offset 0 count 1 size 1024000 endOffset 1 max time lag 0ms effective offset 0
2025-03-31T16:10:42.887399Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 1
2025-03-31T16:10:42.887450Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Reading cookie 0. All data is from uncompacted head.
2025-03-31T16:10:42.887476Z node 22 :PERSQUEUE DEBUG: FormAnswer for 0 blobs
2025-03-31T16:10:42.887570Z node 22 :PERSQUEUE DEBUG: Topic 'rt3.dc1--topic1' partition 0 user user readTimeStamp done, result 1743437442835 queuesize 0 startOffset 0
2025-03-31T16:10:42.887761Z node 22 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. Tablet '72075186224037892' partition 0 offset 0 partno 0 count 1 parts 0 size 157
2025-03-31T16:10:42.888457Z node 21 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::IEventHandle
2025-03-31T16:10:42.895410Z :DEBUG: [] MessageGroupId [base64:aa] SessionId [base64:aa|72e612b5-6a3606e6-c4d8fca5-dbb37529_0] Write session got write response: sequence_numbers: 1 offsets: 0 already_written: false write_statistics { persist_duration_ms: 9 queued_in_partition_duration_ms: 39 }
2025-03-31T16:10:42.895490Z :DEBUG: [] MessageGroupId [base64:aa] SessionId [base64:aa|72e612b5-6a3606e6-c4d8fca5-dbb37529_0] Write session: acknoledged message 1
2025-03-31T16:10:42.924956Z :INFO: [] MessageGroupId [base64:aa] SessionId [base64:aa|72e612b5-6a3606e6-c4d8fca5-dbb37529_0] Write session will now close
2025-03-31T16:10:42.925093Z :DEBUG: [] MessageGroupId [base64:aa] SessionId [base64:aa|72e612b5-6a3606e6-c4d8fca5-dbb37529_0] Write session: aborting
2025-03-31T16:10:42.925823Z :INFO: [] MessageGroupId [base64:aa] SessionId [base64:aa|72e612b5-6a3606e6-c4d8fca5-dbb37529_0] Write session: gracefully shut down, all writes complete
2025-03-31T16:10:42.925902Z :DEBUG: [] MessageGroupId [base64:aa] SessionId [base64:aa|72e612b5-6a3606e6-c4d8fca5-dbb37529_0] Write session: destroy
2025-03-31T16:10:42.926766Z node 21 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 5 sessionId: base64:aa|72e612b5-6a3606e6-c4d8fca5-dbb37529_0 grpc read done: success: 0 data:
2025-03-31T16:10:42.926800Z node 21 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: base64:aa|72e612b5-6a3606e6-c4d8fca5-dbb37529_0 grpc read failed
2025-03-31T16:10:42.927322Z node 21 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 5 sessionId: base64:aa|72e612b5-6a3606e6-c4d8fca5-dbb37529_0
2025-03-31T16:10:42.927380Z node 21 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: base64:aa|72e612b5-6a3606e6-c4d8fca5-dbb37529_0 is DEAD
2025-03-31T16:10:42.931184Z node 21 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison
2025-03-31T16:10:42.935630Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [21:7488006796772324785:2569] destroyed
2025-03-31T16:10:42.935712Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner.
>> KqpJoinOrder::TPCDS87+ColumnStore [GOOD]
|84.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dynconfig
|84.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test
|84.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test
|84.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test
|84.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test
|84.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test
|84.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_begin_commit_logged
|84.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_cloud_ids_are_logged[attrs1]
|84.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test
|84.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/tx-schemeshard-ut_vector_index_build_reboots
|84.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/tx-schemeshard-ut_vector_index_build_reboots
|84.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/tx-schemeshard-ut_vector_index_build_reboots
>> test_dispatch.py::TestMapping::test_idle
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS87+ColumnStore [GOOD]
Test command err: Trying to start YDB, gRPC: 6961, MsgBus: 14407
2025-03-31T16:07:53.411319Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488006072756965524:2196];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:07:53.411815Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0019a2/r3tmp/tmprKTn9Q/pdisk_1.dat
2025-03-31T16:07:54.161238Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:07:54.197678Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:07:54.197764Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:07:54.202404Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 6961, node 1
2025-03-31T16:07:54.565488Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T16:07:54.565508Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T16:07:54.565520Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T16:07:54.565634Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:14407
TClient is connected to server localhost:14407
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-31T16:07:55.532154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:07:55.547038Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-03-31T16:07:58.393993Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488006094231802552:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:07:58.393993Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488006094231802540:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:07:58.394141Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:07:58.398026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480
2025-03-31T16:07:58.398546Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488006072756965524:2196];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:07:58.398617Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-31T16:07:58.412709Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488006094231802554:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:07:58.515646Z node 1 :TX_PROXY ERROR: Actor# [1:7488006094231802605:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:07:58.882635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:07:59.187555Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488006098526770137:2350];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:07:59.187851Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488006098526770137:2350];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:07:59.188157Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488006098526770137:2350];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:07:59.188343Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488006098526770137:2350];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:07:59.188490Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488006098526770137:2350];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:07:59.188598Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488006098526770137:2350];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:07:59.188702Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488006098526770137:2350];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:07:59.189312Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488006098526770137:2350];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:07:59.189477Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488006098526770137:2350];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:07:59.189609Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488006098526770137:2350];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:07:59.189732Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037896;self_id=[1:7488006098526770137:2350];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:07:59.189859Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488006098526770137:2350];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:07:59.206586Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488006098526770164:2360];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:07:59.206917Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488006098526770164:2360];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:07:59.207163Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488006098526770164:2360];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:07:59.207285Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488006098526770164:2360];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:07:59.207403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488006098526770164:2360];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:07:59.207525Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488006098526770164:2360];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:07:59.207617Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488006098526770164:2360];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:07:59.207711Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488006098526770164:2360];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:07:59.207846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488006098526770164:2360];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:07:59.207978Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488006098526770164:2360];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:07:59.208086Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488006098526770164:2360];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:07:59.215409Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037895;self_id=[1:7488006098526770164:2360];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:07:59.245123Z node 1 :TX_ ... WARN: tablet_id=72075186224039362;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:58.369918Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:58.373792Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039313;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:58.377017Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039297;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:58.381218Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:58.385094Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039299;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:58.388605Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:58.392426Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:58.395131Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:58.400696Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039335;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:58.400696Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039289;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:58.407374Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039366;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:58.407587Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039317;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:58.416234Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039343;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:58.418708Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039374;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:58.423737Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039370;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:58.425351Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039341;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:58.431056Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039354;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:58.432508Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039283;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:58.438399Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039352;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:58.440066Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039305;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:58.447753Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039358;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:58.454523Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039346;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:58.461801Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039328;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:58.470147Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039367;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:58.476652Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039372;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:58.482862Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039350;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:58.488686Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039382;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:58.495137Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:58.501901Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039315;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:58.505362Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039291;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:58.509132Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:58.512610Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:58.516070Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039378;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:58.521863Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039368;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:58.523822Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039364;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:09:58.644191Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpfd4p0daqdzrt1ddtd2p2e", SessionId: ydb://session/3?node_id=1&id=ZDBjZjJhYjctYzllMWU0YzktMTA3NWM5MmYtYzQwZTBmODI=, Slow query, duration: 61.363521s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:09:59.168566Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:09:59.168930Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:09:59.169035Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;self_id=[1:7488006549498396908:12116];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039392;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038933;receive=72075186224039094; 2025-03-31T16:09:59.169464Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:10:41.990802Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpffqpk7hb3e1jdexw2brbb", SessionId: ydb://session/3?node_id=1&id=ZDBjZjJhYjctYzllMWU0YzktMTA3NWM5MmYtYzQwZTBmODI=, Slow query, duration: 19.698707s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "PRAGMA TablePathPrefix='/Root/test/ds';\n\n-- NB: Subquerys\n$bla1 = (select distinct\n 
COALESCE(c_last_name,'') as c_last_name,\n COALESCE(c_first_name,'') as c_first_name,\n COALESCE(cast(d_date as date), cast(0 as Date)) as d_date\n from store_sales as store_sales\n cross join date_dim as date_dim\n cross join customer as customer\n where store_sales.ss_sold_date_sk = date_dim.d_date_sk\n and store_sales.ss_customer_sk = customer.c_customer_sk\n and d_month_seq between 1221 and 1221+11);\n\n$bla2 = ((select distinct\n COALESCE(c_last_name,'') as c_last_name,\n COALESCE(c_first_name,'') as c_first_name,\n COALESCE(cast(d_date as date), cast(0 as Date)) as d_date\n from catalog_sales as catalog_sales\n cross join date_dim as date_dim\n cross join customer as customer\n where catalog_sales.cs_sold_date_sk = date_dim.d_date_sk\n and catalog_sales.cs_bill_customer_sk = customer.c_customer_sk\n and d_month_seq between 1221 and 1221+11)\n union all\n (select distinct\n COALESCE(c_last_name,'') as c_last_name,\n COALESCE(c_first_name,'') as c_first_name,\n COALESCE(cast(d_date as date), cast(0 as Date)) as d_date\n from web_sales as web_sales\n cross join date_dim as date_dim\n cross join customer as customer\n where web_sales.ws_sold_date_sk = date_dim.d_date_sk\n and web_sales.ws_bill_customer_sk = customer.c_customer_sk\n and d_month_seq between 1221 and 1221+11));\n\n-- start query 1 in stream 0 using template query87.tpl and seed 1819994127\nselect count(*)\nfrom $bla1 bla1 left only join $bla2 bla2 using (c_last_name, c_first_name, d_date)\n;\n\n-- end query 1 in stream 0 using template query87.tpl", parameters: 0b
|84.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test
|84.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> unstable_connection.py::TestUnstableConnection::test [GOOD]
|84.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test
|84.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test
|84.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[insert] [GOOD] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_good_dynconfig] [GOOD]
|84.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test
|84.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-fifo] [GOOD]
>> test_copy_ops.py::TestSchemeShardCopyOps::test_given_table_when_create_copy_of_it_then_ok
>> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_cant_add_existing_column
>> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_add_column_after_table_creation_with_data_and_success
>> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_can_change_partition_config_options
>> test_copy_ops.py::TestSchemeShardCopyOps::test_when_copy_table_partition_config
|84.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test
|84.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test
|84.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test
|84.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test
|84.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test
|84.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test
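For readability, the two YQL texts quoted inside the KQP_SLOW_LOG records of the TPCDS87 section above are reproduced here with the logged \n escapes expanded into real line breaks. This is a plain transcription of the logged statements (the CREATE TABLE batch and TPC-DS query 87); nothing is added beyond whitespace.

CREATE TABLE t1 (
    id1 Int32 NOT NULL,
    PRIMARY KEY (id1)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t2 (
    id2 Int64 NOT NULL,
    t1_id1 Int64 NOT NULL,
    -- random_field2 Int32
    PRIMARY KEY (id2)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t3 (
    id3 Int16 NOT NULL,
    -- random_field3 Int32
    PRIMARY KEY (id3)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

PRAGMA TablePathPrefix='/Root/test/ds';

-- NB: Subquerys
$bla1 = (select distinct
    COALESCE(c_last_name,'') as c_last_name,
    COALESCE(c_first_name,'') as c_first_name,
    COALESCE(cast(d_date as date), cast(0 as Date)) as d_date
  from store_sales as store_sales
  cross join date_dim as date_dim
  cross join customer as customer
  where store_sales.ss_sold_date_sk = date_dim.d_date_sk
    and store_sales.ss_customer_sk = customer.c_customer_sk
    and d_month_seq between 1221 and 1221+11);

$bla2 = ((select distinct
    COALESCE(c_last_name,'') as c_last_name,
    COALESCE(c_first_name,'') as c_first_name,
    COALESCE(cast(d_date as date), cast(0 as Date)) as d_date
  from catalog_sales as catalog_sales
  cross join date_dim as date_dim
  cross join customer as customer
  where catalog_sales.cs_sold_date_sk = date_dim.d_date_sk
    and catalog_sales.cs_bill_customer_sk = customer.c_customer_sk
    and d_month_seq between 1221 and 1221+11)
  union all
  (select distinct
    COALESCE(c_last_name,'') as c_last_name,
    COALESCE(c_first_name,'') as c_first_name,
    COALESCE(cast(d_date as date), cast(0 as Date)) as d_date
  from web_sales as web_sales
  cross join date_dim as date_dim
  cross join customer as customer
  where web_sales.ws_sold_date_sk = date_dim.d_date_sk
    and web_sales.ws_bill_customer_sk = customer.c_customer_sk
    and d_month_seq between 1221 and 1221+11));

-- start query 1 in stream 0 using template query87.tpl and seed 1819994127
select count(*)
from $bla1 bla1 left only join $bla2 bla2 using (c_last_name, c_first_name, d_date)
;

-- end query 1 in stream 0 using template query87.tpl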
|84.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_good_dynconfig]
|84.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v1] [GOOD]
|84.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test
|84.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test
|84.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test
|84.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-std] [GOOD]
>> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_when_create_path_second_time_then_it_is_ok
>> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_can_change_compaction_policy_options
>> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_decreasing_number_of_generations_it_is_raise_error
>> TPersQueueTest::PreferredCluster_RemotePreferredClusterEnabledWhileSessionInitializing_SessionDiesOnlyAfterInitializationAndDelay [GOOD]
>> TPersQueueTest::PartitionsMapping
>> test_auditlog.py::test_dml_requests_arent_logged_when_sid_is_expected
|84.9%| [TA] $(B)/ydb/tests/functional/sqs/with_quotas/test-results/py3test/{meta.json ... results_accumulator.log}
|84.9%| [TA] {RESULT} $(B)/ydb/tests/functional/sqs/with_quotas/test-results/py3test/{meta.json ... results_accumulator.log}
|84.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap
|84.9%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap
|84.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap
|84.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test
>> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_same_scheme_then_ok
>> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_single_key_column_failure
>> ReadIteratorExternalBlobs::ExtBlobsWithDeletesInTheEnd [GOOD]
>> ReadIteratorExternalBlobs::ExtBlobsWithDeletesInTheMiddle
>> KqpJoinOrder::TPCDS16+ColumnStore [GOOD]
>> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_other_keys_then_ok
>> test_auditlog.py::test_dml_requests_arent_logged_when_anonymous [GOOD]
|84.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test
|84.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test
|84.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[replace] [GOOD] >> test_auditlog.py::test_dynconfig [GOOD]
|85.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queues_count_over_limit[tables_format_v1] [GOOD]
>> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_path_with_long_name_failed
|85.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[upsert] [GOOD] >> test_auditlog.py::test_single_dml_query_logged[update] [GOOD]
>> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_bad_dynconfig] [GOOD]
>> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_not_single_key_column_failure
|85.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS16+ColumnStore [GOOD]
Test command err: Trying to start YDB, gRPC: 24708, MsgBus: 12944
2025-03-31T16:07:55.646830Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488006081596341532:2201];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:07:55.647329Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0019a0/r3tmp/tmpWn4VQ7/pdisk_1.dat
2025-03-31T16:07:56.456745Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:07:56.460848Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:07:56.460937Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:07:56.464227Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 24708, node 1
2025-03-31T16:07:56.771663Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T16:07:56.771685Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T16:07:56.771693Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T16:07:56.771823Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:12944
TClient is connected to server localhost:12944
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-31T16:07:57.920083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:08:00.264547Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488006103071178536:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:08:00.264702Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:08:00.264998Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488006103071178548:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:08:00.269332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480
2025-03-31T16:08:00.279911Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488006103071178550:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:08:00.363884Z node 1 :TX_PROXY ERROR: Actor# [1:7488006103071178601:2340] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:08:00.645338Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488006081596341532:2201];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:08:00.645425Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:08:00.716526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:08:01.160985Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488006103071178824:2355];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:08:01.161195Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488006103071178824:2355];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:08:01.161516Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488006103071178824:2355];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:08:01.161655Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488006103071178824:2355];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:08:01.161789Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488006103071178824:2355];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:08:01.161914Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488006103071178824:2355];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:08:01.162011Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488006103071178824:2355];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:08:01.162128Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488006103071178824:2355];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:08:01.162230Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488006103071178824:2355];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:08:01.162324Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7488006103071178824:2355];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:08:01.162447Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488006103071178824:2355];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:08:01.162578Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488006103071178824:2355];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:08:01.165668Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7488006103071178820:2353];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:08:01.165776Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7488006103071178820:2353];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:08:01.165996Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7488006103071178820:2353];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:08:01.166104Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7488006103071178820:2353];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:08:01.166199Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7488006103071178820:2353];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:08:01.166293Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7488006103071178820:2353];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:08:01.166384Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7488006103071178820:2353];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:08:01.166499Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7488006103071178820:2353];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:08:01.166681Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7488006103071178820:2353];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:08:01.166792Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7488006103071178820:2353];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:08:01.166901Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037897;self_id=[1:7488006103071178820:2353];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:08:01.166997Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7488006103071178820:2353];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:08:01.213029Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488006107366146159:2361];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.c ... :211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:15.036062Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039346;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:15.042807Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039364;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:15.049657Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039342;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:15.056586Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039222;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:15.063760Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039226;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:15.071273Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039344;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:15.078274Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039264;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:15.084139Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039326;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:15.091056Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039382;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:15.098349Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039268;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:15.105077Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039198;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:15.106455Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039378;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:15.110509Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039340;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:15.113859Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039310;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:15.120562Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039192;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:15.124831Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039380;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:15.127026Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039370;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:15.133884Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039376;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:15.137176Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039230;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:15.141867Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039374;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:15.151702Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039199;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:15.159192Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039352;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:15.167962Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039328;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:15.167963Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039201;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:15.179785Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039197;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:15.184251Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039316;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:15.192274Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039207;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:15.198500Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039266;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:15.205892Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039265;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:15.496406Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039237;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:15.506994Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039227;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:15.517699Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039223;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:15.522227Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039187;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:15.532086Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039317;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:15.532401Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039321;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:15.540705Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:15.557388Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:15.559401Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:15.724090Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpfd9jpc14ds1dz3azczeyx", SessionId: ydb://session/3?node_id=1&id=Yzg1NjkwYzgtZTY0NDA2NTgtNzM3ZWM3ZjQtOWIwODM0MDA=, Slow query, duration: 73.428264s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:10:16.556091Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:10:16.556634Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:10:16.557208Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039392;self_id=[1:7488006588402543315:12146];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039392;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038933;receive=72075186224039094; 2025-03-31T16:10:16.557584Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:10:56.639829Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpfgad27zyf4r13ngj1qn3n", SessionId: ydb://session/3?node_id=1&id=Yzg1NjkwYzgtZTY0NDA2NTgtNzM3ZWM3ZjQtOWIwODM0MDA=, Slow query, duration: 15.134190s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "-- NB: Subquerys\n$orders_with_several_warehouses = (\n select cs_order_number\n from `/Root/test/ds/catalog_sales`\n group by cs_order_number\n having count(distinct cs_warehouse_sk) > 1\n);\n\n-- start query 1 in stream 0 using template query16.tpl and seed 171719422\nselect\n count(distinct cs1.cs_order_number) as `order count`\n ,sum(cs_ext_ship_cost) as `total shipping cost`\n ,sum(cs_net_profit) as `total net profit`\nfrom\n `/Root/test/ds/catalog_sales` cs1\n cross join `/Root/test/ds/date_dim`\n cross join `/Root/test/ds/customer_address`\n cross join `/Root/test/ds/call_center`\n left semi join $orders_with_several_warehouses cs2 on cs1.cs_order_number = cs2.cs_order_number\n left only join `/Root/test/ds/catalog_returns` cr1 on cs1.cs_order_number = cr1.cr_order_number\nwhere\n cast(d_date as date) between cast('1999-4-01' as date) and\n (cast('1999-4-01' as date) + DateTime::IntervalFromDays(60))\nand cs1.cs_ship_date_sk = d_date_sk\nand cs1.cs_ship_addr_sk = ca_address_sk\nand ca_state = 'IL'\nand cs1.cs_call_center_sk = cc_call_center_sk\nand cc_county in ('Richland County','Bronx County','Maverick County','Mesa County',\n 'Raleigh County'\n)\norder by `order count`\nlimit 100;\n", parameters: 0b |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> KqpJoinOrder::CanonizedJoinOrderTPCH6 [GOOD] |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_bad_dynconfig] [GOOD] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_can_change_partition_config_options [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_other_keys_then_ok [GOOD] |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_cant_add_existing_column [GOOD] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_after_create_table_it_is_success >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_and_drop_table_many_times_in_range ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_good_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/0l5c/00152f/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk5/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_with_auth_other-_good_dynconfig/audit.txt 2025-03-31T16:10:57.804240Z: {"sanitized_token":"othe****ltin 
(27F910A9)","subject":"other-user@builtin","new_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 0\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","status":"SUCCESS","component":"console","operation":"REPLACE DYNCONFIG","remote_address":"127.0.0.1"} ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[insert] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/0l5c/00153a/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk17/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.insert/audit.txt 2025-03-31T16:10:57.692934Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-03-31T16:10:57.692877Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"insert into `/Root/test_auditlog.py/test-table` (id, value) values (100, 100), (101, 101)","start_time":"2025-03-31T16:10:57.604216Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_ydb_create_and_remove_directory_success >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_single_key_column_failure [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH6 [GOOD] Test command err: Trying to start YDB, gRPC: 24678, MsgBus: 4668 2025-03-31T16:08:34.260012Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488006249208283512:2198];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:08:34.264774Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001999/r3tmp/tmpLOSaez/pdisk_1.dat 2025-03-31T16:08:35.168759Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:08:35.178417Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:08:35.178502Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:08:35.185229Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24678, node 1 2025-03-31T16:08:35.399935Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:08:35.399962Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:08:35.399969Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:08:35.400146Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4668 TClient is connected to server localhost:4668 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:08:36.414413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:08:36.473181Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:08:39.250143Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488006249208283512:2198];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:08:39.250248Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:08:39.890911Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488006270683120530:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:08:39.891029Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:08:39.891449Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488006270683120542:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:08:39.900890Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:08:39.920027Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488006270683120544:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:08:40.014638Z node 1 :TX_PROXY ERROR: Actor# [1:7488006274978087891:2343] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:08:40.814381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:08:41.091732Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488006274978088151:2356];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:08:41.091732Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488006274978088142:2354];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:08:41.091960Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488006274978088142:2354];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:08:41.092301Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488006274978088142:2354];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:08:41.092376Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488006274978088151:2356];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:08:41.092505Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488006274978088142:2354];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:08:41.092589Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488006274978088151:2356];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:08:41.092630Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488006274978088142:2354];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:08:41.092702Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488006274978088151:2356];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:08:41.092762Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488006274978088142:2354];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:08:41.092809Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488006274978088151:2356];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 
2025-03-31T16:08:41.092871Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488006274978088142:2354];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:08:41.092933Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488006274978088151:2356];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:08:41.093012Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488006274978088142:2354];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:08:41.093069Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488006274978088151:2356];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:08:41.093149Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488006274978088142:2354];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:08:41.093185Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488006274978088151:2356];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:08:41.093272Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488006274978088142:2354];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:08:41.093316Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488006274978088151:2356];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:08:41.093457Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488006274978088142:2354];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:08:41.093537Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488006274978088151:2356];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:08:41.093599Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488006274978088142:2354];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:08:41.093655Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488006274978088151:2356];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:08:41.093753Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7488006274978088151:2356];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:08:41.132717Z node 1 :TX_C ... oller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:56.596048Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:56.596048Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039187;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:56.603474Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:56.603474Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039286;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:56.611328Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039318;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:56.611329Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039360;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:56.618953Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039382;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:56.618958Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:56.626176Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039354;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:56.626318Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039344;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:56.633185Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039298;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:56.633813Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039316;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:56.640226Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039364;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:56.640936Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:56.647210Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039362;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:56.647208Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039350;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:56.654329Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039308;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:56.654328Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039352;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:56.661501Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:56.661501Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:56.668580Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039252;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:56.668578Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039376;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:56.675735Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:56.675735Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039312;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:56.683148Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:56.683524Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:56.690455Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039220;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:56.690466Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:56.697039Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039348;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:56.697668Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:56.702276Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:56.704773Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:56.707692Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039288;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:56.711823Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:56.712076Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:56.717182Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:56.719127Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039356;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:56.722184Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:56.725949Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:56.728348Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:56.732961Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039378;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:56.734843Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039340;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:56.740254Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:56.740604Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039380;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:56.746142Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:10:56.925334Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpff9yz8vz7eqt1a01rkddn", SessionId: ydb://session/3?node_id=1&id=OGM5N2Y4OTQtOGI4NWRhNjQtMjJlMDkzOTgtYjY4ZDFmMjk=, Slow query, duration: 48.696485s, 
status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:10:57.216262Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:10:57.216442Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;self_id=[1:7488006683000019917:8672];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038933;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039392;receive=72075186224039094; 2025-03-31T16:10:57.216673Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:10:57.216815Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_when_create_path_second_time_then_it_is_ok [GOOD] |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_delete_directory_from_leaf_success >> test_auditlog.py::test_dml_begin_commit_logged [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_same_primary_key_and_other_scheme_then_ok |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_arent_logged_when_anonymous [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/0l5c/0014fb/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk11/testing_out_stuff/test_auditlog.py.test_dml_requests_arent_logged_when_anonymous/audit.txt >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_decreasing_number_of_generations_it_is_raise_error [GOOD] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_can_change_compaction_policy_options [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_path_with_long_name_failed [GOOD] >> test_auditlog.py::test_single_dml_query_logged[delete] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_table_and_path_with_name_clash_unsuccessful [GOOD] >> test_auditlog.py::test_cloud_ids_are_logged[attrs1] [GOOD] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_bad_dynconfig] >> 
test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_add_column_after_table_creation_with_data_and_success [GOOD] >> test_copy_ops.py::TestSchemeShardCopyOps::test_when_copy_table_partition_config [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dynconfig [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/0l5c/00147d/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk15/testing_out_stuff/test_auditlog.py.test_dynconfig/audit.txt 2025-03-31T16:11:07.847475Z: {"sanitized_token":"**** (B6C6F477)","subject":"root@builtin","new_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 0\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","status":"SUCCESS","component":"console","operation":"REPLACE DYNCONFIG","remote_address":"127.0.0.1"} |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_add_and_remove_column_many_times_success >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_ydb_create_and_remove_directory_success [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_same_scheme_then_ok [GOOD] |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/ttl_tiering/py3test >> unstable_connection.py::TestUnstableConnection::test [GOOD] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_not_single_key_column_failure [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_and_drop_table_many_times_in_range [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_when_delete_path_with_folder_then_get_error_response >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_many_directories_success >> TPersQueueTest::PartitionsMapping [GOOD] >> TPersQueueTest::MessageMetadata >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_good_dynconfig] [GOOD] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_good_dynconfig] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_after_create_table_it_is_success [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_many_directories_success [GOOD] |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[replace] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/0l5c/0014c9/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk18/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.replace/audit.txt 2025-03-31T16:11:07.701569Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-03-31T16:11:07.701510Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"replace into `/Root/test_auditlog.py/test-table` (id, value) values (2, 3), (3, 
3)","start_time":"2025-03-31T16:11:07.613925Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[upsert] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/0l5c/0014e5/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk21/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.upsert/audit.txt 2025-03-31T16:11:08.654283Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-03-31T16:11:08.654236Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"upsert into `/Root/test_auditlog.py/test-table` (id, value) values (4, 4), (5, 5)","start_time":"2025-03-31T16:11:08.575824Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} >> test_auditlog.py::test_dml_requests_arent_logged_when_sid_is_expected [GOOD] |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_can_change_partition_config_options [GOOD] |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_delete_directory_from_leaf_success [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_delete_table_that_doesnt_exist_failure [GOOD] |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_bad_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/0l5c/0014b0/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk2/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_no_auth-_bad_dynconfig/audit.txt 2025-03-31T16:11:11.692393Z: {"reason":"ydb/library/fyamlcpp/fyamlcpp.cpp:1053: \n6:12 plain scalar cannot start with '%'","sanitized_token":"{none}","remote_address":"127.0.0.1","status":"ERROR","subject":"{none}","operation":"REPLACE DYNCONFIG","new_config":"\n---\n123metadata:\n kind: MainConfig\n cluster: \"\"\n version: %s\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","component":"console"} >> test_copy_ops.py::TestSchemeShardCopyOps::test_given_table_when_create_copy_of_it_then_ok [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_same_primary_key_and_other_scheme_then_ok [GOOD] |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_when_delete_path_with_folder_then_get_error_response [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> 
test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_bad_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/0l5c/0014d8/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk4/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_with_auth_other-_bad_dynconfig/audit.txt 2025-03-31T16:11:08.971447Z: {"reason":"ydb/library/fyamlcpp/fyamlcpp.cpp:1053: \n6:12 plain scalar cannot start with '%'","sanitized_token":"othe****ltin (27F910A9)","remote_address":"127.0.0.1","status":"ERROR","subject":"other-user@builtin","operation":"REPLACE DYNCONFIG","new_config":"\n---\n123metadata:\n kind: MainConfig\n cluster: \"\"\n version: %s\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","component":"console"} ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[update] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/0l5c/001506/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk20/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.update/audit.txt 2025-03-31T16:11:08.611929Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-03-31T16:11:08.611875Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2025-03-31T16:11:08.425542Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_ydb_remove_directory_that_does_not_exist_failure |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_logged_when_sid_is_unexpected |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_decreasing_number_of_generations_it_is_raise_error [GOOD] |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_can_change_compaction_policy_options [GOOD] |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> data_correctness.py::TestDataCorrectness::test [GOOD] |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queues_count_over_limit[tables_format_v1] [GOOD] |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_begin_commit_logged [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/0l5c/001468/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk10/testing_out_stuff/test_auditlog.py.test_dml_begin_commit_logged/audit.txt 2025-03-31T16:11:16.184097Z: 
{"tx_id":"01jqpfhcapf8p0brs5rvb5rw7b","database":"/Root/test_auditlog.py","end_time":"2025-03-31T16:11:16.184041Z","sanitized_token":"**** (B6C6F477)","remote_address":"127.0.0.1","status":"SUCCESS","start_time":"2025-03-31T16:11:16.182317Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"BeginTransactionRequest","component":"grpc-proxy"} 2025-03-31T16:11:16.364755Z: {"tx_id":"01jqpfhcapf8p0brs5rvb5rw7b","database":"/Root/test_auditlog.py","end_time":"2025-03-31T16:11:16.364692Z","sanitized_token":"**** (B6C6F477)","remote_address":"127.0.0.1","commit_tx":"0","status":"SUCCESS","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2025-03-31T16:11:16.193366Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-03-31T16:11:16.412014Z: {"tx_id":"01jqpfhcapf8p0brs5rvb5rw7b","database":"/Root/test_auditlog.py","end_time":"2025-03-31T16:11:16.411914Z","sanitized_token":"**** (B6C6F477)","remote_address":"127.0.0.1","status":"SUCCESS","start_time":"2025-03-31T16:11:16.379748Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"CommitTransactionRequest","component":"grpc-proxy"} |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[delete] [GOOD] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_bad_dynconfig] [GOOD] |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_cloud_ids_are_logged[attrs0] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_good_dynconfig] |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_cant_add_existing_column [GOOD] |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_after_create_table_it_is_success [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_ydb_remove_directory_that_does_not_exist_failure [GOOD] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_good_dynconfig] [GOOD] |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_add_and_remove_column_many_times_success [GOOD] |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_single_key_column_failure [GOOD] |85.2%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/functional/audit/py3test |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_when_create_path_second_time_then_it_is_ok [GOOD] |85.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_good_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/0l5c/001411/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk1/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_bad_auth-_good_dynconfig/audit.txt 2025-03-31T16:11:23.005834Z: {"sanitized_token":"**** (C877DF61)","subject":"__bad__@builtin","new_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 0\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","status":"SUCCESS","component":"console","operation":"REPLACE DYNCONFIG","remote_address":"127.0.0.1"} |85.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_cloud_ids_are_logged[attrs1] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/0l5c/001463/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk9/testing_out_stuff/test_auditlog.py.test_cloud_ids_are_logged.attrs1/audit.txt 2025-03-31T16:11:18.710950Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-03-31T16:11:18.710893Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2025-03-31T16:11:18.508843Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","folder_id":"folder-id-B","component":"grpc-proxy"} |85.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_arent_logged_when_sid_is_expected [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/0l5c/0013f8/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk12/testing_out_stuff/test_auditlog.py.test_dml_requests_arent_logged_when_sid_is_expected/audit.txt |85.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> TPersQueueTest::MessageMetadata [GOOD] >> TPersQueueTest::LOGBROKER_7820 |85.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.3%| 
[TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_add_column_after_table_creation_with_data_and_success [GOOD] |85.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_when_delete_path_with_folder_then_get_error_response [GOOD] |85.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_ydb_create_and_remove_directory_success [GOOD] >> ttl_unavailable_s3.py::TestUnavailableS3::test [GOOD] |85.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_logged_when_sid_is_unexpected [GOOD] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_bad_dynconfig] |85.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[delete] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/0l5c/00137a/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk16/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.delete/audit.txt 2025-03-31T16:11:33.569965Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-03-31T16:11:33.569905Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"delete from `/Root/test_auditlog.py/test-table` where id = 100 or id = 101","start_time":"2025-03-31T16:11:33.406707Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} |85.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_other_keys_then_ok [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_bad_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/0l5c/001378/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk6/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_with_auth_root-_bad_dynconfig/audit.txt 2025-03-31T16:11:33.799122Z: {"reason":"ydb/library/fyamlcpp/fyamlcpp.cpp:1053: \n6:12 plain 
scalar cannot start with '%'","sanitized_token":"**** (B6C6F477)","remote_address":"127.0.0.1","status":"ERROR","subject":"root@builtin","operation":"REPLACE DYNCONFIG","new_config":"\n---\n123metadata:\n kind: MainConfig\n cluster: \"\"\n version: %s\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","component":"console"} |85.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_not_single_key_column_failure [GOOD] >> test_auditlog.py::test_single_dml_query_logged[select] |85.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_copy_ops.py::TestSchemeShardCopyOps::test_when_copy_table_partition_config [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_good_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/0l5c/00136b/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk7/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_with_auth_root-_good_dynconfig/audit.txt 2025-03-31T16:11:36.131397Z: {"sanitized_token":"**** (B6C6F477)","subject":"root@builtin","new_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 0\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","status":"SUCCESS","component":"console","operation":"REPLACE DYNCONFIG","remote_address":"127.0.0.1"} |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_good_dynconfig] [GOOD] >> test_dispatch.py::TestMapping::test_idle [GOOD] |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_same_scheme_then_ok [GOOD] >> Secret::ValidationQueryService [GOOD] 
|85.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_cloud_ids_are_logged[attrs0] [GOOD] |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::ValidationQueryService [GOOD] Test command err: 2025-03-31T16:08:10.629684Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:08:10.630083Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:08:10.630263Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000e58/r3tmp/tmpwoY8uk/pdisk_1.dat 2025-03-31T16:08:12.616014Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.148400s 2025-03-31T16:08:12.616137Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.148551s TServer::EnableGrpc on GrpcPort 9121, node 1 TClient is connected to server localhost:10494 2025-03-31T16:08:15.703107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:08:15.867792Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:08:15.868026Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:08:15.868069Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:08:15.868105Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:08:15.877048Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:08:15.927354Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:08:15.927548Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:08:15.945874Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Initialization finished REQUEST=CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2025-03-31T16:08:28.294228Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:748:2628], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:08:28.294380Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:757:2633], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:08:28.294490Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:08:28.386733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-03-31T16:08:28.436292Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:762:2636], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715657 completed, doublechecking } 2025-03-31T16:08:28.535064Z node 1 :TX_PROXY ERROR: Actor# [1:813:2668] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:08:29.362902Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:823:2677], status: GENERIC_ERROR, issues:
<main>:1:20: Error: mismatched input '-' expecting '(' 2025-03-31T16:08:29.427739Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZmE4M2NkMjEtNmYzODc2YWMtYjAwNjFiMmMtYmY1YzZhZmU=, ActorId: [1:746:2626], ActorState: ExecuteState, TraceId: 01jqpfc83wcg8a2qeafst1bqxb, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: REQUEST=CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;RESULT=
<main>:1:20: Error: mismatched input '-' expecting '(' ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=0;WAITING=1 2025-03-31T16:08:40.438071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:1, at schemeshard: 72057594046644480 2025-03-31T16:08:45.597142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:08:46.136698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:1, at schemeshard: 72057594046644480 2025-03-31T16:08:47.969854Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-03-31T16:08:49.297001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-31T16:08:50.488616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-03-31T16:08:51.806234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-03-31T16:08:56.442790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-03-31T16:09:00.212172Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmVjMGZlNmQtMmUwYzg4MjItNDc4MzVmOTEtNzgwMjA0N2U=, ActorId: [1:841:2687], ActorState: ExecuteState, TraceId: 01jqpfckfw4z9rcgcpjwxk47gb, Create QueryResponse for error on request, msg: 2025-03-31T16:09:00.212345Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpfckfw4z9rcgcpjwxk47gb", SessionId: ydb://session/3?node_id=1&id=MmVjMGZlNmQtMmUwYzg4MjItNDc4MzVmOTEtNzgwMjA0N2U=, Slow query, duration: 20.527458s, status: GENERIC_ERROR, user: root@builtin, results: 0b, text: "ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`", parameters: 0b 2025-03-31T16:09:00.224434Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715689. Ctx: { TraceId: 01jqpfckfw4z9rcgcpjwxk47gb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmVjMGZlNmQtMmUwYzg4MjItNDc4MzVmOTEtNzgwMjA0N2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;RESULT=
<main>: Error: preparation problem: secret secret1 not found for alter ;EXPECTATION=0 2025-03-31T16:09:01.389690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:09:01.389758Z node 1 :IMPORT WARN: Table profiles were not loaded FINISHED_REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2025-03-31T16:09:46.563629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715713:0, at schemeshard: 72057594046644480 2025-03-31T16:09:49.094616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715720:0, at schemeshard: 72057594046644480 2025-03-31T16:09:52.970979Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715727:0, at schemeshard: 72057594046644480 2025-03-31T16:09:54.503923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715732:0, at schemeshard: 72057594046644480 2025-03-31T16:09:58.185597Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpfemq35xq9pr5sby4v3jxv", SessionId: ydb://session/3?node_id=1&id=OGE0YzNhZmUtNDYyODE1NjItNDVkNzQ1YmEtYWIwOWJlZTA=, Slow query, duration: 11.707712s, status: STATUS_CODE_UNSPECIFIED, user: root@builtin, results: 0b, text: "CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS)", parameters: 0b REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT `secret2:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=0;WAITING=1 2025-03-31T16:10:12.272696Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODNmMDBhMTUtNGFiY2Q1Y2UtMWI1ODZiZjktZjM5NDk1Mg==, ActorId: [1:3334:4570], ActorState: ExecuteState, TraceId: 01jqpffc7je31xwr8nk2njw60v, Create QueryResponse for error on request, msg: 2025-03-31T16:10:12.275153Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715745. Ctx: { TraceId: 01jqpffc7je31xwr8nk2njw60v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODNmMDBhMTUtNGFiY2Q1Y2UtMWI1ODZiZjktZjM5NDk1Mg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=CREATE OBJECT `secret2:test@test1` (TYPE SECRET_ACCESS);RESULT=
<main>: Error: preparation problem: used in access secret secret2 not found ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT `secret2:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT IF NOT EXISTS `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2025-03-31T16:10:27.711575Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=15; 2025-03-31T16:10:27.712017Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 15 at tablet 72075186224037892 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-03-31T16:10:27.712302Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 15 at tablet 72075186224037892 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-03-31T16:10:27.712919Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:3738:4803], Table: `//Root/.metadata/secrets/access` ([72057594046644480:13:1]), SessionActorId: [1:3645:4803]Got CONSTRAINT VIOLATION for table `//Root/.metadata/secrets/access`. ShardID=72075186224037892, Sink=[1:3738:4803].{
<main>: Error: Duplicate keys have been found., code: 2012 } 2025-03-31T16:10:27.713836Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:3731:4803], SessionActorId: [1:3645:4803], statusCode=PRECONDITION_FAILED. Issue=
<main>: Error: Constraint violated. Table: `//Root/.metadata/secrets/access`., code: 2012
<main>: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[1:3645:4803]. isRollback=0 2025-03-31T16:10:27.714554Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmY4YzA5OTUtNmE4NzI5N2YtYzNlYTk2NDYtYWE1MjIwOWI=, ActorId: [1:3645:4803], ActorState: ExecuteState, TraceId: 01jqpffwvccpjacakhb6t3hjgx, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:3732:4803] from: [1:3731:4803] 2025-03-31T16:10:27.714811Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:3732:4803] TxId: 281474976715758. Ctx: { TraceId: 01jqpffwvccpjacakhb6t3hjgx, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmY4YzA5OTUtNmE4NzI5N2YtYzNlYTk2NDYtYWE1MjIwOWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
<main>: Error: Constraint violated. Table: `//Root/.metadata/secrets/access`., code: 2012 subissue: {
<main>: Error: Duplicate keys have been found., code: 2012 } } 2025-03-31T16:10:27.718276Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmY4YzA5OTUtNmE4NzI5N2YtYzNlYTk2NDYtYWE1MjIwOWI=, ActorId: [1:3645:4803], ActorState: ExecuteState, TraceId: 01jqpffwvccpjacakhb6t3hjgx, Create QueryResponse for error on request, msg: 2025-03-31T16:10:27.763609Z node 1 :METADATA_PROVIDER ERROR: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: PRECONDITION_FAILED issues { message: "Constraint violated. Table: `//Root/.metadata/secrets/access`." issue_code: 2012 severity: 1 issues { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { id: "01jqpffwhy9p9f19hnndvbzesn" } } } } ;request=session_id: "ydb://session/3?node_id=1&id=MmY4YzA5OTUtNmE4NzI5N2YtYzNlYTk2NDYtYWE1MjIwOWI=" tx_control { tx_id: "01jqpffwhy9p9f19hnndvbzesn" } query { yql_text: "DECLARE $objects AS List<Struct<ownerUserId:Utf8,secretId:Utf8,accessSID:Utf8>>;\nINSERT INTO `//Root/.metadata/secrets/access`\nSELECT ownerUserId,secretId,accessSID FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "ownerUserId" type { type_id: UTF8 } } members { name: "secretId" type { type_id: UTF8 } } members { name: "accessSID" type { type_id: UTF8 } } } } } } value { items { items { text_value: "root@builtin" } items { text_value: "secret1" } items { text_value: "test@test1" } } } } } ; REQUEST=CREATE OBJECT IF NOT EXISTS `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT IF NOT EXISTS `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=0;WAITING=1 2025-03-31T16:10:41.086429Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTJjZWYxZGQtYWM1NTkxNGYtZjMxMDI3ZWYtMTU4MzdiOTY=, ActorId: [1:3944:5023], ActorState: ExecuteState, TraceId: 01jqpfg8xscv0jgxfaxq54ajg7, Create QueryResponse for error on request, msg: 2025-03-31T16:10:41.088040Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715770. Ctx: { TraceId: 01jqpfg8xscv0jgxfaxq54ajg7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTJjZWYxZGQtYWM1NTkxNGYtZjMxMDI3ZWYtMTU4MzdiOTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=DROP OBJECT `secret1` (TYPE SECRET);RESULT=
<main>: Error: preparation problem: secret secret1 using in access for test@test1 ;EXPECTATION=0 FINISHED_REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=0;WAITING=1 2025-03-31T16:10:53.063589Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:4294:5294], for# root@builtin, access# DescribeSchema 2025-03-31T16:10:53.063701Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:4294:5294], for# root@builtin, access# DescribeSchema 2025-03-31T16:10:53.065878Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:4291:5291], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:1:1: Error: At function: KiReadTable!
<main>:1:1: Error: Cannot find table 'db.[/Root/.metadata/secrets/values]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:10:53.067578Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjBjNzE3MmYtMTc2ZjhjZmYtZDAwYjZkZjAtMjI2MWFhNjE=, ActorId: [1:4259:5261], ActorState: ExecuteState, TraceId: 01jqpfgnkg185hjeeevh1jcf52, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: REQUEST=SELECT * FROM `/Root/.metadata/secrets/values`;RESULT=
<main>: Error: Type annotation, code: 1030
<main>:1:1: Error: At function: KiReadTable!
<main>:1:1: Error: Cannot find table 'db.[/Root/.metadata/secrets/values]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 ;EXPECTATION=0 REQUEST=SELECT * FROM `/Root/.metadata/secrets/values`;EXPECTATION=0 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2025-03-31T16:11:04.911250Z node 1 :TICKET_PARSER ERROR: Ticket **** (51449FAE): Could not find correct token validator 2025-03-31T16:11:05.867704Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODYxMmFlMTItODBiNzQ1ZTQtNGEzYzdhMmYtOTgyMWZiYw==, ActorId: [1:4524:5460], ActorState: ExecuteState, TraceId: 01jqpfh16yeh56679402ncdm75, Create QueryResponse for error on request, msg: 2025-03-31T16:11:05.869065Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715794. Ctx: { TraceId: 01jqpfh16yeh56679402ncdm75, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODYxMmFlMTItODBiNzQ1ZTQtNGEzYzdhMmYtOTgyMWZiYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
<main>: Error: cannot CREATE objects: Secret already exists: secret1 ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=UPSERT OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2025-03-31T16:11:19.142729Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Mzc1YTNjYjEtNzY1YTVhYjctM2QwYWRmZDYtMmQxMTZlYTc=, ActorId: [1:4917:5754], ActorState: ExecuteState, TraceId: 01jqpfhe4s6zhe4jvq2ghhhvez, Create QueryResponse for error on request, msg: 2025-03-31T16:11:19.144427Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715810. Ctx: { TraceId: 01jqpfhe4s6zhe4jvq2ghhhvez, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Mzc1YTNjYjEtNzY1YTVhYjctM2QwYWRmZDYtMmQxMTZlYTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=UPSERT OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
<main>: Error: cannot UPSERT objects: Secret already exists: secret1 ;EXPECTATION=0 FINISHED_REQUEST=UPSERT OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT secret2 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT secret2 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT secret2 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 2025-03-31T16:11:46.075231Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715843. Ctx: { TraceId: 01jqpfj91dfqq8q1w09zn4285e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTZiOTNiMjAtY2JhMzBmM2EtZDQ3ZTQ4NDgtYmZlNTg4Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_many_directories_success [GOOD] |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_table_and_path_with_name_clash_unsuccessful [GOOD] |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_logged_when_unauthorized |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKUtf8 2025-03-31 16:11:49,050 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-03-31 16:11:49,212 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout.
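The two wrapper warnings above, together with the yatest traceback at the end of this test's output (process.py wait -> wait_for -> TimeoutError), come from a wait-with-deadline loop around the test binary: the runner polls the child process, and once the 600-second budget is exhausted it reports the overrun, dumps the process tree below, and raises a timeout error. A minimal sketch of that pattern with hypothetical names (wait_with_timeout, ProcessTimeoutError); it is illustrative only, not yatest's actual implementation:

import subprocess
import time

class ProcessTimeoutError(RuntimeError):
    pass

def wait_with_timeout(proc: subprocess.Popen, timeout: float, poll_interval: float = 0.1) -> int:
    # Poll the child until it exits or the deadline passes.
    deadline = time.monotonic() + timeout
    while proc.poll() is None:
        if time.monotonic() >= deadline:
            # The real wrapper dumps the process tree before terminating anything.
            proc.kill()
            raise ProcessTimeoutError(f"{timeout} second(s) wait timeout has expired")
        time.sleep(poll_interval)
    return proc.returncode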
Process tree before termination: pid rss ref pdirt 107501 46.3M 46.1M 23.3M test_tool run_ut @/home/runner/.ya/build/build_root/0l5c/001af6/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/chunk29/testing_out_stuff/test_tool.args 107928 1.5G 1.5G 1.5G └─ ydb-core-tx-columnshard-ut_rw --trace-path-append /home/runner/.ya/build/build_root/0l5c/001af6/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/chu Test command err: 2025-03-31T16:01:51.643059Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:01:51.814513Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:01:51.840144Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:01:51.840489Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:01:51.849127Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:01:51.849367Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:01:51.849619Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:01:51.849753Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:01:51.849854Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:01:51.849990Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:01:51.850129Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:01:51.850244Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:01:51.850352Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:01:51.850458Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:01:51.850577Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:01:51.850691Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:01:51.886032Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:01:51.886224Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:01:51.886285Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:01:51.886492Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:01:51.886648Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:01:51.886719Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:01:51.886785Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:01:51.886897Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:01:51.886964Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:01:51.887004Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:01:51.887033Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:01:51.887277Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:01:51.887373Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:01:51.887419Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:01:51.887454Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:01:51.887571Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:01:51.887629Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:01:51.887682Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:01:51.887715Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:01:51.887810Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:01:51.887845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:01:51.887879Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:01:51.887947Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:01:51.888000Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:01:51.888038Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:01:51.888451Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=50; 2025-03-31T16:01:51.888534Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=33; 2025-03-31T16:01:51.888614Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=33; 2025-03-31T16:01:51.888716Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=39; 2025-03-31T16:01:51.888872Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:01:51.888950Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:01:51.888993Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:01:51.889199Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:01:51.889260Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:01:51.889293Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:01:51.889459Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:01:51.889507Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:01:51.889538Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-31T16:01:51.889743Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:01:51.889787Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:01:51.889816Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-31T16:01:51.889948Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T16:01:51.889992Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T16:01:51.890039Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
e.cpp:106;method=PutObject;id=[9437184:2:69:255:166:2832:0]; 2025-03-31T16:11:47.782238Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:69:255:167:2848:0]; 2025-03-31T16:11:47.782303Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:69:255:168:2848:0]; 2025-03-31T16:11:47.782363Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:69:255:169:2840:0]; 2025-03-31T16:11:47.782423Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:69:255:170:2800:0]; 2025-03-31T16:11:47.782479Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:69:255:171:2920:0]; 2025-03-31T16:11:47.782540Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:69:255:172:2832:0]; 2025-03-31T16:11:47.782606Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:69:255:173:2800:0]; 2025-03-31T16:11:47.782664Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:69:255:174:2824:0]; 2025-03-31T16:11:47.782725Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:69:255:175:2792:0]; 2025-03-31T16:11:47.782788Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:69:255:176:2856:0]; 2025-03-31T16:11:47.782851Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:69:255:177:2784:0]; 2025-03-31T16:11:47.782909Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:69:255:178:2784:0]; 2025-03-31T16:11:47.782974Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:69:255:179:2800:0]; 2025-03-31T16:11:47.783040Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:69:255:180:2792:0]; 2025-03-31T16:11:47.783132Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:69:255:181:2792:0]; 2025-03-31T16:11:47.783195Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:69:255:182:2792:0]; 2025-03-31T16:11:47.783255Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:69:255:183:2848:0]; 2025-03-31T16:11:47.783315Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:69:255:184:2792:0]; 2025-03-31T16:11:47.783376Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:69:255:185:2784:0]; 2025-03-31T16:11:47.783432Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:69:255:186:2800:0]; 2025-03-31T16:11:47.783490Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:69:255:187:2776:0]; 2025-03-31T16:11:47.783564Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:69:255:188:2776:0]; 2025-03-31T16:11:47.783630Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:69:255:189:2784:0]; 2025-03-31T16:11:47.783692Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:69:255:190:2848:0]; 2025-03-31T16:11:47.783751Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:69:255:191:2784:0]; 2025-03-31T16:11:47.783811Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:69:255:192:2792:0]; 2025-03-31T16:11:47.783866Z node 1 :S3_WRAPPER DEBUG: 
fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:69:255:193:2824:0]; 2025-03-31T16:11:47.783989Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:69:255:194:2776:0]; 2025-03-31T16:11:47.784057Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:69:255:195:2776:0]; 2025-03-31T16:11:47.784120Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:69:255:196:2776:0]; 2025-03-31T16:11:47.784178Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:69:255:197:2816:0]; 2025-03-31T16:11:47.784238Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:69:255:198:2760:0]; 2025-03-31T16:11:47.784303Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:69:255:199:2760:0]; 2025-03-31T16:11:47.784360Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:69:255:200:7800:0]; 2025-03-31T16:11:47.784424Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:69:255:201:2840:0]; 2025-03-31T16:11:47.784482Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:69:255:202:2880:0]; 2025-03-31T16:11:47.784543Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:69:255:203:2840:0]; 2025-03-31T16:11:47.784605Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:69:255:204:2840:0]; 2025-03-31T16:11:47.784673Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:69:255:205:2912:0]; 2025-03-31T16:11:47.784735Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:69:255:206:2840:0]; 2025-03-31T16:11:47.784798Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:69:255:207:2832:0]; 2025-03-31T16:11:47.784859Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:69:255:208:2840:0]; 2025-03-31T16:11:47.784922Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:69:255:209:2792:0]; 2025-03-31T16:11:47.784980Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:69:255:210:2792:0]; 2025-03-31T16:11:47.785043Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:69:255:211:2800:0]; 2025-03-31T16:11:47.785104Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:69:255:212:2792:0]; 2025-03-31T16:11:47.785167Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:69:255:213:2784:0]; 2025-03-31T16:11:47.785225Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:69:255:214:2792:0]; 2025-03-31T16:11:47.785288Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:69:255:215:2816:0]; 2025-03-31T16:11:47.785347Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:69:255:216:2792:0]; 2025-03-31T16:11:47.785413Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:69:255:217:2792:0]; 2025-03-31T16:11:47.785470Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:69:255:218:2784:0]; 2025-03-31T16:11:47.785534Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:69:255:219:2840:0]; 2025-03-31T16:11:47.785596Z node 1 
:S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:69:255:220:2784:0]; 2025-03-31T16:11:47.785661Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:69:255:221:2792:0]; 2025-03-31T16:11:47.785720Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:69:255:222:2840:0]; 2025-03-31T16:11:47.785779Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:69:255:223:2792:0]; 2025-03-31T16:11:47.785838Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:69:255:224:2784:0]; 2025-03-31T16:11:47.785899Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:69:255:225:2784:0]; 2025-03-31T16:11:47.785957Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:69:255:226:2808:0]; 2025-03-31T16:11:47.786012Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:69:255:227:2784:0]; 2025-03-31T16:11:47.786070Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:69:255:228:2784:0]; 2025-03-31T16:11:47.786133Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:69:255:229:2856:0]; 2025-03-31T16:11:47.786196Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:69:255:230:2784:0]; 2025-03-31T16:11:47.786253Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:69:255:231:2768:0]; 2025-03-31T16:11:47.786308Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:69:255:232:2784:0]; 2025-03-31T16:11:47.786369Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:69:255:233:2784:0]; 2025-03-31T16:11:47.786433Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:69:255:234:2776:0]; 2025-03-31T16:11:47.786494Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:69:255:235:2784:0]; 2025-03-31T16:11:47.786557Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:69:255:236:2840:0]; 2025-03-31T16:11:47.786615Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:69:255:237:2768:0]; 2025-03-31T16:11:47.786673Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:69:255:238:2768:0]; 2025-03-31T16:11:47.786730Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:69:255:239:2760:0]; 2025-03-31T16:11:47.786793Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:69:255:240:7768:0]; 2025-03-31T16:11:48.703199Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 9437184 2025-03-31T16:11:48.704152Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[210] (CS::GENERAL) apply at tablet 9437184 2025-03-31T16:11:48.775744Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:87 Blob count: 709 2025-03-31T16:11:48.783014Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=6015332;raw_bytes=9095060;count=3;records=85000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=89645276;raw_bytes=131940390;count=47;records=1285000} inactive {blob_bytes=168719104;raw_bytes=244677952;count=84;records=2406191} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 Traceback (most recent call last): File 
"library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8330113388/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/0l5c/001af6/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/chunk29/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1749, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8330113388/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/0l5c/001af6/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/chunk29/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout",), {}) |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> ReadIteratorExternalBlobs::ExtBlobsWithDeletesInTheMiddle [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsWithFirstRowPreloaded |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_delete_table_that_doesnt_exist_failure [GOOD] |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/ttl_tiering/py3test >> data_correctness.py::TestDataCorrectness::test [GOOD] |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_logged_when_sid_is_unexpected [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/0l5c/001358/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk13/testing_out_stuff/test_auditlog.py.test_dml_requests_logged_when_sid_is_unexpected/audit.txt 2025-03-31T16:11:43.542597Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-03-31T16:11:43.542544Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"insert into `/Root/test_auditlog.py/test-table` (id, value) values (100, 100), (101, 
101)","start_time":"2025-03-31T16:11:43.482636Z","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-03-31T16:11:43.811199Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-03-31T16:11:43.811156Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"delete from `/Root/test_auditlog.py/test-table` where id = 100 or id = 101","start_time":"2025-03-31T16:11:43.651320Z","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-03-31T16:11:44.094236Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-03-31T16:11:44.094197Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"select id from `/Root/test_auditlog.py/test-table`","start_time":"2025-03-31T16:11:43.919986Z","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-03-31T16:11:44.361228Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-03-31T16:11:44.361193Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2025-03-31T16:11:44.203566Z","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-03-31T16:11:44.541318Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-03-31T16:11:44.541283Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"replace into `/Root/test_auditlog.py/test-table` (id, value) values (2, 3), (3, 3)","start_time":"2025-03-31T16:11:44.472217Z","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-03-31T16:11:44.714261Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-03-31T16:11:44.714224Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"upsert into `/Root/test_auditlog.py/test-table` (id, value) values (4, 4), (5, 5)","start_time":"2025-03-31T16:11:44.651753Z","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_same_primary_key_and_other_scheme_then_ok [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/ttl_tiering/py3test >> ttl_unavailable_s3.py::TestUnavailableS3::test [GOOD] Test command err: !!! simulating S3 hang up -- sending SIGSTOP !!! 
simulating S3 recovery -- sending SIGCONT |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_ydb_remove_directory_that_does_not_exist_failure [GOOD] >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[DROP TABLE {}-`.metadata/script_executions`] >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast, DROP COLUMN stats-`.metadata/script_executions`] >> TRegisterCheckTest::ShouldRegisterCheckNewGeneration |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/multi_plane/py3test >> test_dispatch.py::TestMapping::test_idle [GOOD] |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test >> TRegisterCheckTest::ShouldRegisterCheckSameGeneration >> TRegisterCheckTest::ShouldNotRegisterCheckPrevGeneration |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_queue_by_nonexistent_user_fails[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_deduplication_id[tables_format_v1] |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_bad_dynconfig] [GOOD] >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax-`.metadata/script_executions`] |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test >> TFqYdbTest::ShouldStatusToIssuesProcessExceptions [GOOD] |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_copy_ops.py::TestSchemeShardCopyOps::test_given_table_when_create_copy_of_it_then_ok [GOOD] >> TRegisterCheckTest::ShouldRegisterCheckSameGenerationAndTransact >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v0-fifo] >> test_auditlog.py::test_single_dml_query_logged[select] [GOOD] |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test >> TRegisterCheckTest::ShouldRegisterCheckNewGeneration [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v1-fifo] >> TRegisterCheckTest::ShouldRegisterCheckSameGeneration [GOOD] |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test >> TRegisterCheckTest::ShouldNotRegisterCheckPrevGeneration [GOOD] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithPath::test_micro_batch_read[tables_format_v0] >> TRegisterCheckTest::ShouldRegisterCheckNewGenerationAndTransact >> TRegisterCheckTest::ShouldNotRegisterCheckPrevGeneration2 >> TCheckGenerationTest::ShouldRollbackTransactionWhenCheckFails >> TRegisterCheckTest::ShouldRegisterCheckSameGenerationAndTransact [GOOD] >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast-`.metadata/script_executions`] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_set_very_big_visibility_timeout[tables_format_v1] >> TRegisterCheckTest::ShouldNotRegisterCheckPrevGeneration2 [GOOD] >> TRegisterCheckTest::ShouldRegisterCheckNewGenerationAndTransact [GOOD] >> 
TCheckGenerationTest::ShouldRollbackTransactionWhenCheckFails [GOOD] >> TCheckGenerationTest::ShouldRollbackTransactionWhenCheckFails2 |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_add_and_remove_column_many_times_success [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_good_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/0l5c/001348/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk3/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_no_auth-_good_dynconfig/audit.txt 2025-03-31T16:11:48.403961Z: {"sanitized_token":"{none}","subject":"{none}","new_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 0\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","status":"SUCCESS","component":"console","operation":"REPLACE DYNCONFIG","remote_address":"127.0.0.1"} >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v1-fifo] >> TFqYdbTest::ShouldStatusToIssuesProcessEmptyIssues [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_can_read_from_different_groups[tables_format_v0] |85.8%| [TA] $(B)/ydb/tests/functional/scheme_shard/test-results/py3test/{meta.json ... results_accumulator.log} |85.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> TFqYdbTest::ShouldStatusToIssuesProcessExceptions [GOOD] |85.8%| [TA] {RESULT} $(B)/ydb/tests/functional/scheme_shard/test-results/py3test/{meta.json ... 
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_empty_queue_url[tables_format_v0]
>> TCheckGenerationTest::ShouldRollbackTransactionWhenCheckFails2 [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_body[tables_format_v0]
|85.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> TRegisterCheckTest::ShouldRegisterCheckNewGeneration [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v1-std]
|85.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> TRegisterCheckTest::ShouldNotRegisterCheckPrevGeneration [GOOD]
|85.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> TRegisterCheckTest::ShouldRegisterCheckSameGeneration [GOOD]
>> TRegisterCheckTest::ShouldRegisterCheckNextGeneration
>> TPersQueueTest::LOGBROKER_7820 [GOOD]
>> TPersQueueTest::InflightLimit
>> test_auditlog.py::test_dml_requests_logged_when_unauthorized [GOOD]
|85.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test
|85.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> TRegisterCheckTest::ShouldRegisterCheckSameGenerationAndTransact [GOOD]
|85.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test
>> TStorageServiceTest::ShouldRegister
>> TCheckpointStorageTest::ShouldRegisterCoordinator
>> TCheckpointStorageTest::ShouldUpdateCheckpointStatusForCheckpointsWithTheSameGenAndNo
>> TStateStorageTest::ShouldSaveGetOldSmallState
>> TCheckpointStorageTest::ShouldCreateCheckpoint
>> TStateStorageTest::ShouldIssueErrorOnWrongGetStateParams
>> TStateStorageTest::ShouldSaveGetOldSmallState2Tasks
|85.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test
>> TStorageServiceTest::ShouldNotRegisterPrevGeneration
>> TStateStorageTest::ShouldDeleteNoCheckpoints
>> TStateStorageTest::ShouldIssueErrorOnWrongGetStateParams [GOOD]
>> TStateStorageTest::ShouldIssueErrorOnNonExistentState
>> TStorageServiceTest::ShouldNotCreateCheckpointAfterGenerationChanged
>> TRegisterCheckTest::ShouldRegisterCheckNextGeneration [GOOD]
|85.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> TRegisterCheckTest::ShouldRegisterCheckNewGenerationAndTransact [GOOD]
|85.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> TRegisterCheckTest::ShouldNotRegisterCheckPrevGeneration2 [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_cloud_ids_are_logged[attrs0] [GOOD]
Test command err: AAA /home/runner/.ya/build/build_root/0l5c/00134c/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk8/testing_out_stuff/test_auditlog.py.test_cloud_ids_are_logged.attrs0/audit.txt
2025-03-31T16:11:49.152270Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","cloud_id":"cloud-id-A","end_time":"2025-03-31T16:11:49.152215Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2025-03-31T16:11:49.005288Z","subject":"root@builtin","detailed_status":"SUCCESS","resource_id":"database-id-C","operation":"ExecuteDataQueryRequest","folder_id":"folder-id-B","component":"grpc-proxy"}
>> TCheckpointStorageTest::ShouldRegisterCoordinator [GOOD]
>> TCheckpointStorageTest::ShouldGetCoordinators
|85.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> TFqYdbTest::ShouldStatusToIssuesProcessEmptyIssues [GOOD]
>> TCheckpointStorageTest::ShouldCreateCheckpoint [GOOD]
>> TCheckpointStorageTest::ShouldCreateGetCheckpoints
>> TStateStorageTest::ShouldSaveGetOldSmallState [GOOD]
>> TStateStorageTest::ShouldDeleteNoCheckpoints [GOOD]
>> TStateStorageTest::ShouldSaveGetOldSmallState2Tasks [GOOD]
>> TStateStorageTest::ShouldSaveGetOldBigState
>> TStateStorageTest::ShouldDeleteNoCheckpoints2
>> TStorageServiceTest::ShouldCreateCheckpoint
>> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_change_visibility]
|85.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> TCheckGenerationTest::ShouldRollbackTransactionWhenCheckFails2 [GOOD]
>> TStorageServiceTest::ShouldRegister [GOOD]
>> TStorageServiceTest::ShouldRegisterNextGeneration
>> TCheckpointStorageTest::ShouldGetCoordinators [GOOD]
>> TCheckpointStorageTest::ShouldMarkCheckpointsGc
>> TStorageServiceTest::ShouldNotRegisterPrevGeneration [GOOD]
>> TStorageServiceTest::ShouldNotCreateCheckpointWhenUnregistered
>> TStateStorageTest::ShouldSaveGetOldBigState [GOOD]
>> TStateStorageTest::ShouldSaveGetIncrementSmallState
|85.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test
|85.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_body[tables_format_v0]
>> TStateStorageTest::ShouldIssueErrorOnNonExistentState [GOOD]
>> TStateStorageTest::ShouldLoadLastSnapshot
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v0-fifo]
>> TCheckpointStorageTest::ShouldUpdateCheckpointStatusForCheckpointsWithTheSameGenAndNo [GOOD]
>> TGcTest::ShouldRemovePreviousCheckpoints
>> TStateStorageTest::ShouldDeleteNoCheckpoints2 [GOOD]
>> TStateStorageTest::ShouldDeleteCheckpoints
|85.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v0-fifo]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v0-fifo]
>> TStorageServiceTest::ShouldNotCreateCheckpointAfterGenerationChanged [GOOD]
>> TStorageServiceTest::ShouldNotCompleteCheckpointWithoutCreation
>> TStorageServiceTest::ShouldNotCreateCheckpointWhenUnregistered [GOOD]
>> TStorageServiceTest::ShouldNotCreateCheckpointTwice
>> TStateStorageTest::ShouldSaveGetIncrementSmallState [GOOD]
>> TStateStorageTest::ShouldSaveGetIncrementBigState
>> TStorageServiceTest::ShouldRegisterNextGeneration [GOOD]
>> TStorageServiceTest::ShouldPendingAndCompleteCheckpoint
|85.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test >> TStateStorageTest::ShouldLoadLastSnapshot [GOOD]
>> TStateStorageTest::ShouldNotGetNonExistendSnaphotState
|85.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> TRegisterCheckTest::ShouldRegisterCheckNextGeneration [GOOD]
>> TStorageServiceTest::ShouldCreateCheckpoint [GOOD]
>> TStorageServiceTest::ShouldGetCheckpoints
|85.9%| [TA] $(B)/ydb/core/fq/libs/ydb/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|85.9%| [TA] {RESULT} $(B)/ydb/core/fq/libs/ydb/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|85.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_can_read_new_written_data_on_visibility_timeout[tables_format_v1]
>> TCheckpointStorageTest::ShouldCreateGetCheckpoints [GOOD]
>> TCheckpointStorageTest::ShouldGetCheckpointsEmpty
>> TStateStorageTest::ShouldSaveGetIncrementBigState [GOOD]
>> TStateStorageTest::ShouldDeleteCheckpoints [GOOD]
>> TStateStorageTest::ShouldNotGetNonExistendState
>> TStateStorageTest::ShouldDeleteGraph
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v1-std]
>> TStateStorageTest::ShouldNotGetNonExistendSnaphotState [GOOD]
>> TStorageServiceTest::ShouldNotCompleteCheckpointWithoutCreation [GOOD]
>> TStorageServiceTest::ShouldNotAbortCheckpointWithoutCreation
|85.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test
>> TStateStorageTest::ShouldLoadIncrementSnapshot
------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_bad_dynconfig] [GOOD]
Test command err: AAA /home/runner/.ya/build/build_root/0l5c/001323/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk0/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_bad_auth-_bad_dynconfig/audit.txt
2025-03-31T16:12:00.181196Z: {"reason":"ydb/library/fyamlcpp/fyamlcpp.cpp:1053: \n6:12 plain scalar cannot start with '%'","sanitized_token":"**** (C877DF61)","remote_address":"127.0.0.1","status":"ERROR","subject":"__bad__@builtin","operation":"REPLACE DYNCONFIG","new_config":"\n---\n123metadata:\n kind: MainConfig\n cluster: \"\"\n version: %s\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","component":"console"}
|85.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test >> TCheckpointStorageTest::ShouldMarkCheckpointsGc [GOOD]
>> TCheckpointStorageTest::ShouldNotDeleteUnmarkedCheckpoints
>> TCheckpointStorageTest::ShouldGetCheckpointsEmpty [GOOD]
>> TCheckpointStorageTest::ShouldDeleteGraph
>> TStateStorageTest::ShouldNotGetNonExistendState [GOOD]
>> TStateStorageTest::ShouldDeleteGraph [GOOD]
>> TStateStorageTest::ShouldGetMultipleStates
------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[select] [GOOD]
Test command err: AAA /home/runner/.ya/build/build_root/0l5c/00131d/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk19/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.select/audit.txt
2025-03-31T16:12:01.185005Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-03-31T16:12:01.184956Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"select id from `/Root/test_auditlog.py/test-table`","start_time":"2025-03-31T16:12:00.980859Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"}
>> TStorageServiceTest::ShouldNotCreateCheckpointTwice [GOOD]
>> TStorageServiceTest::ShouldNotPendingCheckpointWithoutCreation
|85.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test
|85.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test
|85.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test >> TStorageServiceTest::ShouldGetCheckpoints [GOOD]
>> TStorageServiceTest::ShouldAbortCheckpoint
>> TStateStorageTest::ShouldLoadIncrementSnapshot [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v1-std]
>> TStorageServiceTest::ShouldNotAbortCheckpointWithoutCreation [GOOD]
>> TStorageServiceTest::ShouldNotCompleteCheckpointWithoutPending
>> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithPath::test_micro_batch_read[tables_format_v0] [GOOD]
>> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithPath::test_micro_batch_read[tables_format_v1]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_works[tables_format_v0]
>> test_tablet.py::TestMassiveKills::test_tablets_are_ok_after_many_kills
>> TStorageServiceTest::ShouldPendingAndCompleteCheckpoint [GOOD]
>> TStorageServiceTest::ShouldNotPendingCheckpointWithoutCreation [GOOD]
>> TStorageServiceTest::ShouldSaveState
>> TStorageServiceTest::ShouldNotPendingCheckpointGenerationChanged
>> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithPath::test_micro_batch_read[tables_format_v1] [GOOD]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_delete_message_works[tables_format_v0]
|85.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test
|85.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TStateStorageTest::ShouldNotGetNonExistendState [GOOD]
>> test_disk.py::TestSafeDiskBreak::test_erase_method
>> test_generic_messaging.py::TestYandexAttributesPrefix::test_allows_yandex_message_attribute_prefix[tables_format_v1]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_can_read_from_different_groups[tables_format_v0] [GOOD]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_can_read_from_different_groups[tables_format_v1]
>> TStorageServiceTest::ShouldNotCompleteCheckpointWithoutPending [GOOD]
>> TStorageServiceTest::ShouldNotCompleteCheckpointGenerationChanged
>> TCheckpointStorageTest::ShouldDeleteGraph [GOOD]
>> TCheckpointStorageTest::ShouldDeleteMarkedCheckpoints
------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_logged_when_unauthorized [GOOD]
Test command err: AAA /home/runner/.ya/build/build_root/0l5c/0012ef/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk14/testing_out_stuff/test_auditlog.py.test_dml_requests_logged_when_unauthorized/audit.txt
2025-03-31T16:12:06.918767Z: {"database":"/Root/test_auditlog.py","end_time":"2025-03-31T16:12:06.918714Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"insert into `/Root/test_auditlog.py/test-table` (id, value) values (100, 100), (101, 101)","start_time":"2025-03-31T16:12:06.904600Z","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"}
2025-03-31T16:12:07.054477Z: {"database":"/Root/test_auditlog.py","end_time":"2025-03-31T16:12:07.054434Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"delete from `/Root/test_auditlog.py/test-table` where id = 100 or id = 101","start_time":"2025-03-31T16:12:07.030723Z","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"}
(C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"delete from `/Root/test_auditlog.py/test-table` where id = 100 or id = 101","start_time":"2025-03-31T16:12:07.030723Z","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-03-31T16:12:07.180616Z: {"database":"/Root/test_auditlog.py","end_time":"2025-03-31T16:12:07.180552Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"select id from `/Root/test_auditlog.py/test-table`","start_time":"2025-03-31T16:12:07.165568Z","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-03-31T16:12:07.311991Z: {"database":"/Root/test_auditlog.py","end_time":"2025-03-31T16:12:07.311958Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2025-03-31T16:12:07.290643Z","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-03-31T16:12:07.523338Z: {"database":"/Root/test_auditlog.py","end_time":"2025-03-31T16:12:07.523305Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"replace into `/Root/test_auditlog.py/test-table` (id, value) values (2, 3), (3, 3)","start_time":"2025-03-31T16:12:07.423850Z","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-03-31T16:12:07.653088Z: {"database":"/Root/test_auditlog.py","end_time":"2025-03-31T16:12:07.653054Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"upsert into `/Root/test_auditlog.py/test-table` (id, value) values (4, 4), (5, 5)","start_time":"2025-03-31T16:12:07.636458Z","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} >> TStateStorageTest::ShouldGetMultipleStates [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v1-fifo] [GOOD] >> TGcTest::ShouldRemovePreviousCheckpoints [GOOD] >> TGcTest::ShouldIgnoreIncrementCheckpoint >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_queue_by_nonexistent_user_fails[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v1-std] |85.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TStateStorageTest::ShouldLoadIncrementSnapshot [GOOD] |85.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_deduplication_id[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_group_id[tables_format_v0] >> TStorageServiceTest::ShouldNotPendingCheckpointGenerationChanged [GOOD] >> TStorageServiceTest::ShouldSaveState [GOOD] >> TStorageServiceTest::ShouldUseGc |85.9%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/tools/nemesis/ut/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_read_delete_many_groups[tables_format_v0] >> TCheckpointStorageTest::ShouldNotDeleteUnmarkedCheckpoints [GOOD] >> TCheckpointStorageTest::ShouldRetryOnExistingGraphDescId >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_queue_attributes[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v0] |85.9%| [TA] $(B)/ydb/tests/functional/audit/test-results/py3test/{meta.json ... results_accumulator.log} >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_group_id[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_group_id[tables_format_v1] |85.9%| [TA] {RESULT} $(B)/ydb/tests/functional/audit/test-results/py3test/{meta.json ... results_accumulator.log} >> TStorageServiceTest::ShouldAbortCheckpoint [GOOD] >> TStorageServiceTest::ShouldGetState >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_change_visibility] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_delete_message] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_group_id[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_receive_attempt_id[tables_format_v0] >> TPersQueueTest::TestWriteStat [GOOD] >> TPersQueueTest::TestWriteSessionsConflicts >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_set_very_big_visibility_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attribute_value[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_can_read_from_different_groups[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v1-after_crutch_batch] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v0-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_delete_message] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_timeout_works[tables_format_v0] >> TStorageServiceTest::ShouldNotCompleteCheckpointGenerationChanged [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_receive_attempt_id[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_receive_attempt_id[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v1-std] >> TCheckpointStorageTest::ShouldRetryOnExistingGraphDescId [GOOD] |85.9%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TStateStorageTest::ShouldGetMultipleStates [GOOD] >> TConsoleConfigSubscriptionTests::TestConfigSubscriptionsCleanup [GOOD] >> TConsoleConfigTests::TestAddConfigItem >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_receive_attempt_id[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_body[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_body[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v0-std] |85.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attribute_value[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attribute_value[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_timeout_works[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_body[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_expires_on_wait_timeout[tables_format_v0] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TStorageServiceTest::ShouldNotPendingCheckpointGenerationChanged [GOOD] Test command err: 2025-03-31T16:12:09.839498Z node 1 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [1:7488007169408672973:2048] with connection to localhost:13084:local 2025-03-31T16:12:09.842377Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-03-31T16:12:10.845929Z node 1 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-03-31T16:12:10.845959Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-03-31T16:12:10.847620Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.16] Got TEvRegisterCoordinatorRequest 2025-03-31T16:12:11.046724Z node 1 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.16] Failed to register graph:
: Warning: Table: local/TStorageServiceTestShouldNotRegisterPrevGeneration/coordinators_sync, pk: graph_graphich, current generation: 17, expected/new generation: 16, operation: RegisterCheck, code: 400130
2025-03-31T16:12:11.046765Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.16] Send TEvRegisterCoordinatorResponse
2025-03-31T16:12:12.558826Z node 2 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [2:7488007179782680108:2048] with connection to localhost:13084:local
2025-03-31T16:12:12.558974Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest
2025-03-31T16:12:13.279319Z node 2 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.17] [17:1] Failed to create checkpoint:
: Warning: Table: local/TStorageServiceTestShouldNotCreateCheckpointWhenUnregistered/coordinators_sync, pk: graph_graphich, current generation: 0, expected/new generation: 17, operation: Check, code: 400130
2025-03-31T16:12:13.279394Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse
2025-03-31T16:12:14.596510Z node 3 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [3:7488007188744158365:2048] with connection to localhost:13084:local
2025-03-31T16:12:14.596611Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest
2025-03-31T16:12:14.950634Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered
2025-03-31T16:12:14.950668Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse
2025-03-31T16:12:14.955213Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest
2025-03-31T16:12:16.174234Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created
2025-03-31T16:12:16.174271Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse
2025-03-31T16:12:16.179217Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest
2025-03-31T16:12:16.785328Z node 3 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.17] [17:1] Failed to create checkpoint:
: Error: Constraint violated. Table: `local/TStorageServiceTestShouldNotCreateCheckpointTwice/checkpoints_metadata`., code: 2012
: Error: Duplicate keys have been found., code: 2012
2025-03-31T16:12:16.785380Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse
2025-03-31T16:12:18.365805Z node 4 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [4:7488007205100601759:2048] with connection to localhost:13084:local
2025-03-31T16:12:18.366042Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest
2025-03-31T16:12:18.770177Z node 4 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered
2025-03-31T16:12:18.770212Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse
2025-03-31T16:12:18.770494Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvSetCheckpointPendingCommitStatusRequest
2025-03-31T16:12:19.426374Z node 4 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.17] [17:1] Failed to set 'PendingCommit' status:
: Warning: Failed to select checkpoint '17:1', code: 400080
2025-03-31T16:12:19.426417Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvSetCheckpointPendingCommitStatusResponse
2025-03-31T16:12:21.288889Z node 5 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [5:7488007220219759963:2048] with connection to localhost:13084:local
2025-03-31T16:12:21.288979Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest
2025-03-31T16:12:21.654819Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered
2025-03-31T16:12:21.654851Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse
2025-03-31T16:12:21.655444Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest
2025-03-31T16:12:23.099485Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created
2025-03-31T16:12:23.099522Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse
2025-03-31T16:12:23.101612Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Got TEvRegisterCoordinatorRequest
2025-03-31T16:12:23.575522Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.18] Graph registered
2025-03-31T16:12:23.575557Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Send TEvRegisterCoordinatorResponse
2025-03-31T16:12:23.576169Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvSetCheckpointPendingCommitStatusRequest
2025-03-31T16:12:23.881960Z node 5 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.17] [17:1] Failed to set 'PendingCommit' status:
: Warning: Table: local/TStorageServiceTestShouldNotPendingCheckpointGenerationChanged/coordinators_sync, pk: graph_graphich, current generation: 18, expected/new generation: 17, operation: Check, code: 400130
2025-03-31T16:12:23.882000Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvSetCheckpointPendingCommitStatusResponse
>> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v1-after_crutch_batch]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v1-std] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_to_nonexistent_queue[tables_format_v0]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attribute_value[tables_format_v1] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attributes[tables_format_v0]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_empty_queue_url[tables_format_v0] [GOOD]
>> TConsoleConfigTests::TestAddConfigItem [GOOD]
>> TConsoleConfigTests::TestAutoKind
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_empty_queue_url[tables_format_v1]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v0-std] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v1-fifo]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v0-fifo] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v0-std]
>> TStorageServiceTest::ShouldGetState [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v0] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v1]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v0-std] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v1-fifo]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_empty_queue_url[tables_format_v1] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v0-fifo]
>> TConsoleConfigTests::TestAutoKind [GOOD]
>> TConsoleConfigTests::TestAllowedScopes
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v1-fifo] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v1-std]
|86.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_body[tables_format_v0] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_body[tables_format_v1]
>> TStorageServiceTest::ShouldUseGc [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v1-std] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-fifo]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v1-fifo] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v1-std]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_to_nonexistent_queue[tables_format_v0] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_to_nonexistent_queue[tables_format_v1]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attributes[tables_format_v0] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attributes[tables_format_v1]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v0-fifo] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v0-std]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v1-std] [GOOD]
>> TCheckpointStorageTest::ShouldDeleteMarkedCheckpoints [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v0-fifo]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_body[tables_format_v1] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_expires_on_wait_timeout[tables_format_v0]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_to_nonexistent_queue[tables_format_v1] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attributes[tables_format_v1] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v1-std] [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TStorageServiceTest::ShouldNotCompleteCheckpointGenerationChanged [GOOD]
Test command err: 2025-03-31T16:12:10.428326Z node 1 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [1:7488007169590262163:2048] with connection to localhost:14467:local
2025-03-31T16:12:10.428460Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest
2025-03-31T16:12:11.443384Z node 1 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered
2025-03-31T16:12:11.443416Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse
2025-03-31T16:12:11.443759Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest
2025-03-31T16:12:12.587251Z node 1 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created
2025-03-31T16:12:12.587299Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse
2025-03-31T16:12:12.587692Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Got TEvRegisterCoordinatorRequest
2025-03-31T16:12:12.899878Z node 1 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.18] Graph registered
2025-03-31T16:12:12.899941Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Send TEvRegisterCoordinatorResponse
2025-03-31T16:12:12.900261Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCreateCheckpointRequest
2025-03-31T16:12:13.192317Z node 1 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.17] [17:2] Failed to create checkpoint:
: Warning: Table: local/TStorageServiceTestShouldNotCreateCheckpointAfterGenerationChanged/coordinators_sync, pk: graph_graphich, current generation: 18, expected/new generation: 17, operation: Check, code: 400130
2025-03-31T16:12:13.192352Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCreateCheckpointResponse
2025-03-31T16:12:14.773053Z node 2 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [2:7488007187808882424:2048] with connection to localhost:14467:local
2025-03-31T16:12:14.773410Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest
2025-03-31T16:12:15.088049Z node 2 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered
2025-03-31T16:12:15.088075Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse
2025-03-31T16:12:15.088392Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCompleteCheckpointRequest
2025-03-31T16:12:15.451785Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Failed to set 'Completed' status:
: Warning: Failed to select checkpoint '17:1', code: 400080
2025-03-31T16:12:15.451824Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCompleteCheckpointResponse
2025-03-31T16:12:17.055205Z node 3 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [3:7488007196222511933:2048] with connection to localhost:14467:local
2025-03-31T16:12:17.063141Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest
2025-03-31T16:12:17.353025Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered
2025-03-31T16:12:17.353051Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse
2025-03-31T16:12:17.353476Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvAbortCheckpointRequest
2025-03-31T16:12:17.943533Z node 3 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.17] [17:1] Failed to abort checkpoint:
: Warning: Failed to select checkpoint '17:1', code: 400080
2025-03-31T16:12:17.943578Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvAbortCheckpointResponse
2025-03-31T16:12:20.113804Z node 4 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [4:7488007211053325612:2048] with connection to localhost:14467:local
2025-03-31T16:12:20.113907Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest
2025-03-31T16:12:20.473166Z node 4 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered
2025-03-31T16:12:20.473199Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse
2025-03-31T16:12:20.473774Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest
2025-03-31T16:12:22.085749Z node 4 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created
2025-03-31T16:12:22.085775Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse
2025-03-31T16:12:22.086325Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCompleteCheckpointRequest
2025-03-31T16:12:22.676961Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Failed to set 'Completed' status:
: Warning: Selected checkpoint '17:1' with status Pending, while expected PendingCommit, code: 400080
2025-03-31T16:12:22.676994Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCompleteCheckpointResponse
2025-03-31T16:12:25.135355Z node 5 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [5:7488007232150146572:2048] with connection to localhost:14467:local
2025-03-31T16:12:25.135472Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest
2025-03-31T16:12:25.600740Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered
2025-03-31T16:12:25.600768Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse
2025-03-31T16:12:25.601453Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest
2025-03-31T16:12:27.969008Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created
2025-03-31T16:12:27.969041Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse
2025-03-31T16:12:27.973019Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvSetCheckpointPendingCommitStatusRequest
2025-03-31T16:12:28.966761Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Status updated to 'PendingCommit'
2025-03-31T16:12:28.966810Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvSetCheckpointPendingCommitStatusResponse
2025-03-31T16:12:28.968781Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Got TEvRegisterCoordinatorRequest
2025-03-31T16:12:29.419857Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.18] Graph registered
2025-03-31T16:12:29.419888Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Send TEvRegisterCoordinatorResponse
2025-03-31T16:12:29.423548Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCompleteCheckpointRequest
2025-03-31T16:12:29.678409Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Failed to set 'Completed' status:
: Warning: Table: local/TStorageServiceTestShouldNotPendingCheckpointGenerationChanged/coordinators_sync, pk: graph_graphich, current generation: 18, expected/new generation: 17, operation: Check, code: 400130
2025-03-31T16:12:29.678446Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCompleteCheckpointResponse
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_set_very_big_visibility_timeout[tables_format_v0]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v1-std] [GOOD]
>> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithTenant::test_micro_batch_read[tables_format_v0]
>> TConsoleConfigTests::TestAllowedScopes [GOOD]
>> TConsoleConfigTests::TestAffectedConfigs
|86.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TCheckpointStorageTest::ShouldRetryOnExistingGraphDescId [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-fifo] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-std]
>> TGcTest::ShouldIgnoreIncrementCheckpoint [GOOD]
>> TStateStorageTest::ShouldCountStates
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_works[tables_format_v0]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v0-std] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v0-std] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v1-fifo]
>> TConsoleConfigTests::TestAffectedConfigs [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_set_very_big_visibility_timeout[tables_format_v0] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_invalid_queue_url[tables_format_v0]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v0-fifo]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TStorageServiceTest::ShouldGetState [GOOD]
Test command err: 2025-03-31T16:12:12.549592Z node 1 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [1:7488007179495064183:2048] with connection to localhost:3411:local
2025-03-31T16:12:12.549725Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest
2025-03-31T16:12:12.770201Z node 1 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered
2025-03-31T16:12:12.770246Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse
2025-03-31T16:12:12.770797Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest
2025-03-31T16:12:14.030165Z node 1 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created
2025-03-31T16:12:14.030206Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse
2025-03-31T16:12:15.492272Z node 2 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [2:7488007194875886435:2048] with connection to localhost:3411:local
2025-03-31T16:12:15.492387Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest
2025-03-31T16:12:15.809563Z node 2 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered
2025-03-31T16:12:15.809598Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse
2025-03-31T16:12:15.810387Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest
2025-03-31T16:12:16.901002Z node 2 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created
2025-03-31T16:12:16.901035Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse
2025-03-31T16:12:16.913688Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCreateCheckpointRequest
2025-03-31T16:12:17.261855Z node 2 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Checkpoint created
2025-03-31T16:12:17.261889Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCreateCheckpointResponse
2025-03-31T16:12:17.263939Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Got TEvCreateCheckpointRequest
2025-03-31T16:12:17.640585Z node 2 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:3] Checkpoint created
2025-03-31T16:12:17.640631Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Send TEvCreateCheckpointResponse
2025-03-31T16:12:17.663155Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest
2025-03-31T16:12:18.093851Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse
2025-03-31T16:12:20.297073Z node 3 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [3:7488007208875033788:2048] with connection to localhost:3411:local
2025-03-31T16:12:20.297157Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest
2025-03-31T16:12:20.909759Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered
2025-03-31T16:12:20.909791Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse
2025-03-31T16:12:20.923177Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest
2025-03-31T16:12:22.457606Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created
2025-03-31T16:12:22.457646Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse
2025-03-31T16:12:22.458100Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvSetCheckpointPendingCommitStatusRequest
2025-03-31T16:12:23.178717Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Status updated to 'PendingCommit'
2025-03-31T16:12:23.178759Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvSetCheckpointPendingCommitStatusResponse
2025-03-31T16:12:23.180390Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCreateCheckpointRequest
2025-03-31T16:12:23.724316Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Checkpoint created
2025-03-31T16:12:23.724358Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCreateCheckpointResponse
2025-03-31T16:12:23.729026Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvSetCheckpointPendingCommitStatusRequest
2025-03-31T16:12:24.143203Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Status updated to 'PendingCommit'
2025-03-31T16:12:24.143241Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvSetCheckpointPendingCommitStatusResponse
2025-03-31T16:12:24.153260Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCompleteCheckpointRequest
2025-03-31T16:12:24.610857Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Status updated to 'Completed'
2025-03-31T16:12:24.610900Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCompleteCheckpointResponse
2025-03-31T16:12:24.611376Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvAbortCheckpointRequest
2025-03-31T16:12:24.992557Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint aborted
2025-03-31T16:12:24.992598Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvAbortCheckpointResponse
2025-03-31T16:12:24.995206Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvAbortCheckpointRequest
2025-03-31T16:12:25.544424Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Checkpoint aborted
2025-03-31T16:12:25.544466Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvAbortCheckpointResponse
2025-03-31T16:12:25.546971Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest
2025-03-31T16:12:26.143697Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse
2025-03-31T16:12:28.511216Z node 4 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [4:7488007250313850617:2048] with connection to localhost:3411:local
2025-03-31T16:12:28.511352Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest
2025-03-31T16:12:29.273457Z node 4 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered
2025-03-31T16:12:29.273490Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse
2025-03-31T16:12:29.275011Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest
2025-03-31T16:12:30.953888Z node 4 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created
2025-03-31T16:12:30.953926Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse
2025-03-31T16:12:30.958567Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] Got TEvSaveTaskState: task 1317
2025-03-31T16:12:31.334097Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] TEvSaveTaskState Apply: task: 1317
2025-03-31T16:12:31.334156Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] Send TEvSaveTaskStateResult: task: 1317
2025-03-31T16:12:31.340126Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] Got TEvGetTaskState: tasks {1317}
2025-03-31T16:12:31.340173Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] GetState, tasks: 1317
2025-03-31T16:12:32.399006Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] ListOfStates results:
2025-03-31T16:12:32.399138Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] taskId 1317 checkpoint id: 17:1, rows count: 1
2025-03-31T16:12:32.399184Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] SkipStatesInFuture, skip 0 checkpoints
2025-03-31T16:12:32.518329Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] SelectState: task_id 1317, seq_no 1, blob_seq_num 0
2025-03-31T16:12:33.423148Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] DeserializeState, task id 1317, blob size 49
2025-03-31T16:12:33.423247Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] ApplyIncrements
2025-03-31T16:12:33.455031Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [{ Id: 1 Generation: 17 }] Send TEvGetTaskStateResult: tasks: {1317}
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-std] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-fifo]
>> TStateStorageTest::ShouldCountStates [GOOD]
>> TStateStorageTest::ShouldCountStatesNonExistentCheckpoint
------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut/unittest >> TConsoleConfigTests::TestAffectedConfigs [GOOD]
Test command err: 2025-03-31T16:04:57.245630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-31T16:04:57.245708Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:04:57.300008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-03-31T16:04:59.552324Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-31T16:04:59.552395Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:04:59.652287Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-03-31T16:05:01.442436Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-31T16:05:01.442511Z node 3 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:05:01.524444Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-03-31T16:05:02.903057Z node 4 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-31T16:05:02.903148Z node 4 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:05:02.945148Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-03-31T16:05:04.319795Z node 5 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-31T16:05:04.319871Z node 5 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:05:04.388639Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-03-31T16:05:06.113505Z node 6 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-31T16:05:06.113581Z node 6 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:05:06.163573Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-03-31T16:05:07.652461Z node 7 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-31T16:05:07.652541Z node 7 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:05:07.708939Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-03-31T16:05:09.014613Z node 8 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-31T16:05:09.014715Z node 8 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:05:09.075261Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-03-31T16:05:11.291437Z node 9 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-31T16:05:11.291515Z node 9 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:05:11.344114Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-03-31T16:05:13.171860Z node 10 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-31T16:05:13.171956Z node 10 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:05:13.256641Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-03-31T16:05:15.089339Z node 11 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-31T16:05:15.089407Z node 11 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:05:15.127375Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-03-31T16:05:16.838735Z node 12 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-31T16:05:16.838828Z node 12 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:05:16.894260Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-03-31T16:05:18.736192Z node 13 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-31T16:05:18.736273Z node 13 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:05:18.786468Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-03-31T16:05:20.708016Z node 14 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-31T16:05:20.708113Z node 14 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:05:20.753744Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-03-31T16:05:22.443958Z node 15 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-31T16:05:22.444047Z node 15 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:05:22.495822Z node 15 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-03-31T16:05:24.169766Z node 16 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-31T16:05:24.169852Z node 16 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:05:24.212059Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-03-31T16:05:26.297608Z node 17 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-31T16:05:26.297701Z node 17 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:05:26.337743Z node 17 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-03-31T16:05:26.842840Z node 17 :CMS_CONFIGS ERROR: Unexpected config sender died for subscription id=1
2025-03-31T16:05:27.500777Z node 18 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-31T16:05:27.500887Z node 18 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:05:27.553799Z node 18 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-03-31T16:05:28.398809Z node 18 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs
2025-03-31T16:05:28.398883Z node 18 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:05:28.494740Z node 18 :CMS_CONFIGS ERROR: Couldn't deliver config notification for subscription id=1 tabletid=8651011 serviceid=[0:0:0] nodeid=1 host=host1 tenant=tenant1 nodetype=type1 kinds=2 lastprovidedconfig=
2025-03-31T16:05:29.137576Z node 19 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-31T16:05:29.137669Z node 19 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:05:29.183967Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-03-31T16:05:30.006698Z node 19 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs
2025-03-31T16:05:30.006811Z node 19 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:05:30.106574Z node 19 :CMS_CONFIGS ERROR: Couldn't deliver config notification for subscription id=1 tabletid=0 serviceid=[19:8246204620103118691:7960687] nodeid=1 host=host1 tenant=tenant1 nodetype=type1 kinds=2 lastprovidedconfig=
2025-03-31T16:05:30.812363Z node 20 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-31T16:05:30.812481Z node 20 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:05:30.892799Z node 20 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-03-31T16:05:35.216470Z node 21 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-31T16:05:35.216564Z node 21 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:05:35.279023Z node 21 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-03-31T16:05:39.840516Z node 22 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-31T16:05:39.840615Z node 22 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:05:39.919369Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain,
opId: 1:0, at schemeshard: 72057594046578944 2025-03-31T16:05:42.265662Z node 23 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:05:42.265755Z node 23 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:05:42.332753Z node 23 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-31T16:05:44.890974Z node 24 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:05:44.891073Z node 24 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:05:44.984711Z node 24 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-31T16:05:52.432014Z node 24 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:05:52.432149Z node 24 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:10:18.640031Z node 24 :CMS_CONFIGS ERROR: Couldn't deliver config notification for subscription id=1 tabletid=0 serviceid=[100:28538277257700723:0] nodeid=100 host=host100 tenant=tenant-100 nodetype=type100 kinds=2 lastprovidedconfig= 2025-03-31T16:10:20.931274Z node 25 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:10:20.931385Z node 25 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:10:21.052556Z node 25 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-31T16:10:28.763524Z node 25 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:10:28.763643Z node 25 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:12:32.168837Z node 26 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:12:32.168943Z node 26 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:12:32.232767Z node 26 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-31T16:12:34.226422Z node 27 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:12:34.226523Z node 27 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:12:34.280595Z node 27 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-31T16:12:36.029079Z node 28 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:12:36.029181Z node 28 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:12:36.104800Z node 28 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-31T16:12:37.971555Z node 29 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:12:37.971687Z node 29 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:12:38.042950Z node 29 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v1] [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TStorageServiceTest::ShouldUseGc [GOOD] Test command err: 2025-03-31T16:12:10.188622Z node 1 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [1:7488007166147870241:2048] with connection to localhost:23759:local 2025-03-31T16:12:10.189022Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-03-31T16:12:11.166180Z node 1 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-03-31T16:12:11.166220Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-03-31T16:12:12.551912Z node 2 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [2:7488007180393790329:2048] with connection to localhost:23759:local 2025-03-31T16:12:12.552014Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-03-31T16:12:12.773629Z node 2 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-03-31T16:12:12.773679Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-03-31T16:12:12.774819Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Got TEvRegisterCoordinatorRequest 2025-03-31T16:12:13.182033Z node 2 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.18] Graph registered 2025-03-31T16:12:13.182056Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Send TEvRegisterCoordinatorResponse 2025-03-31T16:12:13.187146Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-03-31T16:12:13.452344Z node 2 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.17] Failed to register graph:
: Warning: Table: local/TStorageServiceTestShouldRegisterNextGeneration/coordinators_sync, pk: graph_graphich, current generation: 18, expected/new generation: 17, operation: RegisterCheck, code: 400130 2025-03-31T16:12:13.452380Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-03-31T16:12:15.331560Z node 3 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [3:7488007195412386723:2048] with connection to localhost:23759:local 2025-03-31T16:12:15.331697Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-03-31T16:12:15.678631Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-03-31T16:12:15.678663Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-03-31T16:12:15.681535Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-03-31T16:12:17.347526Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2025-03-31T16:12:17.347564Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-03-31T16:12:17.350120Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvSetCheckpointPendingCommitStatusRequest 2025-03-31T16:12:18.094370Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Status updated to 'PendingCommit' 2025-03-31T16:12:18.094424Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvSetCheckpointPendingCommitStatusResponse 2025-03-31T16:12:18.095662Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCreateCheckpointRequest 2025-03-31T16:12:18.433734Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Checkpoint created 2025-03-31T16:12:18.433768Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCreateCheckpointResponse 2025-03-31T16:12:18.438674Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvSetCheckpointPendingCommitStatusRequest 2025-03-31T16:12:18.831459Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Status updated to 'PendingCommit' 2025-03-31T16:12:18.831505Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvSetCheckpointPendingCommitStatusResponse 2025-03-31T16:12:18.833902Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCompleteCheckpointRequest 2025-03-31T16:12:19.186101Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Status updated to 'Completed' 2025-03-31T16:12:19.186137Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCompleteCheckpointResponse 2025-03-31T16:12:19.187891Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-03-31T16:12:19.574344Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-03-31T16:12:21.696883Z node 4 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [4:7488007220682116473:2048] with connection to localhost:23759:local 2025-03-31T16:12:21.697945Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-03-31T16:12:22.050164Z node 4 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-03-31T16:12:22.050202Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 
2025-03-31T16:12:22.050869Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-03-31T16:12:23.666848Z node 4 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2025-03-31T16:12:23.666885Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-03-31T16:12:23.669201Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] Got TEvSaveTaskState: task 1317 2025-03-31T16:12:23.979517Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] TEvSaveTaskState Apply: task: 1317 2025-03-31T16:12:23.981059Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] Send TEvSaveTaskStateResult: task: 1317 2025-03-31T16:12:26.456488Z node 5 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [5:7488007237792432730:2048] with connection to localhost:23759:local 2025-03-31T16:12:26.456549Z node 5 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped storage GC [5:7488007246382367431:2130] 2025-03-31T16:12:26.456604Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-03-31T16:12:26.907441Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-03-31T16:12:26.907474Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-03-31T16:12:26.908397Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-03-31T16:12:28.687172Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2025-03-31T16:12:28.687204Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-03-31T16:12:28.687804Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvSetCheckpointPendingCommitStatusRequest 2025-03-31T16:12:29.566057Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Status updated to 'PendingCommit' 2025-03-31T16:12:29.566091Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvSetCheckpointPendingCommitStatusResponse 2025-03-31T16:12:29.567757Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCompleteCheckpointRequest 2025-03-31T16:12:30.046019Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Status updated to 'Completed' 2025-03-31T16:12:30.046087Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvNewCheckpointSucceeded 2025-03-31T16:12:30.046122Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCompleteCheckpointResponse 2025-03-31T16:12:30.048498Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: GC received upperbound checkpoint 17:1 for graph 'graph_graphich' 2025-03-31T16:12:30.048940Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCreateCheckpointRequest 2025-03-31T16:12:30.496089Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Checkpoint created 2025-03-31T16:12:30.496128Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCreateCheckpointResponse 2025-03-31T16:12:30.496803Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvSetCheckpointPendingCommitStatusRequest 2025-03-31T16:12:30.749837Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Status updated to 'PendingCommit' 2025-03-31T16:12:30.749883Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send 
TEvSetCheckpointPendingCommitStatusResponse 2025-03-31T16:12:30.751133Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCompleteCheckpointRequest 2025-03-31T16:12:30.956413Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Status updated to 'Completed' 2025-03-31T16:12:30.956450Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvNewCheckpointSucceeded 2025-03-31T16:12:30.956491Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCompleteCheckpointResponse 2025-03-31T16:12:30.956876Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: GC received upperbound checkpoint 17:2 for graph 'graph_graphich' 2025-03-31T16:12:30.957865Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Got TEvCreateCheckpointRequest 2025-03-31T16:12:31.204832Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:3] Checkpoint created 2025-03-31T16:12:31.204865Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Send TEvCreateCheckpointResponse 2025-03-31T16:12:31.205512Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Got TEvSetCheckpointPendingCommitStatusRequest 2025-03-31T16:12:31.403482Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:3] Status updated to 'PendingCommit' 2025-03-31T16:12:31.403518Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Send TEvSetCheckpointPendingCommitStatusResponse 2025-03-31T16:12:31.404299Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Got TEvCompleteCheckpointRequest 2025-03-31T16:12:31.432371Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: GC deleted checkpoints of graph 'graph_graphich' up to 17:1 2025-03-31T16:12:31.434584Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: GC deleted checkpoints of graph 'graph_graphich' up to 17:2 2025-03-31T16:12:31.755042Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:3] Status updated to 'Completed' 2025-03-31T16:12:31.755109Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Send TEvNewCheckpointSucceeded 2025-03-31T16:12:31.755140Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Send TEvCompleteCheckpointResponse 2025-03-31T16:12:31.755336Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: GC received upperbound checkpoint 17:3 for graph 'graph_graphich' 2025-03-31T16:12:31.759601Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-03-31T16:12:31.875046Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: GC deleted checkpoints of graph 'graph_graphich' up to 17:3 2025-03-31T16:12:32.375927Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-03-31T16:12:32.478877Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-03-31T16:12:32.504250Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-03-31T16:12:32.609499Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-03-31T16:12:32.631696Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-03-31T16:12:32.735516Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-03-31T16:12:32.755592Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-03-31T16:12:32.863486Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got 
TEvGetCheckpointsMetadataRequest 2025-03-31T16:12:32.883705Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-03-31T16:12:32.989883Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-03-31T16:12:33.009749Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-03-31T16:12:33.115618Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-03-31T16:12:33.135684Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-03-31T16:12:33.239356Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-03-31T16:12:33.252942Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-03-31T16:12:33.355746Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-03-31T16:12:33.380217Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-03-31T16:12:33.483588Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-03-31T16:12:33.496835Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-03-31T16:12:33.599678Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-03-31T16:12:33.623694Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-03-31T16:12:33.725470Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-03-31T16:12:33.785382Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-03-31T16:12:33.891753Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-03-31T16:12:33.916537Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-03-31T16:12:34.022610Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-03-31T16:12:34.048144Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-03-31T16:12:34.152488Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-03-31T16:12:34.181315Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-03-31T16:12:34.284039Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-03-31T16:12:34.332058Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-03-31T16:12:34.443215Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-03-31T16:12:34.472431Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-03-31T16:12:34.581480Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-03-31T16:12:34.601168Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-03-31T16:12:34.703628Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-03-31T16:12:34.725684Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send 
TEvGetCheckpointsMetadataResponse 2025-03-31T16:12:34.826792Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-03-31T16:12:34.858846Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-03-31T16:12:34.964941Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-03-31T16:12:35.020174Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-03-31T16:12:35.125159Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-03-31T16:12:35.175914Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v1-fifo] >> test_generic_messaging.py::TestYandexAttributesPrefix::test_allows_yandex_message_attribute_prefix[tables_format_v1] [GOOD] >> TStateStorageTest::ShouldCountStatesNonExistentCheckpoint [GOOD] |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TCheckpointStorageTest::ShouldDeleteMarkedCheckpoints [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_works[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_invalid_queue_url[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_expires_on_wait_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_expires_on_wait_timeout[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_set_very_big_visibility_timeout[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v1-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_timeout_works[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_timeout_works[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v0-fifo] [GOOD] 
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_can_read_new_written_data_on_visibility_timeout[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_queue_by_nonexistent_user_fails[tables_format_v1] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TStateStorageTest::ShouldCountStatesNonExistentCheckpoint [GOOD] Test command err: 2025-03-31T16:12:17.643762Z node 1 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped storage GC [1:36:2083] Count graph descriptions query: --!syntax_v1 PRAGMA TablePathPrefix("local/TGcTestShouldRemovePreviousCheckpoints"); SELECT * FROM checkpoints_graphs_description; 2025-03-31T16:12:18.134597Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: GC received upperbound checkpoint 11:3 for graph 'graph' 2025-03-31T16:12:19.054044Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: GC deleted checkpoints of graph 'graph' up to 11:3 Count graph descriptions query: --!syntax_v1 PRAGMA TablePathPrefix("local/TGcTestShouldRemovePreviousCheckpoints"); SELECT * FROM checkpoints_graphs_description; 2025-03-31T16:12:33.361896Z node 2 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped storage GC [2:36:2083] Count graph descriptions query: --!syntax_v1 PRAGMA TablePathPrefix("local/ShouldIgnoreIncrementCheckpoint"); SELECT * FROM checkpoints_graphs_description; 2025-03-31T16:12:33.965706Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: GC received upperbound checkpoint 11:3 for graph 'graph' 2025-03-31T16:12:33.965794Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: GC skip increment checkpoint for graph 'graph' >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast, DROP COLUMN stats-`.metadata/script_executions`] [GOOD] >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[DROP TABLE {}-`.metadata/script_executions`] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v1-std] |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_receive_attempt_id[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v1-std] [GOOD] |86.0%| [TA] $(B)/ydb/core/fq/libs/checkpoint_storage/ut/test-results/unittest/{meta.json ... results_accumulator.log} |86.0%| [TA] {RESULT} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_expires_on_wait_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_expires_on_wait_timeout[tables_format_v1] >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast-`.metadata/script_executions`] [GOOD] >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax-`.metadata/script_executions`] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_queue_by_nonexistent_user_fails[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_queue_by_nonexistent_user_fails[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-std] >> test_generic_messaging.py::TestYandexAttributesPrefix::test_allows_yandex_message_attribute_prefix[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attributes[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_queue_attributes[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_read_dont_stall[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_read_delete_many_groups[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_read_delete_many_groups[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-fifo] [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v1-fifo] >> TPersQueueTest::TestWriteSessionsConflicts [GOOD] >> TPersQueueTest::TestReadRuleServiceTypePassword >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_invalid_queue_url[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_invalid_queue_url[tables_format_v1] |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_set_very_big_visibility_timeout[tables_format_v0] [GOOD] >> test_disk.py::TestSafeDiskBreak::test_erase_method [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_invalid_queue_url[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_list_queues_of_nonexistent_user[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_list_queues_of_nonexistent_user[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_list_queues_of_nonexistent_user[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_expires_on_wait_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_works[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v1-fifo] |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_list_queues_of_nonexistent_user[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_multi_read_dont_stall[tables_format_v0] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v0-long_polling-fifo] >> ReadIteratorExternalBlobs::ExtBlobsWithFirstRowPreloaded [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsWithFirstRowPreloadedWithReboot >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v1-by_deduplication_id] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_delete_message_works[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_delete_message_works[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_works[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_expires_on_wait_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_queue_by_nonexistent_user_fails[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_timeout_works[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_and_read_to_different_groups[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_queue_by_nonexistent_user_fails[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_can_read_new_written_data_on_visibility_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_message_visibility_with_very_big_timeout[tables_format_v0] |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v1-fifo] [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_queue_by_nonexistent_user_fails[tables_format_v1] [GOOD] |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v0-fifo] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithTenant::test_micro_batch_read[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithTenant::test_micro_batch_read[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_set_very_big_visibility_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attribute_value[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_works[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v0-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_and_read_to_different_groups[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_and_read_to_different_groups[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_works[tables_format_v1] |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v1-std] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithTenant::test_micro_batch_read[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attribute_value[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attribute_value[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_works[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_read_delete_many_groups[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_message_visibility_with_very_big_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_message_visibility_with_very_big_timeout[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_multi_read_dont_stall[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_works[tables_format_v1] [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attribute_value[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_attribute_name[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attributes[tables_format_v0] |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_queue_by_nonexistent_user_fails[tables_format_v0] [GOOD] >> TPersQueueTest::InflightLimit [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_and_read_to_different_groups[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_message_visibility_with_very_big_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v0-fifo] >> test_generic_messaging.py::TestYandexAttributesPrefix::test_allows_yandex_message_attribute_prefix[tables_format_v0] [GOOD] |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test >> test_disk.py::TestSafeDiskBreak::test_erase_method [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_delete_message_works[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_fifo_read_delete_single_message >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attributes[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attributes[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_works[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_attribute_name[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_attribute_name[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v0-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_read_dont_stall[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_read_dont_stall[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attributes[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_read_dont_stall[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v0-std] [GOOD] >> 
test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v1-after_crutch_batch] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v1-standard_mode] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_invalid_queue_url[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_invalid_queue_url[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_attribute_name[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_delete_fails[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_invalid_queue_url[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_list_queues_of_nonexistent_user[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes_batch[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_list_queues_of_nonexistent_user[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_list_queues_of_nonexistent_user[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_list_queues_of_nonexistent_user[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_multi_read_dont_stall[tables_format_v0] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest >> TPersQueueTest::InflightLimit [GOOD] Test command err: 2025-03-31T16:06:57.158593Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005831809219853:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:06:57.158660Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:06:57.253768Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488005833490409648:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:06:57.253818Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:06:57.609225Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-31T16:06:57.644747Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/0l5c/000956/r3tmp/tmpQeD5xx/pdisk_1.dat 2025-03-31T16:06:58.203040Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:06:58.228334Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:06:58.228465Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:06:58.229220Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:06:58.229267Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:06:58.242359Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:06:58.242540Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:06:58.243982Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:06:58.259470Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:06:58.303882Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24161, node 1 2025-03-31T16:06:58.340656Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:06:58.344039Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:06:58.596111Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/0l5c/000956/r3tmp/yandexz9RO7b.tmp 2025-03-31T16:06:58.596143Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/0l5c/000956/r3tmp/yandexz9RO7b.tmp 2025-03-31T16:06:58.596320Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/0l5c/000956/r3tmp/yandexz9RO7b.tmp 2025-03-31T16:06:58.596477Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:06:58.682665Z INFO: TTestServer started on Port 4418 GrpcPort 24161 TClient is connected to server localhost:4418 PQClient connected to localhost:24161 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:06:59.067904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:06:59.186555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-03-31T16:07:02.005808Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005854965246459:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:07:02.005888Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488005854965246447:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:07:02.006218Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:07:02.015615Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-03-31T16:07:02.066771Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488005854965246462:2314], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-03-31T16:07:02.154506Z node 2 :TX_PROXY ERROR: Actor# [2:7488005854965246490:2131] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:07:02.255181Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488005833490409648:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:07:02.255264Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:07:02.534612Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488005831809219853:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:07:02.534933Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:07:02.566620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:07:02.641058Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488005853284057572:2349], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:07:02.641258Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=M2YwYjRmNjAtZGVhOTc5YTQtYzA1MTgzNDktMjFmNWNlODk=, ActorId: [1:7488005853284057524:2342], ActorState: ExecuteState, TraceId: 01jqpf9m6mdst4mshajdndmaw6, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:07:02.642800Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T16:07:02.652261Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7488005854965246505:2318], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:07:02.652487Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YWEwNGM5N2QtNmU4MjkyMzYtMTUwYzVjZmQtYTFjYTNkNjI=, ActorId: [2:7488005854965246445:2309], ActorState: ExecuteState, TraceId: 01jqpf9m3k1q8qws1x3nkp5yz3, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:07:02.652895Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T16:07:02.716677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:07:02.876436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-03-31T16:07:03.240963Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jqpf9n4e4yz4weebeakf0zf9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDYxMGI5OTUtZjY2YTM0NzAtMTI3NGUwZmEtYzkwNDYzYTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7488005857579025274:3102] === CheckClustersList. Ok PQ Client: create topic: rt3.dc1--topic1 with 10 partitions CallPersQueueGRPC request to localhost:24161 ... 
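
The SCHEME_ERROR burst above is the expected bootstrap race rather than a failure: PERSQUEUE_CLUSTER_TRACKER starts polling /Root/PQ/Config/V2/Cluster before the harness has created and seeded that table, KQP answers with issue code 2003 ("Cannot find table"), and the "=== Init DC" UPSERT that follows resolves it, after which "=== CheckClustersList. Ok" is logged. A minimal sketch of that seeding step with the YDB Python SDK, assuming the endpoint (localhost:24161) and database (/Root) shown in the log; the retry wrapper and function names are illustrative:

    import ydb

    # Endpoint and database are taken from the log above; the rest is assumption.
    driver = ydb.Driver(endpoint="grpc://localhost:24161", database="/Root")
    driver.wait(timeout=5)

    INIT_DC = """
    UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight)
    VALUES ("dc1", "localhost", true, true, 1000),
           ("dc2", "dc2.logbroker.yandex.net", false, true, 1000);
    """

    def seed(session):
        # Until this commits, the cluster tracker keeps logging
        # "failed to list clusters" with YdbStatus: SCHEME_ERROR.
        session.transaction(ydb.SerializableReadWrite()).execute(
            INIT_DC, commit_tx=True)

    pool = ydb.SessionPool(driver)
    try:
        pool.retry_operation_sync(seed)
    finally:
        pool.stop()
        driver.stop()
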
d:1)maxCount 4 maxSize 99043 maxTimeLagMs 0 readTimestampMs 0 readOffset 0 EndOffset 4 ClientCommitOffset 0 committedOffset 0 Guid 712df4d0-ee79a9ca-ae38a222-ff44d0ef 2025-03-31T16:12:57.007904Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_12724935471184065007_v1 grpc read done: success# 1, data# { start_partition_session_response { partition_session_id: 1 read_offset: 0 } } 2025-03-31T16:12:57.008037Z node 27 :PQ_READ_PROXY INFO: session cookie 3 consumer session _27_3_12724935471184065007_v1 got StartRead from client: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 0, commitOffset# (empty maybe) 2025-03-31T16:12:57.008111Z node 27 :PQ_READ_PROXY INFO: session cookie 3 consumer session _27_3_12724935471184065007_v1 Start reading TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 4 readOffset 0 committedOffset 0 clientCommitOffset (empty maybe) clientReadOffset 0 2025-03-31T16:12:57.008177Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_12724935471184065007_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) ready for read with readOffset 0 endOffset 4 2025-03-31T16:12:57.008262Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_12724935471184065007_v1 partition ready for read: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 0, endOffset# 4, WTime# 1743437573207, sizeLag# 82536 2025-03-31T16:12:57.008286Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_12724935471184065007_v1TEvPartitionReady. Aval parts: 1 2025-03-31T16:12:57.009425Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_12724935471184065007_v1 grpc read done: success# 1, data# { read_request { bytes_size: 1048576 } } 2025-03-31T16:12:57.009658Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_12724935471184065007_v1 got read request: guid# 96ffb89e-94dfc79d-eff098f5-84e9e02d 2025-03-31T16:12:57.009713Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_12724935471184065007_v1 performing read request: guid# d753738a-f20c978e-b7122655-a2b67058, from# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), count# 4, size# 99043, partitionsAsked# 1, maxTimeLag# 0ms 2025-03-31T16:12:57.009824Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_12724935471184065007_v1 READ FROM TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1)maxCount 4 maxSize 99043 maxTimeLagMs 0 readTimestampMs 0 readOffset 0 EndOffset 4 ClientCommitOffset 0 committedOffset 0 Guid d753738a-f20c978e-b7122655-a2b67058 2025-03-31T16:12:57.010529Z node 28 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--topic1' requestId: 2025-03-31T16:12:57.010647Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--topic1' partition 0 2025-03-31T16:12:57.010799Z node 28 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--topic1' requestId: 2025-03-31T16:12:57.010818Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--topic1' partition 0 2025-03-31T16:13:00.081429Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 2 Topic 'rt3.dc1--topic1' partition 0 user $without_consumer offset 0 count 4 size 99043 endOffset 4 max time lag 
0ms effective offset 0 2025-03-31T16:13:00.081511Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 2 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 4 2025-03-31T16:13:00.081719Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Reading cookie 2. All data is from uncompacted head. 2025-03-31T16:13:00.081760Z node 28 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-03-31T16:13:00.081975Z node 28 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 0 messageNo: 0 requestId: cookie: 0 2025-03-31T16:13:00.083795Z node 27 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _27_2_5035297151919466703_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 4 Result { Offset: 0 Data: "... 20570 bytes ..." SourceId: "\000source" SeqNo: 2 WriteTimestampMS: 1743437573207 CreateTimestampMS: 1743437573193 UncompressedSize: 20480 PartitionKey: "" ExplicitHash: "" } Result { Offset: 1 Data: "... 20570 bytes ..." SourceId: "\000source" SeqNo: 3 WriteTimestampMS: 1743437573235 CreateTimestampMS: 1743437573225 UncompressedSize: 20480 PartitionKey: "" ExplicitHash: "" } Result { Offset: 2 Data: "... 20570 bytes ..." SourceId: "\000source" SeqNo: 4 WriteTimestampMS: 1743437573272 CreateTimestampMS: 1743437573266 UncompressedSize: 20480 PartitionKey: "" ExplicitHash: "" } Result { Offset: 3 Data: "... 20570 bytes ..." SourceId: "\000source" SeqNo: 5 WriteTimestampMS: 1743437573296 CreateTimestampMS: 1743437573292 UncompressedSize: 20480 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 88 RealReadOffset: 3 WaitQuotaTimeMs: 3068 EndOffset: 4 StartOffset: 0 } Cookie: 0 } 2025-03-31T16:13:00.084180Z node 27 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _27_2_5035297151919466703_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset4 2025-03-31T16:13:00.084259Z node 27 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _27_2_5035297151919466703_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 4 ReadOffset 4 ReadGuid 712df4d0-ee79a9ca-ae38a222-ff44d0ef has messages 1 2025-03-31T16:13:00.084516Z node 27 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _27_2_5035297151919466703_v1 read done: guid# 712df4d0-ee79a9ca-ae38a222-ff44d0ef, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), size# 82611 2025-03-31T16:13:00.084555Z node 27 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _27_2_5035297151919466703_v1 response to read: guid# 712df4d0-ee79a9ca-ae38a222-ff44d0ef 2025-03-31T16:13:00.084944Z node 27 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _27_2_5035297151919466703_v1 Process answer. 
Aval parts: 0 Bytes readed: 82611 Offset: 0 from session 1 Offset: 1 from session 1 Offset: 2 from session 1 Offset: 3 from session 1 2025-03-31T16:13:00.096556Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _27_2_5035297151919466703_v1 2025-03-31T16:13:00.096648Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [27:7488007373129747949:2590] destroyed 2025-03-31T16:13:00.096725Z node 28 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _27_2_5035297151919466703_v1 2025-03-31T16:13:00.094421Z node 27 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _27_2_5035297151919466703_v1 grpc read done: success# 0, data# { } 2025-03-31T16:13:00.094452Z node 27 :PQ_READ_PROXY INFO: session cookie 2 consumer session _27_2_5035297151919466703_v1 grpc read failed 2025-03-31T16:13:00.094481Z node 27 :PQ_READ_PROXY INFO: session cookie 2 consumer session _27_2_5035297151919466703_v1 grpc closed 2025-03-31T16:13:00.094515Z node 27 :PQ_READ_PROXY INFO: session cookie 2 consumer session _27_2_5035297151919466703_v1 is DEAD 2025-03-31T16:13:04.082249Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 3 Topic 'rt3.dc1--topic1' partition 0 user $without_consumer offset 0 count 4 size 99043 endOffset 4 max time lag 0ms effective offset 0 2025-03-31T16:13:04.082337Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 3 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 4 2025-03-31T16:13:04.082467Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Reading cookie 3. All data is from uncompacted head. 2025-03-31T16:13:04.082496Z node 28 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-03-31T16:13:04.084111Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_12724935471184065007_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 4 Result { Offset: 0 Data: "... 20570 bytes ..." SourceId: "\000source" SeqNo: 2 WriteTimestampMS: 1743437573207 CreateTimestampMS: 1743437573193 UncompressedSize: 20480 PartitionKey: "" ExplicitHash: "" } Result { Offset: 1 Data: "... 20570 bytes ..." SourceId: "\000source" SeqNo: 3 WriteTimestampMS: 1743437573235 CreateTimestampMS: 1743437573225 UncompressedSize: 20480 PartitionKey: "" ExplicitHash: "" } Result { Offset: 2 Data: "... 20570 bytes ..." SourceId: "\000source" SeqNo: 4 WriteTimestampMS: 1743437573272 CreateTimestampMS: 1743437573266 UncompressedSize: 20480 PartitionKey: "" ExplicitHash: "" } Result { Offset: 3 Data: "... 20570 bytes ..." 
SourceId: "\000source" SeqNo: 5 WriteTimestampMS: 1743437573296 CreateTimestampMS: 1743437573292 UncompressedSize: 20480 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 88 RealReadOffset: 3 WaitQuotaTimeMs: 7071 EndOffset: 4 StartOffset: 0 } Cookie: 0 } 2025-03-31T16:13:04.084457Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_12724935471184065007_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset4 2025-03-31T16:13:04.084533Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_12724935471184065007_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 4 ReadOffset 4 ReadGuid d753738a-f20c978e-b7122655-a2b67058 has messages 1 2025-03-31T16:13:04.084781Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_12724935471184065007_v1 read done: guid# d753738a-f20c978e-b7122655-a2b67058, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), size# 82611 2025-03-31T16:13:04.084818Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_12724935471184065007_v1 response to read: guid# d753738a-f20c978e-b7122655-a2b67058 Bytes readed: 82611 Offset: 0 from session 1 Offset: 1 from session 1 Offset: 2 from session 1 Offset: 3 from session 1 2025-03-31T16:13:04.082666Z node 28 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 0 messageNo: 0 requestId: cookie: 0 2025-03-31T16:13:04.085868Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_12724935471184065007_v1 Process answer. Aval parts: 0 2025-03-31T16:13:04.089838Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_12724935471184065007_v1 grpc read done: success# 0, data# { } 2025-03-31T16:13:04.089859Z node 27 :PQ_READ_PROXY INFO: session cookie 3 consumer session _27_3_12724935471184065007_v1 grpc read failed 2025-03-31T16:13:04.089884Z node 27 :PQ_READ_PROXY INFO: session cookie 3 consumer session _27_3_12724935471184065007_v1 grpc closed 2025-03-31T16:13:04.089911Z node 27 :PQ_READ_PROXY INFO: session cookie 3 consumer session _27_3_12724935471184065007_v1 is DEAD 2025-03-31T16:13:04.092091Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _27_3_12724935471184065007_v1 2025-03-31T16:13:04.092162Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [27:7488007373129747954:2594] destroyed 2025-03-31T16:13:04.092218Z node 28 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _27_3_12724935471184065007_v1 >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_delete_fails[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_delete_fails[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_fifo_read_delete_single_message [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_only_single_read_infly_from_fifo >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v1-standard_mode] [GOOD] >> 
test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] |86.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_delete_fails[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_to_nonexistent_queue[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] [GOOD] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v0-long_polling-fifo] [GOOD] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v0-long_polling-std] |86.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast, DROP COLUMN stats-`.metadata/script_executions`] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_empty_queue_url[tables_format_v0] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_retryable_iam_error[tables_format_v1] [GOOD] |86.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[DROP TABLE {}-`.metadata/script_executions`] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_timeout_works[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_and_read_to_different_groups[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v0-std] [GOOD] |86.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast-`.metadata/script_executions`] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_to_nonexistent_queue[tables_format_v0] [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_to_nonexistent_queue[tables_format_v1] |86.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax-`.metadata/script_executions`] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_to_nonexistent_queue[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_set_very_big_visibility_timeout[tables_format_v0] |86.1%| [TA] $(B)/ydb/tests/functional/script_execution/test-results/py3test/{meta.json ... results_accumulator.log} |86.1%| [TA] {RESULT} $(B)/ydb/tests/functional/script_execution/test-results/py3test/{meta.json ... results_accumulator.log} >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_only_single_read_infly_from_fifo [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_queue_attributes[tables_format_v0] |86.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_works[tables_format_v1] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v0-long_polling-std] [GOOD] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-long_polling-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_and_read_to_different_groups[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_and_read_to_different_groups[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v0-by_deduplication_id] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_queue_attributes[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v1-by_deduplication_id] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v1-content_based] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v0-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v1] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-long_polling-fifo] [GOOD] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-long_polling-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_set_very_big_visibility_timeout[tables_format_v0] [GOOD] |86.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_queue_by_nonexistent_user_fails[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_and_read_to_different_groups[tables_format_v1] [GOOD] >> 
test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_read_delete_many_groups[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_change_visibility] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_multi_read_dont_stall[tables_format_v1] |86.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_multi_read_dont_stall[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_can_read_new_written_data_on_visibility_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_message_visibility_with_very_big_timeout[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_group_id[tables_format_v0] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-long_polling-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v0-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v1-after_crutch_batch] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v1-standard_mode] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_can_read_from_different_groups[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v1-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v0-by_deduplication_id] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v0-content_based] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v1-content_based] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_delete_message_works[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_message_visibility_with_very_big_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_message_visibility_with_very_big_timeout[tables_format_v1] |86.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_works[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_read_dont_stall[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v0-after_crutch_batch] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v1-fifo] |86.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_multi_read_dont_stall[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_message_visibility_with_very_big_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v0-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_deduplication_id[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v1-std] [GOOD] |86.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_and_read_to_different_groups[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_queue_attributes[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v1-standard_mode] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_deduplication_id[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_deduplication_id[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_read_dont_stall[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes_batch[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes_batch[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v0-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_deduplication_id[tables_format_v1] [GOOD] |86.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attributes[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v0-content_based] [GOOD] |86.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] [GOOD] |86.1%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] [GOOD] |86.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_empty_queue_url[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_empty_queue_url[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_empty_queue_url[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v0-fifo] |86.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestYandexAttributesPrefix::test_allows_yandex_message_attribute_prefix[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_multi_read_dont_stall[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v0-std] |86.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] [GOOD] |86.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes_batch[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_read_dont_stall[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_delete_message_works[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_delete_message_works[tables_format_v1] |86.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v1-std] >> TReplicaTest::Merge >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v0] [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v1] >> TReplicaTest::Merge [GOOD] >> TReplicaTest::IdempotencyUpdatesWithoutSubscribers >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_read_delete_many_groups[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_read_delete_many_groups[tables_format_v1] >> TReplicaTest::IdempotencyUpdatesWithoutSubscribers [GOOD] >> TReplicaTest::StrongNotificationAfterCommit >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] [GOOD] >> TReplicaTest::StrongNotificationAfterCommit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::StrongNotificationAfterCommit [GOOD] Test command err: 2025-03-31T16:13:37.969574Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [1:8:2055] 2025-03-31T16:13:37.970749Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# path 2025-03-31T16:13:37.976533Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Subscribe: subscriber# [1:8:2055], path# path, domainOwnerId# 0, capabilities# 2025-03-31T16:13:37.977935Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 1] DomainOwnerId: 0 }: sender# [1:9:2056] 2025-03-31T16:13:37.977997Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# [OwnerId: 1, LocalPathId: 1] 2025-03-31T16:13:37.978067Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Subscribe: subscriber# [1:9:2056], path# [OwnerId: 1, LocalPathId: 1], domainOwnerId# 0, capabilities# 2025-03-31T16:13:37.978975Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:7:2054] 2025-03-31T16:13:37.979038Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Successful handshake: owner# 1, generation# 1 2025-03-31T16:13:37.987449Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:7:2054], cookie# 0, event size# 72 2025-03-31T16:13:37.987539Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-03-31T16:13:37.998158Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-03-31T16:13:38.003451Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:7:2054], cookie# 0, event size# 40 2025-03-31T16:13:38.003522Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2025-03-31T16:13:38.003561Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 
1] 2025-03-31T16:13:38.254123Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:7:2054] 2025-03-31T16:13:38.254195Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Successful handshake: owner# 1, generation# 1 2025-03-31T16:13:38.254349Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 1] DomainOwnerId: 0 }: sender# [2:8:2055] 2025-03-31T16:13:38.254390Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Upsert description: path# [OwnerId: 1, LocalPathId: 1] 2025-03-31T16:13:38.254473Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Subscribe: subscriber# [2:8:2055], path# [OwnerId: 1, LocalPathId: 1], domainOwnerId# 0, capabilities# 2025-03-31T16:13:38.254646Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:7:2054], cookie# 0, event size# 72 2025-03-31T16:13:38.254689Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-03-31T16:13:38.254752Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-03-31T16:13:38.254888Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:7:2054], cookie# 0, event size# 40 2025-03-31T16:13:38.254921Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2025-03-31T16:13:38.254950Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2025-03-31T16:13:38.255053Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { PathId: [OwnerId: 1, LocalPathId: 1] }: sender# [2:8:2055] 2025-03-31T16:13:38.263187Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Unsubscribe: subscriber# [2:8:2055], path# [OwnerId: 1, LocalPathId: 1] 2025-03-31T16:13:38.263354Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:7:2054], cookie# 0, event size# 72 2025-03-31T16:13:38.263396Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-03-31T16:13:38.263434Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Path was explicitly deleted, ignoring: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2025-03-31T16:13:38.263524Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:7:2054], cookie# 0, event size# 72 2025-03-31T16:13:38.263561Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], deletion# false 2025-03-31T16:13:38.263621Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 2], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-03-31T16:13:38.263748Z node 2 :SCHEME_BOARD_REPLICA 
DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 2] DomainOwnerId: 0 }: sender# [2:9:2056] 2025-03-31T16:13:38.263808Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Subscribe: subscriber# [2:9:2056], path# [OwnerId: 1, LocalPathId: 2], domainOwnerId# 0, capabilities# 2025-03-31T16:13:38.565634Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 1 }: sender# [3:8:2055] 2025-03-31T16:13:38.565694Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Upsert description: path# path 2025-03-31T16:13:38.565763Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Subscribe: subscriber# [3:8:2055], path# path, domainOwnerId# 1, capabilities# 2025-03-31T16:13:38.566655Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [3:7:2054] 2025-03-31T16:13:38.566709Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Successful handshake: owner# 1, generation# 1 2025-03-31T16:13:38.566829Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 1 Generation: 1 }: sender# [3:7:2054] 2025-03-31T16:13:38.566897Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Commit generation: owner# 1, generation# 1 2025-03-31T16:13:38.567003Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimr::NSchemeBoard::TReplica::TEvPrivate::TEvSendStrongNotifications { Owner: 1 } >> KqpJoinOrder::CanonizedJoinOrderTPCH4 >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_read_dont_stall[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_receive_with_very_big_visibility_timeout[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v1-fifo] |86.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_delete_fails[tables_format_v1] [GOOD] |86.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_queue_attributes[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_read_dont_stall[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v1-fifo] |86.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v0-fifo] |86.2%| [TM] {asan, 
default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_retryable_iam_error[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_receive_with_very_big_visibility_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_receive_with_very_big_visibility_timeout[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v1-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_group_id[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_group_id[tables_format_v1] |86.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_multi_read_dont_stall[tables_format_v0] [GOOD] >> test_ttl.py::TestTTLValueSinceUnixEpoch::test_case >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_receive_with_very_big_visibility_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_group_id[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_receive_attempt_id[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v1-std] >> test_tablet.py::TestMassiveKills::test_tablets_are_ok_after_many_kills [GOOD] |86.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test |86.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test >> TPersQueueTest::TestReadRuleServiceTypePassword [GOOD] >> TPersQueueTest::TestReadPartitionByGroupId >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v0-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_deduplication_id[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_receive_attempt_id[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_receive_attempt_id[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v0-std] >> test_ttl.py::TestTTLAlterSettings::test_case >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v1-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_receive_attempt_id[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_change_visibility] |86.2%| [TA] $(B)/ydb/tests/functional/sqs/cloud/test-results/py3test/{meta.json ... results_accumulator.log} >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v1-std] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_delete_message_works[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_fifo_read_delete_single_message >> test_ttl.py::TestTTLDefaultEnv::test_case >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-fifo] |86.2%| [TA] {RESULT} $(B)/ydb/tests/functional/sqs/cloud/test-results/py3test/{meta.json ... 
results_accumulator.log}
>> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_deduplication_id[tables_format_v0] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v1-fifo] [GOOD]
|86.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_set_very_big_visibility_timeout[tables_format_v0] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] [GOOD]
|86.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_queue_attributes[tables_format_v0] [GOOD]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_queue_attributes[tables_format_v1]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-std] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_works[tables_format_v0]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v0-std] [GOOD]
|86.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v1-std] [GOOD]
>> test_ttl.py::TestTTLOnIndexedTable::test_case
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_multi_read_dont_stall[tables_format_v1] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_partial_delete_works[tables_format_v0]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_queue_attributes[tables_format_v1] [GOOD]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_read_dont_stall[tables_format_v0]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] [GOOD]
|86.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_fifo_read_delete_single_message [GOOD]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_only_single_read_infly_from_fifo
|86.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-long_polling-std] [GOOD]
|86.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_read_dont_stall[tables_format_v1] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_receive_with_very_big_visibility_timeout[tables_format_v0]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v0-fifo] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v0-std]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_change_visibility] [GOOD]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_delete_message]
|86.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v0] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v1]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-fifo] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-std]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_receive_with_very_big_visibility_timeout[tables_format_v0] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_receive_with_very_big_visibility_timeout[tables_format_v1]
>> overlapping_portions.py::TestOverlappingPortions::test
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_works[tables_format_v1] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_attribute_name[tables_format_v0]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_change_visibility] [GOOD]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_delete_message]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_receive_with_very_big_visibility_timeout[tables_format_v1] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v0-fifo]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_read_delete_many_groups[tables_format_v1] [GOOD]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_delete_message] [GOOD]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_only_single_read_infly_from_fifo [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v0-fifo] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v0-std]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v0-fifo] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v0-std]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_attribute_name[tables_format_v0] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_attribute_name[tables_format_v1]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v1-fifo] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_works[tables_format_v0] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_works[tables_format_v1]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v1-std]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_delete_message] [GOOD]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v0-after_crutch_batch] [GOOD]
>> TSchemeShardPgTypesInTables::CreateTableWithPgTypeColumn-EnableTablePgTypes-true
>> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_change_visibility]
>> TSchemeShardTest::RmDirTwice
>> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode]
>> TSchemeShardTest::InitRootAgain
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_attribute_name[tables_format_v1] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_delete_fails[tables_format_v0]
>> TSchemeShardTest::DropIndexedTableAndForceDropSimultaneously
>> TSchemeShardTest::CreateTable
>> TSchemeShardTest::ConsistentCopyTable
>> TSchemeShardCheckProposeSize::CopyTable
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v1-std] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes_batch[tables_format_v0]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v0-std] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-std] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_works[tables_format_v0]
>> TSchemeShardTest::RmDirTwice [GOOD]
>> TSchemeShardTest::TopicMeteringMode
>> TSchemeShardTest::InitRootAgain [GOOD]
>> TSchemeShardTest::InitRootWithOwner
>> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_change_visibility] [GOOD]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_delete_message]
>> TSchemeShardPgTypesInTables::CreateTableWithPgTypeColumn-EnableTablePgTypes-true [GOOD]
>> TSchemeShardTest::AlterTableAndConcurrentSplit
|86.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_deduplication_id[tables_format_v1] [GOOD]
>> test_ttl.py::TestTTLAlterSettings::test_case [GOOD]
>> TSchemeShardCheckProposeSize::CopyTable [GOOD]
>> TSchemeShardCheckProposeSize::CopyTables
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_delete_fails[tables_format_v0] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_delete_fails[tables_format_v1]
>> TSchemeShardTest::DropIndexedTableAndForceDropSimultaneously [GOOD]
>> TSchemeShardTest::DependentOps
>> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_can_read_from_different_groups[tables_format_v0] [GOOD]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_can_read_from_different_groups[tables_format_v1]
>> TSchemeShardTest::InitRootWithOwner [GOOD]
>> TSchemeShardTest::DropTableTwice
>> TSchemeShardTest::CreateTable [GOOD]
>> TSchemeShardTest::CreateTableWithDate
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v1-fifo] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v1-std]
>> TSchemeShardTest::TopicMeteringMode [GOOD]
>> TSchemeShardTest::Restart
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v0-fifo] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v0-std]
>> TSchemeShardTest::DependentOps [GOOD]
>> TSchemeShardTest::DefaultColumnFamiliesWithNonCanonicName
|86.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v0-content_based] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_delete_fails[tables_format_v1] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_works[tables_format_v1] [GOOD]
>> TSchemeShardTest::DropTableTwice [GOOD]
>> TSchemeShardTest::IgnoreUserColumnIds
>> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_delete_message] [GOOD]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_timeout_works[tables_format_v0]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v1] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v0-std] [GOOD]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v0-fifo]
>> TSchemeShardTest::AlterTableAndConcurrentSplit [GOOD]
>> TSchemeShardTest::AlterTable
>> TSchemeShardTest::CreateIndexedTable
>> TSchemeShardTest::ConsistentCopyTable [GOOD]
>> TSchemeShardTest::ConsistentCopyTableAwait
>> TSchemeShardTest::Restart [GOOD]
>> TSchemeShardTest::SchemeErrors
|86.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test >> test_tablet.py::TestMassiveKills::test_tablets_are_ok_after_many_kills [GOOD]
>> TSchemeShardTest::CreateTableWithDate [GOOD]
>> TSchemeShardTest::CreateIndexedTableRejects
>> TSchemeShardTest::IgnoreUserColumnIds [GOOD]
>> TSchemeShardTest::DropTableAndConcurrentSplit
>> TSchemeShardTest::DefaultColumnFamiliesWithNonCanonicName [GOOD]
>> TSchemeShardTest::DropPQ
>> TSchemeShardTest::SchemeErrors [GOOD]
>> TSchemeShardTest::SerializedCellVec
>> TSchemeShardTest::AlterTable [GOOD]
>> TSchemeShardTest::AlterTableDropColumnReCreateSplit
|86.3%| [TA] $(B)/ydb/tests/tools/nemesis/ut/test-results/py3test/{meta.json ... results_accumulator.log}
>> TSchemeShardTest::SerializedCellVec [GOOD]
>> TSchemeShardTest::UpdateChannelsBindingSolomonShouldNotUpdate
|86.3%| [TA] {RESULT} $(B)/ydb/tests/tools/nemesis/ut/test-results/py3test/{meta.json ... results_accumulator.log}
>> TSchemeShardTest::MkRmDir
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_partial_delete_works[tables_format_v0] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_works[tables_format_v0] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_works[tables_format_v1]
>> TSchemeShardTest::ConsistentCopyTableAwait [GOOD]
>> TSchemeShardTest::ConsistentCopyTableRejects
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_partial_delete_works[tables_format_v1]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v0-std] [GOOD]
>> TSchemeShardTest::CreateIndexedTable [GOOD]
>> TSchemeShardTest::CreateAlterTableWithCodec
>> TSchemeShardTest::DropTableAndConcurrentSplit [GOOD]
>> TSchemeShardTest::DropTable
>> TSchemeShardTest::UpdateChannelsBindingSolomonShouldNotUpdate [GOOD]
>> TSchemeShardTest::UpdateChannelsBindingSolomonShouldUpdate
>> TSchemeShardTest::AlterTableDropColumnReCreateSplit [GOOD]
>> TSchemeShardTest::AlterTableDropColumnSplitThenReCreate
>> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_can_read_from_different_groups[tables_format_v1] [GOOD]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v0]
|86.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_read_dont_stall[tables_format_v0] [GOOD]
>> TSchemeShardTest::MkRmDir [GOOD]
>> TSchemeShardTest::PathName
>> TSchemeShardTest::UpdateChannelsBindingSolomonShouldUpdate [GOOD]
>> TSchemeShardTest::UpdateChannelsBindingSolomonStorageConfig
>> TSchemeShardTest::CreateAlterTableWithCodec [GOOD]
>> TSchemeShardTest::CopyTableTwiceSimultaneously
>> TSchemeShardTest::PathName [GOOD]
>> TSchemeShardTest::PathName_SetLocale
>> TSchemeShardTest::AlterTableDropColumnSplitThenReCreate [GOOD]
>> TSchemeShardTest::AlterTableById
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v0-std] [GOOD]
>> TSchemeShardTest::Boot
>> TSchemeShardTest::CreateIndexedTableRejects [GOOD]
>> TSchemeShardTest::CreateIndexedTableAndForceDrop
>> TSchemeShardTest::ConsistentCopyTableRejects [GOOD]
>> TSchemeShardTest::ConsistentCopyTableToDeletedPath
|86.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v0-std] [GOOD]
>> TSchemeShardTest::PathName_SetLocale [GOOD]
>> TSchemeShardTest::ModifyACL
>> TSchemeShardTest::DropTable [GOOD]
>> TSchemeShardTest::DropTableById
>> TSchemeShardTest::UpdateChannelsBindingSolomonStorageConfig [GOOD]
>> TSchemeShardTest::RejectAlterSolomon
>> TSchemeShardTest::AlterTableById [GOOD]
>> TSchemeShardTest::AlterTableConfig
>> TSchemeShardTest::CopyTableTwiceSimultaneously [GOOD]
>> TSchemeShardTest::CopyTableWithAlterConfig
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_works[tables_format_v1] [GOOD]
>> TSchemeShardTest::DropPQ [GOOD]
>> TSchemeShardTest::DropBlockStoreVolume
>> TSchemeShardTest::Boot [GOOD]
>> TSchemeShardTest::CacheEffectiveACL [GOOD]
>> TSchemeShardTest::AlterTableKeyColumns
>> TSchemeShardTest::ModifyACL [GOOD]
>> TSchemeShardTest::NameFormat
>> TSchemeShardTest::CreateIndexedTableAndForceDrop [GOOD]
>> TSchemeShardTest::CreateIndexedTableAndForceDropSimultaneously
>> TSchemeShardTest::DropTableById [GOOD]
>> TSchemeShardTest::DropPQFail
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v0-fifo] [GOOD]
>> TSchemeShardTest::RejectAlterSolomon [GOOD]
>> TSchemeShardTest::SimultaneousDropForceDrop
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] [GOOD]
|86.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] [GOOD]
>> TSchemeShardTest::DropBlockStoreVolume [GOOD]
>> TSchemeShardTest::DropBlockStoreVolumeWithNonReplicatedPartitions
>> ReadIteratorExternalBlobs::ExtBlobsWithFirstRowPreloadedWithReboot [GOOD]
>> ReadIteratorExternalBlobs::ExtBlobsMultipleColumns
>> TSchemeShardTest::CreateIndexedTableAndForceDropSimultaneously [GOOD]
>> TSchemeShardTest::CreateTableWithUniformPartitioning
>> TSchemeShardTest::AlterTableKeyColumns [GOOD]
>> TSchemeShardTest::AlterTableSizeToSplit
>> TSchemeShardTest::ConsistentCopyTableToDeletedPath [GOOD]
>> TSchemeShardTest::CopyIndexedTable
>> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_read_dont_stall[tables_format_v0] [GOOD]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_read_dont_stall[tables_format_v1]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v0-std] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v1-fifo]
>> TSchemeShardTest::NameFormat [GOOD]
>> TSchemeShardTest::ParallelCreateTable
>> TSchemeShardTest::SimultaneousDropForceDrop [GOOD]
>> TSchemeShardTest::RejectSystemViewPath
>> TSchemeShardTest::AlterTableConfig [GOOD]
>> TSchemeShardTest::AlterTableCompactionPolicy
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes_batch[tables_format_v0] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes_batch[tables_format_v1]
>> TSchemeShardTest::DropBlockStoreVolumeWithNonReplicatedPartitions [GOOD]
>> TSchemeShardTest::DropBlockStoreVolume2
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_multi_read_dont_stall[tables_format_v1] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_partial_delete_works[tables_format_v0]
>> TSchemeShardTest::AlterTableSizeToSplit [GOOD]
>> TSchemeShardTest::AlterTableSplitSchema
>> TSchemeShardTest::CopyTableWithAlterConfig [GOOD]
>> TSchemeShardTest::CopyTableOmitFollowers
>> TSchemeShardTest::RejectSystemViewPath [GOOD]
>> TSchemeShardTest::SplitKey [GOOD]
>> TSchemeShardTest::SplitAlterCopy
>> TSchemeShardTest::ParallelCreateTable [GOOD]
>> TSchemeShardTest::ParallelCreateSameTable
>> TSchemeShardTest::AlterTableCompactionPolicy [GOOD]
>> TSchemeShardTest::AlterTableFollowers
>> TSchemeShardTest::CopyIndexedTable [GOOD]
>> TSchemeShardTest::CopyTable
|86.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v1-std] [GOOD]
>> TSchemeShardTest::DropBlockStoreVolume2 [GOOD]
>> TSchemeShardTest::DropBlockStoreVolumeWithFillGeneration
>> TSchemeShardTest::CopyTableOmitFollowers [GOOD]
>> TSchemeShardTest::CreateIndexedTableAfterBackup
|86.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v1-fifo] [GOOD]
|86.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_deduplication_id[tables_format_v0] [GOOD]
>> TSchemeShardTest::AlterTableSplitSchema [GOOD]
>> TSchemeShardTest::AlterTableSettings
>> ResourcePoolsDdl::TestPoolSwitchToLimitedState
>> ResourcePoolClassifiersDdl::TestResourcePoolClassifiersPermissions
>> TSchemeShardTest::CreateTableWithUniformPartitioning [GOOD]
>> TSchemeShardTest::CreateTableWithSplitBoundaries
>> KqpWorkloadService::WorkloadServiceDisabledByFeatureFlag
|86.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes_batch[tables_format_v1] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_read_dont_stall[tables_format_v0]
>> TSchemeShardTest::SplitAlterCopy [GOOD]
>> TSchemeShardTest::TopicReserveSize
>> TSchemeShardTest::CopyTable [GOOD]
>> TSchemeShardTest::CopyTableAndConcurrentChanges
>> TSchemeShardTest::DropPQFail [GOOD]
>> TSchemeShardTest::DropPQAbort
>> TSchemeShardTest::ParallelCreateSameTable [GOOD]
>> TSchemeShardTest::MultipleColumnFamilies
>> TSchemeShardTest::DropBlockStoreVolumeWithFillGeneration [GOOD]
>> TSchemeShardTest::CreateWithIntermediateDirs
|86.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v0-std] [GOOD]
>> TSchemeShardTest::AlterTableFollowers [GOOD]
>> TSchemeShardTest::AlterPersQueueGroup
>> TSchemeShardTest::CreateTableWithSplitBoundaries [GOOD]
>> TSchemeShardTest::CreateTableWithConfig
>> ResourcePoolsDdl::TestCreateResourcePool
|86.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] [GOOD]
>> TSchemeShardTest::AlterTableSettings [GOOD]
>> TSchemeShardTest::AssignBlockStoreVolume
>> TSchemeShardTest::CreateIndexedTableAfterBackup [GOOD]
>> TSchemeShardTest::CreateFinishedInDescription
>> KqpWorkloadService::TestQueueSizeSimple
>> KqpWorkloadService::TestQueryCancelAfterPoolWithLimits
>> KqpWorkloadServiceActors::TestPoolFetcher
>> TSchemeShardTest::CreateWithIntermediateDirs [GOOD]
>> TSchemeShardTest::DocumentApiVersion
>> TSchemeShardTest::CreateTableWithConfig [GOOD]
>> TSchemeShardTest::CreateTableWithNamedConfig
>> TSchemeShardTest::CreateFinishedInDescription [GOOD]
>> TSchemeShardTest::CreateBlockStoreVolume
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_partial_delete_works[tables_format_v1] [GOOD]
>> TSchemeShardTest::AssignBlockStoreVolume [GOOD]
>> TSchemeShardTest::AssignBlockStoreVolumeDuringAlter
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v0-fifo]
>> TSchemeShardTest::CopyTableAndConcurrentChanges [GOOD]
>> TSchemeShardTest::CopyTableAndConcurrentSplit
>> KqpWorkloadServiceTables::TestTablesIsNotCreatingForUnlimitedPool
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_read_dont_stall[tables_format_v0] [GOOD]
|86.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test >> test_ttl.py::TestTTLAlterSettings::test_case [GOOD]
>> TSchemeShardTest::TopicReserveSize [GOOD]
>> TSchemeShardTest::TopicWithAutopartitioningReserveSize
>> KqpWorkloadServiceDistributed::TestDistributedQueue
>> TSchemeShardTest::AssignBlockStoreVolumeDuringAlter [GOOD]
>> TSchemeShardTest::AssignBlockStoreCheckVersionInAlter
>> TSchemeShardTest::CreateBlockStoreVolume [GOOD]
>> TSchemeShardTest::CreateBlockStoreVolumeWithVolumeChannelsProfiles
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v0-fifo] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v0-std]
>> TSchemeShardTest::CreateTableWithNamedConfig [GOOD]
>> TSchemeShardTest::CreateTableWithUnknownNamedConfig
>> TSchemeShardTest::DocumentApiVersion [GOOD]
>> TSchemeShardTest::DisablePublicationsOfDropping_Dir
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v1-fifo] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v1-std]
>> TPersQueueTest::TestReadPartitionByGroupId [GOOD]
>> TPersQueueTest::TestReadPartitionStatus
>> TSchemeShardTest::MultipleColumnFamilies [GOOD]
>> TSchemeShardTest::MultipleColumnFamiliesWithStorage
>> TSchemeShardTest::AssignBlockStoreCheckVersionInAlter [GOOD]
>> TSchemeShardTest::AssignBlockStoreCheckFillGenerationInAlter
>> TSchemeShardTest::CreateTableWithUnknownNamedConfig [GOOD]
>> TSchemeShardTest::CreatePersQueueGroup
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v1-std] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v0-fifo]
>> TSchemeShardTest::CreateBlockStoreVolumeWithVolumeChannelsProfiles [GOOD]
>> TSchemeShardTest::CreateBlockStoreVolumeWithNonReplicatedPartitions
>> TSchemeShardTest::CopyTableAndConcurrentSplit [GOOD]
>> TSchemeShardTest::CopyTableAndConcurrentMerge
>> KqpWorkloadService::WorkloadServiceDisabledByFeatureFlag [GOOD]
>> KqpWorkloadService::WorkloadServiceDisabledByFeatureFlagOnServerless
>> TSchemeShardTest::DisablePublicationsOfDropping_Dir [GOOD]
>> TSchemeShardTest::DisablePublicationsOfDropping_Table
|86.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_read_delete_many_groups[tables_format_v1] [GOOD]
>> TSchemeShardTest::TopicWithAutopartitioningReserveSize [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v0-std] [GOOD]
>> TSchemeShardTest::CreateBlockStoreVolumeWithNonReplicatedPartitions [GOOD]
>> TSchemeShardTest::CreateAlterBlockStoreVolumeWithInvalidPoolKinds
>> TSchemeShardTest::AssignBlockStoreCheckFillGenerationInAlter [GOOD]
>> TSchemeShardTest::BlockStoreVolumeLimits
>> KqpWorkloadServiceActors::TestPoolFetcher [GOOD]
>> KqpWorkloadServiceActors::TestPoolFetcherAclValidation
|86.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_only_single_read_infly_from_fifo [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v0-fifo] [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::TopicWithAutopartitioningReserveSize [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:13:53.836302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:13:53.836414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:13:53.836453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:13:53.836513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:13:53.836556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:13:53.836584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:13:53.836654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:13:53.836740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:13:53.837153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:13:53.927554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:13:53.927601Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:13:53.932968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:13:53.933073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:13:53.933245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:13:53.938929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:13:53.939194Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:13:53.944555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:13:53.946102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:13:53.956073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:13:53.966302Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-31T16:13:53.966375Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:13:53.966568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:13:53.966627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:13:53.966717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:13:53.968248Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:13:53.987919Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:13:54.137426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:13:54.137599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:13:54.137763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:13:54.137961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:13:54.138014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:13:54.140740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:13:54.140865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:13:54.141023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:13:54.141079Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:13:54.141113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:13:54.141167Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:13:54.146043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:13:54.146109Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:13:54.146145Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:13:54.148052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:13:54.148093Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:13:54.148155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:13:54.148204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:13:54.152332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:13:54.154163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:13:54.154370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:13:54.155449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:13:54.155570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:13:54.155620Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:13:54.155964Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:13:54.156031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:13:54.156207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:13:54.156275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:13:54.160114Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:13:54.160176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:13:54.160338Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:13:54.160384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:13:54.160614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:13:54.160807Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:13:54.160920Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:13:54.160956Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:13:54.161004Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:13:54.161034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:13:54.161071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:13:54.161108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:13:54.161139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:13:54.161167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:13:54.161228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:13:54.161270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:13:54.161302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:13:54.163362Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:13:54.163459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:13:54.163491Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... oseTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 2, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-03-31T16:14:09.099587Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: false 2025-03-31T16:14:09.099621Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 can't persist state: ShardsInProgress is not empty, remain: 2 2025-03-31T16:14:09.099963Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409551, partId: 0 2025-03-31T16:14:09.100063Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409551 Status: COMPLETE TxId: 104 Step: 5000005 2025-03-31T16:14:09.100118Z node 13 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409551 Status: COMPLETE TxId: 104 Step: 5000005 2025-03-31T16:14:09.100159Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:6, shard: 72075186233409551, left await: 1, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-03-31T16:14:09.100214Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: false 2025-03-31T16:14:09.100248Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 
can't persist state: ShardsInProgress is not empty, remain: 1 2025-03-31T16:14:09.100542Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409550, partId: 0 2025-03-31T16:14:09.100631Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409550 Status: COMPLETE TxId: 104 Step: 5000005 2025-03-31T16:14:09.100704Z node 13 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409550 Status: COMPLETE TxId: 104 Step: 5000005 2025-03-31T16:14:09.100742Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:5, shard: 72075186233409550, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-03-31T16:14:09.100773Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-03-31T16:14:09.101036Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 128 -> 240 2025-03-31T16:14:09.101361Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 9 2025-03-31T16:14:09.114169Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-31T16:14:09.114430Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-31T16:14:09.114578Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-31T16:14:09.114725Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-31T16:14:09.114820Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-31T16:14:09.114933Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-31T16:14:09.115284Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:14:09.115353Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T16:14:09.115665Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:14:09.115710Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [13:206:2208], at schemeshard: 72057594046678944, txId: 104, path id: 2 2025-03-31T16:14:09.115791Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-31T16:14:09.115853Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2025-03-31T16:14:09.116090Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-03-31T16:14:09.116170Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-31T16:14:09.116247Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part 
operation is done id#104:0 progress is 1/1 2025-03-31T16:14:09.116339Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-31T16:14:09.116411Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-03-31T16:14:09.116485Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-31T16:14:09.116577Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-03-31T16:14:09.116637Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-03-31T16:14:09.116947Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 10 2025-03-31T16:14:09.117022Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 104, publications: 1, subscribers: 0 2025-03-31T16:14:09.117081Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2025-03-31T16:14:09.118104Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2025-03-31T16:14:09.118206Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2025-03-31T16:14:09.118258Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-03-31T16:14:09.118351Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-03-31T16:14:09.118423Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 9 2025-03-31T16:14:09.118572Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2025-03-31T16:14:09.129628Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-03-31T16:14:09.172098Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-03-31T16:14:09.172182Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-03-31T16:14:09.172963Z node 13 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-03-31T16:14:09.173125Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-03-31T16:14:09.173195Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [13:1468:3266] TestWaitNotification: OK eventTxId 104 2025-03-31T16:14:09.173939Z node 13 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:14:09.174159Z node 
13 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 262us result status StatusSuccess 2025-03-31T16:14:09.174811Z node 13 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 4 } ChildrenExist: false BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 6 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 13 WriteSpeedInBytesPerSecond: 19 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 7 PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Inactive ChildPartitionIds: 1 ChildPartitionIds: 2 } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { ToBound: "A" } Status: Inactive ParentPartitionIds: 0 ChildPartitionIds: 3 ChildPartitionIds: 4 } Partitions { PartitionId: 2 TabletId: 72075186233409549 KeyRange { FromBound: "A" } Status: Inactive ParentPartitionIds: 0 ChildPartitionIds: 5 } Partitions { PartitionId: 3 TabletId: 72075186233409550 KeyRange { ToBound: "0" } Status: Active ParentPartitionIds: 1 } Partitions { PartitionId: 4 TabletId: 72075186233409551 KeyRange { FromBound: "0" ToBound: "A" } Status: Inactive ParentPartitionIds: 1 ChildPartitionIds: 5 } Partitions { PartitionId: 5 TabletId: 72075186233409552 KeyRange { FromBound: "0" } Status: Active ParentPartitionIds: 2 ParentPartitionIds: 4 } AlterVersion: 4 BalancerTabletID: 72075186233409547 NextPartitionId: 6 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 494 AccountSize: 494 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 6 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v0-std]
>> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterDrop
>> TSchemeShardTest::MultipleColumnFamiliesWithStorage [GOOD]
>> TSchemeShardTest::ParallelModifying
>> TSchemeShardTest::CreateAlterBlockStoreVolumeWithInvalidPoolKinds [GOOD]
>> TSchemeShardTest::CreateDropKesus
>> TSchemeShardTest::DisablePublicationsOfDropping_Table [GOOD]
>> TSchemeShardTest::DisablePublicationsOfDropping_IndexedTable
>> TSchemeShardTest::CopyTableAndConcurrentMerge [GOOD]
>> TSchemeShardTest::CopyTableAndConcurrentSplitMerge
>> KqpWorkloadServiceTables::TestTablesIsNotCreatingForUnlimitedPool [GOOD]
>> ResourcePoolClassifiersDdl::TestCreateResourcePoolClassifier
>> TSchemeShardTest::BlockStoreVolumeLimits [GOOD]
>> TSchemeShardTest::BlockStoreNonreplVolumeLimits
>> TSchemeShardTest::CreatePersQueueGroup [GOOD]
>> TSchemeShardTest::CreatePersQueueGroupWithKeySchema
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v0-std] [GOOD]
>> TSchemeShardTest::CreateDropKesus [GOOD]
>> TSchemeShardTest::CreateAlterKesus
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v1-fifo]
|86.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v0-std] [GOOD]
>> ResourcePoolsDdl::TestPoolSwitchToLimitedState [GOOD]
>> ResourcePoolsDdl::TestPoolSwitchToUnlimitedState
>> TSchemeShardTest::AlterPersQueueGroup [GOOD]
>> TSchemeShardTest::AlterPersQueueGroupWithKeySchema
>> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] [GOOD]
>> ResourcePoolsDdl::TestCreateResourcePool [GOOD]
>> ResourcePoolsDdl::TestCreateResourcePoolOnServerless
>> TSchemeShardTest::DisablePublicationsOfDropping_IndexedTable [GOOD]
>> TSchemeShardTest::DisablePublicationsOfDropping_Pq
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_partial_delete_works[tables_format_v0] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_partial_delete_works[tables_format_v1]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v1-fifo] [GOOD]
>> TSchemeShardTest::BlockStoreNonreplVolumeLimits [GOOD]
>> TSchemeShardTest::BlockStoreSystemVolumeLimits
>> TSchemeShardTest::CreateAlterKesus [GOOD]
>> TSchemeShardTest::CreateDropSolomon
>> TSchemeShardTest::AlterPersQueueGroupWithKeySchema [GOOD]
>> TSchemeShardTest::AlterBlockStoreVolume
>> TSchemeShardTest::CopyTableAndConcurrentSplitMerge [GOOD]
>> TSchemeShardTest::CopyTableForBackup
>> ResourcePoolClassifiersDdl::TestResourcePoolClassifiersPermissions [GOOD]
>> ResourcePoolClassifiersDdl::TestResourcePoolClassifierRanks
>> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v1] [GOOD]
|86.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_delete_fails[tables_format_v1] [GOOD]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v0-by_deduplication_id]
>> KqpWorkloadServiceActors::TestPoolFetcherAclValidation [GOOD]
>> KqpWorkloadServiceActors::TestPoolFetcherNotExistingPool
>> TSchemeShardTest::CreatePersQueueGroupWithKeySchema [GOOD]
>> TSchemeShardTest::CreateTableWithCompactionStrategies
>> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v0] [GOOD]
|86.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_works[tables_format_v1] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v1-std] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo]
>> TSchemeShardTest::BlockStoreSystemVolumeLimits [GOOD]
>> TSchemeShardTest::AlterTableWithCompactionStrategies
|86.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v0-std] [GOOD]
|86.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] [GOOD]
>> TSchemeShardTest::DisablePublicationsOfDropping_Pq [GOOD]
>> TSchemeShardTest::DisablePublicationsOfDropping_Solomon
|86.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_delete_message] [GOOD]
>> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterDrop [GOOD]
>> KqpWorkloadServiceTables::TestCreateWorkloadSerivceTables
|86.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest >> TNetClassifierTest::TestInitFromBadlyFormattedFile
>> TNetClassifierTest::TestInitFromFile
>> TSchemeShardTest::CreateDropSolomon [GOOD]
>> TSchemeShardTest::CreateAlterDropSolomon
>> TSchemeShardTest::AlterBlockStoreVolume [GOOD]
>> TSchemeShardTest::AlterBlockStoreVolumeWithNonReplicatedPartitions
>> TSchemeShardTest::CreateTableWithCompactionStrategies [GOOD]
>> TSchemeShardTest::CreateTopicOverDiskSpaceQuotas
>> TSchemeShardTest::AlterTableWithCompactionStrategies [GOOD]
>> TSchemeShardTest::BackupBackupCollection-WithIncremental-false
>> TSchemeShardTest::DisablePublicationsOfDropping_Solomon [GOOD]
>> TSchemeShardTest::AlterBlockStoreVolumeWithNonReplicatedPartitions [GOOD]
>> TSchemeShardTest::AdoptDropSolomon
>> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_read_dont_stall[tables_format_v1] [GOOD]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v0-after_crutch_batch]
>> TSchemeShardTest::ParallelModifying [GOOD]
>> TSchemeShardTest::PQGroupExplicitChannels
|86.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v0-std] [GOOD]
>> TSchemeShardTest::CreateTopicOverDiskSpaceQuotas [GOOD]
>> TSchemeShardTest::CreateSystemColumn
>> TSchemeShardTest::CreateAlterDropSolomon [GOOD]
|86.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest >> GroupWriteTest::WriteHardRateDispatcher
>> GroupWriteTest::Simple
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::DisablePublicationsOfDropping_Solomon [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-31T16:13:53.809571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s,
RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:13:53.809663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:13:53.809700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:13:53.809736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:13:53.810694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:13:53.810760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:13:53.810851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:13:53.810940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:13:53.813358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:13:53.923192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:13:53.923251Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:13:53.933562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:13:53.933977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:13:53.934204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:13:53.955667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:13:53.955928Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:13:53.956547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:13:53.956888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:13:53.958982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:13:53.966405Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:13:53.966495Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:13:53.967776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:13:53.967853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:13:53.967917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:13:53.968881Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:13:53.975802Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 
2025-03-31T16:13:54.120371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:13:54.120594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:13:54.120770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:13:54.121030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:13:54.121118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:13:54.136525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:13:54.136650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:13:54.136825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:13:54.136901Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:13:54.136944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:13:54.136989Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:13:54.139028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:13:54.139113Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:13:54.139185Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:13:54.141107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:13:54.141158Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:13:54.141214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:13:54.141270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:13:54.144825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:13:54.146704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:13:54.146860Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:13:54.154257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:13:54.154387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:13:54.154435Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:13:54.155993Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:13:54.156059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:13:54.156240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:13:54.156325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:13:54.159352Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:13:54.159413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:13:54.159567Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:13:54.159609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:13:54.160158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:13:54.160802Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:13:54.160943Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:13:54.160995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:13:54.161037Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:13:54.161068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:13:54.161102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:13:54.161136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:13:54.161171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:13:54.161199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:13:54.161262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:13:54.161303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:13:54.161346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:13:54.163219Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:13:54.163317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:13:54.163355Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 5-03-31T16:14:15.480026Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2025-03-31T16:14:15.480100Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:14:15.481850Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-03-31T16:14:15.481962Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-03-31T16:14:15.481992Z node 15 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-03-31T16:14:15.482026Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-03-31T16:14:15.482063Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-03-31T16:14:15.482155Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-03-31T16:14:15.483698Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 104:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:104 msg type: 269090816 2025-03-31T16:14:15.483899Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 104, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 104 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 104 at step: 5000005 2025-03-31T16:14:15.488173Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:14:15.488368Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 64424511595 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:14:15.488476Z node 15 :FLAT_TX_SCHEMESHARD INFO: TDropSolomon TPropose operationId# 104:0 
HandleReply TEvOperationPlan, step: 5000005, at schemeshard: 72057594046678944 2025-03-31T16:14:15.488617Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 3] name: Obj type: EPathTypeSolomonVolume state: EPathStateDrop stepDropped: 0 droppedTxId: 104 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:14:15.488682Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-03-31T16:14:15.488825Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-03-31T16:14:15.488942Z node 15 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 128 -> 130 2025-03-31T16:14:15.489115Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:14:15.489207Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-31T16:14:15.490092Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-03-31T16:14:15.492425Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-03-31T16:14:15.492796Z node 15 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:14:15.492838Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:14:15.492973Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-03-31T16:14:15.493144Z node 15 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:14:15.493181Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [15:446:2406], at schemeshard: 72057594046678944, txId: 104, path id: 1 2025-03-31T16:14:15.493221Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [15:446:2406], at schemeshard: 72057594046678944, txId: 104, path id: 3 FAKE_COORDINATOR: Erasing txId 104 2025-03-31T16:14:15.493650Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-31T16:14:15.493720Z node 15 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 104:0 ProgressState 2025-03-31T16:14:15.493814Z node 15 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-03-31T16:14:15.493877Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-31T16:14:15.493941Z node 15 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-03-31T16:14:15.494005Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-31T16:14:15.494079Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-03-31T16:14:15.494139Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-31T16:14:15.494204Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation 
id: 104:0 2025-03-31T16:14:15.494261Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-03-31T16:14:15.494443Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-03-31T16:14:15.494531Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2025-03-31T16:14:15.494602Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2025-03-31T16:14:15.494656Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2025-03-31T16:14:15.495202Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-03-31T16:14:15.495290Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-03-31T16:14:15.495330Z node 15 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2025-03-31T16:14:15.495396Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-03-31T16:14:15.495462Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-31T16:14:15.495846Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 104 2025-03-31T16:14:15.495920Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 104 2025-03-31T16:14:15.495949Z node 15 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-03-31T16:14:15.495981Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-03-31T16:14:15.496012Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:14:15.496097Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2025-03-31T16:14:15.498396Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-03-31T16:14:15.499878Z node 15 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-03-31T16:14:15.501391Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-03-31T16:14:15.501837Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 Forgetting tablet 72075186233409547 2025-03-31T16:14:15.502705Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-31T16:14:15.502784Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-03-31T16:14:15.502893Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:14:15.503388Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-03-31T16:14:15.503508Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-03-31T16:14:15.505378Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-03-31T16:14:15.505492Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-03-31T16:14:15.505817Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-03-31T16:14:15.506162Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-03-31T16:14:15.506237Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-03-31T16:14:15.506884Z node 15 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-03-31T16:14:15.507021Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-03-31T16:14:15.507116Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [15:571:2513] TestWaitNotification: OK eventTxId 104 >> GroupWriteTest::TwoTables ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::CreateAlterDropSolomon [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-31T16:13:57.208560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:13:57.208644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:13:57.208681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:13:57.208713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:13:57.208757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:13:57.208783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:13:57.208854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:13:57.208957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:13:57.209305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:13:57.293337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:13:57.293387Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:13:57.299481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:13:57.299741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:13:57.299902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:13:57.306387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:13:57.306666Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:13:57.307273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:13:57.307587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:13:57.309334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:13:57.310532Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:13:57.310609Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:13:57.310734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:13:57.310780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:13:57.310820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:13:57.311020Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:13:57.317389Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-03-31T16:13:57.446854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:13:57.447021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:13:57.447222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for 
pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:13:57.447454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:13:57.447523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:13:57.449833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:13:57.449951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:13:57.450150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:13:57.450206Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:13:57.450241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:13:57.450291Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:13:57.452804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:13:57.452901Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:13:57.452975Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:13:57.455467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:13:57.455534Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:13:57.455599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:13:57.455658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:13:57.460408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:13:57.462686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:13:57.462932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:13:57.464203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:13:57.464451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 
72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:13:57.464514Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:13:57.464849Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:13:57.464914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:13:57.465121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:13:57.465234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:13:57.467889Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:13:57.467957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:13:57.468163Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:13:57.468212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:13:57.468666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:13:57.468723Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:13:57.468860Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:13:57.468901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:13:57.468939Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:13:57.468972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:13:57.469014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:13:57.469058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:13:57.469097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:13:57.469133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:13:57.469212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:13:57.469258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:13:57.469295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:13:57.480147Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:13:57.480311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, 
at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:13:57.480353Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... ], 18446744073709551615 2025-03-31T16:14:16.066142Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-03-31T16:14:16.066258Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-03-31T16:14:16.066311Z node 15 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2025-03-31T16:14:16.066388Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-03-31T16:14:16.066481Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-03-31T16:14:16.067161Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2025-03-31T16:14:16.067255Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2025-03-31T16:14:16.067288Z node 15 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2025-03-31T16:14:16.067321Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-03-31T16:14:16.067358Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:14:16.067445Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2025-03-31T16:14:16.071369Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2025-03-31T16:14:16.071465Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-03-31T16:14:16.071498Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2025-03-31T16:14:16.071532Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-03-31T16:14:16.072319Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-03-31T16:14:16.072590Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-03-31T16:14:16.072923Z node 15 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2025-03-31T16:14:16.073200Z node 15 :HIVE INFO: 
[72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-03-31T16:14:16.073389Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-03-31T16:14:16.073868Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409548 2025-03-31T16:14:16.076060Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-03-31T16:14:16.076425Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409546 2025-03-31T16:14:16.077450Z node 15 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 2025-03-31T16:14:16.077619Z node 15 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-03-31T16:14:16.077757Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-03-31T16:14:16.077990Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 Forgetting tablet 72075186233409549 2025-03-31T16:14:16.078977Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-03-31T16:14:16.079235Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 Forgetting tablet 72075186233409547 2025-03-31T16:14:16.080374Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-31T16:14:16.080460Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-31T16:14:16.080563Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:14:16.083705Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-03-31T16:14:16.083808Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-03-31T16:14:16.084732Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-03-31T16:14:16.084775Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-03-31T16:14:16.086293Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-03-31T16:14:16.086335Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-03-31T16:14:16.086414Z node 15 
:FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-03-31T16:14:16.086491Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-03-31T16:14:16.086857Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-03-31T16:14:16.087322Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-03-31T16:14:16.087397Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-03-31T16:14:16.088067Z node 15 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-03-31T16:14:16.088207Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-03-31T16:14:16.088267Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [15:532:2486] TestWaitNotification: OK eventTxId 103 2025-03-31T16:14:16.089043Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Solomon" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:14:16.089363Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Solomon" took 353us result status StatusPathDoesNotExist 2025-03-31T16:14:16.089597Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Solomon\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Solomon" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted wait until 72075186233409549 is deleted 2025-03-31T16:14:16.090197Z node 15 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2025-03-31T16:14:16.090364Z node 15 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2025-03-31T16:14:16.090426Z node 15 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 2025-03-31T16:14:16.090535Z node 15 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409549 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 Deleted tabletId 72075186233409549 2025-03-31T16:14:16.091288Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 
72057594046678944 2025-03-31T16:14:16.091584Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 321us result status StatusSuccess 2025-03-31T16:14:16.092133Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardTest::CreateSystemColumn [GOOD] >> TSchemeShardTest::BackupBackupCollection-WithIncremental-false [GOOD] >> TSchemeShardTest::BackupBackupCollection-WithIncremental-true >> TSchemeShardTest::AdoptDropSolomon [GOOD] >> TSchemeShardTest::AlterTableAndAfterSplit >> KqpWorkloadService::WorkloadServiceDisabledByFeatureFlagOnServerless [GOOD] >> KqpWorkloadService::WorkloadServiceDisabledByInvalidDatabasePath |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v0-std] [GOOD] >> TSchemeShardTest::PQGroupExplicitChannels [GOOD] >> TSchemeShardTest::ReadOnlyMode >> KqpJoinOrder::TPCDS61+ColumnStore >> TNetClassifierTest::TestInitFromBadlyFormattedFile [GOOD] >> TNetClassifierTest::TestInitFromFile [GOOD] >> KqpWorkloadServiceActors::TestPoolFetcherNotExistingPool [GOOD] >> KqpWorkloadServiceActors::TestDefaultPoolUsePermissions |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v0-by_deduplication_id] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v0-content_based] >> Secret::Simple ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::CreateSystemColumn [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:13:53.830737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, 
RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:13:53.830850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:13:53.830884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:13:53.830921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:13:53.830961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:13:53.830989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:13:53.831052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:13:53.831199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:13:53.831539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:13:53.921019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:13:53.921070Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:13:53.925822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:13:53.925930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:13:53.926130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:13:53.933191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:13:53.939490Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:13:53.944558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:13:53.946121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:13:53.955328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:13:53.970564Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:13:53.970684Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:13:53.970883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:13:53.970937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:13:53.970997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:13:53.971166Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:13:53.984429Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 
2025-03-31T16:13:54.143161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:13:54.143377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:13:54.143544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:13:54.143750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:13:54.143804Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:13:54.147977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:13:54.148098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:13:54.148286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:13:54.148386Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:13:54.148433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:13:54.148479Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:13:54.151846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:13:54.151926Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:13:54.151977Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:13:54.155864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:13:54.155914Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:13:54.155977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:13:54.156045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:13:54.159383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:13:54.163807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:13:54.164055Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:13:54.164979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:13:54.165073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:13:54.165113Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:13:54.165343Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:13:54.165379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:13:54.165503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:13:54.165560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:13:54.168310Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:13:54.168354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:13:54.168494Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:13:54.168539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:13:54.168850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:13:54.168904Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:13:54.169008Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:13:54.169033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:13:54.169064Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:13:54.169085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:13:54.169120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:13:54.169145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:13:54.169168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:13:54.169188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:13:54.169235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:13:54.169260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:13:54.169284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:13:54.170882Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:13:54.171014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:13:54.171048Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... eadyToDone TxId: 102 ready parts: 1/1 2025-03-31T16:14:17.135249Z node 15 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-31T16:14:17.135311Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T16:14:17.135381Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-03-31T16:14:17.135497Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [15:340:2319] message: TxId: 102 2025-03-31T16:14:17.135582Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T16:14:17.135652Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-03-31T16:14:17.135715Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-03-31T16:14:17.135889Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-31T16:14:17.138092Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-31T16:14:17.138167Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [15:341:2320] TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 2025-03-31T16:14:17.141814Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "SystemColumnInCopyAllowed" CopyFromTable: "/MyRoot/SystemColumnAllowed" } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:14:17.142192Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: TCopyTable Propose, path: /MyRoot/SystemColumnInCopyAllowed, opId: 103:0, at schemeshard: 72057594046678944 2025-03-31T16:14:17.142858Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: SystemColumnInCopyAllowed, child id: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-03-31T16:14:17.142956Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 0 2025-03-31T16:14:17.143017Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 
2025-03-31T16:14:17.143184Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-03-31T16:14:17.143312Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-31T16:14:17.143557Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:14:17.144168Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:14:17.144243Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-03-31T16:14:17.147706Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusAccepted TxId: 103 SchemeshardId: 72057594046678944 PathId: 3, at schemeshard: 72057594046678944 2025-03-31T16:14:17.147948Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /MyRoot/SystemColumnInCopyAllowed 2025-03-31T16:14:17.148252Z node 15 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:14:17.148310Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:14:17.148562Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-03-31T16:14:17.148681Z node 15 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:14:17.148752Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [15:205:2207], at schemeshard: 72057594046678944, txId: 103, path id: 1 2025-03-31T16:14:17.148823Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [15:205:2207], at schemeshard: 72057594046678944, txId: 103, path id: 3 2025-03-31T16:14:17.149426Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-31T16:14:17.149516Z node 15 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 103:0 ProgressState, operation type: TxCopyTable, at tablet# 72057594046678944 2025-03-31T16:14:17.149849Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 103:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046678944 OwnerIdx: 2 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 3 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2025-03-31T16:14:17.150784Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 103 2025-03-31T16:14:17.150937Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 103 
2025-03-31T16:14:17.150997Z node 15 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-03-31T16:14:17.151069Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2025-03-31T16:14:17.151195Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-03-31T16:14:17.152465Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 1 PathOwnerId: 72057594046678944, cookie: 103 2025-03-31T16:14:17.152552Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 1 PathOwnerId: 72057594046678944, cookie: 103 2025-03-31T16:14:17.152585Z node 15 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-03-31T16:14:17.152619Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 1 2025-03-31T16:14:17.152650Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-03-31T16:14:17.152730Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-03-31T16:14:17.154586Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 103:0 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:2 msg type: 268697601 2025-03-31T16:14:17.154807Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 103, partId: 0, tablet: 72057594037968897 2025-03-31T16:14:17.154965Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 103, shardIdx: 72057594046678944:2, partId: 0 2025-03-31T16:14:17.155410Z node 15 :HIVE INFO: [72057594037968897] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 2 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 3 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2025-03-31T16:14:17.155710Z node 15 :HIVE INFO: [72057594037968897] TEvCreateTablet, Owner 72057594046678944, OwnerIdx 2, type DataShard, boot OK, tablet id 72075186233409547 2025-03-31T16:14:17.155952Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvCreateTabletReply at schemeshard: 72057594046678944 message: Status: OK Owner: 72057594046678944 OwnerIdx: 2 TabletID: 72075186233409547 Origin: 72057594037968897 2025-03-31T16:14:17.156028Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByShardIdx, TxId: 103, shardIdx: 72057594046678944:2, partId: 0 2025-03-31T16:14:17.156223Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: Status: OK Owner: 72057594046678944 OwnerIdx: 2 TabletID: 72075186233409547 Origin: 72057594037968897 2025-03-31T16:14:17.156306Z node 15 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 103:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046678944 2025-03-31T16:14:17.156431Z node 15 
:FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 103:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046678944 OwnerIdx: 2 TabletID: 72075186233409547 Origin: 72057594037968897 2025-03-31T16:14:17.156559Z node 15 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 2 -> 3 2025-03-31T16:14:17.161426Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-03-31T16:14:17.161757Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-03-31T16:14:17.163980Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-31T16:14:17.164196Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-31T16:14:17.164280Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TCopyTable TConfigureParts operationId# 103:0 ProgressState at tablet# 72057594046678944 2025-03-31T16:14:17.164386Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TCopyTable TConfigureParts operationId# 103:0 Propose modify scheme on dstDatashard# 72075186233409547 idx# 72057594046678944:2 srcDatashard# 72075186233409546 idx# 72057594046678944:1 operationId# 103:0 seqNo# 2:2 at tablet# 72057594046678944 2025-03-31T16:14:17.169634Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 103:0 from tablet: 72057594046678944 to tablet: 72075186233409547 cookie: 72057594046678944:2 msg type: 269549568 2025-03-31T16:14:17.169804Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 103:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 269549568 2025-03-31T16:14:17.169885Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 103, partId: 0, tablet: 72075186233409547 2025-03-31T16:14:17.169915Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 103, partId: 0, tablet: 72075186233409546 TestModificationResult got TxId: 103, wait until txId: 103 |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_works[tables_format_v1] [GOOD] |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> TJaegerTracingConfiguratorTests::DefaultConfig >> TConsoleTests::TestCreateTenant >> TConsoleTests::TestGetUnknownTenantStatus ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest >> TNetClassifierTest::TestInitFromFile [GOOD] Test command err: 2025-03-31T16:14:15.167022Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488007713103859291:2197];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:14:15.167118Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a3c/r3tmp/tmp4tpI7J/pdisk_1.dat 2025-03-31T16:14:15.561925Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:14:15.604838Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:14:15.604965Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:14:15.630431Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:14:15.648664Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/0l5c/000a3c/r3tmp/yandexuBDavs.tmp 2025-03-31T16:14:15.648689Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/0l5c/000a3c/r3tmp/yandexuBDavs.tmp 2025-03-31T16:14:15.650441Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/0l5c/000a3c/r3tmp/yandexuBDavs.tmp 2025-03-31T16:14:15.650730Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest >> TNetClassifierTest::TestInitFromBadlyFormattedFile [GOOD] Test command err: 2025-03-31T16:14:15.174045Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488007711516951556:2095];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:14:15.175468Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a13/r3tmp/tmpLW35TG/pdisk_1.dat 2025-03-31T16:14:15.579467Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:14:15.585644Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:14:15.592714Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:14:15.606727Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:14:15.648545Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/0l5c/000a13/r3tmp/yandexSK10o9.tmp 2025-03-31T16:14:15.648577Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/0l5c/000a13/r3tmp/yandexSK10o9.tmp 2025-03-31T16:14:15.650282Z node 1 :NET_CLASSIFIER ERROR: invalid NetData format 2025-03-31T16:14:15.650331Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: /home/runner/.ya/build/build_root/0l5c/000a13/r3tmp/yandexSK10o9.tmp 2025-03-31T16:14:15.650501Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration >> KqpWorkloadServiceDistributed::TestDistributedQueue [GOOD] >> KqpWorkloadServiceDistributed::TestNodeDisconnect >> ResourcePoolsDdl::TestPoolSwitchToUnlimitedState [GOOD] >> ResourcePoolsDdl::TestResourcePoolAcl >> TSchemeShardTest::ReadOnlyMode [GOOD] >> TSchemeShardTest::PathErrors |86.4%| [TA] $(B)/ydb/core/mind/address_classification/ut/test-results/unittest/{meta.json ... results_accumulator.log} |86.4%| [TA] {RESULT} $(B)/ydb/core/mind/address_classification/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpJoinOrder::TPCDS88+ColumnStore >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] [GOOD] >> TSchemeShardTest::AlterTableAndAfterSplit [GOOD] >> TSchemeShardTest::AlterIndexTableDirectly >> Secret::Deactivated |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> TJaegerTracingConfiguratorTests::DefaultConfig [GOOD] >> TJaegerTracingConfiguratorTests::GlobalRules >> TSchemeShardCheckProposeSize::CopyTables [GOOD] >> TSchemeShardDecimalTypesInTables::Parameterless >> KqpJoin::JoinAggregateSingleRow >> KqpWorkloadService::TestQueueSizeManyQueries [GOOD] >> KqpWorkloadService::TestZeroQueueSize >> Secret::Validation |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> TSchemeShardTest::PathErrors [GOOD] >> TSchemeShardTest::NestedDirs >> ResourcePoolClassifiersDdl::TestCreateResourcePoolClassifier [GOOD] >> ResourcePoolClassifiersDdl::TestCreateResourcePoolClassifierOnServerless >> TSchemeShardTest::BackupBackupCollection-WithIncremental-true [GOOD] >> TJaegerTracingConfiguratorTests::GlobalRules [GOOD] >> TJaegerTracingConfiguratorTests::ExternalTracePlusSampling >> TSchemeShardDecimalTypesInTables::Parameterless [GOOD] >> TSchemeShardDecimalTypesInTables::Parameters_22_9-EnableParameterizedDecimal-false >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_partial_delete_works[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v0-fifo] >> TReplicaCombinationTest::UpdatesCombinationsDomainRoot ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::BackupBackupCollection-WithIncremental-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:14:00.498521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:14:00.498647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:14:00.498684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:14:00.498718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:14:00.498765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:14:00.498791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:14:00.498848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:14:00.498916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 
604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:14:00.499300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:14:00.578071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:14:00.578129Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:14:00.582949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:14:00.583054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:14:00.583267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:14:00.588960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:14:00.589172Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:14:00.589761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:14:00.589998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:14:00.591987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:14:00.593349Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:14:00.593436Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:14:00.593664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:14:00.593717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:14:00.593765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:14:00.593940Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:14:00.600942Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:14:00.737106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:14:00.737322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:14:00.737511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:14:00.737801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:14:00.737862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:14:00.740882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, 
response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:14:00.741026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:14:00.741211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:14:00.741262Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:14:00.741354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:14:00.741391Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:14:00.743159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:14:00.743205Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:14:00.743237Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:14:00.745198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:14:00.745241Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:14:00.745310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:14:00.745363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:14:00.748863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:14:00.750750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:14:00.750955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:14:00.752032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:14:00.752169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:14:00.752236Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:14:00.752457Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:14:00.752504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at 
tablet# 72057594046678944 2025-03-31T16:14:00.752678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:14:00.752756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:14:00.754825Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:14:00.754861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:14:00.755197Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:14:00.755243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:14:00.755442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:14:00.755483Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:14:00.755602Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:14:00.755638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:14:00.755679Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:14:00.755711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:14:00.755744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:14:00.755778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:14:00.755808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:14:00.755834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:14:00.755889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:14:00.755923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:14:00.755969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:14:00.757808Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:14:00.757901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:14:00.757934Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 32 PathsLimit: 10000 ShardsInside: 18 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__incremental_backup" Value: "{}" } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 31 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:14:20.625645Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:14:20.625890Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA" took 264us result status StatusSuccess 2025-03-31T16:14:20.626330Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA" PathDescription { Self { Name: "DirA" PathId: 29 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 109 CreateStep: 5000010 ParentPathId: 28 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 7 } ChildrenExist: true } Children { Name: "DirB" PathId: 30 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 109 CreateStep: 5000010 ParentPathId: 29 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: "Table2" PathId: 32 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 109 CreateStep: 5000010 ParentPathId: 29 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 32 PathsLimit: 10000 ShardsInside: 18 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 29 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:14:20.627514Z node 15 
:SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA/Table2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:14:20.627885Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA/Table2" took 380us result status StatusSuccess 2025-03-31T16:14:20.628361Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA/Table2" PathDescription { Self { Name: "Table2" PathId: 32 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 109 CreateStep: 5000010 ParentPathId: 29 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table2" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_deleted" Type: "Bool" TypeId: 6 Id: 4 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 32 PathsLimit: 10000 ShardsInside: 18 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__incremental_backup" Value: "{}" } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 32 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:14:20.629731Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA/DirB" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 
2025-03-31T16:14:20.629999Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA/DirB" took 295us result status StatusSuccess 2025-03-31T16:14:20.630409Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA/DirB" PathDescription { Self { Name: "DirB" PathId: 30 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 109 CreateStep: 5000010 ParentPathId: 29 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: true } Children { Name: "Table3" PathId: 33 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 109 CreateStep: 5000010 ParentPathId: 30 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 32 PathsLimit: 10000 ShardsInside: 18 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 30 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:14:20.631658Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA/DirB/Table3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:14:20.632028Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA/DirB/Table3" took 411us result status StatusSuccess 2025-03-31T16:14:20.632540Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA/DirB/Table3" PathDescription { Self { Name: "Table3" PathId: 33 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 109 CreateStep: 5000010 ParentPathId: 30 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table3" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_deleted" Type: "Bool" TypeId: 6 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY 
ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 32 PathsLimit: 10000 ShardsInside: 18 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__incremental_backup" Value: "{}" } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 33 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TJaegerTracingConfiguratorTests::ExternalTracePlusSampling [GOOD] >> TJaegerTracingConfiguratorTests::RequestTypeThrottler >> KqpWorkloadService::WorkloadServiceDisabledByInvalidDatabasePath [GOOD] >> KqpWorkloadService::TestZeroQueueSizeManyQueries >> TReplicaCombinationTest::UpdatesCombinationsDomainRoot [GOOD] >> TReplicaCombinationTest::UpdatesCombinationsMigratedPath >> TSchemeShardDecimalTypesInTables::Parameters_22_9-EnableParameterizedDecimal-false [GOOD] >> TSchemeShardDecimalTypesInTables::Parameters_22_9-EnableParameterizedDecimal-true >> TConsoleTests::TestGetUnknownTenantStatus [GOOD] >> TConsoleTests::TestGetUnknownTenantStatusExtSubdomain >> KqpWorkloadService::TestQueryCancelAfterPoolWithLimits [GOOD] >> KqpWorkloadService::TestLargeConcurrentQueryLimit >> KqpWorkloadServiceActors::TestDefaultPoolUsePermissions [GOOD] >> KqpWorkloadServiceActors::TestDefaultPoolAdminPermissions |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v0-fifo] [GOOD] >> TReplicaCombinationTest::UpdatesCombinationsMigratedPath [GOOD] >> TReplicaCombinationTest::MigratedPathRecreation >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v0-std] >> TSchemeShardTest::NestedDirs [GOOD] >> TSchemeShardTest::NewOwnerOnDatabase >> TReplicaCombinationTest::MigratedPathRecreation [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v0-content_based] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v1-by_deduplication_id] >> TJaegerTracingConfiguratorTests::RequestTypeThrottler [GOOD] >> TJaegerTracingConfiguratorTests::RequestTypeSampler >> TSchemeShardDecimalTypesInTables::Parameters_22_9-EnableParameterizedDecimal-true [GOOD] >> TSchemeShardDecimalTypesInTables::Parameters_35_6-EnableParameterizedDecimal-false ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaCombinationTest::MigratedPathRecreation [GOOD] Test command err: 
2025-03-31T16:14:22.005706Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:7:2054] 2025-03-31T16:14:22.005796Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Successful handshake: owner# 800, generation# 1 2025-03-31T16:14:22.005907Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:7:2054] 2025-03-31T16:14:22.005942Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Commit generation: owner# 800, generation# 1 2025-03-31T16:14:22.006030Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:8:2055] 2025-03-31T16:14:22.006057Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Successful handshake: owner# 800, generation# 1 2025-03-31T16:14:22.006115Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:8:2055] 2025-03-31T16:14:22.006160Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Commit generation: owner# 800, generation# 1 2025-03-31T16:14:22.006374Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:7:2054], cookie# 0, event size# 103 2025-03-31T16:14:22.006447Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Update description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false 2025-03-31T16:14:22.013566Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path /Root/Tenant, PathId [OwnerId: 800, LocalPathId: 2], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2025-03-31T16:14:22.013775Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:8:2055], cookie# 0, event size# 103 2025-03-31T16:14:22.013811Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Update description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false 2025-03-31T16:14:22.013868Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path /Root/Tenant, PathId [OwnerId: 800, LocalPathId: 2], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2025-03-31T16:14:22.014000Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant DomainOwnerId: 0 }: sender# [1:9:2056] 2025-03-31T16:14:22.014110Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Subscribe: subscriber# [1:9:2056], path# /Root/Tenant, domainOwnerId# 0, capabilities# =========== Path: "/Root/Tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 =========== Path: "/Root/Tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 2025-03-31T16:14:22.056854Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:10:2057] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:11:2058] 2025-03-31T16:14:22.056913Z node 1 
:SCHEME_BOARD_REPLICA NOTICE: [1:10:2057] Successful handshake: owner# 800, generation# 1 2025-03-31T16:14:22.056991Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:10:2057] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:11:2058] 2025-03-31T16:14:22.057020Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:10:2057] Commit generation: owner# 800, generation# 1 2025-03-31T16:14:22.057092Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:10:2057] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 900 Generation: 1 }: sender# [1:12:2059] 2025-03-31T16:14:22.057121Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:10:2057] Successful handshake: owner# 900, generation# 1 2025-03-31T16:14:22.057196Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:10:2057] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 900 Generation: 1 }: sender# [1:12:2059] 2025-03-31T16:14:22.057224Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:10:2057] Commit generation: owner# 900, generation# 1 2025-03-31T16:14:22.057314Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:10:2057] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:11:2058], cookie# 0, event size# 103 2025-03-31T16:14:22.057344Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:10:2057] Update description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false 2025-03-31T16:14:22.057408Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:10:2057] Upsert description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path /Root/Tenant, PathId [OwnerId: 800, LocalPathId: 2], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2025-03-31T16:14:22.057487Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:10:2057] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 900 Generation: 1 }: sender# [1:12:2059], cookie# 0, event size# 103 2025-03-31T16:14:22.057512Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:10:2057] Update description: path# /Root/Tenant, pathId# [OwnerId: 900, LocalPathId: 1], deletion# false 2025-03-31T16:14:22.057551Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:10:2057] Replace GSS by TSS description: path# /Root/Tenant, pathId# [OwnerId: 900, LocalPathId: 1], domainId# [OwnerId: 800, LocalPathId: 2], curPathId# [OwnerId: 800, LocalPathId: 2], curDomainId# [OwnerId: 800, LocalPathId: 2] 2025-03-31T16:14:22.057610Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:10:2057] Upsert description: path# /Root/Tenant, pathId# [OwnerId: 900, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path /Root/Tenant, PathId [OwnerId: 900, LocalPathId: 1], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2025-03-31T16:14:22.057704Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:10:2057] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant DomainOwnerId: 0 }: sender# [1:13:2060] 2025-03-31T16:14:22.057755Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:10:2057] Subscribe: subscriber# [1:13:2060], path# /Root/Tenant, domainOwnerId# 0, capabilities# =========== Path: "/Root/Tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 =========== Path: "/Root/Tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 1 PathOwnerId: 900 2025-03-31T16:14:22.058097Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:14:2061] Handle 
NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:15:2062] 2025-03-31T16:14:22.058136Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:14:2061] Successful handshake: owner# 800, generation# 1 2025-03-31T16:14:22.058195Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:14:2061] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:15:2062] 2025-03-31T16:14:22.058218Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:14:2061] Commit generation: owner# 800, generation# 1 2025-03-31T16:14:22.058269Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:14:2061] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:16:2063] 2025-03-31T16:14:22.058290Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:14:2061] Successful handshake: owner# 800, generation# 1 2025-03-31T16:14:22.058349Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:14:2061] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:16:2063] 2025-03-31T16:14:22.058373Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:14:2061] Commit generation: owner# 800, generation# 1 2025-03-31T16:14:22.058430Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:14:2061] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:15:2062], cookie# 0, event size# 103 2025-03-31T16:14:22.058490Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:14:2061] Update description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false 2025-03-31T16:14:22.058540Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:14:2061] Upsert description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path /Root/Tenant, PathId [OwnerId: 800, LocalPathId: 2], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2025-03-31T16:14:22.058622Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:14:2061] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:16:2063], cookie# 0, event size# 103 2025-03-31T16:14:22.058650Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:14:2061] Update description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false 2025-03-31T16:14:22.058688Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:14:2061] Upsert description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path /Root/Tenant, PathId [OwnerId: 800, LocalPathId: 2], PathVersion 2, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2025-03-31T16:14:22.058754Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:14:2061] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant DomainOwnerId: 0 }: sender# [1:17:2064] 2025-03-31T16:14:22.058787Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:14:2061] Subscribe: subscriber# [1:17:2064], path# /Root/Tenant, domainOwnerId# 0, capabilities# =========== Path: "/Root/Tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 =========== Path: "/Root/Tenant" PathDescription { Self { PathVersion: 2 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 2025-03-31T16:14:22.059116Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:18:2065] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:19:2066] 2025-03-31T16:14:22.059156Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:18:2065] Successful handshake: owner# 800, 
generation# 1 2025-03-31T16:14:22.059218Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:18:2065] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:19:2066] 2025-03-31T16:14:22.059242Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:18:2065] Commit generation: owner# 800, generation# 1 2025-03-31T16:14:22.059289Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:18:2065] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 900 Generation: 1 }: sender# [1:20:2067] 2025-03-31T16:14:22.059310Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:18:2065] Successful handshake: owner# 900, generation# 1 2025-03-31T16:14:22.059347Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:18:2065] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 900 Generation: 1 }: sender# [1:20:2067] 2025-03-31T16:14:22.059366Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:18:2065] Commit generation: owner# 900, generation# 1 2025-03-31T16:14:22.059435Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:18:2065] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:19:2066], cookie# 0, event size# 103 2025-03-31T16:14:22.059457Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:18:2065] Update description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false 2025-03-31T16:14:22.059498Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:18:2065] Upsert description: path# /Root/Te ... ble_inside, pathId# [OwnerId: 910, LocalPathId: 9] 2025-03-31T16:14:22.685596Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:394:2441] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 0 }: sender# [2:397:2444] 2025-03-31T16:14:22.685620Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:394:2441] Upsert description: path# /Root/Tenant/table_inside 2025-03-31T16:14:22.685686Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:394:2441] Subscribe: subscriber# [2:397:2444], path# /Root/Tenant/table_inside, domainOwnerId# 0, capabilities# =========== Left ==Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== Right ==Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== super id == DomainId: [OwnerId: 800, LocalPathId: 333] IsDeletion: 1 PathId: [OwnerId: 910, LocalPathId: 9] Verions: 18446744073709551615 =========== WIN ==/Root/Tenant/table_inside PathID: [OwnerId: 0, LocalPathId: 0] deleted: 1 version: 0 domainId: [OwnerId: 0, LocalPathId: 0] 2025-03-31T16:14:22.688230Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:398:2445] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [2:399:2446] 2025-03-31T16:14:22.688274Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:398:2445] Successful handshake: owner# 910, generation# 1 2025-03-31T16:14:22.688345Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:398:2445] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [2:399:2446] 2025-03-31T16:14:22.688372Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:398:2445] Commit generation: owner# 910, generation# 1 2025-03-31T16:14:22.688425Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:398:2445] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [2:400:2447] 2025-03-31T16:14:22.688448Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:398:2445] Successful handshake: owner# 910, generation# 1 2025-03-31T16:14:22.688491Z node 2 :SCHEME_BOARD_REPLICA DEBUG: 
[2:398:2445] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [2:400:2447] 2025-03-31T16:14:22.688519Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:398:2445] Commit generation: owner# 910, generation# 1 2025-03-31T16:14:22.688591Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:398:2445] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 910 Generation: 1 }: sender# [2:399:2446], cookie# 0, event size# 64 2025-03-31T16:14:22.688619Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:398:2445] Update description: path# /Root/Tenant/table_inside, pathId# [OwnerId: 910, LocalPathId: 9], deletion# true 2025-03-31T16:14:22.688643Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:398:2445] Upsert description: path# [OwnerId: 910, LocalPathId: 9] 2025-03-31T16:14:22.688732Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:398:2445] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 910 Generation: 1 }: sender# [2:400:2447], cookie# 0, event size# 130 2025-03-31T16:14:22.688774Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:398:2445] Update description: path# /Root/Tenant/table_inside, pathId# [OwnerId: 910, LocalPathId: 9], deletion# false 2025-03-31T16:14:22.688802Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:398:2445] Path was explicitly deleted, ignoring: path# /Root/Tenant/table_inside, pathId# [OwnerId: 910, LocalPathId: 9] 2025-03-31T16:14:22.688881Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:398:2445] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 0 }: sender# [2:401:2448] 2025-03-31T16:14:22.688910Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:398:2445] Upsert description: path# /Root/Tenant/table_inside 2025-03-31T16:14:22.688954Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:398:2445] Subscribe: subscriber# [2:401:2448], path# /Root/Tenant/table_inside, domainOwnerId# 0, capabilities# =========== Left ==Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== Right ==Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 2 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== super id == DomainId: [OwnerId: 800, LocalPathId: 333] IsDeletion: 1 PathId: [OwnerId: 910, LocalPathId: 9] Verions: 18446744073709551615 =========== WIN ==/Root/Tenant/table_inside PathID: [OwnerId: 0, LocalPathId: 0] deleted: 1 version: 0 domainId: [OwnerId: 0, LocalPathId: 0] 2025-03-31T16:14:22.691514Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:402:2449] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [2:403:2450] 2025-03-31T16:14:22.691566Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:402:2449] Successful handshake: owner# 910, generation# 1 2025-03-31T16:14:22.691632Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:402:2449] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [2:403:2450] 2025-03-31T16:14:22.691658Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:402:2449] Commit generation: owner# 910, generation# 1 2025-03-31T16:14:22.691719Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:402:2449] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [2:404:2451] 2025-03-31T16:14:22.691753Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:402:2449] Successful handshake: owner# 910, generation# 1 2025-03-31T16:14:22.691804Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:402:2449] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [2:404:2451] 
2025-03-31T16:14:22.691828Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:402:2449] Commit generation: owner# 910, generation# 1 2025-03-31T16:14:22.691907Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:402:2449] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 910 Generation: 1 }: sender# [2:403:2450], cookie# 0, event size# 64 2025-03-31T16:14:22.691934Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:402:2449] Update description: path# /Root/Tenant/table_inside, pathId# [OwnerId: 910, LocalPathId: 9], deletion# true 2025-03-31T16:14:22.691958Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:402:2449] Upsert description: path# [OwnerId: 910, LocalPathId: 9] 2025-03-31T16:14:22.692035Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:402:2449] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 910 Generation: 1 }: sender# [2:404:2451], cookie# 0, event size# 64 2025-03-31T16:14:22.692067Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:402:2449] Update description: path# /Root/Tenant/table_inside, pathId# [OwnerId: 910, LocalPathId: 9], deletion# true 2025-03-31T16:14:22.692133Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:402:2449] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 0 }: sender# [2:405:2452] 2025-03-31T16:14:22.692161Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:402:2449] Upsert description: path# /Root/Tenant/table_inside 2025-03-31T16:14:22.692207Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:402:2449] Subscribe: subscriber# [2:405:2452], path# /Root/Tenant/table_inside, domainOwnerId# 0, capabilities# =========== Left ==Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== Right ==Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== super id == DomainId: [OwnerId: 800, LocalPathId: 333] IsDeletion: 1 PathId: [OwnerId: 910, LocalPathId: 9] Verions: 18446744073709551615 =========== WIN ==/Root/Tenant/table_inside PathID: [OwnerId: 0, LocalPathId: 0] deleted: 1 version: 0 domainId: [OwnerId: 0, LocalPathId: 0] 2025-03-31T16:14:22.980773Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [3:7:2054] 2025-03-31T16:14:22.980851Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Successful handshake: owner# 800, generation# 1 2025-03-31T16:14:22.980969Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [3:7:2054] 2025-03-31T16:14:22.981011Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Commit generation: owner# 800, generation# 1 2025-03-31T16:14:22.981097Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 900 Generation: 1 }: sender# [3:8:2055] 2025-03-31T16:14:22.981130Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Successful handshake: owner# 900, generation# 1 2025-03-31T16:14:22.981190Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 900 Generation: 1 }: sender# [3:8:2055] 2025-03-31T16:14:22.981237Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Commit generation: owner# 900, generation# 1 2025-03-31T16:14:22.981407Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [3:7:2054], cookie# 0, event 
size# 118 2025-03-31T16:14:22.981445Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description: path# /root/db/dir_inside, pathId# [OwnerId: 800, LocalPathId: 1111], deletion# false 2025-03-31T16:14:22.981521Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Upsert description: path# /root/db/dir_inside, pathId# [OwnerId: 800, LocalPathId: 1111], pathDescription# {Status StatusSuccess, Path /root/db/dir_inside, PathId [OwnerId: 800, LocalPathId: 1111], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 1], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 67} 2025-03-31T16:14:22.981623Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 900 Generation: 1 }: sender# [3:8:2055], cookie# 0, event size# 117 2025-03-31T16:14:22.981685Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description: path# /root/db/dir_inside, pathId# [OwnerId: 900, LocalPathId: 11], deletion# false 2025-03-31T16:14:22.981740Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description by newest path form tenant schemeshard: path# /root/db/dir_inside, pathId# [OwnerId: 900, LocalPathId: 11], domainId# [OwnerId: 800, LocalPathId: 1], curPathId# [OwnerId: 800, LocalPathId: 1111], curDomainId# [OwnerId: 800, LocalPathId: 1] 2025-03-31T16:14:22.981777Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Delete description: path# /root/db/dir_inside, pathId# [OwnerId: 800, LocalPathId: 1111] 2025-03-31T16:14:22.981838Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Upsert description: path# /root/db/dir_inside, pathId# [OwnerId: 900, LocalPathId: 11], pathDescription# {Status StatusSuccess, Path /root/db/dir_inside, PathId [OwnerId: 900, LocalPathId: 11], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 1], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 67} 2025-03-31T16:14:22.981965Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/db/dir_inside DomainOwnerId: 0 }: sender# [3:9:2056] 2025-03-31T16:14:22.982025Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Subscribe: subscriber# [3:9:2056], path# /root/db/dir_inside, domainOwnerId# 0, capabilities# =========== Path: "/root/db/dir_inside" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 1 } } } PathId: 1111 PathOwnerId: 800 =========== Path: "/root/db/dir_inside" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 1 } } } PathId: 11 PathOwnerId: 900 =========== DomainId: [OwnerId: 800, LocalPathId: 1] IsDeletion: 0 PathId: [OwnerId: 900, LocalPathId: 11] Versions: 1 >> KqpJoin::JoinLeftPureFull >> ResourcePoolsDdl::TestCreateResourcePoolOnServerless [GOOD] >> ResourcePoolsDdl::TestDefaultPoolRestrictions |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> TJaegerTracingConfiguratorTests::RequestTypeSampler [GOOD] >> TJaegerTracingConfiguratorTests::SamplingSameScope >> TConsoleTests::TestCreateTenant [GOOD] >> TConsoleTests::TestCreateTenantExtSubdomain >> TSchemeShardDecimalTypesInTables::Parameters_35_6-EnableParameterizedDecimal-false [GOOD] >> TSchemeShardDecimalTypesInTables::Parameters_35_6-EnableParameterizedDecimal-true |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> TSchemeShardTest::NewOwnerOnDatabase [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v0-std] [GOOD] |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v0-fifo] [GOOD] |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] [GOOD] >> TJaegerTracingConfiguratorTests::SamplingSameScope [GOOD] >> TJaegerTracingConfiguratorTests::ThrottlingByDb |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::NewOwnerOnDatabase [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:13:58.304687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:13:58.304775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:13:58.304816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:13:58.304850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:13:58.304893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:13:58.304925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:13:58.304988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:13:58.305074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:13:58.305406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:13:58.393587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:13:58.393653Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:13:58.399147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:13:58.399259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:13:58.399440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:13:58.407332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:13:58.407601Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:13:58.408283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, 
state: 1, at schemeshard: 72057594046678944 2025-03-31T16:13:58.408553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:13:58.410760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:13:58.412025Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:13:58.412096Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:13:58.412292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:13:58.412339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:13:58.412377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:13:58.412509Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:13:58.419698Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:13:58.561520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:13:58.561762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:13:58.561968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:13:58.562235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:13:58.562319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:13:58.568158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:13:58.568320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:13:58.568530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:13:58.568595Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:13:58.568633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:13:58.568691Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:13:58.570735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
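
The "Change state for txid 1:0 X -> Y" entries here and in the lines that follow walk one schemeshard operation through numbered states, 2 -> 3 -> 128 -> 240. A toy reconstruction of that progression is sketched below; the state names are inferred from the surrounding DEBUG/INFO messages (TCreateParts, TConfigureParts, TPropose, TDone), and the real enum has many more states.

    #include <iostream>

    // States inferred from the trace: TCreateParts logs 2 -> 3,
    // TConfigureParts logs 3 -> 128, TPropose logs 128 -> 240 (Done).
    enum class ETxState { CreateParts = 2, ConfigureParts = 3, Propose = 128, Done = 240 };

    ETxState Next(ETxState s) {
        switch (s) {
            case ETxState::CreateParts:    return ETxState::ConfigureParts;
            case ETxState::ConfigureParts: return ETxState::Propose;
            case ETxState::Propose:        return ETxState::Done;
            default:                       return s;
        }
    }

    int main() {
        for (ETxState s = ETxState::CreateParts; s != ETxState::Done; s = Next(s))
            std::cout << static_cast<int>(s) << " -> " << static_cast<int>(Next(s)) << "\n";
        // prints: 2 -> 3, 3 -> 128, 128 -> 240, matching the trace above
    }
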
2025-03-31T16:13:58.570779Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:13:58.570805Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:13:58.576087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:13:58.576147Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:13:58.576223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:13:58.576293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:13:58.586222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:13:58.591985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:13:58.592198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:13:58.593373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:13:58.593500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:13:58.593550Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:13:58.593815Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:13:58.593862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:13:58.594013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:13:58.594085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:13:58.596266Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:13:58.596333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:13:58.596534Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:13:58.596586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:13:58.596863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:13:58.596918Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:13:58.597051Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:13:58.597092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:13:58.597128Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:13:58.597161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:13:58.597198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:13:58.597239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:13:58.597275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:13:58.597304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:13:58.597386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:13:58.597426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:13:58.597453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:13:58.599759Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:13:58.599880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:13:58.599908Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
594037968897 2025-03-31T16:14:24.097606Z node 15 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 2 -> 3 2025-03-31T16:14:24.099374Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2025-03-31T16:14:24.101687Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2025-03-31T16:14:24.104471Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2025-03-31T16:14:24.104772Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2025-03-31T16:14:24.104854Z node 15 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId# 107:0 ProgressState at tabletId# 72057594046678944 2025-03-31T16:14:24.104978Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable TConfigureParts operationId# 107:0 ProgressState Propose modify scheme on datashard datashardId: 72075186233409549 seqNo: 2:1 2025-03-31T16:14:24.105489Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable TConfigureParts operationId# 107:0 ProgressState Propose modify scheme on datashard datashardId: 72075186233409549 message: TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 122 RawX2: 64424511588 } TxBody: "\n\223\004\n\005Table\020\005\032\r\n\003key\030\004 \001(\000@\000\032\020\n\005value\030\200$ \002(\000@\000(\001:\262\003\022\253\003\010\200\200\200\002\020\254\002\030\364\003 \200\200\200\010(\0000\200\200\200 8\200\200\200\010@\2008H\000RX\010\000\020\000\030\010 \010(\200\200\200@0\377\377\377\377\0178\001B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen1P\nX\200\200\001`nh\000p\000Rb\010\001\020\200\200\200\024\030\005 \020(\200\200\200\200\0020\377\377\377\377\0178\000B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen2P\nX\200\200\001`nh\200\200\200\004p\200\200\200\004Rc\010\002\020\200\200\200\310\001\030\005 \020(\200\200\200\200@0\377\377\377\377\0178\000B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen3P\nX\200\200\001`nh\200\200\200(p\200\200\200(X\001`\005j$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionr\017compaction_gen0z\017compaction_gen0\202\001\004scan\210\001\200\200\200\010\220\001\364\003\230\0012\270\001\2008\300\001\006R\002\020\001J\r/MyRoot/Table\242\001\006\001\000\000\000\000\200\252\001\000\260\001\001\270\001\000\210\002\001\222\002\013\t\240\207\205\000\000\000\000\001\020\005:\004\010\002\020\001" TxId: 107 ExecLevel: 0 Flags: 0 SchemeShardId: 72057594046678944 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } SubDomainPathId: 1 2025-03-31T16:14:24.110553Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 107:0 from tablet: 72057594046678944 to tablet: 72075186233409549 cookie: 72057594046678944:4 msg type: 269549568 2025-03-31T16:14:24.110776Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 107, partId: 0, tablet: 72075186233409549 TestModificationResult got TxId: 107, wait until txId: 107 TestModificationResults wait txId: 108 2025-03-31T16:14:24.212437Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpModifyACL ModifyACL { Name: "Table" NewOwner: "user1" } ApplyIf { PathTypes: 
EPathTypeSubDomain PathTypes: EPathTypeExtSubDomain } } TxId: 108 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:14:24.212764Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /MyRoot/Table, operationId: 108:0, at schemeshard: 72057594046678944 2025-03-31T16:14:24.212963Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 108:1, propose status:StatusPreconditionFailed, reason: fail in ApplyIf section: wrong Path type. Expected types: EPathTypeSubDomain, EPathTypeExtSubDomain; But actual Path type is EPathTypeTable, at schemeshard: 72057594046678944 2025-03-31T16:14:24.216645Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 108, response: Status: StatusPreconditionFailed Reason: "fail in ApplyIf section: wrong Path type. Expected types: EPathTypeSubDomain, EPathTypeExtSubDomain; But actual Path type is EPathTypeTable" TxId: 108 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:14:24.216906Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 108, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: fail in ApplyIf section: wrong Path type. Expected types: EPathTypeSubDomain, EPathTypeExtSubDomain; But actual Path type is EPathTypeTable, operation: MODIFY ACL, path: /MyRoot/Table, set owner:user1 TestModificationResult got TxId: 108, wait until txId: 108 TestModificationResults wait txId: 109 2025-03-31T16:14:24.221328Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpModifyACL ModifyACL { Name: "Table" NewOwner: "user1" } } TxId: 109 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:14:24.221601Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /MyRoot/Table, operationId: 109:0, at schemeshard: 72057594046678944 2025-03-31T16:14:24.221755Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 5] name: Table type: EPathTypeTable state: EPathStateCreate stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:14:24.221814Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-03-31T16:14:24.222062Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 109:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046678944 2025-03-31T16:14:24.222148Z node 15 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 109:0, at schemeshard: 72057594046678944 2025-03-31T16:14:24.222300Z node 15 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#109:0 progress is 1/1 2025-03-31T16:14:24.222375Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 109 ready parts: 1/1 2025-03-31T16:14:24.222475Z node 15 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#109:0 progress is 1/1 2025-03-31T16:14:24.222541Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 109 ready parts: 1/1 2025-03-31T16:14:24.222647Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-03-31T16:14:24.222747Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-03-31T16:14:24.222801Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation IsReadyToNotify, TxId: 109, ready parts: 1/1, is published: false 2025-03-31T16:14:24.222869Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-03-31T16:14:24.222937Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 109 ready parts: 1/1 2025-03-31T16:14:24.222999Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 109:0 2025-03-31T16:14:24.223060Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 109, publications: 2, subscribers: 0 2025-03-31T16:14:24.223233Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 109, [OwnerId: 72057594046678944, LocalPathId: 1], 14 2025-03-31T16:14:24.223282Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 109, [OwnerId: 72057594046678944, LocalPathId: 5], 2 2025-03-31T16:14:24.226607Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 109, response: Status: StatusSuccess TxId: 109 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:14:24.226868Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 109, database: /MyRoot, subject: , status: StatusSuccess, operation: MODIFY ACL, path: /MyRoot/Table, set owner:user1 2025-03-31T16:14:24.227218Z node 15 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:14:24.227299Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 109, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-03-31T16:14:24.227496Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 109, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:14:24.227721Z node 15 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:14:24.227818Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [15:205:2207], at schemeshard: 72057594046678944, txId: 109, path id: 5 2025-03-31T16:14:24.227903Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [15:205:2207], at schemeshard: 72057594046678944, txId: 109, path id: 1 2025-03-31T16:14:24.228733Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 109 2025-03-31T16:14:24.228913Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 109 2025-03-31T16:14:24.228984Z node 15 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 109 2025-03-31T16:14:24.229069Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 109, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 2 2025-03-31T16:14:24.229149Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-03-31T16:14:24.229906Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 
PathOwnerId: 72057594046678944, cookie: 109 2025-03-31T16:14:24.229993Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 109 2025-03-31T16:14:24.230024Z node 15 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 109 2025-03-31T16:14:24.230058Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 109, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 14 2025-03-31T16:14:24.230094Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2025-03-31T16:14:24.230182Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 109, subscribers: 0 2025-03-31T16:14:24.232634Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 109 2025-03-31T16:14:24.233699Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 109 TestModificationResult got TxId: 109, wait until txId: 109 >> TSchemeShardDecimalTypesInTables::Parameters_35_6-EnableParameterizedDecimal-true [GOOD] >> TSchemeShardDecimalTypesInTables::CreateWithWrongParameters >> TConsoleTests::TestGetUnknownTenantStatusExtSubdomain [GOOD] >> TConsoleTests::TestRestartConsoleAndPools >> KqpJoinOrder::GeneralPrioritiesBug4 |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> TSchemeShardDecimalTypesInTables::CreateWithWrongParameters [GOOD] >> TSchemeShardDecimalTypesInTables::AlterWithWrongParameters |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> TJaegerTracingConfiguratorTests::ThrottlingByDb [GOOD] >> TJaegerTracingConfiguratorTests::SamplingByDb |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> KqpWorkloadServiceActors::TestDefaultPoolAdminPermissions [GOOD] >> KqpWorkloadServiceDistributed::TestDistributedLargeConcurrentQueryLimit >> ResourcePoolsDdl::TestResourcePoolAcl [GOOD] >> ResourcePoolsSysView::TestResourcePoolsSysViewOnServerless >> TSchemeShardDecimalTypesInTables::AlterWithWrongParameters [GOOD] >> TSchemeShardInfoTypesTest::EmptyFamilies [GOOD] >> TSchemeShardInfoTypesTest::LostId [GOOD] >> TSchemeShardInfoTypesTest::DeduplicationOrder [GOOD] >> TSchemeShardInfoTypesTest::MultipleDeduplications [GOOD] >> TSchemeShardPgTypesInTables::CreateTableWithPgTypeColumn-EnableTablePgTypes-false |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> KqpFlipJoin::LeftSemi_2 >> TPersQueueTest::TestReadPartitionStatus [GOOD] >> TPersQueueTest::TxCounters >> KqpWorkloadService::TestZeroQueueSize [GOOD] >> KqpWorkloadService::TestQueryCancelAfterUnlimitedPool >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v1-by_deduplication_id] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v1-content_based] >> TJaegerTracingConfiguratorTests::SamplingByDb [GOOD] >> TJaegerTracingConfiguratorTests::SharedThrottlingLimits |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> KqpJoin::JoinAggregateSingleRow [GOOD] >> 
TSchemeShardPgTypesInTables::CreateTableWithPgTypeColumn-EnableTablePgTypes-false [GOOD] >> TSchemeShardPgTypesInTables::AlterTableAddPgTypeColumn-EnableTablePgTypes-false >> KqpIndexLookupJoin::RightSemi >> ResourcePoolClassifiersDdl::TestResourcePoolClassifierRanks [GOOD] >> ResourcePoolClassifiersDdl::TestExplicitPoolId |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> GroupWriteTest::TwoTables [GOOD] >> TJaegerTracingConfiguratorTests::SharedThrottlingLimits [GOOD] >> TJaegerTracingConfiguratorTests::SharedSamplingLimits >> ResourcePoolsDdl::TestDefaultPoolRestrictions [GOOD] >> ResourcePoolsDdl::TestAlterResourcePool >> TConsoleTests::TestCreateTenantExtSubdomain [GOOD] >> TConsoleTests::TestCreateSharedTenant |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::JoinAggregateSingleRow [GOOD] Test command err: Trying to start YDB, gRPC: 17376, MsgBus: 3591 2025-03-31T16:14:20.954479Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488007733606823039:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:14:20.955775Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001992/r3tmp/tmpaDS6Ul/pdisk_1.dat 2025-03-31T16:14:21.323915Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17376, node 1 2025-03-31T16:14:21.355377Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:14:21.355465Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:14:21.366128Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:14:21.415244Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:14:21.415269Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:14:21.415281Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:14:21.415384Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3591 TClient is connected to server localhost:3591 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:14:21.924103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:14:21.952609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:14:22.089342Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:14:22.254235Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:14:22.339497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:14:24.257189Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007750786694003:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:24.257297Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:24.625729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:14:24.656288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:14:24.696649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:14:24.723548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:14:24.754514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:14:24.784494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:14:24.830574Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007750786694512:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:24.830653Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:24.830839Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007750786694517:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:24.834566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:14:24.847101Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007750786694519:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:14:24.937278Z node 1 :TX_PROXY ERROR: Actor# [1:7488007750786694573:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:14:25.861135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:14:25.889453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:14:25.917084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T16:14:25.953846Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488007733606823039:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:14:25.953979Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TReplicaTest::Commit >> TSchemeShardPgTypesInTables::AlterTableAddPgTypeColumn-EnableTablePgTypes-false [GOOD] >> TSchemeShardPgTypesInTables::AlterTableAddPgTypeColumn-EnableTablePgTypes-true ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest >> GroupWriteTest::TwoTables [GOOD] Test command err: RandomSeed# 6793430643597960514 2025-03-31T16:14:18.040555Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058679074007041 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 72058679074007041 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2025-03-31T16:14:18.040635Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058502699329537 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 72058502699329537 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2025-03-31T16:14:18.062075Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058679074007041 Generation# 1 recieved TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2025-03-31T16:14:18.062132Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058679074007041 Generation# 1 going to send TEvBlock {TabletId# 72058679074007041 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2025-03-31T16:14:18.062204Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058502699329537 Generation# 1 recieved TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2025-03-31T16:14:18.062223Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058502699329537 Generation# 1 going to send TEvBlock {TabletId# 72058502699329537 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2025-03-31T16:14:18.065816Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058679074007041 Generation# 1 recieved TEvBlockResult {Status# OK} 
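
The KQP_WORKLOAD_SERVICE warnings above show a recurring bootstrap pattern: fetching the default resource pool returns NOT_FOUND, the service proposes ESchemeOpCreateResourcePool, and a racing creator whose doublecheck hits "path exist, request accepts it" still counts as success. A self-contained sketch of that create-or-accept idiom follows; every name in it is an illustrative stand-in, not the real YDB API.

    #include <iostream>
    #include <string>
    #include <unordered_set>

    // Stand-in for the scheme layer: Fetch fails until Create has run once.
    enum class EStatus { Success, NotFound, AlreadyExists };

    struct TSchemeStub {
        std::unordered_set<std::string> Paths;
        EStatus Fetch(const std::string& p) const {
            return Paths.count(p) ? EStatus::Success : EStatus::NotFound;
        }
        EStatus Create(const std::string& p) {
            return Paths.insert(p).second ? EStatus::Success : EStatus::AlreadyExists;
        }
    };

    // Create-or-accept: a racing creator losing with "path exist" still succeeds.
    EStatus EnsureDefaultPool(TSchemeStub& scheme) {
        const std::string path = "/Root/.metadata/workload_manager/pools/default";
        if (scheme.Fetch(path) == EStatus::Success) return EStatus::Success;
        EStatus st = scheme.Create(path);
        return st == EStatus::AlreadyExists ? EStatus::Success : st;
    }

    int main() {
        TSchemeStub scheme;
        std::cout << (EnsureDefaultPool(scheme) == EStatus::Success)   // creates the pool
                  << (EnsureDefaultPool(scheme) == EStatus::Success)   // already present
                  << "\n";
    }
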
2025-03-31T16:14:18.065899Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058502699329537 Generation# 1 recieved TEvBlockResult {Status# OK} 2025-03-31T16:14:18.083240Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058502699329537 Generation# 2 going to send TEvCollectGarbage {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-03-31T16:14:18.083325Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058679074007041 Generation# 2 going to send TEvCollectGarbage {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-03-31T16:14:18.086375Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058502699329537 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2025-03-31T16:14:18.086447Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058679074007041 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2025-03-31T16:14:24.377803Z 4 00h01m13.310512s :BS_LOGCUTTER ERROR: VDISK[82000000:_:0:3:0]: KEEPER: RetryCutLogEvent: limit exceeded; FreeUpToLsn# 1465 2025-03-31T16:14:28.329572Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2025-03-31T16:14:28.329702Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058679074007041 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 22 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-03-31T16:14:28.329783Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058502699329537 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 22 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-03-31T16:14:28.329825Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2025-03-31T16:14:28.329871Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058679074007041 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 23 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-03-31T16:14:28.329920Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058502699329537 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 23 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-03-31T16:14:28.329959Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2025-03-31T16:14:28.330004Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058679074007041 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058679074007041 RecordGeneration# 2 
PerGenerationCounter# 24 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-03-31T16:14:28.330054Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058502699329537 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 24 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-03-31T16:14:28.413386Z 1 00h01m20.010512s :BS_LOAD_TEST INFO: TabletId# 72058679074007041 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 22 Channel# 0 Status# OK} 2025-03-31T16:14:28.413507Z 1 00h01m20.010512s :BS_LOAD_TEST INFO: TabletId# 72058502699329537 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 22 Channel# 0 Status# OK} 2025-03-31T16:14:28.413566Z 1 00h01m20.010512s :BS_LOAD_TEST INFO: TabletId# 72058679074007041 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 23 Channel# 0 Status# OK} 2025-03-31T16:14:28.413619Z 1 00h01m20.010512s :BS_LOAD_TEST INFO: TabletId# 72058502699329537 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 23 Channel# 0 Status# OK} 2025-03-31T16:14:28.413676Z 1 00h01m20.010512s :BS_LOAD_TEST INFO: TabletId# 72058679074007041 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 24 Channel# 0 Status# OK} 2025-03-31T16:14:28.413727Z 1 00h01m20.010512s :BS_LOAD_TEST INFO: TabletId# 72058502699329537 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 24 Channel# 0 Status# OK} >> TReplicaTest::Commit [GOOD] >> TReplicaTest::AckNotifications >> KqpWorkloadServiceTables::TestCreateWorkloadSerivceTables [GOOD] >> KqpWorkloadServiceTables::TestPoolStateFetcherActor >> TJaegerTracingConfiguratorTests::SharedSamplingLimits [GOOD] >> TLogSettingsConfiguratorTests::TestNoChanges >> TReplicaTest::AckNotifications [GOOD] >> TReplicaTest::AckNotificationsUponPathRecreation >> TReplicaTest::AckNotificationsUponPathRecreation [GOOD] |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> KqpJoin::JoinLeftPureFull [GOOD] >> TSchemeShardPgTypesInTables::AlterTableAddPgTypeColumn-EnableTablePgTypes-true [GOOD] >> TSchemeShardTest::CopyTableForBackup [GOOD] >> TSchemeShardTest::ConsistentCopyTablesForBackup >> KqpWorkloadService::TestZeroQueueSizeManyQueries [GOOD] >> KqpWorkloadServiceActors::TestCreateDefaultPool |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_read_dont_stall[tables_format_v0] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::AckNotificationsUponPathRecreation [GOOD] Test command err: 2025-03-31T16:14:29.202773Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:7:2054] 2025-03-31T16:14:29.202842Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Successful handshake: owner# 1, generation# 1 2025-03-31T16:14:29.202966Z node 1 
:SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 1 Generation: 1 }: sender# [1:7:2054] 2025-03-31T16:14:29.203001Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Commit generation: owner# 1, generation# 1 2025-03-31T16:14:29.203049Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 2 }: sender# [1:7:2054] 2025-03-31T16:14:29.203100Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Successful handshake: owner# 1, generation# 2 2025-03-31T16:14:29.469851Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [2:8:2055] 2025-03-31T16:14:29.470032Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Upsert description: path# path 2025-03-31T16:14:29.470222Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Subscribe: subscriber# [2:8:2055], path# path, domainOwnerId# 0, capabilities# AckNotifications: true 2025-03-31T16:14:29.470370Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:7:2054] 2025-03-31T16:14:29.470425Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Successful handshake: owner# 1, generation# 1 2025-03-31T16:14:29.470559Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:7:2054], cookie# 0, event size# 72 2025-03-31T16:14:29.470598Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-03-31T16:14:29.475740Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-03-31T16:14:29.475895Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [2:8:2055] 2025-03-31T16:14:29.475977Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:7:2054], cookie# 0, event size# 40 2025-03-31T16:14:29.476002Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2025-03-31T16:14:29.476031Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2025-03-31T16:14:29.476131Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 1 }: sender# [2:8:2055] 2025-03-31T16:14:29.738223Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [3:7:2054] 2025-03-31T16:14:29.738288Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Successful handshake: owner# 1, generation# 1 2025-03-31T16:14:29.738444Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 72 2025-03-31T16:14:29.738505Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-03-31T16:14:29.738564Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status 
StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 2, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-03-31T16:14:29.738678Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:8:2055] 2025-03-31T16:14:29.738753Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Subscribe: subscriber# [3:8:2055], path# path, domainOwnerId# 0, capabilities# AckNotifications: true 2025-03-31T16:14:29.738897Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 72 2025-03-31T16:14:29.738939Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-03-31T16:14:29.738996Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 3, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-03-31T16:14:29.739197Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 72 2025-03-31T16:14:29.739232Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], deletion# false 2025-03-31T16:14:29.739269Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2025-03-31T16:14:29.739349Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Upsert description: path# path 2025-03-31T16:14:29.739408Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Subscribe: subscriber# [3:8:2055], path# path, domainOwnerId# 0, capabilities# AckNotifications: true 2025-03-31T16:14:29.739467Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 2], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-03-31T16:14:29.739545Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 3 }: sender# [3:8:2055] |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] [GOOD] |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> TLogSettingsConfiguratorTests::TestNoChanges [GOOD] >> TLogSettingsConfiguratorTests::TestAddComponentEntries >> KqpJoinOrder::CanonizedJoinOrderTPCH15 >> TConsoleTests::TestRestartConsoleAndPools [GOOD] >> TConsoleTests::TestRemoveTenantWithBorrowedStorageUnits |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v1-fifo] [GOOD] |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v0-after_crutch_batch] [GOOD] >> 
test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::JoinLeftPureFull [GOOD] Test command err: Trying to start YDB, gRPC: 65388, MsgBus: 25165 2025-03-31T16:14:24.300180Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488007750924198403:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:14:24.313645Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001991/r3tmp/tmp7FCDZb/pdisk_1.dat 2025-03-31T16:14:24.758966Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:14:24.764064Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:14:24.764155Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:14:24.767268Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 65388, node 1 2025-03-31T16:14:24.856001Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:14:24.856028Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:14:24.856035Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:14:24.856144Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25165 TClient is connected to server localhost:25165 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:14:25.374817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:14:25.403431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:14:25.565959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:14:25.733901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:14:25.813335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:14:27.345099Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007763809102058:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:27.345239Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:27.636903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:14:27.669665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:14:27.700055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:14:27.730646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:14:27.765016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:14:27.802600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:14:27.862114Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007763809102567:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:27.862191Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:27.862379Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007763809102572:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:27.867683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:14:27.883605Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007763809102574:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:14:27.970428Z node 1 :TX_PROXY ERROR: Actor# [1:7488007763809102629:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:14:29.298120Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488007750924198403:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:14:29.298196Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardPgTypesInTables::AlterTableAddPgTypeColumn-EnableTablePgTypes-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:13:53.837170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:13:53.837293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:13:53.837338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:13:53.837374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:13:53.837411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:13:53.837438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:13:53.837493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:13:53.837568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:13:53.837874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:13:53.931559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:13:53.931621Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:13:53.937105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:13:53.937217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:13:53.937386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:13:53.942420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:13:53.942706Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners 
number: 0 2025-03-31T16:13:53.944559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:13:53.946353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:13:53.955816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:13:53.966359Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:13:53.966447Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:13:53.966633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:13:53.966704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:13:53.966768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:13:53.967904Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:13:53.982348Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:13:54.107910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:13:54.109317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:13:54.110621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:13:54.112169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:13:54.112263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:13:54.132969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:13:54.134095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:13:54.134326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:13:54.134381Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:13:54.134434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:13:54.134480Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:13:54.137091Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:13:54.137163Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:13:54.137219Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:13:54.139057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:13:54.139126Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:13:54.139192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:13:54.139237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:13:54.144011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:13:54.145960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:13:54.146129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:13:54.154221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:13:54.154347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:13:54.154428Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:13:54.155962Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:13:54.156034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:13:54.156211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:13:54.156280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:13:54.160598Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:13:54.160642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:13:54.160798Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at 
schemeshard: 72057594046678944 2025-03-31T16:13:54.160841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:13:54.161035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:13:54.161096Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:13:54.161203Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:13:54.161238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:13:54.161273Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:13:54.161302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:13:54.161336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:13:54.161391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:13:54.161425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:13:54.161457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:13:54.161516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:13:54.161554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:13:54.161584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:13:54.163544Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:13:54.163665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:13:54.163701Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
16:14:29.977633Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: PREPARED TxId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 PrepareArriveTime: 60500 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 243 } } 2025-03-31T16:14:29.977799Z node 12 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TConfigureParts operationId# 102:0 HandleReply TEvProposeTransactionResult, at schemeshard: 72057594046678944 message# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: PREPARED TxId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 PrepareArriveTime: 60500 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 243 } } 2025-03-31T16:14:29.977861Z node 12 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046678944 2025-03-31T16:14:29.978040Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: CollectProposeTransactionResults accept TEvProposeTransactionResult, shard: 72075186233409546, shardIdx: 72057594046678944:1, operationId: 102:0, left await: 0, at schemeshard: 72057594046678944 2025-03-31T16:14:29.978117Z node 12 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 3 -> 128 2025-03-31T16:14:29.981067Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:14:29.981306Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:14:29.981386Z node 12 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId# 102:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:14:29.981519Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2025-03-31T16:14:29.981739Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:14:29.984054Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-03-31T16:14:29.984281Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2025-03-31T16:14:29.985760Z node 12 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:14:29.985939Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 135 RawX2: 51539609709 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:14:29.986044Z node 12 :FLAT_TX_SCHEMESHARD 
INFO: TAlterTable TPropose operationId# 102:0 HandleReply TEvOperationPlan, operationId: 102:0, stepId: 5000003, at schemeshard: 72057594046678944 2025-03-31T16:14:29.986478Z node 12 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2025-03-31T16:14:29.986725Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-03-31T16:14:29.994858Z node 12 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:14:29.994950Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T16:14:29.995470Z node 12 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:14:29.995567Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [12:205:2207], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-03-31T16:14:29.996425Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:14:29.996507Z node 12 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 2025-03-31T16:14:29.997569Z node 12 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:14:29.997729Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:14:29.997788Z node 12 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-03-31T16:14:29.997873Z node 12 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2025-03-31T16:14:29.997952Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-31T16:14:29.998081Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-03-31T16:14:29.998925Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1827 } } 2025-03-31T16:14:29.998992Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-03-31T16:14:29.999201Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1827 } } 
2025-03-31T16:14:29.999347Z node 12 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1827 } } 2025-03-31T16:14:30.000082Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 306 RawX2: 51539609845 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-03-31T16:14:30.000200Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-03-31T16:14:30.000457Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 306 RawX2: 51539609845 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-03-31T16:14:30.000547Z node 12 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-03-31T16:14:30.000698Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 306 RawX2: 51539609845 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-03-31T16:14:30.000808Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-31T16:14:30.000889Z node 12 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:14:30.000949Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-03-31T16:14:30.001023Z node 12 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-03-31T16:14:30.006271Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-31T16:14:30.006471Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:14:30.006638Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:14:30.007041Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:14:30.007145Z node 12 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-03-31T16:14:30.007345Z node 12 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-31T16:14:30.007422Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T16:14:30.007488Z node 12 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-31T16:14:30.007544Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T16:14:30.007640Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-03-31T16:14:30.007754Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [12:334:2313] message: TxId: 102 2025-03-31T16:14:30.007838Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T16:14:30.007910Z node 12 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-03-31T16:14:30.007963Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-03-31T16:14:30.008151Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-31T16:14:30.010935Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-31T16:14:30.011067Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [12:393:2365] TestWaitNotification: OK eventTxId 102 >> TLogSettingsConfiguratorTests::TestAddComponentEntries [GOOD] >> TLogSettingsConfiguratorTests::TestRemoveComponentEntries >> KqpJoinOrder::TPCDS92-ColumnStore >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v1-content_based] [GOOD] |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v0] [GOOD] >> TSchemeShardTest::ConsistentCopyTablesForBackup [GOOD] >> TSchemeShardTest::CopyLockedTableForBackup |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> KqpJoinOrder::FiveWayJoinStatsOverride+ColumnStore |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> TLogSettingsConfiguratorTests::TestRemoveComponentEntries [GOOD] >> TLogSettingsConfiguratorTests::TestChangeDefaults |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v0-std] [GOOD] >> OlapEstimationRowsCorrectness::TPCH3 |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] [GOOD] >> TLogSettingsConfiguratorTests::TestChangeDefaults [GOOD] >> TModificationsValidatorTests::TestApplyValidators_TENANTS [GOOD] >> TModificationsValidatorTests::TestApplyValidators_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestApplyValidatorsWithOldConfig [GOOD] >> TModificationsValidatorTests::TestChecksLimitError [GOOD] >> TModificationsValidatorTests::TestChecksLimitWarning [GOOD] >> TConsoleTests::TestCreateSharedTenant [GOOD] >> TConsoleTests::TestCreateServerlessTenant |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> TSchemeShardTest::CopyLockedTableForBackup [GOOD] >> TSchemeShardTest::ConfigColumnFamily >> KqpFlipJoin::LeftSemi_2 [GOOD] >> KqpFlipJoin::LeftSemi_3 |86.6%| 
[TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut/unittest >> TModificationsValidatorTests::TestChecksLimitWarning [GOOD] Test command err: 2025-03-31T16:14:19.504555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:14:19.504630Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:14:19.557329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-31T16:14:20.709817Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:14:20.709891Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:14:20.754636Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-31T16:14:21.558133Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:14:21.558196Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:14:21.599401Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-31T16:14:22.408849Z node 4 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:14:22.408914Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:14:22.446851Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-31T16:14:23.494471Z node 5 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:14:23.494547Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:14:23.537530Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-31T16:14:24.635308Z node 6 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:14:24.635378Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:14:24.680094Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-31T16:14:25.704889Z node 7 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:14:25.704987Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:14:25.751765Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-31T16:14:26.781434Z node 8 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:14:26.781490Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:14:26.826380Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-31T16:14:27.883114Z node 9 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:14:27.883190Z node 9 :IMPORT WARN: Table profiles were not 
loaded 2025-03-31T16:14:27.927299Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-31T16:14:28.985628Z node 10 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:14:28.985710Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:14:29.046528Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-31T16:14:30.250648Z node 11 :CMS_CONFIGS TRACE: StateWork, received event# 268637729, Sender [11:136:2149], Recipient [11:355:2294]: {TEvControllerProposeConfigRequest Record# } 2025-03-31T16:14:30.256121Z node 11 :CMS_CONFIGS TRACE: StateWork, processing event TEvBlobStorage::TEvControllerProposeConfigRequest 2025-03-31T16:14:30.266883Z node 11 :CMS_CONFIGS TRACE: StateWork, received event# 269877760, Sender [11:316:2283], Recipient [11:315:2280]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936131 Status: OK ServerId: [11:408:2340] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-03-31T16:14:30.266970Z node 11 :CMS_CONFIGS TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-03-31T16:14:30.280121Z node 11 :CMS_CONFIGS TRACE: StateWork, received event# 273285144, Sender [11:315:2280], Recipient [11:355:2294]: NKikimr::NConsole::TEvConsole::TEvConfigSubscriptionRequest { Generation: 1 Options { NodeId: 11 Host: "ghrun-tuhoqst4xe.auto.internal" Tenant: "" NodeType: "" } ConfigItemKinds: 29 ConfigItemKinds: 1 ConfigItemKinds: 89 ConfigItemKinds: 2 ConfigItemKinds: 32 ConfigItemKinds: 3 ConfigItemKinds: 33 ConfigItemKinds: 34 ConfigItemKinds: 6 ConfigItemKinds: 36 ConfigItemKinds: 37 ConfigItemKinds: 8 ConfigItemKinds: 38 ConfigItemKinds: 10 ConfigItemKinds: 39 ConfigItemKinds: 43 ConfigItemKinds: 73 ConfigItemKinds: 75 ConfigItemKinds: 46 ConfigItemKinds: 77 ConfigItemKinds: 80 ConfigItemKinds: 81 ConfigItemKinds: 52 ConfigItemKinds: 54 ConfigItemKinds: 25 ConfigItemKinds: 55 ConfigItemKinds: 26 ServeYaml: true YamlApiVersion: 1 } 2025-03-31T16:14:30.280476Z node 11 :CMS_CONFIGS TRACE: StateWork, received event# 273285144, Sender [11:315:2280], Recipient [11:361:2305]: NKikimr::NConsole::TEvConsole::TEvConfigSubscriptionRequest { Generation: 1 Options { NodeId: 11 Host: "ghrun-tuhoqst4xe.auto.internal" Tenant: "" NodeType: "" } ConfigItemKinds: 29 ConfigItemKinds: 1 ConfigItemKinds: 89 ConfigItemKinds: 2 ConfigItemKinds: 32 ConfigItemKinds: 3 ConfigItemKinds: 33 ConfigItemKinds: 34 ConfigItemKinds: 6 ConfigItemKinds: 36 ConfigItemKinds: 37 ConfigItemKinds: 8 ConfigItemKinds: 38 ConfigItemKinds: 10 ConfigItemKinds: 39 ConfigItemKinds: 43 ConfigItemKinds: 73 ConfigItemKinds: 75 ConfigItemKinds: 46 ConfigItemKinds: 77 ConfigItemKinds: 80 ConfigItemKinds: 81 ConfigItemKinds: 52 ConfigItemKinds: 54 ConfigItemKinds: 25 ConfigItemKinds: 55 ConfigItemKinds: 26 ServeYaml: true YamlApiVersion: 1 } 2025-03-31T16:14:30.280559Z node 11 :CMS_CONFIGS TRACE: StateWork, processing event TEvConsole::TEvConfigSubscriptionRequest 2025-03-31T16:14:30.280679Z node 11 :CMS_CONFIGS DEBUG: TConfigsProvider registered new subscription [11:315:2280]:1 2025-03-31T16:14:30.280785Z node 11 :CMS_CONFIGS TRACE: TConfigsProvider: check if update is required for volatile subscription [11:315:2280]:1 2025-03-31T16:14:30.280863Z node 11 :CMS_CONFIGS TRACE: 
TConfigsProvider: new config found for subscription [11:315:2280]:1 version= 2025-03-31T16:14:30.280988Z node 11 :CMS_CONFIGS TRACE: TSubscriptionClientSender([11:315:2280]) send TEvConfigSubscriptionResponse 2025-03-31T16:14:30.282081Z node 11 :CMS_CONFIGS TRACE: StateWork, received event# 273286169, Sender [11:409:2305], Recipient [11:315:2280]: NKikimr::NConsole::TEvConsole::TEvConfigSubscriptionResponse { Generation: 1 Status { Code: SUCCESS } } 2025-03-31T16:14:30.282137Z node 11 :CMS_CONFIGS TRACE: StateWork, processing event TEvConsole::TEvConfigSubscriptionResponse 2025-03-31T16:14:30.282422Z node 11 :CMS_CONFIGS TRACE: StateWork, received event# 273285146, Sender [11:361:2305], Recipient [11:409:2305]: NKikimr::NConsole::TEvConsole::TEvConfigSubscriptionNotification { Generation: 1 Config { } MainYamlConfigNotChanged: true } 2025-03-31T16:14:30.282489Z node 11 :CMS_CONFIGS TRACE: StateWork, processing event TEvConsole::TEvConfigSubscriptionNotification 2025-03-31T16:14:30.282591Z node 11 :CMS_CONFIGS TRACE: TSubscriptionClientSender([11:315:2280]) send TEvConfigSubscriptionNotificationRequest: Order: 1 Generation: 1 Config { } MainYamlConfigNotChanged: true 2025-03-31T16:14:30.282754Z node 11 :CMS_CONFIGS TRACE: StateWork, received event# 273285146, Sender [11:409:2305], Recipient [11:315:2280]: NKikimr::NConsole::TEvConsole::TEvConfigSubscriptionNotification { Order: 1 Generation: 1 Config { } MainYamlConfigNotChanged: true } 2025-03-31T16:14:30.282795Z node 11 :CMS_CONFIGS TRACE: StateWork, processing event TEvConsole::TEvConfigSubscriptionNotification 2025-03-31T16:14:30.283050Z node 11 :CMS_CONFIGS TRACE: StateWork, received event# 273285129, Sender [11:315:2280], Recipient [11:355:2294]: NKikimr::NConsole::TEvConsole::TEvGetNodeConfigRequest { Node { NodeId: 11 Host: "ghrun-tuhoqst4xe.auto.internal" Tenant: "" NodeType: "" } ItemKinds: 29 ItemKinds: 1 ItemKinds: 89 ItemKinds: 2 ItemKinds: 32 ItemKinds: 3 ItemKinds: 33 ItemKinds: 34 ItemKinds: 6 ItemKinds: 36 ItemKinds: 37 ItemKinds: 8 ItemKinds: 38 ItemKinds: 10 ItemKinds: 39 ItemKinds: 43 ItemKinds: 73 ItemKinds: 75 ItemKinds: 46 ItemKinds: 77 ItemKinds: 80 ItemKinds: 81 ItemKinds: 52 ItemKinds: 54 ItemKinds: 25 ItemKinds: 55 ItemKinds: 26 } 2025-03-31T16:14:30.283275Z node 11 :CMS_CONFIGS TRACE: StateWork, received event# 273285129, Sender [11:315:2280], Recipient [11:361:2305]: NKikimr::NConsole::TEvConsole::TEvGetNodeConfigRequest { Node { NodeId: 11 Host: "ghrun-tuhoqst4xe.auto.internal" Tenant: "" NodeType: "" } ItemKinds: 29 ItemKinds: 1 ItemKinds: 89 ItemKinds: 2 ItemKinds: 32 ItemKinds: 3 ItemKinds: 33 ItemKinds: 34 ItemKinds: 6 ItemKinds: 36 ItemKinds: 37 ItemKinds: 8 ItemKinds: 38 ItemKinds: 10 ItemKinds: 39 ItemKinds: 43 ItemKinds: 73 ItemKinds: 75 ItemKinds: 46 ItemKinds: 77 ItemKinds: 80 ItemKinds: 81 ItemKinds: 52 ItemKinds: 54 ItemKinds: 25 ItemKinds: 55 ItemKinds: 26 } 2025-03-31T16:14:30.283317Z node 11 :CMS_CONFIGS TRACE: StateWork, processing event TEvConsole::TEvGetNodeConfigRequest 2025-03-31T16:14:30.283454Z node 11 :CMS_CONFIGS TRACE: Send TEvGetNodeConfigResponse: Status { Code: SUCCESS } Config { } 2025-03-31T16:14:30.289258Z node 11 :CMS_CONFIGS TRACE: StateWork, received event# 273285138, Sender [11:311:2280], Recipient [11:355:2294]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { } ItemKinds: 75 Local: true } 2025-03-31T16:14:30.289373Z node 11 :CMS_CONFIGS INFO: TLogSettingsConfigurator: got new config: 2025-03-31T16:14:30.289484Z node 11 :CMS_CONFIGS NOTICE: 
TLogSettingsConfigurator: Priority for the component GLOBAL has been changed from WARN to NOTICE 2025-03-31T16:14:30.289551Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component GLOBAL has been changed from WARN to DEBUG 2025-03-31T16:14:30.289596Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component INTERCONNECT has been changed from WARN to NOTICE 2025-03-31T16:14:30.289627Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component INTERCONNECT has been changed from WARN to DEBUG 2025-03-31T16:14:30.289657Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component TEST has been changed from WARN to NOTICE 2025-03-31T16:14:30.289684Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component TEST has been changed from WARN to DEBUG 2025-03-31T16:14:30.289714Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component PROTOCOLS has been changed from WARN to NOTICE 2025-03-31T16:14:30.289742Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component PROTOCOLS has been changed from WARN to DEBUG 2025-03-31T16:14:30.289777Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component INTERCONNECT_SPEED_TEST has been changed from WARN to NOTICE 2025-03-31T16:14:30.289804Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component INTERCONNECT_SPEED_TEST has been changed from WARN to DEBUG 2025- ... ZER has been changed from 0 to 10 2025-03-31T16:14:33.282328Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component METADATA_MANAGER has been changed from NOTICE to ALERT 2025-03-31T16:14:33.282386Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component METADATA_MANAGER has been changed from DEBUG to ALERT 2025-03-31T16:14:33.282430Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component METADATA_MANAGER has been changed from 0 to 10 2025-03-31T16:14:33.282462Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component METADATA_SECRET has been changed from NOTICE to ALERT 2025-03-31T16:14:33.282492Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component METADATA_SECRET has been changed from DEBUG to ALERT 2025-03-31T16:14:33.282519Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component METADATA_SECRET has been changed from 0 to 10 2025-03-31T16:14:33.282550Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component TX_TIERING has been changed from NOTICE to ALERT 2025-03-31T16:14:33.282580Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component TX_TIERING has been changed from DEBUG to ALERT 2025-03-31T16:14:33.282608Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component TX_TIERING has been changed from 0 to 10 2025-03-31T16:14:33.282667Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component BG_TASKS has been changed from NOTICE to ALERT 2025-03-31T16:14:33.282701Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component BG_TASKS has been changed from DEBUG to ALERT 2025-03-31T16:14:33.282730Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component BG_TASKS has been 
changed from 0 to 10 2025-03-31T16:14:33.282763Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component DISCOVERY has been changed from NOTICE to ALERT 2025-03-31T16:14:33.282792Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component DISCOVERY has been changed from DEBUG to ALERT 2025-03-31T16:14:33.282820Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component DISCOVERY has been changed from 0 to 10 2025-03-31T16:14:33.282850Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component DISCOVERY_CACHE has been changed from NOTICE to ALERT 2025-03-31T16:14:33.282877Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component DISCOVERY_CACHE has been changed from DEBUG to ALERT 2025-03-31T16:14:33.282902Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component DISCOVERY_CACHE has been changed from 0 to 10 2025-03-31T16:14:33.282948Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component EXT_INDEX has been changed from NOTICE to ALERT 2025-03-31T16:14:33.283008Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component EXT_INDEX has been changed from DEBUG to ALERT 2025-03-31T16:14:33.283047Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component EXT_INDEX has been changed from 0 to 10 2025-03-31T16:14:33.283106Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component TX_CONVEYOR has been changed from NOTICE to ALERT 2025-03-31T16:14:33.283141Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component TX_CONVEYOR has been changed from DEBUG to ALERT 2025-03-31T16:14:33.283177Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component TX_CONVEYOR has been changed from 0 to 10 2025-03-31T16:14:33.283207Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component TX_LIMITER has been changed from NOTICE to ALERT 2025-03-31T16:14:33.283278Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component TX_LIMITER has been changed from DEBUG to ALERT 2025-03-31T16:14:33.283317Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component TX_LIMITER has been changed from 0 to 10 2025-03-31T16:14:33.283354Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component ARROW_HELPER has been changed from NOTICE to ALERT 2025-03-31T16:14:33.283385Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component ARROW_HELPER has been changed from DEBUG to ALERT 2025-03-31T16:14:33.283416Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component ARROW_HELPER has been changed from 0 to 10 2025-03-31T16:14:33.283511Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component SSA_GRAPH_EXECUTION has been changed from NOTICE to ALERT 2025-03-31T16:14:33.283566Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component SSA_GRAPH_EXECUTION has been changed from DEBUG to ALERT 2025-03-31T16:14:33.283598Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component SSA_GRAPH_EXECUTION has been changed from 0 to 10 2025-03-31T16:14:33.283631Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component 
KAFKA_PROXY has been changed from NOTICE to ALERT 2025-03-31T16:14:33.283662Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component KAFKA_PROXY has been changed from DEBUG to ALERT 2025-03-31T16:14:33.283691Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component KAFKA_PROXY has been changed from 0 to 10 2025-03-31T16:14:33.283723Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component OBJECTS_MONITORING has been changed from NOTICE to ALERT 2025-03-31T16:14:33.283753Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component OBJECTS_MONITORING has been changed from DEBUG to ALERT 2025-03-31T16:14:33.283780Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component OBJECTS_MONITORING has been changed from 0 to 10 2025-03-31T16:14:33.283810Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component STATISTICS has been changed from NOTICE to ALERT 2025-03-31T16:14:33.283841Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component STATISTICS has been changed from DEBUG to ALERT 2025-03-31T16:14:33.283893Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component STATISTICS has been changed from 0 to 10 2025-03-31T16:14:33.283933Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component BS_REQUEST_COST has been changed from NOTICE to ALERT 2025-03-31T16:14:33.283976Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component BS_REQUEST_COST has been changed from DEBUG to ALERT 2025-03-31T16:14:33.284010Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component BS_REQUEST_COST has been changed from 0 to 10 2025-03-31T16:14:33.284046Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component BS_VDISK_BALANCING has been changed from NOTICE to ALERT 2025-03-31T16:14:33.284076Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component BS_VDISK_BALANCING has been changed from DEBUG to ALERT 2025-03-31T16:14:33.284113Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component BS_VDISK_BALANCING has been changed from 0 to 10 2025-03-31T16:14:33.284144Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component BS_PROXY_GETBLOCK has been changed from NOTICE to ALERT 2025-03-31T16:14:33.284178Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component BS_PROXY_GETBLOCK has been changed from DEBUG to ALERT 2025-03-31T16:14:33.284207Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component BS_PROXY_GETBLOCK has been changed from 0 to 10 2025-03-31T16:14:33.284237Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component BS_SHRED has been changed from NOTICE to ALERT 2025-03-31T16:14:33.284264Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component BS_SHRED has been changed from DEBUG to ALERT 2025-03-31T16:14:33.284290Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component BS_SHRED has been changed from 0 to 10 2025-03-31T16:14:33.284319Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component LDAP_AUTH_PROVIDER has been changed from NOTICE to ALERT 2025-03-31T16:14:33.284348Z node 14 
:CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component LDAP_AUTH_PROVIDER has been changed from DEBUG to ALERT
2025-03-31T16:14:33.284376Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component LDAP_AUTH_PROVIDER has been changed from 0 to 10
2025-03-31T16:14:33.284438Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component GROUPED_MEMORY_LIMITER has been changed from NOTICE to ALERT
2025-03-31T16:14:33.284473Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component GROUPED_MEMORY_LIMITER has been changed from DEBUG to ALERT
2025-03-31T16:14:33.284501Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component GROUPED_MEMORY_LIMITER has been changed from 0 to 10
2025-03-31T16:14:33.284534Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component DATA_INTEGRITY has been changed from NOTICE to ALERT
2025-03-31T16:14:33.284564Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component DATA_INTEGRITY has been changed from DEBUG to ALERT
2025-03-31T16:14:33.284591Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component DATA_INTEGRITY has been changed from 0 to 10
2025-03-31T16:14:33.284621Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component TX_PRIORITIES_QUEUE has been changed from NOTICE to ALERT
2025-03-31T16:14:33.284653Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component TX_PRIORITIES_QUEUE has been changed from DEBUG to ALERT
2025-03-31T16:14:33.284682Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component TX_PRIORITIES_QUEUE has been changed from 0 to 10
2025-03-31T16:14:33.284711Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component BSCONFIG has been changed from NOTICE to ALERT
2025-03-31T16:14:33.284740Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component BSCONFIG has been changed from DEBUG to ALERT
2025-03-31T16:14:33.284769Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component BSCONFIG has been changed from 0 to 10
2025-03-31T16:14:33.284797Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component NAMESERVICE has been changed from NOTICE to ALERT
2025-03-31T16:14:33.284824Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component NAMESERVICE has been changed from DEBUG to ALERT
2025-03-31T16:14:33.284856Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component NAMESERVICE has been changed from 0 to 10
2025-03-31T16:14:33.285013Z node 14 :CMS_CONFIGS TRACE: TLogSettingsConfigurator: Send TEvConfigNotificationResponse: SubscriptionId: 0 ConfigId { }
>> KqpJoin::LeftJoinPushdownPredicate_NoPushdown
>> KqpIndexLookupJoin::RightSemi [GOOD]
|86.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest
>> TReplicaTest::Update
>> TReplicaTest::Update [GOOD]
>> TReplicaTest::UnsubscribeWithoutSubscribe
>> TSchemeShardTest::ConfigColumnFamily [GOOD]
>> TSchemeShardTest::ConsistentCopyAfterDropIndexes
>> KqpWorkloadServiceActors::TestCreateDefaultPool [GOOD]
>> KqpWorkloadServiceActors::TestCpuLoadActor
>> Secret::SimpleQueryService
>> TReplicaTest::UnsubscribeWithoutSubscribe [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::RightSemi [GOOD]
Test command err: Trying to start YDB, gRPC: 25262, MsgBus: 18370
2025-03-31T16:14:28.213158Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488007766869306777:2065];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:14:28.213244Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00198e/r3tmp/tmpJkeyKO/pdisk_1.dat
2025-03-31T16:14:28.548997Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 25262, node 1
2025-03-31T16:14:28.631167Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T16:14:28.631196Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T16:14:28.631212Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T16:14:28.631397Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
2025-03-31T16:14:28.632383Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:14:28.632490Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:14:28.634163Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:18370
TClient is connected to server localhost:18370
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-31T16:14:29.131402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:14:29.158112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:14:29.283897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:14:29.424098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:14:29.494145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:14:31.276706Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007779754210450:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:14:31.276853Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:14:31.627378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-31T16:14:31.659779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-31T16:14:31.691382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-31T16:14:31.723511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-31T16:14:31.766493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-31T16:14:31.799146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-31T16:14:31.838814Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007779754210957:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:14:31.838893Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:14:31.839134Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007779754210962:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:14:31.843304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-31T16:14:31.853237Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007779754210964:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-03-31T16:14:31.931820Z node 1 :TX_PROXY ERROR: Actor# [1:7488007779754211017:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-31T16:14:32.874517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-03-31T16:14:32.940719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
2025-03-31T16:14:32.973911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480
2025-03-31T16:14:33.008336Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480
2025-03-31T16:14:33.036553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480
2025-03-31T16:14:33.068065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480
2025-03-31T16:14:33.212186Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488007766869306777:2065];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:14:33.212249Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
|86.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest
>> KqpJoinOrder::FiveWayJoinWithPreds-ColumnStore
>> TConsoleTests::TestRemoveTenantWithBorrowedStorageUnits [GOOD]
>> TConsoleTests::TestListTenants
>> Secret::Deactivated [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::UnsubscribeWithoutSubscribe [GOOD]
Test command err: 2025-03-31T16:14:35.361992Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:7:2054]
2025-03-31T16:14:35.362064Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Successful handshake: owner# 1, generation# 1
2025-03-31T16:14:35.362296Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:7:2054], cookie# 0, event size# 72
2025-03-31T16:14:35.362334Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false
2025-03-31T16:14:35.369029Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30}
2025-03-31T16:14:35.369220Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [1:7:2054]
2025-03-31T16:14:35.369304Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Subscribe: subscriber# [1:7:2054], path# path, domainOwnerId# 0, capabilities#
2025-03-31T16:14:35.369431Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [1:7:2054]
2025-03-31T16:14:35.369500Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Unsubscribe: subscriber# [1:7:2054], path# path
2025-03-31T16:14:35.369560Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 1] DomainOwnerId: 0 }: sender# [1:7:2054]
2025-03-31T16:14:35.369620Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Subscribe: subscriber# [1:7:2054], path# [OwnerId: 1, LocalPathId: 1], domainOwnerId# 0, capabilities#
2025-03-31T16:14:35.369715Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { PathId: [OwnerId: 1, LocalPathId: 1] }: sender# [1:7:2054]
2025-03-31T16:14:35.369755Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Unsubscribe: subscriber# [1:7:2054], path# [OwnerId: 1, LocalPathId: 1]
2025-03-31T16:14:35.648814Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:7:2054]
2025-03-31T16:14:35.648881Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Successful handshake: owner# 1, generation# 1
2025-03-31T16:14:35.649012Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:7:2054], cookie# 0, event size# 72
2025-03-31T16:14:35.649058Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false
2025-03-31T16:14:35.649131Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30}
2025-03-31T16:14:35.649325Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [2:7:2054]
>> GroupWriteTest::Simple [GOOD]
|86.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest
|86.7%| [TA] $(B)/ydb/core/tx/scheme_board/ut_replica/test-results/unittest/{meta.json ... results_accumulator.log}
|86.7%| [TA] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_replica/test-results/unittest/{meta.json ... results_accumulator.log}
------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest >> GroupWriteTest::Simple [GOOD]
Test command err: RandomSeed# 12357719207996448966
2025-03-31T16:14:17.950277Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 1 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551}
2025-03-31T16:14:17.975493Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 1 recieved TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1}
2025-03-31T16:14:17.975583Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 1 going to send TEvBlock {TabletId# 1 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1}
2025-03-31T16:14:17.978472Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 1 recieved TEvBlockResult {Status# OK}
2025-03-31T16:14:17.993474Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 2 going to send TEvCollectGarbage {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1}
2025-03-31T16:14:17.996496Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK}
2025-03-31T16:14:36.207069Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die
2025-03-31T16:14:36.207232Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 32 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1}
2025-03-31T16:14:36.207292Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die
2025-03-31T16:14:36.207336Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 33 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1}
2025-03-31T16:14:36.271006Z 1 00h01m30.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 32 Channel# 0 Status# OK}
2025-03-31T16:14:36.271144Z 1 00h01m30.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 33 Channel# 0 Status# OK}
------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::Deactivated [GOOD]
Test command err: 2025-03-31T16:14:23.121397Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:98:2144], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000e39/r3tmp/tmpWbQioZ/pdisk_1.dat
TServer::EnableGrpc on GrpcPort 29172, node 1
TClient is connected to server localhost:2115
2025-03-31T16:14:23.723795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480
2025-03-31T16:14:23.762323Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:14:23.769419Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T16:14:23.769480Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T16:14:23.769513Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T16:14:23.769812Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
2025-03-31T16:14:23.807013Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:14:23.807193Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:14:23.818700Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
Initialization finished
REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1
2025-03-31T16:14:35.798731Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:691:2582], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:14:35.798881Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
: Error: Execution, code: 1060
:1:50: Error: Executing CREATE OBJECT SECRET
: Error: metadata provider service is disabled
;EXPECTATION=0
FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1
>> ResourcePoolClassifiersDdl::TestCreateResourcePoolClassifierOnServerless [GOOD]
>> ResourcePoolClassifiersDdl::TestAlterResourcePoolClassifier
|86.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest
|86.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] [GOOD]
>> KqpIndexLookupJoin::CheckCastInt32ToInt16-StreamLookupJoin-NotNull
>> ResourcePoolsSysView::TestResourcePoolsSysViewOnServerless [GOOD]
>> ResourcePoolsSysView::TestResourcePoolsSysViewFilters
|86.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest
>> ResourcePoolsDdl::TestAlterResourcePool [GOOD]
>> ResourcePoolsDdl::TestDropResourcePool
>> KqpWorkloadService::TestLargeConcurrentQueryLimit [GOOD]
>> KqpWorkloadService::TestLessConcurrentQueryLimit
>> TraverseDatashard::TraverseOneTableServerless
|86.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest
>> KqpSinkLocks::InvalidateOnCommit
>> ResourcePoolClassifiersDdl::TestExplicitPoolId [GOOD]
>> ResourcePoolClassifiersDdl::TestMultiGroupClassification
>> TSchemeShardTest::ConsistentCopyAfterDropIndexes [GOOD]
>> KqpIndexLookupJoin::LeftJoinSkipNullFilter+StreamLookup
|86.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest
|86.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_transfer_writer/unittest
>> KqpWorkloadServiceTables::TestPoolStateFetcherActor [GOOD]
>> KqpWorkloadServiceTables::TestCleanupOnServiceRestart
|86.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest
>> TConsoleTests::TestCreateServerlessTenant [GOOD]
>> TConsoleTests::TestCreateServerlessTenantWrongSharedDb
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::ConsistentCopyAfterDropIndexes [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141]
2025-03-31T16:13:53.823017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-03-31T16:13:53.823273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-31T16:13:53.823315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-03-31T16:13:53.823347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-03-31T16:13:53.823407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-03-31T16:13:53.823442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-03-31T16:13:53.823507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-31T16:13:53.823594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-03-31T16:13:53.823938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-03-31T16:13:53.919410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-31T16:13:53.919468Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:13:53.929639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-03-31T16:13:53.929733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-03-31T16:13:53.929892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-03-31T16:13:53.934652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-03-31T16:13:53.939321Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-03-31T16:13:53.944560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-03-31T16:13:53.946105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-03-31T16:13:53.955347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-03-31T16:13:53.966350Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-31T16:13:53.966454Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-31T16:13:53.966651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-03-31T16:13:53.966710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-31T16:13:53.966780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-03-31T16:13:53.967880Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-03-31T16:13:53.979732Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062]
2025-03-31T16:13:54.143347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-03-31T16:13:54.143538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:13:54.143895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-03-31T16:13:54.144121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-03-31T16:13:54.144176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:13:54.146022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-03-31T16:13:54.146137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-03-31T16:13:54.146277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:13:54.146361Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-03-31T16:13:54.146397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-03-31T16:13:54.146440Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-03-31T16:13:54.148070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:13:54.148113Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-03-31T16:13:54.148163Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-03-31T16:13:54.149829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:13:54.149869Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:13:54.149917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-03-31T16:13:54.149951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-03-31T16:13:54.153565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-03-31T16:13:54.155123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-03-31T16:13:54.155336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-03-31T16:13:54.156295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-03-31T16:13:54.156409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-03-31T16:13:54.156462Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-03-31T16:13:54.156704Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-03-31T16:13:54.156752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-03-31T16:13:54.156901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-03-31T16:13:54.156967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-03-31T16:13:54.160083Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-31T16:13:54.160129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-31T16:13:54.160295Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-31T16:13:54.160337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1
2025-03-31T16:13:54.160545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:13:54.160810Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-03-31T16:13:54.160928Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-03-31T16:13:54.160964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-31T16:13:54.161013Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-03-31T16:13:54.161046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-31T16:13:54.161090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-03-31T16:13:54.161136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-31T16:13:54.161170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-03-31T16:13:54.161198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-03-31T16:13:54.161274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-03-31T16:13:54.161309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-03-31T16:13:54.161336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-03-31T16:13:54.163004Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-03-31T16:13:54.163125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-03-31T16:13:54.163163Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... athId: 9 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 5000009 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: "Table1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCopying Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 8 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
2025-03-31T16:14:38.354035Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-03-31T16:14:38.354468Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table1" took 430us result status StatusSuccess
2025-03-31T16:14:38.355212Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table1" PathDescription { Self { Name: "Table1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 TableSchemaVersion: 4 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table1" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 4 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 8 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
2025-03-31T16:14:38.356854Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Copy1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-03-31T16:14:38.357285Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Copy1" took 445us result status StatusSuccess
2025-03-31T16:14:38.357942Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Copy1" PathDescription { Self { Name: "Copy1" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000007 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Copy1" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "Sync" LocalPathId: 6 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 8 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
2025-03-31T16:14:38.359661Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Copy2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-03-31T16:14:38.360072Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Copy2" took 424us result status StatusSuccess
2025-03-31T16:14:38.360638Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Copy2" PathDescription { Self { Name: "Copy2" PathId: 8 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 5000009 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Copy2" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 8 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 8 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
2025-03-31T16:14:38.362734Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Copy3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-03-31T16:14:38.363203Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Copy3" took 459us result status StatusSuccess
2025-03-31T16:14:38.363863Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Copy3" PathDescription { Self { Name: "Copy3" PathId: 9 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 5000009 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Copy3" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "Sync" LocalPathId: 10 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 8 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 9 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
|86.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest
>> KqpIndexLookupJoin::LeftSemiJoinWithDuplicatesInRightTable+StreamLookupJoin
>> TConsoleTests::TestListTenants [GOOD]
>> TConsoleTests::TestListTenantsExtSubdomain
>> TSchemeShardTest::DropPQAbort [GOOD]
>> TSchemeShardTest::ManyDirs
|86.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest
|86.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest
>> KqpFlipJoin::LeftSemi_3 [GOOD]
>> KqpJoin::LeftJoinPushdownPredicate_NoPushdown [GOOD]
>> KqpJoinOrder::TestJoinOrderHintsSimple+ColumnStore
|86.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest
|86.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest
>> KqpWorkloadService::TestQueryCancelAfterUnlimitedPool [GOOD]
>> KqpWorkloadService::TestStartQueryAfterCancel
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::LeftJoinPushdownPredicate_NoPushdown [GOOD]
Test command err: Trying to start YDB, gRPC: 18668, MsgBus: 28573
2025-03-31T16:14:34.776765Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488007793358208201:2130];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:14:34.807546Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001986/r3tmp/tmprQPIo5/pdisk_1.dat
2025-03-31T16:14:35.174663Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:14:35.191574Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:14:35.191636Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:14:35.193071Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 18668, node 1
2025-03-31T16:14:35.257013Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T16:14:35.257040Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T16:14:35.257052Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T16:14:35.257191Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:28573
TClient is connected to server localhost:28573
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-31T16:14:35.821218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:14:35.842277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:14:36.019210Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:14:36.182461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:14:36.270154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:14:37.972243Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007806243111790:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:14:37.972380Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:14:38.343987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-31T16:14:38.374270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-31T16:14:38.409799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-31T16:14:38.484287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-31T16:14:38.521377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-31T16:14:38.582655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-31T16:14:38.643531Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007810538079601:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:14:38.643627Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:14:38.644001Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007810538079606:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:14:38.648038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-31T16:14:38.672461Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007810538079608:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-03-31T16:14:38.753610Z node 1 :TX_PROXY ERROR: Actor# [1:7488007810538079665:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-31T16:14:39.775867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-03-31T16:14:39.776455Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488007793358208201:2130];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:14:39.776514Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-31T16:14:39.861349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
2025-03-31T16:14:39.897022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480
|86.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpFlipJoin::LeftSemi_3 [GOOD]
Test command err: Trying to start YDB, gRPC: 14897, MsgBus: 10931
2025-03-31T16:14:27.503726Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488007763712365838:2062];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:14:27.503865Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00198f/r3tmp/tmp82Rkxh/pdisk_1.dat
2025-03-31T16:14:27.896877Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:14:27.931445Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:14:27.931692Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
TServer::EnableGrpc on GrpcPort 14897, node 1
2025-03-31T16:14:27.933472Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-31T16:14:27.989440Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T16:14:27.989458Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T16:14:27.989463Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T16:14:27.989566Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:10931
TClient is connected to server localhost:10931
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-31T16:14:28.502244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:14:28.533111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:14:28.694292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:14:28.844847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:14:28.912718Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:14:30.361221Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007776597269500:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:14:30.361344Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:14:30.714857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-31T16:14:30.749014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-31T16:14:30.785310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-31T16:14:30.820664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-31T16:14:30.853150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-31T16:14:30.890931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-31T16:14:30.982076Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007776597270018:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:14:30.982165Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:14:30.982440Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007776597270023:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:14:30.986515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-31T16:14:30.999450Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007776597270025:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-03-31T16:14:31.096011Z node 1 :TX_PROXY ERROR: Actor# [1:7488007780892237377:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-31T16:14:32.019302Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-03-31T16:14:32.060859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
2025-03-31T16:14:32.105564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480
2025-03-31T16:14:32.143882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480
2025-03-31T16:14:32.503230Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488007763712365838:2062];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:14:32.503313Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
Trying to start YDB, gRPC: 6287, MsgBus: 21386
2025-03-31T16:14:34.167749Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488007796607416421:2059];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:14:34.167830Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00198f/r3tmp/tmpHR6in1/pdisk_1.dat
2025-03-31T16:14:34.276647Z node 2 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 6287, node 2
2025-03-31T16:14:34.317780Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:14:34.317895Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:14:34.334147Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-31T16:14:34.355632Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T16:14:34.355655Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T16:14:34.355662Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T16:14:34.355772Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:21386
TClient is connected to server localhost:21386
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:14:34.732934Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:14:34.770587Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:14:34.854600Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:14:35.015305Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:14:35.092805Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-31T16:14:37.690204Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488007809492320067:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:37.690400Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:37.741402Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:14:37.811274Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:14:37.842544Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:14:37.879868Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:14:37.913297Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:14:37.976036Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:14:38.022101Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488007813787287874:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:38.022183Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:38.022371Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488007813787287879:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:38.025520Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:14:38.035296Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488007813787287881:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:14:38.090285Z node 2 :TX_PROXY ERROR: Actor# [2:7488007813787287934:3439] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:14:39.017242Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:14:39.060813Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-31T16:14:39.136421Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-31T16:14:39.167973Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488007796607416421:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:14:39.168026Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:14:39.171526Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v0-std] [GOOD] >> KqpJoin::LeftJoinWithNull-StreamLookupJoin >> TConsoleTests::TestCreateServerlessTenantWrongSharedDb [GOOD] >> TConsoleTests::TestCreateTenantWrongName |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH21 |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_transfer_writer/unittest >> KqpIndexLookupJoin::CheckCastInt32ToInt16-StreamLookupJoin-NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastInt32ToInt16-StreamLookupJoin+NotNull >> TConsoleTests::TestListTenantsExtSubdomain [GOOD] >> TConsoleTests::TestModifyUsedZoneKind >> KqpNewEngine::Select1 >> TPersQueueTest::TxCounters [GOOD] >> KqpIndexLookupJoin::LeftJoinSkipNullFilter+StreamLookup [GOOD] >> KqpWorkloadServiceActors::TestCpuLoadActor [GOOD] >> KqpMergeCn::TopSortByDesc_Double_Limit3 |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> KqpIndexLookupJoin::LeftSemiJoinWithDuplicatesInRightTable+StreamLookupJoin [GOOD] >> TConsoleTests::TestCreateTenantWrongName [GOOD] >> TConsoleTests::TestCreateTenantWrongNameExtSubdomain ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftJoinSkipNullFilter+StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 8855, MsgBus: 8981 2025-03-31T16:14:39.327835Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488007816688781360:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:14:39.328081Z node 1 :METADATA_PROVIDER 
ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001980/r3tmp/tmp8pA5Fa/pdisk_1.dat 2025-03-31T16:14:39.776055Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:14:39.776157Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:14:39.778016Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:14:39.812253Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8855, node 1 2025-03-31T16:14:39.907809Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:14:39.907844Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:14:39.907855Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:14:39.907987Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8981 TClient is connected to server localhost:8981 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:14:40.499754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:14:40.516630Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:14:40.523562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:14:40.672239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:14:40.842834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:14:40.926525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:14:42.625494Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007829573685029:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:42.625653Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:42.961669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:14:43.005074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:14:43.045152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:14:43.116053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:14:43.148725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:14:43.194019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:14:43.245582Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007833868652839:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:43.245675Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:43.245992Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007833868652844:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:43.249950Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:14:43.263818Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007833868652846:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:14:43.318043Z node 1 :TX_PROXY ERROR: Actor# [1:7488007833868652899:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:14:44.274217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:14:44.327794Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488007816688781360:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:14:44.327877Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:14:44.352054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:14:44.389253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T16:14:44.468205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T16:14:44.497347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-31T16:14:44.529661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest >> TPersQueueTest::TxCounters [GOOD] Test command err: === Server->StartServer(false); 2025-03-31T16:06:51.208467Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488005806589817585:2276];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:06:51.208510Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:06:51.351064Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488005805231038995:2138];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:06:51.351118Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000996/r3tmp/tmpPGZK6x/pdisk_1.dat 2025-03-31T16:06:51.840036Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-31T16:06:51.853068Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-31T16:06:52.222067Z node 1 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:06:52.319416Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:06:52.322936Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:06:52.323529Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:06:52.327683Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:06:52.327756Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:06:52.332359Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:06:52.332506Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:06:52.334202Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6106, node 1 2025-03-31T16:06:52.647706Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/0l5c/000996/r3tmp/yandexsRDhWg.tmp 2025-03-31T16:06:52.647739Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/0l5c/000996/r3tmp/yandexsRDhWg.tmp 2025-03-31T16:06:52.647888Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/0l5c/000996/r3tmp/yandexsRDhWg.tmp 2025-03-31T16:06:52.647999Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:06:52.751492Z INFO: TTestServer started on Port 8725 GrpcPort 6106 TClient is connected to server localhost:8725 PQClient connected to localhost:6106 === TenantModeEnabled() = 0 === Init PQ - start server on port 6106 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:06:53.318076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-03-31T16:06:53.318287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:06:53.318485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-03-31T16:06:53.318756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-31T16:06:53.318810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:06:53.324327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-03-31T16:06:53.324549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-03-31T16:06:53.324735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:06:53.324775Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-03-31T16:06:53.324809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-03-31T16:06:53.324820Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-03-31T16:06:53.327267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:06:53.327316Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-03-31T16:06:53.327332Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-03-31T16:06:53.331989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:06:53.332025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2025-03-31T16:06:53.332041Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:06:53.332320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:06:53.332346Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:06:53.332374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-31T16:06:53.332423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-03-31T16:06:53.337025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:06:53.345391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-03-31T16:06:53.345588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-03-31T16:06:53.354352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743437213392, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-31T16:06:53.354556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743437213392 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-03-31T16:06:53.354589Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-31T16:06:53.354890Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-03-31T16:06:53.354937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-31T16:06:53.355118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-03-31T16:06:53.355167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, 
LocalPathId: 1], at schemeshard: 72057594046644480 2025-03-31T16:06:53.357315Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-03-31T16:06:53.357352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-03-31T16:06:53.357507Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-03-31T16:06:53.357525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7488005810884785357:2403], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1 2025-03-31T16:06:53.357565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:06:53.357602Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-03-31T16:06:53.357740Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-03-31T16:06:53.357763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-03-31T16:06:53.357780Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-03-31T16:06:53.357793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-03-31T16:06:53.357807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 1/1, is published: false 2025-03-31T16:06:53.357828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-03-31T16:06:53.357841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-03-31T16:06:53.357850Z node ... erId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-03-31T16:14:43.643272Z node 32 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-03-31T16:14:43.643305Z node 32 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [32:7488007834473252646:2501] (SourceId=123, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2025-03-31T16:14:43.643358Z node 32 :PQ_WRITE_PROXY DEBUG: ProceedPartition. 
session cookie: 2 sessionId: partition: 0 expectedGeneration: (NULL) 2025-03-31T16:14:43.646650Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 32, Generation: 1 2025-03-31T16:14:43.647203Z node 32 :PERSQUEUE INFO: new Cookie 123|868da6d-e9862bcc-87f8c89c-74a6d52d_0 generated for partition 0 topic 'topic' owner 123 2025-03-31T16:14:43.648048Z node 32 :PQ_WRITE_PROXY INFO: session inited cookie: 2 partition: 0 MaxSeqNo: 0 sessionId: 123|868da6d-e9862bcc-87f8c89c-74a6d52d_0 2025-03-31T16:14:43.675953Z node 32 :PQ_READ_PROXY DEBUG: new Describe partition request 2025-03-31T16:14:43.678011Z node 32 :PQ_READ_PROXY DEBUG: TDescribePartitionActor for request path: "/Root/topic" include_location: true 2025-03-31T16:14:43.678758Z node 32 :PQ_READ_PROXY DEBUG: TDescribePartitionActor[32:7488007834473252656:2504]: Bootstrap 2025-03-31T16:14:43.684018Z node 32 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [32:7488007834473252656:2504]: Request location 2025-03-31T16:14:43.691677Z node 32 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][topic] pipe [32:7488007834473252666:2505] connected; active server actors: 1 2025-03-31T16:14:43.693205Z node 32 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][topic] addPartitionToResponse tabletId 72075186224037892, partitionId 0, NodeId 32, Generation 1 2025-03-31T16:14:43.693300Z node 32 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [32:7488007834473252656:2504]: Got location 2025-03-31T16:14:43.694639Z node 32 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][topic] pipe [32:7488007834473252666:2505] disconnected; active server actors: 1 2025-03-31T16:14:43.694693Z node 32 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][topic] pipe [32:7488007834473252666:2505] disconnected no session 2025-03-31T16:14:43.704827Z node 32 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: 123|868da6d-e9862bcc-87f8c89c-74a6d52d_0 grpc read done: success: 0 data: 2025-03-31T16:14:43.704868Z node 32 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: 123|868da6d-e9862bcc-87f8c89c-74a6d52d_0 grpc read failed 2025-03-31T16:14:43.704934Z node 32 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: 123|868da6d-e9862bcc-87f8c89c-74a6d52d_0 grpc closed 2025-03-31T16:14:43.704972Z node 32 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: 123|868da6d-e9862bcc-87f8c89c-74a6d52d_0 is DEAD 2025-03-31T16:14:43.706516Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-03-31T16:14:43.706751Z node 32 :PQ_WRITE_PROXY DEBUG: new grpc connection 2025-03-31T16:14:43.706772Z node 32 :PQ_WRITE_PROXY DEBUG: new session created cookie 3 2025-03-31T16:14:43.708037Z node 32 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: grpc read done: success: 1 data: init_request { path: "topic" producer_id: "123" partition_with_generation { generation: 1 } } 2025-03-31T16:14:43.708201Z node 32 :PQ_WRITE_PROXY INFO: session request cookie: 3 path: "topic" producer_id: "123" partition_with_generation { generation: 1 } from ipv6:[::1]:45982 2025-03-31T16:14:43.708229Z node 32 :PQ_WRITE_PROXY INFO: write session: cookie=3 sessionId= userAgent="topic server" ip=ipv6:[::1]:45982 proto=topic topic=topic durationSec=0 2025-03-31T16:14:43.708245Z node 32 :PQ_WRITE_PROXY INFO: init check schema 2025-03-31T16:14:43.708292Z node 32 :PQ_WRITE_PROXY INFO: session to partition: 0, generation: 1 2025-03-31T16:14:43.709821Z node 32 :PQ_WRITE_PROXY 
INFO: session v1 cookie: 3 sessionId: describe result for acl check 2025-03-31T16:14:43.710142Z node 32 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2025-03-31T16:14:43.710178Z node 32 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-03-31T16:14:43.710207Z node 32 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-03-31T16:14:43.710241Z node 32 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [32:7488007834473252674:2508] (SourceId=123, PreferedPartition=0) ReplyResult: Partition=0, SeqNo=0 2025-03-31T16:14:43.710269Z node 32 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 3 sessionId: partition: 0 expectedGeneration: 1 2025-03-31T16:14:43.711074Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 32, Generation: 1 2025-03-31T16:14:43.711209Z node 32 :PERSQUEUE INFO: new Cookie 123|b8bb57ac-bc49cef2-d003082f-5a1b7956_0 generated for partition 0 topic 'topic' owner 123 2025-03-31T16:14:43.711823Z node 32 :PQ_WRITE_PROXY INFO: session inited cookie: 3 partition: 0 MaxSeqNo: 0 sessionId: 123|b8bb57ac-bc49cef2-d003082f-5a1b7956_0 2025-03-31T16:14:43.718749Z node 32 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: 123|b8bb57ac-bc49cef2-d003082f-5a1b7956_0 grpc read done: success: 1 data: write_request[data omitted] 2025-03-31T16:14:43.721316Z node 32 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: 123|b8bb57ac-bc49cef2-d003082f-5a1b7956_0 grpc read done: success: 1 data: write_request[data omitted] 2025-03-31T16:14:43.722063Z node 32 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: 123|b8bb57ac-bc49cef2-d003082f-5a1b7956_0 grpc read done: success: 1 data: write_request[data omitted] 2025-03-31T16:14:43.722126Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 32, Generation: 1 2025-03-31T16:14:43.722418Z node 32 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: 123|b8bb57ac-bc49cef2-d003082f-5a1b7956_0 grpc read done: success: 1 data: write_request[data omitted] 2025-03-31T16:14:43.727349Z node 32 :PQ_WRITE_PROXY DEBUG: SessionId: ydb://session/3?node_id=32&id=YjMyMDhkYjYtYWU0ZmVjZmItYjhkOTVmM2QtYWZlOWQwNTA= TxId: 01jqpfqpq05p7k44exs982sdac WriteId: {32, 281474976710673} 2025-03-31T16:14:43.732684Z node 32 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: {0, {32, 281474976710673}, 100000}, State: StateInit] bootstrapping {0, {32, 
281474976710673}, 100000} [32:7488007834473252687:2510] 2025-03-31T16:14:43.739281Z node 32 :PERSQUEUE INFO: [topic:{0, {32, 281474976710673}, 100000}:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-31T16:14:43.739371Z node 32 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: {0, {32, 281474976710673}, 100000}, State: StateInit] init complete for topic 'topic' partition {0, {32, 281474976710673}, 100000} generation 1 [32:7488007834473252687:2510] 2025-03-31T16:14:43.739859Z node 32 :PERSQUEUE INFO: new Cookie 123|fb3869f-684a20cf-1e6e1fe9-84b94bbd_0 generated for partition {0, {32, 281474976710673}, 100000} topic 'topic' owner 123 2025-03-31T16:14:43.742905Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-03-31T16:14:43.742969Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-03-31T16:14:43.742994Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-03-31T16:14:43.743023Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-03-31T16:14:43.747782Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-03-31T16:14:43.812387Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-03-31T16:14:43.812454Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-03-31T16:14:43.812487Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-03-31T16:14:43.826073Z node 32 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: 123|b8bb57ac-bc49cef2-d003082f-5a1b7956_0 grpc read done: success: 0 data: 2025-03-31T16:14:43.826119Z node 32 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: 123|b8bb57ac-bc49cef2-d003082f-5a1b7956_0 grpc read failed 2025-03-31T16:14:43.826178Z node 32 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: 123|b8bb57ac-bc49cef2-d003082f-5a1b7956_0 grpc closed 2025-03-31T16:14:43.826192Z node 32 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: 123|b8bb57ac-bc49cef2-d003082f-5a1b7956_0 is DEAD 2025-03-31T16:14:43.827511Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-03-31T16:14:43.827572Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-03-31T16:14:43.862872Z node 32 :PERSQUEUE WARN: [PQ: 72075186224037892] Unknown transaction 0 Counters: ================================
name=api.grpc.topic.stream_write.bytes: 20796
name=api.grpc.topic.stream_write.messages: 4
name=topic.write.bytes: 20796
name=topic.write.discarded_bytes: 0
name=topic.write.discarded_messages: 0
name=topic.write.messages: 4
name=topic.write.uncompressed_bytes: 16
name=topic.write.lag_milliseconds:
    bin=100: 0
    bin=200: 0
    bin=500: 0
    bin=1000: 0
    bin=2000: 3
    bin=5000: 1
    bin=10000: 0
    bin=30000: 0
    bin=60000: 0
    bin=180000: 0
    bin=999999: 0
name=topic.write.message_size_bytes:
    bin=1024: 1
    bin=5120: 0
    bin=10240: 2
    bin=20480: 1
    bin=51200: 0
    bin=102400: 0
    bin=204800: 0
    bin=524288: 0
    bin=1048576: 0
    bin=2097152: 0
    bin=5242880: 0
    bin=10485760: 0
    bin=67108864: 0
    bin=99999999: 0
name=topic.write.partition_throttled_milliseconds:
    bin=0: 4
    bin=1: 0
    bin=5: 0
    bin=10: 0
    bin=20: 0
    bin=50: 0
    bin=100: 0
    bin=500: 0
    bin=1000: 0
    bin=2500: 0
    bin=5000: 0
    bin=10000: 0
    bin=999999: 0
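The counter dump above follows a simple plain-text convention: scalar counters are printed as "name=<counter>: <value>", while histogram counters print a bare "name=<counter>:" header followed by indented "bin=<upper_bound>: <count>" lines. As a minimal sketch of how such a dump could be consumed, assuming only this textual format and nothing about YDB internals (the helper below is illustrative, not part of any YDB API):

# parse_counters.py - hypothetical helper for the dump format shown above
def parse_counters(dump: str):
    scalars = {}      # counter name -> integer value
    histograms = {}   # counter name -> {bin upper bound -> count}
    current = None    # histogram currently being filled, if any
    for raw in dump.splitlines():
        line = raw.strip()
        if line.startswith("name=") and line.endswith(":"):
            current = line[len("name="):-1]       # histogram header line
            histograms[current] = {}
        elif line.startswith("name="):
            name, _, value = line[len("name="):].partition(":")
            scalars[name.strip()] = int(value)    # scalar counter line
            current = None
        elif line.startswith("bin=") and current is not None:
            bound, _, count = line[len("bin="):].partition(":")
            histograms[current][int(bound)] = int(count)
    return scalars, histograms

As a consistency check against the dump above: the topic.write.lag_milliseconds histogram records 3 writes under the 2000 ms bound and 1 under the 5000 ms bound, matching the scalar counter topic.write.messages: 4.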
>> ResourcePoolsDdl::TestDropResourcePool [GOOD] >> TSchemeShardTest::AlterIndexTableDirectly [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> KqpWorkloadServiceActors::TestCpuLoadActor [GOOD] Test command err: 2025-03-31T16:14:04.815447Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488007664929798212:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:14:04.815584Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00116f/r3tmp/tmpL9LaC4/pdisk_1.dat 2025-03-31T16:14:05.234623Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:14:05.264441Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:14:05.264596Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:14:05.267864Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29562, node 1 2025-03-31T16:14:05.451757Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:14:05.451786Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:14:05.451793Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:14:05.451920Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2440 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:14:05.949882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:14:07.746143Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-03-31T16:14:07.746182Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was disabled 2025-03-31T16:14:07.764382Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NjZhM2Y0YjUtNjU2OTZlOWEtNDdkNmI0OGMtNGZmYzU0YTU=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id NjZhM2Y0YjUtNjU2OTZlOWEtNDdkNmI0OGMtNGZmYzU0YTU= 2025-03-31T16:14:07.764551Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NjZhM2Y0YjUtNjU2OTZlOWEtNDdkNmI0OGMtNGZmYzU0YTU=, ActorId: [1:7488007677814700733:2326], ActorState: unknown state, session actor bootstrapped 2025-03-31T16:14:07.789286Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OGI4NmIyMjgtMTlkNTUyNjQtZDE1MDE2YmMtYWJjOTMwZmQ=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id OGI4NmIyMjgtMTlkNTUyNjQtZDE1MDE2YmMtYWJjOTMwZmQ= 2025-03-31T16:14:07.789398Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OGI4NmIyMjgtMTlkNTUyNjQtZDE1MDE2YmMtYWJjOTMwZmQ=, ActorId: [1:7488007677814700735:2327], ActorState: unknown state, session actor bootstrapped 2025-03-31T16:14:07.790282Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OGI4NmIyMjgtMTlkNTUyNjQtZDE1MDE2YmMtYWJjOTMwZmQ=, ActorId: [1:7488007677814700735:2327], ActorState: ReadyState, TraceId: 01jqpfpkxe1ns12kmjvpkxs259, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [1:7488007677814700734:2296] database: Root databaseId: /Root pool id: 2025-03-31T16:14:07.790432Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OGI4NmIyMjgtMTlkNTUyNjQtZDE1MDE2YmMtYWJjOTMwZmQ=, ActorId: [1:7488007677814700735:2327], ActorState: ExecuteState, TraceId: 01jqpfpkxe1ns12kmjvpkxs259, Sending CompileQuery request 2025-03-31T16:14:08.487650Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OGI4NmIyMjgtMTlkNTUyNjQtZDE1MDE2YmMtYWJjOTMwZmQ=, ActorId: [1:7488007677814700735:2327], ActorState: ExecuteState, TraceId: 01jqpfpkxe1ns12kmjvpkxs259, ExecutePhyTx, tx: 0x000050C00023E798 literal: 0 commit: 1 txCtx.DeferredEffects.size(): 0 2025-03-31T16:14:08.488292Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OGI4NmIyMjgtMTlkNTUyNjQtZDE1MDE2YmMtYWJjOTMwZmQ=, ActorId: [1:7488007677814700735:2327], ActorState: ExecuteState, TraceId: 01jqpfpkxe1ns12kmjvpkxs259, Sending to Executer TraceId: 0 8 2025-03-31T16:14:08.492093Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OGI4NmIyMjgtMTlkNTUyNjQtZDE1MDE2YmMtYWJjOTMwZmQ=, ActorId: [1:7488007677814700735:2327], ActorState: ExecuteState, TraceId: 01jqpfpkxe1ns12kmjvpkxs259, Created new KQP executer: [1:7488007682109668038:2327] isRollback: 0 2025-03-31T16:14:08.565014Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OGI4NmIyMjgtMTlkNTUyNjQtZDE1MDE2YmMtYWJjOTMwZmQ=, ActorId: [1:7488007677814700735:2327], ActorState: ExecuteState, TraceId: 01jqpfpkxe1ns12kmjvpkxs259, Forwarded TEvStreamData to [1:7488007677814700734:2296] 2025-03-31T16:14:08.573545Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OGI4NmIyMjgtMTlkNTUyNjQtZDE1MDE2YmMtYWJjOTMwZmQ=, ActorId: [1:7488007677814700735:2327], ActorState: ExecuteState, TraceId: 01jqpfpkxe1ns12kmjvpkxs259, TEvTxResponse, CurrentTx: 1/1 response.status: SUCCESS 
2025-03-31T16:14:08.574495Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=OGI4NmIyMjgtMTlkNTUyNjQtZDE1MDE2YmMtYWJjOTMwZmQ=, ActorId: [1:7488007677814700735:2327], ActorState: ExecuteState, TraceId: 01jqpfpkxe1ns12kmjvpkxs259, txInfo Status: Committed Kind: Pure TotalDuration: 88.047 ServerDuration: 87.94 QueriesCount: 2 2025-03-31T16:14:08.574576Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OGI4NmIyMjgtMTlkNTUyNjQtZDE1MDE2YmMtYWJjOTMwZmQ=, ActorId: [1:7488007677814700735:2327], ActorState: ExecuteState, TraceId: 01jqpfpkxe1ns12kmjvpkxs259, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-03-31T16:14:08.574775Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=OGI4NmIyMjgtMTlkNTUyNjQtZDE1MDE2YmMtYWJjOTMwZmQ=, ActorId: [1:7488007677814700735:2327], ActorState: ExecuteState, TraceId: 01jqpfpkxe1ns12kmjvpkxs259, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-03-31T16:14:08.574799Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OGI4NmIyMjgtMTlkNTUyNjQtZDE1MDE2YmMtYWJjOTMwZmQ=, ActorId: [1:7488007677814700735:2327], ActorState: ExecuteState, TraceId: 01jqpfpkxe1ns12kmjvpkxs259, EndCleanup, isFinal: 1 2025-03-31T16:14:08.574844Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OGI4NmIyMjgtMTlkNTUyNjQtZDE1MDE2YmMtYWJjOTMwZmQ=, ActorId: [1:7488007677814700735:2327], ActorState: ExecuteState, TraceId: 01jqpfpkxe1ns12kmjvpkxs259, Sent query response back to proxy, proxyRequestId: 3, proxyId: [1:7488007664929798460:2277] 2025-03-31T16:14:08.574866Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OGI4NmIyMjgtMTlkNTUyNjQtZDE1MDE2YmMtYWJjOTMwZmQ=, ActorId: [1:7488007677814700735:2327], ActorState: unknown state, TraceId: 01jqpfpkxe1ns12kmjvpkxs259, Cleanup temp tables: 0 2025-03-31T16:14:08.575191Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OGI4NmIyMjgtMTlkNTUyNjQtZDE1MDE2YmMtYWJjOTMwZmQ=, ActorId: [1:7488007677814700735:2327], ActorState: unknown state, TraceId: 01jqpfpkxe1ns12kmjvpkxs259, Session actor destroyed 2025-03-31T16:14:08.590480Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=NjZhM2Y0YjUtNjU2OTZlOWEtNDdkNmI0OGMtNGZmYzU0YTU=, ActorId: [1:7488007677814700733:2326], ActorState: ReadyState, Session closed due to explicit close event 2025-03-31T16:14:08.590538Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=NjZhM2Y0YjUtNjU2OTZlOWEtNDdkNmI0OGMtNGZmYzU0YTU=, ActorId: [1:7488007677814700733:2326], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-03-31T16:14:08.590575Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NjZhM2Y0YjUtNjU2OTZlOWEtNDdkNmI0OGMtNGZmYzU0YTU=, ActorId: [1:7488007677814700733:2326], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-03-31T16:14:08.590597Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NjZhM2Y0YjUtNjU2OTZlOWEtNDdkNmI0OGMtNGZmYzU0YTU=, ActorId: [1:7488007677814700733:2326], ActorState: unknown state, Cleanup temp tables: 0 2025-03-31T16:14:08.590664Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NjZhM2Y0YjUtNjU2OTZlOWEtNDdkNmI0OGMtNGZmYzU0YTU=, ActorId: [1:7488007677814700733:2326], ActorState: unknown state, Session actor destroyed 2025-03-31T16:14:09.761551Z node 2 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488007687284913111:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:14:09.761616Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00116f/r3tmp/tmpQaNnqn/pdisk_1.dat 2025-03-31T16:14:09.923897Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:14:09.956339Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:14:09.956446Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:14:09.960497Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15052, node 2 2025-03-31T16:14:10.016052Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:14:10.016086Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:14:10.016094Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:14:10.016260Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18435 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVers ... 
:KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TCpuLoadFetcherActor] RunDataQuery: -- TCpuLoadFetcherActor::OnRunQuery SELECT SUM(CpuThreads) AS ThreadsCount, SUM(CpuThreads * (1.0 - CpuIdle)) AS TotalLoad FROM `.sys/nodes`; 2025-03-31T16:14:42.826631Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: default 2025-03-31T16:14:42.826688Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7488007828007486060:2361], DatabaseId: /Root, PoolId: default, Start pool fetching 2025-03-31T16:14:42.826941Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MTYwMmQ4MDAtYTIwNTA3NDAtOTBlZDIzYzctZmFiZTI4MzM=, ActorId: [8:7488007828007486057:2359], ActorState: ReadyState, TraceId: 01jqpfqp4achtnd2s1bngvpwbd, received request, proxyRequestId: 6 prepared: 0 tx_control: 1 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DML text: -- TCpuLoadFetcherActor::OnRunQuery SELECT SUM(CpuThreads) AS ThreadsCount, SUM(CpuThreads * (1.0 - CpuIdle)) AS TotalLoad FROM `.sys/nodes`; rpcActor: [8:7488007828007486058:2360] database: /Root databaseId: /Root pool id: default 2025-03-31T16:14:42.827068Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7488007828007486060:2361], DatabaseId: /Root, PoolId: default, Pool info successfully fetched 2025-03-31T16:14:42.827177Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Received new request from [8:7488007828007486057:2359], DatabaseId: /Root, PoolId: default, SessionId: ydb://session/3?node_id=8&id=MTYwMmQ4MDAtYTIwNTA3NDAtOTBlZDIzYzctZmFiZTI4MzM= 2025-03-31T16:14:42.827261Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool default, DatabaseId: /Root 2025-03-31T16:14:42.827280Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [8:7488007828007486062:2362], DatabaseId: /Root, PoolId: default, SessionId: ydb://session/3?node_id=8&id=MTYwMmQ4MDAtYTIwNTA3NDAtOTBlZDIzYzctZmFiZTI4MzM=, Start pool fetching 2025-03-31T16:14:42.827311Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7488007828007486063:2363], DatabaseId: /Root, PoolId: default, Start pool fetching 2025-03-31T16:14:42.827538Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7488007828007486063:2363], DatabaseId: /Root, PoolId: default, Pool info successfully fetched 2025-03-31T16:14:42.827602Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [8:7488007828007486062:2362], DatabaseId: /Root, PoolId: default, SessionId: ydb://session/3?node_id=8&id=MTYwMmQ4MDAtYTIwNTA3NDAtOTBlZDIzYzctZmFiZTI4MzM=, Pool info successfully resolved 2025-03-31T16:14:42.827651Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool default, DatabaseId: /Root, SessionId: ydb://session/3?node_id=8&id=MTYwMmQ4MDAtYTIwNTA3NDAtOTBlZDIzYzctZmFiZTI4MzM= 2025-03-31T16:14:42.827704Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [8:7488007828007486011:2347], DatabaseId: /Root, PoolId: default, Received new request, worker id: [8:7488007828007486057:2359], session id: ydb://session/3?node_id=8&id=MTYwMmQ4MDAtYTIwNTA3NDAtOTBlZDIzYzctZmFiZTI4MzM= 2025-03-31T16:14:42.827745Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [8:7488007828007486011:2347], DatabaseId: /Root, PoolId: default, Reply
continue success to [8:7488007828007486057:2359], session id: ydb://session/3?node_id=8&id=MTYwMmQ4MDAtYTIwNTA3NDAtOTBlZDIzYzctZmFiZTI4MzM=, local in flight: 1 2025-03-31T16:14:42.827754Z node 8 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Request placed into pool, DatabaseId: /Root, PoolId: default, SessionId: ydb://session/3?node_id=8&id=MTYwMmQ4MDAtYTIwNTA3NDAtOTBlZDIzYzctZmFiZTI4MzM= 2025-03-31T16:14:42.827796Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MTYwMmQ4MDAtYTIwNTA3NDAtOTBlZDIzYzctZmFiZTI4MzM=, ActorId: [8:7488007828007486057:2359], ActorState: ExecuteState, TraceId: 01jqpfqp4achtnd2s1bngvpwbd, continue request, pool id: default 2025-03-31T16:14:42.828040Z node 8 :KQP_SESSION INFO: Scheme error, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], status: PathNotTable 2025-03-31T16:14:43.372029Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MTYwMmQ4MDAtYTIwNTA3NDAtOTBlZDIzYzctZmFiZTI4MzM=, ActorId: [8:7488007828007486057:2359], ActorState: ExecuteState, TraceId: 01jqpfqp4achtnd2s1bngvpwbd, ExecutePhyTx, tx: 0x000050C00022C618 literal: 0 commit: 0 txCtx.DeferredEffects.size(): 0 2025-03-31T16:14:43.372100Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MTYwMmQ4MDAtYTIwNTA3NDAtOTBlZDIzYzctZmFiZTI4MzM=, ActorId: [8:7488007828007486057:2359], ActorState: ExecuteState, TraceId: 01jqpfqp4achtnd2s1bngvpwbd, Sending to Executer TraceId: 0 8 2025-03-31T16:14:43.372201Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MTYwMmQ4MDAtYTIwNTA3NDAtOTBlZDIzYzctZmFiZTI4MzM=, ActorId: [8:7488007828007486057:2359], ActorState: ExecuteState, TraceId: 01jqpfqp4achtnd2s1bngvpwbd, Created new KQP executer: [8:7488007832302453374:2359] isRollback: 0 2025-03-31T16:14:43.402661Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MTYwMmQ4MDAtYTIwNTA3NDAtOTBlZDIzYzctZmFiZTI4MzM=, ActorId: [8:7488007828007486057:2359], ActorState: ExecuteState, TraceId: 01jqpfqp4achtnd2s1bngvpwbd, TEvTxResponse, CurrentTx: 1/2 response.status: SUCCESS 2025-03-31T16:14:43.402764Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MTYwMmQ4MDAtYTIwNTA3NDAtOTBlZDIzYzctZmFiZTI4MzM=, ActorId: [8:7488007828007486057:2359], ActorState: ExecuteState, TraceId: 01jqpfqp4achtnd2s1bngvpwbd, ExecutePhyTx, tx: 0x000050C00022C558 literal: 1 commit: 1 txCtx.DeferredEffects.size(): 0 2025-03-31T16:14:43.403673Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MTYwMmQ4MDAtYTIwNTA3NDAtOTBlZDIzYzctZmFiZTI4MzM=, ActorId: [8:7488007828007486057:2359], ActorState: ExecuteState, TraceId: 01jqpfqp4achtnd2s1bngvpwbd, TEvTxResponse, CurrentTx: 2/2 response.status: SUCCESS 2025-03-31T16:14:43.403838Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=MTYwMmQ4MDAtYTIwNTA3NDAtOTBlZDIzYzctZmFiZTI4MzM=, ActorId: [8:7488007828007486057:2359], ActorState: ExecuteState, TraceId: 01jqpfqp4achtnd2s1bngvpwbd, txInfo Status: Committed Kind: ReadOnly TotalDuration: 31.904 ServerDuration: 31.834 QueriesCount: 2 2025-03-31T16:14:43.403956Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MTYwMmQ4MDAtYTIwNTA3NDAtOTBlZDIzYzctZmFiZTI4MzM=, ActorId: [8:7488007828007486057:2359], ActorState: ExecuteState, TraceId: 01jqpfqp4achtnd2s1bngvpwbd, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-03-31T16:14:43.404035Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=MTYwMmQ4MDAtYTIwNTA3NDAtOTBlZDIzYzctZmFiZTI4MzM=, ActorId: [8:7488007828007486057:2359], 
ActorState: ExecuteState, TraceId: 01jqpfqp4achtnd2s1bngvpwbd, Cleanup start, isFinal: 0 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 1 2025-03-31T16:14:43.404417Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [8:7488007828007486011:2347], DatabaseId: /Root, PoolId: default, Received cleanup request, worker id: [8:7488007828007486057:2359], session id: ydb://session/3?node_id=8&id=MTYwMmQ4MDAtYTIwNTA3NDAtOTBlZDIzYzctZmFiZTI4MzM=, duration: 0.576854s, cpu consumed: 0.002106s 2025-03-31T16:14:43.404469Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [8:7488007828007486011:2347], DatabaseId: /Root, PoolId: default, Reply cleanup success to [8:7488007828007486057:2359], session id: ydb://session/3?node_id=8&id=MTYwMmQ4MDAtYTIwNTA3NDAtOTBlZDIzYzctZmFiZTI4MzM=, local in flight: 0 2025-03-31T16:14:43.404531Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MTYwMmQ4MDAtYTIwNTA3NDAtOTBlZDIzYzctZmFiZTI4MzM=, ActorId: [8:7488007828007486057:2359], ActorState: CleanupState, TraceId: 01jqpfqp4achtnd2s1bngvpwbd, EndCleanup, isFinal: 0 2025-03-31T16:14:43.404597Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MTYwMmQ4MDAtYTIwNTA3NDAtOTBlZDIzYzctZmFiZTI4MzM=, ActorId: [8:7488007828007486057:2359], ActorState: CleanupState, TraceId: 01jqpfqp4achtnd2s1bngvpwbd, Sent query response back to proxy, proxyRequestId: 6, proxyId: [8:7488007802237681392:2279] 2025-03-31T16:14:43.404973Z node 8 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Request finished in pool, DatabaseId: /Root, PoolId: default, Duration: 0.576854s, CpuConsumed: 0.002106s, AdjustCpuQuota: 0 2025-03-31T16:14:43.405117Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TCpuLoadFetcherActor] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=8&id=MTYwMmQ4MDAtYTIwNTA3NDAtOTBlZDIzYzctZmFiZTI4MzM=, TxId: 2025-03-31T16:14:43.405222Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TCpuLoadFetcherActor] Finish with SUCCESS, SessionId: ydb://session/3?node_id=8&id=MTYwMmQ4MDAtYTIwNTA3NDAtOTBlZDIzYzctZmFiZTI4MzM=, TxId: 2025-03-31T16:14:43.405512Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=MTYwMmQ4MDAtYTIwNTA3NDAtOTBlZDIzYzctZmFiZTI4MzM=, ActorId: [8:7488007828007486057:2359], ActorState: ReadyState, Session closed due to explicit close event 2025-03-31T16:14:43.405551Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=MTYwMmQ4MDAtYTIwNTA3NDAtOTBlZDIzYzctZmFiZTI4MzM=, ActorId: [8:7488007828007486057:2359], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-03-31T16:14:43.405582Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MTYwMmQ4MDAtYTIwNTA3NDAtOTBlZDIzYzctZmFiZTI4MzM=, ActorId: [8:7488007828007486057:2359], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-03-31T16:14:43.405611Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MTYwMmQ4MDAtYTIwNTA3NDAtOTBlZDIzYzctZmFiZTI4MzM=, ActorId: [8:7488007828007486057:2359], ActorState: unknown state, Cleanup temp tables: 0 2025-03-31T16:14:43.405680Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MTYwMmQ4MDAtYTIwNTA3NDAtOTBlZDIzYzctZmFiZTI4MzM=, ActorId: [8:7488007828007486057:2359], ActorState: unknown state, Session actor destroyed 2025-03-31T16:14:43.452661Z node 8 :KQP_SESSION INFO: SessionId: 
ydb://session/3?node_id=8&id=YTc5Mjk1MGMtNGRiZjM4NDQtZDQ0ZmQxYTAtNGYzZDdjODY=, ActorId: [8:7488007823712518539:2336], ActorState: ReadyState, Session closed due to explicit close event 2025-03-31T16:14:43.452717Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=YTc5Mjk1MGMtNGRiZjM4NDQtZDQ0ZmQxYTAtNGYzZDdjODY=, ActorId: [8:7488007823712518539:2336], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-03-31T16:14:43.452755Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=YTc5Mjk1MGMtNGRiZjM4NDQtZDQ0ZmQxYTAtNGYzZDdjODY=, ActorId: [8:7488007823712518539:2336], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-03-31T16:14:43.452786Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=YTc5Mjk1MGMtNGRiZjM4NDQtZDQ0ZmQxYTAtNGYzZDdjODY=, ActorId: [8:7488007823712518539:2336], ActorState: unknown state, Cleanup temp tables: 0 2025-03-31T16:14:43.452892Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=YTc5Mjk1MGMtNGRiZjM4NDQtZDQ0ZmQxYTAtNGYzZDdjODY=, ActorId: [8:7488007823712518539:2336], ActorState: unknown state, Session actor destroyed ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftSemiJoinWithDuplicatesInRightTable+StreamLookupJoin [GOOD] Test command err: Trying to start YDB, gRPC: 27755, MsgBus: 15488 2025-03-31T16:14:40.729692Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488007822221377158:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:14:40.730737Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00197f/r3tmp/tmpMcVVH7/pdisk_1.dat 2025-03-31T16:14:41.157844Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:14:41.192759Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:14:41.192838Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 27755, node 1 2025-03-31T16:14:41.197892Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:14:41.243696Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:14:41.243729Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:14:41.243740Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:14:41.243888Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15488 TClient is connected to server localhost:15488 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:14:41.823119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:14:41.859710Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:14:41.875419Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:14:42.031391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:14:42.181783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:14:42.270000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:14:44.008771Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007835106280824:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:44.008901Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:44.345228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:14:44.379857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:14:44.414197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:14:44.449994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:14:44.491380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:14:44.536626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:14:44.588515Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007839401248630:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:44.588588Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:44.589186Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007839401248635:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:44.593686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:14:44.604379Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007839401248637:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:14:44.667594Z node 1 :TX_PROXY ERROR: Actor# [1:7488007839401248690:3444] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:14:45.625785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:14:45.659844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:14:45.729873Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488007822221377158:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:14:45.729958Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpJoinOrder::CanonizedJoinOrderTPCH13 >> overlapping_portions.py::TestOverlappingPortions::test [FAIL] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::AlterIndexTableDirectly [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:13:53.823831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:13:53.823927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:13:53.823981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:13:53.824028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:13:53.824087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:13:53.824113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:13:53.824176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:13:53.824310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:13:53.824642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:13:53.952891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:13:53.952951Z node 1 :IMPORT WARN: 
Table profiles were not loaded 2025-03-31T16:13:53.957940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:13:53.958039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:13:53.958195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:13:53.964544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:13:53.964769Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:13:53.965437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:13:53.965657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:13:53.968846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:13:53.970434Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:13:53.970507Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:13:53.970720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:13:53.970770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:13:53.970837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:13:53.970991Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:13:53.980154Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:13:54.116488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:13:54.116737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:13:54.116930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:13:54.117206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:13:54.117271Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:13:54.133562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:13:54.134063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:13:54.134284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:13:54.134338Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:13:54.134374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:13:54.134418Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:13:54.136505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:13:54.136576Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:13:54.136610Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:13:54.138394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:13:54.138443Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:13:54.138497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:13:54.138543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:13:54.149194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:13:54.151066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:13:54.151278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:13:54.155069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:13:54.155213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:13:54.155268Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:13:54.155967Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:13:54.156032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:13:54.156207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:13:54.156300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, 
LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:13:54.165829Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:13:54.165869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:13:54.166005Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:13:54.166037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:13:54.166230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:13:54.166283Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:13:54.166372Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:13:54.166398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:13:54.166426Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:13:54.166448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:13:54.166482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:13:54.166509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:13:54.166537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:13:54.166560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:13:54.166603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:13:54.166630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:13:54.166655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:13:54.168374Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:13:54.168479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:13:54.168515Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
itioningPolicy { SizeToSplit: 100500 MinPartitionsCount: 1 FastSplitSettings { SizeThreshold: 100500 RowCountThreshold: 100500 } } } TableSchemaVersion: 3 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 3 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1592 DataSize: 1592 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-1" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:14:47.471398Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/table/indexByValue" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-31T16:14:47.471975Z node 16 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/table/indexByValue" took 876us result status StatusSuccess 2025-03-31T16:14:47.473353Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/table/indexByValue" PathDescription { Self { Name: "indexByValue" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 3 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateAlter Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 3 TablePartitionVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage 
{ Tables { TotalSize: 1592 DataSize: 1592 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-1" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "indexByValue" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 3 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 100500 MinPartitionsCount: 1 FastSplitSettings { SizeThreshold: 100500 RowCountThreshold: 100500 } } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:14:47.475970Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/table/indexByValue/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-31T16:14:47.476520Z node 16 :SCHEMESHARD_DESCRIBE INFO: 
Tablet 72057594046678944 describe path "/MyRoot/table/indexByValue/indexImplTable" took 579us result status StatusSuccess 2025-03-31T16:14:47.477776Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/table/indexByValue/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 3 TablePartitionVersion: 3 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 100500 MinPartitionsCount: 1 FastSplitSettings { SizeThreshold: 100500 RowCountThreshold: 100500 } } } TableSchemaVersion: 3 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" 
IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 3 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1592 DataSize: 1592 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-1" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> test_ttl.py::TestTTLValueSinceUnixEpoch::test_case [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> ResourcePoolsDdl::TestDropResourcePool [GOOD] Test command err: 2025-03-31T16:14:05.843051Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488007671118186265:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:14:05.843217Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001160/r3tmp/tmp3yEsVA/pdisk_1.dat 2025-03-31T16:14:06.207968Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16219, node 1 2025-03-31T16:14:06.275387Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:14:06.280908Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:14:06.288148Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:14:06.307709Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:14:06.307734Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:14:06.307764Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:14:06.307887Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63545 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:14:06.627527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:14:08.909489Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-03-31T16:14:08.909738Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-03-31T16:14:08.909859Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools were enabled 2025-03-31T16:14:08.913142Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZjFiYTJhNWItMmU3YjExNjMtZDJkNjY5NzctZGU4NTY2M2I=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ZjFiYTJhNWItMmU3YjExNjMtZDJkNjY5NzctZGU4NTY2M2I= 2025-03-31T16:14:08.913707Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, node count: 1 2025-03-31T16:14:08.922814Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7488007684003088798:2329], Start check tables existence, number paths: 2 2025-03-31T16:14:08.923017Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZjFiYTJhNWItMmU3YjExNjMtZDJkNjY5NzctZGU4NTY2M2I=, ActorId: [1:7488007684003088799:2330], ActorState: unknown state, session actor bootstrapped 2025-03-31T16:14:08.927285Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7488007684003088798:2329], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-03-31T16:14:08.927371Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7488007684003088798:2329], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-03-31T16:14:08.927398Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7488007684003088798:2329], Successfully finished 2025-03-31T16:14:08.927457Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-03-31T16:14:08.939846Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007684003088816:2301], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-03-31T16:14:08.944987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480
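The record above creates the helper pool sample_pool_id; the pool this test actually drops is my_pool, whose CREATE statement is quoted verbatim a few records further down. A minimal YQL sketch of the create/drop pair that ResourcePoolsDdl::TestDropResourcePool exercises is shown here; the CREATE text matches the log, while the DROP statement is not quoted in this excerpt and is given as the standard YQL counterpart:

-- Pool admitting one concurrent query and queueing none, as quoted in the log.
CREATE RESOURCE POOL my_pool WITH (
    CONCURRENT_QUERY_LIMIT=1,
    QUEUE_SIZE=0
);
-- Dropping the pool (assumed DDL form); the later NOT_FOUND "Resource pool
-- my_pool not found" fetches in this block are consistent with a completed drop.
DROP RESOURCE POOL my_pool;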
2025-03-31T16:14:08.946284Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007684003088816:2301], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2025-03-31T16:14:08.946474Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007684003088816:2301], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2025-03-31T16:14:08.956752Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007684003088816:2301], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:14:09.055496Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007684003088816:2301], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-03-31T16:14:09.059874Z node 1 :TX_PROXY ERROR: Actor# [1:7488007688298056163:2333] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:14:09.060026Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007684003088816:2301], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2025-03-31T16:14:09.070684Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: default 2025-03-31T16:14:09.070718Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root 2025-03-31T16:14:09.070782Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007688298056172:2332], DatabaseId: /Root, PoolId: default, Start pool fetching 2025-03-31T16:14:09.071021Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZjFiYTJhNWItMmU3YjExNjMtZDJkNjY5NzctZGU4NTY2M2I=, ActorId: [1:7488007684003088799:2330], ActorState: ReadyState, TraceId: 01jqpfpn5e1td4f4fvpe6mf4tr, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DDL text: CREATE RESOURCE POOL my_pool WITH ( CONCURRENT_QUERY_LIMIT=1, QUEUE_SIZE=0 ); rpcActor: [0:0:0] database: /Root databaseId: /Root pool id: default 2025-03-31T16:14:09.075794Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007688298056172:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:09.075959Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:09.346742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:14:09.354279Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=ZjFiYTJhNWItMmU3YjExNjMtZDJkNjY5NzctZGU4NTY2M2I=, ActorId: [1:7488007684003088799:2330], ActorState: ExecuteState, TraceId: 01jqpfpn5e1td4f4fvpe6mf4tr, Cleanup start, isFinal: 0 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [1:7488007688298056181:2330] WorkloadServiceCleanup: 0 2025-03-31T16:14:09.355889Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZjFiYTJhNWItMmU3YjExNjMtZDJkNjY5NzctZGU4NTY2M2I=, ActorId: [1:7488007684003088799:2330], ActorState: CleanupState, TraceId: 01jqpfpn5e1td4f4fvpe6mf4tr, EndCleanup, isFinal: 0 2025-03-31T16:14:09.355989Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZjFiYTJhNWItMmU3YjExNjMtZDJkNjY5NzctZGU4NTY2M2I=, ActorId: [1:7488007684003088799:2330], ActorState: CleanupState, TraceId: 01jqpfpn5e1td4f4fvpe6mf4tr, Sent query response back to proxy, proxyRequestId: 3, proxyId: [1:7488007671118186513:2277] 2025-03-31T16:14:09.363413Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OWVhODgyYzEtNmFiNjEwY2MtNmU4NWQ0ZDItZGZmNzc2MTU=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id OWVhODgyYzEtNmFiNjEwY2MtNmU4NWQ0ZDItZGZmNzc2MTU= 2025-03-31T16:14:09.363691Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OWVhODgyYzEtNmFiNjEwY2MtNmU4NWQ0ZDItZGZmNzc2MTU=, ActorId: [1:7488007688298056203:2333], ActorState: unknown state, session actor bootstrapped 2025-03-31T16:14:09.363797Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OWVhODgyYzEtNmFiNjEwY2MtNmU4NWQ0ZDItZGZmNzc2MTU=, ActorId: [1:7488007688298056203:2333], ActorState: ReadyState, TraceId: 01jqpfpnek4vsd15k5srv2h1j7, received request, proxyRequestId: 4 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [1:7488007688298056202:2360] database: Root databaseId: /Root pool id: my_pool 2025-03-31T16:14:09.363833Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: my_pool 2025-03-31T16:14:09.363909Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Received new request from [1:7488007688298056203:2333], DatabaseId: /Root, PoolId: my_pool, SessionId: ydb://session/3?node_id=1&id=OWVhODgyYzEtNmFiNjEwY2MtNmU4NWQ0ZDItZGZmNzc2MTU= 2025-03-31T16:14:09.363946Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007688298056205:2334], DatabaseId: /Root, PoolId: my_pool, Start pool fetching 2025-03-31T16:14:09.364020Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7488007688298056206:2335], Database: /Root, Start database fetching 2025-03-31T16:14:09.364401Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7488007688298056206:2335], Database: /Root, Database info successfully fetched, serverless: 0 2025-03-31T16:14:09.364492Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched database info, DatabaseId: /Root, Serverless: 0 2025-03-31T16:14:09.364549Z node 1 :KQP_WORKLOAD_SERVICE DEBUG:
[WorkloadService] [TPoolResolverActor] ActorId: [1:7488007688298056215:2336], DatabaseId: /Root, PoolId: my_pool, SessionId: ... sCount: 2 2025-03-31T16:14:46.821945Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NjdiMmY5ZGMtOWYzNjY4OTYtOWUwY2MxNjgtZjA5Nzg2OTU=, ActorId: [7:7488007844909112597:2444], ActorState: ExecuteState, TraceId: 01jqpfqt0vfj4e2rjrr3yg9swt, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-03-31T16:14:46.822001Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=NjdiMmY5ZGMtOWYzNjY4OTYtOWUwY2MxNjgtZjA5Nzg2OTU=, ActorId: [7:7488007844909112597:2444], ActorState: ExecuteState, TraceId: 01jqpfqt0vfj4e2rjrr3yg9swt, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-03-31T16:14:46.822030Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NjdiMmY5ZGMtOWYzNjY4OTYtOWUwY2MxNjgtZjA5Nzg2OTU=, ActorId: [7:7488007844909112597:2444], ActorState: ExecuteState, TraceId: 01jqpfqt0vfj4e2rjrr3yg9swt, EndCleanup, isFinal: 0 2025-03-31T16:14:46.822070Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NjdiMmY5ZGMtOWYzNjY4OTYtOWUwY2MxNjgtZjA5Nzg2OTU=, ActorId: [7:7488007844909112597:2444], ActorState: ExecuteState, TraceId: 01jqpfqt0vfj4e2rjrr3yg9swt, Sent query response back to proxy, proxyRequestId: 18, proxyId: [7:7488007810549373035:2081] 2025-03-31T16:14:46.822448Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: my_pool, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=7&id=NjdiMmY5ZGMtOWYzNjY4OTYtOWUwY2MxNjgtZjA5Nzg2OTU=, TxId: 2025-03-31T16:14:46.822538Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: my_pool, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, Finish with SUCCESS, SessionId: ydb://session/3?node_id=7&id=NjdiMmY5ZGMtOWYzNjY4OTYtOWUwY2MxNjgtZjA5Nzg2OTU=, TxId: 2025-03-31T16:14:46.822653Z node 7 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolHandlerActorBase] ActorId: [7:7488007832024210243:2338], DatabaseId: /Root, PoolId: my_pool, successfully refreshed pool state, in flight: 0, delayed: 0 2025-03-31T16:14:46.822723Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=NjdiMmY5ZGMtOWYzNjY4OTYtOWUwY2MxNjgtZjA5Nzg2OTU=, ActorId: [7:7488007844909112597:2444], ActorState: ReadyState, Session closed due to explicit close event 2025-03-31T16:14:46.822750Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=NjdiMmY5ZGMtOWYzNjY4OTYtOWUwY2MxNjgtZjA5Nzg2OTU=, ActorId: [7:7488007844909112597:2444], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-03-31T16:14:46.822773Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NjdiMmY5ZGMtOWYzNjY4OTYtOWUwY2MxNjgtZjA5Nzg2OTU=, ActorId: [7:7488007844909112597:2444], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-03-31T16:14:46.822796Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NjdiMmY5ZGMtOWYzNjY4OTYtOWUwY2MxNjgtZjA5Nzg2OTU=, ActorId: [7:7488007844909112597:2444], ActorState: unknown state, Cleanup temp tables: 0 2025-03-31T16:14:46.822864Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NjdiMmY5ZGMtOWYzNjY4OTYtOWUwY2MxNjgtZjA5Nzg2OTU=, ActorId: [7:7488007844909112597:2444], ActorState: unknown state, Session
actor destroyed 2025-03-31T16:14:46.825814Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=ODkzNjNkZjQtZjA0YjU5NGQtYjE1NDM1ZmYtNmRlZDFjYTU=, ActorId: [7:7488007827729242840:2331], ActorState: ExecuteState, TraceId: 01jqpfqt017650vvn0jgdmgwvf, Cleanup start, isFinal: 0 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [7:7488007844909112605:2331] WorkloadServiceCleanup: 0 2025-03-31T16:14:46.883405Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=ODkzNjNkZjQtZjA0YjU5NGQtYjE1NDM1ZmYtNmRlZDFjYTU=, ActorId: [7:7488007827729242840:2331], ActorState: CleanupState, TraceId: 01jqpfqt017650vvn0jgdmgwvf, EndCleanup, isFinal: 0 2025-03-31T16:14:46.883480Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=ODkzNjNkZjQtZjA0YjU5NGQtYjE1NDM1ZmYtNmRlZDFjYTU=, ActorId: [7:7488007827729242840:2331], ActorState: CleanupState, TraceId: 01jqpfqt017650vvn0jgdmgwvf, Sent query response back to proxy, proxyRequestId: 16, proxyId: [7:7488007810549373035:2081] 2025-03-31T16:14:46.894434Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=ZWUxZjk2MTgtOGRkNmI1OGQtNmRjNzExNzgtNmY1MmExOA==, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ZWUxZjk2MTgtOGRkNmI1OGQtNmRjNzExNzgtNmY1MmExOA== 2025-03-31T16:14:46.894767Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=ZWUxZjk2MTgtOGRkNmI1OGQtNmRjNzExNzgtNmY1MmExOA==, ActorId: [7:7488007844909112673:2459], ActorState: unknown state, session actor bootstrapped 2025-03-31T16:14:46.894910Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=ZWUxZjk2MTgtOGRkNmI1OGQtNmRjNzExNzgtNmY1MmExOA==, ActorId: [7:7488007844909112673:2459], ActorState: ReadyState, TraceId: 01jqpfqt3e1yp833243c5tzpb0, received request, proxyRequestId: 19 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [7:7488007844909112672:2649] database: Root databaseId: /Root pool id: my_pool 2025-03-31T16:14:46.894938Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: my_pool 2025-03-31T16:14:46.894979Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Received new request from [7:7488007844909112673:2459], DatabaseId: /Root, PoolId: my_pool, SessionId: ydb://session/3?node_id=7&id=ZWUxZjk2MTgtOGRkNmI1OGQtNmRjNzExNzgtNmY1MmExOA== 2025-03-31T16:14:46.895021Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488007844909112675:2460], DatabaseId: /Root, PoolId: my_pool, Start pool fetching 2025-03-31T16:14:46.895143Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [7:7488007844909112676:2461], DatabaseId: /Root, PoolId: my_pool, SessionId: ydb://session/3?node_id=7&id=ZWUxZjk2MTgtOGRkNmI1OGQtNmRjNzExNzgtNmY1MmExOA==, Start pool fetching 2025-03-31T16:14:46.895167Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488007844909112677:2462], DatabaseId: /Root, PoolId: my_pool, Start pool fetching 2025-03-31T16:14:46.895858Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488007844909112677:2462], DatabaseId: /Root, PoolId: my_pool, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool my_pool not found or you don't have access permissions } 2025-03-31T16:14:46.895964Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488007844909112675:2460], DatabaseId: /Root, PoolId: my_pool, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool my_pool not found or you don't have access permissions } 2025-03-31T16:14:46.896082Z node 7 :KQP_WORKLOAD_SERVICE ERROR: [WorkloadService] [TPoolResolverActor] ActorId: [7:7488007844909112676:2461], DatabaseId: /Root, PoolId: my_pool, SessionId: ydb://session/3?node_id=7&id=ZWUxZjk2MTgtOGRkNmI1OGQtNmRjNzExNzgtNmY1MmExOA==, Failed to fetch pool info NOT_FOUND, issues: {
: Error: Resource pool my_pool not found or you don't have access permissions } 2025-03-31T16:14:46.896200Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolResolverActor] ActorId: [7:7488007844909112676:2461], DatabaseId: /Root, PoolId: my_pool, SessionId: ydb://session/3?node_id=7&id=ZWUxZjk2MTgtOGRkNmI1OGQtNmRjNzExNzgtNmY1MmExOA==, Failed to resolve pool, NOT_FOUND, issues: {
: Error: Failed to resolve pool id my_pool subissue: {
: Error: Resource pool my_pool not found or you don't have access permissions } } 2025-03-31T16:14:46.896270Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool my_pool, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool my_pool not found or you don't have access permissions } 2025-03-31T16:14:46.896388Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply continue error NOT_FOUND to [7:7488007844909112673:2459]: {
: Error: Failed to resolve pool id my_pool subissue: {
: Error: Resource pool my_pool not found or you don't have access permissions } } 2025-03-31T16:14:46.896507Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=ZWUxZjk2MTgtOGRkNmI1OGQtNmRjNzExNzgtNmY1MmExOA==, ActorId: [7:7488007844909112673:2459], ActorState: ExecuteState, TraceId: 01jqpfqt3e1yp833243c5tzpb0, Create QueryResponse for error on request, msg: Query failed during adding/waiting in workload pool 2025-03-31T16:14:46.896650Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=ZWUxZjk2MTgtOGRkNmI1OGQtNmRjNzExNzgtNmY1MmExOA==, ActorId: [7:7488007844909112673:2459], ActorState: ExecuteState, TraceId: 01jqpfqt3e1yp833243c5tzpb0, Cleanup start, isFinal: 1 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 1 2025-03-31T16:14:46.896787Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Finished request with worker actor [7:7488007844909112673:2459], DatabaseId: /Root, PoolId: my_pool, SessionId: ydb://session/3?node_id=7&id=ZWUxZjk2MTgtOGRkNmI1OGQtNmRjNzExNzgtNmY1MmExOA== 2025-03-31T16:14:46.896856Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=ZWUxZjk2MTgtOGRkNmI1OGQtNmRjNzExNzgtNmY1MmExOA==, ActorId: [7:7488007844909112673:2459], ActorState: CleanupState, TraceId: 01jqpfqt3e1yp833243c5tzpb0, EndCleanup, isFinal: 1 2025-03-31T16:14:46.896970Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=ZWUxZjk2MTgtOGRkNmI1OGQtNmRjNzExNzgtNmY1MmExOA==, ActorId: [7:7488007844909112673:2459], ActorState: CleanupState, TraceId: 01jqpfqt3e1yp833243c5tzpb0, Sent query response back to proxy, proxyRequestId: 19, proxyId: [7:7488007810549373035:2081] 2025-03-31T16:14:46.897001Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=ZWUxZjk2MTgtOGRkNmI1OGQtNmRjNzExNzgtNmY1MmExOA==, ActorId: [7:7488007844909112673:2459], ActorState: unknown state, TraceId: 01jqpfqt3e1yp833243c5tzpb0, Cleanup temp tables: 0 2025-03-31T16:14:46.897122Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=ZWUxZjk2MTgtOGRkNmI1OGQtNmRjNzExNzgtNmY1MmExOA==, ActorId: [7:7488007844909112673:2459], ActorState: unknown state, TraceId: 01jqpfqt3e1yp833243c5tzpb0, Session actor destroyed 2025-03-31T16:14:46.907219Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=ODkzNjNkZjQtZjA0YjU5NGQtYjE1NDM1ZmYtNmRlZDFjYTU=, ActorId: [7:7488007827729242840:2331], ActorState: ReadyState, Session closed due to explicit close event 2025-03-31T16:14:46.907271Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=ODkzNjNkZjQtZjA0YjU5NGQtYjE1NDM1ZmYtNmRlZDFjYTU=, ActorId: [7:7488007827729242840:2331], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-03-31T16:14:46.907298Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=ODkzNjNkZjQtZjA0YjU5NGQtYjE1NDM1ZmYtNmRlZDFjYTU=, ActorId: [7:7488007827729242840:2331], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-03-31T16:14:46.907326Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=ODkzNjNkZjQtZjA0YjU5NGQtYjE1NDM1ZmYtNmRlZDFjYTU=, ActorId: [7:7488007827729242840:2331], ActorState: unknown state, Cleanup temp tables: 0 2025-03-31T16:14:46.907400Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=ODkzNjNkZjQtZjA0YjU5NGQtYjE1NDM1ZmYtNmRlZDFjYTU=, ActorId: [7:7488007827729242840:2331], ActorState: unknown state, Session actor destroyed |86.8%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v1-content_based] [GOOD] >> TConsoleTests::TestModifyUsedZoneKind [GOOD] >> TConsoleTests::TestMergeConfig >> KqpIndexLookupJoin::InnerJoinCustomColumnOrder+StreamLookup >> KqpNewEngine::Select1 [GOOD] >> KqpNewEngine::Replace >> TConsoleTests::TestCreateTenantWrongNameExtSubdomain [GOOD] >> TConsoleTests::TestCreateTenantWrongPool >> KqpJoin::LeftJoinWithNull-StreamLookupJoin [GOOD] >> ResourcePoolsSysView::TestResourcePoolsSysViewFilters [GOOD] >> KqpIndexLookupJoin::CheckCastInt32ToInt16-StreamLookupJoin+NotNull [GOOD] >> KqpWorkloadServiceTables::TestCleanupOnServiceRestart [GOOD] >> KqpWorkloadServiceTables::TestLeaseExpiration >> test_ttl.py::TestTTLOnIndexedTable::test_case [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::LeftJoinWithNull-StreamLookupJoin [GOOD] Test command err: Trying to start YDB, gRPC: 16999, MsgBus: 5892 2025-03-31T16:14:43.088563Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488007831999855307:2128];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:14:43.088620Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00197d/r3tmp/tmpEYsX8h/pdisk_1.dat 2025-03-31T16:14:43.487629Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:14:43.531677Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:14:43.531769Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:14:43.535535Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16999, node 1 2025-03-31T16:14:43.618740Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:14:43.618761Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:14:43.618768Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:14:43.618892Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5892 TClient is connected to server localhost:5892 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
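Note on the warnings that follow: the repeated "Resource pool default not found or you don't have access permissions" messages are the expected bootstrap path. The workload service looks a pool up before it has been created, the TPoolCreatorActor then creates it, and the "path exist, request accepts it" response on the doublecheck retry is tolerated. For reference, such a pool can be declared in YQL roughly as below; this is a minimal sketch, and the limit values are illustrative assumptions, not values taken from this run:

CREATE RESOURCE POOL my_pool WITH (
    CONCURRENT_QUERY_LIMIT = 10,  -- assumed concurrency limit, for illustration only
    QUEUE_SIZE = 100              -- assumed queue depth, for illustration only
);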
2025-03-31T16:14:44.134007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:14:44.163837Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:14:44.182084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:14:44.369158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:14:44.575103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:14:44.660108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:14:46.492954Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007844884758909:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:46.493061Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:46.800224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:14:46.871269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:14:46.902331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:14:46.933519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:14:47.017188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:14:47.062559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:14:47.142570Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007849179726728:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:47.142640Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:47.142996Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007849179726733:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:47.146540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:14:47.156518Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007849179726735:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:14:47.215810Z node 1 :TX_PROXY ERROR: Actor# [1:7488007849179726788:3444] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:14:48.089062Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488007831999855307:2128];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:14:48.089123Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:14:48.135187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:14:48.164348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:14:48.191959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> ResourcePoolsSysView::TestResourcePoolsSysViewFilters [GOOD] Test command err: 2025-03-31T16:14:04.815434Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488007667267981987:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:14:04.815620Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00116d/r3tmp/tmp19dkKF/pdisk_1.dat 2025-03-31T16:14:05.221003Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:14:05.262851Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:14:05.263007Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:14:05.267891Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26708, node 1 2025-03-31T16:14:05.451754Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:14:05.451778Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:14:05.451792Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:14:05.451915Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2762 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:14:05.947326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:14:07.867529Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-03-31T16:14:07.867641Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7488007680152884514:2328], Start check tables existence, number paths: 2 2025-03-31T16:14:07.868041Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-03-31T16:14:07.868066Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools were enabled 2025-03-31T16:14:07.868127Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, node count: 1 2025-03-31T16:14:07.872231Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YmVjNjUxYzEtODBmYjgzYWItNTBmZmI0NTYtYWFmZTcwNWY=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id YmVjNjUxYzEtODBmYjgzYWItNTBmZmI0NTYtYWFmZTcwNWY= 2025-03-31T16:14:07.879729Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7488007680152884514:2328], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-03-31T16:14:07.879858Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7488007680152884514:2328], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-03-31T16:14:07.879906Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7488007680152884514:2328], Successfully finished 2025-03-31T16:14:07.881321Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-03-31T16:14:07.881409Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YmVjNjUxYzEtODBmYjgzYWItNTBmZmI0NTYtYWFmZTcwNWY=, ActorId: [1:7488007680152884532:2330], ActorState: unknown state, session actor bootstrapped 2025-03-31T16:14:07.884150Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007680152884534:2303], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-03-31T16:14:07.890298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 
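The ResourcePoolsSysView::TestResourcePoolsSysViewFilters output in this block exercises filtered reads over the resource-pools system view; the exact request texts are recorded later in this log. In YQL they look like this (copied from the query text visible below, not invented):

SELECT * FROM `.sys/resource_pools` WHERE "a" < Name AND Name < "c";
SELECT * FROM `.sys/resource_pools` WHERE Name >= "default";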
2025-03-31T16:14:07.892816Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007680152884534:2303], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2025-03-31T16:14:07.895228Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007680152884534:2303], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2025-03-31T16:14:07.905832Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007680152884534:2303], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:14:07.961186Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007680152884534:2303], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-03-31T16:14:07.968200Z node 1 :TX_PROXY ERROR: Actor# [1:7488007680152884585:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:14:07.968304Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007680152884534:2303], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2025-03-31T16:14:07.970762Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NzdjMmU0ODAtOTA2ZTdkM2QtNTY5ZDdhZjYtYTNiZDg5ODY=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id NzdjMmU0ODAtOTA2ZTdkM2QtNTY5ZDdhZjYtYTNiZDg5ODY= 2025-03-31T16:14:07.971075Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2025-03-31T16:14:07.971110Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root 2025-03-31T16:14:07.971158Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NzdjMmU0ODAtOTA2ZTdkM2QtNTY5ZDdhZjYtYTNiZDg5ODY=, ActorId: [1:7488007680152884592:2331], ActorState: unknown state, session actor bootstrapped 2025-03-31T16:14:07.971187Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007680152884594:2332], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-03-31T16:14:07.971370Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NzdjMmU0ODAtOTA2ZTdkM2QtNTY5ZDdhZjYtYTNiZDg5ODY=, ActorId: [1:7488007680152884592:2331], ActorState: ReadyState, TraceId: 01jqpfpm33e073km9375gh6swf, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [1:7488007680152884591:2340] database: Root databaseId: /Root pool id: sample_pool_id 2025-03-31T16:14:07.971574Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Received new request from [1:7488007680152884592:2331], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=NzdjMmU0ODAtOTA2ZTdkM2QtNTY5ZDdhZjYtYTNiZDg5ODY= 2025-03-31T16:14:07.971674Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7488007680152884600:2333], Database: /Root, Start database fetching 2025-03-31T16:14:07.971920Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7488007680152884600:2333], Database: /Root, Database info successfully fetched, serverless: 0 2025-03-31T16:14:07.971981Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched database info, DatabaseId: /Root, Serverless: 0 2025-03-31T16:14:07.972074Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [1:7488007680152884604:2334], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=NzdjMmU0ODAtOTA2ZTdkM2QtNTY5ZDdhZjYtYTNiZDg5ODY=, Start pool fetching 2025-03-31T16:14:07.972129Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] 
[TPoolFetcherActor] ActorId: [1:7488007680152884605:2335], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-03-31T16:14:07.972360Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007680152884594:2332], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-03-31T16:14:07.972410Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root 2025-03-31T16:14:07.972421Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007680152884605:2335], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-03-31T16:14:07.972427Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new handler for pool /Root/sample_pool_id 2025-03-31T16:14:07.972524Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [1:7488007680152884604:2334], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=NzdjMmU0ODAtOTA2ZTdkM2QtNTY5ZDdhZjYtYTNiZDg5ODY=, Pool info successfully resolved 2025-03-31T16:14:07.972607Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzdjMmU0ODAtOTA2ZTdkM2QtNTY5ZDdhZjYtYTNiZDg5ODY= 2025-03-31T16:14:07.973432Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7488007680152884608:2336], DatabaseId: /Root, PoolId: sample_pool_id, Subscribed on schemeboard notifications for path: [OwnerId: 72057594046644480, LocalPathId: 5] 2025-03-31T16:14:07.973571Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7488007680152884608:2336], DatabaseId: /Root, PoolId: sample_pool_id, Received new request, worker id: [1:7488007680152884592:2331], session id: ydb://session/3?node_id=1&id=NzdjMmU0ODAtOTA2ZTdkM2QtNTY5ZDdhZjYtYTNiZDg5ODY= 2025-03-31T16:14:07.973583Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Request placed into pool, DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=NzdjMmU0ODAtOTA2ZTdkM2QtNTY5ZDdhZjYtYTNiZDg5ODY= 2025-03-31T16:14:07.973641Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7488007680152884608:2336], DatabaseId: /Root, PoolId: sample_pool_id, Reply continue success to [1:7488007680152884592:2331] ... 
sionId: ydb://session/3?node_id=8&id=NTk4OGRlYjAtMTI2YTE3NGYtODlhNDY0NzgtNjRhOWRiMjM=, ActorId: [8:7488007853780138741:2405], ActorState: ReadyState, TraceId: 01jqpfqvkf3c85g5c635mdakk4, received request, proxyRequestId: 5 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT * FROM `.sys/resource_pools` WHERE "a" < Name AND Name < "c" rpcActor: [7:7488007853520992910:3355] database: /Root/test-dedicated databaseId: /Root/test-dedicated pool id: default 2025-03-31T16:14:48.431423Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NTk4OGRlYjAtMTI2YTE3NGYtODlhNDY0NzgtNjRhOWRiMjM=, ActorId: [8:7488007853780138741:2405], ActorState: ReadyState, TraceId: 01jqpfqvkf3c85g5c635mdakk4, request placed into pool from cache: default 2025-03-31T16:14:48.431523Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NTk4OGRlYjAtMTI2YTE3NGYtODlhNDY0NzgtNjRhOWRiMjM=, ActorId: [8:7488007853780138741:2405], ActorState: ExecuteState, TraceId: 01jqpfqvkf3c85g5c635mdakk4, Sending CompileQuery request 2025-03-31T16:14:48.590623Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NTk4OGRlYjAtMTI2YTE3NGYtODlhNDY0NzgtNjRhOWRiMjM=, ActorId: [8:7488007853780138741:2405], ActorState: ExecuteState, TraceId: 01jqpfqvkf3c85g5c635mdakk4, ExecutePhyTx, tx: 0x000050C0000D5AD8 literal: 0 commit: 1 txCtx.DeferredEffects.size(): 0 2025-03-31T16:14:48.590682Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NTk4OGRlYjAtMTI2YTE3NGYtODlhNDY0NzgtNjRhOWRiMjM=, ActorId: [8:7488007853780138741:2405], ActorState: ExecuteState, TraceId: 01jqpfqvkf3c85g5c635mdakk4, Sending to Executer TraceId: 0 8 2025-03-31T16:14:48.590782Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NTk4OGRlYjAtMTI2YTE3NGYtODlhNDY0NzgtNjRhOWRiMjM=, ActorId: [8:7488007853780138741:2405], ActorState: ExecuteState, TraceId: 01jqpfqvkf3c85g5c635mdakk4, Created new KQP executer: [8:7488007853780138751:2405] isRollback: 0 2025-03-31T16:14:48.648092Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NTk4OGRlYjAtMTI2YTE3NGYtODlhNDY0NzgtNjRhOWRiMjM=, ActorId: [8:7488007853780138741:2405], ActorState: ExecuteState, TraceId: 01jqpfqvkf3c85g5c635mdakk4, Forwarded TEvStreamData to [7:7488007853520992910:3355] 2025-03-31T16:14:48.650238Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NTk4OGRlYjAtMTI2YTE3NGYtODlhNDY0NzgtNjRhOWRiMjM=, ActorId: [8:7488007853780138741:2405], ActorState: ExecuteState, TraceId: 01jqpfqvkf3c85g5c635mdakk4, TEvTxResponse, CurrentTx: 1/1 response.status: SUCCESS 2025-03-31T16:14:48.650621Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=NTk4OGRlYjAtMTI2YTE3NGYtODlhNDY0NzgtNjRhOWRiMjM=, ActorId: [8:7488007853780138741:2405], ActorState: ExecuteState, TraceId: 01jqpfqvkf3c85g5c635mdakk4, txInfo Status: Committed Kind: ReadOnly TotalDuration: 59.939 ServerDuration: 59.85 QueriesCount: 2 2025-03-31T16:14:48.650702Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NTk4OGRlYjAtMTI2YTE3NGYtODlhNDY0NzgtNjRhOWRiMjM=, ActorId: [8:7488007853780138741:2405], ActorState: ExecuteState, TraceId: 01jqpfqvkf3c85g5c635mdakk4, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-03-31T16:14:48.651179Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=NTk4OGRlYjAtMTI2YTE3NGYtODlhNDY0NzgtNjRhOWRiMjM=, ActorId: [8:7488007853780138741:2405], ActorState: ExecuteState, TraceId: 01jqpfqvkf3c85g5c635mdakk4, Cleanup start, isFinal: 1 
CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-03-31T16:14:48.651220Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NTk4OGRlYjAtMTI2YTE3NGYtODlhNDY0NzgtNjRhOWRiMjM=, ActorId: [8:7488007853780138741:2405], ActorState: ExecuteState, TraceId: 01jqpfqvkf3c85g5c635mdakk4, EndCleanup, isFinal: 1 2025-03-31T16:14:48.651279Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NTk4OGRlYjAtMTI2YTE3NGYtODlhNDY0NzgtNjRhOWRiMjM=, ActorId: [8:7488007853780138741:2405], ActorState: ExecuteState, TraceId: 01jqpfqvkf3c85g5c635mdakk4, Sent query response back to proxy, proxyRequestId: 5, proxyId: [8:7488007832305301178:2274] 2025-03-31T16:14:48.651301Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NTk4OGRlYjAtMTI2YTE3NGYtODlhNDY0NzgtNjRhOWRiMjM=, ActorId: [8:7488007853780138741:2405], ActorState: unknown state, TraceId: 01jqpfqvkf3c85g5c635mdakk4, Cleanup temp tables: 0 2025-03-31T16:14:48.651907Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NTk4OGRlYjAtMTI2YTE3NGYtODlhNDY0NzgtNjRhOWRiMjM=, ActorId: [8:7488007853780138741:2405], ActorState: unknown state, TraceId: 01jqpfqvkf3c85g5c635mdakk4, Session actor destroyed 2025-03-31T16:14:48.658880Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=N2M4NjFjOGQtMTY0ZTBkNzktNWEzYmNjZmQtZTUyYjNkYjM=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id N2M4NjFjOGQtMTY0ZTBkNzktNWEzYmNjZmQtZTUyYjNkYjM= 2025-03-31T16:14:48.659645Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=N2M4NjFjOGQtMTY0ZTBkNzktNWEzYmNjZmQtZTUyYjNkYjM=, ActorId: [8:7488007853780138765:2413], ActorState: unknown state, session actor bootstrapped 2025-03-31T16:14:48.659806Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=N2M4NjFjOGQtMTY0ZTBkNzktNWEzYmNjZmQtZTUyYjNkYjM=, ActorId: [8:7488007853780138765:2413], ActorState: ReadyState, TraceId: 01jqpfqvtk7y2fhvttw769ap4z, received request, proxyRequestId: 6 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT * FROM `.sys/resource_pools` WHERE Name >= "default" rpcActor: [7:7488007853520992928:3364] database: /Root/test-dedicated databaseId: /Root/test-dedicated pool id: default 2025-03-31T16:14:48.659841Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=N2M4NjFjOGQtMTY0ZTBkNzktNWEzYmNjZmQtZTUyYjNkYjM=, ActorId: [8:7488007853780138765:2413], ActorState: ReadyState, TraceId: 01jqpfqvtk7y2fhvttw769ap4z, request placed into pool from cache: default 2025-03-31T16:14:48.659937Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=N2M4NjFjOGQtMTY0ZTBkNzktNWEzYmNjZmQtZTUyYjNkYjM=, ActorId: [8:7488007853780138765:2413], ActorState: ExecuteState, TraceId: 01jqpfqvtk7y2fhvttw769ap4z, Sending CompileQuery request 2025-03-31T16:14:48.791187Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=N2M4NjFjOGQtMTY0ZTBkNzktNWEzYmNjZmQtZTUyYjNkYjM=, ActorId: [8:7488007853780138765:2413], ActorState: ExecuteState, TraceId: 01jqpfqvtk7y2fhvttw769ap4z, ExecutePhyTx, tx: 0x000050C0002BE598 literal: 0 commit: 1 txCtx.DeferredEffects.size(): 0 2025-03-31T16:14:48.791259Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=N2M4NjFjOGQtMTY0ZTBkNzktNWEzYmNjZmQtZTUyYjNkYjM=, ActorId: [8:7488007853780138765:2413], ActorState: ExecuteState, TraceId: 01jqpfqvtk7y2fhvttw769ap4z, Sending to Executer TraceId: 0 8 2025-03-31T16:14:48.791371Z node 8 :KQP_SESSION DEBUG: 
SessionId: ydb://session/3?node_id=8&id=N2M4NjFjOGQtMTY0ZTBkNzktNWEzYmNjZmQtZTUyYjNkYjM=, ActorId: [8:7488007853780138765:2413], ActorState: ExecuteState, TraceId: 01jqpfqvtk7y2fhvttw769ap4z, Created new KQP executer: [8:7488007853780138771:2413] isRollback: 0 2025-03-31T16:14:48.805198Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=N2M4NjFjOGQtMTY0ZTBkNzktNWEzYmNjZmQtZTUyYjNkYjM=, ActorId: [8:7488007853780138765:2413], ActorState: ExecuteState, TraceId: 01jqpfqvtk7y2fhvttw769ap4z, Forwarded TEvStreamData to [7:7488007853520992928:3364] 2025-03-31T16:14:48.808029Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=N2M4NjFjOGQtMTY0ZTBkNzktNWEzYmNjZmQtZTUyYjNkYjM=, ActorId: [8:7488007853780138765:2413], ActorState: ExecuteState, TraceId: 01jqpfqvtk7y2fhvttw769ap4z, TEvTxResponse, CurrentTx: 1/1 response.status: SUCCESS 2025-03-31T16:14:48.808194Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=N2M4NjFjOGQtMTY0ZTBkNzktNWEzYmNjZmQtZTUyYjNkYjM=, ActorId: [8:7488007853780138765:2413], ActorState: ExecuteState, TraceId: 01jqpfqvtk7y2fhvttw769ap4z, txInfo Status: Committed Kind: ReadOnly TotalDuration: 17.139 ServerDuration: 17.054 QueriesCount: 2 2025-03-31T16:14:48.808263Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=N2M4NjFjOGQtMTY0ZTBkNzktNWEzYmNjZmQtZTUyYjNkYjM=, ActorId: [8:7488007853780138765:2413], ActorState: ExecuteState, TraceId: 01jqpfqvtk7y2fhvttw769ap4z, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-03-31T16:14:48.808655Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=N2M4NjFjOGQtMTY0ZTBkNzktNWEzYmNjZmQtZTUyYjNkYjM=, ActorId: [8:7488007853780138765:2413], ActorState: ExecuteState, TraceId: 01jqpfqvtk7y2fhvttw769ap4z, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-03-31T16:14:48.808686Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=N2M4NjFjOGQtMTY0ZTBkNzktNWEzYmNjZmQtZTUyYjNkYjM=, ActorId: [8:7488007853780138765:2413], ActorState: ExecuteState, TraceId: 01jqpfqvtk7y2fhvttw769ap4z, EndCleanup, isFinal: 1 2025-03-31T16:14:48.808744Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=N2M4NjFjOGQtMTY0ZTBkNzktNWEzYmNjZmQtZTUyYjNkYjM=, ActorId: [8:7488007853780138765:2413], ActorState: ExecuteState, TraceId: 01jqpfqvtk7y2fhvttw769ap4z, Sent query response back to proxy, proxyRequestId: 6, proxyId: [8:7488007832305301178:2274] 2025-03-31T16:14:48.808765Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=N2M4NjFjOGQtMTY0ZTBkNzktNWEzYmNjZmQtZTUyYjNkYjM=, ActorId: [8:7488007853780138765:2413], ActorState: unknown state, TraceId: 01jqpfqvtk7y2fhvttw769ap4z, Cleanup temp tables: 0 2025-03-31T16:14:48.809200Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=N2M4NjFjOGQtMTY0ZTBkNzktNWEzYmNjZmQtZTUyYjNkYjM=, ActorId: [8:7488007853780138765:2413], ActorState: unknown state, TraceId: 01jqpfqvtk7y2fhvttw769ap4z, Session actor destroyed 2025-03-31T16:14:48.818971Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 8 2025-03-31T16:14:48.819438Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-03-31T16:14:48.819594Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 9 2025-03-31T16:14:48.819802Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connected -> 
Disconnected 2025-03-31T16:14:48.828146Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=MzAwMjNhN2YtODcxZjE2MjktZThmNWVjZDItNmQ4Yjk2OTg=, ActorId: [7:7488007827751188027:2333], ActorState: ReadyState, Session closed due to explicit close event 2025-03-31T16:14:48.828206Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=MzAwMjNhN2YtODcxZjE2MjktZThmNWVjZDItNmQ4Yjk2OTg=, ActorId: [7:7488007827751188027:2333], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-03-31T16:14:48.828252Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=MzAwMjNhN2YtODcxZjE2MjktZThmNWVjZDItNmQ4Yjk2OTg=, ActorId: [7:7488007827751188027:2333], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-03-31T16:14:48.828283Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=MzAwMjNhN2YtODcxZjE2MjktZThmNWVjZDItNmQ4Yjk2OTg=, ActorId: [7:7488007827751188027:2333], ActorState: unknown state, Cleanup temp tables: 0 2025-03-31T16:14:48.828366Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=MzAwMjNhN2YtODcxZjE2MjktZThmNWVjZDItNmQ4Yjk2OTg=, ActorId: [7:7488007827751188027:2333], ActorState: unknown state, Session actor destroyed ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::CheckCastInt32ToInt16-StreamLookupJoin+NotNull [GOOD] >> test_ttl.py::TestTTLDefaultEnv::test_case [GOOD] Test command err: Trying to start YDB, gRPC: 17862, MsgBus: 16186 2025-03-31T16:14:38.118652Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488007812974501078:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:14:38.118705Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001981/r3tmp/tmpSAJ5JA/pdisk_1.dat 2025-03-31T16:14:38.548280Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:14:38.548382Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:14:38.568189Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:14:38.604715Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17862, node 1 2025-03-31T16:14:38.683717Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:14:38.683742Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:14:38.683751Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:14:38.683879Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16186 TClient is connected to server localhost:16186 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:14:39.250412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:14:39.277785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:14:39.410523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:14:39.586281Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:14:39.659481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:14:41.408466Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007825859404730:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:41.408655Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:41.753446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:14:41.796294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:14:41.827034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:14:41.858167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:14:41.928008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:14:41.970528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:14:42.010774Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007830154372539:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:42.010854Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:42.010854Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007830154372544:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:42.015405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:14:42.024389Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007830154372546:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:14:42.088914Z node 1 :TX_PROXY ERROR: Actor# [1:7488007830154372598:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:14:43.047477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:14:43.116003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:14:43.116493Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488007812974501078:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:14:43.116664Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 7322, MsgBus: 63054 2025-03-31T16:14:44.896644Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488007838463819782:2128];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:14:44.896688Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001981/r3tmp/tmpJk7uyi/pdisk_1.dat 2025-03-31T16:14:45.098143Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:14:45.132169Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:14:45.132247Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:14:45.133990Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7322, node 2 2025-03-31T16:14:45.256230Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:14:45.256258Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:14:45.256265Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:14:45.256375Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63054 TClient is connected to server localhost:63054 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:14:45.704174Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:14:45.710603Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:14:45.718533Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:14:45.777465Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:14:45.963162Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:14:46.049074Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:14:48.365054Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488007855643690660:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:48.365168Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:48.407823Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:14:48.443045Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:14:48.514167Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:14:48.546115Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:14:48.579413Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:14:48.623891Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:14:48.686437Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488007855643691171:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:48.686546Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:48.687313Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488007855643691176:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:48.691912Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:14:48.707320Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488007855643691178:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:14:48.765402Z node 2 :TX_PROXY ERROR: Actor# [2:7488007855643691231:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:14:49.648308Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:14:49.749419Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-31T16:14:49.896835Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488007838463819782:2128];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:14:49.896890Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> ResourcePoolClassifiersDdl::TestAlterResourcePoolClassifier [GOOD] >> ResourcePoolClassifiersDdl::TestDropResourcePoolClassifier >> KqpSinkLocks::InvalidateOnCommit [GOOD] >> KqpSinkLocks::InvalidateOlapOnCommit >> KqpMergeCn::TopSortByDesc_Double_Limit3 [GOOD] >> KqpMergeCn::TopSortBy_Date_Limit4 >> TConsoleTests::TestMergeConfig [GOOD] >> TConsoleTests::TestRemoveTenant >> ResourcePoolClassifiersDdl::TestMultiGroupClassification [GOOD] >> ResourcePoolClassifiersSysView::TestResourcePoolClassifiersSysViewOnServerless >> TConsoleTests::TestCreateTenantWrongPool [GOOD] >> TConsoleTests::TestCreateTenantWrongPoolExtSubdomain |86.8%| [TA] $(B)/ydb/tests/functional/sqs/messaging/test-results/py3test/{meta.json ... results_accumulator.log} >> KqpWorkloadServiceDistributed::TestDistributedLargeConcurrentQueryLimit [GOOD] >> KqpNewEngine::Replace [GOOD] >> KqpNewEngine::SelfJoin |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test >> test_ttl.py::TestTTLValueSinceUnixEpoch::test_case [GOOD] >> ColumnStatistics::CountMinSketchServerlessStatistics >> BasicStatistics::TwoTables >> TraverseDatashard::TraverseOneTableServerless [GOOD] |86.9%| [TA] {RESULT} $(B)/ydb/tests/functional/sqs/messaging/test-results/py3test/{meta.json ... 
results_accumulator.log} >> KqpIndexLookupJoin::InnerJoinCustomColumnOrder+StreamLookup [GOOD] >> KqpIndexLookupJoin::InnerJoinCustomColumnOrder-StreamLookup >> TConsoleTests::TestCreateTenantWrongPoolExtSubdomain [GOOD] >> TConsoleTests::TestCreateTenantAlreadyExists >> Viewer::JsonAutocompleteEmpty >> KqpWorkloadService::TestStartQueryAfterCancel [GOOD] >> KqpWorkloadService::TestZeroConcurrentQueryLimit >> Viewer::JsonAutocompleteStartOfDatabaseName >> Viewer::PDiskMerging ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> KqpWorkloadServiceDistributed::TestDistributedLargeConcurrentQueryLimit [GOOD] Test command err: 2025-03-31T16:14:06.674435Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488007675497539470:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:14:06.675659Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00114c/r3tmp/tmp8Hny4H/pdisk_1.dat 2025-03-31T16:14:07.038831Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26738, node 1 2025-03-31T16:14:07.074206Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:14:07.074372Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:14:07.076186Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:14:07.136203Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:14:07.136225Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:14:07.136235Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:14:07.136359Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17201 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:14:07.450975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
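The repeated "Resource pool default not found or you don't have access permissions" warnings in these dumps are expected bootstrap noise rather than test failures: on the first query the workload service looks the pool up, gets NOT_FOUND, proposes ESchemeOpCreateResourcePool, retries once with "Transaction ... completed, doublechecking", and accepts the "path exist, request accepts it" check at /Root/.metadata/workload_manager/pools/default as success. Written out by hand, the bootstrapped pool would amount to roughly the following YQL; this is a sketch only, and the property names are assumptions taken from the workload-manager feature rather than values observed in this log.

-- A sketch, not taken from this log: an explicit equivalent of the pool the
-- workload service creates lazily. CONCURRENT_QUERY_LIMIT and QUEUE_SIZE are
-- assumed workload-manager property names; -1 is assumed to mean unlimited.
CREATE RESOURCE POOL default WITH (
    CONCURRENT_QUERY_LIMIT = -1,
    QUEUE_SIZE = -1
);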
2025-03-31T16:14:07.465687Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:14:09.481302Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-03-31T16:14:09.481563Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7488007688382441988:2328], Start check tables existence, number paths: 2 2025-03-31T16:14:09.481653Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-03-31T16:14:09.481699Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, node count: 1 2025-03-31T16:14:09.481716Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools were enabled 2025-03-31T16:14:09.486762Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MWQyODA4NjgtNWQ2MTEyZjQtNjI3MTQzMmItMWM3NzU3NGI=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id MWQyODA4NjgtNWQ2MTEyZjQtNjI3MTQzMmItMWM3NzU3NGI= 2025-03-31T16:14:09.487448Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7488007688382441988:2328], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-03-31T16:14:09.487507Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7488007688382441988:2328], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-03-31T16:14:09.487563Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7488007688382441988:2328], Successfully finished 2025-03-31T16:14:09.494223Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MWQyODA4NjgtNWQ2MTEyZjQtNjI3MTQzMmItMWM3NzU3NGI=, ActorId: [1:7488007688382442004:2329], ActorState: unknown state, session actor bootstrapped 2025-03-31T16:14:09.494524Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-03-31T16:14:09.511547Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007688382442006:2301], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-03-31T16:14:09.516292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:14:09.517517Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007688382442006:2301], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2025-03-31T16:14:09.517662Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007688382442006:2301], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2025-03-31T16:14:09.526953Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007688382442006:2301], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:14:09.622007Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007688382442006:2301], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-03-31T16:14:09.626601Z node 1 :TX_PROXY ERROR: Actor# [1:7488007688382442057:2333] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:14:09.626719Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007688382442006:2301], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2025-03-31T16:14:09.627021Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007688382442064:2339], DatabaseId: Root, PoolId: sample_pool_id, Start pool fetching 2025-03-31T16:14:09.628167Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007688382442064:2339], DatabaseId: Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-03-31T16:14:09.636011Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=MWQyODA4NjgtNWQ2MTEyZjQtNjI3MTQzMmItMWM3NzU3NGI=, ActorId: [1:7488007688382442004:2329], ActorState: ReadyState, Session closed due to explicit close event 2025-03-31T16:14:09.636179Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=MWQyODA4NjgtNWQ2MTEyZjQtNjI3MTQzMmItMWM3NzU3NGI=, ActorId: [1:7488007688382442004:2329], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-03-31T16:14:09.636229Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MWQyODA4NjgtNWQ2MTEyZjQtNjI3MTQzMmItMWM3NzU3NGI=, ActorId: [1:7488007688382442004:2329], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-03-31T16:14:09.636291Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MWQyODA4NjgtNWQ2MTEyZjQtNjI3MTQzMmItMWM3NzU3NGI=, ActorId: [1:7488007688382442004:2329], ActorState: unknown state, Cleanup temp tables: 0 2025-03-31T16:14:09.636465Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MWQyODA4NjgtNWQ2MTEyZjQtNjI3MTQzMmItMWM3NzU3NGI=, ActorId: [1:7488007688382442004:2329], ActorState: unknown state, Session actor destroyed 2025-03-31T16:14:10.313374Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488007690589704413:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:14:10.313436Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00114c/r3tmp/tmpBK6tyD/pdisk_1.dat 2025-03-31T16:14:10.415900Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21002, node 2 2025-03-31T16:14:10.468500Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:14:10.468670Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:14:10.483484Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) 
VolatileState: Connecting -> Connected 2025-03-31T16:14:10.503628Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:14:10.503652Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:14:10.503659Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:14:10.503767Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8219 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:14:10.734857Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:14:13.182194Z node 2 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-03-31T16:14:13.182398Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-03-31T16:14:13.182423Z node 2 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools were enabled 2025-03-31T16:14:13.183981Z node 2 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=2&id=M2M2MDI1NTMtYTNi ... 
rState: ExecuteState, TraceId: 01jqpfqzzv2db14qv0cyy5bxqq, TEvTxResponse, CurrentTx: 1/1 response.status: SUCCESS 2025-03-31T16:14:52.961267Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=MmY1MGRhMzEtNDVkNDk0YzYtMzRmMjAxM2ItYmExNWM2NTc=, ActorId: [8:7488007873908644393:4563], ActorState: ExecuteState, TraceId: 01jqpfqzzv2db14qv0cyy5bxqq, txInfo Status: Committed Kind: ReadWrite TotalDuration: 35.477 ServerDuration: 34.983 QueriesCount: 2 2025-03-31T16:14:52.961374Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MmY1MGRhMzEtNDVkNDk0YzYtMzRmMjAxM2ItYmExNWM2NTc=, ActorId: [8:7488007873908644393:4563], ActorState: ExecuteState, TraceId: 01jqpfqzzv2db14qv0cyy5bxqq, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-03-31T16:14:52.961423Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=MmY1MGRhMzEtNDVkNDk0YzYtMzRmMjAxM2ItYmExNWM2NTc=, ActorId: [8:7488007873908644393:4563], ActorState: ExecuteState, TraceId: 01jqpfqzzv2db14qv0cyy5bxqq, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-03-31T16:14:52.961444Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MmY1MGRhMzEtNDVkNDk0YzYtMzRmMjAxM2ItYmExNWM2NTc=, ActorId: [8:7488007873908644393:4563], ActorState: ExecuteState, TraceId: 01jqpfqzzv2db14qv0cyy5bxqq, EndCleanup, isFinal: 0 2025-03-31T16:14:52.961497Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MmY1MGRhMzEtNDVkNDk0YzYtMzRmMjAxM2ItYmExNWM2NTc=, ActorId: [8:7488007873908644393:4563], ActorState: ExecuteState, TraceId: 01jqpfqzzv2db14qv0cyy5bxqq, Sent query response back to proxy, proxyRequestId: 477, proxyId: [8:7488007766534453756:2089] 2025-03-31T16:14:52.962303Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Update lease, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=8&id=MmY1MGRhMzEtNDVkNDk0YzYtMzRmMjAxM2ItYmExNWM2NTc=, TxId: 2025-03-31T16:14:52.962431Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Update lease, RunDataQuery: -- TRefreshPoolStateQuery::OnLeaseUpdated DECLARE $database_id AS Text; DECLARE $pool_id AS Text; SELECT COUNT(*) AS delayed_requests FROM `.metadata/workload_manager/delayed_requests` WHERE database = $database_id AND pool_id = $pool_id AND (wait_deadline IS NULL OR wait_deadline >= CurrentUtcTimestamp()) AND lease_deadline >= CurrentUtcTimestamp(); SELECT COUNT(*) AS running_requests FROM `.metadata/workload_manager/running_requests` WHERE database = $database_id AND pool_id = $pool_id AND lease_deadline >= CurrentUtcTimestamp(); 2025-03-31T16:14:52.963478Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MmY1MGRhMzEtNDVkNDk0YzYtMzRmMjAxM2ItYmExNWM2NTc=, ActorId: [8:7488007873908644393:4563], ActorState: ReadyState, TraceId: 01jqpfr0139cncm3qf3zek7ech, received request, proxyRequestId: 478 prepared: 0 tx_control: 1 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DML text: -- TRefreshPoolStateQuery::OnLeaseUpdated DECLARE $database_id AS Text; DECLARE $pool_id AS Text; SELECT COUNT(*) AS delayed_requests FROM `.metadata/workload_manager/delayed_requests` WHERE database = $database_id AND pool_id = $pool_id AND (wait_deadline IS NULL OR wait_deadline >= CurrentUtcTimestamp()) AND lease_deadline >= CurrentUtcTimestamp(); 
SELECT COUNT(*) AS running_requests FROM `.metadata/workload_manager/running_requests` WHERE database = $database_id AND pool_id = $pool_id AND lease_deadline >= CurrentUtcTimestamp(); rpcActor: [8:7488007873908644416:4568] database: /Root databaseId: /Root pool id: default 2025-03-31T16:14:52.963520Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MmY1MGRhMzEtNDVkNDk0YzYtMzRmMjAxM2ItYmExNWM2NTc=, ActorId: [8:7488007873908644393:4563], ActorState: ReadyState, TraceId: 01jqpfr0139cncm3qf3zek7ech, request placed into pool from cache: default 2025-03-31T16:14:52.964381Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MmY1MGRhMzEtNDVkNDk0YzYtMzRmMjAxM2ItYmExNWM2NTc=, ActorId: [8:7488007873908644393:4563], ActorState: ExecuteState, TraceId: 01jqpfr0139cncm3qf3zek7ech, ExecutePhyTx, tx: 0x000050C0001FCF18 literal: 0 commit: 0 txCtx.DeferredEffects.size(): 0 2025-03-31T16:14:52.964444Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MmY1MGRhMzEtNDVkNDk0YzYtMzRmMjAxM2ItYmExNWM2NTc=, ActorId: [8:7488007873908644393:4563], ActorState: ExecuteState, TraceId: 01jqpfr0139cncm3qf3zek7ech, Sending to Executer TraceId: 0 8 2025-03-31T16:14:52.964508Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MmY1MGRhMzEtNDVkNDk0YzYtMzRmMjAxM2ItYmExNWM2NTc=, ActorId: [8:7488007873908644393:4563], ActorState: ExecuteState, TraceId: 01jqpfr0139cncm3qf3zek7ech, Created new KQP executer: [8:7488007873908644420:4563] isRollback: 0 2025-03-31T16:14:52.969165Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=MWIwNTE5MjYtOGVhNmZhOGYtMTBmNDgyYmMtZjMzYTk2Yzk=, ActorId: [6:7488007782466328829:2334], ActorState: ReadyState, Session closed due to explicit close event 2025-03-31T16:14:52.969223Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=MWIwNTE5MjYtOGVhNmZhOGYtMTBmNDgyYmMtZjMzYTk2Yzk=, ActorId: [6:7488007782466328829:2334], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-03-31T16:14:52.969250Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=MWIwNTE5MjYtOGVhNmZhOGYtMTBmNDgyYmMtZjMzYTk2Yzk=, ActorId: [6:7488007782466328829:2334], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-03-31T16:14:52.969276Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=MWIwNTE5MjYtOGVhNmZhOGYtMTBmNDgyYmMtZjMzYTk2Yzk=, ActorId: [6:7488007782466328829:2334], ActorState: unknown state, Cleanup temp tables: 0 2025-03-31T16:14:52.969349Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=MWIwNTE5MjYtOGVhNmZhOGYtMTBmNDgyYmMtZjMzYTk2Yzk=, ActorId: [6:7488007782466328829:2334], ActorState: unknown state, Session actor destroyed 2025-03-31T16:14:52.977318Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MmY1MGRhMzEtNDVkNDk0YzYtMzRmMjAxM2ItYmExNWM2NTc=, ActorId: [8:7488007873908644393:4563], ActorState: ExecuteState, TraceId: 01jqpfr0139cncm3qf3zek7ech, TEvTxResponse, CurrentTx: 1/2 response.status: SUCCESS 2025-03-31T16:14:52.977430Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MmY1MGRhMzEtNDVkNDk0YzYtMzRmMjAxM2ItYmExNWM2NTc=, ActorId: [8:7488007873908644393:4563], ActorState: ExecuteState, TraceId: 01jqpfr0139cncm3qf3zek7ech, ExecutePhyTx, tx: 0x000050C000106618 literal: 1 commit: 1 txCtx.DeferredEffects.size(): 0 2025-03-31T16:14:52.978143Z node 8 :KQP_SESSION DEBUG: SessionId: 
ydb://session/3?node_id=8&id=MmY1MGRhMzEtNDVkNDk0YzYtMzRmMjAxM2ItYmExNWM2NTc=, ActorId: [8:7488007873908644393:4563], ActorState: ExecuteState, TraceId: 01jqpfr0139cncm3qf3zek7ech, TEvTxResponse, CurrentTx: 2/2 response.status: SUCCESS 2025-03-31T16:14:52.978311Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=MmY1MGRhMzEtNDVkNDk0YzYtMzRmMjAxM2ItYmExNWM2NTc=, ActorId: [8:7488007873908644393:4563], ActorState: ExecuteState, TraceId: 01jqpfr0139cncm3qf3zek7ech, txInfo Status: Committed Kind: ReadOnly TotalDuration: 14.036 ServerDuration: 13.938 QueriesCount: 2 2025-03-31T16:14:52.978425Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MmY1MGRhMzEtNDVkNDk0YzYtMzRmMjAxM2ItYmExNWM2NTc=, ActorId: [8:7488007873908644393:4563], ActorState: ExecuteState, TraceId: 01jqpfr0139cncm3qf3zek7ech, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-03-31T16:14:52.978486Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=MmY1MGRhMzEtNDVkNDk0YzYtMzRmMjAxM2ItYmExNWM2NTc=, ActorId: [8:7488007873908644393:4563], ActorState: ExecuteState, TraceId: 01jqpfr0139cncm3qf3zek7ech, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-03-31T16:14:52.978517Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MmY1MGRhMzEtNDVkNDk0YzYtMzRmMjAxM2ItYmExNWM2NTc=, ActorId: [8:7488007873908644393:4563], ActorState: ExecuteState, TraceId: 01jqpfr0139cncm3qf3zek7ech, EndCleanup, isFinal: 0 2025-03-31T16:14:52.978567Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MmY1MGRhMzEtNDVkNDk0YzYtMzRmMjAxM2ItYmExNWM2NTc=, ActorId: [8:7488007873908644393:4563], ActorState: ExecuteState, TraceId: 01jqpfr0139cncm3qf3zek7ech, Sent query response back to proxy, proxyRequestId: 478, proxyId: [8:7488007766534453756:2089] 2025-03-31T16:14:52.978917Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=8&id=MmY1MGRhMzEtNDVkNDk0YzYtMzRmMjAxM2ItYmExNWM2NTc=, TxId: 2025-03-31T16:14:52.978996Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, Finish with SUCCESS, SessionId: ydb://session/3?node_id=8&id=MmY1MGRhMzEtNDVkNDk0YzYtMzRmMjAxM2ItYmExNWM2NTc=, TxId: 2025-03-31T16:14:52.979186Z node 8 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolHandlerActorBase] ActorId: [8:7488007783714323364:2340], DatabaseId: /Root, PoolId: sample_pool_id, successfully refreshed pool state, in flight: 0, delayed: 0 2025-03-31T16:14:52.979279Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=MmY1MGRhMzEtNDVkNDk0YzYtMzRmMjAxM2ItYmExNWM2NTc=, ActorId: [8:7488007873908644393:4563], ActorState: ReadyState, Session closed due to explicit close event 2025-03-31T16:14:52.979320Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=MmY1MGRhMzEtNDVkNDk0YzYtMzRmMjAxM2ItYmExNWM2NTc=, ActorId: [8:7488007873908644393:4563], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-03-31T16:14:52.979348Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MmY1MGRhMzEtNDVkNDk0YzYtMzRmMjAxM2ItYmExNWM2NTc=, ActorId: [8:7488007873908644393:4563], ActorState: ReadyState, 
EndCleanup, isFinal: 1 2025-03-31T16:14:52.979375Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MmY1MGRhMzEtNDVkNDk0YzYtMzRmMjAxM2ItYmExNWM2NTc=, ActorId: [8:7488007873908644393:4563], ActorState: unknown state, Cleanup temp tables: 0 2025-03-31T16:14:52.979457Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MmY1MGRhMzEtNDVkNDk0YzYtMzRmMjAxM2ItYmExNWM2NTc=, ActorId: [8:7488007873908644393:4563], ActorState: unknown state, Session actor destroyed 2025-03-31T16:14:53.134045Z node 8 :BS_PROXY_PUT ERROR: [a97a22ee1ccd826d] Result# TEvPutResult {Id# [72075186224037889:1:703:0:0:42:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037889:1:703:0:0:42:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 6 ErrorReasons# [ "BS_QUEUE: event undelivered", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseDatashard::TraverseOneTableServerless [GOOD] Test command err: 2025-03-31T16:14:42.653554Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:14:42.654039Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:14:42.654208Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ed8/r3tmp/tmpyBCAuI/pdisk_1.dat 2025-03-31T16:14:43.330321Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24032, node 1 2025-03-31T16:14:44.063733Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:14:44.063810Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:14:44.063849Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:14:44.064497Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:14:44.085012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:14:44.190347Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:14:44.190970Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:14:44.208099Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8450 2025-03-31T16:14:44.862142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:14:48.446000Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-31T16:14:48.513431Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:14:48.513548Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:14:48.557515Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:14:48.559922Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:14:48.848104Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:14:48.852160Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:14:48.852945Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:14:48.853090Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:14:48.853309Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:14:48.853416Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:14:48.853523Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:14:48.853638Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:14:48.853735Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:14:49.036661Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:14:49.036786Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:14:49.059021Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:14:49.244261Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:14:49.308474Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-31T16:14:49.308608Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-31T16:14:49.383936Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-31T16:14:49.384804Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-31T16:14:49.385070Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-31T16:14:49.385139Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-31T16:14:49.385203Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-31T16:14:49.385259Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-31T16:14:49.385333Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-31T16:14:49.385414Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-31T16:14:49.389306Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-31T16:14:49.427320Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-31T16:14:49.427481Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1870:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-31T16:14:49.437398Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1881:2607] 2025-03-31T16:14:49.442680Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1904:2618] 2025-03-31T16:14:49.442847Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1904:2618], schemeshard id = 72075186224037897 2025-03-31T16:14:49.469394Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-03-31T16:14:49.517071Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-31T16:14:49.517126Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-31T16:14:49.517186Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-03-31T16:14:49.541193Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-31T16:14:49.557232Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-31T16:14:49.557455Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-31T16:14:49.762610Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-31T16:14:49.957523Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-31T16:14:50.059636Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-31T16:14:50.874070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-31T16:14:51.843896Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:14:51.987035Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2025-03-31T16:14:51.987167Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037899 2025-03-31T16:14:51.987278Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2585:2944], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037899 2025-03-31T16:14:51.988753Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2587:2945] 2025-03-31T16:14:51.988981Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2587:2945], schemeshard id = 72075186224037899 2025-03-31T16:14:53.571176Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2716:3239], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:53.571626Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:53.732166Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72075186224037899 2025-03-31T16:14:54.411581Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3024:3288], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:54.411973Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:54.413577Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:3029:3292]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:14:54.413791Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-31T16:14:54.414025Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2025-03-31T16:14:54.414100Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:3032:3295] 2025-03-31T16:14:54.415856Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:3032:3295] 2025-03-31T16:14:54.416703Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:3033:3185] 2025-03-31T16:14:54.417035Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:3032:3295], server id = [2:3033:3185], tablet id = 72075186224037894, status = OK 2025-03-31T16:14:54.417266Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:3033:3185], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-03-31T16:14:54.417358Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-03-31T16:14:54.418494Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-31T16:14:54.418632Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:3029:3292], StatRequests.size() = 1 2025-03-31T16:14:54.513338Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3037:3299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:54.513637Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:54.514243Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3042:3304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:54.530688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2025-03-31T16:14:54.705973Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-03-31T16:14:54.706075Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-03-31T16:14:54.740868Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:3032:3295], schemeshard count = 1 2025-03-31T16:14:55.132397Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:3044:3306], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-03-31T16:14:55.317990Z node 1 :TX_PROXY ERROR: Actor# [1:3163:3373] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:14:55.332102Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:3186:3389]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:14:55.332350Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-31T16:14:55.332403Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:3186:3389], StatRequests.size() = 1 2025-03-31T16:14:55.555252Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jqpfr19x7qbybe7jfeyxxjw6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTMyMGMyZmEtNWZmMzc2ZDktYTU3NjVkNWYtYjc0Yzc0MGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:14:55.725737Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:3228:3237]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-03-31T16:14:55.728711Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-31T16:14:55.728784Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-03-31T16:14:55.729116Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-31T16:14:55.729172Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2025-03-31T16:14:55.729227Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 2] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-03-31T16:14:55.787587Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2025-03-31T16:14:55.788833Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 1 |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> Viewer::PDiskMerging [GOOD] >> Viewer::SelectStringWithBase64Encoding >> HttpRequest::ProbeServerless |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> Viewer::TabletMergingPacked |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test >> test_ttl.py::TestTTLOnIndexedTable::test_case [GOOD] >> Viewer::FuzzySearcherLimit3OutOf4 [GOOD] >> Viewer::FuzzySearcherLimit4OutOf4 [GOOD] >> Viewer::FuzzySearcherLongWord [GOOD] >> Viewer::FuzzySearcherPriority [GOOD] >> Viewer::JsonAutocompleteColumns |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> KqpMergeCn::TopSortBy_Date_Limit4 [GOOD] >> KqpMergeCn::TopSortByDesc_Datetime_Limit3 >> Viewer::SelectStringWithNoBase64Encoding |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/oom/py3test >> overlapping_portions.py::TestOverlappingPortions::test [FAIL] |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |86.9%| [TM] {asan, 
default-linux-x86_64, release} ydb/tests/functional/ttl/py3test >> test_ttl.py::TestTTLDefaultEnv::test_case [GOOD] >> BasicStatistics::NotFullStatisticsDatashard >> Viewer::TabletMergingPacked [GOOD] >> Viewer::VDiskMerging |86.9%| [TA] $(B)/ydb/tests/functional/ttl/test-results/py3test/{meta.json ... results_accumulator.log} |86.9%| [TA] {RESULT} $(B)/ydb/tests/functional/ttl/test-results/py3test/{meta.json ... results_accumulator.log} >> Viewer::LevenshteinDistance [GOOD] >> Viewer::JsonStorageListingV2 |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> KqpWorkloadServiceDistributed::TestNodeDisconnect [GOOD] >> KqpWorkloadServiceDistributed::TestDistributedLessConcurrentQueryLimit >> Viewer::TabletMerging >> KqpJoinOrder::CanonizedJoinOrderTPCH4 [GOOD] |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> TConsoleTests::TestCreateTenantAlreadyExists [GOOD] >> TConsoleTests::TestCreateTenantAlreadyExistsExtSubdomain |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> TConsoleTests::TestRemoveTenant [GOOD] >> TConsoleTests::TestRemoveTenantExtSubdomain >> BasicStatistics::ServerlessGlobalIndex >> Viewer::VDiskMerging [GOOD] >> Viewer::TenantInfo5kkTablets >> KqpNewEngine::SelfJoin [GOOD] >> KqpNewEngine::ScalarFunctions |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> KqpWorkloadService::TestZeroConcurrentQueryLimit [GOOD] >> KqpWorkloadService::TestLessConcurrentQueryLimit [GOOD] >> KqpWorkloadService::TestCpuLoadThreshold >> Viewer::JsonAutocompleteSimilarDatabaseName |87.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |87.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |87.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::SimpleGlobalIndex >> KqpIndexLookupJoin::InnerJoinCustomColumnOrder-StreamLookup [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH4 [GOOD] Test command err: Trying to start YDB, gRPC: 15394, MsgBus: 11306 2025-03-31T16:13:39.740908Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488007557301831287:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:13:39.740969Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001998/r3tmp/tmpF98zxp/pdisk_1.dat 2025-03-31T16:13:40.290848Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:13:40.334268Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:13:40.334358Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 15394, node 1 2025-03-31T16:13:40.336392Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:13:40.459714Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, 
broken or outdated, will use file: (empty maybe) 2025-03-31T16:13:40.459735Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:13:40.459742Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:13:40.459867Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11306 TClient is connected to server localhost:11306 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:13:41.158813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:13:43.620836Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007574481701141:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:13:43.620977Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:13:43.621864Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007574481701153:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:13:43.625644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:13:43.638703Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007574481701155:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:13:43.700587Z node 1 :TX_PROXY ERROR: Actor# [1:7488007574481701206:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:13:44.189127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:13:44.455018Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488007578776668791:2363];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:13:44.455233Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488007578776668791:2363];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:13:44.455526Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488007578776668791:2363];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:13:44.455657Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488007578776668791:2363];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:13:44.455788Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488007578776668791:2363];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:13:44.455849Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488007578776668789:2362];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:13:44.455876Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488007578776668789:2362];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:13:44.455906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488007578776668791:2363];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:13:44.455984Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488007578776668789:2362];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:13:44.456001Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488007578776668791:2363];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:13:44.456076Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488007578776668789:2362];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 
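The long run of TX_COLUMNSHARD normalizer_register warnings in this dump is each column shard replaying its schema-normalization chain (Granules, Chunks, TablesCleaner, CleanGranuleId, CleanInsertionDedup, GCCountersNormalizer, the Restore and Sync steps) after the ESchemeOpCreateColumnTable proposed above; the chain is logged once per tablet, which is why the same block repeats for tablets 72075186224037893, 72075186224037895, 72075186224037902 and so on. The tables behind it are column-store tables roughly of the shape sketched below in YQL; the schema is illustrative only, since the suite's actual TPCH DDL is not shown in this log.

-- Illustrative sketch, not the suite's real DDL: a YDB column-store table of
-- the kind whose creation triggers the normalizer chains logged in this dump.
-- The table path and column names here are assumptions.
CREATE TABLE `/Root/test/lineitem` (
    l_orderkey   Int64 NOT NULL,
    l_linenumber Int32 NOT NULL,
    l_quantity   Double,
    PRIMARY KEY (l_orderkey, l_linenumber)
) WITH (STORE = COLUMN);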
2025-03-31T16:13:44.456102Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488007578776668791:2363];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:13:44.456167Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488007578776668789:2362];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:13:44.456196Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488007578776668791:2363];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:13:44.456282Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488007578776668789:2362];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:13:44.456336Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488007578776668791:2363];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:13:44.456370Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488007578776668789:2362];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:13:44.456440Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488007578776668791:2363];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:13:44.456458Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488007578776668789:2362];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:13:44.456657Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488007578776668791:2363];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:13:44.457655Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488007578776668789:2362];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:13:44.457914Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488007578776668789:2362];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:13:44.458084Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488007578776668789:2362];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:13:44.458220Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037902;self_id=[1:7488007578776668789:2362];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:13:44.501570Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7488007578776668777:2356];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:13:44.501644Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7488007578776668777:2356];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:13:44.501845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;sel ... tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:48.737971Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039374;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:48.741756Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039297;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:48.743571Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:48.749029Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039303;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:48.749252Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039345;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:48.756144Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039249;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:48.756960Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:48.763236Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039321;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:48.763242Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:48.770209Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039313;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:48.770209Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039283;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:48.777458Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:48.777511Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:48.783288Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039295;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:48.784653Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:48.789020Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:48.791647Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:48.795371Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039358;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:48.799099Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:48.801127Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:48.805421Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039356;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:48.807716Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:48.812815Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:48.814395Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:48.819679Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:48.820741Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:48.826627Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:48.827069Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:48.834152Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:48.834152Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:48.841204Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:48.841208Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:48.848075Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:48.848397Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:48.855352Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:48.855384Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:48.862685Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:48.862685Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:48.869969Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039382;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:48.869970Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:48.877221Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:48.877222Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:48.884413Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:48.886417Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:48.891866Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:48.893494Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:49.067563Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpfq0ak4p9meaq177ceyqms", SessionId: ydb://session/3?node_id=1&id=NjM1YjhmODQtYjg2ODQ4OGYtMzVhZTgwYTYtYTljMGZkNGE=, Slow query, duration: 28.567739s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:14:49.329351Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:14:49.329355Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:14:49.330104Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> KqpWorkloadService::TestZeroConcurrentQueryLimit [GOOD] Test command err: 2025-03-31T16:14:06.464933Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488007675195614015:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:14:06.464995Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00114d/r3tmp/tmpjIINS2/pdisk_1.dat 2025-03-31T16:14:06.802819Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:14:06.807665Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:14:06.807810Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:14:06.811331Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23397, node 1 
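For readability, the statement batch recorded in the KQP_SLOW_LOG entry above (duration 28.567739s, status STATUS_CODE_UNSPECIFIED) is reproduced below with its escaped newlines expanded. This is the same DDL captured in the log, not a new or suggested statement:

-- Verbatim from the KQP_SLOW_LOG entry above, unescaped for readability only.
CREATE TABLE t1 (
    id1 Int32 NOT NULL,
    PRIMARY KEY (id1)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t2 (
    id2 Int64 NOT NULL,
    t1_id1 Int64 NOT NULL,
    -- random_field2 Int32
    PRIMARY KEY (id2)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t3 (
    id3 Int16 NOT NULL,
    -- random_field3 Int32
    PRIMARY KEY (id3)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);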
2025-03-31T16:14:06.880446Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:14:06.880482Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:14:06.880494Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:14:06.880626Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14882 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:14:07.194678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:14:09.378881Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-03-31T16:14:09.382470Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZmE0MmU3NmMtODYzNjk2ZjgtZjc2MjQyMjktNjdlYTY2NDE=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ZmE0MmU3NmMtODYzNjk2ZjgtZjc2MjQyMjktNjdlYTY2NDE= 2025-03-31T16:14:09.382985Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7488007688080516528:2328], Start check tables existence, number paths: 2 2025-03-31T16:14:09.383076Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZmE0MmU3NmMtODYzNjk2ZjgtZjc2MjQyMjktNjdlYTY2NDE=, ActorId: [1:7488007688080516529:2329], ActorState: unknown state, session actor bootstrapped 2025-03-31T16:14:09.383609Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 1 2025-03-31T16:14:09.383668Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-03-31T16:14:09.383696Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2025-03-31T16:14:09.389206Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7488007688080516528:2328], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-03-31T16:14:09.389286Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7488007688080516528:2328], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-03-31T16:14:09.389311Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7488007688080516528:2328], Successfully finished 2025-03-31T16:14:09.389357Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: 
[WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-03-31T16:14:09.413261Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007688080516546:2302], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-03-31T16:14:09.417228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:14:09.419769Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007688080516546:2302], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976715658 2025-03-31T16:14:09.419914Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007688080516546:2302], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2025-03-31T16:14:09.430964Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007688080516546:2302], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:14:09.494396Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007688080516546:2302], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-03-31T16:14:09.498999Z node 1 :TX_PROXY ERROR: Actor# [1:7488007688080516599:2335] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:14:09.499286Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007688080516546:2302], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2025-03-31T16:14:09.502197Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NDk5NjIzYTgtM2QzYzA4NTUtYjM3NTZiYWEtYjMyZDU5MDQ=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id NDk5NjIzYTgtM2QzYzA4NTUtYjM3NTZiYWEtYjMyZDU5MDQ= 2025-03-31T16:14:09.502380Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NDk5NjIzYTgtM2QzYzA4NTUtYjM3NTZiYWEtYjMyZDU5MDQ=, ActorId: [1:7488007688080516607:2331], ActorState: unknown state, session actor bootstrapped 2025-03-31T16:14:09.502499Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2025-03-31T16:14:09.502514Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root 2025-03-31T16:14:09.502642Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007688080516609:2332], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-03-31T16:14:09.502834Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NDk5NjIzYTgtM2QzYzA4NTUtYjM3NTZiYWEtYjMyZDU5MDQ=, ActorId: [1:7488007688080516607:2331], ActorState: ReadyState, TraceId: 01jqpfpnjyecbq7kmzsezkvhzm, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [1:7488007688080516606:2341] database: Root databaseId: /Root pool id: sample_pool_id 2025-03-31T16:14:09.502902Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved new request from [1:7488007688080516607:2331], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=NDk5NjIzYTgtM2QzYzA4NTUtYjM3NTZiYWEtYjMyZDU5MDQ= 2025-03-31T16:14:09.502968Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7488007688080516611:2333], Database: /Root, Start database fetching 2025-03-31T16:14:09.503853Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7488007688080516611:2333], Database: /Root, Database info successfully fetched, serverless: 0 2025-03-31T16:14:09.504020Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007688080516609:2332], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-03-31T16:14:09.504062Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched database info, DatabaseId: /Root, Serverless: 0 2025-03-31T16:14:09.504225Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root 
2025-03-31T16:14:09.504242Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new handler for pool /Root/sample_pool_id 2025-03-31T16:14:09.504289Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [1:7488007688080516620:2334], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=NDk5NjIzYTgtM2QzYzA4NTUtYjM3NTZiYWEtYjMyZDU5MDQ=, Start pool fetching 2025-03-31T16:14:09.504323Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007688080516621:2335], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-03-31T16:14:09.504532Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007688080516621:2335], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-03-31T16:14:09.504589Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7488007688080516623:2336], DatabaseId: /Root, PoolId: sample_pool_id, Subscribed on schemeboard notifications for path: [OwnerId: 72057594046644480, LocalPathId: 5] 2025-03-31T16:14:09.504633Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [1:7488007688080516620:2334], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=NDk5NjIzYTgtM2QzYzA4NTUtYjM3NTZiYWEtYjMyZDU5MDQ=, Pool info successfully resolved 2025-03-31T16:14:09.504702Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDk5NjIzYTgtM2QzYzA4NTUtYjM3NTZiYWEtYjMyZDU5MDQ= 2025-03-31T16:14:09.504760Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7488007688080516623:2336], DatabaseId: /Root, PoolId: sample_pool_id, Received new request, worker id: [1:7488007688080516607:2331], session id: ydb://session/3?node_id=1&id=NDk5NjIzYTgtM2QzYzA4NTUtYjM3NTZiYWEtYjMyZDU5MDQ= 2025-03-31T16:14:09.504809Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Request placed into pool, DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=NDk5NjIzYTgtM2QzYzA4NTUtYjM3NTZiYWEtYjMyZDU5MDQ= 2025-03-31T16:14:09.504868Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Got create teables request, DatabaseId: /Root, PoolId: sample_pool_id 2025-03-31T16:14:09.504888Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService ... 
DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [6:7488007915902955782:2330], Start check tables existence, number paths: 2 2025-03-31T16:15:02.998328Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=YWE0NzhkOWUtOTBhYjA5NmQtNTA0YjE0NTgtMzdiMjFmZjA=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id YWE0NzhkOWUtOTBhYjA5NmQtNTA0YjE0NTgtMzdiMjFmZjA= 2025-03-31T16:15:02.998908Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=YWE0NzhkOWUtOTBhYjA5NmQtNTA0YjE0NTgtMzdiMjFmZjA=, ActorId: [6:7488007915902955798:2331], ActorState: unknown state, session actor bootstrapped 2025-03-31T16:15:02.999675Z node 6 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 1 2025-03-31T16:15:02.999707Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-03-31T16:15:02.999726Z node 6 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2025-03-31T16:15:02.999793Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [6:7488007915902955782:2330], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-03-31T16:15:02.999854Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [6:7488007915902955782:2330], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-03-31T16:15:02.999894Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [6:7488007915902955782:2330], Successfully finished 2025-03-31T16:15:03.000592Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-03-31T16:15:03.003869Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7488007920197923096:2304], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-03-31T16:15:03.012691Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:15:03.015338Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7488007920197923096:2304], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976715658 2025-03-31T16:15:03.017479Z node 6 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7488007920197923096:2304], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2025-03-31T16:15:03.024921Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7488007920197923096:2304], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:15:03.075193Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7488007920197923096:2304], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-03-31T16:15:03.078819Z node 6 :TX_PROXY ERROR: Actor# [6:7488007920197923147:2336] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:15:03.079125Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7488007920197923096:2304], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2025-03-31T16:15:03.082709Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=MjJiOTI4NmEtOTA4OWVkY2QtNDIyNDk0ODQtZTViMTI0Ng==, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id MjJiOTI4NmEtOTA4OWVkY2QtNDIyNDk0ODQtZTViMTI0Ng== 2025-03-31T16:15:03.083104Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2025-03-31T16:15:03.083127Z node 6 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root 2025-03-31T16:15:03.083194Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=MjJiOTI4NmEtOTA4OWVkY2QtNDIyNDk0ODQtZTViMTI0Ng==, ActorId: [6:7488007920197923154:2332], ActorState: unknown state, session actor bootstrapped 2025-03-31T16:15:03.083388Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=MjJiOTI4NmEtOTA4OWVkY2QtNDIyNDk0ODQtZTViMTI0Ng==, ActorId: [6:7488007920197923154:2332], ActorState: ReadyState, TraceId: 01jqpfr9xb69161dq971t4t4pp, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [6:7488007920197923153:2341] database: Root databaseId: /Root pool id: sample_pool_id 2025-03-31T16:15:03.083438Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488007920197923156:2333], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-03-31T16:15:03.083531Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved new request from [6:7488007920197923154:2332], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=6&id=MjJiOTI4NmEtOTA4OWVkY2QtNDIyNDk0ODQtZTViMTI0Ng== 2025-03-31T16:15:03.083580Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [6:7488007920197923157:2334], Database: /Root, Start database fetching 2025-03-31T16:15:03.085555Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [6:7488007920197923157:2334], Database: /Root, Database info successfully fetched, serverless: 0 2025-03-31T16:15:03.085716Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488007920197923156:2333], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-03-31T16:15:03.085759Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched database info, DatabaseId: /Root, Serverless: 0 2025-03-31T16:15:03.085794Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root 
2025-03-31T16:15:03.085815Z node 6 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new handler for pool /Root/sample_pool_id 2025-03-31T16:15:03.086056Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [6:7488007920197923167:2335], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=6&id=MjJiOTI4NmEtOTA4OWVkY2QtNDIyNDk0ODQtZTViMTI0Ng==, Start pool fetching 2025-03-31T16:15:03.086096Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488007920197923169:2337], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-03-31T16:15:03.086214Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [6:7488007920197923168:2336], DatabaseId: /Root, PoolId: sample_pool_id, Subscribed on schemeboard notifications for path: [OwnerId: 72057594046644480, LocalPathId: 5] 2025-03-31T16:15:03.087248Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488007920197923169:2337], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-03-31T16:15:03.087335Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [6:7488007920197923167:2335], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=6&id=MjJiOTI4NmEtOTA4OWVkY2QtNDIyNDk0ODQtZTViMTI0Ng==, Pool info successfully resolved 2025-03-31T16:15:03.087380Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root, SessionId: ydb://session/3?node_id=6&id=MjJiOTI4NmEtOTA4OWVkY2QtNDIyNDk0ODQtZTViMTI0Ng== 2025-03-31T16:15:03.087480Z node 6 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Request placed into pool, DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=6&id=MjJiOTI4NmEtOTA4OWVkY2QtNDIyNDk0ODQtZTViMTI0Ng== 2025-03-31T16:15:03.087589Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=MjJiOTI4NmEtOTA4OWVkY2QtNDIyNDk0ODQtZTViMTI0Ng==, ActorId: [6:7488007920197923154:2332], ActorState: ExecuteState, TraceId: 01jqpfr9xb69161dq971t4t4pp, Create QueryResponse for error on request, msg: Query failed during adding/waiting in workload pool sample_pool_id 2025-03-31T16:15:03.087726Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=MjJiOTI4NmEtOTA4OWVkY2QtNDIyNDk0ODQtZTViMTI0Ng==, ActorId: [6:7488007920197923154:2332], ActorState: ExecuteState, TraceId: 01jqpfr9xb69161dq971t4t4pp, Cleanup start, isFinal: 1 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 1 2025-03-31T16:15:03.087949Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Finished request with worker actor [6:7488007920197923154:2332], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=6&id=MjJiOTI4NmEtOTA4OWVkY2QtNDIyNDk0ODQtZTViMTI0Ng== 2025-03-31T16:15:03.087992Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [6:7488007920197923168:2336], DatabaseId: /Root, PoolId: sample_pool_id, Got watch notification 2025-03-31T16:15:03.088180Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=MjJiOTI4NmEtOTA4OWVkY2QtNDIyNDk0ODQtZTViMTI0Ng==, ActorId: [6:7488007920197923154:2332], ActorState: CleanupState, TraceId: 01jqpfr9xb69161dq971t4t4pp, EndCleanup, isFinal: 1 2025-03-31T16:15:03.088294Z node 6 :KQP_SESSION DEBUG: SessionId: 
ydb://session/3?node_id=6&id=MjJiOTI4NmEtOTA4OWVkY2QtNDIyNDk0ODQtZTViMTI0Ng==, ActorId: [6:7488007920197923154:2332], ActorState: CleanupState, TraceId: 01jqpfr9xb69161dq971t4t4pp, Sent query response back to proxy, proxyRequestId: 3, proxyId: [6:7488007898723086171:2268] 2025-03-31T16:15:03.088336Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=MjJiOTI4NmEtOTA4OWVkY2QtNDIyNDk0ODQtZTViMTI0Ng==, ActorId: [6:7488007920197923154:2332], ActorState: unknown state, TraceId: 01jqpfr9xb69161dq971t4t4pp, Cleanup temp tables: 0 2025-03-31T16:15:03.088453Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=MjJiOTI4NmEtOTA4OWVkY2QtNDIyNDk0ODQtZTViMTI0Ng==, ActorId: [6:7488007920197923154:2332], ActorState: unknown state, TraceId: 01jqpfr9xb69161dq971t4t4pp, Session actor destroyed 2025-03-31T16:15:03.102128Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=YWE0NzhkOWUtOTBhYjA5NmQtNTA0YjE0NTgtMzdiMjFmZjA=, ActorId: [6:7488007915902955798:2331], ActorState: ReadyState, Session closed due to explicit close event 2025-03-31T16:15:03.102200Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=YWE0NzhkOWUtOTBhYjA5NmQtNTA0YjE0NTgtMzdiMjFmZjA=, ActorId: [6:7488007915902955798:2331], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-03-31T16:15:03.102237Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=YWE0NzhkOWUtOTBhYjA5NmQtNTA0YjE0NTgtMzdiMjFmZjA=, ActorId: [6:7488007915902955798:2331], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-03-31T16:15:03.102272Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=YWE0NzhkOWUtOTBhYjA5NmQtNTA0YjE0NTgtMzdiMjFmZjA=, ActorId: [6:7488007915902955798:2331], ActorState: unknown state, Cleanup temp tables: 0 2025-03-31T16:15:03.102357Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=YWE0NzhkOWUtOTBhYjA5NmQtNTA0YjE0NTgtMzdiMjFmZjA=, ActorId: [6:7488007915902955798:2331], ActorState: unknown state, Session actor destroyed >> BasicStatistics::Serverless >> BasicStatistics::Simple ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::InnerJoinCustomColumnOrder-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 19412, MsgBus: 16136 2025-03-31T16:14:50.288570Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488007861774651241:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:14:50.288686Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001977/r3tmp/tmpzKRE44/pdisk_1.dat 2025-03-31T16:14:50.626036Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19412, node 1 2025-03-31T16:14:50.692596Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:14:50.692828Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:14:50.694804Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:14:50.727938Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2025-03-31T16:14:50.727962Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:14:50.727970Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:14:50.728146Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16136 TClient is connected to server localhost:16136 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:14:51.331028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:14:51.348150Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:14:51.362031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:14:51.501307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:14:51.684861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:14:51.770056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:14:53.568965Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007874659554909:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:53.569065Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:53.904778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:14:53.972897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:14:54.001747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:14:54.049329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:14:54.099521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:14:54.174569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:14:54.287788Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007878954522729:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:54.287922Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:54.291157Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007878954522734:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:54.297063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:14:54.320722Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007878954522736:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:14:54.386749Z node 1 :TX_PROXY ERROR: Actor# [1:7488007878954522794:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:14:55.289071Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488007861774651241:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:14:55.289148Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:14:55.385061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:14:55.417569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:14:55.490873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T16:14:55.535485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T16:14:55.608935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-31T16:14:55.648504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 31624, MsgBus: 3035 2025-03-31T16:14:57.498033Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488007894047694108:2172];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:14:57.498081Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001977/r3tmp/tmpYaiFYB/pdisk_1.dat 2025-03-31T16:14:57.605828Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:14:57.610075Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:14:57.610150Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:14:57.619551Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31624, node 2 2025-03-31T16:14:57.680489Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:14:57.680514Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:14:57.680522Z node 2 
:NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:14:57.680630Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3035 TClient is connected to server localhost:3035 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-31T16:14:58.132646Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:14:58.153659Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:14:58.227517Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:14:58.414839Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:14:58.505565Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:15:00.991573Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488007906932597631:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:00.991684Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:01.030204Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:15:01.084429Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:15:01.137313Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:15:01.188203Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:15:01.274040Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:15:01.345990Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:15:01.462666Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488007911227565445:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:01.462755Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:01.462976Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488007911227565450:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:01.466641Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:15:01.486241Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488007911227565452:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:15:01.542331Z node 2 :TX_PROXY ERROR: Actor# [2:7488007911227565509:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:15:02.500575Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488007894047694108:2172];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:15:02.500671Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:15:02.593220Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:15:02.680153Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:15:02.744272Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T16:15:02.821717Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T16:15:02.884308Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-31T16:15:02.927858Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 |87.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |87.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> KqpJoinOrder::GeneralPrioritiesBug4 [GOOD] |87.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> Viewer::JsonAutocompleteEmpty [GOOD] >> Viewer::JsonAutocompleteEndOfDatabaseName >> Viewer::JsonAutocompleteStartOfDatabaseName [GOOD] >> Viewer::JsonStorageListingV1 >> HttpRequest::AnalyzeServerless >> TConsoleTests::TestCreateTenantAlreadyExistsExtSubdomain [GOOD] >> TConsoleTests::TestCreateSubSubDomain >> KqpMergeCn::TopSortByDesc_Datetime_Limit3 [GOOD] >> KqpMergeCn::TopSortByDesc_Bool_And_PKUint64_Limit4 |87.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |87.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> Viewer::JsonAutocompleteColumns [GOOD] >> BasicStatistics::TwoServerlessDbs >> ResourcePoolClassifiersDdl::TestDropResourcePoolClassifier [GOOD] >> ResourcePoolClassifiersDdl::TestDropResourcePool |87.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::GeneralPrioritiesBug4 [GOOD] Test command err: Trying to start 
YDB, gRPC: 17542, MsgBus: 5302 2025-03-31T16:14:26.177305Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488007758603037289:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:14:26.177545Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001990/r3tmp/tmp8NoLIT/pdisk_1.dat 2025-03-31T16:14:26.482273Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17542, node 1 2025-03-31T16:14:26.558516Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:14:26.558544Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:14:26.558559Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:14:26.558717Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:14:26.578590Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:14:26.578702Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:14:26.580169Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5302 TClient is connected to server localhost:5302 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:14:27.027782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:14:29.027711Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007771487939838:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:29.027728Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007771487939833:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:29.027842Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:29.031379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:14:29.041264Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007771487939847:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:14:29.105000Z node 1 :TX_PROXY ERROR: Actor# [1:7488007771487939898:2336] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:14:29.375530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:14:29.471951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:14:29.501274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:14:29.532003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:14:29.560398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:14:29.680790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:14:29.734385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:14:29.761557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:14:29.788154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-31T16:14:29.814639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-03-31T16:14:29.837873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-03-31T16:14:29.864608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:14:29.890337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:14:30.567675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at 
schemeshard: 72057594046644480 2025-03-31T16:14:30.599242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T16:14:30.632162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-31T16:14:30.661605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-31T16:14:30.691349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-31T16:14:30.726525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-31T16:14:30.756482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-31T16:14:30.789270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-31T16:14:30.860959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-03-31T16:14:30.889716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-31T16:14:30.973292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-31T16:14:31.007238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-03-31T16:14:31.078135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-03-31T16:14:31.132617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-03-31T16:14:31.177648Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488007758603037289:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:14:31.177750Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:14:31.206431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at 
schemeshard: 72057594046644480 2025-03-31T16:14:31.236434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-03-31T16:14:31.283539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 2025-03-31T16:14:31.318275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but prop ... oller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:59.348129Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038487;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:59.352708Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038529;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:59.354159Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038622;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:59.359387Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038595;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:59.360196Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038632;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:59.366231Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038605;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:59.373293Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038642;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:59.374459Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038620;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:59.380873Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038507;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:59.383567Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038616;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:59.389381Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038614;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:59.389552Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038501;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:59.395704Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038638;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 
2025-03-31T16:14:59.396572Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038652;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:59.403342Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038640;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:59.403961Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038495;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:59.410467Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038547;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:59.412758Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038648;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:59.415979Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038537;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:59.420277Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038545;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:59.421285Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038583;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:59.425901Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038561;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:59.426503Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038659;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:59.431610Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038637;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:59.431621Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038533;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:59.437346Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038656;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:59.440889Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038581;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:59.442661Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038619;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:59.445017Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038651;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:59.448546Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038653;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:59.449105Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038615;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:59.454026Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038613;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:59.454377Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038639;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:59.459860Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038628;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:59.459869Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038563;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:59.465355Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038633;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:59.465436Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038650;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:59.470585Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038575;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:59.474407Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038646;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:59.476121Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038657;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:59.480423Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038626;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:59.481650Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038655;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:59.486647Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038630;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:59.487380Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038607;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:59.493345Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038645;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:14:59.603885Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpfqc14dx2mvgvm65antvmm", SessionId: ydb://session/3?node_id=1&id=M2IzMWE4ZTMtOGM3MzhiNTgtNDJlZjE1NTMtZDJlMzc1NjU=, Slow query, duration: 27.119159s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:14:59.853556Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:14:59.854200Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;self_id=[1:7488007792962783415:2977];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038170;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038629;receive=72075186224038331; 2025-03-31T16:14:59.854918Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:14:59.856137Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::JsonAutocompleteColumns [GOOD] Test command err: 2025-03-31T16:15:05.954646Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:336:2378], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-03-31T16:15:05.955109Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-31T16:15:05.955167Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
test_client.cpp: SetPath # SectorMap:test-client[:2000]
TServer::EnableGrpc on GrpcPort 31400, node 1
TClient is connected to server localhost:21509
|87.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest
|87.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest
>> HttpRequest::Probe
>> BasicStatistics::NotFullStatisticsColumnshard
|87.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest
|87.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest
>> BasicStatistics::TwoNodes
>> TConsoleTests::TestRemoveTenantExtSubdomain [GOOD]
>> TConsoleTests::TestRemoveSharedTenantWoServerlessTenants
>> Viewer::JsonAutocompleteSimilarDatabaseName [GOOD]
>> Viewer::JsonAutocompleteSimilarDatabaseNameWithLimit
>> BasicStatistics::TwoDatabases
>> KqpSinkLocks::InvalidateOlapOnCommit [GOOD]
|87.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest
>> TConsoleTests::TestCreateSubSubDomain [GOOD]
>> TConsoleTests::TestCreateSubSubDomainExtSubdomain
|87.0%| [TA] $(B)/ydb/tests/olap/oom/test-results/py3test/{meta.json ... results_accumulator.log}
>> KqpMergeCn::TopSortByDesc_Bool_And_PKUint64_Limit4 [GOOD]
>> KqpMergeCn::TopSortBy_Date_And_Datetime_Limit4
>> Viewer::JsonAutocompleteEndOfDatabaseName [GOOD]
>> Viewer::JsonAutocompleteScheme
>> KqpJoinOrder::FiveWayJoinWithPreds-ColumnStore [GOOD]
|87.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest
>> TConsoleTests::TestRemoveSharedTenantWoServerlessTenants [GOOD]
>> TConsoleTests::TestRemoveSharedTenantWithServerlessTenants
>> ResourcePoolClassifiersSysView::TestResourcePoolClassifiersSysViewOnServerless [GOOD]
>> ResourcePoolClassifiersSysView::TestResourcePoolClassifiersSysViewFilters
>> TConsoleTests::TestCreateSubSubDomainExtSubdomain [GOOD]
>> TConsoleTests::TestAuthorization
|87.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest
|87.1%| [TA] {RESULT} $(B)/ydb/tests/olap/oom/test-results/py3test/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::InvalidateOlapOnCommit [GOOD] Test command err: Trying to start YDB, gRPC: 16703, MsgBus: 32570 2025-03-31T16:14:39.265750Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488007814831269313:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:14:39.266095Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001293/r3tmp/tmpBtELgq/pdisk_1.dat 2025-03-31T16:14:39.727455Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:14:39.750595Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:14:39.750749Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:14:39.759008Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16703, node 1 2025-03-31T16:14:40.034586Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:14:40.034615Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:14:40.034630Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:14:40.034800Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32570 TClient is connected to server localhost:32570 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:14:40.893087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:14:40.924864Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:14:42.346896Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007827716171880:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:42.346895Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007827716171868:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:42.347033Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:42.354665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:14:42.366338Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007827716171882:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:14:42.441273Z node 1 :TX_PROXY ERROR: Actor# [1:7488007827716171933:2338] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:14:43.171130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:14:43.324498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:14:44.344659Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488007814831269313:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:14:44.345980Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:14:44.392659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:14:46.679117Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=5; 2025-03-31T16:14:46.679344Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 5 at tablet 72075186224037888 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-03-31T16:14:46.679524Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 5 at tablet 72075186224037888 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-03-31T16:14:46.679785Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488007844896050037:2972], Table: `/Root/Test` ([72057594046644480:6:1]), SessionActorId: [1:7488007844896049789:2972]Got LOCKS BROKEN for table `/Root/Test`. ShardID=72075186224037888, Sink=[1:7488007844896050037:2972].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-03-31T16:14:46.680290Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488007844896050026:2972], SessionActorId: [1:7488007844896049789:2972], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[1:7488007844896049789:2972]. isRollback=0 2025-03-31T16:14:46.698579Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZGY3NmIxMzItYzdiNGE4ZTEtYmE1ZDk4YzAtZTYyMjIwODk=, ActorId: [1:7488007844896049789:2972], ActorState: ExecuteState, TraceId: 01jqpfqsv61kshwqtzwzvy2v01, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [1:7488007844896050027:2972] from: [1:7488007844896050026:2972] 2025-03-31T16:14:46.698719Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7488007844896050027:2972] TxId: 281474976710665. Ctx: { TraceId: 01jqpfqsv61kshwqtzwzvy2v01, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGY3NmIxMzItYzdiNGE4ZTEtYmE1ZDk4YzAtZTYyMjIwODk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-03-31T16:14:46.698958Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZGY3NmIxMzItYzdiNGE4ZTEtYmE1ZDk4YzAtZTYyMjIwODk=, ActorId: [1:7488007844896049789:2972], ActorState: ExecuteState, TraceId: 01jqpfqsv61kshwqtzwzvy2v01, Create QueryResponse for error on request, msg:
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 12553, MsgBus: 10152 2025-03-31T16:14:53.123474Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488007875778983034:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:14:53.123526Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001293/r3tmp/tmpcnkfZX/pdisk_1.dat 2025-03-31T16:14:53.245675Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12553, node 2 2025-03-31T16:14:53.294570Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:14:53.294658Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:14:53.300861Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:14:53.355722Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:14:53.355752Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:14:53.355761Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:14:53.355895Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10152 TClient is connected to server localhost:10152 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:14:53.817159Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:14:53.832686Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594 ... 
24038007;self_id=[2:7488007910138727159:3253];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038007;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:15:06.751115Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038050;self_id=[2:7488007914433696162:3493];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038050;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:15:06.751388Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038007;self_id=[2:7488007910138727159:3253];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038007;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:15:06.766307Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038061;self_id=[2:7488007914433695766:3458];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038061;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:15:06.766896Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038061;self_id=[2:7488007914433695766:3458];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038061;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:15:06.774167Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038020;self_id=[2:7488007914433696284:3519];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038020;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:15:06.777357Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038020;self_id=[2:7488007914433696284:3519];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038020;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:15:06.784389Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038016;self_id=[2:7488007910138727178:3260];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038016;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:15:06.784692Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038016;self_id=[2:7488007910138727178:3260];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038016;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:15:06.792850Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=281474976715667;tx_id=281474976715667;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715667; 2025-03-31T16:15:06.795556Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038043;self_id=[2:7488007914433696174:3497];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038043;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:15:06.796374Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038043;self_id=[2:7488007914433696174:3497];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038043;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:15:06.796551Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037897;tx_state=TTxProgressTx::Complete;commit_tx_id=281474976715667;commit_lock_id=281474976715666;fline=manager.cpp:94;broken_lock_id=281474976715665; 2025-03-31T16:15:06.796836Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037897;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:15:06.809510Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037897;self_id=[2:7488007888663885804:2345];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037897;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=tablet lock have another internal generation counter: 18446744073709551615 != 0;tx_id=0; 2025-03-31T16:15:06.809710Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7488007931613568414:4144], SessionActorId: [2:7488007927318601091:4144], Got LOCKS BROKEN for table. ShardID=72075186224037897, Sink=[2:7488007931613568414:4144].{
: Error: tablet lock have another internal generation counter: 18446744073709551615 != 0, code: 2001 } 2025-03-31T16:15:06.809867Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7488007931613568414:4144], SessionActorId: [2:7488007927318601091:4144], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: tablet lock have another internal generation counter: 18446744073709551615 != 0, code: 2001 . sessionActorId=[2:7488007927318601091:4144]. isRollback=0 2025-03-31T16:15:06.810191Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NzAwMDY5MGEtNDQ3NTM5MDQtZjNiNzc1NDMtNGQ2MzVlNA==, ActorId: [2:7488007927318601091:4144], ActorState: ExecuteState, TraceId: 01jqpfrdhmcpyz55hg4pcya9jv, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [2:7488007931613568619:4144] from: [2:7488007931613568414:4144] 2025-03-31T16:15:06.810269Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7488007931613568619:4144] TxId: 281474976715668. Ctx: { TraceId: 01jqpfrdhmcpyz55hg4pcya9jv, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzAwMDY5MGEtNDQ3NTM5MDQtZjNiNzc1NDMtNGQ2MzVlNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001 subissue: {
: Error: tablet lock have another internal generation counter: 18446744073709551615 != 0, code: 2001 } } 2025-03-31T16:15:06.810425Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NzAwMDY5MGEtNDQ3NTM5MDQtZjNiNzc1NDMtNGQ2MzVlNA==, ActorId: [2:7488007927318601091:4144], ActorState: ExecuteState, TraceId: 01jqpfrdhmcpyz55hg4pcya9jv, Create QueryResponse for error on request, msg:
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: tablet lock have another internal generation counter: 18446744073709551615 != 0, code: 2001 2025-03-31T16:15:06.814353Z node 2 :TX_COLUMNSHARD WARN: TColumnShard.StateWork at 72075186224037888 unhandled event type: NKikimr::TEvDataShard::TEvCancelTransactionProposal event: NKikimrTxDataShard.TEvCancelTransactionProposal TxId: 281474976715668 2025-03-31T16:15:06.814480Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037888;self_id=[2:7488007892958853143:2352];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037888;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:15:06.814776Z node 2 :TX_COLUMNSHARD WARN: TColumnShard.StateWork at 72075186224037889 unhandled event type: NKikimr::TEvDataShard::TEvCancelTransactionProposal event: NKikimrTxDataShard.TEvCancelTransactionProposal TxId: 281474976715668 2025-03-31T16:15:06.814828Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037889;self_id=[2:7488007888663885802:2344];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037889;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:15:06.815013Z node 2 :TX_COLUMNSHARD WARN: TColumnShard.StateWork at 72075186224037890 unhandled event type: NKikimr::TEvDataShard::TEvCancelTransactionProposal event: NKikimrTxDataShard.TEvCancelTransactionProposal TxId: 281474976715668 2025-03-31T16:15:06.815108Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037890;self_id=[2:7488007892958853110:2350];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037890;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:15:06.815257Z node 2 :TX_COLUMNSHARD WARN: TColumnShard.StateWork at 72075186224037891 unhandled event type: NKikimr::TEvDataShard::TEvCancelTransactionProposal event: NKikimrTxDataShard.TEvCancelTransactionProposal TxId: 281474976715668 2025-03-31T16:15:06.815301Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037891;self_id=[2:7488007892958853112:2351];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037891;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:15:06.815429Z node 2 :TX_COLUMNSHARD WARN: TColumnShard.StateWork at 72075186224037892 unhandled event type: NKikimr::TEvDataShard::TEvCancelTransactionProposal event: NKikimrTxDataShard.TEvCancelTransactionProposal TxId: 281474976715668 2025-03-31T16:15:06.815470Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037892;self_id=[2:7488007888663885812:2349];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037892;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:15:06.815583Z node 2 :TX_COLUMNSHARD WARN: TColumnShard.StateWork at 72075186224037893 unhandled event type: NKikimr::TEvDataShard::TEvCancelTransactionProposal event: NKikimrTxDataShard.TEvCancelTransactionProposal TxId: 281474976715668 2025-03-31T16:15:06.815624Z node 2 :TX_COLUMNSHARD_WRITE WARN: 
tablet_id=72075186224037893;self_id=[2:7488007888663885810:2348];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037893;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:15:06.815740Z node 2 :TX_COLUMNSHARD WARN: TColumnShard.StateWork at 72075186224037894 unhandled event type: NKikimr::TEvDataShard::TEvCancelTransactionProposal event: NKikimrTxDataShard.TEvCancelTransactionProposal TxId: 281474976715668 2025-03-31T16:15:06.815784Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037894;self_id=[2:7488007888663885808:2347];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037894;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:15:06.815898Z node 2 :TX_COLUMNSHARD WARN: TColumnShard.StateWork at 72075186224037896 unhandled event type: NKikimr::TEvDataShard::TEvCancelTransactionProposal event: NKikimrTxDataShard.TEvCancelTransactionProposal TxId: 281474976715668 2025-03-31T16:15:06.815951Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037896;self_id=[2:7488007892958853197:2353];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037896;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:15:06.816059Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037897;self_id=[2:7488007888663885804:2345];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037897;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:15:06.893906Z node 2 :TX_COLUMNSHARD WARN: TColumnShard.StateWork at 72075186224037895 unhandled event type: NKikimr::TEvDataShard::TEvCancelTransactionProposal event: NKikimrTxDataShard.TEvCancelTransactionProposal TxId: 281474976715668 2025-03-31T16:15:06.894208Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037895;self_id=[2:7488007888663885806:2346];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037895;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; WAIT_INDEXATION: 0 2025-03-31T16:15:08.231883Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:15:08.231917Z node 2 :IMPORT WARN: Table profiles were not loaded WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoinWithPreds-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 6562, MsgBus: 22776 2025-03-31T16:14:36.337852Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488007804955195999:2139];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:14:36.337971Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001985/r3tmp/tmp1yTpSg/pdisk_1.dat 2025-03-31T16:14:36.775837Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:14:36.778802Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> 
Disconnected 2025-03-31T16:14:36.778922Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:14:36.782947Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6562, node 1 2025-03-31T16:14:36.851493Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:14:36.851511Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:14:36.851517Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:14:36.851612Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22776 TClient is connected to server localhost:22776 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:14:37.502552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:14:39.540520Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007817840098459:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:39.540542Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007817840098467:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:39.540662Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:39.545168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:14:39.562886Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007817840098473:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:14:39.646609Z node 1 :TX_PROXY ERROR: Actor# [1:7488007817840098524:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:14:39.891372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:14:40.016607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:14:40.051136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:14:40.080909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:14:40.109409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:14:40.316450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:14:40.378509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:14:40.454957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:14:40.488396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-31T16:14:40.554363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-03-31T16:14:40.591990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-03-31T16:14:40.625455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:14:40.663990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:14:41.311373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at 
schemeshard: 72057594046644480 2025-03-31T16:14:41.334561Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488007804955195999:2139];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:14:41.334626Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:14:41.348119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T16:14:41.378337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-31T16:14:41.410042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-31T16:14:41.437032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-31T16:14:41.463529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-31T16:14:41.491538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-31T16:14:41.517480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-31T16:14:41.547704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-03-31T16:14:41.577793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-31T16:14:41.615851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-31T16:14:41.648902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-03-31T16:14:41.685125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-03-31T16:14:41.724871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-03-31T16:14:41.777233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at 
schemeshard: 72057594046644480 2025-03-31T16:14:41.809424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-03-31T16:14:41.842650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 2025-03-31T16:14:41.876544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but pro ... oller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:09.871145Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038624;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:09.877580Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038652;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:09.877618Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038517;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:09.885128Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038497;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:09.888266Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038658;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:09.891142Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038645;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:09.900833Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038635;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:09.905319Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038627;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:09.910985Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038513;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:09.915726Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038659;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:09.921833Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038613;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:09.923193Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038523;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:09.929422Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038625;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 
2025-03-31T16:15:09.930015Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038656;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:09.936192Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038595;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:09.936634Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038607;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:09.943177Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038639;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:09.943483Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038616;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:09.948835Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038556;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:09.949619Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038632;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:09.955047Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038547;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:09.956645Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038620;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:09.961756Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038601;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:09.963412Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:09.968915Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038637;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:09.969840Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038619;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:09.975946Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038491;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:09.975957Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038651;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:09.983044Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038603;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:09.983062Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038655;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:09.990514Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038631;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:09.991668Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038621;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:09.997729Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038647;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:09.997864Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038661;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:10.005034Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038657;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:10.005085Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038643;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:10.013043Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038605;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:10.014071Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038615;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:10.020804Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038611;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:10.021677Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038633;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:10.027969Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038623;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:10.027969Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038653;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:10.034843Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038609;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:10.035033Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038641;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:10.042458Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038579;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:10.152231Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpfqpfkfd6q3qqgghtd7mb3", SessionId: ydb://session/3?node_id=1&id=ZGRlNTY5YTUtZGQ2NmYzNDEtMTc4ZTdlNjYtZGJiOTMxODQ=, Slow query, duration: 26.963883s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:15:10.386687Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:15:10.387183Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:15:10.387522Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;self_id=[1:7488007920919340236:5984];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038629;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038170;receive=72075186224038331; 2025-03-31T16:15:10.387884Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> Viewer::JsonAutocompleteSimilarDatabaseNameWithLimit [GOOD] >> Viewer::JsonAutocompleteSimilarDatabaseNamePOST >> KqpWorkloadService::TestCpuLoadThreshold [GOOD] >> KqpWorkloadService::TestCpuLoadThresholdRefresh >> KqpNewEngine::ScalarFunctions [GOOD] >> KqpNewEngine::ScalarMultiUsage |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> KqpMergeCn::TopSortBy_Date_And_Datetime_Limit4 [GOOD] >> KqpMergeCn::SortBy_PK_Uint64_Desc |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> HttpRequest::Analyze >> KqpJoinOrder::TPCDS92-ColumnStore [GOOD] >> ResourcePoolClassifiersDdl::TestDropResourcePool [GOOD] >> TConsoleTests::TestRemoveSharedTenantWithServerlessTenants [GOOD] >> TConsoleTests::TestRemoveSharedTenantAfterRemoveServerlessTenant |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> Viewer::JsonAutocompleteScheme [GOOD] >> Viewer::JsonAutocompleteEmptyColumns >> TConsoleTests::TestAuthorization [GOOD] >> TConsoleTests::TestAuthorizationExtSubdomain >> Viewer::SelectStringWithNoBase64Encoding [GOOD] >> 
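[editor's note] For readability, this is the DDL captured (escaped) in the KQP_SLOW_LOG entry above — the 26.963883s slow query — unescaped verbatim from the logged query text; the same three statements recur in the KqpJoinOrder::TPCDS92-ColumnStore output further below:

CREATE TABLE t1 (
  id1 Int32 NOT NULL,
  PRIMARY KEY (id1)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t2 (
  id2 Int64 NOT NULL,
  t1_id1 Int64 NOT NULL,
  -- random_field2 Int32
  PRIMARY KEY (id2)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t3 (
  id3 Int16 NOT NULL,
  -- random_field3 Int32
  PRIMARY KEY (id3)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);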
Viewer::ServerlessNodesPage ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> ResourcePoolClassifiersDdl::TestDropResourcePool [GOOD] Test command err: 2025-03-31T16:14:07.495690Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488007677407354311:2171];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:14:07.496161Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001135/r3tmp/tmpaxavFf/pdisk_1.dat 2025-03-31T16:14:07.839895Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17280, node 1 2025-03-31T16:14:07.907727Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:14:07.908128Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:14:07.910977Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:14:07.953894Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:14:07.953920Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:14:07.953933Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:14:07.954037Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30468 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:14:08.238941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:14:10.588473Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-03-31T16:14:10.591998Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YjBiNWY3ODAtOTQ1MjYxYTMtMTE3OTUzNjgtYzBlOWI3M2M=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id YjBiNWY3ODAtOTQ1MjYxYTMtMTE3OTUzNjgtYzBlOWI3M2M= 2025-03-31T16:14:10.592653Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7488007690292256730:2329], Start check tables existence, number paths: 2 2025-03-31T16:14:10.592776Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YjBiNWY3ODAtOTQ1MjYxYTMtMTE3OTUzNjgtYzBlOWI3M2M=, ActorId: [1:7488007690292256731:2330], ActorState: unknown state, session actor bootstrapped 2025-03-31T16:14:10.593212Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 1 2025-03-31T16:14:10.593253Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-03-31T16:14:10.593274Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2025-03-31T16:14:10.598694Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7488007690292256730:2329], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-03-31T16:14:10.598780Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7488007690292256730:2329], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-03-31T16:14:10.598805Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7488007690292256730:2329], Successfully finished 2025-03-31T16:14:10.598921Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-03-31T16:14:10.614554Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007690292256748:2303], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-03-31T16:14:10.618644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:14:10.620067Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007690292256748:2303], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2025-03-31T16:14:10.620226Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007690292256748:2303], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2025-03-31T16:14:10.628969Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007690292256748:2303], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:14:10.726400Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007690292256748:2303], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-03-31T16:14:10.730249Z node 1 :TX_PROXY ERROR: Actor# [1:7488007690292256799:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:14:10.730389Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007690292256748:2303], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2025-03-31T16:14:10.733116Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NGFkYjg3YWYtZGJhNGE1NmMtNWYyZDIxMDItNjY3N2Q2MjI=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id NGFkYjg3YWYtZGJhNGE1NmMtNWYyZDIxMDItNjY3N2Q2MjI= 2025-03-31T16:14:10.733247Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NGFkYjg3YWYtZGJhNGE1NmMtNWYyZDIxMDItNjY3N2Q2MjI=, ActorId: [1:7488007690292256806:2331], ActorState: unknown state, session actor bootstrapped 2025-03-31T16:14:10.733423Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2025-03-31T16:14:10.733444Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root 2025-03-31T16:14:10.733524Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007690292256808:2332], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-03-31T16:14:10.733589Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NGFkYjg3YWYtZGJhNGE1NmMtNWYyZDIxMDItNjY3N2Q2MjI=, ActorId: [1:7488007690292256806:2331], ActorState: ReadyState, TraceId: 01jqpfppsd2snnncc793zcep3f, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [1:7488007690292256805:2340] database: Root databaseId: /Root pool id: sample_pool_id 2025-03-31T16:14:10.733663Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved new request from [1:7488007690292256806:2331], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=NGFkYjg3YWYtZGJhNGE1NmMtNWYyZDIxMDItNjY3N2Q2MjI= 2025-03-31T16:14:10.733736Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7488007690292256810:2333], Database: /Root, Start database fetching 2025-03-31T16:14:10.734740Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7488007690292256810:2333], Database: /Root, Database info successfully fetched, serverless: 0 2025-03-31T16:14:10.734854Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched database info, DatabaseId: /Root, Serverless: 0 2025-03-31T16:14:10.734902Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007690292256808:2332], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-03-31T16:14:10.734933Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root 
2025-03-31T16:14:10.734947Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new handler for pool /Root/sample_pool_id 2025-03-31T16:14:10.734986Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [1:7488007690292256819:2334], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=NGFkYjg3YWYtZGJhNGE1NmMtNWYyZDIxMDItNjY3N2Q2MjI=, Start pool fetching 2025-03-31T16:14:10.735017Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007690292256820:2335], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-03-31T16:14:10.735222Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7488007690292256821:2336], DatabaseId: /Root, PoolId: sample_pool_id, Subscribed on schemeboard notifications for path: [OwnerId: 72057594046644480, LocalPathId: 5] 2025-03-31T16:14:10.735230Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007690292256820:2335], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-03-31T16:14:10.735326Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [1:7488007690292256819:2334], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=NGFkYjg3YWYtZGJhNGE1NmMtNWYyZDIxMDItNjY3N2Q2MjI=, Pool info successfully resolved 2025-03-31T16:14:10.735377Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGFkYjg3YWYtZGJhNGE1NmMtNWYyZDIxMDItNjY3N2Q2MjI= 2025-03-31T16:14:10.735494Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7488007690292256821:2336], DatabaseId: /Root, PoolId: sample_pool_id, Received new request, worker id: [1:7488007690292256806:2331], session id: ydb://session/3?node_id=1&id=NGFkYjg3YWYtZGJhNGE1NmMtNWYyZDIxMDItNjY3N2Q2MjI= 2025-03-31T16:14:10.735496Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Request placed into pool, DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=NGFkYjg3YWYtZGJhNGE1NmMtNWYyZDIxMDItNjY3N2Q2MjI= 2025-03-31T16:14:10.735563Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7488007690292256821:2336], DatabaseId: /Root, PoolId: sample_pool_id, Reply continue success to [1:7488007690292256806:2331 ... 
frwyx5h0n50g4jzw71t3s, Create QueryResponse for error on request, msg: Query failed during adding/waiting in workload pool my_pool 2025-03-31T16:15:22.591301Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=ODYxMThjNS1lZDBhMjU3Mi1mZDEwM2I1OS0zOWQ1YzFhNQ==, ActorId: [8:7488007999519667853:2647], ActorState: ExecuteState, TraceId: 01jqpfrwyx5h0n50g4jzw71t3s, Cleanup start, isFinal: 1 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 1 2025-03-31T16:15:22.591445Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Finished request with worker actor [8:7488007999519667853:2647], DatabaseId: /Root, PoolId: my_pool, SessionId: ydb://session/3?node_id=8&id=ODYxMThjNS1lZDBhMjU3Mi1mZDEwM2I1OS0zOWQ1YzFhNQ== 2025-03-31T16:15:22.591507Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ODYxMThjNS1lZDBhMjU3Mi1mZDEwM2I1OS0zOWQ1YzFhNQ==, ActorId: [8:7488007999519667853:2647], ActorState: CleanupState, TraceId: 01jqpfrwyx5h0n50g4jzw71t3s, EndCleanup, isFinal: 1 2025-03-31T16:15:22.591636Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ODYxMThjNS1lZDBhMjU3Mi1mZDEwM2I1OS0zOWQ1YzFhNQ==, ActorId: [8:7488007999519667853:2647], ActorState: CleanupState, TraceId: 01jqpfrwyx5h0n50g4jzw71t3s, Sent query response back to proxy, proxyRequestId: 52, proxyId: [8:7488007943685091233:2276] 2025-03-31T16:15:22.591677Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ODYxMThjNS1lZDBhMjU3Mi1mZDEwM2I1OS0zOWQ1YzFhNQ==, ActorId: [8:7488007999519667853:2647], ActorState: unknown state, TraceId: 01jqpfrwyx5h0n50g4jzw71t3s, Cleanup temp tables: 0 2025-03-31T16:15:22.591827Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ODYxMThjNS1lZDBhMjU3Mi1mZDEwM2I1OS0zOWQ1YzFhNQ==, ActorId: [8:7488007999519667853:2647], ActorState: unknown state, TraceId: 01jqpfrwyx5h0n50g4jzw71t3s, Session actor destroyed 2025-03-31T16:15:22.602280Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=YTUwOGNkNmItOWQ5ZGY5ZDgtZjlmMzEwOTYtYzQ5MjlhNWM=, ActorId: [8:7488007965159928114:2330], ActorState: ReadyState, TraceId: 01jqpfrwz98ds5ay4y3895355m, received request, proxyRequestId: 53 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DDL text: DROP RESOURCE POOL my_pool; rpcActor: [0:0:0] database: /Root databaseId: /Root pool id: default 2025-03-31T16:15:22.617189Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [8:7488007999519667832:2646], DatabaseId: /Root, PoolId: my_pool, Got delete notification 2025-03-31T16:15:22.617306Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: my_pool 2025-03-31T16:15:22.617364Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7488007999519667874:2651], DatabaseId: /Root, PoolId: my_pool, Start pool fetching 2025-03-31T16:15:22.618019Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7488007999519667874:2651], DatabaseId: /Root, PoolId: my_pool, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool my_pool not found or you don't have access permissions } 2025-03-31T16:15:22.618157Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool my_pool, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool my_pool not found or you don't have access permissions } 2025-03-31T16:15:22.621552Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=YTUwOGNkNmItOWQ5ZGY5ZDgtZjlmMzEwOTYtYzQ5MjlhNWM=, ActorId: [8:7488007965159928114:2330], ActorState: ExecuteState, TraceId: 01jqpfrwz98ds5ay4y3895355m, Cleanup start, isFinal: 0 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [8:7488007999519667863:2330] WorkloadServiceCleanup: 0 2025-03-31T16:15:22.623511Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=YTUwOGNkNmItOWQ5ZGY5ZDgtZjlmMzEwOTYtYzQ5MjlhNWM=, ActorId: [8:7488007965159928114:2330], ActorState: CleanupState, TraceId: 01jqpfrwz98ds5ay4y3895355m, EndCleanup, isFinal: 0 2025-03-31T16:15:22.623581Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=YTUwOGNkNmItOWQ5ZGY5ZDgtZjlmMzEwOTYtYzQ5MjlhNWM=, ActorId: [8:7488007965159928114:2330], ActorState: CleanupState, TraceId: 01jqpfrwz98ds5ay4y3895355m, Sent query response back to proxy, proxyRequestId: 53, proxyId: [8:7488007943685091233:2276] 2025-03-31T16:15:22.627781Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NTM4MzdjOS1hZTc5ZWMwMC01NzgxMzg3Yi0xNDBhNDA2MQ==, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id NTM4MzdjOS1hZTc5ZWMwMC01NzgxMzg3Yi0xNDBhNDA2MQ== 2025-03-31T16:15:22.627899Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NTM4MzdjOS1hZTc5ZWMwMC01NzgxMzg3Yi0xNDBhNDA2MQ==, ActorId: [8:7488007999519667887:2652], ActorState: unknown state, session actor bootstrapped 2025-03-31T16:15:22.628184Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: my_pool 2025-03-31T16:15:22.628302Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NTM4MzdjOS1hZTc5ZWMwMC01NzgxMzg3Yi0xNDBhNDA2MQ==, ActorId: [8:7488007999519667887:2652], ActorState: ReadyState, TraceId: 01jqpfrx047bt9kjj77n3gbh3k, received request, proxyRequestId: 54 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [8:7488007999519667886:2991] database: Root databaseId: /Root pool id: default 2025-03-31T16:15:22.628347Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NTM4MzdjOS1hZTc5ZWMwMC01NzgxMzg3Yi0xNDBhNDA2MQ==, ActorId: [8:7488007999519667887:2652], ActorState: ReadyState, TraceId: 01jqpfrx047bt9kjj77n3gbh3k, request placed into pool from cache: default 2025-03-31T16:15:22.628445Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NTM4MzdjOS1hZTc5ZWMwMC01NzgxMzg3Yi0xNDBhNDA2MQ==, ActorId: [8:7488007999519667887:2652], ActorState: ExecuteState, TraceId: 01jqpfrx047bt9kjj77n3gbh3k, Sending CompileQuery request 2025-03-31T16:15:22.628457Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7488007999519667889:2653], DatabaseId: /Root, PoolId: my_pool, Start pool fetching 2025-03-31T16:15:22.629340Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7488007999519667889:2653], DatabaseId: /Root, PoolId: my_pool, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool my_pool not found or you don't have access permissions } 2025-03-31T16:15:22.629430Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool my_pool, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool my_pool not found or you don't have access permissions } 2025-03-31T16:15:22.718464Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NTM4MzdjOS1hZTc5ZWMwMC01NzgxMzg3Yi0xNDBhNDA2MQ==, ActorId: [8:7488007999519667887:2652], ActorState: ExecuteState, TraceId: 01jqpfrx047bt9kjj77n3gbh3k, ExecutePhyTx, tx: 0x000050C00010C798 literal: 0 commit: 1 txCtx.DeferredEffects.size(): 0 2025-03-31T16:15:22.718550Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NTM4MzdjOS1hZTc5ZWMwMC01NzgxMzg3Yi0xNDBhNDA2MQ==, ActorId: [8:7488007999519667887:2652], ActorState: ExecuteState, TraceId: 01jqpfrx047bt9kjj77n3gbh3k, Sending to Executer TraceId: 0 8 2025-03-31T16:15:22.718657Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NTM4MzdjOS1hZTc5ZWMwMC01NzgxMzg3Yi0xNDBhNDA2MQ==, ActorId: [8:7488007999519667887:2652], ActorState: ExecuteState, TraceId: 01jqpfrx047bt9kjj77n3gbh3k, Created new KQP executer: [8:7488007999519667893:2652] isRollback: 0 2025-03-31T16:15:22.720589Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NTM4MzdjOS1hZTc5ZWMwMC01NzgxMzg3Yi0xNDBhNDA2MQ==, ActorId: [8:7488007999519667887:2652], ActorState: ExecuteState, TraceId: 01jqpfrx047bt9kjj77n3gbh3k, Forwarded TEvStreamData to [8:7488007999519667886:2991] 2025-03-31T16:15:22.722437Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NTM4MzdjOS1hZTc5ZWMwMC01NzgxMzg3Yi0xNDBhNDA2MQ==, ActorId: [8:7488007999519667887:2652], ActorState: ExecuteState, TraceId: 01jqpfrx047bt9kjj77n3gbh3k, TEvTxResponse, CurrentTx: 1/1 response.status: SUCCESS 2025-03-31T16:15:22.722663Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=NTM4MzdjOS1hZTc5ZWMwMC01NzgxMzg3Yi0xNDBhNDA2MQ==, ActorId: [8:7488007999519667887:2652], ActorState: ExecuteState, TraceId: 01jqpfrx047bt9kjj77n3gbh3k, txInfo Status: Committed Kind: Pure TotalDuration: 4.297 ServerDuration: 4.226 QueriesCount: 2 2025-03-31T16:15:22.722785Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NTM4MzdjOS1hZTc5ZWMwMC01NzgxMzg3Yi0xNDBhNDA2MQ==, ActorId: [8:7488007999519667887:2652], ActorState: ExecuteState, TraceId: 01jqpfrx047bt9kjj77n3gbh3k, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-03-31T16:15:22.723044Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=NTM4MzdjOS1hZTc5ZWMwMC01NzgxMzg3Yi0xNDBhNDA2MQ==, ActorId: [8:7488007999519667887:2652], ActorState: ExecuteState, TraceId: 01jqpfrx047bt9kjj77n3gbh3k, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-03-31T16:15:22.723105Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NTM4MzdjOS1hZTc5ZWMwMC01NzgxMzg3Yi0xNDBhNDA2MQ==, ActorId: [8:7488007999519667887:2652], ActorState: ExecuteState, TraceId: 01jqpfrx047bt9kjj77n3gbh3k, EndCleanup, isFinal: 1 2025-03-31T16:15:22.723178Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NTM4MzdjOS1hZTc5ZWMwMC01NzgxMzg3Yi0xNDBhNDA2MQ==, ActorId: [8:7488007999519667887:2652], ActorState: ExecuteState, TraceId: 01jqpfrx047bt9kjj77n3gbh3k, Sent query response back to proxy, proxyRequestId: 54, proxyId: [8:7488007943685091233:2276] 2025-03-31T16:15:22.723208Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NTM4MzdjOS1hZTc5ZWMwMC01NzgxMzg3Yi0xNDBhNDA2MQ==, ActorId: [8:7488007999519667887:2652], ActorState: unknown state, TraceId: 01jqpfrx047bt9kjj77n3gbh3k, Cleanup temp tables: 0 2025-03-31T16:15:22.723546Z 
node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NTM4MzdjOS1hZTc5ZWMwMC01NzgxMzg3Yi0xNDBhNDA2MQ==, ActorId: [8:7488007999519667887:2652], ActorState: unknown state, TraceId: 01jqpfrx047bt9kjj77n3gbh3k, Session actor destroyed 2025-03-31T16:15:22.735753Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=YTUwOGNkNmItOWQ5ZGY5ZDgtZjlmMzEwOTYtYzQ5MjlhNWM=, ActorId: [8:7488007965159928114:2330], ActorState: ReadyState, Session closed due to explicit close event 2025-03-31T16:15:22.735810Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=YTUwOGNkNmItOWQ5ZGY5ZDgtZjlmMzEwOTYtYzQ5MjlhNWM=, ActorId: [8:7488007965159928114:2330], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-03-31T16:15:22.735849Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=YTUwOGNkNmItOWQ5ZGY5ZDgtZjlmMzEwOTYtYzQ5MjlhNWM=, ActorId: [8:7488007965159928114:2330], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-03-31T16:15:22.735879Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=YTUwOGNkNmItOWQ5ZGY5ZDgtZjlmMzEwOTYtYzQ5MjlhNWM=, ActorId: [8:7488007965159928114:2330], ActorState: unknown state, Cleanup temp tables: 0 2025-03-31T16:15:22.735986Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=YTUwOGNkNmItOWQ5ZGY5ZDgtZjlmMzEwOTYtYzQ5MjlhNWM=, ActorId: [8:7488007965159928114:2330], ActorState: unknown state, Session actor destroyed >> Viewer::SelectStringWithBase64Encoding [GOOD] >> Viewer::QueryExecuteScript ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS92-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 25936, MsgBus: 12423 2025-03-31T16:14:32.000379Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488007783578921571:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:14:32.000456Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001989/r3tmp/tmp7PGLNA/pdisk_1.dat 2025-03-31T16:14:32.316973Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25936, node 1 2025-03-31T16:14:32.380453Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:14:32.380536Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:14:32.387549Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:14:32.420164Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:14:32.420190Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:14:32.420203Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:14:32.420339Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12423 TClient is connected to server localhost:12423 WaitRootIsUp 'Root'... 
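[editor's note] As context for the ResourcePoolClassifiersDdl::TestDropResourcePool output above: the test drives the workload manager through resource-pool DDL. A minimal YQL sketch of that sequence follows; only "DROP RESOURCE POOL my_pool;" and "SELECT 42;" appear verbatim in the log — the CREATE statement and its parameter values are illustrative assumptions, not taken from the test:

-- Assumed creation step; parameter names/values are illustrative, not from the log.
CREATE RESOURCE POOL my_pool WITH (
    CONCURRENT_QUERY_LIMIT = 1,
    QUEUE_SIZE = 0
);

-- Probe query; the log shows the workload service placing it into a pool.
SELECT 42;

-- Verbatim from the log. After this, pool fetches fail with NOT_FOUND
-- ("Resource pool my_pool not found or you don't have access permissions"),
-- and subsequent queries are placed into the 'default' pool from cache.
DROP RESOURCE POOL my_pool;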
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:14:32.964569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:14:35.013799Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007800758791423:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:35.013960Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:35.014461Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007800758791435:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:35.019767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:14:35.035387Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007800758791437:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:14:35.090757Z node 1 :TX_PROXY ERROR: Actor# [1:7488007800758791490:2337] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:14:35.400585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:14:35.539374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:14:35.573427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:14:35.595702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:14:35.659188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:14:35.793411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:14:35.820366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:14:35.851350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:14:35.883983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-31T16:14:35.923037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-03-31T16:14:35.954272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-03-31T16:14:36.022118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:14:36.049393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:14:36.673183Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at 
schemeshard: 72057594046644480 2025-03-31T16:14:36.753839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T16:14:36.792548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-31T16:14:36.881697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-31T16:14:36.930787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-31T16:14:36.970811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-31T16:14:37.000456Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488007783578921571:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:14:37.000558Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:14:37.010564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-31T16:14:37.085156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-31T16:14:37.120542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-03-31T16:14:37.158580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-31T16:14:37.195520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-31T16:14:37.233638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-03-31T16:14:37.267708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-03-31T16:14:37.303189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-03-31T16:14:37.374516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at 
schemeshard: 72057594046644480 2025-03-31T16:14:37.411695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-03-31T16:14:37.446988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 2025-03-31T16:14:37.485040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but p ... oller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:07.014719Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038510;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:07.021546Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038578;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:07.021609Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038564;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:07.028626Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038542;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:07.028670Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038637;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:07.035422Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038580;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:07.038215Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038616;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:07.042170Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038588;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:07.048502Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038486;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:07.051929Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038566;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:07.055756Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038648;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:07.057614Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038536;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:07.062150Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038590;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 
2025-03-31T16:15:07.063480Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038602;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:15:07.068835Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038522;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:15:07.069346Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038568;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:15:07.075201Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038534;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:15:07.075366Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038576;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:15:07.082066Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038641;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:15:07.082287Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038634;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:15:07.088391Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038619;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:15:07.089072Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038560;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:15:07.094616Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038658;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:15:07.100220Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038488;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:15:07.100635Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038630;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:15:07.106469Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038638;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:15:07.107583Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038608;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:15:07.113040Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038614;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:15:07.114248Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038620;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:15:07.120499Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038660;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:15:07.120661Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038628;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:15:07.127152Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038622;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:15:07.127311Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038649;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:15:07.133831Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038643;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:15:07.133831Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038633;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:15:07.140728Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038640;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:15:07.145514Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038639;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:15:07.146562Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038631;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:15:07.153378Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038624;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:15:07.153378Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038626;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:15:07.159242Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038650;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:15:07.159243Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038651;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:15:07.164142Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038642;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:15:07.168877Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038636;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:15:07.182932Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038652;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:15:07.271590Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpfqj274b9wvanjn7c8ewhm", SessionId: ydb://session/3?node_id=1&id=ZTBhNjlkMTgtOTE1ODQyOWYtNDY3Y2JjN2EtY2YzMWM5Y2E=, Slow query, duration: 28.607331s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n    id1 Int32 NOT NULL,\n    PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n    id2 Int64 NOT NULL,\n    t1_id1 Int64 NOT NULL,\n    -- random_field2 Int32\n    PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n    id3 Int16 NOT NULL,\n    -- random_field3 Int32\n    PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b
2025-03-31T16:15:07.769671Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716;
2025-03-31T16:15:07.770193Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716;
2025-03-31T16:15:07.771302Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;self_id=[1:7488007882363192217:5513];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038331;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038629;receive=72075186224038170;
2025-03-31T16:15:07.771699Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716;
|87.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest
>> HttpRequest::Status
|87.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest
|87.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest
|87.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest
>> HttpRequest::AnalyzeServerless [GOOD]
>> KqpNewEngine::ScalarMultiUsage [GOOD]
>> KqpNewEngine::SequentialReadsPragma+Enabled
>> Viewer::JsonAutocompleteSimilarDatabaseNamePOST [GOOD]
>> Viewer::JsonAutocompleteSimilarDatabaseNameLowerCase
|87.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> HttpRequest::AnalyzeServerless [GOOD]
Test command err:
2025-03-31T16:15:11.058594Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-03-31T16:15:11.058882Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
2025-03-31T16:15:11.058977Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d0c/r3tmp/tmpkJUhoO/pdisk_1.dat
2025-03-31T16:15:11.482288Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 11943, node 1
2025-03-31T16:15:11.763816Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T16:15:11.763880Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T16:15:11.763914Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T16:15:11.766639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480
2025-03-31T16:15:11.767127Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
2025-03-31T16:15:11.861614Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:15:11.861806Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:15:11.876758Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:10082
2025-03-31T16:15:12.491660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
2025-03-31T16:15:16.076924Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2
2025-03-31T16:15:16.114468Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:15:16.114634Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:15:16.152513Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2
2025-03-31T16:15:16.155049Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-31T16:15:16.425080Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed
2025-03-31T16:15:16.425749Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed
2025-03-31T16:15:16.426353Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed
2025-03-31T16:15:16.426498Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed
2025-03-31T16:15:16.426743Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed
2025-03-31T16:15:16.426856Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed
2025-03-31T16:15:16.426952Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed
2025-03-31T16:15:16.427029Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed
2025-03-31T16:15:16.428370Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed
2025-03-31T16:15:16.656481Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:15:16.656606Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:15:16.672801Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-31T16:15:16.854768Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:15:16.914251Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor
2025-03-31T16:15:16.914391Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute
2025-03-31T16:15:16.954194Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete
2025-03-31T16:15:16.956203Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute
2025-03-31T16:15:16.956427Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0
2025-03-31T16:15:16.956494Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0
2025-03-31T16:15:16.956549Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0
2025-03-31T16:15:16.956631Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0
2025-03-31T16:15:16.956692Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0
2025-03-31T16:15:16.956745Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete
2025-03-31T16:15:16.958771Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes
2025-03-31T16:15:16.991191Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897
2025-03-31T16:15:16.991327Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1877:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897
2025-03-31T16:15:16.997469Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1888:2606]
2025-03-31T16:15:17.003461Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1918:2621]
2025-03-31T16:15:17.004553Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1918:2621], schemeshard id = 72075186224037897
2025-03-31T16:15:17.011458Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared
2025-03-31T16:15:17.029435Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown
2025-03-31T16:15:17.029494Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table
2025-03-31T16:15:17.029573Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics
2025-03-31T16:15:17.049117Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897
2025-03-31T16:15:17.065779Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 }
2025-03-31T16:15:17.065961Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657
2025-03-31T16:15:17.277980Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete
2025-03-31T16:15:17.452722Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking...
2025-03-31T16:15:17.551568Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing
2025-03-31T16:15:18.374270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480
2025-03-31T16:15:19.115210Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:15:19.349454Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899
2025-03-31T16:15:19.349529Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037899
2025-03-31T16:15:19.349670Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2595:2948], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037899
2025-03-31T16:15:19.352706Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2596:2949]
2025-03-31T16:15:19.353055Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2596:2949], schemeshard id = 72075186224037899
2025-03-31T16:15:20.788495Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2727:3245], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:15:20.788658Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:15:20.812535Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715661:0, at schemeshard: 72075186224037899
2025-03-31T16:15:21.243509Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2879:3089];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0;
2025-03-31T16:15:21.243772Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2879:3089];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules;
2025-03-31T16:15:21.244143Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2879:3089];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks;
2025-03-31T16:15:21.244350Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2879:3089];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner;
2025-03-31T16:15:21.244469Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2879:3089];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId;
2025-03-31T16:15:21.244599Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2879:3089];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup;
2025-03-31T16:15:21.244716Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2879:3089];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer;
2025-03-31T16:15:21.244834Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2879:3089];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks;
2025-03-31T16:15:21.244965Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2879:3089];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks;
2025-03-31T16 ... tablet_id=72075186224037914;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished;
2025-03-31T16:15:22.215603Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037912;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661;
2025-03-31T16:15:22.220795Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661;
2025-03-31T16:15:22.224222Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661;
2025-03-31T16:15:22.229520Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037914;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661;
2025-03-31T16:15:22.234739Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661;
2025-03-31T16:15:22.238376Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661;
2025-03-31T16:15:22.242163Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661;
2025-03-31T16:15:22.249426Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037911;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661;
2025-03-31T16:15:22.254914Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037910;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661;
2025-03-31T16:15:22.261014Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037913;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661;
2025-03-31T16:15:23.594489Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3550:3340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:15:23.594956Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:15:23.620882Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715662:0, at schemeshard: 72075186224037899
2025-03-31T16:15:24.324582Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037912;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715662;
2025-03-31T16:15:24.325619Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715662;
2025-03-31T16:15:24.326041Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715662;
2025-03-31T16:15:24.326777Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715662;
2025-03-31T16:15:24.327409Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037914;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715662;
2025-03-31T16:15:24.328444Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715662;
2025-03-31T16:15:24.328823Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715662;
2025-03-31T16:15:24.329496Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037911;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715662;
2025-03-31T16:15:24.329828Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037910;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715662;
2025-03-31T16:15:24.330166Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037913;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715662;
2025-03-31T16:15:25.655324Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:4339:3413], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:15:25.655525Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:15:25.672032Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715663:0, at schemeshard: 72075186224037899
2025-03-31T16:15:25.774800Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715663;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715663;
2025-03-31T16:15:25.775429Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715663;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715663;
2025-03-31T16:15:25.776409Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037912;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715663;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715663;
2025-03-31T16:15:25.777512Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715663;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715663;
2025-03-31T16:15:25.778217Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715663;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715663;
2025-03-31T16:15:25.778672Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715663;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715663;
2025-03-31T16:15:25.779273Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037914;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715663;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715663;
2025-03-31T16:15:25.780074Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037911;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715663;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715663;
2025-03-31T16:15:25.780566Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037910;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715663;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715663;
2025-03-31T16:15:25.781019Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037913;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715663;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715663;
waiting actualization: 0/0.000020s
2025-03-31T16:15:27.782598Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:4626:4476]
2025-03-31T16:15:27.785709Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:4622:3496] , Record { OperationId: "\000\000\000\000\034=\003\352\032\022\256n\\p\304\373" Tables { PathId { OwnerId: 72057594046644480 LocalId: 2 } } }
2025-03-31T16:15:27.785783Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. Create new force traversal operation, OperationId= =n\p
2025-03-31T16:15:27.785828Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. Create new force traversal table, OperationId= =n\p , PathId [OwnerId: 72057594046644480, LocalPathId: 2]
Answer: 'Analyze sent.
OperationId: 000000071x0fn1m4nedse71h7v'
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager;
>> KqpWorkloadService::TestCpuLoadThresholdRefresh [GOOD]
>> KqpWorkloadService::TestHandlerActorCleanup
|87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest
>> TConsoleTests::TestAuthorizationExtSubdomain [GOOD]
>> TConsoleTests::TestAttributesExtSubdomain
>> KqpWorkloadServiceTables::TestLeaseExpiration [GOOD]
>> KqpWorkloadServiceTables::TestLeaseUpdates
>> KqpMergeCn::SortBy_PK_Uint64_Desc [GOOD]
>> KqpMergeCn::SortBy_Int32
>> TConsoleTests::TestRemoveSharedTenantAfterRemoveServerlessTenant [GOOD]
>> TConsoleTests::TestRemoveServerlessTenant
>> THealthCheckTest::StaticGroupIssue
>> THealthCheckTest::SpecificServerless
>> THealthCheckTest::StorageLimit95
>> THealthCheckTest::Issues100Groups100VCardListing
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> ReadIteratorExternalBlobs::ExtBlobsMultipleColumns
2025-03-31 16:15:30,582 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out
2025-03-31 16:15:30,754 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination:
pid    rss    ref    pdirt
179640 46.1M  45.1M  23.1M  test_tool run_ut @/home/runner/.ya/build/build_root/0l5c/000c21/ydb/core/tx/datashard/ut_read_iterator/test-results/unittest/testing_out_stuff/chunk9/testing_out_stuff/test_to
179853 1.5G   1.4G   1.4G   └─ ydb-core-tx-datashard-ut_read_iterator --trace-path-append /home/runner/.ya/build/build_root/0l5c/000c21/ydb/core/tx/datashard/ut_read_iterator/test-results/unittest/tes
Test command err:
2025-03-31T16:05:36.428042Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-03-31T16:05:36.428420Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-31T16:05:36.428621Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000c21/r3tmp/tmppOcz9l/pdisk_1.dat
2025-03-31T16:05:37.045446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480
2025-03-31T16:05:37.112487Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:05:37.165265Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:05:37.165448Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:05:37.178544Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-31T16:05:37.273231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480
2025-03-31T16:05:37.321673Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot
2025-03-31T16:05:37.322944Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored
2025-03-31T16:05:37.323495Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570]
2025-03-31T16:05:37.323773Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute
2025-03-31T16:05:37.393941Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete
2025-03-31T16:05:37.394865Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete
2025-03-31T16:05:37.395000Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute
2025-03-31T16:05:37.396942Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888
2025-03-31T16:05:37.397029Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888
2025-03-31T16:05:37.397101Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888
2025-03-31T16:05:37.397608Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete
2025-03-31T16:05:37.397782Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute
2025-03-31T16:05:37.397893Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1
2025-03-31T16:05:37.409725Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete
2025-03-31T16:05:37.446850Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888
2025-03-31T16:05:37.447133Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params
2025-03-31T16:05:37.447305Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580]
2025-03-31T16:05:37.447349Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888
2025-03-31T16:05:37.447411Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme
2025-03-31T16:05:37.447471Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-03-31T16:05:37.447747Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction
2025-03-31T16:05:37.447805Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction
2025-03-31T16:05:37.448198Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888
2025-03-31T16:05:37.448327Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888
2025-03-31T16:05:37.448498Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888
2025-03-31T16:05:37.449098Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0
2025-03-31T16:05:37.449203Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888
2025-03-31T16:05:37.449253Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations
2025-03-31T16:05:37.449321Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888
2025-03-31T16:05:37.449361Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0
2025-03-31T16:05:37.449446Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888
2025-03-31T16:05:37.449603Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected
2025-03-31T16:05:37.449644Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected
2025-03-31T16:05:37.449725Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0]
2025-03-31T16:05:37.449848Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574]
2025-03-31T16:05:37.449908Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction
2025-03-31T16:05:37.450047Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888
2025-03-31T16:05:37.450345Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx
2025-03-31T16:05:37.450443Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1
2025-03-31T16:05:37.450545Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888
2025-03-31T16:05:37.450607Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts
2025-03-31T16:05:37.450666Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx
2025-03-31T16:05:37.450713Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx
2025-03-31T16:05:37.450763Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx
2025-03-31T16:05:37.451110Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts
2025-03-31T16:05:37.451157Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx
2025-03-31T16:05:37.451196Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose
2025-03-31T16:05:37.451235Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose
2025-03-31T16:05:37.451306Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete
2025-03-31T16:05:37.451348Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose
2025-03-31T16:05:37.451393Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan
2025-03-31T16:05:37.451428Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan
2025-03-31T16:05:37.451456Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan
2025-03-31T16:05:37.453324Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated
2025-03-31T16:05:37.453382Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888
2025-03-31T16:05:37.465894Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888
2025-03-31T16:05:37.465985Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx
2025-03-31T16:05:37.466029Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose
2025-03-31T16:05:37.466111Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED
2025-03-31T16:05:37.466195Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme
2025-03-31T16:05:37.638399Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected
2025-03-31T16:05:37.638473Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected
2025-03-31T16:05:37.638514Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0]
2025-03-31T16:05:37.638951Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:568:2495], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888}
2025-03-31T16:05:37.638995Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep
2025-03-31T16:05:37.639196Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan
2025-03-31T16:05:37.639248Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed
2025-03-31T16:05:37.639316Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan
2025-03-31T16:05:37.639364Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue
2025-03-31T16:05:37.653011Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 }
2025-03-31T16:05:37.655730Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-03-31T16:05:37.656617Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction
2025-03-31T16:05:37.656681Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction
2025-03-31T16:05:37.656751Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888
2025-03-31T16:05:3 ... anges)" severity: 1 }
2025-03-31T16:13:14.449628Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqpfmrmx9tzt052c76yn99xp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ZGM1ODdkMzYtM2EwZmIzMGItMjNhYTAwOTMtOWI3ZGU1OWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:13:19.449216Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqpfmzz2ftadd96za6h5cpfa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=Y2U4NTNiMTMtNWM4NGJjODEtOGMzZGRiNzktZGFiNjkzN2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:13:24.952113Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqpfn4vecnjhdxbb2j9frjbn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=MmYyMDMwZjctYTlkY2Q2NWQtNTFiNzEyZTAtNDYwYjIwZTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:13:30.377127Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqpfna8b7s5hg20kx8gwcy6h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=YTg1MTU3YWItZjA4MjU3ZmQtMmFkNmFjZWEtNjM1ZjhkOTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:13:35.823995Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jqpfnfgfbq23vx1km0km0sfy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ZTBiYTBlMGQtZGUwYzkyODUtMmQxNTNhY2ItYTIxMmMyYjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:13:40.858448Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jqpfnmvjcdfze3tqawsd8jad, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=MzQ1ZTNmMmUtYmRlMDJiMDItNTQ0ZDQzNTAtMmJlYjQ3MTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:13:45.974939Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jqpfnsr253yekax9v50rjb0r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=YjYxZjIyZjctNjJjNzllYTItZDExNDgwYjQtY2I3ZGY0MTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:13:51.034120Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jqpfnytvagxx0y00b7mn173c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NTM4ZDcwZmItODI2MmNlOGYtYjg5OTM5YzUtYmQzMmU5ZmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:13:55.795038Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jqpfp3p64vgg5nhc6vfp0a0v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NjM4ZDFmMTAtM2NhMDgyNmUtYzUzOTVlYjMtNmIzZmMwOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:14:00.268940Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jqpfp8ae7a84hz6h6c6x2mxg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=MmY3ODQ0NzQtOTU4M2VjOTItMmY1ODAzZTAtNGU4NTVhZTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:14:00.789080Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01jqpfpcy50c7029zt6b9cypbb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=OTcwNzIzYTAtMjllYTA0ZDMtNTU5NjM5YmYtYjYxNTY5ODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
... waiting for blocked results
2025-03-31T16:14:01.117530Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01jqpfpd39dcf497k593q28hy3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ZGVkNjA5NGItODY3MzE4MTItM2FjOTJlZmItODZiZDdk, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
... waiting for blocked results (done)
2025-03-31T16:14:06.668563Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [11:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-03-31T16:14:06.669007Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-31T16:14:06.669172Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000c21/r3tmp/tmpIesVvP/pdisk_1.dat
2025-03-31T16:14:07.043546Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480
2025-03-31T16:14:07.081696Z node 11 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:14:07.120526Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:14:07.120699Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:14:07.132549Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-31T16:14:07.220898Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480
2025-03-31T16:14:07.536326Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:742:2623], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:14:07.536458Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:752:2628], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:14:07.536568Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:14:07.544612Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480
2025-03-31T16:14:07.728934Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [11:756:2631], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking }
2025-03-31T16:14:07.764781Z node 11 :TX_PROXY ERROR: Actor# [11:830:2674] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-31T16:14:16.510904Z node 11 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqpfpkndadk2cj5wrqw14add, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=11&id=ZmIwMDMyOTYtMzExY2E5MjQtNjY2MWM5OTAtOTY5NWNjYjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:14:25.439710Z node 11 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqpfpwnw0p4ac1q4f1r879dc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=11&id=YThmMWEwNDAtOWNlOGEyNTktOTg0ZWRiYzAtZTQ3Y2Y2Y2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:14:33.834161Z node 11 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqpfq5dn62yfm37g8wfck16b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=11&id=OGY2MzEzOGYtNGI1YzExYjItMWIzMmVlZTktMTZkODY3ZTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:14:42.686531Z node 11 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqpfqdjnenbn2k3sqnbcfxwt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=11&id=Y2NhNWU4Mi05OGIxMDlhMC05ZWQ3YmViMC1hNjBiYzg4Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:14:52.153534Z node 11 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jqpfqp884ra2g085k601cva6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=11&id=ODJjZDRlM2QtNzcyODM1YzgtZGQ4MjVkNS1hYzc5NjIzZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:15:01.463466Z node 11 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jqpfqzfg2s7kjx2jgv006eeh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=11&id=YTM3MTA4Y2EtYThkODAyNDEtYzgwYWQxMWYtMjRiMjdhZTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:15:10.549465Z node 11 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jqpfr8k30ahj78dbdctwjdej, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=11&id=ZDViMzBkNDEtN2Q4ZDM0MGMtZDNlMzIyODMtNDBkY2NmZmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:15:19.561420Z node 11 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jqpfrhek4ta4pvwz9kr6yah2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=11&id=ZTIxMTA0YTEtZDA2ZGY1ZGQtMWMyMmYyNDMtZjZhNjAyYTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-31T16:15:28.241658Z node 11 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jqpfrtah2rheh32vaj54j9yp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=11&id=MjY2MmY3Zi0yMjA1ODVjNC1hY2QyNGFhMy04ZDU3MGU3ZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
Traceback (most recent call last):
  File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait
    wait_for(
  File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for
    raise TimeoutError(truncate(message, MAX_MESSAGE_LEN))
yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8330113388/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/0l5c/000c21/ydb/core/tx/datashard/ut_read_iterator/test-results/unittest/testing_out_stuff/chunk9/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1749, in main
    res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback)
  File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait
    raise ExecutionTimeoutError(self, str(e))
yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8330113388/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/0l5c/000c21/ydb/core/tx/datashard/ut_read_iterator/test-results/unittest/testing_out_stuff/chunk9/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout",), {})
>> Viewer::JsonAutocompleteEmptyColumns [GOOD]
>> Viewer::JsonAutocompleteColumnsPOST
|87.2%| [TA] $(B)/ydb/core/tx/datashard/ut_read_iterator/test-results/unittest/{meta.json ... results_accumulator.log}
|87.2%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_iterator/test-results/unittest/{meta.json ... results_accumulator.log}
>> THealthCheckTest::RedGroupIssueWhenDisintegratedGroupStatus
>> Viewer::QueryExecuteScript [FAIL]
>> Viewer::Plan2SvgOK
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::SequentialReadsPragma+Enabled [GOOD]
Test command err:
Trying to start YDB, gRPC: 28727, MsgBus: 2764
2025-03-31T16:14:45.552263Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488007840019016201:2067];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:14:45.552309Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000acb/r3tmp/tmp0Fy62T/pdisk_1.dat
2025-03-31T16:14:46.008299Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:14:46.041592Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:14:46.043634Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:14:46.050890Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 28727, node 1
2025-03-31T16:14:46.259798Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T16:14:46.259824Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T16:14:46.259831Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T16:14:46.259998Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:2764
TClient is connected to server localhost:2764
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-31T16:14:47.205781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:14:47.232281Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-03-31T16:14:48.810950Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007852903918749:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:14:48.811059Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:14:48.811183Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007852903918761:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:14:48.826491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480
2025-03-31T16:14:48.843645Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007852903918763:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:14:48.936321Z node 1 :TX_PROXY ERROR: Actor# [1:7488007852903918814:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 15468, MsgBus: 23193 2025-03-31T16:14:50.470198Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488007864758712035:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:14:50.470260Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000acb/r3tmp/tmp2C5BKu/pdisk_1.dat 2025-03-31T16:14:50.561605Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:14:50.610467Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:14:50.610551Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:14:50.616197Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15468, node 2 2025-03-31T16:14:50.662380Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:14:50.662405Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:14:50.662410Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:14:50.662552Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23193 TClient is connected to server localhost:23193 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:14:51.125818Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:14:51.138030Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:14:51.145638Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:14:51.343154Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:14:51.526858Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:14:51.603040Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:14:53.676100Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488007877643615674:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:53.676193Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:53.764874Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:14:53.840001Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:14:53.879375Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:14:53.919444Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:14:53.974691Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:14:54.032503Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:14:54.083733Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488007881938583486:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:54.083803Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:54.083875Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488007881938583491:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:54.087389Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 20 ... [WorkloadService] [TPoolFetcherActor] ActorId: [5:7488008018832061111:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:26.165046Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:26.211480Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:15:26.251411Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:15:26.292792Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:15:26.326705Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:15:26.376935Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:15:26.427232Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:15:26.532786Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7488008018832061630:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:26.532875Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7488008018832061635:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:26.532919Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:26.537176Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:15:26.548131Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7488008018832061637:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:15:26.645923Z node 5 :TX_PROXY ERROR: Actor# [5:7488008018832061693:3453] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:15:27.153782Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7488008001652190160:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:15:27.153890Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 9446, MsgBus: 24656 2025-03-31T16:15:29.421884Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7488008031847972702:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:15:29.421988Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000acb/r3tmp/tmpHpKS0W/pdisk_1.dat 2025-03-31T16:15:29.553649Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:15:29.575401Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:15:29.575513Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:15:29.577515Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9446, node 6 2025-03-31T16:15:29.625109Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:15:29.625138Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:15:29.625147Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:15:29.625312Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24656 TClient is connected to server localhost:24656 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:15:30.242952Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:15:30.260595Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:15:30.337357Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:15:30.535578Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:15:30.621227Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:15:33.382295Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488008049027843662:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:33.382452Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:33.422589Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:15:33.465599Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:15:33.501986Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:15:33.537822Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:15:33.573804Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:15:33.637239Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:15:33.684846Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488008049027844174:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:33.684956Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:33.685146Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488008049027844179:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:33.689503Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:15:33.700476Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7488008049027844181:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:15:33.776667Z node 6 :TX_PROXY ERROR: Actor# [6:7488008049027844234:3449] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:15:34.422155Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7488008031847972702:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:15:34.422259Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TConsoleTests::TestAttributesExtSubdomain [GOOD] >> TConsoleTests::TestDatabaseQuotas >> Viewer::ServerlessNodesPage [GOOD] >> Viewer::ServerlessWithExclusiveNodes >> TConsoleTests::TestRemoveServerlessTenant [GOOD] >> TConsoleTests::TestRegisterComputationalUnitsForPending >> THealthCheckTest::Basic >> THealthCheckTest::SpecificServerless [GOOD] >> THealthCheckTest::SharedWhenTroublesWithExclusiveNodes >> Viewer::JsonAutocompleteSimilarDatabaseNameLowerCase [GOOD] >> Viewer::JsonAutocompleteSchemePOST >> KqpMergeCn::SortBy_Int32 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpMergeCn::SortBy_Int32 [GOOD] Test command err: Trying to start YDB, gRPC: 19900, MsgBus: 24964 2025-03-31T16:14:46.555362Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488007847176123654:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:14:46.556654Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000aca/r3tmp/tmp21L72Q/pdisk_1.dat 2025-03-31T16:14:46.943409Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:14:46.957147Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:14:46.957229Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:14:46.958682Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19900, node 1 2025-03-31T16:14:47.100301Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:14:47.100340Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:14:47.100348Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:14:47.100460Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24964 TClient is connected to server localhost:24964 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:14:47.743887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:14:47.766843Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:14:47.792146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:14:47.979019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:14:48.143238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:14:48.218048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:14:49.929308Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007860061027324:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:49.929496Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:50.401160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:14:50.442158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:14:50.469869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:14:50.505987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:14:50.543649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:14:50.584805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:14:50.642488Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007864355995134:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:50.642554Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:50.642699Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007864355995139:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:50.645473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:14:50.655428Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007864355995141:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:14:50.720844Z node 1 :TX_PROXY ERROR: Actor# [1:7488007864355995194:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:14:51.554996Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488007847176123654:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:14:51.555069Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:14:51.744598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:14:52.859337Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437692850, txId: 281474976710673] shutting down Trying to start YDB, gRPC: 28078, MsgBus: 2200 2025-03-31T16:14:53.695533Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488007875088199696:2239];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:14:53.704367Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000aca/r3tmp/tmpDH5yjj/pdisk_1.dat 2025-03-31T16:14:53.839644Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28078, node 2 2025-03-31T16:14:53.877717Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:14:53.877825Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:14:53.882699Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:14:53.935687Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:14:53.935710Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:14:53.935718Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:14:53.935852Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2200 TClient is connected to server localhost:2200 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:14:54.432483Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:14:54.439332Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:14:54.466589Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:14:54.541465Z node 2 :FLAT_TX_SCHEMESHARD WAR ... T16:15:28.629339Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:15:28.671379Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:15:28.709514Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:15:28.747403Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:15:28.833222Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488008027679244066:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:28.833328Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488008027679244071:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:28.833335Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:28.837184Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:15:28.848446Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7488008027679244073:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:15:28.922715Z node 6 :TX_PROXY ERROR: Actor# [6:7488008027679244128:3451] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:15:29.251936Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7488008010499372582:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:15:29.252041Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:15:30.133450Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:15:31.113979Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437731140, txId: 281474976715673] shutting down Trying to start YDB, gRPC: 12305, MsgBus: 5709 2025-03-31T16:15:32.181535Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7488008043335103877:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:15:32.181692Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000aca/r3tmp/tmp0oq3nF/pdisk_1.dat 2025-03-31T16:15:32.328115Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:15:32.358119Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:15:32.358236Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:15:32.359591Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12305, node 7 2025-03-31T16:15:32.410607Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:15:32.410640Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:15:32.410650Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:15:32.410847Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5709 TClient is connected to server localhost:5709 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:15:33.110234Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:15:33.116426Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:15:33.130808Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:15:33.213072Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:15:33.441615Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:15:33.527589Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:15:36.447035Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488008060514974836:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:36.447185Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:36.510305Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:15:36.581081Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:15:36.652627Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:15:36.687851Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:15:36.726178Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:15:36.766722Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:15:36.813708Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488008060514975350:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:36.813802Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:36.813821Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488008060514975355:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:36.818347Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:15:36.830345Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7488008060514975357:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:15:36.886759Z node 7 :TX_PROXY ERROR: Actor# [7:7488008060514975410:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:15:37.181814Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7488008043335103877:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:15:37.181928Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:15:38.020690Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:15:38.949512Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437738980, txId: 281474976715673] shutting down >> THealthCheckTest::StorageLimit95 [GOOD] >> THealthCheckTest::StorageNoQuota >> HttpRequest::Analyze [GOOD] >> THealthCheckTest::Issues100Groups100VCardListing [GOOD] >> THealthCheckTest::GreenStatusWhenInitPending >> THealthCheckTest::StaticGroupIssue [GOOD] >> THealthCheckTest::StorageLimit87 >> THealthCheckTest::Basic [GOOD] >> THealthCheckTest::BasicNodeCheckRequest >> KqpWorkloadServiceDistributed::TestDistributedLessConcurrentQueryLimit [GOOD] >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscription >> THealthCheckTest::YellowGroupIssueWhenPartialGroupStatus ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> HttpRequest::Analyze [GOOD] Test command err: 2025-03-31T16:15:27.570744Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:27.571219Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:27.571370Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000cd3/r3tmp/tmp1J3M9b/pdisk_1.dat 2025-03-31T16:15:27.958136Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12884, node 1 2025-03-31T16:15:28.194305Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:15:28.194363Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:15:28.194405Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:15:28.194927Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:15:28.197230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:15:28.293426Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:15:28.293623Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:15:28.308674Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29354 2025-03-31T16:15:28.826263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:15:31.967630Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-31T16:15:32.002820Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:15:32.002936Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:15:32.031739Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:15:32.034302Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:15:32.265953Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:32.266476Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:32.267148Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:32.267301Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:32.267530Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:32.267649Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:32.267703Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:32.267753Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:32.267814Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:32.429635Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:15:32.429751Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:15:32.442983Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:15:32.603180Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:15:32.660515Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-31T16:15:32.660631Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-31T16:15:32.694028Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-31T16:15:32.695324Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-31T16:15:32.695552Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-31T16:15:32.695623Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-31T16:15:32.695688Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-31T16:15:32.695735Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-31T16:15:32.695787Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-31T16:15:32.695848Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-31T16:15:32.696317Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-31T16:15:32.731658Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-31T16:15:32.731789Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1872:2596], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-31T16:15:32.738485Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1883:2604] 2025-03-31T16:15:32.744021Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1915:2621] 2025-03-31T16:15:32.744106Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1915:2621], schemeshard id = 72075186224037897 2025-03-31T16:15:32.747522Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-31T16:15:32.770975Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-31T16:15:32.771028Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-31T16:15:32.771107Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-31T16:15:32.784831Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-31T16:15:32.792429Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-31T16:15:32.792595Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-31T16:15:33.025411Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-31T16:15:33.198060Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-31T16:15:33.303415Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-31T16:15:34.359270Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2236:3073], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:34.359394Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:34.380954Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-31T16:15:34.709082Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:15:34.709314Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:15:34.709612Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:15:34.709749Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:15:34.709861Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:15:34.710001Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:15:34.710128Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:15:34.710239Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:15:34.710354Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:15:34.710466Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:15:34.710586Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:15:34.710715Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:15:34.740453Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2397:2889];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:15:34.740545Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2397:2889];tablet_id=72075186224037900;process= ... ARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T16:15:35.580928Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715659; 2025-03-31T16:15:35.587841Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037900;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715659; 2025-03-31T16:15:35.592618Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715659; 2025-03-31T16:15:35.598231Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037902;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715659; 2025-03-31T16:15:35.602061Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715659; 2025-03-31T16:15:35.608271Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715659; 2025-03-31T16:15:35.612794Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037904;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715659; 2025-03-31T16:15:35.616753Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715659; 2025-03-31T16:15:35.621452Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715659; 2025-03-31T16:15:35.626617Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715659; 2025-03-31T16:15:36.862444Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3181:3177], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:36.900969Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:36.924132Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715660:0, at schemeshard: 72075186224037897 2025-03-31T16:15:37.449251Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715660; 2025-03-31T16:15:37.449937Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037900;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715660; 2025-03-31T16:15:37.450539Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715660; 2025-03-31T16:15:37.451047Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037902;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715660; 2025-03-31T16:15:37.451542Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715660; 2025-03-31T16:15:37.453307Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715660; 2025-03-31T16:15:37.454215Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037904;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715660; 2025-03-31T16:15:37.454851Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715660; 2025-03-31T16:15:37.455367Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715660; 2025-03-31T16:15:37.455927Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715660; 2025-03-31T16:15:38.283877Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3804:3233], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:38.284040Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:38.300405Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715661:0, at schemeshard: 72075186224037897 2025-03-31T16:15:38.353888Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-31T16:15:38.354553Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037900;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-31T16:15:38.355140Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-31T16:15:38.355674Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037902;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-31T16:15:38.356220Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-31T16:15:38.358619Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-31T16:15:38.359437Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-31T16:15:38.359938Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-31T16:15:38.360428Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037904;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-31T16:15:38.361223Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; waiting actualization: 0/0.000019s 2025-03-31T16:15:40.306115Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:4092:4242] 2025-03-31T16:15:40.308322Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:4089:3319] , Record { OperationId: "\000\000\000\000\026[c\253Y;\363Q\226s\n\341" Tables { PathId { OwnerId: 72075186224037897 LocalId: 4 } } } 2025-03-31T16:15:40.308377Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. Create new force traversal operation, OperationId=[cY;Qs 2025-03-31T16:15:40.308410Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. Create new force traversal table, OperationId=[cY;Qs , PathId [OwnerId: 72075186224037897, LocalPathId: 4] Answer: 'Analyze sent. 
OperationId: 00000005jvcennjezka6b762q1'
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager;
[the two WARN records above repeat verbatim, back to back, for the remainder of this block]
>> THealthCheckTest::RedGroupIssueWhenDisintegratedGroupStatus [GOOD]
>> THealthCheckTest::RedGroupIssueOnRedSpace
>> TConsoleTests::TestDatabaseQuotas [GOOD]
>> TConsoleTests::TestDatabaseQuotasBadOverallQuota
>> ResourcePoolClassifiersSysView::TestResourcePoolClassifiersSysViewFilters [GOOD]
>> THealthCheckTest::SharedWhenTroublesWithExclusiveNodes [GOOD]
>> THealthCheckTest::ShardsLimit999
>> THealthCheckTest::OneIssueListing
>> TConsoleTests::TestRegisterComputationalUnitsForPending [GOOD]
>> TConsoleTests::TestNotifyOperationCompletion
>> Viewer::Plan2SvgOK [GOOD]
>> Viewer::Plan2SvgBad
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >>
ResourcePoolClassifiersSysView::TestResourcePoolClassifiersSysViewFilters [GOOD] Test command err: 2025-03-31T16:14:04.835803Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488007663867954520:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:14:04.835876Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001182/r3tmp/tmpJSg9lV/pdisk_1.dat 2025-03-31T16:14:05.225769Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:14:05.263211Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:14:05.263326Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:14:05.267868Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19282, node 1 2025-03-31T16:14:05.451763Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:14:05.451786Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:14:05.451798Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:14:05.451920Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19293 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:14:05.973605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
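Note on the NOT_FOUND warnings that recur throughout this run ("Resource pool default not found or you don't have access permissions"): they are emitted while the workload service probes the built-in `default` pool before any resource pool has been created; the suite then provisions its own pool, `sample_pool_id`, as the records below show. For reference, a minimal YQL sketch of that provisioning step; the GRANT statements mirror the ones visible in the log below, while the pool settings and their values are illustrative assumptions, not taken from this run:

    CREATE RESOURCE POOL sample_pool_id WITH (
        CONCURRENT_QUERY_LIMIT = 10,  -- illustrative value, not from this run
        QUEUE_SIZE = 100              -- illustrative value, not from this run
    );
    GRANT DESCRIBE SCHEMA ON `/Root` TO `user@test`;
    GRANT DESCRIBE SCHEMA, SELECT ROW ON `/Root/.metadata/workload_manager/pools/sample_pool_id` TO `user@test`;

Once a pool exists, the same fetch path is expected to resolve it; lookups of `default` keep returning NOT_FOUND until that pool is created.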
2025-03-31T16:14:07.895349Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-03-31T16:14:07.898404Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=Mjg3N2EwYTYtMThiZTIwZDAtZTc5NDZmNjItOGI2NDg4YjA=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id Mjg3N2EwYTYtMThiZTIwZDAtZTc5NDZmNjItOGI2NDg4YjA= 2025-03-31T16:14:07.898980Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 1 2025-03-31T16:14:07.899004Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-03-31T16:14:07.899038Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2025-03-31T16:14:07.906373Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7488007676752857046:2329], Start check tables existence, number paths: 2 2025-03-31T16:14:07.906503Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=Mjg3N2EwYTYtMThiZTIwZDAtZTc5NDZmNjItOGI2NDg4YjA=, ActorId: [1:7488007676752857054:2330], ActorState: unknown state, session actor bootstrapped 2025-03-31T16:14:07.907695Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7488007676752857046:2329], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-03-31T16:14:07.907747Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7488007676752857046:2329], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-03-31T16:14:07.907805Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7488007676752857046:2329], Successfully finished 2025-03-31T16:14:07.907868Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-03-31T16:14:07.917143Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007676752857071:2303], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-03-31T16:14:07.920626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:14:07.923883Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007676752857071:2303], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2025-03-31T16:14:07.926295Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007676752857071:2303], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2025-03-31T16:14:07.932772Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007676752857071:2303], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:14:08.025942Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007676752857071:2303], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-03-31T16:14:08.031116Z node 1 :TX_PROXY ERROR: Actor# [1:7488007681047824418:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:14:08.031458Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007676752857071:2303], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2025-03-31T16:14:08.039579Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: default 2025-03-31T16:14:08.039611Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root 2025-03-31T16:14:08.039684Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=Mjg3N2EwYTYtMThiZTIwZDAtZTc5NDZmNjItOGI2NDg4YjA=, ActorId: [1:7488007676752857054:2330], ActorState: ReadyState, TraceId: 01jqpfpm57fb8vtxj0qdfn3btr, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DDL text: GRANT DESCRIBE SCHEMA ON `/Root` TO `user@test`; GRANT DESCRIBE SCHEMA, SELECT ROW ON `/Root/.metadata/workload_manager/pools/sample_pool_id` TO `user@test`; rpcActor: [0:0:0] database: /Root databaseId: /Root pool id: default 2025-03-31T16:14:08.039696Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007681047824427:2332], DatabaseId: /Root, PoolId: default, Start pool fetching 2025-03-31T16:14:08.041428Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007681047824427:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:08.041547Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:08.362202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:14:08.368293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:14:08.370367Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=Mjg3N2EwYTYtMThiZTIwZDAtZTc5NDZmNjItOGI2NDg4YjA=, ActorId: [1:7488007676752857054:2330], ActorState: ExecuteState, TraceId: 01jqpfpm57fb8vtxj0qdfn3btr, Cleanup start, isFinal: 0 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [1:7488007681047824436:2330] WorkloadServiceCleanup: 0 2025-03-31T16:14:08.373555Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=Mjg3N2EwYTYtMThiZTIwZDAtZTc5NDZmNjItOGI2NDg4YjA=, ActorId: [1:7488007676752857054:2330], ActorState: CleanupState, TraceId: 01jqpfpm57fb8vtxj0qdfn3btr, EndCleanup, isFinal: 0 2025-03-31T16:14:08.373659Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=Mjg3N2EwYTYtMThiZTIwZDAtZTc5NDZmNjItOGI2NDg4YjA=, ActorId: [1:7488007676752857054:2330], ActorState: CleanupState, TraceId: 01jqpfpm57fb8vtxj0qdfn3btr, Sent query response back to proxy, proxyRequestId: 3, proxyId: [1:7488007663867954763:2277] 2025-03-31T16:14:08.380901Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YWY0MzNjOWMtN2U0MDNhODMtZjU5MDJjM2EtZWUwZWZmNTc=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id YWY0MzNjOWMtN2U0MDNhODMtZjU5MDJjM2EtZWUwZWZmNTc= 2025-03-31T16:14:08.381024Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YWY0MzNjOWMtN2U0MDNhODMtZjU5MDJjM2EtZWUwZWZmNTc=, ActorId: [1:7488007681047824466:2335], ActorState: unknown state, session actor bootstrapped 2025-03-31T16:14:08.381185Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2025-03-31T16:14:08.381250Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007681047824468:2336], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-03-31T16:14:08.381258Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YWY0MzNjOWMtN2U0MDNhODMtZjU5MDJjM2EtZWUwZWZmNTc=, ActorId: [1:7488007681047824466:2335], ActorState: ReadyState, TraceId: 01jqpfpmfx034rm1v0chx11q3v, received request, proxyRequestId: 4 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: DROP RESOURCE POOL CLASSIFIER MyResourcePoolClassifier rpcActor: [1:7488007681047824465:2361] database: Root databaseId: /Root pool id: sample_pool_id 2025-03-31T16:14:08.381345Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved new request from [1:7488007681047824466:2335], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=YWY0MzNjOWMtN2U0MDNhODMtZjU5MDJjM2EtZWUwZWZmNTc= 2025-03-31T16:14:08.381404Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7488007681047824469:2337], Database: /Root, Start database fetching 2025-03-31T16:14:08.381586Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007681047824468:2336], DatabaseId: /Root, 
PoolId: sample_pool_id, Pool info successfully fetched 2025-03-31T16:14:08. ... :42.505351Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=YzQzMzk1NDktNTk5MWMyNTEtYWY0OThkYy1lZTRkN2Nj, ActorId: [9:7488008088173019886:2935], ActorState: ReadyState, TraceId: 01jqpfsgd9e52rp616b7mtrf35, received request, proxyRequestId: 90 prepared: 0 tx_control: 1 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DML text: SELECT * FROM `//Root/test-shared/.metadata/workload_manager/classifiers/resource_pool_classifiers`; rpcActor: [9:7488008088173019887:2936] database: /Root/test-shared databaseId: /Root/test-shared pool id: default 2025-03-31T16:15:42.505395Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=YzQzMzk1NDktNTk5MWMyNTEtYWY0OThkYy1lZTRkN2Nj, ActorId: [9:7488008088173019886:2935], ActorState: ReadyState, TraceId: 01jqpfsgd9e52rp616b7mtrf35, request placed into pool from cache: default 2025-03-31T16:15:42.505501Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=YzQzMzk1NDktNTk5MWMyNTEtYWY0OThkYy1lZTRkN2Nj, ActorId: [9:7488008088173019886:2935], ActorState: ExecuteState, TraceId: 01jqpfsgd9e52rp616b7mtrf35, Sending CompileQuery request 2025-03-31T16:15:42.517966Z node 9 :SCHEME_BOARD_SUBSCRIBER WARN: [main][9:7488008036633409615:2656][/Root/test-shared/.metadata/workload_manager/classifiers/resource_pool_classifiers] Sync is done: cookie# 42, size# 3, half# 1, successes# 0, faulires# 2, partial# 1 2025-03-31T16:15:42.518060Z node 9 :SCHEME_BOARD_SUBSCRIBER WARN: [main][9:7488008036633409615:2656][/Root/test-shared/.metadata/workload_manager/classifiers/resource_pool_classifiers] Sync is done: cookie# 43, size# 3, half# 1, successes# 0, faulires# 2, partial# 1 2025-03-31T16:15:42.518926Z node 9 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [9:7488008088173019889:2937], status: UNAVAILABLE, issues:
: Error: Table metadata loading, code: 1050
:1:1: Error: Failed to load metadata for table: db.[//Root/test-shared/.metadata/workload_manager/classifiers/resource_pool_classifiers]
: Error: LookupError, code: 2005 2025-03-31T16:15:42.519176Z node 9 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=9&id=YzQzMzk1NDktNTk5MWMyNTEtYWY0OThkYy1lZTRkN2Nj, ActorId: [9:7488008088173019886:2935], ActorState: ExecuteState, TraceId: 01jqpfsgd9e52rp616b7mtrf35, ReplyQueryCompileError, status UNAVAILABLE remove tx with tx_id: 2025-03-31T16:15:42.519230Z node 9 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=9&id=YzQzMzk1NDktNTk5MWMyNTEtYWY0OThkYy1lZTRkN2Nj, ActorId: [9:7488008088173019886:2935], ActorState: ExecuteState, TraceId: 01jqpfsgd9e52rp616b7mtrf35, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-03-31T16:15:42.519264Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=YzQzMzk1NDktNTk5MWMyNTEtYWY0OThkYy1lZTRkN2Nj, ActorId: [9:7488008088173019886:2935], ActorState: ExecuteState, TraceId: 01jqpfsgd9e52rp616b7mtrf35, EndCleanup, isFinal: 0 2025-03-31T16:15:42.519410Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=YzQzMzk1NDktNTk5MWMyNTEtYWY0OThkYy1lZTRkN2Nj, ActorId: [9:7488008088173019886:2935], ActorState: ExecuteState, TraceId: 01jqpfsgd9e52rp616b7mtrf35, Sent query response back to proxy, proxyRequestId: 90, proxyId: [9:7488008015158572261:2277] 2025-03-31T16:15:42.520106Z node 9 :METADATA_PROVIDER ERROR: fline=request_actor_cb.h:34;event=unexpected reply;response=operation { ready: true status: UNAVAILABLE issues { message: "Table metadata loading" issue_code: 1050 severity: 1 issues { position { row: 1 column: 1 } message: "Failed to load metadata for table: db.[//Root/test-shared/.metadata/workload_manager/classifiers/resource_pool_classifiers]" end_position { row: 1 column: 1 } severity: 1 issues { message: "LookupError" issue_code: 2005 severity: 1 } } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ; 2025-03-31T16:15:42.520380Z node 9 :METADATA_PROVIDER ERROR: accessor_snapshot_base.cpp:16 :cannot construct snapshot: on request failed:
: Error: Table metadata loading, code: 1050
:1:1: Error: Failed to load metadata for table: db.[//Root/test-shared/.metadata/workload_manager/classifiers/resource_pool_classifiers]
: Error: LookupError, code: 2005 2025-03-31T16:15:42.520511Z node 9 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=9&id=YzQzMzk1NDktNTk5MWMyNTEtYWY0OThkYy1lZTRkN2Nj, ActorId: [9:7488008088173019886:2935], ActorState: ReadyState, Session closed due to explicit close event 2025-03-31T16:15:42.520549Z node 9 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=9&id=YzQzMzk1NDktNTk5MWMyNTEtYWY0OThkYy1lZTRkN2Nj, ActorId: [9:7488008088173019886:2935], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-03-31T16:15:42.520572Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=YzQzMzk1NDktNTk5MWMyNTEtYWY0OThkYy1lZTRkN2Nj, ActorId: [9:7488008088173019886:2935], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-03-31T16:15:42.520596Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=YzQzMzk1NDktNTk5MWMyNTEtYWY0OThkYy1lZTRkN2Nj, ActorId: [9:7488008088173019886:2935], ActorState: unknown state, Cleanup temp tables: 0 2025-03-31T16:15:42.520663Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=YzQzMzk1NDktNTk5MWMyNTEtYWY0OThkYy1lZTRkN2Nj, ActorId: [9:7488008088173019886:2935], ActorState: unknown state, Session actor destroyed 2025-03-31T16:15:42.691738Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=MzRmZjZhMzgtZmQ5YjgzNDItZDliYTYwMTMtMzFiY2IwODk=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id MzRmZjZhMzgtZmQ5YjgzNDItZDliYTYwMTMtMzFiY2IwODk= 2025-03-31T16:15:42.691866Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=MzRmZjZhMzgtZmQ5YjgzNDItZDliYTYwMTMtMzFiY2IwODk=, ActorId: [9:7488008088173019896:2941], ActorState: unknown state, session actor bootstrapped 2025-03-31T16:15:42.692458Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=MzRmZjZhMzgtZmQ5YjgzNDItZDliYTYwMTMtMzFiY2IwODk=, ActorId: [9:7488008088173019896:2941], ActorState: ReadyState, TraceId: 01jqpfsgk4d11emxf6zx91y76k, received request, proxyRequestId: 92 prepared: 0 tx_control: 1 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DML text: SELECT * FROM `//Root/test-shared/.metadata/workload_manager/classifiers/resource_pool_classifiers`; rpcActor: [9:7488008088173019897:2942] database: /Root/test-shared databaseId: /Root/test-shared pool id: default 2025-03-31T16:15:42.692503Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=MzRmZjZhMzgtZmQ5YjgzNDItZDliYTYwMTMtMzFiY2IwODk=, ActorId: [9:7488008088173019896:2941], ActorState: ReadyState, TraceId: 01jqpfsgk4d11emxf6zx91y76k, request placed into pool from cache: default 2025-03-31T16:15:42.692626Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=MzRmZjZhMzgtZmQ5YjgzNDItZDliYTYwMTMtMzFiY2IwODk=, ActorId: [9:7488008088173019896:2941], ActorState: ExecuteState, TraceId: 01jqpfsgk4d11emxf6zx91y76k, Sending CompileQuery request 2025-03-31T16:15:42.703833Z node 9 :SCHEME_BOARD_SUBSCRIBER WARN: [main][9:7488008036633409615:2656][/Root/test-shared/.metadata/workload_manager/classifiers/resource_pool_classifiers] Sync is done: cookie# 44, size# 3, half# 1, successes# 0, faulires# 2, partial# 1 2025-03-31T16:15:42.703957Z node 9 :SCHEME_BOARD_SUBSCRIBER WARN: [main][9:7488008036633409615:2656][/Root/test-shared/.metadata/workload_manager/classifiers/resource_pool_classifiers] Sync is done: cookie# 45, size# 3, half# 1, successes# 0, faulires# 2, partial# 1 2025-03-31T16:15:42.704908Z node 9 :KQP_COMPILE_ACTOR ERROR: Compilation 
failed, self: [9:7488008088173019899:2943], status: UNAVAILABLE, issues:
: Error: Table metadata loading, code: 1050
:1:1: Error: Failed to load metadata for table: db.[//Root/test-shared/.metadata/workload_manager/classifiers/resource_pool_classifiers]
: Error: LookupError, code: 2005 2025-03-31T16:15:42.705172Z node 9 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=9&id=MzRmZjZhMzgtZmQ5YjgzNDItZDliYTYwMTMtMzFiY2IwODk=, ActorId: [9:7488008088173019896:2941], ActorState: ExecuteState, TraceId: 01jqpfsgk4d11emxf6zx91y76k, ReplyQueryCompileError, status UNAVAILABLE remove tx with tx_id: 2025-03-31T16:15:42.705213Z node 9 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=9&id=MzRmZjZhMzgtZmQ5YjgzNDItZDliYTYwMTMtMzFiY2IwODk=, ActorId: [9:7488008088173019896:2941], ActorState: ExecuteState, TraceId: 01jqpfsgk4d11emxf6zx91y76k, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-03-31T16:15:42.705241Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=MzRmZjZhMzgtZmQ5YjgzNDItZDliYTYwMTMtMzFiY2IwODk=, ActorId: [9:7488008088173019896:2941], ActorState: ExecuteState, TraceId: 01jqpfsgk4d11emxf6zx91y76k, EndCleanup, isFinal: 0 2025-03-31T16:15:42.705416Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=MzRmZjZhMzgtZmQ5YjgzNDItZDliYTYwMTMtMzFiY2IwODk=, ActorId: [9:7488008088173019896:2941], ActorState: ExecuteState, TraceId: 01jqpfsgk4d11emxf6zx91y76k, Sent query response back to proxy, proxyRequestId: 92, proxyId: [9:7488008015158572261:2277] 2025-03-31T16:15:42.706324Z node 9 :METADATA_PROVIDER ERROR: fline=request_actor_cb.h:34;event=unexpected reply;response=operation { ready: true status: UNAVAILABLE issues { message: "Table metadata loading" issue_code: 1050 severity: 1 issues { position { row: 1 column: 1 } message: "Failed to load metadata for table: db.[//Root/test-shared/.metadata/workload_manager/classifiers/resource_pool_classifiers]" end_position { row: 1 column: 1 } severity: 1 issues { message: "LookupError" issue_code: 2005 severity: 1 } } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ; 2025-03-31T16:15:42.706798Z node 9 :METADATA_PROVIDER ERROR: accessor_snapshot_base.cpp:16 :cannot construct snapshot: on request failed:
: Error: Table metadata loading, code: 1050
:1:1: Error: Failed to load metadata for table: db.[//Root/test-shared/.metadata/workload_manager/classifiers/resource_pool_classifiers]
: Error: LookupError, code: 2005
2025-03-31T16:15:42.706882Z node 9 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=9&id=MzRmZjZhMzgtZmQ5YjgzNDItZDliYTYwMTMtMzFiY2IwODk=, ActorId: [9:7488008088173019896:2941], ActorState: ReadyState, Session closed due to explicit close event
2025-03-31T16:15:42.706926Z node 9 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=9&id=MzRmZjZhMzgtZmQ5YjgzNDItZDliYTYwMTMtMzFiY2IwODk=, ActorId: [9:7488008088173019896:2941], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0
2025-03-31T16:15:42.706960Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=MzRmZjZhMzgtZmQ5YjgzNDItZDliYTYwMTMtMzFiY2IwODk=, ActorId: [9:7488008088173019896:2941], ActorState: ReadyState, EndCleanup, isFinal: 1
2025-03-31T16:15:42.706995Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=MzRmZjZhMzgtZmQ5YjgzNDItZDliYTYwMTMtMzFiY2IwODk=, ActorId: [9:7488008088173019896:2941], ActorState: unknown state, Cleanup temp tables: 0
2025-03-31T16:15:42.707112Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=MzRmZjZhMzgtZmQ5YjgzNDItZDliYTYwMTMtMzFiY2IwODk=, ActorId: [9:7488008088173019896:2941], ActorState: unknown state, Session actor destroyed
>> THealthCheckTest::BasicNodeCheckRequest [GOOD]
>> THealthCheckTest::BlueGroupIssueWhenPartialGroupStatusAndReplicationDisks
>> HttpRequest::Status [GOOD]
>> Viewer::JsonAutocompleteColumnsPOST [GOOD]
>> TConsoleTests::TestDatabaseQuotasBadOverallQuota [GOOD]
>> TConsoleTests::TestDatabaseQuotasBadStorageQuota
------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> HttpRequest::Status [GOOD]
Test command err: 2025-03-31T16:15:30.873152Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:30.873554Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:30.873701Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000cba/r3tmp/tmpSmd1nB/pdisk_1.dat 2025-03-31T16:15:31.293435Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20061, node 1 2025-03-31T16:15:31.527732Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:15:31.527792Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:15:31.527831Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:15:31.528362Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:15:31.530855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:15:31.623114Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:15:31.623262Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:15:31.637432Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14303 2025-03-31T16:15:32.160063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:15:35.579570Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-31T16:15:35.616820Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:15:35.616947Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:15:35.656555Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:15:35.659155Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:15:35.903545Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:35.903977Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:35.904432Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:35.904551Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:35.904780Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:35.904869Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:35.904947Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:35.905015Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:35.905088Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:36.070734Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:15:36.070848Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:15:36.084395Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:15:36.243198Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:15:36.293309Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-31T16:15:36.293433Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-31T16:15:36.325270Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-31T16:15:36.325784Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-31T16:15:36.326022Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-31T16:15:36.326088Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-31T16:15:36.326146Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-31T16:15:36.326200Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-31T16:15:36.326250Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-31T16:15:36.326322Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-31T16:15:36.328077Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-31T16:15:36.360001Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-31T16:15:36.360119Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1870:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-31T16:15:36.368502Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1881:2607] 2025-03-31T16:15:36.372175Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1904:2618] 2025-03-31T16:15:36.372323Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1904:2618], schemeshard id = 72075186224037897 2025-03-31T16:15:36.386521Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-31T16:15:36.408429Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-31T16:15:36.408518Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-31T16:15:36.408596Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-31T16:15:36.427648Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-31T16:15:36.435766Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-31T16:15:36.435931Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-31T16:15:36.678172Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-31T16:15:36.842904Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-31T16:15:36.943651Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-31T16:15:37.892810Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2238:3072], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:37.892937Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:37.913162Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-31T16:15:38.262729Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2384:2884];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:15:38.262992Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2384:2884];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:15:38.263360Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2384:2884];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:15:38.263520Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2384:2884];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:15:38.263642Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2384:2884];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:15:38.263766Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2384:2884];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:15:38.263898Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2384:2884];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:15:38.264059Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2384:2884];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:15:38.264279Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2384:2884];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:15:38.264466Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2384:2884];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:15:38.264608Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2384:2884];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:15:38.264728Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2384:2884];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:15:38.313554Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2397:2888];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:15:38.313676Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2397:2888];tablet_id=72075186224037900;process= ... storeV1Chunks_V2;id=15; 2025-03-31T16:15:38.719263Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:15:38.719405Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T16:15:38.719453Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T16:15:39.192171Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715659; 2025-03-31T16:15:39.198581Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037900;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715659; 2025-03-31T16:15:39.203757Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715659; 2025-03-31T16:15:39.208986Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715659; 2025-03-31T16:15:39.217648Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037902;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715659; 2025-03-31T16:15:39.223271Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715659; 2025-03-31T16:15:39.229248Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037904;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715659; 2025-03-31T16:15:39.234545Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715659; 2025-03-31T16:15:39.240860Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715659; 2025-03-31T16:15:39.246413Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715659; 2025-03-31T16:15:40.560320Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3245:3172], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:40.579693Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:40.583797Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715660:0, at schemeshard: 72075186224037897 2025-03-31T16:15:41.095860Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037900;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715660; 2025-03-31T16:15:41.096905Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715660; 2025-03-31T16:15:41.098159Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715660; 2025-03-31T16:15:41.098768Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037902;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715660; 2025-03-31T16:15:41.099363Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715660; 2025-03-31T16:15:41.099911Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037904;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715660; 2025-03-31T16:15:41.100430Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715660; 2025-03-31T16:15:41.102601Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715660; 2025-03-31T16:15:41.103333Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715660; 2025-03-31T16:15:41.104242Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715660; 2025-03-31T16:15:41.973768Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3805:3228], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:41.973983Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:41.990740Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715661:0, at schemeshard: 72075186224037897 2025-03-31T16:15:42.047498Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-31T16:15:42.048124Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037900;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-31T16:15:42.048567Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037902;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-31T16:15:42.049103Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-31T16:15:42.049485Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-31T16:15:42.051251Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037904;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-31T16:15:42.051632Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-31T16:15:42.054020Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-31T16:15:42.054680Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-31T16:15:42.055271Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; waiting actualization: 0/0.000017s 2025-03-31T16:15:44.215788Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:4093:4251] 2025-03-31T16:15:44.218805Z node 2 :STATISTICS DEBUG: [72075186224037894] Send TEvStatistics::TEvAnalyzeStatusResponse. 
Status STATUS_NO_OPERATION Answer: 'No analyze operation'
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager;
[the two WARN records above repeat verbatim, back to back, for the remainder of this block]
>> THealthCheckTest::Issues100GroupsListing
------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::JsonAutocompleteColumnsPOST [GOOD]
Test command err: 2025-03-31T16:15:05.039647Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:336:2378], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:05.040144Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:05.040206Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 15283, node 1 TClient is connected to server localhost:5823 2025-03-31T16:15:14.316240Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:319:2362], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:14.316670Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:14.316830Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 15282, node 2 TClient is connected to server localhost:22781 2025-03-31T16:15:23.503751Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:337:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:23.503974Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:23.504271Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 21748, node 3 TClient is connected to server localhost:9544 2025-03-31T16:15:32.883893Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:336:2378], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:32.884399Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:32.884588Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 63327, node 4 TClient is connected to server localhost:25683 2025-03-31T16:15:43.063438Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:317:2360], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:43.063850Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:43.064044Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 7822, node 5 TClient is connected to server localhost:30175 >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscription [GOOD] >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterAlter >> KqpJoinOrder::FiveWayJoinStatsOverride+ColumnStore [GOOD] >> THealthCheckTest::StorageLimit87 [GOOD] >> THealthCheckTest::StorageLimit80 >> THealthCheckTest::StorageNoQuota [GOOD] >> THealthCheckTest::TestBootingTabletIsNotDead >> THealthCheckTest::GreenStatusWhenInitPending [GOOD] >> THealthCheckTest::IgnoreOtherGenerations >> Viewer::JsonAutocompleteSchemePOST [GOOD] >> THealthCheckTest::ServerlessBadTablets >> THealthCheckTest::YellowGroupIssueWhenPartialGroupStatus [GOOD] >> THealthCheckTest::YellowGroupIssueOnYellowSpace >> TConsoleTests::TestDatabaseQuotasBadStorageQuota [GOOD] >> THealthCheckTest::RedGroupIssueOnRedSpace [GOOD] >> THealthCheckTest::ProtobufBelowLimitFor10VdisksIssues >> THealthCheckTest::OneIssueListing [GOOD] >> THealthCheckTest::OrangeGroupIssueWhenDegradedGroupStatus ------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::JsonAutocompleteSchemePOST [GOOD] Test command err: 2025-03-31T16:15:10.290339Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:336:2378], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:10.290756Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:10.290811Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 20549, node 1 TClient is connected to server localhost:9092 2025-03-31T16:15:18.749047Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:319:2362], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:18.749500Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:18.749657Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 13179, node 2 TClient is connected to server localhost:6336 2025-03-31T16:15:27.469017Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:337:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:27.469189Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:27.469373Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 28893, node 3 TClient is connected to server localhost:62602 2025-03-31T16:15:37.211576Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:336:2378], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:37.211987Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:37.212156Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 8559, node 4 TClient is connected to server localhost:3538 2025-03-31T16:15:46.350534Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:317:2360], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:46.350913Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:46.351037Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 8843, node 5 TClient is connected to server localhost:7231 >> TConsoleTests::TestNotifyOperationCompletion [GOOD] >> TConsoleTests::TestNotifyOperationCompletionExtSubdomain ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoinStatsOverride+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 21498, MsgBus: 5826 2025-03-31T16:14:32.669599Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488007787741451650:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:14:32.669672Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001988/r3tmp/tmpVuVol8/pdisk_1.dat 2025-03-31T16:14:33.010796Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21498, node 1 2025-03-31T16:14:33.024998Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:14:33.025455Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:14:33.029474Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:14:33.071653Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:14:33.071673Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:14:33.071680Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:14:33.071791Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5826 TClient is connected to server localhost:5826 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:14:33.538926Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:14:33.560544Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:14:35.528371Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007800626354191:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:35.528398Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007800626354179:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:35.528564Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:35.532851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:14:35.547037Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007800626354193:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:14:35.650614Z node 1 :TX_PROXY ERROR: Actor# [1:7488007800626354244:2337] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:14:35.994138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:14:36.247308Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488007804921321788:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:14:36.247478Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488007804921321788:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:14:36.247673Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488007804921321788:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:14:36.247737Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488007804921321788:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:14:36.247822Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488007804921321788:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:14:36.247897Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488007804921321788:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:14:36.247971Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488007804921321788:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:14:36.248031Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488007804921321788:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:14:36.248086Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488007804921321788:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:14:36.248146Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488007804921321788:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:14:36.248202Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7488007804921321788:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:14:36.248255Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488007804921321788:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:14:36.275994Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488007804921321786:2348];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:14:36.276073Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488007804921321786:2348];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:14:36.276297Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488007804921321786:2348];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:14:36.276410Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488007804921321786:2348];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:14:36.276534Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488007804921321786:2348];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:14:36.276642Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488007804921321786:2348];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:14:36.276745Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488007804921321786:2348];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:14:36.276848Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488007804921321786:2348];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:14:36.276970Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488007804921321786:2348];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:14:36.277072Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488007804921321786:2348];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:14:36.277072Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488007804921321823:2354];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:14:36.277111Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037894;self_id=[1:7488007804921321823:2354];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:14:36.277165Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488007804921321786:2348];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:14:36.277282Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488007804921321786:2348];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp: ... tablet_id=72075186224039295;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.230602Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039360;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.236075Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039309;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.236667Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039350;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.241608Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039301;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.242633Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039315;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.247142Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039253;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.248472Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039299;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.252420Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039354;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.254290Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039283;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.257635Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039307;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.260187Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.262796Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 
2025-03-31T16:15:38.266569Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.268161Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039325;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.272744Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039333;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.273160Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039303;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.278297Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039345;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.278699Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039281;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.283344Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.284359Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039269;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.288621Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039321;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.290104Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039287;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.293693Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.295625Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039341;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.298575Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039343;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.301459Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.303881Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.307581Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039329;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.308940Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.313783Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039367;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.314117Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039275;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.319534Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.319913Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039251;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.324703Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.325905Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039327;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.328870Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.331864Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.332792Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.337123Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039293;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.337717Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.341383Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039323;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.343557Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.345420Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.349398Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039313;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.349609Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.353799Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.447710Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpfrkcsa5weed12c4bmz8gs", SessionId: ydb://session/3?node_id=1&id=ZWI2ZGM1YzEtOTg5YzVmZWUtNjMxNWFiMGYtZTYwZWUxM2Y=, Slow query, duration: 25.653782s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:15:38.647226Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:15:38.647228Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:15:38.647739Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut/unittest >> TConsoleTests::TestDatabaseQuotasBadStorageQuota [GOOD] Test command err: 2025-03-31T16:14:19.684661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:14:19.684733Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:14:19.749835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-31T16:14:21.045280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:1, at schemeshard: 72057594046578944 2025-03-31T16:14:21.216967Z node 4 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-03-31T16:14:21.218373Z node 4 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/0l5c/000a46/r3tmp/tmpvp9oTi/pdisk_1.dat": unknown reason, errno# 0. 
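For readability, the DDL from the KQP_SLOW_LOG entry above, with the escaped \n sequences in the quoted query text expanded (statements reproduced from the log; only whitespace is changed):

CREATE TABLE t1 (
  id1 Int32 NOT NULL,
  PRIMARY KEY (id1)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t2 (
  id2 Int64 NOT NULL,
  t1_id1 Int64 NOT NULL,
  -- random_field2 Int32
  PRIMARY KEY (id2)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t3 (
  id3 Int16 NOT NULL,
  -- random_field3 Int32
  PRIMARY KEY (id3)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);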
PDiskId# 1000 2025-03-31T16:14:21.221429Z node 4 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/0l5c/000a46/r3tmp/tmpvp9oTi/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/0l5c/000a46/r3tmp/tmpvp9oTi/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 14326324434536781020 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-03-31T16:14:21.225615Z node 4 :BS_LOCALRECOVERY CRIT: VDISK[80000002:_:0:0:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 
RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/0l5c/000a46/r3tmp/tmpvp9oTi/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-03-31T16:14:21.286907Z node 9 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-03-31T16:14:21.287566Z node 9 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/0l5c/000a46/r3tmp/tmpvp9oTi/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-03-31T16:14:21.287827Z node 9 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/0l5c/000a46/r3tmp/tmpvp9oTi/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/0l5c/000a46/r3tmp/tmpvp9oTi/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 10519174701266243003 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-03-31T16:14:21.353799Z node 7 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-03-31T16:14:21.354291Z node 7 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/0l5c/000a46/r3tmp/tmpvp9oTi/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-03-31T16:14:21.354638Z node 7 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/0l5c/000a46/r3tmp/tmpvp9oTi/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/0l5c/000a46/r3tmp/tmpvp9oTi/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 3621827375540107395 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-03-31T16:14:21.396246Z node 6 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-03-31T16:14:21.396719Z node 6 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/0l5c/000a46/r3tmp/tmpvp9oTi/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-03-31T16:14:21.396911Z node 6 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/0l5c/000a46/r3tmp/tmpvp9oTi/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/0l5c/000a46/r3tmp/tmpvp9oTi/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 7112476434848525504 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-03-31T16:14:21.444061Z node 3 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 ... 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/0l5c/000a46/r3tmp/tmpP63R53/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-03-31T16:15:40.207097Z node 148 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-03-31T16:15:40.207528Z node 148 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/0l5c/000a46/r3tmp/tmpP63R53/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-03-31T16:15:40.207680Z node 148 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/0l5c/000a46/r3tmp/tmpP63R53/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/0l5c/000a46/r3tmp/tmpP63R53/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 1984520467011946073 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-03-31T16:15:40.248735Z node 151 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-03-31T16:15:40.249093Z node 151 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/0l5c/000a46/r3tmp/tmpP63R53/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-03-31T16:15:40.249222Z node 151 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/0l5c/000a46/r3tmp/tmpP63R53/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/0l5c/000a46/r3tmp/tmpP63R53/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 2482158712527499596 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-03-31T16:15:40.320298Z node 152 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-03-31T16:15:40.320850Z node 152 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/0l5c/000a46/r3tmp/tmpP63R53/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-03-31T16:15:40.321042Z node 152 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/0l5c/000a46/r3tmp/tmpP63R53/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/0l5c/000a46/r3tmp/tmpP63R53/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 4954480523319142290 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-03-31T16:15:40.552333Z node 145 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:15:40.552429Z node 145 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:15:40.647122Z node 145 :STATISTICS WARN: [72075186233409554] TTxInit::Complete. EnableColumnStatistics=false 2025-03-31T16:15:40.755874Z node 153 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-03-31T16:15:40.756492Z node 153 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/0l5c/000a46/r3tmp/tmpP63R53/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-03-31T16:15:40.756797Z node 153 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/0l5c/000a46/r3tmp/tmpP63R53/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/0l5c/000a46/r3tmp/tmpP63R53/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 8304535570581775388 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000
2025-03-31T16:15:43.579160Z node 154 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-31T16:15:43.579273Z node 154 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:15:43.648434Z node 154 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-03-31T16:15:47.040733Z node 163 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-31T16:15:47.040829Z node 163 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:15:47.108094Z node 163 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
>> THealthCheckTest::ShardsLimit999 [GOOD]
>> THealthCheckTest::ShardsLimit995
>> Viewer::ServerlessWithExclusiveNodes [GOOD]
>> Viewer::SharedDoesntShowExclusiveNodes
>> KqpJoinOrder::CanonizedJoinOrderTPCH15 [GOOD]
>> Viewer::Plan2SvgBad [GOOD]
>> THealthCheckTest::BlueGroupIssueWhenPartialGroupStatusAndReplicationDisks [GOOD]
>> THealthCheckTest::GreenStatusWhenCreatingGroup
>> YdbSdkSessions::MultiThreadSync
>> YdbSdkSessions::TestMultipleSessions
>> THealthCheckTest::Issues100GroupsListing [GOOD]
>> THealthCheckTest::Issues100VCardListing
>> THealthCheckTest::ServerlessBadTablets [GOOD]
>> THealthCheckTest::ServerlessWhenTroublesWithSharedNodes
>> OlapEstimationRowsCorrectness::TPCH3 [GOOD]
>> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterAlter [GOOD]
>> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterAclChange
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH15 [GOOD]
Test command err:
Trying to start YDB, gRPC: 2726, MsgBus: 61324
2025-03-31T16:14:30.902442Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488007778294731644:2268];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:14:30.902619Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00198a/r3tmp/tmpwRsL6l/pdisk_1.dat
2025-03-31T16:14:31.235985Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:14:31.276851Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:14:31.276960Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:14:31.278721Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 2726, node 1
2025-03-31T16:14:31.324349Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T16:14:31.324390Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T16:14:31.324399Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T16:14:31.324551Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:61324
TClient is connected to server localhost:61324
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-31T16:14:31.877584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:14:31.914133Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-03-31T16:14:34.022190Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007795474601285:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:14:34.022641Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007795474601274:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:14:34.022760Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:14:34.026655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480
2025-03-31T16:14:34.040583Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007795474601288:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:14:34.097181Z node 1 :TX_PROXY ERROR: Actor# [1:7488007795474601339:2336] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:14:34.416724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:14:34.629328Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488007795474601608:2353];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:14:34.629537Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488007795474601608:2353];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:14:34.629808Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488007795474601608:2353];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:14:34.630006Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488007795474601608:2353];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:14:34.630156Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488007795474601608:2353];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:14:34.630297Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488007795474601608:2353];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:14:34.630434Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488007795474601608:2353];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:14:34.630583Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488007795474601608:2353];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:14:34.630714Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488007795474601608:2353];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:14:34.630858Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488007795474601608:2353];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:14:34.630999Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037902;self_id=[1:7488007795474601608:2353];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:14:34.631147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488007795474601608:2353];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:14:34.653587Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7488007795474601616:2357];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:14:34.653662Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7488007795474601616:2357];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:14:34.653884Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7488007795474601616:2357];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:14:34.653981Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7488007795474601616:2357];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:14:34.654078Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7488007795474601616:2357];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:14:34.654168Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7488007795474601616:2357];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:14:34.654299Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7488007795474601616:2357];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:14:34.654434Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7488007795474601616:2357];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:14:34.654518Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7488007795474601616:2357];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:14:34.654603Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7488007795474601616:2357];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:14:34.654690Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7488007795474601616:2357];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:14:34.654788Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037893;self_id=[1:7488007795474601616:2357];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:14:34.667664Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488007795474601597:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:14:34.667728Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488007795474601597:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstr ... tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.535646Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.540197Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.541962Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039327;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.546036Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.548081Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039345;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.550813Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.554766Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039293;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.555472Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039315;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.559014Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039339;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.560198Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039259;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.563322Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039370;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.565609Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.567698Z node 1 
:TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039301;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.571253Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.571757Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039317;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.576103Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039283;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.576904Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.580464Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.582121Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039337;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.584205Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039297;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.587711Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.587878Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.591644Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.592961Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.595634Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.598130Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.599916Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039325;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.603376Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 
2025-03-31T16:15:38.604595Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.609423Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.609511Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.615164Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.615165Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.621343Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.621358Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.626608Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.627406Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.631743Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039319;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.633369Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.636069Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.639313Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.641297Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.645420Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:38.646609Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:15:38.652079Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:15:38.652121Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:15:38.760945Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpfrhzm1nx399vy9q2b3ngy", SessionId: ydb://session/3?node_id=1&id=OTNkYjUxYjUtZDlhZWYwZjItNmZlOWY0N2ItNGQ0MDk2ZDc=, Slow query, duration: 27.411569s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b
2025-03-31T16:15:38.988327Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716;
2025-03-31T16:15:38.988327Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716;
2025-03-31T16:15:38.988938Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716;
>> THealthCheckTest::StorageLimit80 [GOOD]
>> THealthCheckTest::StorageLimit50
>> KqpJoinOrder::TestJoinOrderHintsSimple+ColumnStore [GOOD]
>> KqpWorkloadServiceTables::TestLeaseUpdates [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> OlapEstimationRowsCorrectness::TPCH3 [GOOD]
Test command err:
Trying to start YDB, gRPC: 16961, MsgBus: 3045
2025-03-31T16:14:33.293818Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488007792224806726:2063];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:14:33.293894Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001987/r3tmp/tmpv8ZEPK/pdisk_1.dat
2025-03-31T16:14:33.640378Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:14:33.697540Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:14:33.697627Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
TServer::EnableGrpc on GrpcPort 16961, node 1
2025-03-31T16:14:33.701390Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-31T16:14:33.765711Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T16:14:33.765733Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T16:14:33.765757Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T16:14:33.765863Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:3045
TClient is connected to server localhost:3045
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-31T16:14:34.328335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:14:34.344934Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-03-31T16:14:36.292140Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007805109709277:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:14:36.292257Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:14:36.295562Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007805109709289:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:14:36.300564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480
2025-03-31T16:14:36.342595Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007805109709291:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:14:36.443437Z node 1 :TX_PROXY ERROR: Actor# [1:7488007805109709344:2337] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:14:36.808035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:14:37.070918Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488007805109709614:2357];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:14:37.071527Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488007805109709614:2357];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:14:37.071809Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488007805109709614:2357];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:14:37.071962Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488007805109709614:2357];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:14:37.072083Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488007805109709614:2357];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:14:37.072214Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488007805109709614:2357];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:14:37.072309Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488007805109709614:2357];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:14:37.072418Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488007805109709614:2357];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:14:37.072516Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488007805109709614:2357];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:14:37.072646Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488007805109709614:2357];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:14:37.072754Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037896;self_id=[1:7488007805109709614:2357];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:14:37.072850Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488007805109709614:2357];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:14:37.077997Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488007805109709589:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:14:37.078052Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488007805109709589:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:14:37.078218Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488007805109709589:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:14:37.078316Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488007805109709589:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:14:37.078433Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488007805109709589:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:14:37.078517Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488007805109709589:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:14:37.078596Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488007805109709589:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:14:37.078686Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488007805109709589:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:14:37.078796Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488007805109709589:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:14:37.078906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488007805109709589:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:14:37.078989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488007805109709589:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:14:37.079071Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037903;self_id=[1:7488007805109709589:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:14:37.104257Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488007805109709647:2361];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:14:37.104307Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488007805109709647:2361];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstra ... tablet_id=72075186224039372;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:40.910031Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:40.915824Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:40.921663Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:40.927623Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039247;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:40.933271Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039293;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:40.938723Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039297;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:40.944310Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:40.950066Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:40.955908Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:40.961461Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039245;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:40.967071Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:40.967073Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039343;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:40.973156Z node 1 
:TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:40.973156Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:40.979391Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:40.979391Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:40.985686Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:40.985687Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:40.991854Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039281;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:40.991854Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039341;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:40.998143Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039319;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:40.998143Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:41.004509Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039367;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:41.004509Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:41.010800Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:41.010800Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:41.016852Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:41.016864Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 
2025-03-31T16:15:41.022843Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:41.022863Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:41.029023Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:41.029024Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:41.035176Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:41.035177Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:41.041153Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:41.041164Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:41.045992Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039345;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:41.047291Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:41.050173Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039289;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:41.053252Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:41.054187Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:41.058995Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:41.059250Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:41.063552Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:15:41.065517Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:15:41.067726Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:15:41.223977Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpfrma8fqvtk8jt2pxcvejd", SessionId: ydb://session/3?node_id=1&id=MmQ2MTA3YjAtN2MyZDBhZmYtMTllYWEzY2ItZmY0ZjQ2Y2E=, Slow query, duration: 27.487352s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b
2025-03-31T16:15:41.458540Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716;
2025-03-31T16:15:41.458540Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716;
2025-03-31T16:15:41.459265Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716;
>> THealthCheckTest::IgnoreOtherGenerations [GOOD]
>> THealthCheckTest::IgnoreServerlessWhenNotSpecific
>> TConsoleTests::TestNotifyOperationCompletionExtSubdomain [GOOD]
>> TConsoleTests::TestRemoveAttributes
>> THealthCheckTest::YellowGroupIssueOnYellowSpace [GOOD]
>> THealthCheckTest::YellowIssueReadyVDisksOnFaultyPDisks
>> TFlatTest::Mix_DML_DDL
>> TFlatTest::CopyCopiedTableAndRead
>> TFlatTest::SelectRangeReverse
>> THealthCheckTest::ServerlessWhenTroublesWithSharedNodes [GOOD]
>> THealthCheckTest::ServerlessWithExclusiveNodesWhenTroublesWithSharedNodes
>> THealthCheckTest::OrangeGroupIssueWhenDegradedGroupStatus [GOOD]
>> THealthCheckTest::NoStoragePools
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> KqpWorkloadServiceTables::TestLeaseUpdates [GOOD]
Test command err:
2025-03-31T16:14:10.734585Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488007689557432508:2059];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:14:10.734799Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001101/r3tmp/tmpTh9U4j/pdisk_1.dat
2025-03-31T16:14:11.088420Z node 1 :IMPORT WARN: Table profiles were not
loaded TServer::EnableGrpc on GrpcPort 17563, node 1 2025-03-31T16:14:11.158547Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:14:11.158700Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:14:11.160491Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:14:11.169860Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:14:11.169894Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:14:11.169906Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:14:11.170046Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32002 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:14:11.513407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:14:11.532072Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:14:13.642693Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-03-31T16:14:13.642921Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-03-31T16:14:13.642958Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2025-03-31T16:14:13.646200Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MmRjNTA1OGQtODE5Y2NlMmItNDZiYjQ4MmMtZGEyMDhiZDg=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id MmRjNTA1OGQtODE5Y2NlMmItNDZiYjQ4MmMtZGEyMDhiZDg= 2025-03-31T16:14:13.647117Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7488007702442335042:2328], Start check tables existence, number paths: 2 2025-03-31T16:14:13.647244Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MmRjNTA1OGQtODE5Y2NlMmItNDZiYjQ4MmMtZGEyMDhiZDg=, ActorId: [1:7488007702442335044:2329], ActorState: unknown state, session actor bootstrapped 2025-03-31T16:14:13.647535Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 1 2025-03-31T16:14:13.653469Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7488007702442335042:2328], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-03-31T16:14:13.653547Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7488007702442335042:2328], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-03-31T16:14:13.653584Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7488007702442335042:2328], Successfully finished 2025-03-31T16:14:13.653673Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-03-31T16:14:13.664494Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007702442335061:2300], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-03-31T16:14:13.668383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:14:13.669685Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007702442335061:2300], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2025-03-31T16:14:13.669839Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007702442335061:2300], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2025-03-31T16:14:13.678088Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007702442335061:2300], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:14:13.757407Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007702442335061:2300], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-03-31T16:14:13.761910Z node 1 :TX_PROXY ERROR: Actor# [1:7488007702442335114:2333] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:14:13.762036Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007702442335061:2300], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2025-03-31T16:14:13.762359Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: Root, PoolId: sample_pool_id 2025-03-31T16:14:13.762388Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id Root 2025-03-31T16:14:13.762465Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007702442335121:2331], DatabaseId: Root, PoolId: sample_pool_id, Start pool fetching 2025-03-31T16:14:13.763615Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007702442335121:2331], DatabaseId: Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-03-31T16:14:13.763724Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: Root 2025-03-31T16:14:13.763778Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new handler for pool /Root/sample_pool_id 2025-03-31T16:14:13.764077Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7488007702442335130:2332], DatabaseId: Root, PoolId: sample_pool_id, Subscribed on schemeboard notifications for path: [OwnerId: 72057594046644480, LocalPathId: 5] 2025-03-31T16:14:13.765077Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7488007702442335130:2332], DatabaseId: Root, PoolId: sample_pool_id, Got watch notification 2025-03-31T16:14:13.770903Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: default 2025-03-31T16:14:13.770957Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root 2025-03-31T16:14:13.771007Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007702442335142:2334], DatabaseId: /Root, PoolId: default, Start pool fetching 2025-03-31T16:14:13.771065Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MmRjNTA1OGQtODE5Y2NlMmItNDZiYjQ4MmMtZGEyMDhiZDg=, ActorId: [1:7488007702442335044:2329], ActorState: ReadyState, TraceId: 01jqpfpsra7avrjfqwb393h0ev, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DDL text: DROP RESOURCE POOL sample_pool_id; rpcActor: [0:0:0] database: /Root databaseId: /Root pool id: default 2025-03-31T16:14:13.780853Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007702442335142:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:13.781037Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:13.999903Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7488007702442335130:2332], DatabaseId: Root, PoolId: sample_pool_id, Got delete notification 2025-03-31T16:14:14.005824Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=MmRjNTA1OGQtODE5Y2NlMmItNDZiYjQ4MmMtZGEyMDhiZDg=, ActorId: [1:7488007702442335044:2329], ActorState: ExecuteState, TraceId: 01jqpfpsra7avrjfqwb393h0ev, Cleanup start, isFinal: 0 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [1:7488007702442335151:2329] WorkloadServiceCleanup: 0 2025-03-31T16:14:14.007345Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MmRjNTA1OGQtODE5Y2NlMmItNDZiYjQ4MmMtZGEyMDhiZDg=, ActorId: [1:7488007702442335044:2329], ActorState: CleanupState, TraceId: 01jqpfpsra7avrjfqwb393h0ev, EndCleanup, isFinal: 0 2025-03-31T16:14:14.007441Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MmRjNTA1OGQtODE5Y2NlMmItNDZiYjQ4MmMtZGEyMDhiZDg=, ActorId: [1:7488007702442335044:2329], ActorState: CleanupState, TraceId: 01jqpfpsra7avrjfqwb393h0ev, Sent query response back to proxy, proxyRequestId: 3, proxyId: [1:7488007689557432761:2277] 2025-03-31T16:14:14.016448Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=MmRjNTA1OGQtODE5Y2NlMmItNDZiYjQ4MmMtZGEyMDhiZDg=, ActorId: [1:7488007702442335044:2329], ActorState: ReadyState, Session closed due to explicit close event 2025-03-31T16:14:14.016520Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=MmRjNTA1OGQtODE5Y2NlMmItNDZiYjQ4MmMtZGEyMDhiZDg=, ActorId: [1:7488007702442335044:2329], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-03-31T16:14:14.016544Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MmRjNTA1OGQtODE5Y2NlMmItNDZiYjQ4MmMtZGEyMDhiZDg=, ActorId: [1:7488007702442335044:2329], ActorState: ReadyState, EndCleanup, isFinal: 1 2025 ... 
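The records above trace the resource pool lifecycle this test drives: TPoolCreatorActor creates sample_pool_id, retries once on the "Transaction ... completed, doublechecking" race, and the session then executes DROP RESOURCE POOL. A minimal YQL sketch of the equivalent DDL round-trip follows; only the pool name and the DROP statement are taken from the log, while the CREATE RESOURCE POOL syntax is assumed to be the current YQL form and the WITH options are illustrative placeholders, not the test's actual settings:

    -- Hypothetical settings; only the pool name and DROP come from the log.
    CREATE RESOURCE POOL sample_pool_id WITH (
        CONCURRENT_QUERY_LIMIT = 10,  -- assumed illustrative value
        QUEUE_SIZE = 0                -- assumed illustrative value
    );

    DROP RESOURCE POOL sample_pool_id;
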
?node_id=10&id=M2ExY2QzNTgtNjNmN2MxZDktMTkxMTQ0NTUtYzhmMmE3YWM=, ActorId: [10:7488008137991502631:2540], ActorState: ExecuteState, TraceId: 01jqpfsw7c2td5zvnvg3xs2k14, Sending to Executer TraceId: 0 8 2025-03-31T16:15:54.606637Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=M2ExY2QzNTgtNjNmN2MxZDktMTkxMTQ0NTUtYzhmMmE3YWM=, ActorId: [10:7488008137991502631:2540], ActorState: ExecuteState, TraceId: 01jqpfsw7c2td5zvnvg3xs2k14, Created new KQP executer: [10:7488008137991502636:2540] isRollback: 0 2025-03-31T16:15:54.616288Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=M2ExY2QzNTgtNjNmN2MxZDktMTkxMTQ0NTUtYzhmMmE3YWM=, ActorId: [10:7488008137991502631:2540], ActorState: ExecuteState, TraceId: 01jqpfsw7c2td5zvnvg3xs2k14, TEvTxResponse, CurrentTx: 1/1 response.status: SUCCESS 2025-03-31T16:15:54.616457Z node 10 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=10&id=M2ExY2QzNTgtNjNmN2MxZDktMTkxMTQ0NTUtYzhmMmE3YWM=, ActorId: [10:7488008137991502631:2540], ActorState: ExecuteState, TraceId: 01jqpfsw7c2td5zvnvg3xs2k14, txInfo Status: Committed Kind: ReadWrite TotalDuration: 10.683 ServerDuration: 10.148 QueriesCount: 2 2025-03-31T16:15:54.616587Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=M2ExY2QzNTgtNjNmN2MxZDktMTkxMTQ0NTUtYzhmMmE3YWM=, ActorId: [10:7488008137991502631:2540], ActorState: ExecuteState, TraceId: 01jqpfsw7c2td5zvnvg3xs2k14, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-03-31T16:15:54.616664Z node 10 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=10&id=M2ExY2QzNTgtNjNmN2MxZDktMTkxMTQ0NTUtYzhmMmE3YWM=, ActorId: [10:7488008137991502631:2540], ActorState: ExecuteState, TraceId: 01jqpfsw7c2td5zvnvg3xs2k14, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-03-31T16:15:54.616702Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=M2ExY2QzNTgtNjNmN2MxZDktMTkxMTQ0NTUtYzhmMmE3YWM=, ActorId: [10:7488008137991502631:2540], ActorState: ExecuteState, TraceId: 01jqpfsw7c2td5zvnvg3xs2k14, EndCleanup, isFinal: 0 2025-03-31T16:15:54.616777Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=M2ExY2QzNTgtNjNmN2MxZDktMTkxMTQ0NTUtYzhmMmE3YWM=, ActorId: [10:7488008137991502631:2540], ActorState: ExecuteState, TraceId: 01jqpfsw7c2td5zvnvg3xs2k14, Sent query response back to proxy, proxyRequestId: 28, proxyId: [10:7488008039207253465:2271] 2025-03-31T16:15:54.617123Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Update lease, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=10&id=M2ExY2QzNTgtNjNmN2MxZDktMTkxMTQ0NTUtYzhmMmE3YWM=, TxId: 2025-03-31T16:15:54.617247Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Update lease, RunDataQuery: -- TRefreshPoolStateQuery::OnLeaseUpdated DECLARE $database_id AS Text; DECLARE $pool_id AS Text; SELECT COUNT(*) AS delayed_requests FROM `.metadata/workload_manager/delayed_requests` WHERE database = $database_id AND pool_id = $pool_id AND (wait_deadline IS NULL OR wait_deadline >= CurrentUtcTimestamp()) AND lease_deadline >= CurrentUtcTimestamp(); SELECT COUNT(*) AS running_requests FROM `.metadata/workload_manager/running_requests` WHERE database = $database_id AND pool_id = $pool_id AND lease_deadline >= 
CurrentUtcTimestamp(); 2025-03-31T16:15:54.617812Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=M2ExY2QzNTgtNjNmN2MxZDktMTkxMTQ0NTUtYzhmMmE3YWM=, ActorId: [10:7488008137991502631:2540], ActorState: ReadyState, TraceId: 01jqpfsw7s4jtr31yyrbj0arqn, received request, proxyRequestId: 29 prepared: 0 tx_control: 1 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DML text: -- TRefreshPoolStateQuery::OnLeaseUpdated DECLARE $database_id AS Text; DECLARE $pool_id AS Text; SELECT COUNT(*) AS delayed_requests FROM `.metadata/workload_manager/delayed_requests` WHERE database = $database_id AND pool_id = $pool_id AND (wait_deadline IS NULL OR wait_deadline >= CurrentUtcTimestamp()) AND lease_deadline >= CurrentUtcTimestamp(); SELECT COUNT(*) AS running_requests FROM `.metadata/workload_manager/running_requests` WHERE database = $database_id AND pool_id = $pool_id AND lease_deadline >= CurrentUtcTimestamp(); rpcActor: [10:7488008137991502661:2546] database: /Root databaseId: /Root pool id: default 2025-03-31T16:15:54.617860Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=M2ExY2QzNTgtNjNmN2MxZDktMTkxMTQ0NTUtYzhmMmE3YWM=, ActorId: [10:7488008137991502631:2540], ActorState: ReadyState, TraceId: 01jqpfsw7s4jtr31yyrbj0arqn, request placed into pool from cache: default 2025-03-31T16:15:54.618463Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=M2ExY2QzNTgtNjNmN2MxZDktMTkxMTQ0NTUtYzhmMmE3YWM=, ActorId: [10:7488008137991502631:2540], ActorState: ExecuteState, TraceId: 01jqpfsw7s4jtr31yyrbj0arqn, ExecutePhyTx, tx: 0x000050C0004E5F98 literal: 0 commit: 0 txCtx.DeferredEffects.size(): 0 2025-03-31T16:15:54.618553Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=M2ExY2QzNTgtNjNmN2MxZDktMTkxMTQ0NTUtYzhmMmE3YWM=, ActorId: [10:7488008137991502631:2540], ActorState: ExecuteState, TraceId: 01jqpfsw7s4jtr31yyrbj0arqn, Sending to Executer TraceId: 0 8 2025-03-31T16:15:54.618639Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=M2ExY2QzNTgtNjNmN2MxZDktMTkxMTQ0NTUtYzhmMmE3YWM=, ActorId: [10:7488008137991502631:2540], ActorState: ExecuteState, TraceId: 01jqpfsw7s4jtr31yyrbj0arqn, Created new KQP executer: [10:7488008137991502664:2540] isRollback: 0 2025-03-31T16:15:54.623559Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=M2ExY2QzNTgtNjNmN2MxZDktMTkxMTQ0NTUtYzhmMmE3YWM=, ActorId: [10:7488008137991502631:2540], ActorState: ExecuteState, TraceId: 01jqpfsw7s4jtr31yyrbj0arqn, TEvTxResponse, CurrentTx: 1/2 response.status: SUCCESS 2025-03-31T16:15:54.623644Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=M2ExY2QzNTgtNjNmN2MxZDktMTkxMTQ0NTUtYzhmMmE3YWM=, ActorId: [10:7488008137991502631:2540], ActorState: ExecuteState, TraceId: 01jqpfsw7s4jtr31yyrbj0arqn, ExecutePhyTx, tx: 0x000050C0003DEA18 literal: 1 commit: 1 txCtx.DeferredEffects.size(): 0 2025-03-31T16:15:54.624599Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=M2ExY2QzNTgtNjNmN2MxZDktMTkxMTQ0NTUtYzhmMmE3YWM=, ActorId: [10:7488008137991502631:2540], ActorState: ExecuteState, TraceId: 01jqpfsw7s4jtr31yyrbj0arqn, TEvTxResponse, CurrentTx: 2/2 response.status: SUCCESS 2025-03-31T16:15:54.624764Z node 10 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=10&id=M2ExY2QzNTgtNjNmN2MxZDktMTkxMTQ0NTUtYzhmMmE3YWM=, ActorId: [10:7488008137991502631:2540], ActorState: ExecuteState, TraceId: 01jqpfsw7s4jtr31yyrbj0arqn, txInfo Status: Committed Kind: ReadOnly TotalDuration: 6.438 ServerDuration: 6.3 QueriesCount: 2 
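The lease-refresh statement that TRefreshPoolStateQuery submits appears twice above as one flattened line; reflowed for readability, this is the same YQL verbatim:

    -- TRefreshPoolStateQuery::OnLeaseUpdated
    DECLARE $database_id AS Text;
    DECLARE $pool_id AS Text;

    SELECT COUNT(*) AS delayed_requests
    FROM `.metadata/workload_manager/delayed_requests`
    WHERE database = $database_id
      AND pool_id = $pool_id
      AND (wait_deadline IS NULL OR wait_deadline >= CurrentUtcTimestamp())
      AND lease_deadline >= CurrentUtcTimestamp();

    SELECT COUNT(*) AS running_requests
    FROM `.metadata/workload_manager/running_requests`
    WHERE database = $database_id
      AND pool_id = $pool_id
      AND lease_deadline >= CurrentUtcTimestamp();

Both counts require lease_deadline >= CurrentUtcTimestamp(), so a request whose lease has expired simply stops being counted against the pool.
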
2025-03-31T16:15:54.624894Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=M2ExY2QzNTgtNjNmN2MxZDktMTkxMTQ0NTUtYzhmMmE3YWM=, ActorId: [10:7488008137991502631:2540], ActorState: ExecuteState, TraceId: 01jqpfsw7s4jtr31yyrbj0arqn, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-03-31T16:15:54.624969Z node 10 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=10&id=M2ExY2QzNTgtNjNmN2MxZDktMTkxMTQ0NTUtYzhmMmE3YWM=, ActorId: [10:7488008137991502631:2540], ActorState: ExecuteState, TraceId: 01jqpfsw7s4jtr31yyrbj0arqn, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-03-31T16:15:54.625005Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=M2ExY2QzNTgtNjNmN2MxZDktMTkxMTQ0NTUtYzhmMmE3YWM=, ActorId: [10:7488008137991502631:2540], ActorState: ExecuteState, TraceId: 01jqpfsw7s4jtr31yyrbj0arqn, EndCleanup, isFinal: 0 2025-03-31T16:15:54.625065Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=M2ExY2QzNTgtNjNmN2MxZDktMTkxMTQ0NTUtYzhmMmE3YWM=, ActorId: [10:7488008137991502631:2540], ActorState: ExecuteState, TraceId: 01jqpfsw7s4jtr31yyrbj0arqn, Sent query response back to proxy, proxyRequestId: 29, proxyId: [10:7488008039207253465:2271] 2025-03-31T16:15:54.625445Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=10&id=M2ExY2QzNTgtNjNmN2MxZDktMTkxMTQ0NTUtYzhmMmE3YWM=, TxId: 2025-03-31T16:15:54.625572Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, Finish with SUCCESS, SessionId: ydb://session/3?node_id=10&id=M2ExY2QzNTgtNjNmN2MxZDktMTkxMTQ0NTUtYzhmMmE3YWM=, TxId: 2025-03-31T16:15:54.625750Z node 10 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=10&id=M2ExY2QzNTgtNjNmN2MxZDktMTkxMTQ0NTUtYzhmMmE3YWM=, ActorId: [10:7488008137991502631:2540], ActorState: ReadyState, Session closed due to explicit close event 2025-03-31T16:15:54.625793Z node 10 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=10&id=M2ExY2QzNTgtNjNmN2MxZDktMTkxMTQ0NTUtYzhmMmE3YWM=, ActorId: [10:7488008137991502631:2540], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-03-31T16:15:54.625819Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=M2ExY2QzNTgtNjNmN2MxZDktMTkxMTQ0NTUtYzhmMmE3YWM=, ActorId: [10:7488008137991502631:2540], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-03-31T16:15:54.625847Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=M2ExY2QzNTgtNjNmN2MxZDktMTkxMTQ0NTUtYzhmMmE3YWM=, ActorId: [10:7488008137991502631:2540], ActorState: unknown state, Cleanup temp tables: 0 2025-03-31T16:15:54.625929Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=M2ExY2QzNTgtNjNmN2MxZDktMTkxMTQ0NTUtYzhmMmE3YWM=, ActorId: [10:7488008137991502631:2540], ActorState: unknown state, Session actor destroyed 2025-03-31T16:15:54.634750Z node 10 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=10&id=N2EwYjYzNWUtZTRkM2U1ZjQtMjZjMjg5ZTYtYzczYzliNjY=, ActorId: [10:7488008060682090379:2331], ActorState: ReadyState, Session closed due to explicit close event 2025-03-31T16:15:54.634806Z node 10 :KQP_SESSION INFO: 
SessionId: ydb://session/3?node_id=10&id=N2EwYjYzNWUtZTRkM2U1ZjQtMjZjMjg5ZTYtYzczYzliNjY=, ActorId: [10:7488008060682090379:2331], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-03-31T16:15:54.634858Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=N2EwYjYzNWUtZTRkM2U1ZjQtMjZjMjg5ZTYtYzczYzliNjY=, ActorId: [10:7488008060682090379:2331], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-03-31T16:15:54.634907Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=N2EwYjYzNWUtZTRkM2U1ZjQtMjZjMjg5ZTYtYzczYzliNjY=, ActorId: [10:7488008060682090379:2331], ActorState: unknown state, Cleanup temp tables: 0 2025-03-31T16:15:54.635006Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=N2EwYjYzNWUtZTRkM2U1ZjQtMjZjMjg5ZTYtYzczYzliNjY=, ActorId: [10:7488008060682090379:2331], ActorState: unknown state, Session actor destroyed >> YdbSdkSessions::MultiThreadSync [GOOD] >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncTableClient >> THealthCheckTest::ProtobufBelowLimitFor10VdisksIssues [GOOD] >> THealthCheckTest::ProtobufUnderLimitFor70LargeVdisksIssues >> YdbSdkSessions::TestMultipleSessions [GOOD] >> TFlatTest::PathSorting ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TestJoinOrderHintsSimple+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 28928, MsgBus: 27882 2025-03-31T16:14:41.748703Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488007825580954084:2197];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:14:41.751018Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00197e/r3tmp/tmpg0QPAw/pdisk_1.dat 2025-03-31T16:14:42.097259Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:14:42.114890Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:14:42.114996Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:14:42.117306Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28928, node 1 2025-03-31T16:14:42.178827Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:14:42.179025Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:14:42.179040Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:14:42.179187Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27882 TClient is connected to server localhost:27882 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:14:42.753200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:14:42.787462Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:14:44.901973Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007838465856502:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:44.902130Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:44.903184Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007838465856511:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:44.909566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:14:44.923796Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007838465856516:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:14:44.999725Z node 1 :TX_PROXY ERROR: Actor# [1:7488007838465856567:2337] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:14:45.315472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:14:45.583860Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488007842760824094:2359];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:14:45.584090Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488007842760824094:2359];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:14:45.584462Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488007842760824094:2359];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:14:45.584617Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488007842760824094:2359];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:14:45.584760Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488007842760824094:2359];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:14:45.584909Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488007842760824094:2359];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:14:45.585063Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488007842760824094:2359];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:14:45.585225Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488007842760824094:2359];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:14:45.585372Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488007842760824094:2359];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:14:45.585508Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488007842760824094:2359];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:14:45.585640Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037892;self_id=[1:7488007842760824094:2359];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:14:45.585749Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488007842760824094:2359];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:14:45.587667Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488007842760824086:2355];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:14:45.587731Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488007842760824086:2355];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:14:45.587965Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488007842760824086:2355];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:14:45.588073Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488007842760824086:2355];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:14:45.588201Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488007842760824086:2355];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:14:45.588317Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488007842760824086:2355];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:14:45.588426Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488007842760824086:2355];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:14:45.588547Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488007842760824086:2355];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:14:45.588648Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488007842760824086:2355];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:14:45.588765Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488007842760824086:2355];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:14:45.588883Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488007842760824086:2355];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:14:45.588979Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7488007842760824086:2355];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:14:45.636003Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488007842760824088:2356];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:14:45.636080Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488007842760824088:2356];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abs ... 0714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:46.493975Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:46.497781Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039279;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:46.498482Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:46.502116Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:46.503885Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:46.506093Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:46.508975Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:46.509431Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:46.512606Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:46.513872Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:46.516206Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:46.518685Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:46.519909Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:46.523365Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:46.523394Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:46.527292Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:46.527293Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:46.531460Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039215;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:46.531460Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:46.535188Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039345;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:46.535227Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:46.538893Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:46.538971Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:46.542487Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:46.542687Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:46.546483Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:46.546889Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:46.550827Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:46.551029Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:46.555274Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:46.555297Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:46.559845Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:46.559857Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:46.564418Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:46.564527Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:46.569197Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:46.569232Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:46.573957Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:46.573959Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:46.578725Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:46.578756Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:46.582825Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:46.583569Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039263;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:46.587155Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:46.654392Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpfrwvka3kgqejv18wddx18", SessionId: ydb://session/3?node_id=1&id=MTU1NDkxMWQtMTA2NzM3NTUtN2JmMGViNjktNjM1MWJiMzY=, Slow query, duration: 24.170480s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:15:46.809610Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;self_id=[1:7488008061804207965:10297];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039094;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039392;receive=72075186224038933; 2025-03-31T16:15:46.809644Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:15:46.810063Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:15:46.810263Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716;
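The KQP_SLOW_LOG record above embeds the test's DDL with escaped newlines; unescaped, the statement text it quotes is:

    CREATE TABLE t1 (
        id1 Int32 NOT NULL,
        PRIMARY KEY (id1)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t2 (
        id2 Int64 NOT NULL,
        t1_id1 Int64 NOT NULL,
        -- random_field2 Int32
        PRIMARY KEY (id2)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t3 (
        id3 Int16 NOT NULL,
        -- random_field3 Int32
        PRIMARY KEY (id3)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

Creating these three column-store tables with 240 partitions each under ASAN took 24.170480s, which is what tripped the slow-query threshold.
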
: Warning: Execution, code: 1060
: Warning: Unapplied hint: Rows(R T # 1), code: 4534
: Warning: Execution, code: 1060
: Warning: Unapplied hint: Rows(R T # 1), code: 4534 >> THealthCheckTest::TestBootingTabletIsNotDead [GOOD] >> THealthCheckTest::TestReBootingTabletIsDead >> THealthCheckTest::ShardsLimit995 [GOOD] >> THealthCheckTest::ShardsNoLimit ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest >> YdbSdkSessions::TestMultipleSessions [GOOD] Test command err: 2025-03-31T16:15:52.933405Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008131559561930:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:15:52.933502Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d3e/r3tmp/tmpw5A6ue/pdisk_1.dat 2025-03-31T16:15:53.251394Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:15:53.292491Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:15:53.292592Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 4088, node 1 2025-03-31T16:15:53.300760Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:15:53.301159Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:15:53.309823Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:15:53.412639Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:15:53.412677Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:15:53.412692Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:15:53.412844Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13637 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:15:53.854695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:15:55.358514Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008144444464854:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:55.358532Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008144444464842:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:55.358717Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:55.360084Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008144444464860:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:55.360115Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008144444464862:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:55.360173Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:55.365318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-31T16:15:55.368214Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008144444464905:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:55.368333Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008144444464903:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:55.368422Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:55.370784Z node 1 :TX_PROXY ERROR: Actor# [1:7488008144444464857:2630] txid# 281474976710658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-31T16:15:55.375261Z node 1 :TX_PROXY ERROR: Actor# [1:7488008144444464912:2653] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-31T16:15:55.386203Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008144444464856:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-31T16:15:55.386263Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008144444464910:2354], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-31T16:15:55.386321Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008144444464866:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-31T16:15:55.440876Z node 1 :TX_PROXY ERROR: Actor# [1:7488008144444465025:2721] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:15:55.477750Z node 1 :TX_PROXY ERROR: Actor# [1:7488008144444465035:2729] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:15:55.478713Z node 1 :TX_PROXY ERROR: Actor# [1:7488008144444465042:2735] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterAclChange [GOOD] >> TLocksTest::Range_BrokenLock2 >> TFlatTest::SelectRangeReverse [GOOD] >> TFlatTest::SelectRangeReverseExcludeKeys >> TFlatTest::Mix_DML_DDL [GOOD] >> TFlatTest::OutOfDiskSpace [GOOD] >> TFlatTest::CopyCopiedTableAndRead [GOOD] >> TFlatTest::CopyTableAndAddFollowers >> THealthCheckTest::GreenStatusWhenCreatingGroup [GOOD] >> THealthCheckTest::DontIgnoreServerlessWithExclusiveNodesWhenNotSpecific >> TFlatTest::LargeDatashardReplyDistributed >> Viewer::JsonStorageListingV2 [GOOD] >> Viewer::JsonStorageListingV2GroupIdFilter >> TFlatTest::PathSorting [GOOD] >> TFlatTest::PartBloomFilter >> THealthCheckTest::Issues100VCardListing [GOOD] >> THealthCheckTest::Issues100GroupsMerging >> THealthCheckTest::IgnoreServerlessWhenNotSpecific [GOOD] >> TFlatTest::RejectByPerShardReadSize >> THealthCheckTest::HealthCheckConfigUpdate >> KqpJoinOrder::TPCDS61+ColumnStore [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterAclChange [GOOD] Test command err: 2025-03-31T16:14:08.084898Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488007683004849171:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:14:08.084974Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:14:08.115678Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488007681312347651:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:14:08.116827Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00112a/r3tmp/tmpIB15zI/pdisk_1.dat 2025-03-31T16:14:08.512515Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:14:08.512615Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2025-03-31T16:14:08.515419Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:14:08.515500Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:14:08.518265Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:14:08.519198Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:14:08.520134Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9651, node 1 2025-03-31T16:14:08.596928Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:14:08.596958Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:14:08.611257Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:14:08.613979Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:14:08.614002Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:14:08.614015Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:14:08.614151Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15198 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:14:08.927846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:14:11.448920Z node 2 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-03-31T16:14:11.449064Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-03-31T16:14:11.449095Z node 2 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2025-03-31T16:14:11.449333Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7488007694197249830:2306], Start check tables existence, number paths: 2 2025-03-31T16:14:11.450701Z node 2 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 2 2025-03-31T16:14:11.452045Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7488007694197249830:2306], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-03-31T16:14:11.452157Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7488007694197249830:2306], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-03-31T16:14:11.452230Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7488007694197249830:2306], Successfully finished 2025-03-31T16:14:11.452300Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-03-31T16:14:11.478656Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-03-31T16:14:11.478858Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7488007695889751930:2333], Start check tables existence, number paths: 2 2025-03-31T16:14:11.478928Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-03-31T16:14:11.478942Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2025-03-31T16:14:11.478970Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 2 2025-03-31T16:14:11.483034Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OTQ2MTNhOTEtOWUwNTJjYWEtZmE2ZDc2YWEtOGMzODQyMWI=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id OTQ2MTNhOTEtOWUwNTJjYWEtZmE2ZDc2YWEtOGMzODQyMWI= 2025-03-31T16:14:11.483636Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OTQ2MTNhOTEtOWUwNTJjYWEtZmE2ZDc2YWEtOGMzODQyMWI=, ActorId: [1:7488007695889751946:2334], ActorState: unknown state, session actor bootstrapped 2025-03-31T16:14:11.483748Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7488007695889751930:2333], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-03-31T16:14:11.483773Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7488007695889751930:2333], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-03-31T16:14:11.483795Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7488007695889751930:2333], Successfully finished 2025-03-31T16:14:11.490739Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-03-31T16:14:11.504380Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007695889751948:2514], DatabaseId: Root, 
PoolId: sample_pool_id, Start pool creating 2025-03-31T16:14:11.508142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:14:11.515864Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007695889751948:2514], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2025-03-31T16:14:11.517034Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007695889751948:2514], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2025-03-31T16:14:11.533112Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007695889751948:2514], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:14:11.592383Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007695889751948:2514], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-03-31T16:14:11.597435Z node 1 :TX_PROXY ERROR: Actor# [1:7488007695889752026:2573] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:14:11.597551Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007695889751948:2514], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2025-03-31T16:14:11.600201Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZDg1MzI0MTgtNjdlM2ViZDgtNjhmMTdmNjUtYzE5NmE1MmY=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ZDg1MzI0MTgtNjdlM2ViZDgtNjhmMTdmNjUtYzE5NmE1MmY= 2025-03-31T16:14:11.600304Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZDg1MzI0MTgtNjdlM2ViZDgtNjhmMTdmNjUtYzE5NmE1MmY=, ActorId: [1:7488007695889752039:2335], ActorState: unknown state, session actor bootstrapped 2025-03-31T16:14:11.600456Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2025-03-31T16:14:11.600495Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root 2025-03-31T16:14:11.600563Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZDg1MzI0MTgtNjdlM2ViZDgtNjhmMTdmNjUtYzE5NmE1MmY=, ActorId: [1:7488007695889752039:2335], ActorState: ReadyState, TraceId: 01jqpfpqmg24c7tvcwt6akt0hn, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [1:7488007695889752035:2581] database: Root databaseId: /Root pool id: sample_pool_id 2025-03-31T16:14:11.600574Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007695889752041:2336], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-03-31T16:14:11.600644Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved new request from [1:7488007695889752039:2335], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=ZDg1MzI0MTgtNjdlM2ViZDgtNjhmMTdmNjUtYzE5NmE1MmY= 2025-03-31T16:14:11.600685Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7488007695889752042:2337], Database: /Root, Start database fetching 2025-03-31T16:14:11.600848Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7488007695889752042:2337], Database: /Root, Database info successfully fetched, serverless: 0 2025-03-31T16:14:11.600922Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched database info, DatabaseId: /Root, Serverless: 0 2025-03-31T16:14:11.600976Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [1:7488007695889752048:2338], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=ZDg1MzI0MTgtNjdlM2ViZDgtNjhmMTdmNjUtYzE5NmE1MmY=, Start pool fetching 2025-03-31T16:14:11.601001Z nod ... 
94037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:15:53.522744Z node 12 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9702, node 12 2025-03-31T16:15:53.529154Z node 12 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:15:53.529207Z node 12 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:15:53.565478Z node 12 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:15:53.565503Z node 12 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:15:53.565516Z node 12 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:15:53.565670Z node 12 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31799 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:15:54.013570Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:15:57.480117Z node 12 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-03-31T16:15:57.480319Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-03-31T16:15:57.480350Z node 12 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2025-03-31T16:15:57.480415Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [12:7488008151983623655:2330], Start check tables existence, number paths: 2 2025-03-31T16:15:57.480498Z node 12 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 1 2025-03-31T16:15:57.483800Z node 12 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=12&id=NDZjNDkwM2UtOTNjMzRjMDEtZmEyNzg3ZWItMWE0MjBlZTM=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id NDZjNDkwM2UtOTNjMzRjMDEtZmEyNzg3ZWItMWE0MjBlZTM= 2025-03-31T16:15:57.485422Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [12:7488008151983623655:2330], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-03-31T16:15:57.485494Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [12:7488008151983623655:2330], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-03-31T16:15:57.485543Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [12:7488008151983623655:2330], Successfully finished 2025-03-31T16:15:57.485635Z node 12 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=12&id=NDZjNDkwM2UtOTNjMzRjMDEtZmEyNzg3ZWItMWE0MjBlZTM=, ActorId: [12:7488008151983623671:2331], ActorState: unknown state, session actor bootstrapped 2025-03-31T16:15:57.485849Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-03-31T16:15:57.486360Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7488008151983623680:2301], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-03-31T16:15:57.491512Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:15:57.492882Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7488008151983623680:2301], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2025-03-31T16:15:57.493082Z node 12 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7488008151983623680:2301], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2025-03-31T16:15:57.501635Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7488008151983623680:2301], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:15:57.580529Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7488008151983623680:2301], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-03-31T16:15:57.584022Z node 12 :TX_PROXY ERROR: Actor# [12:7488008151983623732:2334] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:15:57.584129Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7488008151983623680:2301], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2025-03-31T16:15:57.584434Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: Root, PoolId: sample_pool_id 2025-03-31T16:15:57.584456Z node 12 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id Root 2025-03-31T16:15:57.584537Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7488008151983623739:2332], DatabaseId: Root, PoolId: sample_pool_id, Start pool fetching 2025-03-31T16:15:57.586165Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7488008151983623739:2332], DatabaseId: Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-03-31T16:15:57.586280Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: Root 2025-03-31T16:15:57.586321Z node 12 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new handler for pool /Root/sample_pool_id 2025-03-31T16:15:57.586612Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [12:7488008151983623748:2333], DatabaseId: Root, PoolId: sample_pool_id, Subscribed on schemeboard notifications for path: [OwnerId: 72057594046644480, LocalPathId: 5] 2025-03-31T16:15:57.587729Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [12:7488008151983623748:2333], DatabaseId: Root, PoolId: sample_pool_id, Got watch notification 2025-03-31T16:15:57.594580Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: default 2025-03-31T16:15:57.594616Z node 12 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root 2025-03-31T16:15:57.594660Z node 12 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=12&id=NDZjNDkwM2UtOTNjMzRjMDEtZmEyNzg3ZWItMWE0MjBlZTM=, ActorId: [12:7488008151983623671:2331], ActorState: ReadyState, TraceId: 01jqpfsz4se8h6h2xvtt6m2drb, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DDL text: GRANT ALL ON `/Root/.metadata/workload_manager/pools/sample_pool_id` TO `test@user`; rpcActor: [0:0:0] database: /Root databaseId: /Root pool id: default 2025-03-31T16:15:57.594680Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7488008151983623760:2335], DatabaseId: /Root, PoolId: default, Start pool fetching 2025-03-31T16:15:57.596014Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7488008151983623760:2335], DatabaseId: /Root, PoolId: 
default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:57.596108Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:57.610446Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:15:57.612378Z node 12 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=12&id=NDZjNDkwM2UtOTNjMzRjMDEtZmEyNzg3ZWItMWE0MjBlZTM=, ActorId: [12:7488008151983623671:2331], ActorState: ExecuteState, TraceId: 01jqpfsz4se8h6h2xvtt6m2drb, Cleanup start, isFinal: 0 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [12:7488008151983623769:2331] WorkloadServiceCleanup: 0 2025-03-31T16:15:57.612766Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [12:7488008151983623748:2333], DatabaseId: Root, PoolId: sample_pool_id, Got watch notification 2025-03-31T16:15:57.614385Z node 12 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=12&id=NDZjNDkwM2UtOTNjMzRjMDEtZmEyNzg3ZWItMWE0MjBlZTM=, ActorId: [12:7488008151983623671:2331], ActorState: CleanupState, TraceId: 01jqpfsz4se8h6h2xvtt6m2drb, EndCleanup, isFinal: 0 2025-03-31T16:15:57.614443Z node 12 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=12&id=NDZjNDkwM2UtOTNjMzRjMDEtZmEyNzg3ZWItMWE0MjBlZTM=, ActorId: [12:7488008151983623671:2331], ActorState: CleanupState, TraceId: 01jqpfsz4se8h6h2xvtt6m2drb, Sent query response back to proxy, proxyRequestId: 3, proxyId: [12:7488008134803754059:2270] 2025-03-31T16:15:57.622913Z node 12 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=12&id=NDZjNDkwM2UtOTNjMzRjMDEtZmEyNzg3ZWItMWE0MjBlZTM=, ActorId: [12:7488008151983623671:2331], ActorState: ReadyState, Session closed due to explicit close event 2025-03-31T16:15:57.622983Z node 12 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=12&id=NDZjNDkwM2UtOTNjMzRjMDEtZmEyNzg3ZWItMWE0MjBlZTM=, ActorId: [12:7488008151983623671:2331], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-03-31T16:15:57.623035Z node 12 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=12&id=NDZjNDkwM2UtOTNjMzRjMDEtZmEyNzg3ZWItMWE0MjBlZTM=, ActorId: [12:7488008151983623671:2331], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-03-31T16:15:57.623071Z node 12 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=12&id=NDZjNDkwM2UtOTNjMzRjMDEtZmEyNzg3ZWItMWE0MjBlZTM=, ActorId: [12:7488008151983623671:2331], ActorState: unknown state, Cleanup temp tables: 0 2025-03-31T16:15:57.623204Z node 12 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=12&id=NDZjNDkwM2UtOTNjMzRjMDEtZmEyNzg3ZWItMWE0MjBlZTM=, ActorId: [12:7488008151983623671:2331], ActorState: unknown state, Session actor destroyed ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::OutOfDiskSpace [GOOD] Test command err: 2025-03-31T16:15:56.431471Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008144855678059:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:15:56.431640Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001d08/r3tmp/tmp5wn1Z8/pdisk_1.dat 2025-03-31T16:15:56.769395Z node 1 :IMPORT WARN: Table profiles were not loaded 
2025-03-31T16:15:56.825377Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:15:56.825466Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:15:56.827477Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:63433 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:15:57.078010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:15:57.100417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:15:57.292343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... proxy error code: Unknown error:
: Error: Resolve failed for table: /dc-1/Table, error: column 'value' not exist, code: 200400 2025-03-31T16:15:57.314277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:15:57.331886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:15:57.349558Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 waiting... proxy error code: Unknown error:
:5:24: Error: At function: AsList
:5:32: Error: At function: SetResult
:4:27: Error: At function: SelectRow
:4:27: Error: Mismatch of key columns count for table [/dc-1/Table], expected: 2, but got 1., code: 2028 >> THealthCheckTest::ServerlessWithExclusiveNodesWhenTroublesWithSharedNodes [GOOD] >> THealthCheckTest::ShardsLimit905 >> THealthCheckTest::NoStoragePools [GOOD] >> THealthCheckTest::NoBscResponse >> Viewer::TabletMerging [GOOD] >> Viewer::StorageGroupOutputWithoutFilterNoDepends >> TFlatTest::SelectRangeReverseExcludeKeys [GOOD] >> TConsoleTests::TestRemoveAttributes [GOOD] >> TConsoleTests::TestRemoveAttributesExtSubdomain >> THealthCheckTest::StorageLimit50 [GOOD] >> THealthCheckTest::SpecificServerlessWithExclusiveNodes >> TFlatTest::CopyTableAndAddFollowers [GOOD] >> TFlatTest::CopyCopiedTableAndDropFirstCopy >> TCancelTx::CrossShardReadOnly >> TFlatTest::PartBloomFilter [GOOD] >> TFlatTest::SelectRangeForbidNullArgs2 >> THealthCheckTest::YellowIssueReadyVDisksOnFaultyPDisks [GOOD] >> THealthCheckTest::TestTabletIsDead ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeReverseExcludeKeys [GOOD] Test command err: 2025-03-31T16:15:56.431418Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008145881412081:2198];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:15:56.431574Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001d09/r3tmp/tmp7xCl0e/pdisk_1.dat 2025-03-31T16:15:56.763794Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:15:56.813618Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:15:56.814223Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:15:56.816898Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12307 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:15:57.073473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-03-31T16:15:57.113035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:15:59.010500Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008157930650627:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:15:59.010572Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001d09/r3tmp/tmpkUH7aY/pdisk_1.dat 2025-03-31T16:15:59.105975Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:15:59.145985Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:15:59.146080Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:15:59.147945Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29576 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:15:59.268480Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:15:59.286267Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS61+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 8383, MsgBus: 10090 2025-03-31T16:14:18.311212Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488007727404799423:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:14:18.311404Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001997/r3tmp/tmpZ5XMQp/pdisk_1.dat 2025-03-31T16:14:18.737447Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:14:18.742704Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:14:18.742793Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:14:18.744832Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8383, node 1 2025-03-31T16:14:18.824332Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:14:18.824360Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:14:18.824369Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:14:18.824508Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10090 TClient is connected to server localhost:10090 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:14:19.350610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:14:19.372953Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:14:21.502607Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007740289701987:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:21.502972Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007740289701982:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:21.503042Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:21.505857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:14:21.518838Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007740289701996:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:14:21.603623Z node 1 :TX_PROXY ERROR: Actor# [1:7488007740289702047:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:14:21.869358Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:14:22.067912Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488007740289702321:2357];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:14:22.067922Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488007740289702309:2351];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:14:22.068160Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488007740289702309:2351];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:14:22.068411Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488007740289702309:2351];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:14:22.068525Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488007740289702309:2351];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:14:22.068624Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488007740289702309:2351];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:14:22.068721Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488007740289702309:2351];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:14:22.068839Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488007740289702309:2351];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:14:22.068865Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488007740289702321:2357];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:14:22.068964Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488007740289702309:2351];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:14:22.069044Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7488007740289702321:2357];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:14:22.069072Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488007740289702309:2351];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:14:22.069151Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488007740289702321:2357];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:14:22.069172Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488007740289702309:2351];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:14:22.069288Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488007740289702321:2357];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:14:22.069288Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488007740289702309:2351];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:14:22.069396Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488007740289702321:2357];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:14:22.069411Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488007740289702309:2351];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:14:22.069478Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488007740289702321:2357];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:14:22.069565Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488007740289702321:2357];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:14:22.069655Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488007740289702321:2357];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:14:22.069743Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488007740289702321:2357];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:14:22.069832Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488007740289702321:2357];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:14:22.069918Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7488007740289702321:2357];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:14:22.102939Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488007740289702307:2350];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:14:22.103007Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488007740289702307:2350];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstr ... 710714; 2025-03-31T16:15:23.982491Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039287;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:23.983038Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039313;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:23.988008Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039291;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:23.988309Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:23.993587Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039321;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:23.993855Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039225;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:23.999078Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:23.999309Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:24.004753Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039315;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:24.004756Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039269;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:24.010228Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039305;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:24.010711Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:24.015775Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039289;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:24.016163Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039341;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:24.021636Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039285;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:24.021892Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:24.027684Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:24.027692Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039367;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:24.033164Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:24.033591Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:24.038850Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:24.038959Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039271;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:24.044882Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039323;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:24.044966Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039325;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:24.049858Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039237;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:24.050082Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:24.055236Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:24.055815Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039343;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:24.060513Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:24.061390Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:24.066226Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:24.066480Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:24.072075Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:24.072077Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:24.077826Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039253;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:24.078504Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:24.083664Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:24.209149Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpfr3g60fb0d6ncqz7nnav1", SessionId: ydb://session/3?node_id=1&id=ZDI2ZmIyMDAtNzhmNzlkZDYtN2NlNjVhMWUtNjVhNTJlZWM=, Slow query, duration: 27.689896s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:15:24.765042Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;self_id=[1:7488007980807927342:10719];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039392;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039094;receive=72075186224038933; 2025-03-31T16:15:24.765114Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:15:24.765547Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:15:24.765834Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:15:52.301426Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpfschp2fcmcr2nv0by7n8c", SessionId: ydb://session/3?node_id=1&id=ZDI2ZmIyMDAtNzhmNzlkZDYtN2NlNjVhMWUtNjVhNTJlZWM=, Slow query, duration: 13.750530s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "pragma TablePathPrefix = \"/Root/test/ds/\";\n\n-- NB: Subquerys\n-- start query 1 in stream 0 using template query61.tpl and seed 1930872976\nselect promotions,total,cast(promotions as float)/cast(total as float)*100\nfrom\n (select sum(ss_ext_sales_price) promotions\n from store_sales\n cross join store\n cross join promotion\n cross join date_dim\n cross join customer\n cross join customer_address\n cross join item\n where ss_sold_date_sk = d_date_sk\n and ss_store_sk = s_store_sk\n and ss_promo_sk = p_promo_sk\n and ss_customer_sk= c_customer_sk\n and ca_address_sk = c_current_addr_sk\n and ss_item_sk = i_item_sk\n and ca_gmt_offset = -6\n and i_category = 'Sports'\n and (p_channel_dmail = 'Y' or p_channel_email = 'Y' or p_channel_tv = 'Y')\n and s_gmt_offset = -6\n and d_year = 2001\n and d_moy = 12) promotional_sales cross join\n (select sum(ss_ext_sales_price) total\n from store_sales\n cross join store\n cross join date_dim\n cross join customer\n cross join customer_address\n cross join item\n where ss_sold_date_sk = d_date_sk\n and ss_store_sk = s_store_sk\n and ss_customer_sk= c_customer_sk\n and ca_address_sk = c_current_addr_sk\n and ss_item_sk = i_item_sk\n and ca_gmt_offset = -6\n and i_category = 'Sports'\n and s_gmt_offset = -6\n and d_year = 2001\n and d_moy = 12) all_sales\norder by promotions, total\nlimit 100;\n", parameters: 0b >> KqpJoinOrder::CanonizedJoinOrderTPCH13 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::PartBloomFilter [GOOD] Test command err: 2025-03-31T16:15:57.534846Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008151698488709:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:15:57.534981Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001d07/r3tmp/tmpCN1M9E/pdisk_1.dat 2025-03-31T16:15:57.777343Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:15:57.888723Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:15:57.888847Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:15:57.890651Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:61604 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:15:58.020959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... waiting... waiting... waiting... TClient::Ls request: /dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743437758076 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 15 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 15 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 13 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "A" PathId: 7 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710663 CreateStep: 1743437758125 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "B" PathId: 4 Sche... (TRUNCATED) 2025-03-31T16:15:59.963659Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008160355710028:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:15:59.963736Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001d07/r3tmp/tmpNn7z0c/pdisk_1.dat 2025-03-31T16:16:00.064373Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:00.111045Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:00.111173Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:00.112968Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13309 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:00.274033Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:00.285465Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:00.752555Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715719:0, at schemeshard: 72057594046644480 waiting... >> Viewer::TenantInfo5kkTablets [GOOD] >> Viewer::UseTransactionWhenExecuteDataActionQuery >> KqpJoinOrder::CanonizedJoinOrderTPCH21 [GOOD] >> Viewer::SharedDoesntShowExclusiveNodes [GOOD] >> Viewer::ServerlessWithExclusiveNodesCheckTable >> TFlatTest::CopyCopiedTableAndDropFirstCopy [GOOD] >> THealthCheckTest::DontIgnoreServerlessWithExclusiveNodesWhenNotSpecific [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::Plan2SvgBad [GOOD] Test command err: test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-03-31T16:14:59.590185Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:14:59.872036Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:14:59.956718Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:14:59.956862Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:14:59.963344Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25628, node 1 2025-03-31T16:15:00.604704Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:15:00.604740Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:15:00.604755Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:15:00.604919Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18484 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:15:01.938618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:15:02.084690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:15:02.089502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:15:02.103328Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-03-31T16:15:03.774057Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007920341996588:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:03.774156Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007920341996600:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:03.774222Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:03.780844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-03-31T16:15:03.795689Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2025-03-31T16:15:03.796237Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007920341996602:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-03-31T16:15:03.918176Z node 1 :TX_PROXY ERROR: Actor# [1:7488007920341996653:2364] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:15:07.346462Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488007935554008235:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:15:07.346550Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-03-31T16:15:07.533599Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:15:07.556089Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:15:07.556186Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:15:07.557696Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31504, node 2 2025-03-31T16:15:07.753070Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:15:07.753112Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:15:07.753123Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:15:07.753263Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8892 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:15:08.124190Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:15:08.132362Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:15:08.152914Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-31T16:15:08.157215Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:15:08.166094Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2025-03-31T16:15:11.539470Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488007952733878123:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:11.539639Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488007952733878131:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:11.539745Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:11.544973Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480 2025-03-31T16:15:11.555649Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488007952733878137:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-03-31T16:15:11.657322Z node 2 :TX_PROXY ERROR: Actor# [2:7488007952733878188:2360] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-03-31T16:15:13.852392Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:13.884058Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:15:13.914827Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:15:13.914927Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:15:13.917185Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1807, node 3 2025-03-31T16:15:13.988167Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:15:13.988193Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:15:13.988203Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:15:13.988334Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26492 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 ... unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:15:27.512807Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-31T16:15:27.516053Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:15:30.891766Z node 5 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-31T16:15:30.891841Z node 5 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-31T16:15:31.013401Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7488008041189536907:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:31.013492Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7488008041189536899:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:31.014006Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:31.018974Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480 2025-03-31T16:15:31.029820Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7488008041189536913:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-03-31T16:15:31.092319Z node 5 :TX_PROXY ERROR: Actor# [5:7488008041189536964:2358] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:15:31.240336Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:15:31.341740Z node 5 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-31T16:15:31.341797Z node 5 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-31T16:15:31.486696Z node 5 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-31T16:15:31.486741Z node 5 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-31T16:15:31.561857Z node 5 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-31T16:15:31.561928Z node 5 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-31T16:15:31.679913Z node 5 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-31T16:15:31.679976Z node 5 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-31T16:15:31.754383Z node 5 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-31T16:15:31.754441Z node 5 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-31T16:15:31.757800Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7488008019714699718:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:15:31.757881Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:15:31.835633Z node 5 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-31T16:15:31.835688Z node 5 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-31T16:15:31.917772Z node 5 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-31T16:15:31.917838Z node 5 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-31T16:15:32.000019Z node 5 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-31T16:15:32.000075Z node 5 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-31T16:15:32.070568Z node 5 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-31T16:15:32.070616Z node 5 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-31T16:15:32.148127Z node 5 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-31T16:15:32.148181Z node 5 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-31T16:15:32.223262Z node 5 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-31T16:15:32.223315Z node 5 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-31T16:15:32.299367Z node 5 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: 
test_ydb_token /Root 1 2025-03-31T16:15:32.299422Z node 5 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-31T16:15:32.381480Z node 5 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-31T16:15:32.381544Z node 5 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-31T16:15:32.460052Z node 5 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-31T16:15:32.460134Z node 5 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-31T16:15:32.535448Z node 5 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-31T16:15:32.535499Z node 5 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-31T16:15:32.633934Z node 5 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-31T16:15:32.633982Z node 5 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-31T16:15:32.642115Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2025-03-31T16:15:32.643364Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480 2025-03-31T16:15:32.645175Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:0, at schemeshard: 72057594046644480 2025-03-31T16:15:34.134952Z node 5 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-31T16:15:34.135032Z node 5 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success assertion failed at ydb/core/viewer/viewer_ut.cpp:1948, virtual void NTestSuiteViewer::TTestCaseQueryExecuteScript::Execute_(NUnitTest::TTestContext &): (json.GetMap().contains("metadata")) {} TBackTrace::Capture()+28 (0x18DF0F1C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x192ACDE0) NTestSuiteViewer::TTestCaseQueryExecuteScript::Execute_(NUnitTest::TTestContext&)+8947 (0x18984533) std::__y1::__function::__func, void ()>::operator()()+280 (0x1899A2A8) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x192E3E06) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x192B3959) NTestSuiteViewer::TCurrentTest::Execute()+1204 (0x18999154) NUnitTest::TTestFactory::Execute()+2438 (0x192B5226) NUnitTest::RunMain(int, char**)+5213 (0x192DE37D) ??+0 (0x7F3711B6ED90) __libc_start_main+128 (0x7F3711B6EE40) _start+41 (0x16292029) 2025-03-31T16:15:37.656486Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7488008064655187371:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:15:37.656640Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-03-31T16:15:37.808686Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:15:37.841551Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:15:37.841679Z node 6 
:HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:15:37.843676Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32085, node 6 2025-03-31T16:15:37.889336Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:15:37.889367Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:15:37.889378Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:15:37.889554Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8369 2025-03-31T16:15:42.463580Z node 6 :TICKET_PARSER ERROR: Ticket **** (8C3E2D8D): Could not find correct token validator 2025-03-31T16:15:44.844228Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7488008094387795098:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:15:44.844384Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-03-31T16:15:45.015413Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:15:45.043019Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:15:45.043175Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:15:45.045383Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24874, node 7 2025-03-31T16:15:45.103156Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:15:45.103185Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:15:45.103198Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:15:45.103392Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1493 2025-03-31T16:15:49.407957Z node 7 :TICKET_PARSER ERROR: Ticket **** (8C3E2D8D): Could not find correct token validator >> THealthCheckTest::ProtobufUnderLimitFor70LargeVdisksIssues [GOOD] >> THealthCheckTest::ProtobufUnderLimitFor100LargeVdisksIssues >> TLocksTest::UpdateLockedKey >> TFlatTest::SelectRangeForbidNullArgs2 [GOOD] >> TFlatTest::SelectRangeForbidNullArgs3 >> THealthCheckTest::ShardsNoLimit [GOOD] >> THealthCheckTest::HealthCheckConfigUpdate [GOOD] >> TCancelTx::CrossShardReadOnly [GOOD] >> TCancelTx::CrossShardReadOnlyWithReadSets >> TFlatTest::SplitEmptyAndWrite >> TObjectStorageListingTest::MaxKeysAndSharding ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::DontIgnoreServerlessWithExclusiveNodesWhenNotSpecific [GOOD] Test command err: 2025-03-31T16:15:39.465347Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008074003041075:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:15:39.465408Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d5a/r3tmp/tmpRPGpcZ/pdisk_1.dat 2025-03-31T16:15:39.736224Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2248, node 1 2025-03-31T16:15:39.792552Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:15:39.792579Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:15:39.792609Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:15:39.792739Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:15:39.822254Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:15:39.822398Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:15:39.825343Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15977 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:15:40.056835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
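
Note: the only hard failure in this stretch is the assertion at ydb/core/viewer/viewer_ut.cpp:1948 a few records above — the QueryExecuteScript test requires the handler's JSON response to contain a "metadata" key, and it did not. A minimal reconstruction of that check (assumption: body holds the raw handler response; only the asserted expression is taken verbatim from the backtrace):

    #include <library/cpp/json/json_reader.h>
    #include <library/cpp/testing/unittest/registar.h>

    void CheckScriptResponseHasMetadata(const TString& body) {
        NJson::TJsonValue json;
        UNIT_ASSERT(NJson::ReadJsonTree(body, &json));     // response must parse as JSON
        UNIT_ASSERT(json.GetMap().contains("metadata"));   // the check that failed at line 1948
    }
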
2025-03-31T16:15:42.413473Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008085458117279:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:15:42.413565Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d5a/r3tmp/tmpPLYJE2/pdisk_1.dat 2025-03-31T16:15:42.521645Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62642, node 2 2025-03-31T16:15:42.549381Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:15:42.549459Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:15:42.550847Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:15:42.555276Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:15:42.555310Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:15:42.555320Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:15:42.555445Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20122 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:15:42.678122Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:15:50.603625Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:284:2217], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:50.604051Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:15:50.604216Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:50.605160Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:700:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:50.605369Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:50.605456Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d5a/r3tmp/tmp3usNca/pdisk_1.dat 2025-03-31T16:15:50.855250Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62895, node 3 TClient is connected to server localhost:9312 2025-03-31T16:15:51.190752Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:15:51.190791Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:15:51.190818Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:15:51.191357Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:15:57.838038Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:698:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:57.838484Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:57.838711Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:15:57.840560Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:695:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:57.840936Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:57.841083Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d5a/r3tmp/tmpSeoolx/pdisk_1.dat 2025-03-31T16:15:58.103940Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29483, node 5 TClient is connected to server localhost:26958 2025-03-31T16:15:58.543123Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:15:58.543186Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:15:58.543218Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:15:58.543656Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration self_check_result: GOOD issue_log { id: "YELLOW-f489-1231c6b1" status: YELLOW message: "Database has compute issues" location { database { name: "/Root" } } reason: "YELLOW-1ba8-1231c6b1" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-1ba8-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-e9e2-1231c6b1-5" reason: "YELLOW-e9e2-1231c6b1-6" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-e9e2-1231c6b1-5" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 5 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-e9e2-1231c6b1-6" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 6 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } location { id: 5 host: "::1" port: 12001 } 2025-03-31T16:16:02.995001Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:528:2414], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:16:02.995276Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:16:02.995371Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d5a/r3tmp/tmpCSPRXQ/pdisk_1.dat 2025-03-31T16:16:03.294296Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14928, node 7 TClient is connected to server localhost:3409 2025-03-31T16:16:03.776777Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:16:03.776865Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:16:03.776921Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:16:03.777226Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH13 [GOOD] Test command err: Trying to start YDB, gRPC: 2534, MsgBus: 16044 2025-03-31T16:14:48.382714Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488007853672682755:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:14:48.387231Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001978/r3tmp/tmpqva4Pr/pdisk_1.dat 2025-03-31T16:14:48.767170Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:14:48.808575Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected TServer::EnableGrpc on GrpcPort 2534, node 1 2025-03-31T16:14:48.808656Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:14:48.810288Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:14:48.869265Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:14:48.869291Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:14:48.869299Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:14:48.869404Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16044 TClient is connected to server localhost:16044 WaitRootIsUp 'Root'... 
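
Note: in the self_check_result above, the issue_log entries form a small tree: the DATABASE-level issue cites the COMPUTE issue through its reason field, which in turn cites the per-node LOAD_AVERAGE issues, and level records the depth. A std-only C++ sketch of walking such a structure (the structs are simplified stand-ins for the Ydb.Monitoring protobuf messages, not the real types):

    #include <iostream>
    #include <map>
    #include <string>
    #include <vector>

    struct TIssue {
        std::string Status;                  // e.g. "YELLOW"
        std::string Message;
        std::vector<std::string> Reasons;    // ids of the issues this one cites
    };

    // Depth-first print; the recursion depth corresponds to the "level" field.
    void PrintIssue(const std::map<std::string, TIssue>& log,
                    const std::string& id, int depth = 0) {
        const TIssue& issue = log.at(id);
        std::cout << std::string(depth * 2, ' ')
                  << issue.Status << ": " << issue.Message << "\n";
        for (const auto& reason : issue.Reasons) {
            PrintIssue(log, reason, depth + 1);
        }
    }

    int main() {
        // Abbreviated ids from the issue_log above.
        const std::map<std::string, TIssue> log = {
            {"YELLOW-f489", {"YELLOW", "Database has compute issues", {"YELLOW-1ba8"}}},
            {"YELLOW-1ba8", {"YELLOW", "Compute is overloaded",
                             {"YELLOW-e9e2-5", "YELLOW-e9e2-6"}}},
            {"YELLOW-e9e2-5", {"YELLOW", "LoadAverage above 100% (node 5)", {}}},
            {"YELLOW-e9e2-6", {"YELLOW", "LoadAverage above 100% (node 6)", {}}},
        };
        PrintIssue(log, "YELLOW-f489");
    }
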
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:14:49.387521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:14:51.444371Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007866557585309:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:51.444470Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007866557585317:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:51.444536Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:51.448973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:14:51.465710Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007866557585323:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:14:51.566857Z node 1 :TX_PROXY ERROR: Actor# [1:7488007866557585374:2337] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:14:51.924224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:14:52.177247Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488007870852552917:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:14:52.177424Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488007870852552917:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:14:52.177675Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488007870852552917:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:14:52.177789Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488007870852552917:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:14:52.177877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488007870852552917:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:14:52.177970Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488007870852552917:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:14:52.178056Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488007870852552917:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:14:52.178158Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488007870852552917:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:14:52.178264Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488007870852552917:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:14:52.178361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488007870852552917:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:14:52.178450Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7488007870852552917:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:14:52.178541Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488007870852552917:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:14:52.201246Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488007870852552926:2350];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:14:52.201304Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488007870852552926:2350];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:14:52.201520Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488007870852552926:2350];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:14:52.201626Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488007870852552926:2350];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:14:52.201728Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488007870852552926:2350];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:14:52.201826Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488007870852552926:2350];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:14:52.201915Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488007870852552926:2350];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:14:52.202011Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488007870852552926:2350];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:14:52.202111Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488007870852552926:2350];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:14:52.202209Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488007870852552926:2350];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:14:52.202355Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488007870852552926:2350];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:14:52.202477Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7488007870852552926:2350];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:14:52.209831Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488007870852552930:2351];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:14:52.209891Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488007870852552930:2351];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:14:52.210096Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_ ... tablet_id=72075186224039337;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:53.027162Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:53.028251Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039289;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:53.032890Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039335;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:53.033462Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039317;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:53.037616Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039237;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:53.039543Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039299;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:53.042012Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039229;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:53.045536Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:53.045956Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:53.050284Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:53.051527Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039323;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:53.054614Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039321;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:53.057379Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039329;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:53.059551Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039285;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:53.063523Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039345;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:53.063959Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:53.068775Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:53.069646Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:53.075403Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039303;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:53.081174Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:53.086932Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:53.092885Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039339;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:53.098587Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:53.105052Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:53.110801Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:53.116367Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039319;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:53.120534Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039307;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:53.125091Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:53.129988Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:53.133789Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039333;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:53.137780Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:53.143276Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:53.148907Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:53.149707Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:53.155823Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039269;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:53.156112Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039253;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:53.161827Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039343;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:53.161895Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:53.167309Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:53.169463Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:53.172450Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:53.176512Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:53.177321Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039287;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:53.181501Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039325;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:53.182516Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039295;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:53.187877Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:53.292885Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpfs2b431tfst04phxgvn65", SessionId: ydb://session/3?node_id=1&id=YTkyOTUwNDctYzY5ZDQ5ZGUtZDdhZWMxZDAtOTI5NDM5YmY=, Slow query, duration: 25.191637s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:15:53.498331Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:15:53.498333Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:15:53.499016Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::CopyCopiedTableAndDropFirstCopy [GOOD] Test command err: 2025-03-31T16:15:56.431393Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008146613923627:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:15:56.431565Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001d0a/r3tmp/tmpSYYpqS/pdisk_1.dat 2025-03-31T16:15:56.773998Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:15:56.846327Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:15:56.846514Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:15:56.848833Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3515 WaitRootIsUp 'dc-1'... 
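
Note: the KQP_SLOW_LOG record above (duration 25.191637s) carries the offending statement with escaped newlines; expanded for readability it is the following YQL (text verbatim from the log, only the \n escapes unfolded, indentation approximate):

    CREATE TABLE t1 (
        id1 Int32 NOT NULL,
        PRIMARY KEY (id1)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t2 (
        id2 Int64 NOT NULL,
        t1_id1 Int64 NOT NULL,
        -- random_field2 Int32
        PRIMARY KEY (id2)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t3 (
        id3 Int16 NOT NULL,
        -- random_field3 Int32
        PRIMARY KEY (id3)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);
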
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:15:57.073640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:15:57.114909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:15:57.311939Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.005s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-03-31T16:15:57.312251Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-03-31T16:15:57.332405Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2025-03-31T16:15:57.336318Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 Copy TableOld to Table 2025-03-31T16:15:57.429126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/dc-1/Dir" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table" CopyFromTable: "/dc-1/Dir/TableOld" } } TxId: 281474976710676 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-03-31T16:15:57.429360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCopyTable Propose, path: /dc-1/Dir/Table, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-31T16:15:57.429840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 2], parent name: Dir, child name: Table, child id: [OwnerId: 72057594046644480, LocalPathId: 4], at schemeshard: 72057594046644480 2025-03-31T16:15:57.429891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 0 2025-03-31T16:15:57.429903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction source path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-03-31T16:15:57.429932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 1 2025-03-31T16:15:57.429961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 2 2025-03-31T16:15:57.430091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 3 2025-03-31T16:15:57.430208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710676:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-31T16:15:57.432472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-03-31T16:15:57.432510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 4 2025-03-31T16:15:57.434072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710676, response: Status: StatusAccepted TxId: 281474976710676 SchemeshardId: 72057594046644480 PathId: 4, at schemeshard: 72057594046644480 2025-03-31T16:15:57.434204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710676, database: /dc-1, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /dc-1/Dir/Table 2025-03-31T16:15:57.434403Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-03-31T16:15:57.434421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710676, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2025-03-31T16:15:57.434570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710676, path id: [OwnerId: 72057594046644480, LocalPathId: 4] 2025-03-31T16:15:57.434679Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-03-31T16:15:57.434708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7488008146613923998:2241], at schemeshard: 72057594046644480, txId: 281474976710676, path id: 2 2025-03-31T16:15:57.434750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7488008146613923998:2241], at schemeshard: 72057594046644480, txId: 281474976710676, path id: 4 2025-03-31T16:15:57.434789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-31T16:15:57.434824Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710676:0 ProgressState, operation type: TxCopyTable, at tablet# 72057594046644480 2025-03-31T16:15:57.435135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710676:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 3 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 4 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-03-31T16:15:57.435307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710676:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 4 TabletType: DataShard 
FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 4 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } waiting... 2025-03-31T16:15:57.436790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710676:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:3 msg type: 268697601 2025-03-31T16:15:57.436871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710676:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:4 msg type: 268697601 2025-03-31T16:15:57.436973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710676, partId: 0, tablet: 72057594037968897 2025-03-31T16:15:57.436996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 281474976710676, shardIdx: 72057594046644480:3, partId: 0 2025-03-31T16:15:57.437012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 281474976710676, shardIdx: 72057594046644480:4, partId: 0 2025-03-31T16:15:57.441107Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710676 2025-03-31T16:15:57.441192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710676 2025-03-31T16:15:57.441201Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710676 2025-03-31T16:15:57.441217Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710676, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 6 2025-03-31T16:15:57.441235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-03-31T16:15:57.441441Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710676 2025-03-31T16:15:57.441486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710676 2025-03-31T16:15:57.441492Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710676 2025-03-31T16:15:57.441500Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710676, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 1 2025-03-31T16:15:57.441509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 5 2025-03-31T16:15:57.441547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710676, ready parts: 0/1, is published: true 2025-03-31T16:15:57.441680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 
281474976710676, at schemeshard: 72057594046644480 2025-03-31T16:15:57.441721Z nod ... MESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-03-31T16:16:02.879401Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-03-31T16:16:02.879682Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-03-31T16:16:02.879862Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2025-03-31T16:16:02.879996Z node 3 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2025-03-31T16:16:02.880033Z node 3 :TX_DATASHARD INFO: OnTabletStop: 72075186224037890 reason = ReasonStop 2025-03-31T16:16:02.880046Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-03-31T16:16:02.880158Z node 3 :TX_DATASHARD INFO: OnTabletStop: 72075186224037888 reason = ReasonStop 2025-03-31T16:16:02.880182Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-03-31T16:16:02.880189Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [3:7488008174513337959:2385], serverId# [3:7488008174513337960:2386], sessionId# [0:0:0] 2025-03-31T16:16:02.880204Z node 3 :TX_DATASHARD INFO: OnTabletStop: 72075186224037891 reason = ReasonStop 2025-03-31T16:16:02.880217Z node 3 :TX_DATASHARD INFO: OnTabletStop: 72075186224037889 reason = ReasonStop 2025-03-31T16:16:02.880323Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 2 Check that tablet 72075186224037892 was deleted 2025-03-31T16:16:02.880431Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-03-31T16:16:02.880537Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 1 2025-03-31T16:16:02.880628Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 2 paths in candidate queue, at schemeshard: 72057594046644480 2025-03-31T16:16:02.880650Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 4], at schemeshard: 72057594046644480 2025-03-31T16:16:02.880714Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-03-31T16:16:02.880738Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-03-31T16:16:02.880752Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-03-31T16:16:02.881027Z node 3 :TX_DATASHARD INFO: OnTabletDead: 
72075186224037891 2025-03-31T16:16:02.881880Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037891 not found 2025-03-31T16:16:02.881911Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037889 not found 2025-03-31T16:16:02.881924Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037888 not found 2025-03-31T16:16:02.881941Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037890 not found 2025-03-31T16:16:02.882025Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2 2025-03-31T16:16:02.882055Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-03-31T16:16:02.882180Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037891 2025-03-31T16:16:02.882286Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:1 2025-03-31T16:16:02.882305Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2025-03-31T16:16:02.882330Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2025-03-31T16:16:02.882338Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-03-31T16:16:02.882357Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-03-31T16:16:02.882377Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-03-31T16:16:02.882406Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-03-31T16:16:02.883815Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037889 2025-03-31T16:16:02.883892Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037889 2025-03-31T16:16:02.885296Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037888 2025-03-31T16:16:02.885354Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037888 2025-03-31T16:16:02.886321Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037890 2025-03-31T16:16:02.886367Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037890 2025-03-31T16:16:02.887236Z node 3 :TX_DATASHARD DEBUG: 72075186224037893 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-03-31T16:16:02.887307Z node 3 :TX_DATASHARD INFO: 72075186224037893 Initiating switch from PreOffline to Offline state 2025-03-31T16:16:02.888314Z node 3 :TX_DATASHARD INFO: 72075186224037892 Reporting state Offline to schemeshard 72057594046644480 2025-03-31T16:16:02.888542Z node 3 :TX_DATASHARD INFO: 72075186224037893 Reporting state Offline to schemeshard 72057594046644480 2025-03-31T16:16:02.888678Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7488008174513338305 RawX2: 4503612512274751 } TabletId: 72075186224037892 State: 4 2025-03-31T16:16:02.888720Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037892, state: Offline, at schemeshard: 
72057594046644480 2025-03-31T16:16:02.888854Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7488008174513338304 RawX2: 4503612512274750 } TabletId: 72075186224037893 State: 4 2025-03-31T16:16:02.888871Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037893, state: Offline, at schemeshard: 72057594046644480 2025-03-31T16:16:02.889022Z node 3 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037892 state Offline 2025-03-31T16:16:02.889051Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:5 hive 72057594037968897 at ss 72057594046644480 2025-03-31T16:16:02.889153Z node 3 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037893 state Offline 2025-03-31T16:16:02.889157Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:6 hive 72057594037968897 at ss 72057594046644480 2025-03-31T16:16:02.890484Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2025-03-31T16:16:02.890541Z node 3 :TX_DATASHARD INFO: OnTabletStop: 72075186224037892 reason = ReasonStop 2025-03-31T16:16:02.890561Z node 3 :TX_DATASHARD INFO: OnTabletStop: 72075186224037893 reason = ReasonStop 2025-03-31T16:16:02.890657Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 5] was 2 2025-03-31T16:16:02.890820Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 6 ShardOwnerId: 72057594046644480 ShardLocalIdx: 6, at schemeshard: 72057594046644480 2025-03-31T16:16:02.890833Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037892 2025-03-31T16:16:02.890886Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037892 2025-03-31T16:16:02.890929Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 5] was 1 2025-03-31T16:16:02.891054Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-03-31T16:16:02.891071Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 5], at schemeshard: 72057594046644480 2025-03-31T16:16:02.891120Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-03-31T16:16:02.891428Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037892 not found 2025-03-31T16:16:02.891451Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037893 not found 2025-03-31T16:16:02.891650Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:5 2025-03-31T16:16:02.891669Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:5 tabletId 72075186224037892 2025-03-31T16:16:02.891696Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:6 2025-03-31T16:16:02.891707Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:6 tabletId 72075186224037893 2025-03-31T16:16:02.891937Z node 3 
:FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-03-31T16:16:02.892234Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037893 2025-03-31T16:16:02.892294Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037893 2025-03-31T16:16:03.183070Z node 3 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037892) Check that tablet 72075186224037893 was deleted 2025-03-31T16:16:03.183395Z node 3 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037893) Check that tablet 72075186224037888 was deleted 2025-03-31T16:16:03.183787Z node 3 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037888) Check that tablet 72075186224037889 was deleted 2025-03-31T16:16:03.184162Z node 3 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037889) Check that tablet 72075186224037890 was deleted 2025-03-31T16:16:03.184426Z node 3 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037890) Check that tablet 72075186224037891 was deleted 2025-03-31T16:16:03.184697Z node 3 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037891) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::ShardsNoLimit [GOOD] Test command err: 2025-03-31T16:15:37.372591Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:37.373180Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:37.373391Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d65/r3tmp/tmp2dyisY/pdisk_1.dat 2025-03-31T16:15:37.924738Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18112, node 1 TClient is connected to server localhost:28264 2025-03-31T16:15:38.619481Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:15:38.619545Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:15:38.619571Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:15:38.620020Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:15:42.478454Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:42.478558Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:42.478707Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d65/r3tmp/tmpWfIZq6/pdisk_1.dat 2025-03-31T16:15:42.736657Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3355, node 3 TClient is connected to server localhost:15989 2025-03-31T16:15:43.070617Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:15:43.070677Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:15:43.070705Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:15:43.070904Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:15:49.235563Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:698:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:49.235872Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:49.236077Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:15:49.237579Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:695:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:49.237896Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:49.237995Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d65/r3tmp/tmpv1rilf/pdisk_1.dat 2025-03-31T16:15:49.536963Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26943, node 5 TClient is connected to server localhost:19287 2025-03-31T16:15:49.952648Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:15:49.952731Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:15:49.952787Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:15:49.953356Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:15:56.206607Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:413:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:56.206906Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:15:56.207286Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:56.208048Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:703:2356], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:56.208221Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:56.208279Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d65/r3tmp/tmp3hOS4T/pdisk_1.dat 2025-03-31T16:15:56.452091Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5435, node 7 TClient is connected to server localhost:25115 2025-03-31T16:15:56.762252Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:15:56.762315Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:15:56.762344Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:15:56.762556Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:16:03.860344Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:284:2217], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:16:03.860675Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:16:03.860783Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:16:03.861204Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [10:700:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:16:03.861442Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:16:03.861539Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d65/r3tmp/tmpndvBoR/pdisk_1.dat 2025-03-31T16:16:04.128337Z node 9 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13395, node 9 TClient is connected to server localhost:7026 2025-03-31T16:16:04.413151Z node 9 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:16:04.413191Z node 9 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:16:04.413211Z node 9 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:16:04.413714Z node 9 :NET_CLASSIFIER ERROR: got bad distributable configuration >> THealthCheckTest::Issues100GroupsMerging [GOOD] >> THealthCheckTest::Issues100VCardMerging ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::HealthCheckConfigUpdate [GOOD] Test command err: 2025-03-31T16:15:39.891776Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:700:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:39.892215Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:39.892364Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:15:39.894018Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:697:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:39.894370Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:39.894487Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d75/r3tmp/tmpKQWrK4/pdisk_1.dat 2025-03-31T16:15:40.246751Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62996, node 1 TClient is connected to server localhost:25178 2025-03-31T16:15:40.618536Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:15:40.618593Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:15:40.618626Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:15:40.618982Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:15:47.082584Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:284:2217], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:47.083045Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:15:47.083215Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:47.084167Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:700:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:47.084438Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:47.084564Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d75/r3tmp/tmp19dgW8/pdisk_1.dat 2025-03-31T16:15:47.371410Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28436, node 3 TClient is connected to server localhost:21510 2025-03-31T16:15:47.736205Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:15:47.736278Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:15:47.736331Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:15:47.736967Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration self_check_result: GOOD issue_log { id: "YELLOW-f489-1231c6b1" status: YELLOW message: "Database has compute issues" location { database { name: "/Root" } } reason: "YELLOW-1ba8-1231c6b1" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-1ba8-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-e9e2-1231c6b1-3" reason: "YELLOW-e9e2-1231c6b1-4" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-e9e2-1231c6b1-3" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 3 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-e9e2-1231c6b1-4" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 4 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } location { id: 3 host: "::1" port: 12001 } 2025-03-31T16:15:53.524510Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:698:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:53.524830Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:53.525035Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:15:53.526201Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:695:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:53.526496Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:53.526595Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d75/r3tmp/tmpgEcYBK/pdisk_1.dat 2025-03-31T16:15:53.820683Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16424, node 5 TClient is connected to server localhost:9965 2025-03-31T16:15:54.259582Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:15:54.259650Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:15:54.259691Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:15:54.260252Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration self_check_result: GOOD issue_log { id: "YELLOW-f489-1231c6b1" status: YELLOW message: "Database has compute issues" location { database { name: "/Root" } } reason: "YELLOW-1ba8-1231c6b1" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-1ba8-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-e9e2-1231c6b1-5" reason: "YELLOW-e9e2-1231c6b1-6" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-e9e2-1231c6b1-5" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 5 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-e9e2-1231c6b1-6" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 6 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } location { id: 5 host: "::1" port: 12001 } 2025-03-31T16:15:58.558233Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:58.558441Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:58.558538Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d75/r3tmp/tmp6wgMjm/pdisk_1.dat 2025-03-31T16:15:58.801339Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9008, node 7 TClient is connected to server localhost:1498 2025-03-31T16:15:59.201887Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:15:59.201955Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:15:59.201991Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:15:59.202257Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:16:03.728078Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:452:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:16:03.728211Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:16:03.728283Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d75/r3tmp/tmpptcfn4/pdisk_1.dat 2025-03-31T16:16:04.038911Z node 9 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61658, node 9 TClient is connected to server localhost:15562 2025-03-31T16:16:04.465235Z node 9 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:16:04.465294Z node 9 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:16:04.465325Z node 9 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:16:04.465771Z node 9 :NET_CLASSIFIER ERROR: got bad distributable configuration >> THealthCheckTest::SpecificServerlessWithExclusiveNodes [GOOD] >> TFlatTest::SelectBigRangePerf ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH21 [GOOD] Test command err: Trying to start YDB, gRPC: 61743, MsgBus: 15138 2025-03-31T16:14:44.686448Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488007837604894638:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:14:44.686479Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00197c/r3tmp/tmp0XxP3s/pdisk_1.dat 2025-03-31T16:14:45.217828Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:14:45.222722Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:14:45.222803Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:14:45.225309Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61743, node 1 2025-03-31T16:14:45.315367Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:14:45.315388Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:14:45.315399Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:14:45.315619Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15138 TClient is connected to server localhost:15138 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:14:45.838104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:14:47.785297Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007850489797207:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:47.785302Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007850489797199:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:47.785435Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:47.790591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:14:47.801289Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007850489797213:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:14:47.891797Z node 1 :TX_PROXY ERROR: Actor# [1:7488007850489797264:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:14:48.146576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:14:48.405081Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488007854784764852:2359];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:14:48.405303Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488007854784764852:2359];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:14:48.405623Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488007854784764852:2359];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:14:48.405744Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488007854784764852:2359];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:14:48.405859Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488007854784764852:2359];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:14:48.405970Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488007854784764852:2359];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:14:48.406068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488007854784764852:2359];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:14:48.406172Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488007854784764852:2359];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:14:48.406296Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488007854784764852:2359];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:14:48.406409Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488007854784764852:2359];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:14:48.406516Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037895;self_id=[1:7488007854784764852:2359];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:14:48.406619Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488007854784764852:2359];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:14:48.432693Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488007854784764854:2360];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:14:48.432761Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488007854784764854:2360];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:14:48.433018Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488007854784764854:2360];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:14:48.433142Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488007854784764854:2360];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:14:48.433248Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488007854784764854:2360];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:14:48.433360Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488007854784764854:2360];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:14:48.433456Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488007854784764854:2360];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:14:48.433554Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488007854784764854:2360];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:14:48.433677Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488007854784764854:2360];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:14:48.433796Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488007854784764854:2360];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:14:48.433898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488007854784764854:2360];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:14:48.433990Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037892;self_id=[1:7488007854784764854:2360];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:14:48.440654Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488007854784764820:2355];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:14:48.440706Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488007854784764820:2355];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:14:48.440864Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;sel ... ller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:48.930309Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:48.931865Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:48.935186Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:48.938511Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:48.939831Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039376;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:48.944524Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039382;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:48.945241Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:48.949117Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:48.951498Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:48.954427Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:48.957997Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039368;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:48.960119Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 
2025-03-31T16:15:48.965094Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:48.965539Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:48.970533Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:48.971776Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:48.975682Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:48.978575Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:48.980378Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:48.985122Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:48.985304Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:48.989925Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:48.990203Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:48.994947Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:48.999701Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:49.004457Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:49.009196Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:49.013867Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:49.018055Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039227;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:49.022194Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:49.026817Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:49.031480Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:49.036060Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:49.037767Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:49.040773Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:49.044608Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:49.045237Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:49.049972Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:49.050907Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:49.054586Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:49.055837Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:49.059355Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:49.060019Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:49.064217Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:49.068674Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:49.143454Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpfrz95cbfztez7jw8873t3", SessionId: ydb://session/3?node_id=1&id=ODBhOWE3MmMtYTlmM2E4YTQtN2FmOGUxNjYtYWU5ZjgyM2U=, Slow query, duration: 24.177773s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:15:49.331694Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:15:49.331776Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;self_id=[1:7488008095302991610:11055];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039392;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038933;receive=72075186224039094; 2025-03-31T16:15:49.332085Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:15:49.332506Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::SpecificServerlessWithExclusiveNodes [GOOD] Test command err: 2025-03-31T16:15:39.815256Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:700:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:39.815666Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:39.815798Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:15:39.817528Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:697:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:39.817914Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:39.818033Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d85/r3tmp/tmpgzKNCX/pdisk_1.dat 2025-03-31T16:15:40.205426Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20830, node 1 TClient is connected to server localhost:24742 2025-03-31T16:15:40.619008Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:15:40.619063Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:15:40.619112Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:15:40.619422Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration self_check_result: DEGRADED issue_log { id: "YELLOW-70fb-1231c6b1" status: YELLOW message: "Database has multiple issues" location { database { name: "/Root" } } reason: "YELLOW-1ba8-1231c6b1" reason: "YELLOW-5321-1231c6b1" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-1ba8-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-e9e2-1231c6b1-1" reason: "YELLOW-e9e2-1231c6b1-2" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-e9e2-1231c6b1-1" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 1 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-e9e2-1231c6b1-2" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 2 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-5321-1231c6b1" status: YELLOW message: "Storage degraded" location { database { name: "/Root" } } reason: "YELLOW-595f-1231c6b1-80c02825" type: "STORAGE" level: 2 } issue_log { id: "YELLOW-595f-1231c6b1-80c02825" status: YELLOW message: "Pool degraded" location { storage { pool { name: "static" } } database { name: "/Root" } } reason: "YELLOW-ef3e-1231c6b1-0" type: "STORAGE_POOL" level: 3 } issue_log { id: "RED-4847-1231c6b1-1-0-3-55-0-55" status: RED message: "VDisk is not available" location { storage { node { id: 1 host: "::1" port: 12001 } pool { name: "static" group { vdisk { id: "0-3-55-0-55" } } } } database { name: "/Root" } } type: "VDISK" level: 5 } issue_log { id: "YELLOW-ef3e-1231c6b1-0" status: YELLOW message: "Group degraded" location { storage { pool { name: "static" group { id: "0" } } } database { name: "/Root" } } reason: "RED-4847-1231c6b1-1-0-3-55-0-55" type: "STORAGE_GROUP" level: 4 } location { id: 1 host: "::1" port: 12001 } 2025-03-31T16:15:46.421190Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:284:2217], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:46.421693Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:15:46.421868Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:46.422974Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:700:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:46.423349Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:46.423519Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d85/r3tmp/tmpetiPEn/pdisk_1.dat 2025-03-31T16:15:46.706725Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2153, node 3 TClient is connected to server localhost:22194 2025-03-31T16:15:47.098756Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:15:47.098834Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:15:47.098874Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:15:47.099665Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:15:52.727520Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:698:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:52.727775Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:52.727901Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:15:52.729063Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:695:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:52.729340Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:52.729431Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d85/r3tmp/tmpqT6gye/pdisk_1.dat 2025-03-31T16:15:53.018962Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12399, node 5 TClient is connected to server localhost:28452 2025-03-31T16:15:53.320080Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:15:53.320134Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:15:53.320158Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:15:53.320621Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:16:00.230401Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:413:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:16:00.230634Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:16:00.230973Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:16:00.231797Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:703:2356], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:16:00.231981Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:16:00.232040Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d85/r3tmp/tmpSzEC4A/pdisk_1.dat 2025-03-31T16:16:00.492266Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10553, node 7 TClient is connected to server localhost:6825 2025-03-31T16:16:00.758786Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:16:00.758830Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:16:00.758868Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:16:00.759023Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:16:05.094900Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:16:05.095162Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:329:2219], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:16:05.095263Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d85/r3tmp/tmpgIPchJ/pdisk_1.dat 2025-03-31T16:16:05.359336Z node 9 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5450, node 9 TClient is connected to server localhost:17252 2025-03-31T16:16:05.751163Z node 9 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:16:05.751223Z node 9 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:16:05.751267Z node 9 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:16:05.751566Z node 9 :NET_CLASSIFIER ERROR: got bad distributable configuration >> TObjectStorageListingTest::Listing >> TLocksFatTest::PointSetBreak >> TFlatTest::SelectRangeForbidNullArgs3 [GOOD] >> TConsoleTests::TestRemoveAttributesExtSubdomain [GOOD] >> TFlatTest::LargeDatashardReplyDistributed [GOOD] >> TFlatTest::LargeDatashardReplyRW >> TFlatTest::SplitEmptyAndWrite [GOOD] >> TFlatTest::SplitBoundaryRead >> TFlatTest::CrossRW >> TFlatTest::WriteMergeAndRead >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncTableClient [GOOD] >> TFlatTest::CopyTableAndCompareColumnsSchema >> THealthCheckTest::ShardsLimit905 [GOOD] >> THealthCheckTest::ShardsLimit800 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut/unittest >> TConsoleTests::TestRemoveAttributesExtSubdomain [GOOD] Test command err: 2025-03-31T16:14:19.685924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:14:19.686008Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:14:19.753665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-31T16:14:22.958907Z node 10 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:14:22.958976Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:14:23.022585Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-31T16:14:26.276965Z node 19 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:14:26.277048Z node 19 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:14:26.342445Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-31T16:14:27.350915Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:1, at schemeshard: 72057594046578944 2025-03-31T16:14:27.497236Z node 23 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-03-31T16:14:27.497902Z node 23 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file 
"/home/runner/.ya/build/build_root/0l5c/000a1f/r3tmp/tmpLxDwxS/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-03-31T16:14:27.498753Z node 23 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/0l5c/000a1f/r3tmp/tmpLxDwxS/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/0l5c/000a1f/r3tmp/tmpLxDwxS/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 17865211738154931893 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-03-31T16:14:27.546519Z node 24 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-03-31T16:14:27.547304Z node 24 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/0l5c/000a1f/r3tmp/tmpLxDwxS/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-03-31T16:14:27.547586Z node 24 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/0l5c/000a1f/r3tmp/tmpLxDwxS/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/0l5c/000a1f/r3tmp/tmpLxDwxS/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 15542985046301243739 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-03-31T16:14:27.613250Z node 20 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-03-31T16:14:27.613850Z node 20 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/0l5c/000a1f/r3tmp/tmpLxDwxS/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-03-31T16:14:27.614124Z node 20 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/0l5c/000a1f/r3tmp/tmpLxDwxS/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/0l5c/000a1f/r3tmp/tmpLxDwxS/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 12133113603372183216 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-03-31T16:14:27.617686Z node 20 :BS_LOCALRECOVERY CRIT: VDISK[80000000:_:0:0:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file 
"/home/runner/.ya/build/build_root/0l5c/000a1f/r3tmp/tmpLxDwxS/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-03-31T16:14:27.673765Z node 27 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-03-31T16:14:27.674286Z node 27 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/0l5c/000a1f/r3tmp/tmpLxDwxS/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-03-31T16:14:27.674519Z node 27 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/0l5c/000a1f/r3tmp/tmpLxDwxS/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/0l5c/000a1f/r3tmp/tmpLxDwxS/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 17968000903351548529 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# ... 
node 163 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715661:0 progress is 1/1 2025-03-31T16:16:06.355831Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715661 ready parts: 1/1 2025-03-31T16:16:06.355937Z node 163 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715661:0 progress is 1/1 2025-03-31T16:16:06.356029Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715661 ready parts: 1/1 2025-03-31T16:16:06.356143Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046578944, LocalPathId: 3] was 11 2025-03-31T16:16:06.356218Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715661, ready parts: 1/1, is published: false 2025-03-31T16:16:06.356407Z node 163 :FLAT_TX_SCHEMESHARD INFO: Send TEvUpdateTenantSchemeShard, to actor: [163:1378:2617], msg: TabletId: 72057594046578944 Generation: 2 UserAttributes { Key: "name1" Value: "value1" } UserAttributesVersion: 3, at schemeshard: 72057594046578944 2025-03-31T16:16:06.356463Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715661 ready parts: 1/1 2025-03-31T16:16:06.356556Z node 163 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715661:0 2025-03-31T16:16:06.356633Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715661:0 2025-03-31T16:16:06.356731Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046578944, LocalPathId: 3] was 12 2025-03-31T16:16:06.356825Z node 163 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715661, publications: 1, subscribers: 1 2025-03-31T16:16:06.356919Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976715661, [OwnerId: 72057594046578944, LocalPathId: 3], 7 2025-03-31T16:16:06.360027Z node 163 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateTenantSchemeShard, at schemeshard: 72075186233409546, msg: TabletId: 72057594046578944 Generation: 2 UserAttributes { Key: "name1" Value: "value1" } UserAttributesVersion: 3 2025-03-31T16:16:06.360139Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpdateTenant DoExecute, msg: TabletId: 72057594046578944 Generation: 2 UserAttributes { Key: "name1" Value: "value1" } UserAttributesVersion: 3, at schemeshard: 72075186233409546 2025-03-31T16:16:06.360426Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: Cannot publish paths for unknown operation id#0 2025-03-31T16:16:06.360747Z node 163 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046578944 2025-03-31T16:16:06.360790Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046578944, txId: 281474976715661, path id: [OwnerId: 72057594046578944, LocalPathId: 3] 2025-03-31T16:16:06.360963Z node 163 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046578944 2025-03-31T16:16:06.361000Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [163:686:2241], at schemeshard: 72057594046578944, txId: 281474976715661, path id: 3 2025-03-31T16:16:06.361667Z node 163 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2025-03-31T16:16:06.361759Z node 163 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:21] persistent tx 281474976715661 for mediator 72057594046382081 tablet 
72057594046578944 removed=1 2025-03-31T16:16:06.361794Z node 163 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:21] persistent tx 281474976715661 for mediator 72057594046382081 acknowledged 2025-03-31T16:16:06.361825Z node 163 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:21] persistent tx 281474976715661 acknowledged 2025-03-31T16:16:06.362463Z node 163 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046578944, msg: Owner: 72057594046578944 Generation: 2 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046578944, cookie: 281474976715661 2025-03-31T16:16:06.362553Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046578944, msg: Owner: 72057594046578944 Generation: 2 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046578944, cookie: 281474976715661 2025-03-31T16:16:06.362620Z node 163 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046578944, txId: 281474976715661 2025-03-31T16:16:06.362703Z node 163 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046578944, txId: 281474976715661, pathId: [OwnerId: 72057594046578944, LocalPathId: 3], version: 7 2025-03-31T16:16:06.362801Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046578944, LocalPathId: 3] was 11 2025-03-31T16:16:06.362959Z node 163 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046578944, txId: 281474976715661, subscribers: 1 2025-03-31T16:16:06.363045Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046578944, to actorId: [163:1931:2389] 2025-03-31T16:16:06.367471Z node 163 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-03-31T16:16:06.367526Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 0, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-03-31T16:16:06.367677Z node 163 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-03-31T16:16:06.367713Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [163:1652:2814], at schemeshard: 72075186233409546, txId: 0, path id: 1 2025-03-31T16:16:06.368354Z node 163 :FLAT_TX_SCHEMESHARD INFO: Handle TEvSyncTenantSchemeShard, at schemeshard: 72057594046578944, msg: DomainSchemeShard: 72057594046578944 DomainPathId: 3 TabletID: 72075186233409546 Generation: 2 EffectiveACLVersion: 0 SubdomainVersion: 3 UserAttributesVersion: 3 TenantHive: 18446744073709551615 TenantSysViewProcessor: 72075186233409553 TenantRootACL: "" TenantStatisticsAggregator: 72075186233409554 TenantGraphShard: 18446744073709551615 2025-03-31T16:16:06.368429Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoExecute, pathId: [OwnerId: 72057594046578944, LocalPathId: 3], at schemeshard: 72057594046578944 2025-03-31T16:16:06.368511Z node 163 :FLAT_TX_SCHEMESHARD INFO: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046578944, LocalPathId: 3], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046578944, LocalPathId: 3], Generation: 2, ActorId:[163:1378:2617], EffectiveACLVersion: 0, SubdomainVersion: 3, UserAttributesVersion: 3, TenantHive: 18446744073709551615, TenantSysViewProcessor: 72075186233409553, TenantStatisticsAggregator: 72075186233409554, TenantGraphShard: 
18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 3, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 3, tenantHive: 18446744073709551615, tenantSysViewProcessor: 72075186233409553, at schemeshard: 72057594046578944 2025-03-31T16:16:06.369360Z node 163 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72075186233409546, cookie: 0 2025-03-31T16:16:06.369602Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046578944, cookie: 281474976715661 2025-03-31T16:16:06.369696Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046578944, LocalPathId: 3], at schemeshard: 72057594046578944 Reply: Status: StatusSuccess Path: "/dc-1/users/tenant-1" PathDescription { Self { Name: "tenant-1" PathId: 3 SchemeshardId: 72057594046578944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1000 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 3 ChildrenVersion: 1 SubDomainVersion: 3 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046578944 PathId_Depricated: 1 ProcessingParams { Version: 3 PlanResolution: 10 Coordinators: 72075186233409547 Coordinators: 72075186233409548 Coordinators: 72075186233409549 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409550 Mediators: 72075186233409551 Mediators: 72075186233409552 SchemeShard: 72075186233409546 SysViewProcessor: 72075186233409553 StatisticsAggregator: 72075186233409554 } DomainKey { SchemeShard: 72057594046578944 PathId: 3 } StoragePools { Name: "/dc-1/users/tenant-1:hdd" Kind: "hdd" } StoragePools { Name: "/dc-1/users/tenant-1:hdd-1" Kind: "hdd-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 9 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046578944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { } SecurityState { } } UserAttributes { Key: "name1" Value: "value1" } } PathId: 3 PathOwnerId: 72057594046578944 Reply: Status: StatusSuccess Path: "/dc-1/users/tenant-1" PathDescription { Self { Name: "tenant-1" PathId: 3 SchemeshardId: 72057594046578944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1000 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 3 ChildrenVersion: 1 SubDomainVersion: 3 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046578944 PathId_Depricated: 1 ProcessingParams { Version: 3 PlanResolution: 10 Coordinators: 72075186233409547 Coordinators: 72075186233409548 Coordinators: 72075186233409549 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409550 Mediators: 72075186233409551 Mediators: 72075186233409552 SchemeShard: 72075186233409546 SysViewProcessor: 72075186233409553 StatisticsAggregator: 72075186233409554 } DomainKey { SchemeShard: 72057594046578944 PathId: 3 } StoragePools { Name: "/dc-1/users/tenant-1:hdd" Kind: "hdd" } StoragePools { 
Name: "/dc-1/users/tenant-1:hdd-1" Kind: "hdd-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 9 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046578944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { } SecurityState { } } UserAttributes { Key: "name1" Value: "value1" } } PathId: 3 PathOwnerId: 72057594046578944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeForbidNullArgs3 [GOOD] Test command err: 2025-03-31T16:16:02.676609Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008174256452118:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:02.676873Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001d02/r3tmp/tmpxX8P7I/pdisk_1.dat 2025-03-31T16:16:02.976387Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:02.990507Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:02.990653Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:02.993192Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18258 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:03.229449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:03.256291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:05.425300Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008184520041752:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:05.425370Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001d02/r3tmp/tmpdn1V4W/pdisk_1.dat 2025-03-31T16:16:05.538595Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:05.580600Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:05.580705Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:05.582364Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8138 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:05.754315Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:05.775131Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncTableClient [GOOD] Test command err: 2025-03-31T16:15:52.813045Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008131111948879:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:15:52.813220Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d4e/r3tmp/tmphBdCEt/pdisk_1.dat 2025-03-31T16:15:53.226857Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:15:53.227780Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:15:53.249783Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:15:53.287382Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32176, node 1 2025-03-31T16:15:53.300166Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:15:53.300194Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:15:53.412472Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:15:53.412498Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:15:53.412520Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:15:53.412659Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26131 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:15:53.861148Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:15:57.175918Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488008149073688742:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:15:57.176078Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d4e/r3tmp/tmpJoZkw8/pdisk_1.dat 2025-03-31T16:15:57.320932Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:15:57.354184Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:15:57.354303Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:15:57.362832Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18854, node 4 2025-03-31T16:15:57.424644Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:15:57.424663Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:15:57.424670Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:15:57.424770Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12210 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:15:57.671913Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:00.267879Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488008161958591825:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:16:00.268079Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:16:00.269013Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488008161958591979:2550], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:16:00.269103Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488008161958591975:2547], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: { : Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:16:00.269132Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488008161958591985:2554], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: { : Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:16:00.269211Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488008161958591984:2553], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: { : Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:16:00.269229Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488008161958591987:2555], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: { : Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:16:00.269251Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488008161958591989:2557], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: { : Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:16:00.269319Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488008161958591991:2559], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: { : Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:16:00.269359Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488008161958591990:2558], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: { : Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:16:00.269361Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488008161958592017:2580], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: { : Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:16:00.269420Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488008161958591993:2560], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: { : Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:16:00.269478Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488008161958591976:2548], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: { : Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:16:00.269524Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488008161958592008:2572], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: { : Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:16:00.269561Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488008161958592001:2566], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: { : Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:16:00.269607Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488008161958591982:2552], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: { : Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:16:00.269614Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488008161958592010:2574], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: { : Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:16:00.269648Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488008161958591997:2564], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: { : Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:16:00.269702Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488008161958591996:2563], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: { : Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:16:00.269765Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488008161958592002:2567], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
< ... ROR: Actor# [4:7488008161958592693:3195] txid# 281474976715715, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-31T16:16:00.410919Z node 4 :TX_PROXY ERROR: Actor# [4:7488008161958592705:3201] txid# 281474976715717, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-31T16:16:00.413594Z node 4 :TX_PROXY ERROR: Actor# [4:7488008161958592713:3206] txid# 281474976715718, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-31T16:16:00.413903Z node 4 :TX_PROXY ERROR: Actor# [4:7488008161958592715:3208] txid# 281474976715720, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-31T16:16:00.414488Z node 4 :TX_PROXY ERROR: Actor# [4:7488008161958592727:3218] txid# 281474976715724, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-31T16:16:00.414608Z node 4 :TX_PROXY ERROR: Actor# [4:7488008161958592714:3207] txid# 281474976715719, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-31T16:16:00.416246Z node 4 :TX_PROXY ERROR: Actor# [4:7488008161958592725:3217] txid# 281474976715723, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-31T16:16:00.417186Z node 4 :TX_PROXY ERROR: Actor# [4:7488008161958592724:3216] txid# 281474976715722, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-31T16:16:00.417556Z node 4 :TX_PROXY ERROR: Actor# [4:7488008161958592716:3209] txid# 281474976715721, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
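The two floods above appear to be one benign startup race: each test session's TPoolFetcherActor asks for the default workload-manager pool before it exists (NOT_FOUND), the sessions then race to create /Root/.metadata/workload_manager/pools/default, and every create transaction after the first gets "error: path exist, request accepts it", i.e. the create is treated as idempotent. A minimal std-only C++ sketch of that ensure-exists idiom, with hypothetical names and statuses rather than YDB's API:

    #include <iostream>
    #include <mutex>
    #include <set>
    #include <string>
    #include <thread>
    #include <vector>

    // Hypothetical statuses mirroring the log: NOT_FOUND on fetch,
    // "path exist" tolerated on create.
    enum class EStatus { Ok, NotFound, AlreadyExists };

    std::mutex g_mu;
    std::set<std::string> g_paths;  // stand-in for the scheme board

    EStatus CreatePool(const std::string& path) {
        std::lock_guard<std::mutex> lock(g_mu);
        return g_paths.insert(path).second ? EStatus::Ok : EStatus::AlreadyExists;
    }

    // Ensure-exists: AlreadyExists is not an error, exactly like
    // "error: path exist, request accepts it" in the entries above.
    bool EnsurePool(const std::string& path) {
        EStatus st = CreatePool(path);
        return st == EStatus::Ok || st == EStatus::AlreadyExists;
    }

    int main() {
        std::vector<std::thread> workers;
        for (int i = 0; i < 8; ++i)
            workers.emplace_back([] { EnsurePool("/Root/.metadata/workload_manager/pools/default"); });
        for (auto& t : workers) t.join();
        std::cout << "pools: " << g_paths.size() << "\n";  // prints 1: the race is harmless
    }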
2025-03-31T16:16:00.417943Z node 4 :TX_PROXY ERROR: Actor# [4:7488008161958592734:3223] txid# 281474976715725, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:16:00.418316Z node 4 :TX_PROXY ERROR: Actor# [4:7488008161958592751:3237] txid# 281474976715727, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:16:00.422044Z node 4 :TX_PROXY ERROR: Actor# [4:7488008161958592767:3251] txid# 281474976715728, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:16:00.422214Z node 4 :TX_PROXY ERROR: Actor# [4:7488008161958592750:3236] txid# 281474976715726, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:16:00.423268Z node 4 :TX_PROXY ERROR: Actor# [4:7488008161958592768:3252] txid# 281474976715729, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:16:00.423488Z node 4 :TX_PROXY ERROR: Actor# [4:7488008161958592801:3278] txid# 281474976715731, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:16:00.428293Z node 4 :TX_PROXY ERROR: Actor# [4:7488008161958592795:3273] txid# 281474976715730, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:16:00.428653Z node 4 :TX_PROXY ERROR: Actor# [4:7488008161958592809:3286] txid# 281474976715732, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:16:00.428786Z node 4 :TX_PROXY ERROR: Actor# [4:7488008161958592811:3288] txid# 281474976715734, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:16:00.429432Z node 4 :TX_PROXY ERROR: Actor# [4:7488008161958592810:3287] txid# 281474976715733, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, 
LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:16:00.430700Z node 4 :TX_PROXY ERROR: Actor# [4:7488008161958592839:3307] txid# 281474976715735, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:16:00.434053Z node 4 :TX_PROXY ERROR: Actor# [4:7488008161958592879:3339] txid# 281474976715736, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:16:00.434329Z node 4 :TX_PROXY ERROR: Actor# [4:7488008161958592896:3350] txid# 281474976715738, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:16:00.435543Z node 4 :TX_PROXY ERROR: Actor# [4:7488008161958592907:3359] txid# 281474976715741, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:16:00.435626Z node 4 :TX_PROXY ERROR: Actor# [4:7488008161958592905:3357] txid# 281474976715739, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:16:00.435854Z node 4 :TX_PROXY ERROR: Actor# [4:7488008161958592895:3349] txid# 281474976715737, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:16:00.436332Z node 4 :TX_PROXY ERROR: Actor# [4:7488008161958592909:3361] txid# 281474976715742, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:16:00.436484Z node 4 :TX_PROXY ERROR: Actor# [4:7488008161958592906:3358] txid# 281474976715740, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:16:00.436644Z node 4 :TX_PROXY ERROR: Actor# [4:7488008161958592913:3365] txid# 281474976715743, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:16:00.437864Z node 4 :TX_PROXY ERROR: Actor# [4:7488008161958592929:3379] txid# 281474976715744, issues: { message: "Check failed: path: 
\'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:16:00.438029Z node 4 :TX_PROXY ERROR: Actor# [4:7488008161958592943:3390] txid# 281474976715745, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:16:00.439511Z node 4 :TX_PROXY ERROR: Actor# [4:7488008161958592970:3404] txid# 281474976715747, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:16:00.439710Z node 4 :TX_PROXY ERROR: Actor# [4:7488008161958592969:3403] txid# 281474976715746, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:16:02.176049Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7488008149073688742:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:02.176167Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TFlatTest::MiniKQLRanges |87.3%| [TA] $(B)/ydb/core/cms/console/ut/test-results/unittest/{meta.json ... results_accumulator.log} |87.3%| [TA] {RESULT} $(B)/ydb/core/cms/console/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TLocksTest::GoodLock |87.3%| [TA] $(B)/ydb/services/ydb/sdk_sessions_ut/test-results/unittest/{meta.json ... results_accumulator.log} |87.3%| [TA] {RESULT} $(B)/ydb/services/ydb/sdk_sessions_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TFlatTest::SelectBigRangePerf [GOOD] >> TFlatTest::SelectRangeBothLimit >> Viewer::UseTransactionWhenExecuteDataActionQuery [GOOD] >> ViewerTopicDataTests::TopicDataTest >> TFlatTest::CrossRW [GOOD] >> TFlatTest::GetTabletCounters >> TFlatTest::SelectRangeNullArgs3 >> TCancelTx::CrossShardReadOnlyWithReadSets [GOOD] >> TCancelTx::ImmediateReadOnly >> TFlatTest::SplitBoundaryRead [GOOD] >> TFlatTest::WriteMergeAndRead [GOOD] >> TFlatTest::WriteSplitAndRead >> TFlatTest::AutoSplitBySize >> TFlatTest::Init >> THealthCheckTest::TestReBootingTabletIsDead [GOOD] >> THealthCheckTest::NoBscResponse [GOOD] >> THealthCheckTest::LayoutIncorrect >> TObjectStorageListingTest::Listing [GOOD] >> TObjectStorageListingTest::ManyDeletes >> TFlatTest::MiniKQLRanges [GOOD] >> TFlatTest::MergeEmptyAndWrite ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SplitBoundaryRead [GOOD] Test command err: 2025-03-31T16:16:05.815229Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008184141179893:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:05.815368Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001d00/r3tmp/tmpogvTSq/pdisk_1.dat 2025-03-31T16:16:06.132122Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:06.169627Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:06.169871Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:06.172161Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29369 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:06.300452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:06.333326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
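The interleaved markers in this stretch are the ya runner's progress stream, as far as this excerpt shows: ">> Suite::Test" announces a scheduled or running test, "[GOOD]" marks a pass, "|87.3%|" is overall completion, "[TA]" is a test-results aggregation step, and a "------- [TM] ... Test command err:" block dumps the captured stderr of one test binary. A rough scrape of such a stream; the "[FAIL]" marker is assumed by analogy, since this excerpt only shows "[GOOD]":

    #include <iostream>
    #include <string>

    // Count test verdicts by marker in a ya progress stream fed on stdin.
    // Marker meanings are inferred from this log, not a documented format.
    int main() {
        int good = 0, fail = 0;
        std::string line;
        while (std::getline(std::cin, line)) {
            for (size_t pos = 0; (pos = line.find("[GOOD]", pos)) != std::string::npos; pos += 6) ++good;
            for (size_t pos = 0; (pos = line.find("[FAIL]", pos)) != std::string::npos; pos += 6) ++fail;
        }
        std::cout << good << " passed, " << fail << " failed\n";
    }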
2025-03-31T16:16:06.507985Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-03-31T16:16:06.513600Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-03-31T16:16:06.535583Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2025-03-31T16:16:06.540465Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743437766448 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "unused004" Type: "Float" TypeId: 33 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name... (TRUNCATED) waiting... 2025-03-31T16:16:06.646111Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.22, eph 1} end=0, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-03-31T16:16:06.646139Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.23, eph 1} end=0, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-03-31T16:16:06.646423Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.24, eph 1} end=0, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-03-31T16:16:06.646585Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.25, eph 1} end=0, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-03-31T16:16:06.647673Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.26, eph 3} end=0, 4 blobs 2r (max 2), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 0 +0, (1907 1533 0)b }, ecr=1.000 2025-03-31T16:16:06.650848Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.32, eph 3} end=0, 4 blobs 8r (max 8), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (3250 2180 6413)b }, ecr=1.000 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743437766448 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "unused004" Type: "Float" TypeId: 
33 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name... (TRUNCATED) waiting... TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 128 StatusCode: PATH_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Path not found" 2025-03-31T16:16:06.747582Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2025-03-31T16:16:06.747750Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-03-31T16:16:06.749867Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2025-03-31T16:16:06.753137Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037891 not found 2025-03-31T16:16:06.754536Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037892 not found 2025-03-31T16:16:08.529665Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008197270324463:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:08.529755Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001d00/r3tmp/tmpERhKCD/pdisk_1.dat 2025-03-31T16:16:08.626696Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:08.670255Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:08.670336Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:08.675497Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2280 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:08.806899Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 
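Note how the first run above ends: TClient::Ls flips from StatusCode: SUCCESS (SchemeStatus 0, full PathDescription) to StatusCode: PATH_NOT_EXIST (SchemeStatus 2) once /dc-1/Dir/TableOld is dropped, after which Hive logs "TabletId ... not found" while the freed datashards are deleted. A small sketch of reading those two responses; the numeric values are taken from this log, not the complete status enum:

    #include <cstdint>
    #include <iostream>
    #include <string>

    // Interpret the SchemeStatus values visible in the Ls responses above.
    std::string DescribeLs(uint32_t schemeStatus) {
        switch (schemeStatus) {
            case 0: return "SUCCESS: path exists, PathDescription follows";
            case 2: return "PATH_NOT_EXIST: path was dropped or never created";
            default: return "other scheme status " + std::to_string(schemeStatus);
        }
    }

    int main() {
        std::cout << DescribeLs(0) << "\n" << DescribeLs(2) << "\n";
    }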
2025-03-31T16:16:08.827870Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:08.898478Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-03-31T16:16:08.903915Z node 2 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-03-31T16:16:08.924035Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2025-03-31T16:16:08.927539Z node 2 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 TClient::Ls request: /dc-1/Dir/TableOld 2025-03-31T16:16:08.949769Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.21, eph 3} end=0, 4 blobs 9r (max 9), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (3307 2180 6413)b }, ecr=1.000 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743437768905 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: " ... ode 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715678:0, at schemeshard: 72057594046644480 2025-03-31T16:16:09.143946Z node 2 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TConfigureDestination ProgressState, operationId: 281474976715678:0, at schemeshard: 72057594046644480 2025-03-31T16:16:09.144008Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Initializing scheme on dst datashard: 72075186224037890 splitOp: 281474976715678:0 alterVersion: 1 at tablet: 72057594046644480 2025-03-31T16:16:09.144113Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Initializing scheme on dst datashard: 72075186224037891 splitOp: 281474976715678:0 alterVersion: 1 at tablet: 72057594046644480 2025-03-31T16:16:09.144469Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976715678:0 from tablet: 72057594046644480 to tablet: 72075186224037890 cookie: 72057594046644480:3 msg type: 269553152 2025-03-31T16:16:09.144646Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976715678:0 from tablet: 72057594046644480 to tablet: 72075186224037891 cookie: 72057594046644480:4 msg type: 269553152 2025-03-31T16:16:09.144744Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976715678, partId: 0, tablet: 72075186224037890 2025-03-31T16:16:09.144763Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976715678, partId: 0, tablet: 72075186224037891 2025-03-31T16:16:09.166363Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715678:0, at schemeshard: 72057594046644480, message: OperationCookie: 281474976715678 TabletId: 72075186224037891 2025-03-31T16:16:09.166412Z node 2 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TConfigureDestination operationId# 281474976715678:0 
HandleReply TEvInitSplitMergeDestinationAck, operationId: 281474976715678:0, at schemeshard: 72057594046644480 message# OperationCookie: 281474976715678 TabletId: 72075186224037891 2025-03-31T16:16:09.166694Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715678:0, at schemeshard: 72057594046644480 2025-03-31T16:16:09.168491Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715678:0, at schemeshard: 72057594046644480, message: OperationCookie: 281474976715678 TabletId: 72075186224037890 2025-03-31T16:16:09.168520Z node 2 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TConfigureDestination operationId# 281474976715678:0 HandleReply TEvInitSplitMergeDestinationAck, operationId: 281474976715678:0, at schemeshard: 72057594046644480 message# OperationCookie: 281474976715678 TabletId: 72075186224037890 2025-03-31T16:16:09.168538Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715678:0 3 -> 131 2025-03-31T16:16:09.168897Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715678:0, at schemeshard: 72057594046644480 2025-03-31T16:16:09.168983Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715678:0, at schemeshard: 72057594046644480 2025-03-31T16:16:09.169009Z node 2 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TTransferData operationId# 281474976715678:0 ProgressState, at schemeshard: 72057594046644480 2025-03-31T16:16:09.169028Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TSplitMerge TTransferData operationId# 281474976715678:0 Starting split on src datashard 72075186224037888 splitOpId# 281474976715678:0 at tablet 72057594046644480 2025-03-31T16:16:09.169277Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976715678:0 from tablet: 72057594046644480 to tablet: 72075186224037888 cookie: 72057594046644480:1 msg type: 269553154 2025-03-31T16:16:09.169367Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976715678, partId: 0, tablet: 72075186224037888 2025-03-31T16:16:09.171750Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.27, eph 1} end=0, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-03-31T16:16:09.171765Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.26, eph 1} end=0, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-03-31T16:16:09.172009Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.29, eph 1} end=0, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-03-31T16:16:09.172075Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.28, eph 1} end=0, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-03-31T16:16:09.172236Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.30, eph -9223372036854775808} end=0, 0 blobs 0r (max 0), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-03-31T16:16:09.176672Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715678:0, at schemeshard: 72057594046644480, message: OperationCookie: 281474976715678 TabletId: 72075186224037888 2025-03-31T16:16:09.176723Z node 2 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TTransferData operationId# 281474976715678:0 HandleReply TEvSplitAck, at schemeshard: 72057594046644480, message: OperationCookie: 281474976715678 TabletId: 72075186224037888 2025-03-31T16:16:09.177021Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715678:0 131 -> 132 
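Operation 281474976715678 above walks the TSplitMerge sub-machine: TConfigureDestination initializes the destination datashards (72075186224037890 and 72075186224037891), state 3 -> 131 starts TTransferData on the source shard 72075186224037888, and 131 -> 132 enters TNotifySrc, which publishes the path and tells the source its partitioning changed. An illustrative reconstruction of that phase walk; the state numbers come from the log, but this is a sketch, not schemeshard's actual transition table:

    #include <iostream>

    // Phases of a split as observed in the log entries above.
    enum class ESplitPhase {
        ConfigureDestination = 3,   // init dst datashards, wait for acks
        TransferData = 131,         // src shard splits and ships data
        NotifySrc = 132,            // publish path, notify src of new partitioning
        Done
    };

    ESplitPhase Next(ESplitPhase p) {
        switch (p) {
            case ESplitPhase::ConfigureDestination: return ESplitPhase::TransferData;
            case ESplitPhase::TransferData: return ESplitPhase::NotifySrc;
            default: return ESplitPhase::Done;
        }
    }

    int main() {
        for (ESplitPhase p = ESplitPhase::ConfigureDestination; p != ESplitPhase::Done; p = Next(p))
            std::cout << static_cast<int>(p) << " -> ";
        std::cout << "done\n";  // 3 -> 131 -> 132 -> done, matching the log
    }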
2025-03-31T16:16:09.177171Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 6 2025-03-31T16:16:09.177562Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715678:0, at schemeshard: 72057594046644480 2025-03-31T16:16:09.177725Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-03-31T16:16:09.177748Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715678, path id: [OwnerId: 72057594046644480, LocalPathId: 3] 2025-03-31T16:16:09.177927Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-03-31T16:16:09.177947Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:7488008197270324973:2239], at schemeshard: 72057594046644480, txId: 281474976715678, path id: 3 2025-03-31T16:16:09.177982Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715678:0, at schemeshard: 72057594046644480 2025-03-31T16:16:09.178006Z node 2 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TNotifySrc, operationId: 281474976715678:0 ProgressState, at schemeshard: 72057594046644480 2025-03-31T16:16:09.178025Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Notify src datashard 72075186224037888 on partitioning changed splitOp# 281474976715678 at tablet 72057594046644480 2025-03-31T16:16:09.178502Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976715678:0 from tablet: 72057594046644480 to tablet: 72075186224037888 cookie: 72057594046644480:1 msg type: 269553158 2025-03-31T16:16:09.180194Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715678 2025-03-31T16:16:09.180295Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715678 2025-03-31T16:16:09.180313Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715678 2025-03-31T16:16:09.180330Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715678, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 4 2025-03-31T16:16:09.180350Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 7 2025-03-31T16:16:09.180416Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715678, ready parts: 0/1, is published: true 2025-03-31T16:16:09.180647Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715678:0, at schemeshard: 72057594046644480, message: OperationCookie: 281474976715678 TabletId: 72075186224037888 2025-03-31T16:16:09.180688Z node 2 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TNotifySrc, operationId: 281474976715678:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186224037888, at schemeshard: 72057594046644480 2025-03-31T16:16:09.180751Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715678:0 progress is 1/1 2025-03-31T16:16:09.180765Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation IsReadyToDone TxId: 281474976715678 ready parts: 1/1 2025-03-31T16:16:09.180803Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715678:0 progress is 1/1 2025-03-31T16:16:09.180815Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715678 ready parts: 1/1 2025-03-31T16:16:09.180831Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715678, ready parts: 1/1, is published: true 2025-03-31T16:16:09.180876Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:7488008201565292687:2359] message: TxId: 281474976715678 2025-03-31T16:16:09.180912Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715678 ready parts: 1/1 2025-03-31T16:16:09.180929Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715678:0 2025-03-31T16:16:09.180944Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715678:0 2025-03-31T16:16:09.181078Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 6 2025-03-31T16:16:09.181329Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715678 2025-03-31T16:16:09.181448Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715678:0, at schemeshard: 72057594046644480 2025-03-31T16:16:09.181467Z node 2 :FLAT_TX_SCHEMESHARD INFO: Unable to activate 281474976715678:0 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743437768905 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "unused004" Type: "Float" TypeId: 33 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name... (TRUNCATED) >> THealthCheckTest::ProtobufUnderLimitFor100LargeVdisksIssues [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::TestReBootingTabletIsDead [GOOD] Test command err: 2025-03-31T16:15:39.976735Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:700:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:39.977086Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:39.977198Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:15:39.978492Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:697:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:39.978751Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:39.978846Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d6f/r3tmp/tmpw93s2t/pdisk_1.dat 2025-03-31T16:15:40.289360Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12028, node 1 TClient is connected to server localhost:21213 2025-03-31T16:15:40.654171Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:15:40.654216Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:15:40.654249Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:15:40.654505Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:15:46.703634Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:284:2217], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:46.704100Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:15:46.704233Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:46.705267Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:700:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:46.705553Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:46.705683Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d6f/r3tmp/tmpmSeLyW/pdisk_1.dat 2025-03-31T16:15:46.988314Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30166, node 3 TClient is connected to server localhost:25044 2025-03-31T16:15:47.243112Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:15:47.243168Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:15:47.243201Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:15:47.243606Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:15:53.432788Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:775:2414], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:53.433210Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:53.433408Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:15:53.434095Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:772:2356], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:53.434642Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:53.434734Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d6f/r3tmp/tmpqJJ7oc/pdisk_1.dat 2025-03-31T16:15:53.751108Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26191, node 5 TClient is connected to server localhost:11851 2025-03-31T16:15:56.446035Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:15:56.446118Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:15:56.446163Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:15:56.447357Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:15:56.465012Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:15:56.465166Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:15:56.509708Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 7 Cookie 7 2025-03-31T16:15:56.510449Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected self_check_result: GOOD issue_log { id: "YELLOW-f489-1231c6b1" status: YELLOW message: "Database has compute issues" location { database { name: "/Root" } } reason: "YELLOW-1ba8-1231c6b1" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-1ba8-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-e9e2-1231c6b1-5" reason: "YELLOW-e9e2-1231c6b1-6" reason: "YELLOW-e9e2-1231c6b1-7" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-e9e2-1231c6b1-5" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 5 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-e9e2-1231c6b1-6" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 6 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-e9e2-1231c6b1-7" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 7 host: "::1" port: 12003 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } location { id: 5 host: "::1" port: 12001 } 2025-03-31T16:16:03.688671Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:851:2415], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:16:03.689391Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:16:03.689813Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:16:03.691063Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:848:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:16:03.691447Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:16:03.691634Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d6f/r3tmp/tmpNSigBf/pdisk_1.dat 2025-03-31T16:16:03.969423Z node 8 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15920, node 8 TClient is connected to server localhost:2043 2025-03-31T16:16:07.154651Z node 8 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:16:07.154709Z node 8 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:16:07.154751Z node 8 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:16:07.155626Z node 8 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:16:07.156046Z node 8 :HIVE TRACE: HIVE#72057594037968897 Handle TEvTabletPipe::TEvServerConnected([8:1336:2701]) [8:1602:2706] 2025-03-31T16:16:07.156190Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvHive::TEvCreateTablet(PersQueue(72057594046578946,0)) 2025-03-31T16:16:07.168465Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute Owner: 72057594046578946 OwnerIdx: 0 TabletType: PersQueue BindedChannels { StoragePoolName: "/Root:test" } BindedChannels { StoragePoolName: "/Root:test" } BindedChannels { StoragePoolName: "/Root:test" } 2025-03-31T16:16:07.168580Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Hive 72057594037968897 allocated TabletId 72075186224037888 from TabletIdIndex 65536 2025-03-31T16:16:07.168837Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for type PersQueue: {} 2025-03-31T16:16:07.168912Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for profile 'default': {Memory: 1048576} 2025-03-31T16:16:07.169087Z node 8 :HIVE DEBUG: HIVE#72057594037968897 CreateTabletFollowers Tablet PersQueue.72075186224037888.Leader.0 2025-03-31T16:16:07.169156Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute TabletId: 72075186224037888 Status: OK 2025-03-31T16:16:07.169321Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::AssignTabletGroups TEvControllerSelectGroups tablet 72075186224037888 GroupParameters { StoragePoolSpecifier { Name: "/Root:test" } } ReturnAllMatchingGroups: true 2025-03-31T16:16:07.170829Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Connected to tablet 72057594037932033 from tablet 72057594037968 ... 
VE#72057594037968897 [FBN] Tablet PersQueue.72075186224037888.Leader.0 selected nodes count 1 2025-03-31T16:16:07.233095Z node 8 :HIVE TRACE: HIVE#72057594037968897 [FBN] Tablet PersQueue.72075186224037888.Leader.0 selected max priority nodes count 1 2025-03-31T16:16:07.233174Z node 8 :HIVE TRACE: HIVE#72057594037968897 [FBN] Tablet PersQueue.72075186224037888.Leader.0 selected node 10 2025-03-31T16:16:07.233260Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Tablet(PersQueue.72075186224037888.Leader.0) VolatileState: Booting -> Starting (Node 10) 2025-03-31T16:16:07.233322Z node 8 :HIVE TRACE: HIVE#72057594037968897 Node(10, (0,0,0,0)->(0,1048576,0,0)) 2025-03-31T16:16:07.233457Z node 8 :HIVE TRACE: HIVE#72057594037968897 UpdateTotalResources: ObjectId (72057594046578946,0): {} -> {Memory: 1048576} 2025-03-31T16:16:07.233556Z node 8 :HIVE TRACE: HIVE#72057594037968897 UpdateTotalResources: Type PersQueue: {} -> {Memory: 1048576} 2025-03-31T16:16:07.233644Z node 8 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2025-03-31T16:16:07.233753Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStartTablet::Execute Tablet (72075186224037888,0) 2025-03-31T16:16:07.233954Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStartTablet::Execute, Sending TEvBootTablet(PersQueue.72075186224037888.Leader.1) to node 10 storage {Version# 1 TabletID# 72075186224037888 TabletType# PersQueue Channels# {0:{Channel# 0 Type# none StoragePool# /Root:test History# {0:{FromGeneration# 0 GroupID# 2181038080 Timestamp# 1970-01-01T00:00:00.066536Z}}, 1:{Channel# 1 Type# none StoragePool# /Root:test History# {0:{FromGeneration# 0 GroupID# 2181038080 Timestamp# 1970-01-01T00:00:00.066536Z}}, 2:{Channel# 2 Type# none StoragePool# /Root:test History# {0:{FromGeneration# 0 GroupID# 2181038080 Timestamp# 1970-01-01T00:00:00.066536Z}}} Tenant: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-03-31T16:16:07.246510Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStatus(10)::Complete 2025-03-31T16:16:07.246615Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Complete 2025-03-31T16:16:07.246837Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStartTablet::Complete Tablet (72075186224037888,0) SideEffects: {Notifications: 0x10080002 [10:1577:2365] NKikimrLocal.TEvBootTablet Info { TabletID: 72075186224037888 Channels { Channel: 0 ChannelType: 0 History { FromGeneration: 0 GroupID: 2181038080 } StoragePool: "/Root:test" } Channels { Channel: 1 ChannelType: 0 History { FromGeneration: 0 GroupID: 2181038080 } StoragePool: "/Root:test" } Channels { Channel: 2 ChannelType: 0 History { FromGeneration: 0 GroupID: 2181038080 } StoragePool: "/Root:test" } TabletType: PersQueue Version: 1 TenantIdOwner: 72057594046644480 TenantIdLocalId: 1 } SuggestedGeneration: 1 BootMode: BOOT_MODE_LEADER FollowerId: 0} 2025-03-31T16:16:07.247437Z node 8 :HIVE TRACE: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected (duplicate), NodeId 10 Cookie 72075186224037888 2025-03-31T16:16:07.344418Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus, TabletId: 72075186224037888 2025-03-31T16:16:07.344548Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletStatus::Execute for tablet PersQueue.72075186224037888.Leader.1 status 0 generation 1 follower 0 from local [10:1577:2365] 2025-03-31T16:16:07.344626Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Tablet(PersQueue.72075186224037888.Leader.1) VolatileState: Starting -> Running (Node 10) 
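The Hive lines around here trace one PersQueue tablet through its VolatileState lifecycle, Booting -> Starting -> Running, and back through Stopped -> Booting when the hosting node is killed, with UpdateTotalResources toggling the tablet's {Memory: 1048576} against the node on each transition. A sketch of that bookkeeping, inferred from the log rather than taken from Hive's real accounting:

    #include <cstdint>
    #include <initializer_list>
    #include <iostream>

    // States and per-tablet memory accounting as seen in the entries above.
    enum class EVolatileState { Booting, Starting, Running, Stopped };

    struct TNodeResources { uint64_t Memory = 0; };

    void OnTransition(EVolatileState to, TNodeResources& node) {
        constexpr uint64_t kTabletMem = 1048576;  // value from the log
        if (to == EVolatileState::Starting || to == EVolatileState::Running)
            node.Memory = kTabletMem;   // tablet counted against the node
        else
            node.Memory = 0;            // Stopped/Booting: resources released
    }

    int main() {
        TNodeResources node;
        for (auto s : {EVolatileState::Starting, EVolatileState::Running, EVolatileState::Stopped}) {
            OnTransition(s, node);
            std::cout << "Memory: " << node.Memory << "\n";
        }
    }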
2025-03-31T16:16:07.344681Z node 8 :HIVE TRACE: HIVE#72057594037968897 Node(10, (0,1048576,0,0)->(0,0,0,0)) 2025-03-31T16:16:07.344813Z node 8 :HIVE TRACE: HIVE#72057594037968897 UpdateTotalResources: ObjectId (72057594046578946,0): {Memory: 1048576} -> {} 2025-03-31T16:16:07.344919Z node 8 :HIVE TRACE: HIVE#72057594037968897 UpdateTotalResources: Type PersQueue: {Memory: 1048576} -> {} 2025-03-31T16:16:07.344976Z node 8 :HIVE TRACE: HIVE#72057594037968897 Node(10, (0,0,0,0)->(0,1048576,0,0)) 2025-03-31T16:16:07.345068Z node 8 :HIVE TRACE: HIVE#72057594037968897 UpdateTotalResources: ObjectId (72057594046578946,0): {} -> {Memory: 1048576} 2025-03-31T16:16:07.345187Z node 8 :HIVE TRACE: HIVE#72057594037968897 UpdateTotalResources: Type PersQueue: {} -> {Memory: 1048576} 2025-03-31T16:16:07.345298Z node 8 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2025-03-31T16:16:07.345335Z node 8 :HIVE TRACE: HIVE#72057594037968897 ProcessBootQueue - sending 2025-03-31T16:16:07.345675Z node 8 :HIVE TRACE: HIVE#72057594037968897 ProcessBootQueue - executing 2025-03-31T16:16:07.345739Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Execute 2025-03-31T16:16:07.345783Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessBootQueue (size: 0) 2025-03-31T16:16:07.345840Z node 8 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2025-03-31T16:16:07.377485Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletStatus::Complete TabletId: 72075186224037888 SideEffects: {Notifications: 0x10040207 [8:1335:2700] {EvTabletCreationResult Status: OK TabletID: 72075186224037888}} 2025-03-31T16:16:07.377565Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Complete 2025-03-31T16:16:10.780135Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvStatus for Node 10: Status: 2 2025-03-31T16:16:10.780277Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStatus(10)::Execute 2025-03-31T16:16:10.780341Z node 8 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 10 2025-03-31T16:16:10.780480Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStatus(10)::Complete 2025-03-31T16:16:10.780701Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxRestartTablet(PersQueue.72075186224037888.Leader.1)::Execute 2025-03-31T16:16:10.780852Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Tablet(PersQueue.72075186224037888.Leader.1) VolatileState: Running -> Stopped (Node 10) 2025-03-31T16:16:10.780919Z node 8 :HIVE TRACE: HIVE#72057594037968897 Node(10, (0,1048576,0,0)->(0,0,0,0)) 2025-03-31T16:16:10.781067Z node 8 :HIVE TRACE: HIVE#72057594037968897 UpdateTotalResources: ObjectId (72057594046578946,0): {Memory: 1048576} -> {} 2025-03-31T16:16:10.781182Z node 8 :HIVE TRACE: HIVE#72057594037968897 UpdateTotalResources: Type PersQueue: {Memory: 1048576} -> {} 2025-03-31T16:16:10.781269Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Sending TEvStopTablet(PersQueue.72075186224037888.Leader.1 gen 1) to node 10 2025-03-31T16:16:10.781348Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Tablet(PersQueue.72075186224037888.Leader.1) VolatileState: Stopped -> Booting 2025-03-31T16:16:10.781411Z node 8 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (1) 2025-03-31T16:16:10.781461Z node 8 :HIVE TRACE: HIVE#72057594037968897 ProcessBootQueue - sending 2025-03-31T16:16:10.782503Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxKillNode(10)::Execute 2025-03-31T16:16:10.782640Z node 8 :HIVE 
WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-03-31T16:16:10.782692Z node 8 :HIVE TRACE: Node(10) DeregisterInDomains (72057594046644480:1) : 1 -> 0 2025-03-31T16:16:10.782752Z node 8 :HIVE DEBUG: HIVE#72057594037968897 RemoveRegisteredDataCentersNode(3, 10) 2025-03-31T16:16:10.782810Z node 8 :HIVE TRACE: HIVE#72057594037968897 THive::TTxKillNode - killing pipe server [8:1642:2711] 2025-03-31T16:16:10.782877Z node 8 :HIVE DEBUG: HIVE#72057594037968897 TryToDeleteNode(10): waiting 3600.000000s 2025-03-31T16:16:10.784532Z node 8 :HIVE TRACE: HIVE#72057594037968897 Handle TEvTabletPipe::TEvServerDisconnected([10:1578:2365]) [8:1642:2711] 2025-03-31T16:16:10.788234Z node 8 :HIVE TRACE: HIVE#72057594037968897 Handle TEvTabletPipe::TEvServerConnected([8:2039:2733]) [8:2040:2738] 2025-03-31T16:16:10.796051Z node 8 :HIVE TRACE: HIVE#72057594037968897 Handle TEvTabletPipe::TEvServerDisconnected([8:2039:2733]) [8:2040:2738] 2025-03-31T16:16:10.800278Z node 8 :HIVE TRACE: HIVE#72057594037968897 Handle TEvTabletPipe::TEvServerConnected([11:2012:2364]) [8:2074:2741] 2025-03-31T16:16:10.812149Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvRegisterNode from [11:2011:2364] HiveId: 72057594037968897 ServicedDomains { SchemeShard: 72057594046644480 PathId: 1 } TabletAvailability { Type: Mediator Priority: 0 } TabletAvailability { Type: Dummy Priority: 0 } TabletAvailability { Type: KeyValue Priority: 0 } TabletAvailability { Type: Coordinator Priority: 0 } TabletAvailability { Type: Hive Priority: 0 } TabletAvailability { Type: SchemeShard Priority: 0 } TabletAvailability { Type: DataShard Priority: 0 } TabletAvailability { Type: PersQueue Priority: 0 } TabletAvailability { Type: PersQueueReadBalancer Priority: 0 } TabletAvailability { Type: Kesus Priority: 0 } TabletAvailability { Type: SysViewProcessor Priority: 0 } TabletAvailability { Type: ColumnShard Priority: 0 } TabletAvailability { Type: SequenceShard Priority: 0 } TabletAvailability { Type: ReplicationController Priority: 0 } TabletAvailability { Type: StatisticsAggregator Priority: 0 } 2025-03-31T16:16:10.812296Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxRegisterNode(11)::Execute 2025-03-31T16:16:10.812448Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:10.812505Z node 8 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2025-03-31T16:16:10.812562Z node 8 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (1) 2025-03-31T16:16:10.812611Z node 8 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2025-03-31T16:16:10.812658Z node 8 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (1) 2025-03-31T16:16:10.812771Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:10.813191Z node 8 :HIVE DEBUG: HIVE#72057594037968897 TEvInterconnect::TEvNodeInfo NodeId 11 Location DataCenter: "4" Module: "4" Rack: "4" Unit: "4" self_check_result: EMERGENCY issue_log { id: "RED-f489-1231c6b1" status: RED message: "Database has compute issues" location { database { name: "/Root" } } reason: "RED-6fa7-1231c6b1" reason: "YELLOW-1ba8-1231c6b1" type: "DATABASE" level: 1 } issue_log { id: "RED-6fa7-1231c6b1" status: RED message: "Compute has issues with tablets" location { database { name: "/Root" } } reason: "RED-e5e3-1231c6b1-PersQueue" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-1ba8-1231c6b1" status: YELLOW message: "Compute is 
overloaded" location { database { name: "/Root" } } reason: "YELLOW-e9e2-1231c6b1-10" reason: "YELLOW-e9e2-1231c6b1-11" reason: "YELLOW-e9e2-1231c6b1-8" reason: "YELLOW-e9e2-1231c6b1-9" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-e9e2-1231c6b1-8" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 8 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-e9e2-1231c6b1-9" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 9 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-e9e2-1231c6b1-10" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 10 host: "::1" port: 12003 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-e9e2-1231c6b1-11" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 11 host: "::1" port: 12004 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "RED-e5e3-1231c6b1-PersQueue" status: RED message: "Tablets are dead" location { compute { tablet { type: "PersQueue" id: "72075186224037888" count: 1 } } database { name: "/Root" } node { } } type: "TABLET" level: 4 } location { id: 8 host: "::1" port: 12001 } >> TFlatTest::SelectRangeBothLimit [GOOD] >> TLocksTest::Range_BrokenLock2 [GOOD] >> TLocksTest::Range_BrokenLock3 >> TLocksFatTest::PointSetBreak [GOOD] >> TLocksFatTest::LocksLimit >> THealthCheckTest::TestTabletIsDead [GOOD] >> TFlatTest::SelectRangeNullArgs3 [GOOD] >> TFlatTest::SelectRangeNullArgs4 >> TFlatTest::GetTabletCounters [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::ProtobufUnderLimitFor100LargeVdisksIssues [GOOD] Test command err: 2025-03-31T16:15:41.427364Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:700:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:41.427646Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:41.427744Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:15:41.428875Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:697:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:41.429108Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:41.429173Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d5f/r3tmp/tmpERfxcZ/pdisk_1.dat 2025-03-31T16:15:41.781340Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22970, node 1 TClient is connected to server localhost:17497 2025-03-31T16:15:42.120913Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:15:42.120955Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:15:42.120978Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:15:42.121175Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:15:48.175605Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:284:2217], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:48.176007Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:15:48.176126Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:48.176893Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:700:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:48.177089Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:48.177175Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d5f/r3tmp/tmpdyMS2c/pdisk_1.dat 2025-03-31T16:15:48.426948Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19646, node 3 TClient is connected to server localhost:12851 2025-03-31T16:15:48.818879Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:15:48.818955Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:15:48.819009Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:15:48.819738Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration self_check_result: EMERGENCY issue_log { id: "RED-70fb-1231c6b1" status: RED message: "Database has multiple issues" location { database { name: "/Root" } } reason: "RED-d6d1-1231c6b1" reason: "YELLOW-1ba8-1231c6b1" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-1ba8-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-e9e2-1231c6b1-3" reason: "YELLOW-e9e2-1231c6b1-4" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-e9e2-1231c6b1-3" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 3 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-e9e2-1231c6b1-4" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 4 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "RED-d6d1-1231c6b1" status: RED message: "Storage failed" location { database { name: "/Root" } } reason: "RED-258e-1231c6b1-f7549920" type: "STORAGE" level: 2 } issue_log { id: "RED-258e-1231c6b1-f7549920" status: RED message: "Pool failed" location { storage { pool { name: "/Root:test" } } database { name: "/Root" } } reason: "RED-819b-1231c6b1-2147483648" type: "STORAGE_POOL" level: 3 } issue_log { id: "RED-99d2-1231c6b1-3-2147483648-3-55-0-55" status: RED message: "VDisks have space issue" location { storage { node { id: 3 host: "::1" port: 12001 } pool { name: "/Root:test" group { vdisk { id: "2147483648-3-55-0-55" id: "2147483648-3-56-0-56" id: "2147483648-3-57-0-57" } } } } database { name: "/Root" } } reason: "RED-8ac8-3-3-42" reason: "RED-8ac8-3-3-43" reason: "RED-8ac8-3-3-44" type: "VDISK" level: 5 listed: 3 count: 3 } issue_log { id: "RED-8ac8-3-3-42" status: RED message: "Available size is less than 6%" location { storage { node { id: 3 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "3-42" path: "/home/runner/.ya/build/build_root/0l5c/000d5f/r3tmp/tmpdyMS2c/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "RED-8ac8-3-3-43" status: RED message: "Available size is less than 6%" location { storage { node { id: 3 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "3-43" path: 
"/home/runner/.ya/build/build_root/0l5c/000d5f/r3tmp/tmpdyMS2c/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "RED-8ac8-3-3-44" status: RED message: "Available size is less than 6%" location { storage { node { id: 3 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "3-44" path: "/home/runner/.ya/build/build_root/0l5c/000d5f/r3tmp/tmpdyMS2c/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "RED-819b-1231c6b1-2147483648" status: RED message: "Group failed" location { storage { pool { name: "/Root:test" group { id: "2147483648" } } } database { name: "/Root" } } reason: "RED-99d2-1231c6b1-3-2147483648-3-55-0-55" type: "STORAGE_GROUP" level: 4 } location { id: 3 host: "::1" port: 12001 } 2025-03-31T16:15:55.536028Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:698:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:55.536529Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:55.536745Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:15:55.538504Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:695:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:55.538883Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:55.539034Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d5f/r3tmp/tmpPHpPAF/pdisk_1.dat 2025-03-31T16:15:55.879228Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3149, node 5 TClient is connected to server localhost:22775 2025-03-31T16:15:56.263568Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:15:56.263634Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:15:56.263669Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:15:56.264117Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:16:03.444642Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:413:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:16:03.445109Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:16:03.445746Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:16:03.446624Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:703:2356], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:16:03.446866Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:16:03.446964Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d5f/r3tmp/tmpsTcdbr/pdisk_1.dat 2025-03-31T16:16:03.745466Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28384, node 7 TClient is connected to server localhost:8722 2025-03-31T16:16:04.151958Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:16:04.152023Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:16:04.152058Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:16:04.152286Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:16:11.171434Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:284:2217], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:16:11.171851Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:16:11.172060Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:16:11.172529Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [10:700:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:16:11.172810Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:16:11.172910Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d5f/r3tmp/tmpCYBJk7/pdisk_1.dat 2025-03-31T16:16:11.496544Z node 9 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27267, node 9 TClient is connected to server localhost:21538 2025-03-31T16:16:11.995602Z node 9 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:16:11.995690Z node 9 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:16:11.995754Z node 9 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:16:11.996737Z node 9 :NET_CLASSIFIER ERROR: got bad distributable configuration >> TFlatTest::WriteSplitAndRead [GOOD] >> TFlatTest::RejectByPerShardReadSize [GOOD] >> TFlatTest::RejectByPerRequestSize >> TFlatTest::Init [GOOD] >> TFlatTest::LargeDatashardReply >> THealthCheckTest::Issues100VCardMerging [GOOD] >> THealthCheckTest::Issues100Groups100VCardMerging >> TCancelTx::ImmediateReadOnly [GOOD] >> TFlatTest::ReadOnlyMode ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeBothLimit [GOOD] Test command err: 2025-03-31T16:16:07.333555Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008195284472813:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:07.333690Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cfe/r3tmp/tmpn1Mj9S/pdisk_1.dat 2025-03-31T16:16:07.611726Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:07.691805Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:07.691968Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:07.693590Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19048 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:07.834143Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-03-31T16:16:07.867722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... insert finished 10290 usec 9565 usec 9123 usec 10314 usec 9646 usec 9424 usec 9314 usec 9656 usec 9193 usec 9747 usec 2025-03-31T16:16:10.505132Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008204932220824:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:10.505275Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cfe/r3tmp/tmpdBphMY/pdisk_1.dat 2025-03-31T16:16:10.591476Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:10.636806Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:10.636896Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:10.638411Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18803 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
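
The WaitRootIsUp loop above polls TClient::Ls until the root path reports as created; the success criterion is visible in the response dumps (Status: 1, StatusCode: SUCCESS, CreateFinished: true, versus Status: 128 / PATH_NOT_EXIST later in this log). A minimal standalone sketch of that readiness predicate, using a simplified stand-in struct rather than YDB's generated protobuf response:

#include <iostream>
#include <string>

// Illustrative stand-in for the handful of Ls-response fields the
// readiness check needs; the real response is a generated protobuf.
struct TLsResponse {
    int Status = 0;              // 1 in the successful dumps above
    std::string StatusCode;      // "SUCCESS" or "PATH_NOT_EXIST"
    bool CreateFinished = false; // from PathDescription.Self
};

// WaitRootIsUp-style predicate: the path exists and is fully created.
bool PathIsUp(const TLsResponse& r) {
    return r.Status == 1 && r.StatusCode == "SUCCESS" && r.CreateFinished;
}

int main() {
    TLsResponse up{1, "SUCCESS", true};
    TLsResponse missing{128, "PATH_NOT_EXIST", false};
    std::cout << PathIsUp(up) << ' ' << PathIsUp(missing) << '\n'; // prints: 1 0
}
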
2025-03-31T16:16:10.797162Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:10.817234Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... >> TLocksTest::BrokenLockErase ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::TestTabletIsDead [GOOD] Test command err: 2025-03-31T16:15:47.598375Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:700:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:47.598729Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:47.598839Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:15:47.600211Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:697:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:47.600501Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:47.600602Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d59/r3tmp/tmpDXvlkQ/pdisk_1.dat 2025-03-31T16:15:47.924510Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25406, node 1 TClient is connected to server localhost:7193 2025-03-31T16:15:48.329952Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:15:48.330022Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:15:48.330056Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:15:48.330390Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:15:54.465889Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:284:2217], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:54.466374Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:15:54.466516Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:54.467518Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:700:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:54.467795Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:54.467922Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d59/r3tmp/tmpMQ9iA3/pdisk_1.dat 2025-03-31T16:15:54.727993Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17626, node 3 TClient is connected to server localhost:28740 2025-03-31T16:15:55.050361Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:15:55.050438Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:15:55.050494Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:15:55.051035Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration self_check_result: DEGRADED issue_log { id: "YELLOW-70fb-1231c6b1" status: YELLOW message: "Database has multiple issues" location { database { name: "/Root" } } reason: "YELLOW-1ba8-1231c6b1" reason: "YELLOW-5321-1231c6b1" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-1ba8-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-e9e2-1231c6b1-3" reason: "YELLOW-e9e2-1231c6b1-4" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-e9e2-1231c6b1-3" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 3 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-e9e2-1231c6b1-4" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 4 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-5321-1231c6b1" status: YELLOW message: "Storage degraded" location { database { name: "/Root" } } reason: "YELLOW-595f-1231c6b1-f7549920" type: "STORAGE" level: 2 } issue_log { id: "YELLOW-595f-1231c6b1-f7549920" status: YELLOW message: "Pool degraded" location { storage { pool { name: "/Root:test" } } database { name: "/Root" } } reason: "YELLOW-ef3e-1231c6b1-2147483648" type: "STORAGE_POOL" level: 3 } issue_log { id: "YELLOW-99d2-1231c6b1-3-2147483648-3-55-0-55" status: YELLOW message: "VDisks have space issue" location { storage { node { id: 3 host: "::1" port: 12001 } pool { name: "/Root:test" group { vdisk { id: "2147483648-3-55-0-55" id: "2147483648-3-56-0-56" id: "2147483648-3-57-0-57" } } } } database { name: "/Root" } } reason: "YELLOW-e463-3-3-42" reason: "YELLOW-e463-3-3-43" reason: "YELLOW-e463-3-3-44" type: "VDISK" level: 5 listed: 3 count: 3 } issue_log { id: "YELLOW-e463-3-3-42" status: YELLOW message: "Available size is less than 12%" location { storage { node { id: 3 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "3-42" path: "/home/runner/.ya/build/build_root/0l5c/000d59/r3tmp/tmpMQ9iA3/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "YELLOW-e463-3-3-43" status: YELLOW message: "Available size is less than 12%" location { storage { node { id: 3 host: "::1" port: 12001 } pool { group { vdisk 
{ pdisk { id: "3-43" path: "/home/runner/.ya/build/build_root/0l5c/000d59/r3tmp/tmpMQ9iA3/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "YELLOW-e463-3-3-44" status: YELLOW message: "Available size is less than 12%" location { storage { node { id: 3 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "3-44" path: "/home/runner/.ya/build/build_root/0l5c/000d59/r3tmp/tmpMQ9iA3/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "YELLOW-ef3e-1231c6b1-2147483648" status: YELLOW message: "Group degraded" location { storage { pool { name: "/Root:test" group { id: "2147483648" } } } database { name: "/Root" } } reason: "YELLOW-99d2-1231c6b1-3-2147483648-3-55-0-55" type: "STORAGE_GROUP" level: 4 } location { id: 3 host: "::1" port: 12001 } 2025-03-31T16:16:01.257290Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:698:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:16:01.257661Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:16:01.257876Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:16:01.259175Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:695:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:16:01.259428Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:16:01.259517Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d59/r3tmp/tmp1rDrmE/pdisk_1.dat 2025-03-31T16:16:01.500182Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27316, node 5 TClient is connected to server localhost:19888 2025-03-31T16:16:01.947026Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:16:01.947120Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:16:01.947160Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:16:01.947738Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration self_check_result: DEGRADED issue_log { id: "YELLOW-70fb-1231c6b1" status: YELLOW message: "Database has multiple issues" location { database { name: "/Root" } } reason: "YELLOW-1ba8-1231c6b1" reason: "YELLOW-5321-1231c6b1" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-1ba8-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-e9e2-1231c6b1-5" reason: "YELLOW-e9e2-1231c6b1-6" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-e9e2-1231c6b1-5" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 5 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-e9e2-1231c6b1-6" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 6 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-5321-1231c6b1" status: YELLOW message: "Storage degraded" location { database { name: "/Root" } } reason: "YELLOW-595f-1231c6b1-f7549920" type: "STORAGE" level: 2 } issue_log { id: "YELLOW-595f-1231c6b1-f7549920" status: YELLOW message: "Pool degraded" location { storage { pool { name: "/Root:test" } } database { name: "/Root" } } reason: "YELLOW-ef3e-1231c6b1-2147483648" type: "STORAGE_POOL" level: 3 } issue_log { id: "RED-a594-5-5-42" status: RED message: "PDisk state is FAULTY" location { storage { node { id: 5 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "5-42" path: "/home/runner/.ya/build/build_root/0l5c/000d59/r3tmp/tmp1rDrmE/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "RED-a594-5-5-43" status: RED message: "PDisk state is FAULTY" location { storage { node { id: 5 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "5-43" path: "/home/runner/.ya/build/build_root/0l5c/000d59/r3tmp/tmp1rDrmE/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "RED-a594-5-5-44" status: RED message: "PDisk state is FAULTY" location { storage { node { id: 5 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "5-44" path: "/home/runner/.ya/build/build_root/0l5c/000d59/r3tmp/tmp1rDrmE/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "YELLOW-ef3e-1231c6b1-2147483648" 
status: YELLOW message: "Group degraded" location { storage { pool { name: "/Root:test" group { id: "2147483648" } } } database { name: "/Root" } } type: "STORAGE_GROUP" level: 4 } location { id: 5 host: "::1" port: 12001 } 2025-03-31T16:16:08.999416Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:773:2414], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:16:09.000046Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:16:09.000337Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:16:09.000793Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:770:2356], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:16:09.001175Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:16:09.001332Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d59/r3tmp/tmp9UWENR/pdisk_1.dat 2025-03-31T16:16:09.353124Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17342, node 7 TClient is connected to server localhost:19374 2025-03-31T16:16:12.972231Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:16:12.972302Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:16:12.972341Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:16:12.972677Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:16:12.989636Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:12.989784Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:13.019118Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 9 Cookie 9 2025-03-31T16:16:13.019810Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:16:13.183855Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 9 2025-03-31T16:16:13.184474Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connected -> Disconnected self_check_result: EMERGENCY issue_log { id: "RED-f489-1231c6b1" status: RED message: "Database has compute issues" location { database { name: "/Root" } } reason: "RED-6fa7-1231c6b1" reason: "YELLOW-1ba8-1231c6b1" type: "DATABASE" level: 1 } issue_log { id: "RED-6fa7-1231c6b1" status: RED message: "Compute has issues with tablets" location { database { name: "/Root" } } reason: "RED-e5e3-1231c6b1-PersQueue" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-1ba8-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-e9e2-1231c6b1-7" reason: "YELLOW-e9e2-1231c6b1-8" reason: "YELLOW-e9e2-1231c6b1-9" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-e9e2-1231c6b1-7" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 7 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-e9e2-1231c6b1-8" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 8 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-e9e2-1231c6b1-9" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 9 host: "::1" port: 12003 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "RED-e5e3-1231c6b1-PersQueue" status: RED message: "Tablets are dead" location { compute { tablet { type: "PersQueue" id: "72075186224037888" count: 1 } } 
database { name: "/Root" } node { } } type: "TABLET" level: 4 } location { id: 7 host: "::1" port: 12001 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::GetTabletCounters [GOOD] Test command err: 2025-03-31T16:16:08.622256Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008197329273820:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:08.622951Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cfb/r3tmp/tmp4cacOS/pdisk_1.dat 2025-03-31T16:16:08.890431Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:08.957213Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:08.957422Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:08.959023Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29119 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:09.182648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:09.199993Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:09.213434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
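
In the self_check_result dumps above, each issue_log entry points at its children through reason fields, and a parent's status is the worst status among its own and its reasons: a single RED "Tablets are dead" issue drives COMPUTE and then DATABASE to RED, while LOAD_AVERAGE issues alone leave the compute branch YELLOW. A small standalone sketch of that worst-of-reasons propagation over a simplified issue tree (this mirrors behavior observable in the dumps, not YDB's actual health-check implementation):

#include <algorithm>
#include <iostream>
#include <string>
#include <vector>

// Severity order matching the statuses seen in the dumps above.
enum class EStatus { Green = 0, Yellow = 1, Red = 2 };

struct Issue {
    std::string Id;
    EStatus Own = EStatus::Green;  // severity of the issue itself
    std::vector<Issue> Reasons;    // children referenced via "reason:"
};

// A parent reports the worst status found in itself or any reason.
EStatus Aggregate(const Issue& issue) {
    EStatus worst = issue.Own;
    for (const Issue& r : issue.Reasons)
        worst = std::max(worst, Aggregate(r));
    return worst;
}

int main() {
    Issue db{"DATABASE", EStatus::Green, {
        {"COMPUTE", EStatus::Green, {
            {"TABLET dead", EStatus::Red, {}},
            {"LOAD_AVERAGE", EStatus::Yellow, {}},
        }},
    }};
    std::cout << static_cast<int>(Aggregate(db)) << '\n'; // prints: 2 (Red)
}
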
2025-03-31T16:16:11.469157Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008210452346773:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:11.469242Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cfb/r3tmp/tmpFRfMZd/pdisk_1.dat 2025-03-31T16:16:11.577941Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:11.616096Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:11.616223Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:11.617626Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2982 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:11.779404Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:11.803944Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743437771887 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "unused004" Type: "Float" TypeId: 33 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name... 
(TRUNCATED) >> TFlatTest::MergeEmptyAndWrite [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::WriteSplitAndRead [GOOD] Test command err: 2025-03-31T16:16:08.707134Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008196686870479:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:08.707289Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cfa/r3tmp/tmpTZ17Vt/pdisk_1.dat 2025-03-31T16:16:08.976771Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:09.069336Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:09.069490Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:09.071590Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22142 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:09.256469Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:09.286362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:09.443637Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-03-31T16:16:09.447873Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-03-31T16:16:09.473903Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2025-03-31T16:16:09.478460Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 2025-03-31T16:16:09.500928Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:16:09.502605Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:16:09.502678Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:16:09.504983Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.21, eph 3} end=0, 4 blobs 8r (max 9), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (3250 2180 6413)b }, ecr=1.000 2025-03-31T16:16:09.505465Z node 1 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-31T16:16:09.505500Z node 1 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 0, front# 0 2025-03-31T16:16:09.505815Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:16:09.508211Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:16:09.508276Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:16:09.511077Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-31T16:16:09.512565Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-03-31T16:16:09.512634Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-31T16:16:09.514838Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.21, eph 3} end=0, 4 blobs 9r (max 9), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (4073 2983 5183)b }, ecr=1.000 2025-03-31T16:16:09.515331Z node 1 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037889, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-31T16:16:09.515372Z node 1 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037889, table# 1001, finished edge# 0, front# 0 2025-03-31T16:16:09.515621Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-31T16:16:09.517855Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-03-31T16:16:09.517911Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 
PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743437769409 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "unused004" Type: "Float" TypeId: 33 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name... (TRUNCATED) 2025-03-31T16:16:09.526553Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:16:09.528528Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 released its data 2025-03-31T16:16:09.528759Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-31T16:16:09.530125Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 released its data 2025-03-31T16:16:09.530274Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:16:09.530897Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 at 72075186224037888 restored its data 2025-03-31T16:16:09.531797Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 released its data 2025-03-31T16:16:09.531938Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-31T16:16:09.532406Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 at 72075186224037889 restored its data 2025-03-31T16:16:09.533263Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 released its data 2025-03-31T16:16:09.533415Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:16:09.533956Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 at 72075186224037888 restored its data 2025-03-31T16:16:09.534640Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 released its data 2025-03-31T16:16:09.534735Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-31T16:16:09.535164Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 at 72075186224037889 restored its data 2025-03-31T16:16:09.535841Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 released its data 2025-03-31T16:16:09.535938Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:16:09.536399Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 at 72075186224037888 restored its data 2025-03-31T16:16:09.537088Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 released its data 2025-03-31T16:16:09.537177Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-31T16:16:09.537685Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 at 72075186224037889 restored its data 2025-03-31T16:16:09.538435Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 released its data 2025-03-31T16:16:09.538518Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:16:09.538928Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 at 72075186224037888 restored its data 2025-03-31T16:16:09.539614Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 released its data 2025-03-31T16:16:09.539724Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-31T16:16:09.540216Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 at 72075186224037889 restored its data 
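
The alternating "released its data" / "restored its data" lines above show a single large transaction (281474976710680) repeatedly paging its volatile state out while execution ping-pongs between shards 72075186224037888 and 72075186224037889, then reloading it on the next attempt. A toy model of that release/restore cycle (illustrative only; a real datashard persists the released data, this sketch just enforces the state discipline):

#include <cassert>
#include <cstdint>
#include <iostream>

// Toy model of a transaction whose volatile data is paged out between
// execution attempts, as in the release/restore lines above.
class TxData {
public:
    explicit TxData(uint64_t txId) : TxId(txId) {}

    void Release() {            // "tx ... released its data"
        assert(Loaded);
        Loaded = false;
        std::cout << "tx " << TxId << " released its data\n";
    }
    void Restore() {            // "tx ... restored its data"
        assert(!Loaded);
        Loaded = true;
        std::cout << "tx " << TxId << " restored its data\n";
    }
    bool IsLoaded() const { return Loaded; }

private:
    uint64_t TxId;
    bool Loaded = true;         // data is present right after propose
};

int main() {
    TxData tx(281474976710680ULL);
    for (int attempt = 0; attempt < 3; ++attempt) {
        tx.Release();           // shard yields memory between attempts
        tx.Restore();           // the next execution attempt reloads it
    }
    std::cout << tx.IsLoaded() << '\n'; // prints: 1
}
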
2025-03-31T16:16:09.540935Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 released its data 2025-03-31T16:16:09.541036Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:16:09.541441Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 at 72075186224037888 restored its data 2025-03-31T16:16:09.542250Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 released its data 2025-03-31T16:16:09.542349Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-31T16:16:09.542764Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 at 72075186224037889 restored its data 2025-03-31T16:16:09.543504Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 released its data 2025-03-31T16:16:09.543614Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:16:09.544105Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 at 72075186224037888 restored its data 2025-03-31T16:16:09.544759Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 released its data 2025-03-31T16:16:09.544854Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-31T16:16:09.545345Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 at 72075186224037889 restored its data 2025-03-31T16:16:09.546072Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 released its data 2025-03-31T16:16:09.546196Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:16:09.546848Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 at 72075186224037888 restored its data 2025-03-31T16:16:09.547604Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 released its data 2025-03-31T16:16:09.547697Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-31T16:16:09.548210Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 at 72075186224037889 restored its data 2025-03-31T16:16:09.548907Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 released its data 2025-03-31T16:16:09.549068Z node 1 :TX_DATASHAR ... 
ait to activation from: TClient::Ls request: /dc-1/Dir/TableOld 2025-03-31T16:16:12.518978Z node 2 :TX_DATASHARD INFO: 72075186224037890 Initiating switch from PreOffline to Offline state TClient::Ls response: Status: 128 StatusCode: PATH_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Path not found" 2025-03-31T16:16:12.520168Z node 2 :TX_DATASHARD INFO: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2025-03-31T16:16:12.520269Z node 2 :TX_DATASHARD DEBUG: 72075186224037892 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-03-31T16:16:12.520330Z node 2 :TX_DATASHARD INFO: 72075186224037892 Initiating switch from PreOffline to Offline state 2025-03-31T16:16:12.521706Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-03-31T16:16:12.521772Z node 2 :TX_DATASHARD INFO: 72075186224037889 Initiating switch from PreOffline to Offline state 2025-03-31T16:16:12.522890Z node 2 :TX_DATASHARD DEBUG: 72075186224037891 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-03-31T16:16:12.522949Z node 2 :TX_DATASHARD INFO: 72075186224037891 Initiating switch from PreOffline to Offline state 2025-03-31T16:16:12.523923Z node 2 :TX_DATASHARD INFO: 72075186224037892 Reporting state Offline to schemeshard 72057594046644480 2025-03-31T16:16:12.523994Z node 2 :TX_DATASHARD INFO: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2025-03-31T16:16:12.524040Z node 2 :TX_DATASHARD INFO: 72075186224037891 Reporting state Offline to schemeshard 72057594046644480 2025-03-31T16:16:12.524180Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:7488008215118610085:2403], serverId# [2:7488008215118610098:2704], sessionId# [0:0:0] 2025-03-31T16:16:12.524197Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7488008215118609500 RawX2: 4503608217307386 } TabletId: 72075186224037888 State: 4 2025-03-31T16:16:12.524245Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037888, state: Offline, at schemeshard: 72057594046644480 2025-03-31T16:16:12.524476Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7488008215118609854 RawX2: 4503608217307457 } TabletId: 72075186224037890 State: 4 2025-03-31T16:16:12.524517Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-03-31T16:16:12.524665Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7488008215118609856 RawX2: 4503608217307459 } TabletId: 72075186224037892 State: 4 2025-03-31T16:16:12.524703Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037892, state: Offline, at schemeshard: 72057594046644480 
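
The PreOffline records above print four drain counters (HasSharedBobs, SchemaOperations, OutReadSets, ChangesQueue size) and initiate the switch to Offline only once all of them are empty. A minimal sketch of that gate condition; the field names below are shortened, hypothetical stand-ins inferred from the log lines, not the datashard's real members:

#include <iostream>

// Drain counters printed by the PreOffline log lines above.
struct PreOfflineState {
    int SharedBlobs = 0;        // "HasSharedBobs" in the log
    int SchemaOperations = 0;   // "SchemaOperations: [ ]"
    int OutReadSets = 0;        // "OutReadSets count: 0"
    int ChangesQueueSize = 0;   // "ChangesQueue size: 0"
};

// The switch to Offline is gated on every counter being drained.
bool CanGoOffline(const PreOfflineState& s) {
    return s.SharedBlobs == 0 && s.SchemaOperations == 0 &&
           s.OutReadSets == 0 && s.ChangesQueueSize == 0;
}

int main() {
    PreOfflineState drained{};
    PreOfflineState busy{0, 1, 0, 0};
    std::cout << CanGoOffline(drained) << ' ' << CanGoOffline(busy) << '\n'; // prints: 1 0
}
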
2025-03-31T16:16:12.524852Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7488008215118609524 RawX2: 4503608217307387 } TabletId: 72075186224037889 State: 4 2025-03-31T16:16:12.524878Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2025-03-31T16:16:12.524996Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7488008215118609855 RawX2: 4503608217307458 } TabletId: 72075186224037891 State: 4 2025-03-31T16:16:12.525024Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2025-03-31T16:16:12.525218Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037888 state Offline 2025-03-31T16:16:12.525223Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:1 hive 72057594037968897 at ss 72057594046644480 2025-03-31T16:16:12.525332Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-03-31T16:16:12.525337Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2025-03-31T16:16:12.525370Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037892 state Offline 2025-03-31T16:16:12.525373Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:5 hive 72057594037968897 at ss 72057594046644480 2025-03-31T16:16:12.525414Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2025-03-31T16:16:12.525417Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2025-03-31T16:16:12.525446Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2025-03-31T16:16:12.525451Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-03-31T16:16:12.526964Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037888 reason = ReasonStop 2025-03-31T16:16:12.527000Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037890 reason = ReasonStop 2025-03-31T16:16:12.527014Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037892 reason = ReasonStop 2025-03-31T16:16:12.527027Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037889 reason = ReasonStop 2025-03-31T16:16:12.527040Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037891 reason = ReasonStop 2025-03-31T16:16:12.527242Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2025-03-31T16:16:12.527364Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037888 2025-03-31T16:16:12.527436Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037888 2025-03-31T16:16:12.527453Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 5 2025-03-31T16:16:12.527623Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 
3, at schemeshard: 72057594046644480 2025-03-31T16:16:12.527779Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4 2025-03-31T16:16:12.527910Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2025-03-31T16:16:12.527938Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2025-03-31T16:16:12.528024Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-03-31T16:16:12.528116Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-03-31T16:16:12.528219Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-03-31T16:16:12.528341Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-03-31T16:16:12.528440Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-03-31T16:16:12.528594Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-03-31T16:16:12.528608Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-03-31T16:16:12.528640Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-03-31T16:16:12.528775Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037890 2025-03-31T16:16:12.528796Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2025-03-31T16:16:12.528806Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037892 not found 2025-03-31T16:16:12.528812Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037890 2025-03-31T16:16:12.528814Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2025-03-31T16:16:12.528824Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found 2025-03-31T16:16:12.529605Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:1 2025-03-31T16:16:12.529622Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2025-03-31T16:16:12.529654Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-03-31T16:16:12.529661Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-03-31T16:16:12.529678Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:5 2025-03-31T16:16:12.529688Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to 
deleted shardIdx 72057594046644480:5 tabletId 72075186224037892 2025-03-31T16:16:12.529878Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037892 2025-03-31T16:16:12.529906Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2 2025-03-31T16:16:12.529912Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037892 2025-03-31T16:16:12.529915Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-03-31T16:16:12.529952Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2025-03-31T16:16:12.529967Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-03-31T16:16:12.529998Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-03-31T16:16:12.554985Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037889 2025-03-31T16:16:12.555052Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037889 2025-03-31T16:16:12.556079Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037891 2025-03-31T16:16:12.556138Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037891 >> Viewer::JsonStorageListingV1 [GOOD] >> Viewer::JsonStorageListingV1GroupIdFilter >> Viewer::StorageGroupOutputWithoutFilterNoDepends [GOOD] >> Viewer::StorageGroupOutputWithSpaceCheckDependsOnVDiskSpaceStatus >> TFlatTest::WriteSplitKillRead ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TCancelTx::ImmediateReadOnly [GOOD] Test command err: 2025-03-31T16:16:02.512123Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008171122731399:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:02.512234Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001d03/r3tmp/tmpuHE3BH/pdisk_1.dat 2025-03-31T16:16:02.805062Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:02.854656Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:02.854773Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:02.856529Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19559 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:02.993732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:03.019668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:19559 2025-03-31T16:16:03.340174Z node 1 :TX_PROXY ERROR: Actor# [1:7488008175417699434:2385] txid# 281474976710660 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037888 marker# P12 2025-03-31T16:16:03.340266Z node 1 :TX_PROXY ERROR: Actor# [1:7488008175417699434:2385] txid# 281474976710660 RESPONSE Status# ExecCancelled marker# P13c 2025-03-31T16:16:03.349708Z node 1 :TX_PROXY ERROR: Actor# [1:7488008175417699447:2395] txid# 281474976710661 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037888 marker# P12 2025-03-31T16:16:03.349796Z node 1 :TX_PROXY ERROR: Actor# [1:7488008175417699447:2395] txid# 281474976710661 RESPONSE Status# ExecCancelled marker# P13c 2025-03-31T16:16:03.363694Z node 1 :TX_PROXY ERROR: Actor# [1:7488008175417699460:2405] txid# 281474976710662 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037888 marker# P12 2025-03-31T16:16:03.363828Z node 1 :TX_PROXY ERROR: Actor# [1:7488008175417699460:2405] txid# 281474976710662 RESPONSE Status# ExecCancelled marker# P13c 2025-03-31T16:16:03.391315Z node 1 :TX_PROXY ERROR: Actor# [1:7488008175417699486:2425] txid# 281474976710664 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037889 marker# P12 2025-03-31T16:16:03.391380Z node 1 :TX_PROXY ERROR: Actor# [1:7488008175417699486:2425] txid# 281474976710664 RESPONSE Status# ExecCancelled marker# P13c 2025-03-31T16:16:03.405923Z node 1 :TX_PROXY ERROR: Actor# [1:7488008175417699499:2435] txid# 281474976710665 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037889 marker# P12 2025-03-31T16:16:03.405991Z node 1 :TX_PROXY ERROR: Actor# [1:7488008175417699499:2435] txid# 281474976710665 RESPONSE Status# ExecCancelled marker# P13c 2025-03-31T16:16:03.419788Z node 1 :TX_PROXY ERROR: Actor# [1:7488008175417699512:2445] txid# 281474976710666 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037889 marker# P12 2025-03-31T16:16:03.419869Z 
node 1 :TX_PROXY ERROR: Actor# [1:7488008175417699512:2445] txid# 281474976710666 RESPONSE Status# ExecCancelled marker# P13c 2025-03-31T16:16:05.589859Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008183567091039:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:05.589949Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001d03/r3tmp/tmp1ZQqus/pdisk_1.dat 2025-03-31T16:16:05.702005Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:05.732314Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:05.732434Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:05.736266Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9948 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:05.907098Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:05.915687Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
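[annotation] Each cancelled proposal above ends with "RESPONSE Status# ExecCancelled marker# P13c": in this test the cancellation is the expected outcome, not a failure. A hedged sketch of a caller distinguishing that status from hard errors; the enum names are assumptions, not the real proxy API:

    #include <stdexcept>
    #include <cstdio>

    // Illustrative subset of proposal outcomes; names are invented for the sketch.
    enum class EProposeStatus { Complete, ExecCancelled, ExecResultUnavailable, ExecError };

    void HandleProposeResult(EProposeStatus st) {
        switch (st) {
            case EProposeStatus::Complete:
                break; // transaction applied
            case EProposeStatus::ExecCancelled:
                // Expected in TCancelTx: the test cancels its own transactions,
                // so this is a clean outcome rather than something to retry.
                std::printf("tx cancelled before execution\n");
                break;
            case EProposeStatus::ExecResultUnavailable:
            case EProposeStatus::ExecError:
                throw std::runtime_error("proposal failed");
        }
    }

    int main() { HandleProposeResult(EProposeStatus::ExecCancelled); }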
TClient is connected to server localhost:9948 2025-03-31T16:16:08.433595Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488008197028832661:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:08.433691Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001d03/r3tmp/tmpOvk43Y/pdisk_1.dat 2025-03-31T16:16:08.551310Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:08.580637Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:08.580730Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:08.582086Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14543 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:08.761014Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:08.773342Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
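[annotation] The TClient::Ls replies in this run carry both a numeric Status and a StatusCode: "Status: 1 StatusCode: SUCCESS" for a live path, and "Status: 128 StatusCode: PATH_NOT_EXIST" with an ErrorReason once the table is dropped. A sketch of branching on that pair; the struct merely mirrors the printed fields and is an assumption, not the actual TClient API:

    #include <cstdint>
    #include <string>
    #include <cstdio>

    // Fields as they appear in the printed Ls response; illustrative only.
    struct TLsResponse {
        uint32_t Status = 0;          // 1 on success, 128 for PATH_NOT_EXIST here
        std::string StatusCode;       // "SUCCESS", "PATH_NOT_EXIST", ...
        std::string ErrorReason;      // e.g. "Path not found"
    };

    bool PathExists(const TLsResponse& r) {
        if (r.StatusCode == "SUCCESS") return true;
        if (r.StatusCode == "PATH_NOT_EXIST") return false;
        std::fprintf(stderr, "Ls failed: %s\n", r.ErrorReason.c_str());
        return false;
    }

    int main() {
        TLsResponse gone{128, "PATH_NOT_EXIST", "Path not found"};
        std::printf("exists=%d\n", PathExists(gone));
    }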
TClient is connected to server localhost:14543 2025-03-31T16:16:09.050994Z node 3 :TX_PROXY ERROR: Actor# [3:7488008201323800688:2383] txid# 281474976715660 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037888 marker# P12 2025-03-31T16:16:09.051066Z node 3 :TX_PROXY ERROR: Actor# [3:7488008201323800688:2383] txid# 281474976715660 RESPONSE Status# ExecCancelled marker# P13c 2025-03-31T16:16:09.061408Z node 3 :TX_PROXY ERROR: Actor# [3:7488008201323800703:2395] txid# 281474976715661 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037888 marker# P12 2025-03-31T16:16:09.061471Z node 3 :TX_PROXY ERROR: Actor# [3:7488008201323800703:2395] txid# 281474976715661 RESPONSE Status# ExecCancelled marker# P13c 2025-03-31T16:16:09.074676Z node 3 :TX_PROXY ERROR: Actor# [3:7488008201323800717:2406] txid# 281474976715662 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037888 marker# P12 2025-03-31T16:16:09.074755Z node 3 :TX_PROXY ERROR: Actor# [3:7488008201323800717:2406] txid# 281474976715662 RESPONSE Status# ExecCancelled marker# P13c 2025-03-31T16:16:09.103987Z node 3 :TX_PROXY ERROR: Actor# [3:7488008201323800743:2426] txid# 281474976715664 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037889 marker# P12 2025-03-31T16:16:09.104061Z node 3 :TX_PROXY ERROR: Actor# [3:7488008201323800743:2426] txid# 281474976715664 RESPONSE Status# ExecCancelled marker# P13c 2025-03-31T16:16:09.117544Z node 3 :TX_PROXY ERROR: Actor# [3:7488008201323800756:2436] txid# 281474976715665 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037889 marker# P12 2025-03-31T16:16:09.117625Z node 3 :TX_PROXY ERROR: Actor# [3:7488008201323800756:2436] txid# 281474976715665 RESPONSE Status# ExecCancelled marker# P13c 2025-03-31T16:16:09.133154Z node 3 :TX_PROXY ERROR: Actor# [3:7488008201323800769:2446] txid# 281474976715666 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037889 marker# P12 2025-03-31T16:16:09.133233Z node 3 :TX_PROXY ERROR: Actor# [3:7488008201323800769:2446] txid# 281474976715666 RESPONSE Status# ExecCancelled marker# P13c 2025-03-31T16:16:11.686962Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488008213179012799:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:11.687053Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001d03/r3tmp/tmpKpzKaH/pdisk_1.dat 2025-03-31T16:16:11.791557Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:11.827832Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:11.827919Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:11.829155Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13923 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:11.999905Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:12.008522Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:13923 2025-03-31T16:16:12.358205Z node 4 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976715660 at tablet 72075186224037888 status: CANCELLED errors: EXECUTION_CANCELLED (Tx was cancelled) | 2025-03-31T16:16:12.358648Z node 4 :TX_PROXY ERROR: Actor# [4:7488008217473980826:2384] txid# 281474976715660 RESPONSE Status# ExecCancelled marker# P13c 2025-03-31T16:16:12.372000Z node 4 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976715662 at tablet 72075186224037889 status: CANCELLED errors: EXECUTION_CANCELLED (Tx was cancelled) | 2025-03-31T16:16:12.372113Z node 4 :TX_PROXY ERROR: Actor# [4:7488008217473980840:2392] txid# 281474976715662 RESPONSE Status# ExecCancelled marker# P13c >> TFlatTest::LargeDatashardReplyRW [GOOD] >> TFlatTest::SelectRangeNullArgs4 [GOOD] >> THealthCheckTest::LayoutIncorrect [FAIL] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::MergeEmptyAndWrite [GOOD] Test command err: 2025-03-31T16:16:10.040789Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008205147973455:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:10.040927Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cf8/r3tmp/tmpUC5JjB/pdisk_1.dat 2025-03-31T16:16:10.355058Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:10.376506Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:10.376622Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:10.378258Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14524 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:10.645027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:10.676217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:13.117421Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008218895026782:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:13.117558Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cf8/r3tmp/tmpBXtNvc/pdisk_1.dat 2025-03-31T16:16:13.218864Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:13.258541Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:13.258660Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:13.260016Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16873 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
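[annotation] On every node start the HIVE warnings trace the same handshake, "VolatileState: Unknown -> Disconnected", "Disconnected -> Connecting", "Connecting -> Connected". A small sketch of such a forward-only state machine; the enum and transition table are assumptions for illustration:

    #include <cstdio>

    enum class ENodeState { Unknown, Disconnected, Connecting, Connected };

    // Only the forward transitions seen in the log are allowed here.
    bool Advance(ENodeState& s, ENodeState next) {
        bool ok = (s == ENodeState::Unknown      && next == ENodeState::Disconnected) ||
                  (s == ENodeState::Disconnected && next == ENodeState::Connecting)   ||
                  (s == ENodeState::Connecting   && next == ENodeState::Connected);
        if (ok) s = next;
        return ok;
    }

    int main() {
        ENodeState s = ENodeState::Unknown;
        Advance(s, ENodeState::Disconnected);
        Advance(s, ENodeState::Connecting);
        Advance(s, ENodeState::Connected);
        std::printf("connected=%d\n", s == ENodeState::Connected);
    }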
2025-03-31T16:16:13.424668Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:13.450615Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:13.563561Z node 2 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-03-31T16:16:13.568506Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.013s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-03-31T16:16:13.589325Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2025-03-31T16:16:13.594055Z node 2 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743437773560 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "unused004" Type: "Float" TypeId: 33 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name... 
(TRUNCATED) 2025-03-31T16:16:13.617854Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:16:13.619663Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-03-31T16:16:13.619845Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-31T16:16:13.621082Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-03-31T16:16:13.621237Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:16:13.621916Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037888 restored its data 2025-03-31T16:16:13.622705Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-03-31T16:16:13.622836Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-31T16:16:13.623286Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037889 restored its data 2025-03-31T16:16:13.623893Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-03-31T16:16:13.624004Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:16:13.624386Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037888 restored its data 2025-03-31T16:16:13.624986Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-03-31T16:16:13.625087Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-31T16:16:13.625465Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037889 restored its data 2025-03-31T16:16:13.626094Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-03-31T16:16:13.626189Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:16:13.626578Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037888 restored its data 2025-03-31T16:16:13.627179Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-03-31T16:16:13.627279Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-31T16:16:13.627657Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037889 restored its data 2025-03-31T16:16:13.628248Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-03-31T16:16:13.628341Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:16:13.628707Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037888 restored its data 2025-03-31T16:16:13.629289Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-03-31T16:16:13.629393Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-31T16:16:13.629771Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037889 restored its data 2025-03-31T16:16:13.630337Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-03-31T16:16:13.630431Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:16:13.630810Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037888 restored its data 2025-03-31T16:16:13.631441Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-03-31T16:16:13.631536Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-31T16:16:13.631895Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037889 restored its data 2025-03-31T16:16:13.632480Z node 2 
:TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-03-31T16:16:13.632577Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:16:13.632951Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037888 restored its data 2025-03-31T16:16:13.633564Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:16:13.633609Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:16:13.633781Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-31T16:16:13.634157Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037889 restored its data 2025-03-31T16:16:13.634654Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-03-31T16:16:13.634675Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-31T16:16:13.735805Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { OperationType: ESchemeOpSplitMergeTablePartitions SplitMergeTablePartitions { TablePath: "/dc-1/Dir/TableO ... sage: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037890 Status: COMPLETE TxId: 281474976715687 Step: 1743437773903 OrderId: 281474976715687 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037890 CpuTimeUsec: 738 } } 2025-03-31T16:16:13.861619Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976715687 2025-03-31T16:16:13.861676Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976715687 2025-03-31T16:16:13.861690Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715687 2025-03-31T16:16:13.861705Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715687, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 7 2025-03-31T16:16:13.861721Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-03-31T16:16:13.861859Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976715687 2025-03-31T16:16:13.861906Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976715687 2025-03-31T16:16:13.861919Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715687 2025-03-31T16:16:13.861927Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715687, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 7 2025-03-31T16:16:13.861937Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 
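[annotation] Transaction 281474976715676 above (like 281474976710680 earlier) bounces between "released its data" and "restored its data" on each TTxProposeTransactionBase::Execute until it finally reaches Complete: the executor hands memory back to the tablet between attempts instead of holding it across reschedules. A hedged sketch of that cooperative pattern; the loop and names are illustrative, not the datashard implementation:

    #include <optional>
    #include <string>
    #include <cstdio>

    struct TTxData { std::string Payload; };

    class TTxSlot {
        std::optional<TTxData> Held;  // data owned only while executing
        std::string Spilled;          // serialized form kept between attempts
    public:
        void Restore() { Held = TTxData{Spilled}; }               // "restored its data"
        void Release() { Spilled = Held->Payload; Held.reset(); } // "released its data"
        bool TryExecute(int attempt) { return attempt >= 3; }     // succeeds eventually
    };

    int main() {
        TTxSlot tx;
        for (int attempt = 0; ; ++attempt) {
            tx.Restore();
            if (tx.TryExecute(attempt)) break;  // TTxProposeTransactionBase::Complete
            tx.Release();                       // yield memory, re-enter later
        }
        std::printf("tx complete\n");
    }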
2025-03-31T16:16:13.862039Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046644480, cookie: 281474976715687 2025-03-31T16:16:13.862103Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046644480, cookie: 281474976715687 2025-03-31T16:16:13.862117Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715687 2025-03-31T16:16:13.862127Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715687, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 18446744073709551615 2025-03-31T16:16:13.862140Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4 2025-03-31T16:16:13.862173Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715687, ready parts: 0/1, is published: true 2025-03-31T16:16:13.862380Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046644480, at schemeshard: 72057594046644480, message: Source { RawX1: 7488008218895027703 RawX2: 4503608217307445 } Origin: 72075186224037890 State: 5 TxId: 281474976715687 Step: 0 Generation: 1 2025-03-31T16:16:13.862407Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715687, tablet: 72075186224037890, partId: 0 2025-03-31T16:16:13.862513Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715687:0, at schemeshard: 72057594046644480, message: Source { RawX1: 7488008218895027703 RawX2: 4503608217307445 } Origin: 72075186224037890 State: 5 TxId: 281474976715687 Step: 0 Generation: 1 2025-03-31T16:16:13.862541Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715687:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-03-31T16:16:13.862599Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 281474976715687:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 message: Source { RawX1: 7488008218895027703 RawX2: 4503608217307445 } Origin: 72075186224037890 State: 5 TxId: 281474976715687 Step: 0 Generation: 1 2025-03-31T16:16:13.862636Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976715687:0, shardIdx: 72057594046644480:3, datashard: 72075186224037890, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046644480 2025-03-31T16:16:13.862653Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715687:0, at schemeshard: 72057594046644480 2025-03-31T16:16:13.862664Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976715687:0, datashard: 72075186224037890, at schemeshard: 72057594046644480 2025-03-31T16:16:13.862677Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715687:0 129 -> 240 2025-03-31T16:16:13.862850Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715687:0, at schemeshard: 72057594046644480 2025-03-31T16:16:13.862903Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715687 2025-03-31T16:16:13.862931Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715687 2025-03-31T16:16:13.862947Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715687 2025-03-31T16:16:13.862981Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715687:0, at schemeshard: 72057594046644480 2025-03-31T16:16:13.863027Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715687:0, at schemeshard: 72057594046644480 2025-03-31T16:16:13.863050Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 281474976715687:0 ProgressState, at schemeshard: 72057594046644480 2025-03-31T16:16:13.863103Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715687 datashard 72075186224037890 state PreOffline 2025-03-31T16:16:13.863152Z node 2 :TX_DATASHARD DEBUG: 72075186224037890 Got TEvSchemaChangedResult from SS at 72075186224037890 2025-03-31T16:16:13.863327Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-03-31T16:16:13.863449Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715687:0 progress is 1/1 2025-03-31T16:16:13.863464Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715687 ready parts: 1/1 2025-03-31T16:16:13.863478Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715687:0 progress is 1/1 2025-03-31T16:16:13.863487Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715687 ready parts: 1/1 2025-03-31T16:16:13.863498Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715687, ready parts: 1/1, is published: true 2025-03-31T16:16:13.863533Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:7488008218895027925:2393] message: TxId: 281474976715687 2025-03-31T16:16:13.863548Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715687 ready parts: 1/1 2025-03-31T16:16:13.863561Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715687:0 2025-03-31T16:16:13.863569Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715687:0 2025-03-31T16:16:13.863633Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-03-31T16:16:13.864290Z node 2 :TX_DATASHARD DEBUG: 72075186224037890 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: TClient::Ls request: /dc-1/Dir/TableOld 2025-03-31T16:16:13.864363Z node 2 :TX_DATASHARD INFO: 72075186224037890 Initiating switch from PreOffline to Offline state 2025-03-31T16:16:13.865933Z node 2 :TX_DATASHARD INFO: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2025-03-31T16:16:13.866202Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7488008218895027703 RawX2: 4503608217307445 } TabletId: 72075186224037890 State: 4 2025-03-31T16:16:13.866253Z node 2 
:FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-03-31T16:16:13.866550Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2025-03-31T16:16:13.866555Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 TClient::Ls response: Status: 128 StatusCode: PATH_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Path not found" 2025-03-31T16:16:13.867929Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-03-31T16:16:13.867959Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037890 reason = ReasonStop 2025-03-31T16:16:13.868148Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-03-31T16:16:13.868456Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037890 2025-03-31T16:16:13.868491Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-03-31T16:16:13.868511Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-03-31T16:16:13.868533Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037890 2025-03-31T16:16:13.868561Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-03-31T16:16:13.869252Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-03-31T16:16:13.869271Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2025-03-31T16:16:13.869278Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-03-31T16:16:13.869322Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 >> TFlatTest::ShardFreezeUnfreezeAlreadySet >> THealthCheckTest::ShardsLimit800 [GOOD] >> TObjectStorageListingTest::CornerCases >> TFlatTest::CopyTableAndCompareColumnsSchema [GOOD] >> TFlatTest::CopyTableAndDropCopy >> TLocksTest::Range_BrokenLock0 >> TLocksTest::Range_BrokenLockMax >> TFlatTest::ReadOnlyMode [GOOD] >> TFlatTest::RejectByIncomingReadSetSize >> ViewerTopicDataTests::TopicDataTest [GOOD] >> TFlatTest::RejectByPerRequestSize [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::LargeDatashardReplyRW [GOOD] Test command err: 2025-03-31T16:15:59.795847Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008161272531235:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:15:59.795981Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001d05/r3tmp/tmpCRfNAf/pdisk_1.dat 
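[annotation] The DecrementPathDbRefCount entries count a dropped path's references down ("was 5" ... "was 1" in the earlier teardown, "was 1" here) until TTxCleanDroppedPaths can run PersistRemovePath. A sketch of that delete-when-last-reference-gone pattern, with invented names:

    #include <cstdio>

    // Illustrative: a path is physically removed only after every shard and
    // transaction state that referenced it has been deleted.
    struct TPathEntry {
        int RefCount;
        bool Dropped;
    };

    // Returns true when the caller should persist the removal.
    bool DecRef(TPathEntry& p) {
        --p.RefCount;                        // "DecrementPathDbRefCount ... was N"
        return p.Dropped && p.RefCount == 0; // last reference gone
    }

    int main() {
        TPathEntry path{5, true};
        while (!DecRef(path)) {}
        std::printf("PersistRemovePath\n");  // the TTxCleanDroppedPaths step
    }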
2025-03-31T16:16:00.046329Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:00.126457Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:00.126702Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:00.128727Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3937 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:00.292399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:00.333349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:04.795777Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008161272531235:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:04.795906Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:16:07.893914Z node 1 :MINIKQL_ENGINE ERROR: Shard %72075186224037889, txid %281474976711360, engine error: Error executing transaction (read-only: 1): Datashard 72075186224037889: reply size limit exceeded. (61442990 > 50331648) 2025-03-31T16:16:07.904316Z node 1 :TX_DATASHARD ERROR: Datashard execution error for [1743437767477:281474976711360] at 72075186224037889: Datashard 72075186224037889: reply size limit exceeded. (61442990 > 50331648) 2025-03-31T16:16:07.907677Z node 1 :TX_PROXY ERROR: Actor# [1:7488008195632276058:5938] txid# 281474976711360 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# RESULT_UNAVAILABLE shard id 72075186224037889 marker# P12 2025-03-31T16:16:07.907815Z node 1 :TX_PROXY ERROR: Actor# [1:7488008195632276058:5938] txid# 281474976711360 RESPONSE Status# ExecResultUnavailable marker# P13c DataShardErrors: [REPLY_SIZE_EXCEEDED] Datashard 72075186224037889: reply size limit exceeded. 
(61442990 > 50331648) proxy error code: ExecResultUnavailable 2025-03-31T16:16:08.491048Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008197936032513:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:08.491133Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001d05/r3tmp/tmpxC4Olc/pdisk_1.dat 2025-03-31T16:16:08.606696Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:08.634368Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:08.634470Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:08.635924Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29586 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:08.792110Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:08.813737Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:13.477260Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488008197936032513:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:13.477331Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:16:16.958483Z node 2 :MINIKQL_ENGINE ERROR: Shard %72075186224037888, txid %281474976716361, engine error: Error executing transaction (read-only: 0): Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648) 2025-03-31T16:16:16.970471Z node 2 :TX_DATASHARD ERROR: Datashard execution error for [0:281474976716361] at 72075186224037888: Datashard 72075186224037888: reply size limit exceeded. 
(71580986 > 50331648) 2025-03-31T16:16:16.973499Z node 2 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976716361 at tablet 72075186224037888 status: RESULT_UNAVAILABLE errors: REPLY_SIZE_EXCEEDED (Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648)) | 2025-03-31T16:16:16.973674Z node 2 :TX_PROXY ERROR: Actor# [2:7488008232295777295:5913] txid# 281474976716361 RESPONSE Status# ExecResultUnavailable marker# P13c DataShardErrors: [REPLY_SIZE_EXCEEDED] Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648) proxy error code: ExecResultUnavailable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::ShardsLimit800 [GOOD] Test command err: 2025-03-31T16:15:51.572333Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:51.572588Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:51.572676Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d3a/r3tmp/tmpbwRrmw/pdisk_1.dat 2025-03-31T16:15:51.882739Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13240, node 1 TClient is connected to server localhost:7634 2025-03-31T16:15:52.201463Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:15:52.201504Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:15:52.201527Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:15:52.201844Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:15:55.322580Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:304:2347], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:55.322720Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:55.322765Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d3a/r3tmp/tmpE7i1b0/pdisk_1.dat TServer::EnableGrpc on GrpcPort 4606, node 3 TClient is connected to server localhost:13071 2025-03-31T16:15:58.871105Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:58.871308Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:58.871351Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d3a/r3tmp/tmpxaKtla/pdisk_1.dat 2025-03-31T16:15:59.104425Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23458, node 4 TClient is connected to server localhost:27933 2025-03-31T16:15:59.532131Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:15:59.532201Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:15:59.532249Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:15:59.532595Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:16:06.945274Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:698:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:16:06.945728Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:16:06.945988Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:16:06.947639Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:16:06.947741Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:16:06.947914Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:695:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d3a/r3tmp/tmpZoT5wV/pdisk_1.dat 2025-03-31T16:16:07.237789Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20826, node 6 TClient is connected to server localhost:8032 2025-03-31T16:16:07.660558Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:16:07.660617Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:16:07.660660Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:16:07.661199Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:16:15.958152Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:699:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:16:15.958552Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:16:15.958721Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:16:15.959558Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:696:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:16:15.959916Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:16:15.960166Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d3a/r3tmp/tmpND58y6/pdisk_1.dat 2025-03-31T16:16:16.253638Z node 8 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9350, node 8 TClient is connected to server localhost:20615 2025-03-31T16:16:16.597143Z node 8 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:16:16.597182Z node 8 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:16:16.597200Z node 8 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:16:16.597974Z node 8 :NET_CLASSIFIER ERROR: got bad distributable configuration ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeNullArgs4 [GOOD] Test command err: 2025-03-31T16:16:11.651467Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008211313925772:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:11.651580Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cf6/r3tmp/tmpQbcq2q/pdisk_1.dat 2025-03-31T16:16:11.948586Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:12.037109Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:12.037236Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:12.039046Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2645 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-03-31T16:16:12.205096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:12.229960Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:14.539920Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008222041224604:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:14.540058Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cf6/r3tmp/tmppDlqj0/pdisk_1.dat 2025-03-31T16:16:14.656641Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:14.681984Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:14.682058Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:14.683749Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11008 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:14.837755Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:14.862878Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
>> TLocksTest::CK_GoodLock >> TFlatTest::WriteSplitKillRead [GOOD] >> TFlatTest::WriteSplitWriteSplit >> TLocksFatTest::RangeSetRemove >> Viewer::ServerlessWithExclusiveNodesCheckTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::RejectByPerRequestSize [GOOD] Test command err: 2025-03-31T16:16:00.308812Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008163322669028:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:00.309118Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001d04/r3tmp/tmpyXx1h5/pdisk_1.dat 2025-03-31T16:16:00.606636Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:00.668580Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:00.668724Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:00.670601Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20190 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:00.849761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:00.883416Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:05.308820Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008163322669028:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:05.308899Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:16:07.203627Z node 1 :TX_DATASHARD ERROR: Transaction read size 51002229 exceeds limit 10000 at tablet 72075186224037888 txId 281474976710760 2025-03-31T16:16:07.203777Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976710760 at tablet 72075186224037888 status: BAD_REQUEST errors: READ_SIZE_EXECEEDED (Transaction read size 51002229 exceeds limit 10000 at tablet 72075186224037888 txId 281474976710760) | 2025-03-31T16:16:07.203901Z node 1 :TX_PROXY ERROR: Actor# [1:7488008193387441753:2932] txid# 281474976710760 RESPONSE Status# WrongRequest marker# P13c 2025-03-31T16:16:07.717806Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008194087390939:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:07.717890Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001d04/r3tmp/tmpASJcrj/pdisk_1.dat 2025-03-31T16:16:07.836897Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:07.858471Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:07.858538Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:07.862829Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7450 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:08.006312Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:08.029850Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:12.718029Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488008194087390939:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:12.718105Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:16:14.387342Z node 2 :TX_DATASHARD ERROR: Transaction read size 51002389 exceeds limit 10000 at tablet 72075186224037888 txId 281474976715760 2025-03-31T16:16:14.387436Z node 2 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976715760 at tablet 72075186224037888 status: BAD_REQUEST errors: READ_SIZE_EXECEEDED (Transaction read size 51002389 exceeds limit 10000 at tablet 72075186224037888 txId 281474976715760) | 2025-03-31T16:16:14.387695Z node 2 :TX_PROXY ERROR: Actor# [2:7488008224152163657:2933] txid# 281474976715760 RESPONSE Status# WrongRequest marker# P13c 2025-03-31T16:16:15.116721Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488008228515122316:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:15.116863Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001d04/r3tmp/tmpnMidjh/pdisk_1.dat 2025-03-31T16:16:15.234815Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:15.265695Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:15.265806Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:15.267480Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11975 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:15.469803Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:15.494793Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:18.137057Z node 3 :TX_PROXY DEBUG: actor# [3:7488008228515122535:2092] Handle TEvProposeTransaction 2025-03-31T16:16:18.137095Z node 3 :TX_PROXY DEBUG: actor# [3:7488008228515122535:2092] TxId# 281474976715700 ProcessProposeTransaction 2025-03-31T16:16:18.137142Z node 3 :TX_PROXY DEBUG: actor# [3:7488008228515122535:2092] Cookie# 0 userReqId# "" txid# 281474976715700 SEND to# [3:7488008241400025299:2610] DataReq marker# P0 2025-03-31T16:16:18.137207Z node 3 :TX_PROXY DEBUG: Actor# [3:7488008241400025299:2610] Cookie# 0 txid# 281474976715700 HANDLE TDataReq marker# P1 2025-03-31T16:16:18.137787Z node 3 :TX_PROXY DEBUG: Actor [3:7488008241400025299:2610] txid 281474976715700 disallow followers cause of operation 2 read target mode 0 2025-03-31T16:16:18.137815Z node 3 :TX_PROXY DEBUG: Actor [3:7488008241400025299:2610] txid 281474976715700 disallow followers cause of operation 2 read target mode 0 2025-03-31T16:16:18.137850Z node 3 :TX_PROXY DEBUG: Actor# [3:7488008241400025299:2610] txid# 281474976715700 SEND to# [3:7488008228515122579:2114] TSchemeCache with 2 scheme entries. DataReq marker# P2 2025-03-31T16:16:18.137952Z node 3 :TX_PROXY DEBUG: Actor# [3:7488008241400025299:2610] txid# 281474976715700 HANDLE EvResolveKeySetResult TDataReq marker# P3 ErrorCount# 0 2025-03-31T16:16:18.139364Z node 3 :TX_PROXY DEBUG: Actor# [3:7488008241400025299:2610] txid# 281474976715700 SEND TEvProposeTransaction to datashard 72075186224037888 with 734 bytes program affected shards 2 followers disallowed marker# P4 2025-03-31T16:16:18.139677Z node 3 :TX_PROXY DEBUG: Actor# [3:7488008241400025299:2610] txid# 281474976715700 SEND TEvProposeTransaction to datashard 72075186224037889 with 734 bytes program affected shards 2 followers disallowed marker# P4 2025-03-31T16:16:18.139911Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-31T16:16:18.140831Z node 3 :TX_DATASHARD DEBUG: Prepared DataTx transaction txId 281474976715700 at tablet 72075186224037889 2025-03-31T16:16:18.141055Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:16:18.141774Z node 3 :TX_DATASHARD DEBUG: Prepared DataTx transaction txId 281474976715700 at tablet 72075186224037888 2025-03-31T16:16:18.142253Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-03-31T16:16:18.142387Z node 3 :TX_PROXY DEBUG: Actor# [3:7488008241400025299:2610] txid# 281474976715700 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# PREPARED shard id 72075186224037889 read size 9000479 out readset size 0 marker# P6 2025-03-31T16:16:18.142789Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:16:18.142875Z node 3 :TX_PROXY DEBUG: Actor# [3:7488008241400025299:2610] txid# 281474976715700 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# PREPARED shard id 72075186224037888 read size 17000887 out readset size 0 marker# P6 2025-03-31T16:16:18.142954Z node 3 :TX_PROXY ERROR: Actor# [3:7488008241400025299:2610] txid# 281474976715700 FailProposedRequest: Transaction total read size 26001366 exceeded limit 10000 Status# ExecError 2025-03-31T16:16:18.143027Z node 3 :TX_PROXY ERROR: Actor# [3:7488008241400025299:2610] txid# 281474976715700 RESPONSE Status# ExecError marker# P13c 2025-03-31T16:16:18.143075Z node 3 :TX_DATASHARD DEBUG: Got TEvDataShard::TEvCancelTransactionProposal 72075186224037888 txId 
281474976715700 2025-03-31T16:16:18.143148Z node 3 :TX_DATASHARD DEBUG: Start TTxCancelTransactionProposal at tablet 72075186224037888 txId 281474976715700 2025-03-31T16:16:18.143617Z node 3 :TX_DATASHARD DEBUG: Got TEvDataShard::TEvCancelTransactionProposal 72075186224037889 txId 281474976715700 2025-03-31T16:16:18.143655Z node 3 :TX_DATASHARD DEBUG: Start TTxCancelTransactionProposal at tablet 72075186224037889 txId 281474976715700 >> TFlatTest::ShardFreezeUnfreezeAlreadySet [GOOD] >> TFlatTest::ShardFreezeUnfreeze >> TLocksFatTest::LocksLimit [GOOD] >> TFlatTest::AutoSplitBySize [GOOD] >> TFlatTest::AutoMergeBySize >> TObjectStorageListingTest::CornerCases [GOOD] >> TObjectStorageListingTest::Decimal >> TLocksTest::CK_Range_BrokenLock >> TObjectStorageListingTest::Split >> TFlatTest::CopyTableAndDropCopy [GOOD] >> TLocksTest::Range_IncorrectNullDot1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::ServerlessWithExclusiveNodesCheckTable [GOOD] Test command err: 2025-03-31T16:15:00.604244Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488007907405152985:2099];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:15:00.604993Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-03-31T16:15:01.547194Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:15:01.551015Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:15:01.551159Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:15:01.555603Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2633, node 1 2025-03-31T16:15:01.859818Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:15:01.859853Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:15:01.859862Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:15:01.859975Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27433 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:15:02.387499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:15:02.420704Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:15:02.443067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-31T16:15:02.447430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:15:02.453484Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-03-31T16:15:05.193638Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007928879990129:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:05.193738Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:05.194073Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007928879990141:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:05.198609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-03-31T16:15:05.211470Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007928879990143:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-03-31T16:15:05.295571Z node 1 :TX_PROXY ERROR: Actor# [1:7488007928879990194:2362] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:15:05.607209Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488007907405152985:2099];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:15:05.607297Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:15:07.749672Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488007936969214820:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:15:07.749760Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-03-31T16:15:07.903706Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:15:07.903800Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:15:07.905228Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:15:07.906447Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5661, node 2 2025-03-31T16:15:08.011764Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:15:08.011788Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:15:08.011798Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:15:08.011928Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11366 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:15:08.386181Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:15:08.395265Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:15:08.405492Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-31T16:15:08.408893Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:15:08.414123Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2025-03-31T16:15:11.695430Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488007954149084690:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:11.695527Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:11.697023Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488007954149084704:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:11.701585Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480 2025-03-31T16:15:11.711120Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488007954149084706:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-03-31T16:15:11.798537Z node 2 :TX_PROXY ERROR: Actor# [2:7488007954149084760:2358] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-03-31T16:15:13.652362Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488007961516388229:2231];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:15:13.683869Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:15:13.799228Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:15:13.825501Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:15:13.825603Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:15:13.828630Z nod ... RN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:15:20.247710Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:15:20.248516Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28510, node 4 2025-03-31T16:15:20.323409Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:15:20.323440Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:15:20.323452Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:15:20.323618Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18346 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:15:20.712640Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:15:20.720468Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:15:20.733182Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-31T16:15:20.737849Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:15:24.290177Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488008011358685843:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:24.290316Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:24.290720Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488008011358685856:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:24.295474Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480 2025-03-31T16:15:24.309823Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7488008011358685858:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-03-31T16:15:24.363889Z node 4 :TX_PROXY ERROR: Actor# [4:7488008011358685909:2358] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:15:33.913710Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:464:2426], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:33.914006Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:33.914134Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-03-31T16:15:34.297840Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:15:34.457773Z node 5 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-03-31T16:15:34.486572Z node 5 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-03-31T16:15:35.161697Z node 5 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 25572, node 5 TClient is connected to server localhost:20707 2025-03-31T16:15:35.650164Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:15:35.650263Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:15:35.650318Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:15:35.651471Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:15:46.344964Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:541:2427], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:46.345414Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:46.345513Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-03-31T16:15:46.764677Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:15:46.915240Z node 7 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-03-31T16:15:46.940896Z node 7 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-03-31T16:15:47.550442Z node 7 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 8262, node 7 TClient is connected to server localhost:5654 2025-03-31T16:15:48.116968Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:15:48.117063Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:15:48.117126Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:15:48.118108Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:15:59.893886Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [10:539:2427], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:59.894275Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:59.894568Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-03-31T16:16:00.315633Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:00.472680Z node 10 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-03-31T16:16:00.505298Z node 10 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-03-31T16:16:01.096373Z node 10 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 26468, node 10 TClient is connected to server localhost:17279 2025-03-31T16:16:01.657770Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:16:01.657876Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:16:01.657956Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:16:01.658425Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:16:14.220477Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [13:617:2428], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:16:14.221086Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:16:14.221287Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-03-31T16:16:14.707152Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:14.875053Z node 13 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-03-31T16:16:14.909383Z node 13 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-03-31T16:16:15.780112Z node 13 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 9887, node 13 TClient is connected to server localhost:25730 2025-03-31T16:16:16.448148Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:16:16.448258Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:16:16.448351Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:16:16.449247Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration >> TLocksTest::UpdateLockedKey [GOOD] >> TLocksTest::SetLockNothing >> TFlatTest::RejectByIncomingReadSetSize [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksFatTest::LocksLimit [GOOD] Test command err: 2025-03-31T16:16:08.338956Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008199478214996:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:08.339100Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cfd/r3tmp/tmpz9mvv3/pdisk_1.dat 2025-03-31T16:16:08.658553Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:08.717564Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:08.717810Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:08.720156Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5589 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:08.924174Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:08.955822Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:09.065455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:09.109097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:13.339531Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008199478214996:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:13.339589Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:16:14.428347Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008225038220751:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:14.428439Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cfd/r3tmp/tmp4jnWWy/pdisk_1.dat 2025-03-31T16:16:14.526256Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:14.570565Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:14.570666Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:14.572312Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23900 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:14.716380Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:14.734692Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:14.803277Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:14.873516Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:17.473275Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488008236539493525:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:17.473392Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cfd/r3tmp/tmp8SanfS/pdisk_1.dat 2025-03-31T16:16:17.556971Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:17.604771Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:17.604836Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:17.606376Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9826 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:17.732562Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:17.752707Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:17.822772Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:17.866171Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... >> TFlatTest::WriteSplitWriteSplit [GOOD] >> TFlatTest::CopyTableAndReturnPartAfterCompaction ------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> ViewerTopicDataTests::TopicDataTest [GOOD] Test command err: 2025-03-31T16:15:31.713144Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:1732:2429], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-03-31T16:15:31.713549Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-31T16:15:31.714422Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
2025-03-31T16:15:31.715509Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:1729:2372], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-03-31T16:15:31.715851Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-31T16:15:31.716023Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
2025-03-31T16:15:31.716709Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:1741:2372], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-03-31T16:15:31.717127Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-31T16:15:31.717654Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:1735:2372], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-03-31T16:15:31.717760Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
2025-03-31T16:15:31.718225Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-31T16:15:31.718497Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
2025-03-31T16:15:31.718618Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:1738:2372], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:31.719637Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:31.720403Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-03-31T16:15:32.143165Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:15:32.309699Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-03-31T16:15:32.326674Z node 1 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-03-31T16:15:32.827433Z node 1 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 2925, node 1 TClient is connected to server localhost:2787 2025-03-31T16:15:33.129530Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:15:33.129599Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:15:33.129639Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:15:33.130036Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:16:04.157602Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7488008180746862370:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:04.157777Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-03-31T16:16:04.355592Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:04.373122Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:04.373239Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:04.374919Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14109, node 6 2025-03-31T16:16:04.430317Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:16:04.430346Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:16:04.430361Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:16:04.430488Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16583 WaitRootIsUp 'Root'... 
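Each "TServer::EnableGrpc on GrpcPort <p>" line above marks the point where a public gRPC endpoint becomes available; everything the test does afterwards goes through a client bound to that port. A hedged sketch of that binding with the public C++ SDK (the port is copied from the node-6 run above, the database matches the root the test waits for; the test itself uses the internal TClient instead):

```cpp
// Sketch only: bind a public SDK driver to the endpoint the test server
// just announced. 14109 is the GrpcPort printed above for node 6.
#include <ydb/public/sdk/cpp/client/ydb_driver/driver.h>

int main() {
    auto config = NYdb::TDriverConfig()
        .SetEndpoint("localhost:14109")
        .SetDatabase("/Root");
    NYdb::TDriver driver(config);
    // ... create scheme/table/topic clients from `driver` here ...
    driver.Stop(true); // true = wait for in-flight requests to finish
    return 0;
}
```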
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:16:04.958180Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:04.980927Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-31T16:16:04.983645Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:07.251887Z node 6 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-31T16:16:07.251946Z node 6 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-31T16:16:07.898172Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488008193631764947:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:16:07.898236Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488008193631764955:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:16:07.898300Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:16:07.904258Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480
2025-03-31T16:16:07.916539Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7488008193631764961:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-03-31T16:16:08.001747Z node 6 :TX_PROXY ERROR: Actor# [6:7488008193631765012:2356] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:16:08.328614Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:16:08.442020Z node 6 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-31T16:16:08.442059Z node 6 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-31T16:16:08.609883Z node 6 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-31T16:16:08.609917Z node 6 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-31T16:16:11.251071Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7488008209784868149:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:11.251168Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0015b7/r3tmp/tmpVk1cEH/pdisk_1.dat 2025-03-31T16:16:11.423623Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:11.423728Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:11.425006Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25982, node 7 2025-03-31T16:16:11.434632Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:11.482570Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:16:11.482598Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:16:11.482609Z node 7 :NET_CLASSIFIE ... 
nse: sequence_numbers: 13 offsets: 52 already_written: false write_statistics { persist_duration_ms: 2 queued_in_partition_duration_ms: 1 } 2025-03-31T16:16:14.999402Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|f44022be-11c45402-1fd17668-155efcd_0] Write session: acknoledged message 13 2025-03-31T16:16:15.065751Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|f44022be-11c45402-1fd17668-155efcd_0] Write session: try to update token 2025-03-31T16:16:15.065807Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|f44022be-11c45402-1fd17668-155efcd_0] Send 4 message(s) (3 left), first sequence number is 14 2025-03-31T16:16:15.068059Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|f44022be-11c45402-1fd17668-155efcd_0] Write session: try to update token 2025-03-31T16:16:15.068106Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|f44022be-11c45402-1fd17668-155efcd_0] Send 1 message(s) (2 left), first sequence number is 18 2025-03-31T16:16:15.090353Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|f44022be-11c45402-1fd17668-155efcd_0] Write session: try to update token 2025-03-31T16:16:15.090414Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|f44022be-11c45402-1fd17668-155efcd_0] Send 1 message(s) (1 left), first sequence number is 19 2025-03-31T16:16:15.091795Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|f44022be-11c45402-1fd17668-155efcd_0] Write session: try to update token 2025-03-31T16:16:15.091833Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|f44022be-11c45402-1fd17668-155efcd_0] Send 1 message(s) (0 left), first sequence number is 20 2025-03-31T16:16:15.143412Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|f44022be-11c45402-1fd17668-155efcd_0] Write session got write response: sequence_numbers: 14 sequence_numbers: 15 sequence_numbers: 16 sequence_numbers: 17 offsets: 53 offsets: 54 offsets: 55 offsets: 56 already_written: false already_written: false already_written: false already_written: false write_statistics { persist_duration_ms: 2 queued_in_partition_duration_ms: 2 } 2025-03-31T16:16:15.143459Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|f44022be-11c45402-1fd17668-155efcd_0] Write session: acknoledged message 14 2025-03-31T16:16:15.143493Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|f44022be-11c45402-1fd17668-155efcd_0] Write session: acknoledged message 15 2025-03-31T16:16:15.143517Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|f44022be-11c45402-1fd17668-155efcd_0] Write session: acknoledged message 16 2025-03-31T16:16:15.143532Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|f44022be-11c45402-1fd17668-155efcd_0] Write session: acknoledged message 17 2025-03-31T16:16:15.147490Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|f44022be-11c45402-1fd17668-155efcd_0] Write session got write response: sequence_numbers: 18 offsets: 57 already_written: false write_statistics { persist_duration_ms: 2 queued_in_partition_duration_ms: 70 } 2025-03-31T16:16:15.147548Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|f44022be-11c45402-1fd17668-155efcd_0] Write session: acknoledged message 18 2025-03-31T16:16:15.149092Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|f44022be-11c45402-1fd17668-155efcd_0] Write session got write response: sequence_numbers: 19 offsets: 58 already_written: false write_statistics { persist_duration_ms: 2 queued_in_partition_duration_ms: 1 } 
2025-03-31T16:16:15.149135Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|f44022be-11c45402-1fd17668-155efcd_0] Write session: acknoledged message 19 2025-03-31T16:16:15.149317Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|f44022be-11c45402-1fd17668-155efcd_0] Write session got write response: sequence_numbers: 20 offsets: 59 already_written: false write_statistics { persist_duration_ms: 2 queued_in_partition_duration_ms: 1 } 2025-03-31T16:16:15.149343Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|f44022be-11c45402-1fd17668-155efcd_0] Write session: acknoledged message 20 2025-03-31T16:16:15.213218Z :INFO: [] MessageGroupId [producer3] SessionId [producer3|f44022be-11c45402-1fd17668-155efcd_0] Write session will now close 2025-03-31T16:16:15.213333Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|f44022be-11c45402-1fd17668-155efcd_0] Write session: aborting 2025-03-31T16:16:15.214498Z :INFO: [] MessageGroupId [producer3] SessionId [producer3|f44022be-11c45402-1fd17668-155efcd_0] Write session: gracefully shut down, all writes complete 2025-03-31T16:16:15.214942Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|f44022be-11c45402-1fd17668-155efcd_0] Write session is aborting and will not restart 2025-03-31T16:16:15.215008Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|f44022be-11c45402-1fd17668-155efcd_0] Write session: destroy Size: 1398104 Size: 1398104 Size: 1398104 Size: 1398104 Size: 1398104 Size: 1398104 Size: 1398104 Size: 1398104 Size: 1398104 Size: 1398104 2025-03-31T16:16:15.944643Z :DEBUG: [] MessageGroupId [producer4] SessionId [] Write session: try to update token 2025-03-31T16:16:15.945295Z :INFO: [] MessageGroupId [producer4] SessionId [] Write session: Do CDS request 2025-03-31T16:16:15.945364Z :INFO: [] MessageGroupId [producer4] SessionId [] Start write session. Will connect to endpoint: localhost:25982 2025-03-31T16:16:15.953042Z :DEBUG: [] MessageGroupId [producer4] SessionId [] Write session: send init request: init_request { topic: "/Root/topic1" message_group_id: "producer4" } 2025-03-31T16:16:15.956210Z :INFO: [] MessageGroupId [producer4] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1743437775956 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-31T16:16:15.956315Z :INFO: [] MessageGroupId [producer4] SessionId [] Write session established. Init response: session_id: "producer4|adecdb20-180b34da-5af47990-8fe32ab5_0" topic: "topic1" 2025-03-31T16:16:15.961045Z :DEBUG: [] MessageGroupId [producer4] SessionId [producer4|adecdb20-180b34da-5af47990-8fe32ab5_0] Write 1 messages with Id from 1 to 1 2025-03-31T16:16:15.962414Z :INFO: [] MessageGroupId [producer4] SessionId [producer4|adecdb20-180b34da-5af47990-8fe32ab5_0] Write session: close. 
Timeout = 18446744073709551 ms 2025-03-31T16:16:16.003996Z :DEBUG: [] MessageGroupId [producer4] SessionId [producer4|adecdb20-180b34da-5af47990-8fe32ab5_0] Write session: try to update token 2025-03-31T16:16:16.004044Z :DEBUG: [] MessageGroupId [producer4] SessionId [producer4|adecdb20-180b34da-5af47990-8fe32ab5_0] Send 1 message(s) (0 left), first sequence number is 1 2025-03-31T16:16:16.026731Z :DEBUG: [] MessageGroupId [producer4] SessionId [producer4|adecdb20-180b34da-5af47990-8fe32ab5_0] Write session got write response: sequence_numbers: 1 offsets: 60 already_written: false write_statistics { persist_duration_ms: 2 queued_in_partition_duration_ms: 17 } 2025-03-31T16:16:16.026793Z :DEBUG: [] MessageGroupId [producer4] SessionId [producer4|adecdb20-180b34da-5af47990-8fe32ab5_0] Write session: acknoledged message 1 2025-03-31T16:16:16.045760Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488008231259705680:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:16:16.045773Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488008231259705665:2396], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:16:16.045847Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:16:16.049853Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480
2025-03-31T16:16:16.060773Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7488008231259705694:2400], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-03-31T16:16:16.062583Z :INFO: [] MessageGroupId [producer4] SessionId [producer4|adecdb20-180b34da-5af47990-8fe32ab5_0] Write session will now close 2025-03-31T16:16:16.062682Z :DEBUG: [] MessageGroupId [producer4] SessionId [producer4|adecdb20-180b34da-5af47990-8fe32ab5_0] Write session: aborting 2025-03-31T16:16:16.063389Z :INFO: [] MessageGroupId [producer4] SessionId [producer4|adecdb20-180b34da-5af47990-8fe32ab5_0] Write session: gracefully shut down, all writes complete 2025-03-31T16:16:16.064255Z :DEBUG: [] MessageGroupId [producer4] SessionId [producer4|adecdb20-180b34da-5af47990-8fe32ab5_0] Write session is aborting and will not restart 2025-03-31T16:16:16.064521Z :DEBUG: [] MessageGroupId [producer4] SessionId [producer4|adecdb20-180b34da-5af47990-8fe32ab5_0] Write session: destroy 2025-03-31T16:16:16.119716Z node 7 :TX_PROXY ERROR: Actor# [7:7488008231259705751:2532] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:16:16.212871Z node 7 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [7:7488008231259705767:2406], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:16:16.213252Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=ZjZmMzZhNGEtNDc3NWRhY2YtNzFhNmM3ZjgtNzBjMWQ0Mjk=, ActorId: [7:7488008231259705663:2395], ActorState: ExecuteState, TraceId: 01jqpfth5b2z150pp78x3dq877, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:16:16.214252Z node 7 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T16:16:16.251586Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7488008209784868149:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:16.251705Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Size: 4194320 Got response:400: PathErrorUnknown Got response:400: No such partition in topic 2025-03-31T16:16:16.280052Z node 7 :PERSQUEUE ERROR: [PQ: 72075186224037889, Partition: 0, State: StateIdle] reading from too big offset - topic topic1 partition 0 client $without_consumer EndOffset 61 offset 10000 Got response:400: Bad offset ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::CopyTableAndDropCopy [GOOD] Test command err: 2025-03-31T16:16:08.972438Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008197124074245:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:08.972542Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cf9/r3tmp/tmpUtpmQA/pdisk_1.dat 2025-03-31T16:16:09.294728Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:09.340250Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:09.340385Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:09.342126Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20203 WaitRootIsUp 'dc-1'... 
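The producer3/producer4 traffic above is the legacy PersQueue write-session protocol: CDS request, init with a message_group_id, batched "Send N message(s)", per-sequence-number acknowledgements (the SDK's own tracing spells it "acknoledged"), then a graceful close. A rough equivalent in today's public C++ Topic SDK, with the topic path and producer id taken from the log (a sketch, not the test's code):

```cpp
// Sketch only: one-message write session, mirroring producer4's
// init -> Write -> ack -> close sequence traced above.
#include <ydb/public/sdk/cpp/client/ydb_topic/topic.h>

void WriteOne(NYdb::TDriver& driver) {
    NYdb::NTopic::TTopicClient client(driver);

    auto settings = NYdb::NTopic::TWriteSessionSettings()
        .Path("/Root/topic1")
        .ProducerId("producer4")
        .MessageGroupId("producer4");

    auto session = client.CreateSimpleBlockingWriteSession(settings);
    session->Write("payload");              // like seqNo 1 / offset 60 above
    session->Close(TDuration::Seconds(10)); // cf. "Write session will now close"
}
```

The 400 responses near the end of that test (PathErrorUnknown, "No such partition in topic", "Bad offset" for offset 10000 past EndOffset 61) are the viewer handler rejecting deliberately invalid reads, i.e. the test's expected negative cases rather than failures.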
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:09.530416Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:09.556554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... TClient::Ls request: /dc-1/Dir/Table_1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table_1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743437769668 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table_1" Columns { Name: "col_0" Type: "Int32" TypeId: 1 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "col_0" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... (TRUNCATED) TClient::Ls request: /dc-1/Dir/Table_1_Copy TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table_1_Copy" PathId: 4 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710660 CreateStep: 1743437769745 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table_1_Copy" Columns { Name: "col_0" Type: "Int32" TypeId: 1 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "col_0" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot... (TRUNCATED) 2025-03-31T16:16:09.716567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... waiting... 
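TFlatTest::CopyTableAndDropCopy, whose output this is, creates Table_N, copies it to Table_N_Copy, and verifies both via Ls, as seen above for Table_1. Through the public C++ SDK the copy step is a single call; a sketch with the paths from the log (the test itself drives this through the internal client):

```cpp
// Sketch only: scheme copy of an existing table, as in the
// Table_1 -> Table_1_Copy step above.
#include <ydb/public/sdk/cpp/client/ydb_table/table.h>

NYdb::TStatus CopyOne(NYdb::NTable::TTableClient& client) {
    return client.RetryOperationSync([](NYdb::NTable::TSession session) {
        return session.CopyTable("/dc-1/Dir/Table_1",
                                 "/dc-1/Dir/Table_1_Copy").GetValueSync();
    });
}
```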
TClient::Ls request: /dc-1/Dir/Table_2 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table_2" PathId: 5 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710661 CreateStep: 1743437769787 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table_2" Columns { Name: "col_0" Type: "Int32" TypeId: 1 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "col_1" Type: "Int32" TypeId: 1 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "col_0" ... (TRUNCATED) TClient::Ls request: /dc-1/Dir/Table_2_Copy TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table_2_Copy" PathId: 6 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710662 CreateStep: 1743437769815 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table_2_Copy" Columns { Name: "col_0" Type: "Int32" TypeId: 1 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "col_1" Type: "Int32" TypeId: 1 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: ... (TRUNCATED) 2025-03-31T16:16:09.784989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 waiting... waiting... TClient::Ls request: /dc-1/Dir/Table_3 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table_3" PathId: 7 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710663 CreateStep: 1743437769857 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table_3" Columns { Name: "col_0" Type: "Int32" TypeId: 1 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "col_1" Type: "Int32" TypeId: 1 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "col... 
(TRUNCATED) TClient::Ls request: /dc-1/Dir/Table_3_Copy TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table_3_Copy" PathId: 8 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710664 CreateStep: 1743437769878 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table_3_Copy" Columns { Name: "col_0" Type: "Int32" TypeId: 1 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "col_1" Type: "Int32" TypeId: 1 Id: 2 NotNull: false IsBuildInProgress: false } Columns { ... (TRUNCATED) 2025-03-31T16:16:09.849936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 waiting... waiting... TClient::Ls request: /dc-1/Dir/Table_4 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table_4" PathId: 9 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710665 CreateStep: 1743437769920 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table_4" Columns { Name: "col_0" Type: "Int32" TypeId: 1 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "col_1" Type: "Int32" TypeId: 1 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "col... (TRUNCATED) TClient::Ls request: /dc-1/Dir/Table_4_Copy TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table_4_Copy" PathId: 10 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710666 CreateStep: 1743437769948 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table_4_Copy" Columns { Name: "col_0" Type: "Int32" TypeId: 1 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "col_1" Type: "Int32" TypeId: 1 Id: 2 NotNull: false IsBuildInProgress: false ... 
r::TEvDataShard::TEvProposeTransactionResult> complete, operationId: 281474976715686:0, at schemeshard: 72057594046644480 2025-03-31T16:16:19.243961Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715686:0, at schemeshard: 72057594046644480 2025-03-31T16:16:19.244006Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715686:0, at schemeshard: 72057594046644480 2025-03-31T16:16:19.244066Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715686 datashard 72075186224037895 state Ready 2025-03-31T16:16:19.244078Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715686:0, at schemeshard: 72057594046644480 2025-03-31T16:16:19.244094Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715686:0 ProgressState 2025-03-31T16:16:19.244101Z node 2 :TX_DATASHARD DEBUG: 72075186224037895 Got TEvSchemaChangedResult from SS at 72075186224037895 2025-03-31T16:16:19.244164Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715686:0 progress is 1/1 2025-03-31T16:16:19.244183Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715686 ready parts: 1/1 2025-03-31T16:16:19.244198Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715686:0 progress is 1/1 2025-03-31T16:16:19.244211Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715686 ready parts: 1/1 2025-03-31T16:16:19.244228Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715686, ready parts: 1/1, is published: true 2025-03-31T16:16:19.244228Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715686 datashard 72075186224037894 state Ready 2025-03-31T16:16:19.244249Z node 2 :TX_DATASHARD DEBUG: 72075186224037894 Got TEvSchemaChangedResult from SS at 72075186224037894 2025-03-31T16:16:19.244267Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:7488008245009957244:2401] message: TxId: 281474976715686 2025-03-31T16:16:19.244285Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715686 ready parts: 1/1 2025-03-31T16:16:19.244350Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715686:0 2025-03-31T16:16:19.244375Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715686:0 2025-03-31T16:16:19.244485Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 6] was 4 2025-03-31T16:16:19.248059Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037894, clientId# [2:7488008245009957358:3008], serverId# [2:7488008245009957359:3009], sessionId# [0:0:0] 2025-03-31T16:16:19.248152Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037894 2025-03-31T16:16:19.249142Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037894 2025-03-31T16:16:19.249213Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-03-31T16:16:19.252432Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037895, clientId# [2:7488008245009957368:3015], serverId# [2:7488008245009957369:3016], sessionId# [0:0:0] 2025-03-31T16:16:19.252549Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037895 2025-03-31T16:16:19.253724Z node 
2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037895 2025-03-31T16:16:19.253763Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2025-03-31T16:16:19.257374Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037894 2025-03-31T16:16:19.258509Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037894 2025-03-31T16:16:19.258564Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-03-31T16:16:19.261840Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037895 2025-03-31T16:16:19.263424Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037895 2025-03-31T16:16:19.263480Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2025-03-31T16:16:19.266085Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037894 2025-03-31T16:16:19.267224Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037894 2025-03-31T16:16:19.267276Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-03-31T16:16:19.268152Z node 2 :OPS_COMPACT INFO: Compact{72075186224037894.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-03-31T16:16:19.268550Z node 2 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037894, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-31T16:16:19.268590Z node 2 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037894, table# 1001, finished edge# 0, front# 0 2025-03-31T16:16:19.270589Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037895 2025-03-31T16:16:19.271819Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037895 2025-03-31T16:16:19.271873Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2025-03-31T16:16:19.272880Z node 2 :OPS_COMPACT INFO: Compact{72075186224037895.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-03-31T16:16:19.273546Z node 2 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037895, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-31T16:16:19.273594Z node 2 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037895, table# 1001, finished edge# 0, front# 0 2025-03-31T16:16:19.274958Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037894 2025-03-31T16:16:19.275792Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037894 2025-03-31T16:16:19.275846Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-03-31T16:16:19.278072Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037895 2025-03-31T16:16:19.278864Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037895 2025-03-31T16:16:19.278905Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2025-03-31T16:16:19.281740Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037894 2025-03-31T16:16:19.283186Z node 2 :TX_DATASHARD DEBUG: 
TTxProposeTransactionBase::Complete at 72075186224037894 2025-03-31T16:16:19.283259Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-03-31T16:16:19.286515Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037895 2025-03-31T16:16:19.287569Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037895 2025-03-31T16:16:19.287614Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2025-03-31T16:16:19.290438Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037894 2025-03-31T16:16:19.291532Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037894 2025-03-31T16:16:19.291586Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-03-31T16:16:19.293002Z node 2 :OPS_COMPACT INFO: Compact{72075186224037894.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2025-03-31T16:16:19.293334Z node 2 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037894, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-31T16:16:19.293360Z node 2 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037894, table# 1001, finished edge# 0, front# 0 2025-03-31T16:16:19.293990Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037895 2025-03-31T16:16:19.295105Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037895 2025-03-31T16:16:19.295153Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2025-03-31T16:16:19.296950Z node 2 :OPS_COMPACT INFO: Compact{72075186224037895.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 2025-03-31T16:16:19.297350Z node 2 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037895, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-31T16:16:19.297372Z node 2 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037895, table# 1001, finished edge# 0, front# 0 2025-03-31T16:16:19.297609Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037894 2025-03-31T16:16:19.299272Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037894 2025-03-31T16:16:19.299326Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-03-31T16:16:19.301667Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037895 2025-03-31T16:16:19.303346Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037895 2025-03-31T16:16:19.303393Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2025-03-31T16:16:19.305554Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037894 2025-03-31T16:16:19.307445Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037894 2025-03-31T16:16:19.307509Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-03-31T16:16:19.309680Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037895 2025-03-31T16:16:19.311572Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 
72075186224037895 2025-03-31T16:16:19.311625Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 Check that tablet 72075186224037892 was deleted 2025-03-31T16:16:19.312244Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037892) Check that tablet 72075186224037893 was deleted 2025-03-31T16:16:19.312537Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037893) Check that tablet 72075186224037888 was deleted 2025-03-31T16:16:19.312755Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037888) Check that tablet 72075186224037889 was deleted 2025-03-31T16:16:19.313070Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037889) Check that tablet 72075186224037890 was deleted 2025-03-31T16:16:19.313330Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037890) Check that tablet 72075186224037891 was deleted 2025-03-31T16:16:19.313664Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037891) >> TFlatTest::ShardFreezeUnfreeze [GOOD] >> TFlatTest::SelectRangeItemsLimit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::RejectByIncomingReadSetSize [GOOD] Test command err: 2025-03-31T16:16:15.528887Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008228979559148:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:15.529010Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cef/r3tmp/tmpXtoeOP/pdisk_1.dat 2025-03-31T16:16:15.957918Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:15.982647Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:15.982769Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:15.988456Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:62753 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
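The trace that follows is the full schemeshard life cycle of one ESchemeOpMkDir: TTxOperationPropose accepts the transaction, the coordinator assigns a plan step, TTxPublishToSchemeBoard pushes the new path versions, and TEvUpdateAck closes the loop. All of that is triggered by a single directory-creation request, sketched here with the public C++ SDK and the path from the trace (an illustration, not the test's code):

```cpp
// Sketch only: the client-side call behind the ESchemeOpMkDir
// propose/plan/publish sequence traced below.
#include <ydb/public/sdk/cpp/client/ydb_scheme/scheme.h>

NYdb::TStatus MakeDir(NYdb::TDriver& driver) {
    NYdb::NScheme::TSchemeClient scheme(driver);
    return scheme.MakeDirectory("/dc-1/Dir1").GetValueSync();
}
```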
2025-03-31T16:16:16.294359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:16.379202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/dc-1" OperationType: ESchemeOpMkDir MkDir { Name: "Dir1" } } TxId: 281474976710658 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-03-31T16:16:16.379369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /dc-1/Dir1, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:16:16.379477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: dc-1, child name: Dir1, child id: [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-03-31T16:16:16.379517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 0 2025-03-31T16:16:16.379576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-31T16:16:16.379731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-03-31T16:16:16.379764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-03-31T16:16:16.381498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710658, response: Status: StatusAccepted TxId: 281474976710658 SchemeshardId: 72057594046644480 PathId: 2, at schemeshard: 72057594046644480 2025-03-31T16:16:16.381640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /dc-1, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /dc-1/Dir1 2025-03-31T16:16:16.381787Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-03-31T16:16:16.381810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-03-31T16:16:16.381892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2025-03-31T16:16:16.381985Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-03-31T16:16:16.382005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7488008228979559813:2374], at schemeshard: 72057594046644480, txId: 281474976710658, path id: 1 2025-03-31T16:16:16.382041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7488008228979559813:2374], at schemeshard: 72057594046644480, txId: 281474976710658, path id: 2 2025-03-31T16:16:16.382068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:16:16.382083Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId# 281474976710658:0 ProgressState, at schemeshard: 
72057594046644480 2025-03-31T16:16:16.382109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710658 ready parts: 1/1 waiting... 2025-03-31T16:16:16.384898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710658 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:16:16.386217Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-03-31T16:16:16.386297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-03-31T16:16:16.386310Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-03-31T16:16:16.386323Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-03-31T16:16:16.386375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-03-31T16:16:16.386545Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-03-31T16:16:16.386611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-03-31T16:16:16.386618Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-03-31T16:16:16.386625Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2025-03-31T16:16:16.386648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-03-31T16:16:16.386676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710658, ready parts: 0/1, is published: true 2025-03-31T16:16:16.386784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-31T16:16:16.386798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710658, ready parts: 0/1, is published: true 2025-03-31T16:16:16.386810Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-31T16:16:16.387029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710658:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710658 msg type: 269090816 2025-03-31T16:16:16.387163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710658, partId: 4294967295, tablet: 
72057594046316545 2025-03-31T16:16:16.388279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710658 2025-03-31T16:16:16.388336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710658 2025-03-31T16:16:16.388552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743437776437, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-31T16:16:16.388658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743437776437 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-03-31T16:16:16.388687Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId# 281474976710658:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1743437776437, at schemeshard: 72057594046644480 2025-03-31T16:16:16.388792Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 240 2025-03-31T16:16:16.388897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-03-31T16:16:16.388927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-03-31T16:16:16.390278Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-03-31T16:16:16.390301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-03-31T16:16:16.390429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2025-03-31T16:16:16.390571Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-03-31T16:16:16.390593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7488008228979559813:2374], at schemeshard: 72057594046644480, txId: 281474976710658, path id: 1 2025-03-31T16:16:16.390608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7488008228979559813:2374], at schemeshard: 72057594046644480, txId: 281474976710658, path id: 2 2025-03-31T16:16:16.390638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:16:16.390659Z node 1 : ... 
cation still in progress, tx: 281474976710661, publications: 2, subscribers: 1 2025-03-31T16:16:16.588300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710661, [OwnerId: 72057594046644480, LocalPathId: 1], 9 2025-03-31T16:16:16.588306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710661, [OwnerId: 72057594046644480, LocalPathId: 4], 3 2025-03-31T16:16:16.588419Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:16} Tx{27, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} hope 1 -> done Change{36, redo 162b alter 0b annex 0, ~{ 42, 4 } -{ }, 0 gb} 2025-03-31T16:16:16.588444Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:16} Tx{27, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} release 4194304b of static, Memory{0 dyn 0} 2025-03-31T16:16:16.588858Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 4 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046644480, cookie: 281474976710661 2025-03-31T16:16:16.588904Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:17} Tx{28, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard 2025-03-31T16:16:16.588959Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:17} Tx{28, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-31T16:16:16.589012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 4 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046644480, cookie: 281474976710661 2025-03-31T16:16:16.589032Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976710661 2025-03-31T16:16:16.589050Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710661, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 9 2025-03-31T16:16:16.589062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 4 2025-03-31T16:16:16.589155Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:17} Tx{28, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} hope 1 -> done Change{37, redo 166b alter 0b annex 0, ~{ 48, 59 } -{ }, 0 gb} 2025-03-31T16:16:16.589182Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:17} Tx{28, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} release 4194304b of static, Memory{0 dyn 0} 2025-03-31T16:16:16.589280Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 4 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710661 2025-03-31T16:16:16.589337Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:18} Tx{29, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard 2025-03-31T16:16:16.589365Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:18} Tx{29, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-31T16:16:16.589404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 
72057594046644480, msg: Owner: 72057594046644480 Generation: 4 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710661 2025-03-31T16:16:16.589422Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710661 2025-03-31T16:16:16.589431Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710661, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 3 2025-03-31T16:16:16.589439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 1 2025-03-31T16:16:16.589479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710661, subscribers: 1 2025-03-31T16:16:16.589490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [1:7488008233274527479:2303] 2025-03-31T16:16:16.589561Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:18} Tx{29, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} hope 1 -> done Change{38, redo 166b alter 0b annex 0, ~{ 48, 59 } -{ }, 0 gb} 2025-03-31T16:16:16.589588Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:18} Tx{29, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} release 4194304b of static, Memory{0 dyn 0} 2025-03-31T16:16:16.589625Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:17:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:16:16.589651Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:17:1:24576:107:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:16:16.589690Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:18} commited cookie 1 for step 17 2025-03-31T16:16:16.590297Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:4:16:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:16:16.590321Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:4:16:1:24576:121:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:16:16.590374Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:19} commited cookie 1 for step 16 2025-03-31T16:16:16.590380Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:18} Tx{19, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} queued, type NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep 2025-03-31T16:16:16.590395Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:18} Tx{19, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-31T16:16:16.590434Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:18} Tx{19, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} hope 1 -> done Change{14, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-03-31T16:16:16.590463Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:18} Tx{19, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2025-03-31T16:16:16.590484Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046382081] send [1:7488008228979559797:2359] 2025-03-31T16:16:16.590498Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046382081] push event to server 
[1:7488008228979559797:2359] 2025-03-31T16:16:16.590527Z node 1 :PIPE_SERVER DEBUG: [72057594046382081] HandleSend Sender# [1:7488008228979559795:2359] EventType# 269156352 2025-03-31T16:16:16.590764Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:4:17:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:16:16.590771Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:4:18:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:16:16.590787Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:4:18:1:24576:131:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:16:16.590789Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:4:17:1:24576:118:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:16:16.590825Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:19} commited cookie 1 for step 17 2025-03-31T16:16:16.590838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710661 2025-03-31T16:16:16.590876Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:19} commited cookie 1 for step 18 2025-03-31T16:16:16.590889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710661 2025-03-31T16:16:16.590979Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] received poison pill [1:7488008233274527480:2303] 2025-03-31T16:16:16.591010Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] notify reset [1:7488008233274527480:2303] 2025-03-31T16:16:16.591028Z node 1 :PIPE_SERVER DEBUG: [72057594046644480] Got PeerClosed from# [1:7488008233274527480:2303] 2025-03-31T16:16:18.623492Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008241178386227:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:18.623562Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cef/r3tmp/tmpMsRchs/pdisk_1.dat 2025-03-31T16:16:18.714013Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:18.759204Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:18.759309Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:18.760978Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13926 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:18.884305Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:18.907767Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:21.774107Z node 2 :TX_PROXY ERROR: Actor# [2:7488008254063289247:2613] txid# 281474976715700 FailProposedRequest: Transaction incoming read set size 1000081 for tablet 72075186224037889 exceeded limit 1000 Status# ExecError 2025-03-31T16:16:21.774192Z node 2 :TX_PROXY ERROR: Actor# [2:7488008254063289247:2613] txid# 281474976715700 RESPONSE Status# ExecError marker# P13c
>> TObjectStorageListingTest::Decimal [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::WriteSplitWriteSplit [GOOD]
Test command err:
2025-03-31T16:16:16.841565Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008233425429735:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:16.841664Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001ceb/r3tmp/tmpbFJEmc/pdisk_1.dat 2025-03-31T16:16:17.103572Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:17.180362Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:17.180546Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:17.182791Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14542 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:17.352838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:17.384827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:17.524138Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-03-31T16:16:17.527985Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-03-31T16:16:17.548945Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2025-03-31T16:16:17.552187Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 2025-03-31T16:16:17.571757Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.21, eph 3} end=0, 4 blobs 8r (max 9), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (3250 2180 6413)b }, ecr=1.000 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743437777494 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "unused004" Type: "Float" TypeId: 33 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name... 
(TRUNCATED) 2025-03-31T16:16:17.718382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { OperationType: ESchemeOpSplitMergeTablePartitions SplitMergeTablePartitions { TablePath: "/dc-1/Dir/TableOld" SourceTabletId: 72075186224037888 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 100 } } } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 200 } } } } } } TxId: 281474976710680 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-03-31T16:16:17.718631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TSplitMerge Propose, tableStr: /dc-1/Dir/TableOld, tableId: , opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-31T16:16:17.718948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-03-31T16:16:17.719025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4 2025-03-31T16:16:17.719043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 5 2025-03-31T16:16:17.719301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 6 2025-03-31T16:16:17.719433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710680:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-31T16:16:17.720400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710680, response: Status: StatusAccepted TxId: 281474976710680 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2025-03-31T16:16:17.720504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710680, subject: , status: StatusAccepted, operation: ALTER TABLE PARTITIONS, path: /dc-1/Dir/TableOld 2025-03-31T16:16:17.720655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-31T16:16:17.720698Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710680:0 ProgressState, operation type: TxSplitTablePartition, at tablet# 72057594046644480 2025-03-31T16:16:17.721029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710680:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 3 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 3 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } waiting... 
2025-03-31T16:16:17.721214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710680:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 4 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 3 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-03-31T16:16:17.721291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710680:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 5 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 3 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-03-31T16:16:17.721770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710680:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:3 msg type: 268697601 2025-03-31T16:16:17.721891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710680:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:4 msg type: 268697601 2025-03-31T16:16:17.721957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710680:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:5 msg type: 268697601 2025-03-31T16:16:17.722090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710680, partId: 0, tablet: 72057594037968897 2025-03-31T16:16:17.722128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 281474976710680, shardIdx: 72057594046644480:3, partId: 0 2025-03-31T16:16:17.722139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 281474976710680, shardIdx: 72057594046644480:4, partId: 0 2025-03-31T16:16:17.722146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 281474976710680, shardIdx: 72057594046644480:5, partId: 0 2025-03-31T16:16:17.722256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710680, at schemeshard: 72057594046644480 2025-03-31T16:16:17.722302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710680, ready parts: 0/1, is published: true 2025-03-31T16:16:17.722326Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710680, at schemeshard: 72057594046644480 2025-03-31T16:16:17.725301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvCreateTabletReply at schemeshard: 72057594046644480 message: Status: OK Owner: 72057594046644480 OwnerIdx: 3 TabletID: 72075186224037890 Origin: 72057594037968897 2025-03-31T16:16:17.725347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByShardIdx, TxId: 281474976710680, shardIdx: 72057594046644480:3, partId: 0 2025-03-31T16:16:17.725488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710680:0, at schemeshard: 72057594046644480, message: Status: OK Owner: 72057594046644480 OwnerIdx: 3 TabletID: 72075186224037890 Origin: 72057594037968897 2025-03-31T16:16:17.725534Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: TCreateParts opId# 281474976710680:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-03-31T16:16:17.725659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710680:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046644480 OwnerIdx: 3 TabletID: 72075186224037890 Origin: 72057594037968897 2025-03-31T16:16:17.726002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvCreateTabletReply at schemeshard: 72057594046644480 message: Status: OK Owner: 72057594046644480 OwnerIdx: 4 TabletID: 72075186224037891 Origin: 72057594037968897 2025-03-31T16:16:17.726035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRela ... 9 2025-03-31T16:16:20.306034Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715694:0 progress is 1/1 2025-03-31T16:16:20.306050Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715694 ready parts: 1/1 2025-03-31T16:16:20.306067Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715694:0 progress is 1/1 2025-03-31T16:16:20.306075Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715694 ready parts: 1/1 2025-03-31T16:16:20.306088Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715694, ready parts: 1/1, is published: true 2025-03-31T16:16:20.306135Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:7488008251434232238:2423] message: TxId: 281474976715694 2025-03-31T16:16:20.306156Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715694 ready parts: 1/1 2025-03-31T16:16:20.306172Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715694:0 2025-03-31T16:16:20.306178Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715694:0 2025-03-31T16:16:20.306287Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 8 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 128 StatusCode: PATH_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Path not found" 2025-03-31T16:16:20.313883Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7488008247139264254 RawX2: 4503608217307386 } TabletId: 72075186224037888 State: 4 2025-03-31T16:16:20.313936Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037888, state: Offline, at schemeshard: 72057594046644480 2025-03-31T16:16:20.314158Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7488008251434231894 RawX2: 4503608217307456 } TabletId: 72075186224037891 State: 4 2025-03-31T16:16:20.314179Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2025-03-31T16:16:20.314295Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7488008251434232090 RawX2: 4503608217307482 } TabletId: 72075186224037893 State: 4 2025-03-31T16:16:20.314312Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 
72075186224037893, state: Offline, at schemeshard: 72057594046644480 2025-03-31T16:16:20.314389Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7488008251434232092 RawX2: 4503608217307483 } TabletId: 72075186224037894 State: 4 2025-03-31T16:16:20.314410Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037894, state: Offline, at schemeshard: 72057594046644480 2025-03-31T16:16:20.314514Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7488008251434231888 RawX2: 4503608217307455 } TabletId: 72075186224037892 State: 4 2025-03-31T16:16:20.314538Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037892, state: Offline, at schemeshard: 72057594046644480 2025-03-31T16:16:20.314623Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7488008247139264255 RawX2: 4503608217307387 } TabletId: 72075186224037889 State: 4 2025-03-31T16:16:20.314646Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2025-03-31T16:16:20.314740Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7488008251434231887 RawX2: 4503608217307454 } TabletId: 72075186224037890 State: 4 2025-03-31T16:16:20.314787Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-03-31T16:16:20.314969Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:1 hive 72057594037968897 at ss 72057594046644480 2025-03-31T16:16:20.315119Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-03-31T16:16:20.315193Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:6 hive 72057594037968897 at ss 72057594046644480 2025-03-31T16:16:20.315237Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:7 hive 72057594037968897 at ss 72057594046644480 2025-03-31T16:16:20.315284Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:5 hive 72057594037968897 at ss 72057594046644480 2025-03-31T16:16:20.315313Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2025-03-31T16:16:20.315343Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-03-31T16:16:20.316615Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2025-03-31T16:16:20.316931Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 7 2025-03-31T16:16:20.317145Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-03-31T16:16:20.317308Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 6 2025-03-31T16:16:20.317435Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 6 ShardOwnerId: 72057594046644480 ShardLocalIdx: 6, at schemeshard: 72057594046644480 2025-03-31T16:16:20.317576Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 5 2025-03-31T16:16:20.317693Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 7 ShardOwnerId: 72057594046644480 ShardLocalIdx: 7, at schemeshard: 72057594046644480 2025-03-31T16:16:20.317817Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4 2025-03-31T16:16:20.317924Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2025-03-31T16:16:20.318042Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-03-31T16:16:20.318151Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-03-31T16:16:20.318273Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-03-31T16:16:20.318325Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2025-03-31T16:16:20.318376Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-03-31T16:16:20.318499Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-03-31T16:16:20.318649Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-03-31T16:16:20.318675Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-03-31T16:16:20.318716Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-03-31T16:16:20.319633Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:1 2025-03-31T16:16:20.319660Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2025-03-31T16:16:20.319700Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2025-03-31T16:16:20.319709Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-03-31T16:16:20.319729Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:6 2025-03-31T16:16:20.319736Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:6 tabletId 72075186224037893 2025-03-31T16:16:20.319995Z node 2 :HIVE WARN: 
HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found 2025-03-31T16:16:20.320025Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037893 not found 2025-03-31T16:16:20.320037Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037892 not found 2025-03-31T16:16:20.320050Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2025-03-31T16:16:20.320114Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2025-03-31T16:16:20.320134Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037894 not found 2025-03-31T16:16:20.320147Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:7 2025-03-31T16:16:20.320162Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:7 tabletId 72075186224037894 2025-03-31T16:16:20.320196Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:5 2025-03-31T16:16:20.320215Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:5 tabletId 72075186224037892 2025-03-31T16:16:20.320457Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2 2025-03-31T16:16:20.320477Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-03-31T16:16:20.320498Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-03-31T16:16:20.320513Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-03-31T16:16:20.320538Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480
>> TFlatTest::LargeDatashardReply [GOOD]
>> TObjectStorageListingTest::Split [GOOD]
>> TObjectStorageListingTest::SuffixColumns
>> TFlatTest::LargeProxyReply
>> THealthCheckTest::Issues100Groups100VCardMerging [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::ShardFreezeUnfreeze [GOOD]
Test command err:
2025-03-31T16:16:18.016972Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008239326509035:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:18.017107Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001ce9/r3tmp/tmpXGhJf6/pdisk_1.dat 2025-03-31T16:16:18.311865Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:18.317306Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:18.317577Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:18.320083Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29278 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:18.565917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:18.582740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:18.689682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:18.705257Z node 1 :TX_PROXY ERROR: Actor# [1:7488008239326509761:2393] txid# 281474976710660, issues: { message: "Requested freeze state already set" severity: 1 } Error 1: Requested freeze state already set waiting... 2025-03-31T16:16:18.707377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:16:18.718186Z node 1 :TX_PROXY ERROR: Actor# [1:7488008239326509801:2427] txid# 281474976710662, issues: { message: "Requested freeze state already set" severity: 1 } Error 1: Requested freeze state already set 2025-03-31T16:16:20.715864Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008249058655570:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:20.715944Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001ce9/r3tmp/tmp7bRzmk/pdisk_1.dat 2025-03-31T16:16:20.801541Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:20.845075Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:20.845142Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:20.846766Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8481 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:20.989045Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:20.996998Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:21.049721Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:21.072289Z node 2 :TX_DATASHARD ERROR: Shard 72075186224037888 cannot parse tx 281474976715660: 2025-03-31T16:16:21.072429Z node 2 :TX_PROXY ERROR: Actor# [2:7488008253353623597:2392] txid# 281474976715660 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# ERROR shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2025-03-31T16:16:21.072511Z node 2 :TX_PROXY ERROR: Actor# [2:7488008253353623597:2392] txid# 281474976715660 HANDLE PrepareErrors TEvProposeTransactionResult TDataReq TabletStatus# StatusWait shard id 72075186224037888 2025-03-31T16:16:21.072544Z node 2 :TX_PROXY ERROR: Actor# [2:7488008253353623597:2392] txid# 281474976715660 invalidateDistCache: 0 DIE TDataReq MarkShardError TabletsLeft# 1 2025-03-31T16:16:21.075259Z node 2 :TX_DATASHARD ERROR: Shard 72075186224037888 cannot parse tx 281474976715661: 2025-03-31T16:16:21.075350Z node 2 :TX_PROXY ERROR: Actor# [2:7488008253353623605:2397] txid# 281474976715661 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# ERROR shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2025-03-31T16:16:21.075411Z node 2 :TX_PROXY ERROR: Actor# [2:7488008253353623605:2397] txid# 281474976715661 HANDLE PrepareErrors TEvProposeTransactionResult TDataReq TabletStatus# StatusWait shard id 72075186224037888 2025-03-31T16:16:21.075433Z node 2 :TX_PROXY ERROR: Actor# [2:7488008253353623605:2397] txid# 281474976715661 invalidateDistCache: 0 DIE TDataReq MarkShardError TabletsLeft# 1 2025-03-31T16:16:21.083981Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 waiting... 
>> TLocksTest::SetLockFail
>> TFlatTest::SelectRangeReverseItemsLimit
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TObjectStorageListingTest::Decimal [GOOD]
Test command err:
2025-03-31T16:16:18.074840Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008240051021299:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:18.075066Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001ce7/r3tmp/tmpnLjJ03/pdisk_1.dat 2025-03-31T16:16:18.361114Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30002, node 1 2025-03-31T16:16:18.423956Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:18.424165Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:18.425890Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:16:18.443558Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:16:18.443587Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:16:18.443599Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:16:18.443737Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23086 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:18.716200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:18.761151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:21.009194Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008255443920971:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:21.009287Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001ce7/r3tmp/tmp8NYmeQ/pdisk_1.dat 2025-03-31T16:16:21.112056Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22500, node 2 2025-03-31T16:16:21.155511Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:21.155661Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:21.157777Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:16:21.176172Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:16:21.176189Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:16:21.176196Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:16:21.176312Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22946 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:21.384230Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:21.390209Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:21.408575Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
>> TFlatTest::CopyTableAndReturnPartAfterCompaction [GOOD]
>> TFlatTest::CopyTableDropOriginalAndReturnPartAfterCompaction
>> TObjectStorageListingTest::TestFilter
------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::Issues100Groups100VCardMerging [GOOD]
Test command err:
2025-03-31T16:15:51.462950Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:700:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:51.463284Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:51.463376Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:15:51.464641Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:697:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:51.464861Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:51.464928Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d4a/r3tmp/tmplxYBPA/pdisk_1.dat 2025-03-31T16:15:51.757460Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61942, node 1 TClient is connected to server localhost:22617 2025-03-31T16:15:52.177680Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:15:52.177741Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:15:52.177795Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:15:52.178176Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:15:58.160637Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:284:2217], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:58.160989Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:15:58.161084Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:58.161870Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:700:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:58.162066Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:58.162157Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d4a/r3tmp/tmpRQcQiS/pdisk_1.dat 2025-03-31T16:15:58.398276Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29423, node 3 TClient is connected to server localhost:31325 2025-03-31T16:15:58.788726Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:15:58.788804Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:15:58.788873Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:15:58.789589Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:16:05.124799Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:698:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:16:05.125070Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:16:05.125204Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:16:05.126292Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:695:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:16:05.126516Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:16:05.126596Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d4a/r3tmp/tmptECJ6s/pdisk_1.dat 2025-03-31T16:16:05.396261Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25732, node 5 TClient is connected to server localhost:15485 2025-03-31T16:16:05.799390Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:16:05.799453Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:16:05.799480Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:16:05.799938Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:16:13.450807Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:413:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:16:13.451264Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:16:13.451858Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:16:13.453220Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:703:2356], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:16:13.453570Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:16:13.453712Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d4a/r3tmp/tmpBgmqaY/pdisk_1.dat 2025-03-31T16:16:13.776147Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18323, node 7 TClient is connected to server localhost:18648 2025-03-31T16:16:14.208251Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:16:14.208323Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:16:14.208362Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:16:14.208657Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:16:22.364304Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:284:2217], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:16:22.364934Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:16:22.365157Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:16:22.365903Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [10:700:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:16:22.366331Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:16:22.366501Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d4a/r3tmp/tmpdyvxMD/pdisk_1.dat 2025-03-31T16:16:22.756247Z node 9 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62708, node 9 TClient is connected to server localhost:26110 2025-03-31T16:16:23.240396Z node 9 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:16:23.240459Z node 9 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:16:23.240497Z node 9 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:16:23.241178Z node 9 :NET_CLASSIFIER ERROR: got bad distributable configuration ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::LargeDatashardReply [GOOD] Test command err: 2025-03-31T16:16:12.208280Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008213863000761:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:12.208416Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cf1/r3tmp/tmpDDQehJ/pdisk_1.dat 2025-03-31T16:16:12.486724Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:12.580787Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvRegisterNode from [1:7488008213863000994:2099] HiveId: 72057594037968897 ServicedDomains { SchemeShard: 72057594046644480 PathId: 1 } TabletAvailability { Type: Mediator Priority: 0 } TabletAvailability { Type: Dummy Priority: 0 } TabletAvailability { Type: KeyValue Priority: 0 } TabletAvailability { Type: Coordinator Priority: 0 } TabletAvailability { Type: Hive Priority: 0 } TabletAvailability { Type: SchemeShard Priority: 0 } TabletAvailability { Type: DataShard Priority: 0 } TabletAvailability { Type: PersQueue Priority: 0 } TabletAvailability { Type: PersQueueReadBalancer Priority: 0 } TabletAvailability { Type: Kesus Priority: 0 } TabletAvailability { Type: SysViewProcessor Priority: 0 } TabletAvailability { Type: ColumnShard Priority: 0 } TabletAvailability { Type: SequenceShard Priority: 0 } TabletAvailability { Type: ReplicationController Priority: 0 } TabletAvailability { Type: StatisticsAggregator Priority: 0 } 2025-03-31T16:16:12.580933Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxRegisterNode(1)::Execute 2025-03-31T16:16:12.581033Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:12.581046Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2025-03-31T16:16:12.581071Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2025-03-31T16:16:12.581085Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue 
(0) 2025-03-31T16:16:12.581095Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2025-03-31T16:16:12.581157Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:12.581306Z node 1 :HIVE DEBUG: HIVE#72057594037968897 TEvInterconnect::TEvNodeInfo NodeId 1 Location DataCenter: "1" Module: "1" Rack: "1" Unit: "1" 2025-03-31T16:16:12.581348Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Execute 2025-03-31T16:16:12.581359Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessBootQueue (size: 0) 2025-03-31T16:16:12.581418Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessWaitQueue (size: 0) 2025-03-31T16:16:12.581433Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2025-03-31T16:16:12.582241Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxRegisterNode(1)::Complete 2025-03-31T16:16:12.582290Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Node(1) Ping([1:7488008213863000994:2099]) 2025-03-31T16:16:12.582347Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Complete 2025-03-31T16:16:12.582741Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::Handle::TEvSyncTablets 2025-03-31T16:16:12.582812Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxSyncTablets([1:7488008213863000994:2099])::Execute 2025-03-31T16:16:12.582837Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2025-03-31T16:16:12.582877Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxSyncTablets([1:7488008213863000994:2099])::Complete 2025-03-31T16:16:12.582999Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvStatus for Node 1: Status: 0 StartTime: 1743437772217290 ResourceMaximum { Memory: 270443347968 } 2025-03-31T16:16:12.583061Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStatus(1)::Execute 2025-03-31T16:16:12.583126Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:16:12.583293Z node 1 :HIVE DEBUG: HIVE#72057594037968897 AddRegisteredDataCentersNode(1, 1) 2025-03-31T16:16:12.583354Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2025-03-31T16:16:12.583374Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2025-03-31T16:16:12.583490Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Execute 2025-03-31T16:16:12.583518Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessBootQueue (size: 0) 2025-03-31T16:16:12.583534Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessWaitQueue (size: 0) 2025-03-31T16:16:12.583575Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2025-03-31T16:16:12.584689Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStatus(1)::Complete 2025-03-31T16:16:12.584725Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Complete TClient is connected to server localhost:18984 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-03-31T16:16:12.660503Z node 1 :TX_PROXY DEBUG: actor# [1:7488008213863001016:2103] Handle TEvNavigate describe path dc-1 2025-03-31T16:16:12.660572Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008213863001293:2256] HANDLE EvNavigateScheme dc-1 2025-03-31T16:16:12.662135Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008213863001293:2256] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-31T16:16:12.726621Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008213863001293:2256] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-03-31T16:16:12.735758Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008213863001293:2256] Handle TEvDescribeSchemeResult Forward to# [1:7488008213863001292:2255] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-03-31T16:16:12.750419Z node 1 :TX_PROXY DEBUG: actor# [1:7488008213863001016:2103] Handle TEvProposeTransaction 2025-03-31T16:16:12.750452Z node 1 :TX_PROXY DEBUG: actor# [1:7488008213863001016:2103] TxId# 281474976710657 ProcessProposeTransaction 2025-03-31T16:16:12.750575Z node 1 :TX_PROXY DEBUG: actor# [1:7488008213863001016:2103] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7488008213863001307:2263] 2025-03-31T16:16:12.878414Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008213863001307:2263] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" PeerName: "" 2025-03-31T16:16:12.878469Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008213863001307:2263] txid# 281474976710657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T16:16:12.878542Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008213863001307:2263] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:16:12.878939Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008213863001307:2263] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:16:12.879061Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008213863001307:2263] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-03-31T16:16:12.879161Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008213863001307:2263] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-03-31T16:16:12.879348Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008213863001307:2263] txid# 281474976710657 HANDLE EvClientConnected 2025-03-31T16:16:12.881674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "/dc-1:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-03-31T16:16:12.881858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //dc-1, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:16:12.882100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-03-31T16:16:12.882329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-31T16:16:12.882370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:16:12.882914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-03-31T16:16:12.883046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //dc-1 
2025-03-31T16:16:12.883157Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008213863001307:2263] txid# ... 8775Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 281474976710674:0 ProgressState, at schemeshard: 72057594046644480 2025-03-31T16:16:13.558813Z node 1 :TX_DATASHARD DEBUG: 72075186224037899 Got TEvSchemaChangedResult from SS at 72075186224037899 2025-03-31T16:16:13.558941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046644480, LocalPathId: 6] was 3 2025-03-31T16:16:13.559007Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710674:0 progress is 1/1 2025-03-31T16:16:13.559035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710674 ready parts: 1/1 2025-03-31T16:16:13.559047Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710674:0 progress is 1/1 2025-03-31T16:16:13.559051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710674 ready parts: 1/1 2025-03-31T16:16:13.559059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710674, ready parts: 1/1, is published: true 2025-03-31T16:16:13.559126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:7488008218157969669:2380] message: TxId: 281474976710674 2025-03-31T16:16:13.559150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710674 ready parts: 1/1 2025-03-31T16:16:13.559163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710674:0 2025-03-31T16:16:13.559171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710674:0 2025-03-31T16:16:13.559250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 6] was 2 2025-03-31T16:16:13.560121Z node 1 :TX_DATASHARD DEBUG: 72075186224037899 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-03-31T16:16:13.560193Z node 1 :TX_DATASHARD INFO: 72075186224037899 Initiating switch from PreOffline to Offline state 2025-03-31T16:16:13.566036Z node 1 :TX_DATASHARD INFO: 72075186224037899 Reporting state Offline to schemeshard 72057594046644480 2025-03-31T16:16:13.566405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7488008218157969219 RawX2: 4503603922340127 } TabletId: 72075186224037899 State: 4 2025-03-31T16:16:13.566481Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037899, state: Offline, at schemeshard: 72057594046644480 2025-03-31T16:16:13.566881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:12 hive 72057594037968897 at ss 72057594046644480 2025-03-31T16:16:13.567194Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() ShardOwnerId: 72057594046644480 ShardLocalIdx: 12 TxId_Deprecated: 12 TabletID: 72075186224037899 2025-03-31T16:16:13.567237Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute Tablet 72075186224037899 2025-03-31T16:16:13.567304Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Tablet(DataShard.72075186224037899.Leader.1) VolatileState: Running -> Stopped (Node 1) 2025-03-31T16:16:13.567396Z node 1 :HIVE 
DEBUG: HIVE#72057594037968897 Sending TEvStopTablet(DataShard.72075186224037899.Leader.1 gen 1) to node 1 2025-03-31T16:16:13.567490Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() result Status: OK Origin: 72057594037968897 TxId_Deprecated: 12 ShardOwnerId: 72057594046644480 ShardLocalIdx: 12 2025-03-31T16:16:13.569037Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Complete() SideEffects: {Notifications: 0x10080003 [1:7488008213863000994:2099] NKikimrLocal.TEvStopTablet TabletId: 72075186224037899 FollowerId: 0 Generation: 1,0x10040206 [1:7488008213863001185:2190] NKikimrHive.TEvDeleteTabletReply Status: OK Origin: 72057594037968897 TxId_Deprecated: 12 ShardOwnerId: 72057594046644480 ShardLocalIdx: 12 Actions: NKikimr::TTabletReqBlockBlobStorage} 2025-03-31T16:16:13.569208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 12 ShardOwnerId: 72057594046644480 ShardLocalIdx: 12, at schemeshard: 72057594046644480 2025-03-31T16:16:13.569396Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxBlockStorageResult::Execute(72075186224037899 OK) 2025-03-31T16:16:13.569446Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxBlockStorageResult::Complete(72075186224037899 OK) 2025-03-31T16:16:13.569485Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::Handle::TEvInitiateDeleteStorage TabletId=72075186224037899 2025-03-31T16:16:13.569492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 6] was 1 2025-03-31T16:16:13.569731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-03-31T16:16:13.569761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 6], at schemeshard: 72057594046644480 2025-03-31T16:16:13.569808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-03-31T16:16:13.569824Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTabletResult::Execute(72075186224037899 OK) 2025-03-31T16:16:13.570222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:12 2025-03-31T16:16:13.570263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:12 tabletId 72075186224037899 2025-03-31T16:16:13.570312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-03-31T16:16:13.571141Z node 1 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037899 state Offline 2025-03-31T16:16:13.571187Z node 1 :TX_DATASHARD INFO: OnTabletStop: 72075186224037899 reason = ReasonStop 2025-03-31T16:16:13.571217Z node 1 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037899, clientId# [1:7488008218157969390:2770], serverId# [1:7488008218157969391:2771], sessionId# [0:0:0] 2025-03-31T16:16:13.571301Z node 1 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037899, clientId# [1:7488008218157969215:2670], serverId# [1:7488008218157969238:2681], sessionId# [0:0:0] 2025-03-31T16:16:13.571329Z node 1 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037899, clientId# [1:7488008218157969243:2686], serverId# [1:7488008218157969244:2687], 
sessionId# [0:0:0] 2025-03-31T16:16:13.571536Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTabletResult(72075186224037899)::Complete SideEffects {} 2025-03-31T16:16:13.571562Z node 1 :TX_DATASHARD INFO: OnTabletDead: 72075186224037899 2025-03-31T16:16:13.571585Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus, TabletId: 72075186224037899 2025-03-31T16:16:13.571600Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037899 not found 2025-03-31T16:16:13.571633Z node 1 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037899 2025-03-31T16:16:13.583491Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessTabletBalancer MaxUsage=0.000000000 on #1 MinUsage=0.000000000 on #1 Scatter=0.000000000 2025-03-31T16:16:13.583571Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle BalancerOut 2025-03-31T16:16:15.276511Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008228423797014:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:15.276574Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cf1/r3tmp/tmpdKs81L/pdisk_1.dat 2025-03-31T16:16:15.383923Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:15.415965Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:15.416041Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:15.417452Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21399 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:15.566730Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:15.589338Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:20.276826Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488008228423797014:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:20.276938Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:16:23.792377Z node 2 :MINIKQL_ENGINE ERROR: Shard %72075186224037888, txid %281474976716360, engine error: Error executing transaction (read-only: 1): Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648) 2025-03-31T16:16:23.802954Z node 2 :TX_DATASHARD ERROR: Datashard execution error for [0:281474976716360] at 72075186224037888: Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648) 2025-03-31T16:16:23.805415Z node 2 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976716360 at tablet 72075186224037888 status: RESULT_UNAVAILABLE errors: REPLY_SIZE_EXCEEDED (Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648)) | 2025-03-31T16:16:23.833370Z node 2 :TX_PROXY ERROR: Actor# [2:7488008262783541787:5908] txid# 281474976716360 RESPONSE Status# ExecResultUnavailable marker# P13c DataShardErrors: [REPLY_SIZE_EXCEEDED] Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648) proxy error code: ExecResultUnavailable >> TLocksTest::Range_IncorrectDot1 >> TFlatTest::SelectRangeItemsLimit [GOOD] >> TFlatTest::SelectRangeForbidNullArgs4 >> TObjectStorageListingTest::ManyDeletes [GOOD] >> TLocksTest::Range_CorrectNullDot >> TLocksTest::GoodLock [GOOD] >> TLocksTest::GoodNullLock >> TLocksFatTest::RangeSetRemove [GOOD] >> TLocksFatTest::ShardLocks >> TObjectStorageListingTest::SuffixColumns [GOOD] >> TLocksFatTest::PointSetNotBreak >> TObjectStorageListingTest::MaxKeysAndSharding [GOOD] >> TObjectStorageListingTest::SchemaChecks >> TFlatTest::SelectRangeReverseItemsLimit [GOOD] >> TFlatTest::SelectRangeReverseIncludeKeys >> TLocksTest::SetLockFail [GOOD] >> TLocksTest::SetEraseSet >> TFlatTest::SplitEmptyToMany >> TFlatTest::WriteSplitByPartialKeyAndRead ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TObjectStorageListingTest::ManyDeletes [GOOD] Test command err: 2025-03-31T16:16:08.307236Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008198637450874:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:08.307314Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cfc/r3tmp/tmpEVHYMo/pdisk_1.dat 2025-03-31T16:16:08.584639Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11731, node 1 2025-03-31T16:16:08.639799Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:08.639907Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:08.641766Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:16:08.651462Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:16:08.651485Z node 1 
:NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:16:08.651491Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:16:08.651608Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61372 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:08.887881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:08.924809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:12.990257Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008216044518852:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:12.990350Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cfc/r3tmp/tmpx4fzdg/pdisk_1.dat 2025-03-31T16:16:13.098546Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1693, node 2 2025-03-31T16:16:13.124214Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:13.124301Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:13.128424Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:16:13.170898Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:16:13.170915Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:16:13.170919Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:16:13.171043Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63705 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:13.408993Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:13.429021Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... ..2025-03-31T16:16:17.990845Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488008216044518852:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:17.990953Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:16:20.068175Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-31T16:16:20.068323Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037890 2025-03-31T16:16:20.069123Z node 2 :TX_DATASHARD DEBUG: Prepared DataTx transaction txId 281474976716500 at tablet 72075186224037890 2025-03-31T16:16:20.069139Z node 2 :TX_DATASHARD DEBUG: Prepared DataTx transaction txId 281474976716500 at tablet 72075186224037889 2025-03-31T16:16:20.069424Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037891 2025-03-31T16:16:20.069454Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037892 2025-03-31T16:16:20.070010Z node 2 :TX_DATASHARD DEBUG: Prepared DataTx transaction txId 281474976716500 at tablet 72075186224037891 2025-03-31T16:16:20.070135Z node 2 :TX_DATASHARD DEBUG: Prepared DataTx transaction txId 281474976716500 at tablet 72075186224037892 2025-03-31T16:16:20.070529Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-03-31T16:16:20.070545Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037890 2025-03-31T16:16:20.071446Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037892 2025-03-31T16:16:20.071455Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037891 2025-03-31T16:16:20.077735Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976716500 at step 1743437780119 at tablet 72075186224037890 { Transactions { TxId: 281474976716500 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743437780119 MediatorID: 72057594046382081 TabletID: 72075186224037890 } 
2025-03-31T16:16:20.077753Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976716500 at step 1743437780119 at tablet 72075186224037892 { Transactions { TxId: 281474976716500 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743437780119 MediatorID: 72057594046382081 TabletID: 72075186224037892 } 2025-03-31T16:16:20.077766Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-03-31T16:16:20.077783Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-03-31T16:16:20.077862Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-03-31T16:16:20.077878Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:16:20.077897Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1743437780119:281474976716500] in PlanQueue unit at 72075186224037890 2025-03-31T16:16:20.077923Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037890 got data tx from cache 1743437780119:281474976716500 2025-03-31T16:16:20.077925Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037892 2025-03-31T16:16:20.077943Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:16:20.077966Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1743437780119:281474976716500] in PlanQueue unit at 72075186224037892 2025-03-31T16:16:20.078003Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037892 got data tx from cache 1743437780119:281474976716500 2025-03-31T16:16:20.079143Z node 2 :TX_DATASHARD DEBUG: tx 281474976716500 released its data 2025-03-31T16:16:20.079146Z node 2 :TX_DATASHARD DEBUG: tx 281474976716500 released its data 2025-03-31T16:16:20.079179Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:16:20.079181Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:16:20.079549Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976716500 at step 1743437780119 at tablet 72075186224037891 { Transactions { TxId: 281474976716500 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743437780119 MediatorID: 72057594046382081 TabletID: 72075186224037891 } 2025-03-31T16:16:20.079566Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-03-31T16:16:20.079568Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976716500 at step 1743437780119 at tablet 72075186224037889 { Transactions { TxId: 281474976716500 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743437780119 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2025-03-31T16:16:20.079582Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-31T16:16:20.079651Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037891 2025-03-31T16:16:20.079663Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:16:20.079673Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-31T16:16:20.079680Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1743437780119:281474976716500] in PlanQueue unit at 72075186224037891 2025-03-31T16:16:20.079685Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 
active 0 active planned 0 immediate 0 planned 1 2025-03 ... D: 72075186224037891 } 2025-03-31T16:16:26.464145Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-03-31T16:16:26.464153Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-03-31T16:16:26.464244Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037891 2025-03-31T16:16:26.464257Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:16:26.464260Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037892 2025-03-31T16:16:26.464272Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1743437786510:281474976716911] in PlanQueue unit at 72075186224037891 2025-03-31T16:16:26.464273Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:16:26.464288Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1743437786510:281474976716911] in PlanQueue unit at 72075186224037892 2025-03-31T16:16:26.464298Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037891 got data tx from cache 1743437786510:281474976716911 2025-03-31T16:16:26.464327Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037892 got data tx from cache 1743437786510:281474976716911 2025-03-31T16:16:26.464973Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:16:26.465205Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 1743437786510} 2025-03-31T16:16:26.465283Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-31T16:16:26.465324Z node 2 :TX_DATASHARD DEBUG: tx 281474976716911 released its data 2025-03-31T16:16:26.465357Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:16:26.465558Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037890 step# 1743437786510} 2025-03-31T16:16:26.465602Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-03-31T16:16:26.465850Z node 2 :TX_DATASHARD DEBUG: tx 281474976716911 at 72075186224037889 restored its data 2025-03-31T16:16:26.466215Z node 2 :TX_DATASHARD DEBUG: tx 281474976716911 at 72075186224037890 restored its data 2025-03-31T16:16:26.466878Z node 2 :TX_DATASHARD DEBUG: tx 281474976716911 released its data 2025-03-31T16:16:26.466903Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:16:26.467030Z node 2 :TX_DATASHARD DEBUG: tx 281474976716911 released its data 2025-03-31T16:16:26.467060Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:16:26.467203Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037891 step# 1743437786510} 2025-03-31T16:16:26.467241Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037891 2025-03-31T16:16:26.467282Z node 2 :TX_DATASHARD DEBUG: Complete [1743437786510 : 281474976716911] from 72075186224037891 at tablet 72075186224037891 send result to client [2:7488008276174075319:11457], exec latency: 0 ms, propose latency: 2 ms 2025-03-31T16:16:26.467309Z node 2 :TX_DATASHARD 
DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-03-31T16:16:26.467503Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037892 step# 1743437786510} 2025-03-31T16:16:26.467614Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037892 2025-03-31T16:16:26.468136Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-03-31T16:16:26.468384Z node 2 :TX_DATASHARD DEBUG: tx 281474976716911 at 72075186224037892 restored its data 2025-03-31T16:16:26.468847Z node 2 :TX_DATASHARD DEBUG: tx 281474976716911 at 72075186224037890 restored its data 2025-03-31T16:16:26.469293Z node 2 :TX_DATASHARD DEBUG: tx 281474976716911 released its data 2025-03-31T16:16:26.469326Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:16:26.469544Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:16:26.469781Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037892 2025-03-31T16:16:26.469792Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-31T16:16:26.470228Z node 2 :TX_DATASHARD DEBUG: tx 281474976716911 at 72075186224037892 restored its data 2025-03-31T16:16:26.470378Z node 2 :TX_DATASHARD DEBUG: tx 281474976716911 at 72075186224037889 restored its data 2025-03-31T16:16:26.470750Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:16:26.471060Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-03-31T16:16:26.471115Z node 2 :TX_DATASHARD DEBUG: Complete [1743437786510 : 281474976716911] from 72075186224037890 at tablet 72075186224037890 send result to client [2:7488008276174075319:11457], exec latency: 6 ms, propose latency: 8 ms 2025-03-31T16:16:26.471135Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-03-31T16:16:26.471974Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037892 2025-03-31T16:16:26.472008Z node 2 :TX_DATASHARD DEBUG: Complete [1743437786510 : 281474976716911] from 72075186224037892 at tablet 72075186224037892 send result to client [2:7488008276174075319:11457], exec latency: 6 ms, propose latency: 7 ms 2025-03-31T16:16:26.472025Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-03-31T16:16:26.472699Z node 2 :TX_DATASHARD DEBUG: tx 281474976716911 released its data 2025-03-31T16:16:26.472732Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:16:26.484037Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-31T16:16:26.484889Z node 2 :TX_DATASHARD DEBUG: tx 281474976716911 at 72075186224037889 restored its data 2025-03-31T16:16:26.490758Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:16:26.494916Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-03-31T16:16:26.494978Z node 2 :TX_DATASHARD DEBUG: Complete [1743437786510 : 281474976716911] from 72075186224037889 at tablet 72075186224037889 send result to client [2:7488008276174075319:11457], exec latency: 28 ms, 
propose latency: 32 ms 2025-03-31T16:16:26.495006Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-31T16:16:26.511744Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 0 last path: "" contents: 0 common prefixes: 0 2025-03-31T16:16:26.516897Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 1 last path: "" contents: 0 common prefixes: 0 2025-03-31T16:16:26.517475Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 2 last path: "" contents: 0 common prefixes: 0 2025-03-31T16:16:26.517928Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 3 last path: "" contents: 0 common prefixes: 0 2025-03-31T16:16:26.518107Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 S3 Listing: finished status: 0 description: "" contents: 0 common prefixes: 1 2025-03-31T16:16:26.518504Z node 2 :TX_DATASHARD DEBUG: 72075186224037891 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 0 last path: "" contents: 0 common prefixes: 0 2025-03-31T16:16:26.518613Z node 2 :TX_DATASHARD DEBUG: 72075186224037891 S3 Listing: finished status: 0 description: "" contents: 0 common prefixes: 0 2025-03-31T16:16:26.518867Z node 2 :TX_DATASHARD DEBUG: 72075186224037892 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 0 last path: "" contents: 0 common prefixes: 0 2025-03-31T16:16:26.519385Z node 2 :TX_DATASHARD DEBUG: 72075186224037892 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 1 last path: "" contents: 0 common prefixes: 0 2025-03-31T16:16:26.519871Z node 2 :TX_DATASHARD DEBUG: 72075186224037892 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 2 last path: "" contents: 0 common prefixes: 0 2025-03-31T16:16:26.520351Z node 2 :TX_DATASHARD DEBUG: 
72075186224037892 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/Godfather.avi") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 3 last path: "/Videos/Godfather.avi" contents: 2 common prefixes: 0 2025-03-31T16:16:26.520760Z node 2 :TX_DATASHARD DEBUG: 72075186224037892 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/House of Cards/Season 1/Chapter 1.avi") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 4 last path: "/Videos/House of Cards/Season 1/Chapter 1.avi" contents: 3 common prefixes: 1 2025-03-31T16:16:26.521214Z node 2 :TX_DATASHARD DEBUG: 72075186224037892 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/Terminator 2.avi") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 5 last path: "/Videos/Terminator 2.avi" contents: 4 common prefixes: 1 2025-03-31T16:16:26.521368Z node 2 :TX_DATASHARD DEBUG: 72075186224037892 S3 Listing: finished status: 0 description: "" contents: 4 common prefixes: 1 >> TObjectStorageListingTest::TestFilter [GOOD] >> TObjectStorageListingTest::TestSkipShards >> TFlatTest::CopyTableDropOriginalAndReturnPartAfterCompaction [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TObjectStorageListingTest::SuffixColumns [GOOD] Test command err: 2025-03-31T16:16:21.471970Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008254261309914:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:21.472112Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cdd/r3tmp/tmpgexC5m/pdisk_1.dat TServer::EnableGrpc on GrpcPort 26754, node 1 2025-03-31T16:16:21.824810Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-03-31T16:16:21.826569Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-03-31T16:16:21.848231Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:21.848558Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:21.850723Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:16:21.856151Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:21.870477Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:16:21.870504Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:16:21.870510Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:16:21.870617Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9394 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:22.197648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:22.232544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /dc-1/Dir/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743437782401 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "Hash" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Path" ... (TRUNCATED) waiting... TClient::Ls request: /dc-1/Dir/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743437782401 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "Hash" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Path" ... 
(TRUNCATED) 2025-03-31T16:16:24.595642Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008267295972235:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:24.595710Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cdd/r3tmp/tmp3cYVWc/pdisk_1.dat 2025-03-31T16:16:24.681978Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3122, node 2 2025-03-31T16:16:24.728821Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:24.728938Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:24.732579Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:16:24.747018Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:16:24.747050Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:16:24.747059Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:16:24.747194Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5685 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:24.949761Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:24.974295Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:25.361674Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553163, Sender [2:7488008271590940865:2483], Recipient [2:7488008267295972918:2313]: NKikimrTxDataShard.TEvObjectStorageListingRequest TableId: 3 SerializedKeyPrefix: "\002\000\010\000\000\0002\000\000\000\000\000\000\000\010\000\000\000Bucket50" PathColumnPrefix: "Music/AC DC/" PathColumnDelimiter: "/" SerializedStartAfterKeySuffix: "\002\000\037\000\000\000Music/AC DC/Shoot to Thrill.mp3\010\000\000\000B\000\000\000\000\000\000\000" ColumnsToReturn: 3 ColumnsToReturn: 4 ColumnsToReturn: 6 MaxKeys: 10 2025-03-31T16:16:25.361717Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvObjectStorageListingRequest 2025-03-31T16:16:25.361967Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: start at key ((type:4, value:"2\0\0\0\0\0\0\0") (type:4608, value:"Bucket50") (type:4608, value:"Music/AC DC/Shoot to Thrill.mp3") (type:4, value:"B\0\0\0\0\0\0\0")), end at key ((type:4, value:"2\0\0\0\0\0\0\0") (type:4608, value:"Bucket50") (type:4608, value:"Music/AC DC0") (type:0)) restarted: 0 last path: "" contents: 0 common prefixes: 0 2025-03-31T16:16:25.362198Z node 2 :TX_DATASHARD TRACE: 72075186224037888 S3 Listing: "Music/AC DC/Shoot to Thrill.mp3" -> (Utf8 : Music/AC DC/Shoot to Thrill.mp3, Uint64 : 77, String : ) 2025-03-31T16:16:25.362254Z node 2 :TX_DATASHARD TRACE: 72075186224037888 S3 Listing: "Music/AC DC/Shoot to Thrill.mp3" -> (Utf8 : Music/AC DC/Shoot to Thrill.mp3, Uint64 : 88, String : ) 2025-03-31T16:16:25.362287Z node 2 :TX_DATASHARD TRACE: 72075186224037888 S3 Listing: "Music/AC DC/Shoot to Thrill.mp3" -> (Utf8 : Music/AC DC/Shoot to Thrill.mp3, Uint64 : 666, String : ) 2025-03-31T16:16:25.362320Z node 2 :TX_DATASHARD TRACE: 72075186224037888 S3 Listing: "Music/AC DC/Thunderstruck.mp3" -> (Utf8 : Music/AC DC/Thunderstruck.mp3, Uint64 : 1, String : ) 2025-03-31T16:16:25.362355Z node 2 :TX_DATASHARD TRACE: 72075186224037888 S3 Listing: "Music/AC DC/Thunderstruck.mp3" -> (Utf8 : Music/AC DC/Thunderstruck.mp3, Uint64 : 66, String : ) 2025-03-31T16:16:25.362423Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: finished status: 0 description: "" contents: 5 common prefixes: 0 2025-03-31T16:16:25.371446Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553163, Sender [2:7488008271590940869:2484], Recipient [2:7488008267295972918:2313]: NKikimrTxDataShard.TEvObjectStorageListingRequest TableId: 3 SerializedKeyPrefix: "\002\000\010\000\000\0002\000\000\000\000\000\000\000\010\000\000\000Bucket50" PathColumnPrefix: "Music/AC DC/" PathColumnDelimiter: "/" SerializedStartAfterKeySuffix: "\001\000\037\000\000\000Music/AC DC/Shoot to Thrill.mp3" ColumnsToReturn: 3 ColumnsToReturn: 4 ColumnsToReturn: 5 MaxKeys: 10 2025-03-31T16:16:25.371479Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvObjectStorageListingRequest 2025-03-31T16:16:25.371638Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: start at key ((type:4, value:"2\0\0\0\0\0\0\0") (type:4608, value:"Bucket50") (type:4608, value:"Music/AC DC/Shoot to Thrill.mp3")), end at key ((type:4, value:"2\0\0\0\0\0\0\0") (type:4608, value:"Bucket50") (type:4608, value:"Music/AC DC0") (type:0)) restarted: 0 last path: "" contents: 0 common prefixes: 0 2025-03-31T16:16:25.371822Z node 2 :TX_DATASHARD TRACE: 72075186224037888 S3 Listing: "Music/AC DC/Thunderstruck.mp3" -> (Utf8 : Music/AC DC/Thunderstruck.mp3, Uint64 : 1, Uint64 : 10) 2025-03-31T16:16:25.371859Z node 2 :TX_DATASHARD TRACE: 
72075186224037888 S3 Listing: "Music/AC DC/Thunderstruck.mp3" -> (Utf8 : Music/AC DC/Thunderstruck.mp3, Uint64 : 66, Uint64 : 10) 2025-03-31T16:16:25.371923Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: finished status: 0 description: "" contents: 2 common prefixes: 0 >> TLocksTest::SetLockNothing [GOOD] >> TFlatTest::LargeProxyReply [GOOD] >> TFlatTest::LargeProxyReplyRW >> TFlatTest::SelectRangeForbidNullArgs4 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::CopyTableDropOriginalAndReturnPartAfterCompaction [GOOD] Test command err: 2025-03-31T16:16:22.822599Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008257514576390:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:22.822656Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cd8/r3tmp/tmpgrUvDl/pdisk_1.dat 2025-03-31T16:16:23.106726Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:23.188844Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:23.188983Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:23.190676Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22516 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:23.385203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:23.419006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:23.540162Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-03-31T16:16:23.543808Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-03-31T16:16:23.565330Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2025-03-31T16:16:23.570147Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743437783514 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "unused004" Type: "Float" TypeId: 33 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name... (TRUNCATED) Copy TableOld to Table 2025-03-31T16:16:23.672794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/dc-1/Dir" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table" PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 100000 InMemStepsToSnapshot: 2 InMemForceStepsToSnapshot: 3 InMemForceSizeToSnapshot: 1000000 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 200000 ReadAheadLoThreshold: 100000 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 10000 CountToCompact: 2 ForceCountToCompact: 2 ForceSizeToCompact: 20000 CompactionBrokerQueue: 1 KeepInCache: true } } ColumnFamilies { Id: 0 ColumnCache: ColumnCacheNone Storage: ColumnStorageTest_1_2_1k } } CopyFromTable: "/dc-1/Dir/TableOld" } } TxId: 281474976710676 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-03-31T16:16:23.673229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCopyTable Propose, path: /dc-1/Dir/Table, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-31T16:16:23.673676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 2], parent name: Dir, child name: Table, child id: [OwnerId: 72057594046644480, LocalPathId: 4], at schemeshard: 72057594046644480 2025-03-31T16:16:23.673730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 0 2025-03-31T16:16:23.673795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction source path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-03-31T16:16:23.673831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 1 2025-03-31T16:16:23.673866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 2 2025-03-31T16:16:23.673998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 3 2025-03-31T16:16:23.674100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710676:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-31T16:16:23.674855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-03-31T16:16:23.674915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 4 2025-03-31T16:16:23.675483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710676, response: Status: StatusAccepted TxId: 281474976710676 SchemeshardId: 72057594046644480 PathId: 4, at schemeshard: 72057594046644480 2025-03-31T16:16:23.675611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710676, database: /dc-1, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /dc-1/Dir/Table 2025-03-31T16:16:23.675797Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-03-31T16:16:23.675818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710676, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2025-03-31T16:16:23.675955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710676, path id: [OwnerId: 72057594046644480, LocalPathId: 4] 2025-03-31T16:16:23.676040Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-03-31T16:16:23.676068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7488008261809544204:2241], at schemeshard: 72057594046644480, txId: 281474976710676, path id: 2 2025-03-31T16:16:23.676092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7488008261809544204:2241], at schemeshard: 72057594046644480, txId: 281474976710676, path id: 4 2025-03-31T16:16:23.676126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-31T16:16:23.676184Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710676:0 ProgressState, operation type: TxCopyTable, at tablet# 72057594046644480 waiting... 
2025-03-31T16:16:23.676528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710676:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 3 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 4 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-03-31T16:16:23.676662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710676:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 4 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 4 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-03-31T16:16:23.677052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710676, at schemeshard: 72057594046644480 2025-03-31T16:16:23.677079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710676, ready parts: 0/1, is published: false 2025-03-31T16:16:23.677096Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710676, at schemeshard: 72057594046644480 2025-03-31T16:16:23.677169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710676:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:3 msg type: 268697601 2025-03-31T16:16:23.677292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710676:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:4 msg type: 268697601 2025-03-31T16:16:23.677353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710676, partId: 0, tablet: 72057594037968897 2025-03-31T16:16:23.677395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 281474976710676, shardIdx: 72057594046644480:3, partId: 0 2025-03-31T16:16:23.677411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 281474976710676, shardIdx: 72057594046644480:4, partId: 0 2025-03-31T16:16:23.679170Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710676 2025-03-31T16:16:23.679325Z node 1 :F ... 
:26.897829Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715784:0, at schemeshard: 72057594046644480 2025-03-31T16:16:26.897847Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 281474976715784:0 ProgressState, at schemeshard: 72057594046644480 2025-03-31T16:16:26.898112Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 4 2025-03-31T16:16:26.898216Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715784:0 progress is 1/1 2025-03-31T16:16:26.898228Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715784 ready parts: 1/1 2025-03-31T16:16:26.898244Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715784:0 progress is 1/1 2025-03-31T16:16:26.898254Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715784 ready parts: 1/1 2025-03-31T16:16:26.898267Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715784, ready parts: 1/1, is published: true 2025-03-31T16:16:26.898303Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:7488008276705446790:2687] message: TxId: 281474976715784 2025-03-31T16:16:26.898318Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715784 ready parts: 1/1 2025-03-31T16:16:26.898333Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715784:0 2025-03-31T16:16:26.898343Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715784:0 2025-03-31T16:16:26.898371Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715784 datashard 72075186224037891 state PreOffline 2025-03-31T16:16:26.898396Z node 2 :TX_DATASHARD DEBUG: 72075186224037891 Got TEvSchemaChangedResult from SS at 72075186224037891 2025-03-31T16:16:26.898420Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 3 2025-03-31T16:16:26.898500Z node 2 :TX_DATASHARD DEBUG: 72075186224037891 parts [ [72075186224037889:1:16:1:12288:306:0] [72075186224037889:1:23:1:12288:253:0] ] return ack processed 2025-03-31T16:16:26.898538Z node 2 :TX_DATASHARD DEBUG: 72075186224037891 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-03-31T16:16:26.898590Z node 2 :TX_DATASHARD INFO: 72075186224037891 Initiating switch from PreOffline to Offline state 2025-03-31T16:16:26.899931Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715784 datashard 72075186224037890 state PreOffline 2025-03-31T16:16:26.899967Z node 2 :TX_DATASHARD DEBUG: 72075186224037890 Got TEvSchemaChangedResult from SS at 72075186224037890 2025-03-31T16:16:26.900074Z node 2 :TX_DATASHARD INFO: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2025-03-31T16:16:26.900229Z node 2 :TX_DATASHARD INFO: 72075186224037891 Reporting state Offline to schemeshard 72057594046644480 2025-03-31T16:16:26.900290Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037889, clientId# [2:7488008276705446815:2690], serverId# [2:7488008276705446816:3442], sessionId# [0:0:0] Check that tablet 72075186224037888 was deleted 2025-03-31T16:16:26.900734Z node 2 :FLAT_TX_SCHEMESHARD 
DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7488008272410477793 RawX2: 4503608217307386 } TabletId: 72075186224037889 State: 4 2025-03-31T16:16:26.900788Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2025-03-31T16:16:26.900958Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7488008276705445385 RawX2: 4503608217307443 } TabletId: 72075186224037891 State: 4 2025-03-31T16:16:26.901002Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2025-03-31T16:16:26.901027Z node 2 :TX_DATASHARD DEBUG: 72075186224037890 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-03-31T16:16:26.901089Z node 2 :TX_DATASHARD INFO: 72075186224037890 Initiating switch from PreOffline to Offline state 2025-03-31T16:16:26.901263Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2025-03-31T16:16:26.901363Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037888) 2025-03-31T16:16:26.901509Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 Check that tablet 72075186224037889 was deleted 2025-03-31T16:16:26.902126Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037889) Check that tablet 72075186224037890 was deleted 2025-03-31T16:16:26.902465Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2025-03-31T16:16:26.902502Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2025-03-31T16:16:26.902721Z node 2 :TX_DATASHARD INFO: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2025-03-31T16:16:26.902913Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7488008276705445383 RawX2: 4503608217307442 } TabletId: 72075186224037890 State: 4 2025-03-31T16:16:26.902965Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-03-31T16:16:26.903019Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037889 reason = ReasonStop 2025-03-31T16:16:26.903045Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037891 reason = ReasonStop 2025-03-31T16:16:26.903156Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-03-31T16:16:26.903378Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-03-31T16:16:26.903454Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037889 2025-03-31T16:16:26.903536Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037889 2025-03-31T16:16:26.903560Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-03-31T16:16:26.903705Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 2 2025-03-31T16:16:26.903809Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-03-31T16:16:26.903831Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-03-31T16:16:26.903869Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-03-31T16:16:26.904112Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-03-31T16:16:26.904160Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2025-03-31T16:16:26.904178Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found 2025-03-31T16:16:26.904493Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2 2025-03-31T16:16:26.904519Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-03-31T16:16:26.904553Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2025-03-31T16:16:26.904569Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-03-31T16:16:26.904598Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-03-31T16:16:26.904945Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037891 2025-03-31T16:16:26.905009Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037891 2025-03-31T16:16:26.905616Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-03-31T16:16:26.905791Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 1 2025-03-31T16:16:26.905931Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-03-31T16:16:26.905956Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 4], at schemeshard: 72057594046644480 2025-03-31T16:16:26.905988Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-03-31T16:16:26.906262Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2025-03-31T16:16:26.906270Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-03-31T16:16:26.906287Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-03-31T16:16:26.906302Z node 2 :TX_DATASHARD INFO: 
OnTabletStop: 72075186224037890 reason = ReasonStop 2025-03-31T16:16:26.906322Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-03-31T16:16:26.906333Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037890, clientId# [2:7488008276705445568:2626], serverId# [2:7488008276705445569:2627], sessionId# [0:0:0] 2025-03-31T16:16:26.906348Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037890, clientId# [2:7488008276705445458:2550], serverId# [2:7488008276705445459:2551], sessionId# [0:0:0] 2025-03-31T16:16:26.906582Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037890 2025-03-31T16:16:26.906636Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037890 2025-03-31T16:16:26.906790Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2025-03-31T16:16:27.203272Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037890) Check that tablet 72075186224037891 was deleted 2025-03-31T16:16:27.204010Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037891) >> TFlatTest::SelectRangeReverseIncludeKeys [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::LayoutIncorrect [FAIL] Test command err: 2025-03-31T16:15:48.729235Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:700:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:48.729511Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:48.729605Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:15:48.730843Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:697:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:48.731206Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:48.731316Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d56/r3tmp/tmpeRpH5Q/pdisk_1.dat 2025-03-31T16:15:49.043762Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13721, node 1 TClient is connected to server localhost:3922 2025-03-31T16:15:49.364390Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:15:49.364431Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:15:49.364467Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:15:49.364693Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:15:55.157431Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:284:2217], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:55.157863Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:15:55.157979Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:55.158945Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:700:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:55.159190Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:55.159292Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d56/r3tmp/tmp7EUMPk/pdisk_1.dat 2025-03-31T16:15:55.391123Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62065, node 3 TClient is connected to server localhost:11422 2025-03-31T16:15:55.699530Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:15:55.699587Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:15:55.699625Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:15:55.700133Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:15:59.257134Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:382:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:59.257403Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:15:59.257454Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d56/r3tmp/tmpMs89Su/pdisk_1.dat 2025-03-31T16:15:59.491803Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2180, node 5 TClient is connected to server localhost:12193 2025-03-31T16:15:59.802223Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:15:59.802281Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:15:59.802317Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:15:59.802880Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:16:04.167830Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:16:04.168202Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:16:04.168361Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d56/r3tmp/tmpbpLTsf/pdisk_1.dat 2025-03-31T16:16:04.530601Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29601, node 7 TClient is connected to server localhost:9549 2025-03-31T16:16:05.062288Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:16:05.062351Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:16:05.062406Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:16:05.062856Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:16:05.132996Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:05.133184Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:05.148688Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:16:15.775820Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:452:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:16:15.775972Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:16:15.776035Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d56/r3tmp/tmpJe5OcV/pdisk_1.dat 2025-03-31T16:16:16.102167Z node 9 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27089, node 9 TClient is connected to server localhost:27917 2025-03-31T16:16:16.531790Z node 9 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:16:16.531856Z node 9 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:16:16.531885Z node 9 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:16:16.532369Z node 9 :NET_CLASSIFIER ERROR: got bad distributable configuration assertion failed at ydb/core/health_check/health_check_ut.cpp:2275, virtual void NKikimr::NTestSuiteTHealthCheckTest::TTestCaseLayoutIncorrect::Execute_(NUnitTest::TTestContext &): (issue_log.message() == "Database has storage issues") failed: ("Database has multiple issues" != Database has storage issues) , with diff: ("|)Database has (mul|s)t(ipl|orag)e issues("|) TBackTrace::Capture()+28 (0x18D5BEFC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x192194C0) NKikimr::NTestSuiteTHealthCheckTest::TTestCaseLayoutIncorrect::Execute_(NUnitTest::TTestContext&)+45740 (0x1886024C) std::__y1::__function::__func, void ()>::operator()()+280 (0x1898A1E8) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x192504E6) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x19220039) NKikimr::NTestSuiteTHealthCheckTest::TCurrentTest::Execute()+1204 (0x18989194) NUnitTest::TTestFactory::Execute()+2438 (0x19221906) NUnitTest::RunMain(int, char**)+5213 (0x1924AA5D) ??+0 (0x7F848E0FED90) __libc_start_main+128 (0x7F848E0FEE40) _start+41 (0x16139029) >> TLocksTest::GoodSameKeyLock |87.5%| [TA] $(B)/ydb/core/health_check/ut/test-results/unittest/{meta.json ... results_accumulator.log} |87.5%| [TA] {RESULT} $(B)/ydb/core/health_check/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::SetLockNothing [GOOD] Test command err: 2025-03-31T16:16:05.180216Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008185212498082:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:05.180408Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001d01/r3tmp/tmpOJl5oc/pdisk_1.dat 2025-03-31T16:16:05.502759Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:05.583388Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:05.583511Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:05.585305Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20878 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:05.748192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:05.775630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:05.912820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:05.982358Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:07.975511Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008192085926249:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:07.975578Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001d01/r3tmp/tmpe6TzwR/pdisk_1.dat 2025-03-31T16:16:08.082267Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:08.111546Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:08.111676Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:08.113145Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:63866 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-03-31T16:16:08.284052Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:08.302295Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:08.354234Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:08.405649Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:11.275565Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488008209920782662:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:11.275632Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001d01/r3tmp/tmpu9UGp9/pdisk_1.dat 2025-03-31T16:16:11.388789Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:11.419288Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:11.419367Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:11.420775Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12221 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:11.585431Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:11.606930Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:11.662573Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:11.732955Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:14.827114Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488008223692855506:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:14.827173Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001d01/r3tmp/tmpb7kJbM/pdisk_1.dat 2025-03-31T16:16:14.939700Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:14.968487Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:14.968580Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:14.969859Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19518 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:15.127511Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:15.148848Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:15.203985Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:15.272667Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:18.342903Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7488008242842966857:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:18.342981Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001d01/r3tmp/tmpRlRm9d/pdisk_1.dat 2025-03-31T16:16:18.437397Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:18.481098Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:18.481193Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:18.482573Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8337 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:18.681399Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:18.705258Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:18.762044Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-31T16:16:18.812483Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:21.848548Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7488008255502353270:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:21.848679Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001d01/r3tmp/tmpNgb2ld/pdisk_1.dat 2025-03-31T16:16:21.962014Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:21.990592Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:21.990694Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:21.995980Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9878 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:22.212656Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:22.232894Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:22.307053Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:22.354266Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:25.637154Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7488008269305138710:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:25.637268Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001d01/r3tmp/tmpkuklmt/pdisk_1.dat 2025-03-31T16:16:25.732424Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:25.772306Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:25.772395Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:25.774159Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24038 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:25.987830Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:26.006674Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:26.080128Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:26.136897Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
>> TLocksTest::BrokenLockErase [GOOD] >> TLocksTest::BrokenDupLock >> TObjectStorageListingTest::SchemaChecks [GOOD] >> TLocksTest::Range_Pinhole >> TFlatTest::WriteSplitByPartialKeyAndRead [GOOD] >> TFlatTest::WriteSplitAndReadFromFollower ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeForbidNullArgs4 [GOOD] Test command err: 2025-03-31T16:16:23.684352Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008264150341604:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:23.684516Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cd5/r3tmp/tmprMpnqU/pdisk_1.dat 2025-03-31T16:16:23.975106Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:24.034472Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:24.034605Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:24.036463Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18988 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:24.247712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:24.279200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:26.811045Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008276147257516:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:26.811153Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cd5/r3tmp/tmp6moEZb/pdisk_1.dat 2025-03-31T16:16:26.922737Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:26.948999Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:26.949081Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:26.951041Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3022 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:27.086506Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:27.103133Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
>> TObjectStorageListingTest::TestSkipShards [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeReverseIncludeKeys [GOOD] Test command err: 2025-03-31T16:16:25.186809Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008273353362602:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:25.186919Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001ccf/r3tmp/tmpWsRT0A/pdisk_1.dat 2025-03-31T16:16:25.505164Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:25.550974Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:25.551178Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:25.552703Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1045 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:25.693324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:25.721393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:27.969097Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008280278611273:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:27.969208Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001ccf/r3tmp/tmpIY6OsD/pdisk_1.dat 2025-03-31T16:16:28.074396Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:28.106695Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:28.106795Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:28.108546Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2658 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:28.229756Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:28.244694Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
>> TLocksTest::BrokenSameKeyLock >> TFlatTest::SelectRangeBytesLimit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TObjectStorageListingTest::SchemaChecks [GOOD] Test command err: 2025-03-31T16:16:06.101204Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008191340545428:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:06.101420Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cff/r3tmp/tmpCV3Gvs/pdisk_1.dat 2025-03-31T16:16:06.421434Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16329, node 1 2025-03-31T16:16:06.492903Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:06.493083Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:06.494969Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:16:06.620991Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:16:06.621019Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:16:06.621201Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:16:06.621324Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4723 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:07.125077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:07.161905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:11.100974Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008191340545428:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:11.101074Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:16:21.420887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:16:21.420934Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:27.978105Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008279662789891:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:27.978176Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cff/r3tmp/tmpxIzu7B/pdisk_1.dat 2025-03-31T16:16:28.095164Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:28.162722Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:28.162822Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:28.166022Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4270, node 2 2025-03-31T16:16:28.209911Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:16:28.209938Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:16:28.209964Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:16:28.210157Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28749 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:28.432923Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:28.460784Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
>> KqpJoinOrder::TPCDS88+ColumnStore [GOOD] >> TFlatTest::CopyTableAndRead ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TObjectStorageListingTest::TestSkipShards [GOOD] Test command err: 2025-03-31T16:16:25.967409Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008273124031518:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:25.967524Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001ccc/r3tmp/tmpf8t9Zr/pdisk_1.dat 2025-03-31T16:16:26.274245Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2358, node 1 2025-03-31T16:16:26.330100Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:26.330239Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:26.339512Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:16:26.352910Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:16:26.352959Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:16:26.352972Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:16:26.353112Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4826 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:26.612189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:26.650782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001ccc/r3tmp/tmprMnxFH/pdisk_1.dat TServer::EnableGrpc on GrpcPort 28765, node 2 TClient is connected to server localhost:22324 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... waiting... waiting... >> TLocksFatTest::ShardLocks [GOOD] >> TLocksFatTest::PointSetNotBreak [GOOD] >> TLocksFatTest::PointSetRemove >> TLocksTest::GoodDupLock >> TFlatTest::ShardUnfreezeNonFrozen >> TLocksTest::Range_BrokenLock0 [GOOD] >> TLocksTest::Range_BrokenLock1 >> TLocksTest::Range_BrokenLock3 [GOOD] >> TLocksTest::SetEraseSet [GOOD] >> TFlatTest::LargeProxyReplyRW [GOOD] >> TLocksTest::Range_BrokenLockMax [GOOD] >> TLocksTest::Range_CorrectDot >> TLocksFatTest::RangeSetBreak ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksFatTest::ShardLocks [GOOD] Test command err: 2025-03-31T16:16:19.995429Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008246807726050:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:19.995770Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001ce0/r3tmp/tmp11isWA/pdisk_1.dat 2025-03-31T16:16:20.328643Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:20.367557Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:20.367696Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:20.369306Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25871 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-03-31T16:16:20.557201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:20.580027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:20.681243Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:20.748766Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:23.775936Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008261870304412:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:23.776066Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001ce0/r3tmp/tmpYL286P/pdisk_1.dat 2025-03-31T16:16:23.853710Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:23.903280Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:23.903361Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:23.904914Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6601 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:24.041436Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:24.058016Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:24.126077Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:24.170566Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:27.415291Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488008278880896966:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:27.415352Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001ce0/r3tmp/tmpQVcGKK/pdisk_1.dat 2025-03-31T16:16:27.509968Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:27.544519Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:27.544610Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:27.546265Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21550 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:27.748389Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:27.769495Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:27.840504Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:27.907979Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:30.456390Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488008294565325178:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:30.456445Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001ce0/r3tmp/tmpOIvuWQ/pdisk_1.dat 2025-03-31T16:16:30.546727Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:30.590011Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:30.590149Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:30.592125Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6133 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:30.785996Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:30.806538Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:30.855153Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:30.901120Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
>> TFlatTest::WriteSplitAndReadFromFollower [GOOD] >> TLocksTest::CK_GoodLock [GOOD] >> TLocksTest::CK_BrokenLock >> TFlatTest::SplitInvalidPath >> Viewer::StorageGroupOutputWithSpaceCheckDependsOnVDiskSpaceStatus [GOOD] >> Viewer::StorageGroupOutputWithSpaceCheckDependsOnUsage >> TFlatTest::SelectRangeBytesLimit [GOOD] >> TFlatTest::SelectRangeForbidNullArgs1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS88+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 21694, MsgBus: 8290 2025-03-31T16:14:20.053453Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488007735759135710:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:14:20.053517Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001993/r3tmp/tmpyc6MBq/pdisk_1.dat 2025-03-31T16:14:20.404431Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21694, node 1 2025-03-31T16:14:20.457654Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:14:20.458421Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:14:20.465034Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:14:20.524132Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:14:20.524164Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:14:20.524183Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:14:20.524336Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8290 TClient is connected to server localhost:8290 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:14:21.013167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:14:23.031883Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007748644038263:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:23.032009Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007748644038268:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:23.032084Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:23.035172Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:14:23.044429Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007748644038277:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:14:23.140398Z node 1 :TX_PROXY ERROR: Actor# [1:7488007748644038330:2337] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:14:23.440815Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:14:23.668253Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488007748644038592:2351];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:14:23.668253Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488007748644038589:2350];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:14:23.668440Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488007748644038589:2350];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:14:23.668729Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488007748644038589:2350];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:14:23.668868Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488007748644038589:2350];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:14:23.668982Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488007748644038589:2350];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:14:23.669103Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488007748644038589:2350];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:14:23.669236Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488007748644038589:2350];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:14:23.669379Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488007748644038589:2350];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:14:23.669521Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488007748644038589:2350];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:14:23.669661Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037901;self_id=[1:7488007748644038589:2350];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:14:23.669797Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488007748644038589:2350];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:14:23.669917Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488007748644038589:2350];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:14:23.670869Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488007748644038592:2351];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:14:23.671111Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488007748644038592:2351];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:14:23.671237Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488007748644038592:2351];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:14:23.671349Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488007748644038592:2351];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:14:23.671465Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488007748644038592:2351];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:14:23.671582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488007748644038592:2351];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:14:23.671702Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488007748644038592:2351];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:14:23.671864Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488007748644038592:2351];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:14:23.671982Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488007748644038592:2351];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:14:23.672094Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488007748644038592:2351];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:14:23.672202Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037902;self_id=[1:7488007748644038592:2351];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:14:23.709949Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7488007748644038583:2348];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:14:23.709949Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488007748644038652:2361];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:14:23.710009Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:74880077486 ... d_tx;tx_id=281474976710714; 2025-03-31T16:15:25.867956Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:25.874007Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:25.874110Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:25.880462Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:25.880493Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:25.886361Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:25.887020Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:25.892663Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:25.893017Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:25.899196Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:25.899202Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039335;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:25.905553Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 
2025-03-31T16:15:25.905557Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:25.912522Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:25.912555Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:25.918990Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:25.918997Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:25.925292Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:25.925293Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:25.931391Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:15:26.095705Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpfr5fq6m9ykd4vrjw1bbd3", SessionId: ydb://session/3?node_id=1&id=MzZhNDlhNTYtZWQwYzc2Y2UtOWJiNDg4MS1jMDEzYWNk, Slow query, duration: 27.543879s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:15:26.495539Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:15:26.495558Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:15:26.496141Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:16:24.146778Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpfsvda60q3n5bh2qe3e3r1", SessionId: ydb://session/3?node_id=1&id=MzZhNDlhNTYtZWQwYzc2Y2UtOWJiNDg4MS1jMDEzYWNk, Slow query, duration: 30.375492s, status: STATUS_CODE_UNSPECIFIED, user: 
UNAUTHENTICATED, results: 0b, text: "pragma TablePathPrefix = \"/Root/test/ds/\";\n-- NB: Subquerys\n-- start query 1 in stream 0 using template query88.tpl and seed 318176889\nselect *\nfrom\n (select count(*) h8_30_to_9\n from store_sales cross join household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 8\n and time_dim.t_minute >= 30\n and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s1 cross join\n (select count(*) h9_to_9_30\n from store_sales cross join household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 9\n and time_dim.t_minute < 30\n and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s2 cross join\n (select count(*) h9_30_to_10\n from store_sales cross join household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 9\n and time_dim.t_minute >= 30\n and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s3 cross join\n (select count(*) h10_to_10_30\n from store_sales cross join household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 10\n and time_dim.t_minute < 30\n and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s4 cross join\n (select count(*) h10_30_to_11\n from store_sales cross join household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 10\n and time_dim.t_minute >= 30\n and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s5 cross join\n (select count(*) h11_to_11_30\n from store_sales cross join household_demographics cross join time_dim cross join store\n where 
ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 11\n and time_dim.t_minute < 30\n and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s6 cross join\n (select count(*) h11_30_to_12\n from store_sales cross join household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 11\n and time_dim.t_minute >= 30\n and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s7 cross join\n (select count(*) h12_to_12_30\n from store_sales cross join household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 12\n and time_dim.t_minute < 30\n and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s8\n;", parameters: 0b ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::Range_BrokenLock3 [GOOD] Test command err: 2025-03-31T16:15:58.872961Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008154529523676:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:15:58.873241Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001d06/r3tmp/tmpBkgHRv/pdisk_1.dat 2025-03-31T16:15:59.147974Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:15:59.219174Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:15:59.219302Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:15:59.221024Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14282 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:15:59.339878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:15:59.362736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:15:59.479304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:15:59.546687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:01.633572Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008167643416657:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:01.633649Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001d06/r3tmp/tmpITSXfj/pdisk_1.dat 2025-03-31T16:16:01.743011Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:01.770004Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:01.770168Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:01.771976Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25066 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:01.910475Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:01.924187Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:01.995128Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:02.037307Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:04.556982Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488008183018094641:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:04.557087Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001d06/r3tmp/tmpQPZift/pdisk_1.dat 2025-03-31T16:16:04.676377Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:04.701981Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:04.702067Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:04.703910Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27575 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:04.897037Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:04.914544Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:04.970241Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:05.013718Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:07.461965Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488008194139661924:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:07.462410Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001d06/r3tmp/tmpP5wsPE/pdisk_1.dat 2025-03-31T16:16:07.552149Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:07.590690Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:07.590783Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:07.592632Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14724 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:07.737431Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:07.756424Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiti ... 5c/001d06/r3tmp/tmprFNkTI/pdisk_1.dat 2025-03-31T16:16:18.274385Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:18.306407Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:18.306507Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:18.307988Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23040 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:18.554890Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 
2025-03-31T16:16:18.578856Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-31T16:16:18.653835Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:18.725365Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:21.736034Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7488008252878303709:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:21.736203Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001d06/r3tmp/tmpw1t6Hv/pdisk_1.dat 2025-03-31T16:16:21.888096Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:21.892873Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:21.892982Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:21.894529Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6872 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:22.146670Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:22.170177Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-03-31T16:16:22.232792Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-31T16:16:22.286662Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:25.788858Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7488008270402160043:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:25.788938Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001d06/r3tmp/tmpdz5b4u/pdisk_1.dat 2025-03-31T16:16:25.897548Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:25.927546Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:25.927659Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:25.929250Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2290 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:26.183916Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:26.201933Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:26.272379Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:26.386378Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:30.171557Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7488008291018289521:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:30.171738Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001d06/r3tmp/tmpJ7T1Sm/pdisk_1.dat 2025-03-31T16:16:30.297812Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:30.330743Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:30.330855Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:30.332357Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1417 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting...2025-03-31T16:16:30.669463Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:30.689821Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:30.754167Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:30.810334Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
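For readability, the query text carried by the first KQP_SLOW_LOG record earlier in this output (duration 27.543879s) is reproduced here with its escaped newlines expanded; this is a re-print of the logged text only, not a re-run of the statement:

    CREATE TABLE t1 (
        id1 Int32 NOT NULL,
        PRIMARY KEY (id1)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t2 (
        id2 Int64 NOT NULL,
        t1_id1 Int64 NOT NULL,
        -- random_field2 Int32
        PRIMARY KEY (id2)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t3 (
        id3 Int16 NOT NULL,
        -- random_field3 Int32
        PRIMARY KEY (id3)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);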
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::SetEraseSet [GOOD] Test command err: 2025-03-31T16:16:25.140115Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008270494619168:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:25.140235Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cd0/r3tmp/tmpyU5XIz/pdisk_1.dat 2025-03-31T16:16:25.465552Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:25.513324Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:25.513465Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:25.515129Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27529 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:25.752372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:25.782155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:25.916677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:25.967827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:26.005031Z node 1 :TX_DATASHARD ERROR: Shard 72075186224037888 cannot parse tx 281474976710662: Validate (783): Key validation status: 3 2025-03-31T16:16:26.005182Z node 1 :TX_PROXY ERROR: Actor# [1:7488008274789587355:2496] txid# 281474976710662 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# ERROR shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2025-03-31T16:16:26.005272Z node 1 :TX_PROXY ERROR: Actor# [1:7488008274789587355:2496] txid# 281474976710662 HANDLE PrepareErrors TEvProposeTransactionResult TDataReq TabletStatus# StatusWait shard id 72075186224037888 2025-03-31T16:16:26.005315Z node 1 :TX_PROXY ERROR: Actor# [1:7488008274789587355:2496] txid# 281474976710662 invalidateDistCache: 1 DIE TDataReq MarkShardError TabletsLeft# 1 2025-03-31T16:16:26.008349Z node 1 :TX_DATASHARD ERROR: Shard 72075186224037888 cannot parse tx 281474976710663: Validate (783): Key validation status: 3 2025-03-31T16:16:26.008447Z node 1 :TX_PROXY ERROR: Actor# [1:7488008274789587377:2503] txid# 281474976710663 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# ERROR shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2025-03-31T16:16:26.008510Z node 1 :TX_PROXY ERROR: Actor# [1:7488008274789587377:2503] txid# 281474976710663 HANDLE PrepareErrors TEvProposeTransactionResult TDataReq TabletStatus# StatusWait shard id 72075186224037888 2025-03-31T16:16:26.008534Z node 1 :TX_PROXY ERROR: Actor# [1:7488008274789587377:2503] txid# 281474976710663 invalidateDistCache: 1 DIE TDataReq MarkShardError TabletsLeft# 1 DataShardErrors: [SCHEME_ERROR] Validate (783): Key validation status: 3 proxy error code: ProxyShardNotAvailable 2025-03-31T16:16:26.011822Z node 1 :TX_DATASHARD ERROR: Shard 72075186224037888 cannot parse tx 281474976710664: Validate (783): Key validation status: 3 2025-03-31T16:16:26.011925Z node 1 :TX_PROXY ERROR: Actor# [1:7488008274789587384:2507] txid# 281474976710664 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# ERROR shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2025-03-31T16:16:26.011993Z node 1 :TX_PROXY ERROR: Actor# [1:7488008274789587384:2507] txid# 281474976710664 HANDLE PrepareErrors TEvProposeTransactionResult TDataReq TabletStatus# StatusWait shard id 72075186224037888 2025-03-31T16:16:26.012016Z node 1 :TX_PROXY ERROR: Actor# [1:7488008274789587384:2507] txid# 281474976710664 invalidateDistCache: 1 DIE TDataReq MarkShardError TabletsLeft# 1 2025-03-31T16:16:26.014580Z node 1 :TX_DATASHARD ERROR: Shard 72075186224037888 cannot parse tx 281474976710665: Validate (783): Key validation status: 3 2025-03-31T16:16:26.014666Z node 1 :TX_PROXY ERROR: Actor# [1:7488008274789587390:2510] txid# 281474976710665 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# ERROR shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2025-03-31T16:16:26.014789Z node 1 :TX_PROXY ERROR: Actor# [1:7488008274789587390:2510] txid# 281474976710665 HANDLE PrepareErrors TEvProposeTransactionResult TDataReq TabletStatus# StatusWait shard id 72075186224037888 2025-03-31T16:16:26.014814Z node 1 :TX_PROXY ERROR: Actor# [1:7488008274789587390:2510] txid# 281474976710665 invalidateDistCache: 1 DIE TDataReq MarkShardError TabletsLeft# 1 DataShardErrors: [SCHEME_ERROR] Validate (783): Key validation status: 3 proxy error code: ProxyShardNotAvailable 2025-03-31T16:16:28.080739Z node 2 
:METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008282731651544:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:28.080858Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cd0/r3tmp/tmp4mhrBZ/pdisk_1.dat 2025-03-31T16:16:28.171494Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:28.217973Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:28.218030Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:28.219303Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16276 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:28.345417Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:28.364558Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:28.418380Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:28.465988Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:31.211755Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488008297718270905:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:31.211863Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cd0/r3tmp/tmpgGHgvK/pdisk_1.dat 2025-03-31T16:16:31.330678Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:31.359127Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:31.359222Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:31.360859Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:31198 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:31.529703Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:31.550353Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:31.605448Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:31.683642Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
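For readability, the shape of the TPC-DS query 88 text from the second KQP_SLOW_LOG record earlier in this output (duration 30.375492s): it cross-joins eight near-identical COUNT(*) subqueries, one per half-hour bucket from 8:30 to 12:30 (aliases h8_30_to_9 through h12_to_12_30), varying only the t_hour/t_minute predicate. A sketch with escapes expanded, showing the first bucket and eliding the rest as logged:

    pragma TablePathPrefix = "/Root/test/ds/";
    select *
    from
      (select count(*) h8_30_to_9
       from store_sales cross join household_demographics cross join time_dim cross join store
       where ss_sold_time_sk = time_dim.t_time_sk
         and ss_hdemo_sk = household_demographics.hd_demo_sk
         and ss_store_sk = s_store_sk
         and time_dim.t_hour = 8
         and time_dim.t_minute >= 30
         and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count <= 2+2) or
              (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count <= 4+2) or
              (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count <= 3+2))
         and store.s_store_name = 'ese') s1 cross join
      -- s2 .. s8: the same subquery repeated for buckets (t_hour, t_minute) =
      -- (9, <30), (9, >=30), (10, <30), (10, >=30), (11, <30), (11, >=30), (12, <30)
      ...
    ;

In the TFlatTest::LargeProxyReplyRW report that follows, the proxy rejects the merged result with "Query result size limit exceeded. (71692241 > 50331648)"; for reference, 50331648 bytes is 48 x 1024 x 1024 (a 48 MiB cap), while the produced result is about 68.4 MiB.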
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::LargeProxyReplyRW [GOOD] Test command err: 2025-03-31T16:16:24.695322Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008268002141189:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:24.695437Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cd3/r3tmp/tmprR0bve/pdisk_1.dat 2025-03-31T16:16:24.989805Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:25.037515Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:25.037669Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:25.039471Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27158 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:25.263656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:25.291942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:29.261201Z node 1 :TX_PROXY ERROR: Actor# [1:7488008285182013951:4138] txid# 281474976711010 MergeResult Result too large TDataReq marker# P18 2025-03-31T16:16:29.261253Z node 1 :TX_PROXY ERROR: Actor# [1:7488008285182013951:4138] txid# 281474976711010 RESPONSE Status# ExecResultUnavailable marker# P13c MiniKQLErrors: Query result size limit exceeded. 
(71692241 > 50331648) proxy error code: ExecResultUnavailable 2025-03-31T16:16:29.916844Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008290079409197:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:29.917011Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cd3/r3tmp/tmp2oZsld/pdisk_1.dat 2025-03-31T16:16:30.033855Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:30.062975Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:30.063115Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:30.065032Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:32469 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:30.249878Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:30.268376Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:34.482927Z node 2 :TX_PROXY ERROR: Actor# [2:7488008307259281952:4140] txid# 281474976716011 MergeResult Result too large TDataReq marker# P18 2025-03-31T16:16:34.483009Z node 2 :TX_PROXY ERROR: Actor# [2:7488008307259281952:4140] txid# 281474976716011 RESPONSE Status# ExecResultUnavailable marker# P13c MiniKQLErrors: Query result size limit exceeded. 
(71692241 > 50331648) proxy error code: ExecResultUnavailable >> TFlatTest::CopyTableAndRead [GOOD] >> TFlatTest::CopyTableAndDropOriginal >> TLocksTest::CK_Range_BrokenLock [GOOD] >> TLocksTest::CK_Range_BrokenLockInf >> TFlatTest::ShardUnfreezeNonFrozen [GOOD] >> TFlatTest::ShardFreezeUnfreezeRejectScheme ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::WriteSplitAndReadFromFollower [GOOD] Test command err: 2025-03-31T16:16:28.481287Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008282779537657:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:28.481366Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cb6/r3tmp/tmpr14KSH/pdisk_1.dat 2025-03-31T16:16:28.778543Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:28.855346Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:28.855465Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:28.857162Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4908 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:28.998380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:29.030004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743437789135 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Key2" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Va... (TRUNCATED) 2025-03-31T16:16:29.194466Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:16:29.195742Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:16:29.195767Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:16:29.284926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { OperationType: ESchemeOpSplitMergeTablePartitions SplitMergeTablePartitions { TablePath: "/dc-1/Dir/TableOld" SourceTabletId: 72075186224037888 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 100 } } } } } } TxId: 281474976710668 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-03-31T16:16:29.285064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TSplitMerge Propose, tableStr: /dc-1/Dir/TableOld, tableId: , opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-31T16:16:29.285308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-03-31T16:16:29.285337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-03-31T16:16:29.285600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4 2025-03-31T16:16:29.285680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710668:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-31T16:16:29.286398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710668, response: Status: StatusAccepted TxId: 281474976710668 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2025-03-31T16:16:29.286468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710668, subject: , status: StatusAccepted, operation: ALTER TABLE PARTITIONS, path: /dc-1/Dir/TableOld 2025-03-31T16:16:29.286589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-31T16:16:29.286665Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710668:0 ProgressState, operation type: TxSplitTablePartition, at tablet# 72057594046644480 2025-03-31T16:16:29.287042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710668:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 2 TabletType: DataShard ObjectDomain { SchemeShard: 
72057594046644480 PathId: 1 } ObjectId: 3 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } waiting... 2025-03-31T16:16:29.287181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710668:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 3 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 3 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-03-31T16:16:29.287457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710668:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:2 msg type: 268697601 2025-03-31T16:16:29.287547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710668:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:3 msg type: 268697601 2025-03-31T16:16:29.287590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710668, partId: 0, tablet: 72057594037968897 2025-03-31T16:16:29.287614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 281474976710668, shardIdx: 72057594046644480:2, partId: 0 2025-03-31T16:16:29.287623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 281474976710668, shardIdx: 72057594046644480:3, partId: 0 2025-03-31T16:16:29.287795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-31T16:16:29.287814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710668, ready parts: 0/1, is published: true 2025-03-31T16:16:29.287826Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-31T16:16:29.290095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvCreateTabletReply at schemeshard: 72057594046644480 message: Status: OK Owner: 72057594046644480 OwnerIdx: 2 TabletID: 72075186224037889 Origin: 72057594037968897 2025-03-31T16:16:29.290119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByShardIdx, TxId: 281474976710668, shardIdx: 72057594046644480:2, partId: 0 2025-03-31T16:16:29.290197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710668:0, at schemeshard: 72057594046644480, message: Status: OK Owner: 72057594046644480 OwnerIdx: 2 TabletID: 72075186224037889 Origin: 72057594037968897 2025-03-31T16:16:29.290232Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710668:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-03-31T16:16:29.290299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710668:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046644480 OwnerIdx: 2 TabletID: 72075186224037889 Origin: 72057594037968897 2025-03-31T16:16:29.290465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvCreateTabletReply at schemeshard: 72057594046644480 message: Status: OK Owner: 72057594046644480 OwnerIdx: 3 TabletID: 72075186224037890 Origin: 72057594037968897 2025-03-31T16:16:29.290486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation FindRelatedPartByShardIdx, TxId: 281474976710668, shardIdx: 72057594046644480:3, partId: 0 2025-03-31T16:16:29.290557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710668:0, at schemeshard: 72057594046644480, message: Status: OK Owner: 72057594046644480 OwnerIdx: 3 TabletID: 72075186224037890 Origin: 72057594037968897 2025-03-31T16:16:29.290570Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710668:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-03-31T16:16:29.290625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710668:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046644480 OwnerIdx: 3 TabletID: 72075186224037890 Origin: 72057594037968897 2025-03-31T16:16:29.290665Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710668:0 2 -> 3 2025-03-31T16:16:29.290852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-31T16:16:29.291206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-31T16:16:29.291263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-31T16:16:29.291274Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TConfigureDestination ProgressState, operationId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-31T16:16:29.291325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Initializing scheme on dst datashard: 72075186224037889 splitOp: 281474976710668:0 alterVersion: 1 at tablet: 72057594046644480 2025-03-31T16:16:29.291471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Initializing scheme on dst datashard: 72075186224037890 ... 
node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-03-31T16:16:32.922755Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7488008302942939051 RawX2: 4503612512274684 } TabletId: 72075186224037890 State: 4 2025-03-31T16:16:32.922829Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-03-31T16:16:32.922941Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7488008302942939052 RawX2: 4503612512274685 } TabletId: 72075186224037892 State: 4 2025-03-31T16:16:32.922966Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037892, state: Offline, at schemeshard: 72057594046644480 2025-03-31T16:16:32.923515Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:1 hive 72057594037968897 at ss 72057594046644480 2025-03-31T16:16:32.928347Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2025-03-31T16:16:32.928350Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2025-03-31T16:16:32.928492Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037891 reason = ReasonStop 2025-03-31T16:16:32.928536Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-03-31T16:16:32.928629Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037891 2025-03-31T16:16:32.928926Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 5 2025-03-31T16:16:32.930890Z node 3 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037888 state Offline 2025-03-31T16:16:32.930950Z node 3 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2025-03-31T16:16:32.930962Z node 3 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2025-03-31T16:16:32.930986Z node 3 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037892 state Offline 2025-03-31T16:16:32.929161Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2025-03-31T16:16:32.929327Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4 2025-03-31T16:16:32.929357Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found 2025-03-31T16:16:32.929449Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-03-31T16:16:32.929504Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-03-31T16:16:32.929544Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:5 hive 72057594037968897 at ss 
72057594046644480 2025-03-31T16:16:32.929717Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037888 2025-03-31T16:16:32.930126Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2025-03-31T16:16:32.930157Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found 2025-03-31T16:16:32.930927Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037891 2025-03-31T16:16:32.931116Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037891 2025-03-31T16:16:32.932210Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037891 2025-03-31T16:16:32.932261Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037891 not found 2025-03-31T16:16:32.932296Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037888 not found 2025-03-31T16:16:32.932929Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037888 2025-03-31T16:16:32.933662Z node 3 :TX_DATASHARD INFO: OnTabletStop: 72075186224037888 reason = ReasonStop 2025-03-31T16:16:32.933704Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:7488008300874676321:2726], serverId# [3:7488008302942938918:2176], sessionId# [0:0:0] 2025-03-31T16:16:32.933931Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037888 2025-03-31T16:16:32.933994Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037888 2025-03-31T16:16:32.934166Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037888 not found 2025-03-31T16:16:32.936306Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037889 2025-03-31T16:16:32.937246Z node 3 :TX_DATASHARD INFO: OnTabletStop: 72075186224037890 reason = ReasonStop 2025-03-31T16:16:32.935885Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2025-03-31T16:16:32.935917Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-03-31T16:16:32.935961Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:1 2025-03-31T16:16:32.935969Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2025-03-31T16:16:32.936171Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-03-31T16:16:32.936404Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-03-31T16:16:32.936547Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-03-31T16:16:32.936699Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-03-31T16:16:32.936811Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-03-31T16:16:32.936929Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: 
Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2025-03-31T16:16:32.937031Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-03-31T16:16:32.937194Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-03-31T16:16:32.937210Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-03-31T16:16:32.937258Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-03-31T16:16:32.937431Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2025-03-31T16:16:32.937449Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2025-03-31T16:16:32.938042Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037889 not found 2025-03-31T16:16:32.938061Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037892 not found 2025-03-31T16:16:32.938079Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037890 not found 2025-03-31T16:16:32.938093Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037890 not found 2025-03-31T16:16:32.938108Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037892 not found 2025-03-31T16:16:32.939043Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037889 reason = ReasonStop 2025-03-31T16:16:32.939110Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037889, clientId# [2:7488008300874676330:2732], serverId# [2:7488008300874676331:2733], sessionId# [0:0:0] 2025-03-31T16:16:32.939170Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037889 2025-03-31T16:16:32.940141Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037892 not found 2025-03-31T16:16:32.940163Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2025-03-31T16:16:32.937336Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037890 2025-03-31T16:16:32.938347Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037890 2025-03-31T16:16:32.938415Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037890 2025-03-31T16:16:32.939504Z node 3 :TX_DATASHARD INFO: OnTabletStop: 72075186224037892 reason = ReasonStop 2025-03-31T16:16:32.939550Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037892, clientId# [2:7488008300874676858:3157], serverId# [3:7488008302942939253:2442], sessionId# [0:0:0] 2025-03-31T16:16:32.939596Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037892 2025-03-31T16:16:32.940268Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037892 2025-03-31T16:16:32.940311Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037892 2025-03-31T16:16:32.950721Z node 2 :FLAT_TX_SCHEMESHARD 
DEBUG: Deleted shardIdx 72057594046644480:2 2025-03-31T16:16:32.950750Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-03-31T16:16:32.950785Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-03-31T16:16:32.950792Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-03-31T16:16:32.950811Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-03-31T16:16:32.950827Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:5 2025-03-31T16:16:32.950842Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:5 tabletId 72075186224037892 2025-03-31T16:16:32.950881Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-03-31T16:16:33.005526Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037890 2025-03-31T16:16:33.006163Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037892 2025-03-31T16:16:33.006910Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037889 2025-03-31T16:16:33.006977Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037889
>> TLocksTest::Range_IncorrectNullDot1 [GOOD]
>> TLocksTest::Range_IncorrectNullDot2
>> TFlatTest::Ls
>> TLocksTest::NoLocksSet
>> TFlatTest::ShardFreezeRejectBadProtobuf
>> TLocksTest::BrokenLockUpdate
>> TFlatTest::SplitInvalidPath [GOOD]
>> TFlatTest::SplitThenMerge
>> TFlatTest::SelectRangeForbidNullArgs1 [GOOD]
>> TFlatTest::CopyTableAndDropOriginal [GOOD]
>> TLocksTest::Range_GoodLock0
>> TFlatTest::ShardFreezeUnfreezeRejectScheme [GOOD]
>> TFlatTest::Ls [GOOD]
>> TFlatTest::LsPathId
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeForbidNullArgs1 [GOOD]
Test command err:
2025-03-31T16:16:33.057422Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008306592999084:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:33.057513Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001ca9/r3tmp/tmpbbmNMA/pdisk_1.dat 2025-03-31T16:16:33.332639Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:33.413758Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:33.413907Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:33.415747Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22660 WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:33.625924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:33.652335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:36.165134Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008319109727343:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:36.165202Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001ca9/r3tmp/tmp0wq3g4/pdisk_1.dat 2025-03-31T16:16:36.268390Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:36.304917Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:36.305028Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:36.306584Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26177 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 
2025-03-31T16:16:36.468955Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:36.485020Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting...
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::CopyTableAndDropOriginal [GOOD]
Test command err:
2025-03-31T16:16:33.518024Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008306461404484:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:33.518131Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001ca4/r3tmp/tmprvPT2d/pdisk_1.dat 2025-03-31T16:16:33.823846Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:33.884662Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:33.884793Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:33.886298Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:62645 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:34.069925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:34.103448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting...
2025-03-31T16:16:34.279783Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-03-31T16:16:34.286430Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-03-31T16:16:34.313929Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2025-03-31T16:16:34.319169Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 Copy TableOld to Table 2025-03-31T16:16:34.427839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/dc-1/Dir" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table" CopyFromTable: "/dc-1/Dir/TableOld" } } TxId: 281474976710676 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-03-31T16:16:34.428144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCopyTable Propose, path: /dc-1/Dir/Table, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-31T16:16:34.428728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 2], parent name: Dir, child name: Table, child id: [OwnerId: 72057594046644480, LocalPathId: 4], at schemeshard: 72057594046644480 2025-03-31T16:16:34.428781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 0 2025-03-31T16:16:34.428795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction source path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-03-31T16:16:34.428854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 1 2025-03-31T16:16:34.428874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 2 2025-03-31T16:16:34.428995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 3 2025-03-31T16:16:34.429109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710676:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-31T16:16:34.429767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-03-31T16:16:34.429813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 4 2025-03-31T16:16:34.430439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710676, response: Status: StatusAccepted TxId: 281474976710676 SchemeshardId: 72057594046644480 PathId: 4, at schemeshard: 72057594046644480 2025-03-31T16:16:34.430584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710676, database: /dc-1, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /dc-1/Dir/Table 
2025-03-31T16:16:34.430784Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:34.430811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710676, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2025-03-31T16:16:34.430977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710676, path id: [OwnerId: 72057594046644480, LocalPathId: 4] 2025-03-31T16:16:34.431046Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-03-31T16:16:34.431061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7488008306461405009:2249], at schemeshard: 72057594046644480, txId: 281474976710676, path id: 2 2025-03-31T16:16:34.431105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7488008306461405009:2249], at schemeshard: 72057594046644480, txId: 281474976710676, path id: 4 2025-03-31T16:16:34.431142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-31T16:16:34.431181Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710676:0 ProgressState, operation type: TxCopyTable, at tablet# 72057594046644480 2025-03-31T16:16:34.431469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710676:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 3 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 4 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-03-31T16:16:34.431556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710676:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 4 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 4 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-03-31T16:16:34.433627Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710676 2025-03-31T16:16:34.433759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710676 2025-03-31T16:16:34.433772Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710676 2025-03-31T16:16:34.433794Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710676, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 6 2025-03-31T16:16:34.433836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-03-31T16:16:34.434062Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at 
schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710676 2025-03-31T16:16:34.434179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710676 2025-03-31T16:16:34.434215Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710676 2025-03-31T16:16:34.434237Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710676, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 1 2025-03-31T16:16:34.434252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 5 2025-03-31T16:16:34.434308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710676, ready parts: 0/1, is published: true 2025-03-31T16:16:34.434469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710676:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:3 msg type: 268697601 2025-03-31T16:16:34.434588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710676:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:4 msg type: 268697601 2025-03-31T16:16:34.434672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710676, partId: 0, tablet: 72057594037968897 2025-03-31T16:16:34.434720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 281474976710676, shardIdx: 72057594046644480:3, partId: 0 2025-03-31T16:16:34.434738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 281474976710676, shardIdx: 72057594046644480:4, partId: 0 2025-03-31T16:16:34.434858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710676, at schemeshard: 72057594046644480 2025-03-31T16:16:34.434875Z no ... 
T16:16:37.347416Z node 2 :TX_DATASHARD INFO: 72075186224037888 Reporting state Offline to schemeshard 72057594046644480 2025-03-31T16:16:37.347565Z node 2 :TX_DATASHARD INFO: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2025-03-31T16:16:37.347627Z node 2 :TX_DATASHARD INFO: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2025-03-31T16:16:37.347709Z node 2 :TX_DATASHARD DEBUG: 72075186224037891 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-03-31T16:16:37.347779Z node 2 :TX_DATASHARD DEBUG: 72075186224037891 parts [ [72075186224037889:1:16:1:12288:306:0] [72075186224037889:1:23:1:12288:253:0] ] return ack processed Check that tablet 72075186224037888 was deleted 2025-03-31T16:16:37.347815Z node 2 :TX_DATASHARD DEBUG: 72075186224037891 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-03-31T16:16:37.347888Z node 2 :TX_DATASHARD INFO: 72075186224037891 Initiating switch from PreOffline to Offline state 2025-03-31T16:16:37.348853Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:7488008323527887795:2374], serverId# [2:7488008323527887801:2675], sessionId# [0:0:0] 2025-03-31T16:16:37.348961Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037889, clientId# [2:7488008323527887797:2376], serverId# [2:7488008323527887802:2676], sessionId# [0:0:0] 2025-03-31T16:16:37.348989Z node 2 :TX_DATASHARD INFO: 72075186224037891 Reporting state Offline to schemeshard 72057594046644480 2025-03-31T16:16:37.349014Z node 2 :TX_DATASHARD INFO: 72075186224037891 Reporting state Offline to schemeshard 72057594046644480 2025-03-31T16:16:37.349168Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7488008323527887264 RawX2: 4503608217307386 } TabletId: 72075186224037888 State: 4 2025-03-31T16:16:37.349224Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037888, state: Offline, at schemeshard: 72057594046644480 2025-03-31T16:16:37.349441Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7488008323527887578 RawX2: 4503608217307445 } TabletId: 72075186224037890 State: 4 2025-03-31T16:16:37.349470Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-03-31T16:16:37.349575Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7488008323527887288 RawX2: 4503608217307387 } TabletId: 72075186224037889 State: 4 2025-03-31T16:16:37.349600Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2025-03-31T16:16:37.349731Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7488008323527887580 RawX2: 4503608217307446 } TabletId: 72075186224037891 State: 4 2025-03-31T16:16:37.349755Z node 2 :FLAT_TX_SCHEMESHARD INFO: 
TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2025-03-31T16:16:37.349847Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7488008323527887580 RawX2: 4503608217307446 } TabletId: 72075186224037891 State: 4 2025-03-31T16:16:37.349868Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2025-03-31T16:16:37.349989Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037888 state Offline 2025-03-31T16:16:37.350036Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:1 hive 72057594037968897 at ss 72057594046644480 2025-03-31T16:16:37.350116Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-03-31T16:16:37.350118Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2025-03-31T16:16:37.350159Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2025-03-31T16:16:37.350165Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2025-03-31T16:16:37.350196Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2025-03-31T16:16:37.350201Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-03-31T16:16:37.350229Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2025-03-31T16:16:37.350234Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-03-31T16:16:37.351662Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2025-03-31T16:16:37.351738Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037888 reason = ReasonStop 2025-03-31T16:16:37.351779Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037890 reason = ReasonStop 2025-03-31T16:16:37.351807Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037889 reason = ReasonStop 2025-03-31T16:16:37.351820Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037891 reason = ReasonStop 2025-03-31T16:16:37.351915Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-03-31T16:16:37.352124Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-03-31T16:16:37.352191Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037890 2025-03-31T16:16:37.352276Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 2 2025-03-31T16:16:37.352363Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037890 2025-03-31T16:16:37.352410Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 
ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-03-31T16:16:37.352537Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-03-31T16:16:37.352691Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-03-31T16:16:37.352803Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 1 2025-03-31T16:16:37.352894Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-03-31T16:16:37.353018Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 2 paths in candidate queue, at schemeshard: 72057594046644480 2025-03-31T16:16:37.353044Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 4], at schemeshard: 72057594046644480 2025-03-31T16:16:37.353087Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-03-31T16:16:37.353111Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-03-31T16:16:37.353126Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-03-31T16:16:37.353510Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2025-03-31T16:16:37.353542Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2025-03-31T16:16:37.353555Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found 2025-03-31T16:16:37.353569Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2025-03-31T16:16:37.353820Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037889 2025-03-31T16:16:37.353882Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037889 2025-03-31T16:16:37.353884Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:1 2025-03-31T16:16:37.353906Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2025-03-31T16:16:37.353939Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-03-31T16:16:37.353946Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-03-31T16:16:37.353966Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2 2025-03-31T16:16:37.353973Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-03-31T16:16:37.353989Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2025-03-31T16:16:37.354003Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-03-31T16:16:37.354018Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2025-03-31T16:16:37.354155Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-03-31T16:16:37.354991Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037891 2025-03-31T16:16:37.355030Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037891 2025-03-31T16:16:37.356197Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037888 2025-03-31T16:16:37.356244Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037888 Check that tablet 72075186224037889 was deleted 2025-03-31T16:16:37.648701Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037888) 2025-03-31T16:16:37.648958Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037889) Check that tablet 72075186224037890 was deleted 2025-03-31T16:16:37.649278Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037890) Check that tablet 72075186224037891 was deleted 2025-03-31T16:16:37.649798Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037891)
>> TLocksFatTest::PointSetRemove [GOOD]
>> TFlatTest::ShardFreezeRejectBadProtobuf [GOOD]
>> TFlatTest::SelectRangeSkipNullKeys
>> Secret::Simple [GOOD]
>> TLocksFatTest::RangeSetBreak [GOOD]
>> TLocksFatTest::RangeSetNotBreak
>> TFlatTest::SplitThenMerge [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::ShardFreezeUnfreezeRejectScheme [GOOD]
Test command err:
2025-03-31T16:16:34.455778Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008310739516290:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:34.455875Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001c9c/r3tmp/tmp4rH63i/pdisk_1.dat 2025-03-31T16:16:34.762204Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:34.833144Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:34.833332Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:34.834956Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:63510 WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:35.017921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:35.041657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:35.156707Z node 1 :TX_PROXY ERROR: Actor# [1:7488008315034484275:2362] txid# 281474976715659, issues: { message: "Requested freeze state already set" severity: 1 } Error 1: Requested freeze state already set 2025-03-31T16:16:37.310385Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008324473631321:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:37.310440Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001c9c/r3tmp/tmptoXRL8/pdisk_1.dat 2025-03-31T16:16:37.405654Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:37.448890Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:37.448969Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:37.450676Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11199 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:37.580109Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:37.588357Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:37.636294Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:37.649058Z node 2 :TX_PROXY ERROR: Actor# [2:7488008324473632042:2391] txid# 281474976715660, issues: { message: "Table is frozen. Only unfreeze alter is allowed" severity: 1 } Error 128: Table is frozen. Only unfreeze alter is allowed 2025-03-31T16:16:37.651563Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:37.663222Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 waiting...
>> GrpcConnectionStringParserTest::NoDatabaseFlag
------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::Simple [GOOD]
Test command err:
2025-03-31T16:14:21.816904Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:14:21.817210Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:14:21.817418Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000e3a/r3tmp/tmpIpwuBw/pdisk_1.dat TServer::EnableGrpc on GrpcPort 22524, node 1 TClient is connected to server localhost:17035 2025-03-31T16:14:22.867451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:14:22.918308Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:14:22.918560Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:14:22.918597Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:14:22.918632Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:14:22.919072Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:14:22.956510Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:14:22.957686Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:14:22.971233Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected snapshot->GetSecrets().size() incorrect: SECRETS:ACCESS: Initialization finished REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 2025-03-31T16:14:35.180840Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:811:2677], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:35.181048Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:35.184804Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:2, at schemeshard: 72057594046644480 2025-03-31T16:14:35.381614Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:927:2755], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:35.381752Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:35.382085Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:932:2760], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:35.391571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:2, at schemeshard: 72057594046644480 2025-03-31T16:14:35.485123Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:934:2762], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:14:35.825466Z node 1 :TX_PROXY ERROR: Actor# [1:1030:2829] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:14:36.460718Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:14:36.950043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:1, at schemeshard: 72057594046644480 2025-03-31T16:14:37.647771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-03-31T16:14:38.366967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-31T16:14:38.916118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-03-31T16:14:40.178553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 snapshot->GetSecrets().size() incorrect: SECRETS:ACCESS: 2025-03-31T16:14:40.630114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 snapshot->GetAccess().size() incorrect: SECRETS:root@builtin:secret1:100;ACCESS: REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 snapshot->GetSecrets().size() incorrect: SECRETS:root@builtin:secret1:100;root@builtin:secret1_1:100;ACCESS: 2025-03-31T16:14:57.491173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:14:57.491236Z node 1 :IMPORT WARN: Table profiles were not loaded FINISHED_REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `200`;EXPECTATION=1;WAITING=1 REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `200`;RESULT=;EXPECTATION=1 snapshot->GetSecrets().size() incorrect: SECRETS:root@builtin:secret1:100;root@builtin:secret1_1:200;ACCESS: FINISHED_REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `200`;EXPECTATION=1;WAITING=1 2025-03-31T16:15:21.065259Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715723.
Ctx: { TraceId: 01jqpfrv0tbxtc87h5wfw9v690, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YThjZWY5ZTUtOTcwMWZhYmYtOTZmYzM0OGQtODNlZGYzMjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2025-03-31T16:15:44.509029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715745:0, at schemeshard: 72057594046644480 2025-03-31T16:15:45.735955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715754:0, at schemeshard: 72057594046644480 2025-03-31T16:15:47.758210Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715767:0, at schemeshard: 72057594046644480 2025-03-31T16:15:48.234510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715770:0, at schemeshard: 72057594046644480 REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 snapshot->GetAccess().size() incorrect (zero expects): SECRETS:root@builtin:secret1:abcde;root@builtin:secret1_1:200;ACCESS:root@builtin:secret1:test@test1; FINISHED_REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2025-03-31T16:16:00.751966Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715782. Ctx: { TraceId: 01jqpft1yj2e0a4htwt128m9b8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWQ0ZmNlMTgtZWUxZjc1YjgtNmRhN2QzMjQtYjYzMDZiMTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 REQUEST=DROP OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 snapshot->GetAccess().size() incorrect: SECRETS:root@builtin:secret1:abcde;root@builtin:secret1_1:200;ACCESS: REQUEST=DROP OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 FINISHED_REQUEST=DROP OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=1;WAITING=1 snapshot->GetSecrets().size() incorrect: SECRETS:root@builtin:secret1_1:200;ACCESS: REQUEST=DROP OBJECT `secret1` (TYPE SECRET);RESULT=;EXPECTATION=1 FINISHED_REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=1;WAITING=1 2025-03-31T16:16:39.043418Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715826. Ctx: { TraceId: 01jqpfv7fy6jx3kbqk19q308bj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTZkNzFhNzEtNWMzMTkwNWMtOTg5NTNhMDYtMmI4ZDYyMTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root REQUEST=SELECT * FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT * FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1
>> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientWithCorrectCerts_EmptyAllowedSids
>> YdbYqlClient::SecurityTokenAuth
>> TLocksTest::Range_IncorrectDot1 [GOOD]
>> TLocksTest::Range_IncorrectDot2
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksFatTest::PointSetRemove [GOOD]
Test command err:
2025-03-31T16:16:27.880249Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008281254356939:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:27.880381Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cbe/r3tmp/tmpPJQjG0/pdisk_1.dat 2025-03-31T16:16:28.182584Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:28.233752Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:28.233885Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:28.235555Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12547 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:28.472769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:28.498508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:28.616160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:28.658800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting...
2025-03-31T16:16:32.880160Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008281254356939:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:32.880241Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:16:33.870660Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008304374945088:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:33.870756Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cbe/r3tmp/tmptwz8LK/pdisk_1.dat 2025-03-31T16:16:33.965280Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:34.007818Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:34.007904Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:34.009125Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:31623 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:34.174770Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:34.194552Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:34.254362Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:34.299604Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:37.568368Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488008321248268181:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:37.568427Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cbe/r3tmp/tmpHBBo9L/pdisk_1.dat 2025-03-31T16:16:37.677684Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:37.708355Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:37.708453Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:37.709982Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28576 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:37.904686Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:37.929813Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:37.979425Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:38.048976Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
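The TLocks* suites above and below all probe the same mechanism: a serializable YDB transaction takes optimistic locks on the rows it reads, and a competing commit can invalidate them. A minimal YQL sketch of the pattern (table, key, and value names are invented for illustration; the real tests drive this through the C++ TClient):

    -- Session A: read inside an open serializable transaction;
    -- this places an optimistic lock on Key = 1.
    SELECT Value FROM `/dc-1/Dir/Table` WHERE Key = 1;

    -- Session B: an independent transaction commits a write to the
    -- same key, breaking A's lock.
    UPSERT INTO `/dc-1/Dir/Table` (Key, Value) VALUES (1, 42);

    -- Session A: COMMIT now fails with ABORTED
    -- ("Transaction locks invalidated").
    COMMIT;

Judging by the names, the Good*Lock variants assert that the commit survives when the competing write does not touch the locked key or range, while the Broken* variants assert the ABORTED path.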
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SplitThenMerge [GOOD] Test command err: 2025-03-31T16:16:35.996425Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008315097911599:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:35.996490Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001c95/r3tmp/tmpHqGE5U/pdisk_1.dat 2025-03-31T16:16:36.282542Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:36.355216Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:36.355366Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:36.357607Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28927 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:36.506610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:36.518627Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:36.542850Z node 1 :TX_PROXY ERROR: Actor# [1:7488008319392879479:2298] txid# 281474976710659, issues: { message: "Check failed: path: \'/dc-1/Dir1\', error: path is not a table (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } Error 128: Check failed: path: '/dc-1/Dir1', error: path is not a table (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeDir, state: EPathStateNoChanges) 2025-03-31T16:16:38.903442Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008329262626109:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:38.903534Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001c95/r3tmp/tmpBkjWNo/pdisk_1.dat 2025-03-31T16:16:39.014841Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:39.053561Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:39.053646Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:39.055203Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29775 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:39.233979Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:39.254847Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:39.399740Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-03-31T16:16:39.407485Z node 2 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-03-31T16:16:39.433043Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.003s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2025-03-31T16:16:39.448942Z node 2 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.008s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743437799355 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "unused004" Type: "Float" TypeId: 33 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name... (TRUNCATED) 2025-03-31T16:16:39.465759Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:16:39.467437Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-03-31T16:16:39.467629Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-31T16:16:39.468517Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-03-31T16:16:39.468870Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:16:39.469311Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037888 restored its data 2025-03-31T16:16:39.469880Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-03-31T16:16:39.469966Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-31T16:16:39.470354Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037889 restored its data 2025-03-31T16:16:39.470923Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-03-31T16:16:39.471540Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:16:39.471899Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037888 restored its data 2025-03-31T16:16:39.472435Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-03-31T16:16:39.472532Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-31T16:16:39.472835Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037889 restored its data 2025-03-31T16:16:39.473320Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-03-31T16:16:39.473732Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 
2025-03-31T16:16:39.474148Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037888 restored its data 2025-03-31T16:16:39.474906Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-03-31T16:16:39.475005Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-31T16:16:39.475455Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037889 restored its data 2025-03-31T16:16:39.476133Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-03-31T16:16:39.476238Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:16:39.476650Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037888 restored its data 2025-03-31T16:16:39.477566Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-03-31T16:16:39.477677Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-31T16:16:39.478139Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037889 restored its data 2025-03-31T16:16:39.479222Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-03-31T16:16:39.479321Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:16:39.479747Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037888 restored its data 2025-03-31T16:16:39.480408Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-03-31T16:16:39.480969Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-31T16:16:39.481406Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037889 restored its data 2025-03-31T16:16:39.482366Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-03-31T16:16:39.482473Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:16:39.483044Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037888 restored its data 2025-03-31T16:16:39.483836Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:16:39.483862Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:16:39.484062Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-31T16:16:39.484485Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037889 restored its data 2025-03-31T16:16:39.484875Z node 2 :TX_DATASHARD DEBUG: ... 
715693 Step: 0 Generation: 1 2025-03-31T16:16:39.963984Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715693:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-03-31T16:16:39.964061Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 281474976715693:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 message: Source { RawX1: 7488008333557594027 RawX2: 4503608217307386 } Origin: 72075186224037889 State: 5 TxId: 281474976715693 Step: 0 Generation: 1 2025-03-31T16:16:39.964105Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976715693:0, shardIdx: 72057594046644480:2, datashard: 72075186224037889, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046644480 2025-03-31T16:16:39.964125Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 281474976715693:0 HandleReply TEvSchemaChanged CollectSchemaChanged: false 2025-03-31T16:16:39.964282Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046644480, at schemeshard: 72057594046644480, message: Source { RawX1: 7488008333557594661 RawX2: 4503608217307478 } Origin: 72075186224037894 State: 5 TxId: 281474976715693 Step: 0 Generation: 1 2025-03-31T16:16:39.964300Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715693, tablet: 72075186224037894, partId: 0 2025-03-31T16:16:39.964374Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715693:0, at schemeshard: 72057594046644480, message: Source { RawX1: 7488008333557594661 RawX2: 4503608217307478 } Origin: 72075186224037894 State: 5 TxId: 281474976715693 Step: 0 Generation: 1 2025-03-31T16:16:39.964391Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715693:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-03-31T16:16:39.964432Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 281474976715693:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 message: Source { RawX1: 7488008333557594661 RawX2: 4503608217307478 } Origin: 72075186224037894 State: 5 TxId: 281474976715693 Step: 0 Generation: 1 2025-03-31T16:16:39.964455Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976715693:0, shardIdx: 72057594046644480:7, datashard: 72075186224037894, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046644480 2025-03-31T16:16:39.964476Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715693:0, at schemeshard: 72057594046644480 2025-03-31T16:16:39.964495Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976715693:0, datashard: 72075186224037889, at schemeshard: 72057594046644480 2025-03-31T16:16:39.964509Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976715693:0, datashard: 72075186224037894, at schemeshard: 72057594046644480 2025-03-31T16:16:39.964522Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715693:0 129 -> 240 2025-03-31T16:16:39.964711Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715693:0, at schemeshard: 72057594046644480 2025-03-31T16:16:39.964780Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715693:0, at schemeshard: 72057594046644480 2025-03-31T16:16:39.964824Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715693:0, at schemeshard: 72057594046644480 2025-03-31T16:16:39.964890Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715693:0, at schemeshard: 72057594046644480 2025-03-31T16:16:39.964902Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715693 datashard 72075186224037889 state PreOffline 2025-03-31T16:16:39.964920Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 281474976715693:0 ProgressState, at schemeshard: 72057594046644480 2025-03-31T16:16:39.964939Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-03-31T16:16:39.965035Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715693 datashard 72075186224037894 state PreOffline 2025-03-31T16:16:39.965065Z node 2 :TX_DATASHARD DEBUG: 72075186224037894 Got TEvSchemaChangedResult from SS at 72075186224037894 2025-03-31T16:16:39.965216Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4 2025-03-31T16:16:39.965331Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715693:0 progress is 1/1 2025-03-31T16:16:39.965344Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715693 ready parts: 1/1 2025-03-31T16:16:39.965388Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715693:0 progress is 1/1 2025-03-31T16:16:39.965408Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715693 ready parts: 1/1 2025-03-31T16:16:39.965424Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715693, ready parts: 1/1, is published: true 2025-03-31T16:16:39.965477Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:7488008333557594896:2428] message: TxId: 281474976715693 2025-03-31T16:16:39.965499Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715693 ready parts: 1/1 2025-03-31T16:16:39.965516Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715693:0 2025-03-31T16:16:39.965530Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715693:0 2025-03-31T16:16:39.965618Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-03-31T16:16:39.966163Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-03-31T16:16:39.966222Z node 2 :TX_DATASHARD INFO: 72075186224037889 Initiating switch from PreOffline to Offline state TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 128 StatusCode: PATH_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Path not found" 2025-03-31T16:16:39.967416Z node 2 :TX_DATASHARD DEBUG: 72075186224037894 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 
2025-03-31T16:16:39.967470Z node 2 :TX_DATASHARD INFO: 72075186224037894 Initiating switch from PreOffline to Offline state 2025-03-31T16:16:39.968621Z node 2 :TX_DATASHARD INFO: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2025-03-31T16:16:39.968970Z node 2 :TX_DATASHARD INFO: 72075186224037894 Reporting state Offline to schemeshard 72057594046644480 2025-03-31T16:16:39.969012Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7488008333557594027 RawX2: 4503608217307386 } TabletId: 72075186224037889 State: 4 2025-03-31T16:16:39.969070Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2025-03-31T16:16:39.969273Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7488008333557594661 RawX2: 4503608217307478 } TabletId: 72075186224037894 State: 4 2025-03-31T16:16:39.969298Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037894, state: Offline, at schemeshard: 72057594046644480 2025-03-31T16:16:39.969451Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2025-03-31T16:16:39.969459Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2025-03-31T16:16:39.969512Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037894 state Offline 2025-03-31T16:16:39.969546Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:7 hive 72057594037968897 at ss 72057594046644480 2025-03-31T16:16:39.970736Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-03-31T16:16:39.970794Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037889 reason = ReasonStop 2025-03-31T16:16:39.970826Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037894 reason = ReasonStop 2025-03-31T16:16:39.971009Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-03-31T16:16:39.971172Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 7 ShardOwnerId: 72057594046644480 ShardLocalIdx: 7, at schemeshard: 72057594046644480 2025-03-31T16:16:39.971280Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037894 2025-03-31T16:16:39.971296Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-03-31T16:16:39.971360Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037894 2025-03-31T16:16:39.971518Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-03-31T16:16:39.971543Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-03-31T16:16:39.971588Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 
2025-03-31T16:16:39.971706Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037894 not found 2025-03-31T16:16:39.971732Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2025-03-31T16:16:39.972017Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2 2025-03-31T16:16:39.972040Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-03-31T16:16:39.972071Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:7 2025-03-31T16:16:39.972084Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:7 tabletId 72075186224037894 2025-03-31T16:16:39.972370Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-03-31T16:16:39.972541Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037889 2025-03-31T16:16:39.972612Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037889 >> TLocksTest::Range_CorrectNullDot [GOOD] >> TLocksTest::Range_EmptyKey >> TGRpcYdbTest::MakeListRemoveDirectory >> TFlatTest::LsPathId [GOOD] >> YdbYqlClient::CreateTableWithPartitionAtKeys >> TFlatTest::SelectRangeSkipNullKeys [GOOD] >> TGRpcAuthentication::ValidCredentials >> YdbIndexTable::AlterIndexImplBySuperUser ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::LsPathId [GOOD] Test command err: 2025-03-31T16:16:37.885808Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008324982159480:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:37.885892Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001c92/r3tmp/tmp0In8Nm/pdisk_1.dat 2025-03-31T16:16:38.202672Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:38.249136Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:38.249287Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:38.251073Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18259 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... 
(TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:38.480015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: / TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "/" PathId: 1 SchemeshardId: 0 PathType: EPathTypeDir CreateFinished: true } Children { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true } } Path: "/" TClient::Ls request: TClient::Ls response: Status: 128 StatusCode: ERROR Issues { message: "Default error" severity: 1 } SchemeStatus: 7 ErrorReason: "Invalid path" TClient::Ls request: // TClient::Ls response: Status: 128 StatusCode: ERROR Issues { message: "Default error" severity: 1 } SchemeStatus: 7 ErrorReason: "Invalid path" TClient::Ls request: / TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "/" PathId: 1 SchemeshardId: 0 PathType: EPathTypeDir CreateFinished: true } Children { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true } } Path: "/" TClient::Ls request: /dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743437798550 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } StoragePo... (TRUNCATED) TClient::Ls request: /dc-11 TClient::Ls response: Status: 128 StatusCode: PATH_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Root not found" TClient::Ls request: /dc-2 TClient::Ls response: Status: 128 StatusCode: PATH_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Root not found" waiting... 
TClient::Ls request: / TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "/" PathId: 1 SchemeshardId: 0 PathType: EPathTypeDir CreateFinished: true } Children { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true } } Path: "/" TClient::Ls request: /dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743437798550 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Berkanavt" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743437798564 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depr... (TRUNCATED) TClient::Ls request: /dc-1/Berkanavt TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Berkanavt" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743437798564 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 1 PathsLimit: 10000 Shard... (TRUNCATED) 2025-03-31T16:16:38.527249Z node 1 :TX_PROXY ERROR: Actor# [1:7488008329277127397:2324] txid# 281474976710659, issues: { message: "Check failed: path: \'/dc-1/Berkanavt\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } Error 1: Check failed: path: '/dc-1/Berkanavt', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeDir, state: EPathStateNoChanges) TClient::Ls request: /dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743437798550 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Berkanavt" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743437798564 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depr... 
(TRUNCATED) TClient::Ls request: /dc-1/arcadia TClient::Ls response: Status: 128 StatusCode: PATH_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Path not found" waiting... TClient::Ls request: /dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743437798550 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Berkanavt" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743437798564 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "arcadia" Path... (TRUNCATED) TClient::Ls request: /dc-1/arcadia TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "arcadia" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710660 CreateStep: 1743437798585 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsI... (TRUNCATED) 2025-03-31T16:16:40.914733Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008337290096924:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:40.914854Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001c92/r3tmp/tmpuGseoz/pdisk_1.dat 2025-03-31T16:16:41.013252Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:41.055058Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:41.055172Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:41.056797Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13359 TClient::Ls request: / TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "/" PathId: 1 SchemeshardId: 0 PathType: EPathTypeDir CreateFinished: true } Children { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true } } Path: "/" WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:41.198991Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeSkipNullKeys [GOOD] Test command err: 2025-03-31T16:16:38.660980Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008328580255831:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:38.661033Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001c8c/r3tmp/tmpnIU6BS/pdisk_1.dat 2025-03-31T16:16:38.949098Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:38.949252Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:38.951587Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:16:38.989028Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:2158 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:39.212491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:39.228954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:39.358066Z node 1 :TX_PROXY ERROR: Actor# [1:7488008332875223809:2361] txid# 281474976710659, issues: { message: "Mix freeze cmd with other options is forbidden" severity: 1 } Error 128: Mix freeze cmd with other options is forbidden 2025-03-31T16:16:39.360689Z node 1 :TX_PROXY ERROR: Actor# [1:7488008332875223822:2367] txid# 281474976710660, issues: { message: "Unexpected freeze state" severity: 1 } Error 128: Unexpected freeze state 2025-03-31T16:16:39.363042Z node 1 :TX_PROXY ERROR: Actor# [1:7488008332875223828:2372] txid# 281474976710661, issues: { message: "Mix freeze cmd with other options is forbidden" severity: 1 } Error 128: Mix freeze cmd with other options is forbidden 2025-03-31T16:16:39.365473Z node 1 :TX_PROXY ERROR: Actor# [1:7488008332875223834:2377] txid# 281474976710662, issues: { message: "Mix freeze cmd with other options is forbidden" severity: 1 } Error 128: Mix freeze cmd with other options is forbidden 2025-03-31T16:16:41.745427Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008339592616033:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:41.745482Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001c8c/r3tmp/tmpWsGNmX/pdisk_1.dat 2025-03-31T16:16:41.859045Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:41.892915Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:41.893029Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:41.894741Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9589 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:42.091664Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-03-31T16:16:42.113543Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... >> GrpcConnectionStringParserTest::NoDatabaseFlag [GOOD] >> GrpcConnectionStringParserTest::IncorrectConnectionString [GOOD] >> GrpcConnectionStringParserTest::CommonClientSettingsFromConnectionString >> TGRpcLdapAuthentication::LdapAuthWithInvalidRobouserLogin >> YdbYqlClient::ConnectDbAclIsOffWhenYdbRequestsWithoutDatabase >> TGRpcYdbTest::MakeListRemoveDirectory [GOOD] >> TGRpcYdbTest::ReadTable >> TLocksFatTest::RangeSetNotBreak [GOOD] >> TLocksTest::GoodSameKeyLock [GOOD] >> TLocksTest::GoodSameShardLock >> YdbYqlClient::SecurityTokenAuth [GOOD] >> YdbYqlClient::RetryOperationTemplate >> TLocksTest::Range_Pinhole [GOOD] >> TLocksTest::SetBreakSetEraseBreak >> TLocksTest::GoodNullLock [GOOD] >> YdbYqlClient::CreateTableWithPartitionAtKeys [GOOD] >> YdbYqlClient::CreateTableWithPartitionAtKeysAndAutoPartitioning >> YdbIndexTable::AlterIndexImplBySuperUser [GOOD] >> YdbIndexTable::CreateTableAddIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksFatTest::RangeSetNotBreak [GOOD] Test command err: 2025-03-31T16:16:35.489880Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008312876069233:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:35.490048Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001c98/r3tmp/tmpdYg4zU/pdisk_1.dat 2025-03-31T16:16:35.802026Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:35.852657Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:35.852827Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:35.854613Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7546 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:36.056115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 
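Among the tests queued above, YdbYqlClient::CreateTableWithPartitionAtKeys and CreateTableWithPartitionAtKeysAndAutoPartitioning exercise table creation with explicit split points. A minimal YQL sketch of the feature under test (schema and split keys invented for illustration):

    CREATE TABLE `/Root/Sharded` (
        Key Uint64,
        Value Utf8,
        PRIMARY KEY (Key)
    ) WITH (
        PARTITION_AT_KEYS = (10, 100, 1000),   -- pre-split into four datashards at these key borders
        AUTO_PARTITIONING_BY_SIZE = ENABLED,   -- and let shards keep splitting as they grow
        AUTO_PARTITIONING_PARTITION_SIZE_MB = 2048
    );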
2025-03-31T16:16:36.086119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:36.252193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:16:36.323935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:40.490379Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008312876069233:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:40.490530Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:16:41.740842Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008338169017878:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:41.740888Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001c98/r3tmp/tmpZnRN7T/pdisk_1.dat 2025-03-31T16:16:41.857703Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:41.881960Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:41.882035Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:41.883858Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19656 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:42.008272Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-03-31T16:16:42.027276Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:42.077577Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:42.137155Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:46.741857Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488008338169017878:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:46.741908Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; >> TLocksTest::BrokenSameKeyLock [GOOD] >> TLocksTest::BrokenSameShardLock >> TGRpcAuthentication::ValidCredentials [GOOD] >> TGRpcAuthentication::NoDescribeRights ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::GoodNullLock [GOOD] Test command err: 2025-03-31T16:16:10.385331Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008207587622342:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:10.385394Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cf7/r3tmp/tmp9lCwiR/pdisk_1.dat 2025-03-31T16:16:10.673576Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:10.769003Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:10.769149Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:10.771861Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27297 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-03-31T16:16:10.948867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:10.976913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:11.089093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:11.131740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:13.298148Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008221458026609:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:13.298235Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cf7/r3tmp/tmpofhBGI/pdisk_1.dat 2025-03-31T16:16:13.403434Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:13.436126Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:13.436238Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:13.437877Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8710 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:13.592227Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:13.616870Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:13.690213Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:13.734330Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:16.479585Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488008233892696819:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:16.479651Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cf7/r3tmp/tmpKsS4Ru/pdisk_1.dat 2025-03-31T16:16:16.612983Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:16.626534Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:16.626653Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:16.628151Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19771 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:16.795934Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:16.814224Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:16.887303Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:16.932941Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:19.930168Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488008244697852229:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:19.930252Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cf7/r3tmp/tmpXT6ksq/pdisk_1.dat 2025-03-31T16:16:20.031494Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:20.069111Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:20.069176Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:20.070864Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29724 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:20.256209Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:20.271695Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waitin ... 001cf7/r3tmp/tmpg9B72N/pdisk_1.dat 2025-03-31T16:16:31.760727Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:31.779857Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:31.779957Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:31.782487Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27507 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:32.033783Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:32.053436Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:32.111820Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:32.168577Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:16:36.074200Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7488008320293292826:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:36.074250Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cf7/r3tmp/tmpZGBsAA/pdisk_1.dat 2025-03-31T16:16:36.210441Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:36.238074Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:36.238174Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:36.239681Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3253 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:36.480308Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:36.500936Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:36.577212Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:36.635148Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:40.176417Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7488008334586401064:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:40.176540Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cf7/r3tmp/tmpyUeQ4y/pdisk_1.dat 2025-03-31T16:16:40.297780Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:40.333837Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:40.333955Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:40.335453Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23046 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:40.578092Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:40.600848Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:40.690985Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:40.765813Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:44.646053Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7488008353530358905:2057];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:44.646116Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cf7/r3tmp/tmpTiPX5i/pdisk_1.dat 2025-03-31T16:16:44.781129Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:44.810108Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:44.810219Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:44.812960Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:64027 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-03-31T16:16:45.089694Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:45.124492Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:45.191673Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:45.261257Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
>> TLocksTest::GoodDupLock [GOOD]
>> TLocksTest::CK_Range_GoodLock
>> GrpcConnectionStringParserTest::CommonClientSettingsFromConnectionString [GOOD]
>> LocalityOperation::LocksFromAnotherTenants+UseSink
>> YdbYqlClient::TestReadTableOneBatch
>> TGRpcLdapAuthentication::LdapAuthWithInvalidRobouserLogin [GOOD]
>> TGRpcLdapAuthentication::LdapAuthWithInvalidRobouserPassword
>> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientWithCorrectCerts_EmptyAllowedSids [GOOD]
>> TRegisterNodeOverDiscoveryService::ServerWithIssuerVerification_ClientWithSameIssuer
>> TGRpcClientLowTest::SimpleRequest
>> YdbYqlClient::ConnectDbAclIsOffWhenYdbRequestsWithoutDatabase [GOOD]
>> YdbYqlClient::ConnectDbAclIsOffWhenTokenIsOptionalAndNull
>> TGRpcYdbTest::ReadTable [GOOD]
>> TGRpcYdbTest::OperationTimeout
>> YdbYqlClient::RetryOperationTemplate [GOOD]
>> YdbYqlClient::RetryOperationSync
>> TLocksTest::BrokenDupLock [GOOD]
>> YdbYqlClient::CreateTableWithPartitionAtKeysAndAutoPartitioning [GOOD]
>> YdbYqlClient::CreateAndAltertTableWithPartitioningBySize
>> TFlatTest::AutoMergeBySize [GOOD]
>> TFlatTest::AutoSplitMergeQueue
>> YdbIndexTable::CreateTableAddIndex [GOOD]
>> YdbIndexTable::AlterTableAddIndex
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::BrokenDupLock [GOOD]
Test command err:
2025-03-31T16:16:15.962799Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008227735786148:2061];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:16:15.962927Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001ced/r3tmp/tmpmccXhA/pdisk_1.dat
2025-03-31T16:16:16.267816Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:16:16.328934Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:16:16.329139Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:16:16.330839Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:15492
WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'dc-1' success.
2025-03-31T16:16:16.502407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:16.527849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:16.685971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:16.756201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:18.912438Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008239827929270:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:18.912507Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001ced/r3tmp/tmpoLKAgf/pdisk_1.dat 2025-03-31T16:16:19.015378Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:19.052118Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:19.052197Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:19.053968Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11099 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:19.203911Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:19.221756Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:19.292539Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:19.361540Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:21.757425Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488008254499691625:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:21.757489Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001ced/r3tmp/tmpc3TBN5/pdisk_1.dat 2025-03-31T16:16:21.869201Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:21.899834Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:21.899913Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:21.901782Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10961 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:22.066834Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:22.086059Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:22.140672Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:22.212100Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:24.872153Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488008266587611528:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:24.872204Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001ced/r3tmp/tmpTsylRY/pdisk_1.dat 2025-03-31T16:16:24.960830Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:25.005012Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:25.005092Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:25.006223Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12717 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:25.195834Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:25.214813Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiti ... 01ced/r3tmp/tmpqFK9K0/pdisk_1.dat 2025-03-31T16:16:35.352135Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:35.383503Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:35.383595Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:35.384975Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:65531 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:35.603131Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:35.623525Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:35.683664Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:35.737890Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:39.629918Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7488008330277330064:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:39.629997Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001ced/r3tmp/tmpFmDUpv/pdisk_1.dat 2025-03-31T16:16:39.783358Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:39.816818Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:39.816904Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:39.818516Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10777 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:40.067069Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:40.090999Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:40.208163Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:40.261740Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:43.921801Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7488008350496837250:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:43.928315Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001ced/r3tmp/tmpiPAMuZ/pdisk_1.dat 2025-03-31T16:16:44.092510Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:44.112153Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:44.112235Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:44.113555Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15726 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:44.412206Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:44.433213Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:44.511608Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:44.571053Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:48.885076Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7488008368801493027:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:48.885136Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001ced/r3tmp/tmp1lVwGN/pdisk_1.dat 2025-03-31T16:16:49.016595Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:49.046804Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:49.046892Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:49.048607Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17907 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:49.331912Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:49.351953Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:49.427924Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:49.502743Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
>> TLocksTest::NoLocksSet [GOOD]
>> TLocksTest::MultipleLocks
>> TGRpcAuthentication::NoDescribeRights [GOOD]
>> TGRpcClientLowTest::BiStreamPing
>> TLocksTest::Range_BrokenLock1 [GOOD]
>> TGRpcLdapAuthentication::LdapAuthWithInvalidRobouserPassword [GOOD]
>> TGRpcLdapAuthentication::LdapAuthWithInvalidSearchFilter
>> TLocksTest::Range_GoodLock0 [GOOD]
>> TLocksTest::Range_GoodLock1
>> TGRpcClientLowTest::SimpleRequest [GOOD]
>> TGRpcClientLowTest::SimpleRequestDummyService
>> TLocksTest::BrokenLockUpdate [GOOD]
>> TLocksTest::BrokenNullLock
>> TLocksTest::SetBreakSetEraseBreak [GOOD]
>> TLocksTest::Range_CorrectDot [GOOD]
>> Secret::SimpleQueryService [GOOD]
>> YdbYqlClient::TestReadTableOneBatch [GOOD]
>> YdbYqlClient::TestReadTableNotNullBorder
>> TLocksTest::CK_BrokenLock [GOOD]
>> YdbMonitoring::SelfCheckWithNodesDying
>> YdbYqlClient::ConnectDbAclIsOffWhenTokenIsOptionalAndNull [GOOD]
>> YdbYqlClient::ColumnFamiliesWithStorageAndIndex
>> TGRpcYdbTest::OperationTimeout [GOOD]
>> TGRpcYdbTest::OperationCancelAfter
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::Range_BrokenLock1 [GOOD]
Test command err:
2025-03-31T16:16:18.267580Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008240836769518:2060];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:16:18.267694Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001ce5/r3tmp/tmpbKoWUl/pdisk_1.dat
2025-03-31T16:16:18.565350Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:16:18.565552Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:16:18.567642Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-31T16:16:18.586192Z node 1 :IMPORT WARN: Table profiles were not loaded
TClient is connected to server localhost:29450
WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'dc-1' success.
2025-03-31T16:16:18.863022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
waiting...
2025-03-31T16:16:18.886667Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:19.008179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:19.077811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:21.553873Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008255259861507:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:21.553950Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001ce5/r3tmp/tmpjCkDV6/pdisk_1.dat 2025-03-31T16:16:21.651727Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:21.692298Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:21.692382Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:21.694009Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24662 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:21.872519Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:21.889452Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:21.960945Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:22.007826Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:24.382893Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488008265922025745:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:24.382973Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001ce5/r3tmp/tmpTHjtXt/pdisk_1.dat 2025-03-31T16:16:24.496609Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:24.525840Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:24.525920Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:24.527644Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23873 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:24.677617Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:24.696324Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:24.768674Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:24.839269Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:27.775665Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488008278259131059:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:27.775768Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001ce5/r3tmp/tmpftAKcc/pdisk_1.dat 2025-03-31T16:16:27.877126Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:27.916369Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:27.916463Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:27.918038Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29565 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:28.116696Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:28.132926Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiti ... onnecting -> Connected TClient is connected to server localhost:4990 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-03-31T16:16:39.513610Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:39.520202Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:39.539835Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:39.607189Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:39.658318Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:43.425656Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7488008347588578896:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:43.425735Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001ce5/r3tmp/tmpNkJ2CA/pdisk_1.dat 2025-03-31T16:16:43.556999Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:43.566651Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:43.566759Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:43.568458Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5194 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:43.857829Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:43.864973Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:43.877959Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:43.958599Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:44.019009Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:47.306929Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7488008365285104976:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:47.307025Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001ce5/r3tmp/tmpHBwMcY/pdisk_1.dat 2025-03-31T16:16:47.421637Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:47.451910Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:47.452007Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:47.453344Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2819 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:47.712709Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:47.733828Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:47.857203Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:47.940956Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:51.482829Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7488008381095044445:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:51.482935Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001ce5/r3tmp/tmpoVvKEv/pdisk_1.dat 2025-03-31T16:16:51.629183Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:51.661190Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:51.661323Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:51.663174Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15075 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:51.892597Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:51.909627Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-03-31T16:16:51.914451Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:52.002688Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:52.077088Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::SimpleQueryService [GOOD] Test command err: 2025-03-31T16:14:38.746967Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:14:38.747633Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:14:38.747878Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000e11/r3tmp/tmpz3dgp0/pdisk_1.dat TServer::EnableGrpc on GrpcPort 15306, node 1 TClient is connected to server localhost:28234 2025-03-31T16:14:39.437602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:14:39.480916Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:14:39.481107Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:14:39.481151Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:14:39.481190Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:14:39.481652Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:14:39.520087Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:14:39.520270Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:14:39.532058Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected snapshot->GetSecrets().size() incorrect: SECRETS:ACCESS: Initialization finished REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 2025-03-31T16:14:51.427717Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:813:2681], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:51.427883Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:823:2686], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:51.427986Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:51.433139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-03-31T16:14:51.453204Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:827:2689], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-03-31T16:14:51.501361Z node 1 :TX_PROXY ERROR: Actor# [1:878:2721] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:14:51.754436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:1, at schemeshard: 72057594046644480 2025-03-31T16:14:52.752639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:14:53.249922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:1, at schemeshard: 72057594046644480 2025-03-31T16:14:54.222317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-03-31T16:14:55.120049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-31T16:14:55.665739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-03-31T16:14:56.994003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 snapshot->GetSecrets().size() incorrect: SECRETS:ACCESS: 2025-03-31T16:14:57.494745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 snapshot->GetAccess().size() incorrect: SECRETS:root@builtin:secret1:100;ACCESS: REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 snapshot->GetSecrets().size() incorrect: SECRETS:root@builtin:secret1:100;root@builtin:secret1_1:100;ACCESS: 2025-03-31T16:15:14.727698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:15:14.727779Z node 1 :IMPORT WARN: Table profiles were not loaded FINISHED_REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `200`;EXPECTATION=1;WAITING=1 REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `200`;RESULT=;EXPECTATION=1 snapshot->GetSecrets().size() incorrect: SECRETS:root@builtin:secret1:100;root@builtin:secret1_1:200;ACCESS: FINISHED_REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `200`;EXPECTATION=1;WAITING=1 2025-03-31T16:15:37.834913Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715719. Ctx: { TraceId: 01jqpfsbh2bmhv3rm5sktma073, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjdhZjA1MTMtM2YyNWYxM2MtZmI3MmZkZWItNzU1YmQyNWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2025-03-31T16:16:00.808306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715737:0, at schemeshard: 72057594046644480 2025-03-31T16:16:01.890582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715744:0, at schemeshard: 72057594046644480 2025-03-31T16:16:03.370341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715755:0, at schemeshard: 72057594046644480 2025-03-31T16:16:03.937172Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715758:0, at schemeshard: 72057594046644480 REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 snapshot->GetAccess().size() incorrect (zero expects): SECRETS:root@builtin:secret1:abcde;root@builtin:secret1_1:200;ACCESS:root@builtin:secret1:test@test1; FINISHED_REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2025-03-31T16:16:16.790011Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715770. Ctx: { TraceId: 01jqpfthmr3xdgqy40qwc1z231, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjY3YWE1OWItNzEzZWQxMTUtYTVmYjkyMDgtMTczMjUxNmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 REQUEST=DROP OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 snapshot->GetAccess().size() incorrect: SECRETS:root@builtin:secret1:abcde;root@builtin:secret1_1:200;ACCESS: FINISHED_REQUEST=DROP OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1` (TYPE SECRET);RESULT=;EXPECTATION=1 snapshot->GetSecrets().size() incorrect: SECRETS:root@builtin:secret1_1:200;ACCESS: FINISHED_REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=1;WAITING=1 2025-03-31T16:16:54.319448Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715810. Ctx: { TraceId: 01jqpfvpd3a4jb6vys684777th, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzA5YmYxZjMtNjI0N2UyOTUtMmI3MTE5N2UtOWExOWY4MTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=SELECT * FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT * FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1
>> TLocksTest::CK_Range_BrokenLockInf [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::SetBreakSetEraseBreak [GOOD] Test command err: 2025-03-31T16:16:31.649103Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008297532824663:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:31.649261Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001caf/r3tmp/tmpjhylr2/pdisk_1.dat 2025-03-31T16:16:31.967231Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:32.024045Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:32.024244Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:32.026044Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:62157 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:32.246342Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:32.291946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:32.429317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:32.477564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:34.929332Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008309157314662:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:34.929415Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001caf/r3tmp/tmpKSlQl1/pdisk_1.dat 2025-03-31T16:16:35.022428Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:35.064615Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:35.064690Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:35.066137Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:31164 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:35.201673Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:35.216933Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:35.272375Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:35.318026Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:38.109457Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488008325770402710:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:38.109513Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001caf/r3tmp/tmpUTHSOv/pdisk_1.dat 2025-03-31T16:16:38.225233Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:38.253201Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:38.253279Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:38.254711Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21260 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:38.376142Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:38.395383Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:38.448116Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:38.488550Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:41.461599Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488008340118043203:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:41.461668Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001caf/r3tmp/tmpLp9ttO/pdisk_1.dat 2025-03-31T16:16:41.573459Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:41.607364Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:41.607429Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:41.608753Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24362 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-03-31T16:16:41.839258Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:41.856633Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-03-31T16:16:41.863555Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:41.936264Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:42.008169Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:45.197413Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7488008355911061210:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:45.197464Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001caf/r3tmp/tmpBaCM2z/pdisk_1.dat 2025-03-31T16:16:45.331813Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:45.354487Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:45.354581Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:45.356263Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12591 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-03-31T16:16:45.564082Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:45.587888Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:45.645924Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:45.716618Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:48.818204Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7488008371355170842:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:48.818274Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001caf/r3tmp/tmpX2abPb/pdisk_1.dat 2025-03-31T16:16:49.009114Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:49.012137Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:49.012227Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:49.013540Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19587 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:49.205235Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:49.224313Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:49.294305Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:49.345104Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:52.754184Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7488008386933774864:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:52.754256Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001caf/r3tmp/tmpsxj2Sa/pdisk_1.dat 2025-03-31T16:16:52.883132Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:52.907566Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:52.907661Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:52.911592Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14362 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:53.099052Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:53.118405Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:53.195324Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-31T16:16:53.253712Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::Range_CorrectDot [GOOD] Test command err: 2025-03-31T16:16:18.472897Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008240001047106:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:18.473805Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001ce4/r3tmp/tmp362caL/pdisk_1.dat 2025-03-31T16:16:18.802088Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:18.864882Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:18.865031Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:18.866808Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27658 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:19.080249Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:19.104052Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:19.226037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:19.293961Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:21.330262Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008254588318934:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:21.330381Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001ce4/r3tmp/tmpU2wf32/pdisk_1.dat 2025-03-31T16:16:21.419174Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:21.464416Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:21.464506Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:21.466049Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:31790 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:21.623775Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:21.645332Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:21.720053Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:21.766337Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:24.479621Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488008267064869039:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:24.479692Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001ce4/r3tmp/tmpEV2ado/pdisk_1.dat 2025-03-31T16:16:24.599127Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:24.629578Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:24.629674Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:24.631053Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17324 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:24.802550Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:24.822152Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:24.893114Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:24.936369Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:27.688896Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488008280433121020:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:27.688978Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001ce4/r3tmp/tmpEBzxw8/pdisk_1.dat 2025-03-31T16:16:27.784278Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:27.831480Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:27.831567Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:27.833213Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23746 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:28.015817Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:28.022427Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:28.037984Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation ... UCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:39.378950Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
waiting... 2025-03-31T16:16:39.396482Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:39.478069Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:39.537927Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:43.306155Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7488008348564278044:2057];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:43.306216Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001ce4/r3tmp/tmpTFVm9c/pdisk_1.dat 2025-03-31T16:16:43.444562Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:43.461426Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:43.461804Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:43.463346Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:62169 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:43.720289Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:43.730117Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:43.740725Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-03-31T16:16:43.745799Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:43.819975Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:43.885887Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:47.916244Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7488008364184994687:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:47.916407Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001ce4/r3tmp/tmpXdhTEC/pdisk_1.dat 2025-03-31T16:16:48.044460Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:48.060443Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:48.060540Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:48.062317Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30986 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:48.273603Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:48.295830Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:48.370219Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:48.450689Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:52.273719Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7488008387536821463:2140];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:52.274300Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001ce4/r3tmp/tmp8XHhoq/pdisk_1.dat 2025-03-31T16:16:52.414694Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:52.432104Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:52.432210Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:52.433919Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19684 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:52.691568Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:52.699274Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:52.713726Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-03-31T16:16:52.719664Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:52.810106Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:52.886697Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
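Every record in these captures has one fixed shape: an ISO-8601 timestamp, a node id, a component tag, a severity, and the message text (for example "2025-03-31T16:16:52.273719Z node 10 :METADATA_PROVIDER WARN: ..."). A minimal sketch for tallying severities per component across a flattened capture like this one; the regex and all names in it are illustrative assumptions, not part of ya or YDB tooling:

import re
import sys
from collections import Counter

# Assumed record shape, taken from the excerpts in this log:
#   "<ISO-8601 timestamp>Z node <N> :<COMPONENT> <LEVEL>: <message>"
RECORD = re.compile(
    r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z "
    r"node (?P<node>\d+) "
    r":(?P<component>\w+) "
    r"(?P<level>CRIT|ERROR|WARN|NOTICE|INFO|DEBUG):"
)

def tally(stream):
    counts = Counter()
    for line in stream:
        # Several records can share one physical line in a flattened
        # capture, so scan for every match instead of anchoring at line start.
        for m in RECORD.finditer(line):
            counts[(m["component"], m["level"])] += 1
    return counts

if __name__ == "__main__":
    for (component, level), n in sorted(tally(sys.stdin).items()):
        print(f"{component:<22} {level:<6} {n}")

Run over this section, such a tally would show that every METADATA_PROVIDER ERROR is the same scheme_cache_undelivered_message raised while probing //dc-1/.metadata/initialization/migrations, which suggests repeated startup-time probing rather than distinct failures.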
>> YdbYqlClient::CreateAndAltertTableWithPartitioningBySize [GOOD]
>> YdbYqlClient::CreateAndAltertTableWithReadReplicasSettings
>> LocalityOperation::LocksFromAnotherTenants+UseSink [GOOD]
>> LocalityOperation::LocksFromAnotherTenants-UseSink
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::CK_BrokenLock [GOOD]
Test command err:
2025-03-31T16:16:19.443609Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008246618474572:2060];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:16:19.444105Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001ce2/r3tmp/tmpAgcX9Y/pdisk_1.dat
2025-03-31T16:16:19.757348Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:16:19.831335Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:16:19.831537Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:16:19.833242Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:65149
WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'dc-1' success.
2025-03-31T16:16:20.044324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
waiting...
2025-03-31T16:16:20.076765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:16:20.206768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:16:20.251107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:16:22.255393Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008258785597309:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:22.255480Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001ce2/r3tmp/tmpDonXP9/pdisk_1.dat 2025-03-31T16:16:22.364264Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:22.391939Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:22.392022Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:22.393443Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10988 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:22.553561Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:22.574563Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:22.646161Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:22.692093Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:25.127169Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488008269971132338:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:25.127207Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001ce2/r3tmp/tmpmWEjgH/pdisk_1.dat 2025-03-31T16:16:25.216648Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:25.260031Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:25.260110Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:25.261620Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21518 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:25.430456Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:25.452468Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:25.522947Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:25.563437Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:28.617254Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488008286353523917:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:28.617353Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001ce2/r3tmp/tmpxWHshL/pdisk_1.dat 2025-03-31T16:16:28.704762Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:28.752099Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:28.752172Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:28.753513Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29633 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:28.936156Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:28.951901Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiti ... (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:40.043950Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6741 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:40.291689Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:40.314942Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:40.366522Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:40.412719Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:43.850855Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7488008347909357234:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:43.850929Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001ce2/r3tmp/tmpZrF7Wb/pdisk_1.dat 2025-03-31T16:16:44.011785Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:44.044228Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:44.044325Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:44.046008Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8234 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-03-31T16:16:44.285049Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:16:44.292118Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:44.308260Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:44.382896Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:44.507815Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:48.092926Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7488008371678656312:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:48.093052Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001ce2/r3tmp/tmpWNV0dl/pdisk_1.dat 2025-03-31T16:16:48.234027Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:48.270069Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:48.270189Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:48.271548Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18618 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:48.540514Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:48.575979Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:48.675259Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:48.754903Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:52.236175Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7488008386825205132:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:52.236230Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001ce2/r3tmp/tmpVVz8I5/pdisk_1.dat 2025-03-31T16:16:52.362357Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:52.395777Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:52.395883Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:52.397306Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:64160 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'dc-1' success.
2025-03-31T16:16:52.641835Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:16:52.648318Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:16:52.662843Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:16:52.744264Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:16:52.823926Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
waiting...
>> TLocksTest::Range_IncorrectNullDot2 [GOOD]
>> YdbIndexTable::AlterTableAddIndex [GOOD]
>> YdbLogStore::AlterLogStore
>> YdbYqlClient::TestReadTableMultiShardWholeTable
>> YdbTableBulkUpsert::Nulls
>> TGRpcClientLowTest::BiStreamPing [GOOD]
>> TGRpcClientLowTest::BiStreamCancelled
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::CK_Range_BrokenLockInf [GOOD]
Test command err:
2025-03-31T16:16:21.367198Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008252211999570:2063];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:16:21.367273Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cde/r3tmp/tmpwLAZWh/pdisk_1.dat
2025-03-31T16:16:21.697089Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:16:21.732813Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:16:21.732973Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:16:21.734318Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:63818
WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:21.960979Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:21.987712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:22.100130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:22.170885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:24.134289Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008265497171239:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:24.134371Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cde/r3tmp/tmpPdYsx0/pdisk_1.dat 2025-03-31T16:16:24.229734Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:24.268910Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:24.269009Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:24.270544Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10626 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:24.445579Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:24.465543Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:24.514484Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:24.582804Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:27.205263Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488008280240175818:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:27.205352Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cde/r3tmp/tmpaoqxqh/pdisk_1.dat 2025-03-31T16:16:27.323455Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:27.351405Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:27.351495Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:27.353043Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13257 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:27.526824Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:27.545822Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:27.595009Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:27.667676Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:30.582001Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488008293267536365:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:30.582084Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cde/r3tmp/tmpk0TLa5/pdisk_1.dat 2025-03-31T16:16:30.669548Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:30.719438Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:30.719520Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:30.721203Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6383 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:30.859985Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:30.878319Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waitin ... (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:41.242763Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29482 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:41.425089Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:41.447812Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:41.520643Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:41.590646Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:45.029816Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7488008358257676151:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:45.029920Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cde/r3tmp/tmpTIjzvK/pdisk_1.dat 2025-03-31T16:16:45.168574Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:45.201268Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:45.201391Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:45.203248Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:62850 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:45.445399Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:45.453025Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:45.463999Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-03-31T16:16:45.469645Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:45.561001Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-03-31T16:16:45.617630Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-31T16:16:49.375786Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7488008374426871037:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:49.375899Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cde/r3tmp/tmpUnezCE/pdisk_1.dat 2025-03-31T16:16:49.515131Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:49.547248Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:49.547354Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:49.549098Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:31462 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:49.854516Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:49.876563Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:49.958874Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:50.032706Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:53.664680Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7488008392324253661:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:53.664730Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cde/r3tmp/tmpe1q1NY/pdisk_1.dat 2025-03-31T16:16:53.787841Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:53.827170Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:53.827276Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:53.830191Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20931 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:54.080042Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:54.105671Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:54.189617Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:54.244341Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
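Every node bring-up in these captures also walks the same HIVE chain: VolatileState Unknown -> Disconnected -> Connecting -> Connected. A small sketch that flags any capture deviating from that order; the allowed-edge table is inferred solely from these excerpts and is not an authoritative model of HIVE's state machine:

import re

EDGE = re.compile(r"Node\((\d+), .*?\) VolatileState: (\w+) -> (\w+)")

# Transition order observed throughout this log.
ALLOWED = {("Unknown", "Disconnected"),
           ("Disconnected", "Connecting"),
           ("Connecting", "Connected")}

def check(lines):
    last = {}  # node id -> most recently observed state
    for line in lines:
        for node, src, dst in EDGE.findall(line):
            if src == "Unknown":
                # Node ids restart with each "Test command err:" capture,
                # so treat an Unknown source as a fresh node.
                last.pop(node, None)
            if last.get(node, src) != src or (src, dst) not in ALLOWED:
                yield f"node {node}: unexpected transition {src} -> {dst}"
            last[node] = dst

Applied to the lines of this section, check() stays silent: despite their WARN severity, every bring-up shown here completes the full chain, consistent with the [GOOD] verdicts around these records.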
>> TGRpcClientLowTest::SimpleRequestDummyService [GOOD]
>> TGRpcLdapAuthentication::LdapAuthServerIsUnavailable
>> YdbOlapStore::ManyTables
>> TRegisterNodeOverLegacyService::ServerWithoutCertVerification_ClientProvidesCorrectCerts
>> TGRpcLdapAuthentication::LdapAuthWithInvalidSearchFilter [GOOD]
>> TGRpcLdapAuthentication::LdapAuthWithInvalidLogin
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::Range_IncorrectNullDot2 [GOOD]
Test command err:
2025-03-31T16:16:21.498099Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008253537768369:2061];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:16:21.498227Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cdb/r3tmp/tmpPmqJpz/pdisk_1.dat
2025-03-31T16:16:21.815229Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:16:21.878485Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:16:21.878583Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:16:21.880366Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:24894
WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'dc-1' success.
2025-03-31T16:16:22.104385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
waiting...
2025-03-31T16:16:22.128398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:16:22.248448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:16:22.322282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:16:24.248000Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008265102061997:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:24.248084Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cdb/r3tmp/tmpz6TdoC/pdisk_1.dat 2025-03-31T16:16:24.350567Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:24.379654Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:24.379759Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:24.381278Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1841 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:24.537718Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:24.556010Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:24.628815Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:24.698165Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:27.583515Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488008279440695185:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:27.583652Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cdb/r3tmp/tmp2I0iJU/pdisk_1.dat 2025-03-31T16:16:27.697249Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:27.734311Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:27.734407Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:27.735964Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19134 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:27.937820Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:27.958920Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:28.036376Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:28.081997Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:30.972089Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488008294092635876:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:30.972251Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cdb/r3tmp/tmpDQYJrB/pdisk_1.dat 2025-03-31T16:16:31.071985Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:31.118342Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:31.118425Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:31.119680Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29803 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:31.296218Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:31.311324Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waitin ... : HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:41.360227Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:41.361700Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3936 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:41.556361Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:41.573928Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:41.630929Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:41.702657Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:45.215505Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7488008358247673812:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:45.215588Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cdb/r3tmp/tmpZJEeJp/pdisk_1.dat 2025-03-31T16:16:45.397798Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:45.432054Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:45.432155Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:45.434015Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14832 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-03-31T16:16:45.696178Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:45.718644Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:45.818972Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:45.891760Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:49.773062Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7488008374844179650:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:49.773142Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cdb/r3tmp/tmpabcxQY/pdisk_1.dat 2025-03-31T16:16:49.910834Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:49.944719Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:49.944820Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:49.946458Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13008 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:50.221022Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:50.227446Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:50.245150Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:50.310444Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:50.363729Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:54.446815Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7488008393818888910:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:54.446897Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cdb/r3tmp/tmpUFXpN1/pdisk_1.dat 2025-03-31T16:16:54.609508Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:54.629665Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:54.629776Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:54.631245Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17537 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-03-31T16:16:54.914949Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:54.940345Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:55.079967Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:55.135252Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
>> YdbYqlClient::TestYqlWrongTable >> TRegisterNodeOverDiscoveryService::ServerWithIssuerVerification_ClientWithSameIssuer [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithoutCertVerification_ClientProvidesCorrectCerts >> TGRpcNewClient::YqlQueryWithParams >> YdbYqlClient::TestReadTableNotNullBorder [GOOD] >> YdbYqlClient::TestReadTableNotNullBorder2 >> TGRpcYdbTest::OperationCancelAfter [GOOD] >> TGRpcYdbTest::KeepAlive >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientWithCorrectCerts >> YdbYqlClient::RetryOperationSync [GOOD] >> YdbYqlClient::RetryOperationLimitedDuration >> YdbYqlClient::ColumnFamiliesWithStorageAndIndex [GOOD] >> YdbYqlClient::ColumnFamiliesDescriptionWithStorageAndIndex >> YdbYqlClient::CreateAndAltertTableWithReadReplicasSettings [GOOD] >> YdbYqlClient::CreateTableWithMESettings >> YdbLogStore::AlterLogStore [GOOD] >> YdbLogStore::AlterLogTable >> TLocksTest::MultipleLocks [GOOD] >> TLocksTest::Range_IncorrectDot2 [GOOD] >> TGRpcLdapAuthentication::LdapAuthServerIsUnavailable [GOOD] >> TGRpcLdapAuthentication::LdapAuthSetIncorrectDomain >> YdbYqlClient::TestReadTableMultiShardWholeTable [GOOD] >> YdbYqlClient::TestReadTableMultiShardWholeTableUseSnapshot >> TGRpcLdapAuthentication::LdapAuthWithInvalidLogin [GOOD] >> TGRpcLdapAuthentication::LdapAuthWithInvalidPassword >> TRegisterNodeOverLegacyService::ServerWithoutCertVerification_ClientProvidesCorrectCerts [GOOD] >> TRegisterNodeOverLegacyService::ServerWithoutCertVerification_ClientProvidesEmptyClientCerts >> TGRpcClientLowTest::BiStreamCancelled [GOOD] >> TGRpcClientLowTest::ChangeAcl >> YdbYqlClient::TestYqlWrongTable [GOOD] >> YdbYqlClient::TraceId >> TLocksTest::Range_EmptyKey [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::Range_IncorrectDot2 [GOOD] Test command err: 2025-03-31T16:16:26.420843Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008276296793334:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:26.420980Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cc8/r3tmp/tmpub0V6W/pdisk_1.dat 2025-03-31T16:16:26.726289Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:26.805895Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:26.806032Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:26.807665Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16931 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:27.020722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:27.048340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:27.156453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:27.195969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:29.228342Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008289343724573:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:29.228427Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cc8/r3tmp/tmpLooXCk/pdisk_1.dat 2025-03-31T16:16:29.326720Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:29.365205Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:29.365273Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:29.366941Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23226 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:29.500703Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:29.518218Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:29.587166Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:29.631680Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:32.536839Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488008299974228179:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:32.536924Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cc8/r3tmp/tmpm06Hn2/pdisk_1.dat 2025-03-31T16:16:32.653889Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:32.677833Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:32.677914Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:32.679614Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26729 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:32.868641Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:32.877077Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:32.884011Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-03-31T16:16:32.889392Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:32.969427Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:33.018659Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:35.880099Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488008312508362073:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:35.880151Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cc8/r3tmp/tmpflLS2Z/pdisk_1.dat 2025-03-31T16:16:35.990374Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:36.030325Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:36.030408Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:36.032183Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14556 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:36.252864Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubD ... 001cc8/r3tmp/tmpBA3QKH/pdisk_1.dat 2025-03-31T16:16:47.146371Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:47.174083Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:47.174180Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:47.175872Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2226 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:47.425522Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:47.449091Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:47.518043Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:47.594590Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:51.210255Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7488008383764304792:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:51.210367Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cc8/r3tmp/tmp3uJWJO/pdisk_1.dat 2025-03-31T16:16:51.296374Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:51.339386Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:51.339486Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:51.340934Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19581 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:51.565709Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:51.583966Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:51.657033Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:51.706071Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:55.341645Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7488008400928116181:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:55.341735Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cc8/r3tmp/tmpcdQjei/pdisk_1.dat 2025-03-31T16:16:55.459210Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:55.489701Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:55.489820Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:55.492146Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:32012 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:55.734840Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:55.755947Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:55.823623Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:55.899966Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:59.755389Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7488008418519580158:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:59.755519Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cc8/r3tmp/tmpcNkKpc/pdisk_1.dat 2025-03-31T16:16:59.910019Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:59.942398Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:59.942489Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:59.948278Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26232 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:17:00.248716Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:17:00.272335Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:00.345654Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:00.415395Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::MultipleLocks [GOOD] Test command err: 2025-03-31T16:16:38.585845Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008325039096840:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:38.585983Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001c8f/r3tmp/tmp5jA2an/pdisk_1.dat 2025-03-31T16:16:38.868999Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:38.952970Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:38.953119Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:38.954855Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5832 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:39.158342Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:39.174150Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:39.190133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:39.314619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-31T16:16:39.361842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:41.730055Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008338762350202:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:41.730142Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001c8f/r3tmp/tmpavSRIy/pdisk_1.dat 2025-03-31T16:16:41.831955Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:41.874529Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:41.874629Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:41.876069Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27562 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:42.025734Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:42.040879Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:42.119411Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:42.189488Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:44.999488Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488008352533858606:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:44.999576Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001c8f/r3tmp/tmpbeFBKN/pdisk_1.dat 2025-03-31T16:16:45.106239Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:45.142729Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:45.142799Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:45.144310Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21024 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:45.294471Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:45.310951Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:45.384709Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:45.456878Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:48.556560Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488008372159470059:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:48.556610Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001c8f/r3tmp/tmpwokFgi/pdisk_1.dat 2025-03-31T16:16:48.694839Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:48.723848Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:48.723957Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:48.726121Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:62813 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:48.960770Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:48.977916Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-31T16:16:48.983433Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:49.045021Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:49.098399Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:51.817620Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7488008381925852226:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:51.817689Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001c8f/r3tmp/tmpLpwCAE/pdisk_1.dat 2025-03-31T16:16:51.926359Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:51.955503Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:51.955592Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:51.956811Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:65005 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:52.168454Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:52.174874Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:52.188811Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-31T16:16:52.262317Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:52.320250Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:55.594451Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7488008401670972748:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:55.594577Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001c8f/r3tmp/tmpOrFBe1/pdisk_1.dat 2025-03-31T16:16:55.734583Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:55.748844Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:55.748945Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:55.750360Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14152 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:55.985049Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:56.012857Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:56.068862Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:56.117034Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:59.819685Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7488008418550293280:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:59.819764Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001c8f/r3tmp/tmpJoeb9U/pdisk_1.dat 2025-03-31T16:16:59.998946Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:00.018489Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:00.018597Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:00.022205Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26446 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:17:00.291477Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-03-31T16:17:00.313290Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-31T16:17:00.394691Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:00.474671Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
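The three segments above repeat the same single-node bring-up sequence for nodes 5, 6 and 7: a METADATA_PROVIDER warning while //dc-1/.metadata does not exist yet, pdisk setup, the Hive VolatileState walk Unknown -> Disconnected -> Connecting -> Connected, then the schemeshard warnings emitted while the test schema is created. A quick way to confirm every node completed that walk is to extract the transitions. The sketch below is a hypothetical helper, not part of ya or YDB; it assumes only the "node N :HIVE WARN: ... VolatileState: A -> B" layout visible in these records and does not parse record boundaries, so treat it as a triage aid rather than an exact trace.

import re
import sys
from collections import defaultdict

# Matches the Hive state-change records shown above, e.g.
# "node 5 :HIVE WARN: HIVE#... Node(5, ...) VolatileState: Unknown -> Disconnected"
TRANSITION = re.compile(
    r"node (?P<node>\d+) :HIVE WARN: .*?"
    r"VolatileState: (?P<src>\w+) -> (?P<dst>\w+)"
)

def hive_transitions(text):
    """Map node id -> ordered list of (from, to) VolatileState changes."""
    states = defaultdict(list)
    for m in TRANSITION.finditer(text):
        states[int(m["node"])].append((m["src"], m["dst"]))
    return states

if __name__ == "__main__":
    for node, steps in sorted(hive_transitions(sys.stdin.read()).items()):
        chain = " -> ".join([steps[0][0]] + [dst for _, dst in steps])
        status = "OK" if steps[-1][1] == "Connected" else "NOT CONNECTED"
        print(f"node {node}: {chain} [{status}]")

Piped a section like this one on stdin (e.g. python3 hive_states.py < run.log, file name illustrative), it should report nodes 5, 6 and 7 each ending in Connected.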
>> KqpWorkloadService::TestHandlerActorCleanup [GOOD] >> TGRpcNewClient::YqlQueryWithParams [GOOD] >> TGRpcNewClient::YqlExplainDataQuery >> LocalityOperation::LocksFromAnotherTenants-UseSink [GOOD] >> YdbYqlClient::TestReadTableNotNullBorder2 [GOOD] >> YdbYqlClient::TestReadTableSnapshot >> TGRpcYdbTest::KeepAlive [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::Range_EmptyKey [GOOD] Test command err: 2025-03-31T16:16:27.218961Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008279976214621:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:27.219167Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cc4/r3tmp/tmpqDsRu4/pdisk_1.dat 2025-03-31T16:16:27.558807Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:27.610893Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:27.611103Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:27.612695Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:64844 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:27.856334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:27.881410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:27.978661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:28.050138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:30.093621Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008293365209033:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:30.093688Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cc4/r3tmp/tmpTZbgUf/pdisk_1.dat 2025-03-31T16:16:30.196967Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:30.231544Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:30.231631Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:30.233120Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18096 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:30.384017Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:30.400520Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:30.471481Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:30.519632Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:33.226219Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488008304786034785:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:33.226295Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cc4/r3tmp/tmpg9SK5P/pdisk_1.dat 2025-03-31T16:16:33.346074Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:33.372485Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:33.372574Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:33.374141Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25130 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:33.558324Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:33.579353Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:33.625823Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:33.697702Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:36.488297Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488008317848914977:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:36.488389Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cc4/r3tmp/tmpPIgS4o/pdisk_1.dat 2025-03-31T16:16:36.591100Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:36.631484Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:36.631580Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:36.633202Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9435 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:36.845607Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:36.861919Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waitin ... (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:47.713352Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21924 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:47.972082Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:47.992772Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:48.054250Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:48.112213Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-31T16:16:52.188994Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7488008387873101936:2104];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:52.189233Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cc4/r3tmp/tmpSxgImS/pdisk_1.dat 2025-03-31T16:16:52.355552Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:52.360960Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:52.361076Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:52.363099Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2800 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:52.610951Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:52.618063Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:52.634637Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:52.721670Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:52.814899Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:56.538406Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7488008405518828654:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:56.538468Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cc4/r3tmp/tmphQ0sZ3/pdisk_1.dat 2025-03-31T16:16:56.752342Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:56.769290Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:56.769400Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:56.772862Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22322 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:57.077427Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:57.101468Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:57.180843Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:57.241562Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:01.223798Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7488008424279898316:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:01.226139Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cc4/r3tmp/tmpw8ZjZZ/pdisk_1.dat 2025-03-31T16:17:01.404434Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:01.422281Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:01.422388Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:01.424749Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9912 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:17:01.731617Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:01.739225Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:01.758043Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:01.837695Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:01.913905Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
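The Range_EmptyKey output above is dominated by repeated FLAT_TX_SCHEMESHARD "propose itself is undo unsafe" warnings, which accompany every ESchemeOpCreateTable/ESchemeOpAlterSubDomain in these runs and do not affect the [GOOD] verdict. When triaging a block like this it helps to tally records by component and severity, so the genuine ERROR lines (here, the recurring METADATA_PROVIDER "cannot detect path existence" error at each node start) stand out from the noise. The sketch below is a minimal, hypothetical filter; it assumes the ":COMPONENT SEVERITY:" prefix used by the YDB log lines in this report.

import re
import sys
from collections import Counter

# Matches the severity prefix of each record, e.g. ":FLAT_TX_SCHEMESHARD WARN:"
RECORD = re.compile(
    r":(?P<comp>[A-Z][A-Z0-9_]*) (?P<sev>TRACE|DEBUG|INFO|WARN|ERROR):"
)

def tally(text):
    """Count (component, severity) pairs across the whole captured output."""
    return Counter((m["comp"], m["sev"]) for m in RECORD.finditer(text))

if __name__ == "__main__":
    for (comp, sev), n in tally(sys.stdin.read()).most_common():
        print(f"{comp:<24} {sev:<5} {n:>6}")

On the block above this would show FLAT_TX_SCHEMESHARD WARN as by far the most frequent pair, with METADATA_PROVIDER contributing one WARN and one ERROR per node start.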
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> KqpWorkloadService::TestHandlerActorCleanup [GOOD] Test command err: 2025-03-31T16:14:06.442898Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488007674459815139:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:14:06.443040Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00115f/r3tmp/tmpD3NCRW/pdisk_1.dat 2025-03-31T16:14:06.818696Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14140, node 1 2025-03-31T16:14:06.858759Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:14:06.859248Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:14:06.883331Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:14:06.932750Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:14:06.932778Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:14:06.932787Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:14:06.932910Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20232 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:14:07.270501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:14:09.503765Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-03-31T16:14:09.511439Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YzMxMzBkY2MtYjAxMjRhZDUtNzE3MWM4NzktOGRmNjI5YmQ=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id YzMxMzBkY2MtYjAxMjRhZDUtNzE3MWM4NzktOGRmNjI5YmQ= 2025-03-31T16:14:09.511988Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7488007687344717665:2329], Start check tables existence, number paths: 2 2025-03-31T16:14:09.512088Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YzMxMzBkY2MtYjAxMjRhZDUtNzE3MWM4NzktOGRmNjI5YmQ=, ActorId: [1:7488007687344717674:2330], ActorState: unknown state, session actor bootstrapped 2025-03-31T16:14:09.512494Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 1 2025-03-31T16:14:09.512536Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-03-31T16:14:09.512555Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2025-03-31T16:14:09.523837Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7488007687344717665:2329], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-03-31T16:14:09.523956Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7488007687344717665:2329], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-03-31T16:14:09.523988Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7488007687344717665:2329], Successfully finished 2025-03-31T16:14:09.524075Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-03-31T16:14:09.533602Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007687344717691:2302], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-03-31T16:14:09.537930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:14:09.542920Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007687344717691:2302], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2025-03-31T16:14:09.544711Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007687344717691:2302], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2025-03-31T16:14:09.551911Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007687344717691:2302], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:14:09.641049Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007687344717691:2302], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-03-31T16:14:09.645450Z node 1 :TX_PROXY ERROR: Actor# [1:7488007687344717742:2334] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:14:09.645656Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488007687344717691:2302], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2025-03-31T16:14:09.649736Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZmQyZWM4NTYtYWVjZGJjNDAtZGVkOWQ5YTAtYTE3MDRkZjc=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ZmQyZWM4NTYtYWVjZGJjNDAtZGVkOWQ5YTAtYTE3MDRkZjc= 2025-03-31T16:14:09.649872Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZmQyZWM4NTYtYWVjZGJjNDAtZGVkOWQ5YTAtYTE3MDRkZjc=, ActorId: [1:7488007687344717750:2331], ActorState: unknown state, session actor bootstrapped 2025-03-31T16:14:09.650017Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2025-03-31T16:14:09.650044Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root 2025-03-31T16:14:09.650182Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007687344717752:2332], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-03-31T16:14:09.650214Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZmQyZWM4NTYtYWVjZGJjNDAtZGVkOWQ5YTAtYTE3MDRkZjc=, ActorId: [1:7488007687344717750:2331], ActorState: ReadyState, TraceId: 01jqpfpnqj0cakp6p1ygzfj586, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [1:7488007687344717749:2340] database: Root databaseId: /Root pool id: sample_pool_id 2025-03-31T16:14:09.650265Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved new request from [1:7488007687344717750:2331], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=ZmQyZWM4NTYtYWVjZGJjNDAtZGVkOWQ5YTAtYTE3MDRkZjc= 2025-03-31T16:14:09.650349Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7488007687344717753:2333], Database: /Root, Start database fetching 2025-03-31T16:14:09.650558Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7488007687344717753:2333], Database: /Root, Database info successfully fetched, serverless: 0 2025-03-31T16:14:09.650681Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched database info, DatabaseId: /Root, Serverless: 0 2025-03-31T16:14:09.650755Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [1:7488007687344717761:2334], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=ZmQyZWM4NTYtYWVjZGJjNDAtZGVkOWQ5YTAtYTE3MDRkZjc=, Start pool fetching 2025-03-31T16:14:09.650803Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] 
[TPoolFetcherActor] ActorId: [1:7488007687344717763:2335], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-03-31T16:14:09.651625Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007687344717752:2332], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-03-31T16:14:09.651700Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488007687344717763:2335], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-03-31T16:14:09.651737Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root 2025-03-31T16:14:09.651754Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new handler for pool /Root/sample_pool_id 2025-03-31T16:14:09.652044Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [1:7488007687344717761:2334], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=ZmQyZWM4NTYtYWVjZGJjNDAtZGVkOWQ5YTAtYTE3MDRkZjc=, Pool info successfully resolved 2025-03-31T16:14:09.652106Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7488007687344717766:2336], DatabaseId: /Root, PoolId: sample_pool_id, Subscribed on schemeboard notifications for path: [OwnerId: 72057594046644480, LocalPathId: 5] 2025-03-31T16:14:09.652158Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmQyZWM4NTYtYWVjZGJjNDAtZGVkOWQ5YTAtYTE3MDRkZjc= 2025-03-31T16:14:09.652225Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7488007687344717766:2336], DatabaseId: /Root, PoolId: sample_pool_id, Received new request, worker id: [1:7488007687344717750:2331], session id: ydb://session/3?node_id=1&id=ZmQyZWM4NTYtYWVjZGJjNDAtZGVkOWQ5YTAtYTE3MDRkZjc= 2025-03-31T16:14:09.652311Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Request placed into pool, DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=ZmQyZWM4NTYtYWVjZGJjNDAtZGVkOWQ5YTAtYTE3MDRkZjc= 2025-03-31T16:14:09.652379Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Got create teables request, DatabaseId: /Root, PoolId: sample_pool_id 2025-03-31T16:14:09.652406Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService ... 
1jqpfscqgaww4ya2ec5ycxgyv, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-03-31T16:15:38.799512Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=MzEzN2I2NGEtNjQ5NTE1NzUtMmVmZWMwZGMtMzkyODM4, ActorId: [6:7488008069693067912:2442], ActorState: ExecuteState, TraceId: 01jqpfscqgaww4ya2ec5ycxgyv, EndCleanup, isFinal: 1 2025-03-31T16:15:38.799552Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=MzEzN2I2NGEtNjQ5NTE1NzUtMmVmZWMwZGMtMzkyODM4, ActorId: [6:7488008069693067912:2442], ActorState: ExecuteState, TraceId: 01jqpfscqgaww4ya2ec5ycxgyv, Sent query response back to proxy, proxyRequestId: 15, proxyId: [6:7488008039628295843:2267] 2025-03-31T16:15:38.799584Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=MzEzN2I2NGEtNjQ5NTE1NzUtMmVmZWMwZGMtMzkyODM4, ActorId: [6:7488008069693067912:2442], ActorState: unknown state, TraceId: 01jqpfscqgaww4ya2ec5ycxgyv, Cleanup temp tables: 0 2025-03-31T16:15:38.799848Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=MzEzN2I2NGEtNjQ5NTE1NzUtMmVmZWMwZGMtMzkyODM4, ActorId: [6:7488008069693067912:2442], ActorState: unknown state, TraceId: 01jqpfscqgaww4ya2ec5ycxgyv, Session actor destroyed 2025-03-31T16:15:38.806269Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=N2NmNjkyMTAtYjRiNmVjYzEtNDQwZDY0NzYtNjkwNmQ5Zjc=, ActorId: [6:7488008056808165465:2330], ActorState: ReadyState, TraceId: 01jqpfscsnec3pyjwvb2cz9qyf, received request, proxyRequestId: 18 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DDL text: DROP RESOURCE POOL sample_pool_id; DROP RESOURCE POOL default; rpcActor: [0:0:0] database: /Root databaseId: /Root pool id: default 2025-03-31T16:15:38.832362Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [6:7488008056808165543:2337], DatabaseId: /Root, PoolId: sample_pool_id, Got delete notification 2025-03-31T16:15:38.832490Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2025-03-31T16:15:38.832553Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488008069693067983:2456], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-03-31T16:15:38.833063Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488008069693067983:2456], DatabaseId: /Root, PoolId: sample_pool_id, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool sample_pool_id not found or you don't have access permissions } 2025-03-31T16:15:38.833199Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool sample_pool_id, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool sample_pool_id not found or you don't have access permissions } 2025-03-31T16:15:38.838907Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [6:7488008056808165739:2360], DatabaseId: /Root, PoolId: default, Got delete notification 2025-03-31T16:15:38.839008Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: default 2025-03-31T16:15:38.839101Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488008069693068001:2457], DatabaseId: /Root, PoolId: default, Start pool fetching 2025-03-31T16:15:38.839373Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488008069693068001:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:38.839468Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:38.842998Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=N2NmNjkyMTAtYjRiNmVjYzEtNDQwZDY0NzYtNjkwNmQ5Zjc=, ActorId: [6:7488008056808165465:2330], ActorState: ExecuteState, TraceId: 01jqpfscsnec3pyjwvb2cz9qyf, Cleanup start, isFinal: 0 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [6:7488008069693067969:2330] WorkloadServiceCleanup: 0 2025-03-31T16:15:38.844832Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=N2NmNjkyMTAtYjRiNmVjYzEtNDQwZDY0NzYtNjkwNmQ5Zjc=, ActorId: [6:7488008056808165465:2330], ActorState: CleanupState, TraceId: 01jqpfscsnec3pyjwvb2cz9qyf, EndCleanup, isFinal: 0 2025-03-31T16:15:38.844902Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=N2NmNjkyMTAtYjRiNmVjYzEtNDQwZDY0NzYtNjkwNmQ5Zjc=, ActorId: [6:7488008056808165465:2330], ActorState: CleanupState, TraceId: 01jqpfscsnec3pyjwvb2cz9qyf, Sent query response back to proxy, proxyRequestId: 18, proxyId: [6:7488008039628295843:2267] Wait pool handlers 0.000016s: number handlers = 2 Wait pool handlers 1.000151s: number handlers = 2 Wait pool handlers 2.000288s: number handlers = 2 Wait pool handlers 3.000427s: number handlers = 2 Wait pool handlers 4.000541s: number handlers = 2 Wait pool handlers 5.000671s: number handlers = 2 Wait pool handlers 6.000807s: number handlers = 2 Wait pool handlers 7.000943s: number handlers = 2 2025-03-31T16:15:46.190840Z node 6 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:15:46.190875Z node 6 :IMPORT WARN: Table profiles were not loaded Wait pool handlers 8.001082s: number handlers = 2 Wait pool handlers 9.001244s: number handlers = 2 Wait pool handlers 10.001396s: number handlers = 2 Wait pool handlers 11.001548s: number handlers = 2 Wait pool handlers 12.001662s: number handlers = 2 Wait pool handlers 13.001791s: number handlers = 2 2025-03-31T16:15:52.258519Z node 6 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolHandlerActorBase] ActorId: [6:7488008056808165543:2337], DatabaseId: /Root, PoolId: sample_pool_id, Try to start scheduled refresh Wait pool handlers 14.001937s: number handlers = 2 Wait pool handlers 15.002068s: number handlers = 2 Wait pool handlers 16.002197s: number handlers = 2 Wait pool handlers 17.002330s: number handlers = 2 Wait pool handlers 18.002487s: number handlers = 2 Wait pool handlers 19.002644s: number handlers = 2 Wait pool handlers 20.002763s: number handlers = 2 Wait pool handlers 21.002890s: number handlers = 2 Wait pool handlers 22.003024s: number handlers = 2 Wait pool handlers 23.003154s: number handlers = 2 Wait pool handlers 24.003317s: number handlers = 2 Wait pool handlers 25.003462s: number handlers = 2 Wait pool handlers 26.003587s: number handlers = 2 Wait pool handlers 27.003717s: number handlers = 2 Wait pool handlers 28.003888s: number handlers = 2 Wait pool handlers 29.004034s: number handlers = 2 Wait pool handlers 30.004176s: number handlers = 2 Wait pool handlers 31.004324s: number handlers = 2 Wait pool handlers 32.004484s: number handlers = 2 Wait pool handlers 33.004633s: number handlers = 2 Wait pool handlers 34.004802s: number handlers = 2 Wait pool handlers 35.004894s: number handlers = 2 Wait pool handlers 36.005045s: number handlers = 2 Wait pool handlers 37.005204s: number handlers = 2 Wait pool handlers 38.005347s: number handlers = 2 Wait pool handlers 39.005489s: number handlers = 2 Wait pool handlers 40.005638s: number handlers = 2 Wait pool 
handlers 41.005788s: number handlers = 2 Wait pool handlers 42.005955s: number handlers = 2 Wait pool handlers 43.006103s: number handlers = 2 Wait pool handlers 44.006268s: number handlers = 2 Wait pool handlers 45.006431s: number handlers = 2 Wait pool handlers 46.006575s: number handlers = 2 Wait pool handlers 47.007750s: number handlers = 2 Wait pool handlers 48.007889s: number handlers = 2 Wait pool handlers 49.008021s: number handlers = 2 Wait pool handlers 50.008161s: number handlers = 2 Wait pool handlers 51.008303s: number handlers = 2 Wait pool handlers 52.008444s: number handlers = 2 Wait pool handlers 53.008846s: number handlers = 2 Wait pool handlers 54.009099s: number handlers = 2 Wait pool handlers 55.009250s: number handlers = 2 Wait pool handlers 56.009396s: number handlers = 2 Wait pool handlers 57.009534s: number handlers = 2 Wait pool handlers 58.009658s: number handlers = 2 Wait pool handlers 59.009788s: number handlers = 2 Wait pool handlers 60.009947s: number handlers = 2 Wait pool handlers 61.010069s: number handlers = 2 Wait pool handlers 62.010211s: number handlers = 2 Wait pool handlers 63.010340s: number handlers = 2 Wait pool handlers 64.010572s: number handlers = 2 Wait pool handlers 65.012829s: number handlers = 2 Wait pool handlers 66.014727s: number handlers = 2 Wait pool handlers 67.014863s: number handlers = 2 Wait pool handlers 68.015000s: number handlers = 2 Wait pool handlers 69.015123s: number handlers = 2 Wait pool handlers 70.015569s: number handlers = 2 Wait pool handlers 71.016789s: number handlers = 2 Wait pool handlers 72.016909s: number handlers = 2 Wait pool handlers 73.017028s: number handlers = 2 Wait pool handlers 74.017141s: number handlers = 2 Wait pool handlers 75.020795s: number handlers = 2 Wait pool handlers 76.020945s: number handlers = 2 Wait pool handlers 77.021070s: number handlers = 2 Wait pool handlers 78.023783s: number handlers = 2 Wait pool handlers 79.023918s: number handlers = 2 Wait pool handlers 80.024038s: number handlers = 2 Wait pool handlers 81.024175s: number handlers = 2 Wait pool handlers 82.024566s: number handlers = 2 Wait pool handlers 83.024756s: number handlers = 2 Wait pool handlers 84.024891s: number handlers = 2 Wait pool handlers 85.025026s: number handlers = 2 Wait pool handlers 86.025167s: number handlers = 2 2025-03-31T16:17:05.161793Z node 6 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [TPoolHandlerActorBase] ActorId: [6:7488008056808165543:2337], DatabaseId: /Root, PoolId: sample_pool_id, Got stop pool handler request, waiting for 0 requests 2025-03-31T16:17:05.161792Z node 6 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [TPoolHandlerActorBase] ActorId: [6:7488008056808165739:2360], DatabaseId: /Root, PoolId: default, Got stop pool handler request, waiting for 0 requests 2025-03-31T16:17:05.161945Z node 6 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Got stop pool handler response, DatabaseId: /Root, PoolId: sample_pool_id 2025-03-31T16:17:05.161989Z node 6 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Got stop pool handler response, DatabaseId: /Root, PoolId: default 2025-03-31T16:17:05.887627Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=N2NmNjkyMTAtYjRiNmVjYzEtNDQwZDY0NzYtNjkwNmQ5Zjc=, ActorId: [6:7488008056808165465:2330], ActorState: ReadyState, Session closed due to explicit close event 2025-03-31T16:17:05.887689Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=N2NmNjkyMTAtYjRiNmVjYzEtNDQwZDY0NzYtNjkwNmQ5Zjc=, ActorId: 
[6:7488008056808165465:2330], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-03-31T16:17:05.887723Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=N2NmNjkyMTAtYjRiNmVjYzEtNDQwZDY0NzYtNjkwNmQ5Zjc=, ActorId: [6:7488008056808165465:2330], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-03-31T16:17:05.887756Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=N2NmNjkyMTAtYjRiNmVjYzEtNDQwZDY0NzYtNjkwNmQ5Zjc=, ActorId: [6:7488008056808165465:2330], ActorState: unknown state, Cleanup temp tables: 0 2025-03-31T16:17:05.887851Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=N2NmNjkyMTAtYjRiNmVjYzEtNDQwZDY0NzYtNjkwNmQ5Zjc=, ActorId: [6:7488008056808165465:2330], ActorState: unknown state, Session actor destroyed >> YdbYqlClient::TestDoubleKey >> TAuthenticationWithSqlExecution::CreateAlterUserWithHash ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> LocalityOperation::LocksFromAnotherTenants-UseSink [GOOD] Test command err: 2025-03-31T16:16:43.157715Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008350420306172:2142];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:43.158467Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000de3/r3tmp/tmpQTVjlo/pdisk_1.dat 2025-03-31T16:16:43.614033Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:43.615529Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:43.622363Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:16:43.623052Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11323, node 1 2025-03-31T16:16:43.855678Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:16:43.855707Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:16:43.855716Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:16:43.855861Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4775 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:16:44.405595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:47.618084Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488008366753500878:2246];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:47.629278Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000de3/r3tmp/tmpQ6CBlp/pdisk_1.dat 2025-03-31T16:16:47.772948Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:47.815538Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:47.815669Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:47.819532Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4366, node 4 2025-03-31T16:16:47.903695Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:16:47.903719Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:16:47.903727Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:16:47.903840Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23602 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:16:48.136323Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:51.897943Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7488008382482043212:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:51.898042Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000de3/r3tmp/tmpnB9U5Z/pdisk_1.dat 2025-03-31T16:16:52.005394Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:52.035773Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:52.035865Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:52.040281Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64069, node 7 2025-03-31T16:16:52.153133Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:16:52.153158Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:16:52.153166Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:16:52.153315Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10027 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:16:52.392458Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:10027 2025-03-31T16:16:52.774302Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:52.796130Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:53.301438Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7488008392039615781:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:53.301514Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_tenant_0/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:16:53.320280Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:53.320378Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:53.338253Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 9 Cookie 9 2025-03-31T16:16:53.339006Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:16:53.371538Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:53.401268Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:53.925105Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7488008393125408954:2274];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:53.925219Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_tenant_1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:16:53.937510Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:53.937611Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:53.940398Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 8 Cookie 8 2025-03-31T16:16:53.945197Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:16:55.533712Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:16:55.693500Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshar ... hildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:16:59.917834Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
TClient is connected to server localhost:5124 2025-03-31T16:17:00.245880Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:00.282224Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:00.788735Z node 12 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[12:7488008420091538433:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:00.788817Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_tenant_0/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:17:00.861823Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:00.861915Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:00.884650Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 12 Cookie 12 2025-03-31T16:17:00.885895Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:17:00.948439Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:00.982496Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:17:01.496576Z node 11 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[11:7488008423930074141:2142];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:01.497265Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_tenant_1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:17:01.584984Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:01.585069Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:01.588215Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 11 Cookie 11 2025-03-31T16:17:01.589021Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:17:03.217263Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:17:03.329417Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:17:03.416701Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7488008432487219363:2369], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:03.416717Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7488008432487219355:2366], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:03.416786Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:03.420880Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715664:3, at schemeshard: 72057594046644480 2025-03-31T16:17:03.441567Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7488008432487219369:2370], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715664 completed, doublechecking } 2025-03-31T16:17:03.503886Z node 10 :TX_PROXY ERROR: Actor# [10:7488008432487219434:3378] txid# 281474976715665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:17:03.609750Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jqpfvzdp516hft1xrb0x1c9p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ZGVmZjczZTAtOTg5ZjAwOTgtYjgyOWFkZDktOGVlYjAxYWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:17:03.735046Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jqpfvzn1bdkp78d9wfy9h1hk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ZGVmZjczZTAtOTg5ZjAwOTgtYjgyOWFkZDktOGVlYjAxYWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:17:03.865533Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jqpfvzrcamh9kgx7a5vez775, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ZGVmZjczZTAtOTg5ZjAwOTgtYjgyOWFkZDktOGVlYjAxYWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:17:04.240319Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7488008415307348144:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:04.240443Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:17:05.243799Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jqpfvzrcamh9kgx7a5vez775, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ZGVmZjczZTAtOTg5ZjAwOTgtYjgyOWFkZDktOGVlYjAxYWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:17:05.247954Z node 10 :KQP_EXECUTER ERROR: ActorId: [10:7488008441077154222:2359] TxId: 281474976715669. Ctx: { TraceId: 01jqpfvzrcamh9kgx7a5vez775, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ZGVmZjczZTAtOTg5ZjAwOTgtYjgyOWFkZDktOGVlYjAxYWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Handle TEvProposeTransactionResult: unable to select coordinator. Tx canceled, actorId: [10:7488008441077154222:2359], previously selected coordinator: 72075186224037888, coordinator selected at propose result: 72075186224037890 2025-03-31T16:17:05.248394Z node 10 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=10&id=ZGVmZjczZTAtOTg5ZjAwOTgtYjgyOWFkZDktOGVlYjAxYWM=, ActorId: [10:7488008432487219180:2359], ActorState: ExecuteState, TraceId: 01jqpfvzrcamh9kgx7a5vez775, Create QueryResponse for error on request, msg: 2025-03-31T16:17:05.249690Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01jqpfvzrcamh9kgx7a5vez775, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ZGVmZjczZTAtOTg5ZjAwOTgtYjgyOWFkZDktOGVlYjAxYWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-31T16:17:05.256616Z node 10 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 12 2025-03-31T16:17:05.257118Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-03-31T16:17:05.259226Z node 10 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 11 2025-03-31T16:17:05.336591Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-03-31T16:17:05.504187Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [11:7488008441109943719:2320], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:17:05.504360Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_tenant_1/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:17:05.602229Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [11:7488008441109943719:2320], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:17:05.789080Z node 12 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[12:7488008420091538433:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:05.789180Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_tenant_0/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:17:05.793271Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [11:7488008441109943719:2320], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:17:06.090361Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [11:7488008441109943719:2320], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:17:06.496167Z node 11 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7488008423930074141:2142];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:06.496256Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_tenant_1/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:17:06.505252Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_tenant_1/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:17:06.579675Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [11:7488008441109943719:2320], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcYdbTest::KeepAlive [GOOD] Test command err: 2025-03-31T16:16:44.394629Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008351187493930:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:44.394681Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dda/r3tmp/tmpbieQN5/pdisk_1.dat 2025-03-31T16:16:44.766995Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24306, node 1 2025-03-31T16:16:44.833332Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:44.833445Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:44.872834Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:16:44.881639Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:16:44.881658Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:16:44.881666Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:16:44.881775Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20177 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:16:45.169530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:45.298619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-31T16:16:48.505889Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488008370593199805:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:48.506947Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dda/r3tmp/tmpOfb78K/pdisk_1.dat 2025-03-31T16:16:48.643934Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:48.670343Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:48.670407Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:48.676720Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17874, node 4 2025-03-31T16:16:48.739812Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:16:48.739842Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:16:48.739849Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:16:48.739990Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24349 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:16:49.048705Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:49.157451Z node 4 :GRPC_SERVER DEBUG: Got grpc request# CreateTableRequest, traceId# 01jqpfvhg5635nb4gqmkkvc5sw, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:51772, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-03-31T16:16:49.160230Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:16:49.163445Z node 4 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-03-31T16:16:49.163516Z node 4 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-03-31T16:16:49.163523Z node 4 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-03-31T16:16:49.163545Z node 4 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-03-31T16:16:49.238388Z node 4 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-03-31T16:16:49.238481Z node 4 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-03-31T16:16:49.238492Z node 4 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-03-31T16:16:49.238550Z node 4 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-03-31T16:16:49.302353Z node 4 :GRPC_SERVER DEBUG: Got grpc request# CreateSessionRequest, traceId# 01jqpfvhmp93nvv02tn1zfpkat, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:51772, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-03-31T16:16:51.444594Z node 4 :GRPC_SERVER DEBUG: Got grpc request# ExecuteDataQueryRequest, traceId# 01jqpfvkqm77nn7st1s4m2hx2x, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:51772, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-03-31T16:16:51.446811Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488008383478102849:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:16:51.446837Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488008383478102854:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:16:51.446911Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:16:51.449967Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-31T16:16:51.453858Z node 4 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-03-31T16:16:51.453853Z node 4 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-03-31T16:16:51.453948Z node 4 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-03-31T16:16:51.453967Z node 4 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-03-31T16:16:51.462476Z node 4 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-03-31T16:16:51.462479Z node 4 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-03-31T16:16:51.462567Z node 4 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-03-31T16:16:51.462577Z node 4 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-03-31T16:16:51.466195Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7488008383478102863:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-31T16:16:51.554134Z node 4 :TX_PROXY ERROR: Actor# [4:7488008383478102941:2801] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:16:52.034243Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqpfvkqm77nn7st1s4m2hx2x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=YTQ4YjJhZTMtNDNlYTkyMjAtMTNlMGJjNmYtNTAxZWM1MjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:16:52.050750Z node 4 :GRPC_SERVER DEBUG: Got grpc request# ReadTableRequest, traceId# 01jqpfvmajc5tm3gvzg6ytwyyz, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:51772, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-03-31T16:16:52.052310Z node 4 :READ_TABLE_API NOTICE: [4:7488008387773070299:2351] Finish grpc stream, status: 400010 2025-03-31T16:16:52.054503Z node 4 :GRPC_SERVER DEBUG: Got grpc request# ReadTableRequest, traceId# 01jqpfvmap067eb1x2p1rctngs, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:51772, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-03-31T16:16:52.067010Z node 4 :READ_TABLE_API DEBUG: [4:7488008387773070300:2352] Adding quota request to queue ShardId: 0, TxId: 281474976715662 2025-03-31T16:16:52.067061Z node 4 :READ_TABLE_API DEBUG: [4:7488008387773070300:2352] Assign stream quota to Shard 0, Quota 5, TxId 281474976715662 Reserved: 5 of 25, Queued: 0 2025-03-31T16:16:52.072596Z node 4 :READ_TABLE_API DEBUG: [4:7488008387773070300:2352] got stream part, size: 290, RU required: 128 rate limiter absent 2025-03-31T16:16:52.073007Z node 4 :READ_TABLE_API DEBUG: [4:7488008387773070300:2352] Starting inactivity timer for 600.000000s with tag 3 2025-03-31T16:16:5 ... 
am quota to Shard 0, Quota 5, TxId 281474976715666 Reserved: 5 of 25, Queued: 0 2025-03-31T16:16:52.161519Z node 4 :READ_TABLE_API DEBUG: [4:7488008387773070346:2356] got stream part, size: 244, RU required: 128 rate limiter absent 2025-03-31T16:16:52.161871Z node 4 :READ_TABLE_API DEBUG: [4:7488008387773070346:2356] Starting inactivity timer for 600.000000s with tag 3 2025-03-31T16:16:52.164409Z node 4 :READ_TABLE_API NOTICE: [4:7488008387773070346:2356] Finish grpc stream, status: 400000 2025-03-31T16:16:52.170039Z node 4 :GRPC_SERVER DEBUG: [0x51a00004bc80] received request Name# SchemeOperation ok# false data# peer# current inflight# 0 2025-03-31T16:16:52.170731Z node 4 :GRPC_SERVER DEBUG: [0x51a0000d4480] received request Name# SchemeOperationStatus ok# false data# peer# current inflight# 0 2025-03-31T16:16:52.170921Z node 4 :GRPC_SERVER DEBUG: [0x51a00004c280] received request Name# SchemeDescribe ok# false data# peer# current inflight# 0 2025-03-31T16:16:52.171099Z node 4 :GRPC_SERVER DEBUG: [0x51a0000d3880] received request Name# ChooseProxy ok# false data# peer# current inflight# 0 2025-03-31T16:16:52.171246Z node 4 :GRPC_SERVER DEBUG: [0x51a00004aa80] received request Name# PersQueueRequest ok# false data# peer# current inflight# 0 2025-03-31T16:16:52.171441Z node 4 :GRPC_SERVER DEBUG: [0x51a0000d4a80] received request Name# SchemeInitRoot ok# false data# peer# current inflight# 0 2025-03-31T16:16:52.171585Z node 4 :GRPC_SERVER DEBUG: [0x51a0000d2c80] received request Name# ResolveNode ok# false data# peer# current inflight# 0 2025-03-31T16:16:52.171733Z node 4 :GRPC_SERVER DEBUG: [0x51a0000d2080] received request Name# FillNode ok# false data# peer# current inflight# 0 2025-03-31T16:16:52.171927Z node 4 :GRPC_SERVER DEBUG: [0x51a0000d1480] received request Name# DrainNode ok# false data# peer# current inflight# 0 2025-03-31T16:16:52.172101Z node 4 :GRPC_SERVER DEBUG: [0x51a00011fa80] received request Name# BlobStorageConfig ok# false data# peer# current inflight# 0 2025-03-31T16:16:52.172250Z node 4 :GRPC_SERVER DEBUG: [0x51a000120080] received request Name# HiveCreateTablet ok# false data# peer# current inflight# 0 2025-03-31T16:16:52.172579Z node 4 :GRPC_SERVER DEBUG: [0x51a0000d5080] received request Name# TestShardControl ok# false data# peer# current inflight# 0 2025-03-31T16:16:52.172831Z node 4 :GRPC_SERVER DEBUG: [0x51a0000d6280] received request Name# CmsRequest ok# false data# peer# current inflight# 0 2025-03-31T16:16:52.173008Z node 4 :GRPC_SERVER DEBUG: [0x51a0000d5680] received request Name# ConsoleRequest ok# false data# peer# current inflight# 0 2025-03-31T16:16:52.173159Z node 4 :GRPC_SERVER DEBUG: [0x51a00004a480] received request Name# InterconnectDebug ok# false data# peer# current inflight# 0 2025-03-31T16:16:52.173330Z node 4 :GRPC_SERVER DEBUG: [0x51a000120680] received request Name# TabletStateRequest ok# false data# peer# current inflight# 0 2025-03-31T16:16:52.175919Z node 4 :GRPC_SERVER DEBUG: [0x51a0000d5c80] received request Name# RegisterNode ok# false data# peer# current inflight# 0 2025-03-31T16:16:53.495638Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7488008390707908411:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:53.495681Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/0l5c/000dda/r3tmp/tmpubndUj/pdisk_1.dat 2025-03-31T16:16:53.609833Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:53.641696Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:53.641785Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:53.644400Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22824, node 7 2025-03-31T16:16:53.692652Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:16:53.692675Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:16:53.692682Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:16:53.692783Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2312 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:16:53.887683Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting...
: Error: Operation timeout. 2025-03-31T16:16:57.921225Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7488008409565354734:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:57.922065Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dda/r3tmp/tmpjfnHGU/pdisk_1.dat 2025-03-31T16:16:58.082697Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:58.109745Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:58.109842Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:58.112630Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14054, node 10 2025-03-31T16:16:58.178934Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:16:58.178958Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:16:58.178967Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:16:58.179137Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11189 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:16:58.467305Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting...
: Error: Operation cancelled. 2025-03-31T16:17:02.610372Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7488008430106095959:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:02.610449Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dda/r3tmp/tmpJClUUS/pdisk_1.dat 2025-03-31T16:17:02.757426Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:02.792958Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:02.793060Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:02.797048Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62196, node 13 2025-03-31T16:17:02.896711Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:02.896748Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:02.896758Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:02.896919Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61460 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:03.208576Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... >> YdbYqlClient::CreateTableWithMESettings [GOOD] |87.7%| [TA] $(B)/ydb/core/kqp/workload_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> YdbLogStore::AlterLogTable [FAIL] >> TGRpcLdapAuthentication::LdapAuthSetIncorrectDomain [GOOD] >> TGRpcLdapAuthentication::DisableBuiltinAuthMechanism >> YdbYqlClient::ColumnFamiliesDescriptionWithStorageAndIndex [GOOD] >> YdbYqlClient::ColumnFamiliesExternalBlobsWithoutDefaultProfile |87.7%| [TA] {RESULT} $(B)/ydb/core/kqp/workload_service/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TTableProfileTests::OverwriteCachingPolicy >> YdbYqlClient::TestDecimal1 >> TRegisterNodeOverLegacyService::ServerWithoutCertVerification_ClientProvidesEmptyClientCerts [GOOD] >> TTableProfileTests::DescribeTableWithPartitioningPolicy >> YdbYqlClient::TestReadTableMultiShardWholeTableUseSnapshot [GOOD] >> YdbYqlClient::TestReadTableMultiShardWithDescribe >> TGRpcLdapAuthentication::LdapAuthWithInvalidPassword [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::CreateTableWithMESettings [GOOD] Test command err: 2025-03-31T16:16:44.974945Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008354252175298:2151];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:44.975296Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dd9/r3tmp/tmpa3tuED/pdisk_1.dat 2025-03-31T16:16:45.332608Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:45.361636Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:45.361748Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:45.367588Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16959, node 1 2025-03-31T16:16:45.482892Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:16:45.482918Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:16:45.482924Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:16:45.483040Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5461 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:16:45.758750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:47.849625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:16:49.642614Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488008372417780041:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:49.642736Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dd9/r3tmp/tmpqzJreV/pdisk_1.dat 2025-03-31T16:16:49.783269Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:49.820037Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:49.820117Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:49.824150Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1941, node 4 2025-03-31T16:16:49.916236Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:16:49.916264Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:16:49.916273Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:16:49.916427Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22336 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:16:50.187903Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:52.629987Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:16:54.223290Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7488008395113896135:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:54.223400Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dd9/r3tmp/tmpmN4ju1/pdisk_1.dat 2025-03-31T16:16:54.348771Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:54.378261Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:54.378348Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:54.385383Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61216, node 7 2025-03-31T16:16:54.443520Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:16:54.443544Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:16:54.443556Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:16:54.443816Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19381 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:16:54.746112Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:57.368406Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:16:57.477775Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-31T16:16:57.520632Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:16:57.562497Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:16:59.026869Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7488008419134945711:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:59.026962Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dd9/r3tmp/tmpENkbU1/pdisk_1.dat 2025-03-31T16:16:59.129639Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:59.157553Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:59.157650Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:59.161789Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2334, node 10 2025-03-31T16:16:59.241682Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:16:59.241706Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:16:59.241713Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:16:59.241848Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9378 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:16:59.509540Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:02.260302Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:17:02.375771Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-31T16:17:02.384142Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 10, TabletId: 72075186224037888 not found 2025-03-31T16:17:02.384191Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 10, TabletId: 72075186224037888 not found 2025-03-31T16:17:04.100409Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7488008438766184732:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:04.101266Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dd9/r3tmp/tmpNCgBtG/pdisk_1.dat 2025-03-31T16:17:04.231640Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:04.268688Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:04.268788Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:04.272268Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9632, node 13 2025-03-31T16:17:04.329227Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:04.329255Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:04.329263Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:04.329417Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19121 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:17:04.617793Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:07.664790Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 >> TGRpcYdbTest::CreateTableBadRequest >> YdbYqlClient::TraceId [GOOD] >> YdbYqlClient::Utf8DatabasePassViaHeader >> TRegisterNodeOverDiscoveryService::ServerWithoutCertVerification_ClientProvidesEmptyClientCerts >> TLocksTest::GoodSameShardLock [GOOD] >> TGRpcClientLowTest::ChangeAcl [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithoutCertVerification_ClientProvidesCorrectCerts [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithOutCertVerification_ClientProvidesExpiredCert ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcLdapAuthentication::LdapAuthWithInvalidPassword [GOOD] Test command err: 2025-03-31T16:16:47.969685Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008366387324483:2270];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:47.969793Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dd2/r3tmp/tmpOCgn0a/pdisk_1.dat 2025-03-31T16:16:48.343805Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:48.356370Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:48.356479Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 14687, node 1 2025-03-31T16:16:48.362356Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:16:48.362410Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:16:48.368651Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:16:48.410633Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:16:48.410683Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:16:48.410695Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:16:48.410845Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3362 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:16:48.755410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:52.014452Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488008389441384158:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:52.014594Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dd2/r3tmp/tmpZxtgg7/pdisk_1.dat 2025-03-31T16:16:52.112011Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:52.136543Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:52.136604Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:52.138618Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26372, node 4 2025-03-31T16:16:52.194853Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:16:52.194875Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:16:52.194883Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:16:52.194992Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61848 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:16:52.513341Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:56.374030Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7488008405887074587:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:56.374077Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dd2/r3tmp/tmp7lm4Gf/pdisk_1.dat 2025-03-31T16:16:56.499911Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:56.533859Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:56.533944Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:56.536162Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27217, node 7 2025-03-31T16:16:56.607044Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:16:56.607069Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:16:56.607077Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:16:56.607255Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18983 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:16:56.850552Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:00.868869Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7488008423547027000:2072];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dd2/r3tmp/tmp0kLIPp/pdisk_1.dat 2025-03-31T16:17:00.936639Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:17:01.040790Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:01.078566Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:01.078658Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:01.082619Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19388, node 10 2025-03-31T16:17:01.187379Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:01.187403Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:01.187416Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:01.187550Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10891 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:01.446189Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:05.535375Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7488008442552527954:2082];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:05.535446Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dd2/r3tmp/tmpD41mh4/pdisk_1.dat 2025-03-31T16:17:05.682934Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:05.721374Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:05.721483Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:05.726146Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9383, node 13 2025-03-31T16:17:05.824040Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:05.824071Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:05.824081Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:05.824242Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4154 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:06.146656Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientWithCorrectCerts [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientWithCorrectCerts_AllowOnlyDefaultGroup >> TLocksTest::BrokenSameShardLock [GOOD] >> TGRpcNewClient::YqlExplainDataQuery [GOOD] >> TGRpcNewCoordinationClient::CheckUnauthorized >> TLocksTest::CK_Range_GoodLock [GOOD] >> YdbYqlClient::TestTzTypesFullStack ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcClientLowTest::ChangeAcl [GOOD] Test command err: 2025-03-31T16:16:45.481570Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008358711058931:2211];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:45.481890Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dd8/r3tmp/tmpOrAGzo/pdisk_1.dat 2025-03-31T16:16:45.893045Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13098, node 1 2025-03-31T16:16:45.908853Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:16:45.908884Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:16:45.949226Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:16:45.949256Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:16:45.949263Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:16:45.949409Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:16:45.949808Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:45.949960Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:45.961515Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9066 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:16:46.246747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:9066 TClient is connected to server localhost:9066 2025-03-31T16:16:46.651217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-31T16:16:48.211978Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008371595961759:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:16:48.212055Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008371595961749:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:16:48.212247Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:16:48.216253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480
2025-03-31T16:16:48.232743Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008371595961764:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-03-31T16:16:48.311711Z node 1 :TX_PROXY ERROR: Actor# [1:7488008371595961837:2719] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } TClient is connected to server localhost:9066 TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743437806313 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\n\016\010\001\020\200\204\002\032\004user \003" EffectiveACL: "\n\016\010\001\020\200\204\002\032\004user \003" PathVersion: 10 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 10 ACLVersion: 1 EffectiveACLVersion: 1 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 2 } ChildrenExist: true } Children { Name: ".metadata" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710660 CreateStep: 1743437808273 ParentPathId: 1 PathState: EPathStateCreate Owner: "met... (TRUNCATED) 2025-03-31T16:16:50.584579Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488008378276731687:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:50.584703Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dd8/r3tmp/tmpMq9d68/pdisk_1.dat 2025-03-31T16:16:50.701718Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:50.731432Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:50.731558Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:50.734976Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27479, node 4 2025-03-31T16:16:50.802969Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:16:50.802996Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:16:50.803005Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:16:50.803450Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8257 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:16:51.079374Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:8257 TClient is connected to server localhost:8257 2025-03-31T16:16:51.505729Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-31T16:16:53.896955Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488008391161634647:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:16:53.897093Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488008391161634636:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:16:53.897228Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:16:53.902221Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480
2025-03-31T16:16:53.923468Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7488008391161634650:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-03-31T16:16:53.984548Z node 4 :TX_PROXY ERROR: Actor# [4:7488008391161634725:2701] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } TClient is connected to server localhost:8257 TClient::Ls request: Root 2025-03-31T16:16:54.358101Z node 4 :TX_PROXY ERROR: Access denied for user with access DescribeSchema to path Root TClient::Ls response: Status: 128 StatusCode: ERROR Issues { message: "Default error" severity: 1 } SchemeStatus: 12 ErrorReason: "Access denied" 2025-03-31T16:16:55.801797Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7488008399402016123:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:55.801870Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dd8/r3tmp/tmpjqxCcb/pdisk_1.dat 2025-03-31T16:16:55.924146Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:55.958345Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:55.958448Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:55.962693Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15222, node 7 2025-03-31T16:16:56.025695Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:16:56.025722Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:16:56.025728Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:16:56.025828Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27156 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:16:56.262304Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:56.329445Z node 7 :TICKET_PARSER ERROR: Ticket some****oken (BB86510A): Could not find correct token validator 2025-03-31T16:16:56.329593Z node 7 :GRPC_SERVER ERROR: Received TEvRefreshTokenResponse, Authenticated = 0 2025-03-31T16:17:00.405217Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7488008420334773939:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:00.405543Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dd8/r3tmp/tmpu34iKM/pdisk_1.dat 2025-03-31T16:17:00.644685Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:00.694609Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:00.694724Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:00.699520Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18031, node 10 2025-03-31T16:17:00.866637Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:00.866670Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:00.866680Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:00.866862Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6052 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:01.231745Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
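The Ls responses in this ChangeAcl run print the ACL field as a C-style escaped byte string (earlier in this test: ACL: "\n\016\010\001\020\200\204\002\032\004user \003"). Python bytes literals use the same octal escapes, so pasting the literal recovers the raw serialized bytes; what follows is only a recovery sketch, since the log does not show the ACL message schema needed for a full decode.

# Escaped string copied from the Ls output above; Python bytes literals
# understand the same \NNN octal escapes the log printer used.
raw = b"\n\016\010\001\020\200\204\002\032\004user \003"
print(raw.hex(" "))
# 0a 0e 08 01 10 80 84 02 1a 04 75 73 65 72 20 03
# The leading 0x0a/0x0e pair reads like a length-delimited protobuf field and
# "user" (75 73 65 72) is visible as the subject, but a full decode would need
# the actual ACL message definition, which this log does not include.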
2025-03-31T16:17:05.760050Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7488008443244842273:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:05.760227Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dd8/r3tmp/tmpqfyeHW/pdisk_1.dat 2025-03-31T16:17:06.014215Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20101, node 13 2025-03-31T16:17:06.114791Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:06.114889Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:06.169096Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:17:06.190169Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:06.190204Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:06.190213Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:06.190424Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3282 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:06.511692Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
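The WaitRootIsUp 'Root'... / TClient::Ls pairs above are a readiness poll: the client keeps describing the root path until the response carries StatusCode: SUCCESS. A generic sketch of the same pattern, where describe_root is a hypothetical stand-in for whatever issues the Ls request, not a real client API:

import time

def wait_until_up(describe_root, timeout_s=30.0, interval_s=0.5):
    """Poll a probe callable until it reports success or the timeout expires.

    describe_root stands in for the Ls probe and should return True once the
    root path answers with StatusCode: SUCCESS.
    """
    deadline = time.monotonic() + timeout_s
    while time.monotonic() < deadline:
        if describe_root():
            return True
        time.sleep(interval_s)
    return False

# Usage sketch: the probe succeeds on the third attempt.
attempts = iter([False, False, True])
assert wait_until_up(lambda: next(attempts), timeout_s=5.0, interval_s=0.0)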
TClient is connected to server localhost:3282 2025-03-31T16:17:06.910309Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710660:0, at schemeshard: 72057594046644480 >> YdbYqlClient::TestReadTableSnapshot [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::GoodSameShardLock [GOOD] Test command err: 2025-03-31T16:16:31.025702Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008295845260147:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:31.025763Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cb2/r3tmp/tmp7GwXcO/pdisk_1.dat 2025-03-31T16:16:31.271628Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:31.271729Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:31.274618Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:16:31.306526Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:65376 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:31.537822Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:31.574683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:31.705682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:31.752954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
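Interleaved with the per-test output, the runner prints progress entries of the form >> Suite::Test [GOOD]; an entry without a bracketed verdict is a test that has only been scheduled so far. A small sketch for splitting the two, with the >> convention taken from the stream above:

import re

# ">> Suite::Test [VERDICT]" as printed by the runner; the verdict is optional.
ENTRY = re.compile(r">> (?P<name>[\w:]+)(?: \[(?P<verdict>\w+)\])?")

def split_entries(stream):
    """Return ({finished test -> verdict}, [tests only scheduled so far])."""
    finished, scheduled = {}, []
    for m in ENTRY.finditer(stream):
        if m.group("verdict"):
            finished[m.group("name")] = m.group("verdict")
        else:
            scheduled.append(m.group("name"))
    return finished, scheduled

finished, scheduled = split_entries(
    ">> TLocksTest::GoodSameShardLock [GOOD] >> TGRpcYdbTest::CreateTableBadRequest"
)
print(finished)   # {'TLocksTest::GoodSameShardLock': 'GOOD'}
print(scheduled)  # ['TGRpcYdbTest::CreateTableBadRequest']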
2025-03-31T16:16:34.195943Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008309189563221:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:34.195985Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cb2/r3tmp/tmpQFvYJ3/pdisk_1.dat 2025-03-31T16:16:34.319969Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:34.342698Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:34.342829Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:34.343942Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17305 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:34.527673Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:34.532781Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:34.547249Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:34.620221Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:34.670745Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
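Every node in these runs walks the same Hive bootstrap sequence: VolatileState: Unknown -> Disconnected -> Connecting -> Connected. The sketch below checks a transition list against only the edges visible in this log; the real Hive state machine in YDB may well permit transitions not seen here.

# Edges observed in this log only; not the full Hive state machine.
OBSERVED_EDGES = {
    ("Unknown", "Disconnected"),
    ("Disconnected", "Connecting"),
    ("Connecting", "Connected"),
}

def first_unexpected(transitions):
    """Return the first (from, to) pair not seen in this log, else None."""
    for edge in transitions:
        if edge not in OBSERVED_EDGES:
            return edge
    return None

bootstrap = [("Unknown", "Disconnected"),
             ("Disconnected", "Connecting"),
             ("Connecting", "Connected")]
assert first_unexpected(bootstrap) is None
print(first_unexpected([("Connected", "Disconnected")]))  # edge not observed above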
2025-03-31T16:16:37.725166Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488008324990359229:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:37.725226Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cb2/r3tmp/tmpSjqIc0/pdisk_1.dat 2025-03-31T16:16:37.837386Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:37.872338Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:37.872426Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:37.875641Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18400 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:38.044912Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:38.065662Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:38.135846Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:38.205019Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
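The timestamps make per-node bootstrap times easy to measure: node 3 above runs from its first METADATA_PROVIDER warning at 16:16:37.725166 to its last table creation at 16:16:39.774749. A stdlib helper for that arithmetic:

from datetime import datetime

FMT = "%Y-%m-%dT%H:%M:%S.%fZ"

def elapsed_seconds(first_ts, last_ts):
    """Seconds between two log timestamps of the form used in this dump."""
    return (datetime.strptime(last_ts, FMT)
            - datetime.strptime(first_ts, FMT)).total_seconds()

# Node 3's window in the output above:
print(elapsed_seconds("2025-03-31T16:16:37.725166Z",
                      "2025-03-31T16:16:39.774749Z"))  # 2.049583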
2025-03-31T16:16:41.217827Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488008337920729630:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:41.217912Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cb2/r3tmp/tmpDv12Ta/pdisk_1.dat 2025-03-31T16:16:41.350497Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:41.362509Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:41.362594Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:41.363801Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8732 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:41.615543Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:41.636654Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation ... N: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:52.701242Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:52.703322Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15542 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:52.975213Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:52.979575Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:52.999921Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:53.061505Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:53.120279Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:56.833473Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7488008406261940541:2057];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:56.833545Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cb2/r3tmp/tmpN8moQA/pdisk_1.dat 2025-03-31T16:16:56.940300Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:56.974094Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:56.974200Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:56.975907Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9871 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:57.242315Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:57.270371Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:57.342082Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:57.406672Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:01.398809Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7488008424341592562:2105];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:01.398971Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cb2/r3tmp/tmpY2943E/pdisk_1.dat 2025-03-31T16:17:01.615004Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:01.617603Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:01.617702Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:01.621333Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8363 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-03-31T16:17:01.883821Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:01.906087Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:01.989029Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:02.122180Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:06.245708Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7488008447310203713:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:06.245776Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cb2/r3tmp/tmpPytXFx/pdisk_1.dat 2025-03-31T16:17:06.375696Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:06.406492Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:06.406592Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:06.408480Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10291 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:17:06.665083Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:17:06.684889Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:06.767351Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:06.833109Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... >> YdbYqlClient::TestDoubleKey [GOOD] >> YdbYqlClient::TestMultipleModifications >> TGRpcYdbTest::DropTableBadRequest >> TAuthenticationWithSqlExecution::CreateAlterUserWithHash [GOOD] >> TDatabaseQuotas::DisableWritesToDatabase ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::BrokenSameShardLock [GOOD] Test command err: 2025-03-31T16:16:32.822286Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008299896617117:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:32.822394Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cad/r3tmp/tmpCV5Lap/pdisk_1.dat 2025-03-31T16:16:33.104129Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:33.183680Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:33.183808Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:33.185667Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:62232 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:33.413334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:33.440552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:33.532366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:33.574670Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:35.962992Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008313920363635:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:35.963056Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cad/r3tmp/tmpv36RhY/pdisk_1.dat 2025-03-31T16:16:36.061489Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:36.098299Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:36.098412Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:36.100053Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11218 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:36.267164Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:36.294406Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:36.353427Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:36.409600Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:39.287259Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488008330160086455:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:39.287330Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cad/r3tmp/tmpt0ubOr/pdisk_1.dat 2025-03-31T16:16:39.401960Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:39.434273Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:39.434358Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:39.435999Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7529 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:39.609637Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:39.613710Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:39.627710Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:39.703032Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:39.774749Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:42.802263Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488008344566408313:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:42.802339Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cad/r3tmp/tmpWVpgDf/pdisk_1.dat 2025-03-31T16:16:42.922397Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:42.951528Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:42.951638Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:42.953312Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19787 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:43.168877Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:43.190809Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation ... : HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:54.015844Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:54.017536Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11367 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:54.232310Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:54.250553Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:54.316181Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:54.390721Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:57.768135Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7488008409510269628:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:57.768230Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cad/r3tmp/tmpvQQq9r/pdisk_1.dat 2025-03-31T16:16:57.892156Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:57.928207Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:57.928331Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:57.930095Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11116 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:58.189076Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:58.218965Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting...2025-03-31T16:16:58.295478Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:58.392928Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-31T16:17:02.443465Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7488008432069234016:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:02.443684Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cad/r3tmp/tmpoZUny4/pdisk_1.dat 2025-03-31T16:17:02.598470Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:02.615026Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:02.615400Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:02.617182Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21455 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:17:02.926809Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:02.941423Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:02.964818Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:03.034881Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:03.114606Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:17:06.798970Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7488008448063655923:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:06.799050Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cad/r3tmp/tmpKLGBmP/pdisk_1.dat 2025-03-31T16:17:06.904102Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:06.943161Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:06.943275Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:06.945117Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15599 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:17:07.269497Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:17:07.292984Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:07.377490Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:07.442622Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
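Each test run above begins by polling the scheme root until it is visible: the harness issues "TClient::Ls request: dc-1" repeatedly and reports "WaitRootIsUp 'dc-1' success." once the Ls response carries a valid PathDescription. A minimal C++ sketch of that readiness poll follows; TFakeSchemeClient, the timeout, and the poll interval are illustrative assumptions, not the harness's actual code.

#include <chrono>
#include <cstdio>
#include <stdexcept>
#include <string>
#include <thread>

// Hypothetical stand-in for the scheme client; the real harness issues
// TClient::Ls("dc-1") and inspects StatusCode in the reply.
struct TFakeSchemeClient {
    int CallsUntilReady = 3; // simulate the root appearing after a few polls
    bool Ls(const std::string& /*path*/) {
        return --CallsUntilReady <= 0;
    }
};

// Readiness poll in the spirit of the "WaitRootIsUp 'dc-1'" records above.
// Timeout and poll interval are assumptions, not values from the log.
void WaitRootIsUp(TFakeSchemeClient& client, const std::string& root,
                  std::chrono::seconds timeout = std::chrono::seconds(30)) {
    const auto deadline = std::chrono::steady_clock::now() + timeout;
    while (std::chrono::steady_clock::now() < deadline) {
        if (client.Ls(root)) {
            std::printf("WaitRootIsUp '%s' success.\n", root.c_str());
            return; // root is visible, tests can proceed
        }
        std::this_thread::sleep_for(std::chrono::milliseconds(100));
    }
    throw std::runtime_error("WaitRootIsUp timed out for " + root);
}

int main() {
    TFakeSchemeClient client;
    WaitRootIsUp(client, "dc-1");
}

In the real tests the success condition would be checking "StatusCode: SUCCESS" in the Ls response rather than a boolean.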
------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestReadTableSnapshot [GOOD] Test command err: 2025-03-31T16:16:52.085480Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008386069130760:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:52.086208Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dcd/r3tmp/tmpkvDVoj/pdisk_1.dat 2025-03-31T16:16:52.484699Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:52.534067Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:52.534150Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 25642, node 1 2025-03-31T16:16:52.540870Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:16:52.622208Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:16:52.622236Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:16:52.622247Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:16:52.622529Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4637 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:16:52.920129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:54.736633Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008394659066371:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:16:54.736778Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:16:54.981122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:16:55.133753Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008398954033839:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:16:55.133845Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:16:55.133927Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008398954033844:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:16:55.137789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-31T16:16:55.155172Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008398954033846:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-31T16:16:55.216152Z node 1 :TX_PROXY ERROR: Actor# [1:7488008398954033917:2805] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:16:55.337985Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jqpfvqawc1g3stak2279c9nd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjkwODVjMmEtMzgxMzlkMmYtZGNkMTgwMS0xNzc4MjNjYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:16:55.518013Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jqpfvqhx5ngynhrjmz4shptd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjkwODVjMmEtMzgxMzlkMmYtZGNkMTgwMS0xNzc4MjNjYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:16:55.534404Z node 1 :TX_PROXY ERROR: [ReadTable [1:7488008398954033982:2367] TxId# 281474976710663] RESPONSE Status# ResolveError shard: 0 table: Root/Test 2025-03-31T16:16:55.542323Z node 1 :TX_PROXY ERROR: [ReadTable [1:7488008398954033985:2368] TxId# 281474976710664] RESPONSE Status# ResolveError shard: 0 table: Root/Test 2025-03-31T16:16:57.189071Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488008409707914726:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:57.189230Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dcd/r3tmp/tmp8PPVOO/pdisk_1.dat 2025-03-31T16:16:57.344056Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:57.393721Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:57.393832Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:57.397245Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30852, node 4 2025-03-31T16:16:57.493187Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:16:57.493239Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:16:57.493248Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:16:57.493385Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29747 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:16:57.756474Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:00.558023Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488008422592817657:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:00.558121Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:00.589130Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:17:00.717558Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488008422592817826:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:00.717683Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:00.717714Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488008422592817831:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:00.721278Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-31T16:17:00.742013Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7488008422592817833:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-31T16:17:00.836846Z node 4 :TX_PROXY ERROR: Actor# [4:7488008422592817910:2810] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:17:00.968871Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqpfvwsc7v4x7zm569s1sbx2, Database: , DatabaseId: ... rvice] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:05.878850Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:17:05.984937Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488008444979252710:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:05.985024Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488008444979252715:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:05.985033Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:05.988552Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-31T16:17:06.011604Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7488008444979252717:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-31T16:17:06.119553Z node 7 :TX_PROXY ERROR: Actor# [7:7488008449274220094:2804] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:17:06.182909Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqpfw1y08mk0098av0m4edzx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZDk0OGZjMzctNGE3YWNjYTgtYTZhZTJhZWYtNGJlMDFmODY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:17:06.316267Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqpfw24q9pk1710tq1kz8ph2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZDk0OGZjMzctNGE3YWNjYTgtYTZhZTJhZWYtNGJlMDFmODY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:17:07.846458Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7488008453060383231:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:07.846541Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dcd/r3tmp/tmpLQvAKH/pdisk_1.dat 2025-03-31T16:17:07.997729Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:08.025071Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:08.025165Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:08.026759Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18677, node 10 2025-03-31T16:17:08.109219Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:08.109254Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:08.109265Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:08.109443Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14396 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:08.354363Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:08.405000Z node 10 :GRPC_SERVER INFO: Got grpc request# ListEndpointsRequest, traceId# 01jqpfw49m3sc3pef9nbw590kg, sdkBuildInfo# ydb-cpp-sdk/dev, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:54380, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# 9.998120s 2025-03-31T16:17:08.413367Z node 10 :GRPC_SERVER DEBUG: Got grpc request# CreateSessionRequest, traceId# 01jqpfw49x7gajrvh6pgp8ayy8, sdkBuildInfo# ydb-cpp-sdk/dev, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:54392, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-03-31T16:17:10.984816Z node 10 :GRPC_SERVER DEBUG: Got grpc request# ExecuteSchemeQueryRequest, traceId# 01jqpfw6t8cfc8yh655w2n4qam, sdkBuildInfo# ydb-cpp-sdk/dev, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:54406, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-03-31T16:17:10.986412Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7488008465945286140:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:10.986492Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:11.004518Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:17:11.007519Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-03-31T16:17:11.007529Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-03-31T16:17:11.007631Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-03-31T16:17:11.007631Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-03-31T16:17:11.082482Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-03-31T16:17:11.082634Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-03-31T16:17:11.082663Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-03-31T16:17:11.082729Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-03-31T16:17:11.102811Z node 10 :GRPC_SERVER DEBUG: Got grpc request# ReadTableRequest, traceId# 01jqpfw6xyeajcva56wbqptmzb, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:54418, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-03-31T16:17:11.114424Z node 10 :READ_TABLE_API DEBUG: [10:7488008470240253611:2346] Adding quota request to queue ShardId: 0, TxId: 281474976710659 2025-03-31T16:17:11.114479Z node 10 :READ_TABLE_API DEBUG: [10:7488008470240253611:2346] Assign stream quota to Shard 0, Quota 5, TxId 281474976710659 Reserved: 5 of 25, Queued: 0 2025-03-31T16:17:11.117179Z node 10 :READ_TABLE_API DEBUG: [10:7488008470240253611:2346] got stream part, size: 35, RU required: 128 rate limiter absent 2025-03-31T16:17:11.117619Z node 10 :READ_TABLE_API DEBUG: [10:7488008470240253611:2346] Starting inactivity timer for 600.000000s with tag 3 2025-03-31T16:17:11.117851Z node 10 :READ_TABLE_API NOTICE: [10:7488008470240253611:2346] Finish grpc stream, status: 400000 2025-03-31T16:17:11.128744Z node 10 :GRPC_SERVER DEBUG: [0x51a0000d6e80] received request Name# SchemeOperation ok# false data# peer# current inflight# 0 2025-03-31T16:17:11.128887Z node 10 :GRPC_SERVER DEBUG: [0x51a0000d6280] received request Name# SchemeOperationStatus ok# false data# peer# current inflight# 0 2025-03-31T16:17:11.129238Z node 10 :GRPC_SERVER DEBUG: [0x51a0000f7e80] received request Name# SchemeDescribe ok# false data# peer# current inflight# 0 2025-03-31T16:17:11.129416Z node 10 :GRPC_SERVER DEBUG: [0x51a0000d5c80] received request Name# ChooseProxy ok# false data# peer# current inflight# 0 2025-03-31T16:17:11.129606Z node 10 :GRPC_SERVER DEBUG: [0x51a000103880] received request Name# PersQueueRequest ok# false data# peer# current inflight# 0 2025-03-31T16:17:11.130605Z node 10 :GRPC_SERVER DEBUG: [0x51a000103280] received request Name# SchemeInitRoot ok# false data# peer# current inflight# 0 2025-03-31T16:17:11.130876Z node 10 :GRPC_SERVER DEBUG: [0x51a0000d5680] received request Name# ResolveNode ok# false data# peer# current inflight# 0 2025-03-31T16:17:11.131063Z node 10 :GRPC_SERVER DEBUG: [0x51a0000d5080] received request Name# FillNode ok# false data# peer# current inflight# 0 2025-03-31T16:17:11.131264Z node 10 :GRPC_SERVER DEBUG: [0x51a0000d4a80] received request Name# DrainNode ok# false data# peer# current inflight# 0 2025-03-31T16:17:11.131611Z node 10 :GRPC_SERVER DEBUG: [0x51a00000de80] received 
request Name# BlobStorageConfig ok# false data# peer# current inflight# 0 2025-03-31T16:17:11.131835Z node 10 :GRPC_SERVER DEBUG: [0x51a0000d6880] received request Name# HiveCreateTablet ok# false data# peer# current inflight# 0 2025-03-31T16:17:11.132040Z node 10 :GRPC_SERVER DEBUG: [0x51a0000d3e80] received request Name# TestShardControl ok# false data# peer# current inflight# 0 2025-03-31T16:17:11.132243Z node 10 :GRPC_SERVER DEBUG: [0x51a0000d3880] received request Name# RegisterNode ok# false data# peer# current inflight# 0 2025-03-31T16:17:11.132444Z node 10 :GRPC_SERVER DEBUG: [0x51a0000d3280] received request Name# CmsRequest ok# false data# peer# current inflight# 0 2025-03-31T16:17:11.132632Z node 10 :GRPC_SERVER DEBUG: [0x51a0000d2c80] received request Name# ConsoleRequest ok# false data# peer# current inflight# 0 2025-03-31T16:17:11.132820Z node 10 :GRPC_SERVER DEBUG: [0x51a0000d4480] received request Name# InterconnectDebug ok# false data# peer# current inflight# 0 2025-03-31T16:17:11.133005Z node 10 :GRPC_SERVER DEBUG: [0x51a00004e080] received request Name# TabletStateRequest ok# false data# peer# current inflight# 0 >> Viewer::StorageGroupOutputWithSpaceCheckDependsOnUsage [GOOD] >> Viewer::SimpleFeatureFlags ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::CK_Range_GoodLock [GOOD] Test command err: 2025-03-31T16:16:33.978293Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008307617970251:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:33.978396Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001ca0/r3tmp/tmpogjCj4/pdisk_1.dat 2025-03-31T16:16:34.249939Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:34.332826Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:34.332966Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:34.334761Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23815 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-03-31T16:16:34.526361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:34.551875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:34.649132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:34.720658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:36.924047Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008320056568639:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:36.924111Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001ca0/r3tmp/tmpjKk539/pdisk_1.dat 2025-03-31T16:16:36.998185Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:37.054239Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:37.054352Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:37.055721Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15399 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:37.152887Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:37.170284Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:37.240624Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:37.281851Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:40.128814Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488008337411874555:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:40.128884Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001ca0/r3tmp/tmpiPIhrA/pdisk_1.dat 2025-03-31T16:16:40.240373Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:40.274020Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:40.274117Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:40.279561Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7043 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:40.459566Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:40.482306Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:40.541069Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:40.614227Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:43.857085Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488008347525464942:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:43.857152Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001ca0/r3tmp/tmpQLaKev/pdisk_1.dat 2025-03-31T16:16:43.970487Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:44.007706Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:44.007797Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:44.009129Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8716 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-03-31T16:16:44.188643Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:44.206781Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting ... (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:54.756518Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:61996 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:54.976622Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:55.001407Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:55.077679Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-31T16:16:55.148419Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:58.833679Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7488008412930192477:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:58.833754Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001ca0/r3tmp/tmpKEJY8X/pdisk_1.dat 2025-03-31T16:16:58.965937Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:58.998749Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:58.998864Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:59.000369Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17377 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:59.264782Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:59.285649Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:59.353734Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:59.429565Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:03.038007Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7488008434207264581:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:03.038098Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001ca0/r3tmp/tmpXSYcKj/pdisk_1.dat 2025-03-31T16:17:03.198186Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:03.211463Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:03.211559Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:03.213401Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19091 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:17:03.532778Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:03.542869Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:03.556349Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-03-31T16:17:03.563544Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:03.646113Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:03.706226Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:07.546684Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7488008453173089273:2208];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:07.556467Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001ca0/r3tmp/tmpbFQcyY/pdisk_1.dat 2025-03-31T16:17:07.693677Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:07.727686Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:07.727804Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:07.729658Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21169 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:17:08.013057Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:17:08.042632Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:08.116957Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:08.174725Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
>> TGRpcNewCoordinationClient::CreateDropDescribe >> TGRpcLdapAuthentication::DisableBuiltinAuthMechanism [GOOD] >> YdbYqlClient::TestDecimal1 [GOOD] >> YdbYqlClient::TestDecimal35 >> YdbYqlClient::TestYqlIssues >> TGRpcYdbTest::CreateTableBadRequest [GOOD] >> TGRpcYdbTest::CreateTableBadRequest2 >> Secret::Validation [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcLdapAuthentication::DisableBuiltinAuthMechanism [GOOD] Test command err: 2025-03-31T16:16:52.656734Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008386369887903:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:52.656784Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dcc/r3tmp/tmpQDRpYA/pdisk_1.dat 2025-03-31T16:16:53.075253Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12004, node 1 2025-03-31T16:16:53.138563Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:53.138635Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:53.141895Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:16:53.161390Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:16:53.161420Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:16:53.161429Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:16:53.161553Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3367 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:16:53.483486Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:56.548671Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488008403735175199:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:56.548736Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dcc/r3tmp/tmpm1GD67/pdisk_1.dat 2025-03-31T16:16:56.671278Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:56.703138Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:56.703219Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:56.710775Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9049, node 4 2025-03-31T16:16:56.803595Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:16:56.803622Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:16:56.803629Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:16:56.803756Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6015 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:16:57.037545Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:17:00.555860Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7488008420197056323:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:00.555912Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dcc/r3tmp/tmp9gHx6j/pdisk_1.dat 2025-03-31T16:17:00.774697Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14301, node 7 2025-03-31T16:17:00.875168Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:00.875264Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:00.887024Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:17:00.891257Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:00.891278Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:00.891285Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:00.891416Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15739 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:01.144294Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:17:05.093010Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7488008444620599659:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:05.093062Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dcc/r3tmp/tmpAbw0IN/pdisk_1.dat 2025-03-31T16:17:05.226610Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:05.262624Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:05.262712Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:05.270486Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19873, node 10 2025-03-31T16:17:05.334879Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:05.334905Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:05.334913Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:05.335063Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18682 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:05.588852Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:17:09.874816Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7488008461538515863:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:09.874917Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dcc/r3tmp/tmpbc5ipN/pdisk_1.dat 2025-03-31T16:17:10.000737Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:10.033548Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:10.033624Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:10.036204Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3699, node 13 2025-03-31T16:17:10.103907Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:10.103930Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:10.103938Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:10.104049Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11860 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:10.405285Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
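For orientation, the Secret::Validation command log further below exercises YDB's secret DDL end to end. The statements here are copied verbatim from its REQUEST= markers, in order; the comments restate the EXPECTATION flags and error text recorded in that log, and are not an independent specification of the secret API:

    CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;               -- rejected at parse time: ":1:20: mismatched input '-' expecting '('" (hyphen in an unquoted object name)
    ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;                -- fails while secret1 is absent: "secret secret1 not found for alter"
    CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;                -- succeeds
    ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;                -- now succeeds
    CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);               -- succeeds
    CREATE OBJECT `secret2:test@test1` (TYPE SECRET_ACCESS);               -- fails: "used in access secret secret2 not found"
    CREATE OBJECT IF NOT EXISTS `secret1:test@test1` (TYPE SECRET_ACCESS); -- succeeds; the underlying duplicate-key write is reported and absorbed
    DROP OBJECT `secret1` (TYPE SECRET);                                   -- fails while referenced: "secret secret1 using in access for test@test1"
    SELECT * FROM `/Root/.metadata/secrets/values`;                        -- rejected with SCHEME_ERROR: the secrets store is not directly readable
    CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;                -- fails: "Secret already exists: secret1"
    UPSERT OBJECT secret1 (TYPE SECRET) WITH value = `100`;                -- fails: "Secret already exists: secret1"
    CREATE OBJECT secret2 (TYPE SECRET) WITH value = `100`;                -- succeeds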
>> YdbYqlClient::TestReadTableMultiShardWithDescribe [GOOD] >> YdbYqlClient::TestReadTableMultiShardWithDescribeAndRowLimit >> YdbYqlClient::SecurityTokenAuthMultiTenantSDK >> YdbYqlClient::ConnectDbAclIsStrictlyChecked >> YdbYqlClient::Utf8DatabasePassViaHeader [GOOD] >> YdbYqlClient::TestYqlTypesFromPreparedQuery >> TTableProfileTests::OverwriteCachingPolicy [GOOD] >> TTableProfileTests::ExplicitPartitionsSimple >> YdbYqlClient::ColumnFamiliesExternalBlobsWithoutDefaultProfile [GOOD] >> TTableProfileTests::DescribeTableWithPartitioningPolicy [GOOD] >> TTableProfileTests::ExplicitPartitionsComplex >> TTableProfileTests::UseTableProfilePreset ------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::Validation [GOOD] Test command err: 2025-03-31T16:14:23.997099Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:14:23.997346Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:14:23.997483Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000e36/r3tmp/tmpv8uMuW/pdisk_1.dat TServer::EnableGrpc on GrpcPort 19210, node 1 TClient is connected to server localhost:64159 2025-03-31T16:14:24.670537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:14:24.714876Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:14:24.715076Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:14:24.715146Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:14:24.715187Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:14:24.715667Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:14:24.752939Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:14:24.753088Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:14:24.764605Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Initialization finished REQUEST=CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2025-03-31T16:14:36.550019Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:751:2629], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:36.550232Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } REQUEST=CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;RESULT=
:1:20: Error: mismatched input '-' expecting '(' ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=0;WAITING=1 2025-03-31T16:14:46.888281Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:781:2648], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:46.888403Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:46.894239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:2, at schemeshard: 72057594046644480 2025-03-31T16:14:47.066512Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:891:2725], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:47.066605Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:47.066905Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:896:2730], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:14:47.071436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:2, at schemeshard: 72057594046644480 2025-03-31T16:14:47.202615Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:898:2732], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:14:47.496417Z node 1 :TX_PROXY ERROR: Actor# [1:994:2799] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:14:47.990489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:14:48.466876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:1, at schemeshard: 72057594046644480 2025-03-31T16:14:49.260541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-03-31T16:14:49.984843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-31T16:14:50.414342Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-03-31T16:14:51.481050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-03-31T16:14:51.801450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;RESULT=
: Error: Execution, code: 1060
:1:48: Error: Executing ALTER OBJECT SECRET
: Error: preparation problem: secret secret1 not found for alter ;EXPECTATION=0 2025-03-31T16:14:54.174157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:14:54.174243Z node 1 :IMPORT WARN: Table profiles were not loaded FINISHED_REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2025-03-31T16:15:29.828074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715712:0, at schemeshard: 72057594046644480 2025-03-31T16:15:30.746207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715717:0, at schemeshard: 72057594046644480 2025-03-31T16:15:32.355184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715726:0, at schemeshard: 72057594046644480 2025-03-31T16:15:32.823042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715729:0, at schemeshard: 72057594046644480 REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT `secret2:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT `secret2:test@test1` (TYPE SECRET_ACCESS);RESULT=
: Error: Execution, code: 1060
:1:42: Error: Executing CREATE OBJECT SECRET_ACCESS
: Error: preparation problem: used in access secret secret2 not found ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT `secret2:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT IF NOT EXISTS `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2025-03-31T16:15:58.231371Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=16; 2025-03-31T16:15:58.231685Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 16 at tablet 72075186224037892 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-03-31T16:15:58.231879Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 16 at tablet 72075186224037892 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-03-31T16:15:58.232387Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:3722:4774], Table: `//Root/.metadata/secrets/access` ([72057594046644480:13:1]), SessionActorId: [1:3629:4774]Got CONSTRAINT VIOLATION for table `//Root/.metadata/secrets/access`. ShardID=72075186224037892, Sink=[1:3722:4774].{
: Error: Duplicate keys have been found., code: 2012 } 2025-03-31T16:15:58.233024Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:3715:4774], SessionActorId: [1:3629:4774], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `//Root/.metadata/secrets/access`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[1:3629:4774]. isRollback=0 2025-03-31T16:15:58.233553Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OGI4OGZmMzUtY2EzNDVmOS00YTNhNTgwNS1jNjdlZjkzYw==, ActorId: [1:3629:4774], ActorState: ExecuteState, TraceId: 01jqpfsznm9tc73jd83q9ngmxf, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:3716:4774] from: [1:3715:4774] 2025-03-31T16:15:58.233795Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:3716:4774] TxId: 281474976715756. Ctx: { TraceId: 01jqpfsznm9tc73jd83q9ngmxf, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGI4OGZmMzUtY2EzNDVmOS00YTNhNTgwNS1jNjdlZjkzYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `//Root/.metadata/secrets/access`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-03-31T16:15:58.235175Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OGI4OGZmMzUtY2EzNDVmOS00YTNhNTgwNS1jNjdlZjkzYw==, ActorId: [1:3629:4774], ActorState: ExecuteState, TraceId: 01jqpfsznm9tc73jd83q9ngmxf, Create QueryResponse for error on request, msg: 2025-03-31T16:15:58.246128Z node 1 :METADATA_PROVIDER ERROR: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: PRECONDITION_FAILED issues { message: "Constraint violated. Table: `//Root/.metadata/secrets/access`." issue_code: 2012 severity: 1 issues { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { id: "01jqpfszfj62xf74y5zjed0szn" } } } } ;request=session_id: "ydb://session/3?node_id=1&id=OGI4OGZmMzUtY2EzNDVmOS00YTNhNTgwNS1jNjdlZjkzYw==" tx_control { tx_id: "01jqpfszfj62xf74y5zjed0szn" } query { yql_text: "DECLARE $objects AS List>;\nINSERT INTO `//Root/.metadata/secrets/access`\nSELECT ownerUserId,secretId,accessSID FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "ownerUserId" type { type_id: UTF8 } } members { name: "secretId" type { type_id: UTF8 } } members { name: "accessSID" type { type_id: UTF8 } } } } } } value { items { items { text_value: "root@builtin" } items { text_value: "secret1" } items { text_value: "test@test1" } } } } } ; REQUEST=CREATE OBJECT IF NOT EXISTS `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT IF NOT EXISTS `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=0;WAITING=1 REQUEST=DROP OBJECT `secret1` (TYPE SECRET);RESULT=
: Error: Execution, code: 1060
:1:29: Error: Executing DROP OBJECT SECRET
: Error: preparation problem: secret secret1 using in access for test@test1 ;EXPECTATION=0 FINISHED_REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=0;WAITING=1 2025-03-31T16:16:21.392795Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:4243:5230], for# root@builtin, access# DescribeSchema 2025-03-31T16:16:21.392887Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:4243:5230], for# root@builtin, access# DescribeSchema 2025-03-31T16:16:21.394884Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:4240:5227], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/secrets/values]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:16:21.396257Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTBhZGQ1NmYtZjc3NzlmM2QtNzA1M2U3MDItYTgxNzg4NTc=, ActorId: [1:4236:5224], ActorState: ExecuteState, TraceId: 01jqpftpc30tgwna9dmqep8apg, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: REQUEST=SELECT * FROM `/Root/.metadata/secrets/values`;RESULT=
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/secrets/values]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 ;EXPECTATION=0 REQUEST=SELECT * FROM `/Root/.metadata/secrets/values`;EXPECTATION=0 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2025-03-31T16:16:32.879703Z node 1 :TICKET_PARSER ERROR: Ticket **** (51449FAE): Could not find correct token validator REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
: Error: Execution, code: 1060
:1:50: Error: Executing CREATE OBJECT SECRET
: Error: cannot CREATE objects: Secret already exists: secret1 ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=UPSERT OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=UPSERT OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
: Error: Execution, code: 1060
:1:50: Error: Executing UPSERT OBJECT SECRET
: Error: cannot UPSERT objects: Secret already exists: secret1 ;EXPECTATION=0 FINISHED_REQUEST=UPSERT OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT secret2 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT secret2 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT secret2 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 2025-03-31T16:17:12.351830Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715836. Ctx: { TraceId: 01jqpfw7qj774ss8n8rmds5bpm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTZmZWQ5OGUtZTdiMTEwZjgtYmZiNTViLTM0NDY3OGZk, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 >> TGRpcNewCoordinationClient::CheckUnauthorized [GOOD] >> TGRpcNewCoordinationClient::CreateAlter >> YdbYqlClient::TestTzTypesFullStack [GOOD] >> YdbYqlClient::TestVariant |87.7%| [TA] $(B)/ydb/services/metadata/secret/ut/test-results/unittest/{meta.json ... results_accumulator.log} |87.7%| [TA] {RESULT} $(B)/ydb/services/metadata/secret/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TGRpcYdbTest::DropTableBadRequest [GOOD] >> TGRpcYdbTest::CreateYqlSession ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::ColumnFamiliesExternalBlobsWithoutDefaultProfile [GOOD] Test command err: 2025-03-31T16:16:48.523224Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008368599414818:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:48.523304Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dd1/r3tmp/tmpSmlLBe/pdisk_1.dat 2025-03-31T16:16:48.916742Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:48.927102Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:48.927259Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:48.939177Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20015, node 1 2025-03-31T16:16:49.070780Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:16:49.070804Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:16:49.070813Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:16:49.070963Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6175 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:16:49.380749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:53.219049Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488008391601825792:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:53.219246Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dd1/r3tmp/tmptITvbE/pdisk_1.dat 2025-03-31T16:16:53.373780Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:53.413915Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:53.414004Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:53.417459Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9383, node 4 2025-03-31T16:16:53.494351Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:16:53.494377Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:16:53.494384Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:16:53.494565Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12875 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:16:53.722943Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:57.758151Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7488008408210452754:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:57.758246Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dd1/r3tmp/tmpt1ZNYp/pdisk_1.dat 2025-03-31T16:16:57.897542Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:57.933725Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:57.933820Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:57.938170Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6288, node 7 2025-03-31T16:16:57.996452Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:16:57.996476Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:16:57.996485Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:16:57.996621Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10569 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:16:58.257417Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:10569 2025-03-31T16:16:58.532742Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /Root/ydb_ut_tenant, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:16:58.533247Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-31T16:16:58.533275Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:16:58.536332Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE DATABASE, path: /Root/ydb_ut_tenant waiting... 2025-03-31T16:16:58.545671Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743437818591, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-31T16:16:58.548075Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2025-03-31T16:16:58.548147Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 2, subscribers: 1 2025-03-31T16:16:58.549566Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 1 2025-03-31T16:16:58.553850Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: /Root/ydb_ut_tenant, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-31T16:16:58.554606Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-31T16:16:58.554639Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-31T16:16:58.556930Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, database: /Root, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: /Root/ydb_ut_tenant waiting... 
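The next block creates /Root/ydb_ut_tenant/Table-1 together with a global secondary index (audit entry "CREATE TABLE WITH INDEXES"; index name MyIndex, KeyColumnNames: Value, Type: EIndexTypeGlobal). A hedged YQL reconstruction of such a DDL is sketched here; only the table path, index name, and indexed column come from the log, while the remaining columns and their types are illustrative assumptions:

    CREATE TABLE `/Root/ydb_ut_tenant/Table-1` (
        Key Uint64,                       -- assumed key column; not named in the log
        Value Utf8,                       -- the indexed column recorded in the scheme op
        PRIMARY KEY (Key),
        INDEX MyIndex GLOBAL ON (Value)   -- EIndexTypeGlobal in the audit entry
    );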
2025-03-31T16:16:59.061528Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7488008418512753422:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:59.061622Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:16:59.075508Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:59.075610Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:59.083842Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 9 Cookie 9 2025-03-31T16:16:59.084795Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:16:59.548230Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743437819585, ... 8Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715659, publications: 1, subscribers: 1 2025-03-31T16:17:05.664306Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715659, subscribers: 1 2025-03-31T16:17:07.926922Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/ydb_ut_tenant/Table-1, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-31T16:17:07.928518Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-31T16:17:07.928554Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-31T16:17:07.928680Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTableIndex Propose, path: /Root/ydb_ut_tenant/Table-1/MyIndex, operationId: 281474976715660:1, transaction: WorkingDir: "/Root/ydb_ut_tenant/Table-1" OperationType: ESchemeOpCreateTableIndex CreateTableIndex { Name: "MyIndex" KeyColumnNames: "Value" Type: EIndexTypeGlobal IndexImplTableDescriptions { } } Internal: false FailOnExist: false AllowCreateInTempDir: false, at schemeshard: 72057594046644480 2025-03-31T16:17:07.928842Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-31T16:17:07.928895Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/ydb_ut_tenant/Table-1/MyIndex/indexImplTable, opId: 281474976715660:2, at schemeshard: 72057594046644480 2025-03-31T16:17:07.929774Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-31T16:17:07.932334Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715660, database: /Root/ydb_ut_tenant, subject: , status: StatusAccepted, operation: CREATE TABLE WITH INDEXES, path: /Root/ydb_ut_tenant/Table-1 2025-03-31T16:17:08.012349Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743437828050, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-31T16:17:08.030396Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 
281474976715660:0 2025-03-31T16:17:08.030540Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715660:1 2025-03-31T16:17:08.030558Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715660:2 2025-03-31T16:17:08.050964Z node 10 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 12 2025-03-31T16:17:08.051663Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-03-31T16:17:08.853082Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:17:08.853372Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [12:7488008456197068097:2326], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:17:08.932055Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [12:7488008456197068097:2326], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:17:09.894626Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7488008458699055905:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:09.894751Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dd1/r3tmp/tmp6ypRaH/pdisk_1.dat 2025-03-31T16:17:10.021219Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:10.065811Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:10.065927Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:10.069419Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62151, node 13 2025-03-31T16:17:10.150877Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:10.150908Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:10.150918Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:10.151077Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27685 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:10.500409Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
TClient is connected to server localhost:27685 2025-03-31T16:17:10.839219Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /Root/ydb_ut_tenant, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:17:10.839664Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-31T16:17:10.839700Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:17:10.842512Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE DATABASE, path: /Root/ydb_ut_tenant waiting... 2025-03-31T16:17:10.855884Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743437830904, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-31T16:17:10.858228Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2025-03-31T16:17:10.858286Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 2, subscribers: 1 2025-03-31T16:17:10.859356Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 1 2025-03-31T16:17:10.863236Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: /Root/ydb_ut_tenant, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-31T16:17:10.863915Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-31T16:17:10.863954Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-31T16:17:10.865818Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, database: /Root, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: /Root/ydb_ut_tenant waiting... 
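This sub-case (still inside ColumnFamiliesExternalBlobsWithoutDefaultProfile) repeats the tenant bootstrap and then issues a plain CREATE TABLE for /Root/ydb_ut_tenant/Table-1. As a hedged illustration of the column-family declarations the test name refers to, a table with a non-default family might be declared as below; the family name, columns, and storage settings are assumptions, and the external-blob behaviour itself comes from the table profile / storage configuration rather than from this YQL:

    CREATE TABLE `/Root/ydb_ut_tenant/Table-1` (
        Key Uint64,
        Data String FAMILY family_large,  -- column bound to a non-default family (assumed)
        PRIMARY KEY (Key),
        FAMILY family_large (             -- hypothetical family settings
            DATA = "hdd",
            COMPRESSION = "lz4"
        )
    );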
2025-03-31T16:17:11.368488Z node 15 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[15:7488008466915522899:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:11.368570Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:17:11.378127Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:11.378230Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:11.383943Z node 13 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 15 Cookie 15 2025-03-31T16:17:11.407948Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:17:11.741201Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743437831786, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-31T16:17:11.743853Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 2025-03-31T16:17:11.744061Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715659, publications: 1, subscribers: 1 2025-03-31T16:17:11.745540Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715659, subscribers: 1 2025-03-31T16:17:14.000024Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/ydb_ut_tenant/Table-1, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-31T16:17:14.001208Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-31T16:17:14.001239Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-31T16:17:14.004259Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715660, database: /Root/ydb_ut_tenant, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/ydb_ut_tenant/Table-1 2025-03-31T16:17:14.096025Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743437834140, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-31T16:17:14.107527Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715660:0 2025-03-31T16:17:14.138655Z node 13 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 15 2025-03-31T16:17:14.139317Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connected -> Disconnected
>> ClientStatsCollector::PrepareQuery
>> TGRpcNewCoordinationClient::CreateDropDescribe [GOOD]
>> TGRpcNewCoordinationClient::NodeNotFound
>> TGRpcClientLowTest::GrpcRequestProxy
>> YdbYqlClient::TestMultipleModifications [GOOD]
>> YdbYqlClient::TestDescribeTableWithShardStats
>> TLocksTest::BrokenNullLock [GOOD]
>> TLocksTest::Range_GoodLock1 [GOOD]
>> TGRpcYdbTest::CreateTableBadRequest2 [GOOD]
>> TGRpcYdbTest::CreateTableBadRequest3
>> TYqlDateTimeTests::SimpleUpsertSelect
>> YdbYqlClient::TestDecimal35 [GOOD]
>> YdbYqlClient::TestDecimalFullStack
>> YdbYqlClient::TestYqlIssues [GOOD]
>> YdbYqlClient::TestYqlSessionClosed
>> TRegisterNodeOverDiscoveryService::ServerWithoutCertVerification_ClientProvidesEmptyClientCerts [GOOD]
>> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientWithCorrectCerts
>> YdbMonitoring::SelfCheckWithNodesDying [GOOD]
>> YdbOlapStore::BulkUpsert
>> YdbYqlClient::ConnectDbAclIsStrictlyChecked [GOOD]
>> YdbYqlClient::CopyTables
>> YdbYqlClient::SecurityTokenAuthMultiTenantSDK [GOOD]
>> YdbYqlClient::SecurityTokenAuthMultiTenantSDKAsync
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::BrokenNullLock [GOOD]
Test command err: 2025-03-31T16:16:38.805988Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008327099308348:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:38.806036Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001c89/r3tmp/tmpNo5SSb/pdisk_1.dat 2025-03-31T16:16:39.121862Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:39.168988Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:39.169123Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:39.170786Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17574 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:39.324068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:39.355448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:39.470021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:39.544888Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:41.600254Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008341491775359:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:41.600327Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001c89/r3tmp/tmpHozd05/pdisk_1.dat 2025-03-31T16:16:41.688837Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:41.731909Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:41.732727Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:41.734313Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:31669 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:41.884164Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:41.902296Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:41.977886Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:42.019496Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:45.107849Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488008359011618089:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:45.107909Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001c89/r3tmp/tmpXBEBYB/pdisk_1.dat 2025-03-31T16:16:45.250411Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:45.272549Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:45.272633Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:45.274093Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26531 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:45.446141Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:45.452625Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:45.465053Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:45.543453Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:45.590721Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:16:48.649964Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488008370670837028:2204];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:48.651447Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001c89/r3tmp/tmp3W8BkE/pdisk_1.dat 2025-03-31T16:16:48.756184Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:48.785696Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:48.785796Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:48.787946Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24488 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:49.027929Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:49.050214Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation ... HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:01.033402Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:01.035260Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11285 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-03-31T16:17:01.305442Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:01.331340Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:01.340384Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-31T16:17:01.417419Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:01.478619Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:04.905016Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7488008438776526085:2057];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:04.905084Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001c89/r3tmp/tmpyR11xh/pdisk_1.dat 2025-03-31T16:17:05.042676Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:05.077021Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:05.077160Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:05.079298Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18339 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:17:05.347710Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:17:05.368722Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:05.449073Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:05.507328Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:09.409068Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7488008461253302597:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:09.409214Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001c89/r3tmp/tmp0d64fl/pdisk_1.dat 2025-03-31T16:17:09.531232Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:09.560084Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:09.560196Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:09.561916Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28064 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:17:09.781483Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:17:09.797413Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:09.869729Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:09.971415Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:13.893054Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7488008478515424513:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:13.893189Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001c89/r3tmp/tmpXMQs3S/pdisk_1.dat 2025-03-31T16:17:14.074548Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:14.110077Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:14.110200Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:14.112484Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15643 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:17:14.463401Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:17:14.484414Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:14.554413Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:14.621718Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting...
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::Range_GoodLock1 [GOOD]
Test command err: 2025-03-31T16:16:40.018995Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008335975249268:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:40.019244Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001c86/r3tmp/tmpww8VPH/pdisk_1.dat 2025-03-31T16:16:40.371590Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:40.424548Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:40.424654Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:40.426740Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13323 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:40.635349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:40.665025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:40.789509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:40.831827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:43.071946Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008350505845302:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:43.072043Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001c86/r3tmp/tmpN9yQmg/pdisk_1.dat 2025-03-31T16:16:43.157255Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:43.206329Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:43.206419Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:43.207835Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2398 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:43.375862Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:43.382352Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:43.391882Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-03-31T16:16:43.396074Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:43.470431Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:43.543308Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:46.174718Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488008361028973372:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:46.174817Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001c86/r3tmp/tmpIOeD0Q/pdisk_1.dat 2025-03-31T16:16:46.278330Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:46.314517Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:46.314601Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:46.316186Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15810 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:46.458867Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:46.480537Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:46.551513Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:46.601006Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:49.237004Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488008374758458127:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:49.237113Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001c86/r3tmp/tmp7jo53D/pdisk_1.dat 2025-03-31T16:16:49.318511Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:49.371974Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:49.372058Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:49.373437Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21857 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:49.546610Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDo ... , (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:00.690032Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1426 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-03-31T16:17:01.004421Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:17:01.010732Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:17:01.035070Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-31T16:17:01.123860Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:01.206964Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:17:04.751180Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7488008439245088616:2057];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:04.751276Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001c86/r3tmp/tmpX2bnmD/pdisk_1.dat 2025-03-31T16:17:04.881611Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:04.912818Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:04.912932Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:04.914266Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3037 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:17:05.236844Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:17:05.257097Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:05.318201Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:05.393008Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:17:09.397923Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7488008460680504327:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:09.398104Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001c86/r3tmp/tmpf2Cufk/pdisk_1.dat 2025-03-31T16:17:09.523567Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:09.560533Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:09.560648Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:09.562248Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24447 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:17:09.789056Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:17:09.812181Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:09.872054Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:09.934618Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:17:14.020059Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7488008481429197719:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:14.020187Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001c86/r3tmp/tmplnRAny/pdisk_1.dat 2025-03-31T16:17:14.296981Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:14.321060Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:14.321193Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:14.322301Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2909 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:17:14.604749Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:14.612112Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:14.628909Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:14.697961Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:14.783172Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
>> YdbYqlClient::TestReadTableMultiShardWithDescribeAndRowLimit [GOOD]
>> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientWithCorrectCerts_AllowOnlyDefaultGroup [GOOD]
>> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesEmptyClientCerts
>> YdbYqlClient::TestYqlTypesFromPreparedQuery [GOOD]
>> TGRpcNewCoordinationClient::CreateAlter [GOOD]
>> TGRpcNewCoordinationClient::BasicMethods
>> TTableProfileTests::ExplicitPartitionsSimple [GOOD]
>> TTableProfileTests::ExplicitPartitionsUnordered
>> YdbYqlClient::TestVariant [GOOD]
>> YdbYqlClient::TestTransactionQueryError
>> Viewer::SimpleFeatureFlags [GOOD]
>> TGRpcYdbTest::CreateYqlSession [GOOD]
>> TGRpcYdbTest::ExecuteDmlQuery
>> TTableProfileTests::UseTableProfilePreset [GOOD]
>> TTableProfileTests::UseTableProfilePresetViaSdk
>> ClientStatsCollector::PrepareQuery [GOOD]
>> ClientStatsCollector::CounterCacheMiss
------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestYqlTypesFromPreparedQuery [GOOD]
Test command err: 2025-03-31T16:17:01.147161Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008426432484813:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:01.147243Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dbf/r3tmp/tmppnke0Y/pdisk_1.dat 2025-03-31T16:17:01.615998Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:01.616096Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:01.624121Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13542, node 1 2025-03-31T16:17:01.671529Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:17:01.671555Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:17:01.706625Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:17:01.735250Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:01.735282Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:01.735288Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:01.735390Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7414 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:02.023308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:04.065646Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008439317387726:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:04.065750Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:04.304610Z node 1 :TX_PROXY ERROR: Actor# [1:7488008439317387752:2638] txid# 281474976710658, issues: { message: "Column Key has wrong key type Json" severity: 1 } 2025-03-31T16:17:04.329509Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008439317387762:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:04.329607Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:04.344743Z node 1 :TX_PROXY ERROR: Actor# [1:7488008439317387769:2648] txid# 281474976710659, issues: { message: "Column Key has wrong key type Yson" severity: 1 } test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dbf/r3tmp/tmpP3PNDd/pdisk_1.dat TServer::EnableGrpc on GrpcPort 30896, node 4 TClient is connected to server localhost:12194 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dbf/r3tmp/tmpl5zRqi/pdisk_1.dat TServer::EnableGrpc on GrpcPort 11131, node 7 TClient is connected to server localhost:13075 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-03-31T16:17:16.146591Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7488008491761539624:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:16.147849Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dbf/r3tmp/tmp2Yx0Wd/pdisk_1.dat 2025-03-31T16:17:16.339295Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:16.385800Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:16.385898Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:16.388517Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4663, node 10 2025-03-31T16:17:16.497423Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:16.497452Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:16.497469Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:16.497629Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24976 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:16.815624Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:19.760110Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7488008504646442554:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:19.760110Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7488008504646442561:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:19.760182Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:19.763691Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:17:19.784067Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7488008504646442568:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:17:19.880430Z node 10 :TX_PROXY ERROR: Actor# [10:7488008504646442637:2675] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestReadTableMultiShardWithDescribeAndRowLimit [GOOD] Test command err: 2025-03-31T16:16:59.893769Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008418403141804:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:59.894125Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dc7/r3tmp/tmpFP8mKu/pdisk_1.dat 2025-03-31T16:17:00.229612Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:00.282929Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:00.283032Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 20469, node 1 2025-03-31T16:17:00.296523Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:17:00.338828Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:00.338846Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:00.338855Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:00.338930Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6585 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:00.611571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
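Editor's note: the NOT_FOUND warnings followed by "path exist, request accepts it" above show the workload service lazily creating the default resource pool: several fetchers miss it, a TPoolCreatorActor creates .metadata/workload_manager/pools/default, and a racing creator finds the path already present and treats that as success. A minimal sketch of that idempotent ensure-exists pattern; the enum and helper are hypothetical stand-ins, not ydb source:

    #include <cstdio>

    // Hypothetical statuses standing in for the scheme results in the log:
    // NOT_FOUND on fetch, then create, then "path exist" for the loser of the race.
    enum class EEnsureStatus { Success, AlreadyExists, Error };

    // Both the winning creator (Success) and any racer that observes the path
    // already there (AlreadyExists) converge on the same outcome, which is why
    // the "path exist" issue above is reported but accepted.
    EEnsureStatus EnsureDefaultPool(EEnsureStatus createResult) {
        if (createResult == EEnsureStatus::AlreadyExists) {
            return EEnsureStatus::Success; // "request accepts it"
        }
        return createResult;
    }

    int main() {
        std::printf("%d\n", static_cast<int>(EnsureDefaultPool(EEnsureStatus::AlreadyExists)));
    }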
2025-03-31T16:17:00.692397Z node 1 :GRPC_SERVER INFO: Got grpc request# ListEndpointsRequest, traceId# 01jqpfvwrga03x3fbgggnrx4h0, sdkBuildInfo# ydb-cpp-sdk/dev, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:57522, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# 9.984723s 2025-03-31T16:17:00.728640Z node 1 :GRPC_SERVER DEBUG: Got grpc request# CreateSessionRequest, traceId# 01jqpfvws5b8qjm3tws363fwk5, sdkBuildInfo# ydb-cpp-sdk/dev, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:57532, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-03-31T16:17:02.634897Z node 1 :GRPC_SERVER DEBUG: Got grpc request# CreateTableRequest, traceId# 01jqpfvyna1dtfw29jks28fd9s, sdkBuildInfo# ydb-cpp-sdk/dev, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:53752, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-03-31T16:17:02.635735Z node 1 :TX_PROXY DEBUG: actor# [1:7488008418403142029:2140] Handle TEvProposeTransaction 2025-03-31T16:17:02.635772Z node 1 :TX_PROXY DEBUG: actor# [1:7488008418403142029:2140] TxId# 281474976710658 ProcessProposeTransaction 2025-03-31T16:17:02.635862Z node 1 :TX_PROXY DEBUG: actor# [1:7488008418403142029:2140] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7488008431288044712:2628] 2025-03-31T16:17:02.704690Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008431288044712:2628] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "Test" Columns { Name: "Key" Type: "Uint32" NotNull: false } Columns { Name: "Fk" Type: "Uint64" NotNull: false } Columns { Name: "Value" Type: "String" NotNull: false } KeyColumnNames: "Key" KeyColumnNames: "Fk" UniformPartitionsCount: 16 PartitionConfig { } Temporary: false } CreateIndexedTable { } } } DatabaseName: "" RequestType: "" PeerName: "ipv6:[::1]:53752" 2025-03-31T16:17:02.704743Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008431288044712:2628] txid# 281474976710658 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T16:17:02.705094Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008431288044712:2628] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-03-31T16:17:02.705188Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008431288044712:2628] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:17:02.705368Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008431288044712:2628] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:17:02.705502Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008431288044712:2628] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T16:17:02.705542Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008431288044712:2628] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-03-31T16:17:02.705659Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008431288044712:2628] txid# 281474976710658 HANDLE EvClientConnected 2025-03-31T16:17:02.707488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, 
opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:17:02.712167Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008431288044712:2628] txid# 281474976710658 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710658} 2025-03-31T16:17:02.712236Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008431288044712:2628] txid# 281474976710658 SEND to# [1:7488008431288044711:2335] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 53} 2025-03-31T16:17:02.715748Z node 1 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-03-31T16:17:02.715871Z node 1 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-03-31T16:17:02.715915Z node 1 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-03-31T16:17:02.715956Z node 1 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-03-31T16:17:02.757298Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7488008431288044778:2692], Recipient [1:7488008431288044964:2353]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:17:02.757759Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7488008431288044785:2699], Recipient [1:7488008431288044936:2341]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:17:02.758432Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7488008431288044781:2695], Recipient [1:7488008431288044941:2342]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:17:02.758641Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7488008431288044775:2689], Recipient [1:7488008431288044947:2347]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:17:02.759205Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7488008431288044780:2694], Recipient [1:7488008431288044953:2348]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:17:02.759323Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7488008431288044784:2698], Recipient [1:7488008431288044935:2340]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:17:02.760007Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7488008431288044786:2700], Recipient [1:7488008431288044934:2339]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:17:02.760125Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7488008431288044787:2701], Recipient [1:7488008431288044944:2344]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:17:02.760754Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7488008431288044777:2691], Recipient [1:7488008431288044960:2350]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:17:02.760961Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7488008431288044783:2697], Recipient [1:7488008431288044959:2349]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:17:02.761585Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7488008431288044789:2703], Recipient [1:7488008431288044933:2338]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:17:02.761609Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7488008431288044779:2693], Recipient [1:7488008431288044945:2345]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:17:02.762875Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7488008431288044782:2696], Recipient [1:7488008431288044943:2343]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:17:02.762960Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, 
Sender [1:7488008431288044776:2690], Recipient [1:7488008431288044962:2352]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:17:02.763501Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:7488008431288044781:2695], Recipient [1:7488008431288044941:2342]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:17:02.763643Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7488008431288044790:2704], Recipient [1:7488008431288044946:2346]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:17:02.763959Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037901 actor [1:7488008431288044941:2342] 2025-03-31T16:17:02.764173Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:17:02.764189Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7488008431288044788:2702], Recipient [1:7488008431288044961:2351]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:17:02.764852Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:7488008431288044788:2702], Recipient [1:7488008431288044961:2351]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:17:02.765201Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037893 actor [1:7488008431288044961:2351] 2025-03-31T16:17:02.765480Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:17:02.775888Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:7488008431288044777:2691], Recipient [1:7488008431288044960:2350]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:17:02.775887Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:7488008431288044783:2697], Recipient [1:7488008431288044959:2349]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:17:02.776324Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037899 actor [1:7488008431288044959:2349] 2025-03-31T16:17:02.776327Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet ... 
ed event# 2146435072, Sender [10:7488008502712442199:2342], Recipient [10:7488008502712442199:2342]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:17:20.053097Z node 10 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:17:20.053128Z node 10 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037897 2025-03-31T16:17:20.053142Z node 10 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037897 active 1 active planned 0 immediate 1 planned 0 2025-03-31T16:17:20.053160Z node 10 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715681] at 72075186224037897 for WaitForStreamClearance 2025-03-31T16:17:20.053172Z node 10 :TX_DATASHARD TRACE: Trying to execute [0:281474976715681] at 72075186224037897 on unit WaitForStreamClearance 2025-03-31T16:17:20.053192Z node 10 :TX_DATASHARD TRACE: Got stream clearance for [0:281474976715681] at 72075186224037897 2025-03-31T16:17:20.053205Z node 10 :TX_DATASHARD TRACE: Execution status for [0:281474976715681] at 72075186224037897 is Executed 2025-03-31T16:17:20.053220Z node 10 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715681] at 72075186224037897 executing on unit WaitForStreamClearance 2025-03-31T16:17:20.053232Z node 10 :TX_DATASHARD TRACE: Add [0:281474976715681] at 72075186224037897 to execution unit ReadTableScan 2025-03-31T16:17:20.053244Z node 10 :TX_DATASHARD TRACE: Trying to execute [0:281474976715681] at 72075186224037897 on unit ReadTableScan 2025-03-31T16:17:20.053445Z node 10 :TX_DATASHARD TRACE: Execution status for [0:281474976715681] at 72075186224037897 is Continue 2025-03-31T16:17:20.053466Z node 10 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037897 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-03-31T16:17:20.053478Z node 10 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037897 2025-03-31T16:17:20.053491Z node 10 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037897 has no attached operations 2025-03-31T16:17:20.053503Z node 10 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037897 2025-03-31T16:17:20.053536Z node 10 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037897 2025-03-31T16:17:20.053724Z node 10 :READ_TABLE_API DEBUG: [10:7488008507007410496:2401] Adding quota request to queue ShardId: 0, TxId: 281474976715680 2025-03-31T16:17:20.053733Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [10:7488008507007410514:2140], Recipient [10:7488008502712442199:2342]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-03-31T16:17:20.053753Z node 10 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2025-03-31T16:17:20.053770Z node 10 :READ_TABLE_API DEBUG: [10:7488008507007410496:2401] Assign stream quota to Shard 0, Quota 5, TxId 281474976715680 Reserved: 5 of 25, Queued: 0 2025-03-31T16:17:20.054005Z node 10 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037897, TxId: 281474976715681, MessageQuota: 5 2025-03-31T16:17:20.054225Z node 10 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037897, TxId: 281474976715681, Size: 54, Rows: 0, PendingAcks: 1, MessageQuota: 4 2025-03-31T16:17:20.054277Z node 10 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037897, TxId: 281474976715681, PendingAcks: 0 2025-03-31T16:17:20.054301Z node 10 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037897, TxId: 
281474976715681, MessageQuota: 4 2025-03-31T16:17:20.054382Z node 10 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037897 2025-03-31T16:17:20.054392Z node 10 :READ_TABLE_API DEBUG: [10:7488008507007410496:2401] got stream part, size: 75, RU required: 128 rate limiter absent 2025-03-31T16:17:20.054397Z node 10 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715681, at: 72075186224037897 2025-03-31T16:17:20.054485Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [10:7488008502712442199:2342], Recipient [10:7488008502712442199:2342]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:17:20.054504Z node 10 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:17:20.054550Z node 10 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037897 2025-03-31T16:17:20.054566Z node 10 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037897 active 1 active planned 0 immediate 1 planned 0 2025-03-31T16:17:20.054593Z node 10 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715681] at 72075186224037897 for ReadTableScan 2025-03-31T16:17:20.054605Z node 10 :TX_DATASHARD TRACE: Trying to execute [0:281474976715681] at 72075186224037897 on unit ReadTableScan 2025-03-31T16:17:20.054622Z node 10 :TX_DATASHARD TRACE: ReadTable scan complete for [0:281474976715681] at 72075186224037897 error: , IsFatalError: 0 2025-03-31T16:17:20.054643Z node 10 :TX_DATASHARD TRACE: Execution status for [0:281474976715681] at 72075186224037897 is Executed 2025-03-31T16:17:20.054658Z node 10 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715681] at 72075186224037897 executing on unit ReadTableScan 2025-03-31T16:17:20.054688Z node 10 :TX_DATASHARD TRACE: Add [0:281474976715681] at 72075186224037897 to execution unit FinishPropose 2025-03-31T16:17:20.054701Z node 10 :TX_DATASHARD TRACE: Trying to execute [0:281474976715681] at 72075186224037897 on unit FinishPropose 2025-03-31T16:17:20.054721Z node 10 :TX_DATASHARD TRACE: Execution status for [0:281474976715681] at 72075186224037897 is DelayComplete 2025-03-31T16:17:20.054737Z node 10 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715681] at 72075186224037897 executing on unit FinishPropose 2025-03-31T16:17:20.054749Z node 10 :TX_DATASHARD TRACE: Add [0:281474976715681] at 72075186224037897 to execution unit CompletedOperations 2025-03-31T16:17:20.054750Z node 10 :READ_TABLE_API DEBUG: [10:7488008507007410496:2401] Starting inactivity timer for 600.000000s with tag 3 2025-03-31T16:17:20.054772Z node 10 :TX_DATASHARD TRACE: Trying to execute [0:281474976715681] at 72075186224037897 on unit CompletedOperations 2025-03-31T16:17:20.054797Z node 10 :TX_DATASHARD TRACE: Execution status for [0:281474976715681] at 72075186224037897 is Executed 2025-03-31T16:17:20.054809Z node 10 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715681] at 72075186224037897 executing on unit CompletedOperations 2025-03-31T16:17:20.054822Z node 10 :TX_DATASHARD TRACE: Execution plan for [0:281474976715681] at 72075186224037897 has finished 2025-03-31T16:17:20.054835Z node 10 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037897 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:17:20.054845Z node 10 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037897 2025-03-31T16:17:20.054865Z node 10 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037897 has no attached operations 
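Editor's note: the TRACE lines above and below walk one immediate operation through the datashard's execution units: WaitForStreamClearance returns Executed once clearance arrives, ReadTableScan first returns Continue (the scan runs detached), and after the scan finishes the operation is re-planned through FinishPropose (DelayComplete, side effects deferred to the Complete phase) and CompletedOperations. A compact model of that unit pipeline, purely illustrative rather than ydb source:

    #include <cstdio>
    #include <functional>
    #include <string>
    #include <vector>

    // Statuses mirroring the trace: Executed advances to the next unit,
    // Continue parks the operation until an external event (the scan),
    // DelayComplete defers side effects to the Complete phase.
    enum class EStatus { Executed, Continue, DelayComplete };

    struct TUnit {
        std::string Name;
        std::function<EStatus()> Execute;
    };

    // Runs units in order; stops on Continue so the operation can be resumed
    // later from the same unit, as TTxProgressTransaction does in the log.
    void Advance(const std::vector<TUnit>& plan, size_t& at) {
        while (at < plan.size()) {
            EStatus st = plan[at].Execute();
            std::printf("unit %s -> %d\n", plan[at].Name.c_str(), static_cast<int>(st));
            if (st == EStatus::Continue) {
                return; // wait for scan completion, then Advance() again
            }
            ++at;
        }
    }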
2025-03-31T16:17:20.054876Z node 10 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037897 2025-03-31T16:17:20.054905Z node 10 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037897 2025-03-31T16:17:20.054919Z node 10 :TX_DATASHARD TRACE: Complete execution for [0:281474976715681] at 72075186224037897 on unit FinishPropose 2025-03-31T16:17:20.054940Z node 10 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715681 at tablet 72075186224037897 send to client, exec latency: 2 ms, propose latency: 2 ms, status: COMPLETE 2025-03-31T16:17:20.054986Z node 10 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037897 2025-03-31T16:17:20.055168Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269549569, Sender [10:7488008507007410497:2401], Recipient [10:7488008502712442199:2342]: NKikimrTxDataShard.TEvCancelTransactionProposal TxId: 281474976715681 2025-03-31T16:17:20.055186Z node 10 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvCancelTransactionProposal 2025-03-31T16:17:20.055196Z node 10 :TX_DATASHARD DEBUG: Got TEvDataShard::TEvCancelTransactionProposal 72075186224037897 txId 281474976715681 2025-03-31T16:17:20.055223Z node 10 :TX_DATASHARD DEBUG: Start TTxCancelTransactionProposal at tablet 72075186224037897 txId 281474976715681 2025-03-31T16:17:20.055284Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269287431, Sender [10:7488008507007410497:2401], Recipient [10:7488008502712442199:2342]: NKikimrTx.TEvInterruptTransaction TxId: 281474976715681 2025-03-31T16:17:20.055300Z node 10 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvInterruptTransaction 2025-03-31T16:17:20.055365Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7488008507007410497:2401], Recipient [10:7488008502712442199:2342]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1743437840095 TxId: 281474976715680 2025-03-31T16:17:20.056887Z node 10 :READ_TABLE_API NOTICE: [10:7488008507007410496:2401] Finish grpc stream, status: 400000 2025-03-31T16:17:20.059971Z node 10 :GRPC_SERVER DEBUG: [0x51a000037280] received request Name# SchemeOperation ok# false data# peer# current inflight# 0 2025-03-31T16:17:20.059989Z node 10 :GRPC_SERVER DEBUG: [0x51a000181e80] received request Name# SchemeOperationStatus ok# false data# peer# current inflight# 0 2025-03-31T16:17:20.060225Z node 10 :GRPC_SERVER DEBUG: [0x51a000015080] received request Name# SchemeDescribe ok# false data# peer# current inflight# 0 2025-03-31T16:17:20.060230Z node 10 :GRPC_SERVER DEBUG: [0x51a00016f880] received request Name# ChooseProxy ok# false data# peer# current inflight# 0 2025-03-31T16:17:20.060429Z node 10 :GRPC_SERVER DEBUG: [0x51a000024680] received request Name# PersQueueRequest ok# false data# peer# current inflight# 0 2025-03-31T16:17:20.060439Z node 10 :GRPC_SERVER DEBUG: [0x51a00007ce80] received request Name# SchemeInitRoot ok# false data# peer# current inflight# 0 2025-03-31T16:17:20.060578Z node 10 :GRPC_SERVER DEBUG: [0x51a00016fe80] received request Name# ResolveNode ok# false data# peer# current inflight# 0 2025-03-31T16:17:20.060626Z node 10 :GRPC_SERVER DEBUG: [0x51a000170a80] received request Name# FillNode ok# false data# peer# current inflight# 0 2025-03-31T16:17:20.060698Z node 10 :GRPC_SERVER DEBUG: [0x51a000168680] received request Name# DrainNode ok# false data# peer# current inflight# 0 2025-03-31T16:17:20.060820Z node 10 
:GRPC_SERVER DEBUG: [0x51a000013e80] received request Name# BlobStorageConfig ok# false data# peer# current inflight# 0 2025-03-31T16:17:20.060845Z node 10 :GRPC_SERVER DEBUG: [0x51a000039680] received request Name# HiveCreateTablet ok# false data# peer# current inflight# 0 2025-03-31T16:17:20.060971Z node 10 :GRPC_SERVER DEBUG: [0x51a0000a4480] received request Name# TestShardControl ok# false data# peer# current inflight# 0 2025-03-31T16:17:20.061003Z node 10 :GRPC_SERVER DEBUG: [0x51a000109880] received request Name# RegisterNode ok# false data# peer# current inflight# 0 2025-03-31T16:17:20.061102Z node 10 :GRPC_SERVER DEBUG: [0x51a00007bc80] received request Name# CmsRequest ok# false data# peer# current inflight# 0 2025-03-31T16:17:20.061195Z node 10 :GRPC_SERVER DEBUG: [0x51a000025280] received request Name# ConsoleRequest ok# false data# peer# current inflight# 0 2025-03-31T16:17:20.061228Z node 10 :GRPC_SERVER DEBUG: [0x51a00017ca80] received request Name# InterconnectDebug ok# false data# peer# current inflight# 0 2025-03-31T16:17:20.061337Z node 10 :GRPC_SERVER DEBUG: [0x51a000020a80] received request Name# TabletStateRequest ok# false data# peer# current inflight# 0 >> TTableProfileTests::ExplicitPartitionsComplex [GOOD] >> TTableProfileTests::DescribeTableOptions >> YdbLogStore::LogStore >> TGRpcClientLowTest::GrpcRequestProxy [GOOD] >> TGRpcClientLowTest::GrpcRequestProxyWithoutToken >> TGRpcNewCoordinationClient::SessionMethods >> TGRpcNewCoordinationClient::NodeNotFound [GOOD] >> TGRpcNewCoordinationClient::MultipleSessionsSemaphores >> TGRpcYdbTest::CreateTableBadRequest3 [GOOD] >> TGRpcYdbTest::CreateTableWithIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbLogStore::AlterLogTable [FAIL] Test command err: 2025-03-31T16:16:45.625534Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008355510565049:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:45.625608Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dd3/r3tmp/tmpowVqut/pdisk_1.dat 2025-03-31T16:16:45.960749Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13006, node 1 2025-03-31T16:16:45.968756Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:16:45.968789Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:16:46.008178Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:16:46.008209Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:16:46.008217Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:16:46.008374Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:16:46.025052Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:46.025178Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:46.043344Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17146 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:16:46.326259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:17146 2025-03-31T16:16:48.306097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:16:48.529315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTable Propose, path: Root/Foo/TimestampIndex/indexImplTable, pathId: , opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-31T16:16:48.529506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusNameConflict, reason: Check failed: path: '/Root/Foo/TimestampIndex/indexImplTable', error: path is not a common path (id: [OwnerId: 72057594046644480, LocalPathId: 4], type: EPathTypeTable, state: EPathStateNoChanges), at schemeshard: 72057594046644480 2025-03-31T16:16:48.531750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, database: /Root, subject: , status: StatusNameConflict, reason: Check failed: path: '/Root/Foo/TimestampIndex/indexImplTable', error: path is not a common path (id: [OwnerId: 72057594046644480, LocalPathId: 4], type: EPathTypeTable, state: EPathStateNoChanges), operation: ALTER TABLE, path: Root/Foo/TimestampIndex/indexImplTable 2025-03-31T16:16:48.532114Z node 1 :TX_PROXY ERROR: Actor# [1:7488008368395468376:2952] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/Foo/TimestampIndex/indexImplTable\', error: path is not a common path (id: [OwnerId: 72057594046644480, LocalPathId: 4], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } Error 128: Administrative access denied TClient::Ls request: /Root/Foo/TimestampIndex/indexImplTable TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743437808469 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "Timestamp" Type: "Int64" TypeId: 3 Id: 
1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false ... (TRUNCATED) 2025-03-31T16:16:48.557261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTableIndex Propose, path: /Root/Foo/TimestampIndex, operationId: 281474976710660:0, transaction: WorkingDir: "/Root/Foo" OperationType: ESchemeOpAlterTableIndex AlterTableIndex { Name: "TimestampIndex" State: EIndexStateReady } Internal: false, at schemeshard: 72057594046644480 2025-03-31T16:16:48.557472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-31T16:16:48.557504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTable Propose, path: /Root/Foo/TimestampIndex/indexImplTable, pathId: , opId: 281474976710660:1, at schemeshard: 72057594046644480 2025-03-31T16:16:48.557958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-31T16:16:48.558003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710660:1, at schemeshard: 72057594046644480 2025-03-31T16:16:48.564389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710660, database: /Root, subject: root@builtin, status: StatusAccepted, operation: ALTER TABLE, path: /Root/Foo/TimestampIndex/indexImplTable waiting... 2025-03-31T16:16:48.582227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743437808623, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-31T16:16:48.593837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710660:0 2025-03-31T16:16:48.593888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710660:1 TClient::Ls request: /Root/Foo/TimestampIndex/indexImplTable TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743437808469 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "Timestamp" Type: "Int64" TypeId: 3 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false ... 
(TRUNCATED) 2025-03-31T16:16:48.602287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTableIndex Propose, path: /Root/Foo/TimestampIndex, operationId: 281474976710661:0, transaction: WorkingDir: "/Root/Foo" OperationType: ESchemeOpAlterTableIndex AlterTableIndex { Name: "TimestampIndex" State: EIndexStateReady } Internal: false, at schemeshard: 72057594046644480 2025-03-31T16:16:48.602418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710661:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-31T16:16:48.602432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTable Propose, path: /Root/Foo/TimestampIndex/indexImplTable, pathId: , opId: 281474976710661:1, at schemeshard: 72057594046644480 2025-03-31T16:16:48.602720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710661:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-31T16:16:48.602739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710661:1, at schemeshard: 72057594046644480 2025-03-31T16:16:48.604850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710661, database: /Root, subject: root@builtin, status: StatusAccepted, operation: ALTER TABLE, path: /Root/Foo/TimestampIndex/indexImplTable waiting... 2025-03-31T16:16:48.620042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743437808665, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-31T16:16:48.630311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710661:0 2025-03-31T16:16:48.630348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710661:1 TClient::Ls request: /Root/Foo/TimestampIndex/indexImplTable TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743437808469 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "Timestamp" Type: "Int64" TypeId: 3 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false ... (TRUNCATED) 2025-03-31T16:16:48.636679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTableIndex Propose, path: /Root/Foo/TimestampIndex, operationId: 281474976710662:0, transaction: WorkingDir: "/Root/Foo" OperationType: ESchemeOpAlterTableIndex AlterTableIndex { Name: "TimestampIndex" State: EIndexStateReady } Internal: ... T16:16:55.055739Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:16:55.055766Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:16:55.055775Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:16:55.055924Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1649 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:16:55.317277Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:55.380944Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:16:55.505708Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-03-31T16:16:55.669843Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-03-31T16:16:59.559535Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7488008416530557677:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:59.559605Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dd3/r3tmp/tmpJLS5zf/pdisk_1.dat 2025-03-31T16:16:59.714501Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:59.737693Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:59.737791Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:59.740286Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64205, node 10 2025-03-31T16:16:59.807747Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:16:59.807773Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:16:59.807782Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:16:59.807933Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30350 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:00.082544Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:04.584445Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7488008439170913585:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:04.584518Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dd3/r3tmp/tmpiFvAHF/pdisk_1.dat 2025-03-31T16:17:04.722242Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:04.764836Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:04.764939Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:04.768575Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27876, node 13 2025-03-31T16:17:04.830890Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:04.830917Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:04.830925Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:04.831075Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11332 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:05.142719Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:05.234150Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateColumnStore CreateColumnStore { Name: "LogStore" ColumnShardCount: 4 SchemaPresets { Name: "default" Schema { Columns { Name: "timestamp" Type: "Timestamp" NotNull: true } Columns { Name: "resource_type" Type: "Utf8" NotNull: true } Columns { Name: "resource_id" Type: "Utf8" NotNull: true } Columns { Name: "uid" Type: "Utf8" NotNull: true } Columns { Name: "level" Type: "Int32" } Columns { Name: "message" Type: "Utf8" } Columns { Name: "json_payload" Type: "JsonDocument" } Columns { Name: "request_id" Type: "Utf8" } Columns { Name: "ingested_at" Type: "Timestamp" } Columns { Name: "saved_at" Type: "Timestamp" } KeyColumnNames: "timestamp" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" DefaultCompression { Codec: ColumnCodecLZ4 } } } } } TxId: 281474976715658 TabletId: 72057594046644480 PeerName: "ipv6:[::1]:48400" , at schemeshard: 72057594046644480 2025-03-31T16:17:05.234579Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TCreateOlapStore Propose, path: /Root/LogStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:17:05.234618Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusPreconditionFailed, reason: Column stores are not supported, at schemeshard: 72057594046644480 2025-03-31T16:17:05.237031Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715658, response: Status: StatusPreconditionFailed Reason: "Column stores are not supported" TxId: 281474976715658 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2025-03-31T16:17:05.237313Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusPreconditionFailed, reason: Column stores are not supported, operation: CREATE COLUMN STORE, path: /Root/LogStore 2025-03-31T16:17:05.237626Z node 13 :TX_PROXY ERROR: Actor# [13:7488008443465881807:2601] txid# 281474976715658, issues: { message: "Column stores are not supported" severity: 1 } assertion failed at ydb/services/ydb/ydb_logstore_ut.cpp:435, virtual void NTestSuiteYdbLogStore::TTestCaseAlterLogTable::Execute_(NUnitTest::TTestContext &): (res.GetStatus() == EStatus::SUCCESS) failed: 
(PRECONDITION_FAILED != SUCCESS)
: Error: Column stores are not supported , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) TBackTrace::Capture()+28 (0x1C6D843C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x1CB954F0) NTestSuiteYdbLogStore::TTestCaseAlterLogTable::Execute_(NUnitTest::TTestContext&)+8721 (0x1C20B181) std::__y1::__function::__func, void ()>::operator()()+280 (0x1C234018) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x1CBCC516) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x1CB9C069) NTestSuiteYdbLogStore::TCurrentTest::Execute()+1204 (0x1C2331E4) NUnitTest::TTestFactory::Execute()+2438 (0x1CB9D936) NUnitTest::RunMain(int, char**)+5213 (0x1CBC6A8D) ??+0 (0x7FC50DC22D90) __libc_start_main+128 (0x7FC50DC22E40) _start+41 (0x1907A029) >> YdbYqlClient::TestDescribeTableWithShardStats [GOOD] >> YdbYqlClient::TestExplicitPartitioning >> Viewer::JsonStorageListingV2GroupIdFilter [GOOD] >> Viewer::JsonStorageListingV2NodeIdFilter >> BasicStatistics::SimpleGlobalIndex [GOOD] >> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientWithCorrectCerts [GOOD] >> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientWithCorrectCerts_AccessDenied >> TGRpcYdbTest::GetOperationBadRequest >> YdbOlapStore::LogNonExistingRequest >> YdbYqlClient::TestYqlSessionClosed [GOOD] >> YdbYqlClient::TestYqlLongSessionPrepareError >> TYqlDateTimeTests::SimpleUpsertSelect [GOOD] >> TYqlDateTimeTests::TimestampKey >> YdbYqlClient::SecurityTokenAuthMultiTenantSDKAsync [GOOD] >> YdbYqlClient::SimpleColumnFamilies ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::SimpleGlobalIndex [GOOD] Test command err: 2025-03-31T16:15:08.410463Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:08.410955Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:08.412752Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d18/r3tmp/tmp9qOfGs/pdisk_1.dat 2025-03-31T16:15:08.833595Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30639, node 1 2025-03-31T16:15:09.151727Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:15:09.151792Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:15:09.151824Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:15:09.152393Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:15:09.154831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:15:09.259163Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:15:09.259310Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:15:09.280640Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:65094 2025-03-31T16:15:09.909818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:15:13.384692Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-31T16:15:13.438589Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:15:13.438715Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:15:13.477605Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:15:13.483997Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:15:13.753631Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:13.754266Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:13.754931Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:13.755078Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:13.758108Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:13.758247Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:13.758342Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:13.758432Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:13.758572Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:13.946821Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:15:13.946941Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:15:13.972983Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:15:14.138823Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:15:14.200546Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-31T16:15:14.200653Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-31T16:15:14.236269Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-31T16:15:14.237525Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-31T16:15:14.237750Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-31T16:15:14.237814Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-31T16:15:14.237866Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-31T16:15:14.237951Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-31T16:15:14.238012Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-31T16:15:14.238078Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-31T16:15:14.238488Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-31T16:15:14.274314Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-31T16:15:14.274463Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1870:2596], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-31T16:15:14.290196Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1885:2608] 2025-03-31T16:15:14.299036Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1902:2617] 2025-03-31T16:15:14.299911Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1902:2617], schemeshard id = 72075186224037897 2025-03-31T16:15:14.321609Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-31T16:15:14.345870Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-31T16:15:14.345933Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-31T16:15:14.346008Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-31T16:15:14.405782Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-31T16:15:14.414187Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-31T16:15:14.414344Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-31T16:15:14.630126Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-31T16:15:14.800462Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-31T16:15:14.871797Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-31T16:15:15.855410Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2243:3077], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:15.855605Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:15.879924Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-31T16:15:16.294094Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2451:3121], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:16.294322Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:16.296073Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2456:3125]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:15:16.296347Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-31T16:15:16.296446Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2458:3127] 2025-03-31T16:15:16.296519Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2458:3127] 2025-03-31T16:15:16.297146Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2459:2926] 2025-03-31T16:15:16.297435Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2458:3127], server id = [2:2459:2926], tablet id = 72075186224037894, status = OK 2025-03-31T16:15:16.297656Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:2459:2926], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-03-31T16:15:16.297722Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-03-31T16:15:16.297957Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-31T16:15:16.298052Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2456:3125], StatRequests.size() = 1 2025-03-31T16:15:16.311761Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2492:3136]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:15:16.311927Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-31T16:15:16.311975Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:2492:3136], StatRequests.size() = 1 2025-03-31T16:15:16.312211Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [1:2494:3138]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:15:16.312443Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-03-31T16:15:16.312473Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 3, ReplyToActorId = [1:2494:3138], StatRequests.size() = 1 2025-03-31T16:15:16.336988Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2499:3143], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:16.337084Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:16.337589Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2504:3148], DatabaseI ... 1T16:17:16.661873Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-03-31T16:17:16.661917Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-03-31T16:17:17.254213Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 119 ], ReplyToActorId[ [2:6788:4780]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:17:17.254472Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 119 ] 2025-03-31T16:17:17.254516Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 119, ReplyToActorId = [2:6788:4780], StatRequests.size() = 1 2025-03-31T16:17:17.815202Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-03-31T16:17:17.815393Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-31T16:17:17.815665Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-31T16:17:17.878728Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 3, at schemeshard: 72075186224037897 2025-03-31T16:17:17.878806Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 198.000000s, at schemeshard: 72075186224037897 2025-03-31T16:17:17.879135Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037897, stats size# 73 2025-03-31T16:17:17.892855Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-03-31T16:17:18.266078Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 120 ], ReplyToActorId[ [2:6823:4798]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:17:18.266398Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 120 ] 2025-03-31T16:17:18.266444Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 120, ReplyToActorId = [2:6823:4798], StatRequests.size() = 1 2025-03-31T16:17:18.737542Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:17:18.737622Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-31T16:17:18.737702Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table. 2025-03-31T16:17:18.737752Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-03-31T16:17:18.738077Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-31T16:17:18.752835Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-03-31T16:17:18.758043Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6846:4817], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:18.758199Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6856:4822], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:18.758337Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:18.781636Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897 2025-03-31T16:17:18.852717Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:6860:4825], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-03-31T16:17:19.000786Z node 2 :TX_PROXY ERROR: Actor# [2:6958:4873] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:17:19.066296Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 121 ], ReplyToActorId[ [2:6987:4888]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:17:19.066555Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 121 ] 2025-03-31T16:17:19.066616Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 121, ReplyToActorId = [2:6987:4888], StatRequests.size() = 1 2025-03-31T16:17:19.226608Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NzU4Yzg0NDUtYjRlNzZhZTMtNzNmYmJmYTEtZmIwNWE0NDA=, TxId: 2025-03-31T16:17:19.226680Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NzU4Yzg0NDUtYjRlNzZhZTMtNzNmYmJmYTEtZmIwNWE0NDA=, TxId: 2025-03-31T16:17:19.227144Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-31T16:17:19.243211Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-03-31T16:17:19.243306Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-03-31T16:17:19.542170Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 122 ], ReplyToActorId[ [2:7019:4907]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:17:19.542443Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 122 ] 2025-03-31T16:17:19.542494Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 122, ReplyToActorId = [2:7019:4907], StatRequests.size() = 1 2025-03-31T16:17:20.442852Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 123 ], ReplyToActorId[ [2:7058:4929]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:17:20.443166Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 123 ] 2025-03-31T16:17:20.443221Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 123, ReplyToActorId = [2:7058:4929], StatRequests.size() = 1 2025-03-31T16:17:20.938567Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-31T16:17:20.949702Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:17:20.949767Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-31T16:17:20.949805Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 6] is data table. 2025-03-31T16:17:20.949838Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 6] 2025-03-31T16:17:20.950147Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-03-31T16:17:20.954654Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-03-31T16:17:20.971707Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ODA1MmE1ZjAtZDEyNzcwMTAtZTUzM2E4NGMtYmFhMWFlZmQ=, TxId: 2025-03-31T16:17:20.971782Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ODA1MmE1ZjAtZDEyNzcwMTAtZTUzM2E4NGMtYmFhMWFlZmQ=, TxId: 2025-03-31T16:17:20.972288Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-31T16:17:20.986863Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 6] 2025-03-31T16:17:20.986936Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-03-31T16:17:21.371296Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 124 ], ReplyToActorId[ [2:7126:4968]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:17:21.371609Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 124 ] 2025-03-31T16:17:21.371665Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 124, ReplyToActorId = [2:7126:4968], StatRequests.size() = 1 2025-03-31T16:17:22.322496Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 125 ], ReplyToActorId[ [2:7169:4992]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:17:22.322834Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 125 ] 2025-03-31T16:17:22.322879Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 125, ReplyToActorId = [2:7169:4992], StatRequests.size() = 1 2025-03-31T16:17:22.813197Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-03-31T16:17:22.813435Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-31T16:17:22.813853Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-31T16:17:22.825130Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:17:22.825201Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-31T16:17:22.825239Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is data table. 2025-03-31T16:17:22.825274Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-31T16:17:22.825686Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-03-31T16:17:22.829017Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-03-31T16:17:22.845305Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MTYzNTFmYmEtYWUxYTFjMGUtYjNkMzUyMDQtYjZlODhjMmU=, TxId: 2025-03-31T16:17:22.845377Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MTYzNTFmYmEtYWUxYTFjMGUtYjNkMzUyMDQtYjZlODhjMmU=, TxId: 2025-03-31T16:17:22.846038Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-31T16:17:22.860692Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-31T16:17:22.860751Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-03-31T16:17:23.265911Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 126 ], ReplyToActorId[ [2:7234:5028]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:17:23.266208Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 126 ] 2025-03-31T16:17:23.266255Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 126, ReplyToActorId = [2:7234:5028], StatRequests.size() = 1 >> YdbTableBulkUpsertOlap::UpsertCsvBug >> YdbYqlClient::TestDecimalFullStack [GOOD] >> YdbYqlClient::TestDescribeDirectory >> YdbYqlClient::CopyTables [GOOD] >> YdbYqlClient::CreateAndAltertTableWithCompactionPolicy ------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::SimpleFeatureFlags [GOOD] Test command err: BASE_PERF = 3.908899098 2025-03-31T16:16:05.730984Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:336:2378], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:16:05.731614Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:16:05.731698Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 21801, node 1 TClient is connected to server localhost:23248 2025-03-31T16:16:14.513388Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:319:2362], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:16:14.513700Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:16:14.513802Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 17119, node 2 TClient is connected to server localhost:21337 2025-03-31T16:16:22.764855Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:337:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:16:22.765067Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:16:22.765361Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 6055, node 3 TClient is connected to server localhost:10221 2025-03-31T16:16:33.219676Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:336:2378], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:16:33.220345Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:16:33.220604Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 9142, node 4 TClient is connected to server localhost:5950 2025-03-31T16:16:43.810506Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:317:2360], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:16:43.810985Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:16:43.811202Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 11425, node 5 TClient is connected to server localhost:3950 2025-03-31T16:16:56.573232Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:337:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:16:56.573501Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:16:56.573819Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 7506, node 6 TClient is connected to server localhost:61753 2025-03-31T16:17:10.127674Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:336:2378], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:17:10.128145Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:17:10.128395Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 21511, node 7 TClient is connected to server localhost:32199 2025-03-31T16:17:13.958565Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7488008476150153807:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:13.958650Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-03-31T16:17:14.215043Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:14.215351Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:14.220053Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:14.222598Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2366, node 8 2025-03-31T16:17:14.304064Z node 8 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:14.304109Z node 8 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:14.304131Z node 8 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:14.304352Z node 8 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26266 >> TGRpcClientLowTest::GrpcRequestProxyWithoutToken [GOOD] >> TGRpcClientLowTest::GrpcRequestProxyCheckTokenWhenItIsSpecified_Ignore >> YdbYqlClient::RetryOperationLimitedDuration [GOOD] >> TGRpcNewCoordinationClient::SessionMethods [GOOD] >> TGRpcNewCoordinationClient::SessionDescribeWatchData >> TGRpcNewCoordinationClient::MultipleSessionsSemaphores [GOOD] >> TGRpcNewCoordinationClient::SessionCreateUpdateDeleteSemaphore >> TGRpcNewCoordinationClient::BasicMethods [GOOD] >> ClientStatsCollector::CounterCacheMiss [GOOD] >> ClientStatsCollector::CounterRetryOperation >> YdbTableBulkUpsert::ValidRetry >> YdbYqlClient::TestTransactionQueryError [GOOD] >> YdbYqlClient::TestReadWrongTable >> TGRpcYdbTest::ExecuteDmlQuery [GOOD] >> TGRpcYdbTest::CreateYqlSessionExecuteQuery ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::RetryOperationLimitedDuration [GOOD] Test command err: 2025-03-31T16:16:43.320414Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008349914030218:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:43.320506Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ddf/r3tmp/tmpoNb7j1/pdisk_1.dat 2025-03-31T16:16:43.685922Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:43.748974Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:43.749078Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 23423, node 1 2025-03-31T16:16:43.763853Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:16:43.857654Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:16:43.857679Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:16:43.857690Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:16:43.857821Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22256 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:16:44.404550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:46.157484Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008362798933133:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:16:46.157619Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:16:46.777802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:16:46.995606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-31T16:16:47.116009Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008367093900648:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:16:47.116101Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008367093900653:2360], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:16:47.116148Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:16:47.120767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-03-31T16:16:47.139356Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008367093900655:2361], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-03-31T16:16:47.223941Z node 1 :TX_PROXY ERROR: Actor# [1:7488008367093900730:2828] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:16:47.285899Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:7488008367093900753:2840], for# test_user@builtin, access# DescribeSchema 2025-03-31T16:16:47.285927Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:7488008367093900753:2840], for# test_user@builtin, access# DescribeSchema 2025-03-31T16:16:47.299938Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488008367093900748:2365], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:21: Error: At function: KiReadTable!
:2:21: Error: Cannot find table 'db.[Root/Test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:16:47.300204Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTU0YmQ1YTctYjJjNmFhYzctNjA5MDNjY2MtOTE2ZGExMzI=, ActorId: [1:7488008367093900644:2355], ActorState: ExecuteState, TraceId: 01jqpfvfgb67bsgffhm7sdtskj, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:16:48.680887Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488008368749616494:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:48.681028Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ddf/r3tmp/tmpTBRRp8/pdisk_1.dat 2025-03-31T16:16:48.808487Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:48.835470Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:48.835554Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:48.838278Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27170, node 4 2025-03-31T16:16:48.961360Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:16:48.961379Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:16:48.961386Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:16:48.961514Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18413 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:16:49.223253Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:51.396647Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488008381634519389:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:16:51.396792Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:16:51.411951Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:16:51.518670Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488008381634519554:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:16:51.518789Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:16:51.518870Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488008381634519559:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:16:51.522409Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-31T16:16:51.541408Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7488008381634519561:2352], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-31T16:16:51.606290Z node 4 :TX_PROXY ERROR: Actor# [4:7488008381634519640:2803] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:16:51.762292Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqpfvksxeammt07gw88gzjh6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=ZTFjMmZmOGUtZDRhNzMzZDktMzNiMzA4NjUtZjYwYmJlYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:16:51.943112Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqpfvm2ddatahmmj2j18v9jb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=ZTFjMmZmOGUtZDRhNzMzZDktMzNiMzA4NjUtZjYwYmJlYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:16:53.685915Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7488008393624575908:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:53.685981Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ddf/r3tmp/tmptpzNE5/pdisk_1.dat 2025-03-31T16:16:53.784916Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:53.826039Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:53.826140Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:53.829044Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20174, node 7 2025-03-31T16:16:53.884740Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:16:53.884770Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:16:53.884780Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:16:53.884972Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26060 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:16:54.243115Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 1 of 5 2025-03-31T16:16:58.686129Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7488008393624575908:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:58.686226Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Previous query attempt was finished with unsuccessful status CLIENT_RESOURCE_EXHAUSTED: Sending retry attempt 2 of 5 Previous query attempt was finished with unsuccessful status UNAVAILABLE: Sending retry attempt 3 of 5 Previous query attempt was finished with unsuccessful status BAD_SESSION: Sending retry attempt 4 of 5 Previous query attempt was finished with unsuccessful status SESSION_BUSY: Sending retry attempt 5 of 5 2025-03-31T16:16:59.046522Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488008419394380777:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:16:59.046604Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488008419394380769:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:16:59.046665Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:16:59.050525Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:16:59.070230Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7488008419394380783:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:16:59.163469Z node 7 :TX_PROXY ERROR: Actor# [7:7488008419394380862:2701] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 1 of 5 Previous query attempt was finished with unsuccessful status CLIENT_RESOURCE_EXHAUSTED: Sending retry attempt 2 of 5 Previous query attempt was finished with unsuccessful status UNAVAILABLE: Sending retry attempt 3 of 5 Previous query attempt was finished with unsuccessful status BAD_SESSION: Sending retry attempt 4 of 5 Previous query attempt was finished with unsuccessful status SESSION_BUSY: Sending retry attempt 5 of 5 Previous query attempt was finished with unsuccessful status NOT_FOUND: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status NOT_FOUND: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status UNDETERMINED: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status UNDETERMINED: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status TRANSPORT_UNAVAILABLE: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status TRANSPORT_UNAVAILABLE: Sending retry attempt 1 of 1 2025-03-31T16:17:03.553065Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7488008433055144349:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:03.553175Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ddf/r3tmp/tmpiwmwrh/pdisk_1.dat 2025-03-31T16:17:03.758611Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:03.813858Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:03.814010Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:03.818478Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15674, node 10 2025-03-31T16:17:03.917109Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:03.917164Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:03.917176Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:03.917359Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7927 WaitRootIsUp 'Root'... 
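
The "Sending retry attempt N of M" trace above comes from a bounded retry loop: each retryable status (OVERLOADED, CLIENT_RESOURCE_EXHAUSTED, UNAVAILABLE, BAD_SESSION, SESSION_BUSY) triggers another attempt until the attempt limit is reached. A minimal self-contained sketch of that pattern, with a hypothetical ExecuteQueryOnce() standing in for the SDK call (this is not the test's actual code):

#include <chrono>
#include <cstdio>
#include <thread>

enum class EStatus { Success, Overloaded, Unavailable, BadSession, SessionBusy };

// Stub attempt: fails with OVERLOADED twice, then succeeds.
static EStatus ExecuteQueryOnce() {
    static int calls = 0;
    return ++calls < 3 ? EStatus::Overloaded : EStatus::Success;
}

// In this stub every non-success status is treated as retryable.
static bool IsRetryable(EStatus s) {
    return s != EStatus::Success;
}

int main() {
    const int maxRetries = 5;
    EStatus st = ExecuteQueryOnce();
    for (int attempt = 1; attempt <= maxRetries && IsRetryable(st); ++attempt) {
        std::printf("Sending retry attempt %d of %d\n", attempt, maxRetries);
        // Exponential backoff before the next attempt.
        std::this_thread::sleep_for(std::chrono::milliseconds(50 << attempt));
        st = ExecuteQueryOnce();
    }
    return st == EStatus::Success ? 0 : 1;
}

The duration-limited variant this test also exercises presumably bounds the loop by elapsed time rather than by attempt count, which matches the retries spread across roughly thirty seconds later in the trace.
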
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:04.193828Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 1 of 3 Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 2 of 3 2025-03-31T16:17:08.553341Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7488008433055144349:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:08.553418Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 3 of 3 Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 1 of 3 2025-03-31T16:17:18.747293Z node 10 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:17:18.747327Z node 10 :IMPORT WARN: Table profiles were not loaded Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 1 of 3 Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 2 of 3 Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 3 of 3 Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 1 of 3 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcNewCoordinationClient::BasicMethods [GOOD] Test command err: 2025-03-31T16:17:02.363502Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008428800036081:2261];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:02.364094Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dbb/r3tmp/tmpck5eoK/pdisk_1.dat 2025-03-31T16:17:02.769878Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:02.777007Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:02.777106Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> 
Connecting 2025-03-31T16:17:02.784344Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19947, node 1 2025-03-31T16:17:02.895675Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:02.895698Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:02.895707Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:02.895830Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26233 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:03.196513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:05.301714Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008441684938788:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:05.301721Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008441684938797:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:05.301834Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:05.306023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:17:05.333726Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008441684938802:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:17:05.410617Z node 1 :TX_PROXY ERROR: Actor# [1:7488008441684938884:2684] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:17:07.379260Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488008449955275084:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:07.379349Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dbb/r3tmp/tmpg5azU4/pdisk_1.dat 2025-03-31T16:17:07.570574Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:07.608329Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:07.608422Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:07.612816Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12511, node 4 2025-03-31T16:17:07.698266Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:07.698291Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:07.698298Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:07.698445Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13618 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:07.961186Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:10.443788Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488008462840177991:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:10.443872Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:10.475388Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:17:10.653680Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488008462840178156:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:10.653777Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:10.653848Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488008462840178161:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:10.658035Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-31T16:17:10.678988Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7488008462840178163:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-31T16:17:10.756015Z node 4 :TX_PROXY ERROR: Actor# [4:7488008462840178247:2796] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:17:12.377544Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7488008472359727428:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:12.377632Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dbb/r3tmp/tmpikiPgy/pdisk_1.dat 2025-03-31T16:17:12.591788Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:12.636095Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:12.636226Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:12.644101Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26140, node 7 2025-03-31T16:17:12.783033Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:12.783063Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:12.783071Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:12.783260Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4117 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:13.282508Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:17:13.361970Z node 7 :TX_PROXY ERROR: Actor# [7:7488008476654695677:2596] txid# 281474976715658, Access denied for bad@builtin on path /Root, with access CreateTable 2025-03-31T16:17:13.362134Z node 7 :TX_PROXY ERROR: Actor# [7:7488008476654695677:2596] txid# 281474976715658, issues: { message: "Access denied for bad@builtin on path /Root" issue_code: 200000 severity: 1 } 2025-03-31T16:17:17.262328Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7488008494954943100:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:17.262532Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dbb/r3tmp/tmpFeDhwR/pdisk_1.dat 2025-03-31T16:17:17.432011Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:17.474251Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:17.474344Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:17.478338Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24202, node 10 2025-03-31T16:17:17.559834Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:17.559876Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:17.559885Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:17.560068Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26142 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:17.858510Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:17:17.928648Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:17:18.054188Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterKesus, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-31T16:17:18.092944Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterKesus, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-31T16:17:18.139431Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterKesus, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-31T16:17:22.361266Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7488008515227356381:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:22.361432Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dbb/r3tmp/tmpcusdXC/pdisk_1.dat 2025-03-31T16:17:22.504054Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:22.538647Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:22.538717Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:22.541546Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18965, node 13 2025-03-31T16:17:22.627863Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:22.627897Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:22.627905Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:22.628053Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19563 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:17:22.971641Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:23.114432Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480
>> TTableProfileTests::ExplicitPartitionsUnordered [GOOD]
>> TTableProfileTests::ExplicitPartitionsWrongKeyFormat
>> YdbYqlClient::TestReadTableMultiShard
>> TGRpcYdbTest::GetOperationBadRequest [GOOD]
>> TGRpcYdbTest::ExecuteQueryWithUuid
>> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientWithCorrectCerts_AccessDenied [GOOD]
>> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientProvidesEmptyClientCerts
>> TDatabaseQuotas::DisableWritesToDatabase [GOOD]
>> TGRpcAuthentication::InvalidPassword
>> TGRpcYdbTest::CreateTableWithIndex [GOOD]
>> TGRpcYdbTest::CreateDeleteYqlSession
>> TTableProfileTests::DescribeTableOptions [GOOD]
>> test_retry.py::TestRetry::test_low_rate[kikimr0] [GOOD]
>> TGRpcLdapAuthentication::LdapAuthSettingsWithEmptyHosts
>> YdbYqlClient::TestYqlLongSessionPrepareError [GOOD]
>> YdbYqlClient::TestYqlLongSessionMultipleErrors
>> TYqlDateTimeTests::TimestampKey [GOOD]
>> TYqlDateTimeTests::IntervalKey
>> TTableProfileTests::UseTableProfilePresetViaSdk [GOOD]
>> TTableProfileTests::WrongTableProfile
>> YdbTableBulkUpsertOlap::UpsertArrowBatch
------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TTableProfileTests::DescribeTableOptions [GOOD]
Test command err: 2025-03-31T16:17:01.461498Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008426637310083:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:01.461628Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dc0/r3tmp/tmpTuDsaa/pdisk_1.dat 2025-03-31T16:17:01.814099Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:01.843401Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:01.843477Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:01.869991Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9142, node 1 2025-03-31T16:17:01.961941Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:01.961965Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:01.961970Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:01.962072Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61691 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:02.224919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... Trying to register node Register node result Status { Code: OK } NodeId: 1024 DomainPath: "Root" Expire: 1743445021763294 Nodes { NodeId: 1024 Host: "localhost" Port: 1773 ResolveHost: "localhost" Address: "localhost" Location { DataCenter: "DataCenter" Rack: "Rack" Unit: "Body" } Expire: 1743445021763294 } Nodes { NodeId: 1 Host: "::1" Port: 12001 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 49 RoomNum: 1 RackNum: 1 BodyNum: 1 DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } } Nodes { NodeId: 2 Host: "::1" Port: 12002 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 50 RoomNum: 2 RackNum: 2 BodyNum: 2 DataCenter: "2" Module: "2" Rack: "2" Unit: "2" } } Nodes { NodeId: 3 Host: "::1" Port: 12003 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 51 RoomNum: 3 RackNum: 3 BodyNum: 3 DataCenter: "3" Module: "3" Rack: "3" Unit: "3" } } 2025-03-31T16:17:05.738373Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488008444290733032:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:05.738493Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dc0/r3tmp/tmptnQrbt/pdisk_1.dat 2025-03-31T16:17:05.955998Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23507, node 4 2025-03-31T16:17:06.045258Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:06.045370Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:06.066569Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:17:06.111565Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:06.111587Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:06.111593Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:06.111754Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server 
localhost:27340 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:06.391598Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... Trying to register node Register node result Status { Code: OK } NodeId: 1024 DomainPath: "Root" Expire: 1743445025969927 Nodes { NodeId: 1024 Host: "localhost" Port: 16690 ResolveHost: "localhost" Address: "localhost" Location { DataCenter: "DataCenter" Rack: "Rack" Unit: "Body" } Expire: 1743445025969927 } Nodes { NodeId: 4 Host: "::1" Port: 12001 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 49 RoomNum: 1 RackNum: 1 BodyNum: 1 DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } } Nodes { NodeId: 5 Host: "::1" Port: 12002 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 50 RoomNum: 2 RackNum: 2 BodyNum: 2 DataCenter: "2" Module: "2" Rack: "2" Unit: "2" } } Nodes { NodeId: 6 Host: "::1" Port: 12003 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 51 RoomNum: 3 RackNum: 3 BodyNum: 3 DataCenter: "3" Module: "3" Rack: "3" Unit: "3" } } 2025-03-31T16:17:10.395859Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7488008463130492944:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:10.395955Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dc0/r3tmp/tmpoC6fS5/pdisk_1.dat 2025-03-31T16:17:10.557491Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:10.592129Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:10.592239Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:10.600376Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4797, node 7 2025-03-31T16:17:10.707993Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:10.708024Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:10.708037Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:10.708183Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration 
TClient is connected to server localhost:12289 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:10.961278Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:12289 2025-03-31T16:17:11.282473Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:11.303115Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:11.808797Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7488008469182706374:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:11.808885Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:17:11.828444Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:11.828547Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:11.836411Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 9 Cookie 9 2025-03-31T16:17:11.837390Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12289 2025-03-31T16:17:13.956500Z node 7 :FLAT_TX_SCHEMESHARD WARN: ... . 
(TRUNCATED) 2025-03-31T16:17:14.658479Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 9 2025-03-31T16:17:14.660421Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-03-31T16:17:16.509691Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7488008488449607691:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:16.509780Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dc0/r3tmp/tmp1f867v/pdisk_1.dat 2025-03-31T16:17:16.655770Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:16.700131Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:16.700285Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:16.702730Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61431, node 10 2025-03-31T16:17:16.783009Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:16.783033Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:16.783041Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:16.783225Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5410 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:17.077988Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:5410 2025-03-31T16:17:17.451794Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:17.476373Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:17:17.982025Z node 12 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[12:7488008493083458702:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:17.982098Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:17:18.014801Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:18.014913Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:18.019838Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 12 Cookie 12 2025-03-31T16:17:18.023545Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5410 2025-03-31T16:17:18.345322Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:5410 TClient::Ls request: /Root/ydb_ut_tenant/table-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715660 CreateStep: 1743437838550 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-1" Columns { Name: "key1" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key2" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "ke... 
(TRUNCATED) 2025-03-31T16:17:18.836838Z node 10 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 12 2025-03-31T16:17:18.837760Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-03-31T16:17:23.147396Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7488008518403864294:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:23.147478Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dc0/r3tmp/tmp3uBAZY/pdisk_1.dat 2025-03-31T16:17:23.381874Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25508, node 13 2025-03-31T16:17:23.487671Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:23.487793Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:23.490918Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:17:23.507934Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:23.507967Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:23.507979Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:23.508154Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29361 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:23.906375Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:29361 2025-03-31T16:17:24.313157Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:24.342825Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:17:24.853795Z node 15 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[15:7488008523122040558:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:24.853862Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:17:24.897188Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:24.897294Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:24.899957Z node 13 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 15 Cookie 15 2025-03-31T16:17:24.901371Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29361 2025-03-31T16:17:25.268173Z node 13 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 15 2025-03-31T16:17:25.268765Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-03-31T16:17:25.855724Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:17:26.859669Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:17:27.860533Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:17:28.860982Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=incorrect path status: LookupError;
>> YdbTableBulkUpsertOlap::UpsertCsvBug [GOOD]
>> YdbTableBulkUpsertOlap::UpsertCSV_DataShard
>> YdbYqlClient::TestDescribeDirectory [GOOD]
>> TGRpcClientLowTest::GrpcRequestProxyCheckTokenWhenItIsSpecified_Ignore [GOOD]
>> TGRpcClientLowTest::GrpcRequestProxyCheckTokenWhenItIsSpecified_Check
>> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesEmptyClientCerts [GOOD]
>> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesServerCerts
>> TFlatTest::AutoSplitMergeQueue [GOOD]
>> TGRpcNewCoordinationClient::SessionDescribeWatchData [GOOD]
>> TGRpcNewCoordinationClient::SessionDescribeWatchOwners
>> TGRpcNewCoordinationClient::SessionCreateUpdateDeleteSemaphore [GOOD]
>> TGRpcNewCoordinationClient::SessionAcquireAcceptedCallback
>> YdbYqlClient::CreateAndAltertTableWithCompactionPolicy [GOOD]
>> YdbYqlClient::CreateAndAltertTableWithPartitioningByLoad
------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestDescribeDirectory [GOOD]
Test command err: 2025-03-31T16:17:10.321650Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008464806593489:2077];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:10.321810Z node 1 :METADATA_PROVIDER ERROR:
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dae/r3tmp/tmpXRbinf/pdisk_1.dat 2025-03-31T16:17:10.667630Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:10.683708Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:10.683822Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:10.712880Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1300, node 1 2025-03-31T16:17:10.783789Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:10.783817Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:10.783825Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:10.783940Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12685 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:11.076575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:13.219636Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008477691496405:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:13.219644Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008477691496394:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:13.219750Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:13.223290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:17:13.250632Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008477691496408:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:17:13.339183Z node 1 :TX_PROXY ERROR: Actor# [1:7488008477691496489:2691] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:17:15.217097Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488008487587614260:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:15.217219Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dae/r3tmp/tmpzt3FWe/pdisk_1.dat 2025-03-31T16:17:15.408206Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:15.445224Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:15.445293Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:15.449259Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26082, node 4 2025-03-31T16:17:15.552591Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:15.552624Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:15.552630Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:15.552816Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5373 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:15.821909Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:18.393660Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488008500472517182:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:18.393675Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488008500472517174:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:18.393725Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:18.397068Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:17:18.421685Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7488008500472517188:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:17:18.476010Z node 4 :TX_PROXY ERROR: Actor# [4:7488008500472517265:2685] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:17:20.199512Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7488008509514272029:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:20.200119Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dae/r3tmp/tmpd1NxID/pdisk_1.dat 2025-03-31T16:17:20.392566Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:20.433668Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:20.433756Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:20.443519Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1658, node 7 2025-03-31T16:17:20.528980Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:20.529025Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:20.529033Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:20.529177Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24496 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:20.877647Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:17:23.483168Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:17:23.640768Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488008522399175125:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:23.640815Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488008522399175135:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:23.640871Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:23.644300Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-31T16:17:23.672901Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7488008522399175139:2347], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-31T16:17:23.736812Z node 7 :TX_PROXY ERROR: Actor# [7:7488008522399175210:2786] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:17:23.843673Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jqpfwk5p8x7kw9bc4eeashe6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=N2RhNWVhYjYtZTg4MmQ4ZTgtZTAyNjdlMjEtZmIzNThlMGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:17:23.923947Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jqpfwkcs7jj9rhe9b4jhex91, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=N2RhNWVhYjYtZTg4MmQ4ZTgtZTAyNjdlMjEtZmIzNThlMGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:17:24.012599Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01jqpfwkf8ft6dcrd98mehdntz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=N2RhNWVhYjYtZTg4MmQ4ZTgtZTAyNjdlMjEtZmIzNThlMGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:17:24.102321Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01jqpfwkhv6qbwz690a1nac84j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=N2RhNWVhYjYtZTg4MmQ4ZTgtZTAyNjdlMjEtZmIzNThlMGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:17:24.226417Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jqpfwkmpfg2rbvdvhxxxnemn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=N2RhNWVhYjYtZTg4MmQ4ZTgtZTAyNjdlMjEtZmIzNThlMGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:17:25.199644Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7488008509514272029:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:25.199705Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:17:25.213100Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jqpfwkrv32c523jax8zy9kvs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=N2RhNWVhYjYtZTg4MmQ4ZTgtZTAyNjdlMjEtZmIzNThlMGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:17:25.231285Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jqpfwkrv32c523jax8zy9kvs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=N2RhNWVhYjYtZTg4MmQ4ZTgtZTAyNjdlMjEtZmIzNThlMGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-31T16:17:26.851140Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7488008532543167384:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:26.851290Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dae/r3tmp/tmplSJ4SB/pdisk_1.dat 2025-03-31T16:17:27.028980Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:27.065423Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:27.065517Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:27.068899Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32002, node 10 2025-03-31T16:17:27.159032Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:27.159061Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:27.159067Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:27.159258Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62991 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:27.452511Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:30.392985Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7488008549723037649:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:30.393091Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:30.426234Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
>> YdbYqlClient::TestReadWrongTable [GOOD]
>> TGRpcYdbTest::CreateYqlSessionExecuteQuery [GOOD]
>> TGRpcYdbTest::DeleteFromAfterCreate
>> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvideIncorrectCerts
>> YdbTableBulkUpsert::ValidRetry [GOOD]
>> YdbTableBulkUpsert::Types
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::AutoSplitMergeQueue [GOOD]
Test command err: 2025-03-31T16:16:12.184690Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008214443882463:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:12.184813Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cf3/r3tmp/tmpnOfjgR/pdisk_1.dat 2025-03-31T16:16:12.487880Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:12.573852Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:12.573955Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:12.575675Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5269 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:16:12.776081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:12.806901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743437772909 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "String" TypeId: 4097 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyCol... (TRUNCATED) A-0 B-0 2025-03-31T16:16:13.208562Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.10, eph 1} end=0, 2 blobs 1r (max 1), put Spent{time=0.031s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (6291598 0 0)b }, ecr=1.000 2025-03-31T16:16:13.235498Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 6291502 rowCount 1 cpuUsage 0 2025-03-31T16:16:13.248782Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.12, eph 1} end=0, 2 blobs 1r (max 1), put Spent{time=0.015s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (6291598 0 0)b }, ecr=1.000 2025-03-31T16:16:13.335662Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-03-31T16:16:13.335822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72057594046644480:1 data size 6291502 row count 1 2025-03-31T16:16:13.336882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], pathId map=T1, is column=0, is olap=0 2025-03-31T16:16:13.336928Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037888 followerId=0, pathId 3: RowCount 1, DataSize 6291502 2025-03-31T16:16:13.337167Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-03-31T16:16:13.355234Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 6291502 rowCount 1 cpuUsage 0 2025-03-31T16:16:13.455536Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-03-31T16:16:13.455651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72057594046644480:1 data size 6291502 row count 1 2025-03-31T16:16:13.455701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], pathId map=T1, is column=0, is olap=0 2025-03-31T16:16:13.455731Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037888 followerId=0, pathId 3: RowCount 1, DataSize 6291502 2025-03-31T16:16:13.455917Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
Started TEvPersistStats at tablet 72057594046644480, queue size# 0 A-1 2025-03-31T16:16:13.553674Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.15, eph 2} end=0, 2 blobs 1r (max 1), put Spent{time=0.021s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (6291598 0 0)b }, ecr=1.000 2025-03-31T16:16:13.565158Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 12583004 rowCount 2 cpuUsage 0 2025-03-31T16:16:13.590977Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.17, eph 2} end=0, 3 blobs 2r (max 2), put Spent{time=0.026s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (12583126 0 0)b }, ecr=1.000 2025-03-31T16:16:13.663427Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 12583004 rowCount 2 cpuUsage 0 2025-03-31T16:16:13.665379Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-03-31T16:16:13.665455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72057594046644480:1 data size 12583004 row count 2 2025-03-31T16:16:13.665491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], pathId map=T1, is column=0, is olap=0 2025-03-31T16:16:13.665509Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037888 followerId=0, pathId 3: RowCount 2, DataSize 12583004 2025-03-31T16:16:13.665867Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 B-1 2025-03-31T16:16:13.813612Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.20, eph 3} end=0, 2 blobs 1r (max 1), put Spent{time=0.028s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (6291598 0 0)b }, ecr=1.000 2025-03-31T16:16:13.822296Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 18874506 rowCount 3 cpuUsage 0 2025-03-31T16:16:13.855573Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.22, eph 3} end=0, 4 blobs 3r (max 3), put Spent{time=0.034s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (18874656 0 0)b }, ecr=1.000 2025-03-31T16:16:13.922418Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-03-31T16:16:13.922523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72057594046644480:1 data size 18874506 row count 3 2025-03-31T16:16:13.922570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], pathId map=T1, is column=0, is olap=0 2025-03-31T16:16:13.922602Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037888 followerId=0, pathId 3: RowCount 3, DataSize 18874506 2025-03-31T16:16:13.922753Z node 1 :FLAT_TX_SCHEMESHARD INFO: [BackgroundCompaction] [Start] Compacting for pathId# [OwnerId: 72057594046644480, 
LocalPathId: 3], datashard# 72075186224037888, compactionInfo# {72057594046644480:1, SH# 2, Rows# 3, Deletes# 0, Compaction# 1970-01-01T00:00:00.000000Z}, next wakeup in# 0.000000s, rate# 5.787037037e-06, in queue# 1 shards, waiting after compaction# 0 shards, running# 0 shards at schemeshard 72057594046644480 2025-03-31T16:16:13.924357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Operation queue set wakeup after delta# 599 seconds 2025-03-31T16:16:13.924527Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-03-31T16:16:13.953037Z node 1 :FLAT_TX_SCHEMESHARD INFO: [BackgroundCompaction] [Finished] Compaction completed for pathId# [OwnerId: 72057594046644480, LocalPathId: 3], datashard# 72075186224037888, shardIdx# 72057594046644480:1 in# 30 ms, with status# 1, next wakeup in# 599.969661s, rate# 5.787037037e-06, in queue# 1 shards, waiting after compaction# 1 shards, running# 0 shards at schemeshard 72057594046644480 2025-03-31T16:16:13.953091Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 18874506 rowCount 3 cpuUsage 0 2025-03-31T16:16:14.053599Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-03-31T16:16:14.053701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72057594046644480:1 data size 18874506 row count 3 2025-03-31T16:16:14.053746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], pathId map=T1, is column=0, is olap=0 2025-03-31T16:16:14.053770Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037888 followerId=0, pathId 3: RowCount 3, DataSize 18874506 2025-03-31T16:16:14.05394 ... shed: true CreateTxId: 281474976715659 CreateStep: 1743437815168 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 25 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 25 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 23 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyCol... (TRUNCATED) TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743437815168 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 25 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 25 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 23 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyCol... 
(TRUNCATED) TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743437815168 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 25 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 25 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 23 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyCol... (TRUNCATED) TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743437815168 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 25 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 25 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 23 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyCol... (TRUNCATED) 2025-03-31T16:17:29.529634Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037920 not found 2025-03-31T16:17:29.531492Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037915 not found TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743437815168 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 26 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 26 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 24 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyCol... 
(TRUNCATED) 2025-03-31T16:17:29.711777Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037925 not found 2025-03-31T16:17:29.711822Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037917 not found 2025-03-31T16:17:29.783099Z node 3 :OPS_COMPACT ERROR: Compact{72075186224037928.1.6, eph 548} end=2, 0 blobs 0r (max 17), put Spent{time=0.001s,wait=0.000s,interrupts=0} 2025-03-31T16:17:29.783380Z node 3 :TABLET_EXECUTOR ERROR: Leader{72075186224037928:1:10} Compact 1 on TGenCompactionParams{1001: gen 255 epoch 0, 1 parts} step 6, product {0 parts epoch 0} thrown 2025-03-31T16:17:29.784270Z node 3 :OPS_COMPACT ERROR: Compact{72075186224037927.1.6, eph 548} end=2, 0 blobs 0r (max 17), put Spent{time=0.002s,wait=0.000s,interrupts=0} 2025-03-31T16:17:29.784753Z node 3 :TABLET_EXECUTOR ERROR: Leader{72075186224037927:1:10} Compact 1 on TGenCompactionParams{1001: gen 255 epoch 0, 1 parts} step 6, product {0 parts epoch 0} thrown 2025-03-31T16:17:29.816702Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037921 not found 2025-03-31T16:17:29.936396Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037923 not found 2025-03-31T16:17:29.963419Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037926 not found 2025-03-31T16:17:30.060134Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037927 not found 2025-03-31T16:17:30.163703Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037930 not found TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743437815168 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 31 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 31 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 29 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyCol... 
(TRUNCATED) 2025-03-31T16:17:30.987504Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037929 not found 2025-03-31T16:17:30.987544Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037928 not found 2025-03-31T16:17:31.137137Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037934 not found 2025-03-31T16:17:31.137185Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037931 not found 2025-03-31T16:17:31.239954Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037933 not found 2025-03-31T16:17:31.239996Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037924 not found 2025-03-31T16:17:31.281520Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037935 not found 2025-03-31T16:17:31.281560Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037932 not found 2025-03-31T16:17:31.370388Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037936 not found 2025-03-31T16:17:31.370425Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037937 not found TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743437815168 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 36 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 36 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 34 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyCol... (TRUNCATED) TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743437815168 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 36 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 36 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 34 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyCol... 
(TRUNCATED) >> TGRpcYdbTest::ExecuteQueryWithUuid [GOOD] >> TGRpcYdbTest::ExecuteQueryWithParametersBadRequest >> YdbYqlClient::TestReadTableMultiShard [GOOD] >> YdbYqlClient::TestReadTableMultiShardUseSnapshot ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestReadWrongTable [GOOD] Test command err: 2025-03-31T16:17:12.658494Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008474125393281:2272];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:12.658635Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000da4/r3tmp/tmpykCiP4/pdisk_1.dat 2025-03-31T16:17:13.040597Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:13.096614Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:13.096717Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 8715, node 1 2025-03-31T16:17:13.116876Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:17:13.168576Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:13.168605Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:13.168616Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:13.168737Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61949 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:13.476016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:15.471770Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008487010295990:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:15.472140Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008487010295981:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:15.472210Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:15.475378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:17:15.494233Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008487010295995:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:17:15.589485Z node 1 :TX_PROXY ERROR: Actor# [1:7488008487010296071:2687] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:17:17.638830Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488008495245005763:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:17.638969Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000da4/r3tmp/tmpdkn7CT/pdisk_1.dat 2025-03-31T16:17:17.797025Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:17.839578Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:17.839703Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:17.844230Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21551, node 4 2025-03-31T16:17:17.959816Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:17.959843Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:17.959849Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:17.959986Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26897 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:18.232833Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:20.853597Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488008508129908667:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:20.853762Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:20.854102Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488008508129908679:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:20.857325Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:17:20.890893Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7488008508129908681:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:17:20.991823Z node 4 :TX_PROXY ERROR: Actor# [4:7488008508129908761:2666] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:17:22.553325Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7488008514915143518:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:22.554582Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000da4/r3tmp/tmpE5J6dg/pdisk_1.dat 2025-03-31T16:17:22.755115Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:22.797316Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:22.797420Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:22.801557Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21110, node 7 2025-03-31T16:17:22.917365Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:22.917391Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:22.917399Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:22.917572Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5308 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:23.252334Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:26.095465Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488008532095013777:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:26.095557Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:26.128910Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:17:26.305876Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488008532095013943:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:26.305952Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:26.306247Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488008532095013948:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:26.310552Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-31T16:17:26.338490Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7488008532095013950:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-31T16:17:26.422499Z node 7 :TX_PROXY ERROR: Actor# [7:7488008532095014027:2811] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:17:26.504228Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jqpfwns1b5w72pc5rv4s8ew9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OGQ2ZDAxZDEtNmUyMTE4YjQtYmZiYTRjYWYtY2UyOWRkNGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:17:26.626843Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jqpfwp177weyaa3ddx8wcg9t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=YWQ5MTI0Yy05OWY5Y2I2NC0yMzQ2NjI0MS04Y2NiZDIwNw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:17:26.726245Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=OGQ2ZDAxZDEtNmUyMTE4YjQtYmZiYTRjYWYtY2UyOWRkNGI=, ActorId: [7:7488008532095013750:2334], ActorState: ExecuteState, TraceId: 01jqpfwp3r91h6xcwj6x2a0se2, Create QueryResponse for error on request, msg: 2025-03-31T16:17:28.478726Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7488008542409848946:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:28.479205Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000da4/r3tmp/tmpbvoSlK/pdisk_1.dat 2025-03-31T16:17:28.647948Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:28.691895Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:28.691963Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:28.694684Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28947, node 10 2025-03-31T16:17:28.758795Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:28.758822Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:28.758830Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:28.758978Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25158 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:29.140322Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:29.195795Z node 10 :GRPC_SERVER INFO: Got grpc request# ListEndpointsRequest, traceId# 01jqpfwrkb85d270hyptvvbv27, sdkBuildInfo# ydb-cpp-sdk/dev, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:54582, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# 9.995330s 2025-03-31T16:17:29.204721Z node 10 :GRPC_SERVER DEBUG: Got grpc request# CreateSessionRequest, traceId# 01jqpfwrkm063beg0r7s248v1e, sdkBuildInfo# ydb-cpp-sdk/dev, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:54586, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-03-31T16:17:32.101270Z node 10 :GRPC_SERVER DEBUG: Got grpc request# ReadTableRequest, traceId# 01jqpfwve5c4a8me3b4gmmg4ac, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:53026, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-03-31T16:17:32.103131Z node 10 :TX_PROXY ERROR: [ReadTable [10:7488008559589719162:2336] TxId# 281474976715658] Navigate request failed for table 'Root/NoTable' 2025-03-31T16:17:32.103256Z node 10 :TX_PROXY ERROR: [ReadTable [10:7488008559589719162:2336] TxId# 281474976715658] RESPONSE Status# ResolveError shard: 0 table: Root/NoTable 2025-03-31T16:17:32.103723Z node 10 :READ_TABLE_API NOTICE: [10:7488008559589719161:2336] Finish grpc stream, status: 400070
: Error: Failed to resolve table Root/NoTable, code: 200400
: Error: Got ResolveError response from TxProxy
: Error: Failed to resolve table Root/NoTable 2025-03-31T16:17:32.108194Z node 10 :GRPC_SERVER DEBUG: [0x51a00008c480] received request Name# SchemeOperationStatus ok# false data# peer# current inflight# 0 2025-03-31T16:17:32.108193Z node 10 :GRPC_SERVER DEBUG: [0x51a00010c280] received request Name# SchemeOperation ok# false data# peer# current inflight# 0 2025-03-31T16:17:32.108466Z node 10 :GRPC_SERVER DEBUG: [0x51a000051080] received request Name# ChooseProxy ok# false data# peer# current inflight# 0 2025-03-31T16:17:32.108467Z node 10 :GRPC_SERVER DEBUG: [0x51a00004da80] received request Name# SchemeDescribe ok# false data# peer# current inflight# 0 2025-03-31T16:17:32.108659Z node 10 :GRPC_SERVER DEBUG: [0x51a00010ce80] received request Name# PersQueueRequest ok# false data# peer# current inflight# 0 2025-03-31T16:17:32.108664Z node 10 :GRPC_SERVER DEBUG: [0x51a0000c7e80] received request Name# SchemeInitRoot ok# false data# peer# current inflight# 0 2025-03-31T16:17:32.108838Z node 10 :GRPC_SERVER DEBUG: [0x51a000109880] received request Name# ResolveNode ok# false data# peer# current inflight# 0 2025-03-31T16:17:32.108851Z node 10 :GRPC_SERVER DEBUG: [0x51a0000b5880] received request Name# FillNode ok# false data# peer# current inflight# 0 2025-03-31T16:17:32.109035Z node 10 :GRPC_SERVER DEBUG: [0x51a000096c80] received request Name# DrainNode ok# false data# peer# current inflight# 0 2025-03-31T16:17:32.109057Z node 10 :GRPC_SERVER DEBUG: [0x51a00009d280] received request Name# BlobStorageConfig ok# false data# peer# current inflight# 0 2025-03-31T16:17:32.109217Z node 10 :GRPC_SERVER DEBUG: [0x51a0000c7280] received request Name# HiveCreateTablet ok# false data# peer# current inflight# 0 2025-03-31T16:17:32.109273Z node 10 :GRPC_SERVER DEBUG: [0x51a000098480] received request Name# TestShardControl ok# false data# peer# current inflight# 0 2025-03-31T16:17:32.109448Z node 10 :GRPC_SERVER DEBUG: [0x51a00017dc80] received request Name# RegisterNode ok# false data# peer# current inflight# 0 2025-03-31T16:17:32.109507Z node 10 :GRPC_SERVER DEBUG: [0x51a00010c880] received request Name# CmsRequest ok# false data# peer# current inflight# 0 2025-03-31T16:17:32.109634Z node 10 :GRPC_SERVER DEBUG: [0x51a00010e080] received request Name# ConsoleRequest ok# false data# peer# current inflight# 0 2025-03-31T16:17:32.109679Z node 10 :GRPC_SERVER DEBUG: [0x51a000097880] received request Name# InterconnectDebug ok# false data# peer# current inflight# 0 2025-03-31T16:17:32.109806Z node 10 :GRPC_SERVER DEBUG: [0x51a00008d080] received request Name# TabletStateRequest ok# false data# peer# current inflight# 0 >> BasicStatistics::TwoTables [GOOD] >> TGRpcAuthentication::InvalidPassword [GOOD] >> TGRpcAuthentication::DisableLoginAuthentication >> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientProvidesEmptyClientCerts [GOOD] >> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientDoesNotProvideCorrectCerts >> ClientStatsCollector::CounterRetryOperation [GOOD] >> ClientStatsCollector::ExternalMetricRegistryByRawPtr >> TGRpcLdapAuthentication::LdapAuthSettingsWithEmptyHosts [GOOD] >> TGRpcLdapAuthentication::LdapAuthSettingsWithEmptyBaseDn >> TGRpcYdbTest::ExecuteQueryBadRequest >> YdbTableBulkUpsert::Nulls [GOOD] >> YdbTableBulkUpsert::NotNulls >> TGRpcYdbTest::CreateDeleteYqlSession [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::TwoTables [GOOD] Test command err: 2025-03-31T16:15:01.267684Z node 1 
:KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:01.268048Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:15:01.268136Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d31/r3tmp/tmpiNwAeH/pdisk_1.dat 2025-03-31T16:15:02.043590Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2841, node 1 2025-03-31T16:15:02.636863Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:15:02.636924Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:15:02.636959Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:15:02.639651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:15:02.640134Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:15:02.740094Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:15:02.740282Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:15:02.763153Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16262 2025-03-31T16:15:03.453567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:15:07.286416Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-31T16:15:07.331014Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:15:07.331164Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:15:07.369343Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:15:07.376512Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:15:07.653881Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:07.654560Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:07.655893Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:07.656060Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:07.656394Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:07.656493Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:07.656579Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:07.656650Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:07.656738Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:07.858245Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:15:07.858366Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:15:07.872725Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:15:08.088843Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:15:08.186194Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-31T16:15:08.186301Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-31T16:15:08.221156Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-31T16:15:08.221815Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-31T16:15:08.222046Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-31T16:15:08.222126Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-31T16:15:08.222196Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-31T16:15:08.222248Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-31T16:15:08.222311Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-31T16:15:08.222363Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-31T16:15:08.222889Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-31T16:15:08.246081Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-31T16:15:08.246207Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1872:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-31T16:15:08.253147Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2608] 2025-03-31T16:15:08.260566Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1908:2617] 2025-03-31T16:15:08.261189Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1908:2617], schemeshard id = 72075186224037897 2025-03-31T16:15:08.263633Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-31T16:15:08.286063Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-31T16:15:08.286141Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-31T16:15:08.286215Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-31T16:15:08.301061Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-31T16:15:08.309002Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-31T16:15:08.309153Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-31T16:15:08.596167Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-31T16:15:08.804162Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-31T16:15:08.908041Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-31T16:15:10.252938Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2241:3074], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:10.253190Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:10.301119Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-31T16:15:10.607733Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2382:3110], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:10.607916Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:10.610047Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2387:3114]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:15:10.610354Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-31T16:15:10.610450Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2389:3116] 2025-03-31T16:15:10.611142Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2389:3116] 2025-03-31T16:15:10.611825Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2390:2877] 2025-03-31T16:15:10.612086Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2389:3116], server id = [2:2390:2877], tablet id = 72075186224037894, status = OK 2025-03-31T16:15:10.612316Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:2390:2877], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-03-31T16:15:10.612396Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-03-31T16:15:10.613792Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-31T16:15:10.613881Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2387:3114], StatRequests.size() = 1 2025-03-31T16:15:10.643916Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2394:3120], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:10.644040Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:10.644450Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2399:3125], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:10.652033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-03-31T16:15:10.885946Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-03-31T16:15:10.886037Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-03-31T16:15:10.983416Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2389:3116], schemeshard count = 1 2025-03-31T16:15:11.508390Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorA ... t[ 1 ] 2025-03-31T16:17:24.681290Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 119 ] 2025-03-31T16:17:24.681331Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 119, ReplyToActorId = [2:6703:4775], StatRequests.size() = 1 2025-03-31T16:17:25.443305Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-03-31T16:17:25.443470Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-31T16:17:25.443767Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-31T16:17:25.495773Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 3, at schemeshard: 72075186224037897 2025-03-31T16:17:25.495858Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 202.000000s, at schemeshard: 72075186224037897 2025-03-31T16:17:25.496142Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037897, stats size# 73 2025-03-31T16:17:25.520642Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-03-31T16:17:26.122398Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 120 ], ReplyToActorId[ [2:6736:4791]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:17:26.122631Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 120 ] 2025-03-31T16:17:26.122668Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 120, ReplyToActorId = [2:6736:4791], StatRequests.size() = 1 2025-03-31T16:17:26.861052Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:17:26.861127Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-31T16:17:26.861181Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 5] is data table. 2025-03-31T16:17:26.861232Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 5] 2025-03-31T16:17:26.861595Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-31T16:17:26.890160Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-03-31T16:17:26.893718Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6771:4817], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:26.893900Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6761:4812], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:26.894134Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:26.906219Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897 2025-03-31T16:17:26.975918Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:6775:4820], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-03-31T16:17:27.152266Z node 2 :TX_PROXY ERROR: Actor# [2:6873:4867] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:17:27.200845Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 121 ], ReplyToActorId[ [2:6902:4882]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:17:27.201162Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 121 ] 2025-03-31T16:17:27.201223Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 121, ReplyToActorId = [2:6902:4882], StatRequests.size() = 1 2025-03-31T16:17:27.336821Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MWUyZjQ5OTgtYzU5ZDVlMzMtOWNlMGM1ZWItYjRhYWVjYzU=, TxId: 2025-03-31T16:17:27.336924Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MWUyZjQ5OTgtYzU5ZDVlMzMtOWNlMGM1ZWItYjRhYWVjYzU=, TxId: 2025-03-31T16:17:27.337594Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-31T16:17:27.363389Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 5] 2025-03-31T16:17:27.363474Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-03-31T16:17:27.905339Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 122 ], ReplyToActorId[ [2:6937:4901]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:17:27.905651Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 122 ] 2025-03-31T16:17:27.905691Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 122, ReplyToActorId = [2:6937:4901], StatRequests.size() = 1 2025-03-31T16:17:29.280386Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 123 ], ReplyToActorId[ [2:6978:4923]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:17:29.280675Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 123 ] 2025-03-31T16:17:29.280717Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 123, ReplyToActorId = [2:6978:4923], StatRequests.size() = 1 2025-03-31T16:17:29.993341Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-31T16:17:30.004592Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:17:30.004657Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-31T16:17:30.004701Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table. 2025-03-31T16:17:30.004756Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-03-31T16:17:30.005083Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-03-31T16:17:30.007304Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-03-31T16:17:30.020224Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MjVjOWFhODgtNWY2ZDg5M2ItYmY4Nzk1ODUtNTMwYjQ5ZDQ=, TxId: 2025-03-31T16:17:30.020289Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MjVjOWFhODgtNWY2ZDg5M2ItYmY4Nzk1ODUtNTMwYjQ5ZDQ=, TxId: 2025-03-31T16:17:30.020761Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-31T16:17:30.041043Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-03-31T16:17:30.041112Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-03-31T16:17:30.641254Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 124 ], ReplyToActorId[ [2:7044:4962]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:17:30.641521Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 124 ] 2025-03-31T16:17:30.641565Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 124, ReplyToActorId = [2:7044:4962], StatRequests.size() = 1 2025-03-31T16:17:32.018871Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 125 ], ReplyToActorId[ [2:7087:4986]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:17:32.019171Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 125 ] 2025-03-31T16:17:32.019213Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 125, ReplyToActorId = [2:7087:4986], StatRequests.size() = 1 2025-03-31T16:17:32.744554Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-03-31T16:17:32.744775Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-31T16:17:32.745184Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-31T16:17:32.759813Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:17:32.759866Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-31T16:17:32.759901Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is data table. 2025-03-31T16:17:32.759931Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-31T16:17:32.760247Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-03-31T16:17:32.762685Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-03-31T16:17:32.780532Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ODdlNmVjNTctMWJkNjU2NDYtODY3NDhmN2EtZTE4MGNmNTc=, TxId: 2025-03-31T16:17:32.780607Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ODdlNmVjNTctMWJkNjU2NDYtODY3NDhmN2EtZTE4MGNmNTc=, TxId: 2025-03-31T16:17:32.781099Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-31T16:17:32.795362Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-31T16:17:32.795417Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-03-31T16:17:33.392205Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 126 ], ReplyToActorId[ [2:7148:5020]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:17:33.392478Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 126 ] 2025-03-31T16:17:33.392519Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 126, ReplyToActorId = [2:7148:5020], StatRequests.size() = 1 2025-03-31T16:17:33.393202Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 127 ], ReplyToActorId[ [2:7150:5022]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:17:33.397390Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 127 ] 2025-03-31T16:17:33.397476Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 127, ReplyToActorId = [2:7150:5022], StatRequests.size() = 1 >> TTableProfileTests::UseDefaultProfile >> YdbYqlClient::TestYqlLongSessionMultipleErrors [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithOutCertVerification_ClientProvidesExpiredCert [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithoutCertVerification_ClientDoesNotProvideClientCerts >> TYqlDateTimeTests::IntervalKey [GOOD] >> TYqlDateTimeTests::SimpleOperations >> TTableProfileTests::ExplicitPartitionsWrongKeyFormat [GOOD] >> TTableProfileTests::ExplicitPartitionsWrongKeyType >> YdbTableBulkUpsert::Simple >> TGRpcClientLowTest::GrpcRequestProxyCheckTokenWhenItIsSpecified_Check [GOOD] >> TGRpcClientLowTest::MultipleSimpleRequests >> YdbTableBulkUpsertOlap::UpsertArrowBatch [GOOD] >> YdbTableBulkUpsertOlap::UpsertArrowDupField ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcYdbTest::CreateDeleteYqlSession [GOOD] Test command err: 2025-03-31T16:17:11.042716Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008468380087194:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:11.042785Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000da9/r3tmp/tmpckRNQg/pdisk_1.dat 2025-03-31T16:17:11.400918Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6458, node 1 2025-03-31T16:17:11.432797Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root 
Strong=0 2025-03-31T16:17:11.433157Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:17:11.439789Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:11.439920Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:11.457805Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:11.457832Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:11.457840Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:11.457996Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:17:11.458176Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19967 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:11.749428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
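For reference, the cleanup statement that the [TQueryBase] RunDataQuery entries earlier in this section execute is reproduced below as formatted YQL. It is quoted from the log itself; the $owner_id and $local_path_id parameters are bound by the statistics service at run time, so no literal values appear here.

    -- Statistics cleanup query as logged by [TQueryBase] RunDataQuery
    DECLARE $owner_id AS Uint64;
    DECLARE $local_path_id AS Uint64;

    DELETE FROM `.metadata/_statistics`
    WHERE owner_id = $owner_id AND local_path_id = $local_path_id;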
2025-03-31T16:17:15.203536Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488008485883554944:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:15.203646Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000da9/r3tmp/tmpTFKPrs/pdisk_1.dat 2025-03-31T16:17:15.326757Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:15.352441Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:15.352520Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:15.355855Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5771, node 4 2025-03-31T16:17:15.455985Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:15.456019Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:15.456034Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:15.456269Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25714 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:15.713121Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:17:15.815640Z node 4 :TX_PROXY ERROR: Actor# [4:7488008485883555850:2598] txid# 281474976715658, issues: { message: "Unknown column \'BlaBla\' specified in key column list" severity: 1 } 2025-03-31T16:17:19.580187Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7488008504343013682:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:19.580307Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000da9/r3tmp/tmpuQszLh/pdisk_1.dat 2025-03-31T16:17:19.736185Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:19.766497Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:19.766583Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:19.770383Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31857, node 7 2025-03-31T16:17:19.849397Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:19.849419Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:19.849427Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:19.849567Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21085 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:20.093281Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
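The TX_PROXY error above (txid# 281474976715658, "Unknown column 'BlaBla' specified in key column list") is the schemeshard rejection produced when a CREATE TABLE's key column list names a column that is not declared. A minimal YQL sketch of that error class follows; the table path and column set are hypothetical, not taken from the test source.

    -- Hypothetical DDL; PRIMARY KEY names an undeclared column, so schemeshard
    -- rejects it with "Unknown column ... specified in key column list".
    CREATE TABLE `/Root/SampleTable` (
        Key Uint32,
        Value String,
        PRIMARY KEY (BlaBla)
    );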
2025-03-31T16:17:24.000697Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7488008526732938375:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:24.000744Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000da9/r3tmp/tmpSAFjZV/pdisk_1.dat 2025-03-31T16:17:24.169119Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:24.205371Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:24.205472Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:24.208194Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21552, node 10 2025-03-31T16:17:24.366858Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:24.366884Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:24.366893Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:24.367058Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10469 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:24.620738Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:24.724624Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:17:24.949308Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-31T16:17:27.486086Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7488008539617841751:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:27.486178Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:27.486767Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7488008539617841763:2355], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:27.495391Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-03-31T16:17:27.537499Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7488008539617841765:2356], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-03-31T16:17:27.606675Z node 10 :TX_PROXY ERROR: Actor# [10:7488008539617841843:3065] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 11], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:17:28.652859Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqpfwpxwcdg9b3d4716esv7d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NGI5MWM5NTItODg0YzQ2NTktNjJkYTQyYy05NTM0ZDdmMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:17:28.685528Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqpfwpxwcdg9b3d4716esv7d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NGI5MWM5NTItODg0YzQ2NTktNjJkYTQyYy05NTM0ZDdmMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:17:28.867111Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jqpfwr4g6kng3ks8r6rh3bf5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ZTZlNjkyNWYtZjU0ZWNlNTYtYzc4ZGJmNzktZWRhMWIwNzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:17:30.583629Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7488008549882359974:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:30.583722Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000da9/r3tmp/tmpbcjV4Z/pdisk_1.dat 2025-03-31T16:17:30.739474Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:30.781524Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:30.781624Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:30.785825Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15861, node 13 2025-03-31T16:17:30.878813Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:30.878839Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:30.878848Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:30.879008Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1140 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:31.194459Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... >> YdbTableBulkUpsertOlap::UpsertCSV_DataShard [GOOD] >> YdbTableBulkUpsertOlap::UpsertMixed >> TGRpcNewCoordinationClient::SessionDescribeWatchOwners [GOOD] >> TGRpcNewCoordinationClient::SessionDescribeWatchReplace >> TGRpcNewCoordinationClient::SessionAcquireAcceptedCallback [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestYqlLongSessionMultipleErrors [GOOD] Test command err: 2025-03-31T16:17:15.292162Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008487518457078:2250];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:15.292766Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d98/r3tmp/tmpujigQl/pdisk_1.dat 2025-03-31T16:17:15.614427Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29681, node 1 2025-03-31T16:17:15.668281Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:15.668376Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:15.705886Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:17:15.727858Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:15.727961Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:15.728001Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:15.728143Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32221 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:16.029414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:18.107808Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008500403359791:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:18.107911Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:18.393709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:17:18.519749Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008500403359961:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:18.519828Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008500403359966:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:18.519872Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:18.524097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-31T16:17:18.546759Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008500403359968:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-31T16:17:18.632027Z node 1 :TX_PROXY ERROR: Actor# [1:7488008500403360037:2790] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:17:18.694960Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488008500403360055:2355], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:25: Error: At function: KiWriteTable!
:2:43: Error: Failed to convert type: Struct<'Key':String,'Value':String> to Struct<'Key':Uint32?,'Value':String?>
:2:43: Error: Failed to convert 'Key': String to Optional
:2:43: Error: Failed to convert input columns types to scheme types, code: 2031 2025-03-31T16:17:18.695285Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTkwMjYxYjYtZjNjZjJjNjktN2VlMzdjNDMtNTNiM2U5ZTM=, ActorId: [1:7488008500403359773:2334], ActorState: ExecuteState, TraceId: 01jqpfwe5pbj4b1wkv4dab9vyv, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-03-31T16:17:20.172100Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488008506173824892:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:20.172304Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d98/r3tmp/tmpiD0aaa/pdisk_1.dat 2025-03-31T16:17:20.435400Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:20.495073Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:20.495176Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:20.497999Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16544, node 4 2025-03-31T16:17:20.627874Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:20.627897Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:20.627905Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:20.628046Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11856 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:20.910733Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:23.495023Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488008519058727838:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:23.495141Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:25.112056Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7488008528942751673:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:25.112175Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d98/r3tmp/tmpHrec4q/pdisk_1.dat 2025-03-31T16:17:25.315383Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:25.349712Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:25.349789Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:25.354123Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25751, node 7 2025-03-31T16:17:25.485786Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:25.485808Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:25.485819Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:25.486000Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5387 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:25.777801Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:28.644412Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488008541827654626:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:28.644550Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:28.663707Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:17:28.768587Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488008541827654793:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:28.768662Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:28.768724Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488008541827654798:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:28.772201Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-31T16:17:28.796530Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7488008541827654800:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-31T16:17:28.860366Z node 7 :TX_PROXY ERROR: Actor# [7:7488008541827654873:2801] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:17:28.920906Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqpfwr5z90bha7d0c8drce8n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=YzFmN2Q0YjYtNmU3MGMzZWUtM2ZlNTliOTQtMTcyM2Y0MmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:17:28.984869Z node 7 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [7:7488008541827654919:2362], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: KiReadTable!
:2:13: Error: Cannot find table 'db.[Root/BadTable]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:17:28.986308Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=YzFmN2Q0YjYtNmU3MGMzZWUtM2ZlNTliOTQtMTcyM2Y0MmQ=, ActorId: [7:7488008541827654623:2335], ActorState: ExecuteState, TraceId: 01jqpfwrbgfaan30vppyp7mqy2, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:17:29.076306Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqpfwrdcdvp31ccnaekancrf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=YzFmN2Q0YjYtNmU3MGMzZWUtM2ZlNTliOTQtMTcyM2Y0MmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:17:29.212926Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqpfwrg6cevkz7xznnrc2vnk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=YzFmN2Q0YjYtNmU3MGMzZWUtM2ZlNTliOTQtMTcyM2Y0MmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:17:31.057292Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7488008554303347150:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:31.057353Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d98/r3tmp/tmpHfxr59/pdisk_1.dat 2025-03-31T16:17:31.375194Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61469, node 10 2025-03-31T16:17:31.430552Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:31.430657Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:31.481523Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:17:31.556211Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:31.556248Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:31.556256Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:31.556415Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25331 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-31T16:17:31.896956Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:17:34.607698Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7488008567188250070:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:34.607832Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:34.629018Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:17:34.733914Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7488008567188250234:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:34.734007Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:34.734235Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7488008567188250239:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:34.739111Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-31T16:17:34.762939Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7488008567188250241:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-31T16:17:34.849341Z node 10 :TX_PROXY ERROR: Actor# [10:7488008567188250314:2790] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:17:34.864959Z node 10 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [10:7488008567188250325:2355], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:25: Error: At function: KiWriteTable!
:2:25: Error: Cannot find table 'db.[Root/BadTable1]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:17:34.865992Z node 10 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=10&id=MWI1MjAzNTYtMWUyOTBmNjYtNGRmOWVkODgtNmY1OWUzNmE=, ActorId: [10:7488008567188250067:2335], ActorState: ExecuteState, TraceId: 01jqpfwy0d3c6rn907fmzxz5b1, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:17:34.904629Z node 10 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [10:7488008567188250352:2361], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:25: Error: At function: KiWriteTable!
:2:25: Error: Cannot find table 'db.[Root/BadTable2]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:17:34.906312Z node 10 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=10&id=MWI1MjAzNTYtMWUyOTBmNjYtNGRmOWVkODgtNmY1OWUzNmE=, ActorId: [10:7488008567188250067:2335], ActorState: ExecuteState, TraceId: 01jqpfwy57d3fqe1tn86asf0cp, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: >> TGRpcLdapAuthentication::LdapAuthWithValidCredentials >> YdbYqlClient::CreateAndAltertTableWithPartitioningByLoad [GOOD] >> YdbYqlClient::CreateAndAltertTableWithKeyBloomFilter >> TTableProfileTests::WrongTableProfile [GOOD] >> TYqlDateTimeTests::DateKey >> YdbTableBulkUpsert::Types [GOOD] >> YdbTableBulkUpsert::Uint8 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcNewCoordinationClient::SessionAcquireAcceptedCallback [GOOD] Test command err: 2025-03-31T16:17:14.704235Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008481651076360:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:14.704334Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d99/r3tmp/tmpLXUBOC/pdisk_1.dat 2025-03-31T16:17:15.044826Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24448, node 1 2025-03-31T16:17:15.058843Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:17:15.059964Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:17:15.079352Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:15.079492Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:15.095052Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:17:15.098834Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:15.098861Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:15.098871Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:15.099033Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6549 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:15.416115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:15.524405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:17:15.629657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-31T16:17:15.702681Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropKesus, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:17:15.714826Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-03-31T16:17:15.722152Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,1) wasn't found 2025-03-31T16:17:18.950833Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488008500117702655:2277];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:18.950909Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d99/r3tmp/tmpMxWzS9/pdisk_1.dat 2025-03-31T16:17:19.110769Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:19.132702Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:19.132780Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:19.141631Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12887, node 4 2025-03-31T16:17:19.208616Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:19.208646Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:19.208653Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:19.208809Z node 4 :NET_CLASSIFIER 
ERROR: got bad distributable configuration TClient is connected to server localhost:10442 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:19.486063Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:23.332276Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7488008522526129748:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:23.332386Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d99/r3tmp/tmpmcdPZv/pdisk_1.dat 2025-03-31T16:17:23.442501Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:23.473267Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:23.473346Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:23.475599Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2149, node 7 2025-03-31T16:17:23.531892Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:23.531909Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:23.531916Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:23.532053Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14134 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:23.753027Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:23.810775Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:17:27.878728Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7488008537946546640:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:27.878829Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d99/r3tmp/tmp4QwO5X/pdisk_1.dat 2025-03-31T16:17:28.087901Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:28.122139Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:28.122231Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:28.124063Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26042, node 10 2025-03-31T16:17:28.191228Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:28.191253Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:28.191260Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:28.191419Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27065 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:28.456239Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:28.539560Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:17:32.813095Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7488008560838155780:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:32.813189Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d99/r3tmp/tmpUctc2B/pdisk_1.dat 2025-03-31T16:17:33.041970Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26654, node 13 2025-03-31T16:17:33.165277Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:33.165394Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:33.186868Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:17:33.212034Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:33.212059Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:33.212067Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:33.212205Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28827 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:33.507663Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:33.618990Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710658:0, at schemeshard: 72057594046644480 >> YdbYqlClient::RetryOperationAsync >> TGRpcYdbTest::DeleteFromAfterCreate [GOOD] >> YdbScripting::Params >> YdbYqlClient::TestReadTableMultiShardUseSnapshot [GOOD] >> YdbYqlClient::TestReadTableMultiShardOneRow >> TGRpcLdapAuthentication::LdapAuthSettingsWithEmptyBaseDn [GOOD] >> TGRpcLdapAuthentication::LdapAuthSettingsWithEmptyBindDn >> TGRpcYdbTest::ExecuteQueryWithParametersBadRequest [GOOD] >> TGRpcYdbTest::ExecuteQueryWithParametersExplicitSession >> TGRpcYdbTest::ExecuteQueryBadRequest [GOOD] >> TGRpcYdbTest::ExecuteQueryImplicitSession >> TGRpcAuthentication::DisableLoginAuthentication [GOOD] >> TGRpcAuthentication::NoConnectRights >> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientDoesNotProvideCorrectCerts [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcYdbTest::DeleteFromAfterCreate [GOOD] Test command err: 2025-03-31T16:17:13.744749Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008478971780804:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:13.744890Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000da0/r3tmp/tmpPUBgEn/pdisk_1.dat 2025-03-31T16:17:14.126358Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:14.126462Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:14.129140Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:14.134703Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26033, node 1 2025-03-31T16:17:14.260771Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:14.260813Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty 
maybe) 2025-03-31T16:17:14.260826Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:14.260960Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5140 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:14.523128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:14.604548Z node 1 :TX_PROXY ERROR: Actor# [1:7488008483266749018:2605] txid# 281474976710658, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } 2025-03-31T16:17:17.842072Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488008495111104221:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:17.842106Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000da0/r3tmp/tmpld0Kxv/pdisk_1.dat 2025-03-31T16:17:17.952932Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:17.978048Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:17.978129Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:17.982997Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1189, node 4 2025-03-31T16:17:18.034001Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:18.034021Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:18.034028Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:18.034145Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30360 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:18.329504Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:22.647503Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7488008514261169194:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:22.647587Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000da0/r3tmp/tmpwu9mlF/pdisk_1.dat 2025-03-31T16:17:22.779923Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:22.804882Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:22.804971Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:22.809079Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24876, node 7 2025-03-31T16:17:22.900555Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:22.900584Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:22.900592Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:22.900773Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22755 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:23.181721Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:25.705905Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488008527146072107:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:25.705995Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:26.019914Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:17:26.181719Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488008531441039585:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:26.181809Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:26.182072Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488008531441039590:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:26.186362Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-31T16:17:26.206880Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7488008531441039592:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-31T16:17:26.285535Z node 7 :TX_PROXY ERROR: Actor# [7:7488008531441039666:2804] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:17:26.490911Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqpfwnx74m65xfnfedb5vy56, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZDJjYmI3ZjMtODIyZjQwZC0zZmM2YTQyLWFiZTRmMDMz, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:17:26.724766Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqpfwp63e75aw8eb422ztw63, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZDJjYmI3ZjMtODIyZjQwZC0zZmM2YTQyLWFiZTRmMDMz, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:17:28.740831Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7488008543149118369:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:28.740987Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000da0/r3tmp/tmptcTUfC/pdisk_1.dat 2025-03-31T16:17:28.882785Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:28.917631Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:28.917723Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:28.924863Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2424, node 10 2025-03-31T16:17:29.059737Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:29.059767Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:29.059775Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:29.059913Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3265 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-31T16:17:29.323292Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:17:32.213009Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7488008560328988590:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:32.213133Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:32.213428Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7488008560328988602:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:32.217219Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:17:32.238706Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7488008560328988604:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:17:32.328518Z node 10 :TX_PROXY ERROR: Actor# [10:7488008560328988674:2678] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:17:32.463398Z node 10 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [10:7488008560328988703:2348], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[Root/NotFound]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:17:32.463638Z node 10 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=10&id=MmY0MDdkNzktNjdlOTcwNWEtOWNjNDUyNy03MzVkYmRiYQ==, ActorId: [10:7488008560328988587:2332], ActorState: ExecuteState, TraceId: 01jqpfwvrc75e8hzpa1zfe3sc6, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:17:34.166791Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7488008568012814996:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:34.166884Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000da0/r3tmp/tmpRNzaIJ/pdisk_1.dat 2025-03-31T16:17:34.345431Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:34.386199Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:34.386315Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:34.389679Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6512, node 13 2025-03-31T16:17:34.497290Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:34.497319Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:34.497331Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:34.497543Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12827 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:34.860923Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:17:35.001765Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:17:35.162838Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-31T16:17:37.702134Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7488008580897718229:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:37.702222Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7488008580897718218:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:37.702338Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:37.705069Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-03-31T16:17:37.732611Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7488008580897718232:2349], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-03-31T16:17:37.828387Z node 13 :TX_PROXY ERROR: Actor# [13:7488008580897718309:2915] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:17:37.924601Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqpfx0x4d3w80zjyf9w387jh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=OTkzODQ0MGUtZGJkNDVjOGYtZjg0YWMwNGMtMzA3MDFkNzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:17:38.017651Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqpfx14f00768dd8zyprmz8s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=OTkzODQ0MGUtZGJkNDVjOGYtZjg0YWMwNGMtMzA3MDFkNzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> ClientStatsCollector::ExternalMetricRegistryByRawPtr [GOOD] >> ClientStatsCollector::ExternalMetricRegistryStdSharedPtr >> YdbYqlClient::TestColumnOrder >> YdbTableBulkUpsert::NotNulls [GOOD] >> YdbTableBulkUpsert::Errors ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientDoesNotProvideCorrectCerts [GOOD] Test command err: 2025-03-31T16:17:11.398567Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008468292374934:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:11.398637Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000da5/r3tmp/tmpT0xLPY/pdisk_1.dat 2025-03-31T16:17:11.759714Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19235, node 1 2025-03-31T16:17:11.799862Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:17:11.799883Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:17:11.824927Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:11.825016Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:11.839266Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:17:11.849996Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:11.850017Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:11.850024Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:11.850152Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8568 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:12.126424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:12.277247Z node 1 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:54214) has now valid token of root@builtin 2025-03-31T16:17:12.340109Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-03-31T16:17:12.340172Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:17:12.340182Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-31T16:17:12.340223Z node 1 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-03-31T16:17:15.602894Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488008486651708995:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:15.602988Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000da5/r3tmp/tmpJO3szQ/pdisk_1.dat 2025-03-31T16:17:15.742772Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:15.780144Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:15.780238Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:15.783672Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20839, node 4 2025-03-31T16:17:15.849400Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:15.849428Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:15.849434Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:15.849643Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24699 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:16.131046Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:16.287962Z node 4 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:38778) has now valid token of root@builtin 2025-03-31T16:17:16.336972Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-03-31T16:17:16.337003Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:17:16.337012Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-31T16:17:16.337042Z node 4 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-03-31T16:17:20.167217Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7488008509565093330:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:20.167290Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000da5/r3tmp/tmpTZDSds/pdisk_1.dat 2025-03-31T16:17:20.299130Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:20.339600Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:20.339674Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:20.342443Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25129, node 7 2025-03-31T16:17:20.453794Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:20.453815Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:20.453822Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:20.453964Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15079 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:20.709628Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... Trying to register node Register node result Status { Code: OK } NodeId: 1024 DomainPath: "Root" Expire: 1743445040292473 Nodes { NodeId: 1024 Host: "localhost" Port: 8890 ResolveHost: "localhost" Address: "localhost" Location { DataCenter: "DataCenter" Rack: "Rack" Unit: "Body" } Expire: 1743445040292473 } Nodes { NodeId: 7 Host: "::1" Port: 12001 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 49 RoomNum: 1 RackNum: 1 BodyNum: 1 DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } } Nodes { NodeId: 8 Host: "::1" Port: 12002 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 50 RoomNum: 2 RackNum: 2 BodyNum: 2 DataCenter: "2" Module: "2" Rack: "2" Unit: "2" } } Nodes { NodeId: 9 Host: "::1" Port: 12003 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 51 RoomNum: 3 RackNum: 3 BodyNum: 3 DataCenter: "3" Module: "3" Rack: "3" Unit: "3" } } 2025-03-31T16:17:24.902955Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7488008526419568081:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:24.903003Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000da5/r3tmp/tmpbFaJXS/pdisk_1.dat 2025-03-31T16:17:25.062767Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:25.098349Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:25.098439Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:25.102680Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17429, node 10 2025-03-31T16:17:25.211759Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:25.211787Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:25.211796Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:25.211945Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to 
server localhost:4820 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:25.529403Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... Trying to register node Register node result Status { Code: UNAUTHORIZED Reason: "Cannot authorize node. Access denied" } 2025-03-31T16:17:30.050006Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7488008548400506135:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:30.050095Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000da5/r3tmp/tmp2TfGqh/pdisk_1.dat 2025-03-31T16:17:30.211768Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:30.250422Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:30.250506Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:30.254446Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1996, node 13 2025-03-31T16:17:30.368349Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:30.368379Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:30.368388Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:30.368542Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28367 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:30.682409Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... Trying to register node Register node result Status { Code: OK } NodeId: 1024 DomainPath: "Root" Expire: 1743445050208883 Nodes { NodeId: 1024 Host: "localhost" Port: 65475 ResolveHost: "localhost" Address: "localhost" Location { DataCenter: "DataCenter" Rack: "Rack" Unit: "Body" } Expire: 1743445050208883 } Nodes { NodeId: 13 Host: "::1" Port: 12001 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 49 RoomNum: 1 RackNum: 1 BodyNum: 1 DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } } Nodes { NodeId: 14 Host: "::1" Port: 12002 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 50 RoomNum: 2 RackNum: 2 BodyNum: 2 DataCenter: "2" Module: "2" Rack: "2" Unit: "2" } } Nodes { NodeId: 15 Host: "::1" Port: 12003 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 51 RoomNum: 3 RackNum: 3 BodyNum: 3 DataCenter: "3" Module: "3" Rack: "3" Unit: "3" } } 2025-03-31T16:17:35.311012Z node 16 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[16:7488008572494406804:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:35.311206Z node 16 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000da5/r3tmp/tmpE05So1/pdisk_1.dat 2025-03-31T16:17:35.498639Z node 16 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:35.552275Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:35.552388Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:35.558142Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64926, node 16 2025-03-31T16:17:35.720070Z node 16 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:35.720096Z node 16 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:35.720105Z node 16 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:35.720257Z node 16 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is 
connected to server localhost:18730 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:36.056912Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... Trying to register node 2025-03-31T16:17:36.322703Z node 16 :TICKET_PARSER ERROR: Ticket 814E318A995E923C511C1A8F97DDE735027ECC0B7DD454F1DCB0469AD630DCC3: Cannot create token from certificate. Client certificate failed verification Register node result Status { Code: ERROR Reason: "Cannot create token from certificate. Client certificate failed verification" } >> YdbTableBulkUpsertOlap::UpsertArrowDupField [GOOD] >> YdbTableBulkUpsertOlap::ParquetImportBug_Datashard >> YdbOlapStore::ManyTables [GOOD] >> YdbOlapStore::LogWithUnionAllAscending >> BasicStatistics::TwoNodes [GOOD] >> TTableProfileTests::UseDefaultProfile [GOOD] >> TTableProfileTests::OverwriteCompactionPolicy >> YdbTableBulkUpsertOlap::UpsertMixed [GOOD] >> YdbYqlClient::AlterTableAddIndex >> TGRpcNewCoordinationClient::SessionDescribeWatchReplace [GOOD] >> TGRpcNewCoordinationClient::SessionReconnectReattach >> TGRpcLdapAuthentication::LdapAuthWithValidCredentials [GOOD] >> TGRpcNewClient::SimpleYqlQuery >> YdbTableBulkUpsert::Simple [GOOD] >> YdbTableBulkUpsert::SyncIndexShouldSucceed |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/multi_plane/py3test >> test_retry.py::TestRetry::test_low_rate[kikimr0] [GOOD] |87.8%| [TA] $(B)/ydb/tests/fq/multi_plane/test-results/py3test/{meta.json ... results_accumulator.log} |87.8%| [TA] {RESULT} $(B)/ydb/tests/fq/multi_plane/test-results/py3test/{meta.json ... results_accumulator.log} >> TYqlDateTimeTests::SimpleOperations [GOOD] >> TYqlDecimalTests::DecimalKey ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::TwoNodes [GOOD] Test command err: 2025-03-31T16:15:16.024772Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:526:2414], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:16.025066Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:16.025166Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000cf6/r3tmp/tmpK0Z1Z5/pdisk_1.dat 2025-03-31T16:15:16.493499Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9071, node 1 2025-03-31T16:15:16.788716Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:15:16.788793Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:15:16.788850Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:15:16.789591Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:15:16.792240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:15:16.893623Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:15:16.893783Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:15:16.912749Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23816 2025-03-31T16:15:17.485698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:15:23.611983Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-31T16:15:23.619725Z node 3 :STATISTICS INFO: Subscribed for config changes on node 3 2025-03-31T16:15:23.675825Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:15:23.675930Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:15:23.676662Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:15:23.676714Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:15:23.716499Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:15:23.716758Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-03-31T16:15:23.725092Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:15:23.725650Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:15:23.989356Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:23.990148Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute 
CreateTablet Postponed 2025-03-31T16:15:23.990755Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:23.990915Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:23.991004Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:23.991293Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:23.991450Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:23.991557Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:23.991636Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:24.178581Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:15:24.178703Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:15:24.179510Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:15:24.179600Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:15:24.193292Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:15:24.194451Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:15:24.196036Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:15:24.367677Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:15:24.423956Z node 3 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-31T16:15:24.424082Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-31T16:15:24.454100Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-31T16:15:24.454517Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-31T16:15:24.454717Z node 3 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-31T16:15:24.454777Z node 3 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-31T16:15:24.454835Z node 3 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-31T16:15:24.454882Z node 3 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-31T16:15:24.454930Z node 3 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-31T16:15:24.454988Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-31T16:15:24.455490Z node 3 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-31T16:15:24.478548Z node 3 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [3:2276:2589] 2025-03-31T16:15:24.479182Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-31T16:15:24.487137Z node 3 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-31T16:15:24.487202Z node 3 :STATISTICS NOTICE: Table _statistics updater. 
Creating table 2025-03-31T16:15:24.487272Z node 3 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-31T16:15:24.493392Z node 3 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-31T16:15:24.493517Z node 3 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [3:2320:2612], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-31T16:15:24.510167Z node 3 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [3:2363:2634] 2025-03-31T16:15:24.510665Z node 3 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [3:2363:2634], schemeshard id = 72075186224037897 2025-03-31T16:15:24.549659Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-31T16:15:24.559211Z node 3 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-31T16:15:24.559403Z node 3 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-31T16:15:24.734762Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-31T16:15:24.983607Z node 3 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-31T16:15:25.066653Z node 3 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-31T16:15:26.247399Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2698:3089], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:26.247568Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:26.266280Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-31T16:15:26.555572Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2848:3127], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:26.555709Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:26.556971Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2853:3131]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:15:26.557107Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-31T16:15:26.557169Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2855:3133] 2025-03-31T16:15:26.557218Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2855:3133] 2025-03-31T16:15:26.557704Z node 3 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [3:2856:2818] 2025-03-31T16:15:26.558081Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2855:3133], server id = [3:2856:2818], tablet id = 72075186224037894, status = OK 2025-03-31T16:15:26.558259Z node 3 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [3:2856:2818], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-03-31T16:15:26.558329Z node 3 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-03-31T16:15:26.558489Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-31T16:15:26.558544Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2853:3131], StatRequests.size() = 1 2025-03-31T16:15:26.577391Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2860:3137], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:26.577514Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool ... 000s, at schemeshard: 72057594046644480 2025-03-31T16:17:34.281397Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 119 ], ReplyToActorId[ [2:7495:3222]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:17:34.281711Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 119 ] 2025-03-31T16:17:34.281754Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 119, ReplyToActorId = [2:7495:3222], StatRequests.size() = 1 2025-03-31T16:17:34.882863Z node 3 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-03-31T16:17:34.883411Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-31T16:17:34.883738Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-31T16:17:34.939928Z node 3 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 2, at schemeshard: 72075186224037897 2025-03-31T16:17:34.940012Z node 3 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 195.000000s, at schemeshard: 72075186224037897 2025-03-31T16:17:34.940332Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037897, stats size# 49 2025-03-31T16:17:34.956824Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-03-31T16:17:35.407276Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 120 ], ReplyToActorId[ [2:7532:3228]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:17:35.407586Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 120 ] 2025-03-31T16:17:35.407625Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 120, ReplyToActorId = [2:7532:3228], StatRequests.size() = 1 2025-03-31T16:17:35.921692Z node 3 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:17:35.921784Z node 3 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-31T16:17:35.921840Z node 3 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table. 2025-03-31T16:17:35.921894Z node 3 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-03-31T16:17:35.922256Z node 3 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-31T16:17:35.938448Z node 3 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-03-31T16:17:35.943549Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7559:4377], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:35.943704Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7570:4382], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:35.943851Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:35.971894Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897 2025-03-31T16:17:36.065364Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7573:4385], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-03-31T16:17:36.198194Z node 3 :TX_PROXY ERROR: Actor# [3:7668:4433] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:17:36.246579Z node 3 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [3:7698:4449]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:17:36.246908Z node 3 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-31T16:17:36.247004Z node 3 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [3:7700:4451] 2025-03-31T16:17:36.247103Z node 3 :STATISTICS DEBUG: SyncNode(), pipe client id = [3:7700:4451] 2025-03-31T16:17:36.247566Z node 3 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [3:7701:4452] 2025-03-31T16:17:36.247695Z node 3 :STATISTICS DEBUG: EvClientConnected, node id = 3, client id = [3:7700:4451], server id = [3:7701:4452], tablet id = 72075186224037894, status = OK 2025-03-31T16:17:36.247795Z node 3 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [3:7701:4452], node id = 3, have schemeshards count = 0, need schemeshards count = 1 2025-03-31T16:17:36.247857Z node 3 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 3, schemeshard count = 1 2025-03-31T16:17:36.247986Z node 3 :STATISTICS DEBUG: EvPropagateStatistics, node id = 3 2025-03-31T16:17:36.248070Z node 3 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [3:7698:4449], StatRequests.size() = 1 2025-03-31T16:17:36.390909Z node 3 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=3&id=Y2NjNzMzMTEtYjgwOGJiNTEtNDk1YmNjN2MtNTk5NDIxOTg=, TxId: 2025-03-31T16:17:36.390997Z node 3 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=3&id=Y2NjNzMzMTEtYjgwOGJiNTEtNDk1YmNjN2MtNTk5NDIxOTg=, TxId: 2025-03-31T16:17:36.392397Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-31T16:17:36.408829Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-03-31T16:17:36.408926Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 
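Note: the KQP_WORKLOAD_SERVICE NOT_FOUND warnings above are expected on a fresh database — queries arrive before the built-in default resource pool exists, TPoolCreatorActor then creates it under .metadata/workload_manager/pools/, and the TX_PROXY "path exist, request accepts it" message is the benign race of two actors creating it concurrently. For context, a hedged YQL sketch of creating a pool by hand (assuming YDB's CREATE RESOURCE POOL DDL; the pool name and both settings are illustrative, not taken from this log):

    -- Minimal sketch, assuming CREATE RESOURCE POOL is available;
    -- my_pool and both settings below are illustrative assumptions.
    CREATE RESOURCE POOL my_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10,  -- queries allowed to run at once
        QUEUE_SIZE = 100              -- requests queued beyond that limit
    );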
2025-03-31T16:17:36.465827Z node 3 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-03-31T16:17:36.465896Z node 3 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-03-31T16:17:36.498712Z node 3 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [3:7700:4451], schemeshard count = 1 2025-03-31T16:17:36.798784Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 121 ], ReplyToActorId[ [2:7739:3244]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:17:36.799119Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 121 ] 2025-03-31T16:17:36.799166Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 121, ReplyToActorId = [2:7739:3244], StatRequests.size() = 1 2025-03-31T16:17:37.756818Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 122 ], ReplyToActorId[ [2:7782:3258]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:17:37.757121Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 122 ] 2025-03-31T16:17:37.757164Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 122, ReplyToActorId = [2:7782:3258], StatRequests.size() = 1 2025-03-31T16:17:38.287828Z node 3 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-31T16:17:38.299804Z node 3 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:17:38.299863Z node 3 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-31T16:17:38.299905Z node 3 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is data table. 2025-03-31T16:17:38.299964Z node 3 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-31T16:17:38.300298Z node 3 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-31T16:17:38.307426Z node 3 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-03-31T16:17:38.337284Z node 3 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=3&id=YjBkYmFjMjUtZmZlMWQ2MzktOGZjMTVhZWYtYzhjODE0N2M=, TxId: 2025-03-31T16:17:38.337348Z node 3 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=3&id=YjBkYmFjMjUtZmZlMWQ2MzktOGZjMTVhZWYtYzhjODE0N2M=, TxId: 2025-03-31T16:17:38.338399Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-31T16:17:38.362479Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-31T16:17:38.362544Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 
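Note: for each datashard table the aggregator skips, it still clears that path's rows from the statistics table before rescheduling the traversal. Reformatted from the RunDataQuery entries in this log, the YQL it executes is:

    DECLARE $owner_id AS Uint64;
    DECLARE $local_path_id AS Uint64;

    DELETE FROM `.metadata/_statistics`
    WHERE owner_id = $owner_id AND local_path_id = $local_path_id;

The two DECLAREs are the query parameters; they correspond to the [OwnerId: ..., LocalPathId: ...] pairs in the surrounding traversal entries.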
2025-03-31T16:17:38.813333Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 123 ], ReplyToActorId[ [2:7854:3271]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:17:38.813623Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 123 ] 2025-03-31T16:17:38.813663Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 123, ReplyToActorId = [2:7854:3271], StatRequests.size() = 1 2025-03-31T16:17:39.885549Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 124 ], ReplyToActorId[ [2:7903:3285]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:17:39.885860Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 124 ] 2025-03-31T16:17:39.885906Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 124, ReplyToActorId = [2:7903:3285], StatRequests.size() = 1 2025-03-31T16:17:40.385354Z node 3 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 3, schemeshard count = 1 2025-03-31T16:17:40.385784Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-31T16:17:40.386162Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-31T16:17:40.386236Z node 3 :STATISTICS DEBUG: EvPropagateStatistics, node id = 3 2025-03-31T16:17:40.397878Z node 3 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:17:40.397938Z node 3 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-31T16:17:40.814686Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 125 ], ReplyToActorId[ [2:7940:3291]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:17:40.814969Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 125 ] 2025-03-31T16:17:40.815014Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 125, ReplyToActorId = [2:7940:3291], StatRequests.size() = 1 2025-03-31T16:17:40.815824Z node 3 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [3:7942:4530]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:17:40.823303Z node 3 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-31T16:17:40.823393Z node 3 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [3:7942:4530], StatRequests.size() = 1
>> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvideIncorrectCerts [GOOD]
>> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientDoesNotProvideAnyCerts
>> YdbYqlClient::CreateAndAltertTableWithKeyBloomFilter [GOOD]
>> TGRpcClientLowTest::MultipleSimpleRequests [GOOD]
>> YdbOlapStore::LogNonExistingRequest [GOOD]
>> YdbOlapStore::LogNonExistingUserId
>> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesServerCerts [GOOD]
>> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesExpiredCert
>> TGRpcLdapAuthentication::LdapAuthSettingsWithEmptyBindDn [GOOD]
>> TGRpcLdapAuthentication::LdapAuthSettingsWithEmptyBindPassword
>> TTableProfileTests::ExplicitPartitionsWrongKeyType [GOOD]
>> YdbScripting::Params [GOOD]
>> YdbTableBulkUpsert::DataValidation
>> TGRpcYdbTest::ExecuteQueryImplicitSession [GOOD]
>> TGRpcYdbTest::ExecuteQueryExplicitSession
>> YdbYqlClient::TestReadTableMultiShardOneRow [GOOD]
>> YdbYqlClient::TestReadTableBatchLimits
------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcClientLowTest::MultipleSimpleRequests [GOOD]
Test command err: 2025-03-31T16:17:18.993573Z node 1
:METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008499399060050:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:18.993636Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d76/r3tmp/tmpAN5oPH/pdisk_1.dat 2025-03-31T16:17:19.372620Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:19.396822Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:19.396956Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:19.404275Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21895, node 1 2025-03-31T16:17:19.546303Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:19.546332Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:19.546343Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:19.546457Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24020 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:19.812442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
TestRequest(database="/Root", token="root@builtin") => {SUCCESS, 0} 2025-03-31T16:17:21.829873Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="/blabla", token="root@builtin") => {STATUS_CODE_UNSPECIFIED, 16} 2025-03-31T16:17:21.838638Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="blabla", token="root@builtin") => {STATUS_CODE_UNSPECIFIED, 16} 2025-03-31T16:17:23.188963Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488008522427581008:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:23.189025Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d76/r3tmp/tmpV8DZwV/pdisk_1.dat 2025-03-31T16:17:23.349602Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:23.373244Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:23.373319Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:23.375982Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22432, node 4 2025-03-31T16:17:23.513419Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:23.513443Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:23.513449Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:23.513564Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12804 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:23.740334Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
TestRequest(database="/Root", token="") => {STATUS_CODE_UNSPECIFIED, 16} 2025-03-31T16:17:23.801995Z node 4 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="/blabla", token="") => {STATUS_CODE_UNSPECIFIED, 16} 2025-03-31T16:17:23.811544Z node 4 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="blabla", token="") => {STATUS_CODE_UNSPECIFIED, 16} 2025-03-31T16:17:27.429900Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7488008537195253724:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:27.429947Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d76/r3tmp/tmpNVd5cL/pdisk_1.dat 2025-03-31T16:17:27.610020Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:27.645473Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:27.645548Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:27.648710Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20392, node 7 2025-03-31T16:17:27.702243Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:27.702264Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:27.702268Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:27.702358Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2746 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-03-31T16:17:27.931540Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 TestRequest(database="/Root", token="") => {SUCCESS, 0} TestRequest(database="/blabla", token="") => {SUCCESS, 0} TestRequest(database="blabla", token="") => {SUCCESS, 0} TestRequest(database="/Root", token="root@builtin") => {SUCCESS, 0} 2025-03-31T16:17:30.415928Z node 7 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="/blabla", token="root@builtin") => {STATUS_CODE_UNSPECIFIED, 16} 2025-03-31T16:17:30.434490Z node 7 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="blabla", token="root@builtin") => {STATUS_CODE_UNSPECIFIED, 16} 2025-03-31T16:17:30.451477Z node 7 :TICKET_PARSER ERROR: Ticket **** (717F937C): Unknown token TestRequest(database="/Root", token="invalid token") => {UNAUTHORIZED, 0} 2025-03-31T16:17:30.461570Z node 7 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="/blabla", token="invalid token") => {STATUS_CODE_UNSPECIFIED, 16} 2025-03-31T16:17:30.478297Z node 7 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="blabla", token="invalid token") => {STATUS_CODE_UNSPECIFIED, 16} 2025-03-31T16:17:32.113532Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7488008559090137917:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:32.113617Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d76/r3tmp/tmpNxhxs7/pdisk_1.dat 2025-03-31T16:17:32.270822Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:32.303750Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:32.303831Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:32.306472Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10706, node 10 2025-03-31T16:17:32.406465Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:32.406493Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:32.406501Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:32.406647Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2390 WaitRootIsUp 'Root'... 
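Note: the TestRequest lines above form the auth matrix this test walks. On the first server (node 1) a valid token succeeds only against the real database; on the second (node 4) an empty token is rejected everywhere; on the third (node 7, apparently running with mandatory authentication off) empty tokens succeed while an invalid token still fails on /Root. Summarized from the log ("-" = not exercised above):

    database   token           node 1                     node 4                     node 7
    /Root      root@builtin    SUCCESS, 0                 -                          SUCCESS, 0
    /blabla    root@builtin    STATUS_CODE_UNSPECIFIED,16 -                          STATUS_CODE_UNSPECIFIED,16
    /Root      "" (empty)      -                          STATUS_CODE_UNSPECIFIED,16 SUCCESS, 0
    /Root      invalid token   -                          -                          UNAUTHORIZED, 0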
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:32.553480Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TestRequest(database="/Root", token="") => {SUCCESS, 0} TestRequest(database="/blabla", token="") => {SUCCESS, 0} TestRequest(database="blabla", token="") => {SUCCESS, 0} TestRequest(database="/Root", token="root@builtin") => {SUCCESS, 0} 2025-03-31T16:17:35.356085Z node 10 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="/blabla", token="root@builtin") => {STATUS_CODE_UNSPECIFIED, 16} 2025-03-31T16:17:35.373998Z node 10 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="blabla", token="root@builtin") => {STATUS_CODE_UNSPECIFIED, 16} TestRequest(database="/Root", token="invalid token") => {STATUS_CODE_UNSPECIFIED, 16} 2025-03-31T16:17:35.399335Z node 10 :TICKET_PARSER ERROR: Ticket **** (717F937C): Unknown token TestRequest(database="/blabla", token="invalid token") => {STATUS_CODE_UNSPECIFIED, 16} 2025-03-31T16:17:35.418016Z node 10 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 2025-03-31T16:17:35.440016Z node 10 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="blabla", token="invalid token") => {STATUS_CODE_UNSPECIFIED, 16} 2025-03-31T16:17:36.902517Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7488008576315948110:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:36.902623Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d76/r3tmp/tmp3AoB5g/pdisk_1.dat 2025-03-31T16:17:37.190380Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:37.234552Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:37.234642Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:37.240687Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15726, node 13 2025-03-31T16:17:37.319810Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:37.319838Z node 13 :NET_CLASSIFIER WARN: will try to 
initialize from file: (empty maybe) 2025-03-31T16:17:37.319844Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:37.319999Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31688 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:37.580796Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:41.901692Z node 13 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7488008576315948110:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:41.901979Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::CreateAndAltertTableWithKeyBloomFilter [GOOD]
Test command err: 2025-03-31T16:17:16.039156Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008489629343481:2077];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:16.039271Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d87/r3tmp/tmpdpCiAO/pdisk_1.dat 2025-03-31T16:17:16.351517Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8694, node 1 2025-03-31T16:17:16.426100Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:16.427512Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:16.467395Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:17:16.495796Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:16.495821Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:16.495828Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:16.495943Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21829
WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:16.828850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:16.913283Z node 1 :GRPC_PROXY_NO_CONNECT_ACCESS DEBUG: Skip check permission connect db, AllowYdbRequestsWithoutDatabase is off, there is no db provided from user, database: /Root, user: root@builtin, from ip: ipv6:[::1]:46210 Call 2025-03-31T16:17:16.934634Z node 1 :GRPC_PROXY_NO_CONNECT_ACCESS DEBUG: Skip check permission connect db, AllowYdbRequestsWithoutDatabase is off, there is no db provided from user, database: /Root, user: root@builtin, from ip: ipv6:[::1]:46220 2025-03-31T16:17:19.000475Z node 1 :GRPC_PROXY_NO_CONNECT_ACCESS DEBUG: Skip check permission connect db, AllowYdbRequestsWithoutDatabase is off, there is no db provided from user, database: /Root, user: root@builtin, from ip: ipv6:[::1]:46230 Call Call 2025-03-31T16:17:19.040706Z node 1 :GRPC_PROXY_NO_CONNECT_ACCESS DEBUG: Skip check permission connect db, user is a admin, database: /Root, user: root@builtin, from ip: ipv6:[::1]:46242 2025-03-31T16:17:19.051913Z node 1 :GRPC_PROXY_NO_CONNECT_ACCESS DEBUG: Skip check permission connect db, user is a admin, database: /Root, user: root@builtin, from ip: ipv6:[::1]:46252 2025-03-31T16:17:19.053406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:17:20.520686Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488008508721483402:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:20.521543Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d87/r3tmp/tmpXzfYgv/pdisk_1.dat 2025-03-31T16:17:20.690457Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:20.720380Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:20.720461Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:20.722332Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16443, 
node 4 2025-03-31T16:17:20.813867Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:20.813892Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:20.813898Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:20.814069Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26505 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:21.087417Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:23.726974Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Table-1, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:17:23.728164Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-31T16:17:23.728205Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:17:23.731499Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Table-1 2025-03-31T16:17:23.809571Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743437843854, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-31T16:17:23.872421Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2025-03-31T16:17:23.886755Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TCopyTable Propose, path: /Root/Table-2, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-31T16:17:23.887427Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-31T16:17:23.889845Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, subject: , status: StatusAccepted, operation: CREATE TABLE COPY FROM, path: /Root/Table-2 2025-03-31T16:17:23.927051Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743437843973, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-31T16:17:23.942819Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 
281474976715659, done: 0, blocked: 1 2025-03-31T16:17:23.947138Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 2025-03-31T16:17:23.959711Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TCopyTable Propose, path: /Root/Table-3, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-31T16:17:23.960259Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-31T16:17:23.960276Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TCopyTable Propose, path: /Root/Table-4, opId: 281474976715660:1, at schemeshard: 72057594046644480 2025-03-31T16:17:23.960586Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-31T16:17:23.963628Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715660, subject: , status: StatusAccepted, operation: CREATE TABLE COPY FROM, dst path: /Root/Table-3, dst path: /Root/Table-4 2025-03-31T16:17:24.017735Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743437844064, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-31T16:17:24.037251Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715660, done: 0, blocked: 2 2025-03-31T16:17:24.039419Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715660:0 2025-03-31T16:17:24.039538Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715660:1 2025-03-31T16:17:24.052792Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TCopyTable Propose, path: /Root/Table-5, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-31T16:17:24.053209Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715661:4, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-31T16:17:24.053254Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TCopyTable Propose, path: /Root/Table-6, opId: 281474976715661:1, at schemeshard: 72057594046644480 2025-03-31T16:17:24.053518Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715661:4, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-31T16:17:24.053536Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TCopyTable Propose, path: /Root/Table-7, opId: 281474976715661:2, at schemeshard: 72057594046644480 2025-03-31T16:17:24.053736Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715661:4, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-31T16:17:24.053751Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TCopyTable Propose, path: /Root/Table-8, opId: 281474976715661:3, at schemeshard: 72057594046644480 2025-03-31T16:17:24.053994Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715661:4, propose status:StatusAccepted, reason: ... 
17:27.413604Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:27.450362Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:27.450450Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:27.456647Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16456, node 7 2025-03-31T16:17:27.548054Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:27.548078Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:27.548086Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:27.548247Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23768 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:27.828837Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:23768 2025-03-31T16:17:30.599929Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:23768 TClient::Ls request: Root/Test TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Test" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1743437850714 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Test" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyCo... 
(TRUNCATED) 2025-03-31T16:17:30.952356Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:23768 TClient::Ls request: Root/Test TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Test" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1743437850714 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Test" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyCo... (TRUNCATED) 2025-03-31T16:17:32.908807Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7488008560604347274:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:32.908857Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d87/r3tmp/tmpjyYFFo/pdisk_1.dat 2025-03-31T16:17:33.140733Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10443, node 10 2025-03-31T16:17:33.226754Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:33.226899Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:33.247609Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:17:33.279685Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:33.279707Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:33.279717Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:33.279920Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26467 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:33.561305Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:36.629172Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:17:36.751694Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-31T16:17:36.819177Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:17:38.675144Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7488008586969484123:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:38.680762Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d87/r3tmp/tmpir9Utm/pdisk_1.dat 2025-03-31T16:17:38.889893Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:38.920656Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:38.920752Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:38.924409Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17408, node 13 2025-03-31T16:17:39.058147Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:39.058171Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:39.058182Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:39.058344Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6020 WaitRootIsUp 'Root'... 
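Note: the create-then-alter runs above (ESchemeOpCreateTable followed by ESchemeOpAlterTable, with TableSchemaVersion advancing in the Ls output) match the flow of CreateAndAltertTableWithKeyBloomFilter. A hedged YQL equivalent of toggling the bloom filter on the test's Root/Test table (assuming YDB's KEY_BLOOM_FILTER table setting; the test itself drives this through the C++ SDK, not YQL):

    -- Sketch only; assumes KEY_BLOOM_FILTER is toggled via ALTER TABLE ... SET.
    ALTER TABLE `/Root/Test` SET (KEY_BLOOM_FILTER = ENABLED);
    ALTER TABLE `/Root/Test` SET (KEY_BLOOM_FILTER = DISABLED);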
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:39.388998Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:42.612937Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:17:42.732759Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
>> TGRpcYdbTest::ExecuteQueryWithParametersExplicitSession [GOOD]
>> TGRpcYdbTest::ExplainQuery
>> TYqlDateTimeTests::DateKey [GOOD]
>> TYqlDateTimeTests::DatetimeKey
>> TGRpcAuthentication::NoConnectRights [GOOD]
>> ClientStatsCollector::ExternalMetricRegistryStdSharedPtr [GOOD]
|87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsSimple
------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TTableProfileTests::ExplicitPartitionsWrongKeyType [GOOD]
Test command err: 2025-03-31T16:17:10.258775Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008464405849848:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:10.259104Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000db4/r3tmp/tmpDmtfLg/pdisk_1.dat 2025-03-31T16:17:10.594440Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:10.638833Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:10.638941Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:10.641939Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21659, node 1 2025-03-31T16:17:10.711398Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:10.711426Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:10.711438Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty
maybe) 2025-03-31T16:17:10.711575Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10012 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:10.991936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:10012 2025-03-31T16:17:11.245172Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:11.268208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:17:11.777979Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488008470017032494:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:11.778063Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:17:11.781602Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:11.781691Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:11.784397Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-03-31T16:17:11.787825Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10012 2025-03-31T16:17:12.134767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:10012 TClient::Ls request: /Root/ydb_ut_tenant/table-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710660 CreateStep: 1743437832510 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-1" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... (TRUNCATED) 2025-03-31T16:17:12.884422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:10012 TClient::Ls request: /Root/ydb_ut_tenant/table-2 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-2" PathId: 4 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710661 CreateStep: 1743437833150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-2" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... 
(TRUNCATED) 2025-03-31T16:17:13.453551Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2025-03-31T16:17:13.456796Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-03-31T16:17:16.373298Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488008490374905095:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:16.373398Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000db4/r3tmp/tmp04l8Kf/pdisk_1.dat 2025-03-31T16:17:16.524987Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:16.565502Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:16.565596Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:16.569497Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26781, node 4 2025-03-31T16:17:16.669775Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:16.669800Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:16.669838Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:16.670048Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24336 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:16.989715Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:24336 2025-03-31T16:17:17.261671Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:17.281483Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:17:17.786131Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7488008494301412680:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:17.786226Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:17:17.808091Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:17.808207Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:17.812351Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 6 Cookie 6 2025-03-31T16:17:17.816389Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24336 2025-03-31T16:17:18.133741Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0 ... 8764Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:17:27.090307Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:17:29.419700Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7488008547157691090:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:29.419769Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000db4/r3tmp/tmpa2bVjE/pdisk_1.dat 2025-03-31T16:17:29.626284Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:29.669475Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:29.669580Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:29.674272Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8207, node 10 2025-03-31T16:17:29.769820Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:29.769844Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:29.769856Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:29.770038Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18777 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:30.129060Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:18777 2025-03-31T16:17:30.545444Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:30.572746Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:31.078445Z node 12 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[12:7488008554560628168:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:31.079215Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:17:31.140561Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:31.140696Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:31.154650Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 12 Cookie 12 2025-03-31T16:17:31.158204Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18777 2025-03-31T16:17:31.658894Z node 10 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 12 2025-03-31T16:17:31.659590Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-03-31T16:17:32.081060Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:17:33.082165Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:17:34.082570Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot 
detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:17:35.087391Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:17:37.109984Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7488008581193426429:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:37.110060Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000db4/r3tmp/tmpW9ogdJ/pdisk_1.dat 2025-03-31T16:17:37.376526Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:37.450207Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:37.450452Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:37.468961Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1047, node 13 2025-03-31T16:17:37.630327Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:37.630360Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:37.630376Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:37.630497Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15282 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:38.115949Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:15282 2025-03-31T16:17:38.577371Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:17:38.617991Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:39.129223Z node 15 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[15:7488008591145218951:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:39.129378Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:17:39.222209Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:39.222319Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:39.228478Z node 13 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 15 Cookie 15 2025-03-31T16:17:39.232359Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15282 2025-03-31T16:17:39.670210Z node 13 :TX_PROXY ERROR: Actor# [13:7488008589783362439:2915] txid# 281474976710660, issues: { message: "Error at split boundary 0: Value of type Uint64 expected in tuple at position 1" severity: 1 } 2025-03-31T16:17:39.675279Z node 13 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 15 2025-03-31T16:17:39.675807Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-03-31T16:17:40.133887Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:17:41.139636Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:17:42.140476Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:17:43.144043Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=incorrect path status: LookupError; >> KqpSystemView::Sessions >> KqpSysColV1::InnerJoinTables >> YdbTableBulkUpsertOlap::ParquetImportBug_Datashard [GOOD] >> YdbTableBulkUpsertOlap::UpsertCSV |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcAuthentication::NoConnectRights [GOOD] Test command err: 2025-03-31T16:17:08.621218Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008457981504219:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:08.621516Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/0l5c/000db6/r3tmp/tmpKAr8jo/pdisk_1.dat 2025-03-31T16:17:09.001943Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:09.011766Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:09.011874Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 5471, node 1 2025-03-31T16:17:09.028550Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:17:09.039687Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:17:09.039970Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:17:09.074888Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:09.074912Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:09.074920Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:09.075241Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4980 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:09.353723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:4980 TClient is connected to server localhost:4980 2025-03-31T16:17:09.783760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-31T16:17:11.569523Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008470866407160:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:11.569523Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008470866407170:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:11.569673Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:11.575606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-03-31T16:17:11.601105Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008470866407174:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710660 completed, doublechecking } 2025-03-31T16:17:11.701379Z node 1 :TX_PROXY ERROR: Actor# [1:7488008470866407251:2707] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } TClient is connected to server localhost:4980 2025-03-31T16:17:12.188648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:17:17.023797Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:451:2413], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:17:17.024084Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:17:17.024193Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000db6/r3tmp/tmp0YWhrU/pdisk_1.dat 2025-03-31T16:17:17.516409Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:17:17.587261Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:17.587419Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:17.602899Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:17:17.900583Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [4:1001:2805], Recipient [4:555:2468]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:17:17.900690Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:17:17.900741Z node 4 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-03-31T16:17:17.900893Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [4:998:2803], Recipient [4:555:2468]: {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-03-31T16:17:17.900931Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-03-31T16:17:17.999742Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateSubDomain SubDomain { Name: "tenant" } } TxId: 281474976715657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-03-31T16:17:18.000046Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /Root/tenant, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:17:18.000193Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: tenant, child id: [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-03-31T16:17:18.000394Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 0 2025-03-31T16:17:18.000598Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-03-31T16:17:18.000750Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-31T16:17:18.000815Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715657:0, at 
schemeshard: 72057594046644480 2025-03-31T16:17:18.000927Z node 4 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-31T16:17:18.001009Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-03-31T16:17:18.001123Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-03-31T16:17:18.003800Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715657, response: Status: StatusAccepted TxId: 281474976715657 SchemeshardId: 72057594046644480 PathId: 2, at schemeshard: 72057594046644480 2025-03-31T16:17:18.004037Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, database: /Root, subject: , status: StatusAccepted, operation: CREATE DATABASE, path: /Root/tenant 2025-03-31T16:17:18.004129Z node 4 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-31T16:17:18.004175Z node 4 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 281474976715657:0 2025-03-31T16:17:18.004519Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [4:555:2468], Recipient [4:555:2468]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-03-31T16:17:18.004571Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-03-31T16:17:18.004757Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-03-31T16:17:18.004811Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-03-31T16:17:18.005026Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715657, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2025-03-31T16:17:18.005194Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-03-31T16:17:18.005251Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:768:2616], at schemeshard: 72057594046644480, txId: 281474976715657, path id: 1 2025-03-31T16:17:18.005308Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:768:2616], at schemeshard: 72057594046644480, txId: 281474976715657, path id: 2 2025-03-31T16:17:18.005957Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:17:18.006038Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxCreateSubDomain, at tablet# 72057594046644480 2025-03-31T16:1 ... 
TxCompleted: 1 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 1 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 1 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 2 HasLoanedParts: false Channels { Channel: 1 DataSize: 30 IndexSize: 0 } Channels { Channel: 2 DataSize: 45 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { Memory: 82488 } ShardState: 2 UserTablePartOwners: 72075186224037890 NodeId: 5 StartTime: 1458 TableOwnerId: 72057594046644480 FollowerId: 0 2025-03-31T16:17:28.924097Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [5:1645:2459], Recipient [4:555:2468]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186224037890 TableLocalId: 3 Generation: 1 Round: 2 TableStats { DataSize: 75 RowCount: 1 IndexSize: 0 InMemSize: 0 LastAccessTime: 2027 LastUpdateTime: 2027 ImmediateTxCompleted: 1 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 1 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 1 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 2 HasLoanedParts: false Channels { Channel: 1 DataSize: 30 IndexSize: 0 } Channels { Channel: 2 DataSize: 45 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { Memory: 82488 } ShardState: 2 UserTablePartOwners: 72075186224037890 NodeId: 5 StartTime: 1458 TableOwnerId: 72057594046644480 FollowerId: 0 2025-03-31T16:17:28.924159Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-03-31T16:17:28.924226Z node 4 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037890 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 75 rowCount 1 cpuUsage 0 2025-03-31T16:17:28.924399Z node 4 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037890 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] raw table stats: DataSize: 75 RowCount: 1 IndexSize: 0 InMemSize: 0 LastAccessTime: 2027 LastUpdateTime: 2027 ImmediateTxCompleted: 1 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 1 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 1 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 2 HasLoanedParts: false Channels { Channel: 1 DataSize: 30 IndexSize: 0 } Channels { Channel: 2 DataSize: 45 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-03-31T16:17:28.924453Z node 4 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-03-31T16:17:28.978207Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jqpfwrb018mdktvhshkn7dbv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NDY2MDYxZmYtZWZkZTA3NDUtNTQxZTIxMTMtNjdmODYxMDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-31T16:17:30.577616Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7488008551328259705:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:30.577681Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000db6/r3tmp/tmpATZN95/pdisk_1.dat 2025-03-31T16:17:30.715636Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:30.748813Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:30.748900Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:30.751020Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22184, node 6 2025-03-31T16:17:30.824702Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:30.824731Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:30.824738Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:30.824880Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29463 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:31.120318Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
TClient is connected to server localhost:29463 2025-03-31T16:17:35.250950Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7488008570631468740:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:35.251100Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000db6/r3tmp/tmpzzYgbr/pdisk_1.dat 2025-03-31T16:17:35.497016Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:35.538381Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:35.538528Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:35.548874Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25249, node 9 2025-03-31T16:17:35.624019Z node 9 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:35.624059Z node 9 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:35.624069Z node 9 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:35.624209Z node 9 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27838 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:35.982684Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:17:40.800012Z node 12 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[12:7488008591663177238:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:40.800104Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000db6/r3tmp/tmpiKX7mF/pdisk_1.dat 2025-03-31T16:17:41.067457Z node 12 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11650, node 12 2025-03-31T16:17:41.200949Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:41.202399Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:41.229695Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:17:41.291961Z node 12 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:41.291989Z node 12 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:41.292001Z node 12 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:41.292159Z node 12 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32619 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:41.654518Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
TClient is connected to server localhost:32619 >> KqpSysColV1::StreamSelectRange >> YdbYqlClient::TestColumnOrder [GOOD] >> YdbYqlClient::TestDecimal >> YdbTableBulkUpsert::SyncIndexShouldSucceed [GOOD] >> YdbTableBulkUpsert::Timeout ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> ClientStatsCollector::ExternalMetricRegistryStdSharedPtr [GOOD] Test command err: 2025-03-31T16:17:17.921821Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008494740564064:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:17.921855Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d77/r3tmp/tmpTiW2dt/pdisk_1.dat 2025-03-31T16:17:18.299618Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:18.347518Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:18.347605Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:18.352199Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5614, node 1 2025-03-31T16:17:18.423781Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:18.423814Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:18.423821Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:18.423935Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6962 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:18.719926Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:20.970833Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008507625466978:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:20.970833Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008507625466986:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:20.970953Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:20.975374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:17:20.995131Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008507625466992:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:17:21.091066Z node 1 :TX_PROXY ERROR: Actor# [1:7488008511920434360:2684] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:17:21.469169Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710660. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: ydb://session/3?node_id=1&id=NTdjZjZhYTctOWMzNTY5MTQtYTdlMjMzNDMtOWRhYjk4, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-03-31T16:17:23.011823Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488008520782616543:2083];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:23.016448Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d77/r3tmp/tmpuNtCzS/pdisk_1.dat 2025-03-31T16:17:23.223098Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:23.263132Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:23.263242Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:23.271140Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29941, node 4 2025-03-31T16:17:23.349041Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:23.349068Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:23.349076Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:23.349282Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25071 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:23.654318Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:17:26.331581Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488008533667519429:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:26.331655Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488008533667519440:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:26.331712Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:26.336515Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:17:26.366319Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7488008533667519443:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:17:26.469249Z node 4 :TX_PROXY ERROR: Actor# [4:7488008533667519529:2678] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:17:28.191952Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7488008541176209817:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:28.200084Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d77/r3tmp/tmpBG8Wml/pdisk_1.dat 2025-03-31T16:17:28.447061Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7461, node 7 2025-03-31T16:17:28.505897Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:28.506011Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:28.515529Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:17:28.609842Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:28.609866Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:28.609879Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:28.610016Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13878 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 1844674407370955161 ... w5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=YmI4ZWI3MzUtNzA3MWQ4N2EtOWM1ODcyYWMtZDgxYzI1Mjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:17:33.811847Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715678. Ctx: { TraceId: 01jqpfwx3j8nb703ze066t94an, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NzZhZjRjOWQtZTVlNTg1ZGQtYTAyMTljOTctYmI3ZThkMmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-31T16:17:33.813924Z node 7 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=24; 2025-03-31T16:17:33.814235Z node 7 :KQP_COMPUTE ERROR: SelfId: [7:7488008562651048017:2362], Table: `Root/names` ([72057594046644480:2:1]), SessionActorId: [7:7488008558356080348:2362]Got LOCKS BROKEN for table `Root/names`. ShardID=72075186224037888, Sink=[7:7488008562651048017:2362].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-03-31T16:17:33.814300Z node 7 :KQP_COMPUTE ERROR: SelfId: [7:7488008562651047978:2362], SessionActorId: [7:7488008558356080348:2362], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `Root/names`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[7:7488008558356080348:2362]. isRollback=0 2025-03-31T16:17:33.814461Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=NzZhZjRjOWQtZTVlNTg1ZGQtYTAyMTljOTctYmI3ZThkMmE=, ActorId: [7:7488008558356080348:2362], ActorState: ExecuteState, TraceId: 01jqpfwx3j8nb703ze066t94an, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [7:7488008562651048011:2362] from: [7:7488008562651047978:2362] 2025-03-31T16:17:33.814538Z node 7 :KQP_EXECUTER ERROR: ActorId: [7:7488008562651048011:2362] TxId: 281474976715678. Ctx: { TraceId: 01jqpfwx3j8nb703ze066t94an, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NzZhZjRjOWQtZTVlNTg1ZGQtYTAyMTljOTctYmI3ZThkMmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `Root/names`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-03-31T16:17:33.815202Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=NzZhZjRjOWQtZTVlNTg1ZGQtYTAyMTljOTctYmI3ZThkMmE=, ActorId: [7:7488008558356080348:2362], ActorState: ExecuteState, TraceId: 01jqpfwx3j8nb703ze066t94an, Create QueryResponse for error on request, msg: 2025-03-31T16:17:35.487306Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7488008570486370172:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:35.487374Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d77/r3tmp/tmpOnRhKz/pdisk_1.dat 2025-03-31T16:17:35.705356Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24255, node 10 2025-03-31T16:17:35.824659Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:35.824678Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:35.824686Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:35.824783Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:17:35.830699Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:35.830957Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:35.863554Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:62263 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:36.139816Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:39.100298Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7488008587666240424:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:39.100407Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:39.100639Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7488008587666240436:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:39.104494Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:17:39.142669Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7488008587666240438:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:17:39.235645Z node 10 :TX_PROXY ERROR: Actor# [10:7488008587666240509:2677] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:17:41.083172Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7488008598651327427:2131];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:41.083258Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d77/r3tmp/tmpbMLS63/pdisk_1.dat 2025-03-31T16:17:41.301035Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11910, node 13 2025-03-31T16:17:41.408006Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:41.408082Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:41.421072Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:41.421096Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:41.421105Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:41.421242Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:17:41.425932Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23393 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:41.719593Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:44.638928Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7488008611536230296:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:44.639042Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7488008611536230291:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:44.639246Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:44.643966Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:17:44.665405Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7488008611536230305:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:17:44.728071Z node 13 :TX_PROXY ERROR: Actor# [13:7488008611536230386:2688] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> YdbTableBulkUpsert::Errors [GOOD] >> YdbTableBulkUpsert::Limits >> KqpSystemView::QueryStatsScan >> KqpSysColV1::StreamSelectRowAsterisk >> TGRpcNewCoordinationClient::SessionReconnectReattach [GOOD] >> TGRpcNewClient::SimpleYqlQuery [GOOD] >> TGRpcNewClient::TestAuth >> TTableProfileTests::OverwriteCompactionPolicy [GOOD] >> TTableProfileTests::OverwriteExecutionPolicy >> TRegisterNodeOverDiscoveryService::ServerWithoutCertVerification_ClientDoesNotProvideClientCerts [GOOD] >> BasicStatistics::Simple [GOOD] >> YdbYqlClient::AlterTableAddIndex [GOOD] >> YdbYqlClient::AlterTableAddIndexAsyncOp >> KqpSystemView::PartitionStatsFollower |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamInnerJoinTables ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcNewCoordinationClient::SessionReconnectReattach [GOOD] Test command err: 2025-03-31T16:17:23.448005Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008520038036260:2245];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:23.448325Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d57/r3tmp/tmpXJEaPt/pdisk_1.dat 2025-03-31T16:17:23.871999Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:23.896009Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:23.896122Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:23.903654Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10317, node 1 2025-03-31T16:17:24.011787Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:24.011819Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:24.011828Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:24.011941Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21245 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:24.335124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:24.412330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:17:27.898529Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488008536624065972:2207];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:27.906640Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d57/r3tmp/tmpjt2Jgi/pdisk_1.dat 2025-03-31T16:17:28.109225Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2666, node 4 2025-03-31T16:17:28.244367Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:28.244464Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:28.264780Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:17:28.277078Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:28.277104Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:28.277119Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:28.277301Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20138 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:28.636421Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:28.707304Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:17:32.636098Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7488008559064961115:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:32.636157Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d57/r3tmp/tmpr3Xe2i/pdisk_1.dat 2025-03-31T16:17:32.866249Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14011, node 7 2025-03-31T16:17:32.945680Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:32.945821Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:33.046150Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:17:33.119684Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:33.119708Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:33.119716Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:33.119842Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6721 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:33.433624Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:33.549637Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:17:37.733737Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7488008578879765357:2112];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:37.733982Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d57/r3tmp/tmpX9hUhA/pdisk_1.dat 2025-03-31T16:17:38.016715Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21956, node 10 2025-03-31T16:17:38.105784Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:38.105895Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:38.106870Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:38.106888Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:38.106897Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:38.107052Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:17:38.111616Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13908 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:38.408579Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:38.495524Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:17:43.179069Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7488008604545329153:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:43.179164Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d57/r3tmp/tmpoQdAiq/pdisk_1.dat 2025-03-31T16:17:43.424151Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:43.459853Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:43.459947Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:43.463555Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24196, node 13 2025-03-31T16:17:43.647178Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:43.647200Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:43.647209Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:43.647346Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1887 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:43.965876Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:44.042914Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710658:0, at schemeshard: 72057594046644480 >> TGRpcLdapAuthentication::LdapAuthSettingsWithEmptyBindPassword [GOOD] >> TGRpcLdapAuthentication::LdapAuthWithEmptyPassword ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TRegisterNodeOverDiscoveryService::ServerWithoutCertVerification_ClientDoesNotProvideClientCerts [GOOD] Test command err: 2025-03-31T16:16:43.618126Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008348996036635:2271];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:43.618268Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ddd/r3tmp/tmp2gnuhH/pdisk_1.dat 2025-03-31T16:16:43.996547Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:43.996671Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 6328, node 1 2025-03-31T16:16:44.005072Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:44.046730Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:16:44.046775Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:16:44.092082Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:16:44.102563Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:16:44.102591Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:16:44.102599Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:16:44.102779Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16730 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:16:44.416391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:44.505572Z node 1 :TICKET_PARSER DEBUG: Ticket 0F5D764C69185DED2A319922D0A5BB344648BA98DDB6A8F7886F42758E54FFEC (ipv6:[::1]:52030) has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2025-03-31T16:16:44.585470Z node 1 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:52044) has now valid token of root@builtin 2025-03-31T16:16:44.663237Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-03-31T16:16:44.663296Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:16:44.663308Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-31T16:16:44.663351Z node 1 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ddd/r3tmp/tmp7tbEOY/pdisk_1.dat 2025-03-31T16:16:47.840369Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:16:47.938173Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:47.984665Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:47.984755Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:47.990893Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22777, node 4 2025-03-31T16:16:48.069898Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:16:48.069928Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:16:48.069936Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:16:48.070090Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26754 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:16:48.328974Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:48.419879Z node 4 :TICKET_PARSER DEBUG: Ticket 0F5D764C69185DED2A319922D0A5BB344648BA98DDB6A8F7886F42758E54FFEC (ipv6:[::1]:57092) has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2025-03-31T16:16:48.508340Z node 4 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:57106) has now valid token of root@builtin 2025-03-31T16:16:48.607457Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-03-31T16:16:48.607487Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:16:48.607500Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-31T16:16:48.607533Z node 4 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-03-31T16:16:52.115678Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7488008387285300911:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:52.115761Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ddd/r3tmp/tmpZeGr2R/pdisk_1.dat 2025-03-31T16:16:52.238681Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:52.286393Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:52.286478Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:52.291536Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14549, node 7 2025-03-31T16:16:52.363770Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:16:52.363803Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:16:52.363811Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:16:52.363948Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27662 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:16:52.613667Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:52.744910Z node 7 :TICKET_PARSER DEBUG: Ticket BE7A58AD5DD7DB945E6F2C2110700C5D8F0E880A41B8BEC20957E94B9374443F (ipv6:[::1]:42964) has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2025-03-31T16:16:52.831852Z node 7 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:42982) has now valid token of root@builtin 2025-03-31T16:16:52.903064Z node 7 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-03-31T16:16:52.903115Z node 7 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:16:52.903139Z node 7 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-31T16:16:52.903173Z node 7 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-03-31T16:16:56.493306Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7488008405876704641:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:56.493381Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ddd/r3tmp/tmpUnzhdU/pdisk_1.dat 2025-03-31T16:16:56.636329Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:56.671698Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:56.671799Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16: ... at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:17:16.705087Z node 19 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[19:7488008468820901506:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:16.705190Z node 19 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:17:22.493303Z node 19 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:49562) has now valid token of root@builtin 2025-03-31T16:17:22.582727Z node 19 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-03-31T16:17:22.582771Z node 19 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:17:22.582785Z node 19 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-31T16:17:22.582832Z node 19 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-03-31T16:17:24.043153Z node 22 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[22:7488008525670330412:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:24.043220Z node 22 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ddd/r3tmp/tmpvycUtx/pdisk_1.dat 2025-03-31T16:17:24.287951Z node 22 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:24.325076Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:24.325171Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:24.328156Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22037, node 22 2025-03-31T16:17:24.453963Z node 22 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:24.453987Z node 22 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:24.454003Z node 22 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:24.454188Z node 22 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10226 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:17:24.799397Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:29.043533Z node 22 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[22:7488008525670330412:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:29.043625Z node 22 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:17:35.003703Z node 22 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:35068) has now valid token of root@builtin 2025-03-31T16:17:35.094147Z node 22 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-03-31T16:17:35.094183Z node 22 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:17:35.094199Z node 22 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-31T16:17:35.094241Z node 22 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-03-31T16:17:36.791595Z node 25 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[25:7488008576093861293:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:36.791671Z node 25 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ddd/r3tmp/tmpG7fQa5/pdisk_1.dat 2025-03-31T16:17:36.960409Z node 25 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:36.999509Z node 25 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:36.999614Z node 25 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:37.003785Z node 25 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7625, node 25 2025-03-31T16:17:37.105429Z node 25 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:37.105453Z node 25 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:37.105465Z node 25 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:37.105632Z node 25 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6558 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:37.468902Z node 25 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:37.675564Z node 25 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:37182) has now valid token of root@builtin 2025-03-31T16:17:37.796699Z node 25 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-03-31T16:17:37.796736Z node 25 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:17:37.796750Z node 25 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-31T16:17:37.796795Z node 25 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-03-31T16:17:43.261699Z node 28 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[28:7488008606917651695:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:43.267016Z node 28 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ddd/r3tmp/tmpgaVFgR/pdisk_1.dat 2025-03-31T16:17:43.484764Z node 28 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7716, node 28 2025-03-31T16:17:43.558984Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:43.559128Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:43.568128Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:17:43.631035Z node 28 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:43.631058Z node 28 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:43.631064Z node 28 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:43.631247Z node 28 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1564 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:43.983479Z node 28 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:44.165094Z node 28 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:52196) has now valid token of root@builtin 2025-03-31T16:17:44.229901Z node 28 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-03-31T16:17:44.229943Z node 28 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:17:44.229974Z node 28 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-31T16:17:44.230017Z node 28 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator >> YdbTableBulkUpsert::DataValidation [GOOD] >> YdbTableBulkUpsert::DecimalPK ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::Simple [GOOD] Test command err: 2025-03-31T16:15:10.239364Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:10.239654Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:15:10.239757Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d11/r3tmp/tmpcqGHal/pdisk_1.dat 2025-03-31T16:15:10.686907Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24487, node 1 2025-03-31T16:15:11.012834Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:15:11.012894Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:15:11.012921Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:15:11.015586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:15:11.016051Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:15:11.114100Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:15:11.114229Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:15:11.132571Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22162 2025-03-31T16:15:11.759006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:15:15.261735Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-31T16:15:15.307831Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:15:15.308028Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:15:15.341608Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:15:15.345614Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:15:15.614875Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:15.615834Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:15.616449Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:15.616652Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:15.617084Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:15.617213Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:15.617304Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:15.617388Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:15.617460Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:15.807196Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:15:15.807334Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:15:15.821274Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:15:15.987385Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:15:16.046760Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-31T16:15:16.046898Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-31T16:15:16.082065Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-31T16:15:16.082811Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-31T16:15:16.083051Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-31T16:15:16.083138Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-31T16:15:16.083197Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-31T16:15:16.083265Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-31T16:15:16.083331Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-31T16:15:16.083392Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-31T16:15:16.083915Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-31T16:15:16.108325Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-31T16:15:16.108448Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1872:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-31T16:15:16.115975Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2608] 2025-03-31T16:15:16.123971Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1908:2617] 2025-03-31T16:15:16.124688Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1908:2617], schemeshard id = 72075186224037897 2025-03-31T16:15:16.127518Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-31T16:15:16.154270Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-31T16:15:16.154345Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-31T16:15:16.154426Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-31T16:15:16.170597Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-31T16:15:16.178198Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-31T16:15:16.178351Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-31T16:15:16.437463Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-31T16:15:16.609421Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-31T16:15:16.731784Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-31T16:15:18.009459Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2241:3074], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:18.009599Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:18.028726Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-31T16:15:18.342760Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2380:3110], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:18.342887Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:18.344115Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2385:3114]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:15:18.344246Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-31T16:15:18.344358Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2387:3116] 2025-03-31T16:15:18.344423Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2387:3116] 2025-03-31T16:15:18.344951Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2388:2876] 2025-03-31T16:15:18.345191Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2387:3116], server id = [2:2388:2876], tablet id = 72075186224037894, status = OK 2025-03-31T16:15:18.345351Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:2388:2876], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-03-31T16:15:18.345400Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-03-31T16:15:18.345601Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-31T16:15:18.345673Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2385:3114], StatRequests.size() = 1 2025-03-31T16:15:18.364345Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2392:3120], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:18.364486Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:18.364909Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2397:3125], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:18.372652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-03-31T16:15:18.625996Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-03-31T16:15:18.626092Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-03-31T16:15:18.712097Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2387:3116], schemeshard count = 1 2025-03-31T16:15:19.244303Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreator ... TATISTICS DEBUG: ReplySuccess(), request id = 116, ReplyToActorId = [2:6402:4625], StatRequests.size() = 1 2025-03-31T16:17:36.360560Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 117 ], ReplyToActorId[ [2:6435:4639]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:17:36.360862Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 117 ] 2025-03-31T16:17:36.360918Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 117, ReplyToActorId = [2:6435:4639], StatRequests.size() = 1 2025-03-31T16:17:37.065219Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-31T16:17:37.676825Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 118 ], ReplyToActorId[ [2:6470:4655]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:17:37.677125Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 118 ] 2025-03-31T16:17:37.677169Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 118, ReplyToActorId = [2:6470:4655], StatRequests.size() = 1 2025-03-31T16:17:38.538618Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-03-31T16:17:38.538708Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-03-31T16:17:38.538752Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-03-31T16:17:38.538808Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-03-31T16:17:39.316685Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 119 ], ReplyToActorId[ [2:6510:4672]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:17:39.316963Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 119 ] 2025-03-31T16:17:39.317010Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 119, ReplyToActorId = [2:6510:4672], StatRequests.size() = 1 2025-03-31T16:17:40.111250Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-03-31T16:17:40.111659Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-31T16:17:40.111935Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-31T16:17:40.159718Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 2, at schemeshard: 72075186224037897 2025-03-31T16:17:40.159813Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 213.000000s, at schemeshard: 72075186224037897 2025-03-31T16:17:40.160129Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037897, stats size# 49 
2025-03-31T16:17:40.175761Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-03-31T16:17:40.784223Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 120 ], ReplyToActorId[ [2:6543:4688]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:17:40.788737Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 120 ] 2025-03-31T16:17:40.788812Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 120, ReplyToActorId = [2:6543:4688], StatRequests.size() = 1 2025-03-31T16:17:41.497997Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:17:41.498092Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-31T16:17:41.498154Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table. 2025-03-31T16:17:41.498206Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-03-31T16:17:41.498660Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-31T16:17:41.561735Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-03-31T16:17:41.570393Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6566:4707], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:41.570519Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6576:4712], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:41.570670Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:41.584350Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897 2025-03-31T16:17:41.647338Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:6580:4715], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-03-31T16:17:41.793112Z node 2 :TX_PROXY ERROR: Actor# [2:6675:4763] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:17:41.839629Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 121 ], ReplyToActorId[ [2:6704:4778]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:17:41.839917Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 121 ] 2025-03-31T16:17:41.839971Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 121, ReplyToActorId = [2:6704:4778], StatRequests.size() = 1 2025-03-31T16:17:41.990212Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ODNmMTlhZWQtNjg4ZjVmODEtOGU5M2RlNTUtYWRlODA2YzU=, TxId: 2025-03-31T16:17:41.990290Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ODNmMTlhZWQtNjg4ZjVmODEtOGU5M2RlNTUtYWRlODA2YzU=, TxId: 2025-03-31T16:17:41.990929Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-31T16:17:42.017329Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-03-31T16:17:42.017405Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-03-31T16:17:42.541488Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 122 ], ReplyToActorId[ [2:6739:4797]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:17:42.541804Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 122 ] 2025-03-31T16:17:42.541845Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 122, ReplyToActorId = [2:6739:4797], StatRequests.size() = 1 2025-03-31T16:17:43.916949Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 123 ], ReplyToActorId[ [2:6778:4819]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:17:43.917232Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 123 ] 2025-03-31T16:17:43.917276Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 123, ReplyToActorId = [2:6778:4819], StatRequests.size() = 1 2025-03-31T16:17:44.626840Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-31T16:17:44.637834Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:17:44.637906Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-31T16:17:44.637948Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is data table. 2025-03-31T16:17:44.637984Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-31T16:17:44.638351Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-03-31T16:17:44.641250Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-03-31T16:17:44.655727Z node 2 :SYSTEM_VIEWS WARN: [72075186224037891] TEvIntervalQuerySummary, time mismath: node id# 2, interval end# 1970-01-01T00:02:04.000000Z, event interval end# 2025-03-31T16:17:42.000000Z 2025-03-31T16:17:44.656274Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=M2JmMDU2NC1jN2E2MTI4NC0xZTViZDFhNi05OThhMjJhYw==, TxId: 2025-03-31T16:17:44.656323Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=M2JmMDU2NC1jN2E2MTI4NC0xZTViZDFhNi05OThhMjJhYw==, TxId: 2025-03-31T16:17:44.656791Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-31T16:17:44.670797Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-31T16:17:44.670857Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-03-31T16:17:45.269540Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 124 ], ReplyToActorId[ [2:6846:4858]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:17:45.269834Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 124 ] 2025-03-31T16:17:45.269889Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 124, ReplyToActorId = [2:6846:4858], StatRequests.size() = 1 2025-03-31T16:17:46.690414Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 125 ], ReplyToActorId[ [2:6889:4882]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:17:46.690741Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 125 ] 2025-03-31T16:17:46.690859Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 125, ReplyToActorId = [2:6889:4882], StatRequests.size() = 1 2025-03-31T16:17:47.378999Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-03-31T16:17:47.379193Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-31T16:17:47.379580Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-31T16:17:47.390681Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:17:47.390739Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 
2025-03-31T16:17:47.960580Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 126 ], ReplyToActorId[ [2:6922:4898]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:17:47.960957Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 126 ] 2025-03-31T16:17:47.961012Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 126, ReplyToActorId = [2:6922:4898], StatRequests.size() = 1 >> HttpRequest::ProbeServerless [GOOD] >> YdbTableBulkUpsert::Uint8 [GOOD] >> YdbTableBulkUpsert::ZeroRows |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> TGRpcYdbTest::ExecuteQueryExplicitSession [GOOD] >> TGRpcYdbTest::ExecutePreparedQuery >> TYqlDecimalTests::DecimalKey [GOOD] |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> TGRpcYdbTest::ExplainQuery [GOOD] >> KqpSystemView::PartitionStatsRange2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> HttpRequest::ProbeServerless [GOOD] Test command err: 2025-03-31T16:15:02.768204Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:02.768630Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:02.768801Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d2e/r3tmp/tmp8prRyK/pdisk_1.dat 2025-03-31T16:15:03.336721Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63454, node 1 2025-03-31T16:15:03.617777Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:15:03.617834Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:15:03.617884Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:15:03.618364Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:15:03.620695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:15:03.738501Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:15:03.738656Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:15:03.760441Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20909 2025-03-31T16:15:04.453257Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:15:08.211524Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-31T16:15:08.249424Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:15:08.249561Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:15:08.290374Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:15:08.293437Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:15:08.686412Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:08.686967Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:08.687604Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:08.687762Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:08.688022Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:08.688118Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:08.688201Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:08.688281Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:08.688391Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:08.888759Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:15:08.888886Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:15:08.907148Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:15:09.088367Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:15:09.139999Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-31T16:15:09.140126Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-31T16:15:09.174030Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-31T16:15:09.175360Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-31T16:15:09.175579Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-31T16:15:09.175635Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-31T16:15:09.175688Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-31T16:15:09.175739Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-31T16:15:09.175788Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-31T16:15:09.175840Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-31T16:15:09.176290Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-31T16:15:09.220365Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-31T16:15:09.220494Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1872:2596], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-31T16:15:09.280567Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1883:2604] 2025-03-31T16:15:09.287220Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1915:2621] 2025-03-31T16:15:09.287325Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1915:2621], schemeshard id = 72075186224037897 2025-03-31T16:15:09.291197Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-03-31T16:15:09.326310Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-31T16:15:09.326373Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-31T16:15:09.326452Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-03-31T16:15:09.342007Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-31T16:15:09.350301Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-31T16:15:09.350454Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-31T16:15:09.580338Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-31T16:15:09.805532Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-31T16:15:09.891793Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-31T16:15:10.863114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-31T16:15:11.750045Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:15:11.945599Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2025-03-31T16:15:11.945660Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037899 2025-03-31T16:15:11.945752Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2589:2945], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037899 2025-03-31T16:15:11.947283Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2590:2946] 2025-03-31T16:15:11.947586Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2590:2946], schemeshard id = 72075186224037899 2025-03-31T16:15:13.351552Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2721:3245], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:13.351728Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:13.390889Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715661:0, at schemeshard: 72075186224037899 2025-03-31T16:15:13.889297Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2869:3085];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:15:13.889579Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2869:3085];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:15:13.889921Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2869:3085];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:15:13.890096Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2869:3085];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:15:13.890263Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2869:3085];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:15:13.890427Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2869:3085];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:15:13.890584Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2869:3085];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:15:13.890751Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2869:3085];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:15:13.890897Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2869:3085];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16 ... STICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-03-31T16:17:47.013135Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-03-31T16:17:47.013158Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-03-31T16:17:48.020855Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-31T16:17:48.021084Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-31T16:17:48.199990Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-31T16:17:48.200069Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=hUw8 2025-03-31T16:17:48.200124Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 
2025-03-31T16:17:49.355072Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:17:49.355248Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 2] is column table. 2025-03-31T16:17:49.355285Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037899, LocalPathId: 2] 2025-03-31T16:17:49.355929Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-31T16:17:49.373916Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-31T16:17:49.374385Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-31T16:17:49.374459Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-31T16:17:49.375009Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-03-31T16:17:49.401719Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-03-31T16:17:49.401932Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-03-31T16:17:49.403024Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9943:7453], server id = [2:9948:7458], tablet id = 72075186224037905, status = OK 2025-03-31T16:17:49.403141Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9943:7453], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-03-31T16:17:49.403405Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9944:7454], server id = [2:9949:7459], tablet id = 72075186224037906, status = OK 2025-03-31T16:17:49.403471Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9944:7454], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-03-31T16:17:49.404482Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9945:7455], server id = [2:9950:7460], tablet id = 72075186224037907, status = OK 2025-03-31T16:17:49.404549Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9945:7455], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-03-31T16:17:49.405338Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9946:7456], server id = [2:9951:7461], tablet id = 72075186224037908, status = OK 2025-03-31T16:17:49.405400Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9946:7456], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-03-31T16:17:49.406364Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9947:7457], server id = [2:9952:7462], tablet id = 72075186224037909, status = OK 2025-03-31T16:17:49.406422Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9947:7457], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-03-31T16:17:49.406602Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-03-31T16:17:49.407352Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9943:7453], server id = [2:9948:7458], tablet id = 72075186224037905 2025-03-31T16:17:49.407388Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:17:49.408625Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-03-31T16:17:49.409046Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-03-31T16:17:49.409193Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 
72075186224037908 2025-03-31T16:17:49.411877Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9944:7454], server id = [2:9949:7459], tablet id = 72075186224037906 2025-03-31T16:17:49.411914Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:17:49.412390Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9957:7467], server id = [2:9958:7468], tablet id = 72075186224037910, status = OK 2025-03-31T16:17:49.412487Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9957:7467], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-03-31T16:17:49.412978Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9945:7455], server id = [2:9950:7460], tablet id = 72075186224037907 2025-03-31T16:17:49.413003Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:17:49.413171Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9946:7456], server id = [2:9951:7461], tablet id = 72075186224037908 2025-03-31T16:17:49.413187Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:17:49.413296Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037909 2025-03-31T16:17:49.413672Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9960:7470], server id = [2:9963:7473], tablet id = 72075186224037911, status = OK 2025-03-31T16:17:49.413722Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9960:7470], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-03-31T16:17:49.413827Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9961:7471], server id = [2:9965:7475], tablet id = 72075186224037912, status = OK 2025-03-31T16:17:49.413860Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9961:7471], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-03-31T16:17:49.414817Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9962:7472], server id = [2:9964:7474], tablet id = 72075186224037913, status = OK 2025-03-31T16:17:49.414866Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9962:7472], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-03-31T16:17:49.414954Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9947:7457], server id = [2:9952:7462], tablet id = 72075186224037909 2025-03-31T16:17:49.414970Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:17:49.415904Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037910 2025-03-31T16:17:49.416389Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9967:7477], server id = [2:9968:7478], tablet id = 72075186224037914, status = OK 2025-03-31T16:17:49.416454Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9967:7477], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-03-31T16:17:49.416589Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037911 2025-03-31T16:17:49.417428Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9957:7467], server id = [2:9958:7468], tablet id = 72075186224037910 2025-03-31T16:17:49.417459Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:17:49.417651Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037912 2025-03-31T16:17:49.417957Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9960:7470], server id = [2:9963:7473], tablet id 
= 72075186224037911 2025-03-31T16:17:49.417987Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:17:49.420127Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037913 2025-03-31T16:17:49.420469Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9961:7471], server id = [2:9965:7475], tablet id = 72075186224037912 2025-03-31T16:17:49.420507Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:17:49.420618Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037914 2025-03-31T16:17:49.420664Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-03-31T16:17:49.420928Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-03-31T16:17:49.421109Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-03-31T16:17:49.421471Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9962:7472], server id = [2:9964:7474], tablet id = 72075186224037913 2025-03-31T16:17:49.421504Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:17:49.421564Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2025-03-31T16:17:49.423915Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9967:7477], server id = [2:9968:7478], tablet id = 72075186224037914 2025-03-31T16:17:49.423951Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:17:49.424753Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-03-31T16:17:49.447981Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ODcyNTE0YzYtZjY2OGZlMTctZjVlMzAwNi1hMGEyYjhlMQ==, TxId: 2025-03-31T16:17:49.448048Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ODcyNTE0YzYtZjY2OGZlMTctZjVlMzAwNi1hMGEyYjhlMQ==, TxId: 2025-03-31T16:17:49.453250Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-31T16:17:49.472641Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037899, LocalPathId: 2] 2025-03-31T16:17:49.472717Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=hUw8, ActorId=[1:4617:3500] 2025-03-31T16:17:49.474128Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:9995:5773]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-03-31T16:17:49.474703Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-31T16:17:49.474792Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-03-31T16:17:49.475136Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-31T16:17:49.475207Z node 1 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2025-03-31T16:17:49.475267Z node 1 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 2] ], StatType[ 2 ], ColumnTag[ 2 ] 2025-03-31T16:17:49.509682Z node 1 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 1 Answer: '/Root/Database/Table1[Value]=4' |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> YdbYqlClient::TestDecimal [GOOD] >> YdbYqlClient::TestBusySession ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TYqlDecimalTests::DecimalKey [GOOD] Test command err: 2025-03-31T16:17:19.923408Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008502702158778:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:19.923459Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d68/r3tmp/tmp47Vytq/pdisk_1.dat 2025-03-31T16:17:20.371779Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:20.371895Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:20.378260Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:17:20.402743Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19895, node 1 2025-03-31T16:17:20.532875Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:17:20.535871Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:17:20.645659Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:20.645689Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:20.645698Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:20.645855Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63780 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:20.936620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:23.047611Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:17:23.203655Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008519882029141:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:23.203740Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008519882029133:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:23.203901Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:23.207728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-31T16:17:23.225301Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008519882029147:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-31T16:17:23.293455Z node 1 :TX_PROXY ERROR: Actor# [1:7488008519882029224:2811] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:17:23.721387Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jqpfwjr1981c6bbb7pfjhxbw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzA3MDNmNWEtZTQ0ZjdkZTgtY2RjMDEzMzAtN2FlNmE0MDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:17:23.892877Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jqpfwk908eyhfn9pvs4x3gs5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzA3MDNmNWEtZTQ0ZjdkZTgtY2RjMDEzMzAtN2FlNmE0MDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:17:24.000096Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01jqpfwkdvd3mdwg0avfb4306j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzA3MDNmNWEtZTQ0ZjdkZTgtY2RjMDEzMzAtN2FlNmE0MDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:17:24.142865Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01jqpfwkh8b2gd1kd16thq1sd1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzA3MDNmNWEtZTQ0ZjdkZTgtY2RjMDEzMzAtN2FlNmE0MDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:17:24.255098Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jqpfwknq3cwtpy9vga3cykb6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzA3MDNmNWEtZTQ0ZjdkZTgtY2RjMDEzMzAtN2FlNmE0MDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-31T16:17:25.776161Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488008528919783652:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:25.776243Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d68/r3tmp/tmpRQlgvm/pdisk_1.dat 2025-03-31T16:17:25.906319Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:25.938799Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:25.938885Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:25.949009Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12529, node 4 2025-03-31T16:17:26.028569Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:26.028599Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:26.028607Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:26.028805Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1825 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:26.269370Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:28.750472Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:17:28.860608Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488008541804686695:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:28.860678Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:28.860857Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488008541804686707:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:28.864875Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-31T16:17:28.900093Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7488008541804686709:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-31T16:17:28.998048Z node 4 :TX_PROXY ERROR: Actor# [4:7488008541804686805:2799] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:17:29.072078Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqpfwr8vdgs07kx6jf3hvptz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=ODZmNTQxZjUtZGMyY2E0MDctNWQ4ODMwMTQtM2VlM2M3ZDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:17:29.212863Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqpfwrfv98he ... 715666. Ctx: { TraceId: 01jqpfx4329fg93wdwjna98n6r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=OTBkYjA5NDAtNTc0OWVkZTMtMmQ1MjA4ZGItYTMyYWUwNTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:17:41.395487Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jqpfx4329fg93wdwjna98n6r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=OTBkYjA5NDAtNTc0OWVkZTMtMmQ1MjA4ZGItYTMyYWUwNTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:17:41.500309Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jqpfx4h37cprfqbwts513dgy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=OTBkYjA5NDAtNTc0OWVkZTMtMmQ1MjA4ZGItYTMyYWUwNTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:17:41.616807Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jqpfx4m306gysgpdqbx8p34n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=OTBkYjA5NDAtNTc0OWVkZTMtMmQ1MjA4ZGItYTMyYWUwNTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:17:41.731471Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01jqpfx4qp4wmwnbntyhhndt7z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=OTBkYjA5NDAtNTc0OWVkZTMtMmQ1MjA4ZGItYTMyYWUwNTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:17:41.739625Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7488008577916962615:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:41.739686Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:17:41.854726Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01jqpfx4v8ddcf0gefq5jy7c4y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=OTBkYjA5NDAtNTc0OWVkZTMtMmQ1MjA4ZGItYTMyYWUwNTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:17:41.959032Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jqpfx4z31aaj3z8f4j138rwx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=OTBkYjA5NDAtNTc0OWVkZTMtMmQ1MjA4ZGItYTMyYWUwNTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-31T16:17:42.262050Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01jqpfx52c3547as7v2s0n0vsx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=OTBkYjA5NDAtNTc0OWVkZTMtMmQ1MjA4ZGItYTMyYWUwNTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:17:42.265378Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01jqpfx52c3547as7v2s0n0vsx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=OTBkYjA5NDAtNTc0OWVkZTMtMmQ1MjA4ZGItYTMyYWUwNTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:17:44.108898Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7488008612269799340:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:44.108970Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d68/r3tmp/tmpTGP45s/pdisk_1.dat 2025-03-31T16:17:44.276333Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:44.319952Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:44.320051Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:44.326574Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10318, node 13 2025-03-31T16:17:44.451881Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:44.451911Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:44.451920Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:44.452080Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18274 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:44.754083Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:17:47.820016Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:17:47.936791Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7488008625154702402:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:47.936795Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7488008625154702395:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:47.936891Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:47.940869Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-31T16:17:47.961335Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7488008625154702409:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-31T16:17:48.019008Z node 13 :TX_PROXY ERROR: Actor# [13:7488008629449669777:2792] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:17:48.139372Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqpfxawz4n75j89jbp8cwc3w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=ZDJmNTQwMGEtYjIyYjZhODctNmNkNmU1NjctOTcyN2IxOGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:17:48.285643Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqpfxb3r0rext93rymtvn3gz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=ZDJmNTQwMGEtYjIyYjZhODctNmNkNmU1NjctOTcyN2IxOGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:17:48.455257Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqpfxb891n3h622krfmpcrmz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=ZDJmNTQwMGEtYjIyYjZhODctNmNkNmU1NjctOTcyN2IxOGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:17:48.604073Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jqpfxbdd6tn95pdk35stzmby, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=ZDJmNTQwMGEtYjIyYjZhODctNmNkNmU1NjctOTcyN2IxOGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:17:48.740992Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jqpfxbj57c0x0gg0cazpe4vs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=ZDJmNTQwMGEtYjIyYjZhODctNmNkNmU1NjctOTcyN2IxOGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:17:48.904245Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jqpfxbpb9fqv4z2pxhxjzxy6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=ZDJmNTQwMGEtYjIyYjZhODctNmNkNmU1NjctOTcyN2IxOGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:17:49.044228Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jqpfxbvdc746pz2hrgfsh9nm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=ZDJmNTQwMGEtYjIyYjZhODctNmNkNmU1NjctOTcyN2IxOGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:17:49.109826Z node 13 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7488008612269799340:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:49.109913Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:17:49.326096Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jqpfxbzsfjzr4zadgttbk965, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=ZDJmNTQwMGEtYjIyYjZhODctNmNkNmU1NjctOTcyN2IxOGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-31T16:17:49.669217Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jqpfxc8newp6qp10gs1njb4v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=ZDJmNTQwMGEtYjIyYjZhODctNmNkNmU1NjctOTcyN2IxOGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:17:49.972531Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01jqpfxckk39bc7aamnd1f846z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=ZDJmNTQwMGEtYjIyYjZhODctNmNkNmU1NjctOTcyN2IxOGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> KqpSystemView::PartitionStatsRange1 >> YdbYqlClient::TestReadTableBatchLimits [GOOD] >> KqpSystemView::PartitionStatsSimple [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcYdbTest::ExplainQuery [GOOD] Test command err: 2025-03-31T16:17:25.070850Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008529554297221:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:25.070915Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d53/r3tmp/tmpeVISXL/pdisk_1.dat 2025-03-31T16:17:25.505989Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:25.516959Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:25.517115Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:25.525615Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6110, node 1 2025-03-31T16:17:25.695679Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:25.695702Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:25.695709Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:25.695811Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10497 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:17:26.002363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:29.567709Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488008544687476828:2144];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:29.568655Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d53/r3tmp/tmpnZrgfL/pdisk_1.dat 2025-03-31T16:17:29.712101Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:29.745227Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:29.745306Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:29.753932Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5024, node 4 2025-03-31T16:17:29.856242Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:29.856263Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:29.856269Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:29.856387Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20542 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:30.126922Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:32.845899Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488008557572379674:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:32.846018Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:32.846332Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488008557572379686:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:32.849971Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:17:32.885762Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7488008557572379688:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:17:32.958581Z node 4 :TX_PROXY ERROR: Actor# [4:7488008557572379760:2679] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:17:34.918939Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7488008566648469341:2077];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:34.919281Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d53/r3tmp/tmp0Z2ELv/pdisk_1.dat 2025-03-31T16:17:35.182974Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6317, node 7 2025-03-31T16:17:35.281170Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:35.281278Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:35.349440Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:17:35.355635Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:35.355658Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:35.355667Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:35.355800Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61391 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:35.645691Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... WARNING: All log messages before y_absl::InitializeLog() is called are written to STDERR E0000 00:00:1743437858.427297 378914 text_format.cc:383] Error parsing text-format Ydb.Type: 3:13: Unknown enumeration value of "TYPE_UNDEFINED" for field "type_id". 
2025-03-31T16:17:38.435491Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488008583828339575:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:38.435577Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:38.435799Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488008583828339587:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:38.439934Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:17:38.475282Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7488008583828339589:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 2814749767 ... 2057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:17:38.634235Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=OGZlNWFlMDEtNDE1ZWZiYTgtM2NlM2ExMTEtNTk0MTZhMDI=, ActorId: [7:7488008583828339549:2332], ActorState: ExecuteState, TraceId: 01jqpfx1m1fvyam5trtx5tvm3h, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:997: ydb/library/mkql_proto/mkql_proto.cpp:1435: Unknown protobuf type:
: Error: Unsupported protobuf type:
: Error: ydb/core/kqp/session_actor/kqp_session_actor.cpp:997: ydb/library/mkql_proto/mkql_proto.cpp:1435: Unknown protobuf type: E0000 00:00:1743437858.642646 378914 text_format.cc:383] Error parsing text-format Ydb.Type: 5:21: Unknown enumeration value of "Int32" for field "type_id". 2025-03-31T16:17:38.640880Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976710660. Ctx: { TraceId: 01jqpfx1m1fvyam5trtx5tvm3h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OGZlNWFlMDEtNDE1ZWZiYTgtM2NlM2ExMTEtNTk0MTZhMDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:17:38.703677Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=OGZlNWFlMDEtNDE1ZWZiYTgtM2NlM2ExMTEtNTk0MTZhMDI=, ActorId: [7:7488008583828339549:2332], ActorState: ExecuteState, TraceId: 01jqpfx1tmc1avr9j4paen7n2d, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:997: ydb/library/mkql_proto/mkql_proto.cpp:1435: Unknown protobuf type: 2025-03-31T16:17:38.705550Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jqpfx1tmc1avr9j4paen7n2d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OGZlNWFlMDEtNDE1ZWZiYTgtM2NlM2ExMTEtNTk0MTZhMDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
: Error: Unsupported protobuf type:
: Error: ydb/core/kqp/session_actor/kqp_session_actor.cpp:997: ydb/library/mkql_proto/mkql_proto.cpp:1435: Unknown protobuf type: 2025-03-31T16:17:40.518687Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7488008591627562190:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:40.518775Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d53/r3tmp/tmplSEzrT/pdisk_1.dat 2025-03-31T16:17:40.787825Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:40.817958Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:40.818056Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:40.820686Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14840, node 10 2025-03-31T16:17:40.882815Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:40.882838Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:40.882845Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:40.882975Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11375 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:41.405386Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:44.166156Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7488008608807432437:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:44.166252Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7488008608807432448:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:44.166308Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:44.171914Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:17:44.199309Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7488008608807432451:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:17:44.276340Z node 10 :TX_PROXY ERROR: Actor# [10:7488008608807432530:2682] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:17:46.199822Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7488008619548744454:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:46.199877Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d53/r3tmp/tmp6Fd5xh/pdisk_1.dat 2025-03-31T16:17:46.362400Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:46.387658Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:46.387765Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:46.392890Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3707, node 13 2025-03-31T16:17:46.459941Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:46.459964Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:46.459974Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:46.460120Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5524 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:46.749136Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:17:46.813817Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:17:49.823251Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7488008632433647535:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:49.823410Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7488008632433647526:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:49.823757Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:49.827966Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-31T16:17:49.862540Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7488008632433647540:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-31T16:17:49.928618Z node 13 :TX_PROXY ERROR: Actor# [13:7488008632433647611:2791] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:17:50.012877Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqpfxcqwbx1z1ep1xk3z6xa5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=ZmVjYzFlYTEtYTkxMDU3ZGUtOTNiNjEwZmYtNWIzNmRjNjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> YdbYqlClient::RetryOperationAsync [GOOD] >> YdbYqlClient::QueryLimits >> KqpSysColV1::SelectRowById >> YdbTableBulkUpsert::Timeout [GOOD] >> YdbTableBulkUpsert::RetryOperationSync >> TYqlDateTimeTests::DatetimeKey [GOOD] >> KqpSysColV1::StreamSelectRange [GOOD] >> KqpSysColV1::InnerJoinTables [GOOD] >> TGRpcNewClient::TestAuth [GOOD] >> TGRpcNewClient::CreateAlterUpsertDrop |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::SelectRange ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsSimple [GOOD] Test command err: Trying to start YDB, gRPC: 14545, MsgBus: 24886 2025-03-31T16:17:46.702699Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008621138757120:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:46.702747Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0017fa/r3tmp/tmpoMOZkF/pdisk_1.dat 2025-03-31T16:17:47.178705Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:47.184662Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:47.185392Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:47.196010Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14545, node 1 2025-03-31T16:17:47.399685Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:47.399710Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:47.399715Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:47.399835Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24886 TClient is connected to server localhost:24886 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:48.141045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:48.180309Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:17:48.193844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:48.420081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:48.592171Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:48.676264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:50.046914Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008638318628074:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:50.047050Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:50.667030Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:17:50.703014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:17:50.779813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:17:50.806683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:17:50.837851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:17:50.872529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:17:50.936439Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008638318628586:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:50.936532Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:50.936543Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008638318628591:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:50.939692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:17:50.950396Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008638318628593:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:17:51.043758Z node 1 :TX_PROXY ERROR: Actor# [1:7488008642613595948:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:17:51.703410Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008621138757120:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:51.703460Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:17:52.649136Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437872622, txId: 281474976710671] shutting down >> KqpSysColV1::StreamSelectRowAsterisk [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientDoesNotProvideAnyCerts [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesCorruptedCert >> KqpSystemView::NodesSimple ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestReadTableBatchLimits [GOOD] Test command err: 2025-03-31T16:17:29.440387Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008546710718593:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:29.440439Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d4b/r3tmp/tmp8q5Djn/pdisk_1.dat 2025-03-31T16:17:29.827756Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:29.841202Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:29.841289Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:29.845879Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15741, node 1 2025-03-31T16:17:29.941758Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:29.941789Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:29.941804Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:29.941920Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20650 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:30.243915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:30.354176Z node 1 :GRPC_SERVER INFO: Got grpc request# ListEndpointsRequest, traceId# 01jqpfwsq3fkbczb5vtdsv6xhz, sdkBuildInfo# ydb-cpp-sdk/dev, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:49158, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# 9.980944s 2025-03-31T16:17:30.398832Z node 1 :GRPC_SERVER DEBUG: Got grpc request# CreateSessionRequest, traceId# 01jqpfwsr50x99rzze3a92d3vx, sdkBuildInfo# ydb-cpp-sdk/dev, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:49166, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-03-31T16:17:32.381902Z node 1 :GRPC_SERVER DEBUG: Got grpc request# CreateTableRequest, traceId# 01jqpfwvpx8q41cjj95yphbxww, sdkBuildInfo# ydb-cpp-sdk/dev, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:54296, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-03-31T16:17:32.382498Z node 1 :TX_PROXY DEBUG: actor# [1:7488008546710718821:2141] Handle TEvProposeTransaction 2025-03-31T16:17:32.382548Z node 1 :TX_PROXY DEBUG: actor# [1:7488008546710718821:2141] TxId# 281474976710658 ProcessProposeTransaction 2025-03-31T16:17:32.382647Z node 1 :TX_PROXY DEBUG: actor# [1:7488008546710718821:2141] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7488008559595621500:2626] 2025-03-31T16:17:32.456733Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008559595621500:2626] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "Test" Columns { Name: "Key" Type: "Uint32" NotNull: false } Columns { Name: "Fk" Type: "Uint64" NotNull: false } Columns { Name: "Value" Type: "String" NotNull: false } KeyColumnNames: "Key" KeyColumnNames: "Fk" UniformPartitionsCount: 16 PartitionConfig { } Temporary: false } CreateIndexedTable { } } } DatabaseName: "" RequestType: "" PeerName: "ipv6:[::1]:54296" 2025-03-31T16:17:32.456776Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008559595621500:2626] txid# 281474976710658 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T16:17:32.457187Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008559595621500:2626] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-03-31T16:17:32.457274Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008559595621500:2626] txid# 
281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:17:32.457455Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008559595621500:2626] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:17:32.457572Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008559595621500:2626] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T16:17:32.457625Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008559595621500:2626] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-03-31T16:17:32.457828Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008559595621500:2626] txid# 281474976710658 HANDLE EvClientConnected 2025-03-31T16:17:32.459913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:17:32.464505Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008559595621500:2626] txid# 281474976710658 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710658} 2025-03-31T16:17:32.464568Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008559595621500:2626] txid# 281474976710658 SEND to# [1:7488008559595621499:2335] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 53} 2025-03-31T16:17:32.465320Z node 1 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-03-31T16:17:32.465347Z node 1 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-03-31T16:17:32.465403Z node 1 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-03-31T16:17:32.465403Z node 1 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-03-31T16:17:32.531218Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7488008559595621576:2696], Recipient [1:7488008559595621680:2342]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:17:32.532517Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7488008559595621566:2686], Recipient [1:7488008559595621707:2348]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:17:32.533143Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7488008559595621565:2685], Recipient [1:7488008559595621715:2349]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:17:32.533662Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7488008559595621574:2694], Recipient [1:7488008559595621687:2345]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:17:32.533957Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7488008559595621561:2681], Recipient [1:7488008559595621716:2350]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:17:32.534318Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7488008559595621564:2684], Recipient [1:7488008559595621717:2351]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:17:32.535062Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7488008559595621571:2691], Recipient [1:7488008559595621670:2339]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:17:32.535354Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender 
[1:7488008559595621562:2682], Recipient [1:7488008559595621681:2343]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:17:32.535696Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7488008559595621573:2693], Recipient [1:7488008559595621672:2341]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:17:32.536090Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7488008559595621575:2695], Recipient [1:7488008559595621695:2346]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:17:32.536397Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7488008559595621567:2687], Recipient [1:7488008559595621740:2354]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:17:32.536894Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7488008559595621563:2683], Recipient [1:7488008559595621682:2344]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:17:32.537127Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7488008559595621570:2690], Recipient [1:7488008559595621722:2353]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:17:32.537524Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7488008559595621572:2692], Recipient [1:7488008559595621671:2340]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:17:32.537709Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7488008559595621568:2688], Recipient [1:7488008559595621721:2352]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:17:32.538032Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7488008559595621569:2689], Recipient [1:7488008559595621706:2347]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:17:32.540890Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:7488008559595621566:2686], Recipient [1:7488008559595621707:2348]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:17:32.541416Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037902 actor [1:7488008559595621707:2348] 2025-03-31T16:17:32.541446Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:7488008559595621575:2695], Recipient [1:7488008559595621695:2346]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:17:32.541673Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:17:32.541809Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037892 actor [1:7488008559595621695:2346] 2025-03-31T16:17:32.542011Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:17:32.554332Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:7488008559595621575:2695], Recipient [1:7488008559595621695:2346]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:17:32.554475Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:7488008559595621563:2683], Recipient [1:7488008559595621682:2344]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:17:32.554936Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:7488008559595621572:2692], Recipient [1:7488008559595621671:2340]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:17:32. ... 
received event# 269553190, Sender [10:7488008640747302928:2405], Recipient [10:7488008632157366441:2344]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1743437871973 TxId: 281474976715678 2025-03-31T16:17:51.991377Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7488008640747302928:2405], Recipient [10:7488008632157366481:2347]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1743437871973 TxId: 281474976715678 2025-03-31T16:17:51.991515Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7488008640747302928:2405], Recipient [10:7488008632157366434:2341]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1743437871973 TxId: 281474976715678 2025-03-31T16:17:51.991527Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7488008640747302928:2405], Recipient [10:7488008632157366433:2340]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1743437871973 TxId: 281474976715678 2025-03-31T16:17:51.991658Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7488008640747302928:2405], Recipient [10:7488008632157366422:2339]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1743437871973 TxId: 281474976715678 2025-03-31T16:17:51.991662Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7488008640747302928:2405], Recipient [10:7488008632157366456:2346]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1743437871973 TxId: 281474976715678 2025-03-31T16:17:51.991798Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7488008640747302928:2405], Recipient [10:7488008632157366435:2342]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1743437871973 TxId: 281474976715678 2025-03-31T16:17:51.991798Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7488008640747302928:2405], Recipient [10:7488008632157366440:2343]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1743437871973 TxId: 281474976715678 2025-03-31T16:17:51.992058Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7488008640747302928:2405], Recipient [10:7488008632157366442:2345]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1743437871973 TxId: 281474976715678 2025-03-31T16:17:51.992079Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7488008640747302928:2405], Recipient [10:7488008632157366491:2348]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1743437871973 TxId: 281474976715678 ---- batch start ---- [[0u];[0u];["A"]] ---- batch end ---- ---- batch start ---- [[1u];[2u];["A"]] ---- batch end ---- ---- batch start ---- [[2u];[4u];["A"]] ---- batch end ---- ---- batch start ---- [[3u];[6u];["A"]] ---- batch end ---- ---- batch start ---- [[4u];[8u];["A"]] ---- batch end ---- ---- batch start ---- [[5u];[10u];["A"]] ---- batch end ---- ---- batch start ---- [[6u];[12u];["A"]] ---- batch end ---- ---- batch start ---- [[7u];[14u];["A"]] ---- batch end ---- ---- batch start ---- [[8u];[16u];["A"]] ---- batch end ---- ---- batch start 
---- [[9u];[18u];["A"]] ---- batch end ---- ---- batch start ---- [[10u];[20u];["A"]] ---- batch end ---- ---- batch start ---- [[11u];[22u];["A"]] ---- batch end ---- ---- batch start ---- [[12u];[24u];["A"]] ---- batch end ---- ---- batch start ---- [[13u];[26u];["A"]] ---- batch end ---- ---- batch start ---- [[14u];[28u];["A"]] ---- batch end ---- ---- batch start ---- [[15u];[30u];["A"]] ---- batch end ---- ---- batch start ---- [[16u];[32u];["A"]] ---- batch end ---- ---- batch start ---- [[17u];[34u];["A"]] ---- batch end ---- ---- batch start ---- [[18u];[36u];["A"]] ---- batch end ---- ---- batch start ---- [[19u];[38u];["A"]] ---- batch end ---- ---- batch start ---- [[20u];[40u];["A"]] ---- batch end ---- ---- batch start ---- [[21u];[42u];["A"]] ---- batch end ---- ---- batch start ---- [[22u];[44u];["A"]] ---- batch end ---- ---- batch start ---- [[23u];[46u];["A"]] ---- batch end ---- ---- batch start ---- [[24u];[48u];["A"]] ---- batch end ---- ---- batch start ---- [[25u];[50u];["A"]] ---- batch end ---- ---- batch start ---- [[26u];[52u];["A"]] ---- batch end ---- ---- batch start ---- [[27u];[54u];["A"]] ---- batch end ---- ---- batch start ---- [[28u];[56u];["A"]] ---- batch end ---- ---- batch start ---- [[29u];[58u];["A"]] ---- batch end ---- ---- batch start ---- [[30u];[60u];["A"]] ---- batch end ---- ---- batch start ---- [[31u];[62u];["A"]] ---- batch end ---- ---- batch start ---- [[32u];[64u];["A"]] ---- batch end ---- ---- batch start ---- [[33u];[66u];["A"]] ---- batch end ---- ---- batch start ---- [[34u];[68u];["A"]] ---- batch end ---- ---- batch start ---- [[35u];[70u];["A"]] ---- batch end ---- ---- batch start ---- [[36u];[72u];["A"]] ---- batch end ---- ---- batch start ---- [[37u];[74u];["A"]] ---- batch end ---- ---- batch start ---- [[38u];[76u];["A"]] ---- batch end ---- ---- batch start ---- [[39u];[78u];["A"]] ---- batch end ---- ---- batch start ---- [[40u];[80u];["A"]] ---- batch end ---- ---- batch start ---- [[41u];[82u];["A"]] ---- batch end ---- ---- batch start ---- [[42u];[84u];["A"]] ---- batch end ---- ---- batch start ---- [[43u];[86u];["A"]] ---- batch end ---- ---- batch start ---- [[44u];[88u];["A"]] ---- batch end ---- ---- batch start ---- [[45u];[90u];["A"]] ---- batch end ---- ---- batch start ---- [[46u];[92u];["A"]] ---- batch end ---- ---- batch start ---- [[47u];[94u];["A"]] ---- batch end ---- ---- batch start ---- [[48u];[96u];["A"]] ---- batch end ---- ---- batch start ---- [[49u];[98u];["A"]] ---- batch end ---- ---- batch start ---- [[50u];[100u];["A"]] ---- batch end ---- ---- batch start ---- [[51u];[102u];["A"]] ---- batch end ---- ---- batch start ---- [[52u];[104u];["A"]] ---- batch end ---- ---- batch start ---- [[53u];[106u];["A"]] ---- batch end ---- ---- batch start ---- [[54u];[108u];["A"]] ---- batch end ---- ---- batch start ---- [[55u];[110u];["A"]] ---- batch end ---- ---- batch start ---- [[56u];[112u];["A"]] ---- batch end ---- ---- batch start ---- [[57u];[114u];["A"]] ---- batch end ---- ---- batch start ---- [[58u];[116u];["A"]] ---- batch end ---- ---- batch start ---- [[59u];[118u];["A"]] ---- batch end ---- ---- batch start ---- [[60u];[120u];["A"]] ---- batch end ---- ---- batch start ---- [[61u];[122u];["A"]] ---- batch end ---- ---- batch start ---- [[62u];[124u];["A"]] ---- batch end ---- ---- batch start ---- [[63u];[126u];["A"]] ---- batch end ---- ---- batch start ---- [[64u];[128u];["A"]] ---- batch end ---- ---- batch start ---- [[65u];[130u];["A"]] ---- batch end ---- ---- batch start ---- 
[[66u];[132u];["A"]] ---- batch end ---- ---- batch start ---- [[67u];[134u];["A"]] ---- batch end ---- ---- batch start ---- [[68u];[136u];["A"]] ---- batch end ---- ---- batch start ---- [[69u];[138u];["A"]] ---- batch end ---- ---- batch start ---- [[70u];[140u];["A"]] ---- batch end ---- ---- batch start ---- [[71u];[142u];["A"]] ---- batch end ---- ---- batch start ---- [[72u];[144u];["A"]] ---- batch end ---- ---- batch start ---- [[73u];[146u];["A"]] ---- batch end ---- ---- batch start ---- [[74u];[148u];["A"]] ---- batch end ---- ---- batch start ---- [[75u];[150u];["A"]] ---- batch end ---- ---- batch start ---- [[76u];[152u];["A"]] ---- batch end ---- ---- batch start ---- [[77u];[154u];["A"]] ---- batch end ---- ---- batch start ---- [[78u];[156u];["A"]] ---- batch end ---- ---- batch start ---- [[79u];[158u];["A"]] ---- batch end ---- ---- batch start ---- [[80u];[160u];["A"]] ---- batch end ---- ---- batch start ---- [[81u];[162u];["A"]] ---- batch end ---- ---- batch start ---- [[82u];[164u];["A"]] ---- batch end ---- ---- batch start ---- [[83u];[166u];["A"]] ---- batch end ---- ---- batch start ---- [[84u];[168u];["A"]] ---- batch end ---- ---- batch start ---- [[85u];[170u];["A"]] ---- batch end ---- ---- batch start ---- [[86u];[172u];["A"]] ---- batch end ---- ---- batch start ---- [[87u];[174u];["A"]] ---- batch end ---- ---- batch start ---- [[88u];[176u];["A"]] ---- batch end ---- ---- batch start ---- [[89u];[178u];["A"]] ---- batch end ---- ---- batch start ---- [[90u];[180u];["A"]] ---- batch end ---- ---- batch start ---- [[91u];[182u];["A"]] ---- batch end ---- ---- batch start ---- [[92u];[184u];["A"]] ---- batch end ---- ---- batch start ---- [[93u];[186u];["A"]] ---- batch end ---- ---- batch start ---- [[94u];[188u];["A"]] ---- batch end ---- ---- batch start ---- [[95u];[190u];["A"]] ---- batch end ---- ---- batch start ---- [[96u];[192u];["A"]] ---- batch end ---- ---- batch start ---- [[97u];[194u];["A"]] ---- batch end ---- ---- batch start ---- [[98u];[196u];["A"]] ---- batch end ---- ---- batch start ---- [[99u];[198u];["A"]] ---- batch end ---- 2025-03-31T16:17:52.001572Z node 10 :GRPC_SERVER DEBUG: [0x51a0000f8a80] received request Name# SchemeOperation ok# false data# peer# current inflight# 0 2025-03-31T16:17:52.001799Z node 10 :GRPC_SERVER DEBUG: [0x51a000157880] received request Name# SchemeOperationStatus ok# false data# peer# current inflight# 0 2025-03-31T16:17:52.001926Z node 10 :GRPC_SERVER DEBUG: [0x51a0000f9080] received request Name# SchemeDescribe ok# false data# peer# current inflight# 0 2025-03-31T16:17:52.002065Z node 10 :GRPC_SERVER DEBUG: [0x51a000157280] received request Name# ChooseProxy ok# false data# peer# current inflight# 0 2025-03-31T16:17:52.002191Z node 10 :GRPC_SERVER DEBUG: [0x51a0000f8480] received request Name# PersQueueRequest ok# false data# peer# current inflight# 0 2025-03-31T16:17:52.002307Z node 10 :GRPC_SERVER DEBUG: [0x51a0000f7e80] received request Name# SchemeInitRoot ok# false data# peer# current inflight# 0 2025-03-31T16:17:52.002457Z node 10 :GRPC_SERVER DEBUG: [0x51a00012f680] received request Name# ResolveNode ok# false data# peer# current inflight# 0 2025-03-31T16:17:52.002592Z node 10 :GRPC_SERVER DEBUG: [0x51a0000ada80] received request Name# FillNode ok# false data# peer# current inflight# 0 2025-03-31T16:17:52.002756Z node 10 :GRPC_SERVER DEBUG: [0x51a0000ad480] received request Name# DrainNode ok# false data# peer# current inflight# 0 2025-03-31T16:17:52.002886Z node 10 :GRPC_SERVER DEBUG: 
[0x51a000156c80] received request Name# BlobStorageConfig ok# false data# peer# current inflight# 0 2025-03-31T16:17:52.003013Z node 10 :GRPC_SERVER DEBUG: [0x51a000153c80] received request Name# HiveCreateTablet ok# false data# peer# current inflight# 0 2025-03-31T16:17:52.003188Z node 10 :GRPC_SERVER DEBUG: [0x51a0000ac880] received request Name# TestShardControl ok# false data# peer# current inflight# 0 2025-03-31T16:17:52.003355Z node 10 :GRPC_SERVER DEBUG: [0x51a0000ac280] received request Name# RegisterNode ok# false data# peer# current inflight# 0 2025-03-31T16:17:52.003508Z node 10 :GRPC_SERVER DEBUG: [0x51a0000abc80] received request Name# CmsRequest ok# false data# peer# current inflight# 0 2025-03-31T16:17:52.003661Z node 10 :GRPC_SERVER DEBUG: [0x51a0000f7880] received request Name# ConsoleRequest ok# false data# peer# current inflight# 0 2025-03-31T16:17:52.003829Z node 10 :GRPC_SERVER DEBUG: [0x51a0000ace80] received request Name# InterconnectDebug ok# false data# peer# current inflight# 0 2025-03-31T16:17:52.003967Z node 10 :GRPC_SERVER DEBUG: [0x51a000158480] received request Name# TabletStateRequest ok# false data# peer# current inflight# 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamSelectRange [GOOD] Test command err: Trying to start YDB, gRPC: 19015, MsgBus: 30977 2025-03-31T16:17:47.774156Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008622234669733:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:47.774202Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0017e7/r3tmp/tmp2ccCJ5/pdisk_1.dat 2025-03-31T16:17:48.266880Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:48.269675Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:48.269778Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:48.276291Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19015, node 1 2025-03-31T16:17:48.369138Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:48.369168Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:48.369177Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:48.369288Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30977 TClient is connected to server localhost:30977 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:49.006777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:49.034039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:49.161946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:49.363589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:17:49.447519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:17:51.210912Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008639414540700:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:51.211053Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:51.555391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:17:51.587476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:17:51.618459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:17:51.652567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:17:51.682473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:17:51.720473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:17:51.774530Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008639414541209:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:51.774608Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:51.774977Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008639414541214:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:51.781074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:17:51.796473Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008639414541216:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:17:51.861605Z node 1 :TX_PROXY ERROR: Actor# [1:7488008639414541272:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:17:52.774275Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008622234669733:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:52.784211Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:17:52.949708Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437872967, txId: 281474976710671] shutting down |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::InnerJoinTables [GOOD] Test command err: Trying to start YDB, gRPC: 64893, MsgBus: 7217 2025-03-31T16:17:46.702165Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008620171835267:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:46.702856Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0017f2/r3tmp/tmpENsVah/pdisk_1.dat 2025-03-31T16:17:47.176806Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:47.186942Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:47.187050Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:47.203408Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64893, node 1 2025-03-31T16:17:47.400052Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:47.400081Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:47.400093Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:47.400242Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7217 TClient is connected to server localhost:7217 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:48.140988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:48.193111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:48.362184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:48.540487Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:48.623577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:49.979691Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008633056738927:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:49.981632Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:50.665621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:17:50.693860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:17:50.727591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:17:50.758456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:17:50.790699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:17:50.821772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:17:50.904958Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008637351706739:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:50.905048Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:50.905313Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008637351706744:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:50.909408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:17:50.918364Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008637351706746:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:17:51.011239Z node 1 :TX_PROXY ERROR: Actor# [1:7488008641646674097:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:17:51.702814Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008620171835267:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:51.702877Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TGRpcLdapAuthentication::LdapAuthWithEmptyPassword [GOOD] >> YdbYqlClient::AlterTableAddIndexAsyncOp [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TYqlDateTimeTests::DatetimeKey [GOOD] Test command err: 2025-03-31T16:17:16.726396Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008489561935162:2143];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:16.726473Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d86/r3tmp/tmpPkN3pm/pdisk_1.dat 2025-03-31T16:17:17.081323Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:17.127709Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected TServer::EnableGrpc on GrpcPort 2025-03-31T16:17:17.127825Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 6034, node 1 2025-03-31T16:17:17.151792Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:17:17.175385Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:17.175418Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:17.175429Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:17.175569Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26792 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:17:17.485030Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:26792 2025-03-31T16:17:17.734825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:17.754135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:18.258681Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488008497177542894:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:18.258741Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:17:18.295986Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:18.298866Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:18.305804Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-03-31T16:17:18.320549Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26792 2025-03-31T16:17:18.588778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:26792 TClient::Ls request: /Root/ydb_ut_tenant/table-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710660 CreateStep: 1743437839030 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-1" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... 
(TRUNCATED) 2025-03-31T16:17:19.389449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:26792 TClient::Ls request: /Root/ydb_ut_tenant/table-2 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-2" PathId: 4 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710661 CreateStep: 1743437839930 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-2" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... (TRUNCATED) 2025-03-31T16:17:20.423904Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2025-03-31T16:17:20.431986Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-03-31T16:17:22.759430Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488008516794082322:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:22.759517Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d86/r3tmp/tmpgxOOz7/pdisk_1.dat 2025-03-31T16:17:22.947854Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:22.986214Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:22.986301Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:22.988803Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28497, node 4 2025-03-31T16:17:23.094564Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:23.094594Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:23.094602Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:23.094759Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29748 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:23.342862Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:29748 2025-03-31T16:17:23.686243Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:23.714381Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:24.227537Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7488008525578098073:2080];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:24.227657Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:17:24.239283Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:24.239366Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:24.253870Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 6 Cookie 6 2025-03-31T16:17:24.255051Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29748 2025-03-31T16:17:26.538606Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, ... initialize from file: (empty maybe) 2025-03-31T16:17:39.434592Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:39.434781Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9762 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:39.833224Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:43.400592Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:17:43.527672Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7488008606100288410:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:43.527672Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7488008606100288402:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:43.527774Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:43.532359Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-31T16:17:43.568498Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7488008606100288416:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-31T16:17:43.665347Z node 10 :TX_PROXY ERROR: Actor# [10:7488008606100288489:2797] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:17:43.829695Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jqpfx6k59gsccgwyt4aq3rn4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=YWUxNjk3ZTMtNmM2OTJhNjgtNGU4OWFjNTUtOGRjYzAyYmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:17:43.967710Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7488008584625450722:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:43.967779Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:17:44.069626Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jqpfx6xedw6kjtb04gxzjn2p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=YWUxNjk3ZTMtNmM2OTJhNjgtNGU4OWFjNTUtOGRjYzAyYmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:17:44.331289Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01jqpfx74f4pzkt4m5yqjb1nzs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=YWUxNjk3ZTMtNmM2OTJhNjgtNGU4OWFjNTUtOGRjYzAyYmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:17:44.567880Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01jqpfx7cjd60dz041f2t297ph, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=YWUxNjk3ZTMtNmM2OTJhNjgtNGU4OWFjNTUtOGRjYzAyYmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-31T16:17:46.335245Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7488008620931560408:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:46.335333Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d86/r3tmp/tmpdxUHNx/pdisk_1.dat 2025-03-31T16:17:46.592722Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29739, node 13 2025-03-31T16:17:46.673077Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:46.673223Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:46.710479Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:17:46.719901Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:46.719934Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:46.719943Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:46.720122Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24013 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:47.228426Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:51.061691Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:17:51.171734Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7488008642406398124:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:51.171766Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7488008642406398116:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:51.171863Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:51.175626Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-31T16:17:51.201037Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7488008642406398130:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-31T16:17:51.260049Z node 13 :TX_PROXY ERROR: Actor# [13:7488008642406398203:2789] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:17:51.335442Z node 13 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7488008620931560408:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:51.335582Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:17:51.347561Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jqpfxe2181cvvyct8kjzbbe0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=Y2YyOWMyNzYtZjFhN2Q5NjMtODA0MjUzMzAtODAyODI5Yzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:17:51.501209Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jqpfxe84fw41kdwe9a85bw6k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=Y2YyOWMyNzYtZjFhN2Q5NjMtODA0MjUzMzAtODAyODI5Yzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:17:51.773442Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01jqpfxecm54m67n6n7nhadf4e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=Y2YyOWMyNzYtZjFhN2Q5NjMtODA0MjUzMzAtODAyODI5Yzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:17:52.038198Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01jqpfxenc6wgxs7j6kbb865s6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=Y2YyOWMyNzYtZjFhN2Q5NjMtODA0MjUzMzAtODAyODI5Yzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root >> YdbTableBulkUpsert::DecimalPK [GOOD] >> YdbTableBulkUpsert::AsyncIndexShouldFail >> KqpSystemView::QueryStatsScan [GOOD] >> YdbYqlClient::TestExplicitPartitioning [GOOD] >> KqpSysColV1::StreamInnerJoinTables [GOOD] >> YdbTableBulkUpsert::Limits [GOOD] >> YdbTableBulkUpsert::Overload ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamSelectRowAsterisk [GOOD] Test command err: Trying to start YDB, gRPC: 6686, MsgBus: 6027 2025-03-31T16:17:48.451902Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008626694812457:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:48.452788Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0017dc/r3tmp/tmpr82byu/pdisk_1.dat 2025-03-31T16:17:48.878666Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:48.882145Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:48.882249Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:48.886865Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6686, node 1 2025-03-31T16:17:49.007387Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:49.007412Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:49.007419Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:49.007530Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6027 TClient is connected to server localhost:6027 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:49.669914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:17:49.719285Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:17:49.745518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:49.922570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:50.077858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:50.142605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:51.978684Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008639579716125:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:51.978795Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:52.277462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:17:52.320575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:17:52.353573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:17:52.384428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:17:52.414477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:17:52.488612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:17:52.570129Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008643874683940:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:52.570201Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:52.570263Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008643874683945:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:52.574630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:17:52.584186Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008643874683947:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:17:52.642855Z node 1 :TX_PROXY ERROR: Actor# [1:7488008643874683999:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:17:53.453788Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008626694812457:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:53.454354Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:17:53.679120Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437873709, txId: 281474976710671] shutting down >> KqpSysColV1::UpdateAndDelete >> KqpSysColV0::InnerJoinTables >> Viewer::JsonStorageListingV1GroupIdFilter [GOOD] >> Viewer::JsonStorageListingV1NodeIdFilter ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcLdapAuthentication::LdapAuthWithEmptyPassword [GOOD] Test command err: 2025-03-31T16:17:31.113016Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008554489680646:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:31.113066Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d49/r3tmp/tmpbhBdEI/pdisk_1.dat 2025-03-31T16:17:31.635763Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:31.660564Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:31.660674Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:31.668093Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64036, node 1 2025-03-31T16:17:31.766497Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:31.766540Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:31.766753Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:31.766868Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9018 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:32.048789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:35.656373Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488008573021885911:2140];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:35.656441Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d49/r3tmp/tmpZ6qv4t/pdisk_1.dat 2025-03-31T16:17:35.865905Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:35.906705Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:35.906803Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:35.910175Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14346, node 4 2025-03-31T16:17:35.999953Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:35.999977Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:35.999985Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:36.000968Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2914 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:36.290429Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:40.372929Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7488008592336295851:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:40.372987Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d49/r3tmp/tmpsHXUWQ/pdisk_1.dat 2025-03-31T16:17:40.558556Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:40.592970Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:40.593061Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:40.598731Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16811, node 7 2025-03-31T16:17:40.710762Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:40.710786Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:40.710794Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:40.710917Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19913 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:41.032742Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:45.170984Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7488008613154996929:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:45.171056Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d49/r3tmp/tmpYm8b0j/pdisk_1.dat 2025-03-31T16:17:45.315612Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:45.352816Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:45.352912Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:45.356491Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19865, node 10 2025-03-31T16:17:45.425473Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:45.425498Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:45.425507Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:45.425682Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5853 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:45.693660Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:50.267326Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7488008636470346526:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:50.267407Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d49/r3tmp/tmp5yO7JJ/pdisk_1.dat 2025-03-31T16:17:50.449077Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:50.476484Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:50.476584Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:50.479397Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31924, node 13 2025-03-31T16:17:50.563263Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:50.563287Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:50.563296Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:50.563429Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11239 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:50.854839Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... >> YdbTableBulkUpsert::ZeroRows [GOOD] >> YdbTableBulkUpsertOlap::ParquetImportBug >> KqpSystemView::FailResolve ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::QueryStatsScan [GOOD] Test command err: Trying to start YDB, gRPC: 1977, MsgBus: 14989 2025-03-31T16:17:48.431696Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008628844277241:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:48.432669Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0017e4/r3tmp/tmpuWlcZg/pdisk_1.dat 2025-03-31T16:17:48.888570Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:48.897696Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:48.897812Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:48.902080Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1977, node 1 2025-03-31T16:17:48.984535Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:48.984572Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:48.984583Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:48.984737Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14989 TClient is connected to server localhost:14989 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:49.547357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:49.563340Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:17:49.577747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:49.732625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:49.922492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:50.006673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:51.920333Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008641729180904:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:51.920475Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:52.239340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:17:52.308926Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:17:52.337660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:17:52.366543Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:17:52.397754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:17:52.449685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:17:52.510441Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008646024148716:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:52.510537Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:52.510750Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008646024148721:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:52.513978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:17:52.522215Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008646024148723:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:17:52.579268Z node 1 :TX_PROXY ERROR: Actor# [1:7488008646024148776:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:17:53.435430Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008628844277241:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:53.435504Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:17:54.738319Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437873842, txId: 281474976710671] shutting down 2025-03-31T16:17:54.921261Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437874855, txId: 281474976710674] shutting down >> TGRpcYdbTest::ExecutePreparedQuery [GOOD] >> TGRpcYdbTest::ExecuteQueryCache ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamInnerJoinTables [GOOD] Test command err: Trying to start YDB, gRPC: 10112, MsgBus: 13702 2025-03-31T16:17:49.856081Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008631887925711:2137];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:49.863684Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0017d7/r3tmp/tmp55k1NI/pdisk_1.dat 2025-03-31T16:17:50.228519Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:50.230217Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:50.230305Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:50.234319Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10112, node 1 2025-03-31T16:17:50.329337Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:50.329362Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:50.329374Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:50.329498Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13702 TClient is connected to server localhost:13702 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:50.820804Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:50.839805Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:17:50.851489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:50.994632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:51.155294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:51.225678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:52.929264Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008644772829294:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:52.929407Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:53.219433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:17:53.247495Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:17:53.277290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:17:53.313690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:17:53.343301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:17:53.421544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:17:53.478430Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008649067797105:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:53.478514Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:53.478761Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008649067797110:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:53.482389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:17:53.492200Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008649067797112:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:17:53.586658Z node 1 :TX_PROXY ERROR: Actor# [1:7488008649067797167:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:17:54.855691Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008631887925711:2137];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:54.855764Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:17:55.079501Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437875095, txId: 281474976710671] shutting down [[[108u];["One"];[8];["Value5"];[108u];["One"];#;["Value31"]]] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::AlterTableAddIndexAsyncOp [GOOD] Test command err: 2025-03-31T16:17:26.298822Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008532741196235:2077];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:26.299299Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d50/r3tmp/tmpkhozo0/pdisk_1.dat 2025-03-31T16:17:26.770577Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:26.770712Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:26.772735Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:26.778901Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19491, node 1 2025-03-31T16:17:26.916229Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:26.916253Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:26.916284Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:26.916402Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16892 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:27.159970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:29.344524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:1, at schemeshard: 72057594046644480 SUCCESS 2025-03-31T16:17:29.523582Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008545626099352:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:29.523662Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008545626099363:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:29.523715Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:29.527168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-31T16:17:29.544988Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008545626099366:2349], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-31T16:17:29.626666Z node 1 :TX_PROXY ERROR: Actor# [1:7488008545626099447:2827] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:17:30.004224Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jqpfwrxe2d1rmnws9bpygxdr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDllYWM0NGYtMWNjYzQzMjEtMWZiNmE1NWUtOTRhMzA1Njk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:17:30.020186Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437850042, txId: 281474976710661] shutting down 2025-03-31T16:17:30.089866Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-03-31T16:17:30.093862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 BAD_REQUEST 2025-03-31T16:17:30.283284Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2025-03-31T16:17:30.284759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 SUCCESS 2025-03-31T16:17:30.461760Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2025-03-31T16:17:32.015343Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488008558215856427:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:32.015473Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d50/r3tmp/tmp8T4Kpd/pdisk_1.dat 2025-03-31T16:17:32.171306Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:32.207715Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:32.207806Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:32.213244Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7744, node 4 2025-03-31T16:17:32.277520Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:32.277543Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:32.277550Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:32.277739Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16553 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:32.474809Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:35.098375Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:17:35.124610Z node 4 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [4:7488008571100759379:2339] 2025-03-31T16:17:35.124855Z node 4 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:17:35.147244Z node 4 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:17:35.147330Z node 4 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:17:35.148694Z node 4 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:17:35.148735Z node 4 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:17:35.148772Z node 4 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:17:35.149072Z node 4 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:17:35.149129Z node 4 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:17:35.149154Z node 4 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [4:7488008571100759403:2339] in generation 1 2025-03-31T16:17:35.159191Z node 4 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:17:35.159238Z node 4 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T16:17:35.159332Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:17:35.159373Z node 4 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [4:7488008571100759406:2340] 2025-03-31T16:17:35.159386Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:17:35.159399Z node 4 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T16:17:35.159412Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:17:35.159552Z node 4 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T16:17:35.159630Z node 4 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to 
remove in 72075186224037888 2025-03-31T16:17:35.159664Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:17:35.159689Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:17:35.159710Z node 4 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:17:35.159730Z no ... nitialize from file: (empty maybe) 2025-03-31T16:17:43.322645Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:43.322791Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13297 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:43.636587Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:46.786454Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7488008619159833289:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:46.786576Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:46.828434Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:17:46.952040Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7488008619159833458:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:46.952175Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:46.954481Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7488008619159833463:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:46.959003Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-31T16:17:46.992008Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7488008619159833465:2354], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-31T16:17:47.091977Z node 10 :TX_PROXY ERROR: Actor# [10:7488008623454800841:2808] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:17:47.180342Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqpfx9y6260gfbp9gwzz5vpe, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NDMxMzQxYmMtZmQyYTQ5MzEtODE0NTRhZDktNGNkMzZlM2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:17:47.241187Z node 10 :TX_PROXY WARN: [AlterTableAddIndex [10:7488008623454800891:2370] TxId# 281474976715663] Access check failed 2025-03-31T16:17:47.270987Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-03-31T16:17:47.368505Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-03-31T16:17:47.462625Z node 10 :TX_PROXY ERROR: [AlterTableAddIndex [10:7488008623454801274:2384] TxId# 281474976715665] Unable to navigate: Root/WrongPath status: PathErrorUnknown 2025-03-31T16:17:47.569801Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 10, TabletId: 72075186224037889 not found 2025-03-31T16:17:49.455811Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7488008630732734782:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:49.455875Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d50/r3tmp/tmpHUmMFE/pdisk_1.dat 2025-03-31T16:17:49.653286Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:49.696484Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:49.696584Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:49.700660Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23845, node 13 2025-03-31T16:17:49.800146Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:49.800172Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:49.800183Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:49.800358Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29640 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:50.130283Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:53.373676Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7488008647912605052:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:53.373852Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:53.392660Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:17:53.524330Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7488008647912605220:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:53.524478Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:53.526588Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7488008647912605225:2354], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:53.531120Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-31T16:17:53.558666Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7488008647912605227:2355], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-31T16:17:53.654552Z node 13 :TX_PROXY ERROR: Actor# [13:7488008647912605302:2808] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:17:53.726840Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqpfxgbhd4rwz9gjnsd42hhr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=MjZmY2FkNTEtNThjYzQwYzItNmUxNjdmM2EtMzUxZmVjZWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:17:53.797514Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-03-31T16:17:53.960493Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-03-31T16:17:54.063296Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 >> KqpSystemView::PartitionStatsParametricRanges >> KqpSysColV1::SelectRowAsterisk ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestExplicitPartitioning [GOOD] Test command err: 2025-03-31T16:17:08.604161Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008456804599073:2276];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:08.604466Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000db5/r3tmp/tmpjUvOBc/pdisk_1.dat 2025-03-31T16:17:08.979319Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:09.000487Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:09.000580Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:09.030608Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27229, node 1 2025-03-31T16:17:09.083238Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:09.083270Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:09.083279Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:09.083438Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4676 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:09.377090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:11.440150Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008469689501765:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:11.440235Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:11.732324Z node 1 :TX_PROXY ERROR: Actor# [1:7488008469689501791:2628] txid# 281474976710658, issues: { message: "Column Key has wrong key type Double" severity: 1 } 2025-03-31T16:17:13.320200Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488008476191617998:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:13.320378Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000db5/r3tmp/tmpIzPZCx/pdisk_1.dat 2025-03-31T16:17:13.501591Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:13.556523Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:13.556646Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:13.559791Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25227, node 4 2025-03-31T16:17:13.629920Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:13.629942Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:13.629954Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:13.630110Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23507 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:13.893995Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:16.755884Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488008489076520933:2355], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:16.758563Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:16.788133Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488008489076520945:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:16.788803Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:16.832509Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488008489076520975:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:16.833159Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:16.835739Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:17:16.840449Z node 4 :TX_PROXY ERROR: Actor# [4:7488008489076521004:2648] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/Test\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeTable, state: EPathStateCreate)" severity: 1 } 2025-03-31T16:17:16.863726Z node 4 :TX_PROXY ERROR: Actor# [4:7488008489076521047:2679] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/Test\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeTable, state: EPathStateCreate)" severity: 1 } 2025-03-31T16:17:16.866982Z node 4 :TX_PROXY ERROR: Actor# [4:7488008489076521067:2690] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/Test\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeTable, state: EPathStateCreate)" severity: 1 } 2025-03-31T16:17:16.869496Z node 4 :TX_PROXY ERROR: Actor# [4:7488008489076521071:2694] txid# 281474976715665, issues: { message: "Check failed: path: \'/Root/Test\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeTable, state: EPathStateCreate)" severity: 1 } 2025-03-31T16:17:16.870379Z node 4 :TX_PROXY ERROR: Actor# [4:7488008489076521068:2691] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/Test\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeTable, state: EPathStateCreate)" severity: 1 } 2025-03-31T16:17:16.872280Z node 4 :TX_PROXY ERROR: Actor# [4:7488008489076521076:2697] txid# 281474976715666, issues: { message: "Check failed: path: \'/Root/Test\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeTable, state: EPathStateCreate)" severity: 1 } 2025-03-31T16:17:16.872425Z node 4 :TX_PROXY ERROR: Actor# [4:7488008489076521077:2698] txid# 281474976715667, issues: { message: "Check failed: path: \'/Root/Test\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeTable, state: EPathStateCreate)" severity: 1 } 2025-03-31T16:17:16.872578Z node 4 :TX_PROXY ERROR: Actor# [4:7488008489076521070:2693] txid# 281474976715664, issues: { message: "Check failed: path: \'/Root/Test\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeTable, state: EPathStateCreate)" severity: 1 } 2025-03-31T16:17:16.872688Z node 4 :TX_PROXY ERROR: Actor# [4:7488008489076521069:2692] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/Test\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeTable, state: EPathStateCreate)" severity: 1 } 2025-03-31T16:17:16.988530Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488008489076521238:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:16.988659Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:16.988723Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] Ac ... :17.110935Z node 4 :TX_PROXY ERROR: Actor# [4:7488008493371488612:2881] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:17:17.222177Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01jqpfwcnv8x5fph4szh1pvjqj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=MWQxMWVjZjgtYTBkMmE0N2QtM2MxZTdiZDUtOTZiYmNiNmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:17:18.922979Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7488008496922349791:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:18.923358Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000db5/r3tmp/tmpAmAQhV/pdisk_1.dat 2025-03-31T16:17:19.046990Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:19.080149Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:19.080214Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:19.083612Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8680, node 7 2025-03-31T16:17:19.147709Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:19.147733Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:19.147741Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:19.147906Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3784 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:17:19.412528Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:22.285170Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:17:22.386123Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488008514102220236:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:22.386130Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488008514102220228:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:22.386189Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:22.389160Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-31T16:17:22.405094Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7488008514102220242:2350], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-31T16:17:22.474198Z node 7 :TX_PROXY ERROR: Actor# [7:7488008514102220319:2852] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:17:22.530642Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqpfwhyg341rbjsx9f1kdwee, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=M2MyODRlMTctZGE1ODc5ODItZmMzZWMwMWYtYmM1YjMxOWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:17:24.210301Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7488008524019310012:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:24.210414Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000db5/r3tmp/tmpvT8aa2/pdisk_1.dat 2025-03-31T16:17:24.473250Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:24.503799Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:24.503890Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:24.506891Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27760, node 10 2025-03-31T16:17:24.603791Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:24.603815Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:24.603823Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:24.603964Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30103 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:24.931351Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:17:27.802460Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:17:29.210699Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7488008524019310012:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:29.210783Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:17:39.430791Z node 10 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:17:39.430825Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:53.708453Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7488008648573363896:2521], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:53.708516Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7488008648573363885:2518], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:53.708608Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:53.712487Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-31T16:17:53.733715Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7488008648573363899:2522], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-31T16:17:53.830984Z node 10 :TX_PROXY ERROR: Actor# [10:7488008648573363975:3188] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:17:53.937260Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqpfxghafgj9t4amrwg1wvms, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NmU0ODM3NTYtZWUzZjNjM2UtNGI2NGY3MDItMmJjOTAzZDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:17:54.428415Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqpfxgstdawcnfw4cfn7yqx3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NmU0ODM3NTYtZWUzZjNjM2UtNGI2NGY3MDItMmJjOTAzZDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> TTableProfileTests::OverwriteExecutionPolicy [GOOD] >> TTableProfileTests::OverwritePartitioningPolicy >> YdbYqlClient::TestBusySession [GOOD] >> YdbYqlClient::TestConstraintViolation >> KqpSystemView::Sessions [GOOD] |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsRange2 [GOOD] >> KqpSystemView::PartitionStatsRange1 [GOOD] >> YdbYqlClient::QueryLimits [GOOD] >> YdbYqlClient::QueryStats ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::Sessions [GOOD] Test command err: Trying to start YDB, gRPC: 21490, MsgBus: 14809 2025-03-31T16:17:46.731276Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008621041804766:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:46.731367Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0017f8/r3tmp/tmpzKyiay/pdisk_1.dat 2025-03-31T16:17:47.172479Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:47.185845Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:47.185959Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:47.203304Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21490, node 1 2025-03-31T16:17:47.399959Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:47.400005Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:47.400020Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:47.400198Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14809 TClient is connected to server localhost:14809 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:48.148133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:48.179743Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:17:48.209695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:17:48.222731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:48.376567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:48.561720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:48.637286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:49.985946Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008633926708420:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:49.986069Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:50.665803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:17:50.710422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:17:50.744432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:17:50.775179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:17:50.806930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:17:50.842176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-31T16:17:50.911975Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008638221676232:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:50.912100Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:50.912437Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008638221676237:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:50.915726Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-03-31T16:17:50.925784Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008638221676239:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-03-31T16:17:51.011245Z node 1 :TX_PROXY ERROR: Actor# [1:7488008642516643589:3458] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:17:51.731866Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008621041804766:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:51.731919Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 1 ydb-cpp-sdk/dev 2025-03-31T16:17:57.023070Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437877006, txId: 281474976710684] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsRange2 [GOOD] Test command err: Trying to start YDB, gRPC: 15586, MsgBus: 18487 2025-03-31T16:17:52.391165Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008645222744195:2197];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:52.391843Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0017d2/r3tmp/tmpCXfadq/pdisk_1.dat 2025-03-31T16:17:52.746789Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:52.749781Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:52.749901Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:52.754195Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15586, node 1 2025-03-31T16:17:52.813568Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:52.813620Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:52.813634Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:52.813804Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18487 TClient is connected to server localhost:18487 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:53.341420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:53.363911Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:17:53.369400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:53.579277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:53.768827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:53.840790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:55.483034Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008658107647726:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:55.483153Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:55.813582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:17:55.852174Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:17:55.887446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:17:55.965885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:17:55.999462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:17:56.071733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:17:56.117217Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008662402615538:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:56.117316Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:56.117481Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008662402615543:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:56.123332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:17:56.133444Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008662402615545:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:17:56.223652Z node 1 :TX_PROXY ERROR: Actor# [1:7488008662402615600:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:17:57.386177Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008645222744195:2197];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:57.386250Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:17:57.458870Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437877425, txId: 281474976710671] shutting down >> KqpSysColV1::SelectRowById [GOOD] >> TGRpcNewClient::CreateAlterUpsertDrop [GOOD] >> TGRpcNewClient::InMemoryTables >> YdbTableBulkUpsert::RetryOperationSync [GOOD] >> YdbTableBulkUpsert::RetryOperation ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsRange1 [GOOD] Test command err: Trying to start YDB, gRPC: 13325, MsgBus: 4125 2025-03-31T16:17:53.227359Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008648910784722:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:53.227649Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0017cb/r3tmp/tmpFamW3i/pdisk_1.dat 2025-03-31T16:17:53.602172Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13325, node 1 2025-03-31T16:17:53.628270Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:53.628366Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:53.632365Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:17:53.658909Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:53.658927Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:53.658932Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:53.659031Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4125 TClient is connected to server localhost:4125 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:54.156774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:54.190726Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:54.358125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:54.522653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:54.591221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:56.291951Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008661795688386:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:56.292053Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:56.627400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:17:56.658626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:17:56.687635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:17:56.718625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:17:56.749034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:17:56.805107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:17:56.852036Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008661795688897:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:56.852109Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:56.852257Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008661795688902:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:56.855666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:17:56.865246Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008661795688904:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:17:56.960349Z node 1 :TX_PROXY ERROR: Actor# [1:7488008661795688957:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:17:58.056997Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437878039, txId: 281474976710671] shutting down 2025-03-31T16:17:58.227626Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008648910784722:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:58.227696Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpSysColV1::SelectRange [GOOD] >> YdbTableBulkUpsert::AsyncIndexShouldFail [GOOD] >> YdbTableBulkUpsert::AsyncIndexShouldSucceed ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::SelectRowById [GOOD] Test command err: Trying to start YDB, gRPC: 7009, MsgBus: 3980 2025-03-31T16:17:53.736976Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008651151338185:2057];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:53.737040Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0017c8/r3tmp/tmp18MYOm/pdisk_1.dat 2025-03-31T16:17:54.208988Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:54.215021Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:54.215172Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:54.221306Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7009, node 1 2025-03-31T16:17:54.327710Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:54.327737Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:54.327760Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:54.327892Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3980 TClient is connected to server localhost:3980 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:54.950750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:54.968536Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:17:54.984004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:55.161871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:55.323030Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:55.400100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:57.053124Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008668331209143:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:57.053325Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:57.395247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:17:57.433985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:17:57.468614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:17:57.500162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:17:57.538002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:17:57.581963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:17:57.645431Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008668331209652:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:57.645512Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:57.645936Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008668331209657:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:57.649948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:17:57.676083Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008668331209659:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:17:57.772391Z node 1 :TX_PROXY ERROR: Actor# [1:7488008668331209716:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:17:58.739207Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008651151338185:2057];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:58.739278Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::SelectRange [GOOD] Test command err: Trying to start YDB, gRPC: 16003, MsgBus: 9413 2025-03-31T16:17:54.487050Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008655069965642:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:54.487120Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0017bf/r3tmp/tmp0hxswu/pdisk_1.dat 2025-03-31T16:17:54.930951Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:54.941537Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:54.941655Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:54.971830Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16003, node 1 2025-03-31T16:17:55.055559Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:55.055581Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:55.055590Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:55.055701Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9413 TClient is connected to server localhost:9413 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:17:55.591294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:55.634082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:55.803586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:55.979830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:56.051337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:57.861617Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008667954869302:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:57.861728Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:58.245194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:17:58.279650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:17:58.361166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:17:58.393749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:17:58.427943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:17:58.507743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:17:58.558555Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008672249837116:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:58.558682Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:58.559003Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008672249837121:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:58.562385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:17:58.570677Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008672249837123:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:17:58.660395Z node 1 :TX_PROXY ERROR: Actor# [1:7488008672249837176:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:17:59.487836Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008655069965642:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:59.487892Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> Cdc::UuidExchange[PqRunner] >> KqpSystemView::FailResolve [GOOD] >> Cdc::KeysOnlyLog[PqRunner] >> Cdc::DocApi[PqRunner] >> KqpSysColV0::InnerJoinTables [GOOD] >> TGRpcYdbTest::ExecuteQueryCache [GOOD] >> KqpSystemView::NodesSimple [GOOD] >> KqpSysColV1::SelectRowAsterisk [GOOD] >> BackupRestore::RestoreTablePartitioningSettings >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeView >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeDir >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeInvalid [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypePersQueueGroup [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeRtmrVolume [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeKesus [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeFileStore [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeReplication [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeExternalTable [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeResourcePool [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeInvalid [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypePersQueueGroup >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeDir [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeBlockStoreVolume [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeExtSubDomain [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeColumnStore [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeColumnTable [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeCdcStream >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::FailResolve [GOOD] Test command err: Trying to start YDB, gRPC: 20764, MsgBus: 30161 2025-03-31T16:17:56.677660Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008663368664530:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:56.678074Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001766/r3tmp/tmpcxcw1a/pdisk_1.dat 2025-03-31T16:17:57.036051Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:57.082364Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:57.082507Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:57.084292Z 
node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20764, node 1 2025-03-31T16:17:57.132500Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:57.132527Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:57.132536Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:57.132690Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30161 TClient is connected to server localhost:30161 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:57.729761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:57.743636Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:17:57.772588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:57.922798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:58.092633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:17:58.160539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:17:59.728824Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008676253568203:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:59.728952Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:59.975248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:00.001868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:00.031550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:00.059221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:00.087557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:00.121448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:00.167906Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008680548536007:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:00.167979Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:00.168014Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008680548536012:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:00.173022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:00.188549Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008680548536014:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:18:00.268504Z node 1 :TX_PROXY ERROR: Actor# [1:7488008680548536068:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:01.065236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:18:01.177457Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:7488008684843503720:3695], for# user0@builtin, access# SelectRow 2025-03-31T16:18:01.177632Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710673. Error resolving keys for entry: { TableId: [OwnerId: 72057594046644480, LocalPathId: 1] Access: 1 SyncVersion: false Status: AccessDenied Kind: KindUnknown PartitionsCount: 0 DomainInfo From: (Uint64 : NULL, Uint64 : NULL, Uint64 : NULL, Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2025-03-31T16:18:01.186789Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTg4YjYzZmItYTI4NmQ3YTgtYTZlMDQ3ZjktZTUxNzAyOWQ=, ActorId: [1:7488008684843503683:2500], ActorState: ExecuteState, TraceId: 01jqpfxqqs8y0kdntp5fxsfs78, Create QueryResponse for error on request, msg: 2025-03-31T16:18:01.187125Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437881176, txId: 281474976710672] shutting down 2025-03-31T16:18:01.187496Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710674. Ctx: { TraceId: 01jqpfxqqs8y0kdntp5fxsfs78, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTg4YjYzZmItYTI4NmQ3YTgtYTZlMDQ3ZjktZTUxNzAyOWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root >> YdbTableBulkUpsertOlap::ParquetImportBug [GOOD] >> KqpSysColV1::UpdateAndDelete [GOOD] >> KqpSystemView::PartitionStatsParametricRanges [GOOD] >> YdbYqlClient::TestConstraintViolation [GOOD] >> YdbTableBulkUpsertOlap::UpsertCSV [GOOD] >> YdbTableBulkUpsertOlap::UpsertArrowBatch_DataShard |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeResourcePool [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcYdbTest::ExecuteQueryCache [GOOD] Test command err: 2025-03-31T16:17:35.714795Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008570057515387:2145];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:35.715480Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d43/r3tmp/tmpRa03LF/pdisk_1.dat 2025-03-31T16:17:36.171725Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:36.182518Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:36.182628Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:36.186519Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22113, node 1 2025-03-31T16:17:36.312149Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:36.312173Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:36.312181Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:36.312304Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30547 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:36.625173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:17:40.584572Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488008591825024849:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:40.584642Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d43/r3tmp/tmpKQHVs5/pdisk_1.dat 2025-03-31T16:17:40.935276Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:40.982325Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:40.982444Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:40.988891Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23927, node 4 2025-03-31T16:17:41.113510Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:41.113538Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:41.113548Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:41.113693Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28173 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:41.388944Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:17:45.708601Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7488008615785719374:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:45.708655Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d43/r3tmp/tmpIU3yF2/pdisk_1.dat 2025-03-31T16:17:45.899151Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:45.937251Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:45.937340Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:45.943561Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16698, node 7 2025-03-31T16:17:46.095809Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:46.095831Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:46.095840Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:46.095965Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12275 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:46.362217Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:49.134795Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488008632965589615:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:49.134798Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488008632965589607:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:49.134885Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:49.138939Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:17:49.183305Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7488008632965589621:2339], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:17:49.280577Z node 7 :TX_PROXY ERROR: Actor# [7:7488008632965589692:2688] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:17:51.703792Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7488008638768670887:2238];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d43/r3tmp/tmpPIvTPU/pdisk_1.dat 2025-03-31T16:17:51.749991Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:17:51.837797Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:51.880616Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:51.880708Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:51.884262Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12813, node 10 2025-03-31T16:17:51.976856Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:51.976891Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:51.976900Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:51.977037Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24887 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:52.265506Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:55.266922Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7488008655948540922:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:55.267054Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:55.267295Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7488008655948540934:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:55.270785Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:17:55.294486Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7488008655948540936:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:17:55.396366Z node 10 :TX_PROXY ERROR: Actor# [10:7488008655948541018:2683] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:17:57.247253Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7488008665522101276:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:57.247354Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d43/r3tmp/tmpaozaQ0/pdisk_1.dat 2025-03-31T16:17:57.484557Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27770, node 13 2025-03-31T16:17:57.611714Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:57.611742Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:57.611758Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:57.611894Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:17:57.618328Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:57.618415Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:57.632756Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25056 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:57.957692Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:57.976716Z node 13 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:18:00.989624Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7488008678407004205:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:00.989639Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7488008678407004213:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:00.989703Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:00.992785Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:18:01.012751Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7488008678407004219:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:18:01.079735Z node 13 :TX_PROXY ERROR: Actor# [13:7488008682701971592:2681] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::SelectRowAsterisk [GOOD] Test command err: Trying to start YDB, gRPC: 4842, MsgBus: 4184 2025-03-31T16:17:57.255154Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008667752440245:2266];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:57.255330Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001734/r3tmp/tmp9AaB1b/pdisk_1.dat 2025-03-31T16:17:57.718930Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:57.719038Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:57.720839Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:17:57.743016Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4842, node 1 2025-03-31T16:17:57.835646Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:57.835667Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:57.835674Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:57.835772Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4184 TClient is connected to server localhost:4184 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:58.369394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:17:58.383840Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:17:58.388146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:58.538232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:58.698481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:58.775304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:00.412555Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008680637343704:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:00.412656Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:00.709286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:00.737720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:00.772240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:00.799459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:00.829780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:00.867111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:00.930075Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008680637344215:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:00.930183Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008680637344220:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:00.930183Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:00.934609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:00.947339Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008680637344222:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:18:01.065375Z node 1 :TX_PROXY ERROR: Actor# [1:7488008684932311574:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> YdbTableBulkUpsert::Overload [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::InnerJoinTables [GOOD] Test command err: Trying to start YDB, gRPC: 7950, MsgBus: 10030 2025-03-31T16:17:56.489045Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008663685621052:2204];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:56.491994Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001774/r3tmp/tmpwBBCiL/pdisk_1.dat 2025-03-31T16:17:56.833610Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7950, node 1 2025-03-31T16:17:56.875203Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:56.875327Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:56.877433Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:17:56.921519Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:56.921535Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:56.921544Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:56.921631Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10030 TClient is connected to server localhost:10030 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:57.494680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:17:57.515364Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:17:57.524966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:57.741870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:57.941512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:58.029092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:59.696286Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008676570524560:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:59.696425Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:00.082040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:00.113736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:00.146858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:00.186443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:00.227072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:00.300167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:00.400901Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008680865492377:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:00.400997Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:00.401275Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008680865492382:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:00.406750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:00.418516Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008680865492384:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:18:00.492735Z node 1 :TX_PROXY ERROR: Actor# [1:7488008680865492439:3453] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:01.488574Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008663685621052:2204];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:01.488628Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::NodesSimple [GOOD] Test command err: Trying to start YDB, gRPC: 20258, MsgBus: 10201 2025-03-31T16:17:55.031793Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008656945161284:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:55.031877Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:17:55.127270Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488008659271009538:2277];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:55.127568Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0017b2/r3tmp/tmpE2QMWY/pdisk_1.dat 2025-03-31T16:17:55.338865Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:17:55.564847Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:55.585641Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:55.585747Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:55.596498Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:55.596595Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:55.597530Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:55.597617Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:55.603059Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:17:55.603782Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:17:55.605605Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:17:55.605776Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, 
NodeId 3 Cookie 3 2025-03-31T16:17:55.606782Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20258, node 1 2025-03-31T16:17:55.725000Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:55.725029Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:55.725037Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:55.725176Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10201 TClient is connected to server localhost:10201 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:56.474294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:56.519130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:56.694597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:57.061709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:57.166184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:59.151767Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008674125032502:2365], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:59.151887Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:59.473941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:17:59.526947Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:17:59.613175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:17:59.700652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:17:59.753565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:17:59.811870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:17:59.886338Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008674125033192:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:59.886428Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:59.886622Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008674125033197:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:59.890509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:17:59.960721Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008674125033199:2404], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:18:00.024134Z node 1 :TX_PROXY ERROR: Actor# [1:7488008678420000572:4129] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:00.031863Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008656945161284:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:00.031916Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:18:00.130539Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488008659271009538:2277];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:00.130600Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:18:01.191975Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437881181, txId: 281474976710671] shutting down 2025-03-31T16:18:01.344371Z node 2 :BS_PROXY_PUT ERROR: [45e6d59f39b402d4] Result# TEvPutResult {Id# [72075186224037900:1:17:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037900:1:17:0:0:41:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "BS_QUEUE is not ready", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2025-03-31T16:18:01.345611Z node 3 :BS_PROXY_PUT ERROR: [b58943145bb6badd] Result# TEvPutResult {Id# [72075186224037889:1:17:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037889:1:17:0:0:41:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "BS_QUEUE: event undelivered", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::UpdateAndDelete [GOOD] Test command err: Trying to start YDB, gRPC: 28688, MsgBus: 61045 2025-03-31T16:17:55.999371Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008657199491476:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:55.999534Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001795/r3tmp/tmpXsHkNX/pdisk_1.dat 2025-03-31T16:17:56.446382Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:56.447185Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 28688, node 1 2025-03-31T16:17:56.455986Z 
node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:17:56.459446Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:17:56.459466Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:17:56.492010Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:56.540289Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:56.540313Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:56.540320Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:56.540440Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61045 TClient is connected to server localhost:61045 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:57.136896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:57.150576Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:17:57.166207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:57.320206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:57.506315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:57.597062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:59.325406Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008674379362434:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:59.325562Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:59.643747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:17:59.673265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:17:59.700891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:17:59.729177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:17:59.757832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:17:59.799180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:17:59.839550Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008674379362942:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:59.839649Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:59.839866Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008674379362947:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:59.843804Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:17:59.853704Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008674379362949:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:17:59.923646Z node 1 :TX_PROXY ERROR: Actor# [1:7488008674379363002:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:00.999699Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008657199491476:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:00.999761Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsParametricRanges [GOOD] Test command err: Trying to start YDB, gRPC: 19041, MsgBus: 31355 2025-03-31T16:17:57.218676Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008665984626333:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:57.219321Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001756/r3tmp/tmp7R1aVQ/pdisk_1.dat 2025-03-31T16:17:57.705899Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:57.706017Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:57.707528Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:17:57.715847Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19041, node 1 2025-03-31T16:17:57.839786Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:57.839813Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:57.839826Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:57.839968Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31355 TClient is connected to server localhost:31355 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:17:58.474411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:58.491979Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:17:58.509275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:58.704949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:58.881586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:58.963323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:00.606191Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008678869530010:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:00.606357Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:00.926537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:01.001153Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:01.031602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:01.064085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:01.093736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:01.136649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:01.178948Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008683164497817:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:01.179033Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:01.179024Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008683164497822:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:01.182863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:01.193300Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008683164497824:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:18:01.264203Z node 1 :TX_PROXY ERROR: Actor# [1:7488008683164497880:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:02.218636Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008665984626333:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:02.218696Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:18:02.647270Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437882624, txId: 281474976710671] shutting down |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbTableBulkUpsertOlap::ParquetImportBug [GOOD] Test command err: 2025-03-31T16:17:28.240486Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008542104173738:2275];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:28.240613Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d4d/r3tmp/tmpWSmVkl/pdisk_1.dat 2025-03-31T16:17:28.673724Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:28.682629Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:28.682723Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:28.689282Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23860, node 1 2025-03-31T16:17:28.851813Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:28.851839Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:28.851849Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:28.852003Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62259 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:29.188163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:31.401074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 CLIENT_DEADLINE_EXCEEDED 2025-03-31T16:17:31.962939Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008554989078438:2440], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:31.962945Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008554989078433:2437], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:31.963033Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:31.966819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-31T16:17:31.984403Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008554989078447:2441], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-31T16:17:32.071358Z node 1 :TX_PROXY ERROR: Actor# [1:7488008559284045876:4251] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:17:32.577198Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jqpfwv9s0txv6rh310aaf3nn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDA3ZDQ1ODEtNGYwNDQ0YjMtMTVjNWQ3YTYtNjY4MzdlYzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:17:34.203854Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488008569580656729:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:34.203956Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d4d/r3tmp/tmpLpZZke/pdisk_1.dat 2025-03-31T16:17:34.357368Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:34.392039Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:34.392137Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:34.395255Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20035, node 4 2025-03-31T16:17:34.483821Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:34.483845Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:34.483852Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:34.483983Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26405 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:34.719361Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:17:37.048818Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:17:38.949629Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7488008583857075979:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:38.949732Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d4d/r3tmp/tmpOtGY9a/pdisk_1.dat 2025-03-31T16:17:39.088170Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:39.123564Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:39.123660Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:39.128054Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22346, node 7 2025-03-31T16:17:39.199769Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:39.199792Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:39.199799Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:39.199939Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4518 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:39.461778Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:17:42.275210Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:17:43.943909Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7488008583857075979:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:43.944005Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:17:51.367611Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7488008642014557786:2073];s ... dy;task=agents_waiting=0;additional_info=();;external_task_id=b75721ac-e4b11f0-96bcd184-41cff473; 2025-03-31T16:18:00.985008Z node 13 :TX_COLUMNSHARD DEBUG: external_task_id=b75721ac-e4b11f0-96bcd184-41cff473;fline=actor.cpp:48;task=agents_waiting=0;additional_info=();; SUCCESS Upsert done: 0.029766s 2025-03-31T16:18:00.992307Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[13:7488008677858373219:2339];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; 2025-03-31T16:18:00.994130Z node 13 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 72075186224037888 2025-03-31T16:18:00.994508Z node 13 :TX_COLUMNSHARD DEBUG: TxWriteIndex[5] (CS::INDEXATION) apply at tablet 72075186224037888 2025-03-31T16:18:00.995516Z node 13 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 72075186224037888 Save Batch GenStep: 1:2 Blob count: 1 2025-03-31T16:18:00.995688Z node 13 :TX_COLUMNSHARD DEBUG: Index: tables 0 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=0;raw_bytes=0;count=0;records=0} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 72075186224037888 2025-03-31T16:18:00.998868Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=b75721ac-e4b11f0-96bcd184-41cff473;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; 2025-03-31T16:18:00.998960Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=b75721ac-e4b11f0-96bcd184-41cff473;fline=with_appended.cpp:65;portions=1,;task_id=b75721ac-e4b11f0-96bcd184-41cff473; 2025-03-31T16:18:00.999286Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=b75721ac-e4b11f0-96bcd184-41cff473;fline=manager.cpp:15;event=unlock;process_id=CS::INDEXATION::b75721ac-e4b11f0-96bcd184-41cff473; 2025-03-31T16:18:00.999372Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=b75721ac-e4b11f0-96bcd184-41cff473;tablet_id=72075186224037888;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-31T16:18:00.999444Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=b75721ac-e4b11f0-96bcd184-41cff473;tablet_id=72075186224037888;fline=columnshard_impl.cpp:781;event=skip_indexation;reason=not_enough_data_and_too_frequency;insert_size=0; 2025-03-31T16:18:00.999511Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=b75721ac-e4b11f0-96bcd184-41cff473;tablet_id=72075186224037888;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=0; 2025-03-31T16:18:00.999988Z node 13 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037888;task_id=b75721ac-e4b11f0-96bcd184-41cff473;tablet_id=72075186224037888;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-31T16:18:01.000050Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=b75721ac-e4b11f0-96bcd184-41cff473;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-31T16:18:01.000095Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=b75721ac-e4b11f0-96bcd184-41cff473;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-31T16:18:01.000236Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=b75721ac-e4b11f0-96bcd184-41cff473;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-31T16:18:01.000596Z node 13 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 72075186224037888 Delete Blob DS:2181038080:[72075186224037888:1:1:3:0:3864:0] 2025-03-31T16:18:01.000631Z node 13 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 72075186224037888 Save Batch GenStep: 1:2 Blob count: 1 2025-03-31T16:18:01.000751Z node 13 :TX_COLUMNSHARD DEBUG: fline=task.cpp:21;event=free_resources;task_id=1;external_task_id=b75721ac-e4b11f0-96bcd184-41cff473;mem=3380;cpu=0; 2025-03-31T16:18:01.000916Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:781;event=skip_indexation;reason=not_enough_data_and_too_frequency;insert_size=0; 2025-03-31T16:18:01.003478Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7488008682153340710:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:01.003553Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7488008682153340716:2414], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:01.003619Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:01.018693Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-31T16:18:01.028009Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[13:7488008677858373219:2339];ev=NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated;fline=columnshard_subdomain_path_id.cpp:90;notify_subdomain=[OwnerId: 72057594046644480, LocalPathId: 1]; 2025-03-31T16:18:01.041560Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[13:7488008677858373219:2339];ev=NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated;fline=columnshard_subdomain_path_id.cpp:90;notify_subdomain=[OwnerId: 72057594046644480, LocalPathId: 1]; 2025-03-31T16:18:01.048754Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7488008682153340726:2415], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-31T16:18:01.106582Z node 13 :TX_PROXY ERROR: Actor# [13:7488008682153340862:2896] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:01.329007Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[13:7488008677858373219:2339];fline=actor.cpp:33;event=skip_flush_writing; 2025-03-31T16:18:01.380080Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqpfxqn59kakg4szeqw40j8e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=YTAwNmY3ODEtNDg1NGRmZDUtOGZlMmE2N2EtNTM3MmRiOGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:18:01.425852Z node 13 :TX_COLUMNSHARD DEBUG: EvScan txId: 281474976715662 scanId: 1 version: {1743437881087:max} readable: {1743437881416:max} at tablet 72075186224037888 2025-03-31T16:18:01.426054Z node 13 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 281474976715662 scanId: 1 at tablet 72075186224037888 2025-03-31T16:18:01.426812Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[13:7488008677858373219:2339];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=281474976715662;scan_id=1;gen=1;table=/Root/OlapStore/OlapTable;snapshot={1743437881087:max};tablet=72075186224037888;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 5 } Columns { Id: 3 } Columns { Id: 4 } Columns { Id: 1 } Columns { Id: 7 } Columns { Id: 2 } Columns { Id: 6 } } } Version: 5 Kernels: "O\002\020AsScalar\t\211\004\235\213\004\213\000?\000\000\235?\000\000\235?\000\0000BlockAsTuple\000\t\211\002?\006?\000\002\000\013?\000\001\t\211\002?\010?\000\002\000?\016\001\000/" ; 2025-03-31T16:18:01.559162Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[13:7488008677858373219:2339];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=281474976715662;scan_id=1;gen=1;table=/Root/OlapStore/OlapTable;snapshot={1743437881087:max};tablet=72075186224037888;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 5 } Columns { Id: 3 } Columns { Id: 4 } Columns { Id: 1 } Columns { Id: 7 } Columns { Id: 2 } Columns { Id: 6 } } } Version: 5 Kernels: "O\002\020AsScalar\t\211\004\235\213\004\213\000?\000\000\235?\000\000\235?\000\0000BlockAsTuple\000\t\211\002?\006?\000\002\000\013?\000\001\t\211\002?\010?\000\002\000?\016\001\000/" ; 2025-03-31T16:18:01.561077Z node 13 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037888;self_id=[13:7488008677858373219:2339];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=281474976715662;scan_id=1;gen=1;table=/Root/OlapStore/OlapTable;snapshot={1743437881087:max};tablet=72075186224037888;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":8},{"from":12},{"from":4},{"from":6},{"from":2},{"from":14},{"from":10}]},{"owner_id":2,"inputs":[{"from":15}]},{"owner_id":4,"inputs":[{"from":15}]},{"owner_id":6,"inputs":[{"from":15}]},{"owner_id":8,"inputs":[{"from":15}]},{"owner_id":10,"inputs":[{"from":15}]},{"owner_id":12,"inputs":[{"from":15}]},{"owner_id":14,"inputs":[{"from":15}]},{"owner_id":15,"inputs":[]}],"nodes":{"15":{"p":{"p":{"data":[{"name":"stringToString","id":7},{"name":"id","id":1},{"name":"timestamp","id":2},{"name":"dateTimeS","id":3},{"name":"dateTimeU","id":4},{"name":"date","id":5},{"name":"utf8ToString","id":6}]},"o":"7,1,2,3,4,5,6","t":"FetchOriginalData"},"w":14,"id":15},"2":{"p":{"i":"5","p":{"address":{"name":"date","id":5}},"o":"5","t":"AssembleOriginalData"},"w":19,"id":2},"8":{"p":{"i":"1","p":{"address":{"name":"id","id":1}},"o":"1","t":"AssembleOriginalData"},"w":19,"id":8},"0":{"p":{"i":"5,3,4,1,7,2,6","t":"Projection"},"w":133,"id":0},"4":{"p":{"i":"3","p":{"address":{"name":"dateTimeS","id":3}},"o":"3","t":"AssembleOriginalData"},"w":19,"id":4},"14":{"p":{"i":"6","p":{"address":{"name":"utf8ToString","id":6}},"o":"6","t":"AssembleOriginalData"},"w":19,"id":14},"10":{"p":{"i":"7","p":{"address":{"name":"stringToString","id":7}},"o":"7","t":"AssembleOriginalData"},"w":19,"id":10},"6":{"p":{"i":"4","p":{"address":{"name":"dateTimeU","id":4}},"o":"4","t":"AssembleOriginalData"},"w":19,"id":6},"12":{"p":{"i":"2","p":{"address":{"name":"timestamp","id":2}},"o":"2","t":"AssembleOriginalData"},"w":19,"id":12}}}; 2025-03-31T16:18:01.568780Z node 13 :TX_COLUMNSHARD INFO: self_id=[13:7488008677858373245:2343];tablet_id=72075186224037888;parent=[13:7488008677858373219:2339];fline=manager.cpp:82;event=ask_data;request=request_id=3;3={portions_count=1};; 2025-03-31T16:18:01.571285Z node 13 :TX_COLUMNSHARD DEBUG: external_task_id=;fline=actor.cpp:48;task=agents_waiting=1;additional_info=();; 2025-03-31T16:18:01.572678Z node 13 :TX_COLUMNSHARD DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=; 2025-03-31T16:18:01.580322Z node 13 :TX_COLUMNSHARD DEBUG: Finished read cookie: 1 at tablet 72075186224037888 2025-03-31T16:18:01.608038Z node 13 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437881087, txId: 18446744073709551615] shutting down >> TestProgram::YqlKernelStartsWithScalar ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestConstraintViolation [GOOD] Test command err: 2025-03-31T16:17:42.103034Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008600714251724:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:42.103206Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d35/r3tmp/tmpJ7qcpH/pdisk_1.dat 2025-03-31T16:17:42.527323Z node 1 :IMPORT WARN: Table profiles were not loaded 
TServer::EnableGrpc on GrpcPort 27900, node 1 2025-03-31T16:17:42.581594Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:42.581708Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:42.582340Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:17:42.582432Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:17:42.674274Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:17:42.691734Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:42.691758Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:42.691767Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:42.691900Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6984 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:43.153236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:45.380638Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008613599154629:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:45.380754Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:45.699528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:17:45.885047Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008613599154811:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:45.885117Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:45.885387Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008613599154816:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:45.889075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-31T16:17:45.920260Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008613599154818:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-31T16:17:45.995593Z node 1 :TX_PROXY ERROR: Actor# [1:7488008613599154895:2809] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:17:46.130061Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jqpfx8ww9eeg4sxpggz4waqm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWU2M2I3N2YtZjQwYWU5ZWQtZTQxYzU3MjItNDg0OThjMzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:17:46.330849Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jqpfx95kd7c7erdtzgh4sz3x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWU2M2I3N2YtZjQwYWU5ZWQtZTQxYzU3MjItNDg0OThjMzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:17:47.905362Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488008622583926634:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:47.905427Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d35/r3tmp/tmpUqYAvY/pdisk_1.dat 2025-03-31T16:17:48.057466Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:48.096301Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:48.096382Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:48.104144Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4241, node 4 2025-03-31T16:17:48.163774Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:48.163802Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:48.163818Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:48.163958Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3055 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:48.400241Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:50.986853Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488008635468829538:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:50.986867Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488008635468829544:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:50.986947Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:50.990480Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:17:51.017868Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7488008635468829552:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:17:51.096227Z node 4 :TX_PROXY ERROR: Actor# [4:7488008639763796923:2682] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:17:52.750786Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7488008645902917310:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:52.750838Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d35/r3tmp/tmpTpbIHy/pdisk_1.dat 2025-03-31T16:17:52.915300Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:52.947783Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:52.947867Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:52.956434Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1188, node 7 2025-03-31T16:17:53.004951Z node 7 :NET_CLASSIFIER WARN: distributab ... d=NWQwNTQ5NjMtNjhhNDRkYTEtNTY2M2RhNzgtNWRjN2NjMGI=, ActorId: [7:7488008658787820212:2335], ActorState: ExecuteState, TraceId: 01jqpfxjqneetv4rn86t68vd6b, Reply query error, msg: Pending previous query completion proxyRequestId: 8 2025-03-31T16:17:55.959419Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=NWQwNTQ5NjMtNjhhNDRkYTEtNTY2M2RhNzgtNWRjN2NjMGI=, ActorId: [7:7488008658787820212:2335], ActorState: ExecuteState, TraceId: 01jqpfxjqneetv4rn86t68vd6b, Reply query error, msg: Pending previous query completion proxyRequestId: 9 2025-03-31T16:17:55.959459Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=NWQwNTQ5NjMtNjhhNDRkYTEtNTY2M2RhNzgtNWRjN2NjMGI=, ActorId: [7:7488008658787820212:2335], ActorState: ExecuteState, TraceId: 01jqpfxjqneetv4rn86t68vd6b, Reply query error, msg: Pending previous query completion proxyRequestId: 10 2025-03-31T16:17:55.959497Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=NWQwNTQ5NjMtNjhhNDRkYTEtNTY2M2RhNzgtNWRjN2NjMGI=, ActorId: [7:7488008658787820212:2335], ActorState: ExecuteState, TraceId: 01jqpfxjqneetv4rn86t68vd6b, Reply query error, msg: Pending previous query completion proxyRequestId: 11 2025-03-31T16:17:55.959586Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=NWQwNTQ5NjMtNjhhNDRkYTEtNTY2M2RhNzgtNWRjN2NjMGI=, ActorId: [7:7488008658787820212:2335], ActorState: ExecuteState, TraceId: 01jqpfxjqneetv4rn86t68vd6b, Reply query error, msg: Pending previous query completion proxyRequestId: 12 2025-03-31T16:17:55.960922Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488008658787820247:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:55.960970Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488008658787820219:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:55.961045Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:55.965617Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:17:55.989460Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7488008658787820250:2350], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:17:56.087735Z node 7 :TX_PROXY ERROR: Actor# [7:7488008663082787634:2695] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:17:57.868409Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7488008667732185233:2086];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:57.868541Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d35/r3tmp/tmpHcAF1E/pdisk_1.dat 2025-03-31T16:17:58.193743Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22287, node 10 2025-03-31T16:17:58.277420Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:58.277542Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:58.287353Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:17:58.327759Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:58.327783Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:58.327792Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:58.327937Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11764 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:58.638537Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:01.460488Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7488008684912055434:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:01.460583Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:01.482745Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:18:01.612593Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7488008684912055599:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:01.612747Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:01.613174Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7488008684912055604:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:01.618851Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-31T16:18:01.646529Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7488008684912055606:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-31T16:18:01.742303Z node 10 :TX_PROXY ERROR: Actor# [10:7488008684912055681:2799] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:01.812222Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jqpfxr8b0n3zmvsq5t8vwk2v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=MmIxMTM5NDItMjBlMzI2NjYtMWU4MGY0MTktZTlkMzlmNWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:18:01.895801Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jqpfxrf8f640ecebswen3g33, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=MmIxMTM5NDItMjBlMzI2NjYtMWU4MGY0MTktZTlkMzlmNWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:18:01.905328Z node 10 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=3; 2025-03-31T16:18:01.916022Z node 10 :TX_DATASHARD ERROR: Prepare transaction failed. txid 3 at tablet 72075186224037888 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-03-31T16:18:01.916234Z node 10 :TX_DATASHARD ERROR: Errors while proposing transaction txid 3 at tablet 72075186224037888 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-03-31T16:18:01.916492Z node 10 :KQP_COMPUTE ERROR: SelfId: [10:7488008684912055746:2335], Table: `Root/Test` ([72057594046644480:2:1]), SessionActorId: [10:7488008684912055431:2335]Got CONSTRAINT VIOLATION for table `Root/Test`. ShardID=72075186224037888, Sink=[10:7488008684912055746:2335].{
: Error: Duplicate keys have been found., code: 2012 } 2025-03-31T16:18:01.917070Z node 10 :KQP_COMPUTE ERROR: SelfId: [10:7488008684912055739:2335], SessionActorId: [10:7488008684912055431:2335], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `Root/Test`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[10:7488008684912055431:2335]. isRollback=0 2025-03-31T16:18:01.917356Z node 10 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=10&id=MmIxMTM5NDItMjBlMzI2NjYtMWU4MGY0MTktZTlkMzlmNWY=, ActorId: [10:7488008684912055431:2335], ActorState: ExecuteState, TraceId: 01jqpfxrf8f640ecebswen3g33, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [10:7488008684912055740:2335] from: [10:7488008684912055739:2335] 2025-03-31T16:18:01.917473Z node 10 :KQP_EXECUTER ERROR: ActorId: [10:7488008684912055740:2335] TxId: 281474976710662. Ctx: { TraceId: 01jqpfxrf8f640ecebswen3g33, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=MmIxMTM5NDItMjBlMzI2NjYtMWU4MGY0MTktZTlkMzlmNWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `Root/Test`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-03-31T16:18:01.918569Z node 10 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=10&id=MmIxMTM5NDItMjBlMzI2NjYtMWU4MGY0MTktZTlkMzlmNWY=, ActorId: [10:7488008684912055431:2335], ActorState: ExecuteState, TraceId: 01jqpfxrf8f640ecebswen3g33, Create QueryResponse for error on request, msg: >> TestProgram::JsonExistsBinary >> TestProgram::YqlKernelStartsWith >> TestProgram::YqlKernelStartsWithScalar [GOOD] >> TestProgram::JsonExistsBinary [GOOD] >> TestProgram::YqlKernelStartsWith [GOOD] |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbTableBulkUpsert::Overload [GOOD] Test command err: 2025-03-31T16:17:00.367696Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008420099538840:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:00.371116Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dc6/r3tmp/tmpvfYZj9/pdisk_1.dat 2025-03-31T16:17:00.748662Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:00.793088Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:00.793219Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:00.796145Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26114, node 1 2025-03-31T16:17:00.887995Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:00.888022Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:00.888029Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:00.888126Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15439 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:01.210640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:17:03.405956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 SUCCESS 2025-03-31T16:17:03.587793Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008432984441934:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:03.587854Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008432984441926:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:03.588159Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:03.591950Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-31T16:17:03.609250Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008432984441940:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-31T16:17:03.696656Z node 1 :TX_PROXY ERROR: Actor# [1:7488008432984442019:2823] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:17:04.537894Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqpfvzk2fjcrtz3eq3pg3yfh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTI1Yjc4MDktMzdiMjM1YzEtYzdjMzE2ZC01NmFlYzQ1Yg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 0 rows 2025-03-31T16:17:04.928037Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqpfw0ht9xd3vzg1p2gnsp98, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTI1Yjc4MDktMzdiMjM1YzEtYzdjMzE2ZC01NmFlYzQ1Yg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 1 rows 2025-03-31T16:17:04.987530Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-03-31T16:17:05.003785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 SUCCESS 2025-03-31T16:17:05.367668Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008420099538840:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:05.367747Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:17:05.408399Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jqpfw11pc1defpmgwgrm04ak, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDA0MDA0NWMtZDQxNTBkMy0yZWU5Njk2ZC04YzZmNWE2NA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 0 rows 2025-03-31T16:17:05.769327Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jqpfw1cdeterjce8wza512sb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDA0MDA0NWMtZDQxNTBkMy0yZWU5Njk2ZC04YzZmNWE2NA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 1 rows 2025-03-31T16:17:05.820651Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2025-03-31T16:17:05.851505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 SUCCESS 2025-03-31T16:17:06.284014Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jqpfw1w05n8saf3hb27ex15c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTk0NTI5MWYtY2FmMmFiYjQtMzIzNDU4ZGItYzAyMGQ3ZjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 0 rows 2025-03-31T16:17:06.660072Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715670. 
Ctx: { TraceId: 01jqpfw27qewqjnax5444t17v6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTk0NTI5MWYtY2FmMmFiYjQtMzIzNDU4ZGItYzAyMGQ3ZjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 1 rows 2025-03-31T16:17:06.706447Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2025-03-31T16:17:06.732761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 SUCCESS 2025-03-31T16:17:07.169856Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01jqpfw2qtfjnzhyagf2jb4fh2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGI4YTBmNzctZWNhOGNiYjUtMzMyNjdhMDUtNTFjZWFlNDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 0 rows 2025-03-31T16:17:07.517789Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01jqpfw33aeg73p9gc46gxmkdk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGI4YTBmNzctZWNhOGNiYjUtMzMyNjdhMDUtNTFjZWFlNDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 1 rows 2025-03-31T16:17:07.562717Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037891 not found 2025-03-31T16:17:07.592547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 SUCCESS 2025-03-31T16:17:08.043221Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715677. Ctx: { TraceId: 01jqpfw3k01a55jw5gj9cdzvdj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTk2MTJlMzUtNTQ0MjM1MTItZDg4OTIyMGYtNmI2ZmMyNDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 0 rows 2025-03-31T16:17:08.418419Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715678. Ctx: { TraceId: 01jqpfw3ym85g69ns3syatr8q3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTk2MTJlMzUtNTQ0MjM1MTItZDg4OTIyMGYtNmI2ZmMyNDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 1 rows 2025-03-31T16:17:08.478875Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037892 not found 2025-03-31T16:17:08.503288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 SUCCESS 2025-03-31T16:17:08.911548Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715681. Ctx: { TraceId: 01jqpfw4er57c2get5qwnv2nj2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2UzOTdkMzctNDk3YzA5YjYtN2JmNTRkNjQtZjNiNzJmMDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 0 rows 2025-03-31T16:17:09.285581Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715682. 
Ctx: { TraceId: 01jqpfw4sr7dhwhbz3qw6rws80, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2UzOTdkMzctNDk3YzA5YjYtN2JmNTRkNjQtZjNiNzJmMDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 1 rows 2025-03-31T16:17:09.324292Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037893 not found 2025-03-31T16:17:09.347652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715684:0, at schemeshard: 72057594046644480 SUCCESS 2025-03-31T16:17:09.769877Z node 1 :KQP_ ... tablet# 72075186224037890, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-31T16:18:02.322799Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037890, table# 1001, finished edge# 0, front# 0 2025-03-31T16:18:02.324244Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037892, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-31T16:18:02.324274Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037892, table# 1001, finished edge# 0, front# 0 2025-03-31T16:18:02.350584Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037891, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-31T16:18:02.350650Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037891, table# 1001, finished edge# 0, front# 0 2025-03-31T16:18:02.353541Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Execute: at tablet# 72075186224037890 2025-03-31T16:18:02.353894Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Execute: at tablet# 72075186224037888 2025-03-31T16:18:02.359871Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037890 2025-03-31T16:18:02.359914Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-03-31T16:18:02.363743Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-31T16:18:02.363780Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 0, front# 0 2025-03-31T16:18:02.365652Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037888 2025-03-31T16:18:02.365694Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:18:02.366260Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037890, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-31T16:18:02.366275Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037890, table# 1001, finished edge# 0, front# 0 2025-03-31T16:18:02.370272Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037892, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-31T16:18:02.370313Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037892, table# 1001, finished edge# 0, front# 0 2025-03-31T16:18:02.370883Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Execute: at tablet# 72075186224037892 2025-03-31T16:18:02.371497Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037890, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-31T16:18:02.371529Z 
node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037890, table# 1001, finished edge# 0, front# 0 2025-03-31T16:18:02.373349Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037892 2025-03-31T16:18:02.373404Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-03-31T16:18:02.374440Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037891, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-31T16:18:02.374468Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037891, table# 1001, finished edge# 0, front# 0 2025-03-31T16:18:02.374849Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Execute: at tablet# 72075186224037891 2025-03-31T16:18:02.376537Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037892, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-31T16:18:02.376567Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037892, table# 1001, finished edge# 0, front# 0 2025-03-31T16:18:02.377162Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037891 2025-03-31T16:18:02.377212Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 .2025-03-31T16:18:02.379471Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-31T16:18:02.379500Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 0, front# 0 2025-03-31T16:18:02.382291Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037890, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-31T16:18:02.382320Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037890, table# 1001, finished edge# 0, front# 0 2025-03-31T16:18:02.383050Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037891, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-31T16:18:02.383073Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037891, table# 1001, finished edge# 0, front# 0 2025-03-31T16:18:02.387243Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Execute: at tablet# 72075186224037888 2025-03-31T16:18:02.389666Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037888 2025-03-31T16:18:02.389708Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:18:02.391096Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-31T16:18:02.391129Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 0, front# 0 2025-03-31T16:18:02.392409Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037892, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-31T16:18:02.392438Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037892, table# 1001, finished edge# 0, front# 0 2025-03-31T16:18:02.392952Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-31T16:18:02.392980Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 
72075186224037888, table# 1001, finished edge# 0, front# 0 2025-03-31T16:18:02.393248Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Execute: at tablet# 72075186224037892 2025-03-31T16:18:02.393553Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037891, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-31T16:18:02.393571Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037891, table# 1001, finished edge# 0, front# 0 2025-03-31T16:18:02.393962Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Execute: at tablet# 72075186224037891 2025-03-31T16:18:02.395197Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037892 2025-03-31T16:18:02.395247Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-03-31T16:18:02.395674Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037891 2025-03-31T16:18:02.395720Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 .2025-03-31T16:18:02.398485Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037891, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-31T16:18:02.398524Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037891, table# 1001, finished edge# 0, front# 0 2025-03-31T16:18:02.398958Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037892, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-31T16:18:02.398979Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037892, table# 1001, finished edge# 0, front# 0 2025-03-31T16:18:02.411833Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-31T16:18:02.411870Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 0, front# 0 2025-03-31T16:18:02.411906Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037891, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-31T16:18:02.411925Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037891, table# 1001, finished edge# 0, front# 0 2025-03-31T16:18:02.412110Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Execute: at tablet# 72075186224037888 2025-03-31T16:18:02.412346Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Execute: at tablet# 72075186224037891 2025-03-31T16:18:02.413082Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037892, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-31T16:18:02.413112Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037892, table# 1001, finished edge# 0, front# 0 2025-03-31T16:18:02.413551Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Execute: at tablet# 72075186224037892 2025-03-31T16:18:02.416922Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037891 2025-03-31T16:18:02.416966Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-03-31T16:18:02.416994Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037892 2025-03-31T16:18:02.417019Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-03-31T16:18:02.417079Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) 
Complete: at tablet# 72075186224037888 2025-03-31T16:18:02.417094Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 .2025-03-31T16:18:02.420813Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037892, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-31T16:18:02.420817Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-31T16:18:02.420835Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 0, front# 0 2025-03-31T16:18:02.420851Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037892, table# 1001, finished edge# 0, front# 0 2025-03-31T16:18:02.423823Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037891, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-31T16:18:02.423857Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037891, table# 1001, finished edge# 0, front# 0 2025-03-31T16:18:02.441118Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-31T16:18:02.441155Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 0, front# 0 2025-03-31T16:18:02.442021Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037892, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-31T16:18:02.442053Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037892, table# 1001, finished edge# 0, front# 0 2025-03-31T16:18:02.443476Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037891, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-31T16:18:02.443498Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037891, table# 1001, finished edge# 0, front# 0 >> KqpSystemView::PartitionStatsFollower [GOOD] >> TGRpcNewClient::InMemoryTables [GOOD] |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelStartsWith [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 7 } Arguments { Id: 9 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\001H\213\002\203\014\001\235?\002\001\235?\006\001\002\000\t\211\002?\014\235?\000\001\002\000\t\251\000?\022\014Arg\000\000\t\211\002?\016\235?\004\001\002\000\t\211\006?\034\203\005@?\022?\022$BlockFunc\000\003? 
\024StartsWith?\030?\030\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 7 } Arguments { Id: 9 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\001H\213\002\203\014\001\235?\002\001\235?\006\001\002\000\t\211\002?\014\235?\000\001\002\000\t\251\000?\022\014Arg\000\000\t\211\002?\016\235?\004\001\002\000\t\211\006?\034\203\005@?\022?\022$BlockFunc\000\003? \024StartsWith?\030?\030\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:154;graph_constructed=digraph program {N0[shape=box, label="N3(28):{\"i\":\"7,9\",\"o\":\"15\",\"t\":\"Calculation\"}\nREMOVE:7,9"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N1(9):{\"i\":\"7\",\"p\":{\"address\":{\"name\":\"string\",\"id\":7}},\"o\":\"7\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N2(9):{\"i\":\"9\",\"p\":{\"address\":{\"name\":\"substring\",\"id\":9}},\"o\":\"9\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N4(28):{\"i\":\"15\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N5[label="1"]; N6[shape=box, label="N0(4):{\"p\":{\"data\":[{\"name\":\"string\",\"id\":7},{\"name\":\"substring\",\"id\":9}]},\"o\":\"7,9\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N6->N2->N4->N0->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":6}]},{"owner_id":4,"inputs":[{"from":6}]},{"owner_id":5,"inputs":[{"from":0}]},{"owner_id":6,"inputs":[]}],"nodes":{"2":{"p":{"i":"7","p":{"address":{"name":"string","id":7}},"o":"7","t":"AssembleOriginalData"},"w":9,"id":2},"6":{"p":{"p":{"data":[{"name":"string","id":7},{"name":"substring","id":9}]},"o":"7,9","t":"FetchOriginalData"},"w":4,"id":6},"5":{"p":{"i":"15","t":"Projection"},"w":28,"id":5},"4":{"p":{"i":"9","p":{"address":{"name":"substring","id":9}},"o":"9","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"7,9","o":"15","t":"Calculation"},"w":28,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9UInt8TypeE; >> TColumnEngineTestLogs::IndexReadWithPredicates ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelStartsWithScalar [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Bytes: "Lorem" } } } Command { Assign { Column { Id: 16 } Function { Arguments { Id: 7 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 
} } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\004\203\001H\203\005@\213\002\203\014\001\235?\004\001\235?\010\001\006\000\t\211\004?\016\235?\000\001\235?\002\000\006\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\002?\020\235?\006\001\006\000\t\211\006?$\203\005@?\024?\026$BlockFunc\000\003?(\024StartsWith?\034? \001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Bytes: "Lorem" } } } Command { Assign { Column { Id: 16 } Function { Arguments { Id: 7 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\004\203\001H\203\005@\213\002\203\014\001\235?\004\001\235?\010\001\006\000\t\211\004?\016\235?\000\001\235?\002\000\006\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\002?\020\235?\006\001\006\000\t\211\006?$\203\005@?\024?\026$BlockFunc\000\003?(\024StartsWith?\034? \001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:154;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"Lorem\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N3(17):{\"i\":\"7,15\",\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:7,15"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N1(2):{\"p\":{\"data\":[{\"name\":\"string\",\"id\":7}]},\"o\":\"7\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N3[shape=box, label="N2(7):{\"i\":\"7\",\"p\":{\"address\":{\"name\":\"string\",\"id\":7}},\"o\":\"7\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N4(17):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N0->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"i":"7,15","o":"16","t":"Calculation"},"w":17,"id":1},"3":{"p":{"i":"7","p":{"address":{"name":"string","id":7}},"o":"7","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"p":{"data":[{"name":"string","id":7}]},"o":"7","t":"FetchOriginalData"},"w":2,"id":2},"4":{"p":{"i":"16","t":"Projection"},"w":17,"id":4},"0":{"p":{"p":{"v":"Lorem"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9UInt8TypeE; |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::JsonExistsBinary [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 
8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\016\020JsonNode\006Arg\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\032\000\t\211\004?\022\235?\002\001\235?\004\000\032\000\t\251\000?\030\006\000\t\251\000?\032\006\000\000\t\211\002?\024\235?\n\001\032\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\006\000\t\251\000?\004\006\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\006\000\t\211\022?6\211\010?6\207\203\021H\214\n\210\203\001H\214\002?6\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\002\207\203\014\026\000\t\211\010?J\203\005@\200\203\005@\202\022\000\003?d6Json2.JsonDocumentSqlExists\202\003?f\000\002\017\003?L\000\003?N\000\003?P\000\003?R\000\027?T?<\t\211\014?V\211\002?V\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?|\203\005@\200\203\005@\202\022\000\003?\210\"Json2.CompilePath\202\003?\212\000\002\017\003?~\000\003?\200\000\003?\202\000\003?\204\000?4\036\010\000?\\7?`\003?^\000\276\001\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\016\020JsonNode\006Arg\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\032\000\t\211\004?\022\235?\002\001\235?\004\000\032\000\t\251\000?\030\006\000\t\251\000?\032\006\000\000\t\211\002?\024\235?\n\001\032\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\006\000\t\251\000?\004\006\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\006\000\t\211\022?6\211\010?6\207\203\021H\214\n\210\203\001H\214\002?6\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\002\207\203\014\026\000\t\211\010?J\203\005@\200\203\005@\202\022\000\003?d6Json2.JsonDocumentSqlExists\202\003?f\000\002\017\003?L\000\003?N\000\003?P\000\003?R\000\027?T?<\t\211\014?V\211\002?V\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?|\203\005@\200\203\005@\202\022\000\003?\210\"Json2.CompilePath\202\003?\212\000\002\017\003?~\000\003?\200\000\003?\202\000\003?\204\000?4\036\010\000?\\7?`\003?^\000\276\001\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:154;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N3(17):{\"i\":\"6,15\",\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,6"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N1(2):{\"p\":{\"data\":[{\"name\":\"json_binary\",\"id\":6}]},\"o\":\"6\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N3[shape=box, label="N2(7):{\"i\":\"6\",\"p\":{\"address\":{\"name\":\"json_binary\",\"id\":6}},\"o\":\"6\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N4(17):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N0->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"i":"6,15","o":"16","t":"Calculation"},"w":17,"id":1},"3":{"p":{"i":"6","p":{"address":{"name":"json_binary","id":6}},"o":"6","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"p":{"data":[{"name":"json_binary","id":6}]},"o":"6","t":"FetchOriginalData"},"w":2,"id":2},"4":{"p":{"i":"16","t":"Projection"},"w":17,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; json_binary: [ 7B226B6579223A2276616C7565227D, 5B5D ] json_binary: [ 010200002100000014000000030300000200000000040000C00400006B65790076616C756500, 010100000000000000000000 ] FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9UInt8TypeE; >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesCorruptedCert [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesCorruptedPrivatekey >> TColumnEngineTestLogs::IndexReadWithPredicates [GOOD] >> TColumnEngineTestLogs::IndexTtl |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> YdbTableBulkUpsert::RetryOperation [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsFollower [GOOD] Test 
command err: Trying to start YDB, gRPC: 21816, MsgBus: 20747 2025-03-31T16:17:49.503912Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008632104327250:2267];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:49.504125Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0017da/r3tmp/tmpZLaTaQ/pdisk_1.dat 2025-03-31T16:17:49.958423Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:49.958561Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:49.964224Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:17:49.990612Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21816, node 1 2025-03-31T16:17:50.002012Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:17:50.008733Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:17:50.059342Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:50.059387Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:50.059395Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:50.059533Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20747 TClient is connected to server localhost:20747 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:50.612233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:17:51.003290Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:7488008632104327466:2194]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:17:51.003345Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:17:51.003439Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:7488008632104327466:2194], Recipient [1:7488008632104327466:2194]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:17:51.003455Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:17:52.004378Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:7488008632104327466:2194]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:17:52.004415Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:17:52.004517Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:7488008632104327466:2194], Recipient [1:7488008632104327466:2194]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:17:52.004532Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:17:52.731353Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008644989229595:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:52.731483Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:53.004768Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:7488008632104327466:2194]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:17:53.004811Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:17:53.004866Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:7488008632104327466:2194], Recipient [1:7488008632104327466:2194]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:17:53.004878Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:17:53.016636Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7488008649284196917:2313], Recipient [1:7488008632104327466:2194]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:17:53.016675Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:17:53.016688Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-03-31T16:17:53.016726Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7488008649284196913:2310], Recipient [1:7488008632104327466:2194]: {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-03-31T16:17:53.016759Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-03-31T16:17:53.074940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "Followers" Columns { Name: "Key" Type: "Uint64" NotNull: false } Columns { Name: "Value" Type: "String" NotNull: false } KeyColumnNames: "Key" PartitionConfig { ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } FollowerGroups { FollowerCount: 3 RequireAllDataCenters: false } } Temporary: false } } TxId: 281474976710658 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-03-31T16:17:53.075462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Followers, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:17:53.075599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /Root/Followers, opId: 281474976710658:0, schema: Name: "Followers" Columns { Name: "Key" Type: "Uint64" NotNull: false } Columns { Name: "Value" Type: "String" NotNull: false } KeyColumnNames: "Key" PartitionConfig { ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } FollowerGroups { FollowerCount: 3 RequireAllDataCenters: false } } Temporary: false, at schemeshard: 72057594046644480 2025-03-31T16:17:53.076174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: Followers, child id: [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-03-31T16:17:53.076211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for 
pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 0 2025-03-31T16:17:53.076246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-03-31T16:17:53.076368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new path created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-03-31T16:17:53.076382Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2025-03-31T16:17:53.076428Z node 1 :SYSTEM_VIEWS TRACE: TEvSysView::TEvSetPartitioning: domainKey [OwnerId: 72057594046644480, LocalPathId: 1] pathId [OwnerId: 72057594046644480, LocalPathId: 2] path /Root/Followers ShardIndices size 1 2025-03-31T16:17:53.077142Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TCreateTable Propose creating new table opId# 281474976710658:0 path# /Root/Followers pathId# [OwnerId: 72057594046644480, LocalPathId: 2] schemeshard# 72057594046644480 tx# WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "Followers" Columns { Name: "Key" Type: "Uint64" NotNull: false } Columns { Name: "Value" Type: "String" NotNull: false } KeyColumnNames: "Key" PartitionConfig { ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } FollowerGroups { FollowerCount: 3 RequireAllDataCenters: false } } Temporary: false } FailOnExist: false 2025-03-31T16:17:53.077270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-31T16:17:53.077291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:17:53.077380Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-31T16:17:53.077417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-03-31T16:17:53.077443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-03-31T16:17:53.077995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710658, response: Status: StatusAccepted TxId: 281474976710658 SchemeshardId: 72057594046644480 PathId: 2, at schemeshard: 72057594046644480 2025-03-31T16:17:53.078149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Followers 2025-03-31T16:17:53.078171Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-31T16:17:53.078183Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 281474976710658:0 2025-03-31T16:17:53.078371Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:7488008632104327466:2194] ... 
e ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { Memory: 82488 } ShardState: 2 UserTablePartOwners: 72075186224037888 NodeId: 1 StartTime: 1743437873156 TableOwnerId: 72057594046644480 FollowerId: 0 2025-03-31T16:18:03.190746Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-03-31T16:18:03.190785Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 800 rowCount 4 cpuUsage 0 2025-03-31T16:18:03.190882Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] raw table stats: DataSize: 800 RowCount: 4 IndexSize: 0 InMemSize: 800 LastAccessTime: 1743437873663 LastUpdateTime: 1743437873521 ImmediateTxCompleted: 1 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 4 RowDeletes: 0 RowReads: 1 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-03-31T16:18:03.190918Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.099992s, queue# 1 2025-03-31T16:18:03.207320Z node 1 :TX_DATASHARD TRACE: StateWorkAsFollower, received event# 2146435079, Sender [0:0:0], Recipient [1:7488008649284196981:2339]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-03-31T16:18:03.207356Z node 1 :TX_DATASHARD TRACE: StateWorkAsFollower, processing event TEvPrivate::TEvPeriodicWakeup 2025-03-31T16:18:03.207482Z node 1 :TX_DATASHARD DEBUG: SendPeriodicTableStats register new pipe at datashard 72075186224037888 FollowerId 3, TableInfos size = 1 2025-03-31T16:18:03.207551Z node 1 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186224037888, FollowerId 3, tableId 2 2025-03-31T16:18:03.208034Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7488008692233870262:2530], Recipient [1:7488008632104327466:2194]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:18:03.208060Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:18:03.208074Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-03-31T16:18:03.210102Z node 1 :TX_DATASHARD TRACE: StateWorkAsFollower, received event# 269877760, Sender [1:7488008692233870261:2412], Recipient [1:7488008649284196981:2339]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046644480 Status: OK ServerId: [1:7488008692233870262:2530] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-03-31T16:18:03.210141Z node 1 :TX_DATASHARD TRACE: StateWorkAsFollower, processing event TEvTabletPipe::TEvClientConnected 2025-03-31T16:18:03.210851Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [1:7488008649284196981:2339], Recipient [1:7488008632104327466:2194]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186224037888 TableLocalId: 2 Generation: 1 Round: 0 TableStats { ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 1 
RangeReadRows: 2 LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { } ShardState: 3 NodeId: 1 StartTime: 1743437873195 TableOwnerId: 72057594046644480 FollowerId: 3 2025-03-31T16:18:03.210881Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-03-31T16:18:03.210912Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 3 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Readonly' dataSize 0 rowCount 0 cpuUsage 0 2025-03-31T16:18:03.210961Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 3 pathId [OwnerId: 72057594046644480, LocalPathId: 2] raw table stats: ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 1 RangeReadRows: 2 LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-03-31T16:18:03.211345Z node 1 :TX_DATASHARD TRACE: StateWorkAsFollower, received event# 2146435079, Sender [0:0:0], Recipient [1:7488008649284196983:2340]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-03-31T16:18:03.211371Z node 1 :TX_DATASHARD TRACE: StateWorkAsFollower, processing event TEvPrivate::TEvPeriodicWakeup 2025-03-31T16:18:03.211423Z node 1 :TX_DATASHARD TRACE: StateWorkAsFollower, received event# 2146435079, Sender [0:0:0], Recipient [1:7488008649284196979:2338]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-03-31T16:18:03.211441Z node 1 :TX_DATASHARD TRACE: StateWorkAsFollower, processing event TEvPrivate::TEvPeriodicWakeup 2025-03-31T16:18:03.291361Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435091, Sender [0:0:0], Recipient [1:7488008632104327466:2194]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-03-31T16:18:03.291420Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-03-31T16:18:03.291443Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 2 2025-03-31T16:18:03.291509Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 2 2025-03-31T16:18:03.291532Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 2 2025-03-31T16:18:03.291605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046644480:1 data size 800 row count 4 2025-03-31T16:18:03.291667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], pathId map=Followers, is column=0, is olap=0 2025-03-31T16:18:03.291709Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037888 followerId=0, pathId 2: RowCount 4, DataSize 800 2025-03-31T16:18:03.291727Z node 1 :FLAT_TX_SCHEMESHARD TRACE: BuildStatsForCollector: datashardId 72075186224037888, followerId 0 2025-03-31T16:18:03.291786Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Skipped shard# 72057594046644480:1 with partCount# 0, rowCount# 4, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72057594046644480 2025-03-31T16:18:03.291867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 
shard idx 72057594046644480:1 data size 0 row count 0 2025-03-31T16:18:03.291886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=3, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], pathId map=Followers, is column=0, is olap=0 2025-03-31T16:18:03.291896Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037888 followerId=3, pathId 2: RowCount 0, DataSize 0 2025-03-31T16:18:03.291904Z node 1 :FLAT_TX_SCHEMESHARD TRACE: BuildStatsForCollector: datashardId 72075186224037888, followerId 3 2025-03-31T16:18:03.291956Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-31T16:18:03.292067Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435091, Sender [0:0:0], Recipient [1:7488008632104327466:2194]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-03-31T16:18:03.292079Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-03-31T16:18:03.292094Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-03-31T16:18:03.292893Z node 1 :SYSTEM_VIEWS TRACE: TEvSysView::TEvSendPartitionStats: domainKey [OwnerId: 72057594046644480, LocalPathId: 1] pathId [OwnerId: 72057594046644480, LocalPathId: 2] shardIdx 72057594046644480 1 followerId 0 stats DataSize: 800 RowCount: 4 IndexSize: 0 CPUCores: 0 TabletId: 72075186224037888 NodeId: 1 StartTime: 1743437873156 AccessTime: 1743437873663 UpdateTime: 1743437873521 InFlightTxCount: 0 RowUpdates: 4 RowDeletes: 0 RowReads: 1 RangeReads: 0 RangeReadRows: 0 ImmediateTxCompleted: 1 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 ByKeyFilterSize: 0 FollowerId: 0 LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-03-31T16:18:03.293037Z node 1 :SYSTEM_VIEWS TRACE: TEvSysView::TEvSendPartitionStats: domainKey [OwnerId: 72057594046644480, LocalPathId: 1] pathId [OwnerId: 72057594046644480, LocalPathId: 2] shardIdx 72057594046644480 1 followerId 3 stats DataSize: 0 RowCount: 0 IndexSize: 0 CPUCores: 0 TabletId: 72075186224037888 NodeId: 1 StartTime: 1743437873195 AccessTime: 0 UpdateTime: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 1 RangeReadRows: 2 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 ByKeyFilterSize: 0 FollowerId: 3 LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-03-31T16:18:04.023269Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:7488008632104327466:2194]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:18:04.023303Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:18:04.023337Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:7488008632104327466:2194], Recipient [1:7488008632104327466:2194]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:18:04.023362Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime ... 
SELECT from partition_stats, attempt 2 2025-03-31T16:18:04.840536Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7488008696528837585:2424], owner: [1:7488008696528837581:2422], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2025-03-31T16:18:04.841022Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7488008696528837585:2424], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-03-31T16:18:04.841225Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274595843, Sender [1:7488008696528837585:2424], Recipient [1:7488008632104327466:2194]: NKikimrSysView.TEvGetPartitionStats DomainKeyOwnerId: 72057594046644480 DomainKeyPathId: 1 From { } FromInclusive: true To { } ToInclusive: false IncludePathColumn: true 2025-03-31T16:18:04.841254Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSysView::TEvSysView::TEvGetPartitionStats 2025-03-31T16:18:04.841399Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7488008696528837585:2424], row count: 2, finished: 1 2025-03-31T16:18:04.841441Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7488008696528837585:2424], owner: [1:7488008696528837581:2422], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2025-03-31T16:18:04.847499Z node 1 :SYSTEM_VIEWS TRACE: Collect query stats: service id# [1:7488008632104327041:2061], database# /Root, query hash# 14960494650040056739, cpu time# 326340 |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TTableProfileTests::OverwritePartitioningPolicy [GOOD] >> TTableProfileTests::OverwriteStoragePolicy >> TColumnEngineTestLogs::IndexTtl [GOOD] >> TestProgram::YqlKernelEquals >> YdbTableBulkUpsert::AsyncIndexShouldSucceed [GOOD] >> TestProgram::YqlKernelEquals [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TColumnEngineTestLogs::IndexReadWithPredicates [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine.h:144;event=RegisterTable;path_id=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=38120;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:49;event=insert_to_cache;key=uint64::0;records=1;size=8; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=38120;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8200;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=6840;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=6840;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=6840;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:1;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:33376;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8008;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8008;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8008;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:2;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36912;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8016;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8016;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8016;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:3;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36944;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8024;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8024;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8024;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:4;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36976;index_size:0;meta:((produced=INSERTED;)););path_id=1; 
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8040;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8040;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8040;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:5;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37024;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8056;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8056;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8056;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING; ... t=portion_selected;pathId=1;portion=(portion_id:1;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:33376;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:2;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36912;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:2;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36912;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:3;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36944;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:3;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36944;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:4;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36976;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:4;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36976;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:5;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37024;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:5;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37024;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:6;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37072;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:6;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37072;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:7;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37136;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:7;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37136;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:8;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37120;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:8;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37120;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:9;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37160;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:9;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37160;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:10;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37088;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:10;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37088;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:11;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37560;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:11;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37560;index_size:0;meta:((produced=INSERTED;));); 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:12;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37488;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:12;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37488;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:13;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37624;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:13;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37624;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:14;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:14;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:15;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:15;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:16;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37624;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:16;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37624;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:17;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:17;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:18;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:18;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:19;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:19;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:20;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37592;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:20;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37592;index_size:0;meta:((produced=INSERTED;));); ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcNewClient::InMemoryTables [GOOD] Test command err: 2025-03-31T16:17:38.622956Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008586844264411:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:38.623128Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d3d/r3tmp/tmpZ01VTC/pdisk_1.dat 2025-03-31T16:17:39.137212Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:39.147663Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:39.147905Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:39.154386Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26265, node 1 2025-03-31T16:17:39.255652Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:39.255679Z node 1 :NET_CLASSIFIER WARN: will try to 
initialize from file: (empty maybe) 2025-03-31T16:17:39.255686Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:39.255804Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4278 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:39.562889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:43.329332Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488008605803607346:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:43.329441Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d3d/r3tmp/tmpSfpQkS/pdisk_1.dat 2025-03-31T16:17:43.637619Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:43.685501Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:43.685563Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:43.688246Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10883, node 4 2025-03-31T16:17:43.783537Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:43.783571Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:43.783607Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:43.783768Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64396 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:44.069977Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:46.853149Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:17:47.000954Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488008618688510445:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:47.000958Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488008618688510453:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:47.001052Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:47.005024Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-31T16:17:47.023950Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7488008618688510459:2347], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-31T16:17:47.115975Z node 4 :TX_PROXY ERROR: Actor# [4:7488008622983477836:2793] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:17:49.019809Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7488008631451912698:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:49.024379Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d3d/r3tmp/tmpanRbGc/pdisk_1.dat 2025-03-31T16:17:49.254598Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:49.285051Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:49.285143Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:49.288305Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7147, node 7 2025-03-31T16:17:49.406642Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:49.406672Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:49.406681Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:49.406845Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15558 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:49.697033Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:17:54.314378Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7488008653775208251:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:54.314492Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d3d/r3tmp/tmpkfi9Fn/pdisk_1.dat 2025-03-31T16:17:54.510214Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:54.560283Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:54.560386Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:54.569832Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31193, node 10 2025-03-31T16:17:54.718406Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:54.718441Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:54.718453Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:54.718605Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23676 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:55.005547Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:57.657353Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-31T16:17:57.768870Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-31T16:17:57.831434Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7488008666660111435:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:57.831527Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7488008666660111446:2359], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:57.831611Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:57.836467Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480 2025-03-31T16:17:57.861697Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7488008666660111449:2360], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-03-31T16:17:57.964388Z node 10 :TX_PROXY ERROR: Actor# [10:7488008666660111528:2899] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:17:58.064849Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqpfxmj5aqgjj9ar35z7n1xs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NDYxYjY1NDEtNTQyM2E2NDQtY2QwN2M1ZGQtMmFhZWI1Y2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:17:58.175859Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 10, TabletId: 72075186224037888 not found 2025-03-31T16:18:00.003894Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7488008674861504127:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:00.004021Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d3d/r3tmp/tmpZHBCcT/pdisk_1.dat 2025-03-31T16:18:00.191989Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29866, node 13 2025-03-31T16:18:00.316201Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:00.316310Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:00.318918Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:18:00.323707Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:00.323731Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:00.323738Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:00.323874Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6016 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:18:00.640680Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:03.764565Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:18:03.894265Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-31T16:18:03.964993Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TColumnEngineTestLogs::IndexTtl [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine.h:144;event=RegisterTable;path_id=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:49;event=insert_to_cache;key=uint64::0;records=1;size=8; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8424;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8424;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8424;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8424;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:1;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38360;index_size:0;meta:((produced=INSERTED;)););path_id=1; 
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8408;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8408;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8408;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8408;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:2;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38296;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8408;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8408;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8408;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8408;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:3;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38296;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8400;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8400;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8400;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8400;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:4;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38264;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8392;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8392;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8392;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8392;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:5;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38232;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING; ... 
ALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:1;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38360;index_size:0;meta:((produced=INSERTED;));remove_snapshot:(plan_step=1;tx_id=1;););visible=0;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:2;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38296;index_size:0;meta:((produced=INSERTED;));remove_snapshot:(plan_step=1;tx_id=1;););visible=0;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:3;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38296;index_size:0;meta:((produced=INSERTED;));remove_snapshot:(plan_step=1;tx_id=1;););visible=0;snapshot=plan_step=3;tx_id=1;; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:4;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38264;index_size:0;meta:((produced=INSERTED;));remove_snapshot:(plan_step=1;tx_id=1;););visible=0;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:5;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38232;index_size:0;meta:((produced=INSERTED;));remove_snapshot:(plan_step=1;tx_id=1;););visible=0;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:6;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38200;index_size:0;meta:((produced=INSERTED;));remove_snapshot:(plan_step=1;tx_id=1;););visible=0;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:7;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38232;index_size:0;meta:((produced=INSERTED;));remove_snapshot:(plan_step=1;tx_id=1;););visible=0;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:8;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38232;index_size:0;meta:((produced=INSERTED;));remove_snapshot:(plan_step=1;tx_id=1;););visible=0;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:9;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38232;index_size:0;meta:((produced=INSERTED;));remove_snapshot:(plan_step=1;tx_id=1;););visible=0;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:10;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38264;index_size:0;meta:((produced=INSERTED;));remove_snapshot:(plan_step=1;tx_id=1;););visible=0;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:11;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38360;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:11;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38360;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:12;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38296;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; 
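[editor's note] The IsVisible traces above make the engine's snapshot rule easy to read off: portions 1-10 carry remove_snapshot:(plan_step=1;tx_id=1;), which is at or before the read snapshot plan_step=3;tx_id=1, so they resolve to visible=0, while portions 11 and up have no remove snapshot and resolve to visible=1. Here is a minimal sketch of that predicate, inferred from these records rather than taken from portion_info.h itself:

#include <cstdint>
#include <optional>
#include <tuple>

// Inferred rule (assumption): a portion is visible at a read snapshot iff it
// was created at or before the snapshot and not removed at or before it.
struct TSnapshot {
    uint64_t PlanStep = 0;
    uint64_t TxId = 0;
    bool operator<=(const TSnapshot& rhs) const {
        return std::tie(PlanStep, TxId) <= std::tie(rhs.PlanStep, rhs.TxId);
    }
};

struct TPortion {
    TSnapshot MinSnapshot;                    // min_schema_snapshot in the log
    std::optional<TSnapshot> RemoveSnapshot;  // remove_snapshot, when present
};

bool IsVisible(const TPortion& p, const TSnapshot& read) {
    const bool created = p.MinSnapshot <= read;
    const bool removed = p.RemoveSnapshot.has_value() && *p.RemoveSnapshot <= read;
    return created && !removed;
}

Plugging in the logged values reproduces the trace: a portion created at (1,1) and removed at (1,1), read at (3,1), yields false; the same creation snapshot with no removal yields true, and only visible portions go on to the portion_selected step in column_engine_logs.cpp.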
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:12;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38296;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:13;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38296;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:13;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38296;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:14;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38264;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:14;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38264;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:15;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38200;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:15;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38200;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:16;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38200;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:16;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38200;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:17;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38232;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:17;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38232;index_size:0;meta:((produced=INSERTED;));); 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:18;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38232;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:18;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38232;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:19;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38232;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:19;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38232;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:20;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38232;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:20;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38232;index_size:0;meta:((produced=INSERTED;));); ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelEquals [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 10 } Arguments { Id: 11 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\004\203\020\203B\213\002\203\014\001\235?\004\001\235?\010\001\006\000\t\211\004?\016\235?\000\001\235?\002\001\006\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\002?\020\235?\006\001\006\000\t\211\006?$\203\005@?\024?\026$BlockFunc\000\003?(\014Equals?\034? \001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 10 } Arguments { Id: 11 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\004\203\020\203B\213\002\203\014\001\235?\004\001\235?\010\001\006\000\t\211\004?\016\235?\000\001\235?\002\001\006\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\002?\020\235?\006\001\006\000\t\211\006?$\203\005@?\024?\026$BlockFunc\000\003?(\014Equals?\034? 
\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:154;graph_constructed=digraph program {N0[shape=box, label="N3(28):{\"i\":\"10,11\",\"o\":\"15\",\"t\":\"Calculation\"}\nREMOVE:10,11"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N1(9):{\"i\":\"10\",\"p\":{\"address\":{\"name\":\"i16\",\"id\":10}},\"o\":\"10\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N2(9):{\"i\":\"11\",\"p\":{\"address\":{\"name\":\"float\",\"id\":11}},\"o\":\"11\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N4(28):{\"i\":\"15\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N5[label="1"]; N6[shape=box, label="N0(4):{\"p\":{\"data\":[{\"name\":\"i16\",\"id\":10},{\"name\":\"float\",\"id\":11}]},\"o\":\"10,11\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N6->N2->N4->N0->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":6}]},{"owner_id":4,"inputs":[{"from":6}]},{"owner_id":5,"inputs":[{"from":0}]},{"owner_id":6,"inputs":[]}],"nodes":{"2":{"p":{"i":"10","p":{"address":{"name":"i16","id":10}},"o":"10","t":"AssembleOriginalData"},"w":9,"id":2},"6":{"p":{"p":{"data":[{"name":"i16","id":10},{"name":"float","id":11}]},"o":"10,11","t":"FetchOriginalData"},"w":4,"id":6},"5":{"p":{"i":"15","t":"Projection"},"w":28,"id":5},"4":{"p":{"i":"11","p":{"address":{"name":"float","id":11}},"o":"11","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"10,11","o":"15","t":"Calculation"},"w":28,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9Int16TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9FloatTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9Int16TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9FloatTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9Int16TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9FloatTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9Int16TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9FloatTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9Int16TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9FloatTypeE; digraph program {N0[shape=box, label="N3(28):{\"i\":\"10,11\",\"o\":\"15\",\"t\":\"Calculation\"}\nREMOVE:10,11"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N1(9):{\"i\":\"10\",\"p\":{\"address\":{\"name\":\"i16\",\"id\":10}},\"o\":\"10\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N2(9):{\"i\":\"11\",\"p\":{\"address\":{\"name\":\"float\",\"id\":11}},\"o\":\"11\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N4(28):{\"i\":\"15\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N5[label="1"]; N6[shape=box, 
label="N0(4):{\"p\":{\"data\":[{\"name\":\"i16\",\"id\":10},{\"name\":\"float\",\"id\":11}]},\"o\":\"10,11\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N6->N2->N4->N0->N5[color=red]; } FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9UInt8TypeE; >> Cdc::KeysOnlyLog[PqRunner] [GOOD] >> Cdc::KeysOnlyLog[YdsRunner] >> TColumnEngineTestLogs::IndexReadWithPredicatesStrPK |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> BasicStatistics::ServerlessGlobalIndex [GOOD] >> Cdc::UuidExchange[PqRunner] [GOOD] >> Cdc::UuidExchange[YdsRunner] >> TColumnEngineTestLogs::IndexWriteLoadRead [GOOD] >> TestScript::StepMerging [GOOD] |88.2%| [TA] $(B)/ydb/core/kqp/ut/sysview/test-results/unittest/{meta.json ... results_accumulator.log} |88.2%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/sysview/test-results/unittest/{meta.json ... results_accumulator.log} >> BasicStatistics::Serverless [GOOD] >> TColumnEngineTestLogs::IndexReadWithPredicatesStrPK [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TColumnEngineTestLogs::IndexWriteLoadRead [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=3912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine.h:144;event=RegisterTable;path_id=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine.h:144;event=RegisterTable;path_id=2; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:49;event=insert_to_cache;key=uint64::0;records=1;size=8; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=3912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=3912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=1072;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=760;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=760;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=760;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:1;path_id:1;records_count:100;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:6184;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:1;path_id:1;records_count:100;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:6184;index_size:0;meta:((produced=INSERTED;)););visible=0;snapshot=plan_step=1;tx_id=0;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:1;path_id:1;records_count:100;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:6184;index_size:0;meta:((produced=INSERTED;)););visible=0;snapshot=plan_step=1;tx_id=2;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:1;path_id:1;records_count:100;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:6184;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=2;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:1;path_id:1;records_count:100;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:6184;index_size:0;meta:((produced=INSERTED;));); |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbTableBulkUpsert::RetryOperation [GOOD] Test command err: 2025-03-31T16:17:36.970746Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008578321469189:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:36.970867Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d40/r3tmp/tmpMYROXQ/pdisk_1.dat 2025-03-31T16:17:37.463818Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:37.468205Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:37.468312Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:37.476664Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9546, node 1 2025-03-31T16:17:37.599743Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use 
file: (empty maybe) 2025-03-31T16:17:37.599768Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:37.599796Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:37.599904Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14855 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:37.920379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:40.220000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 SUCCESS 3 rows in 0.021886s 2025-03-31T16:17:40.749478Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008595501341393:2437], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:40.749630Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:40.749940Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008595501341405:2440], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:40.754205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-31T16:17:40.783823Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008595501341407:2441], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-31T16:17:40.851105Z node 1 :TX_PROXY ERROR: Actor# [1:7488008595501341538:4245] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:17:41.650362Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jqpfx3wbb061y5x0cpk3ppjm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmRhOWI0MzctOWE4YmI2YmUtNTRmYzZhMDUtNDg1YTdhMDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 3 rows 2025-03-31T16:17:43.302137Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488008605364468168:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:43.303829Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d40/r3tmp/tmp0YeURa/pdisk_1.dat 2025-03-31T16:17:43.491300Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:43.524035Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:43.524118Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:43.530544Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31553, node 4 2025-03-31T16:17:43.685432Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:43.685457Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:43.685466Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:43.685602Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27824 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:43.911595Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:17:46.409001Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/ui8' Only async-indexed tables are supported by BulkUpsert
: Error: Bulk upsert to table '/Root/ui8/Value_index/indexImplTable' unknown table 2025-03-31T16:17:47.973964Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7488008623309773935:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:47.974047Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d40/r3tmp/tmpMtdsj8/pdisk_1.dat 2025-03-31T16:17:48.162940Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10180, node 7 2025-03-31T16:17:48.310424Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:48.310519Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:48.321399Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:17:48.343626Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:48.343666Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:48.343673Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:48.343818Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24653 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:48.615377Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:51.269615Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 1 usec
: Error: Bulk upsert to table '/Root/ui32' longTx ydb://long-tx/read-only timed out, duration: 0 sec 2 usec
: Error: Bulk upsert to table '/Root/ui32' longTx ydb://long-tx/read-only timed out, duration: 0 sec 4 usec
: Error: Bulk upsert to table '/Root/ui32' longTx ydb://long-tx/read-only timed out, duration: 0 sec 8 ... Error: Bulk upsert to table '/Root/ui32' longTx ydb://long-tx/read-only timed out, duration: 0 sec 2048 usec
: Error: Bulk upsert to table '/Root/ui32' longTx ydb://long-tx/read-only timed out, duration: 0 sec 4096 usec
: Error: Bulk upsert to table '/Root/ui32' longTx ydb://long-tx/read-only timed out, duration: 0 sec 8192 usec
: Error: Bulk upsert to table '/Root/ui32' Deadline exceeded 16384 usec
: Error: Bulk upsert to table '/Root/ui32' longTx ydb://long-tx/read-only timed out, duration: 0 sec 32768 usec
: Error: Bulk upsert to table '/Root/ui32' Deadline exceeded 65536 usec 2025-03-31T16:17:53.951922Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7488008648212322530:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:53.951978Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d40/r3tmp/tmp6JJrZM/pdisk_1.dat 2025-03-31T16:17:54.159375Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11559, node 10 2025-03-31T16:17:54.280654Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:54.280757Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:54.280812Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:54.280823Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:54.280831Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:54.280957Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:17:54.284005Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13334 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:54.588641Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
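[editor's note] The "1 usec ... 65536 usec" ladder above is the test sweeping BulkUpsert deadlines upward by powers of two until an attempt stops timing out. A compact sketch of that doubling loop, reconstructed from the printed values only (the RPC is stubbed out; BulkUpsert here is a hypothetical placeholder, not the SDK call signature):

#include <chrono>
#include <cstdio>

int main() {
    using std::chrono::microseconds;
    // Deadlines observed in the log: 1, 2, 4, ..., 65536 usec.
    for (microseconds deadline{1}; deadline <= microseconds{65536}; deadline *= 2) {
        std::printf("%lld usec\n", static_cast<long long>(deadline.count()));
        // Hypothetical attempt with this deadline; a real sweep would stop
        // doubling once the upsert completes within the budget:
        // if (BulkUpsert(table, rows, deadline).ok()) break;
    }
    return 0;
}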
2025-03-31T16:17:54.604032Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:17:57.151175Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Injecting ABORTED 10 times Result: ABORTED Injecting ABORTED 6 times Result: ABORTED Injecting ABORTED 5 times Result: SUCCESS Injecting ABORTED 3 times Result: SUCCESS Injecting ABORTED 0 times Result: SUCCESS Injecting OVERLOADED 10 times Result: OVERLOADED Injecting OVERLOADED 6 times Result: OVERLOADED Injecting OVERLOADED 5 times Result: SUCCESS Injecting OVERLOADED 3 times Result: SUCCESS Injecting OVERLOADED 0 times Result: SUCCESS Injecting CLIENT_RESOURCE_EXHAUSTED 10 times Result: CLIENT_RESOURCE_EXHAUSTED Injecting CLIENT_RESOURCE_EXHAUSTED 6 times Result: CLIENT_RESOURCE_EXHAUSTED Injecting CLIENT_RESOURCE_EXHAUSTED 5 times Result: SUCCESS Injecting CLIENT_RESOURCE_EXHAUSTED 3 times Result: SUCCESS Injecting CLIENT_RESOURCE_EXHAUSTED 0 times Result: SUCCESS Injecting UNAVAILABLE 10 times Result: UNAVAILABLE Injecting UNAVAILABLE 6 times Result: UNAVAILABLE Injecting UNAVAILABLE 5 times Result: SUCCESS Injecting UNAVAILABLE 3 times Result: SUCCESS Injecting UNAVAILABLE 0 times Result: SUCCESS Injecting BAD_SESSION 10 times Result: BAD_SESSION Injecting BAD_SESSION 6 times Result: BAD_SESSION Injecting BAD_SESSION 5 times Result: SUCCESS Injecting BAD_SESSION 3 times Result: SUCCESS Injecting BAD_SESSION 0 times Result: SUCCESS Injecting SESSION_BUSY 10 times Result: SESSION_BUSY Injecting SESSION_BUSY 6 times Result: SESSION_BUSY Injecting SESSION_BUSY 5 times Result: SUCCESS Injecting SESSION_BUSY 3 times Result: SUCCESS Injecting SESSION_BUSY 0 times Result: SUCCESS Injecting NOT_FOUND 10 times Result: NOT_FOUND Injecting NOT_FOUND 6 times Result: NOT_FOUND Injecting NOT_FOUND 5 times Result: SUCCESS Injecting NOT_FOUND 3 times Result: SUCCESS Injecting NOT_FOUND 0 times Result: SUCCESS Injecting UNDETERMINED 10 times Result: UNDETERMINED Injecting UNDETERMINED 6 times Result: UNDETERMINED Injecting UNDETERMINED 5 times Result: SUCCESS Injecting UNDETERMINED 3 times Result: SUCCESS Injecting UNDETERMINED 0 times Result: SUCCESS Injecting TRANSPORT_UNAVAILABLE 10 times Result: TRANSPORT_UNAVAILABLE Injecting TRANSPORT_UNAVAILABLE 6 times Result: TRANSPORT_UNAVAILABLE Injecting TRANSPORT_UNAVAILABLE 5 times Result: SUCCESS Injecting TRANSPORT_UNAVAILABLE 3 times Result: SUCCESS Injecting TRANSPORT_UNAVAILABLE 0 times Result: SUCCESS 2025-03-31T16:18:00.010455Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7488008679567286511:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:00.010515Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d40/r3tmp/tmplgh4Vs/pdisk_1.dat 2025-03-31T16:18:00.141480Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:00.182553Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:00.182657Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
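[editor's note] The "Injecting <STATUS> N times Result: ..." matrix above exercises the retry policy against every retryable status: five injected failures are absorbed and the operation reports SUCCESS, while six or ten exhaust the budget and the injected status surfaces to the caller. A minimal model of that contract, with the attempt budget inferred from the 5-pass/6-fail boundary (an assumption about the policy's configuration, not the test's actual code):

#include <cstdio>
#include <string>

constexpr int kMaxAttempts = 6;  // inferred: 5 injected failures still succeed

std::string UpsertWithRetries(int injectedFailures, const std::string& status) {
    for (int attempt = 1; attempt <= kMaxAttempts; ++attempt) {
        if (attempt > injectedFailures)
            return "SUCCESS";  // injector drained, this attempt goes through
    }
    return status;  // budget exhausted: the injected status reaches the caller
}

int main() {
    for (int n : {10, 6, 5, 3, 0})
        std::printf("Injecting ABORTED %d times Result: %s\n", n,
                    UpsertWithRetries(n, "ABORTED").c_str());
    return 0;
}

Run against the log's ABORTED row this prints the same five results: ABORTED, ABORTED, SUCCESS, SUCCESS, SUCCESS.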
2025-03-31T16:18:00.186398Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18734, node 13 2025-03-31T16:18:00.275765Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:00.275793Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:00.275803Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:00.275956Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10325 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:00.585912Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:18:03.748385Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Injecting ABORTED 10 times Result: ABORTED Injecting ABORTED 6 times Result: ABORTED Injecting ABORTED 5 times Result: SUCCESS Injecting ABORTED 3 times Result: SUCCESS Injecting ABORTED 0 times Result: SUCCESS Injecting OVERLOADED 10 times Result: OVERLOADED Injecting OVERLOADED 6 times Result: OVERLOADED Injecting OVERLOADED 5 times Result: SUCCESS Injecting OVERLOADED 3 times Result: SUCCESS Injecting OVERLOADED 0 times Result: SUCCESS Injecting CLIENT_RESOURCE_EXHAUSTED 10 times Result: CLIENT_RESOURCE_EXHAUSTED Injecting CLIENT_RESOURCE_EXHAUSTED 6 times Result: CLIENT_RESOURCE_EXHAUSTED Injecting CLIENT_RESOURCE_EXHAUSTED 5 times Result: SUCCESS Injecting CLIENT_RESOURCE_EXHAUSTED 3 times Result: SUCCESS Injecting CLIENT_RESOURCE_EXHAUSTED 0 times Result: SUCCESS Injecting UNAVAILABLE 10 times Result: UNAVAILABLE Injecting UNAVAILABLE 6 times Result: UNAVAILABLE Injecting UNAVAILABLE 5 times Result: SUCCESS Injecting UNAVAILABLE 3 times Result: SUCCESS Injecting UNAVAILABLE 0 times Result: SUCCESS Injecting BAD_SESSION 10 times Result: BAD_SESSION Injecting BAD_SESSION 6 times Result: BAD_SESSION Injecting BAD_SESSION 5 times Result: SUCCESS Injecting BAD_SESSION 3 times Result: SUCCESS Injecting BAD_SESSION 0 times Result: SUCCESS Injecting SESSION_BUSY 10 times Result: SESSION_BUSY Injecting SESSION_BUSY 6 times Result: SESSION_BUSY Injecting SESSION_BUSY 5 times Result: SUCCESS Injecting SESSION_BUSY 3 times Result: SUCCESS Injecting SESSION_BUSY 0 times Result: SUCCESS Injecting NOT_FOUND 10 times Result: NOT_FOUND Injecting NOT_FOUND 6 times Result: NOT_FOUND Injecting NOT_FOUND 5 times Result: SUCCESS Injecting NOT_FOUND 3 times Result: SUCCESS Injecting NOT_FOUND 0 times Result: SUCCESS Injecting UNDETERMINED 10 times Result: UNDETERMINED Injecting UNDETERMINED 6 times Result: UNDETERMINED Injecting UNDETERMINED 5 times Result: SUCCESS Injecting UNDETERMINED 3 times Result: SUCCESS Injecting UNDETERMINED 0 times Result: SUCCESS Injecting TRANSPORT_UNAVAILABLE 10 times Result: TRANSPORT_UNAVAILABLE Injecting TRANSPORT_UNAVAILABLE 6 times Result: TRANSPORT_UNAVAILABLE Injecting TRANSPORT_UNAVAILABLE 5 times Result: SUCCESS Injecting TRANSPORT_UNAVAILABLE 3 times Result: SUCCESS Injecting TRANSPORT_UNAVAILABLE 0 times Result: SUCCESS 2025-03-31T16:18:05.010593Z node 13 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7488008679567286511:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:05.010671Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeDir [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeBlockStoreVolume [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeColumnStore [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeColumnTable [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeCdcStream |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestScript::StepMerging [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbTableBulkUpsert::AsyncIndexShouldSucceed [GOOD] Test command err: 
2025-03-31T16:17:40.066842Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008592080026028:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:40.066911Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d38/r3tmp/tmpjFY7ks/pdisk_1.dat 2025-03-31T16:17:40.484290Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:40.484389Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:40.488781Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:40.506300Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5386, node 1 2025-03-31T16:17:40.703593Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:40.703636Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:40.703644Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:40.703757Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28796 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:41.142511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:43.125763Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008604964928942:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:43.125853Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:43.577941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:17:43.725398Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008604964929123:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:43.725499Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:43.725823Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008604964929128:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:43.728711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-31T16:17:43.744599Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008604964929130:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-31T16:17:43.821723Z node 1 :TX_PROXY ERROR: Actor# [1:7488008604964929207:2813] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:17:43.881847Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jqpfx4c5bams934wc1ntsesz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2U4OWNjMTItZTc1YzYzMmMtZmUxY2I5NS03NjYyNDczMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:17:43.981927Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01jqpfx6z19dnx9ge5s789bd7j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTljOGNjYzQtNTI0YjYwOWEtNzgzMWZjM2MtZTY3NDNjZDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:17:43.987807Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437864021, txId: 281474976710662] shutting down 2025-03-31T16:17:45.498778Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488008615158724054:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:45.499259Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d38/r3tmp/tmpWXUk6b/pdisk_1.dat 2025-03-31T16:17:45.673913Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:45.699327Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:45.699408Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:45.707342Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18843, node 4 2025-03-31T16:17:45.816550Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:45.816576Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:45.816582Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:45.816715Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4438 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:46.076954Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:48.529554Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData' Invalid Decimal(22,9) value BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData' Invalid Date value BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData' Invalid Datetime value BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData' Invalid Timestamp value BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData' Invalid Interval value CLIENT_INTERNAL_ERROR
: Error: GRpc error: (13): Unable to parse request
: Error: Grpc error response on endpoint localhost:18843 BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData' Invalid Yson value BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData' Invalid Json value BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData' Invalid JSON for JsonDocument provided: TAPE_ERROR: The JSON document has an improper structure: missing or superfluous commas, braces, missing keys, etc. BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData' Invalid DyNumber string representation 2025-03-31T16:17:50.592784Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7488008636122870904:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:50.593725Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d38/r3tmp/tmp0GHojI/pdisk_1.dat 2025-03-31T16:17:50.794752Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:50.834366Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:50.834459Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:50.837737Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22877, node 7 2025-03-31T16:17:50.955247Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:50.955272Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:50.955279Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:50.955425Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1261 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:51.219979Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:53.939052Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:17:54.048145Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488008653302741305:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:54.048208Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488008653302741297:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:54.048364Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:54.051943Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-31T16:17:54.079694Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7488008653302741311:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-31T16:17:54.182899Z node 7 :TX_PROXY ERROR: Actor# [7:7488008653302741388:2806] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:17:54.447510Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jqpfxgvyear5nx1w35a2hxv9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OWE4ZDU0MDUtNDZmMmZiYWEtNjAyNzdjNzItY2M5YWMyMmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:17:55.971530Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7488008658343942841:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:55.971601Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d38/r3tmp/tmpEkTDG8/pdisk_1.dat 2025-03-31T16:17:56.150997Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:56.160218Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:56.160311Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:56.166986Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12946, node 10 2025-03-31T16:17:56.290651Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:56.290676Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:56.290684Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:56.290813Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14533 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
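For context: the recurring sequence above — `Failed to fetch pool info, NOT_FOUND` → `ESchemeOpCreateResourcePool` → `Scheduled retry for error: { Error: Transaction ... completed, doublechecking }` → `Check failed: path ... error: path exist, request accepts it` — is the workload service lazily creating the default resource pool. Several actors race to create it, and the losers fail benignly because the create request tolerates an existing path. A minimal C++ sketch of that idempotent create-with-retry pattern, under assumed status names (an illustration, not YDB's actual workload-service code):

```cpp
#include <functional>

// Status names are assumptions for illustration only.
enum class EStatus { Ok, NotFound, AlreadyExists, Retriable };

// Sketch of an idempotent "ensure default pool exists" step.
EStatus EnsureDefaultPool(const std::function<EStatus()>& fetchPool,
                          const std::function<EStatus()>& createPool,
                          int maxRetries = 3) {
    for (int attempt = 0; attempt <= maxRetries; ++attempt) {
        if (fetchPool() == EStatus::Ok) {
            return EStatus::Ok;                    // pool already visible
        }
        const EStatus created = createPool();
        if (created == EStatus::Ok || created == EStatus::AlreadyExists) {
            // "path exist, request accepts it": a concurrent creator won
            // the race, which an idempotent create treats as success.
            return EStatus::Ok;
        }
        // "Scheduled retry for error": the create transaction may still
        // be completing; fall through and doublecheck on the next pass.
    }
    return EStatus::Retriable;
}
```

Read this way, the TX_PROXY ERROR lines in these runs look like expected create races rather than test failures, which is consistent with the tests still passing.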
2025-03-31T16:17:56.674517Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:17:59.157007Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
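The BAD_REQUEST issues earlier in this block ("Invalid Decimal(22,9) value", "Invalid Date value", "Invalid Datetime value", ..., "Invalid DyNumber string representation") come from per-cell validation of the bulk-upsert payload. As one concrete example of the kind of bound check involved: a Decimal(22,9) cell carries at most 22 significant digits with 9 behind the point, so its scaled integer representation must stay below 10^22. A schematic sketch of that check (not the actual YDB validation path; it uses the GCC/Clang `__int128` extension):

```cpp
#include <optional>
#include <string>

// Schematic bound check for a Decimal(22,9) cell: at most 22 significant
// digits in the scaled integer representation, i.e. |v| < 10^22.
// Illustration only, not YDB's actual validation code.
std::optional<std::string> ValidateDecimal22_9(__int128 scaledValue) {
    __int128 bound = 1;
    for (int i = 0; i < 22; ++i) {
        bound *= 10;                   // bound = 10^22
    }
    if (scaledValue <= -bound || scaledValue >= bound) {
        return std::string("Invalid Decimal(22,9) value");  // -> BAD_REQUEST
    }
    return std::nullopt;               // cell accepted
}
```

The Date/Datetime/Timestamp/Interval/Yson/Json/JsonDocument/DyNumber issues above are the analogous parse or range failures for their respective types.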
: Error: Bulk upsert to table '/Root/ui8' Only async-indexed tables are supported by BulkUpsert
: Error: Bulk upsert to table '/Root/ui8/Value_index/indexImplTable' unknown table 2025-03-31T16:18:00.917571Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7488008678645841277:2180];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:00.919029Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d38/r3tmp/tmpFFEXiM/pdisk_1.dat 2025-03-31T16:18:01.022027Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:01.057423Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:01.057503Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:01.061560Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25469, node 13 2025-03-31T16:18:01.117700Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:01.117719Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:01.117724Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:01.117854Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5086 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:01.318568Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:04.088981Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:18:05.252414Z node 13 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
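Two distinct rejections appear here. BulkUpsert bypasses the distributed-transaction write path, so it cannot atomically maintain synchronous secondary indexes — hence "Only async-indexed tables are supported by BulkUpsert". And the hidden index implementation table (`.../Value_index/indexImplTable`) is not a client-addressable table, so writing to it directly reports "unknown table". A guard-clause sketch of those two checks, inferred from the messages rather than taken from the implementation:

```cpp
#include <string>
#include <vector>

enum class EIndexKind { Sync, Async };

struct TTableInfo {
    bool IsIndexImplTable = false;       // hidden .../indexImplTable
    std::vector<EIndexKind> Indexes;
};

// Returns an error message in the spirit of the log, or "" if accepted.
// Inferred guard logic for illustration; not the actual YDB check.
std::string CheckBulkUpsertTarget(const TTableInfo& table,
                                  const std::string& path) {
    if (table.IsIndexImplTable) {
        // Index implementation tables are invisible to client writes.
        return "Bulk upsert to table '" + path + "' unknown table";
    }
    for (EIndexKind kind : table.Indexes) {
        if (kind == EIndexKind::Sync) {
            // No surrounding transaction => a sync index could not be
            // kept consistent with the main table.
            return "Bulk upsert to table '" + path +
                   "' Only async-indexed tables are supported by BulkUpsert";
        }
    }
    return {};
}
```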
: Error: Bulk upsert to table '/Root/ui8/Value_index/indexImplTable' unknown table >> TestProgram::NumRowsWithNulls |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TColumnEngineTestLogs::IndexReadWithPredicatesStrPK [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine.h:144;event=RegisterTable;path_id=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=38120;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:49;event=insert_to_cache;key=uint64::0;records=1;size=8; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=38120;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=6840;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=6840;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=6840;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:1;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:33376;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4192;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8008;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8008;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8008;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:2;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36912;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8016;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8016;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8016;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:3;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36944;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5; 
FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8024;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8024;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8024;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:4;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36976;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8040;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8040;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8040;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:5;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37024;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8056;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8056;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8056;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING; ... 
t=portion_selected;pathId=1;portion=(portion_id:1;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:33376;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:2;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36912;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:2;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36912;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:3;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36944;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:3;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36944;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:4;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36976;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:4;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36976;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:5;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37024;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:5;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37024;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:6;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37072;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:6;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37072;index_size:0;meta:((produced=INSERTED;));); 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:7;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37136;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:7;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37136;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:8;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37120;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:8;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37120;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:9;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37160;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:9;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37160;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:10;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37088;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:10;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37088;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:11;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37560;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:11;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37560;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:12;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37488;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:12;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37488;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:13;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37624;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:13;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37624;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:14;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:14;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:15;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:15;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:16;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37624;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:16;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37624;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:17;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:17;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;));); 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:18;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:18;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:19;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:19;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:20;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37592;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:20;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37592;index_size:0;meta:((produced=INSERTED;));); |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> YdbOlapStore::LogNonExistingUserId [GOOD] >> TestProgram::NumRowsWithNulls [GOOD] >> YdbOlapStore::LogPagingBefore >> TColumnEngineTestInsertTable::TestInsertCommit [GOOD] |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TColumnEngineTestLogs::IndexWriteLoadReadStrPK [GOOD] |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::ServerlessGlobalIndex [GOOD] Test command err: 2025-03-31T16:15:06.986677Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:06.986961Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:15:06.987035Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d1e/r3tmp/tmp6XM8V6/pdisk_1.dat 2025-03-31T16:15:07.531523Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30717, node 1 2025-03-31T16:15:07.963258Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:15:07.963322Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:15:07.963352Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:15:07.965872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:15:07.966361Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:15:08.119922Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:15:08.120075Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:15:08.140785Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8570 2025-03-31T16:15:08.790334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:15:12.356642Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-31T16:15:12.396070Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:15:12.396243Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:15:12.429754Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:15:12.432296Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:15:12.715367Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:12.715924Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:12.716426Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:12.716536Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:12.716778Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:12.716887Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:12.716947Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:12.717049Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:12.717108Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:12.909007Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:15:12.909136Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:15:12.924678Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:15:13.088338Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:15:13.145967Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-31T16:15:13.146095Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-31T16:15:13.179890Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-31T16:15:13.180595Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-31T16:15:13.180874Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-31T16:15:13.180949Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-31T16:15:13.181001Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-31T16:15:13.181058Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-31T16:15:13.181118Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-31T16:15:13.181177Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-31T16:15:13.181708Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-31T16:15:13.209396Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-31T16:15:13.209542Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1872:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-31T16:15:13.271618Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2608] 2025-03-31T16:15:13.295814Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1908:2617] 2025-03-31T16:15:13.296457Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1908:2617], schemeshard id = 72075186224037897 2025-03-31T16:15:13.298941Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-03-31T16:15:13.330934Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-31T16:15:13.330993Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-31T16:15:13.331057Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-03-31T16:15:13.347559Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-31T16:15:13.355816Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-31T16:15:13.355972Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-31T16:15:13.565334Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-31T16:15:13.793298Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-31T16:15:13.929313Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-31T16:15:14.895401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-31T16:15:15.804053Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:15:16.030920Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2025-03-31T16:15:16.030989Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037899 2025-03-31T16:15:16.031133Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2596:2947], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037899 2025-03-31T16:15:16.032910Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2604:2951] 2025-03-31T16:15:16.033530Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2604:2951], schemeshard id = 72075186224037899 2025-03-31T16:15:17.398780Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2727:3248], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:17.398954Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:17.420915Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72075186224037899 2025-03-31T16:15:17.759450Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2957:3293], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:17.759609Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:17.812554Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2962:3297]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:15:17.812876Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-31T16:15:17.813099Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2025-03-31T16:15:17.813168Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2965:3300] 2025-03-31T16:15:17.813254Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2965:3300] 2025-03-31T16:15:17.813956Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2966:3135] 2025-03-31T16:15:17.814357Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2965:3300], server id = [2:2966:3135], tablet id = 72075186224037894, status = OK 2025-03-31T16:15:17.814583Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:2966:3135], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-03-31T16:15:17.814657Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-03-31T16:15:17.814963Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-31T16:15:17.815052Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2962:3297], StatRequests.size() = 1 2025-03-31T16:15:17.824397Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2999:3309]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:15:17.824570Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] Re ... .000000s, at schemeshard: 72075186224037897 2025-03-31T16:17:58.107192Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037897, stats size# 25 2025-03-31T16:17:58.126716Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-03-31T16:17:58.178795Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 118 ], ReplyToActorId[ [2:7855:5566]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:17:58.179118Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 118 ] 2025-03-31T16:17:58.179175Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 118, ReplyToActorId = [2:7855:5566], StatRequests.size() = 1 2025-03-31T16:17:59.524722Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:17:59.524813Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-31T16:17:59.524875Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table. 2025-03-31T16:17:59.524928Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-03-31T16:17:59.525255Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Shared 2025-03-31T16:17:59.540139Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-03-31T16:17:59.544590Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7893:5593], DatabaseId: /Root/Shared, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:59.544729Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7903:5598], DatabaseId: /Root/Shared, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:59.545347Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Shared, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:59.558914Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897 2025-03-31T16:17:59.622177Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7907:5601], DatabaseId: /Root/Shared, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-03-31T16:17:59.728661Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 119 ], ReplyToActorId[ [2:8004:5650]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:17:59.729004Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 119 ] 2025-03-31T16:17:59.729055Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 119, ReplyToActorId = [2:8004:5650], StatRequests.size() = 1 2025-03-31T16:17:59.819657Z node 2 :TX_PROXY ERROR: Actor# [2:8009:5652] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Shared/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:17:59.865936Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 120 ], ReplyToActorId[ [2:8038:5667]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:17:59.866236Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 120 ] 2025-03-31T16:17:59.866532Z node 2 :STATISTICS DEBUG: [72075186224037894] EvRequestStats, node id = 2, schemeshard count = 1, urgent = 0 2025-03-31T16:17:59.866614Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-03-31T16:17:59.866852Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-31T16:17:59.866929Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 120, ReplyToActorId = [2:8038:5667], StatRequests.size() = 1 2025-03-31T16:18:00.002373Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=OTFhMmM5Y2UtMjY4MDc2YTMtOWNjMDNjMTYtMzZhOGU0M2U=, TxId: 2025-03-31T16:18:00.002469Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OTFhMmM5Y2UtMjY4MDc2YTMtOWNjMDNjMTYtMzZhOGU0M2U=, TxId: 2025-03-31T16:18:00.003249Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-31T16:18:00.025157Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-03-31T16:18:00.025240Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 
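The statistics aggregator's traversal loop is visible in the DEBUG lines around here: `ScheduleNextTraversal` first looks for force traversals, then takes the next scheduled path, and skips plain datashard tables ("Skip traversal for datashard table ...") because only column tables need a scan-based traversal. A sketch of that scheduling decision, reconstructed from the log rather than from the source:

```cpp
#include <deque>
#include <optional>

// Reconstructed from the log for illustration; not the actual service code.
struct TPath {
    bool IsColumnTable = false;   // OwnerId/LocalPathId elided
};

std::optional<TPath> ScheduleNextTraversal(std::deque<TPath>& forced,
                                           std::deque<TPath>& scheduled) {
    // "ScheduleNextTraversal. No force traversals." => fall back to the
    // regularly scheduled queue.
    auto& queue = !forced.empty() ? forced : scheduled;
    while (!queue.empty()) {
        TPath next = queue.front();
        queue.pop_front();
        if (next.IsColumnTable) {
            return next;          // start a scan-based traversal
        }
        // "Skip traversal for datashard table [OwnerId: ..., LocalPathId: ...]"
    }
    return std::nullopt;          // nothing to traverse this tick
}
```

Note that in the log the skipped datashard paths still get their stale `.metadata/_statistics` rows cleared via the TQueryBase DELETE, which this sketch omits.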
2025-03-31T16:18:00.095766Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-03-31T16:18:00.095841Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-03-31T16:18:00.163839Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:3237:3178], schemeshard count = 1 2025-03-31T16:18:00.496166Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 2, at schemeshard: 72075186224037899 2025-03-31T16:18:00.496241Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 231.000000s, at schemeshard: 72075186224037899 2025-03-31T16:18:00.496558Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037899, stats size# 50 2025-03-31T16:18:00.513042Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-03-31T16:18:01.554911Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 121 ], ReplyToActorId[ [2:8108:5712]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:18:01.555337Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 121 ] 2025-03-31T16:18:01.555392Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 121, ReplyToActorId = [2:8108:5712], StatRequests.size() = 1 2025-03-31T16:18:02.991297Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-31T16:18:03.002473Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:18:03.002541Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-31T16:18:03.002583Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 4] is data table. 2025-03-31T16:18:03.002618Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037899, LocalPathId: 4] 2025-03-31T16:18:03.002954Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2025-03-31T16:18:03.006056Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-03-31T16:18:03.023928Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NWMyN2YzNDctNmQ3MjM2YzYtZTYxMzI1NDEtZjlhOTViNzk=, TxId: 2025-03-31T16:18:03.023996Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NWMyN2YzNDctNmQ3MjM2YzYtZTYxMzI1NDEtZjlhOTViNzk=, TxId: 2025-03-31T16:18:03.024550Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-31T16:18:03.039473Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037899, LocalPathId: 4] 2025-03-31T16:18:03.039536Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 
2025-03-31T16:18:03.097570Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 122 ], ReplyToActorId[ [2:8179:5755]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:18:03.097894Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 122 ] 2025-03-31T16:18:03.097936Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 122, ReplyToActorId = [2:8179:5755], StatRequests.size() = 1 2025-03-31T16:18:04.673453Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 123 ], ReplyToActorId[ [2:8231:5786]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:18:04.673724Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 123 ] 2025-03-31T16:18:04.673767Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 123, ReplyToActorId = [2:8231:5786], StatRequests.size() = 1 2025-03-31T16:18:05.960331Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 2 2025-03-31T16:18:05.960890Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-31T16:18:05.961320Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-31T16:18:05.972909Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:18:05.972971Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-31T16:18:05.973016Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 2] is data table. 2025-03-31T16:18:05.973049Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037899, LocalPathId: 2] 2025-03-31T16:18:05.973363Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2025-03-31T16:18:05.976428Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-03-31T16:18:05.990160Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZmYyYmU5MjEtN2M3ZTZmM2EtMmVjOTg4YzMtNzZhYmVmZGI=, TxId: 2025-03-31T16:18:05.990230Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZmYyYmU5MjEtN2M3ZTZmM2EtMmVjOTg4YzMtNzZhYmVmZGI=, TxId: 2025-03-31T16:18:05.990838Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-31T16:18:06.006137Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037899, LocalPathId: 2] 2025-03-31T16:18:06.006194Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 
2025-03-31T16:18:06.091367Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 124 ], ReplyToActorId[ [2:8297:5824]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:18:06.091716Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 124 ] 2025-03-31T16:18:06.091760Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 124, ReplyToActorId = [2:8297:5824], StatRequests.size() = 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TColumnEngineTestInsertTable::TestInsertCommit [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:49;event=insert_to_cache;key=uint64::0;records=1;size=8; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=insert_table.cpp:43;event=commit_insertion;path_id=0;blob_range={ Blob: DS:0:[2222:1:1:2:100:1:0] Offset: 0 Size: 0 }; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=152;columns=1; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::NumRowsWithNulls [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 10001 } Function { Id: 7 Arguments { Id: 2 } } } } Command { Filter { Predicate { Id: 10001 } } } Command { GroupBy { Aggregates { Column { Id: 10002 } Function { Id: 2 } } } } Command { Projection { Columns { Id: 10002 } } } ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 10001 } Function { Id: 7 Arguments { Id: 2 } } } } Command { Filter { Predicate { Id: 10001 } } } Command { GroupBy { Aggregates { Column { Id: 10002 } Function { Id: 2 } } } } Command { Projection { Columns { Id: 10002 } } } ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:154;graph_constructed=digraph program {N0[shape=box, label="N2(17):{\"i\":\"2\",\"o\":\"10001\",\"t\":\"Calculation\"}\nREMOVE:2"]; N2 -> N0[label="1"]; N1[shape=box, label="N0(2):{\"p\":{\"data\":[{\"name\":\"uid\",\"id\":2}]},\"o\":\"2\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N2[shape=box, label="N1(7):{\"i\":\"2\",\"p\":{\"address\":{\"name\":\"uid\",\"id\":2}},\"o\":\"2\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N1 -> N2[label="1"]; N3[shape=box, label="N3(17):{\"i\":\"10001\",\"t\":\"Filter\"}\nREMOVE:10001",style=filled,color="#FFAAAA"]; N0 -> N3[label="1"]; N4[shape=box, label="N4(10):{\"o\":\"10002\",\"t\":\"Calculation\"}\n"]; N5[shape=box, label="N5(10):{\"i\":\"10002\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N4 -> N5[label="1"]; N1->N2->N0->N3->N4->N5[color=red]; }; 
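The graph_constructed payload above (everything from "digraph program {" through the matching closing brace) is ordinary Graphviz DOT, with the chosen execution order drawn as the red edge chain. A small sketch for rendering it, assuming the Graphviz dot binary is installed; the file names are arbitrary:

    import subprocess

    # Replace the placeholder with the DOT text copied from the
    # graph_constructed log entry above.
    dot_text = 'digraph program { /* paste nodes and edges from the log */ }'
    with open("program.dot", "w") as f:
        f.write(dot_text)

    # Requires the Graphviz `dot` binary on PATH.
    subprocess.run(["dot", "-Tsvg", "program.dot", "-o", "program.svg"],
                   check=True)

The same works for every graph_constructed entry in this log; only the pasted text changes.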
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[{"from":0}]},{"owner_id":4,"inputs":[]},{"owner_id":5,"inputs":[{"from":4}]}],"nodes":{"1":{"p":{"p":{"data":[{"name":"uid","id":2}]},"o":"2","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"i":"10001","t":"Filter"},"w":17,"id":3},"2":{"p":{"i":"2","p":{"address":{"name":"uid","id":2}},"o":"2","t":"AssembleOriginalData"},"w":7,"id":2},"5":{"p":{"i":"10002","t":"Projection"},"w":10,"id":5},"4":{"p":{"o":"10002","t":"Calculation"},"w":10,"id":4},"0":{"p":{"i":"2","o":"10001","t":"Calculation"},"w":17,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10UInt64TypeE; >> TestProgram::JsonValue |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::Serverless [GOOD] Test command err: 2025-03-31T16:15:09.184497Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:09.184767Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:15:09.184843Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d12/r3tmp/tmprRw0MW/pdisk_1.dat 2025-03-31T16:15:09.601895Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1077, node 1 2025-03-31T16:15:09.896640Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:15:09.896695Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:15:09.896727Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:15:09.898593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:15:09.898990Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:15:10.003076Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:15:10.003268Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:15:10.024674Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17644 2025-03-31T16:15:10.690886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:15:14.333662Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-31T16:15:14.390105Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:15:14.390237Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:15:14.432518Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:15:14.435055Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:15:14.711348Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:14.711962Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:14.712570Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:14.712674Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:14.712891Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:14.712996Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:14.713108Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:14.713191Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:14.713294Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:14.904835Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:15:14.904964Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:15:14.918762Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:15:15.087603Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:15:15.146107Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-31T16:15:15.146217Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-31T16:15:15.180838Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-31T16:15:15.181548Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-31T16:15:15.181775Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-31T16:15:15.181860Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-31T16:15:15.181925Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-31T16:15:15.181990Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-31T16:15:15.182073Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-31T16:15:15.182131Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-31T16:15:15.182627Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-31T16:15:15.244959Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-31T16:15:15.245065Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1872:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-31T16:15:15.252310Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2608] 2025-03-31T16:15:15.263049Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1908:2617] 2025-03-31T16:15:15.263714Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1908:2617], schemeshard id = 72075186224037897 2025-03-31T16:15:15.266153Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-03-31T16:15:15.299295Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-31T16:15:15.299350Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-31T16:15:15.299418Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-03-31T16:15:15.315960Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-31T16:15:15.328411Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-31T16:15:15.328563Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-31T16:15:15.536168Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-31T16:15:15.709837Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-31T16:15:15.805667Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-31T16:15:16.850794Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-31T16:15:17.809987Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:15:18.070369Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2025-03-31T16:15:18.070445Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037899 2025-03-31T16:15:18.070563Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2596:2948], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037899 2025-03-31T16:15:18.071233Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2597:2949] 2025-03-31T16:15:18.071973Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2597:2949], schemeshard id = 72075186224037899 2025-03-31T16:15:19.437161Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2723:3245], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:19.437390Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:19.458637Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72075186224037899 2025-03-31T16:15:19.676850Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2876:3281], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:19.677034Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:19.773211Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2881:3285]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:15:19.773464Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-31T16:15:19.773629Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2025-03-31T16:15:19.773703Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2884:3288] 2025-03-31T16:15:19.773772Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2884:3288] 2025-03-31T16:15:19.774554Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2885:3079] 2025-03-31T16:15:19.774864Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2884:3288], server id = [2:2885:3079], tablet id = 72075186224037894, status = OK 2025-03-31T16:15:19.775227Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:2885:3079], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-03-31T16:15:19.775316Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-03-31T16:15:19.775668Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-31T16:15:19.775848Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2881:3285], StatRequests.size() = 1 2025-03-31T16:15:19.799923Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2889:3292], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't ... plyToActorId = [2:7541:5420], StatRequests.size() = 1 2025-03-31T16:17:56.323927Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-03-31T16:17:56.324024Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-03-31T16:17:56.324072Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-03-31T16:17:56.324141Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-03-31T16:17:56.663678Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 117 ], ReplyToActorId[ [2:7585:5440]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:17:56.663969Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 117 ] 2025-03-31T16:17:56.664013Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 117, ReplyToActorId = [2:7585:5440], StatRequests.size() = 1 2025-03-31T16:17:58.179372Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-03-31T16:17:58.179616Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-31T16:17:58.179984Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-31T16:17:58.235500Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037897 2025-03-31T16:17:58.235594Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 180.000000s, at schemeshard: 72075186224037897 2025-03-31T16:17:58.235949Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037897, stats size# 25 2025-03-31T16:17:58.253926Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-03-31T16:17:58.319049Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 118 ], ReplyToActorId[ [2:7626:5463]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:17:58.319424Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 118 ] 2025-03-31T16:17:58.319475Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 118, ReplyToActorId = [2:7626:5463], StatRequests.size() = 1 2025-03-31T16:17:59.681376Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:17:59.681473Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-31T16:17:59.681536Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table. 2025-03-31T16:17:59.681587Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-03-31T16:17:59.682030Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2025-03-31T16:17:59.718174Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-03-31T16:17:59.723044Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7664:5490], DatabaseId: /Root/Shared, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:59.723205Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7674:5495], DatabaseId: /Root/Shared, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:59.723411Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Shared, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:59.748483Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897 2025-03-31T16:17:59.826152Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7678:5498], DatabaseId: /Root/Shared, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-03-31T16:17:59.937858Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 119 ], ReplyToActorId[ [2:7775:5547]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:17:59.938172Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 119 ] 2025-03-31T16:17:59.938224Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 119, ReplyToActorId = [2:7775:5547], StatRequests.size() = 1 2025-03-31T16:18:00.038850Z node 2 :TX_PROXY ERROR: Actor# [2:7780:5549] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Shared/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:00.088892Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 120 ], ReplyToActorId[ [2:7809:5564]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:18:00.089304Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 120 ] 2025-03-31T16:18:00.089582Z node 2 :STATISTICS DEBUG: [72075186224037894] EvRequestStats, node id = 2, schemeshard count = 1, urgent = 0 2025-03-31T16:18:00.089639Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-03-31T16:18:00.089797Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-31T16:18:00.089885Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 120, ReplyToActorId = [2:7809:5564], StatRequests.size() = 1 2025-03-31T16:18:00.253561Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZjBkNmYwMDYtZGI1NzZkZjAtYzJmZGFhOWEtNTk2YzA5Yjc=, TxId: 2025-03-31T16:18:00.253670Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZjBkNmYwMDYtZGI1NzZkZjAtYzJmZGFhOWEtNTk2YzA5Yjc=, TxId: 2025-03-31T16:18:00.254343Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-31T16:18:00.270318Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-03-31T16:18:00.270406Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 
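The id component of each SessionId (ydb://session/3?node_id=2&id=...) is plain base64 and decodes to a dash-separated hex session id, so the opaque tokens in these entries can be inspected with the standard library alone; the value below is copied from the TEvDataQueryResult entry above:

    import base64

    # Session id taken from the log entry above.
    encoded = "ZjBkNmYwMDYtZGI1NzZkZjAtYzJmZGFhOWEtNTk2YzA5Yjc="
    print(base64.b64decode(encoded).decode("ascii"))
    # f0d6f006-db576df0-c2fdaa9a-596c09b7

This helps when correlating the same session across differently encoded log sources.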
2025-03-31T16:18:00.343880Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-03-31T16:18:00.343983Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-03-31T16:18:00.410590Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:3068:3114], schemeshard count = 1 2025-03-31T16:18:00.710464Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037899 2025-03-31T16:18:00.710535Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 205.000000s, at schemeshard: 72075186224037899 2025-03-31T16:18:00.710776Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037899, stats size# 26 2025-03-31T16:18:00.725305Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-03-31T16:18:01.795648Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 121 ], ReplyToActorId[ [2:7877:5607]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:18:01.795959Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 121 ] 2025-03-31T16:18:01.796001Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 121, ReplyToActorId = [2:7877:5607], StatRequests.size() = 1 2025-03-31T16:18:03.201042Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-31T16:18:03.212365Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:18:03.212442Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-31T16:18:03.212497Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 2] is data table. 2025-03-31T16:18:03.212538Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037899, LocalPathId: 2] 2025-03-31T16:18:03.212893Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2025-03-31T16:18:03.216924Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-03-31T16:18:03.233931Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZjUyNGY0NzYtZTk1OTRmZWYtZWQ4ZmIyNzQtMWZmNzc1Yg==, TxId: 2025-03-31T16:18:03.234012Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZjUyNGY0NzYtZTk1OTRmZWYtZWQ4ZmIyNzQtMWZmNzc1Yg==, TxId: 2025-03-31T16:18:03.234610Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-31T16:18:03.251725Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037899, LocalPathId: 2] 2025-03-31T16:18:03.251787Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 
2025-03-31T16:18:03.310276Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 122 ], ReplyToActorId[ [2:7948:5650]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:18:03.310524Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 122 ] 2025-03-31T16:18:03.310559Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 122, ReplyToActorId = [2:7948:5650], StatRequests.size() = 1 2025-03-31T16:18:04.883726Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 123 ], ReplyToActorId[ [2:8002:5682]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:18:04.884027Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 123 ] 2025-03-31T16:18:04.884070Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 123, ReplyToActorId = [2:8002:5682], StatRequests.size() = 1 2025-03-31T16:18:06.217803Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 2 2025-03-31T16:18:06.218053Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-31T16:18:06.218436Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-31T16:18:06.229952Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:18:06.230023Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-31T16:18:06.305398Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 124 ], ReplyToActorId[ [2:8042:5704]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:18:06.305688Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 124 ] 2025-03-31T16:18:06.305737Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 124, ReplyToActorId = [2:8042:5704], StatRequests.size() = 1 >> TestProgram::Like >> BackupRestore::TestAllSchemeObjectTypes-EPathTypePersQueueGroup [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeKesus >> TestProgram::Like [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TColumnEngineTestLogs::IndexWriteLoadReadStrPK [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=3912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine.h:144;event=RegisterTable;path_id=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine.h:144;event=RegisterTable;path_id=2; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:49;event=insert_to_cache;key=uint64::0;records=1;size=8; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=3912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=3912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=944;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=1072;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=760;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=760;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=760;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:1;path_id:1;records_count:100;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:6184;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:1;path_id:1;records_count:100;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:6184;index_size:0;meta:((produced=INSERTED;)););visible=0;snapshot=plan_step=1;tx_id=0;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:1;path_id:1;records_count:100;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:6184;index_size:0;meta:((produced=INSERTED;)););visible=0;snapshot=plan_step=1;tx_id=2;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:1;path_id:1;records_count:100;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:6184;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=2;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:1;path_id:1;records_count:100;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:6184;index_size:0;meta:((produced=INSERTED;));); >> TestProgram::JsonValue [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeView [GOOD] >> BackupRestoreS3::RestoreTablePartitioningSettings >> BackupRestore::RestoreTablePartitioningSettings [GOOD] >> BackupRestore::RestoreIndexTablePartitioningSettings |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelEndsWithScalar |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::Like [GOOD] Test command err: 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Bytes: "001" } } } Command { Assign { Column { Id: 16 } Constant { Bytes: "uid" } } } Command { Assign { Column { Id: 17 } Function { Id: 33 Arguments { Id: 7 } Arguments { Id: 16 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Assign { Column { Id: 18 } Function { Id: 34 Arguments { Id: 7 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 1 } } } Command { Assign { Column { Id: 19 } Function { Id: 18 Arguments { Id: 17 } FunctionType: SIMPLE_ARROW } } } Command { Assign { Column { Id: 20 } Function { Id: 18 Arguments { Id: 18 } FunctionType: SIMPLE_ARROW } } } Command { Assign { Column { Id: 21 } Function { Id: 11 Arguments { Id: 19 } Arguments { Id: 20 } FunctionType: SIMPLE_ARROW } } } Command { Projection { Columns { Id: 21 } } } Kernels: "O\006\006Arg\022BlockFunc\030BlockAsTuple\t\211\004\235\213\004\213\004\203\001H\203\005@\213\004\203\014?\006\001\235?\004\001\235?\010\001\n\000\t\211\004?\016\235?\000\001\235?\002\000\n\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\004?\020\235?\006\001?$\n\000\t\211\006?$\203\005@?\024?\026\006\000\003?(\024StartsWith?\034? \001\t\211\006?$\203\005@?\024?\026\006\000\003?0\020EndsWith?\034? \001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Bytes: "001" } } } Command { Assign { Column { Id: 16 } Constant { Bytes: "uid" } } } Command { Assign { Column { Id: 17 } Function { Id: 33 Arguments { Id: 7 } Arguments { Id: 16 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Assign { Column { Id: 18 } Function { Id: 34 Arguments { Id: 7 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 1 } } } Command { Assign { Column { Id: 19 } Function { Id: 18 Arguments { Id: 17 } FunctionType: SIMPLE_ARROW } } } Command { Assign { Column { Id: 20 } Function { Id: 18 Arguments { Id: 18 } FunctionType: SIMPLE_ARROW } } } Command { Assign { Column { Id: 21 } Function { Id: 11 Arguments { Id: 19 } Arguments { Id: 20 } FunctionType: SIMPLE_ARROW } } } Command { Projection { Columns { Id: 21 } } } Kernels: "O\006\006Arg\022BlockFunc\030BlockAsTuple\t\211\004\235\213\004\213\004\203\001H\203\005@\213\004\203\014?\006\001\235?\004\001\235?\010\001\n\000\t\211\004?\016\235?\000\001\235?\002\000\n\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\004?\020\235?\006\001?$\n\000\t\211\006?$\203\005@?\024?\026\006\000\003?(\024StartsWith?\034? \001\t\211\006?$\203\005@?\024?\026\006\000\003?0\020EndsWith?\034? 
\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:154;graph_constructed=digraph program {N0[shape=box, label="N5(0):{\"p\":{\"v\":\"001\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N0(0):{\"p\":{\"v\":\"uid\"},\"o\":\"16\",\"t\":\"Const\"}\n"]; N2[shape=box, label="N3(17):{\"i\":\"7,16\",\"o\":\"17\",\"t\":\"Calculation\"}\nREMOVE:16"]; N1 -> N2[label="1"]; N4 -> N2[label="2"]; N3[shape=box, label="N1(2):{\"p\":{\"data\":[{\"name\":\"string\",\"id\":7}]},\"o\":\"7\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N4[shape=box, label="N2(7):{\"i\":\"7\",\"p\":{\"address\":{\"name\":\"string\",\"id\":7}},\"o\":\"7\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N3 -> N4[label="1"]; N5[shape=box, label="N6(17):{\"i\":\"7,15\",\"o\":\"18\",\"t\":\"Calculation\"}\nREMOVE:7,15"]; N0 -> N5[label="1"]; N4 -> N5[label="2"]; N6[shape=box, label="N4(27):{\"i\":\"17\",\"o\":\"19\",\"t\":\"Calculation\"}\nREMOVE:17"]; N2 -> N6[label="1"]; N7[shape=box, label="N7(27):{\"i\":\"18\",\"o\":\"20\",\"t\":\"Calculation\"}\nREMOVE:18"]; N5 -> N7[label="1"]; N8[shape=box, label="N8(64):{\"i\":\"19,20\",\"o\":\"21\",\"t\":\"Calculation\"}\nREMOVE:19,20"]; N6 -> N8[label="1"]; N7 -> N8[label="2"]; N9[shape=box, label="N9(64):{\"i\":\"21\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N8 -> N9[label="1"]; N1->N3->N4->N2->N6->N0->N5->N7->N8->N9[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1},{"from":4}]},{"owner_id":3,"inputs":[]},{"owner_id":4,"inputs":[{"from":3}]},{"owner_id":5,"inputs":[{"from":0},{"from":4}]},{"owner_id":6,"inputs":[{"from":2}]},{"owner_id":7,"inputs":[{"from":5}]},{"owner_id":8,"inputs":[{"from":6},{"from":7}]},{"owner_id":9,"inputs":[{"from":8}]}],"nodes":{"1":{"p":{"p":{"v":"uid"},"o":"16","t":"Const"},"w":0,"id":1},"3":{"p":{"p":{"data":[{"name":"string","id":7}]},"o":"7","t":"FetchOriginalData"},"w":2,"id":3},"8":{"p":{"i":"19,20","o":"21","t":"Calculation"},"w":64,"id":8},"2":{"p":{"i":"7,16","o":"17","t":"Calculation"},"w":17,"id":2},"0":{"p":{"p":{"v":"001"},"o":"15","t":"Const"},"w":0,"id":0},"5":{"p":{"i":"7,15","o":"18","t":"Calculation"},"w":17,"id":5},"9":{"p":{"i":"21","t":"Projection"},"w":64,"id":9},"7":{"p":{"i":"18","o":"20","t":"Calculation"},"w":27,"id":7},"4":{"p":{"i":"7","p":{"address":{"name":"string","id":7}},"o":"7","t":"AssembleOriginalData"},"w":7,"id":4},"6":{"p":{"i":"17","o":"19","t":"Calculation"},"w":27,"id":6}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow11BooleanTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow11BooleanTypeE; |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelEndsWithScalar [GOOD] >> TSchemeShardTest::ManyDirs [GOOD] >> TSchemeShardTest::ListNotCreatedDirCase ------- [TM] {asan, default-linux-x86_64, 
release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::JsonValue [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207?\004\001\235?\006\001\235?\n\001\"\000\t\211\004?\020\235?\002\001\235?\004\000\"\000\t\251\000?\026\002\000\t\251\000?\030\002\000\000\t\211\002?\022\235?\010\001\"\000\t\211\n?&?\026?\030?\002?\004?\010,ScalarApply\000?\036?\"\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\010?\002?\000\207?\004?4$IfPresent\000?.\t\251\000?\000\002\000\t\211\n?4\201\213\004\213\004\203\n\203\005@\207\203\001H?@?4?D?D VisitAll\000\t\211\020?H\211\006?H\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?X\203\005@\200\203\005@\202\022\000\003?p6Json2.SqlValueConvertToUtf8\202\003?r\000\002\017\003?Z\000\003?\\\000\003?^\000\003?`\000\027?d\t\211\014?b\311\002?b\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\214\005\205\004\203\010\203\005@\032\036\003?\222\002\003?\224\000\003\001\003?\216\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\244\203\005@\200\203\005@\202\022\000\003?\260\026Json2.Parse\202\003?\262\000\002\017\003?\246\000\003?\250\000\003?\252\000\003?\254\000?:\036\t\211\014?f\211\002?f\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\312\203\005@\200\203\005@\202\022\000\003?\326\"Json2.CompilePath\202\003?\330\000\002\017\003?\314\000\003?\316\000\003?\320\000\003?\322\000?2\036\010\000?l\276\t\251\000?@\002\000\'?4\t\251\000?D\002\000?\370\004\'?4\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207?\004\001\235?\006\001\235?\n\001\"\000\t\211\004?\020\235?\002\001\235?\004\000\"\000\t\251\000?\026\002\000\t\251\000?\030\002\000\000\t\211\002?\022\235?\010\001\"\000\t\211\n?&?\026?\030?\002?\004?\010,ScalarApply\000?\036?\"\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\010?\002?\000\207?\004?4$IfPresent\000?.\t\251\000?\000\002\000\t\211\n?4\201\213\004\213\004\203\n\203\005@\207\203\001H?@?4?D?D 
VisitAll\000\t\211\020?H\211\006?H\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?X\203\005@\200\203\005@\202\022\000\003?p6Json2.SqlValueConvertToUtf8\202\003?r\000\002\017\003?Z\000\003?\\\000\003?^\000\003?`\000\027?d\t\211\014?b\311\002?b\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\214\005\205\004\203\010\203\005@\032\036\003?\222\002\003?\224\000\003\001\003?\216\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\244\203\005@\200\203\005@\202\022\000\003?\260\026Json2.Parse\202\003?\262\000\002\017\003?\246\000\003?\250\000\003?\252\000\003?\254\000?:\036\t\211\014?f\211\002?f\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\312\203\005@\200\203\005@\202\022\000\003?\326\"Json2.CompilePath\202\003?\330\000\002\017\003?\314\000\003?\316\000\003?\320\000\003?\322\000?2\036\010\000?l\276\t\251\000?@\002\000\'?4\t\251\000?D\002\000?\370\004\'?4\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:154;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N3(17):{\"i\":\"5,15\",\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,5"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N1(2):{\"p\":{\"data\":[{\"name\":\"json_string\",\"id\":5}]},\"o\":\"5\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N3[shape=box, label="N2(7):{\"i\":\"5\",\"p\":{\"address\":{\"name\":\"json_string\",\"id\":5}},\"o\":\"5\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N4(17):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N0->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"i":"5,15","o":"16","t":"Calculation"},"w":17,"id":1},"3":{"p":{"i":"5","p":{"address":{"name":"json_string","id":5}},"o":"5","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"p":{"data":[{"name":"json_string","id":5}]},"o":"5","t":"FetchOriginalData"},"w":2,"id":2},"4":{"p":{"i":"16","t":"Projection"},"w":17,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; json_string: [ "{"key":"value"}", "{"key":10}", "{"key":0.1}", "{"key":false}", "{"another":"value"}", "[]" ] Check output for Utf8 FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; 
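Each program_parsed;result= entry carries the same plan as JSON: the edges list gives per-node inputs, and every node records its type t and weight w (the w values match the parenthesized numbers in the DOT node labels). A standard-library sketch, with node data abbreviated to the fields used, that re-derives the execution order shown by the red chain N0->N2->N3->N1->N4 in the preceding DOT dump:

    import json

    # Abbreviated from the program_parsed entry above (TestProgram::JsonValue).
    plan = json.loads("""
    {"edges":[{"owner_id":0,"inputs":[]},
              {"owner_id":1,"inputs":[{"from":0},{"from":3}]},
              {"owner_id":2,"inputs":[]},
              {"owner_id":3,"inputs":[{"from":2}]},
              {"owner_id":4,"inputs":[{"from":1}]}],
     "nodes":{"0":{"p":{"o":"15","t":"Const"},"w":0},
              "1":{"p":{"i":"5,15","o":"16","t":"Calculation"},"w":17},
              "2":{"p":{"o":"5","t":"FetchOriginalData"},"w":2},
              "3":{"p":{"i":"5","o":"5","t":"AssembleOriginalData"},"w":7},
              "4":{"p":{"i":"16","t":"Projection"},"w":17}}}
    """)

    # Kahn-style topological sort over the "inputs" edges.
    deps = {e["owner_id"]: {i["from"] for i in e["inputs"]}
            for e in plan["edges"]}
    done, order = set(), []
    while deps:
        ready = sorted(n for n, d in deps.items() if d <= done)
        if not ready:      # would indicate a cycle; these plans are DAGs
            break
        for n in ready:
            order.append(n)
            done.add(n)
            del deps[n]
    print([(n, plan["nodes"][str(n)]["p"]["t"]) for n in order])
    # [(0, 'Const'), (2, 'FetchOriginalData'), (3, 'AssembleOriginalData'),
    #  (1, 'Calculation'), (4, 'Projection')]

The printed order matches the red execution chain, with ties between independent nodes broken by node id.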
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203\014?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?r$Json2.SqlValueBool\202\003?t\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?f\t\211\014?d\311\002?d\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\216\005\205\004\203\010\203\005@\032\036\003?\224\002\003?\226\000\003\001\003?\220\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\246\203\005@\200\203\005@\202\022\000\003?\262\026Json2.Parse\202\003?\264\000\002\017\003?\250\000\003?\252\000\003?\254\000\003?\256\000?<\036\t\211\014?h\211\002?h\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\314\203\005@\200\203\005@\202\022\000\003?\330\"Json2.CompilePath\202\003?\332\000\002\017\003?\316\000\003?\320\000\003?\322\000\003?\324\000?4\036\010\000?n\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\372\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203\014?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?r$Json2.SqlValueBool\202\003?t\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?f\t\211\014?d\311\002?d\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\216\005\205\004\203\010\203\005@\032\036\003?\224\002\003?\226\000\003\001\003?\220\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\246\203\005@\200\203\005@\202\022\000\003?\262\026Json2.Parse\202\003?\264\000\002\017\003?\250\000\003?\252\000\003?\254\000\003?\256\000?<\036\t\211\014?h\211\002?h\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\314\203\005@\200\203\005@\202\022\000\003?\330\"Json2.CompilePath\202\003?\332\000\002\017\003?\316\000\003?\320\000\003?\322\000\003?\324\000?4\036\010\000?n\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\372\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:154;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N3(17):{\"i\":\"5,15\",\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,5"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N1(2):{\"p\":{\"data\":[{\"name\":\"json_string\",\"id\":5}]},\"o\":\"5\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N3[shape=box, label="N2(7):{\"i\":\"5\",\"p\":{\"address\":{\"name\":\"json_string\",\"id\":5}},\"o\":\"5\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape= ... 003?\254\000\003?\256\000?<\036\t\211\014?h\211\002?h\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\314\203\005@\200\203\005@\202\022\000\003?\330\"Json2.CompilePath\202\003?\332\000\002\017\003?\316\000\003?\320\000\003?\322\000\003?\324\000?4\036\010\000?n\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000\t\211\004?6\203\005@?F\030Invoke\000\003?\374\016Convert?\372\001\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203B\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203@?B?6?F?6 VisitAll\000\t\211\020?J\211\006?J\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?r(Json2.SqlValueNumber\202\003?t\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?f\t\211\014?d\311\002?d\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\216\005\205\004\203\010\203\005@\032\036\003?\224\002\003?\226\000\003\001\003?\220\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\246\203\005@\200\203\005@\202\022\000\003?\262\026Json2.Parse\202\003?\264\000\002\017\003?\250\000\003?\252\000\003?\254\000\003?\256\000?<\036\t\211\014?h\211\002?h\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\314\203\005@\200\203\005@\202\022\000\003?\330\"Json2.CompilePath\202\003?\332\000\002\017\003?\316\000\003?\320\000\003?\322\000\003?\324\000?4\036\010\000?n\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000\t\211\004?6\203\005@?F\030Invoke\000\003?\374\016Convert?\372\001\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:154;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N3(17):{\"i\":\"5,15\",\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,5"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N1(2):{\"p\":{\"data\":[{\"name\":\"json_string\",\"id\":5}]},\"o\":\"5\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N3[shape=box, label="N2(7):{\"i\":\"5\",\"p\":{\"address\":{\"name\":\"json_string\",\"id\":5}},\"o\":\"5\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N4(17):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N0->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"i":"5,15","o":"16","t":"Calculation"},"w":17,"id":1},"3":{"p":{"i":"5","p":{"address":{"name":"json_string","id":5}},"o":"5","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"p":{"data":[{"name":"json_string","id":5}]},"o":"5","t":"FetchOriginalData"},"w":2,"id":2},"4":{"p":{"i":"16","t":"Projection"},"w":17,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; json_string: [ "{"key":"value"}", 
"{"key":10}", "{"key":0.1}", "{"key":false}", "{"another":"value"}", "[]" ] Check output for Float FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203@\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203@?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?r(Json2.SqlValueNumber\202\003?t\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?f\t\211\014?d\311\002?d\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\216\005\205\004\203\010\203\005@\032\036\003?\224\002\003?\226\000\003\001\003?\220\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\246\203\005@\200\203\005@\202\022\000\003?\262\026Json2.Parse\202\003?\264\000\002\017\003?\250\000\003?\252\000\003?\254\000\003?\256\000?<\036\t\211\014?h\211\002?h\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\314\203\005@\200\203\005@\202\022\000\003?\330\"Json2.CompilePath\202\003?\332\000\002\017\003?\316\000\003?\320\000\003?\322\000\003?\324\000?4\036\010\000?n\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\372\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203@\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203@?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?r(Json2.SqlValueNumber\202\003?t\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?f\t\211\014?d\311\002?d\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\216\005\205\004\203\010\203\005@\032\036\003?\224\002\003?\226\000\003\001\003?\220\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\246\203\005@\200\203\005@\202\022\000\003?\262\026Json2.Parse\202\003?\264\000\002\017\003?\250\000\003?\252\000\003?\254\000\003?\256\000?<\036\t\211\014?h\211\002?h\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\314\203\005@\200\203\005@\202\022\000\003?\330\"Json2.CompilePath\202\003?\332\000\002\017\003?\316\000\003?\320\000\003?\322\000\003?\324\000?4\036\010\000?n\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\372\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:154;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N3(17):{\"i\":\"5,15\",\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,5"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N1(2):{\"p\":{\"data\":[{\"name\":\"json_string\",\"id\":5}]},\"o\":\"5\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N3[shape=box, label="N2(7):{\"i\":\"5\",\"p\":{\"address\":{\"name\":\"json_string\",\"id\":5}},\"o\":\"5\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N4(17):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N0->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"i":"5,15","o":"16","t":"Calculation"},"w":17,"id":1},"3":{"p":{"i":"5","p":{"address":{"name":"json_string","id":5}},"o":"5","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"p":{"data":[{"name":"json_string","id":5}]},"o":"5","t":"FetchOriginalData"},"w":2,"id":2},"4":{"p":{"i":"16","t":"Projection"},"w":17,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; json_string: [ "{"key":"value"}", "{"key":10}", "{"key":0.1}", "{"key":false}", "{"another":"value"}", 
"[]" ] Check output for Double FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10DoubleTypeE; |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::CountUIDByVAT ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelEndsWithScalar [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Bytes: "amet." } } } Command { Assign { Column { Id: 16 } Function { Arguments { Id: 7 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\004\203\001H\203\005@\213\002\203\014\001\235?\004\001\235?\010\001\006\000\t\211\004?\016\235?\000\001\235?\002\000\006\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\002?\020\235?\006\001\006\000\t\211\006?$\203\005@?\024?\026$BlockFunc\000\003?(\020EndsWith?\034? \001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Bytes: "amet." } } } Command { Assign { Column { Id: 16 } Function { Arguments { Id: 7 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\004\203\001H\203\005@\213\002\203\014\001\235?\004\001\235?\010\001\006\000\t\211\004?\016\235?\000\001\235?\002\000\006\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\002?\020\235?\006\001\006\000\t\211\006?$\203\005@?\024?\026$BlockFunc\000\003?(\020EndsWith?\034? 
\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:154;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"amet.\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N3(17):{\"i\":\"7,15\",\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:7,15"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N1(2):{\"p\":{\"data\":[{\"name\":\"string\",\"id\":7}]},\"o\":\"7\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N3[shape=box, label="N2(7):{\"i\":\"7\",\"p\":{\"address\":{\"name\":\"string\",\"id\":7}},\"o\":\"7\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N4(17):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N0->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"i":"7,15","o":"16","t":"Calculation"},"w":17,"id":1},"3":{"p":{"i":"7","p":{"address":{"name":"string","id":7}},"o":"7","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"p":{"data":[{"name":"string","id":7}]},"o":"7","t":"FetchOriginalData"},"w":2,"id":2},"4":{"p":{"i":"16","t":"Projection"},"w":17,"id":4},"0":{"p":{"p":{"v":"amet."},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9UInt8TypeE; |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::CountUIDByVAT [GOOD] >> TestProgram::JsonExists >> YdbTableBulkUpsertOlap::UpsertArrowBatch_DataShard [GOOD] |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesExpiredCert [GOOD] >> TestProgram::JsonExists [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeCdcStream [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeBlobDepot [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeExternalDataSource [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::CountUIDByVAT [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { GroupBy { Aggregates { Column { Id: 10001 } Function { Id: 2 Arguments { Id: 2 } } } KeyColumns { Id: 4 } } } Command { Projection { Columns { Id: 10001 } Columns { Id: 4 } } } ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { GroupBy { Aggregates { Column { Id: 10001 } Function { Id: 2 Arguments { Id: 2 } } } KeyColumns { Id: 4 } } } Command { Projection { Columns { Id: 10001 } Columns { Id: 4 } } } ; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:154;graph_constructed=digraph program {N0[shape=box, label="N3(18):{\"i\":\"2,4\",\"o\":\"10001\",\"t\":\"Aggregation\"}\nREMOVE:2"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N2(9):{\"i\":\"2\",\"p\":{\"address\":{\"name\":\"uid\",\"id\":2}},\"o\":\"2\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N1(9):{\"i\":\"4\",\"p\":{\"address\":{\"name\":\"vat\",\"id\":4}},\"o\":\"4\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N4(27):{\"i\":\"10001,4\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N4 -> N5[label="1"]; N0 -> N5[label="2"]; N6[shape=box, label="N0(4):{\"p\":{\"data\":[{\"name\":\"uid\",\"id\":2},{\"name\":\"vat\",\"id\":4}]},\"o\":\"2,4\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N6->N4->N2->N0->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":6}]},{"owner_id":4,"inputs":[{"from":6}]},{"owner_id":5,"inputs":[{"from":4},{"from":0}]},{"owner_id":6,"inputs":[]}],"nodes":{"2":{"p":{"i":"2","p":{"address":{"name":"uid","id":2}},"o":"2","t":"AssembleOriginalData"},"w":9,"id":2},"6":{"p":{"p":{"data":[{"name":"uid","id":2},{"name":"vat","id":4}]},"o":"2,4","t":"FetchOriginalData"},"w":4,"id":6},"5":{"p":{"i":"10001,4","t":"Projection"},"w":27,"id":5},"4":{"p":{"i":"4","p":{"address":{"name":"vat","id":4}},"o":"4","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"2,4","o":"10001","t":"Aggregation"},"w":18,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9Int32TypeE; >> 
BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeTable [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeSubDomain [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeSolomonVolume [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeTableIndex >> TSchemeShardTest::ListNotCreatedDirCase [GOOD] >> TSchemeShardTest::ListNotCreatedIndexCase ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::JsonExists [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\020JsonNode\006Arg\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\006\000\t\251\000?\032\006\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\006\000\t\251\000?\004\006\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\006\000\t\211\022?6\211\010?6\207\214\002\214\n\210\203\001H?>?6\016\000\203\004\203\005@\203\004\203\004\207\214\002\214\n\210\203\001H\214\002\207\203\014\026\000\t\211\010?H\203\005@\200\203\005@\202\022\000\003?d\036Json2.SqlExists\202\003?f\000\002\017\003?J\000\003?L\000\003?N\000\003?P\000\027?T\t\211\014?R\311\002?R\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\200\005\205\004\203\010\203\005@\032\036\003?\206\002\003?\210\000\003\001\003?\202\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\230\203\005@\200\203\005@\202\022\000\003?\244\026Json2.Parse\202\003?\246\000\002\017\003?\232\000\003?\234\000\003?\236\000\003?\240\000?<\036\t\211\014?V\211\002?V\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\276\203\005@\200\203\005@\202\022\000\003?\312\"Json2.CompilePath\202\003?\314\000\002\017\003?\300\000\003?\302\000\003?\304\000\003?\306\000?4\036\010\000?\\7?`\003?^\000\276\001\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\020JsonNode\006Arg\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\006\000\t\251\000?\032\006\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\006\000\t\251\000?\004\006\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\006\000\t\211\022?6\211\010?6\207\214\002\214\n\210\203\001H?>?6\016\000\203\004\203\005@\203\004\203\004\207\214\002\214\n\210\203\001H\214\002\207\203\014\026\000\t\211\010?H\203\005@\200\203\005@\202\022\000\003?d\036Json2.SqlExists\202\003?f\000\002\017\003?J\000\003?L\000\003?N\000\003?P\000\027?T\t\211\014?R\311\002?R\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\200\005\205\004\203\010\203\005@\032\036\003?\206\002\003?\210\000\003\001\003?\202\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\230\203\005@\200\203\005@\202\022\000\003?\244\026Json2.Parse\202\003?\246\000\002\017\003?\232\000\003?\234\000\003?\236\000\003?\240\000?<\036\t\211\014?V\211\002?V\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\276\203\005@\200\203\005@\202\022\000\003?\312\"Json2.CompilePath\202\003?\314\000\002\017\003?\300\000\003?\302\000\003?\304\000\003?\306\000?4\036\010\000?\\7?`\003?^\000\276\001\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:154;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N3(17):{\"i\":\"5,15\",\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,5"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N1(2):{\"p\":{\"data\":[{\"name\":\"json_string\",\"id\":5}]},\"o\":\"5\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N3[shape=box, label="N2(7):{\"i\":\"5\",\"p\":{\"address\":{\"name\":\"json_string\",\"id\":5}},\"o\":\"5\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N4(17):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N0->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"i":"5,15","o":"16","t":"Calculation"},"w":17,"id":1},"3":{"p":{"i":"5","p":{"address":{"name":"json_string","id":5}},"o":"5","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"p":{"data":[{"name":"json_string","id":5}]},"o":"5","t":"FetchOriginalData"},"w":2,"id":2},"4":{"p":{"i":"16","t":"Projection"},"w":17,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; json_string: [ "{"key":"value"}", "[]" ] FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9UInt8TypeE; >> Cdc::DocApi[PqRunner] [GOOD] >> Cdc::DocApi[YdsRunner] >> Cdc::KeysOnlyLog[YdsRunner] [GOOD] >> Cdc::KeysOnlyLog[TopicRunner] >> TestProgram::JsonValueBinary >> Cdc::UuidExchange[YdsRunner] [GOOD] >> Cdc::UuidExchange[TopicRunner] >> YdbYqlClient::QueryStats [GOOD] >> YdbYqlClient::RenameTables >> BasicStatistics::TwoServerlessDbs [GOOD] ------- [TM] {asan, default-linux-x86_64, release} 
ydb/services/ydb/ut/unittest >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesExpiredCert [GOOD] Test command err: 2025-03-31T16:17:03.183022Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008436462077757:2208];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:03.183106Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dba/r3tmp/tmp0C3SKQ/pdisk_1.dat 2025-03-31T16:17:03.522421Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:03.574625Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:03.574699Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:03.578573Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14578, node 1 2025-03-31T16:17:03.662769Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:03.662792Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:03.662799Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:03.662924Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15035 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:03.946151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:17:04.033005Z node 1 :TICKET_PARSER DEBUG: Ticket F37934BA3016CAFC70F9F7FE778F8AD33DCE23B8444143626BE87E6FB0884A7D (ipv6:[::1]:49450) has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2025-03-31T16:17:04.107343Z node 1 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:49464) has now valid token of root@builtin 2025-03-31T16:17:04.178225Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-03-31T16:17:04.178278Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:17:04.178289Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-31T16:17:04.178322Z node 1 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-03-31T16:17:07.244101Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488008451834890900:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:07.244457Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dba/r3tmp/tmprH7O0D/pdisk_1.dat 2025-03-31T16:17:07.407911Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:07.449801Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:07.449903Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:07.457245Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19818, node 4 2025-03-31T16:17:07.604184Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:07.604212Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:07.604218Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:07.604346Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17524 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:07.883387Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:17:07.971266Z node 4 :TICKET_PARSER DEBUG: Ticket F37934BA3016CAFC70F9F7FE778F8AD33DCE23B8444143626BE87E6FB0884A7D (ipv6:[::1]:45818) has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2025-03-31T16:17:08.050211Z node 4 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:45826) has now valid token of root@builtin 2025-03-31T16:17:08.129609Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-03-31T16:17:08.129648Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:17:08.129658Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-31T16:17:08.129738Z node 4 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-03-31T16:17:12.004612Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7488008473687919497:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:12.004689Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dba/r3tmp/tmpuA1qqO/pdisk_1.dat 2025-03-31T16:17:12.134285Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:12.154690Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:12.154755Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:12.159184Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14654, node 7 2025-03-31T16:17:12.230903Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:12.230926Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:12.230932Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:12.231072Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11204 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:12.553180Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:17:12.646358Z node 7 :TICKET_PARSER DEBUG: Ticket 23888C85E7F39E236BE9EE8DD2B49C9351A7142EA68DB3F679BEC9C38ADCF1FC (ipv6:[::1]:45952) has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2025-03-31T16:17:12.728104Z node 7 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:45972) has now valid token of root@builtin 2025-03-31T16:17:12.802966Z node 7 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-03-31T16:17:12.803002Z node 7 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:17:12.803013Z node 7 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-31T16:17:12.803051Z node 7 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-03-31T16:17:16.603076Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7488008489139676819:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:16.603147Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dba/r3tmp/tmpRKza8m/pdisk_1.dat 2025-03-31T16:17:16.778266Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:16.814825Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:16.815026Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnect ... nt_certificate:certificate verify failed. E0331 16:17:39.903944978 386892 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0331 16:17:39.925682050 387522 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. 
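The repeated tls_process_client_certificate:certificate verify failed errors in this test are the server side of the TLS handshake rejecting the client's certificate, which is what TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesExpiredCert exercises. A minimal sketch of the same enforcement in Python (illustrative only, not YDB's actual implementation; the file paths and port are hypothetical):

import socket
import ssl

# Server context that demands a client certificate signed by our CA.
# An expired or untrusted client certificate fails verification during
# the handshake, producing "certificate verify failed" as in the log above.
ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
ctx.verify_mode = ssl.CERT_REQUIRED                  # client cert is mandatory
ctx.load_cert_chain("server.pem", "server.key")      # hypothetical paths
ctx.load_verify_locations("ca.pem")                  # CA that signs client certs

with socket.create_server(("127.0.0.1", 8443)) as srv:
    conn, _ = srv.accept()
    try:
        with ctx.wrap_socket(conn, server_side=True) as tls:
            tls.sendall(b"ok\n")
    except ssl.SSLError as err:
        # An expired client certificate ends up here; the client never
        # gets an application-level response, only a handshake failure.
        print("handshake rejected:", err)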
2025-03-31T16:17:44.948650Z node 25 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[25:7488008610018094470:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:44.948731Z node 25 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dba/r3tmp/tmpziaeTJ/pdisk_1.dat 2025-03-31T16:17:45.128356Z node 25 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:45.164506Z node 25 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:45.164619Z node 25 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:45.172341Z node 25 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19732, node 25 2025-03-31T16:17:45.268101Z node 25 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:45.268133Z node 25 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:45.268141Z node 25 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:45.268306Z node 25 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27904 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:45.636661Z node 25 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:49.949451Z node 25 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[25:7488008610018094470:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:49.949533Z node 25 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; E0331 16:17:55.739776002 390190 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0331 16:17:55.764283657 390362 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. 
E0331 16:17:55.802664614 390363 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0331 16:17:55.824029148 390190 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0331 16:17:55.868644255 390192 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0331 16:17:55.889008410 390363 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0331 16:17:55.928379470 390192 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0331 16:17:55.956998275 390192 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0331 16:17:56.003393961 390190 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0331 16:17:56.024602910 390362 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0331 16:17:56.064137798 390192 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0331 16:17:56.089613780 394850 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. 2025-03-31T16:17:57.657427Z node 28 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[28:7488008664360690364:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:57.657524Z node 28 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dba/r3tmp/tmp5s5ilq/pdisk_1.dat 2025-03-31T16:17:57.978769Z node 28 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:58.033401Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:58.033523Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:58.038835Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15307, node 28 2025-03-31T16:17:58.144313Z node 28 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:58.144345Z node 28 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:58.144355Z node 28 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:58.144566Z node 28 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14626 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:58.596747Z node 28 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:02.657791Z node 28 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[28:7488008664360690364:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:02.657886Z node 28 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; E0331 16:18:08.700105913 400109 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0331 16:18:08.721216084 400109 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0331 16:18:08.757962613 395904 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0331 16:18:08.778097641 395904 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0331 16:18:08.820823759 400164 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0331 16:18:08.847746960 396201 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0331 16:18:08.886632497 400164 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0331 16:18:08.908656901 400165 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0331 16:18:08.943428976 400214 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0331 16:18:08.968447225 400213 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. 
E0331 16:18:09.002879757 396201 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0331 16:18:09.024209612 396201 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbTableBulkUpsertOlap::UpsertArrowBatch_DataShard [GOOD] Test command err: 2025-03-31T16:17:31.297878Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008555551744576:2232];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:31.297974Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d45/r3tmp/tmplNjPhh/pdisk_1.dat 2025-03-31T16:17:31.792053Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:31.801565Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:31.801657Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:31.807143Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12934, node 1 2025-03-31T16:17:31.943132Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:31.943199Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:31.943214Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:31.943333Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31649 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:32.282835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:31649 2025-03-31T16:17:32.549074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
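The FALLBACK_ACTOR_LOGGING records that appear throughout this output are semicolon-separated key=value fields. A minimal sketch of pulling them apart (assuming the flat record shape shown above; values that themselves embed DOT graphs or JSON, such as graph_constructed=digraph ..., contain ';' and '=' and would need a real parser):

def parse_record(line: str) -> dict:
    # Records look like:
    #   FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=...;T=...;
    # Split on ';', skip the leading tag, and take key/value at the first '='.
    fields = {}
    for part in line.strip().strip(";").split(";")[1:]:
        key, _, value = part.partition("=")
        fields[key] = value
    return fields

rec = parse_record(
    "FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;"
    "fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE;"
)
assert rec["priority"] == "NOTICE"
assert rec["fline"] == "columnshard_ut_common.h:482"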
2025-03-31T16:17:32.663966Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037891;self_id=[1:7488008559846712795:2322];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:17:32.692771Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037891;self_id=[1:7488008559846712795:2322];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:17:32.692993Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037891 2025-03-31T16:17:32.701784Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488008559846712795:2322];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:17:32.702123Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488008559846712795:2322];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:17:32.702398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488008559846712795:2322];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:17:32.702523Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488008559846712795:2322];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:17:32.702640Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488008559846712795:2322];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:17:32.702714Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488008559846712795:2322];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:17:32.702806Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488008559846712795:2322];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:17:32.702925Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488008559846712795:2322];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:17:32.703522Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488008559846712795:2322];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:17:32.703755Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488008559846712795:2322];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:17:32.703897Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488008559846712795:2322];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:17:32.704012Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7488008559846712795:2322];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:17:32.707227Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037890;self_id=[1:7488008559846712787:2320];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:17:32.751481Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037890;self_id=[1:7488008559846712787:2320];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:17:32.751637Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037890 2025-03-31T16:17:32.758610Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488008559846712787:2320];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:17:32.758763Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488008559846712787:2320];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:17:32.759015Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488008559846712787:2320];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:17:32.759163Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488008559846712787:2320];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:17:32.759278Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488008559846712787:2320];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:17:32.759387Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488008559846712787:2320];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:17:32.759511Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488008559846712787:2320];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:17:32.759648Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488008559846712787:2320];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:17:32.759779Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488008559846712787:2320];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:17:32.759921Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488008559846712787:2320];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:17:32.760035Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7488008559846712787:2320];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:17:32.760150Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488008559846712787:2320];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:17:32.763592Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:7488008559846712790:2321];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:17:32.788733Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:7488008559846712790:2321];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:17:32.788882Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037888 2025-03-31T16:17:32.795883Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488008559846712790:2321];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:17:32.795955Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488008559846712790:2321];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:17:32.796176Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488008559846712790:2321];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:17:32.796303Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id ... 
ready operation [1743437887933:281474976715658] in PlanQueue unit at 72075186224037888 2025-03-31T16:18:07.913260Z node 13 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1743437887933:281474976715658 keys extracted: 0 2025-03-31T16:18:07.913389Z node 13 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:18:07.913482Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:18:07.913548Z node 13 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-31T16:18:07.914067Z node 13 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-31T16:18:07.914461Z node 13 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:18:07.916631Z node 13 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1743437887932 2025-03-31T16:18:07.916668Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:18:07.920319Z node 13 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1743437887933} 2025-03-31T16:18:07.920429Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:18:07.920706Z node 13 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1743437887940 2025-03-31T16:18:07.922647Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:18:07.922687Z node 13 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:18:07.922738Z node 13 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-31T16:18:07.922804Z node 13 :TX_DATASHARD DEBUG: Complete [1743437887933 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [13:7488008697830202173:2199], exec latency: 0 ms, propose latency: 9 ms 2025-03-31T16:18:07.922857Z node 13 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715658 state Ready TxInFly 0 2025-03-31T16:18:07.922911Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:18:07.926223Z node 13 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715658 datashard 72075186224037888 state Ready 2025-03-31T16:18:07.926351Z node 13 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8984;columns=10; 2025-03-31T16:18:07.946105Z node 13 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [13:7488008710715104926:2752], serverId# [13:7488008710715104927:2753], sessionId# [0:0:0] 2025-03-31T16:18:07.946227Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Execute: at tablet# 72075186224037888 2025-03-31T16:18:07.954003Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037888 2025-03-31T16:18:07.954061Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 
72075186224037888
SUCCESS Upsert done: 0.026719s
2025-03-31T16:18:07.968364Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7488008710715104943:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:07.968471Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7488008710715104935:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:07.968587Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:07.972432Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-31T16:18:07.977590Z node 13 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:18:07.989785Z node 13 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:18:07.993500Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7488008710715104949:2350], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-31T16:18:08.092767Z node 13 :TX_PROXY ERROR: Actor# [13:7488008715010072326:2823] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:08.203266Z node 13 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:18:08.203404Z node 13 :TX_DATASHARD DEBUG: Prepared Snapshot transaction txId 281474976715661 at tablet 72075186224037888 2025-03-31T16:18:08.206201Z node 13 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:18:08.209204Z node 13 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715661 at step 1743437888255 at tablet 72075186224037888 { Transactions { TxId: 281474976715661 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743437888255 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T16:18:08.209231Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:18:08.209348Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:18:08.209364Z node 13 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:18:08.209383Z node 13 :TX_DATASHARD DEBUG: Found ready operation [1743437888255:281474976715661] in PlanQueue unit at 72075186224037888 2025-03-31T16:18:08.209528Z node 13 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1743437888255:281474976715661 keys extracted: 0 2025-03-31T16:18:08.209782Z node 13 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:18:08.211402Z node 13 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1743437888255} 2025-03-31T16:18:08.211469Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:18:08.211513Z node 13 :TX_DATASHARD DEBUG: Complete [1743437888255 : 281474976715661] from 72075186224037888 at tablet 72075186224037888 send result to client [13:7488008715010072353:2837], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:18:08.211537Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:18:08.213938Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqpfxyev7w9p0zrdh2sc7228, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=NDE3N2FkMDYtZTYzM2RmNmItN2JlNTMxODEtYmFjZjQzYTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:18:08.216528Z node 13 :TX_DATASHARD INFO: Start scan, at: [13:7488008715010072381:2131], tablet: [13:7488008710715104818:2339], scanId: 4, table: /Root/LogsX, gen: 1, deadline: 2025-03-31T16:28:08.216258Z 2025-03-31T16:18:08.216664Z node 13 :TX_DATASHARD DEBUG: Got ScanDataAck, at: [13:7488008715010072381:2131], scanId: 4, table: /Root/LogsX, gen: 1, tablet: [13:7488008710715104818:2339], freeSpace: 8388608;limits:(bytes=0;chunks=0); 2025-03-31T16:18:08.216678Z node 13 :TX_DATASHARD DEBUG: Wakeup driver at: [13:7488008715010072381:2131] 2025-03-31T16:18:08.218072Z node 13 :TX_DATASHARD DEBUG: Range 0 of 1 exhausted: try next one. 
table: /Root/LogsX range: [(Utf8 : NULL, Timestamp : NULL) ; ()) next range: 2025-03-31T16:18:08.218106Z node 13 :TX_DATASHARD DEBUG: TableRanges is over, at: [13:7488008715010072381:2131], scanId: 4, table: /Root/LogsX 2025-03-31T16:18:08.218142Z node 13 :TX_DATASHARD DEBUG: Finish scan, at: [13:7488008715010072381:2131], scanId: 4, table: /Root/LogsX, reason: 0, abortEvent: 2025-03-31T16:18:08.218180Z node 13 :TX_DATASHARD DEBUG: Send ScanData, from: [13:7488008715010072381:2131], to: [13:7488008715010072377:2359], scanId: 4, table: /Root/LogsX, bytes: 11000, rows: 100, page faults: 0, finished: 1, pageFault: 0 2025-03-31T16:18:08.218296Z node 13 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-03-31T16:18:08.218392Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:18:08.218411Z node 13 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:18:08.218450Z node 13 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:18:08.218500Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:18:08.230143Z node 13 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437888255, txId: 281474976715661] shutting down 2025-03-31T16:18:08.762836Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqpfxyqs8t17jf4ry149neft, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=NTFlNmJjZjQtZGI1YWYzOGUtY2Y0MzVjM2QtYWE5MGE0NTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 100 rows Negative (wrong format): BAD_REQUEST Negative (wrong data): SCHEME_ERROR FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8016;columns=9; 2025-03-31T16:18:08.805678Z node 13 :ARROW_HELPER ERROR: fline=arrow_helpers.cpp:142;event=cannot_parse;message=Invalid: Ran out of field metadata, likely malformed;schema_columns_count=10;schema_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; Negative (less columns): BAD_REQUEST FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8984;columns=10; 2025-03-31T16:18:08.821091Z node 13 :ARROW_HELPER ERROR: fline=arrow_helpers.cpp:142;event=cannot_parse;message=Serialization error: batch is not valid: Invalid: Offsets buffer size (bytes): 400 isn't large enough for length: 100;schema_columns_count=10;schema_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; Negative (reordered columns): BAD_REQUEST 2025-03-31T16:18:08.840732Z node 13 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7488008693535234539:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:08.840816Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TestProgram::JsonValueBinary [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeExternalDataSource [GOOD] Test command err: 2025-03-31T16:18:03.569190Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008691154113806:2270];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:03.569473Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001051/r3tmp/tmpxTlk2H/pdisk_1.dat 2025-03-31T16:18:04.051585Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:04.066406Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:04.066491Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:04.073237Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17221, node 1 2025-03-31T16:18:04.251132Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:04.251154Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:04.251161Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:04.251293Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62930 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:04.682356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:06.651116Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008704039016520:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:06.651220Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:07.160562Z node 1 :TX_PROXY DEBUG: actor# [1:7488008691154113832:2141] Handle TEvProposeTransaction 2025-03-31T16:18:07.160625Z node 1 :TX_PROXY DEBUG: actor# [1:7488008691154113832:2141] TxId# 281474976710658 ProcessProposeTransaction 2025-03-31T16:18:07.160671Z node 1 :TX_PROXY DEBUG: actor# [1:7488008691154113832:2141] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7488008708333983840:2635] 2025-03-31T16:18:07.272959Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008708333983840:2635] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table" Columns { Name: "Key" Type: "Uint32" NotNull: false } Columns { Name: "Value" Type: "Utf8" NotNull: false } KeyColumnNames: "Key" PartitionConfig { } Temporary: false } } } UserToken: "" DatabaseName: "" 2025-03-31T16:18:07.273015Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008708333983840:2635] txid# 281474976710658 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T16:18:07.273393Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008708333983840:2635] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-03-31T16:18:07.273474Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008708333983840:2635] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:18:07.273655Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008708333983840:2635] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:18:07.273817Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008708333983840:2635] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T16:18:07.273863Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008708333983840:2635] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-03-31T16:18:07.274013Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008708333983840:2635] txid# 281474976710658 HANDLE EvClientConnected 2025-03-31T16:18:07.275750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:18:07.277615Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008708333983840:2635] txid# 281474976710658 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710658} 2025-03-31T16:18:07.277672Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008708333983840:2635] txid# 281474976710658 SEND to# [1:7488008708333983839:2343] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 53} 2025-03-31T16:18:07.447389Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008708333984000:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:07.447445Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:07.523108Z node 1 :TX_PROXY DEBUG: actor# [1:7488008691154113832:2141] Handle TEvProposeTransaction 2025-03-31T16:18:07.523138Z node 1 :TX_PROXY DEBUG: actor# [1:7488008691154113832:2141] TxId# 281474976710659 ProcessProposeTransaction 2025-03-31T16:18:07.523182Z node 1 :TX_PROXY DEBUG: actor# [1:7488008691154113832:2141] Cookie# 0 userReqId# "" txid# 281474976710659 SEND to# [1:7488008708333984014:2758] 2025-03-31T16:18:07.525578Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008708333984014:2758] txid# 281474976710659 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateCdcStream CreateCdcStream { TableName: "table" StreamDescription { Name: "a" Mode: ECdcStreamModeUpdate Format: ECdcStreamFormatJson VirtualTimestamps: false AwsRegion: "" } } } } UserToken: "" DatabaseName: "" PeerName: "" 2025-03-31T16:18:07.525637Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008708333984014:2758] txid# 281474976710659 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T16:18:07.525698Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008708333984014:2758] txid# 281474976710659 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:18:07.526006Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008708333984014:2758] txid# 281474976710659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:18:07.526133Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008708333984014:2758] HANDLE EvNavigateKeySetResult, txid# 281474976710659 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T16:18:07.526200Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008708333984014:2758] txid# 281474976710659 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710659 TabletId# 72057594046644480} 2025-03-31T16:18:07.526354Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008708333984014:2758] txid# 281474976710659 HANDLE EvClientConnected 2025-03-31T16:18:07.530317Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008708333984014:2758] txid# 281474976710659 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2025-03-31T16:18:07.530370Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008708333984014:2758] txid# 281474976710659 SEND to# [1:7488008708333984013:2355] Source {TEvProposeTransactionStatus txid# 281474976710659 Status# 53} 2025-03-31T16:18:07.626870Z node 1 :CHANGE_EXCHANGE WARN: [CdcChangeSenderMain][72075186224037888:1][1:7488008708333984207:2364] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:4:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2025-03-31T16:18:07.653272Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008708333984306:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:07.653367Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:07.661575Z node 1 :TX_PROXY DEBUG: actor# [1:7488008691154113832:2141] Handle TEvProposeTransaction 2025-03-31T16:18:07.661609Z node 1 :TX_PROXY DEBUG: actor# [1:7488008691154113832:2141] TxId# 281474976710660 ProcessProposeTransaction 2025-03-31T16:18:07.661653Z node 1 :TX_PROXY DEBUG: actor# [1:7488008691154113832:2141] Cookie# 0 userReqId# "" txid# 281474976710660 SEND to# [1:7488008708333984318:2976] 2025-03-31T16:18:07.663522Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008708333984318:2976] txid# 281474976710660 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateCdcStream CreateCdcStream { TableName: "table" StreamDescription { Name: "b" Mode: ECdcStreamModeUpdate Format: ECdcStreamFormatJson VirtualTimestamps: false AwsRegion: "" } } } } UserToken: "" DatabaseName: "" PeerName: "" 2025-03-31T16:18:07.663585Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008708333984318:2976] txid# 281474976710660 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T16:18:07.663628Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008708333984318:2976] txid# 281474976710660 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:18:07.663877Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008708333984318:2976] txid# 281474976710660 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:18:07.663945Z node 1 :TX_PROXY DEBUG: ... TImport::TTxProgress: DoComplete 2025-03-31T16:18:09.185976Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-03-31T16:18:09.185989Z node 1 :IMPORT DEBUG: TImport::TTxProgress: OnAllocateResult: txId# 281474976715766, id# 281474976710664 2025-03-31T16:18:09.186043Z node 1 :IMPORT INFO: TImport::TTxProgress: CreateChangefeed propose: info# { Id: 281474976710664 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/Root/table' DstPathId: [OwnerId: 72057594046644480, LocalPathId: 11] State: CreateChangefeed SubState: Proposed WaitTxId: 0 Issue: '' }, txId# 281474976715766 2025-03-31T16:18:09.186137Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-03-31T16:18:09.190173Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-03-31T16:18:09.190203Z node 1 :IMPORT DEBUG: TImport::TTxProgress: OnModifyResult: txId# 281474976715766, status# StatusAccepted 2025-03-31T16:18:09.190328Z node 1 :IMPORT INFO: TImport::TTxProgress: Wait for completion: info# { Id: 281474976710664 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/Root/table' DstPathId: [OwnerId: 72057594046644480, LocalPathId: 11] State: CreateChangefeed SubState: Subscribed WaitTxId: 281474976715766 Issue: '' } 2025-03-31T16:18:09.194107Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-03-31T16:18:09.239552Z node 1 :CHANGE_EXCHANGE WARN: [CdcChangeSenderMain][72075186224037896:1][1:7488008716923921128:2488] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:17:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2025-03-31T16:18:09.251804Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-03-31T16:18:09.251835Z node 1 :IMPORT DEBUG: 
TImport::TTxProgress: OnNotifyResult: txId# 281474976715766 2025-03-31T16:18:09.251911Z node 1 :IMPORT INFO: TImport::TTxProgress: Allocate txId: info# { Id: 281474976710664 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/Root/table' DstPathId: [OwnerId: 72057594046644480, LocalPathId: 11] State: CreateChangefeed SubState: AllocateTxId WaitTxId: 0 Issue: '' } 2025-03-31T16:18:09.253091Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-03-31T16:18:09.253173Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-03-31T16:18:09.253187Z node 1 :IMPORT DEBUG: TImport::TTxProgress: OnAllocateResult: txId# 281474976715767, id# 281474976710664 2025-03-31T16:18:09.253229Z node 1 :IMPORT INFO: TImport::TTxProgress: CreateConsumers propose: info# { Id: 281474976710664 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/Root/table' DstPathId: [OwnerId: 72057594046644480, LocalPathId: 11] State: CreateChangefeed SubState: Proposed WaitTxId: 0 Issue: '' }, txId# 281474976715767 2025-03-31T16:18:09.253568Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-03-31T16:18:09.254123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715767:0, at schemeshard: 72057594046644480 2025-03-31T16:18:09.255753Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-03-31T16:18:09.255775Z node 1 :IMPORT DEBUG: TImport::TTxProgress: OnModifyResult: txId# 281474976715767, status# StatusAccepted 2025-03-31T16:18:09.255847Z node 1 :IMPORT INFO: TImport::TTxProgress: Wait for completion: info# { Id: 281474976710664 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/Root/table' DstPathId: [OwnerId: 72057594046644480, LocalPathId: 11] State: CreateChangefeed SubState: Subscribed WaitTxId: 281474976715767 Issue: '' } 2025-03-31T16:18:09.257396Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-03-31T16:18:09.281060Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-03-31T16:18:09.281093Z node 1 :IMPORT DEBUG: TImport::TTxProgress: OnNotifyResult: txId# 281474976715767 2025-03-31T16:18:09.283018Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-03-31T16:18:09.303684Z node 1 :TX_PROXY DEBUG: [GetImport] [1:7488008716923921304:2501] [0] Resolve database: name# /Root 2025-03-31T16:18:09.304050Z node 1 :TX_PROXY DEBUG: [GetImport] [1:7488008716923921304:2501] [0] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-31T16:18:09.304091Z node 1 :TX_PROXY DEBUG: [GetImport] [1:7488008716923921304:2501] [0] Send request: 
schemeShardId# 72057594046644480 2025-03-31T16:18:09.304674Z node 1 :TX_PROXY DEBUG: [GetImport] [1:7488008716923921304:2501] [0] Handle TEvImport::TEvGetImportResponse: record# Entry { Id: 281474976710664 Status: SUCCESS Progress: PROGRESS_DONE ImportFromS3Settings { endpoint: "localhost:17220" scheme: HTTP bucket: "test_bucket" items { source_prefix: "table" destination_path: "/Root/table" } } StartTime { seconds: 1743437888 } EndTime { seconds: 1743437889 } } 2025-03-31T16:18:09.320563Z node 1 :TX_PROXY DEBUG: actor# [1:7488008691154113832:2141] Handle TEvNavigate describe path /Root/table 2025-03-31T16:18:09.320617Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008716923921310:4913] HANDLE EvNavigateScheme /Root/table 2025-03-31T16:18:09.320770Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008716923921310:4913] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-31T16:18:09.320835Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008716923921310:4913] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/Root/table" Options { ShowPrivateTable: false } 2025-03-31T16:18:09.321642Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008716923921310:4913] Handle TEvDescribeSchemeResult Forward to# [1:7488008716923921308:2502] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/table" PathDescription { Self { Name: "table" PathId: 11 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715760 CreateStep: 1743437888976 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 4 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "table" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 
4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 0 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } } TableSchemaVersion: 4 IsBackup: false CdcStreams { Name: "a" Mode: ECdcStreamModeUpdate PathId { OwnerId: 72057594046644480 LocalId: 14 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatJson VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } CdcStreams { Name: "b" Mode: ECdcStreamModeUpdate PathId { OwnerId: 72057594046644480 LocalId: 16 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatJson VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } CdcStreams { Name: "c" Mode: ECdcStreamModeUpdate PathId { OwnerId: 72057594046644480 LocalId: 12 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatJson VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 9 PathsLimit: 10000 ShardsInside: 8 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 11 PathOwnerId: 72057594046644480 >> TSchemeShardTest::ListNotCreatedIndexCase [GOOD] >> TSchemeShardTest::FindSubDomainPathId ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::JsonValueBinary [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: 
"O\016\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207?\004\001\235?\006\001\235?\n\001\032\000\t\211\004?\020\235?\002\001\235?\004\000\032\000\t\251\000?\026\002\000\t\251\000?\030\002\000\000\t\211\002?\022\235?\010\001\032\000\t\211\n?&?\026?\030?\002?\004?\010,ScalarApply\000?\036?\"\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\010?\002?\000\207?\004?4$IfPresent\000?.\t\251\000?\000\002\000\t\211\n?4\201\213\004\213\004\203\n\203\005@\207\203\001H?@?4?D?D VisitAll\000\t\211\020?H\211\006?H\207\203\021H\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\006\026\000\t\211\010?X\203\005@\200\203\005@\202\022\000\003?nNJson2.JsonDocumentSqlValueConvertToUtf8\202\003?p\000\002\017\003?Z\000\003?\\\000\003?^\000\003?`\000\027?b?:\t\211\014?d\211\002?d\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\206\203\005@\200\203\005@\202\022\000\003?\222\"Json2.CompilePath\202\003?\224\000\002\017\003?\210\000\003?\212\000\003?\214\000\003?\216\000?2\036\010\000?j\276\t\251\000?@\002\000\'?4\t\251\000?D\002\000?\264\004\'?4\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\016\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207?\004\001\235?\006\001\235?\n\001\032\000\t\211\004?\020\235?\002\001\235?\004\000\032\000\t\251\000?\026\002\000\t\251\000?\030\002\000\000\t\211\002?\022\235?\010\001\032\000\t\211\n?&?\026?\030?\002?\004?\010,ScalarApply\000?\036?\"\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\010?\002?\000\207?\004?4$IfPresent\000?.\t\251\000?\000\002\000\t\211\n?4\201\213\004\213\004\203\n\203\005@\207\203\001H?@?4?D?D VisitAll\000\t\211\020?H\211\006?H\207\203\021H\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\006\026\000\t\211\010?X\203\005@\200\203\005@\202\022\000\003?nNJson2.JsonDocumentSqlValueConvertToUtf8\202\003?p\000\002\017\003?Z\000\003?\\\000\003?^\000\003?`\000\027?b?:\t\211\014?d\211\002?d\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\206\203\005@\200\203\005@\202\022\000\003?\222\"Json2.CompilePath\202\003?\224\000\002\017\003?\210\000\003?\212\000\003?\214\000\003?\216\000?2\036\010\000?j\276\t\251\000?@\002\000\'?4\t\251\000?D\002\000?\264\004\'?4\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:154;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N3(17):{\"i\":\"6,15\",\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,6"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N1(2):{\"p\":{\"data\":[{\"name\":\"json_binary\",\"id\":6}]},\"o\":\"6\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N3[shape=box, label="N2(7):{\"i\":\"6\",\"p\":{\"address\":{\"name\":\"json_binary\",\"id\":6}},\"o\":\"6\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, 
label="N4(17):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N0->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"i":"6,15","o":"16","t":"Calculation"},"w":17,"id":1},"3":{"p":{"i":"6","p":{"address":{"name":"json_binary","id":6}},"o":"6","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"p":{"data":[{"name":"json_binary","id":6}]},"o":"6","t":"FetchOriginalData"},"w":2,"id":2},"4":{"p":{"i":"16","t":"Projection"},"w":17,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; json_binary: [ 7B226B6579223A2276616C7565227D, 7B226B6579223A31307D, 7B226B6579223A302E317D, 7B226B6579223A66616C73657D, 7B22616E6F74686572223A2276616C7565227D, 5B5D ] json_binary: [ 010200002100000014000000030300000200000000040000C00400006B65790076616C756500, 0102000021000000140000008403000001000000800300006B6579000000000000002440, 0102000021000000140000008403000001000000800300006B6579009A9999999999B93F, 0102000021000000140000000000000001000000800300006B657900, 01020000210000001400000003030000020000008004000040050000616E6F746865720076616C756500, 010100000000000000000000 ] Check output for Utf8 FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\016\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\032\000\t\211\004?\022\235?\002\001\235?\004\000\032\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\032\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203\014?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\203\021H\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?p N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N1(2):{\"p\":{\"data\":[{\"name\":\"json_binary\",\"id\":6}]},\"o\":\"6\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N3[shape=box, label="N2(7):{\"i\":\"6\",\"p\":{\"address\":{\"name\":\"json_binary\",\"id\":6}},\"o\":\"6\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N4(17):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N0->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"i":"6,15","o":"16","t":"Calculation"},"w":17,"id":1},"3":{"p":{"i":"6","p":{"address":{"name":"json_binary","id":6}},"o":"6","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"p":{"data":[{"name":"json_binary","id":6}]},"o":"6","t":"FetchOriginalData"},"w":2,"id":2},"4":{"p":{"i":"16","t":"Projection"},"w":17,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T ... 21H\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?p@Json2.JsonDocumentSqlValueNumber\202\003?r\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?d?<\t\211\014?f\211\002?f\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\210\203\005@\200\203\005@\202\022\000\003?\224\"Json2.CompilePath\202\003?\226\000\002\017\003?\212\000\003?\214\000\003?\216\000\003?\220\000?4\036\010\000?l\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000\t\211\004?6\203\005@?F\030Invoke\000\003?\270\016Convert?\266\001\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\016\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207\203B\001\235?\006\001\235?\014\001\032\000\t\211\004?\022\235?\002\001\235?\004\000\032\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\032\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203@?B?6?F?6 VisitAll\000\t\211\020?J\211\006?J\207\203\021H\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?p@Json2.JsonDocumentSqlValueNumber\202\003?r\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?d?<\t\211\014?f\211\002?f\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\210\203\005@\200\203\005@\202\022\000\003?\224\"Json2.CompilePath\202\003?\226\000\002\017\003?\212\000\003?\214\000\003?\216\000\003?\220\000?4\036\010\000?l\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000\t\211\004?6\203\005@?F\030Invoke\000\003?\270\016Convert?\266\001\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:154;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N3(17):{\"i\":\"6,15\",\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,6"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N1(2):{\"p\":{\"data\":[{\"name\":\"json_binary\",\"id\":6}]},\"o\":\"6\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N3[shape=box, label="N2(7):{\"i\":\"6\",\"p\":{\"address\":{\"name\":\"json_binary\",\"id\":6}},\"o\":\"6\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N4(17):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N0->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"i":"6,15","o":"16","t":"Calculation"},"w":17,"id":1},"3":{"p":{"i":"6","p":{"address":{"name":"json_binary","id":6}},"o":"6","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"p":{"data":[{"name":"json_binary","id":6}]},"o":"6","t":"FetchOriginalData"},"w":2,"id":2},"4":{"p":{"i":"16","t":"Projection"},"w":17,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; json_binary: [ 7B226B6579223A2276616C7565227D, 7B226B6579223A31307D, 7B226B6579223A302E317D, 7B226B6579223A66616C73657D, 7B22616E6F74686572223A2276616C7565227D, 5B5D ] json_binary: [ 010200002100000014000000030300000200000000040000C00400006B65790076616C756500, 0102000021000000140000008403000001000000800300006B6579000000000000002440, 0102000021000000140000008403000001000000800300006B6579009A9999999999B93F, 
0102000021000000140000000000000001000000800300006B657900, 01020000210000001400000003030000020000008004000040050000616E6F746865720076616C756500, 010100000000000000000000 ] Check output for Float FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\016\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207\203@\001\235?\006\001\235?\014\001\032\000\t\211\004?\022\235?\002\001\235?\004\000\032\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\032\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203@?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\203\021H\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?p@Json2.JsonDocumentSqlValueNumber\202\003?r\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?d?<\t\211\014?f\211\002?f\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\210\203\005@\200\203\005@\202\022\000\003?\224\"Json2.CompilePath\202\003?\226\000\002\017\003?\212\000\003?\214\000\003?\216\000\003?\220\000?4\036\010\000?l\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\266\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\016\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207\203@\001\235?\006\001\235?\014\001\032\000\t\211\004?\022\235?\002\001\235?\004\000\032\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\032\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203@?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\203\021H\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?p@Json2.JsonDocumentSqlValueNumber\202\003?r\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?d?<\t\211\014?f\211\002?f\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\210\203\005@\200\203\005@\202\022\000\003?\224\"Json2.CompilePath\202\003?\226\000\002\017\003?\212\000\003?\214\000\003?\216\000\003?\220\000?4\036\010\000?l\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\266\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:154;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N3(17):{\"i\":\"6,15\",\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,6"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N1(2):{\"p\":{\"data\":[{\"name\":\"json_binary\",\"id\":6}]},\"o\":\"6\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N3[shape=box, label="N2(7):{\"i\":\"6\",\"p\":{\"address\":{\"name\":\"json_binary\",\"id\":6}},\"o\":\"6\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N4(17):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N0->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"i":"6,15","o":"16","t":"Calculation"},"w":17,"id":1},"3":{"p":{"i":"6","p":{"address":{"name":"json_binary","id":6}},"o":"6","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"p":{"data":[{"name":"json_binary","id":6}]},"o":"6","t":"FetchOriginalData"},"w":2,"id":2},"4":{"p":{"i":"16","t":"Projection"},"w":17,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; json_binary: [ 7B226B6579223A2276616C7565227D, 7B226B6579223A31307D, 7B226B6579223A302E317D, 7B226B6579223A66616C73657D, 7B22616E6F74686572223A2276616C7565227D, 5B5D ] json_binary: [ 010200002100000014000000030300000200000000040000C00400006B65790076616C756500, 0102000021000000140000008403000001000000800300006B6579000000000000002440, 0102000021000000140000008403000001000000800300006B6579009A9999999999B93F, 0102000021000000140000000000000001000000800300006B657900, 
01020000210000001400000003030000020000008004000040050000616E6F746865720076616C756500, 010100000000000000000000 ] Check output for Double FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10DoubleTypeE; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::TwoServerlessDbs [GOOD] Test command err: 2025-03-31T16:15:12.315947Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:12.316194Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:15:12.316267Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d04/r3tmp/tmpam71Nr/pdisk_1.dat 2025-03-31T16:15:12.734282Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13123, node 1 2025-03-31T16:15:13.002401Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:15:13.002457Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:15:13.002500Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:15:13.004874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:15:13.005301Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:15:13.103997Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:15:13.104198Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:15:13.121622Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4511 2025-03-31T16:15:13.726422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:15:17.410520Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-31T16:15:17.455569Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:15:17.455723Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:15:17.487847Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:15:17.490393Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:15:17.769476Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:17.770124Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:17.770755Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:17.770895Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:17.771159Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:17.771289Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:17.771414Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:17.771496Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:17.771599Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:17.944495Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:15:17.944619Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:15:17.962604Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:15:18.122345Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:15:18.187057Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-31T16:15:18.187243Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-31T16:15:18.221576Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-31T16:15:18.222328Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-31T16:15:18.222550Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-31T16:15:18.222628Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-31T16:15:18.222802Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-31T16:15:18.222897Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-31T16:15:18.222959Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-31T16:15:18.223022Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-31T16:15:18.223561Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-31T16:15:18.248654Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-31T16:15:18.248834Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1872:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-31T16:15:18.258738Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2608] 2025-03-31T16:15:18.267498Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1908:2617] 2025-03-31T16:15:18.268262Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1908:2617], schemeshard id = 72075186224037897 2025-03-31T16:15:18.271112Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-03-31T16:15:18.297632Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-31T16:15:18.297691Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-31T16:15:18.297755Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-03-31T16:15:18.316095Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-31T16:15:18.324879Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-31T16:15:18.325045Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-31T16:15:18.518725Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-31T16:15:18.680739Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-31T16:15:18.814551Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-31T16:15:19.749719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-31T16:15:20.667013Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:15:20.904924Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2025-03-31T16:15:20.904995Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037899 2025-03-31T16:15:20.905111Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2596:2948], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037899 2025-03-31T16:15:20.906568Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2598:2950] 2025-03-31T16:15:20.906717Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2598:2950], schemeshard id = 72075186224037899 2025-03-31T16:15:22.028033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-31T16:15:22.631976Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:15:22.892144Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037905 2025-03-31T16:15:22.892225Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037905 2025-03-31T16:15:22.892381Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:3081:3154], at schemeshard: 72075186224037905, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037905 2025-03-31T16:15:22.893683Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:3082:3155] 2025-03-31T16:15:22.894188Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:3082:3155], schemeshard id = 72075186224037905 2025-03-31T16:15:23.968600Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3204:3409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:23.968775Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:23.987614Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72075186224037899 2025-03-31T16:15:24.190065Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3354:3444], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:24.190429Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:24.192097Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:3359:3448]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:15:24.192432Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-31T16:15:24.192687Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2025-03-31T16:15:24.192769Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:3362:3451] 2025-03-31T16:15:24.192844Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:33 ... 25-03-31T16:18:03.385578Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table. 2025-03-31T16:18:03.385613Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-03-31T16:18:03.386081Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2025-03-31T16:18:03.407693Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-03-31T16:18:03.412962Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:8940:6405], DatabaseId: /Root/Shared, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:03.413106Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:8950:6410], DatabaseId: /Root/Shared, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:03.413255Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Shared, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:03.430482Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897 2025-03-31T16:18:03.507841Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:8954:6413], DatabaseId: /Root/Shared, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-03-31T16:18:03.707132Z node 2 :TX_PROXY ERROR: Actor# [2:9052:6461] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Shared/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:03.762438Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 118 ], ReplyToActorId[ [2:9081:6476]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:18:03.762783Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 118 ] 2025-03-31T16:18:03.763220Z node 2 :STATISTICS DEBUG: [72075186224037894] EvRequestStats, node id = 2, schemeshard count = 1, urgent = 0 2025-03-31T16:18:03.763294Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-03-31T16:18:03.763444Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-31T16:18:03.763530Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 118, ReplyToActorId = [2:9081:6476], StatRequests.size() = 1 2025-03-31T16:18:03.909690Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MjI4YzhiZmItNzQ1MmMzNjktZTEwYWVlYzAtMjkxZTc2ZWQ=, TxId: 2025-03-31T16:18:03.909771Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MjI4YzhiZmItNzQ1MmMzNjktZTEwYWVlYzAtMjkxZTc2ZWQ=, TxId: 2025-03-31T16:18:03.910391Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-31T16:18:03.928614Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-03-31T16:18:03.928709Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 
2025-03-31T16:18:03.985246Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-03-31T16:18:03.985324Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-03-31T16:18:04.056632Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:3728:3401], schemeshard count = 1 2025-03-31T16:18:04.375523Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037899 2025-03-31T16:18:04.375584Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 203.000000s, at schemeshard: 72075186224037899 2025-03-31T16:18:04.375836Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037899, stats size# 26 2025-03-31T16:18:04.395783Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-03-31T16:18:04.613770Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 119 ], ReplyToActorId[ [2:9119:6501]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:18:04.614030Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 119 ] 2025-03-31T16:18:04.614062Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 119, ReplyToActorId = [2:9119:6501], StatRequests.size() = 1 2025-03-31T16:18:06.039337Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 120 ], ReplyToActorId[ [2:9170:6533]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:18:06.039738Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 120 ] 2025-03-31T16:18:06.039784Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 120, ReplyToActorId = [2:9170:6533], StatRequests.size() = 1 2025-03-31T16:18:06.525842Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037905 2025-03-31T16:18:06.525921Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 229.000000s, at schemeshard: 72075186224037905 2025-03-31T16:18:06.526201Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037905, stats size# 26 2025-03-31T16:18:06.540508Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-03-31T16:18:06.796790Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-31T16:18:06.808125Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:18:06.808212Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-31T16:18:06.808250Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 2] is data table. 2025-03-31T16:18:06.808283Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037899, LocalPathId: 2] 2025-03-31T16:18:06.808836Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Shared 2025-03-31T16:18:06.811416Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-03-31T16:18:06.824190Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NjU1YzZhMTAtNWI4YmI2YzktM2I0YTE4YjctMTNmMmQ1NWI=, TxId: 2025-03-31T16:18:06.824251Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NjU1YzZhMTAtNWI4YmI2YzktM2I0YTE4YjctMTNmMmQ1NWI=, TxId: 2025-03-31T16:18:06.824718Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-31T16:18:06.839228Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037899, LocalPathId: 2] 2025-03-31T16:18:06.839282Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-03-31T16:18:07.498943Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 121 ], ReplyToActorId[ [2:9250:6586]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:18:07.499377Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 121 ] 2025-03-31T16:18:07.499429Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 121, ReplyToActorId = [2:9250:6586], StatRequests.size() = 1 2025-03-31T16:18:09.057505Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 122 ], ReplyToActorId[ [2:9304:6618]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:18:09.057911Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 122 ] 2025-03-31T16:18:09.057969Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 122, ReplyToActorId = [2:9304:6618], StatRequests.size() = 1 2025-03-31T16:18:09.815193Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 3 2025-03-31T16:18:09.815836Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-31T16:18:09.816297Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-31T16:18:09.830801Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:18:09.830861Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-31T16:18:09.830894Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037905, LocalPathId: 2] is data table. 2025-03-31T16:18:09.830927Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037905, LocalPathId: 2] 2025-03-31T16:18:09.831459Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Shared 2025-03-31T16:18:09.834179Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-03-31T16:18:09.848440Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MjgxODQxMjMtYzlhM2VlNTAtZTM5NWE4ZjMtNzViOTdkOTI=, TxId: 2025-03-31T16:18:09.848509Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MjgxODQxMjMtYzlhM2VlNTAtZTM5NWE4ZjMtNzViOTdkOTI=, TxId: 2025-03-31T16:18:09.849087Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-31T16:18:09.866915Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037905, LocalPathId: 2] 2025-03-31T16:18:09.866987Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-03-31T16:18:10.536848Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 123 ], ReplyToActorId[ [2:9381:6667]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:18:10.537305Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 123 ] 2025-03-31T16:18:10.537360Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 123, ReplyToActorId = [2:9381:6667], StatRequests.size() = 1 2025-03-31T16:18:10.538299Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 124 ], ReplyToActorId[ [2:9383:6669]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:18:10.543365Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 124 ] 2025-03-31T16:18:10.543440Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 124, ReplyToActorId = [2:9383:6669], StatRequests.size() = 1 >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeKesus [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeExtSubDomain [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeFileStore [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeReplication |88.4%| [TA] $(B)/ydb/core/tx/columnshard/engines/ut/test-results/unittest/{meta.json ... results_accumulator.log} |88.4%| [TA] {RESULT} $(B)/ydb/core/tx/columnshard/engines/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TSchemeShardTest::FindSubDomainPathId [GOOD] >> TSchemeShardTest::FindSubDomainPathIdActor >> KqpEffects::InsertAbort_Params_Duplicates+UseSink >> KqpInplaceUpdate::SingleRowPgNotNull+UseSink >> KqpImmediateEffects::DeleteAfterUpsert >> KqpEffects::InsertRevert_Literal_Duplicates >> KqpInplaceUpdate::SingleRowSimple+UseSink >> KqpImmediateEffects::UpdateOn >> KqpInplaceUpdate::Negative_SingleRowWithValueCast+UseSink >> KqpImmediateEffects::WriteThenReadWithCommit >> KqpEffects::InsertAbort_Params_Conflict+UseSink >> KqpEffects::InsertAbort_Literal_Success >> KqpImmediateEffects::Insert >> KqpEffects::InsertAbort_Literal_Conflict+UseSink >> KqpImmediateEffects::ReplaceDuplicates >> KqpImmediateEffects::ImmediateUpdate >> KqpImmediateEffects::ConflictingKeyRW1WR2 >> KqpImmediateEffects::MultiShardUpsertAfterRead >> KqpImmediateEffects::ConflictingKeyR1WR2 >> KqpInplaceUpdate::SingleRowIf+UseSink >> KqpImmediateEffects::InteractiveTxWithReadAtTheEnd+UseSink >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeCdcStream [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeBlobDepot [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeBackupCollection [GOOD] >> BackupRestore::TestAllIndexTypes-EIndexTypeInvalid [GOOD] >> TSchemeShardTest::FindSubDomainPathIdActor [GOOD] >> TSchemeShardTest::FindSubDomainPathIdActorAsync >> KqpImmediateEffects::ConflictingKeyRW1RR2 >> BackupRestoreS3::RestoreTablePartitioningSettings [GOOD] >> BackupRestoreS3::RestoreIndexTablePartitioningSettings ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/ttl_tiering/py3test >> ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change 2025-03-31 16:17:56,743 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-03-31 16:17:57,372 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. 
Process tree before termination: pid rss ref pdirt 208704 549M 527M 530M ydb-tests-olap-ttl_tiering --basetemp /home/runner/.ya/build/build_root/0l5c/001a0c/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-modu 210593 5.0G 5.0G 4.5G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/0l5c/001a0c/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/ttl_de 213737 360M 344M 350M └─ moto_server s3 --port 5077 Test command err: File "library/python/pytest/main.py", line 101, in main rc = pytest.main( File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main( File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main return wrap_session(config, _main) File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session session.exitstatus = doit(config, session) or 0 File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main config.hook.pytest_runtestloop(session=session) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol runtestprotocol(item, nextitem=nextitem) File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol reports.append(call_and_report(item, "call", log)) File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report call = call_runtest_hook(item, when, **kwds) File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook return CallInfo.from_call( File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call result: Optional[TResult] = func() File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in lambda: ihook(item=item, **kwds), when=when, reraise=reraise File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in 
_multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call item.runtest() File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest self.ihook.pytest_pyfunc_call(pyfuncitem=self) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call pyfuncitem.retval = testfunction(**testargs) File "ydb/tests/olap/ttl_tiering/ttl_delete_s3.py", line 117, in test_data_unchanged_after_ttl_change self.ydb_client.query(""" File "ydb/tests/olap/common/ydb_client.py", line 24, in query return self.session_pool.execute_with_retries(statement) File "contrib/python/ydb/py3/ydb/query/pool.py", line 202, in execute_with_retries return retry_operation_sync(wrapped_callee, retry_settings) File "contrib/python/ydb/py3/ydb/retries.py", line 133, in retry_operation_sync for next_opt in opt_generator: File "contrib/python/ydb/py3/ydb/retries.py", line 94, in retry_operation_impl result = YdbRetryOperationFinalResult(callee(*args, **kwargs)) File "contrib/python/ydb/py3/ydb/query/pool.py", line 200, in wrapped_callee return [result_set for result_set in it] File "contrib/python/ydb/py3/ydb/_utilities.py", line 173, in __next__ return self._next() File "contrib/python/ydb/py3/ydb/_utilities.py", line 164, in _next res = self.wrapper(next(self.it)) File "contrib/python/grpcio/py3/grpc/_channel.py", line 475, in __next__ return self._next() File "contrib/python/grpcio/py3/grpc/_channel.py", line 872, in _next _common.wait(self._state.condition.wait, _response_ready) File "contrib/python/grpcio/py3/grpc/_common.py", line 150, in wait _wait_once(wait_fn, MAXIMUM_WAIT_TIMEOUT, spin_cb) File "contrib/python/grpcio/py3/grpc/_common.py", line 112, in _wait_once wait_fn(timeout=timeout) File "contrib/tools/python3/Lib/threading.py", line 359, in wait gotit = waiter.acquire(True, timeout) File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown traceback.print_stack(file=sys.stderr) Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: ...nner/.ya/build/build_root/0l5c/001a0c/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/0l5c/001a0c/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/ttl_delete_s3/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/0l5c/001a0c', '--source-root', '/home/runner/.ya/build/build_root/0l5c/001a0c/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/0l5c/001a0c/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/ttl_delete_s3/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/ttl_tiering', '--test-tool-bin', '/home/runner/.ya/tools/v4/8330113388/test_tool', '--ya-version', '2', '--collect-cores', 
'--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--dep-root', 'ydb/tests/olap/ttl_tiering', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'ttl_delete_s3.py']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1749, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("...nner/.ya/build/build_root/0l5c/001a0c/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/0l5c/001a0c/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/ttl_delete_s3/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/0l5c/001a0c', '--source-root', '/home/runner/.ya/build/build_root/0l5c/001a0c/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/0l5c/001a0c/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/ttl_delete_s3/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/ttl_tiering', '--test-tool-bin', '/home/runner/.ya/tools/v4/8330113388/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--dep-root', 'ydb/tests/olap/ttl_tiering', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'ttl_delete_s3.py']' stopped by 600 seconds timeout",), {}) >> BackupRestore::RestoreIndexTablePartitioningSettings [GOOD] >> BackupRestore::RestoreTableSplitBoundaries ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestore::TestAllIndexTypes-EIndexTypeInvalid [GOOD] Test command err: 2025-03-31T16:18:03.516000Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008693259466651:2207];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:03.516129Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00107e/r3tmp/tmpmNSpHX/pdisk_1.dat 2025-03-31T16:18:03.988742Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Unknown -> Disconnected 2025-03-31T16:18:03.988845Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:03.993823Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:18:04.040666Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1027, node 1 2025-03-31T16:18:04.072962Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:18:04.083291Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:18:04.232573Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:04.232597Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:04.232604Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:04.232760Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22615 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:04.681563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
Backup "/Root" to "/home/runner/.ya/build/build_root/0l5c/00107e/r3tmp/tmpuMx6T9/"Create temporary directory "/Root/~backup_20250331T161804" in databaseProcess "/home/runner/.ya/build/build_root/0l5c/00107e/r3tmp/tmpuMx6T9/dir"Create directory "/Root/~backup_20250331T161804/dir" in databaseWrite ACL into "/home/runner/.ya/build/build_root/0l5c/00107e/r3tmp/tmpuMx6T9/dir/permissions.pb"Remove directory "/Root/~backup_20250331T161804/dir"2025-03-31T16:18:04.910489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710661:0, at schemeshard: 72057594046644480 Remove temporary directory "/Root/~backup_20250331T161804" in database2025-03-31T16:18:04.942693Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710662:0, at schemeshard: 72057594046644480 Backup completed successfully2025-03-31T16:18:04.973402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710663:0, at schemeshard: 72057594046644480 Restore "/home/runner/.ya/build/build_root/0l5c/00107e/r3tmp/tmpuMx6T9/" to "/Root"Resolved db base path: "/Root"Restore folder "/home/runner/.ya/build/build_root/0l5c/00107e/r3tmp/tmpuMx6T9/" to "/Root"Process "/home/runner/.ya/build/build_root/0l5c/00107e/r3tmp/tmpuMx6T9/dir"Restore empty directory "/home/runner/.ya/build/build_root/0l5c/00107e/r3tmp/tmpuMx6T9/dir" to "/Root/dir"Restore ACL "/home/runner/.ya/build/build_root/0l5c/00107e/r3tmp/tmpuMx6T9/dir" to "/Root/dir"Read ACL from "/home/runner/.ya/build/build_root/0l5c/00107e/r3tmp/tmpuMx6T9/dir/permissions.pb"2025-03-31T16:18:05.055322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72057594046644480 Restore completed successfully 2025-03-31T16:18:08.007032Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488008711623359454:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:08.007137Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00107e/r3tmp/tmpfZakZw/pdisk_1.dat 2025-03-31T16:18:08.180073Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:08.217800Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:08.217885Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:08.236884Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30508, node 4 2025-03-31T16:18:08.294394Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:08.294419Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:08.294429Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:08.294564Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15443 
WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:08.566059Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:11.283115Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488008724508262376:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:11.283222Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:11.461837Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:18:11.613328Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488008724508262558:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:11.613403Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:11.744701Z node 4 :CHANGE_EXCHANGE WARN: [CdcChangeSenderMain][72075186224037888:1][4:7488008724508262745:2359] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:4:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2025-03-31T16:18:11.791747Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488008724508262850:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:11.791854Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:11.838809Z node 4 :CHANGE_EXCHANGE WARN: [CdcChangeSenderMain][72075186224037888:1][4:7488008724508263022:2379] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:6:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2025-03-31T16:18:11.856991Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488008724508263126:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:11.857043Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:11.914117Z node 4 :CHANGE_EXCHANGE WARN: [CdcChangeSenderMain][72075186224037888:1][4:7488008724508263306:2400] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:8:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } GetChangefeedAndTopicDescriptions: Backup "/Root" to "/home/runner/.ya/build/build_root/0l5c/00107e/r3tmp/tmpzDlWCT/"Create temporary directory "/Root/~backup_20250331T161811" in databaseProcess "/home/runner/.ya/build/build_root/0l5c/00107e/r3tmp/tmpzDlWCT/table"Copy tables: { src: "/Root/table", dst: "/Root/~backup_20250331T161811/table" }Backup table "/Root/~backup_20250331T161811/table" to "/home/runner/.ya/build/build_root/0l5c/00107e/r3tmp/tmpzDlWCT/table"Describe table "/Root/~backup_20250331T161811/table"Write scheme into "/home/runner/.ya/build/build_root/0l5c/00107e/r3tmp/tmpzDlWCT/table/scheme.pb"Describe table "/Root/table"Process "/home/runner/.ya/build/build_root/0l5c/00107e/r3tmp/tmpzDlWCT/table/a"Write changefeed into "/home/runner/.ya/build/build_root/0l5c/00107e/r3tmp/tmpzDlWCT/table/a/changefeed_description.pb"Write topic into "/home/runner/.ya/build/build_root/0l5c/00107e/r3tmp/tmpzDlWCT/table/a/topic_description.pb"Process "/home/runner/.ya/build/build_root/0l5c/00107e/r3tmp/tmpzDlWCT/table/b"Write changefeed into "/home/runner/.ya/build/build_root/0l5c/00107e/r3tmp/tmpzDlWCT/table/b/changefeed_description.pb"Write topic into "/home/runner/.ya/build/build_root/0l5c/00107e/r3tmp/tmpzDlWCT/table/b/topic_description.pb"Process "/home/runner/.ya/build/build_root/0l5c/00107e/r3tmp/tmpzDlWCT/table/c"Write changefeed into "/home/runner/.ya/build/build_root/0l5c/00107e/r3tmp/tmpzDlWCT/table/c/changefeed_description.pb"Write topic into "/home/runner/.ya/build/build_root/0l5c/00107e/r3tmp/tmpzDlWCT/table/c/topic_description.pb"Write ACL into "/home/runner/.ya/build/build_root/0l5c/00107e/r3tmp/tmpzDlWCT/table/permissions.pb"Read table "/Root/~backup_20250331T161811/table"Write data into "/home/runner/.ya/build/build_root/0l5c/00107e/r3tmp/tmpzDlWCT/table/data_00.csv"Drop table "/Root/~backup_20250331T161811/table"Remove temporary directory "/Root/~backup_20250331T161811" in database2025-03-31T16:18:12.209219Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037895 not found 2025-03-31T16:18:12.218315Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710667:0, at schemeshard: 72057594046644480 Backup completed successfully2025-03-31T16:18:12.234747Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488008728803231141:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:12.234856Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:12.247388Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976710668:2, at schemeshard: 72057594046644480 2025-03-31T16:18:12.268218Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037889 not found 2025-03-31T16:18:12.268263Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037890 not found 2025-03-31T16:18:12.268899Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037893 not found 2025-03-31T16:18:12.271381Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037891 not found 2025-03-31T16:18:12.271598Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037892 not found 2025-03-31T16:18:12.271639Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037894 not found 2025-03-31T16:18:12.281859Z node 4 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,5) wasn't found 2025-03-31T16:18:12.281931Z node 4 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,2) wasn't found 2025-03-31T16:18:12.281995Z node 4 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,4) wasn't found 2025-03-31T16:18:12.282030Z node 4 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,6) wasn't found 2025-03-31T16:18:12.282077Z node 4 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,3) wasn't found 2025-03-31T16:18:12.282122Z node 4 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,7) wasn't found 2025-03-31T16:18:12.306593Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037888 not found Restore "/home/runner/.ya/build/build_root/0l5c/00107e/r3tmp/tmpzDlWCT/" to "/Root"Resolved db base path: "/Root"Restore folder "/home/runner/.ya/build/build_root/0l5c/00107e/r3tmp/tmpzDlWCT/" to "/Root"Process "/home/runner/.ya/build/build_root/0l5c/00107e/r3tmp/tmpzDlWCT/table"Read scheme from "/home/runner/.ya/build/build_root/0l5c/00107e/r3tmp/tmpzDlWCT/table/scheme.pb"Restore table "/home/runner/.ya/build/build_root/0l5c/00107e/r3tmp/tmpzDlWCT/table" to "/Root/table"2025-03-31T16:18:12.341808Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 Created "/Root/table"Read data from "/home/runner/.ya/build/build_root/0l5c/00107e/r3tmp/tmpzDlWCT/table/data_00.csv"Process "/home/runner/.ya/build/build_root/0l5c/00107e/r3tmp/tmpzDlWCT/table/c"Read changefeed from "/home/runner/.ya/build/build_root/0l5c/00107e/r3tmp/tmpzDlWCT/table/c/changefeed_description.pb"Read topic from "/home/runner/.ya/build/build_root/0l5c/00107e/r3tmp/tmpzDlWCT/table/c/topic_description.pb"2025-03-31T16:18:12.446917Z node 4 :CHANGE_EXCHANGE WARN: [CdcChangeSenderMain][72075186224037896:1][4:7488008728803231727:2480] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:13:0] RequestType: 
ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } Created "/home/runner/.ya/build/build_root/0l5c/00107e/r3tmp/tmpzDlWCT/table/c"Process "/home/runner/.ya/build/build_root/0l5c/00107e/r3tmp/tmpzDlWCT/table/a"Read changefeed from "/home/runner/.ya/build/build_root/0l5c/00107e/r3tmp/tmpzDlWCT/table/a/changefeed_description.pb"Read topic from "/home/runner/.ya/build/build_root/0l5c/00107e/r3tmp/tmpzDlWCT/table/a/topic_description.pb"2025-03-31T16:18:12.497024Z node 4 :CHANGE_EXCHANGE WARN: [CdcChangeSenderMain][72075186224037896:1][4:7488008728803231974:2494] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:15:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } Created "/home/runner/.ya/build/build_root/0l5c/00107e/r3tmp/tmpzDlWCT/table/a"Process "/home/runner/.ya/build/build_root/0l5c/00107e/r3tmp/tmpzDlWCT/table/b"Read changefeed from "/home/runner/.ya/build/build_root/0l5c/00107e/r3tmp/tmpzDlWCT/table/b/changefeed_description.pb"Read topic from "/home/runner/.ya/build/build_root/0l5c/00107e/r3tmp/tmpzDlWCT/table/b/topic_description.pb"2025-03-31T16:18:12.549590Z node 4 :CHANGE_EXCHANGE WARN: [CdcChangeSenderMain][72075186224037896:1][4:7488008728803232237:2510] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:17:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } Created "/home/runner/.ya/build/build_root/0l5c/00107e/r3tmp/tmpzDlWCT/table/b"Restore ACL "/home/runner/.ya/build/build_root/0l5c/00107e/r3tmp/tmpzDlWCT/table" to "/Root/table"Read ACL from "/home/runner/.ya/build/build_root/0l5c/00107e/r3tmp/tmpzDlWCT/table/permissions.pb"2025-03-31T16:18:12.568907Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710673:0, at schemeshard: 72057594046644480 Restore completed successfully >> Cdc::UuidExchange[TopicRunner] [GOOD] >> Cdc::UpdatesLog[PqRunner] >> Cdc::KeysOnlyLog[TopicRunner] [GOOD] >> Cdc::KeysOnlyLogDebezium >> YdbLogStore::LogStore [GOOD] >> YdbLogStore::LogStoreNegative >> TTableProfileTests::OverwriteStoragePolicy [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeTableIndex [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeSequence >> TSchemeShardTest::FindSubDomainPathIdActorAsync [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesCorruptedPrivatekey [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientDoesNotProvideClientCerts >> YdbYqlClient::RenameTables [GOOD] |88.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |88.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TTableProfileTests::OverwriteStoragePolicy [GOOD] Test command err: 2025-03-31T16:17:36.543356Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008575547290888:2079];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:36.546121Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d41/r3tmp/tmprYrVFJ/pdisk_1.dat 2025-03-31T16:17:36.964144Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:36.972117Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:36.972224Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:36.981602Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28064, node 1 2025-03-31T16:17:37.143715Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:37.143737Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:37.143747Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:37.143847Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64810 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:37.435647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:64810 2025-03-31T16:17:37.736063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:37.759366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
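Editor's note: the repeated "WaitRootIsUp 'Root'..." / "TClient::Ls request" exchanges in these traces are the harness polling the scheme root until the database answers. A minimal sketch of such a readiness poll, written against the public C++ SDK rather than the internal test TClient (client construction, DescribePath as the probe, and the one-second backoff are all illustrative assumptions, not the test's actual helper):

    // Illustrative readiness poll in the spirit of WaitRootIsUp.
    #include <ydb/public/sdk/cpp/client/ydb_driver/driver.h>
    #include <ydb/public/sdk/cpp/client/ydb_scheme/scheme.h>
    #include <chrono>
    #include <thread>

    bool WaitRootIsUp(NYdb::NScheme::TSchemeClient& scheme, const TString& root) {
        for (int attempt = 0; attempt < 10; ++attempt) {
            // Equivalent of "TClient::Ls request: Root" in the log above.
            auto result = scheme.DescribePath(root).GetValueSync();
            if (result.IsSuccess()) {
                return true;  // "WaitRootIsUp 'Root' success."
            }
            // Early requests can fail (e.g. scheme_cache_undelivered_message)
            // while the scheme cache is still warming up, so back off and retry.
            std::this_thread::sleep_for(std::chrono::seconds(1));
        }
        return false;
    }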
2025-03-31T16:17:38.295900Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488008586456779638:2208];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:38.296263Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:17:38.299414Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:38.299548Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:38.311898Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-03-31T16:17:38.338897Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:64810 2025-03-31T16:17:38.885921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:64810 TClient::Ls request: /Root/table-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710660 CreateStep: 1743437859030 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-1" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... (TRUNCATED) 2025-03-31T16:17:39.425305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:64810 TClient::Ls request: /Root/table-2 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-2" PathId: 4 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710661 CreateStep: 1743437859513 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-2" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... 
(TRUNCATED) 2025-03-31T16:17:39.762758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:64810 TClient::Ls request: /Root/table-3 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-3" PathId: 5 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710662 CreateStep: 1743437859884 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-3" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... (TRUNCATED) 2025-03-31T16:17:40.086567Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2025-03-31T16:17:40.087113Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-03-31T16:17:42.943363Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488008603127964777:2141];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:42.943423Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d41/r3tmp/tmp9ar6v7/pdisk_1.dat 2025-03-31T16:17:43.168939Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11507, node 4 2025-03-31T16:17:43.255450Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:43.255554Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:43.260376Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:17:43.290039Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:43.290062Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:43.290070Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:43.290209Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12843 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:43.593090Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:12843 2025-03-31T16:17:43.887623Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:43.908053Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo ... existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d41/r3tmp/tmpMBI91w/pdisk_1.dat 2025-03-31T16:18:07.059353Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:07.114717Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:07.114846Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:07.119188Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20557, node 13 2025-03-31T16:18:07.212064Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:07.212098Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:07.212134Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:07.212341Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19222 WaitRootIsUp 'Root'... 
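Editor's note: each "table-N" described in this test is the same shape: a single Uint64 column "key" that is also the primary key (see the Columns/KeyColumnNames fields in the Ls responses). For readers reproducing the scheme outside the harness, a hedged sketch with the public C++ SDK table client; session handling and error reporting here are illustrative, since the test itself drives schemeshard through internal helpers:

    // Sketch only: build a table shaped like the "table-N" entries in this log.
    #include <ydb/public/sdk/cpp/client/ydb_table/table.h>

    void CreateKeyTable(NYdb::NTable::TTableClient& client, const TString& path) {
        // Assumes session creation succeeds; a real caller should check it.
        auto session = client.CreateSession().GetValueSync().GetSession();
        auto desc = NYdb::NTable::TTableBuilder()
            .AddNullableColumn("key", NYdb::EPrimitiveType::Uint64)  // NotNull: false in the Ls output
            .SetPrimaryKeyColumn("key")                              // KeyColumnNames: "key"
            .Build();
        auto status = session.CreateTable(path, std::move(desc)).GetValueSync();
        Y_ENSURE(status.IsSuccess(), status.GetIssues().ToString());
    }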
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:07.704948Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:19222 2025-03-31T16:18:08.223830Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:08.259551Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:08.768010Z node 15 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[15:7488008714311697010:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:08.768135Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:18:08.797570Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:08.797692Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:08.804854Z node 13 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 15 Cookie 15 2025-03-31T16:18:08.808303Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19222 2025-03-31T16:18:09.253553Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:19222 TClient::Ls request: /Root/ydb_ut_tenant/table-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710660 CreateStep: 1743437889670 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 
TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-1" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... (TRUNCATED) 2025-03-31T16:18:10.339890Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:19222 TClient::Ls request: /Root/ydb_ut_tenant/table-2 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-2" PathId: 4 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710661 CreateStep: 1743437890700 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-2" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... (TRUNCATED) 2025-03-31T16:18:11.218546Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:11.873282Z node 13 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7488008704851182571:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:11.873380Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:19222 TClient::Ls request: /Root/ydb_ut_tenant/table-3 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-3" PathId: 5 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710662 CreateStep: 1743437891630 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-3" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... 
(TRUNCATED) 2025-03-31T16:18:12.100944Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:19222 TClient::Ls request: /Root/ydb_ut_tenant/table-4 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-4" PathId: 6 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710663 CreateStep: 1743437892460 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-4" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... (TRUNCATED) 2025-03-31T16:18:12.910575Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:13.768353Z node 15 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[15:7488008714311697010:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:13.768444Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:19222 TClient::Ls request: /Root/ydb_ut_tenant/table-5 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-5" PathId: 7 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710664 CreateStep: 1743437893330 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-5" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... 
(TRUNCATED) 2025-03-31T16:18:14.218329Z node 13 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 15 2025-03-31T16:18:14.262488Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connected -> Disconnected |88.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |88.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupAtLeast8BytesInStorage+useSink >> KqpCost::OlapRangeFullScan ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::RenameTables [GOOD] Test command err: 2025-03-31T16:17:39.697279Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008588265468784:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:39.697428Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d3b/r3tmp/tmp1uD7rz/pdisk_1.dat 2025-03-31T16:17:40.277047Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:40.286889Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:40.287010Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:40.294623Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23847, node 1 2025-03-31T16:17:40.644540Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:40.644556Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:40.644561Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:40.644644Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4309 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:41.014918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
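Editor's note: each "Test command err:" block begins the same way: the test server picks a free port ("TServer::EnableGrpc on GrpcPort 23847" in the block above) and the client connects to it. The equivalent connection setup via the public SDK, with endpoint and database taken from this log (the driver construction itself is shown for illustration; the test uses its internal TClient):

    #include <ydb/public/sdk/cpp/client/ydb_driver/driver.h>

    // Connect the way the test client does, but through the public SDK.
    auto config = NYdb::TDriverConfig()
        .SetEndpoint("localhost:23847")  // GrpcPort printed by TServer::EnableGrpc above
        .SetDatabase("/Root");           // root configured via ESchemeOpAlterSubDomain
    NYdb::TDriver driver(config);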
Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 1 of 5 Previous query attempt was finished with unsuccessful status CLIENT_RESOURCE_EXHAUSTED: Sending retry attempt 2 of 5 2025-03-31T16:17:44.696007Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008588265468784:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:44.696084Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Previous query attempt was finished with unsuccessful status UNAVAILABLE: Sending retry attempt 3 of 5 Previous query attempt was finished with unsuccessful status BAD_SESSION: Sending retry attempt 4 of 5 Previous query attempt was finished with unsuccessful status SESSION_BUSY: Sending retry attempt 5 of 5 2025-03-31T16:17:47.558352Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008622625208314:2354], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:47.558616Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008622625208306:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:47.558743Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:47.564508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:17:47.585576Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008622625208320:2355], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:17:47.647726Z node 1 :TX_PROXY ERROR: Actor# [1:7488008622625208403:2714] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 1 of 5 Previous query attempt was finished with unsuccessful status CLIENT_RESOURCE_EXHAUSTED: Sending retry attempt 2 of 5 Previous query attempt was finished with unsuccessful status UNAVAILABLE: Sending retry attempt 3 of 5 Previous query attempt was finished with unsuccessful status BAD_SESSION: Sending retry attempt 4 of 5 Previous query attempt was finished with unsuccessful status SESSION_BUSY: Sending retry attempt 5 of 5 Previous query attempt was finished with unsuccessful status NOT_FOUND: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status NOT_FOUND: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status UNDETERMINED: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status UNDETERMINED: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status TRANSPORT_UNAVAILABLE: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status TRANSPORT_UNAVAILABLE: Sending retry attempt 1 of 1 2025-03-31T16:17:53.746303Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488008649611236190:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:53.746420Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d3b/r3tmp/tmp5U37Dz/pdisk_1.dat 2025-03-31T16:17:53.896416Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:53.938877Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:53.938953Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:53.944854Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20786, node 4 2025-03-31T16:17:54.041322Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:54.041346Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:54.041353Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:54.041490Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3455 WaitRootIsUp 'Root'... 
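Editor's note: the "Previous query attempt was finished with unsuccessful status ... Sending retry attempt N of 5" lines above show the SDK retry policy walking through the retryable statuses (OVERLOADED, CLIENT_RESOURCE_EXHAUSTED, UNAVAILABLE, BAD_SESSION, SESSION_BUSY). A minimal sketch of the client-side pattern that produces such a sequence, assuming the ydb-cpp-sdk RetryOperation API; the query text is a placeholder, not the test's statement:

    #include <ydb/public/sdk/cpp/client/ydb_table/table.h>

    // Retries transparently on the retryable statuses listed in this log;
    // a terminal status ends the loop and is returned to the caller.
    NYdb::TStatus RunWithRetries(NYdb::NTable::TTableClient& client) {
        auto settings = NYdb::NTable::TRetryOperationSettings().MaxRetries(5);
        return client.RetryOperationSync([](NYdb::NTable::TSession session) -> NYdb::TStatus {
            return session.ExecuteDataQuery(
                "SELECT 1;",  // placeholder query
                NYdb::NTable::TTxControl::BeginTx().CommitTx()
            ).GetValueSync();
        }, settings);
    }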
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:54.304597Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:56.866028Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488008662496139135:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:56.866127Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:56.911069Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:17:57.067706Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488008666791106614:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:57.067786Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:57.067979Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488008666791106619:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:57.070930Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-31T16:17:57.100505Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7488008666791106621:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-31T16:17:57.200318Z node 4 :TX_PROXY ERROR: Actor# [4:7488008666791106696:2804] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', e ... difyScheme { WorkingDir: "Root" OperationType: ESchemeOpDropTable Drop { Name: "Table-1" } } } DatabaseName: "" RequestType: "" PeerName: "ipv6:[::1]:33624" 2025-03-31T16:18:15.801930Z node 13 :TX_PROXY DEBUG: Actor# [13:7488008743891623659:3544] txid# 281474976715672 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T16:18:15.802001Z node 13 :TX_PROXY DEBUG: Actor# [13:7488008743891623659:3544] txid# 281474976715672 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:18:15.802582Z node 13 :TX_PROXY DEBUG: Actor# [13:7488008743891623659:3544] txid# 281474976715672 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:18:15.802677Z node 13 :TX_PROXY DEBUG: Actor# [13:7488008743891623659:3544] HANDLE EvNavigateKeySetResult, txid# 281474976715672 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T16:18:15.802714Z node 13 :TX_PROXY DEBUG: Actor# [13:7488008743891623659:3544] txid# 281474976715672 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715672 TabletId# 72057594046644480} 2025-03-31T16:18:15.802880Z node 13 :TX_PROXY DEBUG: Actor# [13:7488008743891623659:3544] txid# 281474976715672 HANDLE EvClientConnected 2025-03-31T16:18:15.803412Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TDropTable Propose, path: Root/Table-1, pathId: 0, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-31T16:18:15.803594Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715672:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-31T16:18:15.806277Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715672, database: /Root, subject: , status: StatusAccepted, operation: DROP TABLE, path: Root/Table-1 2025-03-31T16:18:15.807245Z node 13 :TX_PROXY DEBUG: Actor# [13:7488008743891623659:3544] txid# 281474976715672 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715672} 2025-03-31T16:18:15.807285Z node 13 :TX_PROXY DEBUG: Actor# [13:7488008743891623659:3544] txid# 281474976715672 SEND to# [13:7488008743891623658:2393] Source {TEvProposeTransactionStatus txid# 281474976715672 Status# 53} 2025-03-31T16:18:15.808517Z node 13 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-03-31T16:18:15.808663Z node 13 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-03-31T16:18:15.808675Z node 13 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-03-31T16:18:15.808734Z node 13 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-03-31T16:18:15.827377Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743437895871, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-31T16:18:15.834071Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715672, done: 0, blocked: 1 2025-03-31T16:18:15.843028Z node 13 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-03-31T16:18:15.843191Z node 13 :GRPC_SERVER DEBUG: Can't update 
SecurityState for /Root - no PublicKeys 2025-03-31T16:18:15.843204Z node 13 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-03-31T16:18:15.843266Z node 13 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-03-31T16:18:15.846403Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715672:0 2025-03-31T16:18:15.860082Z node 13 :GRPC_SERVER DEBUG: Got grpc request# DropTableRequest, traceId# 01jqpfy65k5vay1k1hq6v40vhd, sdkBuildInfo# ydb-cpp-sdk/dev, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:33628, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-03-31T16:18:15.860848Z node 13 :TX_PROXY DEBUG: actor# [13:7488008726711752537:2138] Handle TEvProposeTransaction 2025-03-31T16:18:15.860889Z node 13 :TX_PROXY DEBUG: actor# [13:7488008726711752537:2138] TxId# 281474976715673 ProcessProposeTransaction 2025-03-31T16:18:15.860925Z node 13 :TX_PROXY DEBUG: actor# [13:7488008726711752537:2138] Cookie# 0 userReqId# "" txid# 281474976715673 SEND to# [13:7488008743891623733:3609] 2025-03-31T16:18:15.863797Z node 13 :TX_PROXY DEBUG: Actor# [13:7488008743891623733:3609] txid# 281474976715673 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "Root" OperationType: ESchemeOpDropTable Drop { Name: "Table-2" } } } DatabaseName: "" RequestType: "" PeerName: "ipv6:[::1]:33628" 2025-03-31T16:18:15.863831Z node 13 :TX_PROXY DEBUG: Actor# [13:7488008743891623733:3609] txid# 281474976715673 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T16:18:15.863888Z node 13 :TX_PROXY DEBUG: Actor# [13:7488008743891623733:3609] txid# 281474976715673 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:18:15.864159Z node 13 :TX_PROXY DEBUG: Actor# [13:7488008743891623733:3609] txid# 281474976715673 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:18:15.864247Z node 13 :TX_PROXY DEBUG: Actor# [13:7488008743891623733:3609] HANDLE EvNavigateKeySetResult, txid# 281474976715673 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T16:18:15.864284Z node 13 :TX_PROXY DEBUG: Actor# [13:7488008743891623733:3609] txid# 281474976715673 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715673 TabletId# 72057594046644480} 2025-03-31T16:18:15.864828Z node 13 :TX_PROXY DEBUG: Actor# [13:7488008743891623733:3609] txid# 281474976715673 HANDLE EvClientConnected 2025-03-31T16:18:15.864869Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-03-31T16:18:15.865109Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TDropTable Propose, path: Root/Table-2, pathId: 0, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-31T16:18:15.865250Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715673:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-31T16:18:15.868266Z node 13 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 13, TabletId: 72075186224037890 not found 2025-03-31T16:18:15.870813Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715673, database: /Root, subject: , status: 
StatusAccepted, operation: DROP TABLE, path: Root/Table-2 2025-03-31T16:18:15.871031Z node 13 :TX_PROXY DEBUG: Actor# [13:7488008743891623733:3609] txid# 281474976715673 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715673} 2025-03-31T16:18:15.871075Z node 13 :TX_PROXY DEBUG: Actor# [13:7488008743891623733:3609] txid# 281474976715673 SEND to# [13:7488008743891623732:2397] Source {TEvProposeTransactionStatus txid# 281474976715673 Status# 53} 2025-03-31T16:18:15.874185Z node 13 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-03-31T16:18:15.874323Z node 13 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-03-31T16:18:15.874337Z node 13 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-03-31T16:18:15.874397Z node 13 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-03-31T16:18:15.886113Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743437895927, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-31T16:18:15.901052Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715673, done: 0, blocked: 1 2025-03-31T16:18:15.906495Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715673:0 2025-03-31T16:18:15.906590Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715673, publications: 2, subscribers: 1 2025-03-31T16:18:15.908640Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715673, subscribers: 1 2025-03-31T16:18:15.910171Z node 13 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-03-31T16:18:15.910293Z node 13 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-03-31T16:18:15.910304Z node 13 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-03-31T16:18:15.910359Z node 13 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-03-31T16:18:15.928626Z node 13 :GRPC_SERVER DEBUG: [0x51a000043880] received request Name# SchemeOperation ok# false data# peer# current inflight# 0 2025-03-31T16:18:15.928905Z node 13 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 13, TabletId: 72075186224037889 not found 2025-03-31T16:18:15.928925Z node 13 :GRPC_SERVER DEBUG: [0x51a0000f7e80] received request Name# SchemeOperationStatus ok# false data# peer# current inflight# 0 2025-03-31T16:18:15.929147Z node 13 :GRPC_SERVER DEBUG: [0x51a000082280] received request Name# SchemeDescribe ok# false data# peer# current inflight# 0 2025-03-31T16:18:15.929351Z node 13 :GRPC_SERVER DEBUG: [0x51a0000f7880] received request Name# ChooseProxy ok# false data# peer# current inflight# 0 2025-03-31T16:18:15.929559Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-03-31T16:18:15.929565Z node 13 :GRPC_SERVER DEBUG: [0x51a00016b080] received request Name# PersQueueRequest ok# false data# peer# current inflight# 0 2025-03-31T16:18:15.929752Z node 13 :GRPC_SERVER DEBUG: [0x51a00005a680] received request Name# SchemeInitRoot ok# false data# peer# current inflight# 0 2025-03-31T16:18:15.930066Z node 13 :GRPC_SERVER DEBUG: [0x51a0000f7280] received request Name# ResolveNode ok# false data# peer# current inflight# 0 2025-03-31T16:18:15.930251Z node 13 :GRPC_SERVER DEBUG: [0x51a0000f6c80] received request 
Name# FillNode ok# false data# peer# current inflight# 0 2025-03-31T16:18:15.930337Z node 13 :GRPC_SERVER DEBUG: [0x51a000109e80] received request Name# DrainNode ok# false data# peer# current inflight# 0 2025-03-31T16:18:15.930463Z node 13 :GRPC_SERVER DEBUG: [0x51a000028e80] received request Name# BlobStorageConfig ok# false data# peer# current inflight# 0 2025-03-31T16:18:15.930530Z node 13 :GRPC_SERVER DEBUG: [0x51a000024680] received request Name# HiveCreateTablet ok# false data# peer# current inflight# 0 2025-03-31T16:18:15.930678Z node 13 :GRPC_SERVER DEBUG: [0x51a00010f280] received request Name# TestShardControl ok# false data# peer# current inflight# 0 2025-03-31T16:18:15.930725Z node 13 :GRPC_SERVER DEBUG: [0x51a000117080] received request Name# RegisterNode ok# false data# peer# current inflight# 0 2025-03-31T16:18:15.930886Z node 13 :GRPC_SERVER DEBUG: [0x51a000059a80] received request Name# CmsRequest ok# false data# peer# current inflight# 0 2025-03-31T16:18:15.930915Z node 13 :GRPC_SERVER DEBUG: [0x51a00005e280] received request Name# ConsoleRequest ok# false data# peer# current inflight# 0 2025-03-31T16:18:15.931127Z node 13 :GRPC_SERVER DEBUG: [0x51a000059480] received request Name# InterconnectDebug ok# false data# peer# current inflight# 0 2025-03-31T16:18:15.931132Z node 13 :GRPC_SERVER DEBUG: [0x51a00010d480] received request Name# TabletStateRequest ok# false data# peer# current inflight# 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::FindSubDomainPathIdActorAsync [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:13:53.818677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:13:53.818778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:13:53.818815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:13:53.818847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:13:53.818889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:13:53.818917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:13:53.818979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:13:53.819065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:13:53.819509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:13:53.927474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console 
configs 2025-03-31T16:13:53.927553Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:13:53.933558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:13:53.933676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:13:53.933822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:13:53.939998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:13:53.940225Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:13:53.945818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:13:53.946118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:13:53.956148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:13:53.966860Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:13:53.967006Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:13:53.967234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:13:53.967289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:13:53.967354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:13:53.968297Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:13:53.983254Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:13:54.135713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:13:54.135918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:13:54.136104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:13:54.136394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:13:54.136462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:13:54.138769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:13:54.138924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 
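Editor's note: the trace above is the standard schemeshard unit-test bring-up: TxInitSchema/TTxInitRoot initialize the tablet, a ModifyScheme transaction is proposed ("IgniteOperation ... StatusAccepted"), and the AUDIT line records the resulting ALTER DATABASE. Roughly how these ut_base tests drive that flow; the helper names follow the ut_helpers conventions this suite uses, but treat the exact signatures and scheme text as assumptions:

    // Sketch of the ut_helpers pattern behind traces like the one above.
    Y_UNIT_TEST(AlterSubDomainSketch) {
        TTestBasicRuntime runtime;
        TTestEnv env(runtime);  // boots schemeshard, hive, coordinator
        ui64 txId = 100;
        // Proposes ESchemeOpAlterSubDomain; schemeshard answers StatusAccepted
        // and emits the "AUDIT: ... ALTER DATABASE" line seen in this log.
        TestAlterSubDomain(runtime, ++txId, "/",
            "Name: \"MyRoot\" "
            "StoragePools { Name: \"pool-1\" Kind: \"pool-kind-1\" } "
            "StoragePools { Name: \"pool-2\" Kind: \"pool-kind-2\" }");
        env.TestWaitNotification(runtime, txId);  // blocks until the operation reaches its Done state
    }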
2025-03-31T16:13:54.139145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:13:54.139205Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:13:54.139242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:13:54.139293Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:13:54.141227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:13:54.141293Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:13:54.141327Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:13:54.142942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:13:54.142975Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:13:54.143040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:13:54.143078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:13:54.152815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:13:54.154765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:13:54.154946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:13:54.155900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:13:54.156033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:13:54.156092Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:13:54.156350Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:13:54.156409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:13:54.156562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:13:54.156655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no 
IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:13:54.161060Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:13:54.161109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:13:54.161277Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:13:54.161337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:13:54.161577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:13:54.161629Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:13:54.161754Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:13:54.161793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:13:54.161831Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:13:54.161861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:13:54.161893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:13:54.161934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:13:54.161981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:13:54.162008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:13:54.162068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:13:54.162104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:13:54.162133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:13:54.164055Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:13:54.164175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:13:54.164219Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
EMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-03-31T16:18:15.499161Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2025-03-31T16:18:15.499213Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-03-31T16:18:15.500579Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:18:15.500687Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:18:15.500742Z node 15 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-03-31T16:18:15.500813Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-03-31T16:18:15.500893Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-31T16:18:15.502013Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:18:15.502108Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:18:15.502141Z node 15 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-03-31T16:18:15.502176Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-03-31T16:18:15.502213Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-03-31T16:18:15.502304Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-03-31T16:18:15.509613Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-31T16:18:15.512341Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-03-31T16:18:15.517761Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-03-31T16:18:15.517854Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-03-31T16:18:15.518549Z node 15 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-03-31T16:18:15.518718Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-31T16:18:15.518785Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- 
TTxNotificationSubscriber for txId 102: satisfy waiter [15:516:2467] TestWaitNotification: OK eventTxId 102 2025-03-31T16:18:15.519571Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SubDomenA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:18:15.519944Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SubDomenA" took 422us result status StatusSuccess 2025-03-31T16:18:15.520584Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SubDomenA" PathDescription { Self { Name: "SubDomenA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ChildrenExist: false BalancerTabletID: 72075186233409547 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 247 AccountSize: 247 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:18:15.521437Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SubDomenA/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-31T16:18:15.521730Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SubDomenA/Topic1" took 320us result status StatusSuccess 2025-03-31T16:18:15.522376Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SubDomenA/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 13 WriteSpeedInBytesPerSecond: 19 } YdbDatabasePath: "/MyRoot" MeteringMode: 
METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 247 AccountSize: 247 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:18:16.035407Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 3 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:18:16.035789Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 3 took 415us result status StatusSuccess 2025-03-31T16:18:16.036490Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SubDomenA/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 13 WriteSpeedInBytesPerSecond: 19 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 247 AccountSize: 247 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:18:16.417375Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 3 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:18:16.417595Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 3 took 242us result status StatusSuccess 2025-03-31T16:18:16.418075Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SubDomenA/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" 
PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 13 WriteSpeedInBytesPerSecond: 19 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 247 AccountSize: 247 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:18:16.439710Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: FindTabletSubDomainPathId for tablet 72075186233409546 |88.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |88.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> Cdc::DocApi[YdsRunner] [GOOD] >> Cdc::DocApi[TopicRunner] >> KqpCost::Range >> KqpCost::IndexLookupAtLeast8BytesInStorage-useSink >> KqpCost::IndexLookupJoin-StreamLookupJoin >> Cdc::KeysOnlyLogDebezium [GOOD] >> Cdc::NewAndOldImagesLog[PqRunner] |88.5%| [TA] $(B)/ydb/core/tx/schemeshard/ut_base/test-results/unittest/{meta.json ... 
results_accumulator.log}
>> KqpImmediateEffects::ReplaceDuplicates [GOOD]
>> Cdc::UpdatesLog[PqRunner] [GOOD]
>> KqpImmediateEffects::ReplaceExistingKey
>> KqpEffects::InsertAbort_Literal_Success [GOOD]
>> Cdc::UpdatesLog[YdsRunner]
>> KqpEffects::InsertAbort_Literal_Duplicates-UseSink
>> KqpImmediateEffects::ConflictingKeyRW1WR2 [GOOD]
>> KqpImmediateEffects::ConflictingKeyRW1WRR2
>> KqpImmediateEffects::MultiShardUpsertAfterRead [GOOD]
>> KqpImmediateEffects::WriteThenReadWithCommit [GOOD]
>> KqpImmediateEffects::ManyFlushes
>> KqpInplaceUpdate::BigRow
>> KqpInplaceUpdate::Negative_SingleRowWithValueCast+UseSink [GOOD]
>> KqpInplaceUpdate::Negative_SingleRowWithValueCast-UseSink
>> KqpEffects::InsertAbort_Params_Duplicates+UseSink [GOOD]
>> KqpEffects::InsertAbort_Params_Duplicates-UseSink
>> KqpCost::OlapWriteRow
>> KqpEffects::InsertAbort_Literal_Conflict+UseSink [GOOD]
>> KqpEffects::DeletePkPrefixWithIndex
>> KqpEffects::InsertAbort_Params_Conflict+UseSink [GOOD]
>> KqpEffects::InsertAbort_Params_Conflict-UseSink
>> KqpImmediateEffects::ConflictingKeyR1WR2 [GOOD]
>> KqpInplaceUpdate::SingleRowSimple+UseSink [GOOD]
>> KqpEffects::InsertRevert_Literal_Duplicates [GOOD]
>> KqpInplaceUpdate::SingleRowSimple-UseSink
>> KqpImmediateEffects::ConflictingKeyR1WRR2
>> KqpEffects::InsertRevert_Literal_Conflict
>> KqpImmediateEffects::ImmediateUpdate [GOOD]
>> KqpImmediateEffects::DeleteAfterUpsert [GOOD]
>> KqpImmediateEffects::DeleteOnAfterInsertWithIndex
>> KqpInplaceUpdate::SingleRowIf+UseSink [GOOD]
>> KqpImmediateEffects::ImmediateUpdateSelect
>> KqpInplaceUpdate::SingleRowIf-UseSink
>> KqpCost::AAARangeFullScan
>> KqpImmediateEffects::UpdateOn [GOOD]
>> KqpImmediateEffects::UpdateAfterUpsert
>> KqpImmediateEffects::Insert [GOOD]
>> KqpImmediateEffects::InsertConflictTxAborted
>> KqpInplaceUpdate::SingleRowPgNotNull+UseSink [GOOD]
>> KqpInplaceUpdate::SingleRowPgNotNull-UseSink
|88.5%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_base/test-results/unittest/{meta.json ... results_accumulator.log}
>> KqpImmediateEffects::InteractiveTxWithReadAtTheEnd+UseSink [GOOD]
>> KqpImmediateEffects::Interactive
>> BackupRestoreS3::RestoreIndexTablePartitioningSettings [GOOD]
>> BackupRestoreS3::RestoreTableSplitBoundaries
>> KqpImmediateEffects::ConflictingKeyRW1RR2 [GOOD]
>> KqpImmediateEffects::ConflictingKeyRW1RWR2
|88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest
>> YdbLogStore::LogStoreNegative [GOOD]
>> YdbLogStore::Dirs
>> KqpCost::OlapRange
>> BackupRestore::RestoreTableSplitBoundaries [GOOD]
>> BackupRestore::ImportDataShouldHandleErrors
>> GroupWriteTest::WriteHardRateDispatcher [GOOD]
>> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeSequence [GOOD]
>> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeView
>> KqpCost::ScanScriptingRangeFullScan+SourceRead
------- [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/ttl_tiering/py3test >> data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test
2025-03-31 16:18:12,003 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out
2025-03-31 16:18:12,286 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout.
Process tree before termination:
pid rss ref pdirt
210884 648M 640M 630M ydb-tests-olap-ttl_tiering --basetemp /home/runner/.ya/build/build_root/0l5c/0019e0/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-modu
212767 2.7G 2.7G 2.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/0l5c/0019e0/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/data_m
215072 424M 394M 414M └─ moto_server s3 --port 1247
Test command err:
  File "library/python/pytest/main.py", line 101, in main
    rc = pytest.main(
  File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main
    ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main(
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main
    return wrap_session(config, _main)
  File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session
    session.exitstatus = doit(config, session) or 0
  File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main
    config.hook.pytest_runtestloop(session=session)
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop
    item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem)
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol
    runtestprotocol(item, nextitem=nextitem)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol
    reports.append(call_and_report(item, "call", log))
  File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report
    call = call_runtest_hook(item, when, **kwds)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook
    return CallInfo.from_call(
  File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call
    result: Optional[TResult] = func()
  File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in <lambda>
    lambda: ihook(item=item, **kwds), when=when, reraise=reraise
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call
    item.runtest()
  File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest
    self.ihook.pytest_pyfunc_call(pyfuncitem=self)
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call
    pyfuncitem.retval = testfunction(**testargs)
  File "ydb/tests/olap/ttl_tiering/data_migration_when_alter_ttl.py", line 171, in test
    if not self.wait_for(
  File "ydb/tests/olap/ttl_tiering/base.py", line 68, in wait_for
    if condition_func():
  File "ydb/tests/olap/ttl_tiering/data_migration_when_alter_ttl.py", line 172, in <lambda>
    lambda: get_rows_in_portion(bucket1_path) == self.row_count
  File "ydb/tests/olap/ttl_tiering/data_migration_when_alter_ttl.py", line 159, in get_rows_in_portion
    portions_stat = table.get_portion_stat_by_tier()
  File "ydb/tests/olap/common/column_table_helper.py", line 26, in get_portion_stat_by_tier
    results = self.ydb_client.query(
  File "ydb/tests/olap/common/ydb_client.py", line 24, in query
    return self.session_pool.execute_with_retries(statement)
  File "contrib/python/ydb/py3/ydb/query/pool.py", line 202, in execute_with_retries
    return retry_operation_sync(wrapped_callee, retry_settings)
  File "contrib/python/ydb/py3/ydb/retries.py", line 133, in retry_operation_sync
    for next_opt in opt_generator:
  File "contrib/python/ydb/py3/ydb/retries.py", line 94, in retry_operation_impl
    result = YdbRetryOperationFinalResult(callee(*args, **kwargs))
  File "contrib/python/ydb/py3/ydb/query/pool.py", line 200, in wrapped_callee
    return [result_set for result_set in it]
  File "contrib/python/ydb/py3/ydb/_utilities.py", line 173, in __next__
    return self._next()
  File "contrib/python/ydb/py3/ydb/_utilities.py", line 164, in _next
    res = self.wrapper(next(self.it))
  File "contrib/python/grpcio/py3/grpc/_channel.py", line 475, in __next__
    return self._next()
  File "contrib/python/grpcio/py3/grpc/_channel.py", line 872, in _next
    _common.wait(self._state.condition.wait, _response_ready)
  File "contrib/python/grpcio/py3/grpc/_common.py", line 150, in wait
    _wait_once(wait_fn, MAXIMUM_WAIT_TIMEOUT, spin_cb)
  File "contrib/python/grpcio/py3/grpc/_common.py", line 112, in _wait_once
    wait_fn(timeout=timeout)
  File "contrib/tools/python3/Lib/threading.py", line 359, in wait
    gotit = waiter.acquire(True, timeout)
  File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown
    traceback.print_stack(file=sys.stderr)
Traceback (most recent call last):
  File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait
    wait_for(
  File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for
    raise TimeoutError(truncate(message, MAX_MESSAGE_LEN))
yatest.common.process.TimeoutError: ...apture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/0l5c/0019e0/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/data_migration_when_alter_ttl/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/0l5c/0019e0', '--source-root', '/home/runner/.ya/build/build_root/0l5c/0019e0/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/0l5c/0019e0/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/data_migration_when_alter_ttl/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/ttl_tiering', '--test-tool-bin', '/home/runner/.ya/tools/v4/8330113388/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--dep-root', 'ydb/tests/olap/ttl_tiering', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'data_migration_when_alter_ttl.py']' stopped by 600 seconds timeout
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
  File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1749, in main
    res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback)
  File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait
    raise ExecutionTimeoutError(self, str(e))
yatest.common.process.ExecutionTimeoutError: (("...apture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/0l5c/0019e0/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/data_migration_when_alter_ttl/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/0l5c/0019e0', '--source-root', '/home/runner/.ya/build/build_root/0l5c/0019e0/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/0l5c/0019e0/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/data_migration_when_alter_ttl/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/ttl_tiering', '--test-tool-bin', '/home/runner/.ya/tools/v4/8330113388/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--dep-root', 'ydb/tests/olap/ttl_tiering', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'data_migration_when_alter_ttl.py']' stopped by 600 seconds timeout",), {})
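The reconstructed stack above shows where the 600-second budget went: the test is parked inside wait_for from ydb/tests/olap/ttl_tiering/base.py, whose condition lambda (get_rows_in_portion(bucket1_path) == self.row_count) issues a query through the YDB session pool's execute_with_retries and is still blocked waiting on the gRPC response when the ya wrapper gives up. For orientation, a minimal sketch of such a polling helper, reconstructed only from the frames visible in the traceback; the parameter names and defaults are assumptions, not the actual base.py source:

import time

def wait_for(condition_func, timeout_seconds=60, poll_interval=1.0):
    # Re-evaluate condition_func until it returns True or the deadline passes.
    # The traceback shows the real helper sitting at the equivalent of
    # "if condition_func():" (base.py, line 68) when the run was killed.
    # Sketch only: names and defaults here are assumed.
    deadline = time.time() + timeout_seconds
    while time.time() < deadline:
        if condition_func():
            return True
        time.sleep(poll_interval)
    return False

Note that a helper like this never gets to time out on its own in the run above: a single condition probe hangs waiting for a query result (the stack bottoms out in grpc _common.wait), so the wrapper's hard 600-second limit fires first and the whole process tree is terminated.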
------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest >> GroupWriteTest::WriteHardRateDispatcher [GOOD]
Test command err:
RandomSeed# 17216137036124450573 2025-03-31T16:14:17.909458Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 5 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 5 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2025-03-31T16:14:17.955426Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 5 Generation# 1 recieved TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2025-03-31T16:14:17.955518Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 5 Generation# 1 going to send TEvBlock {TabletId# 5 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2025-03-31T16:14:17.958353Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 5 Generation# 1 recieved TEvBlockResult {Status# OK} 2025-03-31T16:14:17.977483Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 5 Generation# 2 going to send TEvCollectGarbage {TabletId# 5 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-03-31T16:14:17.980793Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 5 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 5 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2025-03-31T16:14:57.584244Z 6 00h01m03.975388s :BS_LOGCUTTER ERROR: VDISK[82000000:_:0:5:0]: KEEPER: RetryCutLogEvent: limit exceeded; FreeUpToLsn# 6272 2025-03-31T16:16:41.427386Z 8 00h01m07.897270s :BS_LOGCUTTER ERROR: VDISK[82000000:_:0:7:0]: KEEPER: RetryCutLogEvent: limit exceeded; FreeUpToLsn# 16675 2025-03-31T16:18:20.729433Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2025-03-31T16:18:20.729538Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: TabletId# 5 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 5 RecordGeneration# 2 PerGenerationCounter# 12 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-03-31T16:18:20.729616Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2025-03-31T16:18:20.729657Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: TabletId# 5 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 5 RecordGeneration# 2 PerGenerationCounter# 13 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-03-31T16:18:20.995412Z 1 00h01m10.010512s :BS_LOAD_TEST INFO: TabletId# 5 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 5 RecordGeneration# 2 PerGenerationCounter# 12 Channel# 0 Status# OK} 2025-03-31T16:18:20.995523Z 1 00h01m10.010512s :BS_LOAD_TEST INFO: TabletId# 5 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 5 RecordGeneration# 2 PerGenerationCounter# 13 Channel# 0 Status# OK}
|88.5%| [TA] $(B)/ydb/core/load_test/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|88.5%| [TA] {RESULT} $(B)/ydb/core/load_test/ut/test-results/unittest/{meta.json ...
results_accumulator.log}
>> Cdc::NewAndOldImagesLog[PqRunner] [GOOD]
>> Cdc::NewAndOldImagesLog[YdsRunner]
>> Cdc::UpdatesLog[YdsRunner] [GOOD]
>> Cdc::UpdatesLog[TopicRunner]
>> KqpCost::Range [GOOD]
>> KqpCost::IndexLookupAtLeast8BytesInStorage+useSink [GOOD]
>> KqpEffects::InsertAbort_Literal_Duplicates-UseSink [GOOD]
>> KqpEffects::DeletePkPrefixWithIndex [GOOD]
>> KqpInplaceUpdate::BigRow [GOOD]
>> KqpInplaceUpdate::SingleRowSimple-UseSink [GOOD]
>> KqpCost::OlapRangeFullScan [GOOD]
>> KqpImmediateEffects::ReplaceExistingKey [GOOD]
|88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest
>> KqpEffects::InsertRevert_Literal_Conflict [GOOD]
>> KqpImmediateEffects::ConflictingKeyR1WRR2 [GOOD]
>> KqpCost::IndexLookupAtLeast8BytesInStorage-useSink [GOOD]
>> KqpEffects::InsertAbort_Params_Duplicates-UseSink [GOOD]
>> KqpImmediateEffects::InsertConflictTxAborted [GOOD]
>> KqpImmediateEffects::ConflictingKeyRW1WRR2 [GOOD]
>> KqpInplaceUpdate::Negative_SingleRowWithValueCast-UseSink [GOOD]
>> KqpImmediateEffects::ImmediateUpdateSelect [GOOD]
>> KqpEffects::InsertAbort_Params_Conflict-UseSink [GOOD]
>> KqpImmediateEffects::UpdateAfterUpsert [GOOD]
>> KqpInplaceUpdate::SingleRowIf-UseSink [GOOD]
>> KqpInplaceUpdate::SingleRowPgNotNull-UseSink [GOOD]
>> KqpImmediateEffects::ManyFlushes [GOOD]
>> KqpImmediateEffects::Interactive [GOOD]
>> KqpCost::IndexLookupJoin-StreamLookupJoin [GOOD]
>> KqpCost::AAARangeFullScan [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::Range [GOOD]
Test command err:
Trying to start YDB, gRPC: 6963, MsgBus: 13441 2025-03-31T16:18:19.901773Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008761138821213:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:19.902208Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000b10/r3tmp/tmppOtgNe/pdisk_1.dat 2025-03-31T16:18:20.404065Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:20.404168Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:20.406902Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:18:20.439636Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6963, node 1 2025-03-31T16:18:20.571621Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:20.571650Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:20.571657Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:20.571756Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13441 TClient is connected to server localhost:13441 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:21.358600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:21.410929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:21.628589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:21.886326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:22.027394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:23.730374Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008778318692151:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:23.730487Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:24.049299Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.080873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.110948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.155589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.187836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.265206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.351279Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008782613659967:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:24.351471Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:24.352101Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008782613659972:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:24.356913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:24.369918Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008782613659975:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:18:24.454947Z node 1 :TX_PROXY ERROR: Actor# [1:7488008782613660030:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:24.902030Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008761138821213:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:24.902110Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupAtLeast8BytesInStorage+useSink [GOOD] Test command err: Trying to start YDB, gRPC: 25995, MsgBus: 25805 2025-03-31T16:18:18.323955Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008755435790739:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:18.324100Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000b37/r3tmp/tmpBemo1k/pdisk_1.dat 2025-03-31T16:18:18.793775Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:18.808844Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:18.808964Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:18.824220Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25995, node 1 2025-03-31T16:18:19.050786Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:19.050814Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:19.050821Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:19.050953Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25805 TClient is connected to server localhost:25805 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:18:19.924786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:19.984205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:20.185010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:20.378972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:20.465468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:21.991918Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008768320694399:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:21.992023Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:22.869171Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:22.921569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:22.963289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:22.999826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:23.046749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:23.091261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:23.182590Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008776910629512:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:23.182685Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:23.182945Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008776910629517:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:23.188282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:23.216065Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008776910629519:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:18:23.271121Z node 1 :TX_PROXY ERROR: Actor# [1:7488008776910629574:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:23.323892Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008755435790739:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:23.323975Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:18:24.574681Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 /Root/SecondaryKeys/Index/indexImplTable 1 8 /Root/SecondaryKeys 1 8 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Literal_Duplicates-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 6219, MsgBus: 15461 2025-03-31T16:18:13.662950Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008733769739729:2190];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:13.663014Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001178/r3tmp/tmpo47DgG/pdisk_1.dat 2025-03-31T16:18:14.181625Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:14.196396Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:14.196516Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:14.214190Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6219, node 1 2025-03-31T16:18:14.428809Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:14.428848Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:14.428866Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:14.429011Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15461 TClient is connected to server localhost:15461 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:15.212950Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:15.247943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:15.489834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:15.742090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:15.817931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:17.408374Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008750949610548:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:17.408488Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:17.760214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:17.789877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:17.820119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:17.848928Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:17.877189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:17.962692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:18.054891Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008755244578367:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:18.055018Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:18.055410Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008755244578372:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:18.059233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:18.068990Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008755244578374:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:18:18.130107Z node 1 :TX_PROXY ERROR: Actor# [1:7488008755244578428:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:18.665784Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008733769739729:2190];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:18.665863Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 23076, MsgBus: 20121 2025-03-31T16:18:20.428851Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008763435024012:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:20.428970Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001178/r3tmp/tmpvcEAq2/pdisk_1.dat 2025-03-31T16:18:20.537151Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23076, node 2 2025-03-31T16:18:20.585791Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:20.585936Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:20.593081Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:18:20.632228Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:20.632248Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:20.632257Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:20.632366Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20121 TClient is connected to server localhost:20121 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:18:21.226727Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:21.241711Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:18:21.254603Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:21.397587Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:21.559105Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:18:21.681009Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-31T16:18:23.776368Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008776319927651:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:23.776448Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:23.813083Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:23.846878Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:23.930895Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:23.972012Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.004843Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.047941Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.139323Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008780614895464:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:24.139419Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008780614895469:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:24.139456Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:24.142771Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:24.159448Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488008780614895471:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:18:24.226393Z node 2 :TX_PROXY ERROR: Actor# [2:7488008780614895526:3442] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:25.429204Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488008763435024012:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:25.429266Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:18:25.629211Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7488008784909863152:2502], TxId: 281474976715672, task: 1. Ctx: { TraceId : 01jqpfyfcv2yyr1abwddbb3dx0. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MTY1Nzc2YzgtYjQyYjYyNTUtMjRmZTRlYjgtNTAzNzcxODM=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Duplicated keys found., code: 2012 }. 2025-03-31T16:18:25.629667Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7488008784909863153:2503], TxId: 281474976715672, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MTY1Nzc2YzgtYjQyYjYyNTUtMjRmZTRlYjgtNTAzNzcxODM=. TraceId : 01jqpfyfcv2yyr1abwddbb3dx0. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7488008784909863149:2488], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-03-31T16:18:25.630181Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MTY1Nzc2YzgtYjQyYjYyNTUtMjRmZTRlYjgtNTAzNzcxODM=, ActorId: [2:7488008784909863079:2488], ActorState: ExecuteState, TraceId: 01jqpfyfcv2yyr1abwddbb3dx0, Create QueryResponse for error on request, msg: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::DeletePkPrefixWithIndex [GOOD] Test command err: Trying to start YDB, gRPC: 21280, MsgBus: 27084 2025-03-31T16:18:13.651772Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008733906763523:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:13.651815Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001183/r3tmp/tmpr5SQzE/pdisk_1.dat 2025-03-31T16:18:14.204904Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:14.205040Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:14.207453Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:14.210822Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21280, node 1 2025-03-31T16:18:14.425924Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:14.425953Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:14.425961Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:14.426103Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27084 TClient is connected to server localhost:27084 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:15.354241Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:18:15.377406Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:18:15.398001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:15.597885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:15.855327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:15.937655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:17.244011Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008751086634466:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:17.244137Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:17.759943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:17.798414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:17.830018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:17.862524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:17.909682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:17.944496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:17.990360Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008751086634977:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:17.990448Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:17.990651Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008751086634982:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:17.995992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:18.005646Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008751086634984:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:18:18.076230Z node 1 :TX_PROXY ERROR: Actor# [1:7488008755381602334:3443] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:18.652084Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008733906763523:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:18.661468Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:18:19.218369Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=3; 2025-03-31T16:18:19.230976Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 3 at tablet 72075186224037888 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-03-31T16:18:19.231216Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 3 at tablet 72075186224037888 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-03-31T16:18:19.231442Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488008759676569950:2497], Table: `/Root/TwoShard` ([72057594046644480:2:1]), SessionActorId: [1:7488008759676569928:2497]Got CONSTRAINT VIOLATION for table `/Root/TwoShard`. ShardID=72075186224037888, Sink=[1:7488008759676569950:2497].{
: Error: Duplicate keys have been found., code: 2012 } 2025-03-31T16:18:19.232083Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488008759676569943:2497], SessionActorId: [1:7488008759676569928:2497], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[1:7488008759676569928:2497]. isRollback=0 2025-03-31T16:18:19.232290Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZWVlZDQ0MzQtZTQxM2M4ZWUtMmZkYjJmNmQtOTRiMmZkMmY=, ActorId: [1:7488008759676569928:2497], ActorState: ExecuteState, TraceId: 01jqpfy9cpcwbrrca190jfttxt, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7488008759676569944:2497] from: [1:7488008759676569943:2497] 2025-03-31T16:18:19.232353Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7488008759676569944:2497] TxId: 281474976710671. Ctx: { TraceId: 01jqpfy9cpcwbrrca190jfttxt, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWVlZDQ0MzQtZTQxM2M4ZWUtMmZkYjJmNmQtOTRiMmZkMmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-03-31T16:18:19.233160Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZWVlZDQ0MzQtZTQxM2M4ZWUtMmZkYjJmNmQtOTRiMmZkMmY=, ActorId: [1:7488008759676569928:2497], ActorState: ExecuteState, TraceId: 01jqpfy9cpcwbrrca190jfttxt, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 20573, MsgBus: 63727 2025-03-31T16:18:20.622048Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008764880413532:2197];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:20.631489Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001183/r3tmp/tmpZ1tIM7/pdisk_1.dat 2025-03-31T16:18:20.817341Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:20.844107Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:20.844195Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:20.847825Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20573, node 2 2025-03-31T16:18:20.995100Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:20.995126Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:20.995135Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:20.995275Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63727 TClient is connected to server localhost:63727 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:21.565497Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:21.587642Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:18:21.609166Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:18:21.717514Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:21.886634Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:21.951978Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:24.232042Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008782060284308:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:24.232147Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:24.293161Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.331887Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.381163Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.413781Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.450279Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.497185Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.566588Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008782060284817:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:24.566725Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:24.567190Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008782060284822:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:24.570570Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:24.582713Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488008782060284824:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:18:24.649088Z node 2 :TX_PROXY ERROR: Actor# [2:7488008782060284877:3437] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:25.613995Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488008764880413532:2197];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:25.614053Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:18:25.614704Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::BigRow [GOOD] Test command err: Trying to start YDB, gRPC: 7172, MsgBus: 21272 2025-03-31T16:18:13.668399Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008733294705198:2131];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:13.668433Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001181/r3tmp/tmpbty42W/pdisk_1.dat 2025-03-31T16:18:14.307620Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:14.320348Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:14.320432Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:14.324268Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7172, node 1 2025-03-31T16:18:14.426799Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:14.426820Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:14.426831Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:14.426938Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21272 TClient is connected to server localhost:21272 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:15.193533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:15.231220Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:18:15.245660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:15.471771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:15.658987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:15.750441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:17.331270Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008750474576075:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:17.331375Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:17.760425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:17.838480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:17.912806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:17.950009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:18.022961Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:18.064072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:18.113729Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008754769543891:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:18.113845Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008754769543896:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:18.113893Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:18.117282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:18.129287Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008754769543898:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:18:18.207251Z node 1 :TX_PROXY ERROR: Actor# [1:7488008754769543951:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:18.678047Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008733294705198:2131];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:18.678201Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:18:19.206226Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:18:19.537025Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7488008759064511789:2497] TxId: 281474976710673. Ctx: { TraceId: 01jqpfy9meajhd8x8bxmmzqs3a, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWE1MjRkOTgtYjhhZTJhOTktMjkyMjkzNDAtY2E3NGY4Yg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Executing physical tx, type: 2, stages: 1 2025-03-31T16:18:19.537176Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 1, OutputsCount: 1 2025-03-31T16:18:19.537392Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710673. Resolved key sets: 1 2025-03-31T16:18:19.537544Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710673. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 16] Access: 1 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 3 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2025-03-31T16:18:19.537607Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7488008759064511789:2497] TxId: 281474976710673. Ctx: { TraceId: 01jqpfy9meajhd8x8bxmmzqs3a, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWE1MjRkOTgtYjhhZTJhOTktMjkyMjkzNDAtY2E3NGY4Yg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Stage [0,0] AST: ( (return (lambda '($1) $1)) ) 2025-03-31T16:18:19.537837Z node 1 :KQP_EXECUTER DEBUG: Create result channelId: 1 from task: 1 with index: 0 2025-03-31T16:18:19.538130Z node 1 :KQP_EXECUTER DEBUG: [ShardsResolver] TxId: 281474976710673. Shard resolve complete, resolved shards: 1 2025-03-31T16:18:19.538185Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7488008759064511789:2497] TxId: 281474976710673. Ctx: { TraceId: 01jqpfy9meajhd8x8bxmmzqs3a, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWE1MjRkOTgtYjhhZTJhOTktMjkyMjkzNDAtY2E3NGY4Yg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolved, success: 1, failed: 0 2025-03-31T16:18:19.538235Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7488008759064511789:2497] TxId: 281474976710673. 
Ctx: { TraceId: 01jqpfy9meajhd8x8bxmmzqs3a, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWE1MjRkOTgtYjhhZTJhOTktMjkyMjkzNDAtY2E3NGY4Yg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards on nodes: node 1: [72075186224037919] 2025-03-31T16:18:19.538301Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710673. Ctx: { TraceId: 01jqpfy9meajhd8x8bxmmzqs3a, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWE1MjRkOTgtYjhhZTJhOTktMjkyMjkzNDAtY2E3NGY4Yg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 1, readonly: true, 1 scan tasks on 1 nodes, localComputeTasks: 0, snapshot: {18446744073709551615, 1743437899434} 2025-03-31T16:18:19.538544Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710673. Ctx: { TraceId: 01jqpfy9meajhd8x8bxmmzqs3a, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWE1MjRkOTgtYjhhZTJhOTktMjkyMjkzNDAtY2E3NGY4Yg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Collect channels updates for task: 1 at actor [1:7488008759064511793:2497] 2025-03-31T16:18:19.538592Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710673. Ctx: { TraceId: 01jqpfy9meajhd8x8bxmmzqs3a, Database: ... hoqst4xe" NodeId: 1 StartTimeMs: 1743437899724 CreateTimeMs: 1743437899721 } MaxMemoryUsage: 1048576 } 2025-03-31T16:18:19.725031Z node 1 :KQP_EXECUTER INFO: TxId: 281474976710675. Ctx: { TraceId: 01jqpfy9vcajhzh74k8thzmatd, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWE1MjRkOTgtYjhhZTJhOTktMjkyMjkzNDAtY2E3NGY4Yg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [1:7488008759064511851:2497] 2025-03-31T16:18:19.725170Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7488008759064511845:2497] TxId: 281474976710675. Ctx: { TraceId: 01jqpfy9vcajhzh74k8thzmatd, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWE1MjRkOTgtYjhhZTJhOTktMjkyMjkzNDAtY2E3NGY4Yg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-03-31T16:18:19.725202Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7488008759064511845:2497] TxId: 281474976710675. Ctx: { TraceId: 01jqpfy9vcajhzh74k8thzmatd, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWE1MjRkOTgtYjhhZTJhOTktMjkyMjkzNDAtY2E3NGY4Yg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.001142s ReadRows: 1 ReadBytes: 22 ru: 1 rate limiter was not found force flag: 1 2025-03-31T16:18:19.725717Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710676. Resolved key sets: 0 2025-03-31T16:18:19.725798Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710676. Ctx: { TraceId: 01jqpfy9vcajhzh74k8thzmatd, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWE1MjRkOTgtYjhhZTJhOTktMjkyMjkzNDAtY2E3NGY4Yg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, snapshot: {18446744073709551615, 1743437899434} 2025-03-31T16:18:19.725830Z node 1 :KQP_EXECUTER INFO: ActorId: [1:7488008759064511854:2497] TxId: 281474976710676. Ctx: { TraceId: 01jqpfy9vcajhzh74k8thzmatd, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWE1MjRkOTgtYjhhZTJhOTktMjkyMjkzNDAtY2E3NGY4Yg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Total tasks: 0, readonly: 0, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-03-31T16:18:19.725868Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7488008759064511854:2497] TxId: 281474976710676. Ctx: { TraceId: 01jqpfy9vcajhzh74k8thzmatd, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWE1MjRkOTgtYjhhZTJhOTktMjkyMjkzNDAtY2E3NGY4Yg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Send Commit to BufferActor=[1:7488008759064511835:2497] 2025-03-31T16:18:19.725911Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7488008759064511854:2497] TxId: 281474976710676. Ctx: { TraceId: 01jqpfy9vcajhzh74k8thzmatd, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWE1MjRkOTgtYjhhZTJhOTktMjkyMjkzNDAtY2E3NGY4Yg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-03-31T16:18:19.728485Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7488008759064511854:2497] TxId: 281474976710676. Ctx: { TraceId: 01jqpfy9vcajhzh74k8thzmatd, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWE1MjRkOTgtYjhhZTJhOTktMjkyMjkzNDAtY2E3NGY4Yg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. Trying to start YDB, gRPC: 12680, MsgBus: 12781 2025-03-31T16:18:20.595337Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008765750176300:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:20.595806Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001181/r3tmp/tmpvmkzwU/pdisk_1.dat 2025-03-31T16:18:20.772308Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:20.772381Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:20.773140Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:20.789002Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12680, node 2 2025-03-31T16:18:20.911493Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:20.911513Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:20.911520Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:20.911622Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12781 TClient is connected to server localhost:12781 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:21.496346Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:21.523269Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:21.541788Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:18:21.671279Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:21.855210Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:21.965276Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:24.062124Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008782930047228:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:24.062262Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:24.103624Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.137492Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.172835Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.215742Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.286274Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.325141Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.405975Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008782930047748:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:24.406069Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:24.406300Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008782930047753:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:24.410635Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:24.420282Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488008782930047755:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:18:24.501746Z node 2 :TX_PROXY ERROR: Actor# [2:7488008782930047810:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:25.386726Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:18:25.598322Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488008765750176300:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:25.607198Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpImmediateEffects::ConflictingKeyRW1RWR2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::SingleRowSimple-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 17690, MsgBus: 7288 2025-03-31T16:18:13.663819Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008735557147724:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:13.663863Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00117a/r3tmp/tmpLmey9Y/pdisk_1.dat 2025-03-31T16:18:14.163883Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:14.213211Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:14.213312Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:14.216795Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17690, node 1 2025-03-31T16:18:14.427699Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:14.427728Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:14.427735Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:14.427850Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7288 TClient is connected to server localhost:7288 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:15.266249Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:15.297485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:15.459446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:15.618578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:15.721782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:17.279332Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008752737018684:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:17.279432Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:17.760432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:17.788384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:17.818011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:17.886764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:17.922603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:17.967550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:18.034347Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008757031986494:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:18.034429Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:18.034851Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008757031986499:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:18.038381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:18.048751Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008757031986501:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:18:18.145561Z node 1 :TX_PROXY ERROR: Actor# [1:7488008757031986557:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:18.663929Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008735557147724:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:18.664032Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:18:19.179446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 22323, MsgBus: 11524 2025-03-31T16:18:20.606259Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008764282536302:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:20.606304Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00117a/r3tmp/tmpFJy2H1/pdisk_1.dat 2025-03-31T16:18:20.750270Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:20.767503Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:20.767577Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:20.776181Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22323, node 2 2025-03-31T16:18:20.831587Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:20.831650Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:20.831662Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:20.831761Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11524 TClient is connected to server localhost:11524 WaitRootIsUp 'Root'... 
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-31T16:18:21.306803Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:18:21.336321Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:18:21.432965Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:18:21.564930Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:18:21.665429Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:18:23.996048Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008777167439953:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:18:23.996148Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:18:24.040146Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-03-31T16:18:24.112091Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-03-31T16:18:24.157198Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-03-31T16:18:24.197711Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-03-31T16:18:24.281955Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-03-31T16:18:24.349958Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-03-31T16:18:24.407476Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008781462407766:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:18:24.407563Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:18:24.407633Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008781462407771:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:18:24.411423Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480
2025-03-31T16:18:24.422912Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488008781462407773:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
2025-03-31T16:18:24.518619Z node 2 :TX_PROXY ERROR: Actor# [2:7488008781462407829:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-31T16:18:25.533605Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480
2025-03-31T16:18:25.620958Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488008764282536302:2064];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:18:25.621114Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::OlapRangeFullScan [GOOD]
Test command err:
Trying to start YDB, gRPC: 3028, MsgBus: 19522
2025-03-31T16:18:18.333526Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008756349531672:2070];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:18:18.333607Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000b4b/r3tmp/tmpUJUVcc/pdisk_1.dat
2025-03-31T16:18:18.794238Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:18:18.823264Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:18:18.823374Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:18:18.833012Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 3028, node 1
2025-03-31T16:18:19.055710Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T16:18:19.055735Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T16:18:19.055745Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T16:18:19.055879Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:19522
TClient is connected to server localhost:19522
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-31T16:18:19.932838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:18:19.984105Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:18:20.182450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
waiting...
2025-03-31T16:18:20.375403Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
2025-03-31T16:18:20.450324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:18:21.850939Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008769234435318:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:18:21.851050Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:18:22.863915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-31T16:18:22.899501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-31T16:18:22.931787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-31T16:18:22.973130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-31T16:18:23.021233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-31T16:18:23.069690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-31T16:18:23.148866Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008777824370429:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:18:23.148958Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:18:23.149012Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008777824370434:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:18:23.154491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-31T16:18:23.169045Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008777824370436:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-03-31T16:18:23.233900Z node 1 :TX_PROXY ERROR: Actor# [1:7488008777824370489:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-31T16:18:23.333975Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008756349531672:2070];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:18:23.334031Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-31T16:18:24.626847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-03-31T16:18:24.795162Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;self_id=[1:7488008782119338199:2504];tablet_id=72075186224037924;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0;
2025-03-31T16:18:24.795400Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;self_id=[1:7488008782119338199:2504];tablet_id=72075186224037924;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules;
2025-03-31T16:18:24.795676Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;self_id=[1:7488008782119338199:2504];tablet_id=72075186224037924;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks;
2025-03-31T16:18:24.795804Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;self_id=[1:7488008782119338199:2504];tablet_id=72075186224037924;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner;
2025-03-31T16:18:24.795905Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;self_id=[1:7488008782119338199:2504];tablet_id=72075186224037924;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId;
2025-03-31T16:18:24.796020Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;self_id=[1:7488008782119338199:2504];tablet_id=72075186224037924;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup;
2025-03-31T16:18:24.796138Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;self_id=[1:7488008782119338199:2504];tablet_id=72075186224037924;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer;
2025-03-31T16:18:24.796267Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;self_id=[1:7488008782119338199:2504];tablet_id=72075186224037924;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks;
2025-03-31T16:18:24.796401Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;self_id=[1:7488008782119338199:2504];tablet_id=72075186224037924;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks;
2025-03-31T16:18:24.796490Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;self_id=[1:7488008782119338199:2504];tablet_id=72075186224037924;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks;
2025-03-31T16:18:24.796557Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;self_id=[1:7488008782119338199:2504];tablet_id=72075186224037924;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2;
2025-03-31T16:18:24.796616Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;self_id=[1:7488008782119338199:2504];tablet_id=72075186224037924;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks;
2025-03-31T16:18:24.798389Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[1:7488008782119338201:2505];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0;
2025-03-31T16:18:24.798435Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[1:7488008782119338201:2505];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules;
2025-03-31T16:18:24.798661Z node 1 :TX_COLUM ... MNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4;
2025-03-31T16:18:24.968685Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId;
2025-03-31T16:18:24.968735Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6;
2025-03-31T16:18:24.968756Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup;
2025-03-31T16:18:24.968802Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8;
2025-03-31T16:18:24.968821Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer;
2025-03-31T16:18:24.968846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9;
2025-03-31T16:18:24.968858Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks;
2025-03-31T16:18:24.970064Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10;
2025-03-31T16:18:24.970118Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks;
2025-03-31T16:18:24.970282Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11;
2025-03-31T16:18:24.970314Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks;
2025-03-31T16:18:24.970427Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13;
2025-03-31T16:18:24.970455Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2;
2025-03-31T16:18:24.970590Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15;
2025-03-31T16:18:24.970612Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks;
2025-03-31T16:18:24.970691Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16;
2025-03-31T16:18:24.970709Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished;
2025-03-31T16:18:24.988444Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1;
2025-03-31T16:18:24.988520Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks;
2025-03-31T16:18:24.988607Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2;
2025-03-31T16:18:24.988633Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner;
2025-03-31T16:18:24.988781Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4;
2025-03-31T16:18:24.988804Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId;
2025-03-31T16:18:24.988887Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6;
2025-03-31T16:18:24.988914Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup;
2025-03-31T16:18:24.988969Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8;
2025-03-31T16:18:24.988994Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer;
2025-03-31T16:18:24.989030Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9;
2025-03-31T16:18:24.989056Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks;
2025-03-31T16:18:24.989571Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10;
2025-03-31T16:18:24.989610Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks;
2025-03-31T16:18:24.989776Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11;
2025-03-31T16:18:24.989802Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks;
2025-03-31T16:18:24.989918Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13;
2025-03-31T16:18:24.989950Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2;
2025-03-31T16:18:24.990126Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15;
2025-03-31T16:18:24.990176Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks;
2025-03-31T16:18:24.990287Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16;
2025-03-31T16:18:24.990329Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished;
2025-03-31T16:18:25.033377Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037921;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671;
2025-03-31T16:18:25.034064Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671;
2025-03-31T16:18:25.040191Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037924;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671;
2025-03-31T16:18:25.040262Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037925;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671;
2025-03-31T16:18:25.046218Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037926;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671;
2025-03-31T16:18:25.046218Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671;
2025-03-31T16:18:25.052550Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037919;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671;
2025-03-31T16:18:25.053180Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671;
2025-03-31T16:18:25.058792Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037922;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671;
2025-03-31T16:18:25.059576Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037920;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671;
2025-03-31T16:18:25.313137Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=281474976710673;tx_id=281474976710673;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710673;
2025-03-31T16:18:25.314161Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=281474976710673;tx_id=281474976710673;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710673;
2025-03-31T16:18:25.314868Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=281474976710673;tx_id=281474976710673;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710673;
query_phases { duration_us: 269136 table_access { name: "/Root/TestTable" reads { rows: 2 bytes: 72 } } cpu_time_us: 104000 } compilation { duration_us: 450957 cpu_time_us: 446845 } process_cpu_time_us: 374 total_duration_us: 728483 total_cpu_time_us: 551219
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ReplaceExistingKey [GOOD]
Test command err:
Trying to start YDB, gRPC: 16600, MsgBus: 18936
2025-03-31T16:18:13.680213Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008735129152733:2065];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:18:13.680321Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00118b/r3tmp/tmpA0ROT3/pdisk_1.dat
2025-03-31T16:18:14.326314Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:18:14.329465Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:18:14.329566Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:18:14.331617Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 16600, node 1
2025-03-31T16:18:14.434947Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T16:18:14.434969Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T16:18:14.434976Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T16:18:14.435117Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:18936
TClient is connected to server localhost:18936
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-31T16:18:15.340777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:18:15.366592Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-03-31T16:18:15.384083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:18:15.568372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:18:15.843034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:18:15.950681Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:18:17.389053Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008752309023685:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:18:17.389172Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:18:17.759948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-31T16:18:17.789285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-31T16:18:17.817817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-31T16:18:17.852812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-31T16:18:17.887682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-31T16:18:17.957138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-31T16:18:18.038975Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008756603991500:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:18:18.039074Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:18:18.041914Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008756603991505:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:18:18.046450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-31T16:18:18.056484Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008756603991507:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-03-31T16:18:18.130850Z node 1 :TX_PROXY ERROR: Actor# [1:7488008756603991562:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-31T16:18:18.680659Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008735129152733:2065];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:18:18.680729Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-31T16:18:19.172542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
Trying to start YDB, gRPC: 15327, MsgBus: 17796
2025-03-31T16:18:20.471396Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008765924207564:2063];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:18:20.471937Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00118b/r3tmp/tmpK3pLIE/pdisk_1.dat
2025-03-31T16:18:20.655771Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:18:20.671006Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:18:20.671664Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:18:20.673159Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 15327, node 2
2025-03-31T16:18:20.879695Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T16:18:20.879716Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T16:18:20.879723Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T16:18:20.879841Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:17796
TClient is connected to server localhost:17796
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-31T16:18:21.405185Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:18:21.419722Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480
2025-03-31T16:18:21.434131Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:18:21.511640Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:18:21.699409Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:18:21.792323Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:18:24.055278Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008783104078535:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:18:24.055358Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:18:24.101306Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-03-31T16:18:24.134413Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-03-31T16:18:24.175282Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-03-31T16:18:24.209269Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-03-31T16:18:24.241650Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-03-31T16:18:24.324364Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-03-31T16:18:24.409793Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008783104079056:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:18:24.409902Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:18:24.410250Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008783104079061:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:18:24.414672Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480
2025-03-31T16:18:24.425147Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488008783104079063:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
2025-03-31T16:18:24.484325Z node 2 :TX_PROXY ERROR: Actor# [2:7488008783104079115:3442] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-31T16:18:25.400979Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480
2025-03-31T16:18:25.473441Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488008765924207564:2063];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:18:25.505387Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupAtLeast8BytesInStorage-useSink [GOOD]
Test command err:
Trying to start YDB, gRPC: 20837, MsgBus: 7539
2025-03-31T16:18:19.982353Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008761687354709:2063];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:18:19.982768Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000b1f/r3tmp/tmpRp5bjK/pdisk_1.dat
2025-03-31T16:18:20.461778Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:18:20.476043Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:18:20.476139Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:18:20.478163Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 20837, node 1
2025-03-31T16:18:20.575595Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T16:18:20.575614Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T16:18:20.575624Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T16:18:20.575731Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:7539
TClient is connected to server localhost:7539
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-31T16:18:21.310408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:18:21.348855Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-03-31T16:18:21.371324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:18:21.578331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:18:21.772734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:18:21.858371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:18:23.567568Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008778867225690:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:18:23.567728Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:18:23.979316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-31T16:18:24.020044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-31T16:18:24.060329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-31T16:18:24.123474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-31T16:18:24.161054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-31T16:18:24.231965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-31T16:18:24.319868Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008783162193510:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:18:24.319976Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:18:24.320619Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008783162193515:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:18:24.324895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-31T16:18:24.339800Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480
2025-03-31T16:18:24.340084Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008783162193517:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-03-31T16:18:24.429667Z node 1 :TX_PROXY ERROR: Actor# [1:7488008783162193573:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-31T16:18:24.987307Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008761687354709:2063];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:18:24.987369Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-31T16:18:25.469088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
/Root/SecondaryKeys/Index/indexImplTable 1 8 /Root/SecondaryKeys 1 8
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyR1WRR2 [GOOD]
Test command err:
Trying to start YDB, gRPC: 18676, MsgBus: 2480
2025-03-31T16:18:13.687550Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008735800823091:2276];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:18:13.687601Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00118f/r3tmp/tmpW6ZKxQ/pdisk_1.dat
2025-03-31T16:18:14.228700Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:18:14.244427Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:18:14.244545Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:18:14.249367Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 18676, node 1
2025-03-31T16:18:14.427672Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T16:18:14.427696Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T16:18:14.427718Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T16:18:14.427839Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:2480
TClient is connected to server localhost:2480
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:15.225227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:15.240631Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:18:15.254237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:15.473669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:18:15.687064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:18:15.774373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:17.337457Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008752980693813:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:17.337609Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:17.760215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:17.799544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:17.842761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:17.876160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:17.905439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:17.944286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:17.996378Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008752980694325:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:17.996444Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:17.996787Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008752980694330:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:18.000180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:18.014482Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008752980694332:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:18:18.098726Z node 1 :TX_PROXY ERROR: Actor# [1:7488008757275661681:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:18.691228Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008735800823091:2276];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:18.691334Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:18:19.211172Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 4903, MsgBus: 16194 2025-03-31T16:18:20.663668Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008766491843652:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:20.664587Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00118f/r3tmp/tmppFRlo8/pdisk_1.dat 2025-03-31T16:18:20.925025Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:20.941153Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:20.941213Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:20.942305Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4903, node 2 2025-03-31T16:18:21.067659Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:21.067685Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:21.067692Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:21.067806Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16194 TClient is connected to server localhost:16194 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:21.562790Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:21.575462Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:18:21.585278Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:21.691749Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:21.887696Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:21.996767Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:24.512252Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008783671714583:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:24.512336Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:24.556097Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.592371Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.631830Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.692427Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.732837Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.768753Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.851055Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008783671715100:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:24.851165Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:24.851373Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008783671715105:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:24.856100Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:24.867134Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488008783671715107:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:18:24.922134Z node 2 :TX_PROXY ERROR: Actor# [2:7488008783671715160:3440] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:25.664098Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488008766491843652:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:25.670938Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:18:25.820701Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertRevert_Literal_Conflict [GOOD] Test command err: Trying to start YDB, gRPC: 28595, MsgBus: 8694 2025-03-31T16:18:13.651661Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008735274038457:2130];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:13.651818Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001176/r3tmp/tmpUc6Bz1/pdisk_1.dat 2025-03-31T16:18:14.277579Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:14.293145Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:14.293268Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:14.295007Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28595, node 1 2025-03-31T16:18:14.431889Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:14.431913Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:14.431927Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:14.432036Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8694 TClient is connected to server localhost:8694 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:15.195675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:15.245028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:18:15.414371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-31T16:18:15.591735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:15.721963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:17.143678Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008752453909353:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:17.143857Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:17.759999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:17.801864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:17.835492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:17.903005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:17.933737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:18.009837Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:18.094312Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008756748877170:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:18.094392Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:18.094444Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008756748877175:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:18.098541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:18.107685Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008756748877177:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:18:18.185886Z node 1 :TX_PROXY ERROR: Actor# [1:7488008756748877235:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:18.651858Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008735274038457:2130];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:18.651949Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 19927, MsgBus: 9513 2025-03-31T16:18:20.579744Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008766447568350:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:20.579792Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001176/r3tmp/tmpOvpPq5/pdisk_1.dat 2025-03-31T16:18:20.870779Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:20.887047Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:20.887309Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:20.889321Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19927, node 2 2025-03-31T16:18:21.015063Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:21.015098Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:21.015105Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:21.015212Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9513 TClient is connected to server localhost:9513 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
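Editor's note: the "Resource pool default not found or you don't have access permissions" warnings, the "Transaction ... completed, doublechecking" retry, and the TX_PROXY "path exist, request accepts it" error repeat at the start of every run in this report, including tests that finish [GOOD], so they read as benign start-up noise from the lazy creation of the default workload-manager pool rather than as failure signals. Below is a minimal triage sketch, assuming only the message shapes visible in this log (the patterns are not a stable API), that drops these known-benign records so unexpected errors stand out:

```python
# filter_noise.py -- illustrative sketch, not part of the test suite.
# Reads a captured test log on stdin and prints WARN/ERROR records that do
# not match the recurring start-up noise documented above.
import re
import sys

BENIGN = [
    re.compile(r"Resource pool default not found"),
    re.compile(r"Failed to fetch pool (info|default)"),
    re.compile(r"completed, doublechecking"),
    re.compile(r"error: path exist, request accepts it"),
]

def is_benign(record: str) -> bool:
    return any(p.search(record) for p in BENIGN)

for record in sys.stdin:
    if ("ERROR" in record or "WARN" in record) and not is_benign(record):
        sys.stdout.write(record)
```

Usage would be along the lines of `python filter_noise.py < test_stderr.log` (filename hypothetical).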
2025-03-31T16:18:21.648426Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:21.654611Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:18:21.670167Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:21.777628Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:21.963600Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:22.078093Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:24.291525Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008783627439301:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:24.291646Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:24.357403Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.396121Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.435310Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.478306Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.520627Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.568585Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.633110Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008783627439811:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:24.633171Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:24.633446Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008783627439816:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:24.636275Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:24.646318Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488008783627439818:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:18:24.733935Z node 2 :TX_PROXY ERROR: Actor# [2:7488008783627439876:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:25.580193Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488008766447568350:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:25.580248Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Params_Duplicates-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 23178, MsgBus: 4592 2025-03-31T16:18:13.650465Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008734689538129:2191];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:13.650566Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001194/r3tmp/tmprXjrB7/pdisk_1.dat 2025-03-31T16:18:14.188302Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:14.211148Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:14.211241Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:14.217404Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23178, node 1 2025-03-31T16:18:14.431137Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:14.431159Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:14.431168Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:14.431267Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4592 TClient is connected to server localhost:4592 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
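Editor's note: the METADATA_PROVIDER "cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout" errors likewise appear on both nodes of runs that end [GOOD], which suggests environmental noise rather than a failure signal. One way to test that hypothesis mechanically is to histogram records by message shape across the whole report; a sketch follows, with normalization rules assumed from the formats above:

```python
# noise_histogram.py -- illustrative sketch. Counts WARN/ERROR records by
# shape (timestamps, actor ids and large numeric ids masked) so messages
# that recur in every run can be told apart from one-off failures.
import re
from collections import Counter
from typing import Iterable

TS = re.compile(r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z")
ACTOR = re.compile(r"\[\d+:\d+:\d+\]")
BIGNUM = re.compile(r"\b\d{6,}\b")

def normalize(record: str) -> str:
    record = TS.sub("<ts>", record)
    record = ACTOR.sub("<actor>", record)
    return BIGNUM.sub("<id>", record)

def histogram(records: Iterable[str]) -> Counter:
    return Counter(
        normalize(r) for r in records if "ERROR" in r or "WARN" in r
    )
```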
2025-03-31T16:18:15.290561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:15.315945Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:18:15.333816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:15.588503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:15.861089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:15.953953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:17.145709Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008751869408956:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:17.145870Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:17.759948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:17.793411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:17.821556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:17.856440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:17.886769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:17.954828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:18.005492Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008756164376767:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:18.005569Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:18.005874Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008756164376772:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:18.009492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:18.026642Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008756164376774:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:18:18.094925Z node 1 :TX_PROXY ERROR: Actor# [1:7488008756164376827:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:18.650930Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008734689538129:2191];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:18.650994Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:18:19.352499Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=3; 2025-03-31T16:18:19.362811Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 3 at tablet 72075186224037888 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-03-31T16:18:19.362988Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 3 at tablet 72075186224037888 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-03-31T16:18:19.363224Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488008760459344465:2499], Table: `/Root/TwoShard` ([72057594046644480:2:1]), SessionActorId: [1:7488008760459344423:2499]Got CONSTRAINT VIOLATION for table `/Root/TwoShard`. ShardID=72075186224037888, Sink=[1:7488008760459344465:2499].{
: Error: Duplicate keys have been found., code: 2012 } 2025-03-31T16:18:19.363861Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488008760459344458:2499], SessionActorId: [1:7488008760459344423:2499], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[1:7488008760459344423:2499]. isRollback=0 2025-03-31T16:18:19.364102Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDljMzg1Y2EtNTIxMjdjYWQtZjg5NDIzMDgtNDZhNzQzODU=, ActorId: [1:7488008760459344423:2499], ActorState: ExecuteState, TraceId: 01jqpfy9crebmywpqz6ww34stv, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7488008760459344459:2499] from: [1:7488008760459344458:2499] 2025-03-31T16:18:19.364199Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7488008760459344459:2499] TxId: 281474976710671. Ctx: { TraceId: 01jqpfy9crebmywpqz6ww34stv, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDljMzg1Y2EtNTIxMjdjYWQtZjg5NDIzMDgtNDZhNzQzODU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-03-31T16:18:19.365070Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDljMzg1Y2EtNTIxMjdjYWQtZjg5NDIzMDgtNDZhNzQzODU=, ActorId: [1:7488008760459344423:2499], ActorState: ExecuteState, TraceId: 01jqpfy9crebmywpqz6ww34stv, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 5564, MsgBus: 9563 2025-03-31T16:18:20.540925Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008764544813884:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:20.540985Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001194/r3tmp/tmpwUWU4U/pdisk_1.dat 2025-03-31T16:18:20.710405Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:20.736237Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:20.736323Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:20.741828Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5564, node 2 2025-03-31T16:18:20.868997Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:20.869024Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:20.869032Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:20.869139Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9563 TClient is connected to server localhost:9563 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:21.444464Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:21.453596Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:18:21.471135Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
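Editor's note: the PRECONDITION_FAILED sequence above (STATUS_CONSTRAINT_VIOLATION from the datashard, then "Constraint violated. Table: `/Root/TwoShard`." with the subissue "Duplicate keys have been found., code: 2012") is the outcome KqpEffects::InsertAbort_Params_Duplicates-UseSink provokes deliberately, consistent with its [GOOD] verdict. A sketch for extracting issue codes and messages from such records, with the regex assumed from the issue format visible here rather than from any documented grammar:

```python
# issue_scan.py -- illustrative sketch. Pulls "<message>, code: <n>" issue
# pairs out of KQP log records so expected constraint violations (code 2012
# above) can be asserted on programmatically.
import re
from typing import Iterator, Tuple

ISSUE = re.compile(r": Error: (?P<msg>[^{}]+?), code: (?P<code>\d+)")

def issues(log_text: str) -> Iterator[Tuple[int, str]]:
    for m in ISSUE.finditer(log_text):
        yield int(m.group("code")), m.group("msg").strip()

sample = ": Error: Duplicate keys have been found., code: 2012 }"
assert list(issues(sample)) == [(2012, "Duplicate keys have been found.")]
```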
2025-03-31T16:18:21.605279Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:21.768294Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:21.870724Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:24.272667Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008781724684815:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:24.272768Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:24.325217Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.367219Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.414097Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.465353Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.535881Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.615959Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.685880Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008781724685337:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:24.686008Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:24.686342Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008781724685342:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:24.690199Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:24.705533Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488008781724685344:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:18:24.806679Z node 2 :TX_PROXY ERROR: Actor# [2:7488008781724685401:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:25.543482Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488008764544813884:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:25.543546Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:18:26.151239Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7488008790314620324:2501], TxId: 281474976710672, task: 1. Ctx: { TraceId : 01jqpfyfy0eatycmy039ag9ydt. SessionId : ydb://session/3?node_id=2&id=NmE2MzZlNDYtNTc1MDI2ZjEtNTc2NmQ3NTktM2E2OGMzMzM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Duplicated keys found., code: 2012 }. 2025-03-31T16:18:26.151875Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7488008790314620325:2502], TxId: 281474976710672, task: 2. Ctx: { SessionId : ydb://session/3?node_id=2&id=NmE2MzZlNDYtNTc1MDI2ZjEtNTc2NmQ3NTktM2E2OGMzMzM=. TraceId : 01jqpfyfy0eatycmy039ag9ydt. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7488008790314620321:2489], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-03-31T16:18:26.152222Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NmE2MzZlNDYtNTc1MDI2ZjEtNTc2NmQ3NTktM2E2OGMzMzM=, ActorId: [2:7488008786019652955:2489], ActorState: ExecuteState, TraceId: 01jqpfyfy0eatycmy039ag9ydt, Create QueryResponse for error on request, msg: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::InsertConflictTxAborted [GOOD] Test command err: Trying to start YDB, gRPC: 13852, MsgBus: 10660 2025-03-31T16:18:13.650216Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008735906312200:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:13.650274Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001187/r3tmp/tmpDp000N/pdisk_1.dat 2025-03-31T16:18:14.222552Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:14.222659Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:14.224710Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:18:14.252145Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13852, node 1 2025-03-31T16:18:14.425890Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:14.425915Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:14.425928Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:14.426072Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10660 TClient is connected to server localhost:10660 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:15.290405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
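Editor's note: for the same failure, node 2 reports code 2012 with different wording ("Duplicated keys found.", surfaced as a KQP_COMPUTE InternalError) than node 1's datashard path ("Duplicate keys have been found."), so assertions over these logs are more robust when keyed on the issue code rather than on exact message text. A tiny sketch of that check; the constant's value is read off this log, not imported from a library:

```python
# duplicate_key_check.py -- illustrative sketch. Treats any issue carrying
# code 2012 as a duplicate-key constraint violation, regardless of which of
# the two wordings seen above a given execution path produces.
import re

KIKIMR_CONSTRAINT_VIOLATION = 2012  # value taken from the records above
_CODE = re.compile(r"code: (\d+)")

def is_duplicate_key_issue(record: str) -> bool:
    m = _CODE.search(record)
    return bool(m) and int(m.group(1)) == KIKIMR_CONSTRAINT_VIOLATION

assert is_duplicate_key_issue(": Error: Duplicated keys found., code: 2012 }")
assert is_duplicate_key_issue(": Error: Duplicate keys have been found., code: 2012")
```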
2025-03-31T16:18:15.307926Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:18:15.316913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:15.451032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:15.630712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:15.721362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:17.270833Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008753086183151:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:17.270949Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:17.760023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:17.789910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:17.815774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:17.842905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:17.876449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:17.912514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:17.993674Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008753086183662:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:17.993819Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:17.994021Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008753086183667:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:18:17.997196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-31T16:18:18.007563Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008753086183669:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:18:18.067352Z node 1 :TX_PROXY ERROR: Actor# [1:7488008757381151018:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:18.650618Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008735906312200:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:18.650689Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:18:19.186704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 9627, MsgBus: 21321 2025-03-31T16:18:20.836852Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008766624661632:2203];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:20.894325Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001187/r3tmp/tmpcKSxWI/pdisk_1.dat 2025-03-31T16:18:21.077817Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:21.091954Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:21.092027Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:21.093310Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9627, node 2 2025-03-31T16:18:21.256003Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:21.256026Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:21.256033Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:21.256139Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21321 TClient is connected to server localhost:21321 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:21.840323Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:21.847955Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:18:21.873570Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:21.987257Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:22.134600Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:22.222591Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:24.525885Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008783804532447:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:18:24.526006Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:18:24.579544Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-31T16:18:24.611417Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-31T16:18:24.657565Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-31T16:18:24.695693Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-31T16:18:24.733358Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-31T16:18:24.822680Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-31T16:18:24.879589Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008783804532959:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:18:24.879672Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:18:24.884244Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008783804532964:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:18:24.887999Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-31T16:18:24.898939Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488008783804532966:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-03-31T16:18:18.067352Z node 1 :TX_PROXY ERROR: Actor# [1:7488008757381151018:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-31T16:18:18.650618Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008735906312200:2063];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:18:18.650689Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-31T16:18:19.186704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
Trying to start YDB, gRPC: 9627, MsgBus: 21321
2025-03-31T16:18:20.836852Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008766624661632:2203];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:18:20.894325Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001187/r3tmp/tmpcKSxWI/pdisk_1.dat
2025-03-31T16:18:21.077817Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:18:21.091954Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:18:21.092027Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:18:21.093310Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 9627, node 2
2025-03-31T16:18:21.256003Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T16:18:21.256026Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T16:18:21.256033Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T16:18:21.256139Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:21321
TClient is connected to server localhost:21321
WaitRootIsUp 'Root'...
: Error: Duplicate keys have been found., code: 2012 }
2025-03-31T16:18:26.353210Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7488008792394468155:2497], SessionActorId: [2:7488008792394467911:2497], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TestImmediateEffects`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[2:7488008792394467911:2497]. isRollback=0
2025-03-31T16:18:26.353443Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZTljMWE3MTgtN2I3ODcwNjQtYTM0M2QxZmQtMmZjZGZmOTk=, ActorId: [2:7488008792394467911:2497], ActorState: ExecuteState, TraceId: 01jqpfyga7awwe09dewrbbcpe3, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [2:7488008792394468156:2497] from: [2:7488008792394468155:2497]
2025-03-31T16:18:26.353543Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7488008792394468156:2497] TxId: 281474976710673. Ctx: { TraceId: 01jqpfyga7awwe09dewrbbcpe3, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTljMWE3MTgtN2I3ODcwNjQtYTM0M2QxZmQtMmZjZGZmOTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/TestImmediateEffects`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-03-31T16:18:26.354443Z node 2 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_ABORTED;details=Distributed transaction aborted due to commit failure;tx_id=281474976710673; 2025-03-31T16:18:26.354615Z node 2 :TX_DATASHARD ERROR: Complete volatile write [1743437906385 : 281474976710673] from 72075186224037920 at tablet 72075186224037920, error: Status: STATUS_ABORTED Issues: { message: "Distributed transaction aborted due to commit failure" issue_code: 2011 severity: 1 } 2025-03-31T16:18:26.354709Z node 2 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_ABORTED;details=Distributed transaction aborted due to commit failure;tx_id=281474976710673; 2025-03-31T16:18:26.354775Z node 2 :TX_DATASHARD ERROR: Complete volatile write [1743437906385 : 281474976710673] from 72075186224037921 at tablet 72075186224037921, error: Status: STATUS_ABORTED Issues: { message: "Distributed transaction aborted due to commit failure" issue_code: 2011 severity: 1 } 2025-03-31T16:18:26.354836Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZTljMWE3MTgtN2I3ODcwNjQtYTM0M2QxZmQtMmZjZGZmOTk=, ActorId: [2:7488008792394467911:2497], ActorState: ExecuteState, TraceId: 01jqpfyga7awwe09dewrbbcpe3, Create QueryResponse for error on request, msg: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyRW1WRR2 [GOOD] Test command err: Trying to start YDB, gRPC: 28388, MsgBus: 18093 2025-03-31T16:18:13.657237Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008735891728909:2202];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:13.657366Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001166/r3tmp/tmpNUONIf/pdisk_1.dat 2025-03-31T16:18:14.204836Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:14.206824Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:14.206907Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:14.209190Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28388, node 1 2025-03-31T16:18:14.428899Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:14.428933Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:14.428941Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:14.429077Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18093 TClient is connected to server localhost:18093 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:15.222628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:15.265424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:15.449550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:15.698890Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:15.795985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:17.366721Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008753071599713:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:18:17.366854Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:18:17.759989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-31T16:18:17.790249Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-31T16:18:17.832502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-31T16:18:17.865083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-31T16:18:17.894861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-31T16:18:17.969660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-31T16:18:18.058357Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008757366567532:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:18:18.058427Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:18:18.058698Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008757366567537:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:18:18.062298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-31T16:18:18.072122Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008757366567539:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:18:18.129877Z node 1 :TX_PROXY ERROR: Actor# [1:7488008757366567591:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:18.655400Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008735891728909:2202];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:18.655506Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:18:19.193967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 3188, MsgBus: 11547 2025-03-31T16:18:20.594552Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008766250874783:2217];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001166/r3tmp/tmpWuL2y0/pdisk_1.dat 2025-03-31T16:18:20.673088Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:18:20.810850Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:20.837709Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:20.837786Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:20.844188Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3188, node 2 2025-03-31T16:18:20.955707Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:20.955730Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:20.955738Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:20.955852Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11547 TClient is connected to server localhost:11547 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:21.610595Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:21.653183Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:18:21.678398Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:21.822203Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:22.038815Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:22.123210Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:24.259256Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008783430745541:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:18:24.259373Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:18:24.299628Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-31T16:18:24.364981Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-31T16:18:24.405517Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-31T16:18:24.440214Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-31T16:18:24.488553Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-31T16:18:24.539102Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-31T16:18:24.621836Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008783430746057:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:18:24.621931Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:18:24.625181Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008783430746062:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:18:24.629138Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-31T16:18:24.641057Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488008783430746064:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-03-31T16:18:24.727811Z node 2 :TX_PROXY ERROR: Actor# [2:7488008783430746121:3444] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-31T16:18:25.583023Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488008766250874783:2217];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:18:25.586343Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-31T16:18:25.704954Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-03-31T16:18:26.383440Z node 2 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=8;
2025-03-31T16:18:26.383760Z node 2 :TX_DATASHARD ERROR: Prepare transaction failed. txid 8 at tablet 72075186224037919 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 }
2025-03-31T16:18:26.387707Z node 2 :TX_DATASHARD ERROR: Errors while proposing transaction txid 8 at tablet 72075186224037919 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 }
2025-03-31T16:18:26.387939Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7488008792020681305:2498], Table: `/Root/TestImmediateEffects` ([72057594046644480:16:1]), SessionActorId: [2:7488008787725713708:2498]Got LOCKS BROKEN for table `/Root/TestImmediateEffects`. ShardID=72075186224037919, Sink=[2:7488008792020681305:2498].{
: Error: Operation is aborting because locks are not valid, code: 2001 }
2025-03-31T16:18:26.388581Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7488008792020681298:2498], SessionActorId: [2:7488008787725713708:2498], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/TestImmediateEffects`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[2:7488008787725713708:2498]. isRollback=0
2025-03-31T16:18:26.388833Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NjMxYTM4M2EtNGQ3ZDVkNGQtNjkyODZhYzktODdlZjI0ZQ==, ActorId: [2:7488008787725713708:2498], ActorState: ExecuteState, TraceId: 01jqpfygb8265y7xe5y1e86vpb, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [2:7488008792020681363:2498] from: [2:7488008792020681298:2498]
2025-03-31T16:18:26.388924Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7488008792020681363:2498] TxId: 281474976710678. Ctx: { TraceId: 01jqpfygb8265y7xe5y1e86vpb, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjMxYTM4M2EtNGQ3ZDVkNGQtNjkyODZhYzktODdlZjI0ZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/TestImmediateEffects`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-03-31T16:18:26.389134Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NjMxYTM4M2EtNGQ3ZDVkNGQtNjkyODZhYzktODdlZjI0ZQ==, ActorId: [2:7488008787725713708:2498], ActorState: ExecuteState, TraceId: 01jqpfygb8265y7xe5y1e86vpb, Create QueryResponse for error on request, msg: >> YdbLogStore::Dirs [GOOD] >> YdbLogStore::LogTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::Negative_SingleRowWithValueCast-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 22983, MsgBus: 28626 2025-03-31T16:18:13.650647Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008736912546727:2191];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:13.650698Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00119e/r3tmp/tmpVkIENN/pdisk_1.dat 2025-03-31T16:18:14.195716Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:14.195848Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:14.216818Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:18:14.243474Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22983, node 1 2025-03-31T16:18:14.427694Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:14.427731Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:14.427740Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:14.427870Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28626 TClient is connected to server localhost:28626 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:15.297621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:18:15.321626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:15.524322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:15.749454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:15.858761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:17.222783Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008754092417559:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:18:17.222915Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:18:17.759943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-31T16:18:17.828289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-31T16:18:17.859786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-31T16:18:17.930848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-31T16:18:17.993547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-31T16:18:18.041531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-31T16:18:18.089407Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008758387385373:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:18:18.089500Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:18:18.089672Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008758387385378:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:18:18.093748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-31T16:18:18.104044Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008758387385380:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:18:18.160672Z node 1 :TX_PROXY ERROR: Actor# [1:7488008758387385434:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:18.651165Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008736912546727:2191];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:18.651224Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:18:19.216886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 19439, MsgBus: 17610 2025-03-31T16:18:20.580456Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008766937104964:2208];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00119e/r3tmp/tmp7usgaY/pdisk_1.dat 2025-03-31T16:18:20.651776Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:18:20.745250Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:20.791640Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:20.791728Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:20.792962Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19439, node 2 2025-03-31T16:18:20.971650Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:20.971672Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:20.971678Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:20.971805Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17610 TClient is connected to server localhost:17610 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:21.577281Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:21.609867Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:18:21.625230Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:21.699950Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:21.902575Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:22.005349Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:24.208193Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008784116975763:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:18:24.208285Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:18:24.251072Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-31T16:18:24.297131Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-31T16:18:24.350248Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-31T16:18:24.404099Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-31T16:18:24.454978Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-31T16:18:24.536853Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-31T16:18:24.616098Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008784116976282:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:18:24.616197Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:18:24.616541Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008784116976287:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:18:24.620547Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-31T16:18:24.634586Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488008784116976289:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:18:24.724227Z node 2 :TX_PROXY ERROR: Actor# [2:7488008784116976346:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:25.527235Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488008766937104964:2208];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:25.527302Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:18:25.843375Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Params_Conflict-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 30130, MsgBus: 14393 2025-03-31T16:18:13.657742Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008736855754661:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:13.657798Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001199/r3tmp/tmp1pdaX0/pdisk_1.dat 2025-03-31T16:18:14.160195Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:14.192413Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:14.192540Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:14.195524Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30130, node 1 2025-03-31T16:18:14.436970Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:14.436997Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:14.437003Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:14.437139Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14393 TClient is connected to server localhost:14393 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:15.343097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:15.390964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:15.616478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:15.835071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:15.947440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:17.290405Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008754035625606:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:18:17.290522Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:18:17.760543Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-31T16:18:17.789970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-31T16:18:17.823913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-31T16:18:17.860655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-31T16:18:17.894214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-31T16:18:17.931640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-31T16:18:17.993978Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008754035626118:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:18:17.994057Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:18:17.994300Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008754035626123:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:18:17.998633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-31T16:18:18.010391Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008754035626125:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-03-31T16:18:18.092043Z node 1 :TX_PROXY ERROR: Actor# [1:7488008758330593474:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-31T16:18:18.658373Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008736855754661:2068];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:18:18.658446Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-31T16:18:19.358693Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=3;
2025-03-31T16:18:19.369199Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 3 at tablet 72075186224037888 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 }
2025-03-31T16:18:19.369364Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 3 at tablet 72075186224037888 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 }
2025-03-31T16:18:19.370771Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488008762625561108:2497], Table: `/Root/TwoShard` ([72057594046644480:2:1]), SessionActorId: [1:7488008762625561066:2497]Got CONSTRAINT VIOLATION for table `/Root/TwoShard`. ShardID=72075186224037888, Sink=[1:7488008762625561108:2497].{
: Error: Duplicate keys have been found., code: 2012 } 2025-03-31T16:18:19.371427Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488008762625561101:2497], SessionActorId: [1:7488008762625561066:2497], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[1:7488008762625561066:2497]. isRollback=0 2025-03-31T16:18:19.371651Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YWFhNzUwZjQtMmI5Y2RkYTYtM2E5NmZjZjEtYzRmNDc0YTM=, ActorId: [1:7488008762625561066:2497], ActorState: ExecuteState, TraceId: 01jqpfy9cg9r4h8s0qpr45a6jm, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7488008762625561102:2497] from: [1:7488008762625561101:2497] 2025-03-31T16:18:19.372055Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7488008762625561102:2497] TxId: 281474976710671. Ctx: { TraceId: 01jqpfy9cg9r4h8s0qpr45a6jm, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWFhNzUwZjQtMmI5Y2RkYTYtM2E5NmZjZjEtYzRmNDc0YTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-03-31T16:18:19.372952Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YWFhNzUwZjQtMmI5Y2RkYTYtM2E5NmZjZjEtYzRmNDc0YTM=, ActorId: [1:7488008762625561066:2497], ActorState: ExecuteState, TraceId: 01jqpfy9cg9r4h8s0qpr45a6jm, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 1207, MsgBus: 2883 2025-03-31T16:18:20.567472Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008765397416969:2122];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:20.568358Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001199/r3tmp/tmpaKPlnM/pdisk_1.dat 2025-03-31T16:18:20.755759Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:20.755847Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:20.756193Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:20.770083Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1207, node 2 2025-03-31T16:18:21.007619Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:21.007641Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:21.007650Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:21.007758Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2883 TClient is connected to server localhost:2883 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:21.756423Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:21.770353Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:18:21.789922Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 
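The STATUS_CONSTRAINT_VIOLATION / PRECONDITION_FAILED sequence above (issue code 2012, table /Root/TwoShard) is the test deliberately writing a duplicate primary key. A minimal YQL sketch of the failing pattern — the table name is from the log, the column names and types are assumptions based on the usual KQP test fixtures, not taken from the test source:

-- Assumed schema: TwoShard with PRIMARY KEY (Key); the real fixture may differ.
UPSERT INTO TwoShard (Key, Value1) VALUES (1u, "one");    -- blind write, always succeeds
INSERT INTO TwoShard (Key, Value1) VALUES (1u, "again");  -- rejected with code 2012:
                                                          -- "Duplicate keys have been found."

Unlike UPSERT or REPLACE, INSERT requires the key to be absent (and not repeated within the same statement), which is why the datashard refuses the proposal at prepare time rather than at commit.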
2025-03-31T16:18:21.912503Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-31T16:18:22.186002Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:22.275332Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:24.447063Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008782577287860:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:24.447183Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:24.509221Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.553051Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.593643Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.626223Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.658117Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.736604Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.817761Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008782577288379:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:24.817854Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:24.818133Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008782577288384:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:24.821047Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:24.831183Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488008782577288386:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:18:24.891940Z node 2 :TX_PROXY ERROR: Actor# [2:7488008782577288439:3443] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:25.559814Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488008765397416969:2122];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:25.559908Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:18:26.261669Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7488008791167223360:2501], TxId: 281474976710672, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=M2EyZWUzODktOTcxYzBhMzUtNzZlZTVhOTAtM2I5ZDRhODM=. TraceId : 01jqpfyg082c59kj9j0kxff27d. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-03-31T16:18:26.262222Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7488008791167223361:2502], TxId: 281474976710672, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=M2EyZWUzODktOTcxYzBhMzUtNzZlZTVhOTAtM2I5ZDRhODM=. TraceId : 01jqpfyg082c59kj9j0kxff27d. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7488008791167223357:2489], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-03-31T16:18:26.262665Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=M2EyZWUzODktOTcxYzBhMzUtNzZlZTVhOTAtM2I5ZDRhODM=, ActorId: [2:7488008786872255995:2489], ActorState: ExecuteState, TraceId: 01jqpfyg082c59kj9j0kxff27d, Create QueryResponse for error on request, msg: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ImmediateUpdateSelect [GOOD] Test command err: Trying to start YDB, gRPC: 1300, MsgBus: 13634 2025-03-31T16:18:13.715104Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008736136896434:2201];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:13.727611Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00118d/r3tmp/tmpHxLbdl/pdisk_1.dat 2025-03-31T16:18:14.280129Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:14.280244Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:14.283471Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:14.285887Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1300, node 1 2025-03-31T16:18:14.427565Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:14.427588Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:14.427593Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:14.427673Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13634 TClient is connected to server localhost:13634 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:15.350348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:15.378983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:18:15.560716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:15.773918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:15.867817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:17.505849Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008753316767245:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:17.505979Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:17.822400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:17.849070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:17.879386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:17.912269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:17.944721Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:17.994716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:18.059510Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008757611735055:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:18.059615Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:18.059818Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008757611735060:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:18.062670Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:18.072119Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008757611735062:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:18:18.169788Z node 1 :TX_PROXY ERROR: Actor# [1:7488008757611735117:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:18.723888Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008736136896434:2201];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:18.723964Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:18:19.181167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 23003, MsgBus: 18404 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00118d/r3tmp/tmpMjoW0H/pdisk_1.dat 2025-03-31T16:18:20.771273Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:18:20.798020Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:20.803496Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:20.803587Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:20.827531Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23003, node 2 2025-03-31T16:18:20.947637Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:20.947660Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:20.947668Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:20.948015Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18404 TClient is connected to server localhost:18404 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
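The recurring NOT_FOUND → "Scheduled retry" → "path exist, request accepts it" sequence is the workload service lazily creating the default resource pool on first use: the fetch fails, a TPoolCreatorActor creates the pool, and a racing second attempt is accepted as already done. A sketch of the statement the creator effectively issues — the syntax is assumed from current YDB workload-manager YQL and the setting value is a placeholder:

-- Assumed: run against /Root; the pool materializes under
-- .metadata/workload_manager/pools/default, matching the path in the log.
CREATE RESOURCE POOL default WITH (
    CONCURRENT_QUERY_LIMIT = -1  -- placeholder: no limit
);

Because creation is effectively idempotent here, the TX_PROXY "path exist, request accepts it" error on the duplicate attempt is noise rather than a failure: both suites in this block still finish [GOOD].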
2025-03-31T16:18:21.520787Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:21.537026Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:18:21.549660Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:21.607493Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:21.827006Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:21.924271Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:24.343650Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008782634711231:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:24.343763Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:24.383277Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.459347Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.501411Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.547567Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.589288Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.682544Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.771682Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008782634711754:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:24.771793Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:24.772024Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008782634711759:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:24.777980Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:24.789583Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488008782634711761:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:18:24.859191Z node 2 :TX_PROXY ERROR: Actor# [2:7488008782634711818:3455] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:25.801719Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupJoin-StreamLookupJoin [GOOD] Test command err: Trying to start YDB, gRPC: 61320, MsgBus: 21162 2025-03-31T16:18:20.122054Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008766722682046:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:20.122096Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000b0b/r3tmp/tmpvIN3Hy/pdisk_1.dat 2025-03-31T16:18:20.597028Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:20.615577Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:20.615675Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:20.619922Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61320, node 1 2025-03-31T16:18:20.808467Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:20.808487Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:20.808506Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:20.808611Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21162 TClient is connected to server localhost:21162 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:18:21.621089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:21.649968Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:21.837941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:22.010639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:22.097442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:23.789191Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008779607585705:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:23.789306Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:24.140788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.210725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.284904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.326596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.378005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.466124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.531277Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008783902553525:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:24.531369Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:24.531711Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008783902553531:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:24.534978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:24.548519Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008783902553533:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:18:24.611148Z node 1 :TX_PROXY ERROR: Actor# [1:7488008783902553585:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:25.125238Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008766722682046:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:25.125311Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:18:25.658492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:18:25.691398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:18:25.725385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 /Root/Join1_2 1 19 /Root/Join1_1 8 136 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::SingleRowIf-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 31629, MsgBus: 10457 2025-03-31T16:18:13.650164Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008735895257701:2171];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:13.650227Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001193/r3tmp/tmplj8LlR/pdisk_1.dat 2025-03-31T16:18:14.222854Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:14.244677Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:14.244785Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:14.249626Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31629, node 1 2025-03-31T16:18:14.426581Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:14.426607Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:14.426615Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:14.426729Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10457 TClient is connected to server localhost:10457 WaitRootIsUp 'Root'... 
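Earlier in this block, the bare lines "/Root/Join1_2 1 19" and "/Root/Join1_1 8 136" are the per-table counters the KqpCost suite prints, which read as rows and bytes consumed by the measured query. A hypothetical shape of the join being costed — table names are from the log, key and column names are placeholders:

-- Placeholder columns; the real Join1_1/Join1_2 fixtures may differ.
SELECT a.Value, b.Value
FROM Join1_1 AS a
JOIN Join1_2 AS b
  ON a.Fk = b.Key;  -- with StreamLookupJoin disabled, planned as an index lookup join

Reading 8 rows from Join1_1 but only 1 matching row from Join1_2 is consistent with a lookup join that probes the right-hand table per key instead of scanning it.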
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:15.236734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:15.248348Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:18:15.256579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:15.449287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:15.695038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:15.785717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:17.308271Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008753075128548:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:17.308418Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:17.760086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:17.797694Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:17.829036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:17.903300Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:17.938484Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:18.025770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:18.073846Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008757370096359:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:18.073934Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:18.074130Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008757370096364:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:18.078054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:18.088401Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008757370096366:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:18:18.165772Z node 1 :TX_PROXY ERROR: Actor# [1:7488008757370096420:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:18.650133Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008735895257701:2171];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:18.650217Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:18:19.193426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 24090, MsgBus: 17186 2025-03-31T16:18:20.761199Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008764720642411:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:20.761281Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001193/r3tmp/tmpg9r0zF/pdisk_1.dat 2025-03-31T16:18:20.967205Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:21.034759Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:21.034850Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:21.036135Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24090, node 2 2025-03-31T16:18:21.207711Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:21.207742Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:21.207757Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:21.207886Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17186 TClient is connected to server localhost:17186 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:21.921744Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:21.933791Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:18:21.949309Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:22.064844Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:22.264001Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:22.347701Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:24.420776Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008781900513348:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:24.420843Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:24.474955Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.503215Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.571880Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.605062Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.644351Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.692817Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.754136Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008781900513860:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:24.754231Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:24.754473Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008781900513865:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:24.758417Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:24.778959Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488008781900513867:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:18:24.870500Z node 2 :TX_PROXY ERROR: Actor# [2:7488008781900513925:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:25.763135Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488008764720642411:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:25.763216Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:18:25.976411Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::UpdateAfterUpsert [GOOD] Test command err: Trying to start YDB, gRPC: 18761, MsgBus: 23745 2025-03-31T16:18:13.650771Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008735376833215:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:13.650849Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001156/r3tmp/tmpfcEOh2/pdisk_1.dat 2025-03-31T16:18:14.183724Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:14.185170Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:14.188463Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:18:14.233274Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18761, node 1 2025-03-31T16:18:14.427919Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:14.427945Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:14.427951Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:14.428045Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23745 TClient is connected to server localhost:23745 WaitRootIsUp 'Root'... 
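KqpImmediateEffects::UpdateAfterUpsert, going by the test name, checks that an UPDATE in the same transaction observes a row UPSERTed just before it (the "immediate effects" of earlier statements are visible to later ones). A sketch of that pattern — the statements and the table name are assumptions from the name, not taken from the test source:

-- TestImmediateEffects is a placeholder name; both statements run in one transaction.
UPSERT INTO TestImmediateEffects (Key, Value) VALUES (100u, "initial");
UPDATE TestImmediateEffects SET Value = "updated" WHERE Key = 100u;
SELECT Value FROM TestImmediateEffects WHERE Key = 100u;  -- expected: "updated"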
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:15.293377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:15.319713Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:18:15.335759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:15.493104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:15.747527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:15.862016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:17.404453Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008752556704163:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:17.404614Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:17.759947Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:17.830398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:17.860344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:17.893536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:17.977119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:18.021280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:18.070921Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008756851671978:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:18.071009Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:18.071046Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008756851671983:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:18.075618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:18.085362Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008756851671985:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:18:18.158188Z node 1 :TX_PROXY ERROR: Actor# [1:7488008756851672037:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:18.651266Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008735376833215:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:18.651341Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:18:19.183392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 7707, MsgBus: 26000 2025-03-31T16:18:20.769784Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008765725431419:2083];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:20.795434Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001156/r3tmp/tmpkfvxYL/pdisk_1.dat 2025-03-31T16:18:20.974648Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:21.013553Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:21.013639Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:21.019893Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7707, node 2 2025-03-31T16:18:21.183616Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:21.183638Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:21.183645Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:21.183754Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26000 TClient is connected to server localhost:26000 WaitRootIsUp 'Root'... 
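The sequence above — repeated "Resource pool default not found" NOT_FOUND warnings, a TPoolCreatorActor "Scheduled retry ... doublechecking", and a TX_PROXY "path exist, request accepts it" error that the run nevertheless treats as success — reads as the workload service lazily bootstrapping /Root/.metadata/workload_manager/pools/default: several actors race to create the pool, and losing the race is equivalent to succeeding. A minimal sketch of that idempotent create-then-verify loop, with hypothetical names standing in for the real YDB actors:

    #include <functional>
    #include <iostream>
    #include <string>

    // Outcomes mirroring the log: SUCCESS for the race winner, ALREADY_EXISTS
    // for losers ("path exist, request accepts it"), and a transient error
    // that triggers the "Scheduled retry ... doublechecking" path.
    enum class ECreateStatus { Success, AlreadyExists, TransientError };

    // Hypothetical stand-in for the schemeshard create call; not the real API.
    using TCreateFn = std::function<ECreateStatus(const std::string&)>;

    bool EnsureDefaultPool(const TCreateFn& create, int maxAttempts = 3) {
        const std::string path = "/Root/.metadata/workload_manager/pools/default";
        for (int attempt = 1; attempt <= maxAttempts; ++attempt) {
            switch (create(path)) {
                case ECreateStatus::Success:
                case ECreateStatus::AlreadyExists:
                    // Losing the creation race still means the pool exists.
                    return true;
                case ECreateStatus::TransientError:
                    std::cerr << "attempt " << attempt << ": scheduled retry\n";
            }
        }
        return false;
    }

    int main() {
        // Simulate a node that lost the race to a concurrent creator.
        bool ok = EnsureDefaultPool([](const std::string&) {
            return ECreateStatus::AlreadyExists;
        });
        std::cout << (ok ? "pool ready" : "bootstrap failed") << "\n";
    }

Under this reading, the ERROR-severity TX_PROXY line is cosmetic: the request explicitly "accepts" the pre-existing path, which is why the test still reports [GOOD].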
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:21.824970Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:21.849683Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:21.947118Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:22.196056Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:22.285979Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:24.520546Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008782905302338:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:24.520635Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:24.570527Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.653224Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.692631Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.732654Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.783200Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.836839Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.918420Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008782905302861:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:24.918515Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:24.918768Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008782905302866:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:24.921916Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:24.933270Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488008782905302868:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:18:25.032756Z node 2 :TX_PROXY ERROR: Actor# [2:7488008787200270220:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:25.770061Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488008765725431419:2083];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:25.770132Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:18:26.000387Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::AAARangeFullScan [GOOD] Test command err: Trying to start YDB, gRPC: 4348, MsgBus: 64860 2025-03-31T16:18:20.910992Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008765273903584:2235];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:20.911308Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000afa/r3tmp/tmplIwAUh/pdisk_1.dat 2025-03-31T16:18:21.433209Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:21.433305Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:21.439510Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:21.443863Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4348, node 1 2025-03-31T16:18:21.663516Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:21.663544Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:21.663551Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:21.663680Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64860 TClient is connected to server localhost:64860 WaitRootIsUp 'Root'... 
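Each "------- [TM] ... [GOOD]" block, including the KqpCost::AAARangeFullScan run that begins above, repeats the same per-test lifecycle: pick fresh gRPC/MsgBus ports, lay out a throwaway pdisk under an r3tmp directory, start a server, wait for /Root, create the test tables, then run the case. A rough sketch of such a scratch-directory fixture, assuming a hypothetical TTestServer rather than the actual kikimr test library whose API this log does not show:

    #include <filesystem>
    #include <iostream>
    #include <string>
    #include <utility>

    // Hypothetical server handle; the real tests use TServer/TClient helpers.
    struct TTestServer {
        explicit TTestServer(std::filesystem::path workDir) : Dir(std::move(workDir)) {
            std::filesystem::create_directories(Dir);         // fresh pdisk scratch area
            std::cout << "Trying to start YDB, pdisk at " << Dir << "\n";
        }
        ~TTestServer() { std::filesystem::remove_all(Dir); }  // no state leaks between cases
        void WaitRootIsUp() { std::cout << "WaitRootIsUp 'Root' success.\n"; }
        std::filesystem::path Dir;
    };

    int main() {
        TTestServer server("/tmp/r3tmp/test_case");  // per-case dir, like .../r3tmp/tmpXXXX above
        server.WaitRootIsUp();
        // ... create the test tables and run assertions here ...
    }

The per-case directory is why every block shows a distinct tmp path (tmpkfvxYL, tmplIwAUh, ...) under the same build root.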
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:22.633738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:22.669139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:22.869945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:18:23.057816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:18:23.145989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:24.840199Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008782453774357:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:24.840317Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:25.132195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:25.168258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:25.200978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:25.233118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:25.264849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:25.336489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:25.397650Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008786748742170:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:25.397751Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:25.398045Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008786748742175:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:25.401691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:25.412774Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008786748742177:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:18:25.476090Z node 1 :TX_PROXY ERROR: Actor# [1:7488008786748742229:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:25.909886Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008765273903584:2235];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:25.909974Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; PONOS {"Plan":{"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["Test"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Group (-∞, +∞)","Name (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/Test","ReadRangesPointPrefixLen":"0","E-Rows":"No estimate","Table":"Test","ReadColumns":["Amount","Comment","Group","Name"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Limit","Limit":"1"},{"Inputs":[{"ExternalPlanNodeId":1}],"E-Rows":"No estimate","Predicate":"item.Amount \u003C 5000","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Limit-Filter","Stats":{"UseLlvm":"undefined","Output":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":1,"Max":1,"Min":1},"LastMessageMs":{"Count":1,"Sum":2,"Max":2,"Min":2},"FirstMessageMs":{"Count":1,"Sum":2,"Max":2,"Min":2},"Bytes":{"Count":1,"Sum":19,"Max":19,"Min":19,"History":[3,19]}},"Name":"4","Push":{"WaitTimeUs":{"Count":1,"Sum":2143,"Max":2143,"Min":2143,"History":[3,2143]},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"ResumeMessageMs":{"Count":1,"Sum":2,"Max":2,"Min":2},"Rows":{"Count":1,"Sum":1,"Max":1,"Min":1},"LastMessageMs":{"Count":1,"Sum":2,"Max":2,"Min":2},"FirstMessageMs":{"Count":1,"Sum":2,"Max":2,"Min":2}}}],"DurationUs":{"Count":1,"Sum":1000,"Max":1000,"Min":1000},"MaxMemoryUsage":{"Count":1,"Sum":1048576,"Max":1048576,"Min":1048576,"History":[3,1048576]},"Tasks":1,"OutputRows":{"Count":1,"Sum":1,"Max":1,"Min":1},"FinishedTasks":1,"IngressRows":{"Count":1,"Sum":3,"Max":3,"Min":3},"PhysicalStageId":0,"StageDurationUs":1000,"Table":[{"Path":"\/Root\/Test","ReadRows":{"Count":1,"Sum":1,"Max":1,"Min":1},"ReadBytes":{"Count":1,"Sum":20,"Max":20,"Min":20}}],"BaseTimeMs":1743437906775,"OutputBytes":{"Count":1,"Sum":19,"Max":19,"Min":19},"CpuTimeUs":{"Count":1,"Sum":1955,"Max":1955,"Min":1955,"History":[3,1955]},"Ingress":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":3,"Max":3,"Min":3},"LastMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"FirstMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"Bytes":{"Count":1,"Sum":192,"Max":192,"Min":192,"History":[3,192]}},"External":{},"Name":"KqpReadRangesSource","Ingress":{},"Push":{"LastMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":3,"Max":3,"Min":3},"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"ResumeMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"FirstMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"Bytes":{"Count":1,"Sum":192,"Max":192,"Min":192,"History":[3,1
92]},"WaitTimeUs":{"Count":1,"Sum":1345,"Max":1345,"Min":1345,"History":[3,1345]},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1}}}]}}],"Node Type":"Merge","SortColumns":["Group (Asc)"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":3}],"Name":"Limit","Limit":"1"}],"Node Type":"Limit","Stats":{"UseLlvm":"undefined","Output":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":1,"Max":1,"Min":1},"LastMessageMs":{"Count":1,"Sum":2,"Max":2,"Min":2},"FirstMessageMs":{"Count":1,"Sum":2,"Max":2,"Min":2},"Bytes":{"Count":1,"Sum":19,"Max":19,"Min":19,"History":[3,19]}},"Name":"RESULT","Push":{"WaitTimeUs":{"Count":1,"Sum":1975,"Max":1975,"Min":1975,"History":[3,1975]},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"ResumeMessageMs":{"Count":1,"Sum":2,"Max":2,"Min":2},"Rows":{"Count":1,"Sum":1,"Max":1,"Min":1},"LastMessageMs":{"Count":1,"Sum":2,"Max":2,"Min":2},"FirstMessageMs":{"Count":1,"Sum":2,"Max":2,"Min":2}}}],"MaxMemoryUsage":{"Count":1,"Sum":1048576,"Max":1048576,"Min":1048576,"History":[3,1048576]},"DurationUs":{"Count":1,"Sum":1000,"Max":1000,"Min":1000},"InputBytes":{"Count":1,"Sum":19,"Max":19,"Min":19},"ResultRows":{"Count":1,"Sum":1,"Max":1,"Min":1},"Tas ... t"}],"Operators":[{"A-Rows":1,"A-SelfCpu":0.808,"A-Cpu":2.763,"A-Size":19,"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","PlanNodeType":"Query"}} query_phases { duration_us: 6539 table_access { name: "/Root/Test" reads { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 6316 affected_shards: 1 } compilation { duration_us: 205328 cpu_time_us: 201583 } process_cpu_time_us: 297 query_plan: "{\"Plan\":{\"Plans\":[{\"PlanNodeId\":5,\"Plans\":[{\"PlanNodeId\":4,\"Plans\":[{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"Tables\":[\"Test\"],\"PlanNodeId\":1,\"Operators\":[{\"Scan\":\"Parallel\",\"E-Size\":\"No estimate\",\"ReadRanges\":[\"Group (-\342\210\236, +\342\210\236)\",\"Name (-\342\210\236, +\342\210\236)\"],\"Name\":\"TableFullScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/Test\",\"ReadRangesPointPrefixLen\":\"0\",\"E-Rows\":\"No estimate\",\"Table\":\"Test\",\"ReadColumns\":[\"Amount\",\"Comment\",\"Group\",\"Name\"],\"E-Cost\":\"No estimate\"}],\"Node Type\":\"TableFullScan\"}],\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Name\":\"Limit\",\"Limit\":\"1\"},{\"Inputs\":[{\"ExternalPlanNodeId\":1}],\"E-Rows\":\"No estimate\",\"Predicate\":\"item.Amount \\u003C 5000\",\"Name\":\"Filter\",\"E-Size\":\"No estimate\",\"E-Cost\":\"No estimate\"}],\"Node 
Type\":\"Limit-Filter\",\"Stats\":{\"UseLlvm\":\"undefined\",\"Output\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"LastMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"FirstMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Bytes\":{\"Count\":1,\"Sum\":19,\"Max\":19,\"Min\":19,\"History\":[3,19]}},\"Name\":\"4\",\"Push\":{\"WaitTimeUs\":{\"Count\":1,\"Sum\":2143,\"Max\":2143,\"Min\":2143,\"History\":[3,2143]},\"WaitPeriods\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Rows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"LastMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"FirstMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2}}}],\"DurationUs\":{\"Count\":1,\"Sum\":1000,\"Max\":1000,\"Min\":1000},\"MaxMemoryUsage\":{\"Count\":1,\"Sum\":1048576,\"Max\":1048576,\"Min\":1048576,\"History\":[3,1048576]},\"Tasks\":1,\"OutputRows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"FinishedTasks\":1,\"IngressRows\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"PhysicalStageId\":0,\"StageDurationUs\":1000,\"Table\":[{\"Path\":\"\\/Root\\/Test\",\"ReadRows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"ReadBytes\":{\"Count\":1,\"Sum\":20,\"Max\":20,\"Min\":20}}],\"BaseTimeMs\":1743437906775,\"OutputBytes\":{\"Count\":1,\"Sum\":19,\"Max\":19,\"Min\":19},\"CpuTimeUs\":{\"Count\":1,\"Sum\":1955,\"Max\":1955,\"Min\":1955,\"History\":[3,1955]},\"Ingress\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"LastMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"FirstMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Bytes\":{\"Count\":1,\"Sum\":192,\"Max\":192,\"Min\":192,\"History\":[3,192]}},\"External\":{},\"Name\":\"KqpReadRangesSource\",\"Ingress\":{},\"Push\":{\"LastMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"FirstMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Bytes\":{\"Count\":1,\"Sum\":192,\"Max\":192,\"Min\":192,\"History\":[3,192]},\"WaitTimeUs\":{\"Count\":1,\"Sum\":1345,\"Max\":1345,\"Min\":1345,\"History\":[3,1345]},\"WaitPeriods\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1}}}]}}],\"Node Type\":\"Merge\",\"SortColumns\":[\"Group (Asc)\"],\"PlanNodeType\":\"Connection\"}],\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":3}],\"Name\":\"Limit\",\"Limit\":\"1\"}],\"Node 
Type\":\"Limit\",\"Stats\":{\"UseLlvm\":\"undefined\",\"Output\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"LastMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"FirstMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Bytes\":{\"Count\":1,\"Sum\":19,\"Max\":19,\"Min\":19,\"History\":[3,19]}},\"Name\":\"RESULT\",\"Push\":{\"WaitTimeUs\":{\"Count\":1,\"Sum\":1975,\"Max\":1975,\"Min\":1975,\"History\":[3,1975]},\"WaitPeriods\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Rows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"LastMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"FirstMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2}}}],\"MaxMemoryUsage\":{\"Count\":1,\"Sum\":1048576,\"Max\":1048576,\"Min\":1048576,\"History\":[3,1048576]},\"DurationUs\":{\"Count\":1,\"Sum\":1000,\"Max\":1000,\"Min\":1000},\"InputBytes\":{\"Count\":1,\"Sum\":19,\"Max\":19,\"Min\":19},\"ResultRows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Tasks\":1,\"ResultBytes\":{\"Count\":1,\"Sum\":19,\"Max\":19,\"Min\":19},\"OutputRows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"FinishedTasks\":1,\"InputRows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"PhysicalStageId\":1,\"StageDurationUs\":1000,\"BaseTimeMs\":1743437906775,\"OutputBytes\":{\"Count\":1,\"Sum\":19,\"Max\":19,\"Min\":19},\"CpuTimeUs\":{\"Count\":1,\"Sum\":808,\"Max\":808,\"Min\":808,\"History\":[3,808]},\"Input\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"LastMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"FirstMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Bytes\":{\"Count\":1,\"Sum\":19,\"Max\":19,\"Min\":19,\"History\":[3,19]}},\"Name\":\"2\",\"Push\":{\"LastMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Rows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"FirstMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Bytes\":{\"Count\":1,\"Sum\":19,\"Max\":19,\"Min\":19,\"History\":[3,19]},\"WaitTimeUs\":{\"Count\":1,\"Sum\":1856,\"Max\":1856,\"Min\":1856,\"History\":[3,1856]},\"WaitPeriods\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1}}}]}}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"Compilation\":{\"FromCache\":false,\"DurationUs\":205328,\"CpuTimeUs\":201583},\"ProcessCpuTimeUs\":297,\"TotalDurationUs\":222036,\"ResourcePoolId\":\"default\",\"QueuedTimeUs\":3375},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"PlanNodeId\":4,\"Plans\":[{\"PlanNodeId\":5,\"Plans\":[{\"PlanNodeId\":6,\"Operators\":[{\"Scan\":\"Parallel\",\"E-Size\":\"No estimate\",\"ReadRanges\":[\"Group (-\342\210\236, +\342\210\236)\",\"Name (-\342\210\236, +\342\210\236)\"],\"Name\":\"TableFullScan\",\"Path\":\"\\/Root\\/Test\",\"ReadRangesPointPrefixLen\":\"0\",\"E-Rows\":\"No estimate\",\"Table\":\"Test\",\"ReadColumns\":[\"Amount\",\"Comment\",\"Group\",\"Name\"],\"E-Cost\":\"No estimate\"}],\"Node Type\":\"TableFullScan\"}],\"Operators\":[{\"E-Rows\":\"No estimate\",\"Predicate\":\"item.Amount \\u003C 
5000\",\"Name\":\"Filter\",\"E-Size\":\"No estimate\",\"E-Cost\":\"No estimate\"}],\"Node Type\":\"Filter\"}],\"Operators\":[{\"A-Rows\":1,\"A-SelfCpu\":1.955,\"A-Cpu\":1.955,\"A-Size\":19,\"Name\":\"Limit\",\"Limit\":\"1\"}],\"Node Type\":\"Limit\"}],\"Operators\":[{\"A-Rows\":1,\"A-SelfCpu\":0.808,\"A-Cpu\":2.763,\"A-Size\":19,\"Name\":\"Limit\",\"Limit\":\"1\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"PlanNodeType\":\"Query\"}}" query_ast: "(\n(let $1 (KqpTable \'\"/Root/Test\" \'\"72057594046644480:9\" \'\"\" \'1))\n(let $2 \'(\'\"Amount\" \'\"Comment\" \'\"Group\" \'\"Name\"))\n(let $3 (KqpRowsSourceSettings $1 $2 \'(\'(\'\"Sorted\")) (Void) \'()))\n(let $4 (Uint64 \'1))\n(let $5 (OptionalType (DataType \'String)))\n(let $6 (StructType \'(\'\"Amount\" (OptionalType (DataType \'Uint64))) \'(\'\"Comment\" $5) \'(\'\"Group\" (OptionalType (DataType \'Uint32))) \'(\'\"Name\" $5)))\n(let $7 \'(\'(\'\"_logical_id\" \'559) \'(\'\"_id\" \'\"dc25d1b7-230edb5-378e0b04-9d4ffb8c\") \'(\'\"_wide_channels\" $6)))\n(let $8 (DqPhyStage \'((DqSource (DataSource \'\"KqpReadRangesSource\") $3)) (lambda \'($12) (block \'(\n (let $13 (lambda \'($16) (block \'(\n (let $17 (Member $16 \'\"Amount\"))\n (return $17 (Member $16 \'\"Comment\") (Member $16 \'\"Group\") (Member $16 \'\"Name\") (Coalesce (< $17 (Uint64 \'\"5000\")) (Bool \'false)))\n ))))\n (let $14 (WideFilter (ExpandMap (ToFlow $12) $13) (lambda \'($18 $19 $20 $21 $22) $22) $4))\n (let $15 (lambda \'($23 $24 $25 $26 $27) $23 $24 $25 $26))\n (return (FromFlow (WideMap $14 $15)))\n))) $7))\n(let $9 (DqCnMerge (TDqOutput $8 \'0) \'(\'(\'\"2\" \'\"Asc\"))))\n(let $10 (DqPhyStage \'($9) (lambda \'($28) (FromFlow (NarrowMap (Take (ToFlow $28) $4) (lambda \'($29 $30 $31 $32) (AsStruct \'(\'\"Amount\" $29) \'(\'\"Comment\" $30) \'(\'\"Group\" $31) \'(\'\"Name\" $32)))))) \'(\'(\'\"_logical_id\" \'572) \'(\'\"_id\" \'\"162a1b31-e5f8af7-71a38327-68804d30\"))))\n(let $11 (DqCnResult (TDqOutput $10 \'0) \'()))\n(return (KqpPhysicalQuery \'((KqpPhysicalTx \'($8 $10) \'($11) \'() \'(\'(\'\"type\" \'\"data\")))) \'((KqpTxResultBinding (ListType $6) \'0 \'0)) \'(\'(\'\"type\" \'\"data_query\"))))\n)\n" total_duration_us: 222036 total_cpu_time_us: 208196 query_meta: 
"{\"query_database\":\"/Root\",\"query_parameter_types\":{},\"table_metadata\":[\"{\\\"DoesExist\\\":true,\\\"Cluster\\\":\\\"db\\\",\\\"Name\\\":\\\"/Root/Test\\\",\\\"SysView\\\":\\\"\\\",\\\"PathId\\\":{\\\"OwnerId\\\":72057594046644480,\\\"TableId\\\":9},\\\"SchemaVersion\\\":1,\\\"Kind\\\":1,\\\"Columns\\\":[{\\\"Name\\\":\\\"Amount\\\",\\\"Id\\\":3,\\\"Type\\\":\\\"Uint64\\\",\\\"TypeId\\\":4,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"Comment\\\",\\\"Id\\\":4,\\\"Type\\\":\\\"String\\\",\\\"TypeId\\\":4097,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"Group\\\",\\\"Id\\\":1,\\\"Type\\\":\\\"Uint32\\\",\\\"TypeId\\\":2,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"Name\\\",\\\"Id\\\":2,\\\"Type\\\":\\\"String\\\",\\\"TypeId\\\":4097,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}}],\\\"KeyColunmNames\\\":[\\\"Group\\\",\\\"Name\\\"],\\\"RecordsCount\\\":0,\\\"DataSize\\\":0,\\\"StatsLoaded\\\":false}\"],\"table_meta_serialization_type\":2,\"created_at\":\"1743437906\",\"query_type\":\"QUERY_TYPE_SQL_DML\",\"query_syntax\":\"1\",\"query_cluster\":\"db\",\"query_id\":\"7742f4ec-19a2ef5c-8d78f51-d907c27\",\"version\":\"1.0\"}" ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::SingleRowPgNotNull-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 14326, MsgBus: 19149 2025-03-31T16:18:13.654483Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008734828122085:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:13.654719Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00117b/r3tmp/tmpLNPHYV/pdisk_1.dat 2025-03-31T16:18:14.264256Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:14.285556Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:14.285691Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:14.289504Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14326, node 1 2025-03-31T16:18:14.439885Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:14.439901Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:14.439906Z node 
1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:14.439978Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19149 TClient is connected to server localhost:19149 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:15.206946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:15.240251Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:18:15.247527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:15.430150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:15.618111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:15.714095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:17.455660Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008752007993039:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:17.455792Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:17.759970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:17.795808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:17.833107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:17.863773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:17.891527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:17.970659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:18.058943Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008756302960858:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:18.059028Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:18.059950Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008756302960863:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:18.063890Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:18.074729Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008756302960865:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:18:18.130139Z node 1 :TX_PROXY ERROR: Actor# [1:7488008756302960917:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:18.655046Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008734828122085:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:18.655178Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:18:19.186445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 61485, MsgBus: 22392 2025-03-31T16:18:20.768534Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008766487899559:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:20.768577Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00117b/r3tmp/tmpNeApJj/pdisk_1.dat 2025-03-31T16:18:20.968301Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:20.974916Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:20.974993Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:20.976673Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61485, node 2 2025-03-31T16:18:21.070383Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:21.070404Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:21.070412Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:21.070511Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22392 TClient is connected to server localhost:22392 WaitRootIsUp 'Root'... 
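The METADATA_PROVIDER errors in the block above ("TTableExistsActor;event=timeout", then "cannot detect path existence ... error=timeout") show an existence probe that is bounded by a deadline instead of blocking node initialization. A sketch of the same bound using std::async in place of the actor machinery — an assumed shape, not the real table_exists.cpp logic:

    #include <chrono>
    #include <future>
    #include <iostream>
    #include <optional>
    #include <string>
    #include <thread>

    // Hypothetical slow probe standing in for the scheme-cache round trip.
    bool ProbePathExists(const std::string& path) {
        std::this_thread::sleep_for(std::chrono::seconds(2)); // unresponsive backend
        return true;
    }

    // Returns nullopt on timeout, mirroring "cannot detect path existence".
    std::optional<bool> PathExistsWithDeadline(const std::string& path,
                                               std::chrono::milliseconds deadline) {
        auto fut = std::async(std::launch::async, ProbePathExists, path);
        if (fut.wait_for(deadline) == std::future_status::timeout) {
            // Note: fut's destructor still joins the probe, so the timed-out
            // task runs to completion here; a real actor would be cancelled.
            return std::nullopt;
        }
        return fut.get();
    }

    int main() {
        auto r = PathExistsWithDeadline("//Root/.metadata/initialization/migrations",
                                        std::chrono::milliseconds(100));
        if (!r) std::cerr << "cannot detect path existence; error=timeout\n";
    }

The tests keep running after these errors, which matches a probe whose failure is advisory rather than fatal.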
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:21.644971Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:21.652001Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:18:21.666187Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:21.755698Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:22.012048Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:18:22.147699Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.425711Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008783667770514:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:24.425881Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:24.468431Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.507513Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.543856Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.584850Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.666753Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.740216Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.808816Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008783667771032:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:24.808903Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:24.809007Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008783667771037:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:24.812774Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:24.828324Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488008783667771039:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:18:24.912668Z node 2 :TX_PROXY ERROR: Actor# [2:7488008783667771095:3449] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:25.771217Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488008766487899559:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:25.771275Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:18:25.981342Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::Interactive [GOOD] Test command err: Trying to start YDB, gRPC: 7877, MsgBus: 14041 2025-03-31T16:18:13.700110Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008734552801824:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:13.700153Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001168/r3tmp/tmpaqCAB3/pdisk_1.dat 2025-03-31T16:18:14.299581Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:14.299674Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:14.304177Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:18:14.331515Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7877, node 1 2025-03-31T16:18:14.427481Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:14.427505Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:14.427510Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:14.427612Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14041 TClient is connected to server localhost:14041 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:15.226649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:15.290188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:15.533928Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:15.821313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:15.909061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:17.587703Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008751732672788:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:17.587834Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:17.886191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:17.916596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:17.952189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:17.989754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:18.020260Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:18.085424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:18.129849Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008756027640599:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:18.129916Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:18.129968Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008756027640604:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:18.134623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:18.146057Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008756027640606:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:18:18.219274Z node 1 :TX_PROXY ERROR: Actor# [1:7488008756027640659:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:18.701196Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008734552801824:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:18.701271Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:18:19.179107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 24120, MsgBus: 24732 2025-03-31T16:18:20.844592Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008765448209002:2214];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001168/r3tmp/tmp8ODTmR/pdisk_1.dat 2025-03-31T16:18:20.917473Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:18:21.041555Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:21.041636Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:21.044182Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:21.056532Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24120, node 2 2025-03-31T16:18:21.151676Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:21.151696Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:21.151705Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:21.151819Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24732 TClient is connected to server localhost:24732 WaitRootIsUp 'Root'... 
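The cluster of warnings above is the lazy bootstrap of the default workload-manager resource pool, and it resolves itself: TPoolFetcherActor reports NOT_FOUND for pool `default`, TPoolCreatorActor proposes ESchemeOpCreateResourcePool and schedules a "doublechecking" retry, and whichever creator loses the race gets "path exist, request accepts it", which is treated as success. A minimal sketch of that create-if-missing idiom, assuming hypothetical fetch_pool/create_pool callables (illustrative stand-ins, not a real YDB API):

    import time

    class NotFound(Exception):
        """Pool does not exist yet (the NOT_FOUND warnings above)."""

    class AlreadyExists(Exception):
        """A concurrent creator won the race (the 'path exist' message above)."""

    def ensure_default_pool(fetch_pool, create_pool, retries=5, backoff=0.1):
        # fetch_pool() returns the pool description or raises NotFound;
        # create_pool() creates the pool or raises AlreadyExists. Both hypothetical.
        for attempt in range(retries):
            try:
                return fetch_pool()
            except NotFound:
                pass
            try:
                create_pool()
            except AlreadyExists:
                pass  # same outcome: the pool exists and the request accepts it
            time.sleep(backoff * (2 ** attempt))  # the log's "Scheduled retry"
        return fetch_pool()  # final doublechecking read

The point visible in the log is that losing the creation race is not an error; either path leaves the pool in place, so these warnings are benign.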
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:21.691012Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:21.727348Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:18:21.738761Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:21.879551Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:22.033108Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:22.114889Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:24.529546Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008782628079762:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:24.529634Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:24.575482Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.623890Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.669039Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.716519Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.783235Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.831458Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.880598Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008782628080274:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:24.880708Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:24.880970Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008782628080279:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:24.883839Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:24.896160Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488008782628080281:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:18:24.954121Z node 2 :TX_PROXY ERROR: Actor# [2:7488008782628080333:3436] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:25.837041Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:18:25.844310Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488008765448209002:2214];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:25.844556Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ManyFlushes [GOOD]
Test command err: Trying to start YDB, gRPC: 12924, MsgBus: 29136 2025-03-31T16:18:13.665134Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008733447418547:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:13.665205Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00119d/r3tmp/tmpdQ3qq4/pdisk_1.dat 2025-03-31T16:18:14.261232Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:14.273527Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:14.273661Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:14.279390Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12924, node 1 2025-03-31T16:18:14.434328Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:14.434350Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:14.434357Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:14.434459Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29136 TClient is connected to server localhost:29136 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:15.322369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:15.361253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:15.561218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:15.808238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:15.919562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:17.570034Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008750627289357:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:17.570147Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:17.859903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:17.890010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:17.916870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:17.959322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:18.014914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:18.084020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:18.123742Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008754922257169:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:18.123821Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008754922257174:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:18.123820Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:18.127434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:18.137815Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008754922257176:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:18:18.227548Z node 1 :TX_PROXY ERROR: Actor# [1:7488008754922257229:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:18.658749Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008733447418547:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:18.658826Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:18:19.184567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 21151, MsgBus: 26282 2025-03-31T16:18:20.621145Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008763424700668:2125];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00119d/r3tmp/tmpx1rSNn/pdisk_1.dat 2025-03-31T16:18:20.689002Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:18:20.813041Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:20.826245Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:20.826484Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:20.829290Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21151, node 2 2025-03-31T16:18:21.013908Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:21.013927Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:21.013936Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:21.014037Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26282 TClient is connected to server localhost:26282 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:21.684532Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:21.710165Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:21.821940Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:22.030836Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:22.106186Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:24.631535Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008780604571519:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:24.631632Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:24.671583Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.708546Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.747167Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.819976Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.867557Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.906318Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.969624Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008780604572037:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:24.969689Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008780604572042:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:24.969732Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:25.005774Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:25.018136Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488008780604572044:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:18:25.109001Z node 2 :TX_PROXY ERROR: Actor# [2:7488008784899539397:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:25.625797Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488008763424700668:2125];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:25.625850Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:18:26.027325Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
>> BackupRestore::ImportDataShouldHandleErrors [GOOD]
>> BackupRestore::RestoreKesusResources
>> BackupRestoreS3::RestoreTableSplitBoundaries [GOOD]
>> BackupRestoreS3::RestoreIndexTableSplitBoundaries
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyRW1RWR2 [GOOD]
Test command err: Trying to start YDB, gRPC: 4353, MsgBus: 19554 2025-03-31T16:18:14.750988Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008739878162045:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:14.751400Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001113/r3tmp/tmphD1uzv/pdisk_1.dat 2025-03-31T16:18:15.343805Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:15.347917Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:15.348029Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:15.353877Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4353, node 1 2025-03-31T16:18:15.462484Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:15.462513Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:15.462525Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:15.462664Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19554 TClient is connected to server localhost:19554 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:16.114426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:16.138621Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:18:16.147627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:16.296887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:16.457012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:16.534334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:18.266028Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008757058032882:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:18.266149Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:18.613396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:18.643877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:18.675645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:18.711561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:18.746983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:18.823946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:18.897689Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008757058033399:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:18.897788Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:18.897985Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008757058033404:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:18.901490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:18.913727Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008757058033406:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:18:18.994110Z node 1 :TX_PROXY ERROR: Actor# [1:7488008757058033461:3458] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:19.741368Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008739878162045:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:19.741456Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:18:19.990132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 14141, MsgBus: 15406 2025-03-31T16:18:21.512643Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008768714335823:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:21.512680Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001113/r3tmp/tmpabdsoT/pdisk_1.dat 2025-03-31T16:18:21.785482Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:21.803237Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:21.803331Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:21.804418Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14141, node 2 2025-03-31T16:18:22.040282Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:22.040302Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:22.040310Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:22.040427Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15406 TClient is connected to server localhost:15406 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:22.675676Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:22.684543Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:18:22.707675Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:22.791946Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:22.950650Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:23.038803Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:25.144657Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008785894206771:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:25.144749Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:25.187625Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:25.220940Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:25.256430Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:25.285947Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:25.323003Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:25.395269Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:25.496779Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008785894207293:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:25.496882Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:25.497262Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008785894207298:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:25.501884Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:25.516557Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488008785894207300:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:18:25.597564Z node 2 :TX_PROXY ERROR: Actor# [2:7488008785894207357:3452] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:26.513049Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488008768714335823:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:26.513117Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:18:26.534781Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:18:27.138430Z node 2 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because it cannot acquire locks;tx_id=6; 2025-03-31T16:18:27.138635Z node 2 :TX_DATASHARD ERROR: Prepare transaction failed. txid 6 at tablet 72075186224037919 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because it cannot acquire locks" issue_code: 2001 severity: 1 } 2025-03-31T16:18:27.138824Z node 2 :TX_DATASHARD ERROR: Errors while proposing transaction txid 6 at tablet 72075186224037919 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because it cannot acquire locks" issue_code: 2001 severity: 1 } 2025-03-31T16:18:27.139039Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7488008794484142585:2498], Table: `/Root/TestImmediateEffects` ([72057594046644480:16:1]), SessionActorId: [2:7488008790189174945:2498]Got LOCKS BROKEN for table `/Root/TestImmediateEffects`. ShardID=72075186224037919, Sink=[2:7488008794484142585:2498].{
: Error: Operation is aborting because it cannot acquire locks, code: 2001 } 2025-03-31T16:18:27.139654Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7488008794484142531:2498], SessionActorId: [2:7488008790189174945:2498], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/TestImmediateEffects`., code: 2001
: Error: Operation is aborting because it cannot acquire locks, code: 2001 . sessionActorId=[2:7488008790189174945:2498]. isRollback=0 2025-03-31T16:18:27.140002Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OTA5MTYxYzEtOGI3YjkwMWUtNWRlODVkYzctZjljZTAzYjI=, ActorId: [2:7488008790189174945:2498], ActorState: ExecuteState, TraceId: 01jqpfyh39d0nhxkgsxf11hrqk, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [2:7488008794484142579:2498] from: [2:7488008794484142531:2498] 2025-03-31T16:18:27.140106Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7488008794484142579:2498] TxId: 281474976715676. Ctx: { TraceId: 01jqpfyh39d0nhxkgsxf11hrqk, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OTA5MTYxYzEtOGI3YjkwMWUtNWRlODVkYzctZjljZTAzYjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/TestImmediateEffects`., code: 2001 subissue: {
: Error: Operation is aborting because it cannot acquire locks, code: 2001 } } 2025-03-31T16:18:27.141212Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OTA5MTYxYzEtOGI3YjkwMWUtNWRlODVkYzctZjljZTAzYjI=, ActorId: [2:7488008790189174945:2498], ActorState: ExecuteState, TraceId: 01jqpfyh39d0nhxkgsxf11hrqk, Create QueryResponse for error on request, msg:
>> KqpCost::OlapRange [GOOD]
>> Cdc::DocApi[TopicRunner] [GOOD]
>> Cdc::HugeKey[PqRunner]
>> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientDoesNotProvideClientCerts [GOOD]
>> Cdc::UpdatesLog[TopicRunner] [GOOD]
>> Cdc::VirtualTimestamps[PqRunner]
>> TExtSubDomainTest::DeclareAndDefineWithoutNodes-AlterDatabaseCreateHiveFirst-false
>> TExtSubDomainTest::DeclareAndDefineWithNodes-AlterDatabaseCreateHiveFirst-false
>> TExtSubDomainTest::DeclareAndLs
>> TExtSubDomainTest::DeclareAndDefineWithoutNodes-AlterDatabaseCreateHiveFirst-true
>> TExtSubDomainTest::CreateTableInsideAndLs-AlterDatabaseCreateHiveFirst-false
>> TExtSubDomainTest::CreateTableInsideThenStopTenantAndForceDeleteSubDomain-AlterDatabaseCreateHiveFirst-false
>> TExtSubDomainTest::GenericCases
>> Cdc::NewAndOldImagesLog[YdsRunner] [GOOD]
>> Cdc::NewAndOldImagesLog[TopicRunner]
>> TExtSubDomainTest::CreateTableInsideAndAlterDomainAndTable-AlterDatabaseCreateHiveFirst-false
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::OlapRange [GOOD]
Test command err: Trying to start YDB, gRPC: 26411, MsgBus: 16653 2025-03-31T16:18:21.986333Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008768385461294:2143];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:21.989624Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ae9/r3tmp/tmpqe5kI3/pdisk_1.dat 2025-03-31T16:18:22.485191Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:22.485279Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:22.488520Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:18:22.511214Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26411, node 1 2025-03-31T16:18:22.679347Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:22.679368Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:22.679375Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:22.679496Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16653 TClient is connected to server localhost:16653 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:23.391069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:23.406960Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:18:23.413093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:23.542899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:23.733219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:23.818208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:25.556327Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008785565332165:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:25.556443Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:25.925119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:25.968276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:26.039289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:26.070411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:26.106741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:26.154064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:26.222587Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008789860299978:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:26.222694Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:26.223255Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008789860299983:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:26.226740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:26.240502Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008789860299985:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:18:26.332151Z node 1 :TX_PROXY ERROR: Actor# [1:7488008789860300041:3457] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:26.984599Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008768385461294:2143];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:26.984692Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:18:27.416506Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:18:27.592952Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;self_id=[1:7488008794155267775:2505];tablet_id=72075186224037927;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:18:27.592952Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7488008794155267752:2501];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:18:27.593201Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7488008794155267752:2501];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:18:27.593498Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7488008794155267752:2501];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:18:27.593629Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7488008794155267752:2501];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:18:27.593732Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7488008794155267752:2501];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:18:27.593753Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;self_id=[1:7488008794155267775:2505];tablet_id=72075186224037927;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:18:27.593845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7488008794155267752:2501];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:18:27.593940Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;self_id=[1:7488008794155267775:2505];tablet_id=72075186224037927;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:18:27.593951Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037922;self_id=[1:7488008794155267752:2501];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:18:27.594055Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7488008794155267752:2501];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:18:27.594065Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;self_id=[1:7488008794155267775:2505];tablet_id=72075186224037927;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:18:27.594152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7488008794155267752:2501];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:18:27.594246Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;self_id=[1:7488008794155267775:2505];tablet_id=72075186224037927;process=TTxInitSchema::Ex ... iption=CLASS_NAME=Chunks;id=2; 2025-03-31T16:18:27.777802Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:18:27.777944Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:18:27.777975Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:18:27.778052Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:18:27.778076Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:18:27.778127Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:18:27.778158Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:18:27.778198Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:18:27.778229Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:18:27.778771Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:18:27.778818Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:18:27.778974Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:18:27.779008Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:18:27.779141Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:18:27.779172Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:18:27.779347Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:18:27.779384Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:18:27.779483Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T16:18:27.779512Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T16:18:27.780325Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:18:27.780372Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:18:27.780471Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:18:27.780499Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:18:27.780637Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:18:27.780660Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:18:27.780728Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:18:27.780767Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037926;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:18:27.780833Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:18:27.780856Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:18:27.780890Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:18:27.780914Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:18:27.781390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:18:27.781441Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:18:27.781612Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:18:27.781648Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:18:27.781760Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:18:27.781790Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:18:27.781944Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:18:27.781973Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:18:27.782064Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T16:18:27.782091Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T16:18:27.818535Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037920;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-31T16:18:27.818920Z node 1 
:TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037919;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-31T16:18:27.824892Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037922;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-31T16:18:27.825523Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037924;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-31T16:18:27.831124Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037926;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-31T16:18:27.831395Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-31T16:18:27.837544Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037921;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-31T16:18:27.837554Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-31T16:18:27.843564Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-31T16:18:27.843886Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037925;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-31T16:18:27.974741Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=281474976710673;tx_id=281474976710673;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710673; 2025-03-31T16:18:27.974753Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=281474976710673;tx_id=281474976710673;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710673; 2025-03-31T16:18:27.975391Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=281474976710673;tx_id=281474976710673;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710673; >> KqpImmediateEffects::DeleteOnAfterInsertWithIndex [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeView [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeTransfer [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::ScanScriptingRangeFullScan+SourceRead [GOOD] Test command err: Trying to start YDB, gRPC: 63082, MsgBus: 5111 2025-03-31T16:18:23.811941Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008777749587780:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:23.811976Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/0l5c/000ae8/r3tmp/tmpx9woii/pdisk_1.dat 2025-03-31T16:18:24.306789Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:24.324952Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:24.325023Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:24.327710Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63082, node 1 2025-03-31T16:18:24.429320Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:24.429349Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:24.429357Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:24.429479Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5111 TClient is connected to server localhost:5111 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:25.026536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:25.042342Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:18:25.064241Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:25.209526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:25.382818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:25.467713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:18:27.335367Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008794929458740:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:27.335539Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:27.703848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:27.733639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:27.768982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:27.798898Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:27.830605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:27.884870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:27.924881Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008794929459253:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:27.924949Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:27.924953Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008794929459258:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:27.927886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:27.937616Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008794929459260:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:18:28.016001Z node 1 :TX_PROXY ERROR: Actor# [1:7488008799224426610:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:28.812381Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008777749587780:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:28.812479Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:18:29.245932Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437909255, txId: 281474976710671] shutting down >> TExtSubDomainTest::DeclareAndAlterPools-AlterDatabaseCreateHiveFirst-false ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientDoesNotProvideClientCerts [GOOD] Test command err: 2025-03-31T16:17:34.363292Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008568953109266:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:34.363347Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d44/r3tmp/tmpiQl8rZ/pdisk_1.dat 2025-03-31T16:17:34.825966Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:34.828724Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:34.828803Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:34.834482Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14926, node 1 2025-03-31T16:17:35.011885Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:35.011920Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:35.011930Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:35.012064Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17509 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:35.337481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:35.422934Z node 1 :TICKET_PARSER DEBUG: Ticket E948BC6C305F4B1DC97E4457C436F9E43C0C07702032CFB506AFD58121E61A72 (ipv6:[::1]:51148) has now permanent error message 'Cannot create token from certificate. Client certificate failed verification' 2025-03-31T16:17:35.423577Z node 1 :TICKET_PARSER ERROR: Ticket E948BC6C305F4B1DC97E4457C436F9E43C0C07702032CFB506AFD58121E61A72: Cannot create token from certificate. Client certificate failed verification 2025-03-31T16:17:35.503563Z node 1 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:51174) has now valid token of root@builtin 2025-03-31T16:17:35.604035Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-03-31T16:17:35.604095Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:17:35.604105Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-31T16:17:35.604143Z node 1 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-03-31T16:17:39.260127Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488008588266752445:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:39.260280Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d44/r3tmp/tmp2raF7X/pdisk_1.dat 2025-03-31T16:17:39.462080Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:39.482000Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:39.482078Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:39.486272Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64140, node 4 2025-03-31T16:17:39.559988Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:39.560014Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:39.560023Z node 4 
:NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:39.560161Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6170 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:39.834978Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:39.951061Z node 4 :TICKET_PARSER DEBUG: Ticket E948BC6C305F4B1DC97E4457C436F9E43C0C07702032CFB506AFD58121E61A72 (ipv6:[::1]:52690) has now permanent error message 'Cannot create token from certificate. Client certificate failed verification' 2025-03-31T16:17:39.951665Z node 4 :TICKET_PARSER ERROR: Ticket E948BC6C305F4B1DC97E4457C436F9E43C0C07702032CFB506AFD58121E61A72: Cannot create token from certificate. 
Client certificate failed verification 2025-03-31T16:17:40.096650Z node 4 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:52708) has now valid token of root@builtin 2025-03-31T16:17:40.203049Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-03-31T16:17:40.203100Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:17:40.203111Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-31T16:17:40.203144Z node 4 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-03-31T16:17:44.307470Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7488008610533097288:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:44.307591Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d44/r3tmp/tmpzJskcQ/pdisk_1.dat 2025-03-31T16:17:44.475284Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:44.522130Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:44.522287Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:44.528691Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8606, node 7 2025-03-31T16:17:44.623302Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:44.623330Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:44.623344Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:44.623480Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30000 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:44.909358Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... E0331 16:17:44.960491273 390183 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1408F10B:SSL routines:ssl3_get_record:wrong version number. 
E0331 16:17:44.964916886 390182 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1408F10B:SSL routines:ssl3_get_record:wrong version number. E0331 16:17:44.973732060 390062 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1408F10B:SSL routines:ssl3_get_record:wrong version number. E0331 16:17:44.978186979 389923 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1408F10B:SSL routines:ssl3_get_record:wrong version number. E0331 16:17:44.992319714 390200 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1408F10B:SSL routines:ssl3_get_record:wrong version number. E0331 16:17:44.995162821 390061 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1408F10B:SSL routines:ssl3_get_record:wrong version number. E0331 16:17:45.002512469 389924 ssl_transport_security.cc:1431] Handshake failed ... secure subchannel for secure name 'localhost:8239'; Got args: {grpc.client_channel_factory=0x502000091cf0, grpc.default_authority=localhost:8239, grpc.default_compression_algorithm=0, grpc.http2.max_pings_without_data=0, grpc.internal.channel_credentials=0x506000096aa0, grpc.internal.event_engine=0x502000074b50, grpc.internal.subchannel_pool=0x5040000e9f10, grpc.keepalive_permit_without_calls=0, grpc.keepalive_time_ms=1250, grpc.keepalive_timeout_ms=10000, grpc.max_receive_message_length=64000000, grpc.max_send_message_length=64000000, grpc.primary_user_agent=grpc-c++/1.54.3, grpc.resource_quota=0x50400042bf90, grpc.server_uri=dns:///localhost:8239} E0331 16:18:12.162819878 401442 ssl_transport_security.cc:791] Invalid private key. E0331 16:18:12.162998378 401442 ssl_security_connector.cc:129] Handshaker factory creation failed with TSI_INVALID_ARGUMENT. E0331 16:18:12.163196441 401442 chttp2_connector.cc:269] Failed to create channel args during subchannel creation: INTERNAL: Failed to create secure subchannel for secure name 'localhost:8239'; Got args: {grpc.client_channel_factory=0x502000091cf0, grpc.default_authority=localhost:8239, grpc.default_compression_algorithm=0, grpc.http2.max_pings_without_data=0, grpc.internal.channel_credentials=0x50600002db00, grpc.internal.event_engine=0x502000548390, grpc.internal.subchannel_pool=0x5040000e9f10, grpc.keepalive_permit_without_calls=0, grpc.keepalive_time_ms=1250, grpc.keepalive_timeout_ms=10000, grpc.max_receive_message_length=64000000, grpc.max_send_message_length=64000000, grpc.primary_user_agent=grpc-c++/1.54.3, grpc.resource_quota=0x50400042bf90, grpc.server_uri=dns:///localhost:8239} E0331 16:18:12.164803755 401442 ssl_transport_security.cc:791] Invalid private key. E0331 16:18:12.164956098 401442 ssl_security_connector.cc:129] Handshaker factory creation failed with TSI_INVALID_ARGUMENT. 
E0331 16:18:12.165123377 401442 chttp2_connector.cc:269] Failed to create channel args during subchannel creation: INTERNAL: Failed to create secure subchannel for secure name 'localhost:8239'; Got args: {grpc.client_channel_factory=0x502000091cf0, grpc.default_authority=localhost:8239, grpc.default_compression_algorithm=0, grpc.http2.max_pings_without_data=0, grpc.internal.channel_credentials=0x50600002db00, grpc.internal.event_engine=0x5020000732d0, grpc.internal.subchannel_pool=0x5040000e9f10, grpc.keepalive_permit_without_calls=0, grpc.keepalive_time_ms=1250, grpc.keepalive_timeout_ms=10000, grpc.max_receive_message_length=64000000, grpc.max_send_message_length=64000000, grpc.primary_user_agent=grpc-c++/1.54.3, grpc.resource_quota=0x50400042bf90, grpc.server_uri=dns:///localhost:8239} E0331 16:18:12.170286307 401442 ssl_transport_security.cc:791] Invalid private key. E0331 16:18:12.170462527 401442 ssl_security_connector.cc:129] Handshaker factory creation failed with TSI_INVALID_ARGUMENT. E0331 16:18:12.170640808 401442 chttp2_connector.cc:269] Failed to create channel args during subchannel creation: INTERNAL: Failed to create secure subchannel for secure name 'localhost:8239'; Got args: {grpc.client_channel_factory=0x502000091cf0, grpc.default_authority=localhost:8239, grpc.default_compression_algorithm=0, grpc.http2.max_pings_without_data=0, grpc.internal.channel_credentials=0x50600002ce40, grpc.internal.event_engine=0x50200064e770, grpc.internal.subchannel_pool=0x5040000e9f10, grpc.keepalive_permit_without_calls=0, grpc.keepalive_time_ms=1250, grpc.keepalive_timeout_ms=10000, grpc.max_receive_message_length=64000000, grpc.max_send_message_length=64000000, grpc.primary_user_agent=grpc-c++/1.54.3, grpc.resource_quota=0x50400042bf90, grpc.server_uri=dns:///localhost:8239} E0331 16:18:12.172251796 401442 ssl_transport_security.cc:791] Invalid private key. E0331 16:18:12.172458708 401442 ssl_security_connector.cc:129] Handshaker factory creation failed with TSI_INVALID_ARGUMENT. 
E0331 16:18:12.172660879 401442 chttp2_connector.cc:269] Failed to create channel args during subchannel creation: INTERNAL: Failed to create secure subchannel for secure name 'localhost:8239'; Got args: {grpc.client_channel_factory=0x502000091cf0, grpc.default_authority=localhost:8239, grpc.default_compression_algorithm=0, grpc.http2.max_pings_without_data=0, grpc.internal.channel_credentials=0x50600002ce40, grpc.internal.event_engine=0x502000268810, grpc.internal.subchannel_pool=0x5040000e9f10, grpc.keepalive_permit_without_calls=0, grpc.keepalive_time_ms=1250, grpc.keepalive_timeout_ms=10000, grpc.max_receive_message_length=64000000, grpc.max_send_message_length=64000000, grpc.primary_user_agent=grpc-c++/1.54.3, grpc.resource_quota=0x50400042bf90, grpc.server_uri=dns:///localhost:8239} 2025-03-31T16:18:17.301579Z node 25 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[25:7488008753946188922:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:17.301717Z node 25 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d44/r3tmp/tmpyfEghV/pdisk_1.dat 2025-03-31T16:18:17.456441Z node 25 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:17.498444Z node 25 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:17.498555Z node 25 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:17.502756Z node 25 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19953, node 25 2025-03-31T16:18:17.581497Z node 25 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:17.581530Z node 25 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:17.581542Z node 25 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:17.581712Z node 25 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4248 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:17.965670Z node 25 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:18:17.988780Z node 25 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:18:18.129021Z node 25 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:42508) has now valid token of root@builtin 2025-03-31T16:18:18.205933Z node 25 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-03-31T16:18:18.205972Z node 25 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:18:18.205990Z node 25 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-31T16:18:18.206038Z node 25 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-03-31T16:18:23.839518Z node 28 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[28:7488008777467533976:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:23.839961Z node 28 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d44/r3tmp/tmpEQxlIw/pdisk_1.dat 2025-03-31T16:18:24.000547Z node 28 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:24.049080Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:24.049345Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:24.053403Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12369, node 28 2025-03-31T16:18:24.144831Z node 28 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:24.144860Z node 28 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:24.144873Z node 28 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:24.145053Z node 28 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13933 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:24.561992Z node 28 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:18:24.772688Z node 28 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:47948) has now valid token of root@builtin 2025-03-31T16:18:24.856191Z node 28 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-03-31T16:18:24.856227Z node 28 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:18:24.856249Z node 28 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-31T16:18:24.856301Z node 28 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeReplication [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeExternalTable ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeTransfer [GOOD] Test command err: 2025-03-31T16:18:03.520607Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008692983259453:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:03.520675Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001039/r3tmp/tmpsTIA82/pdisk_1.dat 2025-03-31T16:18:03.986218Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:03.990697Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:03.990776Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:04.003847Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8632, node 1 2025-03-31T16:18:04.233288Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:04.233306Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:04.233312Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:04.233414Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3589 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:18:04.672492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:06.645275Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008705868162400:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:06.645433Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:07.162855Z node 1 :TX_PROXY DEBUG: actor# [1:7488008692983259712:2141] Handle TEvProposeTransaction 2025-03-31T16:18:07.162897Z node 1 :TX_PROXY DEBUG: actor# [1:7488008692983259712:2141] TxId# 281474976710658 ProcessProposeTransaction 2025-03-31T16:18:07.163000Z node 1 :TX_PROXY DEBUG: actor# [1:7488008692983259712:2141] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7488008710163129735:2635] 2025-03-31T16:18:07.238125Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008710163129735:2635] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table" Columns { Name: "Key" Type: "Uint32" NotNull: false } Columns { Name: "Value" Type: "Utf8" NotNull: false } KeyColumnNames: "Key" PartitionConfig { } Temporary: false } } } UserToken: "" DatabaseName: "" 2025-03-31T16:18:07.238196Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008710163129735:2635] txid# 281474976710658 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T16:18:07.238605Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008710163129735:2635] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-03-31T16:18:07.238670Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008710163129735:2635] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:18:07.238914Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008710163129735:2635] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:18:07.239045Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008710163129735:2635] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T16:18:07.239116Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008710163129735:2635] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-03-31T16:18:07.239250Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008710163129735:2635] txid# 281474976710658 HANDLE EvClientConnected 2025-03-31T16:18:07.240835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:18:07.243728Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008710163129735:2635] txid# 281474976710658 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710658} 2025-03-31T16:18:07.243782Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008710163129735:2635] txid# 281474976710658 SEND to# [1:7488008710163129734:2343] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 53} 2025-03-31T16:18:07.385526Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008710163129881:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:07.385604Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:07.385616Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008710163129886:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:07.385967Z node 1 :TX_PROXY DEBUG: actor# [1:7488008692983259712:2141] Handle TEvProposeTransaction 2025-03-31T16:18:07.385995Z node 1 :TX_PROXY DEBUG: actor# [1:7488008692983259712:2141] TxId# 281474976710659 ProcessProposeTransaction 2025-03-31T16:18:07.386096Z node 1 :TX_PROXY DEBUG: actor# [1:7488008692983259712:2141] Cookie# 0 userReqId# "" txid# 281474976710659 SEND to# [1:7488008710163129889:2756] 2025-03-31T16:18:07.390059Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008710163129889:2756] txid# 281474976710659 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/Root" 2025-03-31T16:18:07.390139Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008710163129889:2756] txid# 281474976710659 Bootstrap, UserSID: metadata@system CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T16:18:07.390164Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008710163129889:2756] txid# 281474976710659 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 1 2025-03-31T16:18:07.391628Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008710163129889:2756] txid# 281474976710659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-03-31T16:18:07.391721Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008710163129889:2756] txid# 281474976710659 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:18:07.391963Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008710163129889:2756] txid# 281474976710659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:18:07.392094Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008710163129889:2756] HANDLE EvNavigateKeySetResult, txid# 281474976710659 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T16:18:07.392141Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008710163129889:2756] txid# 281474976710659 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710659 TabletId# 72057594046644480} 2025-03-31T16:18:07.392294Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008710163129889:2756] txid# 281474976710659 HANDLE EvClientConnected 2025-03-31T16:18:07.393737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-31T16:18:07.397956Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008710163129889:2756] txid# 
281474976710659 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2025-03-31T16:18:07.398005Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008710163129889:2756] txid# 281474976710659 SEND to# [1:7488008710163129888:2354] Source {TEvProposeTransactionStatus txid# 281474976710659 Status# 53} 2025-03-31T16:18:07.417878Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008710163129888:2354], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-31T16:18:07.477999Z node 1 :TX_PROXY DEBUG: actor# [1:7488008692983259712:2141] Handle TEvProposeTransaction 2025-03-31T16:18:07.478026Z node 1 :TX_PROXY DEBUG: actor# [1:7488008692983259712:2141] TxId# 281474976710660 ProcessProposeTransaction 2025-03-31T16:18:07.478069Z node 1 :TX_PROXY DEBUG: actor# [1:7488008692983259712:2141] Cookie# 0 userReqId# "" txid# 281474976710660 SEND to# [1:7488008710163129959:2806] 2025-03-31T16:18:07.480653Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008710163129959:2806] txid# 281474976710660 Bootstrap EvSchemeRequest record: Transaction { ... TEvExternalStorage::TEvHeadObjectResponse: self# [10:7488008799445516744:2200], result# No response body. REQUEST: HEAD /test_bucket/view/create_view.sql HTTP/1.1 HEADERS: Host: localhost:27697 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 13D1B314-3DA1-4CAF-8BE4-32F7EC45F8B4 amz-sdk-request: attempt=1 authorization: AWS4-HMAC-SHA256 Credential=test_key/20250331/us-east-1/s3/aws4_request, SignedHeaders=amz-sdk-invocation-id;amz-sdk-request;content-type;host;x-amz-api-version;x-amz-content-sha256;x-amz-date, Signature=a6452e036feb55edcc589d9aa841cca35052212b6ba49d509484bf9c64653463 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-content-sha256: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 x-amz-date: 20250331T161828Z S3_MOCK::HttpServeRead: /test_bucket/view/create_view.sql / 165 2025-03-31T16:18:28.757772Z node 10 :IMPORT DEBUG: HandleScheme TEvExternalStorage::TEvHeadObjectResponse: self# [10:7488008799445516744:2200], result# HeadObjectResult { ETag: 54623f53d68141118383b3390c4965d5 ContentLength: 165 } REQUEST: GET /test_bucket/view/create_view.sql HTTP/1.1 HEADERS: Host: localhost:27697 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: E2319A8E-1520-44EC-9C0A-FBE7E1711AA8 amz-sdk-request: attempt=1 authorization: AWS4-HMAC-SHA256 Credential=test_key/20250331/us-east-1/s3/aws4_request, SignedHeaders=amz-sdk-invocation-id;amz-sdk-request;content-type;host;range;x-amz-api-version;x-amz-content-sha256;x-amz-date, Signature=655a80c2cffb354537aae8c3e90b9da9ab9bde17c9d45c74bd2b3841106421ec content-type: application/xml range: bytes=0-164 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-content-sha256: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 x-amz-date: 20250331T161828Z S3_MOCK::HttpServeRead: /test_bucket/view/create_view.sql / 165 2025-03-31T16:18:28.762048Z node 10 :IMPORT DEBUG: HandleScheme TEvExternalStorage::TEvGetObjectResponse: self# [10:7488008799445516744:2200], result# 54623f53d68141118383b3390c4965d5 REQUEST: HEAD /test_bucket/view/permissions.pb HTTP/1.1 HEADERS: Host: localhost:27697 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 9A73ED86-96AE-426B-A96A-C6F3BD20BC7C amz-sdk-request: attempt=1 authorization: AWS4-HMAC-SHA256 Credential=test_key/20250331/us-east-1/s3/aws4_request, SignedHeaders=amz-sdk-invocation-id;amz-sdk-request;content-type;host;x-amz-api-version;x-amz-content-sha256;x-amz-date, Signature=d52bde27641d8da0f6bee676163d40d0106d10699a1616bd2c6f4e812f558737 content-type: application/xml user-agent: 
aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-content-sha256: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 x-amz-date: 20250331T161828Z 2025-03-31T16:18:28.803830Z node 10 :IMPORT DEBUG: HandlePermissions TEvExternalStorage::TEvHeadObjectResponse: self# [10:7488008799445516744:2200], result# No response body. REQUEST: GET /test_bucket?prefix=view HTTP/1.1 HEADERS: Host: localhost:27697 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 79D8D4B0-94AE-47BF-8893-1145267B3777 amz-sdk-request: attempt=1 authorization: AWS4-HMAC-SHA256 Credential=test_key/20250331/us-east-1/s3/aws4_request, SignedHeaders=amz-sdk-invocation-id;amz-sdk-request;content-type;host;x-amz-api-version;x-amz-content-sha256;x-amz-date, Signature=7cc5b82d17a2866d4a4ac80e49c696851bfc4eabfdd6b3b5d736aeac8c2614ed content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-content-sha256: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 x-amz-date: 20250331T161828Z S3_MOCK::HttpServeList: view 2025-03-31T16:18:28.844777Z node 10 :IMPORT DEBUG: HandleChangefeeds TEvExternalStorage::TEvListObjectResponse: self# [10:7488008799445516744:2200], result# ListObjectsResult { } 2025-03-31T16:18:28.844844Z node 10 :IMPORT INFO: Reply: self# [10:7488008799445516744:2200], success# 1, error# 2025-03-31T16:18:28.844997Z node 10 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-03-31T16:18:28.845015Z node 10 :IMPORT DEBUG: TImport::TTxProgress: OnSchemeResult: id# 281474976715664, itemIdx# 0, success# 1 2025-03-31T16:18:28.865404Z node 10 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-03-31T16:18:28.899861Z node 10 :IMPORT DEBUG: TSchemeQueryExecutor HandleCompileResponse, self: [10:7488008799445516753:2832], status: SUCCESS 2025-03-31T16:18:28.899955Z node 10 :IMPORT INFO: TSchemeQueryExecutor Reply, self: [10:7488008799445516753:2832], status: SUCCESS 2025-03-31T16:18:28.900231Z node 10 :IMPORT DEBUG: TSchemeQueryExecutor Reply, self: [10:7488008799445516753:2832], status: SUCCESS, prepared query: "WorkingDir: \"/Root\" OperationType: ESchemeOpCreateView FailedOnAlreadyExists: false CreateView { Name: \"view\" QueryText: \"SELECT 1 AS Key UNION SELECT 2 AS Key UNION SELECT 3 AS Key\" CapturedContext { PathPrefix: \"/Root\" SyntaxVersion: 1 AnsiLexer: false PgParser: false Pragmas: \"AnsiInForEmptyOrNullableItemsCollections\" Pragmas: \"AnsiLike\" Pragmas: \"FlexibleTypes\" Pragmas: \"AnsiCurrentRow\" Pragmas: \"WarnOnAnsiAliasShadowing\" Pragmas: \"AnsiOptionalAs\" Pragmas: \"EmitAggApply\" } }" 2025-03-31T16:18:28.900403Z node 10 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-03-31T16:18:28.900464Z node 10 :IMPORT DEBUG: TImport::TTxProgress: OnSchemeQueryPreparation: id# 281474976715664, itemIdx# 0, status# SUCCESS, error# 2025-03-31T16:18:28.900671Z node 10 :IMPORT INFO: TImport::TTxProgress: Allocate txId: info# { Id: 281474976715664 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/Root/view' DstPathId: State: CreateSchemeObject SubState: AllocateTxId WaitTxId: 0 Issue: '' } 2025-03-31T16:18:28.903915Z node 10 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-03-31T16:18:28.904090Z node 10 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 
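Two constants in the S3 mock exchange above can be sanity-checked offline: the x-amz-content-sha256 value repeated on every request (e3b0c442...7852b855) is the well-known SHA-256 of an empty payload, since HEAD and GET requests carry no body, and the inclusive "range: bytes=0-164" covers exactly the 165 bytes the preceding HEAD reported as ContentLength. A minimal sketch, not part of the test log, assuming OpenSSL's libcrypto is available:

    // Sanity check for two values from the S3 mock exchange above.
    // Build (assumption: OpenSSL installed): clang++ check.cpp -lcrypto
    #include <openssl/sha.h>
    #include <cassert>
    #include <cstdio>
    #include <string>

    int main() {
        // HEAD/GET requests have no body, so the signed payload hash is SHA-256("").
        unsigned char md[SHA256_DIGEST_LENGTH];
        SHA256(reinterpret_cast<const unsigned char*>(""), 0, md);
        char hex[2 * SHA256_DIGEST_LENGTH + 1];
        for (int i = 0; i < SHA256_DIGEST_LENGTH; ++i)
            std::snprintf(hex + 2 * i, 3, "%02x", md[i]);
        assert(std::string(hex) ==
               "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855");
        // HTTP ranges are inclusive on both ends: bytes=0-164 is the whole
        // 165-byte object served by S3_MOCK::HttpServeRead.
        assert(164 - 0 + 1 == 165);
        std::puts("x-amz-content-sha256 and range both match the log");
        return 0;
    }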
2025-03-31T16:18:28.904115Z node 10 :IMPORT DEBUG: TImport::TTxProgress: OnAllocateResult: txId# 281474976710758, id# 281474976715664 2025-03-31T16:18:28.904203Z node 10 :IMPORT INFO: TImport::TTxProgress: ExecutePreparedQuery: info# { Id: 281474976715664 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/Root/view' DstPathId: State: CreateSchemeObject SubState: Proposed WaitTxId: 0 Issue: '' }, txId# 281474976710758 2025-03-31T16:18:28.904312Z node 10 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-03-31T16:18:28.907282Z node 10 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-03-31T16:18:28.907318Z node 10 :IMPORT DEBUG: TImport::TTxProgress: OnModifyResult: txId# 281474976710758, status# StatusAccepted 2025-03-31T16:18:28.907494Z node 10 :IMPORT INFO: TImport::TTxProgress: Wait for completion: info# { Id: 281474976715664 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/Root/view' DstPathId: [OwnerId: 72057594046644480, LocalPathId: 8] State: CreateSchemeObject SubState: Subscribed WaitTxId: 281474976710758 Issue: '' } 2025-03-31T16:18:28.909677Z node 10 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-03-31T16:18:28.914135Z node 10 :TX_PROXY DEBUG: [GetImport] [10:7488008799445516781:2371] [0] Resolve database: name# /Root 2025-03-31T16:18:28.914551Z node 10 :TX_PROXY DEBUG: [GetImport] [10:7488008799445516781:2371] [0] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-31T16:18:28.914584Z node 10 :TX_PROXY DEBUG: [GetImport] [10:7488008799445516781:2371] [0] Send request: schemeShardId# 72057594046644480 2025-03-31T16:18:28.915898Z node 10 :TX_PROXY DEBUG: [GetImport] [10:7488008799445516781:2371] [0] Handle TEvImport::TEvGetImportResponse: record# Entry { Id: 281474976715664 Status: SUCCESS Progress: PROGRESS_PREPARING ImportFromS3Settings { endpoint: "localhost:27697" scheme: HTTP bucket: "test_bucket" items { source_prefix: "view" destination_path: "/Root/view" } } StartTime { seconds: 1743437908 } } 2025-03-31T16:18:28.928744Z node 10 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-03-31T16:18:28.928784Z node 10 :IMPORT DEBUG: TImport::TTxProgress: OnNotifyResult: txId# 281474976710758 2025-03-31T16:18:28.968688Z node 10 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-03-31T16:18:29.149582Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7488008782265646183:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:29.149688Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:18:29.322138Z node 10 :TX_PROXY DEBUG: [GetImport] 
[10:7488008803740484109:2373] [0] Resolve database: name# /Root 2025-03-31T16:18:29.322543Z node 10 :TX_PROXY DEBUG: [GetImport] [10:7488008803740484109:2373] [0] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-31T16:18:29.322571Z node 10 :TX_PROXY DEBUG: [GetImport] [10:7488008803740484109:2373] [0] Send request: schemeShardId# 72057594046644480 2025-03-31T16:18:29.323146Z node 10 :TX_PROXY DEBUG: [GetImport] [10:7488008803740484109:2373] [0] Handle TEvImport::TEvGetImportResponse: record# Entry { Id: 281474976715664 Status: SUCCESS Progress: PROGRESS_DONE ImportFromS3Settings { endpoint: "localhost:27697" scheme: HTTP bucket: "test_bucket" items { source_prefix: "view" destination_path: "/Root/view" } } StartTime { seconds: 1743437908 } EndTime { seconds: 1743437908 } } 2025-03-31T16:18:29.468512Z node 10 :TX_PROXY DEBUG: actor# [10:7488008782265646370:2114] Handle TEvExecuteKqpTransaction 2025-03-31T16:18:29.468557Z node 10 :TX_PROXY DEBUG: actor# [10:7488008782265646370:2114] TxId# 281474976715665 ProcessProposeKqpTransaction 2025-03-31T16:18:29.469085Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jqpfykag1s5rtt4b4qd5nw7x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NTg5YWJmMWItYzAwMjUxNGItM2FlNTZlMTEtMjI2ZTQ1ZWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::DeleteOnAfterInsertWithIndex [GOOD] Test command err: Trying to start YDB, gRPC: 13215, MsgBus: 32426 2025-03-31T16:18:13.651499Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008735613028836:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:13.651564Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001158/r3tmp/tmpAFObIH/pdisk_1.dat 2025-03-31T16:18:14.186191Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:14.186326Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:14.203678Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:18:14.241163Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13215, node 1 2025-03-31T16:18:14.431667Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:14.431705Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:14.431727Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:14.431845Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32426 TClient is connected to server localhost:32426 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:15.241977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:18:15.305562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:18:15.487202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
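The "Scheduled retry for error: { <main>: Error: Transaction ... completed, doublechecking }" lines in this section (txid 281474976710659 above, 281474976710668 below), each resolved by a later "path exist, request accepts it", trace a create-if-absent race on the default resource pool: the creator retries once the transaction has landed and treats "already exists" as success. A hedged sketch of that retry shape; ECreateResult and CreateIfAbsentWithRetry are illustrative names, not YDB APIs:

    #include <chrono>
    #include <functional>
    #include <thread>

    enum class ECreateResult { Ok, AlreadyExists, Transient };

    // Hypothetical helper: retry a create-if-absent operation, treating
    // "already exists" as success, as the pool-creator log lines suggest.
    bool CreateIfAbsentWithRetry(const std::function<ECreateResult()>& tryCreate,
                                 int maxAttempts = 5) {
        auto delay = std::chrono::milliseconds(50);
        for (int attempt = 1; attempt <= maxAttempts; ++attempt) {
            switch (tryCreate()) {
                case ECreateResult::Ok:
                case ECreateResult::AlreadyExists:  // "path exist, request accepts it"
                    return true;
                case ECreateResult::Transient:      // "completed, doublechecking"
                    std::this_thread::sleep_for(delay);
                    delay *= 2;                     // back off before the next attempt
                    break;
            }
        }
        return false;
    }

    int main() {
        int calls = 0;
        // Stub transaction: the first attempt lands in the transient state,
        // the second finds the pool already created by a concurrent actor.
        auto tryCreate = [&calls] {
            return ++calls == 1 ? ECreateResult::Transient
                                : ECreateResult::AlreadyExists;
        };
        return CreateIfAbsentWithRetry(tryCreate) ? 0 : 1;
    }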
2025-03-31T16:18:15.697150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:15.781190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:17.150743Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008752792899778:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:17.150845Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:17.760231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:17.803836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:17.838146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:17.910587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:17.939517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:18.005003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:18.059653Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008757087867590:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:18.059753Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:18.059965Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008757087867595:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:18.064014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:18.075166Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008757087867597:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:18:18.180015Z node 1 :TX_PROXY ERROR: Actor# [1:7488008757087867656:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:18.651659Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008735613028836:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:18.651721Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:18:19.174329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 17402, MsgBus: 29146 2025-03-31T16:18:20.745607Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008767110418229:2250];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001158/r3tmp/tmplfCXK4/pdisk_1.dat 2025-03-31T16:18:20.796763Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:18:20.888246Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:20.904757Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:20.904834Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:20.906543Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17402, node 2 2025-03-31T16:18:21.061707Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:21.061727Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:21.061733Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:21.061839Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29146 TClient is connected to server localhost:29146 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:21.615841Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:21.679691Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:21.770841Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:21.950562Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:22.034279Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:24.244284Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008784290288960:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:24.244391Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:24.295176Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.331561Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.375289Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.415266Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.454791Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.517270Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.569536Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008784290289470:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:24.569613Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:24.569771Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008784290289475:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:24.573532Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:24.586979Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488008784290289477:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:18:24.683855Z node 2 :TX_PROXY ERROR: Actor# [2:7488008784290289533:3434] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:25.580612Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:18:25.628382Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:18:25.714141Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T16:18:25.719193Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488008767110418229:2250];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:25.719268Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |88.6%| [TA] $(B)/ydb/core/kqp/ut/effects/test-results/unittest/{meta.json ... results_accumulator.log} |88.6%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/effects/test-results/unittest/{meta.json ... results_accumulator.log} |88.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |88.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |88.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |88.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |88.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |88.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |88.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |88.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |88.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |88.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |88.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |88.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |88.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Upsert+LogEnabled-UseSink |88.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |88.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |88.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |88.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Ddl >> TExtSubDomainTest::DeclareAndLs [GOOD] >> KqpDataIntegrityTrails::UpsertEvWriteQueryService-isOlap-useOltpSink >> KqpDataIntegrityTrails::BrokenReadLockAbortedTx |88.7%| [TS] {asan, 
default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> BackupRestore::RestoreKesusResources [GOOD] >> BackupRestore::RestoreReplicationWithoutSecret >> KqpDataIntegrityTrails::Select >> TExtSubDomainTest::DeclareAndDefineWithoutNodes-AlterDatabaseCreateHiveFirst-true [GOOD] >> TExtSubDomainTest::DeclareAndDefineWithoutNodes-AlterDatabaseCreateHiveFirst-false [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::DeclareAndLs [GOOD] Test command err: 2025-03-31T16:18:30.718165Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008806689078600:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:30.718218Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001b48/r3tmp/tmpB5S6cb/pdisk_1.dat 2025-03-31T16:18:31.125342Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:31.146523Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:31.146643Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:31.149997Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28115 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-03-31T16:18:31.353869Z node 1 :TX_PROXY DEBUG: actor# [1:7488008806689078842:2103] Handle TEvNavigate describe path dc-1 2025-03-31T16:18:31.353937Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008810984046416:2257] HANDLE EvNavigateScheme dc-1 2025-03-31T16:18:31.354854Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7488008806689078866:2116], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:18:31.354913Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7488008806689078866:2116], path# /dc-1, domainOwnerId# 72057594046644480 2025-03-31T16:18:31.355148Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008810984046417:2258][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-03-31T16:18:31.361362Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008806689078540:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7488008810984046422:2258] 2025-03-31T16:18:31.361374Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008806689078537:2049] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7488008810984046421:2258] 2025-03-31T16:18:31.361447Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7488008806689078540:2052] Subscribe: subscriber# [1:7488008810984046422:2258], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-31T16:18:31.361447Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7488008806689078537:2049] Subscribe: subscriber# [1:7488008810984046421:2258], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 
2025-03-31T16:18:31.361529Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008806689078543:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7488008810984046423:2258] 2025-03-31T16:18:31.361540Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008810984046421:2258][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7488008806689078537:2049] 2025-03-31T16:18:31.361580Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7488008806689078543:2055] Subscribe: subscriber# [1:7488008810984046423:2258], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-31T16:18:31.361593Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008810984046422:2258][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7488008806689078540:2052] 2025-03-31T16:18:31.361609Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008810984046423:2258][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7488008806689078543:2055] 2025-03-31T16:18:31.361625Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008806689078537:2049] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7488008810984046421:2258] 2025-03-31T16:18:31.361637Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008810984046417:2258][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7488008810984046418:2258] 2025-03-31T16:18:31.361649Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008806689078540:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7488008810984046422:2258] 2025-03-31T16:18:31.361661Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008810984046417:2258][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7488008810984046419:2258] 2025-03-31T16:18:31.361665Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008806689078543:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7488008810984046423:2258] 2025-03-31T16:18:31.361720Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7488008810984046417:2258][/dc-1] Set up state: owner# [1:7488008806689078866:2116], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-03-31T16:18:31.363052Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008810984046417:2258][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7488008810984046420:2258] 2025-03-31T16:18:31.363133Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7488008810984046417:2258][/dc-1] Path was already updated: owner# [1:7488008806689078866:2116], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 
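The sync rounds traced below ("Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1", then "Sync is done ... successes# 2") show the subscriber waiting for a majority of the three scheme-board replicas: a round is decided once more than half of the replicas have answered, and the straggler's reply is logged as an "Unexpected sync response". A small model of that counter, inferred from the log fields only; the failure-majority branch is a symmetry guess, and "faulires" is the log's own spelling:

    // Models the per-cookie sync counters printed by the subscriber:
    // size# (replica count), half# (size / 2), successes#, faulires# (sic).
    #include <cassert>
    #include <cstddef>

    struct TSyncRound {
        std::size_t Size = 3;      // three scheme-board replicas in this log
        std::size_t Successes = 0;
        std::size_t Failures = 0;

        std::size_t Half() const { return Size / 2; }
        bool Done() const { return Successes > Half() || Failures > Half(); }

        // Returns false once the round is already decided, matching the
        // "Unexpected sync response" lines for the third replica's reply.
        bool OnResponse(bool ok) {
            if (Done()) return false;
            (ok ? Successes : Failures) += 1;
            return true;
        }
    };

    int main() {
        TSyncRound round;                // size# 3, half# 1
        assert(round.OnResponse(true));  // successes# 1 -> "Sync is in progress"
        assert(!round.Done());
        assert(round.OnResponse(true));  // successes# 2 -> "Sync is done"
        assert(round.Done());
        assert(!round.OnResponse(true)); // straggler -> "Unexpected sync response"
        return 0;
    }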
2025-03-31T16:18:31.363180Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008810984046421:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488008810984046418:2258], cookie# 1 2025-03-31T16:18:31.363195Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008810984046422:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488008810984046419:2258], cookie# 1 2025-03-31T16:18:31.363208Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008810984046423:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488008810984046420:2258], cookie# 1 2025-03-31T16:18:31.363253Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008806689078537:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488008810984046421:2258], cookie# 1 2025-03-31T16:18:31.363307Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008806689078540:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488008810984046422:2258], cookie# 1 2025-03-31T16:18:31.363329Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008806689078543:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488008810984046423:2258], cookie# 1 2025-03-31T16:18:31.363359Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008810984046421:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488008806689078537:2049], cookie# 1 2025-03-31T16:18:31.363393Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008810984046422:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488008806689078540:2052], cookie# 1 2025-03-31T16:18:31.363408Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008810984046423:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488008806689078543:2055], cookie# 1 2025-03-31T16:18:31.363434Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008810984046417:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488008810984046418:2258], cookie# 1 2025-03-31T16:18:31.363476Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008810984046417:2258][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-03-31T16:18:31.363497Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008810984046417:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488008810984046419:2258], cookie# 1 2025-03-31T16:18:31.363519Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008810984046417:2258][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-03-31T16:18:31.363544Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008810984046417:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488008810984046420:2258], cookie# 1 2025-03-31T16:18:31.363555Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008810984046417:2258][/dc-1] Unexpected sync response: sender# [1:7488008810984046420:2258], cookie# 1 2025-03-31T16:18:31.419188Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7488008806689078866:2116], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" 
PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-03-31T16:18:31.420418Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7488008806689078866:2116], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVer ... 
SyncResponse { Path: /dc-1/USER_0 PathId: Partial: 0 } 2025-03-31T16:18:31.601216Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7488008806689078866:2116], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7488008810984046472:2299] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 9 TableKind: 0 Created: 1 CreateStep: 1743437911642 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-31T16:18:31.601333Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7488008806689078866:2116], cacheItem# { Subscriber: { Subscriber: [1:7488008810984046472:2299] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 9 TableKind: 0 Created: 1 CreateStep: 1743437911642 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-03-31T16:18:31.601546Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7488008810984046479:2300], recipient# [1:7488008810984046471:2298], result# { ErrorCount: 1 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [72057594046644480:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: RedirectLookupError Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-31T16:18:31.601606Z node 1 :TX_PROXY INFO: Actor# [1:7488008810984046471:2298] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 1 TClient::Ls response: Status: 128 StatusCode: ERROR Issues { message: "Default error" severity: 1 } SchemeStatus: 13 ErrorReason: "Could not resolve redirected path" TClient::Ls request: /dc-1 2025-03-31T16:18:31.607489Z node 1 :TX_PROXY DEBUG: actor# [1:7488008806689078842:2103] Handle TEvNavigate describe path /dc-1 2025-03-31T16:18:31.607553Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008810984046481:2302] HANDLE EvNavigateScheme /dc-1 2025-03-31T16:18:31.607654Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7488008806689078866:2116], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:18:31.607731Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008810984046417:2258][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7488008806689078866:2116], cookie# 4 2025-03-31T16:18:31.607786Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008810984046421:2258][/dc-1] Handle 
NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488008810984046418:2258], cookie# 4 2025-03-31T16:18:31.607814Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008810984046422:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488008810984046419:2258], cookie# 4 2025-03-31T16:18:31.607825Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008810984046423:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488008810984046420:2258], cookie# 4 2025-03-31T16:18:31.607840Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008806689078543:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488008810984046423:2258], cookie# 4 2025-03-31T16:18:31.607840Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008806689078537:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488008810984046421:2258], cookie# 4 2025-03-31T16:18:31.607862Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008806689078540:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488008810984046422:2258], cookie# 4 2025-03-31T16:18:31.607867Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008810984046423:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7488008806689078543:2055], cookie# 4 2025-03-31T16:18:31.607881Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008810984046421:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7488008806689078537:2049], cookie# 4 2025-03-31T16:18:31.607913Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008810984046422:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7488008806689078540:2052], cookie# 4 2025-03-31T16:18:31.607938Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008810984046417:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7488008810984046420:2258], cookie# 4 2025-03-31T16:18:31.607953Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008810984046417:2258][/dc-1] Sync is in progress: cookie# 4, size# 3, half# 1, successes# 1, faulires# 0 2025-03-31T16:18:31.607965Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008810984046417:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7488008810984046418:2258], cookie# 4 2025-03-31T16:18:31.607976Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008810984046417:2258][/dc-1] Sync is done: cookie# 4, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-03-31T16:18:31.607994Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008810984046417:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7488008810984046419:2258], cookie# 4 2025-03-31T16:18:31.608002Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008810984046417:2258][/dc-1] Unexpected sync response: sender# [1:7488008810984046419:2258], cookie# 4 2025-03-31T16:18:31.608035Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7488008806689078866:2116], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-03-31T16:18:31.608097Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7488008806689078866:2116], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 
PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7488008810984046417:2258] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1743437911628 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-31T16:18:31.608155Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7488008806689078866:2116], cacheItem# { Subscriber: { Subscriber: [1:7488008810984046417:2258] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1743437911628 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 4 IsSync: true Partial: 0 } 2025-03-31T16:18:31.608263Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7488008810984046482:2303], recipient# [1:7488008810984046481:2302], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-31T16:18:31.608309Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008810984046481:2302] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-31T16:18:31.608362Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008810984046481:2302] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1" Options { ShowPrivateTable: true } 2025-03-31T16:18:31.609027Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008810984046481:2302] Handle TEvDescribeSchemeResult Forward to# [1:7488008810984046480:2301] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 63 Record# Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743437911628 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { 
Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743437911628 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743437911642 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: ".sys" PathId: 18446744073709551615 ... (TRUNCATED) |88.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |88.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |88.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> Cdc::VirtualTimestamps[PqRunner] [GOOD] >> Cdc::VirtualTimestamps[YdsRunner] |88.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |88.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |88.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |88.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |88.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |88.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |88.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |88.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::DeclareAndDefineWithoutNodes-AlterDatabaseCreateHiveFirst-false [GOOD] Test command err: 2025-03-31T16:18:30.783752Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008810019857086:2131];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:30.784054Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001b27/r3tmp/tmpFkpJ6V/pdisk_1.dat 2025-03-31T16:18:31.187536Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:31.187657Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:31.190098Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:31.192468Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15979 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-03-31T16:18:31.384733Z node 1 :TX_PROXY DEBUG: actor# [1:7488008810019857239:2093] Handle TEvNavigate describe path dc-1 2025-03-31T16:18:31.384837Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008814314825041:2429] HANDLE EvNavigateScheme dc-1 2025-03-31T16:18:31.385014Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7488008810019857293:2119], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:18:31.385084Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7488008810019857293:2119], path# /dc-1, domainOwnerId# 72057594046644480 2025-03-31T16:18:31.385300Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008814314825042:2430][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-03-31T16:18:31.387410Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008810019856966:2049] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7488008814314825046:2430] 2025-03-31T16:18:31.387430Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008810019856969:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7488008814314825047:2430] 2025-03-31T16:18:31.387469Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7488008810019856966:2049] Subscribe: subscriber# [1:7488008814314825046:2430], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-31T16:18:31.387470Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7488008810019856969:2052] Subscribe: subscriber# [1:7488008814314825047:2430], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-31T16:18:31.387528Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008810019856972:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7488008814314825048:2430] 2025-03-31T16:18:31.387553Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008814314825046:2430][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7488008810019856966:2049] 2025-03-31T16:18:31.387567Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7488008810019856972:2055] Subscribe: subscriber# [1:7488008814314825048:2430], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-31T16:18:31.387604Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008814314825047:2430][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7488008810019856969:2052] 2025-03-31T16:18:31.387613Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008810019856966:2049] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7488008814314825046:2430] 2025-03-31T16:18:31.387655Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008814314825048:2430][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7488008810019856972:2055] 2025-03-31T16:18:31.387671Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008810019856969:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# 
[1:7488008814314825047:2430] 2025-03-31T16:18:31.387694Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008810019856972:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7488008814314825048:2430] 2025-03-31T16:18:31.387823Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008814314825042:2430][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7488008814314825043:2430] 2025-03-31T16:18:31.387867Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008814314825042:2430][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7488008814314825044:2430] 2025-03-31T16:18:31.387912Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7488008814314825042:2430][/dc-1] Set up state: owner# [1:7488008810019857293:2119], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-03-31T16:18:31.388020Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008814314825042:2430][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7488008814314825045:2430] 2025-03-31T16:18:31.388064Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7488008814314825042:2430][/dc-1] Path was already updated: owner# [1:7488008810019857293:2119], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-03-31T16:18:31.388102Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008814314825046:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488008814314825043:2430], cookie# 1 2025-03-31T16:18:31.388116Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008814314825047:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488008814314825044:2430], cookie# 1 2025-03-31T16:18:31.388129Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008814314825048:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488008814314825045:2430], cookie# 1 2025-03-31T16:18:31.388707Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008810019856966:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488008814314825046:2430], cookie# 1 2025-03-31T16:18:31.388746Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008810019856969:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488008814314825047:2430], cookie# 1 2025-03-31T16:18:31.388795Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008810019856972:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488008814314825048:2430], cookie# 1 2025-03-31T16:18:31.388831Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008814314825046:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488008810019856966:2049], cookie# 1 2025-03-31T16:18:31.388845Z node 1 :SCHEME_BOARD_SUBSCRIBER 
DEBUG: [replica][1:7488008814314825047:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488008810019856969:2052], cookie# 1 2025-03-31T16:18:31.388857Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008814314825048:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488008810019856972:2055], cookie# 1 2025-03-31T16:18:31.388900Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008814314825042:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488008814314825043:2430], cookie# 1 2025-03-31T16:18:31.388922Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008814314825042:2430][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-03-31T16:18:31.388939Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008814314825042:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488008814314825044:2430], cookie# 1 2025-03-31T16:18:31.388956Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008814314825042:2430][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-03-31T16:18:31.389001Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008814314825042:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488008814314825045:2430], cookie# 1 2025-03-31T16:18:31.389020Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008814314825042:2430][/dc-1] Unexpected sync response: sender# [1:7488008814314825045:2430], cookie# 1 2025-03-31T16:18:31.441248Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7488008810019857293:2119], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-03-31T16:18:31.441707Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7488008810019857293:2119], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 
CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVer ... HEME_CACHE DEBUG: ResolveCacheItem: self# [1:7488008810019857293:2119], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7488008814314825237:2581] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 2 } Filled: 1 Status: StatusSuccess Kind: 9 TableKind: 0 Created: 1 CreateStep: 1743437911649 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-31T16:18:32.154105Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7488008810019857293:2119], cacheItem# { Subscriber: { Subscriber: [1:7488008814314825237:2581] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 2 } Filled: 1 Status: StatusSuccess Kind: 9 TableKind: 0 Created: 1 CreateStep: 1743437911649 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 2 IsSync: true Partial: 0 } 2025-03-31T16:18:32.154347Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7488008818609792675:2667], recipient# [1:7488008818609792674:2666], result# { ErrorCount: 1 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [72057594046644480:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: RedirectLookupError Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-31T16:18:32.154383Z node 1 :TX_PROXY INFO: Actor# [1:7488008818609792674:2666] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 1 TClient::Ls response: Status: 128 StatusCode: ERROR Issues { message: "Default error" severity: 1 } SchemeStatus: 13 ErrorReason: "Could not resolve redirected path" TClient::Ls request: /dc-1 2025-03-31T16:18:32.157855Z node 1 :TX_PROXY DEBUG: actor# [1:7488008810019857239:2093] Handle TEvNavigate describe path /dc-1 2025-03-31T16:18:32.157908Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008818609792677:2669] HANDLE EvNavigateScheme /dc-1 2025-03-31T16:18:32.157985Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7488008810019857293:2119], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:18:32.158044Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008814314825042:2430][/dc-1] Handle 
NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7488008810019857293:2119], cookie# 4 2025-03-31T16:18:32.158092Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008814314825046:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488008814314825043:2430], cookie# 4 2025-03-31T16:18:32.158108Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008814314825047:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488008814314825044:2430], cookie# 4 2025-03-31T16:18:32.158123Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008814314825048:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488008814314825045:2430], cookie# 4 2025-03-31T16:18:32.158144Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008810019856966:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488008814314825046:2430], cookie# 4 2025-03-31T16:18:32.158166Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008810019856969:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488008814314825047:2430], cookie# 4 2025-03-31T16:18:32.158182Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008810019856972:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488008814314825048:2430], cookie# 4 2025-03-31T16:18:32.158204Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008814314825046:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7488008810019856966:2049], cookie# 4 2025-03-31T16:18:32.158219Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008814314825047:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7488008810019856969:2052], cookie# 4 2025-03-31T16:18:32.158233Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008814314825048:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7488008810019856972:2055], cookie# 4 2025-03-31T16:18:32.158257Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008814314825042:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7488008814314825043:2430], cookie# 4 2025-03-31T16:18:32.158272Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008814314825042:2430][/dc-1] Sync is in progress: cookie# 4, size# 3, half# 1, successes# 1, faulires# 0 2025-03-31T16:18:32.158308Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008814314825042:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7488008814314825044:2430], cookie# 4 2025-03-31T16:18:32.158324Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008814314825042:2430][/dc-1] Sync is done: cookie# 4, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-03-31T16:18:32.158344Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008814314825042:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7488008814314825045:2430], cookie# 4 2025-03-31T16:18:32.158354Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008814314825042:2430][/dc-1] Unexpected sync response: sender# [1:7488008814314825045:2430], cookie# 4 TClient::Ls response: 2025-03-31T16:18:32.158394Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7488008810019857293:2119], notify# 
NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-03-31T16:18:32.158457Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7488008810019857293:2119], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7488008814314825042:2430] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1743437911621 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-31T16:18:32.158534Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7488008810019857293:2119], cacheItem# { Subscriber: { Subscriber: [1:7488008814314825042:2430] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1743437911621 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 4 IsSync: true Partial: 0 } 2025-03-31T16:18:32.158657Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7488008818609792678:2670], recipient# [1:7488008818609792677:2669], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-31T16:18:32.158681Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008818609792677:2669] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-31T16:18:32.158783Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008818609792677:2669] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1" Options { ShowPrivateTable: true } 2025-03-31T16:18:32.159364Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008818609792677:2669] Handle TEvDescribeSchemeResult Forward to# [1:7488008818609792676:2668] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 63 Record# Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743437911621 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 
Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } StoragePools { Name: "" Kind: "storage-pool-number-1" } StoragePools { Name: "" Kind: "storage-pool-number-2" } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743437911621 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743437911649 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046... (TRUNCATED) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::DeclareAndDefineWithoutNodes-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: 2025-03-31T16:18:30.771931Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008809875044358:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:30.772016Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001b31/r3tmp/tmpud4kD5/pdisk_1.dat 2025-03-31T16:18:31.170652Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:31.180564Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:31.180679Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:31.186661Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15236 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-03-31T16:18:31.382429Z node 1 :TX_PROXY DEBUG: actor# [1:7488008809875044579:2093] Handle TEvNavigate describe path dc-1 2025-03-31T16:18:31.382498Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008814170012384:2429] HANDLE EvNavigateScheme dc-1 2025-03-31T16:18:31.382646Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7488008809875044634:2120], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:18:31.382698Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7488008809875044634:2120], path# /dc-1, domainOwnerId# 72057594046644480 2025-03-31T16:18:31.383041Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008814170012385:2430][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-03-31T16:18:31.385004Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008809875044306:2049] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7488008814170012389:2430] 2025-03-31T16:18:31.385017Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008809875044309:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7488008814170012390:2430] 2025-03-31T16:18:31.385077Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7488008809875044306:2049] Subscribe: subscriber# [1:7488008814170012389:2430], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-31T16:18:31.385103Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7488008809875044309:2052] Subscribe: subscriber# [1:7488008814170012390:2430], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-31T16:18:31.385141Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008809875044312:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7488008814170012391:2430] 2025-03-31T16:18:31.385162Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7488008809875044312:2055] Subscribe: subscriber# [1:7488008814170012391:2430], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-31T16:18:31.385217Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008814170012389:2430][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7488008809875044306:2049] 2025-03-31T16:18:31.385266Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008814170012390:2430][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7488008809875044309:2052] 2025-03-31T16:18:31.385320Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008814170012391:2430][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7488008809875044312:2055] 2025-03-31T16:18:31.385365Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008814170012385:2430][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7488008814170012386:2430] 2025-03-31T16:18:31.385398Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[main][1:7488008814170012385:2430][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7488008814170012387:2430] 2025-03-31T16:18:31.385452Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7488008814170012385:2430][/dc-1] Set up state: owner# [1:7488008809875044634:2120], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-03-31T16:18:31.385600Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008814170012385:2430][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7488008814170012388:2430] 2025-03-31T16:18:31.385644Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7488008814170012385:2430][/dc-1] Path was already updated: owner# [1:7488008809875044634:2120], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-03-31T16:18:31.385706Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008814170012389:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488008814170012386:2430], cookie# 1 2025-03-31T16:18:31.385718Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008814170012390:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488008814170012387:2430], cookie# 1 2025-03-31T16:18:31.385726Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008814170012391:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488008814170012388:2430], cookie# 1 2025-03-31T16:18:31.385756Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008809875044309:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7488008814170012390:2430] 2025-03-31T16:18:31.385776Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008809875044309:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488008814170012390:2430], cookie# 1 2025-03-31T16:18:31.385790Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008809875044312:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7488008814170012391:2430] 2025-03-31T16:18:31.385799Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008809875044312:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488008814170012391:2430], cookie# 1 2025-03-31T16:18:31.386027Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008809875044306:2049] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7488008814170012389:2430] 2025-03-31T16:18:31.386059Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008809875044306:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488008814170012389:2430], cookie# 1 2025-03-31T16:18:31.386093Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008814170012390:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488008809875044309:2052], cookie# 1 2025-03-31T16:18:31.386271Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008814170012391:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488008809875044312:2055], cookie# 1 2025-03-31T16:18:31.386324Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008814170012389:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488008809875044306:2049], cookie# 1 2025-03-31T16:18:31.386364Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008814170012385:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488008814170012387:2430], cookie# 1 2025-03-31T16:18:31.386384Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008814170012385:2430][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-03-31T16:18:31.386397Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008814170012385:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488008814170012388:2430], cookie# 1 2025-03-31T16:18:31.386424Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008814170012385:2430][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-03-31T16:18:31.386445Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008814170012385:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488008814170012386:2430], cookie# 1 2025-03-31T16:18:31.386465Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008814170012385:2430][/dc-1] Unexpected sync response: sender# [1:7488008814170012386:2430], cookie# 1 2025-03-31T16:18:31.453954Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7488008809875044634:2120], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-03-31T16:18:31.454341Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7488008809875044634:2120], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir 
CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVer ... HEME_CACHE DEBUG: ResolveCacheItem: self# [1:7488008809875044634:2120], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7488008814170012557:2562] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 2 } Filled: 1 Status: StatusSuccess Kind: 9 TableKind: 0 Created: 1 CreateStep: 1743437911649 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-31T16:18:32.155846Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7488008809875044634:2120], cacheItem# { Subscriber: { Subscriber: [1:7488008814170012557:2562] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 2 } Filled: 1 Status: StatusSuccess Kind: 9 TableKind: 0 Created: 1 CreateStep: 1743437911649 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 2 IsSync: true Partial: 0 } 2025-03-31T16:18:32.155988Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7488008818464980024:2673], recipient# [1:7488008818464980023:2672], result# { ErrorCount: 1 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [72057594046644480:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: RedirectLookupError Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-31T16:18:32.156010Z node 1 :TX_PROXY INFO: Actor# [1:7488008818464980023:2672] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 1 TClient::Ls response: Status: 128 StatusCode: ERROR Issues { message: "Default error" severity: 1 } SchemeStatus: 13 ErrorReason: "Could not resolve redirected path" TClient::Ls request: /dc-1 2025-03-31T16:18:32.159696Z node 1 :TX_PROXY DEBUG: actor# [1:7488008809875044579:2093] Handle TEvNavigate describe path /dc-1 2025-03-31T16:18:32.159726Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008818464980026:2675] HANDLE EvNavigateScheme /dc-1 2025-03-31T16:18:32.159807Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7488008809875044634:2120], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:18:32.159867Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008814170012385:2430][/dc-1] Handle 
NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7488008809875044634:2120], cookie# 4 2025-03-31T16:18:32.159960Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008814170012389:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488008814170012386:2430], cookie# 4 2025-03-31T16:18:32.159975Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008814170012390:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488008814170012387:2430], cookie# 4 2025-03-31T16:18:32.159987Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008814170012391:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488008814170012388:2430], cookie# 4 2025-03-31T16:18:32.160009Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008809875044306:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488008814170012389:2430], cookie# 4 2025-03-31T16:18:32.160036Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008809875044309:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488008814170012390:2430], cookie# 4 2025-03-31T16:18:32.160069Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008809875044312:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488008814170012391:2430], cookie# 4 2025-03-31T16:18:32.160099Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008814170012389:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7488008809875044306:2049], cookie# 4 2025-03-31T16:18:32.160132Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008814170012390:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7488008809875044309:2052], cookie# 4 2025-03-31T16:18:32.160151Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008814170012391:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7488008809875044312:2055], cookie# 4 2025-03-31T16:18:32.160173Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008814170012385:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7488008814170012386:2430], cookie# 4 2025-03-31T16:18:32.160189Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008814170012385:2430][/dc-1] Sync is in progress: cookie# 4, size# 3, half# 1, successes# 1, faulires# 0 2025-03-31T16:18:32.160202Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008814170012385:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7488008814170012387:2430], cookie# 4 2025-03-31T16:18:32.160215Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008814170012385:2430][/dc-1] Sync is done: cookie# 4, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-03-31T16:18:32.160232Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008814170012385:2430][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7488008814170012388:2430], cookie# 4 2025-03-31T16:18:32.160242Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008814170012385:2430][/dc-1] Unexpected sync response: sender# [1:7488008814170012388:2430], cookie# 4 2025-03-31T16:18:32.160294Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7488008809875044634:2120], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 
PathId: Partial: 0 } 2025-03-31T16:18:32.160349Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7488008809875044634:2120], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7488008814170012385:2430] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1743437911621 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-31T16:18:32.160500Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7488008809875044634:2120], cacheItem# { Subscriber: { Subscriber: [1:7488008814170012385:2430] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1743437911621 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 4 IsSync: true Partial: 0 } 2025-03-31T16:18:32.160619Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7488008818464980027:2676], recipient# [1:7488008818464980026:2675], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-31T16:18:32.160648Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008818464980026:2675] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-31T16:18:32.160715Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008818464980026:2675] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1" Options { ShowPrivateTable: true } 2025-03-31T16:18:32.161277Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008818464980026:2675] Handle TEvDescribeSchemeResult Forward to# [1:7488008818464980025:2674] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 63 Record# Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743437911621 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 
72057594046644480 PathId: 1 } StoragePools { Name: "" Kind: "storage-pool-number-1" } StoragePools { Name: "" Kind: "storage-pool-number-2" } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743437911621 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743437911649 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046... (TRUNCATED) |88.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::UpsertEvWriteQueryService-isOlap+useOltpSink >> KqpDataIntegrityTrails::UpsertEvWriteQueryService+isOlap-useOltpSink >> KqpDataIntegrityTrails::Upsert-LogEnabled+UseSink |88.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::BrokenReadLock-UseSink >> KqpDataIntegrityTrails::Upsert+LogEnabled+UseSink >> Cdc::NewAndOldImagesLog[TopicRunner] [GOOD] >> Cdc::NewAndOldImagesLogDebezium >> TExtSubDomainTest::GenericCases [GOOD] >> TExtSubDomainTest::DeclareAndDefineWithNodes-AlterDatabaseCreateHiveFirst-false [GOOD] >> TExtSubDomainTest::DeclareAndDefineWithNodes-AlterDatabaseCreateHiveFirst-true >> TExtSubDomainTest::CreateTableInsideAndLs-AlterDatabaseCreateHiveFirst-false [GOOD] >> TExtSubDomainTest::CreateTableInsideAndLs-AlterDatabaseCreateHiveFirst-true >> BackupRestoreS3::RestoreIndexTableSplitBoundaries [GOOD] >> BackupRestoreS3::RestoreIndexTableDecimalSplitBoundaries >> TExtSubDomainTest::DeclareAndAlterPools-AlterDatabaseCreateHiveFirst-false [GOOD] >> TExtSubDomainTest::DeclareAndAlterPools-AlterDatabaseCreateHiveFirst-true >> KqpDataIntegrityTrails::Upsert-LogEnabled-UseSink >> Cdc::HugeKey[PqRunner] [GOOD] >> Cdc::HugeKey[YdsRunner] |88.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::BrokenReadLock+UseSink |88.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |88.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |88.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |88.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::UpsertEvWriteQueryService+isOlap+useOltpSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::GenericCases [GOOD] Test command err: 
2025-03-31T16:18:30.820000Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008808414657753:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:30.820223Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001b54/r3tmp/tmpWDooSY/pdisk_1.dat 2025-03-31T16:18:31.179918Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:31.228083Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:31.228225Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:31.231519Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5108 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-03-31T16:18:31.380160Z node 1 :TX_PROXY DEBUG: actor# [1:7488008808414657984:2115] Handle TEvNavigate describe path dc-1 2025-03-31T16:18:31.380241Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008812709625757:2443] HANDLE EvNavigateScheme dc-1 2025-03-31T16:18:31.380358Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7488008808414658026:2130], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:18:31.380421Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7488008808414658026:2130], path# /dc-1, domainOwnerId# 72057594046644480 2025-03-31T16:18:31.380767Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008812709625758:2444][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-03-31T16:18:31.382414Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008808414657662:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7488008812709625762:2444] 2025-03-31T16:18:31.382442Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008808414657665:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7488008812709625763:2444] 2025-03-31T16:18:31.382460Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7488008808414657662:2050] Subscribe: subscriber# [1:7488008812709625762:2444], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-31T16:18:31.382508Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7488008808414657665:2053] Subscribe: subscriber# [1:7488008812709625763:2444], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-31T16:18:31.382563Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008808414657668:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7488008812709625764:2444] 2025-03-31T16:18:31.382589Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7488008808414657668:2056] Subscribe: subscriber# [1:7488008812709625764:2444], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-31T16:18:31.382599Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[replica][1:7488008812709625762:2444][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7488008808414657662:2050] 2025-03-31T16:18:31.382631Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008808414657662:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7488008812709625762:2444] 2025-03-31T16:18:31.382641Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008812709625763:2444][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7488008808414657665:2053] 2025-03-31T16:18:31.382652Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008808414657665:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7488008812709625763:2444] 2025-03-31T16:18:31.382669Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008812709625764:2444][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7488008808414657668:2056] 2025-03-31T16:18:31.382684Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008808414657668:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7488008812709625764:2444] 2025-03-31T16:18:31.382712Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008812709625758:2444][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7488008812709625759:2444] 2025-03-31T16:18:31.382741Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008812709625758:2444][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7488008812709625760:2444] 2025-03-31T16:18:31.382881Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7488008812709625758:2444][/dc-1] Set up state: owner# [1:7488008808414658026:2130], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-03-31T16:18:31.383055Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008812709625758:2444][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7488008812709625761:2444] 2025-03-31T16:18:31.383143Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7488008812709625758:2444][/dc-1] Path was already updated: owner# [1:7488008808414658026:2130], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-03-31T16:18:31.383235Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008812709625762:2444][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488008812709625759:2444], cookie# 1 2025-03-31T16:18:31.383263Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008812709625763:2444][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488008812709625760:2444], cookie# 1 2025-03-31T16:18:31.383280Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[replica][1:7488008812709625764:2444][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488008812709625761:2444], cookie# 1 2025-03-31T16:18:31.383336Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008808414657662:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488008812709625762:2444], cookie# 1 2025-03-31T16:18:31.383368Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008808414657665:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488008812709625763:2444], cookie# 1 2025-03-31T16:18:31.383385Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008808414657668:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488008812709625764:2444], cookie# 1 2025-03-31T16:18:31.383414Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008812709625762:2444][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488008808414657662:2050], cookie# 1 2025-03-31T16:18:31.383434Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008812709625763:2444][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488008808414657665:2053], cookie# 1 2025-03-31T16:18:31.383473Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008812709625764:2444][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488008808414657668:2056], cookie# 1 2025-03-31T16:18:31.383511Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008812709625758:2444][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488008812709625759:2444], cookie# 1 2025-03-31T16:18:31.383535Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008812709625758:2444][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-03-31T16:18:31.383553Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008812709625758:2444][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488008812709625760:2444], cookie# 1 2025-03-31T16:18:31.383580Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008812709625758:2444][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-03-31T16:18:31.383635Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008812709625758:2444][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488008812709625761:2444], cookie# 1 2025-03-31T16:18:31.383654Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008812709625758:2444][/dc-1] Unexpected sync response: sender# [1:7488008812709625761:2444], cookie# 1 2025-03-31T16:18:31.448599Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7488008808414658026:2130], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 
72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-03-31T16:18:31.449012Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7488008808414658026:2130], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVers ... pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:18:34.002111Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7488008825594528459:3021], recipient# [1:7488008821299561141:2338], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:18:34.003323Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008808414657662:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/running_requests DomainOwnerId: 72057594046644480 }: sender# [1:7488008825594528460:3019] 2025-03-31T16:18:34.003364Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7488008808414657662:2050] Upsert description: path# /dc-1/.metadata/workload_manager/running_requests 2025-03-31T16:18:34.003419Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7488008808414657662:2050] Subscribe: subscriber# [1:7488008825594528460:3019], path# /dc-1/.metadata/workload_manager/running_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-31T16:18:34.004100Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008825594528460:3019][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7488008808414657662:2050] 2025-03-31T16:18:34.004157Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008821299561143:3019][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7488008821299561151:3019] 2025-03-31T16:18:34.004216Z node 1 :SCHEME_BOARD_REPLICA DEBUG: 
[1:7488008808414657665:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/running_requests DomainOwnerId: 72057594046644480 }: sender# [1:7488008825594528461:3019] 2025-03-31T16:18:34.004229Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7488008808414657665:2053] Upsert description: path# /dc-1/.metadata/workload_manager/running_requests 2025-03-31T16:18:34.004270Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7488008808414657665:2053] Subscribe: subscriber# [1:7488008825594528461:3019], path# /dc-1/.metadata/workload_manager/running_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-31T16:18:34.004305Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008808414657668:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/running_requests DomainOwnerId: 72057594046644480 }: sender# [1:7488008825594528462:3019] 2025-03-31T16:18:34.004314Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7488008808414657668:2056] Upsert description: path# /dc-1/.metadata/workload_manager/running_requests 2025-03-31T16:18:34.004335Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7488008808414657668:2056] Subscribe: subscriber# [1:7488008825594528462:3019], path# /dc-1/.metadata/workload_manager/running_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-31T16:18:34.004360Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008808414657662:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7488008825594528460:3019] 2025-03-31T16:18:34.004414Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008825594528461:3019][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7488008808414657665:2053] 2025-03-31T16:18:34.004438Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008825594528462:3019][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7488008808414657668:2056] 2025-03-31T16:18:34.004464Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008821299561143:3019][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7488008821299561152:3019] 2025-03-31T16:18:34.004513Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7488008821299561143:3019][/dc-1/.metadata/workload_manager/running_requests] Set up state: owner# [1:7488008808414658026:2130], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-31T16:18:34.004548Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008821299561143:3019][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7488008821299561153:3019] 2025-03-31T16:18:34.004568Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7488008821299561143:3019][/dc-1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [1:7488008808414658026:2130], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-31T16:18:34.004586Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008808414657665:2053] Handle 
NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7488008825594528461:3019] 2025-03-31T16:18:34.004600Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008808414657668:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7488008825594528462:3019] 2025-03-31T16:18:34.004641Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7488008808414658026:2130], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 } 2025-03-31T16:18:34.004699Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7488008808414658026:2130], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [1:7488008821299561143:3019] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-31T16:18:34.004770Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7488008808414658026:2130], cacheItem# { Subscriber: { Subscriber: [1:7488008821299561143:3019] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:18:34.004854Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7488008825594528463:3022], recipient# [1:7488008821299561140:2337], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:18:34.829026Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7488008808414658026:2130], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:18:34.829137Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7488008808414658026:2130], cacheItem# { Subscriber: { Subscriber: [1:7488008812709626159:2757] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: 
[18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:18:34.829208Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7488008825594528477:3023], recipient# [1:7488008825594528476:2339], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:18:35.007398Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7488008808414658026:2130], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:18:35.007519Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7488008808414658026:2130], cacheItem# { Subscriber: { Subscriber: [1:7488008821299561144:3020] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:18:35.007597Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7488008829889495781:3027], recipient# [1:7488008829889495780:2340], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> YdbOlapStore::LogPagingBefore [GOOD] >> YdbOlapStore::LogPagingBetween >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeExternalTable [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeExternalDataSource |88.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Ddl [GOOD] >> KqpDataIntegrityTrails::Upsert+LogEnabled-UseSink [GOOD] >> KqpDataIntegrityTrails::Select [GOOD] >> TExtSubDomainTest::CreateTableInsideThenStopTenantAndForceDeleteSubDomain-AlterDatabaseCreateHiveFirst-false [GOOD] >> TExtSubDomainTest::CreateTableInsideThenStopTenantAndForceDeleteSubDomain-AlterDatabaseCreateHiveFirst-true >> KqpDataIntegrityTrails::UpsertEvWriteQueryService-isOlap-useOltpSink [GOOD] >> ColumnStatistics::CountMinSketchServerlessStatistics [GOOD] >> KqpDataIntegrityTrails::BrokenReadLockAbortedTx [GOOD] >> KqpScripting::StreamExecuteYqlScriptWriteCancelAfterBruteForced >> KqpScripting::StreamExecuteYqlScriptScanClientTimeoutBruteForce 
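The scheme board sync exchange near the start of this stretch (the "Sync is in progress" / "Sync is done" lines) shows the quorum rule the subscriber applies: with size# 3 replica subscribers and half# 1, the round completes as soon as successes exceed half, and the late third response is dropped as "Unexpected sync response". Below is a minimal standalone sketch of that check; TSyncRound and its members are illustrative names, not YDB's actual classes.

    #include <cstddef>
    #include <iostream>

    // Hedged sketch of the majority check behind "Sync is in progress: ...,
    // size# 3, half# 1, successes# 1" and "Sync is done: ..., successes# 2".
    struct TSyncRound {
        std::size_t Size = 3;       // replica subscribers polled this round
        std::size_t Successes = 0;  // TEvSyncVersionResponse received so far
        std::size_t Failures = 0;   // replicas that failed to respond

        std::size_t Half() const { return Size / 2; }     // half# 1 when size# 3
        bool Done() const { return Successes > Half(); }  // majority answered
        bool Failed() const { return Failures > Half(); } // majority unreachable
    };

    int main() {
        TSyncRound round;
        round.Successes = 1;                // first response arrives
        std::cout << round.Done() << '\n';  // 0: still in progress
        round.Successes = 2;                // second response arrives
        std::cout << round.Done() << '\n';  // 1: quorum of 2 out of 3 reached
    }

Any response arriving after Done() flips to true corresponds to the "Unexpected sync response" line: the round is already resolved, so the extra acknowledgement is logged and discarded.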
>> KqpScripting::ScriptValidate >> KqpScripting::StreamExecuteYqlScriptOperationTmeoutBruteForce >> KqpYql::EvaluateExprPgNull >> TExtSubDomainTest::CreateTableInsideAndAlterDomainAndTable-AlterDatabaseCreateHiveFirst-false [GOOD] >> KqpPragma::ResetPerQuery >> TExtSubDomainTest::CreateTableInsideAndAlterDomainAndTable-AlterDatabaseCreateHiveFirst-true >> KqpYql::BinaryJsonOffsetNormal >> KqpScripting::ScanQuery >> Cdc::VirtualTimestamps[YdsRunner] [GOOD] >> Cdc::VirtualTimestamps[TopicRunner] >> KqpScripting::StreamExecuteYqlScriptData >> KqpPragma::OrderedColumns >> KqpScripting::StreamExecuteYqlScriptSeveralQueries >> KqpScripting::ExecuteYqlScriptScanScalar >> KqpYql::TableUseBeforeCreate >> KqpScripting::LimitOnShard >> TExtSubDomainTest::DeclareAndDefineWithNodes-AlterDatabaseCreateHiveFirst-true [GOOD] >> YdbOlapStore::LogWithUnionAllAscending [GOOD] >> YdbOlapStore::LogWithUnionAllDescending >> KqpDataIntegrityTrails::UpsertEvWriteQueryService-isOlap+useOltpSink [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Upsert+LogEnabled-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 61847, MsgBus: 10719 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000f2b/r3tmp/tmpg8KKGh/pdisk_1.dat TServer::EnableGrpc on GrpcPort 61847, node 1 TClient is connected to server localhost:10719 TClient is connected to server localhost:10719 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Ddl [GOOD] Test command err: Trying to start YDB, gRPC: 7339, MsgBus: 19025 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000f7f/r3tmp/tmp90Hecf/pdisk_1.dat TServer::EnableGrpc on GrpcPort 7339, node 1 TClient is connected to server localhost:19025 TClient is connected to server localhost:19025 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... >> KqpDataIntegrityTrails::Upsert-LogEnabled+UseSink [GOOD] >> KqpDataIntegrityTrails::BrokenReadLock-UseSink [GOOD] >> TExtSubDomainTest::CreateTableInsideAndLs-AlterDatabaseCreateHiveFirst-true [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Select [GOOD] Test command err: Trying to start YDB, gRPC: 62760, MsgBus: 10571 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ee0/r3tmp/tmpRGj748/pdisk_1.dat TServer::EnableGrpc on GrpcPort 62760, node 1 TClient is connected to server localhost:10571 TClient is connected to server localhost:10571 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::BrokenReadLockAbortedTx [GOOD] Test command err: Trying to start YDB, gRPC: 62189, MsgBus: 25908 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000f84/r3tmp/tmpTdH2wo/pdisk_1.dat TServer::EnableGrpc on GrpcPort 62189, node 1 TClient is connected to server localhost:25908 TClient is connected to server localhost:25908 WaitRootIsUp 'Root'... 
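Each of these unit tests boots a private YDB instance and blocks until the scheme root answers, which is what the repeated "WaitRootIsUp 'Root'..." / "WaitRootIsUp 'Root' success." pairs record. A hedged, self-contained version of that polling loop follows; the describe() callback stands in for the harness's TClient::Ls call, and none of this is YDB's actual test library.

    #include <chrono>
    #include <functional>
    #include <iostream>
    #include <thread>

    // Illustrative stand-in for the harness's WaitRootIsUp: describe the root
    // path until it reports success or the deadline passes.
    bool WaitRootIsUp(const std::function<bool()>& describe,
                      std::chrono::milliseconds timeout) {
        const auto deadline = std::chrono::steady_clock::now() + timeout;
        while (std::chrono::steady_clock::now() < deadline) {
            if (describe()) {
                return true;  // "WaitRootIsUp 'Root' success."
            }
            std::this_thread::sleep_for(std::chrono::milliseconds(100));
        }
        return false;  // root never came up within the timeout
    }

    int main() {
        int attempts = 0;
        const bool up = WaitRootIsUp([&] { return ++attempts >= 3; },
                                     std::chrono::seconds(5));
        std::cout << (up ? "success" : "timeout")
                  << " after " << attempts << " attempts\n";
    }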
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... >> KqpDataIntegrityTrails::Upsert+LogEnabled+UseSink [GOOD] >> Cdc::NewAndOldImagesLogDebezium [GOOD] >> Cdc::OldImageLogDebezium ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::UpsertEvWriteQueryService-isOlap-useOltpSink [GOOD] Test command err: Trying to start YDB, gRPC: 7267, MsgBus: 4355 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000f4e/r3tmp/tmpBF8KFH/pdisk_1.dat TServer::EnableGrpc on GrpcPort 7267, node 1 TClient is connected to server localhost:4355 TClient is connected to server localhost:4355 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... >> TExtSubDomainTest::DeclareAndAlterPools-AlterDatabaseCreateHiveFirst-true [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> ColumnStatistics::CountMinSketchServerlessStatistics [GOOD] Test command err: 2025-03-31T16:15:01.263688Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:605:2415], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:01.264114Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:01.264338Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d34/r3tmp/tmpnf3Qls/pdisk_1.dat 2025-03-31T16:15:02.097018Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12146, node 1 2025-03-31T16:15:02.629456Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:15:02.629539Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:15:02.629574Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:15:02.630097Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:15:02.636996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:15:02.751891Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:15:02.752043Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:15:02.769665Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26952 2025-03-31T16:15:03.427251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:15:07.033906Z node 4 :STATISTICS INFO: Subscribed for config changes on node 4 2025-03-31T16:15:07.107914Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:15:07.108037Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:15:07.149999Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-03-31T16:15:07.158364Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:15:07.472429Z node 4 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:07.475221Z node 4 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:07.475837Z node 4 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:07.475971Z node 4 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:07.476195Z node 4 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:07.476289Z node 4 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:07.476404Z node 4 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:07.476483Z node 4 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:07.476558Z node 4 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:07.704531Z node 4 :HIVE WARN: HIVE#72075186224037888 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:15:07.704639Z node 4 :HIVE WARN: HIVE#72075186224037888 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:15:07.735959Z node 4 :HIVE WARN: HIVE#72075186224037888 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:15:07.905953Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:15:08.012260Z node 4 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-31T16:15:08.012345Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-31T16:15:08.048355Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-31T16:15:08.051072Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-31T16:15:08.051327Z node 4 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-31T16:15:08.051381Z node 4 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-31T16:15:08.051433Z node 4 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-31T16:15:08.051483Z node 4 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-31T16:15:08.051533Z node 4 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-31T16:15:08.051585Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-31T16:15:08.059661Z node 4 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-31T16:15:08.124221Z node 4 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-31T16:15:08.124334Z node 4 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [4:2038:2601], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-31T16:15:08.135106Z node 4 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [4:2049:2609] 2025-03-31T16:15:08.151811Z node 4 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [4:2081:2625] 2025-03-31T16:15:08.152287Z node 4 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [4:2081:2625], schemeshard id = 72075186224037897 2025-03-31T16:15:08.154331Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-03-31T16:15:08.186404Z node 4 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-31T16:15:08.186473Z node 4 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-31T16:15:08.186551Z node 4 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-03-31T16:15:08.226209Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-31T16:15:08.240536Z node 4 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-31T16:15:08.240694Z node 4 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-31T16:15:08.509917Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-31T16:15:08.780212Z node 4 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-31T16:15:08.903228Z node 4 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-31T16:15:09.694959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-31T16:15:13.330382Z node 3 :STATISTICS INFO: Subscribed for config changes on node 3 2025-03-31T16:15:13.385033Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:15:13.385163Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:15:13.385540Z node 4 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:15:13.385610Z node 4 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:15:13.426973Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-03-31T16:15:13.429917Z node 4 :HIVE WARN: HIVE#72075186224037888 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-03-31T16:15:13.430839Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:15:13.433394Z node 4 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:15:13.557107Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:15:13.754149Z node 4 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2025-03-31T16:15:13.754226Z node 4 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037899 2025-03-31T16:15:13.754328Z node 4 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [4:3109:2955], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037899 2025-03-31T16:15:13.757727Z node 4 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [4:3110:2956] 2025-03-31T16:15:13.758470Z node 4 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [4:3110:2956], schemeshard id = 72075186224037899 2025-03-31T16:15:15.029279Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-31T16:15:18.834902Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-31T16:15:18.886524Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:15:18.886654Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:15:18.886860Z node 4 :HIVE WARN: HIVE#72075186224037888 
Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:15:18.886919Z node 4 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:15:18.904099Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:15:18.906271Z node 4 :HIVE WARN: HIVE#72075186224037888 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:15:18.906858Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:15:18.909344Z node 4 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:15:18.998116Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:15:19.232272Z node 4 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037905 2025-03-31T16:15:19.232344Z node 4 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037905 2025-03-31T16:15:19.232455Z node 4 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [4:3948:3156], at schemeshard: 72075186224037905, StatisticsAggregatorId: 720751862 ... zkwNA==, TxId: 2025-03-31T16:18:32.285906Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-31T16:18:32.302568Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-03-31T16:18:32.302646Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-03-31T16:18:32.336917Z node 4 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-03-31T16:18:32.337005Z node 4 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-03-31T16:18:32.393561Z node 4 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [4:11601:7147], schemeshard count = 1 2025-03-31T16:18:34.060156Z node 4 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037899 2025-03-31T16:18:34.060239Z node 4 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 220.000000s, at schemeshard: 72075186224037899 2025-03-31T16:18:34.060613Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037899, stats size# 28 2025-03-31T16:18:34.081097Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-03-31T16:18:35.127880Z node 4 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:18:35.127955Z node 4 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-31T16:18:35.127997Z node 4 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 2] is column table. 
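The traversal rounds logged here feed ColumnStatistics::CountMinSketchServerlessStatistics, whose name points at the structure collected per column: a count-min sketch, later written back through the UPSERT INTO `.metadata/_statistics` query visible in the RunDataQuery lines below. For orientation, here is a generic textbook implementation of that structure; YDB's internal one is not shown in this log.

    #include <algorithm>
    #include <cstdint>
    #include <functional>
    #include <iostream>
    #include <string>
    #include <vector>

    // Textbook count-min sketch: a Depth x Width grid of counters. Add bumps
    // one cell per row; Estimate takes the minimum across rows and therefore
    // never undercounts. Generic illustration, not YDB's implementation.
    class TCountMinSketch {
    public:
        TCountMinSketch(std::size_t width, std::size_t depth)
            : Width(width), Depth(depth), Cells(width * depth, 0) {}

        void Add(const std::string& key) {
            for (std::size_t row = 0; row < Depth; ++row) {
                ++Cells[row * Width + Index(key, row)];
            }
        }

        std::uint64_t Estimate(const std::string& key) const {
            std::uint64_t best = UINT64_MAX;
            for (std::size_t row = 0; row < Depth; ++row) {
                best = std::min(best, Cells[row * Width + Index(key, row)]);
            }
            return best;
        }

    private:
        std::size_t Index(const std::string& key, std::size_t row) const {
            // One seeded hash per row; production code would use
            // pairwise-independent hash functions instead.
            return std::hash<std::string>{}(key + '#' + std::to_string(row)) % Width;
        }

        std::size_t Width;
        std::size_t Depth;
        std::vector<std::uint64_t> Cells;
    };

    int main() {
        TCountMinSketch sketch(/*width=*/256, /*depth=*/4);
        for (int i = 0; i < 10; ++i) sketch.Add("dc-1");
        std::cout << sketch.Estimate("dc-1") << '\n';  // prints at least 10
    }

The two-element $column_tags/$data lists in the RunDataQuery statements below appear to persist one serialized sketch per traversed column under (owner_id, local_path_id, stat_type, column_tag).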
2025-03-31T16:18:35.128055Z node 4 :STATISTICS DEBUG: [72075186224037894] Start schedule traversal navigate for path [OwnerId: 72075186224037899, LocalPathId: 2] 2025-03-31T16:18:35.133528Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-31T16:18:35.169380Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-31T16:18:35.170227Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-31T16:18:35.170325Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-31T16:18:35.172251Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-03-31T16:18:35.188092Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-03-31T16:18:35.188337Z node 4 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 4, Round: 2, current Round: 0 2025-03-31T16:18:35.189278Z node 4 :STATISTICS DEBUG: EvClientConnected, node id = 4, client id = [4:11764:7243], server id = [4:11765:7244], tablet id = 72075186224037911, status = OK 2025-03-31T16:18:35.189722Z node 4 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [4:11764:7243], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-03-31T16:18:35.196067Z node 4 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037911 2025-03-31T16:18:35.196228Z node 4 :STATISTICS DEBUG: Send aggregate statistics response to node: 4 2025-03-31T16:18:35.196467Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-03-31T16:18:35.196711Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-03-31T16:18:35.197232Z node 4 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2025-03-31T16:18:35.199662Z node 4 :STATISTICS DEBUG: EvClientDestroyed, node id = 4, client id = [4:11764:7243], server id = [4:11765:7244], tablet id = 72075186224037911 2025-03-31T16:18:35.199722Z node 4 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:18:35.200577Z node 4 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-03-31T16:18:35.241188Z node 4 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [4:11785:7263]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:18:35.241394Z node 4 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-31T16:18:35.241431Z node 4 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [4:11785:7263], StatRequests.size() = 1 2025-03-31T16:18:35.358284Z node 4 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=4&id=OTNlYTc2YjEtOTY5YTMxYjUtMTlmZmY1YjQtYWY3ZDg4ZmQ=, TxId: 2025-03-31T16:18:35.358366Z node 4 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=4&id=OTNlYTc2YjEtOTY5YTMxYjUtMTlmZmY1YjQtYWY3ZDg4ZmQ=, TxId: 2025-03-31T16:18:35.359282Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-31T16:18:35.379017Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path 
[OwnerId: 72075186224037899, LocalPathId: 2] 2025-03-31T16:18:35.379104Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-03-31T16:18:36.040247Z node 4 :STATISTICS DEBUG: Event round 2 is different from the current 0 2025-03-31T16:18:36.040371Z node 4 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-03-31T16:18:36.560868Z node 4 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037905 2025-03-31T16:18:36.560943Z node 4 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 227.000000s, at schemeshard: 72075186224037905 2025-03-31T16:18:36.561378Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037905, stats size# 28 2025-03-31T16:18:36.576817Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-03-31T16:18:38.444267Z node 4 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:18:38.444363Z node 4 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-31T16:18:38.444413Z node 4 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037905, LocalPathId: 2] is column table. 2025-03-31T16:18:38.444453Z node 4 :STATISTICS DEBUG: [72075186224037894] Start schedule traversal navigate for path [OwnerId: 72075186224037905, LocalPathId: 2] 2025-03-31T16:18:38.453681Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-31T16:18:38.474242Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-31T16:18:38.474887Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-31T16:18:38.474961Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-31T16:18:38.475665Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-03-31T16:18:38.502030Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-03-31T16:18:38.502398Z node 4 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 4, Round: 3, current Round: 0 2025-03-31T16:18:38.503349Z node 4 :STATISTICS DEBUG: EvClientConnected, node id = 4, client id = [4:11935:7337], server id = [4:11936:7338], tablet id = 72075186224037912, status = OK 2025-03-31T16:18:38.503458Z node 4 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [4:11935:7337], path = { OwnerId: 72075186224037905 LocalId: 2 } 2025-03-31T16:18:38.507902Z node 4 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037912 2025-03-31T16:18:38.508014Z node 4 :STATISTICS DEBUG: Send aggregate statistics response to node: 4 2025-03-31T16:18:38.508189Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-03-31T16:18:38.508429Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-03-31T16:18:38.508872Z node 4 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Shared 2025-03-31T16:18:38.511337Z node 4 :STATISTICS DEBUG: EvClientDestroyed, node id = 4, client id = [4:11935:7337], server id = [4:11936:7338], tablet id = 72075186224037912 2025-03-31T16:18:38.511396Z node 4 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:18:38.512331Z node 4 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-03-31T16:18:38.539014Z node 4 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=4&id=OTRhODE0MzctYjFlZmM1NzgtMWVlZWFmNTEtYjdmMmJiZmI=, TxId: 2025-03-31T16:18:38.539109Z node 4 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=4&id=OTRhODE0MzctYjFlZmM1NzgtMWVlZWFmNTEtYjdmMmJiZmI=, TxId: 2025-03-31T16:18:38.540491Z node 4 :SYSTEM_VIEWS WARN: [72075186224037891] TEvIntervalQuerySummary, time mismath: node id# 4, interval end# 1970-01-01T00:02:05.000000Z, event interval end# 2025-03-31T16:18:36.000000Z 2025-03-31T16:18:38.540746Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-31T16:18:38.542102Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:11959:6020]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-03-31T16:18:38.542528Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-31T16:18:38.542593Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-03-31T16:18:38.546054Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-31T16:18:38.546131Z node 1 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2025-03-31T16:18:38.546186Z node 1 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 2] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-03-31T16:18:38.563478Z node 1 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 1 2025-03-31T16:18:38.564947Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:11959:6020]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-03-31T16:18:38.565310Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-31T16:18:38.565393Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-03-31T16:18:38.565789Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-31T16:18:38.565850Z node 1 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 2 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2025-03-31T16:18:38.565914Z node 1 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037905, LocalPathId: 2] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-03-31T16:18:38.570820Z node 1 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 2 ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::DeclareAndDefineWithNodes-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: 2025-03-31T16:18:30.845946Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008809625462149:2077];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:30.846808Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001b30/r3tmp/tmpCrIdYW/pdisk_1.dat 2025-03-31T16:18:31.207307Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:31.237423Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:31.237534Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:31.240123Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12138 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-03-31T16:18:31.442636Z node 1 :TX_PROXY DEBUG: actor# [1:7488008809625462370:2115] Handle TEvNavigate describe path dc-1 2025-03-31T16:18:31.442723Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008813920430133:2433] HANDLE EvNavigateScheme dc-1 2025-03-31T16:18:31.442848Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7488008813920429742:2153], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:18:31.442896Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7488008813920429742:2153], path# /dc-1, domainOwnerId# 72057594046644480 2025-03-31T16:18:31.443046Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008813920430134:2434][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-03-31T16:18:31.444619Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008809625462052:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7488008813920430139:2434] 2025-03-31T16:18:31.444620Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008809625462049:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7488008813920430138:2434] 2025-03-31T16:18:31.444665Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7488008809625462049:2050] Subscribe: subscriber# [1:7488008813920430138:2434], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-31T16:18:31.444684Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7488008809625462052:2053] Subscribe: subscriber# [1:7488008813920430139:2434], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-31T16:18:31.444728Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008809625462055:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7488008813920430140:2434] 2025-03-31T16:18:31.444747Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7488008809625462055:2056] Subscribe: subscriber# [1:7488008813920430140:2434], path# /dc-1, domainOwnerId# 
72057594046644480, capabilities# AckNotifications: true 2025-03-31T16:18:31.444759Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008813920430138:2434][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7488008809625462049:2050] 2025-03-31T16:18:31.444788Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008809625462049:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7488008813920430138:2434] 2025-03-31T16:18:31.444836Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008813920430139:2434][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7488008809625462052:2053] 2025-03-31T16:18:31.444867Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008813920430140:2434][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7488008809625462055:2056] 2025-03-31T16:18:31.444900Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008813920430134:2434][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7488008813920430135:2434] 2025-03-31T16:18:31.444893Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008809625462052:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7488008813920430139:2434] 2025-03-31T16:18:31.444924Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008809625462055:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7488008813920430140:2434] 2025-03-31T16:18:31.444952Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008813920430134:2434][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7488008813920430136:2434] 2025-03-31T16:18:31.445002Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7488008813920430134:2434][/dc-1] Set up state: owner# [1:7488008813920429742:2153], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-03-31T16:18:31.445091Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008813920430134:2434][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7488008813920430137:2434] 2025-03-31T16:18:31.445131Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7488008813920430134:2434][/dc-1] Path was already updated: owner# [1:7488008813920429742:2153], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-03-31T16:18:31.445172Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008813920430138:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488008813920430135:2434], cookie# 1 2025-03-31T16:18:31.445215Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008813920430139:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: 
/dc-1 }: sender# [1:7488008813920430136:2434], cookie# 1 2025-03-31T16:18:31.445238Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008813920430140:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488008813920430137:2434], cookie# 1 2025-03-31T16:18:31.445274Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008809625462049:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488008813920430138:2434], cookie# 1 2025-03-31T16:18:31.445289Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008809625462052:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488008813920430139:2434], cookie# 1 2025-03-31T16:18:31.445298Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008809625462055:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488008813920430140:2434], cookie# 1 2025-03-31T16:18:31.445315Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008813920430138:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488008809625462049:2050], cookie# 1 2025-03-31T16:18:31.445325Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008813920430139:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488008809625462052:2053], cookie# 1 2025-03-31T16:18:31.445335Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008813920430140:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488008809625462055:2056], cookie# 1 2025-03-31T16:18:31.445365Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008813920430134:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488008813920430135:2434], cookie# 1 2025-03-31T16:18:31.445381Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008813920430134:2434][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-03-31T16:18:31.445401Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008813920430134:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488008813920430136:2434], cookie# 1 2025-03-31T16:18:31.445417Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008813920430134:2434][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-03-31T16:18:31.445442Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008813920430134:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488008813920430137:2434], cookie# 1 2025-03-31T16:18:31.445453Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008813920430134:2434][/dc-1] Unexpected sync response: sender# [1:7488008813920430137:2434], cookie# 1 2025-03-31T16:18:31.500611Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7488008813920429742:2153], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 
SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-03-31T16:18:31.501071Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7488008813920429742:2153], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVer ... Cookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:18:39.248930Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7488008833047875185:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/delayed_requests DomainOwnerId: 72057594046644480 }: sender# [3:7488008845932778378:2792] 2025-03-31T16:18:39.248940Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7488008833047875185:2050] Upsert description: path# /dc-1/.metadata/workload_manager/delayed_requests 2025-03-31T16:18:39.248962Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7488008845932778373:2793][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-03-31T16:18:39.249008Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7488008833047875185:2050] Subscribe: subscriber# [3:7488008845932778378:2792], path# /dc-1/.metadata/workload_manager/delayed_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-31T16:18:39.249056Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7488008845932778378:2792][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [3:7488008833047875185:2050] 2025-03-31T16:18:39.249089Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7488008845932778372:2792][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [3:7488008845932778375:2792] 2025-03-31T16:18:39.249118Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: 
[main][3:7488008845932778372:2792][/dc-1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [3:7488008833047875560:2139], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-31T16:18:39.249136Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7488008833047875185:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7488008845932778378:2792] 2025-03-31T16:18:39.249299Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7488008833047875185:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/running_requests DomainOwnerId: 72057594046644480 }: sender# [3:7488008845932778384:2793] 2025-03-31T16:18:39.249315Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7488008833047875185:2050] Upsert description: path# /dc-1/.metadata/workload_manager/running_requests 2025-03-31T16:18:39.249356Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7488008833047875185:2050] Subscribe: subscriber# [3:7488008845932778384:2793], path# /dc-1/.metadata/workload_manager/running_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-31T16:18:39.249401Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7488008833047875188:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/running_requests DomainOwnerId: 72057594046644480 }: sender# [3:7488008845932778385:2793] 2025-03-31T16:18:39.249411Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7488008833047875188:2053] Upsert description: path# /dc-1/.metadata/workload_manager/running_requests 2025-03-31T16:18:39.249426Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7488008845932778384:2793][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7488008833047875185:2050] 2025-03-31T16:18:39.249438Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7488008833047875188:2053] Subscribe: subscriber# [3:7488008845932778385:2793], path# /dc-1/.metadata/workload_manager/running_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-31T16:18:39.249477Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7488008833047875191:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/running_requests DomainOwnerId: 72057594046644480 }: sender# [3:7488008845932778386:2793] 2025-03-31T16:18:39.249493Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7488008833047875191:2056] Upsert description: path# /dc-1/.metadata/workload_manager/running_requests 2025-03-31T16:18:39.249497Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7488008845932778373:2793][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7488008845932778381:2793] 2025-03-31T16:18:39.249517Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7488008833047875191:2056] Subscribe: subscriber# [3:7488008845932778386:2793], path# /dc-1/.metadata/workload_manager/running_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-31T16:18:39.249560Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7488008845932778385:2793][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7488008833047875188:2053] 
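The "Sync is in progress" / "Sync is done" lines earlier in this trace show the subscriber's quorum rule: with size# 3 replica proxies and half# 1, the sync for a given cookie completes as soon as successes exceed half (two of three), and any response that arrives after completion is dropped with "Unexpected sync response". A minimal standalone sketch of that counting logic follows; the types and names here are illustrative only, not the actual NKikimr scheme board classes.

    #include <cstdint>
    #include <cstdio>

    // Illustrative quorum tracker matching the fields printed in the trace:
    // size# replicas, half# = size / 2, sync done once successes > half.
    struct TSyncState {
        uint64_t Cookie = 0;
        uint32_t Size = 3;
        uint32_t Successes = 0;
        uint32_t Failures = 0;
        bool Done = false;

        uint32_t Half() const { return Size / 2; }  // half# 1 for size# 3

        // Returns true when this response completes the sync.
        bool HandleResponse(bool ok) {
            if (Done) {
                // Late answers are logged and ignored, as in the trace.
                std::printf("Unexpected sync response: cookie# %llu\n",
                            (unsigned long long)Cookie);
                return false;
            }
            if (ok) ++Successes; else ++Failures;
            if (Successes > Half()) {  // a strict majority (2 of 3) suffices
                Done = true;
                std::printf("Sync is done: cookie# %llu, size# %u, half# %u, "
                            "successes# %u, failures# %u\n",
                            (unsigned long long)Cookie, Size, Half(),
                            Successes, Failures);
                return true;
            }
            std::printf("Sync is in progress: cookie# %llu, size# %u, half# %u, "
                        "successes# %u, failures# %u\n",
                        (unsigned long long)Cookie, Size, Half(),
                        Successes, Failures);
            return false;
        }
    };

    int main() {
        TSyncState sync;
        sync.Cookie = 1;
        sync.HandleResponse(true);  // first replica answers -> in progress
        sync.HandleResponse(true);  // second answers -> majority reached, done
        sync.HandleResponse(true);  // third answers late -> unexpected, dropped
        return 0;
    }

Run on its own, the three HandleResponse calls reproduce the in-progress / done / unexpected sequence recorded for cookie# 1 above.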
2025-03-31T16:18:39.249568Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7488008833047875185:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7488008845932778384:2793] 2025-03-31T16:18:39.249601Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7488008845932778386:2793][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7488008833047875191:2056] 2025-03-31T16:18:39.249618Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7488008833047875188:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7488008845932778385:2793] 2025-03-31T16:18:39.249649Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7488008833047875191:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7488008845932778386:2793] 2025-03-31T16:18:39.249667Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7488008845932778373:2793][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7488008845932778382:2793] 2025-03-31T16:18:39.249745Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][3:7488008845932778373:2793][/dc-1/.metadata/workload_manager/running_requests] Set up state: owner# [3:7488008833047875560:2139], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-31T16:18:39.249813Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7488008845932778373:2793][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7488008845932778383:2793] 2025-03-31T16:18:39.249817Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [3:7488008833047875560:2139], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 } 2025-03-31T16:18:39.249929Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [3:7488008833047875560:2139], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7488008845932778373:2793] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-31T16:18:39.249997Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7488008833047875560:2139], cacheItem# { Subscriber: { Subscriber: [3:7488008845932778373:2793] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:18:39.250074Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: [main][3:7488008845932778373:2793][/dc-1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [3:7488008833047875560:2139], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 
elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-31T16:18:39.250103Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7488008845932778387:2795], recipient# [3:7488008845932778371:2320], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:18:39.391582Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7488008833047875560:2139], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:18:39.391709Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7488008833047875560:2139], cacheItem# { Subscriber: { Subscriber: [3:7488008837342843708:2776] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:18:39.391824Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7488008845932778389:2796], recipient# [3:7488008845932778388:2321], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::UpsertEvWriteQueryService-isOlap+useOltpSink [GOOD] Test command err: Trying to start YDB, gRPC: 5880, MsgBus: 2913 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ecc/r3tmp/tmpnEh57o/pdisk_1.dat TServer::EnableGrpc on GrpcPort 5880, node 1 TClient is connected to server localhost:2913 TClient is connected to server localhost:2913 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... >> KqpDataIntegrityTrails::Upsert-LogEnabled-UseSink [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Upsert-LogEnabled+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 26624, MsgBus: 8695 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ea6/r3tmp/tmpDHEmER/pdisk_1.dat TServer::EnableGrpc on GrpcPort 26624, node 1 TClient is connected to server localhost:8695 TClient is connected to server localhost:8695 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::BrokenReadLock-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 8524, MsgBus: 24490 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000eaa/r3tmp/tmpg20qW4/pdisk_1.dat TServer::EnableGrpc on GrpcPort 8524, node 1 TClient is connected to server localhost:24490 TClient is connected to server localhost:24490 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Upsert+LogEnabled+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 2029, MsgBus: 63899 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000e8f/r3tmp/tmpiVNN04/pdisk_1.dat TServer::EnableGrpc on GrpcPort 2029, node 1 TClient is connected to server localhost:63899 TClient is connected to server localhost:63899 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... 
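Each unittest block above follows the same harness pattern: start a fresh YDB server on a new gRPC/MsgBus port pair ("Trying to start YDB, gRPC: ..., MsgBus: ..."), connect a TClient, confirm the scheme root via Ls ("WaitRootIsUp 'Root' success."), then poll for the remaining asynchronous setup steps, printing "waiting..." on every retry. A rough sketch of such a retry loop, with illustrative names only (the real helpers live in test_client.cpp and differ):

    #include <chrono>
    #include <cstdio>
    #include <functional>
    #include <thread>

    // Generic retry loop in the spirit of the harness output above: each
    // failed probe prints "waiting..." and sleeps before trying again.
    bool WaitFor(const std::function<bool()>& ready,
                 int maxRetries = 50,
                 std::chrono::milliseconds delay = std::chrono::milliseconds(100)) {
        for (int i = 0; i < maxRetries; ++i) {
            if (ready()) {
                return true;
            }
            std::printf("waiting...\n");
            std::this_thread::sleep_for(delay);
        }
        return false;
    }

    int main() {
        int probes = 0;
        // Simulate a condition that becomes true on the sixth probe.
        bool ok = WaitFor([&] { return ++probes >= 6; });
        std::printf(ok ? "success.\n" : "timed out.\n");
        return ok ? 0 : 1;
    }

With the simulated condition flipping on the sixth probe, the program prints five consecutive "waiting..." lines before succeeding, matching the runs of "waiting..." seen in the test output above.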
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::CreateTableInsideAndLs-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: 2025-03-31T16:18:30.808871Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008809963867883:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:30.809063Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001b63/r3tmp/tmpS8fbXU/pdisk_1.dat 2025-03-31T16:18:31.183699Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:31.201579Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:31.201714Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:31.204289Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25281 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-03-31T16:18:31.390901Z node 1 :TX_PROXY DEBUG: actor# [1:7488008809963868117:2115] Handle TEvNavigate describe path dc-1 2025-03-31T16:18:31.390985Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008814258835876:2428] HANDLE EvNavigateScheme dc-1 2025-03-31T16:18:31.391250Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7488008809963868142:2129], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:18:31.391381Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7488008809963868142:2129], path# /dc-1, domainOwnerId# 72057594046644480 2025-03-31T16:18:31.391661Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008814258835877:2429][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-03-31T16:18:31.393648Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008809963867796:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7488008814258835881:2429] 2025-03-31T16:18:31.393729Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7488008809963867796:2050] Subscribe: subscriber# [1:7488008814258835881:2429], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-31T16:18:31.393781Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008809963867799:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7488008814258835882:2429] 2025-03-31T16:18:31.393798Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7488008809963867799:2053] Subscribe: subscriber# [1:7488008814258835882:2429], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-31T16:18:31.393818Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008809963867802:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7488008814258835883:2429] 2025-03-31T16:18:31.393837Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7488008809963867802:2056] Subscribe: subscriber# 
[1:7488008814258835883:2429], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-31T16:18:31.393913Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008814258835881:2429][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7488008809963867796:2050] 2025-03-31T16:18:31.393981Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008814258835882:2429][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7488008809963867799:2053] 2025-03-31T16:18:31.394010Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008814258835883:2429][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7488008809963867802:2056] 2025-03-31T16:18:31.394067Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008814258835877:2429][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7488008814258835878:2429] 2025-03-31T16:18:31.394103Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008814258835877:2429][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7488008814258835879:2429] 2025-03-31T16:18:31.394158Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7488008814258835877:2429][/dc-1] Set up state: owner# [1:7488008809963868142:2129], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-03-31T16:18:31.394302Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008814258835877:2429][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7488008814258835880:2429] 2025-03-31T16:18:31.394373Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7488008814258835877:2429][/dc-1] Path was already updated: owner# [1:7488008809963868142:2129], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-03-31T16:18:31.394455Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008814258835881:2429][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488008814258835878:2429], cookie# 1 2025-03-31T16:18:31.394482Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008814258835882:2429][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488008814258835879:2429], cookie# 1 2025-03-31T16:18:31.394494Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008814258835883:2429][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488008814258835880:2429], cookie# 1 2025-03-31T16:18:31.394542Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008809963867796:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7488008814258835881:2429] 2025-03-31T16:18:31.394571Z node 
1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008809963867796:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488008814258835881:2429], cookie# 1 2025-03-31T16:18:31.394590Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008809963867799:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7488008814258835882:2429] 2025-03-31T16:18:31.394603Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008809963867799:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488008814258835882:2429], cookie# 1 2025-03-31T16:18:31.394624Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008809963867802:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7488008814258835883:2429] 2025-03-31T16:18:31.394639Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008809963867802:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488008814258835883:2429], cookie# 1 2025-03-31T16:18:31.395140Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008814258835881:2429][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488008809963867796:2050], cookie# 1 2025-03-31T16:18:31.395165Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008814258835882:2429][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488008809963867799:2053], cookie# 1 2025-03-31T16:18:31.395208Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008814258835883:2429][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488008809963867802:2056], cookie# 1 2025-03-31T16:18:31.395253Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008814258835877:2429][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488008814258835878:2429], cookie# 1 2025-03-31T16:18:31.395274Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008814258835877:2429][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-03-31T16:18:31.395290Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008814258835877:2429][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488008814258835879:2429], cookie# 1 2025-03-31T16:18:31.395309Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008814258835877:2429][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-03-31T16:18:31.395334Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008814258835877:2429][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488008814258835880:2429], cookie# 1 2025-03-31T16:18:31.395348Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008814258835877:2429][/dc-1] Unexpected sync response: sender# [1:7488008814258835880:2429], cookie# 1 2025-03-31T16:18:31.449663Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7488008809963868142:2129], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 
EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-03-31T16:18:31.450018Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7488008809963868142:2129], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVer ... 5287370794:2933], path# /dc-1/.metadata/workload_manager/running_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-31T16:18:39.658601Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7488008832402467442:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/delayed_requests DomainOwnerId: 72057594046644480 }: sender# [3:7488008845287370795:2932] 2025-03-31T16:18:39.658607Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7488008832402467442:2056] Upsert description: path# /dc-1/.metadata/workload_manager/delayed_requests 2025-03-31T16:18:39.658617Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7488008832402467439:2053] Subscribe: subscriber# [3:7488008845287370793:2933], path# /dc-1/.metadata/workload_manager/running_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-31T16:18:39.658619Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7488008832402467442:2056] Subscribe: subscriber# [3:7488008845287370795:2932], path# /dc-1/.metadata/workload_manager/delayed_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-31T16:18:39.658639Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7488008845287370791:2933][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7488008832402467436:2050] 2025-03-31T16:18:39.658651Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7488008832402467439:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/delayed_requests DomainOwnerId: 72057594046644480 }: sender# [3:7488008845287370792:2932] 2025-03-31T16:18:39.658661Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7488008832402467439:2053] Upsert description: path# /dc-1/.metadata/workload_manager/delayed_requests 2025-03-31T16:18:39.658683Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[replica][3:7488008845287370794:2933][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7488008832402467442:2056] 2025-03-31T16:18:39.658693Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7488008832402467439:2053] Subscribe: subscriber# [3:7488008845287370792:2932], path# /dc-1/.metadata/workload_manager/delayed_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-31T16:18:39.658709Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7488008845287370793:2933][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7488008832402467439:2053] 2025-03-31T16:18:39.658731Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7488008845287370790:2932][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [3:7488008832402467436:2050] 2025-03-31T16:18:39.658748Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7488008845287370782:2933][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7488008845287370786:2933] 2025-03-31T16:18:39.658754Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7488008845287370795:2932][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [3:7488008832402467442:2056] 2025-03-31T16:18:39.658775Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7488008845287370792:2932][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [3:7488008832402467439:2053] 2025-03-31T16:18:39.658780Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7488008845287370782:2933][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7488008845287370789:2933] 2025-03-31T16:18:39.658807Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][3:7488008845287370782:2933][/dc-1/.metadata/workload_manager/running_requests] Set up state: owner# [3:7488008832402467774:2127], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-31T16:18:39.658809Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7488008845287370781:2932][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [3:7488008845287370784:2932] 2025-03-31T16:18:39.658823Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7488008845287370782:2933][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7488008845287370788:2933] 2025-03-31T16:18:39.658840Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: [main][3:7488008845287370782:2933][/dc-1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [3:7488008832402467774:2127], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: 
DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-31T16:18:39.658844Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7488008845287370781:2932][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [3:7488008845287370787:2932] 2025-03-31T16:18:39.658857Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7488008832402467436:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7488008845287370791:2933] 2025-03-31T16:18:39.658867Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7488008832402467436:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7488008845287370790:2932] 2025-03-31T16:18:39.658871Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][3:7488008845287370781:2932][/dc-1/.metadata/workload_manager/delayed_requests] Set up state: owner# [3:7488008832402467774:2127], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-31T16:18:39.658876Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7488008832402467442:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7488008845287370794:2933] 2025-03-31T16:18:39.658887Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7488008832402467442:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7488008845287370795:2932] 2025-03-31T16:18:39.658894Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7488008845287370781:2932][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [3:7488008845287370785:2932] 2025-03-31T16:18:39.658896Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7488008832402467439:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7488008845287370793:2933] 2025-03-31T16:18:39.658908Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7488008832402467439:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7488008845287370792:2932] 2025-03-31T16:18:39.658916Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: [main][3:7488008845287370781:2932][/dc-1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [3:7488008832402467774:2127], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-31T16:18:39.658940Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [3:7488008832402467774:2127], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 } 2025-03-31T16:18:39.658990Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [3:7488008832402467774:2127], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7488008845287370782:2933] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-31T16:18:39.659057Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7488008832402467774:2127], cacheItem# { Subscriber: { Subscriber: [3:7488008845287370782:2933] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 
TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:18:39.659110Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [3:7488008832402467774:2127], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 } 2025-03-31T16:18:39.659193Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [3:7488008832402467774:2127], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7488008845287370781:2932] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-31T16:18:39.659244Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7488008832402467774:2127], cacheItem# { Subscriber: { Subscriber: [3:7488008845287370781:2932] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:18:39.659331Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7488008845287370796:2935], recipient# [3:7488008845287370780:2324], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> KqpDataIntegrityTrails::BrokenReadLock+UseSink [GOOD] >> KqpScripting::StreamExecuteYqlScriptScan >> KqpYql::UuidPrimaryKeyBulkUpsert ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Upsert-LogEnabled-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 11422, MsgBus: 29811 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000e8d/r3tmp/tmpDOWKLR/pdisk_1.dat TServer::EnableGrpc on GrpcPort 11422, node 1 TClient is connected to server localhost:29811 TClient is connected to server localhost:29811 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... >> KqpScripting::StreamExecuteYqlScriptMixed >> KqpYql::RefSelect >> KqpScripting::StreamScanQuery ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::DeclareAndAlterPools-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: 2025-03-31T16:18:31.251772Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008810467562948:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:31.251873Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001b1b/r3tmp/tmp8c3Gif/pdisk_1.dat 2025-03-31T16:18:31.617111Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:31.642983Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:31.643194Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:31.648700Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16051 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-03-31T16:18:31.802855Z node 1 :TX_PROXY DEBUG: actor# [1:7488008810467563206:2134] Handle TEvNavigate describe path dc-1 2025-03-31T16:18:31.802907Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008810467563651:2440] HANDLE EvNavigateScheme dc-1 2025-03-31T16:18:31.803007Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7488008810467563298:2181], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:18:31.803041Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7488008810467563298:2181], path# /dc-1, domainOwnerId# 72057594046644480 2025-03-31T16:18:31.803264Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008810467563652:2441][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-03-31T16:18:31.804695Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008810467562856:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7488008810467563656:2441] 2025-03-31T16:18:31.804702Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008810467562859:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7488008810467563657:2441] 2025-03-31T16:18:31.804748Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7488008810467562859:2053] Subscribe: subscriber# [1:7488008810467563657:2441], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-31T16:18:31.804752Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7488008810467562856:2050] Subscribe: subscriber# [1:7488008810467563656:2441], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-31T16:18:31.804805Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008810467562862:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7488008810467563658:2441] 2025-03-31T16:18:31.804824Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7488008810467562862:2056] Subscribe: subscriber# [1:7488008810467563658:2441], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-31T16:18:31.804885Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008810467563657:2441][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7488008810467562859:2053] 2025-03-31T16:18:31.804921Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008810467563656:2441][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7488008810467562856:2050] 2025-03-31T16:18:31.804938Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008810467563658:2441][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7488008810467562862:2056] 2025-03-31T16:18:31.804979Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008810467562859:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7488008810467563657:2441] 2025-03-31T16:18:31.805009Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008810467562856:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# 
[1:7488008810467563656:2441] 2025-03-31T16:18:31.805013Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008810467563652:2441][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7488008810467563654:2441] 2025-03-31T16:18:31.805023Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008810467562862:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7488008810467563658:2441] 2025-03-31T16:18:31.805040Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008810467563652:2441][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7488008810467563653:2441] 2025-03-31T16:18:31.805086Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7488008810467563652:2441][/dc-1] Set up state: owner# [1:7488008810467563298:2181], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-03-31T16:18:31.805183Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008810467563652:2441][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7488008810467563655:2441] 2025-03-31T16:18:31.805233Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7488008810467563652:2441][/dc-1] Path was already updated: owner# [1:7488008810467563298:2181], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-03-31T16:18:31.805289Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008810467563656:2441][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488008810467563653:2441], cookie# 1 2025-03-31T16:18:31.805304Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008810467563657:2441][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488008810467563654:2441], cookie# 1 2025-03-31T16:18:31.805326Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008810467563658:2441][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488008810467563655:2441], cookie# 1 2025-03-31T16:18:31.805389Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008810467562859:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488008810467563657:2441], cookie# 1 2025-03-31T16:18:31.805417Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008810467562862:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488008810467563658:2441], cookie# 1 2025-03-31T16:18:31.805446Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008810467563657:2441][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488008810467562859:2053], cookie# 1 2025-03-31T16:18:31.805481Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008810467563658:2441][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488008810467562862:2056], cookie# 1 2025-03-31T16:18:31.805527Z node 
1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008810467563652:2441][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488008810467563654:2441], cookie# 1 2025-03-31T16:18:31.805548Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008810467563652:2441][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-03-31T16:18:31.805567Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008810467563652:2441][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488008810467563655:2441], cookie# 1 2025-03-31T16:18:31.805593Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008810467563652:2441][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-03-31T16:18:31.805622Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008810467562856:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488008810467563656:2441], cookie# 1 2025-03-31T16:18:31.805646Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008810467563656:2441][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488008810467562856:2050], cookie# 1 2025-03-31T16:18:31.805663Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008810467563652:2441][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488008810467563653:2441], cookie# 1 2025-03-31T16:18:31.805674Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008810467563652:2441][/dc-1] Unexpected sync response: sender# [1:7488008810467563653:2441], cookie# 1 2025-03-31T16:18:31.846547Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7488008810467563298:2181], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-03-31T16:18:31.846876Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7488008810467563298:2181], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 
CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVer ... [3:7488008833307457870:2056] 2025-03-31T16:18:40.028113Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7488008833307457864:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7488008850487328588:3003] 2025-03-31T16:18:40.028136Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7488008850487328577:3003][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7488008850487328587:3003] 2025-03-31T16:18:40.028168Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7488008850487328595:3004][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7488008833307457867:2053] 2025-03-31T16:18:40.028177Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: [main][3:7488008850487328577:3003][/dc-1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [3:7488008837602425499:2127], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-31T16:18:40.028213Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7488008850487328596:3004][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7488008833307457870:2056] 2025-03-31T16:18:40.028217Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7488008833307457867:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7488008850487328589:3003] 2025-03-31T16:18:40.028242Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7488008833307457867:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7488008850487328595:3004] 2025-03-31T16:18:40.028259Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7488008850487328578:3004][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7488008850487328592:3004] 2025-03-31T16:18:40.028282Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [3:7488008837602425499:2127], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 } 2025-03-31T16:18:40.028298Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][3:7488008850487328578:3004][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [3:7488008837602425499:2127], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-31T16:18:40.028321Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7488008850487328578:3004][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7488008850487328593:3004] 2025-03-31T16:18:40.028353Z node 3 
:TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [3:7488008837602425499:2127], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7488008850487328577:3003] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-31T16:18:40.028385Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: [main][3:7488008850487328578:3004][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [3:7488008837602425499:2127], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-31T16:18:40.028413Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7488008833307457870:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7488008850487328590:3003] 2025-03-31T16:18:40.028429Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7488008833307457870:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7488008850487328596:3004] 2025-03-31T16:18:40.028431Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7488008837602425499:2127], cacheItem# { Subscriber: { Subscriber: [3:7488008850487328577:3003] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:18:40.028477Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [3:7488008837602425499:2127], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 } 2025-03-31T16:18:40.028516Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [3:7488008837602425499:2127], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7488008850487328578:3004] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-31T16:18:40.028521Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7488008850487328597:3005], recipient# [3:7488008850487328574:2324], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true 
SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:18:40.028559Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7488008837602425499:2127], cacheItem# { Subscriber: { Subscriber: [3:7488008850487328578:3004] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:18:40.028602Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7488008850487328598:3006], recipient# [3:7488008850487328575:2325], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:18:40.932394Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7488008837602425499:2127], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:18:40.932539Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7488008837602425499:2127], cacheItem# { Subscriber: { Subscriber: [3:7488008837602426444:2833] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:18:40.932666Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7488008850487328615:3007], recipient# [3:7488008850487328614:2326], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:18:41.031757Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7488008837602425499:2127], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: 
KindUnknown DomainInfo }] } 2025-03-31T16:18:41.031882Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7488008837602425499:2127], cacheItem# { Subscriber: { Subscriber: [3:7488008850487328578:3004] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:18:41.031965Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7488008854782295916:3011], recipient# [3:7488008854782295915:2327], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> KqpYql::EvaluateExpr2 >> KqpYql::UpdatePk >> KqpYql::InsertIgnore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::OlapWriteRow Test command err: Trying to start YDB, gRPC: 30655, MsgBus: 3227 2025-03-31T16:18:20.593560Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008766742534453:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:20.593631Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000afb/r3tmp/tmpSnj9HQ/pdisk_1.dat 2025-03-31T16:18:21.279624Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:21.316633Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:21.316795Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:21.321143Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30655, node 1 2025-03-31T16:18:21.468645Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:21.468664Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:21.468671Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:21.468774Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3227 TClient is connected to server localhost:3227 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:22.418967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:22.436175Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:18:22.461477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:18:22.611572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-31T16:18:22.783661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:22.851341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:24.480337Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008783922405413:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:24.480437Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:24.747524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.790688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.831908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.874235Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.909558Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:24.982780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:25.068296Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008788217373234:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:25.068375Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:25.068589Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008788217373239:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:25.072603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:25.084900Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008788217373241:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:18:25.142085Z node 1 :TX_PROXY ERROR: Actor# [1:7488008788217373293:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:25.591379Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008766742534453:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:25.591446Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:18:26.034760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:18:26.220918Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;self_id=[1:7488008792512341035:2509];tablet_id=72075186224037924;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:18:26.221116Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;self_id=[1:7488008792512341035:2509];tablet_id=72075186224037924;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:18:26.221501Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;self_id=[1:7488008792512341035:2509];tablet_id=72075186224037924;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:18:26.221623Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;self_id=[1:7488008792512341035:2509];tablet_id=72075186224037924;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:18:26.221714Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;self_id=[1:7488008792512341035:2509];tablet_id=72075186224037924;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:18:26.221810Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;self_id=[1:7488008792512341035:2509];tablet_id=72075186224037924;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:18:26.221913Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;self_id=[1:7488008792512341035:2509];tablet_id=72075186224037924;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:18:26.222016Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;self_id=[1:7488008792512341035:2509];tablet_id=72075186224037924;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:18:26.222115Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;self_id=[1:7488008792512341035:2509];tablet_id=72075186224037924;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:18:26.222205Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037924;self_id=[1:7488008792512341035:2509];tablet_id=72075186224037924;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:18:26.222298Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;self_id=[1:7488008792512341035:2509];tablet_id=72075186224037924;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:18:26.222398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;self_id=[1:7488008792512341035:2509];tablet_id=72075186224037924;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:18:26.239033Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7488008792512341041:2512];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:18:26.239282Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7488008792512341041:2512];tablet_id=720751862240379 ... 75186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T16:18:26.409785Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:18:26.409833Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:18:26.409910Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:18:26.409931Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:18:26.410125Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:18:26.410155Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:18:26.410239Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:18:26.410323Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:18:26.410388Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:18:26.410421Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:18:26.410459Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:18:26.410490Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:18:26.410991Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:18:26.411038Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:18:26.411269Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:18:26.411306Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:18:26.411418Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:18:26.411477Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:18:26.411665Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:18:26.411690Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:18:26.411790Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T16:18:26.411813Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T16:18:26.436620Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037919;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-31T16:18:26.437415Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037925;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-31T16:18:26.442133Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037922;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-31T16:18:26.443655Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037924;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-31T16:18:26.448758Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224037926;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-31T16:18:26.451524Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037921;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-31T16:18:26.452475Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-31T16:18:26.456846Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-31T16:18:26.457140Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037920;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-31T16:18:26.462077Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-31T16:18:26.585718Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=281474976710673;tx_id=281474976710673;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710673; 2025-03-31T16:18:26.585718Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=281474976710673;tx_id=281474976710673;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710673; 2025-03-31T16:18:26.586293Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=281474976710673;tx_id=281474976710673;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710673; 2025-03-31T16:18:26.661484Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=281474976710675;tx_id=281474976710675;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710675; query_phases { duration_us: 8841 cpu_time_us: 2290 affected_shards: 1 } query_phases { duration_us: 6317 cpu_time_us: 383 affected_shards: 1 } compilation { duration_us: 52234 cpu_time_us: 49780 } process_cpu_time_us: 675 total_duration_us: 70062 total_cpu_time_us: 53128 AddressSanitizer:DEADLYSIGNAL ================================================================= ==404811==ERROR: AddressSanitizer: SEGV on unknown address 0x000000000008 (pc 0x000018b718ad bp 0x7ffdbef80940 sp 0x7ffdbef807a0 T0) ==404811==The signal is caused by a READ memory access. ==404811==Hint: address points to the zero page. 
2025-03-31T16:18:36.275368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:18:36.275441Z node 1 :IMPORT WARN: Table profiles were not loaded #0 0x18b718ad in Get::TypeHandler> /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:273:31 #1 0x18b718ad in Get /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:1348:32 #2 0x18b718ad in _internal_table_access /-B/ydb/public/api/protos/ydb_query_stats.pb.h:1762:31 #3 0x18b718ad in table_access /-B/ydb/public/api/protos/ydb_query_stats.pb.h:1766:10 #4 0x18b718ad in NKikimr::NKqp::NTestSuiteKqpCost::TTestCaseOlapWriteRow::Execute_(NUnitTest::TTestContext&) /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:636:13 #5 0x18b96437 in operator() /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1 #6 0x18b96437 in __invoke<(lambda at /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25 #7 0x18b96437 in __call<(lambda at /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5 #8 0x18b96437 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12 #9 0x18b96437 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10 #10 0x194e85e5 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12 #11 0x194e85e5 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10 #12 0x194e85e5 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #13 0x194b8138 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18 #14 0x18b952e3 in NKikimr::NKqp::NTestSuiteKqpCost::TCurrentTest::Execute() /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1 #15 0x194b9a05 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:495:19 #16 0x194e2b5c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44 #17 0x7fb6d06ebd8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) #18 0x7fb6d06ebe3f in __libc_start_main (/lib/x86_64-linux-gnu/libc.so.6+0x29e3f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) #19 0x16306028 in _start (/home/runner/.ya/build/build_root/0l5c/000afb/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost+0x16306028) (BuildId: 5128b0b237a145c7e4cbd2034645258e10874b06) AddressSanitizer can not provide additional info. SUMMARY: AddressSanitizer: SEGV /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:273:31 in Get::TypeHandler> ==404811==ABORTING >> Cdc::HugeKey[YdsRunner] [GOOD] >> Cdc::HugeKey[TopicRunner] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::BrokenReadLock+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 63799, MsgBus: 23029 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000e81/r3tmp/tmpMrwLk9/pdisk_1.dat TServer::EnableGrpc on GrpcPort 63799, node 1 TClient is connected to server localhost:23029 TClient is connected to server localhost:23029 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... >> KqpDataIntegrityTrails::UpsertEvWriteQueryService+isOlap-useOltpSink [GOOD] >> KqpScripting::StreamExecuteYqlScriptScanCancelation >> KqpYql::ColumnNameConflict >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeExternalDataSource [GOOD] >> KqpYql::UuidPrimaryKey >> KqpYql::InsertCVList+useSink >> BackupRestoreS3::RestoreIndexTableDecimalSplitBoundaries [GOOD] >> BackupRestoreS3::RestoreViewDependentOnAnotherView |88.9%| [TA] $(B)/ydb/core/kqp/ut/cost/test-results/unittest/{meta.json ... results_accumulator.log} |88.9%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/cost/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpYql::FlexibleTypes >> KqpScripting::EndOfQueryCommit >> KqpDataIntegrityTrails::UpsertEvWriteQueryService+isOlap+useOltpSink [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::UpsertEvWriteQueryService+isOlap-useOltpSink [GOOD] Test command err: Trying to start YDB, gRPC: 4708, MsgBus: 6898 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000eb8/r3tmp/tmp8SZgPr/pdisk_1.dat TServer::EnableGrpc on GrpcPort 4708, node 1 TClient is connected to server localhost:6898 TClient is connected to server localhost:6898 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... 
------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeExternalDataSource [GOOD] Test command err: 2025-03-31T16:18:03.512707Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008692478747752:2206];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:03.513348Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001029/r3tmp/tmp9dXBrN/pdisk_1.dat 2025-03-31T16:18:03.964862Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:04.002242Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:04.002358Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:04.018622Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3996, node 1 2025-03-31T16:18:04.231876Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:04.231915Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:04.231928Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:04.232050Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25557 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:04.708924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:06.653118Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008705363650565:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:06.653118Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008705363650573:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:06.653230Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:06.659265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:18:06.678990Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008705363650579:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:18:06.755421Z node 1 :TX_PROXY ERROR: Actor# [1:7488008705363650652:2683] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Backup "/Root" to "/home/runner/.ya/build/build_root/0l5c/001029/r3tmp/tmpMcaEgn/"Create temporary directory "/Root/~backup_20250331T161807" in databaseProcess "/home/runner/.ya/build/build_root/0l5c/001029/r3tmp/tmpMcaEgn/topic"Backup topic "/Root/topic" to "/home/runner/.ya/build/build_root/0l5c/001029/r3tmp/tmpMcaEgn/topic"Write topic into "/home/runner/.ya/build/build_root/0l5c/001029/r3tmp/tmpMcaEgn/topic/create_topic.pb"Write ACL into "/home/runner/.ya/build/build_root/0l5c/001029/r3tmp/tmpMcaEgn/topic/permissions.pb"Remove temporary directory "/Root/~backup_20250331T161807" in database2025-03-31T16:18:07.446238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710662:0, at schemeshard: 72057594046644480 Backup completed successfully2025-03-31T16:18:07.491308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:07.538394Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2025-03-31T16:18:07.538438Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-03-31T16:18:07.540077Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found Restore "/home/runner/.ya/build/build_root/0l5c/001029/r3tmp/tmpMcaEgn/" to "/Root"Resolved db base path: "/Root"Restore folder "/home/runner/.ya/build/build_root/0l5c/001029/r3tmp/tmpMcaEgn/" to "/Root"Process "/home/runner/.ya/build/build_root/0l5c/001029/r3tmp/tmpMcaEgn/topic"Restore topic "/home/runner/.ya/build/build_root/0l5c/001029/r3tmp/tmpMcaEgn/topic" to "/Root/topic"Read topic from "/home/runner/.ya/build/build_root/0l5c/001029/r3tmp/tmpMcaEgn/topic/create_topic.pb"Created "/Root/topic"Restore ACL "/home/runner/.ya/build/build_root/0l5c/001029/r3tmp/tmpMcaEgn/topic" to "/Root/topic"Read ACL from "/home/runner/.ya/build/build_root/0l5c/001029/r3tmp/tmpMcaEgn/topic/permissions.pb"2025-03-31T16:18:07.655069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72057594046644480 Restore completed successfully 2025-03-31T16:18:09.009489Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488008717199238966:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:09.009571Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001029/r3tmp/tmpiWeoO6/pdisk_1.dat 2025-03-31T16:18:09.129060Z node 4 :IMPORT WARN: Table profiles were not loaded 
2025-03-31T16:18:09.163941Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:09.164028Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:09.166300Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31166, node 4 2025-03-31T16:18:09.256350Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:09.256378Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:09.256389Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:09.256546Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63663 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:09.495509Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:18:09.583985Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480 Backup "/Root" to "/home/runner/.ya/build/build_root/0l5c/001029/r3tmp/tmpp6EDth/"Create temporary directory "/Root/~backup_20250331T161809" in databaseProcess "/home/runner/.ya/build/build_root/0l5c/001029/r3tmp/tmpp6EDth/kesus"Backup coordination node "/Root/kesus" to "/home/runner/.ya/build/build_root/0l5c/001029/r3tmp/tmpp6EDth/kesus"Write coordination node into "/home/runner/.ya/build/build_root/0l5c/001029/r3tmp/tmpp6EDth/kesus/create_coordination_node.pb"Write ACL into "/home/runner/.ya/build/build_root/0l5c/001029/r3tmp/tmpp6EDth/kesus/permissions.pb"Remove temporary directory "/Root/~backup_20250331T161809" in database2025-03-31T16:18:09.788451Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715660:0, at schemeshard: 72057594046644480 Backup completed successfully2025-03-31T16:18:09.817851Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropKesus, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-31T16:18:09.828762Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037888 not found 2025-03-31T16:18:09.830340Z node 4 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,1) wasn't found Restore "/home/runner/.ya/build/build_root/0l5c/001029/r3tmp/tmpp6EDth/" to "/Root"Resolved db base path: "/Root"Restore folder "/home/runner/.ya/build/build_root/0l5c/001029/r3tmp/tmpp6EDth/" to "/Root"Process "/home/runner/.ya/build/build_root/0l5c/001029/r3tmp/tmpp6EDth/kesus"Restore coordination node "/home/runner/.ya/build/build_root/0l5c/001029/r3tmp/tmpp6EDth ... 
it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:36.096178Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-31T16:18:36.133231Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 Backup "/Root" to "/home/runner/.ya/build/build_root/0l5c/001029/r3tmp/tmpm5E7rd/"Create temporary directory "/Root/~backup_20250331T161836" in databaseProcess "/home/runner/.ya/build/build_root/0l5c/001029/r3tmp/tmpm5E7rd/externalTable"Backup external table "/Root/externalTable" to "/home/runner/.ya/build/build_root/0l5c/001029/r3tmp/tmpm5E7rd/externalTable"Write external table into "/home/runner/.ya/build/build_root/0l5c/001029/r3tmp/tmpm5E7rd/externalTable/create_external_table.sql"Write ACL into "/home/runner/.ya/build/build_root/0l5c/001029/r3tmp/tmpm5E7rd/externalTable/permissions.pb"Process "/home/runner/.ya/build/build_root/0l5c/001029/r3tmp/tmpm5E7rd/externalDataSource"Backup external data source "/Root/externalDataSource" to "/home/runner/.ya/build/build_root/0l5c/001029/r3tmp/tmpm5E7rd/externalDataSource"Write external data source into "/home/runner/.ya/build/build_root/0l5c/001029/r3tmp/tmpm5E7rd/externalDataSource/create_external_data_source.sql"Write ACL into "/home/runner/.ya/build/build_root/0l5c/001029/r3tmp/tmpm5E7rd/externalDataSource/permissions.pb"Remove temporary directory "/Root/~backup_20250331T161836" in database2025-03-31T16:18:36.312322Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715663:0, at schemeshard: 72057594046644480 Backup completed successfullyRestore "/home/runner/.ya/build/build_root/0l5c/001029/r3tmp/tmpm5E7rd/" to "/Root"Resolved db base path: "/Root"Restore folder "/home/runner/.ya/build/build_root/0l5c/001029/r3tmp/tmpm5E7rd/" to "/Root"Process "/home/runner/.ya/build/build_root/0l5c/001029/r3tmp/tmpm5E7rd/externalDataSource"Restore external data source "/home/runner/.ya/build/build_root/0l5c/001029/r3tmp/tmpm5E7rd/externalDataSource" to "/Root/externalDataSource"Read external data source from "/home/runner/.ya/build/build_root/0l5c/001029/r3tmp/tmpm5E7rd/externalDataSource/create_external_data_source.sql"2025-03-31T16:18:36.519716Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715666:0, at schemeshard: 72057594046644480 Created "/Root/externalDataSource"Restore ACL "/home/runner/.ya/build/build_root/0l5c/001029/r3tmp/tmpm5E7rd/externalDataSource" to "/Root/externalDataSource"Read ACL from "/home/runner/.ya/build/build_root/0l5c/001029/r3tmp/tmpm5E7rd/externalDataSource/permissions.pb"2025-03-31T16:18:36.566157Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715667:0, at schemeshard: 72057594046644480 Process "/home/runner/.ya/build/build_root/0l5c/001029/r3tmp/tmpm5E7rd/externalTable"Restore external table "/home/runner/.ya/build/build_root/0l5c/001029/r3tmp/tmpm5E7rd/externalTable" to "/Root/externalTable"Read external table from 
"/home/runner/.ya/build/build_root/0l5c/001029/r3tmp/tmpm5E7rd/externalTable/create_external_table.sql"2025-03-31T16:18:36.611270Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 Created "/Root/externalTable"Restore ACL "/home/runner/.ya/build/build_root/0l5c/001029/r3tmp/tmpm5E7rd/externalTable" to "/Root/externalTable"Read ACL from "/home/runner/.ya/build/build_root/0l5c/001029/r3tmp/tmpm5E7rd/externalTable/permissions.pb"2025-03-31T16:18:36.646552Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715669:0, at schemeshard: 72057594046644480 Restore completed successfully 2025-03-31T16:18:38.412546Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7488008841334425300:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:38.412619Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001029/r3tmp/tmpsm5rEn/pdisk_1.dat 2025-03-31T16:18:38.556443Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:38.594926Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:38.595036Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:38.599538Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24781, node 13 2025-03-31T16:18:38.694720Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:38.694744Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:38.694757Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:38.694928Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3804 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:18:39.013504Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:42.791171Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7488008858514295571:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:42.791317Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:42.791697Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7488008858514295583:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:42.797019Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:18:42.859293Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7488008858514295585:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:18:42.941686Z node 13 :TX_PROXY ERROR: Actor# [13:7488008858514295659:2691] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:42.985547Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715660:0, at schemeshard: 72057594046644480 Backup "/Root" to "/home/runner/.ya/build/build_root/0l5c/001029/r3tmp/tmpxmJk5D/"Create temporary directory "/Root/~backup_20250331T161843" in databaseProcess "/home/runner/.ya/build/build_root/0l5c/001029/r3tmp/tmpxmJk5D/externalDataSource"Backup external data source "/Root/externalDataSource" to "/home/runner/.ya/build/build_root/0l5c/001029/r3tmp/tmpxmJk5D/externalDataSource"Write external data source into "/home/runner/.ya/build/build_root/0l5c/001029/r3tmp/tmpxmJk5D/externalDataSource/create_external_data_source.sql"Write ACL into "/home/runner/.ya/build/build_root/0l5c/001029/r3tmp/tmpxmJk5D/externalDataSource/permissions.pb"Remove temporary directory "/Root/~backup_20250331T161843" in database2025-03-31T16:18:43.192224Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715662:0, at schemeshard: 72057594046644480 Backup completed successfullyRestore "/home/runner/.ya/build/build_root/0l5c/001029/r3tmp/tmpxmJk5D/" to "/Root"Resolved db base path: "/Root"Restore folder "/home/runner/.ya/build/build_root/0l5c/001029/r3tmp/tmpxmJk5D/" to "/Root"Process "/home/runner/.ya/build/build_root/0l5c/001029/r3tmp/tmpxmJk5D/externalDataSource"Restore external data source "/home/runner/.ya/build/build_root/0l5c/001029/r3tmp/tmpxmJk5D/externalDataSource" to "/Root/externalDataSource"Read external data source from "/home/runner/.ya/build/build_root/0l5c/001029/r3tmp/tmpxmJk5D/externalDataSource/create_external_data_source.sql"2025-03-31T16:18:43.391736Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:43.414199Z node 13 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7488008841334425300:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:43.414314Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Created "/Root/externalDataSource"Restore ACL "/home/runner/.ya/build/build_root/0l5c/001029/r3tmp/tmpxmJk5D/externalDataSource" to "/Root/externalDataSource"Read ACL from "/home/runner/.ya/build/build_root/0l5c/001029/r3tmp/tmpxmJk5D/externalDataSource/permissions.pb"2025-03-31T16:18:43.435707Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715665:0, at schemeshard: 72057594046644480 Restore completed successfully >> KqpScripting::ScanQueryInvalid >> Cdc::VirtualTimestamps[TopicRunner] [GOOD] >> Cdc::Write[PqRunner] >> KqpScripting::StreamExecuteYqlScriptClientTimeoutBruteForce ------- [TS] 
{asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::UpsertEvWriteQueryService+isOlap+useOltpSink [GOOD] Test command err: Trying to start YDB, gRPC: 10728, MsgBus: 1216 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000e77/r3tmp/tmp08RVbK/pdisk_1.dat TServer::EnableGrpc on GrpcPort 10728, node 1 TClient is connected to server localhost:1216 TClient is connected to server localhost:1216 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... >> KqpYql::EvaluateExprPgNull [GOOD] >> KqpYql::EvaluateExprYsonAndType >> KqpYql::BinaryJsonOffsetNormal [GOOD] >> KqpYql::Closure >> KqpScripting::ScriptValidate [GOOD] >> KqpScripting::ScriptStats >> KqpYql::TableUseBeforeCreate [GOOD] >> KqpScripting::StreamExecuteYqlScriptSeveralQueries [GOOD] >> KqpScripting::StreamExecuteYqlScriptSeveralQueriesComplex >> KqpScripting::LimitOnShard [GOOD] >> KqpScripting::NoAstSizeLimit |89.0%| [TA] $(B)/ydb/core/kqp/ut/data_integrity/test-results/unittest/{meta.json ... results_accumulator.log} |89.0%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/data_integrity/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpPragma::OrderedColumns [GOOD] >> KqpPragma::MatchRecognizeWithoutTimeOrderRecoverer >> KqpScripting::ScanQuery [GOOD] >> KqpScripting::ScanQueryDisable >> Cdc::OldImageLogDebezium [GOOD] >> Cdc::NewImageLogDebezium >> KqpScripting::StreamExecuteYqlScriptData [GOOD] >> KqpScripting::StreamExecuteYqlScriptEmptyResults >> KqpPragma::ResetPerQuery [GOOD] >> KqpPragma::Warning >> KqpYql::EvaluateIf >> KqpYql::TestUuidDefaultColumn >> KqpScripting::ExecuteYqlScriptScanScalar [GOOD] >> KqpScripting::JoinIndexLookup >> KqpYql::UuidPrimaryKeyBulkUpsert [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::TableUseBeforeCreate [GOOD] Test command err: Trying to start YDB, gRPC: 14722, MsgBus: 2813 2025-03-31T16:18:40.909597Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008849623252831:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:40.912599Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000bea/r3tmp/tmpEyWtu9/pdisk_1.dat 2025-03-31T16:18:41.378282Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:41.397084Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:41.397180Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:41.408654Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14722, node 1 2025-03-31T16:18:41.606296Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:41.606312Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:41.606317Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:41.606408Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2813 TClient is connected to server localhost:2813 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:42.795916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:18:42.846935Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:18:42.860493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:43.085341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:18:43.305351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:18:43.403249Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:44.817388Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008866803123775:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:44.817499Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:45.477465Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.514530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.563836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.612182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.655337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.708563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.784676Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008871098091583:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:45.784838Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:45.785155Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008871098091589:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:45.788780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:45.803750Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008871098091592:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:18:45.876412Z node 1 :TX_PROXY ERROR: Actor# [1:7488008871098091646:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:45.913300Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008849623252831:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:45.913423Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
: Error: Type annotation, code: 1030
:3:13: Error: At function: KiReadTable!
:3:13: Error: Cannot find table 'db.[/Root/NewTable]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 >> KqpYql::TableRange >> KqpYql::InsertIgnore [GOOD] >> KqpYql::JsonCast >> KqpYql::RefSelect [GOOD] >> KqpScripting::StreamExecuteYqlScriptScan [GOOD] >> KqpYql::EvaluateExpr2 [GOOD] |89.0%| [TA] $(B)/ydb/tests/olap/ttl_tiering/test-results/py3test/{meta.json ... results_accumulator.log} >> KqpYql::UpdatePk [GOOD] >> KqpYql::NonStrictDml >> KqpYql::ColumnNameConflict [GOOD] >> KqpScripting::StreamExecuteYqlScriptScanCancelAfterBruteForce >> KqpYql::EvaluateExpr3 >> KqpYql::PgIntPrimaryKey >> KqpYql::ColumnTypeMismatch >> KqpScripting::StreamExecuteYqlScriptMixed [GOOD] >> KqpScripting::StreamScanQuery [GOOD] >> KqpYql::InsertCVList+useSink [GOOD] >> KqpYql::UuidPrimaryKey [GOOD] >> KqpYql::FlexibleTypes [GOOD] >> TExtSubDomainTest::CreateTableInsideThenStopTenantAndForceDeleteSubDomain-AlterDatabaseCreateHiveFirst-true [GOOD] >> KqpScripting::EndOfQueryCommit [GOOD] >> Cdc::Write[PqRunner] [GOOD] >> Cdc::HugeKey[TopicRunner] [GOOD] >> KqpScripting::NoAstSizeLimit [GOOD] >> KqpScripting::ScanQueryInvalid [GOOD] >> KqpScripting::StreamExecuteYqlScriptScanCancelation [GOOD] >> TExtSubDomainTest::CreateTableInsideAndAlterDomainAndTable-AlterDatabaseCreateHiveFirst-true [GOOD] >> KqpYql::Closure [GOOD] >> KqpYql::TestUuidDefaultColumn [GOOD] >> KqpYql::EvaluateExprYsonAndType [GOOD] >> BackupRestoreS3::RestoreViewDependentOnAnotherView [GOOD] >> KqpScripting::StreamExecuteYqlScriptSeveralQueriesComplex [GOOD] >> KqpScripting::StreamExecuteYqlScriptLeadingEmptyScan >> KqpYql::InsertCVList-useSink >> KqpScripting::StreamExecuteYqlScriptScanClientOperationTimeoutBruteForce >> KqpScripting::SyncExecuteYqlScriptSeveralQueries >> Cdc::Write[YdsRunner] >> KqpYql::FromBytes >> BackupRestoreS3::PrefixedVectorIndex >> KqpScripting::ScanQueryTruncate >> Cdc::NewImageLogDebezium [GOOD] >> KqpScripting::StreamExecuteYqlScriptScanClientTimeoutBruteForce [GOOD] >> KqpScripting::ExecuteYqlScriptPg >> Cdc::HugeKeyDebezium >> Cdc::NaN[PqRunner] >> KqpScripting::StreamExecuteYqlScriptScanOperationTmeoutBruteForce ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::NoAstSizeLimit [GOOD] Test command err: Trying to start YDB, gRPC: 17551, MsgBus: 6181 2025-03-31T16:18:40.935440Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008849948838784:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:40.935492Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000bbe/r3tmp/tmpHq0poj/pdisk_1.dat 2025-03-31T16:18:41.549744Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:41.549844Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:41.557128Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:18:41.563261Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17551, node 1 2025-03-31T16:18:41.741172Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, 
will use file: (empty maybe) 2025-03-31T16:18:41.741192Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:41.741199Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:41.741304Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6181 TClient is connected to server localhost:6181 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:42.621062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:42.658810Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:18:42.675103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:42.875400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:43.114147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:43.208222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:45.037473Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008871423677026:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:45.037578Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:45.477473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.527413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.590350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.633793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.666816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.711565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.779543Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008871423677541:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:45.779639Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:45.779978Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008871423677546:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:45.785190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:45.801181Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008871423677548:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:18:45.876387Z node 1 :TX_PROXY ERROR: Actor# [1:7488008871423677602:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:45.936293Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008849948838784:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:45.936383Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:18:47.403204Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437927385, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 17278, MsgBus: 24200 2025-03-31T16:18:48.251637Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008883928625079:2198];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000bbe/r3tmp/tmpWdteza/pdisk_1.dat 2025-03-31T16:18:48.304046Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:18:48.382772Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17278, node 2 2025-03-31T16:18:48.397856Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:48.397946Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:48.403041Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:18:48.499430Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:48.499454Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:48.499462Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:48.499581Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24200 TClient is connected to server localhost:24200 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:49.052863Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:49.065681Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:18:51.829783Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008896813527456:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:51.829883Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:51.845261Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:18:51.919471Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008896813527583:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:51.919557Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:51.945036Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008896813527594:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:51.945122Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:51.945331Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008896813527599:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:51.949204Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-31T16:18:51.965041Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488008896813527601:2349], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-31T16:18:52.025692Z node 2 :TX_PROXY ERROR: Actor# [2:7488008901108494948:2399] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpPragma::Warning [GOOD] >> KqpPragma::MatchRecognizeWithoutTimeOrderRecoverer [GOOD] >> KqpScripting::StreamExecuteYqlScriptEmptyResults [GOOD] >> KqpYql::EvaluateIf [GOOD] >> BackupRestore::RestoreReplicationWithoutSecret [GOOD] >> KqpScripting::ScanQueryDisable [GOOD] >> BackupRestore::RestoreExternalDataSourceWithoutSecret >> KqpYql::TableRange [GOOD] >> KqpScripting::StreamExecuteYqlScriptOperationTmeoutBruteForce [GOOD] >> BasicStatistics::TwoDatabases [GOOD] >> KqpYql::PgIntPrimaryKey [GOOD] >> KqpScripting::JoinIndexLookup [GOOD] >> KqpYql::EvaluateExpr3 [GOOD] >> KqpYql::JsonCast [GOOD] >> KqpScripting::ScriptStats [GOOD] >> KqpYql::DdlDmlMix >> KqpYql::FromBytes [GOOD] >> KqpYql::ColumnTypeMismatch [GOOD] >> KqpYql::NonStrictDml [GOOD] >> KqpScripting::StreamExecuteYqlScriptScanCancelAfterBruteForce [GOOD] >> KqpScripting::SyncExecuteYqlScriptSeveralQueries [GOOD] >> Cdc::Write[YdsRunner] [GOOD] >> KqpScripting::ScanQueryTruncate [GOOD] >> KqpYql::InsertCVList-useSink [GOOD] >> KqpScripting::ExecuteYqlScriptPg [GOOD] >> KqpScripting::StreamExecuteYqlScriptLeadingEmptyScan [GOOD] >> KqpScripting::StreamExecuteYqlScriptClientTimeoutBruteForce [GOOD] >> KqpScripting::StreamExecuteYqlScriptPg >> KqpYql::EvaluateFor >> KqpYql::JsonNumberPrecision >> Cdc::Write[TopicRunner] >> KqpScripting::StreamExecuteYqlScriptClientOperationTimeoutBruteForce ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::TestUuidDefaultColumn [GOOD] Test command err: Trying to start YDB, gRPC: 29923, MsgBus: 21987 2025-03-31T16:18:48.889661Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008887644812262:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:48.890068Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000b5e/r3tmp/tmp7a6Mnh/pdisk_1.dat 2025-03-31T16:18:49.532064Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:49.535158Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:49.535237Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:49.537193Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29923, node 1 2025-03-31T16:18:49.635603Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:49.635627Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:49.635634Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:49.635755Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21987 TClient is connected to server 
localhost:21987 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:50.319701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:50.335630Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:18:52.462789Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008904824681972:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:52.462914Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:52.790500Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:18:52.960454Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008904824682076:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:52.960561Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:52.961125Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008904824682081:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:52.964764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-31T16:18:52.976672Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-03-31T16:18:52.977388Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008904824682083:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-31T16:18:53.041562Z node 1 :TX_PROXY ERROR: Actor# [1:7488008909119649431:2400] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |89.0%| [TA] {RESULT} $(B)/ydb/tests/olap/ttl_tiering/test-results/py3test/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptEmptyResults [GOOD] Test command err: Trying to start YDB, gRPC: 6776, MsgBus: 16123 2025-03-31T16:18:40.870933Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008852838531748:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:40.871022Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000be8/r3tmp/tmpoblsZa/pdisk_1.dat 2025-03-31T16:18:41.441779Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:41.441870Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:41.447764Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6776, node 1 2025-03-31T16:18:41.479271Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:18:41.479294Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:18:41.496297Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:41.600184Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:41.600206Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:41.600216Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:41.600370Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16123 TClient is connected to server localhost:16123 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:18:42.612087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:18:42.636971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:18:42.889877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:43.082632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:43.220387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:45.030296Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008874313369987:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:45.030384Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:45.476122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.557720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.604868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.636393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.671676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.726025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.780016Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008874313370499:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:45.780087Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:45.780334Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008874313370504:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:45.785186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:45.803009Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008874313370506:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:18:45.870934Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008852838531748:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:45.871035Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:18:45.902558Z node 1 :TX_PROXY ERROR: Actor# [1:7488008874313370563:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:47.643062Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437927644, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 5353, MsgBus: 16790 2025-03-31T16:18:48.615574Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008885727062608:2215];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:48.699294Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000be8/r3tmp/tmpMENoai/pdisk_1.dat 2025-03-31T16:18:48.867334Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:48.871566Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:48.871660Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:48.876041Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5353, node 2 2025-03-31T16:18:49.083755Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:49.083780Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:49.083786Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:49.083915Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16790 TClient is connected to server localhost:16790 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:49.737081Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:49.759017Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:49.839477Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:50.033616Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:18:50.115963Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-31T16:18:52.378650Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008902906933384:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:52.378713Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:52.424337Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:52.454818Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:52.521744Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:52.551217Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:52.588611Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:52.633514Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:52.677807Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008902906933894:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:52.677873Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:52.678045Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008902906933899:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:52.681310Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:52.695118Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488008902906933901:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:18:52.798531Z node 2 :TX_PROXY ERROR: Actor# [2:7488008902906933960:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:53.614431Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488008885727062608:2215];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:53.624290Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:18:53.994954Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437934028, txId: 281474976715671] shutting down 2025-03-31T16:18:54.126122Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437934154, txId: 281474976715673] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpPragma::Warning [GOOD] Test command err: Trying to start YDB, gRPC: 61911, MsgBus: 14958 2025-03-31T16:18:40.937995Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008853099454949:2278];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:41.197912Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000bee/r3tmp/tmpZQZRma/pdisk_1.dat 2025-03-31T16:18:41.551682Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:41.551813Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:41.560025Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:18:41.588047Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61911, node 1 2025-03-31T16:18:41.759856Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:41.759874Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:41.759882Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:41.759995Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14958 TClient is connected to server localhost:14958 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:42.571163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:42.602975Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:18:42.633871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:42.784010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:42.989717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:43.078908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:45.040698Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008874574292976:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:45.040801Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:45.477841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.582298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.617161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.667552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.755908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.809670Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.871382Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008874574293497:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:45.871485Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:45.871637Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008874574293502:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:45.878982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:45.895002Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008874574293504:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:18:45.939329Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008853099454949:2278];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:45.939408Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:18:45.999419Z node 1 :TX_PROXY ERROR: Actor# [1:7488008874574293560:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:47.648822Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488008883164228527:2511], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
:2:50: Error: At function: AssumeColumnOrderPartial
:2:20: Error: At function: Aggregate /lib/yql/aggregate.yqls:648:18: Error: At function: AggregationTraits /lib/yql/aggregate.yqls:60:31: Error: At function: AggrCountInit
:2:20: Error: At function: PersistableRepr
:2:26: Error: At function: Member
:2:26: Error: Member not found: _yql_partition_id 2025-03-31T16:18:47.649102Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjhlNWVkYmItOGFkOGEyODAtYTBkZGUwYWYtZWVjZDhlZmM=, ActorId: [1:7488008883164228448:2499], ActorState: ExecuteState, TraceId: 01jqpfz5610a8j4gs3nwhys93a, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 13783, MsgBus: 3670 2025-03-31T16:18:48.663762Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008883406720520:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:48.663826Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000bee/r3tmp/tmpbzK7im/pdisk_1.dat 2025-03-31T16:18:48.848824Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13783, node 2 2025-03-31T16:18:49.015628Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:49.015716Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:49.016466Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:49.016497Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:49.016505Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:49.016618Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:18:49.017546Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3670 TClient is connected to server localhost:3670 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:49.645031Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
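The GENERIC_ERROR above is a compile-time failure, not a runtime one: YQL type annotation (code 1030) walks SqlProject -> Aggregate -> Member and stops because the internal _yql_partition_id column is no longer present in the row type once RemovePrefixMembers has run, so the session replies ReplyQueryCompileError and the harness continues. A minimal analogue that triggers the same error class, assuming only standard YQL builtins (the query and names are hypothetical, not the test's actual statement):

    # Any reference to a struct member the input row type lacks fails
    # YQL type annotation the same way: "Member not found: <name>".
    BAD_YQL = """
    SELECT COUNT(no_such_member)  -- Member not found: no_such_member
    FROM AS_TABLE(AsList(AsStruct(1 AS a)));
    """
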
2025-03-31T16:18:49.652320Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:18:49.661822Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:49.770607Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:49.950567Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:50.038335Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:52.300212Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008900586591467:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:52.300299Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:52.347558Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:52.390014Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:52.423279Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:52.453608Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:52.484635Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:52.518427Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:52.573250Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008900586591975:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:52.573329Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:52.573509Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008900586591980:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:52.577344Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:52.590620Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488008900586591982:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:18:52.678811Z node 2 :TX_PROXY ERROR: Actor# [2:7488008900586592038:3437] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:53.664550Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488008883406720520:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:53.664598Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpPragma::MatchRecognizeWithoutTimeOrderRecoverer [GOOD] Test command err: Trying to start YDB, gRPC: 5560, MsgBus: 14020 2025-03-31T16:18:40.856852Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008852363842225:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:40.857044Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000bdb/r3tmp/tmpWUZGdu/pdisk_1.dat 2025-03-31T16:18:41.532487Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:41.536561Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:41.536638Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:41.547567Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5560, node 1 2025-03-31T16:18:41.667016Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:41.667042Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:41.667053Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:41.667189Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14020 TClient is connected to server localhost:14020 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
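The TClient::Ls request/response pair above is the harness polling the scheme root until 'Root' materializes; the (TRUNCATED) marker is the test runner clipping the long PathDescription printout, not data loss in the database. A sketch of fetching the same description outside the test client, over the gRPC endpoint the server just announced; the port is taken from the "gRPC: 5560" line above, and the exact CLI flag spelling is an assumption:

    import subprocess

    # Describe the scheme root over the test server's announced endpoint.
    subprocess.run(
        ["ydb", "-e", "grpc://localhost:5560", "-d", "/Root",
         "scheme", "describe", "/Root"],
        check=True,
    )
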
2025-03-31T16:18:42.524794Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:42.552749Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:18:42.581573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:42.777557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:43.017173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:43.110569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:44.884255Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008869543713191:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:44.884396Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:45.476754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.521770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.581097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.620874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.665338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.730756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.821618Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008873838681005:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:45.821702Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:45.821844Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008873838681010:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:45.826497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:45.853082Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008873838681012:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:18:45.854984Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008852363842225:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:45.855036Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:18:45.957698Z node 1 :TX_PROXY ERROR: Actor# [1:7488008873838681072:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:47.152453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:18:47.427871Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437927462, txId: 281474976710673] shutting down Trying to start YDB, gRPC: 27684, MsgBus: 14574 2025-03-31T16:18:48.322477Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008885750199271:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:48.322526Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000bdb/r3tmp/tmpdrLCO4/pdisk_1.dat 2025-03-31T16:18:48.504000Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:48.517220Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:48.517311Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:48.518644Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27684, node 2 2025-03-31T16:18:48.607634Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:48.607657Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:48.607665Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:48.607783Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14574 TClient is connected to server localhost:14574 WaitRootIsUp 'Root'... 
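The sequence above -- TPoolFetcherActor reporting NOT_FOUND, schemeshard accepting ESchemeOpCreateResourcePool, TPoolCreatorActor scheduling a "doublechecking" retry, and TX_PROXY surfacing "path exist, request accepts it" -- is a benign lazy-initialization race: the first query on a fresh database triggers creation of /Root/.metadata/workload_manager/pools/default, and a concurrent creator finds the path already there. A sketch of that create-if-missing pattern (names are illustrative, not YDB's actual interfaces):

    class NotFound(Exception): pass
    class AlreadyExists(Exception): pass

    def ensure_default_pool(fetch, create):
        try:
            return fetch("default")    # TPoolFetcherActor: NOT_FOUND at first
        except NotFound:
            try:
                create("default")      # ESchemeOpCreateResourcePool proposed
            except AlreadyExists:
                pass                   # "path exist, request accepts it"
            return fetch("default")    # "completed, doublechecking"
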
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:49.248208Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:49.265866Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:49.396561Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:49.585801Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:49.688958Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:51.876239Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008898635102929:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:51.876336Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:51.931591Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:51.973432Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:52.019762Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:52.094765Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:52.137357Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:52.209874Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:52.300513Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008902930070751:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:52.300612Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:52.301027Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008902930070756:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:52.305657Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:52.321490Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488008902930070758:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:18:52.407127Z node 2 :TX_PROXY ERROR: Actor# [2:7488008902930070814:3451] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:53.323000Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488008885750199271:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:53.323070Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:18:53.534530Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:18:54.233705Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437934252, txId: 281474976715673] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::ScanQueryDisable [GOOD] Test command err: Trying to start YDB, gRPC: 6271, MsgBus: 15146 2025-03-31T16:18:40.869722Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008851618273799:2131];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:40.879841Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000bc2/r3tmp/tmpXMiiiR/pdisk_1.dat 2025-03-31T16:18:41.357941Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:41.358615Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:41.365958Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:41.385129Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6271, node 1 2025-03-31T16:18:41.605933Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:41.605956Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:41.605962Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:41.606062Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15146 TClient is connected to server localhost:15146 WaitRootIsUp 'Root'... 
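The same NOT_FOUND-then-create cycle repeats here for node 2 because each test node starts against a fresh database, so the default pool exists only after the first query provokes it. Recent YDB releases also expose resource pools as DDL, so a fixture could create the pool up front instead of relying on the lazy path; the statement shape below is an assumption from current YDB documentation, with placeholder limits:

    # Hedged: create the workload-manager pool explicitly before the
    # first query; the limits are illustrative values, not test settings.
    CREATE_DEFAULT_POOL = """
    CREATE RESOURCE POOL `default` WITH (
        CONCURRENT_QUERY_LIMIT = 10,
        QUEUE_SIZE = 100
    );
    """
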
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:42.647135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:42.669318Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:18:42.685812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:42.888918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:43.216449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:43.364498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:44.853610Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008868798144693:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:44.853739Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:45.481062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.569881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.605503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.641897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.677740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.769403Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.836873Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008873093112512:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:45.836963Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:45.837184Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008873093112517:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:45.841563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:45.855154Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008873093112519:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:18:45.871547Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008851618273799:2131];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:45.871612Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:18:45.950927Z node 1 :TX_PROXY ERROR: Actor# [1:7488008873093112575:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:47.613422Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437927623, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 15139, MsgBus: 12367 2025-03-31T16:18:48.454093Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008886620311192:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:48.454145Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000bc2/r3tmp/tmpHFUP0n/pdisk_1.dat 2025-03-31T16:18:48.687060Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:48.705212Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:48.705303Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:48.706210Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15139, node 2 2025-03-31T16:18:48.879828Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:48.879853Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:48.879860Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:48.880431Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12367 TClient is connected to server localhost:12367 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:49.517906Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:49.534277Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:18:49.546150Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:49.603546Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:49.768847Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:49.838247Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:52.183499Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008903800182132:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:52.183576Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:52.231054Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:52.318336Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:52.351975Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:52.388331Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:52.459988Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:52.529401Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:52.581928Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008903800182652:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:52.581999Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008903800182657:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:52.582014Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:52.585730Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:52.594880Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488008903800182659:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:18:52.655387Z node 2 :TX_PROXY ERROR: Actor# [2:7488008903800182712:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:53.454826Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488008886620311192:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:53.454882Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:18:54.146373Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437934175, txId: 281474976710671] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::CreateTableInsideAndAlterDomainAndTable-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: 2025-03-31T16:18:30.799747Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008808222355451:2080];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:30.801570Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001b3c/r3tmp/tmpK3XZWp/pdisk_1.dat 2025-03-31T16:18:31.187847Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:31.187959Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:31.190819Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:18:31.220223Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:13670 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-03-31T16:18:31.405396Z node 1 :TX_PROXY DEBUG: actor# [1:7488008808222355674:2115] Handle TEvNavigate describe path dc-1 2025-03-31T16:18:31.405469Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008812517323440:2435] HANDLE EvNavigateScheme dc-1 2025-03-31T16:18:31.405616Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7488008808222355698:2128], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:18:31.405670Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7488008808222355698:2128], path# /dc-1, domainOwnerId# 72057594046644480 2025-03-31T16:18:31.405834Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008812517323441:2436][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-03-31T16:18:31.407350Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008808222355354:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7488008812517323445:2436] 2025-03-31T16:18:31.407350Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008808222355360:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7488008812517323447:2436] 2025-03-31T16:18:31.407409Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7488008808222355360:2056] Subscribe: subscriber# [1:7488008812517323447:2436], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-31T16:18:31.407409Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7488008808222355354:2050] Subscribe: subscriber# [1:7488008812517323445:2436], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-31T16:18:31.407460Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008808222355357:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7488008812517323446:2436] 2025-03-31T16:18:31.407476Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7488008808222355357:2053] Subscribe: subscriber# [1:7488008812517323446:2436], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-31T16:18:31.407480Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008812517323447:2436][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7488008808222355360:2056] 2025-03-31T16:18:31.407509Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008808222355360:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7488008812517323447:2436] 2025-03-31T16:18:31.407514Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008812517323445:2436][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7488008808222355354:2050] 2025-03-31T16:18:31.407526Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008808222355354:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7488008812517323445:2436] 2025-03-31T16:18:31.407545Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008812517323446:2436][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# 
[1:7488008808222355357:2053] 2025-03-31T16:18:31.407559Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008808222355357:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7488008812517323446:2436] 2025-03-31T16:18:31.407591Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008812517323441:2436][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7488008812517323444:2436] 2025-03-31T16:18:31.407644Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008812517323441:2436][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7488008812517323442:2436] 2025-03-31T16:18:31.407698Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7488008812517323441:2436][/dc-1] Set up state: owner# [1:7488008808222355698:2128], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-03-31T16:18:31.407820Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008812517323441:2436][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7488008812517323443:2436] 2025-03-31T16:18:31.407872Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7488008812517323441:2436][/dc-1] Path was already updated: owner# [1:7488008808222355698:2128], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-03-31T16:18:31.407957Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008812517323445:2436][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488008812517323442:2436], cookie# 1 2025-03-31T16:18:31.407980Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008812517323446:2436][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488008812517323443:2436], cookie# 1 2025-03-31T16:18:31.407995Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008812517323447:2436][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488008812517323444:2436], cookie# 1 2025-03-31T16:18:31.408023Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008808222355354:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488008812517323445:2436], cookie# 1 2025-03-31T16:18:31.408066Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008808222355357:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488008812517323446:2436], cookie# 1 2025-03-31T16:18:31.408089Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008808222355360:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488008812517323447:2436], cookie# 1 2025-03-31T16:18:31.408121Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008812517323445:2436][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488008808222355354:2050], cookie# 1 2025-03-31T16:18:31.408140Z node 1 :SCHEME_BOARD_SUBSCRIBER 
DEBUG: [replica][1:7488008812517323446:2436][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488008808222355357:2053], cookie# 1 2025-03-31T16:18:31.408162Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008812517323447:2436][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488008808222355360:2056], cookie# 1 2025-03-31T16:18:31.408204Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008812517323441:2436][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488008812517323442:2436], cookie# 1 2025-03-31T16:18:31.408245Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008812517323441:2436][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-03-31T16:18:31.408268Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008812517323441:2436][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488008812517323443:2436], cookie# 1 2025-03-31T16:18:31.408286Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008812517323441:2436][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-03-31T16:18:31.408310Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008812517323441:2436][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488008812517323444:2436], cookie# 1 2025-03-31T16:18:31.408327Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008812517323441:2436][/dc-1] Unexpected sync response: sender# [1:7488008812517323444:2436], cookie# 1 2025-03-31T16:18:31.445824Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7488008808222355698:2128], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-03-31T16:18:31.446136Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7488008808222355698:2128], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 
CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVer ... ncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:18:50.115616Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7488008856904871075:2127], cacheItem# { Subscriber: { Subscriber: [3:7488008861199839126:2666] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:18:50.115731Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7488008895559579237:4045], recipient# [3:7488008895559579236:2352], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:18:50.791833Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7488008856904871075:2127], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:18:50.791979Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7488008856904871075:2127], cacheItem# { Subscriber: { Subscriber: [3:7488008874084741693:3188] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:18:50.792088Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7488008895559579245:4046], recipient# [3:7488008895559579244:2353], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:18:50.943324Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7488008856904871075:2127], request# { ErrorCount: 0 
DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:18:50.943568Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7488008856904871075:2127], cacheItem# { Subscriber: { Subscriber: [3:7488008861199839126:2666] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:18:50.943720Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7488008895559579257:4047], recipient# [3:7488008895559579256:2354], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:18:51.121132Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7488008856904871075:2127], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:18:51.121263Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7488008856904871075:2127], cacheItem# { Subscriber: { Subscriber: [3:7488008861199839126:2666] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:18:51.121391Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7488008899854546559:4052], recipient# [3:7488008899854546558:2355], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:18:51.795826Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7488008856904871075:2127], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers 
TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:18:51.795981Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7488008856904871075:2127], cacheItem# { Subscriber: { Subscriber: [3:7488008874084741693:3188] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:18:51.796101Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7488008899854546570:4056], recipient# [3:7488008899854546569:2356], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:18:51.947497Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7488008856904871075:2127], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:18:51.947633Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7488008856904871075:2127], cacheItem# { Subscriber: { Subscriber: [3:7488008861199839126:2666] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:18:51.947719Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7488008899854546584:4060], recipient# [3:7488008899854546583:2357], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:18:52.123680Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7488008856904871075:2127], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath 
Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:18:52.123814Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7488008856904871075:2127], cacheItem# { Subscriber: { Subscriber: [3:7488008861199839126:2666] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:18:52.123897Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7488008904149513887:4066], recipient# [3:7488008904149513886:2358], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::JsonCast [GOOD] Test command err: Trying to start YDB, gRPC: 17774, MsgBus: 3028 2025-03-31T16:18:44.614709Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008870306895334:2273];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:44.614764Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000b96/r3tmp/tmpeCnmbI/pdisk_1.dat 2025-03-31T16:18:45.117333Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:45.117445Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:45.123776Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:45.126104Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17774, node 1 2025-03-31T16:18:45.237558Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:45.237581Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:45.237587Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:45.237703Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3028 TClient is connected to server localhost:3028 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:45.833024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:45.852063Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:18:45.862869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:46.040623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:18:46.229699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:18:46.323855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:48.056898Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008887486766056:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:48.057012Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:48.389631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:48.437241Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:48.480574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:48.527288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:48.565958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:48.638421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:48.725769Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008887486766572:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:48.725836Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:48.726162Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008887486766577:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:48.730265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:48.746921Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-31T16:18:48.747187Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008887486766579:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:18:48.846881Z node 1 :TX_PROXY ERROR: Actor# [1:7488008887486766634:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:49.623309Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008870306895334:2273];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:49.623374Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
<main>: Error: Table intent determination, code: 1040
:3:35: Error: INSERT OR IGNORE is not yet supported for Kikimr. Trying to start YDB, gRPC: 3687, MsgBus: 22402 2025-03-31T16:18:50.932480Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008894870613770:2064];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000b96/r3tmp/tmpfCCck8/pdisk_1.dat 2025-03-31T16:18:50.979969Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:18:51.056144Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3687, node 2 2025-03-31T16:18:51.086869Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:51.086990Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:51.088033Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:18:51.123655Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:51.123677Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:51.123686Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:51.123797Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22402 TClient is connected to server localhost:22402 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-31T16:18:51.554019Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:18:51.573613Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:51.653398Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:18:51.863481Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:18:51.930822Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-31T16:18:54.083915Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008912050484721:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:54.084015Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:54.127216Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:54.160811Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:54.190650Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:54.222938Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:54.256126Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:54.290324Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:54.337730Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008912050485235:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:54.337798Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:54.338123Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008912050485240:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:54.341830Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:54.352977Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488008912050485242:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:18:54.437350Z node 2 :TX_PROXY ERROR: Actor# [2:7488008912050485296:3441] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } [[#]] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::SyncExecuteYqlScriptSeveralQueries [GOOD] Test command err: Trying to start YDB, gRPC: 25607, MsgBus: 3151 2025-03-31T16:18:44.050199Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008868987812796:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:44.054688Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000bac/r3tmp/tmpd5LaaF/pdisk_1.dat 2025-03-31T16:18:44.595768Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:44.595866Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:44.603702Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25607, node 1 2025-03-31T16:18:44.651611Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:18:44.651701Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:18:44.664559Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:44.736145Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:44.736178Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:44.736186Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:44.736298Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3151 TClient is connected to server localhost:3151 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:18:45.464799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:45.486615Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:18:45.502289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:45.732443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:45.906537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:45.967865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:48.167243Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008886167683765:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:48.167388Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:48.533011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:48.568758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:48.653986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:48.695099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:48.729717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:48.806786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:48.906692Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008886167684291:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:48.906786Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:48.906989Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008886167684296:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:48.911895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:48.931299Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008886167684298:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:18:49.001097Z node 1 :TX_PROXY ERROR: Actor# [1:7488008886167684352:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:49.043309Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008868987812796:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:49.043350Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:18:50.836885Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437930850, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 4763, MsgBus: 27731 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000bac/r3tmp/tmpeZsZQr/pdisk_1.dat 2025-03-31T16:18:51.839313Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:18:51.903275Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:51.928327Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:51.928409Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:51.929652Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4763, node 2 2025-03-31T16:18:52.035612Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:52.035633Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:52.035641Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:52.035757Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27731 TClient is connected to server localhost:27731 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-03-31T16:18:52.491057Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:18:52.515135Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:52.601517Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:52.758110Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:52.840114Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:55.166788Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008914818566395:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:55.166904Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:55.215033Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:55.245146Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:55.277688Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:55.347439Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:55.377616Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:55.446653Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:55.499625Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008914818566913:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:55.499720Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008914818566918:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:55.499746Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:55.503035Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:55.511866Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488008914818566920:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:18:55.564413Z node 2 :TX_PROXY ERROR: Actor# [2:7488008914818566973:3449] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpYql::EvaluateFor [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::EvaluateExpr3 [GOOD] Test command err: Trying to start YDB, gRPC: 10405, MsgBus: 13113 2025-03-31T16:18:44.287856Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008866240750511:2213];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:44.290178Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000b9f/r3tmp/tmpEr61FZ/pdisk_1.dat 2025-03-31T16:18:44.854984Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:44.857562Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:44.857669Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:44.861933Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10405, node 1 2025-03-31T16:18:44.983911Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:44.983944Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:44.983951Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:44.984143Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13113 TClient is connected to server localhost:13113 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:45.839521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:18:45.853267Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:45.862828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:18:46.007324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:46.227047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:46.343337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:48.088402Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008883420621325:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:48.088525Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:48.451148Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:48.515655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:48.560635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:48.642856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:48.699672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:48.765025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:48.828411Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008883420621840:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:48.828488Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:48.828787Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008883420621845:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:48.833140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:48.855291Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008883420621847:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:18:48.930528Z node 1 :TX_PROXY ERROR: Actor# [1:7488008883420621902:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:49.287386Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008866240750511:2213];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:49.295981Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 24408, MsgBus: 8059 2025-03-31T16:18:51.040581Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008897773026307:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:51.040632Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000b9f/r3tmp/tmpXYt6FR/pdisk_1.dat 2025-03-31T16:18:51.203557Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24408, node 2 2025-03-31T16:18:51.221588Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:51.221687Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:51.223759Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:18:51.274718Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:51.274750Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:51.274757Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:51.274842Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8059 TClient is connected to server localhost:8059 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:18:51.691953Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:51.703151Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:18:51.716286Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:51.795390Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:18:51.949592Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:52.007757Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-31T16:18:54.090548Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008910657929952:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:54.090619Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:54.143582Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:54.195388Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:54.231686Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:54.261798Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:54.331861Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:54.364354Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:54.442074Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008910657930471:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:54.442149Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:54.442204Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008910657930476:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:54.445390Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:54.459046Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488008910657930478:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:18:54.542932Z node 2 :TX_PROXY ERROR: Actor# [2:7488008910657930533:3441] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::TwoDatabases [GOOD] Test command err: 2025-03-31T16:15:17.274569Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2213], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:17.274944Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:15:17.275012Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000cf1/r3tmp/tmp37zTuL/pdisk_1.dat 2025-03-31T16:15:17.724064Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26784, node 1 2025-03-31T16:15:17.993505Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:15:17.993560Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:15:17.993597Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:15:17.994127Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:15:17.996922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:15:18.122466Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:15:18.122627Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:15:18.137694Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:63503 2025-03-31T16:15:18.688608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:15:21.995075Z node 3 :STATISTICS INFO: Subscribed for config changes on node 3 2025-03-31T16:15:22.032266Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:15:22.032377Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:15:22.061057Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-03-31T16:15:22.063170Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:15:22.307961Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:22.308614Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:22.309525Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:22.309703Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:22.310054Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:22.310173Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:22.310261Z node 3 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:22.310354Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:22.310469Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:22.476742Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:15:22.476864Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:15:22.492362Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:15:22.632532Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:15:22.694662Z node 3 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-31T16:15:22.694780Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-31T16:15:22.730302Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-31T16:15:22.730500Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-31T16:15:22.730727Z node 3 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-31T16:15:22.730796Z node 3 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-31T16:15:22.730849Z node 3 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-31T16:15:22.730912Z node 3 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-31T16:15:22.730969Z node 3 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-31T16:15:22.731036Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-31T16:15:22.731528Z node 3 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-31T16:15:22.768936Z node 3 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-31T16:15:22.769090Z node 3 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [3:1954:2600], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-31T16:15:22.777730Z node 3 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [3:1965:2609] 2025-03-31T16:15:22.784752Z node 3 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [3:1998:2625] 2025-03-31T16:15:22.785871Z node 3 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [3:1998:2625], schemeshard id = 72075186224037897 2025-03-31T16:15:22.793109Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database1 2025-03-31T16:15:22.861355Z node 3 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-31T16:15:22.861430Z node 3 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-31T16:15:22.861504Z node 3 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database1/.metadata/_statistics 2025-03-31T16:15:22.875528Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-31T16:15:22.883371Z node 3 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-31T16:15:22.883501Z node 3 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-31T16:15:23.072964Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-31T16:15:23.260408Z node 3 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-31T16:15:23.327634Z node 3 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-31T16:15:24.165330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-31T16:15:27.803683Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-31T16:15:27.839212Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:15:27.839348Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:15:27.890524Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:15:27.892532Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:15:28.116500Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:28.117249Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:28.117807Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:28.118006Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:28.118127Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:28.118425Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:28.118538Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:28.118660Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:28.118786Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:28.255019Z node 2 :HIVE WARN: HIVE#72075186224038889 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:15:28.255139Z node 2 :HIVE WARN: HIVE#72075186224038889 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:15:28.269205Z node 2 :HIVE WARN: HIVE#72075186224038889 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:15:28.417698Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:15:28.475598Z node 2 :STATISTICS INFO: [72075186224038895] OnActivateExecutor 2025-03-31T16:15:28.475690Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxInitSchema::Execute 2025-03-31T16:15:28.506608Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxInitSchema::Complete 2025-03-31T16:15:28.508181Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxInit::Execute 2025-03-31T16:15:28.508381Z node 2 :STATISTICS DEBUG: [72075186224038895] Loaded BaseStatistics: schemeshard 
count# 0 2025-03-31T16:15:28.508435Z node 2 :STATISTICS DEBUG: [72075186224038895] Loaded ColumnStatistics: column count# 0 2025-03-31T16:15:28.508490Z node 2 :STATISTICS DEBUG: [72075186224038895] Loaded ScheduleTraversals: table count# 0 2025-03-31T16:15:28.508544Z node 2 :STATISTICS DEBUG: [72075186224038895] Loaded ForceTraversalOperations: table count# 0 2025-03-31T16:15:28.508593Z node 2 :STATISTICS DEBUG: [72075186224038895] Loaded ForceTraversalTables: table count# 0 2025-03-31T16:15:28.508653Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxInit::Complete 2025-03-31T16:15:28.509635Z node 2 :STATISTICS INFO: [72075186224038895] Subscribed for config changes 2025-03-31T16:15:28.563318Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224038895, at schemeshard: 72075186224038898 2025-03-31T16:15:28.563486Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:3220:2596], at schemeshard: 72075186224038898, StatisticsAggregatorId: 72075186224038895, at schemeshard: 72075186224038898 2025-03-31T16:15:28.571095Z node 2 :STATISTICS DEBUG: [72075186224038895] EvServerConnected, pipe server id = [2:3234:2607] 2025-03-31T16:15:28.576606Z node 2 :STATISTICS DEBUG: [72075186224038895] EvServerConnected, pipe server id = [2:3253:261 ... 2 :STATISTICS DEBUG: [72075186224038895] TTxSchemeShardStats::Execute: schemeshard id# 72075186224038898, stats size# 49 2025-03-31T16:18:46.596847Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxSchemeShardStats::Complete 2025-03-31T16:18:47.694776Z node 3 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 121 ], ReplyToActorId[ [3:10322:4780]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:18:47.695113Z node 3 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 121 ] 2025-03-31T16:18:47.695155Z node 3 :STATISTICS DEBUG: ReplySuccess(), request id = 121, ReplyToActorId = [3:10322:4780], StatRequests.size() = 1 2025-03-31T16:18:48.586668Z node 2 :STATISTICS DEBUG: [72075186224038895] ScheduleNextTraversal 2025-03-31T16:18:48.586746Z node 2 :STATISTICS DEBUG: [72075186224038895] ScheduleNextTraversal. No force traversals. 2025-03-31T16:18:48.586803Z node 2 :STATISTICS DEBUG: [72075186224038895] IsColumnTable. Path [OwnerId: 72075186224038898, LocalPathId: 3] is data table. 2025-03-31T16:18:48.586847Z node 2 :STATISTICS DEBUG: [72075186224038895] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224038898, LocalPathId: 3] 2025-03-31T16:18:48.587393Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database2 2025-03-31T16:18:48.590651Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-03-31T16:18:48.596077Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:10349:4459], DatabaseId: /Root/Database2, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:48.596194Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:10360:4464], DatabaseId: /Root/Database2, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:48.596688Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database2, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:48.617093Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976730658:2, at schemeshard: 72075186224038898 2025-03-31T16:18:48.762299Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:10363:4467], DatabaseId: /Root/Database2, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976730658 completed, doublechecking } 2025-03-31T16:18:48.884347Z node 2 :TX_PROXY ERROR: Actor# [2:10451:4515] txid# 281474976730659, issues: { message: "Check failed: path: \'/Root/Database2/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224038898, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:48.914351Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:10480:4530]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:18:48.914664Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-31T16:18:48.914769Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:10482:4532] 2025-03-31T16:18:48.914843Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:10482:4532] 2025-03-31T16:18:48.916621Z node 2 :STATISTICS DEBUG: [72075186224038895] EvServerConnected, pipe server id = [2:10483:4533] 2025-03-31T16:18:48.916850Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:10482:4532], server id = [2:10483:4533], tablet id = 72075186224038895, status = OK 2025-03-31T16:18:48.917025Z node 2 :STATISTICS DEBUG: [72075186224038895] EvConnectNode, pipe server id = [2:10483:4533], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-03-31T16:18:48.917099Z node 2 :STATISTICS DEBUG: [72075186224038895] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-03-31T16:18:48.917301Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-31T16:18:48.917395Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:10480:4530], StatRequests.size() = 1 2025-03-31T16:18:49.062826Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=OTZmNGMyMjYtODQzYzFjMGEtNDM4MDIxMmQtZDMyNzVkNmE=, TxId: 2025-03-31T16:18:49.062901Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OTZmNGMyMjYtODQzYzFjMGEtNDM4MDIxMmQtZDMyNzVkNmE=, TxId: 2025-03-31T16:18:49.070075Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxFinishTraversal::Execute 2025-03-31T16:18:49.093014Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224038898, LocalPathId: 3] 2025-03-31T16:18:49.093074Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-03-31T16:18:49.156008Z node 2 :STATISTICS DEBUG: [72075186224038895] EvFastPropagateCheck 2025-03-31T16:18:49.156087Z node 2 :STATISTICS DEBUG: [72075186224038895] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-03-31T16:18:49.264016Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:10482:4532], schemeshard count = 1 2025-03-31T16:18:50.148977Z node 3 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-31T16:18:50.160936Z node 3 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:18:50.161008Z node 3 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-31T16:18:50.161054Z node 3 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is data table. 2025-03-31T16:18:50.161107Z node 3 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. 
Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-31T16:18:50.161467Z node 3 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database1 2025-03-31T16:18:50.164626Z node 3 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-03-31T16:18:50.182049Z node 3 :SYSTEM_VIEWS WARN: [72075186224037891] TEvIntervalQuerySummary, time mismath: node id# 3, interval end# 1970-01-01T00:02:04.000000Z, event interval end# 2025-03-31T16:18:48.000000Z 2025-03-31T16:18:50.183097Z node 3 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=3&id=YmJmNzM3OTktMjQ1MTg0YjctZmIzYjhkMWItM2NlOTIwMjM=, TxId: 2025-03-31T16:18:50.183170Z node 3 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=3&id=YmJmNzM3OTktMjQ1MTg0YjctZmIzYjhkMWItM2NlOTIwMjM=, TxId: 2025-03-31T16:18:50.183787Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-31T16:18:50.200122Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-31T16:18:50.200195Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-03-31T16:18:50.259648Z node 3 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 122 ], ReplyToActorId[ [3:10578:4819]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:18:50.259997Z node 3 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 122 ] 2025-03-31T16:18:50.260042Z node 3 :STATISTICS DEBUG: ReplySuccess(), request id = 122, ReplyToActorId = [3:10578:4819], StatRequests.size() = 1 2025-03-31T16:18:52.327138Z node 3 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 123 ], ReplyToActorId[ [3:10649:4845]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:18:52.327480Z node 3 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 123 ] 2025-03-31T16:18:52.327523Z node 3 :STATISTICS DEBUG: ReplySuccess(), request id = 123, ReplyToActorId = [3:10649:4845], StatRequests.size() = 1 2025-03-31T16:18:53.090653Z node 2 :STATISTICS DEBUG: [72075186224038895] ScheduleNextTraversal 2025-03-31T16:18:53.090716Z node 2 :STATISTICS DEBUG: [72075186224038895] ScheduleNextTraversal. No force traversals. 2025-03-31T16:18:53.090757Z node 2 :STATISTICS DEBUG: [72075186224038895] IsColumnTable. Path [OwnerId: 72075186224038898, LocalPathId: 4] is data table. 2025-03-31T16:18:53.090792Z node 2 :STATISTICS DEBUG: [72075186224038895] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224038898, LocalPathId: 4] 2025-03-31T16:18:53.091311Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database2 2025-03-31T16:18:53.097315Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-03-31T16:18:53.113440Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NDNhM2EwZi1mZTQwYjg5MC1lZTM5ODk0Yy1lNjBhYzM1OA==, TxId: 2025-03-31T16:18:53.113507Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NDNhM2EwZi1mZTQwYjg5MC1lZTM5ODk0Yy1lNjBhYzM1OA==, TxId: 2025-03-31T16:18:53.114121Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxFinishTraversal::Execute 2025-03-31T16:18:53.130053Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224038898, LocalPathId: 4] 2025-03-31T16:18:53.130117Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-03-31T16:18:54.080631Z node 3 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 2 2025-03-31T16:18:54.080929Z node 3 :STATISTICS DEBUG: EvPropagateStatistics, node id = 3 2025-03-31T16:18:54.081473Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-31T16:18:54.092958Z node 3 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:18:54.093026Z node 3 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-31T16:18:54.143610Z node 3 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 124 ], ReplyToActorId[ [3:10733:4859]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:18:54.144080Z node 3 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 124 ] 2025-03-31T16:18:54.144133Z node 3 :STATISTICS DEBUG: ReplySuccess(), request id = 124, ReplyToActorId = [3:10733:4859], StatRequests.size() = 1 2025-03-31T16:18:54.145199Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:10735:4614]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:18:54.153133Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-31T16:18:54.153220Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:10735:4614], StatRequests.size() = 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptLeadingEmptyScan [GOOD] Test command err: Trying to start YDB, gRPC: 5783, MsgBus: 62458 2025-03-31T16:18:43.995127Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008862056132868:2206];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:44.001181Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ba0/r3tmp/tmpndwpTS/pdisk_1.dat 2025-03-31T16:18:44.505600Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:44.505687Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 5783, node 1 2025-03-31T16:18:44.507626Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:18:44.537940Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:44.639120Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:44.639169Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:44.639182Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:44.639315Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62458 TClient is connected to server localhost:62458 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:45.322492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:45.337504Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:18:45.355779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:45.523886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:45.717004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:45.819679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:47.484660Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008879236003664:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:47.484740Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:47.818763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:47.850051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:47.886269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:47.965351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:47.996783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:48.043787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:48.104825Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008883530971475:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:48.104939Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:48.105205Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008883530971480:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:48.109565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:48.123894Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008883530971482:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:18:48.178691Z node 1 :TX_PROXY ERROR: Actor# [1:7488008883530971535:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:49.004134Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008862056132868:2206];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:49.004193Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:18:49.980958Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437929940, txId: 281474976710671] shutting down 2025-03-31T16:18:50.355128Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437930381, txId: 281474976710673] shutting down 2025-03-31T16:18:51.085533Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437931102, txId: 281474976710677] shutting down Trying to start YDB, gRPC: 14620, MsgBus: 23397 2025-03-31T16:18:51.942124Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008897049134976:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:51.942193Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ba0/r3tmp/tmpO7fb8u/pdisk_1.dat 2025-03-31T16:18:52.136639Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:52.149066Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:52.149154Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:52.150798Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14620, node 2 2025-03-31T16:18:52.202995Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:52.203018Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:52.203025Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:52.203162Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23397 TClient is connected to server localhost:23397 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-31T16:18:52.667967Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:18:52.672958Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:18:52.685721Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:52.770515Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:52.893531Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:52.999014Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:54.927292Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008909934038599:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:54.927373Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:54.975416Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:55.005606Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:55.036805Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:55.068606Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:55.101611Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:55.135362Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:55.212088Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008914229006409:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:55.212167Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:55.212421Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008914229006414:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:55.216092Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:55.226267Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488008914229006416:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:18:55.301620Z node 2 :TX_PROXY ERROR: Actor# [2:7488008914229006471:3440] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:56.425966Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437936457, txId: 281474976715671] shutting down 2025-03-31T16:18:56.823669Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437936856, txId: 281474976715673] shutting down 2025-03-31T16:18:56.942360Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488008897049134976:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:56.942421Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptScanCancelAfterBruteForce [GOOD] Test command err: Trying to start YDB, gRPC: 64075, MsgBus: 6997 2025-03-31T16:18:43.482542Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008863383404821:2069];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:43.487769Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000bba/r3tmp/tmpxBhGaS/pdisk_1.dat 2025-03-31T16:18:43.945450Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:43.945570Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:43.947425Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:18:43.986036Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64075, node 1 2025-03-31T16:18:44.109676Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:44.109703Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:44.109711Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:44.109849Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6997 TClient is connected to server localhost:6997 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:44.890098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:44.951364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:45.105745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:45.355053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:45.448038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:47.407986Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008880563275769:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:47.408097Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:47.749971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:47.795920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:47.874131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:47.910760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:47.947044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:48.020971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:48.105697Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008884858243586:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:48.105826Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:48.106259Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008884858243591:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:48.109941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:48.126074Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008884858243593:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:18:48.216513Z node 1 :TX_PROXY ERROR: Actor# [1:7488008884858243649:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:48.483908Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008863383404821:2069];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:48.483984Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:18:49.892008Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437929884, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 22907, MsgBus: 3886 2025-03-31T16:18:50.828611Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008895005713080:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:50.828692Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000bba/r3tmp/tmp5FojBW/pdisk_1.dat 2025-03-31T16:18:50.916458Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:50.940339Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:50.940425Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:50.941784Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22907, node 2 2025-03-31T16:18:50.999003Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:50.999025Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:50.999033Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:50.999165Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3886 TClient is connected to server localhost:3886 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:51.430795Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:51.436577Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:18:51.441312Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:51.499466Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:51.637361Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:51.710977Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:54.058119Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008912185584008:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:54.058212Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:54.114645Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:54.158967Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:54.191746Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:54.224189Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:54.258991Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:54.328738Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:54.411390Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008912185584525:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:54.411479Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:54.411652Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008912185584530:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:54.425640Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:54.434878Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488008912185584532:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:18:54.508356Z node 2 :TX_PROXY ERROR: Actor# [2:7488008912185584587:3451] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:55.555865Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437935589, txId: 281474976715671] shutting down 2025-03-31T16:18:55.770278Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437935806, txId: 281474976715673] shutting down 2025-03-31T16:18:55.828772Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488008895005713080:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:55.828881Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:18:56.008705Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437936037, txId: 281474976715675] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::FromBytes [GOOD] Test command err: Trying to start YDB, gRPC: 1110, MsgBus: 3130 2025-03-31T16:18:46.048791Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008877713180217:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:46.048921Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000b7e/r3tmp/tmpbMTUsP/pdisk_1.dat 2025-03-31T16:18:46.549496Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:46.549599Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:46.555732Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1110, node 1 2025-03-31T16:18:46.561408Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:18:46.579972Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:18:46.580016Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:18:46.641849Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:46.641882Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:46.641900Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:46.642030Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3130 TClient is connected to server localhost:3130 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:47.358585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:47.394615Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:47.586487Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:47.751515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:47.840890Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:49.577088Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008890598083850:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:49.577197Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:49.923719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:49.961597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:50.001444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:50.045694Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:50.091807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:50.165693Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:50.230564Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008894893051663:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:50.230658Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:50.230891Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008894893051668:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:50.235494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:50.250677Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008894893051670:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:18:50.346979Z node 1 :TX_PROXY ERROR: Actor# [1:7488008894893051726:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:51.049163Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008877713180217:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:51.049223Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 5861, MsgBus: 12016 2025-03-31T16:18:52.199979Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008901628470170:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:52.200080Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000b7e/r3tmp/tmpqtfjuX/pdisk_1.dat 2025-03-31T16:18:52.342140Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:52.353823Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:52.353911Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 5861, node 2 2025-03-31T16:18:52.355711Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:18:52.411818Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:52.411841Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:52.411849Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:52.411958Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12016 TClient is connected to server localhost:12016 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
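
Note: KqpYql::FromBytes above exercises YQL's FromBytes builtin, which reinterprets a byte string in machine (little-endian) representation as a primitive type. A minimal sketch of the kind of query the test covers, using the example literal from the YQL builtins reference (treat the expected values as illustrative):

    -- Reinterpret four little-endian bytes as a Uint32.
    SELECT FromBytes("\xd2\x02\x96\x49", Uint32);  -- expected: 1234567890
    -- FromBytes returns an Optional; input of the wrong width should
    -- yield NULL rather than fail the query.
    SELECT FromBytes("\x2a", Uint32);
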
2025-03-31T16:18:52.930363Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:52.954866Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:53.013717Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:53.197078Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:53.286075Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:55.328927Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008914513373822:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:55.329071Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:55.371402Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:55.417339Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:55.450642Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:55.480158Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:55.507624Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:55.580391Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:55.660319Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008914513374339:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:55.660430Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:55.660465Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008914513374344:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:55.664459Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:55.673890Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488008914513374346:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:18:55.770425Z node 2 :TX_PROXY ERROR: Actor# [2:7488008914513374401:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::ColumnTypeMismatch [GOOD] Test command err: Trying to start YDB, gRPC: 5982, MsgBus: 10404 2025-03-31T16:18:45.116470Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008871378799238:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:45.116526Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000b95/r3tmp/tmpyb6uAf/pdisk_1.dat 2025-03-31T16:18:45.555277Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:45.555350Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:45.557503Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:18:45.564821Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5982, node 1 2025-03-31T16:18:45.799658Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:45.799688Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:45.799697Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:45.799805Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10404 TClient is connected to server localhost:10404 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:46.499447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
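
Note: the repeating sequence above (a NOT_FOUND pool-fetch warning, a "Scheduled retry ... doublechecking" message, then a TX_PROXY "path exist, request accepts it" error) is a benign bootstrap race: the workload service looks up the default resource pool, finds it missing, creates it under /Root/.metadata/workload_manager/pools/, and a concurrent creation attempt then finds the path already present. The default pool is provisioned automatically; a user-defined pool is declared roughly as below, assuming the CREATE RESOURCE POOL syntax of recent YDB releases (the pool name and limits are illustrative, not taken from the log):

    -- Sketch of an explicit resource pool definition; 'olap' is a made-up name.
    CREATE RESOURCE POOL olap WITH (
        CONCURRENT_QUERY_LIMIT = 10,  -- illustrative limits, not from the log
        QUEUE_SIZE = 100
    );
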
2025-03-31T16:18:46.544667Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:18:46.561731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:46.752232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:18:46.945190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:18:47.058332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:49.013201Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008888558670211:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:49.013332Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:49.408068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:49.453102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:49.532080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:49.578353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:49.622921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:49.682878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:49.755257Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008888558670728:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:49.755361Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:49.755505Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008888558670733:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:49.760955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:49.775945Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-31T16:18:49.777023Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008888558670735:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:18:49.837648Z node 1 :TX_PROXY ERROR: Actor# [1:7488008888558670788:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:50.121912Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008871378799238:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:50.122022Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
: Error: Type annotation, code: 1030
:7:30: Error: At function: KiCreateTable!
:7:30: Error: Duplicate column: Value. Trying to start YDB, gRPC: 15995, MsgBus: 6738 2025-03-31T16:18:51.526638Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008899300917793:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:51.526757Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000b95/r3tmp/tmpdCgq9N/pdisk_1.dat 2025-03-31T16:18:51.663268Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:51.691445Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:51.691530Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:51.692963Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15995, node 2 2025-03-31T16:18:51.739913Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:51.739937Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:51.739945Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:51.740050Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6738 TClient is connected to server localhost:6738 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:52.236562Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:52.252609Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:18:52.269915Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:52.357477Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 
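
Note: the "Duplicate column: Value" failure above is KqpYql::ColumnTypeMismatch deliberately feeding the type annotator (error code 1030) an invalid CREATE TABLE. A minimal reproduction (the table path is illustrative, not taken from the log):

    -- Rejected at type annotation: a column name may be declared only once.
    CREATE TABLE `/Root/BadTable` (
        Key Uint64,
        Value String,
        Value Uint64,   -- duplicate of the column above: "Duplicate column: Value."
        PRIMARY KEY (Key)
    );
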
2025-03-31T16:18:52.507606Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-31T16:18:52.589941Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:54.582181Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008912185821438:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:54.582254Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:54.634656Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:54.674898Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:54.713706Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:54.744422Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:54.780160Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:54.814574Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:54.864401Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008912185821947:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:54.864484Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:54.864541Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008912185821952:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:54.868500Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:54.881448Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488008912185821954:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:18:54.954401Z node 2 :TX_PROXY ERROR: Actor# [2:7488008912185822008:3440] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:55.754883Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7488008916480789603:2501], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:5:26: Error: At function: KiWriteTable!
:6:27: Error: Failed to convert type: Struct<'Key':Uint64,'Value':Uint64> to Struct<'Key':Uint64?,'Value':String?>
:6:27: Error: Failed to convert 'Value': Uint64 to Optional<String>
:6:27: Error: Failed to convert input columns types to scheme types, code: 2031 2025-03-31T16:18:55.755205Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZDY3Zjc3OGUtNTIwNDdiZWEtZGE5YTY0OTYtZDM3MzEwN2Y=, ActorId: [2:7488008916480789595:2496], ActorState: ExecuteState, TraceId: 01jqpfzd3q33b7yw1hvjrpar7g, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Type annotation, code: 1030
:5:26: Error: At function: KiWriteTable!
:6:27: Error: Failed to convert type: Struct<'Key':Uint64,'Value':Uint64> to Struct<'Key':Uint64?,'Value':String?>
:6:27: Error: Failed to convert 'Value': Uint64 to Optional<String>
:6:27: Error: Failed to convert input columns types to scheme types, code: 2031 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::InsertCVList-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 23129, MsgBus: 18065 2025-03-31T16:18:45.216575Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008871192221215:2200];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:45.217021Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000b8a/r3tmp/tmpMKakbK/pdisk_1.dat 2025-03-31T16:18:45.673521Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:45.680935Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:45.681221Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:45.685966Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23129, node 1 2025-03-31T16:18:45.857390Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:45.857414Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:45.857421Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:45.857564Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18065 TClient is connected to server localhost:18065 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:46.647071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:46.688836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:46.877090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
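
Note: the KiWriteTable failure above is the compile-time shape check: the test writes rows typed Struct<'Key':Uint64,'Value':Uint64> into a table whose columns are Key Uint64?, Value String?, and Uint64 cannot be converted to Optional<String> (code 2031). A minimal reproduction against a hypothetical table /Root/KV with that schema:

    -- Fails at compile time: Value receives a numeric literal, but the
    -- column type is Optional<String>; there is no implicit number-to-string cast.
    UPSERT INTO `/Root/KV` (Key, Value) VALUES (1ul, 42);
    -- Supplying the declared column type compiles:
    UPSERT INTO `/Root/KV` (Key, Value) VALUES (1ul, "42");
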
2025-03-31T16:18:47.106788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:47.233838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:49.009620Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008888372092033:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:49.009721Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:49.493243Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:49.529483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:49.569951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:49.614262Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:49.659713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:49.716619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:49.794312Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008888372092548:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:49.794380Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:49.794571Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008888372092553:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:49.799681Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:49.813285Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008888372092555:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:18:49.877577Z node 1 :TX_PROXY ERROR: Actor# [1:7488008888372092608:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:50.208244Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008871192221215:2200];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:50.217475Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:18:51.017254Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=3; 2025-03-31T16:18:51.028074Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 3 at tablet 72075186224037914 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-03-31T16:18:51.028261Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 3 at tablet 72075186224037914 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-03-31T16:18:51.028507Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488008892667060228:2502], Table: `/Root/Test` ([72057594046644480:9:1]), SessionActorId: [1:7488008892667060211:2502]Got CONSTRAINT VIOLATION for table `/Root/Test`. ShardID=72075186224037914, Sink=[1:7488008892667060228:2502].{
: Error: Duplicate keys have been found., code: 2012 } 2025-03-31T16:18:51.028991Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488008892667060221:2502], SessionActorId: [1:7488008892667060211:2502], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/Test`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[1:7488008892667060211:2502]. isRollback=0 2025-03-31T16:18:51.029223Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZmRlNjQ5MjgtOGI3YjI3YmQtNDUyZTExYTYtYjEyZDI1ZDU=, ActorId: [1:7488008892667060211:2502], ActorState: ExecuteState, TraceId: 01jqpfz8b7e61ed80ha3y9d70y, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7488008892667060222:2502] from: [1:7488008892667060221:2502] 2025-03-31T16:18:51.029298Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7488008892667060222:2502] TxId: 281474976710671. Ctx: { TraceId: 01jqpfz8b7e61ed80ha3y9d70y, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmRlNjQ5MjgtOGI3YjI3YmQtNDUyZTExYTYtYjEyZDI1ZDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/Test`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-03-31T16:18:51.030166Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZmRlNjQ5MjgtOGI3YjI3YmQtNDUyZTExYTYtYjEyZDI1ZDU=, ActorId: [1:7488008892667060211:2502], ActorState: ExecuteState, TraceId: 01jqpfz8b7e61ed80ha3y9d70y, Create QueryResponse for error on request, msg:
: Error: Execution, code: 1060
: Error: Constraint violated. Table: `/Root/Test`., code: 2012
: Error: Duplicate keys have been found., code: 2012 Trying to start YDB, gRPC: 8469, MsgBus: 28983 2025-03-31T16:18:51.985308Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008896948459663:2109];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:52.026577Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000b8a/r3tmp/tmpLOLvSO/pdisk_1.dat 2025-03-31T16:18:52.122329Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:52.153047Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:52.153126Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:52.154618Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8469, node 2 2025-03-31T16:18:52.204157Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:52.204178Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:52.204187Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:52.204300Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28983 TClient is connected to server localhost:28983 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:52.677464Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:52.683995Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:18:52.687456Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 
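
Note: the PRECONDITION_FAILED chain above (issue code 2012, KIKIMR_CONSTRAINT_VIOLATION) is YDB rejecting an INSERT that collides on the primary key of /Root/Test; the two nodes' runs report it with slightly different wording ("Duplicate keys have been found." here, "Duplicated keys found." further down) but the same issue code. A minimal reproduction, assuming a table keyed by a single Key column that already holds Key = 1 (column names are assumed, the log does not show the schema):

    -- INSERT enforces key uniqueness, so a colliding row fails with code 2012:
    INSERT INTO `/Root/Test` (Key, Value) VALUES (1ul, "again");
    -- UPSERT or REPLACE would overwrite the existing row instead of failing.
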
2025-03-31T16:18:52.758196Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-31T16:18:52.918001Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:53.000931Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:55.193719Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008914128330562:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:55.193830Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:55.235531Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:55.268885Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:55.309117Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:55.378977Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:55.408854Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:55.441980Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:55.480609Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008914128331076:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:55.480699Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:55.480767Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008914128331081:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:55.484604Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:55.496779Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488008914128331083:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:18:55.579283Z node 2 :TX_PROXY ERROR: Actor# [2:7488008914128331136:3446] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:56.713068Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7488008918423298752:2502], TxId: 281474976715672, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=N2U2ZDUxOGUtODM3NjJhYWEtYmE4ZmU5ZDUtOWE2ZDMyOGE=. TraceId : 01jqpfzds20k6xnm980h12bnaf. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Duplicated keys found., code: 2012 }. 2025-03-31T16:18:56.713497Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7488008918423298753:2503], TxId: 281474976715672, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jqpfzds20k6xnm980h12bnaf. SessionId : ydb://session/3?node_id=2&id=N2U2ZDUxOGUtODM3NjJhYWEtYmE4ZmU5ZDUtOWE2ZDMyOGE=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7488008918423298749:2493], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-03-31T16:18:56.713882Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=N2U2ZDUxOGUtODM3NjJhYWEtYmE4ZmU5ZDUtOWE2ZDMyOGE=, ActorId: [2:7488008918423298702:2493], ActorState: ExecuteState, TraceId: 01jqpfzds20k6xnm980h12bnaf, Create QueryResponse for error on request, msg:
: Error: Execution, code: 1060
: Error: Duplicated keys found., code: 2012 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::ScriptStats [GOOD] Test command err: Trying to start YDB, gRPC: 12292, MsgBus: 8853 2025-03-31T16:18:40.866496Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008852780712456:2192];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:40.866617Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000bdd/r3tmp/tmpNlqWKL/pdisk_1.dat 2025-03-31T16:18:41.518133Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:41.525723Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:41.525813Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:41.533106Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12292, node 1 2025-03-31T16:18:41.729060Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:41.729085Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:41.729092Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:41.729227Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8853 TClient is connected to server localhost:8853 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:42.524836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:42.586076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:42.777484Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:18:43.096816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:43.209725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:44.936779Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008869960583307:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:44.936865Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:45.479518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.532583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.601863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.635600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.676040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.756396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.822661Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008874255551121:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:45.822733Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:45.822930Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008874255551126:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:45.826068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:45.837832Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008874255551128:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:18:45.866250Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008852780712456:2192];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:45.866324Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:18:45.900768Z node 1 :TX_PROXY ERROR: Actor# [1:7488008874255551181:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 28671, MsgBus: 23083 2025-03-31T16:18:48.065099Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008885419893758:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:48.065197Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000bdd/r3tmp/tmp8opkgN/pdisk_1.dat 2025-03-31T16:18:48.303604Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:48.328146Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:48.328225Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:48.329546Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28671, node 2 2025-03-31T16:18:48.427593Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:48.427611Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:48.427617Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:48.427707Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23083 TClient is connected to server localhost:23083 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:18:48.952753Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:48.963756Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:18:48.974697Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:18:49.073404Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-31T16:18:49.283222Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:49.377883Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:51.869822Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008898304797402:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:51.869937Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:51.955362Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:51.987534Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:52.026824Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:52.109914Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:52.148772Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:52.230794Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:52.309258Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008902599765219:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:52.309342Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:52.309474Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008902599765224:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:52.313397Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:52.324156Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488008902599765226:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:18:52.406875Z node 2 :TX_PROXY ERROR: Actor# [2:7488008902599765281:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:53.067224Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488008885419893758:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:53.067286Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:18:53.923689Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:18:54.463427Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437934434, txId: 281474976710674] shutting down 2025-03-31T16:18:55.120617Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437935127, txId: 281474976710678] shutting down 2025-03-31T16:18:55.458944Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437935451, txId: 281474976710682] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::UpdatePk [GOOD] Test command err: Trying to start YDB, gRPC: 21293, MsgBus: 19813 2025-03-31T16:18:44.471775Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008868957418911:2264];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:44.471951Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000b9d/r3tmp/tmp3eieWh/pdisk_1.dat 2025-03-31T16:18:45.006516Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:45.006612Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:45.007479Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:45.011419Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21293, node 1 2025-03-31T16:18:45.123619Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:45.123637Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:45.123646Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:45.123753Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19813 TClient is connected to server localhost:19813 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:45.830303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:45.845241Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:18:45.858592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:46.017529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:46.226609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:46.315276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:48.235518Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008886137289659:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:48.235647Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:48.600863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:48.649228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:48.701573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:48.777109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:48.817269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:48.863865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:48.925395Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008886137290176:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:48.925546Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:48.925618Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008886137290181:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:48.929679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:48.946085Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008886137290183:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:18:49.036628Z node 1 :TX_PROXY ERROR: Actor# [1:7488008890432257536:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:49.465163Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008868957418911:2264];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:49.465216Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
: Error: Type annotation, code: 1030
:3:20: Warning: At function: AsStruct
:4:31: Warning: At function: +
:4:31: Warning: Integral type implicit bitcast: Optional and Int32, code: 1107
:5:27: Error: At function: KiUpdateTable!
:5:27: Error: Cannot update primary key column: Group ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::TableRange [GOOD] Test command err: Trying to start YDB, gRPC: 27776, MsgBus: 16262 2025-03-31T16:18:49.832588Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008891285208235:2206];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:49.832631Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000b5d/r3tmp/tmpWJSKWH/pdisk_1.dat 2025-03-31T16:18:50.300841Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:50.305318Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:50.305398Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:50.308700Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27776, node 1 2025-03-31T16:18:50.375608Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:50.375638Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:50.375652Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:50.375765Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16262 TClient is connected to server localhost:16262 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:50.936816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:50.954932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:51.097053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
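For reference on the KqpYql::UpdatePk verdict above, which fails type annotation with "Cannot update primary key column: Group": a minimal YQL sketch of the kind of statement that triggers it, assuming a hypothetical table /Root/Test whose primary key includes the Group column (the Group + 1 expression would also account for the Optional/Int32 implicit-bitcast warning at 4:31):

    -- Rejected by KiUpdateTable: Group is part of the primary key
    UPDATE `/Root/Test`
    SET Group = Group + 1
    WHERE Name = "Anna";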
2025-03-31T16:18:51.264921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:51.341711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:53.118841Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008908465079037:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:53.119051Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:53.466077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:53.502498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:53.535239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:53.577454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:53.643780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:53.713041Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:53.798079Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008908465079559:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:53.798151Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:53.798411Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008908465079564:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:53.802250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:53.816871Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008908465079566:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:18:53.918443Z node 1 :TX_PROXY ERROR: Actor# [1:7488008908465079625:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
: Error: Table intent determination, code: 1040
:3:27: Error: RANGE is not supported on Kikimr clusters. 2025-03-31T16:18:54.825486Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008891285208235:2206];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:54.825559Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::ExecuteYqlScriptPg [GOOD] Test command err: Trying to start YDB, gRPC: 19927, MsgBus: 28854 2025-03-31T16:18:46.101877Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008878397336974:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:46.101946Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000b76/r3tmp/tmpC1K6cD/pdisk_1.dat 2025-03-31T16:18:46.710371Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:46.712429Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:46.712544Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:46.717887Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19927, node 1 2025-03-31T16:18:46.907780Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:46.907808Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:46.907820Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:46.911224Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28854 TClient is connected to server localhost:28854 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:47.584693Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
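The KqpYql::TableRange error above ("RANGE is not supported on Kikimr clusters", code 1040) refers to YQL's RANGE table function, which selects from a range of tables by name and is not available on Kikimr (YDB) clusters. A hedged sketch of the rejected construct, with the folder and table names invented for illustration:

    -- Fails table intent determination on a YDB cluster
    SELECT * FROM RANGE("/Root/logs", "2025-03-01", "2025-03-31");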
2025-03-31T16:18:47.619157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:47.787659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:18:47.978089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:18:48.069299Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:50.034706Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008895577207935:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:50.034798Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:50.413377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:50.490491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:50.556806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:50.589704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:50.624973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:50.702861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:50.762664Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008895577208454:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:50.762767Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:50.763029Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008895577208459:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:50.767431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:50.780543Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008895577208461:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:18:50.884714Z node 1 :TX_PROXY ERROR: Actor# [1:7488008895577208516:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:51.099173Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008878397336974:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:51.099304Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:18:51.794729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:18:52.437172Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437932467, txId: 281474976710673] shutting down Trying to start YDB, gRPC: 20679, MsgBus: 11376 2025-03-31T16:18:53.183154Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008905029824894:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:53.183228Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000b76/r3tmp/tmpJwxSl4/pdisk_1.dat 2025-03-31T16:18:53.322085Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20679, node 2 2025-03-31T16:18:53.337846Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:53.337988Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:53.345746Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:18:53.390973Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:53.390994Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:53.391003Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:53.391145Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11376 TClient is connected to server localhost:11376 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:53.846057Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:53.868409Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:53.950573Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:54.092224Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:54.171503Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:56.397437Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008917914728561:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:56.397531Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:56.444606Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:56.470853Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:56.497091Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:56.525800Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:56.555523Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:56.585670Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:56.640355Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008917914729068:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:56.640426Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:56.640474Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008917914729073:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:56.643407Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:56.652818Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488008917914729075:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:18:56.738870Z node 2 :TX_PROXY ERROR: Actor# [2:7488008917914729129:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::ScanQueryTruncate [GOOD] Test command err: Trying to start YDB, gRPC: 11925, MsgBus: 4152 2025-03-31T16:18:46.891618Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008877559470815:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:46.891903Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000b68/r3tmp/tmpBlfXDg/pdisk_1.dat 2025-03-31T16:18:47.433165Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:47.433255Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:47.434969Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:18:47.505764Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11925, node 1 2025-03-31T16:18:47.742575Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:47.742609Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:47.742616Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:47.742774Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4152 TClient is connected to server localhost:4152 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:48.424381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:18:48.450064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:48.662262Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:48.927774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:49.024126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:50.943631Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008894739341775:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:50.943741Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:51.284962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:51.317103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:51.355365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:51.393875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:51.431995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:51.507892Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:51.561241Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008899034309588:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:51.561312Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:51.561721Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008899034309593:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:51.565305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:51.575039Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008899034309595:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:18:51.638942Z node 1 :TX_PROXY ERROR: Actor# [1:7488008899034309647:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:51.892108Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008877559470815:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:51.892183Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:18:52.662967Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488008903329277271:2505], status: PRECONDITION_FAILED, issues:
: Error: Default error
:1:746: Error: Scan query should have a single result set., code: 2029
: Error: Default error
:1:746: Error: Scan query should have a single result set., code: 2029
2025-03-31T16:18:52.663220Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjZlYjQ0MzgtZmNiMWEyN2EtM2M1ZTM0ZTQtZTcyYjBmZTg=, ActorId: [1:7488008903329277269:2504], ActorState: ExecuteState, TraceId: 01jqpfza33f903kt7xy80arhwg, ReplyQueryCompileError, status PRECONDITION_FAILED remove tx with tx_id:
: Error: Execution, code: 1060
: Error: Default error
:1:746: Error: Scan query should have a single result set., code: 2029
2025-03-31T16:18:52.765453Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488008903329277304:2518], status: PRECONDITION_FAILED, issues:
: Error: Default error
:1:375: Error: Scan query cannot have data modifications., code: 2029
: Error: Default error
:1:375: Error: Scan query cannot have data modifications., code: 2029
2025-03-31T16:18:52.765669Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=N2E4MGNiNTQtM2NiNGZjOWEtMzEzMDc5MTQtYjg3MWI5MmM=, ActorId: [1:7488008903329277302:2517], ActorState: ExecuteState, TraceId: 01jqpfza6n6a8ypst3jwy0az9x, ReplyQueryCompileError, status PRECONDITION_FAILED remove tx with tx_id:
: Error: Execution, code: 1060
: Error: Default error
:1:375: Error: Scan query cannot have data modifications., code: 2029 Trying to start YDB, gRPC: 26454, MsgBus: 19995 2025-03-31T16:18:53.636439Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008906603051309:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:53.636562Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000b68/r3tmp/tmpxVySVV/pdisk_1.dat 2025-03-31T16:18:53.751467Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26454, node 2 2025-03-31T16:18:53.794159Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:53.794246Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:53.798075Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:18:53.839698Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:53.839725Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:53.839733Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:53.839852Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19995 TClient is connected to server localhost:19995 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:54.227272Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:54.233899Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:18:54.244685Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:54.294440Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
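The two KQP_COMPILE_ACTOR failures above (code 2029) document the scan-query restrictions exercised by KqpScripting::ScanQueryTruncate: a scan query must produce exactly one result set and may not modify data. Hedged YQL sketches of statements that would be rejected when submitted as scan queries; the table names are modeled on the test fixtures (/Root/EightShard appears later in this log) and are illustrative:

    -- Rejected: a scan query should have a single result set
    SELECT * FROM `/Root/KeyValue`;
    SELECT * FROM `/Root/EightShard`;

    -- Rejected: a scan query cannot have data modifications
    UPSERT INTO `/Root/KeyValue` (Key, Value) VALUES (1u, "One");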
2025-03-31T16:18:54.453536Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:54.516864Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:56.572339Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008919487954964:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:56.572430Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:56.618400Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:56.650144Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:56.683766Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:56.710370Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:56.775777Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:56.804836Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:56.838227Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008919487955476:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:56.838308Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008919487955481:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:56.838315Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:56.841332Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:56.849972Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488008919487955483:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:18:56.917847Z node 2 :TX_PROXY ERROR: Actor# [2:7488008919487955536:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:57.738314Z node 2 :TX_DATASHARD ERROR: Undelivered event: 65542, at: [2:7488008923782923232:2057], tablet: [2:7488008910898019388:2328], scanId: 1, table: /Root/EightShard 2025-03-31T16:18:57.746603Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437937773, txId: 281474976715671] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::Closure [GOOD] Test command err: Trying to start YDB, gRPC: 10135, MsgBus: 25658 2025-03-31T16:18:40.863830Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008852401585766:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:40.863974Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000bf9/r3tmp/tmpAJAhBr/pdisk_1.dat 2025-03-31T16:18:41.409396Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:41.412763Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:41.413012Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:41.415648Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10135, node 1 2025-03-31T16:18:41.601527Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:41.601550Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:41.601559Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:41.601668Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25658 TClient is connected to server localhost:25658 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:18:42.666716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:42.705908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:42.993995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:43.373904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:18:43.484028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.192030Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008873876424023:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:45.192136Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:45.563713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.608170Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.662250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.723190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.760994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.834755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.868529Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008852401585766:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:45.869056Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:18:45.890066Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008873876424537:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:45.890166Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:45.891518Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008873876424542:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:45.898021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:45.913737Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008873876424544:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:18:45.993093Z node 1 :TX_PROXY ERROR: Actor# [1:7488008873876424600:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 27699, MsgBus: 21786 2025-03-31T16:18:48.012191Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008885304137484:2126];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:48.033802Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000bf9/r3tmp/tmpiAODJu/pdisk_1.dat 2025-03-31T16:18:48.205587Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:48.227407Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:48.227489Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:48.231127Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27699, node 2 2025-03-31T16:18:48.319652Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:48.319673Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:48.319680Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:48.319773Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21786 TClient is connected to server localhost:21786 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:48.939349Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
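The "Scheduled retry for error: Transaction ... completed, doublechecking" records show the second half of the same flow: once the create transaction reports completion, the creator re-reads the catalog to confirm the pool is actually visible, retrying after a delay until it is. A hedged sketch of such a poll-with-backoff double check (timings and names here are assumptions, not YDB internals):

    #include <algorithm>
    #include <chrono>
    #include <functional>
    #include <thread>

    // Re-check a condition after an operation reports completion, with a
    // capped exponential backoff between attempts. Returns true once the
    // effect is visible, false if it never shows up within maxAttempts.
    bool WaitUntilVisible(const std::function<bool()>& isVisible,
                          std::chrono::milliseconds firstDelay,
                          int maxAttempts) {
        auto delay = firstDelay;
        for (int attempt = 0; attempt < maxAttempts; ++attempt) {
            if (isVisible()) {
                return true;
            }
            std::this_thread::sleep_for(delay);
            delay = std::min(delay * 2, std::chrono::milliseconds(1000));
        }
        return false;
    }

A caller would pass a lambda that re-fetches the pool description; in the log above the retry succeeds on the next pass, after which the TX_PROXY record confirms the path already exists.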
2025-03-31T16:18:48.950746Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:18:48.959162Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:49.070092Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:49.255792Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:49.346202Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:51.706931Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008898189041062:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:51.707018Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:51.760316Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:51.799279Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:51.842818Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:51.877909Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:51.924159Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:51.994661Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:52.065090Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008902484008869:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:52.065172Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:52.065450Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008902484008876:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:52.069658Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:52.078562Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488008902484008878:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:18:52.134120Z node 2 :TX_PROXY ERROR: Actor# [2:7488008902484008931:3438] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:53.013814Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488008885304137484:2126];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:53.013868Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::EvaluateExprYsonAndType [GOOD] Test command err: Trying to start YDB, gRPC: 10203, MsgBus: 16177 2025-03-31T16:18:40.892231Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008852068935761:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:40.896163Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000bcc/r3tmp/tmpJXdDK4/pdisk_1.dat 2025-03-31T16:18:41.546778Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:41.563588Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:41.563657Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:41.571363Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10203, node 1 2025-03-31T16:18:41.707680Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:41.707709Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:41.707716Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:41.707841Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16177 TClient is connected to server localhost:16177 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:18:42.564938Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:42.599292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:42.755346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:43.020883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:18:43.106384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:18:44.752643Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008869248806701:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:44.752746Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:45.475914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.521597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.574688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.644310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.671600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.725952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.818085Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008873543774522:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:45.818186Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:45.824794Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008873543774527:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:45.830441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:45.842501Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008873543774529:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:18:45.877234Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008852068935761:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:45.877308Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:18:45.927430Z node 1 :TX_PROXY ERROR: Actor# [1:7488008873543774585:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 5820, MsgBus: 10414 2025-03-31T16:18:48.037304Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008883884295519:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:48.037362Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000bcc/r3tmp/tmpdOtxfu/pdisk_1.dat 2025-03-31T16:18:48.146634Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:48.146736Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:48.148161Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5820, node 2 2025-03-31T16:18:48.156822Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:48.231701Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:48.231720Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:48.231727Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:48.231840Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10414 TClient is connected to server localhost:10414 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
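A different failure mode also recurs above: TTableExistsActor first logs event=undelivered while the cluster is still coming up, then event=timeout a few seconds later, and the caller reports "cannot detect path existence ... error=timeout" instead of hanging. A rough sketch of such a bounded existence probe (the slow worker and the Existence enum are illustrative, not YDB code):

    #include <chrono>
    #include <future>
    #include <iostream>
    #include <thread>

    enum class Existence { Yes, No, Unknown };

    // Probe with a deadline: if no answer arrives in time (for example the
    // peer is not up yet and the request went undelivered), report Unknown
    // rather than blocking forever.
    Existence CheckPathExists(std::chrono::milliseconds timeout) {
        std::future<bool> reply = std::async(std::launch::async, [] {
            // Stand-in for a scheme request that may be answered very late.
            std::this_thread::sleep_for(std::chrono::seconds(10));
            return true;
        });
        if (reply.wait_for(timeout) == std::future_status::ready) {
            return reply.get() ? Existence::Yes : Existence::No;
        }
        // Note: std::async's future still joins the task in its destructor;
        // a real actor system would instead abandon or cancel the request.
        return Existence::Unknown;
    }

    int main() {
        if (CheckPathExists(std::chrono::milliseconds(100)) == Existence::Unknown) {
            std::cerr << "cannot detect path existence; error=timeout\n";
        }
    }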
2025-03-31T16:18:48.859470Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:48.868649Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:18:48.881894Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:48.988880Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:49.178405Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:49.268358Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:51.467205Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008896769199140:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:51.467298Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:51.520545Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:51.557880Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:51.589871Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:51.624429Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:51.693144Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:51.766947Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:51.832939Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008896769199656:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:51.833041Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:51.838335Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008896769199661:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:51.842618Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:51.856498Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488008896769199663:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:18:51.929799Z node 2 :TX_PROXY ERROR: Actor# [2:7488008896769199716:3435] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:53.037875Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488008883884295519:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:53.037965Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::JoinIndexLookup [GOOD] Test command err: Trying to start YDB, gRPC: 8293, MsgBus: 17033 2025-03-31T16:18:40.864383Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008851399275495:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:40.864449Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000bd9/r3tmp/tmpxAQqvi/pdisk_1.dat 2025-03-31T16:18:41.390002Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:41.390172Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:41.398728Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:18:41.432396Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8293, node 1 2025-03-31T16:18:41.601072Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:41.601089Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:41.601094Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:41.601190Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17033 TClient is connected to server localhost:17033 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:18:42.570367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:42.605726Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:42.775178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:42.987047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:43.082805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:44.752371Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008868579146443:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:44.752489Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:45.476037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.516180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.552695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.623564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.665173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.711811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.782799Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008872874114258:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:45.782872Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:45.782914Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008872874114264:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:45.786706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:45.798581Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008872874114266:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:18:45.864426Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008851399275495:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:45.864496Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:18:45.887932Z node 1 :TX_PROXY ERROR: Actor# [1:7488008872874114321:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:48.053507Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437928008, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 21506, MsgBus: 10511 2025-03-31T16:18:49.211538Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008891489860880:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:49.226009Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000bd9/r3tmp/tmpAMX6pH/pdisk_1.dat 2025-03-31T16:18:49.462037Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:49.512265Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:49.512346Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:49.518468Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21506, node 2 2025-03-31T16:18:49.631698Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:49.631718Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:49.631731Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:49.631833Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10511 TClient is connected to server localhost:10511 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:50.105667Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:50.122983Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:18:50.137411Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:50.239808Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:50.395969Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:50.485168Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:52.902820Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008904374764539:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:52.905709Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:52.949891Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:53.026993Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:53.065941Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:53.109841Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:53.149818Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:53.232124Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:53.316265Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008908669732357:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:53.316361Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:53.316497Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008908669732362:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:53.319645Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:53.329732Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488008908669732364:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:18:53.389213Z node 2 :TX_PROXY ERROR: Actor# [2:7488008908669732416:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:54.212277Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488008891489860880:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:54.212351Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::PgIntPrimaryKey [GOOD] Test command err: Trying to start YDB, gRPC: 62459, MsgBus: 22601 2025-03-31T16:18:44.014792Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008867528141278:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:44.017745Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000bae/r3tmp/tmp3SOjyg/pdisk_1.dat 2025-03-31T16:18:44.486160Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:44.490119Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:44.490219Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:44.493467Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62459, node 1 2025-03-31T16:18:44.631446Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:44.631468Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:44.631473Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:44.631598Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22601 TClient is connected to server localhost:22601 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
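The "KqpSnapshotManager: discarding snapshot; our snapshot: [step: ..., txId: ...] shutting down" records earlier in this output show snapshot cleanup tied to query shutdown. One common way to guarantee that a held snapshot is released exactly once, however the query terminates, is RAII ownership; a minimal sketch, where Snapshot, SnapshotHolder and the discard callback are hypothetical names rather than YDB types:

    #include <cstdint>
    #include <functional>
    #include <iostream>
    #include <utility>

    struct Snapshot {
        uint64_t Step = 0;
        uint64_t TxId = 0;
    };

    class SnapshotHolder {
    public:
        SnapshotHolder(Snapshot snapshot,
                       std::function<void(const Snapshot&)> discard)
            : Snapshot_(snapshot), Discard_(std::move(discard)) {}
        ~SnapshotHolder() {
            if (Discard_) {
                Discard_(Snapshot_); // runs on shutdown or early destruction
            }
        }
        // Single owner: copying would risk discarding the snapshot twice.
        SnapshotHolder(const SnapshotHolder&) = delete;
        SnapshotHolder& operator=(const SnapshotHolder&) = delete;
    private:
        Snapshot Snapshot_;
        std::function<void(const Snapshot&)> Discard_;
    };

    int main() {
        SnapshotHolder holder(
            Snapshot{1743437937773, 281474976715671},
            [](const Snapshot& s) {
                std::cout << "discarding snapshot [step: " << s.Step
                          << ", txId: " << s.TxId << "] shutting down\n";
            });
        // Query work would run here; leaving scope discards the snapshot once.
    }

Deleting the copy operations enforces a single owner, so the discard message can be emitted at most once no matter which path shuts the query down.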
2025-03-31T16:18:45.274726Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:45.321651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:45.482556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:45.647936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:45.747263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:47.519167Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008880413044802:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:47.519283Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:47.881245Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:47.965308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:48.039506Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:48.078611Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:48.125178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:48.168139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:48.236835Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008884708012618:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:48.236925Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:48.237137Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008884708012623:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:48.241058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:48.266009Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008884708012625:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:18:48.350220Z node 1 :TX_PROXY ERROR: Actor# [1:7488008884708012681:3458] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:49.011249Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008867528141278:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:49.011375Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
: Error: Optimization, code: 1070
:4:20: Error: RefSelect mode isn't supported by provider: kikimr Trying to start YDB, gRPC: 21645, MsgBus: 12575 2025-03-31T16:18:50.353500Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008895896211295:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:50.353567Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000bae/r3tmp/tmpPgKYmk/pdisk_1.dat 2025-03-31T16:18:50.468964Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:50.481771Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:50.481838Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:50.483442Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21645, node 2 2025-03-31T16:18:50.544922Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:50.544947Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:50.544954Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:50.545058Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12575 TClient is connected to server localhost:12575 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:51.024296Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:53.731790Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008908781113816:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:53.731869Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:53.756473Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:18:53.833880Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008908781113919:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:53.833991Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:53.834323Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008908781113924:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:53.838324Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-31T16:18:53.850034Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488008908781113926:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-31T16:18:53.928166Z node 2 :TX_PROXY ERROR: Actor# [2:7488008908781113977:2394] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::CreateTableInsideThenStopTenantAndForceDeleteSubDomain-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: 2025-03-31T16:18:30.844308Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008808015686128:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:30.844667Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001b21/r3tmp/tmpjqzacX/pdisk_1.dat 2025-03-31T16:18:31.172946Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:31.229229Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:31.229338Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:31.233035Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2543 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-03-31T16:18:31.370638Z node 1 :TX_PROXY DEBUG: actor# [1:7488008808015686367:2123] Handle TEvNavigate describe path dc-1 2025-03-31T16:18:31.370756Z node 1 :TX_PROXY DEBUG: Actor# [1:7488008812310654123:2439] HANDLE EvNavigateScheme dc-1 2025-03-31T16:18:31.370935Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7488008808015686397:2139], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:18:31.371166Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7488008808015686397:2139], path# /dc-1, domainOwnerId# 72057594046644480 2025-03-31T16:18:31.371473Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008812310654124:2440][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-03-31T16:18:31.373394Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008808015686036:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7488008812310654129:2440] 2025-03-31T16:18:31.373393Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008808015686033:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7488008812310654128:2440] 2025-03-31T16:18:31.373460Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7488008808015686036:2053] Subscribe: subscriber# [1:7488008812310654129:2440], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-31T16:18:31.373461Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7488008808015686033:2050] Subscribe: subscriber# 
[1:7488008812310654128:2440], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-31T16:18:31.373523Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008808015686039:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7488008812310654130:2440] 2025-03-31T16:18:31.373547Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7488008808015686039:2056] Subscribe: subscriber# [1:7488008812310654130:2440], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-31T16:18:31.373567Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008812310654128:2440][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7488008808015686033:2050] 2025-03-31T16:18:31.373600Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008808015686033:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7488008812310654128:2440] 2025-03-31T16:18:31.373610Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008812310654129:2440][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7488008808015686036:2053] 2025-03-31T16:18:31.373631Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008808015686036:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7488008812310654129:2440] 2025-03-31T16:18:31.373643Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008812310654130:2440][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7488008808015686039:2056] 2025-03-31T16:18:31.373658Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008808015686039:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7488008812310654130:2440] 2025-03-31T16:18:31.373689Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008812310654124:2440][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7488008812310654125:2440] 2025-03-31T16:18:31.373716Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008812310654124:2440][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7488008812310654126:2440] 2025-03-31T16:18:31.373760Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7488008812310654124:2440][/dc-1] Set up state: owner# [1:7488008808015686397:2139], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-03-31T16:18:31.373917Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008812310654124:2440][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7488008812310654127:2440] 2025-03-31T16:18:31.373973Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7488008812310654124:2440][/dc-1] Path was already updated: owner# [1:7488008808015686397:2139], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], 
Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-03-31T16:18:31.374019Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008812310654128:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488008812310654125:2440], cookie# 1 2025-03-31T16:18:31.374033Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008812310654129:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488008812310654126:2440], cookie# 1 2025-03-31T16:18:31.374058Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008812310654130:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488008812310654127:2440], cookie# 1 2025-03-31T16:18:31.374090Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008808015686033:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488008812310654128:2440], cookie# 1 2025-03-31T16:18:31.374113Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008808015686036:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488008812310654129:2440], cookie# 1 2025-03-31T16:18:31.374129Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7488008808015686039:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7488008812310654130:2440], cookie# 1 2025-03-31T16:18:31.374153Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008812310654128:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488008808015686033:2050], cookie# 1 2025-03-31T16:18:31.374167Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008812310654129:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488008808015686036:2053], cookie# 1 2025-03-31T16:18:31.374195Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7488008812310654130:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488008808015686039:2056], cookie# 1 2025-03-31T16:18:31.374225Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008812310654124:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488008812310654125:2440], cookie# 1 2025-03-31T16:18:31.374250Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008812310654124:2440][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-03-31T16:18:31.374267Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008812310654124:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488008812310654126:2440], cookie# 1 2025-03-31T16:18:31.374289Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008812310654124:2440][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-03-31T16:18:31.374316Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008812310654124:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7488008812310654127:2440], cookie# 1 2025-03-31T16:18:31.374357Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7488008812310654124:2440][/dc-1] Unexpected sync response: sender# [1:7488008812310654127:2440], cookie# 1 2025-03-31T16:18:31.419379Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7488008808015686397:2139], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 
PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-03-31T16:18:31.420503Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7488008808015686397:2139], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVers ... 
ncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:18:49.496644Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7488008850843899855:2127], cacheItem# { Subscriber: { Subscriber: [3:7488008855138867925:2690] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:18:49.496804Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7488008889498607505:3538], recipient# [3:7488008889498607504:2344], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:18:49.552459Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7488008850843899855:2127], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:18:49.552613Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7488008850843899855:2127], cacheItem# { Subscriber: { Subscriber: [3:7488008855138867925:2690] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:18:49.552750Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7488008889498607507:3539], recipient# [3:7488008889498607506:2345], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:18:50.400783Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7488008850843899855:2127], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:18:50.400936Z node 3 
:TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7488008850843899855:2127], cacheItem# { Subscriber: { Subscriber: [3:7488008872318737569:3001] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:18:50.401016Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7488008893793574827:3543], recipient# [3:7488008893793574826:2346], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:18:50.497547Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7488008850843899855:2127], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:18:50.497676Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7488008850843899855:2127], cacheItem# { Subscriber: { Subscriber: [3:7488008855138867925:2690] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:18:50.497758Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7488008893793574830:3544], recipient# [3:7488008893793574829:2347], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:18:50.556449Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7488008850843899855:2127], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:18:50.556617Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# 
[3:7488008850843899855:2127], cacheItem# { Subscriber: { Subscriber: [3:7488008855138867925:2690] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:18:50.556706Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7488008893793574833:3546], recipient# [3:7488008893793574832:2348], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:18:51.401846Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7488008850843899855:2127], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:18:51.401994Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7488008850843899855:2127], cacheItem# { Subscriber: { Subscriber: [3:7488008872318737569:3001] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:18:51.402080Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7488008898088542155:3556], recipient# [3:7488008898088542154:2349], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:18:51.501666Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7488008850843899855:2127], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:18:51.501807Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7488008850843899855:2127], cacheItem# { 
Subscriber: { Subscriber: [3:7488008855138867925:2690] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:18:51.501894Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7488008898088542159:3557], recipient# [3:7488008898088542158:2350], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::UuidPrimaryKeyBulkUpsert [GOOD] Test command err: Trying to start YDB, gRPC: 4234, MsgBus: 3694 2025-03-31T16:18:43.797124Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008863671573080:2125];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:43.797175Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000bb3/r3tmp/tmpkyx2zS/pdisk_1.dat 2025-03-31T16:18:44.357833Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:44.363054Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:44.367167Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:44.372344Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4234, node 1 2025-03-31T16:18:44.611628Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:44.611650Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:44.611657Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:44.611780Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3694 TClient is connected to server localhost:3694 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:45.257607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:47.484383Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008880851442864:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:47.484489Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:47.799819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:18:47.993381Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008880851442976:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:47.993463Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:47.993537Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008880851442981:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:47.997255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-31T16:18:48.012690Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008880851442983:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-31T16:18:48.090817Z node 1 :TX_PROXY ERROR: Actor# [1:7488008885146410331:2406] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::UuidPrimaryKey [GOOD] Test command err: Trying to start YDB, gRPC: 24150, MsgBus: 14053 2025-03-31T16:18:45.215759Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008874043900648:2080];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:45.225178Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000b82/r3tmp/tmpisLUJa/pdisk_1.dat 2025-03-31T16:18:45.745245Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:45.745356Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:45.751993Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:18:45.810289Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24150, node 1 2025-03-31T16:18:45.923613Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:45.923636Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:45.923647Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:45.923766Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14053 TClient is connected to server localhost:14053 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:46.820556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
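The warning/error cycle repeated above is the workload manager bootstrapping its default resource pool at /Root/.metadata/workload_manager/pools/default: TPoolFetcherActor reports NOT_FOUND before the pool exists, TPoolCreatorActor creates it (retrying with "doublechecking" after the transaction completes), and a concurrent creator then hits the benign "path exist, request accepts it" error. Pools of this kind can also be created explicitly in YQL; a minimal sketch, assuming YDB's documented workload-manager syntax (the pool name and limit values below are illustrative, not taken from this run):

    CREATE RESOURCE POOL demo_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10,  -- cap on simultaneously executing queries in the pool
        QUEUE_SIZE = 100              -- how many requests may wait once the limit is reached
    );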
2025-03-31T16:18:46.848257Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:18:49.089269Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008891223770463:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:49.089386Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:49.461602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:18:49.614227Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008891223770567:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:49.614308Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:49.614683Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008891223770572:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:49.618965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-31T16:18:49.651563Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-03-31T16:18:49.651868Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008891223770574:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-31T16:18:49.731479Z node 1 :TX_PROXY ERROR: Actor# [1:7488008891223770626:2399] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:50.211218Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488008895518738023:2369], status: GENERIC_ERROR, issues:
:3:25: Error: Invalid value "invalid-uuid" for type Uuid 2025-03-31T16:18:50.212498Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzIxZmY4YTMtNGQzMjA5ZjItZGY2MWJmMzUtNDdjMGFmOGY=, ActorId: [1:7488008891223770437:2329], ActorState: ExecuteState, TraceId: 01jqpfz7pm5g6r1qsmfmqeytqq, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-03-31T16:18:50.216923Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008874043900648:2080];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:50.216978Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptSeveralQueriesComplex [GOOD] Test command err: Trying to start YDB, gRPC: 23955, MsgBus: 24702 2025-03-31T16:18:40.882079Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008849193473130:2200];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:40.899737Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000bf5/r3tmp/tmpOeFp9q/pdisk_1.dat 2025-03-31T16:18:41.496502Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:41.501911Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:41.502001Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:41.507827Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23955, node 1 2025-03-31T16:18:41.607173Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:41.607199Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:41.607204Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:41.607292Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24702 TClient is connected to server localhost:24702 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
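The GENERIC_ERROR reported by KQP_COMPILE_ACTOR above (KqpYql::UuidPrimaryKey) is a compile-time validation failure of a Uuid literal: YQL parses typed literals before execution and rejects the query outright. A minimal sketch of the class of statement that triggers it, with an illustrative schema not taken from the test source:

    -- Uuid is a valid primary-key column type; the failure is in the literal, not the schema.
    CREATE TABLE test_uuid (
        Key Uuid NOT NULL,
        Value Utf8,
        PRIMARY KEY (Key)
    );

    UPSERT INTO test_uuid (Key, Value) VALUES
        (Uuid("invalid-uuid"), "x");  -- rejected at compile time: Invalid value "invalid-uuid" for type Uuid

With a well-formed literal such as Uuid("f9d5cc3f-f1dc-4d9c-b97e-766e57ca4ccb") the same statement compiles and executes normally.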
2025-03-31T16:18:42.680154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:42.703515Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:18:42.725448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:42.957401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:43.202998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:43.370588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:45.044870Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008870668311252:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:45.044993Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:45.480429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.519666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.555604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.609371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.657600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.713132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.808901Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008870668311766:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:45.808979Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:45.809400Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008870668311771:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:45.814656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:45.825869Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008870668311773:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:18:45.880893Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008849193473130:2200];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:45.880966Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:18:45.903798Z node 1 :TX_PROXY ERROR: Actor# [1:7488008870668311829:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 17993, MsgBus: 14345 2025-03-31T16:18:48.143462Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008886069273951:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:48.143531Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000bf5/r3tmp/tmpBwODar/pdisk_1.dat 2025-03-31T16:18:48.289964Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:48.331357Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:48.331443Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 17993, node 2 2025-03-31T16:18:48.336800Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:18:48.426105Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:48.426128Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:48.426135Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:48.426256Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14345 TClient is connected to server localhost:14345 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:18:49.056331Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:49.067357Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:18:49.085362Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:49.177543Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:49.363057Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:49.453424Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:51.402549Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008898954177581:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:51.402670Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:51.449002Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:51.481785Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:51.513730Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:51.589242Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:51.654968Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:51.738485Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:51.794001Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008898954178102:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:51.794110Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:51.794404Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008898954178107:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:51.798101Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:51.815129Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488008898954178109:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:18:51.892324Z node 2 :TX_PROXY ERROR: Actor# [2:7488008898954178163:3442] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:53.144395Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488008886069273951:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:53.144454Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Result: [[[[101u]]];[[[102u]]];[[[103u]]];[[[104u]]];[[[105u]]]] >> KqpScripting::StreamExecuteYqlScriptPg [GOOD] >> KqpScripting::StreamExecuteYqlScriptScanClientOperationTimeoutBruteForce [GOOD] |89.1%| [TA] $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/test-results/unittest/{meta.json ... results_accumulator.log} >> Cdc::HugeKeyDebezium [GOOD] >> Cdc::Drop[PqRunner] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::EvaluateFor [GOOD] Test command err: Trying to start YDB, gRPC: 21024, MsgBus: 24476 2025-03-31T16:18:48.819960Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008884577930216:2203];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:48.824216Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000b66/r3tmp/tmpM679Yw/pdisk_1.dat 2025-03-31T16:18:49.455929Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:49.490023Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:49.490123Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:49.496208Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21024, node 1 2025-03-31T16:18:49.663618Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:49.663639Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:49.663646Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:49.663756Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24476 TClient is connected to server localhost:24476 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:50.297115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:50.336000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:18:50.471110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:50.639126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:18:50.720618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:52.392243Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008901757801024:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:52.392358Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:52.758579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:52.805218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:52.840631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:52.911765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:52.953259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:52.996433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:53.103704Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008906052768842:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:53.103792Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:53.104054Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008906052768847:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:53.107715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:53.117909Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008906052768849:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:18:53.227678Z node 1 :TX_PROXY ERROR: Actor# [1:7488008906052768905:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:53.825308Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008884577930216:2203];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:53.825417Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 6783, MsgBus: 1604 2025-03-31T16:18:54.824945Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008911098775180:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:54.824988Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000b66/r3tmp/tmpdFsWLy/pdisk_1.dat 2025-03-31T16:18:54.913158Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6783, node 2 2025-03-31T16:18:54.960029Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:54.960113Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:54.961717Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:18:54.978085Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:54.978103Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:54.978108Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:54.978181Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1604 TClient is connected to server localhost:1604 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:18:55.380099Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:55.385488Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:18:55.397938Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:55.448347Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:55.581748Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:18:55.637514Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-31T16:18:57.852930Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008923983678828:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:57.853076Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:57.871853Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:57.895639Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:57.918592Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:57.945300Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:57.980966Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:58.010788Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:58.047333Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008928278646635:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:58.047419Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:58.047455Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008928278646640:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:58.050452Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:58.058991Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488008928278646642:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:18:58.159006Z node 2 :TX_PROXY ERROR: Actor# [2:7488008928278646697:3442] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |89.1%| [TA] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpScripting::StreamExecuteYqlScriptScanWriteCancelAfterBruteForced >> KqpScripting::UnsafeTimestampCast >> KqpYql::TestUuidPrimaryKeyPrefixSearch >> KqpScripting::SelectNullType >> KqpScripting::ScriptingCreateAndAlterTableTest >> KqpYql::UpdateBadType >> KqpYql::TableConcat >> KqpYql::EvaluateExpr1 >> KqpYql::ScriptUdf >> KqpScripting::QueryStats >> KqpYql::UuidPrimaryKeyDisabled >> KqpYql::InsertCV+useSink >> KqpYql::BinaryJsonOffsetBound >> YdbOlapStore::BulkUpsert [GOOD] >> YdbOlapStore::DuplicateRows >> KqpYql::JsonNumberPrecision [GOOD] >> KqpYql::DdlDmlMix [GOOD] >> KqpYql::CreateUseTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptScanClientOperationTimeoutBruteForce [GOOD] Test command err: Trying to start YDB, gRPC: 1363, MsgBus: 26211 2025-03-31T16:18:45.059941Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008874329016347:2198];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:45.065179Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000b90/r3tmp/tmpln06d7/pdisk_1.dat 2025-03-31T16:18:45.619536Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:45.622384Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:45.622469Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:45.625424Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1363, node 1 2025-03-31T16:18:45.799922Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:45.799945Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:45.799951Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:45.800049Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26211 TClient is connected to server localhost:26211 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:46.502434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:18:46.540584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:18:46.659020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:46.845349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:46.933221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:48.667490Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008887213919877:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:48.667623Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:49.185098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:49.262246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:49.302234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:49.389027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:49.475044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:49.544379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:49.619568Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008891508887695:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:49.619689Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:49.620033Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008891508887701:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:49.624384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:49.646403Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008891508887703:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:18:49.711826Z node 1 :TX_PROXY ERROR: Actor# [1:7488008891508887758:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:50.054041Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008874329016347:2198];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:50.054100Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:18:51.655178Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7488008895803855347:2497] 2025-03-31T16:18:51.657223Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7488008895803855413:2503] TxId: 281474976710672. Ctx: { TraceId: 01jqpfz84ye55e5fgvwtv1rzee, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTI1MzVmMmEtYWIzYTQxMzUtMWMyODE1ZDQtYzUyNDQwOWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
<main>: Error: Client lost } 2025-03-31T16:18:51.657537Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTI1MzVmMmEtYWIzYTQxMzUtMWMyODE1ZDQtYzUyNDQwOWM=, ActorId: [1:7488008895803855360:2503], ActorState: ExecuteState, TraceId: 01jqpfz84ye55e5fgvwtv1rzee, Create QueryResponse for error on request, msg: 2025-03-31T16:18:51.667377Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488008895803855417:2507], TxId: 281474976710672, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=NTI1MzVmMmEtYWIzYTQxMzUtMWMyODE1ZDQtYzUyNDQwOWM=. CustomerSuppliedId : . TraceId : 01jqpfz84ye55e5fgvwtv1rzee. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7488008895803855413:2503], status: ABORTED, reason: {
<main>: Error: Terminate execution } 2025-03-31T16:18:51.667377Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488008895803855423:2513], TxId: 281474976710672, task: 7. Ctx: { TraceId : 01jqpfz84ye55e5fgvwtv1rzee. SessionId : ydb://session/3?node_id=1&id=NTI1MzVmMmEtYWIzYTQxMzUtMWMyODE1ZDQtYzUyNDQwOWM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7488008895803855413:2503], status: ABORTED, reason: {
<main>: Error: Terminate execution } 2025-03-31T16:18:51.667867Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488008895803855424:2514], TxId: 281474976710672, task: 8. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=NTI1MzVmMmEtYWIzYTQxMzUtMWMyODE1ZDQtYzUyNDQwOWM=. TraceId : 01jqpfz84ye55e5fgvwtv1rzee. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7488008895803855413:2503], status: ABORTED, reason: {
<main>: Error: Terminate execution } 2025-03-31T16:18:51.668036Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488008895803855425:2515], TxId: 281474976710672, task: 9. Ctx: { SessionId : ydb://session/3?node_id=1&id=NTI1MzVmMmEtYWIzYTQxMzUtMWMyODE1ZDQtYzUyNDQwOWM=. CustomerSuppliedId : . TraceId : 01jqpfz84ye55e5fgvwtv1rzee. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7488008895803855413:2503], status: ABORTED, reason: {
<main>: Error: Terminate execution } 2025-03-31T16:18:51.668300Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488008895803855418:2508], TxId: 281474976710672, task: 2. Ctx: { TraceId : 01jqpfz84ye55e5fgvwtv1rzee. SessionId : ydb://session/3?node_id=1&id=NTI1MzVmMmEtYWIzYTQxMzUtMWMyODE1ZDQtYzUyNDQwOWM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7488008895803855413:2503], status: ABORTED, reason: {
<main>: Error: Terminate execution } 2025-03-31T16:18:51.668616Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437930906, txId: 281474976710671] shutting down 2025-03-31T16:18:51.668687Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488008895803855419:2509], TxId: 281474976710672, task: 3. Ctx: { SessionId : ydb://session/3?node_id=1&id=NTI1MzVmMmEtYWIzYTQxMzUtMWMyODE1ZDQtYzUyNDQwOWM=. CustomerSuppliedId : . TraceId : 01jqpfz84ye55e5fgvwtv1rzee. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7488008895803855413:2503], status: ABORTED, reason: {
: Error: Terminate execution } 2025-03-31T16:18:51.668823Z node 1 ... 88008922247282030:2520] 2025-03-31T16:18:57.528135Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7488008922247282043:2526] 2025-03-31T16:18:57.559254Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7488008922247282068:2532] 2025-03-31T16:18:57.594519Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7488008922247282086:2537] 2025-03-31T16:18:57.629145Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7488008922247282101:2543] 2025-03-31T16:18:57.668253Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7488008922247282118:2550] 2025-03-31T16:18:57.711184Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7488008922247282134:2556] 2025-03-31T16:18:57.757145Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7488008922247282168:2564] 2025-03-31T16:18:57.804967Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7488008922247282178:2568] 2025-03-31T16:18:57.863547Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7488008922247282200:2577] 2025-03-31T16:18:57.911314Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7488008922247282220:2585] 2025-03-31T16:18:57.968859Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7488008922247282250:2591] 2025-03-31T16:18:58.029494Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7488008922247282274:2600] 2025-03-31T16:18:58.093243Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7488008926542249587:2606] 2025-03-31T16:18:58.159296Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7488008926542249662:2620] TxId: 281474976715672. Ctx: { TraceId: 01jqpfzfdgamz4y65grpdpq2zw, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OTBhMDMxZjctMjc3N2I1MzktMTU0NDFjNGQtNzc1MDU0Y2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
<main>: Error: Client lost } 2025-03-31T16:18:58.159471Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OTBhMDMxZjctMjc3N2I1MzktMTU0NDFjNGQtNzc1MDU0Y2Q=, ActorId: [2:7488008926542249634:2620], ActorState: ExecuteState, TraceId: 01jqpfzfdgamz4y65grpdpq2zw, Create QueryResponse for error on request, msg: 2025-03-31T16:18:58.159523Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7488008926542249671:2624], TxId: 281474976715672, task: 4. Ctx: { CustomerSuppliedId : . TraceId : 01jqpfzfdgamz4y65grpdpq2zw. SessionId : ydb://session/3?node_id=2&id=OTBhMDMxZjctMjc3N2I1MzktMTU0NDFjNGQtNzc1MDU0Y2Q=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7488008926542249662:2620], status: ABORTED, reason: {
<main>: Error: Terminate execution } 2025-03-31T16:18:58.159881Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437938200, txId: 281474976715671] shutting down 2025-03-31T16:18:58.160103Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7488008926542249667:2621], TxId: 281474976715672, task: 1. Ctx: { TraceId : 01jqpfzfdgamz4y65grpdpq2zw. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=OTBhMDMxZjctMjc3N2I1MzktMTU0NDFjNGQtNzc1MDU0Y2Q=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7488008926542249662:2620], status: ABORTED, reason: {
<main>: Error: Terminate execution } 2025-03-31T16:18:58.160108Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7488008926542249670:2623], TxId: 281474976715672, task: 3. Ctx: { SessionId : ydb://session/3?node_id=2&id=OTBhMDMxZjctMjc3N2I1MzktMTU0NDFjNGQtNzc1MDU0Y2Q=. TraceId : 01jqpfzfdgamz4y65grpdpq2zw. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7488008926542249662:2620], status: ABORTED, reason: {
<main>: Error: Terminate execution } 2025-03-31T16:18:58.160638Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7488008926542249623:2615] 2025-03-31T16:18:58.160674Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7488008926542249669:2622], TxId: 281474976715672, task: 2. Ctx: { SessionId : ydb://session/3?node_id=2&id=OTBhMDMxZjctMjc3N2I1MzktMTU0NDFjNGQtNzc1MDU0Y2Q=. CustomerSuppliedId : . TraceId : 01jqpfzfdgamz4y65grpdpq2zw. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7488008926542249662:2620], status: ABORTED, reason: {
<main>: Error: Terminate execution } 2025-03-31T16:18:58.161014Z node 2 :TX_DATASHARD ERROR: Undelivered event: 65542, at: [2:7488008926542249715:2056], tablet: [2:7488008909362378257:2329], scanId: 3, table: /Root/EightShard 2025-03-31T16:18:58.162333Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7488008926542249672:2625], TxId: 281474976715672, task: 5. Ctx: { TraceId : 01jqpfzfdgamz4y65grpdpq2zw. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=OTBhMDMxZjctMjc3N2I1MzktMTU0NDFjNGQtNzc1MDU0Y2Q=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7488008926542249662:2620], status: ABORTED, reason: {
<main>: Error: Terminate execution } 2025-03-31T16:18:58.162735Z node 2 :TX_DATASHARD ERROR: Undelivered event: 65542, at: [2:7488008926542249718:2057], tablet: [2:7488008909362378253:2327], scanId: 4, table: /Root/EightShard 2025-03-31T16:18:58.229034Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7488008926542249730:2630] 2025-03-31T16:18:58.301141Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7488008926542249762:2639] 2025-03-31T16:18:58.376866Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7488008926542249831:2653] TxId: 281474976715675. Ctx: { TraceId: 01jqpfzfm1fmt2btjvehjg4wnj, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjQxOTliN2EtMjhkYzlkNDQtYmRlMzY2ZGQtNDk4MTYwZDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
<main>: Error: Client lost } 2025-03-31T16:18:58.377074Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YjQxOTliN2EtMjhkYzlkNDQtYmRlMzY2ZGQtNDk4MTYwZDQ=, ActorId: [2:7488008926542249802:2653], ActorState: ExecuteState, TraceId: 01jqpfzfm1fmt2btjvehjg4wnj, Create QueryResponse for error on request, msg: 2025-03-31T16:18:58.377141Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7488008926542249784:2648] 2025-03-31T16:18:58.377531Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437938417, txId: 281474976715674] shutting down 2025-03-31T16:18:58.378281Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7488008926542249836:2654], TxId: 281474976715675, task: 1. Ctx: { TraceId : 01jqpfzfm1fmt2btjvehjg4wnj. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YjQxOTliN2EtMjhkYzlkNDQtYmRlMzY2ZGQtNDk4MTYwZDQ=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7488008926542249831:2653], status: ABORTED, reason: {
<main>: Error: Terminate execution } 2025-03-31T16:18:58.378598Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7488008926542249838:2655], TxId: 281474976715675, task: 2. Ctx: { TraceId : 01jqpfzfm1fmt2btjvehjg4wnj. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YjQxOTliN2EtMjhkYzlkNDQtYmRlMzY2ZGQtNDk4MTYwZDQ=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7488008926542249831:2653], status: ABORTED, reason: {
<main>: Error: Terminate execution } 2025-03-31T16:18:58.378751Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7488008926542249839:2656], TxId: 281474976715675, task: 3. Ctx: { SessionId : ydb://session/3?node_id=2&id=YjQxOTliN2EtMjhkYzlkNDQtYmRlMzY2ZGQtNDk4MTYwZDQ=. TraceId : 01jqpfzfm1fmt2btjvehjg4wnj. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7488008926542249831:2653], status: ABORTED, reason: {
<main>: Error: Terminate execution } 2025-03-31T16:18:58.378918Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7488008926542249840:2657], TxId: 281474976715675, task: 4. Ctx: { SessionId : ydb://session/3?node_id=2&id=YjQxOTliN2EtMjhkYzlkNDQtYmRlMzY2ZGQtNDk4MTYwZDQ=. CustomerSuppliedId : . TraceId : 01jqpfzfm1fmt2btjvehjg4wnj. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7488008926542249831:2653], status: ABORTED, reason: {
<main>: Error: Terminate execution } 2025-03-31T16:18:58.379045Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7488008926542249841:2658], TxId: 281474976715675, task: 5. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YjQxOTliN2EtMjhkYzlkNDQtYmRlMzY2ZGQtNDk4MTYwZDQ=. TraceId : 01jqpfzfm1fmt2btjvehjg4wnj. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7488008926542249831:2653], status: ABORTED, reason: {
: Error: Terminate execution } 2025-03-31T16:18:58.379949Z node 2 :TX_DATASHARD ERROR: Undelivered event: 65542, at: [2:7488008926542249882:2062], tablet: [2:7488008909362378230:2322], scanId: 5, table: /Root/EightShard 2025-03-31T16:18:58.387472Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715675. Snapshot is not valid, tabletId: 72075186224037896, step: 1743437938417 2025-03-31T16:18:58.388875Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715675. Snapshot is not valid, tabletId: 72075186224037895, step: 1743437938417 2025-03-31T16:18:58.390871Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715675. Snapshot is not valid, tabletId: 72075186224037897, step: 1743437938417 2025-03-31T16:18:58.454119Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7488008926542249884:2663] 2025-03-31T16:18:58.524910Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488008905067410151:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:58.524987Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:18:58.553324Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437938585, txId: 281474976715677] shutting down 2025-03-31T16:18:58.559015Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7488008926542249915:2673] 2025-03-31T16:18:58.642962Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7488008926542250023:2688] 2025-03-31T16:18:58.715903Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437938753, txId: 281474976715679] shutting down 2025-03-31T16:18:58.729921Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7488008926542250047:2698] 2025-03-31T16:18:58.820322Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7488008926542250150:2713] 2025-03-31T16:18:58.913003Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7488008926542250171:2722] 2025-03-31T16:18:59.014439Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7488008926542250193:2731] 2025-03-31T16:18:59.089374Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437939124, txId: 281474976715681] shutting down 2025-03-31T16:18:59.203954Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7488008930837217617:2758] 2025-03-31T16:18:59.278634Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437939299, txId: 281474976715683] shutting down 2025-03-31T16:18:59.389308Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7488008930837217742:2782] 2025-03-31T16:18:59.500323Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7488008930837217771:2791] 2025-03-31T16:18:59.615283Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7488008930837217797:2801] 2025-03-31T16:18:59.714323Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437939747, txId: 281474976715685] shutting down >> BackupRestoreS3::PrefixedVectorIndex [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptPg [GOOD] Test command err: Trying to start YDB, gRPC: 6493, MsgBus: 8451 2025-03-31T16:18:40.865507Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008851245732570:2140];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:40.868878Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000bd1/r3tmp/tmpIKLpQl/pdisk_1.dat 2025-03-31T16:18:41.426782Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:41.426886Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:41.428632Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:41.436806Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6493, node 1 2025-03-31T16:18:41.618456Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:41.618493Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:41.618510Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:41.618638Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8451 TClient is connected to server localhost:8451 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:42.605592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:42.647022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:42.929272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:43.172095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:18:43.265238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:45.148921Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008872720570731:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:45.149031Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:45.512944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.555496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.599872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.681877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.722391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.796762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.865250Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008872720571246:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:45.865324Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:45.865799Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008872720571251:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:45.867564Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008851245732570:2140];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:45.867655Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:18:45.869542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:45.886366Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008872720571253:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:18:45.974469Z node 1 :TX_PROXY ERROR: Actor# [1:7488008872720571312:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:47.073257Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDg1Y2I3Zi01YjkwZTQ5OS1lMGZjMjVlZS0zNjNiZjkwNQ==, ActorId: [1:7488008881310506199:2499], ActorState: ExecuteState, TraceId: 01jqpfz4jx9mjdbb54jy9wf7md, Create QueryResponse for error on request, msg: 2025-03-31T16:18:47.081522Z node 1 :KQP_PROXY ERROR: Unknown sender for proxy response, requestId: 5 2025-03-31T16:18:47.081920Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YzM1MjFjNTQtOTkxOGEwYTYtNjUwNTRmMDMtZGFlYWU3YzE=, ActorId: [1:7488008881310506204:2502], ActorState: ExecuteState, TraceId: 01jqpfz4my95czbaavycwm9ycb, Create QueryResponse for error on request, msg: 2025-03-31T16:18:47.110023Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NWNhYmY2MjQtYjAyZTk5YmItNzlmMjc4ZTctMjQzNjZkOGU=, ActorId: [1:7488008881310506219:2508], ActorState: ExecuteState, TraceId: 01jqpfz4nrb2rxexbh1rxcbnm8, Create QueryResponse for error on request, msg: 2025-03-31T16:18:47.136574Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTZmOWZmYTYtYTZmYjVkOWItY2FhMTJjYzEtYTJjZGRjYmE=, ActorId: [1:7488008881310506235:2512], ActorState: ExecuteState, TraceId: 01jqpfz4phasfm9rs0wfhyh1ng, Create QueryResponse for error on request, msg: 2025-03-31T16:18:47.171996Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZGIyNzcxNmUtZmQzOWNiMjUtODhkZWU3YjMtYzU5ZWJiMzg=, ActorId: [1:7488008881310506258:2517], ActorState: ExecuteState, TraceId: 01jqpfz4qe6zy1ypet88sry6hy, Create QueryResponse for error on request, msg: 2025-03-31T16:18:47.240436Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmJiOWUyOWMtOWFkYjgyZi1kMjhlOGFkLTVhNDJjMDk=, ActorId: [1:7488008881310506267:2521], ActorState: ExecuteState, TraceId: 01jqpfz4re2rmcvmfkcmd5gysd, Create QueryResponse for error on request, msg: 2025-03-31T16:18:47.306697Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NGM3MjgyNTMtNTljY2ZiYzItNmQwM2E4YjEtZWFjNjYzOGI=, ActorId: [1:7488008881310506283:2529], ActorState: ExecuteState, TraceId: 01jqpfz4tp76t7nw9ytgh36kge, Create QueryResponse for error on request, msg: 2025-03-31T16:18:47.382023Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTllZTQ2OTktYjQ0NDY0NTQtZjJkMTYxMjYtYjNhYWM3ZjM=, ActorId: [1:7488008881310506317:2536], ActorState: ExecuteState, TraceId: 01jqpfz4wxb68hzapyhnh9fpcz, Create QueryResponse for error on request, msg: 2025-03-31T16:18:47.452598Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzA4NGZiZWYtMjU0MzNkZTItY2MwM2ZhYzgtMWU4ZjQxNDM=, ActorId: [1:7488008881310506329:2542], ActorState: ExecuteState, TraceId: 01jqpfz4z2es6w3v76zpzjrw54, Create QueryResponse for error on request, msg: 2025-03-31T16:18:47.544803Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmNiNWM1MDItZDdkYTRjNDAtMzczYTdmOGQtMzU2MjY2NGE=, ActorId: [1:7488008881310506408:2551], ActorState: ExecuteState, TraceId: 01jqpfz51h9wsskdp17rs2nkp2, Create QueryResponse for error on request, msg: 2025-03-31T16:18:47.613825Z node 1 :KQP_SESSION WARN: SessionId: 
ydb://session/3?node_id=1&id=ODBjMTUyZTAtMzU1MjAwYmUtMTkxMmNkYzgtZjcwZmNlOTU=, ActorId: [1:7488008881310506529:2584], ActorState: ExecuteState, TraceId: 01jqpfz54r8ma8rb1kax5skv04, Create QueryResponse for error on request, msg: 2025-03-31T16:18:47.632760Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437927518, txId: 281474976710672] shutting down 2025-03-31T16:18:47.634801Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437927518, txId: 281474976710671] shutting down 2025-03-31T16:18:47.636787Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437927518, txId: 281474976710673] shutting down 2025-03-31T16:18:47.731356Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZmM1NWY1NWYtM2E4OGI3NTItOWM2ZDE1YjgtZmQwOGJ ... ExecuteState, TraceId: 01jqpfza3naj78pc0z5pvqyqqb, Create QueryResponse for error on request, msg: 2025-03-31T16:18:52.897301Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437932915, txId: 281474976710763] shutting down 2025-03-31T16:18:53.004018Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NmQ0OGY2NzItM2ZmNTkwNGYtNWMyYWQ1YWMtMzYwMTg1ZDk=, ActorId: [1:7488008902785347414:3328], ActorState: ExecuteState, TraceId: 01jqpfza9be26pxrkg7szqjzre, Create QueryResponse for error on request, msg: 2025-03-31T16:18:53.057758Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437933083, txId: 281474976710765] shutting down 2025-03-31T16:18:53.184667Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTQ5NzZkYmItYzlmMTBjNmYtNWU3MzZkMDctZTUwZTBjNjg=, ActorId: [1:7488008907080314822:3348], ActorState: ExecuteState, TraceId: 01jqpfzaev42kejtjqg81rbxea, Create QueryResponse for error on request, msg: 2025-03-31T16:18:53.229478Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437933244, txId: 281474976710767] shutting down 2025-03-31T16:18:53.378517Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjU4ZmRiMGItYzEzMmVlNjctYzAxZGU2ZGEtNDA5ZGFmZDI=, ActorId: [1:7488008907080314974:3375], ActorState: ExecuteState, TraceId: 01jqpfzamrf0aphwghtw2q26d1, Create QueryResponse for error on request, msg: 2025-03-31T16:18:53.440156Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437933468, txId: 281474976710769] shutting down 2025-03-31T16:18:53.565341Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTMyZGEwMDYtNTZiMmI0OTMtYjI5M2ZhM2EtZDJhNDRmMTM=, ActorId: [1:7488008907080315062:3396], ActorState: ExecuteState, TraceId: 01jqpfzate4k7vm48cypnthfe5, Create QueryResponse for error on request, msg: 2025-03-31T16:18:53.579459Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437933601, txId: 281474976710771] shutting down 2025-03-31T16:18:53.767301Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YzRlZTFmMGMtYWFkZTU1NjktZmE1NGNhZGUtYmEzOGZlOWQ=, ActorId: [1:7488008907080315279:3433], ActorState: ExecuteState, TraceId: 01jqpfzb0rca7cvd7j3qve4v6c, Create QueryResponse for error on request, msg: 2025-03-31T16:18:53.823394Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437933846, txId: 281474976710773] shutting down 
2025-03-31T16:18:53.963907Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTJkNzMzMzMtNTBiYmUxMGYtNWFlOWRjNmYtNDU5YzFlNGM=, ActorId: [1:7488008907080315298:3442], ActorState: ExecuteState, TraceId: 01jqpfzb6vcwhgq01cxjpy91gc, Create QueryResponse for error on request, msg: 2025-03-31T16:18:53.979546Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437934007, txId: 281474976710775] shutting down 2025-03-31T16:18:54.161498Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjAyNDdiMDgtMWI5N2M0ZTMtYmMwNTZhMDUtYjc4YmVkYTM=, ActorId: [1:7488008907080315490:3470], ActorState: ExecuteState, TraceId: 01jqpfzbcv8s58c7852k6dg039, Create QueryResponse for error on request, msg: 2025-03-31T16:18:54.169997Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437934196, txId: 281474976710777] shutting down 2025-03-31T16:18:54.359773Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTBhMGU4MDMtOTk4YWQzY2ItYWM5OWIzMzctYWI2MGJmMDg=, ActorId: [1:7488008911375282904:3489], ActorState: ExecuteState, TraceId: 01jqpfzbk021nc0d89v012nc6f, Create QueryResponse for error on request, msg: 2025-03-31T16:18:54.367159Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437934392, txId: 281474976710779] shutting down 2025-03-31T16:18:54.544746Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437934574, txId: 281474976710781] shutting down 2025-03-31T16:18:54.723689Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437934756, txId: 281474976710783] shutting down Trying to start YDB, gRPC: 20508, MsgBus: 2659 2025-03-31T16:18:55.618302Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008917115829920:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:55.618371Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000bd1/r3tmp/tmpKGfmh3/pdisk_1.dat 2025-03-31T16:18:55.755556Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:55.755645Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:55.757751Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20508, node 2 2025-03-31T16:18:55.765282Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:55.802318Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:55.802352Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:55.802360Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:55.802505Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2659 TClient is connected to server localhost:2659 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:56.289888Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:56.305313Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:56.382747Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:56.539551Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:56.619220Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:58.434847Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008930000733558:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:58.434904Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:58.467298Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:58.491757Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:58.515795Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:58.538344Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:58.561393Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:58.596217Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:58.630283Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008930000734067:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:58.630386Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:58.630390Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008930000734072:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:58.633082Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:58.641165Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488008930000734074:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:18:58.739155Z node 2 :TX_PROXY ERROR: Actor# [2:7488008930000734129:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpScripting::StreamExecuteYqlScriptScanOperationTmeoutBruteForce [GOOD] >> Cdc::Write[TopicRunner] [GOOD] >> Cdc::UpdateStream ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::JsonNumberPrecision [GOOD] Test command err: Trying to start YDB, gRPC: 7809, MsgBus: 22115 2025-03-31T16:18:51.620676Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008898396835628:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:51.620732Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000b56/r3tmp/tmp8Ai7xA/pdisk_1.dat 2025-03-31T16:18:52.132048Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:52.152113Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:52.152212Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:52.157424Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7809, node 1 2025-03-31T16:18:52.327715Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:52.327735Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:52.327744Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:52.327855Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22115 TClient is connected to server localhost:22115 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:52.902684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:18:52.918125Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:18:52.926933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:53.072725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:53.246132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:53.322568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:55.027009Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008915576706582:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:55.027214Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:55.304149Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:55.332151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:55.360813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:55.387651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:55.416357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:55.484554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:55.570717Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008915576707103:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:55.570788Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008915576707108:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:55.570801Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:55.574407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:55.583570Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008915576707110:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:18:55.638901Z node 1 :TX_PROXY ERROR: Actor# [1:7488008915576707164:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:56.621074Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008898396835628:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:56.621138Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 24607, MsgBus: 14234 2025-03-31T16:18:57.298185Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008924232162519:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:57.298246Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000b56/r3tmp/tmpua5EAT/pdisk_1.dat 2025-03-31T16:18:57.372490Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24607, node 2 2025-03-31T16:18:57.426908Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:57.426990Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:57.428530Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:18:57.430504Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:57.430520Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:57.430524Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:57.430620Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14234 TClient is connected to server localhost:14234 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:18:57.813007Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:57.829204Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:57.871153Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:58.000409Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:58.071315Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:59.548345Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008932822098875:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:59.548446Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:59.572471Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:59.599781Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:59.628056Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:59.656972Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:59.688442Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:59.760681Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:59.812501Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008932822099389:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:59.812594Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:59.812809Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008932822099394:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:59.816960Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:59.826261Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488008932822099396:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:18:59.910890Z node 2 :TX_PROXY ERROR: Actor# [2:7488008932822099449:3441] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpScripting::ScriptExplainCreatedTable ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestoreS3::PrefixedVectorIndex [GOOD] Test command err: 2025-03-31T16:18:03.552237Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008693377221337:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:03.552413Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001083/r3tmp/tmp06cYWm/pdisk_1.dat 2025-03-31T16:18:04.046593Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:04.078794Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:04.078920Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:04.113174Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9330, node 1 2025-03-31T16:18:04.239680Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:04.239704Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:04.239715Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:04.239905Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11465 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:04.672598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:06.731457Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008706262124258:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:06.731460Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008706262124265:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:06.731581Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:06.735119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:18:06.762640Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008706262124272:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:18:06.830346Z node 1 :TX_PROXY ERROR: Actor# [1:7488008706262124347:2688] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:07.612445Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jqpfxxr2d1hv4dx4p10vce9h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjQ4ODg1YTgtMmRkODVkYWEtYmM5ZGQ4NDMtYTQyYzQxYjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Backup "/Root" to "/home/runner/.ya/build/build_root/0l5c/001083/r3tmp/tmpr9s43r/"Create temporary directory "/Root/~backup_20250331T161807" in databaseProcess "/home/runner/.ya/build/build_root/0l5c/001083/r3tmp/tmpr9s43r/view"Backup view "/Root/view" to "/home/runner/.ya/build/build_root/0l5c/001083/r3tmp/tmpr9s43r/view"Write view into "/home/runner/.ya/build/build_root/0l5c/001083/r3tmp/tmpr9s43r/view/create_view.sql"Write ACL into "/home/runner/.ya/build/build_root/0l5c/001083/r3tmp/tmpr9s43r/view/permissions.pb"Remove temporary directory "/Root/~backup_20250331T161807" in database2025-03-31T16:18:07.809175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710663:0, at schemeshard: 72057594046644480 Backup completed successfullyRestore "/home/runner/.ya/build/build_root/0l5c/001083/r3tmp/tmpr9s43r/" to "/Root"Resolved db base path: "/Root"Restore folder "/home/runner/.ya/build/build_root/0l5c/001083/r3tmp/tmpr9s43r/" to "/Root"Process "/home/runner/.ya/build/build_root/0l5c/001083/r3tmp/tmpr9s43r/view"Restore view "/home/runner/.ya/build/build_root/0l5c/001083/r3tmp/tmpr9s43r/view" to "/Root/view"Read view from "/home/runner/.ya/build/build_root/0l5c/001083/r3tmp/tmpr9s43r/view/create_view.sql"Created "/Root/view"Restore ACL "/home/runner/.ya/build/build_root/0l5c/001083/r3tmp/tmpr9s43r/view" to "/Root/view"Read ACL from "/home/runner/.ya/build/build_root/0l5c/001083/r3tmp/tmpr9s43r/view/permissions.pb"2025-03-31T16:18:07.958969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710666:0, at schemeshard: 72057594046644480 Restore completed successfully2025-03-31T16:18:08.099931Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jqpfxyf1cn13a502y82cyp02, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjQ4ODg1YTgtMmRkODVkYWEtYmM5ZGQ4NDMtYTQyYzQxYjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-31T16:18:09.523493Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488008716244904061:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:09.523545Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001083/r3tmp/tmpdFr8mD/pdisk_1.dat 2025-03-31T16:18:09.718130Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:09.751748Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:09.751836Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:09.758274Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14442, node 4 2025-03-31T16:18:09.820613Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:09.820635Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:09.820642Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:09.820797Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18744 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:10.045183Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:12.278251Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488008729129806985:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:12.278374Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:12.308464Z node 4 :TX_PROXY DEBUG: actor# [4:7488008716244904290:2130] Handle TEvProposeTransaction 2025-03-31T16:18:12.308497Z node 4 :TX_PROXY DEBUG: actor# [4:7488008716244904290:2130] TxId# 281474976710658 ProcessProposeTransaction 2025-03-31T16:18:12.308581Z node 4 :TX_PROXY DEBUG: actor# [4:7488008716244904290:2130] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [4:7488008729129807006:2618] 2025-03-31T16:18:12.384823Z node 4 :TX_PROXY DEBUG: Actor# [4:7488008729129807006:2618] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table" Columns { Name: "Key" Type: "Uint32" NotNull: false } Columns { Name: "Value" Type: "Utf8" NotNull: false } KeyColumnNames: "Key" PartitionConfig { PartitioningPolicy { MinPartitionsCount: 10 SplitByLoadSettings { Enabled: true } } } Temporary: false } } } UserToken: "" DatabaseName: "" 2025-03-31T16:18:12.384887Z node 4 :TX_PROXY DEBUG: Actor# [4:7488008729129807006:2618] txid# 281474976710658 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T16:18:12.385319Z node 4 :TX_PROXY DEBUG: Actor# [4:7488008729129807006:2618] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-03-31T16:18:12.385394Z node 4 :TX_PROXY DEBUG: Actor# [4:7488008729129807006:2618] txid# 281474976710658 TEvNavigateKeySet requested from S ... vateTable: false } 2025-03-31T16:19:00.342977Z node 22 :TX_PROXY DEBUG: Actor# [22:7488008936525207048:4903] Handle TEvDescribeSchemeResult Forward to# [22:7488008936525207046:2443] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/table" PathDescription { Self { Name: "table" PathId: 9 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715760 CreateStep: 1743437939110 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "table" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Group" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "String" TypeId: 4097 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 
5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 0 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } } TableIndexes { Name: "value_idx" LocalPathId: 10 Type: EIndexTypeGlobalVectorKmeansTree State: EIndexStateReady KeyColumnNames: "Group" KeyColumnNames: "Value" SchemaVersion: 2 PathOwnerId: 72057594046644480 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 
ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 
41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } VectorIndexKmeansTreeDescription { Settings { settings { metric: SIMILARITY_INNER_PRODUCT vector_type: VECTOR_TYPE_FLOAT vector_dimension: 768 } clusters: 80 levels: 2 } } } TableSchemaVersion: 3 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 7 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 9 PathOwnerId: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptScanOperationTmeoutBruteForce [GOOD] Test command err: Trying to start YDB, gRPC: 11136, MsgBus: 11050 2025-03-31T16:18:40.882246Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008851846982259:2136];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:40.893712Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: 
SetPath # /home/runner/.ya/build/build_root/0l5c/000bc6/r3tmp/tmpHeMe91/pdisk_1.dat 2025-03-31T16:18:41.464463Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:41.467962Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:41.468070Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:41.474261Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11136, node 1 2025-03-31T16:18:41.608249Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:41.608277Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:41.608285Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:41.608468Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11050 TClient is connected to server localhost:11050 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:42.524608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:42.555755Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:18:42.584678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:42.797814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:43.069717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-03-31T16:18:43.151313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:18:44.975964Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008869026853147:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:44.976074Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:45.481345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.522016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.599263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.679798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.719499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.772859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.829091Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008873321820960:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:45.829175Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:45.829471Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008873321820965:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:45.833361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:45.846714Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008873321820967:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:18:45.878043Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008851846982259:2136];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:45.878086Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:18:45.927890Z node 1 :TX_PROXY ERROR: Actor# [1:7488008873321821022:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:47.031188Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7488008881911755915:2500] 2025-03-31T16:18:47.090525Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7488008881911755922:2503] 2025-03-31T16:18:47.093493Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7488008881911755930:2506] 2025-03-31T16:18:47.109355Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7488008881911755931:2507] 2025-03-31T16:18:47.132397Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7488008881911755977:2517] 2025-03-31T16:18:47.163503Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7488008881911755988:2521] 2025-03-31T16:18:47.202305Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7488008881911755998:2525] 2025-03-31T16:18:47.227595Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7488008881911756011:2531] 2025-03-31T16:18:47.286791Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7488008881911756028:2537] 2025-03-31T16:18:47.302880Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7488008881911756048:2542] 2025-03-31T16:18:47.341420Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7488008881911756078:2549] 2025-03-31T16:18:47.388851Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7488008881911756093:2555] 2025-03-31T16:18:47.437291Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7488008881911756110:2564] 2025-03-31T16:18:47.504510Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7488008881911756122:2568] 2025-03-31T16:18:47.535171Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7488008881911756156:2574] 2025-03-31T16:18:47.591429Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7488008881911756171:2580] 2025-03-31T16:18:47.651473Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7488008881911756186:2586] 2025-03-31T16:18:47.714891Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7488008881911756220:2592] 2025-03-31T16:18:47.778333Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7488008881911756237:2598] 2025-03-31T16:18:47.846511Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7488008881911756266:2606] 2025-03-31T16:18:47.922907Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7488008881911756298:2615] 2025-03-31T16:18:47.995178Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7488008881911756315:2622] 2025-03-31T16:18:48.082529Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7488008881911756350:2629] 2025-03-31T16:18:48.159194Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7488008886206723668:2634] 2025-03-31T16:18:48.239293Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7488008886206723698:2645] 2025-03-31T16:18:48.324833Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: 
[1:7488008886206723713:2651] 2025-03-31T16:18:48.416626Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7488008886206723785:2665] TxId: 281474976710672. Ctx: { TraceId: 01jqpfz5wh1gs42ex14xknknm7, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yjk2OWUzZjYtOWJjYTk5YzEtNWE4YmYwY2YtYzZjYWFiYTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2025-03-31T16:18:48.417925Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Yjk2OWUzZjYtOWJjYTk5YzEtNWE4YmYwY2YtYzZjYWFiYTA=, ActorId: [1:7488008886206723746:2665], ActorState: ExecuteState, TraceId: 01jqpfz5wh1gs42ex14xknknm7, Create QueryResponse for error on request, msg: 2025-03-31T16:18:48.423584Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437928449, txId: 281474976710671] shutting down 2025-03-31T16:18:48.423653Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7488008886206723735:2660] 2025-03-31T16:18:48.426508Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488008886206723793:2669], TxId: 281474976710672, task: 4. Ctx: { TraceId : 01jqpfz ... ing snapshot; our snapshot: [step: 1743437933244, txId: 281474976710685] shutting down 2025-03-31T16:18:53.407284Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7488008907681561581:3036] 2025-03-31T16:18:53.600032Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7488008907681561653:3055] 2025-03-31T16:18:53.769390Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437933804, txId: 281474976710690] shutting down Trying to start YDB, gRPC: 2051, MsgBus: 9031 2025-03-31T16:18:54.582062Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008913055829705:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:54.582136Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000bc6/r3tmp/tmpjsU0nx/pdisk_1.dat 2025-03-31T16:18:54.710502Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:54.732685Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:54.732764Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:54.734550Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2051, node 2 2025-03-31T16:18:54.794271Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:54.794306Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:54.794316Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:54.794444Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9031 TClient is connected to server localhost:9031 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:55.137495Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:55.154034Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:18:55.235656Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-31T16:18:55.424343Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:55.501524Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:57.740007Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008925940733375:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:57.740084Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:57.758154Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:57.784194Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:57.812056Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:57.836221Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:57.861848Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:57.926873Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:57.961803Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008925940733890:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:57.961885Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008925940733895:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:57.961905Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:57.964983Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:57.973038Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488008925940733897:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:18:58.035442Z node 2 :TX_PROXY ERROR: Actor# [2:7488008930235701247:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:58.663771Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=N2UxYWNhMi04MDNjMWQyLTQ3MjBiNjI5LTNkNjcxMDA2, ActorId: [2:7488008930235701533:2497], ActorState: ExecuteState, TraceId: 01jqpfzfyyfrwdd4hcdc0dnmcb, Create QueryResponse for error on request, msg: 2025-03-31T16:18:58.674814Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=N2FkYmIwZGEtMmM0ZWVmMmMtZmJjMGIxNDEtNmFmZjhhYw==, ActorId: [2:7488008930235701550:2504], ActorState: ExecuteState, TraceId: 01jqpfzfz93c8m4bbkjkjgmm4h, Create QueryResponse for error on request, msg: 2025-03-31T16:18:58.713176Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ODg1OWUwMjMtNDgxMmQ5ZDMtYzIyZDAzMjQtM2U4NTNmZmI=, ActorId: [2:7488008930235701561:2508], ActorState: ExecuteState, TraceId: 01jqpfzfzp0hb4csccctaqt0ce, Create QueryResponse for error on request, msg: 2025-03-31T16:18:58.738247Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ODdiZjAzN2ItYzdiMjlkNGMtNDc4YjhiYjMtNzMwOWI2ZGE=, ActorId: [2:7488008930235701572:2512], ActorState: ExecuteState, TraceId: 01jqpfzg0baznay6fqgers8f2t, Create QueryResponse for error on request, msg: 2025-03-31T16:18:58.779229Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MzJlODE0ZTAtYWI4MDc0NWUtZjJhODNhNDQtOGZiNjNhMjQ=, ActorId: [2:7488008930235701602:2525], ActorState: ExecuteState, TraceId: 01jqpfzg1p7kbq9atpjwcw8gg7, Create QueryResponse for error on request, msg: 2025-03-31T16:18:59.582287Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488008913055829705:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:59.582369Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:18:59.881399Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDNmYTYwYTUtMWFkMzNkMDYtNGI1MjhjY2QtMWU5ZDE2Y2Y=, ActorId: [2:7488008934530669336:2664], ActorState: ExecuteState, TraceId: 01jqpfzh2rb9cz0k4xfm2va2d8, Create QueryResponse for error on request, msg: 2025-03-31T16:18:59.974889Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OWQ2YjkzY2ItZTgwNGNkMjYtYjI4NzI3Yy1hYjQzYmE0NQ==, ActorId: [2:7488008934530669357:2673], ActorState: ExecuteState, TraceId: 01jqpfzh5g6vry1pgvj2qaceqb, Create QueryResponse for error on request, msg: 2025-03-31T16:19:00.165745Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437940195, txId: 281474976715672] shutting down 2025-03-31T16:19:00.267916Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=N2UwNDQwNjMtOTE1OWZkYjQtNzBhMTRiNi05OTQ1M2JhNA==, ActorId: [2:7488008938825636830:2703], ActorState: ExecuteState, TraceId: 01jqpfzhec3hk0m21emjsn6yfg, Create QueryResponse for error on request, msg: 2025-03-31T16:19:00.379935Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437940412, txId: 281474976715674] shutting down 
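
[editor's note] The recurring sequence above -- a KQP_WORKLOAD_SERVICE warning that "Resource pool default not found", an ESchemeOpCreateResourcePool suboperation, a "Transaction ... completed, doublechecking" retry, and finally a TX_PROXY "path exist, request accepts it" error -- is the lazy bootstrap of the built-in workload-manager pool: the first queries on a fresh database find no /Root/.metadata/workload_manager/pools/default, the racing sessions each try to create it, and all but one lose the race harmlessly. A minimal sketch of creating such a pool explicitly; the WITH options and their values are illustrative assumptions, not what the test issues:

    -- YQL sketch, hypothetical parameters
    CREATE RESOURCE POOL default WITH (
        CONCURRENT_QUERY_LIMIT = 10,  -- assumed option value
        QUEUE_SIZE = 100              -- assumed option value
    );
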
2025-03-31T16:19:00.400174Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=N2E0MjhiZGItYzE2NGM5MzQtNjRmNThlMGUtZDc1Zjc2NTg=, ActorId: [2:7488008938825636854:2712], ActorState: ExecuteState, TraceId: 01jqpfzhhveh26ygkbpzykqxyc, Create QueryResponse for error on request, msg: 2025-03-31T16:19:00.682797Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437940713, txId: 281474976715676] shutting down 2025-03-31T16:19:00.881162Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437940916, txId: 281474976715678] shutting down 2025-03-31T16:19:01.122717Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437941154, txId: 281474976715680] shutting down |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::NonUniformClusterMirror3dcWithUnusableDomain >> TGroupMapperTest::Mirror3dc3Nodes [GOOD] >> TBlobStorageControllerGrouperTest::TestGroupFromCandidatesTrivial [GOOD] >> TBlobStorageControllerGrouperTest::TestGroupFromCandidatesHuge >> TGroupMapperTest::NonUniformCluster2 >> TGroupMapperTest::NonUniformClusterDifferentSlotsPerDisk [GOOD] >> TGroupMapperTest::NonUniformClusterMirror3dc >> TGroupMapperTest::ReassignGroupTest3dc >> TGroupMapperTest::CheckNotToBreakFailModel [GOOD] >> TGroupMapperTest::SanitizeGroupTest3dc >> TBlobStorageControllerGrouperTest::when_one_server_per_rack_in_4_racks_then_can_construct_group_with_4_domains [GOOD] >> TGroupMapperTest::NonUniformCluster >> TGroupMapperTest::Block42_1disk >> TGroupMapperTest::MakeDisksNonoperational [GOOD] |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::CheckNotToBreakFailModel [GOOD] |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::NonUniformClusterDifferentSlotsPerDisk [GOOD] |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::Mirror3dc3Nodes [GOOD] |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TBlobStorageControllerGrouperTest::TestGroupFromCandidatesTrivial [GOOD] |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TBlobStorageControllerGrouperTest::when_one_server_per_rack_in_4_racks_then_can_construct_group_with_4_domains [GOOD] >> TGroupMapperTest::NonUniformClusterMirror3dcWithUnusableDomain [GOOD] |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::MakeDisksNonoperational [GOOD] >> TBlobStorageControllerGrouperTest::TestGroupFromCandidatesHuge [GOOD] >> TGroupMapperTest::NonUniformClusterMirror3dc [GOOD] >> TGroupMapperTest::InterlacedRacksWithoutInterlacedNodes [GOOD] |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::NonUniformClusterMirror3dcWithUnusableDomain [GOOD] |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TBlobStorageControllerGrouperTest::TestGroupFromCandidatesHuge [GOOD] >> 
TBlobStorageControllerGrouperTest::when_one_server_per_rack_in_4_racks_then_can_construct_group_with_4_domains_and_one_small_node [GOOD] |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::NonUniformClusterMirror3dc [GOOD] >> TGroupMapperTest::MakeDisksForbidden [GOOD] >> TMultiversionObjectMap::MonteCarlo |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::InterlacedRacksWithoutInterlacedNodes [GOOD] |89.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TBlobStorageControllerGrouperTest::when_one_server_per_rack_in_4_racks_then_can_construct_group_with_4_domains_and_one_small_node [GOOD] >> Cdc::Drop[PqRunner] [GOOD] >> Cdc::Drop[YdsRunner] |89.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::MakeDisksForbidden [GOOD] >> TBlobStorageControllerGrouperTest::TestGroupFromCandidatesEmpty [GOOD] >> TGroupMapperTest::MakeDisksUnusable >> KqpYql::UuidPrimaryKeyDisabled [GOOD] >> Cdc::NaN[PqRunner] [GOOD] >> Cdc::NaN[YdsRunner] >> TGroupMapperTest::MapperSequentialCalls >> TGroupMapperTest::MakeDisksUnusable [GOOD] |89.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> KqpYql::TestUuidPrimaryKeyPrefixSearch [GOOD] |89.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::MakeDisksUnusable [GOOD] |89.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TBlobStorageControllerGrouperTest::TestGroupFromCandidatesEmpty [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::UuidPrimaryKeyDisabled [GOOD] Test command err: Trying to start YDB, gRPC: 21998, MsgBus: 10487 2025-03-31T16:19:01.447914Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008941213455846:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:01.448002Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000af5/r3tmp/tmp5lunft/pdisk_1.dat 2025-03-31T16:19:01.806382Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:19:01.828929Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:19:01.829106Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:19:01.857437Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21998, node 1 2025-03-31T16:19:01.899602Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:19:01.899630Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:19:01.899659Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:19:01.899806Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10487 TClient is connected to server localhost:10487 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:19:02.372177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:02.391138Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:19:04.448243Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008954098358381:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:04.448364Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:04.722339Z node 1 :TX_PROXY ERROR: Actor# [1:7488008954098358404:2307] txid# 281474976710658, issues: { message: "Uuid as primary key is forbiden by configuration: key" severity: 1 } 2025-03-31T16:19:04.751818Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008954098358412:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:04.751890Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:04.767303Z node 1 :TX_PROXY ERROR: Actor# [1:7488008954098358419:2315] txid# 281474976710659, issues: { message: "Uuid as primary key is forbiden by configuration: key" severity: 1 } 2025-03-31T16:19:04.781296Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008954098358427:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:04.781417Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:04.803926Z node 1 :TX_PROXY ERROR: Actor# [1:7488008954098358434:2323] txid# 281474976710660, issues: { message: "Uuid as primary key is forbiden by configuration: val" severity: 1 } 2025-03-31T16:19:04.817182Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008954098358442:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:04.817279Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:04.832643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:19:04.989017Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008954098358531:2355], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:04.989153Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } >> KqpYql::UpdateBadType [GOOD] >> KqpYql::ScriptUdf [GOOD] >> KqpYql::SelectNoAsciiValue >> KqpScripting::SelectNullType [GOOD] >> KqpScripting::StreamDdlAndDml >> KqpYql::TableConcat [GOOD] >> KqpYql::TableNameConflict >> KqpYql::EvaluateExpr1 [GOOD] >> KqpYql::Discard >> KqpScripting::UnsafeTimestampCast [GOOD] >> KqpScripting::SystemTables >> KqpYql::BinaryJsonOffsetBound [GOOD] >> KqpYql::AnsiIn >> TGroupMapperTest::MonteCarlo ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::TestUuidPrimaryKeyPrefixSearch [GOOD] Test command err: Trying to start YDB, gRPC: 29039, MsgBus: 26779 2025-03-31T16:19:01.317973Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008942949590456:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:01.318043Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000b2b/r3tmp/tmp5ntfjD/pdisk_1.dat 2025-03-31T16:19:01.690209Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:19:01.738812Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:19:01.738919Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:19:01.744196Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29039, node 1 2025-03-31T16:19:01.795635Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:19:01.795666Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:19:01.795675Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:19:01.795785Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26779 TClient is connected to server localhost:26779 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:19:02.274896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:19:04.316707Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008955834493007:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:04.316891Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:04.587011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:19:04.726197Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008955834493111:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:04.726299Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:04.728165Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008955834493116:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:04.732714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-31T16:19:04.747147Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008955834493118:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-31T16:19:04.822063Z node 1 :TX_PROXY ERROR: Actor# [1:7488008955834493169:2395] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpYql::CreateUseTable [GOOD] >> KqpYql::InsertCV+useSink [GOOD] >> KqpYql::InsertCV-useSink >> Cdc::UpdateStream [GOOD] >> Cdc::UpdateShardCount ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::UpdateBadType [GOOD] Test command err: Trying to start YDB, gRPC: 9662, MsgBus: 30039 2025-03-31T16:19:01.321778Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008942772248958:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:01.321848Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000b09/r3tmp/tmpI6TBNf/pdisk_1.dat 2025-03-31T16:19:01.656617Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9662, node 1 2025-03-31T16:19:01.704423Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:19:01.704513Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:19:01.709966Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:19:01.743801Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:19:01.743822Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:19:01.743829Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:19:01.743948Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30039 TClient is connected to server localhost:30039 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:19:02.234041Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 
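
[editor's note] The KqpYql::UuidPrimaryKeyDisabled output above shows the schemeshard guard in action: with Uuid keys disabled by configuration, CREATE TABLE requests whose key columns (named "key" and "val" in the logged messages) have type Uuid are rejected with "Uuid as primary key is forbiden by configuration". A minimal sketch of a table definition that would trip this check; the table and column names here are illustrative, not taken from the test source:

    -- YQL sketch, hypothetical table
    CREATE TABLE `/Root/UuidKeyed` (
        key Uuid,
        val String,
        PRIMARY KEY (key)  -- rejected while Uuid primary keys are disabled
    );
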
2025-03-31T16:19:02.268194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:19:02.411064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:02.565712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:02.632857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:04.319190Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008955657152605:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:04.319285Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:04.665144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:19:04.704439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:19:04.734955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:19:04.761550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:19:04.790220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:19:04.830177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:19:04.874260Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008955657153117:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:04.874321Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008955657153122:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:04.874340Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:04.877860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:19:04.886892Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008955657153124:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:19:04.979531Z node 1 :TX_PROXY ERROR: Actor# [1:7488008955657153178:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
: Error: Type annotation, code: 1030
:4:26: Error: At function: KiUpdateTable!
:3:20: Error: Failed to convert type: Struct<'Amount':String?> to Struct<'Amount':Uint64?>
:3:20: Error: Failed to convert 'Amount': Optional to Optional
:3:20: Error: Row type mismatch for table: db.[/Root/Test] >> KqpScripting::QueryStats [GOOD] >> KqpScripting::Pure >> TGroupMapperTest::NonUniformCluster2 [GOOD] >> KqpScripting::ScriptingCreateAndAlterTableTest [GOOD] >> KqpScripting::SecondaryIndexes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::CreateUseTable [GOOD] Test command err: Trying to start YDB, gRPC: 6651, MsgBus: 29595 2025-03-31T16:18:56.681357Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008921665552046:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:56.681438Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000b4f/r3tmp/tmplCbWga/pdisk_1.dat 2025-03-31T16:18:56.968478Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6651, node 1 2025-03-31T16:18:57.041647Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:57.041665Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:57.041672Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:57.041776Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:18:57.048204Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:57.048344Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:57.049841Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29595 TClient is connected to server localhost:29595 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:57.497542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:57.514461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
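
[editor's note] The KqpYql::UpdateBadType issue list above is the expected output of a negative test: the query feeds a String value into the Uint64? column Amount of /Root/Test, and the KiUpdateTable type-annotation pass (code 1030) rejects the resulting row type. A minimal sketch of a statement that produces this class of error; only the table and column come from the log, the literal and WHERE clause are assumptions:

    -- YQL sketch, hypothetical query
    UPDATE `/Root/Test`
    SET Amount = "badtype"  -- String into Struct<'Amount':Uint64?>: fails type annotation
    WHERE `Group` = 1;
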
2025-03-31T16:18:57.618768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:57.736961Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:57.810371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:59.380346Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008934550455695:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:59.380452Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:59.646324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:59.680596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:59.718398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:59.748614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:59.781501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:59.816338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:59.898427Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008934550456210:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:59.898541Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:59.898596Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008934550456215:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:59.901844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:59.911356Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008934550456217:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:18:59.987669Z node 1 :TX_PROXY ERROR: Actor# [1:7488008934550456272:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
: Error: Optimization, code: 1070
:4:24: Error: Queries with mixed data and scheme operations are not supported. Use separate queries for different types of operations., code: 2009 Trying to start YDB, gRPC: 28817, MsgBus: 23479 2025-03-31T16:19:01.716390Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008940150247819:2069];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:01.716479Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000b4f/r3tmp/tmpn1Rqls/pdisk_1.dat 2025-03-31T16:19:01.845315Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:19:01.845399Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:19:01.845773Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:19:01.860319Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28817, node 2 2025-03-31T16:19:01.901424Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:19:01.901446Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:19:01.901454Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:19:01.901565Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23479 TClient is connected to server localhost:23479 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:19:02.340568Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:02.357316Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:02.418479Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
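
[editor's note] Error code 2009 above ("Queries with mixed data and scheme operations are not supported") is the optimizer guard that KqpYql::CreateUseTable exercises: a single YQL query text may not combine DDL with DML here, so the CREATE TABLE and the write must be issued as two separate queries. A sketch of the rejected shape; the table name and values are illustrative assumptions:

    -- YQL sketch; rejected: scheme + data operations in one query text
    CREATE TABLE `/Root/Tmp` (Key Uint64, Value String, PRIMARY KEY (Key));
    UPSERT INTO `/Root/Tmp` (Key, Value) VALUES (1u, "one");
    -- accepted: run the CREATE TABLE first, then the UPSERT as a second query
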
2025-03-31T16:19:02.583207Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:02.673903Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:04.979835Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008953035151462:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:04.979958Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:05.025156Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:19:05.061826Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:19:05.095982Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:19:05.131335Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:19:05.164160Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:19:05.205435Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:19:05.254145Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008957330119268:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:05.254270Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:05.254538Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008957330119273:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:05.258779Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:19:05.282956Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488008957330119275:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:19:05.340311Z node 2 :TX_PROXY ERROR: Actor# [2:7488008957330119329:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:19:06.333149Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:19:06.533433Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437946544, txId: 281474976715673] shutting down 2025-03-31T16:19:06.716722Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488008940150247819:2069];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:06.716841Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |89.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::NonUniformCluster2 [GOOD] >> YdbOlapStore::LogPagingBetween [GOOD] >> YdbOlapStore::LogPagingAfter >> BasicUsage::BasicWriteSession >> BasicUsage::RetryDiscoveryWithCancel >> BasicUsage::PropagateSessionClosed >> BasicUsage::WriteSessionNoAvailableDatabase >> BasicUsage::GetAllStartPartitionSessions >> BasicUsage::WriteSessionWriteInHandlers >> BasicUsage::FallbackToSingleDb >> BasicUsage::WriteSessionCloseWaitsForWrites >> BasicUsage::WaitEventBlocksBeforeDiscovery >> BasicUsage::SelectDatabaseByHash [GOOD] >> BasicUsage::SelectDatabase [GOOD] >> KqpScripting::ScriptExplainCreatedTable [GOOD] >> KqpScripting::ScriptExplain |89.3%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::SelectDatabase [GOOD] >> Cdc::Drop[YdsRunner] [GOOD] >> Cdc::Drop[TopicRunner] >> DataShardOutOfOrder::TestShardRestartNoUndeterminedImmediate >> DataShardTxOrder::RandomPoints_ReproducerDelayRS1 >> DataShardTxOrder::RandomPointsAndRanges >> DataShardTxOrder::ImmediateBetweenOnline_oo8 >> DataShardTxOrder::ImmediateBetweenOnline >> DataShardOutOfOrder::TestReadTableWriteConflict >> DataShardTxOrder::RandomPoints_DelayRS >> KqpYql::TableNameConflict [GOOD] >> KqpYql::AnsiIn [GOOD] >> KqpYql::Discard [GOOD] >> KqpYql::SelectNoAsciiValue [GOOD] >> KqpYql::InsertCV-useSink [GOOD] >> DataShardOutOfOrder::TestPlannedTimeoutSplit >> BackupRestore::RestoreExternalDataSourceWithoutSecret [GOOD] >> BackupRestore::PrefixedVectorIndex >> KqpScripting::StreamDdlAndDml [GOOD] >> Cdc::UpdateShardCount [GOOD] >> Cdc::UpdateRetentionPeriod ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::AnsiIn [GOOD] Test command err: Trying to start YDB, gRPC: 10054, MsgBus: 5592 2025-03-31T16:19:01.468724Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008940071759321:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:01.468815Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath 
# /home/runner/.ya/build/build_root/0l5c/000aed/r3tmp/tmpJdk2uD/pdisk_1.dat 2025-03-31T16:19:01.900969Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:19:01.909688Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected TServer::EnableGrpc on GrpcPort 10054, node 1 2025-03-31T16:19:01.909788Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:19:01.911575Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:19:01.955793Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:19:01.955826Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:19:01.955835Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:19:01.955960Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5592 TClient is connected to server localhost:5592 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:19:02.566393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:02.587263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:02.723508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:02.885548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:02.962146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:04.722400Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008952956663005:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:04.722520Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:05.034551Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:19:05.066394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:19:05.131593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:19:05.167953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:19:05.199428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:19:05.270821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:19:05.322641Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008957251630820:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:05.322711Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:05.323164Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008957251630826:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:05.326492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:19:05.338960Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008957251630828:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:19:05.431064Z node 1 :TX_PROXY ERROR: Actor# [1:7488008957251630882:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:19:06.457624Z node 1 :KQP_SESSION ERROR: SessionId: ydb://session/3?node_id=1&id=MTc1Mjc2MTgtZjAyZThjOWEtNDQ5YWNiOWUtZTM2MWQ4Njc=, ActorId: [1:7488008961546598471:2496], ActorState: ExecuteState, TraceId: 01jqpfzqe841fnmyq8jf0cs6hj, Internal error, message: yql/essentials/types/binary_json/read.cpp:161: StringOffset must be inside buffer 2025-03-31T16:19:06.457671Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTc1Mjc2MTgtZjAyZThjOWEtNDQ5YWNiOWUtZTM2MWQ4Njc=, ActorId: [1:7488008961546598471:2496], ActorState: ExecuteState, TraceId: 01jqpfzqe841fnmyq8jf0cs6hj, Create QueryResponse for error on request, msg: yql/essentials/types/binary_json/read.cpp:161: StringOffset must be inside buffer 2025-03-31T16:19:06.468171Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008940071759321:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:06.468242Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 16550, MsgBus: 64213 2025-03-31T16:19:07.256350Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008967529955554:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:07.256401Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000aed/r3tmp/tmpTk9h3O/pdisk_1.dat 2025-03-31T16:19:07.346375Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16550, node 2 2025-03-31T16:19:07.384318Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:19:07.384432Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:19:07.387213Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:19:07.428473Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:19:07.428508Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:19:07.428517Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:19:07.428618Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64213 TClient is connected to server localhost:64213 WaitRootIsUp 'Root'... 
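Two kinds of messages dominate the node-1 section above. The repeated "Resource pool default not found" warnings are bootstrap noise: the workload service polls /Root/.metadata/workload_manager/pools/default before the pool exists, and the creator actor then races a concurrent creation ("path exist, request accepts it"). The one genuine failure is the KQP_SESSION internal error from yql/essentials/types/binary_json/read.cpp:161 ("StringOffset must be inside buffer"), i.e. a BinaryJson buffer whose string-table offset points outside the serialized blob. A hedged sketch of the kind of query that exercises that reader (table path and column are hypothetical, not taken from this test):

-- Any read that materializes a JsonDocument value goes through the BinaryJson reader:
SELECT JSON_VALUE(Doc, "$.key") FROM `/Root/JsonTable`;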
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:19:07.863185Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:07.869362Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:19:07.873978Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:07.946184Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:08.091075Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:08.168025Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:09.912627Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008976119891909:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:09.912712Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:09.954840Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:19:09.982031Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:19:10.007810Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:19:10.032969Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:19:10.060916Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:19:10.094884Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:19:10.133171Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008980414859712:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:10.133251Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:10.133478Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008980414859717:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:10.136952Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:19:10.153338Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488008980414859719:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:19:10.242122Z node 2 :TX_PROXY ERROR: Actor# [2:7488008980414859774:3437] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::TableNameConflict [GOOD] Test command err: Trying to start YDB, gRPC: 29794, MsgBus: 1482 2025-03-31T16:19:01.385226Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008943292856621:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:01.385401Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000b44/r3tmp/tmpbdroSX/pdisk_1.dat 2025-03-31T16:19:01.760085Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29794, node 1 2025-03-31T16:19:01.818247Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:19:01.818362Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:19:01.820103Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:19:01.848601Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:19:01.848622Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:19:01.848630Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:19:01.848758Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1482 TClient is connected to server localhost:1482 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:19:02.340930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-03-31T16:19:02.364089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:19:02.532813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:19:02.708233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:19:02.775754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:04.618207Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008956177760270:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:04.618356Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:04.976955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:19:05.015187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:19:05.045269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:19:05.079073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:19:05.108305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:19:05.149316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:19:05.193997Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008960472728076:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:05.194068Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008960472728081:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:05.194082Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:05.198250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:19:05.208366Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008960472728083:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:19:05.292604Z node 1 :TX_PROXY ERROR: Actor# [1:7488008960472728137:3442] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
: Error: Table intent determination, code: 1040
:3:27: Error: CONCAT is not supported on Kikimr clusters. 2025-03-31T16:19:06.386317Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008943292856621:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:06.386407Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 1535, MsgBus: 4345 2025-03-31T16:19:06.942957Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008962466424011:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:06.943010Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000b44/r3tmp/tmp3ADzxl/pdisk_1.dat 2025-03-31T16:19:07.045163Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1535, node 2 2025-03-31T16:19:07.090340Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:19:07.090447Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:19:07.093282Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:19:07.113383Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:19:07.113407Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:19:07.113415Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:19:07.113523Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4345 TClient is connected to server localhost:4345 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
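The ":3:27: Error: CONCAT is not supported on Kikimr clusters" message above is raised during table-intent determination (code 1040). CONCAT is the YQL table function that unions several tables by path; it works on YT-backed YQL but is not implemented by KQP. A minimal sketch of the query shape that triggers it (paths are hypothetical):

-- Accepted by YQL over YT, rejected on YDB (Kikimr) at intent determination:
SELECT * FROM CONCAT(`/Root/Test1`, `/Root/Test2`);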
2025-03-31T16:19:07.546796Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:19:07.552594Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:19:07.562819Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:07.609153Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:07.734438Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:07.802834Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:09.737272Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008975351327654:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:09.737395Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:09.763287Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:19:09.790848Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:19:09.817416Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:19:09.849489Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:19:09.879141Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:19:09.925766Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:19:09.973790Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008975351328167:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:09.973868Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:09.974011Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008975351328172:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:09.977379Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:19:09.985925Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488008975351328174:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:19:10.085688Z node 2 :TX_PROXY ERROR: Actor# [2:7488008979646295524:3442] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
: Error: Type annotation, code: 1030
:12:30: Error: At function: KiCreateTable!
:12:30: Error: Table name conflict: db.[/Root/Test] is used to reference multiple tables. ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::Discard [GOOD] Test command err: Trying to start YDB, gRPC: 28011, MsgBus: 63454 2025-03-31T16:19:01.454944Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008943019582543:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:01.455035Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000b16/r3tmp/tmpe2jojB/pdisk_1.dat 2025-03-31T16:19:01.855615Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28011, node 1 2025-03-31T16:19:01.880338Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:19:01.880480Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:19:01.887877Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:19:01.891753Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:19:01.891765Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:19:01.934291Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:19:01.934316Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:19:01.934322Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:19:01.934458Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63454 TClient is connected to server localhost:63454 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:19:02.477364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:02.496341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
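The type-annotation failure above (code 1030, at KiCreateTable) reports that db.[/Root/Test] resolves to more than one table within a single request. A hedged sketch of roughly the pattern involved, assuming a hypothetical schema, is a script that both reads and creates the same path in one query text:

SELECT * FROM `/Root/Test`;
CREATE TABLE `/Root/Test` (Id Int32, PRIMARY KEY (Id));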
2025-03-31T16:19:02.639360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:02.804966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:19:02.877505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:19:04.650027Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008955904486216:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:04.650142Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:04.949972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:19:04.981747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:19:05.054185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:19:05.128377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:19:05.160543Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:19:05.194114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:19:05.274261Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008960199454031:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:05.274380Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:05.274654Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008960199454036:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:05.278685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:19:05.291975Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008960199454038:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:19:05.362996Z node 1 :TX_PROXY ERROR: Actor# [1:7488008960199454094:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:19:06.455192Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008943019582543:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:06.455333Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 6653, MsgBus: 4815 2025-03-31T16:19:07.041915Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008968515432545:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:07.041975Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000b16/r3tmp/tmpE9qvSe/pdisk_1.dat 2025-03-31T16:19:07.159882Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:19:07.185574Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:19:07.185658Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:19:07.187410Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6653, node 2 2025-03-31T16:19:07.228702Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:19:07.228730Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:19:07.228739Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:19:07.228852Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4815 TClient is connected to server localhost:4815 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:19:07.653169Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:19:07.669498Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:07.736324Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-31T16:19:07.889428Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:19:07.968492Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-31T16:19:09.742115Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008977105368904:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:09.742192Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:09.817516Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:19:09.847212Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:19:09.876585Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:19:09.904893Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:19:09.969556Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:19:09.999182Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:19:10.074107Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008981400336717:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:10.074194Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008981400336722:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:10.074200Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:10.077524Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:19:10.088971Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488008981400336724:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:19:10.188224Z node 2 :TX_PROXY ERROR: Actor# [2:7488008981400336781:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:19:11.128888Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7488008985695304376:2499], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:2:13: Error: DISCARD not supported in YDB queries, code: 2008 2025-03-31T16:19:11.130966Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NWVmODFjNDEtZjhiYzM0LWUyMWQ1YjJhLTUxZWI0Njg=, ActorId: [2:7488008985695304369:2495], ActorState: ExecuteState, TraceId: 01jqpfzw2f1grgaem69wz3g3x1, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: >> KqpScripting::SystemTables [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::SelectNoAsciiValue [GOOD] Test command err: Trying to start YDB, gRPC: 26071, MsgBus: 8315 2025-03-31T16:19:01.401769Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008941222113691:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:01.401881Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000b25/r3tmp/tmpnKJUu2/pdisk_1.dat 2025-03-31T16:19:01.773684Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:19:01.777142Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:19:01.777253Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:19:01.780998Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26071, node 1 2025-03-31T16:19:01.866389Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:19:01.866420Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:19:01.866432Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:19:01.866599Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8315 TClient is connected to server localhost:8315 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:19:02.412239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
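The GENERIC_ERROR above comes from the KQP compiler rather than the test harness: DISCARD is the YQL construct that evaluates a query and drops its result, and YDB rejects it with code 2008. A minimal sketch of a statement that fails this way (table name is hypothetical):

-- Valid in general-purpose YQL, rejected by YDB/KQP with code 2008:
DISCARD SELECT Key, Value FROM `/Root/Test`;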
2025-03-31T16:19:02.446458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:02.574618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:02.723969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:02.805146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:04.487057Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008954107017358:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:04.487190Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:04.790433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:19:04.818594Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:19:04.855879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:19:04.924151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:19:04.963661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:19:04.997631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:19:05.043300Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008958401985166:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:05.043387Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008958401985171:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:05.043389Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:05.048221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:19:05.058198Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008958401985173:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:19:05.155727Z node 1 :TX_PROXY ERROR: Actor# [1:7488008958401985227:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
: Error: Type annotation, code: 1030
:10:13: Error: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject, At function: SqlProjectItem
:10:20: Error: At function: Apply
:8:28: Error: At function: ScriptUdf
:8:28: Error: Module not loaded for script type: Python3 Trying to start YDB, gRPC: 12210, MsgBus: 11160 2025-03-31T16:19:06.840353Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008963503085864:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:06.840445Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000b25/r3tmp/tmpktvIXS/pdisk_1.dat 2025-03-31T16:19:06.930723Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12210, node 2 2025-03-31T16:19:06.975437Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:19:06.975596Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:19:06.977421Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:19:07.001491Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:19:07.001515Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:19:07.001524Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:19:07.001662Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11160 TClient is connected to server localhost:11160 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:19:07.399335Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:07.404255Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:19:07.416768Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:07.470354Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
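The ScriptUdf error above means the Python3 script-UDF module is not linked into this KQP runtime, so any inline Python UDF fails at type annotation. A hedged sketch of the kind of query involved (the function body is hypothetical, not the one from the test):

$f = Python3::f(Callable<(Int64)->Int64>, @@
def f(x):
    return x + 1
@@);
SELECT $f(42);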
2025-03-31T16:19:07.621711Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:07.677069Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:09.941010Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008976387989517:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:09.941143Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:09.982100Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:19:10.009125Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:19:10.035673Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:19:10.061281Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:19:10.088963Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:19:10.121100Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:19:10.160489Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008980682957322:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:10.160563Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:10.160667Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008980682957327:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:10.164889Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:19:10.176658Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488008980682957329:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:19:10.247894Z node 2 :TX_PROXY ERROR: Actor# [2:7488008980682957383:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:19:11.214611Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:19:11.586435Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437951584, txId: 281474976715673] shutting down 2025-03-31T16:19:11.843497Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488008963503085864:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:11.843586Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::InsertCV-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 11179, MsgBus: 27245 2025-03-31T16:19:01.500015Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008942817362624:2112];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:01.502267Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000b00/r3tmp/tmpWE8sqA/pdisk_1.dat 2025-03-31T16:19:01.905588Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:19:01.933885Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:19:01.933973Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 11179, node 1 2025-03-31T16:19:01.936483Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:19:01.994221Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:19:01.994243Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:19:01.994251Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:19:01.994421Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27245 TClient is connected to server localhost:27245 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:19:02.532410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:02.563325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:02.738896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:02.908810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:02.979542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:04.876985Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008955702266236:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:04.877102Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:05.217401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:19:05.250482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:19:05.282108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:19:05.316731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:19:05.350231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:19:05.384005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:19:05.435720Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008959997234048:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:05.435803Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008959997234053:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:05.435805Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:05.440211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:19:05.451072Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008959997234055:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:19:05.511883Z node 1 :TX_PROXY ERROR: Actor# [1:7488008959997234108:3444] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:19:06.500285Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008942817362624:2112];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:06.500372Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:19:06.540360Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=3; 2025-03-31T16:19:06.549420Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 3 at tablet 72075186224037914 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-03-31T16:19:06.549629Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 3 at tablet 72075186224037914 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-03-31T16:19:06.549826Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488008964292201728:2501], Table: `/Root/Test` ([72057594046644480:9:1]), SessionActorId: [1:7488008964292201710:2501]Got CONSTRAINT VIOLATION for table `/Root/Test`. ShardID=72075186224037914, Sink=[1:7488008964292201728:2501].{
: Error: Duplicate keys have been found., code: 2012 } 2025-03-31T16:19:06.550420Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488008964292201721:2501], SessionActorId: [1:7488008964292201710:2501], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/Test`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[1:7488008964292201710:2501]. isRollback=0 2025-03-31T16:19:06.550679Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTFkNTI5NzMtZTU5OTJiZmQtZGM1Y2MwOWMtNGQ0ZGU4ZmI=, ActorId: [1:7488008964292201710:2501], ActorState: ExecuteState, TraceId: 01jqpfzqjw9dkg15x301ajh5je, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7488008964292201722:2501] from: [1:7488008964292201721:2501] 2025-03-31T16:19:06.550767Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7488008964292201722:2501] TxId: 281474976710671. Ctx: { TraceId: 01jqpfzqjw9dkg15x301ajh5je, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTFkNTI5NzMtZTU5OTJiZmQtZGM1Y2MwOWMtNGQ0ZGU4ZmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/Test`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-03-31T16:19:06.551784Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTFkNTI5NzMtZTU5OTJiZmQtZGM1Y2MwOWMtNGQ0ZGU4ZmI=, ActorId: [1:7488008964292201710:2501], ActorState: ExecuteState, TraceId: 01jqpfzqjw9dkg15x301ajh5je, Create QueryResponse for error on request, msg:
: Error: Execution, code: 1060
: Error: Constraint violated. Table: `/Root/Test`., code: 2012
: Error: Duplicate keys have been found., code: 2012 Trying to start YDB, gRPC: 13163, MsgBus: 64654 2025-03-31T16:19:07.350450Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008966679779358:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:07.350521Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000b00/r3tmp/tmpCTQsCG/pdisk_1.dat 2025-03-31T16:19:07.450045Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13163, node 2 2025-03-31T16:19:07.484445Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:19:07.484552Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:19:07.486342Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:19:07.518517Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:19:07.518548Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:19:07.518556Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:19:07.518676Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64654 TClient is connected to server localhost:64654 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:19:07.956874Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:07.965213Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:19:07.977177Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:08.051734Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
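[editor's note] The error object above is a nested issue tree: a generic "Execution" issue (code 1060) wrapping the specific "Constraint violated" and "Duplicate keys have been found" issues (code 2012). The bare ": Error:" lines in the log are how that tree prints, one level per line. A toy reconstruction of the printing, assuming a simplified issue shape (the real NYql issue type also carries positions and severity):

```cpp
#include <cstdint>
#include <iostream>
#include <string>
#include <vector>

// Simplified stand-in for the issue tree printed in the log above.
struct TIssue {
    std::string Message;
    uint32_t Code = 0;
    std::vector<TIssue> Subissues;  // vector of incomplete type: OK since C++17
};

// Prints one ": Error: <message>, code: <code>" line per tree level,
// mirroring the layout of the log excerpt.
void PrintIssue(const TIssue& issue, int depth = 0) {
    std::cout << std::string(depth * 2, ' ')
              << ": Error: " << issue.Message << ", code: " << issue.Code << "\n";
    for (const TIssue& sub : issue.Subissues) {
        PrintIssue(sub, depth + 1);
    }
}

int main() {
    TIssue root{"Execution", 1060,
                {{"Constraint violated. Table: `/Root/Test`.", 2012,
                  {{"Duplicate keys have been found.", 2012, {}}}}}};
    PrintIssue(root);
}
```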
2025-03-31T16:19:08.213228Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:08.285217Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:10.160790Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008979564683019:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:10.160879Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:10.206004Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:19:10.236350Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:19:10.265850Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:19:10.294575Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:19:10.324323Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:19:10.392854Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:19:10.450500Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008979564683534:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:10.450607Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:10.450785Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008979564683539:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:10.454018Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:19:10.469346Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488008979564683541:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:19:10.533627Z node 2 :TX_PROXY ERROR: Actor# [2:7488008979564683594:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:19:11.853001Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7488008983859651222:2502], TxId: 281474976715672, task: 1. Ctx: { TraceId : 01jqpfzwhkfpcwxk0rz30w4pnh. SessionId : ydb://session/3?node_id=2&id=ZGU3OGM3NGYtZmRlZDllNmUtZTRjMzQ4OWQtYjdhYThmYzA=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-03-31T16:19:11.853281Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7488008983859651224:2503], TxId: 281474976715672, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ZGU3OGM3NGYtZmRlZDllNmUtZTRjMzQ4OWQtYjdhYThmYzA=. TraceId : 01jqpfzwhkfpcwxk0rz30w4pnh. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7488008983859651219:2493], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-03-31T16:19:11.853561Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZGU3OGM3NGYtZmRlZDllNmUtZTRjMzQ4OWQtYjdhYThmYzA=, ActorId: [2:7488008983859651160:2493], ActorState: ExecuteState, TraceId: 01jqpfzwhkfpcwxk0rz30w4pnh, Create QueryResponse for error on request, msg:
: Error: Execution, code: 1060
: Error: Conflict with existing key., code: 2012 >> TGroupMapperTest::MapperSequentialCalls [GOOD] >> KqpScripting::Pure [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamDdlAndDml [GOOD] Test command err: Trying to start YDB, gRPC: 27873, MsgBus: 30995 2025-03-31T16:19:01.347529Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008939821438044:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:01.347642Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000b47/r3tmp/tmpbrRpvD/pdisk_1.dat 2025-03-31T16:19:01.732230Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:19:01.757341Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:19:01.757438Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 27873, node 1 2025-03-31T16:19:01.771154Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:19:01.851036Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:19:01.851053Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:19:01.851068Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:19:01.851206Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30995 TClient is connected to server localhost:30995 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:19:02.374887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:02.391475Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:02.403200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
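[editor's note] Session URIs above like `ydb://session/3?node_id=2&id=ZGU3OGM3NGYt...` carry a base64-encoded identifier in the `id=` parameter; decoding the value from the log yields the plain dashed hex string `de78c74f-fded9e6e-e4c3489d-b7aa8fc0` (presumably a session UUID-style id). A small self-contained decoder to check that, using only standard base64 and no external dependencies:

```cpp
#include <iostream>
#include <string>

// Minimal base64 decoder, enough to inspect the id= parameter of the
// session URIs in the log above.
std::string Base64Decode(const std::string& in) {
    static const std::string abc =
        "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
    std::string out;
    int val = 0, bits = 0;
    for (char c : in) {
        if (c == '=') break;                     // padding ends the payload
        auto pos = abc.find(c);
        if (pos == std::string::npos) continue;  // skip any stray characters
        val = (val << 6) | static_cast<int>(pos);
        bits += 6;
        if (bits >= 8) {
            bits -= 8;
            out.push_back(static_cast<char>((val >> bits) & 0xFF));
        }
    }
    return out;
}

int main() {
    // id= parameter taken verbatim from the session log lines above.
    std::cout << Base64Decode("ZGU3OGM3NGYtZmRlZDllNmUtZTRjMzQ4OWQtYjdhYThmYzA=")
              << "\n";  // prints: de78c74f-fded9e6e-e4c3489d-b7aa8fc0
}
```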
2025-03-31T16:19:02.571892Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-31T16:19:02.735955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:02.816308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:04.455469Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008952706341706:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:04.455580Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:04.811552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:19:04.844870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:19:04.881316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:19:04.920996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:19:04.957479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:19:04.995969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:19:05.043276Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008957001309512:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:05.043350Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:05.044054Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008957001309517:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:05.048528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:19:05.061481Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008957001309520:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:19:05.145627Z node 1 :TX_PROXY ERROR: Actor# [1:7488008957001309573:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:19:05.982803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:19:06.255156Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437946285, txId: 281474976710673] shutting down 2025-03-31T16:19:06.347189Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008939821438044:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:06.347257Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 63030, MsgBus: 19628 2025-03-31T16:19:06.898817Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008961011469603:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:06.898915Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000b47/r3tmp/tmpY3pDLN/pdisk_1.dat 2025-03-31T16:19:07.007480Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63030, node 2 2025-03-31T16:19:07.032894Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:19:07.033005Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:19:07.034754Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:19:07.062088Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:19:07.062107Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:19:07.062112Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:19:07.062202Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19628 TClient is connected to server localhost:19628 WaitRootIsUp 'Root'... 
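[editor's note] In "KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437946285, txId: ...]" the step reads as a coordinator plan step in milliseconds since the Unix epoch, which is why it lines up with the wall-clock timestamps around it: 1743437946285 ms is 2025-03-31T16:19:06.285Z, a fraction after the 16:19:06.255 record that discarded it. A quick check, assuming only that the step is a millisecond epoch value:

```cpp
#include <cstdint>
#include <cstdio>
#include <ctime>

int main() {
    // Plan step copied from a "discarding snapshot" record above.
    const int64_t stepMs = 1743437946285;
    const std::time_t secs = static_cast<std::time_t>(stepMs / 1000);
    std::tm utc{};
    gmtime_r(&secs, &utc);  // POSIX; the logs above come from a Linux runner
    char buf[32];
    std::strftime(buf, sizeof(buf), "%Y-%m-%dT%H:%M:%S", &utc);
    std::printf("%s.%03dZ\n", buf, static_cast<int>(stepMs % 1000));
    // prints 2025-03-31T16:19:06.285Z, matching the surrounding log times
}
```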
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:19:07.515673Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:07.524787Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:07.531958Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:19:07.586985Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:07.739592Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:07.822348Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:10.002343Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008978191340560:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:10.002428Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:10.049065Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:19:10.077158Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:19:10.102854Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:19:10.138947Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:19:10.167499Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:19:10.197560Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:19:10.246551Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008978191341067:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:10.246632Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008978191341072:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:10.246644Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:10.249810Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:19:10.258711Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488008978191341074:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:19:10.339395Z node 2 :TX_PROXY ERROR: Actor# [2:7488008978191341128:3441] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:19:11.332264Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:19:11.891463Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437951906, txId: 281474976715673] shutting down 2025-03-31T16:19:11.901200Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488008961011469603:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:11.901449Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpScripting::StreamExecuteYqlScriptClientOperationTimeoutBruteForce [GOOD] >> DataShardOutOfOrder::UncommittedReads |89.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::MapperSequentialCalls [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::SystemTables [GOOD] Test command err: Trying to start YDB, gRPC: 32330, MsgBus: 1750 2025-03-31T16:19:01.318714Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008942113842301:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:01.318804Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000b28/r3tmp/tmp6c3cST/pdisk_1.dat 2025-03-31T16:19:01.687873Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:19:01.734158Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:19:01.734257Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 32330, node 1 2025-03-31T16:19:01.736042Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:19:01.794677Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:19:01.794694Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:19:01.794702Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:19:01.794790Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1750 TClient is connected to server localhost:1750 WaitRootIsUp 'Root'... 
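[editor's note] The repeated METADATA_PROVIDER errors (`fline=table_exists.cpp:59 ... event=timeout`, followed by "cannot detect path existence; path=//Root/.metadata/initialization/migrations; error=timeout") read as a bounded existence probe expiring during test bootstrap and shutdown, before the metadata tables exist; it fails soft instead of blocking. Below is a deadline-guarded probe in that spirit — a sketch only, with hypothetical `CheckPathExists`/`ProbeWithDeadline` names, not the actor implementation from the log:

```cpp
#include <chrono>
#include <future>
#include <iostream>
#include <string>
#include <thread>

enum class TExistence { Exists, Missing, Unknown };  // Unknown == timed out

// Hypothetical slow probe; stands in for the TTableExistsActor round-trip.
bool CheckPathExists(const std::string& /*path*/) {
    std::this_thread::sleep_for(std::chrono::seconds(2));  // simulated stall
    return false;
}

// Bounded existence check: report Unknown instead of blocking forever,
// mirroring how the log degrades to "cannot detect path existence".
TExistence ProbeWithDeadline(const std::string& path,
                             std::chrono::milliseconds budget) {
    auto fut = std::async(std::launch::async, CheckPathExists, path);
    if (fut.wait_for(budget) == std::future_status::timeout) {
        return TExistence::Unknown;  // give up; the probe finishes in background
    }
    return fut.get() ? TExistence::Exists : TExistence::Missing;
}

int main() {
    auto r = ProbeWithDeadline("//Root/.metadata/initialization/migrations",
                               std::chrono::milliseconds(100));
    std::cout << (r == TExistence::Unknown ? "error=timeout\n" : "resolved\n");
    // note: main still waits for the stalled probe on exit, because the
    // future returned by std::async blocks in its destructor
}
```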
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:19:02.302286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:02.338432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:02.502686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:02.672889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:02.753939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:04.408015Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008954998745965:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:04.408195Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:04.710489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:19:04.737891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:19:04.762184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:19:04.791052Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:19:04.827276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:19:04.894429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:19:04.946204Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008954998746479:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:04.946270Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:04.946325Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008954998746484:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:04.951853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:19:04.962724Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008954998746486:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:19:05.064425Z node 1 :TX_PROXY ERROR: Actor# [1:7488008959293713837:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:19:06.141835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:19:06.319303Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008942113842301:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:06.319735Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 27470, MsgBus: 11620 2025-03-31T16:19:07.116765Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008968557214255:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:07.116884Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000b28/r3tmp/tmpkPFMbM/pdisk_1.dat 2025-03-31T16:19:07.221060Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27470, node 2 2025-03-31T16:19:07.252656Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:19:07.252742Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:19:07.254392Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:19:07.319633Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:19:07.319659Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:19:07.319666Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:19:07.319776Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11620 TClient is connected to server localhost:11620 WaitRootIsUp 'Root'... 
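[editor's note] The 15-digit txIds in these records sit just above 2^48 = 281474976710656: txid 281474976710669 here is 2^48 + 13, and txid 281474976715669 from the earlier runs is 2^48 + 5013 — a fixed high marker with a small per-run counter beneath it. The arithmetic is checked below; the counter reading is an observation about this log, not a documented id format:

```cpp
#include <cstdint>
#include <iostream>

int main() {
    const uint64_t base = 1ULL << 48;  // 281474976710656
    // txIds copied from the TX_PROXY records above.
    for (uint64_t txId : {281474976710669ULL, 281474976715669ULL}) {
        std::cout << txId << " = 2^48 + " << (txId - base) << "\n";
    }
    // prints: 281474976710669 = 2^48 + 13
    //         281474976715669 = 2^48 + 5013
}
```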
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-31T16:19:07.740220Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:07.751434Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:19:07.807511Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:07.959958Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:08.021651Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:09.928836Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008977147150615:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:09.928911Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:09.975764Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:19:10.004763Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:19:10.052275Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:19:10.080958Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:19:10.107641Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:19:10.152445Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:19:10.194519Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008981442118422:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:10.194605Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:10.194744Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008981442118427:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:10.197848Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:19:10.206548Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488008981442118429:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:19:10.277425Z node 2 :TX_PROXY ERROR: Actor# [2:7488008981442118483:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:19:11.571359Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437951547, txId: 281474976715671] shutting down 2025-03-31T16:19:11.739768Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437951723, txId: 281474976715673] shutting down 2025-03-31T16:19:12.119439Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488008968557214255:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:12.119515Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:19:12.730769Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437952739, txId: 281474976715675] shutting down >> DataShardOutOfOrder::TestReadTableImmediateWriteBlock >> DataShardScan::ScanFollowedByUpdate >> TGroupMapperTest::SanitizeGroupTest3dc [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::Pure [GOOD] Test command err: Trying to start YDB, gRPC: 11127, MsgBus: 7565 2025-03-31T16:19:01.439548Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008942168387232:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:01.439620Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000afd/r3tmp/tmpJkGbIi/pdisk_1.dat 2025-03-31T16:19:01.785991Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11127, node 1 2025-03-31T16:19:01.835268Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:19:01.835700Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:19:01.840553Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:19:01.912058Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:19:01.912080Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:19:01.912090Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:19:01.912239Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7565 TClient is connected to server localhost:7565 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:19:02.425973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:02.464219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:02.611409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:02.793528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:19:02.882869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-31T16:19:04.751748Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008955053290907:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:04.751866Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:05.086581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:19:05.117407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:19:05.147828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:19:05.176887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:19:05.208180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:19:05.283747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:19:05.338496Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008959348258718:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:05.338568Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:05.338615Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008959348258723:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:05.342242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:19:05.352391Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008959348258726:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:19:05.420698Z node 1 :TX_PROXY ERROR: Actor# [1:7488008959348258778:3451] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:19:06.439395Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008942168387232:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:06.439506Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:19:07.494007Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437947531, txId: 281474976715673] shutting down Trying to start YDB, gRPC: 17415, MsgBus: 6685 2025-03-31T16:19:08.283791Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008971582383334:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:08.283880Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000afd/r3tmp/tmpfkwNGr/pdisk_1.dat 2025-03-31T16:19:08.388373Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17415, node 2 2025-03-31T16:19:08.414262Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:19:08.414356Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:19:08.415935Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:19:08.443023Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:19:08.443049Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:19:08.443058Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:19:08.443195Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6685 TClient is connected to server localhost:6685 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:19:08.814693Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:08.832819Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:08.874794Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:09.013853Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:09.066626Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:11.547235Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008984467287000:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:11.547349Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:11.596708Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:19:11.639708Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:19:11.680216Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:19:11.726740Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:19:11.798350Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:19:11.880518Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:19:11.967687Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008984467287520:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:11.967792Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:11.968082Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008984467287525:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:11.971651Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:19:11.991586Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488008984467287527:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:19:12.059022Z node 2 :TX_PROXY ERROR: Actor# [2:7488008988762254878:3449] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:19:13.284351Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488008971582383334:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:13.284502Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> DataShardTxOrder::ZigZag_oo >> DataShardOutOfOrder::TestOutOfOrderLockLost >> DataShardOutOfOrder::TestSnapshotReadAfterStuckRW ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptClientOperationTimeoutBruteForce [GOOD] Test command err: Trying to start YDB, gRPC: 12799, MsgBus: 27908 2025-03-31T16:18:46.948976Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008878908541606:2190];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:46.949016Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000b70/r3tmp/tmpfSqQNg/pdisk_1.dat 2025-03-31T16:18:47.490366Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12799, node 1 2025-03-31T16:18:47.561139Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:47.561259Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:47.572420Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:18:47.607324Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:47.607343Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:47.607349Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:47.607445Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27908 TClient is connected to server localhost:27908 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:48.310458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:48.327981Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:18:48.342162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:48.477532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:48.684075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:48.770850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:50.567981Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008896088412461:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:50.568083Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:50.896378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:50.927796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:50.963960Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:51.000768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:51.029044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:51.074071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:51.131811Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008900383380271:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:51.131870Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:51.131954Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008900383380276:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:51.136151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:51.147993Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008900383380278:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:18:51.241191Z node 1 :TX_PROXY ERROR: Actor# [1:7488008900383380333:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:51.951195Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008878908541606:2190];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:51.951274Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:18:52.667555Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437932544, txId: 281474976710671] shutting down 2025-03-31T16:18:52.668113Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437932544, txId: 281474976710672] shutting down 2025-03-31T16:18:52.668649Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437932544, txId: 281474976710673] shutting down 2025-03-31T16:18:52.712852Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437932712, txId: 281474976710677] shutting down 2025-03-31T16:18:52.775917Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437932719, txId: 281474976710678] shutting down 2025-03-31T16:18:52.938376Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437932880, txId: 281474976710682] shutting down 2025-03-31T16:18:52.939361Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437932880, txId: 281474976710681] shutting down 2025-03-31T16:18:53.023643Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437933006, txId: 281474976710686] shutting down 2025-03-31T16:18:53.029473Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437933006, txId: 281474976710685] shutting down 2025-03-31T16:18:53.221237Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437933223, txId: 281474976710689] shutting down 2025-03-31T16:18:53.226477Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437933223, txId: 281474976710691] shutting down 2025-03-31T16:18:53.248791Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437933223, txId: 281474976710690] shutting down 2025-03-31T16:18:53.355139Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437933370, txId: 281474976710695] shutting down 2025-03-31T16:18:53.357274Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437933370, txId: 281474976710696] shutting down 2025-03-31T16:18:53.479012Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437933503, txId: 281474976710700] shutting down 2025-03-31T16:18:53.480741Z node 1 :KQP_RESOURCE_MANAGER WARN: 
KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437933503, txId: 281474976710699] shutting down 2025-03-31T16:18:53.720511Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437933734, txId: 281474976710704] shutting down 2025-03-31T16:18:53.721526Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437933734, txId: 281474976710703] shutting down 2025-03-31T16:18:53.868323Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437933888, txId: 281474976710708] shutting down 2025-03-31T16:18:53.872237Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437933888, txId: 281474976710707] shutting down 2025-03-31T16:18:54.030413Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437934063, txId: 281474976710711] shutting down 2025-03-31T16:18:54.128619Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 174 ... pp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488008927647495938:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:04.017216Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:19:04.055400Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437943926, txId: 281474976715678] shutting down 2025-03-31T16:19:04.060741Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437943926, txId: 281474976715677] shutting down 2025-03-31T16:19:04.218504Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437944087, txId: 281474976715681] shutting down 2025-03-31T16:19:04.220724Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437944087, txId: 281474976715682] shutting down 2025-03-31T16:19:04.370291Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437944318, txId: 281474976715685] shutting down 2025-03-31T16:19:04.526332Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437944409, txId: 281474976715687] shutting down 2025-03-31T16:19:04.532724Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437944409, txId: 281474976715688] shutting down 2025-03-31T16:19:04.594847Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437944570, txId: 281474976715691] shutting down 2025-03-31T16:19:04.598692Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437944570, txId: 281474976715692] shutting down 2025-03-31T16:19:04.598782Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437944570, txId: 281474976715693] shutting down 2025-03-31T16:19:04.890040Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437944822, txId: 281474976715698] shutting down 2025-03-31T16:19:04.902769Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437944822, txId: 
281474976715697] shutting down 2025-03-31T16:19:04.923145Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437944913, txId: 281474976715702] shutting down 2025-03-31T16:19:04.923983Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437944913, txId: 281474976715701] shutting down 2025-03-31T16:19:05.081966Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437945088, txId: 281474976715706] shutting down 2025-03-31T16:19:05.088074Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437945095, txId: 281474976715705] shutting down 2025-03-31T16:19:05.273515Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437945277, txId: 281474976715709] shutting down 2025-03-31T16:19:05.282866Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437945284, txId: 281474976715710] shutting down 2025-03-31T16:19:05.483064Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437945494, txId: 281474976715713] shutting down 2025-03-31T16:19:05.483923Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437945494, txId: 281474976715714] shutting down 2025-03-31T16:19:05.650029Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437945676, txId: 281474976715717] shutting down 2025-03-31T16:19:05.650820Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437945676, txId: 281474976715718] shutting down 2025-03-31T16:19:05.913116Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437945928, txId: 281474976715722] shutting down 2025-03-31T16:19:05.917368Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437945928, txId: 281474976715721] shutting down 2025-03-31T16:19:06.103564Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437946131, txId: 281474976715725] shutting down 2025-03-31T16:19:06.253701Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437946271, txId: 281474976715727] shutting down 2025-03-31T16:19:06.376232Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437946404, txId: 281474976715729] shutting down 2025-03-31T16:19:06.517923Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437946544, txId: 281474976715731] shutting down 2025-03-31T16:19:06.518362Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437946544, txId: 281474976715732] shutting down 2025-03-31T16:19:06.751272Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437946782, txId: 281474976715735] shutting down 2025-03-31T16:19:06.847230Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437946873, txId: 281474976715737] shutting down 2025-03-31T16:19:07.043169Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437947027, txId: 
281474976715739] shutting down 2025-03-31T16:19:07.156335Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437947181, txId: 281474976715741] shutting down 2025-03-31T16:19:07.349709Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437947335, txId: 281474976715743] shutting down 2025-03-31T16:19:07.494674Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437947517, txId: 281474976715745] shutting down 2025-03-31T16:19:07.623495Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437947650, txId: 281474976715747] shutting down 2025-03-31T16:19:07.759787Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437947783, txId: 281474976715749] shutting down 2025-03-31T16:19:07.983948Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437947979, txId: 281474976715751] shutting down 2025-03-31T16:19:08.105224Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437948126, txId: 281474976715753] shutting down 2025-03-31T16:19:08.277620Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437948280, txId: 281474976715755] shutting down 2025-03-31T16:19:08.414830Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437948448, txId: 281474976715757] shutting down 2025-03-31T16:19:08.561821Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437948595, txId: 281474976715759] shutting down 2025-03-31T16:19:08.780636Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437948812, txId: 281474976715761] shutting down 2025-03-31T16:19:08.931263Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437948966, txId: 281474976715763] shutting down 2025-03-31T16:19:09.109687Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437949141, txId: 281474976715765] shutting down 2025-03-31T16:19:09.306205Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437949337, txId: 281474976715767] shutting down 2025-03-31T16:19:09.510118Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437949533, txId: 281474976715769] shutting down 2025-03-31T16:19:09.709083Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437949736, txId: 281474976715771] shutting down 2025-03-31T16:19:09.973810Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437950002, txId: 281474976715773] shutting down 2025-03-31T16:19:10.122636Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437950149, txId: 281474976715775] shutting down 2025-03-31T16:19:10.310684Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437950338, txId: 281474976715777] shutting down 2025-03-31T16:19:10.491820Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437950520, txId: 
281474976715779] shutting down 2025-03-31T16:19:10.765513Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437950779, txId: 281474976715781] shutting down 2025-03-31T16:19:10.944911Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437950961, txId: 281474976715783] shutting down 2025-03-31T16:19:11.151949Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437951171, txId: 281474976715785] shutting down 2025-03-31T16:19:11.462979Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437951472, txId: 281474976715787] shutting down 2025-03-31T16:19:11.657739Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437951675, txId: 281474976715789] shutting down 2025-03-31T16:19:11.887722Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437951836, txId: 281474976715791] shutting down 2025-03-31T16:19:12.039380Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437952067, txId: 281474976715793] shutting down 2025-03-31T16:19:12.291922Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437952312, txId: 281474976715795] shutting down 2025-03-31T16:19:12.499022Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437952522, txId: 281474976715797] shutting down 2025-03-31T16:19:12.744721Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437952774, txId: 281474976715799] shutting down 2025-03-31T16:19:12.962217Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437952991, txId: 281474976715801] shutting down 2025-03-31T16:19:13.224510Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437953250, txId: 281474976715803] shutting down 2025-03-31T16:19:13.484059Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437953509, txId: 281474976715805] shutting down 2025-03-31T16:19:13.700535Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437953733, txId: 281474976715807] shutting down >> Cdc::Drop[TopicRunner] [GOOD] >> Cdc::DescribeStream >> KqpScripting::SecondaryIndexes [GOOD] |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::SanitizeGroupTest3dc [GOOD] >> KqpScripting::ScriptExplain [GOOD] >> DataShardOutOfOrder::TestUnprotectedReadsThenWriteVisibility >> DataShardTxOrder::ForceOnlineBetweenOnline_oo8 >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed+EvWrite >> Cdc::NaN[YdsRunner] [GOOD] >> Cdc::NaN[TopicRunner] >> DataShardTxOrder::RandomPoints_ReproducerDelayRS1 [GOOD] >> DataShardTxOrder::ZigZag_oo8_dirty ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::SecondaryIndexes [GOOD] Test command err: Trying to start YDB, gRPC: 31704, MsgBus: 8484 2025-03-31T16:19:01.371183Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008940503782879:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:01.371248Z node 1 
:METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000b0f/r3tmp/tmpJ9Pyke/pdisk_1.dat 2025-03-31T16:19:01.759488Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:19:01.794075Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected TServer::EnableGrpc on GrpcPort 31704, node 1 2025-03-31T16:19:01.794167Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:19:01.796180Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:19:01.839603Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:19:01.839649Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:19:01.839662Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:19:01.839793Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8484 TClient is connected to server localhost:8484 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:19:02.346319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:02.377675Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:19:02.393534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:02.579099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:02.735039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:19:02.804963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:04.720082Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008953388686549:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:04.720208Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:05.106740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:19:05.142860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:19:05.173301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:19:05.204408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:19:05.232520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:19:05.268176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:19:05.338190Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008957683654358:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:05.338311Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:05.338613Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008957683654363:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:05.342983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:19:05.356716Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008957683654365:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:19:05.456845Z node 1 :TX_PROXY ERROR: Actor# [1:7488008957683654422:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:19:06.350486Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:19:06.371657Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008940503782879:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:06.371752Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:19:06.793573Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437946831, txId: 281474976710673] shutting down 2025-03-31T16:19:06.879325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-31T16:19:06.924050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-31T16:19:06.935141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-31T16:19:06.945180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-31T16:19:06.997780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-31T16:19:07.009608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-03-31T16:19:07.419886Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437947454, txId: 281474976710682] shutting down 2025-03-31T16:19:07.488311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-03-31T16:19:07.519074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-03-31T16:19:07.837153Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437947874, txId: 281474976710687] shutting down Trying to start YDB, gRPC: 22246, MsgBus: 20248 2025-03-31T16:19:08.564586Z node 2 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008973428560913:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:08.564741Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000b0f/r3tmp/tmpRaUq6M/pdisk_1.dat 2025-03-31T16:19:08.644555Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22246, node 2 2025-03-31T16:19:08.689894Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:19:08.689990Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:19:08.691678Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:19:08.710234Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:19:08.710258Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:19:08.710263Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:19:08.710369Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20248 TClient is connected to server localhost:20248 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:19:09.064985Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:09.082110Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:09.134914Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:09.295353Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:19:09.360782Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:11.891651Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008986313464565:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:11.891726Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:11.942474Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:19:12.019764Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:19:12.050390Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:19:12.122711Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:19:12.160919Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:19:12.207636Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:19:12.267639Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008990608432379:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:12.267729Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:12.267781Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008990608432384:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:12.272133Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:19:12.290478Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488008990608432386:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:19:12.377949Z node 2 :TX_PROXY ERROR: Actor# [2:7488008990608432442:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:19:13.451398Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:19:13.501604Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-31T16:19:13.553777Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-31T16:19:13.582180Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488008973428560913:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:13.582323Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TFlatTest::SplitEmptyToMany [GOOD] >> TFlatTest::SplitEmptyTwice ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::ScriptExplain [GOOD] Test command err: Trying to start YDB, gRPC: 5410, MsgBus: 31490 2025-03-31T16:19:03.076570Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008950827938568:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:03.076678Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000aec/r3tmp/tmpK0ntDJ/pdisk_1.dat 2025-03-31T16:19:03.474709Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5410, node 1 2025-03-31T16:19:03.500462Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:19:03.501222Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:19:03.504408Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:19:03.540234Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:19:03.540259Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:19:03.540266Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:19:03.540411Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31490 TClient is connected to server localhost:31490 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:19:04.091325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:04.112147Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:19:04.125072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:04.282730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:04.449105Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:04.527551Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:06.289991Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008963712842232:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:06.290138Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:06.632134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:19:06.660260Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:19:06.688256Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:19:06.715178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:19:06.744259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:19:06.776574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:19:06.854442Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008963712842745:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:06.854554Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:06.854638Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008963712842750:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:06.858348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:19:06.867100Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008963712842752:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:19:06.938182Z node 1 :TX_PROXY ERROR: Actor# [1:7488008963712842805:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:19:07.919610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:19:08.077198Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008950827938568:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:08.077263Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 9173, MsgBus: 15723 2025-03-31T16:19:10.511943Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008978063616257:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:10.512015Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000aec/r3tmp/tmpEe3GmX/pdisk_1.dat 2025-03-31T16:19:10.632730Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:19:10.656318Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:19:10.656405Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:19:10.665789Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9173, node 2 2025-03-31T16:19:10.827645Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:19:10.827667Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:19:10.827674Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:19:10.827793Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15723 TClient is connected to server localhost:15723 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:19:11.377946Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:11.387541Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:19:11.402083Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:11.477523Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:11.698196Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:11.781474Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:13.934780Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008990948519888:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:13.934869Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:13.975993Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:19:14.016752Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:19:14.054640Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:19:14.087945Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:19:14.125565Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:19:14.207024Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:19:14.298388Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008995243487709:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:14.298469Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:14.298527Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008995243487714:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:14.302033Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:19:14.312754Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488008995243487716:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:19:14.380467Z node 2 :TX_PROXY ERROR: Actor# [2:7488008995243487770:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:19:15.512170Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488008978063616257:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:15.512251Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:19:15.986694Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7488008999538455453:2503], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:1:168: Error: At function: DataQueryBlocks
:1:185: Error: At function: TKiDataQueryBlock
:1:208: Error: At function: KiEffects
:1:219: Error: At function: KiWriteTable!
:1:219: Error: Cannot find table 'db.[/Root/ScriptingTest]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:19:15.987133Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MjcwYzA3ZDYtZGExMTAwMzItNjdlZDU4NDQtYTllNzI0NTY=, ActorId: [2:7488008999538455451:2502], ActorState: ExecuteState, TraceId: 01jqpg00w1avhqfw8e4rkk9bck, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: >> DataShardScan::ScanFollowedByUpdate [GOOD] >> DataShardTxOrder::ImmediateBetweenOnline_Init ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_ReproducerDelayRS1 [GOOD] Test command err: 2025-03-31T16:19:12.089807Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:19:12.091041Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:19:12.091159Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:19:12.104551Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:19:12.105007Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:131:2154] 2025-03-31T16:19:12.105256Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:19:12.116031Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:19:12.178037Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:19:12.178398Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:19:12.180157Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-03-31T16:19:12.180228Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-03-31T16:19:12.180287Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-03-31T16:19:12.180637Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:19:12.181430Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:19:12.181505Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:197:2154] in generation 2 2025-03-31T16:19:12.252947Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:19:12.287855Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-03-31T16:19:12.288023Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:19:12.288141Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-03-31T16:19:12.288175Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-03-31T16:19:12.288206Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-03-31T16:19:12.288238Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:19:12.288443Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:12.288735Z node 1 :TX_DATASHARD TRACE: StateWork, 
processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:12.289910Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-03-31T16:19:12.290011Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-03-31T16:19:12.290065Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-31T16:19:12.290101Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:19:12.290152Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-03-31T16:19:12.290184Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-31T16:19:12.290217Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-31T16:19:12.290248Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-03-31T16:19:12.290287Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:19:12.290384Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:12.290438Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:12.290492Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-03-31T16:19:12.294881Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:131:2154]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-03-31T16:19:12.294957Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T16:19:12.295034Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-31T16:19:12.295221Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-03-31T16:19:12.295268Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-03-31T16:19:12.295320Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-03-31T16:19:12.295368Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-31T16:19:12.295409Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-03-31T16:19:12.295459Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-03-31T16:19:12.295506Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-03-31T16:19:12.295815Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-03-31T16:19:12.295875Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-03-31T16:19:12.295924Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-03-31T16:19:12.295956Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-03-31T16:19:12.295993Z node 1 :TX_DATASHARD TRACE: Execution status for 
[0:1] at 9437184 is DelayComplete 2025-03-31T16:19:12.296023Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-03-31T16:19:12.296086Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-03-31T16:19:12.296119Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-03-31T16:19:12.296146Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-03-31T16:19:12.315688Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-03-31T16:19:12.315756Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-03-31T16:19:12.315809Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-03-31T16:19:12.315868Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-03-31T16:19:12.315949Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-03-31T16:19:12.316418Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:12.316480Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:12.316528Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-03-31T16:19:12.316645Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:131:2154]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-03-31T16:19:12.316672Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-31T16:19:12.316779Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-03-31T16:19:12.316815Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-31T16:19:12.316845Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-03-31T16:19:12.316875Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-03-31T16:19:12.320466Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-03-31T16:19:12.320535Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:19:12.320748Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:12.320783Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:12.320834Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-31T16:19:12.320869Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:19:12.320900Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-31T16:19:12.320936Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 
2025-03-31T16:19:12.320964Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-03-31T16:19:12.375135Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-31T16:19:12.375215Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-03-31T16:19:12.375251Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-03-31T16:19:12.375295Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-03-31T16:19:12.375498Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-03-31T16:19:12.375557Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-31T16:19:12.375601Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-03-31T16:19:12.375627Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-03-31T16:19:12.375646Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-03-31T16:19:12.375723Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-31T16:19:12.375745Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-03-31T16:19:12.375777Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-31T16:19:12.375807Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-03-31T16:19:12.375872Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-03-31T16:19:12.375912Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-03-31T16:19:12.375942Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-03-31T16:19:12.375983Z node 1 :TX_DATA ... 
2025-03-31T16:19:16.996660Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:19:16.996723Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000004:4] at 9437184 on unit CompleteOperation 2025-03-31T16:19:16.996772Z node 1 :TX_DATASHARD DEBUG: Complete [1000004 : 4] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 2 ms, propose latency: 4 ms 2025-03-31T16:19:16.996860Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 4 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 1} 2025-03-31T16:19:16.996910Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:19:16.997037Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-03-31T16:19:16.997066Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:19:16.997086Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-03-31T16:19:16.997109Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-03-31T16:19:16.997146Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:19:16.997167Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000004:6] at 9437184 on unit CompleteOperation 2025-03-31T16:19:16.997202Z node 1 :TX_DATASHARD DEBUG: Complete [1000004 : 6] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 2 ms, propose latency: 4 ms 2025-03-31T16:19:16.997235Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 6 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 3} 2025-03-31T16:19:16.997252Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:19:16.997341Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-03-31T16:19:17.082117Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-03-31T16:19:17.082175Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-03-31T16:19:17.082205Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:19:17.082254Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000004:7] at 9437184 on unit CompleteOperation 2025-03-31T16:19:17.082323Z node 1 :TX_DATASHARD DEBUG: Complete [1000004 : 7] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 2 ms, propose latency: 4 ms 2025-03-31T16:19:17.082451Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 7 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2025-03-31T16:19:17.082495Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:19:17.082691Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:19:17.082723Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000004:8] at 9437184 on unit CompleteOperation 2025-03-31T16:19:17.082765Z node 1 :TX_DATASHARD DEBUG: Complete [1000004 : 8] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 2 ms, propose latency: 4 ms 2025-03-31T16:19:17.082811Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 8 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2025-03-31T16:19:17.082860Z node 1 :TX_DATASHARD DEBUG: 
[CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:19:17.083005Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:19:17.083037Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000004:9] at 9437184 on unit CompleteOperation 2025-03-31T16:19:17.083074Z node 1 :TX_DATASHARD DEBUG: Complete [1000004 : 9] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 2 ms, propose latency: 4 ms 2025-03-31T16:19:17.083141Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 9 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2025-03-31T16:19:17.083169Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:19:17.083290Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:19:17.083324Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:14] at 9437184 on unit FinishPropose 2025-03-31T16:19:17.083390Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 14 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: COMPLETE 2025-03-31T16:19:17.083479Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:19:17.083624Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:19:17.083653Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000004:10] at 9437184 on unit CompleteOperation 2025-03-31T16:19:17.083712Z node 1 :TX_DATASHARD DEBUG: Complete [1000004 : 10] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 2 ms, propose latency: 4 ms 2025-03-31T16:19:17.083772Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 10 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2025-03-31T16:19:17.083800Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:19:17.083940Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:19:17.083970Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000004:12] at 9437184 on unit CompleteOperation 2025-03-31T16:19:17.084057Z node 1 :TX_DATASHARD DEBUG: Complete [1000004 : 12] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 2 ms, propose latency: 4 ms 2025-03-31T16:19:17.084112Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 12 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2025-03-31T16:19:17.084145Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:19:17.084265Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:19:17.084290Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000004:13] at 9437184 on unit CompleteOperation 2025-03-31T16:19:17.084326Z node 1 :TX_DATASHARD DEBUG: Complete [1000004 : 13] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:19:17.084364Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:19:17.084480Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-03-31T16:19:17.084511Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:19:17.084542Z 
node 1 :TX_DATASHARD TRACE: Complete execution for [1000004:5] at 9437184 on unit CompleteOperation 2025-03-31T16:19:17.084578Z node 1 :TX_DATASHARD DEBUG: Complete [1000004 : 5] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 2 ms, propose latency: 4 ms 2025-03-31T16:19:17.084624Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 5 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2025-03-31T16:19:17.084653Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:19:17.084912Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:234:2227], Recipient [1:345:2313]: {TEvReadSet step# 1000004 txid# 4 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 1} 2025-03-31T16:19:17.084967Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:19:17.085012Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 4 2025-03-31T16:19:17.085120Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:234:2227], Recipient [1:345:2313]: {TEvReadSet step# 1000004 txid# 6 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 3} 2025-03-31T16:19:17.085162Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:19:17.085259Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 6 2025-03-31T16:19:17.085343Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:234:2227], Recipient [1:345:2313]: {TEvReadSet step# 1000004 txid# 7 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2025-03-31T16:19:17.085372Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:19:17.085395Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 7 2025-03-31T16:19:17.085447Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:234:2227], Recipient [1:345:2313]: {TEvReadSet step# 1000004 txid# 8 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2025-03-31T16:19:17.085484Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:19:17.085512Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 8 2025-03-31T16:19:17.085570Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:234:2227], Recipient [1:345:2313]: {TEvReadSet step# 1000004 txid# 9 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2025-03-31T16:19:17.085594Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:19:17.085616Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 9 2025-03-31T16:19:17.085701Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:234:2227], Recipient [1:345:2313]: {TEvReadSet step# 1000004 txid# 10 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2025-03-31T16:19:17.085730Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 
2025-03-31T16:19:17.085758Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 10 2025-03-31T16:19:17.085839Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:234:2227], Recipient [1:345:2313]: {TEvReadSet step# 1000004 txid# 12 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2025-03-31T16:19:17.085880Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:19:17.085907Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 12 2025-03-31T16:19:17.085980Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:234:2227], Recipient [1:345:2313]: {TEvReadSet step# 1000004 txid# 5 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2025-03-31T16:19:17.086010Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:19:17.086034Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 5 expect 7 2 5 4 - 3 - - - - - - - - - - - - - - - - - - - - - - - - - - actual 7 2 5 4 - 3 - - - - - - - - - - - - - - - - - - - - - - - - - - interm - 2 5 4 - 3 - - - - - - - - - - - - - - - - - - - - - - - - - - >> DataShardOutOfOrder::TestShardRestartNoUndeterminedImmediate [GOOD] >> DataShardOutOfOrder::TestShardRestartDuringWaitingRead >> DataShardOutOfOrder::TestPlannedTimeoutSplit [GOOD] >> DataShardOutOfOrder::TestPlannedHalfOverloadedSplit-UseSink >> Cdc::UpdateRetentionPeriod [GOOD] >> Cdc::SupportedTypes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardScan::ScanFollowedByUpdate [GOOD] Test command err: 2025-03-31T16:19:15.678415Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:19:15.679725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:19:15.679796Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:19:15.688483Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:19:15.688999Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:131:2154] 2025-03-31T16:19:15.689283Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:19:15.699068Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:19:15.737985Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:19:15.738330Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:19:15.739880Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-03-31T16:19:15.739964Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-03-31T16:19:15.740034Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-03-31T16:19:15.740355Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:19:15.740860Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:19:15.740918Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:197:2154] in generation 2 2025-03-31T16:19:15.814173Z node 1 
:TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:19:15.843206Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-03-31T16:19:15.843369Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:19:15.843460Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-03-31T16:19:15.843496Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-03-31T16:19:15.843529Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-03-31T16:19:15.843561Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:19:15.843745Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:15.843779Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:15.843994Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-03-31T16:19:15.844080Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-03-31T16:19:15.844119Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-31T16:19:15.844148Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:19:15.844180Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-03-31T16:19:15.844206Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-31T16:19:15.844237Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-31T16:19:15.844273Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-03-31T16:19:15.844315Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:19:15.844408Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:15.844450Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:15.844499Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-03-31T16:19:15.847144Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:131:2154]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nK\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\n \000Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-03-31T16:19:15.847215Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T16:19:15.847308Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-31T16:19:15.847462Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-03-31T16:19:15.847507Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-03-31T16:19:15.847565Z node 1 :TX_DATASHARD DEBUG: Prepared scheme 
transaction txId 1 at tablet 9437184 2025-03-31T16:19:15.847611Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-31T16:19:15.847657Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-03-31T16:19:15.847725Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-03-31T16:19:15.847760Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-03-31T16:19:15.848073Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-03-31T16:19:15.848146Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-03-31T16:19:15.848212Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-03-31T16:19:15.848248Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-03-31T16:19:15.848303Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-03-31T16:19:15.848341Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-03-31T16:19:15.848390Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-03-31T16:19:15.848426Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-03-31T16:19:15.848454Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-03-31T16:19:15.864027Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-03-31T16:19:15.864089Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-03-31T16:19:15.864133Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-03-31T16:19:15.864217Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-03-31T16:19:15.864290Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-03-31T16:19:15.864747Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:15.864795Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:15.864836Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-03-31T16:19:15.864933Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:131:2154]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-03-31T16:19:15.864988Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-31T16:19:15.865098Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-03-31T16:19:15.865131Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-31T16:19:15.865165Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-03-31T16:19:15.865193Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-03-31T16:19:15.868729Z node 1 :TX_DATASHARD DEBUG: Planned 
transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-03-31T16:19:15.868808Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:19:15.869033Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:15.869077Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:15.869132Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-31T16:19:15.869176Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:19:15.869216Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-31T16:19:15.869259Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-03-31T16:19:15.869296Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-03-31T16:19:15.869342Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-31T16:19:15.869381Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-03-31T16:19:15.869419Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-03-31T16:19:15.869457Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-03-31T16:19:15.869644Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-03-31T16:19:15.869688Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-31T16:19:15.869728Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-03-31T16:19:15.869757Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-03-31T16:19:15.869782Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-03-31T16:19:15.869853Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-31T16:19:15.869884Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-03-31T16:19:15.869917Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-31T16:19:15.869951Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-03-31T16:19:15.870016Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-03-31T16:19:15.870146Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-03-31T16:19:15.870182Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-03-31T16:19:15.870238Z node 1 :TX_D ... 
essageQuota: 9 2025-03-31T16:19:17.619988Z node 1 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 9437186, TxId: 36, MessageQuota: 10 2025-03-31T16:19:17.620145Z node 1 :TX_DATASHARD DEBUG: Send response data ShardId: 9437186, TxId: 36, Size: 22, Rows: 0, PendingAcks: 1, MessageQuota: 9 2025-03-31T16:19:17.620293Z node 1 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 9437184, TxId: 36, MessageQuota: 10 2025-03-31T16:19:17.620442Z node 1 :TX_DATASHARD DEBUG: Send response data ShardId: 9437184, TxId: 36, Size: 22, Rows: 0, PendingAcks: 1, MessageQuota: 9 2025-03-31T16:19:17.620548Z node 1 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 9437185, TxId: 36, PendingAcks: 0 2025-03-31T16:19:17.620594Z node 1 :TX_DATASHARD DEBUG: Finish scan ShardId: 9437185, TxId: 36, MessageQuota: 9 2025-03-31T16:19:17.620969Z node 1 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 9437186, TxId: 36, PendingAcks: 0 2025-03-31T16:19:17.621011Z node 1 :TX_DATASHARD DEBUG: Finish scan ShardId: 9437186, TxId: 36, MessageQuota: 9 2025-03-31T16:19:17.621241Z node 1 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 9437184, TxId: 36, PendingAcks: 0 2025-03-31T16:19:17.621277Z node 1 :TX_DATASHARD DEBUG: Finish scan ShardId: 9437184, TxId: 36, MessageQuota: 9 2025-03-31T16:19:17.621624Z node 1 :TX_DATASHARD DEBUG: FullScan complete at 9437186 2025-03-31T16:19:17.621667Z node 1 :TX_DATASHARD DEBUG: Found op: cookie: 36, at: 9437186 2025-03-31T16:19:17.621728Z node 1 :TX_DATASHARD DEBUG: FullScan complete at 9437184 2025-03-31T16:19:17.621748Z node 1 :TX_DATASHARD DEBUG: Found op: cookie: 36, at: 9437184 2025-03-31T16:19:17.621833Z node 1 :TX_DATASHARD DEBUG: FullScan complete at 9437185 2025-03-31T16:19:17.621852Z node 1 :TX_DATASHARD DEBUG: Found op: cookie: 36, at: 9437185 2025-03-31T16:19:17.621984Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:234:2227], Recipient [1:234:2227]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:17.622037Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:17.622116Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-31T16:19:17.622149Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:19:17.622198Z node 1 :TX_DATASHARD DEBUG: Found ready candidate operation [1000006:36] at 9437184 for ReadTableScan 2025-03-31T16:19:17.622232Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000006:36] at 9437184 on unit ReadTableScan 2025-03-31T16:19:17.622263Z node 1 :TX_DATASHARD TRACE: ReadTable scan complete for [1000006:36] at 9437184 error: , IsFatalError: 0 2025-03-31T16:19:17.622305Z node 1 :TX_DATASHARD TRACE: Execution status for [1000006:36] at 9437184 is Executed 2025-03-31T16:19:17.622332Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000006:36] at 9437184 executing on unit ReadTableScan 2025-03-31T16:19:17.622361Z node 1 :TX_DATASHARD TRACE: Add [1000006:36] at 9437184 to execution unit CompleteOperation 2025-03-31T16:19:17.622384Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000006:36] at 9437184 on unit CompleteOperation 2025-03-31T16:19:17.622580Z node 1 :TX_DATASHARD TRACE: Execution status for [1000006:36] at 9437184 is DelayComplete 2025-03-31T16:19:17.622616Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000006:36] at 9437184 executing on unit CompleteOperation 2025-03-31T16:19:17.622648Z node 1 :TX_DATASHARD 
TRACE: Add [1000006:36] at 9437184 to execution unit CompletedOperations 2025-03-31T16:19:17.622703Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000006:36] at 9437184 on unit CompletedOperations 2025-03-31T16:19:17.622756Z node 1 :TX_DATASHARD TRACE: Execution status for [1000006:36] at 9437184 is Executed 2025-03-31T16:19:17.622780Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000006:36] at 9437184 executing on unit CompletedOperations 2025-03-31T16:19:17.622804Z node 1 :TX_DATASHARD TRACE: Execution plan for [1000006:36] at 9437184 has finished 2025-03-31T16:19:17.622853Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:19:17.622881Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-31T16:19:17.622918Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-31T16:19:17.622949Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-31T16:19:17.623163Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:345:2313], Recipient [1:345:2313]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:17.623195Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:17.623227Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437185 2025-03-31T16:19:17.623248Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:19:17.623275Z node 1 :TX_DATASHARD DEBUG: Found ready candidate operation [1000006:36] at 9437185 for ReadTableScan 2025-03-31T16:19:17.623308Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000006:36] at 9437185 on unit ReadTableScan 2025-03-31T16:19:17.623340Z node 1 :TX_DATASHARD TRACE: ReadTable scan complete for [1000006:36] at 9437185 error: , IsFatalError: 0 2025-03-31T16:19:17.623376Z node 1 :TX_DATASHARD TRACE: Execution status for [1000006:36] at 9437185 is Executed 2025-03-31T16:19:17.623393Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000006:36] at 9437185 executing on unit ReadTableScan 2025-03-31T16:19:17.623407Z node 1 :TX_DATASHARD TRACE: Add [1000006:36] at 9437185 to execution unit CompleteOperation 2025-03-31T16:19:17.623421Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000006:36] at 9437185 on unit CompleteOperation 2025-03-31T16:19:17.623534Z node 1 :TX_DATASHARD TRACE: Execution status for [1000006:36] at 9437185 is DelayComplete 2025-03-31T16:19:17.623550Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000006:36] at 9437185 executing on unit CompleteOperation 2025-03-31T16:19:17.623565Z node 1 :TX_DATASHARD TRACE: Add [1000006:36] at 9437185 to execution unit CompletedOperations 2025-03-31T16:19:17.623586Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000006:36] at 9437185 on unit CompletedOperations 2025-03-31T16:19:17.623613Z node 1 :TX_DATASHARD TRACE: Execution status for [1000006:36] at 9437185 is Executed 2025-03-31T16:19:17.623630Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000006:36] at 9437185 executing on unit CompletedOperations 2025-03-31T16:19:17.623643Z node 1 :TX_DATASHARD TRACE: Execution plan for [1000006:36] at 9437185 has finished 2025-03-31T16:19:17.623658Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:19:17.623682Z node 1 :TX_DATASHARD TRACE: Check candidate 
unit PlanQueue at 9437185 2025-03-31T16:19:17.623698Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437185 has no attached operations 2025-03-31T16:19:17.623713Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437185 2025-03-31T16:19:17.623817Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:457:2399], Recipient [1:457:2399]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:17.623844Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:17.623870Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437186 2025-03-31T16:19:17.623885Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437186 active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:19:17.623899Z node 1 :TX_DATASHARD DEBUG: Found ready candidate operation [1000006:36] at 9437186 for ReadTableScan 2025-03-31T16:19:17.623914Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000006:36] at 9437186 on unit ReadTableScan 2025-03-31T16:19:17.623928Z node 1 :TX_DATASHARD TRACE: ReadTable scan complete for [1000006:36] at 9437186 error: , IsFatalError: 0 2025-03-31T16:19:17.623950Z node 1 :TX_DATASHARD TRACE: Execution status for [1000006:36] at 9437186 is Executed 2025-03-31T16:19:17.624011Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000006:36] at 9437186 executing on unit ReadTableScan 2025-03-31T16:19:17.624023Z node 1 :TX_DATASHARD TRACE: Add [1000006:36] at 9437186 to execution unit CompleteOperation 2025-03-31T16:19:17.624072Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000006:36] at 9437186 on unit CompleteOperation 2025-03-31T16:19:17.624166Z node 1 :TX_DATASHARD TRACE: Execution status for [1000006:36] at 9437186 is DelayComplete 2025-03-31T16:19:17.624179Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000006:36] at 9437186 executing on unit CompleteOperation 2025-03-31T16:19:17.624192Z node 1 :TX_DATASHARD TRACE: Add [1000006:36] at 9437186 to execution unit CompletedOperations 2025-03-31T16:19:17.624205Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000006:36] at 9437186 on unit CompletedOperations 2025-03-31T16:19:17.624221Z node 1 :TX_DATASHARD TRACE: Execution status for [1000006:36] at 9437186 is Executed 2025-03-31T16:19:17.624233Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000006:36] at 9437186 executing on unit CompletedOperations 2025-03-31T16:19:17.624247Z node 1 :TX_DATASHARD TRACE: Execution plan for [1000006:36] at 9437186 has finished 2025-03-31T16:19:17.624269Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437186 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:19:17.624286Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437186 2025-03-31T16:19:17.624313Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437186 has no attached operations 2025-03-31T16:19:17.624336Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437186 2025-03-31T16:19:17.636855Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:19:17.636927Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:19:17.636965Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000006:36] at 9437184 on unit CompleteOperation 2025-03-31T16:19:17.637029Z node 1 :TX_DATASHARD DEBUG: Complete [1000006 : 36] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 4 ms, propose latency: 5 ms 2025-03-31T16:19:17.637106Z 
node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:19:17.637290Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437185 2025-03-31T16:19:17.637315Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437185 2025-03-31T16:19:17.637356Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000006:36] at 9437185 on unit CompleteOperation 2025-03-31T16:19:17.637391Z node 1 :TX_DATASHARD DEBUG: Complete [1000006 : 36] from 9437185 at tablet 9437185 send result to client [1:99:2134], exec latency: 4 ms, propose latency: 5 ms 2025-03-31T16:19:17.637418Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 2025-03-31T16:19:17.637589Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-03-31T16:19:17.637617Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-03-31T16:19:17.637641Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000006:36] at 9437186 on unit CompleteOperation 2025-03-31T16:19:17.637671Z node 1 :TX_DATASHARD DEBUG: Complete [1000006 : 36] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 4 ms, propose latency: 5 ms 2025-03-31T16:19:17.637696Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 >> DataShardTxOrder::DelayData >> DataShardTxOrder::RandomPoints_DelayData >> DataShardOutOfOrder::TestReadTableWriteConflict [GOOD] >> DataShardOutOfOrder::TestSecondaryClearanceAfterShardRestartRace >> DataShardTxOrder::ZigZag >> DataShardOutOfOrder::TestReadTableImmediateWriteBlock [GOOD] >> DataShardOutOfOrder::TestReadTableSingleShardImmediate >> DataShardOutOfOrder::UncommittedReads [GOOD] >> DataShardOutOfOrder::TestSnapshotReadPriority ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::UncommittedReads [GOOD] Test command err: 2025-03-31T16:19:17.516889Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:19:17.517122Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:19:17.517283Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001190/r3tmp/tmpx3TljH/pdisk_1.dat 2025-03-31T16:19:17.889186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:19:17.932423Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:19:17.970103Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:19:17.970241Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:19:17.981554Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:19:18.064008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:19:18.100042Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:19:18.100957Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:19:18.101324Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-31T16:19:18.101566Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:19:18.144929Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:19:18.145683Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:19:18.145796Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:19:18.147606Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:19:18.147694Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:19:18.147761Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:19:18.148177Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:19:18.148297Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:19:18.148383Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-31T16:19:18.148776Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:19:18.173991Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T16:19:18.174220Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:19:18.174362Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-31T16:19:18.174394Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:19:18.174426Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T16:19:18.174465Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:19:18.174695Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:18.174745Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:18.175007Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T16:19:18.175112Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T16:19:18.175218Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:19:18.175315Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:19:18.175366Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-31T16:19:18.175412Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-31T16:19:18.175446Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-31T16:19:18.175482Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:19:18.175531Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:19:18.175643Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:18.175680Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:18.175719Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-31T16:19:18.175821Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-03-31T16:19:18.175879Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T16:19:18.176002Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:19:18.176304Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-31T16:19:18.176375Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T16:19:18.176478Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T16:19:18.176531Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-31T16:19:18.176569Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-31T16:19:18.176609Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-03-31T16:19:18.176643Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-31T16:19:18.176984Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-31T16:19:18.177055Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-31T16:19:18.177097Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-31T16:19:18.177134Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-31T16:19:18.177191Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-31T16:19:18.177223Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-31T16:19:18.177259Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-31T16:19:18.177291Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-31T16:19:18.177333Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-31T16:19:18.178605Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:19:18.178662Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-31T16:19:18.178714Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-31T16:19:18.178776Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-03-31T16:19:18.178849Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-31T16:19:18.181551Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-31T16:19:18.181647Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:19:18.342597Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:18.342651Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:18.342686Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-31T16:19:18.343520Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:568:2495], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-03-31T16:19:18.343555Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-31T16:19:18.344163Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-31T16:19:18.344212Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-03-31T16:19:18.344249Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-03-31T16:19:18.344289Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-03-31T16:19:18.348480Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T16:19:18.348588Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:19:18.349290Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:18.349337Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:18.349404Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:19:1 ... 1T16:19:19.474365Z node 1 :TX_DATASHARD TRACE: TTxWrite complete: at tablet# 72075186224037888 2025-03-31T16:19:19.474400Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:5] at 72075186224037888 on unit FinishProposeWrite 2025-03-31T16:19:19.474434Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 5 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_COMPLETED 2025-03-31T16:19:19.474516Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 ... waiting for blocked commit 2025-03-31T16:19:19.533819Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jqpg049s9mgp3gdbgcbhkr4b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDhiMGUwMTctZWRiZWZiZGMtMTM2NTY3YTYtNzhkZDdiNTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-31T16:19:19.536428Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 278003712, Sender [1:961:2749], Recipient [1:666:2570]: NKikimrDataEvents.TEvWrite Operations { Type: OPERATION_UPSERT TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } ColumnIds: 1 ColumnIds: 2 PayloadIndex: 0 PayloadFormat: FORMAT_CELLVEC } TxMode: MODE_IMMEDIATE 2025-03-31T16:19:19.536494Z node 1 :TX_DATASHARD TRACE: Handle TTxWrite: at tablet# 72075186224037888 2025-03-31T16:19:19.536590Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435074, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2025-03-31T16:19:19.536627Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2025-03-31T16:19:19.536682Z node 1 :TX_DATASHARD TRACE: TTxWrite:: execute at tablet# 72075186224037888 2025-03-31T16:19:19.536794Z node 1 :TX_DATASHARD TRACE: Parsing write transaction for 0 at 72075186224037888, record: Operations { Type: OPERATION_UPSERT TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } ColumnIds: 1 ColumnIds: 2 PayloadIndex: 0 PayloadFormat: FORMAT_CELLVEC } TxMode: MODE_IMMEDIATE 2025-03-31T16:19:19.536851Z node 1 :TX_DATASHARD TRACE: Table /Root/table-1, shard: 72075186224037888, write point (Uint32 : 4) 2025-03-31T16:19:19.536895Z node 1 :TX_DATASHARD TRACE: -- AddWriteRange: (Uint32 : 4) table: [72057594046644480:2:1] 2025-03-31T16:19:19.536961Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit CheckWrite 2025-03-31T16:19:19.537003Z node 1 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2025-03-31T16:19:19.537054Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit CheckWrite 2025-03-31T16:19:19.537107Z node 1 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-03-31T16:19:19.537146Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit BuildAndWaitDependencies 2025-03-31T16:19:19.537192Z node 1 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v2000/18446744073709551615 ImmediateWriteEdge# v2500/18446744073709551615 ImmediateWriteEdgeReplied# v2500/18446744073709551615 2025-03-31T16:19:19.537239Z node 1 :TX_DATASHARD TRACE: Activated operation [0:6] at 72075186224037888 2025-03-31T16:19:19.537268Z node 1 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2025-03-31T16:19:19.537287Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-03-31T16:19:19.537302Z node 1 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037888 to execution unit ExecuteWrite 2025-03-31T16:19:19.537325Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit ExecuteWrite 2025-03-31T16:19:19.537350Z node 1 :TX_DATASHARD DEBUG: Executing write operation for [0:6] at 72075186224037888 2025-03-31T16:19:19.537391Z node 1 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v2000/18446744073709551615 ImmediateWriteEdge# v2500/18446744073709551615 ImmediateWriteEdgeReplied# v2500/18446744073709551615 2025-03-31T16:19:19.537494Z node 1 :TX_DATASHARD DEBUG: Executed write operation for [0:6] at 
72075186224037888, row count=1 2025-03-31T16:19:19.537538Z node 1 :TX_DATASHARD TRACE: add locks to result: 0 2025-03-31T16:19:19.537582Z node 1 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-31T16:19:19.537619Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit ExecuteWrite 2025-03-31T16:19:19.537671Z node 1 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037888 to execution unit FinishProposeWrite 2025-03-31T16:19:19.537699Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit FinishProposeWrite 2025-03-31T16:19:19.537733Z node 1 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is DelayComplete 2025-03-31T16:19:19.537765Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit FinishProposeWrite 2025-03-31T16:19:19.537798Z node 1 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037888 to execution unit CompletedOperations 2025-03-31T16:19:19.537820Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit CompletedOperations 2025-03-31T16:19:19.537852Z node 1 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2025-03-31T16:19:19.537876Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit CompletedOperations 2025-03-31T16:19:19.537907Z node 1 :TX_DATASHARD TRACE: Execution plan for [0:6] at 72075186224037888 has finished ... blocked commit for tablet 72075186224037888 2025-03-31T16:19:19.641611Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jqpg04bj2jyzz9zy5nc67c7s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjM1NDdiZTYtMjg5ZTBjZC1iMTk3Y2NiMS03OGQ4NDAwYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-31T16:19:19.643282Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [1:979:2773], Recipient [1:666:2570]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2025-03-31T16:19:19.643506Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-03-31T16:19:19.643571Z node 1 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v2000/18446744073709551615 ImmediateWriteEdge# v2500/18446744073709551615 ImmediateWriteEdgeReplied# v2500/18446744073709551615 2025-03-31T16:19:19.643619Z node 1 :TX_DATASHARD TRACE: 72075186224037888 changed HEAD read to non-repeatable v2500/18446744073709551615 2025-03-31T16:19:19.643704Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CheckRead 2025-03-31T16:19:19.643785Z node 1 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-03-31T16:19:19.643824Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckRead 2025-03-31T16:19:19.643860Z node 1 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-03-31T16:19:19.643893Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies 2025-03-31T16:19:19.643936Z node 1 :TX_DATASHARD TRACE: Activated operation [0:7] at 72075186224037888 2025-03-31T16:19:19.644021Z node 1 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-03-31T16:19:19.644050Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-03-31T16:19:19.644073Z node 1 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit ExecuteRead 2025-03-31T16:19:19.644106Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit ExecuteRead 2025-03-31T16:19:19.644219Z node 1 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2025-03-31T16:19:19.644468Z node 1 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is DelayComplete 2025-03-31T16:19:19.644504Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteRead 2025-03-31T16:19:19.644539Z node 1 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 2025-03-31T16:19:19.644574Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CompletedOperations 2025-03-31T16:19:19.644621Z node 1 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-03-31T16:19:19.644643Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2025-03-31T16:19:19.644668Z node 1 :TX_DATASHARD TRACE: Execution plan for [0:7] at 72075186224037888 has finished 2025-03-31T16:19:19.644709Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 
2025-03-31T16:19:19.722787Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 1 LatestStep: 3000 2025-03-31T16:19:19.722875Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 3000} 2025-03-31T16:19:19.864504Z node 1 :TX_DATASHARD TRACE: TTxWrite complete: at tablet# 72075186224037888 2025-03-31T16:19:19.864606Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:6] at 72075186224037888 on unit FinishProposeWrite 2025-03-31T16:19:19.864664Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 6 at tablet 72075186224037888 send to client, propose latency: 1000 ms, status: STATUS_COMPLETED 2025-03-31T16:19:19.864783Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:19:19.864893Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-03-31T16:19:19.864947Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:7] at 72075186224037888 on unit ExecuteRead 2025-03-31T16:19:19.865003Z node 1 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[1:979:2773], 0} after executionsCount# 1 2025-03-31T16:19:19.865135Z node 1 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[1:979:2773], 0} sends rowCount# 4, bytes# 128, quota rows left# 997, quota bytes left# 5242752, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-03-31T16:19:19.865253Z node 1 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[1:979:2773], 0} finished in read 2025-03-31T16:19:19.869359Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [1:979:2773], Recipient [1:666:2570]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-03-31T16:19:19.869461Z node 1 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } }, { items { uint32_value: 3 } items { uint32_value: 3 } }, { items { uint32_value: 4 } items { uint32_value: 4 } } >> BackupRestore::PrefixedVectorIndex [GOOD] >> Cdc::DescribeStream [GOOD] >> Cdc::DecimalKey >> DataShardOutOfOrder::TestOutOfOrderLockLost [GOOD] >> DataShardOutOfOrder::TestOutOfOrderNoBarrierRestartImmediateLongTail >> DataShardOutOfOrder::TestSnapshotReadAfterStuckRW [GOOD] >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed+EvWrite [GOOD] >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed-EvWrite ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestore::PrefixedVectorIndex [GOOD] Test command err: 2025-03-31T16:18:03.510232Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008694037440051:2206];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:03.510890Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001046/r3tmp/tmpipyUan/pdisk_1.dat 2025-03-31T16:18:03.978936Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:03.989036Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:03.989120Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:04.008844Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13405, node 1 2025-03-31T16:18:04.232876Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:04.232897Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:04.232905Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:04.233041Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28759 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:04.719028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:06.635490Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008706922342867:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:06.635628Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:07.166456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Backup "/Root" to "/home/runner/.ya/build/build_root/0l5c/001046/r3tmp/tmpkIbuwp/"Create temporary directory "/Root/~backup_20250331T161807" in databaseProcess "/home/runner/.ya/build/build_root/0l5c/001046/r3tmp/tmpkIbuwp/table"Copy tables: { src: "/Root/table", dst: "/Root/~backup_20250331T161807/table" }Backup table "/Root/~backup_20250331T161807/table" to "/home/runner/.ya/build/build_root/0l5c/001046/r3tmp/tmpkIbuwp/table"Describe table "/Root/~backup_20250331T161807/table"Write scheme into "/home/runner/.ya/build/build_root/0l5c/001046/r3tmp/tmpkIbuwp/table/scheme.pb"Describe table "/Root/table"Write ACL into "/home/runner/.ya/build/build_root/0l5c/001046/r3tmp/tmpkIbuwp/table/permissions.pb"Read table "/Root/~backup_20250331T161807/table"Write data into "/home/runner/.ya/build/build_root/0l5c/001046/r3tmp/tmpkIbuwp/table/data_00.csv"Drop table "/Root/~backup_20250331T161807/table"Remove temporary directory "/Root/~backup_20250331T161807" in database2025-03-31T16:18:07.724293Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2025-03-31T16:18:07.736250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710664:0, at schemeshard: 72057594046644480 Backup completed successfully2025-03-31T16:18:07.753972Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008711217310756:2387], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:07.754046Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:07.815463Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found Restore "/home/runner/.ya/build/build_root/0l5c/001046/r3tmp/tmpkIbuwp/" to "/Root"Resolved db base path: "/Root"Restore folder "/home/runner/.ya/build/build_root/0l5c/001046/r3tmp/tmpkIbuwp/" to "/Root"Process "/home/runner/.ya/build/build_root/0l5c/001046/r3tmp/tmpkIbuwp/table"Read scheme from "/home/runner/.ya/build/build_root/0l5c/001046/r3tmp/tmpkIbuwp/table/scheme.pb"Restore table "/home/runner/.ya/build/build_root/0l5c/001046/r3tmp/tmpkIbuwp/table" to "/Root/table"2025-03-31T16:18:07.860745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 Created "/Root/table"Read data from "/home/runner/.ya/build/build_root/0l5c/001046/r3tmp/tmpkIbuwp/table/data_00.csv"Restore ACL "/home/runner/.ya/build/build_root/0l5c/001046/r3tmp/tmpkIbuwp/table" to "/Root/table"Read ACL from "/home/runner/.ya/build/build_root/0l5c/001046/r3tmp/tmpkIbuwp/table/permissions.pb"2025-03-31T16:18:07.945937Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710667:0, at schemeshard: 72057594046644480 Restore completed successfully 2025-03-31T16:18:09.492465Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488008718627640088:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:09.492553Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001046/r3tmp/tmpD08V1L/pdisk_1.dat 2025-03-31T16:18:09.633803Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:09.658899Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:09.658998Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:09.662458Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24085, node 4 2025-03-31T16:18:09.759332Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:09.759370Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:09.759381Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:09.759546Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8030 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:09.975251Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:12.489865Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488008731512542998:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:12.489928Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:12.512747Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:18:12.607167Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488008731512543249:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:12.607251Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:12.629235Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715659:1, at schemeshard: 72057594046644480 Backup "/Root" to "/home/runner/.ya/build/build_root/0l5c/001046/r3tmp/tmpC1zM9n/"Create temporary directory "/Root/~backup_20250331T161812" in databaseProcess "/home/runner/.ya/build/build_root/0l5c/001046/r3tmp/tmpC1zM9n/table"Copy tables: { src: "/Root/table", dst: "/Root/~backup_20250331T161812/table" }Backup table "/Root/~backup_20250331T161812/table" to "/home/runner/.ya/build/build_root/0l5c/001046/r3tmp/tmpC1zM9n/table"Describe table "/Root/~backup_20250331T161812/table"Write scheme into ... D WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715717:0, at schemeshard: 72057594046644480 2025-03-31T16:19:08.185986Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715724:0, at schemeshard: 72057594046644480 2025-03-31T16:19:08.844874Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715729:0, at schemeshard: 72057594046644480 2025-03-31T16:19:09.477057Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715736:0, at schemeshard: 72057594046644480 2025-03-31T16:19:10.415873Z node 19 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:19:10.415904Z node 19 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:19:10.979672Z node 19 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=19&id=ZDViNDA0YzItZjUzYmMyMDgtZjIyMDk3ZWUtMjdhYmMwYzA=, ActorId: [19:7488008965275409075:2801], ActorState: ExecuteState, TraceId: 01jqpfzrk17p2ekvxqc7gp3xm8, Create QueryResponse for error on request, msg: 2025-03-31T16:19:10.980798Z node 19 :KQP_EXECUTER ERROR: TxId: 281474976715749. Ctx: { TraceId: 01jqpfzrk17p2ekvxqc7gp3xm8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=19&id=ZDViNDA0YzItZjUzYmMyMDgtZjIyMDk3ZWUtMjdhYmMwYzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Restore failed: [ {
: Info: path: /home/runner/.ya/build/build_root/0l5c/001046/r3tmp/tmpa2Vysg/externalDataSource } {
: Error: Secret "secret" does not exist or you do not have access permissions } ]Cleanup 2025-03-31T16:19:13.215899Z node 22 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[22:7488008994734139789:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:13.216046Z node 22 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001046/r3tmp/tmpIqr6dH/pdisk_1.dat 2025-03-31T16:19:13.510355Z node 22 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:19:13.566504Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:19:13.566645Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:19:13.570592Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62142, node 22 2025-03-31T16:19:13.694433Z node 22 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:19:13.694471Z node 22 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:19:13.694481Z node 22 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:19:13.694692Z node 22 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7747 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:19:14.135331Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:17.985141Z node 22 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [22:7488009011914010075:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:17.985250Z node 22 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:18.016356Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Backup "/Root" to "/home/runner/.ya/build/build_root/0l5c/001046/r3tmp/tmpzfWPHA/"Create temporary directory "/Root/~backup_20250331T161918" in database2025-03-31T16:19:18.204462Z node 22 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[22:7488008994734139789:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:18.204563Z node 22 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Process "/home/runner/.ya/build/build_root/0l5c/001046/r3tmp/tmpzfWPHA/table"Copy tables: { src: "/Root/table", dst: "/Root/~backup_20250331T161918/table" }Backup table "/Root/~backup_20250331T161918/table" to "/home/runner/.ya/build/build_root/0l5c/001046/r3tmp/tmpzfWPHA/table"Describe table "/Root/~backup_20250331T161918/table"Write scheme into "/home/runner/.ya/build/build_root/0l5c/001046/r3tmp/tmpzfWPHA/table/scheme.pb"Describe table "/Root/table"Write ACL into "/home/runner/.ya/build/build_root/0l5c/001046/r3tmp/tmpzfWPHA/table/permissions.pb"Read table "/Root/~backup_20250331T161918/table"Write data into "/home/runner/.ya/build/build_root/0l5c/001046/r3tmp/tmpzfWPHA/table/data_00.csv"Drop table "/Root/~backup_20250331T161918/table"2025-03-31T16:19:18.759073Z node 22 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 22, TabletId: 72075186224037895 not found 2025-03-31T16:19:18.759149Z node 22 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 22, TabletId: 72075186224037892 not found Remove temporary directory "/Root/~backup_20250331T161918" in database2025-03-31T16:19:18.764299Z node 22 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 22, TabletId: 72075186224037893 not found 2025-03-31T16:19:18.764352Z node 22 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 22, TabletId: 72075186224037894 not found 2025-03-31T16:19:18.779584Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710664:0, at schemeshard: 72057594046644480 Backup completed successfully2025-03-31T16:19:18.803186Z node 22 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [22:7488009016208978644:2413], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:18.803317Z node 22 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } Restore "/home/runner/.ya/build/build_root/0l5c/001046/r3tmp/tmpzfWPHA/" to "/Root"2025-03-31T16:19:18.894949Z node 22 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 22, TabletId: 72075186224037888 not found 2025-03-31T16:19:18.895010Z node 22 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 22, TabletId: 72075186224037889 not found 2025-03-31T16:19:18.898739Z node 22 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 22, TabletId: 72075186224037890 not found 2025-03-31T16:19:18.898783Z node 22 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 22, TabletId: 72075186224037891 not found Resolved db base path: "/Root"Restore folder "/home/runner/.ya/build/build_root/0l5c/001046/r3tmp/tmpzfWPHA/" to "/Root"Process "/home/runner/.ya/build/build_root/0l5c/001046/r3tmp/tmpzfWPHA/table"Read scheme from "/home/runner/.ya/build/build_root/0l5c/001046/r3tmp/tmpzfWPHA/table/scheme.pb"Restore table "/home/runner/.ya/build/build_root/0l5c/001046/r3tmp/tmpzfWPHA/table" to "/Root/table"2025-03-31T16:19:18.937260Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 Created "/Root/table"Read data from "/home/runner/.ya/build/build_root/0l5c/001046/r3tmp/tmpzfWPHA/table/data_00.csv"Restore index "byValue" on "/Root/table"2025-03-31T16:19:19.040831Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-03-31T16:19:19.148357Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-03-31T16:19:19.221412Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715760:0, at schemeshard: 72057594046644480 2025-03-31T16:19:19.318035Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715762:0, at schemeshard: 72057594046644480 2025-03-31T16:19:19.324585Z node 22 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 22, TabletId: 72075186224037900 not found 2025-03-31T16:19:19.414752Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715763:0, at schemeshard: 72057594046644480 2025-03-31T16:19:19.494828Z node 22 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 22, TabletId: 72075186224037901 not found 2025-03-31T16:19:19.494877Z node 22 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 22, TabletId: 72075186224037902 not found Restore ACL "/home/runner/.ya/build/build_root/0l5c/001046/r3tmp/tmpzfWPHA/table" to "/Root/table"Read ACL from "/home/runner/.ya/build/build_root/0l5c/001046/r3tmp/tmpzfWPHA/table/permissions.pb"2025-03-31T16:19:19.810291Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo 
unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710669:0, at schemeshard: 72057594046644480 Restore completed successfully >> DataShardTxOrder::ImmediateBetweenOnline_Init_oo8 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestSnapshotReadAfterStuckRW [GOOD] Test command err: 2025-03-31T16:19:18.325302Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:19:18.325526Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:19:18.325698Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00117e/r3tmp/tmpmiZRw1/pdisk_1.dat 2025-03-31T16:19:18.698752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:19:18.736816Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:19:18.775778Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:19:18.775911Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:19:18.787413Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:19:18.867835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:19:18.908253Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:19:18.909362Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:19:18.909802Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-31T16:19:18.910051Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:19:18.960081Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:19:18.960750Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:19:18.960860Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:19:18.962504Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:19:18.962588Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:19:18.962674Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:19:18.963050Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:19:18.963210Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:19:18.963343Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-31T16:19:18.974108Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:19:19.002745Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T16:19:19.002957Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:19:19.003136Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-31T16:19:19.003181Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:19:19.003229Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T16:19:19.003262Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:19:19.003442Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:19.003481Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:19.003681Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T16:19:19.003748Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T16:19:19.003836Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:19:19.003865Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:19:19.003891Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-31T16:19:19.003919Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-31T16:19:19.003943Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-31T16:19:19.003964Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:19:19.004011Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:19:19.004073Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:19.004095Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:19.004127Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-31T16:19:19.004186Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-03-31T16:19:19.004215Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T16:19:19.004285Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:19:19.004499Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-31T16:19:19.004547Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T16:19:19.004638Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T16:19:19.004682Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-31T16:19:19.004716Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-31T16:19:19.004750Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-03-31T16:19:19.004774Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-31T16:19:19.004979Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-31T16:19:19.005007Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-31T16:19:19.005033Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-31T16:19:19.005055Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-31T16:19:19.005086Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-31T16:19:19.005108Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-31T16:19:19.005129Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-31T16:19:19.005151Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-31T16:19:19.005175Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-31T16:19:19.006479Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-31T16:19:19.006537Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:19:19.017143Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:19:19.017213Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-31T16:19:19.017250Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-31T16:19:19.017290Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-03-31T16:19:19.017340Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-31T16:19:19.165574Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:19.165634Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:19.165667Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-31T16:19:19.166050Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:568:2495], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-03-31T16:19:19.166085Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-31T16:19:19.166187Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-31T16:19:19.166224Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-03-31T16:19:19.166261Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-03-31T16:19:19.166307Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-03-31T16:19:19.170720Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T16:19:19.170798Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:19:19.171493Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:19.171533Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:19.171579Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:19:1 ... de 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-31T16:19:20.898149Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [1:61:2108], Recipient [1:666:2570]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715663 LockNode: 1 Status: STATUS_NOT_FOUND 2025-03-31T16:19:20.898275Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [1:61:2108], Recipient [1:756:2634]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715663 LockNode: 1 Status: STATUS_NOT_FOUND 2025-03-31T16:19:20.909154Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037888 2025-03-31T16:19:20.909299Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:666:2570], Recipient [1:756:2634]: {TEvReadSet step# 3044 txid# 281474976715664 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 1} 2025-03-31T16:19:20.909337Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:19:20.909403Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715664 2025-03-31T16:19:20.909519Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037889 2025-03-31T16:19:20.909613Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:756:2634], Recipient [1:666:2570]: {TEvReadSet step# 3044 txid# 281474976715664 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 1} 2025-03-31T16:19:20.909648Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:19:20.909674Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 281474976715664 ... performing the first select 2025-03-31T16:19:21.645291Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jqpg05zb03z1gy205vfzax3n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTM2MTIwZmUtMWUyNjQyM2MtM2ZlMzI2MGEtMzYxYWZlMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-31T16:19:21.649993Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [1:1106:2876], Recipient [1:666:2570]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 4000 TxId: 18446744073709551615 } LockTxId: 281474976715665 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC KeysSize: 1 2025-03-31T16:19:21.650253Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [1:1108:2877], Recipient [1:756:2634]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 3 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 4000 TxId: 18446744073709551615 } LockTxId: 281474976715665 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC KeysSize: 1 2025-03-31T16:19:21.650435Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-03-31T16:19:21.650561Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit CheckRead 2025-03-31T16:19:21.650652Z node 1 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-03-31T16:19:21.650699Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit CheckRead 2025-03-31T16:19:21.650770Z node 1 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-03-31T16:19:21.650809Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit BuildAndWaitDependencies 2025-03-31T16:19:21.650863Z node 1 :TX_DATASHARD TRACE: Activated operation [0:5] at 72075186224037888 2025-03-31T16:19:21.650909Z node 1 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-03-31T16:19:21.650934Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-03-31T16:19:21.650960Z node 1 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit ExecuteRead 2025-03-31T16:19:21.650986Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit ExecuteRead 2025-03-31T16:19:21.651168Z node 1 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 4000 TxId: 18446744073709551615 } LockTxId: 281474976715665 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2025-03-31T16:19:21.651467Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 281474976715665, counter# 1 for [OwnerId: 72057594046644480, LocalPathId: 2] 2025-03-31T16:19:21.651528Z node 1 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v4000/18446744073709551615 2025-03-31T16:19:21.651581Z node 1 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[1:1106:2876], 0} after executionsCount# 1 2025-03-31T16:19:21.651625Z node 1 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[1:1106:2876], 0} sends rowCount# 1, bytes# 32, quota rows left# 1000, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-03-31T16:19:21.651705Z node 1 :TX_DATASHARD 
TRACE: 72075186224037888 read iterator# {[1:1106:2876], 0} finished in read 2025-03-31T16:19:21.651798Z node 1 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-03-31T16:19:21.651828Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit ExecuteRead 2025-03-31T16:19:21.651857Z node 1 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit CompletedOperations 2025-03-31T16:19:21.651885Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit CompletedOperations 2025-03-31T16:19:21.651931Z node 1 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-03-31T16:19:21.651954Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit CompletedOperations 2025-03-31T16:19:21.651995Z node 1 :TX_DATASHARD TRACE: Execution plan for [0:5] at 72075186224037888 has finished 2025-03-31T16:19:21.652053Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-03-31T16:19:21.652158Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-03-31T16:19:21.652255Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2025-03-31T16:19:21.652302Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037889 on unit CheckRead 2025-03-31T16:19:21.652362Z node 1 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037889 is Executed 2025-03-31T16:19:21.652386Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037889 executing on unit CheckRead 2025-03-31T16:19:21.652409Z node 1 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-03-31T16:19:21.652439Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037889 on unit BuildAndWaitDependencies 2025-03-31T16:19:21.652485Z node 1 :TX_DATASHARD TRACE: Activated operation [0:5] at 72075186224037889 2025-03-31T16:19:21.652553Z node 1 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037889 is Executed 2025-03-31T16:19:21.652580Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-03-31T16:19:21.652620Z node 1 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037889 to execution unit ExecuteRead 2025-03-31T16:19:21.652645Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037889 on unit ExecuteRead 2025-03-31T16:19:21.652751Z node 1 :TX_DATASHARD TRACE: 72075186224037889 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 3 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 4000 TxId: 18446744073709551615 } LockTxId: 281474976715665 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2025-03-31T16:19:21.652941Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 Acquired lock# 281474976715665, counter# 1 for [OwnerId: 72057594046644480, LocalPathId: 3] 2025-03-31T16:19:21.652975Z node 1 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037889 promoting UnprotectedReadEdge to v4000/18446744073709551615 2025-03-31T16:19:21.653008Z node 1 :TX_DATASHARD TRACE: 72075186224037889 Complete read# {[1:1108:2877], 0} after executionsCount# 1 2025-03-31T16:19:21.653040Z node 1 :TX_DATASHARD TRACE: 72075186224037889 read 
iterator# {[1:1108:2877], 0} sends rowCount# 1, bytes# 32, quota rows left# 1000, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-03-31T16:19:21.653092Z node 1 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[1:1108:2877], 0} finished in read 2025-03-31T16:19:21.653153Z node 1 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037889 is Executed 2025-03-31T16:19:21.653184Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037889 executing on unit ExecuteRead 2025-03-31T16:19:21.653213Z node 1 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037889 to execution unit CompletedOperations 2025-03-31T16:19:21.653237Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037889 on unit CompletedOperations 2025-03-31T16:19:21.653318Z node 1 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037889 is Executed 2025-03-31T16:19:21.653360Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037889 executing on unit CompletedOperations 2025-03-31T16:19:21.653391Z node 1 :TX_DATASHARD TRACE: Execution plan for [0:5] at 72075186224037889 has finished 2025-03-31T16:19:21.653419Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2025-03-31T16:19:21.653488Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2025-03-31T16:19:21.653757Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [1:61:2108], Recipient [1:666:2570]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715665 LockNode: 1 Status: STATUS_SUBSCRIBED 2025-03-31T16:19:21.653903Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [1:61:2108], Recipient [1:756:2634]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715665 LockNode: 1 Status: STATUS_SUBSCRIBED 2025-03-31T16:19:21.656406Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [1:1106:2876], Recipient [1:666:2570]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-03-31T16:19:21.656488Z node 1 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } 2025-03-31T16:19:21.657168Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [1:1108:2877], Recipient [1:756:2634]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-03-31T16:19:21.657214Z node 1 :TX_DATASHARD TRACE: 72075186224037889 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } |89.4%| [TA] $(B)/ydb/services/ydb/backup_ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.4%| [TA] {RESULT} $(B)/ydb/services/ydb/backup_ut/test-results/unittest/{meta.json ... 
results_accumulator.log}
>> TFlatTest::SplitEmptyTwice [GOOD]
>> DataShardOutOfOrder::TestShardRestartDuringWaitingRead [GOOD]
>> DataShardTxOrder::ForceOnlineBetweenOnline_oo8 [GOOD]
>> DataShardTxOrder::ImmediateBetweenOnline_Init [GOOD]
>> BasicUsage::BasicWriteSession [GOOD]
>> BasicUsage::CloseWriteSessionImmediately
>> DataShardOutOfOrder::TestPlannedHalfOverloadedSplit-UseSink [GOOD]
>> DataShardTxOrder::ZigZag_oo8_dirty [GOOD]
>> DataShardOutOfOrder::TestReadTableSingleShardImmediate [GOOD]
>> BasicUsage::FallbackToSingleDb [GOOD]
>> BasicUsage::FallbackToSingleDbAfterBadRequest
>> BasicUsage::WriteSessionNoAvailableDatabase [GOOD]
>> BasicUsage::WriteSessionSwitchDatabases
>> DataShardOutOfOrder::TestOutOfOrderRestartLocksReorderedWithoutBarrier
>> BasicUsage::WriteSessionWriteInHandlers [GOOD]
>> BasicUsage::WriteSessionCloseWaitsForWrites [GOOD]
>> BasicUsage::WriteSessionCloseIgnoresWrites
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestShardRestartDuringWaitingRead [GOOD]
Test command err:
2025-03-31T16:19:14.541415Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:19:14.541677Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:19:14.541858Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00119c/r3tmp/tmp7T4836/pdisk_1.dat 2025-03-31T16:19:14.987715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:19:15.031745Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:19:15.074582Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:19:15.074752Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:19:15.088078Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:19:15.183200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:19:15.550936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:19:15.832153Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:925:2736], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:15.832313Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:934:2741], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:15.832403Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:15.840360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-31T16:19:16.014435Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:939:2744], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-31T16:19:16.096122Z node 1 :TX_PROXY ERROR: Actor# [1:1000:2786] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:19:16.726372Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqpg00qm7dndqam1461wt7gk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGU2ZTkyNzQtMjRhZDQ3NzUtOWZiM2U2MWUtZjc2OTEzM2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:19:16.828561Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqpg01n904eackp9rcp42t29, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjNjYmFjYjItNzgyYzIyNjMtNTNlMzdlZGUtY2MxYzMyMWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:19:17.360222Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqpg01qy1f4gb99ydxp53zds, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDYzMWIwYzItNzc2YjhhNzctZTgxMGI2ZTYtNWQwZDNiZjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 1 } } ... waiting for commit read sets 2025-03-31T16:19:17.485101Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jqpg028q05nypps96g71ddg2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDYzMWIwYzItNzc2YjhhNzctZTgxMGI2ZTYtNWQwZDNiZjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... sending immediate upsert ... waiting for immediate propose 2025-03-31T16:19:17.567445Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jqpg02c028hpe7sybb5cwdh3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTdmZWM0MmItODUxYTg1ZWItNGNlZjdjZDItYmMxMWVjMWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... immediate upsert is blocked 2025-03-31T16:19:17.579890Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_OVERLOADED;details=Rejecting immediate write tx 281474976715666 because datashard 72075186224037889 is restarting;tx_id=281474976715666; 2025-03-31T16:19:17.591392Z node 1 :KQP_COMPUTE WARN: SelfId: [1:1198:2840], Table: `/Root/table-1` ([72057594046644480:2:1]), SessionActorId: [1:1078:2840]Got OVERLOADED for table `/Root/table-1`. ShardID=72075186224037889, Sink=[1:1198:2840]. Ignored this error.{
: Error: Rejecting immediate write tx 281474976715666 because datashard 72075186224037889 is restarting, code: 2006 } 2025-03-31T16:19:17.592028Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:1191:2840], SessionActorId: [1:1078:2840], statusCode=OVERLOADED. Issue=
: Error: Kikimr cluster or one of its subsystems is overloaded. Tablet 72075186224037889 is overloaded. Table `/Root/table-1`., code: 2006
: Error: Rejecting immediate write tx 281474976715666 because datashard 72075186224037889 is restarting, code: 2006 . sessionActorId=[1:1078:2840]. isRollback=0 2025-03-31T16:19:17.594562Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTdmZWM0MmItODUxYTg1ZWItNGNlZjdjZDItYmMxMWVjMWE=, ActorId: [1:1078:2840], ActorState: ExecuteState, TraceId: 01jqpg02c028hpe7sybb5cwdh3, got TEvKqpBuffer::TEvError in ExecuteState, status: OVERLOADED send to: [1:1192:2840] from: [1:1191:2840] 2025-03-31T16:19:17.594769Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:1192:2840] TxId: 281474976715665. Ctx: { TraceId: 01jqpg02c028hpe7sybb5cwdh3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTdmZWM0MmItODUxYTg1ZWItNGNlZjdjZDItYmMxMWVjMWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. OVERLOADED: {
: Error: Kikimr cluster or one of its subsystems is overloaded. Tablet 72075186224037889 is overloaded. Table `/Root/table-1`., code: 2006 subissue: {
: Error: Rejecting immediate write tx 281474976715666 because datashard 72075186224037889 is restarting, code: 2006 } } 2025-03-31T16:19:17.596250Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTdmZWM0MmItODUxYTg1ZWItNGNlZjdjZDItYmMxMWVjMWE=, ActorId: [1:1078:2840], ActorState: ExecuteState, TraceId: 01jqpg02c028hpe7sybb5cwdh3, Create QueryResponse for error on request, msg: 2025-03-31T16:19:17.596799Z node 1 :KQP_COMPUTE WARN: SelfId: [1:1164:2842], Table: `/Root/table-1` ([72057594046644480:2:1]), SessionActorId: [1:1080:2842]TEvDeliveryProblem was received from tablet: 72075186224037889 2025-03-31T16:19:17.596917Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:1154:2842], SessionActorId: [1:1080:2842], statusCode=UNDETERMINED. Issue=
: Error: State of operation is unknown. Error writing to table `/Root/table-1`. Transaction state unknown for tablet 72075186224037889., code: 2026 . sessionActorId=[1:1080:2842]. isRollback=0 2025-03-31T16:19:17.597728Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDYzMWIwYzItNzc2YjhhNzctZTgxMGI2ZTYtNWQwZDNiZjc=, ActorId: [1:1080:2842], ActorState: ExecuteState, TraceId: 01jqpg028q05nypps96g71ddg2, got TEvKqpBuffer::TEvError in ExecuteState, status: UNDETERMINED send to: [1:1155:2842] from: [1:1154:2842] 2025-03-31T16:19:17.598509Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:1155:2842] TxId: 281474976715664. Ctx: { TraceId: 01jqpg028q05nypps96g71ddg2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDYzMWIwYzItNzc2YjhhNzctZTgxMGI2ZTYtNWQwZDNiZjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNDETERMINED: {
: Error: State of operation is unknown. Error writing to table `/Root/table-1`. Transaction state unknown for tablet 72075186224037889., code: 2026 } 2025-03-31T16:19:17.599659Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDYzMWIwYzItNzc2YjhhNzctZTgxMGI2ZTYtNWQwZDNiZjc=, ActorId: [1:1080:2842], ActorState: ExecuteState, TraceId: 01jqpg028q05nypps96g71ddg2, Create QueryResponse for error on request, msg: 2025-03-31T16:19:17.940555Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jqpg02kv3y6q3vn9j05jccje, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2QyZjE1NTYtYTljMDZhZWMtOTZlYzk5NC02OGUyMWQ0Mw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } } 2025-03-31T16:19:21.337427Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:101:2147], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:19:21.337790Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:19:21.337874Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00119c/r3tmp/tmpfpPZfF/pdisk_1.dat 2025-03-31T16:19:21.637258Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:19:21.666360Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:19:21.705990Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:19:21.706138Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:19:21.717457Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:19:21.800474Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:19:22.059743Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:19:22.325915Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:832:2684], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:22.326034Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:843:2689], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:22.326116Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:22.331652Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-31T16:19:22.489291Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:846:2692], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-31T16:19:22.526651Z node 2 :TX_PROXY ERROR: Actor# [2:906:2733] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:19:22.605556Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqpg072m6dgr3fkmekrbmckn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzdkZmMzM2ItMzFjOTllY2UtZDZiYTdjMDItODZhMGI0ZDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:19:22.685503Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqpg07c11v3h5qfwzvpm4zj1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDNiNzc2NzctYTk0NGU5MGItZjcwMWUyNzQtMWRkZWE4Njk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... waiting for readsets 2025-03-31T16:19:23.304171Z node 2 :KQP_COMPUTE WARN: SelfId: [2:993:2772], Table: `/Root/table-1` ([72057594046644480:2:1]), SessionActorId: [2:968:2772]TEvDeliveryProblem was received from tablet: 72075186224037888 2025-03-31T16:19:23.304320Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:983:2772], SessionActorId: [2:968:2772], statusCode=UNDETERMINED. Issue=
: Error: State of operation is unknown. Error writing to table `/Root/table-1`. Transaction state unknown for tablet 72075186224037888., code: 2026 . sessionActorId=[2:968:2772]. isRollback=0 2025-03-31T16:19:23.304576Z node 2 :KQP_COMPUTE WARN: TxId: 281474976715664, task: 1, CA Id [2:1020:2812]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 0 2025-03-31T16:19:23.304902Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NzlmNDA1OGQtNThkNWQwYTItMjRmNzMxYzItMjdlN2Y0YzE=, ActorId: [2:968:2772], ActorState: ExecuteState, TraceId: 01jqpg07eh26y4n2vvm8af2vbc, got TEvKqpBuffer::TEvError in ExecuteState, status: UNDETERMINED send to: [2:984:2772] from: [2:983:2772] 2025-03-31T16:19:23.305191Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:984:2772] TxId: 281474976715663. Ctx: { TraceId: 01jqpg07eh26y4n2vvm8af2vbc, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzlmNDA1OGQtNThkNWQwYTItMjRmNzMxYzItMjdlN2Y0YzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNDETERMINED: {
: Error: State of operation is unknown. Error writing to table `/Root/table-1`. Transaction state unknown for tablet 72075186224037888., code: 2026 } 2025-03-31T16:19:23.307267Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NzlmNDA1OGQtNThkNWQwYTItMjRmNzMxYzItMjdlN2Y0YzE=, ActorId: [2:968:2772], ActorState: ExecuteState, TraceId: 01jqpg07eh26y4n2vvm8af2vbc, got TEvKqpBuffer::TEvError in ExecuteState, status: UNDETERMINED send to: [2:984:2772] from: [2:983:2772] 2025-03-31T16:19:23.305191Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:984:2772] TxId: 281474976715663. Ctx: { TraceId: 01jqpg07eh26y4n2vvm8af2vbc, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzlmNDA1OGQtNThkNWQwYTItMjRmNzMxYzItMjdlN2Y0YzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNDETERMINED: {
2025-03-31T16:19:18.715472Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-03-31T16:19:18.715574Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-03-31T16:19:18.715628Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-31T16:19:18.715668Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:19:18.715704Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-03-31T16:19:18.715736Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-31T16:19:18.715768Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-31T16:19:18.715799Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-03-31T16:19:18.715833Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:19:18.715902Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:18.715943Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:18.716004Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-03-31T16:19:18.717740Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:131:2154]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-03-31T16:19:18.717774Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T16:19:18.717843Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-31T16:19:18.717956Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-03-31T16:19:18.717987Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-03-31T16:19:18.718030Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-03-31T16:19:18.718060Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-31T16:19:18.718087Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-03-31T16:19:18.718118Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-03-31T16:19:18.718149Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-03-31T16:19:18.718364Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-03-31T16:19:18.718396Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-03-31T16:19:18.718423Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-03-31T16:19:18.718445Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-03-31T16:19:18.718503Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 
2025-03-31T16:19:18.718543Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-03-31T16:19:18.718570Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-03-31T16:19:18.718591Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-03-31T16:19:18.718608Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-03-31T16:19:18.730777Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-03-31T16:19:18.730849Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-03-31T16:19:18.730890Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-03-31T16:19:18.730944Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-03-31T16:19:18.731017Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-03-31T16:19:18.731543Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:18.731608Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:18.731653Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-03-31T16:19:18.731798Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:131:2154]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-03-31T16:19:18.731838Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-31T16:19:18.731953Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-03-31T16:19:18.732010Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-31T16:19:18.732048Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-03-31T16:19:18.732080Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-03-31T16:19:18.741624Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-03-31T16:19:18.741710Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:19:18.741959Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:18.741998Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:18.742058Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-31T16:19:18.742096Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:19:18.742131Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-31T16:19:18.742169Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-03-31T16:19:18.742206Z node 1 :TX_DATASHARD TRACE: 
Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-03-31T16:19:18.742243Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-31T16:19:18.742277Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-03-31T16:19:18.742313Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-03-31T16:19:18.742353Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-03-31T16:19:18.742564Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-03-31T16:19:18.742614Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-31T16:19:18.742649Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-03-31T16:19:18.742674Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-03-31T16:19:18.742695Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-03-31T16:19:18.742762Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-31T16:19:18.742784Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-03-31T16:19:18.742816Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-31T16:19:18.742847Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-03-31T16:19:18.742902Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-03-31T16:19:18.742942Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-03-31T16:19:18.742972Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-03-31T16:19:18.743037Z node 1 :TX_DATA ... 
lt to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-03-31T16:19:23.813906Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-03-31T16:19:23.813984Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-03-31T16:19:23.814002Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:140] at 9437186 on unit CompleteOperation 2025-03-31T16:19:23.814023Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 140] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-03-31T16:19:23.814041Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-03-31T16:19:23.814092Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-03-31T16:19:23.814106Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:143] at 9437186 on unit CompleteOperation 2025-03-31T16:19:23.814151Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 143] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-03-31T16:19:23.814173Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-03-31T16:19:23.814244Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-03-31T16:19:23.814269Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:146] at 9437186 on unit CompleteOperation 2025-03-31T16:19:23.814318Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 146] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-03-31T16:19:23.814346Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-03-31T16:19:23.814431Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-03-31T16:19:23.814453Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:149] at 9437186 on unit CompleteOperation 2025-03-31T16:19:23.814495Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 149] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-03-31T16:19:23.814522Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-03-31T16:19:23.814583Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-03-31T16:19:23.814614Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437186 2025-03-31T16:19:23.814809Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:455:2397], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 104 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 34} 2025-03-31T16:19:23.814843Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:19:23.814872Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 104 2025-03-31T16:19:23.814955Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:455:2397], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 107 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 35} 2025-03-31T16:19:23.814969Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:19:23.814981Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 107 2025-03-31T16:19:23.815024Z 
node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:455:2397], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 110 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 36} 2025-03-31T16:19:23.815038Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:19:23.815054Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 110 2025-03-31T16:19:23.815138Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:455:2397], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 113 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 37} 2025-03-31T16:19:23.815162Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:19:23.815184Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 113 2025-03-31T16:19:23.815268Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:455:2397], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 116 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 38} 2025-03-31T16:19:23.815300Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:19:23.815323Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 116 2025-03-31T16:19:23.815373Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:455:2397], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 119 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 39} 2025-03-31T16:19:23.815395Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:19:23.815416Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 119 2025-03-31T16:19:23.815492Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:455:2397], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 122 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 40} 2025-03-31T16:19:23.815522Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:19:23.815537Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 122 2025-03-31T16:19:23.815600Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:455:2397], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 125 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 41} 2025-03-31T16:19:23.815615Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:19:23.815629Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 125 2025-03-31T16:19:23.815697Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:455:2397], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 128 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 42} 2025-03-31T16:19:23.815716Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:19:23.815730Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 
consumer 9437186 txId 128 2025-03-31T16:19:23.815772Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:455:2397], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 131 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 43} 2025-03-31T16:19:23.815802Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:19:23.815823Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 131 2025-03-31T16:19:23.815881Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:455:2397], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2025-03-31T16:19:23.815898Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:19:23.815912Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 134 2025-03-31T16:19:23.815980Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:455:2397], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2025-03-31T16:19:23.815999Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:19:23.816013Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 137 2025-03-31T16:19:23.816092Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:455:2397], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2025-03-31T16:19:23.816109Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:19:23.816122Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 140 2025-03-31T16:19:23.816165Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:455:2397], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2025-03-31T16:19:23.816190Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:19:23.816222Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 143 2025-03-31T16:19:23.816281Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:455:2397], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2025-03-31T16:19:23.816323Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:19:23.816345Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 146 2025-03-31T16:19:23.816406Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:455:2397], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2025-03-31T16:19:23.816421Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:19:23.816437Z node 1 :TX_DATASHARD DEBUG: 
Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2025-03-31T16:19:23.830575Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-03-31T16:19:23.830631Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:152] at 9437186 on unit CompleteOperation 2025-03-31T16:19:23.830687Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 2 ms, propose latency: 4 ms 2025-03-31T16:19:23.830744Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-03-31T16:19:23.830779Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-03-31T16:19:23.831052Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:455:2397], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-03-31T16:19:23.831117Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:19:23.831151Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ForceOnlineBetweenOnline_oo8 [GOOD] Test command err: 2025-03-31T16:19:17.214875Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:19:17.216178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:19:17.216246Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:19:17.228428Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:19:17.228966Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:131:2154] 2025-03-31T16:19:17.229252Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:19:17.240987Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:19:17.284258Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:19:17.284677Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:19:17.286357Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-03-31T16:19:17.286439Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-03-31T16:19:17.286497Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-03-31T16:19:17.286851Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:19:17.287615Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:19:17.287753Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:197:2154] in generation 2 2025-03-31T16:19:17.346063Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:19:17.378829Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-03-31T16:19:17.379027Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 
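The run above is the tail of the ReadSet exchange between shards 9437184 and 9437186: each delayed TEvReadSet ack carries a per-(TabletSource, TabletDest) Seqno, and in this trace the acks arrive strictly in order (Seqno# 36 through 50). Below is a minimal standalone sketch of how one might verify that invariant when reading such a trace offline; it is ad-hoc tooling, not YDB code, and it assumes the log has been split back to one entry per line:

    // Sketch (not YDB code): scan a ut_order trace on stdin and flag any
    // TEvReadSetAck whose Seqno does not grow monotonically for its
    // (TabletSource, TabletDest) pair.
    #include <cstdint>
    #include <iostream>
    #include <map>
    #include <regex>
    #include <string>
    #include <utility>

    int main() {
        // Matches the ack payload printed by the datashard, e.g.
        // {TEvReadSet step# 1000005 txid# 110 TabletSource# 9437184
        //  TabletDest# 9437186 ... Seqno# 36}
        const std::regex ack(R"(TabletSource# (\d+) TabletDest# (\d+).*Seqno# (\d+))");
        std::map<std::pair<uint64_t, uint64_t>, uint64_t> lastSeqno;

        std::string line;
        while (std::getline(std::cin, line)) {
            std::smatch m;
            if (!std::regex_search(line, m, ack))
                continue;  // not an ack line
            const auto key = std::make_pair(std::stoull(m[1].str()),
                                            std::stoull(m[2].str()));
            const uint64_t seqno = std::stoull(m[3].str());
            auto it = lastSeqno.find(key);
            if (it != lastSeqno.end() && seqno <= it->second)
                std::cout << "out-of-order ack: " << line << '\n';
            lastSeqno[key] = seqno;
        }
    }

A gap or regression in these Seqno values is what the DataShardTxOrder tests are ultimately guarding against, which is why the trace logs every single ack.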
2025-03-31T16:19:17.379165Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-03-31T16:19:17.379200Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-03-31T16:19:17.379235Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-03-31T16:19:17.379290Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:19:17.379500Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:17.379549Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:17.379835Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-03-31T16:19:17.379945Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-03-31T16:19:17.380017Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-31T16:19:17.380054Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:19:17.380089Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-03-31T16:19:17.380122Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-31T16:19:17.380158Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-31T16:19:17.380188Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-03-31T16:19:17.380231Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:19:17.380327Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:17.380381Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:17.380434Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-03-31T16:19:17.383013Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:131:2154]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-03-31T16:19:17.383068Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T16:19:17.383174Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-31T16:19:17.383319Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-03-31T16:19:17.383364Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-03-31T16:19:17.383416Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-03-31T16:19:17.383464Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-31T16:19:17.383508Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-03-31T16:19:17.383568Z node 1 
:TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-03-31T16:19:17.383606Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-03-31T16:19:17.383908Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-03-31T16:19:17.383960Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-03-31T16:19:17.384015Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-03-31T16:19:17.384043Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-03-31T16:19:17.384087Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-03-31T16:19:17.384121Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-03-31T16:19:17.384159Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-03-31T16:19:17.384191Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-03-31T16:19:17.384218Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-03-31T16:19:17.396465Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-03-31T16:19:17.396537Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-03-31T16:19:17.396571Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-03-31T16:19:17.396621Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-03-31T16:19:17.396684Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-03-31T16:19:17.397120Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:17.397161Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:17.397198Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-03-31T16:19:17.397295Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:131:2154]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-03-31T16:19:17.397324Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-31T16:19:17.397431Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-03-31T16:19:17.397464Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-31T16:19:17.397496Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-03-31T16:19:17.397521Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-03-31T16:19:17.400373Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-03-31T16:19:17.400441Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:19:17.400610Z node 1 :TX_DATASHARD 
TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:17.400641Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:17.400691Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-31T16:19:17.400729Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:19:17.400765Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-31T16:19:17.400806Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-03-31T16:19:17.400836Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-03-31T16:19:17.400876Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-31T16:19:17.400902Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-03-31T16:19:17.400930Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-03-31T16:19:17.400958Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-03-31T16:19:17.401096Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-03-31T16:19:17.401131Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-31T16:19:17.401160Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-03-31T16:19:17.401177Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-03-31T16:19:17.401192Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-03-31T16:19:17.401236Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-31T16:19:17.401256Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-03-31T16:19:17.401283Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-31T16:19:17.401309Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-03-31T16:19:17.401364Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-03-31T16:19:17.401395Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-03-31T16:19:17.401427Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-03-31T16:19:17.401461Z node 1 :TX_DATA ... 
SHARD TRACE: Complete execution for [1000005:146] at 9437186 on unit CompleteOperation 2025-03-31T16:19:23.664725Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 146] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 2 ms, propose latency: 4 ms 2025-03-31T16:19:23.664765Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2025-03-31T16:19:23.664785Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-03-31T16:19:23.664859Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-03-31T16:19:23.664875Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:149] at 9437186 on unit CompleteOperation 2025-03-31T16:19:23.664895Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 149] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 2 ms, propose latency: 4 ms 2025-03-31T16:19:23.664929Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2025-03-31T16:19:23.664946Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-03-31T16:19:23.665009Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-03-31T16:19:23.665023Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:152] at 9437186 on unit CompleteOperation 2025-03-31T16:19:23.665043Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 2 ms, propose latency: 4 ms 2025-03-31T16:19:23.665073Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-03-31T16:19:23.665103Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-03-31T16:19:23.665315Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2025-03-31T16:19:23.665342Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:19:23.665365Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 134 2025-03-31T16:19:23.665653Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2025-03-31T16:19:23.665673Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:19:23.665701Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 137 2025-03-31T16:19:23.665760Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2025-03-31T16:19:23.665785Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:19:23.665802Z node 1 
:TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 140 2025-03-31T16:19:23.665871Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2025-03-31T16:19:23.665886Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:19:23.665899Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 143 2025-03-31T16:19:23.665957Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2025-03-31T16:19:23.665991Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:19:23.666010Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 146 2025-03-31T16:19:23.666060Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2025-03-31T16:19:23.666077Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:19:23.666093Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2025-03-31T16:19:23.666129Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-03-31T16:19:23.666143Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:19:23.666156Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 2025-03-31T16:19:23.666191Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:19:23.666209Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:149] at 9437184 on unit CompleteOperation 2025-03-31T16:19:23.666249Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 149] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 2 ms, propose latency: 4 ms 2025-03-31T16:19:23.666278Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2025-03-31T16:19:23.666297Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:19:23.666420Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-03-31T16:19:23.666440Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-03-31T16:19:23.666457Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:19:23.666471Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:150] at 9437184 on unit CompleteOperation 2025-03-31T16:19:23.666493Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 150] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-03-31T16:19:23.666509Z node 1 :TX_DATASHARD 
DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:19:23.666564Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:19:23.666576Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:151] at 9437184 on unit CompleteOperation 2025-03-31T16:19:23.666594Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 151] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 2 ms, propose latency: 4 ms 2025-03-31T16:19:23.666629Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2025-03-31T16:19:23.666657Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:19:23.666728Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:19:23.666746Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:152] at 9437184 on unit CompleteOperation 2025-03-31T16:19:23.666776Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 152] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 2 ms, propose latency: 4 ms 2025-03-31T16:19:23.666801Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2025-03-31T16:19:23.666815Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:19:23.666890Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:19:23.666911Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:153] at 9437184 on unit CompleteOperation 2025-03-31T16:19:23.666935Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 153] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-03-31T16:19:23.666951Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:19:23.667013Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:19:23.667025Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:154] at 9437184 on unit CompleteOperation 2025-03-31T16:19:23.667055Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 154] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 2 ms, propose latency: 4 ms 2025-03-31T16:19:23.667108Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2025-03-31T16:19:23.667130Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:19:23.667277Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:234:2227], Recipient [1:345:2313]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2025-03-31T16:19:23.667301Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:19:23.667328Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 149 2025-03-31T16:19:23.667455Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:234:2227], Recipient [1:345:2313]: {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 
TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2025-03-31T16:19:23.667476Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:19:23.667491Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 151 2025-03-31T16:19:23.667548Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:234:2227], Recipient [1:345:2313]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2025-03-31T16:19:23.667566Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:19:23.667587Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2025-03-31T16:19:23.667646Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:234:2227], Recipient [1:345:2313]: {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2025-03-31T16:19:23.667662Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:19:23.667678Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed+EvWrite ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ZigZag_oo8_dirty [GOOD] Test command err: 2025-03-31T16:19:17.880308Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:19:17.881640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:19:17.881686Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:19:17.888914Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:19:17.889267Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:131:2154] 2025-03-31T16:19:17.889454Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:19:17.897228Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:19:17.939590Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:19:17.939865Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:19:17.941392Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-03-31T16:19:17.941483Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-03-31T16:19:17.941532Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-03-31T16:19:17.941849Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:19:17.942440Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:19:17.942509Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:197:2154] in generation 2 2025-03-31T16:19:18.001788Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:19:18.040567Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-03-31T16:19:18.040743Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration 
request in state WaitScheme: missing processing params 2025-03-31T16:19:18.040847Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-03-31T16:19:18.040879Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-03-31T16:19:18.040911Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-03-31T16:19:18.040945Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:19:18.041155Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:18.041197Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:18.041441Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-03-31T16:19:18.041529Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-03-31T16:19:18.041575Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-31T16:19:18.041611Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:19:18.041645Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-03-31T16:19:18.041675Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-31T16:19:18.041705Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-31T16:19:18.041732Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-03-31T16:19:18.041768Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:19:18.041869Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:18.041918Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:18.041971Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-03-31T16:19:18.048306Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:131:2154]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\001J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-03-31T16:19:18.048374Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T16:19:18.048485Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-31T16:19:18.048654Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-03-31T16:19:18.048702Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-03-31T16:19:18.048758Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-03-31T16:19:18.048801Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-31T16:19:18.048842Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on 
unit CheckSchemeTx 2025-03-31T16:19:18.048889Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-03-31T16:19:18.048919Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-03-31T16:19:18.049251Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-03-31T16:19:18.049301Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-03-31T16:19:18.049336Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-03-31T16:19:18.049374Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-03-31T16:19:18.049424Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-03-31T16:19:18.049448Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-03-31T16:19:18.049476Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-03-31T16:19:18.049518Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-03-31T16:19:18.049551Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-03-31T16:19:18.061662Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-03-31T16:19:18.061720Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-03-31T16:19:18.061750Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-03-31T16:19:18.061805Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-03-31T16:19:18.061867Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-03-31T16:19:18.062358Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:18.062411Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:18.062465Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-03-31T16:19:18.062599Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:131:2154]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-03-31T16:19:18.062630Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-31T16:19:18.062735Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-03-31T16:19:18.062774Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-31T16:19:18.062808Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-03-31T16:19:18.062840Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-03-31T16:19:18.070821Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-03-31T16:19:18.070893Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 
9437184 2025-03-31T16:19:18.071108Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:18.071144Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:18.071194Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-31T16:19:18.071230Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:19:18.071261Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-31T16:19:18.071298Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-03-31T16:19:18.071329Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-03-31T16:19:18.071371Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-31T16:19:18.071401Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-03-31T16:19:18.071433Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-03-31T16:19:18.071466Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-03-31T16:19:18.071628Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-03-31T16:19:18.071692Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-31T16:19:18.071733Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-03-31T16:19:18.071755Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-03-31T16:19:18.071773Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-03-31T16:19:18.071830Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-31T16:19:18.071856Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-03-31T16:19:18.071888Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-31T16:19:18.071912Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-03-31T16:19:18.071969Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-03-31T16:19:18.072015Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-03-31T16:19:18.072045Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-03-31T16:19:18.072080Z node 1 :TX_DATA ... 
aitInRS 2025-03-31T16:19:24.069967Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-03-31T16:19:24.069988Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit LoadAndWaitInRS 2025-03-31T16:19:24.070016Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit ExecuteDataTx 2025-03-31T16:19:24.070047Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit ExecuteDataTx 2025-03-31T16:19:24.070527Z node 2 :TX_DATASHARD TRACE: Executed operation [1000016:45] at tablet 9437184 with status COMPLETE 2025-03-31T16:19:24.070603Z node 2 :TX_DATASHARD TRACE: Datashard execution counters for [1000016:45] at 9437184: {NSelectRow: 2, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 2, SelectRowBytes: 16, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-03-31T16:19:24.070661Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-03-31T16:19:24.070686Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit ExecuteDataTx 2025-03-31T16:19:24.070730Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit CompleteOperation 2025-03-31T16:19:24.070762Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit CompleteOperation 2025-03-31T16:19:24.070992Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is DelayComplete 2025-03-31T16:19:24.071027Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit CompleteOperation 2025-03-31T16:19:24.071429Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit CompletedOperations 2025-03-31T16:19:24.071481Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit CompletedOperations 2025-03-31T16:19:24.071535Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-03-31T16:19:24.071560Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit CompletedOperations 2025-03-31T16:19:24.071588Z node 2 :TX_DATASHARD TRACE: Execution plan for [1000016:45] at 9437184 has finished 2025-03-31T16:19:24.071627Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:19:24.071666Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-31T16:19:24.071709Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-31T16:19:24.071746Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-31T16:19:24.072001Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:347:2314], Recipient [2:347:2314]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:24.072050Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:24.072127Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437185 2025-03-31T16:19:24.072161Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:19:24.072188Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437185 2025-03-31T16:19:24.072219Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000016:45] in 
PlanQueue unit at 9437185 2025-03-31T16:19:24.072246Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit PlanQueue 2025-03-31T16:19:24.072283Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-03-31T16:19:24.072312Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit PlanQueue 2025-03-31T16:19:24.072337Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit LoadTxDetails 2025-03-31T16:19:24.072368Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit LoadTxDetails 2025-03-31T16:19:24.073023Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 9437185 loaded tx from db 1000016:45 keys extracted: 2 2025-03-31T16:19:24.073068Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-03-31T16:19:24.073093Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit LoadTxDetails 2025-03-31T16:19:24.073119Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit FinalizeDataTxPlan 2025-03-31T16:19:24.073145Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit FinalizeDataTxPlan 2025-03-31T16:19:24.073182Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-03-31T16:19:24.073206Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit FinalizeDataTxPlan 2025-03-31T16:19:24.073263Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit BuildAndWaitDependencies 2025-03-31T16:19:24.073294Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit BuildAndWaitDependencies 2025-03-31T16:19:24.073356Z node 2 :TX_DATASHARD TRACE: Operation [1000016:45] is the new logically complete end at 9437185 2025-03-31T16:19:24.073404Z node 2 :TX_DATASHARD TRACE: Operation [1000016:45] is the new logically incomplete end at 9437185 2025-03-31T16:19:24.073433Z node 2 :TX_DATASHARD TRACE: Activated operation [1000016:45] at 9437185 2025-03-31T16:19:24.073475Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-03-31T16:19:24.073503Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit BuildAndWaitDependencies 2025-03-31T16:19:24.073526Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit BuildDataTxOutRS 2025-03-31T16:19:24.073548Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit BuildDataTxOutRS 2025-03-31T16:19:24.073593Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-03-31T16:19:24.073616Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit BuildDataTxOutRS 2025-03-31T16:19:24.073637Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit StoreAndSendOutRS 2025-03-31T16:19:24.073659Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit StoreAndSendOutRS 2025-03-31T16:19:24.073692Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-03-31T16:19:24.073721Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit StoreAndSendOutRS 2025-03-31T16:19:24.073745Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit PrepareDataTxInRS 2025-03-31T16:19:24.073769Z node 2 :TX_DATASHARD TRACE: 
Trying to execute [1000016:45] at 9437185 on unit PrepareDataTxInRS 2025-03-31T16:19:24.073797Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-03-31T16:19:24.073830Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit PrepareDataTxInRS 2025-03-31T16:19:24.073854Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit LoadAndWaitInRS 2025-03-31T16:19:24.073877Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit LoadAndWaitInRS 2025-03-31T16:19:24.073901Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-03-31T16:19:24.073924Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit LoadAndWaitInRS 2025-03-31T16:19:24.073953Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit ExecuteDataTx 2025-03-31T16:19:24.073983Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit ExecuteDataTx 2025-03-31T16:19:24.074348Z node 2 :TX_DATASHARD TRACE: Executed operation [1000016:45] at tablet 9437185 with status COMPLETE 2025-03-31T16:19:24.074398Z node 2 :TX_DATASHARD TRACE: Datashard execution counters for [1000016:45] at 9437185: {NSelectRow: 2, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 2, SelectRowBytes: 16, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-03-31T16:19:24.074446Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-03-31T16:19:24.074470Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit ExecuteDataTx 2025-03-31T16:19:24.074493Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit CompleteOperation 2025-03-31T16:19:24.074530Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit CompleteOperation 2025-03-31T16:19:24.074727Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is DelayComplete 2025-03-31T16:19:24.074764Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit CompleteOperation 2025-03-31T16:19:24.074823Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit CompletedOperations 2025-03-31T16:19:24.074856Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit CompletedOperations 2025-03-31T16:19:24.074886Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-03-31T16:19:24.074907Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit CompletedOperations 2025-03-31T16:19:24.074933Z node 2 :TX_DATASHARD TRACE: Execution plan for [1000016:45] at 9437185 has finished 2025-03-31T16:19:24.074962Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:19:24.074988Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437185 2025-03-31T16:19:24.075029Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437185 has no attached operations 2025-03-31T16:19:24.075057Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437185 2025-03-31T16:19:24.090070Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437185 step# 1000016 txid# 45} 2025-03-31T16:19:24.090151Z node 2 :TX_DATASHARD DEBUG: Sending 
'{TEvPlanStepAccepted TabletId# 9437185 step# 1000016} 2025-03-31T16:19:24.090218Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437185 2025-03-31T16:19:24.090265Z node 2 :TX_DATASHARD TRACE: Complete execution for [1000016:45] at 9437185 on unit CompleteOperation 2025-03-31T16:19:24.090330Z node 2 :TX_DATASHARD DEBUG: Complete [1000016 : 45] from 9437185 at tablet 9437185 send result to client [2:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-03-31T16:19:24.090383Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 2025-03-31T16:19:24.090954Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000016 txid# 45} 2025-03-31T16:19:24.090999Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 1000016} 2025-03-31T16:19:24.091041Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:19:24.091072Z node 2 :TX_DATASHARD TRACE: Complete execution for [1000016:45] at 9437184 on unit CompleteOperation 2025-03-31T16:19:24.091165Z node 2 :TX_DATASHARD DEBUG: Complete [1000016 : 45] from 9437184 at tablet 9437184 send result to client [2:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-03-31T16:19:24.091204Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 >> Viewer::JsonStorageListingV2NodeIdFilter [GOOD] >> Viewer::JsonStorageListingV2PDiskIdFilter >> DataShardOutOfOrder::TestSecondaryClearanceAfterShardRestartRace [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestPlannedHalfOverloadedSplit-UseSink [GOOD] Test command err: 2025-03-31T16:19:15.645935Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:19:15.646153Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:19:15.646285Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001191/r3tmp/tmpYugixM/pdisk_1.dat 2025-03-31T16:19:15.993393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:19:16.047873Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:19:16.088509Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:19:16.088672Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:19:16.100170Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:19:16.181850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:19:16.222150Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:19:16.223331Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:19:16.223797Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-31T16:19:16.224152Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:19:16.277048Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:19:16.277849Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:19:16.277964Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:19:16.279758Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:19:16.279869Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:19:16.279942Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:19:16.280361Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:19:16.280502Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:19:16.280602Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-31T16:19:16.291461Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:19:16.322750Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T16:19:16.322966Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:19:16.323135Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-31T16:19:16.323176Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:19:16.323217Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T16:19:16.323271Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:19:16.323498Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:16.323549Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:16.323827Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T16:19:16.323924Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T16:19:16.324090Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:19:16.324144Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:19:16.324212Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-31T16:19:16.324265Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-31T16:19:16.324307Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-31T16:19:16.324340Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:19:16.324383Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:19:16.324473Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:16.324523Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:16.324583Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-31T16:19:16.324681Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-03-31T16:19:16.324776Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T16:19:16.324904Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:19:16.325112Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-31T16:19:16.325175Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T16:19:16.325275Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T16:19:16.325327Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-31T16:19:16.325370Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-31T16:19:16.325409Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
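The propose trace around this point walks the standard datashard execution-unit pipeline: CheckSchemeTx, then StoreSchemeTx and FinishPropose (both reporting DelayComplete, so they are revisited in the transaction's Complete phase), then WaitForPlan, where the operation parks until a TEvPlanStep arrives. A toy model of that control flow follows; it is purely illustrative and names only what the trace itself shows, not the actual YDB classes (the real units live under ydb/core/tx/datashard):

    // Illustrative model of the "Trying to execute ... / Execution status ... /
    // Complete execution ..." pattern in the trace: units returning
    // DelayComplete are finished later, in the Complete() phase.
    #include <iostream>
    #include <string>
    #include <vector>

    enum class EStatus { Executed, DelayComplete, NotReady };

    struct Unit {
        std::string name;
        EStatus status;  // what Execute() reports for this tx in the trace
    };

    int main() {
        // The plan the trace shows for a scheme tx on a fresh shard.
        std::vector<Unit> plan = {
            {"CheckSchemeTx", EStatus::Executed},
            {"StoreSchemeTx", EStatus::DelayComplete},
            {"FinishPropose", EStatus::DelayComplete},
            {"WaitForPlan",   EStatus::NotReady},  // waits for TEvPlanStep
        };

        std::vector<std::string> delayed;
        for (const auto& unit : plan) {
            std::cout << "Trying to execute on unit " << unit.name << '\n';
            if (unit.status == EStatus::NotReady) {
                std::cout << "not ready; operation stays attached to "
                          << unit.name << '\n';
                break;  // resumes when the plan step is delivered
            }
            if (unit.status == EStatus::DelayComplete)
                delayed.push_back(unit.name);  // finish in Complete() phase
            std::cout << "advance past " << unit.name << '\n';
        }
        for (const auto& name : delayed)
            std::cout << "Complete execution on unit " << name << '\n';
    }

Read this way, the later "TTxProposeTransactionBase::Complete ... Complete execution for [0:281474976715657] on unit StoreSchemeTx / FinishPropose" entries are simply the deferred half of the same pipeline.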
2025-03-31T16:19:16.325445Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-31T16:19:16.325764Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-31T16:19:16.325813Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-31T16:19:16.325859Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-31T16:19:16.325893Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-31T16:19:16.325939Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-31T16:19:16.325971Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-31T16:19:16.326017Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-31T16:19:16.326059Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-31T16:19:16.326102Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-31T16:19:16.327988Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-31T16:19:16.328060Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:19:16.338845Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:19:16.338927Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-31T16:19:16.338964Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-31T16:19:16.339026Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-03-31T16:19:16.339137Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-31T16:19:16.488985Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:16.489049Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:16.489085Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-31T16:19:16.489492Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:568:2495], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-03-31T16:19:16.489529Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-31T16:19:16.489643Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-31T16:19:16.489699Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-03-31T16:19:16.489747Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-03-31T16:19:16.489783Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-03-31T16:19:16.494556Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T16:19:16.494661Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:19:16.495513Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:16.495559Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:16.495616Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:19:1 ... actorId: [2:1186:2938] 2025-03-31T16:19:23.508372Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037892 2025-03-31T16:19:23.508419Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037892 2025-03-31T16:19:23.508454Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-03-31T16:19:23.508649Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553157, Sender [2:1026:2823], Recipient [2:756:2634]: NKikimrTxDataShard.TEvSplitTransferSnapshotAck TabletId: 72075186224037892 OperationCookie: 281474976715665 2025-03-31T16:19:23.508716Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 Received snapshot Ack from dst 72075186224037892 for split OpId 281474976715665 2025-03-31T16:19:23.509025Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:1026:2823], Recipient [2:1026:2823]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:23.509060Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:23.509277Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [2:1181:2933], Recipient [2:756:2634]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72075186224037892 ClientId: [2:1181:2933] ServerId: [2:1183:2935] } 2025-03-31T16:19:23.509312Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-03-31T16:19:23.509747Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 270270976, Sender [2:24:2071], Recipient [2:1026:2823]: {TEvRegisterTabletResult TabletId# 72075186224037892 Entry# 2000} 2025-03-31T16:19:23.509785Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2025-03-31T16:19:23.509827Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037892 time 2000 2025-03-31T16:19:23.509868Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-03-31T16:19:23.509994Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037892 2025-03-31T16:19:23.510040Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 active 0 active planned 0 immediate 0 planned 0 
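Editor's note: after the split, each destination shard above registers with the mediator timecast and logs waitStep/readStep/observedStep before promoting its unprotected read edge. A small checker for those lines follows; treating readStep and observedStep having reached waitStep as "restored" is an inference from the printed fields, not a claim about the datashard sources.

```python
import re

# Sketch only: flag tablets whose mediator state has not yet caught up, based
# on the "CheckMediatorStateRestored ... waitStep# readStep# observedStep#"
# lines above. The "restored" condition is an assumption for illustration.
PAT = re.compile(
    r"CheckMediatorStateRestored at (\d+): "
    r"waitStep# (\d+) readStep# (\d+) observedStep# (\d+)"
)

def unrestored_tablets(log_lines):
    for line in log_lines:
        for tablet, wait, read, observed in PAT.findall(line):
            if int(read) < int(wait) or int(observed) < int(wait):
                yield tablet, wait, read, observed

sample = ["... CheckMediatorStateRestored at 72075186224037892: "
          "waitStep# 2000 readStep# 2000 observedStep# 2000 ..."]
print(list(unrestored_tablets(sample)))  # [] -- this shard is caught up
```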
2025-03-31T16:19:23.510081Z node 2 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037892 2025-03-31T16:19:23.510118Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037892 has no attached operations 2025-03-31T16:19:23.510158Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037892 2025-03-31T16:19:23.510190Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037892 TxInFly 0 2025-03-31T16:19:23.510246Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037892 2025-03-31T16:19:23.510334Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877764, Sender [2:1183:2935], Recipient [2:1026:2823]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-03-31T16:19:23.510367Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-03-31T16:19:23.510412Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037892, clientId# [2:1181:2933], serverId# [2:1183:2935], sessionId# [0:0:0] 2025-03-31T16:19:23.510764Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 270270978, Sender [2:24:2071], Recipient [2:1026:2823]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 2000 ReadStep# 2000 } 2025-03-31T16:19:23.510803Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2025-03-31T16:19:23.510844Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037892 coordinator 72057594046316545 last step 0 next step 2000 2025-03-31T16:19:23.510897Z node 2 :TX_DATASHARD DEBUG: CheckMediatorStateRestored at 72075186224037892: waitStep# 2000 readStep# 2000 observedStep# 2000 2025-03-31T16:19:23.510954Z node 2 :TX_DATASHARD TRACE: CheckMediatorStateRestored at 72075186224037892 promoting UnprotectedReadEdge to v2000/18446744073709551615 2025-03-31T16:19:23.523008Z node 2 :TX_DATASHARD DEBUG: 72075186224037893 ack snapshot OpId 281474976715665 2025-03-31T16:19:23.523172Z node 2 :TX_DATASHARD INFO: Switched to work state Ready tabletId 72075186224037893 2025-03-31T16:19:23.523261Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037893 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-31T16:19:23.523322Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037893 2025-03-31T16:19:23.523381Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037893, actorId: [2:1190:2942] 2025-03-31T16:19:23.523411Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037893 2025-03-31T16:19:23.523448Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037893 2025-03-31T16:19:23.523470Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037893 2025-03-31T16:19:23.523712Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553157, Sender [2:1031:2825], Recipient [2:756:2634]: NKikimrTxDataShard.TEvSplitTransferSnapshotAck TabletId: 72075186224037893 OperationCookie: 281474976715665 2025-03-31T16:19:23.523772Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 Received snapshot Ack from dst 72075186224037893 for split OpId 281474976715665 2025-03-31T16:19:23.524018Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:1031:2825], Recipient [2:1031:2825]: 
NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:23.524043Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:23.524334Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [2:1182:2934], Recipient [2:756:2634]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72075186224037893 ClientId: [2:1182:2934] ServerId: [2:1184:2936] } 2025-03-31T16:19:23.524357Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-03-31T16:19:23.524524Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 270270976, Sender [2:24:2071], Recipient [2:1031:2825]: {TEvRegisterTabletResult TabletId# 72075186224037893 Entry# 2000} 2025-03-31T16:19:23.524547Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2025-03-31T16:19:23.524569Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037893 time 2000 2025-03-31T16:19:23.524593Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037893 2025-03-31T16:19:23.524630Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037893 2025-03-31T16:19:23.524658Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037893 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:19:23.524680Z node 2 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037893 2025-03-31T16:19:23.524703Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037893 has no attached operations 2025-03-31T16:19:23.524724Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037893 2025-03-31T16:19:23.524746Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037893 TxInFly 0 2025-03-31T16:19:23.524776Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037893 2025-03-31T16:19:23.524893Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877764, Sender [2:1184:2936], Recipient [2:1031:2825]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-03-31T16:19:23.524914Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-03-31T16:19:23.524940Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037893, clientId# [2:1182:2934], serverId# [2:1184:2936], sessionId# [0:0:0] 2025-03-31T16:19:23.525156Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 270270978, Sender [2:24:2071], Recipient [2:1031:2825]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 2000 ReadStep# 2000 } 2025-03-31T16:19:23.525187Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2025-03-31T16:19:23.525209Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037893 coordinator 72057594046316545 last step 0 next step 2000 2025-03-31T16:19:23.525241Z node 2 :TX_DATASHARD DEBUG: CheckMediatorStateRestored at 72075186224037893: waitStep# 2000 readStep# 2000 observedStep# 2000 2025-03-31T16:19:23.525272Z node 2 :TX_DATASHARD TRACE: CheckMediatorStateRestored at 72075186224037893 promoting UnprotectedReadEdge to v2000/18446744073709551615 2025-03-31T16:19:23.535895Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 ack split to schemeshard 281474976715665 2025-03-31T16:19:23.538662Z node 2 :TX_DATASHARD TRACE: StateWork, 
received event# 269553158, Sender [2:410:2405], Recipient [2:763:2638] 2025-03-31T16:19:23.538743Z node 2 :TX_DATASHARD DEBUG: Got TEvSplitPartitioningChanged: opId: 281474976715665, at datashard: 72075186224037889, state: SplitSrcWaitForPartitioningChanged 2025-03-31T16:19:23.540917Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 ack split partitioning changed to schemeshard 281474976715665 2025-03-31T16:19:23.541000Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 in PreOffline state HasSharedBobs: 1 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-03-31T16:19:23.541641Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [2:748:2629], Recipient [2:756:2634]: NKikimr::TEvTablet::TEvFollowerGcApplied 2025-03-31T16:19:24.104951Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [2:977:2681], Recipient [2:667:2571]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 977 RawX2: 8589937273 } TxBody: " \0008\000`\200\200\200\005j\324\006\010\001\022\225\006\010\001\022\024\n\022\t\321\003\000\000\000\000\000\000\021y\n\000\000\002\000\000\000\032\256\002\010\240\215\006\022\207\002\037\002\022KqpEffect\005\205\006\213\000\205\002\206\205\004\207\203\004?\004\014key\024valueh%kqp%tx_result_binding_0_1\204\214\002\030Inputs(Parameters\034Program\013?\000)\251\000?\n\014Arg\000\002)\211\002?\016\204\214\002(KqpEffects\000)\211\010?\032\213\010\203\010\203\010\203\005@\203\010\204?\006\210\203\004\203\004\203\0144KqpUpsertRows\000\013?&\003?\036\177\000\001\205\000\000\000\000\001\003? \004\003?\"\000\003?$\002\017)\211\002?(?\010 Iterator\000)\211\004?\010?\n\203\004\030Member\000?\026\003?@\000\002\004\000\006\010\002?.\003\203\004\004\003\203\004\002\003\003?0\000\r\010\000\n\001/\032\0369\000\000\000\000\000\000\000@i\000\000\000\000\000\000\360?q\000\000\000\000\ 2025-03-31T16:19:24.105026Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T16:19:24.105148Z node 2 :TX_DATASHARD NOTICE: Rejecting data TxId 281474976715663 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state) 2025-03-31T16:19:24.105594Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715664, at schemeshard: 72057594046644480 2025-03-31T16:19:24.105947Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715665, at schemeshard: 72057594046644480 >> BasicUsage::GetAllStartPartitionSessions [GOOD] >> BasicUsage::PreferredDatabaseNoFallback ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestReadTableSingleShardImmediate [GOOD] Test command err: 2025-03-31T16:19:17.911302Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:19:17.911517Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:19:17.911656Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00118a/r3tmp/tmpm8M8d0/pdisk_1.dat 2025-03-31T16:19:18.237237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:19:18.280479Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:19:18.331837Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-31T16:19:18.333093Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:19:18.333216Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:19:18.333380Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-03-31T16:19:18.344772Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:19:18.425084Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-03-31T16:19:18.425156Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-03-31T16:19:18.425336Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-03-31T16:19:18.575359Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 2 } } } ExecTimeoutPeriod: 18446744073709551615 2025-03-31T16:19:18.575472Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T16:19:18.576120Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-03-31T16:19:18.576212Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:19:18.576507Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:19:18.576683Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T16:19:18.576782Z node 1 :TX_PROXY DEBUG: Actor# 
[1:641:2549] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-03-31T16:19:18.577115Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvClientConnected 2025-03-31T16:19:18.578648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:19:18.579743Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-03-31T16:19:18.579825Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-03-31T16:19:18.611421Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:672:2573]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:19:18.612464Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:672:2573]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:19:18.612824Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:672:2573] 2025-03-31T16:19:18.613063Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:19:18.658677Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:672:2573]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:19:18.659095Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:658:2565], Recipient [1:674:2575]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:19:18.660401Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:19:18.660612Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:19:18.662180Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:19:18.662248Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:19:18.662292Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:19:18.662624Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:19:18.662731Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:658:2565], Recipient [1:674:2575]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:19:18.663221Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:674:2575] 2025-03-31T16:19:18.663393Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:19:18.670150Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:19:18.670229Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:700:2573] in generation 1 2025-03-31T16:19:18.670404Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:658:2565], Recipient [1:674:2575]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:19:18.670923Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:19:18.670982Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:19:18.672056Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-03-31T16:19:18.672116Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 
72075186224037889 2025-03-31T16:19:18.672147Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-03-31T16:19:18.672361Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:19:18.672456Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:19:18.672499Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037889 persisting started state actor id [1:706:2575] in generation 1 2025-03-31T16:19:18.683719Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:19:18.708088Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T16:19:18.708341Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:19:18.708480Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:709:2594] 2025-03-31T16:19:18.708522Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:19:18.708558Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T16:19:18.708608Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:19:18.708916Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:672:2573], Recipient [1:672:2573]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:18.708982Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:18.709071Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:19:18.709105Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-03-31T16:19:18.709161Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:19:18.709217Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:710:2595] 2025-03-31T16:19:18.709241Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-03-31T16:19:18.709262Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-03-31T16:19:18.709292Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-31T16:19:18.709616Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:674:2575], Recipient [1:674:2575]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:18.709652Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:18.709861Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T16:19:18.709957Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T16:19:18.710114Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:19:18.710153Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:19:18.710193Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-31T16:19:18.710233Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no 
attached operations 2025-03-31T16:19:18.710270Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-31T16:19:18.710304Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:19:18.710349Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:19:18.710394Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-03-31T16:19:18.710451Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-03-31T16:19:18.710845Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:687:2581], Recipient [1:672:2573]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:18.710891Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:18.710959Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:664:2569], serverId# [1:687:2581], sessionId# [0:0:0] 2025-03-31T16:19:18.711011Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 202 ... an for [0:2] at 72075186224037888 executing on unit CheckWrite 2025-03-31T16:19:24.211707Z node 2 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-03-31T16:19:24.211737Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037888 on unit BuildAndWaitDependencies 2025-03-31T16:19:24.211780Z node 2 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v1000/281474976715657 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-03-31T16:19:24.211824Z node 2 :TX_DATASHARD TRACE: Activated operation [0:2] at 72075186224037888 2025-03-31T16:19:24.211857Z node 2 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037888 is Executed 2025-03-31T16:19:24.211878Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-03-31T16:19:24.211891Z node 2 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037888 to execution unit ExecuteWrite 2025-03-31T16:19:24.211904Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037888 on unit ExecuteWrite 2025-03-31T16:19:24.211955Z node 2 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888 2025-03-31T16:19:24.211998Z node 2 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v1000/281474976715657 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-03-31T16:19:24.212116Z node 2 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row count=1 2025-03-31T16:19:24.212162Z node 2 :TX_DATASHARD TRACE: add locks to result: 0 2025-03-31T16:19:24.212218Z node 2 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-31T16:19:24.212247Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037888 executing on unit ExecuteWrite 2025-03-31T16:19:24.212291Z node 2 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037888 to execution unit FinishProposeWrite 2025-03-31T16:19:24.212329Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037888 on unit FinishProposeWrite 2025-03-31T16:19:24.212397Z node 2 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037888 is 
DelayCompleteNoMoreRestarts 2025-03-31T16:19:24.212422Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037888 executing on unit FinishProposeWrite 2025-03-31T16:19:24.212456Z node 2 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037888 to execution unit CompletedOperations 2025-03-31T16:19:24.212487Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037888 on unit CompletedOperations 2025-03-31T16:19:24.212525Z node 2 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037888 is Executed 2025-03-31T16:19:24.212563Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037888 executing on unit CompletedOperations 2025-03-31T16:19:24.212589Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:2] at 72075186224037888 has finished 2025-03-31T16:19:24.223269Z node 2 :TX_DATASHARD TRACE: TTxWrite complete: at tablet# 72075186224037888 2025-03-31T16:19:24.223334Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:2] at 72075186224037888 on unit FinishProposeWrite 2025-03-31T16:19:24.223392Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 2 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_COMPLETED 2025-03-31T16:19:24.223462Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:19:24.225718Z node 2 :TX_PROXY DEBUG: actor# [2:59:2106] Handle TEvProposeTransaction 2025-03-31T16:19:24.225768Z node 2 :TX_PROXY DEBUG: actor# [2:59:2106] TxId# 281474976715661 ProcessProposeTransaction 2025-03-31T16:19:24.225849Z node 2 :TX_PROXY DEBUG: actor# [2:59:2106] Cookie# 0 userReqId# "" txid# 281474976715661 SEND to# [2:861:2691] DataReq marker# P0 2025-03-31T16:19:24.226006Z node 2 :TX_PROXY DEBUG: Actor# [2:861:2691] Cookie# 0 txid# 281474976715661 HANDLE TDataReq marker# P1 2025-03-31T16:19:24.226260Z node 2 :TX_PROXY DEBUG: Actor# [2:861:2691] txid# 281474976715661 HANDLE EvNavigateKeySetResult TDataReq marker# P3b ErrorCount# 0 2025-03-31T16:19:24.226436Z node 2 :TX_PROXY DEBUG: Actor# [2:861:2691] txid# 281474976715661 HANDLE EvResolveKeySetResult TDataReq marker# P3 ErrorCount# 0 2025-03-31T16:19:24.226524Z node 2 :TX_PROXY DEBUG: Actor# [2:861:2691] txid# 281474976715661 SEND TEvProposeTransaction to datashard 72075186224037888 with read table request affected shards 1 followers disallowed marker# P4b 2025-03-31T16:19:24.226779Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [2:861:2691], Recipient [2:667:2571]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCAN SourceDeprecated { RawX1: 861 RawX2: 8589937283 } TxBody: " \0018\001B8\n\014\010\200\202\224\204\200\200\200\200\001\020\002\022\t\010\001\022\003key\030\002\022\013\010\002\022\005value\030\002\032\016\n\006\001\000\000\000\000\200\022\000\030\001 \001 \001H\001R\022\t]\003\000\000\000\000\000\000\021\203\n\000\000\002\000\000\000" TxId: 281474976715661 ExecLevel: 0 Flags: 8 2025-03-31T16:19:24.226834Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T16:19:24.226931Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:19:24.227120Z node 2 :TX_DATASHARD TRACE: -- AddReadRange: [(Uint32 : NULL) ; ()] table: [72057594046644480:2:0] 2025-03-31T16:19:24.227185Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715661] at 72075186224037888 on unit CheckDataTx 2025-03-31T16:19:24.227234Z node 2 :TX_DATASHARD TRACE: Execution status for 
[0:281474976715661] at 72075186224037888 is Executed 2025-03-31T16:19:24.227285Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715661] at 72075186224037888 executing on unit CheckDataTx 2025-03-31T16:19:24.227337Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715661] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-03-31T16:19:24.227372Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715661] at 72075186224037888 on unit BuildAndWaitDependencies 2025-03-31T16:19:24.227415Z node 2 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v1500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1500/18446744073709551615 ImmediateWriteEdgeReplied# v1500/18446744073709551615 2025-03-31T16:19:24.227479Z node 2 :TX_DATASHARD TRACE: Activated operation [0:281474976715661] at 72075186224037888 2025-03-31T16:19:24.227519Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715661] at 72075186224037888 is Executed 2025-03-31T16:19:24.227542Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715661] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-03-31T16:19:24.227562Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715661] at 72075186224037888 to execution unit MakeScanSnapshot 2025-03-31T16:19:24.227596Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715661] at 72075186224037888 on unit MakeScanSnapshot 2025-03-31T16:19:24.227632Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715661] at 72075186224037888 is Executed 2025-03-31T16:19:24.227671Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715661] at 72075186224037888 executing on unit MakeScanSnapshot 2025-03-31T16:19:24.227694Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715661] at 72075186224037888 to execution unit WaitForStreamClearance 2025-03-31T16:19:24.227713Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715661] at 72075186224037888 on unit WaitForStreamClearance 2025-03-31T16:19:24.227756Z node 2 :TX_DATASHARD TRACE: Requested stream clearance from [2:861:2691] for [0:281474976715661] at 72075186224037888 2025-03-31T16:19:24.227792Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715661] at 72075186224037888 is Continue 2025-03-31T16:19:24.227838Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:19:24.227910Z node 2 :TX_PROXY DEBUG: Got clearance request, shard: 72075186224037888, txid: 281474976715661 2025-03-31T16:19:24.227986Z node 2 :TX_PROXY DEBUG: Collected all clerance requests, txid: 281474976715661 2025-03-31T16:19:24.228030Z node 2 :TX_PROXY DEBUG: Send stream clearance, shard: 72075186224037888, txid: 281474976715661, cleared: 1 2025-03-31T16:19:24.228130Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287942, Sender [2:861:2691], Recipient [2:667:2571]: NKikimrTx.TEvStreamClearancePending TxId: 281474976715661 2025-03-31T16:19:24.228167Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearancePending 2025-03-31T16:19:24.228241Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287940, Sender [2:861:2691], Recipient [2:667:2571]: NKikimrTx.TEvStreamClearanceResponse TxId: 281474976715661 Cleared: true 2025-03-31T16:19:24.228263Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearanceResponse 2025-03-31T16:19:24.228337Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender 
[2:667:2571], Recipient [2:667:2571]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:24.228369Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:24.228457Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:19:24.228508Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-03-31T16:19:24.228553Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 2025-03-31T16:19:24.228604Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715661] at 72075186224037888 on unit WaitForStreamClearance 2025-03-31T16:19:24.228641Z node 2 :TX_DATASHARD TRACE: Got stream clearance for [0:281474976715661] at 72075186224037888 2025-03-31T16:19:24.228675Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715661] at 72075186224037888 is Executed 2025-03-31T16:19:24.228716Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715661] at 72075186224037888 executing on unit WaitForStreamClearance 2025-03-31T16:19:24.228753Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715661] at 72075186224037888 to execution unit ReadTableScan 2025-03-31T16:19:24.228788Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715661] at 72075186224037888 on unit ReadTableScan 2025-03-31T16:19:24.229055Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715661] at 72075186224037888 is Continue 2025-03-31T16:19:24.229099Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-03-31T16:19:24.229136Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-03-31T16:19:24.229165Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-31T16:19:24.229194Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-31T16:19:24.229271Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:19:24.229782Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [2:867:2696], Recipient [2:667:2571]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-03-31T16:19:24.229823Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::WriteSessionWriteInHandlers [GOOD] Test command err: 2025-03-31T16:19:10.053543Z :WriteSessionWriteInHandlers INFO: Random seed for debugging is 1743437950053522 2025-03-31T16:19:10.484338Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008981226871671:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:10.484436Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001650/r3tmp/tmpgKUtwk/pdisk_1.dat 2025-03-31T16:19:10.870639Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-31T16:19:10.900813Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-31T16:19:11.016077Z node 2 
:METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:19:11.310501Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:19:11.362929Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:19:11.363020Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:19:11.363914Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:19:11.363984Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:19:11.368827Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:19:11.368960Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:19:11.369875Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28692, node 1 2025-03-31T16:19:11.583832Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/0l5c/001650/r3tmp/yandexKTF7Ma.tmp 2025-03-31T16:19:11.583871Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/0l5c/001650/r3tmp/yandexKTF7Ma.tmp 2025-03-31T16:19:11.584358Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/0l5c/001650/r3tmp/yandexKTF7Ma.tmp 2025-03-31T16:19:11.584525Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:19:11.765762Z INFO: TTestServer started on Port 61112 GrpcPort 28692 TClient is connected to server localhost:61112 PQClient connected to localhost:28692 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:19:12.214344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-03-31T16:19:14.545070Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008996434289806:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:19:14.545088Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008996434289818:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:19:14.545174Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:19:14.550324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710657:3, at schemeshard: 72057594046644480
2025-03-31T16:19:14.568345Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488008996434289824:2313], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710657 completed, doublechecking }
2025-03-31T16:19:14.792381Z node 2 :TX_PROXY ERROR: Actor# [2:7488008996434289852:2131] txid# 281474976710658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-31T16:19:14.817331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720661:0, at schemeshard: 72057594046644480
2025-03-31T16:19:14.821342Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7488008996434289867:2317], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003
2025-03-31T16:19:14.821397Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488008998406741930:2344], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:19:14.821729Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NzRlNTQ0YWMtMzhmM2EwMDItMWRkN2QxYmMtNjVjZDZjMzU=, ActorId: [1:7488008998406741880:2336], ActorState: ExecuteState, TraceId: 01jqpfzzgc3stt8yqsteq3vm52, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:19:14.821787Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Y2Q3OGFhZjItMTRmNDNkZGMtODBmMzU0ZmQtNGM4NWU1ZTk=, ActorId: [2:7488008996434289794:2308], ActorState: ExecuteState, TraceId: 01jqpfzzff7dda0vhv0wmda60z, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:19:14.825054Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T16:19:14.825042Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T16:19:14.953003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480 2025-03-31T16:19:15.114452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:28692", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2025-03-31T16:19:15.413194Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720664. Ctx: { TraceId: 01jqpg003t14b7yv8dsfnfwpcq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjQ1ZDMyOTItZWMxNDYwNjEtMTdmZTRhNWUtM2MyYzY4ZDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. 
Subcribe to ClusterTracker from [1:7488009002701709637:2965] 2025-03-31T16:19:15.483944Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008981226871671:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:15.484000Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-03-31T16:19:21.538408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720678:0, at schemeshard: 72057594046644480 waiting... PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:28692 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-03-31T16:19:21.629644Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:28692 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } LocalDC: true ReadRules: "user" ReadFromTimestampsMs: 0 ConsumerFormatVersions: 0 ConsumerCodecs { } Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lzop" } ReadRuleVersions: 0 } } } CallPersQueueGRPC response: Status: 129 ProxyErrorCode: 53 SchemeStatus: 1 FlatTxId { TxId: 2814749767 ... [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-31T16:19:23.382673Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0src_id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 1, partNo: 0, Offset: 0 is stored on disk 2025-03-31T16:19:23.382869Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-03-31T16:19:23.382928Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2025-03-31T16:19:23.383097Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 0 Topic 'rt3.dc1--test-topic' partition 0 user user offset 0 count 1 size 1024000 endOffset 1 max time lag 0ms effective offset 0 2025-03-31T16:19:23.383139Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 1 2025-03-31T16:19:23.383175Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Reading cookie 0. All data is from uncompacted head. 
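Editor's note: the PERSQUEUE acks above carry textual write_statistics (persisting time, queue wait, quota wait). Below is a rough parser for pulling those durations out of a write-response line. It handles only the "nanos: N" form seen in this log; a "seconds:" component would need the same treatment. The textual proto layout is an assumption here, not a documented SDK surface.

```python
import re

# Sketch only: extract per-ack durations (in milliseconds) from a
# "Write session got write response: ... write_statistics { ... }" line.
FIELD = re.compile(r"(\w+_time) \{ (?:nanos: (\d+) )?\}")

def ack_latencies_ms(line):
    # Empty bodies like "partition_quota_wait_time { }" parse as 0 ms.
    return {name: int(nanos or 0) / 1e6 for name, nanos in FIELD.findall(line)}

line = ("write_statistics { persisting_time { nanos: 3000000 } "
        "min_queue_wait_time { nanos: 26000000 } "
        "max_queue_wait_time { nanos: 26000000 } "
        "partition_quota_wait_time { } topic_quota_wait_time { } }")
print(ack_latencies_ms(line))
# {'persisting_time': 3.0, 'min_queue_wait_time': 26.0, ...}
```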
2025-03-31T16:19:23.383199Z node 2 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-03-31T16:19:23.383272Z node 2 :PERSQUEUE DEBUG: Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp done, result 1743437963352 queuesize 0 startOffset 0 2025-03-31T16:19:23.383367Z node 2 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 1 requestId: cookie: 1 2025-03-31T16:19:23.383580Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::IEventHandle 2025-03-31T16:19:23.384371Z :DEBUG: [/Root] TraceId [] SessionId [src_id|9b2c4bb9-e93b2281-96281f92-e1f37b3b_0] MessageGroupId [src_id] Write session: OnReadDone gRpcStatusCode: 0 2025-03-31T16:19:23.384921Z :DEBUG: [/Root] TraceId [] SessionId [src_id|9b2c4bb9-e93b2281-96281f92-e1f37b3b_0] MessageGroupId [src_id] Write session got write response: acks { seq_no: 1 written { } } write_statistics { persisting_time { nanos: 3000000 } min_queue_wait_time { nanos: 26000000 } max_queue_wait_time { nanos: 26000000 } partition_quota_wait_time { } topic_quota_wait_time { } } 2025-03-31T16:19:23.384962Z :DEBUG: [/Root] TraceId [] SessionId [src_id|9b2c4bb9-e93b2281-96281f92-e1f37b3b_0] MessageGroupId [src_id] OnAck: seqNo=1, txId=? 2025-03-31T16:19:23.384994Z :DEBUG: [/Root] TraceId [] SessionId [src_id|9b2c4bb9-e93b2281-96281f92-e1f37b3b_0] MessageGroupId [src_id] Write session: acknoledged message 1 === Inside AcksHandler 2025-03-31T16:19:23.385217Z :DEBUG: [/Root] TraceId [] SessionId [src_id|9b2c4bb9-e93b2281-96281f92-e1f37b3b_0] MessageGroupId [src_id] Write 1 messages with Id from 2 to 2 === Inside ReadyToAcceptHandler === AcksHandler has written a message, closing the session 2025-03-31T16:19:23.385500Z :DEBUG: [/Root] TraceId [] SessionId [src_id|9b2c4bb9-e93b2281-96281f92-e1f37b3b_0] MessageGroupId [src_id] Write session: try to update token 2025-03-31T16:19:23.385537Z :DEBUG: [/Root] TraceId [] SessionId [src_id|9b2c4bb9-e93b2281-96281f92-e1f37b3b_0] MessageGroupId [src_id] Send 1 message(s) (0 left), first sequence number is 2 2025-03-31T16:19:23.386061Z node 1 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: src_id|9b2c4bb9-e93b2281-96281f92-e1f37b3b_0 grpc read done: success: 1 data: write_request[data omitted] 2025-03-31T16:19:23.386317Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::NPQ::TEvPartitionWriter::TEvWriteRequest 2025-03-31T16:19:23.386791Z node 2 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-03-31T16:19:23.386835Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-03-31T16:19:23.386980Z node 2 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 2 requestId: cookie: 2 2025-03-31T16:19:23.387202Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::IEventHandle 2025-03-31T16:19:23.387528Z node 2 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-03-31T16:19:23.387560Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-03-31T16:19:23.387629Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message topic: rt3.dc1--test-topic partition: 0 SourceId: '\0src_id' SeqNo: 2 partNo : 0 messageNo: 3 size 107 offset: -1 2025-03-31T16:19:23.387825Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, 
State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob processing sourceId '\0src_id' seqNo 2 partNo 0 2025-03-31T16:19:23.388911Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob complete sourceId '\0src_id' seqNo 2 partNo 0 FormedBlobsCount 0 NewHead: Offset 1 PartNo 0 PackedSize 181 count 1 nextOffset 2 batches 1 2025-03-31T16:19:23.389372Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Add new write blob: topic 'rt3.dc1--test-topic' partition 0 compactOffset 1,1 HeadOffset 0 endOffset 1 curOffset 2 d0000000000_00000000000000000001_00000_0000000001_00000| size 169 WTime 1743437963388 2025-03-31T16:19:23.389481Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-31T16:19:23.389500Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-31T16:19:23.389514Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-03-31T16:19:23.389528Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-31T16:19:23.389542Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] m0000000000psrc_id 2025-03-31T16:19:23.389554Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] d0000000000_00000000000000000001_00000_0000000001_00000| 2025-03-31T16:19:23.389564Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] i0000000000 2025-03-31T16:19:23.389576Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-31T16:19:23.389588Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] =========================== 2025-03-31T16:19:23.389633Z node 2 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-03-31T16:19:23.389696Z node 2 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 1 partNo 0 count 1 size 169 2025-03-31T16:19:23.392168Z node 2 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 1 count 1 size 169 actorID [2:7488009026499061582:2425] 2025-03-31T16:19:23.392253Z node 2 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. Tablet '72075186224037892' partition 0 offset 1 partno 0 count 1 parts 0 size 169 2025-03-31T16:19:23.392278Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 114 WriteNewSizeFromSupportivePartitions# 0 2025-03-31T16:19:23.392318Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-03-31T16:19:23.392366Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0src_id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 2, partNo: 0, Offset: 1 is stored on disk 2025-03-31T16:19:23.392381Z node 2 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 3 requestId: cookie: 2 2025-03-31T16:19:23.392573Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::IEventHandle 2025-03-31T16:19:23.393287Z :DEBUG: [/Root] TraceId [] SessionId [src_id|9b2c4bb9-e93b2281-96281f92-e1f37b3b_0] MessageGroupId [src_id] Write session: OnReadDone gRpcStatusCode: 0 2025-03-31T16:19:23.393462Z :DEBUG: [/Root] TraceId [] SessionId [src_id|9b2c4bb9-e93b2281-96281f92-e1f37b3b_0] MessageGroupId [src_id] Write session got write response: acks { seq_no: 2 written { offset: 1 } } write_statistics { persisting_time { nanos: 3000000 } min_queue_wait_time { nanos: 1000000 } max_queue_wait_time { nanos: 1000000 } partition_quota_wait_time { } topic_quota_wait_time { } } 2025-03-31T16:19:23.393498Z :DEBUG: [/Root] TraceId [] SessionId [src_id|9b2c4bb9-e93b2281-96281f92-e1f37b3b_0] MessageGroupId [src_id] OnAck: seqNo=2, txId=? 2025-03-31T16:19:23.393526Z :DEBUG: [/Root] TraceId [] SessionId [src_id|9b2c4bb9-e93b2281-96281f92-e1f37b3b_0] MessageGroupId [src_id] Write session: acknoledged message 2 === Inside AcksHandler === Inside SessionClosedHandler 2025-03-31T16:19:23.393814Z :DEBUG: [/Root] TraceId [] SessionId [src_id|9b2c4bb9-e93b2281-96281f92-e1f37b3b_0] MessageGroupId [src_id] Write 1 messages with Id from 3 to 3 === SessionClosedHandler has 'written' a message 2025-03-31T16:19:23.393979Z :INFO: [/Root] TraceId [] SessionId [src_id|9b2c4bb9-e93b2281-96281f92-e1f37b3b_0] MessageGroupId [src_id] Write session: close. 
Timeout 0.000000s 2025-03-31T16:19:23.394021Z :INFO: [/Root] TraceId [] SessionId [src_id|9b2c4bb9-e93b2281-96281f92-e1f37b3b_0] MessageGroupId [src_id] Write session will now close 2025-03-31T16:19:23.394074Z :DEBUG: [/Root] TraceId [] SessionId [src_id|9b2c4bb9-e93b2281-96281f92-e1f37b3b_0] MessageGroupId [src_id] Write session: aborting 2025-03-31T16:19:23.394858Z :WARNING: [/Root] TraceId [] SessionId [src_id|9b2c4bb9-e93b2281-96281f92-e1f37b3b_0] MessageGroupId [src_id] Write session: could not confirm all writes in time or session aborted, perform hard shutdown 2025-03-31T16:19:23.394915Z :DEBUG: [/Root] TraceId [] SessionId [src_id|9b2c4bb9-e93b2281-96281f92-e1f37b3b_0] MessageGroupId [src_id] Write session: OnReadDone gRpcStatusCode: 1, Msg: CANCELLED, Details: , InternalError: 0 2025-03-31T16:19:23.394964Z :DEBUG: [/Root] TraceId [] SessionId [src_id|9b2c4bb9-e93b2281-96281f92-e1f37b3b_0] MessageGroupId [src_id] Write session is aborting and will not restart 2025-03-31T16:19:23.395647Z :DEBUG: [/Root] TraceId [] SessionId [src_id|9b2c4bb9-e93b2281-96281f92-e1f37b3b_0] MessageGroupId [src_id] Write session: destroy 2025-03-31T16:19:23.395598Z node 1 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: src_id|9b2c4bb9-e93b2281-96281f92-e1f37b3b_0 grpc read done: success: 0 data: 2025-03-31T16:19:23.395622Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: src_id|9b2c4bb9-e93b2281-96281f92-e1f37b3b_0 grpc read failed 2025-03-31T16:19:23.395661Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: src_id|9b2c4bb9-e93b2281-96281f92-e1f37b3b_0 grpc closed 2025-03-31T16:19:23.395695Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: src_id|9b2c4bb9-e93b2281-96281f92-e1f37b3b_0 is DEAD 2025-03-31T16:19:23.396922Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-03-31T16:19:23.397295Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [1:7488009037061449029:2535] destroyed 2025-03-31T16:19:23.397337Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. >> TMultiversionObjectMap::MonteCarlo [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestSecondaryClearanceAfterShardRestartRace [GOOD] Test command err: 2025-03-31T16:19:14.570884Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:19:14.571114Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:19:14.571255Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0011a7/r3tmp/tmpw9rhCk/pdisk_1.dat 2025-03-31T16:19:14.991035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:19:15.036057Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:19:15.074968Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-31T16:19:15.076235Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:19:15.076374Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:19:15.076542Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-03-31T16:19:15.088300Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:19:15.174190Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-03-31T16:19:15.174292Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-03-31T16:19:15.175245Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-03-31T16:19:15.317836Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 2 } } } ExecTimeoutPeriod: 18446744073709551615 2025-03-31T16:19:15.317957Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T16:19:15.318599Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-03-31T16:19:15.318692Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:19:15.319002Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:19:15.319217Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T16:19:15.319329Z node 1 :TX_PROXY DEBUG: Actor# 
[1:641:2549] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-03-31T16:19:15.319668Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvClientConnected 2025-03-31T16:19:15.321300Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:19:15.322381Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-03-31T16:19:15.322458Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-03-31T16:19:15.357472Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:672:2573]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:19:15.358864Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:672:2573]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:19:15.359370Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:672:2573] 2025-03-31T16:19:15.359647Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:19:15.404691Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:672:2573]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:19:15.405121Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:658:2565], Recipient [1:674:2575]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:19:15.406360Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:19:15.406571Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:19:15.408416Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:19:15.408497Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:19:15.408555Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:19:15.408983Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:19:15.409089Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:658:2565], Recipient [1:674:2575]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:19:15.409540Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:674:2575] 2025-03-31T16:19:15.409756Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:19:15.418632Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:19:15.418769Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:700:2573] in generation 1 2025-03-31T16:19:15.418961Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:658:2565], Recipient [1:674:2575]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:19:15.419721Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:19:15.419808Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:19:15.421260Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-03-31T16:19:15.421325Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 
72075186224037889 2025-03-31T16:19:15.421368Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-03-31T16:19:15.421669Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:19:15.421788Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:19:15.421853Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037889 persisting started state actor id [1:706:2575] in generation 1 2025-03-31T16:19:15.432786Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:19:15.470510Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T16:19:15.470750Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:19:15.470878Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:709:2594] 2025-03-31T16:19:15.470909Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:19:15.470937Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T16:19:15.470971Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:19:15.471251Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:672:2573], Recipient [1:672:2573]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:15.471299Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:15.471364Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:19:15.471391Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-03-31T16:19:15.471430Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:19:15.471469Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:710:2595] 2025-03-31T16:19:15.471483Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-03-31T16:19:15.471498Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-03-31T16:19:15.471521Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-31T16:19:15.471771Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:674:2575], Recipient [1:674:2575]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:15.471794Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:15.471950Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T16:19:15.472080Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T16:19:15.472289Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:19:15.472341Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:19:15.472422Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-31T16:19:15.472469Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no 
attached operations 2025-03-31T16:19:15.472506Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-31T16:19:15.472539Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:19:15.472595Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:19:15.472658Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-03-31T16:19:15.472778Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-03-31T16:19:15.473261Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:687:2581], Recipient [1:672:2573]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:15.473304Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:15.473377Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:664:2569], serverId# [1:687:2581], sessionId# [0:0:0] 2025-03-31T16:19:15.473434Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 202 ... e tx, become ExecuteState 2025-03-31T16:19:24.867529Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:974:2765] TxId: 281474976715662. Ctx: { TraceId: 01jqpg09gea0ahn3wmxtd7zmmg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjI5Y2NlNWItM2QwYTE0YTktNzZkODI2NjktYzAxMDRiM2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:977:2765], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { } 2025-03-31T16:19:24.867608Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:974:2765] TxId: 281474976715662. Ctx: { TraceId: 01jqpg09gea0ahn3wmxtd7zmmg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjI5Y2NlNWItM2QwYTE0YTktNzZkODI2NjktYzAxMDRiM2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:977:2765], 2025-03-31T16:19:24.867668Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:974:2765] TxId: 281474976715662. Ctx: { TraceId: 01jqpg09gea0ahn3wmxtd7zmmg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjI5Y2NlNWItM2QwYTE0YTktNzZkODI2NjktYzAxMDRiM2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:977:2765], 2025-03-31T16:19:24.868410Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:974:2765] TxId: 281474976715662. Ctx: { TraceId: 01jqpg09gea0ahn3wmxtd7zmmg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjI5Y2NlNWItM2QwYTE0YTktNzZkODI2NjktYzAxMDRiM2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:977:2765], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 986 Tasks { TaskId: 1 CpuTimeUs: 422 FinishTimeMs: 1743437964868 EgressBytes: 10 EgressRows: 1 ComputeCpuTimeUs: 19 BuildCpuTimeUs: 403 HostName: "ghrun-tuhoqst4xe" NodeId: 2 CreateTimeMs: 1743437964866 } MaxMemoryUsage: 1048576 } 2025-03-31T16:19:24.868491Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715662. Ctx: { TraceId: 01jqpg09gea0ahn3wmxtd7zmmg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjI5Y2NlNWItM2QwYTE0YTktNzZkODI2NjktYzAxMDRiM2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Compute actor has finished execution: [2:977:2765] 2025-03-31T16:19:24.868574Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:974:2765] TxId: 281474976715662. Ctx: { TraceId: 01jqpg09gea0ahn3wmxtd7zmmg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjI5Y2NlNWItM2QwYTE0YTktNzZkODI2NjktYzAxMDRiM2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Send Commit to BufferActor=[2:973:2765] 2025-03-31T16:19:24.868636Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:974:2765] TxId: 281474976715662. Ctx: { TraceId: 01jqpg09gea0ahn3wmxtd7zmmg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjI5Y2NlNWItM2QwYTE0YTktNzZkODI2NjktYzAxMDRiM2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.000986s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-03-31T16:19:24.869049Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [2:982:2781], Recipient [2:932:2748]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:24.869108Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:24.869147Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:981:2780], serverId# [2:982:2781], sessionId# [0:0:0] 2025-03-31T16:19:24.869299Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 278003712, Sender [2:980:2765], Recipient [2:932:2748]: NKikimrDataEvents.TEvWrite Operations { Type: OPERATION_UPSERT TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } ColumnIds: 1 ColumnIds: 2 PayloadIndex: 0 PayloadFormat: FORMAT_CELLVEC } TxMode: MODE_IMMEDIATE 2025-03-31T16:19:24.869320Z node 2 :TX_DATASHARD TRACE: Handle TTxWrite: at tablet# 72075186224037888 2025-03-31T16:19:24.869416Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435074, Sender [2:932:2748], Recipient [2:932:2748]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2025-03-31T16:19:24.869436Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2025-03-31T16:19:24.869502Z node 2 :TX_DATASHARD TRACE: TTxWrite:: execute at tablet# 72075186224037888 2025-03-31T16:19:24.869622Z node 2 :TX_DATASHARD TRACE: Parsing write transaction for 0 at 72075186224037888, record: Operations { Type: OPERATION_UPSERT TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } ColumnIds: 1 ColumnIds: 2 PayloadIndex: 0 PayloadFormat: FORMAT_CELLVEC } TxMode: MODE_IMMEDIATE 2025-03-31T16:19:24.970170Z node 2 :TX_DATASHARD TRACE: Table /Root/table-1, shard: 72075186224037888, write point (Uint32 : 4) 2025-03-31T16:19:24.970272Z node 2 :TX_DATASHARD TRACE: -- AddWriteRange: (Uint32 : 4) table: [72057594046644480:2:1] 2025-03-31T16:19:24.970406Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037888 on unit CheckWrite 2025-03-31T16:19:24.970474Z node 2 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037888 is Executed 2025-03-31T16:19:24.970524Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037888 executing on unit CheckWrite 2025-03-31T16:19:24.970570Z node 2 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-03-31T16:19:24.970611Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037888 on unit BuildAndWaitDependencies 2025-03-31T16:19:24.970658Z node 2 :TX_DATASHARD TRACE: GetMvccTxVersion at 
72075186224037888 CompleteEdge# v2000/281474976715661 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1500/18446744073709551615 ImmediateWriteEdgeReplied# v1500/18446744073709551615 2025-03-31T16:19:24.970737Z node 2 :TX_DATASHARD TRACE: Activated operation [0:2] at 72075186224037888 2025-03-31T16:19:24.970780Z node 2 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037888 is Executed 2025-03-31T16:19:24.970821Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-03-31T16:19:24.970846Z node 2 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037888 to execution unit ExecuteWrite 2025-03-31T16:19:24.970871Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037888 on unit ExecuteWrite 2025-03-31T16:19:24.970903Z node 2 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888 2025-03-31T16:19:24.970964Z node 2 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2000/281474976715661 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1500/18446744073709551615 ImmediateWriteEdgeReplied# v1500/18446744073709551615 2025-03-31T16:19:24.971126Z node 2 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row count=1 2025-03-31T16:19:24.971184Z node 2 :TX_DATASHARD TRACE: add locks to result: 0 2025-03-31T16:19:24.971258Z node 2 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-31T16:19:24.971292Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037888 executing on unit ExecuteWrite 2025-03-31T16:19:24.971337Z node 2 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037888 to execution unit FinishProposeWrite 2025-03-31T16:19:24.971380Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037888 on unit FinishProposeWrite 2025-03-31T16:19:24.971455Z node 2 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-31T16:19:24.971492Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037888 executing on unit FinishProposeWrite 2025-03-31T16:19:24.971529Z node 2 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037888 to execution unit CompletedOperations 2025-03-31T16:19:24.971564Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037888 on unit CompletedOperations 2025-03-31T16:19:24.971612Z node 2 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037888 is Executed 2025-03-31T16:19:24.971638Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037888 executing on unit CompletedOperations 2025-03-31T16:19:24.971668Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:2] at 72075186224037888 has finished 2025-03-31T16:19:24.983335Z node 2 :TX_DATASHARD TRACE: TTxWrite complete: at tablet# 72075186224037888 2025-03-31T16:19:24.983443Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:2] at 72075186224037888 on unit FinishProposeWrite 2025-03-31T16:19:24.983507Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 2 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_COMPLETED 2025-03-31T16:19:24.983620Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:19:24.985225Z node 2 :KQP_EXECUTER INFO: ActorId: [2:974:2765] TxId: 281474976715662. 
Ctx: { TraceId: 01jqpg09gea0ahn3wmxtd7zmmg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjI5Y2NlNWItM2QwYTE0YTktNzZkODI2NjktYzAxMDRiM2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Full stats: { CpuTimeUs: 3129 DurationUs: 1743437962983565 Tables { TablePath: "/Root/table-1" WriteRows: 1 WriteBytes: 8 AffectedPartitions: 1 } ExecuterCpuTimeUs: 2143 StartTimeMs: 2000 FinishTimeMs: 1743437964984 Stages { StageGuid: "d1a76da8-e8876ab2-7c9991d3-4de95eca" Program: "(\n(return (lambda \'() (block \'(\n (let $1 (Just (Uint32 \'4)))\n (return (Iterator (AsList (AsStruct \'(\'\"key\" $1) \'(\'\"value\" $1)))))\n))))\n)\n" ComputeActors { CpuTimeUs: 986 Tasks { TaskId: 1 CpuTimeUs: 422 FinishTimeMs: 1743437964868 EgressBytes: 10 EgressRows: 1 ComputeCpuTimeUs: 19 BuildCpuTimeUs: 403 HostName: "ghrun-tuhoqst4xe" NodeId: 2 CreateTimeMs: 1743437964866 } MaxMemoryUsage: 1048576 } BaseTimeMs: 1743437964868 } TxPlansWithStats: "{\"Node Type\":\"Phase\",\"PlanNodeId\":3,\"Plans\":[{\"Node Type\":\"Sink\",\"PlanNodeId\":2,\"Plans\":[{\"Node Type\":\"ConstantExpr-Sink\",\"Operators\":[{\"Inputs\":[],\"Iterator\":\"[{key: 4,value: 4}]\",\"Name\":\"Iterator\"},{\"Inputs\":[],\"Name\":\"Upsert\",\"Path\":\"\\/Root\\/table-1\",\"SinkType\":\"KqpTableSink\",\"Table\":\"table-1\"}],\"PlanNodeId\":1,\"StageGuid\":\"d1a76da8-e8876ab2-7c9991d3-4de95eca\",\"Stats\":{\"BaseTimeMs\":1743437964868,\"ComputeNodes\":[{\"CpuTimeUs\":986,\"Tasks\":[{\"ComputeTimeUs\":19,\"EgressBytes\":10,\"EgressRows\":1,\"FinishTimeMs\":1743437964868,\"Host\":\"ghrun-tuhoqst4xe\",\"NodeId\":2,\"TaskId\":1}]}],\"FinishedTasks\":0,\"PhysicalStageId\":0,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"},\"Tables\":[\"table-1\"]}],\"StageGuid\":\"\"}],\"StageGuid\":\"\"}" StatConvertBytes: 722 Extra { type_url: "type.googleapis.com/NKqpProto.TKqpExecutionExtraStats" value: "\010\001\022\013\010\332\007\020\332\007\030\332\007 \001" } } 2025-03-31T16:19:24.985335Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:974:2765] TxId: 281474976715662. Ctx: { TraceId: 01jqpg09gea0ahn3wmxtd7zmmg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjI5Y2NlNWItM2QwYTE0YTktNzZkODI2NjktYzAxMDRiM2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-03-31T16:19:24.985391Z node 2 :KQP_EXECUTER TRACE: ActorId: [2:974:2765] TxId: 281474976715662. Ctx: { TraceId: 01jqpg09gea0ahn3wmxtd7zmmg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjI5Y2NlNWItM2QwYTE0YTktNzZkODI2NjktYzAxMDRiM2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Terminate, become ZombieState >> TGroupMapperTest::NonUniformCluster [GOOD] |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TMultiversionObjectMap::MonteCarlo [GOOD] >> DataShardTxOrder::ZigZag [GOOD] >> DataShardOutOfOrder::TestImmediateQueueThenSplit+UseSink >> Cdc::DecimalKey [GOOD] >> Cdc::DropColumn >> DataShardOutOfOrder::TestUnprotectedReadsThenWriteVisibility [GOOD] |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::NonUniformCluster [GOOD] >> Cdc::SupportedTypes [GOOD] >> Cdc::SplitTopicPartition_TopicAutoPartitioning ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ZigZag [GOOD] Test command err: 2025-03-31T16:19:20.374142Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:19:20.375391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:19:20.375434Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:19:20.384450Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:19:20.384892Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:131:2154] 2025-03-31T16:19:20.385160Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:19:20.395362Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:19:20.437023Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:19:20.437371Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:19:20.438966Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-03-31T16:19:20.439068Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-03-31T16:19:20.439165Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-03-31T16:19:20.439562Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:19:20.440275Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:19:20.440370Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:197:2154] in generation 2 2025-03-31T16:19:20.511890Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:19:20.545338Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-03-31T16:19:20.545522Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:19:20.545642Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-03-31T16:19:20.545680Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-03-31T16:19:20.545735Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-03-31T16:19:20.545771Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:19:20.545995Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:20.546065Z node 1 :TX_DATASHARD TRACE: 
StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:20.546339Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-03-31T16:19:20.546428Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-03-31T16:19:20.546481Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-31T16:19:20.546524Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:19:20.546557Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-03-31T16:19:20.546590Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-31T16:19:20.546622Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-31T16:19:20.546653Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-03-31T16:19:20.546700Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:19:20.546807Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:20.546868Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:20.546931Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-03-31T16:19:20.549148Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:131:2154]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-03-31T16:19:20.549206Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T16:19:20.549289Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-31T16:19:20.549455Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-03-31T16:19:20.549499Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-03-31T16:19:20.549555Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-03-31T16:19:20.549593Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-31T16:19:20.549639Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-03-31T16:19:20.549688Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-03-31T16:19:20.549721Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-03-31T16:19:20.550005Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-03-31T16:19:20.550059Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-03-31T16:19:20.550104Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-03-31T16:19:20.550134Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-03-31T16:19:20.550172Z node 1 :TX_DATASHARD TRACE: Execution 
status for [0:1] at 9437184 is DelayComplete 2025-03-31T16:19:20.550208Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-03-31T16:19:20.550246Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-03-31T16:19:20.550295Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-03-31T16:19:20.550329Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-03-31T16:19:20.563921Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-03-31T16:19:20.564009Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-03-31T16:19:20.564043Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-03-31T16:19:20.564105Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-03-31T16:19:20.564179Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-03-31T16:19:20.564692Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:20.564770Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:20.564832Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-03-31T16:19:20.564995Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:131:2154]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-03-31T16:19:20.565029Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-31T16:19:20.565162Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-03-31T16:19:20.565209Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-31T16:19:20.565244Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-03-31T16:19:20.565277Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-03-31T16:19:20.569202Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-03-31T16:19:20.569271Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:19:20.569494Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:20.569537Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:20.569583Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-31T16:19:20.569619Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:19:20.569649Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-31T16:19:20.569688Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 
2025-03-31T16:19:20.569725Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-03-31T16:19:20.569762Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-31T16:19:20.569798Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-03-31T16:19:20.569833Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-03-31T16:19:20.569868Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-03-31T16:19:20.570067Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-03-31T16:19:20.570115Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-31T16:19:20.570153Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-03-31T16:19:20.570178Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-03-31T16:19:20.570199Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-03-31T16:19:20.570252Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-31T16:19:20.570280Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-03-31T16:19:20.570313Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-31T16:19:20.570346Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-03-31T16:19:20.570408Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-03-31T16:19:20.570440Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-03-31T16:19:20.570471Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-03-31T16:19:20.570508Z node 1 :TX_DATA ... 
CE: Advance execution plan for [1000016:45] at 9437185 executing on unit ExecuteDataTx 2025-03-31T16:19:27.300567Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit CompleteOperation 2025-03-31T16:19:27.300591Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit CompleteOperation 2025-03-31T16:19:27.300794Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is DelayComplete 2025-03-31T16:19:27.300829Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit CompleteOperation 2025-03-31T16:19:27.300862Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit CompletedOperations 2025-03-31T16:19:27.300894Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit CompletedOperations 2025-03-31T16:19:27.300927Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-03-31T16:19:27.300949Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit CompletedOperations 2025-03-31T16:19:27.300973Z node 2 :TX_DATASHARD TRACE: Execution plan for [1000016:45] at 9437185 has finished 2025-03-31T16:19:27.301021Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:19:27.301068Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437185 2025-03-31T16:19:27.301130Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437185 has no attached operations 2025-03-31T16:19:27.301168Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437185 2025-03-31T16:19:27.301386Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [2:99:2134], Recipient [2:236:2229]: {TEvPlanStep step# 1000016 MediatorId# 0 TabletID 9437184} 2025-03-31T16:19:27.301431Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-31T16:19:27.301532Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit WaitForPlan 2025-03-31T16:19:27.301565Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-03-31T16:19:27.301591Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit WaitForPlan 2025-03-31T16:19:27.301618Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit PlanQueue 2025-03-31T16:19:27.301762Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 45 at step 1000016 at tablet 9437184 { Transactions { TxId: 45 AckTo { RawX1: 99 RawX2: 8589936726 } } Step: 1000016 MediatorID: 0 TabletID: 9437184 } 2025-03-31T16:19:27.301812Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:19:27.302014Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:236:2229], Recipient [2:236:2229]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:27.302049Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:27.302100Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-31T16:19:27.302136Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:19:27.302184Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-31T16:19:27.302217Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000016:45] in 
PlanQueue unit at 9437184 2025-03-31T16:19:27.302243Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit PlanQueue 2025-03-31T16:19:27.302272Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-03-31T16:19:27.302309Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit PlanQueue 2025-03-31T16:19:27.302339Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit LoadTxDetails 2025-03-31T16:19:27.302373Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit LoadTxDetails 2025-03-31T16:19:27.303121Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000016:45 keys extracted: 2 2025-03-31T16:19:27.303166Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-03-31T16:19:27.303193Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit LoadTxDetails 2025-03-31T16:19:27.303221Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit FinalizeDataTxPlan 2025-03-31T16:19:27.303259Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit FinalizeDataTxPlan 2025-03-31T16:19:27.303296Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-03-31T16:19:27.303326Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit FinalizeDataTxPlan 2025-03-31T16:19:27.303357Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-31T16:19:27.303390Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit BuildAndWaitDependencies 2025-03-31T16:19:27.303452Z node 2 :TX_DATASHARD TRACE: Operation [1000016:45] is the new logically complete end at 9437184 2025-03-31T16:19:27.303484Z node 2 :TX_DATASHARD TRACE: Operation [1000016:45] is the new logically incomplete end at 9437184 2025-03-31T16:19:27.303510Z node 2 :TX_DATASHARD TRACE: Activated operation [1000016:45] at 9437184 2025-03-31T16:19:27.303543Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-03-31T16:19:27.303564Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit BuildAndWaitDependencies 2025-03-31T16:19:27.303586Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit BuildDataTxOutRS 2025-03-31T16:19:27.303607Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit BuildDataTxOutRS 2025-03-31T16:19:27.303654Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-03-31T16:19:27.303687Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit BuildDataTxOutRS 2025-03-31T16:19:27.303710Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit StoreAndSendOutRS 2025-03-31T16:19:27.303735Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit StoreAndSendOutRS 2025-03-31T16:19:27.303761Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-03-31T16:19:27.303782Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit StoreAndSendOutRS 2025-03-31T16:19:27.303824Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit PrepareDataTxInRS 2025-03-31T16:19:27.303848Z node 2 :TX_DATASHARD TRACE: 
Trying to execute [1000016:45] at 9437184 on unit PrepareDataTxInRS 2025-03-31T16:19:27.303876Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-03-31T16:19:27.303898Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit PrepareDataTxInRS 2025-03-31T16:19:27.303951Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit LoadAndWaitInRS 2025-03-31T16:19:27.303978Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit LoadAndWaitInRS 2025-03-31T16:19:27.304023Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-03-31T16:19:27.304048Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit LoadAndWaitInRS 2025-03-31T16:19:27.304085Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit ExecuteDataTx 2025-03-31T16:19:27.304121Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit ExecuteDataTx 2025-03-31T16:19:27.304494Z node 2 :TX_DATASHARD TRACE: Executed operation [1000016:45] at tablet 9437184 with status COMPLETE 2025-03-31T16:19:27.304554Z node 2 :TX_DATASHARD TRACE: Datashard execution counters for [1000016:45] at 9437184: {NSelectRow: 2, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 2, SelectRowBytes: 16, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-03-31T16:19:27.304600Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-03-31T16:19:27.304626Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit ExecuteDataTx 2025-03-31T16:19:27.304650Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit CompleteOperation 2025-03-31T16:19:27.304673Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit CompleteOperation 2025-03-31T16:19:27.304857Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is DelayComplete 2025-03-31T16:19:27.304911Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit CompleteOperation 2025-03-31T16:19:27.304938Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit CompletedOperations 2025-03-31T16:19:27.304980Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit CompletedOperations 2025-03-31T16:19:27.305017Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-03-31T16:19:27.305040Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit CompletedOperations 2025-03-31T16:19:27.305066Z node 2 :TX_DATASHARD TRACE: Execution plan for [1000016:45] at 9437184 has finished 2025-03-31T16:19:27.305094Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:19:27.305118Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-31T16:19:27.305146Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-31T16:19:27.305171Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-31T16:19:27.318480Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000016 txid# 45} 2025-03-31T16:19:27.318547Z node 2 :TX_DATASHARD DEBUG: Sending 
'{TEvPlanStepAccepted TabletId# 9437184 step# 1000016} 2025-03-31T16:19:27.318618Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:19:27.318655Z node 2 :TX_DATASHARD TRACE: Complete execution for [1000016:45] at 9437184 on unit CompleteOperation 2025-03-31T16:19:27.318707Z node 2 :TX_DATASHARD DEBUG: Complete [1000016 : 45] from 9437184 at tablet 9437184 send result to client [2:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-03-31T16:19:27.318746Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:19:27.319166Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437185 step# 1000016 txid# 45} 2025-03-31T16:19:27.319210Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437185 step# 1000016} 2025-03-31T16:19:27.319246Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437185 2025-03-31T16:19:27.319279Z node 2 :TX_DATASHARD TRACE: Complete execution for [1000016:45] at 9437185 on unit CompleteOperation 2025-03-31T16:19:27.319322Z node 2 :TX_DATASHARD DEBUG: Complete [1000016 : 45] from 9437185 at tablet 9437185 send result to client [2:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-03-31T16:19:27.319361Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SplitEmptyTwice [GOOD] Test command err: 2025-03-31T16:16:28.254385Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008283209639592:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:28.254518Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001cbb/r3tmp/tmpQPedcz/pdisk_1.dat 2025-03-31T16:16:28.521867Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:28.626694Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:28.626821Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:28.628525Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29458 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-03-31T16:16:28.775427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:16:28.800933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:16:28.928095Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-03-31T16:16:28.931562Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-03-31T16:16:28.951827Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2025-03-31T16:16:28.955652Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2342 1432 5183)b }, ecr=1.000 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743437788890 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "unused004" Type: "Float" TypeId: 33 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name... (TRUNCATED) TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743437788890 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "unused004" Type: "Float" TypeId: 33 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name... (TRUNCATED) waiting... 
2025-03-31T16:16:30.650671Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.97, eph 1} end=0, 2 blobs 252r (max 252), put Spent{time=0.011s,wait=0.002s,interrupts=1} Part{ 1 pk, lobs 0 +0, (73067 0 0)b }, ecr=1.000 2025-03-31T16:16:30.656334Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.98, eph 1} end=0, 2 blobs 756r (max 756), put Spent{time=0.016s,wait=0.004s,interrupts=1} Part{ 1 pk, lobs 0 +0, (49618 0 0)b }, ecr=1.000 2025-03-31T16:16:30.889405Z node 1 :OPS_COMPACT INFO: Compact{72057594046644480.2.515, eph 1} end=0, 2 blobs 3r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (187 0 0)b }, ecr=1.000 2025-03-31T16:16:30.910013Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.200, eph 2} end=0, 2 blobs 498r (max 507), put Spent{time=0.010s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (144119 0 0)b }, ecr=1.000 2025-03-31T16:16:30.927196Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.207, eph 1} end=0, 2 blobs 1500r (max 1500), put Spent{time=0.015s,wait=0.005s,interrupts=1} Part{ 1 pk, lobs 0 +0, (103070 0 0)b }, ecr=1.000 2025-03-31T16:16:30.927944Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.202, eph 1} end=0, 2 blobs 2r (max 2), put Spent{time=0.006s,wait=0.005s,interrupts=1} Part{ 1 pk, lobs 0 +0, (252 0 0)b }, ecr=1.000 2025-03-31T16:16:30.929003Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.203, eph 1} end=0, 2 blobs 2r (max 2), put Spent{time=0.006s,wait=0.006s,interrupts=1} Part{ 1 pk, lobs 0 +0, (181 0 0)b }, ecr=1.000 2025-03-31T16:16:30.930132Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.206, eph 1} end=0, 2 blobs 501r (max 501), put Spent{time=0.007s,wait=0.003s,interrupts=1} Part{ 1 pk, lobs 0 +0, (31966 0 0)b }, ecr=1.000 2025-03-31T16:16:30.954692Z node 1 :OPS_COMPACT INFO: Compact{72057594046644480.2.541, eph 1} end=0, 2 blobs 10001r (max 10001), put Spent{time=0.053s,wait=0.006s,interrupts=1} Part{ 1 pk, lobs 0 +0, (553660 0 0)b }, ecr=1.000 2025-03-31T16:16:30.956268Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.210, eph 2} end=0, 2 blobs 1509r (max 1512), put Spent{time=0.034s,wait=0.001s,interrupts=1} Part{ 1 pk, lobs 0 +0, (98941 0 0)b }, ecr=1.000 2025-03-31T16:16:31.065674Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.327, eph 3} end=0, 2 blobs 749r (max 750), put Spent{time=0.014s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (216651 0 0)b }, ecr=1.000 2025-03-31T16:16:31.073535Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.331, eph 3} end=0, 2 blobs 2262r (max 2265), put Spent{time=0.019s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (148264 0 0)b }, ecr=1.000 2025-03-31T16:16:31.177603Z node 1 :OPS_COMPACT INFO: Compact{72057594046644480.2.1022, eph 2} end=0, 2 blobs 3r (max 5), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (187 0 0)b }, ecr=1.000 2025-03-31T16:16:31.208475Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.439, eph 2} end=0, 2 blobs 2r (max 3), put Spent{time=0.018s,wait=0.016s,interrupts=1} Part{ 1 pk, lobs 0 +0, (252 0 0)b }, ecr=1.000 2025-03-31T16:16:31.208515Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.440, eph 2} end=0, 2 blobs 2r (max 3), put Spent{time=0.018s,wait=0.015s,interrupts=1} Part{ 1 pk, lobs 0 +0, (181 0 0)b }, ecr=1.000 2025-03-31T16:16:31.209698Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.437, eph 4} end=0, 2 blobs 1000r (max 1001), put Spent{time=0.020s,wait=0.001s,interrupts=1} Part{ 1 pk, lobs 0 +0, (289138 0 0)b }, ecr=1.000 
2025-03-31T16:16:31.248834Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.441, eph 2} end=0, 2 blobs 3000r (max 3000), put Spent{time=0.058s,wait=0.024s,interrupts=1} Part{ 1 pk, lobs 0 +0, (205952 0 0)b }, ecr=1.000 2025-03-31T16:16:31.263986Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.443, eph 2} end=0, 2 blobs 1002r (max 1002), put Spent{time=0.073s,wait=0.034s,interrupts=1} Part{ 1 pk, lobs 0 +0, (63729 0 0)b }, ecr=1.000 2025-03-31T16:16:31.282475Z node 1 :OPS_COMPACT INFO: Compact{72057594046644480.2.1050, eph 2} end=0, 2 blobs 10001r (max 10503), put Spent{time=0.091s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (553660 0 0)b }, ecr=1.000 2025-03-31T16:16:31.289254Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.446, eph 4} end=0, 2 blobs 3015r (max 3018), put Spent{time=0.081s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (197587 0 0)b }, ecr=1.000 2025-03-31T16:16:31.330653Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.547, eph 5} end=0, 2 blobs 1251r (max 1252), put Spent{time=0.017s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (361670 0 0)b }, ecr=1.000 2025-03-31T16:16:31.338128Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.550, eph 5} end=0, 2 blobs 3768r (max 3771), put Spent{time=0.022s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (246910 0 0)b }, ecr=1.000 2025-03-31T16:16:31.412624Z node 1 :OPS_COMPACT INFO: Compact{72057594046644480.2.1528, eph 3} end=0, 2 blobs 3r (max 5), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (187 0 0)b }, ecr=1.000 2025-03-31T16:16:31.428022Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.648, eph 3} end=0, 2 blobs 2r (max 3), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (252 0 0)b }, ecr=1.000 2025-03-31T16:16:31.429204Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.649, eph 3} end=0, 2 blobs 2r (max 3), put Spent{time=0.002s,wait=0.001s,interrupts=1} Part{ 1 pk, lobs 0 +0, (181 0 0)b }, ecr=1.000 2025-03-31T16:16:31.465895Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.646, eph 3} end=0, 2 blobs 4503r (max 4503), put Spent{time=0.040s,wait=0.015s,interrupts=1} Part{ 1 pk, lobs 0 +0, (309038 0 0)b }, ecr=1.000 2025-03-31T16:16:31.467005Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.650, eph 6} end=0, 2 blobs 1502r (max 1503), put Spent{time=0.040s,wait=0.009s,interrupts=1} Part{ 1 pk, lobs 0 +0, (434157 0 0)b }, ecr=1.000 2025-03-31T16:16:31.481153Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.652, eph 3} end=0, 2 blobs 1503r (max 1503), put Spent{time=0.054s,wait=0.010s,interrupts=1} Part{ 1 pk, lobs 0 +0, (95542 0 0)b }, ecr=1.000 2025-03-31T16:16:31.503386Z node 1 :OPS_COMPACT INFO: Compact{72057594046644480.2.1558, eph 3} ... 
ARD DEBUG: Free shard 72057594046644480:1 hive 72057594037968897 at ss 72057594046644480 2025-03-31T16:19:18.989942Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715690:0, at schemeshard: 72057594046644480 2025-03-31T16:19:18.989985Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 281474976715690:0 ProgressState, at schemeshard: 72057594046644480 2025-03-31T16:19:18.990410Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 9 2025-03-31T16:19:18.990571Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715690:0 progress is 1/1 2025-03-31T16:19:18.990592Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715690 ready parts: 1/1 2025-03-31T16:19:18.990615Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715690:0 progress is 1/1 2025-03-31T16:19:18.990627Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715690 ready parts: 1/1 2025-03-31T16:19:18.990641Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715690, ready parts: 1/1, is published: true 2025-03-31T16:19:18.990694Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:7488009013724466246:2417] message: TxId: 281474976715690 2025-03-31T16:19:18.990719Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715690 ready parts: 1/1 2025-03-31T16:19:18.990750Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715690:0 2025-03-31T16:19:18.990767Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715690:0 2025-03-31T16:19:18.990930Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 8 2025-03-31T16:19:18.991439Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2025-03-31T16:19:18.991734Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 7 2025-03-31T16:19:18.992294Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:1 2025-03-31T16:19:18.992329Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2025-03-31T16:19:18.994250Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 128 StatusCode: PATH_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Path not found" 2025-03-31T16:19:19.003453Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7488009013724466020 RawX2: 4503608217307465 } TabletId: 72075186224037895 State: 4 2025-03-31T16:19:19.003528Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037895, state: Offline, at schemeshard: 72057594046644480 2025-03-31T16:19:19.003753Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, 
message: Source { RawX1: 7488009013724466017 RawX2: 4503608217307464 } TabletId: 72075186224037893 State: 4 2025-03-31T16:19:19.003790Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037893, state: Offline, at schemeshard: 72057594046644480 2025-03-31T16:19:19.003943Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7488009013724465843 RawX2: 4503608217307446 } TabletId: 72075186224037890 State: 4 2025-03-31T16:19:19.003971Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-03-31T16:19:19.004092Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7488009013724466034 RawX2: 4503608217307466 } TabletId: 72075186224037894 State: 4 2025-03-31T16:19:19.004122Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037894, state: Offline, at schemeshard: 72057594046644480 2025-03-31T16:19:19.004222Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7488009013724465840 RawX2: 4503608217307445 } TabletId: 72075186224037891 State: 4 2025-03-31T16:19:19.004248Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2025-03-31T16:19:19.004351Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7488009013724465524 RawX2: 4503608217307386 } TabletId: 72075186224037889 State: 4 2025-03-31T16:19:19.004376Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2025-03-31T16:19:19.004547Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:8 hive 72057594037968897 at ss 72057594046644480 2025-03-31T16:19:19.004651Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:6 hive 72057594037968897 at ss 72057594046644480 2025-03-31T16:19:19.004704Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-03-31T16:19:19.004757Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:7 hive 72057594037968897 at ss 72057594046644480 2025-03-31T16:19:19.004805Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-03-31T16:19:19.004877Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2025-03-31T16:19:19.006377Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 8 ShardOwnerId: 72057594046644480 ShardLocalIdx: 8, at schemeshard: 72057594046644480 2025-03-31T16:19:19.006582Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 6 2025-03-31T16:19:19.006740Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 6 ShardOwnerId: 72057594046644480 ShardLocalIdx: 6, at 
schemeshard: 72057594046644480 2025-03-31T16:19:19.006890Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 5 2025-03-31T16:19:19.007010Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-03-31T16:19:19.007183Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4 2025-03-31T16:19:19.007295Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 7 ShardOwnerId: 72057594046644480 ShardLocalIdx: 7, at schemeshard: 72057594046644480 2025-03-31T16:19:19.007434Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-03-31T16:19:19.007540Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-03-31T16:19:19.007681Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-03-31T16:19:19.007777Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-03-31T16:19:19.007931Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-03-31T16:19:19.008134Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-03-31T16:19:19.008169Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-03-31T16:19:19.008221Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-03-31T16:19:19.008233Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037894 not found 2025-03-31T16:19:19.008256Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found 2025-03-31T16:19:19.009018Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2025-03-31T16:19:19.009473Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:8 2025-03-31T16:19:19.009500Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:8 tabletId 72075186224037895 2025-03-31T16:19:19.009538Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:6 2025-03-31T16:19:19.009550Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:6 tabletId 72075186224037893 2025-03-31T16:19:19.009572Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-03-31T16:19:19.009581Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-03-31T16:19:19.009600Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:7 2025-03-31T16:19:19.009608Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:7 tabletId 72075186224037894 2025-03-31T16:19:19.009628Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2025-03-31T16:19:19.009636Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-03-31T16:19:19.009654Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2 2025-03-31T16:19:19.009679Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-03-31T16:19:19.009759Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-03-31T16:19:19.009821Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2025-03-31T16:19:19.010298Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037895 not found 2025-03-31T16:19:19.010329Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037893 not found >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed-EvWrite [GOOD] >> DataShardTxOrder::ImmediateBetweenOnline_Init_oo8 [GOOD] >> BsControllerConfig::MergeIntersectingBoxes >> BsControllerConfig::ManyPDisksRestarts >> BsControllerConfig::ExtendByCreatingSeparateBox >> BsControllerConfig::SelectAllGroups ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestUnprotectedReadsThenWriteVisibility [GOOD] Test command err: 2025-03-31T16:19:22.558089Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:700:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:19:22.558475Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:19:22.558580Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:19:22.559859Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:697:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:19:22.560151Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:19:22.560282Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00117c/r3tmp/tmpvXqQqf/pdisk_1.dat 2025-03-31T16:19:23.008646Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:19:23.209301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:19:23.314603Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:19:23.314727Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:19:23.320334Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:19:23.320444Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:19:23.336833Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:19:23.337476Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:19:23.337872Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:19:23.617653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:19:23.697919Z node 2 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [2:1257:2378], Recipient [2:1282:2390]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:19:23.702548Z node 2 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [2:1257:2378], Recipient [2:1282:2390]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:19:23.703126Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:1282:2390] 2025-03-31T16:19:23.703416Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:19:23.761247Z node 2 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [2:1257:2378], Recipient [2:1282:2390]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:19:23.770181Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:19:23.770884Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:19:23.772735Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:19:23.772826Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:19:23.772884Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:19:23.773302Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:19:23.773627Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:19:23.773713Z node 2 :TX_DATASHARD DEBUG: 
DataShard 72075186224037888 persisting started state actor id [2:1306:2390] in generation 1 2025-03-31T16:19:23.776986Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:19:23.815835Z node 2 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T16:19:23.816058Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:19:23.816208Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [2:1310:2407] 2025-03-31T16:19:23.816266Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:19:23.816310Z node 2 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T16:19:23.816349Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:19:23.816608Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:1282:2390], Recipient [2:1282:2390]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:23.816658Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:23.816930Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T16:19:23.817027Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T16:19:23.817093Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:19:23.817172Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:19:23.817231Z node 2 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-31T16:19:23.817291Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-31T16:19:23.817332Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-31T16:19:23.817371Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:19:23.817424Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:19:23.882801Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [2:1314:2408], Recipient [2:1282:2390]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:23.882862Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:23.882917Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:1266:2774], serverId# [2:1314:2408], sessionId# [0:0:0] 2025-03-31T16:19:23.883288Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:837:2464], Recipient [2:1314:2408] 2025-03-31T16:19:23.883342Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T16:19:23.883466Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:19:23.883695Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-31T16:19:23.883782Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T16:19:23.883883Z node 2 :TX_DATASHARD DEBUG: 
Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T16:19:23.883953Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-31T16:19:23.883998Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-31T16:19:23.884049Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-03-31T16:19:23.884104Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-31T16:19:23.884491Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-31T16:19:23.884536Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-31T16:19:23.884583Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-31T16:19:23.884632Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-31T16:19:23.884682Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-31T16:19:23.884719Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-31T16:19:23.884768Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-31T16:19:23.884806Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-31T16:19:23.884835Z node 2 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-31T16:19:23.892885Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:19:23.892956Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-31T16:19:23.893028Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-31T16:19:23.893091Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-03-31T16:19:23.893196Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-31T16:19:23.893650Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [2:1315:2409], Recipient [2:1282:2390]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-31T16:19:23.893702Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:19:24.256551Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [2:1348:2418], Recipient [2:1282:2390]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:24.256606Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:24.256646Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:1345:2797], serverId# [2:1348:2418], sessionId# [0:0:0] 2025-03-31T16:19:24.258142Z 
node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:1057:2618], Recipient [2:1348:2418] 2025-03-31T16:19:24.258204Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-31T16:19:24.258351Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-31T16:19:24.258412Z node 2 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] ... 1714:2454], 0} sends rowCount# 2, bytes# 64, quota rows left# 999, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-03-31T16:19:26.672039Z node 2 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[2:1714:2454], 0} finished in read 2025-03-31T16:19:26.672094Z node 2 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2025-03-31T16:19:26.672116Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit ExecuteRead 2025-03-31T16:19:26.672138Z node 2 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037888 to execution unit CompletedOperations 2025-03-31T16:19:26.672161Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit CompletedOperations 2025-03-31T16:19:26.672200Z node 2 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2025-03-31T16:19:26.672219Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit CompletedOperations 2025-03-31T16:19:26.672246Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:6] at 72075186224037888 has finished 2025-03-31T16:19:26.672282Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-03-31T16:19:26.672372Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-03-31T16:19:26.673265Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [2:1714:2454], Recipient [2:1282:2390]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-03-31T16:19:26.673327Z node 2 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } 2025-03-31T16:19:26.786948Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jqpg0banemy8ew352swbztfp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDQ4Y2Q2MmYtYzM5Y2FlZTYtNzQzN2EwLTlmYjViY2Ex, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-31T16:19:26.789330Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [2:1732:2455], Recipient [2:1282:2390]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2000 TxId: 18446744073709551615 } LockTxId: 281474976715662 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC RangesSize: 1 2025-03-31T16:19:26.789632Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-03-31T16:19:26.789708Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CheckRead 2025-03-31T16:19:26.789790Z node 2 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-03-31T16:19:26.789832Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckRead 2025-03-31T16:19:26.789870Z node 2 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-03-31T16:19:26.789904Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies 2025-03-31T16:19:26.789953Z node 2 :TX_DATASHARD TRACE: Activated operation [0:7] at 72075186224037888 2025-03-31T16:19:26.790013Z node 2 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-03-31T16:19:26.790053Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-03-31T16:19:26.790090Z node 2 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit ExecuteRead 2025-03-31T16:19:26.790116Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit ExecuteRead 2025-03-31T16:19:26.790239Z node 2 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2000 TxId: 18446744073709551615 } LockTxId: 281474976715662 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2025-03-31T16:19:26.790510Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 281474976715662, counter# 18446744073709551612 for [OwnerId: 72057594046644480, LocalPathId: 2] 2025-03-31T16:19:26.790562Z node 2 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v2000/18446744073709551615 2025-03-31T16:19:26.790605Z node 2 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[2:1732:2455], 0} after executionsCount# 1 2025-03-31T16:19:26.790654Z node 2 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[2:1732:2455], 0} sends rowCount# 1, bytes# 32, quota rows left# 1000, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-03-31T16:19:26.790731Z node 2 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[2:1732:2455], 0} finished in read 2025-03-31T16:19:26.790791Z node 2 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-03-31T16:19:26.790819Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteRead 2025-03-31T16:19:26.790843Z node 2 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 
2025-03-31T16:19:26.790870Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CompletedOperations 2025-03-31T16:19:26.790912Z node 2 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-03-31T16:19:26.790932Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2025-03-31T16:19:26.790970Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:7] at 72075186224037888 has finished 2025-03-31T16:19:26.791008Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-03-31T16:19:26.791128Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-03-31T16:19:26.791900Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [2:1732:2455], Recipient [2:1282:2390]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-03-31T16:19:26.791976Z node 2 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } } 2025-03-31T16:19:26.925155Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jqpg0benaq4g9qk5w74jwbtm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzkyMmQyNDMtNWM1MmViOWMtYTg4Y2IxNzAtYzJlYmFkNDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:19:26.927504Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [2:1756:2456], Recipient [2:1282:2390]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2500 TxId: 18446744073709551615 } LockTxId: 281474976715666 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC RangesSize: 1 2025-03-31T16:19:26.927777Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-03-31T16:19:26.927859Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit CheckRead 2025-03-31T16:19:26.927957Z node 2 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 2025-03-31T16:19:26.927989Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit CheckRead 2025-03-31T16:19:26.928023Z node 2 :TX_DATASHARD TRACE: Add [0:8] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-03-31T16:19:26.928053Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit BuildAndWaitDependencies 2025-03-31T16:19:26.928103Z node 2 :TX_DATASHARD TRACE: Activated operation [0:8] at 72075186224037888 2025-03-31T16:19:26.928149Z node 2 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 2025-03-31T16:19:26.928171Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-03-31T16:19:26.928194Z node 2 :TX_DATASHARD TRACE: Add [0:8] at 72075186224037888 to execution unit ExecuteRead 2025-03-31T16:19:26.928215Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit ExecuteRead 2025-03-31T16:19:26.928336Z node 2 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2500 TxId: 18446744073709551615 } 
LockTxId: 281474976715666 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2025-03-31T16:19:26.928618Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 281474976715666, counter# 1 for [OwnerId: 72057594046644480, LocalPathId: 2] 2025-03-31T16:19:26.928672Z node 2 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v2500/18446744073709551615 2025-03-31T16:19:26.928714Z node 2 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[2:1756:2456], 0} after executionsCount# 1 2025-03-31T16:19:26.928758Z node 2 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[2:1756:2456], 0} sends rowCount# 2, bytes# 64, quota rows left# 999, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-03-31T16:19:26.928830Z node 2 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[2:1756:2456], 0} finished in read 2025-03-31T16:19:26.928928Z node 2 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 2025-03-31T16:19:26.928953Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit ExecuteRead 2025-03-31T16:19:26.928975Z node 2 :TX_DATASHARD TRACE: Add [0:8] at 72075186224037888 to execution unit CompletedOperations 2025-03-31T16:19:26.928997Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit CompletedOperations 2025-03-31T16:19:26.929035Z node 2 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 2025-03-31T16:19:26.929054Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit CompletedOperations 2025-03-31T16:19:26.929079Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:8] at 72075186224037888 has finished 2025-03-31T16:19:26.929130Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-03-31T16:19:26.929247Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-03-31T16:19:26.930257Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [2:1756:2456], Recipient [2:1282:2390]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-03-31T16:19:26.930312Z node 2 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } 2025-03-31T16:19:26.930504Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [2:240:2131], Recipient [2:1282:2390]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715666 LockNode: 1 Status: STATUS_SUBSCRIBED { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } >> DataShardOutOfOrder::TestSnapshotReadPriority [GOOD] >> Cdc::NaN[TopicRunner] [GOOD] >> Cdc::RacyRebootAndSplitWithTxInflight |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> SplitPathTests::WithDatabaseShouldSuccess [GOOD] >> DataShardOutOfOrder::TestOutOfOrderNoBarrierRestartImmediateLongTail [GOOD] >> SplitPathTests::WithoutDatabaseShouldSuccess [GOOD] |89.5%| [TA] $(B)/ydb/core/client/ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.5%| [TA] {RESULT} $(B)/ydb/core/client/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> SplitPathTests::WithoutDatabaseShouldSuccess [GOOD] >> BasicUsage::PropagateSessionClosed [GOOD] >> BasicUsage::ReadMirrored |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> SplitPathTests::WithDatabaseShouldSuccess [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ImmediateBetweenOnline_Init_oo8 [GOOD] Test command err: 2025-03-31T16:19:23.686753Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:19:23.688074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:19:23.688139Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:19:23.696715Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:19:23.697173Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:131:2154] 2025-03-31T16:19:23.697459Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:19:23.708114Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:19:23.754066Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:19:23.754358Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:19:23.755741Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-03-31T16:19:23.755796Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-03-31T16:19:23.755841Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-03-31T16:19:23.756165Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:19:23.756752Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:19:23.756844Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:197:2154] in generation 2 2025-03-31T16:19:23.822826Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:19:23.856666Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-03-31T16:19:23.856847Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:19:23.856965Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-03-31T16:19:23.857007Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-03-31T16:19:23.857042Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-03-31T16:19:23.857075Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:19:23.857286Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:23.857332Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:23.857615Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-03-31T16:19:23.857710Z node 1 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-03-31T16:19:23.857762Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-31T16:19:23.857799Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:19:23.857838Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-03-31T16:19:23.857870Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-31T16:19:23.857903Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-31T16:19:23.857931Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-03-31T16:19:23.857967Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:19:23.858072Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:23.858125Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:23.858180Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-03-31T16:19:23.860995Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:131:2154]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-03-31T16:19:23.861065Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T16:19:23.861150Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-31T16:19:23.861308Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-03-31T16:19:23.861349Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-03-31T16:19:23.861405Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-03-31T16:19:23.861447Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-31T16:19:23.861493Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-03-31T16:19:23.861538Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-03-31T16:19:23.861573Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-03-31T16:19:23.861871Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-03-31T16:19:23.861936Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-03-31T16:19:23.861980Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-03-31T16:19:23.862032Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-03-31T16:19:23.862080Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-03-31T16:19:23.862107Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-03-31T16:19:23.862161Z node 1 
:TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-03-31T16:19:23.862196Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-03-31T16:19:23.862223Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-03-31T16:19:23.879853Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-03-31T16:19:23.879921Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-03-31T16:19:23.879975Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-03-31T16:19:23.880046Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-03-31T16:19:23.880129Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-03-31T16:19:23.880579Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:23.880613Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:23.880656Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-03-31T16:19:23.880768Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:131:2154]: {TEvPlanStep step# 2 MediatorId# 0 TabletID 9437184} 2025-03-31T16:19:23.880797Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-31T16:19:23.880895Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit WaitForPlan 2025-03-31T16:19:23.880929Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-03-31T16:19:23.880964Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit WaitForPlan 2025-03-31T16:19:23.880988Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit PlanQueue 2025-03-31T16:19:23.884795Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 2 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 2 MediatorID: 0 TabletID: 9437184 } 2025-03-31T16:19:23.884866Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:19:23.885054Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:23.885092Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:23.885141Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-31T16:19:23.885176Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:19:23.885207Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-31T16:19:23.885247Z node 1 :TX_DATASHARD DEBUG: Found ready operation [2:1] in PlanQueue unit at 9437184 2025-03-31T16:19:23.885286Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit PlanQueue 2025-03-31T16:19:23.885323Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-03-31T16:19:23.885353Z node 1 :TX_DATASHARD TRACE: 
Advance execution plan for [2:1] at 9437184 executing on unit PlanQueue 2025-03-31T16:19:23.885388Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit LoadTxDetails 2025-03-31T16:19:23.885420Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit LoadTxDetails 2025-03-31T16:19:23.885585Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 2:1 keys extracted: 0 2025-03-31T16:19:23.885621Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-03-31T16:19:23.885664Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit LoadTxDetails 2025-03-31T16:19:23.885697Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-03-31T16:19:23.885720Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit ProtectSchemeEchoes 2025-03-31T16:19:23.885781Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-31T16:19:23.885807Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-03-31T16:19:23.885868Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-31T16:19:23.885901Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit BuildAndWaitDependencies 2025-03-31T16:19:23.885960Z node 1 :TX_DATASHARD TRACE: Operation [2:1] is the new logically complete end at 9437184 2025-03-31T16:19:23.885997Z node 1 :TX_DATASHARD TRACE: Operation [2:1] is the new logically incomplete end at 9437184 2025-03-31T16:19:23.886047Z node 1 :TX_DATASHARD TRACE: Activated operation [2:1] at 9437184 2025-03-31T16:19:23.886098Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-03-31T16:19:23.886121Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1 ... 
134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:19:28.758109Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-03-31T16:19:28.758336Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 6 txid# 116 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 38} 2025-03-31T16:19:28.758364Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:19:28.758400Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 116 2025-03-31T16:19:28.758516Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 6 txid# 119 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 39} 2025-03-31T16:19:28.758540Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:19:28.758598Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 119 2025-03-31T16:19:28.758682Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 6 txid# 122 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 40} 2025-03-31T16:19:28.758726Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:19:28.758745Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 122 2025-03-31T16:19:28.758793Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 6 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2025-03-31T16:19:28.758807Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:19:28.758828Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 143 2025-03-31T16:19:28.758874Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 6 txid# 125 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 41} 2025-03-31T16:19:28.758889Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:19:28.758914Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 125 2025-03-31T16:19:28.758982Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 6 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2025-03-31T16:19:28.758999Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:19:28.759012Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 146 2025-03-31T16:19:28.759068Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 6 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2025-03-31T16:19:28.759110Z node 1 :TX_DATASHARD TRACE: StateWork, processing 
event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:19:28.759129Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2025-03-31T16:19:28.759178Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 6 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-03-31T16:19:28.759205Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:19:28.759228Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 2025-03-31T16:19:28.759282Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 6 txid# 128 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 42} 2025-03-31T16:19:28.759296Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:19:28.759322Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 128 2025-03-31T16:19:28.759380Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 6 txid# 131 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 43} 2025-03-31T16:19:28.759395Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:19:28.759407Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 131 2025-03-31T16:19:28.759443Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 6 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2025-03-31T16:19:28.759467Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:19:28.759481Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 134 2025-03-31T16:19:28.759531Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 6 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2025-03-31T16:19:28.759557Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:19:28.759572Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 137 2025-03-31T16:19:28.759627Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 6 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2025-03-31T16:19:28.759646Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:19:28.759713Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 140 2025-03-31T16:19:28.759773Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:19:28.759835Z node 1 :TX_DATASHARD TRACE: Complete execution for [6:149] at 9437184 on unit CompleteOperation 2025-03-31T16:19:28.759870Z node 1 :TX_DATASHARD DEBUG: Complete [6 : 149] from 9437184 at tablet 
9437184 send result to client [1:99:2134], exec latency: 2 ms, propose latency: 3 ms 2025-03-31T16:19:28.759919Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 6 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2025-03-31T16:19:28.759942Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:19:28.760046Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-03-31T16:19:28.760076Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:19:28.760098Z node 1 :TX_DATASHARD TRACE: Complete execution for [6:151] at 9437184 on unit CompleteOperation 2025-03-31T16:19:28.760129Z node 1 :TX_DATASHARD DEBUG: Complete [6 : 151] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 2 ms, propose latency: 3 ms 2025-03-31T16:19:28.760197Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 6 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2025-03-31T16:19:28.760221Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:19:28.760340Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-03-31T16:19:28.760363Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-03-31T16:19:28.760382Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:19:28.760400Z node 1 :TX_DATASHARD TRACE: Complete execution for [6:152] at 9437184 on unit CompleteOperation 2025-03-31T16:19:28.760434Z node 1 :TX_DATASHARD DEBUG: Complete [6 : 152] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 2 ms, propose latency: 3 ms 2025-03-31T16:19:28.760460Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 6 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2025-03-31T16:19:28.760474Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:19:28.760533Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:19:28.760549Z node 1 :TX_DATASHARD TRACE: Complete execution for [6:154] at 9437184 on unit CompleteOperation 2025-03-31T16:19:28.760577Z node 1 :TX_DATASHARD DEBUG: Complete [6 : 154] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 2 ms, propose latency: 3 ms 2025-03-31T16:19:28.760614Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 6 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2025-03-31T16:19:28.760635Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:19:28.760745Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:234:2227], Recipient [1:345:2313]: {TEvReadSet step# 6 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2025-03-31T16:19:28.760783Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:19:28.760803Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 149 2025-03-31T16:19:28.760904Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:234:2227], Recipient [1:345:2313]: {TEvReadSet step# 6 txid# 151 TabletSource# 
9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2025-03-31T16:19:28.760930Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:19:28.760964Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 151 2025-03-31T16:19:28.761025Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:234:2227], Recipient [1:345:2313]: {TEvReadSet step# 6 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2025-03-31T16:19:28.761057Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:19:28.761081Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2025-03-31T16:19:28.761164Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:234:2227], Recipient [1:345:2313]: {TEvReadSet step# 6 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2025-03-31T16:19:28.761200Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:19:28.761222Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154
>> OperationMapping::IndexBuildRejected
>> OperationMapping::IndexBuildRejected [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed-EvWrite [GOOD]
Test command err:
2025-03-31T16:19:19.756998Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:19:19.757285Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:19:19.757482Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001171/r3tmp/tmpSfEoMx/pdisk_1.dat 2025-03-31T16:19:20.155905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:19:20.201695Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:19:20.240300Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:19:20.240431Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:19:20.251963Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:19:20.336229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:19:20.652991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 ===== UPSERT initial rows 2025-03-31T16:19:20.911846Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:830:2682], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:20.911935Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:841:2687], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:20.912007Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:20.916603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-31T16:19:21.070917Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:844:2690], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-31T16:19:21.152077Z node 1 :TX_PROXY ERROR: Actor# [1:904:2731] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:19:21.455270Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqpg05pe6gh21h3pj0xscrgk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Zjk5OTAzM2QtY2Y2ZTRlNjUtNmFmNjFkZjctNzNjYmY3ODU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:19:21.526540Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqpg0685cf7ccew1qdvt43dn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2ZmMjQ1ZGEtMTYxNmQzMmUtYWE5NGE4OTYtY2NjZjc0N2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ===== Begin SELECT 2025-03-31T16:19:21.930633Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqpg06b3f4z44127kqyygtsz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmM4YjRmYzctYjZkNDczNzAtYTg1NjcyOWEtMjJlYWI5NjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 1 } } ===== UPSERT and commit ... waiting for commit read sets 2025-03-31T16:19:22.022526Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jqpg06px02cen97vq9k35ajf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmM4YjRmYzctYjZkNDczNzAtYTg1NjcyOWEtMjJlYWI5NjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... captured readset ... captured readset ===== restarting tablet 2025-03-31T16:19:22.192328Z node 1 :KQP_COMPUTE WARN: SelfId: [1:1033:2772], Table: `/Root/table-1` ([72057594046644480:2:1]), SessionActorId: [1:968:2772]TEvDeliveryProblem was received from tablet: 72075186224037888 ===== Waiting for commit response ===== Last SELECT 2025-03-31T16:19:22.449867Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jqpg072dfnwq9hcj9va362he, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzVkYmZjMzMtMjNjZWU2NTEtYjMzNjgyNWYtOGY2MDJlZGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 3 } items { uint32_value: 2 } } 2025-03-31T16:19:26.084084Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:101:2147], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:19:26.084348Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:19:26.084403Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001171/r3tmp/tmphH2rvA/pdisk_1.dat 2025-03-31T16:19:26.388966Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:19:26.418458Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:19:26.456704Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:19:26.456833Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:19:26.468474Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:19:26.561141Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:19:26.851257Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 ===== UPSERT initial rows 2025-03-31T16:19:27.114559Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:830:2682], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:27.114642Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:841:2687], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:27.114710Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:27.119528Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-31T16:19:27.274849Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:844:2690], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-31T16:19:27.311850Z node 2 :TX_PROXY ERROR: Actor# [2:904:2731] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:19:27.400467Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqpg0br9cehbd951frcgn8b0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YmZkY2JkNTUtNDUyOTczMjMtYTVhZDZjNjktOTE3MzJmMzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:19:27.505624Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqpg0c1y0p8tmmwxyh0tgcnh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODYwNzcxNGItMmJhYzg5YjYtMWE1ZmY1ZDktYzM1ZmNhYWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ===== Begin SELECT 2025-03-31T16:19:27.945541Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqpg0c58br5p2hn65652zjyy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTFlNjU4NTQtY2I2NTM4ZGMtNmJhNDdjZi0xZDVjNGVjNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 1 } } ===== UPSERT and commit ... waiting for commit read sets 2025-03-31T16:19:28.097478Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jqpg0cjva8js2b03kd27e7cf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTFlNjU4NTQtY2I2NTM4ZGMtNmJhNDdjZi0xZDVjNGVjNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... captured readset ... captured readset ===== restarting tablet ===== Waiting for commit response ===== Last SELECT 2025-03-31T16:19:28.520626Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jqpg0d0dbngczretvrxb0ep0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjYzMDAwMjUtODA1ODgwMDctMzFkMDUwMzAtNDg3MDIzOTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 3 } items { uint32_value: 2 } }
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestSnapshotReadPriority [GOOD]
Test command err:
2025-03-31T16:19:23.465466Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:19:23.465644Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:19:23.465778Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001143/r3tmp/tmpZO8ww8/pdisk_1.dat 2025-03-31T16:19:23.819166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:19:23.826968Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 HANDLE EvProposeTransaction marker# C0 2025-03-31T16:19:23.827031Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 step# 500 Status# 16 SEND to# [1:409:2404] Proxy marker# C1 2025-03-31T16:19:23.861219Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:19:23.901608Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:19:23.901735Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:19:23.913143Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:19:23.986406Z node 1 :TX_COORDINATOR DEBUG: Transaction 1 has been planned 2025-03-31T16:19:23.986542Z node 1 :TX_COORDINATOR DEBUG: Planned transaction 1 for mediator 72057594046382081 tablet 72057594046644480 2025-03-31T16:19:23.986803Z node 1 :TX_COORDINATOR TRACE: Coordinator# 72057594046316545 scheduling step 1000 in 0.500000s at 0.950000s 2025-03-31T16:19:23.987170Z node 1 :TX_COORDINATOR DEBUG: Send from# 72057594046316545 to mediator# 72057594046382081, step# 500, txid# 1 marker# C2 2025-03-31T16:19:23.987231Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 stepId# 500 Status# 17 SEND EvProposeTransactionStatus to# [1:409:2404] Proxy 2025-03-31T16:19:23.989166Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2025-03-31T16:19:23.989254Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 for mediator 72057594046382081 tablet 72057594046644480 removed=1 2025-03-31T16:19:23.989289Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 for mediator 72057594046382081 acknowledged 2025-03-31T16:19:23.989338Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 acknowledged 2025-03-31T16:19:23.994264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:19:24.030824Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:656:2564], Recipient [1:665:2570]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:19:24.031750Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:656:2564], Recipient [1:665:2570]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:19:24.032199Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: 
tablet 72075186224037888 actor [1:665:2570] 2025-03-31T16:19:24.032399Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:19:24.071715Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:656:2564], Recipient [1:665:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:19:24.072566Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:19:24.072687Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:19:24.074296Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:19:24.074378Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:19:24.074431Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:19:24.074769Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:19:24.074917Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:19:24.075005Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:681:2570] in generation 1 2025-03-31T16:19:24.075460Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:19:24.098319Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T16:19:24.098502Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:19:24.098601Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:683:2580] 2025-03-31T16:19:24.098632Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:19:24.098658Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T16:19:24.098687Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:19:24.098878Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:665:2570], Recipient [1:665:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:24.098925Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:24.099178Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T16:19:24.099316Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T16:19:24.099448Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:19:24.099488Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:19:24.099546Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-31T16:19:24.099588Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-31T16:19:24.099648Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-31T16:19:24.099681Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:19:24.099726Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:19:24.099823Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:672:2574], Recipient [1:665:2570]: 
NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:24.099856Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:24.099899Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:661:2567], serverId# [1:672:2574], sessionId# [0:0:0] 2025-03-31T16:19:24.100014Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:672:2574] 2025-03-31T16:19:24.100065Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T16:19:24.100173Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:19:24.100374Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-31T16:19:24.100436Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T16:19:24.100550Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T16:19:24.100601Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-31T16:19:24.100636Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-31T16:19:24.100674Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-03-31T16:19:24.100703Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-31T16:19:24.100980Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-31T16:19:24.101024Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-31T16:19:24.101058Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-31T16:19:24.101080Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-31T16:19:24.101113Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-31T16:19:24.101132Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-31T16:19:24.101152Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-31T16:19:24.101174Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-31T16:19:24.101191Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-31T16:19:24.102252Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:19:24.102300Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-31T16:19:24.102332Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-31T16:19:24.102372Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec 
latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-03-31T16:19:24.102427Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-31T16:19:24.104425Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 281474976715657 HANDLE EvProposeTransaction marker# C0 2025-03-31T16:19:24.104480Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 281474976715657 step# 1000 Status# 16 SEND to# [1:409:2404] Proxy marker# C1 2025-03-31T16:19:24.104812Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:684:2581], Recipient [1:665:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-31T16:19:24.104855Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:19:24.250825Z node 1 :TX_COORDINATOR DEBUG: Transaction 281474976715657 has been planned 2025-03-31T16:19:24.250935Z node 1 :TX_COORDINATOR DEBUG: Planned transaction 281474976715657 for mediator 72057594046382081 tablet 72057594046644480 2025-03-31T16:19:24.251175Z node 1 :TX_COORDINATOR DEBUG: Planned transaction 281474976715657 for ... 000 2025-03-31T16:19:28.645495Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 4000} 2025-03-31T16:19:28.645589Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 270270977, Sender [1:24:2071], Recipient [1:1376:3052]: {TEvNotifyPlanStep TabletId# 72075186224037888 PlanStep# 4000} 2025-03-31T16:19:28.645634Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvNotifyPlanStep 2025-03-31T16:19:28.645679Z node 1 :TX_DATASHARD DEBUG: Notified by mediator time cast with PlanStep# 4000 at tablet 72075186224037888 2025-03-31T16:19:28.645749Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:19:28.647007Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 1 SubscriptionId: 2 LatestStep: 4000 2025-03-31T16:19:28.647101Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 1 TimeBarrier# 4000} 2025-03-31T16:19:28.779354Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715684. Ctx: { TraceId: 01jqpg0d882wzjaattg3nrvth9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTM4YzZhNWYtZWQ4NWVhNGUtZDA4NDJjZDItMmVjZDJkNzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-31T16:19:28.781232Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [1:1439:3091], Recipient [1:1376:3052]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3500 TxId: 18446744073709551615 } LockTxId: 281474976715682 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC RangesSize: 1 2025-03-31T16:19:28.781401Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-03-31T16:19:28.781480Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037888 on unit CheckRead 2025-03-31T16:19:28.781585Z node 1 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037888 is Executed 2025-03-31T16:19:28.781627Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037888 executing on unit CheckRead 2025-03-31T16:19:28.781673Z node 1 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-03-31T16:19:28.781711Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037888 on unit BuildAndWaitDependencies 2025-03-31T16:19:28.781772Z node 1 :TX_DATASHARD TRACE: Activated operation [0:2] at 72075186224037888 2025-03-31T16:19:28.781814Z node 1 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037888 is Executed 2025-03-31T16:19:28.781840Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-03-31T16:19:28.781866Z node 1 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037888 to execution unit ExecuteRead 2025-03-31T16:19:28.781912Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037888 on unit ExecuteRead 2025-03-31T16:19:28.782046Z node 1 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3500 TxId: 18446744073709551615 } LockTxId: 281474976715682 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2025-03-31T16:19:28.782325Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 281474976715682, counter# 18446744073709551612 for [OwnerId: 72057594046644480, LocalPathId: 2] 2025-03-31T16:19:28.782386Z node 1 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3500/18446744073709551615 2025-03-31T16:19:28.782441Z node 1 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[1:1439:3091], 0} after executionsCount# 1 2025-03-31T16:19:28.782495Z node 1 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[1:1439:3091], 0} sends rowCount# 5, bytes# 160, quota rows left# 996, quota bytes left# 5242720, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-03-31T16:19:28.782583Z node 1 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[1:1439:3091], 0} finished in read 2025-03-31T16:19:28.782655Z node 1 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037888 is Executed 2025-03-31T16:19:28.782686Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037888 executing on unit ExecuteRead 2025-03-31T16:19:28.782713Z node 1 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037888 to execution unit CompletedOperations 
2025-03-31T16:19:28.782740Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037888 on unit CompletedOperations 2025-03-31T16:19:28.782803Z node 1 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037888 is Executed 2025-03-31T16:19:28.782827Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037888 executing on unit CompletedOperations 2025-03-31T16:19:28.782856Z node 1 :TX_DATASHARD TRACE: Execution plan for [0:2] at 72075186224037888 has finished 2025-03-31T16:19:28.782898Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-03-31T16:19:28.783002Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-03-31T16:19:28.784112Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [1:1439:3091], Recipient [1:1376:3052]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-03-31T16:19:28.784177Z node 1 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 3 } items { uint32_value: 3 } }, { items { uint32_value: 5 } items { uint32_value: 5 } }, { items { uint32_value: 7 } items { uint32_value: 7 } }, { items { uint32_value: 9 } items { uint32_value: 9 } } 2025-03-31T16:19:28.922815Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 HANDLE TEvAcquireReadStep 2025-03-31T16:19:28.922927Z node 1 :TX_COORDINATOR TRACE: Coordinator# 72057594046316545 scheduling step 4500 in 0.499900s at 4.450000s 2025-03-31T16:19:28.924285Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715685. Ctx: { TraceId: 01jqpg0dcq0f3t41mqrad5a9z6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjBiMTQyNTctMmYwMWYzZGYtODMzOTkyZjctZjQ1ZjVmYTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-31T16:19:28.926301Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [1:1463:3108], Recipient [1:1376:3052]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 4000 TxId: 18446744073709551615 } LockTxId: 281474976715685 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC RangesSize: 1 2025-03-31T16:19:28.926522Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-03-31T16:19:28.926613Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit CheckRead 2025-03-31T16:19:28.926736Z node 1 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-03-31T16:19:28.926786Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit CheckRead 2025-03-31T16:19:28.926840Z node 1 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-03-31T16:19:28.926896Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit BuildAndWaitDependencies 2025-03-31T16:19:28.926960Z node 1 :TX_DATASHARD TRACE: Activated operation [0:3] at 72075186224037888 2025-03-31T16:19:28.927021Z node 1 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-03-31T16:19:28.927048Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-03-31T16:19:28.927072Z node 1 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit ExecuteRead 2025-03-31T16:19:28.927112Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit ExecuteRead 2025-03-31T16:19:28.927264Z node 1 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 4000 TxId: 18446744073709551615 } LockTxId: 281474976715685 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2025-03-31T16:19:28.927611Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 281474976715685, counter# 0 for [OwnerId: 72057594046644480, LocalPathId: 2] 2025-03-31T16:19:28.927671Z node 1 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v4000/18446744073709551615 2025-03-31T16:19:28.927719Z node 1 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[1:1463:3108], 0} after executionsCount# 1 2025-03-31T16:19:28.927787Z node 1 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[1:1463:3108], 0} sends rowCount# 6, bytes# 192, quota rows left# 995, quota bytes left# 5242688, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-03-31T16:19:28.927874Z node 1 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[1:1463:3108], 0} finished in read 2025-03-31T16:19:28.927973Z node 1 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-03-31T16:19:28.928000Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit ExecuteRead 2025-03-31T16:19:28.928026Z node 1 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit CompletedOperations 2025-03-31T16:19:28.928051Z 
node 1 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit CompletedOperations 2025-03-31T16:19:28.928101Z node 1 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-03-31T16:19:28.928123Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit CompletedOperations 2025-03-31T16:19:28.928148Z node 1 :TX_DATASHARD TRACE: Execution plan for [0:3] at 72075186224037888 has finished 2025-03-31T16:19:28.928189Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-03-31T16:19:28.928297Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-03-31T16:19:28.928563Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [1:61:2108], Recipient [1:1376:3052]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715685 LockNode: 1 Status: STATUS_SUBSCRIBED 2025-03-31T16:19:28.929440Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [1:1463:3108], Recipient [1:1376:3052]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-03-31T16:19:28.929527Z node 1 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 3 } items { uint32_value: 3 } }, { items { uint32_value: 5 } items { uint32_value: 5 } }, { items { uint32_value: 7 } items { uint32_value: 7 } }, { items { uint32_value: 9 } items { uint32_value: 9 } }, { items { uint32_value: 11 } items { uint32_value: 11 } }
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestOutOfOrderNoBarrierRestartImmediateLongTail [GOOD]
Test command err:
2025-03-31T16:19:18.576741Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:19:18.576970Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:19:18.577124Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001180/r3tmp/tmpEgbGDN/pdisk_1.dat 2025-03-31T16:19:18.945624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:19:18.976441Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:19:19.015105Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:19:19.015225Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:19:19.026609Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:19:19.105433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:19:19.137160Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:19:19.137897Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:19:19.138168Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-31T16:19:19.138366Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:19:19.174350Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:19:19.174906Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:19:19.174996Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:19:19.176279Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:19:19.176341Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:19:19.176386Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:19:19.176702Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:19:19.176801Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:19:19.176855Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-31T16:19:19.187526Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:19:19.217938Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T16:19:19.218111Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:19:19.218229Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-31T16:19:19.218270Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:19:19.218320Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T16:19:19.218354Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:19:19.218548Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:19.218598Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:19.218833Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T16:19:19.218927Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T16:19:19.219026Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:19:19.219106Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:19:19.219152Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-31T16:19:19.219191Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-31T16:19:19.219224Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-31T16:19:19.219254Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:19:19.219302Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:19:19.219397Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:19.219433Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:19.219475Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-31T16:19:19.219589Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-03-31T16:19:19.219628Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T16:19:19.219720Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:19:19.219928Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-31T16:19:19.219999Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T16:19:19.220088Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T16:19:19.220134Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-31T16:19:19.220210Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-31T16:19:19.220251Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-03-31T16:19:19.220284Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-31T16:19:19.220577Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-31T16:19:19.220638Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-31T16:19:19.220676Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-31T16:19:19.220705Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-31T16:19:19.220746Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-31T16:19:19.220791Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-31T16:19:19.220844Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-31T16:19:19.220878Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-31T16:19:19.220900Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-31T16:19:19.222604Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-31T16:19:19.222660Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:19:19.233329Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:19:19.233388Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-31T16:19:19.233420Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-31T16:19:19.233462Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-03-31T16:19:19.233517Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-31T16:19:19.382020Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:19.382070Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:19.382103Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-31T16:19:19.382491Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:568:2495], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-03-31T16:19:19.382533Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-31T16:19:19.382643Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-31T16:19:19.382692Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-03-31T16:19:19.382732Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-03-31T16:19:19.382762Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-03-31T16:19:19.391326Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T16:19:19.391404Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:19:19.392104Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:19.392150Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:19.392200Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:19:1 ... qpg0dhd6vzg9syw243qspn0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjhjY2YxZmMtMzc0NWI2MzgtZGE1MzI2OWEtMjMzZWY0Yzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Stage [0,0] AST: ( (return (lambda '($1) $1)) ) 2025-03-31T16:19:29.027836Z node 2 :KQP_EXECUTER DEBUG: Create result channelId: 1 from task: 1 with index: 0 2025-03-31T16:19:29.028069Z node 2 :KQP_EXECUTER DEBUG: [ShardsResolver] TxId: 281474976715670. Shard resolve complete, resolved shards: 1 2025-03-31T16:19:29.028144Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1274:2994] TxId: 281474976715670. Ctx: { TraceId: 01jqpg0dhd6vzg9syw243qspn0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjhjY2YxZmMtMzc0NWI2MzgtZGE1MzI2OWEtMjMzZWY0Yzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolved, success: 1, failed: 0 2025-03-31T16:19:29.028215Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1274:2994] TxId: 281474976715670. Ctx: { TraceId: 01jqpg0dhd6vzg9syw243qspn0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjhjY2YxZmMtMzc0NWI2MzgtZGE1MzI2OWEtMjMzZWY0Yzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards on nodes: node 2: [72075186224037888] 2025-03-31T16:19:29.028288Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01jqpg0dhd6vzg9syw243qspn0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjhjY2YxZmMtMzc0NWI2MzgtZGE1MzI2OWEtMjMzZWY0Yzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:19:29.028340Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715670. Ctx: { TraceId: 01jqpg0dhd6vzg9syw243qspn0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjhjY2YxZmMtMzc0NWI2MzgtZGE1MzI2OWEtMjMzZWY0Yzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 1, readonly: true, 1 scan tasks on 1 nodes, localComputeTasks: 0, snapshot: {0, 0} 2025-03-31T16:19:29.028628Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715670. Ctx: { TraceId: 01jqpg0dhd6vzg9syw243qspn0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjhjY2YxZmMtMzc0NWI2MzgtZGE1MzI2OWEtMjMzZWY0Yzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Collect channels updates for task: 1 at actor [2:1278:2994] 2025-03-31T16:19:29.028699Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715670. Ctx: { TraceId: 01jqpg0dhd6vzg9syw243qspn0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjhjY2YxZmMtMzc0NWI2MzgtZGE1MzI2OWEtMjMzZWY0Yzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Sending channels info to compute actor: [2:1278:2994], channels: 1 2025-03-31T16:19:29.028758Z node 2 :KQP_EXECUTER INFO: ActorId: [2:1274:2994] TxId: 281474976715670. Ctx: { TraceId: 01jqpg0dhd6vzg9syw243qspn0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjhjY2YxZmMtMzc0NWI2MzgtZGE1MzI2OWEtMjMzZWY0Yzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 1, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-03-31T16:19:29.028813Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1274:2994] TxId: 281474976715670. Ctx: { TraceId: 01jqpg0dhd6vzg9syw243qspn0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjhjY2YxZmMtMzc0NWI2MzgtZGE1MzI2OWEtMjMzZWY0Yzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1278:2994], 2025-03-31T16:19:29.028887Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1274:2994] TxId: 281474976715670. Ctx: { TraceId: 01jqpg0dhd6vzg9syw243qspn0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjhjY2YxZmMtMzc0NWI2MzgtZGE1MzI2OWEtMjMzZWY0Yzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1278:2994], 2025-03-31T16:19:29.028942Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1274:2994] TxId: 281474976715670. Ctx: { TraceId: 01jqpg0dhd6vzg9syw243qspn0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjhjY2YxZmMtMzc0NWI2MzgtZGE1MzI2OWEtMjMzZWY0Yzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, immediate tx, become ExecuteState 2025-03-31T16:19:29.029743Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1274:2994] TxId: 281474976715670. Ctx: { TraceId: 01jqpg0dhd6vzg9syw243qspn0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjhjY2YxZmMtMzc0NWI2MzgtZGE1MzI2OWEtMjMzZWY0Yzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1278:2994], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { } 2025-03-31T16:19:29.029813Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1274:2994] TxId: 281474976715670. Ctx: { TraceId: 01jqpg0dhd6vzg9syw243qspn0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjhjY2YxZmMtMzc0NWI2MzgtZGE1MzI2OWEtMjMzZWY0Yzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1278:2994], 2025-03-31T16:19:29.029880Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1274:2994] TxId: 281474976715670. Ctx: { TraceId: 01jqpg0dhd6vzg9syw243qspn0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjhjY2YxZmMtMzc0NWI2MzgtZGE1MzI2OWEtMjMzZWY0Yzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1278:2994], 2025-03-31T16:19:29.030166Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [2:1280:2994], Recipient [2:1204:2950]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false KeysSize: 1 2025-03-31T16:19:29.030341Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-03-31T16:19:29.030399Z node 2 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v4003/281474976715667 IncompleteEdge# v{min} UnprotectedReadEdge# v2000/18446744073709551615 ImmediateWriteEdge# v2000/18446744073709551615 ImmediateWriteEdgeReplied# v4003/18446744073709551615 2025-03-31T16:19:29.030436Z node 2 :TX_DATASHARD TRACE: 72075186224037888 changed HEAD read to non-repeatable v5000/18446744073709551615 2025-03-31T16:19:29.030495Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit CheckRead 2025-03-31T16:19:29.030577Z node 2 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-03-31T16:19:29.030614Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit CheckRead 2025-03-31T16:19:29.030651Z node 2 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-03-31T16:19:29.030702Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit BuildAndWaitDependencies 2025-03-31T16:19:29.030752Z node 2 :TX_DATASHARD TRACE: Activated operation [0:3] at 72075186224037888 2025-03-31T16:19:29.030791Z node 2 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-03-31T16:19:29.030813Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-03-31T16:19:29.030833Z node 2 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit ExecuteRead 2025-03-31T16:19:29.030852Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit ExecuteRead 2025-03-31T16:19:29.030947Z node 2 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false } 2025-03-31T16:19:29.031169Z node 2 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[2:1280:2994], 0} after executionsCount# 1 2025-03-31T16:19:29.031225Z node 2 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[2:1280:2994], 0} sends rowCount# 1, bytes# 32, quota rows left# 32766, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-03-31T16:19:29.031310Z node 2 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[2:1280:2994], 0} finished in read 2025-03-31T16:19:29.031362Z node 2 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-03-31T16:19:29.031388Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit ExecuteRead 2025-03-31T16:19:29.031426Z node 2 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit CompletedOperations 2025-03-31T16:19:29.031451Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit 
CompletedOperations 2025-03-31T16:19:29.031498Z node 2 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-03-31T16:19:29.031517Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit CompletedOperations 2025-03-31T16:19:29.031560Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:3] at 72075186224037888 has finished 2025-03-31T16:19:29.031600Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-03-31T16:19:29.032247Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [2:1280:2994], Recipient [2:1204:2950]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-03-31T16:19:29.032309Z node 2 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } 2025-03-31T16:19:29.060497Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1274:2994] TxId: 281474976715670. Ctx: { TraceId: 01jqpg0dhd6vzg9syw243qspn0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjhjY2YxZmMtMzc0NWI2MzgtZGE1MzI2OWEtMjMzZWY0Yzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1278:2994], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 1043 DurationUs: 28000 Tasks { TaskId: 1 CpuTimeUs: 278 FinishTimeMs: 1743437969059 OutputRows: 1 OutputBytes: 5 Tables { TablePath: "/Root/table-1" ReadRows: 1 ReadBytes: 8 AffectedPartitions: 1 } IngressRows: 1 ResultRows: 1 ResultBytes: 5 ComputeCpuTimeUs: 105 BuildCpuTimeUs: 173 HostName: "ghrun-tuhoqst4xe" NodeId: 2 StartTimeMs: 1743437969031 CreateTimeMs: 1743437969029 } MaxMemoryUsage: 1048576 } 2025-03-31T16:19:29.060636Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715670. Ctx: { TraceId: 01jqpg0dhd6vzg9syw243qspn0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjhjY2YxZmMtMzc0NWI2MzgtZGE1MzI2OWEtMjMzZWY0Yzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1278:2994] 2025-03-31T16:19:29.060801Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1274:2994] TxId: 281474976715670. Ctx: { TraceId: 01jqpg0dhd6vzg9syw243qspn0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjhjY2YxZmMtMzc0NWI2MzgtZGE1MzI2OWEtMjMzZWY0Yzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-03-31T16:19:29.060865Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1274:2994] TxId: 281474976715670. Ctx: { TraceId: 01jqpg0dhd6vzg9syw243qspn0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjhjY2YxZmMtMzc0NWI2MzgtZGE1MzI2OWEtMjMzZWY0Yzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.001043s ReadRows: 1 ReadBytes: 8 ru: 1 rate limiter was not found force flag: 1 { items { uint32_value: 7 } items { uint32_value: 4 } } |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> OperationMapping::IndexBuildRejected [GOOD] >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed+EvWrite [GOOD] >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed-EvWrite >> DataShardTxOrder::DelayData [GOOD] >> DataShardOutOfOrder::TestOutOfOrderRestartLocksReorderedWithoutBarrier [GOOD] >> DataShardOutOfOrder::TestPlannedHalfOverloadedSplit+UseSink >> OperationMapping::IndexBuildSuccess [GOOD] >> BsControllerConfig::SelectAllGroups [GOOD] |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> OperationMapping::IndexBuildSuccess [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::DelayData [GOOD] Test command err: 2025-03-31T16:19:19.724476Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:19:19.725832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:19:19.725880Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:19:19.736547Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:19:19.737045Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:131:2154] 2025-03-31T16:19:19.737307Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:19:19.748468Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:19:19.807692Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:19:19.808061Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:19:19.809687Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-03-31T16:19:19.809769Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-03-31T16:19:19.809836Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-03-31T16:19:19.810244Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:19:19.810934Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:19:19.811023Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:197:2154] in generation 2 2025-03-31T16:19:19.873467Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:19:19.903850Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-03-31T16:19:19.904042Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:19:19.904138Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-03-31T16:19:19.904170Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-03-31T16:19:19.904204Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-03-31T16:19:19.904237Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:19:19.904469Z node 1 
:TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:19.904516Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:19.904806Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-03-31T16:19:19.904897Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-03-31T16:19:19.904947Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-31T16:19:19.904980Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:19:19.905019Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-03-31T16:19:19.905050Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-31T16:19:19.905080Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-31T16:19:19.905109Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-03-31T16:19:19.905141Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:19:19.905230Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:19.905436Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:19.905492Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-03-31T16:19:19.908501Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:131:2154]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nK\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\n \000Z\006\010\002\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-03-31T16:19:19.908560Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T16:19:19.908642Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-31T16:19:19.908823Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-03-31T16:19:19.908867Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-03-31T16:19:19.908915Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-03-31T16:19:19.908956Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-31T16:19:19.909012Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-03-31T16:19:19.909057Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-03-31T16:19:19.909087Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-03-31T16:19:19.909410Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-03-31T16:19:19.909470Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-03-31T16:19:19.909505Z node 1 :TX_DATASHARD TRACE: 
Add [0:1] at 9437184 to execution unit FinishPropose 2025-03-31T16:19:19.909534Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-03-31T16:19:19.909572Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-03-31T16:19:19.909598Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-03-31T16:19:19.909631Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-03-31T16:19:19.909674Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-03-31T16:19:19.909699Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-03-31T16:19:19.924076Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-03-31T16:19:19.924143Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-03-31T16:19:19.924176Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-03-31T16:19:19.924234Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-03-31T16:19:19.924295Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-03-31T16:19:19.924816Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:19.924871Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:19.924916Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-03-31T16:19:19.925041Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:131:2154]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-03-31T16:19:19.925071Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-31T16:19:19.925194Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-03-31T16:19:19.925230Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-31T16:19:19.925262Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-03-31T16:19:19.925308Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-03-31T16:19:19.929397Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-03-31T16:19:19.929474Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:19:19.929694Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:19.929729Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:19.929782Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-31T16:19:19.929817Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 
planned 1 2025-03-31T16:19:19.929848Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-31T16:19:19.929886Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-03-31T16:19:19.929922Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-03-31T16:19:19.929961Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-31T16:19:19.929994Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-03-31T16:19:19.930025Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-03-31T16:19:19.930062Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-03-31T16:19:19.930241Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-03-31T16:19:19.930296Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-31T16:19:19.930334Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-03-31T16:19:19.930358Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-03-31T16:19:19.930380Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-03-31T16:19:19.930456Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-31T16:19:19.930480Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-03-31T16:19:19.930508Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-31T16:19:19.930536Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-03-31T16:19:19.930599Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-03-31T16:19:19.930631Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-03-31T16:19:19.930660Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-03-31T16:19:19.930695Z node 1 :TX_D ... 
e 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:506] at 9437184 executing on unit PrepareDataTxInRS 2025-03-31T16:19:30.919181Z node 1 :TX_DATASHARD TRACE: Add [1000005:506] at 9437184 to execution unit LoadAndWaitInRS 2025-03-31T16:19:30.919206Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:506] at 9437184 on unit LoadAndWaitInRS 2025-03-31T16:19:30.919229Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:506] at 9437184 is Executed 2025-03-31T16:19:30.919262Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:506] at 9437184 executing on unit LoadAndWaitInRS 2025-03-31T16:19:30.919291Z node 1 :TX_DATASHARD TRACE: Add [1000005:506] at 9437184 to execution unit ExecuteDataTx 2025-03-31T16:19:30.919309Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:506] at 9437184 on unit ExecuteDataTx 2025-03-31T16:19:30.919752Z node 1 :TX_DATASHARD TRACE: Executed operation [1000005:506] at tablet 9437184 with status COMPLETE 2025-03-31T16:19:30.919832Z node 1 :TX_DATASHARD TRACE: Datashard execution counters for [1000005:506] at 9437184: {NSelectRow: 1, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 1, SelectRowBytes: 81, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-03-31T16:19:30.919901Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:506] at 9437184 is Executed 2025-03-31T16:19:30.919925Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:506] at 9437184 executing on unit ExecuteDataTx 2025-03-31T16:19:30.919949Z node 1 :TX_DATASHARD TRACE: Add [1000005:506] at 9437184 to execution unit CompleteOperation 2025-03-31T16:19:30.919972Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:506] at 9437184 on unit CompleteOperation 2025-03-31T16:19:30.920125Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:506] at 9437184 is DelayComplete 2025-03-31T16:19:30.920183Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:506] at 9437184 executing on unit CompleteOperation 2025-03-31T16:19:30.920220Z node 1 :TX_DATASHARD TRACE: Add [1000005:506] at 9437184 to execution unit CompletedOperations 2025-03-31T16:19:30.920250Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:506] at 9437184 on unit CompletedOperations 2025-03-31T16:19:30.920283Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:506] at 9437184 is Executed 2025-03-31T16:19:30.920315Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:506] at 9437184 executing on unit CompletedOperations 2025-03-31T16:19:30.920344Z node 1 :TX_DATASHARD TRACE: Execution plan for [1000005:506] at 9437184 has finished 2025-03-31T16:19:30.920377Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:19:30.920405Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-31T16:19:30.920464Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000005:507] in PlanQueue unit at 9437184 2025-03-31T16:19:30.920788Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:234:2227], Recipient [1:234:2227]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:30.920829Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:30.920877Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-31T16:19:30.920905Z node 1 :TX_DATASHARD DEBUG: 
GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:19:30.920936Z node 1 :TX_DATASHARD DEBUG: Return cached ready operation [1000005:507] at 9437184 2025-03-31T16:19:30.920979Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit PlanQueue 2025-03-31T16:19:30.921006Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2025-03-31T16:19:30.921029Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit PlanQueue 2025-03-31T16:19:30.921052Z node 1 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit LoadTxDetails 2025-03-31T16:19:30.921082Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit LoadTxDetails 2025-03-31T16:19:30.921614Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000005:507 keys extracted: 1 2025-03-31T16:19:30.921654Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2025-03-31T16:19:30.921691Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit LoadTxDetails 2025-03-31T16:19:30.921717Z node 1 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit FinalizeDataTxPlan 2025-03-31T16:19:30.921754Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit FinalizeDataTxPlan 2025-03-31T16:19:30.921787Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2025-03-31T16:19:30.921820Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit FinalizeDataTxPlan 2025-03-31T16:19:30.921842Z node 1 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-31T16:19:30.921863Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit BuildAndWaitDependencies 2025-03-31T16:19:30.921909Z node 1 :TX_DATASHARD TRACE: Operation [1000005:507] is the new logically complete end at 9437184 2025-03-31T16:19:30.921934Z node 1 :TX_DATASHARD TRACE: Operation [1000005:507] is the new logically incomplete end at 9437184 2025-03-31T16:19:30.921957Z node 1 :TX_DATASHARD TRACE: Activated operation [1000005:507] at 9437184 2025-03-31T16:19:30.921985Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2025-03-31T16:19:30.922004Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit BuildAndWaitDependencies 2025-03-31T16:19:30.922032Z node 1 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit BuildDataTxOutRS 2025-03-31T16:19:30.922064Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit BuildDataTxOutRS 2025-03-31T16:19:30.922115Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2025-03-31T16:19:30.922139Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit BuildDataTxOutRS 2025-03-31T16:19:30.922158Z node 1 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit StoreAndSendOutRS 2025-03-31T16:19:30.922177Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit StoreAndSendOutRS 2025-03-31T16:19:30.922256Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2025-03-31T16:19:30.922279Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit 
StoreAndSendOutRS 2025-03-31T16:19:30.922297Z node 1 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit PrepareDataTxInRS 2025-03-31T16:19:30.922316Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit PrepareDataTxInRS 2025-03-31T16:19:30.922344Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2025-03-31T16:19:30.922367Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit PrepareDataTxInRS 2025-03-31T16:19:30.922385Z node 1 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit LoadAndWaitInRS 2025-03-31T16:19:30.922404Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit LoadAndWaitInRS 2025-03-31T16:19:30.922424Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2025-03-31T16:19:30.922443Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit LoadAndWaitInRS 2025-03-31T16:19:30.922462Z node 1 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit ExecuteDataTx 2025-03-31T16:19:30.922482Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit ExecuteDataTx 2025-03-31T16:19:30.922836Z node 1 :TX_DATASHARD TRACE: Executed operation [1000005:507] at tablet 9437184 with status COMPLETE 2025-03-31T16:19:30.922890Z node 1 :TX_DATASHARD TRACE: Datashard execution counters for [1000005:507] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 11, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-03-31T16:19:30.922960Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is ExecutedNoMoreRestarts 2025-03-31T16:19:30.922989Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit ExecuteDataTx 2025-03-31T16:19:30.923024Z node 1 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit CompleteOperation 2025-03-31T16:19:30.923057Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit CompleteOperation 2025-03-31T16:19:30.923215Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is DelayComplete 2025-03-31T16:19:30.923283Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit CompleteOperation 2025-03-31T16:19:30.923312Z node 1 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit CompletedOperations 2025-03-31T16:19:30.923336Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit CompletedOperations 2025-03-31T16:19:30.923366Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2025-03-31T16:19:30.923388Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit CompletedOperations 2025-03-31T16:19:30.923411Z node 1 :TX_DATASHARD TRACE: Execution plan for [1000005:507] at 9437184 has finished 2025-03-31T16:19:30.923437Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:19:30.923477Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-31T16:19:30.923508Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-31T16:19:30.923548Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has 
no ready operations at 9437184 2025-03-31T16:19:30.937970Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000005 txid# 506 txid# 507} 2025-03-31T16:19:30.938036Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 1000005} 2025-03-31T16:19:30.938099Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:19:30.938146Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:506] at 9437184 on unit CompleteOperation 2025-03-31T16:19:30.938221Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 506] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:19:30.938267Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:19:30.938421Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:19:30.938520Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:507] at 9437184 on unit CompleteOperation 2025-03-31T16:19:30.938574Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 507] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:19:30.938606Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::SelectAllGroups [GOOD] Test command err: 2025-03-31T16:19:29.444906Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-03-31T16:19:29.458829Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-03-31T16:19:29.462304Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-03-31T16:19:29.465132Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:19:29.466319Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-03-31T16:19:29.466983Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-03-31T16:19:29.467680Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:497} Handle TEvInterconnect::TEvNodesInfo 2025-03-31T16:19:29.468103Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-03-31T16:19:29.481650Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-03-31T16:19:29.481900Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-03-31T16:19:29.483720Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-03-31T16:19:29.483937Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-03-31T16:19:29.484067Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-03-31T16:19:29.484145Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-03-31T16:19:29.691523Z node 1 
:BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.149626s 2025-03-31T16:19:29.691684Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.149837s |89.5%| [TA] $(B)/ydb/core/grpc_services/ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.6%| [TA] {RESULT} $(B)/ydb/core/grpc_services/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> DataShardOutOfOrder::TestImmediateQueueThenSplit+UseSink [GOOD] >> DataShardOutOfOrder::TestImmediateQueueThenSplit-UseSink >> TKeyValueTest::TestWriteReadWithRestartsThenResponseOkNewApi >> TKeyValueTest::TestRewriteThenLastValue >> TKeyValueTest::TestInlineWriteReadRangeLimitThenLimitWorks >> TKeyValueTest::TestWriteReadRangeDataLimitThenLimitWorks >> TKeyValueTest::TestInlineWriteReadWithRestartsWithNotCorrectUTF8NewApi >> TKeyValueTest::TestWriteReadPatchRead >> TKeyValueTest::TestCleanUpDataOnEmptyTablet >> TKeyValueTest::TestEmptyWriteReadDeleteWithRestartsThenResponseOk >> TKeyValueTest::TestWriteReadDeleteWithRestartsThenResponseOk >> KeyValueReadStorage::ReadOk [GOOD] >> KeyValueReadStorage::ReadNotWholeBlobOk [GOOD] >> KeyValueReadStorage::ReadOneItemError [GOOD] >> TKeyValueTest::TestWriteReadWithRestartsThenResponseOk >> TKeyValueTest::TestWriteTrimWithRestartsThenResponseOk >> TKeyValueTest::TestRenameWorks >> TKeyValueTest::TestWriteReadRangeLimitThenLimitWorks >> TKeyValueTest::TestWriteReadPatchRead [GOOD] >> TKeyValueTest::TestWriteReadDeleteWithRestartsThenResponseOkWithNewApi ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> KeyValueReadStorage::ReadOneItemError [GOOD] Test command err: 2025-03-31T16:19:33.192711Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2025-03-31T16:19:33.195164Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:492} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 2025-03-31T16:19:33.209947Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2025-03-31T16:19:33.210033Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:492} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 2025-03-31T16:19:33.216394Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2025-03-31T16:19:33.216499Z 1 00h00m00.000000s :KEYVALUE ERROR: {KV317@keyvalue_storage_read_request.cpp:310} Unexpected EvGetResult. 
KeyValue# 1 Status# OK Id# [1:2:3:2:0:1:0] ResponseStatus# ERROR Deadline# 586524-01-19T08:01:49.551615Z Now# 1970-01-01T00:00:00.000000Z SentAt# 1970-01-01T00:00:00.000000Z GotAt# 2025-03-31T16:19:33.216217Z ErrorReason# >> YdbLogStore::LogTable [GOOD] >> YdbMonitoring::SelfCheck >> DataShardTxOrder::ZigZag_oo [GOOD] >> Cdc::DropColumn [GOOD] >> Cdc::DropIndex >> KeyValueReadStorage::ReadRangeOk1Key [GOOD] >> KeyValueReadStorage::ReadRangeOk [GOOD] >> KeyValueReadStorage::ReadRangeNoData [GOOD] >> Cdc::SplitTopicPartition_TopicAutoPartitioning [GOOD] >> Cdc::ShouldDeliverChangesOnSplitMerge ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> KeyValueReadStorage::ReadRangeNoData [GOOD] Test command err: 2025-03-31T16:19:35.015693Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2025-03-31T16:19:35.017852Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:492} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 2025-03-31T16:19:35.023193Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 2 ErrorReason# ReadRequestCookie# 0 2025-03-31T16:19:35.023247Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:492} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 2025-03-31T16:19:35.028236Z 1 00h00m00.000000s :KEYVALUE INFO: {KV320@keyvalue_storage_read_request.cpp:122} Inline read request KeyValue# 1 Status# OK 2025-03-31T16:19:35.028279Z 1 00h00m00.000000s :KEYVALUE DEBUG: {KV322@keyvalue_storage_read_request.cpp:134} Expected OK or UNKNOWN and given OK readCount# 0 2025-03-31T16:19:35.028320Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:492} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ZigZag_oo [GOOD] Test command err: 2025-03-31T16:19:15.937564Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:19:15.938921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:19:15.938978Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:19:15.948133Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:19:15.948629Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:131:2154] 2025-03-31T16:19:15.949012Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:19:15.960502Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:19:16.003783Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:19:16.004151Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:19:16.005912Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-03-31T16:19:16.006009Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-03-31T16:19:16.006067Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-03-31T16:19:16.006441Z node 1 
:TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:19:16.007122Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:19:16.007206Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:197:2154] in generation 2 2025-03-31T16:19:16.077472Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:19:16.119745Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-03-31T16:19:16.119996Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:19:16.120112Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-03-31T16:19:16.120160Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-03-31T16:19:16.120194Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-03-31T16:19:16.120226Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:19:16.120421Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:16.120462Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:16.120716Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-03-31T16:19:16.120800Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-03-31T16:19:16.120844Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-31T16:19:16.120877Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:19:16.120916Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-03-31T16:19:16.120945Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-31T16:19:16.120975Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-31T16:19:16.121029Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-03-31T16:19:16.121056Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:19:16.121136Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:16.121175Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:16.121221Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-03-31T16:19:16.123295Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:131:2154]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\004\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-03-31T16:19:16.123360Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T16:19:16.123444Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 
2025-03-31T16:19:16.123601Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-03-31T16:19:16.123657Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-03-31T16:19:16.123716Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-03-31T16:19:16.123767Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-31T16:19:16.123813Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-03-31T16:19:16.123865Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-03-31T16:19:16.123899Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-03-31T16:19:16.124234Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-03-31T16:19:16.124276Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-03-31T16:19:16.124306Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-03-31T16:19:16.124334Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-03-31T16:19:16.124377Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-03-31T16:19:16.124403Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-03-31T16:19:16.124431Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-03-31T16:19:16.124480Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-03-31T16:19:16.124527Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-03-31T16:19:16.137106Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-03-31T16:19:16.137172Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-03-31T16:19:16.137209Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-03-31T16:19:16.137266Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-03-31T16:19:16.137336Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-03-31T16:19:16.137770Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:16.137822Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:16.137863Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-03-31T16:19:16.137972Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:131:2154]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-03-31T16:19:16.137997Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-31T16:19:16.138101Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-03-31T16:19:16.138131Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 
2025-03-31T16:19:16.138163Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-03-31T16:19:16.138190Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-03-31T16:19:16.141318Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-03-31T16:19:16.141403Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:19:16.141600Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:16.141630Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:16.141677Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-31T16:19:16.141709Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:19:16.141739Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-31T16:19:16.141771Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-03-31T16:19:16.141800Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-03-31T16:19:16.141844Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-31T16:19:16.141873Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-03-31T16:19:16.141901Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-03-31T16:19:16.141932Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-03-31T16:19:16.142064Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-03-31T16:19:16.142104Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-31T16:19:16.142131Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-03-31T16:19:16.142148Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-03-31T16:19:16.142164Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-03-31T16:19:16.142218Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-31T16:19:16.142233Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-03-31T16:19:16.142259Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-31T16:19:16.142290Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-03-31T16:19:16.142337Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-03-31T16:19:16.142367Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-03-31T16:19:16.142404Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-03-31T16:19:16.142432Z node 1 :TX_DATA ... 
aitInRS 2025-03-31T16:19:34.498028Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-03-31T16:19:34.498048Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit LoadAndWaitInRS 2025-03-31T16:19:34.498071Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit ExecuteDataTx 2025-03-31T16:19:34.498096Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit ExecuteDataTx 2025-03-31T16:19:34.498587Z node 6 :TX_DATASHARD TRACE: Executed operation [1000016:45] at tablet 9437185 with status COMPLETE 2025-03-31T16:19:34.498665Z node 6 :TX_DATASHARD TRACE: Datashard execution counters for [1000016:45] at 9437185: {NSelectRow: 2, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 2, SelectRowBytes: 16, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-03-31T16:19:34.498746Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-03-31T16:19:34.498773Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit ExecuteDataTx 2025-03-31T16:19:34.498799Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit CompleteOperation 2025-03-31T16:19:34.498824Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit CompleteOperation 2025-03-31T16:19:34.499053Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is DelayComplete 2025-03-31T16:19:34.499108Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit CompleteOperation 2025-03-31T16:19:34.499150Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit CompletedOperations 2025-03-31T16:19:34.499187Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit CompletedOperations 2025-03-31T16:19:34.499223Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-03-31T16:19:34.499247Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit CompletedOperations 2025-03-31T16:19:34.499277Z node 6 :TX_DATASHARD TRACE: Execution plan for [1000016:45] at 9437185 has finished 2025-03-31T16:19:34.499320Z node 6 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:19:34.499358Z node 6 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437185 2025-03-31T16:19:34.499406Z node 6 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437185 has no attached operations 2025-03-31T16:19:34.499450Z node 6 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437185 2025-03-31T16:19:34.499702Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [6:234:2227], Recipient [6:234:2227]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:34.499743Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:34.499792Z node 6 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-31T16:19:34.499823Z node 6 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:19:34.499865Z node 6 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-31T16:19:34.499895Z node 6 :TX_DATASHARD DEBUG: Found ready operation [1000016:45] in 
PlanQueue unit at 9437184 2025-03-31T16:19:34.499922Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit PlanQueue 2025-03-31T16:19:34.499949Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-03-31T16:19:34.499973Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit PlanQueue 2025-03-31T16:19:34.499999Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit LoadTxDetails 2025-03-31T16:19:34.500028Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit LoadTxDetails 2025-03-31T16:19:34.500687Z node 6 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000016:45 keys extracted: 2 2025-03-31T16:19:34.500731Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-03-31T16:19:34.500761Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit LoadTxDetails 2025-03-31T16:19:34.500787Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit FinalizeDataTxPlan 2025-03-31T16:19:34.500813Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit FinalizeDataTxPlan 2025-03-31T16:19:34.500849Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-03-31T16:19:34.500871Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit FinalizeDataTxPlan 2025-03-31T16:19:34.500890Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-31T16:19:34.500914Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit BuildAndWaitDependencies 2025-03-31T16:19:34.500977Z node 6 :TX_DATASHARD TRACE: Operation [1000016:45] is the new logically complete end at 9437184 2025-03-31T16:19:34.501009Z node 6 :TX_DATASHARD TRACE: Operation [1000016:45] is the new logically incomplete end at 9437184 2025-03-31T16:19:34.501037Z node 6 :TX_DATASHARD TRACE: Activated operation [1000016:45] at 9437184 2025-03-31T16:19:34.501077Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-03-31T16:19:34.501100Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit BuildAndWaitDependencies 2025-03-31T16:19:34.501122Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit BuildDataTxOutRS 2025-03-31T16:19:34.501147Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit BuildDataTxOutRS 2025-03-31T16:19:34.501190Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-03-31T16:19:34.501211Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit BuildDataTxOutRS 2025-03-31T16:19:34.501233Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit StoreAndSendOutRS 2025-03-31T16:19:34.501255Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit StoreAndSendOutRS 2025-03-31T16:19:34.501280Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-03-31T16:19:34.501304Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit StoreAndSendOutRS 2025-03-31T16:19:34.501326Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit PrepareDataTxInRS 2025-03-31T16:19:34.501349Z node 6 :TX_DATASHARD TRACE: 
Trying to execute [1000016:45] at 9437184 on unit PrepareDataTxInRS 2025-03-31T16:19:34.501376Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-03-31T16:19:34.501398Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit PrepareDataTxInRS 2025-03-31T16:19:34.501421Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit LoadAndWaitInRS 2025-03-31T16:19:34.501444Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit LoadAndWaitInRS 2025-03-31T16:19:34.501469Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-03-31T16:19:34.501492Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit LoadAndWaitInRS 2025-03-31T16:19:34.501514Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit ExecuteDataTx 2025-03-31T16:19:34.501542Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit ExecuteDataTx 2025-03-31T16:19:34.501894Z node 6 :TX_DATASHARD TRACE: Executed operation [1000016:45] at tablet 9437184 with status COMPLETE 2025-03-31T16:19:34.501947Z node 6 :TX_DATASHARD TRACE: Datashard execution counters for [1000016:45] at 9437184: {NSelectRow: 2, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 2, SelectRowBytes: 16, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-03-31T16:19:34.501994Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-03-31T16:19:34.502019Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit ExecuteDataTx 2025-03-31T16:19:34.502041Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit CompleteOperation 2025-03-31T16:19:34.502065Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit CompleteOperation 2025-03-31T16:19:34.502260Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is DelayComplete 2025-03-31T16:19:34.502289Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit CompleteOperation 2025-03-31T16:19:34.502317Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit CompletedOperations 2025-03-31T16:19:34.502347Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit CompletedOperations 2025-03-31T16:19:34.502382Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-03-31T16:19:34.502403Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit CompletedOperations 2025-03-31T16:19:34.502427Z node 6 :TX_DATASHARD TRACE: Execution plan for [1000016:45] at 9437184 has finished 2025-03-31T16:19:34.502456Z node 6 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:19:34.502480Z node 6 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-31T16:19:34.502508Z node 6 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-31T16:19:34.502536Z node 6 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-31T16:19:34.515876Z node 6 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437185 step# 1000016 txid# 45} 2025-03-31T16:19:34.515953Z node 6 :TX_DATASHARD DEBUG: Sending 
'{TEvPlanStepAccepted TabletId# 9437185 step# 1000016} 2025-03-31T16:19:34.516019Z node 6 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437185 2025-03-31T16:19:34.516075Z node 6 :TX_DATASHARD TRACE: Complete execution for [1000016:45] at 9437185 on unit CompleteOperation 2025-03-31T16:19:34.516148Z node 6 :TX_DATASHARD DEBUG: Complete [1000016 : 45] from 9437185 at tablet 9437185 send result to client [6:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:19:34.516195Z node 6 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 2025-03-31T16:19:34.516439Z node 6 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000016 txid# 45} 2025-03-31T16:19:34.516474Z node 6 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 1000016} 2025-03-31T16:19:34.516516Z node 6 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:19:34.516541Z node 6 :TX_DATASHARD TRACE: Complete execution for [1000016:45] at 9437184 on unit CompleteOperation 2025-03-31T16:19:34.516582Z node 6 :TX_DATASHARD DEBUG: Complete [1000016 : 45] from 9437184 at tablet 9437184 send result to client [6:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:19:34.516604Z node 6 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 >> TGroupMapperTest::ReassignGroupTest3dc [GOOD] >> DataShardOutOfOrder::TestPlannedHalfOverloadedSplit+UseSink [GOOD] >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed-EvWrite [GOOD] >> TKeyValueTest::TestWrite200KDeleteThenResponseError |89.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::ReassignGroupTest3dc [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed-EvWrite [GOOD] Test command err: 2025-03-31T16:19:27.973427Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:19:27.973575Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:19:27.973690Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001102/r3tmp/tmptni2Ck/pdisk_1.dat 2025-03-31T16:19:28.325633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:19:28.369545Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:19:28.411771Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:19:28.411934Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:19:28.423387Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:19:28.505445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:19:28.540426Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:19:28.541359Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:19:28.541758Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-31T16:19:28.541968Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:19:28.584300Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:19:28.585034Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:19:28.585178Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:19:28.586670Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:19:28.586743Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:19:28.586796Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:19:28.587226Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:19:28.587385Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:19:28.587502Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-31T16:19:28.598235Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:19:28.620270Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T16:19:28.620505Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:19:28.620668Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-31T16:19:28.620708Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:19:28.620746Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T16:19:28.620784Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:19:28.621039Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:28.621094Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:28.621432Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T16:19:28.621544Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T16:19:28.621695Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:19:28.621759Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:19:28.621812Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-31T16:19:28.621857Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-31T16:19:28.621890Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-31T16:19:28.621928Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:19:28.621977Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:19:28.622085Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:28.622121Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:28.622169Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-31T16:19:28.622268Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-03-31T16:19:28.622312Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T16:19:28.622438Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:19:28.622749Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-31T16:19:28.622819Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T16:19:28.622974Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T16:19:28.623040Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-31T16:19:28.623101Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-31T16:19:28.623144Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
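Both the propose path ("Propose transaction complete txid ... send to client, exec latency: ... ms, propose latency: ... ms, status: ...") and the completion path ("Complete [step : txid] from ... send result to client [...]") report per-transaction latencies in these logs. A throwaway sketch, assuming only those two phrasings (hypothetical helper, not a ya or YDB tool), to surface the slowest transactions in a run:

import re

# Hypothetical helper: collect (tablet, exec_ms, propose_ms) from the
# latency fields that both propose and completion entries carry.
LAT_RE = re.compile(
    r"at tablet (\d+) send (?:to client|result to client \[[\d:]+\]), "
    r"exec latency: (\d+) ms, propose latency: (\d+) ms")

def slowest(text, top=10):
    rows = [(int(t), int(e), int(p)) for t, e, p in LAT_RE.findall(text)]
    return sorted(rows, key=lambda r: r[1] + r[2], reverse=True)[:top]

In the excerpt above every transaction reports 0-1 ms, so this is mostly useful for spotting outliers in much longer sanitizer runs.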
2025-03-31T16:19:28.623177Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-31T16:19:28.623488Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-31T16:19:28.623547Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-31T16:19:28.623592Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-31T16:19:28.623629Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-31T16:19:28.623680Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-31T16:19:28.623709Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-31T16:19:28.623748Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-31T16:19:28.623787Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-31T16:19:28.623812Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-31T16:19:28.625744Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-31T16:19:28.625801Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:19:28.636492Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:19:28.636554Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-31T16:19:28.636588Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-31T16:19:28.636647Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-03-31T16:19:28.636701Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-31T16:19:28.789677Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:28.789748Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:28.789789Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-31T16:19:28.790192Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:568:2495], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-03-31T16:19:28.790238Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-31T16:19:28.790350Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-31T16:19:28.790392Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-03-31T16:19:28.790437Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-03-31T16:19:28.790471Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-03-31T16:19:28.794806Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T16:19:28.794878Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:19:28.795572Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:28.795617Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:28.795692Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:19:2 ... ], 2025-03-31T16:19:35.958323Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2824] TxId: 281474976715665. Ctx: { TraceId: 01jqpg0m3647h5f8b3tx5zhjje, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGIyMzM2ZjItNDU4NWQ0MzUtNWQ2ZDNjNjEtMjU4MDgwZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [2:1050:2846], CA [2:1051:2847], 2025-03-31T16:19:35.958467Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2824] TxId: 281474976715665. Ctx: { TraceId: 01jqpg0m3647h5f8b3tx5zhjje, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGIyMzM2ZjItNDU4NWQ0MzUtNWQ2ZDNjNjEtMjU4MDgwZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1050:2846], task: 6, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 446 Tasks { TaskId: 6 StageId: 5 CpuTimeUs: 209 FinishTimeMs: 1743437975957 InputRows: 2 InputBytes: 7 OutputRows: 2 OutputBytes: 7 ComputeCpuTimeUs: 128 BuildCpuTimeUs: 81 HostName: "ghrun-tuhoqst4xe" NodeId: 2 CreateTimeMs: 1743437975947 } MaxMemoryUsage: 1048576 } 2025-03-31T16:19:35.958497Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715665. Ctx: { TraceId: 01jqpg0m3647h5f8b3tx5zhjje, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGIyMzM2ZjItNDU4NWQ0MzUtNWQ2ZDNjNjEtMjU4MDgwZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1050:2846] 2025-03-31T16:19:35.958551Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2824] TxId: 281474976715665. Ctx: { TraceId: 01jqpg0m3647h5f8b3tx5zhjje, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGIyMzM2ZjItNDU4NWQ0MzUtNWQ2ZDNjNjEtMjU4MDgwZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1051:2847], 2025-03-31T16:19:35.958572Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2824] TxId: 281474976715665. Ctx: { TraceId: 01jqpg0m3647h5f8b3tx5zhjje, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGIyMzM2ZjItNDU4NWQ0MzUtNWQ2ZDNjNjEtMjU4MDgwZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1051:2847], 2025-03-31T16:19:35.958750Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2824] TxId: 281474976715665. Ctx: { TraceId: 01jqpg0m3647h5f8b3tx5zhjje, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGIyMzM2ZjItNDU4NWQ0MzUtNWQ2ZDNjNjEtMjU4MDgwZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1051:2847], task: 7, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 402 DurationUs: 1000 Tasks { TaskId: 7 StageId: 6 CpuTimeUs: 178 FinishTimeMs: 1743437975958 InputRows: 2 InputBytes: 7 OutputRows: 2 OutputBytes: 7 ResultRows: 2 ResultBytes: 7 ComputeCpuTimeUs: 132 BuildCpuTimeUs: 46 HostName: "ghrun-tuhoqst4xe" NodeId: 2 StartTimeMs: 1743437975957 CreateTimeMs: 1743437975947 } MaxMemoryUsage: 1048576 } 2025-03-31T16:19:35.958787Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715665. Ctx: { TraceId: 01jqpg0m3647h5f8b3tx5zhjje, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGIyMzM2ZjItNDU4NWQ0MzUtNWQ2ZDNjNjEtMjU4MDgwZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1051:2847] 2025-03-31T16:19:35.961350Z node 2 :KQP_EXECUTER INFO: ActorId: [2:1039:2824] TxId: 281474976715665. Ctx: { TraceId: 01jqpg0m3647h5f8b3tx5zhjje, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGIyMzM2ZjItNDU4NWQ0MzUtNWQ2ZDNjNjEtMjU4MDgwZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Full stats: { CpuTimeUs: 11302 DurationUs: 1743437973954018 Tables { TablePath: "/Root/table-1" ReadRows: 1 ReadBytes: 8 AffectedPartitions: 1 } Tables { TablePath: "/Root/table-2" ReadRows: 1 ReadBytes: 8 AffectedPartitions: 1 } ExecuterCpuTimeUs: 6947 StartTimeMs: 2004 FinishTimeMs: 1743437975958 Stages { StageId: 5 StageGuid: "af6f9afb-e414f3b0-60054f8f-f1f4caed" Program: "(\n(return (lambda \'($1) (FromFlow (Take (ToFlow $1) (Uint64 \'\"1001\")))))\n)\n" ComputeActors { CpuTimeUs: 446 Tasks { TaskId: 6 StageId: 5 CpuTimeUs: 209 FinishTimeMs: 1743437975957 InputRows: 2 InputBytes: 7 OutputRows: 2 OutputBytes: 7 ComputeCpuTimeUs: 128 BuildCpuTimeUs: 81 HostName: "ghrun-tuhoqst4xe" NodeId: 2 CreateTimeMs: 1743437975947 } MaxMemoryUsage: 1048576 } BaseTimeMs: 1743437975953 } Stages { StageGuid: "8fb1e881-351c03ee-5741c6c0-ab63ea7a" Program: "(\n(return (lambda \'($1) (block \'(\n (let $2 (lambda \'($3) (Member $3 \'\"key\") (Member $3 \'\"value\")))\n (return (FromFlow (ExpandMap (Take (ToFlow $1) (Uint64 \'\"1001\")) $2)))\n))))\n)\n" BaseTimeMs: 1743437975953 } Stages { StageId: 3 StageGuid: "ae3d00a9-f64cd0cc-6fb0e770-2357ca87" Program: "(\n(return (lambda \'($1) (FromFlow (WideTop (ToFlow $1) (Uint64 \'\"1001\") \'(\'(\'0 (Bool \'true)))))))\n)\n" BaseTimeMs: 1743437975953 } Stages { StageId: 2 StageGuid: "ee7759e-63fe5b2a-7266daab-b198087a" Program: "(\n(return (lambda \'($1) (block \'(\n (let $2 (lambda \'($3) (Member $3 \'\"key\") (Member $3 \'\"value\")))\n (return (FromFlow (ExpandMap (Take (ToFlow $1) (Uint64 \'\"1001\")) $2)))\n))))\n)\n" BaseTimeMs: 1743437975953 } Stages { StageId: 4 StageGuid: "c21d1738-217fd474-19ee5b7d-32bafc39" Program: "(\n(return (lambda \'($1 $2) (block \'(\n (let $3 (lambda \'($6 $7) (AsStruct \'(\'\"key\" $6) \'(\'\"value\" $7))))\n (let $4 (Sort (Extend (NarrowMap (ToFlow $1) $3) (NarrowMap (ToFlow $2) $3)) (Bool \'true) (lambda \'($8) (Member $8 \'\"key\"))))\n (let $5 (lambda \'($9) 
(Member $9 \'\"key\") (Member $9 \'\"value\")))\n (return (FromFlow (ExpandMap $4 $5)))\n))))\n)\n" BaseTimeMs: 1743437975953 } Stages { StageId: 6 StageGuid: "23dac3a9-42229cb6-8c07aef4-ae2217e" Program: "(\n(return (lambda \'($1) (FromFlow (NarrowMap (Take (ToFlow $1) (Uint64 \'\"1001\")) (lambda \'($2 $3) (AsStruct \'(\'\"key\" $2) \'(\'\"value\" $3)))))))\n)\n" BaseTimeMs: 1743437975953 } Stages { StageId: 1 StageGuid: "2003a6b0-e416708d-bc41dc9d-57397810" Program: "(\n(return (lambda \'($1) (FromFlow (WideTop (ToFlow $1) (Uint64 \'\"1001\") \'(\'(\'0 (Bool \'true)))))))\n)\n" BaseTimeMs: 1743437975953 } TxPlansWithStats: "{\"Node Type\":\"Phase\",\"PlanNodeId\":17,\"Plans\":[{\"Node Type\":\"ResultSet\",\"PlanNodeId\":16,\"PlanNodeType\":\"ResultSet\",\"Plans\":[{\"Node Type\":\"Limit\",\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":14}],\"Limit\":\"1001\",\"Name\":\"Limit\"}],\"PlanNodeId\":15,\"Plans\":[{\"Node Type\":\"UnionAll\",\"PlanNodeId\":14,\"PlanNodeType\":\"Connection\",\"Plans\":[{\"Node Type\":\"Limit\",\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":12}],\"Limit\":\"1001\",\"Name\":\"Limit\"}],\"PlanNodeId\":13,\"Plans\":[{\"Node Type\":\"Merge\",\"PlanNodeId\":12,\"PlanNodeType\":\"Connection\",\"Plans\":[{\"Node Type\":\"Sort-Union\",\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Name\":\"Sort\",\"SortBy\":\"row.key\"},{\"Inputs\":[{\"ExternalPlanNodeId\":10},{\"ExternalPlanNodeId\":5}],\"Name\":\"Union\"}],\"PlanNodeId\":11,\"Plans\":[{\"Node Type\":\"UnionAll\",\"PlanNodeId\":10,\"PlanNodeType\":\"Connection\",\"Plans\":[{\"Node Type\":\"Top\",\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":8}],\"Limit\":\"1001\",\"Name\":\"Top\",\"TopBy\":\"row.key\"}],\"PlanNodeId\":9,\"Plans\":[{\"Node Type\":\"UnionAll\",\"PlanNodeId\":8,\"PlanNodeType\":\"Connection\",\"Plans\":[{\"Node Type\":\"Limit\",\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":6}],\"Limit\":\"1001\",\"Name\":\"Limit\"}],\"PlanNodeId\":7,\"Plans\":[{\"Node Type\":\"TablePointLookup\",\"Operators\":[{\"E-Cost\":\"No estimate\",\"E-Rows\":\"No estimate\",\"E-Size\":\"No estimate\",\"Inputs\":[],\"Name\":\"TablePointLookup\",\"Path\":\"\\/Root\\/table-1\",\"ReadColumns\":[\"key\",\"value\"],\"ReadLimit\":\"1001\",\"ReadRange\":[\"key (3)\"],\"Scan\":\"Sequential\",\"Table\":\"table-1\"}],\"PlanNodeId\":6,\"StageGuid\":\"\",\"Tables\":[\"table-1\"]}],\"StageGuid\":\"8fb1e881-351c03ee-5741c6c0-ab63ea7a\",\"Stats\":{\"BaseTimeMs\":1743437975953,\"FinishedTasks\":0,\"PhysicalStageId\":0,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"StageGuid\":\"\"}],\"StageGuid\":\"2003a6b0-e416708d-bc41dc9d-57397810\",\"Stats\":{\"BaseTimeMs\":1743437975953,\"FinishedTasks\":0,\"PhysicalStageId\":1,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"StageGuid\":\"\"},{\"Node Type\":\"UnionAll\",\"PlanNodeId\":5,\"PlanNodeType\":\"Connection\",\"Plans\":[{\"Node Type\":\"Top\",\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":3}],\"Limit\":\"1001\",\"Name\":\"Top\",\"TopBy\":\"row.key\"}],\"PlanNodeId\":4,\"Plans\":[{\"Node Type\":\"UnionAll\",\"PlanNodeId\":3,\"PlanNodeType\":\"Connection\",\"Plans\":[{\"Node Type\":\"Limit\",\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":1}],\"Limit\":\"1001\",\"Name\":\"Limit\"}],\"PlanNodeId\":2,\"Plans\":[{\"Node Type\":\"TablePointLookup\",\"Operators\":[{\"E-Cost\":\"No estimate\",\"E-Rows\":\"No estimate\",\"E-Size\":\"No 
estimate\",\"Inputs\":[],\"Name\":\"TablePointLookup\",\"Path\":\"\\/Root\\/table-2\",\"ReadColumns\":[\"key\",\"value\"],\"ReadLimit\":\"1001\",\"ReadRange\":[\"key (4)\"],\"Scan\":\"Sequential\",\"Table\":\"table-2\"}],\"PlanNodeId\":1,\"StageGuid\":\"\",\"Tables\":[\"table-2\"]}],\"StageGuid\":\"ee7759e-63fe5b2a-7266daab-b198087a\",\"Stats\":{\"BaseTimeMs\":1743437975953,\"FinishedTasks\":0,\"PhysicalStageId\":2,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"StageGuid\":\"\"}],\"StageGuid\":\"ae3d00a9-f64cd0cc-6fb0e770-2357ca87\",\"Stats\":{\"BaseTimeMs\":1743437975953,\"FinishedTasks\":0,\"PhysicalStageId\":3,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"StageGuid\":\"\"}],\"StageGuid\":\"c21d1738-217fd474-19ee5b7d-32bafc39\",\"Stats\":{\"BaseTimeMs\":1743437975953,\"FinishedTasks\":0,\"PhysicalStageId\":4,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"SortColumns\":[\"key (Asc)\"],\"StageGuid\":\"\"}],\"StageGuid\":\"af6f9afb-e414f3b0-60054f8f-f1f4caed\",\"Stats\":{\"BaseTimeMs\":1743437975953,\"ComputeNodes\":[{\"CpuTimeUs\":446,\"Tasks\":[{\"ComputeTimeUs\":128,\"FinishTimeMs\":1743437975957,\"Host\":\"ghrun-tuhoqst4xe\",\"InputBytes\":7,\"InputRows\":2,\"NodeId\":2,\"OutputBytes\":7,\"OutputRows\":2,\"TaskId\":6}]}],\"FinishedTasks\":0,\"PhysicalStageId\":5,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"StageGuid\":\"\"}],\"StageGuid\":\"23dac3a9-42229cb6-8c07aef4-ae2217e\",\"Stats\":{\"BaseTimeMs\":1743437975953,\"FinishedTasks\":0,\"PhysicalStageId\":6,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"StageGuid\":\"\"}],\"StageGuid\":\"\"}" StatConvertBytes: 3892 Extra { type_url: "type.googleapis.com/NKqpProto.TKqpExecutionExtraStats" value: "\010\002\022\013\010\222\003\020\233\t\030\203\" \007" } } 2025-03-31T16:19:35.961435Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2824] TxId: 281474976715665. Ctx: { TraceId: 01jqpg0m3647h5f8b3tx5zhjje, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGIyMzM2ZjItNDU4NWQ0MzUtNWQ2ZDNjNjEtMjU4MDgwZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-03-31T16:19:35.961484Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2824] TxId: 281474976715665. Ctx: { TraceId: 01jqpg0m3647h5f8b3tx5zhjje, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGIyMzM2ZjItNDU4NWQ0MzUtNWQ2ZDNjNjEtMjU4MDgwZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.004355s ReadRows: 2 ReadBytes: 16 ru: 2 rate limiter was not found force flag: 1 { items { uint32_value: 3 } items { uint32_value: 2 } }, { items { uint32_value: 4 } items { uint32_value: 2 } } >> TKeyValueCollectorTest::TestKeyValueCollectorEmpty >> Cdc::RacyRebootAndSplitWithTxInflight [GOOD] >> Cdc::RacyActivateAndEnqueue >> TKeyValueCollectorTest::TestKeyValueCollectorEmpty [GOOD] >> TKeyValueCollectorTest::TestKeyValueCollectorMany >> BasicUsage::CloseWriteSessionImmediately [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestPlannedHalfOverloadedSplit+UseSink [GOOD] Test command err: 2025-03-31T16:19:27.534099Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:19:27.534322Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:19:27.534496Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00111c/r3tmp/tmp6uJetT/pdisk_1.dat 2025-03-31T16:19:27.881572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:19:27.918768Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:19:27.957346Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:19:27.957461Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:19:27.968989Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:19:28.049097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:19:28.080851Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:19:28.081784Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:19:28.082132Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-31T16:19:28.082382Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:19:28.128560Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:19:28.129101Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:19:28.129189Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:19:28.130583Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:19:28.130658Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:19:28.130714Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:19:28.131073Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:19:28.131217Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:19:28.131282Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-31T16:19:28.141947Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:19:28.167591Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T16:19:28.167771Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:19:28.167877Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-31T16:19:28.167941Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:19:28.167982Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T16:19:28.168014Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:19:28.168252Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:28.168309Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:28.168572Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T16:19:28.168652Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T16:19:28.168764Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:19:28.168825Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:19:28.168864Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-31T16:19:28.168900Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-31T16:19:28.168934Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-31T16:19:28.168963Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:19:28.169006Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:19:28.169106Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:28.169144Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:28.169187Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-31T16:19:28.169301Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-03-31T16:19:28.169341Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T16:19:28.169442Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:19:28.169651Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-31T16:19:28.169703Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T16:19:28.169810Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T16:19:28.169855Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-31T16:19:28.169893Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-31T16:19:28.169941Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-03-31T16:19:28.169973Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-31T16:19:28.170271Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-31T16:19:28.170335Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-31T16:19:28.170375Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-31T16:19:28.170409Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-31T16:19:28.170466Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-31T16:19:28.170506Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-31T16:19:28.170543Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-31T16:19:28.170577Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-31T16:19:28.170603Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-31T16:19:28.172388Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-31T16:19:28.172450Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:19:28.183183Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:19:28.183258Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-31T16:19:28.183295Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-31T16:19:28.183340Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-03-31T16:19:28.183399Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-31T16:19:28.332119Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:28.332173Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:28.332205Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-31T16:19:28.332582Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:568:2495], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-03-31T16:19:28.332641Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-31T16:19:28.332768Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-31T16:19:28.332806Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-03-31T16:19:28.332859Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-03-31T16:19:28.332911Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-03-31T16:19:28.337195Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T16:19:28.337260Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:19:28.337894Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:28.337933Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:28.337976Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:19:2 ... 86224037892 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-31T16:19:35.217174Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037892 2025-03-31T16:19:35.217256Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037892, actorId: [2:1199:2936] 2025-03-31T16:19:35.217295Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037892 2025-03-31T16:19:35.217338Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037892 2025-03-31T16:19:35.217372Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-03-31T16:19:35.217540Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553157, Sender [2:1043:2823], Recipient [2:756:2634]: NKikimrTxDataShard.TEvSplitTransferSnapshotAck TabletId: 72075186224037892 OperationCookie: 281474976715665 2025-03-31T16:19:35.217585Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 Received snapshot Ack from dst 72075186224037892 for split OpId 281474976715665 2025-03-31T16:19:35.217800Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:1043:2823], Recipient [2:1043:2823]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:35.217824Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:35.218017Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [2:1194:2931], Recipient [2:756:2634]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72075186224037892 ClientId: [2:1194:2931] ServerId: [2:1196:2933] } 2025-03-31T16:19:35.218045Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-03-31T16:19:35.218321Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 270270976, Sender [2:24:2071], Recipient [2:1043:2823]: {TEvRegisterTabletResult TabletId# 72075186224037892 Entry# 2000} 2025-03-31T16:19:35.218354Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2025-03-31T16:19:35.218401Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037892 time 2000 2025-03-31T16:19:35.218442Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: 
at tablet# 72075186224037892 2025-03-31T16:19:35.218540Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037892 2025-03-31T16:19:35.218577Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:19:35.218607Z node 2 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037892 2025-03-31T16:19:35.218643Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037892 has no attached operations 2025-03-31T16:19:35.218688Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037892 2025-03-31T16:19:35.218719Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037892 TxInFly 0 2025-03-31T16:19:35.218760Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037892 2025-03-31T16:19:35.218864Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877764, Sender [2:1196:2933], Recipient [2:1043:2823]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-03-31T16:19:35.218894Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-03-31T16:19:35.218934Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037892, clientId# [2:1194:2931], serverId# [2:1196:2933], sessionId# [0:0:0] 2025-03-31T16:19:35.219212Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 270270978, Sender [2:24:2071], Recipient [2:1043:2823]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 2000 ReadStep# 2000 } 2025-03-31T16:19:35.219242Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2025-03-31T16:19:35.219291Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037892 coordinator 72057594046316545 last step 0 next step 2000 2025-03-31T16:19:35.219345Z node 2 :TX_DATASHARD DEBUG: CheckMediatorStateRestored at 72075186224037892: waitStep# 2000 readStep# 2000 observedStep# 2000 2025-03-31T16:19:35.219397Z node 2 :TX_DATASHARD TRACE: CheckMediatorStateRestored at 72075186224037892 promoting UnprotectedReadEdge to v2000/18446744073709551615 2025-03-31T16:19:35.230603Z node 2 :TX_DATASHARD DEBUG: 72075186224037893 ack snapshot OpId 281474976715665 2025-03-31T16:19:35.230752Z node 2 :TX_DATASHARD INFO: Switched to work state Ready tabletId 72075186224037893 2025-03-31T16:19:35.230871Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037893 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-31T16:19:35.230958Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037893 2025-03-31T16:19:35.231016Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037893, actorId: [2:1203:2940] 2025-03-31T16:19:35.231047Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037893 2025-03-31T16:19:35.231108Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037893 2025-03-31T16:19:35.231142Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037893 2025-03-31T16:19:35.231368Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:1051:2825], Recipient [2:1051:2825]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:35.231414Z node 2 :TX_DATASHARD TRACE: StateWork, processing event 
TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:35.231618Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553157, Sender [2:1051:2825], Recipient [2:756:2634]: NKikimrTxDataShard.TEvSplitTransferSnapshotAck TabletId: 72075186224037893 OperationCookie: 281474976715665 2025-03-31T16:19:35.231694Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 Received snapshot Ack from dst 72075186224037893 for split OpId 281474976715665 2025-03-31T16:19:35.232174Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [2:1195:2932], Recipient [2:756:2634]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72075186224037893 ClientId: [2:1195:2932] ServerId: [2:1197:2934] } 2025-03-31T16:19:35.232218Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-03-31T16:19:35.232600Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 270270976, Sender [2:24:2071], Recipient [2:1051:2825]: {TEvRegisterTabletResult TabletId# 72075186224037893 Entry# 2000} 2025-03-31T16:19:35.232635Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2025-03-31T16:19:35.232668Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037893 time 2000 2025-03-31T16:19:35.232702Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037893 2025-03-31T16:19:35.232838Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037893 2025-03-31T16:19:35.232875Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037893 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:19:35.232908Z node 2 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037893 2025-03-31T16:19:35.232940Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037893 has no attached operations 2025-03-31T16:19:35.232968Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037893 2025-03-31T16:19:35.233001Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037893 TxInFly 0 2025-03-31T16:19:35.233043Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037893 2025-03-31T16:19:35.233179Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877764, Sender [2:1197:2934], Recipient [2:1051:2825]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-03-31T16:19:35.233213Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-03-31T16:19:35.233250Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037893, clientId# [2:1195:2932], serverId# [2:1197:2934], sessionId# [0:0:0] 2025-03-31T16:19:35.233554Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 270270978, Sender [2:24:2071], Recipient [2:1051:2825]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 2000 ReadStep# 2000 } 2025-03-31T16:19:35.233588Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2025-03-31T16:19:35.233620Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037893 coordinator 72057594046316545 last step 0 next step 2000 2025-03-31T16:19:35.233662Z node 2 :TX_DATASHARD DEBUG: CheckMediatorStateRestored at 72075186224037893: waitStep# 2000 readStep# 2000 observedStep# 2000 2025-03-31T16:19:35.233712Z node 2 :TX_DATASHARD TRACE: CheckMediatorStateRestored at 72075186224037893 
promoting UnprotectedReadEdge to v2000/18446744073709551615 2025-03-31T16:19:35.244517Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 ack split to schemeshard 281474976715665 2025-03-31T16:19:35.247581Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553158, Sender [2:410:2405], Recipient [2:763:2638] 2025-03-31T16:19:35.247661Z node 2 :TX_DATASHARD DEBUG: Got TEvSplitPartitioningChanged: opId: 281474976715665, at datashard: 72075186224037889, state: SplitSrcWaitForPartitioningChanged 2025-03-31T16:19:35.249276Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 ack split partitioning changed to schemeshard 281474976715665 2025-03-31T16:19:35.249360Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 in PreOffline state HasSharedBobs: 1 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-03-31T16:19:35.249878Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [2:748:2629], Recipient [2:756:2634]: NKikimr::TEvTablet::TEvFollowerGcApplied 2025-03-31T16:19:35.789822Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 278003712, Sender [2:995:2681], Recipient [2:756:2634]: NKikimrDataEvents.TEvWrite Operations { Type: OPERATION_UPSERT TableId { OwnerId: 72057594046644480 TableId: 3 SchemaVersion: 1 } ColumnIds: 1 ColumnIds: 2 PayloadIndex: 0 PayloadFormat: FORMAT_CELLVEC } TxId: 281474976715663 TxMode: MODE_VOLATILE_PREPARE Locks { SendingShards: 72075186224037888 SendingShards: 72075186224037889 ReceivingShards: 72075186224037888 ReceivingShards: 72075186224037889 Op: Commit } 2025-03-31T16:19:35.789912Z node 2 :TX_DATASHARD TRACE: Handle TTxWrite: at tablet# 72075186224037889 2025-03-31T16:19:35.790042Z node 2 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_WRONG_SHARD_STATE;details=Rejecting data TxId 281474976715663 because datashard 72075186224037889: is in a pre/offline state assuming this is due to a finished split (wrong shard state);tx_id=281474976715663; 2025-03-31T16:19:35.790104Z node 2 :TX_DATASHARD NOTICE: Rejecting data TxId 281474976715663 because datashard 72075186224037889: is in a pre/offline state assuming this is due to a finished split (wrong shard state) 2025-03-31T16:19:35.790545Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715664, at schemeshard: 72057594046644480 2025-03-31T16:19:35.791147Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715665, at schemeshard: 72057594046644480 >> DataShardOutOfOrder::TestImmediateQueueThenSplit-UseSink [GOOD] >> TKeyValueCollectorTest::TestKeyValueCollectorMany [GOOD] >> KeyValueReadStorage::ReadWithTwoPartsOk [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> KeyValueReadStorage::ReadWithTwoPartsOk [GOOD] Test command err: 2025-03-31T16:19:37.668085Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 2 ErrorReason# ReadRequestCookie# 0 2025-03-31T16:19:37.669686Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:492} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 >> TKeyValueTest::TestInlineWriteReadDeleteWithRestartsThenResponseOk >> TKeyValueTest::TestInlineCopyRangeWorks ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::CloseWriteSessionImmediately [GOOD] Test 
command err: 2025-03-31T16:19:10.049689Z :BasicWriteSession INFO: Random seed for debugging is 1743437950049645 2025-03-31T16:19:10.433045Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008981979104159:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:10.433131Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:19:10.510292Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008980562710156:2157];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0016bd/r3tmp/tmpSAKTf6/pdisk_1.dat 2025-03-31T16:19:10.818860Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-31T16:19:10.819111Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:19:10.819328Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-31T16:19:11.225282Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:19:11.225402Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:19:11.226567Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:19:11.226669Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:19:11.230643Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:19:11.230807Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:19:11.232351Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13255, node 1 2025-03-31T16:19:11.280405Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:19:11.295344Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:19:11.295389Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:19:11.513078Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/0l5c/0016bd/r3tmp/yandexWVJXh3.tmp 2025-03-31T16:19:11.513106Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/0l5c/0016bd/r3tmp/yandexWVJXh3.tmp 2025-03-31T16:19:11.519242Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/0l5c/0016bd/r3tmp/yandexWVJXh3.tmp 2025-03-31T16:19:11.519400Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:19:11.763279Z INFO: TTestServer started on Port 25230 GrpcPort 13255 TClient is connected to server localhost:25230 PQClient connected to localhost:13255 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:19:12.114854Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-03-31T16:19:12.223421Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:14.238518Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008999158974354:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:14.238637Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:14.238929Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008999158974381:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:14.242638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-03-31T16:19:14.266216Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008999158974383:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-03-31T16:19:14.362955Z node 1 :TX_PROXY ERROR: Actor# [1:7488008999158974476:2696] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:19:14.686552Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488008999158974486:2348], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:19:14.688117Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7488008997742579587:2314], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:19:14.688872Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MjIzZTM1Yy01NGQzYzNiNS0xMWYxODExLWMwM2VkMDQ3, ActorId: [2:7488008997742579546:2308], ActorState: ExecuteState, TraceId: 01jqpfzz8qf3c4zp1nffwqr0kd, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:19:14.693529Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZWVkMzYxYTEtN2JlMjI3OWYtNmE2MDgzMy0zMGI4ZDYyNQ==, ActorId: [1:7488008999158974351:2336], ActorState: ExecuteState, TraceId: 01jqpfzz5v48wbq33ne3w36ceh, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:19:14.705940Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T16:19:14.706276Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T16:19:14.749272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:19:14.938563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:19:15.078599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:13255", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2025-03-31T16:19:15.413167Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jqpg004dfhs1fhqtkbyq5kwb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTliNzA1ZTYtMWY2MzIwYS0xZTJlMzE4Ni1mNjI2N2ZhMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-31T16:19:15.443725Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008981979104159:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:15.443785Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Subcribe to ClusterTracker from [1:7488009003453942197:3003] 2025-03-31T16:19:15.509109Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488008980562710156:2157];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:15.509201Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-03-31T16:19:21.593757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 waiting... PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:13255 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-03-31T16:19:21.700035Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: S ... 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lzop" } ReadRuleVersions: 0 YdbDatabasePath: "/Root" } Partitions { PartitionId: 0 TabletId: 72075186224037892 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186224037893 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 12 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } Path: "/Root/PQ/rt3.dc1--test-topic" name rt3.dc1--test-topic version1 CallPersQueueGRPC request to localhost:23117 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-03-31T16:19:34.922546Z node 3 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:23117 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-03-31T16:19:35.424963Z node 3 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:23117 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-03-31T16:19:35.928689Z node 3 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 1 ErrorCode: OK MetaResponse { CmdGetTopicMetadataResult { TopicInfo { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 
6000000 } Version: 1 LocalDC: true Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lzop" } TopicPath: "/Root/PQ/rt3.dc1--test-topic" YdbDatabasePath: "/Root" Consumers { Name: "user" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } Version: 0 Important: false } } ErrorCode: OK } } } === Topic created, have version: 1 2025-03-31T16:19:35.935251Z :DEBUG: [] MessageGroupId [src] SessionId [] Write session: try to update token 2025-03-31T16:19:35.935852Z :INFO: [] MessageGroupId [src] SessionId [] Write session: Do CDS request 2025-03-31T16:19:35.935900Z :INFO: [] MessageGroupId [src] SessionId [] Start write session. Will connect to endpoint: localhost:23117 2025-03-31T16:19:35.940081Z :DEBUG: [] MessageGroupId [src] SessionId [] Write session: send init request: init_request { topic: "test-topic" message_group_id: "src" } 2025-03-31T16:19:35.940562Z node 3 :PQ_WRITE_PROXY DEBUG: new grpc connection 2025-03-31T16:19:35.940593Z node 3 :PQ_WRITE_PROXY DEBUG: new session created cookie 1 2025-03-31T16:19:35.940879Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: grpc read done: success: 1 data: init_request { topic: "test-topic" message_group_id: "src" } 2025-03-31T16:19:35.940965Z node 3 :PQ_WRITE_PROXY INFO: session request cookie: 1 topic: "test-topic" message_group_id: "src" from ipv6:[::1]:49182 2025-03-31T16:19:35.940986Z node 3 :PQ_WRITE_PROXY INFO: write session: cookie=1 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:49182 proto=v1 topic=test-topic durationSec=0 2025-03-31T16:19:35.940994Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2025-03-31T16:19:35.942281Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: describe result for acl check 2025-03-31T16:19:35.942385Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2025-03-31T16:19:35.942399Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-03-31T16:19:35.942409Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2025-03-31T16:19:35.942425Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7488009088093338100:2532] (SourceId=src, PreferedPartition=(NULL)) StartKqpSession 2025-03-31T16:19:35.944594Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7488009088093338100:2532] (SourceId=src, PreferedPartition=(NULL)) Select from the table 2025-03-31T16:19:36.106578Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7488009088093338100:2532] (SourceId=src, PreferedPartition=(NULL)) RequestPQRB 2025-03-31T16:19:36.106949Z node 4 :PERSQUEUE_READ_BALANCER INFO: 
[72075186224037893][rt3.dc1--test-topic] pipe [3:7488009092388305461:2532] connected; active server actors: 1 2025-03-31T16:19:36.107347Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7488009088093338100:2532] (SourceId=src, PreferedPartition=(NULL)) Received partition 0 from PQRB for SourceId=src 2025-03-31T16:19:36.107374Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7488009088093338100:2532] (SourceId=src, PreferedPartition=(NULL)) Update the table 2025-03-31T16:19:36.107749Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7488009092388305461:2532] disconnected; active server actors: 1 2025-03-31T16:19:36.107775Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7488009092388305461:2532] disconnected no session 2025-03-31T16:19:36.196472Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7488009088093338100:2532] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2025-03-31T16:19:36.196509Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7488009088093338100:2532] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2025-03-31T16:19:36.196525Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7488009088093338100:2532] (SourceId=src, PreferedPartition=(NULL)) Start idle 2025-03-31T16:19:36.196553Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2025-03-31T16:19:36.197031Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 3, Generation: 1 2025-03-31T16:19:36.197053Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [3:7488009092388305485:2532], now have 1 active actors on pipe 2025-03-31T16:19:36.197085Z node 3 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-03-31T16:19:36.197109Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-03-31T16:19:36.197201Z node 3 :PERSQUEUE INFO: new Cookie src|9d31cd4a-44fc63fa-2421ec33-3fa87515_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2025-03-31T16:19:36.197303Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2025-03-31T16:19:36.197363Z node 3 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-03-31T16:19:36.197472Z node 3 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-03-31T16:19:36.197496Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-03-31T16:19:36.197573Z node 3 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-03-31T16:19:36.197687Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|9d31cd4a-44fc63fa-2421ec33-3fa87515_0 2025-03-31T16:19:36.198495Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1743437976198 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-31T16:19:36.198624Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. 
Init response: session_id: "src|9d31cd4a-44fc63fa-2421ec33-3fa87515_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2025-03-31T16:19:36.198808Z :INFO: [] MessageGroupId [src] SessionId [src|9d31cd4a-44fc63fa-2421ec33-3fa87515_0] Write session: close. Timeout = 0 ms 2025-03-31T16:19:36.198839Z :INFO: [] MessageGroupId [src] SessionId [src|9d31cd4a-44fc63fa-2421ec33-3fa87515_0] Write session will now close 2025-03-31T16:19:36.198868Z :DEBUG: [] MessageGroupId [src] SessionId [src|9d31cd4a-44fc63fa-2421ec33-3fa87515_0] Write session: aborting 2025-03-31T16:19:36.199531Z :INFO: [] MessageGroupId [src] SessionId [src|9d31cd4a-44fc63fa-2421ec33-3fa87515_0] Write session: gracefully shut down, all writes complete 2025-03-31T16:19:36.199593Z :DEBUG: [] MessageGroupId [src] SessionId [src|9d31cd4a-44fc63fa-2421ec33-3fa87515_0] Write session is aborting and will not restart 2025-03-31T16:19:36.199653Z :DEBUG: [] MessageGroupId [src] SessionId [src|9d31cd4a-44fc63fa-2421ec33-3fa87515_0] Write session: destroy 2025-03-31T16:19:36.199651Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|9d31cd4a-44fc63fa-2421ec33-3fa87515_0 grpc read done: success: 0 data: 2025-03-31T16:19:36.199677Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|9d31cd4a-44fc63fa-2421ec33-3fa87515_0 grpc read failed 2025-03-31T16:19:36.200146Z node 3 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 1 sessionId: src|9d31cd4a-44fc63fa-2421ec33-3fa87515_0 2025-03-31T16:19:36.200177Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|9d31cd4a-44fc63fa-2421ec33-3fa87515_0 is DEAD 2025-03-31T16:19:36.200519Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-03-31T16:19:36.200640Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7488009092388305485:2532] destroyed 2025-03-31T16:19:36.200699Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. Session was created ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestImmediateQueueThenSplit-UseSink [GOOD] Test command err: 2025-03-31T16:19:30.321508Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:19:30.321705Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:19:30.321868Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0010f0/r3tmp/tmpXmAeCm/pdisk_1.dat 2025-03-31T16:19:30.664558Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:19:30.704600Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:19:30.742553Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:19:30.742694Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:19:30.754088Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:19:30.834409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:19:30.873786Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:19:30.874781Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:19:30.875220Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-31T16:19:30.875436Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:19:30.922012Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:19:30.922745Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:19:30.922858Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:19:30.924492Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:19:30.924565Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:19:30.924635Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:19:30.925033Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:19:30.925154Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:19:30.925223Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-31T16:19:30.935842Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:19:30.963977Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T16:19:30.964136Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:19:30.964232Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-31T16:19:30.964259Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:19:30.964290Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T16:19:30.964316Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:19:30.964462Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:30.964493Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:30.964715Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T16:19:30.964790Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T16:19:30.964887Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:19:30.964980Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:19:30.965010Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-31T16:19:30.965043Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-31T16:19:30.965065Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-31T16:19:30.965087Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:19:30.965115Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:19:30.965205Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:30.965230Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:30.965258Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-31T16:19:30.965322Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-03-31T16:19:30.965359Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T16:19:30.965436Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:19:30.965595Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-31T16:19:30.965634Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T16:19:30.965759Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T16:19:30.965798Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-31T16:19:30.965825Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-31T16:19:30.965849Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-03-31T16:19:30.965869Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-31T16:19:30.966090Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-31T16:19:30.966130Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-31T16:19:30.966154Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-31T16:19:30.966182Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-31T16:19:30.966225Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-31T16:19:30.966243Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-31T16:19:30.966267Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-31T16:19:30.966285Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-31T16:19:30.966303Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-31T16:19:30.967864Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-31T16:19:30.967922Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:19:30.978613Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:19:30.978686Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-31T16:19:30.978720Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-31T16:19:30.978767Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-03-31T16:19:30.978820Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-31T16:19:31.127516Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:31.127577Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:31.127613Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-31T16:19:31.127998Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:568:2495], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-03-31T16:19:31.128033Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-31T16:19:31.128147Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-31T16:19:31.128187Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-03-31T16:19:31.128223Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-03-31T16:19:31.128262Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-03-31T16:19:31.132401Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T16:19:31.132464Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:19:31.133065Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:31.133118Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:31.133169Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:19:3 ... se: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjRiMGY2YWItY2U2MmY2OGMtMjM0ZDg3MWYtNWE0OGU4OTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-03-31T16:19:37.221106Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1140:2715] TxId: 281474976715676. Ctx: { TraceId: 01jqpg0mvjd3k5y741cvj4tcw5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjRiMGY2YWItY2U2MmY2OGMtMjM0ZDg3MWYtNWE0OGU4OTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-03-31T16:19:37.221132Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1140:2715] TxId: 281474976715676. Ctx: { TraceId: 01jqpg0mvjd3k5y741cvj4tcw5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjRiMGY2YWItY2U2MmY2OGMtMjM0ZDg3MWYtNWE0OGU4OTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-03-31T16:19:37.221171Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715677. Resolved key sets: 0 2025-03-31T16:19:37.221296Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YmYyODQzYmItZGVlODcyM2QtNzVmZjgwMjEtODc1MmNiMjA=, ActorId: [2:899:2734], ActorState: ExecuteState, TraceId: 01jqpg0mvqejtcgps6szc9thg8, Create QueryResponse for error on request, msg: 2025-03-31T16:19:37.221863Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715677. Ctx: { TraceId: 01jqpg0mvk34740yvz725xnm1m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NThjNDI4MTItZjhhNTYyMDMtNzE4MGM3M2YtMzI4NjQ0OWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:19:37.221899Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715677. Ctx: { TraceId: 01jqpg0mvk34740yvz725xnm1m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NThjNDI4MTItZjhhNTYyMDMtNzE4MGM3M2YtMzI4NjQ0OWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, snapshot: {0, 0} 2025-03-31T16:19:37.221929Z node 2 :KQP_EXECUTER INFO: ActorId: [2:1146:2719] TxId: 281474976715677. 
Ctx: { TraceId: 01jqpg0mvk34740yvz725xnm1m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NThjNDI4MTItZjhhNTYyMDMtNzE4MGM3M2YtMzI4NjQ0OWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-03-31T16:19:37.221962Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1146:2719] TxId: 281474976715677. Ctx: { TraceId: 01jqpg0mvk34740yvz725xnm1m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NThjNDI4MTItZjhhNTYyMDMtNzE4MGM3M2YtMzI4NjQ0OWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-03-31T16:19:37.221988Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1146:2719] TxId: 281474976715677. Ctx: { TraceId: 01jqpg0mvk34740yvz725xnm1m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NThjNDI4MTItZjhhNTYyMDMtNzE4MGM3M2YtMzI4NjQ0OWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-03-31T16:19:37.222684Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715678. Resolved key sets: 0 2025-03-31T16:19:37.222734Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715679. Resolved key sets: 0 2025-03-31T16:19:37.222971Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715678. Ctx: { TraceId: 01jqpg0mvna0mcqprzr5svppde, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Njg1ZDUyYWQtZGJkMzJmOGMtZjZkOTVkYjItYWVkOGQ3MjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:19:37.223005Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715678. Ctx: { TraceId: 01jqpg0mvna0mcqprzr5svppde, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Njg1ZDUyYWQtZGJkMzJmOGMtZjZkOTVkYjItYWVkOGQ3MjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, snapshot: {0, 0} 2025-03-31T16:19:37.223040Z node 2 :KQP_EXECUTER INFO: ActorId: [2:1152:2724] TxId: 281474976715678. Ctx: { TraceId: 01jqpg0mvna0mcqprzr5svppde, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Njg1ZDUyYWQtZGJkMzJmOGMtZjZkOTVkYjItYWVkOGQ3MjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-03-31T16:19:37.223109Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1152:2724] TxId: 281474976715678. Ctx: { TraceId: 01jqpg0mvna0mcqprzr5svppde, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Njg1ZDUyYWQtZGJkMzJmOGMtZjZkOTVkYjItYWVkOGQ3MjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-03-31T16:19:37.223146Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1152:2724] TxId: 281474976715678. Ctx: { TraceId: 01jqpg0mvna0mcqprzr5svppde, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Njg1ZDUyYWQtZGJkMzJmOGMtZjZkOTVkYjItYWVkOGQ3MjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-03-31T16:19:37.223187Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715679. 
Ctx: { TraceId: 01jqpg0mvn4xj7z7sabcbcspc4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTQ5OGQ1MjAtNjRmNTAxYTQtMmE1ZjI2NTAtZTYwNmEzMjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:19:37.223211Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715679. Ctx: { TraceId: 01jqpg0mvn4xj7z7sabcbcspc4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTQ5OGQ1MjAtNjRmNTAxYTQtMmE1ZjI2NTAtZTYwNmEzMjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, snapshot: {0, 0} 2025-03-31T16:19:37.223240Z node 2 :KQP_EXECUTER INFO: ActorId: [2:1154:2727] TxId: 281474976715679. Ctx: { TraceId: 01jqpg0mvn4xj7z7sabcbcspc4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTQ5OGQ1MjAtNjRmNTAxYTQtMmE1ZjI2NTAtZTYwNmEzMjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-03-31T16:19:37.223280Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1154:2727] TxId: 281474976715679. Ctx: { TraceId: 01jqpg0mvn4xj7z7sabcbcspc4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTQ5OGQ1MjAtNjRmNTAxYTQtMmE1ZjI2NTAtZTYwNmEzMjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-03-31T16:19:37.223306Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1154:2727] TxId: 281474976715679. Ctx: { TraceId: 01jqpg0mvn4xj7z7sabcbcspc4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTQ5OGQ1MjAtNjRmNTAxYTQtMmE1ZjI2NTAtZTYwNmEzMjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-03-31T16:19:37.223333Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715680. Resolved key sets: 0 2025-03-31T16:19:37.223905Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715680. Ctx: { TraceId: 01jqpg0mvqbcczk1ztg9gezg0y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YmQ4NmFlN2UtNjIxNDQzNWMtZDg1ZGQxNjctZGYxNGI0MmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:19:37.223933Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715680. Ctx: { TraceId: 01jqpg0mvqbcczk1ztg9gezg0y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YmQ4NmFlN2UtNjIxNDQzNWMtZDg1ZGQxNjctZGYxNGI0MmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, snapshot: {0, 0} 2025-03-31T16:19:37.223956Z node 2 :KQP_EXECUTER INFO: ActorId: [2:1156:2731] TxId: 281474976715680. Ctx: { TraceId: 01jqpg0mvqbcczk1ztg9gezg0y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YmQ4NmFlN2UtNjIxNDQzNWMtZDg1ZGQxNjctZGYxNGI0MmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-03-31T16:19:37.223989Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1156:2731] TxId: 281474976715680. 
Ctx: { TraceId: 01jqpg0mvqbcczk1ztg9gezg0y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YmQ4NmFlN2UtNjIxNDQzNWMtZDg1ZGQxNjctZGYxNGI0MmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-03-31T16:19:37.224012Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1156:2731] TxId: 281474976715680. Ctx: { TraceId: 01jqpg0mvqbcczk1ztg9gezg0y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YmQ4NmFlN2UtNjIxNDQzNWMtZDg1ZGQxNjctZGYxNGI0MmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-03-31T16:19:37.224383Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715681. Resolved key sets: 0 2025-03-31T16:19:37.224527Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715681. Ctx: { TraceId: 01jqpg0mvqejtcgps6szc9thg8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YmYyODQzYmItZGVlODcyM2QtNzVmZjgwMjEtODc1MmNiMjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:19:37.224550Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715681. Ctx: { TraceId: 01jqpg0mvqejtcgps6szc9thg8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YmYyODQzYmItZGVlODcyM2QtNzVmZjgwMjEtODc1MmNiMjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, snapshot: {0, 0} 2025-03-31T16:19:37.224574Z node 2 :KQP_EXECUTER INFO: ActorId: [2:1163:2734] TxId: 281474976715681. Ctx: { TraceId: 01jqpg0mvqejtcgps6szc9thg8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YmYyODQzYmItZGVlODcyM2QtNzVmZjgwMjEtODc1MmNiMjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-03-31T16:19:37.224637Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1163:2734] TxId: 281474976715681. Ctx: { TraceId: 01jqpg0mvqejtcgps6szc9thg8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YmYyODQzYmItZGVlODcyM2QtNzVmZjgwMjEtODc1MmNiMjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-03-31T16:19:37.224662Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1163:2734] TxId: 281474976715681. Ctx: { TraceId: 01jqpg0mvqejtcgps6szc9thg8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YmYyODQzYmItZGVlODcyM2QtNzVmZjgwMjEtODc1MmNiMjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 >> TKeyValueTest::TestConcatWorks >> BasicUsage::FallbackToSingleDbAfterBadRequest [GOOD] >> TKeyValueTest::TestBasicWriteRead >> TKeyValueTest::TestCopyRangeWorks >> TKeyValueTest::TestInlineEmptyWriteReadDeleteWithRestartsThenResponseOk >> YdbMonitoring::SelfCheck [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::FallbackToSingleDbAfterBadRequest [GOOD] Test command err: 2025-03-31T16:19:10.055860Z :FallbackToSingleDb INFO: Random seed for debugging is 1743437950055831 2025-03-31T16:19:10.464583Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008980953427173:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:10.464683Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:19:10.542363Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008979528967585:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:10.542459Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:19:10.796840Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-31T16:19:10.807288Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0016ac/r3tmp/tmpCnXAv8/pdisk_1.dat 2025-03-31T16:19:11.292041Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:19:11.319593Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:19:11.319678Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:19:11.320526Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:19:11.320568Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:19:11.327694Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:19:11.331905Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:19:11.333981Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1349, node 1 2025-03-31T16:19:11.513147Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/0l5c/0016ac/r3tmp/yandexPsAUKV.tmp 2025-03-31T16:19:11.513176Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/0l5c/0016ac/r3tmp/yandexPsAUKV.tmp 2025-03-31T16:19:11.514260Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/0l5c/0016ac/r3tmp/yandexPsAUKV.tmp 2025-03-31T16:19:11.514452Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:19:11.764122Z INFO: TTestServer started on Port 3872 GrpcPort 1349 TClient is 
connected to server localhost:3872 PQClient connected to localhost:1349 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:19:12.175907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-03-31T16:19:14.322974Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008996708837091:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:14.323062Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008996708837077:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:14.323262Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:14.328909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480 2025-03-31T16:19:14.354742Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488008996708837100:2313], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2025-03-31T16:19:14.436122Z node 2 :TX_PROXY ERROR: Actor# [2:7488008996708837128:2130] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:19:14.709570Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7488008996708837135:2317], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:19:14.709871Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZGM0ODU5ZTItMTU4Mzg1NzktZDUxZDQzNDctMWI2NTE2NTE=, ActorId: [2:7488008996708837060:2307], ActorState: ExecuteState, TraceId: 01jqpfzz8g2m3dwsnpt0847gsv, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:19:14.711918Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488008998133297447:2343], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:19:14.712381Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T16:19:14.713477Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTgzMWE1NDAtM2QyNDgzODEtYzZkNzc3ZjEtN2MwYWNhOTQ=, ActorId: [1:7488008998133297418:2336], ActorState: ExecuteState, TraceId: 01jqpfzza9fgzfjbx03mpvbst6, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:19:14.713868Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T16:19:14.749321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-31T16:19:14.924841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:19:15.060615Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:1349", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2025-03-31T16:19:15.413125Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jqpg002db0mjbdvycdhyambz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2EzYjk4MDQtNzJiOWFjMzItMmI4YjFjZjgtMWVmYWRlZmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:19:15.461439Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008980953427173:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:15.461543Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
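
Note: the SCHEME_ERROR compile failures above are expected startup noise, not test failures. The cluster tracker polls /Root/PQ/Config/V2/Cluster before the harness has created it; the ESchemeOpCreateTable operations that follow create the table, and the "=== Init DC" UPSERT seeds it. A minimal YQL sketch of the schema that UPSERT implies (column types are assumptions inferred from the inserted values, not taken from the test sources):

    -- Hypothetical reconstruction; the test harness creates the real table.
    CREATE TABLE `/Root/PQ/Config/V2/Cluster` (
        name Utf8,       -- "dc1", "dc2"
        balancer Utf8,   -- "localhost:1349", "dc2.logbroker.yandex.net"
        local Bool,
        enabled Bool,
        weight Uint64,   -- 1000 in the seed data above
        PRIMARY KEY (name)
    );
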
Subcribe to ClusterTracker from [1:7488009002428265154:2964] 2025-03-31T16:19:15.527163Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488008979528967585:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:15.527231Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-03-31T16:19:21.417867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480 waiting... PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:1349 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-03-31T16:19:21.538903Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:1349 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWat ... ceId=src, PreferedPartition=(NULL)) Select from the table 2025-03-31T16:19:36.258324Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7488009092249782274:2514] (SourceId=src, PreferedPartition=(NULL)) RequestPQRB 2025-03-31T16:19:36.258609Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7488009092249782320:2514] connected; active server actors: 1 2025-03-31T16:19:36.258656Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7488009092249782274:2514] (SourceId=src, PreferedPartition=(NULL)) Received partition 0 from PQRB for SourceId=src 2025-03-31T16:19:36.258680Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7488009092249782274:2514] (SourceId=src, PreferedPartition=(NULL)) Update the table 2025-03-31T16:19:36.258875Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7488009092249782320:2514] disconnected; active server actors: 1 2025-03-31T16:19:36.258901Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7488009092249782320:2514] disconnected no session 2025-03-31T16:19:36.343320Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7488009092249782274:2514] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2025-03-31T16:19:36.343351Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7488009092249782274:2514] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2025-03-31T16:19:36.343363Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7488009092249782274:2514] (SourceId=src, PreferedPartition=(NULL)) Start idle 2025-03-31T16:19:36.343388Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. 
session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2025-03-31T16:19:36.344010Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 4, Generation: 1 2025-03-31T16:19:36.343998Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [3:7488009092249782345:2514], now have 1 active actors on pipe 2025-03-31T16:19:36.344113Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-03-31T16:19:36.344145Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-03-31T16:19:36.344238Z node 4 :PERSQUEUE INFO: new Cookie src|b622d5f-e9f1bbe2-50a39fc6-a4bea26e_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2025-03-31T16:19:36.344356Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2025-03-31T16:19:36.344410Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-03-31T16:19:36.344802Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-03-31T16:19:36.344842Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-03-31T16:19:36.344934Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-03-31T16:19:36.345134Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|b622d5f-e9f1bbe2-50a39fc6-a4bea26e_0 2025-03-31T16:19:36.345825Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1743437976345 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-31T16:19:36.345926Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|b622d5f-e9f1bbe2-50a39fc6-a4bea26e_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2025-03-31T16:19:36.346145Z :INFO: [] MessageGroupId [src] SessionId [src|b622d5f-e9f1bbe2-50a39fc6-a4bea26e_0] Write session: close. 
Timeout = 0 ms 2025-03-31T16:19:36.346200Z :INFO: [] MessageGroupId [src] SessionId [src|b622d5f-e9f1bbe2-50a39fc6-a4bea26e_0] Write session will now close 2025-03-31T16:19:36.346237Z :DEBUG: [] MessageGroupId [src] SessionId [src|b622d5f-e9f1bbe2-50a39fc6-a4bea26e_0] Write session: aborting 2025-03-31T16:19:36.346698Z :INFO: [] MessageGroupId [src] SessionId [src|b622d5f-e9f1bbe2-50a39fc6-a4bea26e_0] Write session: gracefully shut down, all writes complete 2025-03-31T16:19:36.346741Z :DEBUG: [] MessageGroupId [src] SessionId [src|b622d5f-e9f1bbe2-50a39fc6-a4bea26e_0] Write session: destroy 2025-03-31T16:19:36.347695Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|b622d5f-e9f1bbe2-50a39fc6-a4bea26e_0 grpc read done: success: 0 data: 2025-03-31T16:19:36.347715Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|b622d5f-e9f1bbe2-50a39fc6-a4bea26e_0 grpc read failed 2025-03-31T16:19:36.347739Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|b622d5f-e9f1bbe2-50a39fc6-a4bea26e_0 grpc closed 2025-03-31T16:19:36.347752Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|b622d5f-e9f1bbe2-50a39fc6-a4bea26e_0 is DEAD PORTS 3925 29903 2025-03-31T16:19:36.348515Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-03-31T16:19:36.348738Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7488009092249782345:2514] destroyed 2025-03-31T16:19:36.348801Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. Session was created >>> Ready to answer: ok 2025-03-31T16:19:37.357733Z :INFO: [/Root] OnFederationDiscovery fall back to single mode, database=/Root 2025-03-31T16:19:37.357847Z :INFO: [/Root] [] [d605d65a-c662446c-764b2aa8-7416b899] Open read subsessions to databases: { name: , endpoint: localhost:29903, path: /Root } 2025-03-31T16:19:37.358026Z :INFO: [/Root] [/Root] [c5cca48d-eda7f0f1-7e846b43-2c10e225] Starting read session 2025-03-31T16:19:37.358061Z :DEBUG: [/Root] [/Root] [c5cca48d-eda7f0f1-7e846b43-2c10e225] Starting single session 2025-03-31T16:19:37.358519Z :DEBUG: [/Root] [/Root] [c5cca48d-eda7f0f1-7e846b43-2c10e225] [] In Reconnect, ReadSizeBudget = 524288, ReadSizeServerDelta = 0 2025-03-31T16:19:37.358605Z :DEBUG: [/Root] [/Root] [c5cca48d-eda7f0f1-7e846b43-2c10e225] [] New values: ReadSizeBudget = 524288, ReadSizeServerDelta = 0 2025-03-31T16:19:37.358743Z :DEBUG: [/Root] [/Root] [c5cca48d-eda7f0f1-7e846b43-2c10e225] [] Reconnecting session to cluster in 0.000000s 2025-03-31T16:19:37.359103Z :ERROR: [/Root] [/Root] [c5cca48d-eda7f0f1-7e846b43-2c10e225] [] Got error. Status: CLIENT_CALL_UNIMPLEMENTED. Description:
: Error: GRpc error: (12):
: Error: Grpc error response on endpoint localhost:29903
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:29903. 2025-03-31T16:19:37.359162Z :DEBUG: [/Root] [/Root] [c5cca48d-eda7f0f1-7e846b43-2c10e225] [] In Reconnect, ReadSizeBudget = 524288, ReadSizeServerDelta = 0 2025-03-31T16:19:37.359197Z :DEBUG: [/Root] [/Root] [c5cca48d-eda7f0f1-7e846b43-2c10e225] [] New values: ReadSizeBudget = 524288, ReadSizeServerDelta = 0 2025-03-31T16:19:37.359325Z :INFO: [/Root] [/Root] [c5cca48d-eda7f0f1-7e846b43-2c10e225] [] Closing session to cluster: SessionClosed { Status: CLIENT_CALL_UNIMPLEMENTED Issues: "
: Error: Failed to establish connection to server "localhost:29903" ( cluster ). Attempts done: 1
: Error: GRpc error: (12):
: Error: Grpc error response on endpoint localhost:29903
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:29903. " } 2025-03-31T16:19:37.359591Z :NOTICE: [/Root] [/Root] [c5cca48d-eda7f0f1-7e846b43-2c10e225] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-03-31T16:19:37.359628Z :DEBUG: [/Root] [/Root] [c5cca48d-eda7f0f1-7e846b43-2c10e225] [] Abort session to cluster Got new read session event: SessionClosed { Status: CLIENT_CALL_UNIMPLEMENTED Issues: "
: Error: Failed to establish connection to server "localhost:29903" ( cluster ). Attempts done: 1
: Error: GRpc error: (12):
: Error: Grpc error response on endpoint localhost:29903
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:29903. " } 2025-03-31T16:19:37.359731Z :INFO: [/Root] [/Root] [c5cca48d-eda7f0f1-7e846b43-2c10e225] Closing read session. Close timeout: 0.010000s 2025-03-31T16:19:37.359777Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-03-31T16:19:37.359823Z :INFO: [/Root] [/Root] [c5cca48d-eda7f0f1-7e846b43-2c10e225] Counters: { Errors: 1 CurrentSessionLifetimeMs: 1 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-31T16:19:37.359854Z :INFO: [/Root] [/Root] [c5cca48d-eda7f0f1-7e846b43-2c10e225] Closing read session. Close timeout: 0.000000s 2025-03-31T16:19:37.359883Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-03-31T16:19:37.359915Z :INFO: [/Root] [/Root] [c5cca48d-eda7f0f1-7e846b43-2c10e225] Counters: { Errors: 1 CurrentSessionLifetimeMs: 1 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-31T16:19:37.359964Z :INFO: [/Root] [/Root] [c5cca48d-eda7f0f1-7e846b43-2c10e225] Closing read session. Close timeout: 0.000000s 2025-03-31T16:19:37.359991Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-03-31T16:19:37.360033Z :INFO: [/Root] [/Root] [c5cca48d-eda7f0f1-7e846b43-2c10e225] Counters: { Errors: 1 CurrentSessionLifetimeMs: 2 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-31T16:19:37.360093Z :NOTICE: [/Root] [/Root] [c5cca48d-eda7f0f1-7e846b43-2c10e225] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-03-31T16:19:37.528073Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7488009096544749721:2533] TxId: 281474976715688. Ctx: { TraceId: 01jqpg0nk84dcxakqzx6bk9351, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MWRlMDQ5MDMtOGVlZDFhY2ItMWY5ZDNhMTItYjhlNTQ0Y2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 4 2025-03-31T16:19:37.528884Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7488009096544749728:2541], TxId: 281474976715688, task: 2. Ctx: { SessionId : ydb://session/3?node_id=3&id=MWRlMDQ5MDMtOGVlZDFhY2ItMWY5ZDNhMTItYjhlNTQ0Y2Y=. CustomerSuppliedId : . TraceId : 01jqpg0nk84dcxakqzx6bk9351. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [3:7488009096544749721:2533], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2025-03-31T16:19:37.529474Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7488009096544749729:2542], TxId: 281474976715688, task: 4. Ctx: { TraceId : 01jqpg0nk84dcxakqzx6bk9351. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=3&id=MWRlMDQ5MDMtOGVlZDFhY2ItMWY5ZDNhMTItYjhlNTQ0Y2Y=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [3:7488009096544749721:2533], status: UNAVAILABLE, reason: {
: Error: Terminate execution } ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbMonitoring::SelfCheck [GOOD] Test command err: 2025-03-31T16:17:23.111673Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008520041355237:2213];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:23.120352Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d67/r3tmp/tmpfZSUui/pdisk_1.dat 2025-03-31T16:17:23.566619Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:23.566700Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:23.569907Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:17:23.596830Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30417, node 1 2025-03-31T16:17:23.657760Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:17:23.658132Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:17:23.688008Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:23.688032Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:23.688042Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:23.688166Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7190 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:23.951161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
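
Note: the recurring "Resource pool default not found or you don't have access permissions" warnings in these test logs are a bootstrap race rather than errors: each session looks up the default pool under /Root/.metadata/workload_manager/pools/ before the automatic ESchemeOpCreateResourcePool operation has committed, then retries ("Scheduled retry", "doublechecking") until the path exists. For reference, a pool can also be declared explicitly in YQL; a hedged sketch, where the pool name and setting values are placeholder assumptions (the tests themselves rely on the automatically created default pool):

    -- Hypothetical explicit pool declaration, not executed by these tests.
    CREATE RESOURCE POOL demo_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10,  -- assumed value
        QUEUE_SIZE = 100              -- assumed value
    );
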
2025-03-31T16:17:23.971815Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:17:24.158008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateColumnStore CreateColumnStore { Name: "LogStore" ColumnShardCount: 4 SchemaPresets { Name: "default" Schema { Columns { Name: "timestamp" Type: "Uint8" NotNull: true } Columns { Name: "resource_type" Type: "Utf8" NotNull: true } Columns { Name: "resource_id" Type: "Utf8" NotNull: true } Columns { Name: "uid" Type: "Utf8" NotNull: true } Columns { Name: "level" Type: "Int32" } Columns { Name: "message" Type: "Utf8" } Columns { Name: "json_payload" Type: "JsonDocument" } Columns { Name: "request_id" Type: "Utf8" } Columns { Name: "ingested_at" Type: "Timestamp" } Columns { Name: "saved_at" Type: "Timestamp" } KeyColumnNames: "timestamp" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" DefaultCompression { Codec: ColumnCodecLZ4 } } } } } TxId: 281474976710658 TabletId: 72057594046644480 PeerName: "ipv6:[::1]:59892" , at schemeshard: 72057594046644480 2025-03-31T16:17:24.158494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateOlapStore Propose, path: /Root/LogStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:17:24.159109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: LogStore, child id: [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-03-31T16:17:24.159177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 0 2025-03-31T16:17:24.159253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-03-31T16:17:24.159310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-03-31T16:17:24.159354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-03-31T16:17:24.159418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 4 2025-03-31T16:17:24.159692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 5 2025-03-31T16:17:24.161868Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2025-03-31T16:17:24.162135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-31T16:17:24.162155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:17:24.162291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-03-31T16:17:24.162328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 
72057594046644480, LocalPathId: 2] was 6 2025-03-31T16:17:24.165011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710658, response: Status: StatusAccepted TxId: 281474976710658 SchemeshardId: 72057594046644480 PathId: 2, at schemeshard: 72057594046644480 2025-03-31T16:17:24.165183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE COLUMN STORE, path: /Root/LogStore 2025-03-31T16:17:24.165421Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-03-31T16:17:24.165450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-03-31T16:17:24.165582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2025-03-31T16:17:24.165654Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-03-31T16:17:24.165670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7488008520041355896:2517], at schemeshard: 72057594046644480, txId: 281474976710658, path id: 1 2025-03-31T16:17:24.165704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7488008520041355896:2517], at schemeshard: 72057594046644480, txId: 281474976710658, path id: 2 2025-03-31T16:17:24.165745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:17:24.165802Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateOlapStore, at tablet# 72057594046644480 2025-03-31T16:17:24.166489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710658:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 1 TabletType: ColumnShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } 
BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-03-31T16:17:24.167017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710658:0 CreateRequest Event to Hive: 72057594037 ... 480 2025-03-31T16:19:29.927201Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:19:29.927275Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:19:29.927348Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:19:29.927381Z node 64 :FLAT_TX_SCHEMESHARD INFO: TDropOlapStore TProposedDeleteParts operationId# 281474976715667:0 ProgressState, at schemeshard: 72057594046644480 2025-03-31T16:19:29.927434Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 6 2025-03-31T16:19:29.927637Z node 64 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715667:0 progress is 1/1 2025-03-31T16:19:29.927660Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715667 ready parts: 1/1 2025-03-31T16:19:29.927687Z node 64 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715667:0 progress is 1/1 2025-03-31T16:19:29.927698Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715667 ready parts: 1/1 2025-03-31T16:19:29.927717Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715667, ready parts: 1/1, is published: true 2025-03-31T16:19:29.927766Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [64:7488009062868399988:2370] message: TxId: 281474976715667 2025-03-31T16:19:29.927796Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715667 ready parts: 1/1 2025-03-31T16:19:29.927824Z node 64 :FLAT_TX_SCHEMESHARD 
NOTICE: Operation and all the parts is done, operation id: 281474976715667:0 2025-03-31T16:19:29.927836Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715667:0 2025-03-31T16:19:29.928000Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 5 2025-03-31T16:19:29.929718Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2025-03-31T16:19:29.929747Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-03-31T16:19:29.929761Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:1 hive 72057594037968897 at ss 72057594046644480 2025-03-31T16:19:29.929780Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-03-31T16:19:29.932966Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-03-31T16:19:29.933627Z node 64 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[64:7488009062868399327:2327];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-03-31T16:19:29.933891Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 4 2025-03-31T16:19:29.934181Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-03-31T16:19:29.934236Z node 64 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 64, TabletId: 72075186224037891 not found 2025-03-31T16:19:29.934273Z node 64 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 64, TabletId: 72075186224037888 not found 2025-03-31T16:19:29.934296Z node 64 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 64, TabletId: 72075186224037890 not found 2025-03-31T16:19:29.935148Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-03-31T16:19:29.935463Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2025-03-31T16:19:29.936344Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-03-31T16:19:29.936583Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-03-31T16:19:29.937416Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-03-31T16:19:29.937689Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-03-31T16:19:29.937716Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 
72057594046644480 2025-03-31T16:19:29.937760Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-03-31T16:19:29.939011Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2 2025-03-31T16:19:29.939050Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-03-31T16:19:29.939358Z node 64 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[64:7488009062868399323:2325];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-03-31T16:19:29.944239Z node 64 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[64:7488009062868399325:2326];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-03-31T16:19:29.946338Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2025-03-31T16:19:29.946380Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-03-31T16:19:29.952368Z node 64 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[64:7488009062868399321:2324];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-03-31T16:19:29.959287Z node 64 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 64, TabletId: 72075186224037889 not found 2025-03-31T16:19:29.965743Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:1 2025-03-31T16:19:29.965790Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2025-03-31T16:19:29.965857Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-03-31T16:19:29.965877Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-03-31T16:19:29.965916Z node 64 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-03-31T16:19:34.685081Z node 67 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[67:7488009081856145554:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:34.685187Z node 67 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d67/r3tmp/tmpNLLWp8/pdisk_1.dat 2025-03-31T16:19:34.865397Z node 67 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:19:34.897552Z node 67 :HIVE WARN: HIVE#72057594037968897 Node(67, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:19:34.897674Z node 67 :HIVE WARN: HIVE#72057594037968897 Node(67, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:19:34.900531Z node 67 :HIVE WARN: HIVE#72057594037968897 Node(67, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15631, node 67 2025-03-31T16:19:34.967752Z node 67 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:19:34.967776Z node 67 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:19:34.967790Z node 67 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 
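
Note: the CreateColumnStore proposal earlier in this test dump spells out the complete LogStore schema: four NOT NULL key columns (timestamp, resource_type, resource_id, uid) plus six payload columns. The test drives the LogStore API directly rather than DDL; purely as a reading aid, the same shape restated as a hypothetical standalone column table in YQL, where the column list and key order come from the log line while the table path and PARTITION BY clause are assumptions:

    -- Restatement of the schema dumped above; not what the test executes.
    CREATE TABLE `/Root/LogStore/log1` (  -- path is hypothetical
        timestamp Uint8 NOT NULL,
        resource_type Utf8 NOT NULL,
        resource_id Utf8 NOT NULL,
        uid Utf8 NOT NULL,
        level Int32,
        message Utf8,
        json_payload JsonDocument,
        request_id Utf8,
        ingested_at Timestamp,
        saved_at Timestamp,
        PRIMARY KEY (timestamp, resource_type, resource_id, uid)
    )
    PARTITION BY HASH(timestamp, resource_type, resource_id, uid)  -- assumed
    WITH (STORE = COLUMN);
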
2025-03-31T16:19:34.967958Z node 67 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9821 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:19:35.256220Z node 67 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... self_check_result: GOOD issue_log { id: "YELLOW-f489-1231c6b1" status: YELLOW message: "Database has compute issues" location { database { name: "/Root" } } reason: "YELLOW-1ba8-1231c6b1" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-1ba8-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-e9e2-1231c6b1-67" reason: "YELLOW-e9e2-1231c6b1-68" reason: "YELLOW-e9e2-1231c6b1-69" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-e9e2-1231c6b1-67" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 67 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-e9e2-1231c6b1-68" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 68 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-e9e2-1231c6b1-69" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 69 host: "::1" port: 12003 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } location { id: 67 host: "::1" port: 12001 } >> YdbOlapStore::LogPagingAfter [GOOD] >> TKeyValueTest::TestWriteReadDeleteWithRestartsAndCatchCollectGarbageEvents >> BasicUsage::WriteSessionCloseIgnoresWrites [GOOD] >> TKeyValueTest::TestWriteReadDeleteWithRestartsAndCatchCollectGarbageEvents [GOOD] >> TKeyValueTest::TestWriteLongKey >> Cdc::DropIndex [GOOD] >> Cdc::DisableStream >> Cdc::ShouldDeliverChangesOnSplitMerge [GOOD] >> Cdc::ResolvedTimestamps >> KeyValueReadStorage::ReadError [GOOD] >> KeyValueReadStorage::ReadErrorWithWrongGroupId [GOOD] >> KeyValueReadStorage::ReadErrorWithUncorrectCookie [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::WriteSessionCloseIgnoresWrites [GOOD] Test command err: 2025-03-31T16:19:10.111177Z :WriteSessionCloseWaitsForWrites INFO: Random seed for debugging is 1743437950111146 2025-03-31T16:19:10.436069Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008978708258651:2076];send_to=[0:7307199536658146131:7762515]; 
2025-03-31T16:19:10.436302Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:19:10.815364Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-31T16:19:10.826104Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008978373847066:2215];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00165f/r3tmp/tmpC7SJch/pdisk_1.dat 2025-03-31T16:19:10.826609Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:19:10.826952Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-31T16:19:11.500001Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:19:11.509672Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:19:11.521315Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:19:11.521410Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:19:11.528187Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:19:11.528255Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:19:11.540841Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:19:11.541792Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:19:11.544352Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14731, node 1 2025-03-31T16:19:11.780473Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/0l5c/00165f/r3tmp/yandex210Zjg.tmp 2025-03-31T16:19:11.780498Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/0l5c/00165f/r3tmp/yandex210Zjg.tmp 2025-03-31T16:19:11.780673Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/0l5c/00165f/r3tmp/yandex210Zjg.tmp 2025-03-31T16:19:11.780818Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:19:11.893517Z INFO: TTestServer started on Port 7251 GrpcPort 14731 TClient is connected to server localhost:7251 PQClient connected to localhost:14731 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:19:12.354507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-03-31T16:19:12.473431Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976720660, at schemeshard: 72057594046644480 2025-03-31T16:19:14.844289Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008995888128864:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:14.844396Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008995888128844:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:14.844484Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:14.865116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720661:3, at schemeshard: 72057594046644480 2025-03-31T16:19:14.867575Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008995888128904:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:14.867991Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:14.872760Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008995553716433:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:14.872922Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008995553716408:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:14.873603Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:14.884803Z node 2 :TX_PROXY ERROR: Actor# [2:7488008995553716438:2125] txid# 281474976710657, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-31T16:19:14.893112Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488008995553716437:2314], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720661 completed, doublechecking } 2025-03-31T16:19:14.893489Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008995888128873:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720661 completed, doublechecking } 2025-03-31T16:19:14.993313Z node 2 :TX_PROXY ERROR: Actor# [2:7488008995553716465:2131] txid# 281474976710658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:19:14.998119Z node 1 :TX_PROXY ERROR: Actor# [1:7488008995888128963:2694] txid# 281474976720662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:19:15.194526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480 2025-03-31T16:19:15.196613Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488008995888128982:2349], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:19:15.196821Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTc1NDYzODQtNDk1NzRlNzQtZDFkODRjMjUtYzU3NTU0YTg=, ActorId: [1:7488008995888128841:2337], ActorState: ExecuteState, TraceId: 01jqpfzzqm6b3vc4k0878mynms, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:19:15.199921Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7488008995553716476:2318], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:19:15.200101Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YzczYTRmNzYtZDZjYWZhYy1lNjY3MzIzYS1kN2RhZjYxOA==, ActorId: [2:7488008995553716406:2309], ActorState: ExecuteState, TraceId: 01jqpfzzsna9mwv3z3dghmz6bz, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:19:15.229224Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T16:19:15.229224Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } i ... 72075186224037893 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 12 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } Path: "/Root/PQ/rt3.dc1--test-topic" name rt3.dc1--test-topic version1 CallPersQueueGRPC request to localhost:21872 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-03-31T16:19:35.805594Z node 3 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:21872 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-03-31T16:19:36.307946Z node 3 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:21872 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-03-31T16:19:36.811209Z node 3 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 1 ErrorCode: OK MetaResponse { CmdGetTopicMetadataResult { TopicInfo { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 
86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } Version: 1 LocalDC: true Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lzop" } TopicPath: "/Root/PQ/rt3.dc1--test-topic" YdbDatabasePath: "/Root" Consumers { Name: "user" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } Version: 0 Important: false } } ErrorCode: OK } } } === Topic created, have version: 1 2025-03-31T16:19:36.818750Z :DEBUG: [] MessageGroupId [src] SessionId [] Write session: try to update token 2025-03-31T16:19:36.819271Z :INFO: [] MessageGroupId [src] SessionId [] Write session: Do CDS request 2025-03-31T16:19:36.819322Z :INFO: [] MessageGroupId [src] SessionId [] Start write session. Will connect to endpoint: localhost:21872 2025-03-31T16:19:36.823955Z :DEBUG: [] MessageGroupId [src] SessionId [] Write session: send init request: init_request { topic: "test-topic" message_group_id: "src" } 2025-03-31T16:19:36.824726Z node 3 :PQ_WRITE_PROXY DEBUG: new grpc connection 2025-03-31T16:19:36.824762Z node 3 :PQ_WRITE_PROXY DEBUG: new session created cookie 1 2025-03-31T16:19:36.825078Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: grpc read done: success: 1 data: init_request { topic: "test-topic" message_group_id: "src" } 2025-03-31T16:19:36.825193Z node 3 :PQ_WRITE_PROXY INFO: session request cookie: 1 topic: "test-topic" message_group_id: "src" from ipv6:[::1]:58258 2025-03-31T16:19:36.825216Z node 3 :PQ_WRITE_PROXY INFO: write session: cookie=1 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:58258 proto=v1 topic=test-topic durationSec=0 2025-03-31T16:19:36.825226Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2025-03-31T16:19:36.826617Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: describe result for acl check 2025-03-31T16:19:36.826797Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2025-03-31T16:19:36.826816Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-03-31T16:19:36.826824Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2025-03-31T16:19:36.826844Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7488009091696415122:2526] (SourceId=src, PreferedPartition=(NULL)) StartKqpSession 2025-03-31T16:19:36.829209Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7488009091696415122:2526] (SourceId=src, PreferedPartition=(NULL)) Select from the table 2025-03-31T16:19:36.980306Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7488009091696415122:2526] (SourceId=src, PreferedPartition=(NULL)) RequestPQRB 
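=== Note: the TPartitionChooser records above print TTableHelper's YQL statements on a single line; they are reflowed here for readability (text verbatim from the log, whitespace only). This is the source-id-to-partition mapping kept in /Root/PQ/SourceIdMeta2 — first the lookup, then the upsert that persists the chosen partition:

    --!syntax_v1
    DECLARE $Hash AS Uint32;
    DECLARE $Topic AS Utf8;
    DECLARE $SourceId AS Utf8;
    SELECT Partition, CreateTime, AccessTime, SeqNo
    FROM `/Root/PQ/SourceIdMeta2`
    WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId;

    --!syntax_v1
    DECLARE $SourceId AS Utf8;
    DECLARE $Topic AS Utf8;
    DECLARE $Hash AS Uint32;
    DECLARE $Partition AS Uint32;
    DECLARE $CreateTime AS Uint64;
    DECLARE $AccessTime AS Uint64;
    DECLARE $SeqNo AS Uint64;
    UPSERT INTO `/Root/PQ/SourceIdMeta2`
        (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo)
    VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo);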
2025-03-31T16:19:36.980789Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7488009091696415176:2526] connected; active server actors: 1 2025-03-31T16:19:36.981097Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7488009091696415122:2526] (SourceId=src, PreferedPartition=(NULL)) Received partition 0 from PQRB for SourceId=src 2025-03-31T16:19:36.981121Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7488009091696415122:2526] (SourceId=src, PreferedPartition=(NULL)) Update the table 2025-03-31T16:19:36.981455Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7488009091696415176:2526] disconnected; active server actors: 1 2025-03-31T16:19:36.981482Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7488009091696415176:2526] disconnected no session 2025-03-31T16:19:37.062333Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7488009091696415122:2526] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2025-03-31T16:19:37.062373Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7488009091696415122:2526] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2025-03-31T16:19:37.062388Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7488009091696415122:2526] (SourceId=src, PreferedPartition=(NULL)) Start idle 2025-03-31T16:19:37.062419Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2025-03-31T16:19:37.062901Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [3:7488009095991382503:2526], now have 1 active actors on pipe 2025-03-31T16:19:37.062906Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 3, Generation: 1 2025-03-31T16:19:37.062932Z node 3 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-03-31T16:19:37.062954Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-03-31T16:19:37.063021Z node 3 :PERSQUEUE INFO: new Cookie src|e1531d80-1f3d26f5-817ba4cc-7b863cd2_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2025-03-31T16:19:37.063156Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. 
Partition: 0 2025-03-31T16:19:37.063241Z node 3 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-03-31T16:19:37.063342Z node 3 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-03-31T16:19:37.063362Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-03-31T16:19:37.063425Z node 3 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-03-31T16:19:37.063533Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|e1531d80-1f3d26f5-817ba4cc-7b863cd2_0 2025-03-31T16:19:37.064226Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1743437977064 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-31T16:19:37.064366Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|e1531d80-1f3d26f5-817ba4cc-7b863cd2_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2025-03-31T16:19:37.064552Z :INFO: [] MessageGroupId [src] SessionId [src|e1531d80-1f3d26f5-817ba4cc-7b863cd2_0] Write session: close. Timeout = 0 ms 2025-03-31T16:19:37.064591Z :INFO: [] MessageGroupId [src] SessionId [src|e1531d80-1f3d26f5-817ba4cc-7b863cd2_0] Write session will now close 2025-03-31T16:19:37.064630Z :DEBUG: [] MessageGroupId [src] SessionId [src|e1531d80-1f3d26f5-817ba4cc-7b863cd2_0] Write session: aborting 2025-03-31T16:19:37.065319Z :INFO: [] MessageGroupId [src] SessionId [src|e1531d80-1f3d26f5-817ba4cc-7b863cd2_0] Write session: gracefully shut down, all writes complete 2025-03-31T16:19:37.065378Z :DEBUG: [] MessageGroupId [src] SessionId [src|e1531d80-1f3d26f5-817ba4cc-7b863cd2_0] Write session is aborting and will not restart 2025-03-31T16:19:37.065460Z :DEBUG: [] MessageGroupId [src] SessionId [src|e1531d80-1f3d26f5-817ba4cc-7b863cd2_0] Write session: destroy 2025-03-31T16:19:37.065700Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|e1531d80-1f3d26f5-817ba4cc-7b863cd2_0 grpc read done: success: 0 data: 2025-03-31T16:19:37.065717Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|e1531d80-1f3d26f5-817ba4cc-7b863cd2_0 grpc read failed 2025-03-31T16:19:37.065740Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|e1531d80-1f3d26f5-817ba4cc-7b863cd2_0 grpc closed 2025-03-31T16:19:37.065753Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|e1531d80-1f3d26f5-817ba4cc-7b863cd2_0 is DEAD 2025-03-31T16:19:37.066530Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-03-31T16:19:37.066641Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7488009095991382503:2526] destroyed 2025-03-31T16:19:37.066670Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. Session was created >>> Ready to answer: ok 2025-03-31T16:19:37.127657Z :ERROR: [/Root] OnFederationDiscovery: Got error. Status: UNAVAILABLE. 
Description: 2025-03-31T16:19:40.564574Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:19:40.564601Z node 3 :IMPORT WARN: Table profiles were not loaded >> TKeyValueTest::TestInlineWriteReadWithRestartsWithNotCorrectUTF8NewApi [GOOD] >> TKeyValueTest::TestLargeWriteAndDelete ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> KeyValueReadStorage::ReadErrorWithUncorrectCookie [GOOD] Test command err: 2025-03-31T16:19:42.193192Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 Status# ERROR ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2025-03-31T16:19:42.193268Z 1 00h00m00.000000s :KEYVALUE ERROR: {KV316@keyvalue_storage_read_request.cpp:270} Unexpected EvGetResult. KeyValue# 1 Status# ERROR Deadline# 18446744073709551 Now# 0 SentAt# 1970-01-01T00:00:00.000000Z GotAt# 1743437982192 ErrorReason# 2025-03-31T16:19:42.200110Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 2 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2025-03-31T16:19:42.200182Z 1 00h00m00.000000s :KEYVALUE ERROR: {KV318@keyvalue_storage_read_request.cpp:240} Received EvGetResult from an unexpected storage group. KeyValue# 1 GroupId# 2 ExpecetedGroupId# 3 Status# OK Deadline# 18446744073709551 Now# 0 SentAt# 1970-01-01T00:00:00.000000Z GotAt# 1743437982199 ErrorReason# 2025-03-31T16:19:42.204878Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2025-03-31T16:19:42.204926Z 1 00h00m00.000000s :KEYVALUE ERROR: {KV319@keyvalue_storage_read_request.cpp:222} Received EvGetResult with an unexpected cookie. 
KeyValue# 1 Cookie# 1000 SentGets# 1 GroupId# 3 Status# OK Deadline# 18446744073709551 Now# 0 GotAt# 1743437982204 ErrorReason# ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbOlapStore::LogPagingAfter [GOOD] Test command err: 2025-03-31T16:17:25.006953Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008527850326319:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:25.007013Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d55/r3tmp/tmpyKYZe5/pdisk_1.dat 2025-03-31T16:17:25.443216Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:25.452521Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:25.452592Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 16927, node 1 2025-03-31T16:17:25.488357Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:17:25.488377Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:17:25.550152Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:17:25.579901Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:25.579926Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:25.579937Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:25.584122Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64663 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:25.842340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:64663 2025-03-31T16:17:26.119660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
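=== Note: YdbOlapStore::LogPagingAfter above runs against a column store; the query plan printed later in this trace reads columns level, message, resource_id, resource_type, timestamp, uid from /Root/OlapStore/log1 with key order (timestamp, resource_type, resource_id, uid) per ReadRangesKeys. A minimal sketch of a comparable standalone column table follows — the test itself creates the store through the scheme API (ESchemeOpCreateColumnStore), so this DDL, including the NOT NULL choices and the Int32 type for level, is an illustrative assumption, not the test's code:

    -- Sketch under stated assumptions: a standalone column table shaped like
    -- the log1 table implied by the query plan (key order from ReadRangesKeys).
    CREATE TABLE `/Root/OlapStore/log1` (
        timestamp     Timestamp NOT NULL,  -- key columns assumed NOT NULL here
        resource_type Utf8      NOT NULL,
        resource_id   Utf8      NOT NULL,
        uid           Utf8      NOT NULL,
        level         Int32,
        message       Utf8,
        PRIMARY KEY (timestamp, resource_type, resource_id, uid)
    ) WITH (STORE = COLUMN);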
2025-03-31T16:17:26.258531Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488008532145294672:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:17:26.258785Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488008532145294672:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:17:26.259127Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488008532145294672:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:17:26.259253Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488008532145294672:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:17:26.259387Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488008532145294672:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:17:26.259513Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488008532145294672:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:17:26.259661Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488008532145294672:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:17:26.259815Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488008532145294672:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:17:26.259930Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488008532145294672:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:17:26.260039Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488008532145294672:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:17:26.260156Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488008532145294672:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:17:26.260276Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488008532145294672:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:17:26.299938Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488008532145294668:2324];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:17:26.300016Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7488008532145294668:2324];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:17:26.300277Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488008532145294668:2324];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:17:26.300392Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488008532145294668:2324];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:17:26.300537Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488008532145294668:2324];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:17:26.300656Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488008532145294668:2324];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:17:26.300763Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488008532145294668:2324];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:17:26.300882Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488008532145294668:2324];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:17:26.301018Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488008532145294668:2324];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:17:26.301171Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488008532145294668:2324];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:17:26.301289Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488008532145294668:2324];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:17:26.301384Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488008532145294668:2324];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:17:26.338097Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488008532145294670:2325];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:17:26.338175Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488008532145294670:2325];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:17:26.338431Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7488008532145294670:2325];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:17:26.338578Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488008532145294670:2325];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:17:26.338706Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488008532145294670:2325];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:17:26.338828Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488008532145294670:2325];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:17:26.338933Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488008532145294670:2325];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:17:26.339045Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488008532145294670:2325];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:17:26.339379Z node 1 :TX_COLUMNSHARD WARN: tablet_id=720751862240 ... roblem=finish_compute_actor;tx_id=281474976710670;task_id=66;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-03-31T16:19:39.613192Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7488009104791540591:3521] TxId: 281474976710670. Ctx: { TraceId: 01jqpg0ptvaj3tzb9ykyrm4bfy, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=ZDNhNTMxNTMtOTExYzA5NTEtOGFiZTczOTQtMjIwNmU3NWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [28:7488009104791540682:3597], task: 66, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 1734 Tasks { TaskId: 66 StageId: 2 CpuTimeUs: 251 FinishTimeMs: 1743437979612 ComputeCpuTimeUs: 39 BuildCpuTimeUs: 212 HostName: "ghrun-tuhoqst4xe" NodeId: 28 CreateTimeMs: 1743437979519 } MaxMemoryUsage: 1048576 } 2025-03-31T16:19:39.613270Z node 28 :KQP_EXECUTER INFO: TxId: 281474976710670. Ctx: { TraceId: 01jqpg0ptvaj3tzb9ykyrm4bfy, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=ZDNhNTMxNTMtOTExYzA5NTEtOGFiZTczOTQtMjIwNmU3NWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [28:7488009104791540682:3597] 2025-03-31T16:19:39.618182Z node 28 :KQP_EXECUTER INFO: ActorId: [28:7488009104791540591:3521] TxId: 281474976710670. Ctx: { TraceId: 01jqpg0ptvaj3tzb9ykyrm4bfy, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=ZDNhNTMxNTMtOTExYzA5NTEtOGFiZTczOTQtMjIwNmU3NWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Full stats: { CpuTimeUs: 186538 DurationUs: 169429 Tables { TablePath: "/Root/OlapStore/log1" } ExecuterCpuTimeUs: 84228 StartTimeMs: 1743437979443 FinishTimeMs: 1743437979613 Stages { StageGuid: "3536612d-f0c9c7c3-28f516f5-d1482684" Program: "(\n(declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType \'Timestamp)) (OptionalType (DataType \'Utf8)) (OptionalType (DataType \'Utf8)) (OptionalType (DataType \'Utf8)) (DataType \'Int32)) (TupleType (OptionalType (DataType \'Timestamp)) (OptionalType (DataType \'Utf8)) (OptionalType (DataType \'Utf8)) (OptionalType (DataType \'Utf8)) (DataType \'Int32))))))\n(return (lambda \'() (block \'(\n (let $1 (KqpTable \'\"/Root/OlapStore/log1\" \'\"72057594046644480:3\" \'\"\" \'1))\n (let $2 (OptionalType (DataType \'Utf8)))\n (let $3 (TupleType (OptionalType (DataType \'Timestamp)) $2 $2 $2 (DataType \'Int32)))\n (let $4 \'(\'\"level\" \'\"message\" \'\"resource_id\" \'\"resource_type\" \'\"timestamp\" \'\"uid\"))\n (let $5 \'(\'\"timestamp\" \'\"resource_type\" \'\"resource_id\" \'\"uid\"))\n (let $6 \'(\'(\'\"UsedKeyColumns\" $5) \'(\'\"ExpectedMaxRanges\" \'4) \'(\'\"PointPrefixLen\" \'0)))\n (let $7 (KqpWideReadOlapTableRanges $1 %kqp%tx_result_binding_0_0 $4 \'() $6 (lambda \'($10) (block \'(\n (let $11 \'(\'eq \'\"resource_type\" (String \'\"app\")))\n (let $12 \'(\'eq \'\"resource_id\" (String \'\"resource_1\")))\n (return (KqpOlapFilter $10 (KqpOlapAnd $11 $12)))\n )))))\n (let $8 (Bool \'true))\n (let $9 \'(\'(\'4 $8) \'(\'3 $8) \'(\'2 $8) \'(\'5 $8)))\n (return (FromFlow (WideTop $7 (Uint64 \'50) $9)))\n))))\n)\n" ComputeActors { CpuTimeUs: 1307 Tasks { TaskId: 60 CpuTimeUs: 607 FinishTimeMs: 1743437979594 Tables { TablePath: "/Root/OlapStore/log1" } ComputeCpuTimeUs: 38 BuildCpuTimeUs: 569 Sources { IngressName: "CS" Ingress { } } HostName: "ghrun-tuhoqst4xe" NodeId: 28 CreateTimeMs: 1743437979501 } MaxMemoryUsage: 1048576 } BaseTimeMs: 1743437979569 } Stages { StageId: 2 StageGuid: "5928d270-c00d1601-8e4fc514-a61bf651" Program: 
"(\n(return (lambda \'($1) (FromFlow (NarrowMap (ToFlow $1) (lambda \'($2 $3 $4 $5 $6 $7) (AsStruct \'(\'\"level\" $2) \'(\'\"message\" $3) \'(\'\"resource_id\" $4) \'(\'\"resource_type\" $5) \'(\'\"timestamp\" $6) \'(\'\"uid\" $7)))))))\n)\n" ComputeActors { CpuTimeUs: 1734 Tasks { TaskId: 66 StageId: 2 CpuTimeUs: 251 FinishTimeMs: 1743437979612 ComputeCpuTimeUs: 39 BuildCpuTimeUs: 212 HostName: "ghrun-tuhoqst4xe" NodeId: 28 CreateTimeMs: 1743437979519 } MaxMemoryUsage: 1048576 } BaseTimeMs: 1743437979569 } Stages { StageId: 1 StageGuid: "7fe36037-54f770cf-a6668c82-d1309616" Program: "(\n(return (lambda \'($1) (block \'(\n (let $2 (Bool \'true))\n (let $3 \'(\'(\'4 $2) \'(\'3 $2) \'(\'2 $2) \'(\'5 $2)))\n (let $4 (WideTop (ToFlow $1) (Uint64 \'50) $3))\n (let $5 (Bool \'false))\n (let $6 \'(\'(\'4 $5) \'(\'3 $5) \'(\'2 $5) \'(\'5 $5)))\n (return (FromFlow (WideSort $4 $6)))\n))))\n)\n" BaseTimeMs: 1743437979569 } TxPlansWithStats: "{\"Node Type\":\"Phase\",\"PlanNodeId\":7,\"Plans\":[{\"Node Type\":\"ResultSet_1\",\"PlanNodeId\":6,\"PlanNodeType\":\"ResultSet\",\"Plans\":[{\"Node Type\":\"Stage\",\"PlanNodeId\":5,\"Plans\":[{\"Node Type\":\"Merge\",\"PlanNodeId\":4,\"PlanNodeType\":\"Connection\",\"Plans\":[{\"Node Type\":\"Sort-Top\",\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Name\":\"Sort\",\"SortBy\":\"[row.timestamp,row.resource_type,row.resource_id,row.uid]\"},{\"Inputs\":[{\"ExternalPlanNodeId\":2}],\"Limit\":\"50\",\"Name\":\"Top\",\"TopBy\":\"[row.timestamp,row.resource_type,row.resource_id,row.uid]\"}],\"PlanNodeId\":3,\"Plans\":[{\"Node Type\":\"UnionAll\",\"PlanNodeId\":2,\"PlanNodeType\":\"Connection\",\"Plans\":[{\"Node Type\":\"Top-Filter-TableRangeScan\",\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Limit\":\"50\",\"Name\":\"Top\",\"TopBy\":\"[row.timestamp,row.resource_type,row.resource_id,row.uid]\"},{\"E-Cost\":\"No estimate\",\"E-Rows\":\"No estimate\",\"E-Size\":\"No estimate\",\"Inputs\":[{\"InternalOperatorId\":2}],\"Name\":\"Filter\",\"Predicate\":\"resource_type == \\\"app\\\" AND resource_id == \\\"resource_1\\\"\",\"Pushdown\":\"True\"},{\"E-Cost\":\"No estimate\",\"E-Rows\":\"No estimate\",\"E-Size\":\"No estimate\",\"Inputs\":[],\"Name\":\"TableRangeScan\",\"Path\":\"\\/Root\\/OlapStore\\/log1\",\"ReadColumns\":[\"level\",\"message\",\"resource_id\",\"resource_type\",\"timestamp\",\"uid\"],\"ReadRanges\":[\"timestamp (3000000, +∞)\",\"resource_type (nginx, +∞)\",\"resource_id (resource_), +∞)\",\"uid (10, 
+∞)\"],\"ReadRangesExpectedSize\":4,\"ReadRangesKeys\":[\"timestamp\",\"resource_type\",\"resource_id\",\"uid\"],\"Scan\":\"Parallel\",\"SsaProgram\":{\"Command\":[{\"Assign\":{\"Column\":{\"Id\":11},\"Constant\":{\"Bytes\":\"app\"}}},{\"Assign\":{\"Column\":{\"Id\":12},\"Function\":{\"Arguments\":[{\"Id\":6},{\"Id\":11}],\"FunctionType\":2,\"KernelIdx\":0,\"YqlOperationId\":11}}},{\"Assign\":{\"Column\":{\"Id\":13},\"Constant\":{\"Bytes\":\"resource_1\"}}},{\"Assign\":{\"Column\":{\"Id\":14},\"Function\":{\"Arguments\":[{\"Id\":3},{\"Id\":13}],\"FunctionType\":2,\"KernelIdx\":1,\"YqlOperationId\":11}}},{\"Assign\":{\"Column\":{\"Id\":15},\"Function\":{\"Arguments\":[{\"Id\":12},{\"Id\":14}],\"FunctionType\":2,\"KernelIdx\":2,\"YqlOperationId\":0}}},{\"Filter\":{\"Predicate\":{\"Id\":15}}},{\"Projection\":{\"Columns\":[{\"Id\":7},{\"Id\":1},{\"Id\":3},{\"Id\":6},{\"Id\":5},{\"Id\":4}]}}],\"Version\":5},\"Table\":\"OlapStore\\/log1\"}],\"PlanNodeId\":1,\"StageGuid\":\"3536612d-f0c9c7c3-28f516f5-d1482684\",\"Stats\":{\"BaseTimeMs\":1743437979569,\"ComputeNodes\":[{\"CpuTimeUs\":1307,\"Tasks\":[{\"ComputeTimeUs\":38,\"FinishTimeMs\":1743437979594,\"Host\":\"ghrun-tuhoqst4xe\",\"NodeId\":28,\"TaskId\":60}]}],\"FinishedTasks\":0,\"PhysicalStageId\":0,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"},\"Tables\":[\"OlapStore\\/log1\"]}],\"StageGuid\":\"\"}],\"StageGuid\":\"7fe36037-54f770cf-a6668c82-d1309616\",\"Stats\":{\"BaseTimeMs\":1743437979569,\"FinishedTasks\":0,\"PhysicalStageId\":1,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"SortColumns\":[\"timestamp (Desc)\",\"resource_type (Desc)\",\"resource_id (Desc)\",\"uid (Desc)\"],\"StageGuid\":\"\"}],\"StageGuid\":\"5928d270-c00d1601-8e4fc514-a61bf651\",\"Stats\":{\"BaseTimeMs\":1743437979569,\"ComputeNodes\":[{\"CpuTimeUs\":1734,\"Tasks\":[{\"ComputeTimeUs\":39,\"FinishTimeMs\":1743437979612,\"Host\":\"ghrun-tuhoqst4xe\",\"NodeId\":28,\"TaskId\":66}]}],\"FinishedTasks\":0,\"PhysicalStageId\":2,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"StageGuid\":\"\"}],\"StageGuid\":\"\"}" StatConvertBytes: 3216 Extra { type_url: "type.googleapis.com/NKqpProto.TKqpExecutionExtraStats" value: "\010\002\022\014\010\246\010\020\267<\030\246\237\006 B" } } 2025-03-31T16:19:39.618252Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7488009104791540591:3521] TxId: 281474976710670. Ctx: { TraceId: 01jqpg0ptvaj3tzb9ykyrm4bfy, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=ZDNhNTMxNTMtOTExYzA5NTEtOGFiZTczOTQtMjIwNmU3NWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-03-31T16:19:39.618347Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7488009104791540591:3521] TxId: 281474976710670. Ctx: { TraceId: 01jqpg0ptvaj3tzb9ykyrm4bfy, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=ZDNhNTMxNTMtOTExYzA5NTEtOGFiZTczOTQtMjIwNmU3NWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.102310s ReadRows: 0 ReadBytes: 0 ru: 68 rate limiter was not found force flag: 1 2025-03-31T16:19:39.618468Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=ZDNhNTMxNTMtOTExYzA5NTEtOGFiZTczOTQtMjIwNmU3NWE=, ActorId: [28:7488009100496573240:3521], ActorState: ExecuteState, TraceId: 01jqpg0ptvaj3tzb9ykyrm4bfy, TEvTxResponse, CurrentTx: 2/2 response.status: SUCCESS 2025-03-31T16:19:39.619058Z node 28 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=28&id=ZDNhNTMxNTMtOTExYzA5NTEtOGFiZTczOTQtMjIwNmU3NWE=, ActorId: [28:7488009100496573240:3521], ActorState: ExecuteState, TraceId: 01jqpg0ptvaj3tzb9ykyrm4bfy, txInfo Status: Active Kind: ReadOnly TotalDuration: 0 ServerDuration: 181.243 QueriesCount: 1 2025-03-31T16:19:39.619168Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=ZDNhNTMxNTMtOTExYzA5NTEtOGFiZTczOTQtMjIwNmU3NWE=, ActorId: [28:7488009100496573240:3521], ActorState: ExecuteState, TraceId: 01jqpg0ptvaj3tzb9ykyrm4bfy, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-03-31T16:19:39.619313Z node 28 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=28&id=ZDNhNTMxNTMtOTExYzA5NTEtOGFiZTczOTQtMjIwNmU3NWE=, ActorId: [28:7488009100496573240:3521], ActorState: ExecuteState, TraceId: 01jqpg0ptvaj3tzb9ykyrm4bfy, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-03-31T16:19:39.619363Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=ZDNhNTMxNTMtOTExYzA5NTEtOGFiZTczOTQtMjIwNmU3NWE=, ActorId: [28:7488009100496573240:3521], ActorState: ExecuteState, TraceId: 01jqpg0ptvaj3tzb9ykyrm4bfy, EndCleanup, isFinal: 1 2025-03-31T16:19:39.619462Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=ZDNhNTMxNTMtOTExYzA5NTEtOGFiZTczOTQtMjIwNmU3NWE=, ActorId: [28:7488009100496573240:3521], ActorState: ExecuteState, TraceId: 01jqpg0ptvaj3tzb9ykyrm4bfy, Sent query response back to proxy, proxyRequestId: 5, proxyId: [28:7488009048956961500:2280] 2025-03-31T16:19:39.619508Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=ZDNhNTMxNTMtOTExYzA5NTEtOGFiZTczOTQtMjIwNmU3NWE=, ActorId: [28:7488009100496573240:3521], ActorState: unknown state, TraceId: 01jqpg0ptvaj3tzb9ykyrm4bfy, Cleanup temp tables: 0 2025-03-31T16:19:39.622855Z node 28 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437979000, txId: 18446744073709551615] shutting down 2025-03-31T16:19:39.623005Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=ZDNhNTMxNTMtOTExYzA5NTEtOGFiZTczOTQtMjIwNmU3NWE=, ActorId: [28:7488009100496573240:3521], ActorState: unknown state, TraceId: 01jqpg0ptvaj3tzb9ykyrm4bfy, Session actor destroyed >> TKeyValueTest::TestObtainLockNewApi >> TKeyValueTest::TestInlineWriteReadWithRestartsThenResponseOk >> TKeyValueTest::TestWriteTrimWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOkNewApi >> BasicUsage::ReadMirrored [GOOD] >> TKeyValueTest::TestIncorrectRequestThenResponseError >> TGroupMapperTest::Block42_1disk [GOOD] >> Cdc::RacyActivateAndEnqueue [GOOD] >> Cdc::RacyCreateAndSend >> YdbOlapStore::LogWithUnionAllDescending [GOOD] >> YdbOlapStore::LogTsRangeDescending >> TKeyValueTest::TestIncorrectRequestThenResponseError [GOOD] >> TKeyValueTest::TestIncrementalKeySet |89.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> 
TGroupMapperTest::Block42_1disk [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::ReadMirrored [GOOD] Test command err: 2025-03-31T16:19:10.053556Z :PropagateSessionClosed INFO: Random seed for debugging is 1743437950053532 2025-03-31T16:19:10.452171Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008978050949702:2091];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:10.461400Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:19:10.496652Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008980042568850:2146];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:10.497591Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:19:10.716982Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-31T16:19:10.773598Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00163e/r3tmp/tmpfqeM8w/pdisk_1.dat 2025-03-31T16:19:11.178302Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:19:11.200010Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:19:11.200123Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:19:11.201074Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:19:11.201117Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:19:11.207142Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:19:11.207283Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:19:11.207864Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17242, node 1 2025-03-31T16:19:11.522303Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/0l5c/00163e/r3tmp/yandex9NwlaL.tmp 2025-03-31T16:19:11.522324Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/0l5c/00163e/r3tmp/yandex9NwlaL.tmp 2025-03-31T16:19:11.522482Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/0l5c/00163e/r3tmp/yandex9NwlaL.tmp 2025-03-31T16:19:11.522585Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:19:11.769391Z INFO: TTestServer started on Port 26812 GrpcPort 17242 TClient is connected to server localhost:26812 PQClient connected to localhost:17242 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:19:12.201732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-03-31T16:19:14.356457Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008997222438246:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:14.356644Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008997222438255:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:14.356717Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:14.368489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480 2025-03-31T16:19:14.393276Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488008997222438283:2313], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2025-03-31T16:19:14.646210Z node 2 :TX_PROXY ERROR: Actor# [2:7488008997222438311:2130] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:19:14.686336Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7488008997222438328:2318], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:19:14.686613Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YWY3YTYwNWItYTFlZjEwZjEtYjNkNzIxMDEtNzU4NWNkZDA=, ActorId: [2:7488008997222438243:2307], ActorState: ExecuteState, TraceId: 01jqpfzz9e3fp8vexb0aqwk3eh, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:19:14.686480Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488008995230819917:2343], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:19:14.691154Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T16:19:14.693478Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjlhZTNkZTktOWVkMmQ2NjgtNmFhYjJlNWUtY2JhZGVkMDI=, ActorId: [1:7488008995230819858:2336], ActorState: ExecuteState, TraceId: 01jqpfzza6encrtcqtefq9p55p, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:19:14.693838Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T16:19:14.749300Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:19:14.902402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:19:15.035247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:17242", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2025-03-31T16:19:15.413167Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01jqpg0020cz1zxyenp7e021kz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjlhZGJjYmUtY2E3MDk3ZWYtNDdhMjNiYzItNmFiYTMyNDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:19:15.451792Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008978050949702:2091];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:15.451853Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Subcribe to ClusterTracker from [1:7488008999525787733:3032] 2025-03-31T16:19:15.496339Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488008980042568850:2146];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:15.496397Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-03-31T16:19:21.304829Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 waiting... PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:17242 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-03-31T16:19:21.521637Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:17242 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 ... es 1 2025-03-31T16:19:43.091231Z node 3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_9079215532933047865_v1 read done: guid# 881a1df5-cc12237c-8494c500-5e509c6d, partition# TopicId: Topic rt3.dc1--test-topic-mirrored-from-dc3 in dc dc1 in database: Root, partition 0(assignId:1), size# 1562 2025-03-31T16:19:43.091255Z node 3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_9079215532933047865_v1 response to read: guid# 881a1df5-cc12237c-8494c500-5e509c6d 2025-03-31T16:19:43.091390Z node 3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_9079215532933047865_v1 Process answer. Aval parts: 0 2025-03-31T16:19:43.091588Z :DEBUG: [/Root] [/Root] [8a3c5221-cbb33253-5940765b-1a7f6721] [] Got ReadResponse, serverBytesSize = 1562, now ReadSizeBudget = 0, ReadSizeServerDelta = 8387046 2025-03-31T16:19:43.091663Z :DEBUG: [/Root] [/Root] [8a3c5221-cbb33253-5940765b-1a7f6721] [] In ContinueReadingDataImpl, ReadSizeBudget = 0, ReadSizeServerDelta = 8387046 2025-03-31T16:19:43.091862Z :DEBUG: [/Root] Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2025-03-31T16:19:43.091907Z :DEBUG: [/Root] [/Root] [8a3c5221-cbb33253-5940765b-1a7f6721] [] Returning serverBytesSize = 1562 to budget 2025-03-31T16:19:43.091932Z :DEBUG: [/Root] [/Root] [8a3c5221-cbb33253-5940765b-1a7f6721] [] In ContinueReadingDataImpl, ReadSizeBudget = 1562, ReadSizeServerDelta = 8387046 2025-03-31T16:19:43.092114Z :DEBUG: [/Root] [/Root] [8a3c5221-cbb33253-5940765b-1a7f6721] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 8388608 2025-03-31T16:19:43.092165Z :DEBUG: [/Root] Take Data. Partition 0. Read: {0, 0} (1-1) 2025-03-31T16:19:43.092191Z :DEBUG: [/Root] Take Data. Partition 0. Read: {1, 0} (2-2) 2025-03-31T16:19:43.092219Z :DEBUG: [/Root] Take Data. Partition 0. Read: {1, 1} (3-3) 2025-03-31T16:19:43.092241Z :DEBUG: [/Root] Take Data. Partition 0. 
Read: {1, 2} (4-4) 2025-03-31T16:19:43.092250Z node 3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_9079215532933047865_v1 grpc read done: success# 1, data# { read_request { bytes_size: 1562 } } >>> event from dataHandler: 2025-03-31T16:19:43.092335Z node 3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_9079215532933047865_v1 got read request: guid# 804de0df-fea91e5e-352b44e7-899058a2 DataReceived { Partition session id: 2 Topic: "test-topic" Partition: 0 Database name: dc3 Database path: /Root Database id: account-dc3 Message { Data: ..130 bytes.. Information: { Offset: 1 ProducerId: "src_id" SeqNo: 2 CreateTime: 2025-03-31T16:19:43.079000Z WriteTime: 2025-03-31T16:19:43.084000Z MessageGroupId: "src_id" Meta: { "ident": "unknown", "_ip": "ipv6:[::1]:56060", "logtype": "unknown", "server": "ipv6:[::1]:56060" } MessageMeta: { } } Partition session id: 2 Topic: "test-topic" Partition: 0 Database name: dc3 Database path: /Root Database id: account-dc3 } Message { Data: ..240 bytes.. Information: { Offset: 2 ProducerId: "src_id" SeqNo: 3 CreateTime: 2025-03-31T16:19:43.079000Z WriteTime: 2025-03-31T16:19:43.085000Z MessageGroupId: "src_id" Meta: { "ident": "unknown", "logtype": "unknown", "_ip": "ipv6:[::1]:56060", "server": "ipv6:[::1]:56060" } MessageMeta: { } } Partition session id: 2 Topic: "test-topic" Partition: 0 Database name: dc3 Database path: /Root Database id: account-dc3 } Message { Data: ..350 bytes.. Information: { Offset: 3 ProducerId: "src_id" SeqNo: 4 CreateTime: 2025-03-31T16:19:43.079000Z WriteTime: 2025-03-31T16:19:43.085000Z MessageGroupId: "src_id" Meta: { "ident": "unknown", "logtype": "unknown", "_ip": "ipv6:[::1]:56060", "server": "ipv6:[::1]:56060" } MessageMeta: { } } Partition session id: 2 Topic: "test-topic" Partition: 0 Database name: dc3 Database path: /Root Database id: account-dc3 } Message { Data: ..460 bytes.. Information: { Offset: 4 ProducerId: "src_id" SeqNo: 5 CreateTime: 2025-03-31T16:19:43.079000Z WriteTime: 2025-03-31T16:19:43.085000Z MessageGroupId: "src_id" Meta: { "ident": "unknown", "logtype": "unknown", "_ip": "ipv6:[::1]:56060", "server": "ipv6:[::1]:56060" } MessageMeta: { } } Partition session id: 2 Topic: "test-topic" Partition: 0 Database name: dc3 Database path: /Root Database id: account-dc3 } } >>> get 4 messages in this event 2025-03-31T16:19:43.092509Z :DEBUG: [/Root] [/Root] [8a3c5221-cbb33253-5940765b-1a7f6721] [] The application data is transferred to the client. 
Number of messages 4, size 1180 bytes 2025-03-31T16:19:43.092536Z :DEBUG: [/Root] [/Root] [8a3c5221-cbb33253-5940765b-1a7f6721] [] Returning serverBytesSize = 0 to budget 2025-03-31T16:19:43.180042Z :INFO: [/Root] MessageGroupId [src_id] SessionId [src_id|31cabc54-1edd8e9e-79bf938a-79e38e58_0] Write session will now close 2025-03-31T16:19:43.180121Z :DEBUG: [/Root] MessageGroupId [src_id] SessionId [src_id|31cabc54-1edd8e9e-79bf938a-79e38e58_0] Write session: aborting 2025-03-31T16:19:43.180883Z :DEBUG: [/Root] MessageGroupId [src_id] SessionId [src_id|31cabc54-1edd8e9e-79bf938a-79e38e58_0] Write session is aborting and will not restart 2025-03-31T16:19:43.180886Z :INFO: [/Root] MessageGroupId [src_id] SessionId [src_id|31cabc54-1edd8e9e-79bf938a-79e38e58_0] Write session: gracefully shut down, all writes complete >>> Writes to test-topic-mirrored-from-dc3 successful 2025-03-31T16:19:43.180950Z :DEBUG: [/Root] MessageGroupId [src_id] SessionId [src_id|31cabc54-1edd8e9e-79bf938a-79e38e58_0] Write session: destroy 2025-03-31T16:19:43.181137Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: src_id|31cabc54-1edd8e9e-79bf938a-79e38e58_0 grpc read done: success: 0 data: 2025-03-31T16:19:43.181165Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: src_id|31cabc54-1edd8e9e-79bf938a-79e38e58_0 grpc read failed 2025-03-31T16:19:43.181200Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: src_id|31cabc54-1edd8e9e-79bf938a-79e38e58_0 grpc closed 2025-03-31T16:19:43.181220Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: src_id|31cabc54-1edd8e9e-79bf938a-79e38e58_0 is DEAD 2025-03-31T16:19:43.181655Z :INFO: [/Root] [/Root] [8a3c5221-cbb33253-5940765b-1a7f6721] Closing read session. Close timeout: 18446744073709.551615s 2025-03-31T16:19:43.181705Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:3:4:0 -:test-topic-mirrored-from-dc3:0:2:4:0 -:test-topic-mirrored-from-dc2:0:1:4:0 2025-03-31T16:19:43.181748Z :INFO: [/Root] [/Root] [8a3c5221-cbb33253-5940765b-1a7f6721] Counters: { Errors: 0 CurrentSessionLifetimeMs: 375 BytesRead: 3600 MessagesRead: 15 BytesReadCompressed: 3600 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-31T16:19:43.181993Z :INFO: [/Root] [/Root] [8a3c5221-cbb33253-5940765b-1a7f6721] Closing read session. Close timeout: 0.000000s 2025-03-31T16:19:43.182033Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:3:4:0 -:test-topic-mirrored-from-dc3:0:2:4:0 -:test-topic-mirrored-from-dc2:0:1:4:0 2025-03-31T16:19:43.182084Z :INFO: [/Root] [/Root] [8a3c5221-cbb33253-5940765b-1a7f6721] Counters: { Errors: 0 CurrentSessionLifetimeMs: 375 BytesRead: 3600 MessagesRead: 15 BytesReadCompressed: 3600 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-31T16:19:43.182065Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037896 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-03-31T16:19:43.182116Z :INFO: [/Root] [/Root] [8a3c5221-cbb33253-5940765b-1a7f6721] Closing read session. 
Close timeout: 0.000000s 2025-03-31T16:19:43.182151Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:3:4:0 -:test-topic-mirrored-from-dc3:0:2:4:0 -:test-topic-mirrored-from-dc2:0:1:4:0 2025-03-31T16:19:43.182175Z :INFO: [/Root] [/Root] [8a3c5221-cbb33253-5940765b-1a7f6721] Counters: { Errors: 0 CurrentSessionLifetimeMs: 376 BytesRead: 3600 MessagesRead: 15 BytesReadCompressed: 3600 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-31T16:19:43.182248Z :NOTICE: [/Root] [/Root] [8a3c5221-cbb33253-5940765b-1a7f6721] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-03-31T16:19:43.182457Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037896] server disconnected, pipe [3:7488009121675657338:2616] destroyed 2025-03-31T16:19:43.182481Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037896, Partition: 0, State: StateIdle] TPartition::DropOwner. 2025-03-31T16:19:43.182976Z node 3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_9079215532933047865_v1 grpc read done: success# 0, data# { } 2025-03-31T16:19:43.182993Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_9079215532933047865_v1 grpc read failed 2025-03-31T16:19:43.183021Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_9079215532933047865_v1 closed 2025-03-31T16:19:43.183723Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_9079215532933047865_v1 is DEAD 2025-03-31T16:19:43.184009Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Destroy direct read session shared/user_3_1_9079215532933047865_v1 2025-03-31T16:19:43.184015Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_3_1_9079215532933047865_v1 2025-03-31T16:19:43.184042Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7488009117380689919:2609] destroyed 2025-03-31T16:19:43.184044Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037896] server disconnected, pipe [3:7488009117380689917:2607] destroyed 2025-03-31T16:19:43.184074Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Destroy direct read session shared/user_3_1_9079215532933047865_v1 2025-03-31T16:19:43.184087Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_3_1_9079215532933047865_v1 2025-03-31T16:19:43.184099Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [3:7488009117380689918:2608] destroyed 2025-03-31T16:19:43.184116Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_3_1_9079215532933047865_v1 2025-03-31T16:19:43.184132Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_3_1_9079215532933047865_v1 2025-03-31T16:19:43.184227Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037897][rt3.dc1--test-topic-mirrored-from-dc3] pipe [3:7488009117380689907:2602] disconnected; active server actors: 1 2025-03-31T16:19:43.184240Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7488009117380689908:2602] disconnected; active server actors: 1 2025-03-31T16:19:43.184248Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037897][rt3.dc1--test-topic-mirrored-from-dc3] pipe [3:7488009117380689907:2602] client user disconnected session shared/user_3_1_9079215532933047865_v1 2025-03-31T16:19:43.184254Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [3:7488009117380689908:2602] client user disconnected session shared/user_3_1_9079215532933047865_v1 2025-03-31T16:19:43.184315Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037895][rt3.dc1--test-topic-mirrored-from-dc2] pipe [3:7488009117380689909:2602] disconnected; active server actors: 1 2025-03-31T16:19:43.184327Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037895][rt3.dc1--test-topic-mirrored-from-dc2] pipe [3:7488009117380689909:2602] client user disconnected session shared/user_3_1_9079215532933047865_v1 >> TKeyValueCollectorTest::TestKeyValueCollectorSingle >> 
TKeyValueCollectorTest::TestKeyValueCollectorSingle [GOOD] >> TKeyValueCollectorTest::TestKeyValueCollectorSingleWithOneError >> TKeyValueCollectorTest::TestKeyValueCollectorSingleWithOneError [GOOD] >> TKeyValueCollectorTest::TestKeyValueCollectorMultiple >> TKeyValueTest::TestIncrementalKeySet [GOOD] >> TKeyValueTest::TestGetStatusWorksNewApi >> TKeyValueCollectorTest::TestKeyValueCollectorMultiple [GOOD] >> TKeyValueTest::TestWrite200KDeleteThenResponseErrorNewApi |89.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueCollectorTest::TestKeyValueCollectorMultiple [GOOD] >> Cdc::DisableStream [GOOD] >> Cdc::InitialScan >> TKeyValueTest::TestWrite200KDeleteThenResponseError [GOOD] >> TKeyValueTest::TestSetExecutorFastLogPolicy >> TKeyValueTest::TestBasicWriteRead [GOOD] >> TKeyValueTest::TestBasicWriteReadOverrun >> BsControllerConfig::MergeIntersectingBoxes [GOOD] >> BsControllerConfig::MoveGroups >> TKeyValueTest::TestRenameWorks [GOOD] >> TKeyValueTest::TestRenameWorksNewApi >> TKeyValueTest::TestInlineWriteReadRangeLimitThenLimitWorks [GOOD] >> TKeyValueTest::TestInlineWriteReadRangeLimitThenLimitWorksNewApi >> BsControllerConfig::ExtendByCreatingSeparateBox [GOOD] >> BsControllerConfig::ExtendBoxAndStoragePool >> Cdc::RacyCreateAndSend [GOOD] >> Cdc::RacySplitAndDropTable >> KqpScripting::StreamExecuteYqlScriptWriteCancelAfterBruteForced [GOOD] >> KqpScripting::StreamOperationTimeout >> Cdc::ResolvedTimestamps [GOOD] >> Cdc::ResolvedTimestampsMultiplePartitions >> TKeyValueTest::TestWriteReadDeleteWithRestartsThenResponseOkWithNewApi [GOOD] >> TKeyValueTest::TestWriteLongKey [GOOD] >> YdbOlapStore::DuplicateRows [GOOD] >> YdbOlapStore::LogExistingRequest >> KqpLimits::WaitCAsStateOnAbort ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteReadDeleteWithRestartsThenResponseOkWithNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:52:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:52:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! 
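The read-session close summary a few lines above packs one tuple per partition stream into a single line, in the order `cluster:topic:partition:stream-id:read-offset:committed-offset` (a `-` cluster means unspecified), followed by a `Counters:` block. As a minimal self-contained sketch of how that summary line is assembled — this is illustrative modeling only, not the YDB SDK or the actual logging code, and all names here are hypothetical:

```cpp
#include <cstdint>
#include <iostream>
#include <sstream>
#include <string>
#include <vector>

// Hypothetical model of one partition stream as printed in the close summary:
// cluster:topic:partition:stream-id:read-offset:committed-offset
struct TPartitionStreamState {
    std::string Cluster;   // "-" when the cluster is unspecified
    std::string Topic;
    uint32_t Partition = 0;
    uint64_t StreamId = 0;
    uint64_t ReadOffset = 0;
    uint64_t CommittedOffset = 0;
};

std::string FormatCloseSummary(const std::vector<TPartitionStreamState>& streams) {
    std::ostringstream out;
    out << "Read/commit by partition streams "
           "(cluster:topic:partition:stream-id:read-offset:committed-offset):";
    for (const auto& s : streams) {
        out << ' ' << s.Cluster << ':' << s.Topic << ':' << s.Partition
            << ':' << s.StreamId << ':' << s.ReadOffset << ':' << s.CommittedOffset;
    }
    return out.str();
}

int main() {
    // Values taken from the transcript above: three partition streams,
    // each with read offset 4 and committed offset 0 at close time.
    std::vector<TPartitionStreamState> streams{
        {"-", "test-topic", 0, 3, 4, 0},
        {"-", "test-topic-mirrored-from-dc3", 0, 2, 4, 0},
        {"-", "test-topic-mirrored-from-dc2", 0, 1, 4, 0},
    };
    std::cout << FormatCloseSummary(streams) << '\n';
}
```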
new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:76:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:79:2057] recipient: [4:78:2110] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:81:2111] sender: [4:82:2057] recipient: [4:78:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:81:2111] Leader for TabletID 72057594037927937 is [4:81:2111] sender: [4:135:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:77:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:79:2110] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:81:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:82:2111] sender: [5:83:2057] recipient: [5:79:2110] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:82:2111] Leader for TabletID 72057594037927937 is [5:82:2111] sender: [5:136:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! 
new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:80:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:83:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:82:2113] Leader for TabletID 72057594037927937 is [7:85:2114] sender: [7:86:2057] recipient: [7:82:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:85:2114] Leader for TabletID 72057594037927937 is [7:85:2114] sender: [7:139:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:81:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:84:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:85:2057] recipient: [8:83:2113] Leader for TabletID 72057594037927937 is [8:86:2114] sender: [8:87:2057] recipient: [8:83:2113] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! new actor is[8:86:2114] Leader for TabletID 72057594037927937 is [8:86:2114] sender: [8:104:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:83:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:86:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:85:2115] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:89:2057] recipient: [9:85:2115] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! 
new actor is[9:88:2116] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:142:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:52:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:52:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:83:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:86:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:87:2057] recipient: [10:85:2115] Leader for TabletID 72057594037927937 is [10:88:2116] sender: [10:89:2057] recipient: [10:85:2115] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:88:2116] Leader for TabletID 72057594037927937 is [10:88:2116] sender: [10:142:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:84:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:87:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:88:2057] recipient: [11:86:2115] Leader for TabletID 72057594037927937 is [11:89:2116] sender: [11:90:2057] recipient: [11:86:2115] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! new actor is[11:89:2116] Leader for TabletID 72057594037927937 is [11:89:2116] sender: [11:143:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:56:2097]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:85:2057] recipient: [12:36:2083] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:87:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:89:2057] recipient: [12:88:2116] Leader for TabletID 72057594037927937 is [12:90:2117] sender: [12:91:2057] recipient: [12:88:2116] !Reboot 72057594037927937 (actor [12:56:2097]) rebooted! !Reboot 72057594037927937 (actor [12:56:2097]) tablet resolver refreshed! 
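The repeating `!Reboot ... on event NKikimr::TEvKeyValue::TEvX !` blocks above follow one pattern: the fixture replays the same scenario once per interesting event type, restarts the tablet the moment that event is first delivered, waits for the tablet resolver to refresh onto the new leader actor, and then checks the scenario still completes. A minimal sketch of that loop, assuming nothing about the real NKikimr test framework (the types and names below are hypothetical stand-ins):

```cpp
#include <iostream>
#include <string>
#include <vector>

// Hypothetical model of the reboot-on-event pattern seen in the log: the same
// scenario is replayed once per event type, and the "tablet" is restarted at
// the moment that event is first observed.
struct TScenario {
    // The event stream one clean run of the scenario produces, in order.
    std::vector<std::string> Events;
};

bool RunWithRebootOn(const TScenario& scenario, const std::string& rebootOn) {
    bool rebooted = false;
    for (const auto& ev : scenario.Events) {
        if (!rebooted && ev == rebootOn) {
            std::cout << "!Reboot on event " << ev << " !\n";
            rebooted = true;  // model: restart the tablet, then keep replaying
        }
    }
    // A real harness would re-drive the scenario against the restarted tablet
    // and verify the final response; here we only assert the event was seen.
    return rebooted;
}

int main() {
    TScenario scenario{{
        "NKikimr::TEvTabletPipe::TEvServerConnected",
        "NKikimr::TEvKeyValue::TEvExecuteTransaction",
        "NKikimr::TEvKeyValue::TEvIntermediate",
        "NKikimr::TEvKeyValue::TEvRead",
        "NKikimr::TEvKeyValue::TEvNotify",
    }};
    for (const auto& ev : scenario.Events) {
        std::cout << (RunWithRebootOn(scenario, ev) ? "[GOOD] " : "[FAIL] ")
                  << ev << '\n';
    }
}
```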
new actor is[12:90:2117] Leader for TabletID 72057594037927937 is [12:90:2117] sender: [12:110:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:50:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:57:2057] recipient: [13:50:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:74:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:56:2097]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:86:2057] recipient: [13:36:2083] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:89:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:90:2057] recipient: [13:88:2117] Leader for TabletID 72057594037927937 is [13:91:2118] sender: [13:92:2057] recipient: [13:88:2117] !Reboot 72057594037927937 (actor [13:56:2097]) rebooted! !Reboot 72057594037927937 (actor [13:56:2097]) tablet resolver refreshed! new actor is[13:91:2118] Leader for TabletID 72057594037927937 is [13:91:2118] sender: [13:111:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:52:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:57:2057] recipient: [14:52:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:74:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:89:2057] recipient: [14:36:2083] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:92:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:93:2057] recipient: [14:91:2120] Leader for TabletID 72057594037927937 is [14:94:2121] sender: [14:95:2057] recipient: [14:91:2120] !Reboot 72057594037927937 (actor [14:56:2097]) rebooted! !Reboot 72057594037927937 (actor [14:56:2097]) tablet resolver refreshed! new actor is[14:94:2121] Leader for TabletID 72057594037927937 is [14:94:2121] sender: [14:148:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:50:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:57:2057] recipient: [15:50:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:74:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:56:2097]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:89:2057] recipient: [15:36:2083] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:92:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:93:2057] recipient: [15:91:2120] Leader for TabletID 72057594037927937 is [15:94:2121] sender: [15:95:2057] recipient: [15:91:2120] !Reboot 72057594037927937 (actor [15:56:2097]) rebooted! !Reboot 72057594037927937 (actor [15:56:2097]) tablet resolver refreshed! 
new actor is[15:94:2121] Leader for TabletID 72057594037927937 is [15:94:2121] sender: [15:148:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:57:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:74:2057] recipient: [16:14:2061] >> TKeyValueTest::TestGetStatusWorksNewApi [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteLongKey [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:52:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:52:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:76:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:79:2057] recipient: [4:78:2110] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:81:2111] sender: [4:82:2057] recipient: [4:78:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! 
new actor is[4:81:2111] Leader for TabletID 72057594037927937 is [4:81:2111] sender: [4:135:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:77:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:81:2057] recipient: [5:79:2110] Leader for TabletID 72057594037927937 is [5:82:2111] sender: [5:83:2057] recipient: [5:79:2110] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:82:2111] Leader for TabletID 72057594037927937 is [5:82:2111] sender: [5:136:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:80:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:83:2057] recipient: [7:82:2113] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:85:2114] sender: [7:86:2057] recipient: [7:82:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! 
new actor is[7:85:2114] Leader for TabletID 72057594037927937 is [7:85:2114] sender: [7:139:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:81:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:84:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:85:2057] recipient: [8:83:2113] Leader for TabletID 72057594037927937 is [8:86:2114] sender: [8:87:2057] recipient: [8:83:2113] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! new actor is[8:86:2114] Leader for TabletID 72057594037927937 is [8:86:2114] sender: [8:140:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestGetStatusWorksNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:52:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:52:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:76:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:79:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:78:2110] Leader for TabletID 72057594037927937 is [4:81:2111] sender: [4:82:2057] recipient: [4:78:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:81:2111] Leader for TabletID 72057594037927937 is [4:81:2111] sender: [4:135:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvKeyValue::TEvGetStorageChannelStatus ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:76:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:79:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:78:2110] Leader for TabletID 72057594037927937 is [5:81:2111] sender: [5:82:2057] recipient: [5:78:2110] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:81:2111] Leader for TabletID 72057594037927937 is [5:81:2111] sender: [5:135:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:77:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:79:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:81:2057] recipient: [6:80:2110] Leader for TabletID 72057594037927937 is [6:82:2111] sender: [6:83:2057] recipient: [6:80:2110] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! 
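Each `Leader for TabletID <id> is [<actor>]` line above records the actor currently resolved as leader for that tablet; `[0:0:0]` means no leader is known yet, and after a reboot the `new actor is[...]` line should match the last leader recorded. A small self-contained sketch (hypothetical helper, not part of the test framework) that recovers the final leader per tablet from such transcript lines:

```cpp
#include <iostream>
#include <map>
#include <regex>
#include <string>
#include <vector>

// Hypothetical parser: keep the most recent leader actor per tablet id, so
// the "new actor is[...]" lines can be cross-checked against the transcript.
int main() {
    const std::vector<std::string> log{
        "Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057]",
        "Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057]",
        "Leader for TabletID 72057594037927937 is [4:81:2111] sender: [4:82:2057]",
    };
    const std::regex re(R"(Leader for TabletID (\d+) is \[([^\]]+)\])");
    std::map<std::string, std::string> leader;  // tablet id -> last seen actor
    for (const auto& line : log) {
        std::smatch m;
        if (std::regex_search(line, m, re) && m[2] != "0:0:0") {
            leader[m[1]] = m[2];  // skip [0:0:0]: no leader elected yet
        }
    }
    for (const auto& [tablet, actor] : leader) {
        std::cout << "tablet " << tablet << " leader is [" << actor << "]\n";
    }
}
```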
new actor is[6:82:2111] Leader for TabletID 72057594037927937 is [6:82:2111] sender: [6:136:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] >> TKeyValueTest::TestWriteReadWithRestartsThenResponseOkNewApi [GOOD] >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOk >> KqpStats::SysViewClientLost >> TKeyValueTest::TestWriteReadWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestWriteReadWhileWriteWorks >> KqpStats::MultiTxStatsFullYql >> TKeyValueTest::TestWriteReadDeleteWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestWriteReadDeleteWithRestartsAndCatchCollectGarbageEventsWithSlowInitialGC >> Cdc::InitialScan [GOOD] >> Cdc::InitialScanDebezium >> KqpExplain::ExplainStream >> KqpScripting::StreamOperationTimeout [GOOD] >> TKeyValueTest::TestWriteReadRangeDataLimitThenLimitWorks [GOOD] >> TKeyValueTest::TestWriteReadRangeDataLimitThenLimitWorksNewApi ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamOperationTimeout [GOOD] Test command err: Trying to start YDB, gRPC: 18984, MsgBus: 64413 2025-03-31T16:18:40.865761Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008851466447083:2132];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:40.866072Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000bfd/r3tmp/tmpClQUwB/pdisk_1.dat 2025-03-31T16:18:41.379313Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:41.381923Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:41.382003Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:41.387659Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18984, node 1 2025-03-31T16:18:41.602507Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:41.602532Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:41.602556Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:41.602675Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64413 TClient is connected to server localhost:64413 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:18:42.588358Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:42.635903Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:18:42.653836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:42.867102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:43.126049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:43.212801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:18:44.786489Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008868646317981:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:44.786594Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:45.480265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.543540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.590347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.647164Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.678354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.724016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:18:45.779261Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008872941285792:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:45.779316Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:45.779510Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008872941285797:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:18:45.785194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:18:45.799207Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008872941285799:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:18:45.867396Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008851466447083:2132];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:45.867443Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:18:45.901993Z node 1 :TX_PROXY ERROR: Actor# [1:7488008872941285855:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:18:47.404042Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437927413, txId: 281474976710672] shutting down 2025-03-31T16:18:47.677165Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437927707, txId: 281474976710675] shutting down 2025-03-31T16:18:47.979564Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437928008, txId: 281474976710678] shutting down 2025-03-31T16:18:48.300309Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437928323, txId: 281474976710681] shutting down 2025-03-31T16:18:48.592840Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437928624, txId: 281474976710684] shutting down 2025-03-31T16:18:48.937365Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437928911, txId: 281474976710687] shutting down 2025-03-31T16:18:49.289606Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437929310, txId: 281474976710690] shutting down 2025-03-31T16:18:49.680001Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437929709, txId: 281474976710693] shutting down 2025-03-31T16:18:50.007238Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437930024, txId: 281474976710696] shutting down 2025-03-31T16:18:50.310466Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437930332, txId: 281474976710699] shutting down 2025-03-31T16:18:50.611038Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437930640, txId: 281474976710702] shutting down 2025-03-31T16:18:50.918100Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437930948, txId: 281474976710705] shutting down 2025-03-31T16:18:51.235609Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437931263, txId: 281474976710708] shutting down 2025-03-31T16:18:51.541572Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437931571, txId: 281474976710711] shutting down 2025-03-31T16:18:51.834576Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437931837, txId: 281474976710714] shutting down 2025-03-31T16:18:52.129379Z node 1 :KQP_RESOURCE_MANAGER WARN: 
KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437932145, txId: 281474976710717] shutting down 2025-03-31T16:18:52.412622Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437932446, txId: 281474976710720] shutting down 2025-03-31T16:18:52.696614Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437932726, txId: 281474976710723] shutting down 2025-03-31T16:18:53.026725Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437933048, txId: 281474976710726] shutting down 2025-03-31T16:18:53.310117Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437933342, txId: 281474976710729] shutting down 2025-03-31T16:18:53.593373Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437933622, txId: 281474976710732] shutting down 2025-03-31T16:18:53.936108Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 174 ... .840007Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437982867, txId: 281474976711197] shutting down 2025-03-31T16:19:43.165909Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437983196, txId: 281474976711200] shutting down 2025-03-31T16:19:43.488263Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437983504, txId: 281474976711203] shutting down 2025-03-31T16:19:43.767812Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437983798, txId: 281474976711206] shutting down 2025-03-31T16:19:44.006698Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437984036, txId: 281474976711209] shutting down 2025-03-31T16:19:44.312621Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437984344, txId: 281474976711212] shutting down 2025-03-31T16:19:44.576693Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437984610, txId: 281474976711215] shutting down 2025-03-31T16:19:44.887948Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437984918, txId: 281474976711218] shutting down 2025-03-31T16:19:45.170421Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437985205, txId: 281474976711221] shutting down 2025-03-31T16:19:45.478145Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437985513, txId: 281474976711224] shutting down 2025-03-31T16:19:45.809869Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437985842, txId: 281474976711227] shutting down 2025-03-31T16:19:46.126549Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437986157, txId: 281474976711230] shutting down 2025-03-31T16:19:46.395351Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437986430, txId: 281474976711233] shutting down 2025-03-31T16:19:46.667605Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437986696, 
txId: 281474976711236] shutting down 2025-03-31T16:19:46.965009Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437986990, txId: 281474976711239] shutting down 2025-03-31T16:19:47.237370Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437987270, txId: 281474976711242] shutting down 2025-03-31T16:19:47.473197Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437987508, txId: 281474976711245] shutting down 2025-03-31T16:19:47.740100Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437987774, txId: 281474976711248] shutting down 2025-03-31T16:19:48.038449Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437988033, txId: 281474976711251] shutting down 2025-03-31T16:19:48.291942Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437988327, txId: 281474976711254] shutting down 2025-03-31T16:19:48.581467Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437988607, txId: 281474976711257] shutting down 2025-03-31T16:19:48.866941Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437988887, txId: 281474976711260] shutting down 2025-03-31T16:19:49.171192Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437989202, txId: 281474976711263] shutting down 2025-03-31T16:19:49.464923Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437989496, txId: 281474976711266] shutting down 2025-03-31T16:19:49.754596Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437989783, txId: 281474976711269] shutting down Trying to start YDB, gRPC: 8862, MsgBus: 19790 2025-03-31T16:19:50.544055Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488009151983314438:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:50.544187Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000bfd/r3tmp/tmpP3ajTk/pdisk_1.dat 2025-03-31T16:19:50.627553Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8862, node 2 2025-03-31T16:19:50.671171Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:19:50.671269Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:19:50.672747Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:19:50.682192Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:19:50.682219Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:19:50.682226Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:19:50.682361Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server 
localhost:19790 TClient is connected to server localhost:19790 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:19:51.062080Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:51.072936Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:51.129803Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:51.272241Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:51.363235Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:53.369645Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488009164868218108:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:53.369706Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:53.407360Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:19:53.431228Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:19:53.454379Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:19:53.478369Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:19:53.506728Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:19:53.533263Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:19:53.574703Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488009164868218615:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:53.574777Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488009164868218620:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:53.574785Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:53.578698Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:19:53.586215Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488009164868218622:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:19:53.643286Z node 2 :TX_PROXY ERROR: Actor# [2:7488009164868218677:3441] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> TKeyValueTest::TestWriteReadDeleteWithRestartsAndCatchCollectGarbageEventsWithSlowInitialGC [GOOD] >> Cdc::RacySplitAndDropTable [GOOD] >> Cdc::RenameTable >> TKeyValueTest::TestWriteReadRangeLimitThenLimitWorks [GOOD] >> TKeyValueTest::TestWriteReadRangeLimitThenLimitWorksNewApi >> Viewer::JsonStorageListingV1NodeIdFilter [GOOD] >> Viewer::JsonStorageListingV1PDiskIdFilter ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteReadDeleteWithRestartsAndCatchCollectGarbageEventsWithSlowInitialGC [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:52:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:52:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:78:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:79:2110] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:79:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! 
new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! 
new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:140:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:83:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:86:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:87:2057] recipient: [8:85:2115] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:89:2057] recipient: [8:85:2115] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! new actor is[8:88:2116] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:142:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:83:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:86:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:85:2115] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:89:2057] recipient: [9:85:2115] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! 
new actor is[9:88:2116] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:142:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:52:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:52:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:84:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:86:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:88:2057] recipient: [10:87:2115] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:90:2057] recipient: [10:87:2115] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:89:2116] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:143:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:85:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:88:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:89:2057] recipient: [11:87:2116] Leader for TabletID 72057594037927937 is [11:90:2117] sender: [11:91:2057] recipient: [11:87:2116] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! new actor is[11:90:2117] Leader for TabletID 72057594037927937 is [11:90:2117] sender: [11:110:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:56:2097]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:86:2057] recipient: [12:36:2083] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:89:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:90:2057] recipient: [12:88:2117] Leader for TabletID 72057594037927937 is [12:91:2118] sender: [12:92:2057] recipient: [12:88:2117] !Reboot 72057594037927937 (actor [12:56:2097]) rebooted! !Reboot 72057594037927937 (actor [12:56:2097]) tablet resolver refreshed! 
new actor is[12:91:2118] Leader for TabletID 72057594037927937 is [12:91:2118] sender: [12:111:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:50:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:57:2057] recipient: [13:50:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:74:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:89:2057] recipient: [13:36:2083] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:91:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:93:2057] recipient: [13:92:2120] Leader for TabletID 72057594037927937 is [13:94:2121] sender: [13:95:2057] recipient: [13:92:2120] !Reboot 72057594037927937 (actor [13:56:2097]) rebooted! !Reboot 72057594037927937 (actor [13:56:2097]) tablet resolver refreshed! new actor is[13:94:2121] Leader for TabletID 72057594037927937 is [13:94:2121] sender: [13:148:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:52:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:57:2057] recipient: [14:52:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:74:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:89:2057] recipient: [14:36:2083] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:92:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:93:2057] recipient: [14:91:2120] Leader for TabletID 72057594037927937 is [14:94:2121] sender: [14:95:2057] recipient: [14:91:2120] !Reboot 72057594037927937 (actor [14:56:2097]) rebooted! !Reboot 72057594037927937 (actor [14:56:2097]) tablet resolver refreshed! new actor is[14:94:2121] Leader for TabletID 72057594037927937 is [14:94:2121] sender: [14:148:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:50:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:57:2057] recipient: [15:50:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:74:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:90:2057] recipient: [15:36:2083] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:93:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:94:2057] recipient: [15:92:2120] Leader for TabletID 72057594037927937 is [15:95:2121] sender: [15:96:2057] recipient: [15:92:2120] !Reboot 72057594037927937 (actor [15:56:2097]) rebooted! !Reboot 72057594037927937 (actor [15:56:2097]) tablet resolver refreshed! 
new actor is[15:95:2121] Leader for TabletID 72057594037927937 is [15:95:2121] sender: [15:149:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:57:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:74:2057] recipient: [16:14:2061]
>> Cdc::ResolvedTimestampsMultiplePartitions [GOOD]
>> Cdc::ResolvedTimestampsVolatileOutOfOrder
>> KqpQuery::YqlSyntaxV0
>> TKeyValueTest::TestBasicWriteReadOverrun [GOOD]
>> TKeyValueTest::TestBlockedEvGetRequest
>> TKeyValueTest::TestBlockedEvGetRequest [GOOD]
>> KqpLimits::OutOfSpaceBulkUpsertFail
------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestBlockedEvGetRequest [GOOD]
Test command err:
Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:52:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:52:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:78:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:79:2110] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:79:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed!
new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! 
new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:52:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:52:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:76:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:78:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:80:2057] recipient: [10:79:2110] Leader for TabletID 72057594037927937 is [10:81:2111] sender: [10:82:2057] recipient: [10:79:2110] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:81:2111] Leader for TabletID 72057594037927937 is [10:81:2111] sender: [10:135:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! 
Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:76:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:79:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:80:2057] recipient: [11:78:2110] Leader for TabletID 72057594037927937 is [11:81:2111] sender: [11:82:2057] recipient: [11:78:2110] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! new actor is[11:81:2111] Leader for TabletID 72057594037927937 is [11:81:2111] sender: [11:135:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:77:2057] recipient: [12:36:2083] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:80:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:81:2057] recipient: [12:79:2110] Leader for TabletID 72057594037927937 is [12:82:2111] sender: [12:83:2057] recipient: [12:79:2110] !Reboot 72057594037927937 (actor [12:56:2097]) rebooted! !Reboot 72057594037927937 (actor [12:56:2097]) tablet resolver refreshed! new actor is[12:82:2111] Leader for TabletID 72057594037927937 is [12:82:2111] sender: [12:136:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:50:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:57:2057] recipient: [13:50:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:74:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:80:2057] recipient: [13:36:2083] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:83:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:84:2057] recipient: [13:82:2113] Leader for TabletID 72057594037927937 is [13:85:2114] sender: [13:86:2057] recipient: [13:82:2113] !Reboot 72057594037927937 (actor [13:56:2097]) rebooted! !Reboot 72057594037927937 (actor [13:56:2097]) tablet resolver refreshed! new actor is[13:85:2114] Leader for TabletID 72057594037927937 is [13:85:2114] sender: [13:139:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:52:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:57:2057] recipient: [14:52:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:74:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:56:2097]) on event NKikimr::TEvKeyValue::TEvRead ! 
Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:80:2057] recipient: [14:36:2083] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:82:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:84:2057] recipient: [14:83:2113] Leader for TabletID 72057594037927937 is [14:85:2114] sender: [14:86:2057] recipient: [14:83:2113] !Reboot 72057594037927937 (actor [14:56:2097]) rebooted! !Reboot 72057594037927937 (actor [14:56:2097]) tablet resolver refreshed! new actor is[14:85:2114] Leader for TabletID 72057594037927937 is [14:85:2114] sender: [14:139:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:50:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:57:2057] recipient: [15:50:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:74:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:81:2057] recipient: [15:36:2083] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:84:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:85:2057] recipient: [15:83:2113] Leader for TabletID 72057594037927937 is [15:86:2114] sender: [15:87:2057] recipient: [15:83:2113] !Reboot 72057594037927937 (actor [15:56:2097]) rebooted! !Reboot 72057594037927937 (actor [15:56:2097]) tablet resolver refreshed! new actor is[15:86:2114] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:57:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:74:2057] recipient: [16:14:2061]
2025-03-31T16:19:57.693013Z node 17 :KEYVALUE ERROR: {KV323@keyvalue_storage_read_request.cpp:254} Received BLOCKED EvGetResult. KeyValue# 72057594037927937 Status# BLOCKED Deadline# 18446744073709551 Now# 0 SentAt# 1970-01-01T00:00:00.000000Z GotAt# 0 ErrorReason# block race detected
2025-03-31T16:19:57.697058Z node 17 :TABLET_MAIN ERROR: Tablet: 72057594037927937 HandleBlockBlobStorageResult, msg->Status: ALREADY, not discovered Marker# TSYS21
2025-03-31T16:19:57.697128Z node 17 :TABLET_MAIN ERROR: Tablet: 72057594037927937 Type: KeyValue, EReason: ReasonBootBSError, SuggestedGeneration: 0, KnownGeneration: 3 Marker# TSYS31
>> KqpStats::MultiTxStatsFullYql [GOOD]
>> KqpStats::MultiTxStatsFullScan
>> TKeyValueTest::TestInlineWriteReadDeleteWithRestartsThenResponseOk [GOOD]
>> TKeyValueTest::TestInlineWriteReadDeleteWithRestartsThenResponseOkNewApi
>> TKeyValueTest::TestEmptyWriteReadDeleteWithRestartsThenResponseOk [GOOD]
>> TKeyValueTest::TestEmptyWriteReadDeleteWithRestartsThenResponseOkNewApi
>> KqpExplain::ExplainStream [GOOD]
>> KqpExplain::ExplainScanQueryWithParams
>> TKeyValueTest::TestWrite200KDeleteThenResponseErrorNewApi [GOOD]
>> TKeyValueTest::TestWriteDeleteThenReadRemaining
>> BasicUsage::WaitEventBlocksBeforeDiscovery [GOOD]
>> BasicUsage::SimpleHandlers
>> KqpQuery::QueryTimeout
>> KqpLimits::WaitCAsStateOnAbort [GOOD]
>> KqpLimits::WaitCAsTimeout
>> Cdc::InitialScanDebezium [GOOD]
>> Cdc::InitialScanRacyCompleteAndRequest
>> TKeyValueTest::TestCopyRangeWorks [GOOD]
>> TKeyValueTest::TestCopyRangeWorksNewApi
>> KqpQuery::YqlSyntaxV0 [GOOD]
>> KqpQuery::YqlTableSample
>> TKeyValueTest::TestCleanUpDataOnEmptyTablet [GOOD]
>> TKeyValueTest::TestCleanUpDataOnEmptyTabletResetGeneration
>> TKeyValueTest::TestCleanUpDataOnEmptyTabletResetGeneration [GOOD]
>> TKeyValueTest::TestCleanUpDataWithMockDisk
>> KqpStats::MultiTxStatsFullScan [GOOD]
>> KqpStats::OneShardLocalExec+UseSink
>> Cdc::RenameTable [GOOD]
>> Cdc::InitialScan_WithTopicSchemeTx
>> KqpExplain::ExplainScanQueryWithParams [GOOD]
>> KqpExplain::FewEffects+UseSink
>> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOkNewApi [GOOD]
>> TKeyValueTest::TestInlineWriteReadRangeLimitThenLimitWorksNewApi [GOOD]
>> KqpScripting::StreamExecuteYqlScriptScanWriteCancelAfterBruteForced [GOOD]
>> KqpScripting::StreamExecuteYqlScriptScanScalar
>> TKeyValueTest::TestLargeWriteAndDelete [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestInlineWriteReadRangeLimitThenLimitWorksNewApi [GOOD]
Test command err:
Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:52:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:52:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected !
Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:78:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:79:2110] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:79:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:82:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:83:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:83:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:140:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:83:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:86:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:87:2057] recipient: [8:85:2115] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:89:2057] recipient: [8:85:2115] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! new actor is[8:88:2116] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:142:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:83:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:86:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:85:2115] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:89:2057] recipient: [9:85:2115] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! new actor is[9:88:2116] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:142:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:52:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:52:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:84:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:87:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:88:2057] recipient: [10:86:2115] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:90:2057] recipient: [10:86:2115] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! 
new actor is[10:89:2116] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:143:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:50:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:57:2057] recipient: [13:50:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:74:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:76:2057] recipient: [13:36:2083] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:79:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:80:2057] recipient: [13:78:2110] Leader for TabletID 72057594037927937 is [13:81:2111] sender: [13:82:2057] recipient: [13:78:2110] !Reboot 72057594037927937 (actor [13:56:2097]) rebooted! !Reboot 72057594037927937 (actor [13:56:2097]) tablet resolver refreshed! new actor is[13:81:2111] Leader for TabletID 72057594037927937 is [13:81:2111] sender: [13:135:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:52:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:57:2057] recipient: [14:52:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:74:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:76:2057] recipient: [14:36:2083] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:79:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:80:2057] recipient: [14:78:2110] Leader for TabletID 72057594037927937 is [14:81:2111] sender: [14:82:2057] recipient: [14:78:2110] !Reboot 72057594037927937 (actor [14:56:2097]) rebooted! !Reboot 72057594037927937 (actor [14:56:2097]) tablet resolver refreshed! 
new actor is[14:81:2111] Leader for TabletID 72057594037927937 is [14:81:2111] sender: [14:135:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:50:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:57:2057] recipient: [15:50:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:74:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:77:2057] recipient: [15:36:2083] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:80:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:81:2057] recipient: [15:79:2110] Leader for TabletID 72057594037927937 is [15:82:2111] sender: [15:83:2057] recipient: [15:79:2110] !Reboot 72057594037927937 (actor [15:56:2097]) rebooted! !Reboot 72057594037927937 (actor [15:56:2097]) tablet resolver refreshed! new actor is[15:82:2111] Leader for TabletID 72057594037927937 is [15:82:2111] sender: [15:136:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:57:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:74:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:80:2057] recipient: [16:36:2083] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:83:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:84:2057] recipient: [16:82:2113] Leader for TabletID 72057594037927937 is [16:85:2114] sender: [16:86:2057] recipient: [16:82:2113] !Reboot 72057594037927937 (actor [16:56:2097]) rebooted! !Reboot 72057594037927937 (actor [16:56:2097]) tablet resolver refreshed! new actor is[16:85:2114] Leader for TabletID 72057594037927937 is [16:85:2114] sender: [16:139:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:52:2095] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:57:2057] recipient: [17:52:2095] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:74:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:80:2057] recipient: [17:36:2083] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:83:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:84:2057] recipient: [17:82:2113] Leader for TabletID 72057594037927937 is [17:85:2114] sender: [17:86:2057] recipient: [17:82:2113] !Reboot 72057594037927937 (actor [17:56:2097]) rebooted! !Reboot 72057594037927937 (actor [17:56:2097]) tablet resolver refreshed! 
new actor is[17:85:2114] Leader for TabletID 72057594037927937 is [17:85:2114] sender: [17:139:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:50:2095] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:57:2057] recipient: [18:50:2095] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:74:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:81:2057] recipient: [18:36:2083] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:83:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:85:2057] recipient: [18:84:2113] Leader for TabletID 72057594037927937 is [18:86:2114] sender: [18:87:2057] recipient: [18:84:2113] !Reboot 72057594037927937 (actor [18:56:2097]) rebooted! !Reboot 72057594037927937 (actor [18:56:2097]) tablet resolver refreshed! new actor is[18:86:2114] Leader for TabletID 72057594037927937 is [18:86:2114] sender: [18:104:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:51:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:57:2057] recipient: [19:51:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:74:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:83:2057] recipient: [19:36:2083] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:86:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:87:2057] recipient: [19:85:2115] Leader for TabletID 72057594037927937 is [19:88:2116] sender: [19:89:2057] recipient: [19:85:2115] !Reboot 72057594037927937 (actor [19:56:2097]) rebooted! !Reboot 72057594037927937 (actor [19:56:2097]) tablet resolver refreshed! new actor is[19:88:2116] Leader for TabletID 72057594037927937 is [19:88:2116] sender: [19:142:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:51:2095] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:57:2057] recipient: [20:51:2095] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:74:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:83:2057] recipient: [20:36:2083] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:86:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:87:2057] recipient: [20:85:2115] Leader for TabletID 72057594037927937 is [20:88:2116] sender: [20:89:2057] recipient: [20:85:2115] !Reboot 72057594037927937 (actor [20:56:2097]) rebooted! !Reboot 72057594037927937 (actor [20:56:2097]) tablet resolver refreshed! 
new actor is[20:88:2116] Leader for TabletID 72057594037927937 is [20:88:2116] sender: [20:142:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:50:2095] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:57:2057] recipient: [21:50:2095] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:74:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:84:2057] recipient: [21:36:2083] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:87:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:88:2057] recipient: [21:86:2115] Leader for TabletID 72057594037927937 is [21:89:2116] sender: [21:90:2057] recipient: [21:86:2115] !Reboot 72057594037927937 (actor [21:56:2097]) rebooted! !Reboot 72057594037927937 (actor [21:56:2097]) tablet resolver refreshed! new actor is[21:89:2116] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:57:2057] recipient: [22:51:2095] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:74:2057] recipient: [22:14:2061]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOkNewApi [GOOD]
Test command err:
Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:52:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:52:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:78:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:79:2110] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:79:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed!
new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! 
new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! 
new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:140:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:52:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:52:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:76:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:78:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:80:2057] recipient: [10:79:2110] Leader for TabletID 72057594037927937 is [10:81:2111] sender: [10:82:2057] recipient: [10:79:2110] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:81:2111] Leader for TabletID 72057594037927937 is [10:81:2111] sender: [10:135:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:76:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:79:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:80:2057] recipient: [11:78:2110] Leader for TabletID 72057594037927937 is [11:81:2111] sender: [11:82:2057] recipient: [11:78:2110] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! 
new actor is[11:81:2111] Leader for TabletID 72057594037927937 is [11:81:2111] sender: [11:135:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:77:2057] recipient: [12:36:2083] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:80:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:81:2057] recipient: [12:79:2110] Leader for TabletID 72057594037927937 is [12:82:2111] sender: [12:83:2057] recipient: [12:79:2110] !Reboot 72057594037927937 (actor [12:56:2097]) rebooted! !Reboot 72057594037927937 (actor [12:56:2097]) tablet resolver refreshed! new actor is[12:82:2111] Leader for TabletID 72057594037927937 is [12:82:2111] sender: [12:136:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:50:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:57:2057] recipient: [13:50:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:74:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:80:2057] recipient: [13:36:2083] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:83:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:84:2057] recipient: [13:82:2113] Leader for TabletID 72057594037927937 is [13:85:2114] sender: [13:86:2057] recipient: [13:82:2113] !Reboot 72057594037927937 (actor [13:56:2097]) rebooted! !Reboot 72057594037927937 (actor [13:56:2097]) tablet resolver refreshed! new actor is[13:85:2114] Leader for TabletID 72057594037927937 is [13:85:2114] sender: [13:139:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:52:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:57:2057] recipient: [14:52:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:74:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:80:2057] recipient: [14:36:2083] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:82:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:84:2057] recipient: [14:83:2113] Leader for TabletID 72057594037927937 is [14:85:2114] sender: [14:86:2057] recipient: [14:83:2113] !Reboot 72057594037927937 (actor [14:56:2097]) rebooted! !Reboot 72057594037927937 (actor [14:56:2097]) tablet resolver refreshed! 
new actor is[14:85:2114] Leader for TabletID 72057594037927937 is [14:85:2114] sender: [14:139:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:50:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:57:2057] recipient: [15:50:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:74:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:81:2057] recipient: [15:36:2083] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:84:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:85:2057] recipient: [15:83:2113] Leader for TabletID 72057594037927937 is [15:86:2114] sender: [15:87:2057] recipient: [15:83:2113] !Reboot 72057594037927937 (actor [15:56:2097]) rebooted! !Reboot 72057594037927937 (actor [15:56:2097]) tablet resolver refreshed! new actor is[15:86:2114] Leader for TabletID 72057594037927937 is [15:86:2114] sender: [15:140:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:57:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:74:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:84:2057] recipient: [16:36:2083] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:87:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:88:2057] recipient: [16:86:2116] Leader for TabletID 72057594037927937 is [16:89:2117] sender: [16:90:2057] recipient: [16:86:2116] !Reboot 72057594037927937 (actor [16:56:2097]) rebooted! !Reboot 72057594037927937 (actor [16:56:2097]) tablet resolver refreshed! new actor is[16:89:2117] Leader for TabletID 72057594037927937 is [16:89:2117] sender: [16:143:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:52:2095] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:57:2057] recipient: [17:52:2095] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:74:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:84:2057] recipient: [17:36:2083] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:87:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:88:2057] recipient: [17:86:2116] Leader for TabletID 72057594037927937 is [17:89:2117] sender: [17:90:2057] recipient: [17:86:2116] !Reboot 72057594037927937 (actor [17:56:2097]) rebooted! !Reboot 72057594037927937 (actor [17:56:2097]) tablet resolver refreshed! 
new actor is[17:89:2117] Leader for TabletID 72057594037927937 is [17:89:2117] sender: [17:143:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:50:2095] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:57:2057] recipient: [18:50:2095] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:74:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:85:2057] recipient: [18:36:2083] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:88:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:89:2057] recipient: [18:87:2116] Leader for TabletID 72057594037927937 is [18:90:2117] sender: [18:91:2057] recipient: [18:87:2116] !Reboot 72057594037927937 (actor [18:56:2097]) rebooted! !Reboot 72057594037927937 (actor [18:56:2097]) tablet resolver refreshed! new actor is[18:90:2117] Leader for TabletID 72057594037927937 is [18:90:2117] sender: [18:144:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:51:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:57:2057] recipient: [19:51:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:74:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:88:2057] recipient: [19:36:2083] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:91:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:92:2057] recipient: [19:90:2119] Leader for TabletID 72057594037927937 is [19:93:2120] sender: [19:94:2057] recipient: [19:90:2119] !Reboot 72057594037927937 (actor [19:56:2097]) rebooted! !Reboot 72057594037927937 (actor [19:56:2097]) tablet resolver refreshed! new actor is[19:93:2120] Leader for TabletID 72057594037927937 is [19:93:2120] sender: [19:147:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:51:2095] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:57:2057] recipient: [20:51:2095] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:74:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:88:2057] recipient: [20:36:2083] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:90:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:92:2057] recipient: [20:91:2119] Leader for TabletID 72057594037927937 is [20:93:2120] sender: [20:94:2057] recipient: [20:91:2119] !Reboot 72057594037927937 (actor [20:56:2097]) rebooted! !Reboot 72057594037927937 (actor [20:56:2097]) tablet resolver refreshed! 
new actor is[20:93:2120] Leader for TabletID 72057594037927937 is [20:93:2120] sender: [20:147:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:50:2095] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:57:2057] recipient: [21:50:2095] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:74:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:89:2057] recipient: [21:36:2083] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:92:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:93:2057] recipient: [21:91:2119] Leader for TabletID 72057594037927937 is [21:94:2120] sender: [21:95:2057] recipient: [21:91:2119] !Reboot 72057594037927937 (actor [21:56:2097]) rebooted! !Reboot 72057594037927937 (actor [21:56:2097]) tablet resolver refreshed! new actor is[21:94:2120] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:57:2057] recipient: [22:51:2095] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:74:2057] recipient: [22:14:2061]
>> KqpQuery::QueryTimeout [GOOD]
>> KqpQuery::RandomNumber
>> TKeyValueTest::TestConcatWorks [GOOD]
>> TKeyValueTest::TestConcatWorksNewApi
------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestLargeWriteAndDelete [GOOD]
Test command err:
Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:52:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:52:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:78:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:79:2110] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:79:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed!
new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:82:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:83:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:83:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! 
new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:52:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:52:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:76:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:78:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:80:2057] recipient: [10:79:2110] Leader for TabletID 72057594037927937 is [10:81:2111] sender: [10:82:2057] recipient: [10:79:2110] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:81:2111] Leader for TabletID 72057594037927937 is [10:81:2111] sender: [10:135:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:76:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:79:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:80:2057] recipient: [11:78:2110] Leader for TabletID 72057594037927937 is [11:81:2111] sender: [11:82:2057] recipient: [11:78:2110] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! new actor is[11:81:2111] Leader for TabletID 72057594037927937 is [11:81:2111] sender: [11:135:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:77:2057] recipient: [12:36:2083] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:80:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:81:2057] recipient: [12:79:2110] Leader for TabletID 72057594037927937 is [12:82:2111] sender: [12:83:2057] recipient: [12:79:2110] !Reboot 72057594037927937 (actor [12:56:2097]) rebooted! !Reboot 72057594037927937 (actor [12:56:2097]) tablet resolver refreshed! new actor is[12:82:2111] Leader for TabletID 72057594037927937 is [12:82:2111] sender: [12:136:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:50:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:57:2057] recipient: [13:50:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:74:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:80:2057] recipient: [13:36:2083] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:83:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:84:2057] recipient: [13:82:2113] Leader for TabletID 72057594037927937 is [13:85:2114] sender: [13:86:2057] recipient: [13:82:2113] !Reboot 72057594037927937 (actor [13:56:2097]) rebooted! !Reboot 72057594037927937 (actor [13:56:2097]) tablet resolver refreshed! new actor is[13:85:2114] Leader for TabletID 72057594037927937 is [13:85:2114] sender: [13:139:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:52:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:57:2057] recipient: [14:52:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:74:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:80:2057] recipient: [14:36:2083] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:82:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:84:2057] recipient: [14:83:2113] Leader for TabletID 72057594037927937 is [14:85:2114] sender: [14:86:2057] recipient: [14:83:2113] !Reboot 72057594037927937 (actor [14:56:2097]) rebooted! !Reboot 72057594037927937 (actor [14:56:2097]) tablet resolver refreshed! new actor is[14:85:2114] Leader for TabletID 72057594037927937 is [14:85:2114] sender: [14:139:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:50:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:57:2057] recipient: [15:50:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:74:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:81:2057] recipient: [15:36:2083] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:84:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:85:2057] recipient: [15:83:2113] Leader for TabletID 72057594037927937 is [15:86:2114] sender: [15:87:2057] recipient: [15:83:2113] !Reboot 72057594037927937 (actor [15:56:2097]) rebooted! !Reboot 72057594037927937 (actor [15:56:2097]) tablet resolver refreshed! new actor is[15:86:2114] Leader for TabletID 72057594037927937 is [15:86:2114] sender: [15:140:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:57:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:74:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:84:2057] recipient: [16:36:2083] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:87:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:88:2057] recipient: [16:86:2116] Leader for TabletID 72057594037927937 is [16:89:2117] sender: [16:90:2057] recipient: [16:86:2116] !Reboot 72057594037927937 (actor [16:56:2097]) rebooted! !Reboot 72057594037927937 (actor [16:56:2097]) tablet resolver refreshed! new actor is[16:89:2117] Leader for TabletID 72057594037927937 is [16:89:2117] sender: [16:143:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:52:2095] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:57:2057] recipient: [17:52:2095] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:74:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:84:2057] recipient: [17:36:2083] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:87:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:88:2057] recipient: [17:86:2116] Leader for TabletID 72057594037927937 is [17:89:2117] sender: [17:90:2057] recipient: [17:86:2116] !Reboot 72057594037927937 (actor [17:56:2097]) rebooted! !Reboot 72057594037927937 (actor [17:56:2097]) tablet resolver refreshed! new actor is[17:89:2117] Leader for TabletID 72057594037927937 is [17:89:2117] sender: [17:143:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:50:2095] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:57:2057] recipient: [18:50:2095] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:74:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:85:2057] recipient: [18:36:2083] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:88:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:89:2057] recipient: [18:87:2116] Leader for TabletID 72057594037927937 is [18:90:2117] sender: [18:91:2057] recipient: [18:87:2116] !Reboot 72057594037927937 (actor [18:56:2097]) rebooted! !Reboot 72057594037927937 (actor [18:56:2097]) tablet resolver refreshed! 
new actor is[18:90:2117] Leader for TabletID 72057594037927937 is [18:90:2117] sender: [18:144:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:51:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:57:2057] recipient: [19:51:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:74:2057] recipient: [19:14:2061]
>> TKeyValueTest::TestRenameWorksNewApi [GOOD]
>> TKeyValueTest::TestSetExecutorFastLogPolicy [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestRenameWorksNewApi [GOOD]
Test command err:
Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:52:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:52:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:78:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:79:2110] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:79:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed!
new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! 
new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:140:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:82:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:85:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:86:2057] recipient: [8:84:2114] Leader for TabletID 72057594037927937 is [8:87:2115] sender: [8:88:2057] recipient: [8:84:2114] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! new actor is[8:87:2115] Leader for TabletID 72057594037927937 is [8:87:2115] sender: [8:107:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:83:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:86:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:85:2115] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:89:2057] recipient: [9:85:2115] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! 
new actor is[9:88:2116] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:108:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:52:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:52:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:86:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:89:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:90:2057] recipient: [10:88:2118] Leader for TabletID 72057594037927937 is [10:91:2119] sender: [10:92:2057] recipient: [10:88:2118] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:91:2119] Leader for TabletID 72057594037927937 is [10:91:2119] sender: [10:145:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:86:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:89:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:90:2057] recipient: [11:88:2118] Leader for TabletID 72057594037927937 is [11:91:2119] sender: [11:92:2057] recipient: [11:88:2118] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! new actor is[11:91:2119] Leader for TabletID 72057594037927937 is [11:91:2119] sender: [11:145:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:56:209 ... recipient: [15:78:2110] !Reboot 72057594037927937 (actor [15:56:2097]) rebooted! !Reboot 72057594037927937 (actor [15:56:2097]) tablet resolver refreshed! 
new actor is[15:81:2111] Leader for TabletID 72057594037927937 is [15:81:2111] sender: [15:135:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:57:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:74:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:76:2057] recipient: [16:36:2083] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:79:2057] recipient: [16:78:2110] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:80:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [16:81:2111] sender: [16:82:2057] recipient: [16:78:2110] !Reboot 72057594037927937 (actor [16:56:2097]) rebooted! !Reboot 72057594037927937 (actor [16:56:2097]) tablet resolver refreshed! new actor is[16:81:2111] Leader for TabletID 72057594037927937 is [16:81:2111] sender: [16:135:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:52:2095] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:57:2057] recipient: [17:52:2095] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:74:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:77:2057] recipient: [17:36:2083] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:80:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:81:2057] recipient: [17:79:2110] Leader for TabletID 72057594037927937 is [17:82:2111] sender: [17:83:2057] recipient: [17:79:2110] !Reboot 72057594037927937 (actor [17:56:2097]) rebooted! !Reboot 72057594037927937 (actor [17:56:2097]) tablet resolver refreshed! new actor is[17:82:2111] Leader for TabletID 72057594037927937 is [17:82:2111] sender: [17:136:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:50:2095] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:57:2057] recipient: [18:50:2095] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:74:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:80:2057] recipient: [18:36:2083] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:82:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:84:2057] recipient: [18:83:2113] Leader for TabletID 72057594037927937 is [18:85:2114] sender: [18:86:2057] recipient: [18:83:2113] !Reboot 72057594037927937 (actor [18:56:2097]) rebooted! !Reboot 72057594037927937 (actor [18:56:2097]) tablet resolver refreshed! 
new actor is[18:85:2114] Leader for TabletID 72057594037927937 is [18:85:2114] sender: [18:139:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:51:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:57:2057] recipient: [19:51:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:74:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:80:2057] recipient: [19:36:2083] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:83:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:84:2057] recipient: [19:82:2113] Leader for TabletID 72057594037927937 is [19:85:2114] sender: [19:86:2057] recipient: [19:82:2113] !Reboot 72057594037927937 (actor [19:56:2097]) rebooted! !Reboot 72057594037927937 (actor [19:56:2097]) tablet resolver refreshed! new actor is[19:85:2114] Leader for TabletID 72057594037927937 is [19:85:2114] sender: [19:139:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:51:2095] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:57:2057] recipient: [20:51:2095] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:74:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:81:2057] recipient: [20:36:2083] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:84:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:85:2057] recipient: [20:83:2113] Leader for TabletID 72057594037927937 is [20:86:2114] sender: [20:87:2057] recipient: [20:83:2113] !Reboot 72057594037927937 (actor [20:56:2097]) rebooted! !Reboot 72057594037927937 (actor [20:56:2097]) tablet resolver refreshed! new actor is[20:86:2114] Leader for TabletID 72057594037927937 is [20:86:2114] sender: [20:140:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:50:2095] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:57:2057] recipient: [21:50:2095] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:74:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:56:2097]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:82:2057] recipient: [21:36:2083] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:85:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:86:2057] recipient: [21:84:2114] Leader for TabletID 72057594037927937 is [21:87:2115] sender: [21:88:2057] recipient: [21:84:2114] !Reboot 72057594037927937 (actor [21:56:2097]) rebooted! !Reboot 72057594037927937 (actor [21:56:2097]) tablet resolver refreshed! 
new actor is[21:87:2115] Leader for TabletID 72057594037927937 is [21:87:2115] sender: [21:107:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:57:2057] recipient: [22:51:2095] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:74:2057] recipient: [22:14:2061] !Reboot 72057594037927937 (actor [22:56:2097]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:83:2057] recipient: [22:36:2083] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:86:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:87:2057] recipient: [22:85:2115] Leader for TabletID 72057594037927937 is [22:88:2116] sender: [22:89:2057] recipient: [22:85:2115] !Reboot 72057594037927937 (actor [22:56:2097]) rebooted! !Reboot 72057594037927937 (actor [22:56:2097]) tablet resolver refreshed! new actor is[22:88:2116] Leader for TabletID 72057594037927937 is [22:88:2116] sender: [22:108:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:54:2057] recipient: [23:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:54:2057] recipient: [23:51:2095] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:57:2057] recipient: [23:51:2095] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:74:2057] recipient: [23:14:2061] !Reboot 72057594037927937 (actor [23:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:86:2057] recipient: [23:36:2083] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:89:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:90:2057] recipient: [23:88:2118] Leader for TabletID 72057594037927937 is [23:91:2119] sender: [23:92:2057] recipient: [23:88:2118] !Reboot 72057594037927937 (actor [23:56:2097]) rebooted! !Reboot 72057594037927937 (actor [23:56:2097]) tablet resolver refreshed! new actor is[23:91:2119] Leader for TabletID 72057594037927937 is [23:91:2119] sender: [23:145:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:54:2057] recipient: [24:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:54:2057] recipient: [24:51:2095] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:57:2057] recipient: [24:51:2095] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:74:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:86:2057] recipient: [24:36:2083] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:89:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:90:2057] recipient: [24:88:2118] Leader for TabletID 72057594037927937 is [24:91:2119] sender: [24:92:2057] recipient: [24:88:2118] !Reboot 72057594037927937 (actor [24:56:2097]) rebooted! !Reboot 72057594037927937 (actor [24:56:2097]) tablet resolver refreshed! 
new actor is[24:91:2119] Leader for TabletID 72057594037927937 is [24:91:2119] sender: [24:145:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:54:2057] recipient: [25:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:54:2057] recipient: [25:51:2095] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:57:2057] recipient: [25:51:2095] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:74:2057] recipient: [25:14:2061] !Reboot 72057594037927937 (actor [25:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:87:2057] recipient: [25:36:2083] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:90:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:91:2057] recipient: [25:89:2118] Leader for TabletID 72057594037927937 is [25:92:2119] sender: [25:93:2057] recipient: [25:89:2118] !Reboot 72057594037927937 (actor [25:56:2097]) rebooted! !Reboot 72057594037927937 (actor [25:56:2097]) tablet resolver refreshed! new actor is[25:92:2119] Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:54:2057] recipient: [26:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:54:2057] recipient: [26:52:2095] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:57:2057] recipient: [26:52:2095] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:74:2057] recipient: [26:14:2061]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestSetExecutorFastLogPolicy [GOOD]
Test command err:
Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:52:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:52:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected !
Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:76:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:79:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:78:2110] Leader for TabletID 72057594037927937 is [4:81:2111] sender: [4:82:2057] recipient: [4:78:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:81:2111] Leader for TabletID 72057594037927937 is [4:81:2111] sender: [4:135:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:76:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:79:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:78:2110] Leader for TabletID 72057594037927937 is [5:81:2111] sender: [5:82:2057] recipient: [5:78:2110] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:81:2111] Leader for TabletID 72057594037927937 is [5:81:2111] sender: [5:135:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:77:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:81:2057] recipient: [6:79:2110] Leader for TabletID 72057594037927937 is [6:82:2111] sender: [6:83:2057] recipient: [6:79:2110] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:82:2111] Leader for TabletID 72057594037927937 is [6:82:2111] sender: [6:136:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:79:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:82:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:83:2057] recipient: [7:81:2112] Leader for TabletID 72057594037927937 is [7:84:2113] sender: [7:85:2057] recipient: [7:81:2112] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:84:2113] Leader for TabletID 72057594037927937 is [7:84:2113] sender: [7:138:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:79:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:82:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:83:2057] recipient: [8:81:2112] Leader for TabletID 72057594037927937 is [8:84:2113] sender: [8:85:2057] recipient: [8:81:2112] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! new actor is[8:84:2113] Leader for TabletID 72057594037927937 is [8:84:2113] sender: [8:138:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:80:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:83:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:84:2057] recipient: [9:82:2112] Leader for TabletID 72057594037927937 is [9:85:2113] sender: [9:86:2057] recipient: [9:82:2112] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! new actor is[9:85:2113] Leader for TabletID 72057594037927937 is [9:85:2113] sender: [9:139:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:52:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:52:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:83:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:86:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:87:2057] recipient: [10:85:2115] Leader for TabletID 72057594037927937 is [10:88:2116] sender: [10:89:2057] recipient: [10:85:2115] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:88:2116] Leader for TabletID 72057594037927937 is [10:88:2116] sender: [10:142:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:83:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:86:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:87:2057] recipient: [11:85:2115] Leader for TabletID 72057594037927937 is [11:88:2116] sender: [11:89:2057] recipient: [11:85:2115] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! new actor is[11:88:2116] Leader for TabletID 72057594037927937 is [11:88:2116] sender: [11:142:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:84:2057] recipient: [12:36:2083] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:87:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:88:2057] recipient: [12:86:2115] Leader for TabletID 72057594037927937 is [12:89:2116] sender: [12:90:2057] recipient: [12:86:2115] !Reboot 72057594037927937 (actor [12:56:2097]) rebooted! !Reboot 72057594037927937 (actor [12:56:2097]) tablet resolver refreshed! 
new actor is[12:89:2116] Leader for TabletID 72057594037927937 is [12:89:2116] sender: [12:143:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:50:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:57:2057] recipient: [13:50:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:74:2057] recipient: [13:14:2061]
>> KqpParams::Decimal-QueryService-UseSink
>> KqpStats::DataQueryWithEffects+UseSink
>> TKeyValueTest::TestInlineEmptyWriteReadDeleteWithRestartsThenResponseOk [GOOD]
>> TKeyValueTest::TestInlineEmptyWriteReadDeleteWithRestartsThenResponseOkNewApi
>> KqpQuery::YqlTableSample [GOOD]
>> KqpQuery::UpdateWhereInSubquery
>> TKeyValueTest::TestWriteDeleteThenReadRemaining [GOOD]
>> KqpLimits::QueryReplySize
>> Cdc::InitialScanRacyCompleteAndRequest [GOOD]
>> Cdc::InitialScanAndLimits
------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteDeleteThenReadRemaining [GOOD]
Test command err:
Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] 2025-03-31T16:19:52.383668Z node 1 :KEYVALUE ERROR: KeyValue# 72057594037927937 PrepareExecuteTransactionRequest returned false, Marker# KV73 Submsg# KeyValue# 72057594037927937 Can't delete Range, in DeleteRange, total limit of deletions per request (100000) reached, Marker# KV90 Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:52:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:52:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] 2025-03-31T16:19:58.353623Z node 2 :KEYVALUE ERROR: KeyValue# 72057594037927937 PrepareExecuteTransactionRequest returned false, Marker# KV73 Submsg# KeyValue# 72057594037927937 Can't delete Range, in DeleteRange, total limit of deletions per request (100000) reached, Marker# KV90 Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected !
Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:449:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:452:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:453:2057] recipient: [4:451:2377] Leader for TabletID 72057594037927937 is [4:454:2378] sender: [4:455:2057] recipient: [4:451:2377] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:454:2378] Leader for TabletID 72057594037927937 is [4:454:2378] sender: [4:508:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:449:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:452:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:453:2057] recipient: [5:451:2377] Leader for TabletID 72057594037927937 is [5:454:2378] sender: [5:455:2057] recipient: [5:451:2377] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:454:2378] Leader for TabletID 72057594037927937 is [5:454:2378] sender: [5:508:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:450:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:453:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:454:2057] recipient: [6:452:2377] Leader for TabletID 72057594037927937 is [6:455:2378] sender: [6:456:2057] recipient: [6:452:2377] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! 
new actor is[6:455:2378] Leader for TabletID 72057594037927937 is [6:455:2378] sender: [6:509:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061]
>> KqpAnalyze::AnalyzeTable+ColumnStore
>> YdbOlapStore::LogTsRangeDescending [GOOD]
>> YdbQueryService::TestAttachTwice
>> KqpExplain::PrecomputeRange
>> DataShardTxOrder::ImmediateBetweenOnline [GOOD]
>> KqpStats::OneShardLocalExec+UseSink [GOOD]
>> DataShardTxOrder::ImmediateBetweenOnline_oo8 [GOOD]
>> KqpExplain::FewEffects+UseSink [GOOD]
>> KqpExplain::FewEffects-UseSink
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ImmediateBetweenOnline [GOOD]
Test command err:
2025-03-31T16:19:12.092967Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:19:12.094491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:19:12.094551Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:19:12.104144Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:19:12.104719Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:131:2154] 2025-03-31T16:19:12.105013Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:19:12.116129Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:19:12.181717Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:19:12.182104Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:19:12.183743Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-03-31T16:19:12.183826Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-03-31T16:19:12.183880Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-03-31T16:19:12.184290Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:19:12.185946Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:19:12.186037Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:197:2154] in generation 2 2025-03-31T16:19:12.260697Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:19:12.296303Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-03-31T16:19:12.296502Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:19:12.296622Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-03-31T16:19:12.296658Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-03-31T16:19:12.296688Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-03-31T16:19:12.296733Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184
2025-03-31T16:19:12.296958Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:12.297009Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:12.297288Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-03-31T16:19:12.297432Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-03-31T16:19:12.297486Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-31T16:19:12.297518Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:19:12.297561Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-03-31T16:19:12.297596Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-31T16:19:12.297625Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-31T16:19:12.297655Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-03-31T16:19:12.297688Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:19:12.297798Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:12.297850Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:12.297906Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-03-31T16:19:12.300473Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:131:2154]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-03-31T16:19:12.300535Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T16:19:12.300631Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-31T16:19:12.300788Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-03-31T16:19:12.300830Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-03-31T16:19:12.300882Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-03-31T16:19:12.300926Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-31T16:19:12.300970Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-03-31T16:19:12.301013Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-03-31T16:19:12.301043Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-03-31T16:19:12.301355Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-03-31T16:19:12.301404Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 
2025-03-31T16:19:12.301438Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-03-31T16:19:12.301467Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-03-31T16:19:12.301509Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-03-31T16:19:12.301554Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-03-31T16:19:12.301594Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-03-31T16:19:12.301630Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-03-31T16:19:12.301656Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-03-31T16:19:12.314582Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-03-31T16:19:12.314668Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-03-31T16:19:12.314702Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-03-31T16:19:12.314759Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-03-31T16:19:12.314843Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-03-31T16:19:12.315422Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:12.315480Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:12.315524Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-03-31T16:19:12.315669Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:131:2154]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-03-31T16:19:12.315705Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-31T16:19:12.315842Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-03-31T16:19:12.315883Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-31T16:19:12.315917Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-03-31T16:19:12.315961Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-03-31T16:19:12.321955Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-03-31T16:19:12.322045Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:19:12.322285Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:12.322322Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:12.322379Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-31T16:19:12.322419Z node 1 :TX_DATASHARD DEBUG: 
GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:19:12.322454Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-31T16:19:12.322492Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-03-31T16:19:12.322528Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-03-31T16:19:12.364657Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-31T16:19:12.364738Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-03-31T16:19:12.364792Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-03-31T16:19:12.364840Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-03-31T16:19:12.365057Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-03-31T16:19:12.365099Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-31T16:19:12.365137Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-03-31T16:19:12.365163Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-03-31T16:19:12.365187Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-03-31T16:19:12.365269Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-31T16:19:12.365296Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-03-31T16:19:12.365328Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-31T16:19:12.365358Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-03-31T16:19:12.365419Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-03-31T16:19:12.365453Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-03-31T16:19:12.365486Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-03-31T16:19:12.365524Z node 1 :TX_DATA ... 
lt to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-03-31T16:20:06.449835Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-03-31T16:20:06.449908Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-03-31T16:20:06.449935Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:140] at 9437186 on unit CompleteOperation 2025-03-31T16:20:06.449963Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 140] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-03-31T16:20:06.449981Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-03-31T16:20:06.450033Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-03-31T16:20:06.450049Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:143] at 9437186 on unit CompleteOperation 2025-03-31T16:20:06.450084Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 143] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-03-31T16:20:06.450103Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-03-31T16:20:06.450167Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-03-31T16:20:06.450189Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:146] at 9437186 on unit CompleteOperation 2025-03-31T16:20:06.450212Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 146] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-03-31T16:20:06.450241Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-03-31T16:20:06.450298Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-03-31T16:20:06.450312Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:149] at 9437186 on unit CompleteOperation 2025-03-31T16:20:06.450344Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 149] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-03-31T16:20:06.450363Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-03-31T16:20:06.450419Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-03-31T16:20:06.450461Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437186 2025-03-31T16:20:06.450693Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:455:2397], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 104 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 34} 2025-03-31T16:20:06.450729Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:20:06.450754Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 104 2025-03-31T16:20:06.450845Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:455:2397], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 107 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 35} 2025-03-31T16:20:06.450860Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:20:06.450874Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 107 2025-03-31T16:20:06.450912Z 
node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:455:2397], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 110 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 36} 2025-03-31T16:20:06.450936Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:20:06.450960Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 110 2025-03-31T16:20:06.451028Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:455:2397], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 113 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 37} 2025-03-31T16:20:06.451043Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:20:06.451059Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 113 2025-03-31T16:20:06.451160Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:455:2397], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 116 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 38} 2025-03-31T16:20:06.451182Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:20:06.451198Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 116 2025-03-31T16:20:06.451247Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:455:2397], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 119 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 39} 2025-03-31T16:20:06.451266Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:20:06.451286Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 119 2025-03-31T16:20:06.451338Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:455:2397], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 122 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 40} 2025-03-31T16:20:06.451366Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:20:06.451382Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 122 2025-03-31T16:20:06.451461Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:455:2397], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 125 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 41} 2025-03-31T16:20:06.451481Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:20:06.451500Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 125 2025-03-31T16:20:06.451586Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:455:2397], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 128 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 42} 2025-03-31T16:20:06.451604Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:20:06.451617Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 
consumer 9437186 txId 128 2025-03-31T16:20:06.451660Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:455:2397], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 131 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 43} 2025-03-31T16:20:06.451674Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:20:06.451687Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 131 2025-03-31T16:20:06.550279Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:455:2397], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2025-03-31T16:20:06.550348Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:20:06.550398Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 134 2025-03-31T16:20:06.550496Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:455:2397], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2025-03-31T16:20:06.550544Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:20:06.550570Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 137 2025-03-31T16:20:06.550661Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:455:2397], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2025-03-31T16:20:06.550688Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:20:06.550711Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 140 2025-03-31T16:20:06.550809Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:455:2397], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2025-03-31T16:20:06.550849Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:20:06.550877Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 143 2025-03-31T16:20:06.550957Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:455:2397], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2025-03-31T16:20:06.551000Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:20:06.551022Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 146 2025-03-31T16:20:06.551110Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:455:2397], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2025-03-31T16:20:06.551134Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:20:06.551158Z node 1 :TX_DATASHARD DEBUG: 
Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2025-03-31T16:20:06.565491Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-03-31T16:20:06.565549Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:152] at 9437186 on unit CompleteOperation 2025-03-31T16:20:06.565595Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 2 ms, propose latency: 4 ms 2025-03-31T16:20:06.565641Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-03-31T16:20:06.565682Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-03-31T16:20:06.565887Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:455:2397], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-03-31T16:20:06.565914Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:20:06.565941Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152
>> KqpQuery::RandomNumber [GOOD]
>> KqpQuery::RandomUuid
>> KqpQuery::QueryClientTimeout
>> KqpLimits::WaitCAsTimeout [GOOD]
>> KqpParams::BadParameterType
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ImmediateBetweenOnline_oo8 [GOOD]
Test command err:
2025-03-31T16:19:12.090487Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:19:12.091898Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:19:12.091971Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:19:12.109730Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:19:12.110315Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:131:2154] 2025-03-31T16:19:12.110613Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:19:12.121440Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:19:12.186931Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:19:12.187333Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:19:12.189078Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-03-31T16:19:12.189152Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-03-31T16:19:12.189208Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-03-31T16:19:12.189578Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:19:12.191404Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:19:12.191499Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:197:2154] in generation 2 2025-03-31T16:19:12.263037Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:19:12.298241Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184
2025-03-31T16:19:12.298429Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:19:12.298532Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-03-31T16:19:12.298569Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-03-31T16:19:12.298600Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-03-31T16:19:12.298635Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:19:12.298853Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:12.298907Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:12.299249Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-03-31T16:19:12.299361Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-03-31T16:19:12.299431Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-31T16:19:12.299471Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:19:12.299517Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-03-31T16:19:12.299555Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-31T16:19:12.299591Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-31T16:19:12.299623Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-03-31T16:19:12.299665Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:19:12.299766Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:12.299818Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:12.299876Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-03-31T16:19:12.302593Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:131:2154]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-03-31T16:19:12.302678Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T16:19:12.302765Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-31T16:19:12.302929Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-03-31T16:19:12.302974Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-03-31T16:19:12.303025Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-03-31T16:19:12.303071Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 
2025-03-31T16:19:12.303300Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-03-31T16:19:12.303354Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-03-31T16:19:12.303405Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-03-31T16:19:12.303734Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-03-31T16:19:12.303783Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-03-31T16:19:12.303824Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-03-31T16:19:12.303870Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-03-31T16:19:12.303917Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-03-31T16:19:12.303946Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-03-31T16:19:12.303994Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-03-31T16:19:12.304028Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-03-31T16:19:12.304070Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-03-31T16:19:12.320520Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-03-31T16:19:12.320593Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-03-31T16:19:12.320628Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-03-31T16:19:12.320687Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-03-31T16:19:12.320787Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-03-31T16:19:12.321330Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:12.321392Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:12.321438Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-03-31T16:19:12.321581Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:131:2154]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-03-31T16:19:12.321617Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-31T16:19:12.321743Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-03-31T16:19:12.321782Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-31T16:19:12.321818Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-03-31T16:19:12.321853Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-03-31T16:19:12.336929Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 
} 2025-03-31T16:19:12.337013Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:19:12.337269Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:12.337312Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:12.337372Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-31T16:19:12.337411Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:19:12.337447Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-31T16:19:12.337488Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-03-31T16:19:12.337524Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-03-31T16:19:12.371322Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-31T16:19:12.371402Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-03-31T16:19:12.371445Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-03-31T16:19:12.371493Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-03-31T16:19:12.371705Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-03-31T16:19:12.371746Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-31T16:19:12.371788Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-03-31T16:19:12.371815Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-03-31T16:19:12.371843Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-03-31T16:19:12.371923Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-31T16:19:12.371949Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-03-31T16:19:12.371990Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-31T16:19:12.372025Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-03-31T16:19:12.372109Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-03-31T16:19:12.372146Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-03-31T16:19:12.372180Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-03-31T16:19:12.372233Z node 1 :TX_DATA ... 
6 Flags# 0 Seqno# 44} 2025-03-31T16:20:07.372560Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-03-31T16:20:07.372649Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-03-31T16:20:07.372675Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:137] at 9437186 on unit CompleteOperation 2025-03-31T16:20:07.372706Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 137] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 2 ms, propose latency: 4 ms 2025-03-31T16:20:07.372742Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2025-03-31T16:20:07.372761Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-03-31T16:20:07.372856Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-03-31T16:20:07.372881Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:140] at 9437186 on unit CompleteOperation 2025-03-31T16:20:07.372912Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 140] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-03-31T16:20:07.372964Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2025-03-31T16:20:07.372997Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-03-31T16:20:07.373084Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-03-31T16:20:07.373106Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:143] at 9437186 on unit CompleteOperation 2025-03-31T16:20:07.373135Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 143] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-03-31T16:20:07.373164Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-03-31T16:20:07.373245Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-03-31T16:20:07.373266Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:146] at 9437186 on unit CompleteOperation 2025-03-31T16:20:07.373293Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 146] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-03-31T16:20:07.373316Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-03-31T16:20:07.373411Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-03-31T16:20:07.373435Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:149] at 9437186 on unit CompleteOperation 2025-03-31T16:20:07.373489Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 149] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-03-31T16:20:07.373516Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-03-31T16:20:07.373595Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-03-31T16:20:07.373618Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:152] at 9437186 on unit CompleteOperation 2025-03-31T16:20:07.373646Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 152] from 9437186 at 
tablet 9437186 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-03-31T16:20:07.373693Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-03-31T16:20:07.472166Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 116 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 38} 2025-03-31T16:20:07.472235Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:20:07.472289Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 116 2025-03-31T16:20:07.472363Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:234:2227], Recipient [1:345:2313]: {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2025-03-31T16:20:07.472387Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:20:07.472429Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 151 2025-03-31T16:20:07.472591Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 119 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 39} 2025-03-31T16:20:07.472621Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:20:07.472646Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 119 2025-03-31T16:20:07.472708Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:234:2227], Recipient [1:345:2313]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2025-03-31T16:20:07.472748Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:20:07.472793Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2025-03-31T16:20:07.472879Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 122 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 40} 2025-03-31T16:20:07.472904Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:20:07.472926Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 122 2025-03-31T16:20:07.472976Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:234:2227], Recipient [1:345:2313]: {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2025-03-31T16:20:07.473000Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:20:07.473050Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 2025-03-31T16:20:07.473145Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 125 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 
Seqno# 41} 2025-03-31T16:20:07.473171Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:20:07.473207Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 125 2025-03-31T16:20:07.473272Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2025-03-31T16:20:07.473301Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:20:07.473350Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 143 2025-03-31T16:20:07.473451Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2025-03-31T16:20:07.473479Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:20:07.473499Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 146 2025-03-31T16:20:07.473572Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2025-03-31T16:20:07.473592Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:20:07.473624Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2025-03-31T16:20:07.473699Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-03-31T16:20:07.473720Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:20:07.473734Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 2025-03-31T16:20:07.473770Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 128 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 42} 2025-03-31T16:20:07.473784Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:20:07.473798Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 128 2025-03-31T16:20:07.473864Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 131 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 43} 2025-03-31T16:20:07.473890Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:20:07.473913Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 131 2025-03-31T16:20:07.473974Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 134 TabletSource# 9437184 
TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2025-03-31T16:20:07.473991Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:20:07.474016Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 134 2025-03-31T16:20:07.474054Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2025-03-31T16:20:07.474066Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:20:07.474078Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 137 2025-03-31T16:20:07.474121Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:234:2227]: {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2025-03-31T16:20:07.474133Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:20:07.474157Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 140
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::OneShardLocalExec+UseSink [GOOD]
Test command err: Trying to start YDB, gRPC: 20606, MsgBus: 1521 2025-03-31T16:19:54.148460Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488009170509515145:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:54.148569Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000aa2/r3tmp/tmpuSsml1/pdisk_1.dat 2025-03-31T16:19:54.378260Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20606, node 1 2025-03-31T16:19:54.430808Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:19:54.430842Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:19:54.430852Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:19:54.430999Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:19:54.488546Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:19:54.488658Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:19:54.490278Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1521 TClient is connected to server localhost:1521 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:19:54.914404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:54.950195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:55.073553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:55.195553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:55.268112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:56.277807Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009179099451531:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:56.277884Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:56.504066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:19:56.528948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:19:56.552516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:19:56.576018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:19:56.597841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:19:56.624764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:19:56.695512Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009179099452044:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:56.695572Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009179099452049:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:56.695579Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:56.698356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:19:56.705169Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488009179099452051:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:19:56.804557Z node 1 :TX_PROXY ERROR: Actor# [1:7488009179099452107:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:19:57.944973Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437997931, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 11379, MsgBus: 25521 2025-03-31T16:19:58.641807Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488009184941049613:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:58.641867Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000aa2/r3tmp/tmp2xWKgH/pdisk_1.dat 2025-03-31T16:19:58.749550Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:19:58.770479Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:19:58.770541Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:19:58.772142Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11379, node 2 2025-03-31T16:19:58.802724Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:19:58.802743Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:19:58.802748Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:19:58.802834Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25521 TClient is connected to server localhost:25521 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:19:59.150688Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:19:59.166767Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:59.235775Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:59.385589Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:59.459055Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:01.018164Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488009197825953281:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:01.018243Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:01.053530Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:20:01.078511Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:20:01.105101Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:20:01.168642Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:20:01.192573Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:20:01.259621Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:20:01.291561Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488009197825953798:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:01.291667Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:01.291676Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488009197825953803:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:01.294411Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:20:01.302172Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488009197825953805:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:20:01.379651Z node 2 :TX_PROXY ERROR: Actor# [2:7488009197825953858:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:20:02.077674Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438002103, txId: 281474976715671] shutting down Trying to start YDB, gRPC: 15016, MsgBus: 9728 2025-03-31T16:20:02.742712Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488009201481419165:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:02.742787Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000aa2/r3tmp/tmpxDcgZK/pdisk_1.dat 2025-03-31T16:20:02.847432Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15016, node 3 2025-03-31T16:20:02.884119Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:02.884189Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:02.885822Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:20:02.899228Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:02.899246Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:02.899252Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:02.899350Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9728 TClient is connected to server localhost:9728 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:03.301274Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:20:03.313355Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:03.384930Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:03.504560Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:03.556094Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:05.392767Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488009214366322842:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:05.392833Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:05.412448Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:20:05.439400Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:20:05.467041Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:20:05.493669Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:20:05.519893Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:20:05.584922Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:20:05.623534Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488009214366323354:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:05.623604Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488009214366323359:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:05.623615Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:05.626445Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:20:05.634781Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488009214366323361:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:20:05.733657Z node 3 :TX_PROXY ERROR: Actor# [3:7488009214366323415:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
>> Cdc::ResolvedTimestampsVolatileOutOfOrder [GOOD]
>> Cdc::SequentialSplitMerge
>> KqpStats::JoinNoStatsYql
>> TKeyValueTest::TestWriteReadRangeDataLimitThenLimitWorksNewApi [GOOD]
>> KqpScripting::StreamExecuteYqlScriptScanScalar [GOOD]
>> KqpStats::DataQueryWithEffects+UseSink [GOOD]
>> KqpStats::DataQueryWithEffects-UseSink
>> KqpQuery::UpdateWhereInSubquery [GOOD]
>> KqpQuery::ExecuteDataQueryCollectMeta
>> Cdc::InitialScan_WithTopicSchemeTx [GOOD]
>> Cdc::InitialScan_TopicAutoPartitioning
>> KqpQuery::DdlInDataQuery
>> BsControllerConfig::MoveGroups [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteReadRangeDataLimitThenLimitWorksNewApi [GOOD]
Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:52:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:52:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:78:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:79:2110] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:79:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest !
Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! 
Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:140:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:83:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:86:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:87:2057] recipient: [8:85:2115] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:89:2057] recipient: [8:85:2115] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! new actor is[8:88:2116] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:142:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! 
Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:83:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:86:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:85:2115] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:89:2057] recipient: [9:85:2115] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! new actor is[9:88:2116] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:142:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:52:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:52:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:84:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:87:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:88:2057] recipient: [10:86:2115] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:90:2057] recipient: [10:86:2115] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:89:2116] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:143:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:50:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:57:2057] recipient: [13:50:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:74:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:76:2057] recipient: [13:36:2083] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:79:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:80:2057] recipient: [13:78:2110] Leader for TabletID 72057594037927937 is [13:81:2111] sender: [13:82:2057] recipient: [13:78:2110] !Reboot 72057594037927937 (actor [13:56:2097]) rebooted! 
!Reboot 72057594037927937 (actor [13:56:2097]) tablet resolver refreshed! new actor is[13:81:2111] Leader for TabletID 72057594037927937 is [13:81:2111] sender: [13:135:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:52:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:57:2057] recipient: [14:52:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:74:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:76:2057] recipient: [14:36:2083] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:79:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:80:2057] recipient: [14:78:2110] Leader for TabletID 72057594037927937 is [14:81:2111] sender: [14:82:2057] recipient: [14:78:2110] !Reboot 72057594037927937 (actor [14:56:2097]) rebooted! !Reboot 72057594037927937 (actor [14:56:2097]) tablet resolver refreshed! new actor is[14:81:2111] Leader for TabletID 72057594037927937 is [14:81:2111] sender: [14:135:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:50:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:57:2057] recipient: [15:50:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:74:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:77:2057] recipient: [15:36:2083] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:80:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:81:2057] recipient: [15:79:2110] Leader for TabletID 72057594037927937 is [15:82:2111] sender: [15:83:2057] recipient: [15:79:2110] !Reboot 72057594037927937 (actor [15:56:2097]) rebooted! !Reboot 72057594037927937 (actor [15:56:2097]) tablet resolver refreshed! new actor is[15:82:2111] Leader for TabletID 72057594037927937 is [15:82:2111] sender: [15:136:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:57:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:74:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:80:2057] recipient: [16:36:2083] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:82:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:84:2057] recipient: [16:83:2113] Leader for TabletID 72057594037927937 is [16:85:2114] sender: [16:86:2057] recipient: [16:83:2113] !Reboot 72057594037927937 (actor [16:56:2097]) rebooted! !Reboot 72057594037927937 (actor [16:56:2097]) tablet resolver refreshed! 
new actor is[16:85:2114] Leader for TabletID 72057594037927937 is [16:85:2114] sender: [16:139:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:52:2095] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:57:2057] recipient: [17:52:2095] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:74:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:80:2057] recipient: [17:36:2083] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:83:2057] recipient: [17:82:2113] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:84:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [17:85:2114] sender: [17:86:2057] recipient: [17:82:2113] !Reboot 72057594037927937 (actor [17:56:2097]) rebooted! !Reboot 72057594037927937 (actor [17:56:2097]) tablet resolver refreshed! new actor is[17:85:2114] Leader for TabletID 72057594037927937 is [17:85:2114] sender: [17:139:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:50:2095] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:57:2057] recipient: [18:50:2095] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:74:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:81:2057] recipient: [18:36:2083] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:84:2057] recipient: [18:83:2113] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:85:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:86:2114] sender: [18:87:2057] recipient: [18:83:2113] !Reboot 72057594037927937 (actor [18:56:2097]) rebooted! !Reboot 72057594037927937 (actor [18:56:2097]) tablet resolver refreshed! new actor is[18:86:2114] Leader for TabletID 72057594037927937 is [18:86:2114] sender: [18:104:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:51:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:57:2057] recipient: [19:51:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:74:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:83:2057] recipient: [19:36:2083] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:86:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:87:2057] recipient: [19:85:2115] Leader for TabletID 72057594037927937 is [19:88:2116] sender: [19:89:2057] recipient: [19:85:2115] !Reboot 72057594037927937 (actor [19:56:2097]) rebooted! !Reboot 72057594037927937 (actor [19:56:2097]) tablet resolver refreshed! 
new actor is[19:88:2116] Leader for TabletID 72057594037927937 is [19:88:2116] sender: [19:142:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:51:2095] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:57:2057] recipient: [20:51:2095] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:74:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:83:2057] recipient: [20:36:2083] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:86:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:87:2057] recipient: [20:85:2115] Leader for TabletID 72057594037927937 is [20:88:2116] sender: [20:89:2057] recipient: [20:85:2115] !Reboot 72057594037927937 (actor [20:56:2097]) rebooted! !Reboot 72057594037927937 (actor [20:56:2097]) tablet resolver refreshed! new actor is[20:88:2116] Leader for TabletID 72057594037927937 is [20:88:2116] sender: [20:142:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:50:2095] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:57:2057] recipient: [21:50:2095] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:74:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:84:2057] recipient: [21:36:2083] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:87:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:88:2057] recipient: [21:86:2115] Leader for TabletID 72057594037927937 is [21:89:2116] sender: [21:90:2057] recipient: [21:86:2115] !Reboot 72057594037927937 (actor [21:56:2097]) rebooted! !Reboot 72057594037927937 (actor [21:56:2097]) tablet resolver refreshed! 
new actor is[21:89:2116] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:57:2057] recipient: [22:51:2095] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:74:2057] recipient: [22:14:2061]
>> TKeyValueTest::TestWriteReadRangeLimitThenLimitWorksNewApi [GOOD]
>> BasicUsage::PreferredDatabaseNoFallback [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::MoveGroups [GOOD]
Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:3014:2106] recipient: [1:2914:2116] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:3014:2106] recipient: [1:2914:2116] Leader for TabletID 72057594037932033 is [1:3016:2118] sender: [1:3017:2106] recipient: [1:2914:2116] 2025-03-31T16:19:29.797685Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-03-31T16:19:29.802006Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-03-31T16:19:29.802296Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-03-31T16:19:29.803812Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:19:29.804219Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-03-31T16:19:29.804743Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-03-31T16:19:29.804766Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:497} Handle TEvInterconnect::TEvNodesInfo 2025-03-31T16:19:29.804987Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-03-31T16:19:29.814427Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-03-31T16:19:29.814538Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-03-31T16:19:29.814692Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-03-31T16:19:29.814763Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-03-31T16:19:29.814861Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-03-31T16:19:29.814919Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:3016:2118] sender: [1:3042:2106] recipient: [1:60:2107] 2025-03-31T16:19:29.826691Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-03-31T16:19:29.826808Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-03-31T16:19:29.837618Z node 1 :BS_CONTROLLER DEBUG:
{BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-03-31T16:19:29.837774Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-03-31T16:19:29.837877Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-03-31T16:19:29.837974Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-03-31T16:19:29.838104Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-03-31T16:19:29.838159Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-03-31T16:19:29.838208Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-03-31T16:19:29.838296Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-03-31T16:19:29.848940Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-03-31T16:19:29.849049Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-03-31T16:19:29.859782Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-03-31T16:19:29.859938Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-03-31T16:19:29.861012Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:546} TTxLoadEverything Complete 2025-03-31T16:19:29.861094Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2167} LoadFinished 2025-03-31T16:19:29.861268Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-03-31T16:19:29.861327Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:551} TTxLoadEverything InitQueue processed 2025-03-31T16:19:29.873219Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" SharedWithOs: true } Drive { Path: "/dev/disk3" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "first box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } 
HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12013 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12014 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12015 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12016 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12017 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12018 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12019 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12020 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12021 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12022 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12023 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12024 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12025 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12026 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12027 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12028 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12029 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12030 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12031 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12032 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12033 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12034 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12035 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12036 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12037 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12038 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12039 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12040 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12041 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12042 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12043 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12044 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12045 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12046 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12047 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12048 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12049 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12050 } HostConfigId: 1 } } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "first storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 150 PDiskFilter { Property { Type: ROT } } } } } 2025-03-31T16:19:29.874511Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 1:1000 Path# /dev/disk1 2025-03-31T16:19:29.874562Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 1:1001 Path# /dev/disk2 2025-03-31T16:19:29.874579Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 1:1002 Path# /dev/disk3 2025-03-31T16:19:29.874592Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 2:1000 Path# /dev/disk1 2025-03-31T16:19:29.874606Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 2:1001 Path# /dev/disk2 2025-03-31T16:19:29.874628Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 2:1002 Path# /dev/disk3 2025-03-31T16:19:29.874649Z node 1 :BS_CONTROLLER NOTICE: 
{BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 3:1000 Path# /dev/disk1 2025-03-31T16:19:29.874697Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 3:1001 Path# /dev/disk2 2025-03-31T16:19:29.874717Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 3:1002 Path# /dev/disk3 2025-03-31T16:19:29.874737Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 4:1000 Path# /dev/disk1 2025-03-31T16:19:29.874750Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 4:1001 Path# /dev/disk2 2025-03-31T16:19:29.874764Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 4:1002 Path# /dev/disk3 2025-03-31T16:19:29.874777Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 5:1000 Path# /dev/disk1 2025-03-31T16:19:29.874790Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 5:1001 Path# /dev/disk2 2025-03-31T16:19:29.874806Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 5:1002 Path# /dev/disk3 2025-03-31T16:19:29.874829Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 6:1000 Path# /dev/disk1 2025-03-31T16:19:29.874844Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 6:1001 Path# /dev/disk2 2025-03-31T16:19:29.874857Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 6:1002 Path# /dev/disk3 2025-03-31T16:19:29.874878Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 7:1000 Path# /dev/disk1 2025-03-31T16:19:29.874896Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 7:1001 Path# /dev/disk2 2025-03-31T16:19:29.874919Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:3 ... 
78:1000 Path# /dev/disk1 2025-03-31T16:20:00.389935Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 178:1001 Path# /dev/disk2 2025-03-31T16:20:00.389953Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 178:1002 Path# /dev/disk3 2025-03-31T16:20:00.389972Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 179:1000 Path# /dev/disk1 2025-03-31T16:20:00.389990Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 179:1001 Path# /dev/disk2 2025-03-31T16:20:00.390008Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 179:1002 Path# /dev/disk3 2025-03-31T16:20:00.390028Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 180:1000 Path# /dev/disk1 2025-03-31T16:20:00.390053Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 180:1001 Path# /dev/disk2 2025-03-31T16:20:00.390070Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 180:1002 Path# /dev/disk3 2025-03-31T16:20:00.390086Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 181:1000 Path# /dev/disk1 2025-03-31T16:20:00.390102Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 181:1001 Path# /dev/disk2 2025-03-31T16:20:00.390120Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 181:1002 Path# /dev/disk3 2025-03-31T16:20:00.390137Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 182:1000 Path# /dev/disk1 2025-03-31T16:20:00.390155Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 182:1001 Path# /dev/disk2 2025-03-31T16:20:00.390175Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 182:1002 Path# /dev/disk3 2025-03-31T16:20:00.390193Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 183:1000 Path# /dev/disk1 2025-03-31T16:20:00.390211Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 183:1001 Path# /dev/disk2 2025-03-31T16:20:00.390229Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 183:1002 Path# /dev/disk3 2025-03-31T16:20:00.390246Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 184:1000 Path# /dev/disk1 2025-03-31T16:20:00.390263Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 184:1001 Path# /dev/disk2 2025-03-31T16:20:00.390281Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 184:1002 Path# /dev/disk3 2025-03-31T16:20:00.390307Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 185:1000 Path# /dev/disk1 2025-03-31T16:20:00.390328Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 185:1001 Path# /dev/disk2 2025-03-31T16:20:00.390348Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 185:1002 Path# /dev/disk3 2025-03-31T16:20:00.390365Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 
186:1000 Path# /dev/disk1 2025-03-31T16:20:00.390383Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 186:1001 Path# /dev/disk2 2025-03-31T16:20:00.390401Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 186:1002 Path# /dev/disk3 2025-03-31T16:20:00.390425Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 187:1000 Path# /dev/disk1 2025-03-31T16:20:00.390452Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 187:1001 Path# /dev/disk2 2025-03-31T16:20:00.390481Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 187:1002 Path# /dev/disk3 2025-03-31T16:20:00.390510Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 188:1000 Path# /dev/disk1 2025-03-31T16:20:00.390539Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 188:1001 Path# /dev/disk2 2025-03-31T16:20:00.390567Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 188:1002 Path# /dev/disk3 2025-03-31T16:20:00.390596Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 189:1000 Path# /dev/disk1 2025-03-31T16:20:00.390620Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 189:1001 Path# /dev/disk2 2025-03-31T16:20:00.390649Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 189:1002 Path# /dev/disk3 2025-03-31T16:20:00.390676Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 190:1000 Path# /dev/disk1 2025-03-31T16:20:00.390703Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 190:1001 Path# /dev/disk2 2025-03-31T16:20:00.390730Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 190:1002 Path# /dev/disk3 2025-03-31T16:20:00.390755Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 191:1000 Path# /dev/disk1 2025-03-31T16:20:00.390786Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 191:1001 Path# /dev/disk2 2025-03-31T16:20:00.390804Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 191:1002 Path# /dev/disk3 2025-03-31T16:20:00.390821Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 192:1000 Path# /dev/disk1 2025-03-31T16:20:00.390839Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 192:1001 Path# /dev/disk2 2025-03-31T16:20:00.390856Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 192:1002 Path# /dev/disk3 2025-03-31T16:20:00.390876Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 193:1000 Path# /dev/disk1 2025-03-31T16:20:00.390894Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 193:1001 Path# /dev/disk2 2025-03-31T16:20:00.390915Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 193:1002 Path# /dev/disk3 2025-03-31T16:20:00.390932Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 
194:1000 Path# /dev/disk1 2025-03-31T16:20:00.390950Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 194:1001 Path# /dev/disk2 2025-03-31T16:20:00.390968Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 194:1002 Path# /dev/disk3 2025-03-31T16:20:00.390991Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 195:1000 Path# /dev/disk1 2025-03-31T16:20:00.391012Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 195:1001 Path# /dev/disk2 2025-03-31T16:20:00.391031Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 195:1002 Path# /dev/disk3 2025-03-31T16:20:00.391050Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 196:1000 Path# /dev/disk1 2025-03-31T16:20:00.391070Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 196:1001 Path# /dev/disk2 2025-03-31T16:20:00.391113Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 196:1002 Path# /dev/disk3 2025-03-31T16:20:00.391142Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 197:1000 Path# /dev/disk1 2025-03-31T16:20:00.391162Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 197:1001 Path# /dev/disk2 2025-03-31T16:20:00.391181Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 197:1002 Path# /dev/disk3 2025-03-31T16:20:00.391197Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 198:1000 Path# /dev/disk1 2025-03-31T16:20:00.391215Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 198:1001 Path# /dev/disk2 2025-03-31T16:20:00.391231Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 198:1002 Path# /dev/disk3 2025-03-31T16:20:00.391249Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 199:1000 Path# /dev/disk1 2025-03-31T16:20:00.391266Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 199:1001 Path# /dev/disk2 2025-03-31T16:20:00.391282Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 199:1002 Path# /dev/disk3 2025-03-31T16:20:00.391299Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 200:1000 Path# /dev/disk1 2025-03-31T16:20:00.391316Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 200:1001 Path# /dev/disk2 2025-03-31T16:20:00.391333Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 200:1002 Path# /dev/disk3 2025-03-31T16:20:00.609746Z node 151 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.223076s 2025-03-31T16:20:00.609938Z node 151 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.223288s 2025-03-31T16:20:00.635773Z node 151 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { QueryBaseConfig { } } } 2025-03-31T16:20:00.706701Z node 151 :BS_CONTROLLER DEBUG: 
{BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { MoveGroups { BoxId: 1 OriginStoragePoolId: 2 OriginStoragePoolGeneration: 1 TargetStoragePoolId: 1 TargetStoragePoolGeneration: 1 ExplicitGroupId: 2147483748 } } } 2025-03-31T16:20:00.722280Z node 151 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { QueryBaseConfig { } } } 2025-03-31T16:20:00.794619Z node 151 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { MoveGroups { BoxId: 1 OriginStoragePoolId: 2 OriginStoragePoolGeneration: 2 TargetStoragePoolId: 1 TargetStoragePoolGeneration: 2 ExplicitGroupId: 2147483749 } } } 2025-03-31T16:20:00.811233Z node 151 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { QueryBaseConfig { } } } 2025-03-31T16:20:00.879978Z node 151 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { MoveGroups { BoxId: 1 OriginStoragePoolId: 2 OriginStoragePoolGeneration: 3 TargetStoragePoolId: 1 TargetStoragePoolGeneration: 3 } } } 2025-03-31T16:20:00.896921Z node 151 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { QueryBaseConfig { } } } >> KqpParams::Decimal-QueryService-UseSink [GOOD] >> KqpParams::Decimal+QueryService-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::UpdateWhereInSubquery [GOOD] Test command err: Trying to start YDB, gRPC: 22506, MsgBus: 13298 2025-03-31T16:19:57.506596Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488009182819051929:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:57.506695Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a97/r3tmp/tmplhEljr/pdisk_1.dat 2025-03-31T16:19:57.735035Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22506, node 1 2025-03-31T16:19:57.795129Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:19:57.795154Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:19:57.795162Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:19:57.795315Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:19:57.858454Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:19:57.858551Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:19:57.860149Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13298 TClient is connected to server localhost:13298 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:19:58.233800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:58.248146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:58.365250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:58.499459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:58.576035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:59.739635Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009191408988319:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:59.739746Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:59.966165Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:19:59.991383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:20:00.014715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:20:00.038667Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:20:00.063060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:20:00.105535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:20:00.179227Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009195703956133:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:00.179317Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:00.179337Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009195703956138:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:00.183052Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:20:00.192350Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488009195703956140:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:20:00.283372Z node 1 :TX_PROXY ERROR: Actor# [1:7488009195703956195:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:20:01.149514Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488009199998923819:2504], status: GENERIC_ERROR, issues:
<main>:3:26: Error: mismatched input '[' expecting {'*', '(', '@', '$', ABORT, ACTION, ADD, AFTER, ALL, ALTER, ANALYZE, AND, ANSI, ANY, ARRAY, AS, ASC, ASSUME, ASYMMETRIC, ASYNC, AT, ATTACH, ATTRIBUTES, AUTOINCREMENT, BACKUP, BATCH, COLLECTION, BEFORE, BEGIN, BERNOULLI, BETWEEN, BITCAST, BY, CALLABLE, CASCADE, CASE, CAST, CHANGEFEED, CHECK, CLASSIFIER, COLLATE, COLUMN, COLUMNS, COMMIT, COMPACT, CONDITIONAL, CONFLICT, CONNECT, CONSTRAINT, CONSUMER, COVER, CREATE, CROSS, CUBE, CURRENT, CURRENT_DATE, CURRENT_TIME, CURRENT_TIMESTAMP, DATA, DATABASE, DECIMAL, DECLARE, DEFAULT, DEFERRABLE, DEFERRED, DEFINE, DELETE, DESC, DESCRIBE, DETACH, DICT, DIRECTORY, DISABLE, DISCARD, DISTINCT, DO, DROP, EACH, ELSE, EMPTY, EMPTY_ACTION, ENCRYPTED, END, ENUM, ERASE, ERROR, ESCAPE, EVALUATE, EXCEPT, EXCLUDE, EXCLUSION, EXCLUSIVE, EXISTS, EXPLAIN, EXPORT, EXTERNAL, FAIL, FAMILY, FILTER, FIRST, FLATTEN, FLOW, FOLLOWING, FOR, FOREIGN, FROM, FULL, FUNCTION, GLOB, GLOBAL, GRANT, GROUP, GROUPING, GROUPS, HASH, HAVING, HOP, IF, IGNORE, ILIKE, IMMEDIATE, IMPORT, IN, INCREMENT, INCREMENTAL, INDEX, INDEXED, INHERITS, INITIAL, INITIALLY, INNER, INSERT, INSTEAD, INTERSECT, INTO, IS, ISNULL, JOIN, JSON_EXISTS, JSON_QUERY, JSON_VALUE, KEY, LAST, LEFT, LEGACY, LIKE, LIMIT, LIST, LOCAL, LOGIN, MANAGE, MATCH, MATCHES, MATCH_RECOGNIZE, MEASURES, MICROSECONDS, MILLISECONDS, MODIFY, NANOSECONDS, NATURAL, NEXT, NO, NOLOGIN, NOT, NOTNULL, NULL, NULLS, OBJECT, OF, OFFSET, OMIT, ON, ONE, ONLY, OPTION, OPTIONAL, OR, ORDER, OTHERS, OUTER, OVER, OWNER, PARALLEL, PARTITION, PASSING, PASSWORD, PAST, PATTERN, PER, PERMUTE, PLAN, POOL, PRAGMA, PRECEDING, PRESORT, PRIMARY, PRIVILEGES, PROCESS, QUERY, QUEUE, RAISE, RANGE, REDUCE, REFERENCES, REGEXP, REINDEX, RELEASE, REMOVE, RENAME, REPLACE, REPLICATION, RESET, RESOURCE, RESPECT, RESTART, RESTORE, RESTRICT, RESULT, RETURN, RETURNING, REVERT, REVOKE, RIGHT, RLIKE, ROLLBACK, ROLLUP, ROW, ROWS, SAMPLE, SAVEPOINT, SCHEMA, SECONDS, SEEK, SELECT, SEMI, SET, SETS, SHOW, TSKIP, SEQUENCE, SOURCE, START, STREAM, STRUCT, SUBQUERY, SUBSET, SYMBOLS, SYMMETRIC, SYNC, SYSTEM, TABLE, TABLES, TABLESAMPLE, TABLESTORE, TAGGED, TEMP, TEMPORARY, THEN, TIES, TO, TOPIC, TRANSACTION, TRANSFER, TRIGGER, TUPLE, TYPE, UNBOUNDED, UNCONDITIONAL, UNION, UNIQUE, UNKNOWN, UNMATCHED, UPDATE, UPSERT, USE, USER, USING, VACUUM, VALUES, VARIANT, VIEW, VIRTUAL, WHEN, WHERE, WINDOW, WITH, WITHOUT, WRAPPER, XOR, STRING_VALUE, ID_PLAIN, ID_QUOTED} 2025-03-31T16:20:01.149706Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTcyODQ0YjAtODQzODU0NzQtMjgxZTA5YmItYWZiZDIwMzE=, ActorId: [1:7488009195703956477:2495], ActorState: ExecuteState, TraceId: 01jqpg1cz5474m2st17na4th98, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
<main>:3:26: Error: mismatched input '[' expecting {'*', '(', '@', '$', ABORT, ACTION, ADD, AFTER, ALL, ALTER, ANALYZE, AND, ANSI, ANY, ARRAY, AS, ASC, ASSUME, ASYMMETRIC, ASYNC, AT, ATTACH, ATTRIBUTES, AUTOINCREMENT, BACKUP, BATCH, COLLECTION, BEFORE, BEGIN, BERNOULLI, BETWEEN, BITCAST, BY, CALLABLE, CASCADE, CASE, CAST, CHANGEFEED, CHECK, CLASSIFIER, COLLATE, COLUMN, COLUMNS, COMMIT, COMPACT, CONDITIONAL, CONFLICT, CONNECT, CONSTRAINT, CONSUMER, COVER, CREATE, CROSS, CUBE, CURRENT, CURRENT_DATE, CURRENT_TIME, CURRENT_TIMESTAMP, DATA, DATABASE, DECIMAL, DECLARE, DEFAULT, DEFERRABLE, DEFERRED, DEFINE, DELETE, DESC, DESCRIBE, DETACH, DICT, DIRECTORY, DISABLE, DISCARD, DISTINCT, DO, DROP, EACH, ELSE, EMPTY, EMPTY_ACTION, ENCRYPTED, END, ENUM, ERASE, ERROR, ESCAPE, EVALUATE, EXCEPT, EXCLUDE, EXCLUSION, EXCLUSIVE, EXISTS, EXPLAIN, EXPORT, EXTERNAL, FAIL, FAMILY, FILTER, FIRST, FLATTEN, FLOW, FOLLOWING, FOR, FOREIGN, FROM, FULL, FUNCTION, GLOB, GLOBAL, GRANT, GROUP, GROUPING, GROUPS, HASH, HAVING, HOP, IF, IGNORE, ILIKE, IMMEDIATE, IMPORT, IN, INCREMENT, INCREMENTAL, INDEX, INDEXED, INHERITS, INITIAL, INITIALLY, INNER, INSERT, INSTEAD, INTERSECT, INTO, IS, ISNULL, JOIN, JSON_EXISTS, JSON_QUERY, JSON_VALUE, KEY, LAST, LEFT, LEGACY, LIKE, LIMIT, LIST, LOCAL, LOGIN, MANAGE, MATCH, MATCHES, MATCH_RECOGNIZE, MEASURES, MICROSECONDS, MILLISECONDS, MODIFY, NANOSECONDS, NATURAL, NEXT, NO, NOLOGIN, NOT, NOTNULL, NULL, NULLS, OBJECT, OF, OFFSET, OMIT, ON, ONE, ONLY, OPTION, OPTIONAL, ... art proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:04.103762Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488009213041980555:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:04.103861Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:04.138367Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:20:04.160645Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:20:04.183714Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:20:04.210986Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:20:04.240687Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:20:04.272160Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:20:04.310635Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488009213041981061:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:04.310741Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488009213041981066:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:04.310776Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:04.313742Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:20:04.330976Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488009213041981068:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:20:04.430493Z node 2 :TX_PROXY ERROR: Actor# [2:7488009213041981123:3442] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:20:05.134498Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7488009217336948714:2500], status: UNSUPPORTED, issues:
<main>: Error: Default error
<main>:1:15: Error: ATOM evaluation is not supported in YDB queries., code: 2030 2025-03-31T16:20:05.134697Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MmZjNmEyNmMtNWMxMTRjZWMtODIwMDY4ODgtZDE4NTQ2MTQ=, ActorId: [2:7488009217336948706:2495], ActorState: ExecuteState, TraceId: 01jqpg1gvya5rqcm54c5fev3pf, ReplyQueryCompileError, status UNSUPPORTED remove tx with tx_id: Trying to start YDB, gRPC: 4077, MsgBus: 27803 2025-03-31T16:20:05.700337Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488009216616517820:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:05.700384Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a97/r3tmp/tmpGQDmNu/pdisk_1.dat 2025-03-31T16:20:05.804695Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4077, node 3 2025-03-31T16:20:05.824178Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:05.824278Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:05.826111Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:20:05.846969Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:05.846990Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:05.846997Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:05.847128Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27803 TClient is connected to server localhost:27803 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:06.220249Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:06.228090Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting...
2025-03-31T16:20:06.297760Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:06.421724Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:06.473590Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:08.177569Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488009229501421484:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:08.177663Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:08.227764Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:20:08.254846Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:20:08.280660Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:20:08.308227Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:20:08.336826Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:20:08.366083Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:20:08.440295Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488009229501421997:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:08.440394Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:08.440461Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488009229501422002:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:08.443656Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:20:08.451252Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488009229501422004:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:20:08.517570Z node 3 :TX_PROXY ERROR: Actor# [3:7488009229501422058:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptScanScalar [GOOD] Test command err: Trying to start YDB, gRPC: 3727, MsgBus: 21821 2025-03-31T16:19:01.329385Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008941484535794:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:01.329446Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000b3f/r3tmp/tmpuGlmcF/pdisk_1.dat 2025-03-31T16:19:01.691277Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:19:01.741854Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:19:01.741965Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:19:01.745367Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3727, node 1 2025-03-31T16:19:01.849494Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:19:01.849519Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:19:01.849526Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:19:01.849645Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21821 TClient is connected to server localhost:21821 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:19:02.356812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting...
2025-03-31T16:19:02.384672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:02.505906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:02.677776Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:02.759454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:04.353031Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008954369439431:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:04.353214Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:04.685428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:19:04.716832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:19:04.785407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:19:04.820291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:19:04.851559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:19:04.882686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:19:04.932763Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008954369439942:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:04.932853Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:04.933077Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008954369439947:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:04.937776Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:19:04.949854Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008954369439949:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:19:05.041691Z node 1 :TX_PROXY ERROR: Actor# [1:7488008958664407299:3442] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:19:06.255676Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437946285, txId: 281474976710672] shutting down 2025-03-31T16:19:06.329533Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008941484535794:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:06.329601Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:19:06.536965Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437946572, txId: 281474976710675] shutting down 2025-03-31T16:19:06.782356Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437946817, txId: 281474976710678] shutting down 2025-03-31T16:19:07.035589Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437947069, txId: 281474976710681] shutting down 2025-03-31T16:19:07.288463Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437947314, txId: 281474976710684] shutting down 2025-03-31T16:19:07.544249Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437947573, txId: 281474976710687] shutting down 2025-03-31T16:19:07.851563Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437947881, txId: 281474976710690] shutting down 2025-03-31T16:19:08.143805Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437948168, txId: 281474976710693] shutting down 2025-03-31T16:19:08.405670Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437948441, txId: 281474976710696] shutting down 2025-03-31T16:19:08.664559Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437948700, txId: 281474976710699] shutting down 2025-03-31T16:19:08.919467Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437948952, txId: 281474976710702] shutting down 2025-03-31T16:19:09.214203Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437949246, txId: 281474976710705] shutting down 2025-03-31T16:19:09.529641Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437949561, txId: 281474976710708] shutting down 2025-03-31T16:19:09.827616Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437949855, txId: 281474976710711] shutting down 2025-03-31T16:19:10.126810Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437950163, txId: 281474976710714] shutting down 2025-03-31T16:19:10.398704Z node 1 :KQP_RESOURCE_MANAGER WARN:
KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437950429, txId: 281474976710717] shutting down 2025-03-31T16:19:10.697158Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437950723, txId: 281474976710720] shutting down 2025-03-31T16:19:11.068831Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437951052, txId: 281474976710723] shutting down 2025-03-31T16:19:11.377613Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437951409, txId: 281474976710726] shutting down 2025-03-31T16:19:11.711297Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437951731, txId: 281474976710729] shutting down 2025-03-31T16:19:12.039166Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437952060, txId: 281474976710732] shutting down 2025-03-31T16:19:12.365839Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437952396, txId: 281474976710735] shutting down 2025-03-31T16:19:12.673202Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snaps ... qpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437996433, txId: 281474976711206] shutting down 2025-03-31T16:19:56.666721Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437996664, txId: 281474976711209] shutting down 2025-03-31T16:19:56.909417Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437996944, txId: 281474976711212] shutting down 2025-03-31T16:19:57.210130Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437997245, txId: 281474976711215] shutting down 2025-03-31T16:19:57.451875Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437997483, txId: 281474976711218] shutting down 2025-03-31T16:19:57.770428Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437997756, txId: 281474976711221] shutting down 2025-03-31T16:19:58.054668Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437998085, txId: 281474976711224] shutting down 2025-03-31T16:19:58.332521Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437998358, txId: 281474976711227] shutting down 2025-03-31T16:19:58.617426Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437998652, txId: 281474976711230] shutting down 2025-03-31T16:19:58.990091Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437998967, txId: 281474976711233] shutting down 2025-03-31T16:19:59.304133Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437999338, txId: 281474976711236] shutting down 2025-03-31T16:19:59.576963Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437999611, txId: 281474976711239] shutting down 2025-03-31T16:19:59.876538Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743437999905, txId: 281474976711242] shutting down 
2025-03-31T16:20:00.162982Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438000192, txId: 281474976711245] shutting down 2025-03-31T16:20:00.554145Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438000535, txId: 281474976711248] shutting down 2025-03-31T16:20:00.882579Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438000913, txId: 281474976711251] shutting down 2025-03-31T16:20:01.140379Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438001172, txId: 281474976711254] shutting down 2025-03-31T16:20:01.404195Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438001417, txId: 281474976711257] shutting down 2025-03-31T16:20:01.676187Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438001711, txId: 281474976711260] shutting down 2025-03-31T16:20:01.938523Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438001970, txId: 281474976711263] shutting down 2025-03-31T16:20:02.205998Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438002236, txId: 281474976711266] shutting down 2025-03-31T16:20:02.502485Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438002530, txId: 281474976711269] shutting down Trying to start YDB, gRPC: 3908, MsgBus: 22584 2025-03-31T16:20:03.507269Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488009208520187551:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:03.507342Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000b3f/r3tmp/tmpYupU7P/pdisk_1.dat 2025-03-31T16:20:03.618244Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3908, node 2 2025-03-31T16:20:03.645393Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:03.645487Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:03.646859Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:20:03.684661Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:03.684683Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:03.684689Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:03.684822Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22584 TClient is connected to server localhost:22584 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:04.075818Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:04.083322Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:04.130337Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:04.279966Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:04.400903Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:06.794795Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488009221405091220:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:06.794886Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:06.823717Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:20:06.853721Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:20:06.883036Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:20:06.912222Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:20:06.942898Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:20:06.976890Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:20:07.052447Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488009225700059029:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:07.052466Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488009225700059034:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:07.052527Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:07.055947Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:20:07.065664Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488009225700059036:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:20:07.121856Z node 2 :TX_PROXY ERROR: Actor# [2:7488009225700059089:3446] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:20:08.507849Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488009208520187551:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:08.507935Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:20:09.140146Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438009159, txId: 281474976715671] shutting down >> KqpExplain::PrecomputeRange [GOOD] >> KqpExplain::MultiUsedStage >> BasicUsage::SimpleHandlers [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteReadRangeLimitThenLimitWorksNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:52:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:52:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:78:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:79:2110] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:79:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! 
Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! 
Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:140:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:83:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:86:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:87:2057] recipient: [8:85:2115] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:89:2057] recipient: [8:85:2115] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! new actor is[8:88:2116] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:142:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! 
Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:83:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:86:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:85:2115] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:89:2057] recipient: [9:85:2115] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! new actor is[9:88:2116] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:142:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:52:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:52:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:84:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:87:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:88:2057] recipient: [10:86:2115] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:90:2057] recipient: [10:86:2115] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:89:2116] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:143:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:86:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:88:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:90:2057] recipient: [11:89:2117] Leader for TabletID 72057594037927937 is [11:91:2118] sender: [11:92:2057] recipient: [11:89:2117] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! new actor is[11:91:2118] Leader for TabletID 72057594037927937 is [11:91:2118] sender: [11:145:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (acto ... 
bletID 72057594037927937 is [13:56:2097] sender: [13:87:2057] recipient: [13:36:2083] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:90:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:91:2057] recipient: [13:89:2117] Leader for TabletID 72057594037927937 is [13:92:2118] sender: [13:93:2057] recipient: [13:89:2117] !Reboot 72057594037927937 (actor [13:56:2097]) rebooted! !Reboot 72057594037927937 (actor [13:56:2097]) tablet resolver refreshed! new actor is[13:92:2118] Leader for TabletID 72057594037927937 is [13:92:2118] sender: [13:146:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:52:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:57:2057] recipient: [14:52:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:74:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:50:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:57:2057] recipient: [15:50:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:74:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:57:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:74:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:76:2057] recipient: [16:36:2083] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:79:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:80:2057] recipient: [16:78:2110] Leader for TabletID 72057594037927937 is [16:81:2111] sender: [16:82:2057] recipient: [16:78:2110] !Reboot 72057594037927937 (actor [16:56:2097]) rebooted! !Reboot 72057594037927937 (actor [16:56:2097]) tablet resolver refreshed! new actor is[16:81:2111] Leader for TabletID 72057594037927937 is [16:81:2111] sender: [16:135:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:52:2095] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:57:2057] recipient: [17:52:2095] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:74:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:76:2057] recipient: [17:36:2083] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:79:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:80:2057] recipient: [17:78:2110] Leader for TabletID 72057594037927937 is [17:81:2111] sender: [17:82:2057] recipient: [17:78:2110] !Reboot 72057594037927937 (actor [17:56:2097]) rebooted! 
!Reboot 72057594037927937 (actor [17:56:2097]) tablet resolver refreshed! new actor is[17:81:2111] Leader for TabletID 72057594037927937 is [17:81:2111] sender: [17:135:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:50:2095] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:57:2057] recipient: [18:50:2095] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:74:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:77:2057] recipient: [18:36:2083] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:80:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:81:2057] recipient: [18:79:2110] Leader for TabletID 72057594037927937 is [18:82:2111] sender: [18:83:2057] recipient: [18:79:2110] !Reboot 72057594037927937 (actor [18:56:2097]) rebooted! !Reboot 72057594037927937 (actor [18:56:2097]) tablet resolver refreshed! new actor is[18:82:2111] Leader for TabletID 72057594037927937 is [18:82:2111] sender: [18:136:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:51:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:57:2057] recipient: [19:51:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:74:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:80:2057] recipient: [19:36:2083] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:83:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:84:2057] recipient: [19:82:2113] Leader for TabletID 72057594037927937 is [19:85:2114] sender: [19:86:2057] recipient: [19:82:2113] !Reboot 72057594037927937 (actor [19:56:2097]) rebooted! !Reboot 72057594037927937 (actor [19:56:2097]) tablet resolver refreshed! new actor is[19:85:2114] Leader for TabletID 72057594037927937 is [19:85:2114] sender: [19:139:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:51:2095] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:57:2057] recipient: [20:51:2095] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:74:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:80:2057] recipient: [20:36:2083] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:83:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:84:2057] recipient: [20:82:2113] Leader for TabletID 72057594037927937 is [20:85:2114] sender: [20:86:2057] recipient: [20:82:2113] !Reboot 72057594037927937 (actor [20:56:2097]) rebooted! !Reboot 72057594037927937 (actor [20:56:2097]) tablet resolver refreshed! 
new actor is[20:85:2114] Leader for TabletID 72057594037927937 is [20:85:2114] sender: [20:139:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:50:2095] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:57:2057] recipient: [21:50:2095] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:74:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:81:2057] recipient: [21:36:2083] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:84:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:85:2057] recipient: [21:83:2113] Leader for TabletID 72057594037927937 is [21:86:2114] sender: [21:87:2057] recipient: [21:83:2113] !Reboot 72057594037927937 (actor [21:56:2097]) rebooted! !Reboot 72057594037927937 (actor [21:56:2097]) tablet resolver refreshed! new actor is[21:86:2114] Leader for TabletID 72057594037927937 is [21:86:2114] sender: [21:104:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:57:2057] recipient: [22:51:2095] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:74:2057] recipient: [22:14:2061] !Reboot 72057594037927937 (actor [22:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:83:2057] recipient: [22:36:2083] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:86:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:87:2057] recipient: [22:85:2115] Leader for TabletID 72057594037927937 is [22:88:2116] sender: [22:89:2057] recipient: [22:85:2115] !Reboot 72057594037927937 (actor [22:56:2097]) rebooted! !Reboot 72057594037927937 (actor [22:56:2097]) tablet resolver refreshed! new actor is[22:88:2116] Leader for TabletID 72057594037927937 is [22:88:2116] sender: [22:142:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:54:2057] recipient: [23:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:54:2057] recipient: [23:51:2095] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:57:2057] recipient: [23:51:2095] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:74:2057] recipient: [23:14:2061] !Reboot 72057594037927937 (actor [23:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:83:2057] recipient: [23:36:2083] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:86:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:87:2057] recipient: [23:85:2115] Leader for TabletID 72057594037927937 is [23:88:2116] sender: [23:89:2057] recipient: [23:85:2115] !Reboot 72057594037927937 (actor [23:56:2097]) rebooted! !Reboot 72057594037927937 (actor [23:56:2097]) tablet resolver refreshed! 
new actor is[23:88:2116] Leader for TabletID 72057594037927937 is [23:88:2116] sender: [23:142:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:54:2057] recipient: [24:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:54:2057] recipient: [24:51:2095] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:57:2057] recipient: [24:51:2095] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:74:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:84:2057] recipient: [24:36:2083] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:87:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:88:2057] recipient: [24:86:2115] Leader for TabletID 72057594037927937 is [24:89:2116] sender: [24:90:2057] recipient: [24:86:2115] !Reboot 72057594037927937 (actor [24:56:2097]) rebooted! !Reboot 72057594037927937 (actor [24:56:2097]) tablet resolver refreshed! new actor is[24:89:2116] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:54:2057] recipient: [25:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:54:2057] recipient: [25:51:2095] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:57:2057] recipient: [25:51:2095] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:74:2057] recipient: [25:14:2061] |89.7%| [TA] $(B)/ydb/core/kqp/ut/yql/test-results/unittest/{meta.json ... results_accumulator.log} |89.7%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/yql/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::PreferredDatabaseNoFallback [GOOD] Test command err: 2025-03-31T16:19:10.074465Z :GetAllStartPartitionSessions INFO: Random seed for debugging is 1743437950074431 2025-03-31T16:19:10.496819Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008981888296781:2277];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:10.496928Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:19:10.750613Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-31T16:19:10.762878Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0015f7/r3tmp/tmpfwYdId/pdisk_1.dat 2025-03-31T16:19:10.871254Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:19:11.250557Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:19:11.293251Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:19:11.293354Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:19:11.293841Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:19:11.293889Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: 
Disconnected -> Connecting 2025-03-31T16:19:11.298713Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:19:11.298877Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:19:11.304323Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32079, node 1 2025-03-31T16:19:11.671503Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/0l5c/0015f7/r3tmp/yandexyB0aBE.tmp 2025-03-31T16:19:11.671532Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/0l5c/0015f7/r3tmp/yandexyB0aBE.tmp 2025-03-31T16:19:11.679178Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/0l5c/0015f7/r3tmp/yandexyB0aBE.tmp 2025-03-31T16:19:11.679371Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:19:11.763072Z INFO: TTestServer started on Port 1546 GrpcPort 32079 TClient is connected to server localhost:1546 PQClient connected to localhost:32079 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:19:12.204625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-03-31T16:19:14.574907Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008999068166770:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:14.575073Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:14.575132Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008999068166783:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:14.578941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-03-31T16:19:14.602443Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008999068166785:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-03-31T16:19:14.907176Z node 1 :TX_PROXY ERROR: Actor# [1:7488008999068166876:2687] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:19:14.936757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:19:14.937300Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7488008998162882980:2314], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:19:14.937762Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MTZiMGExYzAtZjNjM2Y4NTctZWI2MmNjMTgtYTExNDA5MDc=, ActorId: [2:7488008998162882940:2308], ActorState: ExecuteState, TraceId: 01jqpfzznk77rkyk41xxhjpm9f, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:19:14.939325Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488008999068166896:2349], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:19:14.939514Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzJkMzEzNTktODRiMGE0NjEtNzM2YWFkZDctMzZiYWI1Yzk=, ActorId: [1:7488008999068166753:2337], ActorState: ExecuteState, TraceId: 01jqpfzzfx1yx2p39v73fcr7ac, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:19:14.940949Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T16:19:14.941336Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T16:19:15.076815Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:19:15.209095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:32079", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2025-03-31T16:19:15.452480Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jqpg007k6z2jzh9sst65459t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTE2M2RiNTgtNzVlZGFjNzQtMzljYWY2ZmQtM2UzMzJkMmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:19:15.471279Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008981888296781:2277];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:15.471361Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Subcribe to ClusterTracker from [1:7488009003363134592:2983] === CheckClustersList. 
Ok 2025-03-31T16:19:21.562754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 waiting... PQ Client: create topic: rt3.dc1--test-topic with 5 partitions CallPersQueueGRPC request to localhost:32079 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:32079 2025-03-31T16:19:21.660070Z node 1 :PERSQUEUE INFO: proxy answer MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--test-topic" NumPartitions: 5 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } LocalDC: true ReadRules: "user" ReadFromTimestampsMs: 0 ConsumerFormatVersions: 0 ConsumerCodecs { } Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lzop" } ReadRuleVersions: 0 } } } CallPersQueueGRPC response: Status: 129 ProxyErrorCode: 53 SchemeStatus: 1 FlatTxId { TxId: 28147497671 ... NFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1743437977809 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-31T16:19:37.810119Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|6c91e920-d793ecd6-e9add18b-7a12a832_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2025-03-31T16:19:37.810351Z :INFO: [] MessageGroupId [src] SessionId [src|6c91e920-d793ecd6-e9add18b-7a12a832_0] Write session: close. 
Timeout = 0 ms 2025-03-31T16:19:37.810398Z :INFO: [] MessageGroupId [src] SessionId [src|6c91e920-d793ecd6-e9add18b-7a12a832_0] Write session will now close 2025-03-31T16:19:37.810451Z :DEBUG: [] MessageGroupId [src] SessionId [src|6c91e920-d793ecd6-e9add18b-7a12a832_0] Write session: aborting 2025-03-31T16:19:37.811334Z :INFO: [] MessageGroupId [src] SessionId [src|6c91e920-d793ecd6-e9add18b-7a12a832_0] Write session: gracefully shut down, all writes complete 2025-03-31T16:19:37.811363Z :DEBUG: [] MessageGroupId [src] SessionId [src|6c91e920-d793ecd6-e9add18b-7a12a832_0] Write session is aborting and will not restart 2025-03-31T16:19:37.811434Z :DEBUG: [] MessageGroupId [src] SessionId [src|6c91e920-d793ecd6-e9add18b-7a12a832_0] Write session: destroy 2025-03-31T16:19:37.811399Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|6c91e920-d793ecd6-e9add18b-7a12a832_0 grpc read done: success: 0 data: 2025-03-31T16:19:37.811419Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|6c91e920-d793ecd6-e9add18b-7a12a832_0 grpc read failed 2025-03-31T16:19:37.811437Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|6c91e920-d793ecd6-e9add18b-7a12a832_0 grpc closed 2025-03-31T16:19:37.811449Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|6c91e920-d793ecd6-e9add18b-7a12a832_0 is DEAD 2025-03-31T16:19:37.812211Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-03-31T16:19:37.812530Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7488009097502084345:2524] destroyed 2025-03-31T16:19:37.812548Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 
====TYdbPqTestRetryPolicy() ====ExpectBreakDown === Session was created, waiting for retries >>> Ready to answer: ok ====CreateRetryState ====CreateRetryState Initialized Test retry state: get retry delay 2025-03-31T16:19:37.872327Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-03-31T16:19:39.873350Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s 2025-03-31T16:19:40.986789Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:19:40.986812Z node 3 :IMPORT WARN: Table profiles were not loaded Test retry state: get retry delay === In the next federation discovery response dc2 will be available 2025-03-31T16:19:41.874374Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-03-31T16:19:43.875434Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-03-31T16:19:45.876322Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-03-31T16:19:47.877306Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-03-31T16:19:49.878364Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-03-31T16:19:51.879265Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-03-31T16:19:53.880308Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-03-31T16:19:55.881299Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-03-31T16:19:57.882285Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-03-31T16:19:59.883280Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-03-31T16:20:01.884371Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-03-31T16:20:03.885400Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-03-31T16:20:05.886350Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s 2025-03-31T16:20:05.957565Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] TPersQueueReadBalancer::HandleWakeup 2025-03-31T16:20:05.957665Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] Send TEvPersQueue::TEvStatus TabletId: 72075186224037892 Cookie: 1 2025-03-31T16:20:05.958219Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 TotalPartitions: 1 SourceIdMaxCounts: 6000000 } 2025-03-31T16:20:05.958863Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] Send TEvPeriodicTopicStats PathId: 13 Generation: 1 StatsReportRound: 1 DataSize: 0 UsedReserveSize: 0 2025-03-31T16:20:05.959222Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] ProcessPendingStats. 
PendingUpdates size 1 Test retry state: get retry delay 2025-03-31T16:20:07.887278Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s === Waiting for repair >>> Ready to answer: ok === Closing the session 2025-03-31T16:20:09.889089Z :INFO: [/Root] [] [] Start federated write session to database 'dc2' (previous was ) FederationState: { Status: SUCCESS SelfLocation: "fancy_datacenter" DbInfos: [ { name: "dc1" path: "/Root" id: "account-dc1" endpoint: "localhost:15162" location: "dc1" status: AVAILABLE weight: 1000 } { name: "dc2" path: "/Root" id: "account-dc2" endpoint: "localhost:15162" location: "dc2" status: AVAILABLE weight: 500 } { name: "dc3" path: "/Root" id: "account-dc3" endpoint: "localhost:15162" location: "dc3" status: AVAILABLE weight: 500 } ] ControlPlaneEndpoint: cp.logbroker-federation:2135 } 2025-03-31T16:20:09.898256Z :DEBUG: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session: try to update token 2025-03-31T16:20:09.898870Z :INFO: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Start write session. Will connect to nodeId: 0 2025-03-31T16:20:09.903099Z :DEBUG: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session: write to message_group: src_id 2025-03-31T16:20:09.903239Z :DEBUG: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session: send init request: init_request { path: "test-topic" message_group_id: "src_id" } 2025-03-31T16:20:09.903518Z node 3 :PQ_WRITE_PROXY DEBUG: new grpc connection 2025-03-31T16:20:09.903634Z :DEBUG: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session: OnWriteDone gRpcStatusCode: 0 2025-03-31T16:20:09.903592Z node 3 :PQ_WRITE_PROXY DEBUG: new session created cookie 2 2025-03-31T16:20:09.904025Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: grpc read done: success: 1 data: init_request { path: "test-topic" message_group_id: "src_id" } 2025-03-31T16:20:09.904243Z node 3 :PQ_WRITE_PROXY INFO: session request cookie: 2 path: "test-topic" message_group_id: "src_id" from ipv6:[::1]:55194 2025-03-31T16:20:09.904278Z node 3 :PQ_WRITE_PROXY INFO: write session: cookie=2 sessionId= userAgent="topic server" ip=ipv6:[::1]:55194 proto=topic topic=test-topic durationSec=0 2025-03-31T16:20:09.904292Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2025-03-31T16:20:09.906043Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: describe result for acl check 2025-03-31T16:20:09.906190Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2025-03-31T16:20:09.906210Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-03-31T16:20:09.906221Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE 
`/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2025-03-31T16:20:09.906243Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7488009234941039770:2892] (SourceId=src_id, PreferedPartition=(NULL)) StartKqpSession 2025-03-31T16:20:09.906359Z :INFO: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session: close. Timeout 0.000000s 2025-03-31T16:20:09.906416Z :INFO: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session will now close 2025-03-31T16:20:09.906491Z :DEBUG: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session: aborting 2025-03-31T16:20:09.907149Z :INFO: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session: gracefully shut down, all writes complete 2025-03-31T16:20:09.907753Z :DEBUG: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session: OnReadDone gRpcStatusCode: 1, Msg: CANCELLED, Details: , InternalError: 0 2025-03-31T16:20:09.907844Z :INFO: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1743438009907 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-31T16:20:09.907928Z :DEBUG: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session is aborting and will not restart 2025-03-31T16:20:09.908028Z :DEBUG: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session: destroy 2025-03-31T16:20:09.909148Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: grpc closed 2025-03-31T16:20:09.909183Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: is DEAD 2025-03-31T16:20:09.912420Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7488009234941039770:2892] (SourceId=src_id, PreferedPartition=(NULL)) Select from the table 2025-03-31T16:20:10.283243Z node 3 :KQP_COMPUTE WARN: TxId: 281474976715748, task: 1, CA Id [3:7488009239236007115:2898]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 0 2025-03-31T16:20:10.315665Z node 3 :KQP_COMPUTE WARN: TxId: 281474976715748, task: 1, CA Id [3:7488009239236007115:2898]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-03-31T16:20:10.364286Z node 3 :KQP_COMPUTE WARN: TxId: 281474976715748, task: 1, CA Id [3:7488009239236007115:2898]. 
Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 >> KqpQuery::RowsLimit >> TKeyValueTest::TestObtainLockNewApi [GOOD] >> TKeyValueTest::TestRenameToLongKey >> KqpQuery::RandomUuid [GOOD] >> KqpLimits::QueryReplySize [GOOD] >> KqpLimits::ReadsetCountLimit >> KqpQuery::Now >> KqpLimits::TooBigQuery+useSink >> KqpExplain::LimitOffset ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::SimpleHandlers [GOOD] Test command err: 2025-03-31T16:19:10.104463Z :WaitEventBlocksBeforeDiscovery INFO: Random seed for debugging is 1743437950104435 2025-03-31T16:19:10.440464Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008980360597109:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:10.440541Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:19:10.492664Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008981798605181:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:10.493337Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:19:10.706099Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-31T16:19:10.712117Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0016a3/r3tmp/tmpBNS8en/pdisk_1.dat 2025-03-31T16:19:11.135646Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:19:11.186348Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:19:11.186902Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:19:11.187584Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:19:11.187664Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:19:11.194126Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:19:11.194283Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:19:11.208121Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27520, node 1 2025-03-31T16:19:11.583708Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/0l5c/0016a3/r3tmp/yandexJv5ViW.tmp 2025-03-31T16:19:11.583731Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/0l5c/0016a3/r3tmp/yandexJv5ViW.tmp 2025-03-31T16:19:11.583925Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/0l5c/0016a3/r3tmp/yandexJv5ViW.tmp 2025-03-31T16:19:11.584080Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:19:11.767513Z INFO: TTestServer started on Port 4056 GrpcPort 27520 TClient is connected to server localhost:4056 PQClient connected to 
localhost:27520 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:19:12.134509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:19:12.190488Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:19:14.216813Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008997540467299:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:14.216952Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008997540467314:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:14.216754Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008998978474696:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:14.217079Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:14.216962Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008998978474685:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:14.217389Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:14.223772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-03-31T16:19:14.231179Z node 2 :TX_PROXY ERROR: Actor# [2:7488008998978474700:2124] txid# 281474976715657, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-31T16:19:14.242409Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008997540467316:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-03-31T16:19:14.242643Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488008998978474699:2313], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-03-31T16:19:14.301457Z node 2 :TX_PROXY ERROR: Actor# [2:7488008998978474727:2130] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:19:14.324248Z node 1 :TX_PROXY ERROR: Actor# [1:7488008997540467423:2694] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:19:14.690383Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488008997540467433:2348], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:19:14.691954Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7488008998978474734:2317], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:19:14.692196Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NWUwOGIxN2ItNjc2MDc4YWYtY2YyZmVkOGYtNmE4NzBlZGQ=, ActorId: [2:7488008998978474683:2308], ActorState: ExecuteState, TraceId: 01jqpfzz4j6qzs7wta6a8en2pa, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:19:14.693966Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T16:19:14.741509Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YmQ5MDg2ZjYtMmI3ZjlhYzktNTkwNWY3YWYtNDJkMjA1MGQ=, ActorId: [1:7488008997540467275:2335], ActorState: ExecuteState, TraceId: 01jqpfzz4j0mbsh7skfefmtad3, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:19:14.743500Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T16:19:14.749269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:19:14.877511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:19:15.011685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:27520", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2025-03-31T16:19:15.413176Z node 1 :KQP_EXECU ... session cookie 3 consumer shared/user session shared/user_3_3_2919205033808863471_v1 grpc read failed 2025-03-31T16:20:10.547927Z node 3 :PQ_READ_PROXY INFO: session cookie 3 consumer shared/user session shared/user_3_3_2919205033808863471_v1 closed 2025-03-31T16:20:10.548127Z :INFO: [/Root] [/Root] [4c7db9d4-49a2bf20-108c47b-d3b9b073] Closing read session. 
Close timeout: 18446744073709.551615s 2025-03-31T16:20:10.548162Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-03-31T16:20:10.548217Z :INFO: [/Root] [/Root] [4c7db9d4-49a2bf20-108c47b-d3b9b073] Counters: { Errors: 0 CurrentSessionLifetimeMs: 533 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-31T16:20:10.548341Z node 3 :PQ_READ_PROXY INFO: session cookie 3 consumer shared/user session shared/user_3_3_2919205033808863471_v1 is DEAD 2025-03-31T16:20:10.548501Z :INFO: [/Root] MessageGroupId [src_id] SessionId [src_id|3dd0f0d4-3e1968c6-71c495ba-8f8429c2_0] Write session: close. Timeout = 0 ms 2025-03-31T16:20:10.548543Z :INFO: [/Root] MessageGroupId [src_id] SessionId [src_id|3dd0f0d4-3e1968c6-71c495ba-8f8429c2_0] Write session will now close 2025-03-31T16:20:10.548589Z :DEBUG: [/Root] MessageGroupId [src_id] SessionId [src_id|3dd0f0d4-3e1968c6-71c495ba-8f8429c2_0] Write session: aborting 2025-03-31T16:20:10.548711Z :INFO: [/Root] MessageGroupId [src_id] SessionId [src_id|3dd0f0d4-3e1968c6-71c495ba-8f8429c2_0] Write session: gracefully shut down, all writes complete 2025-03-31T16:20:10.548744Z :DEBUG: [/Root] MessageGroupId [src_id] SessionId [src_id|3dd0f0d4-3e1968c6-71c495ba-8f8429c2_0] Write session: destroy 2025-03-31T16:20:10.548843Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7488009236686286216:2536] disconnected; active server actors: 1 2025-03-31T16:20:10.548875Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [3:7488009236686286216:2536] client user disconnected session shared/user_3_3_2919205033808863471_v1 2025-03-31T16:20:10.548927Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] consumer user rebalancing was scheduled 2025-03-31T16:20:10.548980Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] consumer user balancing. Sessions=2, Families=1, UnradableFamilies=0 [], RequireBalancing=0 [] 2025-03-31T16:20:10.549002Z node 3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_1893264525934015682_v1 grpc read done: success# 0, data# { } 2025-03-31T16:20:10.549012Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] consumer user start rebalancing. 
familyCount=1, sessionCount=2, desiredFamilyCount=0, allowPlusOne=1 2025-03-31T16:20:10.549018Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_1893264525934015682_v1 grpc read failed 2025-03-31T16:20:10.549032Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_1893264525934015682_v1 closed 2025-03-31T16:20:10.549039Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] consumer user balancing duration: 0.000034s 2025-03-31T16:20:10.549070Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_1893264525934015682_v1 is DEAD 2025-03-31T16:20:10.549327Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_3_1_1893264525934015682_v1 2025-03-31T16:20:10.549373Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7488009236686286217:2534] disconnected; active server actors: 1 2025-03-31T16:20:10.549398Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7488009236686286234:2548] destroyed 2025-03-31T16:20:10.549397Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [3:7488009236686286217:2534] client user disconnected session shared/user_3_1_1893264525934015682_v1 2025-03-31T16:20:10.549446Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_3_1_1893264525934015682_v1 2025-03-31T16:20:10.549461Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] consumer user rebalancing was scheduled 2025-03-31T16:20:10.549507Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] consumer user balancing. Sessions=1, Families=1, UnradableFamilies=1 [1 (0), ], RequireBalancing=0 [] 2025-03-31T16:20:10.549566Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] consumer user balancing family=1 (Status=Free, Partitions=[0]) for ReadingSession "shared/user_3_2_12482107212017852688_v1" (Sender=[3:7488009236686286206:2535], Pipe=[3:7488009236686286219:2535], Partitions=[], ActiveFamilyCount=0) 2025-03-31T16:20:10.549614Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_3_2_12482107212017852688_v1 grpc read done: success# 0, data# { } 2025-03-31T16:20:10.549625Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] consumer user family 1 status Active partitions [0] session "shared/user_3_2_12482107212017852688_v1" sender [3:7488009236686286206:2535] lock partition 0 for ReadingSession "shared/user_3_2_12482107212017852688_v1" (Sender=[3:7488009236686286206:2535], Pipe=[3:7488009236686286219:2535], Partitions=[], ActiveFamilyCount=1) generation 1 step 3 2025-03-31T16:20:10.549631Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_3_2_12482107212017852688_v1 grpc read failed 2025-03-31T16:20:10.549648Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_3_2_12482107212017852688_v1 closed 2025-03-31T16:20:10.549660Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] consumer user start rebalancing. 
familyCount=1, sessionCount=1, desiredFamilyCount=1, allowPlusOne=0 2025-03-31T16:20:10.549679Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] consumer user balancing duration: 0.000150s 2025-03-31T16:20:10.549764Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_3_2_12482107212017852688_v1 is DEAD 2025-03-31T16:20:10.549787Z :INFO: [/Root] [/Root] [30f26d36-7b9d9421-78047957-17495c8d] Closing read session. Close timeout: 0.000000s 2025-03-31T16:20:10.549846Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-03-31T16:20:10.549894Z :INFO: [/Root] [/Root] [30f26d36-7b9d9421-78047957-17495c8d] Counters: { Errors: 0 CurrentSessionLifetimeMs: 539 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-31T16:20:10.549919Z :INFO: [/Root] [/Root] [b8866e5f-21eaa947-ecd261b1-67a77670] Closing read session. Close timeout: 0.000000s 2025-03-31T16:20:10.549939Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:1:299:0 2025-03-31T16:20:10.549975Z :INFO: [/Root] [/Root] [b8866e5f-21eaa947-ecd261b1-67a77670] Counters: { Errors: 0 CurrentSessionLifetimeMs: 536 BytesRead: 4936800 MessagesRead: 300 BytesReadCompressed: 4936800 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-31T16:20:10.549991Z :INFO: [/Root] [/Root] [4c7db9d4-49a2bf20-108c47b-d3b9b073] Closing read session. Close timeout: 0.000000s 2025-03-31T16:20:10.550009Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-03-31T16:20:10.550026Z :INFO: [/Root] [/Root] [4c7db9d4-49a2bf20-108c47b-d3b9b073] Counters: { Errors: 0 CurrentSessionLifetimeMs: 535 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-31T16:20:10.550057Z :INFO: [/Root] [/Root] [4c7db9d4-49a2bf20-108c47b-d3b9b073] Closing read session. Close timeout: 0.000000s 2025-03-31T16:20:10.550049Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7488009236686286219:2535] disconnected; active server actors: 1 2025-03-31T16:20:10.550070Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [3:7488009236686286219:2535] client user disconnected session shared/user_3_2_12482107212017852688_v1 2025-03-31T16:20:10.550086Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-03-31T16:20:10.550136Z :INFO: [/Root] [/Root] [4c7db9d4-49a2bf20-108c47b-d3b9b073] Counters: { Errors: 0 CurrentSessionLifetimeMs: 535 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-31T16:20:10.550203Z :NOTICE: [/Root] [/Root] [4c7db9d4-49a2bf20-108c47b-d3b9b073] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-03-31T16:20:10.550346Z :INFO: [/Root] [/Root] [b8866e5f-21eaa947-ecd261b1-67a77670] Closing read session. Close timeout: 0.000000s 2025-03-31T16:20:10.550363Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:1:299:0 2025-03-31T16:20:10.550380Z :INFO: [/Root] [/Root] [b8866e5f-21eaa947-ecd261b1-67a77670] Counters: { Errors: 0 CurrentSessionLifetimeMs: 536 BytesRead: 4936800 MessagesRead: 300 BytesReadCompressed: 4936800 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-31T16:20:10.550406Z :NOTICE: [/Root] [/Root] [b8866e5f-21eaa947-ecd261b1-67a77670] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-03-31T16:20:10.551049Z :INFO: [/Root] [/Root] [30f26d36-7b9d9421-78047957-17495c8d] Closing read session. Close timeout: 0.000000s 2025-03-31T16:20:10.551069Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-03-31T16:20:10.551116Z :INFO: [/Root] [/Root] [30f26d36-7b9d9421-78047957-17495c8d] Counters: { Errors: 0 CurrentSessionLifetimeMs: 540 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-31T16:20:10.551158Z :NOTICE: [/Root] [/Root] [30f26d36-7b9d9421-78047957-17495c8d] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-03-31T16:20:10.551779Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: src_id|3dd0f0d4-3e1968c6-71c495ba-8f8429c2_0 grpc read done: success: 0 data: 2025-03-31T16:20:10.551800Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: src_id|3dd0f0d4-3e1968c6-71c495ba-8f8429c2_0 grpc read failed 2025-03-31T16:20:10.551825Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: src_id|3dd0f0d4-3e1968c6-71c495ba-8f8429c2_0 grpc closed 2025-03-31T16:20:10.551840Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: src_id|3dd0f0d4-3e1968c6-71c495ba-8f8429c2_0 is DEAD 2025-03-31T16:20:10.556843Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-03-31T16:20:10.557177Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7488009236686286261:2537] destroyed 2025-03-31T16:20:10.557242Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. >> KqpParams::BadParameterType [GOOD] >> KqpParams::CheckCacheByAst >> KqpLimits::ComputeActorMemoryAllocationFailure+useSink >> KqpExplain::FewEffects-UseSink [GOOD] >> KqpStats::MultiTxStatsFullExpYql ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::RandomUuid [GOOD] Test command err: Trying to start YDB, gRPC: 23221, MsgBus: 1701 2025-03-31T16:19:59.953232Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488009191473964755:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:59.953360Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a84/r3tmp/tmpLwhfIo/pdisk_1.dat 2025-03-31T16:20:00.201158Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23221, node 1 2025-03-31T16:20:00.257913Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:00.257930Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:00.257935Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:00.258037Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:20:00.295744Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:00.295879Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:00.297541Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1701 TClient is connected to server localhost:1701 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:00.656106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:00.680418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:00.764488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:00.875820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:00.931510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:02.159074Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009204358868438:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:02.159211Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:02.420748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:20:02.448141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:20:02.474319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:20:02.499653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:20:02.526185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:20:02.566617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:20:02.600571Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009204358868946:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:02.600633Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009204358868951:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:02.600635Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:02.603579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:20:02.610919Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488009204358868953:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:20:02.678963Z node 1 :TX_PROXY ERROR: Actor# [1:7488009204358869007:3443] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:20:03.478458Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OWNhYjljN2MtMWE2MDE4NTYtY2YyZTdiMDQtNmQzOTM0NA==, ActorId: [1:7488009208653836593:2495], ActorState: ExecuteState, TraceId: 01jqpg1f74cn12ypdjqj0tkst4, Create QueryResponse for error on request, msg:
: Error: Request timeout 50ms exceeded
: Error: Cancelling after 50ms during compilation Trying to start YDB, gRPC: 1120, MsgBus: 27831 2025-03-31T16:20:04.225543Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488009209808540734:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:04.225612Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a84/r3tmp/tmpiYimMV/pdisk_1.dat 2025-03-31T16:20:04.293725Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1120, node 2 2025-03-31T16:20:04.339558Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:04.339604Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:04.339611Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:04.339716Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:20:04.352365Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:04.352421Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:04.353744Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27831 TClient is connected to server localhost:27831 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:04.761915Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:04.772934Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:04.841793Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:20:04.950598Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:05.022326Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:06.556247Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488009218398477110:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:06.556358Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:06.572466Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:20:06.596113Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:20:06.625619Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:20:06.654991Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:20:06.684547Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:20:06.714991Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:20:06.749989Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488009218398477618:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:06.750072Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:06.750087Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488009218398477623:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:06.753366Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:20:06.762659Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488009218398477625:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:20:06.858508Z node 2 :TX_PROXY ERROR: Actor# [2:7488009218398477680:3442] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 8931, MsgBus: 9693 2025-03-31T16:20:08.099952Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488009227877252322:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:08.100048Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a84/r3tmp/tmpT5ka3M/pdisk_1.dat 2025-03-31T16:20:08.212952Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:20:08.236312Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:08.236380Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:08.237752Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8931, node 3 2025-03-31T16:20:08.279061Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:08.279105Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:08.279112Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:08.279218Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9693 TClient is connected to server localhost:9693 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:08.635283Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:08.653295Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:20:08.721719Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:08.856508Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:08.911115Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:10.785061Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488009236467188709:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:10.785145Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:10.834838Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:20:10.865996Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:20:10.894785Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:20:10.924031Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:20:10.953442Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:20:10.990429Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:20:11.031695Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488009240762156513:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:11.031810Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:11.031844Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488009240762156518:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:11.035633Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:20:11.046474Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488009240762156520:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:20:11.106750Z node 3 :TX_PROXY ERROR: Actor# [3:7488009240762156573:3440] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> Cdc::InitialScanAndLimits [GOOD] >> Cdc::InitialScanComplete >> YdbOlapStore::LogExistingRequest [GOOD] >> YdbOlapStore::LogCountByResource >> KqpStats::DataQueryWithEffects-UseSink [GOOD] >> KqpStats::DataQueryMulti ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::FewEffects-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 2210, MsgBus: 19787 2025-03-31T16:19:54.521274Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488009170658906647:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:54.521409Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a9b/r3tmp/tmpUgc3on/pdisk_1.dat 2025-03-31T16:19:54.778764Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2210, node 1 2025-03-31T16:19:54.785174Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:19:54.811342Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:19:54.811372Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:19:54.811395Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:19:54.811571Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:19:54.864456Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:19:54.864569Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:19:54.866132Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19787 TClient is connected to server localhost:19787 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:19:55.182265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:55.199106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:55.300107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:55.419228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:55.493100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:56.642875Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009179248843042:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:56.642996Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:56.850580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:19:56.872968Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:19:56.897836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:19:56.924462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:19:56.950029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:19:56.977228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:19:57.065436Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009183543810849:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:57.065486Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:57.065544Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009183543810854:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:57.068740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:19:57.078827Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488009183543810856:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:19:57.179552Z node 1 :TX_PROXY ERROR: Actor# [1:7488009183543810911:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } {"Plan":{"Plans":[{"PlanNodeId":11,"Plans":[{"PlanNodeId":10,"Operators":[{"Inputs":[],"Iterator":"precompute_0_0","Name":"Iterator"}],"Node Type":"ConstantExpr","CTE Name":"precompute_0_0"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"},{"PlanNodeId":8,"Subplan Name":"CTE precompute_0_0","Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":6,"Plans":[{"Tables":["EightShard"],"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["KeyValue"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/KeyValue","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Key"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"Broadcast","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate","Phase":"Intermediate"},{"Inputs":[{"InternalOperatorId":2},{"ExternalPlanNodeId":4}],"E-Rows":"No estimate","Condition":"t.Data = kv.Key","Name":"InnerJoin (MapJoin)","E-Size":"No estimate","E-Cost":"No estimate"},{"Inputs":[{"InternalOperatorId":3}],"E-Rows":"No estimate","Predicate":"Exist(item.Data)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/EightShard","E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data"],"E-Cost":"No estimate"}],"Node Type":"Aggregate-InnerJoin (MapJoin)-Filter-TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate","Phase":"Final"},{"Inputs":[{"InternalOperatorId":2}],"Name":"Limit","Limit":"1"},{"Inputs":[{"ExternalPlanNodeId":6}],"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate-Limit-Aggregate"}],"Node Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/EightShard","reads":[{"columns":["Data"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"}]},{"name":"\/Root\/KeyValue","reads":[{"columns":["Key"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":8,"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":10,"Plans":[{"PlanNodeId":11,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Path":"\/Root\/EightShard","E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"Exist(item.Data)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"},{"PlanNodeId":15,"Operators":[{"Scan":"Parallel","E-Size":"No 
estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Path":"\/Root\/KeyValue","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Key"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Condition":"t.Data = kv.Key","Name":"InnerJoin (MapJoin)","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"InnerJoin (MapJoin)"}],"Operators":[{"Name":"Aggregate","Phase":"Intermediate"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":2},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 9683, MsgBus: 22395 2025-03-31T16:19:59.021820Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488009190428273521:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:59.021919Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detec ... from file: (empty maybe) 2025-03-31T16:20:07.908459Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:07.908589Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18256 TClient is connected to server localhost:18256 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:08.332453Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:08.349347Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:08.418544Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:08.553160Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:20:08.626401Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:10.771300Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488009237797121613:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:10.771417Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:10.830678Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:20:10.867950Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:20:10.901220Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:20:10.934202Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:20:10.968914Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:20:11.004302Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:20:11.080830Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488009242092089425:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:11.080952Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:11.080966Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488009242092089430:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:11.084701Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:20:11.093971Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7488009242092089432:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:20:11.178635Z node 4 :TX_PROXY ERROR: Actor# [4:7488009242092089487:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } {"Plan":{"Plans":[{"PlanNodeId":26,"Plans":[{"Tables":["EightShard"],"PlanNodeId":25,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Path":"\/Root\/EightShard","Name":"Delete","Table":"EightShard"},{"Inputs":[],"Iterator":"precompute_5_0","Name":"Iterator"}],"Node Type":"Delete-ConstantExpr","CTE Name":"precompute_5_0"}],"Node Type":"Effect"},{"PlanNodeId":23,"Plans":[{"PlanNodeId":22,"Plans":[{"PlanNodeId":21,"Plans":[{"PlanNodeId":20,"Plans":[{"Tables":["EightShard"],"PlanNodeId":19,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRange":["Key (350, +∞)"],"Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/EightShard","E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Key"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Node Type":"Collect"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Subplan Name":"CTE precompute_5_0","Node Type":"Precompute_5","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"},{"PlanNodeId":17,"Plans":[{"Tables":["EightShard"],"PlanNodeId":16,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Path":"\/Root\/EightShard","Name":"Upsert","Table":"EightShard"},{"Inputs":[],"Iterator":"precompute_3_0","Name":"Iterator"}],"Node Type":"Upsert-ConstantExpr","CTE Name":"precompute_3_0"}],"Node Type":"Effect"},{"PlanNodeId":14,"Plans":[{"PlanNodeId":13,"Plans":[{"PlanNodeId":12,"Plans":[{"PlanNodeId":11,"Plans":[{"Tables":["EightShard"],"PlanNodeId":10,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key [100, 100]","Key [200, 200]","Key [300, 300]"],"Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/EightShard","E-Rows":"No estimate","ReadRangesPointPrefixLen":"1","ReadRangesKeys":["Key"],"Table":"EightShard","ReadColumns":["Data","Key"],"E-Cost":"No estimate","ReadRangesExpectedSize":"3"}],"Node Type":"TableRangeScan"}],"Node Type":"Stage"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Subplan Name":"CTE precompute_3_0","Node Type":"Precompute_3","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"},{"PlanNodeId":8,"Plans":[{"Tables":["EightShard"],"PlanNodeId":7,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Path":"\/Root\/EightShard","Name":"Upsert","Table":"EightShard"},{"Inputs":[],"Iterator":"precompute_0_0","Name":"Iterator"}],"Node Type":"Upsert-ConstantExpr","CTE Name":"precompute_0_0"}],"Node Type":"Effect"},{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["EightShard"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/EightShard","E-Rows":"No estimate","ReadRangesPointPrefixLen":"0","Table":"EightShard","ReadColumns":["Data","Key"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"Stage"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Subplan Name":"CTE precompute_0_0","Node Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node 
Type":"Query","PlanNodeType":"Query","Stats":{"ResourcePoolId":"default"}},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/EightShard","reads":[{"columns":["Data","Key"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"},{"columns":["Key"],"scan_by":["Key (350, +∞)"],"type":"Scan"},{"columns":["Data","Key"],"scan_by":["Key [100, 100]","Key [200, 200]","Key [300, 300]"],"type":"Scan"}],"writes":[{"columns":["Data","Key"],"type":"MultiUpsert"},{"columns":["Data","Key"],"type":"MultiUpsert"},{"type":"MultiErase"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Operators":[{"Path":"\/Root\/EightShard","Name":"Delete","Table":"EightShard"}],"Plans":[{"PlanNodeId":8,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRange":["Key (350, +∞)"],"Name":"TableRangeScan","Path":"\/Root\/EightShard","E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Key"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Node Type":"Delete"}],"Node Type":"Effect"},{"PlanNodeId":9,"Plans":[{"PlanNodeId":10,"Operators":[{"Path":"\/Root\/EightShard","Name":"Upsert","Table":"EightShard"}],"Plans":[{"PlanNodeId":16,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key [100, 100]","Key [200, 200]","Key [300, 300]"],"Name":"TableRangeScan","Path":"\/Root\/EightShard","ReadRangesPointPrefixLen":"1","E-Rows":"No estimate","ReadRangesKeys":["Key"],"Table":"EightShard","ReadColumns":["Data","Key"],"E-Cost":"No estimate","ReadRangesExpectedSize":"3"}],"Node Type":"TableRangeScan"}],"Node Type":"Upsert"}],"Node Type":"Effect"},{"PlanNodeId":17,"Plans":[{"PlanNodeId":18,"Operators":[{"Path":"\/Root\/EightShard","Name":"Upsert","Table":"EightShard"}],"Plans":[{"PlanNodeId":24,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Path":"\/Root\/EightShard","ReadRangesPointPrefixLen":"0","E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data","Key"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"Upsert"}],"Node Type":"Effect"}],"Node Type":"Query","PlanNodeType":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0}}} 2025-03-31T16:20:12.763479Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7488009224912217956:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:12.763555Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpLimits::StreamWrite+Allowed >> YdbQueryService::TestAttachTwice [GOOD] >> KqpQuery::DdlInDataQuery [GOOD] >> KqpQuery::DeleteWhereInSubquery >> TKeyValueTest::TestWriteReadWhileWriteWorks [GOOD] >> KqpStats::JoinNoStatsYql [GOOD] >> KqpStats::JoinStatsBasicYql+StreamLookupJoin >> BsControllerConfig::ExtendBoxAndStoragePool [GOOD] >> KqpQuery::ExecuteDataQueryCollectMeta [GOOD] >> KqpQuery::GenericQueryNoRowsLimit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteReadWhileWriteWorks [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] 
Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:52:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:52:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:78:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:79:2110] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:79:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! 
new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! 
new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:140:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:83:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:86:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:87:2057] recipient: [8:85:2115] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:89:2057] recipient: [8:85:2115] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! new actor is[8:88:2116] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:142:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:83:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:86:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:85:2115] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:89:2057] recipient: [9:85:2115] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! new actor is[9:88:2116] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:142:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:52:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:52:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:84:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:87:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:88:2057] recipient: [10:86:2115] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:90:2057] recipient: [10:86:2115] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! 
new actor is[10:89:2116] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:143:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:86:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:89:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:90:2057] recipient: [11:88:2117] Leader for TabletID 72057594037927937 is [11:91:2118] sender: [11:92:2057] recipient: [11:88:2117] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! new actor is[11:91:2118] Leader for TabletID 72057594037927937 is [11:91:2118] sender: [11:145:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (acto ... TabletID 72057594037927937 is [13:56:2097] sender: [13:89:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:91:2057] recipient: [13:90:2117] Leader for TabletID 72057594037927937 is [13:92:2118] sender: [13:93:2057] recipient: [13:90:2117] !Reboot 72057594037927937 (actor [13:56:2097]) rebooted! !Reboot 72057594037927937 (actor [13:56:2097]) tablet resolver refreshed! new actor is[13:92:2118] Leader for TabletID 72057594037927937 is [13:92:2118] sender: [13:146:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:52:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:57:2057] recipient: [14:52:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:74:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:50:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:57:2057] recipient: [15:50:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:74:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:57:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:74:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:76:2057] recipient: [16:36:2083] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:79:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:80:2057] recipient: [16:78:2110] Leader for TabletID 72057594037927937 is [16:81:2111] sender: [16:82:2057] recipient: [16:78:2110] !Reboot 72057594037927937 (actor [16:56:2097]) rebooted! !Reboot 72057594037927937 (actor [16:56:2097]) tablet resolver refreshed! new actor is[16:81:2111] Leader for TabletID 72057594037927937 is [16:81:2111] sender: [16:135:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:52:2095] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:57:2057] recipient: [17:52:2095] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:74:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:76:2057] recipient: [17:36:2083] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:79:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:80:2057] recipient: [17:78:2110] Leader for TabletID 72057594037927937 is [17:81:2111] sender: [17:82:2057] recipient: [17:78:2110] !Reboot 72057594037927937 (actor [17:56:2097]) rebooted! !Reboot 72057594037927937 (actor [17:56:2097]) tablet resolver refreshed! new actor is[17:81:2111] Leader for TabletID 72057594037927937 is [17:81:2111] sender: [17:135:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:50:2095] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:57:2057] recipient: [18:50:2095] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:74:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:77:2057] recipient: [18:36:2083] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:80:2057] recipient: [18:79:2110] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:81:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:82:2111] sender: [18:83:2057] recipient: [18:79:2110] !Reboot 72057594037927937 (actor [18:56:2097]) rebooted! !Reboot 72057594037927937 (actor [18:56:2097]) tablet resolver refreshed! new actor is[18:82:2111] Leader for TabletID 72057594037927937 is [18:82:2111] sender: [18:136:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:51:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:57:2057] recipient: [19:51:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:74:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:80:2057] recipient: [19:36:2083] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:83:2057] recipient: [19:82:2113] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:84:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [19:85:2114] sender: [19:86:2057] recipient: [19:82:2113] !Reboot 72057594037927937 (actor [19:56:2097]) rebooted! !Reboot 72057594037927937 (actor [19:56:2097]) tablet resolver refreshed! new actor is[19:85:2114] Leader for TabletID 72057594037927937 is [19:85:2114] sender: [19:139:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:51:2095] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:57:2057] recipient: [20:51:2095] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:74:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:80:2057] recipient: [20:36:2083] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:83:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:84:2057] recipient: [20:82:2113] Leader for TabletID 72057594037927937 is [20:85:2114] sender: [20:86:2057] recipient: [20:82:2113] !Reboot 72057594037927937 (actor [20:56:2097]) rebooted! !Reboot 72057594037927937 (actor [20:56:2097]) tablet resolver refreshed! new actor is[20:85:2114] Leader for TabletID 72057594037927937 is [20:85:2114] sender: [20:139:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:50:2095] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:57:2057] recipient: [21:50:2095] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:74:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:81:2057] recipient: [21:36:2083] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:84:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:85:2057] recipient: [21:83:2113] Leader for TabletID 72057594037927937 is [21:86:2114] sender: [21:87:2057] recipient: [21:83:2113] !Reboot 72057594037927937 (actor [21:56:2097]) rebooted! !Reboot 72057594037927937 (actor [21:56:2097]) tablet resolver refreshed! new actor is[21:86:2114] Leader for TabletID 72057594037927937 is [21:86:2114] sender: [21:140:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:57:2057] recipient: [22:51:2095] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:74:2057] recipient: [22:14:2061] !Reboot 72057594037927937 (actor [22:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:84:2057] recipient: [22:36:2083] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:87:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:88:2057] recipient: [22:86:2116] Leader for TabletID 72057594037927937 is [22:89:2117] sender: [22:90:2057] recipient: [22:86:2116] !Reboot 72057594037927937 (actor [22:56:2097]) rebooted! !Reboot 72057594037927937 (actor [22:56:2097]) tablet resolver refreshed! new actor is[22:89:2117] Leader for TabletID 72057594037927937 is [22:89:2117] sender: [22:143:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:54:2057] recipient: [23:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:54:2057] recipient: [23:51:2095] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:57:2057] recipient: [23:51:2095] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:74:2057] recipient: [23:14:2061] !Reboot 72057594037927937 (actor [23:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:84:2057] recipient: [23:36:2083] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:87:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:88:2057] recipient: [23:86:2116] Leader for TabletID 72057594037927937 is [23:89:2117] sender: [23:90:2057] recipient: [23:86:2116] !Reboot 72057594037927937 (actor [23:56:2097]) rebooted! !Reboot 72057594037927937 (actor [23:56:2097]) tablet resolver refreshed! new actor is[23:89:2117] Leader for TabletID 72057594037927937 is [23:89:2117] sender: [23:143:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:54:2057] recipient: [24:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:54:2057] recipient: [24:51:2095] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:57:2057] recipient: [24:51:2095] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:74:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:85:2057] recipient: [24:36:2083] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:88:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:89:2057] recipient: [24:87:2116] Leader for TabletID 72057594037927937 is [24:90:2117] sender: [24:91:2057] recipient: [24:87:2116] !Reboot 72057594037927937 (actor [24:56:2097]) rebooted! !Reboot 72057594037927937 (actor [24:56:2097]) tablet resolver refreshed! 
new actor is[24:90:2117] Leader for TabletID 72057594037927937 is [24:90:2117] sender: [24:144:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:54:2057] recipient: [25:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:54:2057] recipient: [25:51:2095] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:57:2057] recipient: [25:51:2095] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:74:2057] recipient: [25:14:2061] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::ExtendBoxAndStoragePool [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:3014:2106] recipient: [1:2914:2116] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:3014:2106] recipient: [1:2914:2116] Leader for TabletID 72057594037932033 is [1:3016:2118] sender: [1:3017:2106] recipient: [1:2914:2116] 2025-03-31T16:19:29.813178Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-03-31T16:19:29.816610Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-03-31T16:19:29.816877Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-03-31T16:19:29.818504Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:19:29.818849Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-03-31T16:19:29.819317Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-03-31T16:19:29.819365Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:497} Handle TEvInterconnect::TEvNodesInfo 2025-03-31T16:19:29.819617Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-03-31T16:19:29.827353Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-03-31T16:19:29.827496Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-03-31T16:19:29.827676Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-03-31T16:19:29.827793Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-03-31T16:19:29.827924Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-03-31T16:19:29.828051Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:3016:2118] sender: [1:3042:2106] recipient: [1:60:2107] 2025-03-31T16:19:29.840184Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-03-31T16:19:29.840322Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-03-31T16:19:29.851137Z node 1 :BS_CONTROLLER DEBUG: 
{BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-03-31T16:19:29.851273Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-03-31T16:19:29.851374Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-03-31T16:19:29.851465Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-03-31T16:19:29.851593Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-03-31T16:19:29.851644Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-03-31T16:19:29.851687Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-03-31T16:19:29.851766Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-03-31T16:19:29.862602Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-03-31T16:19:29.862746Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-03-31T16:19:29.873481Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-03-31T16:19:29.873613Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-03-31T16:19:29.874738Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:546} TTxLoadEverything Complete 2025-03-31T16:19:29.874786Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2167} LoadFinished 2025-03-31T16:19:29.874962Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-03-31T16:19:29.875011Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:551} TTxLoadEverything InitQueue processed 2025-03-31T16:19:29.885498Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" SharedWithOs: true } Drive { Path: "/dev/disk3" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "first box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } 
HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12013 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12014 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12015 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12016 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12017 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12018 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12019 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12020 } HostConfigId: 1 } } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "first storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 60 PDiskFilter { Property { Type: ROT } } } } Command { QueryBaseConfig { } } } 2025-03-31T16:19:29.886110Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 1:1000 Path# /dev/disk1 2025-03-31T16:19:29.886159Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 1:1001 Path# /dev/disk2 2025-03-31T16:19:29.886175Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 1:1002 Path# /dev/disk3 2025-03-31T16:19:29.886189Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 2:1000 Path# /dev/disk1 2025-03-31T16:19:29.886202Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 2:1001 Path# /dev/disk2 2025-03-31T16:19:29.886228Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 2:1002 Path# /dev/disk3 2025-03-31T16:19:29.886242Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 3:1000 Path# /dev/disk1 2025-03-31T16:19:29.886274Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 3:1001 Path# /dev/disk2 2025-03-31T16:19:29.886289Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 3:1002 Path# /dev/disk3 2025-03-31T16:19:29.886304Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 4:1000 Path# /dev/disk1 2025-03-31T16:19:29.886317Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 4:1001 Path# /dev/disk2 2025-03-31T16:19:29.886330Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 4:1002 Path# /dev/disk3 2025-03-31T16:19:29.886345Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 5:1000 Path# /dev/disk1 2025-03-31T16:19:29.886358Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 5:1001 Path# /dev/disk2 2025-03-31T16:19:29.886378Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 5:1002 Path# /dev/disk3 2025-03-31T16:19:29.886404Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 6:1000 Path# /dev/disk1 2025-03-31T16:19:29.886421Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 6:1001 Path# /dev/disk2 2025-03-31T16:19:29.886435Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 6:1002 Path# /dev/disk3 2025-03-31T16:19:29.886450Z node 1 :BS_CONTROLLER NOTICE: 
{BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 7:1000 Path# /dev/disk1 2025-03-31T16:19:29.886473Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 7:1001 Path# /dev/disk2 2025-03-31T16:19:29.886509Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 7:1002 Path# /dev/disk3 2025-03-31T16:19:29.886556Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 8:1000 Path# /dev/disk1 2025-03-31T16:19:29.886581Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 8:1001 Path# /dev/disk2 2025-03-31T16:19:29.886617Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 8:1002 Path# /dev/disk3 2025-03-31T16:19:29.886646Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 9:1000 Path# /dev/disk1 2025-03-31T16:19:29.886663Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 9:1001 Path# /dev/disk2 2025-03-31T16:19:29.886677Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 9:1002 Path# /dev/disk3 2025-03-31T16:19:29.886690Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 10:1000 Path# /dev/disk1 2025-03-31T16:19:29.886704Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 10:1001 Path# /dev/disk2 2025-03-31T16:19:29.886716Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 10:1002 Path# /dev/disk3 2025-03-31T16:19:29.886734Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 11:1000 Path# /dev/disk1 2025-03-31T16:19:29.886747Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 11:1001 Path# /dev/disk2 2025-03-31T16:19:29.886760Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Cr ... 
ER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 204:1000 Path# /dev/disk1 2025-03-31T16:20:02.795346Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 204:1001 Path# /dev/disk2 2025-03-31T16:20:02.795363Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 204:1002 Path# /dev/disk3 2025-03-31T16:20:02.795379Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 205:1000 Path# /dev/disk1 2025-03-31T16:20:02.795400Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 205:1001 Path# /dev/disk2 2025-03-31T16:20:02.795423Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 205:1002 Path# /dev/disk3 2025-03-31T16:20:02.795439Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 206:1000 Path# /dev/disk1 2025-03-31T16:20:02.795455Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 206:1001 Path# /dev/disk2 2025-03-31T16:20:02.795473Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 206:1002 Path# /dev/disk3 2025-03-31T16:20:02.795488Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 207:1000 Path# /dev/disk1 2025-03-31T16:20:02.795506Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 207:1001 Path# /dev/disk2 2025-03-31T16:20:02.795522Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 207:1002 Path# /dev/disk3 2025-03-31T16:20:02.795537Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 208:1000 Path# /dev/disk1 2025-03-31T16:20:02.795552Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 208:1001 Path# /dev/disk2 2025-03-31T16:20:02.795566Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 208:1002 Path# /dev/disk3 2025-03-31T16:20:02.795582Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 209:1000 Path# /dev/disk1 2025-03-31T16:20:02.795628Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 209:1001 Path# /dev/disk2 2025-03-31T16:20:02.795650Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 209:1002 Path# /dev/disk3 2025-03-31T16:20:02.795667Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 210:1000 Path# /dev/disk1 2025-03-31T16:20:02.795683Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 210:1001 Path# /dev/disk2 2025-03-31T16:20:02.795698Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 210:1002 Path# /dev/disk3 2025-03-31T16:20:03.077070Z node 161 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.286190s 2025-03-31T16:20:03.077268Z node 161 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.286407s 2025-03-31T16:20:03.113925Z node 161 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { 
DefineHostConfig { HostConfigId: 4 Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" SharedWithOs: true } Drive { Path: "/dev/disk3" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "first box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12013 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12014 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12015 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12016 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12017 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12018 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12019 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12020 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12021 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12022 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12023 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12024 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12025 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12026 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12027 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12028 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12029 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12030 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12031 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12032 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12033 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12034 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12035 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12036 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12037 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12038 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12039 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12040 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12041 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12042 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12043 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12044 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12045 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12046 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12047 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12048 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12049 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12050 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12051 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12052 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12053 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12054 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12055 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12056 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12057 } 
HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12058 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12059 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12060 } HostConfigId: 4 } ItemConfigGeneration: 1 } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "first storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 180 PDiskFilter { Property { Type: ROT } } ItemConfigGeneration: 1 } } Command { QueryBaseConfig { } } } 2025-03-31T16:20:03.116114Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 211:1000 Path# /dev/disk1 2025-03-31T16:20:03.116195Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 211:1001 Path# /dev/disk2 2025-03-31T16:20:03.116227Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 211:1002 Path# /dev/disk3 2025-03-31T16:20:03.116255Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 212:1000 Path# /dev/disk1 2025-03-31T16:20:03.116285Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 212:1001 Path# /dev/disk2 2025-03-31T16:20:03.116311Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 212:1002 Path# /dev/disk3 2025-03-31T16:20:03.116341Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 213:1000 Path# /dev/disk1 2025-03-31T16:20:03.116367Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 213:1001 Path# /dev/disk2 2025-03-31T16:20:03.116392Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 213:1002 Path# /dev/disk3 2025-03-31T16:20:03.116419Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 214:1000 Path# /dev/disk1 2025-03-31T16:20:03.116444Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 214:1001 Path# /dev/disk2 2025-03-31T16:20:03.116468Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 214:1002 Path# /dev/disk3 2025-03-31T16:20:03.116492Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 215:1000 Path# /dev/disk1 2025-03-31T16:20:03.116516Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 215:1001 Path# /dev/disk2 2025-03-31T16:20:03.116539Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 215:1002 Path# /dev/disk3 2025-03-31T16:20:03.116564Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 216:1000 Path# /dev/disk1 2025-03-31T16:20:03.116588Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 216:1001 Path# /dev/disk2 2025-03-31T16:20:03.116614Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 216:1002 Path# /dev/disk3 2025-03-31T16:20:03.116639Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 217:1000 Path# /dev/disk1 2025-03-31T16:20:03.116667Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 217:1001 Path# /dev/disk2 2025-03-31T16:20:03.116691Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 
217:1002 Path# /dev/disk3 2025-03-31T16:20:03.116715Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 218:1000 Path# /dev/disk1 2025-03-31T16:20:03.116739Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 218:1001 Path# /dev/disk2 2025-03-31T16:20:03.116764Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 218:1002 Path# /dev/disk3 2025-03-31T16:20:03.116789Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 219:1000 Path# /dev/disk1 2025-03-31T16:20:03.116816Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 219:1001 Path# /dev/disk2 2025-03-31T16:20:03.116840Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 219:1002 Path# /dev/disk3 2025-03-31T16:20:03.116863Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 220:1000 Path# /dev/disk1 2025-03-31T16:20:03.116888Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 220:1001 Path# /dev/disk2 2025-03-31T16:20:03.116912Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 220:1002 Path# /dev/disk3 >> KqpQuery::QueryClientTimeout [GOOD] >> KqpQuery::QueryCancelWrite >> KqpParams::CheckQueryCacheForPreparedQuery >> KqpQuery::QueryResultsTruncated >> TKeyValueTest::TestInlineWriteReadDeleteWithRestartsThenResponseOkNewApi [GOOD] >> KqpExplain::MultiUsedStage [GOOD] >> KqpExplain::Predicates >> KqpStats::RequestUnitForBadRequestExecute ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestInlineWriteReadDeleteWithRestartsThenResponseOkNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:52:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:52:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:78:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:79:2110] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:79:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! 
new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:82:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:83:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:83:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! 
new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:140:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:83:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:86:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:87:2057] recipient: [8:85:2115] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:89:2057] recipient: [8:85:2115] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! 
new actor is[8:88:2116] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:142:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:83:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:86:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:85:2115] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:89:2057] recipient: [9:85:2115] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! new actor is[9:88:2116] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:142:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:52:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:52:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:84:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:87:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:88:2057] recipient: [10:86:2115] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:90:2057] recipient: [10:86:2115] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:89:2116] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:143:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:87:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:90:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:91:2057] recipient: [11:89:2118] Leader for TabletID 72057594037927937 is [11:92:2119] sender: [11:93:2057] recipient: [11:89:2118] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! 
new actor is[11:92:2119] Leader for TabletID 72057594037927937 is [11:92:2119] sender: [11:146:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (acto ... 7927937 (actor [16:56:2097]) tablet resolver refreshed! new actor is[16:81:2111] Leader for TabletID 72057594037927937 is [16:81:2111] sender: [16:135:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:52:2095] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:57:2057] recipient: [17:52:2095] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:74:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:76:2057] recipient: [17:36:2083] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:79:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:80:2057] recipient: [17:78:2110] Leader for TabletID 72057594037927937 is [17:81:2111] sender: [17:82:2057] recipient: [17:78:2110] !Reboot 72057594037927937 (actor [17:56:2097]) rebooted! !Reboot 72057594037927937 (actor [17:56:2097]) tablet resolver refreshed! new actor is[17:81:2111] Leader for TabletID 72057594037927937 is [17:81:2111] sender: [17:135:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:50:2095] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:57:2057] recipient: [18:50:2095] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:74:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:77:2057] recipient: [18:36:2083] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:80:2057] recipient: [18:79:2110] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:81:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:82:2111] sender: [18:83:2057] recipient: [18:79:2110] !Reboot 72057594037927937 (actor [18:56:2097]) rebooted! !Reboot 72057594037927937 (actor [18:56:2097]) tablet resolver refreshed! new actor is[18:82:2111] Leader for TabletID 72057594037927937 is [18:82:2111] sender: [18:136:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:51:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:57:2057] recipient: [19:51:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:74:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:80:2057] recipient: [19:36:2083] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:83:2057] recipient: [19:82:2113] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:84:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [19:85:2114] sender: [19:86:2057] recipient: [19:82:2113] !Reboot 72057594037927937 (actor [19:56:2097]) rebooted! !Reboot 72057594037927937 (actor [19:56:2097]) tablet resolver refreshed! new actor is[19:85:2114] Leader for TabletID 72057594037927937 is [19:85:2114] sender: [19:139:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:51:2095] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:57:2057] recipient: [20:51:2095] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:74:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:56:2097]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:80:2057] recipient: [20:36:2083] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:83:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:84:2057] recipient: [20:82:2113] Leader for TabletID 72057594037927937 is [20:85:2114] sender: [20:86:2057] recipient: [20:82:2113] !Reboot 72057594037927937 (actor [20:56:2097]) rebooted! !Reboot 72057594037927937 (actor [20:56:2097]) tablet resolver refreshed! new actor is[20:85:2114] Leader for TabletID 72057594037927937 is [20:85:2114] sender: [20:139:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:50:2095] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:57:2057] recipient: [21:50:2095] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:74:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:81:2057] recipient: [21:36:2083] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:84:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:85:2057] recipient: [21:83:2113] Leader for TabletID 72057594037927937 is [21:86:2114] sender: [21:87:2057] recipient: [21:83:2113] !Reboot 72057594037927937 (actor [21:56:2097]) rebooted! !Reboot 72057594037927937 (actor [21:56:2097]) tablet resolver refreshed! new actor is[21:86:2114] Leader for TabletID 72057594037927937 is [21:86:2114] sender: [21:104:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:57:2057] recipient: [22:51:2095] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:74:2057] recipient: [22:14:2061] !Reboot 72057594037927937 (actor [22:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:83:2057] recipient: [22:36:2083] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:86:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:87:2057] recipient: [22:85:2115] Leader for TabletID 72057594037927937 is [22:88:2116] sender: [22:89:2057] recipient: [22:85:2115] !Reboot 72057594037927937 (actor [22:56:2097]) rebooted! !Reboot 72057594037927937 (actor [22:56:2097]) tablet resolver refreshed! new actor is[22:88:2116] Leader for TabletID 72057594037927937 is [22:88:2116] sender: [22:142:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:54:2057] recipient: [23:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:54:2057] recipient: [23:51:2095] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:57:2057] recipient: [23:51:2095] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:74:2057] recipient: [23:14:2061] !Reboot 72057594037927937 (actor [23:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:83:2057] recipient: [23:36:2083] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:86:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:87:2057] recipient: [23:85:2115] Leader for TabletID 72057594037927937 is [23:88:2116] sender: [23:89:2057] recipient: [23:85:2115] !Reboot 72057594037927937 (actor [23:56:2097]) rebooted! !Reboot 72057594037927937 (actor [23:56:2097]) tablet resolver refreshed! new actor is[23:88:2116] Leader for TabletID 72057594037927937 is [23:88:2116] sender: [23:142:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:54:2057] recipient: [24:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:54:2057] recipient: [24:51:2095] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:57:2057] recipient: [24:51:2095] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:74:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:84:2057] recipient: [24:36:2083] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:87:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:88:2057] recipient: [24:86:2115] Leader for TabletID 72057594037927937 is [24:89:2116] sender: [24:90:2057] recipient: [24:86:2115] !Reboot 72057594037927937 (actor [24:56:2097]) rebooted! !Reboot 72057594037927937 (actor [24:56:2097]) tablet resolver refreshed! new actor is[24:89:2116] Leader for TabletID 72057594037927937 is [24:89:2116] sender: [24:143:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:54:2057] recipient: [25:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:54:2057] recipient: [25:51:2095] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:57:2057] recipient: [25:51:2095] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:74:2057] recipient: [25:14:2061] !Reboot 72057594037927937 (actor [25:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:87:2057] recipient: [25:36:2083] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:90:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:91:2057] recipient: [25:89:2118] Leader for TabletID 72057594037927937 is [25:92:2119] sender: [25:93:2057] recipient: [25:89:2118] !Reboot 72057594037927937 (actor [25:56:2097]) rebooted! !Reboot 72057594037927937 (actor [25:56:2097]) tablet resolver refreshed! new actor is[25:92:2119] Leader for TabletID 72057594037927937 is [25:92:2119] sender: [25:146:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:54:2057] recipient: [26:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:54:2057] recipient: [26:52:2095] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:57:2057] recipient: [26:52:2095] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:74:2057] recipient: [26:14:2061] !Reboot 72057594037927937 (actor [26:56:2097]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:87:2057] recipient: [26:36:2083] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:90:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:91:2057] recipient: [26:89:2118] Leader for TabletID 72057594037927937 is [26:92:2119] sender: [26:93:2057] recipient: [26:89:2118] !Reboot 72057594037927937 (actor [26:56:2097]) rebooted! !Reboot 72057594037927937 (actor [26:56:2097]) tablet resolver refreshed! new actor is[26:92:2119] Leader for TabletID 72057594037927937 is [26:92:2119] sender: [26:146:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:54:2057] recipient: [27:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:54:2057] recipient: [27:50:2095] Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:57:2057] recipient: [27:50:2095] Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:74:2057] recipient: [27:14:2061] >> KqpExplain::Explain >> KqpQuery::RowsLimit [GOOD] >> KqpQuery::RowsLimitServiceOverride >> Cdc::SequentialSplitMerge [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentSchemeTx >> Cdc::InitialScan_TopicAutoPartitioning [GOOD] >> Cdc::InitialScanUpdatedRows >> KqpLimits::ComputeActorMemoryAllocationFailure+useSink [GOOD] >> KqpLimits::ComputeActorMemoryAllocationFailure-useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbQueryService::TestAttachTwice [GOOD] Test command err: 2025-03-31T16:17:00.676490Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008422557890308:2082];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:00.676534Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dc1/r3tmp/tmpj0e0hQ/pdisk_1.dat 2025-03-31T16:17:01.072741Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:01.072908Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:01.077297Z node 1 :HIVE WARN: HIVE#72057594037968897 
Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:17:01.100632Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15819, node 1 2025-03-31T16:17:01.141539Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:17:01.141566Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:17:01.206704Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:01.206724Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:01.206729Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:01.206832Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10618 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:01.514255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
TClient is connected to server localhost:10618 2025-03-31T16:17:01.887865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateColumnStore CreateColumnStore { Name: "OlapStore" ColumnShardCount: 4 SchemaPresets { Name: "default" Schema { Columns { Name: "message" Type: "Utf8" } Columns { Name: "json_payload" Type: "JsonDocument" } Columns { Name: "resource_id" Type: "Utf8" NotNull: true } Columns { Name: "uid" Type: "Utf8" NotNull: true } Columns { Name: "timestamp" Type: "Timestamp" NotNull: true } Columns { Name: "resource_type" Type: "Utf8" NotNull: true } Columns { Name: "level" Type: "Int32" } Columns { Name: "ingested_at" Type: "Timestamp" } Columns { Name: "saved_at" Type: "Timestamp" } Columns { Name: "request_id" Type: "Utf8" } KeyColumnNames: "timestamp" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" } } } } TxId: 281474976710658 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-03-31T16:17:01.888282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateOlapStore Propose, path: /Root/OlapStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:17:01.893268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: OlapStore, child id: [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-03-31T16:17:01.893320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 0 2025-03-31T16:17:01.893408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-03-31T16:17:01.893486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-03-31T16:17:01.893533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-03-31T16:17:01.893604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 4 2025-03-31T16:17:01.893885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 5 2025-03-31T16:17:01.895746Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2025-03-31T16:17:01.896012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-31T16:17:01.896042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:17:01.896174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-03-31T16:17:01.896224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 6 2025-03-31T16:17:01.900203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710658, response: 
Status: StatusAccepted TxId: 281474976710658 SchemeshardId: 72057594046644480 PathId: 2, at schemeshard: 72057594046644480 2025-03-31T16:17:01.900374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE COLUMN STORE, path: /Root/OlapStore 2025-03-31T16:17:01.900579Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-03-31T16:17:01.900592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-03-31T16:17:01.900717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2025-03-31T16:17:01.900818Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-03-31T16:17:01.900846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7488008426852858325:2467], at schemeshard: 72057594046644480, txId: 281474976710658, path id: 1 2025-03-31T16:17:01.900857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7488008426852858325:2467], at schemeshard: 72057594046644480, txId: 281474976710658, path id: 2 2025-03-31T16:17:01.900899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:17:01.900925Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateOlapStore, at tablet# 72057594046644480 2025-03-31T16:17:01.901655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710658:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 1 TabletType: ColumnShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } 
BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-03-31T16:17:01.902160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710658:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 2 TabletType: ColumnShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 2 BindedChannels { ... Tables { TablePath: "/Root/OlapStore/log1" } ExecuterCpuTimeUs: 94598 StartTimeMs: 1743438004848 FinishTimeMs: 1743438005039 Stages { StageGuid: "6702e4e3-ed9d3036-41faade1-3a67cc43" Program: "(\n(declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType \'Timestamp)) (OptionalType (DataType \'Utf8)) (OptionalType (DataType \'Utf8)) (OptionalType (DataType \'Utf8)) (DataType \'Int32)) (TupleType (OptionalType (DataType \'Timestamp)) (OptionalType (DataType \'Utf8)) (OptionalType (DataType \'Utf8)) (OptionalType (DataType \'Utf8)) (DataType \'Int32))))))\n(return (lambda \'() (block \'(\n (let $1 (KqpTable \'\"/Root/OlapStore/log1\" \'\"72057594046644480:3\" \'\"\" \'1))\n (let $2 (OptionalType (DataType \'Utf8)))\n (let $3 (TupleType (OptionalType (DataType \'Timestamp)) $2 $2 $2 (DataType \'Int32)))\n (let $4 \'(\'\"ingested_at\" \'\"json_payload\" \'\"level\" \'\"message\" \'\"request_id\" \'\"resource_id\" \'\"resource_type\" \'\"saved_at\" \'\"timestamp\" \'\"uid\"))\n (let $5 (Uint64 \'50))\n (let $6 \'(\'(\'\"UsedKeyColumns\" \'(\'\"timestamp\")) \'(\'\"ExpectedMaxRanges\" \'1) \'(\'\"PointPrefixLen\" \'0)))\n (let $7 (KqpWideReadOlapTableRanges $1 %kqp%tx_result_binding_0_0 $4 \'(\'(\'\"ItemsLimit\" $5) \'(\'\"Reverse\")) $6 (lambda \'($10) $10)))\n (let $8 (Bool \'false))\n (let $9 \'(\'(\'8 $8) \'(\'6 $8) \'(\'5 $8) \'(\'9 $8)))\n (return (FromFlow (WideTopSort $7 $5 $9)))\n))))\n)\n" ComputeActors { CpuTimeUs: 1156 Tasks { TaskId: 37 CpuTimeUs: 440 FinishTimeMs: 1743438005004 Tables { TablePath: "/Root/OlapStore/log1" } ComputeCpuTimeUs: 46 BuildCpuTimeUs: 394 Sources { IngressName: "CS" Ingress { } } HostName: "ghrun-tuhoqst4xe" NodeId: 22 CreateTimeMs: 1743438004897 } MaxMemoryUsage: 1048576 } BaseTimeMs: 1743438004968 } Stages { StageId: 1 StageGuid: 
"2549b939-5fe8358b-1fb8e71e-aa10c78b" Program: "(\n(return (lambda \'($1) (FromFlow (NarrowMap (Take (ToFlow $1) (Uint64 \'50)) (lambda \'($2 $3 $4 $5 $6 $7 $8 $9 $10 $11) (AsStruct \'(\'\"ingested_at\" $2) \'(\'\"json_payload\" $3) \'(\'\"level\" $4) \'(\'\"message\" $5) \'(\'\"request_id\" $6) \'(\'\"resource_id\" $7) \'(\'\"resource_type\" $8) \'(\'\"saved_at\" $9) \'(\'\"timestamp\" $10) \'(\'\"uid\" $11)))))))\n)\n" ComputeActors { CpuTimeUs: 8386 Tasks { TaskId: 65 StageId: 1 CpuTimeUs: 387 FinishTimeMs: 1743438005037 ComputeCpuTimeUs: 103 BuildCpuTimeUs: 284 HostName: "ghrun-tuhoqst4xe" NodeId: 22 CreateTimeMs: 1743438004917 } MaxMemoryUsage: 1048576 } BaseTimeMs: 1743438004968 } TxPlansWithStats: "{\"Node Type\":\"Phase\",\"PlanNodeId\":5,\"Plans\":[{\"Node Type\":\"ResultSet_1\",\"PlanNodeId\":4,\"PlanNodeType\":\"ResultSet\",\"Plans\":[{\"Node Type\":\"Limit\",\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":2}],\"Limit\":\"50\",\"Name\":\"Limit\"}],\"PlanNodeId\":3,\"Plans\":[{\"Node Type\":\"Merge\",\"PlanNodeId\":2,\"PlanNodeType\":\"Connection\",\"Plans\":[{\"Node Type\":\"TopSort-TableRangeScan\",\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Limit\":\"50\",\"Name\":\"TopSort\",\"TopSortBy\":\"[row.timestamp,row.resource_type,row.resource_id,row.uid]\"},{\"E-Cost\":\"No estimate\",\"E-Rows\":\"No estimate\",\"E-Size\":\"No estimate\",\"Inputs\":[],\"Name\":\"TableRangeScan\",\"Path\":\"\\/Root\\/OlapStore\\/log1\",\"ReadColumns\":[\"ingested_at\",\"json_payload\",\"level\",\"message\",\"request_id\",\"resource_id\",\"resource_type\",\"saved_at\",\"timestamp\",\"uid\"],\"ReadLimit\":\"50\",\"ReadRanges\":[\"timestamp [4000000, 4093000]\"],\"ReadRangesExpectedSize\":1,\"ReadRangesKeys\":[\"timestamp\"],\"Reverse\":true,\"Scan\":\"Parallel\",\"SsaProgram\":{\"Command\":[{\"Projection\":{\"Columns\":[{\"Id\":8},{\"Id\":2},{\"Id\":7},{\"Id\":1},{\"Id\":10},{\"Id\":3},{\"Id\":6},{\"Id\":9},{\"Id\":5},{\"Id\":4}]}}],\"Version\":5},\"Table\":\"OlapStore\\/log1\"}],\"PlanNodeId\":1,\"StageGuid\":\"6702e4e3-ed9d3036-41faade1-3a67cc43\",\"Stats\":{\"BaseTimeMs\":1743438004968,\"ComputeNodes\":[{\"CpuTimeUs\":1156,\"Tasks\":[{\"ComputeTimeUs\":46,\"FinishTimeMs\":1743438005004,\"Host\":\"ghrun-tuhoqst4xe\",\"NodeId\":22,\"TaskId\":37}]}],\"FinishedTasks\":0,\"PhysicalStageId\":0,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"},\"Tables\":[\"OlapStore\\/log1\"]}],\"SortColumns\":[\"timestamp (Desc)\",\"resource_type (Desc)\",\"resource_id (Desc)\",\"uid (Desc)\"],\"StageGuid\":\"\"}],\"StageGuid\":\"2549b939-5fe8358b-1fb8e71e-aa10c78b\",\"Stats\":{\"BaseTimeMs\":1743438004968,\"ComputeNodes\":[{\"CpuTimeUs\":8386,\"Tasks\":[{\"ComputeTimeUs\":103,\"FinishTimeMs\":1743438005037,\"Host\":\"ghrun-tuhoqst4xe\",\"NodeId\":22,\"TaskId\":65}]}],\"FinishedTasks\":0,\"PhysicalStageId\":1,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"StageGuid\":\"\"}],\"StageGuid\":\"\"}" StatConvertBytes: 1963 Extra { type_url: "type.googleapis.com/NKqpProto.TKqpExecutionExtraStats" value: "\010\002\022\014\010\350\007\020\302A\030\210\347\005 A" } } 2025-03-31T16:20:05.042589Z node 22 :KQP_EXECUTER DEBUG: ActorId: [22:7488009211904705219:3507] TxId: 281474976710670. Ctx: { TraceId: 01jqpg1g204rhvanr4n3zera5v, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=22&id=ZTVhZTA2NTUtYTk4MmRjM2MtMzNhYzMyMTQtYTU2ZGI5ODc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 
2025-03-31T16:20:05.042687Z node 22 :KQP_EXECUTER DEBUG: ActorId: [22:7488009211904705219:3507] TxId: 281474976710670. Ctx: { TraceId: 01jqpg1g204rhvanr4n3zera5v, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=22&id=ZTVhZTA2NTUtYTk4MmRjM2MtMzNhYzMyMTQtYTU2ZGI5ODc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.095112s ReadRows: 0 ReadBytes: 0 ru: 63 rate limiter was not found force flag: 1 2025-03-31T16:20:05.042810Z node 22 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=22&id=ZTVhZTA2NTUtYTk4MmRjM2MtMzNhYzMyMTQtYTU2ZGI5ODc=, ActorId: [22:7488009211904705177:3507], ActorState: ExecuteState, TraceId: 01jqpg1g204rhvanr4n3zera5v, TEvTxResponse, CurrentTx: 2/2 response.status: SUCCESS 2025-03-31T16:20:05.043289Z node 22 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=22&id=ZTVhZTA2NTUtYTk4MmRjM2MtMzNhYzMyMTQtYTU2ZGI5ODc=, ActorId: [22:7488009211904705177:3507], ActorState: ExecuteState, TraceId: 01jqpg1g204rhvanr4n3zera5v, txInfo Status: Active Kind: ReadOnly TotalDuration: 0 ServerDuration: 200.877 QueriesCount: 1 2025-03-31T16:20:05.043367Z node 22 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=22&id=ZTVhZTA2NTUtYTk4MmRjM2MtMzNhYzMyMTQtYTU2ZGI5ODc=, ActorId: [22:7488009211904705177:3507], ActorState: ExecuteState, TraceId: 01jqpg1g204rhvanr4n3zera5v, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-03-31T16:20:05.043500Z node 22 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=22&id=ZTVhZTA2NTUtYTk4MmRjM2MtMzNhYzMyMTQtYTU2ZGI5ODc=, ActorId: [22:7488009211904705177:3507], ActorState: ExecuteState, TraceId: 01jqpg1g204rhvanr4n3zera5v, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-03-31T16:20:05.043546Z node 22 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=22&id=ZTVhZTA2NTUtYTk4MmRjM2MtMzNhYzMyMTQtYTU2ZGI5ODc=, ActorId: [22:7488009211904705177:3507], ActorState: ExecuteState, TraceId: 01jqpg1g204rhvanr4n3zera5v, EndCleanup, isFinal: 1 2025-03-31T16:20:05.043630Z node 22 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=22&id=ZTVhZTA2NTUtYTk4MmRjM2MtMzNhYzMyMTQtYTU2ZGI5ODc=, ActorId: [22:7488009211904705177:3507], ActorState: ExecuteState, TraceId: 01jqpg1g204rhvanr4n3zera5v, Sent query response back to proxy, proxyRequestId: 5, proxyId: [22:7488009173249995502:2280] 2025-03-31T16:20:05.043683Z node 22 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=22&id=ZTVhZTA2NTUtYTk4MmRjM2MtMzNhYzMyMTQtYTU2ZGI5ODc=, ActorId: [22:7488009211904705177:3507], ActorState: unknown state, TraceId: 01jqpg1g204rhvanr4n3zera5v, Cleanup temp tables: 0 RESULT: [] --------------------- STATS: total CPU: 2566 duration: 1218 usec cpu: 1218 usec duration: 190198 usec cpu: 189710 usec { name: "/Root/OlapStore/log1" } 2025-03-31T16:20:05.046712Z node 22 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438004308, txId: 18446744073709551615] shutting down 2025-03-31T16:20:05.046852Z node 22 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=22&id=ZTVhZTA2NTUtYTk4MmRjM2MtMzNhYzMyMTQtYTU2ZGI5ODc=, ActorId: [22:7488009211904705177:3507], ActorState: unknown state, TraceId: 01jqpg1g204rhvanr4n3zera5v, Session actor destroyed 2025-03-31T16:20:07.008390Z node 25 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[25:7488009222897968562:2073];send_to=[0:7307199536658146131:7762515]; 
2025-03-31T16:20:07.008571Z node 25 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dc1/r3tmp/tmp7BgESQ/pdisk_1.dat 2025-03-31T16:20:07.220142Z node 25 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:20:07.255975Z node 25 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:07.256135Z node 25 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:07.260894Z node 25 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10264, node 25 2025-03-31T16:20:07.353767Z node 25 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:07.353806Z node 25 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:07.353830Z node 25 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:07.354074Z node 25 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4031 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:07.956106Z node 25 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:20:12.008350Z node 25 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[25:7488009222897968562:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:12.008477Z node 25 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpExplain::LimitOffset [GOOD] >> KqpExplain::FullOuterJoin >> KqpParams::Decimal+QueryService-UseSink [GOOD] >> KqpParams::Decimal-QueryService+UseSink >> KqpQuery::Now [GOOD] >> KqpQuery::NoEvaluate >> KqpLimits::StreamWrite+Allowed [GOOD] >> KqpLimits::StreamWrite-Allowed >> KqpQuery::RewriteIfPresentToMap >> KqpLimits::ReadsetCountLimit [GOOD] >> KqpLimits::QueryExecTimeoutCancel >> KqpStats::MultiTxStatsFullExpYql [GOOD] >> KqpStats::MultiTxStatsFullExpScan >> KqpParams::RowsList >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOk [GOOD] >> KqpQuery::DeleteWhereInSubquery [GOOD] >> KqpQuery::DictJoin >> TKeyValueTest::TestInlineCopyRangeWorks [GOOD] >> TKeyValueTest::TestInlineCopyRangeWorksNewApi >> KqpStats::DataQueryMulti [GOOD] >> KqpQuery::GenericQueryNoRowsLimit [GOOD] >> KqpQuery::GenericQueryNoRowsLimitLotsOfRows >> KqpStats::JoinStatsBasicYql+StreamLookupJoin [GOOD] >> KqpStats::JoinStatsBasicScan ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOk [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:52:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:52:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:78:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:79:2110] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:79:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! 
Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! 
Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:104:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:83:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:86:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:87:2057] recipient: [8:85:2115] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:89:2057] recipient: [8:85:2115] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! new actor is[8:88:2116] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:142:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! 
Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:83:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:86:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:85:2115] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:89:2057] recipient: [9:85:2115] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! new actor is[9:88:2116] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:142:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:52:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:52:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:84:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:87:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:88:2057] recipient: [10:86:2115] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:90:2057] recipient: [10:86:2115] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:89:2116] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:107:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:86:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:89:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:90:2057] recipient: [11:88:2117] Leader for TabletID 72057594037927937 is [11:91:2118] sender: [11:92:2057] recipient: [11:88:2117] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! new actor is[11:91:2118] Leader for TabletID 72057594037927937 is [11:91:2118] sender: [11:145:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (a ... 594037927937 (actor [23:56:2097]) tablet resolver refreshed! 
new actor is[23:85:2114] Leader for TabletID 72057594037927937 is [23:85:2114] sender: [23:139:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:54:2057] recipient: [24:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:54:2057] recipient: [24:51:2095] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:57:2057] recipient: [24:51:2095] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:74:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:81:2057] recipient: [24:36:2083] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:84:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:85:2057] recipient: [24:83:2113] Leader for TabletID 72057594037927937 is [24:86:2114] sender: [24:87:2057] recipient: [24:83:2113] !Reboot 72057594037927937 (actor [24:56:2097]) rebooted! !Reboot 72057594037927937 (actor [24:56:2097]) tablet resolver refreshed! new actor is[24:86:2114] Leader for TabletID 72057594037927937 is [24:86:2114] sender: [24:140:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:54:2057] recipient: [25:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:54:2057] recipient: [25:51:2095] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:57:2057] recipient: [25:51:2095] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:74:2057] recipient: [25:14:2061] !Reboot 72057594037927937 (actor [25:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:84:2057] recipient: [25:36:2083] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:87:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:88:2057] recipient: [25:86:2116] Leader for TabletID 72057594037927937 is [25:89:2117] sender: [25:90:2057] recipient: [25:86:2116] !Reboot 72057594037927937 (actor [25:56:2097]) rebooted! !Reboot 72057594037927937 (actor [25:56:2097]) tablet resolver refreshed! new actor is[25:89:2117] Leader for TabletID 72057594037927937 is [25:89:2117] sender: [25:143:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:54:2057] recipient: [26:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:54:2057] recipient: [26:52:2095] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:57:2057] recipient: [26:52:2095] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:74:2057] recipient: [26:14:2061] !Reboot 72057594037927937 (actor [26:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:84:2057] recipient: [26:36:2083] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:86:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:88:2057] recipient: [26:87:2116] Leader for TabletID 72057594037927937 is [26:89:2117] sender: [26:90:2057] recipient: [26:87:2116] !Reboot 72057594037927937 (actor [26:56:2097]) rebooted! !Reboot 72057594037927937 (actor [26:56:2097]) tablet resolver refreshed! 
new actor is[26:89:2117] Leader for TabletID 72057594037927937 is [26:89:2117] sender: [26:143:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:54:2057] recipient: [27:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:54:2057] recipient: [27:50:2095] Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:57:2057] recipient: [27:50:2095] Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:74:2057] recipient: [27:14:2061] !Reboot 72057594037927937 (actor [27:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:85:2057] recipient: [27:36:2083] Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:88:2057] recipient: [27:14:2061] Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:89:2057] recipient: [27:87:2116] Leader for TabletID 72057594037927937 is [27:90:2117] sender: [27:91:2057] recipient: [27:87:2116] !Reboot 72057594037927937 (actor [27:56:2097]) rebooted! !Reboot 72057594037927937 (actor [27:56:2097]) tablet resolver refreshed! new actor is[27:90:2117] Leader for TabletID 72057594037927937 is [27:90:2117] sender: [27:144:2057] recipient: [27:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:54:2057] recipient: [28:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:54:2057] recipient: [28:51:2095] Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:57:2057] recipient: [28:51:2095] Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:74:2057] recipient: [28:14:2061] !Reboot 72057594037927937 (actor [28:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:88:2057] recipient: [28:36:2083] Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:91:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:92:2057] recipient: [28:90:2119] Leader for TabletID 72057594037927937 is [28:93:2120] sender: [28:94:2057] recipient: [28:90:2119] !Reboot 72057594037927937 (actor [28:56:2097]) rebooted! !Reboot 72057594037927937 (actor [28:56:2097]) tablet resolver refreshed! new actor is[28:93:2120] Leader for TabletID 72057594037927937 is [28:93:2120] sender: [28:147:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:54:2057] recipient: [29:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:54:2057] recipient: [29:51:2095] Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:57:2057] recipient: [29:51:2095] Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:74:2057] recipient: [29:14:2061] !Reboot 72057594037927937 (actor [29:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:88:2057] recipient: [29:36:2083] Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:91:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:92:2057] recipient: [29:90:2119] Leader for TabletID 72057594037927937 is [29:93:2120] sender: [29:94:2057] recipient: [29:90:2119] !Reboot 72057594037927937 (actor [29:56:2097]) rebooted! !Reboot 72057594037927937 (actor [29:56:2097]) tablet resolver refreshed! 
new actor is[29:93:2120] Leader for TabletID 72057594037927937 is [29:93:2120] sender: [29:147:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:54:2057] recipient: [30:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:54:2057] recipient: [30:51:2095] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:57:2057] recipient: [30:51:2095] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:74:2057] recipient: [30:14:2061] !Reboot 72057594037927937 (actor [30:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:89:2057] recipient: [30:36:2083] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:92:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:93:2057] recipient: [30:91:2119] Leader for TabletID 72057594037927937 is [30:94:2120] sender: [30:95:2057] recipient: [30:91:2119] !Reboot 72057594037927937 (actor [30:56:2097]) rebooted! !Reboot 72057594037927937 (actor [30:56:2097]) tablet resolver refreshed! new actor is[30:94:2120] Leader for TabletID 72057594037927937 is [30:94:2120] sender: [30:148:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:54:2057] recipient: [31:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:54:2057] recipient: [31:52:2095] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:57:2057] recipient: [31:52:2095] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:74:2057] recipient: [31:14:2061] !Reboot 72057594037927937 (actor [31:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:91:2057] recipient: [31:36:2083] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:94:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:95:2057] recipient: [31:93:2121] Leader for TabletID 72057594037927937 is [31:96:2122] sender: [31:97:2057] recipient: [31:93:2121] !Reboot 72057594037927937 (actor [31:56:2097]) rebooted! !Reboot 72057594037927937 (actor [31:56:2097]) tablet resolver refreshed! new actor is[31:96:2122] Leader for TabletID 72057594037927937 is [31:96:2122] sender: [31:150:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:54:2057] recipient: [32:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:54:2057] recipient: [32:51:2095] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:57:2057] recipient: [32:51:2095] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:74:2057] recipient: [32:14:2061] !Reboot 72057594037927937 (actor [32:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:91:2057] recipient: [32:36:2083] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:94:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:95:2057] recipient: [32:93:2121] Leader for TabletID 72057594037927937 is [32:96:2122] sender: [32:97:2057] recipient: [32:93:2121] !Reboot 72057594037927937 (actor [32:56:2097]) rebooted! !Reboot 72057594037927937 (actor [32:56:2097]) tablet resolver refreshed! 
new actor is[32:96:2122] Leader for TabletID 72057594037927937 is [32:96:2122] sender: [32:150:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:54:2057] recipient: [33:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:54:2057] recipient: [33:50:2095] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:57:2057] recipient: [33:50:2095] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:74:2057] recipient: [33:14:2061] !Reboot 72057594037927937 (actor [33:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:92:2057] recipient: [33:36:2083] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:95:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:96:2057] recipient: [33:94:2121] Leader for TabletID 72057594037927937 is [33:97:2122] sender: [33:98:2057] recipient: [33:94:2121] !Reboot 72057594037927937 (actor [33:56:2097]) rebooted! !Reboot 72057594037927937 (actor [33:56:2097]) tablet resolver refreshed! new actor is[33:97:2122] Leader for TabletID 72057594037927937 is [33:97:2122] sender: [33:151:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:54:2057] recipient: [34:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:54:2057] recipient: [34:51:2095] Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:57:2057] recipient: [34:51:2095] Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:74:2057] recipient: [34:14:2061] >> KqpQuery::QueryCancelWrite [GOOD] >> KqpQuery::QueryCancelWriteImmediate >> TKeyValueTest::TestCopyRangeWorksNewApi [GOOD] >> TKeyValueTest::TestCopyRangeToLongKey >> TKeyValueTest::TestEmptyWriteReadDeleteWithRestartsThenResponseOkNewApi [GOOD] >> TKeyValueTest::TestGetStatusWorks >> KqpParams::CheckQueryCacheForPreparedQuery [GOOD] >> KqpParams::CheckQueryCacheForUnpreparedQuery >> KqpLimits::ComputeActorMemoryAllocationFailure-useSink [GOOD] >> KqpLimits::CancelAfterRwTx+useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::DataQueryMulti [GOOD] Test command err: Trying to start YDB, gRPC: 3939, MsgBus: 23588 2025-03-31T16:20:05.465069Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488009214168487146:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:05.465192Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a7a/r3tmp/tmpdxdNPC/pdisk_1.dat 2025-03-31T16:20:05.704338Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3939, node 1 2025-03-31T16:20:05.761296Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:05.761322Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:05.761327Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:05.761447Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:20:05.804480Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2025-03-31T16:20:05.804635Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:05.806127Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23588 TClient is connected to server localhost:23588 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:06.170434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:06.185127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:06.267724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:06.405330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:06.462162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:07.997083Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009222758423505:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:07.997191Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:08.269569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:20:08.295504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:20:08.320835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:20:08.347191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:20:08.371927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:20:08.409708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:20:08.487035Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009227053391317:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:08.487182Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009227053391322:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:08.487225Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:08.490630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:20:08.498668Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488009227053391324:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:20:08.595785Z node 1 :TX_PROXY ERROR: Actor# [1:7488009227053391379:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 12270, MsgBus: 7556 2025-03-31T16:20:10.250442Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488009236927083443:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:10.250616Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a7a/r3tmp/tmpf7qlYJ/pdisk_1.dat 2025-03-31T16:20:10.350422Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12270, node 2 2025-03-31T16:20:10.389053Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:10.389132Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:10.391728Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:20:10.417244Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:10.417270Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:10.417280Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:10.417410Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7556 TClient is connected to server localhost:7556 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:10.817771Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:10.832139Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:20:10.888188Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:11.018187Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:11.093530Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:12.933752Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488009245517019805:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:12.933833Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:12.961035Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:20:12.988780Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:20:13.015671Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:20:13.045507Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:20:13.076777Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:20:13.111006Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:20:13.148944Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488009249811987609:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:13.149019Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:13.149046Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488009249811987614:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:13.152723Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:20:13.161538Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488009249811987616:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:20:13.228372Z node 2 :TX_PROXY ERROR: Actor# [2:7488009249811987670:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 28221, MsgBus: 20399 2025-03-31T16:20:14.781466Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488009254946084350:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:14.781507Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a7a/r3tmp/tmpQ5hKl9/pdisk_1.dat 2025-03-31T16:20:14.865940Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28221, node 3 2025-03-31T16:20:14.910216Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:14.910302Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:14.911989Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:20:14.916246Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:14.916268Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:14.916276Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:14.916384Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20399 TClient is connected to server localhost:20399 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:15.386775Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:15.404306Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:20:15.475740Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:15.609358Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:15.674896Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:18.127563Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488009272125955305:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:18.127631Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:18.175780Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:20:18.205519Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:20:18.232262Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:20:18.262869Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:20:18.290870Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:20:18.356896Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:20:18.400325Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488009272125955818:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:18.400415Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:18.400443Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488009272125955823:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:18.403539Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:20:18.411830Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488009272125955825:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:20:18.478273Z node 3 :TX_PROXY ERROR: Actor# [3:7488009272125955878:3440] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> Cdc::InitialScanComplete [GOOD] >> Cdc::InitialScanEnqueuesZeroRecords >> KqpStats::RequestUnitForBadRequestExecute [GOOD] >> KqpStats::RequestUnitForBadRequestExplicitPrepare >> KqpLimits::StreamWrite-Allowed [GOOD] >> KqpLimits::TooBigColumn+useSink >> KqpStats::RequestUnitForSuccessExplicitPrepare >> KqpQuery::QueryResultsTruncated [GOOD] >> KqpQuery::QueryStats+UseSink >> KqpQuery::RowsLimitServiceOverride [GOOD] >> KqpQuery::SelectCountAsteriskFromVar >> KqpQuery::NoEvaluate [GOOD] >> KqpQuery::OlapCreateAsSelect_Complex >> KqpExplain::Explain [GOOD] >> KqpExplain::CompoundKeyRange >> KqpExplain::Predicates [GOOD] >> KqpExplain::MultiJoinCteLinks >> KqpLimits::QSReplySizeEnsureMemoryLimits+useSink >> BsControllerConfig::ManyPDisksRestarts [GOOD] >> BsControllerConfig::MergeBoxes >> KqpExplain::FullOuterJoin [GOOD] >> KqpExplain::MergeConnection >> KqpQuery::RewriteIfPresentToMap [GOOD] >> KqpQuery::ReadOverloaded+StreamLookup >> KqpStats::MultiTxStatsFullExpScan [GOOD] >> KqpStats::JoinStatsBasicYql-StreamLookupJoin >> KqpParams::RowsList [GOOD] >> KqpQuery::CurrentUtcTimestamp >> KqpParams::Decimal-QueryService+UseSink [GOOD] >> KqpParams::Decimal+QueryService+UseSink >> KqpQuery::QueryCancelWriteImmediate [GOOD] >> KqpQuery::DictJoin [GOOD] >> TKeyValueTest::TestInlineWriteReadWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestInlineWriteReadWithRestartsThenResponseOkNewApi >> Cdc::ShouldBreakLocksOnConcurrentSchemeTx [GOOD] >> Cdc::ResolvedTimestampsContinueAfterMerge >> Cdc::InitialScanUpdatedRows [GOOD] >> Cdc::MustNotLoseSchemaSnapshot >> KqpParams::CheckQueryCacheForUnpreparedQuery [GOOD] >> KqpParams::CheckQueryCacheForExecuteAndPreparedQueries ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::QueryCancelWriteImmediate [GOOD] Test command err: Trying to start YDB, gRPC: 19021, MsgBus: 12212 2025-03-31T16:20:08.137914Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488009227383540790:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:08.138005Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a5a/r3tmp/tmpuRltxn/pdisk_1.dat 2025-03-31T16:20:08.449857Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19021, node 1 2025-03-31T16:20:08.510151Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:08.510170Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:08.510189Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:08.510306Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:20:08.529039Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2025-03-31T16:20:08.529119Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:08.530816Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12212 TClient is connected to server localhost:12212 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:08.970561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:08.991041Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:09.109681Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:09.262975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:09.339291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:10.684608Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009235973477165:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:10.684754Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:10.937193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:20:10.967812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:20:10.993389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:20:11.019613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:20:11.043779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:20:11.108174Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:20:11.182120Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009240268444980:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:11.182194Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:11.182364Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009240268444985:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:11.185913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:20:11.194432Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488009240268444987:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:20:11.266442Z node 1 :TX_PROXY ERROR: Actor# [1:7488009240268445042:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:20:12.038233Z node 1 :GRPC_SERVER DEBUG: [0x51b00031fc80] received request Name# ExecuteDataQuery ok# true data# session_id: "ydb://session/3?node_id=1&id=MzMzN2U5MWItZWI2N2EyYTQtY2E1ZDY3NDEtMWFiZTZhZDE=" tx_control { begin_tx { serializable_read_write { } } commit_tx: true } query { yql_text: "\n SELECT * FROM `/Root/TwoShard`;\n " } query_cache_policy { } operation_params { } peer# ipv6:%5B::1%5D:39734 2025-03-31T16:20:12.038309Z node 1 :GRPC_SERVER DEBUG: [0x51b000320380] created request Name# ExecuteDataQuery 2025-03-31T16:20:12.038469Z node 1 :GRPC_SERVER DEBUG: [0x51b00031fc80] received request without user token Name# ExecuteDataQuery data# session_id: "ydb://session/3?node_id=1&id=MzMzN2U5MWItZWI2N2EyYTQtY2E1ZDY3NDEtMWFiZTZhZDE=" tx_control { begin_tx { serializable_read_write { } } commit_tx: true } query { yql_text: "\n SELECT * FROM `/Root/TwoShard`;\n " } query_cache_policy { } operation_params { } peer# ipv6:%5B::1%5D:39734 database# /Root 2025-03-31T16:20:12.038755Z node 1 :GRPC_SERVER DEBUG: Got grpc request# ExecuteDataQueryRequest, traceId# 01jqpg1qm65ptjz0r1fbsawekr, sdkBuildInfo# ydb-cpp-sdk/dev, state# AS_NOT_PERFORMED, database# /Root, peer# ipv6:[::1]:39734, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# 2.992365s 2025-03-31T16:20:13.137977Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488009227383540790:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:13.138035Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:19021 2025-03-31T16:20:15.030695Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7488009244563412655:2495] TxId: 281474976710671. Ctx: { TraceId: 01jqpg1qm65ptjz0r1fbsawekr, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzMzN2U5MWItZWI2N2EyYTQtY2E1ZDY3NDEtMWFiZTZhZDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2025-03-31T16:20:15.031311Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488009244563412662:2504], TxId: 281474976710671, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=MzMzN2U5MWItZWI2N2EyYTQtY2E1ZDY3NDEtMWFiZTZhZDE=. CustomerSuppliedId : . TraceId : 01jqpg1qm65ptjz0r1fbsawekr. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7488009244563412655:2495], status: ABORTED, reason: {
: Error: Terminate execution } 2025-03-31T16:20:15.031756Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488009244563412664:2505], TxId: 281474976710671, task: 2. Ctx: { TraceId : 01jqpg1qm65ptjz0r1fbsawekr. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=MzMzN2U5MWItZWI2N2EyYTQtY2E1ZDY3NDEtMWFiZTZhZDE=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7488009244563412655:2495], status: ABORTED, reason: {
: Error: Terminate execution } 2025-03-31T16:20:15.032270Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzMzN2U5MWItZWI2N2EyYTQtY2E1ZDY3NDEtMWFiZTZhZDE=, ActorId: [1:7488009244563412626:2495], ActorState: ExecuteState, TraceId: 01jqpg1qm65ptjz0r1fbsawekr, Create QueryResponse for error on request, msg: 2025-03-31T16:20:15.032286Z node 1 :GRPC_SERVER DEBUG: [0x51b00031fc80] issuing response Name# ExecuteDataQuery data# operation { ready: true status: INTERNAL_ERROR issues { message: "Closing Grpc request, client should not see this message." severity: 1 } } peer# ipv6:%5B::1%5D:39734 2025-03-31T16:20:15.032553Z node 1 :GRPC_SERVER DEBUG: [0x51b00031fc80] finished request Name# ExecuteDataQuery ok# false peer# unknown 2025-03-31T16:20:15.033938Z node 1 :GRPC_SERVER DEBUG: [0x51b000320380] received request Name# ExecuteDataQuery ok# true data# session_id: "ydb://session/3?node_id=1&id=MzMzN2U5MWItZWI2N2EyYTQtY2E1ZDY3NDEtMWFiZTZhZDE=" tx_control { begin_tx { serializable_read_write { } } commit_tx: true } query { yql_text: "\n SELECT * FROM `/Root/TwoShard`;\n " } query_cache_policy { } operation_params { } peer# ipv6:%5B::1%5D:60478 2025-03-31T16:2 ... 94046644480 waiting... 2025-03-31T16:20:16.441920Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:16.572283Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:16.628663Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:18.845570Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488009270873812922:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:18.845656Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:18.893826Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:20:18.922594Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:20:18.948874Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:20:18.993947Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:20:19.029962Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:20:19.100171Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:20:19.139692Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488009275168780733:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:19.139776Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:19.139833Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488009275168780738:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:19.143367Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:20:19.156029Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488009275168780740:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:20:19.225649Z node 2 :TX_PROXY ERROR: Actor# [2:7488009275168780793:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 26243, MsgBus: 64000 2025-03-31T16:20:20.654593Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488009281267531931:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:20.654710Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a5a/r3tmp/tmpR2IA4o/pdisk_1.dat 2025-03-31T16:20:20.753690Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:20:20.784857Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:20.784943Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:20.790138Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26243, node 3 2025-03-31T16:20:20.822285Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:20.822308Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:20.822317Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:20.822444Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64000 TClient is connected to server localhost:64000 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:21.173972Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:21.181450Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:20:21.228145Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:21.359291Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:21.433573Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:23.418490Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488009294152435587:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:23.418576Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:23.452544Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:20:23.480943Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:20:23.508244Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:20:23.574654Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:20:23.607403Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:20:23.678845Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:20:23.762589Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488009294152436110:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:23.762655Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:23.762775Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488009294152436115:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:23.766793Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:20:23.777800Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488009294152436117:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:20:23.854080Z node 3 :TX_PROXY ERROR: Actor# [3:7488009294152436173:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpQuery::GenericQueryNoRowsLimitLotsOfRows [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::DictJoin [GOOD] Test command err: Trying to start YDB, gRPC: 14778, MsgBus: 9061 2025-03-31T16:20:10.481302Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488009236521415411:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:10.481379Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a44/r3tmp/tmp9SO95Y/pdisk_1.dat 2025-03-31T16:20:10.828920Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14778, node 1 2025-03-31T16:20:10.872611Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:10.872745Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:10.880031Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:20:10.910301Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:10.910340Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:10.910348Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:10.910495Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9061 TClient is connected to server localhost:9061 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:11.357722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:20:11.386151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:11.499301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:11.653921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:11.715767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:13.036434Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009249406319090:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:13.036561Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:13.267938Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:20:13.295003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:20:13.320302Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:20:13.344085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:20:13.369315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:20:13.407128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:20:13.440811Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009249406319600:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:13.440883Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:13.440934Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009249406319605:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:13.443901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:20:13.452833Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488009249406319607:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:20:13.553238Z node 1 :TX_PROXY ERROR: Actor# [1:7488009249406319664:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:20:14.244770Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488009253701287252:2500], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:5:30: Error: Operation 'CreateTable' can't be performed in data query, code: 2008 2025-03-31T16:20:14.245078Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjFmNTJiY2UtMmRjYzdmZTQtZmVkZjI5N2EtZjc5MDc2MDE=, ActorId: [1:7488009253701287244:2495], ActorState: ExecuteState, TraceId: 01jqpg1srb8e1t3dd4dd0z1n6r, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Execution, code: 1060
:5:30: Error: Operation 'CreateTable' can't be performed in data query, code: 2008 2025-03-31T16:20:14.274155Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488009253701287268:2503], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:2:24: Error: Operation 'DropTable' can't be performed in data query, code: 2008 2025-03-31T16:20:14.274384Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjFmNTJiY2UtMmRjYzdmZTQtZmVkZjI5N2EtZjc5MDc2MDE=, ActorId: [1:7488009253701287244:2495], ActorState: ExecuteState, TraceId: 01jqpg1ssb57p55h87ax86pbgr, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Execution, code: 1060
:2:24: Error: Operation 'DropTable' can't be performed in data query, code: 2008 2025-03-31T16:20:14.297650Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488009253701287279:2507], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:2:54: Error: Operation 'AlterTable' can't be performed in data query, code: 2008 2025-03-31T16:20:14.297873Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjFmNTJiY2UtMmRjYzdmZTQtZmVkZjI5N2EtZjc5MDc2MDE=, ActorId: [1:7488009253701287244:2495], ActorState: ExecuteState, TraceId: 01jqpg1st9f5tr07c91htwe0dr, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Execution, code: 1060
:2:54: Error: Operation 'AlterTable' can't be performed in data query, code: 2008 Trying to start YDB, gRPC: 23513, MsgBus: 30919 2025-03-31T16:20:14.976090Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488009254848171156:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:14.976158Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a44/r3tmp/tmpIBfiqp/pdisk_1.dat 2025-03-31T16:20:15.150394Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:20:15.168192Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:15.168285Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:15.169818Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23513, node 2 2025-03-31T16:20:15.205499Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:15.205519Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:15.205527Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:15.205636Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30919 TClient is connected to server localhost:30919 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVers ... ype: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:15.901368Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:17.915386Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488009267733074839:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:17.915475Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:17.963563Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:20:17.994798Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:20:18.022426Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:20:18.047061Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:20:18.075574Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:20:18.109651Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:20:18.150847Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488009272028042647:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:18.150913Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:18.150977Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488009272028042652:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:18.153266Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:20:18.160786Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488009272028042654:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:20:18.248368Z node 2 :TX_PROXY ERROR: Actor# [2:7488009272028042709:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 21383, MsgBus: 12740 2025-03-31T16:20:20.051351Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488009279199638434:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:20.051396Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a44/r3tmp/tmpa87PZB/pdisk_1.dat 2025-03-31T16:20:20.213316Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21383, node 3 2025-03-31T16:20:20.233787Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:20.233879Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:20.234913Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:20:20.260663Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:20.260691Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:20.260700Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:20.260837Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12740 TClient is connected to server localhost:12740 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:20.719376Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:20.730576Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
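The GENERIC_ERROR compile failures earlier in this test's output ("Operation 'CreateTable'/'DropTable'/'AlterTable' can't be performed in data query, code: 2008") are deliberate negative cases: DDL has to go through a scheme request, while data transactions may only carry DML. A minimal sketch of the distinction using the Python ydb SDK, with a placeholder endpoint and table; treat the exact calls as an assumption, not as what this test does internally:

import ydb  # pip install ydb

def create_then_upsert(endpoint="grpc://localhost:2136", database="/Root"):
    driver = ydb.Driver(endpoint=endpoint, database=database)
    driver.wait(timeout=5)
    pool = ydb.SessionPool(driver)
    try:
        # DDL must be a scheme operation; issuing it inside a data
        # query fails to compile with code 2008, as logged above.
        pool.retry_operation_sync(lambda s: s.execute_scheme(
            "CREATE TABLE demo (id Uint64, PRIMARY KEY (id));"))
        # DML belongs in a data transaction.
        pool.retry_operation_sync(lambda s: s.transaction().execute(
            "UPSERT INTO demo (id) VALUES (1);", commit_tx=True))
    finally:
        pool.stop()
        driver.stop()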
2025-03-31T16:20:20.783690Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:20.948329Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:21.015871Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:23.203665Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488009292084542099:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:23.203760Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:23.251933Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:20:23.284080Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:20:23.324763Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:20:23.358829Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:20:23.392714Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:20:23.463791Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:20:23.505150Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488009292084542613:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:23.505273Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:23.505325Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488009292084542618:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:23.508799Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:20:23.518279Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488009292084542620:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:20:23.586426Z node 3 :TX_PROXY ERROR: Actor# [3:7488009292084542673:3446] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:20:25.051547Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488009279199638434:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:25.051706Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpStats::RequestUnitForBadRequestExplicitPrepare [GOOD] >> KqpStats::RequestUnitForExecute >> KqpQuery::QueryStats+UseSink [GOOD] >> KqpQuery::QueryStats-UseSink >> TKeyValueTest::TestInlineEmptyWriteReadDeleteWithRestartsThenResponseOkNewApi [GOOD] >> KqpStats::RequestUnitForSuccessExplicitPrepare [GOOD] >> KqpStats::StatsProfile >> KqpExplain::CompoundKeyRange [GOOD] >> KqpExplain::ExplainDataQuery ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::GenericQueryNoRowsLimitLotsOfRows [GOOD] Test command err: Trying to start YDB, gRPC: 24703, MsgBus: 62856 2025-03-31T16:20:10.427699Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488009237009315241:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:10.427841Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a49/r3tmp/tmpyOwHLT/pdisk_1.dat 2025-03-31T16:20:10.714753Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24703, node 1 2025-03-31T16:20:10.786038Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:10.786065Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:10.786075Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:10.786173Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:20:10.798796Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:10.798904Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:10.800682Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:62856 TClient is connected to server localhost:62856 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:11.272503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:11.288682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:11.415258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:11.570731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:11.644147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:13.006530Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009249894218915:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:13.006704Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:13.238358Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:20:13.267042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:20:13.295314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:20:13.320438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:20:13.347267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:20:13.399923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:20:13.436618Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009249894219424:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:13.436709Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:13.436731Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009249894219429:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:13.439966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:20:13.447935Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488009249894219431:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:20:13.507400Z node 1 :TX_PROXY ERROR: Actor# [1:7488009249894219486:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:20:14.367714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 27320, MsgBus: 29446 2025-03-31T16:20:15.381256Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488009259919559191:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:15.381367Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a49/r3tmp/tmp1ZGkpk/pdisk_1.dat 2025-03-31T16:20:15.481839Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27320, node 2 2025-03-31T16:20:15.524280Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:15.524372Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:15.526000Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:20:15.535731Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:15.535751Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:15.535765Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:15.535873Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29446 TClient is connected to server localhost:29446 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:15.900075Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:20:15.917435Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:15.974125Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:16.120809Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:16.180027Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:18.231203Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488009272804462842:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:18.231266Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:18.251592Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:20:18.279904Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:20:18.305058Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:20:18.327939Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:20:18.394751Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:20:18.424067Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:20:18.463117Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488009272804463354:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:18.463194Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488009272804463359:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:18.463203Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:18.466338Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:20:18.475441Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488009272804463361:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:20:18.540707Z node 2 :TX_PROXY ERROR: Actor# [2:7488009272804463414:3442] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 9925, MsgBus: 8157 2025-03-31T16:20:20.305336Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488009281227679580:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:20.305411Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a49/r3tmp/tmpHiezhp/pdisk_1.dat 2025-03-31T16:20:20.412262Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9925, node 3 2025-03-31T16:20:20.447755Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:20.447861Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:20.450140Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:20:20.487483Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:20.487508Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:20.487517Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:20.487643Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8157 TClient is connected to server localhost:8157 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:20.935298Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:20.951633Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
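With dozens of interleaved test runs in one file, it helps to reduce the log to its verdict markers (">> Suite::Case [GOOD]") and ERROR records before reading anything else. A throwaway stdlib-only filter; the regexes are guesses tuned to this log's layout, not an official format:

import collections
import re
import sys

VERDICT = re.compile(r">> (\S+::\S+) \[(\w+)\]")   # e.g. ">> KqpQuery::DictJoin [GOOD]"
ERROR = re.compile(r":\w+ ERROR:")                 # e.g. "node 2 :TX_PROXY ERROR:"

def triage(path):
    verdicts = collections.Counter()
    error_hits = 0
    with open(path, errors="replace") as f:
        for line in f:
            for _suite_case, status in VERDICT.findall(line):
                verdicts[status] += 1
            error_hits += len(ERROR.findall(line))
    return verdicts, error_hits

if __name__ == "__main__":
    counts, errs = triage(sys.argv[1])
    print(dict(counts), "ERROR records:", errs)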
2025-03-31T16:20:21.000889Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:21.132057Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:21.203306Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:23.582197Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488009294112583224:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:23.582288Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:23.633702Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:20:23.669463Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:20:23.700907Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:20:23.771539Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:20:23.815030Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:20:23.848833Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:20:23.894747Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488009294112583734:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:23.894885Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:23.894918Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488009294112583739:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:23.898409Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:20:23.909207Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488009294112583741:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:20:23.984746Z node 3 :TX_PROXY ERROR: Actor# [3:7488009294112583794:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:20:24.737057Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:25.307327Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488009281227679580:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:25.307504Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpQuery::SelectCountAsteriskFromVar [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestInlineEmptyWriteReadDeleteWithRestartsThenResponseOkNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:52:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:52:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:78:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:79:2110] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:79:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! 
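The TKeyValueTest output around this point comes from a reboot harness: for every intercepted event type it restarts the tablet at exactly that point ("!Reboot ... on event X !") and then checks that the tablet resolver picks up the new actor ("rebooted!", "tablet resolver refreshed!"). Reduced to a toy loop, with every name here illustrative rather than taken from the actual C++ harness:

def reboot_at_every_event(events, run_until, restart, verify):
    # For each intercepted event type, drive the scenario up to that
    # event, restart the tablet there, and check that the resolver
    # sees the new actor -- the pattern behind the "!Reboot ... on
    # event X !" lines in this log.
    for event in events:
        run_until(event)
        restart()
        assert verify(), f"no recovery after reboot on {event}"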
Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:82:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:83:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:83:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! 
Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:140:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:83:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:86:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:87:2057] recipient: [8:85:2115] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:89:2057] recipient: [8:85:2115] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! new actor is[8:88:2116] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:142:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! 
Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:83:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:86:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:85:2115] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:89:2057] recipient: [9:85:2115] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! new actor is[9:88:2116] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:142:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:52:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:52:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:84:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:87:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:88:2057] recipient: [10:86:2115] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:90:2057] recipient: [10:86:2115] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:89:2116] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:143:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:86:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:89:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:90:2057] recipient: [11:88:2117] Leader for TabletID 72057594037927937 is [11:91:2118] sender: [11:92:2057] recipient: [11:88:2117] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! new actor is[11:91:2118] Leader for TabletID 72057594037927937 is [11:91:2118] sender: [11:145:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (acto ... boot 72057594037927937 (actor [22:56:2097]) tablet resolver refreshed! 
new actor is[22:85:2114] Leader for TabletID 72057594037927937 is [22:85:2114] sender: [22:139:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:54:2057] recipient: [23:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:54:2057] recipient: [23:51:2095] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:57:2057] recipient: [23:51:2095] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:74:2057] recipient: [23:14:2061] !Reboot 72057594037927937 (actor [23:56:2097]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:80:2057] recipient: [23:36:2083] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:83:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:84:2057] recipient: [23:82:2113] Leader for TabletID 72057594037927937 is [23:85:2114] sender: [23:86:2057] recipient: [23:82:2113] !Reboot 72057594037927937 (actor [23:56:2097]) rebooted! !Reboot 72057594037927937 (actor [23:56:2097]) tablet resolver refreshed! new actor is[23:85:2114] Leader for TabletID 72057594037927937 is [23:85:2114] sender: [23:139:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:54:2057] recipient: [24:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:54:2057] recipient: [24:51:2095] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:57:2057] recipient: [24:51:2095] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:74:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:81:2057] recipient: [24:36:2083] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:84:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:85:2057] recipient: [24:83:2113] Leader for TabletID 72057594037927937 is [24:86:2114] sender: [24:87:2057] recipient: [24:83:2113] !Reboot 72057594037927937 (actor [24:56:2097]) rebooted! !Reboot 72057594037927937 (actor [24:56:2097]) tablet resolver refreshed! new actor is[24:86:2114] Leader for TabletID 72057594037927937 is [24:86:2114] sender: [24:104:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:54:2057] recipient: [25:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:54:2057] recipient: [25:51:2095] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:57:2057] recipient: [25:51:2095] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:74:2057] recipient: [25:14:2061] !Reboot 72057594037927937 (actor [25:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:83:2057] recipient: [25:36:2083] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:86:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:87:2057] recipient: [25:85:2115] Leader for TabletID 72057594037927937 is [25:88:2116] sender: [25:89:2057] recipient: [25:85:2115] !Reboot 72057594037927937 (actor [25:56:2097]) rebooted! !Reboot 72057594037927937 (actor [25:56:2097]) tablet resolver refreshed! 
new actor is[25:88:2116] Leader for TabletID 72057594037927937 is [25:88:2116] sender: [25:142:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:54:2057] recipient: [26:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:54:2057] recipient: [26:52:2095] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:57:2057] recipient: [26:52:2095] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:74:2057] recipient: [26:14:2061] !Reboot 72057594037927937 (actor [26:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:83:2057] recipient: [26:36:2083] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:86:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:87:2057] recipient: [26:85:2115] Leader for TabletID 72057594037927937 is [26:88:2116] sender: [26:89:2057] recipient: [26:85:2115] !Reboot 72057594037927937 (actor [26:56:2097]) rebooted! !Reboot 72057594037927937 (actor [26:56:2097]) tablet resolver refreshed! new actor is[26:88:2116] Leader for TabletID 72057594037927937 is [26:88:2116] sender: [26:142:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:54:2057] recipient: [27:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:54:2057] recipient: [27:50:2095] Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:57:2057] recipient: [27:50:2095] Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:74:2057] recipient: [27:14:2061] !Reboot 72057594037927937 (actor [27:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:84:2057] recipient: [27:36:2083] Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:87:2057] recipient: [27:14:2061] Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:88:2057] recipient: [27:86:2115] Leader for TabletID 72057594037927937 is [27:89:2116] sender: [27:90:2057] recipient: [27:86:2115] !Reboot 72057594037927937 (actor [27:56:2097]) rebooted! !Reboot 72057594037927937 (actor [27:56:2097]) tablet resolver refreshed! new actor is[27:89:2116] Leader for TabletID 72057594037927937 is [27:89:2116] sender: [27:107:2057] recipient: [27:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:54:2057] recipient: [28:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:54:2057] recipient: [28:51:2095] Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:57:2057] recipient: [28:51:2095] Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:74:2057] recipient: [28:14:2061] !Reboot 72057594037927937 (actor [28:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:86:2057] recipient: [28:36:2083] Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:89:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:90:2057] recipient: [28:88:2117] Leader for TabletID 72057594037927937 is [28:91:2118] sender: [28:92:2057] recipient: [28:88:2117] !Reboot 72057594037927937 (actor [28:56:2097]) rebooted! !Reboot 72057594037927937 (actor [28:56:2097]) tablet resolver refreshed! 
new actor is[28:91:2118] Leader for TabletID 72057594037927937 is [28:91:2118] sender: [28:145:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:54:2057] recipient: [29:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:54:2057] recipient: [29:51:2095] Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:57:2057] recipient: [29:51:2095] Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:74:2057] recipient: [29:14:2061] !Reboot 72057594037927937 (actor [29:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:86:2057] recipient: [29:36:2083] Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:89:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:90:2057] recipient: [29:88:2117] Leader for TabletID 72057594037927937 is [29:91:2118] sender: [29:92:2057] recipient: [29:88:2117] !Reboot 72057594037927937 (actor [29:56:2097]) rebooted! !Reboot 72057594037927937 (actor [29:56:2097]) tablet resolver refreshed! new actor is[29:91:2118] Leader for TabletID 72057594037927937 is [29:91:2118] sender: [29:145:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:54:2057] recipient: [30:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:54:2057] recipient: [30:51:2095] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:57:2057] recipient: [30:51:2095] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:74:2057] recipient: [30:14:2061] !Reboot 72057594037927937 (actor [30:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:87:2057] recipient: [30:36:2083] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:90:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:91:2057] recipient: [30:89:2117] Leader for TabletID 72057594037927937 is [30:92:2118] sender: [30:93:2057] recipient: [30:89:2117] !Reboot 72057594037927937 (actor [30:56:2097]) rebooted! !Reboot 72057594037927937 (actor [30:56:2097]) tablet resolver refreshed! new actor is[30:92:2118] Leader for TabletID 72057594037927937 is [30:92:2118] sender: [30:146:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:54:2057] recipient: [31:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:54:2057] recipient: [31:52:2095] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:57:2057] recipient: [31:52:2095] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:74:2057] recipient: [31:14:2061] !Reboot 72057594037927937 (actor [31:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:90:2057] recipient: [31:36:2083] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:93:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:94:2057] recipient: [31:92:2120] Leader for TabletID 72057594037927937 is [31:95:2121] sender: [31:96:2057] recipient: [31:92:2120] !Reboot 72057594037927937 (actor [31:56:2097]) rebooted! !Reboot 72057594037927937 (actor [31:56:2097]) tablet resolver refreshed! 
new actor is[31:95:2121] Leader for TabletID 72057594037927937 is [31:95:2121] sender: [31:149:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:54:2057] recipient: [32:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:54:2057] recipient: [32:51:2095] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:57:2057] recipient: [32:51:2095] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:74:2057] recipient: [32:14:2061] !Reboot 72057594037927937 (actor [32:56:2097]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:90:2057] recipient: [32:36:2083] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:93:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:94:2057] recipient: [32:92:2120] Leader for TabletID 72057594037927937 is [32:95:2121] sender: [32:96:2057] recipient: [32:92:2120] !Reboot 72057594037927937 (actor [32:56:2097]) rebooted! !Reboot 72057594037927937 (actor [32:56:2097]) tablet resolver refreshed! new actor is[32:95:2121] Leader for TabletID 72057594037927937 is [32:95:2121] sender: [32:149:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:54:2057] recipient: [33:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:54:2057] recipient: [33:50:2095] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:57:2057] recipient: [33:50:2095] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:74:2057] recipient: [33:14:2061] >> KqpLimits::KqpMkqlMemoryLimitException >> KqpStats::JoinStatsBasicScan [GOOD] >> TKeyValueTest::TestConcatWorksNewApi [GOOD] >> TKeyValueTest::TestConcatToLongKey >> KqpParams::MissingParameter >> KqpExplain::MultiJoinCteLinks [GOOD] >> KqpLimits::QSReplySizeEnsureMemoryLimits+useSink [GOOD] >> KqpLimits::QSReplySizeEnsureMemoryLimits-useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::SelectCountAsteriskFromVar [GOOD] Test command err: Trying to start YDB, gRPC: 23589, MsgBus: 5079 2025-03-31T16:20:12.560783Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488009245224768407:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:12.560887Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a37/r3tmp/tmpeStuwk/pdisk_1.dat TServer::EnableGrpc on GrpcPort 23589, node 1 2025-03-31T16:20:12.885459Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:20:12.885498Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:20:12.897252Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:20:12.912995Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:12.913021Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:12.913038Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:12.913179Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:20:12.934310Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Unknown -> Disconnected 2025-03-31T16:20:12.934420Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:12.936034Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5079 TClient is connected to server localhost:5079 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:13.408350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:13.432527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:13.532521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:13.669216Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:13.735135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:15.183376Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009258109672075:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:15.183496Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:15.446768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:20:15.475683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:20:15.503830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:20:15.560896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:20:15.589689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:20:15.655408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:20:15.730207Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009258109672594:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:15.730336Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:15.730434Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009258109672599:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:15.733339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:20:15.741929Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488009258109672601:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:20:15.831516Z node 1 :TX_PROXY ERROR: Actor# [1:7488009258109672656:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 17717, MsgBus: 6035 2025-03-31T16:20:17.526462Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488009267357600496:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:17.526543Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a37/r3tmp/tmpwnw7id/pdisk_1.dat 2025-03-31T16:20:17.633872Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17717, node 2 2025-03-31T16:20:17.675448Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:17.675626Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:17.678210Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:20:17.711266Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:17.711296Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:17.711305Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:17.711423Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6035 TClient is connected to server localhost:6035 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:18.119564Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:20:18.124428Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:20:18.137044Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:20:18.184608Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-31T16:20:18.325512Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:18.396107Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:20.430605Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488009280242504171:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:20.430696Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:20.470510Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:20:20.502123Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:20:20.532790Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:20:20.566818Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:20:20.597402Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:20:20.631771Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:20:20.711789Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488009280242504689:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:20.711862Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:20.711906Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488009280242504694:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:20.715110Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:20:20.724379Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488009280242504696:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:20:20.805384Z node 2 :TX_PROXY ERROR: Actor# [2:7488009280242504751:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 19597, MsgBus: 9990 2025-03-31T16:20:22.620039Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488009288548513687:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:22.620124Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a37/r3tmp/tmpPmpBeN/pdisk_1.dat 2025-03-31T16:20:22.711349Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19597, node 3 2025-03-31T16:20:22.753607Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:22.753722Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:22.756151Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:20:22.783255Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:22.783283Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:22.783292Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:22.783442Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9990 TClient is connected to server localhost:9990 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:23.230279Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:20:23.239188Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:20:23.247613Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:23.322816Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:23.477893Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:23.556736Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:25.692922Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488009301433417343:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:25.693021Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:25.744615Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:20:25.780408Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:20:25.817726Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:20:25.846113Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:20:25.874026Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:20:25.942619Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:20:25.986340Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488009301433417862:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:25.986466Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:25.986531Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488009301433417867:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:25.990194Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:20:25.999662Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488009301433417869:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:20:26.083920Z node 3 :TX_PROXY ERROR: Actor# [3:7488009305728385218:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpExplain::MergeConnection [GOOD] >> KqpExplain::IdxFullscan >> TKeyValueTest::TestRenameToLongKey [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::JoinStatsBasicScan [GOOD] Test command err: Trying to start YDB, gRPC: 18729, MsgBus: 11297 2025-03-31T16:20:09.399431Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488009235218828469:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:09.399523Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a54/r3tmp/tmpSsQlyK/pdisk_1.dat 2025-03-31T16:20:09.676917Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:09.677010Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:09.680881Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18729, node 1 2025-03-31T16:20:09.706298Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:20:09.707150Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:20:09.722325Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:20:09.737562Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:09.737589Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:09.737596Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:09.737718Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11297 TClient is connected to server localhost:11297 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:20:10.173528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:10.192812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:10.309841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:10.447555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:10.524267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:12.217544Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009248103732146:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:12.217671Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:12.488953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:20:12.516169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:20:12.543730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:20:12.571911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:20:12.599956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:20:12.666745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:20:12.703703Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009248103732660:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:12.703808Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:12.704048Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009248103732665:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:12.707386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:20:12.716343Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488009248103732667:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:20:12.784150Z node 1 :TX_PROXY ERROR: Actor# [1:7488009248103732721:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:20:14.399419Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488009235218828469:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:14.399511Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 29691, MsgBus: 17832 2025-03-31T16:20:15.134514Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488009260843734742:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:15.134569Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a54/r3tmp/tmprd3fBi/pdisk_1.dat 2025-03-31T16:20:15.246726Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:20:15.276366Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:15.276449Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:15.278181Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29691, node 2 2025-03-31T16:20:15.312445Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:15.312471Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:15.312490Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:15.312614Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17832 TClient is connected to server localhost:17832 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:20:15.693287Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:15.700641Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:15.769886Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:15.912088Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:15.973622Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:17.930572Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488009269433671092:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:17.930677Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:17.974309Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:20:18.003001Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:20:18.033124Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:20:18.059516Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:20:18.101048Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:20:18.166528Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:20:18.205629Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488009273728638904:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:18.205742Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:18.205834Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488009273728638909:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:18.209102Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:20:18.217754Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488009273728638911:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:20:18.311129Z node 2 :TX_PROXY ERROR: Actor# [2:7488009273728638964:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 6064, MsgBus: 32435 2025-03-31T16:20:20.377284Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488009280999530754:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:20.377338Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a54/r3tmp/tmpY9uVZZ/pdisk_1.dat 2025-03-31T16:20:20.478194Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6064, node 3 2025-03-31T16:20:20.511311Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:20.511384Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:20.513799Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:20:20.549570Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:20.549596Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:20.549607Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:20.549756Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32435 TClient is connected to server localhost:32435 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:20.991883Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:20.999687Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:20:21.071515Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:21.223068Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:21.298262Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:23.458786Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488009293884434416:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:23.459022Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:23.495522Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:20:23.532163Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:20:23.560804Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:20:23.587948Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:20:23.617577Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:20:23.684857Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:20:23.729819Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488009293884434932:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:23.729913Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488009293884434937:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:23.729915Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:23.734047Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:20:23.743159Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488009293884434939:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:20:23.821707Z node 3 :TX_PROXY ERROR: Actor# [3:7488009293884434992:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:20:26.895834Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488009280999530754:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:27.085789Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:20:27.574131Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438025392, txId: 281474976715671] shutting down >> KqpQuery::OlapCreateAsSelect_Complex [GOOD] >> KqpLimits::LargeParametersAndMkqlFailure ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestRenameToLongKey [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:52:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:52:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:78:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:79:2110] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:79:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! 
Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:82:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:83:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:83:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvGetStorageChannelStatus ! 
Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:82:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:86:2057] recipient: [7:84:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:84:2115] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:141:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:82:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:85:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:86:2057] recipient: [8:84:2115] Leader for TabletID 72057594037927937 is [8:87:2116] sender: [8:88:2057] recipient: [8:84:2115] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! new actor is[8:87:2116] Leader for TabletID 72057594037927937 is [8:87:2116] sender: [8:141:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:84:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:88:2057] recipient: [9:86:2117] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:90:2057] recipient: [9:86:2117] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! new actor is[9:89:2118] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:143:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:52:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:52:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:84:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:87:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:88:2057] recipient: [10:86:2117] Leader for TabletID 72057594037927937 is [10:89:2118] sender: [10:90:2057] recipient: [10:86:2117] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:89:2118] Leader for TabletID 72057594037927937 is [10:89:2118] sender: [10:143:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:86:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:89:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:90:2057] recipient: [11:88:2119] Leader for TabletID 72057594037927937 is [11:91:2120] sender: [11:92:2057] recipient: [11:88:2119] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! new actor is[11:91:2120] Leader for TabletID 72057594037927937 is [11:91:2120] sender: [11:145:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Re ... 
is [18:56:2097] sender: [18:95:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:96:2057] recipient: [18:94:2123] Leader for TabletID 72057594037927937 is [18:97:2124] sender: [18:98:2057] recipient: [18:94:2123] !Reboot 72057594037927937 (actor [18:56:2097]) rebooted! !Reboot 72057594037927937 (actor [18:56:2097]) tablet resolver refreshed! new actor is[18:97:2124] Leader for TabletID 72057594037927937 is [18:97:2124] sender: [18:151:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:51:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:57:2057] recipient: [19:51:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:74:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:51:2095] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:57:2057] recipient: [20:51:2095] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:74:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:50:2095] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:57:2057] recipient: [21:50:2095] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:74:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:76:2057] recipient: [21:36:2083] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:78:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:80:2057] recipient: [21:79:2110] Leader for TabletID 72057594037927937 is [21:81:2111] sender: [21:82:2057] recipient: [21:79:2110] !Reboot 72057594037927937 (actor [21:56:2097]) rebooted! !Reboot 72057594037927937 (actor [21:56:2097]) tablet resolver refreshed! new actor is[21:81:2111] Leader for TabletID 72057594037927937 is [21:81:2111] sender: [21:135:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:57:2057] recipient: [22:51:2095] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:74:2057] recipient: [22:14:2061] !Reboot 72057594037927937 (actor [22:56:2097]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:76:2057] recipient: [22:36:2083] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:79:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:80:2057] recipient: [22:78:2110] Leader for TabletID 72057594037927937 is [22:81:2111] sender: [22:82:2057] recipient: [22:78:2110] !Reboot 72057594037927937 (actor [22:56:2097]) rebooted! !Reboot 72057594037927937 (actor [22:56:2097]) tablet resolver refreshed! 
new actor is[22:81:2111] Leader for TabletID 72057594037927937 is [22:81:2111] sender: [22:135:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:54:2057] recipient: [23:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:54:2057] recipient: [23:51:2095] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:57:2057] recipient: [23:51:2095] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:74:2057] recipient: [23:14:2061] !Reboot 72057594037927937 (actor [23:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:77:2057] recipient: [23:36:2083] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:80:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:81:2057] recipient: [23:79:2110] Leader for TabletID 72057594037927937 is [23:82:2111] sender: [23:83:2057] recipient: [23:79:2110] !Reboot 72057594037927937 (actor [23:56:2097]) rebooted! !Reboot 72057594037927937 (actor [23:56:2097]) tablet resolver refreshed! new actor is[23:82:2111] Leader for TabletID 72057594037927937 is [23:82:2111] sender: [23:136:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:54:2057] recipient: [24:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:54:2057] recipient: [24:51:2095] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:57:2057] recipient: [24:51:2095] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:74:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:80:2057] recipient: [24:36:2083] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:83:2057] recipient: [24:82:2113] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:84:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [24:85:2114] sender: [24:86:2057] recipient: [24:82:2113] !Reboot 72057594037927937 (actor [24:56:2097]) rebooted! !Reboot 72057594037927937 (actor [24:56:2097]) tablet resolver refreshed! new actor is[24:85:2114] Leader for TabletID 72057594037927937 is [24:85:2114] sender: [24:139:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:54:2057] recipient: [25:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:54:2057] recipient: [25:51:2095] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:57:2057] recipient: [25:51:2095] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:74:2057] recipient: [25:14:2061] !Reboot 72057594037927937 (actor [25:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:80:2057] recipient: [25:36:2083] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:83:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:84:2057] recipient: [25:82:2113] Leader for TabletID 72057594037927937 is [25:85:2114] sender: [25:86:2057] recipient: [25:82:2113] !Reboot 72057594037927937 (actor [25:56:2097]) rebooted! !Reboot 72057594037927937 (actor [25:56:2097]) tablet resolver refreshed! 
new actor is[25:85:2114] Leader for TabletID 72057594037927937 is [25:85:2114] sender: [25:139:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:54:2057] recipient: [26:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:54:2057] recipient: [26:52:2095] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:57:2057] recipient: [26:52:2095] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:74:2057] recipient: [26:14:2061] !Reboot 72057594037927937 (actor [26:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:81:2057] recipient: [26:36:2083] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:84:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:85:2057] recipient: [26:83:2113] Leader for TabletID 72057594037927937 is [26:86:2114] sender: [26:87:2057] recipient: [26:83:2113] !Reboot 72057594037927937 (actor [26:56:2097]) rebooted! !Reboot 72057594037927937 (actor [26:56:2097]) tablet resolver refreshed! new actor is[26:86:2114] Leader for TabletID 72057594037927937 is [26:86:2114] sender: [26:140:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:54:2057] recipient: [27:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:54:2057] recipient: [27:50:2095] Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:57:2057] recipient: [27:50:2095] Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:74:2057] recipient: [27:14:2061] !Reboot 72057594037927937 (actor [27:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:84:2057] recipient: [27:36:2083] Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:87:2057] recipient: [27:14:2061] Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:88:2057] recipient: [27:86:2116] Leader for TabletID 72057594037927937 is [27:89:2117] sender: [27:90:2057] recipient: [27:86:2116] !Reboot 72057594037927937 (actor [27:56:2097]) rebooted! !Reboot 72057594037927937 (actor [27:56:2097]) tablet resolver refreshed! new actor is[27:89:2117] Leader for TabletID 72057594037927937 is [27:89:2117] sender: [27:143:2057] recipient: [27:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:54:2057] recipient: [28:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:54:2057] recipient: [28:51:2095] Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:57:2057] recipient: [28:51:2095] Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:74:2057] recipient: [28:14:2061] !Reboot 72057594037927937 (actor [28:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:84:2057] recipient: [28:36:2083] Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:87:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:88:2057] recipient: [28:86:2116] Leader for TabletID 72057594037927937 is [28:89:2117] sender: [28:90:2057] recipient: [28:86:2116] !Reboot 72057594037927937 (actor [28:56:2097]) rebooted! !Reboot 72057594037927937 (actor [28:56:2097]) tablet resolver refreshed! 
new actor is[28:89:2117] Leader for TabletID 72057594037927937 is [28:89:2117] sender: [28:143:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:54:2057] recipient: [29:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:54:2057] recipient: [29:51:2095] Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:57:2057] recipient: [29:51:2095] Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:74:2057] recipient: [29:14:2061] !Reboot 72057594037927937 (actor [29:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:85:2057] recipient: [29:36:2083] Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:87:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:89:2057] recipient: [29:88:2116] Leader for TabletID 72057594037927937 is [29:90:2117] sender: [29:91:2057] recipient: [29:88:2116] !Reboot 72057594037927937 (actor [29:56:2097]) rebooted! !Reboot 72057594037927937 (actor [29:56:2097]) tablet resolver refreshed! new actor is[29:90:2117] Leader for TabletID 72057594037927937 is [29:90:2117] sender: [29:144:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:54:2057] recipient: [30:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:54:2057] recipient: [30:51:2095] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:57:2057] recipient: [30:51:2095] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:74:2057] recipient: [30:14:2061] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::MultiJoinCteLinks [GOOD] Test command err: Trying to start YDB, gRPC: 12552, MsgBus: 18444 2025-03-31T16:20:06.798458Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488009221006913517:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:06.798536Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a62/r3tmp/tmp11rIml/pdisk_1.dat 2025-03-31T16:20:07.057854Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12552, node 1 2025-03-31T16:20:07.132704Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:07.132756Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:07.132769Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:07.132933Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:20:07.144209Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:07.144366Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:07.146595Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18444 TClient is connected to server localhost:18444 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:07.593682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:07.621550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:07.727013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:07.862912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:07.914377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:09.491135Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009233891817178:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:09.491258Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:09.754582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:20:09.779372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:20:09.808503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:20:09.839057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:20:09.864869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:20:09.893248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:20:09.974239Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009233891817695:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:09.974331Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:09.974366Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009233891817700:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:09.977334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:20:09.986184Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488009233891817702:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:20:10.077547Z node 1 :TX_PROXY ERROR: Actor# [1:7488009238186785053:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["EightShard"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"TopSort","Limit":"4","TopSortBy":"row.Data"},{"Scan":"Parallel","ReadRange":["Key [150, 266]"],"E-Size":"No estimate","Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/EightShard","E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data","Key","Text"],"E-Cost":"No estimate"}],"Node Type":"TopSort-TableRangeScan"}],"Node Type":"Merge","SortColumns":["Data (Asc)"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Limit","Limit":"4"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/EightShard","reads":[{"columns":["Data","Key","Text"],"scan_by":["Key [150, 266]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","ReadRange":["Key [150, 266]"],"E-Size":"No estimate","Name":"TableRangeScan","Path":"\/Root\/EightShard","E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data","Key","Text"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Operators":[{"Name":"TopSort","Limit":"4","TopSortBy":"row.Data"}],"Node Type":"TopSort"}],"Operators":[{"Name":"Limit","Limit":"4"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 10468, MsgBus: 13868 2025-03-31T16:20:11.799782Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488009242978891825:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:11.799882Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a62/r3tmp/tmp8M5y02/pdisk_1.dat 2025-03-31T16:20:11.901917Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10468, node 2 2025-03-31T16:20:11.940763Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:11.940858Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:11.943053Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:20:11.962885Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:11.962909Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:11.962922Z node 2 :NET_CLASSIFIER WARN: failed to initialize 
from file: (empty maybe) 2025-03-31T16:20:11.963031Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13868 TClient is connected to server localhost:13868 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 ... :7762515]; 2025-03-31T16:20:22.015115Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 30388, MsgBus: 22074 2025-03-31T16:20:23.162848Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488009292427271363:2139];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:23.168765Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a62/r3tmp/tmpRRtb92/pdisk_1.dat 2025-03-31T16:20:23.275618Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:20:23.292206Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:23.292279Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:23.293541Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30388, node 4 2025-03-31T16:20:23.360800Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:23.360826Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:23.360836Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:23.360974Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22074 TClient is connected to server localhost:22074 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:23.880485Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:23.897055Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:23.973045Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:24.163294Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:24.270066Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:26.485981Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488009305312174956:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:26.486074Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:26.528901Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:20:26.559545Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:20:26.589089Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:20:26.618765Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:20:26.650070Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:20:26.682852Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:20:26.723268Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488009305312175466:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:26.723348Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:26.723432Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488009305312175471:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:26.727201Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:20:26.737528Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7488009305312175473:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:20:26.830174Z node 4 :TX_PROXY ERROR: Actor# [4:7488009305312175527:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:20:28.161074Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7488009292427271363:2139];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:28.161152Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":12,"Plans":[{"PlanNodeId":11,"Plans":[{"PlanNodeId":10,"Plans":[{"PlanNodeId":9,"Plans":[{"E-Size":"No estimate","PlanNodeId":8,"LookupKeyColumns":["Key"],"Node Type":"TableLookup","Path":"\/Root\/EightShard","Columns":["Data","Key","Text"],"E-Rows":"No estimate","Table":"EightShard","Plans":[{"PlanNodeId":7,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Iterator":"PartitionByKey","Name":"Iterator"},{"Inputs":[],"Name":"PartitionByKey","Input":"precompute_0_0"}],"Node Type":"ConstantExpr-Aggregate","CTE Name":"precompute_0_0"}],"PlanNodeType":"Connection","E-Cost":"No estimate"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Limit","Limit":"1001"},{"Inputs":[{"InternalOperatorId":3},{"InternalOperatorId":2}],"E-Rows":"No estimate","Condition":"es.Key = kv.Key","Name":"InnerJoin (MapJoin)","E-Size":"No estimate","E-Cost":"No estimate"},{"Inputs":[],"ToFlow":"precompute_0_0","Name":"ToFlow"},{"Inputs":[{"ExternalPlanNodeId":8}],"E-Rows":"No estimate","Predicate":"Exist(item.Key)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Limit-InnerJoin (MapJoin)-ConstantExpr-Filter","CTE Name":"precompute_0_0"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":10}],"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"},{"PlanNodeId":5,"Subplan Name":"CTE precompute_0_0","Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["KeyValue"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/KeyValue","ReadRangesPointPrefixLen":"0","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Key","Value"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"Collect"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/EightShard","reads":[{"lookup_by":["Key"],"columns":["Data","Key","Text"],"type":"Lookup"}]},{"name":"\/Root\/KeyValue","reads":[{"columns":["Key","Value"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":7,"Operators":[{"E-Rows":"No estimate","Columns":["Data","Key","Text"],"E-Size":"No estimate","E-Cost":"No 
estimate","Name":"TableLookup","Table":"EightShard","LookupKeyColumns":["Key"]}],"Node Type":"TableLookup","PlanNodeType":"Connection"}],"Operators":[{"E-Rows":"No estimate","Predicate":"Exist(item.Key)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"},{"PlanNodeId":13,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Path":"\/Root\/KeyValue","ReadRangesPointPrefixLen":"0","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Key","Value"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Condition":"es.Key = kv.Key","Name":"InnerJoin (MapJoin)","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"InnerJoin (MapJoin)"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} >> KqpQuery::CurrentUtcTimestamp [GOOD] >> KqpQuery::CreateAsSelect_BadCases >> Cdc::InitialScanEnqueuesZeroRecords [GOOD] >> Cdc::InitialScanRacyProgressAndDrop >> KqpQuery::QueryCacheTtl >> KqpStats::JoinStatsBasicYql-StreamLookupJoin [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::OlapCreateAsSelect_Complex [GOOD] Test command err: Trying to start YDB, gRPC: 16680, MsgBus: 18375 2025-03-31T16:20:13.025536Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488009248836431348:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:13.025631Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a2f/r3tmp/tmpPiPGmr/pdisk_1.dat 2025-03-31T16:20:13.299816Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16680, node 1 2025-03-31T16:20:13.358602Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:13.358630Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:13.358636Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:13.358735Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:20:13.377930Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:13.378071Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:13.379819Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18375 TClient is connected to server localhost:18375 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:13.855798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:13.879121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:13.992997Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:14.127149Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:14.201063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:15.964466Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009257426367721:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:15.964590Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:16.244992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:20:16.275394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:20:16.302630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:20:16.329076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:20:16.355463Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:20:16.384557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:20:16.425584Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009261721335527:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:16.425654Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009261721335532:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:16.425692Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:16.429560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:20:16.441412Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488009261721335534:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:20:16.504100Z node 1 :TX_PROXY ERROR: Actor# [1:7488009261721335588:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 23736, MsgBus: 20288 2025-03-31T16:20:18.445769Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488009272223347150:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:18.445889Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a2f/r3tmp/tmprUw0JF/pdisk_1.dat 2025-03-31T16:20:18.545951Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23736, node 2 2025-03-31T16:20:18.577528Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:18.577618Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:18.579386Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:20:18.593640Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:18.593665Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:18.593672Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:18.593776Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20288 TClient is connected to server localhost:20288 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:19.005139Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:19.017214Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:20:19.087234Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:19.226195Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:19.294905Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:21.257432Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488009285108250805:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:21.257518Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: { ... apshotFromChunks;id=13; 2025-03-31T16:20:27.586872Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037911;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:20:27.587017Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:20:27.587073Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:20:27.587107Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037911;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:20:27.587139Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037911;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:20:27.587182Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:20:27.587216Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:20:27.587282Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037911;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T16:20:27.587320Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037911;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T16:20:27.587383Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:20:27.587417Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:20:27.587532Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:20:27.587571Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:20:27.587629Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:20:27.587659Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:20:27.587698Z node 3 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:20:27.587730Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:20:27.588030Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:20:27.588076Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:20:27.588185Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:20:27.588219Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:20:27.588241Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:20:27.588266Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:20:27.588410Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:20:27.588445Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:20:27.588446Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:20:27.588494Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:20:27.588554Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:20:27.588609Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:20:27.588648Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:20:27.588679Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:20:27.588685Z node 3 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:20:27.588709Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:20:27.588757Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:20:27.588802Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:20:27.588904Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:20:27.588939Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:20:27.589047Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T16:20:27.589072Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T16:20:27.589292Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:20:27.589343Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:20:27.589542Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:20:27.589580Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:20:27.589733Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:20:27.589781Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:20:27.589964Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:20:27.589999Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:20:27.590128Z node 3 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T16:20:27.590165Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T16:20:27.607554Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037911;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715666;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715666; 2025-03-31T16:20:27.607602Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715666;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715666; 2025-03-31T16:20:27.613438Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715666;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715666; 2025-03-31T16:20:27.613925Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037910;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715666;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715666; 2025-03-31T16:20:28.122470Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488009293586681922:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:28.122550Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpParams::CheckCacheByAst [GOOD] >> KqpParams::CheckQueryCacheForExecuteAndPreparedQueries [GOOD] >> KqpParams::CheckCacheWithRecompilationQuery >> TKeyValueTest::TestGetStatusWorks [GOOD] >> KqpParams::Decimal+QueryService+UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::JoinStatsBasicYql-StreamLookupJoin [GOOD] Test command err: Trying to start YDB, gRPC: 27539, MsgBus: 20370 2025-03-31T16:20:14.125082Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488009256787757793:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:14.125229Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a0a/r3tmp/tmpjUUFhg/pdisk_1.dat 2025-03-31T16:20:14.442777Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:20:14.490384Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:14.490506Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 27539, node 1 2025-03-31T16:20:14.492350Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:20:14.520137Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:14.520163Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:14.520171Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:14.520307Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 
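The TX_COLUMNSHARD records above show each column shard (tablets 72075186224037909 through 72075186224037911) running its normalizer chain during TTxUpdateSchema, from Granules (id 1) to RestoreV2Chunks (id 16), with the per-tablet sequences interleaved. A small sketch that recovers the completion order per tablet from text in exactly the format logged above; the regex mirrors those records verbatim and is otherwise an assumption, not a stable log contract.

    import re
    from collections import defaultdict

    # Matches the "normalizer_finished" records above, e.g.
    # tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;
    # event=normalizer_finished;description=CLASS_NAME=Granules;id=1;
    FINISHED = re.compile(
        r"tablet_id=(\d+);process=TTxUpdateSchema::Execute;"
        r"fline=abstract\.cpp:\d+;event=normalizer_finished;"
        r"description=CLASS_NAME=([^;]+);id=(\d+);"
    )

    def normalizer_order(log_text: str) -> dict:
        """Map tablet_id -> [(normalizer CLASS_NAME, id), ...] in finish order."""
        order = defaultdict(list)
        for tablet, name, norm_id in FINISHED.findall(log_text):
            order[tablet].append((name, int(norm_id)))
        return dict(order)

For tablet 72075186224037910 in the text above this yields Granules (1), Chunks (2), TablesCleaner (4), CleanGranuleId (6), CleanInsertionDedup (8), GCCountersNormalizer (9), RestorePortionFromChunks (10), SyncPortionFromChunks (11), SyncMinSnapshotFromChunks (13), RestoreV1Chunks_V2 (15), RestoreV2Chunks (16), the order shown in the log; "normalizer_switched" records are deliberately excluded by requiring a numeric id.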
TClient is connected to server localhost:20370 TClient is connected to server localhost:20370 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:14.966512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:14.990275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:15.151533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:15.298046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:15.373065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:16.945194Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009265377694188:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:16.945315Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:17.270797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:20:17.298137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:20:17.321431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:20:17.344646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:20:17.371117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:20:17.438925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:20:17.476861Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009269672661999:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:17.476915Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:17.476972Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009269672662004:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:17.480205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:20:17.489446Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488009269672662006:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:20:17.576057Z node 1 :TX_PROXY ERROR: Actor# [1:7488009269672662060:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:20:18.764099Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438018756, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 12573, MsgBus: 3602 2025-03-31T16:20:19.487733Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488009276183076929:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:19.487832Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a0a/r3tmp/tmpDk8Unl/pdisk_1.dat 2025-03-31T16:20:19.604085Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12573, node 2 2025-03-31T16:20:19.635464Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:19.635588Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:19.637789Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:20:19.658266Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:19.658286Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:19.658293Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:19.658398Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3602 TClient is connected to server localhost:3602 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:20.033569Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:20:20.049050Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:20.106312Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:20.246803Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:20.306704Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:22.677779Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488009289067980593:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:22.677867Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:22.721654Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:20:22.750672Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:20:22.780037Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:20:22.809319Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:20:22.848452Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:20:22.879370Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:20:22.916286Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488009289067981101:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:22.916378Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488009289067981106:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:22.916390Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:22.919249Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:20:22.927899Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488009289067981108:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:20:23.007834Z node 2 :TX_PROXY ERROR: Actor# [2:7488009293362948458:3442] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:20:23.899106Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438023915, txId: 281474976715671] shutting down Trying to start YDB, gRPC: 19005, MsgBus: 17717 2025-03-31T16:20:24.671801Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488009297842111907:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:24.671908Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a0a/r3tmp/tmpZlbrt8/pdisk_1.dat 2025-03-31T16:20:24.782263Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19005, node 3 2025-03-31T16:20:24.818390Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:24.818483Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:24.820284Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:20:24.842899Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:24.842924Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:24.842932Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:24.843064Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17717 TClient is connected to server localhost:17717 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:25.282966Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:20:25.295587Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:25.372558Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:25.522539Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:25.582915Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:27.931532Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488009310727015570:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:27.931622Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:27.976539Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:20:28.005394Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:20:28.048133Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:20:28.075668Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:20:28.106642Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:20:28.137714Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:20:28.217291Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488009315021983380:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:28.217369Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:28.217409Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488009315021983385:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:28.220645Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:20:28.229130Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488009315021983387:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:20:28.322796Z node 3 :TX_PROXY ERROR: Actor# [3:7488009315021983442:3452] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:20:29.671876Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488009297842111907:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:29.671936Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpStats::RequestUnitForExecute [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestGetStatusWorks [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:52:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:52:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:78:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:79:2110] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:79:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! 
new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:82:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:83:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:83:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! 
new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:140:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:83:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:86:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:87:2057] recipient: [8:85:2115] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:89:2057] recipient: [8:85:2115] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! new actor is[8:88:2116] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:142:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:83:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:86:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:85:2115] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:89:2057] recipient: [9:85:2115] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! 
new actor is[9:88:2116] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:142:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:52:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:52:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:84:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:87:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:88:2057] recipient: [10:86:2115] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:90:2057] recipient: [10:86:2115] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:89:2116] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:143:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:86:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:89:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:90:2057] recipient: [11:88:2117] Leader for TabletID 72057594037927937 is [11:91:2118] sender: [11:92:2057] recipient: [11:88:2117] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! new actor is[11:91:2118] Leader for TabletID 72057594037927937 is [11:91:2118] sender: [11:145:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (acto ... or TabletID 72057594037927937 is [29:56:2097] sender: [29:89:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:90:2057] recipient: [29:88:2117] Leader for TabletID 72057594037927937 is [29:91:2118] sender: [29:92:2057] recipient: [29:88:2117] !Reboot 72057594037927937 (actor [29:56:2097]) rebooted! !Reboot 72057594037927937 (actor [29:56:2097]) tablet resolver refreshed! 
new actor is[29:91:2118] Leader for TabletID 72057594037927937 is [29:91:2118] sender: [29:145:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:54:2057] recipient: [30:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:54:2057] recipient: [30:51:2095] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:57:2057] recipient: [30:51:2095] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:74:2057] recipient: [30:14:2061] !Reboot 72057594037927937 (actor [30:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:87:2057] recipient: [30:36:2083] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:90:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:91:2057] recipient: [30:89:2117] Leader for TabletID 72057594037927937 is [30:92:2118] sender: [30:93:2057] recipient: [30:89:2117] !Reboot 72057594037927937 (actor [30:56:2097]) rebooted! !Reboot 72057594037927937 (actor [30:56:2097]) tablet resolver refreshed! new actor is[30:92:2118] Leader for TabletID 72057594037927937 is [30:92:2118] sender: [30:146:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:54:2057] recipient: [31:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:54:2057] recipient: [31:52:2095] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:57:2057] recipient: [31:52:2095] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:74:2057] recipient: [31:14:2061] !Reboot 72057594037927937 (actor [31:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:90:2057] recipient: [31:36:2083] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:93:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:94:2057] recipient: [31:92:2120] Leader for TabletID 72057594037927937 is [31:95:2121] sender: [31:96:2057] recipient: [31:92:2120] !Reboot 72057594037927937 (actor [31:56:2097]) rebooted! !Reboot 72057594037927937 (actor [31:56:2097]) tablet resolver refreshed! new actor is[31:95:2121] Leader for TabletID 72057594037927937 is [31:95:2121] sender: [31:149:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:54:2057] recipient: [32:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:54:2057] recipient: [32:51:2095] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:57:2057] recipient: [32:51:2095] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:74:2057] recipient: [32:14:2061] !Reboot 72057594037927937 (actor [32:56:2097]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:90:2057] recipient: [32:36:2083] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:93:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:94:2057] recipient: [32:92:2120] Leader for TabletID 72057594037927937 is [32:95:2121] sender: [32:96:2057] recipient: [32:92:2120] !Reboot 72057594037927937 (actor [32:56:2097]) rebooted! !Reboot 72057594037927937 (actor [32:56:2097]) tablet resolver refreshed! 
new actor is[32:95:2121] Leader for TabletID 72057594037927937 is [32:95:2121] sender: [32:149:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:54:2057] recipient: [33:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:54:2057] recipient: [33:50:2095] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:57:2057] recipient: [33:50:2095] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:74:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:54:2057] recipient: [34:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:54:2057] recipient: [34:51:2095] Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:57:2057] recipient: [34:51:2095] Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:74:2057] recipient: [34:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:54:2057] recipient: [35:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:54:2057] recipient: [35:51:2095] Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:57:2057] recipient: [35:51:2095] Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:74:2057] recipient: [35:14:2061] !Reboot 72057594037927937 (actor [35:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:76:2057] recipient: [35:36:2083] Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:79:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:80:2057] recipient: [35:78:2110] Leader for TabletID 72057594037927937 is [35:81:2111] sender: [35:82:2057] recipient: [35:78:2110] !Reboot 72057594037927937 (actor [35:56:2097]) rebooted! !Reboot 72057594037927937 (actor [35:56:2097]) tablet resolver refreshed! new actor is[35:81:2111] Leader for TabletID 72057594037927937 is [35:81:2111] sender: [35:135:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:54:2057] recipient: [36:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:54:2057] recipient: [36:51:2095] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:57:2057] recipient: [36:51:2095] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:74:2057] recipient: [36:14:2061] !Reboot 72057594037927937 (actor [36:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:76:2057] recipient: [36:36:2083] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:79:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:80:2057] recipient: [36:78:2110] Leader for TabletID 72057594037927937 is [36:81:2111] sender: [36:82:2057] recipient: [36:78:2110] !Reboot 72057594037927937 (actor [36:56:2097]) rebooted! !Reboot 72057594037927937 (actor [36:56:2097]) tablet resolver refreshed! 
new actor is[36:81:2111] Leader for TabletID 72057594037927937 is [36:81:2111] sender: [36:135:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:54:2057] recipient: [37:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:54:2057] recipient: [37:50:2095] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:57:2057] recipient: [37:50:2095] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:74:2057] recipient: [37:14:2061] !Reboot 72057594037927937 (actor [37:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:77:2057] recipient: [37:36:2083] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:80:2057] recipient: [37:79:2110] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:81:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [37:82:2111] sender: [37:83:2057] recipient: [37:79:2110] !Reboot 72057594037927937 (actor [37:56:2097]) rebooted! !Reboot 72057594037927937 (actor [37:56:2097]) tablet resolver refreshed! new actor is[37:82:2111] Leader for TabletID 72057594037927937 is [37:82:2111] sender: [37:136:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:54:2057] recipient: [38:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:54:2057] recipient: [38:51:2095] Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:57:2057] recipient: [38:51:2095] Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:74:2057] recipient: [38:14:2061] !Reboot 72057594037927937 (actor [38:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:79:2057] recipient: [38:36:2083] Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:82:2057] recipient: [38:81:2112] Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:83:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [38:84:2113] sender: [38:85:2057] recipient: [38:81:2112] !Reboot 72057594037927937 (actor [38:56:2097]) rebooted! !Reboot 72057594037927937 (actor [38:56:2097]) tablet resolver refreshed! new actor is[38:84:2113] Leader for TabletID 72057594037927937 is [38:84:2113] sender: [38:138:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:54:2057] recipient: [39:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:54:2057] recipient: [39:50:2095] Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:57:2057] recipient: [39:50:2095] Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:74:2057] recipient: [39:14:2061] !Reboot 72057594037927937 (actor [39:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:79:2057] recipient: [39:36:2083] Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:82:2057] recipient: [39:14:2061] Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:83:2057] recipient: [39:81:2112] Leader for TabletID 72057594037927937 is [39:84:2113] sender: [39:85:2057] recipient: [39:81:2112] !Reboot 72057594037927937 (actor [39:56:2097]) rebooted! !Reboot 72057594037927937 (actor [39:56:2097]) tablet resolver refreshed! 
new actor is[39:84:2113] Leader for TabletID 72057594037927937 is [39:84:2113] sender: [39:138:2057] recipient: [39:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:54:2057] recipient: [40:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:54:2057] recipient: [40:51:2095] Leader for TabletID 72057594037927937 is [40:56:2097] sender: [40:57:2057] recipient: [40:51:2095] Leader for TabletID 72057594037927937 is [40:56:2097] sender: [40:74:2057] recipient: [40:14:2061] !Reboot 72057594037927937 (actor [40:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [40:56:2097] sender: [40:80:2057] recipient: [40:36:2083] Leader for TabletID 72057594037927937 is [40:56:2097] sender: [40:83:2057] recipient: [40:14:2061] Leader for TabletID 72057594037927937 is [40:56:2097] sender: [40:84:2057] recipient: [40:82:2112] Leader for TabletID 72057594037927937 is [40:85:2113] sender: [40:86:2057] recipient: [40:82:2112] !Reboot 72057594037927937 (actor [40:56:2097]) rebooted! !Reboot 72057594037927937 (actor [40:56:2097]) tablet resolver refreshed! new actor is[40:85:2113] Leader for TabletID 72057594037927937 is [40:85:2113] sender: [40:139:2057] recipient: [40:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:54:2057] recipient: [41:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:54:2057] recipient: [41:51:2095] Leader for TabletID 72057594037927937 is [41:56:2097] sender: [41:57:2057] recipient: [41:51:2095] Leader for TabletID 72057594037927937 is [41:56:2097] sender: [41:74:2057] recipient: [41:14:2061]
>> KqpStats::StatsProfile [GOOD]
>> KqpStats::SelfJoin
>> KqpQuery::QueryStats-UseSink [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpParams::CheckCacheByAst [GOOD]
Test command err: Trying to start YDB, gRPC: 26608, MsgBus: 31729 2025-03-31T16:19:54.656506Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:19:54.656856Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:19:54.657036Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000aa7/r3tmp/tmpU2PgmF/pdisk_1.dat TServer::EnableGrpc on GrpcPort 26608, node 1 2025-03-31T16:19:55.083014Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:19:55.089994Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:19:55.090060Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:19:55.090096Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:19:55.090366Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:19:55.137157Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:19:55.137334Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:31729 2025-03-31T16:19:55.149010Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:31729 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:19:55.363561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:55.472257Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:55.812647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:19:56.156494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:56.420939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:57.072409Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1809:3405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:57.072556Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:57.093103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:19:57.307789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:19:57.533117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:19:57.782704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:19:58.020057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:19:58.333500Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:19:58.595259Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2395:3856], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:58.595372Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:58.595706Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2400:3861], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:58.599826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:19:58.783931Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2402:3863], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:19:58.829678Z node 1 :TX_PROXY ERROR: Actor# [1:2467:3909] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:19:59.907003Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:2738:4088] TxId: 281474976715671. Ctx: { TraceId: 01jqpg1brqc0qz8zjy7wbqp436, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjlkMGYyOWItZWJlOGVkZTQtYmEwYmI5NmQtNWM0OWY3MWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. STATUS_CODE_UNSPECIFIED: 2025-03-31T16:19:59.908112Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:2746:4129], TxId: 281474976715671, task: 3. Ctx: { SessionId : ydb://session/3?node_id=1&id=NjlkMGYyOWItZWJlOGVkZTQtYmEwYmI5NmQtNWM0OWY3MWY=. CustomerSuppliedId : . TraceId : 01jqpg1brqc0qz8zjy7wbqp436. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:2738:4088], status: UNSPECIFIED, reason: {
: Error: Terminate execution } 2025-03-31T16:19:59.909102Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:2744:4127], TxId: 281474976715671, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jqpg1brqc0qz8zjy7wbqp436. SessionId : ydb://session/3?node_id=1&id=NjlkMGYyOWItZWJlOGVkZTQtYmEwYmI5NmQtNWM0OWY3MWY=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:2738:4088], status: UNSPECIFIED, reason: {
: Error: Terminate execution } 2025-03-31T16:19:59.909322Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:2745:4128], TxId: 281474976715671, task: 2. Ctx: { TraceId : 01jqpg1brqc0qz8zjy7wbqp436. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=NjlkMGYyOWItZWJlOGVkZTQtYmEwYmI5NmQtNWM0OWY3MWY=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:2738:4088], status: UNSPECIFIED, reason: {
: Error: Terminate execution } 2025-03-31T16:19:59.910008Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjlkMGYyOWItZWJlOGVkZTQtYmEwYmI5NmQtNWM0OWY3MWY=, ActorId: [1:2702:4088], ActorState: ExecuteState, TraceId: 01jqpg1brqc0qz8zjy7wbqp436, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 31705, MsgBus: 3115 2025-03-31T16:20:02.684398Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:101:2147], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:20:02.684704Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:20:02.684787Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000aa7/r3tmp/tmpnYBUFQ/pdisk_1.dat TServer::EnableGrpc on GrpcPort 31705, node 2 2025-03-31T16:20:03.027565Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:20:03.028672Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:03.028731Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:03.028773Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:03.029162Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3115 2025-03-31T16:20:03.119976Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:03.120119Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:3115 WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-03-31T16:20:03.275422Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 ... WorkloadService] [TPoolFetcherActor] ActorId: [5:7488009287392237694:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:22.843318Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:22.882199Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:20:22.913407Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:20:22.943622Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:20:22.974293Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:20:23.003664Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:20:23.035046Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:20:23.077336Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7488009291687205496:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:23.077445Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:23.077492Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7488009291687205501:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:23.080612Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:20:23.089345Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7488009291687205503:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:20:23.150356Z node 5 :TX_PROXY ERROR: Actor# [5:7488009291687205556:3446] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:20:24.352477Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7488009274507334032:2057];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:24.352553Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 19231, MsgBus: 10089 2025-03-31T16:20:25.143566Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7488009303474876605:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:25.143701Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000aa7/r3tmp/tmpnF2ERD/pdisk_1.dat 2025-03-31T16:20:25.267296Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:20:25.300046Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:25.300148Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:25.301704Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19231, node 6 2025-03-31T16:20:25.354115Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:25.354142Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:25.354151Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:25.354320Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10089 TClient is connected to server localhost:10089 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:20:26.008618Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:26.023460Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:26.099775Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:26.276311Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:26.345130Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:29.173967Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488009320654747571:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:29.174063Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:29.214196Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:20:29.245664Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:20:29.277407Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:20:29.309945Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:20:29.342494Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:20:29.376162Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:20:29.446835Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488009320654748085:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:29.446933Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:29.446980Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488009320654748090:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:29.450346Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:20:29.459660Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7488009320654748092:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:20:29.531611Z node 6 :TX_PROXY ERROR: Actor# [6:7488009320654748145:3449] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:20:30.143720Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7488009303474876605:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:30.143802Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpParams::Decimal+QueryService+UseSink [GOOD]
Test command err: Trying to start YDB, gRPC: 17237, MsgBus: 22437 2025-03-31T16:20:05.434428Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488009214644239459:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:05.434516Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a7e/r3tmp/tmpLmkyhD/pdisk_1.dat 2025-03-31T16:20:05.700485Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17237, node 1 2025-03-31T16:20:05.769692Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:05.769728Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:05.769738Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:05.769895Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:20:05.793377Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:05.793516Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:05.794983Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22437 TClient is connected to server localhost:22437 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:20:06.112732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:06.130367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:06.245306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:06.385235Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:06.454898Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:08.004589Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009227529143129:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:08.004677Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:08.234184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:20:08.261143Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:20:08.285866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:20:08.313877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:20:08.339981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:20:08.371560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:20:08.403425Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009227529143638:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:08.403481Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:08.403552Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009227529143643:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:08.406674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:20:08.414459Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488009227529143645:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:20:08.507228Z node 1 :TX_PROXY ERROR: Actor# [1:7488009227529143702:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:20:09.324144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:20:10.254442Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488009236119078826:2530], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:4:17: Error: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject, At function: SqlProjectItem
:3:25: Error: At function: Parameter, At function: DataType
:3:25: Error: Invalid decimal precision: 99 2025-03-31T16:20:10.255307Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmZhYzQwMTktM2YwOTUxZTQtY2E5ZmJhZGUtYWQ5NGNhOTI=, ActorId: [1:7488009231824111251:2487], ActorState: ExecuteState, TraceId: 01jqpg1nvsebjsksrjs6576jwq, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-03-31T16:20:10.360620Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmZhYzQwMTktM2YwOTUxZTQtY2E5ZmJhZGUtYWQ5NGNhOTI=, ActorId: [1:7488009231824111251:2487], ActorState: ExecuteState, TraceId: 01jqpg1nwrap0vycajr6zs4qav, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:1294: ydb/core/kqp/query_data/kqp_query_data.cpp:271: Parameter $value22 type mismatch, expected: { Kind: Data Data { Scheme: 4865 DecimalParams { Precision: 22 Scale: 9 } } }, actual: Type (Data), schemeType: Decimal(35,10), schemeTypeId: 4865 2025-03-31T16:20:10.380264Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488009236119078854:2536], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:7:29: Error: At function: KiWriteTable!
:7:50: Error: Failed to convert type: Struct<'Key':Int32,'Value22':Decimal(35,10),'Value35':Decimal(35,10)> to Struct<'Key':Int32?,'Value22':Decimal(22,9)?,'Value35':Decimal(35,10)?>
:4:25: Error: Implicit decimal cast would lose precision
:7:50: Error: Failed to convert 'Value22': Decimal(35,10) to Optional
:7:50: Error: Failed to convert input columns types to scheme types, code: 2031 2025-03-31T16:20:10.380532Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmZhYzQwMTktM2YwOTUxZTQtY2E5ZmJhZGUtYWQ5NGNhOTI=, ActorId: [1:7488009231824111251:2487], ActorState: ExecuteState, TraceId: 01jqpg1p005etjx9a568xr6z51, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-03-31T16:20:10.397167Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488009236119078863:2540], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:3:29: Error: At function: KiWriteTable!
:3:50: Error: Failed to convert type: Struct<'Key':Int32,'Value22':Decimal(35,10),'Value35':Decimal(35,10)> to Struct<'Key':Int32?,'Value22':Decimal(22,9)?,'Value35':Decimal(35,10)?>
:0:14: Error: Implicit decimal cast would lose precision
:3:50: Error: Failed to convert 'Value22': Decimal(35,10) to Optional
:3:50: Error: Failed to convert input columns types to scheme types, code: 2031 2025-03-31T16:20:10.397463Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmZhYzQwMTktM2YwOTUxZTQtY2E5ZmJhZGUtYWQ5NGNhOTI=, ActorId: [1:7488009231824111251:2487], ActorState: ExecuteState, TraceId: 01jqpg1p0ja5nkp8s1yxtgy78s, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-03-31T16:20:10.434502Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488009214644239459:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:10.434562Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 64282, MsgBus: 29143 2025-03-31T16:20:11.532775Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488009241862574714:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:11.532905Z node 2 :METADATA_PROVIDER ERROR: fline=access ... , code: 2031 2025-03-31T16:20:23.853081Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZDE1NzhhMTAtOTE2ZGI0ZjYtMWYxNTJmMTgtNTk0NzdjZmI=, ActorId: [3:7488009289742153716:2496], ActorState: ExecuteState, TraceId: 01jqpg234y8dn7r4aajr9h8hzn, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 15174, MsgBus: 32274 2025-03-31T16:20:25.252894Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488009303064346412:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:25.252948Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a7e/r3tmp/tmpFJdML4/pdisk_1.dat 2025-03-31T16:20:25.363175Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:20:25.392514Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:25.392617Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:25.394597Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15174, node 4 2025-03-31T16:20:25.433700Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:25.433731Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:25.433739Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:25.433880Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32274 TClient is connected to server localhost:32274 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:25.973371Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:25.992335Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:26.061620Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:26.202181Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:26.271480Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:28.762133Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488009315949250075:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:28.762237Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:28.810550Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:20:28.844469Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:20:28.878337Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:20:28.913259Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:20:28.946480Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:20:28.981278Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:20:29.025733Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488009320244217879:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:29.025840Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:29.025897Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488009320244217884:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:29.029533Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:20:29.039492Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7488009320244217886:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:20:29.093142Z node 4 :TX_PROXY ERROR: Actor# [4:7488009320244217939:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:20:29.972760Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:20:30.252975Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7488009303064346412:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:30.253057Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:20:30.843855Z node 4 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [4:7488009324539185817:2546], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:4:17: Error: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject, At function: SqlProjectItem
:3:25: Error: At function: Parameter, At function: DataType
:3:25: Error: Invalid decimal precision: 99 2025-03-31T16:20:30.846088Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZWJjMGEwNmMtNGE0YzM0M2MtZDQ4OWE4NWMtMjk4YWI4YjA=, ActorId: [4:7488009324539185815:2545], ActorState: ExecuteState, TraceId: 01jqpg29zcfg8cb4frkk1f72h3, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-03-31T16:20:30.949146Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=M2JiZjM5N2EtNzM4NTFmNi04NjAxNGY1MS03NGUxZmNmNw==, ActorId: [4:7488009324539185827:2548], ActorState: ExecuteState, TraceId: 01jqpg2a06abh8w86hkzsg0wbt, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:1294: ydb/core/kqp/query_data/kqp_query_data.cpp:271: Parameter $value22 type mismatch, expected: { Kind: Data Data { Scheme: 4865 DecimalParams { Precision: 22 Scale: 9 } } }, actual: Type (Data), schemeType: Decimal(35,10), schemeTypeId: 4865 2025-03-31T16:20:30.975937Z node 4 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [4:7488009324539185854:2554], status: GENERIC_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:7:29: Error: At function: KiWriteTable!
<main>:7:50: Error: Failed to convert type: Struct<'Key':Int32,'Value22':Decimal(35,10),'Value35':Decimal(35,10)> to Struct<'Key':Int32?,'Value22':Decimal(22,9)?,'Value35':Decimal(35,10)?>
<main>:4:25: Error: Implicit decimal cast would lose precision
<main>:7:50: Error: Failed to convert 'Value22': Decimal(35,10) to Optional<Decimal(22,9)>
:7:50: Error: Failed to convert input columns types to scheme types, code: 2031 2025-03-31T16:20:30.976207Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=YjhkNjFiYjAtYTZkMWVlNjEtYzQ0NzQ2M2ItM2QzMjQ0ODM=, ActorId: [4:7488009324539185852:2553], ActorState: ExecuteState, TraceId: 01jqpg2a3a88gpgmtr42vmmdc7, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-03-31T16:20:31.000016Z node 4 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [4:7488009324539185865:2559], status: GENERIC_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:3:29: Error: At function: KiWriteTable!
<main>:3:50: Error: Failed to convert type: Struct<'Key':Int32,'Value22':Decimal(35,10),'Value35':Decimal(35,10)> to Struct<'Key':Int32?,'Value22':Decimal(22,9)?,'Value35':Decimal(35,10)?>
<main>:0:14: Error: Implicit decimal cast would lose precision
<main>:3:50: Error: Failed to convert 'Value22': Decimal(35,10) to Optional<Decimal(22,9)>
:3:50: Error: Failed to convert input columns types to scheme types, code: 2031 2025-03-31T16:20:31.000323Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NDBmYWIzZGMtODgyYWM1ZDUtYWQzYTQ1MzctODJlNzQ1NjU=, ActorId: [4:7488009324539185863:2558], ActorState: ExecuteState, TraceId: 01jqpg2a45aectm0s0nbxr3geb, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: >> KqpParams::MissingParameter [GOOD] >> KqpParams::ImplicitSameParameterTypesQueryCacheCheck ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::RequestUnitForExecute [GOOD] Test command err: Trying to start YDB, gRPC: 9539, MsgBus: 18531 2025-03-31T16:20:17.381628Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488009266763582892:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:17.381792Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0009f4/r3tmp/tmp6gYTnB/pdisk_1.dat 2025-03-31T16:20:17.703583Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9539, node 1 2025-03-31T16:20:17.744868Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:17.744900Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:17.744932Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:17.745076Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:20:17.751931Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:17.752032Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:17.753824Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18531 TClient is connected to server localhost:18531 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:18.206311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
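The compile errors above are YDB's Decimal rules tripping in sequence: a DECLARE with precision 99 fails type annotation (Decimal(35,10) elsewhere in the log compiles fine, so the supported maximum is evidently 35), and a Decimal(35,10) value cannot be implicitly narrowed to a Decimal(22,9) column because the cast could lose precision (code 2031). A minimal YQL sketch of the failing statements (table and parameter names are hypothetical, not taken from the test source):

    -- Rejected at type annotation: "Invalid decimal precision: 99"
    DECLARE $too_wide AS Decimal(99, 9);

    -- Assuming a table with columns Key Int32, Value22 Decimal(22,9):
    DECLARE $value22 AS Decimal(35, 10);
    UPSERT INTO `/Root/DecimalTest` (Key, Value22)
    VALUES (1, $value22);  -- rejected: implicit Decimal(35,10) -> Decimal(22,9) would lose precision

An explicit CAST($value22 AS Decimal(22, 9)) would presumably be accepted, since the caller then opts into the narrowing.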
2025-03-31T16:20:18.231861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:18.355238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:18.504401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:18.582960Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:20.188770Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009279648486571:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:20.188876Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:20.507885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:20:20.540431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:20:20.606110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:20:20.647188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:20:20.682132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:20:20.715878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:20:20.754662Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009279648487086:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:20.754727Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:20.754794Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009279648487091:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:20.757783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:20:20.766714Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488009279648487093:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:20:20.823279Z node 1 :TX_PROXY ERROR: Actor# [1:7488009279648487146:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:20:21.750820Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488009283943454746:2501], status: GENERIC_ERROR, issues:
:2:12: Error: mismatched input 'INCORRECT_STMT' expecting {';', '(', '$', ALTER, ANALYZE, BACKUP, BATCH, COMMIT, CREATE, DECLARE, DEFINE, DELETE, DISCARD, DO, DROP, EVALUATE, EXPLAIN, EXPORT, FOR, FROM, GRANT, IF, IMPORT, INSERT, PARALLEL, PRAGMA, PROCESS, REDUCE, REPLACE, RESTORE, REVOKE, ROLLBACK, SELECT, SHOW, UPDATE, UPSERT, USE, VALUES} 2025-03-31T16:20:21.751112Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YzliMTIxYzYtNTY3MDg1YWMtYjFiMmQ5OTUtNjU0YmRkOTI=, ActorId: [1:7488009283943454736:2496], ActorState: ExecuteState, TraceId: 01jqpg213gb9k59v8ymmsvta7x, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
:2:12: Error: mismatched input 'INCORRECT_STMT' expecting {';', '(', '$', ALTER, ANALYZE, BACKUP, BATCH, COMMIT, CREATE, DECLARE, DEFINE, DELETE, DISCARD, DO, DROP, EVALUATE, EXPLAIN, EXPORT, FOR, FROM, GRANT, IF, IMPORT, INSERT, PARALLEL, PRAGMA, PROCESS, REDUCE, REPLACE, RESTORE, REVOKE, ROLLBACK, SELECT, SHOW, UPDATE, UPSERT, USE, VALUES} Trying to start YDB, gRPC: 17586, MsgBus: 9419 2025-03-31T16:20:22.282971Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488009288349199480:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:22.283026Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0009f4/r3tmp/tmpbJuf3L/pdisk_1.dat 2025-03-31T16:20:22.385084Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17586, node 2 2025-03-31T16:20:22.416709Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:22.416817Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:22.418856Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:20:22.442773Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:22.442800Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:22.442808Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:22.442954Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9419 TClient is connected to server localhost:9419 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-31T16:20:22.799120Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:20:22.816044Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:22.891036Z node ... UND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:25.311643Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:20:25.343821Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:20:25.369616Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:20:25.395992Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:20:25.423650Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:20:25.455116Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:20:25.530544Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488009301234103658:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:25.530627Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:25.530634Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488009301234103663:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:25.534221Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:20:25.543610Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488009301234103665:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:20:25.640367Z node 2 :TX_PROXY ERROR: Actor# [2:7488009301234103721:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:20:26.474782Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7488009305529071318:2501], status: GENERIC_ERROR, issues:
:2:8: Error: mismatched input 'INCORRECT_STMT' expecting {';', '(', '$', ALTER, ANALYZE, BACKUP, BATCH, COMMIT, CREATE, DECLARE, DEFINE, DELETE, DISCARD, DO, DROP, EVALUATE, EXPLAIN, EXPORT, FOR, FROM, GRANT, IF, IMPORT, INSERT, PARALLEL, PRAGMA, PROCESS, REDUCE, REPLACE, RESTORE, REVOKE, ROLLBACK, SELECT, SHOW, UPDATE, UPSERT, USE, VALUES} 2025-03-31T16:20:26.474984Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OThmMmRlMmUtYzI0ZGYyMTktMzYxNDU2NWEtY2E0ZDdjZDk=, ActorId: [2:7488009305529071310:2496], ActorState: ExecuteState, TraceId: 01jqpg25q50ab3vf6r4hq08kps, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
:2:8: Error: mismatched input 'INCORRECT_STMT' expecting {';', '(', '$', ALTER, ANALYZE, BACKUP, BATCH, COMMIT, CREATE, DECLARE, DEFINE, DELETE, DISCARD, DO, DROP, EVALUATE, EXPLAIN, EXPORT, FOR, FROM, GRANT, IF, IMPORT, INSERT, PARALLEL, PRAGMA, PROCESS, REDUCE, REPLACE, RESTORE, REVOKE, ROLLBACK, SELECT, SHOW, UPDATE, UPSERT, USE, VALUES} Trying to start YDB, gRPC: 31331, MsgBus: 19004 2025-03-31T16:20:27.295904Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488009311823927490:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:27.295983Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0009f4/r3tmp/tmpHPsAhi/pdisk_1.dat 2025-03-31T16:20:27.410596Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31331, node 3 2025-03-31T16:20:27.432376Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:27.432472Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:27.434203Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:20:27.468221Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:27.468250Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:27.468286Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:27.468433Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19004 TClient is connected to server localhost:19004 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:27.917452Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:27.934125Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
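This GENERIC_ERROR is a pure parse failure rather than a type error: the test (apparently by design) submits a token that starts no valid statement, and the ANTLR-based parser dumps the full set of keywords it would have accepted. A sketch of input that produces the same diagnostic (hypothetical, assuming the test sends a bogus leading keyword):

    INCORRECT_STMT;  -- mismatched input 'INCORRECT_STMT' expecting {';', '(', '$', ALTER, ...}
    SELECT 1;        -- by contrast, anything starting with a listed keyword parses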
2025-03-31T16:20:28.008614Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:28.159245Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:28.222747Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:30.282238Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488009324708831141:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:30.282327Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:30.349088Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:20:30.382023Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:20:30.415725Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:20:30.447157Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:20:30.477315Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:20:30.512384Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:20:30.557479Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488009324708831653:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:30.557557Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488009324708831658:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:30.557578Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:30.561519Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:20:30.571826Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488009324708831660:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:20:30.669264Z node 3 :TX_PROXY ERROR: Actor# [3:7488009324708831714:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Consumed units: 295 Consumed units: 6 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::QueryStats-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 3554, MsgBus: 16354 2025-03-31T16:20:16.495825Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488009262407367605:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:16.495881Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0009f6/r3tmp/tmpKR62dR/pdisk_1.dat 2025-03-31T16:20:16.836673Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3554, node 1 2025-03-31T16:20:16.896992Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:16.897288Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:16.899891Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:20:16.917577Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:16.917599Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:16.917603Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:16.917718Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16354 TClient is connected to server localhost:16354 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:17.332818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
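The recurring "Resource pool default not found" warnings followed by a TX_PROXY "path exist" error are a benign bootstrap race rather than test failures: pool fetcher actors look up /Root/.metadata/workload_manager/pools/default before it exists, a creator actor then creates it, and concurrent creators lose the race, hit "path exist", and retry after doublechecking the transaction. The pool being created corresponds roughly to the following YQL (a sketch; the settings of the implicit default pool are not visible in the log):

    CREATE RESOURCE POOL `default` WITH (
        CONCURRENT_QUERY_LIMIT = -1  -- assumption: no limit, as for a catch-all default pool
    );

The two "Consumed units" lines above are presumably the request-unit charges that KqpStats::RequestUnitForExecute asserts on: 295 for the data query and 6 for the cheaper follow-up.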
2025-03-31T16:20:17.352279Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:17.497632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:17.642090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:17.698666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:19.417185Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009275292271277:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:19.417323Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:19.687714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:20:19.713922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:20:19.742212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:20:19.774736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:20:19.802124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:20:19.830140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:20:19.869746Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009275292271789:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:19.869858Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:19.869924Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009275292271794:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:19.873922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:20:19.883327Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488009275292271796:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:20:19.980878Z node 1 :TX_PROXY ERROR: Actor# [1:7488009275292271851:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:20:20.974647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:20:21.496135Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488009262407367605:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:21.496176Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 3947, MsgBus: 7816 2025-03-31T16:20:22.269718Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488009287146722122:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:22.269767Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0009f6/r3tmp/tmpmS5uka/pdisk_1.dat 2025-03-31T16:20:22.383047Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:20:22.408125Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:22.408212Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:22.409506Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3947, node 2 2025-03-31T16:20:22.444487Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:22.444509Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:22.444521Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:22.444628Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7816 TClient is connected to server localhost:7816 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:22.807925Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:22.821400Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:22.890973Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:23.057152Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:23.132078Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but pro ... ot found or you don't have access permissions } 2025-03-31T16:20:25.230965Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:25.274195Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:20:25.302126Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:20:25.328474Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:20:25.352216Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:20:25.374228Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:20:25.399601Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:20:25.433987Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488009300031626301:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:25.434072Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488009300031626306:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:25.434085Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:25.437509Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:20:25.446244Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488009300031626308:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:20:25.531240Z node 2 :TX_PROXY ERROR: Actor# [2:7488009300031626362:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } query_phases { duration_us: 7444 table_access { name: "/Root/EightShard" updates { rows: 3 bytes: 47 } partitions_count: 1 } table_access { name: "/Root/TwoShard" reads { rows: 3 bytes: 35 } partitions_count: 1 } cpu_time_us: 3503 affected_shards: 2 } compilation { duration_us: 202018 cpu_time_us: 198807 } process_cpu_time_us: 444 total_duration_us: 211539 total_cpu_time_us: 202754 Trying to start YDB, gRPC: 20570, MsgBus: 28330 2025-03-31T16:20:27.530649Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488009310873551644:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:27.530712Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0009f6/r3tmp/tmp8q8tPM/pdisk_1.dat 2025-03-31T16:20:27.655606Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:20:27.682745Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:27.682834Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:27.684310Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20570, node 3 2025-03-31T16:20:27.725957Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:27.725985Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:27.725993Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:27.726116Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28330 TClient is connected to server localhost:28330 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
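The query_phases block printed at the end of KqpQuery::QueryStats-UseSink is the per-phase execution profile: each phase records wall time (duration_us), CPU time, affected shard count, and per-table access (rows and bytes read or updated), while compilation is accounted separately. That is why total_duration_us of roughly 212 ms is dominated by the roughly 202 ms compile, with only about 7 ms of actual data work. A query shape consistent with these numbers (hypothetical; the test's actual text is not in the log) reads 3 rows from /Root/TwoShard and updates 3 rows in /Root/EightShard:

    -- Column names are assumptions based on the standard KQP test tables
    UPSERT INTO `/Root/EightShard` (Key, Data)
    SELECT Key, CAST(Value2 AS Int64) AS Data
    FROM `/Root/TwoShard`
    WHERE Key <= 3u;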
2025-03-31T16:20:28.145022Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:20:28.162836Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:28.235179Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:28.383511Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:28.457770Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:30.811459Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488009323758455302:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:30.811576Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:30.856396Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:20:30.890415Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:20:30.918159Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:20:30.946673Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:20:30.977456Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:20:31.044946Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:20:31.135587Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488009328053423117:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:31.135665Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:31.135722Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488009328053423122:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:31.138598Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:20:31.146739Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488009328053423124:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:20:31.233490Z node 3 :TX_PROXY ERROR: Actor# [3:7488009328053423179:3450] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } query_phases { duration_us: 5224 table_access { name: "/Root/TwoShard" reads { rows: 3 bytes: 35 } partitions_count: 1 } cpu_time_us: 3847 affected_shards: 1 } query_phases { duration_us: 6476 table_access { name: "/Root/EightShard" updates { rows: 3 bytes: 47 } partitions_count: 1 } cpu_time_us: 2603 affected_shards: 2 } compilation { duration_us: 168903 cpu_time_us: 166006 } process_cpu_time_us: 754 total_duration_us: 183978 total_cpu_time_us: 173210 >> KqpPg::CreateTableSerialColumns+useSink >> KqpPg::CreateTableBulkUpsertAndRead >> KqpPg::EmptyQuery+useSink >> KqpPg::NoTableQuery+useSink >> KqpPg::TypeCoercionInsert-useSink >> KqpPg::JoinWithQueryService+StreamLookup >> Cdc::MustNotLoseSchemaSnapshot [GOOD] >> Cdc::MustNotLoseSchemaSnapshotWithVolatileTx >> KqpPg::InsertNoTargetColumns_Simple+useSink >> KqpLimits::LargeParametersAndMkqlFailure [GOOD] >> KqpLimits::ManyPartitions >> KqpLimits::QSReplySizeEnsureMemoryLimits-useSink [GOOD] >> KqpLimits::QueryExecTimeout >> KqpExplain::ExplainDataQuery [GOOD] >> KqpExplain::ExplainDataQueryWithParams >> KqpQuery::ReadOverloaded+StreamLookup [GOOD] >> KqpQuery::ReadOverloaded-StreamLookup >> KqpPg::ReadPgArray >> KqpLimits::KqpMkqlMemoryLimitException [GOOD] >> KqpLimits::DatashardProgramSize+useSink >> KqpExplain::IdxFullscan [GOOD] >> KqpPg::ReadPgArray [GOOD] >> KqpPg::TableArrayInsert+useSink >> Cdc::ResolvedTimestampsContinueAfterMerge [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::IdxFullscan [GOOD] Test command err: Trying to start YDB, gRPC: 31623, MsgBus: 32701 2025-03-31T16:20:13.256084Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488009251507601444:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:13.256226Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a1d/r3tmp/tmpPiRiVZ/pdisk_1.dat 2025-03-31T16:20:13.545551Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31623, node 1 2025-03-31T16:20:13.610986Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:13.611139Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:13.612940Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:20:13.616705Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:13.616727Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:13.616732Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:13.616840Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server 
localhost:32701 TClient is connected to server localhost:32701 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:14.038900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:14.062121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:14.184836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:14.337356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:14.409552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:16.012486Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009264392505118:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:16.012601Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:16.307370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:20:16.336278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:20:16.364576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:20:16.392915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:20:16.420396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:20:16.450844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:20:16.488616Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009264392505627:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:16.488690Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:16.488722Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009264392505632:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:16.492117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:20:16.501422Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488009264392505634:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:20:16.575139Z node 1 :TX_PROXY ERROR: Actor# [1:7488009264392505688:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["EightShard"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"TopSort","Limit":"SUM(10,15)","TopSortBy":"row.Text"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/EightShard","E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data","Key","Text"],"E-Cost":"No estimate"}],"Node Type":"TopSort-TableFullScan"}],"Node Type":"Merge","SortColumns":["Text (Asc)"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Limit","Limit":"Min(If,SUM(10,15))"}],"Node Type":"Limit"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Limit","Limit":"10"},{"Inputs":[{"ExternalPlanNodeId":4}],"Offset":"15","Name":"Offset"}],"Node Type":"Limit-Offset"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/EightShard","reads":[{"columns":["Data","Key","Text"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":8,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Path":"\/Root\/EightShard","E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data","Key","Text"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"Name":"TopSort","Limit":"SUM(10,15)","TopSortBy":"row.Text"}],"Node Type":"TopSort"}],"Operators":[{"Name":"Limit","Limit":"Min(If,SUM(10,15))"}],"Node Type":"Limit"}],"Operators":[{"Offset":"15","Name":"Offset"}],"Node Type":"Offset"}],"Operators":[{"Name":"Limit","Limit":"10"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 61690, MsgBus: 8538 2025-03-31T16:20:18.231237Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488009272205687185:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:18.231354Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a1d/r3tmp/tmpimuKWl/pdisk_1.dat 2025-03-31T16:20:18.334933Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:20:18.369758Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:18.369850Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: 
Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 61690, node 2 2025-03-31T16:20:18.373026Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:20:18.415691Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:18.415712Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:18.415722Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:18.415865Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8538 TClient is connected to server localhost:8538 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 ... RN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3251, node 4 2025-03-31T16:20:29.525285Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:29.525410Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:29.527068Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:20:29.550344Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:29.550373Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:29.550383Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:29.550515Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32106 TClient is connected to server localhost:32106 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:30.027270Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:20:30.043212Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:30.100554Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:30.279337Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:30.371575Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:32.610496Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488009333305489992:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:32.610581Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:32.660032Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:20:32.692800Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:20:32.721022Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:20:32.757280Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:20:32.784092Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:20:32.816610Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:20:32.893481Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488009333305490503:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:32.893586Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:32.893676Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488009333305490508:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:32.896949Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:20:32.906417Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7488009333305490510:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:20:32.982696Z node 4 :TX_PROXY ERROR: Actor# [4:7488009333305490565:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:20:33.991422Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:34.287704Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-31T16:20:34.321752Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-03-31T16:20:34.376233Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7488009320420586345:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:34.376289Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":12,"Plans":[{"PlanNodeId":11,"Plans":[{"PlanNodeId":10,"Plans":[{"PlanNodeId":9,"Plans":[{"E-Size":"No estimate","PlanNodeId":8,"LookupKeyColumns":["id"],"Node Type":"TableLookup","Path":"\/Root\/test_table_idx","Columns":["Value","complex_field","id","str_field"],"E-Rows":"No estimate","Table":"test_table_idx","Plans":[{"PlanNodeId":7,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Iterator":"PartitionByKey","Name":"Iterator"},{"Inputs":[],"Name":"PartitionByKey","Input":"precompute_0_0"}],"Node Type":"ConstantExpr-Aggregate","CTE Name":"precompute_0_0"}],"PlanNodeType":"Connection","E-Cost":"No estimate"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Limit","Limit":"1001"},{"Inputs":[{"InternalOperatorId":3},{"InternalOperatorId":2}],"E-Rows":"No estimate","Condition":"t.id = idx.id","Name":"InnerJoin (MapJoin)","E-Size":"No estimate","E-Cost":"No estimate"},{"Inputs":[],"ToFlow":"precompute_0_0","Name":"ToFlow"},{"Inputs":[{"ExternalPlanNodeId":8}],"E-Rows":"No estimate","Predicate":"Exist(item.id)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Limit-InnerJoin (MapJoin)-ConstantExpr-Filter","CTE Name":"precompute_0_0"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":10}],"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"},{"PlanNodeId":5,"Subplan Name":"CTE precompute_0_0","Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["test_table_idx_idx"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","ReadRange":["str_field (null)","complex_field (-∞, +∞)"],"E-Size":"No estimate","Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/test_table_idx_idx","E-Rows":"No estimate","Table":"test_table_idx_idx","ReadColumns":["id"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Node Type":"Collect"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node 
Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/test_table_idx","reads":[{"lookup_by":["id"],"columns":["Value","complex_field","id","str_field"],"type":"Lookup"}]},{"name":"\/Root\/test_table_idx_idx","reads":[{"lookup_by":["str_field (null)"],"columns":["id"],"scan_by":["complex_field (-∞, +∞)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":7,"Operators":[{"E-Rows":"No estimate","Columns":["Value","complex_field","id","str_field"],"E-Size":"No estimate","E-Cost":"No estimate","Name":"TableLookup","Table":"test_table_idx","LookupKeyColumns":["id"]}],"Node Type":"TableLookup","PlanNodeType":"Connection"}],"Operators":[{"E-Rows":"No estimate","Predicate":"Exist(item.id)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"},{"PlanNodeId":13,"Operators":[{"Scan":"Parallel","ReadRange":["str_field (null)","complex_field (-∞, +∞)"],"E-Size":"No estimate","Name":"TableRangeScan","Path":"\/Root\/test_table_idx_idx","E-Rows":"No estimate","Table":"test_table_idx_idx","ReadColumns":["id"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Operators":[{"E-Rows":"No estimate","Condition":"t.id = idx.id","Name":"InnerJoin (MapJoin)","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"InnerJoin (MapJoin)"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":2},"PlanNodeType":"Query"}} >> KqpParams::CheckCacheWithRecompilationQuery [GOOD] >> TKeyValueTest::TestCopyRangeToLongKey [GOOD] >> KqpPg::EmptyQuery+useSink [GOOD] >> KqpPg::EmptyQuery-useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_exchange/unittest >> Cdc::ResolvedTimestampsContinueAfterMerge [GOOD] Test command err: 2025-03-31T16:18:02.192396Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008687169756930:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:02.192481Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00191c/r3tmp/tmpWDgH0Z/pdisk_1.dat 2025-03-31T16:18:02.656572Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:02.663760Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:02.663861Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 7401, node 1 2025-03-31T16:18:02.667953Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:18:02.822000Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:02.822042Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:02.822052Z node 1 
:NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:02.822254Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:18:03.096688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:18:03.152030Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:18:03.201530Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:7488008691464724849:2309] 2025-03-31T16:18:03.201875Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:18:03.231019Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:18:03.231121Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:18:03.233262Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:18:03.233318Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:18:03.233363Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:18:03.233789Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:18:03.233841Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:18:03.233880Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:7488008691464724866:2309] in generation 1 2025-03-31T16:18:03.235793Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:18:03.264169Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T16:18:03.265214Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:18:03.265318Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:7488008691464724869:2310] 2025-03-31T16:18:03.265334Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:18:03.265344Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T16:18:03.265380Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:18:03.266731Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T16:18:03.266808Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T16:18:03.266829Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:18:03.266863Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:18:03.266894Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:18:03.266910Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:18:03.271336Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7488008691464724842:2301], serverId# [1:7488008691464724856:2305], sessionId# [0:0:0] 2025-03-31T16:18:03.273193Z node 1 :TX_DATASHARD DEBUG: 
TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:18:03.273601Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976710657 ssId 72057594046644480 seqNo 2:1 2025-03-31T16:18:03.273708Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976710657 at tablet 72075186224037888 2025-03-31T16:18:03.274934Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:18:03.275847Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:18:03.275962Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-31T16:18:03.277892Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7488008691464724883:2320], serverId# [1:7488008691464724884:2321], sessionId# [0:0:0] 2025-03-31T16:18:03.282757Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976710657 at step 1743437883327 at tablet 72075186224037888 { Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743437883327 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T16:18:03.282790Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:18:03.282944Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:18:03.283005Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:18:03.283025Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:18:03.283050Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1743437883327:281474976710657] in PlanQueue unit at 72075186224037888 2025-03-31T16:18:03.283395Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1743437883327:281474976710657 keys extracted: 0 2025-03-31T16:18:03.283527Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:18:03.283631Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:18:03.283663Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-31T16:18:03.286133Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-31T16:18:03.288567Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:18:03.290023Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1743437883326 2025-03-31T16:18:03.290040Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:18:03.290107Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1743437883334 2025-03-31T16:18:03.290155Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1743437883327} 2025-03-31T16:18:03.290194Z node 1 
:TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:18:03.290250Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:18:03.290332Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:18:03.290398Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-31T16:18:03.290464Z node 1 :TX_DATASHARD DEBUG: Complete [1743437883327 : 281474976710657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:7488008687169757365:2199], exec latency: 4 ms, propose latency: 6 ms 2025-03-31T16:18:03.290493Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976710657 state Ready TxInFly 0 2025-03-31T16:18:03.290553Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:18:03.301132Z node 1 :CHANGE_EXCHANGE DEBUG: [ChangeSender][72075186224037888:1][1:7488008691464724869:2310][Inactive] Handle NKikimrChangeExchange.TEvActivateSender 2025-03-31T16:18:03.302833Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976710657 datashard 72075186224037888 state Ready 2025-03-31T16:18:03.302879Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-31T16:18:03.319738Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:18:03.319882Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:18:03.319962Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976710658 ssId 72057594046644480 seqNo 2:2 2025-03-31T16:18:03.319981Z node 1 :TX_DATASHARD INFO: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976710658 2025-03-31T16:18:03.319988Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976710658 at tablet 72075186224037888 2025-03-31T16:18:03.321380Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:18:03.356298Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Registered with mediator time cast 2025-03-31T16:18:03.371318Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Handle TEvInterconnect::TEvNodeInfo 2025-03-31T16:18:03.400214Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-03-31T16:18:03.400548Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] doesn't have tx info 2025-03-31T16:18:03.400579Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-31T16:18:03.400687Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] no config, start with empty partitions and default config 2025-03-31T16:18:03.400719Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Txs.size=0, PlannedTxs.size=0 2025-03-31T16:18:03.400746Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037889] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:18:03.400793Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037889] doesn't have tx writes info 2025-03-31T16:18:03.401489Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] server connected, pipe [1:7488008691464724951:2371], now have 1 active actors on pipe 2025-03-31T16:18:03.401509Z node 1 :PERSQUEUE DEBUG: [PQ ... ASHARD INFO: OnTabletDead: 72075186224037892 2025-03-31T16:20:35.177551Z node 24 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 24, TabletId: 72075186224037891 not found 2025-03-31T16:20:35.177857Z node 24 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 24, TabletId: 72075186224037892 not found 2025-03-31T16:20:35.201444Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889] server connected, pipe [24:1268:2645], now have 1 active actors on pipe ... release register requests ... wait for merge tx notification 2025-03-31T16:20:35.223144Z node 24 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-03-31T16:20:35.223303Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-03-31T16:20:35.223867Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-31T16:20:35.223992Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-31T16:20:35.224110Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-31T16:20:35.224226Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] m0000000000p72075186224037893 2025-03-31T16:20:35.224302Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] i0000000000 2025-03-31T16:20:35.224382Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-31T16:20:35.224513Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] =========================== 2025-03-31T16:20:35.224744Z node 24 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-03-31T16:20:35.226338Z node 24 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 ... 
wait for final heartbeat >>>>> GetRecords path=/Root/Table/Stream partitionId=0 2025-03-31T16:20:35.230215Z node 24 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-03-31T16:20:35.230356Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-03-31T16:20:35.231522Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 0 Topic 'Table/Stream/streamImpl' partition 0 user $without_consumer offset 0 count 10000 size 26214400 endOffset 2 max time lag 0ms effective offset 0 2025-03-31T16:20:35.231707Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 2 2025-03-31T16:20:35.231897Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Reading cookie 0. All data is from uncompacted head. 2025-03-31T16:20:35.232001Z node 24 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-03-31T16:20:35.232857Z node 24 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 2025-03-31T16:20:35.244307Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-03-31T16:20:35.244613Z node 24 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 2025-03-31T16:20:35.244958Z node 24 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-03-31T16:20:35.245099Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-03-31T16:20:35.245366Z node 24 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 2025-03-31T16:20:35.245796Z node 24 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderPartition][72075186224037893:1][0][72075186224037889][24:1319:3043] Handle NKikimr::NPQ::TEvPartitionWriter::TEvInitResult { SessionId: TxId: Success { OwnerCookie: 72075186224037893|7ad083b8-8b4c7cb0-bff0ad54-eed6fdd0_0 SourceIdInfo: SourceId: "\00072075186224037893" SeqNo: 0 Offset: 2 WriteTimestampMS: 0 Explicit: true State: STATE_REGISTERED } } 2025-03-31T16:20:35.246093Z node 24 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderMain][72075186224037893:1][24:1316:3043] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 0 } 2025-03-31T16:20:35.246392Z node 24 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderPartition][72075186224037893:1][0][72075186224037889][24:1319:3043] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 6000 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcHeartbeat Source: Unspecified Body: 0b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2025-03-31T16:20:35.246858Z node 24 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-03-31T16:20:35.246917Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-03-31T16:20:35.247074Z node 24 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 1 2025-03-31T16:20:35.247266Z node 24 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-03-31T16:20:35.247316Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got 
client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-03-31T16:20:35.247474Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message topic: Table/Stream/streamImpl partition: 0 SourceId: '\00072075186224037893' SeqNo: 1 partNo : 0 messageNo: 1 size 26 offset: -1 2025-03-31T16:20:35.247806Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 process heartbeat sourceId '\00072075186224037893' version v6000/0 2025-03-31T16:20:35.248001Z node 24 :PERSQUEUE INFO: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 emit heartbeat v6000/0 2025-03-31T16:20:35.248291Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 part blob processing sourceId '\00072075186224037889' seqNo 0 partNo 0 2025-03-31T16:20:35.284639Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 part blob complete sourceId '\00072075186224037889' seqNo 0 partNo 0 FormedBlobsCount 0 NewHead: Offset 2 PartNo 0 PackedSize 107 count 1 nextOffset 3 batches 1 2025-03-31T16:20:35.286355Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Add new write blob: topic 'Table/Stream/streamImpl' partition 0 compactOffset 2,1 HeadOffset 0 endOffset 2 curOffset 3 d0000000000_00000000000000000002_00000_0000000001_00000| size 93 WTime 6505 2025-03-31T16:20:35.286789Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-31T16:20:35.286924Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-31T16:20:35.287039Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-03-31T16:20:35.287168Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-31T16:20:35.287286Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] m0000000000p72075186224037889 2025-03-31T16:20:35.287369Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] d0000000000_00000000000000000002_00000_0000000001_00000| 2025-03-31T16:20:35.287403Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] i0000000000 2025-03-31T16:20:35.287478Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-31T16:20:35.287607Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] =========================== 2025-03-31T16:20:35.287862Z node 24 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-03-31T16:20:35.288107Z node 24 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 2 partNo 0 count 1 size 93 2025-03-31T16:20:35.289960Z node 24 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 2 count 1 size 93 actorID [24:1288:3023] 2025-03-31T16:20:35.290319Z node 24 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. 
Tablet '72075186224037889' partition 0 offset 2 partno 0 count 1 parts 0 size 93 2025-03-31T16:20:35.300860Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 44 WriteNewSizeFromSupportivePartitions# 0 2025-03-31T16:20:35.301079Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-31T16:20:35.301329Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Answering for message sourceid: '\00072075186224037893', Topic: 'Table/Stream/streamImpl', Partition: 0, SeqNo: 1, partNo: 0, Offset: 2 is stored on disk 2025-03-31T16:20:35.301908Z node 24 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 1 requestId: cookie: 1 2025-03-31T16:20:35.302349Z node 24 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderPartition][72075186224037893:1][0][72075186224037889][24:1319:3043] Handle NKikimrClient.TResponse { SessionId: TxId: Success { Response: Status: 1 ErrorCode: OK PartitionResponse { CmdWriteResult { AlreadyWritten: false SourceId: "\00072075186224037893" SeqNo: 1 Offset: 2 WriteTimestampMS: 6505 PartitionQuotedTimeMs: 0 TotalTimeInPartitionQueueMs: 0 WriteTimeMs: 0 TopicQuotedTimeMs: 0 WrittenInTx: false } Cookie: 1 } } } 2025-03-31T16:20:35.302540Z node 24 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderMain][72075186224037893:1][24:1316:3043] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 0 } 2025-03-31T16:20:35.302791Z node 24 :TX_DATASHARD INFO: TTxRemoveChangeRecords Execute: records# 1, at tablet# 72075186224037893 2025-03-31T16:20:35.302890Z node 24 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 1, at tablet: 72075186224037893 2025-03-31T16:20:35.314327Z node 24 :TX_DATASHARD INFO: TTxRemoveChangeRecords Complete: removed# 1, left# 0, at tablet# 72075186224037893 >>>>> GetRecords path=/Root/Table/Stream partitionId=0 2025-03-31T16:20:35.722362Z node 24 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-03-31T16:20:35.722433Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-03-31T16:20:35.722565Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 1 Topic 'Table/Stream/streamImpl' partition 0 user $without_consumer offset 0 count 10000 size 26214400 endOffset 3 max time lag 0ms effective offset 0 2025-03-31T16:20:35.722603Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 1 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 3 2025-03-31T16:20:35.722672Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Reading cookie 1. All data is from uncompacted head. 
2025-03-31T16:20:35.722702Z node 24 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-03-31T16:20:35.722842Z node 24 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 >> YdbSdkSessionsPool::WaitQueue10 >> YdbSdkSessionsPool::StressTestSync10 >> YdbSdkSessionsPool::CustomPlan >> KqpPg::NoTableQuery+useSink [GOOD] >> KqpPg::NoTableQuery-useSink >> KqpStats::SelfJoin [GOOD] >> KqpPg::InsertNoTargetColumns_Simple+useSink [GOOD] >> KqpPg::InsertNoTargetColumns_Simple-useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestCopyRangeToLongKey [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:52:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:52:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:78:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:79:2110] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:79:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! 
new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:79:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:80:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:80:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! 
new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:140:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:84:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:87:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:88:2057] recipient: [8:86:2116] Leader for TabletID 72057594037927937 is [8:89:2117] sender: [8:90:2057] recipient: [8:86:2116] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! new actor is[8:89:2117] Leader for TabletID 72057594037927937 is [8:89:2117] sender: [8:143:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:84:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:88:2057] recipient: [9:86:2116] Leader for TabletID 72057594037927937 is [9:89:2117] sender: [9:90:2057] recipient: [9:86:2116] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! 
new actor is[9:89:2117] Leader for TabletID 72057594037927937 is [9:89:2117] sender: [9:143:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:52:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:52:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:85:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:88:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:89:2057] recipient: [10:87:2116] Leader for TabletID 72057594037927937 is [10:90:2117] sender: [10:91:2057] recipient: [10:87:2116] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:90:2117] Leader for TabletID 72057594037927937 is [10:90:2117] sender: [10:144:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:88:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:91:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:92:2057] recipient: [11:90:2119] Leader for TabletID 72057594037927937 is [11:93:2120] sender: [11:94:2057] recipient: [11:90:2119] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! new actor is[11:93:2120] Leader for TabletID 72057594037927937 is [11:93:2120] sender: [11:147:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (acto ... 7 is [27:56:2097] sender: [27:89:2057] recipient: [27:36:2083] Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:92:2057] recipient: [27:14:2061] Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:93:2057] recipient: [27:91:2119] Leader for TabletID 72057594037927937 is [27:94:2120] sender: [27:95:2057] recipient: [27:91:2119] !Reboot 72057594037927937 (actor [27:56:2097]) rebooted! !Reboot 72057594037927937 (actor [27:56:2097]) tablet resolver refreshed! 
new actor is[27:94:2120] Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:54:2057] recipient: [28:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:54:2057] recipient: [28:51:2095] Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:57:2057] recipient: [28:51:2095] Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:74:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:54:2057] recipient: [29:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:54:2057] recipient: [29:51:2095] Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:57:2057] recipient: [29:51:2095] Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:74:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:54:2057] recipient: [30:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:54:2057] recipient: [30:51:2095] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:57:2057] recipient: [30:51:2095] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:74:2057] recipient: [30:14:2061] !Reboot 72057594037927937 (actor [30:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:76:2057] recipient: [30:36:2083] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:79:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:80:2057] recipient: [30:78:2110] Leader for TabletID 72057594037927937 is [30:81:2111] sender: [30:82:2057] recipient: [30:78:2110] !Reboot 72057594037927937 (actor [30:56:2097]) rebooted! !Reboot 72057594037927937 (actor [30:56:2097]) tablet resolver refreshed! new actor is[30:81:2111] Leader for TabletID 72057594037927937 is [30:81:2111] sender: [30:135:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:54:2057] recipient: [31:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:54:2057] recipient: [31:52:2095] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:57:2057] recipient: [31:52:2095] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:74:2057] recipient: [31:14:2061] !Reboot 72057594037927937 (actor [31:56:2097]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:76:2057] recipient: [31:36:2083] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:79:2057] recipient: [31:78:2110] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:80:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [31:81:2111] sender: [31:82:2057] recipient: [31:78:2110] !Reboot 72057594037927937 (actor [31:56:2097]) rebooted! !Reboot 72057594037927937 (actor [31:56:2097]) tablet resolver refreshed! 
new actor is[31:81:2111] Leader for TabletID 72057594037927937 is [31:81:2111] sender: [31:135:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:54:2057] recipient: [32:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:54:2057] recipient: [32:51:2095] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:57:2057] recipient: [32:51:2095] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:74:2057] recipient: [32:14:2061] !Reboot 72057594037927937 (actor [32:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:77:2057] recipient: [32:36:2083] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:80:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:81:2057] recipient: [32:79:2110] Leader for TabletID 72057594037927937 is [32:82:2111] sender: [32:83:2057] recipient: [32:79:2110] !Reboot 72057594037927937 (actor [32:56:2097]) rebooted! !Reboot 72057594037927937 (actor [32:56:2097]) tablet resolver refreshed! new actor is[32:82:2111] Leader for TabletID 72057594037927937 is [32:82:2111] sender: [32:136:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:54:2057] recipient: [33:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:54:2057] recipient: [33:50:2095] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:57:2057] recipient: [33:50:2095] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:74:2057] recipient: [33:14:2061] !Reboot 72057594037927937 (actor [33:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:80:2057] recipient: [33:36:2083] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:83:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:84:2057] recipient: [33:82:2113] Leader for TabletID 72057594037927937 is [33:85:2114] sender: [33:86:2057] recipient: [33:82:2113] !Reboot 72057594037927937 (actor [33:56:2097]) rebooted! !Reboot 72057594037927937 (actor [33:56:2097]) tablet resolver refreshed! new actor is[33:85:2114] Leader for TabletID 72057594037927937 is [33:85:2114] sender: [33:139:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:54:2057] recipient: [34:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:54:2057] recipient: [34:51:2095] Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:57:2057] recipient: [34:51:2095] Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:74:2057] recipient: [34:14:2061] !Reboot 72057594037927937 (actor [34:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:80:2057] recipient: [34:36:2083] Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:83:2057] recipient: [34:14:2061] Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:84:2057] recipient: [34:82:2113] Leader for TabletID 72057594037927937 is [34:85:2114] sender: [34:86:2057] recipient: [34:82:2113] !Reboot 72057594037927937 (actor [34:56:2097]) rebooted! !Reboot 72057594037927937 (actor [34:56:2097]) tablet resolver refreshed! 
new actor is[34:85:2114] Leader for TabletID 72057594037927937 is [34:85:2114] sender: [34:139:2057] recipient: [34:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:54:2057] recipient: [35:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:54:2057] recipient: [35:51:2095] Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:57:2057] recipient: [35:51:2095] Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:74:2057] recipient: [35:14:2061] !Reboot 72057594037927937 (actor [35:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:81:2057] recipient: [35:36:2083] Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:83:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:85:2057] recipient: [35:84:2113] Leader for TabletID 72057594037927937 is [35:86:2114] sender: [35:87:2057] recipient: [35:84:2113] !Reboot 72057594037927937 (actor [35:56:2097]) rebooted! !Reboot 72057594037927937 (actor [35:56:2097]) tablet resolver refreshed! new actor is[35:86:2114] Leader for TabletID 72057594037927937 is [35:86:2114] sender: [35:140:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:54:2057] recipient: [36:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:54:2057] recipient: [36:51:2095] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:57:2057] recipient: [36:51:2095] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:74:2057] recipient: [36:14:2061] !Reboot 72057594037927937 (actor [36:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:84:2057] recipient: [36:36:2083] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:87:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:88:2057] recipient: [36:86:2116] Leader for TabletID 72057594037927937 is [36:89:2117] sender: [36:90:2057] recipient: [36:86:2116] !Reboot 72057594037927937 (actor [36:56:2097]) rebooted! !Reboot 72057594037927937 (actor [36:56:2097]) tablet resolver refreshed! new actor is[36:89:2117] Leader for TabletID 72057594037927937 is [36:89:2117] sender: [36:143:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:54:2057] recipient: [37:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:54:2057] recipient: [37:50:2095] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:57:2057] recipient: [37:50:2095] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:74:2057] recipient: [37:14:2061] !Reboot 72057594037927937 (actor [37:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:84:2057] recipient: [37:36:2083] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:87:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:88:2057] recipient: [37:86:2116] Leader for TabletID 72057594037927937 is [37:89:2117] sender: [37:90:2057] recipient: [37:86:2116] !Reboot 72057594037927937 (actor [37:56:2097]) rebooted! !Reboot 72057594037927937 (actor [37:56:2097]) tablet resolver refreshed! 
new actor is[37:89:2117]
Leader for TabletID 72057594037927937 is [37:89:2117] sender: [37:143:2057] recipient: [37:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:54:2057] recipient: [38:51:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:54:2057] recipient: [38:51:2095]
Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:57:2057] recipient: [38:51:2095]
Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:74:2057] recipient: [38:14:2061]
!Reboot 72057594037927937 (actor [38:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate !
Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:85:2057] recipient: [38:36:2083]
Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:88:2057] recipient: [38:14:2061]
Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:89:2057] recipient: [38:87:2116]
Leader for TabletID 72057594037927937 is [38:90:2117] sender: [38:91:2057] recipient: [38:87:2116]
!Reboot 72057594037927937 (actor [38:56:2097]) rebooted!
!Reboot 72057594037927937 (actor [38:56:2097]) tablet resolver refreshed!
new actor is[38:90:2117]
Leader for TabletID 72057594037927937 is [38:90:2117] sender: [38:144:2057] recipient: [38:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:54:2057] recipient: [39:50:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:54:2057] recipient: [39:50:2095]
Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:57:2057] recipient: [39:50:2095]
Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:74:2057] recipient: [39:14:2061]
>> KqpPg::JoinWithQueryService+StreamLookup [GOOD]
>> KqpPg::Insert_Serial+useSink
>> KqpParams::ImplicitSameParameterTypesQueryCacheCheck [GOOD]
>> KqpParams::ParameterTypes
>> YdbSdkSessionsPool::PeriodicTask10
>> Cdc::InitialScanRacyProgressAndDrop [GOOD]
>> Cdc::EnqueueRequestProcessSend
>> KqpPg::CreateTableSerialColumns+useSink [GOOD]
>> KqpPg::CreateTableSerialColumns-useSink
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpParams::CheckCacheWithRecompilationQuery [GOOD]
Test command err:
Trying to start YDB, gRPC: 12034, MsgBus: 10281
2025-03-31T16:20:15.880351Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488009257972648779:2062];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:20:15.880543Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0009fd/r3tmp/tmpmDygoS/pdisk_1.dat
2025-03-31T16:20:16.202418Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 12034, node 1
2025-03-31T16:20:16.278809Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:20:16.278956Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:20:16.280668Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-31T16:20:16.287658Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T16:20:16.287715Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T16:20:16.287724Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:16.287833Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10281 TClient is connected to server localhost:10281 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:16.789459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:16.814692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:16.965786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:17.108828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:17.183480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:18.842751Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009270857552447:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:18.842891Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:19.149386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:20:19.181622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:20:19.210535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:20:19.275622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:20:19.304943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:20:19.370356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:20:19.408703Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009275152520263:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:19.408770Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:19.408844Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009275152520268:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:19.411914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:20:19.419550Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488009275152520270:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:20:19.491738Z node 1 :TX_PROXY ERROR: Actor# [1:7488009275152520323:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 11402, MsgBus: 11063 2025-03-31T16:20:21.271588Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488009284333854204:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:21.271662Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0009fd/r3tmp/tmp0O1Fhq/pdisk_1.dat 2025-03-31T16:20:21.364017Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11402, node 2 2025-03-31T16:20:21.409165Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:21.409246Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:21.410376Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:20:21.418909Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:21.418925Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:21.418930Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:21.419006Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11063 TClient is connected to server localhost:11063 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:21.771789Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:21.789540Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:20:21.858740Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:21.991568Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:22.059141Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:24.212343Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488009297218757862:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:24.212430Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: { ... ] received request Name# Coordination/DropNode ok# false data# peer# 2025-03-31T16:20:36.941226Z node 4 :GRPC_SERVER DEBUG: [0x51b0003b2580] received request Name# Coordination/DescribeNode ok# false data# peer# 2025-03-31T16:20:36.941343Z node 4 :GRPC_SERVER DEBUG: [0x51b000285c80] received request Name# CreateDatabase ok# false data# peer# 2025-03-31T16:20:36.941411Z node 4 :GRPC_SERVER DEBUG: [0x51b000284e80] received request Name# GetDatabaseStatus ok# false data# peer# 2025-03-31T16:20:36.941519Z node 4 :GRPC_SERVER DEBUG: [0x51b0003f9a80] received request Name# AlterDatabase ok# false data# peer# 2025-03-31T16:20:36.941584Z node 4 :GRPC_SERVER DEBUG: [0x51b000277c80] received request Name# ListDatabases ok# false data# peer# 2025-03-31T16:20:36.941687Z node 4 :GRPC_SERVER DEBUG: [0x51b00047c780] received request Name# RemoveDatabase ok# false data# peer# 2025-03-31T16:20:36.941722Z node 4 :GRPC_SERVER DEBUG: [0x51b000274480] received request Name# DescribeDatabaseOptions ok# false data# peer# 2025-03-31T16:20:36.941902Z node 4 :GRPC_SERVER DEBUG: [0x51b000250d80] received request Name# GetScaleRecommendation ok# false data# peer# 2025-03-31T16:20:36.941910Z node 4 :GRPC_SERVER DEBUG: [0x51b000419280] received request Name# ListEndpoints ok# false data# peer# 2025-03-31T16:20:36.941996Z node 4 :GRPC_SERVER DEBUG: [0x51b0003f9380] received request Name# WhoAmI ok# false data# peer# 2025-03-31T16:20:36.942036Z node 4 :GRPC_SERVER DEBUG: [0x51b00047b980] received request Name# NodeRegistration ok# false data# peer# 2025-03-31T16:20:36.942171Z node 4 :GRPC_SERVER DEBUG: [0x51b00047ab80] received request Name# Scan ok# false data# peer# 2025-03-31T16:20:36.942172Z node 4 :GRPC_SERVER DEBUG: [0x51b000275980] received request Name# GetShardLocations ok# false data# peer# 2025-03-31T16:20:36.942319Z node 4 :GRPC_SERVER DEBUG: [0x51b00024ff80] received request Name# DescribeTable ok# false data# peer# 2025-03-31T16:20:36.942369Z node 4 :GRPC_SERVER DEBUG: [0x51b0003f8580] received request Name# CreateSnapshot ok# false data# peer# 2025-03-31T16:20:36.942481Z node 4 :GRPC_SERVER DEBUG: [0x51b00047e380] received request Name# RefreshSnapshot ok# false data# peer# 2025-03-31T16:20:36.942542Z node 4 :GRPC_SERVER DEBUG: [0x51b000276080] received request Name# DiscardSnapshot ok# false data# peer# 2025-03-31T16:20:36.942645Z node 4 :GRPC_SERVER DEBUG: [0x51b000480680] received request Name# List ok# false data# peer# 2025-03-31T16:20:36.942750Z node 4 :GRPC_SERVER DEBUG: [0x51b0003f7e80] received request Name# RateLimiter/CreateResource ok# false data# peer# 2025-03-31T16:20:36.942826Z node 4 :GRPC_SERVER DEBUG: [0x51b000481480] received request Name# RateLimiter/AlterResource ok# false data# peer# 2025-03-31T16:20:36.942937Z node 4 :GRPC_SERVER DEBUG: [0x51b0003f7780] received request Name# RateLimiter/DropResource ok# false data# peer# 2025-03-31T16:20:36.943008Z node 4 :GRPC_SERVER DEBUG: [0x51b00024f180] received request Name# RateLimiter/ListResources ok# false data# peer# 2025-03-31T16:20:36.943211Z node 4 :GRPC_SERVER DEBUG: [0x51b000273d80] received request Name# RateLimiter/DescribeResource ok# false data# peer# 2025-03-31T16:20:36.943405Z node 4 :GRPC_SERVER DEBUG: [0x51b00047d580] received request Name# RateLimiter/AcquireResource ok# false 
data# peer# 2025-03-31T16:20:36.943620Z node 4 :GRPC_SERVER DEBUG: [0x51b00005da80] received request Name# CreateStream ok# false data# peer# 2025-03-31T16:20:36.943778Z node 4 :GRPC_SERVER DEBUG: [0x51b00027fa80] received request Name# ListStreams ok# false data# peer# 2025-03-31T16:20:36.943951Z node 4 :GRPC_SERVER DEBUG: [0x51b000043680] received request Name# DeleteStream ok# false data# peer# 2025-03-31T16:20:36.944105Z node 4 :GRPC_SERVER DEBUG: [0x51b0003f7080] received request Name# DescribeStream ok# false data# peer# 2025-03-31T16:20:36.944275Z node 4 :GRPC_SERVER DEBUG: [0x51b0003f0080] received request Name# ListShards ok# false data# peer# 2025-03-31T16:20:36.944323Z node 4 :GRPC_SERVER DEBUG: [0x51b00016b280] received request Name# SetWriteQuota ok# false data# peer# 2025-03-31T16:20:36.944462Z node 4 :GRPC_SERVER DEBUG: [0x51b000169680] received request Name# UpdateStream ok# false data# peer# 2025-03-31T16:20:36.944541Z node 4 :GRPC_SERVER DEBUG: [0x51b000042180] received request Name# PutRecord ok# false data# peer# 2025-03-31T16:20:36.944617Z node 4 :GRPC_SERVER DEBUG: [0x51b0003ef980] received request Name# PutRecords ok# false data# peer# 2025-03-31T16:20:36.944732Z node 4 :GRPC_SERVER DEBUG: [0x51b00011f780] received request Name# GetRecords ok# false data# peer# 2025-03-31T16:20:36.944742Z node 4 :GRPC_SERVER DEBUG: [0x51b000445580] received request Name# GetShardIterator ok# false data# peer# 2025-03-31T16:20:36.944898Z node 4 :GRPC_SERVER DEBUG: [0x51b000139b80] received request Name# SubscribeToShard ok# false data# peer# 2025-03-31T16:20:36.944911Z node 4 :GRPC_SERVER DEBUG: [0x51b0002cdf80] received request Name# DescribeLimits ok# false data# peer# 2025-03-31T16:20:36.945061Z node 4 :GRPC_SERVER DEBUG: [0x51b000083b80] received request Name# DescribeStreamSummary ok# false data# peer# 2025-03-31T16:20:36.945093Z node 4 :GRPC_SERVER DEBUG: [0x51b000136a80] received request Name# DecreaseStreamRetentionPeriod ok# false data# peer# 2025-03-31T16:20:36.945207Z node 4 :GRPC_SERVER DEBUG: [0x51b000169d80] received request Name# IncreaseStreamRetentionPeriod ok# false data# peer# 2025-03-31T16:20:36.945271Z node 4 :GRPC_SERVER DEBUG: [0x51b000168880] received request Name# UpdateShardCount ok# false data# peer# 2025-03-31T16:20:36.945376Z node 4 :GRPC_SERVER DEBUG: [0x51b000167a80] received request Name# UpdateStreamMode ok# false data# peer# 2025-03-31T16:20:36.945454Z node 4 :GRPC_SERVER DEBUG: [0x51b000168180] received request Name# RegisterStreamConsumer ok# false data# peer# 2025-03-31T16:20:36.945526Z node 4 :GRPC_SERVER DEBUG: [0x51b000166c80] received request Name# DeregisterStreamConsumer ok# false data# peer# 2025-03-31T16:20:36.945619Z node 4 :GRPC_SERVER DEBUG: [0x51b000165e80] received request Name# DescribeStreamConsumer ok# false data# peer# 2025-03-31T16:20:36.945663Z node 4 :GRPC_SERVER DEBUG: [0x51b000165780] received request Name# ListStreamConsumers ok# false data# peer# 2025-03-31T16:20:36.945806Z node 4 :GRPC_SERVER DEBUG: [0x51b000165080] received request Name# AddTagsToStream ok# false data# peer# 2025-03-31T16:20:36.945808Z node 4 :GRPC_SERVER DEBUG: [0x51b000164280] received request Name# DisableEnhancedMonitoring ok# false data# peer# 2025-03-31T16:20:36.945951Z node 4 :GRPC_SERVER DEBUG: [0x51b000164980] received request Name# EnableEnhancedMonitoring ok# false data# peer# 2025-03-31T16:20:36.945997Z node 4 :GRPC_SERVER DEBUG: [0x51b000163480] received request Name# ListTagsForStream ok# false data# peer# 2025-03-31T16:20:36.946101Z node 4 
:GRPC_SERVER DEBUG: [0x51b000162d80] received request Name# MergeShards ok# false data# peer# 2025-03-31T16:20:36.946185Z node 4 :GRPC_SERVER DEBUG: [0x51b000163b80] received request Name# RemoveTagsFromStream ok# false data# peer# 2025-03-31T16:20:36.946245Z node 4 :GRPC_SERVER DEBUG: [0x51b000147b80] received request Name# SplitShard ok# false data# peer# 2025-03-31T16:20:36.946361Z node 4 :GRPC_SERVER DEBUG: [0x51b000166580] received request Name# StartStreamEncryption ok# false data# peer# 2025-03-31T16:20:36.946421Z node 4 :GRPC_SERVER DEBUG: [0x51b000167380] received request Name# StopStreamEncryption ok# false data# peer# 2025-03-31T16:20:36.946530Z node 4 :GRPC_SERVER DEBUG: [0x51b00016ce80] received request Name# SelfCheck ok# false data# peer# 2025-03-31T16:20:36.946615Z node 4 :GRPC_SERVER DEBUG: [0x51b000262c80] received request Name# NodeCheck ok# false data# peer# 2025-03-31T16:20:36.946712Z node 4 :GRPC_SERVER DEBUG: [0x51b00025d180] received request Name# CreateSession ok# false data# peer# 2025-03-31T16:20:36.946776Z node 4 :GRPC_SERVER DEBUG: [0x51b000023780] received request Name# DeleteSession ok# false data# peer# 2025-03-31T16:20:36.946900Z node 4 :GRPC_SERVER DEBUG: [0x51b000027680] received request Name# AttachSession ok# false data# peer# 2025-03-31T16:20:36.946925Z node 4 :GRPC_SERVER DEBUG: [0x51b0003dda80] received request Name# BeginTransaction ok# false data# peer# 2025-03-31T16:20:36.947055Z node 4 :GRPC_SERVER DEBUG: [0x51b0003d8d80] received request Name# RollbackTransaction ok# false data# peer# 2025-03-31T16:20:36.947057Z node 4 :GRPC_SERVER DEBUG: [0x51b0003dd380] received request Name# CommitTransaction ok# false data# peer# 2025-03-31T16:20:36.947217Z node 4 :GRPC_SERVER DEBUG: [0x51b000261e80] received request Name# ExecuteQuery ok# false data# peer# 2025-03-31T16:20:36.947223Z node 4 :GRPC_SERVER DEBUG: [0x51b000260280] received request Name# ExecuteScript ok# false data# peer# 2025-03-31T16:20:36.947358Z node 4 :GRPC_SERVER DEBUG: [0x51b00025ed80] received request Name# FetchScriptResults ok# false data# peer# 2025-03-31T16:20:36.947400Z node 4 :GRPC_SERVER DEBUG: [0x51b0003d9480] received request Name# ExecuteTabletMiniKQL ok# false data# peer# 2025-03-31T16:20:36.947479Z node 4 :GRPC_SERVER DEBUG: [0x51b0003da980] received request Name# ChangeTabletSchema ok# false data# peer# 2025-03-31T16:20:36.947591Z node 4 :GRPC_SERVER DEBUG: [0x51b00004e580] received request Name# RestartTablet ok# false data# peer# 2025-03-31T16:20:36.947622Z node 4 :GRPC_SERVER DEBUG: [0x51b0003d7180] received request Name# CreateLogStore ok# false data# peer# 2025-03-31T16:20:36.947738Z node 4 :GRPC_SERVER DEBUG: [0x51b0003d7880] received request Name# DescribeLogStore ok# false data# peer# 2025-03-31T16:20:36.947757Z node 4 :GRPC_SERVER DEBUG: [0x51b000448d80] received request Name# DropLogStore ok# false data# peer# 2025-03-31T16:20:36.947863Z node 4 :GRPC_SERVER DEBUG: [0x51b000214080] received request Name# AlterLogStore ok# false data# peer# 2025-03-31T16:20:36.947901Z node 4 :GRPC_SERVER DEBUG: [0x51b000084980] received request Name# CreateLogTable ok# false data# peer# 2025-03-31T16:20:36.947980Z node 4 :GRPC_SERVER DEBUG: [0x51b00016ea80] received request Name# DescribeLogTable ok# false data# peer# 2025-03-31T16:20:36.948033Z node 4 :GRPC_SERVER DEBUG: [0x51b000141280] received request Name# DropLogTable ok# false data# peer# 2025-03-31T16:20:36.948109Z node 4 :GRPC_SERVER DEBUG: [0x51b000364080] received request Name# AlterLogTable ok# false data# peer# 
2025-03-31T16:20:36.948198Z node 4 :GRPC_SERVER DEBUG: [0x51b000363280] received request Name# Login ok# false data# peer#
2025-03-31T16:20:36.948250Z node 4 :GRPC_SERVER DEBUG: [0x51b000362b80] received request Name# DescribeReplication ok# false data# peer#
2025-03-31T16:20:36.948362Z node 4 :GRPC_SERVER DEBUG: [0x51b000361d80] received request Name# DescribeView ok# false data# peer#
2025-03-31T16:20:36.998894Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7488009329519607171:2060];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:20:36.998994Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
>> KqpStats::SysViewClientLost [FAIL]
>> KqpTypes::DyNumberCompare
>> KqpLimits::ManyPartitions [GOOD]
>> KqpLimits::ManyPartitionsSorting
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::SelfJoin [GOOD]
Test command err:
Trying to start YDB, gRPC: 11117, MsgBus: 9197
2025-03-31T16:20:22.268324Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488009290227773534:2061];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:20:22.268366Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000975/r3tmp/tmpngmJJj/pdisk_1.dat
TServer::EnableGrpc on GrpcPort 11117, node 1
2025-03-31T16:20:22.591316Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0
2025-03-31T16:20:22.591616Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0
2025-03-31T16:20:22.601833Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:20:22.618114Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T16:20:22.618139Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T16:20:22.618147Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T16:20:22.618248Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
2025-03-31T16:20:22.648179Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:20:22.648280Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:20:22.649880Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:9197
TClient is connected to server localhost:9197
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:23.124683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:23.154037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:23.309788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:23.463267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:23.538619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:25.359578Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009303112677199:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:25.359697Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:25.719800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:20:25.746707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:20:25.774948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:20:25.799845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:20:25.825531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:20:25.854915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:20:25.888710Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009303112677706:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:25.888777Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:25.888842Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009303112677711:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:25.891810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:20:25.915474Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488009303112677713:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:20:26.002997Z node 1 :TX_PROXY ERROR: Actor# [1:7488009303112677768:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:20:27.268448Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488009290227773534:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:27.268512Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 29241, MsgBus: 12126 2025-03-31T16:20:27.844471Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488009311825381420:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:27.847182Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000975/r3tmp/tmpPbV72P/pdisk_1.dat 2025-03-31T16:20:27.919747Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29241, node 2 2025-03-31T16:20:27.971747Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:27.971770Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:27.971777Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:27.971881Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:20:27.975059Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:27.975153Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:27.976559Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12126 TClient is connected to server localhost:12126 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:20:28.328247Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:28.346682Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:28.415692Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:28.552920Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:28.618425Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: E ... Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:20:36.339952Z node 3 :TX_PROXY ERROR: Actor# [3:7488009349617116844:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } {"Plan":{"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":8,"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"E-Size":"No estimate","PlanNodeId":3,"LookupKeyColumns":["Key"],"Node Type":"TableLookupJoin","Path":"\/Root\/TwoShard","Columns":["Key"],"E-Rows":"No estimate","Table":"TwoShard","Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["TwoShard"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/TwoShard","ReadRangesPointPrefixLen":"0","E-Rows":"No estimate","Table":"TwoShard","ReadColumns":["Key"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"Stage","Stats":{"UseLlvm":"undefined","Output":[{"Pop":{"Chunks":{"Count":2,"Sum":2,"Max":1,"Min":1},"Rows":{"Count":2,"Sum":6,"Max":3,"Min":3},"LastMessageMs":{"Count":2,"Sum":2,"Max":1,"Min":1},"FirstMessageMs":{"Count":2,"Sum":2,"Max":1,"Min":1},"Bytes":{"Count":2,"Sum":48,"Max":36,"Min":12,"History":[3,48]}},"Name":"RESULT","Push":{"WaitTimeUs":{"Count":2,"Sum":2453,"Max":1413,"Min":1040,"History":[3,2453]},"WaitPeriods":{"Count":2,"Sum":2,"Max":1,"Min":1},"Chunks":{"Count":2,"Sum":6,"Max":3,"Min":3},"ResumeMessageMs":{"Count":2,"Sum":2,"Max":1,"Min":1},"Rows":{"Count":2,"Sum":6,"Max":3,"Min":3},"LastMessageMs":{"Count":2,"Sum":2,"Max":1,"Min":1},"FirstMessageMs":{"Count":2,"Sum":2,"Max":1,"Min":1}}}],"DurationUs":{"Count":2,"Sum":2000,"Max":1000,"Min":1000},"MaxMemoryUsage":{"Count":2,"Sum":2097152,"Max":1048576,"Min":1048576,"History":[3,2097152]},"ResultRows":{"Count":2,"Sum":6,"Max":3,"Min":3},"Tasks":2,"ResultBytes":{"Count":2,"Sum":48,"Max":36,"Min":12},"OutputRows":{"Count":2,"Sum":6,"Max":3,"Min":3},"FinishedTasks":2,"IngressRows":{"Count":2,"Sum":6,"Max":3,"Min":3},"PhysicalStageId":0,"StageDurationUs":1000,"Table":[{"Path":"\/Root\/TwoShard","ReadRows":{"Count":2,"Sum":6,"Max":3,"Min":3},"ReadBytes":{"Count":2,"Sum":48,"Max":24,"Min":24}}],"BaseTimeMs":1743438037371,"OutputBytes":{"Count":2,"Sum":48,"Max":36,"Min":12},"CpuTimeUs":{"Count":2,"Sum":995,"Max":568,"Min":427,"History":[3,995]},"Ingress":[{"Pop":{"Chunks":{"Count":2,"Sum":2,"Max":1,"Min":1},"Rows":{"Count":2,"Sum":6,"Max":3,"Min":3},"LastMessageMs":{"Count":2,"Sum":2,"Max":1,"Min":1},"FirstMessageMs":{"Count":2,"Sum":2,"Max":1,"Min":1},"Bytes":{"Count":2,"Sum":96,"Max":48,"Min":48,"History":[3,96]}},"External":{},"Name":"KqpReadRangesSource","Ingress":{},"Push":{"LastMessageMs":{"Count":2,"Sum":2,"Max":1,"Min":1},"Rows":{"Count":2,"Sum":6,"Max":3,"Min":3},"Chunks":{"Count":2,"Sum":2,"Max":1,"Min":1},"ResumeMessageMs":{"Count":2,"Sum":2,"Max":1,"Min":1},"FirstMessageMs":{"Count":2,"Sum":2,"Max":1,"Min":1},"Bytes":{"Count":2,"Sum":96,"Max":48,"Min":48,"History":[3,96]},"WaitTimeUs":{"Count":2,"Sum":2505,"Max":1444,"Min":1061,"History":[3,2505]},"WaitPeriods":{"Count":2,"Sum":2,"Max":1,"Min":1}}}]}}],"PlanNodeType":"Connection","E-Cost":"No estimate"}],"Node 
Type":"Collect","Stats":{"UseLlvm":"undefined","Output":[{"Pop":{"Chunks":{"Count":2,"Sum":2,"Max":1,"Min":1},"Rows":{"Count":2,"Sum":6,"Max":3,"Min":3},"LastMessageMs":{"Count":2,"Sum":6,"Max":3,"Min":3},"FirstMessageMs":{"Count":2,"Sum":6,"Max":3,"Min":3},"Bytes":{"Count":2,"Sum":48,"Max":36,"Min":12,"History":[4,48]}},"Name":"6","Push":{"LastMessageMs":{"Count":2,"Sum":6,"Max":3,"Min":3},"Rows":{"Count":2,"Sum":6,"Max":3,"Min":3},"Chunks":{"Count":2,"Sum":6,"Max":3,"Min":3},"ResumeMessageMs":{"Count":2,"Sum":6,"Max":3,"Min":3},"FirstMessageMs":{"Count":2,"Sum":6,"Max":3,"Min":3},"PauseMessageMs":{"Count":2,"Sum":4,"Max":2,"Min":2},"WaitTimeUs":{"Count":2,"Sum":4562,"Max":2567,"Min":1995,"History":[4,4562]},"WaitPeriods":{"Count":2,"Sum":2,"Max":1,"Min":1},"WaitMessageMs":{"Count":2,"Max":3,"Min":2}}}],"MaxMemoryUsage":{"Count":2,"Sum":2097152,"Max":1048576,"Min":1048576,"History":[4,2097152]},"DurationUs":{"Count":2,"Sum":3000,"Max":2000,"Min":1000},"InputBytes":{"Count":2,"Sum":48,"Max":36,"Min":12},"Tasks":2,"OutputRows":{"Count":2,"Sum":6,"Max":3,"Min":3},"FinishedTasks":2,"InputRows":{"Count":2,"Sum":6,"Max":3,"Min":3},"PhysicalStageId":1,"StageDurationUs":2000,"Table":[{"Path":"\/Root\/TwoShard","ReadRows":{"Count":2,"Sum":6,"Max":3,"Min":3},"ReadBytes":{"Count":2,"Sum":24,"Max":12,"Min":12}}],"BaseTimeMs":1743438037371,"WaitInputTimeUs":{"Count":2,"Sum":4353,"Max":2444,"Min":1909,"History":[4,4353]},"OutputBytes":{"Count":2,"Sum":48,"Max":36,"Min":12},"CpuTimeUs":{"Count":2,"Sum":641,"Max":451,"Min":190,"History":[4,641]},"Input":[{"Pop":{"Chunks":{"Count":2,"Sum":2,"Max":1,"Min":1},"Rows":{"Count":2,"Sum":6,"Max":3,"Min":3},"LastMessageMs":{"Count":2,"Sum":4,"Max":2,"Min":2},"FirstMessageMs":{"Count":2,"Sum":4,"Max":2,"Min":2},"Bytes":{"Count":2,"Sum":48,"Max":36,"Min":12,"History":[4,48]}},"Name":"2","Push":{"LastMessageMs":{"Count":2,"Sum":4,"Max":2,"Min":2},"Rows":{"Count":2,"Sum":6,"Max":3,"Min":3},"Chunks":{"Count":2,"Sum":2,"Max":1,"Min":1},"ResumeMessageMs":{"Count":2,"Sum":4,"Max":2,"Min":2},"FirstMessageMs":{"Count":2,"Sum":4,"Max":2,"Min":2},"Bytes":{"Count":2,"Sum":48,"Max":36,"Min":12,"History":[4,48]},"PauseMessageMs":{"Count":2,"Sum":1,"Max":1,"Min":0},"WaitTimeUs":{"Count":2,"Sum":2205,"Max":1233,"Min":972,"History":[4,2205]},"WaitPeriods":{"Count":2,"Sum":2,"Max":1,"Min":1}}}]}}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":5}],"Name":"Limit","Limit":"1001"}],"Node 
Type":"Limit","Stats":{"UseLlvm":"undefined","Output":[{"Pop":{"Chunks":{"Count":1,"Sum":2,"Max":2,"Min":2},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"LastMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"FirstMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"Bytes":{"Count":1,"Sum":27,"Max":27,"Min":27,"History":[4,27]}},"Name":"8","Push":{"LastMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"Chunks":{"Count":1,"Sum":6,"Max":6,"Min":6},"ResumeMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"FirstMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"PauseMessageMs":{"Count":1,"Sum":2,"Max":2,"Min":2},"WaitTimeUs":{"Count":1,"Sum":1766,"Max":1766,"Min":1766,"History":[4,1766]},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"WaitMessageMs":{"Count":1,"Max":3,"Min":2}}}],"MaxMemoryUsage":{"Count":1,"Sum":1048576,"Max":1048576,"Min":1048576,"History":[4,1048576]},"DurationUs":{"Count":1,"Sum":1000,"Max":1000,"Min":1000},"InputBytes":{"Count":1,"Sum":48,"Max":48,"Min":48},"Tasks":1,"OutputRows":{"Count":1,"Sum":6,"Max":6,"Min":6},"FinishedTasks":1,"InputRows":{"Count":1,"Sum":6,"Max":6,"Min":6},"PhysicalStageId":2,"StageDurationUs":1000,"BaseTimeMs":1743438037371,"OutputBytes":{"Count":1,"Sum":27,"Max":27,"Min":27},"CpuTimeUs":{"Count":1,"Sum":754,"Max":754,"Min":754,"History":[4,754]},"Input":[{"Pop":{"Chunks":{"Count":1,"Sum":2,"Max":2,"Min":2},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"LastMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"FirstMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"Bytes":{"Count":1,"Sum":48,"Max":48,"Min":48,"History":[4,48]}},"Name":"4","Push":{"LastMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"Chunks":{"Count":1,"Sum":2,"Max":2,"Min":2},"ResumeMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"FirstMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"Bytes":{"Count":1,"Sum":48,"Max":48,"Min":48,"History":[4,48]},"PauseMessageMs":{"Count":1,"Sum":2,"Max":2,"Min":2},"WaitTimeUs":{"Count":1,"Sum":1629,"Max":1629,"Min":1629,"History":[4,1629]},"WaitPeriods":{"Count":1,"Sum":2,"Max":2,"Min":2},"WaitMessageMs":{"Count":1,"Max":3,"Min":2}}}]}}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":7}],"Name":"Limit","Limit":"1001"}],"Node 
Type":"Limit","Stats":{"UseLlvm":"undefined","Output":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"LastMessageMs":{"Count":1,"Sum":4,"Max":4,"Min":4},"FirstMessageMs":{"Count":1,"Sum":4,"Max":4,"Min":4},"Bytes":{"Count":1,"Sum":24,"Max":24,"Min":24,"History":[4,24]}},"Name":"RESULT","Push":{"LastMessageMs":{"Count":1,"Sum":4,"Max":4,"Min":4},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"Chunks":{"Count":1,"Sum":6,"Max":6,"Min":6},"ResumeMessageMs":{"Count":1,"Sum":4,"Max":4,"Min":4},"FirstMessageMs":{"Count":1,"Sum":4,"Max":4,"Min":4},"PauseMessageMs":{"Count":1,"Sum":2,"Max":2,"Min":2},"WaitTimeUs":{"Count":1,"Sum":1802,"Max":1802,"Min":1802,"History":[4,1802]},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"WaitMessageMs":{"Count":1,"Max":4,"Min":2}}}],"MaxMemoryUsage":{"Count":1,"Sum":1048576,"Max":1048576,"Min":1048576,"History":[4,1048576]},"DurationUs":{"Count":1,"Sum":1000,"Max":1000,"Min":1000},"InputBytes":{"Count":1,"Sum":27,"Max":27,"Min":27},"ResultRows":{"Count":1,"Sum":6,"Max":6,"Min":6},"Tasks":1,"ResultBytes":{"Count":1,"Sum":24,"Max":24,"Min":24},"OutputRows":{"Count":1,"Sum":6,"Max":6,"Min":6},"FinishedTasks":1,"InputRows":{"Count":1,"Sum":6,"Max":6,"Min":6},"PhysicalStageId":3,"StageDurationUs":1000,"BaseTimeMs":1743438037371,"OutputBytes":{"Count":1,"Sum":24,"Max":24,"Min":24},"CpuTimeUs":{"Count":1,"Sum":465,"Max":465,"Min":465,"History":[4,465]},"Input":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"LastMessageMs":{"Count":1,"Sum":4,"Max":4,"Min":4},"FirstMessageMs":{"Count":1,"Sum":4,"Max":4,"Min":4},"Bytes":{"Count":1,"Sum":27,"Max":27,"Min":27,"History":[4,27]}},"Name":"6","Push":{"LastMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"Chunks":{"Count":1,"Sum":2,"Max":2,"Min":2},"ResumeMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"FirstMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"Bytes":{"Count":1,"Sum":27,"Max":27,"Min":27,"History":[4,27]},"PauseMessageMs":{"Count":1,"Sum":2,"Max":2,"Min":2},"WaitTimeUs":{"Count":1,"Sum":1742,"Max":1742,"Min":1742,"History":[4,1742]},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"WaitMessageMs":{"Count":1,"Max":3,"Min":2}}}]}}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"Compilation":{"FromCache":false,"DurationUs":243643,"CpuTimeUs":240175},"ProcessCpuTimeUs":288,"TotalDurationUs":262221,"ResourcePoolId":"default","QueuedTimeUs":487},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":9,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Path":"\/Root\/TwoShard","ReadRangesPointPrefixLen":"0","E-Rows":"No estimate","Table":"TwoShard","ReadColumns":["Key"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"},{"Operators":[{"E-Rows":"No estimate","Columns":["Key"],"E-Size":"No estimate","E-Cost":"No estimate","Name":"TableLookup","Table":"TwoShard","LookupKeyColumns":["Key"]}],"Node Type":"TableLookup","PlanNodeType":"TableLookup"}],"Operators":[{"Name":"LookupJoin","LookupKeyColumns":["Key"]}],"Node Type":"LookupJoin","PlanNodeType":"Connection"}],"Operators":[{"A-Rows":6,"A-SelfCpu":0.754,"A-Cpu":0.754,"A-Size":27,"Name":"Limit","Limit":"1001"}],"Node 
Type":"Limit"}],"Operators":[{"A-Rows":6,"A-SelfCpu":0.465,"A-Cpu":1.219,"A-Size":24,"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","PlanNodeType":"Query"}} >> YdbSdkSessionsPool::Get1Session >> KqpQuery::QueryCacheTtl [GOOD] >> KqpQuery::QueryCacheInvalidate >> YdbSdkSessionsPool::StressTestSync1 >> TKeyValueTest::TestInlineCopyRangeWorksNewApi [GOOD] >> KqpExplain::ExplainDataQueryWithParams [GOOD] >> KqpPg::EmptyQuery-useSink [GOOD] >> KqpPg::DuplicatedColumns+useSink >> YdbSdkSessionsPool::StressTestAsync10 >> KqpPg::NoTableQuery-useSink [GOOD] >> KqpPg::PgCreateTable >> KqpLimits::DatashardProgramSize+useSink [GOOD] >> KqpLimits::DatashardProgramSize-useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestInlineCopyRangeWorksNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:52:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:52:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:78:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:79:2110] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:79:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! 
new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:82:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:83:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:83:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! 
new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:140:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:84:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:87:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:88:2057] recipient: [8:86:2116] Leader for TabletID 72057594037927937 is [8:89:2117] sender: [8:90:2057] recipient: [8:86:2116] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! new actor is[8:89:2117] Leader for TabletID 72057594037927937 is [8:89:2117] sender: [8:143:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:84:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:88:2057] recipient: [9:86:2116] Leader for TabletID 72057594037927937 is [9:89:2117] sender: [9:90:2057] recipient: [9:86:2116] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! 
new actor is[9:89:2117] Leader for TabletID 72057594037927937 is [9:89:2117] sender: [9:143:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:52:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:52:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:85:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:87:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:89:2057] recipient: [10:88:2116] Leader for TabletID 72057594037927937 is [10:90:2117] sender: [10:91:2057] recipient: [10:88:2116] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:90:2117] Leader for TabletID 72057594037927937 is [10:90:2117] sender: [10:144:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:88:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:91:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:92:2057] recipient: [11:90:2119] Leader for TabletID 72057594037927937 is [11:93:2120] sender: [11:94:2057] recipient: [11:90:2119] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! new actor is[11:93:2120] Leader for TabletID 72057594037927937 is [11:93:2120] sender: [11:147:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (acto ... 29:78:2110] !Reboot 72057594037927937 (actor [29:56:2097]) rebooted! !Reboot 72057594037927937 (actor [29:56:2097]) tablet resolver refreshed! 
new actor is[29:81:2111] Leader for TabletID 72057594037927937 is [29:81:2111] sender: [29:135:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:54:2057] recipient: [30:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:54:2057] recipient: [30:51:2095] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:57:2057] recipient: [30:51:2095] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:74:2057] recipient: [30:14:2061] !Reboot 72057594037927937 (actor [30:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:77:2057] recipient: [30:36:2083] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:80:2057] recipient: [30:79:2110] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:81:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [30:82:2111] sender: [30:83:2057] recipient: [30:79:2110] !Reboot 72057594037927937 (actor [30:56:2097]) rebooted! !Reboot 72057594037927937 (actor [30:56:2097]) tablet resolver refreshed! new actor is[30:82:2111] Leader for TabletID 72057594037927937 is [30:82:2111] sender: [30:136:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:54:2057] recipient: [31:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:54:2057] recipient: [31:52:2095] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:57:2057] recipient: [31:52:2095] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:74:2057] recipient: [31:14:2061] !Reboot 72057594037927937 (actor [31:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:80:2057] recipient: [31:36:2083] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:83:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:84:2057] recipient: [31:82:2113] Leader for TabletID 72057594037927937 is [31:85:2114] sender: [31:86:2057] recipient: [31:82:2113] !Reboot 72057594037927937 (actor [31:56:2097]) rebooted! !Reboot 72057594037927937 (actor [31:56:2097]) tablet resolver refreshed! new actor is[31:85:2114] Leader for TabletID 72057594037927937 is [31:85:2114] sender: [31:139:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:54:2057] recipient: [32:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:54:2057] recipient: [32:51:2095] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:57:2057] recipient: [32:51:2095] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:74:2057] recipient: [32:14:2061] !Reboot 72057594037927937 (actor [32:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:80:2057] recipient: [32:36:2083] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:83:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:84:2057] recipient: [32:82:2113] Leader for TabletID 72057594037927937 is [32:85:2114] sender: [32:86:2057] recipient: [32:82:2113] !Reboot 72057594037927937 (actor [32:56:2097]) rebooted! !Reboot 72057594037927937 (actor [32:56:2097]) tablet resolver refreshed! 
new actor is[32:85:2114] Leader for TabletID 72057594037927937 is [32:85:2114] sender: [32:139:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:54:2057] recipient: [33:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:54:2057] recipient: [33:50:2095] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:57:2057] recipient: [33:50:2095] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:74:2057] recipient: [33:14:2061] !Reboot 72057594037927937 (actor [33:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:81:2057] recipient: [33:36:2083] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:84:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:85:2057] recipient: [33:83:2113] Leader for TabletID 72057594037927937 is [33:86:2114] sender: [33:87:2057] recipient: [33:83:2113] !Reboot 72057594037927937 (actor [33:56:2097]) rebooted! !Reboot 72057594037927937 (actor [33:56:2097]) tablet resolver refreshed! new actor is[33:86:2114] Leader for TabletID 72057594037927937 is [33:86:2114] sender: [33:140:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:54:2057] recipient: [34:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:54:2057] recipient: [34:51:2095] Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:57:2057] recipient: [34:51:2095] Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:74:2057] recipient: [34:14:2061] !Reboot 72057594037927937 (actor [34:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:84:2057] recipient: [34:36:2083] Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:87:2057] recipient: [34:14:2061] Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:88:2057] recipient: [34:86:2116] Leader for TabletID 72057594037927937 is [34:89:2117] sender: [34:90:2057] recipient: [34:86:2116] !Reboot 72057594037927937 (actor [34:56:2097]) rebooted! !Reboot 72057594037927937 (actor [34:56:2097]) tablet resolver refreshed! new actor is[34:89:2117] Leader for TabletID 72057594037927937 is [34:89:2117] sender: [34:143:2057] recipient: [34:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:54:2057] recipient: [35:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:54:2057] recipient: [35:51:2095] Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:57:2057] recipient: [35:51:2095] Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:74:2057] recipient: [35:14:2061] !Reboot 72057594037927937 (actor [35:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:84:2057] recipient: [35:36:2083] Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:87:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:88:2057] recipient: [35:86:2116] Leader for TabletID 72057594037927937 is [35:89:2117] sender: [35:90:2057] recipient: [35:86:2116] !Reboot 72057594037927937 (actor [35:56:2097]) rebooted! !Reboot 72057594037927937 (actor [35:56:2097]) tablet resolver refreshed! 
new actor is[35:89:2117] Leader for TabletID 72057594037927937 is [35:89:2117] sender: [35:143:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:54:2057] recipient: [36:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:54:2057] recipient: [36:51:2095] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:57:2057] recipient: [36:51:2095] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:74:2057] recipient: [36:14:2061] !Reboot 72057594037927937 (actor [36:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:85:2057] recipient: [36:36:2083] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:88:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:89:2057] recipient: [36:87:2116] Leader for TabletID 72057594037927937 is [36:90:2117] sender: [36:91:2057] recipient: [36:87:2116] !Reboot 72057594037927937 (actor [36:56:2097]) rebooted! !Reboot 72057594037927937 (actor [36:56:2097]) tablet resolver refreshed! new actor is[36:90:2117] Leader for TabletID 72057594037927937 is [36:90:2117] sender: [36:144:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:54:2057] recipient: [37:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:54:2057] recipient: [37:50:2095] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:57:2057] recipient: [37:50:2095] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:74:2057] recipient: [37:14:2061] !Reboot 72057594037927937 (actor [37:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:88:2057] recipient: [37:36:2083] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:91:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:92:2057] recipient: [37:90:2119] Leader for TabletID 72057594037927937 is [37:93:2120] sender: [37:94:2057] recipient: [37:90:2119] !Reboot 72057594037927937 (actor [37:56:2097]) rebooted! !Reboot 72057594037927937 (actor [37:56:2097]) tablet resolver refreshed! new actor is[37:93:2120] Leader for TabletID 72057594037927937 is [37:93:2120] sender: [37:147:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:54:2057] recipient: [38:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:54:2057] recipient: [38:51:2095] Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:57:2057] recipient: [38:51:2095] Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:74:2057] recipient: [38:14:2061] !Reboot 72057594037927937 (actor [38:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:88:2057] recipient: [38:36:2083] Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:90:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:92:2057] recipient: [38:91:2119] Leader for TabletID 72057594037927937 is [38:93:2120] sender: [38:94:2057] recipient: [38:91:2119] !Reboot 72057594037927937 (actor [38:56:2097]) rebooted! !Reboot 72057594037927937 (actor [38:56:2097]) tablet resolver refreshed! 
new actor is[38:93:2120] Leader for TabletID 72057594037927937 is [38:93:2120] sender: [38:147:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:54:2057] recipient: [39:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:54:2057] recipient: [39:50:2095] Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:57:2057] recipient: [39:50:2095] Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:74:2057] recipient: [39:14:2061] !Reboot 72057594037927937 (actor [39:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:89:2057] recipient: [39:36:2083] Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:91:2057] recipient: [39:14:2061] Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:93:2057] recipient: [39:92:2119] Leader for TabletID 72057594037927937 is [39:94:2120] sender: [39:95:2057] recipient: [39:92:2119] !Reboot 72057594037927937 (actor [39:56:2097]) rebooted! !Reboot 72057594037927937 (actor [39:56:2097]) tablet resolver refreshed! new actor is[39:94:2120] Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:54:2057] recipient: [40:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:54:2057] recipient: [40:51:2095] Leader for TabletID 72057594037927937 is [40:56:2097] sender: [40:57:2057] recipient: [40:51:2095] Leader for TabletID 72057594037927937 is [40:56:2097] sender: [40:74:2057] recipient: [40:14:2061] >> KqpLimits::TooBigColumn-useSink >> Cdc::MustNotLoseSchemaSnapshotWithVolatileTx [GOOD] >> Cdc::ResolvedTimestampForDisplacedUpsert ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::ExplainDataQueryWithParams [GOOD] Test command err: Trying to start YDB, gRPC: 6045, MsgBus: 12525 2025-03-31T16:20:17.557045Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488009267937461904:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:17.557145Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0009ee/r3tmp/tmpFxzG9Z/pdisk_1.dat 2025-03-31T16:20:17.876876Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6045, node 1 2025-03-31T16:20:17.920456Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:17.920590Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:17.921940Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:20:17.928784Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:17.928810Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:17.928817Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:17.928947Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12525 TClient is connected to server localhost:12525 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:18.425589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:18.445203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:18.566878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:18.701310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:18.762912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:20.396702Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009280822365563:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:20.396817Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:20.682104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:20:20.712752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:20:20.776637Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:20:20.803933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:20:20.833868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:20:20.900691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:20:20.974512Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009280822366088:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:20.974579Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:20.974631Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009280822366093:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:20.977927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:20:20.986338Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488009280822366095:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:20:21.047909Z node 1 :TX_PROXY ERROR: Actor# [1:7488009285117333444:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } {"Plan":{"Plans":[{"PlanNodeId":11,"Plans":[{"PlanNodeId":10,"Operators":[{"Inputs":[],"Iterator":"precompute_0_0","Name":"Iterator"}],"Node Type":"ConstantExpr","CTE Name":"precompute_0_0"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"},{"PlanNodeId":8,"Subplan Name":"CTE precompute_0_0","Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":6,"Plans":[{"Tables":["EightShard"],"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["KeyValue"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/KeyValue","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Key"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"Broadcast","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate","Phase":"Intermediate"},{"Inputs":[{"InternalOperatorId":2},{"ExternalPlanNodeId":4}],"E-Rows":"No estimate","Condition":"t.Data = kv.Key","Name":"InnerJoin (MapJoin)","E-Size":"No estimate","E-Cost":"No estimate"},{"Inputs":[{"InternalOperatorId":3}],"E-Rows":"No estimate","Predicate":"Exist(item.Data)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/EightShard","E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data"],"E-Cost":"No estimate"}],"Node Type":"Aggregate-InnerJoin (MapJoin)-Filter-TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate","Phase":"Final"},{"Inputs":[{"InternalOperatorId":2}],"Name":"Limit","Limit":"1"},{"Inputs":[{"ExternalPlanNodeId":6}],"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate-Limit-Aggregate"}],"Node Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/EightShard","reads":[{"columns":["Data"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"}]},{"name":"\/Root\/KeyValue","reads":[{"columns":["Key"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":8,"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":10,"Plans":[{"PlanNodeId":11,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Path":"\/Root\/EightShard","E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"Exist(item.Data)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"},{"PlanNodeId":15,"Operators":[{"Scan":"Parallel","E-Size":"No 
estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Path":"\/Root\/KeyValue","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Key"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Condition":"t.Data = kv.Key","Name":"InnerJoin (MapJoin)","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"InnerJoin (MapJoin)"}],"Operators":[{"Name":"Aggregate","Phase":"Intermediate"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":2},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 62101, MsgBus: 20115 2025-03-31T16:20:23.071627Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488009293679507000:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:23.071688Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_un ... RangesKeys":["Key"],"ReadColumns":["Key","Value"],"E-Cost":"No estimate","ReadRangesExpectedSize":"4"}],"Node Type":"TableRangeScan"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet_1_1","PlanNodeType":"ResultSet"},{"PlanNodeId":11,"Plans":[{"PlanNodeId":15,"Plans":[{"PlanNodeId":16,"Plans":[{"PlanNodeId":17,"Plans":[{"PlanNodeId":19,"Plans":[{"PlanNodeId":20,"Plans":[{"PlanNodeId":21,"Plans":[{"PlanNodeId":22,"Plans":[{"PlanNodeId":23,"Operators":[{"Scan":"Parallel","ReadRange":["Key (20, 120]"],"E-Size":"No estimate","Name":"TableRangeScan","Path":"\/Root\/KeyValue","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Value"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Operators":[{"GroupBy":"item.Value","Aggregation":"state","Name":"Aggregate","Phase":"Intermediate"}],"Node Type":"Aggregate"}],"Node Type":"HashShuffle (KeyColumns: [\"Value\"])","PlanNodeType":"Connection"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Aggregate","Phase":"Intermediate"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet_1_2","PlanNodeType":"ResultSet"},{"PlanNodeId":24,"Plans":[{"PlanNodeId":28,"Plans":[{"PlanNodeId":29,"Plans":[{"PlanNodeId":30,"Plans":[{"PlanNodeId":32,"Plans":[{"PlanNodeId":33,"Plans":[{"PlanNodeId":34,"Plans":[{"PlanNodeId":35,"Plans":[{"PlanNodeId":36,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Path":"\/Root\/KeyValue","ReadRangesPointPrefixLen":"0","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Value"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"GroupBy":"item.Value","Aggregation":"state","Name":"Aggregate","Phase":"Intermediate"}],"Node Type":"Aggregate"}],"Node Type":"HashShuffle (KeyColumns: [\"Value\"])","PlanNodeType":"Connection"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node 
Type":"Aggregate"}],"Operators":[{"Name":"Aggregate","Phase":"Intermediate"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet_1_3","PlanNodeType":"ResultSet"},{"PlanNodeId":37,"Plans":[{"PlanNodeId":41,"Plans":[{"PlanNodeId":42,"Plans":[{"PlanNodeId":43,"Plans":[{"PlanNodeId":45,"Plans":[{"PlanNodeId":46,"Plans":[{"PlanNodeId":47,"Plans":[{"PlanNodeId":48,"Plans":[{"PlanNodeId":49,"Operators":[{"Scan":"Parallel","ReadRange":["Key [10, +∞)"],"E-Size":"No estimate","Name":"TableRangeScan","Path":"\/Root\/KeyValue","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Value"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Operators":[{"GroupBy":"item.Value","Aggregation":"state","Name":"Aggregate","Phase":"Intermediate"}],"Node Type":"Aggregate"}],"Node Type":"HashShuffle (KeyColumns: [\"Value\"])","PlanNodeType":"Connection"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Aggregate","Phase":"Intermediate"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet_1_4","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 6505, MsgBus: 13123 2025-03-31T16:20:35.366135Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488009344657660106:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:35.366200Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0009ee/r3tmp/tmpk8BEGh/pdisk_1.dat 2025-03-31T16:20:35.502245Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:20:35.520883Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:35.520985Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:35.522768Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6505, node 4 2025-03-31T16:20:35.565659Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:35.565690Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:35.565701Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:35.565840Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13123 TClient is connected to server localhost:13123 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:36.052592Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:36.057409Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:20:36.066511Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:36.138289Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:36.300044Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:36.376840Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:38.998405Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488009357542563774:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:38.998514Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:39.055893Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:20:39.133913Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:20:39.173210Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:20:39.232336Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:20:39.272882Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:20:39.309602Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:20:39.369295Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488009361837531580:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:39.369409Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:39.369801Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488009361837531585:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:39.374694Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:20:39.388828Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7488009361837531587:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:20:39.490968Z node 4 :TX_PROXY ERROR: Actor# [4:7488009361837531645:3450] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:20:40.366279Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7488009344657660106:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:40.366337Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpPg::InsertNoTargetColumns_Simple-useSink [GOOD] >> KqpPg::InsertNoTargetColumns_Serial-useSink >> KqpPg::Insert_Serial+useSink [GOOD] >> KqpPg::Insert_Serial-useSink >> KqpPg::CreateTableSerialColumns-useSink [GOOD] >> KqpPg::DropIndex >> TCmsTest::ManagePermissions >> TCmsTest::StateRequest >> KqpQuery::CreateAsSelect_BadCases [GOOD] >> YdbSdkSessionsPool::WaitQueue10 [GOOD] >> YdbSdkSessionsPool::Get1Session [GOOD] >> YdbSdkSessionsPool::PeriodicTask1 >> BasicUsage::RetryDiscoveryWithCancel [GOOD] >> BasicUsage::RecreateObserver >> KqpLimits::ManyPartitionsSorting [GOOD] >> KqpLimits::ManyPartitionsSortingLimit >> TCmsTest::RequestRestartServicesOk >> BsControllerConfig::MergeBoxes [GOOD] >> TKeyValueTest::TestCleanUpDataWithMockDisk [GOOD] >> KqpParams::ParameterTypes [GOOD] >> KqpParams::InvalidJson ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::WaitQueue10 [GOOD] Test command err: 2025-03-31T16:20:38.496491Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488009358446543478:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:38.497349Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0005d4/r3tmp/tmpLZVxTa/pdisk_1.dat 2025-03-31T16:20:38.909191Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:20:38.918079Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:38.918322Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:38.934842Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12131, node 1 2025-03-31T16:20:39.127734Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:39.127768Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:39.127775Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:39.127893Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28478 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:39.629643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... >> KqpTypes::DyNumberCompare [GOOD] >> KqpTypes::MultipleCurrentUtcTimestamp ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestCleanUpDataWithMockDisk [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:52:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:52:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:78:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:79:2110] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:79:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! 
Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:82:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:83:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:83:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvCleanUpDataRequest ! 
Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvForceTabletDataCleanup ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:80:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:83:2057] recipient: [7:82:2113] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:85:2114] sender: [7:86:2057] recipient: [7:82:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:85:2114] Leader for TabletID 72057594037927937 is [7:85:2114] sender: [7:139:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvTablet::TEvFollowerGcApplied ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:85:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:88:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:89:2057] recipient: [8:87:2117] Leader for TabletID 72057594037927937 is [8:90:2118] sender: [8:91:2057] recipient: [8:87:2117] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! new actor is[8:90:2118] Leader for TabletID 72057594037927937 is [8:90:2118] sender: [8:144:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:89:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:92:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:93:2057] recipient: [9:91:2121] Leader for TabletID 72057594037927937 is [9:94:2122] sender: [9:95:2057] recipient: [9:91:2121] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! new actor is[9:94:2122] Leader for TabletID 72057594037927937 is [9:94:2122] sender: [9:148:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:52:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:52:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvCleanUpDataRequest ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:89:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:92:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:93:2057] recipient: [10:91:2121] Leader for TabletID 72057594037927937 is [10:94:2122] sender: [10:95:2057] recipient: [10:91:2121] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:94:2122] Leader for TabletID 72057594037927937 is [10:94:2122] sender: [10:148:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:91:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:94:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:95:2057] recipient: [11:93:2123] Leader for TabletID 72057594037927937 is [11:96:2124] sender: [11:97:2057] recipient: [11:93:2123] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! new actor is[11:96:2124] Leader for TabletID 72057594037927937 is [11:96:2124] sender: [11:150:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:1 ... 
57594037927937 is [35:99:2125] sender: [35:153:2057] recipient: [35:17:2064] Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:56:2057] recipient: [36:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:56:2057] recipient: [36:54:2097] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:59:2057] recipient: [36:54:2097] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:76:2057] recipient: [36:17:2064] !Reboot 72057594037927937 (actor [36:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:95:2057] recipient: [36:39:2086] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:98:2057] recipient: [36:17:2064] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:99:2057] recipient: [36:97:2124] Leader for TabletID 72057594037927937 is [36:100:2125] sender: [36:101:2057] recipient: [36:97:2124] !Reboot 72057594037927937 (actor [36:58:2099]) rebooted! !Reboot 72057594037927937 (actor [36:58:2099]) tablet resolver refreshed! new actor is[36:100:2125] Leader for TabletID 72057594037927937 is [36:100:2125] sender: [36:154:2057] recipient: [36:17:2064] Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:56:2057] recipient: [37:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:56:2057] recipient: [37:54:2097] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:59:2057] recipient: [37:54:2097] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:76:2057] recipient: [37:17:2064] !Reboot 72057594037927937 (actor [37:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:98:2057] recipient: [37:39:2086] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:101:2057] recipient: [37:17:2064] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:102:2057] recipient: [37:100:2127] Leader for TabletID 72057594037927937 is [37:103:2128] sender: [37:104:2057] recipient: [37:100:2127] !Reboot 72057594037927937 (actor [37:58:2099]) rebooted! !Reboot 72057594037927937 (actor [37:58:2099]) tablet resolver refreshed! new actor is[37:103:2128] Leader for TabletID 72057594037927937 is [37:103:2128] sender: [37:157:2057] recipient: [37:17:2064] Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:56:2057] recipient: [38:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:56:2057] recipient: [38:54:2097] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:59:2057] recipient: [38:54:2097] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:76:2057] recipient: [38:17:2064] !Reboot 72057594037927937 (actor [38:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:98:2057] recipient: [38:39:2086] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:101:2057] recipient: [38:17:2064] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:102:2057] recipient: [38:100:2127] Leader for TabletID 72057594037927937 is [38:103:2128] sender: [38:104:2057] recipient: [38:100:2127] !Reboot 72057594037927937 (actor [38:58:2099]) rebooted! !Reboot 72057594037927937 (actor [38:58:2099]) tablet resolver refreshed! 
new actor is[38:103:2128] Leader for TabletID 72057594037927937 is [38:103:2128] sender: [38:157:2057] recipient: [38:17:2064] Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:56:2057] recipient: [39:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:56:2057] recipient: [39:54:2097] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:59:2057] recipient: [39:54:2097] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:76:2057] recipient: [39:17:2064] !Reboot 72057594037927937 (actor [39:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:99:2057] recipient: [39:39:2086] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:102:2057] recipient: [39:17:2064] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:103:2057] recipient: [39:101:2127] Leader for TabletID 72057594037927937 is [39:104:2128] sender: [39:105:2057] recipient: [39:101:2127] !Reboot 72057594037927937 (actor [39:58:2099]) rebooted! !Reboot 72057594037927937 (actor [39:58:2099]) tablet resolver refreshed! new actor is[39:104:2128] Leader for TabletID 72057594037927937 is [39:104:2128] sender: [39:158:2057] recipient: [39:17:2064] Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:56:2057] recipient: [40:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:56:2057] recipient: [40:54:2097] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:59:2057] recipient: [40:54:2097] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:76:2057] recipient: [40:17:2064] !Reboot 72057594037927937 (actor [40:58:2099]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:100:2057] recipient: [40:39:2086] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:103:2057] recipient: [40:17:2064] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:104:2057] recipient: [40:102:2128] Leader for TabletID 72057594037927937 is [40:105:2129] sender: [40:106:2057] recipient: [40:102:2128] !Reboot 72057594037927937 (actor [40:58:2099]) rebooted! !Reboot 72057594037927937 (actor [40:58:2099]) tablet resolver refreshed! new actor is[40:105:2129] Leader for TabletID 72057594037927937 is [40:105:2129] sender: [40:125:2057] recipient: [40:17:2064] Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:56:2057] recipient: [41:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:56:2057] recipient: [41:54:2097] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:59:2057] recipient: [41:54:2097] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:76:2057] recipient: [41:17:2064] !Reboot 72057594037927937 (actor [41:58:2099]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:101:2057] recipient: [41:39:2086] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:104:2057] recipient: [41:17:2064] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:105:2057] recipient: [41:103:2129] Leader for TabletID 72057594037927937 is [41:106:2130] sender: [41:107:2057] recipient: [41:103:2129] !Reboot 72057594037927937 (actor [41:58:2099]) rebooted! !Reboot 72057594037927937 (actor [41:58:2099]) tablet resolver refreshed! 
new actor is[41:106:2130] Leader for TabletID 72057594037927937 is [41:106:2130] sender: [41:126:2057] recipient: [41:17:2064] Leader for TabletID 72057594037927937 is [0:0:0] sender: [42:56:2057] recipient: [42:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [42:56:2057] recipient: [42:54:2097] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:59:2057] recipient: [42:54:2097] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:76:2057] recipient: [42:17:2064] !Reboot 72057594037927937 (actor [42:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:104:2057] recipient: [42:39:2086] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:107:2057] recipient: [42:17:2064] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:108:2057] recipient: [42:106:2132] Leader for TabletID 72057594037927937 is [42:109:2133] sender: [42:110:2057] recipient: [42:106:2132] !Reboot 72057594037927937 (actor [42:58:2099]) rebooted! !Reboot 72057594037927937 (actor [42:58:2099]) tablet resolver refreshed! new actor is[42:109:2133] Leader for TabletID 72057594037927937 is [42:109:2133] sender: [42:163:2057] recipient: [42:17:2064] Leader for TabletID 72057594037927937 is [0:0:0] sender: [43:56:2057] recipient: [43:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [43:56:2057] recipient: [43:54:2097] Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:59:2057] recipient: [43:54:2097] Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:76:2057] recipient: [43:17:2064] !Reboot 72057594037927937 (actor [43:58:2099]) on event NKikimr::TEvKeyValue::TEvCleanUpDataRequest ! Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:104:2057] recipient: [43:39:2086] Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:107:2057] recipient: [43:17:2064] Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:108:2057] recipient: [43:106:2132] Leader for TabletID 72057594037927937 is [43:109:2133] sender: [43:110:2057] recipient: [43:106:2132] !Reboot 72057594037927937 (actor [43:58:2099]) rebooted! !Reboot 72057594037927937 (actor [43:58:2099]) tablet resolver refreshed! new actor is[43:109:2133] Leader for TabletID 72057594037927937 is [43:109:2133] sender: [43:163:2057] recipient: [43:17:2064] Leader for TabletID 72057594037927937 is [0:0:0] sender: [44:56:2057] recipient: [44:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [44:56:2057] recipient: [44:53:2097] Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:59:2057] recipient: [44:53:2097] Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:76:2057] recipient: [44:17:2064] !Reboot 72057594037927937 (actor [44:58:2099]) on event NKikimr::TEvKeyValue::TEvForceTabletDataCleanup ! Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:104:2057] recipient: [44:39:2086] Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:107:2057] recipient: [44:17:2064] Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:108:2057] recipient: [44:106:2132] Leader for TabletID 72057594037927937 is [44:109:2133] sender: [44:110:2057] recipient: [44:106:2132] !Reboot 72057594037927937 (actor [44:58:2099]) rebooted! !Reboot 72057594037927937 (actor [44:58:2099]) tablet resolver refreshed! 
new actor is[44:109:2133] Leader for TabletID 72057594037927937 is [44:109:2133] sender: [44:163:2057] recipient: [44:17:2064] Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:56:2057] recipient: [45:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:56:2057] recipient: [45:54:2097] Leader for TabletID 72057594037927937 is [45:58:2099] sender: [45:59:2057] recipient: [45:54:2097] Leader for TabletID 72057594037927937 is [45:58:2099] sender: [45:76:2057] recipient: [45:17:2064] !Reboot 72057594037927937 (actor [45:58:2099]) on event NKikimr::TEvTablet::TEvFollowerGcApplied ! Leader for TabletID 72057594037927937 is [45:58:2099] sender: [45:109:2057] recipient: [45:39:2086] Leader for TabletID 72057594037927937 is [45:58:2099] sender: [45:112:2057] recipient: [45:17:2064] Leader for TabletID 72057594037927937 is [45:58:2099] sender: [45:113:2057] recipient: [45:111:2136] Leader for TabletID 72057594037927937 is [45:114:2137] sender: [45:115:2057] recipient: [45:111:2136] !Reboot 72057594037927937 (actor [45:58:2099]) rebooted! !Reboot 72057594037927937 (actor [45:58:2099]) tablet resolver refreshed! new actor is[45:114:2137] Leader for TabletID 72057594037927937 is [45:114:2137] sender: [45:168:2057] recipient: [45:17:2064] Leader for TabletID 72057594037927937 is [0:0:0] sender: [46:56:2057] recipient: [46:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [46:56:2057] recipient: [46:54:2097] Leader for TabletID 72057594037927937 is [46:58:2099] sender: [46:59:2057] recipient: [46:54:2097] Leader for TabletID 72057594037927937 is [46:58:2099] sender: [46:76:2057] recipient: [46:17:2064] >> KqpPg::DuplicatedColumns+useSink [GOOD] >> KqpPg::DuplicatedColumns-useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::CreateAsSelect_BadCases [GOOD] Test command err: Trying to start YDB, gRPC: 2247, MsgBus: 64513 2025-03-31T16:20:19.565043Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488009275488125827:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:19.565221Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0009b8/r3tmp/tmpZ6Nmgn/pdisk_1.dat 2025-03-31T16:20:19.846606Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2247, node 1 2025-03-31T16:20:19.914277Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:19.914318Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:19.914326Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:19.914454Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:20:19.918910Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:19.919024Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:19.920796Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:64513 TClient is connected to server localhost:64513 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:20.404541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:20.420119Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:20:20.432666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:20.587572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:20.744067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:20.808205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:22.658435Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009288373029506:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:22.658558Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:22.919448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:20:22.945607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:20:22.969795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:20:22.992470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:20:23.016411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:20:23.048725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:20:23.123813Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009292667997315:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:23.123893Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:23.123988Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009292667997320:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:23.127278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:20:23.139921Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488009292667997322:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:20:23.224126Z node 1 :TX_PROXY ERROR: Actor# [1:7488009292667997378:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 2675, MsgBus: 25438 2025-03-31T16:20:25.098874Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488009302973189238:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:25.098945Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0009b8/r3tmp/tmp5XSVKX/pdisk_1.dat 2025-03-31T16:20:25.190766Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2675, node 2 2025-03-31T16:20:25.240054Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:25.240117Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:25.241541Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:20:25.257637Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:25.257661Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:25.257668Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:25.257756Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25438 TClient is connected to server localhost:25438 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:25.671433Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:25.679927Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:20:25.754332Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:25.902574Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:20:25.965142Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-31T16:20:28.189287Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488009315858092925:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T1 ... ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-03-31T16:20:41.298006Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224038008;self_id=[3:7488009358958439397:3301];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-03-31T16:20:41.301087Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224038027;self_id=[3:7488009358958440200:3346];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-03-31T16:20:41.304114Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224038025;self_id=[3:7488009358958440228:3348];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-03-31T16:20:41.306267Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[3:7488009358958440246:3359];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-03-31T16:20:41.309506Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224038019;self_id=[3:7488009358958440345:3381];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-03-31T16:20:41.312643Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224038021;self_id=[3:7488009358958440193:3344];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-03-31T16:20:41.314529Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037996;self_id=[3:7488009358958439393:3299];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-03-31T16:20:41.315585Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224038017;self_id=[3:7488009358958440321:3373];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-03-31T16:20:41.318104Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037987;self_id=[3:7488009358958439605:3326];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-03-31T16:20:41.318681Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224038013;self_id=[3:7488009358958440332:3374];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-03-31T16:20:41.321777Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037994;self_id=[3:7488009358958439588:3320];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-03-31T16:20:41.322012Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224038011;self_id=[3:7488009358958440240:3357];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-03-31T16:20:41.325295Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037989;self_id=[3:7488009358958439549:3312];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-03-31T16:20:41.325313Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224038028;self_id=[3:7488009358958440354:3382];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-03-31T16:20:41.328804Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037992;self_id=[3:7488009358958439555:3315];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-03-31T16:20:41.329208Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037983;self_id=[3:7488009358958439530:3306];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 
2025-03-31T16:20:41.332312Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224038023;self_id=[3:7488009358958440242:3358];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-03-31T16:20:41.333078Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037977;self_id=[3:7488009358958439551:3313];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-03-31T16:20:41.335363Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224038004;self_id=[3:7488009358958439353:3293];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-03-31T16:20:41.336995Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224038006;self_id=[3:7488009358958439381:3294];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-03-31T16:20:41.344901Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037976;self_id=[3:7488009358958439603:3325];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-03-31T16:20:41.348254Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037988;self_id=[3:7488009358958439615:3327];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-03-31T16:20:41.350835Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037984 not found 2025-03-31T16:20:41.350869Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037982 not found 2025-03-31T16:20:41.350884Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037976 not found 2025-03-31T16:20:41.350899Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037988 not found 2025-03-31T16:20:41.350915Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037986 not found 2025-03-31T16:20:41.350929Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037980 not found 2025-03-31T16:20:41.350943Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037973 not found 2025-03-31T16:20:41.350957Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037969 not found 2025-03-31T16:20:41.350973Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037971 not found 2025-03-31T16:20:41.350988Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037967 not found 2025-03-31T16:20:41.351002Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037990 not found 2025-03-31T16:20:41.351017Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038026 not found 2025-03-31T16:20:41.351030Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037978 not found 2025-03-31T16:20:41.351043Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037974 not found 2025-03-31T16:20:41.351065Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037965 
not found 2025-03-31T16:20:41.351222Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037986;self_id=[3:7488009358958439631:3332];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-03-31T16:20:41.354538Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037965;self_id=[3:7488009358958439387:3296];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-03-31T16:20:41.358590Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037984;self_id=[3:7488009358958439626:3330];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-03-31T16:20:41.361752Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037982;self_id=[3:7488009358958439534:3308];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-03-31T16:20:41.365134Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037980;self_id=[3:7488009358958439399:3302];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-03-31T16:20:41.368099Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037973;self_id=[3:7488009358958439753:3334];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-03-31T16:20:41.370970Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037969;self_id=[3:7488009358958439532:3307];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-03-31T16:20:41.373967Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037971;self_id=[3:7488009358958439586:3319];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-03-31T16:20:41.376937Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037967;self_id=[3:7488009358958439553:3314];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-03-31T16:20:41.379867Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224038026;self_id=[3:7488009358958440340:3379];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-03-31T16:20:41.382134Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037978;self_id=[3:7488009358958439557:3316];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-03-31T16:20:41.385306Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037974;self_id=[3:7488009358958439590:3321];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-03-31T16:20:41.387499Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037990;self_id=[3:7488009358958439538:3309];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-03-31T16:20:41.453142Z node 3 :TX_PROXY ERROR: Actor# [3:7488009371843344816:7497] txid# 281474976715687, issues: { message: "Check failed: path: \'/Root/.tmp/sessions\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:20:41.461648Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715689:1, at schemeshard: 72057594046644480 2025-03-31T16:20:41.630686Z node 3 :TX_PROXY ERROR: Actor# [3:7488009371843344954:7583] txid# 281474976715691, issues: { message: "Check failed: path: \'/Root/RowSrc\', error: path exist, request doesn\'t accept it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: 
EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:20:41.631096Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MjVlMGMzMjctZjAzOTc0YWItNmViMzYyZmItMTMxN2IwODA=, ActorId: [3:7488009371843344792:4039], ActorState: ExecuteState, TraceId: 01jqpg2ma49m384hnfqr46tahy, Create QueryResponse for error on request, msg: 2025-03-31T16:20:41.803115Z node 3 :TX_PROXY ERROR: Actor# [3:7488009371843345025:7614] txid# 281474976715693, issues: { message: "Check failed: path: \'/Root/.tmp/sessions\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:20:41.816092Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715695:1, at schemeshard: 72057594046644480 2025-03-31T16:20:42.565194Z node 3 :TX_PROXY ERROR: Actor# [3:7488009376138312691:7781] txid# 281474976715699, issues: { message: "Check failed: path: \'/Root/.tmp/sessions\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:20:42.576969Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715701:1, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::MergeBoxes [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:11014:2156] recipient: [1:10814:2166] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:11014:2156] recipient: [1:10814:2166] Leader for TabletID 72057594037932033 is [1:11016:2168] sender: [1:11017:2156] recipient: [1:10814:2166] 2025-03-31T16:19:30.389473Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-03-31T16:19:30.394101Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-03-31T16:19:30.394436Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-03-31T16:19:30.396554Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:19:30.397030Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-03-31T16:19:30.397581Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-03-31T16:19:30.397609Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:497} Handle TEvInterconnect::TEvNodesInfo 2025-03-31T16:19:30.398112Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-03-31T16:19:30.407040Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-03-31T16:19:30.407205Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-03-31T16:19:30.407395Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-03-31T16:19:30.407536Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 
2025-03-31T16:19:30.407651Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-03-31T16:19:30.407731Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:11016:2168] sender: [1:11042:2156] recipient: [1:110:2157] 2025-03-31T16:19:30.421627Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-03-31T16:19:30.421788Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-03-31T16:19:30.432656Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-03-31T16:19:30.432819Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-03-31T16:19:30.432899Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-03-31T16:19:30.432971Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-03-31T16:19:30.433081Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-03-31T16:19:30.433135Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-03-31T16:19:30.433170Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-03-31T16:19:30.433272Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-03-31T16:19:30.444238Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-03-31T16:19:30.444427Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-03-31T16:19:30.455343Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-03-31T16:19:30.455492Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-03-31T16:19:30.456759Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:546} TTxLoadEverything Complete 2025-03-31T16:19:30.456813Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2167} LoadFinished 2025-03-31T16:19:30.457007Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-03-31T16:19:30.457067Z node 1 :BS_CONTROLLER DEBUG: 
{BSCTXLE04@load_everything.cpp:551} TTxLoadEverything InitQueue processed 2025-03-31T16:19:30.472427Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/dev/disk0" } Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" } Drive { Path: "/dev/disk3" } Drive { Path: "/dev/disk4" } Drive { Path: "/dev/disk5" } Drive { Path: "/dev/disk6" } Drive { Path: "/dev/disk7" } Drive { Path: "/dev/disk8" Type: SSD } Drive { Path: "/dev/disk9" Type: SSD } Drive { Path: "/dev/disk10" Type: SSD } Drive { Path: "/dev/disk11" Type: SSD } Drive { Path: "/dev/disk12" Type: SSD } Drive { Path: "/dev/disk13" Type: SSD } Drive { Path: "/dev/disk14" Type: SSD } Drive { Path: "/dev/disk15" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "test box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12013 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12014 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12015 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12016 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12017 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12018 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12019 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12020 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12021 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12022 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12023 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12024 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12025 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12026 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12027 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12028 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12029 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12030 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12031 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12032 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12033 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12034 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12035 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12036 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12037 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12038 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12039 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12040 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12041 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12042 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12043 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12044 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12045 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12046 } 
HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12047 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12048 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12049 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12050 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12051 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12052 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12053 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12054 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12055 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12056 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12057 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12058 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12059 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12060 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12061 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12062 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12063 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12064 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12065 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12066 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12067 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12068 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12069 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12070 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12071 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12072 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12073 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12074 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12075 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12076 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12077 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12078 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12079 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12080 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12081 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12082 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12083 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12084 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12085 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12086 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12087 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12088 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12089 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12090 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12091 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12092 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12093 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12094 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12095 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12096 } HostConfigId: 1 } Host { Ke ... 
9} Create new pdisk PDiskId# 275:1002 Path# /dev/disk3 2025-03-31T16:20:34.337966Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 276:1000 Path# /dev/disk1 2025-03-31T16:20:34.337992Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 276:1001 Path# /dev/disk2 2025-03-31T16:20:34.338020Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 276:1002 Path# /dev/disk3 2025-03-31T16:20:34.338045Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 277:1000 Path# /dev/disk1 2025-03-31T16:20:34.338071Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 277:1001 Path# /dev/disk2 2025-03-31T16:20:34.338097Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 277:1002 Path# /dev/disk3 2025-03-31T16:20:34.338122Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 278:1000 Path# /dev/disk1 2025-03-31T16:20:34.338145Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 278:1001 Path# /dev/disk2 2025-03-31T16:20:34.338180Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 278:1002 Path# /dev/disk3 2025-03-31T16:20:34.338216Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 279:1000 Path# /dev/disk1 2025-03-31T16:20:34.338245Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 279:1001 Path# /dev/disk2 2025-03-31T16:20:34.338271Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 279:1002 Path# /dev/disk3 2025-03-31T16:20:34.338295Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 280:1000 Path# /dev/disk1 2025-03-31T16:20:34.338334Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 280:1001 Path# /dev/disk2 2025-03-31T16:20:34.338370Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 280:1002 Path# /dev/disk3 2025-03-31T16:20:34.338396Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 281:1000 Path# /dev/disk1 2025-03-31T16:20:34.338421Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 281:1001 Path# /dev/disk2 2025-03-31T16:20:34.338454Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 281:1002 Path# /dev/disk3 2025-03-31T16:20:34.338492Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 282:1000 Path# /dev/disk1 2025-03-31T16:20:34.338520Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 282:1001 Path# /dev/disk2 2025-03-31T16:20:34.338545Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 282:1002 Path# /dev/disk3 2025-03-31T16:20:34.338595Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 283:1000 Path# /dev/disk1 2025-03-31T16:20:34.338629Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 283:1001 Path# /dev/disk2 2025-03-31T16:20:34.338656Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} 
Create new pdisk PDiskId# 283:1002 Path# /dev/disk3 2025-03-31T16:20:34.338698Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 284:1000 Path# /dev/disk1 2025-03-31T16:20:34.338737Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 284:1001 Path# /dev/disk2 2025-03-31T16:20:34.338763Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 284:1002 Path# /dev/disk3 2025-03-31T16:20:34.338789Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 285:1000 Path# /dev/disk1 2025-03-31T16:20:34.338817Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 285:1001 Path# /dev/disk2 2025-03-31T16:20:34.338842Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 285:1002 Path# /dev/disk3 2025-03-31T16:20:34.338868Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 286:1000 Path# /dev/disk1 2025-03-31T16:20:34.338894Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 286:1001 Path# /dev/disk2 2025-03-31T16:20:34.338927Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 286:1002 Path# /dev/disk3 2025-03-31T16:20:34.338964Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 287:1000 Path# /dev/disk1 2025-03-31T16:20:34.338990Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 287:1001 Path# /dev/disk2 2025-03-31T16:20:34.339016Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 287:1002 Path# /dev/disk3 2025-03-31T16:20:34.339041Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 288:1000 Path# /dev/disk1 2025-03-31T16:20:34.339065Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 288:1001 Path# /dev/disk2 2025-03-31T16:20:34.339125Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 288:1002 Path# /dev/disk3 2025-03-31T16:20:34.339172Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 289:1000 Path# /dev/disk1 2025-03-31T16:20:34.339207Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 289:1001 Path# /dev/disk2 2025-03-31T16:20:34.339234Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 289:1002 Path# /dev/disk3 2025-03-31T16:20:34.339268Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 290:1000 Path# /dev/disk1 2025-03-31T16:20:34.339303Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 290:1001 Path# /dev/disk2 2025-03-31T16:20:34.339334Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 290:1002 Path# /dev/disk3 2025-03-31T16:20:34.339359Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 291:1000 Path# /dev/disk1 2025-03-31T16:20:34.339385Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 291:1001 Path# /dev/disk2 2025-03-31T16:20:34.339409Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} 
Create new pdisk PDiskId# 291:1002 Path# /dev/disk3 2025-03-31T16:20:34.339435Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 292:1000 Path# /dev/disk1 2025-03-31T16:20:34.339462Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 292:1001 Path# /dev/disk2 2025-03-31T16:20:34.339489Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 292:1002 Path# /dev/disk3 2025-03-31T16:20:34.339518Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 293:1000 Path# /dev/disk1 2025-03-31T16:20:34.339563Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 293:1001 Path# /dev/disk2 2025-03-31T16:20:34.339589Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 293:1002 Path# /dev/disk3 2025-03-31T16:20:34.339615Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 294:1000 Path# /dev/disk1 2025-03-31T16:20:34.339642Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 294:1001 Path# /dev/disk2 2025-03-31T16:20:34.339682Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 294:1002 Path# /dev/disk3 2025-03-31T16:20:34.339716Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 295:1000 Path# /dev/disk1 2025-03-31T16:20:34.339744Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 295:1001 Path# /dev/disk2 2025-03-31T16:20:34.339783Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 295:1002 Path# /dev/disk3 2025-03-31T16:20:34.339815Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 296:1000 Path# /dev/disk1 2025-03-31T16:20:34.339843Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 296:1001 Path# /dev/disk2 2025-03-31T16:20:34.339871Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 296:1002 Path# /dev/disk3 2025-03-31T16:20:34.339897Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 297:1000 Path# /dev/disk1 2025-03-31T16:20:34.339924Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 297:1001 Path# /dev/disk2 2025-03-31T16:20:34.339960Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 297:1002 Path# /dev/disk3 2025-03-31T16:20:34.339999Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 298:1000 Path# /dev/disk1 2025-03-31T16:20:34.340023Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 298:1001 Path# /dev/disk2 2025-03-31T16:20:34.340049Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 298:1002 Path# /dev/disk3 2025-03-31T16:20:34.340076Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 299:1000 Path# /dev/disk1 2025-03-31T16:20:34.340102Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 299:1001 Path# /dev/disk2 2025-03-31T16:20:34.340133Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} 
Create new pdisk PDiskId# 299:1002 Path# /dev/disk3 2025-03-31T16:20:34.340163Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 300:1000 Path# /dev/disk1 2025-03-31T16:20:34.340190Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 300:1001 Path# /dev/disk2 2025-03-31T16:20:34.340216Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 300:1002 Path# /dev/disk3 2025-03-31T16:20:34.558440Z node 251 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.222608s 2025-03-31T16:20:34.558620Z node 251 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.222802s 2025-03-31T16:20:34.590539Z node 251 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { MergeBoxes { OriginBoxId: 2 OriginBoxGeneration: 1 TargetBoxId: 1 TargetBoxGeneration: 1 StoragePoolIdMap { OriginStoragePoolId: 1 TargetStoragePoolId: 2 } } } } 2025-03-31T16:20:34.615417Z node 251 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { ReadBox { BoxId: 1 } } Command { QueryBaseConfig { } } } >> TKeyValueTest::TestConcatToLongKey [GOOD] >> KqpQuery::QueryCacheInvalidate [GOOD] >> KqpQuery::QueryCachePermissionsLoss >> YdbOlapStore::LogCountByResource [GOOD] |89.9%| [TA] $(B)/ydb/core/mind/bscontroller/ut_bscontroller/test-results/unittest/{meta.json ... results_accumulator.log} |89.9%| [TA] {RESULT} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/test-results/unittest/{meta.json ... results_accumulator.log} >> TCmsTest::StateRequest [GOOD] >> TCmsTest::ScheduledEmergencyDuringRollingRestart ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestConcatToLongKey [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:52:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:52:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:78:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:79:2110] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:79:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! 
new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! 
new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:140:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:84:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:87:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:88:2057] recipient: [8:86:2116] Leader for TabletID 72057594037927937 is [8:89:2117] sender: [8:90:2057] recipient: [8:86:2116] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! 
new actor is[8:89:2117] Leader for TabletID 72057594037927937 is [8:89:2117] sender: [8:143:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:84:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:88:2057] recipient: [9:86:2116] Leader for TabletID 72057594037927937 is [9:89:2117] sender: [9:90:2057] recipient: [9:86:2116] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! new actor is[9:89:2117] Leader for TabletID 72057594037927937 is [9:89:2117] sender: [9:143:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:52:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:52:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:85:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:88:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:89:2057] recipient: [10:87:2116] Leader for TabletID 72057594037927937 is [10:90:2117] sender: [10:91:2057] recipient: [10:87:2116] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:90:2117] Leader for TabletID 72057594037927937 is [10:90:2117] sender: [10:144:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:87:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:90:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:91:2057] recipient: [11:89:2118] Leader for TabletID 72057594037927937 is [11:92:2119] sender: [11:93:2057] recipient: [11:89:2118] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! 
new actor is[11:92:2119] Leader for TabletID 72057594037927937 is [11:92:2119] sender: [11:146:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (acto ... 7 is [33:56:2097] sender: [33:92:2057] recipient: [33:36:2083] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:95:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:96:2057] recipient: [33:94:2121] Leader for TabletID 72057594037927937 is [33:97:2122] sender: [33:98:2057] recipient: [33:94:2121] !Reboot 72057594037927937 (actor [33:56:2097]) rebooted! !Reboot 72057594037927937 (actor [33:56:2097]) tablet resolver refreshed! new actor is[33:97:2122] Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:54:2057] recipient: [34:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:54:2057] recipient: [34:51:2095] Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:57:2057] recipient: [34:51:2095] Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:74:2057] recipient: [34:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:54:2057] recipient: [35:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:54:2057] recipient: [35:51:2095] Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:57:2057] recipient: [35:51:2095] Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:74:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:54:2057] recipient: [36:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:54:2057] recipient: [36:51:2095] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:57:2057] recipient: [36:51:2095] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:74:2057] recipient: [36:14:2061] !Reboot 72057594037927937 (actor [36:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:76:2057] recipient: [36:36:2083] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:79:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:80:2057] recipient: [36:78:2110] Leader for TabletID 72057594037927937 is [36:81:2111] sender: [36:82:2057] recipient: [36:78:2110] !Reboot 72057594037927937 (actor [36:56:2097]) rebooted! !Reboot 72057594037927937 (actor [36:56:2097]) tablet resolver refreshed! new actor is[36:81:2111] Leader for TabletID 72057594037927937 is [36:81:2111] sender: [36:135:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:54:2057] recipient: [37:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:54:2057] recipient: [37:50:2095] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:57:2057] recipient: [37:50:2095] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:74:2057] recipient: [37:14:2061] !Reboot 72057594037927937 (actor [37:56:2097]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! 
Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:76:2057] recipient: [37:36:2083] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:79:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:80:2057] recipient: [37:78:2110] Leader for TabletID 72057594037927937 is [37:81:2111] sender: [37:82:2057] recipient: [37:78:2110] !Reboot 72057594037927937 (actor [37:56:2097]) rebooted! !Reboot 72057594037927937 (actor [37:56:2097]) tablet resolver refreshed! new actor is[37:81:2111] Leader for TabletID 72057594037927937 is [37:81:2111] sender: [37:135:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:54:2057] recipient: [38:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:54:2057] recipient: [38:51:2095] Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:57:2057] recipient: [38:51:2095] Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:74:2057] recipient: [38:14:2061] !Reboot 72057594037927937 (actor [38:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:77:2057] recipient: [38:36:2083] Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:80:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:81:2057] recipient: [38:79:2110] Leader for TabletID 72057594037927937 is [38:82:2111] sender: [38:83:2057] recipient: [38:79:2110] !Reboot 72057594037927937 (actor [38:56:2097]) rebooted! !Reboot 72057594037927937 (actor [38:56:2097]) tablet resolver refreshed! new actor is[38:82:2111] Leader for TabletID 72057594037927937 is [38:82:2111] sender: [38:136:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:54:2057] recipient: [39:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:54:2057] recipient: [39:50:2095] Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:57:2057] recipient: [39:50:2095] Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:74:2057] recipient: [39:14:2061] !Reboot 72057594037927937 (actor [39:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:80:2057] recipient: [39:36:2083] Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:83:2057] recipient: [39:14:2061] Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:84:2057] recipient: [39:82:2113] Leader for TabletID 72057594037927937 is [39:85:2114] sender: [39:86:2057] recipient: [39:82:2113] !Reboot 72057594037927937 (actor [39:56:2097]) rebooted! !Reboot 72057594037927937 (actor [39:56:2097]) tablet resolver refreshed! new actor is[39:85:2114] Leader for TabletID 72057594037927937 is [39:85:2114] sender: [39:139:2057] recipient: [39:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:54:2057] recipient: [40:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:54:2057] recipient: [40:51:2095] Leader for TabletID 72057594037927937 is [40:56:2097] sender: [40:57:2057] recipient: [40:51:2095] Leader for TabletID 72057594037927937 is [40:56:2097] sender: [40:74:2057] recipient: [40:14:2061] !Reboot 72057594037927937 (actor [40:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! 
Leader for TabletID 72057594037927937 is [40:56:2097] sender: [40:80:2057] recipient: [40:36:2083] Leader for TabletID 72057594037927937 is [40:56:2097] sender: [40:83:2057] recipient: [40:14:2061] Leader for TabletID 72057594037927937 is [40:56:2097] sender: [40:84:2057] recipient: [40:82:2113] Leader for TabletID 72057594037927937 is [40:85:2114] sender: [40:86:2057] recipient: [40:82:2113] !Reboot 72057594037927937 (actor [40:56:2097]) rebooted! !Reboot 72057594037927937 (actor [40:56:2097]) tablet resolver refreshed! new actor is[40:85:2114] Leader for TabletID 72057594037927937 is [40:85:2114] sender: [40:139:2057] recipient: [40:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:54:2057] recipient: [41:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:54:2057] recipient: [41:51:2095] Leader for TabletID 72057594037927937 is [41:56:2097] sender: [41:57:2057] recipient: [41:51:2095] Leader for TabletID 72057594037927937 is [41:56:2097] sender: [41:74:2057] recipient: [41:14:2061] !Reboot 72057594037927937 (actor [41:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [41:56:2097] sender: [41:81:2057] recipient: [41:36:2083] Leader for TabletID 72057594037927937 is [41:56:2097] sender: [41:84:2057] recipient: [41:14:2061] Leader for TabletID 72057594037927937 is [41:56:2097] sender: [41:85:2057] recipient: [41:83:2113] Leader for TabletID 72057594037927937 is [41:86:2114] sender: [41:87:2057] recipient: [41:83:2113] !Reboot 72057594037927937 (actor [41:56:2097]) rebooted! !Reboot 72057594037927937 (actor [41:56:2097]) tablet resolver refreshed! new actor is[41:86:2114] Leader for TabletID 72057594037927937 is [41:86:2114] sender: [41:140:2057] recipient: [41:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [42:54:2057] recipient: [42:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [42:54:2057] recipient: [42:51:2095] Leader for TabletID 72057594037927937 is [42:56:2097] sender: [42:57:2057] recipient: [42:51:2095] Leader for TabletID 72057594037927937 is [42:56:2097] sender: [42:74:2057] recipient: [42:14:2061] !Reboot 72057594037927937 (actor [42:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [42:56:2097] sender: [42:84:2057] recipient: [42:36:2083] Leader for TabletID 72057594037927937 is [42:56:2097] sender: [42:87:2057] recipient: [42:14:2061] Leader for TabletID 72057594037927937 is [42:56:2097] sender: [42:88:2057] recipient: [42:86:2116] Leader for TabletID 72057594037927937 is [42:89:2117] sender: [42:90:2057] recipient: [42:86:2116] !Reboot 72057594037927937 (actor [42:56:2097]) rebooted! !Reboot 72057594037927937 (actor [42:56:2097]) tablet resolver refreshed! new actor is[42:89:2117] Leader for TabletID 72057594037927937 is [42:89:2117] sender: [42:143:2057] recipient: [42:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [43:54:2057] recipient: [43:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [43:54:2057] recipient: [43:52:2095] Leader for TabletID 72057594037927937 is [43:56:2097] sender: [43:57:2057] recipient: [43:52:2095] Leader for TabletID 72057594037927937 is [43:56:2097] sender: [43:74:2057] recipient: [43:14:2061] !Reboot 72057594037927937 (actor [43:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! 
Leader for TabletID 72057594037927937 is [43:56:2097] sender: [43:84:2057] recipient: [43:36:2083] Leader for TabletID 72057594037927937 is [43:56:2097] sender: [43:86:2057] recipient: [43:14:2061] Leader for TabletID 72057594037927937 is [43:56:2097] sender: [43:88:2057] recipient: [43:87:2116] Leader for TabletID 72057594037927937 is [43:89:2117] sender: [43:90:2057] recipient: [43:87:2116] !Reboot 72057594037927937 (actor [43:56:2097]) rebooted! !Reboot 72057594037927937 (actor [43:56:2097]) tablet resolver refreshed! new actor is[43:89:2117] Leader for TabletID 72057594037927937 is [43:89:2117] sender: [43:143:2057] recipient: [43:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [44:54:2057] recipient: [44:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [44:54:2057] recipient: [44:51:2095] Leader for TabletID 72057594037927937 is [44:56:2097] sender: [44:57:2057] recipient: [44:51:2095] Leader for TabletID 72057594037927937 is [44:56:2097] sender: [44:74:2057] recipient: [44:14:2061] !Reboot 72057594037927937 (actor [44:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [44:56:2097] sender: [44:85:2057] recipient: [44:36:2083] Leader for TabletID 72057594037927937 is [44:56:2097] sender: [44:88:2057] recipient: [44:14:2061] Leader for TabletID 72057594037927937 is [44:56:2097] sender: [44:89:2057] recipient: [44:87:2116] Leader for TabletID 72057594037927937 is [44:90:2117] sender: [44:91:2057] recipient: [44:87:2116] !Reboot 72057594037927937 (actor [44:56:2097]) rebooted! !Reboot 72057594037927937 (actor [44:56:2097]) tablet resolver refreshed! new actor is[44:90:2117] Leader for TabletID 72057594037927937 is [44:90:2117] sender: [44:144:2057] recipient: [44:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:54:2057] recipient: [45:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:54:2057] recipient: [45:51:2095] Leader for TabletID 72057594037927937 is [45:56:2097] sender: [45:57:2057] recipient: [45:51:2095] Leader for TabletID 72057594037927937 is [45:56:2097] sender: [45:74:2057] recipient: [45:14:2061] >> TCmsTest::TestKeepAvailableMode >> TCmsTest::ManagePermissions [GOOD] >> TCmsTest::ManagePermissionWrongRequest >> TCmsTest::RequestRestartServicesOk [GOOD] >> TCmsTest::RequestRestartServicesReject >> TCmsTest::TestForceRestartMode >> TCmsTest::WalleTasks >> KqpPg::InsertNoTargetColumns_Serial-useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefault+useSink >> TCmsTest::StateStorageNodesFromOneRing >> KqpQuery::ReadOverloaded-StreamLookup [GOOD] >> KqpPg::Insert_Serial-useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultText+useSink >> KqpPg::DropIndex [GOOD] >> KqpPg::CreateUniqPgColumn+useSink >> TCmsTest::TestKeepAvailableModeScheduled >> KqpLimits::DatashardProgramSize-useSink [GOOD] >> KqpLimits::DatashardReplySize >> KqpLimits::TooBigColumn-useSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::ReadOverloaded-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 22784, MsgBus: 21425 2025-03-31T16:20:19.033786Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488009277720496649:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:19.033857Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0009e7/r3tmp/tmpXnsby1/pdisk_1.dat 2025-03-31T16:20:19.329751Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22784, node 1 2025-03-31T16:20:19.399716Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:19.399741Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:19.399749Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:19.399864Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:20:19.402875Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:19.402976Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:19.404696Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21425 TClient is connected to server localhost:21425 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:19.823502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:19.840233Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:20:19.854992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:19.963125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:20.109795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:20.175896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:20:22.220199Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009290605400322:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:22.220316Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:22.529406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:20:22.554139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:20:22.578831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:20:22.602933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:20:22.625218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:20:22.653590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:20:22.688435Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009290605400829:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:22.688508Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:22.688546Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009290605400834:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:22.691295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:20:22.699262Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488009290605400836:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:20:22.781680Z node 1 :TX_PROXY ERROR: Actor# [1:7488009290605400890:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 17005, MsgBus: 27063 2025-03-31T16:20:27.416687Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:101:2147], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:20:27.417113Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:20:27.417215Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0009e7/r3tmp/tmpLryxwF/pdisk_1.dat TServer::EnableGrpc on GrpcPort 17005, node 2 2025-03-31T16:20:27.847040Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:20:27.850223Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:27.850288Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:27.850331Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:27.850803Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:20:27.889082Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:27.889211Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:27.900388Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27063 TClient is connected to server localhost:27063 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:28.127129Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:28.166850Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:28.428987Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
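[editor note] The recurring pattern above — "Resource pool default not found" warnings, then TPoolCreatorActor's "Transaction ... completed, doublechecking" retry, then the TX_PROXY "path exist, request accepts it" error — is the workload manager lazily creating the default resource pool on first use; the TX_PROXY error is the benign loser of a create race. A minimal sketch of creating a pool explicitly with the YDB C++ SDK, assuming a local single-node endpoint, an in-tree SDK include layout, and a YDB build that accepts resource pools as SQL DDL; the pool name and limits are illustrative:

    #include <ydb/public/sdk/cpp/client/ydb_driver/driver.h>
    #include <ydb/public/sdk/cpp/client/ydb_table/table.h>

    using namespace NYdb;
    using namespace NYdb::NTable;

    int main() {
        TDriver driver(TDriverConfig()
            .SetEndpoint("localhost:2136")   // assumption: local single-node setup
            .SetDatabase("/Root"));
        TTableClient client(driver);

        TStatus status = client.RetryOperationSync([](TSession session) -> TStatus {
            // DDL goes through a scheme query; pool name and limits are illustrative.
            return session.ExecuteSchemeQuery(R"(
                CREATE RESOURCE POOL test_pool WITH (
                    CONCURRENT_QUERY_LIMIT = 10,
                    QUEUE_SIZE = 100
                );
            )").GetValueSync();
        });

        driver.Stop(true);
        return status.IsSuccess() ? 0 : 1;
    }

Creating the pool ahead of time avoids the doublechecking retry on the first query, though the race the log shows is harmless either way.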
2025-03-31T16:20:28.894438Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:29.184716Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:29.715357 ... }. InternalError: OVERLOADED DEFAULT_ERROR: {
: Error: Table '/Root/SecondaryKeys/Index/indexImplTable' retry limit exceeded. }. 2025-03-31T16:20:34.596415Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:3289:4557], TxId: 281474976715674, task: 2. Ctx: { TraceId : 01jqpg2cg02h5sd7n8j7r1q494. SessionId : ydb://session/3?node_id=2&id=YzU0MzRjNWQtNmFjODY3MGQtN2Y5YzZlYjEtYzliNTM1ODM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:3282:4087], status: OVERLOADED, reason: {
: Error: Terminate execution } 2025-03-31T16:20:34.596731Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:3290:4558], TxId: 281474976715674, task: 3. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YzU0MzRjNWQtNmFjODY3MGQtN2Y5YzZlYjEtYzliNTM1ODM=. TraceId : 01jqpg2cg02h5sd7n8j7r1q494. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:3282:4087], status: OVERLOADED, reason: {
: Error: Terminate execution } 2025-03-31T16:20:34.597063Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:3291:4559], TxId: 281474976715674, task: 4. Ctx: { TraceId : 01jqpg2cg02h5sd7n8j7r1q494. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YzU0MzRjNWQtNmFjODY3MGQtN2Y5YzZlYjEtYzliNTM1ODM=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:3282:4087], status: OVERLOADED, reason: {
: Error: Terminate execution } 2025-03-31T16:20:34.597753Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YzU0MzRjNWQtNmFjODY3MGQtN2Y5YzZlYjEtYzliNTM1ODM=, ActorId: [2:2702:4087], ActorState: ExecuteState, TraceId: 01jqpg2cg02h5sd7n8j7r1q494, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 15432, MsgBus: 19170 2025-03-31T16:20:38.531145Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:304:2347], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:20:38.531391Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:20:38.531485Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0009e7/r3tmp/tmpH39vyq/pdisk_1.dat TServer::EnableGrpc on GrpcPort 15432, node 3 2025-03-31T16:20:38.956862Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:20:38.957856Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:38.957913Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:38.957951Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:38.958345Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:20:38.993506Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:38.993613Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:39.005150Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19170 TClient is connected to server localhost:19170 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:39.339503Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:39.395240Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:39.709682Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:20:40.134732Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:40.435868Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:41.037484Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1804:3403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:41.037731Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:41.061069Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:20:41.301122Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:20:41.548434Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:20:41.814625Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:20:42.064520Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:20:42.395278Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:20:42.708964Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:2394:3856], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:42.709113Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:42.709497Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:2399:3861], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:42.716441Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:20:42.875435Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:2401:3863], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:20:42.921999Z node 3 :TX_PROXY ERROR: Actor# [3:2464:3907] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:20:43.868990Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:20:44.136337Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-31T16:20:44.497956Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-31T16:20:46.250508Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:3278:4550], TxId: 281474976715674, task: 1. Ctx: { SessionId : ydb://session/3?node_id=3&id=NTQ3ZGFjZDktODY4MDM0MjYtMmE0ZjgwNGItOTdkNzEyYWQ=. CustomerSuppliedId : . TraceId : 01jqpg2qnkfthmbnw7mjg9fp9t. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Source[0] fatal error: {
: Error: Table '/Root/SecondaryKeys' retry limit exceeded. } 2025-03-31T16:20:46.250647Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:3278:4550], TxId: 281474976715674, task: 1. Ctx: { SessionId : ydb://session/3?node_id=3&id=NTQ3ZGFjZDktODY4MDM0MjYtMmE0ZjgwNGItOTdkNzEyYWQ=. CustomerSuppliedId : . TraceId : 01jqpg2qnkfthmbnw7mjg9fp9t. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: OVERLOADED DEFAULT_ERROR: {
: Error: Table '/Root/SecondaryKeys' retry limit exceeded. }. 2025-03-31T16:20:46.251767Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:3279:4551], TxId: 281474976715674, task: 2. Ctx: { TraceId : 01jqpg2qnkfthmbnw7mjg9fp9t. SessionId : ydb://session/3?node_id=3&id=NTQ3ZGFjZDktODY4MDM0MjYtMmE0ZjgwNGItOTdkNzEyYWQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [3:3272:4086], status: OVERLOADED, reason: {
: Error: Terminate execution } 2025-03-31T16:20:46.252655Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTQ3ZGFjZDktODY4MDM0MjYtMmE0ZjgwNGItOTdkNzEyYWQ=, ActorId: [3:2699:4086], ActorState: ExecuteState, TraceId: 01jqpg2qnkfthmbnw7mjg9fp9t, Create QueryResponse for error on request, msg: >> TCmsTest::ManagePermissionWrongRequest [GOOD] >> TCmsTest::ManageRequests >> TCmsTest::ScheduledEmergencyDuringRollingRestart [GOOD] >> TCmsTest::ScheduledWalleRequestDuringRollingRestart >> TCmsTest::StateStorageNodesFromOneRing [GOOD] >> TCmsTest::StateStorageTwoBrokenRings >> TCmsTest::RequestRestartServicesReject [GOOD] >> TCmsTest::RequestRestartServicesPartial >> TCmsTest::TestKeepAvailableMode [GOOD] >> TCmsTest::TestKeepAvailableModeDisconnects ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::TooBigColumn-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 4390, MsgBus: 13380 2025-03-31T16:20:14.960900Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488009254508895403:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:14.960986Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a02/r3tmp/tmpPh4gYu/pdisk_1.dat 2025-03-31T16:20:15.272887Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4390, node 1 2025-03-31T16:20:15.338335Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:15.338507Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:15.339812Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:15.339839Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:15.339850Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:15.340001Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:20:15.340284Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13380 TClient is connected to server localhost:13380 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
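[editor note] KqpQuery::ReadOverloaded above ends with InternalError: OVERLOADED and "Table '/Root/SecondaryKeys' retry limit exceeded": the test deliberately overloads the datashards until the server-side retry budget is exhausted, and the query fails with a retryable status. A minimal sketch of handling that status client-side with the C++ SDK's built-in retry loop — endpoint, database, and retry count are assumptions, the include paths follow the in-tree layout, and the table name is taken from the test output:

    #include <ydb/public/sdk/cpp/client/ydb_driver/driver.h>
    #include <ydb/public/sdk/cpp/client/ydb_table/table.h>

    using namespace NYdb;
    using namespace NYdb::NTable;

    int main() {
        TDriver driver(TDriverConfig()
            .SetEndpoint("localhost:2136")   // assumption: local single-node setup
            .SetDatabase("/Root"));
        TTableClient client(driver);

        // RetryOperationSync re-runs the callback on retryable statuses such as
        // OVERLOADED and UNAVAILABLE, with exponential backoff between attempts.
        TStatus status = client.RetryOperationSync([](TSession session) -> TStatus {
            return session.ExecuteDataQuery(
                "SELECT COUNT(*) FROM `/Root/SecondaryKeys`;",
                TTxControl::BeginTx(TTxSettings::SerializableRW()).CommitTx()
            ).GetValueSync();
        }, TRetryOperationSettings().MaxRetries(10));

        driver.Stop(true);
        return status.IsSuccess() ? 0 : 1;
    }

In the test itself the failure is the expected outcome; in application code the retry settings bound how long OVERLOADED is tolerated before the error is surfaced.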
2025-03-31T16:20:15.799050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:17.511212Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009267393797967:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:17.511283Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009267393797958:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:17.511383Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:17.516073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:20:17.526103Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488009267393797972:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:20:17.591072Z node 1 :TX_PROXY ERROR: Actor# [1:7488009267393798023:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:20:17.865397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 24440, MsgBus: 15404 2025-03-31T16:20:18.619273Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488009273368998977:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:18.619323Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a02/r3tmp/tmpzzm7Q9/pdisk_1.dat 2025-03-31T16:20:18.706673Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24440, node 2 2025-03-31T16:20:18.749407Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:18.749506Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:18.751107Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:20:18.764842Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:18.764861Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:18.764867Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:18.764956Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15404 TClient is connected to server localhost:15404 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:19.148868Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:20:19.153779Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:20:21.468922Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488009286253901496:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:21.468997Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:21.469345Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488009286253901531:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:21.472451Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:20:21.478715Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488009286253901534:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:20:21.573940Z node 2 :TX_PROXY ERROR: Actor# [2:7488009286253901585:2334] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:20:21.597191Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-31T16:20:21.715818Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7488009286253901751:2348], SessionActorId: [2:7488009286253901735:2348], statusCode=PRECONDITION_FAILED. Issue=
: Error: Stream write queries aren't allowed., code: 2029 . sessionActorId=[2:7488009286253901735:2348]. isRollback=0 2025-03-31T16:20:21.716056Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NWU5YjZmNGQtN2I3ODJhNjMtYmQzZmM2YTAtZWRjMWFkNDQ=, ActorId: [2:7488009286253901735:2348], ActorState: ExecuteState, TraceId: 01jqpg21007shgnnhf7a22vjdd, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [2:7488009286253901752:2348] from: [2:7488009286253901751:2348] 2025-03-31T16:20:21.716132Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7488009286253901752:2348] TxId: 281474976715661. Ctx: { TraceId: 01jqpg21007shgnnhf7a22vjdd, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NWU5YjZmNGQtN2I3ODJhNjMtYmQzZmM2YTAtZWRjMWFkNDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Stream write queries aren't allowed., code: 2029 } 2025-03-31T16:20:21.716296Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7488009286253901755:2348], TxId: 281474976715661, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWU5YjZmNGQtN2I3ODJhNjMtYmQzZmM2YTAtZWRjMWFkNDQ=. TraceId : 01jqpg21007shgnnhf7a22vjdd. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7488009286253901752:2348], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-03-31T16:20:21.717452Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NWU5YjZmNGQtN2I3ODJhNjMtYmQzZmM2YTAtZWRjMWFkNDQ=, ActorId: [2:7488009286253901735:2348], ActorState: ExecuteState, TraceId: 01jqpg21007shgnnhf7a22vjdd, Create QueryResponse for error on request, msg: ... zOC1jYjc1Yjhm. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [3:7488009307276086644:2404], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-03-31T16:20:26.633512Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZDBjNWZiNi01MWZkNzFjNS0yMTY1YWUzOC1jYjc1Yjhm, ActorId: [3:7488009302981118633:2404], ActorState: ExecuteState, TraceId: 01jqpg250jee0wyc76m1jw3g9b, Create QueryResponse for error on request, msg: VERIFY failed (2025-03-31T16:20:26.636745Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:372, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Error: Stream write queries aren't allowed., code: 2029 library/cpp/testing/unittest/registar.cpp:37 RaiseError(): requirement UnittestThread failed 2025-03-31T16:20:27.282200Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488009290096215007:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:27.282293Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:20:37.352182Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:20:37.352219Z node 3 :IMPORT WARN: Table profiles were not loaded 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x19532108 1. /-S/util/system/yassert.cpp:55: Panic @ 0x1952039A 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:37: RaiseError @ 0x199AF681 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:372: AssertSuccessResult @ 0x18D20B96 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:348: CreateSampleTables @ 0x48EC51B6 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:544: operator() @ 0x48F08603 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x48F08603 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x48F08603 8. /-S/util/thread/pool.h:71: Process @ 0x48F08603 9. /-S/util/thread/pool.cpp:411: DoExecute @ 0x195452B5 10. /-S/util/thread/factory.h:15: Execute @ 0x19541E4C 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x19541E4C 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x19536554 13. /tmp//-S/contrib/libs/clang18-rt/lib/asan/asan_interceptors.cpp:239: asan_thread_start @ 0x191E7DE8 14. ??:0: ?? @ 0x7FC59F8E7AC2 15. ??:0: ?? @ 0x7FC59F97984F Trying to start YDB, gRPC: 20467, MsgBus: 19899 2025-03-31T16:20:42.152639Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488009376329055378:2080];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:42.155216Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a02/r3tmp/tmpXLkdyG/pdisk_1.dat 2025-03-31T16:20:42.502101Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:20:42.521809Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:42.521889Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:42.523837Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20467, node 1 2025-03-31T16:20:42.591941Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:42.591974Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:42.591987Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:42.592117Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19899 TClient is connected to server localhost:19899 WaitRootIsUp 'Root'... 
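The VERIFY above is a unit-test assertion escalating into a process abort: frame 3 of the trace names AssertSuccessResult (ydb/core/kqp/ut/common/kqp_ut_common.h:372), which checks result.IsSuccess(), and because the check runs on a thread-pool thread (frames 5 through 11), RaiseError's "requirement UnittestThread failed" turns the failure into a VERIFY crash rather than an ordinary failed test. A sketch of the helper consistent with the trace; the exact body and the status header path are assumptions:

    #include <library/cpp/testing/unittest/registar.h>
    #include <ydb/public/sdk/cpp/client/ydb_types/status/status.h>  // header path is an assumption

    // Asserts that a YDB SDK call succeeded, attaching the issues (here
    // "Stream write queries aren't allowed., code: 2029") to the failure message.
    inline void AssertSuccessResult(const NYdb::TStatus& result) {
        UNIT_ASSERT_C(result.IsSuccess(), result.GetIssues().ToString());
    }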
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:43.074291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:43.100589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:20:43.223411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-31T16:20:43.386365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:43.450887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:45.133506Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009389213959020:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:45.133599Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:45.431296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:20:45.460706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:20:45.497185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:20:45.527220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:20:45.557665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:20:45.624520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:20:45.678073Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009389213959535:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:45.678142Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:45.678303Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009389213959540:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:45.681764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:20:45.690744Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488009389213959542:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:20:45.793964Z node 1 :TX_PROXY ERROR: Actor# [1:7488009389213959598:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:20:47.151020Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488009376329055378:2080];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:47.151111Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:20:47.353959Z node 1 :TX_DATASHARD ERROR: Transaction write column value of 20971522 bytes is larger than the allowed threshold 2025-03-31T16:20:47.354117Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976710671 at tablet 72075186224037911 status: EXEC_ERROR errors: BAD_ARGUMENT (Transaction write column value of 20971522 bytes is larger than the allowed threshold) | 2025-03-31T16:20:47.354322Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7488009397803894499:2488] TxId: 281474976710671. Ctx: { TraceId: 01jqpg2spm3v679v82gyxk8tej, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWM5NDAzMzItNDM3ZDczNDctNDkzYWYwOWYtNTI2ZGIxZGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. EXEC_ERROR: [BAD_ARGUMENT] Transaction write column value of 20971522 bytes is larger than the allowed threshold; 2025-03-31T16:20:47.365747Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWM5NDAzMzItNDM3ZDczNDctNDkzYWYwOWYtNTI2ZGIxZGI=, ActorId: [1:7488009393508927151:2488], ActorState: ExecuteState, TraceId: 01jqpg2spm3v679v82gyxk8tej, Create QueryResponse for error on request, msg:
: Error: Error executing transaction (ExecError): Execution failed
: Error: [BAD_ARGUMENT] Transaction write column value of 20971522 bytes is larger than the allowed threshold >> TCmsTest::TestForceRestartMode [GOOD] >> TCmsTest::TestForceRestartModeDisconnects >> KqpLimits::ManyPartitionsSortingLimit [GOOD] >> KqpPg::DuplicatedColumns-useSink [GOOD] >> KqpPg::InsertFromSelect_NoReorder+useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbOlapStore::LogCountByResource [GOOD] Test command err: 2025-03-31T16:16:57.355465Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008408932943799:2211];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:57.356166Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:16:57.391097Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008408793325560:2141];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:57.392835Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:16:57.397316Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488008410494008013:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:16:57.397748Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dc8/r3tmp/tmpFiLboq/pdisk_1.dat 2025-03-31T16:16:57.815880Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:57.816018Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:57.816194Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:57.816261Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:16:57.820658Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:16:57.820704Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-03-31T16:16:57.822317Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:16:57.822694Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:16:57.855932Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:16:57.856021Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 3255, node 1 2025-03-31T16:16:57.869005Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:57.873086Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:16:57.882223Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:16:57.882375Z node 1 :GRPC_SERVER WARN: 
SchemeBoardDelete /Root Strong=0 2025-03-31T16:16:57.970693Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:16:57.970730Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:16:57.970745Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:16:57.970941Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16916 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:16:58.320577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:02.353391Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008408932943799:2211];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:02.353469Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:17:02.389312Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488008408793325560:2141];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:02.389374Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:17:02.397072Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488008410494008013:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:02.397125Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Killing node 1 Killing node 2 2025-03-31T16:17:12.864428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:17:12.864464Z node 1 :IMPORT WARN: Table profiles were not loaded Killing node 3 2025-03-31T16:17:20.259472Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7488008507952525448:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:20.259537Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: 
SetPath # /home/runner/.ya/build/build_root/0l5c/000dc8/r3tmp/tmpsy1dte/pdisk_1.dat 2025-03-31T16:17:20.464552Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:20.489085Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:20.489199Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:20.494959Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1708, node 5 2025-03-31T16:17:20.637397Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:20.637425Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:20.637433Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:20.637635Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17347 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:20.876533Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
TClient is connected to server localhost:17347 2025-03-31T16:17:21.194831Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "Root" DiffACL: "\n\033\010\000\022\027\010\001\020\200\200\002\032\ralice@builtin \003" } } TxId: 281474976715658 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-03-31T16:17:21.195016Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: //Root, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:17:21.195165Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046644480, LocalPathId: 1] name: Root type: EPathTypeDir state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-03-31T16:17:21.195192Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-03-31T16:17:21.195383Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2025-03-31T16:17:21.195422Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:17:21.195517Z node 5 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2025-03-31T16:17:21.195544Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715658 ready parts: 1/1 2025-03-31T16:17:21.195567Z node 5 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2025-03-31T16:17:21.195577Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715658 ready parts: 1/1 2025-03-31T16:17:21.195619Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-03-31T16:17:21.195669Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715658, ready parts: 1/1, is published: false 2025-03-31T16:17:21.195692Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-03-31T16:17:21.195705Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715658 ready parts: 1/1 2025-03-31T16:17:21.195714Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2025-03-31T16:17:21.195731Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication still ... 
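The ESchemeOpModifyACL proposal above carries the ACL change as escaped DiffACL bytes granting something to alice@builtin; the permission itself is not recoverable from the escape sequence. A hedged sketch of the client-side call that yields such a proposal via the C++ SDK scheme client, with "ydb.generic.read" as an assumed permission name:

    #include <ydb/public/sdk/cpp/client/ydb_scheme/scheme.h>

    // Grants a permission on /Root; the schemeshard then logs the proposal as
    // ESchemeOpModifyACL and publishes the bumped path version, as above.
    NYdb::TStatus GrantToAlice(const NYdb::TDriver& driver) {
        NYdb::NScheme::TSchemeClient client(driver);
        return client.ModifyPermissions("/Root",
            NYdb::NScheme::TModifyPermissionsSettings()
                .AddGrantPermissions({"alice@builtin", {"ydb.generic.read"}}))
            .GetValueSync();
    }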
4];TabletId=72075186224037889;ScanId=48;TxId=281474976710673;ScanGen=1;task_identifier=;fline=actor.cpp:84;event=TEvTaskProcessedResult; 2025-03-31T16:20:43.449182Z node 50 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[50:7488009377847610582:3593];TabletId=72075186224037888;ScanId=48;TxId=281474976710673;ScanGen=1;task_identifier=;fline=source.cpp:50;event=source_ready;intervals_count=1;source_idx=44; 2025-03-31T16:20:43.449252Z node 50 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[50:7488009377847610582:3593];TabletId=72075186224037888;ScanId=48;TxId=281474976710673;ScanGen=1;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=3,6;column_names=resource_id,resource_type;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=3,4,5,6;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=3,6;column_names=resource_id,resource_type;);;program_input=(column_ids=3,6;column_names=resource_id,resource_type;);;;); 2025-03-31T16:20:43.449265Z node 50 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[50:7488009377847610582:3593];TabletId=72075186224037888;ScanId=48;TxId=281474976710673;ScanGen=1;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-03-31T16:20:43.449272Z node 50 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[50:7488009377847610583:3594];TabletId=72075186224037889;ScanId=48;TxId=281474976710673;ScanGen=1;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=3,6;column_names=resource_id,resource_type;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=3,4,5,6;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=3,6;column_names=resource_id,resource_type;);;program_input=(column_ids=3,6;column_names=resource_id,resource_type;);;;); 2025-03-31T16:20:43.449294Z node 50 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[50:7488009377847610583:3594];TabletId=72075186224037889;ScanId=48;TxId=281474976710673;ScanGen=1;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-03-31T16:20:43.449331Z node 50 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[50:7488009377847610582:3593];TabletId=72075186224037888;ScanId=48;TxId=281474976710673;ScanGen=1;task_identifier=;method=produce result;fline=actor.cpp:217;stage=no data is ready yet;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=3,6;column_names=resource_id,resource_type;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=3,4,5,6;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=3,6;column_names=resource_id,resource_type;);;program_input=(column_ids=3,6;column_names=resource_id,resource_type;);;;); 2025-03-31T16:20:43.449361Z node 50 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[50:7488009377847610582:3593];TabletId=72075186224037888;ScanId=48;TxId=281474976710673;ScanGen=1;task_identifier=;fline=actor.cpp:84;event=TEvTaskProcessedResult; 2025-03-31T16:20:43.449379Z node 50 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[50:7488009377847610582:3593];TabletId=72075186224037888;ScanId=48;TxId=281474976710673;ScanGen=1;task_identifier=;fline=fetching.cpp:17;event=apply; 2025-03-31T16:20:43.449389Z node 50 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[50:7488009377847610583:3594];TabletId=72075186224037889;ScanId=48;TxId=281474976710673;ScanGen=1;task_identifier=;method=produce result;fline=actor.cpp:217;stage=no 
data is ready yet;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=3,6;column_names=resource_id,resource_type;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=3,4,5,6;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=3,6;column_names=resource_id,resource_type;);;program_input=(column_ids=3,6;column_names=resource_id,resource_type;);;;); 2025-03-31T16:20:43.449398Z node 50 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[50:7488009377847610582:3593];TabletId=72075186224037888;ScanId=48;TxId=281474976710673;ScanGen=1;task_identifier=;fline=interval.cpp:28;event=fetched;interval_idx=0; 2025-03-31T16:20:43.449414Z node 50 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[50:7488009377847610582:3593];TabletId=72075186224037888;ScanId=48;TxId=281474976710673;ScanGen=1;task_identifier=;fline=interval.cpp:11;event=skip_construct_result;interval_idx=0;count=100;ready=29;interval_id=31; 2025-03-31T16:20:43.449429Z node 50 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[50:7488009377847610582:3593];TabletId=72075186224037888;ScanId=48;TxId=281474976710673;ScanGen=1;task_identifier=;fline=source.cpp:50;event=source_ready;intervals_count=1;source_idx=33; 2025-03-31T16:20:43.449432Z node 50 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[50:7488009377847610583:3594];TabletId=72075186224037889;ScanId=48;TxId=281474976710673;ScanGen=1;task_identifier=;fline=actor.cpp:84;event=TEvTaskProcessedResult; 2025-03-31T16:20:43.449532Z node 50 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[50:7488009377847610582:3593];TabletId=72075186224037888;ScanId=48;TxId=281474976710673;ScanGen=1;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=3,6;column_names=resource_id,resource_type;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=3,4,5,6;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=3,6;column_names=resource_id,resource_type;);;program_input=(column_ids=3,6;column_names=resource_id,resource_type;);;;); 2025-03-31T16:20:43.449540Z node 50 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[50:7488009377847610583:3594];TabletId=72075186224037889;ScanId=48;TxId=281474976710673;ScanGen=1;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=3,6;column_names=resource_id,resource_type;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=3,4,5,6;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=3,6;column_names=resource_id,resource_type;);;program_input=(column_ids=3,6;column_names=resource_id,resource_type;);;;); 2025-03-31T16:20:43.449552Z node 50 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[50:7488009377847610582:3593];TabletId=72075186224037888;ScanId=48;TxId=281474976710673;ScanGen=1;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-03-31T16:20:43.449559Z node 50 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[50:7488009377847610583:3594];TabletId=72075186224037889;ScanId=48;TxId=281474976710673;ScanGen=1;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-03-31T16:20:43.449647Z node 50 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[50:7488009377847610582:3593];TabletId=72075186224037888;ScanId=48;TxId=281474976710673;ScanGen=1;task_identifier=;method=produce result;fline=actor.cpp:217;stage=no data is ready 
yet;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=3,6;column_names=resource_id,resource_type;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=3,4,5,6;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=3,6;column_names=resource_id,resource_type;);;program_input=(column_ids=3,6;column_names=resource_id,resource_type;);;;); 2025-03-31T16:20:43.449661Z node 50 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[50:7488009377847610583:3594];TabletId=72075186224037889;ScanId=48;TxId=281474976710673;ScanGen=1;task_identifier=;method=produce result;fline=actor.cpp:217;stage=no data is ready yet;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=3,6;column_names=resource_id,resource_type;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=3,4,5,6;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=3,6;column_names=resource_id,resource_type;);;program_input=(column_ids=3,6;column_names=resource_id,resource_type;);;;); 2025-03-31T16:20:43.449694Z node 50 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[50:7488009377847610582:3593];TabletId=72075186224037888;ScanId=48;TxId=281474976710673;ScanGen=1;task_identifier=;fline=actor.cpp:84;event=TEvTaskProcessedResult; 2025-03-31T16:20:43.449709Z node 50 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[50:7488009377847610582:3593];TabletId=72075186224037888;ScanId=48;TxId=281474976710673;ScanGen=1;task_identifier=;fline=fetching.cpp:17;event=apply; 2025-03-31T16:20:43.449710Z node 50 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[50:7488009377847610583:3594];TabletId=72075186224037889;ScanId=48;TxId=281474976710673;ScanGen=1;task_identifier=;fline=actor.cpp:84;event=TEvTaskProcessedResult; 2025-03-31T16:20:43.449728Z node 50 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[50:7488009377847610582:3593];TabletId=72075186224037888;ScanId=48;TxId=281474976710673;ScanGen=1;task_identifier=;fline=interval.cpp:28;event=fetched;interval_idx=0; 2025-03-31T16:20:43.449745Z node 50 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[50:7488009377847610582:3593];TabletId=72075186224037888;ScanId=48;TxId=281474976710673;ScanGen=1;task_identifier=;fline=interval.cpp:11;event=skip_construct_result;interval_idx=0;count=100;ready=30;interval_id=31; 2025-03-31T16:20:43.449760Z node 50 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[50:7488009377847610582:3593];TabletId=72075186224037888;ScanId=48;TxId=281474976710673;ScanGen=1;task_identifier=;fline=source.cpp:50;event=source_ready;intervals_count=1;source_idx=22; 2025-03-31T16:20:43.449818Z node 50 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[50:7488009377847610583:3594];TabletId=72075186224037889;ScanId=48;TxId=281474976710673;ScanGen=1;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=3,6;column_names=resource_id,resource_type;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=3,4,5,6;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=3,6;column_names=resource_id,resource_type;);;program_input=(column_ids=3,6;column_names=resource_id,resource_type;);;;); 2025-03-31T16:20:43.449839Z node 50 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[50:7488009377847610583:3594];TabletId=72075186224037889;ScanId=48;TxId=281474976710673;ScanGen=1;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-03-31T16:20:43.449863Z node 50 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[50:7488009377847610582:3593];TabletId=72075186224037888;ScanId=48;TxId=281474976710673;ScanGen=1;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=3,6;column_names=resource_id,resource_type;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=3,4,5,6;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=3,6;column_names=resource_id,resource_type;);;program_input=(column_ids=3,6;column_names=resource_id,resource_type;);;;); 2025-03-31T16:20:43.449884Z node 50 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[50:7488009377847610582:3593];TabletId=72075186224037888;ScanId=48;TxId=281474976710673;ScanGen=1;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-03-31T16:20:43.449937Z node 50 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[50:7488009377847610583:3594];TabletId=72075186224037889;ScanId=48;TxId=281474976710673;ScanGen=1;task_identifier=;method=produce result;fline=actor.cpp:217;stage=no data is ready yet;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=3,6;column_names=resource_id,resource_type;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=3,4,5,6;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=3,6;column_names=resource_id,resource_type;);;program_input=(column_ids=3,6;column_names=resource_id,resource_type;);;;); >> TCmsTest::CollectInfo >> Cdc::EnqueueRequestProcessSend [GOOD] >> Cdc::InitialScanAndResolvedTimestamps >> TCmsTest::TestKeepAvailableModeScheduled [GOOD] >> TCmsTest::TestKeepAvailableModeScheduledDisconnects >> TCmsTest::RequestRestartServicesMultipleNodes >> KqpParams::InvalidJson [GOOD] >> YdbSdkSessionsPool::CustomPlan [GOOD] >> YdbSdkSessionsPool::FailTest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::ManyPartitionsSortingLimit [GOOD] Test command err: Trying to start YDB, gRPC: 7411, MsgBus: 18609 2025-03-31T16:20:29.877250Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488009319740826066:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:29.877438Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000909/r3tmp/tmpUST9kA/pdisk_1.dat 2025-03-31T16:20:30.171361Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7411, node 1 2025-03-31T16:20:30.236300Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:30.236388Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:30.238066Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:20:30.248908Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:30.248940Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:30.248952Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:30.249104Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to 
server localhost:18609 TClient is connected to server localhost:18609 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:30.769693Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:30.785960Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:30.927262Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:31.079410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:31.152998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:32.470049Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009332625729731:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:32.470211Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:32.691727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:20:32.715680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:20:32.740361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:20:32.765865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:20:32.814519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:20:32.882982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:20:32.919848Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009332625730245:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:32.919931Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:32.920015Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009332625730250:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:32.922832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:20:32.930465Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488009332625730252:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:20:33.023396Z node 1 :TX_PROXY ERROR: Actor# [1:7488009336920697601:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:20:33.732299Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:33.928577Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NWI1NmUzMDAtOGUwMmJkZWMtZjMwMzBlODEtNTViMzZhM2U=, ActorId: [1:7488009336920698230:2520], ActorState: ExecuteState, TraceId: 01jqpg2cxcd2hggn199ykg3ztj, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:999: Memory limit exception at ExecuteState, current limit is 1024 bytes.
: Error: ydb/core/kqp/session_actor/kqp_session_actor.cpp:999: Memory limit exception at ExecuteState, current limit is 1024 bytes. Trying to start YDB, gRPC: 16820, MsgBus: 21170 2025-03-31T16:20:34.725097Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488009342763961887:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:34.725151Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000909/r3tmp/tmpK5WM4u/pdisk_1.dat 2025-03-31T16:20:34.833955Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16820, node 2 2025-03-31T16:20:34.876425Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:34.876490Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:34.879897Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:20:34.903309Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:34.903329Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:34.903335Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:34.903431Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21170 TClient is connected to server localhost:21170 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:35.326493Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:35.347044Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:37.958339Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488009355648869133:2631], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:37.958429Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFe ... um\":24,\"Max\":24,\"Min\":24},\"Rows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"LastMessageMs\":{\"Count\":1,\"Sum\":64,\"Max\":64,\"Min\":64},\"ActiveMessageMs\":{\"Count\":1,\"Max\":64,\"Min\":4},\"FirstMessageMs\":{\"Count\":1,\"Sum\":4,\"Max\":4,\"Min\":4},\"Bytes\":{\"Count\":1,\"Sum\":7713,\"Max\":7713,\"Min\":7713,\"History\":[22,846,64,1510,87,7713]},\"ActiveTimeUs\":{\"Count\":1,\"Sum\":60000,\"Max\":60000,\"Min\":60000}},\"Name\":\"RESULT\",\"Push\":{\"Rows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"LastMessageMs\":{\"Count\":1,\"Sum\":64,\"Max\":64,\"Min\":64},\"Chunks\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":59,\"Max\":59,\"Min\":59},\"FirstMessageMs\":{\"Count\":1,\"Sum\":4,\"Max\":4,\"Min\":4},\"ActiveMessageMs\":{\"Count\":1,\"Max\":64,\"Min\":4},\"PauseMessageMs\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"ActiveTimeUs\":{\"Count\":1,\"Sum\":60000,\"Max\":60000,\"Min\":60000},\"WaitTimeUs\":{\"Count\":1,\"Sum\":55530,\"Max\":55530,\"Min\":55530,\"History\":[22,19530,64,44686,87,55530]},\"WaitPeriods\":{\"Count\":1,\"Sum\":22,\"Max\":22,\"Min\":22},\"WaitMessageMs\":{\"Count\":1,\"Max\":59,\"Min\":3}}}],\"MaxMemoryUsage\":{\"Count\":1,\"Sum\":1048576,\"Max\":1048576,\"Min\":1048576,\"History\":[2,1048576,87,1048576]},\"DurationUs\":{\"Count\":1,\"Sum\":84000,\"Max\":84000,\"Min\":84000},\"InputBytes\":{\"Count\":1,\"Sum\":8168,\"Max\":8168,\"Min\":8168},\"ResultRows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"Tasks\":1,\"ResultBytes\":{\"Count\":1,\"Sum\":7713,\"Max\":7713,\"Min\":7713},\"OutputRows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"FinishedTasks\":1,\"InputRows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"PhysicalStageId\":1,\"StageDurationUs\":84000,\"BaseTimeMs\":1743438043299,\"OutputBytes\":{\"Count\":1,\"Sum\":7713,\"Max\":7713,\"Min\":7713},\"CpuTimeUs\":{\"Count\":1,\"Sum\":8846,\"Max\":8846,\"Min\":8846,\"History\":[1,486,22,2458,64,3681,87,8846]},\"Input\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":30,\"Max\":30,\"Min\":30},\"Rows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"LastMessageMs\":{\"Count\":1,\"Sum\":63,\"Max\":63,\"Min\":63},\"ActiveMessageMs\":{\"Count\":1,\"Max\":63,\"Min\":3},\"FirstMessageMs\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"Bytes\":{\"Count\":1,\"Sum\":8168,\"Max\":8168,\"Min\":8168,\"History\":[22,1101,64,1765,87,8168]},\"ActiveTimeUs\":{\"Count\":1,\"Sum\":60000,\"Max\":60000,\"Min\":60000}},\"Name\":\"2\",\"Push\":{\"Rows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"LastMessageMs\":{\"Count\":1,\"Sum\":59,\"Max\":59,\"Min\":59},\"Chunks\":{\"Count\":1,\"Sum\":100,\"Max\":100,\"Min\":100},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":59,\"Max\":59,\"Min\":59},\"FirstMessageMs\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"ActiveMessageMs\":{\"Count\":1,\"Max\":59,\"Min\":3},\"Bytes\":{\"Count\":1,\"Sum\":8168,\"Max\":8168,\"Min\":8168,\"History\":[22,4089,64,7020,87,8168]},\"PauseMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"ActiveTimeUs\":{\"Count\":1,\"Sum\":56000,\"Max\":56000,\"Min\":56000},\"WaitTimeUs\":{\"Count\":1,\"Sum\":13275,\"Max\":13275,\"Min\":13275,\"History\":[22,4594,64,10679,87,13275]},\"WaitPeriods\":{\"Count\":1,\"Sum\":25,\"Max\":25,\"Min\":25},\"WaitMessageMs\":{\"Count\":1,\"M
ax\":59,\"Min\":1}}}]}}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"Compilation\":{\"FromCache\":false,\"DurationUs\":115003,\"CpuTimeUs\":111684},\"ProcessCpuTimeUs\":364,\"TotalDurationUs\":326895,\"ResourcePoolId\":\"default\",\"QueuedTimeUs\":112255},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Plans\":[{\"PlanNodeId\":5,\"Operators\":[{\"Scan\":\"Parallel\",\"E-Size\":\"No estimate\",\"ReadRanges\":[\"Key (-\342\210\236, +\342\210\236)\"],\"Name\":\"TableFullScan\",\"Path\":\"\\/Root\\/ManyShardsTable\",\"ReadRangesPointPrefixLen\":\"0\",\"E-Rows\":\"No estimate\",\"Table\":\"ManyShardsTable\",\"ReadColumns\":[\"Data\",\"Key\"],\"E-Cost\":\"No estimate\"}],\"Node Type\":\"TableFullScan\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"PlanNodeType\":\"Query\"}}" query_ast: "(\n(let $1 (KqpTable \'\"/Root/ManyShardsTable\" \'\"72057594046644480:2\" \'\"\" \'1))\n(let $2 (KqpRowsSourceSettings $1 \'(\'\"Data\" \'\"Key\") \'(\'(\'\"Sorted\")) (Void) \'()))\n(let $3 (StructType \'(\'\"Data\" (OptionalType (DataType \'Int32))) \'(\'\"Key\" (OptionalType (DataType \'Uint32)))))\n(let $4 \'(\'(\'\"_logical_id\" \'367) \'(\'\"_id\" \'\"57998c39-fd70230b-2811ce9d-7bab334c\") \'(\'\"_wide_channels\" $3)))\n(let $5 (DqPhyStage \'((DqSource (DataSource \'\"KqpReadRangesSource\") $2)) (lambda \'($9) (block \'(\n (let $10 (lambda \'($11) (Member $11 \'\"Data\") (Member $11 \'\"Key\")))\n (return (FromFlow (ExpandMap (ToFlow $9) $10)))\n))) $4))\n(let $6 (DqCnMerge (TDqOutput $5 \'\"0\") \'(\'(\'1 \'\"Asc\"))))\n(let $7 (DqPhyStage \'($6) (lambda \'($12) (FromFlow (NarrowMap (ToFlow $12) (lambda \'($13 $14) (AsStruct \'(\'\"Data\" $13) \'(\'\"Key\" $14)))))) \'(\'(\'\"_logical_id\" \'379) \'(\'\"_id\" \'\"4ed999b4-45479c2f-6cf33a68-7b68d2c4\"))))\n(let $8 (DqCnResult (TDqOutput $7 \'\"0\") \'(\'\"Key\" \'\"Data\")))\n(return (KqpPhysicalQuery \'((KqpPhysicalTx \'($5 $7) \'($8) \'() \'(\'(\'\"type\" \'\"generic\")))) \'((KqpTxResultBinding (ListType $3) \'\"0\" \'\"0\")) \'(\'(\'\"type\" \'\"query\"))))\n)\n" total_duration_us: 326895 total_cpu_time_us: 200931 query_meta: 
"{\"query_database\":\"/Root\",\"query_parameter_types\":{},\"table_metadata\":[\"{\\\"DoesExist\\\":true,\\\"Cluster\\\":\\\"db\\\",\\\"Name\\\":\\\"/Root/ManyShardsTable\\\",\\\"SysView\\\":\\\"\\\",\\\"PathId\\\":{\\\"OwnerId\\\":72057594046644480,\\\"TableId\\\":2},\\\"SchemaVersion\\\":1,\\\"Kind\\\":1,\\\"Columns\\\":[{\\\"Name\\\":\\\"Data\\\",\\\"Id\\\":2,\\\"Type\\\":\\\"Int32\\\",\\\"TypeId\\\":1,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"Key\\\",\\\"Id\\\":1,\\\"Type\\\":\\\"Uint32\\\",\\\"TypeId\\\":2,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}}],\\\"KeyColunmNames\\\":[\\\"Key\\\"],\\\"RecordsCount\\\":0,\\\"DataSize\\\":0,\\\"StatsLoaded\\\":false}\"],\"table_meta_serialization_type\":2,\"created_at\":\"1743438043\",\"query_type\":\"QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY\",\"query_syntax\":\"1\",\"query_cluster\":\"db\",\"query_id\":\"ad6db561-27b40d7-478fcdac-3fbe26e5\",\"version\":\"1.0\"}" Trying to start YDB, gRPC: 16820, MsgBus: 21172 2025-03-31T16:20:44.320374Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488009385713634845:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:44.320432Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000909/r3tmp/tmpyO4ERc/pdisk_1.dat 2025-03-31T16:20:44.442490Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:20:44.463013Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:44.463124Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:44.464493Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16820, node 4 2025-03-31T16:20:44.508931Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:44.508959Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:44.508968Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:44.509099Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21172 TClient is connected to server localhost:21172 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:45.047520Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:45.061103Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:48.162332Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488009402893509424:2635], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:48.162385Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488009402893509434:2638], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:48.162429Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:48.166446Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-31T16:20:48.176925Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7488009402893509438:2639], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-31T16:20:48.236495Z node 4 :TX_PROXY ERROR: Actor# [4:7488009402893509501:5641] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
>> TCmsTest::ScheduledWalleRequestDuringRollingRestart [GOOD]
>> TCmsTest::SamePriorityRequest2
>> YdbSdkSessionsPool::StressTestSync10 [GOOD]
>> TCmsTest::ActionIssuePartialPermissions
>> TCmsTest::RequestRestartServicesPartial [GOOD]
>> TCmsTest::RequestRestartServicesNoUser
>> TCmsTest::ManageRequests [GOOD]
>> TCmsTest::EnableCMSRequestPrioritiesFeatureFlag
>> KqpTypes::MultipleCurrentUtcTimestamp [GOOD]
>> TCmsTest::TestKeepAvailableModeDisconnects [GOOD]
>> TCmsTest::TestForceRestartModeScheduled
>> TCmsTest::StateStorageTwoBrokenRings [GOOD]
>> TCmsTest::StateStorageRollingRestart
>> KqpPg::InsertValuesFromTableWithDefault+useSink [GOOD]
>> KqpPg::InsertValuesFromTableWithDefault-useSink
>> TCmsTest::TestForceRestartModeDisconnects [GOOD]
>> TCmsTest::StateStorageTwoRings
>> KqpPg::InsertValuesFromTableWithDefaultText+useSink [GOOD]
>> KqpPg::InsertValuesFromTableWithDefaultText-useSink
>> TCmsTest::CollectInfo [GOOD]
>> TCmsTest::DynamicConfig
------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::StressTestSync10 [GOOD]
Test command err: 2025-03-31T16:20:38.489549Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488009359401457897:2077];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:38.490189Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0005b1/r3tmp/tmpuk1Wkh/pdisk_1.dat 2025-03-31T16:20:38.899525Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:20:38.909991Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:38.910120Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:38.925143Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23454, node 1 2025-03-31T16:20:39.129033Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:39.129068Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:39.129075Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:39.129177Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25645 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:39.634566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:43.489237Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488009359401457897:2077];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:43.489370Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpParams::InvalidJson [GOOD] Test command err: Trying to start YDB, gRPC: 12466, MsgBus: 5305 2025-03-31T16:20:28.579517Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488009315666564029:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:28.579712Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00091e/r3tmp/tmpiKLOar/pdisk_1.dat 2025-03-31T16:20:28.850529Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12466, node 1 2025-03-31T16:20:28.935929Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:28.935964Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:28.935976Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:28.936124Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:20:28.946194Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:28.946328Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:28.948055Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5305 TClient is connected to server localhost:5305 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:29.405064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:29.429944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:29.557739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:29.683338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:29.741738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:31.634003Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009328551467701:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:31.634119Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:31.891278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:20:31.915768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:20:31.943253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:20:31.967870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:20:31.991821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:20:32.019077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:20:32.055148Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009332846435506:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:32.055224Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:32.055243Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009332846435511:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:32.058034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:20:32.065306Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488009332846435513:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:20:32.126023Z node 1 :TX_PROXY ERROR: Actor# [1:7488009332846435566:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:20:33.014922Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzZhMGU4MTctYTg3MGM5ZTEtYmJjMzA1MTMtMTE0M2RhMTY=, ActorId: [1:7488009332846435855:2496], ActorState: ExecuteState, TraceId: 01jqpg2bzb2c6r2n8kz6vqm6np, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:1294: ydb/core/kqp/query_data/kqp_query_data.cpp:266: Missing value for parameter: $group Trying to start YDB, gRPC: 28252, MsgBus: 2046 2025-03-31T16:20:33.709098Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488009336262619539:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:33.709159Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00091e/r3tmp/tmpfWftl0/pdisk_1.dat 2025-03-31T16:20:33.818263Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:20:33.844188Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:33.844280Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:33.845992Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28252, node 2 2025-03-31T16:20:33.876158Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:33.876179Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:33.876188Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:33.876294Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2046 TClient is connected to server localhost:2046 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:20:34.252179Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:34.269299Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:34.348234Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:34.487349Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:20:34.553563Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:20:36.365141Z node 2 :KQP_WORKLOAD_SE ... KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:42.147041Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:20:42.179126Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:20:42.210167Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:20:42.239387Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:20:42.276219Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:20:42.348854Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:20:42.435761Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488009374162188707:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:42.435842Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:42.436028Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488009374162188712:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:42.439555Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:20:42.449056Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488009374162188714:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:20:42.525151Z node 3 :TX_PROXY ERROR: Actor# [3:7488009374162188769:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:20:43.633829Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488009356982317242:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:43.633914Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 23901, MsgBus: 16290 2025-03-31T16:20:44.913870Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488009385821754306:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:44.913931Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00091e/r3tmp/tmpsjOlke/pdisk_1.dat 2025-03-31T16:20:45.009928Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23901, node 4 2025-03-31T16:20:45.049822Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:45.049929Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:45.052179Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:20:45.078939Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:45.078967Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:45.078976Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:45.079141Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16290 TClient is connected to server localhost:16290 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:20:45.528321Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:45.543516Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:45.619038Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:20:45.795474Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:45.879376Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-31T16:20:48.370147Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488009403001625274:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:48.370241Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:48.422493Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:20:48.456674Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:20:48.510026Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:20:48.543435Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:20:48.573889Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:20:48.643601Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:20:48.686558Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488009403001625790:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:48.686620Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:48.686664Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488009403001625795:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:48.689766Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:20:48.699046Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7488009403001625797:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:20:48.786269Z node 4 :TX_PROXY ERROR: Actor# [4:7488009403001625850:3450] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:20:49.648463Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:20:49.786889Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ODBlODExNTktZDk5ODFhYTQtN2U2YjA0ZGUtYWI5MTUwYTQ=, ActorId: [4:7488009407296593437:2496], ActorState: ExecuteState, TraceId: 01jqpg2wcrcz4ztejxqa794cq2, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:997: Invalid Json value
: Error: ydb/core/kqp/session_actor/kqp_session_actor.cpp:997: Invalid Json value
>> TCmsTest::TestTwoOrMoreDisksFromGroupAtTheSameRequestBlock42
>> YdbSdkSessionsPool::StressTestSync1 [GOOD]
>> TCmsTest::DynamicConfig [GOOD]
>> TCmsTest::DisabledEvictVDisks
>> KqpQuery::QueryCachePermissionsLoss [GOOD]
>> KqpPg::CreateTableBulkUpsertAndRead [GOOD]
>> KqpPg::CopyTableSerialColumns+useSink
>> TCmsTest::TestKeepAvailableModeScheduledDisconnects [GOOD]
>> TCmsTest::TestLoadLog
>> TCmsTest::RequestRestartServicesMultipleNodes [GOOD]
>> TCmsTest::RequestRestartServicesDryRun
>> Cdc::ResolvedTimestampForDisplacedUpsert [GOOD]
>> TCmsTest::StateStorageTwoRings [GOOD]
>> TCmsTest::SysTabletsNode
>> TCmsTest::RequestRestartServicesNoUser [GOOD]
>> TCmsTest::SamePriorityRequest2 [GOOD]
>> TCmsTest::EnableCMSRequestPrioritiesFeatureFlag [GOOD]
>> TDowntimeTest::AddDowntime [GOOD]
>> TDowntimeTest::HasUpcomingDowntime [GOOD]
>> TDowntimeTest::CleanupOldSegments [GOOD]
>> TCmsTest::StateRequestNode
------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::StressTestSync1 [GOOD]
Test command err: 2025-03-31T16:20:40.493000Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488009365667102417:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:40.493436Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00053b/r3tmp/tmpmsHjTA/pdisk_1.dat 2025-03-31T16:20:40.875682Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20971, node 1 2025-03-31T16:20:40.925227Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:20:40.925256Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:20:40.979623Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:40.979769Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:40.990717Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:40.990746Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:40.990753Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:40.990907Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:20:40.994296Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18808 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:41.286136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:45.492306Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488009365667102417:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:45.492402Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
>> TCmsTest::ActionIssuePartialPermissions [GOOD]
>> TCmsTest::ActionWithZeroDuration
>> TCmsTest::TestForceRestartModeScheduled [GOOD]
>> TCmsTest::TestForceRestartModeScheduledDisconnects
|90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TDowntimeTest::CleanupOldSegments [GOOD]
|90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::EnableCMSRequestPrioritiesFeatureFlag [GOOD]
>> TCmsTest::TestLoadLog [GOOD]
>> TCmsTest::TestLogOperationsRollback
|90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::SamePriorityRequest2 [GOOD]
|90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::RequestRestartServicesNoUser [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::QueryCachePermissionsLoss [GOOD]
Test command err: Trying to start YDB, gRPC: 27689, MsgBus: 28380 2025-03-31T16:20:31.057841Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488009327696428873:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:31.057929Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0008f9/r3tmp/tmpojJkiw/pdisk_1.dat 2025-03-31T16:20:31.411436Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27689, node 1 2025-03-31T16:20:31.455006Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:31.455148Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:31.456967Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 
2025-03-31T16:20:31.462350Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:31.462381Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:31.462390Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:31.462532Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28380 TClient is connected to server localhost:28380 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:31.921102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:31.960180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:32.093939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:32.244518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:32.310782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:33.792259Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009336286365259:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:33.792394Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:34.109400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:20:34.136267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:20:34.162257Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:20:34.189789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:20:34.217090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:20:34.250776Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:20:34.294201Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009340581333065:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:34.294284Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:34.294349Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009340581333070:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:34.298167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:20:34.308383Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488009340581333072:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:20:34.405008Z node 1 :TX_PROXY ERROR: Actor# [1:7488009340581333127:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:20:36.057798Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488009327696428873:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:36.057858Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 8860, MsgBus: 19747 2025-03-31T16:20:40.276864Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488009366090559944:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:40.276996Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0008f9/r3tmp/tmpN6hAto/pdisk_1.dat 2025-03-31T16:20:40.369555Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8860, node 2 2025-03-31T16:20:40.412036Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:40.412150Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:40.418198Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:20:40.439019Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:40.439042Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:40.439049Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:40.439198Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19747 TClient is connected to server localhost:19747 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-03-31T16:20:40.880160Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:40.898271Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:20:40.949966Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:41.165420Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:41.233775Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:43.479136Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] ... : 01jqpg2x5je9dw2vm1ddrvmrw3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YzJkMmYxYjktOGRkN2MwLTkwNjNjZDc1LTY4MDc0MjQz, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:20:50.692326Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488009386039851893:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:50.692394Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:20:50.717175Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7488009407514691238:3771], for# user0@builtin, access# UpdateRow 2025-03-31T16:20:50.717457Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715677. Error resolving keys for entry: { TableId: [OwnerId: 72057594046644480, LocalPathId: 9] Access: 2 SyncVersion: false Status: AccessDenied Kind: KindUnknown PartitionsCount: 0 DomainInfo From: (Uint32 : NULL, String : NULL) IncFrom: 1 To: () IncTo: 0 } 2025-03-31T16:20:50.726680Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NzMxMGQ5NDAtOGFiMmI0YTUtN2UxYzgzMGUtNjBiNDk5ZjQ=, ActorId: [3:7488009407514691203:2550], ActorState: ExecuteState, TraceId: 01jqpg2xa60rfxqmvd9jhbgk0f, Create QueryResponse for error on request, msg: 2025-03-31T16:20:50.852822Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7488009407514691263:3779], for# user0@builtin, access# EraseRow 2025-03-31T16:20:50.852927Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715678. 
Error resolving keys for entry: { TableId: [OwnerId: 72057594046644480, LocalPathId: 9] Access: 4 SyncVersion: false Status: AccessDenied Kind: KindUnknown PartitionsCount: 0 DomainInfo From: (Uint32 : NULL, String : NULL) IncFrom: 1 To: () IncTo: 0 } 2025-03-31T16:20:50.853093Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZWIwZjA5YTItYmZhNTIxYzItNGYwZGViOGYtMjhmNWYxYTQ=, ActorId: [3:7488009407514691249:2562], ActorState: ExecuteState, TraceId: 01jqpg2xe1405414fj3wwb41md, Create QueryResponse for error on request, msg: 2025-03-31T16:20:50.862064Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715679:0, at schemeshard: 72057594046644480 2025-03-31T16:20:50.994570Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7488009407514691313:3811], for# user0@builtin, access# SelectRow 2025-03-31T16:20:50.994682Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715680. Error resolving keys for entry: { TableId: [OwnerId: 72057594046644480, LocalPathId: 9] Access: 1 SyncVersion: false Status: AccessDenied Kind: KindUnknown PartitionsCount: 0 DomainInfo From: (Uint32 : NULL, String : NULL) IncFrom: 1 To: () IncTo: 0 } 2025-03-31T16:20:50.994910Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZWM2NzFlZGQtOGQ5MWRhMWQtNTRkMWMyOWItZjYwMGFjY2E=, ActorId: [3:7488009407514691299:2573], ActorState: ExecuteState, TraceId: 01jqpg2xj47mrhhmjawz9m6yd6, Create QueryResponse for error on request, msg: 2025-03-31T16:20:50.995397Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715681. Ctx: { TraceId: 01jqpg2xj47mrhhmjawz9m6yd6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZWM2NzFlZGQtOGQ5MWRhMWQtNTRkMWMyOWItZjYwMGFjY2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:20:51.107578Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7488009411809658653:3836], for# user0@builtin, access# UpdateRow 2025-03-31T16:20:51.107703Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715682. Error resolving keys for entry: { TableId: [OwnerId: 72057594046644480, LocalPathId: 9] Access: 2 SyncVersion: false Status: AccessDenied Kind: KindUnknown PartitionsCount: 0 DomainInfo From: (Uint32 : NULL, String : NULL) IncFrom: 1 To: () IncTo: 0 } 2025-03-31T16:20:51.107933Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YTdmMWQwYmQtZWJiMjM4YWItNTQ3ODYyOC05MWU1NDgz, ActorId: [3:7488009411809658622:2583], ActorState: ExecuteState, TraceId: 01jqpg2xpb2fbchy9fwt33dhpx, Create QueryResponse for error on request, msg: 2025-03-31T16:20:51.223666Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7488009411809658678:3844], for# user0@builtin, access# EraseRow 2025-03-31T16:20:51.223772Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715683. 
Error resolving keys for entry: { TableId: [OwnerId: 72057594046644480, LocalPathId: 9] Access: 4 SyncVersion: false Status: AccessDenied Kind: KindUnknown PartitionsCount: 0 DomainInfo From: (Uint32 : NULL, String : NULL) IncFrom: 1 To: () IncTo: 0 } 2025-03-31T16:20:51.223951Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZTdlMTIzOWItYzVkODk4NTItOGJhZTdjNTItMmJkNDE4ZTQ=, ActorId: [3:7488009411809658663:2593], ActorState: ExecuteState, TraceId: 01jqpg2xsy5qc5s3fe734jjmve, Create QueryResponse for error on request, msg: 2025-03-31T16:20:51.233972Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715684:0, at schemeshard: 72057594046644480 2025-03-31T16:20:51.269300Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7488009411809658703:3854], for# user0@builtin, access# DescribeSchema 2025-03-31T16:20:51.269324Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7488009411809658703:3854], for# user0@builtin, access# DescribeSchema 2025-03-31T16:20:51.271047Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7488009411809658700:2606], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: KiReadTable!
:2:13: Error: Cannot find table 'db.[/Root/Test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:20:51.271359Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MTU0N2VhNS1lM2FiMjA3MC1jNzRhMzg4Ni1mYmVkZmQ1ZQ==, ActorId: [3:7488009411809658696:2604], ActorState: ExecuteState, TraceId: 01jqpg2xxv2ge2zgzacn8e29cs, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:20:51.309930Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7488009411809658720:3858], for# user0@builtin, access# DescribeSchema 2025-03-31T16:20:51.309974Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7488009411809658720:3858], for# user0@builtin, access# DescribeSchema 2025-03-31T16:20:51.312666Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7488009411809658717:2615], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:8:25: Error: At function: KiWriteTable!
:8:25: Error: Cannot find table 'db.[/Root/Test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:20:51.312956Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NzlkMjRkZTUtNjdkNDQ2NzEtN2RjZjYwYWQtN2I2ZDgwOGI=, ActorId: [3:7488009411809658713:2613], ActorState: ExecuteState, TraceId: 01jqpg2xyw0cdjx0zz2g2n6mkg, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:20:51.355570Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7488009411809658737:3862], for# user0@builtin, access# DescribeSchema 2025-03-31T16:20:51.355599Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7488009411809658737:3862], for# user0@builtin, access# DescribeSchema 2025-03-31T16:20:51.358468Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7488009411809658734:2624], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:12:30: Error: At function: KiWriteTable!
:12:30: Error: Cannot find table 'db.[/Root/Test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:20:51.358829Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NzQ5OTU3M2YtN2IzMjRlNC02OTI3NWFiOC0zYjQ4MWUwOA==, ActorId: [3:7488009411809658730:2622], ActorState: ExecuteState, TraceId: 01jqpg2y084kv5ck7fa4369d20, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:20:51.368170Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715685:0, at schemeshard: 72057594046644480 2025-03-31T16:20:51.401851Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7488009411809658763:3873], for# user0@builtin, access# DescribeSchema 2025-03-31T16:20:51.401880Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7488009411809658763:3873], for# user0@builtin, access# DescribeSchema 2025-03-31T16:20:51.403361Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7488009411809658760:2634], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: KiReadTable!
:2:13: Error: Cannot find table 'db.[/Root/Test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:20:51.403564Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=Y2NjZjgwMmYtZWE5MTExMC1jYWQ4MzRlNi0zZDE0OWRhNw==, ActorId: [3:7488009411809658756:2632], ActorState: ExecuteState, TraceId: 01jqpg2y1z2pj9t908jk8fpzsf, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:20:51.444140Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7488009411809658782:3877], for# user0@builtin, access# DescribeSchema 2025-03-31T16:20:51.444198Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7488009411809658782:3877], for# user0@builtin, access# DescribeSchema 2025-03-31T16:20:51.446517Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7488009411809658779:2643], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:8:25: Error: At function: KiWriteTable!
:8:25: Error: Cannot find table 'db.[/Root/Test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:20:51.446870Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MzkzNjQ4YmItODliYTcyNDEtYjA0ODE2YTItMTY4NTM2MDU=, ActorId: [3:7488009411809658775:2641], ActorState: ExecuteState, TraceId: 01jqpg2y3393bgq7rm6nrp5zf6, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:20:51.489218Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7488009411809658799:3881], for# user0@builtin, access# DescribeSchema 2025-03-31T16:20:51.489251Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7488009411809658799:3881], for# user0@builtin, access# DescribeSchema 2025-03-31T16:20:51.492066Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7488009411809658796:2652], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:12:30: Error: At function: KiWriteTable!
:12:30: Error: Cannot find table 'db.[/Root/Test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:20:51.492487Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=N2FlYmIzODctNzE4ZmMxMjQtM2ViNWRjZjItMTJjYjA2Njg=, ActorId: [3:7488009411809658792:2650], ActorState: ExecuteState, TraceId: 01jqpg2y4g0nfbm0g85252v59r, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: >> KqpPg::InsertFromSelect_NoReorder+useSink [GOOD] >> KqpPg::DropTablePg ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpTypes::MultipleCurrentUtcTimestamp [GOOD] Test command err: Trying to start YDB, gRPC: 14603, MsgBus: 26072 2025-03-31T16:19:53.785003Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488009164900390578:2263];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:53.785351Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000aa5/r3tmp/tmpB45JJ6/pdisk_1.dat 2025-03-31T16:19:54.142420Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:19:54.143985Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:19:54.144300Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:19:54.156284Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14603, node 1 2025-03-31T16:19:54.350455Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:19:54.350473Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:19:54.350478Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:19:54.350602Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26072 TClient is connected to server localhost:26072 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:19:54.945900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:19:54.973107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:55.073452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:55.199450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:55.268451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:55.969428Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009173490326712:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:55.969588Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:56.504130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:19:56.528674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:19:56.551176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:19:56.572641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:19:56.597850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:19:56.626219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:19:56.682980Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009177785294545:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:56.683047Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:56.683191Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009177785294550:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:56.691220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:19:56.697640Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488009177785294552:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:19:56.764259Z node 1 :TX_PROXY ERROR: Actor# [1:7488009177785294606:3440] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:19:57.594423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:58.781554Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488009164900390578:2263];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:58.781685Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:20:09.142334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:20:09.142363Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:20:20.634871Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438020615, txId: 281474976710672] shutting down 2025-03-31T16:20:20.703376Z node 1 :RPC_REQUEST WARN: Client lost 2025-03-31T16:20:21.909182Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438021901, txId: 281474976710674] shutting down 2025-03-31T16:20:23.095871Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438023088, txId: 281474976710676] shutting down 2025-03-31T16:20:24.352428Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438024275, txId: 281474976710678] shutting down 2025-03-31T16:20:25.556456Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438025549, txId: 281474976710680] shutting down 2025-03-31T16:20:26.728087Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438026721, txId: 281474976710682] shutting down 2025-03-31T16:20:27.888648Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438027881, txId: 281474976710684] shutting down 2025-03-31T16:20:29.084589Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438029077, txId: 281474976710686] shutting down 2025-03-31T16:20:30.266433Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438030258, txId: 281474976710688] shutting down 2025-03-31T16:20:31.449904Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438031441, txId: 281474976710690] shutting down 2025-03-31T16:20:32.625781Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438032617, txId: 281474976710692] shutting down assertion failed at ydb/core/kqp/ut/query/kqp_stats_ut.cpp:591, virtual void NKikimr::NKqp::NTestSuiteKqpStats::TTestCaseSysViewClientLost::Execute_(NUnitTest::TTestContext &): (timeoutedCount == 1) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x194EA4DB 1. 
/tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x199AF40F
2. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:591: Execute_ @ 0x1908FA48
3. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18: operator() @ 0x190A2A57
4. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18:1) &> @ 0x190A2A57
5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18:1) &> @ 0x190A2A57
6. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x190A2A57
7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x190A2A57
8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x199E6435
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x199E6435
10. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x199E6435
11. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x199B5F88
12. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18: Execute @ 0x190A1BDB
13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x199B7855
14. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ ... Error: At function: <=
:9:22: Error: Uncompatible types in compare: Optional '<=' Int32 2025-03-31T16:20:44.363407Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NTM4YjE3MzktOWUzMzU2MTYtNDZkYzQ0NjEtNjJhOTY5MmY=, ActorId: [2:7488009383473221428:2496], ActorState: ExecuteState, TraceId: 01jqpg2q5ravz0qp6p99awf4e5, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
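The GENERIC_ERROR reports here are YQL type-annotation failures: a comparison operator was applied to operands whose types cannot be compared, and the nullable side is reported as Optional. The offending test queries are not included in this log, so the following is only a minimal sketch of the failing pattern, with a hypothetical table `/Root/Example` and invented column names:

-- Hypothetical schema, not taken from the log.
CREATE TABLE `/Root/Example` (
    Key Int32,
    Value String,        -- nullable column, i.e. Optional<String>
    PRIMARY KEY (Key)
);

-- Expected to fail type annotation with errors like the ones above
-- ("Uncompatible types in compare: Optional '<op>' Int32"):
-- Optional<String> cannot be compared with an Int32 literal.
SELECT
    Value == 1,
    Value != 1,
    Value > 1,
    Value <= 1
FROM `/Root/Example`;

-- Comparing against a literal of the column's own type compiles.
SELECT Value == '1' FROM `/Root/Example`;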
: Error: Type annotation, code: 1030
:5:13: Error: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject
:5:13: Error: At function: SqlProjectItem
:6:22: Error: At function: ==
:6:22: Error: Uncompatible types in compare: Optional '==' Int32
:5:13: Error: At function: SqlProjectItem
:7:22: Error: At function: !=
:7:22: Error: Uncompatible types in compare: Optional '!=' Int32
:5:13: Error: At function: SqlProjectItem
:8:22: Error: At function: >
:8:22: Error: Uncompatible types in compare: Optional '>' Int32
:5:13: Error: At function: SqlProjectItem
:9:22: Error: At function: <=
:9:22: Error: Uncompatible types in compare: Optional '<=' Int32 2025-03-31T16:20:44.394185Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7488009383473221466:2509], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:6:13: Error: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject
:6:13: Error: At function: SqlProjectItem
:7:22: Error: At function: ==
:7:22: Error: Uncompatible types in compare: Optional '==' Optional
:6:13: Error: At function: SqlProjectItem
:8:22: Error: At function: !=
:8:22: Error: Uncompatible types in compare: Optional '!=' Optional
:6:13: Error: At function: SqlProjectItem
:9:22: Error: At function: >
:9:22: Error: Uncompatible types in compare: Optional '>' Optional
:6:13: Error: At function: SqlProjectItem
:10:22: Error: At function: <=
:10:22: Error: Uncompatible types in compare: Optional '<=' Optional 2025-03-31T16:20:44.396562Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NTM4YjE3MzktOWUzMzU2MTYtNDZkYzQ0NjEtNjJhOTY5MmY=, ActorId: [2:7488009383473221428:2496], ActorState: ExecuteState, TraceId: 01jqpg2q6ka9ntxhqgs08x6rgp, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
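The Optional '<op>' Optional reports above are the same failure with nullable operands on both sides: wrapping both sides in Optional does not relax the requirement that the inner types be comparable. Continuing the hypothetical sketch, with an added invented nullable column `Num Int32`:

-- Both operands are Optional, but the inner types (String vs Int32)
-- are still incompatible, so compilation is expected to fail with
-- "Uncompatible types in compare: Optional '==' Optional".
SELECT Value == Num FROM `/Root/Example`;

-- An explicit cast states the intent and compiles: CAST yields
-- Optional<Int32>, which is comparable with the Optional<Int32> column.
SELECT CAST(Value AS Int32) == Num FROM `/Root/Example`;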
: Error: Type annotation, code: 1030
:6:13: Error: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject
:6:13: Error: At function: SqlProjectItem
:7:22: Error: At function: ==
:7:22: Error: Uncompatible types in compare: Optional '==' Optional
:6:13: Error: At function: SqlProjectItem
:8:22: Error: At function: !=
:8:22: Error: Uncompatible types in compare: Optional '!=' Optional
:6:13: Error: At function: SqlProjectItem
:9:22: Error: At function: >
:9:22: Error: Uncompatible types in compare: Optional '>' Optional
:6:13: Error: At function: SqlProjectItem
:10:22: Error: At function: <=
:10:22: Error: Uncompatible types in compare: Optional '<=' Optional Trying to start YDB, gRPC: 31705, MsgBus: 1261 2025-03-31T16:20:45.186018Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488009390149111503:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:45.186087Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000aa5/r3tmp/tmpxuhwaU/pdisk_1.dat 2025-03-31T16:20:45.285437Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31705, node 3 2025-03-31T16:20:45.320922Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:45.321000Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:45.323689Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:20:45.350540Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:45.350571Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:45.350579Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:45.350722Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1261 TClient is connected to server localhost:1261 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:45.830606Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:20:45.848599Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:20:45.922748Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:20:46.091642Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:46.194422Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:48.633041Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488009403034015158:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:48.633128Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:48.693740Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:20:48.726278Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:20:48.769852Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:20:48.803002Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:20:48.835962Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:20:48.906764Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:20:48.948737Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488009403034015672:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:48.948825Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:48.948829Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488009403034015677:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:48.952913Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:20:48.962827Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488009403034015679:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:20:49.033383Z node 3 :TX_PROXY ERROR: Actor# [3:7488009407328983028:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:20:50.186312Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488009390149111503:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:50.186394Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpLimits::QueryExecTimeoutCancel [GOOD] >> KqpLimits::ReplySizeExceeded ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_exchange/unittest >> Cdc::ResolvedTimestampForDisplacedUpsert [GOOD] Test command err: 2025-03-31T16:18:02.192460Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008687129074120:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:02.192532Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00190b/r3tmp/tmpjXQwSb/pdisk_1.dat 2025-03-31T16:18:02.644043Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:02.667607Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:02.667744Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:02.669658Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5033, node 1 2025-03-31T16:18:02.822356Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:02.822381Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:02.822387Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:02.822497Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:18:03.096890Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:18:03.149819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:18:03.204303Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:7488008691424042032:2309] 2025-03-31T16:18:03.204661Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:18:03.222403Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:18:03.222489Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:18:03.224815Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:18:03.224884Z node 1 :TX_DATASHARD DEBUG: 
LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:18:03.224919Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:18:03.225605Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:18:03.225672Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:18:03.225702Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:7488008691424042046:2309] in generation 1 2025-03-31T16:18:03.227377Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:18:03.272856Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T16:18:03.273062Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:18:03.273120Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:7488008691424042050:2310] 2025-03-31T16:18:03.273140Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:18:03.273161Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T16:18:03.273172Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:18:03.273353Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T16:18:03.273434Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T16:18:03.273460Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:18:03.273480Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:18:03.273498Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:18:03.273514Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:18:03.273807Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7488008691424042028:2302], serverId# [1:7488008691424042049:2312], sessionId# [0:0:0] 2025-03-31T16:18:03.273998Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:18:03.274283Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976710657 ssId 72057594046644480 seqNo 2:1 2025-03-31T16:18:03.274384Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976710657 at tablet 72075186224037888 2025-03-31T16:18:03.275418Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:18:03.276495Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:18:03.276563Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-31T16:18:03.278368Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7488008691424042064:2320], serverId# [1:7488008691424042065:2321], sessionId# [0:0:0] 2025-03-31T16:18:03.283380Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976710657 at step 1743437883327 at tablet 72075186224037888 { Transactions { TxId: 281474976710657 AckTo { RawX1: 
0 RawX2: 0 } } Step: 1743437883327 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T16:18:03.283415Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:18:03.283550Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:18:03.283627Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:18:03.283644Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:18:03.283662Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1743437883327:281474976710657] in PlanQueue unit at 72075186224037888 2025-03-31T16:18:03.283899Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1743437883327:281474976710657 keys extracted: 0 2025-03-31T16:18:03.284004Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:18:03.284096Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:18:03.284134Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-31T16:18:03.286394Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-31T16:18:03.288527Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:18:03.290395Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1743437883326 2025-03-31T16:18:03.290409Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:18:03.290442Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1743437883334 2025-03-31T16:18:03.291425Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1743437883327} 2025-03-31T16:18:03.291480Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:18:03.292134Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:18:03.292163Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:18:03.292204Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-31T16:18:03.292247Z node 1 :TX_DATASHARD DEBUG: Complete [1743437883327 : 281474976710657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:7488008687129074529:2191], exec latency: 4 ms, propose latency: 8 ms 2025-03-31T16:18:03.292302Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976710657 state Ready TxInFly 0 2025-03-31T16:18:03.292367Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:18:03.304046Z node 1 :CHANGE_EXCHANGE DEBUG: [ChangeSender][72075186224037888:1][1:7488008691424042050:2310][Inactive] Handle NKikimrChangeExchange.TEvActivateSender 2025-03-31T16:18:03.304978Z node 1 :TX_DATASHARD DEBUG: Handle 
TEvSchemaChangedResult 281474976710657 datashard 72075186224037888 state Ready 2025-03-31T16:18:03.305075Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-31T16:18:03.318428Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:18:03.319536Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:18:03.319764Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976710658 ssId 72057594046644480 seqNo 2:2 2025-03-31T16:18:03.319803Z node 1 :TX_DATASHARD INFO: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976710658 2025-03-31T16:18:03.319813Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976710658 at tablet 72075186224037888 2025-03-31T16:18:03.320963Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:18:03.381847Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Handle TEvInterconnect::TEvNodeInfo 2025-03-31T16:18:03.385360Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Registered with mediator time cast 2025-03-31T16:18:03.389196Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-03-31T16:18:03.389613Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] doesn't have tx info 2025-03-31T16:18:03.389648Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-31T16:18:03.390345Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] no config, start with empty partitions and default config 2025-03-31T16:18:03.390373Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Txs.size=0, PlannedTxs.size=0 2025-03-31T16:18:03.390471Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037889] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:18:03.390524Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037889] doesn't have tx writes info 2025-03-31T16:18:03.391457Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] server connected, pipe [1:7488008691424042134:2373], now have 1 active actors on pipe 2025-03-31T16:18:03.391484Z node 1 :PERSQUEUE DEBUG: [PQ ... 
ed on disk 2025-03-31T16:20:51.561485Z node 27 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 9 requestId: cookie: 5 2025-03-31T16:20:51.561902Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderPartition][72075186224037888:1][0][72075186224037889][27:939:2695] Handle NKikimrClient.TResponse { SessionId: TxId: Success { Response: Status: 1 ErrorCode: OK PartitionResponse { CmdWriteResult { AlreadyWritten: false SourceId: "\00072075186224037888" SeqNo: 5 Offset: 4 WriteTimestampMS: 8969 PartitionQuotedTimeMs: 0 TotalTimeInPartitionQueueMs: 0 WriteTimeMs: 0 TopicQuotedTimeMs: 0 WrittenInTx: false } Cookie: 5 } } } 2025-03-31T16:20:51.562138Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderMain][72075186224037888:1][27:852:2695] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 0 } 2025-03-31T16:20:51.562402Z node 27 :TX_DATASHARD INFO: TTxRemoveChangeRecords Execute: records# 1, at tablet# 72075186224037888 2025-03-31T16:20:51.562499Z node 27 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 5, at tablet: 72075186224037888 2025-03-31T16:20:51.563363Z node 27 :TX_DATASHARD INFO: TTxRemoveChangeRecords Complete: removed# 1, left# 0, at tablet# 72075186224037888 ... unblocking updates ... unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... unblocking NKikimr::TEvMediatorTimecast::TEvUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... unblocking NKikimr::TEvMediatorTimecast::TEvUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2025-03-31T16:20:51.669906Z node 27 :TX_DATASHARD DEBUG: Notified by mediator time cast with PlanStep# 9000 at tablet 72075186224037888 2025-03-31T16:20:51.670032Z node 27 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:20:51.670180Z node 27 :TX_DATASHARD INFO: [CdcStreamHeartbeat] Emit change records: edge# v9000/18446744073709551615, at tablet# 72075186224037888 2025-03-31T16:20:51.670407Z node 27 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 6 Group: 0 Step: 9000 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcHeartbeat Source: Unspecified Body: 0b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 0 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-03-31T16:20:51.675883Z node 27 :TX_DATASHARD INFO: [CdcStreamHeartbeat] Enqueue 1 change record(s): at tablet# 72075186224037888 2025-03-31T16:20:51.676038Z node 27 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 6 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 0 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 0 } 2025-03-31T16:20:51.676145Z node 27 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:20:51.676250Z node 27 :TX_DATASHARD DEBUG: Waiting for PlanStep# 12000 from mediator time cast 2025-03-31T16:20:51.676495Z node 27 :CHANGE_EXCHANGE DEBUG: [ChangeSender][72075186224037888:1][27:685:2581] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvEnqueueRecords { Records [{ Order: 6 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 0 }] } 2025-03-31T16:20:51.676755Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderMain][72075186224037888:1][27:852:2695] Handle 
NKikimr::NChangeExchange::TEvChangeExchange::TEvEnqueueRecords { Records [{ Order: 6 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 0 }] } 2025-03-31T16:20:51.677508Z node 27 :TX_DATASHARD INFO: TTxRequestChangeRecords Execute: at tablet# 72075186224037888 2025-03-31T16:20:51.677859Z node 27 :TX_DATASHARD DEBUG: Send 1 change records: to# [27:852:2695], at tablet# 72075186224037888 2025-03-31T16:20:51.677960Z node 27 :TX_DATASHARD INFO: TTxRequestChangeRecords Complete: sent# 1, forgotten# 0, left# 0, at tablet# 72075186224037888 2025-03-31T16:20:51.678217Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderMain][72075186224037888:1][27:852:2695] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 6 Group: 0 Step: 9000 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcHeartbeat Source: Unspecified Body: 0b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2025-03-31T16:20:51.678577Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderPartition][72075186224037888:1][0][72075186224037889][27:939:2695] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 6 Group: 0 Step: 9000 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcHeartbeat Source: Unspecified Body: 0b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2025-03-31T16:20:51.678967Z node 27 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-03-31T16:20:51.679076Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-03-31T16:20:51.679262Z node 27 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 10 requestId: cookie: 6 2025-03-31T16:20:51.679435Z node 27 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-03-31T16:20:51.679464Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-03-31T16:20:51.679553Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message topic: Table/Stream/streamImpl partition: 0 SourceId: '\00072075186224037888' SeqNo: 6 partNo : 0 messageNo: 11 size 26 offset: -1 2025-03-31T16:20:51.679812Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 process heartbeat sourceId '\00072075186224037888' version v9000/0 2025-03-31T16:20:51.679954Z node 27 :PERSQUEUE INFO: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 emit heartbeat v9000/0 2025-03-31T16:20:51.680169Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 part blob processing sourceId '\00072075186224037889' seqNo 0 partNo 0 2025-03-31T16:20:51.680941Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 part blob complete sourceId '\00072075186224037889' seqNo 0 partNo 0 FormedBlobsCount 0 NewHead: Offset 5 PartNo 0 PackedSize 107 count 1 nextOffset 6 batches 1 2025-03-31T16:20:51.682258Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Add new write blob: topic 'Table/Stream/streamImpl' partition 0 compactOffset 5,1 HeadOffset 0 endOffset 5 curOffset 6 d0000000000_00000000000000000005_00000_0000000001_00000| size 93 WTime 8979 
2025-03-31T16:20:51.682549Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-31T16:20:51.682635Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-31T16:20:51.682738Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-03-31T16:20:51.682817Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-31T16:20:51.682901Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] m0000000000p72075186224037889 2025-03-31T16:20:51.682954Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] d0000000000_00000000000000000005_00000_0000000001_00000| 2025-03-31T16:20:51.682979Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] i0000000000 2025-03-31T16:20:51.683028Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-31T16:20:51.683137Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] =========================== 2025-03-31T16:20:51.683298Z node 27 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-03-31T16:20:51.683505Z node 27 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 5 partNo 0 count 1 size 93 2025-03-31T16:20:51.684750Z node 27 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 5 count 1 size 93 actorID [27:798:2662] 2025-03-31T16:20:51.685025Z node 27 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. Tablet '72075186224037889' partition 0 offset 5 partno 0 count 1 parts 0 size 93 2025-03-31T16:20:51.695614Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 44 WriteNewSizeFromSupportivePartitions# 0 2025-03-31T16:20:51.695840Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-03-31T16:20:51.696006Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Answering for message sourceid: '\00072075186224037888', Topic: 'Table/Stream/streamImpl', Partition: 0, SeqNo: 6, partNo: 0, Offset: 5 is stored on disk 2025-03-31T16:20:51.696442Z node 27 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 11 requestId: cookie: 6 2025-03-31T16:20:51.696793Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderPartition][72075186224037888:1][0][72075186224037889][27:939:2695] Handle NKikimrClient.TResponse { SessionId: TxId: Success { Response: Status: 1 ErrorCode: OK PartitionResponse { CmdWriteResult { AlreadyWritten: false SourceId: "\00072075186224037888" SeqNo: 6 Offset: 5 WriteTimestampMS: 8979 PartitionQuotedTimeMs: 0 TotalTimeInPartitionQueueMs: 0 WriteTimeMs: 0 TopicQuotedTimeMs: 0 WrittenInTx: false } Cookie: 6 } } } 2025-03-31T16:20:51.696969Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderMain][72075186224037888:1][27:852:2695] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 0 } 2025-03-31T16:20:51.697207Z node 27 :TX_DATASHARD INFO: TTxRemoveChangeRecords Execute: records# 1, at tablet# 72075186224037888 2025-03-31T16:20:51.697279Z node 27 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 6, at tablet: 72075186224037888 2025-03-31T16:20:51.697954Z node 27 :TX_DATASHARD INFO: TTxRemoveChangeRecords Complete: removed# 1, left# 0, at tablet# 72075186224037888 ... checking the update is logged before the new resolved timestamp >>>>> GetRecords path=/Root/Table/Stream partitionId=0 2025-03-31T16:20:51.806390Z node 27 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-03-31T16:20:51.806539Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-03-31T16:20:51.806827Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 8 Topic 'Table/Stream/streamImpl' partition 0 user $without_consumer offset 0 count 10000 size 26214400 endOffset 6 max time lag 0ms effective offset 0 2025-03-31T16:20:51.806951Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 8 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 6 2025-03-31T16:20:51.807150Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Reading cookie 8. All data is from uncompacted head. 
2025-03-31T16:20:51.807233Z node 27 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-03-31T16:20:51.807575Z node 27 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 >> TCmsTest::TestTwoOrMoreDisksFromGroupAtTheSameRequestBlock42 [GOOD] >> TCmsTest::TestTwoOrMoreDisksFromGroupAtTheSameRequestMirror3dc >> TCmsTest::WalleTasks [GOOD] >> TCmsTest::WalleTasksWithNodeLimit >> TCmsTest::DisabledEvictVDisks [GOOD] >> TCmsTest::EmergencyDuringRollingRestart >> TCmsTenatsTest::TestTenantLimit >> YdbSdkSessionsPool::FailTest [GOOD] >> TCmsTenatsTest::TestNoneTenantPolicy >> TCmsTest::RequestRestartServicesDryRun [GOOD] >> TCmsTest::RequestReplacePDiskDoesntBreakGroup >> TCmsTest::SysTabletsNode [GOOD] >> TClusterInfoTest::DeviceId [GOOD] >> TClusterInfoTest::FillInfo >> TCmsTest::VDisksEvictionShouldFailWhileSentinelIsDisabled >> TCmsTest::TestOutdatedState >> TClusterInfoTest::FillInfo [GOOD] >> TCmsTenatsTest::CollectInfo >> TCmsTest::ManageRequestsWrong >> KqpPg::CreateUniqPgColumn+useSink [GOOD] >> KqpPg::CreateUniqPgColumn-useSink |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::SysTabletsNode [GOOD] >> TCmsTest::StateRequestNode [GOOD] >> TCmsTest::StateRequestUnknownNode >> TCmsTenatsTest::TestTenantRatioLimit >> TCmsTest::TestForceRestartModeScheduledDisconnects [GOOD] >> TCmsTest::ActionWithZeroDuration [GOOD] >> TCmsTest::CheckUnreplicatedDiskPreventsRestart ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::FailTest [GOOD] Test command err: 2025-03-31T16:20:38.478863Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488009357275324960:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:38.478954Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0005f8/r3tmp/tmpMuPWg8/pdisk_1.dat 2025-03-31T16:20:38.896218Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:20:38.909268Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:38.909845Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:38.922769Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18034, node 1 2025-03-31T16:20:39.127120Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:39.127140Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:39.127147Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:39.127285Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13894 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:39.643147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:43.478975Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488009357275324960:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:43.479121Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:20:50.765559Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488009408263126793:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:50.765629Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0005f8/r3tmp/tmpRiMhIV/pdisk_1.dat 2025-03-31T16:20:50.895932Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:20:50.922868Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:50.922977Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:50.925929Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18116, node 4 2025-03-31T16:20:50.988198Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:50.988225Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:50.988235Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:50.988384Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21643 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:51.198551Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... >> TDowntimeTest::SetIgnoredDowntimeGap [GOOD] >> TMaintenanceApiTest::CompositeActionGroupSameStorageGroup >> KqpPg::InsertValuesFromTableWithDefaultText-useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultTextNotNull+useSink >> KqpPg::InsertValuesFromTableWithDefault-useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultAndCast+useSink |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::TestForceRestartModeScheduledDisconnects [GOOD] >> TCmsTest::WalleRebootDownNode >> TCmsTest::StateStorageRollingRestart [GOOD] >> TCmsTest::StateStorageLockedNodes >> KqpPg::TableArrayInsert+useSink [GOOD] >> KqpPg::TableArrayInsert-useSink >> TCmsTest::EmergencyDuringRollingRestart [GOOD] >> TCmsTenatsTest::TestClusterLimit >> TCmsTest::RequestReplacePDiskDoesntBreakGroup [GOOD] >> TCmsTest::RequestReplacePDiskConsecutiveWithDone >> TCmsTenatsTest::CollectInfo [GOOD] >> TCmsTenatsTest::RequestRestartServices >> TCmsTest::TestTwoOrMoreDisksFromGroupAtTheSameRequestMirror3dc [GOOD] >> TCmsTest::VDisksEviction >> TCmsTenatsTest::TestTenantLimit [GOOD] >> TCmsTenatsTest::TestScheduledPermissionWithNonePolicy >> TCmsTest::VDisksEvictionShouldFailWhileSentinelIsDisabled [GOOD] >> TCmsTest::VDisksEvictionShouldFailOnUnsupportedAction >> TCmsTenatsTest::TestNoneTenantPolicy [GOOD] >> TCmsTenatsTest::TestDefaultTenantPolicyWithSingleTenantHost >> TCmsTest::ManageRequestsWrong [GOOD] >> TCmsTest::ManageRequestsDry ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::EmergencyDuringRollingRestart [GOOD] Test command err: 2025-03-31T16:20:52.465790Z node 10 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvGetConfigRequest { }, response# NKikimr::NCms::TEvCms::TEvGetConfigResponse { Status { Code: OK } Config { DefaultRetryTime: 300000000 DefaultPermissionDuration: 300000000 TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } InfoCollectionTimeout: 15000000 LogConfig { DefaultLevel: ENABLED TTL: 1209600000000 } SentinelConfig { Enable: true UpdateConfigInterval: 3600000000 RetryUpdateConfig: 60000000 UpdateStateInterval: 60000000 UpdateStateTimeout: 45000000 RetryChangeStatus: 10000000 ChangeStatusRetries: 5 DefaultStateLimit: 60 DataCenterRatio: 50 RoomRatio: 70 RackRatio: 90 DryRun: false EvictVDisksStatus: FAULTY GoodStateLimit: 5 } } 
} 2025-03-31T16:20:52.466246Z node 10 :CMS DEBUG: TTxUpdateConfig Execute 2025-03-31T16:20:52.493216Z node 10 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-03-31T16:20:52.493365Z node 10 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2025-03-31T16:20:52.495328Z node 10 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029512 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120029512 } Devices { Name: "pdisk-17-17" State: UP Timestamp: 120029512 } Timestamp: 120029512 NodeId: 17 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120029512 } Devices { Name: "pdisk-10-10" State: UP Timestamp: 120029512 } Timestamp: 120029512 NodeId: 10 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120029512 } Devices { Name: "pdisk-11-11" State: UP Timestamp: 120029512 } Timestamp: 120029512 NodeId: 11 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120029512 } Devices { Name: "pdisk-12-12" State: UP Timestamp: 120029512 } Timestamp: 120029512 NodeId: 12 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120029512 } Devices { Name: "pdisk-13-13" State: UP Timestamp: 120029512 } Timestamp: 120029512 NodeId: 13 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029512 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 
120029512 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120029512 } Devices { Name: "pdisk-14-14" State: UP Timestamp: 120029512 } Timestamp: 120029512 NodeId: 14 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029512 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120029512 } Devices { Name: "pdisk-15-15" State: UP Timestamp: 120029512 } Timestamp: 120029512 NodeId: 15 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029512 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120029512 } Devices { Name: "pdisk-16-16" State: UP Timestamp: 120029512 } Timestamp: 120029512 NodeId: 16 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Timestamp: 120029512 } } 2025-03-31T16:20:52.496798Z node 10 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029512 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120029512 } Devices { Name: "pdisk-17-17" State: UP Timestamp: 120029512 } Timestamp: 120029512 NodeId: 17 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120029512 } Devices { Name: "pdisk-10-10" State: UP Timestamp: 120029512 } Timestamp: 120029512 NodeId: 10 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120029512 } Devices { Name: "pdisk-11-11" State: UP Timestamp: 120029512 } Timestamp: 120029512 NodeId: 11 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 
120029512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120029512 } Devices { Name: "pdisk-12-12" State: UP Timestamp: 120029512 } Timestamp: 120029512 NodeId: 12 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120029512 } Devices { Name: "pdisk-13-13" State: UP Timestamp: 120029512 } Timestamp: 120029512 NodeId: 13 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029512 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120029512 } Devices { Name: "pdisk-14-14" State: UP Timestamp: 120029512 } Timestamp: 120029512 NodeId: 14 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029512 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120029512 } Devices { Name: "pdisk-15-15" State: UP Timestamp: 120029512 } Timestamp: 120029512 NodeId: 15 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029512 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120029512 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120029512 } Devices { Name: "pdisk-16-16" State: UP Timestamp: 120029512 } Timestamp: 120029512 NodeId: 16 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Timestamp: 120029512 } 2025-03-31T16:20:52.497042Z node 10 :CMS DEBUG: [Sentinel] [Main] Config was updated in 120.003512s 2025-03-31T16:20:52.497112Z node 10 :CMS DEBUG: [Sentinel] [Main] Start StateUpdater 2025-03-31T16:20:52.497209Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 10, wbId# [10:8388350642965737326:1634689637] 2025-03-31T16:20:52.497251Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 11, wbId# [11:8388350642965737326:1634689637] 2025-03-31T16:20:52.497276Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 12, wbId# [12:8388350642965737326:1634689637] 2025-03-31T16:20:52.497315Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 13, wbId# [13:8388350642965737326:1634689637] 2025-03-31T16:20:52.497339Z node 10 
:CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 14, wbId# [14:8388350642965737326:1634689637] 2025-03-31T16:20:52.497363Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 15, wbId# [15:8388350642965737326:1634689637] 2025-03-31T16:20:52.497385Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 16, wbId# [16:8388350642965737326:1634689637] 2025-03-31T16:20:52.497447Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 17, wbId# [17:83883506 ... :CMS DEBUG: Running CleanupWalleTasks 2025-03-31T16:20:52.769025Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 10, wbId# [10:8388350642965737326:1634689637] 2025-03-31T16:20:52.769073Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 11, wbId# [11:8388350642965737326:1634689637] 2025-03-31T16:20:52.769093Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 12, wbId# [12:8388350642965737326:1634689637] 2025-03-31T16:20:52.769109Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 13, wbId# [13:8388350642965737326:1634689637] 2025-03-31T16:20:52.769133Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 14, wbId# [14:8388350642965737326:1634689637] 2025-03-31T16:20:52.769154Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 15, wbId# [15:8388350642965737326:1634689637] 2025-03-31T16:20:52.769172Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 16, wbId# [16:8388350642965737326:1634689637] 2025-03-31T16:20:52.769188Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 17, wbId# [17:8388350642965737326:1634689637] 2025-03-31T16:20:52.769359Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 10, response# PDiskStateInfo { PDiskId: 10 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-10.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240029 2025-03-31T16:20:52.769817Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 16, response# PDiskStateInfo { PDiskId: 16 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-16.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240029 2025-03-31T16:20:52.769912Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 17, response# PDiskStateInfo { PDiskId: 17 CreateTime: 0 ChangeTime: 0 Path: "/17/pdisk-17.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240029 2025-03-31T16:20:52.770037Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 15, response# PDiskStateInfo { PDiskId: 15 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-15.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240029 2025-03-31T16:20:52.770078Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 11, response# PDiskStateInfo { PDiskId: 11 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-11.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240029 2025-03-31T16:20:52.770134Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 12, response# PDiskStateInfo { PDiskId: 
12 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-12.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240029 2025-03-31T16:20:52.770168Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 13, response# PDiskStateInfo { PDiskId: 13 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-13.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240029 2025-03-31T16:20:52.770212Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 14, response# PDiskStateInfo { PDiskId: 14 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-14.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240029 2025-03-31T16:20:52.770250Z node 10 :CMS DEBUG: [Sentinel] [Main] State was updated in 0.000000s 2025-03-31T16:20:52.770449Z node 10 :CMS NOTICE: [Sentinel] [Main] PDisk status changed: pdiskId# 10:10, status# FAULTY, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 3 StateLimit# 1, dry run# 0 2025-03-31T16:20:52.770494Z node 10 :CMS DEBUG: [Sentinel] [Main] Change pdisk status: requestsSize# 1 2025-03-31T16:20:52.770652Z node 10 :CMS DEBUG: TTxLogAndSend Execute 2025-03-31T16:20:52.770815Z node 10 :CMS DEBUG: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Success: true, cookie# 2 2025-03-31T16:20:52.770845Z node 10 :CMS NOTICE: [Sentinel] [Main] PDisk status has been changed: pdiskId# 10:10 2025-03-31T16:20:52.783365Z node 10 :CMS DEBUG: TTxLogAndSend Complete 2025-03-31T16:20:52.799000Z node 10 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-03-31T16:20:52.799126Z node 10 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-03-31T16:20:52.799190Z node 10 :CMS DEBUG: Timestamp: 1970-01-01T00:04:00Z 2025-03-31T16:20:52.800171Z node 10 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "10" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 10 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2025-03-31T16:20:52.800279Z node 10 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "10" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 10 has not yet been completed" } 2025-03-31T16:20:52.800325Z node 10 :CMS DEBUG: Result: ERROR (reason: Evict vdisks is disabled in Sentinel (self heal)) 2025-03-31T16:20:52.800448Z node 10 :CMS DEBUG: TTxStorePermissions Execute 2025-03-31T16:20:52.800584Z node 10 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# 0, body# User: "user" PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2025-03-31T16:20:52.812994Z node 10 :CMS DEBUG: TTxStorePermissions complete 2025-03-31T16:20:52.813242Z node 10 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-1" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ERROR Reason: "Evict vdisks is disabled in Sentinel (self heal)" } RequestId: "user-r-1" } 2025-03-31T16:20:52.813806Z node 10 :CMS DEBUG: TTxUpdateConfig Execute 
2025-03-31T16:20:52.825827Z node 10 :CMS DEBUG: TTxUpdateConfig Complete 2025-03-31T16:20:52.826010Z node 10 :CMS DEBUG: Updated config: DefaultRetryTime: 300000000 DefaultPermissionDuration: 300000000 TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } InfoCollectionTimeout: 15000000 LogConfig { DefaultLevel: ENABLED TTL: 1209600000000 } SentinelConfig { Enable: true UpdateConfigInterval: 3600000000 RetryUpdateConfig: 60000000 UpdateStateInterval: 60000000 UpdateStateTimeout: 45000000 RetryChangeStatus: 10000000 ChangeStatusRetries: 5 DefaultStateLimit: 1 DataCenterRatio: 50 RoomRatio: 70 RackRatio: 90 DryRun: false EvictVDisksStatus: FAULTY GoodStateLimit: 5 } 2025-03-31T16:20:52.889445Z node 10 :CMS DEBUG: [Sentinel] [Main] UpdateState 2025-03-31T16:20:52.889508Z node 10 :CMS DEBUG: [Sentinel] [Main] Start StateUpdater 2025-03-31T16:20:52.889596Z node 10 :CMS DEBUG: Running CleanupWalleTasks 2025-03-31T16:20:52.889785Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 10, wbId# [10:8388350642965737326:1634689637] 2025-03-31T16:20:52.889832Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 11, wbId# [11:8388350642965737326:1634689637] 2025-03-31T16:20:52.889861Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 12, wbId# [12:8388350642965737326:1634689637] 2025-03-31T16:20:52.889904Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 13, wbId# [13:8388350642965737326:1634689637] 2025-03-31T16:20:52.889932Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 14, wbId# [14:8388350642965737326:1634689637] 2025-03-31T16:20:52.889961Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 15, wbId# [15:8388350642965737326:1634689637] 2025-03-31T16:20:52.889981Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 16, wbId# [16:8388350642965737326:1634689637] 2025-03-31T16:20:52.889998Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 17, wbId# [17:8388350642965737326:1634689637] 2025-03-31T16:20:52.890232Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 10, response# PDiskStateInfo { PDiskId: 10 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-10.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300029 2025-03-31T16:20:52.890716Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 12, response# PDiskStateInfo { PDiskId: 12 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-12.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300029 2025-03-31T16:20:52.890773Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 13, response# PDiskStateInfo { PDiskId: 13 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-13.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300029 2025-03-31T16:20:52.890815Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 14, response# PDiskStateInfo { PDiskId: 14 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-14.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300029 2025-03-31T16:20:52.890870Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: 
nodeId# 15, response# PDiskStateInfo { PDiskId: 15 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-15.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300029 2025-03-31T16:20:52.890921Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 16, response# PDiskStateInfo { PDiskId: 16 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-16.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300029 2025-03-31T16:20:52.890970Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 17, response# PDiskStateInfo { PDiskId: 17 CreateTime: 0 ChangeTime: 0 Path: "/17/pdisk-17.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300029 2025-03-31T16:20:52.891026Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 11, response# PDiskStateInfo { PDiskId: 11 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-11.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300029 2025-03-31T16:20:52.891071Z node 10 :CMS DEBUG: [Sentinel] [Main] State was updated in 0.000000s 2025-03-31T16:20:52.891247Z node 10 :CMS NOTICE: [Sentinel] [Main] PDisk status changed: pdiskId# 10:10, status# ACTIVE, required status# FAULTY, reason# Forced status, dry run# 0 2025-03-31T16:20:52.891305Z node 10 :CMS DEBUG: [Sentinel] [Main] Change pdisk status: requestsSize# 1 2025-03-31T16:20:52.891428Z node 10 :CMS DEBUG: TTxLogAndSend Execute 2025-03-31T16:20:52.891632Z node 10 :CMS DEBUG: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Success: true, cookie# 3 2025-03-31T16:20:52.891667Z node 10 :CMS NOTICE: [Sentinel] [Main] PDisk status has been changed: pdiskId# 10:10 >> TCmsTest::TestOutdatedState [GOOD] >> TCmsTest::TestSetResetMarkers >> TCmsTest::StateRequestUnknownNode [GOOD] >> TCmsTest::StateRequestUnknownMultipleNodes >> KqpPg::CopyTableSerialColumns+useSink [GOOD] >> KqpPg::CopyTableSerialColumns-useSink >> TCmsTest::CheckUnreplicatedDiskPreventsRestart [GOOD] >> TCmsTest::AllVDisksEvictionInRack >> TCmsTenatsTest::TestTenantRatioLimit [GOOD] >> TCmsTenatsTest::TestTenantRatioLimitForceRestartMode >> TMaintenanceApiTest::CompositeActionGroupSameStorageGroup [GOOD] >> TMaintenanceApiTest::ActionReason >> TMaintenanceApiTest::SingleCompositeActionGroup >> TCmsTest::WalleRebootDownNode [GOOD] >> TCmsTest::WalleCleanupTest >> Cdc::InitialScanAndResolvedTimestamps [GOOD] >> TCmsTest::StateStorageLockedNodes [GOOD] >> TCmsTest::VDisksEvictionShouldFailOnUnsupportedAction [GOOD] >> TCmsTest::VDisksEvictionShouldFailOnMultipleActions >> TCmsTest::TestLogOperationsRollback [GOOD] >> TCmsTenatsTest::TestScheduledPermissionWithNonePolicy [GOOD] >> TCmsTenatsTest::TestTenantLimitForceRestartMode >> TCmsTest::TestSetResetMarkers [GOOD] >> TCmsTest::TestProcessingQueue >> TCmsTest::StateRequestUnknownMultipleNodes [GOOD] >> TCmsTest::StateStorageAvailabilityMode >> KqpPg::DropTablePg [GOOD] >> KqpPg::DropTablePgMultiple >> TCmsTest::RequestReplacePDiskConsecutiveWithDone [GOOD] >> TCmsTest::ManageRequestsDry [GOOD] >> TCmsTest::Notifications >> TCmsTest::VDisksEviction [GOOD] >> TCmsTenatsTest::TestDefaultTenantPolicyWithSingleTenantHost [GOOD] >> TCmsTenatsTest::TestLimitsWithDownNode >> BasicUsage::RecreateObserver [GOOD] >> TCmsTenatsTest::TestClusterLimit [GOOD] >> 
TCmsTenatsTest::RequestShutdownHost |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::StateStorageLockedNodes [GOOD] |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::TestLogOperationsRollback [GOOD] >> KqpLimits::QueryExecTimeout [GOOD] >> KqpLimits::QSReplySize-useSink |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::RequestReplacePDiskConsecutiveWithDone [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::VDisksEviction [GOOD] Test command err: 2025-03-31T16:20:57.238321Z node 18 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-03-31T16:20:57.238412Z node 18 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-03-31T16:20:57.238568Z node 18 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2025-03-31T16:20:57.240165Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120028512 } Devices { Name: "pdisk-18-18" State: UP Timestamp: 120028512 } Timestamp: 120028512 NodeId: 18 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120028512 } Devices { Name: "pdisk-19-19" State: UP Timestamp: 120028512 } Timestamp: 120028512 NodeId: 19 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120028512 } Devices { Name: "pdisk-20-20" State: UP Timestamp: 120028512 } Timestamp: 120028512 NodeId: 20 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120028512 } Devices { Name: "pdisk-21-21" State: UP Timestamp: 120028512 } Timestamp: 120028512 NodeId: 21 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028512 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120028512 } Devices { Name: 
"vdisk-1-1-0-4-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120028512 } Devices { Name: "pdisk-22-22" State: UP Timestamp: 120028512 } Timestamp: 120028512 NodeId: 22 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028512 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120028512 } Devices { Name: "pdisk-23-23" State: UP Timestamp: 120028512 } Timestamp: 120028512 NodeId: 23 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028512 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120028512 } Devices { Name: "pdisk-24-24" State: UP Timestamp: 120028512 } Timestamp: 120028512 NodeId: 24 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028512 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120028512 } Devices { Name: "pdisk-25-25" State: UP Timestamp: 120028512 } Timestamp: 120028512 NodeId: 25 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120028512 } } 2025-03-31T16:20:57.240949Z node 18 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120028512 } Devices { Name: "pdisk-18-18" State: UP Timestamp: 120028512 } Timestamp: 120028512 NodeId: 18 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120028512 } Devices { Name: "pdisk-19-19" State: UP Timestamp: 120028512 } Timestamp: 120028512 NodeId: 19 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028512 } Devices { Name: 
"vdisk-0-1-0-2-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120028512 } Devices { Name: "pdisk-20-20" State: UP Timestamp: 120028512 } Timestamp: 120028512 NodeId: 20 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120028512 } Devices { Name: "pdisk-21-21" State: UP Timestamp: 120028512 } Timestamp: 120028512 NodeId: 21 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028512 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120028512 } Devices { Name: "pdisk-22-22" State: UP Timestamp: 120028512 } Timestamp: 120028512 NodeId: 22 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028512 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120028512 } Devices { Name: "pdisk-23-23" State: UP Timestamp: 120028512 } Timestamp: 120028512 NodeId: 23 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028512 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120028512 } Devices { Name: "pdisk-24-24" State: UP Timestamp: 120028512 } Timestamp: 120028512 NodeId: 24 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028512 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120028512 } Devices { Name: "pdisk-25-25" State: UP Timestamp: 120028512 } Timestamp: 120028512 NodeId: 25 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120028512 } 2025-03-31T16:20:57.241162Z node 18 :CMS DEBUG: [Sentinel] [Main] Config was updated in 120.003512s 2025-03-31T16:20:57.241203Z node 18 :CMS DEBUG: [Sentinel] [Main] Start StateUpdater 
2025-03-31T16:20:57.241330Z node 18 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 600000000 } PartialPermissionAllowed: false Schedule: false DryRun: false EvictVDisks: true 2025-03-31T16:20:57.241378Z node 18 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 600000000 2025-03-31T16:20:57.241438Z node 18 :CMS DEBUG: Result: DISALLOW_TEMP (reason: VDisks eviction from host 18 has not yet been completed) 2025-03-31T16:20:57.241552Z node 18 :CMS DEBUG: TTxStorePermissions Execute 2025-03-31T16:20:57.241724Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# 0, body# User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 18 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2025-03-31T16:20:57.241770Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Add host marker: host# 18, marker# MARKER_DISK_FAULTY 2025-03-31T16:20:57.241966Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 18, wbId# [18:8388350642965737326:1634689637] 2025-03-31T16:20:57.242002Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 19, wbId# [19:8388350642965737326:1634689637] 2025-03-31T16:20:57.242021Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 20, wbId# [20:8388350642965737326:1634689637] 2025-03-31T16:20:57.242039Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 21, wbId# [21:8388350642965737326:1634689637] 2025-03-31T16:20:57.242075Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 22, wbId# [22:8388350642965737326:1634689637] 2025-03-3 ... 
mp: 120541560 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120541560 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120541560 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120541560 } Devices { Name: "pdisk-25-25" State: UP Timestamp: 120541560 } Timestamp: 120541560 NodeId: 25 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120541560 } } 2025-03-31T16:20:57.555698Z node 18 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120541560 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120541560 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120541560 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120541560 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120541560 } Devices { Name: "pdisk-18-18" State: UP Timestamp: 120541560 } Timestamp: 120541560 NodeId: 18 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120541560 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120541560 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120541560 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120541560 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120541560 } Devices { Name: "pdisk-19-19" State: UP Timestamp: 120541560 } Timestamp: 120541560 NodeId: 19 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120541560 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120541560 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120541560 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120541560 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120541560 } Devices { Name: "pdisk-20-20" State: UP Timestamp: 120541560 } Timestamp: 120541560 NodeId: 20 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120541560 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120541560 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120541560 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120541560 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120541560 } Devices { Name: "pdisk-21-21" State: UP Timestamp: 120541560 } Timestamp: 120541560 NodeId: 21 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120541560 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120541560 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120541560 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120541560 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120541560 } Devices { Name: "pdisk-22-22" State: UP Timestamp: 120541560 } Timestamp: 120541560 NodeId: 22 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" 
Timestamp: 120541560 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120541560 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120541560 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120541560 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120541560 } Devices { Name: "pdisk-23-23" State: UP Timestamp: 120541560 } Timestamp: 120541560 NodeId: 23 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120541560 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120541560 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120541560 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120541560 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120541560 } Devices { Name: "pdisk-24-24" State: UP Timestamp: 120541560 } Timestamp: 120541560 NodeId: 24 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120541560 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120541560 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120541560 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120541560 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120541560 } Devices { Name: "pdisk-25-25" State: UP Timestamp: 120541560 } Timestamp: 120541560 NodeId: 25 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120541560 } 2025-03-31T16:20:57.555923Z node 18 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 600000000 } PartialPermissionAllowed: false Schedule: false DryRun: false EvictVDisks: true 2025-03-31T16:20:57.555984Z node 18 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 600000000 2025-03-31T16:20:57.556027Z node 18 :CMS DEBUG: Result: DISALLOW_TEMP (reason: VDisks eviction from host 18 has not yet been completed) 2025-03-31T16:20:57.556123Z node 18 :CMS DEBUG: TTxStorePermissions Execute 2025-03-31T16:20:57.556266Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-3, owner# user, order# 3, priority# 0, body# User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 18 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2025-03-31T16:20:57.556301Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Add host marker: host# 18, marker# MARKER_DISK_FAULTY 2025-03-31T16:20:57.556471Z node 18 :CMS DEBUG: [Sentinel] [Main] Config was updated in 0.100000s 2025-03-31T16:20:57.556511Z node 18 :CMS DEBUG: [Sentinel] [Main] Start StateUpdater 2025-03-31T16:20:57.556587Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 18, wbId# [18:8388350642965737326:1634689637] 2025-03-31T16:20:57.556621Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 19, wbId# [19:8388350642965737326:1634689637] 2025-03-31T16:20:57.556642Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 20, wbId# [20:8388350642965737326:1634689637] 2025-03-31T16:20:57.556668Z node 18 :CMS DEBUG: [Sentinel] 
[StateUpdater] Request pdisks state: nodeId# 21, wbId# [21:8388350642965737326:1634689637] 2025-03-31T16:20:57.556692Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 22, wbId# [22:8388350642965737326:1634689637] 2025-03-31T16:20:57.556714Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 23, wbId# [23:8388350642965737326:1634689637] 2025-03-31T16:20:57.556729Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 24, wbId# [24:8388350642965737326:1634689637] 2025-03-31T16:20:57.556748Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 25, wbId# [25:8388350642965737326:1634689637] 2025-03-31T16:20:57.556875Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 18, response# PDiskStateInfo { PDiskId: 18 CreateTime: 120443072 ChangeTime: 120443072 Path: "/18/pdisk-18.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120541 2025-03-31T16:20:57.557318Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 23, response# PDiskStateInfo { PDiskId: 23 CreateTime: 120443072 ChangeTime: 120443072 Path: "/23/pdisk-23.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120541 2025-03-31T16:20:57.557436Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 24, response# PDiskStateInfo { PDiskId: 24 CreateTime: 120443072 ChangeTime: 120443072 Path: "/24/pdisk-24.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120541 2025-03-31T16:20:57.557478Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 25, response# PDiskStateInfo { PDiskId: 25 CreateTime: 120443072 ChangeTime: 120443072 Path: "/25/pdisk-25.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120541 2025-03-31T16:20:57.557516Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 21, response# PDiskStateInfo { PDiskId: 21 CreateTime: 120443072 ChangeTime: 120443072 Path: "/21/pdisk-21.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120541 2025-03-31T16:20:57.557551Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 22, response# PDiskStateInfo { PDiskId: 22 CreateTime: 120443072 ChangeTime: 120443072 Path: "/22/pdisk-22.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120541 2025-03-31T16:20:57.557602Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 19, response# PDiskStateInfo { PDiskId: 19 CreateTime: 120443072 ChangeTime: 120443072 Path: "/19/pdisk-19.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120541 2025-03-31T16:20:57.557646Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 20, response# PDiskStateInfo { PDiskId: 20 CreateTime: 120443072 ChangeTime: 120443072 Path: "/20/pdisk-20.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120541 2025-03-31T16:20:57.557672Z node 18 :CMS DEBUG: [Sentinel] [Main] State was updated in 0.000000s 2025-03-31T16:20:57.569832Z node 18 :CMS DEBUG: TTxStorePermissions 
complete 2025-03-31T16:20:57.570064Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 600000000 } PartialPermissionAllowed: false Schedule: false DryRun: false EvictVDisks: true }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "VDisks eviction from host 18 has not yet been completed" } RequestId: "user-r-3" Deadline: 0 } 2025-03-31T16:20:57.570663Z node 18 :CMS INFO: User user removes request user-r-3 2025-03-31T16:20:57.570716Z node 18 :CMS DEBUG: Resulting status: OK 2025-03-31T16:20:57.570781Z node 18 :CMS DEBUG: TTxRemoveRequest Execute 2025-03-31T16:20:57.570821Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Reset host markers: host# 18 2025-03-31T16:20:57.570959Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Remove request: id# user-r-3, reason# explicit remove 2025-03-31T16:20:57.583277Z node 18 :CMS DEBUG: TTxRemoveRequest Complete 2025-03-31T16:20:57.583461Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManageRequestRequest { User: "user" Command: REJECT RequestId: "user-r-3" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManageRequestResponse { Status { Code: OK } } >> TMaintenanceApiTest::ActionReason [GOOD] >> KqpLimits::DatashardReplySize [GOOD] >> TCmsTenatsTest::TestTenantRatioLimitForceRestartMode [GOOD] >> TCmsTenatsTest::TestTenantRatioLimitForceRestartModeScheduled >> TCmsTest::AllVDisksEvictionInRack [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultTextNotNull+useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultTextNotNull-useSink >> KqpPg::InsertValuesFromTableWithDefaultAndCast+useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultAndCast-useSink |90.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TMaintenanceApiTest::ActionReason [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_exchange/unittest >> Cdc::InitialScanAndResolvedTimestamps [GOOD] Test command err: 2025-03-31T16:18:02.192438Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008686581343015:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:18:02.192502Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018ff/r3tmp/tmpPd8yrt/pdisk_1.dat 2025-03-31T16:18:02.632185Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:18:02.662730Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:18:02.662825Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:18:02.667506Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29756, node 1 2025-03-31T16:18:02.823284Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:18:02.823310Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:18:02.823316Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:18:02.823449Z node 1 
:NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:18:03.096473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:18:03.152261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:18:03.199191Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:7488008690876310915:2308] 2025-03-31T16:18:03.199663Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:18:03.220086Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:18:03.220156Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:18:03.224245Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:18:03.224324Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:18:03.224373Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:18:03.225605Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:18:03.225697Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:18:03.225750Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:7488008690876310942:2308] in generation 1 2025-03-31T16:18:03.227593Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:18:03.278476Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T16:18:03.278664Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:18:03.278734Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:7488008690876310944:2310] 2025-03-31T16:18:03.278745Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:18:03.278760Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T16:18:03.278788Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:18:03.278938Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T16:18:03.279027Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T16:18:03.279053Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:18:03.279107Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:18:03.279128Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:18:03.279149Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:18:03.279406Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7488008690876310920:2301], serverId# [1:7488008690876310924:2304], sessionId# [0:0:0] 2025-03-31T16:18:03.279994Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:18:03.280415Z node 1 :TX_DATASHARD DEBUG: Propose scheme 
transaction at tablet 72075186224037888 txId 281474976710657 ssId 72057594046644480 seqNo 2:1 2025-03-31T16:18:03.280506Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976710657 at tablet 72075186224037888 2025-03-31T16:18:03.282282Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:18:03.282362Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:18:03.282414Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-31T16:18:03.285346Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7488008690876310958:2319], serverId# [1:7488008690876310960:2321], sessionId# [0:0:0] 2025-03-31T16:18:03.292302Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976710657 at step 1743437883327 at tablet 72075186224037888 { Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743437883327 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T16:18:03.292352Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:18:03.292503Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:18:03.292612Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:18:03.292671Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:18:03.292694Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1743437883327:281474976710657] in PlanQueue unit at 72075186224037888 2025-03-31T16:18:03.293047Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1743437883327:281474976710657 keys extracted: 0 2025-03-31T16:18:03.293170Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:18:03.293393Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:18:03.293468Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-31T16:18:03.295930Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-31T16:18:03.296410Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:18:03.297559Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1743437883327} 2025-03-31T16:18:03.297634Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:18:03.297682Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1743437883334 2025-03-31T16:18:03.297733Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:18:03.297792Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1743437883334 2025-03-31T16:18:03.297832Z node 1 :TX_DATASHARD DEBUG: 
TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:18:03.297870Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:18:03.297904Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-31T16:18:03.297947Z node 1 :TX_DATASHARD DEBUG: Complete [1743437883327 : 281474976710657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:7488008686581343434:2194], exec latency: 3 ms, propose latency: 4 ms 2025-03-31T16:18:03.297975Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976710657 state Ready TxInFly 0 2025-03-31T16:18:03.298039Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:18:03.301298Z node 1 :CHANGE_EXCHANGE DEBUG: [ChangeSender][72075186224037888:1][1:7488008690876310944:2310][Inactive] Handle NKikimrChangeExchange.TEvActivateSender 2025-03-31T16:18:03.303326Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976710657 datashard 72075186224037888 state Ready 2025-03-31T16:18:03.303390Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-31T16:18:03.319072Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:18:03.320809Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:18:03.320940Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976710658 ssId 72057594046644480 seqNo 2:2 2025-03-31T16:18:03.320990Z node 1 :TX_DATASHARD INFO: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976710658 2025-03-31T16:18:03.321021Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976710658 at tablet 72075186224037888 2025-03-31T16:18:03.322520Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:18:03.392062Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Handle TEvInterconnect::TEvNodeInfo 2025-03-31T16:18:03.393254Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Registered with mediator time cast 2025-03-31T16:18:03.393533Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-03-31T16:18:03.393847Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] doesn't have tx info 2025-03-31T16:18:03.393885Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-31T16:18:03.393920Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] no config, start with empty partitions and default config 2025-03-31T16:18:03.393944Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Txs.size=0, PlannedTxs.size=0 2025-03-31T16:18:03.394036Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037889] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:18:03.394076Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037889] doesn't have tx writes info 2025-03-31T16:18:03.394861Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] server connected, pipe [1:7488008690876311020:2367], now have 1 active actors on pipe 2025-03-31T16:18:03.394924Z node 1 :PERSQUEUE DEBUG: [P ... 
G: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:20:58.062712Z node 27 :TX_DATASHARD DEBUG: Add schema snapshot: pathId# [OwnerId: 72057594046644480, LocalPathId: 2], version# 3, step# 7500, txId# 281474976715662, at tablet# 72075186224037888 2025-03-31T16:20:58.063194Z node 27 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:20:58.079744Z node 27 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 7500} 2025-03-31T16:20:58.079940Z node 27 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:20:58.080007Z node 27 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:20:58.080057Z node 27 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:20:58.080255Z node 27 :TX_DATASHARD DEBUG: Complete [7500 : 281474976715662] from 72075186224037888 at tablet 72075186224037888 send result to client [27:410:2405], exec latency: 0 ms, propose latency: 0 ms 2025-03-31T16:20:58.080367Z node 27 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715662 state Ready TxInFly 0 2025-03-31T16:20:58.080551Z node 27 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:20:58.081230Z node 27 :TX_DATASHARD INFO: [CdcStreamHeartbeat] Emit change records: edge# v7500/18446744073709551615, at tablet# 72075186224037888 2025-03-31T16:20:58.081461Z node 27 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 4 Group: 0 Step: 6000 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 7] Kind: CdcHeartbeat Source: Unspecified Body: 0b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 0 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-03-31T16:20:58.082208Z node 27 :TX_DATASHARD INFO: [CdcStreamHeartbeat] Emit change records: edge# v7500/18446744073709551615, at tablet# 72075186224037888 2025-03-31T16:20:58.083538Z node 27 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715662 datashard 72075186224037888 state Ready 2025-03-31T16:20:58.083677Z node 27 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-31T16:20:58.095420Z node 27 :TX_DATASHARD INFO: [CdcStreamHeartbeat] Enqueue 1 change record(s): at tablet# 72075186224037888 2025-03-31T16:20:58.095601Z node 27 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 4 PathId: [OwnerId: 72057594046644480, LocalPathId: 7] BodySize: 0 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 0 } 2025-03-31T16:20:58.095721Z node 27 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:20:58.095846Z node 27 :TX_DATASHARD DEBUG: Waiting for PlanStep# 9000 from mediator time cast 2025-03-31T16:20:58.095928Z node 27 :TX_DATASHARD INFO: [CdcStreamHeartbeat] Enqueue 0 change record(s): at tablet# 72075186224037888 2025-03-31T16:20:58.096021Z node 27 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:20:58.096198Z node 27 :CHANGE_EXCHANGE DEBUG: [ChangeSender][72075186224037888:1][27:685:2581] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvEnqueueRecords { Records [{ Order: 4 PathId: [OwnerId: 72057594046644480, LocalPathId: 7] BodySize: 0 }] } 2025-03-31T16:20:58.096433Z node 27 
:CHANGE_EXCHANGE DEBUG: [CdcChangeSenderMain][72075186224037888:1][27:981:2779] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvEnqueueRecords { Records [{ Order: 4 PathId: [OwnerId: 72057594046644480, LocalPathId: 7] BodySize: 0 }] } 2025-03-31T16:20:58.096722Z node 27 :TX_DATASHARD INFO: TTxRequestChangeRecords Execute: at tablet# 72075186224037888 2025-03-31T16:20:58.096883Z node 27 :TX_DATASHARD DEBUG: Send 1 change records: to# [27:981:2779], at tablet# 72075186224037888 2025-03-31T16:20:58.096919Z node 27 :TX_DATASHARD INFO: TTxRequestChangeRecords Complete: sent# 1, forgotten# 0, left# 0, at tablet# 72075186224037888 2025-03-31T16:20:58.097034Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderMain][72075186224037888:1][27:981:2779] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 4 Group: 0 Step: 6000 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 7] Kind: CdcHeartbeat Source: Unspecified Body: 0b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2025-03-31T16:20:58.097218Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderPartition][72075186224037888:1][0][72075186224037889][27:1065:2779] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 4 Group: 0 Step: 6000 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 7] Kind: CdcHeartbeat Source: Unspecified Body: 0b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2025-03-31T16:20:58.097612Z node 27 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-03-31T16:20:58.097651Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-03-31T16:20:58.097748Z node 27 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 2 requestId: cookie: 2 2025-03-31T16:20:58.097862Z node 27 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-03-31T16:20:58.097886Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-03-31T16:20:58.097923Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message topic: Table/Stream/streamImpl partition: 0 SourceId: '\00072075186224037888' SeqNo: 4 partNo : 0 messageNo: 3 size 26 offset: -1 2025-03-31T16:20:58.098096Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 process heartbeat sourceId '\00072075186224037888' version v6000/0 2025-03-31T16:20:58.098237Z node 27 :PERSQUEUE INFO: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 emit heartbeat v6000/0 2025-03-31T16:20:58.098457Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 part blob processing sourceId '\00072075186224037889' seqNo 0 partNo 0 2025-03-31T16:20:58.099277Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 part blob complete sourceId '\00072075186224037889' seqNo 0 partNo 0 FormedBlobsCount 0 NewHead: Offset 3 PartNo 0 PackedSize 107 count 1 nextOffset 4 batches 1 2025-03-31T16:20:58.099738Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Add new write blob: topic 'Table/Stream/streamImpl' partition 0 compactOffset 3,1 HeadOffset 0 endOffset 3 
curOffset 4 d0000000000_00000000000000000003_00000_0000000001_00000| size 93 WTime 7451 2025-03-31T16:20:58.099854Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-31T16:20:58.099880Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-31T16:20:58.099904Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-03-31T16:20:58.099928Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-31T16:20:58.099952Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] m0000000000p72075186224037889 2025-03-31T16:20:58.099972Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] d0000000000_00000000000000000003_00000_0000000001_00000| 2025-03-31T16:20:58.099992Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] i0000000000 2025-03-31T16:20:58.100015Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-31T16:20:58.100041Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] =========================== 2025-03-31T16:20:58.100117Z node 27 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-03-31T16:20:58.100178Z node 27 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 3 partNo 0 count 1 size 93 2025-03-31T16:20:58.100879Z node 27 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 3 count 1 size 93 actorID [27:922:2734] 2025-03-31T16:20:58.100978Z node 27 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. Tablet '72075186224037889' partition 0 offset 3 partno 0 count 1 parts 0 size 93 2025-03-31T16:20:58.111284Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 44 WriteNewSizeFromSupportivePartitions# 0 2025-03-31T16:20:58.111415Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-03-31T16:20:58.111473Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Answering for message sourceid: '\00072075186224037888', Topic: 'Table/Stream/streamImpl', Partition: 0, SeqNo: 4, partNo: 0, Offset: 3 is stored on disk 2025-03-31T16:20:58.111668Z node 27 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 3 requestId: cookie: 2 2025-03-31T16:20:58.111892Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderPartition][72075186224037888:1][0][72075186224037889][27:1065:2779] Handle NKikimrClient.TResponse { SessionId: TxId: Success { Response: Status: 1 ErrorCode: OK PartitionResponse { CmdWriteResult { AlreadyWritten: false SourceId: "\00072075186224037888" SeqNo: 4 Offset: 3 WriteTimestampMS: 7451 PartitionQuotedTimeMs: 0 TotalTimeInPartitionQueueMs: 0 WriteTimeMs: 0 TopicQuotedTimeMs: 0 WrittenInTx: false } Cookie: 2 } } } 2025-03-31T16:20:58.111973Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderMain][72075186224037888:1][27:981:2779] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 0 } 2025-03-31T16:20:58.112112Z node 27 :TX_DATASHARD INFO: TTxRemoveChangeRecords Execute: records# 1, at tablet# 72075186224037888 2025-03-31T16:20:58.112140Z node 27 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 4, at tablet: 72075186224037888 2025-03-31T16:20:58.122876Z node 27 :TX_DATASHARD INFO: TTxRemoveChangeRecords Complete: removed# 1, left# 0, at tablet# 72075186224037888 >>>>> GetRecords path=/Root/Table/Stream partitionId=0 2025-03-31T16:20:58.259219Z node 27 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-03-31T16:20:58.259297Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-03-31T16:20:58.259478Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 2 Topic 'Table/Stream/streamImpl' partition 0 user $without_consumer offset 0 count 10000 size 26214400 endOffset 4 max time lag 0ms effective offset 0 2025-03-31T16:20:58.259582Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 2 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 4 2025-03-31T16:20:58.260035Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Reading cookie 2. All data is from uncompacted head. 
2025-03-31T16:20:58.260154Z node 27 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-03-31T16:20:58.261037Z node 27 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::AllVDisksEvictionInRack [GOOD] Test command err: 2025-03-31T16:20:57.929740Z node 25 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-03-31T16:20:57.929833Z node 25 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-03-31T16:20:57.929953Z node 25 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2025-03-31T16:20:57.931343Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029000 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120029000 } Devices { Name: "pdisk-25-25" State: UP Timestamp: 120029000 } Timestamp: 120029000 NodeId: 25 InterconnectPort: 12001 Location { Rack: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029000 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120029000 } Devices { Name: "pdisk-26-26" State: UP Timestamp: 120029000 } Timestamp: 120029000 NodeId: 26 InterconnectPort: 12002 Location { Rack: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029000 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120029000 } Devices { Name: "pdisk-27-27" State: UP Timestamp: 120029000 } Timestamp: 120029000 NodeId: 27 InterconnectPort: 12003 Location { Rack: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029000 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120029000 } Devices { Name: "pdisk-28-28" State: UP Timestamp: 120029000 } Timestamp: 120029000 NodeId: 28 InterconnectPort: 12004 Location { Rack: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029000 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120029000 } Devices { Name: "pdisk-29-29" State: UP Timestamp: 120029000 } Timestamp: 120029000 NodeId: 29 InterconnectPort: 12005 Location { Rack: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP 
Version: "-1" Timestamp: 120029000 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120029000 } Devices { Name: "pdisk-30-30" State: UP Timestamp: 120029000 } Timestamp: 120029000 NodeId: 30 InterconnectPort: 12006 Location { Rack: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029000 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120029000 } Devices { Name: "pdisk-31-31" State: UP Timestamp: 120029000 } Timestamp: 120029000 NodeId: 31 InterconnectPort: 12007 Location { Rack: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029000 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120029000 } Devices { Name: "pdisk-32-32" State: UP Timestamp: 120029000 } Timestamp: 120029000 NodeId: 32 InterconnectPort: 12008 Location { Rack: "4" } StartTimeSeconds: 0 } Timestamp: 120029000 } } 2025-03-31T16:20:57.931869Z node 25 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029000 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120029000 } Devices { Name: "pdisk-25-25" State: UP Timestamp: 120029000 } Timestamp: 120029000 NodeId: 25 InterconnectPort: 12001 Location { Rack: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029000 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120029000 } Devices { Name: "pdisk-26-26" State: UP Timestamp: 120029000 } Timestamp: 120029000 NodeId: 26 InterconnectPort: 12002 Location { Rack: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029000 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120029000 } Devices { Name: "pdisk-27-27" State: UP Timestamp: 120029000 } Timestamp: 120029000 NodeId: 27 InterconnectPort: 12003 Location { Rack: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029000 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 
120029000 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120029000 } Devices { Name: "pdisk-28-28" State: UP Timestamp: 120029000 } Timestamp: 120029000 NodeId: 28 InterconnectPort: 12004 Location { Rack: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029000 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120029000 } Devices { Name: "pdisk-29-29" State: UP Timestamp: 120029000 } Timestamp: 120029000 NodeId: 29 InterconnectPort: 12005 Location { Rack: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029000 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120029000 } Devices { Name: "pdisk-30-30" State: UP Timestamp: 120029000 } Timestamp: 120029000 NodeId: 30 InterconnectPort: 12006 Location { Rack: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029000 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120029000 } Devices { Name: "pdisk-31-31" State: UP Timestamp: 120029000 } Timestamp: 120029000 NodeId: 31 InterconnectPort: 12007 Location { Rack: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029000 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120029000 } Devices { Name: "pdisk-32-32" State: UP Timestamp: 120029000 } Timestamp: 120029000 NodeId: 32 InterconnectPort: 12008 Location { Rack: "4" } StartTimeSeconds: 0 } Timestamp: 120029000 } 2025-03-31T16:20:57.932023Z node 25 :CMS DEBUG: [Sentinel] [Main] Config was updated in 120.003000s 2025-03-31T16:20:57.932063Z node 25 :CMS DEBUG: [Sentinel] [Main] Start StateUpdater 2025-03-31T16:20:57.932200Z node 25 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 600000000 } PartialPermissionAllowed: false Schedule: false DryRun: false EvictVDisks: true 2025-03-31T16:20:57.932256Z node 25 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 600000000 2025-03-31T16:20:57.932330Z node 25 :CMS DEBUG: Result: DISALLOW_TEMP (reason: VDisks eviction from host 25 has not yet been completed) 2025-03-31T16:20:57.932454Z node 25 :CMS DEBUG: TTxStorePermissions Execute 2025-03-31T16:20:57.932620Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# 0, body# User: "user" Actions { Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 
25 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2025-03-31T16:20:57.932665Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Add host marker: host# 25, marker# MARKER_DISK_FAULTY 2025-03-31T16:20:57.932904Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 25, wbId# [25:8388350642965737326:1634689637] 2025-03-31T16:20:57.932944Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 26, wbId# [26:8388350642965737326:1634689637] 2025-03-31T16:20:57.932967Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 27, wbId# [27:8388350642965737326:1634689637] 2025-03-31T16:20:57.932987Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 28, wbId# [28:8388350642965737326:1634689637] 2025-03-31T16:20:57.933010Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 29, wbId# [29:8388350642965737326:1634689637] 2025-03-31T16:20:57.933033Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 30, wbId# [30:8388350642965737326:1634689637] 2025-03-31T16:20:57.933054Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 31, wbId# [31:8388350642965737326:1634689637] 2025-03-31T16:20:57.933074Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 32, wbId# [32:8388350642965737326:1634689637] 2025-03-31T16:20:57.939648Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 25, response# PDiskStateInfo { PDiskId: ... [28:8388350642965737326:1634689637] 2025-03-31T16:20:58.137578Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 29, wbId# [29:8388350642965737326:1634689637] 2025-03-31T16:20:58.137594Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 30, wbId# [30:8388350642965737326:1634689637] 2025-03-31T16:20:58.137610Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 31, wbId# [31:8388350642965737326:1634689637] 2025-03-31T16:20:58.137635Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 32, wbId# [32:8388350642965737326:1634689637] 2025-03-31T16:20:58.137953Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 25, response# PDiskStateInfo { PDiskId: 25 CreateTime: 0 ChangeTime: 0 Path: "/25/pdisk-25.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180029 2025-03-31T16:20:58.138346Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 29, response# PDiskStateInfo { PDiskId: 29 CreateTime: 0 ChangeTime: 0 Path: "/29/pdisk-29.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180029 2025-03-31T16:20:58.138512Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 30, response# PDiskStateInfo { PDiskId: 30 CreateTime: 0 ChangeTime: 0 Path: "/30/pdisk-30.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180029 2025-03-31T16:20:58.138558Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 31, response# PDiskStateInfo { PDiskId: 31 CreateTime: 0 ChangeTime: 0 Path: "/31/pdisk-31.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 
State: Normal } ResponseTime: 180029 2025-03-31T16:20:58.138605Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 26, response# PDiskStateInfo { PDiskId: 26 CreateTime: 0 ChangeTime: 0 Path: "/26/pdisk-26.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180029 2025-03-31T16:20:58.138683Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 27, response# PDiskStateInfo { PDiskId: 27 CreateTime: 0 ChangeTime: 0 Path: "/27/pdisk-27.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180029 2025-03-31T16:20:58.138734Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 28, response# PDiskStateInfo { PDiskId: 28 CreateTime: 0 ChangeTime: 0 Path: "/28/pdisk-28.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180029 2025-03-31T16:20:58.138767Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 32, response# PDiskStateInfo { PDiskId: 32 CreateTime: 0 ChangeTime: 0 Path: "/32/pdisk-32.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180029 2025-03-31T16:20:58.138795Z node 25 :CMS DEBUG: [Sentinel] [Main] State was updated in 0.000000s 2025-03-31T16:20:58.138930Z node 25 :CMS NOTICE: [Sentinel] [Main] PDisk status changed: pdiskId# 26:26, status# ACTIVE, required status# FAULTY, reason# Forced status, dry run# 0 2025-03-31T16:20:58.138991Z node 25 :CMS NOTICE: [Sentinel] [Main] PDisk status changed: pdiskId# 25:25, status# ACTIVE, required status# FAULTY, reason# Forced status, dry run# 0 2025-03-31T16:20:58.139021Z node 25 :CMS DEBUG: [Sentinel] [Main] Change pdisk status: requestsSize# 2 2025-03-31T16:20:58.139219Z node 25 :CMS DEBUG: TTxLogAndSend Execute 2025-03-31T16:20:58.139348Z node 25 :CMS DEBUG: TTxLogAndSend Execute 2025-03-31T16:20:58.139432Z node 25 :CMS DEBUG: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Status { Success: true } Success: true, cookie# 1 2025-03-31T16:20:58.139487Z node 25 :CMS NOTICE: [Sentinel] [Main] PDisk status has been changed: pdiskId# 25:25 2025-03-31T16:20:58.139520Z node 25 :CMS NOTICE: [Sentinel] [Main] PDisk status has been changed: pdiskId# 26:26 2025-03-31T16:20:58.151901Z node 25 :CMS DEBUG: TTxLogAndSend Complete 2025-03-31T16:20:58.151972Z node 25 :CMS DEBUG: TTxLogAndSend Complete 2025-03-31T16:20:58.166107Z node 25 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-03-31T16:20:58.166189Z node 25 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-03-31T16:20:58.166232Z node 25 :CMS DEBUG: Timestamp: 1970-01-01T00:03:00Z 2025-03-31T16:20:58.166789Z node 25 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 25 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2025-03-31T16:20:58.166856Z node 25 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 25 has not yet been completed" } 2025-03-31T16:20:58.166897Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 25, with state: Up, with limit: 
0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2025-03-31T16:20:58.166975Z node 25 :CMS DEBUG: Ring: 0; State: Ok 2025-03-31T16:20:58.166996Z node 25 :CMS DEBUG: Ring: 1; State: Ok 2025-03-31T16:20:58.167009Z node 25 :CMS DEBUG: Ring: 2; State: Ok 2025-03-31T16:20:58.167026Z node 25 :CMS DEBUG: Result: ALLOW 2025-03-31T16:20:58.167154Z node 25 :CMS DEBUG: Accepting permission: id# user-p-1, requestId# user-r-1, owner# user 2025-03-31T16:20:58.167199Z node 25 :CMS INFO: Adding lock for Host ::1:12001 (25) (permission user-p-1 until 1970-01-01T00:13:00Z) 2025-03-31T16:20:58.167255Z node 25 :CMS DEBUG: TTxStorePermissions Execute 2025-03-31T16:20:58.167391Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-1, validity# 1970-01-01T00:13:00.129000Z, action# Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 600000000 2025-03-31T16:20:58.167481Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# 0, body# User: "user" PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2025-03-31T16:20:58.179169Z node 25 :CMS DEBUG: TTxStorePermissions complete 2025-03-31T16:20:58.179375Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-1" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-1" Permissions { Id: "user-p-1" Action { Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 600000000 } Deadline: 780129000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 25 InterconnectPort: 12001 } } } } 2025-03-31T16:20:58.179415Z node 25 :CMS DEBUG: Schedule cleanup at 1970-01-01T00:33:00.129000Z 2025-03-31T16:20:58.193741Z node 25 :CMS INFO: Adding lock for Host ::1:12001 (25) (permission user-p-1 until 1970-01-01T00:13:00Z) 2025-03-31T16:20:58.193990Z node 25 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-03-31T16:20:58.194041Z node 25 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-03-31T16:20:58.194101Z node 25 :CMS DEBUG: Timestamp: 1970-01-01T00:03:00Z 2025-03-31T16:20:58.194652Z node 25 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 26 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2025-03-31T16:20:58.194748Z node 25 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 26 has not yet been completed" } 2025-03-31T16:20:58.194813Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 26, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 1, down nodes: 0 2025-03-31T16:20:58.194853Z node 25 :CMS DEBUG: Result: ALLOW 2025-03-31T16:20:58.194958Z node 25 :CMS DEBUG: Accepting permission: id# user-p-2, requestId# user-r-2, owner# user 2025-03-31T16:20:58.195014Z node 25 :CMS INFO: Adding lock for Host ::1:12002 (26) (permission user-p-2 until 1970-01-01T00:13:00Z) 2025-03-31T16:20:58.195111Z node 25 :CMS DEBUG: TTxStorePermissions Execute 2025-03-31T16:20:58.195266Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-2, validity# 
1970-01-01T00:13:00.230512Z, action# Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000 2025-03-31T16:20:58.195385Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-2, owner# user, order# 2, priority# 0, body# User: "user" PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2025-03-31T16:20:58.207267Z node 25 :CMS DEBUG: TTxStorePermissions complete 2025-03-31T16:20:58.207516Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-2" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-2" Permissions { Id: "user-p-2" Action { Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000 } Deadline: 780230512 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 26 InterconnectPort: 12002 } } } } 2025-03-31T16:20:58.208025Z node 25 :CMS INFO: User user is done with permissions user-p-1 2025-03-31T16:20:58.208066Z node 25 :CMS DEBUG: Resulting status: OK 2025-03-31T16:20:58.208116Z node 25 :CMS DEBUG: TTxRemovePermissions Execute 2025-03-31T16:20:58.208176Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reset host markers: host# 25 2025-03-31T16:20:58.208267Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Remove request: id# user-r-1, reason# permission user-p-1 was removed 2025-03-31T16:20:58.208390Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Remove permission: id# user-p-1, reason# explicit remove 2025-03-31T16:20:58.220471Z node 25 :CMS DEBUG: TTxRemovePermissions Complete 2025-03-31T16:20:58.220640Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-1" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } } 2025-03-31T16:20:58.221108Z node 25 :CMS INFO: User user is done with permissions user-p-2 2025-03-31T16:20:58.221150Z node 25 :CMS DEBUG: Resulting status: OK 2025-03-31T16:20:58.221197Z node 25 :CMS DEBUG: TTxRemovePermissions Execute 2025-03-31T16:20:58.221264Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reset host markers: host# 26 2025-03-31T16:20:58.221339Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Remove request: id# user-r-2, reason# permission user-p-2 was removed 2025-03-31T16:20:58.221377Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Remove permission: id# user-p-2, reason# explicit remove 2025-03-31T16:20:58.233372Z node 25 :CMS DEBUG: TTxRemovePermissions Complete 2025-03-31T16:20:58.233536Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-2" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } } >> TCmsTest::WalleTasksWithNodeLimit [GOOD] >> TCmsTest::WalleTasksDifferentPriorities ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::RecreateObserver [GOOD] Test command err: 2025-03-31T16:19:10.050855Z :RetryDiscoveryWithCancel INFO: Random seed for debugging is 1743437950050821 2025-03-31T16:19:10.465094Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008981734591680:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:10.471653Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:19:10.563528Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008978905412193:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:10.563604Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001603/r3tmp/tmpJuVOCy/pdisk_1.dat 2025-03-31T16:19:10.842979Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-31T16:19:10.855323Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-31T16:19:11.246098Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:19:11.284234Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:19:11.284331Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:19:11.285753Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:19:11.285819Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:19:11.290239Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:19:11.292258Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:19:11.293613Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2458, node 1 2025-03-31T16:19:11.515839Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/0l5c/001603/r3tmp/yandexLrVAYx.tmp 2025-03-31T16:19:11.515864Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/0l5c/001603/r3tmp/yandexLrVAYx.tmp 2025-03-31T16:19:11.516018Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/0l5c/001603/r3tmp/yandexLrVAYx.tmp 2025-03-31T16:19:11.516155Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:19:11.768932Z INFO: TTestServer started on Port 19174 GrpcPort 2458 TClient is connected to server localhost:19174 PQClient connected to localhost:2458 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:19:12.126867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-03-31T16:19:12.231689Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976720660, at schemeshard: 72057594046644480 2025-03-31T16:19:14.267177Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008998914461870:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:14.267313Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:14.267620Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008998914461882:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:14.270704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720661:3, at schemeshard: 72057594046644480 2025-03-31T16:19:14.284832Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008998914461916:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:14.285342Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:14.306311Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488008998914461884:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976720661 completed, doublechecking } 2025-03-31T16:19:14.396581Z node 1 :TX_PROXY ERROR: Actor# [1:7488008998914461965:2676] txid# 281474976720662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:19:14.689628Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7488008996085281711:2314], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:3:16: Error: At function: KiReadTable!
<main>:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:19:14.689962Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MmM0YWE1NzktNTZjZTgzOTgtOTIxNjNhNGUtNDI5NjI4ZTk=, ActorId: [2:7488008996085281665:2308], ActorState: ExecuteState, TraceId: 01jqpfzz7n50tnrtshf8yt82sj, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:19:14.689000Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488008998914461976:2348], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:3:16: Error: At function: KiReadTable!
<main>:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:19:14.692291Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T16:19:14.694751Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTNmMzQwMjAtZDRhOGMwYjEtMzE5YTM3MzUtZmNkOGFiNmM=, ActorId: [1:7488008998914461867:2336], ActorState: ExecuteState, TraceId: 01jqpfzz5t1vchgqvqvgthf292, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:19:14.695108Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T16:19:14.749306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480 2025-03-31T16:19:14.903476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720664:0, at schemeshard: 72057594046644480 2025-03-31T16:19:15.022923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:2458", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2025-03-31T16:19:15.413170Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720666. Ctx: { TraceId: 01jqpg001mehkz6mvkdvjmfq56, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2Q3NGVjZGItZGFjNjFlZTctODBiMWMzZGUtYWQyODE1OA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:19:15.464610Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008981734591680:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:15.464697Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList.
Subcribe to ClusterTracker from [1:7488009003209429688:2977] 2025-03-31T16:19:15.564030Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488008978905412193:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:15.564095Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-03-31T16:19:21.539141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, ... base id: account-dc3 CommittedOffset: 0 EndOffset: 0 } 2025-03-31T16:20:57.734372Z :INFO: [/Root] [/Root] [83cc9bc9-1a88f35-b3cafcc7-6ea0b46e] Closing read session. Close timeout: 0.000000s 2025-03-31T16:20:57.734417Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-03-31T16:20:57.734470Z :INFO: [/Root] [/Root] [83cc9bc9-1a88f35-b3cafcc7-6ea0b46e] Counters: { Errors: 0 CurrentSessionLifetimeMs: 22 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-31T16:20:57.734563Z :NOTICE: [/Root] [/Root] [83cc9bc9-1a88f35-b3cafcc7-6ea0b46e] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
<main>: Error: Close with zero timeout " } 2025-03-31T16:20:57.734597Z :DEBUG: [/Root] [/Root] [83cc9bc9-1a88f35-b3cafcc7-6ea0b46e] [] Abort session to cluster 2025-03-31T16:20:57.735334Z :INFO: [/Root] [/Root] [87c2b462-d6251b75-7779a19e-d8edd75f] Closing read session. Close timeout: 0.000000s 2025-03-31T16:20:57.735377Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-03-31T16:20:57.735413Z :INFO: [/Root] [/Root] [87c2b462-d6251b75-7779a19e-d8edd75f] Counters: { Errors: 0 CurrentSessionLifetimeMs: 21 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-31T16:20:57.735471Z :NOTICE: [/Root] [/Root] [87c2b462-d6251b75-7779a19e-d8edd75f] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
<main>: Error: Close with zero timeout " } 2025-03-31T16:20:57.735513Z :DEBUG: [/Root] [/Root] [87c2b462-d6251b75-7779a19e-d8edd75f] [] Abort session to cluster 2025-03-31T16:20:57.735629Z node 3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_15604237749166399999_v1 grpc read done: success# 0, data# { } 2025-03-31T16:20:57.735661Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_15604237749166399999_v1 grpc read failed 2025-03-31T16:20:57.735683Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_15604237749166399999_v1 grpc closed 2025-03-31T16:20:57.735700Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_15604237749166399999_v1 is DEAD 2025-03-31T16:20:57.736007Z :INFO: [/Root] [/Root] [56315efc-9600484c-754db024-f64d832] Closing read session. Close timeout: 0.000000s 2025-03-31T16:20:57.736066Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:1:0:0 2025-03-31T16:20:57.736097Z :INFO: [/Root] [/Root] [56315efc-9600484c-754db024-f64d832] Counters: { Errors: 0 CurrentSessionLifetimeMs: 21 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-31T16:20:57.736145Z :NOTICE: [/Root] [/Root] [56315efc-9600484c-754db024-f64d832] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
<main>: Error: Close with zero timeout " } 2025-03-31T16:20:57.736169Z :DEBUG: [/Root] [/Root] [56315efc-9600484c-754db024-f64d832] [] Abort session to cluster 2025-03-31T16:20:57.736252Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_3_2_6692824852559230653_v1 grpc read done: success# 0, data# { } 2025-03-31T16:20:57.736276Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_3_2_6692824852559230653_v1 grpc read failed 2025-03-31T16:20:57.736329Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_3_2_6692824852559230653_v1 closed 2025-03-31T16:20:57.736533Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_3_2_6692824852559230653_v1 is DEAD 2025-03-31T16:20:57.736690Z :INFO: [/Root] [/Root] [56315efc-9600484c-754db024-f64d832] Closing read session. Close timeout: 0.000000s 2025-03-31T16:20:57.736753Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:1:0:0 2025-03-31T16:20:57.736802Z :INFO: [/Root] [/Root] [56315efc-9600484c-754db024-f64d832] Counters: { Errors: 0 CurrentSessionLifetimeMs: 22 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-31T16:20:57.736793Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7488009439220298994:2550] disconnected; active server actors: 1 2025-03-31T16:20:57.736840Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [3:7488009439220298994:2550] client user disconnected session shared/user_3_1_15604237749166399999_v1 2025-03-31T16:20:57.736896Z :NOTICE: [/Root] [/Root] [56315efc-9600484c-754db024-f64d832] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
<main>: Error: Aborted " } 2025-03-31T16:20:57.736893Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] consumer user rebalancing was scheduled 2025-03-31T16:20:57.736943Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7488009439220298995:2551] disconnected; active server actors: 1 2025-03-31T16:20:57.736965Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [3:7488009439220298995:2551] client user disconnected session shared/user_3_2_6692824852559230653_v1 2025-03-31T16:20:57.737003Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] consumer user balancing. Sessions=1, Families=1, UnradableFamilies=0 [], RequireBalancing=0 [] 2025-03-31T16:20:57.737035Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] consumer user start rebalancing. familyCount=1, sessionCount=1, desiredFamilyCount=1, allowPlusOne=0 2025-03-31T16:20:57.737058Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] consumer user balancing duration: 0.000036s 2025-03-31T16:20:57.737057Z node 3 :PQ_READ_PROXY DEBUG: session cookie 3 consumer shared/user session shared/user_3_3_3933644209797610350_v1 grpc read done: success# 0, data# { } 2025-03-31T16:20:57.737068Z node 3 :PQ_READ_PROXY INFO: session cookie 3 consumer shared/user session shared/user_3_3_3933644209797610350_v1 grpc read failed 2025-03-31T16:20:57.737086Z node 3 :PQ_READ_PROXY INFO: session cookie 3 consumer shared/user session shared/user_3_3_3933644209797610350_v1 closed 2025-03-31T16:20:57.737139Z node 3 :PQ_READ_PROXY INFO: session cookie 3 consumer shared/user session shared/user_3_3_3933644209797610350_v1 is DEAD 2025-03-31T16:20:57.737458Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7488009439220298998:2552] disconnected; active server actors: 1 2025-03-31T16:20:57.737484Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [3:7488009439220298998:2552] client user disconnected session shared/user_3_3_3933644209797610350_v1 2025-03-31T16:20:57.737416Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_3_3_3933644209797610350_v1 2025-03-31T16:20:57.737459Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7488009439220299001:2559] destroyed 2025-03-31T16:20:57.737530Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_3_3_3933644209797610350_v1 2025-03-31T16:20:57.737849Z :INFO: [/Root] [/Root] [87c2b462-d6251b75-7779a19e-d8edd75f] Closing read session. Close timeout: 0.000000s 2025-03-31T16:20:57.737890Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-03-31T16:20:57.737924Z :INFO: [/Root] [/Root] [87c2b462-d6251b75-7779a19e-d8edd75f] Counters: { Errors: 0 CurrentSessionLifetimeMs: 24 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-31T16:20:57.737977Z :NOTICE: [/Root] [/Root] [87c2b462-d6251b75-7779a19e-d8edd75f] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
<main>: Error: Aborted " } 2025-03-31T16:20:57.738483Z :INFO: [/Root] [/Root] [83cc9bc9-1a88f35-b3cafcc7-6ea0b46e] Closing read session. Close timeout: 0.000000s 2025-03-31T16:20:57.738520Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-03-31T16:20:57.738550Z :INFO: [/Root] [/Root] [83cc9bc9-1a88f35-b3cafcc7-6ea0b46e] Counters: { Errors: 0 CurrentSessionLifetimeMs: 26 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-31T16:20:57.738597Z :NOTICE: [/Root] [/Root] [83cc9bc9-1a88f35-b3cafcc7-6ea0b46e] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
<main>: Error: Aborted " } 2025-03-31T16:20:58.240966Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7488009443515266320:2561] TxId: 281474976715690. Ctx: { TraceId: 01jqpg34d54sbkp7rj5v28syp0, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ODk1OWRjNGEtNzcyMjQ0ZjktZjMwODAzOTAtMjViNTMxZjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 4 2025-03-31T16:20:58.241665Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7488009443515266331:2568], TxId: 281474976715690, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jqpg34d54sbkp7rj5v28syp0. SessionId : ydb://session/3?node_id=3&id=ODk1OWRjNGEtNzcyMjQ0ZjktZjMwODAzOTAtMjViNTMxZjQ=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [3:7488009443515266320:2561], status: UNAVAILABLE, reason: {
<main>: Error: Terminate execution } 2025-03-31T16:20:58.241675Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7488009443515266333:2569], TxId: 281474976715690, task: 4. Ctx: { TraceId : 01jqpg34d54sbkp7rj5v28syp0. SessionId : ydb://session/3?node_id=3&id=ODk1OWRjNGEtNzcyMjQ0ZjktZjMwODAzOTAtMjViNTMxZjQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [3:7488009443515266320:2561], status: UNAVAILABLE, reason: {
<main>: Error: Terminate execution } 2025-03-31T16:20:58.422109Z node 3 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976715691. Failed to resolve tablet: 72075186224037890 after several retries. 2025-03-31T16:20:58.422248Z node 3 :KQP_EXECUTER WARN: ActorId: [3:7488009443515266338:2570] TxId: 281474976715691. Ctx: { TraceId: 01jqpg34rk7s4141a262ceaz85, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NGVlN2Q1MzYtNzhmNTIxYTctMjkxYjE5NjAtMzg4ZmFkM2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
<main>: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-03-31T16:20:58.422423Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NGVlN2Q1MzYtNzhmNTIxYTctMjkxYjE5NjAtMzg4ZmFkM2Q=, ActorId: [3:7488009443515266335:2570], ActorState: ExecuteState, TraceId: 01jqpg34rk7s4141a262ceaz85, Create QueryResponse for error on request, msg: 2025-03-31T16:20:58.423217Z node 3 :PQ_METACACHE ERROR: Got error trying to perform request: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jqpg34rk7s4141a263exzm44" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 } >> TCmsTenatsTest::RequestRestartServices [GOOD] >> TCmsTest::VDisksEvictionShouldFailOnMultipleActions [GOOD] |90.1%| [TA] $(B)/ydb/core/tx/datashard/ut_change_exchange/test-results/unittest/{meta.json ... results_accumulator.log} >> TCmsTest::WalleCleanupTest [GOOD] >> TCmsTest::WalleRequestDuringRollingRestart ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::DatashardReplySize [GOOD] Test command err: Trying to start YDB, gRPC: 10417, MsgBus: 13983 2025-03-31T16:20:28.374243Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488009313043302657:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:28.374380Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000925/r3tmp/tmpIcOVow/pdisk_1.dat 2025-03-31T16:20:28.676908Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10417, node 1 2025-03-31T16:20:28.732080Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:28.732210Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:28.733847Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:20:28.743808Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:28.743838Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:28.743852Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:28.743990Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13983 TClient is connected to server localhost:13983 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:29.177960Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:29.198071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:29.302496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:29.436765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:29.511032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:31.238981Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009325928206326:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:31.239109Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:31.591668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:20:31.616765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:20:31.640112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:20:31.664186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:20:31.691208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:20:31.724496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:20:31.800585Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009325928206840:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:31.800631Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009325928206845:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:31.800681Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:31.804471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:20:31.814481Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488009325928206847:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:20:31.911677Z node 1 :TX_PROXY ERROR: Actor# [1:7488009325928206903:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:20:32.650449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:33.374191Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488009313043302657:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:33.374252Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:20:35.051148Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7488009343108077323:2606] TxId: 281474976710672. Ctx: { TraceId: 01jqpg2dyqawfms0tjj7nf3txj, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWM1NDkwNmEtZTZhODc1MmUtNjRjMmM0ODEtYzJmNjYzZjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Memory limit exception at WaitResolveState, current limit is 1024 bytes. } 2025-03-31T16:20:35.051385Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NWM1NDkwNmEtZTZhODc1MmUtNjRjMmM0ODEtYzJmNjYzZjI=, ActorId: [1:7488009338813110011:2606], ActorState: ExecuteState, TraceId: 01jqpg2dyqawfms0tjj7nf3txj, Create QueryResponse for error on request, msg:
: Error: Memory limit exception at WaitResolveState, current limit is 1024 bytes. Trying to start YDB, gRPC: 1310, MsgBus: 15722 2025-03-31T16:20:35.758394Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488009344428425112:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:35.758624Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000925/r3tmp/tmpSnZszY/pdisk_1.dat 2025-03-31T16:20:35.886403Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:20:35.904130Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:35.904234Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:35.905559Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1310, node 2 2025-03-31T16:20:35.944820Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:35.944844Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:35.944856Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:35.944993Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15722 TClient is connected to server localhost:15722 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:36.299493Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715 ... 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:42.600931Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:46.117752Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488009390318078638:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:46.117844Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488009390318078646:2360], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:46.117910Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:46.121509Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-31T16:20:46.131245Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488009390318078652:2361], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-31T16:20:46.197878Z node 3 :TX_PROXY ERROR: Actor# [3:7488009390318078703:2604] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:20:47.009825Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488009373138208411:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:47.009906Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:20:47.105668Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7488009390318078735:2353] TxId: 281474976715661. Ctx: { TraceId: 01jqpg2rww44dn6pq649kk2rw0, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ODkwMjU1OTgtZDVlZTZkMjctNjEzY2ExNTEtYWY5NDE1Njc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Abort execution. Task #1 size is too big: 100442499 > 50331648 2025-03-31T16:20:47.106000Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ODkwMjU1OTgtZDVlZTZkMjctNjEzY2ExNTEtYWY5NDE1Njc=, ActorId: [3:7488009386023111312:2353], ActorState: ExecuteState, TraceId: 01jqpg2rww44dn6pq649kk2rw0, Create QueryResponse for error on request, msg:
: Error: Datashard program size limit exceeded (100442499 > 50331648), code: 200509 Trying to start YDB, gRPC: 21261, MsgBus: 10919 2025-03-31T16:20:47.977240Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488009398253400376:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:47.977361Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000925/r3tmp/tmpjp2Lhx/pdisk_1.dat 2025-03-31T16:20:48.060142Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21261, node 4 2025-03-31T16:20:48.111150Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:48.111243Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:48.112820Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:20:48.122916Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:48.122949Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:48.122959Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:48.123061Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10919 TClient is connected to server localhost:10919 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:48.593188Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:48.610660Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:48.685999Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:20:48.906994Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:48.986514Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:51.326504Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488009415433271342:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:51.326598Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:51.377724Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:20:51.410520Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:20:51.443078Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:20:51.477885Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:20:51.511985Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:20:51.580311Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:20:51.624408Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488009415433271853:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:51.624476Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488009415433271858:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:51.624491Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:51.627801Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:20:51.637343Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7488009415433271860:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:20:51.717765Z node 4 :TX_PROXY ERROR: Actor# [4:7488009415433271913:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:20:52.453401Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:52.977679Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7488009398253400376:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:52.977749Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:20:59.026723Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NTg4Y2UwYzEtODBmMzQ1NDMtNzNiNTcxNmYtYTU5ZDUxYWQ=, ActorId: [4:7488009441203077583:2735], ActorState: ExecuteState, TraceId: 01jqpg34815tj8s6mbkgbfkncz, Create QueryResponse for error on request, msg:
: Error: Query result size limit exceeded. (200003988 > 50331648), code: 2013 |90.1%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_exchange/test-results/unittest/{meta.json ... results_accumulator.log} |90.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTenatsTest::RequestRestartServices [GOOD] |90.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::VDisksEvictionShouldFailOnMultipleActions [GOOD] >> TCmsTest::TestProcessingQueue [GOOD] >> TCmsTenatsTest::TestLimitsWithDownNode [GOOD] >> TCmsTenatsTest::TestScheduledPermissionWithDefaultPolicy >> TCmsTest::Notifications [GOOD] >> TCmsTest::Mirror3dcPermissions >> TCmsTenatsTest::TestTenantLimitForceRestartMode [GOOD] >> TCmsTenatsTest::TestTenantLimitForceRestartModeScheduled >> TCmsTest::StateStorageAvailabilityMode [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::TestProcessingQueue [GOOD] Test command err: 2025-03-31T16:20:55.289052Z node 1 :CMS ERROR: [InfoCollector] Couldn't get base config 2025-03-31T16:20:55.430217Z node 1 :CMS ERROR: [InfoCollector] Couldn't get base config 2025-03-31T16:20:55.445025Z node 1 :CMS ERROR: [InfoCollector] Couldn't get base config 2025-03-31T16:20:55.491099Z node 1 :CMS ERROR: [InfoCollector] Couldn't get base config 2025-03-31T16:20:59.614720Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 17:17 2025-03-31T16:20:59.614783Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 18:18 2025-03-31T16:20:59.614804Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 19:19 2025-03-31T16:20:59.614825Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 20:20 2025-03-31T16:20:59.614845Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 21:21 2025-03-31T16:20:59.614868Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 22:22 2025-03-31T16:20:59.614888Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 23:23 2025-03-31T16:20:59.614912Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 24:24 >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionActive_BoundaryTrue_Test >> TSourceIdTests::SourceIdStorageParseAndAdd [GOOD] >> TSourceIdTests::SourceIdStorageMinDS [GOOD] >> TSourceIdTests::SourceIdStorageTestClean >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionActive_BoundaryFalse_Test >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_NewSourceId_Test >> TPQTestInternal::TestPartitionedBlobSimpleTest [GOOD] >> TPQTestInternal::TestPartitionedBigTest >> TSourceIdTests::SourceIdWriterAddMessage [GOOD] >> TSourceIdTests::SourceIdWriterClean [GOOD] >> TSourceIdTests::SourceIdWriterFormCommand >> TSourceIdTests::SourceIdStorageTestClean [GOOD] >> TSourceIdTests::SourceIdStorageDeleteByMaxCount [GOOD] >> TSourceIdTests::SourceIdStorageDeleteAndOwnersMark [GOOD] >> TPQTest::TestAccountReadQuota >> TPQTest::TestSeveralOwners >> TPartitionChooserSuite::TBoundaryChooserTest [GOOD] >> TPartitionChooserSuite::TBoundaryChooser_GetTabletIdTest [GOOD] >> TPartitionChooserSuite::THashChooserTest [GOOD] >> TPQUserInfoTest::UserDataDeprecatedSerializaion [GOOD] >> TPQUtilsTest::TLastCounter [GOOD] >> TSourceIdTests::SourceIdWriterFormCommand [GOOD] >> TTypeCodecsTest::TestBoolCodec [GOOD] >> TTypeCodecsTest::TestDeltaVarIntCodecAndRev [GOOD] >> TCmsTenatsTest::TestTenantRatioLimitForceRestartModeScheduled [GOOD] >> TCmsTest::ActionIssue >> 
TMaintenanceApiTest::SingleCompositeActionGroup [GOOD] >> TMaintenanceApiTest::SimplifiedMirror3DC >> KqpPg::TypeCoercionInsert-useSink [GOOD] >> KqpPg::V1CreateTable |90.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::StateStorageAvailabilityMode [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TTypeCodecsTest::TestDeltaVarIntCodecAndRev [GOOD] Test command err: Size: 128 Create chunk: 0.000039s Read by index: 0.000019s Iterate: 0.000019s Size: 252 Create chunk: 0.000048s Read by index: 0.000021s Iterate: 0.000022s Size: 1887 Create chunk: 0.000103s Read by index: 0.000128s Iterate: 0.000082s Size: 1658 Create chunk: 0.000102s Read by index: 0.000128s Iterate: 0.000088s Size: 1889 Create chunk: 0.000080s Read by index: 0.000086s Iterate: 0.000048s Size: 1660 Create chunk: 0.000111s Read by index: 0.000111s Iterate: 0.000078s >> TKeyValueTest::TestRewriteThenLastValue [GOOD] >> TKeyValueTest::TestRewriteThenLastValueNewApi >> KqpLimits::ReplySizeExceeded [GOOD] |90.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQUtilsTest::TLastCounter [GOOD] |90.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TSourceIdTests::SourceIdStorageDeleteAndOwnersMark [GOOD] >> KqpPg::CopyTableSerialColumns-useSink [GOOD] >> KqpPg::CreateIndex >> TListAllTopicsTests::PlainList >> TPartitionTests::IncorrectRange >> KqpPg::CreateUniqPgColumn-useSink [GOOD] >> KqpPg::CreateUniqComplexPgColumn+useSink >> TPartitionTests::CorrectRange_Commit >> TPQTestInternal::TestPartitionedBigTest [GOOD] >> TPQTestInternal::TestBatchPacking [GOOD] >> TPQTestInternal::TestKeyRange [GOOD] >> TPQTestInternal::TestToHex [GOOD] >> YdbSdkSessionsPool::PeriodicTask10 [GOOD] >> TCmsTest::WalleTasksDifferentPriorities [GOOD] >> TCmsTest::WalleRequestDuringRollingRestart [GOOD] >> TMaintenanceApiTest::SimplifiedMirror3DC [GOOD] >> TMaintenanceApiTest::RequestReplaceDevicePDisk >> TPQTest::TestSeveralOwners [GOOD] >> TPQTest::TestSourceIdDropByUserWrites |90.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTestInternal::TestToHex [GOOD] >> TPQTabletTests::Single_PQTablet_And_Multiple_Partitions >> TCmsTenatsTest::RequestShutdownHost [GOOD] >> TCmsTenatsTest::RequestShutdownHostWithTenantPolicy >> TCmsTenatsTest::TestScheduledPermissionWithDefaultPolicy [GOOD] >> TPartitionTests::IncorrectRange [GOOD] |90.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::WalleRequestDuringRollingRestart [GOOD] |90.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::WalleTasksDifferentPriorities [GOOD] >> TPartitionTests::CorrectRange_Commit [GOOD] >> TPartitionTests::GetPartitionWriteInfoSuccess >> TPartitionTests::ConflictingTxIsAborted >> TPQTabletTests::Single_PQTablet_And_Multiple_Partitions [GOOD] >> TCmsTenatsTest::TestTenantLimitForceRestartModeScheduled [GOOD] >> TMeteringSink::FlushThroughputV1 [GOOD] >> TMeteringSink::UsedStorageV1 [GOOD] >> TMicrosecondsSlidingWindow::Basic [GOOD] >> TMultiBucketCounter::InsertAndUpdate [GOOD] >> TMultiBucketCounter::ManyCounters >> TPQTabletTests::Test_Waiting_For_TEvReadSet_When_There_Are_More_Senders_Than_Recipients ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::PeriodicTask10 [GOOD] Test command err: 2025-03-31T16:20:39.040160Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488009361602447837:2077];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:39.040264Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000583/r3tmp/tmpziQcui/pdisk_1.dat 2025-03-31T16:20:39.460684Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:39.460796Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:39.469366Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:20:39.488577Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21005, node 1 2025-03-31T16:20:39.527167Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:20:39.527215Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:20:39.623719Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:39.623749Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:39.623758Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:39.623879Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15658 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:39.903771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:20:44.040664Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488009361602447837:2077];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:44.040754Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:20:54.479326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:20:54.479363Z node 1 :IMPORT WARN: Table profiles were not loaded ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::ReplySizeExceeded [GOOD] Test command err: Trying to start YDB, gRPC: 27931, MsgBus: 31171 2025-03-31T16:20:05.763106Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488009217478321584:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:05.763163Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a73/r3tmp/tmpE7eclU/pdisk_1.dat 2025-03-31T16:20:06.013145Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27931, node 1 2025-03-31T16:20:06.072458Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:06.072498Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:06.072506Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:06.072639Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:20:06.097042Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:06.097115Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:06.098890Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:31171 TClient is connected to server localhost:31171 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:06.483319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:20:06.505256Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:06.618006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:06.741112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:06.795061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:08.203013Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009230363225249:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:08.203150Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:08.414714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:20:08.436971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:20:08.458506Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:20:08.480732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:20:08.503474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:20:08.547494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:20:08.580618Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009230363225757:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:08.580708Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009230363225762:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:08.580710Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:08.583744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:20:08.591293Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488009230363225764:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:20:08.664021Z node 1 :TX_PROXY ERROR: Actor# [1:7488009230363225818:3444] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:20:09.297654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:10.763010Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488009217478321584:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:10.763098Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:20:12.112275Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTc4MjEzNTYtN2UyZGE4NjAtN2IzYTEzNDItOWZkNmZkNjk=, ActorId: [1:7488009243248128904:2604], ActorState: ExecuteState, TraceId: 01jqpg1q3rdx7m6zwnkqgaemmw, Create QueryResponse for error on request, msg:
: Error: Query result size limit exceeded. (80001691 > 50331648), code: 2013 Trying to start YDB, gRPC: 9064, MsgBus: 14710 2025-03-31T16:20:12.958279Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488009244648323283:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:12.958347Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a73/r3tmp/tmp8wwgpb/pdisk_1.dat 2025-03-31T16:20:13.071374Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:20:13.088888Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:13.088980Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:13.090658Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9064, node 2 2025-03-31T16:20:13.126134Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:13.126166Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:13.126175Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:13.126312Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14710 TClient is connected to server localhost:14710 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:13.509262Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:13.525562Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:13.599640Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 28147 ...
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:20:22.684572Z node 3 :TX_PROXY ERROR: Actor# [3:7488009287568784260:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:20:23.647834Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:24.194816Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488009274683880027:2057];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:24.194907Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:20:34.285294Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:20:34.285323Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:20:52.750005Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NWYzN2FhZTItYTU4NmJhZjAtYTExY2ZkNjItYWYyYjZlYmU=, ActorId: [3:7488009416417804862:2744], ActorState: ExecuteState, TraceId: 01jqpg2z825vgfmmj0s5mavksj, Create QueryResponse for error on request, msg:
: Error: Task execution timeout 96ms exceeded, terminating after 134ms 2025-03-31T16:20:52.887136Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7488009416417804961:2744] TxId: 281474976715674. Ctx: { TraceId: 01jqpg2zd87r2q6n3c0wq3x5na, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NWYzN2FhZTItYTU4NmJhZjAtYTExY2ZkNjItYWYyYjZlYmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. CANCELLED: [ {
: Error: Request canceled after 100ms } {
: Error: Cancelling after 109ms during execution } ] 2025-03-31T16:20:52.887801Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7488009416417804977:2777], TxId: 281474976715674, task: 9. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=3&id=NWYzN2FhZTItYTU4NmJhZjAtYTExY2ZkNjItYWYyYjZlYmU=. TraceId : 01jqpg2zd87r2q6n3c0wq3x5na. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [3:7488009416417804961:2744], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-03-31T16:20:52.960231Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7488009416417804975:2775], TxId: 281474976715674, task: 7. Ctx: { TraceId : 01jqpg2zd87r2q6n3c0wq3x5na. SessionId : ydb://session/3?node_id=3&id=NWYzN2FhZTItYTU4NmJhZjAtYTExY2ZkNjItYWYyYjZlYmU=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [3:7488009416417804961:2744], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-03-31T16:20:52.962726Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NWYzN2FhZTItYTU4NmJhZjAtYTExY2ZkNjItYWYyYjZlYmU=, ActorId: [3:7488009416417804862:2744], ActorState: ExecuteState, TraceId: 01jqpg2zd87r2q6n3c0wq3x5na, Create QueryResponse for error on request, msg:
: Error: Request canceled after 100ms
: Error: Cancelling after 109ms during execution Trying to start YDB, gRPC: 26433, MsgBus: 14568 2025-03-31T16:20:54.112201Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488009425122016269:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:54.112281Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a73/r3tmp/tmpugjGZQ/pdisk_1.dat 2025-03-31T16:20:54.224911Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:20:54.246138Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:54.246212Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:54.247548Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26433, node 4 2025-03-31T16:20:54.291264Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:54.291296Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:54.291309Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:54.291505Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14568 TClient is connected to server localhost:14568 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:54.826948Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:54.842902Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:54.903336Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:20:55.063664Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:55.132677Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:57.394793Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488009438006919931:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:57.394892Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:57.426173Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:20:57.456133Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:20:57.483110Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:20:57.510957Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:20:57.539529Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:20:57.569769Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:20:57.604426Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488009438006920437:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:57.604482Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:57.604555Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488009438006920442:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:57.607468Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:20:57.615634Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7488009438006920444:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:20:57.697175Z node 4 :TX_PROXY ERROR: Actor# [4:7488009438006920498:3439] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:20:58.631733Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:20:59.112674Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7488009425122016269:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:59.112738Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:21:01.531209Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OWJjZjg2MmQtZWM3YjVjZmYtOTY1ZGE5MGEtYTQwYWYxMmI=, ActorId: [4:7488009442301888091:2496], ActorState: ExecuteState, TraceId: 01jqpg37k20b6hfrcdbst5enr3, Create QueryResponse for error on request, msg: >> TPartitionTests::ConflictingTxIsAborted [GOOD] >> TMultiBucketCounter::ManyCounters [GOOD] >> TPQRBDescribes::PartitionLocations >> TFetchRequestTests::HappyWay >> KqpPg::DropTablePgMultiple [GOOD] >> KqpPg::DropTableIfExists >> TPartitionTests::CorrectRange_Multiple_Transactions >> TPartitionTests::UserActCount |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTenatsTest::TestScheduledPermissionWithDefaultPolicy [GOOD] >> TPartitionTests::ConflictingTxProceedAfterRollback >> TPQTabletTests::Test_Waiting_For_TEvReadSet_When_There_Are_More_Senders_Than_Recipients [GOOD] >> TPQTabletTests::Test_Waiting_For_TEvReadSet_When_There_Are_Fewer_Senders_Than_Recipients |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTenatsTest::TestTenantLimitForceRestartModeScheduled [GOOD] >> TPQTabletTests::Test_Waiting_For_TEvReadSet_When_There_Are_Fewer_Senders_Than_Recipients [GOOD] >> TPQTabletTests::Test_Waiting_For_TEvReadSet_When_The_Number_Of_Senders_And_Recipients_Match >> TPartitionTests::CorrectRange_Multiple_Transactions [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultTextNotNull-useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultTextNotNullButNull+useSink >> TPartitionTests::CorrectRange_Multiple_Consumers >> TPQTabletTests::Test_Waiting_For_TEvReadSet_When_The_Number_Of_Senders_And_Recipients_Match [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultAndCast-useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultBool+useSink >> TPartitionTests::GetPartitionWriteInfoSuccess [GOOD] >> TPartitionTests::GetPartitionWriteInfoError >> TPQTabletTests::Test_Waiting_For_TEvReadSet_Without_Recipients >> TSourceIdTests::SourceIdStorageAdd [GOOD] >> TSourceIdTests::ProtoSourceIdStorageParseAndAdd [GOOD] >> TSourceIdTests::SourceIdStorageComplexDelete [GOOD] >> TSourceIdTests::HeartbeatEmitter [GOOD] >> TSourceIdTests::SourceIdMinSeqNo [GOOD] >> TPartitionChooserSuite::THashChooser_GetTabletIdTest [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_NewSourceId_Test >> TPartitionTests::WriteSubDomainOutOfSpace_DisableExpiration >> TPQTabletTests::Test_Waiting_For_TEvReadSet_Without_Recipients 
[GOOD] >> TPQTabletTests::TEvReadSet_comes_before_TEvPlanStep >> TPQTest::TestWritePQCompact >> TPartitionTests::CorrectRange_Multiple_Consumers [GOOD] >> TPQTabletTests::UpdateConfig_1 >> TPartitionTests::CorrectRange_Rollback >> TPartitionTests::GetPartitionWriteInfoError [GOOD] |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TSourceIdTests::SourceIdMinSeqNo [GOOD] >> TPQTabletTests::TEvReadSet_comes_before_TEvPlanStep [GOOD] >> TPQTabletTests::Parallel_Transactions_1 >> TPartitionTests::FailedTxsDontBlock >> TPQTest::TestAccountReadQuota [GOOD] >> TPQTest::TestAlreadyWritten >> TPartitionTests::WriteSubDomainOutOfSpace_DisableExpiration [GOOD] >> TPQTabletTests::UpdateConfig_1 [GOOD] >> TCmsTest::ActionIssue [GOOD] >> TPartitionTests::DataTxCalcPredicateOrder >> TPartitionTests::WriteSubDomainOutOfSpace_IgnoreQuotaDeadline >> TCmsTest::Mirror3dcPermissions [GOOD] >> TPartitionTests::CorrectRange_Rollback [GOOD] >> TPQTabletTests::UpdateConfig_2 >> TPQTabletTests::Parallel_Transactions_1 [GOOD] >> TPartitionTests::DataTxCalcPredicateOk ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTabletTests::TEvReadSet_comes_before_TEvPlanStep [GOOD] Test command err: 2025-03-31T16:21:03.729799Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-03-31T16:21:03.733959Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-03-31T16:21:03.734250Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] doesn't have tx info 2025-03-31T16:21:03.734294Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-31T16:21:03.734328Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] no config, start with empty partitions and default config 2025-03-31T16:21:03.734354Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2025-03-31T16:21:03.734388Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:03.734433Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-31T16:21:03.750057Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:178:2193], now have 1 active actors on pipe 2025-03-31T16:21:03.750191Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-03-31T16:21:03.764022Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-03-31T16:21:03.766649Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 
MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-03-31T16:21:03.766773Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:03.767995Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-03-31T16:21:03.768118Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitConfigStep 2025-03-31T16:21:03.768182Z node 1 :PERSQUEUE DEBUG: [topic:1:Initializer] Start initializing step TInitConfigStep 2025-03-31T16:21:03.768562Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-31T16:21:03.768838Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:186:2199] 2025-03-31T16:21:03.769484Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Initializing completed. 2025-03-31T16:21:03.769531Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [1:186:2199] 2025-03-31T16:21:03.769570Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-31T16:21:03.769970Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. 
Count 0 2025-03-31T16:21:03.770090Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2025-03-31T16:21:03.770131Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2025-03-31T16:21:03.770227Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-31T16:21:03.770262Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-31T16:21:03.770293Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-31T16:21:03.770331Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-31T16:21:03.770374Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cuser 2025-03-31T16:21:03.770398Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uuser 2025-03-31T16:21:03.770423Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-31T16:21:03.770450Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-03-31T16:21:03.770511Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-03-31T16:21:03.770683Z node 1 :PERSQUEUE DEBUG: [topic:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-31T16:21:03.770849Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:188:2201] 2025-03-31T16:21:03.771302Z node 1 :PERSQUEUE DEBUG: [topic:1:Initializer] Initializing completed. 2025-03-31T16:21:03.771333Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'topic' partition 1 generation 2 [1:188:2201] 2025-03-31T16:21:03.771363Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateInit] SYNC INIT topic topic partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-31T16:21:03.771657Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Process pending events. 
Count 0 2025-03-31T16:21:03.771711Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user reinit request with generation 1 2025-03-31T16:21:03.771732Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user reinit with generation 1 done 2025-03-31T16:21:03.771796Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] === DumpKeyValueRequest === 2025-03-31T16:21:03.771827Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- delete ---------------- 2025-03-31T16:21:03.771883Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- write ----------------- 2025-03-31T16:21:03.771902Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] i0000000001 2025-03-31T16:21:03.771929Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001cuser 2025-03-31T16:21:03.771942Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001uuser 2025-03-31T16:21:03.771956Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- rename ---------------- 2025-03-31T16:21:03.771981Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] =========================== 2025-03-31T16:21:03.772025Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-03-31T16:21:03.772116Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-03-31T16:21:03.772237Z node 1 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2025-03-31T16:21:03.776283Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-03-31T16:21:03.776477Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-03-31T16:21:03.776837Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:201:2210], now have 1 active actors on pipe 2025-03-31T16:21:03.777469Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:204:2212], now have 1 active actors on pipe 2025-03-31T16:21:03.778218Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 177 RawX2: 4294969488 } TxId: 67890 Data { Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Operations { PartitionId: 1 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Immediate: false } 2025-03-31T16:21:03.778270Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] distributed transaction 2025-03-31T16:21:03.778357Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Propose TxId 67890, WriteId (empty maybe) 2025-03-31T16:21:03.778398Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state UNKNOWN 2025-03-31T16:21:03.778424Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State UNKNOWN 2025-03-31T16:21:03.778464Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2025-03-31T16:21:03.778496Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PREPARING 2025-03-31T16:21:03.778736Z node 1 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: PREPARED MinStep: 231 MaxStep: 30231 Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Operations { PartitionId: 1 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Kind: KIND_DATA SourceActor { RawX1: 177 RawX2: 4294969488 } Partitions { } 2025-03-31T16:21:03.778796Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-03-31T16:21:03.781674Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-03-31T16:21:03.781722Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state PREPARING ... 
itions# 0 2025-03-31T16:21:05.773722Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [6:221:2223], now have 1 active actors on pipe 2025-03-31T16:21:05.774248Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [6:224:2225], now have 1 active actors on pipe 2025-03-31T16:21:05.774465Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 177 RawX2: 25769805968 } TxId: 67890 Data { Operations { PartitionId: 0 Begin: 0 End: 1 Consumer: "user" Path: "/topic" } SendingShards: 22222 ReceivingShards: 22222 Immediate: false } 2025-03-31T16:21:05.774510Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] distributed transaction 2025-03-31T16:21:05.774583Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Propose TxId 67890, WriteId (empty maybe) 2025-03-31T16:21:05.774623Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state UNKNOWN 2025-03-31T16:21:05.774665Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State UNKNOWN 2025-03-31T16:21:05.774705Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2025-03-31T16:21:05.774753Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PREPARING 2025-03-31T16:21:05.774898Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: PREPARED MinStep: 134 MaxStep: 30134 PredicatesReceived { TabletId: 22222 } PredicateRecipients: 22222 Operations { PartitionId: 0 Begin: 0 End: 1 Consumer: "user" Path: "/topic" } Kind: KIND_DATA SourceActor { RawX1: 177 RawX2: 25769805968 } Partitions { } 2025-03-31T16:21:05.774989Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-03-31T16:21:05.778067Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-03-31T16:21:05.778134Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state PREPARING 2025-03-31T16:21:05.778173Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State PREPARING 2025-03-31T16:21:05.778211Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PREPARED 2025-03-31T16:21:05.778473Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvReadSet Step: 100 TxId: 67890 TabletSource: 22222 TabletDest: 72057594037927937 TabletProducer: 22222 ReadSet: "\010\001" Seqno: 0 2025-03-31T16:21:05.778519Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] Handle TEvReadSet 2025-03-31T16:21:05.778557Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] Predicates 1/1 2025-03-31T16:21:05.778699Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvPlanStep Transactions { TxId: 67890 AckTo { RawX1: 177 RawX2: 25769805968 } } Step: 100 2025-03-31T16:21:05.778759Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state PREPARED 2025-03-31T16:21:05.778795Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State PREPARED 2025-03-31T16:21:05.778836Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PLANNING 2025-03-31T16:21:05.778879Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 100, PlanTxId 67890 2025-03-31T16:21:05.779034Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: PLANNED MinStep: 134 MaxStep: 30134 PredicatesReceived { TabletId: 22222 Predicate: true } PredicateRecipients: 22222 Operations { PartitionId: 0 Begin: 0 End: 1 Consumer: "user" Path: "/topic" } Step: 100 
Kind: KIND_DATA SourceActor { RawX1: 177 RawX2: 25769805968 } Partitions { } 2025-03-31T16:21:05.779143Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-03-31T16:21:05.782073Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-03-31T16:21:05.782143Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state PLANNING 2025-03-31T16:21:05.782179Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State PLANNING 2025-03-31T16:21:05.782216Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PLANNED 2025-03-31T16:21:05.782261Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from PLANNING to PLANNED 2025-03-31T16:21:05.782296Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxQueue.size 1 2025-03-31T16:21:05.782334Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] New ExecStep 100, ExecTxId 67890 2025-03-31T16:21:05.782398Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState CALCULATING 2025-03-31T16:21:05.782437Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from PLANNED to CALCULATING 2025-03-31T16:21:05.782515Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 100, TxId 67890 2025-03-31T16:21:05.782586Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Partition 0 Consumer 'user' Bad request (behind the last offset) EndOffset 0 End 1 2025-03-31T16:21:05.782763Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvTxCalcPredicateResult Step 100, TxId 67890, Partition 0, Predicate 0 2025-03-31T16:21:05.782799Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] Handle TEvTxCalcPredicateResult 2025-03-31T16:21:05.782831Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] Partition responses 1/1 2025-03-31T16:21:05.782865Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state CALCULATING 2025-03-31T16:21:05.782905Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State CALCULATING 2025-03-31T16:21:05.782941Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 State CALCULATING FrontTxId 67890 2025-03-31T16:21:05.782975Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 1, Expected 1 2025-03-31T16:21:05.783011Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState CALCULATED 2025-03-31T16:21:05.783049Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from CALCULATING to CALCULATED 2025-03-31T16:21:05.783191Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: CALCULATED MinStep: 134 MaxStep: 30134 PredicatesReceived { TabletId: 22222 Predicate: true } PredicateRecipients: 22222 Operations { PartitionId: 0 Begin: 0 End: 1 Consumer: "user" Path: "/topic" } Step: 100 Predicate: false Kind: KIND_DATA SourceActor { RawX1: 177 RawX2: 25769805968 } Partitions { } 2025-03-31T16:21:05.783253Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-03-31T16:21:05.787957Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-03-31T16:21:05.788016Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state CALCULATED 2025-03-31T16:21:05.788047Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State CALCULATED 2025-03-31T16:21:05.788080Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 State 
CALCULATED FrontTxId 67890 2025-03-31T16:21:05.788153Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState WAIT_RS 2025-03-31T16:21:05.788196Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from CALCULATED to WAIT_RS 2025-03-31T16:21:05.788249Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSet to 1 receivers. Wait TEvTxProcessing::TEvReadSet from 1 senders. 2025-03-31T16:21:05.788292Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvReadSet to tablet 22222 2025-03-31T16:21:05.788416Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveParticipantsDecision 1 2025-03-31T16:21:05.788463Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState EXECUTING 2025-03-31T16:21:05.788502Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from WAIT_RS to EXECUTING 2025-03-31T16:21:05.788536Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 0, Expected 0 2025-03-31T16:21:05.788576Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId: 67890 send TEvPersQueue::TEvProposeTransactionResult(ABORTED) 2025-03-31T16:21:05.788621Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] complete TxId 67890 2025-03-31T16:21:05.788657Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] delete partitions for TxId 67890 2025-03-31T16:21:05.788700Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState EXECUTED 2025-03-31T16:21:05.788738Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from EXECUTING to EXECUTED 2025-03-31T16:21:05.788923Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: EXECUTED MinStep: 134 MaxStep: 30134 PredicatesReceived { TabletId: 22222 Predicate: true } PredicateRecipients: 22222 Operations { PartitionId: 0 Begin: 0 End: 1 Consumer: "user" Path: "/topic" } Step: 100 Predicate: false Kind: KIND_DATA SourceActor { RawX1: 177 RawX2: 25769805968 } Partitions { } 2025-03-31T16:21:05.788993Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-03-31T16:21:05.789212Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-31T16:21:05.789256Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-31T16:21:05.789298Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-31T16:21:05.789332Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-31T16:21:05.789357Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-03-31T16:21:05.789387Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-31T16:21:05.789424Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-03-31T16:21:05.789488Z node 6 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2025-03-31T16:21:05.791433Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvClientConnected 2025-03-31T16:21:05.791478Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Connected to tablet 22222 2025-03-31T16:21:05.793934Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-03-31T16:21:05.794891Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-03-31T16:21:05.794936Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state EXECUTED 2025-03-31T16:21:05.794965Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State EXECUTED 2025-03-31T16:21:05.794988Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 State EXECUTED FrontTxId 67890 2025-03-31T16:21:05.795013Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TPersQueue::SendEvReadSetAckToSenders 2025-03-31T16:21:05.795058Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSetAck {TEvReadSet step# 100 txid# 67890 TabletSource# 22222 TabletDest# 72057594037927937 SetTabletConsumer# 72057594037927937 Flags# 0 Seqno# 0} 2025-03-31T16:21:05.795114Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState WAIT_RS_ACKS 2025-03-31T16:21:05.795155Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from EXECUTED to WAIT_RS_ACKS 2025-03-31T16:21:05.795200Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] PredicateAcks: 0/1 2025-03-31T16:21:05.795230Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveAllRecipientsReceive 0, AllSupportivePartitionsHaveBeenDeleted 1 2025-03-31T16:21:05.795261Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] PredicateAcks: 0/1 >> TPartitionTests::WriteSubDomainOutOfSpace_IgnoreQuotaDeadline [GOOD] >> TPQTabletTests::Parallel_Transactions_2 >> TPQTabletTests::UpdateConfig_2 [GOOD] >> TPQTabletTests::Test_Waiting_For_TEvReadSet_Without_Senders |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::ActionIssue [GOOD] >> TQuotaTracker::TestSmallMessages [GOOD] >> TQuotaTracker::TestBigMessages [GOOD] >> TSourceIdTests::ExpensiveCleanup |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::Mirror3dcPermissions [GOOD] >> TPQTabletTests::Parallel_Transactions_2 [GOOD] >> TPQTabletTests::PQTablet_Send_RS_With_Abort >> KqpPg::CreateIndex [GOOD] >> KqpPg::CreateNotNullPgColumn >> TPQTabletTests::Test_Waiting_For_TEvReadSet_Without_Senders [GOOD] >> TPQTest::DirectReadBadSessionOrPipe >> TPQTabletTests::PQTablet_Send_RS_With_Abort [GOOD] >> TPartitionTests::ConflictingTxProceedAfterRollback [GOOD] >> TPQTabletTests::Partition_Send_Predicate_With_False >> TMaintenanceApiTest::RequestReplaceDevicePDisk [GOOD] >> TPartitionTests::TabletConfig_Is_Newer_That_PartitionConfig >> TCmsTenatsTest::RequestShutdownHostWithTenantPolicy [GOOD] >> TCmsTenatsTest::TestClusterLimitForceRestartMode >> TSourceIdTests::ExpensiveCleanup [GOOD] >> TPartitionTests::ConflictingSrcIdTxAndWritesDifferentBatches >> TPQTabletTests::Partition_Send_Predicate_With_False [GOOD] >> TPQTabletTests::One_Tablet_For_All_Partitions |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TMaintenanceApiTest::RequestReplaceDevicePDisk [GOOD] >> TPartitionTests::TabletConfig_Is_Newer_That_PartitionConfig [GOOD] ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/persqueue/ut/unittest >> TSourceIdTests::ExpensiveCleanup [GOOD] Test command err: 2025-03-31T16:21:05.606858Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:05.606952Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-31T16:21:05.626701Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:1:Initializer] Start initializing step TInitConfigStep 2025-03-31T16:21:05.627223Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-31T16:21:05.627667Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:179:2194] 2025-03-31T16:21:05.628735Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:1:Initializer] Initializing completed. 2025-03-31T16:21:05.628809Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 1 generation 0 [1:179:2194] 2025-03-31T16:21:05.628872Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-31T16:21:05.629431Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Process pending events. Count 0 2025-03-31T16:21:05.629730Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] === DumpKeyValueRequest === 2025-03-31T16:21:05.629779Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- delete ---------------- 2025-03-31T16:21:05.629820Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- write ----------------- 2025-03-31T16:21:05.629860Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] i0000000001 2025-03-31T16:21:05.629901Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001cclient-1 2025-03-31T16:21:05.629933Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001uclient-1 2025-03-31T16:21:05.629956Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] _config_1 2025-03-31T16:21:05.629990Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- rename ---------------- 2025-03-31T16:21:05.630027Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] =========================== 2025-03-31T16:21:05.630125Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 user client-1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-03-31T16:21:05.630336Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateIdle] SubDomainOutOfSpace was changed. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. 
SubDomainOutOfSpace: 1 2025-03-31T16:21:05.630420Z node 1 :PERSQUEUE INFO: new Cookie owner1|9ce00128-de10f203-987fed1-9997f3bf_0 generated for partition 1 topic 'Root/PQ/rt3.dc1--account--topic' owner owner1 Send disk status response with cookie: 0 2025-03-31T16:21:05.630864Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-03-31T16:21:05.630966Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 1 2025-03-31T16:21:05.631289Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Send write quota request. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. Amount: 22. Cookie: 1 2025-03-31T16:21:05.631375Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Got quota. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1: Cookie: 1 2025-03-31T16:21:05.631524Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 part blob processing sourceId 'SourceId' seqNo 0 partNo 0 2025-03-31T16:21:05.632521Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 part blob complete sourceId 'SourceId' seqNo 0 partNo 0 FormedBlobsCount 0 NewHead: Offset 100 PartNo 0 PackedSize 118 count 1 nextOffset 101 batches 1 2025-03-31T16:21:05.633204Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 1 compactOffset 100,1 HeadOffset 0 endOffset 0 curOffset 101 d0000000001_00000000000000000100_00000_0000000001_00000| size 104 WTime 128 2025-03-31T16:21:05.633362Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] === DumpKeyValueRequest === 2025-03-31T16:21:05.633425Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- delete ---------------- 2025-03-31T16:21:05.633468Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] [x0000000001, x0000000002) 2025-03-31T16:21:05.633516Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- write ----------------- 2025-03-31T16:21:05.633572Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001pSourceId 2025-03-31T16:21:05.633611Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] d0000000001_00000000000000000100_00000_0000000001_00000| 2025-03-31T16:21:05.633633Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] i0000000001 2025-03-31T16:21:05.633665Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- rename ---------------- 2025-03-31T16:21:05.633706Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] =========================== 2025-03-31T16:21:05.675267Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 22 WriteNewSizeFromSupportivePartitions# 0 2025-03-31T16:21:05.675410Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::ReplyWrite. 
Partition: 1 2025-03-31T16:21:05.675533Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Answering for message sourceid: 'SourceId', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 1, SeqNo: 0, partNo: 0, Offset: 100 is stored on disk 2025-03-31T16:21:05.675742Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 user client-1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 100 ReadingTimestamp 0 rrg 0 2025-03-31T16:21:05.968502Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateIdle] SubDomainOutOfSpace was changed. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. SubDomainOutOfSpace: 0 2025-03-31T16:21:05.999613Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Send write quota request. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. Amount: 22. Cookie: 2 2025-03-31T16:21:05.999754Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Got quota. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1: Cookie: 2 2025-03-31T16:21:05.999919Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 part blob processing sourceId 'SourceId' seqNo 1 partNo 0 2025-03-31T16:21:06.000319Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 part blob sourceId 'SourceId' seqNo 1 partNo 0 result is x0000000001_00000000000000000100_00000_0000000001_00000 size 104 2025-03-31T16:21:06.000388Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] writing blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 1 old key x0000000001_00000000000000000100_00000_0000000001_00000 new key d0000000001_00000000000000000100_00000_0000000001_00000 size 104 WTime 1329 2025-03-31T16:21:06.001212Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 part blob complete sourceId 'SourceId' seqNo 1 partNo 0 FormedBlobsCount 1 NewHead: Offset 200 PartNo 0 PackedSize 118 count 1 nextOffset 201 batches 1 2025-03-31T16:21:06.001763Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 1 compactOffset 200,1 HeadOffset 100 endOffset 101 curOffset 201 d0000000001_00000000000000000200_00000_0000000001_00000| size 105 WTime 1329 2025-03-31T16:21:06.001884Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] === DumpKeyValueRequest === 2025-03-31T16:21:06.001921Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- delete ---------------- 2025-03-31T16:21:06.001967Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] [x0000000001, x0000000002) 2025-03-31T16:21:06.002001Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- write ----------------- 2025-03-31T16:21:06.002049Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] d0000000001_00000000000000000100_00000_0000000001_00000 2025-03-31T16:21:06.002075Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001pSourceId 2025-03-31T16:21:06.002107Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, 
State: StateIdle] d0000000001_00000000000000000200_00000_0000000001_00000| 2025-03-31T16:21:06.002129Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] i0000000001 2025-03-31T16:21:06.002155Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- rename ---------------- 2025-03-31T16:21:06.002193Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] =========================== 2025-03-31T16:21:06.022995Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 22 WriteNewSizeFromSupportivePartitions# 0 2025-03-31T16:21:06.023127Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::ReplyWrite. Partition: 1 2025-03-31T16:21:06.023197Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Answering for message sourceid: 'SourceId', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 1, SeqNo: 1, partNo: 0, Offset: 200 is stored on disk 2025-03-31T16:21:06.023464Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] === DumpKeyValueRequest === 2025-03-31T16:21:06.023518Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- delete ---------------- 2025-03-31T16:21:06.023563Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] [d0000000001_00000000000000000100_00000_0000000001_00000|, d0000000001_00000000000000000100_00000_0000000001_00000|] 2025-03-31T16:21:06.023609Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- write ----------------- 2025-03-31T16:21:06.023643Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] i0000000001 2025-03-31T16:21:06.023683Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- rename ---------------- 2025-03-31T16:21 ... 
2 Iteration 143 Iteration 144 ... Iteration 998 Iteration 999 >> TPQTabletTests::One_Tablet_For_All_Partitions [GOOD] >> CacheEviction::DeleteKeys [GOOD] >> PQCountersLabeled::Partition >> YdbSdkSessionsPool::PeriodicTask1 [GOOD] >> TPartitionTests::ShadowPartitionCountersFirstClass >> TPQTabletTests::One_New_Partition_In_Another_Tablet >> TPQTest::TestReadRuleVersions >> TPQTest::DirectReadBadSessionOrPipe [GOOD] >> TPQTest::DirectReadOldPipe >> TPQTabletTests::ProposeTx_Missing_Operations >> TPQTabletTests::One_New_Partition_In_Another_Tablet [GOOD] >> TPQTabletTests::ProposeTx_Missing_Operations [GOOD] >> TPQTabletTests::ProposeTx_Unknown_Partition_1 >> TPQTabletTests::ProposeTx_Unknown_Partition_1 [GOOD] >> KqpPg::DropTableIfExists [GOOD] >> KqpPg::DropTableIfExists_GenericQuery ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::PeriodicTask1 [GOOD] Test command err: 2025-03-31T16:20:40.178341Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488009368569696384:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:40.178396Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000570/r3tmp/tmpEKuhrR/pdisk_1.dat 2025-03-31T16:20:40.579010Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:40.579152Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:40.580597Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24453, node 1 2025-03-31T16:20:40.626143Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:20:40.626170Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:20:40.646485Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:20:40.653762Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or
outdated, will use file: (empty maybe) 2025-03-31T16:20:40.653795Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:40.653803Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:40.653932Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61404 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:40.963139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:44.252227Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488009384534698304:2136];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:44.263184Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000570/r3tmp/tmpIqSts5/pdisk_1.dat 2025-03-31T16:20:44.389804Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:20:44.411326Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:44.411399Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:44.419777Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19557, node 4 2025-03-31T16:20:44.473551Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:44.473572Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:44.473578Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:44.473690Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11350 WaitRootIsUp 'Root'... 
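The trace records above share a regular shape - ISO timestamp, node id, :COMPONENT tag, severity (TRACE/DEBUG/INFO/NOTICE/WARN/ERROR), then the message - so the WARN and ERROR records can be separated from the surrounding DEBUG noise mechanically. A minimal Python sketch, assuming the log has been captured to a local file (the file name and CLI are illustrative, not part of the test harness):

import re
import sys

# One record looks like:
#   2025-03-31T16:20:44.389804Z node 4 :IMPORT WARN: Table profiles were not loaded
# Records are concatenated with no line discipline, so we locate every record
# start and treat the text up to the next start as that record's message.
RECORD = re.compile(
    r"(\d{4}-\d{2}-\d{2}T[\d:.]+Z) node (\d+) :(\w+) "
    r"(TRACE|DEBUG|INFO|NOTICE|WARN|ERROR): "
)

def filter_records(text, wanted=("WARN", "ERROR")):
    starts = list(RECORD.finditer(text))
    out = []
    for i, m in enumerate(starts):
        end = starts[i + 1].start() if i + 1 < len(starts) else len(text)
        ts, node, component, level = m.groups()
        if level in wanted:
            out.append((ts, int(node), component, level, text[m.end():end].strip()))
    return out

if __name__ == "__main__":
    text = open(sys.argv[1]).read()  # e.g. a saved copy of this test log
    for ts, node, comp, level, msg in filter_records(text):
        print(f"{ts} node {node} {comp} {level}: {msg[:120]}")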
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:44.740366Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:49.252261Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7488009384534698304:2136];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:49.252326Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:20:59.367865Z node 4 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:20:59.367891Z node 4 :IMPORT WARN: Table profiles were not loaded >> TPQTabletTests::ProposeTx_Unknown_WriteId >> TPQTest::TestPQPartialRead ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTabletTests::One_New_Partition_In_Another_Tablet [GOOD] Test command err: 2025-03-31T16:21:06.142911Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-03-31T16:21:06.147311Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-03-31T16:21:06.147615Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] doesn't have tx info 2025-03-31T16:21:06.147674Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-31T16:21:06.147713Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] no config, start with empty partitions and default config 2025-03-31T16:21:06.147754Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2025-03-31T16:21:06.147802Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:06.147894Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-31T16:21:06.178733Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:206:2212], now have 1 active actors on pipe 2025-03-31T16:21:06.178875Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-03-31T16:21:06.202465Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "consumer" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "consumer" Generation: 1 Important: true } 2025-03-31T16:21:06.205415Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "consumer" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "consumer" Generation: 1 Important: true } 2025-03-31T16:21:06.205663Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:06.206649Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "consumer" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { 
PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "consumer" Generation: 1 Important: true } 2025-03-31T16:21:06.206820Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitConfigStep 2025-03-31T16:21:06.207308Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-31T16:21:06.207731Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:214:2218] 2025-03-31T16:21:06.208767Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Initializing completed. 2025-03-31T16:21:06.208846Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [1:214:2218] 2025-03-31T16:21:06.208906Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-31T16:21:06.209830Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-03-31T16:21:06.209954Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2025-03-31T16:21:06.210003Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2025-03-31T16:21:06.210047Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user consumer reinit request with generation 1 2025-03-31T16:21:06.210073Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user consumer reinit with generation 1 done 2025-03-31T16:21:06.210280Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-31T16:21:06.210331Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-31T16:21:06.210378Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-31T16:21:06.210428Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-31T16:21:06.210504Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cuser 2025-03-31T16:21:06.210528Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uuser 2025-03-31T16:21:06.210550Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cconsumer 2025-03-31T16:21:06.210570Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uconsumer 2025-03-31T16:21:06.210604Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-31T16:21:06.210644Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-03-31T16:21:06.210761Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 
initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-03-31T16:21:06.210817Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user consumer readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-03-31T16:21:06.211051Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-03-31T16:21:06.214148Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-03-31T16:21:06.214596Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:221:2223], now have 1 active actors on pipe 2025-03-31T16:21:06.215326Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:224:2225], now have 1 active actors on pipe 2025-03-31T16:21:06.216183Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 177 RawX2: 4294969488 } TxId: 67890 Data { Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "consumer" Path: "/topic" } SendingShards: 22222 ReceivingShards: 22222 Immediate: false } 2025-03-31T16:21:06.216247Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] distributed transaction 2025-03-31T16:21:06.216348Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Propose TxId 67890, WriteId (empty maybe) 2025-03-31T16:21:06.216411Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state UNKNOWN 2025-03-31T16:21:06.216452Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State UNKNOWN 2025-03-31T16:21:06.216499Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2025-03-31T16:21:06.216545Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PREPARING 2025-03-31T16:21:06.216717Z node 1 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: PREPARED MinStep: 134 MaxStep: 30134 PredicatesReceived { TabletId: 22222 } PredicateRecipients: 22222 Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "consumer" Path: "/topic" } Kind: KIND_DATA SourceActor { RawX1: 177 RawX2: 4294969488 } Partitions { } 2025-03-31T16:21:06.216812Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-03-31T16:21:06.220679Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-03-31T16:21:06.220752Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state PREPARING 2025-03-31T16:21:06.220793Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State PREPARING 2025-03-31T16:21:06.220837Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PREPARED 2025-03-31T16:21:06.221273Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 177 RawX2: 4294969488 } TxId: 67891 Data { Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "consumer" Path: "/topic" } SendingShards: 22222 ReceivingShards: 22222 Immediate: false } 2025-03-31T16:21:06.221325Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] distributed transaction 2025-03-31T16:21:06.221402Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Propose TxId 67891, WriteId (empty maybe) 2025-03-31T16:21:06.221447Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state UNKNOWN 2025-03-31T16:21:06.221506Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] 
TxId 67891, State UNKNOWN 2025-03-31T16:21:06.221550Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2025-03-31T16:21:06.221590Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, NewState PREPARING 2025-03-31T16:21:06.221740Z node 1 :PERSQUEUE DEBUG: [TxId: 67891] save tx TxId: 67891 State: PREPARED MinStep: 136 MaxStep: 30136 PredicatesReceived { TabletId: 22222 } PredicateRecipients: 22222 Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "consumer" Path: "/topic" } Kind: KIND_DATA SourceActor { RawX1: 177 RawX2: 4294969488 } Partitions { } 2025-03-31T16:21:06.221835Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-03-31T16:21:06.225412Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-03-31T16:21:06.225488Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state PREPARING 2025-03-31T16:21:06.225548Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, State PREP ... 67890, State CALCULATED 2025-03-31T16:21:08.620332Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 State CALCULATED FrontTxId 67890 2025-03-31T16:21:08.620385Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState WAIT_RS 2025-03-31T16:21:08.620432Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from CALCULATED to WAIT_RS 2025-03-31T16:21:08.620487Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSet to 1 receivers. Wait TEvTxProcessing::TEvReadSet from 0 senders. 2025-03-31T16:21:08.620521Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvReadSet to tablet 22222 2025-03-31T16:21:08.620637Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveParticipantsDecision 1 2025-03-31T16:21:08.620738Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState EXECUTING 2025-03-31T16:21:08.620773Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from WAIT_RS to EXECUTING 2025-03-31T16:21:08.620808Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 0, Expected 2 2025-03-31T16:21:08.620934Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 100, TxId 67890 2025-03-31T16:21:08.621252Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] === DumpKeyValueRequest === 2025-03-31T16:21:08.621288Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- delete ---------------- 2025-03-31T16:21:08.621333Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- write ----------------- 2025-03-31T16:21:08.621375Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] i0000000001 2025-03-31T16:21:08.621412Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] I0000000001 2025-03-31T16:21:08.621440Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001cclient-1 2025-03-31T16:21:08.621467Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001uclient-1 2025-03-31T16:21:08.621517Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001cclient-3 2025-03-31T16:21:08.621545Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001uclient-3 2025-03-31T16:21:08.621571Z node 6 :PERSQUEUE DEBUG: 
[PQ: 72057594037927937, Partition: 1, State: StateIdle] _config_1 2025-03-31T16:21:08.621610Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- rename ---------------- 2025-03-31T16:21:08.621663Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] =========================== 2025-03-31T16:21:08.621808Z node 6 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-03-31T16:21:08.621931Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 100, TxId 67890 2025-03-31T16:21:08.622056Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user client-1 reinit with generation 2 done 2025-03-31T16:21:08.622091Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user client-3 reinit with generation 2 done 2025-03-31T16:21:08.622121Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user drop done 2025-03-31T16:21:08.622279Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-31T16:21:08.622305Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-31T16:21:08.622336Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [m0000000000cuser, m0000000000cuser] 2025-03-31T16:21:08.622360Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [m0000000000uuser, m0000000000uuser] 2025-03-31T16:21:08.622387Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-31T16:21:08.622417Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-31T16:21:08.622438Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-03-31T16:21:08.622460Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cclient-1 2025-03-31T16:21:08.622481Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uclient-1 2025-03-31T16:21:08.622503Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cclient-3 2025-03-31T16:21:08.622521Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uclient-3 2025-03-31T16:21:08.622538Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] _config_0 2025-03-31T16:21:08.622557Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-31T16:21:08.622580Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-03-31T16:21:08.622639Z node 6 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2025-03-31T16:21:08.624613Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvClientConnected 2025-03-31T16:21:08.624663Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Connected to tablet 22222 2025-03-31T16:21:08.626790Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-03-31T16:21:08.626894Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvTxCommitDone Step 100, TxId 67890, Partition 1 2025-03-31T16:21:08.626935Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state EXECUTING 2025-03-31T16:21:08.626977Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State EXECUTING 2025-03-31T16:21:08.627019Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 State EXECUTING FrontTxId 67890 2025-03-31T16:21:08.627054Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 1, Expected 2 2025-03-31T16:21:08.627122Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 status has not changed 2025-03-31T16:21:08.629060Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-03-31T16:21:08.629195Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvTxCommitDone Step 100, TxId 67890, Partition 0 2025-03-31T16:21:08.629223Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state EXECUTING 2025-03-31T16:21:08.629246Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State EXECUTING 2025-03-31T16:21:08.629272Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 State EXECUTING FrontTxId 67890 2025-03-31T16:21:08.629296Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 2, Expected 2 2025-03-31T16:21:08.629325Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId: 67890 send TEvPersQueue::TEvProposeTransactionResult(COMPLETE) 2025-03-31T16:21:08.629359Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] complete TxId 67890 2025-03-31T16:21:08.629550Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config PartitionConfig { LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 10485760 } TopicName: "rt3.dc1--account--topic" Version: 2 LocalDC: true TopicPath: "/Root/PQ/rt3.dc1--account--topic" YdbDatabasePath: "" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 ReadRuleGenerations: 2 FederationAccount: "account" MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 ChildPartitionIds: 1 ChildPartitionIds: 2 TabletId: 72057594037927937 } AllPartitions { PartitionId: 1 ParentPartitionIds: 0 TabletId: 72057594037927937 } AllPartitions { PartitionId: 2 ParentPartitionIds: 0 TabletId: 22222 } Consumers { Name: "client-1" Generation: 2 Important: false } Consumers { Name: "client-3" Generation: 2 Important: false } 2025-03-31T16:21:08.629600Z node 6 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:08.629657Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] delete partitions for TxId 67890 2025-03-31T16:21:08.629686Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState EXECUTED 2025-03-31T16:21:08.629730Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from EXECUTING to EXECUTED 2025-03-31T16:21:08.629960Z node 6 
:PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: EXECUTED MinStep: 134 MaxStep: 18446744073709551615 PredicateRecipients: 22222 Step: 100 Predicate: true Kind: KIND_CONFIG TabletConfig { PartitionConfig { LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 10485760 } TopicName: "rt3.dc1--account--topic" Version: 2 LocalDC: true TopicPath: "/Root/PQ/rt3.dc1--account--topic" YdbDatabasePath: "" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 ReadRuleGenerations: 2 FederationAccount: "account" MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 ChildPartitionIds: 1 ChildPartitionIds: 2 TabletId: 72057594037927937 } AllPartitions { PartitionId: 1 ParentPartitionIds: 0 TabletId: 72057594037927937 } AllPartitions { PartitionId: 2 ParentPartitionIds: 0 TabletId: 22222 } Consumers { Name: "client-1" Generation: 2 Important: false } Consumers { Name: "client-3" Generation: 2 Important: false } } BootstrapConfig { } SourceActor { RawX1: 177 RawX2: 25769805968 } Partitions { Partition { PartitionId: 0 } Partition { PartitionId: 1 } } 2025-03-31T16:21:08.630136Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-03-31T16:21:08.632508Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-03-31T16:21:08.632550Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state EXECUTED 2025-03-31T16:21:08.632583Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State EXECUTED 2025-03-31T16:21:08.632611Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 State EXECUTED FrontTxId 67890 2025-03-31T16:21:08.632638Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TPersQueue::SendEvReadSetAckToSenders 2025-03-31T16:21:08.632677Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState WAIT_RS_ACKS 2025-03-31T16:21:08.632712Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from EXECUTED to WAIT_RS_ACKS 2025-03-31T16:21:08.632747Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] PredicateAcks: 0/1 2025-03-31T16:21:08.632771Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveAllRecipientsReceive 0, AllSupportivePartitionsHaveBeenDeleted 1 2025-03-31T16:21:08.632801Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] PredicateAcks: 0/1 2025-03-31T16:21:08.636253Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [6:363:2335], now have 1 active actors on pipe 2025-03-31T16:21:08.636448Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvReadSet Step: 100 TxId: 67890 TabletSource: 22222 TabletDest: 72057594037927937 TabletProducer: 22222 ReadSet: "\010\001" Seqno: 0 2025-03-31T16:21:08.636493Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] send TEvReadSetAck to 22222 2025-03-31T16:21:08.636551Z node 6 :PERSQUEUE DEBUG: Connected to tablet 72057594037927937 from tablet 22222 >> TKeyValueTest::TestInlineWriteReadWithRestartsThenResponseOkNewApi [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultTextNotNullButNull+useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultTextNotNullButNull-useSink >> TPQTabletTests::ProposeTx_Unknown_WriteId [GOOD] >> TPQTest::TestUserInfoCompatibility >> TPQTabletTests::ProposeTx_Unknown_Partition_2 >> TPQTest::DirectReadOldPipe [GOOD] >> TPQTest::PQ_Tablet_Does_Not_Remove_The_Blob_Until_The_Reading_Is_Complete >> PQCountersLabeled::Partition [GOOD] >> PQCountersLabeled::PartitionFirstClass >> 
TCmsTenatsTest::TestClusterLimitForceRestartMode [GOOD] >> TPQTabletTests::ProposeTx_Unknown_Partition_2 [GOOD] >> TPQTabletTests::ProposeTx_Command_After_Propose ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestInlineWriteReadWithRestartsThenResponseOkNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:52:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:52:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:78:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:79:2110] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:79:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
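The TKeyValueTest trace here follows the reboot-injection pattern: the harness announces "!Reboot <tablet> (actor [...]) on event <Event> !", replays the "Leader for TabletID ..." election lines, and confirms recovery with "rebooted!" and "tablet resolver refreshed!" before naming the new actor. A sketch that tallies injected reboots per intercepted event type, assuming only those literal markers (an external log-analysis aid, not part of the test):

import re
import sys
from collections import Counter

# Markers printed by the reboot-injection harness, e.g.:
#   !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate !
#   !Reboot 72057594037927937 (actor [4:56:2097]) rebooted!
ON_EVENT = re.compile(r"!Reboot (\d+) \(actor \[[^\]]+\]\) on event (\S+) !")
REBOOTED = re.compile(r"!Reboot (\d+) \(actor \[[^\]]+\]\) rebooted!")

def summarize_reboots(text):
    injected = Counter(event for _tablet, event in ON_EVENT.findall(text))
    completed = len(REBOOTED.findall(text))
    return injected, completed

if __name__ == "__main__":
    injected, completed = summarize_reboots(open(sys.argv[1]).read())
    for event, n in injected.most_common():
        print(f"{n:3d}x reboot on {event}")
    print(f"{completed} reboots confirmed complete")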
Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:82:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:83:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:83:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:140:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:84:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:87:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:88:2057] recipient: [8:86:2116] Leader for TabletID 72057594037927937 is [8:89:2117] sender: [8:90:2057] recipient: [8:86:2116] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! new actor is[8:89:2117] Leader for TabletID 72057594037927937 is [8:89:2117] sender: [8:143:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:84:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:88:2057] recipient: [9:86:2116] Leader for TabletID 72057594037927937 is [9:89:2117] sender: [9:90:2057] recipient: [9:86:2116] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! new actor is[9:89:2117] Leader for TabletID 72057594037927937 is [9:89:2117] sender: [9:143:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:52:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:52:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:85:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:87:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:89:2057] recipient: [10:88:2116] Leader for TabletID 72057594037927937 is [10:90:2117] sender: [10:91:2057] recipient: [10:88:2116] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:90:2117] Leader for TabletID 72057594037927937 is [10:90:2117] sender: [10:144:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:88:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:91:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:92:2057] recipient: [11:90:2119] Leader for TabletID 72057594037927937 is [11:93:2120] sender: [11:94:2057] recipient: [11:90:2119] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! new actor is[11:93:2120] Leader for TabletID 72057594037927937 is [11:93:2120] sender: [11:147:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (acto ... 6:2097]) rebooted! !Reboot 72057594037927937 (actor [44:56:2097]) tablet resolver refreshed! new actor is[44:101:2126] Leader for TabletID 72057594037927937 is [44:101:2126] sender: [44:155:2057] recipient: [44:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:54:2057] recipient: [45:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:54:2057] recipient: [45:51:2095] Leader for TabletID 72057594037927937 is [45:56:2097] sender: [45:57:2057] recipient: [45:51:2095] Leader for TabletID 72057594037927937 is [45:56:2097] sender: [45:74:2057] recipient: [45:14:2061] !Reboot 72057594037927937 (actor [45:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [45:56:2097] sender: [45:97:2057] recipient: [45:36:2083] Leader for TabletID 72057594037927937 is [45:56:2097] sender: [45:100:2057] recipient: [45:14:2061] Leader for TabletID 72057594037927937 is [45:56:2097] sender: [45:101:2057] recipient: [45:99:2125] Leader for TabletID 72057594037927937 is [45:102:2126] sender: [45:103:2057] recipient: [45:99:2125] !Reboot 72057594037927937 (actor [45:56:2097]) rebooted! !Reboot 72057594037927937 (actor [45:56:2097]) tablet resolver refreshed! 
new actor is[45:102:2126] Leader for TabletID 72057594037927937 is [45:102:2126] sender: [45:120:2057] recipient: [45:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [46:54:2057] recipient: [46:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [46:54:2057] recipient: [46:52:2095] Leader for TabletID 72057594037927937 is [46:56:2097] sender: [46:57:2057] recipient: [46:52:2095] Leader for TabletID 72057594037927937 is [46:56:2097] sender: [46:74:2057] recipient: [46:14:2061] !Reboot 72057594037927937 (actor [46:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [46:56:2097] sender: [46:99:2057] recipient: [46:36:2083] Leader for TabletID 72057594037927937 is [46:56:2097] sender: [46:102:2057] recipient: [46:14:2061] Leader for TabletID 72057594037927937 is [46:56:2097] sender: [46:103:2057] recipient: [46:101:2127] Leader for TabletID 72057594037927937 is [46:104:2128] sender: [46:105:2057] recipient: [46:101:2127] !Reboot 72057594037927937 (actor [46:56:2097]) rebooted! !Reboot 72057594037927937 (actor [46:56:2097]) tablet resolver refreshed! new actor is[46:104:2128] Leader for TabletID 72057594037927937 is [46:104:2128] sender: [46:158:2057] recipient: [46:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [47:54:2057] recipient: [47:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [47:54:2057] recipient: [47:50:2095] Leader for TabletID 72057594037927937 is [47:56:2097] sender: [47:57:2057] recipient: [47:50:2095] Leader for TabletID 72057594037927937 is [47:56:2097] sender: [47:74:2057] recipient: [47:14:2061] !Reboot 72057594037927937 (actor [47:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [47:56:2097] sender: [47:99:2057] recipient: [47:36:2083] Leader for TabletID 72057594037927937 is [47:56:2097] sender: [47:102:2057] recipient: [47:14:2061] Leader for TabletID 72057594037927937 is [47:56:2097] sender: [47:103:2057] recipient: [47:101:2127] Leader for TabletID 72057594037927937 is [47:104:2128] sender: [47:105:2057] recipient: [47:101:2127] !Reboot 72057594037927937 (actor [47:56:2097]) rebooted! !Reboot 72057594037927937 (actor [47:56:2097]) tablet resolver refreshed! new actor is[47:104:2128] Leader for TabletID 72057594037927937 is [47:104:2128] sender: [47:158:2057] recipient: [47:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:54:2057] recipient: [48:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:54:2057] recipient: [48:51:2095] Leader for TabletID 72057594037927937 is [48:56:2097] sender: [48:57:2057] recipient: [48:51:2095] Leader for TabletID 72057594037927937 is [48:56:2097] sender: [48:74:2057] recipient: [48:14:2061] !Reboot 72057594037927937 (actor [48:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [48:56:2097] sender: [48:100:2057] recipient: [48:36:2083] Leader for TabletID 72057594037927937 is [48:56:2097] sender: [48:102:2057] recipient: [48:14:2061] Leader for TabletID 72057594037927937 is [48:56:2097] sender: [48:104:2057] recipient: [48:103:2127] Leader for TabletID 72057594037927937 is [48:105:2128] sender: [48:106:2057] recipient: [48:103:2127] !Reboot 72057594037927937 (actor [48:56:2097]) rebooted! !Reboot 72057594037927937 (actor [48:56:2097]) tablet resolver refreshed! 
new actor is[48:105:2128] Leader for TabletID 72057594037927937 is [48:105:2128] sender: [48:123:2057] recipient: [48:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [49:54:2057] recipient: [49:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [49:54:2057] recipient: [49:51:2095] Leader for TabletID 72057594037927937 is [49:56:2097] sender: [49:57:2057] recipient: [49:51:2095] Leader for TabletID 72057594037927937 is [49:56:2097] sender: [49:74:2057] recipient: [49:14:2061] !Reboot 72057594037927937 (actor [49:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [49:56:2097] sender: [49:102:2057] recipient: [49:36:2083] Leader for TabletID 72057594037927937 is [49:56:2097] sender: [49:105:2057] recipient: [49:14:2061] Leader for TabletID 72057594037927937 is [49:56:2097] sender: [49:106:2057] recipient: [49:104:2129] Leader for TabletID 72057594037927937 is [49:107:2130] sender: [49:108:2057] recipient: [49:104:2129] !Reboot 72057594037927937 (actor [49:56:2097]) rebooted! !Reboot 72057594037927937 (actor [49:56:2097]) tablet resolver refreshed! new actor is[49:107:2130] Leader for TabletID 72057594037927937 is [49:107:2130] sender: [49:161:2057] recipient: [49:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [50:54:2057] recipient: [50:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [50:54:2057] recipient: [50:50:2095] Leader for TabletID 72057594037927937 is [50:56:2097] sender: [50:57:2057] recipient: [50:50:2095] Leader for TabletID 72057594037927937 is [50:56:2097] sender: [50:74:2057] recipient: [50:14:2061] !Reboot 72057594037927937 (actor [50:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [50:56:2097] sender: [50:102:2057] recipient: [50:36:2083] Leader for TabletID 72057594037927937 is [50:56:2097] sender: [50:105:2057] recipient: [50:14:2061] Leader for TabletID 72057594037927937 is [50:56:2097] sender: [50:106:2057] recipient: [50:104:2129] Leader for TabletID 72057594037927937 is [50:107:2130] sender: [50:108:2057] recipient: [50:104:2129] !Reboot 72057594037927937 (actor [50:56:2097]) rebooted! !Reboot 72057594037927937 (actor [50:56:2097]) tablet resolver refreshed! new actor is[50:107:2130] Leader for TabletID 72057594037927937 is [50:107:2130] sender: [50:161:2057] recipient: [50:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [51:54:2057] recipient: [51:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [51:54:2057] recipient: [51:51:2095] Leader for TabletID 72057594037927937 is [51:56:2097] sender: [51:57:2057] recipient: [51:51:2095] Leader for TabletID 72057594037927937 is [51:56:2097] sender: [51:74:2057] recipient: [51:14:2061] !Reboot 72057594037927937 (actor [51:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [51:56:2097] sender: [51:103:2057] recipient: [51:36:2083] Leader for TabletID 72057594037927937 is [51:56:2097] sender: [51:106:2057] recipient: [51:14:2061] Leader for TabletID 72057594037927937 is [51:56:2097] sender: [51:107:2057] recipient: [51:105:2129] Leader for TabletID 72057594037927937 is [51:108:2130] sender: [51:109:2057] recipient: [51:105:2129] !Reboot 72057594037927937 (actor [51:56:2097]) rebooted! !Reboot 72057594037927937 (actor [51:56:2097]) tablet resolver refreshed! 
new actor is[51:108:2130] Leader for TabletID 72057594037927937 is [51:108:2130] sender: [51:126:2057] recipient: [51:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [52:54:2057] recipient: [52:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [52:54:2057] recipient: [52:51:2095] Leader for TabletID 72057594037927937 is [52:56:2097] sender: [52:57:2057] recipient: [52:51:2095] Leader for TabletID 72057594037927937 is [52:56:2097] sender: [52:74:2057] recipient: [52:14:2061] !Reboot 72057594037927937 (actor [52:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [52:56:2097] sender: [52:105:2057] recipient: [52:36:2083] Leader for TabletID 72057594037927937 is [52:56:2097] sender: [52:108:2057] recipient: [52:14:2061] Leader for TabletID 72057594037927937 is [52:56:2097] sender: [52:109:2057] recipient: [52:107:2131] Leader for TabletID 72057594037927937 is [52:110:2132] sender: [52:111:2057] recipient: [52:107:2131] !Reboot 72057594037927937 (actor [52:56:2097]) rebooted! !Reboot 72057594037927937 (actor [52:56:2097]) tablet resolver refreshed! new actor is[52:110:2132] Leader for TabletID 72057594037927937 is [52:110:2132] sender: [52:164:2057] recipient: [52:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [53:54:2057] recipient: [53:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [53:54:2057] recipient: [53:51:2095] Leader for TabletID 72057594037927937 is [53:56:2097] sender: [53:57:2057] recipient: [53:51:2095] Leader for TabletID 72057594037927937 is [53:56:2097] sender: [53:74:2057] recipient: [53:14:2061] !Reboot 72057594037927937 (actor [53:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [53:56:2097] sender: [53:105:2057] recipient: [53:36:2083] Leader for TabletID 72057594037927937 is [53:56:2097] sender: [53:108:2057] recipient: [53:14:2061] Leader for TabletID 72057594037927937 is [53:56:2097] sender: [53:109:2057] recipient: [53:107:2131] Leader for TabletID 72057594037927937 is [53:110:2132] sender: [53:111:2057] recipient: [53:107:2131] !Reboot 72057594037927937 (actor [53:56:2097]) rebooted! !Reboot 72057594037927937 (actor [53:56:2097]) tablet resolver refreshed! new actor is[53:110:2132] Leader for TabletID 72057594037927937 is [53:110:2132] sender: [53:164:2057] recipient: [53:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [54:54:2057] recipient: [54:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [54:54:2057] recipient: [54:51:2095] Leader for TabletID 72057594037927937 is [54:56:2097] sender: [54:57:2057] recipient: [54:51:2095] Leader for TabletID 72057594037927937 is [54:56:2097] sender: [54:74:2057] recipient: [54:14:2061] !Reboot 72057594037927937 (actor [54:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [54:56:2097] sender: [54:106:2057] recipient: [54:36:2083] Leader for TabletID 72057594037927937 is [54:56:2097] sender: [54:109:2057] recipient: [54:14:2061] Leader for TabletID 72057594037927937 is [54:56:2097] sender: [54:110:2057] recipient: [54:108:2131] Leader for TabletID 72057594037927937 is [54:111:2132] sender: [54:112:2057] recipient: [54:108:2131] !Reboot 72057594037927937 (actor [54:56:2097]) rebooted! !Reboot 72057594037927937 (actor [54:56:2097]) tablet resolver refreshed! 
new actor is[54:111:2132] Leader for TabletID 72057594037927937 is [0:0:0] sender: [55:54:2057] recipient: [55:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [55:54:2057] recipient: [55:52:2095] Leader for TabletID 72057594037927937 is [55:56:2097] sender: [55:57:2057] recipient: [55:52:2095] Leader for TabletID 72057594037927937 is [55:56:2097] sender: [55:74:2057] recipient: [55:14:2061] >> TPQTest::TestReadRuleVersions [GOOD] >> TPQTest::TestReserveBytes >> TPartitionTests::DataTxCalcPredicateOrder [GOOD] >> TPartitionTests::FailedTxsDontBlock [GOOD] >> KqpPg::CreateUniqComplexPgColumn+useSink [GOOD] >> KqpPg::CreateUniqComplexPgColumn-useSink >> TPartitionTests::GetUsedStorage >> TPQTabletTests::ProposeTx_Command_After_Propose [GOOD] |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTenatsTest::TestClusterLimitForceRestartMode [GOOD] >> TPartitionTests::DifferentWriteTxBatchingOptions >> KqpPg::InsertValuesFromTableWithDefaultBool+useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultBool-useSink >> TPQTabletTests::Read_TEvTxCommit_After_Restart >> TPartitionTests::ConflictingActsInSeveralBatches >> TPartitionTests::ConflictingSrcIdTxAndWritesDifferentBatches [GOOD] >> TPartitionTests::GetUsedStorage [GOOD] >> TPQTabletTests::Read_TEvTxCommit_After_Restart [GOOD] |90.2%| [TA] $(B)/ydb/core/cms/ut/test-results/unittest/{meta.json ... results_accumulator.log} |90.2%| [TA] {RESULT} $(B)/ydb/core/cms/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TPartitionTests::ConflictingSrcIdForTxWithHead >> TPQTest::TestPartitionTotalQuota >> TPartitionTests::ShadowPartitionCountersFirstClass [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTabletTests::Read_TEvTxCommit_After_Restart [GOOD] Test command err: 2025-03-31T16:21:08.754810Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-03-31T16:21:08.758003Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-03-31T16:21:08.758185Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] doesn't have tx info 2025-03-31T16:21:08.758241Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-31T16:21:08.758269Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] no config, start with empty partitions and default config 2025-03-31T16:21:08.758312Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2025-03-31T16:21:08.758354Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:08.758403Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-31T16:21:08.770932Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:178:2193], now have 1 active actors on pipe 2025-03-31T16:21:08.771062Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-03-31T16:21:08.786386Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-03-31T16:21:08.788577Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-03-31T16:21:08.788706Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:08.789460Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-03-31T16:21:08.789571Z 
node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitConfigStep 2025-03-31T16:21:08.789866Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-31T16:21:08.790143Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:186:2199] 2025-03-31T16:21:08.790798Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Initializing completed. 2025-03-31T16:21:08.790850Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [1:186:2199] 2025-03-31T16:21:08.790903Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-31T16:21:08.791358Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-03-31T16:21:08.791455Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2025-03-31T16:21:08.791489Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2025-03-31T16:21:08.791592Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-31T16:21:08.791623Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-31T16:21:08.791649Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-31T16:21:08.791675Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-31T16:21:08.791702Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cuser 2025-03-31T16:21:08.791723Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uuser 2025-03-31T16:21:08.791759Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-31T16:21:08.791790Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-03-31T16:21:08.791862Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-03-31T16:21:08.791991Z node 1 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2025-03-31T16:21:08.794128Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-03-31T16:21:08.794447Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:193:2204], now have 1 active actors on pipe 2025-03-31T16:21:08.794922Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:196:2206], now have 1 active actors on pipe 2025-03-31T16:21:08.795485Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 177 RawX2: 4294969488 } TxId: 2 Data { Immediate: false } 2025-03-31T16:21:08.795522Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 2 empty list of operations 2025-03-31T16:21:08.795566Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId: 2 send TEvPersQueue::TEvProposeTransactionResult(ABORTED) 2025-03-31T16:21:09.089436Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-03-31T16:21:09.093262Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-03-31T16:21:09.093513Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] doesn't have tx info 2025-03-31T16:21:09.093547Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-31T16:21:09.093571Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] no config, start with empty partitions and default config 2025-03-31T16:21:09.093600Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2025-03-31T16:21:09.093631Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:09.093669Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-31T16:21:09.108291Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [2:178:2193], now have 1 active actors on pipe 2025-03-31T16:21:09.108451Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-03-31T16:21:09.108678Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 2(current 0) received from actor [2:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 2 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2025-03-31T16:21:09.110627Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } 
ReadRuleGenerations: 2 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2025-03-31T16:21:09.110713Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:09.111346Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 2 actor [2:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 2 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2025-03-31T16:21:09.111458Z node 2 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitConfigStep 2025-03-31T16:21:09.111759Z node 2 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-31T16:21:09.112016Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:186:2199] 2025-03-31T16:21:09.112961Z node 2 :PERSQUEUE DEBUG: [topic:0:Initializer] Initializing completed. 2025-03-31T16:21:09.113015Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [2:186:2199] 2025-03-31T16:21:09.113064Z node 2 :PERSQUEUE DEBUG: [PQ: 7205759403792793 ... DEBUG: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-31T16:21:10.842344Z node 6 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [6:304:2290] 2025-03-31T16:21:10.843143Z node 6 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitDiskStatusStep 2025-03-31T16:21:10.844255Z node 6 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitMetaStep 2025-03-31T16:21:10.844537Z node 6 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInfoRangeStep 2025-03-31T16:21:10.845273Z node 6 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitDataRangeStep 2025-03-31T16:21:10.845542Z node 6 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitDataStep 2025-03-31T16:21:10.845582Z node 6 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-03-31T16:21:10.845620Z node 6 :PERSQUEUE INFO: [topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-31T16:21:10.845656Z node 6 :PERSQUEUE DEBUG: [topic:0:Initializer] Initializing completed. 
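The bootstrap trace above runs a fixed chain of initializer steps (TInitConfigStep, TInitInternalFieldsStep, TInitDiskStatusStep, TInitMetaStep, TInitInfoRangeStep, TInitDataRangeStep, TInitDataStep, TInitEndWriteTimestampStep), each announced as "Start initializing step ..." and closed with "Initializing completed.". Below is a minimal C++ sketch of that step-chain pattern; only the step names come from the log, while the TInitStep struct and runner are illustrative assumptions, not YDB's actual classes.

// Sketch only: a sequential initializer chain as traced in the log above.
#include <functional>
#include <iostream>
#include <string>
#include <vector>

struct TInitStep {
    std::string Name;
    std::function<bool()> Run; // returns true when the step succeeds
};

bool RunInitializerChain(const std::vector<TInitStep>& steps) {
    for (const auto& step : steps) {
        std::cout << "Start initializing step " << step.Name << "\n";
        if (!step.Run())
            return false; // a failed step aborts partition bootstrap
    }
    std::cout << "Initializing completed.\n";
    return true;
}

int main() {
    std::vector<TInitStep> chain = {
        {"TInitConfigStep",            []{ return true; }},
        {"TInitInternalFieldsStep",    []{ return true; }},
        {"TInitDiskStatusStep",        []{ return true; }},
        {"TInitMetaStep",              []{ return true; }},
        {"TInitInfoRangeStep",         []{ return true; }},
        {"TInitDataRangeStep",         []{ return true; }},
        {"TInitDataStep",              []{ return true; }},
        {"TInitEndWriteTimestampStep", []{ return true; }},
    };
    return RunInitializerChain(chain) ? 0 : 1;
}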
2025-03-31T16:21:10.845701Z node 6 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 3 [6:304:2290] 2025-03-31T16:21:10.845751Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-31T16:21:10.845806Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-03-31T16:21:10.845876Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 6 2025-03-31T16:21:10.846115Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state PLANNED 2025-03-31T16:21:10.846158Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State PLANNED 2025-03-31T16:21:10.846194Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 State PLANNED FrontTxId 67890 2025-03-31T16:21:10.846237Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxQueue.size 1 2025-03-31T16:21:10.846269Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] New ExecStep 100, ExecTxId 67890 2025-03-31T16:21:10.846326Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState CALCULATING 2025-03-31T16:21:10.846365Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from PLANNED to CALCULATING 2025-03-31T16:21:10.846431Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 100, TxId 67890 2025-03-31T16:21:10.846802Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvTxCalcPredicateResult Step 100, TxId 67890, Partition 0, Predicate 1 2025-03-31T16:21:10.846841Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] Handle TEvTxCalcPredicateResult 2025-03-31T16:21:10.846871Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] Partition responses 1/1 2025-03-31T16:21:10.846902Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state CALCULATING 2025-03-31T16:21:10.846934Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State CALCULATING 2025-03-31T16:21:10.846973Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 State CALCULATING FrontTxId 67890 2025-03-31T16:21:10.847006Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 1, Expected 1 2025-03-31T16:21:10.847042Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState CALCULATED 2025-03-31T16:21:10.847098Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from CALCULATING to CALCULATED 2025-03-31T16:21:10.847263Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: CALCULATED MinStep: 134 MaxStep: 30134 PredicatesReceived { TabletId: 22222 } PredicateRecipients: 22222 Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Step: 100 Predicate: true Kind: KIND_DATA SourceActor { RawX1: 177 RawX2: 25769805968 } Partitions { } 2025-03-31T16:21:10.847350Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-03-31T16:21:10.852069Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-03-31T16:21:10.852136Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state CALCULATED 2025-03-31T16:21:10.852181Z node 6 :PERSQUEUE DEBUG: [PQ: 
72057594037927937] TxId 67890, State CALCULATED 2025-03-31T16:21:10.852218Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 State CALCULATED FrontTxId 67890 2025-03-31T16:21:10.852253Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState WAIT_RS 2025-03-31T16:21:10.852299Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from CALCULATED to WAIT_RS 2025-03-31T16:21:10.852344Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSet to 1 receivers. Wait TEvTxProcessing::TEvReadSet from 1 senders. 2025-03-31T16:21:10.852379Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvReadSet to tablet 22222 2025-03-31T16:21:10.852500Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveParticipantsDecision 0 2025-03-31T16:21:10.853896Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvClientConnected 2025-03-31T16:21:10.853944Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Connected to tablet 22222 2025-03-31T16:21:10.855706Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [6:329:2308], now have 1 active actors on pipe 2025-03-31T16:21:10.855856Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvReadSet Step: 100 TxId: 67890 TabletSource: 22222 TabletDest: 72057594037927937 TabletProducer: 22222 ReadSet: "\010\001" Seqno: 0 2025-03-31T16:21:10.855894Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] Handle TEvReadSet 2025-03-31T16:21:10.855943Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] Predicates 1/1 2025-03-31T16:21:10.855972Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state WAIT_RS 2025-03-31T16:21:10.855999Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State WAIT_RS 2025-03-31T16:21:10.856037Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 State WAIT_RS FrontTxId 67890 2025-03-31T16:21:10.856074Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveParticipantsDecision 1 2025-03-31T16:21:10.856126Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState EXECUTING 2025-03-31T16:21:10.856181Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from WAIT_RS to EXECUTING 2025-03-31T16:21:10.856232Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 0, Expected 1 2025-03-31T16:21:10.856290Z node 6 :PERSQUEUE DEBUG: Connected to tablet 72057594037927937 from tablet 22222 2025-03-31T16:21:10.856344Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 100, TxId 67890 2025-03-31T16:21:10.856395Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 67890 2025-03-31T16:21:10.856551Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-31T16:21:10.856588Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-31T16:21:10.856629Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-31T16:21:10.856662Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-31T16:21:10.856691Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-03-31T16:21:10.856714Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cuser 2025-03-31T16:21:10.856735Z node 6 
:PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uuser 2025-03-31T16:21:10.856774Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-31T16:21:10.856826Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-03-31T16:21:10.856900Z node 6 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-03-31T16:21:10.858848Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-03-31T16:21:10.858929Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvTxCommitDone Step 100, TxId 67890, Partition 0 2025-03-31T16:21:10.858962Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state EXECUTING 2025-03-31T16:21:10.858992Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State EXECUTING 2025-03-31T16:21:10.859019Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 State EXECUTING FrontTxId 67890 2025-03-31T16:21:10.859051Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 1, Expected 1 2025-03-31T16:21:10.859112Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId: 67890 send TEvPersQueue::TEvProposeTransactionResult(COMPLETE) 2025-03-31T16:21:10.859147Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] complete TxId 67890 2025-03-31T16:21:10.859180Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] delete partitions for TxId 67890 2025-03-31T16:21:10.859221Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState EXECUTED 2025-03-31T16:21:10.859260Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from EXECUTING to EXECUTED 2025-03-31T16:21:10.859386Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: EXECUTED MinStep: 134 MaxStep: 30134 PredicatesReceived { TabletId: 22222 Predicate: true } PredicateRecipients: 22222 Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Step: 100 Predicate: true Kind: KIND_DATA SourceActor { RawX1: 177 RawX2: 25769805968 } Partitions { } 2025-03-31T16:21:10.859442Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-03-31T16:21:10.862122Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-03-31T16:21:10.862166Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state EXECUTED 2025-03-31T16:21:10.862196Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State EXECUTED 2025-03-31T16:21:10.862232Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 State EXECUTED FrontTxId 67890 2025-03-31T16:21:10.862280Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TPersQueue::SendEvReadSetAckToSenders 2025-03-31T16:21:10.862324Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSetAck {TEvReadSet step# 100 txid# 67890 TabletSource# 22222 TabletDest# 72057594037927937 SetTabletConsumer# 72057594037927937 Flags# 0 Seqno# 0} 2025-03-31T16:21:10.862361Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState WAIT_RS_ACKS 2025-03-31T16:21:10.862403Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from EXECUTED to WAIT_RS_ACKS 2025-03-31T16:21:10.862433Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] PredicateAcks: 0/1 2025-03-31T16:21:10.862451Z node 6 :PERSQUEUE DEBUG: [PQ: 
72057594037927937] HaveAllRecipientsReceive 0, AllSupportivePartitionsHaveBeenDeleted 1 2025-03-31T16:21:10.862472Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] PredicateAcks: 0/1 >> TPQTest::TestUserInfoCompatibility [GOOD] >> TPQTest::TestWaitInOwners >> TPartitionTests::DataTxCalcPredicateOk [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::GetUsedStorage [GOOD] Test command err: 2025-03-31T16:21:03.155261Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:03.155352Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-31T16:21:03.173017Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [1:179:2194] 2025-03-31T16:21:03.174608Z node 1 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-03-31T16:21:03.000000Z 2025-03-31T16:21:03.174668Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [1:179:2194] Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\230\313\206\350\3362" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\230\313\206\350\3362" StorageChannel: INLINE } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\262\222\004" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\230\313\206\350\3362" StorageChannel: INLINE } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\263\222\004" StorageChannel: INLINE } 2025-03-31T16:21:03.908939Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:03.909011Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-31T16:21:03.924218Z node 2 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:{2, {0, 10}, 100001}:Initializer] Start initializing step TInitConfigStep 2025-03-31T16:21:03.924435Z node 2 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:{2, {0, 10}, 100001}:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-31T16:21:03.924663Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateInit] bootstrapping {2, {0, 10}, 100001} [2:178:2193] 2025-03-31T16:21:03.925653Z node 2 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:{2, {0, 10}, 100001}:Initializer] Initializing completed. 
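The Read_TEvTxCommit_After_Restart trace above carries transaction 67890 through the coordinator states PLANNED -> CALCULATING -> CALCULATED -> WAIT_RS -> EXECUTING -> EXECUTED -> WAIT_RS_ACKS, persisting the tx at each durable point (WRITE_TX_COOKIE) and exchanging TEvReadSet / TEvReadSetAck with participant tablet 22222 along the way. A compact sketch of that progression follows; the state names are taken from the log, but the linear transition logic is a simplification assumed for illustration, not YDB's actual guard conditions.

// Sketch: the PQ tablet transaction states observed in the trace above.
#include <iostream>

enum class ETxState {
    Planned,      // tx sits in TxQueue with a plan step assigned
    Calculating,  // partitions compute their commit predicates
    Calculated,   // all predicate results received, tx persisted
    WaitRs,       // waiting for TEvReadSet from other participants
    Executing,    // partitions apply TEvTxCommit
    Executed,     // result sent, TEvReadSetAck returned to senders
    WaitRsAcks,   // waiting for acks before the tx can be forgotten
};

ETxState Next(ETxState s) {
    switch (s) {
        case ETxState::Planned:     return ETxState::Calculating;
        case ETxState::Calculating: return ETxState::Calculated;
        case ETxState::Calculated:  return ETxState::WaitRs;
        case ETxState::WaitRs:      return ETxState::Executing;
        case ETxState::Executing:   return ETxState::Executed;
        case ETxState::Executed:    return ETxState::WaitRsAcks;
        default:                    return s; // terminal until all acks arrive
    }
}

int main() {
    const char* names[] = {"PLANNED", "CALCULATING", "CALCULATED",
                           "WAIT_RS", "EXECUTING", "EXECUTED", "WAIT_RS_ACKS"};
    ETxState s = ETxState::Planned;
    for (int i = 0; i < 6; ++i) {
        ETxState n = Next(s);
        std::cout << "TxId 67890 moved from " << names[static_cast<int>(s)]
                  << " to " << names[static_cast<int>(n)] << "\n";
        s = n;
    }
}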
2025-03-31T16:21:03.925720Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} generation 0 [2:178:2193] 2025-03-31T16:21:03.925775Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partitition {2, {0, 10}, 100001} so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-31T16:21:03.925825Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Process pending events. Count 0 2025-03-31T16:21:03.925996Z node 2 :PERSQUEUE INFO: new Cookie owner1|bc794b73-6c727860-b213f503-9ff9e086_0 generated for partition {2, {0, 10}, 100001} topic 'Root/PQ/rt3.dc1--account--topic' owner owner1 2025-03-31T16:21:03.926100Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] TPartition::ReplyOwnerOk. Partition: {2, {0, 10}, 100001} 2025-03-31T16:21:03.926454Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} part blob processing sourceId 'SourceId' seqNo 2 partNo 0 2025-03-31T16:21:03.927388Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} part blob complete sourceId 'SourceId' seqNo 2 partNo 0 FormedBlobsCount 0 NewHead: Offset 100 PartNo 0 PackedSize 118 count 1 nextOffset 101 batches 1 2025-03-31T16:21:03.927982Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} compactOffset 100,1 HeadOffset 0 endOffset 0 curOffset 101 D0000100001_00000000000000000100_00000_0000000001_00000| size 104 WTime 128 2025-03-31T16:21:03.928145Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] === DumpKeyValueRequest === 2025-03-31T16:21:03.928198Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] --- delete ---------------- 2025-03-31T16:21:03.928251Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] [X0000100001, X0000100002) 2025-03-31T16:21:03.928289Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] --- write ----------------- 2025-03-31T16:21:03.928322Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] M0000100001pSourceId 2025-03-31T16:21:03.928351Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] D0000100001_00000000000000000100_00000_0000000001_00000| 2025-03-31T16:21:03.928394Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] J0000100001 2025-03-31T16:21:03.928423Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] --- rename ---------------- 2025-03-31T16:21:03.928461Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] =========================== 2025-03-31T16:21:03.969924Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 
10}, 100001}, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 22 WriteNewSizeFromSupportivePartitions# 0 2025-03-31T16:21:03.970052Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] TPartition::ReplyWrite. Partition: {2, {0, 10}, 100001} 2025-03-31T16:21:03.970135Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Answering for message sourceid: 'SourceId', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: {2, {0, 10}, 100001}, SeqNo: 2, partNo: 0, Offset: 100 is stored on disk 2025-03-31T16:21:04.241512Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} part blob processing sourceId 'SourceId' seqNo 4 partNo 0 2025-03-31T16:21:04.242464Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} part blob complete sourceId 'SourceId' seqNo 4 partNo 0 FormedBlobsCount 0 NewHead: Offset 101 PartNo 0 PackedSize 118 count 1 nextOffset 102 batches 1 2025-03-31T16:21:04.242918Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} compactOffset 101,1 HeadOffset 100 endOffset 101 curOffset 102 D0000100001_00000000000000000101_00000_0000000001_00000| size 104 WTime 1129 2025-03-31T16:21:04.243040Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] === DumpKeyValueRequest === 2025-03-31T16:21:04.243076Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] --- delete ---------------- 2025-03-31T16:21:04.243127Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] [X0000100001, X0000100002) 2025-03-31T16:21:04.243154Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] --- write ----------------- 2025-03-31T16:21:04.243181Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] M0000100001pSourceId 2025-03-31T16:21:04.243205Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] D0000100001_00000000000000000101_00000_0000000001_00000| 2025-03-31T16:21:04.243229Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] J0000100001 2025-03-31T16:21:04.243253Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] --- rename ---------------- 2025-03-31T16:21:04.243308Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] =========================== 2025-03-31T16:21:04.274328Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 22 WriteNewSizeFromSupportivePartitions# 0 2025-03-31T16:21:04.274458Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] TPartition::ReplyWrite. 
Partition: {2, {0, 10}, 100001} 2025-03-31T16:21:04.274528Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Answering for message sourceid: 'SourceId', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: {2, {0, 10}, 100001}, SeqNo: 4, partNo: 0, Offset: 101 is stored on disk 2025-03-31T16:21:04.482884Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} part blob processing sourceId 'SourceId' seqNo 6 partNo 0 2025-03-31T16:21:04.485234Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} part blob complete sourceId 'SourceId' seqNo 6 partNo 0 FormedBlobsCount 0 NewHead: Offset 102 PartNo 0 PackedSize 118 count 1 nextOffset 103 batches 1 2025-03-31T16:21:04.485614Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} compactOffset 102,1 HeadOffset 100 endOffset 102 curOffset 103 D0000100001_00000000000000000102_00000_0000000001_00000| size 104 WTime 2130 2025-03-31T16:21:04.485705Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] === DumpKeyValueRequest === 2025-03-31T16:21:04.485727Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] --- delete ---------------- 2025-03-31T16:21:04.485748Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] [X0000100001, X0000100002) 2025-03-31T16:21:04.485769Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] --- write ----------------- 2025-03-31T16:21:04.485790Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] M0000100001pSourceId 2025-03-31T16:21:04.485808Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] D0000100001_00000000000000000102_00000_0000000001_00000| 2025-03-31T16:21:04.485822Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] J0000100001 2025-03-31T16:21:04.485839Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] --- rename ----- ... 
rc2' seqNo 10 partNo 0 2025-03-31T16:21:09.004767Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src2' seqNo 10 partNo 0 FormedBlobsCount 0 NewHead: Offset 70 PartNo 0 PackedSize 552 count 10 nextOffset 80 batches 1 2025-03-31T16:21:09.005321Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 0 compactOffset 70,10 HeadOffset 20 endOffset 25 curOffset 80 d0000000000_00000000000000000070_00000_0000000010_00000| size 299 WTime 11239 2025-03-31T16:21:09.005476Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-31T16:21:09.005515Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-31T16:21:09.005560Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-03-31T16:21:09.005593Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-31T16:21:09.005630Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] d0000000000_00000000000000000020_00000_0000000005_00000 2025-03-31T16:21:09.005663Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psrc2 2025-03-31T16:21:09.005685Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] d0000000000_00000000000000000070_00000_0000000010_00000| 2025-03-31T16:21:09.005705Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-31T16:21:09.005739Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-31T16:21:09.005775Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Got batch complete: 10 Got KV request Got KV request Send disk status response with cookie: 0 2025-03-31T16:21:09.026538Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 170 WriteNewSizeFromSupportivePartitions# 0 2025-03-31T16:21:09.026617Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-31T16:21:09.026693Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src2', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 1, partNo: 0, Offset: 70 is stored on disk 2025-03-31T16:21:09.026741Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-31T16:21:09.026769Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src2', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 2, partNo: 0, Offset: 71 is stored on disk 2025-03-31T16:21:09.026790Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-03-31T16:21:09.026814Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src2', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 3, partNo: 0, Offset: 72 is stored on disk 2025-03-31T16:21:09.026853Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-31T16:21:09.026886Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src2', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 4, partNo: 0, Offset: 73 is stored on disk 2025-03-31T16:21:09.026906Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-31T16:21:09.026931Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src2', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 5, partNo: 0, Offset: 74 is stored on disk 2025-03-31T16:21:09.026950Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-31T16:21:09.026973Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src2', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 6, partNo: 0, Offset: 75 is stored on disk 2025-03-31T16:21:09.026991Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-31T16:21:09.027014Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src2', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 7, partNo: 0, Offset: 76 is stored on disk 2025-03-31T16:21:09.027035Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-31T16:21:09.027063Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src2', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 8, partNo: 0, Offset: 77 is stored on disk 2025-03-31T16:21:09.027096Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-31T16:21:09.027126Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src2', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 9, partNo: 0, Offset: 78 is stored on disk 2025-03-31T16:21:09.027153Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-03-31T16:21:09.027189Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src2', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 10, partNo: 0, Offset: 79 is stored on disk 2025-03-31T16:21:09.027422Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-31T16:21:09.027459Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-31T16:21:09.027495Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [d0000000000_00000000000000000020_00000_0000000005_00000|, d0000000000_00000000000000000020_00000_0000000005_00000|] 2025-03-31T16:21:09.027525Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-31T16:21:09.027561Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-31T16:21:09.027594Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-31T16:21:09.027629Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Create distr tx with id = 8 and act no: 9 Create immediate tx with id = 10 and act no: 11 Create distr tx with id = 12 and act no: 13 2025-03-31T16:21:09.027864Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 8 2025-03-31T16:21:09.027967Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 12 2025-03-31T16:21:09.967279Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-03-31T16:21:09.967382Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 Wait batch completion 2025-03-31T16:21:09.967558Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-03-31T16:21:09.967610Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-03-31T16:21:09.967674Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] schedule TEvPersQueue::TEvProposeTransactionResult(ABORTED), reason=MinSeqNo violation failure on src2 Got batch complete: 3 2025-03-31T16:21:10.194417Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 12 2025-03-31T16:21:10.194503Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 12 2025-03-31T16:21:10.194564Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-03-31T16:21:10.194619Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 70 PartNo 0 PackedSize 299 count 10 nextOffset 80 batches 1, NewHead=Offset 80 PartNo 0 PackedSize 0 count 0 nextOffset 80 batches 0 2025-03-31T16:21:10.194819Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-31T16:21:10.194856Z 
node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-31T16:21:10.194883Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-31T16:21:10.194914Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psrcId2 2025-03-31T16:21:10.194933Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-31T16:21:10.194948Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-31T16:21:10.194961Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-03-31T16:21:10.194984Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-31T16:21:10.195014Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Send disk status response with cookie: 0 Wait immediate tx complete 10 2025-03-31T16:21:10.226090Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 1 Got propose resutl: Origin: 72057594037927937 Status: ABORTED TxId: 10 Errors { Kind: BAD_REQUEST Reason: "MinSeqNo violation failure on src2" } Wait tx committed for tx 12 2025-03-31T16:21:10.559603Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:10.559675Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-31T16:21:10.577994Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateInit] bootstrapping {2, {0, 10}, 100001} [5:179:2194] 2025-03-31T16:21:10.579854Z node 5 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:{2, {0, 10}, 100001}:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
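The TTypeCodecsTest block below reports chunk sizes and timings for several integer codecs, among them varint- and zigzag-based ones. For reference, here is a minimal sketch of the textbook ZigZag transform those test names refer to; this is the standard mapping assumed from the names, not code taken from YDB's codec implementation. ZigZag folds signed integers into small unsigned ones (0, -1, 1, -2, 2 -> 0, 1, 2, 3, 4) so a varint encoder spends few bytes on values near zero of either sign.

// Sketch: textbook ZigZag transform, as used by varint-style codecs.
#include <cassert>
#include <cstdint>

uint32_t ZigZagEncode(int32_t n) {
    // Arithmetic shift replicates the sign bit; XOR flips negatives.
    return (static_cast<uint32_t>(n) << 1) ^ static_cast<uint32_t>(n >> 31);
}

int32_t ZigZagDecode(uint32_t u) {
    return static_cast<int32_t>(u >> 1) ^ -static_cast<int32_t>(u & 1);
}

int main() {
    for (int32_t v : {0, -1, 1, -2, 2, 123456, -123456}) {
        assert(ZigZagDecode(ZigZagEncode(v)) == v); // round-trip holds
    }
    assert(ZigZagEncode(-1) == 1 && ZigZagEncode(2) == 4);
    return 0;
}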
2025-03-31T16:21:10.579956Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} generation 0 [5:179:2194] >> KqpPg::CreateNotNullPgColumn [GOOD] >> KqpPg::CreateSequence >> TPartitionTests::ShadowPartitionCountersRestore >> TPQTest::TestDirectReadHappyWay >> TPartitionTests::DataTxCalcPredicateError >> TTypeCodecsTest::TestFixedLenCodec [GOOD] >> TTypeCodecsTest::TestVarLenCodec >> TTypeCodecsTest::TestVarLenCodec [GOOD] >> TTypeCodecsTest::TestVarIntCodec [GOOD] >> TTypeCodecsTest::TestZigZagCodec [GOOD] >> TTypeCodecsTest::TestDeltaZigZagCodec [GOOD] >> TPartitionTests::ShadowPartitionCountersRestore [GOOD] >> TPartitionTests::TestBatchingWithChangeConfig >> TPQTest::PQ_Tablet_Does_Not_Remove_The_Blob_Until_The_Reading_Is_Complete [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TTypeCodecsTest::TestDeltaZigZagCodec [GOOD] Test command err:
Size: 8002 Create chunk: 0.000226s Read by index: 0.000025s Iterate: 0.000149s
Size: 8256 Create chunk: 0.000209s Read by index: 0.000032s Iterate: 0.000070s
Size: 8532 Create chunk: 0.000130s Read by index: 0.000041s Iterate: 0.000037s
Size: 7769 Create chunk: 0.000140s Read by index: 0.000062s Iterate: 0.000038s
Size: 2853 Create chunk: 0.000080s Read by index: 0.000072s Iterate: 0.000035s
Size: 2419 Create chunk: 0.000129s Read by index: 0.000078s Iterate: 0.000044s
Size: 2929 Create chunk: 0.000094s Read by index: 0.000096s Iterate: 0.000060s
Size: 2472 Create chunk: 0.000117s Read by index: 0.000083s Iterate: 0.000046s
Size: 2407 Create chunk: 0.000100s Read by index: 0.000094s Iterate: 0.000045s
Size: 2061 Create chunk: 0.000115s Read by index: 0.000091s Iterate: 0.000052s
>> TListAllTopicsTests::PlainList [GOOD] >> TListAllTopicsTests::RecursiveList ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::PQ_Tablet_Does_Not_Remove_The_Blob_Until_The_Reading_Is_Complete [GOOD] Test command err: 2025-03-31T16:21:05.966876Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-03-31T16:21:05.971366Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Transactions request.
From tx_00000000000000000000, To tx_18446744073709551615 2025-03-31T16:21:05.971703Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] doesn't have tx info 2025-03-31T16:21:05.971757Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-31T16:21:05.971815Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] no config, start with empty partitions and default config 2025-03-31T16:21:05.971856Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2025-03-31T16:21:05.971921Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:05.971987Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-31T16:21:05.989201Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:178:2193], now have 1 active actors on pipe 2025-03-31T16:21:05.989358Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-03-31T16:21:06.006781Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-03-31T16:21:06.009748Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-03-31T16:21:06.009910Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:06.011695Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 FederationAccount: 
"federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-03-31T16:21:06.011850Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitConfigStep 2025-03-31T16:21:06.011945Z node 1 :PERSQUEUE DEBUG: [topic:1:Initializer] Start initializing step TInitConfigStep 2025-03-31T16:21:06.012475Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-31T16:21:06.012882Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:186:2199] 2025-03-31T16:21:06.013836Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Initializing completed. 2025-03-31T16:21:06.013899Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [1:186:2199] 2025-03-31T16:21:06.013972Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-31T16:21:06.014474Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-03-31T16:21:06.014597Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2025-03-31T16:21:06.014643Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2025-03-31T16:21:06.014781Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-31T16:21:06.014819Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-31T16:21:06.014874Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-31T16:21:06.014909Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-31T16:21:06.014955Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cuser 2025-03-31T16:21:06.014979Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uuser 2025-03-31T16:21:06.015011Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-31T16:21:06.015057Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-03-31T16:21:06.015189Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-03-31T16:21:06.015343Z node 1 :PERSQUEUE DEBUG: [topic:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-31T16:21:06.015578Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:188:2201] 2025-03-31T16:21:06.016292Z node 1 :PERSQUEUE DEBUG: [topic:1:Initializer] Initializing completed. 
2025-03-31T16:21:06.016342Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'topic' partition 1 generation 2 [1:188:2201] 2025-03-31T16:21:06.016392Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateInit] SYNC INIT topic topic partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-31T16:21:06.016739Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Process pending events. Count 0 2025-03-31T16:21:06.016801Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user reinit request with generation 1 2025-03-31T16:21:06.016848Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user reinit with generation 1 done 2025-03-31T16:21:06.016935Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] === DumpKeyValueRequest === 2025-03-31T16:21:06.016961Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- delete ---------------- 2025-03-31T16:21:06.016986Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- write ----------------- 2025-03-31T16:21:06.017012Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] i0000000001 2025-03-31T16:21:06.017034Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001cuser 2025-03-31T16:21:06.017054Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001uuser 2025-03-31T16:21:06.017076Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- rename ---------------- 2025-03-31T16:21:06.017107Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] =========================== 2025-03-31T16:21:06.017178Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-03-31T16:21:06.017279Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-03-31T16:21:06.017394Z node 1 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2025-03-31T16:21:06.021954Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-03-31T16:21:06.022204Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-03-31T16:21:06.022615Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:201:2210], now have 1 active actors on pipe 2025-03-31T16:21:06.023356Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:204:2212], now have 1 active actors on pipe 2025-03-31T16:21:06.024270Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 177 RawX2: 4294969488 } TxId: 67890 Config { TabletConfig { PartitionConfig { LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 10485760 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--account--topic" Version: 2 LocalDC: true TopicPath: "/Root/PQ/rt3.dc1--account--topic" YdbDatabasePath: "" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } FederationAccount: "account" MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "client-1" Generation: 0 Important: false } Consumers { Name: "client-3" Generation: 7 Important: false } } BootstrapConfig { } } 2025-03-31T16:21:06.025349Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Propose TxId 67890, WriteId (empty maybe) 2025-03-31T16:21:06.025418Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state UNKNOWN 2025-03-31T16:21:06.025472Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State UNKNOWN 2025-03-31T16:21:06.025515Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2025-03-31T16:21:06.025557Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PREPARING 2025-03-31T16:21:06.025824Z node 1 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: PREPARED MinStep: 231 MaxStep: 18446744073709551615 Kind: KIND_CONFIG TabletConfig { PartitionConfig { LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 10485760 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--account--topic" Version: 2 LocalDC: true TopicPath: "/Root/PQ/rt3.dc1--account--topic" Ydb ... 
mAnswer processing batch offset 2 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-03-31T16:21:11.552086Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 2 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-03-31T16:21:11.553127Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 2 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-03-31T16:21:11.554210Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 2 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-03-31T16:21:11.555186Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 2 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-03-31T16:21:11.556251Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 2 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-03-31T16:21:11.557065Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 2 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-03-31T16:21:11.557875Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 2 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-03-31T16:21:11.558690Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 2 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-03-31T16:21:11.559054Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 2 totakecount 1 count 1 size 172682 from pos 0 cbcount 1 2025-03-31T16:21:11.560347Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-03-31T16:21:11.561162Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-03-31T16:21:11.561984Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-03-31T16:21:11.562788Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-03-31T16:21:11.563646Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-03-31T16:21:11.564508Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-03-31T16:21:11.565389Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-03-31T16:21:11.566375Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-03-31T16:21:11.567471Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-03-31T16:21:11.568533Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-03-31T16:21:11.569604Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-03-31T16:21:11.570680Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-03-31T16:21:11.571765Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-03-31T16:21:11.572907Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-03-31T16:21:11.573369Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 3 totakecount 1 
count 1 size 172682 from pos 0 cbcount 1 2025-03-31T16:21:11.576281Z node 10 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 123 2025-03-31T16:21:11.614838Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [10:444:2420], now have 1 active actors on pipe 2025-03-31T16:21:11.615019Z node 10 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-03-31T16:21:11.615103Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-03-31T16:21:11.615186Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message topic: topic partition: 0 SourceId: 'sourceid1' SeqNo: 15 partNo : 0 messageNo: 1 size 102400 offset: 14 2025-03-31T16:21:11.615322Z node 10 :PERSQUEUE DEBUG: tablet 72057594037927937 topic 'topic' partition 0 error: new GetOwnership request needed for owner 2025-03-31T16:21:11.615482Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-31T16:21:11.615544Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-31T16:21:11.615604Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [d0000000000_00000000000000000002_00000_0000000001_00014, d0000000000_00000000000000000002_00000_0000000001_00014] 2025-03-31T16:21:11.615654Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [d0000000000_00000000000000000003_00000_0000000001_00014, d0000000000_00000000000000000003_00000_0000000001_00014] 2025-03-31T16:21:11.615696Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-31T16:21:11.615748Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-31T16:21:11.615803Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-31T16:21:11.615857Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-03-31T16:21:11.615980Z node 10 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-03-31T16:21:11.616038Z node 10 :PERSQUEUE DEBUG: CacheProxy. Delete blobs from d0000000000_00000000000000000002_00000_0000000001_00014(+) to d0000000000_00000000000000000002_00000_0000000001_00014(+) 2025-03-31T16:21:11.616073Z node 10 :PERSQUEUE DEBUG: CacheProxy. 
Delete blobs from d0000000000_00000000000000000003_00000_0000000001_00014(+) to d0000000000_00000000000000000003_00000_0000000001_00014(+) 2025-03-31T16:21:11.616152Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvError Cookie 45, Error new GetOwnership request needed for owner 2025-03-31T16:21:11.616200Z node 10 :PERSQUEUE DEBUG: Answer error topic: 'topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2025-03-31T16:21:11.619168Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-03-31T16:21:11.622342Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [10:456:2431], now have 1 active actors on pipe 2025-03-31T16:21:11.622453Z node 10 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-03-31T16:21:11.622501Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-03-31T16:21:11.622602Z node 10 :PERSQUEUE INFO: new Cookie default|60edbb24-3f88434e-7d8c8ae4-adc4f1de_14 generated for partition 0 topic 'topic' owner default 2025-03-31T16:21:11.622701Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2025-03-31T16:21:11.622771Z node 10 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-03-31T16:21:11.623063Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [10:458:2433], now have 1 active actors on pipe 2025-03-31T16:21:11.623144Z node 10 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-03-31T16:21:11.623175Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-03-31T16:21:11.623214Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message topic: topic partition: 0 SourceId: 'sourceid1' SeqNo: 15 partNo : 0 messageNo: 0 size 102400 offset: 14 2025-03-31T16:21:11.623282Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Send write quota request. Topic: "topic". Partition: 0. Amount: 102409. Cookie: 15 2025-03-31T16:21:11.883071Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Got quota. Topic: "topic". 
Partition: 0: Cookie: 15 2025-03-31T16:21:11.883317Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 part blob processing sourceId 'sourceid1' seqNo 15 partNo 0 2025-03-31T16:21:11.884310Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 part blob complete sourceId 'sourceid1' seqNo 15 partNo 0 FormedBlobsCount 0 NewHead: Offset 14 PartNo 0 PackedSize 102472 count 1 nextOffset 15 batches 1 2025-03-31T16:21:11.885385Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Add new write blob: topic 'topic' partition 0 compactOffset 14,1 HeadOffset 14 endOffset 14 curOffset 15 d0000000000_00000000000000000014_00000_0000000001_00000| size 102462 WTime 2103 2025-03-31T16:21:11.885700Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-31T16:21:11.885761Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-31T16:21:11.885831Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-03-31T16:21:11.885884Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-31T16:21:11.885936Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psourceid1 2025-03-31T16:21:11.885975Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] d0000000000_00000000000000000014_00000_0000000001_00000| 2025-03-31T16:21:11.886004Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-31T16:21:11.886042Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-31T16:21:11.886097Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-03-31T16:21:11.886315Z node 10 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-03-31T16:21:11.886449Z node 10 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 14 partNo 0 count 1 size 102462 2025-03-31T16:21:11.891242Z node 10 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 14 count 1 size 102462 actorID [10:134:2160] 2025-03-31T16:21:11.891390Z node 10 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. Tablet '72057594037927937' partition 0 offset 14 partno 0 count 1 parts 0 size 102462 2025-03-31T16:21:11.891513Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 102409 WriteNewSizeFromSupportivePartitions# 0 2025-03-31T16:21:11.891595Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-03-31T16:21:11.891689Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'sourceid1', Topic: 'topic', Partition: 0, SeqNo: 15, partNo: 0, Offset: 14 is stored on disk 2025-03-31T16:21:11.892075Z node 10 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-03-31T16:21:11.892654Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [10:469:2442], now have 1 active actors on pipe >> TPartitionTests::Batching >> TPQTest::TestDirectReadHappyWay [GOOD] >> TPQTest::TestDescribeBalancer >> TPQTabletTests::Multiple_PQTablets_1 >> TPartitionTests::Batching [GOOD] >> TPartitionTests::ConflictingSrcIdForTxWithHead [GOOD] >> TPQTabletTests::Multiple_PQTablets_1 [GOOD] >> TPartitionTests::CommitOffsetRanges >> KqpLimits::QSReplySize-useSink [GOOD] >> TPQTabletTests::Multiple_PQTablets_2 >> TPartitionTests::ConflictingActsInSeveralBatches [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultTextNotNullButNull-useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultNegativeCase+useSink >> TPartitionTests::SetOffset >> TPartitionTests::ConflictingSrcIdForTxInDifferentBatches >> TPQTabletTests::Multiple_PQTablets_2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::ConflictingSrcIdForTxWithHead [GOOD] Test command err: 2025-03-31T16:21:03.201765Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:03.201873Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-31T16:21:03.218395Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [1:179:2194] 2025-03-31T16:21:03.219936Z node 1 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-03-31T16:21:03.000000Z 2025-03-31T16:21:03.219991Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [1:179:2194] Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\230\313\206\350\3362" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\230\313\206\350\3362" StorageChannel: INLINE } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\262\222\004" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient" Value: "\010\002\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\002\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } 2025-03-31T16:21:03.971958Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:03.972015Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-31T16:21:04.375832Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:04.375919Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-31T16:21:04.391573Z node 3 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitConfigStep Got KV request 2025-03-31T16:21:04.391790Z node 3 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-31T16:21:04.392113Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [3:178:2193] 2025-03-31T16:21:04.393081Z node 3 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDiskStatusStep Got KV request 2025-03-31T16:21:04.393254Z node 3 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitMetaStep Got KV request 2025-03-31T16:21:04.393428Z node 3 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInfoRangeStep Got KV request 2025-03-31T16:21:04.393630Z node 3 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataRangeStep Got KV request 2025-03-31T16:21:04.394063Z node 3 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:TInitDataRangeStep] Got data offset 0 count 50 size 684 so 0 eo 50 d0000000000_00000000000000000000_00000_0000000050_00000 2025-03-31T16:21:04.394160Z node 3 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataStep 2025-03-31T16:21:04.394203Z node 3 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-03-31T16:21:04.394253Z node 3 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-03-31T16:21:04.000000Z 2025-03-31T16:21:04.394292Z node 3 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Initializing completed. 
2025-03-31T16:21:04.394346Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [3:178:2193]
2025-03-31T16:21:04.394430Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partition 0 so 0 endOffset 50 Head Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 SYNC INIT DATA KEY: d0000000000_00000000000000000000_00000_0000000050_00000 size 684
2025-03-31T16:21:04.394482Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0
Create distr tx with id = 0 and act no: 1
Create distr tx with id = 2 and act no: 3
Create immediate tx with id = 4 and act no: 5
2025-03-31T16:21:05.677522Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 0
2025-03-31T16:21:05.677620Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 2
2025-03-31T16:21:07.002741Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse
2025-03-31T16:21:07.002872Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse
2025-03-31T16:21:07.002919Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse
Got batch complete: 1
Wait batch completion
Got batch complete: 2
Wait batch completion
Wait kv request
2025-03-31T16:21:07.241258Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 2
2025-03-31T16:21:07.241344Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 2
2025-03-31T16:21:07.241407Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0
2025-03-31T16:21:07.241467Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0, NewHead=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0
2025-03-31T16:21:07.241541Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: (empty maybe)
2025-03-31T16:21:07.241594Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0
2025-03-31T16:21:07.241649Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0, NewHead=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0
2025-03-31T16:21:07.260004Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] schedule TEvPersQueue::TEvProposeTransactionResult(COMPLETE), reason=
2025-03-31T16:21:07.260306Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest ===
2025-03-31T16:21:07.260359Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ----------------
2025-03-31T16:21:07.260402Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write -----------------
2025-03-31T16:21:07.260445Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psrc2
2025-03-31T16:21:07.260480Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psrc1
2025-03-31T16:21:07.260505Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000
2025-03-31T16:21:07.260532Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000
2025-03-31T16:21:07.260556Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000
2025-03-31T16:21:07.260593Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ----------------
2025-03-31T16:21:07.260638Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] ===========================
Got KV request
Wait tx committed for tx 2
2025-03-31T16:21:07.291863Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 2
Wait immediate tx complete 4
Got propose result: Origin: 72057594037927937 Status: COMPLETE TxId: 4
2025-03-31T16:21:07.797687Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig
2025-03-31T16:21:07.797741Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info
2025-03-31T16:21:07.810602Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitConfigStep
Got KV request
2025-03-31T16:21:07.810789Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInternalFieldsStep
2025-03-31T16:21:07.811053Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [4:176:2191]
2025-03-31T16:21:07.812023Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDiskStatusStep
Got KV request
2025-03-31T16:21:07.812215Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitMetaStep
Got KV request
2025-03-31T16:21:07.812382Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInfoRangeStep
Got KV request
2025-03-31T16:21:07.812577Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataRangeStep
Got KV request
2025-03-31T16:21:07.812991Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:TInitDataRangeStep] Got data offset 0 count 1 size 684 so 0 eo 1 d0000000000_00000000000000000000_00000_0000000001_00000
2025-03-31T16:21:07.813102Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataStep
2025-03-31T16:21:07.813134Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep
2025-03-31T16:21:07.813172Z node 4 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-03-31T16:21:07.000000Z
2025-03-31T16:21:07.813201Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Initializing completed.
2025-03-31T16:21:07.813240Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [4:176:2191]
2025-03-31T16:21:07.813322Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partition 0 so 0 endOffset 1 Head Offset 1 PartNo 0 PackedSize 0 count 0 nextOffset 1 batches 0 SYNC INIT DATA KEY: d0000000000_00000000000000000000_00000_0000000001_00000 size 684
2025-03-31T16:21:07.813378Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0
2025-03-31T16:21:08.126207Z node 4 :PERSQUEUE INFO: new Cookie src1|e3e5411e-369a36b7-7db7419d-107b3866_0 generated for partition 0 topic 'Root/PQ/rt3.dc1--account--topic' owner src1
2025-03-31T16:21:08.126359Z node 4 :PERSQUEUE DEBUG ... /rt3.dc1--account--topic:0:Initializer] Start initializing step TInitMetaStep
Got KV request
2025-03-31T16:21:11.220468Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInfoRangeStep
Got KV request
2025-03-31T16:21:11.220668Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataRangeStep
Got KV request
2025-03-31T16:21:11.221121Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:TInitDataRangeStep] Got data offset 0 count 1 size 684 so 0 eo 1 d0000000000_00000000000000000000_00000_0000000001_00000
2025-03-31T16:21:11.221231Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataStep
2025-03-31T16:21:11.221275Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep
2025-03-31T16:21:11.221362Z node 5 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-03-31T16:21:11.000000Z
2025-03-31T16:21:11.221409Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Initializing completed.
2025-03-31T16:21:11.221478Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [5:178:2193]
2025-03-31T16:21:11.221539Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partition 0 so 0 endOffset 1 Head Offset 1 PartNo 0 PackedSize 0 count 0 nextOffset 1 batches 0 SYNC INIT DATA KEY: d0000000000_00000000000000000000_00000_0000000001_00000 size 684
2025-03-31T16:21:11.221583Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0
2025-03-31T16:21:11.531351Z node 5 :PERSQUEUE INFO: new Cookie src1|b7a81408-6f238aaa-e11896e6-a084c49a_0 generated for partition 0 topic 'Root/PQ/rt3.dc1--account--topic' owner src1
2025-03-31T16:21:11.531462Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0
Got batch complete: 1
Create distr tx with id = 0 and act no: 1
2025-03-31T16:21:12.478007Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 0
2025-03-31T16:21:13.648533Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse
Got batch complete: 1
Wait batch completion
Wait 1st KV request
Wait kv request
2025-03-31T16:21:13.648817Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 0
2025-03-31T16:21:13.648899Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 0
2025-03-31T16:21:13.648967Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=1
2025-03-31T16:21:13.649024Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 1 PartNo 0 PackedSize 0 count 0 nextOffset 1 batches 0, NewHead=Offset 1 PartNo 0 PackedSize 0 count 0 nextOffset 1 batches 0
2025-03-31T16:21:13.649159Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src1' seqNo 10 partNo 0
2025-03-31T16:21:13.650196Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src1' seqNo 10 partNo 0 FormedBlobsCount 0 NewHead: Offset 1 PartNo 0 PackedSize 85 count 1 nextOffset 2 batches 1
2025-03-31T16:21:13.650937Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 0 compactOffset 1,1 HeadOffset 1 endOffset 1 curOffset 2 d0000000000_00000000000000000001_00000_0000000001_00000| size 71 WTime 10139
2025-03-31T16:21:13.651198Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest ===
2025-03-31T16:21:13.651253Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ----------------
2025-03-31T16:21:13.651292Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [x0000000000, x0000000001)
2025-03-31T16:21:13.651337Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write -----------------
2025-03-31T16:21:13.651380Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psrc1
2025-03-31T16:21:13.651415Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] d0000000000_00000000000000000001_00000_0000000001_00000|
2025-03-31T16:21:13.651442Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000
2025-03-31T16:21:13.651467Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000
2025-03-31T16:21:13.651491Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000
2025-03-31T16:21:13.651543Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ----------------
2025-03-31T16:21:13.651588Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] ===========================
Got KV request
Wait tx committed for tx 0
2025-03-31T16:21:13.662276Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 1 2025-03-31T16:21:13.662567Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Already written message. Topic: 'Root/PQ/rt3.dc1--account--topic' Partition: 0 SourceId: 'src1'. Message seqNo: 8. Committed seqNo: 10. Writing seqNo: (NULL). EndOffset: 2. CurOffset: 2. Offset: 20 2025-03-31T16:21:13.662658Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Already written message. Topic: 'Root/PQ/rt3.dc1--account--topic' Partition: 0 SourceId: 'src1'. Message seqNo: 10. Committed seqNo: 10. Writing seqNo: (NULL). EndOffset: 2. CurOffset: 2. Offset: 30 2025-03-31T16:21:13.662764Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src1' seqNo 11 partNo 0 2025-03-31T16:21:13.663296Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob sourceId 'src1' seqNo 11 partNo 0 result is x0000000000_00000000000000000001_00000_0000000001_00000 size 71 2025-03-31T16:21:13.663388Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] writing blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 0 old key x0000000000_00000000000000000001_00000_0000000001_00000 new key d0000000000_00000000000000000001_00000_0000000001_00000 size 71 WTime 10239 2025-03-31T16:21:13.667747Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src1' seqNo 11 partNo 0 FormedBlobsCount 1 NewHead: Offset 40 PartNo 0 PackedSize 84 count 1 nextOffset 41 batches 1 2025-03-31T16:21:13.668486Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 0 compactOffset 40,1 HeadOffset 1 endOffset 2 curOffset 41 d0000000000_00000000000000000040_00000_0000000001_00000| size 70 WTime 10239 2025-03-31T16:21:13.668658Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-31T16:21:13.668717Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-31T16:21:13.668764Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-03-31T16:21:13.668814Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-31T16:21:13.668854Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] d0000000000_00000000000000000001_00000_0000000001_00000 2025-03-31T16:21:13.668890Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psrc1 2025-03-31T16:21:13.668917Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] d0000000000_00000000000000000040_00000_0000000001_00000| 2025-03-31T16:21:13.668949Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-31T16:21:13.668986Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-31T16:21:13.669029Z node 5 
:PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Got batch complete: 3 Got KV request Got KV request Wait batch completion Wait 2nd KV request Wait kv request 2025-03-31T16:21:13.690031Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 17 WriteNewSizeFromSupportivePartitions# 0 2025-03-31T16:21:13.690143Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-31T16:21:13.690232Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src1', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 8, partNo: 0, Offset: 2 is already written 2025-03-31T16:21:13.690281Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-31T16:21:13.690323Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src1', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 10, partNo: 0, Offset: 2 is already written 2025-03-31T16:21:13.690362Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-31T16:21:13.690403Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src1', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 11, partNo: 0, Offset: 40 is stored on disk 2025-03-31T16:21:13.690701Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-31T16:21:13.690754Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-31T16:21:13.690801Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [d0000000000_00000000000000000001_00000_0000000001_00000|, d0000000000_00000000000000000001_00000_0000000001_00000|] 2025-03-31T16:21:13.690847Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-31T16:21:13.690889Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-31T16:21:13.690938Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-31T16:21:13.690983Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request >> TPartitionTests::CommitOffsetRanges [GOOD] >> TPQTest::TestDescribeBalancer [GOOD] >> TPQTest::TestCheckACL >> TPQTabletTests::DropTablet_And_Tx >> KqpPg::DropTableIfExists_GenericQuery [GOOD] >> KqpPg::EquiJoin+useSink >> TPartitionTests::AfterRestart_1 >> TPartitionTests::DataTxCalcPredicateError [GOOD] >> TPQTabletTests::DropTablet_And_Tx [GOOD] >> TPQTabletTests::DropTablet_Before_Write >> TPartitionTests::SetOffset [GOOD] >> TPQTabletTests::DropTablet_Before_Write [GOOD] >> TPartitionTests::OldPlanStep >> TPartitionTests::AfterRestart_1 [GOOD] >> TPQTabletTests::DropTablet_And_UnplannedConfigTransaction ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::QSReplySize-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 6507, MsgBus: 29853 2025-03-31T16:20:23.300979Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488009294840400617:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:23.301030Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000941/r3tmp/tmpNSCePD/pdisk_1.dat 2025-03-31T16:20:23.649459Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6507, node 1 2025-03-31T16:20:23.688778Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:23.688871Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:23.690488Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:20:23.702539Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:23.702569Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:23.702577Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:23.702723Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29853 TClient is connected to server localhost:29853 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:24.145886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:24.171020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:20:24.309954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-31T16:20:24.450780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:20:24.523840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:26.200717Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009307725304168:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:26.200836Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:26.484653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:20:26.510761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:20:26.535370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:20:26.558515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:20:26.587465Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:20:26.618768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:20:26.658220Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009307725304679:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:26.658295Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009307725304684:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:26.658337Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:26.661417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:20:26.670057Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488009307725304686:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:20:26.767362Z node 1 :TX_PROXY ERROR: Actor# [1:7488009307725304741:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:20:27.462172Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:28.304168Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488009294840400617:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:28.304230Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:20:28.567748Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488009316315240172:2530], SessionActorId: [1:7488009316315240125:2530], statusCode=PRECONDITION_FAILED. Issue=
: Error: Memory limit exception, current limit is 1024 bytes., code: 2029 . sessionActorId=[1:7488009316315240125:2530]. isRollback=0 2025-03-31T16:20:28.590450Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODFmNmVlYmMtNWRlZmExMTgtMzkwYmFhZmQtNjE4NTU1NmU=, ActorId: [1:7488009316315240125:2530], ActorState: ExecuteState, TraceId: 01jqpg27ejb9kf12g1c29m99pc, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7488009316315240173:2530] from: [1:7488009316315240172:2530] 2025-03-31T16:20:28.590560Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7488009316315240173:2530] TxId: 281474976710672. Ctx: { TraceId: 01jqpg27ejb9kf12g1c29m99pc, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODFmNmVlYmMtNWRlZmExMTgtMzkwYmFhZmQtNjE4NTU1NmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Memory limit exception, current limit is 1024 bytes., code: 2029 } 2025-03-31T16:20:28.590719Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488009316315240181:2544], TxId: 281474976710672, task: 5. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ODFmNmVlYmMtNWRlZmExMTgtMzkwYmFhZmQtNjE4NTU1NmU=. TraceId : 01jqpg27ejb9kf12g1c29m99pc. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7488009316315240173:2530], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-03-31T16:20:28.591192Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODFmNmVlYmMtNWRlZmExMTgtMzkwYmFhZmQtNjE4NTU1NmU=, ActorId: [1:7488009316315240125:2530], ActorState: ExecuteState, TraceId: 01jqpg27ejb9kf12g1c29m99pc, Create QueryResponse for error on request, msg:
: Error: Memory limit exception, current limit is 1024 bytes., code: 2029 Trying to start YDB, gRPC: 14398, MsgBus: 1256 2025-03-31T16:20:29.245606Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488009320087093398:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:29.245696Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000941/r3tmp/tmpQiNbkD/pdisk_1.dat 2025-03-31T16:20:29.325728Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14398, node 2 2025-03-31T16:20:29.374415Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:29.374505Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:29.376159Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:20:29.380181Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:29.380203Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:29.380211Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:29.380308Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1256 TClient is connected to server localhost:1256 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS ... 4976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:20:37.843127Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:20:37.867500Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:20:37.934514Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:20:37.974191Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488009353705990618:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:37.974296Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:37.974679Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488009353705990623:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:37.978070Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:20:37.986274Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488009353705990626:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:20:38.061708Z node 3 :TX_PROXY ERROR: Actor# [3:7488009358000957975:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:20:39.908889Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488009340821086446:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:39.908967Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:20:49.978400Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:20:49.978438Z node 3 :IMPORT WARN: Table profiles were not loaded
: Error: Query did not complete within specified timeout 500ms, session id ydb://session/3?node_id=3&id=M2VhMWQ1YzctZDhhODc4MTItMTQ0NWZjZmQtNjUzZmFjMzE= Trying to start YDB, gRPC: 1739, MsgBus: 13962 2025-03-31T16:20:59.560933Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488009449197597596:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:59.560994Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000941/r3tmp/tmpzE9gs1/pdisk_1.dat 2025-03-31T16:20:59.693660Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:20:59.724661Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:59.724759Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:59.726226Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1739, node 4 2025-03-31T16:20:59.766165Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:59.766199Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:59.766210Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:59.766387Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13962 TClient is connected to server localhost:13962 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:21:00.216463Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:21:00.233271Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:21:00.309295Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:21:00.497128Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:21:00.574342Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:21:02.914941Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488009462082501267:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:21:02.915119Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:21:02.961750Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:21:02.993660Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:21:03.027287Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:21:03.095222Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:21:03.130437Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:21:03.168105Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:21:03.244693Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488009466377469082:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:21:03.244704Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488009466377469087:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:21:03.244778Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:21:03.247838Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:21:03.256588Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7488009466377469089:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:21:03.313644Z node 4 :TX_PROXY ERROR: Actor# [4:7488009466377469142:3451] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:21:04.314702Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:21:04.561231Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7488009449197597596:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:21:04.561359Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:21:13.121714Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NmIzNGJlOTItMjhlNzFmMTYtMmJjNWYxODAtMzAwOTM5NjA=, ActorId: [4:7488009500737208795:2633], ActorState: ExecuteState, TraceId: 01jqpg3hz2echszwjcs631d6dn, Create QueryResponse for error on request, msg:
: Error: Intermediate data materialization exceeded size limit (88240924 > 50331648). This usually happens when trying to write large amounts of data or to perform lookup by big collection of keys in single query. Consider using smaller batches of data., code: 2013 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::DataTxCalcPredicateError [GOOD] Test command err: 2025-03-31T16:21:04.345703Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:04.345806Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-31T16:21:04.366101Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [1:179:2194] 2025-03-31T16:21:04.368052Z node 1 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-03-31T16:21:04.000000Z 2025-03-31T16:21:04.368117Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [1:179:2194] Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\200\323\206\350\3362" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\200\323\206\350\3362" StorageChannel: INLINE } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\264\222\004" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient" Value: "\010\001\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\001\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } 2025-03-31T16:21:05.089511Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:05.089583Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-31T16:21:05.105091Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [2:179:2194] 2025-03-31T16:21:05.106695Z node 2 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-03-31T16:21:05.000000Z 2025-03-31T16:21:05.106756Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [2:179:2194] Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\350\332\206\350\3362" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-1" Value: "\010\000\020\001\030\001\"\tsession-1(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-1" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-1" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\350\332\206\350\3362" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-2" Value: "\010\000\020\001\030\001\"\tsession-2(\0000\003" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-2" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-2" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\350\332\206\350\3362" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-1" Value: "\010\003\020\001\030\001\"\tsession-1(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-1" Value: "\003\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-1" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\350\332\206\350\3362" StorageChannel: INLINE } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\262\222\004" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-2" Value: "\010\001\020\001\030\001\"\tsession-2(\0000\003" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-2" Value: "\001\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-2" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-1" Value: "\010\006\020\001\030\001\"\tsession-1(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-1" Value: "\006\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-1" StorageChannel: INLINE } 2025-03-31T16:21:05.889412Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:05.889490Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-31T16:21:05.912631Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [3:179:2194] 2025-03-31T16:21:05.914327Z node 3 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-03-31T16:21:05.000000Z 2025-03-31T16:21:05.914387Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [3:179:2194] Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\350\332\206\350\3362" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } 2025-03-31T16:21:06.640264Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:06.640338Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-31T16:21:06.652650Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitConfigStep Got KV request 2025-03-31T16:21:06.652872Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-31T16:21:06.653106Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [4:176:2191] 2025-03-31T16:21:06.653976Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDiskStatusStep Got KV request 2025-03-31T16:21:06.654139Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitMetaStep Got KV request 2025-03-31T16:21:06.654295Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInfoRangeStep Got KV request 2025-03-31T16:21:06.654476Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataRangeStep Got KV request 2025-03-31T16:21:06.654861Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:TInitDataRangeStep] Got data offset 0 count 50 size 684 so 0 eo 50 d0000000000_00000000000000000000_00000_0000000050_00000 2025-03-31T16:21:06.654956Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataStep 2025-03-31T16:21:06.654995Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-03-31T16:21:06.655038Z node 4 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-03-31T16:21:06.000000Z 2025-03-31T16:21:06.655071Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Initializing completed. 2025-03-31T16:21:06.655145Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [4:176:2191] 2025-03-31T16:21:06.655200Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partitition 0 so 0 endOffset 50 Head Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 SYNC INIT DATA KEY: d0000000000_00000000000000000000_00000_0000000050_00000 size 684 2025-03-31T16:21:06.655246Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. 
Count 0 2025-03-31T16:21:07.935687Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client session is set to 0 (startOffset 0) session session 2025-03-31T16:21:07.935837Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-31T16:21:07.935878Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-31T16:21:07.935933Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-31T16:21:07.935983Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-31T16:21:07.936014Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cclient 2025-03-31T16:21:07.936032Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uclient 2025-03-31T16:21:07.936061Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-31T16:21:07.936103Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Got batch complete: 1 Got KV request Got KV request Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\0202\030\000(\320\342\206\350\3362" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } 2025-03-31T16:21:07.947143Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 Create distr tx with id = 0 and act no: 1 2025-03-31T16:21:07.947462Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 0 Wait first predicate result 2025-03-31T16:21:09.200092Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse Got batch complete: 1 Create distr tx with id = 2 and act no: 3 2025-03-31T16:21:09.200399Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 2 Wait second predicate result 2025-03-31T16:21:10.399683Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-03-31T16:21:10.399758Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 0 2025-03-31T16:21:10.399804Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 0 2025-03-31T16:21:10.399854Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-03-31T16:21:10.399896Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0, NewHead=Offset 50 PartNo 0 PackedSize 0 count 0 ... 
State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 2 2025-03-31T16:21:10.400153Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 2 2025-03-31T16:21:10.400206Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-03-31T16:21:10.400250Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0, NewHead=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 2025-03-31T16:21:10.400421Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-31T16:21:10.400469Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-31T16:21:10.400513Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-31T16:21:10.400542Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psrc1 2025-03-31T16:21:10.400568Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-31T16:21:10.400587Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-31T16:21:10.400604Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-03-31T16:21:10.400629Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-31T16:21:10.400697Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Send disk status response with cookie: 0 2025-03-31T16:21:10.400910Z node 4 :PERSQUEUE INFO: new Cookie owner1|932752a-44ae728b-68d12946-3a948f8d_0 generated for partition 0 topic 'Root/PQ/rt3.dc1--account--topic' owner owner1 2025-03-31T16:21:10.422465Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 2 2025-03-31T16:21:10.422613Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. 
Partition: 0 Got batch complete: 1 2025-03-31T16:21:10.423692Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob processing sourceId 'SourceId' seqNo 5 partNo 0 2025-03-31T16:21:10.424582Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob complete sourceId 'SourceId' seqNo 5 partNo 0 FormedBlobsCount 0 NewHead: Offset 51 PartNo 0 PackedSize 118 count 1 nextOffset 52 batches 1 2025-03-31T16:21:10.425256Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 0 compactOffset 51,1 HeadOffset 50 endOffset 50 curOffset 52 d0000000000_00000000000000000051_00000_0000000001_00000| size 104 WTime 15243 2025-03-31T16:21:10.425435Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-31T16:21:10.425482Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-31T16:21:10.425533Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-03-31T16:21:10.425575Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-31T16:21:10.425624Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000pSourceId 2025-03-31T16:21:10.425681Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] d0000000000_00000000000000000051_00000_0000000001_00000| 2025-03-31T16:21:10.425713Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-31T16:21:10.425751Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-31T16:21:10.425797Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Got batch complete: 1 Got KV request Got KV request Send disk status response with cookie: 0 2025-03-31T16:21:10.447436Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 22 WriteNewSizeFromSupportivePartitions# 0 2025-03-31T16:21:10.447519Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-03-31T16:21:10.447597Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'SourceId', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 5, partNo: 0, Offset: 51 is stored on disk Wait third predicate result Create distr tx with id = 4 and act no: 5 2025-03-31T16:21:10.448018Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 4 2025-03-31T16:21:11.660337Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse Got batch complete: 1 2025-03-31T16:21:12.106007Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:12.106085Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-31T16:21:12.121565Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitConfigStep Got KV request 2025-03-31T16:21:12.121809Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-31T16:21:12.122078Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [5:178:2193] 2025-03-31T16:21:12.123063Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDiskStatusStep Got KV request 2025-03-31T16:21:12.123296Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitMetaStep Got KV request 2025-03-31T16:21:12.123508Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInfoRangeStep Got KV request 2025-03-31T16:21:12.123748Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataRangeStep Got KV request 2025-03-31T16:21:12.124240Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:TInitDataRangeStep] Got data offset 0 count 1 size 684 so 0 eo 1 d0000000000_00000000000000000000_00000_0000000001_00000 2025-03-31T16:21:12.124343Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataStep 2025-03-31T16:21:12.124393Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-03-31T16:21:12.124446Z node 5 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-03-31T16:21:12.000000Z 2025-03-31T16:21:12.124492Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Initializing completed. 2025-03-31T16:21:12.124544Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [5:178:2193] 2025-03-31T16:21:12.124610Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partitition 0 so 0 endOffset 1 Head Offset 1 PartNo 0 PackedSize 0 count 0 nextOffset 1 batches 0 SYNC INIT DATA KEY: d0000000000_00000000000000000000_00000_0000000001_00000 size 684 2025-03-31T16:21:12.124662Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. 
Count 0 2025-03-31T16:21:13.391597Z node 5 :PERSQUEUE INFO: new Cookie SourceId|bc07c81c-987ec347-efe74e33-7cc82392_0 generated for partition 0 topic 'Root/PQ/rt3.dc1--account--topic' owner SourceId 2025-03-31T16:21:13.391772Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 Got batch complete: 1 Wait write response Wait kv request 2025-03-31T16:21:13.392176Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob processing sourceId 'SourceId' seqNo 4 partNo 0 2025-03-31T16:21:13.393102Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob complete sourceId 'SourceId' seqNo 4 partNo 0 FormedBlobsCount 0 NewHead: Offset 11 PartNo 0 PackedSize 118 count 1 nextOffset 12 batches 1 2025-03-31T16:21:13.393829Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 0 compactOffset 11,1 HeadOffset 1 endOffset 1 curOffset 12 d0000000000_00000000000000000011_00000_0000000001_00000| size 104 WTime 5132 2025-03-31T16:21:13.393998Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-31T16:21:13.394046Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-31T16:21:13.394097Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-03-31T16:21:13.394142Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-31T16:21:13.394189Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000pSourceId 2025-03-31T16:21:13.394226Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] d0000000000_00000000000000000011_00000_0000000001_00000| 2025-03-31T16:21:13.394251Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-31T16:21:13.394284Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-31T16:21:13.394331Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Got batch complete: 1 Got KV request Got KV request 2025-03-31T16:21:13.418797Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 22 WriteNewSizeFromSupportivePartitions# 0 2025-03-31T16:21:13.418917Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-03-31T16:21:13.419033Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'SourceId', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 4, partNo: 0, Offset: 11 is stored on disk Wait second predicate result Create distr tx with id = 0 and act no: 1 2025-03-31T16:21:13.419492Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 0 2025-03-31T16:21:14.639557Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse Got batch complete: 1 >> TPartitionTests::AfterRestart_2 >> TPQTabletTests::DropTablet >> TPQTabletTests::DropTablet_And_UnplannedConfigTransaction [GOOD] >> TPartitionTests::OldPlanStep [GOOD] >> TPQTabletTests::Huge_ProposeTransacton >> KqpPg::InsertValuesFromTableWithDefaultBool-useSink [GOOD] >> KqpPg::InsertNoTargetColumns_SerialNotNull+useSink >> TPartitionTests::ReserveSubDomainOutOfSpace >> TPQTabletTests::DropTablet [GOOD] >> TPartitionTests::AfterRestart_2 [GOOD] >> TPQTabletTests::DropTablet_And_PlannedConfigTransaction >> KqpPg::CreateSequence [GOOD] >> KqpPg::AlterSequence >> TPartitionTests::ChangeConfig >> TPartitionTests::ReserveSubDomainOutOfSpace [GOOD] >> TPartitionTests::ShadowPartitionCounters >> PQCountersLabeled::PartitionFirstClass [GOOD] >> TPQRBDescribes::PartitionLocations [GOOD] >> PQCountersLabeled::ImportantFlagSwitching >> TPartitionTests::TestBatchingWithChangeConfig [GOOD] >> TPQTabletTests::DropTablet_And_PlannedConfigTransaction [GOOD] >> TPartitionTests::TestBatchingWithProposeConfig >> TPQTabletTests::Cancel_Tx >> TPQTabletTests::Cancel_Tx [GOOD] >> TPQTabletTests::Config_TEvTxCommit_After_Restart >> TPartitionTests::ChangeConfig [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionActive_BoundaryTrue_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionInactive_0_Test >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_NewSourceId_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_Active_Test >> TPQTabletTests::Config_TEvTxCommit_After_Restart [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionActive_BoundaryFalse_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_Test >> TPQTabletTests::All_New_Partitions_In_Another_Tablet >> TPQTest::TestWritePQCompact [GOOD] >> TPQTest::TestWritePQBigMessage ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQRBDescribes::PartitionLocations [GOOD] Test command err: Bucket: 100 elems count: 97 Bucket: 200 elems count: 104 Bucket: 500 elems count: 288 Bucket: 1000 elems count: 528 Bucket: 2000 elems count: 1008 Bucket: 5000 elems count: 2976 2025-03-31T16:21:04.198527Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488009468960460433:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:21:04.198664Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:21:04.217995Z node 2 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488009470117939012:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:21:04.218102Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:21:04.352780Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00157c/r3tmp/tmpK0MLxK/pdisk_1.dat 2025-03-31T16:21:04.370430Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-31T16:21:04.536554Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:21:04.536687Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:21:04.539970Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:21:04.541032Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:21:04.562958Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18277, node 1 2025-03-31T16:21:04.588811Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:21:04.589077Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:21:04.589196Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:21:04.589224Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:21:04.595258Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:21:04.613974Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/0l5c/00157c/r3tmp/yandexwXGqEh.tmp 2025-03-31T16:21:04.614039Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/0l5c/00157c/r3tmp/yandexwXGqEh.tmp 2025-03-31T16:21:04.615760Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/0l5c/00157c/r3tmp/yandexwXGqEh.tmp 2025-03-31T16:21:04.615987Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:21:04.655695Z INFO: TTestServer started on Port 25655 GrpcPort 18277 TClient is connected to server localhost:25655 PQClient connected to localhost:18277 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:21:04.895635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:21:04.944070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-03-31T16:21:07.179418Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009481845363471:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:21:07.179422Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009481845363464:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:21:07.179512Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:21:07.183743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-03-31T16:21:07.185940Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009481845363525:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:21:07.186004Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:21:07.207287Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488009481845363493:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-03-31T16:21:07.408215Z node 1 :TX_PROXY ERROR: Actor# [1:7488009481845363572:2802] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:21:07.429258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:21:07.434278Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488009481845363590:2353], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:21:07.434315Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7488009483002841242:2313], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:21:07.434488Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWUwMGE1ZTYtMzdmNmViMTgtNGNjNmFmNmUtZjdjNmQyZDE=, ActorId: [1:7488009481845363461:2340], ActorState: ExecuteState, TraceId: 01jqpg3df526styf4r0679927e, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:21:07.434549Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OGUxOWUxZDUtMzU4ZmQwZGMtZDIzODNlZDUtODUzNTZmYmY=, ActorId: [2:7488009483002841202:2307], ActorState: ExecuteState, TraceId: 01jqpg3dgg42ajwgs2hj9nc0ts, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:21:07.449221Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T16:21:07.449271Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T16:21:07.507331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:21:07.592287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-03-31T16:21:07.773729Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jqpg3dybep9csqspa1vkm849, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzBhYjgxZmMtN2IxOTExYzgtY2EwM2FjM2YtNWY5ZWJhZTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. 
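[editor's note] The "Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]'" compile errors above, followed by the "=== Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` ..." statement, show the harness querying its config table before bootstrap has created it; the UPSERT can only succeed once the table exists. As a hypothetical sketch (only the column names name, balancer, local, enabled, weight are confirmed by the UPSERT in the log; the types and primary key are assumptions), creating such a table with the same Python SDK session pattern as the sketch above might look like:

    def create_cluster_table(session):
        # Assumed schema: types and PRIMARY KEY are guesses, not taken from
        # YDB sources; only the column names appear in the logged UPSERT.
        session.execute_scheme("""
            CREATE TABLE `/Root/PQ/Config/V2/Cluster` (
                name Utf8,
                balancer Utf8,
                local Bool,
                enabled Bool,
                weight Uint64,
                PRIMARY KEY (name)
            );
        """)

    pool.retry_operation_sync(create_cluster_table)

    # `pool` is the SessionPool from the earlier sketch; once the table exists,
    # the logged UPSERT and the subsequent cluster-list checks can proceed.
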
Subcribe to ClusterTracker from [1:7488009481845363961:3082] 2025-03-31T16:21:09.198456Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488009468960460433:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:21:09.198540Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:21:09.217884Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488009470117939012:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:21:09.217971Z ... 1:14.064638Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710679, State EXECUTED 2025-03-31T16:21:14.064653Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710679 State EXECUTED FrontTxId 281474976710679 2025-03-31T16:21:14.064676Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TPersQueue::SendEvReadSetAckToSenders 2025-03-31T16:21:14.064697Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710679, NewState WAIT_RS_ACKS 2025-03-31T16:21:14.064714Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710679 moved from EXECUTED to WAIT_RS_ACKS 2025-03-31T16:21:14.064743Z node 1 :PERSQUEUE DEBUG: [TxId: 281474976710679] PredicateAcks: 0/0 2025-03-31T16:21:14.064750Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] HaveAllRecipientsReceive 1, AllSupportivePartitionsHaveBeenDeleted 1 2025-03-31T16:21:14.064764Z node 1 :PERSQUEUE DEBUG: [TxId: 281474976710679] PredicateAcks: 0/0 2025-03-31T16:21:14.064805Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] add an TxId 281474976710679 to the list for deletion 2025-03-31T16:21:14.064832Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710679, NewState DELETING 2025-03-31T16:21:14.064860Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] delete key for TxId 281474976710679 2025-03-31T16:21:14.064933Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-03-31T16:21:14.066569Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-03-31T16:21:14.066598Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Try execute txs with state DELETING 2025-03-31T16:21:14.066610Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710679, State DELETING 2025-03-31T16:21:14.066632Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] delete TxId 281474976710679 ===Query complete TClient::Ls request: /Root/PQ/rt3.dc1--topic TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "rt3.dc1--topic" PathId: 13 SchemeshardId: 72057594046644480 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 281474976710679 CreateStep: 1743438074063 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186224037893 } PersQueueGroup { Name: "rt3.dc1--topic" PathId: 13 TotalGroupCount: 5 PartitionPerTablet: 5 PQTabletConfig { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 2000... 
(TRUNCATED) GetTopicVersionFromPath: record Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "rt3.dc1--topic" PathId: 13 SchemeshardId: 72057594046644480 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 281474976710679 CreateStep: 1743438074063 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186224037893 } PersQueueGroup { Name: "rt3.dc1--topic" PathId: 13 TotalGroupCount: 5 PartitionPerTablet: 5 PQTabletConfig { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } LocalDC: true ReadRules: "user" ReadFromTimestampsMs: 0 ConsumerFormatVersions: 0 ConsumerCodecs { } Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lzop" } ReadRuleVersions: 0 YdbDatabasePath: "/Root" } Partitions { PartitionId: 0 TabletId: 72075186224037892 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186224037892 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186224037892 Status: Active } Partitions { PartitionId: 3 TabletId: 72075186224037892 Status: Active } Partitions { PartitionId: 4 TabletId: 72075186224037892 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186224037893 NextPartitionId: 5 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 12 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 5 PQPartitionsLimit: 1000000 } } Path: "/Root/PQ/rt3.dc1--topic" name rt3.dc1--topic version1 CallPersQueueGRPC request to localhost:18277 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--topic" } } 2025-03-31T16:21:14.075077Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:18277 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--topic" } } CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC 2025-03-31T16:21:14.579562Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC request to localhost:18277 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--topic" } } 2025-03-31T16:21:15.083036Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 1 ErrorCode: OK MetaResponse { CmdGetTopicMetadataResult { TopicInfo { Topic: "rt3.dc1--topic" NumPartitions: 5 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } Version: 1 LocalDC: true Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lzop" } TopicPath: "/Root/PQ/rt3.dc1--topic" YdbDatabasePath: "/Root" Consumers 
{ Name: "user" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } Version: 0 Important: false } } ErrorCode: OK } } } === Topic created, have version: 1 TClient::Ls request: /Root/PQ/rt3.dc1--topic TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "rt3.dc1--topic" PathId: 13 SchemeshardId: 72057594046644480 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 281474976710679 CreateStep: 1743438074063 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186224037893 } PersQueueGroup { Name: "rt3.dc1--topic" PathId: 13 TotalGroupCount: 5 PartitionPerTablet: 5 PQTabletConfig { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 2000... (TRUNCATED) 2025-03-31T16:21:15.086706Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--topic] pipe [1:7488009516205103154:3552] connected; active server actors: 1 2025-03-31T16:21:15.086961Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic] addPartitionToResponse tabletId 72075186224037892, partitionId 0, NodeId 1, Generation 1 2025-03-31T16:21:15.086991Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic] addPartitionToResponse tabletId 72075186224037892, partitionId 1, NodeId 1, Generation 1 2025-03-31T16:21:15.087001Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic] addPartitionToResponse tabletId 72075186224037892, partitionId 2, NodeId 1, Generation 1 2025-03-31T16:21:15.087013Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic] addPartitionToResponse tabletId 72075186224037892, partitionId 3, NodeId 1, Generation 1 2025-03-31T16:21:15.087022Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic] addPartitionToResponse tabletId 72075186224037892, partitionId 4, NodeId 1, Generation 1 2025-03-31T16:21:15.087513Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--topic] pipe [1:7488009516205103155:3553] connected; active server actors: 1 2025-03-31T16:21:15.087794Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic] addPartitionToResponse tabletId 72075186224037892, partitionId 0, NodeId 1, Generation 1 2025-03-31T16:21:15.087814Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic] addPartitionToResponse tabletId 72075186224037892, partitionId 1, NodeId 1, Generation 1 2025-03-31T16:21:15.087826Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic] addPartitionToResponse tabletId 72075186224037892, partitionId 2, NodeId 1, Generation 1 2025-03-31T16:21:15.087837Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic] addPartitionToResponse tabletId 72075186224037892, partitionId 3, NodeId 1, Generation 1 2025-03-31T16:21:15.087848Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic] addPartitionToResponse tabletId 72075186224037892, partitionId 4, NodeId 1, Generation 1 response: Status: true Locations { PartitionId: 0 NodeId: 1 Generation: 1 } Locations { PartitionId: 1 NodeId: 1 Generation: 1 } Locations { PartitionId: 2 NodeId: 1 Generation: 1 } Locations { PartitionId: 3 NodeId: 1 Generation: 1 } 
Locations { PartitionId: 4 NodeId: 1 Generation: 1 } 2025-03-31T16:21:15.088540Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--topic] pipe [1:7488009516205103156:3554] connected; active server actors: 1 2025-03-31T16:21:15.088694Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic] addPartitionToResponse tabletId 72075186224037892, partitionId 3, NodeId 1, Generation 1 response: Status: true Locations { PartitionId: 3 NodeId: 1 Generation: 1 } 2025-03-31T16:21:15.089198Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--topic] pipe [1:7488009516205103157:3555] connected; active server actors: 1 response: Status: false >> TPQTabletTests::All_New_Partitions_In_Another_Tablet [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::ChangeConfig [GOOD] Test command err: 2025-03-31T16:21:13.511186Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:13.511266Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-31T16:21:13.529742Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:179:2194] 2025-03-31T16:21:13.530473Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 1 generation 0 [1:179:2194] Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient-1" Value: "\010\000\020\002\030\003\"\014session-id-1(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient-1" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id-1" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient-2" Value: "\010\000\020\004\030\005\"\014session-id-2(\0000\003" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient-2" Value: "\000\000\000\000\000\000\000\000\004\000\000\000\005\000\000\000session-id-2" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient-3" Value: "\010\000\020\006\030\007\"\014session-id-3(\0000\004" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient-3" Value: "\000\000\000\000\000\000\000\000\006\000\000\000\007\000\000\000session-id-3" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient-1" Value: "\010\000\020\010\030\t\"\014session-id-2(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient-1" Value: "\000\000\000\000\000\000\000\000\010\000\000\000\t\000\000\000session-id-2" StorageChannel: INLINE } 2025-03-31T16:21:14.085192Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:14.085254Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-31T16:21:14.097190Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:179:2194] 2025-03-31T16:21:14.098461Z node 2 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-03-31T16:21:14.000000Z 2025-03-31T16:21:14.098509Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [2:179:2194] Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\020\n\030\000(\220\241\207\350\3362" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\020\n\030\000(\220\241\207\350\3362" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: "\010\002\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\002\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\020\n\030\000(\220\241\207\350\3362" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: "\010\004\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\004\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } 2025-03-31T16:21:14.884066Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:14.884140Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-31T16:21:14.900720Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [3:179:2194] 2025-03-31T16:21:14.902935Z node 3 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-03-31T16:21:14.000000Z 2025-03-31T16:21:14.903005Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [3:179:2194] Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\220\241\207\350\3362" StorageChannel: INLINE } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\316\255\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient" Value: "\010\004\020\000\030\000\"\007session(\0000\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\004\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000session" StorageChannel: INLINE } 2025-03-31T16:21:15.666996Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:15.667064Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-31T16:21:15.682247Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [4:177:2192] 2025-03-31T16:21:15.684474Z node 4 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-03-31T16:21:15.000000Z 2025-03-31T16:21:15.684543Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [4:177:2192] 2025-03-31T16:21:16.457082Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:16.457150Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-31T16:21:16.473019Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [5:179:2194] 2025-03-31T16:21:16.475592Z node 5 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-03-31T16:21:16.000000Z 2025-03-31T16:21:16.475657Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [5:179:2194] Send change config Wait cmd write (initial) Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\340\260\207\350\3362" StorageChannel: INLINE } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\262\222\004" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-1" Value: "\010\002\020\000\030\000\"\tsession-1(\0000\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-1" Value: "\002\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000session-1" StorageChannel: INLINE } Wait commit 1 done Wait cmd write (change config) Got cmd write: CmdDeleteRange { Range { From: "m0000000003cclient-2" IncludeFrom: true To: "m0000000003cclient-2" IncludeTo: true } } CmdDeleteRange { Range { From: "m0000000003uclient-2" IncludeFrom: true To: "m0000000003uclient-2" IncludeTo: true } } CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\340\260\207\350\3362" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-1" Value: "\010\002\020\000\030\000\"\tsession-1(\0000\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-1" Value: "\002\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000session-1" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-3" Value: "\010\000\020\000\030\000\"\000(\0000\007" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-3" Value: "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000" StorageChannel: INLINE } CmdWrite { Key: "_config_3" Value: "\022\t\030\200\243\0058\200\200\200\005\030\000\"\027rt3.dc1--account--topic(\0020\001\272\001 /Root/PQ/rt3.dc1--account--topic\352\001\000\372\001\002\010\000\212\002\007account\220\002\001\242\002\002\010\000\252\002\016\n\010client-1@\000H\000\252\002\016\n\010client-3@\007H\000" StorageChannel: INLINE } Wait config changed
|90.3%| [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest
>> TPartitionTests::ConflictingSrcIdForTxInDifferentBatches [GOOD]
|90.3%| [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest
>> TPQTabletTests::After_Restarting_The_Tablet_Sends_A_TEvReadSet_For_Transactions_In_The_EXECUTED_State
>> TNebiusAccessServiceTest::PassRequestId [GOOD]
>> TPartitionTests::ConflictingCommitsInSeveralBatches
>> TPQTabletTests::After_Restarting_The_Tablet_Sends_A_TEvReadSet_For_Transactions_In_The_EXECUTED_State [GOOD]
>> HttpRequest::Probe [GOOD]
>> TPQTest::TestReserveBytes [GOOD]
>> TPQTest::TestSetClientOffset
------- [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest >> TNebiusAccessServiceTest::PassRequestId [GOOD]
Test command err: 2025-03-31T16:21:17.966087Z node 2 :GRPC_CLIENT DEBUG: [517000004708]{reqId} Connect to grpc://localhost:13760 2025-03-31T16:21:17.979239Z node 2 :GRPC_CLIENT DEBUG: [517000004708]{reqId} Request AuthenticateRequest { iam_token: "**** (717F937C)" } 2025-03-31T16:21:17.996518Z node 2 :GRPC_CLIENT DEBUG: [517000004708]{reqId} Response AuthenticateResponse { account { user_account { id: "1234" } } }
>> TFetchRequestTests::HappyWay [GOOD]
>> TFetchRequestTests::BadTopicName
>> KqpPg::CreateUniqComplexPgColumn-useSink [GOOD]
>> KqpPg::CreateTempTable
------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTabletTests::After_Restarting_The_Tablet_Sends_A_TEvReadSet_For_Transactions_In_The_EXECUTED_State [GOOD]
Test command err: 2025-03-31T16:21:15.874630Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-03-31T16:21:15.879246Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-03-31T16:21:15.879550Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] doesn't have tx info 2025-03-31T16:21:15.879599Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-31T16:21:15.879631Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] no config, start with empty partitions and default config 2025-03-31T16:21:15.879677Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2025-03-31T16:21:15.879715Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:15.879771Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-31T16:21:15.894206Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:178:2193], now have 1 active actors on pipe 2025-03-31T16:21:15.894335Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-03-31T16:21:15.915436Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-03-31T16:21:15.918030Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode:
METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-03-31T16:21:15.918153Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:15.918910Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-03-31T16:21:15.919037Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitConfigStep 2025-03-31T16:21:15.919462Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-31T16:21:15.919915Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:186:2199] 2025-03-31T16:21:15.920847Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Initializing completed. 2025-03-31T16:21:15.920912Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [1:186:2199] 2025-03-31T16:21:15.920968Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-31T16:21:15.921546Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. 
Count 0 2025-03-31T16:21:15.921666Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2025-03-31T16:21:15.921711Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2025-03-31T16:21:15.921852Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-31T16:21:15.921915Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-31T16:21:15.921956Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-31T16:21:15.922019Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-31T16:21:15.922064Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cuser 2025-03-31T16:21:15.922088Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uuser 2025-03-31T16:21:15.922120Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-31T16:21:15.922166Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-03-31T16:21:15.922258Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-03-31T16:21:15.922402Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-03-31T16:21:15.925311Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-03-31T16:21:15.925716Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:193:2204], now have 1 active actors on pipe 2025-03-31T16:21:15.926460Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:196:2206], now have 1 active actors on pipe 2025-03-31T16:21:15.926549Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvDropTablet 2025-03-31T16:21:16.257735Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-03-31T16:21:16.261668Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-03-31T16:21:16.261955Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] doesn't have tx info 2025-03-31T16:21:16.261994Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-31T16:21:16.262025Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] no config, start with empty partitions and default config 2025-03-31T16:21:16.262059Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2025-03-31T16:21:16.262095Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:16.262138Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-31T16:21:16.277958Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [2:178:2193], now have 1 active actors on pipe 2025-03-31T16:21:16.278074Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-03-31T16:21:16.278342Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 2(current 0) received from actor [2:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2025-03-31T16:21:16.280638Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2025-03-31T16:21:16.280786Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:16.282057Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 2 actor [2:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 FederationAccount: 
"federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2025-03-31T16:21:16.282220Z node 2 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitConfigStep 2025-03-31T16:21:16.282289Z node 2 :PERSQUEUE DEBUG: [topic:1:Initializer] Start initializing step TInitConfigStep 2025-03-31T16:21:16.282703Z node 2 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-31T16:21:16.282925Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:186:2199] 2025-03-31T16:21:16.283574Z node 2 :PERSQUEUE DEBUG: [topic:0:Initializer] Initializing completed. 2025-03-31T16:21:16.283618Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [2:186:2199] 2025-03-31T16:21:16.283660Z node 2 :PERSQUEUE DEBUG ... LATING 2025-03-31T16:21:18.196692Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891 moved from PLANNED to CALCULATING 2025-03-31T16:21:18.196764Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 110, TxId 67891 2025-03-31T16:21:18.196988Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvTxCalcPredicateResult Step 110, TxId 67891, Partition 0, Predicate 1 2025-03-31T16:21:18.197020Z node 6 :PERSQUEUE DEBUG: [TxId: 67891] Handle TEvTxCalcPredicateResult 2025-03-31T16:21:18.197047Z node 6 :PERSQUEUE DEBUG: [TxId: 67891] Partition responses 1/1 2025-03-31T16:21:18.197076Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state CALCULATING 2025-03-31T16:21:18.197105Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, State CALCULATING 2025-03-31T16:21:18.197141Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891 State CALCULATING FrontTxId 67891 2025-03-31T16:21:18.197177Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 1, Expected 1 2025-03-31T16:21:18.197212Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, NewState CALCULATED 2025-03-31T16:21:18.197242Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891 moved from CALCULATING to CALCULATED 2025-03-31T16:21:18.197409Z node 6 :PERSQUEUE DEBUG: [TxId: 67891] save tx TxId: 67891 State: CALCULATED MinStep: 153 MaxStep: 30153 PredicatesReceived { TabletId: 22222 } PredicateRecipients: 22222 Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Step: 110 Predicate: true Kind: KIND_DATA SourceActor { RawX1: 177 RawX2: 25769805968 } Partitions { } 2025-03-31T16:21:18.197476Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-03-31T16:21:18.204735Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-03-31T16:21:18.204807Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state CALCULATED 2025-03-31T16:21:18.204851Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, State CALCULATED 2025-03-31T16:21:18.204901Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891 State CALCULATED FrontTxId 67891 2025-03-31T16:21:18.204946Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, NewState WAIT_RS 2025-03-31T16:21:18.204990Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891 moved from CALCULATED to 
WAIT_RS 2025-03-31T16:21:18.205036Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSet to 1 receivers. Wait TEvTxProcessing::TEvReadSet from 1 senders. 2025-03-31T16:21:18.205082Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvReadSet to tablet 22222 2025-03-31T16:21:18.205167Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveParticipantsDecision 0 2025-03-31T16:21:18.210625Z node 6 :PERSQUEUE DEBUG: Client pipe to tablet 72057594037927937 from 22222 is reset 2025-03-31T16:21:18.235040Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-03-31T16:21:18.237554Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-03-31T16:21:18.238781Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] has a tx info 2025-03-31T16:21:18.238839Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 110, PlanTxId 67891, ExecStep 110, ExecTxId 67891 2025-03-31T16:21:18.238975Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] ReadRange pair. Key tx_00000000000000067890, Status 0 2025-03-31T16:21:18.239070Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Restore Tx. TxId: 67890, Step: 100, State: EXECUTED, WriteId: 2025-03-31T16:21:18.239172Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] ReadRange pair. Key tx_00000000000000067891, Status 0 2025-03-31T16:21:18.239211Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Restore Tx. TxId: 67891, Step: 110, State: CALCULATED, WriteId: 2025-03-31T16:21:18.239240Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Fix tx state 2025-03-31T16:21:18.239287Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Txs.size=2, PlannedTxs.size=2 2025-03-31T16:21:18.239327Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] top tx queue (100, 67890) 2025-03-31T16:21:18.239374Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxsOrder: 67890 EXECUTED 0 2025-03-31T16:21:18.239409Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxsOrder: 67891 PLANNED 0 2025-03-31T16:21:18.239927Z node 6 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:18.239973Z node 6 :PERSQUEUE INFO: [PQ: 72057594037927937] has a tx writes info 2025-03-31T16:21:18.240107Z node 6 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitConfigStep 2025-03-31T16:21:18.240410Z node 6 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-31T16:21:18.240657Z node 6 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [6:362:2340] 2025-03-31T16:21:18.241549Z node 6 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitDiskStatusStep 2025-03-31T16:21:18.242615Z node 6 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitMetaStep 2025-03-31T16:21:18.242874Z node 6 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInfoRangeStep 2025-03-31T16:21:18.243500Z node 6 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitDataRangeStep 2025-03-31T16:21:18.243751Z node 6 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitDataStep 2025-03-31T16:21:18.243796Z node 6 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-03-31T16:21:18.243839Z node 6 :PERSQUEUE INFO: [topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2025-03-31T16:21:18.243872Z node 6 :PERSQUEUE DEBUG: [topic:0:Initializer] Initializing completed. 2025-03-31T16:21:18.243946Z node 6 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 3 [6:362:2340] 2025-03-31T16:21:18.244018Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-31T16:21:18.244076Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-03-31T16:21:18.244158Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 6 2025-03-31T16:21:18.244398Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSet to 1 receivers. Wait TEvTxProcessing::TEvReadSet from 1 senders. 2025-03-31T16:21:18.244446Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvReadSet to tablet 22222 2025-03-31T16:21:18.244581Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state EXECUTED 2025-03-31T16:21:18.244623Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State EXECUTED 2025-03-31T16:21:18.244665Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 State EXECUTED FrontTxId 67890 2025-03-31T16:21:18.244705Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TPersQueue::SendEvReadSetAckToSenders 2025-03-31T16:21:18.244747Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState WAIT_RS_ACKS 2025-03-31T16:21:18.244785Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from EXECUTED to WAIT_RS_ACKS 2025-03-31T16:21:18.244826Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] PredicateAcks: 0/1 2025-03-31T16:21:18.244855Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveAllRecipientsReceive 0, AllSupportivePartitionsHaveBeenDeleted 1 2025-03-31T16:21:18.244888Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] PredicateAcks: 0/1 2025-03-31T16:21:18.244923Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state PLANNED 2025-03-31T16:21:18.244951Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, State PLANNED 2025-03-31T16:21:18.244977Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891 State PLANNED FrontTxId 67891 2025-03-31T16:21:18.245004Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxQueue.size 1 2025-03-31T16:21:18.245045Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] New ExecStep 110, ExecTxId 67891 2025-03-31T16:21:18.245107Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, NewState CALCULATING 2025-03-31T16:21:18.245153Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891 moved from PLANNED to CALCULATING 2025-03-31T16:21:18.245216Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 110, TxId 67891 2025-03-31T16:21:18.245785Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvTxCalcPredicateResult Step 110, TxId 67891, Partition 0, Predicate 1 2025-03-31T16:21:18.245826Z node 6 :PERSQUEUE DEBUG: [TxId: 67891] Handle TEvTxCalcPredicateResult 2025-03-31T16:21:18.245870Z node 6 :PERSQUEUE DEBUG: [TxId: 67891] Partition responses 1/1 2025-03-31T16:21:18.245908Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state 
CALCULATING 2025-03-31T16:21:18.245944Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, State CALCULATING 2025-03-31T16:21:18.245987Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891 State CALCULATING FrontTxId 67891 2025-03-31T16:21:18.246023Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 1, Expected 1 2025-03-31T16:21:18.246069Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, NewState CALCULATED 2025-03-31T16:21:18.246106Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891 moved from CALCULATING to CALCULATED 2025-03-31T16:21:18.246284Z node 6 :PERSQUEUE DEBUG: [TxId: 67891] save tx TxId: 67891 State: CALCULATED MinStep: 153 MaxStep: 30153 PredicatesReceived { TabletId: 22222 } PredicateRecipients: 22222 Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Step: 110 Predicate: true Kind: KIND_DATA SourceActor { RawX1: 177 RawX2: 25769805968 } Partitions { } 2025-03-31T16:21:18.246380Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-03-31T16:21:18.246492Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvClientConnected 2025-03-31T16:21:18.246534Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Connected to tablet 22222 2025-03-31T16:21:18.250005Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-03-31T16:21:18.250072Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state CALCULATED 2025-03-31T16:21:18.250127Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, State CALCULATED 2025-03-31T16:21:18.250174Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891 State CALCULATED FrontTxId 67891 2025-03-31T16:21:18.250205Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, NewState WAIT_RS 2025-03-31T16:21:18.250239Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891 moved from CALCULATED to WAIT_RS 2025-03-31T16:21:18.250280Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSet to 1 receivers. Wait TEvTxProcessing::TEvReadSet from 1 senders. 2025-03-31T16:21:18.250323Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvReadSet to tablet 22222 2025-03-31T16:21:18.250401Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveParticipantsDecision 0
>> TPQTest::TestWaitInOwners [GOOD]
>> TPQTest::TestWriteOffsetWithBigMessage
>> TPartitionTests::ShadowPartitionCounters [GOOD]
>> TPQTest::TestCheckACL [GOOD]
>> TPQTest::TestLowWatermark
>> KqpPg::InsertValuesFromTableWithDefaultNegativeCase+useSink [GOOD]
>> KqpPg::InsertValuesFromTableWithDefaultNegativeCase-useSink
>> TPartitionTests::NonConflictingCommitsBatch
------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> HttpRequest::Probe [GOOD]
Test command err: 2025-03-31T16:15:14.455315Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:14.455679Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:14.455804Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000cfb/r3tmp/tmpcjTZk6/pdisk_1.dat 2025-03-31T16:15:14.846136Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27271, node 1 2025-03-31T16:15:15.113396Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:15:15.113454Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:15:15.113502Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:15:15.113951Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:15:15.116364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:15:15.224292Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:15:15.224441Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:15:15.240808Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12894 2025-03-31T16:15:15.878773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:15:19.446634Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-31T16:15:19.501474Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:15:19.501615Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:15:19.536316Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:15:19.540524Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:15:19.825317Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:19.825994Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:19.826812Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:19.827026Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:19.827507Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:19.827639Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:19.827726Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:19.827841Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:19.827930Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:20.005862Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:15:20.006048Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:15:20.019152Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:15:20.181206Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:15:20.242578Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-31T16:15:20.242688Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-31T16:15:20.285044Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-31T16:15:20.285451Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-31T16:15:20.285684Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-31T16:15:20.285743Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-31T16:15:20.285938Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-31T16:15:20.286017Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-31T16:15:20.286065Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-31T16:15:20.286116Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-31T16:15:20.287777Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-31T16:15:20.326074Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-31T16:15:20.326226Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2595], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-31T16:15:20.334181Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1885:2606] 2025-03-31T16:15:20.340360Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1904:2617] 2025-03-31T16:15:20.341160Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1904:2617], schemeshard id = 72075186224037897 2025-03-31T16:15:20.354389Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-31T16:15:20.449549Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-31T16:15:20.449620Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-31T16:15:20.449705Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-31T16:15:20.467764Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-31T16:15:20.476510Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-31T16:15:20.476673Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-31T16:15:20.704812Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-31T16:15:20.932248Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-31T16:15:20.998028Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-31T16:15:21.980366Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2244:3078], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:21.980518Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:22.052538Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-31T16:15:22.409304Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2399:2886];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:15:22.409598Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2399:2886];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:15:22.409941Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2399:2886];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:15:22.410113Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2399:2886];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:15:22.410254Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2399:2886];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:15:22.410413Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2399:2886];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:15:22.410557Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2399:2886];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:15:22.410706Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2399:2886];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:15:22.410843Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2399:2886];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:15:22.410964Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2399:2886];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:15:22.411107Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2399:2886];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:15:22.411242Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2399:2886];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:15:22.480963Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2410:2894];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:15:22.481077Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2410:2894];tablet_id=72075186224037900;process= ... ete. 2025-03-31T16:21:14.776360Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-03-31T16:21:14.776388Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-03-31T16:21:15.387807Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-31T16:21:15.388077Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-31T16:21:15.961305Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-31T16:21:15.961397Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=.sVAG 2025-03-31T16:21:15.961452Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-03-31T16:21:17.101893Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:21:17.102025Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-03-31T16:21:17.102095Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-31T16:21:17.102795Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-31T16:21:17.117350Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-31T16:21:17.117714Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-31T16:21:17.117830Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-31T16:21:17.118437Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-03-31T16:21:17.132299Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-03-31T16:21:17.132523Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-03-31T16:21:17.133237Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:15312:9706], server id = [2:15317:9711], tablet id = 72075186224037899, status = OK 2025-03-31T16:21:17.133364Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:15312:9706], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T16:21:17.134593Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:15313:9707], server id = [2:15318:9712], tablet id = 72075186224037900, status = OK 2025-03-31T16:21:17.134678Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:15313:9707], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T16:21:17.134989Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:15314:9708], server id = [2:15319:9713], tablet id = 72075186224037901, status = OK 2025-03-31T16:21:17.135027Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:15314:9708], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T16:21:17.135807Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:15315:9709], server id = [2:15320:9714], tablet id = 72075186224037902, status = OK 2025-03-31T16:21:17.135857Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:15315:9709], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T16:21:17.136565Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:15316:9710], server id = [2:15322:9716], tablet id = 72075186224037903, status = OK 2025-03-31T16:21:17.136622Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:15316:9710], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T16:21:17.137402Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-03-31T16:21:17.137870Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-03-31T16:21:17.138705Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:15312:9706], server id = [2:15317:9711], tablet id = 72075186224037899 2025-03-31T16:21:17.138745Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:21:17.139062Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-03-31T16:21:17.139536Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:15313:9707], server id = [2:15318:9712], tablet id = 72075186224037900 2025-03-31T16:21:17.139556Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:21:17.139658Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-03-31T16:21:17.139951Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:15314:9708], server id = [2:15319:9713], tablet id = 72075186224037901 2025-03-31T16:21:17.139971Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:21:17.140062Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:15326:9720], server id = [2:15329:9723], tablet id = 72075186224037904, status = OK 2025-03-31T16:21:17.140110Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:15326:9720], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T16:21:17.140330Z node 2 
:STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2025-03-31T16:21:17.140923Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:15327:9721], server id = [2:15331:9725], tablet id = 72075186224037905, status = OK 2025-03-31T16:21:17.141013Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:15327:9721], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T16:21:17.141291Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:15315:9709], server id = [2:15320:9714], tablet id = 72075186224037902 2025-03-31T16:21:17.141318Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:21:17.141534Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:15330:9724], server id = [2:15333:9727], tablet id = 72075186224037906, status = OK 2025-03-31T16:21:17.141598Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:15330:9724], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T16:21:17.142194Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:15316:9710], server id = [2:15322:9716], tablet id = 72075186224037903 2025-03-31T16:21:17.142221Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:21:17.142830Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:15332:9726], server id = [2:15334:9728], tablet id = 72075186224037907, status = OK 2025-03-31T16:21:17.142889Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:15332:9726], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T16:21:17.143234Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-03-31T16:21:17.143553Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:15335:9729], server id = [2:15337:9731], tablet id = 72075186224037908, status = OK 2025-03-31T16:21:17.143608Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:15335:9729], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T16:21:17.144289Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-03-31T16:21:17.144885Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:15326:9720], server id = [2:15329:9723], tablet id = 72075186224037904 2025-03-31T16:21:17.144910Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:21:17.145192Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-03-31T16:21:17.145549Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:15327:9721], server id = [2:15331:9725], tablet id = 72075186224037905 2025-03-31T16:21:17.145574Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:21:17.145707Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-03-31T16:21:17.145830Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:15330:9724], server id = [2:15333:9727], tablet id = 72075186224037906 2025-03-31T16:21:17.145853Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:21:17.145917Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-03-31T16:21:17.145955Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-03-31T16:21:17.146129Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-03-31T16:21:17.146328Z node 2 :STATISTICS DEBUG: [72075186224037894] 
TTxAggregateStatisticsResponse::Complete 2025-03-31T16:21:17.146714Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-31T16:21:17.149426Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:15332:9726], server id = [2:15334:9728], tablet id = 72075186224037907 2025-03-31T16:21:17.149462Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:21:17.150043Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:15335:9729], server id = [2:15337:9731], tablet id = 72075186224037908 2025-03-31T16:21:17.150074Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:21:17.150483Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-03-31T16:21:17.178677Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZjdkYWY1OTYtMTU2ZjdhYTItOTM2NDkwODAtYmJiYTg3NWI=, TxId: 2025-03-31T16:21:17.178754Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZjdkYWY1OTYtMTU2ZjdhYTItOTM2NDkwODAtYmJiYTg3NWI=, TxId: 2025-03-31T16:21:17.179398Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-31T16:21:17.193886Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-31T16:21:17.193963Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=.sVAG, ActorId=[1:4099:3324] 2025-03-31T16:21:17.197226Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:15378:9009]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-03-31T16:21:17.197520Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-31T16:21:17.197567Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-03-31T16:21:17.199707Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-31T16:21:17.199778Z node 1 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-03-31T16:21:17.199823Z node 1 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 2 ] 2025-03-31T16:21:17.209864Z node 1 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 1 Answer: '/Root/Database/Table1[Value]=4'
>> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_NewSourceId_Test [GOOD]
>> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_RegisteredSourceId_Test
|90.3%| [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest
>> TPartitionTests::TestBatchingWithProposeConfig [GOOD]
|90.3%| [TA] $(B)/ydb/library/ncloud/impl/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|90.3%| [TA] {RESULT} $(B)/ydb/library/ncloud/impl/ut/test-results/unittest/{meta.json ... results_accumulator.log}
------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::TestBatchingWithProposeConfig [GOOD]
Test command err: 2025-03-31T16:21:07.672464Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:07.672574Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-31T16:21:07.689946Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [1:179:2194] 2025-03-31T16:21:07.692231Z node 1 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-03-31T16:21:07.000000Z 2025-03-31T16:21:07.692299Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [1:179:2194] Got cmd write: CmdDeleteRange { Range { From: "m0000000003cclient-1" IncludeFrom: true To: "m0000000003cclient-1" IncludeTo: true } } CmdDeleteRange { Range { From: "m0000000003uclient-1" IncludeFrom: true To: "m0000000003uclient-1" IncludeTo: true } } CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\270\352\206\350\3362" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-2" Value: "\010\000\020\000\030\000\"\000(\0000\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-2" Value: "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000" StorageChannel: INLINE } CmdWrite { Key: "_config_3" Value: "\022\t\030\200\243\0058\200\200\200\005\030\000\"\027rt3.dc1--account--topic(\0020\001\272\001 /Root/PQ/rt3.dc1--account--topic\352\001\000\372\001\002\010\000\212\002\007account\220\002\001\242\002\002\010\000\252\002\016\n\010client-2@\000H\000" StorageChannel: INLINE } 2025-03-31T16:21:08.433831Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:08.433880Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-31T16:21:08.444375Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {0, {0, 1111}, 123}, State: StateInit] bootstrapping {0, {0, 1111}, 123} [2:179:2194] 2025-03-31T16:21:08.445517Z node 2 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:{0, {0, 1111}, 123}:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-31T16:21:08.445567Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {0, {0, 1111}, 123}, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition {0, {0, 1111}, 123} generation 0 [2:179:2194] 2025-03-31T16:21:08.754811Z node 2 :PERSQUEUE INFO: new Cookie owner1|e06906ad-8b81185-c9fa4a31-fa934178_0 generated for partition {0, {0, 1111}, 123} topic 'Root/PQ/rt3.dc1--account--topic' owner owner1 Send write: 0 Send write: 1 Send write: 2 Send write: 3 Send write: 4 Send write: 5 Send write: 6 Send write: 7 Send write: 8 Send write: 9 Got write info response.
Body keys: 0, head: 10, src id info: 1 2025-03-31T16:21:11.819309Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:11.819368Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-31T16:21:11.831364Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {0, {0, 1111}, 123}, State: StateInit] bootstrapping {0, {0, 1111}, 123} [3:179:2194] 2025-03-31T16:21:11.834166Z node 3 :PERSQUEUE INFO: [rt3.dc1--account--topic:{0, {0, 1111}, 123}:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-31T16:21:11.834221Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {0, {0, 1111}, 123}, State: StateInit] init complete for topic 'rt3.dc1--account--topic' partition {0, {0, 1111}, 123} generation 0 [3:179:2194] 2025-03-31T16:21:12.445591Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:12.445664Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-31T16:21:12.460104Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitConfigStep Got KV request 2025-03-31T16:21:12.460455Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-31T16:21:12.460737Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [4:176:2191] 2025-03-31T16:21:12.461665Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDiskStatusStep Got KV request 2025-03-31T16:21:12.461844Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitMetaStep Got KV request 2025-03-31T16:21:12.462028Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInfoRangeStep Got KV request 2025-03-31T16:21:12.462886Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataRangeStep Got KV request 2025-03-31T16:21:12.463460Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:TInitDataRangeStep] Got data offset 0 count 50 size 684 so 0 eo 50 d0000000000_00000000000000000000_00000_0000000050_00000 2025-03-31T16:21:12.463578Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataStep 2025-03-31T16:21:12.463619Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-03-31T16:21:12.463674Z node 4 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-03-31T16:21:12.000000Z 2025-03-31T16:21:12.463715Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Initializing completed. 
2025-03-31T16:21:12.463768Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [4:176:2191] 2025-03-31T16:21:12.463838Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partition 0 so 0 endOffset 50 Head Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 SYNC INIT DATA KEY: d0000000000_00000000000000000000_00000_0000000050_00000 size 684 2025-03-31T16:21:12.463894Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-03-31T16:21:12.464029Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-03-31T16:21:12.464117Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-1 send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2025-03-31T16:21:12.464195Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-0 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2025-03-31T16:21:12.464575Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 0 Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-1 offset 0 count 1 size 1024000 endOffset 50 max time lag 0ms effective offset 0 2025-03-31T16:21:12.464801Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 0 added 1 blobs, size 684 count 50 last offset 1, current partition end offset: 50 2025-03-31T16:21:12.464863Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Reading cookie 0. Send blob request.
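
Blob keys such as d0000000000_00000000000000000000_00000_0000000050_00000 recur throughout these traces. Matching them against the neighbouring line "Got data offset 0 count 50 ..." suggests the fields are partition, start offset, part number and message count; the meaning of the trailing group is a guess here. A hypothetical decoder under those assumptions (not the YDB key codec):

#include <cinttypes>
#include <cstdint>
#include <cstdio>
#include <string>

// Field meanings are matched against "Got data offset 0 count 50 ..." in the
// trace; the last group is assumed to count internal parts.
struct TDataKey {
    uint32_t Partition;
    uint64_t Offset;
    uint32_t PartNo;
    uint32_t Count;
    uint32_t InternalParts; // assumption, see lead-in
};

bool ParseDataKey(const std::string& key, TDataKey& out) {
    if (key.empty() || key[0] != 'd')
        return false;
    return std::sscanf(key.c_str() + 1,
                       "%10" SCNu32 "_%20" SCNu64 "_%5" SCNu32 "_%10" SCNu32 "_%5" SCNu32,
                       &out.Partition, &out.Offset, &out.PartNo,
                       &out.Count, &out.InternalParts) == 5;
}

int main() {
    TDataKey k{};
    if (ParseDataKey("d0000000000_00000000000000000000_00000_0000000050_00000", k))
        std::printf("partition=%" PRIu32 " offset=%" PRIu64 " partNo=%" PRIu32 " count=%" PRIu32 "\n",
                    k.Partition, k.Offset, k.PartNo, k.Count);
    return 0;
}
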
Create distr tx with id = 0 and act no: 1 2025-03-31T16:21:13.756920Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 0 Wait batch completion 2025-03-31T16:21:15.029832Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-03-31T16:21:15.030021Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: (empty maybe) 2025-03-31T16:21:15.047235Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] schedule TEvPersQueue::TEvProposeTransactionResult(COMPLETE), reason= 2025-03-31T16:21:15.047482Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-31T16:21:15.047541Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-31T16:21:15.047586Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-31T16:21:15.047626Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-31T16:21:15.047666Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-03-31T16:21:15.047686Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cclient-0 2025-03-31T16:21:15.047708Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uclient-0 2025-03-31T16:21:15.047738Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-31T16:21:15.047775Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Got batch complete: 2 Got KV request Got KV request Send disk status response with cookie: 0 Wait immediate tx complete 2 2025-03-31T16:21:16.243626Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-03-31T16:21:16.243735Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-1 reinit with generation 7 done 2025-03-31T16:21:16.243934Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-31T16:21:16.244004Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-31T16:21:16.244049Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-31T16:21:16.244092Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-31T16:21:16.244123Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cclient-1 2025-03-31T16:21:16.244157Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uclient-1 2025-03-31T16:21:16.244197Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cclient-0 2025-03-31T16:21:16.244232Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uclient-0 2025-03-31T16:21:16.244266Z node 4 :PERSQUEUE DEBUG: [PQ: 
72057594037927937, Partition: 0, State: StateIdle] _config_0 2025-03-31T16:21:16.244306Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-31T16:21:16.244348Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, P ... 27937] doesn't have tx writes info 2025-03-31T16:21:16.660060Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitConfigStep Got KV request 2025-03-31T16:21:16.660375Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-31T16:21:16.660624Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [5:178:2193] 2025-03-31T16:21:16.661532Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDiskStatusStep Got KV request 2025-03-31T16:21:16.661711Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitMetaStep Got KV request 2025-03-31T16:21:16.661868Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInfoRangeStep Got KV request 2025-03-31T16:21:16.662612Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataRangeStep Got KV request 2025-03-31T16:21:16.663075Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:TInitDataRangeStep] Got data offset 0 count 50 size 684 so 0 eo 50 d0000000000_00000000000000000000_00000_0000000050_00000 2025-03-31T16:21:16.663200Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataStep 2025-03-31T16:21:16.663242Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-03-31T16:21:16.663291Z node 5 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-03-31T16:21:16.000000Z 2025-03-31T16:21:16.663331Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Initializing completed. 2025-03-31T16:21:16.663381Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [5:178:2193] 2025-03-31T16:21:16.663439Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partition 0 so 0 endOffset 50 Head Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 SYNC INIT DATA KEY: d0000000000_00000000000000000000_00000_0000000050_00000 size 684 2025-03-31T16:21:16.663493Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events.
Count 0 2025-03-31T16:21:16.663587Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-03-31T16:21:16.663633Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-1 send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2025-03-31T16:21:16.663676Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-0 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2025-03-31T16:21:16.663967Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 0 Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-1 offset 0 count 1 size 1024000 endOffset 50 max time lag 0ms effective offset 0 2025-03-31T16:21:16.664169Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 0 added 1 blobs, size 684 count 50 last offset 1, current partition end offset: 50 2025-03-31T16:21:16.664219Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Reading cookie 0. Send blob request. Create distr tx with id = 0 and act no: 1 2025-03-31T16:21:17.903612Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 0 2025-03-31T16:21:17.903743Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvProposePartitionConfig Step 1, TxId 3 Wait batch completion 2025-03-31T16:21:19.095127Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-03-31T16:21:19.095293Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: (empty maybe) 2025-03-31T16:21:19.095347Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] schedule TEvPersQueue::TEvProposeTransactionResult(COMPLETE), reason= 2025-03-31T16:21:19.095558Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-31T16:21:19.095601Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-31T16:21:19.095634Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-31T16:21:19.095666Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-31T16:21:19.095693Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-03-31T16:21:19.095714Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cclient-0 2025-03-31T16:21:19.095738Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uclient-0 2025-03-31T16:21:19.095782Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-31T16:21:19.095814Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Got batch complete: 2 Got KV request Got KV request Send 
disk status response with cookie: 0 Wait immediate tx complete 2 2025-03-31T16:21:20.260449Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 Got batch complete: 1 Got propose result: Origin: 72057594037927937 Status: COMPLETE TxId: 2 Wait batch completion 2025-03-31T16:21:20.260761Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 3 2025-03-31T16:21:20.260887Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-1 drop done 2025-03-31T16:21:20.261144Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-31T16:21:20.261190Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-31T16:21:20.261234Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [m0000000000cclient-1, m0000000000cclient-1] 2025-03-31T16:21:20.261262Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [m0000000000uclient-1, m0000000000uclient-1] 2025-03-31T16:21:20.261296Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-31T16:21:20.261333Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-31T16:21:20.261363Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-03-31T16:21:20.261387Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cclient-0 2025-03-31T16:21:20.261409Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uclient-0 2025-03-31T16:21:20.261432Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] _config_0 2025-03-31T16:21:20.261464Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-31T16:21:20.261508Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Send disk status response with cookie: 0 2025-03-31T16:21:20.272553Z node 5 :PERSQUEUE DEBUG: tablet 72057594037927937 topic 'Root/PQ/rt3.dc1--account--topic' partition 0 error: cannot finish read request.
Consumer client-1 is gone from partition 2025-03-31T16:21:20.272689Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-03-31T16:21:20.272758Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: (empty maybe) 2025-03-31T16:21:20.272803Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] schedule TEvPersQueue::TEvProposeTransactionResult(COMPLETE), reason= 2025-03-31T16:21:20.272940Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-31T16:21:20.272983Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-31T16:21:20.273018Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-31T16:21:20.273050Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-31T16:21:20.273073Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cclient-0 2025-03-31T16:21:20.273089Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uclient-0 2025-03-31T16:21:20.273126Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-31T16:21:20.273158Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request 2025-03-31T16:21:20.273851Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-0 readTimeStamp for offset 5 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-03-31T16:21:20.273930Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-0 send read request for offset 5 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 Got KV request Got batch complete: 1 Got KV request Got KV request Got KV request 2025-03-31T16:21:20.274372Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 1 Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-0 offset 5 count 1 size 1024000 endOffset 50 max time lag 0ms effective offset 5 2025-03-31T16:21:20.274588Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 1 added 1 blobs, size 0 count 45 last offset 6, current partition end offset: 50 2025-03-31T16:21:20.274644Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Reading cookie 1. Send blob request. 
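
Reading the transaction traces above as a whole, a distributed tx touches the partition in three visible phases: TEvTxCalcPredicate is answered first, TEvTxCommit then folds the operation into a KV batch (the DumpKeyValueRequest sections), and only after the KV write is acknowledged (TPartition::HandleWriteResponse) does a TEvProposeTransactionResult with Status: COMPLETE appear. A compressed model of that ordering; the transitions are an interpretation of the trace, not the partition actor's code:

#include <cassert>
#include <cstdio>
#include <cstring>

enum class ETxState { Idle, PredicateAnswered, KvWriteInFlight, Complete };

// Transition table inferred from the DEBUG trace; illustrative only.
ETxState OnEvent(ETxState s, const char* ev) {
    if (s == ETxState::Idle && !std::strcmp(ev, "TEvTxCalcPredicate"))
        return ETxState::PredicateAnswered;  // predicate computed, tx parked in the batch
    if (s == ETxState::PredicateAnswered && !std::strcmp(ev, "TEvTxCommit"))
        return ETxState::KvWriteInFlight;    // CommitWriteOperations -> KV write batch
    if (s == ETxState::KvWriteInFlight && !std::strcmp(ev, "HandleWriteResponse"))
        return ETxState::Complete;           // TEvProposeTransactionResult(COMPLETE)
    return s;
}

int main() {
    ETxState s = ETxState::Idle;
    for (const char* ev : {"TEvTxCalcPredicate", "TEvTxCommit", "HandleWriteResponse"})
        s = OnEvent(s, ev);
    assert(s == ETxState::Complete);
    std::puts("Status: COMPLETE");
    return 0;
}
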
Got KV request Got KV request Wait batch completion Send disk status response with cookie: 0 Wait immediate tx complete 4 2025-03-31T16:21:20.274926Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 Got propose result: Origin: 72057594037927937 Status: COMPLETE TxId: 4 >> KqpPg::EquiJoin+useSink [GOOD] >> KqpPg::EquiJoin-useSink >> KqpPg::AlterSequence [GOOD] >> KqpPg::AlterColumnSetDefaultFromSequence >> KqpPg::PgCreateTable [GOOD] >> KqpPg::PgUpdate+useSink >> KqpPg::InsertNoTargetColumns_SerialNotNull+useSink [GOOD] >> KqpPg::InsertNoTargetColumns_SerialNotNull-useSink >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx >> TSchemeShardExtSubDomainTest::Fake [GOOD] >> TSchemeShardExtSubDomainTest::CreateWithOnlyDotsNotAllowed >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSchemeShard-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::Create >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-ExternalHive >> TPartitionTests::ConflictingCommitsInSeveralBatches [GOOD] >> TPartitionTests::ConflictingCommitFails >> TPartitionTests::NonConflictingCommitsBatch [GOOD] >> TListAllTopicsTests::RecursiveList [GOOD] >> TListAllTopicsTests::ListLimitAndPaging >> TSchemeShardExtSubDomainTest::Create [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlter >> JsonProtoConversion::JsonToProtoSingleValue [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-AlterDatabaseCreateHiveFirst |90.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::NonConflictingCommitsBatch [GOOD] Test command err: 2025-03-31T16:21:14.586159Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:14.586224Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-31T16:21:14.602315Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:179:2194] 2025-03-31T16:21:14.603795Z node 1 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed.
Value 2025-03-31T16:21:14.000000Z 2025-03-31T16:21:14.603857Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [1:179:2194] Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\020\n\030\000(\220\241\207\350\3362" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\020\n\030\000(\220\241\207\350\3362" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: "\010\005\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\005\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\020\n\030\000(\220\241\207\350\3362" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: "\010\005\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\005\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } 2025-03-31T16:21:14.994679Z node 1 :PERSQUEUE WARN: [PQ: 72057594037927937, Partition: 0, State: StateIdle] commit to future - topic Root/PQ/rt3.dc1--account--topic partition 0 client client EndOffset 10 offset 13 Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\020\n\030\000(\220\241\207\350\3362" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: "\010\n\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\n\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } 2025-03-31T16:21:15.345139Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:15.345194Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-31T16:21:15.356593Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [2:179:2194] 2025-03-31T16:21:15.358148Z node 2 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-03-31T16:21:15.000000Z 2025-03-31T16:21:15.358205Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [2:179:2194] 2025-03-31T16:21:15.921727Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:15.921796Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-31T16:21:15.934646Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [3:179:2194] 2025-03-31T16:21:15.935505Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 1 generation 0 [3:179:2194] 2025-03-31T16:21:15.936072Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateIdle] SubDomainOutOfSpace was changed. 
Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. SubDomainOutOfSpace: 1 2025-03-31T16:21:15.936153Z node 3 :PERSQUEUE INFO: new Cookie owner1|7e1352fd-49910989-cbbbac6f-36a13acd_0 generated for partition 1 topic 'Root/PQ/rt3.dc1--account--topic' owner owner1 Send disk status response with cookie: 0 2025-03-31T16:21:16.246035Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateIdle] SubDomainOutOfSpace was changed. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. SubDomainOutOfSpace: 0 2025-03-31T16:21:16.516563Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:16.516616Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-31T16:21:16.527819Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {0, {0, 1111}, 123}, State: StateInit] bootstrapping {0, {0, 1111}, 123} [4:177:2192] 2025-03-31T16:21:16.530877Z node 4 :PERSQUEUE INFO: [rt3.dc1--account--topic:{0, {0, 1111}, 123}:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-31T16:21:16.530939Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {0, {0, 1111}, 123}, State: StateInit] init complete for topic 'rt3.dc1--account--topic' partition {0, {0, 1111}, 123} generation 0 [4:177:2192] 2025-03-31T16:21:16.839660Z node 4 :PERSQUEUE INFO: new Cookie owner1|99fbeed7-dc74cf72-f7695ed8-b723da6f_0 generated for partition {0, {0, 1111}, 123} topic 'rt3.dc1--account--topic' owner owner1 Send write: 0 Send write: 1 Send write: 2 Send write: 3 Send write: 4 Send write: 5 Send write: 6 Send write: 7 Send write: 8 Send write: 9 Got write info response. Body keys: 0, head: 10, src id info: 1 2025-03-31T16:21:19.937429Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:19.937515Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-31T16:21:19.955587Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitConfigStep Got KV request 2025-03-31T16:21:19.955949Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-31T16:21:19.956302Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [5:178:2193] 2025-03-31T16:21:19.957378Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDiskStatusStep Got KV request 2025-03-31T16:21:19.957574Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitMetaStep Got KV request 2025-03-31T16:21:19.957746Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInfoRangeStep Got KV request 2025-03-31T16:21:19.959032Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataRangeStep Got KV request 2025-03-31T16:21:19.959572Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:TInitDataRangeStep] Got data offset 0 count 50 size 684 so 0 eo 50 d0000000000_00000000000000000000_00000_0000000050_00000 2025-03-31T16:21:19.959669Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataStep 2025-03-31T16:21:19.959706Z node 5 :PERSQUEUE DEBUG: 
[Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-03-31T16:21:19.959759Z node 5 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-03-31T16:21:19.000000Z 2025-03-31T16:21:19.959795Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Initializing completed. 2025-03-31T16:21:19.959851Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [5:178:2193] 2025-03-31T16:21:19.959913Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partition 0 so 0 endOffset 50 Head Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 SYNC INIT DATA KEY: d0000000000_00000000000000000000_00000_0000000050_00000 size 684 2025-03-31T16:21:19.959978Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-03-31T16:21:19.960103Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-2 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-03-31T16:21:19.960164Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-2 send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2025-03-31T16:21:19.960211Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2025-03-31T16:21:19.960247Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-0 readTimeStamp for offset 0 initiated queuesize 1 startOffset 0 ReadingTimestamp 1 rrg 0 2025-03-31T16:21:19.960596Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 0 Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-2 offset 0 count 1 size 1024000 endOffset 50 max time lag 0ms effective offset 0 2025-03-31T16:21:19.960786Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 0 added 1 blobs, size 684 count 50 last offset 1, current partition end offset: 50 2025-03-31T16:21:19.960836Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Reading cookie 0. Send blob request.
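
Each "read cookie N ... added M blobs" pair above is the blob-selection step of a read: given the requested offset, the partition's endOffset and the body-blob layout recovered at init, the tablet picks the blobs overlapping the requested range and then issues the blob request. Below is a sketch of that overlap test under the layout shown in this trace (one body blob covering offsets [0, 50)); it is an assumption-level model, since the real logic in ydb/core/persqueue also handles the head, size limits and time lag.

#include <cstdint>
#include <cstdio>
#include <vector>

struct TBlob { uint64_t Offset; uint32_t Count; uint32_t Size; };

// Pick the body blobs overlapping [effectiveOffset, endOffset); sketch only.
std::vector<TBlob> SelectBlobs(const std::vector<TBlob>& body,
                               uint64_t effectiveOffset, uint64_t endOffset) {
    std::vector<TBlob> picked;
    for (const TBlob& b : body)
        if (b.Offset + b.Count > effectiveOffset && b.Offset < endOffset)
            picked.push_back(b);
    return picked;
}

int main() {
    // Mirrors the trace: one body blob at offset 0 holding 50 messages, 684 bytes.
    const std::vector<TBlob> body = {{0, 50, 684}};
    const auto picked = SelectBlobs(body, /*effectiveOffset=*/0, /*endOffset=*/50);
    if (!picked.empty())
        std::printf("added %zu blobs, size %u count %u\n",
                    picked.size(), picked[0].Size, picked[0].Count);
    return 0;
}
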
2025-03-31T16:21:20.275575Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-0 session is set to 0 (startOffset 0) session session-client-0 2025-03-31T16:21:20.275907Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-31T16:21:20.275966Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-31T16:21:20.276006Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-31T16:21:20.276064Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-31T16:21:20.276096Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cclient-0 2025-03-31T16:21:20.276146Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uclient-0 2025-03-31T16:21:20.276183Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-31T16:21:20.276224Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Got batch complete: 1 Got KV request Got KV request Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\0202\030\000(\230\310\207\350\3362" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient-0" Value: "\010\000\020\001\030\001\"\020session-client-0(\0000\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient-0" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-client-0" StorageChannel: INLINE } 2025-03-31T16:21:20.297202Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 Create distr tx with id = 0 and act no: 1 Created Tx with id 3 as act# 3 Created Tx with id 4 as act# 4 2025-03-31T16:21:21.230553Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 0 2025-03-31T16:21:21.230682Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 3 2025-03-31T16:21:21.230722Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 4 2025-03-31T16:21:22.461338Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse Got batch complete: 6 Wait batch completion Wait kv request 2025-03-31T16:21:22.461754Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-0 offset is set to 5 (startOffset 0) session session-client-0 2025-03-31T16:21:22.461819Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 3 2025-03-31T16:21:22.461864Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 3 2025-03-31T16:21:22.461906Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 4 2025-03-31T16:21:22.461928Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] 
TPartition::CommitWriteOperations TxId: 4 2025-03-31T16:21:22.461968Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-0 offset is set to 10 (startOffset 0) session session-client-0 2025-03-31T16:21:22.479311Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] schedule TEvPersQueue::TEvProposeTransactionResult(ABORTED), reason=incorrect offset range (gap) 2025-03-31T16:21:22.479585Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-31T16:21:22.479630Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-31T16:21:22.479668Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-31T16:21:22.479700Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-31T16:21:22.479724Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-03-31T16:21:22.479740Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cclient-2 2025-03-31T16:21:22.479755Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uclient-2 2025-03-31T16:21:22.479788Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cclient-1 2025-03-31T16:21:22.479810Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uclient-1 2025-03-31T16:21:22.479828Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cclient-0 2025-03-31T16:21:22.479843Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uclient-0 2025-03-31T16:21:22.479878Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-31T16:21:22.479915Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Wait tx committed for tx 3 2025-03-31T16:21:22.500885Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 Wait tx committed for tx 4 Wait immediate tx complete 6 Got propose result: Origin: 72057594037927937 Status: ABORTED TxId: 6 Errors { Kind: BAD_REQUEST Reason: "incorrect offset range (gap)" } >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-ExternalHive >> TSchemeShardExtSubDomainTest::CreateAndAlter-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlter-AlterDatabaseCreateHiveFirst >> TPQTest::TestPQPartialRead [GOOD] >> TPQTest::TestPQRead >> TPartitionTests::EndWriteTimestamp_HeadKeys >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-ExternalHive >> TSchemeShardExtSubDomainTest::CreateAndAlter-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlter-AlterDatabaseCreateHiveFirst >> TPQTabletTests::Huge_ProposeTransacton [GOOD] >> JsonProtoConversion::NlohmannJsonToProtoMap >> TSchemeShardExtSubDomainTest::Drop [GOOD] >> TSchemeShardExtSubDomainTest::Drop-ExternalHive >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalHive-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSysViewProcessor-AlterDatabaseCreateHiveFirst-false >> TPartitionTests::EndWriteTimestamp_HeadKeys [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultNegativeCase-useSink [GOOD] >> JsonProtoConversion::NlohmannJsonToProtoMap [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithExternalHive-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-ExternalHive |90.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::NlohmannJsonToProtoMap [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-AlterDatabaseCreateHiveFirst >> TSchemeShardExtSubDomainTest::CreateAndAlter-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlter-AlterDatabaseCreateHiveFirst-ExternalHive ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::EndWriteTimestamp_HeadKeys [GOOD] Test command err: 2025-03-31T16:21:06.419404Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable
release} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::JsonToProtoSingleValue [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithExternalHive-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSchemeShard-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalHive-AlterDatabaseCreateHiveFirst-false >> TPartitionTests::EndWriteTimestamp_DataKeysBody [GOOD] >> TSchemeShardExtSubDomainTest::NothingInsideGSS-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::Drop >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools >> TPartitionTests::EndWriteTimestamp_FromMeta >> TSchemeShardExtSubDomainTest::CreateAndAlter [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlter-ExternalHive >> KqpPg::CreateTempTable [GOOD] >> KqpPg::CreateTempTableSerial |90.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> TPartitionTests::EndWriteTimestamp_FromMeta [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithExternalHive-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithExternalHive-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalHive-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalHive-AlterDatabaseCreateHiveFirst-true >> TPQTest::TestPQPartialRead [GOOD] >> TPQTest::TestPQRead >> TPartitionTests::EndWriteTimestamp_HeadKeys >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-ExternalHive >> TSchemeShardExtSubDomainTest::CreateAndAlter-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlter-AlterDatabaseCreateHiveFirst >> TPQTabletTests::Huge_ProposeTransacton [GOOD] >> JsonProtoConversion::NlohmannJsonToProtoMap >> TSchemeShardExtSubDomainTest::Drop [GOOD] >> TSchemeShardExtSubDomainTest::Drop-ExternalHive >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalHive-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSysViewProcessor-AlterDatabaseCreateHiveFirst-false >> TPartitionTests::EndWriteTimestamp_HeadKeys [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultNegativeCase-useSink [GOOD] >> JsonProtoConversion::NlohmannJsonToProtoMap [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithExternalHive-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-ExternalHive |90.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::NlohmannJsonToProtoMap [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-AlterDatabaseCreateHiveFirst >> TSchemeShardExtSubDomainTest::CreateAndAlter-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlter-AlterDatabaseCreateHiveFirst-ExternalHive ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::EndWriteTimestamp_HeadKeys [GOOD] Test command err: 2025-03-31T16:21:06.419404Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable 
metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:06.419495Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-31T16:21:06.436654Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitConfigStep Got KV request 2025-03-31T16:21:06.436925Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-31T16:21:06.437303Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:178:2193] 2025-03-31T16:21:06.438205Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDiskStatusStep Got KV request 2025-03-31T16:21:06.438430Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitMetaStep Got KV request 2025-03-31T16:21:06.438594Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInfoRangeStep Got KV request 2025-03-31T16:21:06.438779Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataRangeStep Got KV request 2025-03-31T16:21:06.439212Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:TInitDataRangeStep] Got data offset 0 count 50 size 684 so 0 eo 50 d0000000000_00000000000000000000_00000_0000000050_00000 2025-03-31T16:21:06.439325Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataStep 2025-03-31T16:21:06.439363Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-03-31T16:21:06.439407Z node 1 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-03-31T16:21:06.000000Z 2025-03-31T16:21:06.439443Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Initializing completed. 2025-03-31T16:21:06.439500Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [1:178:2193] 2025-03-31T16:21:06.439558Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partition 0 so 0 endOffset 50 Head Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 SYNC INIT DATA KEY: d0000000000_00000000000000000000_00000_0000000050_00000 size 684 2025-03-31T16:21:06.439622Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events.
Count 0 Create distr tx with id = 0 and act no: 1 2025-03-31T16:21:07.710915Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 0 2025-03-31T16:21:08.978095Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse Got batch complete: 1 Create distr tx with id = 2 and act no: 3 2025-03-31T16:21:08.978436Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 2 2025-03-31T16:21:08.978524Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 0 2025-03-31T16:21:08.978567Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 0 2025-03-31T16:21:08.978628Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-03-31T16:21:08.978709Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0, NewHead=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 2025-03-31T16:21:10.192636Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse Got batch complete: 1 2025-03-31T16:21:10.192823Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 2 2025-03-31T16:21:10.192869Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 2 2025-03-31T16:21:10.192902Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-03-31T16:21:10.192939Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0, NewHead=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 2025-03-31T16:21:10.193107Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-31T16:21:10.193143Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-31T16:21:10.193200Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-31T16:21:10.193228Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psrc1 2025-03-31T16:21:10.193264Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-31T16:21:10.193285Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-31T16:21:10.193313Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-03-31T16:21:10.193337Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-31T16:21:10.193362Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Send disk status response with cookie: 0 Wait tx committed for tx 0 2025-03-31T16:21:10.204278Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 2 Wait tx committed for tx 2 2025-03-31T16:21:10.597994Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:10.598058Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-31T16:21:10.609555Z node 2 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitConfigStep Got KV request 2025-03-31T16:21:10.609699Z node 2 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-31T16:21:10.609875Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:178:2193] 2025-03-31T16:21:10.610649Z node 2 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDiskStatusStep Got KV request 2025-03-31T16:21:10.610812Z node 2 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitMetaStep Got KV request 2025-03-31T16:21:10.610968Z node 2 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInfoRangeStep Got KV request 2025-03-31T16:21:10.611161Z node 2 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataRangeStep Got KV request 2025-03-31T16:21:10.611568Z node 2 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:TInitDataRangeStep] Got data offset 0 count 1 size 684 so 0 eo 1 d0000000000_00000000000000000000_00000_0000000001_00000 2025-03-31T16:21:10.611655Z node 2 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataStep 2025-03-31T16:21:10.611718Z node 2 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-03-31T16:21:10.611768Z node 2 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-03-31T16:21:10.000000Z 2025-03-31T16:21:10.611806Z node 2 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Initializing completed. 2025-03-31T16:21:10.611848Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [2:178:2193] 2025-03-31T16:21:10.611903Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partition 0 so 0 endOffset 1 Head Offset 1 PartNo 0 PackedSize 0 count 0 nextOffset 1 batches 0 SYNC INIT DATA KEY: d0000000000_00000000000000000000_00000_0000000001_00000 size 684 2025-03-31T16:21:10.611960Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-03-31T16:21:10.921642Z node 2 :PERSQUEUE INFO: new Cookie src1|e4db9765-98371141-6771b0a5-6eb6277b_0 generated for partition 0 topic 'Root/PQ/rt3.dc1--account--topic' owner src1 2025-03-31T16:21:10.921799Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State:
StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 2 Wait tx committed for tx 2 2025-03-31T16:21:10.597994Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:10.598058Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-31T16:21:10.609555Z node 2 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitConfigStep Got KV request 2025-03-31T16:21:10.609699Z node 2 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-31T16:21:10.609875Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:178:2193] 2025-03-31T16:21:10.610649Z node 2 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDiskStatusStep Got KV request 2025-03-31T16:21:10.610812Z node 2 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitMetaStep Got KV request 2025-03-31T16:21:10.610968Z node 2 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInfoRangeStep Got KV request 2025-03-31T16:21:10.611161Z node 2 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataRangeStep Got KV request 2025-03-31T16:21:10.611568Z node 2 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:TInitDataRangeStep] Got data offset 0 count 1 size 684 so 0 eo 1 d0000000000_00000000000000000000_00000_0000000001_00000 2025-03-31T16:21:10.611655Z node 2 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataStep 2025-03-31T16:21:10.611718Z node 2 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-03-31T16:21:10.611768Z node 2 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-03-31T16:21:10.000000Z 2025-03-31T16:21:10.611806Z node 2 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Initializing completed. 2025-03-31T16:21:10.611848Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [2:178:2193] 2025-03-31T16:21:10.611903Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partitition 0 so 0 endOffset 1 Head Offset 1 PartNo 0 PackedSize 0 count 0 nextOffset 1 batches 0 SYNC INIT DATA KEY: d0000000000_00000000000000000000_00000_0000000001_00000 size 684 2025-03-31T16:21:10.611960Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-03-31T16:21:10.921642Z node 2 :PERSQUEUE INFO: new Cookie src1|e4db9765-98371141-6771b0a5-6eb6277b_0 generated for partition 0 topic 'Root/PQ/rt3.dc1--account--topic' owner src1 2025-03-31T16:21:10.921799Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. 
Partition: 0 Got batch complete: 1 Create distr tx with id = 0 and act no: 1 Create immediate tx with id = 3 and act no: 4 Create immediate tx with id = 6 and act no: 7 2025-03-31T16:21:11.879651Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 0 2025-03-31T16:21:13.106890Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse Wait batch completion 2025-03-31T16:21:13.107122Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-03-31T16:21:13.107179Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-03-31T16:21:13.107300Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src1' seqNo 1 partNo 0 2025-03-31T16:21:13.108229Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src1' seqNo 1 partNo 0 FormedBlobsCount 0 NewHead: Offset 20 PartNo 0 PackedSize 84 count 1 nextOffset 21 batches 1 2025-03-31T16:21:13.108323Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: (empty maybe) 2025-03-31T16:21:13.108369Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-03-31T16:21:13.108424Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 1 PartNo 0 PackedSize 0 count 0 nextOffset 1 batches 0, NewHead=Offset 20 PartNo 0 PackedSize 84 count 1 nextOffset 21 batches 1 2025-03-31T16:21:13.128335Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] schedule TEvPersQueue::TEvProposeTransactionResult(COMPLETE), reason= 2025-03-31T16:21:13.128487Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src1' seqNo 3 partNo 0 2025-03-31T16:21:13.134385Z node 2 :PE ... 
-account--topic' partition 0 part blob processing sourceId 'src1' seqNo 10 partNo 0 2025-03-31T16:21:20.819343Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob sourceId 'src1' seqNo 10 partNo 0 result is x0000000000_00000000000000000160_00000_0000000001_00000 size 71 2025-03-31T16:21:20.819436Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] writing blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 0 old key x0000000000_00000000000000000160_00000_0000000001_00000 new key d0000000000_00000000000000000160_00000_0000000001_00000 size 71 WTime 41173 2025-03-31T16:21:20.822908Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src1' seqNo 10 partNo 0 FormedBlobsCount 1 NewHead: Offset 230 PartNo 0 PackedSize 84 count 1 nextOffset 231 batches 1 2025-03-31T16:21:20.823681Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 0 compactOffset 230,1 HeadOffset 160 endOffset 161 curOffset 231 d0000000000_00000000000000000230_00000_0000000001_00000| size 71 WTime 41173 2025-03-31T16:21:20.823860Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-31T16:21:20.823896Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-31T16:21:20.823924Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-03-31T16:21:20.823959Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-31T16:21:20.823989Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] d0000000000_00000000000000000160_00000_0000000001_00000 2025-03-31T16:21:20.824014Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psrc1 2025-03-31T16:21:20.824049Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] d0000000000_00000000000000000230_00000_0000000001_00000| 2025-03-31T16:21:20.824072Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-31T16:21:20.824087Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-31T16:21:20.824102Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-03-31T16:21:20.824125Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-31T16:21:20.824172Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Got batch complete: 1 Got KV request Got KV request Send disk status response with cookie: 0 Wait tx committed for tx 21 2025-03-31T16:21:20.856216Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 17 WriteNewSizeFromSupportivePartitions# 1 2025-03-31T16:21:20.856315Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-03-31T16:21:20.856392Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src1', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 10, partNo: 0, Offset: 230 is stored on disk 2025-03-31T16:21:20.856673Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-31T16:21:20.856715Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-31T16:21:20.856754Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [d0000000000_00000000000000000160_00000_0000000001_00000|, d0000000000_00000000000000000160_00000_0000000001_00000|] 2025-03-31T16:21:20.856799Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-31T16:21:20.856842Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-31T16:21:20.856875Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-31T16:21:20.856917Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Create distr tx with id = 24 and act no: 25 Create distr tx with id = 26 and act no: 27 Create immediate tx with id = 28 and act no: 29 2025-03-31T16:21:21.821267Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 24 2025-03-31T16:21:21.821403Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 26 2025-03-31T16:21:23.119568Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-03-31T16:21:23.119681Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 Wait batch completion 2025-03-31T16:21:23.119882Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-03-31T16:21:23.119952Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse Got batch complete: 2 Wait batch completion 2025-03-31T16:21:23.120105Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 26 2025-03-31T16:21:23.120146Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 26 2025-03-31T16:21:23.120206Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-03-31T16:21:23.120254Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 230 PartNo 0 PackedSize 71 count 1 nextOffset 231 batches 1, NewHead=Offset 231 PartNo 0 PackedSize 0 count 0 nextOffset 231 batches 0 2025-03-31T16:21:23.120351Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: (empty maybe) 2025-03-31T16:21:23.120397Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 
2025-03-31T16:21:23.120441Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 230 PartNo 0 PackedSize 71 count 1 nextOffset 231 batches 1, NewHead=Offset 231 PartNo 0 PackedSize 0 count 0 nextOffset 231 batches 0 2025-03-31T16:21:23.120483Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] schedule TEvPersQueue::TEvProposeTransactionResult(COMPLETE), reason= 2025-03-31T16:21:23.120659Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-31T16:21:23.120704Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-31T16:21:23.120735Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-31T16:21:23.120774Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psrc1 2025-03-31T16:21:23.120795Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-31T16:21:23.120810Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-31T16:21:23.120824Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-03-31T16:21:23.120846Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-31T16:21:23.120872Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Got batch complete: 1 Got KV request Got KV request Wait for no tx committed Send disk status response with cookie: 0 Wait tx committed for tx 26 2025-03-31T16:21:23.392628Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 2 Wait immediate tx complete 28 Got propose resutl: Origin: 72057594037927937 Status: COMPLETE TxId: 28 2025-03-31T16:21:23.833769Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:23.833822Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-31T16:21:23.845299Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] bootstrapping 2 [3:179:2194] 2025-03-31T16:21:23.846550Z node 3 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-03-31T16:21:23.000000Z 2025-03-31T16:21:23.846614Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 2 generation 0 [3:179:2194] 2025-03-31T16:21:24.436314Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:24.436379Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-31T16:21:24.449218Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] bootstrapping 2 [4:177:2192] 2025-03-31T16:21:24.450683Z node 4 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2025-03-31T16:21:24.450730Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 2 generation 0 [4:177:2192] 2025-03-31T16:21:25.242539Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:25.242605Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-31T16:21:25.258577Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] bootstrapping 2 [5:179:2194] >>>> ADD BLOB 0 writeTimestamp=2025-03-31T16:21:25.250487Z >>>> ADD BLOB 1 writeTimestamp=2025-03-31T16:21:25.250513Z >>>> ADD BLOB 2 writeTimestamp=2025-03-31T16:21:25.250529Z >>>> ADD BLOB 3 writeTimestamp=2025-03-31T16:21:25.250544Z >>>> ADD BLOB 4 writeTimestamp=2025-03-31T16:21:25.250557Z >>>> ADD BLOB 5 writeTimestamp=2025-03-31T16:21:25.250571Z >>>> ADD BLOB 6 writeTimestamp=2025-03-31T16:21:25.250583Z >>>> ADD BLOB 7 writeTimestamp=2025-03-31T16:21:25.250595Z >>>> ADD BLOB 8 writeTimestamp=2025-03-31T16:21:25.250607Z >>>> ADD BLOB 9 writeTimestamp=2025-03-31T16:21:25.250622Z 2025-03-31T16:21:25.261761Z node 5 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-03-31T16:21:25.000000Z 2025-03-31T16:21:25.261856Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 2 generation 0 [5:179:2194] |90.3%| [TA] $(B)/ydb/core/http_proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log} |90.4%| [TA] {RESULT} $(B)/ydb/core/http_proxy/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSysViewProcessor-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSysViewProcessor-AlterDatabaseCreateHiveFirst-true >> KqpLimits::OutOfSpaceBulkUpsertFail [GOOD] >> KqpLimits::OutOfSpaceYQLUpsertFail+useSink >> KqpPg::PgUpdate+useSink [GOOD] >> KqpPg::PgUpdate-useSink >> TPQTest::TestWritePQBigMessage [GOOD] >> TPQTest::TestWritePQ >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-AlterDatabaseCreateHiveFirst >> TSchemeShardExtSubDomainTest::Drop-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::Drop-AlterDatabaseCreateHiveFirst >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-AlterDatabaseCreateHiveFirst-ExternalHive ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::InsertValuesFromTableWithDefaultNegativeCase-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 24164, MsgBus: 10553 2025-03-31T16:20:34.319213Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488009340172578714:2193];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:34.319299Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012d3/r3tmp/tmpetbTp9/pdisk_1.dat 2025-03-31T16:20:34.682974Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:20:34.721238Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:34.722097Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:34.724232Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24164, node 1 2025-03-31T16:20:34.876940Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:34.876963Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:34.876970Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:34.877101Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10553 TClient is connected to server localhost:10553 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:35.521118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:37.117958Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009353057481136:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:37.118049Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:37.206758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:20:37.358900Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009353057481244:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:37.358968Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:37.372799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-31T16:20:37.410147Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009353057481322:2354], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:37.410236Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:37.410290Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009353057481327:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:37.413471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-03-31T16:20:37.422174Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488009353057481329:2358], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-03-31T16:20:37.509455Z node 1 :TX_PROXY ERROR: Actor# [1:7488009353057481381:2446] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 7668, MsgBus: 15585 2025-03-31T16:20:38.675709Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488009355940799384:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:38.675772Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012d3/r3tmp/tmpzXHrun/pdisk_1.dat 2025-03-31T16:20:38.890417Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:20:38.902046Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:38.902130Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:38.904604Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7668, node 2 2025-03-31T16:20:39.001592Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:39.001611Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:39.001617Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:39.001706Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15585 TClient is connected to server localhost:15585 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:39.475066Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:41.987465Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488009368825701909:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:41.987643Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:41.988098Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488009368825701936:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:41.991362Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:20:41.999075Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488009368825701938:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:20:42.084924Z node 2 :TX_PROXY ERROR: Actor# [2:7488009373120669285:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:20:42.105226Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 26844, MsgBus: 1190 2025-03-31T16:20:43.055529Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488009378913425558:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:43.055595Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012d3/r3tmp/tmpKu2d3m/pdisk_1.dat 2025-03-31T16:20:43.165666Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26844, node 3 2025-03-31T16:20:43.201149Z node 3 :HIVE ... .cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7488009510867278602:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:21:14.325609Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012d3/r3tmp/tmp7bVeII/pdisk_1.dat 2025-03-31T16:21:14.470958Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:21:14.512896Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:21:14.513010Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:21:14.514972Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5423, node 10 2025-03-31T16:21:14.575714Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:21:14.575745Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:21:14.575756Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:21:14.575895Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8150 TClient is connected to server localhost:8150 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:21:15.189114Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:21:18.839433Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7488009528047148456:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:21:18.839499Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7488009528047148448:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:21:18.839634Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:21:18.843923Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:21:18.855457Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7488009528047148462:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:21:18.928194Z node 10 :TX_PROXY ERROR: Actor# [10:7488009528047148513:2337] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:21:18.950079Z node 10 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [10:7488009528047148530:2339], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiCreateTable!
:1:1: Error: Failed to parse default expr for typename int4, error reason: Error while converting text to binary: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: invalid input syntax for type integer: "text" 2025-03-31T16:21:18.950514Z node 10 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=10&id=ZmYyZmYxZTUtNzVjZjA5NzctZmU1MWY0NS01ZDNiNDEwZg==, ActorId: [10:7488009528047148446:2330], ActorState: ExecuteState, TraceId: 01jqpg3na8e6rsgdz0nxed1cpf, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiCreateTable!
:1:1: Error: Failed to parse default expr for typename int4, error reason: Error while converting text to binary: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: invalid input syntax for type integer: "text" Trying to start YDB, gRPC: 3977, MsgBus: 27227 2025-03-31T16:21:19.847555Z node 11 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[11:7488009535844062101:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:21:19.847638Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012d3/r3tmp/tmps8nBht/pdisk_1.dat 2025-03-31T16:21:20.036054Z node 11 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:21:20.073989Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:21:20.074096Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:21:20.076071Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3977, node 11 2025-03-31T16:21:20.134972Z node 11 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:21:20.135002Z node 11 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:21:20.135014Z node 11 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:21:20.135214Z node 11 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27227 TClient is connected to server localhost:27227 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:21:20.792419Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:21:24.772954Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7488009557318899244:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:21:24.773070Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7488009557318899229:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:21:24.773386Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:21:24.777768Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:21:24.789343Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7488009557318899258:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:21:24.848146Z node 11 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7488009535844062101:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:21:24.848293Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:21:24.853568Z node 11 :TX_PROXY ERROR: Actor# [11:7488009557318899309:2337] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:21:24.872231Z node 11 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [11:7488009557318899326:2339], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiCreateTable!
:1:1: Error: Failed to parse default expr for typename int4, error reason: Error while converting text to binary: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: invalid input syntax for type integer: "text" 2025-03-31T16:21:24.872431Z node 11 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=11&id=ZDAwNzRjMDUtM2RiN2Y2YjgtMWRmYmNkMjYtYzZhNjFlMDA=, ActorId: [11:7488009557318899227:2330], ActorState: ExecuteState, TraceId: 01jqpg3tsc950cs5qs1kcqyshm, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiCreateTable!
:1:1: Error: Failed to parse default expr for typename int4, error reason: Error while converting text to binary: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: invalid input syntax for type integer: "text" >> PQCountersLabeled::ImportantFlagSwitching [GOOD] >> PQCountersLabeled::NewConsumersCountersAppear >> TColumnShardTestReadWrite::CompactionInGranule_PKInt32_Reboot >> TSchemeShardExtSubDomainTest::CreateAndAlter-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool >> TColumnShardTestReadWrite::CompactionInGranule_PKInt32 >> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSysViewProcessor-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalStatisticsAggregator-AlterDatabaseCreateHiveFirst-false ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTabletTests::Huge_ProposeTransacton [GOOD] Test command err: 2025-03-31T16:21:13.696984Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-03-31T16:21:13.700912Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-03-31T16:21:13.701207Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] doesn't have tx info 2025-03-31T16:21:13.701253Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-31T16:21:13.701285Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] no config, start with empty partitions and default config 2025-03-31T16:21:13.701336Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2025-03-31T16:21:13.701379Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:13.701464Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-31T16:21:13.731696Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:206:2212], now have 1 active actors on pipe 2025-03-31T16:21:13.731853Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-03-31T16:21:13.746816Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "consumer" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "consumer" Generation: 1 Important: true } 2025-03-31T16:21:13.749623Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "consumer" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 
TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "consumer" Generation: 1 Important: true } 2025-03-31T16:21:13.749755Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:13.750640Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "consumer" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "consumer" Generation: 1 Important: true } 2025-03-31T16:21:13.750788Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitConfigStep 2025-03-31T16:21:13.751187Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-31T16:21:13.751510Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:214:2218] 2025-03-31T16:21:13.752463Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Initializing completed. 2025-03-31T16:21:13.752527Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [1:214:2218] 2025-03-31T16:21:13.752587Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-31T16:21:13.753465Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. 
Count 0 2025-03-31T16:21:13.753566Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2025-03-31T16:21:13.753618Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2025-03-31T16:21:13.753658Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user consumer reinit request with generation 1 2025-03-31T16:21:13.753686Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user consumer reinit with generation 1 done 2025-03-31T16:21:13.753840Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-31T16:21:13.753891Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-31T16:21:13.753935Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-31T16:21:13.753971Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-31T16:21:13.753998Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cuser 2025-03-31T16:21:13.754015Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uuser 2025-03-31T16:21:13.754033Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cconsumer 2025-03-31T16:21:13.754047Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uconsumer 2025-03-31T16:21:13.754078Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-31T16:21:13.754112Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-03-31T16:21:13.754231Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-03-31T16:21:13.754269Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user consumer readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-03-31T16:21:13.754437Z node 1 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2025-03-31T16:21:13.757787Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-03-31T16:21:13.758215Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:221:2223], now have 1 active actors on pipe 2025-03-31T16:21:13.758780Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:224:2225], now have 1 active actors on pipe 2025-03-31T16:21:13.759559Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 177 RawX2: 4294969488 } TxId: 67890 Data { Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "consumer" Path: "/topic" } SendingShards: 22222 ReceivingShards: 22222 Immediate: false } 2025-03-31T16:21:13.759615Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] distributed transaction 2025-03-31T16:21:13.759700Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Propose TxId 67890, WriteId (empty maybe) 2025-03-31T16:21:13.759754Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state UNKNOWN 2025-03-31T16:21:13.759792Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State UNKNOWN 2025-03-31T16:21:13.759823Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2025-03-31T16:21:13.759870Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PREPARING 2025-03-31T16:21:13.760035Z node 1 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: PREPARED MinStep: 134 MaxStep: 30134 PredicatesReceived { TabletId: 22222 } PredicateRecipients: 22222 Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "consumer" Path: "/topic" } Kind: KIND_DATA SourceActor { RawX1: 177 RawX2: 4294969488 } Partitions { } 2025-03-31T16:21:13.760131Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-03-31T16:21:13.763524Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-03-31T16:21:13.763590Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state PREPARING 2025-03-31T16:21:13.763639Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State PREPARING 2025-03-31T16:21:13.763673Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PREPARED 2025-03-31T16:21:13.764010Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 177 RawX2: 4294969488 } TxId: 67891 Data { Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "consumer" Path: "/topic" } SendingShards: 22222 ReceivingShards: 22222 Immediate: false } 2025-03-31T16:21:13.764049Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] distributed transaction 2025-03-31T16:21:13.764119Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Propose TxId 67891, WriteId (empty maybe) 2025-03-31T16:21:13.764150Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state UNKNOWN 2025-03-31T16:21:13.764178Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, State UNKNOWN 2025-03-31T16:21:13.764212Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2025-03-31T16:21:13.764255Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, NewState PREPARING 2025-03-31T16:21:13.764389Z node 1 :PERSQUEUE DEBUG: [TxId: 67891] save tx TxId: 67891 State: PREPARED 
MinStep: 136 MaxStep: 30136 PredicatesReceived { TabletId: 22222 } PredicateRecipients: 22222 Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "consumer" Path: "/topic" } Kind: KIND_DATA SourceActor { RawX1: 177 RawX2: 4294969488 } Partitions { } 2025-03-31T16:21:13.764456Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-03-31T16:21:13.767843Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-03-31T16:21:13.767924Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state PREPARING 2025-03-31T16:21:13.768007Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, State PREP ... aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-2496" Generation: 2 Important: false } Consumers { Name: 
"fake-consumer-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-2497" Generation: 2 Important: false } Consumers { Name: 
"fake-consumer-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-2498" Generation: 2 Important: false } Consumers { Name: 
"fake-consumer-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-2499" Generation: 2 Important: false } } BootstrapConfig { } SourceActor { RawX1: 177 RawX2: 25769805968 } Partitions { Partition { PartitionId: 0 } Partition { PartitionId: 1 } } 2025-03-31T16:21:25.428845Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-03-31T16:21:25.443646Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-03-31T16:21:25.443704Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state 
CALCULATED 2025-03-31T16:21:25.443744Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State CALCULATED 2025-03-31T16:21:25.443775Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 State CALCULATED FrontTxId 67890 2025-03-31T16:21:25.443814Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState WAIT_RS 2025-03-31T16:21:25.443853Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from CALCULATED to WAIT_RS 2025-03-31T16:21:25.443928Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSet to 0 receivers. Wait TEvTxProcessing::TEvReadSet from 1 senders. 2025-03-31T16:21:25.443970Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveParticipantsDecision 0 >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt64 >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-AlterDatabaseCreateHiveFirst-ExternalHive >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent >> Normalizers::PortionsNormalizer >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-AlterDatabaseCreateHiveFirst >> TSchemeShardExtSubDomainTest::Drop-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::Drop-AlterDatabaseCreateHiveFirst-ExternalHive >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalStatisticsAggregator-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalStatisticsAggregator-AlterDatabaseCreateHiveFirst-true >> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt64 >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> KqpPg::EquiJoin-useSink [GOOD] >> KqpPg::ExplainColumnsReorder >> KqpPg::InsertNoTargetColumns_SerialNotNull-useSink [GOOD] >> TPartitionTests::ConflictingCommitFails [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent [GOOD] >> TPartitionTests::ConflictingCommitProccesAfterRollback >> TColumnShardTestReadWrite::CompactionInGranule_PKTimestamp >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_Active_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_InactiveConfig_Test >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-AlterDatabaseCreateHiveFirst [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:21:22.702598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:21:22.702681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:21:22.702709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:21:22.702737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:21:22.703767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:21:22.703798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:21:22.703918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:21:22.703979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:21:22.705144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:21:22.777412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:21:22.777474Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:21:22.782169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:21:22.782256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:21:22.782385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:21:22.789178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:21:22.789411Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:21:22.792752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:21:22.793102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:21:22.802587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:21:22.810349Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:21:22.810415Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:21:22.810546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:21:22.810580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:21:22.810686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:21:22.810850Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:21:22.819505Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:21:22.918782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: 
"MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:21:22.920348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:21:22.921946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:21:22.923813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:21:22.923903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:21:22.927503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:21:22.927678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:21:22.927901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:21:22.928072Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:21:22.928111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:21:22.928161Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:21:22.930404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:21:22.930460Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:21:22.930496Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:21:22.932424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:21:22.932473Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:21:22.932536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:21:22.932581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:21:22.938825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:21:22.940759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:21:22.940943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:21:22.941936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:21:22.942073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:21:22.942120Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:21:22.943301Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:21:22.943362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:21:22.943570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:21:22.943643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:21:22.945851Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:21:22.945896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:21:22.946036Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:21:22.946072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:21:22.946248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:21:22.946315Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:21:22.946448Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:21:22.946483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:21:22.946520Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:21:22.946548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:21:22.946581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:21:22.946663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:21:22.946712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:21:22.946739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:21:22.946793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:21:22.946832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 
2025-03-31T16:21:22.946859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:21:22.948628Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:21:22.948749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:21:22.948785Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... shard: 72057594046678944, txId: 103, path id: 1 2025-03-31T16:21:28.287015Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:205:2207], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-03-31T16:21:28.287349Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-31T16:21:28.287399Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 103:0 ProgressState 2025-03-31T16:21:28.287455Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 135 -> 240 2025-03-31T16:21:28.288206Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2025-03-31T16:21:28.288300Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2025-03-31T16:21:28.288333Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-03-31T16:21:28.288364Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-03-31T16:21:28.288395Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:21:28.288789Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-03-31T16:21:28.288864Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-03-31T16:21:28.288886Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-03-31T16:21:28.288912Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-03-31T16:21:28.288937Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2025-03-31T16:21:28.288988Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-03-31T16:21:28.290889Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72075186233409546 at ss 
72057594046678944 2025-03-31T16:21:28.290939Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72075186233409546 at ss 72057594046678944 2025-03-31T16:21:28.290966Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72075186233409546 at ss 72057594046678944 2025-03-31T16:21:28.290990Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72075186233409546 at ss 72057594046678944 2025-03-31T16:21:28.291131Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-31T16:21:28.291173Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-03-31T16:21:28.291307Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-03-31T16:21:28.291356Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-31T16:21:28.291423Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-03-31T16:21:28.291463Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-31T16:21:28.291507Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-03-31T16:21:28.291560Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-31T16:21:28.291611Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-03-31T16:21:28.291649Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-03-31T16:21:28.291823Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-03-31T16:21:28.293527Z node 7 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-03-31T16:21:28.293674Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-03-31T16:21:28.293952Z node 7 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186234409547 2025-03-31T16:21:28.294298Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-03-31T16:21:28.294585Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 Forgetting tablet 72075186234409547 2025-03-31T16:21:28.295823Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-03-31T16:21:28.296665Z node 7 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186234409546 2025-03-31T16:21:28.296982Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-03-31T16:21:28.297224Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-31T16:21:28.298149Z node 7 :FLAT_TX_SCHEMESHARD INFO: Clear 
TempDirsState with owners number: 0 2025-03-31T16:21:28.301814Z node 7 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186234409548 2025-03-31T16:21:28.303303Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-03-31T16:21:28.303535Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186234409546 Forgetting tablet 72075186234409548 2025-03-31T16:21:28.304197Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-03-31T16:21:28.304350Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-31T16:21:28.304764Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-31T16:21:28.304825Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-31T16:21:28.304947Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-31T16:21:28.305363Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-31T16:21:28.305418Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-31T16:21:28.305486Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:21:28.307435Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-03-31T16:21:28.307511Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-03-31T16:21:28.307602Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-03-31T16:21:28.307626Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186234409547 2025-03-31T16:21:28.309770Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-03-31T16:21:28.309812Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186234409546 2025-03-31T16:21:28.309892Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-03-31T16:21:28.309949Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186234409548 2025-03-31T16:21:28.310065Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-03-31T16:21:28.310181Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 
TestWaitNotification wait txId: 103 2025-03-31T16:21:28.310477Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-03-31T16:21:28.310530Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-03-31T16:21:28.310976Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-03-31T16:21:28.311109Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-03-31T16:21:28.311161Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [7:584:2521] TestWaitNotification: OK eventTxId 103 2025-03-31T16:21:28.311738Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:21:28.311938Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 255us result status StatusPathDoesNotExist 2025-03-31T16:21:28.312084Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
>> TSchemeShardExtSubDomainTest::Drop-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD]
>> TSchemeShardExtSubDomainTest::AlterCantChangeExternalStatisticsAggregator-AlterDatabaseCreateHiveFirst-true [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141]
2025-03-31T16:21:22.702618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:21:22.702726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:21:22.702778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:21:22.702818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:21:22.703800Z node 1 :FLAT_TX_SCHEMESHARD
NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:21:22.703845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:21:22.703919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:21:22.704098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:21:22.705168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:21:22.792285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:21:22.792344Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:21:22.796726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:21:22.796822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:21:22.796953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:21:22.801787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:21:22.801989Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:21:22.802465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:21:22.802637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:21:22.804221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:21:22.810406Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:21:22.810485Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:21:22.810661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:21:22.810712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:21:22.810762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:21:22.810933Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:21:22.819661Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:21:22.954206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:21:22.954418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:21:22.954613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path 
for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:21:22.954829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:21:22.954887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:21:22.957077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:21:22.957209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:21:22.957396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:21:22.957464Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:21:22.957513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:21:22.957574Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:21:22.959580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:21:22.959630Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:21:22.959665Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:21:22.961328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:21:22.961376Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:21:22.961445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:21:22.961494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:21:22.965263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:21:22.967220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:21:22.967430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:21:22.968452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:21:22.968605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 
72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:21:22.968659Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:21:22.968985Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:21:22.969043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:21:22.969221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:21:22.969297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:21:22.971423Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:21:22.971473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:21:22.971633Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:21:22.971689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:21:22.971909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:21:22.971954Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:21:22.972133Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:21:22.972173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:21:22.972210Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:21:22.972243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:21:22.972278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:21:22.972329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:21:22.972386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:21:22.972421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:21:22.972502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:21:22.972549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:21:22.972612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:21:22.974571Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:21:22.974708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, 
at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:21:22.974763Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 8.434880Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-31T16:21:28.435148Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:21:28.435193Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:21:28.435326Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T16:21:28.435477Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:21:28.435514Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:205:2207], at schemeshard: 72057594046678944, txId: 103, path id: 1 2025-03-31T16:21:28.435553Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:205:2207], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-03-31T16:21:28.435824Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-31T16:21:28.435878Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 103:0 ProgressState 2025-03-31T16:21:28.435935Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 135 -> 240 2025-03-31T16:21:28.436858Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2025-03-31T16:21:28.436946Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2025-03-31T16:21:28.436977Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-03-31T16:21:28.437012Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-03-31T16:21:28.437047Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:21:28.437785Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-03-31T16:21:28.437866Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-03-31T16:21:28.437893Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-03-31T16:21:28.437923Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 
18446744073709551615 2025-03-31T16:21:28.437953Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-03-31T16:21:28.438016Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-03-31T16:21:28.440850Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-03-31T16:21:28.440910Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2025-03-31T16:21:28.440940Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-03-31T16:21:28.441219Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-31T16:21:28.441277Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-03-31T16:21:28.441444Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-03-31T16:21:28.441498Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-31T16:21:28.441548Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-03-31T16:21:28.441603Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-31T16:21:28.441656Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-03-31T16:21:28.441714Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-31T16:21:28.441764Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-03-31T16:21:28.441805Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-03-31T16:21:28.441998Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-03-31T16:21:28.443062Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-03-31T16:21:28.443541Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-03-31T16:21:28.443770Z node 7 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2025-03-31T16:21:28.444990Z node 7 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:21:28.448969Z node 7 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2025-03-31T16:21:28.449258Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-03-31T16:21:28.449610Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409548 2025-03-31T16:21:28.451131Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at 
schemeshard: 72057594046678944 2025-03-31T16:21:28.451378Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-31T16:21:28.451798Z node 7 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 Forgetting tablet 72075186233409547 2025-03-31T16:21:28.453841Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-03-31T16:21:28.454053Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-31T16:21:28.455601Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-31T16:21:28.455663Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-31T16:21:28.455802Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-31T16:21:28.456241Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-31T16:21:28.456304Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-31T16:21:28.456389Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:21:28.458835Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-03-31T16:21:28.458906Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-03-31T16:21:28.459009Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-03-31T16:21:28.459038Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-03-31T16:21:28.460997Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-03-31T16:21:28.461060Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-03-31T16:21:28.461275Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-03-31T16:21:28.461356Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-03-31T16:21:28.461636Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-03-31T16:21:28.461686Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-03-31T16:21:28.462193Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-03-31T16:21:28.462302Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: 
tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-03-31T16:21:28.462355Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [7:538:2487] TestWaitNotification: OK eventTxId 103 2025-03-31T16:21:28.462941Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:21:28.463185Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 290us result status StatusPathDoesNotExist 2025-03-31T16:21:28.463392Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-AlterDatabaseCreateHiveFirst [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141]
2025-03-31T16:21:22.702625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:21:22.702726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:21:22.702761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:21:22.702808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:21:22.703799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:21:22.703845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:21:22.703928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:21:22.704016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:21:22.705215Z
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:21:22.790834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:21:22.790887Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:21:22.795262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:21:22.795355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:21:22.795495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:21:22.800711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:21:22.800884Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:21:22.801531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:21:22.801720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:21:22.803408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:21:22.810415Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:21:22.810483Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:21:22.810672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:21:22.810730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:21:22.810780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:21:22.810915Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:21:22.820125Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:21:22.930169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:21:22.930333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:21:22.930483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:21:22.930647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:21:22.930686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:21:22.932642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 
2025-03-31T16:21:22.932783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:21:22.932939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:21:22.932994Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:21:22.933036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:21:22.933076Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:21:22.934824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:21:22.934864Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:21:22.934890Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:21:22.936288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:21:22.936332Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:21:22.936376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:21:22.936414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:21:22.948176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:21:22.949869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:21:22.950008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:21:22.951667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:21:22.951784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:21:22.951825Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:21:22.952104Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:21:22.952164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:21:22.952365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason 
publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:21:22.952707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:21:22.954914Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:21:22.954959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:21:22.955141Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:21:22.955183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:21:22.955412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:21:22.955473Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:21:22.955630Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:21:22.955667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:21:22.955701Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:21:22.955729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:21:22.955780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:21:22.955820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:21:22.955857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:21:22.955886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:21:22.955942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:21:22.955976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:21:22.956030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:21:22.957810Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:21:22.957936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:21:22.957972Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
HARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 30064773228 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:21:28.807920Z node 7 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 104:0, stepId:5000005, at schemeshard: 72057594046678944 2025-03-31T16:21:28.808147Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-03-31T16:21:28.808187Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-31T16:21:28.808232Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-03-31T16:21:28.808277Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-31T16:21:28.808348Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-03-31T16:21:28.808421Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-03-31T16:21:28.808564Z node 7 :FLAT_TX_SCHEMESHARD INFO: Send TEvUpdateTenantSchemeShard, to actor: [7:350:2329], msg: TabletId: 72057594046678944 Generation: 2 UserAttributes { Key: "user__attr_1" Value: "value" } UserAttributesVersion: 2, at schemeshard: 72057594046678944 2025-03-31T16:21:28.808615Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-31T16:21:28.808660Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-03-31T16:21:28.808696Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-03-31T16:21:28.808747Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-03-31T16:21:28.808788Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 104, publications: 1, subscribers: 0 2025-03-31T16:21:28.808828Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 2], 6 2025-03-31T16:21:28.810955Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateTenantSchemeShard, at schemeshard: 72075186233409546, msg: TabletId: 72057594046678944 Generation: 2 UserAttributes { Key: "user__attr_1" Value: "value" } UserAttributesVersion: 2 2025-03-31T16:21:28.811078Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpdateTenant DoExecute, msg: TabletId: 72057594046678944 Generation: 2 UserAttributes { Key: "user__attr_1" Value: "value" } UserAttributesVersion: 2, at schemeshard: 72075186233409546 2025-03-31T16:21:28.811324Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Cannot publish paths for unknown operation id#0 2025-03-31T16:21:28.811522Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:21:28.811572Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T16:21:28.811772Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:21:28.811823Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:205:2207], at schemeshard: 72057594046678944, txId: 104, path id: 2 FAKE_COORDINATOR: Erasing txId 104 
2025-03-31T16:21:28.812516Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046678944, cookie: 104 2025-03-31T16:21:28.812636Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046678944, cookie: 104 2025-03-31T16:21:28.812680Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-03-31T16:21:28.812729Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 6 2025-03-31T16:21:28.812782Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-03-31T16:21:28.812878Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2025-03-31T16:21:28.815601Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvSyncTenantSchemeShard, at schemeshard: 72057594046678944, msg: DomainSchemeShard: 72057594046678944 DomainPathId: 2 TabletID: 72075186233409546 Generation: 2 EffectiveACLVersion: 0 SubdomainVersion: 3 UserAttributesVersion: 2 TenantHive: 18446744073709551615 TenantSysViewProcessor: 18446744073709551615 TenantRootACL: "" TenantStatisticsAggregator: 18446744073709551615 TenantGraphShard: 18446744073709551615 2025-03-31T16:21:28.815706Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoExecute, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-31T16:21:28.815806Z node 7 :FLAT_TX_SCHEMESHARD INFO: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2], Generation: 2, ActorId:[7:350:2329], EffectiveACLVersion: 0, SubdomainVersion: 3, UserAttributesVersion: 2, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 3, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 2, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 2025-03-31T16:21:28.816424Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-03-31T16:21:28.816459Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 0, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-03-31T16:21:28.816581Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-03-31T16:21:28.816619Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:445:2397], at schemeshard: 72075186233409546, txId: 0, path id: 1 2025-03-31T16:21:28.817257Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72075186233409546, cookie: 0 2025-03-31T16:21:28.817359Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 
2025-03-31T16:21:28.817442Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-03-31T16:21:28.817813Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-03-31T16:21:28.817876Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-03-31T16:21:28.818331Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-03-31T16:21:28.818414Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-03-31T16:21:28.818460Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [7:543:2493] TestWaitNotification: OK eventTxId 104 2025-03-31T16:21:28.818967Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:21:28.819208Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 286us result status StatusSuccess 2025-03-31T16:21:28.819588Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 2 ChildrenVersion: 1 SubDomainVersion: 3 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 3 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "hdd" } StoragePools { Name: "pool-2" Kind: "hdd-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } UserAttributes { Key: "user__attr_1" Value: "value" } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:21:28.820192Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409546 2025-03-31T16:21:28.820362Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186233409546 describe path "/MyRoot/USER_0" took 193us result status StatusSuccess 2025-03-31T16:21:28.820720Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" 
PathDescription { Self { Name: "MyRoot/USER_0" PathId: 1 SchemeshardId: 72075186233409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 2 ChildrenVersion: 1 SubDomainVersion: 3 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 2 ProcessingParams { Version: 3 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "hdd" } StoragePools { Name: "pool-2" Kind: "hdd-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot/USER_0" } } UserAttributes { Key: "user__attr_1" Value: "value" } } PathId: 1 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 >> KqpPg::CreateTempTableSerial [GOOD] >> KqpPg::DropSequence >> KqpPg::AlterColumnSetDefaultFromSequence [GOOD] >> KqpPg::CreateTableIfNotExists_GenericQuery ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::InsertNoTargetColumns_SerialNotNull-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 23678, MsgBus: 24800 2025-03-31T16:20:34.493973Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488009340957900739:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:34.494119Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012b4/r3tmp/tmp00oSKp/pdisk_1.dat 2025-03-31T16:20:34.792785Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23678, node 1 2025-03-31T16:20:34.862209Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:34.862352Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:34.864013Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:20:34.873900Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:34.873924Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:34.873931Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:34.874042Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24800 TClient is connected to server localhost:24800 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:35.519867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:37.268585Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009353842803296:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:37.268711Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009353842803289:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:37.269053Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:37.272987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:20:37.282024Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488009353842803303:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:20:37.369062Z node 1 :TX_PROXY ERROR: Actor# [1:7488009353842803354:2336] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:20:37.415853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 1633, MsgBus: 22307 2025-03-31T16:20:38.437617Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488009358703986323:2133];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:38.442581Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012b4/r3tmp/tmpIS0kiQ/pdisk_1.dat 2025-03-31T16:20:38.534170Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1633, node 2 2025-03-31T16:20:38.582330Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:38.582419Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:38.588534Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:20:38.619723Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:38.619753Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:38.619764Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:38.619875Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22307 TClient is connected to server localhost:22307 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:39.076088Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:20:39.082517Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:20:41.508550Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488009371588888770:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:41.508643Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:41.508739Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488009371588888794:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:41.512435Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:20:41.521111Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488009371588888799:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:20:41.621624Z node 2 :TX_PROXY ERROR: Actor# [2:7488009371588888850:2335] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:20:41.638041Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 28257, MsgBus: 16073 2025-03-31T16:20:42.829493Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488009373560915052:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:42.829634Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012b4/r3tmp/tmp9OnfiM/pdisk_1.dat 2025-03-31T16:20:42.957761Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:20:42.987052Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:42.987213Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:42.988892Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28257, node 3 2025-03-31T16:20:43.025864Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:43.025882Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:43.025888Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:43.025983Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16073 TClient is connected to server localhost:16073 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId ... 
aeo/pdisk_1.dat 2025-03-31T16:21:16.034324Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:21:16.066831Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:21:16.066926Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:21:16.068788Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4558, node 10 2025-03-31T16:21:16.114431Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:21:16.114459Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:21:16.114471Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:21:16.114637Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23008 TClient is connected to server localhost:23008 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:21:16.700674Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:21:20.435646Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7488009537610577603:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:21:20.435733Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7488009537610577619:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:21:20.435804Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:21:20.439867Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:21:20.450797Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7488009537610577629:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:21:20.526835Z node 10 :TX_PROXY ERROR: Actor# [10:7488009537610577680:2337] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:21:20.556381Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:21:20.700652Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:21:20.783264Z node 10 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [10:7488009537610577918:2361], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiWriteTable!
:1:1: Error: Missing not null column in input: c. All not null columns should be initialized, code: 2032 2025-03-31T16:21:20.783586Z node 10 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=10&id=YmIyYzY5MjgtNzRiMWFlNy01ZGZmOWRhMi03ZDBjZGIzZA==, ActorId: [10:7488009537610577916:2360], ActorState: ExecuteState, TraceId: 01jqpg3tqac0sx2f3wcbdy1c7c, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: 2025-03-31T16:21:20.925328Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7488009516135740474:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:21:20.925413Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 5961, MsgBus: 10105 2025-03-31T16:21:21.920495Z node 11 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[11:7488009543465985117:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:21:21.920602Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012b4/r3tmp/tmpTmilcX/pdisk_1.dat 2025-03-31T16:21:22.099407Z node 11 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:21:22.124543Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:21:22.124683Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:21:22.126138Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5961, node 11 2025-03-31T16:21:22.188846Z node 11 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:21:22.188872Z node 11 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:21:22.188882Z node 11 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:21:22.189043Z node 11 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10105 TClient is connected to server localhost:10105 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:21:22.801250Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:21:26.798553Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7488009564940822253:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:21:26.798647Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7488009564940822264:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:21:26.798797Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:21:26.803924Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:21:26.817039Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7488009564940822274:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:21:26.911628Z node 11 :TX_PROXY ERROR: Actor# [11:7488009564940822325:2337] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:21:26.920918Z node 11 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7488009543465985117:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:21:26.921019Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:21:26.947814Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-31T16:21:27.064332Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-31T16:21:27.139226Z node 11 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [11:7488009569235789860:2362], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiWriteTable!
:1:1: Error: Missing not null column in input: c. All not null columns should be initialized, code: 2032 2025-03-31T16:21:27.139500Z node 11 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=11&id=YTU2OTM0OTgtOTc4NTk1ZGQtNTNlN2E2NTctZjE0NGE3MzQ=, ActorId: [11:7488009569235789858:2361], ActorState: ExecuteState, TraceId: 01jqpg40ya3yp1n6dfdcdy99bh, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::Drop-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:21:22.702595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:21:22.702682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:21:22.702720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:21:22.702753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:21:22.703800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:21:22.703834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:21:22.703910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:21:22.703981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:21:22.705156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:21:22.790811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:21:22.790886Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:21:22.795313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:21:22.795403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:21:22.795527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:21:22.800409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:21:22.800606Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:21:22.801242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:21:22.801467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:21:22.803237Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:21:22.810462Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:21:22.810529Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:21:22.810721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:21:22.810771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:21:22.810816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:21:22.810979Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:21:22.819564Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:21:22.928782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:21:22.928992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:21:22.929187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:21:22.929395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:21:22.929460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:21:22.931650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:21:22.931826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:21:22.932087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:21:22.932154Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:21:22.932189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:21:22.932235Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:21:22.933954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:21:22.934016Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:21:22.934047Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:21:22.935419Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:21:22.935458Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:21:22.935510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:21:22.935553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:21:22.937983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:21:22.939552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:21:22.940899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:21:22.941993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:21:22.942126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:21:22.942176Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:21:22.943405Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:21:22.943480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:21:22.943664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:21:22.943741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:21:22.945679Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:21:22.945722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:21:22.945866Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:21:22.945899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:21:22.946152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:21:22.946191Z node 1 
:FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:21:22.946339Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:21:22.946377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:21:22.946408Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:21:22.946433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:21:22.946467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:21:22.946516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:21:22.946549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:21:22.946571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:21:22.946614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:21:22.946641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:21:22.946665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:21:22.948169Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:21:22.948262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:21:22.948302Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
9.134871Z node 6 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 105:0 ProgressState 2025-03-31T16:21:29.134985Z node 6 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-03-31T16:21:29.135019Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-03-31T16:21:29.135055Z node 6 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-03-31T16:21:29.135118Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-03-31T16:21:29.135161Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: true 2025-03-31T16:21:29.135212Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-03-31T16:21:29.135260Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2025-03-31T16:21:29.135302Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2025-03-31T16:21:29.135501Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-03-31T16:21:29.137889Z node 6 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-03-31T16:21:29.138111Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-03-31T16:21:29.138397Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-03-31T16:21:29.139175Z node 6 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186234409547 2025-03-31T16:21:29.139376Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-03-31T16:21:29.139538Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-31T16:21:29.140535Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-03-31T16:21:29.141168Z node 6 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186234409546 Forgetting tablet 72075186234409547 2025-03-31T16:21:29.142105Z node 6 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186234409548 2025-03-31T16:21:29.142239Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-03-31T16:21:29.142417Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186234409546 2025-03-31T16:21:29.143227Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at 
schemeshard: 72057594046678944 2025-03-31T16:21:29.143399Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-31T16:21:29.143719Z node 6 :TX_DATASHARD ERROR: Datashard's schemeshard pipe destroyed while no messages to sent at 72075186234409549 2025-03-31T16:21:29.143778Z node 6 :TX_DATASHARD ERROR: Datashard's schemeshard pipe destroyed while no messages to sent at 72075186234409550 2025-03-31T16:21:29.144779Z node 6 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Forgetting tablet 72075186234409548 2025-03-31T16:21:29.148981Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-31T16:21:29.149042Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-31T16:21:29.149165Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-31T16:21:29.150758Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-03-31T16:21:29.150900Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-31T16:21:29.150938Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-31T16:21:29.151017Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:21:29.154089Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-03-31T16:21:29.154151Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-03-31T16:21:29.154230Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-03-31T16:21:29.154245Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186234409547 2025-03-31T16:21:29.154472Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-03-31T16:21:29.154499Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186234409546 2025-03-31T16:21:29.154539Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-03-31T16:21:29.154569Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186234409548 2025-03-31T16:21:29.155570Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-03-31T16:21:29.155640Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-03-31T16:21:29.155889Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-03-31T16:21:29.155937Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 
2025-03-31T16:21:29.156372Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-03-31T16:21:29.156461Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-03-31T16:21:29.156495Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [6:787:2696] TestWaitNotification: OK eventTxId 105 2025-03-31T16:21:29.157057Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/dir/table_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:21:29.157242Z node 6 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/dir/table_1" took 237us result status StatusPathDoesNotExist 2025-03-31T16:21:29.157374Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/dir/table_1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0/dir/table_1" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-03-31T16:21:29.157863Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:21:29.158005Z node 6 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 169us result status StatusPathDoesNotExist 2025-03-31T16:21:29.158114Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-03-31T16:21:29.158593Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:21:29.158746Z node 6 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 181us result status StatusSuccess 2025-03-31T16:21:29.159039Z node 6 
:SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalStatisticsAggregator-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:21:22.702619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:21:22.702738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:21:22.702785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:21:22.702837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:21:22.703805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:21:22.703851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:21:22.703941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:21:22.704030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:21:22.705217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:21:22.784272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to 
console configs 2025-03-31T16:21:22.784331Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:21:22.788843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:21:22.788967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:21:22.789132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:21:22.794161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:21:22.794358Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:21:22.795120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:21:22.795352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:21:22.802553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:21:22.810411Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:21:22.810483Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:21:22.810662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:21:22.810712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:21:22.810761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:21:22.810886Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:21:22.819940Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:21:22.954062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:21:22.954319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:21:22.954557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:21:22.954810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:21:22.954895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:21:22.957195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:21:22.957346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 
2025-03-31T16:21:22.957504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:21:22.957600Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:21:22.957666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:21:22.957708Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:21:22.959529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:21:22.959595Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:21:22.959674Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:21:22.961422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:21:22.961485Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:21:22.961547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:21:22.961601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:21:22.971419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:21:22.973655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:21:22.973888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:21:22.974992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:21:22.975152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:21:22.975211Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:21:22.975508Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:21:22.975585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:21:22.975769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:21:22.975843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no 
IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:21:22.977932Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:21:22.977982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:21:22.978149Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:21:22.978192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:21:22.978478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:21:22.978545Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:21:22.978650Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:21:22.978685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:21:22.978725Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:21:22.978755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:21:22.978799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:21:22.978840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:21:22.978878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:21:22.978907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:21:22.978976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:21:22.979016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:21:22.979077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:21:22.981250Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:21:22.981387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:21:22.981429Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
EvConfigureStatus operationId:102:0 at schemeshard:72057594046678944 2025-03-31T16:21:29.092170Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TConfigureParts operationId# 102:0 Got OK TEvConfigureStatus from tablet# 72075186233409548 shardIdx# 72057594046678944:3 at schemeshard# 72057594046678944 2025-03-31T16:21:29.094781Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-03-31T16:21:29.095107Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-03-31T16:21:29.095170Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-03-31T16:21:29.095678Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 102, at schemeshard: 72057594046678944 2025-03-31T16:21:29.095744Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-03-31T16:21:29.095800Z node 8 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 102, at schemeshard: 72057594046678944 2025-03-31T16:21:29.129105Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409549, partId: 0 2025-03-31T16:21:29.129312Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Status: SUCCESS OnTabletId: 72075186233409549 2025-03-31T16:21:29.129383Z node 8 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 102:0 HandleReply TEvConfigureStatus operationId:102:0 at schemeshard:72057594046678944 2025-03-31T16:21:29.129446Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TConfigureParts operationId# 102:0 Got OK TEvConfigureStatus from tablet# 72075186233409549 shardIdx# 72057594046678944:4 at schemeshard# 72057594046678944 2025-03-31T16:21:29.129503Z node 8 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 3 -> 128 2025-03-31T16:21:29.131913Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:21:29.132162Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:21:29.132227Z node 8 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:21:29.132288Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 102:0, at tablet# 72057594046678944 2025-03-31T16:21:29.132358Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2025-03-31T16:21:29.132535Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:21:29.134606Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-03-31T16:21:29.134780Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 
72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 2025-03-31T16:21:29.135063Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:21:29.135215Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 34359740524 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:21:29.135271Z node 8 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 102:0, at tablet# 72057594046678944 2025-03-31T16:21:29.135545Z node 8 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 240 2025-03-31T16:21:29.135601Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 102:0, at tablet# 72057594046678944 2025-03-31T16:21:29.135721Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-03-31T16:21:29.135817Z node 8 :FLAT_TX_SCHEMESHARD INFO: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2], Generation: 2, ActorId:[8:361:2335], EffectiveACLVersion: 0, SubdomainVersion: 2, UserAttributesVersion: 1, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 72075186233409549, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 2, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 2025-03-31T16:21:29.137818Z node 8 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:21:29.137870Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T16:21:29.138047Z node 8 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:21:29.138092Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [8:203:2205], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-03-31T16:21:29.138394Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:21:29.138446Z node 8 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TSyncHive, operationId 102:0, ProgressState, NeedSyncHive: 0 2025-03-31T16:21:29.138483Z node 8 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 240 -> 240 2025-03-31T16:21:29.139021Z node 8 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:21:29.139143Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 
72057594046678944, cookie: 102 2025-03-31T16:21:29.139190Z node 8 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-03-31T16:21:29.139250Z node 8 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2025-03-31T16:21:29.139293Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2025-03-31T16:21:29.139387Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-03-31T16:21:29.141965Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:21:29.142038Z node 8 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-03-31T16:21:29.142200Z node 8 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-31T16:21:29.142265Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T16:21:29.142320Z node 8 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-31T16:21:29.142367Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T16:21:29.142420Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-03-31T16:21:29.142510Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [8:297:2288] message: TxId: 102 2025-03-31T16:21:29.142577Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T16:21:29.142637Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-03-31T16:21:29.142685Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-03-31T16:21:29.142922Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-03-31T16:21:29.143541Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-31T16:21:29.145148Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-31T16:21:29.145209Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [8:503:2442] TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 2025-03-31T16:21:29.148500Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "USER_0" ExternalStatisticsAggregator: false } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:21:29.148707Z node 8 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 103:0, feature flag EnableAlterDatabaseCreateHiveFirst 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "USER_0" ExternalStatisticsAggregator: false } 2025-03-31T16:21:29.148766Z node 8 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 103:0, path /MyRoot/USER_0 2025-03-31T16:21:29.148922Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 
103:0, explain: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ExternalStatisticsAggregator could only be added, not removed, at schemeshard: 72057594046678944 2025-03-31T16:21:29.148985Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ExternalStatisticsAggregator could only be added, not removed, at schemeshard: 72057594046678944 2025-03-31T16:21:29.151709Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusInvalidParameter Reason: "Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ExternalStatisticsAggregator could only be added, not removed" TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:21:29.151921Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ExternalStatisticsAggregator could only be added, not removed, operation: ALTER DATABASE, path: /MyRoot/USER_0 TestModificationResult got TxId: 103, wait until txId: 103 >> TPQTest::TestSetClientOffset [GOOD] >> TPQTest::TestReadSessions >> TColumnShardTestReadWrite::ReadStale |90.4%| [TA] $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/test-results/unittest/{meta.json ... results_accumulator.log} |90.4%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/test-results/unittest/{meta.json ... results_accumulator.log} >> TPartitionTests::ConflictingCommitProccesAfterRollback [GOOD] >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt32 >> TPartitionTests::UserActCount [GOOD] >> TColumnShardTestReadWrite::WriteReadNoCompression >> TPartitionTests::TooManyImmediateTxs |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_NotWritten_Test >> BasicStatistics::NotFullStatisticsColumnshard [GOOD] >> TColumnShardTestReadWrite::RebootWriteRead >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32 >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionInactive_0_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionInactive_1_Test >> Normalizers::PortionsNormalizer [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::ConflictingCommitProccesAfterRollback [GOOD] Test command err: 2025-03-31T16:21:11.022468Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:11.022531Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-31T16:21:11.036912Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitConfigStep Got KV request 2025-03-31T16:21:11.037164Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-31T16:21:11.037496Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:178:2193] 2025-03-31T16:21:11.038381Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing 
step TInitDiskStatusStep Got KV request 2025-03-31T16:21:11.038550Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitMetaStep Got KV request 2025-03-31T16:21:11.038730Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInfoRangeStep Got KV request 2025-03-31T16:21:11.038903Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataRangeStep Got KV request 2025-03-31T16:21:11.039301Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:TInitDataRangeStep] Got data offset 0 count 1 size 684 so 0 eo 1 d0000000000_00000000000000000000_00000_0000000001_00000 2025-03-31T16:21:11.039417Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataStep 2025-03-31T16:21:11.039457Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-03-31T16:21:11.039496Z node 1 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-03-31T16:21:11.000000Z 2025-03-31T16:21:11.039535Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Initializing completed. 2025-03-31T16:21:11.039597Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [1:178:2193] 2025-03-31T16:21:11.039653Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partition 0 so 0 endOffset 1 Head Offset 1 PartNo 0 PackedSize 0 count 0 nextOffset 1 batches 0 SYNC INIT DATA KEY: d0000000000_00000000000000000000_00000_0000000001_00000 size 684 2025-03-31T16:21:11.039714Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-03-31T16:21:11.349356Z node 1 :PERSQUEUE INFO: new Cookie src1|2fcfcf92-95c80884-4cdac797-c1ad50f_0 generated for partition 0 topic 'Root/PQ/rt3.dc1--account--topic' owner src1 2025-03-31T16:21:11.349503Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 Got batch complete: 1 2025-03-31T16:21:11.349680Z node 1 :PERSQUEUE INFO: new Cookie src4|3aa7ace4-c0ca08d5-932c470d-feba1a16_0 generated for partition 0 topic 'Root/PQ/rt3.dc1--account--topic' owner src4 2025-03-31T16:21:11.349735Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. 
Partition: 0 Got batch complete: 1 Create distr tx with id = 0 and act no: 1 Create distr tx with id = 2 and act no: 3 Create distr tx with id = 4 and act no: 5 Create distr tx with id = 8 and act no: 9 Create immediate tx with id = 11 and act no: 12 2025-03-31T16:21:12.317176Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 0 2025-03-31T16:21:12.317279Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 2 2025-03-31T16:21:12.317379Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 4 2025-03-31T16:21:12.317527Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 8 2025-03-31T16:21:13.592705Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-03-31T16:21:13.592851Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-03-31T16:21:13.592906Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-03-31T16:21:13.592947Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-03-31T16:21:13.592977Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse Got batch complete: 2 Wait batch completion 2025-03-31T16:21:13.593119Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 0 2025-03-31T16:21:13.593174Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 0 2025-03-31T16:21:13.593213Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-03-31T16:21:13.593246Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 1 PartNo 0 PackedSize 0 count 0 nextOffset 1 batches 0, NewHead=Offset 1 PartNo 0 PackedSize 0 count 0 nextOffset 1 batches 0 Got batch complete: 1 Wait batch completion Wait batch completion 2025-03-31T16:21:13.837702Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 4 2025-03-31T16:21:13.837770Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 4 2025-03-31T16:21:13.837819Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-03-31T16:21:13.837862Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 1 PartNo 0 PackedSize 0 count 0 nextOffset 1 batches 0, NewHead=Offset 1 PartNo 0 PackedSize 0 count 0 nextOffset 1 batches 0 2025-03-31T16:21:13.838023Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src1' seqNo 7 partNo 0 2025-03-31T16:21:13.838881Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob 
complete sourceId 'src1' seqNo 7 partNo 0 FormedBlobsCount 0 NewHead: Offset 60 PartNo 0 PackedSize 84 count 1 nextOffset 61 batches 1 2025-03-31T16:21:13.838959Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src1' seqNo 8 partNo 0 2025-03-31T16:21:13.838999Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src1' seqNo 8 partNo 0 FormedBlobsCount 0 NewHead: Offset 60 PartNo 0 PackedSize 136 count 2 nextOffset 62 batches 1 2025-03-31T16:21:13.839021Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src1' seqNo 9 partNo 0 2025-03-31T16:21:13.839041Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src1' seqNo 9 partNo 0 FormedBlobsCount 0 NewHead: Offset 60 PartNo 0 PackedSize 188 count 3 nextOffset 63 batches 1 2025-03-31T16:21:13.839058Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src1' seqNo 10 partNo 0 2025-03-31T16:21:13.839129Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src1' seqNo 10 partNo 0 FormedBlobsCount 0 NewHead: Offset 60 PartNo 0 PackedSize 240 count 4 nextOffset 64 batches 1 2025-03-31T16:21:13.839184Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src1' seqNo 11 partNo 0 2025-03-31T16:21:13.839218Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src1' seqNo 11 partNo 0 FormedBlobsCount 0 NewHead: Offset 60 PartNo 0 PackedSize 292 count 5 nextOffset 65 batches 1 2025-03-31T16:21:13.839248Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src1' seqNo 12 partNo 0 2025-03-31T16:21:13.839275Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src1' seqNo 12 partNo 0 FormedBlobsCount 0 NewHead: Offset 60 PartNo 0 PackedSize 344 count 6 nextOffset 66 batches 1 2025-03-31T16:21:13.839316Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src4' seqNo 1 partNo 0 2025-03-31T16:21:13.840440Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob sourceId 'src4' seqNo 1 partNo 0 result is x0000000000_00000000000000000060_00000_0000000006_00000 size 211 2025-03-31T16:21:13.840532Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] writing blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 0 old key x0000000000_00000000000000000060_00000_0000000006_00000 new key d0000000000_00000000000000000060_00000_0000000006_00000 size 
211 WTime 11140 2025-03-31T16:21:13.841229Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src4' seqNo 1 partNo 0 FormedBlobsCount 1 NewHead: Offset 70 PartNo 0 PackedSize 84 count 1 nextOffset 71 batches 1 2025-03-31T16:21:13.841294Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src4' seqNo 2 partNo 0 2025-03-31T16:21:13.841335Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src4' seqNo 2 partNo 0 FormedBlobsCount 0 NewHead: Offset 70 PartNo 0 PackedSize 136 count 2 nextOffset 72 batches 1 2025-03-31T16:21:13.841833Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 0 compactOffset 70,2 HeadOffset 1 endOffset 1 curOffset 72 d0000000000_00000000000000000070_00000_0000000002_00000| size 121 WTime 11140 2025-03-31T16:21:13.841989Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-31T16:21:13.842025Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: ... c' partition 0 user client-1 offset is set to 3 (startOffset 0) session session-client-1 2025-03-31T16:21:27.488612Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-31T16:21:27.488657Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-31T16:21:27.488694Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-31T16:21:27.488730Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-31T16:21:27.488760Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-03-31T16:21:27.488807Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cclient-1 2025-03-31T16:21:27.488833Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uclient-1 2025-03-31T16:21:27.488864Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-31T16:21:27.488901Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Wait batch completion Wait kv request 2025-03-31T16:21:27.747995Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-03-31T16:21:27.748101Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Partition 0 Consumer 'client-1' Bad request (gap) Offset 3 Begin 0 Got batch complete: 1 Wait batch completion Wait kv request 2025-03-31T16:21:27.748376Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-31T16:21:27.748421Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-31T16:21:27.748462Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write 
----------------- 2025-03-31T16:21:27.748513Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-31T16:21:27.748546Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-03-31T16:21:27.748581Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-31T16:21:27.748620Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Create distr tx with id = 8 and act no: 9 2025-03-31T16:21:27.748870Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 8 2025-03-31T16:21:27.769519Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-03-31T16:21:28.748165Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse Got batch complete: 3 Wait kv request 2025-03-31T16:21:28.748455Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: (empty maybe) 2025-03-31T16:21:28.748509Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] schedule TEvPersQueue::TEvProposeTransactionResult(COMPLETE), reason= 2025-03-31T16:21:28.748585Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] schedule TEvPersQueue::TEvProposeTransactionResult(ABORTED), reason=incorrect offset range (gap) 2025-03-31T16:21:28.748757Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-31T16:21:28.748803Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-31T16:21:28.748840Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-31T16:21:28.748877Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-31T16:21:28.748905Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-03-31T16:21:28.748930Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cclient-1 2025-03-31T16:21:28.748949Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uclient-1 2025-03-31T16:21:28.748977Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-31T16:21:28.749015Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Wait immediate tx complete 10 2025-03-31T16:21:28.769643Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 Got propose result: Origin: 72057594037927937 Status: COMPLETE TxId: 10 Wait immediate tx complete 11 Got propose result: Origin: 72057594037927937 Status: ABORTED TxId: 11 Errors { Kind: BAD_REQUEST Reason: "incorrect offset range (gap)" } 2025-03-31T16:21:29.136232Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:29.136291Z node 5 :PERSQUEUE INFO: 
[PQ: 72057594037927937] doesn't have tx writes info 2025-03-31T16:21:29.153769Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitConfigStep Got KV request 2025-03-31T16:21:29.154107Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-31T16:21:29.154403Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [5:178:2193] 2025-03-31T16:21:29.155445Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDiskStatusStep Got KV request 2025-03-31T16:21:29.155645Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitMetaStep Got KV request 2025-03-31T16:21:29.155817Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInfoRangeStep Got KV request 2025-03-31T16:21:29.156630Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataRangeStep Got KV request 2025-03-31T16:21:29.157152Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:TInitDataRangeStep] Got data offset 0 count 50 size 684 so 0 eo 50 d0000000000_00000000000000000000_00000_0000000050_00000 2025-03-31T16:21:29.157275Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataStep 2025-03-31T16:21:29.157322Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-03-31T16:21:29.157380Z node 5 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-03-31T16:21:29.000000Z 2025-03-31T16:21:29.157431Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Initializing completed. 2025-03-31T16:21:29.157487Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [5:178:2193] 2025-03-31T16:21:29.157555Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partition 0 so 0 endOffset 50 Head Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 SYNC INIT DATA KEY: d0000000000_00000000000000000000_00000_0000000050_00000 size 684 2025-03-31T16:21:29.157617Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. 
Count 0 2025-03-31T16:21:29.157698Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-03-31T16:21:29.157757Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-1 send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2025-03-31T16:21:29.157805Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-0 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2025-03-31T16:21:29.158148Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 0 Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-1 offset 0 count 1 size 1024000 endOffset 50 max time lag 0ms effective offset 0 2025-03-31T16:21:29.158380Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 0 added 1 blobs, size 684 count 50 last offset 1, current partition end offset: 50 2025-03-31T16:21:29.158440Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Reading cookie 0. Send blob request. Created Tx with id 0 as act# 0 Created Tx with id 1 as act# 1 2025-03-31T16:21:30.416419Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 0 2025-03-31T16:21:30.416579Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 1 Got batch complete: 1 Wait batch completion Got batch complete: 1 Wait batch completion Wait kv request 2025-03-31T16:21:30.624715Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 1 2025-03-31T16:21:30.624784Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 1 2025-03-31T16:21:30.624982Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-31T16:21:30.625032Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-31T16:21:30.625088Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-31T16:21:30.625147Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-31T16:21:30.625185Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-03-31T16:21:30.625211Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cclient-0 2025-03-31T16:21:30.625241Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uclient-0 2025-03-31T16:21:30.625279Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-31T16:21:30.625342Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Wait tx committed for tx 1 2025-03-31T16:21:30.646252Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] 
TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 Wait for no tx committed >> TColumnShardTestReadWrite::ReadStale [GOOD] >> TPQTest::TestAlreadyWritten [GOOD] >> TPQTest::TestAlreadyWrittenWithoutDeduplication ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::PortionsNormalizer [GOOD] Test command err: 2025-03-31T16:21:28.075501Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:21:28.155542Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:21:28.174976Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:21:28.175268Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:21:28.182592Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=EmptyPortionsCleaner; 2025-03-31T16:21:28.182790Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=LeakedBlobsNormalizer; 2025-03-31T16:21:28.182874Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=NO_VALUE_OPTIONAL; 2025-03-31T16:21:28.183095Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:21:28.183278Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:21:28.183380Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:21:28.183469Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:21:28.183544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:21:28.183617Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:21:28.183711Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:21:28.183784Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:21:28.183878Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:21:28.183945Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:21:28.184006Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:21:28.204520Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:21:28.204691Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=13;current_normalizer=CLASS_NAME=EmptyPortionsCleaner; 2025-03-31T16:21:28.204736Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=NO_VALUE_OPTIONAL;type=NO_VALUE_OPTIONAL; 2025-03-31T16:21:28.205015Z node 1 :TX_COLUMNSHARD CRIT: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_empty.cpp:286;tasks_for_remove=0; 2025-03-31T16:21:28.205131Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=EmptyPortionsCleaner;id=NO_VALUE_OPTIONAL; 2025-03-31T16:21:28.205192Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=LeakedBlobsNormalizer;id=NO_VALUE_OPTIONAL; 2025-03-31T16:21:28.205229Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=NO_VALUE_OPTIONAL;type=NO_VALUE_OPTIONAL; 2025-03-31T16:21:28.205734Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=64; 2025-03-31T16:21:28.205828Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=36; 2025-03-31T16:21:28.205913Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=42; 2025-03-31T16:21:28.205997Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=39; 2025-03-31T16:21:28.206108Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=LeakedBlobsNormalizer;id=NO_VALUE_OPTIONAL; 2025-03-31T16:21:28.206204Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Granules;id=Granules; 2025-03-31T16:21:28.206236Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=1;type=Granules; 2025-03-31T16:21:28.206360Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 
2025-03-31T16:21:28.206429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:21:28.206463Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:21:28.206485Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=2;type=Chunks; 2025-03-31T16:21:28.206558Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:21:28.206599Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:21:28.206638Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:21:28.206665Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=4;type=TablesCleaner; 2025-03-31T16:21:28.206793Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:21:28.206852Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:21:28.206880Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:21:28.206903Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=6;type=CleanGranuleId; 2025-03-31T16:21:28.206974Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:21:28.207027Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:21:28.207077Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:21:28.207115Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:21:28.207182Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:21:28.207215Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:21:28.207238Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:21:28.207286Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:21:28.207310Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:21:28.207329Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:21:28.207592Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=19; 2025-03-31T16:21:28.207668Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=27; 2025-03-31T16:21:28.207739Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=24; 2025-03-31T16:21:28.207814Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=17; 2025-03-31T16:21:28.207925Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:21:28.207964Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=a ... oad_size=0; 2025-03-31T16:21:31.230013Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:398:2412];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-03-31T16:21:31.230085Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:398:2412];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-03-31T16:21:31.230110Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
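[Editor's note] The periodic-wakeup entries that follow (EnqueueBackgroundActivities, StartCleanup with portions_count=0, then background=cleanup/ttl with skip_reason=no_changes) show background work being gated on whether there is anything to do. A tiny sketch of that gating, with hypothetical structure names, purely for illustration:

#include <cstdio>

struct ShardState {
    int PortionsToDrop = 0;   // nothing to clean up in this run
    int ExpiredByTtl   = 0;   // nothing to evict either
};

static void EnqueueBackgroundActivities(const ShardState& s) {
    if (s.PortionsToDrop == 0)
        std::printf("background=cleanup;skip_reason=no_changes\n");
    else
        std::printf("background=cleanup;portions=%d\n", s.PortionsToDrop);
    if (s.ExpiredByTtl == 0)
        std::printf("background=ttl;skip_reason=no_changes\n");
}

int main() { EnqueueBackgroundActivities(ShardState{}); return 0; }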
2025-03-31T16:21:31.230136Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-03-31T16:21:31.230176Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:398:2412];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-31T16:21:31.230229Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:398:2412];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:21:31.230288Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:398:2412];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=0; 2025-03-31T16:21:31.230344Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:398:2412];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-31T16:21:31.230411Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:398:2412];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-31T16:21:31.230449Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:398:2412];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-31T16:21:31.230518Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:398:2412];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=1.000000s; 2025-03-31T16:21:31.230563Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:398:2412];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-31T16:21:31.387642Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 111 scanId: 0 version: {11:111} readable: {11:max} at tablet 9437184 2025-03-31T16:21:31.387798Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 111 scanId: 0 at tablet 9437184 2025-03-31T16:21:31.387994Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:398:2412];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={11:111};tablet=9437184;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 1 } Columns { Id: 2 } Columns { Id: 3 } } } ; 2025-03-31T16:21:31.388059Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:398:2412];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={11:111};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } Columns { Id: 2 } Columns { Id: 3 } } } ; 2025-03-31T16:21:31.388659Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:398:2412];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={11:111};tablet=9437184;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":7,"inputs":[]},{"owner_id":0,"inputs":[{"from":2},{"from":4},{"from":6}]},{"owner_id":2,"inputs":[{"from":7}]},{"owner_id":4,"inputs":[{"from":7}]},{"owner_id":6,"inputs":[{"from":7}]}],"nodes":{"2":{"p":{"i":"1","p":{"address":{"name":"key1","id":1}},"o":"1","t":"AssembleOriginalData"},"w":11,"id":2},"6":{"p":{"i":"3","p":{"address":{"name":"field","id":3}},"o":"3","t":"AssembleOriginalData"},"w":11,"id":6},"7":{"p":{"p":{"data":[{"name":"key1","id":1},{"name":"key2","id":2},{"name":"field","id":3}]},"o":"1,2,3","t":"FetchOriginalData"},"w":6,"id":7},"4":{"p":{"i":"2","p":{"address":{"name":"key2","id":2}},"o":"2","t":"AssembleOriginalData"},"w":11,"id":4},"0":{"p":{"i":"1,2,3","t":"Projection"},"w":33,"id":0}}}; 2025-03-31T16:21:31.388751Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: tablet_id=9437184;self_id=[1:398:2412];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={11:111};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:136;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-03-31T16:21:31.389272Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: tablet_id=9437184;self_id=[1:398:2412];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={11:111};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:162;event=TTxScan started;actor_id=[1:471:2477];trace_detailed=; 2025-03-31T16:21:31.389835Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: fline=context.cpp:84;ff_first=(column_ids=1,2,3;column_names=field,key1,key2;);; 2025-03-31T16:21:31.390007Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: fline=context.cpp:99;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;; 2025-03-31T16:21:31.390244Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:471:2477];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-31T16:21:31.390349Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:471:2477];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-03-31T16:21:31.390445Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:471:2477];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-03-31T16:21:31.390497Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:471:2477] finished for tablet 9437184 2025-03-31T16:21:31.390821Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:471:2477];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:416;event=scan_finish;compute_actor_id=[1:469:2476];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0},{"events":["f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1743438091389200,"name":"_full_task","f":1743438091389200,"d_finished":0,"c":0,"l":1743438091390563,"d":1363},"events":[{"name":"bootstrap","f":1743438091389386,"d_finished":747,"c":1,"l":1743438091390133,"d":747},{"a":1743438091390228,"name":"ack","f":1743438091390228,"d_finished":0,"c":0,"l":1743438091390563,"d":335},{"a":1743438091390214,"name":"processing","f":1743438091390214,"d_finished":0,"c":0,"l":1743438091390563,"d":349},{"name":"ProduceResults","f":1743438091390117,"d_finished":226,"c":2,"l":1743438091390483,"d":226},{"a":1743438091390486,"name":"Finish","f":1743438091390486,"d_finished":0,"c":0,"l":1743438091390563,"d":77}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-03-31T16:21:31.390882Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:471:2477];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:469:2476];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-31T16:21:31.391182Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:471:2477];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:371;event=scan_finished;compute_actor_id=[1:469:2476];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0},{"events":["f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1743438091389200,"name":"_full_task","f":1743438091389200,"d_finished":0,"c":0,"l":1743438091390913,"d":1713},"events":[{"name":"bootstrap","f":1743438091389386,"d_finished":747,"c":1,"l":1743438091390133,"d":747},{"a":1743438091390228,"name":"ack","f":1743438091390228,"d_finished":0,"c":0,"l":1743438091390913,"d":685},{"a":1743438091390214,"name":"processing","f":1743438091390214,"d_finished":0,"c":0,"l":1743438091390913,"d":699},{"name":"ProduceResults","f":1743438091390117,"d_finished":226,"c":2,"l":1743438091390483,"d":226},{"a":1743438091390486,"name":"Finish","f":1743438091390486,"d_finished":0,"c":0,"l":1743438091390913,"d":427}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-03-31T16:21:31.391263Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:471:2477];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-31T16:21:31.388723Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 
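[Editor's note] The stats blob printed in the scan_finish record above is a compact phase timeline: judging from the numbers themselves, "f" and "l" are the first and last microsecond timestamps of a phase, "d" = l - f is its wall duration, and "c" counts how many times the phase ran (e.g. bootstrap: f=1743438091389386, l=1743438091390133, d=747). These semantics are inferred from the output, not documented here; a one-line check of the invariant:

#include <cassert>
#include <cstdint>

int main() {
    // "bootstrap" event taken verbatim from the scan_finish record above
    std::int64_t f = 1743438091389386, l = 1743438091390133, d = 747;
    assert(l - f == d);   // duration is last-seen minus first-seen, in microseconds
    return 0;
}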
2025-03-31T16:21:31.391305Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:471:2477];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-31T16:21:31.391398Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:471:2477];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64;records=0;count=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64;records=0;count=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:49;event=insert_to_cache;key=string;records=0;size=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=string;records=0;count=0; >> TListAllTopicsTests::ListLimitAndPaging [GOOD] >> TMeteringSink::FlushPutEventsV1 [GOOD] >> TMeteringSink::FlushResourcesReservedV1 [GOOD] >> TMeteringSink::FlushStorageV1 [GOOD] >> TFetchRequestTests::BadTopicName [GOOD] >> TFetchRequestTests::CheckAccess ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadStale [GOOD] Test command err: 2025-03-31T16:21:30.973562Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:21:31.064655Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:21:31.090560Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:21:31.090852Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:21:31.099822Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:21:31.100051Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:21:31.100324Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:21:31.100454Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:21:31.100588Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:21:31.100749Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:21:31.100866Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:21:31.100977Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:21:31.101090Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:21:31.101214Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:21:31.101328Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:21:31.101460Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:21:31.130701Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:21:31.130867Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:21:31.130922Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:21:31.131141Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:21:31.131299Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:21:31.131371Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:21:31.131416Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:21:31.131506Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:21:31.131570Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:21:31.131630Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:21:31.131664Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:21:31.131896Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:21:31.131963Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:21:31.132005Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:21:31.132036Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:21:31.132173Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:21:31.132242Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:21:31.132301Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:21:31.132334Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:21:31.132420Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:21:31.132466Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:21:31.132500Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:21:31.132561Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:21:31.132600Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:21:31.132637Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:21:31.133029Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=46; 2025-03-31T16:21:31.133128Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=50; 2025-03-31T16:21:31.133220Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=39; 2025-03-31T16:21:31.133298Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=33; 2025-03-31T16:21:31.133464Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:21:31.133534Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:21:31.133573Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:21:31.133786Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:21:31.133844Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:21:31.133890Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:21:31.134077Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:21:31.134124Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:21:31.134155Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-31T16:21:31.134389Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:21:31.134450Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:21:31.134486Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-31T16:21:31.134612Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T16:21:31.134664Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T16:21:31.134721Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
ock_id=1;broken=0; 2025-03-31T16:21:31.812815Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=77304; 2025-03-31T16:21:31.816889Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=manager.cpp:10;event=lock;process_id=CS::INDEXATION::350240b4-e4c11f0-bde87f49-8772585e; 2025-03-31T16:21:31.816982Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=ro_controller.cpp:45;event=CS::INDEXATION;tablet_id=9437184; 2025-03-31T16:21:31.817111Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:754;event=indexation;bytes=77304;blobs_count=9;max_limit=251658240;has_more=0;external_task_id=350240b4-e4c11f0-bde87f49-8772585e; 2025-03-31T16:21:31.817348Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:620;event=start_changes;type=CS::INDEXATION;task_id=350240b4-e4c11f0-bde87f49-8772585e; 2025-03-31T16:21:31.817816Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];ev_type=NKikimr::NOlap::NResourceBroker::NSubscribe::TEvStartTask;fline=actor.cpp:38;event=ask_resources;task=cpu=0;mem=69691;external_task_id=350240b4-e4c11f0-bde87f49-8772585e;type=CS::INDEXATION;priority=0;; 2025-03-31T16:21:31.818058Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=actor.cpp:29;event=result_resources;task_id=1;task=cpu=0;mem=69691;external_task_id=350240b4-e4c11f0-bde87f49-8772585e;type=CS::INDEXATION;priority=0;; 2025-03-31T16:21:31.818105Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:9;event=resource_allocated;external_task_id=350240b4-e4c11f0-bde87f49-8772585e;mem=69691;cpu=0; 2025-03-31T16:21:31.818285Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:40;event=allocate_resources;external_task_id=350240b4-e4c11f0-bde87f49-8772585e;task_id=1;mem=69691;cpu=0; 2025-03-31T16:21:31.818491Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=350240b4-e4c11f0-bde87f49-8772585e;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=350240b4-e4c11f0-bde87f49-8772585e; 2025-03-31T16:21:31.842725Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=350240b4-e4c11f0-bde87f49-8772585e;fline=actor.cpp:48;task=agents_waiting=0;additional_info=();; 2025-03-31T16:21:31.842912Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; 2025-03-31T16:21:31.843657Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:239;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-03-31T16:21:31.843722Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-31T16:21:31.843800Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:769;event=skip_indexation;reason=in_progress;count=1;insert_overload_size=77304;indexing_debug={task_ids=350240b4-e4c11f0-bde87f49-8772585e,;}; 2025-03-31T16:21:31.843880Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=0; 2025-03-31T16:21:31.844170Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-31T16:21:31.844247Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-31T16:21:31.844299Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-31T16:21:31.844452Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-31T16:21:31.844846Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 1 version: {640000:max} readable: {1000000:max} at tablet 9437184 2025-03-31T16:21:31.856991Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 1 at tablet 9437184 2025-03-31T16:21:31.857145Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: tx_id=18446744073709551615;scan_id=1;gen=0;table=test_olap_table;snapshot={640000:max};tablet=9437184;timeout=0.000000s;fline=constructor.cpp:18;event=overriden_columns;ids=1,2,3,4,5,6,7,8,9,10,4294967040,4294967041,4294967042,4294967043; 2025-03-31T16:21:31.857244Z node 1 :TX_COLUMNSHARD_SCAN WARN: tx_id=18446744073709551615;scan_id=1;gen=0;table=test_olap_table;snapshot={640000:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:14;event=TTxScan failed;problem=cannot build metadata withno ranges;details=Snapshot too old: {640000:max}. CS min read snapshot: {700000:max}. 
now: 2025-03-31T16:21:31.857206Z; 2025-03-31T16:21:31.880511Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:1:255:1:6824:0]; 2025-03-31T16:21:31.880651Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:1:255:2:6824:0]; 2025-03-31T16:21:31.886660Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 9437184 2025-03-31T16:21:31.887004Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[8] (CS::INDEXATION) apply at tablet 9437184 2025-03-31T16:21:31.888341Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:2 Blob count: 7 2025-03-31T16:21:31.888511Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 0 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=0;raw_bytes=0;count=0;records=0} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-03-31T16:21:31.889097Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {640000:max} readable: {1000000:max} at tablet 9437184 2025-03-31T16:21:31.901906Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=350240b4-e4c11f0-bde87f49-8772585e;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; 2025-03-31T16:21:31.901990Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=350240b4-e4c11f0-bde87f49-8772585e;fline=with_appended.cpp:65;portions=1,;task_id=350240b4-e4c11f0-bde87f49-8772585e; 2025-03-31T16:21:31.902250Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=350240b4-e4c11f0-bde87f49-8772585e;fline=manager.cpp:15;event=unlock;process_id=CS::INDEXATION::350240b4-e4c11f0-bde87f49-8772585e; 2025-03-31T16:21:31.902323Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=350240b4-e4c11f0-bde87f49-8772585e;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-31T16:21:31.902403Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=350240b4-e4c11f0-bde87f49-8772585e;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:21:31.902491Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=350240b4-e4c11f0-bde87f49-8772585e;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=0; 2025-03-31T16:21:31.902579Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=350240b4-e4c11f0-bde87f49-8772585e;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-31T16:21:31.902660Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=350240b4-e4c11f0-bde87f49-8772585e;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-31T16:21:31.902728Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=350240b4-e4c11f0-bde87f49-8772585e;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-31T16:21:31.902833Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=350240b4-e4c11f0-bde87f49-8772585e;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.993000s; 2025-03-31T16:21:31.902893Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;task_id=350240b4-e4c11f0-bde87f49-8772585e;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-31T16:21:31.903043Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:2 Blob count: 7 2025-03-31T16:21:31.903285Z node 1 :TX_COLUMNSHARD DEBUG: fline=task.cpp:21;event=free_resources;task_id=1;external_task_id=350240b4-e4c11f0-bde87f49-8772585e;mem=69691;cpu=0; 2025-03-31T16:21:31.903452Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:21:31.903559Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-03-31T16:21:31.910207Z node 1 :TX_COLUMNSHARD DEBUG: tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={640000:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 1 } Columns { Id: 6 } } } ; 2025-03-31T16:21:31.910367Z node 1 :TX_COLUMNSHARD DEBUG: tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={640000:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } Columns { Id: 6 } } } ; 2025-03-31T16:21:31.911561Z node 1 :TX_COLUMNSHARD DEBUG: tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={640000:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":4,"inputs":[{"from":5}]},{"owner_id":5,"inputs":[]}],"nodes":{"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":9,"id":2},"5":{"p":{"p":{"data":[{"name":"timestamp","id":1},{"name":"message","id":6}]},"o":"1,6","t":"FetchOriginalData"},"w":4,"id":5},"4":{"p":{"i":"6","p":{"address":{"name":"message","id":6}},"o":"6","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"1,6","t":"Projection"},"w":18,"id":0}}}; 2025-03-31T16:21:31.911697Z node 1 :TX_COLUMNSHARD_SCAN WARN: tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={640000:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:14;event=TTxScan failed;problem=cannot build metadata withno ranges;details=Snapshot too old: {640000:max}. CS min read snapshot: {700000:max}. now: 2025-03-31T16:21:31.911652Z; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::NotFullStatisticsColumnshard [GOOD] Test command err: 2025-03-31T16:15:15.063340Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:15.063617Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:15:15.063695Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000cf9/r3tmp/tmpuNwVO2/pdisk_1.dat 2025-03-31T16:15:15.460483Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24380, node 1 2025-03-31T16:15:15.775687Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:15:15.775745Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:15:15.775804Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:15:15.778497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:15:15.779047Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:15:15.873842Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:15:15.874124Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:15:15.892663Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15238 2025-03-31T16:15:16.522651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:15:19.832888Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-31T16:15:19.878143Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:15:19.878267Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:15:19.908028Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:15:19.910261Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:15:20.156668Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:20.157493Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:20.158165Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:20.158309Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:20.158579Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:20.158757Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:20.158888Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:20.158978Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:20.159756Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:20.330812Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:15:20.330979Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:15:20.349918Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:15:20.523982Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:15:20.581976Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-31T16:15:20.582130Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-31T16:15:20.615429Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-31T16:15:20.619587Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-31T16:15:20.619868Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-31T16:15:20.619937Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-31T16:15:20.620007Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-31T16:15:20.620065Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-31T16:15:20.620124Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-31T16:15:20.620194Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-31T16:15:20.620823Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-31T16:15:20.652000Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-31T16:15:20.652152Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1872:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-31T16:15:20.661105Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2608] 2025-03-31T16:15:20.676262Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1908:2617] 2025-03-31T16:15:20.676962Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1908:2617], schemeshard id = 72075186224037897 2025-03-31T16:15:20.679584Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-31T16:15:20.717433Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-31T16:15:20.717491Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-31T16:15:20.717564Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-31T16:15:20.782514Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-31T16:15:20.792664Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-31T16:15:20.792799Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-31T16:15:20.977203Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-31T16:15:21.168177Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-31T16:15:21.265075Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-31T16:15:22.434069Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2241:3074], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:22.434208Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:22.455825Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-31T16:15:22.672564Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2352:2866];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:15:22.672835Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2352:2866];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:15:22.673168Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2352:2866];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:15:22.673322Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2352:2866];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:15:22.673440Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2352:2866];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:15:22.673576Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2352:2866];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:15:22.673696Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2352:2866];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:15:22.673824Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2352:2866];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:15:22.674009Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2352:2866];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:15:22.674153Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2352:2866];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:15:22.674283Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2352:2866];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:15:22.674416Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2352:2866];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:15:22.744352Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2357:2868];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:15:22.744456Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2357:2868];tablet_id=72075186224037900;process= ... NextTraversal. No force traversals. 2025-03-31T16:20:23.415190Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-31T16:20:24.952720Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:20:24.952790Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-31T16:20:26.125432Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-31T16:20:26.125671Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-31T16:20:27.496416Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:20:27.496480Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-31T16:20:29.609713Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-31T16:20:29.759930Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:20:29.759988Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-31T16:20:32.181949Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-31T16:20:32.182100Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-31T16:20:32.366892Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:20:32.366951Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-31T16:20:34.799175Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:20:34.799246Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-31T16:20:35.815509Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-31T16:20:37.128411Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:20:37.128490Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-31T16:20:38.153393Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-31T16:20:38.153602Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-31T16:20:39.531698Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:20:39.531774Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-31T16:20:41.825554Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-31T16:20:41.977028Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:20:41.977102Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 
2025-03-31T16:20:43.037930Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-03-31T16:20:43.038004Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-03-31T16:20:43.038038Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-03-31T16:20:43.038087Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-03-31T16:20:44.556851Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-31T16:20:44.557047Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-31T16:20:44.757009Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:20:44.757093Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-31T16:20:47.161029Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:20:47.161103Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-31T16:20:48.192553Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-31T16:20:49.566723Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:20:49.566792Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-31T16:20:50.590439Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-31T16:20:50.590665Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-31T16:20:51.871751Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:20:51.871823Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-31T16:20:53.741719Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-31T16:20:53.860884Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:20:53.860943Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-31T16:20:56.297714Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-31T16:20:56.297870Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-31T16:20:56.480605Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:20:56.480673Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-31T16:20:58.731034Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:20:58.731127Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-31T16:20:59.755956Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-31T16:21:01.244646Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:21:01.244714Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 
2025-03-31T16:21:02.366275Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-31T16:21:02.366441Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-31T16:21:03.726496Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:21:03.726567Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-31T16:21:05.888200Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-31T16:21:06.042195Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:21:06.042268Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-31T16:21:08.489183Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-31T16:21:08.489351Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-31T16:21:08.672810Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:21:08.672901Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-31T16:21:11.022803Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:21:11.022874Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-31T16:21:12.012607Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-31T16:21:13.335725Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:21:13.335804Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-31T16:21:14.385525Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-31T16:21:14.385704Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-31T16:21:15.721054Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:21:15.721139Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-31T16:21:17.652524Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-31T16:21:17.772410Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:21:17.772481Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-31T16:21:18.741911Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-03-31T16:21:18.741983Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-03-31T16:21:18.742013Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-03-31T16:21:18.742046Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-03-31T16:21:20.095130Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-31T16:21:20.095336Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-31T16:21:20.274336Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:21:20.274411Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 
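Most of this stretch is the aggregator's idle loop: ScheduleNextTraversal, EvPropagateTimeout and PropagateStatistics repeating every few seconds with no force traversals. When triaging a run it can help to collapse that loop into per-event counts so the rare transitions (ResolveSA, SendBaseStatsToSA) stand out. Below is a small stand-alone helper for ya_log.txt-style output; it is an illustrative tool, not part of the repository, and matches only the event keywords visible above:

    #include <iostream>
    #include <map>
    #include <string>

    int main() {
        const std::string events[] = {
            "ScheduleNextTraversal",
            "EvPropagateTimeout",
            "PropagateStatistics",
            "SendBaseStatsToSA",
            "ResolveSA",
        };
        std::map<std::string, std::size_t> counts;
        std::string line;
        while (std::getline(std::cin, line)) {
            for (const auto& ev : events) {
                // Several log entries may share one physical line in these
                // dumps, so count every occurrence, not just the first.
                for (std::size_t pos = line.find(ev); pos != std::string::npos;
                     pos = line.find(ev, pos + ev.size())) {
                    ++counts[ev];
                }
            }
        }
        for (const auto& [ev, n] : counts) {
            std::cout << ev << ' ' << n << '\n';
        }
    }

Run it as "condense < ya_log.txt". The substring matching is deliberately crude (for example, "PropagateStatistics" also counts "EvPropagateStatistics"), which is acceptable for a first-pass summary.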
2025-03-31T16:21:22.266745Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:21:22.266837Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-31T16:21:23.152887Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-31T16:21:24.291026Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:21:24.291117Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-31T16:21:25.137586Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-31T16:21:25.137753Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-31T16:21:26.098970Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 2, at schemeshard: 72075186224037897 2025-03-31T16:21:26.099065Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 208.000000s, at schemeshard: 72075186224037897 2025-03-31T16:21:26.099338Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037897, stats size# 53 ... waiting for TEvSchemeShardStats 2 (done) ... waiting for TEvPropagateStatistics 2025-03-31T16:21:26.115576Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-03-31T16:21:26.224872Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:21:26.224957Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-31T16:21:28.118284Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-31T16:21:28.236948Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:21:28.237016Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-31T16:21:30.382231Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-31T16:21:30.382470Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 ... 
waiting for TEvPropagateStatistics (done)
2025-03-31T16:21:30.382866Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [2:14464:8945]], StatType[ 0 ], StatRequestsCount[ 1 ]
2025-03-31T16:21:30.386677Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ]
2025-03-31T16:21:30.386754Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 4, ReplyToActorId = [2:14464:8945], StatRequests.size() = 1
>> TColumnShardTestReadWrite::CompactionInGranule_PKTimestamp_Reboot
>> KqpPg::PgUpdate-useSink [GOOD]
>> KqpPg::JoinWithQueryService-StreamLookup
>> TPartitionTests::TooManyImmediateTxs [GOOD]
>> TPartitionTests::WriteSubDomainOutOfSpace
>> TColumnShardTestReadWrite::ReadWithProgram
------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TMeteringSink::FlushStorageV1 [GOOD]
Test command err:
2025-03-31T16:21:02.828381Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488009460446444052:2060];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:21:02.828714Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
2025-03-31T16:21:02.973281Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00158d/r3tmp/tmpg01aFp/pdisk_1.dat
2025-03-31T16:21:03.108920Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 18693, node 1
2025-03-31T16:21:03.190048Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/0l5c/00158d/r3tmp/yandexhVs3PB.tmp
2025-03-31T16:21:03.190080Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/0l5c/00158d/r3tmp/yandexhVs3PB.tmp
2025-03-31T16:21:03.190311Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/0l5c/00158d/r3tmp/yandexhVs3PB.tmp
2025-03-31T16:21:03.190461Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
2025-03-31T16:21:03.197760Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:21:03.197894Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:21:03.199676Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-31T16:21:03.237390Z INFO: TTestServer started on Port 12757 GrpcPort 18693
TClient is connected to server localhost:12757
PQClient connected to localhost:18693
WaitRootIsUp 'Root'...
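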
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:21:03.466216Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:21:03.493657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-03-31T16:21:05.321563Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009473331346761:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:21:05.321580Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009473331346769:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:21:05.321677Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:21:05.325340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480
2025-03-31T16:21:05.335723Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488009473331346775:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking }
2025-03-31T16:21:05.546304Z node 1 :TX_PROXY ERROR: Actor# [1:7488009473331346839:2448] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-31T16:21:05.669403Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488009473331346854:2347], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:21:05.671111Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTY2ZTVhZGMtODE0MjcxMzgtNDRjYTJlOTYtN2Y5ZTI1OGY=, ActorId: [1:7488009473331346758:2335], ActorState: ExecuteState, TraceId: 01jqpg3bmq168ztnnez051c424, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:21:05.673356Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T16:21:05.699704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:21:05.725864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:21:05.802022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-03-31T16:21:06.150096Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jqpg3c5z3dv5zm9m1qmtsbhn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjY0MDczY2ItOTJiYWFkMWUtODA2MGE2NTgtNWUxMTY1Mg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7488009477626314446:2632] 2025-03-31T16:21:07.828341Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488009460446444052:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:21:07.828429Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-03-31T16:21:12.052494Z node 1 :PQ_READ_PROXY DEBUG: new Create topic request 2025-03-31T16:21:12.054663Z node 1 :PQ_READ_PROXY DEBUG: new Create topic request 2025-03-31T16:21:12.086296Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2025-03-31T16:21:12.086581Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Registered with mediator time cast 2025-03-31T16:21:12.086903Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][] pipe [1:7488009503396118546:2818] connected; active server actors: 1 2025-03-31T16:21:12.087132Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][topic2] updating configuration. 
Deleted partitions []. Added partitions [0] 2025-03-31T16:21:12.087814Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-03-31T16:21:12.088034Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] doesn't have tx info 2025-03-31T16:21:12.088054Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-31T16:21:12.088071Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] no config, start with empty partitions and default config 2025-03-31T16:21:12.088089Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Txs.size=0, PlannedTxs.size=0 2025-03-31T16:21:12.088116Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:12.088142Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037892] doesn't have tx writes info 2025-03-31T16:21:12.088241Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037895][] pipe [1:7488009503396118560:2824] connected; active server actors: 1 2025-03-31T16:21:12.088265Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][topic2] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2025-03-31T16:21:12.088357Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][topic2] BALANCER INIT DONE for topic2: (0, 72075186224037892) 2025-03-31T16:21:12.088369Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037895][topic1] updating configuration. Deleted partitions []. Added partitions [0] 2025-03-31T16:21:12.089901Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvInterconnect::TEvNodeInfo 2025-03-31T16:21:12.090262Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037895][topic1] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037895 2025-03-31T16:21:12.090290Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][topic2] TEvClientConnected TabletId 72057594046644480, NodeId 1, Generation 2 2025-03-31T16:21:12.090360Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Registered with mediator time cast 2025-03-31T16:21:12.090573Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037895][topic1] BALANCER INIT DONE for topic1: (0, 72075186224037894) 2025-03-31T16:21:12.091253Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][topic2] TEvClientConnected TabletId 72075186224037892, NodeId 1, Generation 1 2025-03-31T16:21:12.091294Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037895][topic1] TEvClientConnected TabletId 72057594046644480, NodeId 1, Generation 2 2025-03-31T16:21:12.091304Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7488009503396118629:2470], now have 1 active actors on pipe 2025-03-31T16:21:12.091414Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037894 ... 
d from CALCULATING to CALCULATED 2025-03-31T16:21:31.859782Z node 3 :PERSQUEUE DEBUG: [TxId: 281474976715673] save tx TxId: 281474976715673 State: CALCULATED MinStep: 1743438091892 MaxStep: 18446744073709551615 Step: 1743438091899 Predicate: true Kind: KIND_CONFIG TabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 MaxSizeInPartition: 9223372036854775807 LifetimeSeconds: 64800 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 2097152 BurstSize: 2097152 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "topic1" Version: 0 RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } TopicPath: "/Root/topic1" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } } BootstrapConfig { } SourceActor { RawX1: 7488009550752605823 RawX2: 12884904026 } Partitions { Partition { PartitionId: 0 } } 2025-03-31T16:21:31.859903Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-03-31T16:21:31.861083Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-03-31T16:21:31.861118Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Try execute txs with state CALCULATED 2025-03-31T16:21:31.861134Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715673, State CALCULATED 2025-03-31T16:21:31.861151Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715673 State CALCULATED FrontTxId 281474976715673 2025-03-31T16:21:31.861167Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715673, NewState WAIT_RS 2025-03-31T16:21:31.861183Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715673 moved from CALCULATED to WAIT_RS 2025-03-31T16:21:31.861221Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Send TEvTxProcessing::TEvReadSet to 0 receivers. Wait TEvTxProcessing::TEvReadSet from 0 senders. 
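The PERSQUEUE entries above and below walk transaction 281474976715673 through a fixed lifecycle: CALCULATING -> CALCULATED -> WAIT_RS -> EXECUTING -> EXECUTED -> WAIT_RS_ACKS -> DELETING, with a persisted "save tx" / TEvKeyValue round trip at each durable hop. A compact sketch of that progression, reconstructed only from the state names printed in this log (the real implementation has more states and side effects):

    #include <iostream>

    // States as they appear in the "moved from X to Y" entries above.
    enum class ETxState {
        Calculating, Calculated, WaitRs, Executing, Executed, WaitRsAcks, Deleting
    };

    const char* Name(ETxState s) {
        switch (s) {
            case ETxState::Calculating: return "CALCULATING";
            case ETxState::Calculated:  return "CALCULATED";
            case ETxState::WaitRs:      return "WAIT_RS";
            case ETxState::Executing:   return "EXECUTING";
            case ETxState::Executed:    return "EXECUTED";
            case ETxState::WaitRsAcks:  return "WAIT_RS_ACKS";
            case ETxState::Deleting:    return "DELETING";
        }
        return "?";
    }

    int main() {
        // Happy path seen in the trace; each hop follows a persisted write
        // or a participant decision (HaveParticipantsDecision, PredicateAcks).
        const ETxState path[] = {
            ETxState::Calculating, ETxState::Calculated, ETxState::WaitRs,
            ETxState::Executing,   ETxState::Executed,   ETxState::WaitRsAcks,
            ETxState::Deleting,
        };
        const int n = sizeof(path) / sizeof(path[0]);
        for (int i = 0; i + 1 < n; ++i) {
            std::cout << "TxId 281474976715673 moved from " << Name(path[i])
                      << " to " << Name(path[i + 1]) << '\n';
        }
    }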
2025-03-31T16:21:31.861262Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] HaveParticipantsDecision 1 2025-03-31T16:21:31.861314Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715673, NewState EXECUTING 2025-03-31T16:21:31.861330Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715673 moved from WAIT_RS to EXECUTING 2025-03-31T16:21:31.861353Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Received 0, Expected 1 2025-03-31T16:21:31.861366Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1743438091899, TxId 281474976715673 2025-03-31T16:21:31.861634Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-31T16:21:31.861655Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-31T16:21:31.861676Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-31T16:21:31.861696Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] i0000000000 2025-03-31T16:21:31.861706Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] I0000000000 2025-03-31T16:21:31.861726Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] _config_0 2025-03-31T16:21:31.861757Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-31T16:21:31.861787Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] =========================== 2025-03-31T16:21:31.861834Z node 3 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-03-31T16:21:31.862807Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvTxCommitDone Step 1743438091899, TxId 281474976715673, Partition 0 2025-03-31T16:21:31.862837Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-03-31T16:21:31.862840Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Try execute txs with state EXECUTING 2025-03-31T16:21:31.862858Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715673, State EXECUTING 2025-03-31T16:21:31.862876Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715673 State EXECUTING FrontTxId 281474976715673 2025-03-31T16:21:31.862890Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Received 1, Expected 1 2025-03-31T16:21:31.862911Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId: 281474976715673 send TEvPersQueue::TEvProposeTransactionResult(COMPLETE) 2025-03-31T16:21:31.862945Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] complete TxId 281474976715673 2025-03-31T16:21:31.863261Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Apply new config PartitionConfig { MaxCountInPartition: 2147483647 MaxSizeInPartition: 9223372036854775807 LifetimeSeconds: 64800 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 2097152 BurstSize: 2097152 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } 
ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "topic1" Version: 0 RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } TopicPath: "/Root/topic1" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } 2025-03-31T16:21:31.863349Z node 3 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:31.863421Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] delete partitions for TxId 281474976715673 2025-03-31T16:21:31.863447Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715673, NewState EXECUTED 2025-03-31T16:21:31.863465Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715673 moved from EXECUTING to EXECUTED 2025-03-31T16:21:31.863742Z node 3 :PERSQUEUE DEBUG: [TxId: 281474976715673] save tx TxId: 281474976715673 State: EXECUTED MinStep: 1743438091892 MaxStep: 18446744073709551615 Step: 1743438091899 Predicate: true Kind: KIND_CONFIG TabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 MaxSizeInPartition: 9223372036854775807 LifetimeSeconds: 64800 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 2097152 BurstSize: 2097152 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "topic1" Version: 0 RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } TopicPath: "/Root/topic1" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } } BootstrapConfig { } SourceActor { RawX1: 7488009550752605823 RawX2: 12884904026 } Partitions { Partition { PartitionId: 0 } } 2025-03-31T16:21:31.863899Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-03-31T16:21:31.865607Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][topic1] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2025-03-31T16:21:31.865667Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037895][topic2] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037895 2025-03-31T16:21:31.865970Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037897][topic3] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037897 2025-03-31T16:21:31.866132Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Registered with mediator time cast 2025-03-31T16:21:31.866158Z node 3 :PERSQUEUE DEBUG: [PQ: 
72075186224037894] Registered with mediator time cast 2025-03-31T16:21:31.866204Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Registered with mediator time cast 2025-03-31T16:21:31.866285Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-03-31T16:21:31.866313Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Try execute txs with state EXECUTED 2025-03-31T16:21:31.866327Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715673, State EXECUTED 2025-03-31T16:21:31.866344Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715673 State EXECUTED FrontTxId 281474976715673 2025-03-31T16:21:31.866359Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TPersQueue::SendEvReadSetAckToSenders 2025-03-31T16:21:31.866374Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715673, NewState WAIT_RS_ACKS 2025-03-31T16:21:31.866386Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715673 moved from EXECUTED to WAIT_RS_ACKS 2025-03-31T16:21:31.866405Z node 3 :PERSQUEUE DEBUG: [TxId: 281474976715673] PredicateAcks: 0/0 2025-03-31T16:21:31.866412Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] HaveAllRecipientsReceive 1, AllSupportivePartitionsHaveBeenDeleted 1 2025-03-31T16:21:31.866424Z node 3 :PERSQUEUE DEBUG: [TxId: 281474976715673] PredicateAcks: 0/0 2025-03-31T16:21:31.866436Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] add an TxId 281474976715673 to the list for deletion 2025-03-31T16:21:31.866453Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715673, NewState DELETING 2025-03-31T16:21:31.866474Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] delete key for TxId 281474976715673 2025-03-31T16:21:31.866517Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-03-31T16:21:31.866998Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-03-31T16:21:31.867018Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Try execute txs with state DELETING 2025-03-31T16:21:31.867028Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715673, State DELETING 2025-03-31T16:21:31.867044Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] delete TxId 281474976715673 >> TPartitionTests::WriteSubDomainOutOfSpace [GOOD] >> EvWrite::WriteWithSplit >> TPartitionTests::TestNonConflictingActsBatchOk >> TColumnShardTestReadWrite::CompactionInGranule_PKUtf8_Reboot >> TColumnShardTestReadWrite::WriteOverload >> KqpPg::ExplainColumnsReorder [GOOD] >> TColumnShardTestReadWrite::ReadWithProgram [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime_Reboot ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadWithProgram [GOOD] Test command err: 2025-03-31T16:21:33.482018Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:21:33.574809Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:21:33.599516Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:21:33.599758Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:21:33.607939Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:21:33.608167Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:21:33.608409Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:21:33.608528Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:21:33.608623Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:21:33.608736Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:21:33.608876Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:21:33.608998Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:21:33.609124Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:21:33.609238Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:21:33.609340Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:21:33.609456Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:21:33.638962Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:21:33.639138Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:21:33.639198Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:21:33.639389Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:21:33.639506Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:21:33.639566Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:21:33.639609Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:21:33.639701Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:21:33.639759Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:21:33.639823Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:21:33.639853Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:21:33.640055Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:21:33.640129Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:21:33.640192Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:21:33.640256Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:21:33.640364Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:21:33.640422Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:21:33.640476Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:21:33.640515Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:21:33.640602Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:21:33.640646Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:21:33.640679Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 
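The TX_COLUMNSHARD startup trace above runs an ordered chain of normalizers, each bracketed by normalizer_switched/normalizer_init events and a normalizer_finished entry reporting how many chunks it touched (Granules, Chunks, TablesCleaner, CleanGranuleId, CleanInsertionDedup, GCCountersNormalizer, and so on). A sketch of that dispatch loop under assumed interfaces; the struct and member names are illustrative, not the actual YDB classes:

    #include <cstddef>
    #include <functional>
    #include <iostream>
    #include <string>
    #include <vector>

    // Illustrative normalizer record: a name plus a task returning how many
    // chunks it found/repaired, mirroring the "0 chunks found" lines above.
    struct TNormalizer {
        std::string Name;
        std::function<std::size_t()> Run;
    };

    int main() {
        // Registration order matches the trace above.
        std::vector<TNormalizer> chain = {
            {"Granules",       [] { return std::size_t{0}; }},
            {"Chunks",         [] { return std::size_t{0}; }},
            {"TablesCleaner",  [] { return std::size_t{0}; }},
            {"CleanGranuleId", [] { return std::size_t{0}; }},
        };
        for (std::size_t id = 0; id < chain.size(); ++id) {
            std::cout << "event=normalization_start;name=" << chain[id].Name << ";\n";
            const std::size_t chunks = chain[id].Run();
            std::cout << "event=normalizer_finished;name=" << chain[id].Name
                      << ";chunks=" << chunks << ";\n";
        }
    }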
2025-03-31T16:21:33.640737Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:21:33.640770Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:21:33.640801Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:21:33.641188Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=52; 2025-03-31T16:21:33.641276Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=39; 2025-03-31T16:21:33.641350Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=34; 2025-03-31T16:21:33.641425Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=34; 2025-03-31T16:21:33.641578Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:21:33.641668Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:21:33.641715Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:21:33.641924Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:21:33.641971Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:21:33.642000Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:21:33.642145Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:21:33.642198Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:21:33.642227Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-31T16:21:33.642445Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:21:33.642489Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:21:33.642518Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-31T16:21:33.642655Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T16:21:33.642696Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T16:21:33.642745Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... =TEvTaskProcessedResult; 2025-03-31T16:21:34.386019Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:286:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2025-03-31T16:21:34.386050Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:286:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-03-31T16:21:34.386080Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:286:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:201;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-03-31T16:21:34.386130Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:286:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:84;event=TEvTaskProcessedResult; 2025-03-31T16:21:34.386160Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:286:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=fetching.cpp:17;event=apply; 2025-03-31T16:21:34.386204Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:286:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=interval.cpp:28;event=fetched;interval_idx=0; 2025-03-31T16:21:34.386246Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:286:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=interval.cpp:17;event=start_construct_result;interval_idx=0;interval_id=2;memory=8400226;count=1; 2025-03-31T16:21:34.386553Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:286:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=merge.cpp:149;event=DoExecute;interval_idx=0; 2025-03-31T16:21:34.388864Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:286:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=source.cpp:50;event=source_ready;intervals_count=1;source_idx=0; 2025-03-31T16:21:34.388984Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:286:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2025-03-31T16:21:34.389021Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:286:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-03-31T16:21:34.389051Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:286:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:201;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-03-31T16:21:34.389220Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:286:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:84;event=TEvTaskProcessedResult; 2025-03-31T16:21:34.389250Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:286:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=merge.cpp:74;event=DoApply;interval_idx=0; 2025-03-31T16:21:34.389298Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:286:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=2; 2025-03-31T16:21:34.389338Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:286:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=0;merger=0;interval_id=2; 2025-03-31T16:21:34.389385Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:286:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=scanner.cpp:65;event=intervals_finished; 2025-03-31T16:21:34.389493Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:286:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2025-03-31T16:21:34.389626Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:286:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2025-03-31T16:21:34.389929Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:286:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-31T16:21:34.390049Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:286:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2025-03-31T16:21:34.390181Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:286:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2025-03-31T16:21:34.390226Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:286:2304] finished for tablet 9437184 2025-03-31T16:21:34.390723Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:286:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:416;event=scan_finish;compute_actor_id=[1:285:2303];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.003},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.011}],"full":{"a":1743438094378518,"name":"_full_task","f":1743438094378518,"d_finished":0,"c":0,"l":1743438094390299,"d":11781},"events":[{"name":"bootstrap","f":1743438094378721,"d_finished":2502,"c":1,"l":1743438094381223,"d":2502},{"a":1743438094389895,"name":"ack","f":1743438094389895,"d_finished":0,"c":0,"l":1743438094390299,"d":404},{"a":1743438094389881,"name":"processing","f":1743438094382294,"d_finished":5104,"c":9,"l":1743438094389766,"d":5522},{"name":"ProduceResults","f":1743438094380176,"d_finished":1885,"c":11,"l":1743438094390204,"d":1885},{"a":1743438094390209,"name":"Finish","f":1743438094390209,"d_finished":0,"c":0,"l":1743438094390299,"d":90},{"name":"task_result","f":1743438094382310,"d_finished":4999,"c":9,"l":1743438094389764,"d":4999}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2025-03-31T16:21:34.390804Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:286:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:285:2303];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-31T16:21:34.391257Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:286:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:371;event=scan_finished;compute_actor_id=[1:285:2303];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.003},{"events":["f_ack","l_ProduceResults","f_Finish","l_task_result"],"t":0.011},{"events":["l_ack","l_processing","l_Finish"],"t":0.012}],"full":{"a":1743438094378518,"name":"_full_task","f":1743438094378518,"d_finished":0,"c":0,"l":1743438094390849,"d":12331},"events":[{"name":"bootstrap","f":1743438094378721,"d_finished":2502,"c":1,"l":1743438094381223,"d":2502},{"a":1743438094389895,"name":"ack","f":1743438094389895,"d_finished":0,"c":0,"l":1743438094390849,"d":954},{"a":1743438094389881,"name":"processing","f":1743438094382294,"d_finished":5104,"c":9,"l":1743438094389766,"d":6072},{"name":"ProduceResults","f":1743438094380176,"d_finished":1885,"c":11,"l":1743438094390204,"d":1885},{"a":1743438094390209,"name":"Finish","f":1743438094390209,"d_finished":0,"c":0,"l":1743438094390849,"d":640},{"name":"task_result","f":1743438094382310,"d_finished":4999,"c":9,"l":1743438094389764,"d":4999}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2025-03-31T16:21:34.391372Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:286:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-31T16:21:34.377961Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=10308;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=10308;selected_rows=0; 2025-03-31T16:21:34.391421Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:286:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-31T16:21:34.391733Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:286:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;; >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_RegisteredSourceId_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_PreferedPartition_Test >> TPQTest::TestPartitionTotalQuota [GOOD] >> TPQTest::TestPartitionPerConsumerQuota >> KqpPg::CreateTableIfNotExists_GenericQuery [GOOD] >> KqpPg::AlterColumnSetDefaultFromSequenceWithSchemaname ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::ExplainColumnsReorder [GOOD] Test command err: Trying to start YDB, gRPC: 3744, MsgBus: 10406 2025-03-31T16:20:34.318297Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488009341048272920:2131];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:34.318470Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012c1/r3tmp/tmpebqr5t/pdisk_1.dat 2025-03-31T16:20:34.710542Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:20:34.724568Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:34.724672Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:34.747121Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3744, node 1 2025-03-31T16:20:34.873935Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:34.873963Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:34.873976Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:34.874121Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10406 TClient is connected to server localhost:10406 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:35.539010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:37.094798Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009353933175413:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:20:37.094802Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009353933175402:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:20:37.094942Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:20:37.101777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480
2025-03-31T16:20:37.112869Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488009353933175416:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:20:37.182076Z node 1 :TX_PROXY ERROR: Actor# [1:7488009353933175467:2338] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 4467, MsgBus: 21070 2025-03-31T16:20:37.886501Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488009354334130137:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:37.886650Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012c1/r3tmp/tmpLroVCB/pdisk_1.dat 2025-03-31T16:20:37.975062Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4467, node 2 2025-03-31T16:20:38.018422Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:38.018508Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:38.020153Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:20:38.032477Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:38.032498Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:38.032506Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:38.032630Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21070 TClient is connected to server localhost:21070 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:38.444050Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:40.796827Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488009367219032680:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:20:40.796908Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488009367219032688:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:20:40.796918Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:20:40.801189Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480
2025-03-31T16:20:40.819968Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488009367219032694:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:20:40.896460Z node 2 :TX_PROXY ERROR: Actor# [2:7488009367219032745:2333] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 20454, MsgBus: 14286 2025-03-31T16:20:41.626039Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488009372580743010:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:41.626116Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012c1/r3tmp/tmpjFYQSH/pdisk_1.dat 2025-03-31T16:20:41.732659Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20454, node 3 2025-03-31T16:20:41.756446Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:41.756528Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:41.757829Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:20:41.789147Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:41.789173Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:41.789182Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:41.789306Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14286 TClient is connected to server localhost:14286 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } ... ard: 72057594046644480 2025-03-31T16:21:19.173145Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7488009532656069258:2354], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-03-31T16:21:19.260255Z node 10 :TX_PROXY ERROR: Actor# [10:7488009532656069309:2442] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:21:19.760906Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7488009511181231931:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:21:19.761009Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 17204, MsgBus: 10973 2025-03-31T16:21:21.302161Z node 11 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[11:7488009544790029051:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:21:21.302271Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012c1/r3tmp/tmpc1S3Ya/pdisk_1.dat 2025-03-31T16:21:21.422683Z node 11 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:21:21.455235Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:21:21.455357Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:21:21.456848Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17204, node 11 2025-03-31T16:21:21.503978Z node 11 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:21:21.504015Z node 11 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:21:21.504043Z node 11 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:21:21.504214Z node 11 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10973 TClient is connected to server localhost:10973 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:21:22.149737Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:21:26.141921Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7488009566264866198:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:21:26.142081Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:21:26.163289Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
2025-03-31T16:21:26.208565Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
2025-03-31T16:21:26.264399Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7488009566264866370:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:21:26.264499Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:21:26.264627Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7488009566264866376:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:21:26.269435Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480
2025-03-31T16:21:26.282579Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7488009566264866378:2354], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking }
2025-03-31T16:21:26.302215Z node 11 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7488009544790029051:2060];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:21:26.302313Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-31T16:21:26.368063Z node 11 :TX_PROXY ERROR: Actor# [11:7488009566264866432:2444] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-31T16:21:32.757160Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [12:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-03-31T16:21:32.757559Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
2025-03-31T16:21:32.757771Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012c1/r3tmp/tmpuZNvS2/pdisk_1.dat
2025-03-31T16:21:33.152288Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480
2025-03-31T16:21:33.190162Z node 12 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:21:33.230814Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:21:33.231012Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:21:33.243020Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-31T16:21:33.341149Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [12:644:2552], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:21:33.341309Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [12:654:2557], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:21:33.341480Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:21:33.351482Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480
2025-03-31T16:21:33.484359Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [12:658:2560], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-03-31T16:21:33.535198Z node 12 :TX_PROXY ERROR: Actor# [12:730:2601] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } PreparedQuery: "1c135f89-ba761503-27c64087-c78c3428" QueryAst: "(\n(let $1 (PgType \'int4))\n(let $2 \'(\'(\'\"_logical_id\" \'218) \'(\'\"_id\" \'\"35e72aea-6f9f712e-9e8b1ef7-b714515\") \'(\'\"_partition_mode\" \'\"single\")))\n(let $3 (DqPhyStage \'() (lambda \'() (Iterator (AsList (AsStruct \'(\'\"x\" (PgConst \'1 $1)) \'(\'\"y\" (PgConst \'2 $1)))))) $2))\n(let $4 (DqCnResult (TDqOutput $3 \'\"0\") \'(\'\"y\" \'\"x\")))\n(return (KqpPhysicalQuery \'((KqpPhysicalTx \'($3) \'($4) \'() \'(\'(\'\"type\" \'\"generic\")))) \'((KqpTxResultBinding (ListType (StructType \'(\'\"x\" $1) \'(\'\"y\" $1))) \'\"0\" \'\"0\")) \'(\'(\'\"type\" \'\"query\"))))\n)\n" QueryPlan: "{\"Plan\":{\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"PlanNodeId\":1,\"Operators\":[{\"Inputs\":[],\"Iterator\":\"[{x: \\\"1\\\",y: \\\"2\\\"}]\",\"Name\":\"Iterator\"}],\"Node Type\":\"ConstantExpr\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"ResourcePoolId\":\"default\"},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"tables\":[],\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"OptimizerStats\":{\"EquiJoinsCount\":0,\"JoinsCount\":0},\"PlanNodeType\":\"Query\"}}" YdbResults { columns { name: "y" type { pg_type { oid: 23 } } } columns { name: "x" type { pg_type { oid: 23 } } } } QueryDiagnostics: "" >> KqpPg::DropSequence [GOOD] >> KqpPg::DeleteWithQueryService+useSink >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKString >> TColumnShardTestReadWrite::WriteReadModifications >> Viewer::JsonStorageListingV2PDiskIdFilter [GOOD] >> BasicUsage::WriteSessionSwitchDatabases [GOOD] >> TColumnShardTestReadWrite::RebootWriteRead [GOOD] >> TColumnShardTestReadWrite::WriteReadModifications [GOOD] >> TColumnShardTestReadWrite::WriteReadNoCompression [GOOD] >> TPartitionTests::TestNonConflictingActsBatchOk [GOOD] >> KqpPg::JoinWithQueryService-StreamLookup [GOOD] >> KqpPg::PgAggregate+useSink >> TPartitionTests::TestTxBatchInFederation ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadModifications [GOOD] Test command err: 2025-03-31T16:21:37.477671Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:21:37.549870Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:21:37.567971Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:21:37.568238Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:21:37.574865Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:21:37.575073Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:21:37.575296Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:21:37.575399Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:21:37.575482Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:21:37.575585Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:21:37.575706Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:21:37.575819Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:21:37.575954Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:21:37.576060Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:21:37.576192Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:21:37.576255Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:21:37.597091Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:21:37.597202Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:21:37.597245Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:21:37.597384Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:21:37.597481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:21:37.597527Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:21:37.597556Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:21:37.597657Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:21:37.597702Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:21:37.597738Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:21:37.597759Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:21:37.597902Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:21:37.597946Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:21:37.597980Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:21:37.598000Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:21:37.598071Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:21:37.598112Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:21:37.598140Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:21:37.598161Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:21:37.598199Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:21:37.598220Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:21:37.598240Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:21:37.598280Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 
2025-03-31T16:21:37.598307Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:21:37.598352Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:21:37.598648Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=31; 2025-03-31T16:21:37.598709Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=26; 2025-03-31T16:21:37.598866Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=26; 2025-03-31T16:21:37.598917Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=23; 2025-03-31T16:21:37.599028Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:21:37.599100Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:21:37.599183Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:21:37.599409Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:21:37.599462Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:21:37.599496Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:21:37.599620Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:21:37.599651Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:21:37.599671Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-31T16:21:37.599835Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:21:37.599884Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:21:37.599912Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-31T16:21:37.600041Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T16:21:37.600081Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T16:21:37.600121Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... angeResult;fline=fetching.cpp:58;scan_step=name=BUILD_STAGE_RESULT;details={};;scan_step_idx=5; 2025-03-31T16:21:38.868453Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:430:2448];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:84;event=TEvTaskProcessedResult; 2025-03-31T16:21:38.868485Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:430:2448];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=fetching.cpp:17;event=apply; 2025-03-31T16:21:38.868522Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:430:2448];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=interval.cpp:28;event=fetched;interval_idx=0; 2025-03-31T16:21:38.868563Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:430:2448];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=interval.cpp:17;event=start_construct_result;interval_idx=0;interval_id=6;memory=8391908;count=2; 2025-03-31T16:21:38.868970Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:430:2448];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=merge.cpp:149;event=DoExecute;interval_idx=0; 2025-03-31T16:21:38.869365Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:430:2448];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=source.cpp:50;event=source_ready;intervals_count=1;source_idx=0; 2025-03-31T16:21:38.869460Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:430:2448];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:21:38.869506Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:430:2448];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-03-31T16:21:38.869550Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:430:2448];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:201;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-03-31T16:21:38.869903Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:430:2448];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:84;event=TEvTaskProcessedResult; 2025-03-31T16:21:38.869945Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:430:2448];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=merge.cpp:74;event=DoApply;interval_idx=0; 2025-03-31T16:21:38.869985Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:430:2448];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=6; 2025-03-31T16:21:38.870031Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:430:2448];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=0;merger=0;interval_id=6; 2025-03-31T16:21:38.870099Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:430:2448];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=scanner.cpp:65;event=intervals_finished; 2025-03-31T16:21:38.870205Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:430:2448];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:21:38.870313Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:430:2448];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:21:38.870415Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 9437184 2025-03-31T16:21:38.870643Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[35] (CS::INDEXATION) apply at tablet 9437184 2025-03-31T16:21:38.871680Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:12 Blob count: 1 2025-03-31T16:21:38.871818Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=2812;raw_bytes=868;count=1;records=10} inactive {blob_bytes=11248;raw_bytes=3472;count=4;records=40} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-03-31T16:21:38.872371Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:430:2448];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-31T16:21:38.872509Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:430:2448];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:21:38.872611Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:430:2448];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:21:38.872681Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:430:2448] finished for tablet 9437184 2025-03-31T16:21:38.873129Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:430:2448];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:416;event=scan_finish;compute_actor_id=[1:426:2444];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.004},{"events":["f_processing","f_task_result"],"t":0.005},{"events":["l_task_result"],"t":0.014},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.016}],"full":{"a":1743438098856142,"name":"_full_task","f":1743438098856142,"d_finished":0,"c":0,"l":1743438098872740,"d":16598},"events":[{"name":"bootstrap","f":1743438098856328,"d_finished":4090,"c":1,"l":1743438098860418,"d":4090},{"a":1743438098872324,"name":"ack","f":1743438098872324,"d_finished":0,"c":0,"l":1743438098872740,"d":416},{"a":1743438098872303,"name":"processing","f":1743438098861724,"d_finished":3526,"c":10,"l":1743438098870367,"d":3963},{"name":"ProduceResults","f":1743438098858832,"d_finished":1872,"c":12,"l":1743438098872662,"d":1872},{"a":1743438098872668,"name":"Finish","f":1743438098872668,"d_finished":0,"c":0,"l":1743438098872740,"d":72},{"name":"task_result","f":1743438098861748,"d_finished":3350,"c":10,"l":1743438098870365,"d":3350}],"id":"9437184::9"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:21:38.873245Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:430:2448];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:426:2444];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-31T16:21:38.873704Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:430:2448];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:371;event=scan_finished;compute_actor_id=[1:426:2444];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.004},{"events":["f_processing","f_task_result"],"t":0.005},{"events":["l_task_result"],"t":0.014},{"events":["f_ack","l_ProduceResults","f_Finish"],"t":0.016},{"events":["l_ack","l_processing","l_Finish"],"t":0.017}],"full":{"a":1743438098856142,"name":"_full_task","f":1743438098856142,"d_finished":0,"c":0,"l":1743438098873297,"d":17155},"events":[{"name":"bootstrap","f":1743438098856328,"d_finished":4090,"c":1,"l":1743438098860418,"d":4090},{"a":1743438098872324,"name":"ack","f":1743438098872324,"d_finished":0,"c":0,"l":1743438098873297,"d":973},{"a":1743438098872303,"name":"processing","f":1743438098861724,"d_finished":3526,"c":10,"l":1743438098870367,"d":4520},{"name":"ProduceResults","f":1743438098858832,"d_finished":1872,"c":12,"l":1743438098872662,"d":1872},{"a":1743438098872668,"name":"Finish","f":1743438098872668,"d_finished":0,"c":0,"l":1743438098873297,"d":629},{"name":"task_result","f":1743438098861748,"d_finished":3350,"c":10,"l":1743438098870365,"d":3350}],"id":"9437184::9"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:21:38.873812Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:430:2448];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-31T16:21:38.855624Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=1;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=2812;inserted_portions_bytes=0;committed_portions_bytes=1384;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=4196;selected_rows=0; 2025-03-31T16:21:38.873857Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:430:2448];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-31T16:21:38.874191Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:430:2448];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::RebootWriteRead [GOOD] Test command err: 2025-03-31T16:21:31.711319Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:21:31.807372Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:21:31.833904Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:21:31.834204Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:21:31.843135Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:21:31.843352Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:21:31.843636Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:21:31.843756Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:21:31.843884Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:21:31.844023Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:21:31.844151Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:21:31.844283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:21:31.844403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:21:31.844516Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:21:31.844622Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:21:31.844755Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:21:31.874794Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:21:31.874977Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:21:31.875038Z node 
1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:21:31.875253Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:21:31.875420Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:21:31.875507Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:21:31.875554Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:21:31.875657Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:21:31.875721Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:21:31.875767Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:21:31.875797Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:21:31.876012Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:21:31.876094Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:21:31.876161Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:21:31.876195Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:21:31.876303Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:21:31.876368Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:21:31.876424Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:21:31.876465Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:21:31.876549Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:21:31.876598Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:21:31.876644Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:21:31.876710Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:21:31.876751Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:21:31.876780Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:21:31.877190Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=48; 2025-03-31T16:21:31.877305Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=58; 2025-03-31T16:21:31.877404Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=38; 2025-03-31T16:21:31.877507Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=42; 2025-03-31T16:21:31.877679Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:21:31.877737Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:21:31.877773Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:21:31.877982Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:21:31.878031Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:21:31.878066Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:21:31.878231Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:21:31.878283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:21:31.878314Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-31T16:21:31.878527Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:21:31.878574Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:21:31.878605Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-31T16:21:31.878741Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T16:21:31.878791Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T16:21:31.878837Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... t_id: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-31T16:21:38.716516Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-31T16:21:38.716666Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:232;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;);columns=10;rows=31; 2025-03-31T16:21:38.716770Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:252;stage=data_format;batch_size=1984;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-03-31T16:21:38.716928Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:1064:2935];bytes=1984;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: 
string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2025-03-31T16:21:38.717096Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-31T16:21:38.717233Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-31T16:21:38.717387Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-31T16:21:38.717635Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-31T16:21:38.717796Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-31T16:21:38.717950Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-31T16:21:38.717995Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1065:2936] finished for tablet 9437184 2025-03-31T16:21:38.718392Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:416;event=scan_finish;compute_actor_id=[1:1064:2935];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.003},{"events":["f_ack","l_task_result"],"t":0.011},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.013}],"full":{"a":1743438098704799,"name":"_full_task","f":1743438098704799,"d_finished":0,"c":0,"l":1743438098718041,"d":13242},"events":[{"name":"bootstrap","f":1743438098704991,"d_finished":2561,"c":1,"l":1743438098707552,"d":2561},{"a":1743438098717608,"name":"ack","f":1743438098716154,"d_finished":1256,"c":1,"l":1743438098717410,"d":1689},{"a":1743438098717592,"name":"processing","f":1743438098708601,"d_finished":5232,"c":10,"l":1743438098717416,"d":5681},{"name":"ProduceResults","f":1743438098706381,"d_finished":2856,"c":13,"l":1743438098717977,"d":2856},{"a":1743438098717981,"name":"Finish","f":1743438098717981,"d_finished":0,"c":0,"l":1743438098718041,"d":60},{"name":"task_result","f":1743438098708621,"d_finished":3855,"c":9,"l":1743438098716003,"d":3855}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-31T16:21:38.718465Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:1064:2935];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-31T16:21:38.718840Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:371;event=scan_finished;compute_actor_id=[1:1064:2935];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.003},{"events":["f_ack","l_task_result"],"t":0.011},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.013}],"full":{"a":1743438098704799,"name":"_full_task","f":1743438098704799,"d_finished":0,"c":0,"l":1743438098718504,"d":13705},"events":[{"name":"bootstrap","f":1743438098704991,"d_finished":2561,"c":1,"l":1743438098707552,"d":2561},{"a":1743438098717608,"name":"ack","f":1743438098716154,"d_finished":1256,"c":1,"l":1743438098717410,"d":2152},{"a":1743438098717592,"name":"processing","f":1743438098708601,"d_finished":5232,"c":10,"l":1743438098717416,"d":6144},{"name":"ProduceResults","f":1743438098706381,"d_finished":2856,"c":13,"l":1743438098717977,"d":2856},{"a":1743438098717981,"name":"Finish","f":1743438098717981,"d_finished":0,"c":0,"l":1743438098718504,"d":523},{"name":"task_result","f":1743438098708621,"d_finished":3855,"c":9,"l":1743438098716003,"d":3855}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-31T16:21:38.718903Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-31T16:21:38.704281Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=10308;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=10308;selected_rows=0; 2025-03-31T16:21:38.718945Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-31T16:21:38.719265Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; >> KqpPg::TableArrayInsert-useSink [GOOD] >> KqpPg::Returning+useSink >> PQCountersLabeled::NewConsumersCountersAppear [GOOD] >> PQCountersSimple::Partition ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> 
BasicUsage::WriteSessionSwitchDatabases [GOOD] Test command err: 2025-03-31T16:19:10.050847Z :WriteSessionNoAvailableDatabase INFO: Random seed for debugging is 1743437950050817 2025-03-31T16:19:10.439174Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008979384228278:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:10.439789Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:19:10.501040Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488008980767417213:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:10.501101Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001626/r3tmp/tmplPWjCB/pdisk_1.dat 2025-03-31T16:19:10.760931Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-31T16:19:10.763092Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-31T16:19:11.323635Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:19:11.353259Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:19:11.353344Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:19:11.359620Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:19:11.359715Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:19:11.369714Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:19:11.369835Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:19:11.371664Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25862, node 1 2025-03-31T16:19:11.675681Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/0l5c/001626/r3tmp/yandexEmJLOT.tmp 2025-03-31T16:19:11.675707Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/0l5c/001626/r3tmp/yandexEmJLOT.tmp 2025-03-31T16:19:11.675845Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/0l5c/001626/r3tmp/yandexEmJLOT.tmp 2025-03-31T16:19:11.675953Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:19:11.765017Z INFO: TTestServer started on Port 1808 GrpcPort 25862 TClient is connected to server localhost:1808 PQClient connected to localhost:25862 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:19:12.098540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-03-31T16:19:14.516758Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008997947286715:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:14.516866Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488008997947286720:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:14.516994Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:19:14.524584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480 2025-03-31T16:19:14.564704Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488008997947286730:2313], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2025-03-31T16:19:14.847303Z node 2 :TX_PROXY ERROR: Actor# [2:7488008997947286758:2130] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:19:14.876919Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7488008997947286767:2318], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:19:14.878523Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=N2Y2OTQ1MjUtNDQ4MGNlZDgtNWRjODQxZC1lYTg1MGFmMA==, ActorId: [2:7488008997947286699:2308], ActorState: ExecuteState, TraceId: 01jqpfzzee339c4qw2ttywzgnn, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:19:14.877430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-31T16:19:14.879878Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488008996564098566:2344], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:19:14.880291Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T16:19:14.880342Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NmY3ZGExNy1lZTVkMmRlYi1hZTVhZWZkYS04OGQ4MDg1Yw==, ActorId: [1:7488008996564098540:2337], ActorState: ExecuteState, TraceId: 01jqpfzzg34q2v95qzvwyy5mf9, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:19:14.880819Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T16:19:15.021906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:19:15.153592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:25862", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2025-03-31T16:19:15.413177Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jqpg00589y7x9nh5ke7gqzjd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2Y5ZWZjYWMtNTc4OWE2Ni01YzFjNDRkZi0zZTYwNjBiZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:19:15.438270Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488008979384228278:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:15.438324Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Subcribe to ClusterTracker from [1:7488009000859066275:2984] 2025-03-31T16:19:15.501174Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488008980767417213:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:15.501260Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-03-31T16:19:21.452084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 waiting... PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:25862 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-03-31T16:19:21.592502Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:25862 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 8 ... 3-31T16:21:16.927180Z :DEBUG: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session: OnReadDone gRpcStatusCode: 0 2025-03-31T16:21:16.927241Z :INFO: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1743438076927 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-31T16:21:16.927364Z :INFO: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session established. 
Init response: last_seq_no: 2 session_id: "src_id|a9a19c1f-58f46f1-68933fc0-87e9b780_0" supported_codecs { codecs: 1 codecs: 2 codecs: 3 } 2025-03-31T16:21:17.927096Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7488009521305382394:3621] (SourceId=src_id, PreferedPartition=(NULL)) Update the table 2025-03-31T16:21:17.945037Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7488009521305382394:3621] (SourceId=src_id, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=1 Status=SUCCESS 2025-03-31T16:21:17.945074Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7488009521305382394:3621] (SourceId=src_id, PreferedPartition=(NULL)) Start idle 2025-03-31T16:21:35.523431Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] TPersQueueReadBalancer::HandleWakeup 2025-03-31T16:21:35.523502Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] Send TEvPersQueue::TEvStatus TabletId: 72075186224037892 Cookie: 4 2025-03-31T16:21:35.524128Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 TotalPartitions: 1 SourceIdMaxCounts: 6000000 } 2025-03-31T16:21:35.524952Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] Send TEvPeriodicTopicStats PathId: 13 Generation: 1 StatsReportRound: 4 DataSize: 0 UsedReserveSize: 0 2025-03-31T16:21:35.525091Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] ProcessPendingStats. PendingUpdates size 1 >>> Ready to answer: ok 2025-03-31T16:21:37.066323Z :DEBUG: [/Root] TraceId [] SessionId [src_id|a9a19c1f-58f46f1-68933fc0-87e9b780_0] MessageGroupId [src_id] Write 1 messages with Id from 1 to 1 >>> Got event: ReadyToAcceptEvent 2025-03-31T16:21:37.066827Z :DEBUG: [/Root] TraceId [] SessionId [src_id|a9a19c1f-58f46f1-68933fc0-87e9b780_0] MessageGroupId [src_id] Write session: try to update token 2025-03-31T16:21:37.066879Z :DEBUG: [/Root] TraceId [] SessionId [src_id|a9a19c1f-58f46f1-68933fc0-87e9b780_0] MessageGroupId [src_id] Send 1 message(s) (0 left), first sequence number is 3 2025-03-31T16:21:37.067479Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: src_id|a9a19c1f-58f46f1-68933fc0-87e9b780_0 grpc read done: success: 1 data: write_request[data omitted] 2025-03-31T16:21:37.067842Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::NPQ::TEvPartitionWriter::TEvWriteRequest 2025-03-31T16:21:37.068232Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-03-31T16:21:37.068281Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-03-31T16:21:37.068362Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 1 2025-03-31T16:21:37.068622Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::IEventHandle 2025-03-31T16:21:37.068967Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-03-31T16:21:37.069001Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-03-31T16:21:37.069049Z node 4 :PERSQUEUE DEBUG: 
[PQ: 72075186224037892] got client message topic: rt3.dc1--test-topic partition: 0 SourceId: '\0src_id' SeqNo: 3 partNo : 0 messageNo: 1 size 98 offset: -1 2025-03-31T16:21:37.069222Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob processing sourceId '\0src_id' seqNo 3 partNo 0 2025-03-31T16:21:37.087998Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob complete sourceId '\0src_id' seqNo 3 partNo 0 FormedBlobsCount 0 NewHead: Offset 2 PartNo 0 PackedSize 172 count 1 nextOffset 3 batches 1 2025-03-31T16:21:37.088520Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Add new write blob: topic 'rt3.dc1--test-topic' partition 0 compactOffset 2,1 HeadOffset 0 endOffset 2 curOffset 3 d0000000000_00000000000000000002_00000_0000000001_00000| size 160 WTime 1743438097088 2025-03-31T16:21:37.088649Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-31T16:21:37.088675Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-31T16:21:37.088693Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-03-31T16:21:37.088710Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-31T16:21:37.088726Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] m0000000000psrc_id 2025-03-31T16:21:37.088742Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] d0000000000_00000000000000000002_00000_0000000001_00000| 2025-03-31T16:21:37.088758Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] i0000000000 2025-03-31T16:21:37.088779Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-31T16:21:37.088795Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] =========================== 2025-03-31T16:21:37.089126Z node 4 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-03-31T16:21:37.089207Z node 4 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 2 partNo 0 count 1 size 160 2025-03-31T16:21:37.094684Z node 4 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 2 count 1 size 160 actorID [4:7488009085423085549:2429] 2025-03-31T16:21:37.094781Z node 4 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. Tablet '72075186224037892' partition 0 offset 2 partno 0 count 1 parts 0 size 160 2025-03-31T16:21:37.095008Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 105 WriteNewSizeFromSupportivePartitions# 0 2025-03-31T16:21:37.095059Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-31T16:21:37.095129Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0src_id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 3, partNo: 0, Offset: 2 is stored on disk 2025-03-31T16:21:37.095323Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] topic 'rt3.dc1--test-topicCounters. 
CacheSize 480 CachedBlobs 3 2025-03-31T16:21:37.095362Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 1 requestId: cookie: 1 2025-03-31T16:21:37.095604Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::IEventHandle 2025-03-31T16:21:37.096207Z :DEBUG: [/Root] TraceId [] SessionId [src_id|a9a19c1f-58f46f1-68933fc0-87e9b780_0] MessageGroupId [src_id] Write session: OnReadDone gRpcStatusCode: 0 2025-03-31T16:21:37.096321Z :DEBUG: [/Root] TraceId [] SessionId [src_id|a9a19c1f-58f46f1-68933fc0-87e9b780_0] MessageGroupId [src_id] Write session got write response: acks { seq_no: 3 written { offset: 2 } } write_statistics { persisting_time { nanos: 6000000 } min_queue_wait_time { nanos: 18000000 } max_queue_wait_time { nanos: 18000000 } partition_quota_wait_time { } topic_quota_wait_time { } } 2025-03-31T16:21:37.096339Z :DEBUG: [/Root] TraceId [] SessionId [src_id|a9a19c1f-58f46f1-68933fc0-87e9b780_0] MessageGroupId [src_id] OnAck: seqNo=1, txId=? 2025-03-31T16:21:37.096355Z :DEBUG: [/Root] TraceId [] SessionId [src_id|a9a19c1f-58f46f1-68933fc0-87e9b780_0] MessageGroupId [src_id] Write session: acknoledged message 1 2025-03-31T16:21:37.097344Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: src_id|a9a19c1f-58f46f1-68933fc0-87e9b780_0 grpc read done: success: 0 data: 2025-03-31T16:21:37.097364Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: src_id|a9a19c1f-58f46f1-68933fc0-87e9b780_0 grpc read failed 2025-03-31T16:21:37.097412Z node 3 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 4 sessionId: src_id|a9a19c1f-58f46f1-68933fc0-87e9b780_0 2025-03-31T16:21:37.097429Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: src_id|a9a19c1f-58f46f1-68933fc0-87e9b780_0 is DEAD 2025-03-31T16:21:37.097677Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-03-31T16:21:37.098733Z :DEBUG: [/Root] TraceId [] SessionId [src_id|a9a19c1f-58f46f1-68933fc0-87e9b780_0] MessageGroupId [src_id] Write session: OnReadDone gRpcStatusCode: 1, Msg: Cancelled on the server side, Details: , InternalError: 0 2025-03-31T16:21:37.098846Z :ERROR: [/Root] TraceId [] SessionId [src_id|a9a19c1f-58f46f1-68933fc0-87e9b780_0] MessageGroupId [src_id] Got error. Status: CLIENT_CANCELLED, Description:
: Error: GRpc error: (1): Cancelled on the server side 2025-03-31T16:21:37.098877Z :ERROR: [/Root] TraceId [] SessionId [src_id|a9a19c1f-58f46f1-68933fc0-87e9b780_0] MessageGroupId [src_id] Write session will not restart after a fatal error 2025-03-31T16:21:37.098913Z :INFO: [/Root] TraceId [] SessionId [src_id|a9a19c1f-58f46f1-68933fc0-87e9b780_0] MessageGroupId [src_id] Write session will now close 2025-03-31T16:21:37.098980Z :DEBUG: [/Root] TraceId [] SessionId [src_id|a9a19c1f-58f46f1-68933fc0-87e9b780_0] MessageGroupId [src_id] Write session: aborting 2025-03-31T16:21:37.099193Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7488009521305382429:3621] destroyed 2025-03-31T16:21:37.099298Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 2025-03-31T16:21:37.112708Z :DEBUG: [/Root] TraceId [] SessionId [src_id|a9a19c1f-58f46f1-68933fc0-87e9b780_0] MessageGroupId [src_id] Write session: destroy 2025-03-31T16:21:37.339745Z node 3 :KQP_COMPUTE WARN: TxId: 281474976720903, task: 1, CA Id [3:7488009611499696857:3831]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 0 2025-03-31T16:21:37.371564Z node 3 :KQP_COMPUTE WARN: TxId: 281474976720903, task: 1, CA Id [3:7488009611499696857:3831]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-03-31T16:21:37.418868Z node 3 :KQP_COMPUTE WARN: TxId: 281474976720903, task: 1, CA Id [3:7488009611499696857:3831]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-03-31T16:21:37.487500Z node 3 :KQP_COMPUTE WARN: TxId: 281474976720903, task: 1, CA Id [3:7488009611499696857:3831]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-03-31T16:21:37.593657Z node 3 :KQP_COMPUTE WARN: TxId: 281474976720903, task: 1, CA Id [3:7488009611499696857:3831]. 
Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadNoCompression [GOOD] Test command err: 2025-03-31T16:21:31.530609Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:21:31.620280Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:21:31.644910Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:21:31.645239Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:21:31.653665Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:21:31.653876Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:21:31.654127Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:21:31.654274Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:21:31.654371Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:21:31.654489Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:21:31.654609Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:21:31.654747Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:21:31.654863Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:21:31.654966Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:21:31.655110Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:21:31.655240Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:21:31.684772Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:21:31.684975Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:21:31.685030Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:21:31.685245Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:21:31.685413Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:21:31.685495Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:21:31.685542Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:21:31.685651Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:21:31.685716Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:21:31.685772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:21:31.685809Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:21:31.686049Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:21:31.686126Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:21:31.686169Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:21:31.686201Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:21:31.686324Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:21:31.686395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:21:31.686439Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:21:31.686473Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:21:31.686564Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:21:31.686611Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:21:31.686649Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:21:31.686713Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:21:31.686753Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:21:31.686790Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:21:31.687270Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=59; 2025-03-31T16:21:31.687401Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=53; 2025-03-31T16:21:31.687506Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=52; 2025-03-31T16:21:31.687599Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=41; 2025-03-31T16:21:31.687784Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:21:31.687848Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:21:31.687884Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:21:31.688110Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:21:31.688185Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:21:31.688217Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:21:31.688395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:21:31.688442Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:21:31.688473Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-31T16:21:31.688705Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:21:31.688753Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:21:31.688784Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-31T16:21:31.688906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T16:21:31.688954Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T16:21:31.689014Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-31T16:21:39.046251Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-31T16:21:39.046405Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:232;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;);columns=10;rows=31; 2025-03-31T16:21:39.046502Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:252;stage=data_format;batch_size=1984;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-03-31T16:21:39.046655Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:1064:2935];bytes=1984;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2025-03-31T16:21:39.046816Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-31T16:21:39.046942Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-31T16:21:39.047123Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-31T16:21:39.047364Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-31T16:21:39.047509Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-31T16:21:39.047647Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-31T16:21:39.047690Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1065:2936] finished for tablet 9437184 2025-03-31T16:21:39.048246Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:416;event=scan_finish;compute_actor_id=[1:1064:2935];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.003},{"events":["f_processing","f_task_result"],"t":0.004},{"events":["f_ack","l_task_result"],"t":0.013},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.015}],"full":{"a":1743438099032269,"name":"_full_task","f":1743438099032269,"d_finished":0,"c":0,"l":1743438099047753,"d":15484},"events":[{"name":"bootstrap","f":1743438099032539,"d_finished":3089,"c":1,"l":1743438099035628,"d":3089},{"a":1743438099047335,"name":"ack","f":1743438099045907,"d_finished":1249,"c":1,"l":1743438099047156,"d":1667},{"a":1743438099047322,"name":"processing","f":1743438099037010,"d_finished":6146,"c":10,"l":1743438099047159,"d":6577},{"name":"ProduceResults","f":1743438099034315,"d_finished":3155,"c":13,"l":1743438099047674,"d":3155},{"a":1743438099047676,"name":"Finish","f":1743438099047676,"d_finished":0,"c":0,"l":1743438099047753,"d":77},{"name":"task_result","f":1743438099037030,"d_finished":4737,"c":9,"l":1743438099045692,"d":4737}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-31T16:21:39.048346Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:1064:2935];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-31T16:21:39.048838Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:371;event=scan_finished;compute_actor_id=[1:1064:2935];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.003},{"events":["f_processing","f_task_result"],"t":0.004},{"events":["f_ack","l_task_result"],"t":0.013},{"events":["l_ProduceResults","f_Finish"],"t":0.015},{"events":["l_ack","l_processing","l_Finish"],"t":0.016}],"full":{"a":1743438099032269,"name":"_full_task","f":1743438099032269,"d_finished":0,"c":0,"l":1743438099048397,"d":16128},"events":[{"name":"bootstrap","f":1743438099032539,"d_finished":3089,"c":1,"l":1743438099035628,"d":3089},{"a":1743438099047335,"name":"ack","f":1743438099045907,"d_finished":1249,"c":1,"l":1743438099047156,"d":2311},{"a":1743438099047322,"name":"processing","f":1743438099037010,"d_finished":6146,"c":10,"l":1743438099047159,"d":7221},{"name":"ProduceResults","f":1743438099034315,"d_finished":3155,"c":13,"l":1743438099047674,"d":3155},{"a":1743438099047676,"name":"Finish","f":1743438099047676,"d_finished":0,"c":0,"l":1743438099048397,"d":721},{"name":"task_result","f":1743438099037030,"d_finished":4737,"c":9,"l":1743438099045692,"d":4737}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-31T16:21:39.048928Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-31T16:21:39.031590Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=10308;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=10308;selected_rows=0; 2025-03-31T16:21:39.048974Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-31T16:21:39.049345Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; >> PQCountersSimple::Partition [GOOD] |90.5%| [TA] $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |90.5%| [TA] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> PQCountersSimple::Partition [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:126:2057] recipient: [1:124:2158] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:126:2057] recipient: [1:124:2158] Leader for TabletID 72057594037927937 is [1:130:2162] sender: [1:131:2057] recipient: [1:124:2158] 2025-03-31T16:21:08.442198Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:08.442274Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:172:2057] recipient: [1:170:2193] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:172:2057] recipient: [1:170:2193] Leader for TabletID 72057594037927938 is [1:176:2197] sender: [1:177:2057] recipient: [1:170:2193] Leader for TabletID 72057594037927937 is [1:130:2162] sender: [1:200:2057] recipient: [1:14:2061] 2025-03-31T16:21:08.460900Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:08.481644Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:198:2213] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-03-31T16:21:08.482632Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:206:2219] 2025-03-31T16:21:08.485150Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:206:2219] 2025-03-31T16:21:08.487363Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:207:2220] 2025-03-31T16:21:08.489147Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:207:2220] 2025-03-31T16:21:08.494948Z node 1 :PERSQUEUE INFO: new Cookie default|5f30b907-5264c108-577435fa-4e7f349c_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-31T16:21:08.501017Z node 1 :PERSQUEUE INFO: new Cookie default|c56811bd-2f9a7df3-933cfe39-91712412_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-31T16:21:08.506437Z node 1 :PERSQUEUE INFO: new Cookie default|df9a0441-744d3832-efa9cb09-73c5d2d2_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Expected: { "sensors": [ { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": 
"PQ/MessageLagByCommitted" }, "value": 30 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/MessageLagByLastRead" }, "value": 29 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/PartitionMaxReadQuotaUsage" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesAvailAvgMin" }, "value": 1000000000 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesAvailAvgSec" }, "value": 1000000000 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesMaxPerDay" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesMaxPerHour" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesMaxPerMin" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesMaxPerSec" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesPerDay" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesPerHour" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesPerMin" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesPerSec" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesQuota" }, "value": 1000000000 }, { "kind": "RATE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadOffsetRewindSum" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadTimeLagMs" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/SizeLagByCommitted" }, "value": 747 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/SizeLagByLastRead" }, "value": 747 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/TimeSinceLastReadMs" }, "value": 5000 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": 
"PQ/TotalMessageLagByLastRead" }, "value": 29 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/TotalSizeLagByLastRead" }, "value": 747 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/TotalTimeLagMsByLastRead" }, "value": 5000 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/UserPartitionsAnswered" }, "value": 2 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/WriteTimeLagMsByLastRead" }, "value": 30 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/WriteTimeLagMsByLastReadOld" }, "value": 5000 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "total", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/MessageLagByCommitted" }, "value": 30 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "total", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/MessageLagByLastRead" }, "value": 29 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "total", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/PartitionMaxReadQuotaUsage" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "total", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesAvailAvgMin" }, "value": 1000000000 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "total", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesAvailAvgSec" }, "value": 1000000000 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "total", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesMaxPerDay" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "total", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesMaxPerHour" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "total", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesMaxPerMin" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "total", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesMaxPerSec" }, "value": 0 }, { "kind" ... 
artitions { PartitionId: 1 } ReadRuleGenerations: 3 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 3 Important: true } NEW ANS: ANS GROUP total/total/rt3.dc1--asdfgs--topic ANS GROUP user/1/rt3.dc1--asdfgs--topic ANS GROUP user/1/total ANS GROUP user/total/total ANS GROUP total/total/total ANS GROUP rt3.dc1--asdfgs--topic ANS GROUP total ANS GROUP total/1/rt3.dc1--asdfgs--topic CHECKING GROUP user/1/rt3.dc1--asdfgs--topic 2025-03-31T16:21:18.619615Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:18.623616Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 5 actor [3:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 5 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 3 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 3 Important: false } NEW ANS: ANS GROUP total/total/rt3.dc1--asdfgs--topic ANS GROUP user/0/total ANS GROUP user/total/total ANS GROUP user/0/rt3.dc1--asdfgs--topic ANS GROUP total/total/total ANS GROUP rt3.dc1--asdfgs--topic ANS GROUP total/0/rt3.dc1--asdfgs--topic ANS GROUP total CHECKING GROUP user/0/rt3.dc1--asdfgs--topic 2025-03-31T16:21:20.616537Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:20.623354Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 6 actor [3:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "user" ImportantClientId: "user2" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 6 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 3 ReadRuleGenerations: 6 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 3 Important: true } Consumers { Name: "user2" Generation: 6 Important: true } NEW ANS: ANS GROUP total/total/rt3.dc1--asdfgs--topic ANS GROUP user2/1/total ANS GROUP user/1/rt3.dc1--asdfgs--topic ANS GROUP user/1/total ANS GROUP user2/total/total ANS GROUP user/total/total ANS GROUP user2/1/rt3.dc1--asdfgs--topic ANS GROUP total/total/total ANS GROUP rt3.dc1--asdfgs--topic ANS GROUP total ANS GROUP total/1/rt3.dc1--asdfgs--topic CHECKING GROUP user/1/rt3.dc1--asdfgs--topic CHECKING GROUP user2/1/rt3.dc1--asdfgs--topic 2025-03-31T16:21:22.634938Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:22.639132Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 7 actor 
[3:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "user" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 7 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 3 ReadRuleGenerations: 6 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 3 Important: true } Consumers { Name: "user2" Generation: 6 Important: false } NEW ANS: ANS GROUP total/total/rt3.dc1--asdfgs--topic ANS GROUP user/1/rt3.dc1--asdfgs--topic ANS GROUP user2/0/total ANS GROUP user/1/total ANS GROUP user2/total/total ANS GROUP user/total/total ANS GROUP user2/0/rt3.dc1--asdfgs--topic ANS GROUP total/total/total ANS GROUP total/0/rt3.dc1--asdfgs--topic ANS GROUP rt3.dc1--asdfgs--topic ANS GROUP total ANS GROUP total/1/rt3.dc1--asdfgs--topic CHECKING GROUP user/1/rt3.dc1--asdfgs--topic CHECKING GROUP user2/0/rt3.dc1--asdfgs--topic 2025-03-31T16:21:24.742517Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:24.747953Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 8 actor [3:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "user" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 8 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 3 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 3 Important: true } NEW ANS: ANS GROUP total/total/rt3.dc1--asdfgs--topic ANS GROUP user/1/rt3.dc1--asdfgs--topic ANS GROUP user/1/total ANS GROUP user/total/total ANS GROUP total/total/total ANS GROUP rt3.dc1--asdfgs--topic ANS GROUP total ANS GROUP total/1/rt3.dc1--asdfgs--topic CHECKING GROUP user/1/rt3.dc1--asdfgs--topic 2025-03-31T16:21:27.482975Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:27.483072Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-31T16:21:27.510652Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:27.511639Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 9 actor [4:200:2215] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 3600 ImportantClientId: "client" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 9 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" 
Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 9 ReadRuleGenerations: 9 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 9 Important: false } Consumers { Name: "client" Generation: 9 Important: true } 2025-03-31T16:21:27.512584Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [4:208:2221] 2025-03-31T16:21:27.513699Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [4:208:2221] 2025-03-31T16:21:27.514806Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [4:209:2222] 2025-03-31T16:21:27.515453Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'topic' partition 1 generation 2 [4:209:2222] 2025-03-31T16:21:27.534300Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][] pipe [4:251:2250] connected; active server actors: 1 2025-03-31T16:21:27.537940Z node 4 :PERSQUEUE INFO: new Cookie default|c1476c4-2870519d-2ad3c058-259c8ce9_0 generated for partition 0 topic 'topic' owner default 2025-03-31T16:21:27.543763Z node 4 :PERSQUEUE INFO: new Cookie default|fe77598d-9663365d-46eb403f-867febac_1 generated for partition 0 topic 'topic' owner default 2025-03-31T16:21:27.553790Z node 4 :PERSQUEUE INFO: new Cookie default|b85b2a1d-1c86b0c8-781a499c-ce47971_2 generated for partition 0 topic 'topic' owner default 2025-03-31T16:21:27.559742Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][topic] pipe [4:298:2291] connected; active server actors: 1 2025-03-31T16:21:33.376603Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][topic] pipe [4:398:2378] connected; active server actors: 1 2025-03-31T16:21:40.185832Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:40.185907Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-31T16:21:40.203369Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:40.204544Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 10 actor [5:198:2213] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 10 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 10 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 10 Important: false } 2025-03-31T16:21:40.205314Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [5:206:2219] 2025-03-31T16:21:40.208223Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [5:206:2219] 2025-03-31T16:21:40.210441Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, 
State: StateInit] bootstrapping 1 [5:207:2220] 2025-03-31T16:21:40.212584Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [5:207:2220] 2025-03-31T16:21:40.219944Z node 5 :PERSQUEUE INFO: new Cookie default|9b655779-8da0cdd1-cdb6e8da-9cf1e067_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-31T16:21:40.227404Z node 5 :PERSQUEUE INFO: new Cookie default|7ef92c1e-8deb392-5eff3310-bbb82afe_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-31T16:21:40.237846Z node 5 :PERSQUEUE INFO: new Cookie default|1d78f5f1-fd1d0507-6988433f-10b6cc68_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-31T16:21:40.244897Z node 5 :PERSQUEUE INFO: new Cookie default|377e3e43-7f8bdb2a-12088101-2a70f1ba_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-31T16:21:40.246813Z node 5 :PERSQUEUE INFO: new Cookie default|6ba6c417-40395c5f-951cc23f-141969cd_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default >> TColumnShardTestReadWrite::Write ------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::JsonStorageListingV2PDiskIdFilter [GOOD] Test command err: 2025-03-31T16:15:48.017102Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:2901:2433], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:48.017761Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:48.018596Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:15:48.020632Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:2904:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:48.021312Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:2913:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:48.021662Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:48.021849Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:2907:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:48.022768Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:15:48.022911Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:48.023350Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:48.023455Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:2910:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:48.023519Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:15:48.023988Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:15:48.024574Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:2922:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:48.024859Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:48.024887Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:15:48.025506Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:48.025769Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:15:48.027067Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:2916:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:48.027248Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:2919:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:48.028101Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:1489:2181], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:48.029078Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:15:48.029162Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:48.029217Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:48.029448Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:15:48.029506Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:15:48.030332Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-03-31T16:15:48.453864Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:15:48.628686Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-03-31T16:15:48.652374Z node 1 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-03-31T16:15:49.067790Z node 1 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 14268, node 1 TClient is connected to server localhost:4660 2025-03-31T16:15:49.292805Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:15:49.292870Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:15:49.292906Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:15:49.293180Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:17:03.492213Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [10:3135:2433], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:17:03.493466Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:17:03.494618Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:17:03.497735Z node 14 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [14:2207:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:17:03.498183Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [11:3131:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:17:03.498362Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [12:3138:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:17:03.499124Z node 17 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [17:2216:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:17:03.499322Z node 18 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [18:2219:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:17:03.500262Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [16:2213:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:17:03.500382Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:17:03.500463Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:17:03.500548Z node 14 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:17:03.500610Z node 14 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:17:03.500744Z node 15 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [15:2210:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:17:03.501821Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:17:03.501865Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:17:03.502226Z node 17 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:17:03.502319Z node 18 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:17:03.502659Z node 15 :METADATA_PROVIDER ... : LookupError; 2025-03-31T16:18:59.805792Z node 24 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:18:59.805843Z node 25 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:18:59.805903Z node 26 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:18:59.805962Z node 27 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:18:59.807416Z node 20 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [20:1489:2180], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:18:59.808943Z node 20 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:18:59.809002Z node 20 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:18:59.812965Z node 22 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [22:2908:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:18:59.813763Z node 21 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [21:2905:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:18:59.814249Z node 22 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:18:59.814727Z node 22 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:18:59.815157Z node 21 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:18:59.815215Z node 21 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-03-31T16:19:00.389024Z node 19 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:19:00.605633Z node 19 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-03-31T16:19:00.637768Z node 19 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-03-31T16:19:01.499755Z node 19 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 2540, node 19 TClient is connected to server localhost:17256 2025-03-31T16:19:02.263759Z node 19 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:19:02.263880Z node 19 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:19:02.263962Z node 19 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:19:02.264877Z node 19 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:21:10.475520Z node 28 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [28:3132:2433], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:21:10.477546Z node 28 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:21:10.478740Z node 28 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:21:10.481185Z node 32 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [32:2409:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:21:10.482788Z node 29 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [29:3128:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:21:10.483006Z node 30 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [30:3135:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:21:10.483221Z node 31 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [31:2406:2374], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:21:10.483325Z node 32 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:21:10.483504Z node 33 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [33:2412:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:21:10.484780Z node 32 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:21:10.485049Z node 34 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [34:2415:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:21:10.485729Z node 29 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:21:10.485810Z node 30 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:21:10.485886Z node 31 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:21:10.485965Z node 33 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:21:10.486046Z node 34 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:21:10.486354Z node 29 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:21:10.486433Z node 30 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:21:10.486518Z node 31 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:21:10.486563Z node 33 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:21:10.486603Z node 34 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:21:10.489290Z node 36 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [36:2421:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:21:10.490884Z node 35 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [35:2418:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:21:10.491010Z node 36 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:21:10.491569Z node 36 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:21:10.492350Z node 35 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:21:10.493755Z node 35 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-03-31T16:21:11.008135Z node 28 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:21:11.348401Z node 28 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-03-31T16:21:11.378282Z node 28 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-03-31T16:21:12.291857Z node 28 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 5994, node 28 TClient is connected to server localhost:1049 2025-03-31T16:21:13.026342Z node 28 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:21:13.026457Z node 28 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:21:13.026564Z node 28 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:21:13.028003Z node 28 :NET_CLASSIFIER ERROR: got bad distributable configuration >> TColumnShardTestReadWrite::WriteStandaloneExoticTypes >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_InactiveConfig_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_InactiveActor_Test >> TColumnShardTestReadWrite::WriteReadDuplicate >> TColumnShardTestReadWrite::CompactionGC >> KqpPg::DeleteWithQueryService+useSink [GOOD] >> KqpPg::DeleteWithQueryService-useSink >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt64_Reboot >> TColumnShardTestReadWrite::ReadGroupBy >> KqpPg::AlterColumnSetDefaultFromSequenceWithSchemaname [GOOD] >> KqpPg::CheckPgAutoParams+useSink >> TPQTest::TestPartitionPerConsumerQuota [GOOD] >> TPQTest::TestPartitionWriteQuota >> TPartitionTests::TestTxBatchInFederation [GOOD] >> KqpPg::V1CreateTable [GOOD] >> KqpPg::ValuesInsert+useSink >> KqpPg::PgAggregate+useSink [GOOD] >> KqpPg::PgAggregate-useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::TestTxBatchInFederation [GOOD] Test command err: 2025-03-31T16:21:04.423513Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:04.423609Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-31T16:21:04.443906Z node 1 
:PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:179:2194] 2025-03-31T16:21:04.444994Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 1 generation 0 [1:179:2194] Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001" StorageChannel: INLINE } CmdWrite { Key: 
"m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" 
Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001" StorageChannel: 
INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" St ... .864255Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 10 2025-03-31T16:21:42.864313Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-03-31T16:21:42.864368Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-03-31T16:21:43.629360Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-03-31T16:21:44.834732Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-03-31T16:21:44.835124Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse Got batch complete: 17 Wait batch completion 2025-03-31T16:21:44.835332Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 10 Wait kv request 2025-03-31T16:21:45.077761Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 0 2025-03-31T16:21:45.077835Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 0 2025-03-31T16:21:45.077897Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-03-31T16:21:45.077951Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0, NewHead=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 2025-03-31T16:21:45.078032Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::DropOwner. 2025-03-31T16:21:45.078070Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::DropOwner. 2025-03-31T16:21:45.078181Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: (empty maybe) 2025-03-31T16:21:45.078225Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-03-31T16:21:45.078288Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0, NewHead=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 2025-03-31T16:21:45.078343Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] schedule TEvPersQueue::TEvProposeTransactionResult(COMPLETE), reason= 2025-03-31T16:21:45.078425Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Already written message. Topic: 'rt3.dc1--account--topic' Partition: 0 SourceId: 'src4'. 
Message seqNo: 7. Committed seqNo: (NULL). Writing seqNo: 7. EndOffset: 50. CurOffset: 50. Offset: 50 2025-03-31T16:21:45.078544Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src4' seqNo 8 partNo 0 2025-03-31T16:21:45.079454Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src4' seqNo 8 partNo 0 FormedBlobsCount 0 NewHead: Offset 51 PartNo 0 PackedSize 84 count 1 nextOffset 52 batches 1 2025-03-31T16:21:45.079537Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src4' seqNo 9 partNo 0 2025-03-31T16:21:45.079588Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src4' seqNo 9 partNo 0 FormedBlobsCount 0 NewHead: Offset 51 PartNo 0 PackedSize 136 count 2 nextOffset 53 batches 1 2025-03-31T16:21:45.079630Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src4' seqNo 10 partNo 0 2025-03-31T16:21:45.079670Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src4' seqNo 10 partNo 0 FormedBlobsCount 0 NewHead: Offset 51 PartNo 0 PackedSize 188 count 3 nextOffset 54 batches 1 2025-03-31T16:21:45.079711Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src4' seqNo 11 partNo 0 2025-03-31T16:21:45.079752Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src4' seqNo 11 partNo 0 FormedBlobsCount 0 NewHead: Offset 51 PartNo 0 PackedSize 240 count 4 nextOffset 55 batches 1 2025-03-31T16:21:45.079793Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src4' seqNo 12 partNo 0 2025-03-31T16:21:45.079832Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src4' seqNo 12 partNo 0 FormedBlobsCount 0 NewHead: Offset 51 PartNo 0 PackedSize 292 count 5 nextOffset 56 batches 1 2025-03-31T16:21:45.079874Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: (empty maybe) 2025-03-31T16:21:45.079917Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-03-31T16:21:45.079958Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0, NewHead=Offset 51 PartNo 0 PackedSize 292 count 5 nextOffset 56 batches 1 2025-03-31T16:21:45.080002Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] schedule TEvPersQueue::TEvProposeTransactionResult(COMPLETE), reason= 2025-03-31T16:21:45.080062Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 
10 2025-03-31T16:21:45.080124Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-03-31T16:21:45.080187Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0, NewHead=Offset 51 PartNo 0 PackedSize 292 count 5 nextOffset 56 batches 1 2025-03-31T16:21:45.080788Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Add new write blob: topic 'rt3.dc1--account--topic' partition 0 compactOffset 51,5 HeadOffset 50 endOffset 50 curOffset 56 d0000000000_00000000000000000051_00000_0000000005_00000| size 189 WTime 21151 2025-03-31T16:21:45.080994Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-31T16:21:45.081057Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-31T16:21:45.081114Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-03-31T16:21:45.081153Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-31T16:21:45.081217Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psrc2 2025-03-31T16:21:45.081254Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psrc4 2025-03-31T16:21:45.081297Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psrc1 2025-03-31T16:21:45.081326Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] d0000000000_00000000000000000051_00000_0000000005_00000| 2025-03-31T16:21:45.081354Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-31T16:21:45.081381Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-31T16:21:45.081406Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-03-31T16:21:45.081435Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-31T16:21:45.081471Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Got KV request Got KV request Got KV request Got KV request Got KV request Got KV request Got KV request Wait tx committed for tx 0 2025-03-31T16:21:45.102635Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 85 WriteNewSizeFromSupportivePartitions# 4 2025-03-31T16:21:45.102714Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-31T16:21:45.102810Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src4', Topic: 'rt3.dc1--account--topic', Partition: 0, SeqNo: 7, partNo: 0, Offset: 50 is already written 2025-03-31T16:21:45.102856Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-03-31T16:21:45.102898Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src4', Topic: 'rt3.dc1--account--topic', Partition: 0, SeqNo: 8, partNo: 0, Offset: 50 is already written 2025-03-31T16:21:45.102928Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-31T16:21:45.102978Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src4', Topic: 'rt3.dc1--account--topic', Partition: 0, SeqNo: 9, partNo: 0, Offset: 50 is already written 2025-03-31T16:21:45.103014Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-31T16:21:45.103054Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src4', Topic: 'rt3.dc1--account--topic', Partition: 0, SeqNo: 10, partNo: 0, Offset: 50 is already written 2025-03-31T16:21:45.103101Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-31T16:21:45.103138Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src4', Topic: 'rt3.dc1--account--topic', Partition: 0, SeqNo: 11, partNo: 0, Offset: 50 is already written 2025-03-31T16:21:45.103166Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-31T16:21:45.103202Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src4', Topic: 'rt3.dc1--account--topic', Partition: 0, SeqNo: 12, partNo: 0, Offset: 50 is already written Wait immediate tx complete 3 Got propose result: Origin: 72057594037927937 Status: COMPLETE TxId: 3 Wait immediate tx complete 6 Got propose result: Origin: 72057594037927937 Status: COMPLETE TxId: 6 Wait tx committed for tx 10 >> TColumnShardTestReadWrite::Write [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_NotWritten_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_NotBoundary_Test >> TColumnShardTestReadWrite::RebootWriteReadStandalone ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::Write [GOOD] Test command err: 2025-03-31T16:21:41.461770Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:21:41.561118Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:21:41.585724Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:21:41.585999Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:21:41.594314Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:21:41.594519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules;
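
The "Offset: 50 is already written" replies above are PersQueue's per-SourceId deduplication at work: the partition remembers the highest SeqNo it has committed for each SourceId ('src4' here) and answers replayed writes (SeqNo 7 through 12) with the offset assigned on first delivery instead of appending the payload again. Below is a minimal sketch of that bookkeeping; it is illustrative Python, and the committed map and handle_write helper are invented names, not YDB's implementation.

    committed = {}  # SourceId -> (highest committed SeqNo, offset it was assigned)

    def handle_write(source_id, seqno, next_offset):
        # Duplicate if this SourceId already committed an equal or higher SeqNo.
        last = committed.get(source_id)
        if last is not None and seqno <= last[0]:
            return ("already written", last[1])  # reply with the original offset
        committed[source_id] = (seqno, next_offset)
        return ("written", next_offset)

    print(handle_write("src4", 7, 50))  # first delivery -> ('written', 50)
    print(handle_write("src4", 7, 51))  # replayed write -> ('already written', 50)
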
2025-03-31T16:21:41.594774Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:21:41.594921Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:21:41.595027Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:21:41.595163Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:21:41.595270Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:21:41.595395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:21:41.595557Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:21:41.595680Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:21:41.595786Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:21:41.595881Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:21:41.619132Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:21:41.619260Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:21:41.619300Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:21:41.619440Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:21:41.619548Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:21:41.619611Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:21:41.619653Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:21:41.619732Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:21:41.619780Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:21:41.619808Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:21:41.619829Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:21:41.620010Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:21:41.620084Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:21:41.620129Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:21:41.620160Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:21:41.620266Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:21:41.620331Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:21:41.620399Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:21:41.620437Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:21:41.620535Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:21:41.620580Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:21:41.620614Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:21:41.620672Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:21:41.620708Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:21:41.620735Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:21:41.621136Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=47; 2025-03-31T16:21:41.621228Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=36; 2025-03-31T16:21:41.621310Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=41; 2025-03-31T16:21:41.621371Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=28; 2025-03-31T16:21:41.621482Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:21:41.621562Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:21:41.621600Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:21:41.621748Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:21:41.621779Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:21:41.621798Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:21:41.621896Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:21:41.621928Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:21:41.621959Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-31T16:21:41.622111Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:21:41.622141Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:21:41.622158Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-31T16:21:41.622233Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T16:21:41.622260Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T16:21:41.622300Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... :[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000032;32;32;32;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000033;33;33;33;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000034;34;34;34;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000035;35;35;35;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000036;36;36;36;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000037;37;37;37;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000038;38;38;38;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000039;39;39;39;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000040;40;40;40;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000041;41;41;41;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000042;42;42;42;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000043;43;43;43;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000044;44;44;44;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000045;45;45;45;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000046;46;46;46;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 
00:00:00.000047;47;47;47;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000048;48;48;48;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000049;49;49;49;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000050;50;50;50;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000051;51;51;51;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000052;52;52;52;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000053;53;53;53;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000054;54;54;54;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000055;55;55;55;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000056;56;56;56;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000057;57;57;57;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000058;58;58;58;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000059;59;59;59;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000060;60;60;60;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000061;61;61;61;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000062;62;62;62;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000063;63;63;63;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000064;64;64;64;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000065;65;65;65;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000066;66;66;66;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 
00:00:00.000067;67;67;67;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000068;68;68;68;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000069;69;69;69;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000070;70;70;70;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000071;71;71;71;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000072;72;72;72;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000073;73;73;73;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000074;74;74;74;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000075;75;75;75;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000076;76;76;76;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000077;77;77;77;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000078;78;78;78;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000079;79;79;79;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000080;80;80;80;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000081;81;81;81;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000082;82;82;82;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000083;83;83;83;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000084;84;84;84;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000085;85;85;85;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000086;86;86;86;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 
00:00:00.000087;87;87;87;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000088;88;88;88;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000089;89;89;89;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000090;90;90;90;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000091;91;91;91;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000092;92;92;92;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000093;93;93;93;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000094;94;94;94;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000095;95;95;95;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000096;96;96;96;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000097;97;97;97;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000098;98;98;98;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000099;99;99;99;"}}]}; 2025-03-31T16:21:46.587237Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=30;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; >> TColumnShardTestReadWrite::WriteStandaloneExoticTypes [GOOD] >> KqpPg::Returning+useSink [GOOD] >> KqpPg::Returning-useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteStandaloneExoticTypes [GOOD] Test command err: 2025-03-31T16:21:41.623159Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:21:41.705969Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:21:41.726272Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:21:41.726483Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:21:41.733983Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:21:41.734134Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:21:41.734359Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:21:41.734473Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:21:41.734540Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:21:41.734619Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:21:41.734686Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:21:41.734763Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:21:41.734854Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:21:41.734924Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:21:41.734984Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:21:41.735046Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:21:41.755980Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:21:41.756101Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:21:41.756143Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:21:41.756309Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:21:41.756419Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:21:41.756477Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:21:41.756515Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:21:41.756576Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:21:41.756628Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:21:41.756664Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:21:41.756682Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:21:41.756849Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:21:41.756898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:21:41.756920Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:21:41.756939Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:21:41.757029Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:21:41.757071Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:21:41.757098Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:21:41.757116Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:21:41.757178Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:21:41.757203Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:21:41.757224Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:21:41.757262Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 
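
The TTxUpdateSchema records above step through a fixed chain of schema normalizers, one at a time: each is initialized, reports how many broken chunks it found ("0 chunks found" on this freshly created tablet), and then control switches to the next until normalization_finished. The following is a hedged sketch of that sequential pipeline; only the normalizer names are taken from the log, while the run_normalizers driver and the run_one callback are invented for illustration.

    NORMALIZERS = [
        "Granules", "Chunks", "TablesCleaner", "CleanGranuleId",
        "CleanInsertionDedup", "GCCountersNormalizer", "RestorePortionFromChunks",
        "SyncPortionFromChunks", "SyncMinSnapshotFromChunks",
        "RestoreV1Chunks_V2", "RestoreV2Chunks",
    ]

    def run_normalizers(run_one):
        # Run each normalizer once, in order; each reports the chunks it repaired.
        for name in NORMALIZERS:
            fixed = run_one(name)  # empty on a healthy tablet, as in the log
            print(f"normalizer={name}; {len(fixed)} chunks found")
        print("event=normalization_finished")

    run_normalizers(lambda name: [])  # nothing to repair -> "0 chunks found" per stage
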
2025-03-31T16:21:41.757284Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:21:41.757305Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:21:41.757618Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=35; 2025-03-31T16:21:41.757677Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=24; 2025-03-31T16:21:41.757740Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=33; 2025-03-31T16:21:41.757806Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=28; 2025-03-31T16:21:41.757908Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:21:41.757961Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:21:41.758007Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:21:41.758172Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:21:41.758226Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:21:41.758247Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:21:41.758358Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:21:41.758385Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:21:41.758402Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-31T16:21:41.758579Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:21:41.758613Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:21:41.758632Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-31T16:21:41.758710Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T16:21:41.758735Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T16:21:41.758771Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... :[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000032;32;32;32;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000033;33;33;33;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000034;34;34;34;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000035;35;35;35;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000036;36;36;36;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000037;37;37;37;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000038;38;38;38;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000039;39;39;39;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000040;40;40;40;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000041;41;41;41;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000042;42;42;42;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000043;43;43;43;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000044;44;44;44;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000045;45;45;45;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000046;46;46;46;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 
00:00:00.000047;47;47;47;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000048;48;48;48;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000049;49;49;49;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000050;50;50;50;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000051;51;51;51;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000052;52;52;52;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000053;53;53;53;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000054;54;54;54;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000055;55;55;55;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000056;56;56;56;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000057;57;57;57;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000058;58;58;58;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000059;59;59;59;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000060;60;60;60;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000061;61;61;61;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000062;62;62;62;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000063;63;63;63;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000064;64;64;64;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000065;65;65;65;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000066;66;66;66;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 
00:00:00.000067;67;67;67;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000068;68;68;68;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000069;69;69;69;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000070;70;70;70;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000071;71;71;71;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000072;72;72;72;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000073;73;73;73;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000074;74;74;74;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000075;75;75;75;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000076;76;76;76;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000077;77;77;77;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000078;78;78;78;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000079;79;79;79;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000080;80;80;80;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000081;81;81;81;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000082;82;82;82;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000083;83;83;83;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000084;84;84;84;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000085;85;85;85;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000086;86;86;86;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 
00:00:00.000087;87;87;87;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000088;88;88;88;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000089;89;89;89;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000090;90;90;90;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000091;91;91;91;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000092;92;92;92;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000093;93;93;93;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000094;94;94;94;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000095;95;95;95;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000096;96;96;96;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000097;97;97;97;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000098;98;98;98;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000099;99;99;99;"}}]}; 2025-03-31T16:21:47.465805Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=30;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionInactive_1_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionNotExists_Test >> Backup::ProposeBackup >> KqpPg::DeleteWithQueryService-useSink [GOOD] >> TColumnShardTestReadWrite::WriteReadStandalone >> Backup::ProposeBackup [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_PreferedPartition_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_BadSourceId_Test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::DeleteWithQueryService-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 8424, MsgBus: 4530 2025-03-31T16:20:34.317966Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488009338819473965:2133];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:34.318154Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/0l5c/0012df/r3tmp/tmpuWWHds/pdisk_1.dat 2025-03-31T16:20:34.698310Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:20:34.751255Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:34.751367Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:34.752985Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8424, node 1 2025-03-31T16:20:34.874711Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:34.874769Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:34.874775Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:34.874885Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4530 TClient is connected to server localhost:4530 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:35.523832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:37.014443Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009351704376445:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:37.014549Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:37.206458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:20:37.323248Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009351704376582:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:37.323317Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:37.325912Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009351704376587:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:37.331487Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-31T16:20:37.339532Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488009351704376589:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-31T16:20:37.440408Z node 1 :TX_PROXY ERROR: Actor# [1:7488009351704376642:2420] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 1 1 1 Trying to start YDB, gRPC: 61646, MsgBus: 22187 2025-03-31T16:20:38.943472Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488009358336911755:2130];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:38.953042Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012df/r3tmp/tmpWGuxeb/pdisk_1.dat 2025-03-31T16:20:39.098409Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:20:39.114305Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:39.114399Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:39.116099Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61646, node 2 2025-03-31T16:20:39.158812Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:39.158834Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:39.158840Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:39.158948Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22187 TClient is connected to server localhost:22187 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:39.621387Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:41.916800Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488009371221814227:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:41.916877Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:41.925083Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:20:41.979786Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488009371221814361:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:41.979872Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:41.979917Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488009371221814367:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:41.982922Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-31T16:20:41.995006Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488009371221814369:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-31T16:20:42.070883Z node 2 :TX_PROXY ERROR: Actor# [2:7488009375516781716:2414] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 1 1 1 Trying to start YDB, gRPC: 19065, MsgBus: 25196 2025-03-31T16:20:43.476838Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488009378521974523:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:43.476923Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012df/r3tmp/tmpXpUQ1g/pdisk_1.dat 2025-03-31T16:20:43.578406Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19065, node 3 2025-03-31T16:20:43.611760Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:43.611857Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connectin ... :2421] txid# 281474976715664, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } Trying to start YDB, gRPC: 23042, MsgBus: 30015 2025-03-31T16:21:36.134963Z node 11 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[11:7488009605161990497:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:21:36.135054Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012df/r3tmp/tmp3xL0N1/pdisk_1.dat 2025-03-31T16:21:36.286861Z node 11 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:21:36.312934Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:21:36.313038Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:21:36.314415Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23042, node 11 2025-03-31T16:21:36.375801Z node 11 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:21:36.375830Z node 11 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:21:36.375842Z node 11 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:21:36.376023Z node 11 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30015 TClient is connected to server localhost:30015 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:21:37.165996Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:21:40.808492Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7488009622341860344:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:21:40.808648Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:21:40.836499Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:21:40.930231Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7488009622341860449:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:21:40.930333Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7488009622341860455:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:21:40.930359Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:21:40.935782Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-31T16:21:40.949562Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7488009622341860457:2347], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-31T16:21:41.026423Z node 11 :TX_PROXY ERROR: Actor# [11:7488009626636827804:2396] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:21:41.135496Z node 11 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7488009605161990497:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:21:41.135612Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 3903, MsgBus: 21658 2025-03-31T16:21:42.526879Z node 12 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[12:7488009633839284978:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:21:42.527016Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012df/r3tmp/tmpfwq10Z/pdisk_1.dat 2025-03-31T16:21:42.720430Z node 12 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:21:42.749727Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:21:42.749878Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:21:42.752941Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3903, node 12 2025-03-31T16:21:42.812131Z node 12 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:21:42.812156Z node 12 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:21:42.812185Z node 12 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:21:42.812399Z node 12 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21658 TClient is connected to server localhost:21658 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
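[Editor's sketch] The repeated bring-up sequences in this test all show the same bootstrap pattern: fetching the workload-manager pool "default" fails with NOT_FOUND, a creator actor provisions it, a retry is scheduled ("Transaction ... completed, doublechecking"), and a concurrent creator's "path exist" error is accepted as success. Below is a minimal C++ sketch of that fetch-create-recheck loop; FetchPool/CreatePool are hypothetical stand-ins, not YDB's actual actors.

    #include <chrono>
    #include <iostream>
    #include <string>
    #include <thread>

    enum class Status { Ok, NotFound, AlreadyExists };

    // Hypothetical stand-ins for the pool fetcher/creator actors in the log.
    Status FetchPool(const std::string&, bool poolExists) {
        return poolExists ? Status::Ok : Status::NotFound;
    }
    Status CreatePool(const std::string&, bool& poolExists) {
        if (poolExists) return Status::AlreadyExists; // "path exist, request accepts it"
        poolExists = true;
        return Status::Ok;
    }

    int main() {
        bool poolExists = false;              // simulated catalog state
        const std::string id = "default";
        for (int attempt = 1; attempt <= 3; ++attempt) {
            if (FetchPool(id, poolExists) == Status::Ok) {
                std::cout << "pool '" << id << "' ready after "
                          << attempt << " attempt(s)\n";
                return 0;
            }
            std::cout << "attempt " << attempt
                      << ": NOT_FOUND, creating pool and scheduling retry\n";
            CreatePool(id, poolExists);       // a racing creator's AlreadyExists is benign
            std::this_thread::sleep_for(std::chrono::milliseconds(10));
        }
        return 1;
    }

The key property, visible in the log as well, is that NOT_FOUND and "path exist" are both treated as transient states of the same converging bootstrap rather than as hard failures.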
2025-03-31T16:21:43.552652Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:21:47.527289Z node 12 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[12:7488009633839284978:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:21:47.527379Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:21:47.910165Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7488009655314122127:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:21:47.910354Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:21:47.933665Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:21:48.029063Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7488009659609089530:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:21:48.029217Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:21:48.029504Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7488009659609089535:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:21:48.035875Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-31T16:21:48.049606Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7488009659609089537:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-31T16:21:48.137799Z node 12 :TX_PROXY ERROR: Actor# [12:7488009659609089588:2403] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> TPQTest::TestWriteOffsetWithBigMessage [GOOD] >> TPQTest::TestTimeRetention ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> Backup::ProposeBackup [GOOD] Test command err: 2025-03-31T16:21:49.429533Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:21:49.514322Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:21:49.536217Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:21:49.536478Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:21:49.543300Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:21:49.543504Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:21:49.543706Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:21:49.543832Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:21:49.543966Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:21:49.544045Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:21:49.544118Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:21:49.544188Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:21:49.544265Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:21:49.544452Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:21:49.544555Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:21:49.544641Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:21:49.566292Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:21:49.566462Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:21:49.566501Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:21:49.566632Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:21:49.566748Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:21:49.566799Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:21:49.566826Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:21:49.566888Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:21:49.566948Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:21:49.566978Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:21:49.567007Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:21:49.567200Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:21:49.567254Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:21:49.567278Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:21:49.567295Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:21:49.567381Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:21:49.567421Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:21:49.567457Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:21:49.567487Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:21:49.567532Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:21:49.567553Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:21:49.567575Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:21:49.567634Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:21:49.567657Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:21:49.567681Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:21:49.568059Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=58; 2025-03-31T16:21:49.568121Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=26; 2025-03-31T16:21:49.568176Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=22; 2025-03-31T16:21:49.568238Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=32; 2025-03-31T16:21:49.568377Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:21:49.568423Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:21:49.568450Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:21:49.568603Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:21:49.568639Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:21:49.568674Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:21:49.568769Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:21:49.568793Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:21:49.568813Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-31T16:21:49.568959Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:21:49.568990Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:21:49.569008Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-31T16:21:49.569121Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T16:21:49.569147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T16:21:49.569185Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
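[Editor's sketch] The Backup::ProposeBackup trace above walks a fixed normalizer chain at tablet init: each normalizer registers (normalizer_init with a seq_id), runs (here reporting "0 chunks found"), signals normalizer_finished, and control switches to the next stage until normalization_finished. A compact C++ sketch of such a sequential stage chain follows; the names and seq_ids mirror the log for readability but the types are illustrative only, not the columnshard classes.

    #include <functional>
    #include <iostream>
    #include <string>
    #include <vector>

    struct Normalizer {
        std::string name;
        int seqId;
        std::function<int()> run;  // returns the number of chunks found/repaired
    };

    int main() {
        std::vector<Normalizer> chain = {
            {"Granules", 1, [] { return 0; }},
            {"Chunks", 2, [] { return 0; }},
            {"TablesCleaner", 4, [] { return 0; }},
            {"RestoreV2Chunks", 16, [] { return 0; }},
        };
        for (size_t i = 0; i < chain.size(); ++i) {
            const Normalizer& n = chain[i];
            std::cout << "normalizer_init;seq_id=" << n.seqId
                      << ";type=" << n.name << "\n";
            std::cout << n.name << ": " << n.run() << " chunks found\n";
            std::cout << "normalizer_finished;CLASS_NAME=" << n.name << "\n";
            if (i + 1 < chain.size())
                std::cout << "normalizer_switched;next=" << chain[i + 1].name << "\n";
        }
        std::cout << "normalization_finished\n";
    }

Running each stage to completion before switching, as the trace does, keeps on-disk repairs strictly ordered: a later normalizer can assume every earlier one has already run.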
67041,4294967042,4294967043;column_names=_yql_delete_flag,_yql_plan_step,_yql_tx_id,_yql_write_id,field,key1,key2;);;;); 2025-03-31T16:21:50.433880Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:301:2319];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-31T16:21:50.434001Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:301:2319];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;task_identifier=;method=produce result;fline=actor.cpp:232;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3,4294967040,4294967041,4294967042,4294967043;column_names=_yql_delete_flag,_yql_plan_step,_yql_tx_id,_yql_write_id,field,key1,key2;);;program_input=(column_ids=1,2,3,4294967040,4294967041,4294967042,4294967043;column_names=_yql_delete_flag,_yql_plan_step,_yql_tx_id,_yql_write_id,field,key1,key2;);;;);columns=7;rows=100; 2025-03-31T16:21:50.434093Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:301:2319];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;task_identifier=;method=produce result;fline=actor.cpp:252;stage=data_format;batch_size=4813;num_rows=100;batch_columns=key1,key2,field,_yql_plan_step,_yql_tx_id,_yql_write_id,_yql_delete_flag; 2025-03-31T16:21:50.434266Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:301:2319];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;task_identifier=;method=produce result;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:295:2313];bytes=4813;rows=100;faults=0;finished=0;fault=0;schema=key1: uint64 key2: uint64 field: string _yql_plan_step: uint64 _yql_tx_id: uint64 _yql_write_id: uint64 _yql_delete_flag: bool; 2025-03-31T16:21:50.434377Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:301:2319];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;task_identifier=;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3,4294967040,4294967041,4294967042,4294967043;column_names=_yql_delete_flag,_yql_plan_step,_yql_tx_id,_yql_write_id,field,key1,key2;);;program_input=(column_ids=1,2,3,4294967040,4294967041,4294967042,4294967043;column_names=_yql_delete_flag,_yql_plan_step,_yql_tx_id,_yql_write_id,field,key1,key2;);;;); 2025-03-31T16:21:50.434514Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:301:2319];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3,4294967040,4294967041,4294967042,4294967043;column_names=_yql_delete_flag,_yql_plan_step,_yql_tx_id,_yql_write_id,field,key1,key2;);;program_input=(column_ids=1,2,3,4294967040,4294967041,4294967042,4294967043;column_names=_yql_delete_flag,_yql_plan_step,_yql_tx_id,_yql_write_id,field,key1,key2;);;;); 2025-03-31T16:21:50.434631Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:301:2319];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3,4294967040,4294967041,4294967042,4294967043;column_names=_yql_delete_flag,_yql_plan_step,_yql_tx_id,_yql_write_id,field,key1,key2;);;program_input=(column_ids=1,2,3,4294967040,4294967041,4294967042,4294967043;column_names=_yql_delete_flag,_yql_plan_step,_yql_tx_id,_yql_write_id,field,key1,key2;);;;); 2025-03-31T16:21:50.435316Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:0:0:1:3:2752:0]; 2025-03-31T16:21:50.447375Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:301:2319];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-31T16:21:50.447534Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:301:2319];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3,4294967040,4294967041,4294967042,4294967043;column_names=_yql_delete_flag,_yql_plan_step,_yql_tx_id,_yql_write_id,field,key1,key2;);;program_input=(column_ids=1,2,3,4294967040,4294967041,4294967042,4294967043;column_names=_yql_delete_flag,_yql_plan_step,_yql_tx_id,_yql_write_id,field,key1,key2;);;;); 2025-03-31T16:21:50.447682Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:301:2319];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3,4294967040,4294967041,4294967042,4294967043;column_names=_yql_delete_flag,_yql_plan_step,_yql_tx_id,_yql_write_id,field,key1,key2;);;program_input=(column_ids=1,2,3,4294967040,4294967041,4294967042,4294967043;column_names=_yql_delete_flag,_yql_plan_step,_yql_tx_id,_yql_write_id,field,key1,key2;);;;); 2025-03-31T16:21:50.447746Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:301:2319] finished for tablet 9437184 2025-03-31T16:21:50.448254Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:301:2319];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;task_identifier=;fline=actor.cpp:416;event=scan_finish;compute_actor_id=[1:295:2313];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.005},{"events":["f_processing","f_task_result"],"t":0.007},{"events":["f_ack"],"t":0.013},{"events":["l_task_result"],"t":0.028},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.092}],"full":{"a":1743438110354872,"name":"_full_task","f":1743438110354872,"d_finished":0,"c":0,"l":1743438110447825,"d":92953},"events":[{"name":"bootstrap","f":1743438110355201,"d_finished":4705,"c":1,"l":1743438110359906,"d":4705},{"a":1743438110447349,"name":"ack","f":1743438110368776,"d_finished":2673,"c":3,"l":1743438110434660,"d":3149},{"a":1743438110447329,"name":"processing","f":1743438110361981,"d_finished":13865,"c":27,"l":1743438110434662,"d":14361},{"name":"ProduceResults","f":1743438110357804,"d_finished":7960,"c":32,"l":1743438110447728,"d":7960},{"a":1743438110447733,"name":"Finish","f":1743438110447733,"d_finished":0,"c":0,"l":1743438110447825,"d":92},{"name":"task_result","f":1743438110361994,"d_finished":10846,"c":24,"l":1743438110383724,"d":10846}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3,4294967040,4294967041,4294967042,4294967043;column_names=_yql_delete_flag,_yql_plan_step,_yql_tx_id,_yql_write_id,field,key1,key2;);;program_input=(column_ids=1,2,3,4294967040,4294967041,4294967042,4294967043;column_names=_yql_delete_flag,_yql_plan_step,_yql_tx_id,_yql_write_id,field,key1,key2;);;;); 2025-03-31T16:21:50.448361Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:301:2319];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;task_identifier=;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:295:2313];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-31T16:21:50.448758Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:301:2319];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;task_identifier=;fline=actor.cpp:371;event=scan_finished;compute_actor_id=[1:295:2313];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.005},{"events":["f_processing","f_task_result"],"t":0.007},{"events":["f_ack"],"t":0.013},{"events":["l_task_result"],"t":0.028},{"events":["l_ProduceResults","f_Finish"],"t":0.092},{"events":["l_ack","l_processing","l_Finish"],"t":0.093}],"full":{"a":1743438110354872,"name":"_full_task","f":1743438110354872,"d_finished":0,"c":0,"l":1743438110448402,"d":93530},"events":[{"name":"bootstrap","f":1743438110355201,"d_finished":4705,"c":1,"l":1743438110359906,"d":4705},{"a":1743438110447349,"name":"ack","f":1743438110368776,"d_finished":2673,"c":3,"l":1743438110434660,"d":3726},{"a":1743438110447329,"name":"processing","f":1743438110361981,"d_finished":13865,"c":27,"l":1743438110434662,"d":14938},{"name":"ProduceResults","f":1743438110357804,"d_finished":7960,"c":32,"l":1743438110447728,"d":7960},{"a":1743438110447733,"name":"Finish","f":1743438110447733,"d_finished":0,"c":0,"l":1743438110448402,"d":669},{"name":"task_result","f":1743438110361994,"d_finished":10846,"c":24,"l":1743438110383724,"d":10846}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3,4294967040,4294967041,4294967042,4294967043;column_names=_yql_delete_flag,_yql_plan_step,_yql_tx_id,_yql_write_id,field,key1,key2;);;program_input=(column_ids=1,2,3,4294967040,4294967041,4294967042,4294967043;column_names=_yql_delete_flag,_yql_plan_step,_yql_tx_id,_yql_write_id,field,key1,key2;);;;); 2025-03-31T16:21:50.448857Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:301:2319];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-31T16:21:50.353926Z;index_granules=0;index_portions=3;index_batches=3;committed_batches=0;schema_columns=4;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=13880;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=13880;selected_rows=0; 2025-03-31T16:21:50.448905Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:301:2319];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-31T16:21:50.449166Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:301:2319];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3,4294967040,4294967041,4294967042,4294967043;column_names=_yql_delete_flag,_yql_plan_step,_yql_tx_id,_yql_write_id,field,key1,key2;);;program_input=(column_ids=1,2,3,4294967040,4294967041,4294967042,4294967043;column_names=_yql_delete_flag,_yql_plan_step,_yql_tx_id,_yql_write_id,field,key1,key2;);;; 2025-03-31T16:21:50.449537Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 1 at tablet 9437184 2025-03-31T16:21:50.474665Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:138:2170];ev=NKikimr::NOlap::NBackground::TEvExecuteGeneralLocalTransaction;method=TTxController::FinishProposeOnComplete;tx_id=115;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:244:2262]; 2025-03-31T16:21:50.474725Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:138:2170];ev=NKikimr::NOlap::NBackground::TEvExecuteGeneralLocalTransaction;method=TTxController::FinishProposeOnComplete;tx_id=115;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=115; >> TColumnShardTestReadWrite::PortionInfoSize [GOOD] >> KqpPg::PgAggregate-useSink [GOOD] >> KqpPg::MkqlTerminate >> TColumnShardTestReadWrite::CompactionInGranule_PKInt64 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::PortionInfoSize [GOOD] Test command err: 304 176 28 48 32 24 16 24 56 >> TPQTest::TestReadSessions [GOOD] >> TPQTest::TestReadSubscription >> TPQTest::TestTimeRetention [GOOD] >> TPQTest::TestTabletRestoreEventsOrder >> TPQTest::TestAlreadyWrittenWithoutDeduplication [GOOD] >> TPQTest::TestChangeConfig >> TPQTest::TestTabletRestoreEventsOrder [GOOD] >> EvWrite::AbortInTransaction >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_InactiveActor_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_BadSourceId_Test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::TestTabletRestoreEventsOrder [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:108:2057] recipient: [1:101:2135] 2025-03-31T16:21:09.924828Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:09.924900Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927938 is [1:153:2174] sender: [1:154:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:179:2057] recipient: [1:14:2061] 2025-03-31T16:21:09.941897Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:178:2193], now have 1 active actors on pipe 2025-03-31T16:21:09.942044Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-03-31T16:21:09.962852Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 PartitionIds: 3 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } Partitions { PartitionId: 3 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } 
AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } AllPartitions { PartitionId: 3 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "test" Generation: 1 Important: false } 2025-03-31T16:21:09.965497Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 PartitionIds: 3 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } Partitions { PartitionId: 3 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } AllPartitions { PartitionId: 3 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "test" Generation: 1 Important: false } 2025-03-31T16:21:09.965653Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:09.967978Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 PartitionIds: 3 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } Partitions { PartitionId: 3 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } AllPartitions { PartitionId: 3 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "test" Generation: 1 Important: false } 2025-03-31T16:21:09.968095Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitConfigStep 2025-03-31T16:21:09.968161Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--asdfgs--topic:1:Initializer] Start initializing step TInitConfigStep 2025-03-31T16:21:09.968208Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--asdfgs--topic:2:Initializer] Start initializing step TInitConfigStep 2025-03-31T16:21:09.968240Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--asdfgs--topic:3:Initializer] Start initializing step TInitConfigStep 2025-03-31T16:21:09.968737Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-31T16:21:09.969080Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:185:2198] 2025-03-31T16:21:09.971680Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--asdfgs--topic:0:Initializer] Initializing completed. 
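[Editor's sketch] The TPQTest log above shows the versioned config handshake: the tablet logs "Config update version 1(current 0) received", applies the config, confirms "Config applied version 1", and then bootstraps one partition actor per PartitionId. A minimal C++ sketch of that newer-version-wins apply step, using a hypothetical struct rather than the real PQ protobuf:

    #include <cstdint>
    #include <iostream>
    #include <vector>

    struct PQConfig {
        uint64_t version = 0;
        std::vector<uint32_t> partitionIds;
    };

    bool ApplyConfig(PQConfig& current, const PQConfig& update) {
        std::cout << "Config update version " << update.version
                  << "(current " << current.version << ") received\n";
        if (update.version <= current.version)
            return false;                 // stale or duplicate update: ignore
        current = update;                 // here the tablet would bootstrap one
                                          // partition actor per PartitionId
        std::cout << "Config applied version " << current.version << "\n";
        return true;
    }

    int main() {
        PQConfig current;
        ApplyConfig(current, {1, {0, 1, 2, 3}});  // accepted: 1 > 0
        ApplyConfig(current, {1, {0, 1}});        // rejected: not newer
    }

The monotonic version check is what makes config delivery idempotent: replayed or reordered updates, like the ones these restart tests exercise, cannot roll the tablet back to an older configuration.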
2025-03-31T16:21:09.971749Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:185:2198] 2025-03-31T16:21:09.971800Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--asdfgs--topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-31T16:21:09.974184Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-03-31T16:21:09.974319Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit request with generation 1 2025-03-31T16:21:09.974362Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit with generation 1 done 2025-03-31T16:21:09.974418Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user test reinit request with generation 1 2025-03-31T16:21:09.974447Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user test reinit with generation 1 done 2025-03-31T16:21:09.974594Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-31T16:21:09.974635Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-31T16:21:09.974681Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-31T16:21:09.974720Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-31T16:21:09.974765Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cuser 2025-03-31T16:21:09.974789Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uuser 2025-03-31T16:21:09.974812Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000ctest 2025-03-31T16:21:09.974833Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000utest 2025-03-31T16:21:09.974862Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-31T16:21:09.974911Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-03-31T16:21:09.975004Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-03-31T16:21:09.975066Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user test readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-03-31T16:21:09.975249Z node 1 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2025-03-31T16:21:09.975565Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--asdfgs--topic:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-31T16:21:09.975835Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:186:2199] 2025-03-31T16:21:09.977698Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--asdfgs--topic:1:Initializer] Initializing completed. 2025-03-31T16:21:09.977759Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:186:2199] 2025-03-31T16:21:09.977810Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateInit] SYNC INIT topic rt3.dc1--asdfgs--topic partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-31T16:21:09.979589Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Process pending events. Count 0 2025-03-31T16:21:09.979667Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user user reinit request with generation 1 2025-03-31T16:21:09.979698Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user user reinit with generation 1 done 2025-03-31T16:21:09.979739Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user test reinit request with generation 1 2025-03-31T16:21:09.979762Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user test reinit with generation 1 done 2025-03-31T16:21:09.979872Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] === DumpKeyValueRequest === 2025-03-31T16:21:09.979908Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- delete ---------------- 2025-03-31T16:21:09.979948Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- write ----------------- 2025-03-31T16:21:09.979997Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] i0000000001 2025-03-31T16:21:09.980023Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001cuser 2025-03-31T16:21:09.980046Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001uuser 2025-03-31T16:21:09.980072Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001ctest 2025-03-31T16:21:09.980092Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001utest 2025-03-31T16:21:09.980114Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- rename ---------------- 2025-03-31T16:21:09.980143Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] =========================== 2025-03-31T16:21:09.980199Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-03-31T16:21:09.980226Z node 1 :PERSQUEUE DEBUG: [P ... 
ID 72057594037927938 is [33:153:2174] sender: [33:154:2057] recipient: [33:147:2170] Leader for TabletID 72057594037927937 is [33:107:2139] sender: [33:179:2057] recipient: [33:14:2061] 2025-03-31T16:21:52.383030Z node 33 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:52.384163Z node 33 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 32 actor [33:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 1000 MaxSizeInPartition: 104857600 LifetimeSeconds: 1000 LowWatermark: 100 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 32 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 32 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 32 Important: false } 2025-03-31T16:21:52.385091Z node 33 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [33:185:2198] 2025-03-31T16:21:52.388696Z node 33 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [33:185:2198] 2025-03-31T16:21:52.391344Z node 33 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [33:186:2199] 2025-03-31T16:21:52.393887Z node 33 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [33:186:2199] 2025-03-31T16:21:52.402702Z node 33 :PERSQUEUE INFO: new Cookie default|b4500fba-e18d6401-61bd7d4c-dcae5fcc_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-31T16:21:52.412501Z node 33 :PERSQUEUE INFO: new Cookie default|c9ed51fa-ced925ab-1e3b40ca-dc8c1df0_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-31T16:21:52.421316Z node 33 :PERSQUEUE INFO: new Cookie default|ced09474-7b9bd2d8-60e98f53-e44e70b7_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-31T16:21:52.435821Z node 33 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:52.442064Z node 33 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 33 actor [33:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 1000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 100 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 33 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 32 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 32 Important: false } 2025-03-31T16:21:52.450615Z node 33 :PERSQUEUE INFO: new Cookie default|608c050b-8d99d67c-f2783737-db3f9075_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-31T16:21:52.466057Z node 33 :PERSQUEUE INFO: new Cookie default|2a06ff1e-5a75d464-99879a5c-951f8d4b_4 generated for 
partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-31T16:21:52.482492Z node 33 :PERSQUEUE INFO: new Cookie default|86d57b58-2b9ebbe9-58b57385-8b6ca90c_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:103:2057] recipient: [34:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:103:2057] recipient: [34:101:2135] Leader for TabletID 72057594037927937 is [34:107:2139] sender: [34:108:2057] recipient: [34:101:2135] 2025-03-31T16:21:53.004266Z node 34 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:53.004363Z node 34 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [34:149:2057] recipient: [34:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [34:149:2057] recipient: [34:147:2170] Leader for TabletID 72057594037927938 is [34:153:2174] sender: [34:154:2057] recipient: [34:147:2170] Leader for TabletID 72057594037927937 is [34:107:2139] sender: [34:177:2057] recipient: [34:14:2061] 2025-03-31T16:21:53.025129Z node 34 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:53.026153Z node 34 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 34 actor [34:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 1000 MaxSizeInPartition: 104857600 LifetimeSeconds: 1000 LowWatermark: 100 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 34 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 34 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 34 Important: false } 2025-03-31T16:21:53.026972Z node 34 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [34:183:2196] 2025-03-31T16:21:53.030322Z node 34 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [34:183:2196] 2025-03-31T16:21:53.032648Z node 34 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [34:184:2197] 2025-03-31T16:21:53.035055Z node 34 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [34:184:2197] 2025-03-31T16:21:53.042662Z node 34 :PERSQUEUE INFO: new Cookie default|b56f2ea4-20bf0d8f-dee236b4-6297710d_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-31T16:21:53.051073Z node 34 :PERSQUEUE INFO: new Cookie default|82ea7eba-b002b273-43c881da-7ae1e880_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-31T16:21:53.059000Z node 34 :PERSQUEUE INFO: new Cookie default|fc8ae691-f49c6ed4-f9458b55-cba3fbb8_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-31T16:21:53.072490Z node 34 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:53.078883Z node 34 :PERSQUEUE INFO: [PQ: 
72057594037927937] Config applied version 35 actor [34:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 1000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 100 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 35 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 34 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 34 Important: false } 2025-03-31T16:21:53.091122Z node 34 :PERSQUEUE INFO: new Cookie default|f64a240a-be2f8333-ed4f2252-ca4afce1_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-31T16:21:53.107013Z node 34 :PERSQUEUE INFO: new Cookie default|3339a111-d7251a97-588bd6ea-ce6912eb_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-31T16:21:53.124099Z node 34 :PERSQUEUE INFO: new Cookie default|45967ea9-b283e993-bcdd6d63-e945cf17_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-31T16:21:53.683672Z node 35 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:53.683783Z node 35 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-31T16:21:53.740199Z node 35 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:53.740298Z node 35 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-31T16:21:53.743586Z node 35 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:53.745022Z node 35 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 36 actor [35:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "aaa" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 36 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 36 ReadRuleGenerations: 36 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 36 Important: false } Consumers { Name: "aaa" Generation: 36 Important: true } 2025-03-31T16:21:53.745962Z node 35 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [35:245:2245] 2025-03-31T16:21:53.747208Z node 35 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 3 [35:245:2245] 2025-03-31T16:21:53.748736Z node 35 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [35:247:2247] 2025-03-31T16:21:53.749631Z node 35 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'topic' partition 1 generation 3 [35:247:2247] 
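The node 33 and node 34 blocks above each emit a run of "new Cookie default|<uuid>_<N> generated for partition 0" records, where the trailing number is a per-owner write-session sequence. A sketch of a consistency check over such records, assuming one record per line; the regex and function names are assumptions, not part of the test harness:

import re
from collections import defaultdict

# Illustrative check (assumed names): verify the cookie sequence number grows
# by one within each (node, partition, owner) triple.
COOKIE = re.compile(r"node (?P<node>\d+) :PERSQUEUE INFO: new Cookie "
                    r"(?P<owner>[^|]+)\|\S+_(?P<seq>\d+) generated for "
                    r"partition (?P<part>\d+)")

def check_cookie_sequences(lines):
    last = defaultdict(lambda: -1)  # (node, part, owner) -> last seq seen
    for line in lines:
        m = COOKIE.search(line)
        if not m:
            continue
        key = (m["node"], m["part"], m["owner"])
        seq = int(m["seq"])
        assert seq == last[key] + 1, f"cookie gap for {key}: {last[key]} -> {seq}"
        last[key] = seq

Keying on the node as well as partition and owner matters here, since the sequence restarts at _0 after each tablet restart (node 33 and node 34 both run _0 through _5).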
2025-03-31T16:21:53.789503Z node 35 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig
2025-03-31T16:21:53.789577Z node 35 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info
2025-03-31T16:21:53.790500Z node 35 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [35:324:2307]
2025-03-31T16:21:53.791933Z node 35 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [35:326:2309]
2025-03-31T16:21:53.796875Z node 35 :PERSQUEUE INFO: [topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized.
2025-03-31T16:21:53.796952Z node 35 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 4 [35:324:2307]
2025-03-31T16:21:53.797757Z node 35 :PERSQUEUE INFO: [topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized.
2025-03-31T16:21:53.797807Z node 35 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'topic' partition 1 generation 4 [35:326:2309]
>> TColumnShardTestReadWrite::RebootWriteReadStandalone [GOOD]
>> EvWrite::AbortInTransaction [GOOD]
>> TColumnShardTestReadWrite::WriteReadStandalone [GOOD]
>> TPQTest::TestChangeConfig [GOOD]
>> TPQTest::PQ_Tablet_Removes_Blobs_Asynchronously
>> TColumnShardTestReadWrite::ReadSomePrograms
>> TFetchRequestTests::CheckAccess [GOOD]
>> PQCountersSimple::PartitionWriteQuota
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> EvWrite::AbortInTransaction [GOOD]
Test command err:
2025-03-31T16:21:54.835437Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0
2025-03-31T16:21:54.931332Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor;
2025-03-31T16:21:54.955304Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished;
2025-03-31T16:21:54.955581Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184
2025-03-31T16:21:54.963841Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0;
2025-03-31T16:21:54.964055Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules;
2025-03-31T16:21:54.964295Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks;
2025-03-31T16:21:54.964709Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner;
2025-03-31T16:21:54.964836Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId;
2025-03-31T16:21:54.964955Z node 1 :TX_COLUMNSHARD WARN:
tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:21:54.965058Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:21:54.965166Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:21:54.965282Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:21:54.965427Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:21:54.965538Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:21:54.965641Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:21:54.995528Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:21:54.995697Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:21:54.995765Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:21:54.995957Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:21:54.996103Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:21:54.996173Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:21:54.996218Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:21:54.996354Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:21:54.996426Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:21:54.996469Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:21:54.996507Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:21:54.996728Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:21:54.996802Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:21:54.996840Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:21:54.996868Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:21:54.996974Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:21:54.997037Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:21:54.997087Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:21:54.997125Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:21:54.997197Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:21:54.997234Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:21:54.997284Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:21:54.997343Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:21:54.997381Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:21:54.997408Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:21:54.997809Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=50; 2025-03-31T16:21:54.997890Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=32; 2025-03-31T16:21:54.997976Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=39; 2025-03-31T16:21:54.998069Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=45; 2025-03-31T16:21:54.998234Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:21:54.998305Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:21:54.998344Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:21:54.998597Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:21:54.998648Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:21:54.998681Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:21:54.998822Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:21:54.998869Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:21:54.998898Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-31T16:21:54.999129Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:21:54.999194Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:21:54.999221Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-31T16:21:54.999340Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T16:21:54.999384Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T16:21:54.999430Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
4;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:245;method=RegisterTable;path_id=1; 2025-03-31T16:21:55.583377Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine.h:144;event=RegisterTable;path_id=1; 2025-03-31T16:21:55.589825Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:488;event=OnTieringModified;path_id=1; 2025-03-31T16:21:55.590053Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tx_controller.cpp:211;event=finished_tx;tx_id=10; 2025-03-31T16:21:55.616266Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[2] complete at tablet 9437184 2025-03-31T16:21:55.616501Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=229592;columns=2; 2025-03-31T16:21:55.637086Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:138:2170];fline=actor.cpp:22;event=flush_writing;size=229592;count=1; 2025-03-31T16:21:55.640668Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 1 at tablet 9437184 2025-03-31T16:21:55.642508Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 1 2025-03-31T16:21:55.655554Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 1 2025-03-31T16:21:55.655727Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=4;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:21:55.656232Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=9437184;self_id=[1:138:2170];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=manager.cpp:116;event=abort;tx_id=222;problem=finished; 2025-03-31T16:21:55.656316Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=9437184;self_id=[1:138:2170];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=manager.cpp:134;event=abort;tx_id=222;problem=finished; 2025-03-31T16:21:55.656594Z node 1 :TX_COLUMNSHARD DEBUG: PlanStep 10 at tablet 9437184, mediator 0 2025-03-31T16:21:55.656656Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[5] execute at tablet 9437184 2025-03-31T16:21:55.656728Z node 1 :TX_COLUMNSHARD ERROR: TxPlanStep[5] Ignore old txIds [112] for step 10 last planned step 10 at tablet 9437184 2025-03-31T16:21:55.656791Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[5] complete at tablet 9437184 2025-03-31T16:21:55.657210Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {10:max} readable: {10:max} at tablet 9437184 2025-03-31T16:21:55.657340Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-03-31T16:21:55.666229Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:138:2170];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={10:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 1 } Columns { Id: 2 } } } ; 2025-03-31T16:21:55.666353Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:138:2170];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={10:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } Columns { Id: 2 } } } ; 2025-03-31T16:21:55.667364Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:138:2170];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={10:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":4,"inputs":[{"from":5}]},{"owner_id":5,"inputs":[]}],"nodes":{"2":{"p":{"i":"1","p":{"address":{"name":"key","id":1}},"o":"1","t":"AssembleOriginalData"},"w":9,"id":2},"5":{"p":{"p":{"data":[{"name":"key","id":1},{"name":"field","id":2}]},"o":"1,2","t":"FetchOriginalData"},"w":4,"id":5},"4":{"p":{"i":"2","p":{"address":{"name":"field","id":2}},"o":"2","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"1,2","t":"Projection"},"w":18,"id":0}}}; 2025-03-31T16:21:55.667542Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: tablet_id=9437184;self_id=[1:138:2170];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={10:max};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:136;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-03-31T16:21:55.668929Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: tablet_id=9437184;self_id=[1:138:2170];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={10:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:162;event=TTxScan started;actor_id=[1:258:2276];trace_detailed=; 2025-03-31T16:21:55.670301Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: fline=context.cpp:84;ff_first=(column_ids=1,2;column_names=field,key;);; 2025-03-31T16:21:55.670517Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: fline=context.cpp:99;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;; 2025-03-31T16:21:55.670931Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:258:2276];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-31T16:21:55.671112Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:258:2276];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-03-31T16:21:55.671246Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:258:2276];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-03-31T16:21:55.671304Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan 
[1:258:2276] finished for tablet 9437184 2025-03-31T16:21:55.671761Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:258:2276];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:416;event=scan_finish;compute_actor_id=[1:252:2270];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.001},{"events":["f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.002}],"full":{"a":1743438115668845,"name":"_full_task","f":1743438115668845,"d_finished":0,"c":0,"l":1743438115671380,"d":2535},"events":[{"name":"bootstrap","f":1743438115669109,"d_finished":1609,"c":1,"l":1743438115670718,"d":1609},{"a":1743438115670900,"name":"ack","f":1743438115670900,"d_finished":0,"c":0,"l":1743438115671380,"d":480},{"a":1743438115670881,"name":"processing","f":1743438115670881,"d_finished":0,"c":0,"l":1743438115671380,"d":499},{"name":"ProduceResults","f":1743438115670693,"d_finished":338,"c":2,"l":1743438115671287,"d":338},{"a":1743438115671291,"name":"Finish","f":1743438115671291,"d_finished":0,"c":0,"l":1743438115671380,"d":89}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-03-31T16:21:55.671857Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:258:2276];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:252:2270];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-31T16:21:55.672257Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:258:2276];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:371;event=scan_finished;compute_actor_id=[1:252:2270];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.001},{"events":["f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.002},{"events":["l_ack","l_processing","l_Finish"],"t":0.003}],"full":{"a":1743438115668845,"name":"_full_task","f":1743438115668845,"d_finished":0,"c":0,"l":1743438115671914,"d":3069},"events":[{"name":"bootstrap","f":1743438115669109,"d_finished":1609,"c":1,"l":1743438115670718,"d":1609},{"a":1743438115670900,"name":"ack","f":1743438115670900,"d_finished":0,"c":0,"l":1743438115671914,"d":1014},{"a":1743438115670881,"name":"processing","f":1743438115670881,"d_finished":0,"c":0,"l":1743438115671914,"d":1033},{"name":"ProduceResults","f":1743438115670693,"d_finished":338,"c":2,"l":1743438115671287,"d":338},{"a":1743438115671291,"name":"Finish","f":1743438115671291,"d_finished":0,"c":0,"l":1743438115671914,"d":623}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-03-31T16:21:55.672373Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:258:2276];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-31T16:21:55.667490Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=2;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0;
2025-03-31T16:21:55.672423Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:258:2276];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor;
2025-03-31T16:21:55.672541Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:258:2276];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;
FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:49;event=insert_to_cache;key=uint64;records=0;size=0;
FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64;records=0;count=0;
FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:49;event=insert_to_cache;key=string;records=0;size=0;
FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=string;records=0;count=0;
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::RebootWriteReadStandalone [GOOD]
Test command err:
2025-03-31T16:21:47.838340Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0
2025-03-31T16:21:47.940140Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor;
2025-03-31T16:21:47.964937Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished;
2025-03-31T16:21:47.965224Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184
2025-03-31T16:21:47.973176Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0;
2025-03-31T16:21:47.973383Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules;
2025-03-31T16:21:47.973627Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks;
2025-03-31T16:21:47.973756Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner;
2025-03-31T16:21:47.973870Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId;
2025-03-31T16:21:47.973985Z node 1 :TX_COLUMNSHARD WARN:
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:21:47.974107Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:21:47.974236Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:21:47.974359Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:21:47.974465Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:21:47.974572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:21:47.974700Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:21:48.003242Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:21:48.003407Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:21:48.003454Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:21:48.003642Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:21:48.003771Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:21:48.003851Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:21:48.003900Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:21:48.003994Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:21:48.004061Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:21:48.004111Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:21:48.004148Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:21:48.004390Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:21:48.004469Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:21:48.004512Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:21:48.004541Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:21:48.004652Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:21:48.004717Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:21:48.004759Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:21:48.004787Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:21:48.004863Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:21:48.004907Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:21:48.004940Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:21:48.005003Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:21:48.005040Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:21:48.005069Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:21:48.005446Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=44; 2025-03-31T16:21:48.005536Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=43; 2025-03-31T16:21:48.005651Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=48; 2025-03-31T16:21:48.005746Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=49; 2025-03-31T16:21:48.005901Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:21:48.005968Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:21:48.006010Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:21:48.006223Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:21:48.006290Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:21:48.006323Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:21:48.006474Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:21:48.006513Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:21:48.006541Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-31T16:21:48.006732Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:21:48.006776Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:21:48.006804Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-31T16:21:48.006955Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T16:21:48.007002Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T16:21:48.007047Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
g=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-31T16:21:55.385681Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-31T16:21:55.385828Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:232;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;);columns=10;rows=31; 2025-03-31T16:21:55.385932Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:252;stage=data_format;batch_size=1984;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-03-31T16:21:55.386083Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:1064:2935];bytes=1984;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2025-03-31T16:21:55.386245Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-31T16:21:55.386366Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-31T16:21:55.386526Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-31T16:21:55.386752Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-31T16:21:55.386887Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-31T16:21:55.387028Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-31T16:21:55.387076Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1065:2936] finished for tablet 9437184 2025-03-31T16:21:55.387608Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:416;event=scan_finish;compute_actor_id=[1:1064:2935];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.004},{"events":["l_task_result"],"t":0.011},{"events":["f_ack"],"t":0.012},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.013}],"full":{"a":1743438115373204,"name":"_full_task","f":1743438115373204,"d_finished":0,"c":0,"l":1743438115387149,"d":13945},"events":[{"name":"bootstrap","f":1743438115373388,"d_finished":2772,"c":1,"l":1743438115376160,"d":2772},{"a":1743438115386729,"name":"ack","f":1743438115385371,"d_finished":1188,"c":1,"l":1743438115386559,"d":1608},{"a":1743438115386713,"name":"processing","f":1743438115377316,"d_finished":5329,"c":10,"l":1743438115386562,"d":5765},{"name":"ProduceResults","f":1743438115374894,"d_finished":3085,"c":13,"l":1743438115387059,"d":3085},{"a":1743438115387061,"name":"Finish","f":1743438115387061,"d_finished":0,"c":0,"l":1743438115387149,"d":88},{"name":"task_result","f":1743438115377330,"d_finished":4010,"c":9,"l":1743438115385178,"d":4010}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-31T16:21:55.387717Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:1064:2935];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-31T16:21:55.388223Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:371;event=scan_finished;compute_actor_id=[1:1064:2935];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.004},{"events":["l_task_result"],"t":0.011},{"events":["f_ack"],"t":0.012},{"events":["l_ProduceResults","f_Finish"],"t":0.013},{"events":["l_ack","l_processing","l_Finish"],"t":0.014}],"full":{"a":1743438115373204,"name":"_full_task","f":1743438115373204,"d_finished":0,"c":0,"l":1743438115387785,"d":14581},"events":[{"name":"bootstrap","f":1743438115373388,"d_finished":2772,"c":1,"l":1743438115376160,"d":2772},{"a":1743438115386729,"name":"ack","f":1743438115385371,"d_finished":1188,"c":1,"l":1743438115386559,"d":2244},{"a":1743438115386713,"name":"processing","f":1743438115377316,"d_finished":5329,"c":10,"l":1743438115386562,"d":6401},{"name":"ProduceResults","f":1743438115374894,"d_finished":3085,"c":13,"l":1743438115387059,"d":3085},{"a":1743438115387061,"name":"Finish","f":1743438115387061,"d_finished":0,"c":0,"l":1743438115387785,"d":724},{"name":"task_result","f":1743438115377330,"d_finished":4010,"c":9,"l":1743438115385178,"d":4010}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-31T16:21:55.388316Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-31T16:21:55.372666Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=10308;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=10308;selected_rows=0; 2025-03-31T16:21:55.388374Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-31T16:21:55.388730Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadStandalone [GOOD] Test command err: 2025-03-31T16:21:50.510546Z node 1 :BLOB_CACHE NOTICE: 
MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:21:50.596486Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:21:50.614384Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:21:50.614638Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:21:50.621304Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:21:50.621514Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:21:50.621745Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:21:50.621825Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:21:50.621892Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:21:50.621971Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:21:50.622066Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:21:50.622148Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:21:50.622245Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:21:50.622311Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:21:50.622378Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:21:50.622457Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:21:50.649601Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:21:50.649790Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:21:50.649849Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:21:50.650094Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:21:50.650256Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:21:50.650324Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:21:50.650369Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:21:50.650479Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:21:50.650547Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:21:50.650592Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:21:50.650624Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:21:50.650837Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:21:50.650920Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:21:50.650967Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:21:50.651000Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:21:50.651142Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:21:50.651201Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:21:50.651245Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:21:50.651274Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:21:50.651342Z node 
1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:21:50.651386Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:21:50.651425Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:21:50.651499Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:21:50.651548Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:21:50.651575Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:21:50.651970Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=51; 2025-03-31T16:21:50.652046Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=32; 2025-03-31T16:21:50.652160Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=65; 2025-03-31T16:21:50.652259Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=39; 2025-03-31T16:21:50.652430Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:21:50.652488Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:21:50.652521Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:21:50.652724Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:21:50.652772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:21:50.652801Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:21:50.652966Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:21:50.653013Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:21:50.653046Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-31T16:21:50.653264Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:21:50.653311Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:21:50.653339Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-31T16:21:50.653455Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T16:21:50.653494Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T16:21:50.653558Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... id: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-31T16:21:55.697640Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-31T16:21:55.697780Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:232;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;);columns=10;rows=31; 2025-03-31T16:21:55.697865Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:252;stage=data_format;batch_size=1984;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-03-31T16:21:55.698030Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:432:2447];bytes=1984;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2025-03-31T16:21:55.698201Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-31T16:21:55.698358Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-31T16:21:55.698505Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-31T16:21:55.698737Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-31T16:21:55.698890Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-31T16:21:55.699057Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-31T16:21:55.699131Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:433:2448] finished for tablet 9437184 2025-03-31T16:21:55.699670Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:416;event=scan_finish;compute_actor_id=[1:432:2447];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.003},{"events":["f_ack","l_task_result"],"t":0.012},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.014}],"full":{"a":1743438115684570,"name":"_full_task","f":1743438115684570,"d_finished":0,"c":0,"l":1743438115699188,"d":14618},"events":[{"name":"bootstrap","f":1743438115684743,"d_finished":2535,"c":1,"l":1743438115687278,"d":2535},{"a":1743438115698713,"name":"ack","f":1743438115697328,"d_finished":1210,"c":1,"l":1743438115698538,"d":1685},{"a":1743438115698695,"name":"processing","f":1743438115688353,"d_finished":6403,"c":10,"l":1743438115698544,"d":6896},{"name":"ProduceResults","f":1743438115686174,"d_finished":3300,"c":13,"l":1743438115699112,"d":3300},{"a":1743438115699117,"name":"Finish","f":1743438115699117,"d_finished":0,"c":0,"l":1743438115699188,"d":71},{"name":"task_result","f":1743438115688375,"d_finished":5036,"c":9,"l":1743438115697142,"d":5036}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-31T16:21:55.699742Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:432:2447];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-31T16:21:55.700144Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:371;event=scan_finished;compute_actor_id=[1:432:2447];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.003},{"events":["f_ack","l_task_result"],"t":0.012},{"events":["l_ProduceResults","f_Finish"],"t":0.014},{"events":["l_ack","l_processing","l_Finish"],"t":0.015}],"full":{"a":1743438115684570,"name":"_full_task","f":1743438115684570,"d_finished":0,"c":0,"l":1743438115699782,"d":15212},"events":[{"name":"bootstrap","f":1743438115684743,"d_finished":2535,"c":1,"l":1743438115687278,"d":2535},{"a":1743438115698713,"name":"ack","f":1743438115697328,"d_finished":1210,"c":1,"l":1743438115698538,"d":2279},{"a":1743438115698695,"name":"processing","f":1743438115688353,"d_finished":6403,"c":10,"l":1743438115698544,"d":7490},{"name":"ProduceResults","f":1743438115686174,"d_finished":3300,"c":13,"l":1743438115699112,"d":3300},{"a":1743438115699117,"name":"Finish","f":1743438115699117,"d_finished":0,"c":0,"l":1743438115699782,"d":665},{"name":"task_result","f":1743438115688375,"d_finished":5036,"c":9,"l":1743438115697142,"d":5036}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-31T16:21:55.700222Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-31T16:21:55.684009Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=10308;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=10308;selected_rows=0; 2025-03-31T16:21:55.700261Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-31T16:21:55.700564Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; >> TPQTest::PQ_Tablet_Removes_Blobs_Asynchronously [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::PQ_Tablet_Removes_Blobs_Asynchronously [GOOD] Test command err: 2025-03-31T16:21:02.382022Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:02.382104Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-31T16:21:02.407762Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:02.428887Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "important_user" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "important_user" Generation: 1 Important: true } 2025-03-31T16:21:02.432604Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:186:2199] 2025-03-31T16:21:02.434020Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [1:186:2199] Run 1 CmdWrite 2025-03-31T16:21:02.449690Z node 1 :PERSQUEUE INFO: new Cookie default|74dc63d7-a379d508-6e7f6a24-12306b21_0 generated for partition 0 topic 'topic' owner default Captured kesus quota request event from [1:207:2216] Captured kesus quota request event from [1:208:2217] CmdRead Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [1:177:2192] Captured kesus quota request event from [1:207:2216] Currently have 3 quoter requests Run 2 CmdWrite 2025-03-31T16:21:03.474481Z node 1 :PERSQUEUE INFO: new Cookie default|4cf59927-3cd33f29-4560c338-4c96dfa3_1 generated for partition 0 topic 'topic' owner default CmdRead Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [1:177:2192] Captured kesus quota request event from [1:207:2216] Currently have 4 quoter requests Leader for TabletID 72057594037927937 is [0:0:0] sender: 
[2:103:2057] recipient: [2:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:103:2057] recipient: [2:101:2135] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:108:2057] recipient: [2:101:2135] 2025-03-31T16:21:06.288107Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:06.288192Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:149:2057] recipient: [2:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:149:2057] recipient: [2:147:2170] Leader for TabletID 72057594037927938 is [2:153:2174] sender: [2:154:2057] recipient: [2:147:2170] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:179:2057] recipient: [2:14:2061] 2025-03-31T16:21:06.305631Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:06.306574Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 2 actor [2:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2025-03-31T16:21:06.307161Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:185:2198] 2025-03-31T16:21:06.308911Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [2:185:2198] 2025-03-31T16:21:06.310677Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [2:186:2199] 2025-03-31T16:21:06.312232Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [2:186:2199] 2025-03-31T16:21:06.320196Z node 2 :PERSQUEUE INFO: new Cookie default|9bf4c931-a0908afc-feab898-33b24e69_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-31T16:21:06.325259Z node 2 :PERSQUEUE INFO: new Cookie default|705f8b08-bb2d8ff9-7a8d745a-f084bae8_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-31T16:21:06.330494Z node 2 :PERSQUEUE INFO: new Cookie default|87e50fdf-4e1ac046-e0a508a-3c09854f_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:103:2057] recipient: [3:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:103:2057] recipient: [3:101:2135] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:108:2057] recipient: [3:101:2135] 2025-03-31T16:21:06.692726Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:06.692809Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes 
info Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:149:2057] recipient: [3:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:149:2057] recipient: [3:147:2170] Leader for TabletID 72057594037927938 is [3:153:2174] sender: [3:154:2057] recipient: [3:147:2170] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:179:2057] recipient: [3:14:2061] 2025-03-31T16:21:06.711551Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:06.712333Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 3 actor [3:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 3 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 3 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 3 Important: false } 2025-03-31T16:21:06.712962Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [3:185:2198] 2025-03-31T16:21:06.714954Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [3:185:2198] 2025-03-31T16:21:06.716429Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [3:186:2199] 2025-03-31T16:21:06.717928Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [3:186:2199] !Reboot 72057594037927937 (actor [3:107:2139]) on event NKikimr::TEvPersQueue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:204:2057] recipient: [3:99:2134] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:207:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:208:2057] recipient: [3:206:2212] Leader for TabletID 72057594037927937 is [3:209:2213] sender: [3:210:2057] recipient: [3:206:2212] 2025-03-31T16:21:06.753203Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:06.753259Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-31T16:21:06.753832Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [3:262:2258] 2025-03-31T16:21:06.755771Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [3:263:2259] 2025-03-31T16:21:06.762056Z node 3 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-31T16:21:06.762134Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [3:262:2258] 2025-03-31T16:21:06.762485Z node 3 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
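The reboot above is the key event in this trace: partition 0 of tablet 72057594037927937 re-bootstraps and reports generation 3 where it ran at generation 2 before the reboot, and partition 1 does the same just below. When triaging longer runs it helps to confirm that generations only ever move forward. A minimal sketch of such a check, assuming only the log format visible in these lines (the regex and field layout are inferred from this output, not taken from any YDB tool):

import collections
import re
import sys

# Matches entries like:
#   init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [3:262:2258]
PAT = re.compile(r"init complete for topic '([^']+)' partition (\d+) generation (\d+)")

last = collections.defaultdict(int)  # (topic, partition) -> highest generation seen
for line in sys.stdin:
    m = PAT.search(line)
    if m is None:
        continue
    key = (m.group(1), int(m.group(2)))
    gen = int(m.group(3))
    if gen < last[key]:
        print(f"generation moved backwards for {key}: {last[key]} -> {gen}")
    last[key] = max(last[key], gen)

Fed this very log on stdin, the sketch stays silent, which matches the expected reboot behaviour here: generation 2 before "!Reboot", generation 3 after.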
2025-03-31T16:21:06.762514Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [3:263:2259] !Reboot 72057594037927937 (actor [3:107:2139]) rebooted! !Reboot 72057594037927937 (actor [3:107:2139]) tablet resolver refreshed! new actor is[3:209:2213] Leader for TabletID 72057594037927937 is [3:209:2213] sender: [3:305:2057] recipient: [3:14:2061] 2025-03-31T16:21:07.949002Z node 3 :PERSQUEUE INFO: new Cookie default|fb253471-f74258c3-767eb365-192c611f_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-31T16:21:07.957205Z node 3 :PERSQUEUE INFO: new Cookie default|90bff8a3-5c497b1-a09f48a8-801344aa_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-31T16:21:07.973978Z node 3 :PERSQUEUE INFO: new Cookie default|63d20aa2-5f3cb29-37902f46-8540cda_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:103:2057] recipient: [4:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:103:2057] recipient: [4:101:2135] Leader for TabletID 72057594037927937 is [4:107:2139] sender: [4:108:2057] recipient: [4:101:2135] 2025-03-31T16:21:08.290990Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:08.291069Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [4:149:2057] recipient: [4:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [4:149:2057] recipient: [4:147:2170] Leader for TabletID 72057594037927938 is [4:153:2174] sender: [4:154:2057] recipient: [4:147:2170] Leader for TabletID 72057594037927937 is [4:107:2139] sender: [4:179:2057] recipient: [4:14:2061] 2025-03-31T16:21:08.308027Z node 4 :PERSQUEUE NOTICE: [PQ: 7205759403792793 ... 
.size=0 2025-03-31T16:21:56.448685Z node 36 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:56.448742Z node 36 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-31T16:21:56.448851Z node 36 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitConfigStep 2025-03-31T16:21:56.449221Z node 36 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-31T16:21:56.449581Z node 36 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [36:278:2272] 2025-03-31T16:21:56.450545Z node 36 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitDiskStatusStep 2025-03-31T16:21:56.451832Z node 36 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitMetaStep 2025-03-31T16:21:56.452176Z node 36 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInfoRangeStep 2025-03-31T16:21:56.453057Z node 36 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitDataRangeStep 2025-03-31T16:21:56.453877Z node 36 :PERSQUEUE DEBUG: [topic:0:TInitDataRangeStep] Got data offset 0 count 2 size 1049870 so 0 eo 2 d0000000000_00000000000000000000_00000_0000000002_00002| 2025-03-31T16:21:56.454025Z node 36 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitDataStep 2025-03-31T16:21:56.460727Z node 36 :PERSQUEUE DEBUG: [topic:0:TInitDataStep] read res partition offset 0 endOffset 2 key 0,2 valuesize 1049870 expected 1049870 2025-03-31T16:21:56.463228Z node 36 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-03-31T16:21:56.463319Z node 36 :PERSQUEUE INFO: [topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-31T16:21:56.463368Z node 36 :PERSQUEUE DEBUG: [topic:0:Initializer] Initializing completed. 2025-03-31T16:21:56.463434Z node 36 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 3 [36:278:2272] 2025-03-31T16:21:56.463532Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 2 Head Offset 0 PartNo 0 PackedSize 1049870 count 2 nextOffset 2 batches 4 SYNC INIT sourceId sourceid1 seqNo 2 offset 1 SYNC INIT HEAD KEY: d0000000000_00000000000000000000_00000_0000000002_00002| size 1049870 2025-03-31T16:21:56.463616Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. 
Count 0 2025-03-31T16:21:56.463771Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-31T16:21:56.463834Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-31T16:21:56.463899Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [d0000000000_00000000000000000000_00000_0000000001_00000|, d0000000000_00000000000000000000_00000_0000000001_00000|] 2025-03-31T16:21:56.463960Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-31T16:21:56.464019Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-31T16:21:56.464078Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-31T16:21:56.464137Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-03-31T16:21:56.464234Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Init complete for topic 'topic' Partition: 0 SourceId: sourceid1 SeqNo: 2 offset: 1 MaxOffset: 2 2025-03-31T16:21:56.464307Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 39 2025-03-31T16:21:56.464395Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 39 2025-03-31T16:21:56.464925Z node 36 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-03-31T16:21:56.465001Z node 36 :PERSQUEUE DEBUG: CacheProxy. Delete blobs from d0000000000_00000000000000000000_00000_0000000001_00000|(+) to d0000000000_00000000000000000000_00000_0000000001_00000|(+) 2025-03-31T16:21:56.465499Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 0 Topic 'topic' partition 0 user user offset 0 count 1 size 1024000 endOffset 2 max time lag 0ms effective offset 0 2025-03-31T16:21:56.465576Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 2 2025-03-31T16:21:56.466674Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Reading cookie 0. All data is from uncompacted head. 
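The read path above logs its whole window in one entry ("offset 0 count 1 size 1024000 endOffset 2 max time lag 0ms effective offset 0") before reporting "Reading cookie 0. All data is from uncompacted head." A quick consistency check when triaging read issues is that the effective offset never runs past endOffset. A minimal sketch under that assumption; the invariant is inferred from this trace rather than a documented guarantee, and the regex only targets the line shape shown here:

import re
import sys

# Matches "read cookie N ... offset A count B size S endOffset E ... effective offset F"
PAT = re.compile(
    r"read cookie (\d+).*? offset (\d+) count (\d+) size (\d+) endOffset (\d+)"
    r".*?effective offset (\d+)")

for line in sys.stdin:
    m = PAT.search(line)
    if m is None:
        continue
    cookie, offset, count, size, end, eff = map(int, m.groups())
    # assumed invariant: time lag may clamp the effective offset forward,
    # but it must stay within [offset, endOffset]
    if not (offset <= eff <= end):
        print(f"suspicious read: cookie={cookie} offset={offset} "
              f"effective={eff} endOffset={end}")

On the read above this yields offset=0, effective=0, endOffset=2, so the window is consistent.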
2025-03-31T16:21:56.466758Z node 36 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-03-31T16:21:56.466979Z node 36 :PERSQUEUE DEBUG: Topic 'topic' partition 0 user user readTimeStamp done, result 130 queuesize 0 startOffset 0 2025-03-31T16:21:56.470121Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 >>> write #3 2025-03-31T16:21:56.478322Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [36:303:2290], now have 1 active actors on pipe 2025-03-31T16:21:56.478445Z node 36 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-03-31T16:21:56.478495Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-03-31T16:21:56.478564Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message topic: topic partition: 0 SourceId: 'sourceid1' SeqNo: 3 partNo : 0 messageNo: 1 size 1024 offset: 2 2025-03-31T16:21:56.478658Z node 36 :PERSQUEUE DEBUG: tablet 72057594037927937 topic 'topic' partition 0 error: new GetOwnership request needed for owner 2025-03-31T16:21:56.478756Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvError Cookie 1, Error new GetOwnership request needed for owner 2025-03-31T16:21:56.478805Z node 36 :PERSQUEUE DEBUG: Answer error topic: 'topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2025-03-31T16:21:56.479201Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [36:306:2292], now have 1 active actors on pipe 2025-03-31T16:21:56.479286Z node 36 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-03-31T16:21:56.479332Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-03-31T16:21:56.479441Z node 36 :PERSQUEUE INFO: new Cookie default|394c3ead-fe9ddfe1-7c82b1ff-d497ffac_0 generated for partition 0 topic 'topic' owner default 2025-03-31T16:21:56.479545Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2025-03-31T16:21:56.479661Z node 36 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-03-31T16:21:56.480092Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [36:308:2294], now have 1 active actors on pipe 2025-03-31T16:21:56.480147Z node 36 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-03-31T16:21:56.480183Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-03-31T16:21:56.480223Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message topic: topic partition: 0 SourceId: 'sourceid1' SeqNo: 3 partNo : 0 messageNo: 0 size 1024 offset: 2 2025-03-31T16:21:56.480300Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Send write quota request. Topic: "topic". Partition: 0. Amount: 1033. Cookie: 1 2025-03-31T16:21:56.480427Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Got quota. Topic: "topic". 
Partition: 0: Cookie: 1 2025-03-31T16:21:56.480585Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 part blob processing sourceId 'sourceid1' seqNo 3 partNo 0 2025-03-31T16:21:56.482698Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 part blob complete sourceId 'sourceid1' seqNo 3 partNo 0 FormedBlobsCount 0 NewHead: Offset 2 PartNo 0 PackedSize 1096 count 1 nextOffset 3 batches 1 2025-03-31T16:21:56.483433Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Add new write blob: topic 'topic' partition 0 compactOffset 2,1 HeadOffset 0 endOffset 2 curOffset 3 d0000000000_00000000000000000002_00000_0000000001_00000| size 1084 WTime 253 2025-03-31T16:21:56.483609Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-31T16:21:56.483657Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-31T16:21:56.483707Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-03-31T16:21:56.483745Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-31T16:21:56.483788Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psourceid1 2025-03-31T16:21:56.483822Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] d0000000000_00000000000000000002_00000_0000000001_00000| 2025-03-31T16:21:56.483846Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-31T16:21:56.483884Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-31T16:21:56.483938Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-03-31T16:21:56.484036Z node 36 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-03-31T16:21:56.484127Z node 36 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 2 partNo 0 count 1 size 1084 2025-03-31T16:21:56.488829Z node 36 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 2 count 1 size 1084 actorID [36:269:2265] 2025-03-31T16:21:56.489005Z node 36 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. Tablet '72057594037927937' partition 0 offset 2 partno 0 count 1 parts 0 size 1084 2025-03-31T16:21:56.489135Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 1033 WriteNewSizeFromSupportivePartitions# 0 2025-03-31T16:21:56.489222Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-03-31T16:21:56.489315Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'sourceid1', Topic: 'topic', Partition: 0, SeqNo: 3, partNo: 0, Offset: 2 is stored on disk 2025-03-31T16:21:56.489616Z node 36 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 >>> write #1 2025-03-31T16:21:56.490128Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [36:315:2300], now have 1 active actors on pipe >> TColumnShardTestReadWrite::ReadSomePrograms [GOOD] >> TPQTest::TestReadSubscription [GOOD] >> TColumnShardTestReadWrite::CompactionSplitGranule_PKDatetime >> TColumnShardTestReadWrite::ReadWithProgramLike ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadSomePrograms [GOOD] Test command err: 2025-03-31T16:21:56.394831Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:21:56.495590Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:21:56.524985Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:21:56.525334Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:21:56.535525Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:21:56.535777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:21:56.536068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:21:56.536238Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:21:56.536400Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:21:56.536578Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:21:56.536740Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:21:56.536912Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:21:56.537086Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 
2025-03-31T16:21:56.537254Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:21:56.537399Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:21:56.537552Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:21:56.576456Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:21:56.576702Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:21:56.576771Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:21:56.576999Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:21:56.577191Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:21:56.577280Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:21:56.577332Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:21:56.577483Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:21:56.577574Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:21:56.577628Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:21:56.577662Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:21:56.577927Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:21:56.578031Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:21:56.578091Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:21:56.578153Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:21:56.578307Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:21:56.578392Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:21:56.578450Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:21:56.578488Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:21:56.578597Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:21:56.578668Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:21:56.578714Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:21:56.578797Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:21:56.578849Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:21:56.578895Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:21:56.579398Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=64; 2025-03-31T16:21:56.579520Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=48; 2025-03-31T16:21:56.579631Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=41; 2025-03-31T16:21:56.579737Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=50; 2025-03-31T16:21:56.579954Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:21:56.580062Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:21:56.580108Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:21:56.580379Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:21:56.580446Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:21:56.580490Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:21:56.580687Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:21:56.580774Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:21:56.580821Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-31T16:21:56.581084Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:21:56.581145Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:21:56.581184Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-31T16:21:56.581380Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T16:21:56.581443Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T16:21:56.581502Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
s_manager.cpp:245;method=RegisterTable;path_id=1; 2025-03-31T16:21:57.148373Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine.h:144;event=RegisterTable;path_id=1; 2025-03-31T16:21:57.154489Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:488;event=OnTieringModified;path_id=1; 2025-03-31T16:21:57.154648Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tx_controller.cpp:211;event=finished_tx;tx_id=10; 2025-03-31T16:21:57.178085Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[2] complete at tablet 9437184 2025-03-31T16:21:57.178259Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=3200;columns=5; 2025-03-31T16:21:57.190677Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];fline=actor.cpp:22;event=flush_writing;size=3200;count=1; 2025-03-31T16:21:57.192604Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 1 at tablet 9437184 2025-03-31T16:21:57.192955Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 1 2025-03-31T16:21:57.205373Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 1 2025-03-31T16:21:57.205546Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=4;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:21:57.219316Z node 1 :TX_COLUMNSHARD DEBUG: PlanStep 100 at tablet 9437184, mediator 0 2025-03-31T16:21:57.219433Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[6] execute at tablet 9437184 2025-03-31T16:21:57.219815Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=100;fline=abstract.h:83;progress_tx_id=100;lock_id=1;broken=0; 2025-03-31T16:21:57.220074Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=100;fline=tx_controller.cpp:211;event=finished_tx;tx_id=100; 2025-03-31T16:21:57.232252Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[6] complete at tablet 9437184 2025-03-31T16:21:57.232406Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=abstract.h:93;progress_tx_id=100;lock_id=1;broken=0; 2025-03-31T16:21:57.232582Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=3384; 2025-03-31T16:21:57.236500Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=manager.cpp:10;event=lock;process_id=CS::INDEXATION::44290064-e4c11f0-bb18963a-5d251d82; 2025-03-31T16:21:57.236600Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=ro_controller.cpp:45;event=CS::INDEXATION;tablet_id=9437184; 2025-03-31T16:21:57.236720Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:754;event=indexation;bytes=3384;blobs_count=1;max_limit=251658240;has_more=0;external_task_id=44290064-e4c11f0-bb18963a-5d251d82; 2025-03-31T16:21:57.236971Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:620;event=start_changes;type=CS::INDEXATION;task_id=44290064-e4c11f0-bb18963a-5d251d82; 2025-03-31T16:21:57.237482Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];ev_type=NKikimr::NOlap::NResourceBroker::NSubscribe::TEvStartTask;fline=actor.cpp:38;event=ask_resources;task=cpu=0;mem=3035;external_task_id=44290064-e4c11f0-bb18963a-5d251d82;type=CS::INDEXATION;priority=0;; 2025-03-31T16:21:57.237708Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=actor.cpp:29;event=result_resources;task_id=1;task=cpu=0;mem=3035;external_task_id=44290064-e4c11f0-bb18963a-5d251d82;type=CS::INDEXATION;priority=0;; 2025-03-31T16:21:57.237757Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:9;event=resource_allocated;external_task_id=44290064-e4c11f0-bb18963a-5d251d82;mem=3035;cpu=0; 2025-03-31T16:21:57.237927Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:40;event=allocate_resources;external_task_id=44290064-e4c11f0-bb18963a-5d251d82;task_id=1;mem=3035;cpu=0; 2025-03-31T16:21:57.238091Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=44290064-e4c11f0-bb18963a-5d251d82;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=44290064-e4c11f0-bb18963a-5d251d82; 2025-03-31T16:21:57.243341Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=44290064-e4c11f0-bb18963a-5d251d82;fline=actor.cpp:48;task=agents_waiting=0;additional_info=();; 2025-03-31T16:21:57.243554Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; 2025-03-31T16:21:57.245762Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 9437184 2025-03-31T16:21:57.246056Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[8] (CS::INDEXATION) apply at tablet 9437184 2025-03-31T16:21:57.246756Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:2 Blob count: 1 2025-03-31T16:21:57.246848Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 0 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=0;raw_bytes=0;count=0;records=0} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-03-31T16:21:57.247388Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:239;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-03-31T16:21:57.247508Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-31T16:21:57.247658Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:769;event=skip_indexation;reason=in_progress;count=1;insert_overload_size=3384;indexing_debug={task_ids=44290064-e4c11f0-bb18963a-5d251d82,;}; 
2025-03-31T16:21:57.247753Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=0; 2025-03-31T16:21:57.247958Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-31T16:21:57.248007Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-31T16:21:57.248046Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-31T16:21:57.248129Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-31T16:21:57.248480Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 100 scanId: 0 version: {100:100} readable: {100:max} at tablet 9437184 2025-03-31T16:21:57.260972Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=44290064-e4c11f0-bb18963a-5d251d82;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; 2025-03-31T16:21:57.261058Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=44290064-e4c11f0-bb18963a-5d251d82;fline=with_appended.cpp:65;portions=1,;task_id=44290064-e4c11f0-bb18963a-5d251d82; 2025-03-31T16:21:57.261292Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=44290064-e4c11f0-bb18963a-5d251d82;fline=manager.cpp:15;event=unlock;process_id=CS::INDEXATION::44290064-e4c11f0-bb18963a-5d251d82; 2025-03-31T16:21:57.261379Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=44290064-e4c11f0-bb18963a-5d251d82;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-31T16:21:57.261473Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=44290064-e4c11f0-bb18963a-5d251d82;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:21:57.261552Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=44290064-e4c11f0-bb18963a-5d251d82;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=0; 2025-03-31T16:21:57.261630Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=44290064-e4c11f0-bb18963a-5d251d82;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-31T16:21:57.261692Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=44290064-e4c11f0-bb18963a-5d251d82;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-31T16:21:57.261738Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=44290064-e4c11f0-bb18963a-5d251d82;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-31T16:21:57.261833Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;task_id=44290064-e4c11f0-bb18963a-5d251d82;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.998500s; 2025-03-31T16:21:57.261904Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=44290064-e4c11f0-bb18963a-5d251d82;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-31T16:21:57.262042Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:1:3:0:3384:0] 2025-03-31T16:21:57.262104Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:2 Blob count: 1 2025-03-31T16:21:57.262252Z node 1 :TX_COLUMNSHARD DEBUG: fline=task.cpp:21;event=free_resources;task_id=1;external_task_id=44290064-e4c11f0-bb18963a-5d251d82;mem=3035;cpu=0; 2025-03-31T16:21:57.262409Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:21:57.262585Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 100 scanId: 0 at tablet 9437184 2025-03-31T16:21:57.262732Z node 1 :TX_COLUMNSHARD_SCAN WARN: tx_id=100;scan_id=0;gen=0;table=;snapshot={100:100};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:14;event=TTxScan failed;problem=cannot parse program;details=Can't parse SsaProgram: Can't parse TOlapProgram protobuf; >> TColumnShardTestReadWrite::ReadAggregate >> PQCountersSimple::PartitionWriteQuota [GOOD] >> PQCountersSimple::PartitionFirstClass ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::TestReadSubscription [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:108:2057] recipient: [1:101:2135] 2025-03-31T16:21:08.623589Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:08.623644Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927938 is [1:153:2174] sender: [1:154:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:179:2057] recipient: [1:14:2061] 2025-03-31T16:21:08.639520Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:08.653820Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } 
AllPartitions { PartitionId: 2 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "test" Generation: 1 Important: false } Consumers { Name: "another-user" Generation: 1 Important: false } 2025-03-31T16:21:08.654756Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:185:2198] 2025-03-31T16:21:08.656705Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:185:2198] 2025-03-31T16:21:08.660114Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:186:2199] 2025-03-31T16:21:08.661572Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:186:2199] 2025-03-31T16:21:08.664644Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] bootstrapping 2 [1:187:2200] 2025-03-31T16:21:08.665947Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 2 generation 2 [1:187:2200] 2025-03-31T16:21:08.674788Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:207:2213], now have 1 active actors on pipe 2025-03-31T16:21:08.674908Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2025-03-31T16:21:08.674961Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2025-03-31T16:21:08.675243Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid' SeqNo: 1 partNo : 0 messageNo: 0 size 1 offset: -1 2025-03-31T16:21:08.675290Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid' SeqNo: 2 partNo : 0 messageNo: 0 size 1 offset: -1 2025-03-31T16:21:08.675378Z node 1 :PERSQUEUE DEBUG: tablet 72057594037927937 topic 'rt3.dc1--asdfgs--topic' partition 0 error: new GetOwnership request needed for owner 2025-03-31T16:21:08.675461Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvError Cookie 1, Error new GetOwnership request needed for owner 2025-03-31T16:21:08.675489Z node 1 :PERSQUEUE DEBUG: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: new GetOwnership request needed for owner 2025-03-31T16:21:08.675766Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:209:2215], now have 1 active actors on pipe 2025-03-31T16:21:08.675835Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2025-03-31T16:21:08.675863Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2025-03-31T16:21:08.675956Z node 1 :PERSQUEUE INFO: new Cookie default|33042d4d-200f2460-9283db4b-17f540d1_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-31T16:21:08.676050Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. 
Partition: 0 2025-03-31T16:21:08.676136Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-03-31T16:21:08.676411Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:211:2217], now have 1 active actors on pipe 2025-03-31T16:21:08.676492Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2025-03-31T16:21:08.676514Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2025-03-31T16:21:08.676556Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid' SeqNo: 1 partNo : 0 messageNo: 0 size 1 offset: -1 2025-03-31T16:21:08.676584Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid' SeqNo: 2 partNo : 0 messageNo: 0 size 1 offset: -1 2025-03-31T16:21:08.676725Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 part blob processing sourceId 'sourceid' seqNo 1 partNo 0 2025-03-31T16:21:08.677839Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 part blob complete sourceId 'sourceid' seqNo 1 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 72 count 1 nextOffset 1 batches 1 2025-03-31T16:21:08.677936Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 part blob processing sourceId 'sourceid' seqNo 2 partNo 0 2025-03-31T16:21:08.677970Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 part blob complete sourceId 'sourceid' seqNo 2 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 112 count 2 nextOffset 2 batches 1 2025-03-31T16:21:08.678747Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Add new write blob: topic 'rt3.dc1--asdfgs--topic' partition 0 compactOffset 0,2 HeadOffset 0 endOffset 0 curOffset 2 d0000000000_00000000000000000000_00000_0000000002_00000| size 94 WTime 331 2025-03-31T16:21:08.678891Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-31T16:21:08.678922Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-31T16:21:08.678949Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-03-31T16:21:08.678988Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-31T16:21:08.679035Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psourceid 2025-03-31T16:21:08.679058Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] d0000000000_00000000000000000000_00000_0000000002_00000| 2025-03-31T16:21:08.679073Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-31T16:21:08.679140Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-31T16:21:08.679177Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 
2025-03-31T16:21:08.679229Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-03-31T16:21:08.679289Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 0 partNo 0 count 2 size 94 2025-03-31T16:21:08.681269Z node 1 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 0 count 2 size 94 actorID [1:134:2160] 2025-03-31T16:21:08.681444Z node 1 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. Tablet '72057594037927937' partition 0 offset 0 partno 0 count 2 parts 0 size 94 2025-03-31T16:21:08.681516Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 18 WriteNewSizeFromSupportivePartitions# 0 2025-03-31T16:21:08.681604Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-31T16:21:08.681665Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'sourceid', Topic: 'rt3.dc1--asdfgs--topic', Partition: 0, SeqNo: 1, partNo: 0, Offset: 0 is stored on disk 2025-03-31T16:21:08.681700Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-31T16:21:08.681731Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'sourceid', Topic: 'rt3.dc1--asdfgs--topic', Partition: 0, SeqNo: 2, partNo: 0, Offset: 1 is stored on disk 2025-03-31T16:21:08.681844Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 1 2025-03-31T16:21:08.681900Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 1 2025-03-31T16:21:08.681934Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user another-user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 1 2025-03-31T16:21:08.681955Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user test readTimeStamp for offset 0 initiated queuesize 1 startOffset 0 ReadingTimestamp 1 rrg 1 2025-03-31T16:21:08.682079Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-03-31T16:21:08.682213Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 0 Topic 'rt3.dc1--asdfgs--topic' partition 0 user user offset 0 count 1 size 1024000 endOffset 2 max time lag 0ms effective offset 0 2025-03-31T16:21:08.682258Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 2 2025-03-31T16:21:08.682564Z node 1 :PERSQUEUE DEBUG: [PQ: 72 ... 
s: 1 PartitionIds: 2 PartitionIds: 3 PartitionIds: 4 TopicName: "rt3.dc1--asdfgs--topic" Version: 55 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } Partitions { PartitionId: 3 } Partitions { PartitionId: 4 } ReadRuleGenerations: 55 ReadRuleGenerations: 55 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } AllPartitions { PartitionId: 3 } AllPartitions { PartitionId: 4 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 55 Important: false } Consumers { Name: "user1" Generation: 55 Important: true } 2025-03-31T16:21:53.790704Z node 52 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [52:185:2198] 2025-03-31T16:21:53.794007Z node 52 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [52:185:2198] 2025-03-31T16:21:53.797636Z node 52 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [52:186:2199] 2025-03-31T16:21:53.799868Z node 52 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [52:186:2199] 2025-03-31T16:21:53.802771Z node 52 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] bootstrapping 2 [52:187:2200] 2025-03-31T16:21:53.804821Z node 52 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 2 generation 2 [52:187:2200] 2025-03-31T16:21:53.807564Z node 52 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [52:188:2201] 2025-03-31T16:21:53.809604Z node 52 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 3 generation 2 [52:188:2201] 2025-03-31T16:21:53.812519Z node 52 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 4, State: StateInit] bootstrapping 4 [52:189:2202] 2025-03-31T16:21:53.814611Z node 52 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 4, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 4 generation 2 [52:189:2202] 2025-03-31T16:21:53.827717Z node 52 :PERSQUEUE INFO: new Cookie default|3e7b7f1e-b2b3e3d0-6447ed4a-29575893_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-31T16:21:54.826779Z node 52 :PERSQUEUE INFO: new Cookie default|54719d4d-51649655-595bee6f-150d5963_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-31T16:21:54.837291Z node 52 :PERSQUEUE INFO: new Cookie default|24ab75ff-76300d8c-6dfd3fdd-b462b487_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [0:0:0] sender: [53:103:2057] recipient: [53:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [53:103:2057] recipient: [53:101:2135] Leader for TabletID 72057594037927937 is [53:107:2139] sender: [53:108:2057] recipient: [53:101:2135] 2025-03-31T16:21:55.226641Z node 53 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:55.226723Z node 53 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is 
[0:0:0] sender: [53:149:2057] recipient: [53:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [53:149:2057] recipient: [53:147:2170] Leader for TabletID 72057594037927938 is [53:153:2174] sender: [53:154:2057] recipient: [53:147:2170] Leader for TabletID 72057594037927937 is [53:107:2139] sender: [53:179:2057] recipient: [53:14:2061] 2025-03-31T16:21:55.248905Z node 53 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:55.250600Z node 53 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 56 actor [53:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 100 MaxSizeInPartition: 104857600 LifetimeSeconds: 172800 ImportantClientId: "user1" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 PartitionIds: 3 PartitionIds: 4 TopicName: "rt3.dc1--asdfgs--topic" Version: 56 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } Partitions { PartitionId: 3 } Partitions { PartitionId: 4 } ReadRuleGenerations: 56 ReadRuleGenerations: 56 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } AllPartitions { PartitionId: 3 } AllPartitions { PartitionId: 4 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 56 Important: false } Consumers { Name: "user1" Generation: 56 Important: true } 2025-03-31T16:21:55.251584Z node 53 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [53:185:2198] 2025-03-31T16:21:55.253841Z node 53 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [53:185:2198] 2025-03-31T16:21:55.256222Z node 53 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [53:186:2199] 2025-03-31T16:21:55.257800Z node 53 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [53:186:2199] 2025-03-31T16:21:55.259931Z node 53 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] bootstrapping 2 [53:187:2200] 2025-03-31T16:21:55.261480Z node 53 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 2 generation 2 [53:187:2200] 2025-03-31T16:21:55.263761Z node 53 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [53:188:2201] 2025-03-31T16:21:55.265224Z node 53 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 3 generation 2 [53:188:2201] 2025-03-31T16:21:55.267271Z node 53 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 4, State: StateInit] bootstrapping 4 [53:189:2202] 2025-03-31T16:21:55.268760Z node 53 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 4, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 4 generation 2 [53:189:2202] 2025-03-31T16:21:55.279072Z node 53 :PERSQUEUE INFO: new Cookie default|1be8c37b-da1ee345-5f0e8246-aad60bcb_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-31T16:21:56.244474Z node 53 :PERSQUEUE INFO: 
new Cookie default|dcb483ce-c2e55a2e-2f912bbd-1d4663e6_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-31T16:21:56.257269Z node 53 :PERSQUEUE INFO: new Cookie default|80a22712-bf2b3d70-ae06a2cc-be18e66_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [0:0:0] sender: [54:103:2057] recipient: [54:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [54:103:2057] recipient: [54:101:2135] Leader for TabletID 72057594037927937 is [54:107:2139] sender: [54:108:2057] recipient: [54:101:2135] 2025-03-31T16:21:56.687096Z node 54 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:56.687158Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [54:149:2057] recipient: [54:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [54:149:2057] recipient: [54:147:2170] Leader for TabletID 72057594037927938 is [54:153:2174] sender: [54:154:2057] recipient: [54:147:2170] Leader for TabletID 72057594037927937 is [54:107:2139] sender: [54:179:2057] recipient: [54:14:2061] 2025-03-31T16:21:56.702934Z node 54 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:56.704473Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 57 actor [54:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 100 MaxSizeInPartition: 104857600 LifetimeSeconds: 172800 ImportantClientId: "user1" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 PartitionIds: 3 PartitionIds: 4 TopicName: "rt3.dc1--asdfgs--topic" Version: 57 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } Partitions { PartitionId: 3 } Partitions { PartitionId: 4 } ReadRuleGenerations: 57 ReadRuleGenerations: 57 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } AllPartitions { PartitionId: 3 } AllPartitions { PartitionId: 4 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 57 Important: false } Consumers { Name: "user1" Generation: 57 Important: true } 2025-03-31T16:21:56.705366Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [54:185:2198] 2025-03-31T16:21:56.708037Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [54:185:2198] 2025-03-31T16:21:56.710052Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [54:186:2199] 2025-03-31T16:21:56.711589Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [54:186:2199] 2025-03-31T16:21:56.713403Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] bootstrapping 2 [54:187:2200] 2025-03-31T16:21:56.715051Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 2 generation 2 [54:187:2200] 
2025-03-31T16:21:56.717430Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [54:188:2201] 2025-03-31T16:21:56.718927Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 3 generation 2 [54:188:2201] 2025-03-31T16:21:56.721297Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 4, State: StateInit] bootstrapping 4 [54:189:2202] 2025-03-31T16:21:56.722804Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 4, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 4 generation 2 [54:189:2202] 2025-03-31T16:21:56.734613Z node 54 :PERSQUEUE INFO: new Cookie default|26076b88-2842a936-2adb96fb-92050467_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-31T16:21:57.699809Z node 54 :PERSQUEUE INFO: new Cookie default|4dabcea2-67834cc0-b6385b86-ccd3fda6_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-31T16:21:57.709999Z node 54 :PERSQUEUE INFO: new Cookie default|e2c50c54-3b5a9d31-99b1f79f-77cc61a0_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default >> TColumnShardTestReadWrite::CompactionInGranule_PKString >> PQCountersSimple::PartitionFirstClass [GOOD] >> PQCountersSimple::SupportivePartitionCountersPersist >> KqpPg::Returning-useSink [GOOD] >> KqpPg::SelectIndex+useSink >> KqpPg::MkqlTerminate [GOOD] >> KqpPg::NoSelectFullScan >> TColumnShardTestReadWrite::ReadWithProgramLike [GOOD] >> KqpPg::CheckPgAutoParams+useSink [GOOD] >> KqpPg::CheckPgAutoParams-useSink >> Normalizers::EmptyTablesNormalizer >> TColumnShardTestReadWrite::WriteReadDuplicate [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKUtf8 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadWithProgramLike [GOOD] Test command err: 2025-03-31T16:21:58.276559Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:21:58.372992Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:21:58.399823Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:21:58.400149Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:21:58.409260Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:21:58.409510Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:21:58.409852Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:21:58.410016Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:21:58.410144Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:21:58.410287Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:21:58.410433Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:21:58.410575Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:21:58.410703Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:21:58.410836Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:21:58.410983Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:21:58.411155Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:21:58.440265Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:21:58.440502Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:21:58.440561Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:21:58.440773Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:21:58.440928Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:21:58.440999Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:21:58.441050Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:21:58.441201Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:21:58.441300Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:21:58.441359Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:21:58.441395Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:21:58.441632Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:21:58.441733Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:21:58.441805Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:21:58.441841Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:21:58.441973Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:21:58.442061Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:21:58.442120Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:21:58.442157Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:21:58.442248Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:21:58.442302Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:21:58.442346Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:21:58.442416Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:21:58.442459Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:21:58.442495Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:21:58.442943Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=68; 2025-03-31T16:21:58.443057Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=47; 2025-03-31T16:21:58.443214Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=37; 2025-03-31T16:21:58.443316Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=52; 2025-03-31T16:21:58.443501Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:21:58.443597Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:21:58.443636Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:21:58.443873Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:21:58.443929Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:21:58.443965Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:21:58.444139Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:21:58.444202Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:21:58.444235Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-31T16:21:58.444531Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:21:58.444587Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:21:58.444621Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-31T16:21:58.444773Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T16:21:58.444824Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T16:21:58.444895Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
ine=actor.cpp:201;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-03-31T16:21:59.284159Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:304:2322];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:84;event=TEvTaskProcessedResult; 2025-03-31T16:21:59.284187Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:304:2322];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=merge.cpp:74;event=DoApply;interval_idx=0; 2025-03-31T16:21:59.284220Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:304:2322];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=6; 2025-03-31T16:21:59.284257Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:304:2322];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=10;merger=0;interval_id=6; 2025-03-31T16:21:59.284287Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:304:2322];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=scanner.cpp:65;event=intervals_finished; 2025-03-31T16:21:59.284358Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:304:2322];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;); 2025-03-31T16:21:59.284415Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:304:2322];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=1;count=10;finished=1; 2025-03-31T16:21:59.284444Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:304:2322];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:201;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-03-31T16:21:59.284626Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:304:2322];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-31T16:21:59.284750Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:304:2322];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:1;records_count:10;schema=message: string;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;); 2025-03-31T16:21:59.284792Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:304:2322];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-31T16:21:59.284870Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:304:2322];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:232;stage=ready 
result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;);columns=1;rows=10; 2025-03-31T16:21:59.284911Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:304:2322];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:252;stage=data_format;batch_size=61;num_rows=10;batch_columns=message; 2025-03-31T16:21:59.285005Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:304:2322];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:303:2321];bytes=61;rows=10;faults=0;finished=0;fault=0;schema=message: string; 2025-03-31T16:21:59.285101Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:304:2322];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;); 2025-03-31T16:21:59.285262Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:304:2322];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;); 2025-03-31T16:21:59.285344Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:304:2322];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;); 2025-03-31T16:21:59.285469Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:304:2322];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-31T16:21:59.285537Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:304:2322];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;); 2025-03-31T16:21:59.285622Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:304:2322];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;); 2025-03-31T16:21:59.285653Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:304:2322] finished for tablet 9437184 2025-03-31T16:21:59.286023Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:304:2322];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:416;event=scan_finish;compute_actor_id=[1:303:2321];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.003},{"events":["f_ack","l_task_result"],"t":0.009},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.01}],"full":{"a":1743438119275116,"name":"_full_task","f":1743438119275116,"d_finished":0,"c":0,"l":1743438119285693,"d":10577},"events":[{"name":"bootstrap","f":1743438119275266,"d_finished":2048,"c":1,"l":1743438119277314,"d":2048},{"a":1743438119285454,"name":"ack","f":1743438119284607,"d_finished":760,"c":1,"l":1743438119285367,"d":999},{"a":1743438119285445,"name":"processing","f":1743438119278577,"d_finished":4979,"c":9,"l":1743438119285369,"d":5227},{"name":"ProduceResults","f":1743438119276313,"d_finished":2225,"c":12,"l":1743438119285641,"d":2225},{"a":1743438119285643,"name":"Finish","f":1743438119285643,"d_finished":0,"c":0,"l":1743438119285693,"d":50},{"name":"task_result","f":1743438119278589,"d_finished":4084,"c":8,"l":1743438119284480,"d":4084}],"id":"9437184::6"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;); 2025-03-31T16:21:59.286113Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:304:2322];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:303:2321];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-31T16:21:59.286449Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:304:2322];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:371;event=scan_finished;compute_actor_id=[1:303:2321];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.003},{"events":["f_ack","l_task_result"],"t":0.009},{"events":["l_ProduceResults","f_Finish"],"t":0.01},{"events":["l_ack","l_processing","l_Finish"],"t":0.011}],"full":{"a":1743438119275116,"name":"_full_task","f":1743438119275116,"d_finished":0,"c":0,"l":1743438119286143,"d":11027},"events":[{"name":"bootstrap","f":1743438119275266,"d_finished":2048,"c":1,"l":1743438119277314,"d":2048},{"a":1743438119285454,"name":"ack","f":1743438119284607,"d_finished":760,"c":1,"l":1743438119285367,"d":1449},{"a":1743438119285445,"name":"processing","f":1743438119278577,"d_finished":4979,"c":9,"l":1743438119285369,"d":5677},{"name":"ProduceResults","f":1743438119276313,"d_finished":2225,"c":12,"l":1743438119285641,"d":2225},{"a":1743438119285643,"name":"Finish","f":1743438119285643,"d_finished":0,"c":0,"l":1743438119286143,"d":500},{"name":"task_result","f":1743438119278589,"d_finished":4084,"c":8,"l":1743438119284480,"d":4084}],"id":"9437184::6"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;); 2025-03-31T16:21:59.286518Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:304:2322];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-31T16:21:59.274690Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=10308;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=10308;selected_rows=0; 2025-03-31T16:21:59.286549Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:304:2322];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-31T16:21:59.286741Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:304:2322];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadDuplicate [GOOD] Test command err: 2025-03-31T16:21:41.721452Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:21:41.819638Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:21:41.842930Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:21:41.843268Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 
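Each scan_finish / scan_finished record above embeds a stats={...} JSON blob with per-stage counters. A small sketch (mine, not from the suite) that extracts the blob from one such record string and summarizes it; the field names d_finished (accumulated microseconds) and c (call count) are read directly from the blobs printed above:

import json
import re

def scan_stage_summary(record):
    # The blob sits between "stats=" and ";iterator=" in the record text.
    blob = re.search(r"stats=(\{.*?\});iterator=", record).group(1)
    for stage in json.loads(blob)["events"]:
        print(f'{stage["name"]}: {stage["d_finished"]} us over {stage["c"]} calls')

Applied to the scan_finish record above it would report, for instance, ProduceResults: 2225 us over 12 calls.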
2025-03-31T16:21:41.851429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:21:41.851653Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:21:41.851882Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:21:41.852013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:21:41.852110Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:21:41.852235Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:21:41.852345Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:21:41.852475Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:21:41.852607Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:21:41.852718Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:21:41.852818Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:21:41.852910Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:21:41.879983Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:21:41.880177Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:21:41.880247Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:21:41.880450Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:21:41.880609Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:21:41.880673Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:21:41.880711Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:21:41.880807Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:21:41.880880Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:21:41.880924Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:21:41.880971Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:21:41.881157Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:21:41.881234Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:21:41.881277Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:21:41.881303Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:21:41.881409Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:21:41.881466Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:21:41.881515Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:21:41.881545Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:21:41.881608Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:21:41.881650Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:21:41.881689Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 
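Because every record starts with a microsecond UTC timestamp, phase durations can be read straight off the log. A quick sketch with the two stamps copied from this output, TxInitSchema.Execute through TTxUpdateSchema.Execute_Finish for tablet 9437184 (the Execute_Finish record appears a few lines below):

from datetime import datetime

TS_FORMAT = "%Y-%m-%dT%H:%M:%S.%fZ"

start = datetime.strptime("2025-03-31T16:21:41.843268Z", TS_FORMAT)
finish = datetime.strptime("2025-03-31T16:21:41.883676Z", TS_FORMAT)
print(f"normalization pass: {(finish - start).total_seconds() * 1000:.1f} ms")

Here that comes to about 40.4 ms for the full pass over the eleven registered normalizers (normalizers_count=11 above).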
2025-03-31T16:21:41.881746Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:21:41.881781Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:21:41.881809Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:21:41.882186Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=49; 2025-03-31T16:21:41.882260Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=27; 2025-03-31T16:21:41.882353Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=34; 2025-03-31T16:21:41.882439Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=47; 2025-03-31T16:21:41.882590Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:21:41.882638Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:21:41.882685Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:21:41.882881Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:21:41.882925Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:21:41.882955Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:21:41.883119Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:21:41.883172Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:21:41.883199Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-31T16:21:41.883386Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:21:41.883435Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:21:41.883462Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-31T16:21:41.883589Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T16:21:41.883626Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T16:21:41.883676Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 1T16:21:59.635988Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:4123:6135];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=10;merger=0;interval_id=49; 2025-03-31T16:21:59.636031Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:4123:6135];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=scanner.cpp:65;event=intervals_finished; 2025-03-31T16:21:59.636127Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:4123:6135];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:21:59.636185Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:4123:6135];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=1;count=10;finished=1; 2025-03-31T16:21:59.636228Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:4123:6135];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:201;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-03-31T16:21:59.637453Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:4123:6135];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-31T16:21:59.637596Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:4123:6135];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:1;records_count:10;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:21:59.637637Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:4123:6135];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-31T16:21:59.637748Z node 1 :TX_COLUMNSHARD_SCAN 
DEBUG: SelfId=[1:4123:6135];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:232;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;);columns=1;rows=10; 2025-03-31T16:21:59.637813Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:4123:6135];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:252;stage=data_format;batch_size=80;num_rows=10;batch_columns=timestamp; 2025-03-31T16:21:59.637920Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:4123:6135];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:4118:6130];bytes=80;rows=10;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us]; 2025-03-31T16:21:59.638029Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:4123:6135];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:21:59.638145Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:4123:6135];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:21:59.638228Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:4123:6135];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:21:59.638674Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:4123:6135];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-31T16:21:59.638780Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:4123:6135];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:21:59.638865Z node 1 
:TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:4123:6135];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:21:59.638901Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:4123:6135] finished for tablet 9437184 2025-03-31T16:21:59.639374Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:4123:6135];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:416;event=scan_finish;compute_actor_id=[1:4118:6130];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.003},{"events":["f_processing","f_task_result"],"t":0.004},{"events":["l_task_result"],"t":0.018},{"events":["f_ack"],"t":0.019},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.021}],"full":{"a":1743438119617560,"name":"_full_task","f":1743438119617560,"d_finished":0,"c":0,"l":1743438119638949,"d":21389},"events":[{"name":"bootstrap","f":1743438119618213,"d_finished":2448,"c":1,"l":1743438119620661,"d":2448},{"a":1743438119638653,"name":"ack","f":1743438119637426,"d_finished":825,"c":1,"l":1743438119638251,"d":1121},{"a":1743438119638633,"name":"processing","f":1743438119621918,"d_finished":3593,"c":8,"l":1743438119638253,"d":3909},{"name":"ProduceResults","f":1743438119619530,"d_finished":2377,"c":11,"l":1743438119638886,"d":2377},{"a":1743438119638889,"name":"Finish","f":1743438119638889,"d_finished":0,"c":0,"l":1743438119638949,"d":60},{"name":"task_result","f":1743438119621937,"d_finished":2578,"c":7,"l":1743438119636280,"d":2578}],"id":"9437184::49"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:21:59.639449Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:4123:6135];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:4118:6130];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-31T16:21:59.639877Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:4123:6135];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:371;event=scan_finished;compute_actor_id=[1:4118:6130];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.003},{"events":["f_processing","f_task_result"],"t":0.004},{"events":["l_task_result"],"t":0.018},{"events":["f_ack"],"t":0.019},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.021}],"full":{"a":1743438119617560,"name":"_full_task","f":1743438119617560,"d_finished":0,"c":0,"l":1743438119639488,"d":21928},"events":[{"name":"bootstrap","f":1743438119618213,"d_finished":2448,"c":1,"l":1743438119620661,"d":2448},{"a":1743438119638653,"name":"ack","f":1743438119637426,"d_finished":825,"c":1,"l":1743438119638251,"d":1660},{"a":1743438119638633,"name":"processing","f":1743438119621918,"d_finished":3593,"c":8,"l":1743438119638253,"d":4448},{"name":"ProduceResults","f":1743438119619530,"d_finished":2377,"c":11,"l":1743438119638886,"d":2377},{"a":1743438119638889,"name":"Finish","f":1743438119638889,"d_finished":0,"c":0,"l":1743438119639488,"d":599},{"name":"task_result","f":1743438119621937,"d_finished":2578,"c":7,"l":1743438119636280,"d":2578}],"id":"9437184::49"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:21:59.639953Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:4123:6135];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-31T16:21:59.617018Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=2812;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=2812;selected_rows=0; 2025-03-31T16:21:59.639993Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:4123:6135];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-31T16:21:59.640170Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:4123:6135];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> BasicStatistics::NotFullStatisticsDatashard [GOOD] >> PQCountersSimple::SupportivePartitionCountersPersist [GOOD] >> TColumnShardTestReadWrite::CompactionGCFailingBs >> EvWrite::WriteWithSplit [GOOD] >> TColumnShardTestReadWrite::ReadAggregate [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::NotFullStatisticsDatashard 
[GOOD] Test command err: 2025-03-31T16:15:04.885300Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:04.885563Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:15:04.885634Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d28/r3tmp/tmpkGz9vQ/pdisk_1.dat 2025-03-31T16:15:05.276093Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13374, node 1 2025-03-31T16:15:05.597683Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:15:05.597743Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:15:05.597773Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:15:05.600184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:15:05.600650Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:15:05.686006Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:15:05.686171Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:15:05.702188Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24427 2025-03-31T16:15:06.352388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:15:10.086285Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-31T16:15:10.154696Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:15:10.154863Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:15:10.189652Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:15:10.196662Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:15:10.498100Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:10.498811Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:10.499470Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:10.499611Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:10.499872Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:10.499984Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:10.500074Z node 2 :HIVE WARN: HIVE#72075186224037888
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:10.500173Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:10.500275Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:15:10.686158Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:15:10.686278Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:15:10.701447Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:15:10.857365Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:15:10.919333Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-31T16:15:10.919460Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-31T16:15:10.954240Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-31T16:15:10.956384Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-31T16:15:10.956638Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-31T16:15:10.956703Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-31T16:15:10.956765Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-31T16:15:10.956818Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-31T16:15:10.956877Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-31T16:15:10.956936Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-31T16:15:10.958471Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-31T16:15:10.994263Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-31T16:15:10.994412Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1877:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-31T16:15:11.048441Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1888:2606] 2025-03-31T16:15:11.054730Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1918:2621] 2025-03-31T16:15:11.056163Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1918:2621], schemeshard id = 72075186224037897 2025-03-31T16:15:11.066409Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-31T16:15:11.087545Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-31T16:15:11.087618Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-31T16:15:11.087722Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-31T16:15:11.110543Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-31T16:15:11.121384Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-31T16:15:11.121591Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-31T16:15:11.358289Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-31T16:15:11.566481Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-31T16:15:11.692180Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-31T16:15:12.764471Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2241:3075], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:12.764650Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:12.788851Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-31T16:15:13.249679Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2539:3123], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:13.249850Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:13.251286Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2544:3127]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:15:13.251457Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-31T16:15:13.251547Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2546:3129] 2025-03-31T16:15:13.251621Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2546:3129] 2025-03-31T16:15:13.252142Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2547:2987] 2025-03-31T16:15:13.252503Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2546:3129], server id = [2:2547:2987], tablet id = 72075186224037894, status = OK 2025-03-31T16:15:13.252623Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:2547:2987], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-03-31T16:15:13.252693Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-03-31T16:15:13.252930Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-31T16:15:13.252994Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2544:3127], StatRequests.size() = 1 2025-03-31T16:15:13.271979Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2551:3133], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:13.272134Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:13.272525Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2556:3138], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:15:13.278735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-03-31T16:15:13.429112Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-03-31T16:15:13.429183Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-03-31T16:15:13.543528Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2546:3129], schemeshard count = 1 2025-03-31T16:15:13.993187Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreator ...
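For triaging a noisy run like this one, it helps to count records per component and severity first; the repeated KQP_WORKLOAD_SERVICE warnings here look like the expected retries while the default resource pool is being created. A generic sketch (assuming the whole log is in log_text) keyed on the "node N :COMPONENT LEVEL:" prefix used throughout this output:

import re
from collections import Counter

def severity_triage(log_text):
    counts = Counter(
        (m.group("comp"), m.group("level"))
        for m in re.finditer(
            r"node \d+ :(?P<comp>[A-Z_0-9]+) (?P<level>[A-Z]+):", log_text)
    )
    for (comp, level), n in counts.most_common(10):
        print(f"{comp:24} {level:7} {n}")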
2025-03-31T16:21:10.547378Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-03-31T16:21:10.547550Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-31T16:21:10.547863Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-31T16:21:10.558712Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:21:10.558777Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-31T16:21:12.870025Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:21:12.870079Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-31T16:21:14.041681Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-31T16:21:15.370665Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:21:15.370725Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-31T16:21:16.595529Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-03-31T16:21:16.595716Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-31T16:21:16.596089Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-31T16:21:17.846744Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:21:17.846810Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-31T16:21:20.129917Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-31T16:21:20.141780Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:21:20.141844Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-31T16:21:22.790195Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-03-31T16:21:22.790560Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-31T16:21:22.790847Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-31T16:21:22.801699Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:21:22.801761Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-31T16:21:25.243027Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:21:25.243114Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-31T16:21:26.368942Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-31T16:21:27.742578Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:21:27.742652Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-31T16:21:28.922896Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-03-31T16:21:28.923175Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-31T16:21:28.923545Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-31T16:21:30.194601Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:21:30.194669Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 
2025-03-31T16:21:32.507686Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-31T16:21:32.518582Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:21:32.518658Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-31T16:21:34.930098Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-03-31T16:21:34.930315Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-31T16:21:34.930629Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-31T16:21:34.941576Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:21:34.941655Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-31T16:21:37.321403Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:21:37.321463Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-31T16:21:38.494056Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-31T16:21:39.735373Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-03-31T16:21:39.735439Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-03-31T16:21:39.735472Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-03-31T16:21:39.735503Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-03-31T16:21:39.931184Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:21:39.931267Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-31T16:21:41.171824Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-03-31T16:21:41.172228Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-31T16:21:41.172477Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-31T16:21:42.531559Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:21:42.531641Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-31T16:21:44.905028Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-31T16:21:44.915794Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:21:44.915864Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-31T16:21:47.356413Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-03-31T16:21:47.356765Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-31T16:21:47.357004Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-31T16:21:47.367843Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:21:47.367908Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-31T16:21:49.656262Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:21:49.656339Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 
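The aggregator's PropagateStatistics() rounds above repeat on a steady cadence. A sketch (log text again assumed in log_text) that pulls their timestamps for aggregator tablet 72075186224037894 and returns the gap between successive rounds:

import re
from datetime import datetime

def propagation_intervals(log_text):
    stamps = [
        datetime.strptime(m.group(1), "%Y-%m-%dT%H:%M:%S.%fZ")
        for m in re.finditer(
            r"(\S+Z) node 2 :STATISTICS DEBUG: \[72075186224037894\] "
            r"PropagateStatistics\(\)", log_text)
    ]
    return [(b - a).total_seconds() for a, b in zip(stamps, stamps[1:])]

On the stretch above the gaps come out to roughly six seconds (16:21:10.547 to 16:21:16.595 to 16:21:22.790, and so on).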
2025-03-31T16:21:50.775728Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-31T16:21:52.144980Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:21:52.145042Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-31T16:21:53.378164Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-03-31T16:21:53.378318Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-31T16:21:53.378597Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-31T16:21:54.637987Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:21:54.638073Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-31T16:21:55.840963Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 2, at schemeshard: 72075186224037897 2025-03-31T16:21:55.841057Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 237.000000s, at schemeshard: 72075186224037897 2025-03-31T16:21:55.841344Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037897, stats size# 49 ... waiting for TEvSchemeShardStats 2 (done) ... waiting for TEvPropagateStatistics 2025-03-31T16:21:55.856030Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-03-31T16:21:57.017974Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-31T16:21:57.028975Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:21:57.029067Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-31T16:21:59.679706Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-03-31T16:21:59.680142Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 ... 
waiting for TEvPropagateStatistics (done) 2025-03-31T16:21:59.680573Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:13805:7674]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:21:59.680913Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-31T16:21:59.688909Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-03-31T16:21:59.689007Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 3, ReplyToActorId = [2:13805:7674], StatRequests.size() = 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> PQCountersSimple::SupportivePartitionCountersPersist [GOOD] Test command err: 2025-03-31T16:21:04.000191Z :HappyWay INFO: Random seed for debugging is 1743438064000166 2025-03-31T16:21:04.288517Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488009468255859470:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:21:04.288583Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:21:04.327946Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488009471412752029:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:21:04.328045Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:21:04.481742Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-31T16:21:04.480909Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001574/r3tmp/tmp0qQlgS/pdisk_1.dat 2025-03-31T16:21:04.656333Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:21:04.656469Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:21:04.658395Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:21:04.659490Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:21:04.685148Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19097, node 1 2025-03-31T16:21:04.698336Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:21:04.698391Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:21:04.717971Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:21:04.718078Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:21:04.724186Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:21:04.737625Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/0l5c/001574/r3tmp/yandexqo14XO.tmp 2025-03-31T16:21:04.737654Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/0l5c/001574/r3tmp/yandexqo14XO.tmp 2025-03-31T16:21:04.737832Z node 1 :NET_CLASSIFIER WARN: 
successfully initialized from file: /home/runner/.ya/build/build_root/0l5c/001574/r3tmp/yandexqo14XO.tmp 2025-03-31T16:21:04.737998Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:21:04.796172Z INFO: TTestServer started on Port 9401 GrpcPort 19097 TClient is connected to server localhost:9401 PQClient connected to localhost:19097 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:21:05.053596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-03-31T16:21:07.184866Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488009484297654236:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:21:07.184947Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488009484297654227:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:21:07.185298Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:21:07.190927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-03-31T16:21:07.206599Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488009484297654242:2312], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-03-31T16:21:07.279127Z node 2 :TX_PROXY ERROR: Actor# [2:7488009484297654270:2129] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:21:07.560058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:21:07.561202Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488009481140762440:2343], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:21:07.561441Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDk3YWIyY2MtODNlZTJhMTEtODJhMTNkMWItYjVjZjU0ZDg=, ActorId: [1:7488009481140762387:2335], ActorState: ExecuteState, TraceId: 01jqpg3dkhaf29f3s00bhdx2zp, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:21:07.563717Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T16:21:07.563939Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7488009484297654285:2316], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:21:07.564160Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NTZmZWFmMTYtNzUyOWI2YzQtZjVhNjFkM2YtNzdjNzgzYzc=, ActorId: [2:7488009484297654211:2307], ActorState: ExecuteState, TraceId: 01jqpg3dfe8r7yvn922hkd03qb, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:21:07.564666Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T16:21:07.693308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:21:07.832772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:19097", true, true, 1000); 2025-03-31T16:21:07.984851Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01jqpg3e5r5pjqph90xh1vxbyk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGZmODE4MjYtYjIzZmQzOTItNTA1YTY1MDEtMmMzZmEwYzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7488009485435730196:3014] 2025-03-31T16:21:09.288039Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488009468255859470:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:21:09.288123Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:21:09.326003Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488009471412752029:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:21:09.326084Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-03-31T16:21:14.015931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 waiting... 
PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:19097 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-03-31T16:21:14.072068Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:19097 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--te ... 21:47.953901Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:21:54.473595Z node 5 :PERSQUEUE DEBUG: got HasDatainfoResponse 2025-03-31T16:21:54.474129Z node 6 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--topic1' requestId: request-id-0-1 2025-03-31T16:21:54.474184Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037894] got client message batch for topic 'rt3.dc1--topic1' partition 1 2025-03-31T16:21:54.474265Z node 6 :PERSQUEUE ERROR: [PQ: 72075186224037894, Partition: 1, State: StateIdle] reading from too big offset - topic rt3.dc1--topic1 partition 1 client $without_consumer EndOffset 0 offset 1 2025-03-31T16:21:54.474302Z node 6 :PERSQUEUE DEBUG: tablet 72075186224037894 topic 'rt3.dc1--topic1' partition 1 error: trying to read from future. ReadOffset 1, 0 EndOffset 0 2025-03-31T16:21:54.474335Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvPQ::TEvError Cookie 1, Error trying to read from future. ReadOffset 1, 0 EndOffset 0 2025-03-31T16:21:54.474351Z node 6 :PERSQUEUE DEBUG: Answer error topic: 'rt3.dc1--topic1' partition: 1 messageNo: 0 requestId: request-id-0-1 error: trying to read from future. ReadOffset 1, 0 EndOffset 0 2025-03-31T16:21:54.475298Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [5:7488009640280922319:3685] destroyed 2025-03-31T16:21:54.717610Z node 5 :KQP_EXECUTER ERROR: ActorId: [5:7488009683230595873:2639] TxId: 281474976715709. Ctx: { TraceId: 01jqpg4vg463nd8xq7veetgcm3, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=MzFlYjNkYWQtNThjNDE2ZWEtODQ0ZmNiM2EtZmUzYzdkMGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 6 2025-03-31T16:21:54.717806Z node 5 :KQP_COMPUTE ERROR: SelfId: [5:7488009683230595884:2648], TxId: 281474976715709, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jqpg4vg463nd8xq7veetgcm3. SessionId : ydb://session/3?node_id=5&id=MzFlYjNkYWQtNThjNDE2ZWEtODQ0ZmNiM2EtZmUzYzdkMGQ=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [5:7488009683230595873:2639], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2025-03-31T16:21:54.717820Z node 5 :KQP_COMPUTE ERROR: SelfId: [5:7488009683230595886:2649], TxId: 281474976715709, task: 4. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=5&id=MzFlYjNkYWQtNThjNDE2ZWEtODQ0ZmNiM2EtZmUzYzdkMGQ=. TraceId : 01jqpg4vg463nd8xq7veetgcm3. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [5:7488009683230595873:2639], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2025-03-31T16:21:56.548278Z node 7 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:56.548402Z node 7 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-31T16:21:56.575064Z node 7 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:56.576165Z node 7 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [7:198:2213] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 30720 BurstSize: 30720 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-03-31T16:21:56.577033Z node 7 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [7:207:2220] 2025-03-31T16:21:56.581858Z node 7 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [7:207:2220] 2025-03-31T16:21:56.589732Z node 7 :PERSQUEUE INFO: new Cookie default|29e86e78-7a3a251b-7b707071-40802d54_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured kesus quota request event from [7:223:2233] 2025-03-31T16:21:56.600550Z node 7 :PERSQUEUE INFO: new Cookie default|5b882d79-28074364-97788a2c-4f617d1a_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured kesus quota request event from [7:223:2233] 2025-03-31T16:21:56.920292Z node 7 :PERSQUEUE INFO: new Cookie default|f9eebc29-25358aa9-9ad44363-9f44e6fb_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured kesus quota request event from [7:223:2233] 2025-03-31T16:21:57.189066Z node 7 :PERSQUEUE INFO: new Cookie default|ee5d8417-2f46514b-f723a705-8091724c_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured kesus quota request event from [7:223:2233] 2025-03-31T16:21:57.445849Z node 7 :PERSQUEUE INFO: new Cookie default|d4bb41d9-6bafd8b6-20603fb0-e9fef559_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured kesus quota request event from [7:223:2233] 2025-03-31T16:21:57.694518Z node 7 :PERSQUEUE INFO: new Cookie default|2bed8cb8-2bff1d5-9999e5d-dec23b25_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured kesus quota request event from [7:223:2233] **** Total histogram: ****
Interval=0ms: 1
Interval=1ms: 0
Interval=5ms: 0
Interval=10ms: 0
Interval=20ms: 0
Interval=50ms: 0
Interval=100ms: 0
Interval=500ms: 0
Interval=1000ms: 3
Interval=2500ms: 2
Interval=5000ms: 0
Interval=10000ms: 0
Interval=999999ms: 0
**** **** **** **** 2025-03-31T16:21:58.563892Z node 8 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:58.564006Z node 8 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-31T16:21:58.589427Z node 8 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:58.590771Z node 8 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 2 actor [8:198:2213] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2025-03-31T16:21:58.591814Z node 8 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [8:206:2219] 2025-03-31T16:21:58.592977Z node 8 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [8:206:2219] 2025-03-31T16:21:58.594030Z node 8 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [8:207:2220] 2025-03-31T16:21:58.594868Z node 8 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'topic' partition 1 generation 2 [8:207:2220] 2025-03-31T16:21:58.603620Z node 8 :PERSQUEUE INFO: new Cookie default|85d2d8ac-e389ceb3-4c7d6e9c-19ee3421_0 generated for partition 0 topic 'topic' owner default 2025-03-31T16:21:58.611632Z node 8 :PERSQUEUE INFO: new Cookie default|e15f01f5-2ce42376-b34fd31f-a3101b25_1 generated for partition 0 topic 'topic' owner default 2025-03-31T16:21:58.621271Z node 8 :PERSQUEUE INFO: new Cookie default|110d27b5-9ed3f23b-b1aee1eb-aad39a73_2 generated for partition 0 topic 'topic' owner default 2025-03-31T16:21:58.631003Z node 8 :PERSQUEUE INFO: new Cookie default|ece074d-57c3b1c5-6829f14c-14052b3d_3 generated for partition 0 topic 'topic' owner default 2025-03-31T16:21:59.359668Z node 9 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:59.359792Z node 9 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-31T16:21:59.396849Z node 9 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:59.397978Z node 9 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 3 actor [9:198:2213] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 30720 BurstSize: 30720 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 3 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } 
ReadRuleGenerations: 3 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 3 Important: false } 2025-03-31T16:21:59.398889Z node 9 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [9:207:2220] 2025-03-31T16:21:59.403524Z node 9 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [9:207:2220] 2025-03-31T16:21:59.411583Z node 9 :PERSQUEUE INFO: new Cookie default|71aede67-c9113b60-d04adc5b-f9dbf3e9_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured kesus quota request event from [9:223:2233] Captured TEvRequest, cmd write size: 3 Captured TEvRequest, cmd write size: 3 2025-03-31T16:21:59.423292Z node 9 :PERSQUEUE INFO: new Cookie default|5c495c45-4aace7a5-85943e9d-cfb2c6c7_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvRequest, cmd write size: 3 Captured TEvRequest, cmd write size: 3 Captured kesus quota request event from [9:223:2233] 2025-03-31T16:21:59.738884Z node 9 :PERSQUEUE INFO: new Cookie default|9223949f-c79d31a1-ffa5c0f1-bb904126_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured kesus quota request event from [9:223:2233] Captured TEvRequest, cmd write size: 3 Captured TEvRequest, cmd write size: 3 2025-03-31T16:22:00.015417Z node 9 :PERSQUEUE INFO: new Cookie default|704213b7-be108779-bd0bf8e0-4ae63e79_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured kesus quota request event from [9:223:2233] Captured TEvRequest, cmd write size: 3 Captured TEvRequest, cmd write size: 3 Captured TEvRequest, cmd write size: 1 Captured TEvRequest, cmd write size: 1 2025-03-31T16:22:00.282784Z node 9 :PERSQUEUE INFO: new Cookie default|b530614a-2b1ebd79-f2007d3c-be946125_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvRequest, cmd write size: 3 Captured TEvRequest, cmd write size: 3 Captured kesus quota request event from [9:223:2233] 2025-03-31T16:22:00.535768Z node 9 :PERSQUEUE INFO: new Cookie default|d3561b24-84a483d3-ccb28837-4f2547d7_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvRequest, cmd write size: 3 Captured TEvRequest, cmd write size: 3 Captured kesus quota request event from [9:223:2233] >> TColumnShardTestReadWrite::CompactionInGranule_PKInt64_Reboot |90.6%| [TA] $(B)/ydb/core/statistics/service/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> EvWrite::WriteWithSplit [GOOD] Test command err: 2025-03-31T16:21:33.937717Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:21:34.024302Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:21:34.048366Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:21:34.048672Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:21:34.057215Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:21:34.057426Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:21:34.057670Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:21:34.057819Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:21:34.057937Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:21:34.058054Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:21:34.058170Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:21:34.058279Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:21:34.058398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:21:34.058544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:21:34.058653Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:21:34.058748Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:21:34.080961Z node 1 
:TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:21:34.081133Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:21:34.081189Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:21:34.081398Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:21:34.081558Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:21:34.081624Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:21:34.081662Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:21:34.081740Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:21:34.081791Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:21:34.081821Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:21:34.081860Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:21:34.082055Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:21:34.082117Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:21:34.082146Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:21:34.082166Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:21:34.082244Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:21:34.082291Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:21:34.082324Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:21:34.082343Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:21:34.082396Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:21:34.082428Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:21:34.082457Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:21:34.082504Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:21:34.082532Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:21:34.082551Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:21:34.082882Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=45; 2025-03-31T16:21:34.082950Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=26; 2025-03-31T16:21:34.083035Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=36; 2025-03-31T16:21:34.083139Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=65; 2025-03-31T16:21:34.083258Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:21:34.083304Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:21:34.083337Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:21:34.083511Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:21:34.083561Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:21:34.083582Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:21:34.083676Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 
2025-03-31T16:21:34.083705Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:21:34.083732Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-31T16:21:34.083880Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:21:34.083915Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:21:34.083934Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-31T16:21:34.084030Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T16:21:34.084060Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T16:21:34.084098Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 791421Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:867:2884];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:858:2875];bytes=3691800;rows=450;faults=0;finished=0;fault=0;schema=key: uint64 field: string; 2025-03-31T16:22:01.791519Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:867:2884];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:1;records_count:149;schema=key: uint64 field: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-03-31T16:22:01.791630Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:867:2884];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:1;records_count:149;schema=key: uint64 field: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-03-31T16:22:01.791678Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:867:2884];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-31T16:22:01.791717Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:867:2884];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:201;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-03-31T16:22:01.793042Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:867:2884];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-31T16:22:01.793154Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:867:2884];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:1;records_count:149;schema=key: uint64 field: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-03-31T16:22:01.793189Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:867:2884];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-31T16:22:01.793266Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:867:2884];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:232;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;);columns=2;rows=149; 2025-03-31T16:22:01.793314Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:867:2884];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:252;stage=data_format;batch_size=1222396;num_rows=149;batch_columns=key,field; 2025-03-31T16:22:01.793418Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:867:2884];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:858:2875];bytes=1222396;rows=149;faults=0;finished=0;fault=0;schema=key: uint64 field: string; 2025-03-31T16:22:01.793503Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:867:2884];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-03-31T16:22:01.793588Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:867:2884];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-03-31T16:22:01.793717Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:867:2884];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-03-31T16:22:01.794273Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:867:2884];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-31T16:22:01.794379Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:867:2884];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-03-31T16:22:01.794487Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:867:2884];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-03-31T16:22:01.794539Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:867:2884] finished for tablet 9437184 2025-03-31T16:22:01.795073Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:867:2884];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:416;event=scan_finish;compute_actor_id=[1:858:2875];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.004},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.012},{"events":["l_task_result"],"t":0.668},{"events":["f_ack"],"t":19.259},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":19.273}],"full":{"a":1743438102521383,"name":"_full_task","f":1743438102521383,"d_finished":0,"c":0,"l":1743438121794607,"d":19273224},"events":[{"name":"bootstrap","f":1743438102522016,"d_finished":11613,"c":1,"l":1743438102533629,"d":11613},{"a":1743438121794241,"name":"ack","f":1743438121780438,"d_finished":7450,"c":9,"l":1743438121793755,"d":7816},{"a":1743438121794211,"name":"processing","f":1743438102534172,"d_finished":241473,"c":53,"l":1743438121793757,"d":241869},{"name":"ProduceResults","f":1743438102525681,"d_finished":16006,"c":64,"l":1743438121794512,"d":16006},{"a":1743438121794515,"name":"Finish","f":1743438121794515,"d_finished":0,"c":0,"l":1743438121794607,"d":92},{"name":"task_result","f":1743438102534218,"d_finished":232680,"c":44,"l":1743438103190037,"d":232680}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-03-31T16:22:01.795180Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:867:2884];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:858:2875];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 
2025-03-31T16:22:01.795668Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:867:2884];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:371;event=scan_finished;compute_actor_id=[1:858:2875];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.004},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.012},{"events":["l_task_result"],"t":0.668},{"events":["f_ack"],"t":19.259},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":19.273}],"full":{"a":1743438102521383,"name":"_full_task","f":1743438102521383,"d_finished":0,"c":0,"l":1743438121795222,"d":19273839},"events":[{"name":"bootstrap","f":1743438102522016,"d_finished":11613,"c":1,"l":1743438102533629,"d":11613},{"a":1743438121794241,"name":"ack","f":1743438121780438,"d_finished":7450,"c":9,"l":1743438121793755,"d":8431},{"a":1743438121794211,"name":"processing","f":1743438102534172,"d_finished":241473,"c":53,"l":1743438121793757,"d":242484},{"name":"ProduceResults","f":1743438102525681,"d_finished":16006,"c":64,"l":1743438121794512,"d":16006},{"a":1743438121794515,"name":"Finish","f":1743438121794515,"d_finished":0,"c":0,"l":1743438121795222,"d":707},{"name":"task_result","f":1743438102534218,"d_finished":232680,"c":44,"l":1743438103190037,"d":232680}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-03-31T16:22:01.795763Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:867:2884];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-31T16:21:42.520147Z;index_granules=0;index_portions=5;index_batches=2052;committed_batches=0;schema_columns=2;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=17133336;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=17133336;selected_rows=0; 2025-03-31T16:22:01.795815Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:867:2884];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-31T16:22:01.796064Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:867:2884];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadAggregate [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8328;columns=19; 2025-03-31T16:21:58.362808Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:21:58.449865Z node 
1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:21:58.470783Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:21:58.471072Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:21:58.478652Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:21:58.478854Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:21:58.479068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:21:58.479192Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:21:58.479330Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:21:58.479443Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:21:58.479580Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:21:58.479692Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:21:58.479809Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:21:58.479930Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:21:58.480037Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:21:58.480158Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:21:58.508974Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:21:58.509133Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 
2025-03-31T16:21:58.509182Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:21:58.509395Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:21:58.509548Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:21:58.509617Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:21:58.509727Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:21:58.509835Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:21:58.509908Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:21:58.509953Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:21:58.509980Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:21:58.510160Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:21:58.510219Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:21:58.510256Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:21:58.510324Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:21:58.510408Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:21:58.510454Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:21:58.510494Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:21:58.510522Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:21:58.510593Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 
2025-03-31T16:21:58.510631Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:21:58.510661Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:21:58.510727Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:21:58.510765Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:21:58.510791Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:21:58.511174Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=45; 2025-03-31T16:21:58.511253Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=31; 2025-03-31T16:21:58.511351Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=56; 2025-03-31T16:21:58.511449Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=54; 2025-03-31T16:21:58.511606Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:21:58.511653Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:21:58.511685Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:21:58.511876Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:21:58.511915Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:21:58.511958Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:21:58.512100Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:21:58.512140Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:21:58.512171Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-31T16:21:58.512396Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:21:58.512435Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:21:58.512462Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-31T16:21:58.512574Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T16:21:58.512609Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T16:21:58.512658Z node 1 :TX_COLUMNSHARD INFO: tablet_i ... =9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:201;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-03-31T16:22:01.911367Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:84;event=TEvTaskProcessedResult; 2025-03-31T16:22:01.911410Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=merge.cpp:74;event=DoApply;interval_idx=0; 2025-03-31T16:22:01.911457Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=76; 2025-03-31T16:22:01.911506Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=1;merger=0;interval_id=76; 2025-03-31T16:22:01.911550Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=scanner.cpp:65;event=intervals_finished; 2025-03-31T16:22:01.911656Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;); 2025-03-31T16:22:01.911692Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=1;count=1;finished=1; 2025-03-31T16:22:01.911731Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:201;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-03-31T16:22:01.912209Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-31T16:22:01.912341Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:1;records_count:1;schema=100: binary 101: binary 102: binary 103: uint64;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;); 2025-03-31T16:22:01.912407Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-31T16:22:01.912507Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:232;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;);columns=4;rows=1; 2025-03-31T16:22:01.912569Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:252;stage=data_format;batch_size=26;num_rows=1;batch_columns=100,101,102,103; 2025-03-31T16:22:01.912678Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:366;event=send_data;compute_actor_id=[2:434:2452];bytes=26;rows=1;faults=0;finished=0;fault=0;schema=100: binary 101: binary 102: binary 103: uint64; 2025-03-31T16:22:01.912785Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;); 2025-03-31T16:22:01.912893Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;); 2025-03-31T16:22:01.912984Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;); 2025-03-31T16:22:01.913285Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-31T16:22:01.913385Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;); 2025-03-31T16:22:01.913475Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;); 2025-03-31T16:22:01.913510Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: Scan [2:435:2453] finished for tablet 9437184 2025-03-31T16:22:01.913967Z node 2 :TX_COLUMNSHARD_SCAN INFO: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:416;event=scan_finish;compute_actor_id=[2:434:2452];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.003},{"events":["f_ack","l_task_result"],"t":0.012},{"events":["l_ProduceResults","f_Finish"],"t":0.013},{"events":["l_ack","l_processing","l_Finish"],"t":0.014}],"full":{"a":1743438121899559,"name":"_full_task","f":1743438121899559,"d_finished":0,"c":0,"l":1743438121913561,"d":14002},"events":[{"name":"bootstrap","f":1743438121899788,"d_finished":2327,"c":1,"l":1743438121902115,"d":2327},{"a":1743438121913265,"name":"ack","f":1743438121912185,"d_finished":823,"c":1,"l":1743438121913008,"d":1119},{"a":1743438121913249,"name":"processing","f":1743438121903336,"d_finished":6694,"c":10,"l":1743438121913010,"d":7006},{"name":"ProduceResults","f":1743438121901043,"d_finished":2498,"c":13,"l":1743438121913496,"d":2498},{"a":1743438121913498,"name":"Finish","f":1743438121913498,"d_finished":0,"c":0,"l":1743438121913561,"d":63},{"name":"task_result","f":1743438121903355,"d_finished":5700,"c":9,"l":1743438121911783,"d":5700}],"id":"9437184::76"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;); 2025-03-31T16:22:01.914060Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=send_data;compute_actor_id=[2:434:2452];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-31T16:22:01.914490Z node 2 :TX_COLUMNSHARD_SCAN INFO: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:371;event=scan_finished;compute_actor_id=[2:434:2452];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.003},{"events":["f_ack","l_task_result"],"t":0.012},{"events":["l_ProduceResults","f_Finish"],"t":0.013},{"events":["l_ack","l_processing","l_Finish"],"t":0.014}],"full":{"a":1743438121899559,"name":"_full_task","f":1743438121899559,"d_finished":0,"c":0,"l":1743438121914111,"d":14552},"events":[{"name":"bootstrap","f":1743438121899788,"d_finished":2327,"c":1,"l":1743438121902115,"d":2327},{"a":1743438121913265,"name":"ack","f":1743438121912185,"d_finished":823,"c":1,"l":1743438121913008,"d":1669},{"a":1743438121913249,"name":"processing","f":1743438121903336,"d_finished":6694,"c":10,"l":1743438121913010,"d":7556},{"name":"ProduceResults","f":1743438121901043,"d_finished":2498,"c":13,"l":1743438121913496,"d":2498},{"a":1743438121913498,"name":"Finish","f":1743438121913498,"d_finished":0,"c":0,"l":1743438121914111,"d":613},{"name":"task_result","f":1743438121903355,"d_finished":5700,"c":9,"l":1743438121911783,"d":5700}],"id":"9437184::76"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;); 2025-03-31T16:22:01.914583Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-31T16:22:01.899068Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=2;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=16001;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=16001;selected_rows=0; 2025-03-31T16:22:01.914625Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-31T16:22:01.914872Z node 2 :TX_COLUMNSHARD_SCAN INFO: SelfId=[2:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;; |90.6%| [TA] {RESULT} $(B)/ydb/core/statistics/service/ut/test-results/unittest/{meta.json ... 
results_accumulator.log}
>> TColumnShardTestReadWrite::WriteOverload [GOOD]
>> Normalizers::EmptyTablesNormalizer [GOOD]
>> TColumnShardTestReadWrite::WriteReadZSTD
>> TColumnShardTestReadWrite::WriteRead
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::EmptyTablesNormalizer [GOOD]
Test command err:
2025-03-31T16:21:59.878400Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:21:59.960548Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:21:59.982238Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:21:59.982496Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:21:59.989812Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=PortionsCleaner; 2025-03-31T16:21:59.990046Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=NO_VALUE_OPTIONAL; 2025-03-31T16:21:59.990227Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:21:59.990444Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:21:59.990578Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:21:59.990690Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:21:59.990810Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:21:59.990905Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:21:59.990991Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:21:59.991135Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:21:59.991219Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:21:59.991300Z node 1 :TX_COLUMNSHARD WARN:
tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:21:59.991404Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:22:00.018249Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:22:00.018419Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=PortionsCleaner; 2025-03-31T16:22:00.018466Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=NO_VALUE_OPTIONAL;type=NO_VALUE_OPTIONAL; 2025-03-31T16:22:00.018869Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=47; 2025-03-31T16:22:00.018944Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=35; 2025-03-31T16:22:00.019006Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=30; 2025-03-31T16:22:00.019101Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=38; 2025-03-31T16:22:00.019248Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=PortionsCleaner;id=NO_VALUE_OPTIONAL; 2025-03-31T16:22:00.019328Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Granules;id=Granules; 2025-03-31T16:22:00.019370Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=1;type=Granules; 2025-03-31T16:22:00.019512Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:22:00.019590Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:22:00.019675Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:22:00.019701Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=2;type=Chunks; 2025-03-31T16:22:00.019776Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:22:00.019832Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:22:00.019870Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:22:00.019894Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=4;type=TablesCleaner; 2025-03-31T16:22:00.020042Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:22:00.020096Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:22:00.020127Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:22:00.020149Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=6;type=CleanGranuleId; 2025-03-31T16:22:00.020236Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:22:00.020287Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:22:00.020317Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:22:00.020352Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:22:00.020436Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:22:00.020462Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:22:00.020483Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:22:00.020527Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:22:00.020571Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:22:00.020624Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:22:00.020958Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=26; 2025-03-31T16:22:00.021022Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=23; 2025-03-31T16:22:00.021074Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=19; 2025-03-31T16:22:00.021152Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=34; 2025-03-31T16:22:00.021295Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:22:00.021352Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:22:00.021392Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:22:00.021576Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:22:00.021628Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:22:00.021662Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:22:00.021815Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpda ... p:1002;background=ttl;skip_reason=no_changes; 2025-03-31T16:22:03.078427Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:22:03.078796Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:404:2417];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-03-31T16:22:03.078945Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:404:2417];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-03-31T16:22:03.078994Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
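Editorial aside: the entries around this point trace the columnshard's periodic-wakeup loop. A TEvPeriodicWakeup drives EnqueueBackgroundActivities, which probes each background job (indexation, cleanup, ttl) and logs skip_reason=no_changes when there is nothing to do. A minimal sketch of that shape, with invented names (TActivity and this EnqueueBackgroundActivities are illustrative, not the YDB types):

```cpp
// Sketch of a wakeup-driven background scheduler: each activity is
// probed for pending work and skipped when it reports no changes,
// mirroring the background=cleanup;skip_reason=no_changes entries.
#include <functional>
#include <iostream>
#include <string>
#include <vector>

struct TActivity {
    std::string Name;
    std::function<bool()> HasChanges; // probe, e.g. portions_count > 0
    std::function<void()> Run;
};

void EnqueueBackgroundActivities(const std::vector<TActivity>& activities) {
    for (const auto& a : activities) {
        if (!a.HasChanges()) {
            std::cout << "background=" << a.Name << ";skip_reason=no_changes\n";
            continue;
        }
        a.Run();
    }
}

int main() {
    const std::vector<TActivity> activities = {
        {"cleanup", [] { return false; }, [] { std::cout << "cleanup ran\n"; }},
        {"ttl",     [] { return false; }, [] { std::cout << "ttl ran\n"; }},
    };
    // One simulated TEvPeriodicWakeup tick: nothing is pending, so both
    // activities are skipped, as in the log above.
    EnqueueBackgroundActivities(activities);
    return 0;
}
```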
2025-03-31T16:22:03.079029Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-03-31T16:22:03.079122Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:404:2417];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-31T16:22:03.079226Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:404:2417];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:22:03.079327Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:404:2417];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=0; 2025-03-31T16:22:03.079431Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:404:2417];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-31T16:22:03.079511Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:404:2417];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-31T16:22:03.079566Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:404:2417];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-31T16:22:03.079689Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:404:2417];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-31T16:22:03.226463Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 111 scanId: 0 version: {11:111} readable: {11:max} at tablet 9437184 2025-03-31T16:22:03.226656Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 111 scanId: 0 at tablet 9437184 2025-03-31T16:22:03.226892Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:404:2417];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={11:111};tablet=9437184;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 1 } Columns { Id: 2 } Columns { Id: 3 } } } ; 2025-03-31T16:22:03.227012Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:404:2417];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={11:111};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } Columns { Id: 2 } Columns { Id: 3 } } } ; 2025-03-31T16:22:03.228008Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:404:2417];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={11:111};tablet=9437184;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":7,"inputs":[]},{"owner_id":0,"inputs":[{"from":2},{"from":4},{"from":6}]},{"owner_id":2,"inputs":[{"from":7}]},{"owner_id":4,"inputs":[{"from":7}]},{"owner_id":6,"inputs":[{"from":7}]}],"nodes":{"2":{"p":{"i":"1","p":{"address":{"name":"key1","id":1}},"o":"1","t":"AssembleOriginalData"},"w":11,"id":2},"6":{"p":{"i":"3","p":{"address":{"name":"field","id":3}},"o":"3","t":"AssembleOriginalData"},"w":11,"id":6},"7":{"p":{"p":{"data":[{"name":"key1","id":1},{"name":"key2","id":2},{"name":"field","id":3}]},"o":"1,2,3","t":"FetchOriginalData"},"w":6,"id":7},"4":{"p":{"i":"2","p":{"address":{"name":"key2","id":2}},"o":"2","t":"AssembleOriginalData"},"w":11,"id":4},"0":{"p":{"i":"1,2,3","t":"Projection"},"w":33,"id":0}}}; 2025-03-31T16:22:03.228156Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: tablet_id=9437184;self_id=[1:404:2417];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={11:111};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:136;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-03-31T16:22:03.228878Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: tablet_id=9437184;self_id=[1:404:2417];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={11:111};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:162;event=TTxScan started;actor_id=[1:465:2470];trace_detailed=; 2025-03-31T16:22:03.229666Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: fline=context.cpp:84;ff_first=(column_ids=1,2,3;column_names=field,key1,key2;);; 2025-03-31T16:22:03.229912Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: fline=context.cpp:99;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;; 2025-03-31T16:22:03.230289Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:465:2470];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-31T16:22:03.230440Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:465:2470];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-03-31T16:22:03.230581Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:465:2470];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-03-31T16:22:03.230654Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:465:2470] finished for tablet 9437184 2025-03-31T16:22:03.231130Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:465:2470];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:416;event=scan_finish;compute_actor_id=[1:463:2469];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","l_ack","f_processing","l_processing","f_ProduceResults","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1743438123228788,"name":"_full_task","f":1743438123228788,"d_finished":0,"c":0,"l":1743438123230718,"d":1930},"events":[{"name":"bootstrap","f":1743438123229048,"d_finished":1038,"c":1,"l":1743438123230086,"d":1038},{"a":1743438123230259,"name":"ack","f":1743438123230259,"d_finished":0,"c":0,"l":1743438123230718,"d":459},{"a":1743438123230241,"name":"processing","f":1743438123230241,"d_finished":0,"c":0,"l":1743438123230718,"d":477},{"name":"ProduceResults","f":1743438123230070,"d_finished":299,"c":2,"l":1743438123230613,"d":299},{"a":1743438123230620,"name":"Finish","f":1743438123230620,"d_finished":0,"c":0,"l":1743438123230718,"d":98}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-03-31T16:22:03.231227Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:465:2470];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:463:2469];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-31T16:22:03.231649Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:465:2470];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:371;event=scan_finished;compute_actor_id=[1:463:2469];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","f_ProduceResults","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":1743438123228788,"name":"_full_task","f":1743438123228788,"d_finished":0,"c":0,"l":1743438123231275,"d":2487},"events":[{"name":"bootstrap","f":1743438123229048,"d_finished":1038,"c":1,"l":1743438123230086,"d":1038},{"a":1743438123230259,"name":"ack","f":1743438123230259,"d_finished":0,"c":0,"l":1743438123231275,"d":1016},{"a":1743438123230241,"name":"processing","f":1743438123230241,"d_finished":0,"c":0,"l":1743438123231275,"d":1034},{"name":"ProduceResults","f":1743438123230070,"d_finished":299,"c":2,"l":1743438123230613,"d":299},{"a":1743438123230620,"name":"Finish","f":1743438123230620,"d_finished":0,"c":0,"l":1743438123231275,"d":655}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-03-31T16:22:03.231750Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:465:2470];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-31T16:22:03.228114Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-03-31T16:22:03.231813Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:465:2470];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-31T16:22:03.231934Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:465:2470];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64;records=0;count=20048; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64;records=0;count=20048; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:49;event=insert_to_cache;key=string;records=0;size=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=string;records=0;count=0;
>> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_BadSourceId_Test [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteOverload [GOOD]
Test command err:
2025-03-31T16:21:34.438791Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:21:34.525079Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:21:34.549222Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:21:34.549518Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:21:34.557469Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:21:34.557680Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:21:34.557933Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:21:34.558069Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:21:34.558175Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId;
2025-03-31T16:21:34.558283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:21:34.558399Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:21:34.558532Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:21:34.558672Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:21:34.558781Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:21:34.558883Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:21:34.559017Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:21:34.587622Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:21:34.587800Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:21:34.587848Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:21:34.588047Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:21:34.588240Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:21:34.588312Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:21:34.588354Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:21:34.588446Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:21:34.588503Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:21:34.588541Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 
2025-03-31T16:21:34.588568Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:21:34.588781Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:21:34.588869Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:21:34.588918Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:21:34.588950Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:21:34.589062Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:21:34.589117Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:21:34.589171Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:21:34.589203Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:21:34.589270Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:21:34.589309Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:21:34.589345Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:21:34.589406Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:21:34.589440Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:21:34.589467Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:21:34.589869Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=46; 2025-03-31T16:21:34.589947Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=31; 2025-03-31T16:21:34.590047Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=35; 
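Editorial aside: the normalizer entries repeated through these test logs follow one fixed shape per normalizer: normalizer_switched names the class, normalizer_init records its seq_id, and normalizer_finished closes it before the next class is switched in, ending with normalization_finished. A toy model of that sequencing, with class names and seq_ids copied from the log (the runner itself is a sketch, not the abstract.cpp implementation):

```cpp
// Toy sequential normalizer chain: runs each normalizer in order and
// emits init/finished events keyed by seq_id, like TTxUpdateSchema does.
#include <iostream>
#include <string>
#include <vector>

struct TNormalizer {
    int SeqId;
    std::string ClassName;
};

void RunNormalizers(const std::vector<TNormalizer>& chain) {
    for (const auto& n : chain) {
        std::cout << "event=normalizer_switched;description=CLASS_NAME="
                  << n.ClassName << ";id=" << n.ClassName << ";\n";
        std::cout << "event=normalizer_init;seq_id=" << n.SeqId
                  << ";type=" << n.ClassName << ";\n";
        // ... the normalizer's repair work would run here ...
        std::cout << "event=normalizer_finished;description=CLASS_NAME="
                  << n.ClassName << ";id=" << n.SeqId << ";\n";
    }
    std::cout << "event=normalization_finished;\n";
}

int main() {
    // seq_ids as they appear in the log; the gaps (3, 5, 7, 12, 14)
    // presumably belong to normalizers not registered in this run.
    RunNormalizers({{1, "Granules"}, {2, "Chunks"}, {4, "TablesCleaner"},
                    {6, "CleanGranuleId"}, {8, "CleanInsertionDedup"},
                    {9, "GCCountersNormalizer"}, {10, "RestorePortionFromChunks"},
                    {11, "SyncPortionFromChunks"}, {13, "SyncMinSnapshotFromChunks"},
                    {15, "RestoreV1Chunks_V2"}, {16, "RestoreV2Chunks"}});
    return 0;
}
```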
2025-03-31T16:21:34.590118Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=33; 2025-03-31T16:21:34.590299Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:21:34.590362Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:21:34.590397Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:21:34.590615Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:21:34.590679Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:21:34.590716Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:21:34.590868Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:21:34.590916Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:21:34.590944Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-31T16:21:34.591178Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:21:34.591225Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:21:34.591255Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-31T16:21:34.591383Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T16:21:34.591428Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T16:21:34.591476Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
BlobManager at tablet 9437184 Save Batch GenStep: 2:4 Blob count: 2 2025-03-31T16:21:59.368633Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=7;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-03-31T16:21:59.370860Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 5 at tablet 9437184 2025-03-31T16:21:59.419962Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:5 Blob count: 2 2025-03-31T16:21:59.450124Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:5 Blob count: 2 2025-03-31T16:21:59.450283Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=8;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-03-31T16:21:59.477214Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 6 at tablet 9437184 2025-03-31T16:21:59.535161Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:6 Blob count: 2 2025-03-31T16:21:59.555801Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:6 Blob count: 2 2025-03-31T16:21:59.555945Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=9;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-03-31T16:21:59.576352Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 7 at tablet 9437184 2025-03-31T16:21:59.623357Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:7 Blob count: 2 2025-03-31T16:21:59.651937Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:7 Blob count: 2 2025-03-31T16:21:59.652113Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=10;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-03-31T16:21:59.654377Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 8 at tablet 9437184 2025-03-31T16:21:59.698817Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:8 Blob count: 2 2025-03-31T16:21:59.727302Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:8 Blob count: 2 2025-03-31T16:21:59.727457Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=11;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-03-31T16:21:59.729663Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 9 at tablet 9437184 2025-03-31T16:21:59.780767Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:9 Blob count: 2 2025-03-31T16:21:59.889932Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:9 Blob count: 2 2025-03-31T16:21:59.890128Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=12;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-03-31T16:21:59.892729Z node 1 
:TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 10 at tablet 9437184 2025-03-31T16:21:59.943143Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:10 Blob count: 2 2025-03-31T16:22:00.110905Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:10 Blob count: 2 2025-03-31T16:22:00.111545Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=13;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-03-31T16:22:00.232429Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 11 at tablet 9437184 2025-03-31T16:22:00.291431Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:11 Blob count: 2 2025-03-31T16:22:00.324491Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:11 Blob count: 2 2025-03-31T16:22:00.324685Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=14;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-03-31T16:22:00.327434Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 12 at tablet 9437184 2025-03-31T16:22:00.366175Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:12 Blob count: 2 2025-03-31T16:22:00.385728Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:12 Blob count: 2 2025-03-31T16:22:00.385896Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=15;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-03-31T16:22:00.404594Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 13 at tablet 9437184 2025-03-31T16:22:00.454931Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:13 Blob count: 2 2025-03-31T16:22:00.485835Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:13 Blob count: 2 2025-03-31T16:22:00.486015Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=16;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-03-31T16:22:00.488036Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 14 at tablet 9437184 2025-03-31T16:22:00.537718Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:14 Blob count: 2 2025-03-31T16:22:00.571577Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:14 Blob count: 2 2025-03-31T16:22:00.571752Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=17;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-03-31T16:22:00.573923Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 15 at tablet 9437184 2025-03-31T16:22:00.632581Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:15 Blob count: 2 2025-03-31T16:22:00.653271Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:15 Blob count: 2 
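Editorial aside: the RESEND TEvWrite markers in this block are the WriteOverload scenario itself: the test keeps resending a write while the shard reports overload, and stops once one is accepted ("CATCH TEvWrite, status OK" further below). A stripped-down sketch of that retry loop, with invented names (EWriteStatus and SendWrite are stand-ins, not the actual TEvWrite protocol):

```cpp
// Resend-until-accepted loop: a fake shard rejects the first attempt of
// each write as overloaded and accepts the retry, so every write logs
// one RESEND before it is caught with status OK.
#include <cstdio>

enum class EWriteStatus { Overloaded, Ok };

// Stand-in for sending TEvWrite to the tablet.
EWriteStatus SendWrite(int writeId, int attempt) {
    return attempt == 0 ? EWriteStatus::Overloaded : EWriteStatus::Ok;
}

int main() {
    for (int writeId = 14; writeId <= 16; ++writeId) {
        for (int attempt = 0;; ++attempt) {
            if (SendWrite(writeId, attempt) == EWriteStatus::Ok) {
                std::printf("CATCH TEvWrite, status OK (writeId %d)\n", writeId);
                break;
            }
            std::printf("RESEND TEvWrite (writeId %d)\n", writeId);
        }
    }
    return 0;
}
```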
2025-03-31T16:22:00.653444Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=18;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-03-31T16:22:00.666035Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 16 at tablet 9437184 2025-03-31T16:22:00.706364Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:16 Blob count: 2 2025-03-31T16:22:00.729667Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:16 Blob count: 2 2025-03-31T16:22:00.729842Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=19;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-03-31T16:22:00.732001Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 17 at tablet 9437184 2025-03-31T16:22:00.787535Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:17 Blob count: 2 2025-03-31T16:22:00.830536Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:17 Blob count: 2 2025-03-31T16:22:00.830716Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=20;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-03-31T16:22:00.833088Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 18 at tablet 9437184 2025-03-31T16:22:00.884699Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:18 Blob count: 2 2025-03-31T16:22:00.904903Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:18 Blob count: 2 2025-03-31T16:22:00.905097Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=21;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-03-31T16:22:00.919243Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 19 at tablet 9437184 2025-03-31T16:22:00.970364Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:19 Blob count: 2 2025-03-31T16:22:00.990999Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:19 Blob count: 2 2025-03-31T16:22:00.991193Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=22;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-03-31T16:22:00.993321Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 20 at tablet 9437184 2025-03-31T16:22:01.044427Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:20 Blob count: 2 2025-03-31T16:22:01.228050Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:20 Blob count: 2 2025-03-31T16:22:01.228236Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=23;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-03-31T16:22:01.311812Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 
writeId 21 at tablet 9437184 2025-03-31T16:22:01.364718Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:21 Blob count: 2 2025-03-31T16:22:01.392427Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:21 Blob count: 2 2025-03-31T16:22:01.392597Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=24;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:22:02.519666Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];fline=actor.cpp:22;event=flush_writing;size=6330728;count=1; CATCH TEvWrite, status OK 2025-03-31T16:22:02.581397Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 22 at tablet 9437184 2025-03-31T16:22:02.635833Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:22 Blob count: 2 2025-03-31T16:22:02.660237Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:22 Blob count: 2 2025-03-31T16:22:02.660425Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=25;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; >> TColumnShardTestReadWrite::CompactionSplitGranule_PKTimestamp >> TColumnShardTestReadWrite::WriteExoticTypes >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_NotBoundary_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_OtherPartition_Test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_BadSourceId_Test [GOOD] Test command err: 2025-03-31T16:21:05.455998Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488009474721509501:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:21:05.456145Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:21:05.484423Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488009475784147877:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:21:05.485266Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:21:05.623156Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-31T16:21:05.636623Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00156d/r3tmp/tmpRJajQJ/pdisk_1.dat 2025-03-31T16:21:05.815789Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:21:05.815887Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:21:05.818702Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:21:05.819522Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:21:05.854478Z node 
1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:21:05.854558Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 13758, node 1 2025-03-31T16:21:05.862813Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:21:05.862845Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:21:05.865630Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:21:05.882270Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:21:05.900975Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/0l5c/00156d/r3tmp/yandex2wJJNU.tmp 2025-03-31T16:21:05.901015Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/0l5c/00156d/r3tmp/yandex2wJJNU.tmp 2025-03-31T16:21:05.901204Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/0l5c/00156d/r3tmp/yandex2wJJNU.tmp 2025-03-31T16:21:05.901402Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:21:05.948922Z INFO: TTestServer started on Port 65048 GrpcPort 13758 TClient is connected to server localhost:65048 PQClient connected to localhost:13758 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:21:06.174574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:21:06.216195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-03-31T16:21:08.208738Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488009488669050086:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:21:08.208811Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488009488669050066:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:21:08.208961Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:21:08.214812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710657:3, at schemeshard: 72057594046644480 2025-03-31T16:21:08.233133Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488009488669050093:2312], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710657 completed, doublechecking } 2025-03-31T16:21:08.337181Z node 2 :TX_PROXY ERROR: Actor# [2:7488009488669050121:2129] txid# 281474976710658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:21:08.666293Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7488009488669050136:2316], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:21:08.666388Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488009487606412626:2347], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:21:08.666595Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=N2RjN2MwZmYtZTkwYTA0MjAtYWFlZTkxODMtZWFkYTRjMWE=, ActorId: [1:7488009487606412603:2340], ActorState: ExecuteState, TraceId: 01jqpg3epr6m696mbt168fb7tr, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:21:08.666604Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NjQ2ZDU5YjktZDllZjQ5NTAtNzkyMDY5OWUtNWE4NTI2MzQ=, ActorId: [2:7488009488669050062:2307], ActorState: ExecuteState, TraceId: 01jqpg3efe55p2j8n04q82vfb9, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:21:08.668099Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T16:21:08.668149Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T16:21:08.668480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:21:08.746417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:21:08.818290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-03-31T16:21:08.993610Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jqpg3f55dn2e7h5zdr91yntr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGZmYTkzNGMtNDQ4YTE0MDAtYjA3NmUyZS0yZDU1ZGI5OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. 
Subcribe to ClusterTracker from [1:7488009491901380301:3083] 2025-03-31T16:21:10.455201Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488009474721509501:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:21:10.455267Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:21:10.484380Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488009475784147877:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:21:10.484442Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-03-31T16:21:15.194608Z node 1 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2025-03-31T16:21:15.194641Z node 1 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint ... PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:21:51.336761Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7488009671359895549:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:21:51.336843Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:21:51.366265Z node 7 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-31T16:21:51.373548Z node 8 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00156d/r3tmp/tmp5oSRV2/pdisk_1.dat 2025-03-31T16:21:51.510428Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:21:51.560965Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:21:51.561079Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:21:51.563991Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 8 Cookie 8 2025-03-31T16:21:51.564934Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 65049, node 7 2025-03-31T16:21:51.595000Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:21:51.595132Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:21:51.603337Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:21:51.630328Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/0l5c/00156d/r3tmp/yandexv8Z1bE.tmp 
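The TTableHelper statements quoted in these traces are ordinary parameterized YQL against the `//Root/.metadata/TopicPartitionsMapping` table. For reference, here is a minimal sketch of issuing the same lookup from the YDB Python SDK. The endpoint is an assumption (the test talks to an in-process server), and the parameter values are the illustrative Hash/SourceId pair that appears in the "Run query" section further below.

import ydb

# Assumed connection details; not taken from the test run above.
driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")
driver.wait(fail_fast=True, timeout=5)

# Verbatim SelectQuery from the TPartitionChooser trace above.
SELECT_MAPPING = """
--!syntax_v1
DECLARE $Hash AS Uint64;
DECLARE $Topic AS Utf8;
DECLARE $SourceId AS Utf8;
SELECT Partition, CreateTime, AccessTime, SeqNo
FROM `//Root/.metadata/TopicPartitionsMapping`
WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId;
"""

def select_mapping(session):
    # Preparing the query lets the server type-check the DECLAREd parameters.
    prepared = session.prepare(SELECT_MAPPING)
    return session.transaction(ydb.SerializableReadWrite()).execute(
        prepared,
        {
            "$Hash": 11131928866524144434,  # value seen in the "Run query" UPSERT below
            "$Topic": "Root",
            "$SourceId": "A_Source_5",
        },
        commit_tx=True,
    )

pool = ydb.SessionPool(driver)
try:
    result_sets = pool.retry_operation_sync(select_mapping)
    for row in result_sets[0].rows:
        print(row.Partition, row.SeqNo)
finally:
    pool.stop()
    driver.stop()

retry_operation_sync mirrors the retry behavior visible throughout this log, where transient NOT_FOUND and scheme errors are expected while the metadata tables are still being created.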
2025-03-31T16:21:51.630362Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/0l5c/00156d/r3tmp/yandexv8Z1bE.tmp 2025-03-31T16:21:51.630541Z node 7 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/0l5c/00156d/r3tmp/yandexv8Z1bE.tmp 2025-03-31T16:21:51.630720Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:21:51.709057Z INFO: TTestServer started on Port 11198 GrpcPort 65049 TClient is connected to server localhost:11198 PQClient connected to localhost:65049 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:21:51.998492Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:21:52.037805Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-03-31T16:21:55.406790Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488009689585653954:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:21:55.406836Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488009689585653940:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:21:55.406927Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:21:55.410735Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-03-31T16:21:55.413341Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488009689585654001:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:21:55.413466Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:21:55.433287Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7488009689585653969:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-03-31T16:21:55.451855Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:21:55.505447Z node 7 :TX_PROXY ERROR: Actor# [7:7488009689585654105:2840] txid# 281474976710664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:21:55.539869Z node 7 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [7:7488009689585654123:2359], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:21:55.540165Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=NGViNDBhODYtODk0YjcyNi1hZGI0MGZiMy01NTk5NzkxOQ==, ActorId: [7:7488009689585653937:2342], ActorState: ExecuteState, TraceId: 01jqpg4wjc80h9zydyejx7pabr, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:21:55.540794Z node 7 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T16:21:55.626804Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:21:55.756533Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-03-31T16:21:56.118535Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jqpg4x0d07k14b3jfrrtbjve, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=M2Q0ZDVlZGUtOGEzYWQ5YzUtMjZhMTRiYWMtNzM2YWUzNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [7:7488009693880621734:3090] 2025-03-31T16:21:56.289480Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7488009672405783599:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:21:56.289563Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:21:56.337097Z node 8 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[8:7488009671359895549:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:21:56.337185Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2025-03-31T16:22:01.284974Z node 7 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [7:7488009715355458578:3296] (SourceId=base64:a***, PreferedPartition=(NULL)) Start idle 2025-03-31T16:22:01.285059Z node 7 :PQ_PARTITION_CHOOSER INFO: TPartitionChooser [7:7488009715355458578:3296] (SourceId=base64:a***, PreferedPartition=(NULL)) ReplyError: Bad SourceId Received TEvChooseError: Bad SourceId 2025-03-31T16:22:02.710672Z node 7 :KQP_EXECUTER ERROR: ActorId: [7:7488009719650425972:2469] TxId: 281474976710678. Ctx: { TraceId: 01jqpg53935wq23b5qb4d2jh1g, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=MmQzMTY5NTAtYWI1M2M4YjMtOTRmMjc4NTEtODc1ZWE3YTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 8 2025-03-31T16:22:02.711128Z node 7 :KQP_COMPUTE ERROR: SelfId: [7:7488009719650425984:2480], TxId: 281474976710678, task: 4. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=7&id=MmQzMTY5NTAtYWI1M2M4YjMtOTRmMjc4NTEtODc1ZWE3YTM=. TraceId : 01jqpg53935wq23b5qb4d2jh1g. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [7:7488009719650425972:2469], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2025-03-31T16:22:02.711154Z node 7 :KQP_COMPUTE ERROR: SelfId: [7:7488009719650425983:2479], TxId: 281474976710678, task: 2. Ctx: { TraceId : 01jqpg53935wq23b5qb4d2jh1g. SessionId : ydb://session/3?node_id=7&id=MmQzMTY5NTAtYWI1M2M4YjMtOTRmMjc4NTEtODc1ZWE3YTM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [7:7488009719650425972:2469], status: UNAVAILABLE, reason: {
: Error: Terminate execution }
>> TColumnShardTestReadWrite::WriteReadExoticTypes
>> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32_Reboot
>> KqpPg::SelectIndex+useSink [GOOD]
>> KqpPg::SelectIndex-useSink
>> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionNotExists_Test [GOOD]
>> TPartitionGraphTest::BuildGraph [GOOD]
>> TColumnShardTestReadWrite::CompactionInGranule_PKUInt64
>> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_BadSourceId_Test [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionGraphTest::BuildGraph [GOOD]
Test command err:
2025-03-31T16:21:02.171660Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488009460936877732:2073];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:21:02.171743Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
2025-03-31T16:21:02.204368Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488009461340303809:2072];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:21:02.204472Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
2025-03-31T16:21:02.360356Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0015ab/r3tmp/tmp44LIlE/pdisk_1.dat
2025-03-31T16:21:02.361242Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created
2025-03-31T16:21:02.564177Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:21:02.576897Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:21:02.576994Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:21:02.578285Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:21:02.578344Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:21:02.583426Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2
2025-03-31T16:21:02.585332Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-31T16:21:02.585538Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 20692, node 1
2025-03-31T16:21:02.760608Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/0l5c/0015ab/r3tmp/yandexPsy4tr.tmp
2025-03-31T16:21:02.760673Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/0l5c/0015ab/r3tmp/yandexPsy4tr.tmp
2025-03-31T16:21:02.762030Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/0l5c/0015ab/r3tmp/yandexPsy4tr.tmp
2025-03-31T16:21:02.762186Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
2025-03-31T16:21:02.982940Z INFO: TTestServer started on Port 24121 GrpcPort 20692
TClient is connected to server localhost:24121 
PQClient connected to localhost:20692 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:21:03.286487Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:21:03.335046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-03-31T16:21:05.175699Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009473821780784:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:21:05.175994Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009473821780796:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:21:05.176645Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:21:05.185382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2025-03-31T16:21:05.186669Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009473821780831:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:21:05.186735Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:21:05.244725Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488009473821780800:2347], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-03-31T16:21:05.346309Z node 1 :TX_PROXY ERROR: Actor# [1:7488009473821780880:2812] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:21:05.691192Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7488009474225206036:2314], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:21:05.691499Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NzcxMWI3NWMtZGM5OGI0ZDktM2Y2YmEyNTctYjAwMTYzYTA=, ActorId: [2:7488009474225205987:2307], ActorState: ExecuteState, TraceId: 01jqpg3bsx8h10a12993v1rrfj, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:21:05.691191Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488009473821780898:2353], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:21:05.693018Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjQ3YTE2NTctYTE5Yzg5MDEtYWMyMzYzLTQ0OTRlNDA4, ActorId: [1:7488009473821780766:2340], ActorState: ExecuteState, TraceId: 01jqpg3bfravf9n52028zjbwww, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:21:05.694266Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T16:21:05.694304Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T16:21:05.699705Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:21:05.784209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:21:05.865163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-03-31T16:21:06.150097Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jqpg3c9s7rcscht2dh64wyeh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTc3Yjc1ZWYtOTM3NTEzMWQtMzI3N2VmZTctMmFlZDA1MzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. 
Subcribe to ClusterTracker from [1:7488009478116748570:3090] 2025-03-31T16:21:07.171436Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488009460936877732:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:21:07.171492Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:21:07.204738Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488009461340303809:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:21:07.204828Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-03-31T16:21:12.101908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTa ... stsActor;event=timeout;self_id=[8:7488009664270773106:2123];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:21:54.312074Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-03-31T16:21:59.248757Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:1, at schemeshard: 72057594046644480 2025-03-31T16:22:00.097778Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715679:0, at schemeshard: 72057594046644480 2025-03-31T16:22:00.798949Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715684:0, at schemeshard: 72057594046644480 2025-03-31T16:22:01.638218Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715688:0, at schemeshard: 72057594046644480 2025-03-31T16:22:02.378104Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715694:0, at schemeshard: 72057594046644480 2025-03-31T16:22:03.124186Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715697:0, at schemeshard: 72057594046644480 Run query: --!syntax_v1 UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES (11131928866524144434, "Root", "00415F536F757263655F35", 1743438123868, 1743438123868, 0, 13); 2025-03-31T16:22:03.985626Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715703. Ctx: { TraceId: 01jqpg54vmdwmph883j9v9xw5e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NmI0NDIxOTEtZjI1Mjk0NmQtYWUzNzZkMWMtNTQ2ZDJiNTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-31T16:22:04.002404Z node 7 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2025-03-31T16:22:04.002432Z node 7 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-03-31T16:22:04.002444Z node 7 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-03-31T16:22:04.002469Z node 7 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [7:7488009727184971458:3977] (SourceId=A_Source_5, PreferedPartition=(NULL)) GetOwnershipFast Partition=1 TabletId=1001 2025-03-31T16:22:04.002656Z node 7 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 269877760, Sender [7:7488009727184971459:3977], Recipient [7:7488009705710133868:3326]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 1001 Status: OK ServerId: [7:7488009727184971458:3977] Leader: 1 Dead: 0 Generation: 1 VersionInfo: } 2025-03-31T16:22:04.002783Z node 7 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 271188557, Sender [7:7488009727184971458:3977], Recipient [7:7488009705710133868:3326]: NKikimrPQ.TEvCheckPartitionStatusRequest Partition: 1 SourceId: "A_Source_5" 2025-03-31T16:22:04.002864Z node 7 :PQ_PARTITION_CHOOSER TRACE: StateOwnershipFast, received event# 271188558, Sender [7:7488009705710133868:3326], Recipient [7:7488009727184971458:3977]: NKikimrPQ.TEvCheckPartitionStatusResponse Status: Active 2025-03-31T16:22:04.002905Z node 7 :PQ_PARTITION_CHOOSER TRACE: TPartitionChooser [7:7488009727184971458:3977] (SourceId=A_Source_5, PreferedPartition=(NULL)) InitTable: SourceId=A_Source_5 TopicsAreFirstClassCitizen=1 UseSrcIdMetaMappingInFirstClass=1 2025-03-31T16:22:04.002994Z node 7 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 65543, Sender [7:7488009727184971458:3977], Recipient [7:7488009705710133868:3326]: NActors::TEvents::TEvPoison 2025-03-31T16:22:04.003054Z node 7 :PQ_PARTITION_CHOOSER TRACE: StateInitTable, received event# 277020685, Sender [7:7488009662760458849:2070], Recipient [7:7488009727184971458:3977]: NKikimr::NMetadata::NProvider::TEvManagerPrepared 2025-03-31T16:22:04.003096Z node 7 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [7:7488009727184971458:3977] (SourceId=A_Source_5, PreferedPartition=(NULL)) StartKqpSession 2025-03-31T16:22:04.006195Z node 7 :PQ_PARTITION_CHOOSER TRACE: StateCreateKqpSession, received event# 271646728, Sender [7:7488009662760459074:2280], Recipient [7:7488009727184971458:3977]: NKikimrKqp.TEvCreateSessionResponse Error: "" Response { SessionId: 
"ydb://session/3?node_id=7&id=NDIwZWFlMWYtYzgyMTlhMmQtZGQ3ZDk0MDAtMjk0NWM3Njk=" NodeId: 7 } YdbStatus: SUCCESS ResourceExhausted: false 2025-03-31T16:22:04.006237Z node 7 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [7:7488009727184971458:3977] (SourceId=A_Source_5, PreferedPartition=(NULL)) Select from the table 2025-03-31T16:22:04.184761Z node 7 :PQ_PARTITION_CHOOSER TRACE: StateSelect, received event# 271646721, Sender [7:7488009662760459074:2280], Recipient [7:7488009727184971458:3977]: NKikimrKqp.TEvQueryResponse Response { SessionId: "ydb://session/3?node_id=7&id=NDIwZWFlMWYtYzgyMTlhMmQtZGQ3ZDk0MDAtMjk0NWM3Njk=" PreparedQuery: "ec564efe-52e22edb-61073034-5df3d985" QueryParameters { Name: "$Hash" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Topic" Type { Kind: Data Data { Scheme: 4608 } } } QueryParameters { Name: "$SourceId" Type { Kind: Data Data { Scheme: 4608 } } } TxMeta { id: "01jqpg554hfce3cyqtdb5w8zrj" } YdbResults { columns { name: "Partition" type { optional_type { item { type_id: UINT32 } } } } columns { name: "CreateTime" type { optional_type { item { type_id: UINT64 } } } } columns { name: "AccessTime" type { optional_type { item { type_id: UINT64 } } } } columns { name: "SeqNo" type { optional_type { item { type_id: UINT64 } } } } rows { items { uint32_value: 0 } items { uint64_value: 1743438123868 } items { uint64_value: 1743438123868 } items { uint64_value: 13 } } } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 112 2025-03-31T16:22:04.184980Z node 7 :PQ_PARTITION_CHOOSER TRACE: TPartitionChooser [7:7488009727184971458:3977] (SourceId=A_Source_5, PreferedPartition=(NULL)) Selected from table PartitionId=0 SeqNo=13 2025-03-31T16:22:04.185010Z node 7 :PQ_PARTITION_CHOOSER TRACE: TPartitionChooser [7:7488009727184971458:3977] (SourceId=A_Source_5, PreferedPartition=(NULL)) OnPartitionChosen 2025-03-31T16:22:04.185196Z node 7 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 269877760, Sender [7:7488009727184971492:3977], Recipient [7:7488009705710133868:3326]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 1001 Status: OK ServerId: [7:7488009727184971458:3977] Leader: 1 Dead: 0 Generation: 1 VersionInfo: } 2025-03-31T16:22:04.185275Z node 7 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 271188557, Sender [7:7488009727184971458:3977], Recipient [7:7488009705710133868:3326]: NKikimrPQ.TEvCheckPartitionStatusRequest Partition: 1 2025-03-31T16:22:04.185355Z node 7 :PQ_PARTITION_CHOOSER TRACE: StateCheckPartition, received event# 271188558, Sender [7:7488009705710133868:3326], Recipient [7:7488009727184971458:3977]: NKikimrPQ.TEvCheckPartitionStatusResponse Status: Active 2025-03-31T16:22:04.185390Z node 7 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [7:7488009727184971458:3977] (SourceId=A_Source_5, PreferedPartition=(NULL)) Update the table 2025-03-31T16:22:04.185661Z node 7 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 65543, Sender [7:7488009727184971458:3977], Recipient [7:7488009705710133868:3326]: NActors::TEvents::TEvPoison 2025-03-31T16:22:04.298014Z node 7 :PQ_PARTITION_CHOOSER TRACE: StateUpdate, received event# 271646721, Sender [7:7488009662760459074:2280], Recipient [7:7488009727184971458:3977]: NKikimrKqp.TEvQueryResponse Response { SessionId: "ydb://session/3?node_id=7&id=NDIwZWFlMWYtYzgyMTlhMmQtZGQ3ZDk0MDAtMjk0NWM3Njk=" PreparedQuery: "da461df5-d9611acb-e230c81b-73f530ee" QueryParameters { Name: "$AccessTime" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: 
"$CreateTime" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Hash" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Partition" Type { Kind: Data Data { Scheme: 2 } } } QueryParameters { Name: "$SourceId" Type { Kind: Data Data { Scheme: 4608 } } } QueryParameters { Name: "$SeqNo" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Topic" Type { Kind: Data Data { Scheme: 4608 } } } TxMeta { } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 65 Received TEvChooseResult: 1 2025-03-31T16:22:04.298082Z node 7 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [7:7488009727184971458:3977] (SourceId=A_Source_5, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2025-03-31T16:22:04.298126Z node 7 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [7:7488009727184971458:3977] (SourceId=A_Source_5, PreferedPartition=(NULL)) ReplyResult: Partition=1, SeqNo=13 2025-03-31T16:22:04.298158Z node 7 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [7:7488009727184971458:3977] (SourceId=A_Source_5, PreferedPartition=(NULL)) Start idle Run query: --!syntax_v1 SELECT Partition, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash = 11131928866524144434 AND Topic = "Root" AND ProducerId = "00415F536F757263655F35" 2025-03-31T16:22:04.458128Z node 7 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:22:04.458159Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:22:04.500525Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715706. Ctx: { TraceId: 01jqpg5591eaj5dxyntmdk0cer, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZjdlZGJjMDYtMmRkMGQxYmEtYWZhM2EwMjItY2QxOTU1NTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:22:04.996279Z node 7 :KQP_EXECUTER ERROR: ActorId: [7:7488009727184971609:2704] TxId: 281474976715707. Ctx: { TraceId: 01jqpg55sg611h8aq2n172zzxz, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=MzkxMWU1LTdjNDE1NDA1LTUzYWM0YjQ5LTliMTJkN2Jk, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 8 2025-03-31T16:22:04.996395Z node 7 :KQP_COMPUTE ERROR: SelfId: [7:7488009727184971613:2704], TxId: 281474976715707, task: 2. Ctx: { TraceId : 01jqpg55sg611h8aq2n172zzxz. SessionId : ydb://session/3?node_id=7&id=MzkxMWU1LTdjNDE1NDA1LTUzYWM0YjQ5LTliMTJkN2Jk. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [7:7488009727184971609:2704], status: UNAVAILABLE, reason: {
: Error: Terminate execution }
>> KqpPg::NoSelectFullScan [GOOD]
>> KqpPg::LongDomainName
------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_BadSourceId_Test [GOOD]
Test command err:
2025-03-31T16:21:02.153446Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488009462554971472:2074];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:21:02.153542Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
2025-03-31T16:21:02.204510Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488009459366312324:2071];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:21:02.204614Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001594/r3tmp/tmp5nh0bH/pdisk_1.dat
2025-03-31T16:21:02.383867Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created
2025-03-31T16:21:02.387722Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created
2025-03-31T16:21:02.556494Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:21:02.566788Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:21:02.566868Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:21:02.583515Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2
2025-03-31T16:21:02.584904Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-31T16:21:02.602409Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:21:02.602473Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:21:02.605068Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 3828, node 1
2025-03-31T16:21:02.760624Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/0l5c/001594/r3tmp/yandexpKxWti.tmp
2025-03-31T16:21:02.760652Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/0l5c/001594/r3tmp/yandexpKxWti.tmp
2025-03-31T16:21:02.761956Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/0l5c/001594/r3tmp/yandexpKxWti.tmp
2025-03-31T16:21:02.762153Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
2025-03-31T16:21:02.982993Z INFO: TTestServer started on Port 29942 GrpcPort 3828
TClient is connected to server localhost:29942
PQClient connected to localhost:3828
WaitRootIsUp 'Root'... 
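The "WaitRootIsUp 'Root'..." lines here and elsewhere in the log are the harness polling the scheme service until the database root becomes visible. A rough equivalent of that readiness probe with the YDB Python SDK's scheme client is sketched below; the endpoint and polling interval are assumptions, and ydb.Error is used as a catch-all for paths that are not yet describable.

import time
import ydb

driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")  # assumed endpoint
driver.wait(fail_fast=True, timeout=5)

def wait_root_is_up(path="/Root", timeout_s=30.0, poll_s=0.5):
    # Poll DescribePath until an entry is returned, like the test's WaitRootIsUp.
    deadline = time.monotonic() + timeout_s
    while True:
        try:
            return driver.scheme_client.describe_path(path)
        except ydb.Error:
            if time.monotonic() >= deadline:
                raise
            time.sleep(poll_s)

entry = wait_root_is_up()
print(entry.name)  # prints "Root" once the scheme root is visible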
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:21:03.289687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:21:03.334799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-03-31T16:21:05.297562Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009475439874538:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:21:05.297689Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:21:05.298001Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009475439874550:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:21:05.301547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-03-31T16:21:05.323138Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488009475439874552:2347], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-03-31T16:21:05.584530Z node 1 :TX_PROXY ERROR: Actor# [1:7488009475439874646:2820] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:21:05.648100Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488009475439874663:2353], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:21:05.648099Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7488009472251214552:2314], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:21:05.648455Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MjdmOGE5YzctZDBiZDhhMTItNGY5YWRmOGQtODcyYmVkNTQ=, ActorId: [2:7488009472251214503:2307], ActorState: ExecuteState, TraceId: 01jqpg3bnhab1gdbnj9zz67qzz, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:21:05.657744Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T16:21:05.659495Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzY5NmZhNTQtYWExMjYzZDItMjFjMzk1YmYtZTkxOTE2YjE=, ActorId: [1:7488009475439874535:2341], ActorState: ExecuteState, TraceId: 01jqpg3bkw6acff4wbf0xgbgjy, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:21:05.659887Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T16:21:05.699928Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:21:05.783909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:21:05.870695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-03-31T16:21:06.150122Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jqpg3c9r064wwmqw2cxtwer1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yjk5MDM4MzEtZjFmNjAzMTYtZjY4NWQyMzktYWJhZTkwZjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. 
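The chooser's "Select from the table" / "Update the table" steps traced in these tests form a read-modify-write cycle over the same mapping table. Below is a condensed sketch of that cycle, folded into a single YDB transaction for brevity; the real actor issues the two statements as separate calls, and the connection details, key values, and the fallback partition policy are assumptions for illustration.

import time
import ydb

driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")  # assumed endpoint
driver.wait(fail_fast=True, timeout=5)

SELECT_Q = """
--!syntax_v1
DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8;
SELECT Partition, CreateTime, AccessTime, SeqNo
FROM `//Root/.metadata/TopicPartitionsMapping`
WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId;
"""

# Verbatim UpdateQuery from the TTableHelper traces in this log.
UPSERT_Q = """
--!syntax_v1
DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64;
DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64;
DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64;
UPSERT INTO `//Root/.metadata/TopicPartitionsMapping`
    (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo)
VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo);
"""

KEY = {"$Hash": 11131928866524144434, "$Topic": "Root", "$SourceId": "A_Source_5"}

def choose_and_record(session):
    tx = session.transaction(ydb.SerializableReadWrite())
    rows = tx.execute(session.prepare(SELECT_Q), KEY)[0].rows
    # Reuse the stored partition if this source id was seen before; otherwise
    # partition 0 stands in for the real balancing policy.
    partition = (rows[0].Partition if rows else 0) or 0
    seq_no = (rows[0].SeqNo if rows else 0) or 0
    now_ms = int(time.time() * 1000)
    tx.execute(
        session.prepare(UPSERT_Q),
        dict(KEY, **{"$Partition": partition, "$CreateTime": now_ms,
                     "$AccessTime": now_ms, "$SeqNo": seq_no}),
        commit_tx=True,
    )
    return partition

pool = ydb.SessionPool(driver)
try:
    print(pool.retry_operation_sync(choose_and_record))
finally:
    pool.stop()
    driver.stop()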
Subcribe to ClusterTracker from [1:7488009479734842332:3100] 2025-03-31T16:21:07.153358Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488009462554971472:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:21:07.153427Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:21:07.204586Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488009459366312324:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:21:07.204681Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-03-31T16:21:12.159404Z node 1 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2025-03-31T16:21:12.159431Z node 1 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, Creat ... R ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:21:55.007486Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7488009688877953009:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:21:55.007581Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:21:55.049542Z node 9 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001594/r3tmp/tmp9m9LCl/pdisk_1.dat 2025-03-31T16:21:55.068130Z node 10 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-31T16:21:55.177932Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:21:55.227123Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:21:55.227244Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:21:55.228817Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:21:55.228910Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:21:55.231339Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:21:55.233256Z node 9 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 10 Cookie 10 2025-03-31T16:21:55.234033Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61864, node 9 2025-03-31T16:21:55.283763Z 
node 9 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/0l5c/001594/r3tmp/yandexLRMXdf.tmp 2025-03-31T16:21:55.283803Z node 9 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/0l5c/001594/r3tmp/yandexLRMXdf.tmp 2025-03-31T16:21:55.283961Z node 9 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/0l5c/001594/r3tmp/yandexLRMXdf.tmp 2025-03-31T16:21:55.284142Z node 9 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:21:55.350008Z INFO: TTestServer started on Port 28917 GrpcPort 61864 TClient is connected to server localhost:28917 PQClient connected to localhost:61864 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:21:55.620694Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:21:55.658187Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-03-31T16:21:59.089807Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [9:7488009705134072427:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:21:59.090269Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [9:7488009705134072416:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:21:59.090351Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:21:59.093622Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-03-31T16:21:59.104129Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [9:7488009705134072464:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:21:59.104266Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:21:59.132629Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [9:7488009705134072430:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-03-31T16:21:59.143877Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:21:59.200400Z node 9 :TX_PROXY ERROR: Actor# [9:7488009705134072603:2818] txid# 281474976710664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:21:59.243154Z node 9 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [9:7488009705134072628:2358], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:21:59.243411Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:21:59.243538Z node 9 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=9&id=MzNmNjkxMDUtNGYwYmQ2N2YtZDY1N2FkODEtNWJmOTRkZTM=, ActorId: [9:7488009705134072410:2338], ActorState: ExecuteState, TraceId: 01jqpg505e4fm8x83reqwmvw63, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:21:59.244090Z node 9 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T16:21:59.356954Z node 10 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [10:7488009706057822668:2321], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:21:59.357200Z node 10 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=10&id=N2E4NTgzYTEtOTQ5NzQyY2ItM2MxNjliNTYtYjE3OTI4NDM=, ActorId: [10:7488009706057822620:2314], ActorState: ExecuteState, TraceId: 01jqpg50cn2c0ptd321vegt00j, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:21:59.357715Z node 10 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T16:21:59.461678Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-03-31T16:21:59.738394Z node 9 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jqpg50m50nhggxvfckp2rg6d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=Nzc3MThmNzctNmE4NDQ3ODQtODlmMzJiYjEtMjdjNDVhNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [9:7488009705134072977:3095] 2025-03-31T16:21:59.985624Z node 9 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[9:7488009683659234833:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:21:59.985719Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:22:00.007162Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7488009688877953009:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:22:00.007268Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok Received TEvChooseError: Bad SourceId 2025-03-31T16:22:05.058694Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7488009730903877209:3364] (SourceId=base64:a***, PreferedPartition=(NULL)) Start idle 2025-03-31T16:22:05.058728Z node 9 :PQ_PARTITION_CHOOSER INFO: TPartitionChooser [9:7488009730903877209:3364] (SourceId=base64:a***, PreferedPartition=(NULL)) ReplyError: Bad SourceId >> TColumnShardTestReadWrite::WriteRead [GOOD] >> Normalizers::ColumnChunkNormalizer ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteRead [GOOD] Test command err: 2025-03-31T16:22:04.171705Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:22:04.268519Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:22:04.295970Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:22:04.296287Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:22:04.305303Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:22:04.305547Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:22:04.305838Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:22:04.305971Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:22:04.306091Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:22:04.306232Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:22:04.306370Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:22:04.306507Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:22:04.306643Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:22:04.306769Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:22:04.306879Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:22:04.306989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:22:04.338749Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:22:04.338913Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:22:04.339024Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:22:04.339257Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:22:04.339413Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:22:04.339469Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:22:04.339516Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:22:04.339585Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:22:04.339647Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:22:04.339682Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:22:04.339705Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:22:04.340002Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:22:04.340083Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:22:04.340121Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:22:04.340145Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:22:04.340237Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:22:04.340295Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:22:04.340330Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:22:04.340351Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:22:04.340424Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:22:04.340450Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:22:04.340478Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:22:04.340526Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:22:04.340563Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:22:04.340586Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:22:04.340919Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=37; 2025-03-31T16:22:04.341009Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=47; 2025-03-31T16:22:04.341094Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=36; 2025-03-31T16:22:04.341209Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=39; 2025-03-31T16:22:04.341375Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:22:04.341435Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:22:04.341462Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:22:04.341650Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:22:04.341685Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:22:04.341714Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:22:04.341842Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:22:04.341871Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:22:04.341892Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-31T16:22:04.342033Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:22:04.342062Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:22:04.342080Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-31T16:22:04.342167Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T16:22:04.342198Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T16:22:04.342240Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
id: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-31T16:22:08.840908Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-31T16:22:08.841065Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:232;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;);columns=10;rows=31; 2025-03-31T16:22:08.841171Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:252;stage=data_format;batch_size=1984;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-03-31T16:22:08.841353Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:432:2447];bytes=1984;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2025-03-31T16:22:08.841537Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-31T16:22:08.841698Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-31T16:22:08.841863Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-31T16:22:08.842125Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-31T16:22:08.842343Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-31T16:22:08.842509Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-31T16:22:08.842565Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:433:2448] finished for tablet 9437184 2025-03-31T16:22:08.843134Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:416;event=scan_finish;compute_actor_id=[1:432:2447];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.004},{"events":["f_processing","f_task_result"],"t":0.005},{"events":["f_ack","l_task_result"],"t":0.016},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.018}],"full":{"a":1743438128823929,"name":"_full_task","f":1743438128823929,"d_finished":0,"c":0,"l":1743438128842630,"d":18701},"events":[{"name":"bootstrap","f":1743438128824182,"d_finished":3761,"c":1,"l":1743438128827943,"d":3761},{"a":1743438128842096,"name":"ack","f":1743438128840546,"d_finished":1353,"c":1,"l":1743438128841899,"d":1887},{"a":1743438128842082,"name":"processing","f":1743438128829621,"d_finished":7252,"c":10,"l":1743438128841902,"d":7800},{"name":"ProduceResults","f":1743438128826270,"d_finished":3960,"c":13,"l":1743438128842545,"d":3960},{"a":1743438128842549,"name":"Finish","f":1743438128842549,"d_finished":0,"c":0,"l":1743438128842630,"d":81},{"name":"task_result","f":1743438128829645,"d_finished":5707,"c":9,"l":1743438128840308,"d":5707}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-31T16:22:08.843225Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:432:2447];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-31T16:22:08.843760Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:371;event=scan_finished;compute_actor_id=[1:432:2447];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.004},{"events":["f_processing","f_task_result"],"t":0.005},{"events":["f_ack","l_task_result"],"t":0.016},{"events":["l_ProduceResults","f_Finish"],"t":0.018},{"events":["l_ack","l_processing","l_Finish"],"t":0.019}],"full":{"a":1743438128823929,"name":"_full_task","f":1743438128823929,"d_finished":0,"c":0,"l":1743438128843273,"d":19344},"events":[{"name":"bootstrap","f":1743438128824182,"d_finished":3761,"c":1,"l":1743438128827943,"d":3761},{"a":1743438128842096,"name":"ack","f":1743438128840546,"d_finished":1353,"c":1,"l":1743438128841899,"d":2530},{"a":1743438128842082,"name":"processing","f":1743438128829621,"d_finished":7252,"c":10,"l":1743438128841902,"d":8443},{"name":"ProduceResults","f":1743438128826270,"d_finished":3960,"c":13,"l":1743438128842545,"d":3960},{"a":1743438128842549,"name":"Finish","f":1743438128842549,"d_finished":0,"c":0,"l":1743438128843273,"d":724},{"name":"task_result","f":1743438128829645,"d_finished":5707,"c":9,"l":1743438128840308,"d":5707}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-31T16:22:08.843868Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-31T16:22:08.823182Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=10308;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=10308;selected_rows=0; 2025-03-31T16:22:08.843921Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-31T16:22:08.844329Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; >> TColumnShardTestReadWrite::WriteExoticTypes [GOOD] >> TColumnShardTestReadWrite::WriteStandaloneOverload >> TColumnShardTestReadWrite::WriteReadExoticTypes [GOOD] ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteExoticTypes [GOOD] Test command err: 2025-03-31T16:22:04.679486Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:22:04.777914Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:22:04.802758Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:22:04.803066Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:22:04.811672Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:22:04.811902Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:22:04.812165Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:22:04.812327Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:22:04.812459Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:22:04.812580Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:22:04.812702Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:22:04.812838Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:22:04.812986Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:22:04.813106Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:22:04.813225Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:22:04.813329Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:22:04.843470Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 
2025-03-31T16:22:04.843640Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:22:04.843691Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:22:04.843879Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:22:04.844062Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:22:04.844135Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:22:04.844177Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:22:04.844267Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:22:04.844328Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:22:04.844369Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:22:04.844398Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:22:04.844665Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:22:04.844767Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:22:04.844815Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:22:04.844846Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:22:04.844978Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:22:04.845040Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:22:04.845084Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:22:04.845114Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:22:04.845203Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:22:04.845249Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:22:04.845292Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:22:04.845351Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:22:04.845388Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:22:04.845416Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:22:04.845845Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=55; 2025-03-31T16:22:04.845932Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=32; 2025-03-31T16:22:04.846036Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=55; 2025-03-31T16:22:04.846128Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=39; 2025-03-31T16:22:04.846309Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:22:04.846372Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:22:04.846411Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:22:04.846622Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:22:04.846697Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:22:04.846745Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:22:04.846901Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 
2025-03-31T16:22:04.846964Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:22:04.846992Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-31T16:22:04.847220Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:22:04.847269Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:22:04.847306Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-31T16:22:04.847427Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T16:22:04.847464Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T16:22:04.847519Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... :[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000032;32;32;32;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000033;33;33;33;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000034;34;34;34;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000035;35;35;35;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000036;36;36;36;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000037;37;37;37;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000038;38;38;38;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000039;39;39;39;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000040;40;40;40;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000041;41;41;41;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 
00:00:00.000042;42;42;42;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000043;43;43;43;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000044;44;44;44;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000045;45;45;45;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000046;46;46;46;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000047;47;47;47;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000048;48;48;48;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000049;49;49;49;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000050;50;50;50;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000051;51;51;51;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000052;52;52;52;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000053;53;53;53;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000054;54;54;54;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000055;55;55;55;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000056;56;56;56;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000057;57;57;57;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000058;58;58;58;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000059;59;59;59;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000060;60;60;60;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000061;61;61;61;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 
00:00:00.000062;62;62;62;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000063;63;63;63;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000064;64;64;64;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000065;65;65;65;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000066;66;66;66;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000067;67;67;67;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000068;68;68;68;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000069;69;69;69;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000070;70;70;70;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000071;71;71;71;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000072;72;72;72;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000073;73;73;73;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000074;74;74;74;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000075;75;75;75;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000076;76;76;76;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000077;77;77;77;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000078;78;78;78;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000079;79;79;79;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000080;80;80;80;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000081;81;81;81;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 
00:00:00.000082;82;82;82;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000083;83;83;83;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000084;84;84;84;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000085;85;85;85;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000086;86;86;86;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000087;87;87;87;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000088;88;88;88;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000089;89;89;89;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000090;90;90;90;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000091;91;91;91;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000092;92;92;92;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000093;93;93;93;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000094;94;94;94;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000095;95;95;95;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000096;96;96;96;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000097;97;97;97;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000098;98;98;98;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000099;99;99;99;"}}]}; 2025-03-31T16:22:09.974115Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=30;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; >> EvWrite::WriteInTransaction ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadExoticTypes [GOOD] Test command err: 2025-03-31T16:22:05.912244Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:22:06.006294Z node 1 
:TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:22:06.034296Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:22:06.034602Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:22:06.043234Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:22:06.043455Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:22:06.043717Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:22:06.043875Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:22:06.044007Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:22:06.044157Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:22:06.044296Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:22:06.044454Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:22:06.044580Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:22:06.044689Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:22:06.044784Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:22:06.044913Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:22:06.076209Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:22:06.076358Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 
2025-03-31T16:22:06.076403Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:22:06.076638Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:22:06.076793Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:22:06.076878Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:22:06.076923Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:22:06.077027Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:22:06.077100Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:22:06.077158Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:22:06.077192Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:22:06.077534Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:22:06.077623Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:22:06.077669Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:22:06.077701Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:22:06.077822Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:22:06.077909Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:22:06.077961Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:22:06.077992Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:22:06.078080Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 
2025-03-31T16:22:06.078122Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:22:06.078164Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:22:06.078224Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:22:06.078259Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:22:06.078286Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:22:06.078669Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=44; 2025-03-31T16:22:06.078752Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=33; 2025-03-31T16:22:06.078857Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=51; 2025-03-31T16:22:06.078961Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=39; 2025-03-31T16:22:06.079136Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:22:06.079190Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:22:06.079227Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:22:06.079473Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:22:06.079529Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:22:06.079558Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:22:06.079716Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:22:06.079760Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:22:06.079789Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-31T16:22:06.079992Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:22:06.080039Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:22:06.080073Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-31T16:22:06.080206Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T16:22:06.080247Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T16:22:06.080295Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... id: binary;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-31T16:22:10.760134Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-31T16:22:10.760305Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:232;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;);columns=10;rows=31; 2025-03-31T16:22:10.760412Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:252;stage=data_format;batch_size=2759;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-03-31T16:22:10.760595Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:432:2447];bytes=2759;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string 
uid: string level: int32 message: binary json_payload: binary ingested_at: timestamp[us] saved_at: timestamp[us] request_id: binary; 2025-03-31T16:22:10.760798Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-31T16:22:10.760949Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-31T16:22:10.761078Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-31T16:22:10.761331Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-31T16:22:10.761480Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-31T16:22:10.761626Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-31T16:22:10.761670Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:433:2448] finished for tablet 9437184 2025-03-31T16:22:10.762177Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:416;event=scan_finish;compute_actor_id=[1:432:2447];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.003},{"events":["f_ack","l_task_result"],"t":0.011},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.013}],"full":{"a":1743438130747770,"name":"_full_task","f":1743438130747770,"d_finished":0,"c":0,"l":1743438130761726,"d":13956},"events":[{"name":"bootstrap","f":1743438130747964,"d_finished":2668,"c":1,"l":1743438130750632,"d":2668},{"a":1743438130761303,"name":"ack","f":1743438130759739,"d_finished":1391,"c":1,"l":1743438130761130,"d":1814},{"a":1743438130761289,"name":"processing","f":1743438130751663,"d_finished":5871,"c":10,"l":1743438130761132,"d":6308},{"name":"ProduceResults","f":1743438130749479,"d_finished":3467,"c":13,"l":1743438130761656,"d":3467},{"a":1743438130761660,"name":"Finish","f":1743438130761660,"d_finished":0,"c":0,"l":1743438130761726,"d":66},{"name":"task_result","f":1743438130751679,"d_finished":4338,"c":9,"l":1743438130759504,"d":4338}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-31T16:22:10.762252Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:432:2447];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-31T16:22:10.762723Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:371;event=scan_finished;compute_actor_id=[1:432:2447];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.003},{"events":["f_ack","l_task_result"],"t":0.011},{"events":["l_ProduceResults","f_Finish"],"t":0.013},{"events":["l_ack","l_processing","l_Finish"],"t":0.014}],"full":{"a":1743438130747770,"name":"_full_task","f":1743438130747770,"d_finished":0,"c":0,"l":1743438130762303,"d":14533},"events":[{"name":"bootstrap","f":1743438130747964,"d_finished":2668,"c":1,"l":1743438130750632,"d":2668},{"a":1743438130761303,"name":"ack","f":1743438130759739,"d_finished":1391,"c":1,"l":1743438130761130,"d":2391},{"a":1743438130761289,"name":"processing","f":1743438130751663,"d_finished":5871,"c":10,"l":1743438130761132,"d":6885},{"name":"ProduceResults","f":1743438130749479,"d_finished":3467,"c":13,"l":1743438130761656,"d":3467},{"a":1743438130761660,"name":"Finish","f":1743438130761660,"d_finished":0,"c":0,"l":1743438130762303,"d":643},{"name":"task_result","f":1743438130751679,"d_finished":4338,"c":9,"l":1743438130759504,"d":4338}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-31T16:22:10.762882Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-31T16:22:10.747196Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=13268;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=13268;selected_rows=0; 2025-03-31T16:22:10.762935Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-31T16:22:10.763297Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; >> TColumnShardTestReadWrite::WriteReadZSTD [GOOD] >> TColumnShardTestReadWrite::WriteReadStandaloneExoticTypes >> KqpAnalyze::AnalyzeTable+ColumnStore [GOOD] >> KqpAnalyze::AnalyzeTable-ColumnStore >> KqpLimits::CancelAfterRwTx+useSink [GOOD] >> 
KqpLimits::CancelAfterRwTx-useSink >> KqpPg::SelectIndex-useSink [GOOD] >> KqpPg::TableDeleteAllData+useSink >> EvWrite::WriteInTransaction [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadZSTD [GOOD] Test command err: 2025-03-31T16:22:04.061813Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:22:04.151257Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:22:04.175533Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:22:04.175815Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:22:04.183907Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:22:04.184117Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:22:04.184400Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:22:04.184548Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:22:04.184650Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:22:04.184789Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:22:04.184903Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:22:04.185039Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:22:04.185162Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:22:04.185271Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:22:04.185405Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:22:04.185521Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:22:04.220840Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:22:04.221011Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:22:04.221060Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:22:04.221246Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:22:04.221397Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:22:04.221461Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:22:04.221501Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:22:04.221593Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:22:04.221657Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:22:04.221698Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:22:04.221727Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:22:04.221953Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:22:04.222048Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:22:04.222095Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:22:04.222125Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:22:04.222239Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:22:04.222297Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:22:04.222343Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:22:04.222384Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:22:04.222482Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:22:04.222520Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:22:04.222556Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:22:04.222612Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:22:04.222652Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:22:04.222680Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:22:04.223114Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=67; 2025-03-31T16:22:04.223223Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=39; 2025-03-31T16:22:04.223319Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=43; 2025-03-31T16:22:04.223405Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=39; 2025-03-31T16:22:04.223570Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:22:04.223633Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:22:04.223668Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:22:04.223877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:22:04.223925Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:22:04.223963Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:22:04.224135Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:22:04.224182Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:22:04.224212Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-31T16:22:04.224449Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:22:04.224493Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:22:04.224522Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-31T16:22:04.224638Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T16:22:04.224673Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T16:22:04.224725Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-31T16:22:11.774186Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-31T16:22:11.774364Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:232;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;);columns=10;rows=31; 2025-03-31T16:22:11.774475Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:252;stage=data_format;batch_size=1984;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-03-31T16:22:11.774659Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:1064:2935];bytes=1984;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2025-03-31T16:22:11.774868Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-31T16:22:11.775027Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-31T16:22:11.775275Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-31T16:22:11.775571Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-31T16:22:11.775762Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-31T16:22:11.775938Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-31T16:22:11.775992Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1065:2936] finished for tablet 9437184 2025-03-31T16:22:11.776628Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:416;event=scan_finish;compute_actor_id=[1:1064:2935];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.004},{"events":["f_processing","f_task_result"],"t":0.005},{"events":["f_ack","l_task_result"],"t":0.016},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.018}],"full":{"a":1743438131757425,"name":"_full_task","f":1743438131757425,"d_finished":0,"c":0,"l":1743438131776063,"d":18638},"events":[{"name":"bootstrap","f":1743438131757687,"d_finished":3820,"c":1,"l":1743438131761507,"d":3820},{"a":1743438131775537,"name":"ack","f":1743438131773790,"d_finished":1526,"c":1,"l":1743438131775316,"d":2052},{"a":1743438131775516,"name":"processing","f":1743438131763124,"d_finished":7273,"c":10,"l":1743438131775319,"d":7820},{"name":"ProduceResults","f":1743438131759787,"d_finished":3910,"c":13,"l":1743438131775970,"d":3910},{"a":1743438131775976,"name":"Finish","f":1743438131775976,"d_finished":0,"c":0,"l":1743438131776063,"d":87},{"name":"task_result","f":1743438131763148,"d_finished":5580,"c":9,"l":1743438131773560,"d":5580}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-31T16:22:11.776740Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:1064:2935];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-31T16:22:11.777318Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:371;event=scan_finished;compute_actor_id=[1:1064:2935];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.004},{"events":["f_processing","f_task_result"],"t":0.005},{"events":["f_ack","l_task_result"],"t":0.016},{"events":["l_ProduceResults","f_Finish"],"t":0.018},{"events":["l_ack","l_processing","l_Finish"],"t":0.019}],"full":{"a":1743438131757425,"name":"_full_task","f":1743438131757425,"d_finished":0,"c":0,"l":1743438131776803,"d":19378},"events":[{"name":"bootstrap","f":1743438131757687,"d_finished":3820,"c":1,"l":1743438131761507,"d":3820},{"a":1743438131775537,"name":"ack","f":1743438131773790,"d_finished":1526,"c":1,"l":1743438131775316,"d":2792},{"a":1743438131775516,"name":"processing","f":1743438131763124,"d_finished":7273,"c":10,"l":1743438131775319,"d":8560},{"name":"ProduceResults","f":1743438131759787,"d_finished":3910,"c":13,"l":1743438131775970,"d":3910},{"a":1743438131775976,"name":"Finish","f":1743438131775976,"d_finished":0,"c":0,"l":1743438131776803,"d":827},{"name":"task_result","f":1743438131763148,"d_finished":5580,"c":9,"l":1743438131773560,"d":5580}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-31T16:22:11.777421Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-31T16:22:11.756668Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=10308;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=10308;selected_rows=0; 2025-03-31T16:22:11.777490Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-31T16:22:11.777947Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; >> EvWrite::WriteWithLock ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> EvWrite::WriteInTransaction [GOOD] Test command err: 2025-03-31T16:22:11.555311Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 
20971520 InFlightDataSize: 0 2025-03-31T16:22:11.649177Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:22:11.675013Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:22:11.675371Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:22:11.684234Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:22:11.684450Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:22:11.684709Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:22:11.684869Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:22:11.684990Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:22:11.685112Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:22:11.685226Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:22:11.685335Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:22:11.685473Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:22:11.685669Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:22:11.685790Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:22:11.685896Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:22:11.716235Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:22:11.716404Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:22:11.716490Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:22:11.716691Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:22:11.716896Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:22:11.716969Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:22:11.717010Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:22:11.717122Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:22:11.717201Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:22:11.717252Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:22:11.717287Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:22:11.717514Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:22:11.717590Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:22:11.717633Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:22:11.717663Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:22:11.717773Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:22:11.717841Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:22:11.717890Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:22:11.717931Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:22:11.718010Z node 
1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:22:11.718050Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:22:11.718088Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:22:11.718153Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:22:11.718191Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:22:11.718219Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:22:11.718641Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=49; 2025-03-31T16:22:11.718745Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=53; 2025-03-31T16:22:11.718837Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=37; 2025-03-31T16:22:11.718909Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=35; 2025-03-31T16:22:11.719156Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:22:11.719233Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:22:11.719278Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:22:11.719504Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:22:11.719556Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:22:11.719589Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:22:11.719736Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:22:11.719781Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:22:11.719811Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-31T16:22:11.720027Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:22:11.720073Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:22:11.720107Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-31T16:22:11.720245Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T16:22:11.720290Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T16:22:11.720345Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... ted;limit=limits:(bytes=0;chunks=0);; 2025-03-31T16:22:12.543951Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:289:2307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:84;event=TEvTaskProcessedResult; 2025-03-31T16:22:12.543996Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:289:2307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=merge.cpp:74;event=DoApply;interval_idx=0; 2025-03-31T16:22:12.544032Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:289:2307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=1; 2025-03-31T16:22:12.544072Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:289:2307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=2048;merger=0;interval_id=1; 2025-03-31T16:22:12.544105Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:289:2307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=scanner.cpp:65;event=intervals_finished; 2025-03-31T16:22:12.544164Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:289:2307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-03-31T16:22:12.544185Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:289:2307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=1;count=2048;finished=1; 2025-03-31T16:22:12.544214Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:289:2307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:201;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-03-31T16:22:12.544409Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:289:2307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-31T16:22:12.544553Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:289:2307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:1;records_count:2048;schema=key: uint64 field: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-03-31T16:22:12.544605Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:289:2307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-31T16:22:12.544695Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:289:2307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:232;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;);columns=2;rows=2048; 2025-03-31T16:22:12.544743Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:289:2307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:252;stage=data_format;batch_size=229376;num_rows=2048;batch_columns=key,field; 2025-03-31T16:22:12.544833Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:289:2307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:285:2303];bytes=229376;rows=2048;faults=0;finished=0;fault=0;schema=key: uint64 field: string; 2025-03-31T16:22:12.544936Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:289:2307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-03-31T16:22:12.545004Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:289:2307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-03-31T16:22:12.545088Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:289:2307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-03-31T16:22:12.545378Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:289:2307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-31T16:22:12.545486Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:289:2307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-03-31T16:22:12.545563Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:289:2307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-03-31T16:22:12.545616Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:289:2307] finished for tablet 9437184 2025-03-31T16:22:12.546000Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:289:2307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:416;event=scan_finish;compute_actor_id=[1:285:2303];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.005},{"events":["f_ack","l_task_result"],"t":0.033},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.034}],"full":{"a":1743438132510681,"name":"_full_task","f":1743438132510681,"d_finished":0,"c":0,"l":1743438132545674,"d":34993},"events":[{"name":"bootstrap","f":1743438132510971,"d_finished":2492,"c":1,"l":1743438132513463,"d":2492},{"a":1743438132545351,"name":"ack","f":1743438132544389,"d_finished":727,"c":1,"l":1743438132545116,"d":1050},{"a":1743438132545332,"name":"processing","f":1743438132516164,"d_finished":18474,"c":9,"l":1743438132545118,"d":18816},{"name":"ProduceResults","f":1743438132512407,"d_finished":2159,"c":12,"l":1743438132545601,"d":2159},{"a":1743438132545604,"name":"Finish","f":1743438132545604,"d_finished":0,"c":0,"l":1743438132545674,"d":70},{"name":"task_result","f":1743438132516183,"d_finished":17589,"c":8,"l":1743438132544306,"d":17589}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-03-31T16:22:12.546052Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:289:2307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:285:2303];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-31T16:22:12.546381Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:289:2307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:371;event=scan_finished;compute_actor_id=[1:285:2303];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.005},{"events":["f_ack","l_task_result"],"t":0.033},{"events":["l_ProduceResults","f_Finish"],"t":0.034},{"events":["l_ack","l_processing","l_Finish"],"t":0.035}],"full":{"a":1743438132510681,"name":"_full_task","f":1743438132510681,"d_finished":0,"c":0,"l":1743438132546081,"d":35400},"events":[{"name":"bootstrap","f":1743438132510971,"d_finished":2492,"c":1,"l":1743438132513463,"d":2492},{"a":1743438132545351,"name":"ack","f":1743438132544389,"d_finished":727,"c":1,"l":1743438132545116,"d":1457},{"a":1743438132545332,"name":"processing","f":1743438132516164,"d_finished":18474,"c":9,"l":1743438132545118,"d":19223},{"name":"ProduceResults","f":1743438132512407,"d_finished":2159,"c":12,"l":1743438132545601,"d":2159},{"a":1743438132545604,"name":"Finish","f":1743438132545604,"d_finished":0,"c":0,"l":1743438132546081,"d":477},{"name":"task_result","f":1743438132516183,"d_finished":17589,"c":8,"l":1743438132544306,"d":17589}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-03-31T16:22:12.546490Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:289:2307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-31T16:22:12.510278Z;index_granules=0;index_portions=1;index_batches=82;committed_batches=0;schema_columns=2;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=238056;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=238056;selected_rows=0; 2025-03-31T16:22:12.546531Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:289:2307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-31T16:22:12.546730Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:289:2307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;; >> Normalizers::ColumnChunkNormalizer [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::ColumnChunkNormalizer [GOOD] Test command err: 2025-03-31T16:22:10.008374Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:22:10.098545Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:22:10.118394Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:22:10.118647Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:22:10.125124Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=NO_VALUE_OPTIONAL; 2025-03-31T16:22:10.125332Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:22:10.125506Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:22:10.125595Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:22:10.125683Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:22:10.125761Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:22:10.125838Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:22:10.125917Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:22:10.125989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:22:10.126085Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:22:10.126156Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:22:10.126239Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:22:10.148829Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:22:10.149012Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 
2025-03-31T16:22:10.149075Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=1;type=Granules; 2025-03-31T16:22:10.149348Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:22:10.149530Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:22:10.149619Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:22:10.149666Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=2;type=Chunks; 2025-03-31T16:22:10.149814Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:22:10.149892Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:22:10.149939Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:22:10.150024Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=4;type=TablesCleaner; 2025-03-31T16:22:10.150211Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:22:10.150289Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:22:10.150338Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:22:10.150377Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=6;type=CleanGranuleId; 2025-03-31T16:22:10.150494Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:22:10.150567Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:22:10.150625Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:22:10.150666Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:22:10.150757Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:22:10.150805Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:22:10.150844Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:22:10.150916Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:22:10.150959Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:22:10.150992Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:22:10.151511Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=63; 2025-03-31T16:22:10.151616Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=42; 2025-03-31T16:22:10.151726Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=50; 2025-03-31T16:22:10.151881Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=56; 2025-03-31T16:22:10.152090Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:22:10.152182Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:22:10.152229Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:22:10.152447Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:22:10.152517Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:22:10.152567Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:22:10.152719Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:22:10.152769Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:22:10.152800Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-31T16:22:10.153006Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:22:10.153060Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:22:10.153091Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=16;type=RestoreV2Chunks; 2025-03-31T16:22:10.153209Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T16:22:10.153254Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normaliza ... TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:201;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-03-31T16:22:13.526567Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:481:2486];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:84;event=TEvTaskProcessedResult; 2025-03-31T16:22:13.526638Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:481:2486];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=merge.cpp:74;event=DoApply;interval_idx=0; 2025-03-31T16:22:13.526699Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:481:2486];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=2; 2025-03-31T16:22:13.526777Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:481:2486];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=20048;merger=0;interval_id=2; 2025-03-31T16:22:13.526839Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:481:2486];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=scanner.cpp:65;event=intervals_finished; 2025-03-31T16:22:13.526959Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:481:2486];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-03-31T16:22:13.527002Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:481:2486];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=1;count=20048;finished=1; 2025-03-31T16:22:13.527046Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:481:2486];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:201;stage=limit 
exhausted;limit=limits:(bytes=0;chunks=0);; 2025-03-31T16:22:13.527301Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:481:2486];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-31T16:22:13.527492Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:481:2486];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:1;records_count:20048;schema=key1: uint64 key2: uint64 field: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-03-31T16:22:13.527545Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:481:2486];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-31T16:22:13.527697Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:481:2486];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:232;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;);columns=3;rows=20048; 2025-03-31T16:22:13.527757Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:481:2486];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:252;stage=data_format;batch_size=2405760;num_rows=20048;batch_columns=key1,key2,field; 2025-03-31T16:22:13.527848Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:481:2486];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:479:2485];bytes=2405760;rows=20048;faults=0;finished=0;fault=0;schema=key1: uint64 key2: uint64 field: string; 2025-03-31T16:22:13.527955Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:481:2486];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-03-31T16:22:13.528063Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:481:2486];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-03-31T16:22:13.528152Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:481:2486];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-03-31T16:22:13.528921Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:481:2486];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-31T16:22:13.529050Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:481:2486];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-03-31T16:22:13.529140Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:481:2486];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-03-31T16:22:13.529171Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:481:2486] finished for tablet 9437184 2025-03-31T16:22:13.529614Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:481:2486];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:416;event=scan_finish;compute_actor_id=[1:479:2485];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.004},{"events":["f_ack","l_task_result"],"t":0.261},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.263}],"full":{"a":1743438133265533,"name":"_full_task","f":1743438133265533,"d_finished":0,"c":0,"l":1743438133529274,"d":263741},"events":[{"name":"bootstrap","f":1743438133265714,"d_finished":2094,"c":1,"l":1743438133267808,"d":2094},{"a":1743438133528888,"name":"ack","f":1743438133527271,"d_finished":903,"c":1,"l":1743438133528174,"d":1289},{"a":1743438133528863,"name":"processing","f":1743438133270174,"d_finished":151309,"c":9,"l":1743438133528176,"d":151720},{"name":"ProduceResults","f":1743438133266962,"d_finished":2425,"c":12,"l":1743438133529161,"d":2425},{"a":1743438133529163,"name":"Finish","f":1743438133529163,"d_finished":0,"c":0,"l":1743438133529274,"d":111},{"name":"task_result","f":1743438133270191,"d_finished":150238,"c":8,"l":1743438133527118,"d":150238}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-03-31T16:22:13.529684Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:481:2486];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:479:2485];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 
2025-03-31T16:22:13.530070Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:481:2486];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:371;event=scan_finished;compute_actor_id=[1:479:2485];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.004},{"events":["f_ack","l_task_result"],"t":0.261},{"events":["l_ProduceResults","f_Finish"],"t":0.263},{"events":["l_ack","l_processing","l_Finish"],"t":0.264}],"full":{"a":1743438133265533,"name":"_full_task","f":1743438133265533,"d_finished":0,"c":0,"l":1743438133529725,"d":264192},"events":[{"name":"bootstrap","f":1743438133265714,"d_finished":2094,"c":1,"l":1743438133267808,"d":2094},{"a":1743438133528888,"name":"ack","f":1743438133527271,"d_finished":903,"c":1,"l":1743438133528174,"d":1740},{"a":1743438133528863,"name":"processing","f":1743438133270174,"d_finished":151309,"c":9,"l":1743438133528176,"d":152171},{"name":"ProduceResults","f":1743438133266962,"d_finished":2425,"c":12,"l":1743438133529161,"d":2425},{"a":1743438133529163,"name":"Finish","f":1743438133529163,"d_finished":0,"c":0,"l":1743438133529725,"d":562},{"name":"task_result","f":1743438133270191,"d_finished":150238,"c":8,"l":1743438133527118,"d":150238}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-03-31T16:22:13.530153Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:481:2486];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-31T16:22:13.265073Z;index_granules=0;index_portions=1;index_batches=939;committed_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=2589280;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=2589280;selected_rows=0; 2025-03-31T16:22:13.530200Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:481:2486];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-31T16:22:13.530432Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:481:2486];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;; >> TColumnShardTestReadWrite::WriteStandalone >> EvWrite::WriteWithLock [GOOD] >> KqpPg::LongDomainName [GOOD] >> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt32 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> EvWrite::WriteWithLock [GOOD] Test command err: 2025-03-31T16:22:13.565805Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:22:13.670325Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:22:13.689204Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:22:13.689415Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:22:13.696405Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:22:13.696630Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:22:13.696898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:22:13.697054Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:22:13.697171Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:22:13.697275Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:22:13.697394Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:22:13.697509Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:22:13.697651Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:22:13.697784Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:22:13.697896Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:22:13.697989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:22:13.720383Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:22:13.720625Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:22:13.720697Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 
2025-03-31T16:22:13.720864Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:22:13.721023Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:22:13.721097Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:22:13.721157Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:22:13.721277Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:22:13.721373Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:22:13.721417Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:22:13.721452Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:22:13.721645Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:22:13.721697Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:22:13.721730Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:22:13.721762Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:22:13.721864Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:22:13.721933Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:22:13.721987Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:22:13.722022Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:22:13.722091Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:22:13.722129Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:22:13.722160Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:22:13.722221Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:22:13.722259Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:22:13.722285Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:22:13.722710Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=34; 2025-03-31T16:22:13.722790Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=36; 2025-03-31T16:22:13.722870Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=37; 2025-03-31T16:22:13.722937Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=30; 2025-03-31T16:22:13.723112Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:22:13.723174Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:22:13.723219Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:22:13.723452Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:22:13.723498Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:22:13.723530Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:22:13.723684Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:22:13.723724Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:22:13.723747Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-31T16:22:13.723933Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:22:13.723973Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:22:13.724001Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-31T16:22:13.724130Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T16:22:13.724167Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T16:22:13.724235Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... ;;;); 2025-03-31T16:22:14.658953Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:309:2327];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-03-31T16:22:14.659000Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:309:2327];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:201;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-03-31T16:22:14.659394Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:309:2327];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:84;event=TEvTaskProcessedResult; 2025-03-31T16:22:14.659458Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:309:2327];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=merge.cpp:74;event=DoApply;interval_idx=0; 2025-03-31T16:22:14.659507Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:309:2327];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=1; 2025-03-31T16:22:14.659557Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:309:2327];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=4096;merger=0;interval_id=1; 2025-03-31T16:22:14.659619Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:309:2327];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=scanner.cpp:65;event=intervals_finished; 2025-03-31T16:22:14.659706Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:309:2327];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-03-31T16:22:14.659733Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:309:2327];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce 
result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=1;count=4096;finished=1; 2025-03-31T16:22:14.659768Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:309:2327];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:201;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-03-31T16:22:14.659974Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:309:2327];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-31T16:22:14.660119Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:309:2327];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:1;records_count:4096;schema=key: uint64 field: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-03-31T16:22:14.660157Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:309:2327];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-31T16:22:14.660258Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:309:2327];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:232;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;);columns=2;rows=4096; 2025-03-31T16:22:14.660326Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:309:2327];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:252;stage=data_format;batch_size=458752;num_rows=4096;batch_columns=key,field; 2025-03-31T16:22:14.660462Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:309:2327];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:305:2323];bytes=458752;rows=4096;faults=0;finished=0;fault=0;schema=key: uint64 field: string; 2025-03-31T16:22:14.660605Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:309:2327];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-03-31T16:22:14.660718Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:309:2327];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-03-31T16:22:14.660854Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:309:2327];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-03-31T16:22:14.661222Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:309:2327];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-31T16:22:14.661346Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:309:2327];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-03-31T16:22:14.661450Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:309:2327];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-03-31T16:22:14.661502Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:309:2327] finished for tablet 9437184 2025-03-31T16:22:14.661848Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:309:2327];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:416;event=scan_finish;compute_actor_id=[1:305:2323];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.005},{"events":["f_ack","l_task_result"],"t":0.046},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.048}],"full":{"a":1743438134612959,"name":"_full_task","f":1743438134612959,"d_finished":0,"c":0,"l":1743438134661578,"d":48619},"events":[{"name":"bootstrap","f":1743438134613209,"d_finished":2358,"c":1,"l":1743438134615567,"d":2358},{"a":1743438134661196,"name":"ack","f":1743438134659940,"d_finished":952,"c":1,"l":1743438134660892,"d":1334},{"a":1743438134661180,"name":"processing","f":1743438134618116,"d_finished":22121,"c":9,"l":1743438134660895,"d":22519},{"name":"ProduceResults","f":1743438134614601,"d_finished":2418,"c":12,"l":1743438134661486,"d":2418},{"a":1743438134661490,"name":"Finish","f":1743438134661490,"d_finished":0,"c":0,"l":1743438134661578,"d":88},{"name":"task_result","f":1743438134618130,"d_finished":21017,"c":8,"l":1743438134659806,"d":21017}],"id":"9437184::3"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-03-31T16:22:14.661899Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:309:2327];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:305:2323];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-31T16:22:14.662209Z node 1 
:TX_COLUMNSHARD_SCAN INFO: SelfId=[1:309:2327];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:371;event=scan_finished;compute_actor_id=[1:305:2323];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.005},{"events":["f_ack","l_task_result"],"t":0.046},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.048}],"full":{"a":1743438134612959,"name":"_full_task","f":1743438134612959,"d_finished":0,"c":0,"l":1743438134661925,"d":48966},"events":[{"name":"bootstrap","f":1743438134613209,"d_finished":2358,"c":1,"l":1743438134615567,"d":2358},{"a":1743438134661196,"name":"ack","f":1743438134659940,"d_finished":952,"c":1,"l":1743438134660892,"d":1681},{"a":1743438134661180,"name":"processing","f":1743438134618116,"d_finished":22121,"c":9,"l":1743438134660895,"d":22866},{"name":"ProduceResults","f":1743438134614601,"d_finished":2418,"c":12,"l":1743438134661486,"d":2418},{"a":1743438134661490,"name":"Finish","f":1743438134661490,"d_finished":0,"c":0,"l":1743438134661925,"d":435},{"name":"task_result","f":1743438134618130,"d_finished":21017,"c":8,"l":1743438134659806,"d":21017}],"id":"9437184::3"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-03-31T16:22:14.662278Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:309:2327];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-31T16:22:14.612567Z;index_granules=0;index_portions=1;index_batches=176;committed_batches=0;schema_columns=2;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=494016;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=494016;selected_rows=0; 2025-03-31T16:22:14.662310Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:309:2327];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-31T16:22:14.662499Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:309:2327];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;; >> Normalizers::SchemaVersionsNormalizer ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::LongDomainName [GOOD] Test command err: Trying to start YDB, gRPC: 21239, MsgBus: 2362 2025-03-31T16:20:34.317028Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488009339284538201:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:34.317625Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012bd/r3tmp/tmphLiIeC/pdisk_1.dat 2025-03-31T16:20:34.694695Z node 1 :IMPORT WARN: Table 
profiles were not loaded 2025-03-31T16:20:34.722422Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:34.722554Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:34.723832Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21239, node 1 2025-03-31T16:20:34.879478Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:34.879501Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:34.879508Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:34.879627Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2362 TClient is connected to server localhost:2362 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:35.536690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:37.118523Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009352169440757:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:37.118534Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009352169440746:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:37.118643Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:37.122231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:20:37.130684Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488009352169440760:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:20:37.228819Z node 1 :TX_PROXY ERROR: Actor# [1:7488009352169440811:2338] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 18679, MsgBus: 2194 2025-03-31T16:20:38.181955Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488009357367308526:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:38.182036Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012bd/r3tmp/tmpwgNL4z/pdisk_1.dat 2025-03-31T16:20:38.270453Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18679, node 2 2025-03-31T16:20:38.317237Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:38.317360Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:38.318658Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:20:38.337850Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:38.337872Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:38.337879Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:38.337983Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2194 TClient is connected to server localhost:2194 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:38.747047Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:20:38.752858Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:20:41.120812Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488009370252211068:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:41.120812Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488009370252211076:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:41.120894Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:41.124452Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:20:41.134113Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488009370252211082:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:20:41.190703Z node 2 :TX_PROXY ERROR: Actor# [2:7488009370252211133:2334] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 5776, MsgBus: 64884 2025-03-31T16:20:41.899693Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488009369262898238:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:41.899748Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012bd/r3tmp/tmpBozkXv/pdisk_1.dat 2025-03-31T16:20:41.998595Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5776, node 3 2025-03-31T16:20:42.044334Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:42.044415Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:42.049805Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:20:42.052381Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:42.052400Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:42.052406Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:42.052493Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64884 TClient is connected to server localhost:64884 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 Schemeshar ... alse } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:22:00.598516Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:22:04.612712Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7488009705277093484:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:22:04.612797Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:22:04.990193Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7488009726751930633:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:22:04.990350Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:22:04.990891Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7488009726751930645:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:22:04.997470Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:22:05.013751Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7488009726751930647:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:22:05.072324Z node 10 :TX_PROXY ERROR: Actor# [10:7488009731046897994:2342] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:22:05.103226Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["pgbench_accounts"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["aid (null, 3)","aid [7, 7]"],"Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/pgbench_accounts","E-Rows":"No estimate","Table":"pgbench_accounts","ReadRangesKeys":["aid"],"ReadColumns":["abalance"],"E-Cost":"No estimate","ReadRangesExpectedSize":2}],"Node Type":"TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"SortBy":"input.abalance","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/pgbench_accounts","reads":[{"columns":["abalance"],"scan_by":["aid (null, 3)","aid [7, 7]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["aid (null, 3)","aid [7, 7]"],"Name":"TableRangeScan","Path":"\/Root\/pgbench_accounts","E-Rows":"No estimate","Table":"pgbench_accounts","ReadRangesKeys":["aid"],"ReadColumns":["abalance"],"E-Cost":"No estimate","ReadRangesExpectedSize":2}],"Node Type":"TableRangeScan"}],"Operators":[{"SortBy":"input.abalance","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["pgbench_accounts"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","ReadRange":["aid (4, 3)"],"E-Size":"No estimate","Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/pgbench_accounts","E-Rows":"No estimate","Table":"pgbench_accounts","ReadColumns":["abalance"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/pgbench_accounts","reads":[{"columns":["abalance"],"scan_by":["aid (4, 3)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Operators":[{"Scan":"Parallel","ReadRange":["aid (4, 3)"],"E-Size":"No estimate","Name":"TableRangeScan","Path":"\/Root\/pgbench_accounts","E-Rows":"No estimate","Table":"pgbench_accounts","ReadColumns":["abalance"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node 
Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 3770, MsgBus: 24727 2025-03-31T16:22:08.506849Z node 11 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[11:7488009744768623867:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:22:08.507025Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012bd/r3tmp/tmpvj1unA/pdisk_1.dat 2025-03-31T16:22:08.741049Z node 11 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:22:08.777490Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:22:08.777596Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:22:08.780902Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3770, node 11 2025-03-31T16:22:08.843882Z node 11 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:22:08.843911Z node 11 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:22:08.843921Z node 11 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:22:08.844077Z node 11 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24727 TClient is connected to server localhost:24727 WaitRootIsUp 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'... TClient::Ls request: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_D... (TRUNCATED) WaitRootIsUp 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' success. 2025-03-31T16:22:09.646698Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:22:13.503237Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7488009766243461009:2334], DatabaseId: /aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:22:13.503391Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7488009766243460996:2331], DatabaseId: /aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:22:13.503496Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:22:13.506830Z node 11 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7488009744768623867:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:22:13.506954Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:22:13.508460Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:22:13.526395Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7488009766243461025:2335], DatabaseId: /aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:22:13.613696Z node 11 :TX_PROXY ERROR: Actor# [11:7488009766243461079:2342] txid# 281474976715659, issues: { message: "Check failed: path: \'/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:22:13.649394Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 >> KqpPg::CheckPgAutoParams-useSink [GOOD] >> Normalizers::CleanEmptyPortionsNormalizer >> TColumnShardTestReadWrite::WriteReadStandaloneExoticTypes [GOOD] >> KqpAnalyze::AnalyzeTable-ColumnStore [GOOD] >> KqpExplain::AggGroupLimit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::CheckPgAutoParams-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 4441, MsgBus: 21276 2025-03-31T16:20:34.318008Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488009340655567358:2133];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:34.318309Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012c6/r3tmp/tmpRWjyQt/pdisk_1.dat 2025-03-31T16:20:34.721871Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4441, node 1 2025-03-31T16:20:34.747566Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:34.747682Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:34.749330Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:20:34.749520Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:20:34.753753Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:20:34.874108Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:34.874132Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:34.874143Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:34.874273Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21276 TClient is connected to server localhost:21276 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:35.519937Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:35.543146Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:20:37.004835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:20:37.193435Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill \x62797465612030 \x62797465612030 \x62797465612031 \x62797465612031 \x62797465612032 \x62797465612032 \x62797465612033 \x62797465612033 \x62797465612034 \x62797465612034 \x62797465612035 \x62797465612035 \x62797465612036 \x62797465612036 \x62797465612037 \x62797465612037 \x62797465612038 \x62797465612038 \x62797465612039 \x62797465612039 2025-03-31T16:20:37.241451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:20:37.289661Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill \x62797465612030 \x62797465612030 \x62797465612031 \x62797465612031 \x62797465612032 \x62797465612032 \x62797465612033 \x62797465612033 \x62797465612034 \x62797465612034 \x62797465612035 \x62797465612035 \x62797465612036 \x62797465612036 \x62797465612037 \x62797465612037 \x62797465612038 \x62797465612038 \x62797465612039 \x62797465612039 2025-03-31T16:20:37.325479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:20:37.367448Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill {"\\x6130","\\x623130"} {"\\x6130","\\x623130"} {"\\x6131","\\x623131"} {"\\x6131","\\x623131"} {"\\x6132","\\x623132"} {"\\x6132","\\x623132"} {"\\x6133","\\x623133"} {"\\x6133","\\x623133"} {"\\x6134","\\x623134"} {"\\x6134","\\x623134"} {"\\x6135","\\x623135"} {"\\x6135","\\x623135"} {"\\x6136","\\x623136"} {"\\x6136","\\x623136"} {"\\x6137","\\x623137"} {"\\x6137","\\x623137"} {"\\x6138","\\x623138"} {"\\x6138","\\x623138"} {"\\x6139","\\x623139"} {"\\x6139","\\x623139"} 2025-03-31T16:20:37.459248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose 
itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-03-31T16:20:37.495491Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill {"\\x6130","\\x623130"} {"\\x6130","\\x623130"} {"\\x6131","\\x623131"} {"\\x6131","\\x623131"} {"\\x6132","\\x623132"} {"\\x6132","\\x623132"} {"\\x6133","\\x623133"} {"\\x6133","\\x623133"} {"\\x6134","\\x623134"} {"\\x6134","\\x623134"} {"\\x6135","\\x623135"} {"\\x6135","\\x623135"} {"\\x6136","\\x623136"} {"\\x6136","\\x623136"} {"\\x6137","\\x623137"} {"\\x6137","\\x623137"} {"\\x6138","\\x623138"} {"\\x6138","\\x623138"} {"\\x6139","\\x623139"} {"\\x6139","\\x623139"} 2025-03-31T16:20:37.528664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T16:20:37.565068Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill f f t t 2025-03-31T16:20:37.629606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-31T16:20:37.668977Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill f f t t 2025-03-31T16:20:37.700083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-31T16:20:37.739032Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill {f,f} {f,f} {t,t} {t,t} 2025-03-31T16:20:37.774554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-03-31T16:20:37.813184Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill {f,f} {f,f} {t,t} {t,t} 2025-03-31T16:20:37.845870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710690:0, at schemeshard: 72057594046644480 2025-03-31T16:20:37.884840Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 2025-03-31T16:20:37.915793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710694:0, at schemeshard: 72057594046644480 2025-03-31T16:20:37.956845Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 2025-03-31T16:20:37.989701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710698:0, at schemeshard: 72057594046644480 2025-03-31T16:20:38.025889Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill {0,0} {0,0} {1,1} {1,1} {2,2} {2,2} {3,3} {3,3} {4,4} {4,4} {5,5} {5,5} {6,6} {6,6} {7,7} {7,7} {8,8} {8,8} {9,9} {9,9} 2025-03-31T16:20:38.060586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710702:0, at schemeshard: 72057594046644480 2025-03-31T16:20:38.098987Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send 
TEvPoisonPill {0,0} {0,0} {1,1} {1,1} {2,2} {2,2} {3,3} {3,3} {4,4} {4,4} {5,5} {5,5} {6,6} {6,6} {7,7} {7,7} {8,8} {8,8} {9,9} {9,9} 2025-03-31T16:20:38.130319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710706:0, at schemeshard: 72057594046644480 2025-03-31T16:20:38.166823Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 2025-03-31T16:20:38.202559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710710:0, at schemeshard: 72057594046644480 2025-03-31T16:20:38.245270Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 2025-03-31T16:20:38.281172Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710714:0, at schemeshard: 72057594046644480 2025-03-31T16:20:38.323728Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill {0,0} {0,0} {1,1} {1,1} {2,2} {2,2} {3,3} {3,3} {4,4} {4,4} {5,5} {5,5} {6,6} {6,6} {7,7} {7,7} {8,8} {8,8} {9,9} {9,9} 2025-03-31T16:20:38.371650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710718:0, at schemeshard: 72057594046644480 {0,0} {0,0} {1,1} {1,1} {2,2} {2,2} {3,3} {3,3} {4,4} {4,4} {5,5} {5,5} {6,6} {6,6} {7,7} {7,7} {8,8} {8,8} {9,9} {9,9} 2025-03-31T16:20:38.471258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710721:0, at schemeshard: 72057594046644480 2025-03-31T16:20:38.520968Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 2025-03-31T16:20:38.563917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710725:0, at schemeshard: 72057594046644480 2025-03-31T16:20:38.610908Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 2025-03-31T16:20:38.650777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710729:0, at schemeshard: 72057594046644480 2025-03-31T16:20:38.698248Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill {0,0} {0,0} {1,1} {1,1} {2,2} {2,2} {3,3} {3,3} {4,4} {4,4} {5,5} {5,5} {6,6} {6,6} {7,7} {7,7} {8,8} {8,8} {9,9} {9,9} 2025-03-31T16:20:38.806649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operatio ... 
to start YDB, gRPC: 25742, MsgBus: 61625 2025-03-31T16:22:07.044846Z node 14 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[14:7488009739400004495:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:22:07.045041Z node 14 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012c6/r3tmp/tmpWxNN7a/pdisk_1.dat 2025-03-31T16:22:07.214631Z node 14 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:22:07.261771Z node 14 :HIVE WARN: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:22:07.261942Z node 14 :HIVE WARN: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:22:07.264139Z node 14 :HIVE WARN: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25742, node 14 2025-03-31T16:22:07.335938Z node 14 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:22:07.335974Z node 14 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:22:07.335991Z node 14 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:22:07.336202Z node 14 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61625 TClient is connected to server localhost:61625 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-31T16:22:08.233444Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:22:12.044979Z node 14 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[14:7488009739400004495:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:22:12.045098Z node 14 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:22:12.715675Z node 14 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7488009760874841621:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:22:12.715766Z node 14 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:22:12.715883Z node 14 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7488009760874841654:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:22:12.721196Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:22:12.735864Z node 14 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [14:7488009760874841658:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:22:12.806023Z node 14 :TX_PROXY ERROR: Actor# [14:7488009760874841709:2343] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:22:12.841747Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:22:13.282421Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:22:13.897924Z node 14 :KQP_COMPILE_ACTOR ERROR: Get parsing result with error, self: [14:7488009765169809355:2398], owner: [14:7488009760874841610:2324], statement id: 0 2025-03-31T16:22:13.898186Z node 14 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=14&id=ODI0YmMzOWYtYzc0MzQ3NTItYjQzNDdiOTAtYmU2YzdkMDc=, ActorId: [14:7488009765169809353:2397], ActorState: ExecuteState, TraceId: 01jqpg5em53apdsdtxz39fxzce, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-03-31T16:22:14.150233Z node 14 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [14:7488009769464776685:2410], status: GENERIC_ERROR, issues:
<main>: Error: Type annotation, code: 1030
:1:1: Error: At function: RemovePrefixMembers, At function: PgSelect
: Error: At function: PgSetItem
:1:1: Error: At function: PgWhere
:2:55: Error: At function: PgOp
:2:55: Error: Unable to find an overload for operator = with given argument type(s): (text,int4) 2025-03-31T16:22:14.150584Z node 14 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=14&id=NTJjZTA1ZjktNWU0NmMyOGMtYWY3MzUyZWQtNGJlZmFjNGE=, ActorId: [14:7488009769464776682:2408], ActorState: ExecuteState, TraceId: 01jqpg5ev81z93m529e39bvj3x, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-03-31T16:22:14.189049Z node 14 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [14:7488009769464776697:2416], status: GENERIC_ERROR, issues:
<main>: Error: Type annotation, code: 1030
:1:1: Error: At function: RemovePrefixMembers, At function: PgSelect
: Error: At function: PgSetItem
:1:1: Error: At function: PgWhere
:2:57: Error: At function: PgAnd
:2:67: Error: At function: PgOp
:2:67: Error: Unable to find an overload for operator = with given argument type(s): (text,int4) 2025-03-31T16:22:14.189335Z node 14 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=14&id=NjY5N2E4YzAtN2YzZWNjODItZTkxM2JmZjQtODA0ZWJjNTc=, ActorId: [14:7488009769464776694:2414], ActorState: ExecuteState, TraceId: 01jqpg5ewe7y2grexp2m5prkn8, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-03-31T16:22:14.205772Z node 14 :KQP_EXECUTER CRIT: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqpg5exn5872eqhhqbac2z3p, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=14&id=OTI4YjVhZmQtM2NhZjRkY2QtN2NkMGMwNWUtYTFmZmViMGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, unexpected exception caught: (NKikimr::NMiniKQL::TTerminateException) Terminate was called, reason(51): ERROR: invalid input syntax for type integer: "a" 2025-03-31T16:22:14.206110Z node 14 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=14&id=OTI4YjVhZmQtM2NhZjRkY2QtN2NkMGMwNWUtYTFmZmViMGM=, ActorId: [14:7488009769464776707:2420], ActorState: ExecuteState, TraceId: 01jqpg5exn5872eqhhqbac2z3p, Create QueryResponse for error on request, msg: 2025-03-31T16:22:14.252124Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T16:22:14.349093Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T16:22:14.434206Z node 14 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [14:7488009769464776878:2445], status: GENERIC_ERROR, issues:
<main>: Error: Type annotation, code: 1030
:1:1: Error: At function: KiWriteTable!
:1:1: Error: values have 3 columns, INSERT INTO expects: 2 2025-03-31T16:22:14.434476Z node 14 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=14&id=ZmE3Njk1YWUtMzUzZTgxZGUtY2I4ZWJmOGMtM2EzZDkxODY=, ActorId: [14:7488009769464776875:2443], ActorState: ExecuteState, TraceId: 01jqpg5f48ctwb9mv43csjs799, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-03-31T16:22:14.469299Z node 14 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [14:7488009769464776891:2451], status: GENERIC_ERROR, issues:
<main>: Error: Type annotation, code: 1030
:1:1: Error: At function: KiWriteTable!
:1:1: Error: Failed to convert type: List> to List>
:1:1: Error: Failed to convert 'id': pgunknown to Optional
:1:1: Error: Row type mismatch for table: db.[/Root/PgTable2] 2025-03-31T16:22:14.469625Z node 14 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=14&id=ZDcyNzkyYTQtNjczYzVmNmYtODE2ZjBmNTAtOWEwYWE1ZTc=, ActorId: [14:7488009769464776888:2449], ActorState: ExecuteState, TraceId: 01jqpg5f5a75k2xe66w00vhdq8, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-03-31T16:22:15.020178Z node 14 :KQP_EXECUTER CRIT: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqpg5f6j45r8p9x74dpkwk8x, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=14&id=ZmYxMjAzY2MtNWRlNzRmNzMtNzU2MTJlN2QtZjBlMWMwODg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, unexpected exception caught: (NKikimr::NMiniKQL::TTerminateException) Terminate was called, reason(51): ERROR: invalid input syntax for type integer: "a" 2025-03-31T16:22:15.020766Z node 14 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=14&id=ZmYxMjAzY2MtNWRlNzRmNzMtNzU2MTJlN2QtZjBlMWMwODg=, ActorId: [14:7488009769464776901:2455], ActorState: ExecuteState, TraceId: 01jqpg5f6j45r8p9x74dpkwk8x, Create QueryResponse for error on request, msg: 2025-03-31T16:22:15.064046Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-31T16:22:15.626768Z node 14 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 14, TabletId: 72075186224037892 not found 2025-03-31T16:22:15.674510Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadStandaloneExoticTypes [GOOD] Test command err: 2025-03-31T16:22:12.816699Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:22:12.897791Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:22:12.914772Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:22:12.915100Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:22:12.923202Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:22:12.923436Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:22:12.923691Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:22:12.923809Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:22:12.923929Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:22:12.924050Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:22:12.924166Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:22:12.924298Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:22:12.924422Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:22:12.924544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:22:12.924651Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:22:12.924775Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:22:12.949221Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:22:12.949370Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:22:12.949422Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:22:12.949626Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:22:12.949824Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:22:12.949900Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:22:12.949944Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:22:12.950027Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:22:12.950074Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:22:12.950119Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:22:12.950141Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:22:12.950314Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:22:12.950374Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:22:12.950408Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:22:12.950429Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:22:12.950511Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:22:12.950555Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:22:12.950600Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:22:12.950627Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:22:12.950697Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:22:12.950728Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:22:12.950759Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:22:12.950816Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:22:12.950847Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:22:12.950875Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:22:12.951253Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=61; 2025-03-31T16:22:12.951326Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=29; 2025-03-31T16:22:12.951419Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=52; 2025-03-31T16:22:12.951509Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=42; 2025-03-31T16:22:12.951638Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:22:12.951698Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:22:12.951730Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:22:12.951888Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:22:12.951926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:22:12.951960Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:22:12.952080Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:22:12.952121Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:22:12.952140Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-31T16:22:12.952305Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:22:12.952339Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:22:12.952360Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-31T16:22:12.952451Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T16:22:12.952493Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T16:22:12.952536Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
id: binary;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-31T16:22:17.990281Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-31T16:22:17.990444Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:232;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;);columns=10;rows=31; 2025-03-31T16:22:17.990548Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:252;stage=data_format;batch_size=2759;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-03-31T16:22:17.990715Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:432:2447];bytes=2759;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: binary json_payload: binary ingested_at: timestamp[us] saved_at: timestamp[us] request_id: binary; 2025-03-31T16:22:17.990898Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-31T16:22:17.991050Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-31T16:22:17.991205Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-31T16:22:17.991469Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-31T16:22:17.991616Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-31T16:22:17.991756Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-31T16:22:17.991807Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:433:2448] finished for tablet 9437184 2025-03-31T16:22:17.992287Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:416;event=scan_finish;compute_actor_id=[1:432:2447];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.003},{"events":["f_processing","f_task_result"],"t":0.005},{"events":["f_ack","l_task_result"],"t":0.014},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.016}],"full":{"a":1743438137974992,"name":"_full_task","f":1743438137974992,"d_finished":0,"c":0,"l":1743438137991870,"d":16878},"events":[{"name":"bootstrap","f":1743438137975338,"d_finished":3483,"c":1,"l":1743438137978821,"d":3483},{"a":1743438137991441,"name":"ack","f":1743438137989962,"d_finished":1288,"c":1,"l":1743438137991250,"d":1717},{"a":1743438137991427,"name":"processing","f":1743438137980116,"d_finished":6678,"c":10,"l":1743438137991252,"d":7121},{"name":"ProduceResults","f":1743438137977159,"d_finished":3640,"c":13,"l":1743438137991791,"d":3640},{"a":1743438137991795,"name":"Finish","f":1743438137991795,"d_finished":0,"c":0,"l":1743438137991870,"d":75},{"name":"task_result","f":1743438137980139,"d_finished":5243,"c":9,"l":1743438137989776,"d":5243}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-31T16:22:17.992371Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:432:2447];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-31T16:22:17.992853Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:371;event=scan_finished;compute_actor_id=[1:432:2447];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.003},{"events":["f_processing","f_task_result"],"t":0.005},{"events":["f_ack","l_task_result"],"t":0.014},{"events":["l_ProduceResults","f_Finish"],"t":0.016},{"events":["l_ack","l_processing","l_Finish"],"t":0.017}],"full":{"a":1743438137974992,"name":"_full_task","f":1743438137974992,"d_finished":0,"c":0,"l":1743438137992419,"d":17427},"events":[{"name":"bootstrap","f":1743438137975338,"d_finished":3483,"c":1,"l":1743438137978821,"d":3483},{"a":1743438137991441,"name":"ack","f":1743438137989962,"d_finished":1288,"c":1,"l":1743438137991250,"d":2266},{"a":1743438137991427,"name":"processing","f":1743438137980116,"d_finished":6678,"c":10,"l":1743438137991252,"d":7670},{"name":"ProduceResults","f":1743438137977159,"d_finished":3640,"c":13,"l":1743438137991791,"d":3640},{"a":1743438137991795,"name":"Finish","f":1743438137991795,"d_finished":0,"c":0,"l":1743438137992419,"d":624},{"name":"task_result","f":1743438137980139,"d_finished":5243,"c":9,"l":1743438137989776,"d":5243}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-31T16:22:17.992961Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-31T16:22:17.974363Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=13268;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=13268;selected_rows=0; 2025-03-31T16:22:17.993016Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-31T16:22:17.993360Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; >> TPQTest::TestPartitionWriteQuota [GOOD] >> TPQTest::TestPartitionedBlobFails >> Normalizers::SchemaVersionsNormalizer [GOOD] >> TColumnShardTestReadWrite::WriteStandalone [GOOD] >> KqpPg::ValuesInsert+useSink [GOOD] >> KqpPg::ValuesInsert-useSink >> 
TPQTest::TestSourceIdDropByUserWrites [GOOD] >> TPQTest::TestSourceIdDropBySourceIdCount >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesTimeout >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::SchemaVersionsNormalizer [GOOD] Test command err: 2025-03-31T16:22:16.100475Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:22:16.206280Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:22:16.231977Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:22:16.232269Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:22:16.240636Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SchemaVersionCleaner; 2025-03-31T16:22:16.240893Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=NO_VALUE_OPTIONAL; 2025-03-31T16:22:16.241081Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:22:16.241295Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:22:16.241436Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:22:16.241558Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:22:16.241680Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:22:16.241785Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:22:16.241903Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:22:16.242045Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:22:16.242167Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:22:16.242283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:22:16.242392Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:22:16.275169Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:22:16.275401Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=SchemaVersionCleaner; 2025-03-31T16:22:16.275461Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=NO_VALUE_OPTIONAL;type=NO_VALUE_OPTIONAL; 2025-03-31T16:22:16.275775Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SchemaVersionCleaner;id=NO_VALUE_OPTIONAL; 2025-03-31T16:22:16.275869Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Granules;id=Granules; 2025-03-31T16:22:16.275910Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=1;type=Granules; 2025-03-31T16:22:16.276071Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:22:16.276185Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:22:16.276248Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:22:16.276286Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=2;type=Chunks; 2025-03-31T16:22:16.276425Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:22:16.276489Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:22:16.276554Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:22:16.276584Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=4;type=TablesCleaner; 2025-03-31T16:22:16.276765Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:22:16.276838Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:22:16.276879Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:22:16.276910Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=6;type=CleanGranuleId; 2025-03-31T16:22:16.277008Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:22:16.277061Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:22:16.277101Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:22:16.277127Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:22:16.277209Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:22:16.277265Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:22:16.277297Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:22:16.277365Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:22:16.277411Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:22:16.277437Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:22:16.277887Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=59; 2025-03-31T16:22:16.277989Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=35; 2025-03-31T16:22:16.278107Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=46; 2025-03-31T16:22:16.278208Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=43; 
2025-03-31T16:22:16.278389Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:22:16.278441Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:22:16.278471Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:22:16.278655Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:22:16.278696Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:22:16.278723Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:22:16.278906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:22:16.278959Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:22:16.278989Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-31T16:22:16.279217Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;pr ... 
UG: SelfId=[1:459:2465];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:201;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-03-31T16:22:19.766950Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:459:2465];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:84;event=TEvTaskProcessedResult; 2025-03-31T16:22:19.767009Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:459:2465];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=merge.cpp:74;event=DoApply;interval_idx=0; 2025-03-31T16:22:19.767046Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:459:2465];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=2; 2025-03-31T16:22:19.767107Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:459:2465];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=20048;merger=0;interval_id=2; 2025-03-31T16:22:19.767144Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:459:2465];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=scanner.cpp:65;event=intervals_finished; 2025-03-31T16:22:19.767250Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:459:2465];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-03-31T16:22:19.767290Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:459:2465];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=1;count=20048;finished=1; 2025-03-31T16:22:19.767330Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:459:2465];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:201;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-03-31T16:22:19.767516Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:459:2465];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-31T16:22:19.767668Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:459:2465];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:1;records_count:20048;schema=key1: uint64 key2: uint64 field: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-03-31T16:22:19.767720Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:459:2465];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-31T16:22:19.767845Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:459:2465];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:232;stage=ready 
result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;);columns=3;rows=20048; 2025-03-31T16:22:19.767917Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:459:2465];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:252;stage=data_format;batch_size=2405760;num_rows=20048;batch_columns=key1,key2,field; 2025-03-31T16:22:19.768015Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:459:2465];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:457:2464];bytes=2405760;rows=20048;faults=0;finished=0;fault=0;schema=key1: uint64 key2: uint64 field: string; 2025-03-31T16:22:19.768163Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:459:2465];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-03-31T16:22:19.768284Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:459:2465];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-03-31T16:22:19.768405Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:459:2465];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-03-31T16:22:19.769346Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:459:2465];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-31T16:22:19.769511Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:459:2465];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-03-31T16:22:19.769635Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:459:2465];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-03-31T16:22:19.769679Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:459:2465] finished for tablet 9437184 2025-03-31T16:22:19.770078Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:459:2465];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:416;event=scan_finish;compute_actor_id=[1:457:2464];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.003},{"events":["f_processing","f_task_result"],"t":0.007},{"events":["f_ack","l_task_result"],"t":0.333},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.335}],"full":{"a":1743438139434276,"name":"_full_task","f":1743438139434276,"d_finished":0,"c":0,"l":1743438139769729,"d":335453},"events":[{"name":"bootstrap","f":1743438139434594,"d_finished":3431,"c":1,"l":1743438139438025,"d":3431},{"a":1743438139769322,"name":"ack","f":1743438139767490,"d_finished":955,"c":1,"l":1743438139768445,"d":1362},{"a":1743438139769308,"name":"processing","f":1743438139441756,"d_finished":164437,"c":9,"l":1743438139768448,"d":164858},{"name":"ProduceResults","f":1743438139436514,"d_finished":2918,"c":12,"l":1743438139769659,"d":2918},{"a":1743438139769662,"name":"Finish","f":1743438139769662,"d_finished":0,"c":0,"l":1743438139769729,"d":67},{"name":"task_result","f":1743438139441807,"d_finished":163272,"c":8,"l":1743438139767369,"d":163272}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-03-31T16:22:19.770140Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:459:2465];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:457:2464];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-31T16:22:19.770523Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:459:2465];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:371;event=scan_finished;compute_actor_id=[1:457:2464];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.003},{"events":["f_processing","f_task_result"],"t":0.007},{"events":["f_ack","l_task_result"],"t":0.333},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.335}],"full":{"a":1743438139434276,"name":"_full_task","f":1743438139434276,"d_finished":0,"c":0,"l":1743438139770172,"d":335896},"events":[{"name":"bootstrap","f":1743438139434594,"d_finished":3431,"c":1,"l":1743438139438025,"d":3431},{"a":1743438139769322,"name":"ack","f":1743438139767490,"d_finished":955,"c":1,"l":1743438139768445,"d":1805},{"a":1743438139769308,"name":"processing","f":1743438139441756,"d_finished":164437,"c":9,"l":1743438139768448,"d":165301},{"name":"ProduceResults","f":1743438139436514,"d_finished":2918,"c":12,"l":1743438139769659,"d":2918},{"a":1743438139769662,"name":"Finish","f":1743438139769662,"d_finished":0,"c":0,"l":1743438139770172,"d":510},{"name":"task_result","f":1743438139441807,"d_finished":163272,"c":8,"l":1743438139767369,"d":163272}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-03-31T16:22:19.770613Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:459:2465];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-31T16:22:19.433575Z;index_granules=0;index_portions=1;index_batches=953;committed_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=2589608;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=2589608;selected_rows=0; 2025-03-31T16:22:19.770664Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:459:2465];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-31T16:22:19.770908Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:459:2465];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteStandalone [GOOD] Test command err: 2025-03-31T16:22:14.716461Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:22:14.808937Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:22:14.827139Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:22:14.827397Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:22:14.835172Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:22:14.835390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:22:14.835620Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:22:14.835745Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:22:14.835857Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:22:14.835974Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:22:14.836097Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:22:14.836228Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:22:14.836373Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:22:14.836505Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:22:14.836589Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:22:14.836667Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:22:14.858930Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:22:14.859108Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:22:14.859172Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:22:14.859349Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:22:14.859523Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:22:14.859581Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:22:14.859615Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:22:14.859682Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:22:14.859731Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:22:14.859774Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:22:14.859804Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:22:14.860028Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:22:14.860121Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:22:14.860166Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:22:14.860200Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:22:14.860311Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:22:14.860370Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:22:14.860414Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:22:14.860447Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:22:14.860536Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:22:14.860582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:22:14.860617Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 
2025-03-31T16:22:14.860682Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:22:14.860713Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:22:14.860735Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:22:14.861156Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=45; 2025-03-31T16:22:14.861235Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=33; 2025-03-31T16:22:14.861329Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=56; 2025-03-31T16:22:14.861392Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=28; 2025-03-31T16:22:14.861537Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:22:14.861586Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:22:14.861613Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:22:14.861760Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:22:14.861795Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:22:14.861829Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:22:14.861946Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:22:14.861981Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:22:14.862005Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-31T16:22:14.862160Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:22:14.862210Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:22:14.862239Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-31T16:22:14.862363Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T16:22:14.862418Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T16:22:14.862477Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... :[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000032;32;32;32;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000033;33;33;33;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000034;34;34;34;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000035;35;35;35;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000036;36;36;36;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000037;37;37;37;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000038;38;38;38;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000039;39;39;39;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000040;40;40;40;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000041;41;41;41;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000042;42;42;42;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000043;43;43;43;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000044;44;44;44;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000045;45;45;45;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 
00:00:00.000046;46;46;46;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000047;47;47;47;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000048;48;48;48;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000049;49;49;49;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000050;50;50;50;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000051;51;51;51;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000052;52;52;52;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000053;53;53;53;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000054;54;54;54;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000055;55;55;55;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000056;56;56;56;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000057;57;57;57;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000058;58;58;58;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000059;59;59;59;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000060;60;60;60;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000061;61;61;61;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000062;62;62;62;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000063;63;63;63;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000064;64;64;64;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000065;65;65;65;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 
00:00:00.000066;66;66;66;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000067;67;67;67;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000068;68;68;68;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000069;69;69;69;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000070;70;70;70;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000071;71;71;71;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000072;72;72;72;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000073;73;73;73;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000074;74;74;74;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000075;75;75;75;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000076;76;76;76;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000077;77;77;77;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000078;78;78;78;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000079;79;79;79;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000080;80;80;80;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000081;81;81;81;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000082;82;82;82;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000083;83;83;83;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000084;84;84;84;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000085;85;85;85;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 
00:00:00.000086;86;86;86;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000087;87;87;87;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000088;88;88;88;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000089;89;89;89;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000090;90;90;90;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000091;91;91;91;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000092;92;92;92;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000093;93;93;93;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000094;94;94;94;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000095;95;95;95;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000096;96;96;96;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000097;97;97;97;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000098;98;98;98;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000099;99;99;99;"}}]};
2025-03-31T16:22:19.892233Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=30;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0;
>> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest [GOOD]
>> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst
>> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest [GOOD]
>> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailesOnNotATopic
>> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesTimeout [GOOD]
>> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::SuccessfullyPassesResponsesFromTablets
>> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailesOnNotATopic [GOOD]
>> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly
>> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::SuccessfullyPassesResponsesFromTablets [GOOD]
>> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest
>> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst [GOOD]
>> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond
>> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD]
>> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnDuplicatedPartition
>> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesTimeout
>> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnDuplicatedPartition [GOOD]
>> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest [GOOD]
>> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailesOnNotATopic
>> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnFailedGetAllTopicsRequest
>> TPQTest::TestPQRead [GOOD]
>> TPQTest::TestPQSmallRead
>> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesTimeout [GOOD]
>> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::SuccessfullyPassesResponsesFromTablets
>> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond [GOOD]
>> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive
>> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailesOnNotATopic [GOOD]
>> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_OtherPartition_Test [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnDuplicatedPartition [GOOD]
Test command err:
Assert failed: Check response: { Status: 128 ErrorReason: "path \'Root/PQ\' has unknown/invalid root prefix \'Root\', Marker# PQ14" ErrorCode: UNKNOWN_TOPIC }
Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC }
Assert failed: Check response: { Status: 128 ErrorReason: "topic \'Root/PQ\' describe error, Status# LookupError, Marker# PQ1" ErrorCode: ERROR }
Assert failed: Check response: { Status: 128 ErrorReason: "multiple partition 2 in TopicRequest for topic \'rt3.dc1--topic2\'" ErrorCode: BAD_REQUEST }
>> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnFailedGetAllTopicsRequest [GOOD]
>> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest
>> TKeyValueTest::TestRewriteThenLastValueNewApi [GOOD]
>> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::SuccessfullyPassesResponsesFromTablets [GOOD]
>> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond
>> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailesOnNotATopic [GOOD]
Test command err:
Assert failed: Check response: { Status: 130 ErrorReason: "Timeout while waiting for response, may be just slow, Marker# PQ16" ErrorCode: ERROR }
2025-03-31T16:22:21.790956Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo
2025-03-31T16:22:21.795920Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615
2025-03-31T16:22:21.796301Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] doesn't have tx info
2025-03-31T16:22:21.796355Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0
2025-03-31T16:22:21.796407Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] no config, start with empty partitions and default config
2025-03-31T16:22:21.796475Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0
2025-03-31T16:22:21.796525Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig
2025-03-31T16:22:21.796618Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] doesn't have tx writes info
2025-03-31T16:22:21.797356Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:261:2253], now have 1 active actors on pipe
2025-03-31T16:22:21.797472Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig
2025-03-31T16:22:21.812682Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Config update version 1(current 0) received from actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 }
2025-03-31T16:22:21.815749Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 }
2025-03-31T16:22:21.815995Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig
2025-03-31T16:22:21.816940Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] Config applied version 1 actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 }
2025-03-31T16:22:21.817113Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep
2025-03-31T16:22:21.817560Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep
2025-03-31T16:22:21.817893Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] bootstrapping 0 [2:269:2259]
2025-03-31T16:22:21.820479Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Initializing completed.
2025-03-31T16:22:21.820580Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:269:2259]
2025-03-31T16:22:21.820651Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0
2025-03-31T16:22:21.820709Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateIdle] Process pending events. Count 0
2025-03-31T16:22:21.821670Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:272:2261], now have 1 active actors on pipe
2025-03-31T16:22:21.883673Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo
2025-03-31T16:22:21.887687Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615
2025-03-31T16:22:21.888005Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info
2025-03-31T16:22:21.888062Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0
2025-03-31T16:22:21.888109Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] no config, start with empty partitions and default config
2025-03-31T16:22:21.888157Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0
2025-03-31T16:22:21.888204Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig
2025-03-31T16:22:21.888267Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info
2025-03-31T16:22:21.889015Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:407:2362], now have 1 active actors on pipe
2025-03-31T16:22:21.889136Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig
2025-03-31T16:22:21.889341Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Config update version 2(current 0) received from actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 }
2025-03-31T16:22:21.891950Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 }
2025-03-31T16:22:21.892090Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig
2025-03-31T16:22:21.892983Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139] Config applied version 2 actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 }
2025-03-31T16:22:21.893102Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep
2025-03-31T16:22:21.893471Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep
2025-03-31T16:22:21.893704Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [2:415:2368]
2025-03-31T16:22:21.896110Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed.
2025-03-31T16:22:21.896181Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [2:415:2368]
2025-03-31T16:22:21.896238Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0
2025-03-31T16:22:21.896295Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. Count 0
2025-03-31T16:22:21.897131Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:418:2370], now have 1 active actors on pipe
2025-03-31T16:22:21.898598Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:424:2373], now have 1 active actors on pipe
2025-03-31T16:22:21.898944Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:426:2374], now have 1 active actors on pipe
2025-03-31T16:22:21.899523Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server disconnected, pipe [2:424:2373] destroyed
2025-03-31T16:22:21.899839Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server disconnected, pipe [2:426:2374] destroyed
Assert failed: Check response: { Status: 128 ErrorReason: "path \'Root/PQ\' has unknown/invalid root prefix \'Root\', Marker# PQ14" ErrorCode: UNKNOWN_TOPIC }
Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC }
>> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest [GOOD]
>> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNoBalancerInGetNodeRequest
>> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestRewriteThenLastValueNewApi [GOOD]
Test command err:
Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095]
Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095]
Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:52:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:52:2095]
Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:52:2095]
Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061]
!Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected !
Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:78:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:79:2110] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:79:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:50:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:51:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:140:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:83:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:86:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:87:2057] recipient: [8:85:2115] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:89:2057] recipient: [8:85:2115] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! new actor is[8:88:2116] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:142:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:51:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:83:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:86:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:85:2115] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:89:2057] recipient: [9:85:2115] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! new actor is[9:88:2116] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:142:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:52:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:52:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:84:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:87:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:88:2057] recipient: [10:86:2115] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:90:2057] recipient: [10:86:2115] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:89:2116] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:143:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvKeyValue::TEvCollect ! 
Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:85:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:88:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:89:2057] recipient: [11:87:2116] Leader for TabletID 72057594037927937 is [11:90:2117] sender: [11:91:2057] recipient: [11:87:2116] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! new actor is[11:90:2117] Leader for TabletID 72057594037927937 is [11:90:2117] sender: [11:110:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:56:2 ... 2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [106:54:2057] recipient: [106:51:2095] Leader for TabletID 72057594037927937 is [106:56:2097] sender: [106:57:2057] recipient: [106:51:2095] Leader for TabletID 72057594037927937 is [106:56:2097] sender: [106:74:2057] recipient: [106:14:2061] !Reboot 72057594037927937 (actor [106:56:2097]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [106:56:2097] sender: [106:127:2057] recipient: [106:36:2083] Leader for TabletID 72057594037927937 is [106:56:2097] sender: [106:130:2057] recipient: [106:14:2061] Leader for TabletID 72057594037927937 is [106:56:2097] sender: [106:131:2057] recipient: [106:129:2148] Leader for TabletID 72057594037927937 is [106:132:2149] sender: [106:133:2057] recipient: [106:129:2148] !Reboot 72057594037927937 (actor [106:56:2097]) rebooted! !Reboot 72057594037927937 (actor [106:56:2097]) tablet resolver refreshed! new actor is[106:132:2149] Leader for TabletID 72057594037927937 is [106:132:2149] sender: [106:152:2057] recipient: [106:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [107:54:2057] recipient: [107:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [107:54:2057] recipient: [107:51:2095] Leader for TabletID 72057594037927937 is [107:56:2097] sender: [107:57:2057] recipient: [107:51:2095] Leader for TabletID 72057594037927937 is [107:56:2097] sender: [107:74:2057] recipient: [107:14:2061] !Reboot 72057594037927937 (actor [107:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [107:56:2097] sender: [107:130:2057] recipient: [107:36:2083] Leader for TabletID 72057594037927937 is [107:56:2097] sender: [107:133:2057] recipient: [107:14:2061] Leader for TabletID 72057594037927937 is [107:56:2097] sender: [107:134:2057] recipient: [107:132:2151] Leader for TabletID 72057594037927937 is [107:135:2152] sender: [107:136:2057] recipient: [107:132:2151] !Reboot 72057594037927937 (actor [107:56:2097]) rebooted! !Reboot 72057594037927937 (actor [107:56:2097]) tablet resolver refreshed! 
new actor is[107:135:2152] Leader for TabletID 72057594037927937 is [107:135:2152] sender: [107:189:2057] recipient: [107:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [108:54:2057] recipient: [108:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [108:54:2057] recipient: [108:51:2095] Leader for TabletID 72057594037927937 is [108:56:2097] sender: [108:57:2057] recipient: [108:51:2095] Leader for TabletID 72057594037927937 is [108:56:2097] sender: [108:74:2057] recipient: [108:14:2061] !Reboot 72057594037927937 (actor [108:56:2097]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [108:56:2097] sender: [108:130:2057] recipient: [108:36:2083] Leader for TabletID 72057594037927937 is [108:56:2097] sender: [108:132:2057] recipient: [108:14:2061] Leader for TabletID 72057594037927937 is [108:56:2097] sender: [108:134:2057] recipient: [108:133:2151] Leader for TabletID 72057594037927937 is [108:135:2152] sender: [108:136:2057] recipient: [108:133:2151] !Reboot 72057594037927937 (actor [108:56:2097]) rebooted! !Reboot 72057594037927937 (actor [108:56:2097]) tablet resolver refreshed! new actor is[108:135:2152] Leader for TabletID 72057594037927937 is [108:135:2152] sender: [108:189:2057] recipient: [108:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [109:54:2057] recipient: [109:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [109:54:2057] recipient: [109:51:2095] Leader for TabletID 72057594037927937 is [109:56:2097] sender: [109:57:2057] recipient: [109:51:2095] Leader for TabletID 72057594037927937 is [109:56:2097] sender: [109:74:2057] recipient: [109:14:2061] !Reboot 72057594037927937 (actor [109:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [109:56:2097] sender: [109:131:2057] recipient: [109:36:2083] Leader for TabletID 72057594037927937 is [109:56:2097] sender: [109:134:2057] recipient: [109:14:2061] Leader for TabletID 72057594037927937 is [109:56:2097] sender: [109:135:2057] recipient: [109:133:2151] Leader for TabletID 72057594037927937 is [109:136:2152] sender: [109:137:2057] recipient: [109:133:2151] !Reboot 72057594037927937 (actor [109:56:2097]) rebooted! !Reboot 72057594037927937 (actor [109:56:2097]) tablet resolver refreshed! new actor is[109:136:2152] Leader for TabletID 72057594037927937 is [109:136:2152] sender: [109:154:2057] recipient: [109:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [110:54:2057] recipient: [110:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [110:54:2057] recipient: [110:51:2095] Leader for TabletID 72057594037927937 is [110:56:2097] sender: [110:57:2057] recipient: [110:51:2095] Leader for TabletID 72057594037927937 is [110:56:2097] sender: [110:74:2057] recipient: [110:14:2061] !Reboot 72057594037927937 (actor [110:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [110:56:2097] sender: [110:133:2057] recipient: [110:36:2083] Leader for TabletID 72057594037927937 is [110:56:2097] sender: [110:136:2057] recipient: [110:135:2153] Leader for TabletID 72057594037927937 is [110:56:2097] sender: [110:137:2057] recipient: [110:14:2061] Leader for TabletID 72057594037927937 is [110:138:2154] sender: [110:139:2057] recipient: [110:135:2153] !Reboot 72057594037927937 (actor [110:56:2097]) rebooted! !Reboot 72057594037927937 (actor [110:56:2097]) tablet resolver refreshed! 
new actor is[110:138:2154] Leader for TabletID 72057594037927937 is [110:138:2154] sender: [110:192:2057] recipient: [110:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [111:54:2057] recipient: [111:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [111:54:2057] recipient: [111:51:2095] Leader for TabletID 72057594037927937 is [111:56:2097] sender: [111:57:2057] recipient: [111:51:2095] Leader for TabletID 72057594037927937 is [111:56:2097] sender: [111:74:2057] recipient: [111:14:2061] !Reboot 72057594037927937 (actor [111:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [111:56:2097] sender: [111:133:2057] recipient: [111:36:2083] Leader for TabletID 72057594037927937 is [111:56:2097] sender: [111:135:2057] recipient: [111:14:2061] Leader for TabletID 72057594037927937 is [111:56:2097] sender: [111:137:2057] recipient: [111:136:2153] Leader for TabletID 72057594037927937 is [111:138:2154] sender: [111:139:2057] recipient: [111:136:2153] !Reboot 72057594037927937 (actor [111:56:2097]) rebooted! !Reboot 72057594037927937 (actor [111:56:2097]) tablet resolver refreshed! new actor is[111:138:2154] Leader for TabletID 72057594037927937 is [111:138:2154] sender: [111:192:2057] recipient: [111:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [112:54:2057] recipient: [112:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [112:54:2057] recipient: [112:51:2095] Leader for TabletID 72057594037927937 is [112:56:2097] sender: [112:57:2057] recipient: [112:51:2095] Leader for TabletID 72057594037927937 is [112:56:2097] sender: [112:74:2057] recipient: [112:14:2061] !Reboot 72057594037927937 (actor [112:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [112:56:2097] sender: [112:134:2057] recipient: [112:36:2083] Leader for TabletID 72057594037927937 is [112:56:2097] sender: [112:137:2057] recipient: [112:136:2153] Leader for TabletID 72057594037927937 is [112:56:2097] sender: [112:138:2057] recipient: [112:14:2061] Leader for TabletID 72057594037927937 is [112:139:2154] sender: [112:140:2057] recipient: [112:136:2153] !Reboot 72057594037927937 (actor [112:56:2097]) rebooted! !Reboot 72057594037927937 (actor [112:56:2097]) tablet resolver refreshed! new actor is[112:139:2154] Leader for TabletID 72057594037927937 is [112:139:2154] sender: [112:193:2057] recipient: [112:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [113:54:2057] recipient: [113:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [113:54:2057] recipient: [113:51:2095] Leader for TabletID 72057594037927937 is [113:56:2097] sender: [113:57:2057] recipient: [113:51:2095] Leader for TabletID 72057594037927937 is [113:56:2097] sender: [113:74:2057] recipient: [113:14:2061] !Reboot 72057594037927937 (actor [113:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [113:56:2097] sender: [113:137:2057] recipient: [113:36:2083] Leader for TabletID 72057594037927937 is [113:56:2097] sender: [113:140:2057] recipient: [113:14:2061] Leader for TabletID 72057594037927937 is [113:56:2097] sender: [113:141:2057] recipient: [113:139:2156] Leader for TabletID 72057594037927937 is [113:142:2157] sender: [113:143:2057] recipient: [113:139:2156] !Reboot 72057594037927937 (actor [113:56:2097]) rebooted! !Reboot 72057594037927937 (actor [113:56:2097]) tablet resolver refreshed! 
new actor is[113:142:2157] Leader for TabletID 72057594037927937 is [113:142:2157] sender: [113:196:2057] recipient: [113:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [114:54:2057] recipient: [114:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [114:54:2057] recipient: [114:50:2095] Leader for TabletID 72057594037927937 is [114:56:2097] sender: [114:57:2057] recipient: [114:50:2095] Leader for TabletID 72057594037927937 is [114:56:2097] sender: [114:74:2057] recipient: [114:14:2061] !Reboot 72057594037927937 (actor [114:56:2097]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [114:56:2097] sender: [114:137:2057] recipient: [114:36:2083] Leader for TabletID 72057594037927937 is [114:56:2097] sender: [114:139:2057] recipient: [114:14:2061] Leader for TabletID 72057594037927937 is [114:56:2097] sender: [114:141:2057] recipient: [114:140:2156] Leader for TabletID 72057594037927937 is [114:142:2157] sender: [114:143:2057] recipient: [114:140:2156] !Reboot 72057594037927937 (actor [114:56:2097]) rebooted! !Reboot 72057594037927937 (actor [114:56:2097]) tablet resolver refreshed! new actor is[114:142:2157] Leader for TabletID 72057594037927937 is [114:142:2157] sender: [114:196:2057] recipient: [114:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [115:54:2057] recipient: [115:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [115:54:2057] recipient: [115:51:2095] Leader for TabletID 72057594037927937 is [115:56:2097] sender: [115:57:2057] recipient: [115:51:2095] Leader for TabletID 72057594037927937 is [115:56:2097] sender: [115:74:2057] recipient: [115:14:2061] !Reboot 72057594037927937 (actor [115:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [115:56:2097] sender: [115:138:2057] recipient: [115:36:2083] Leader for TabletID 72057594037927937 is [115:56:2097] sender: [115:141:2057] recipient: [115:140:2156] Leader for TabletID 72057594037927937 is [115:56:2097] sender: [115:142:2057] recipient: [115:14:2061] Leader for TabletID 72057594037927937 is [115:143:2157] sender: [115:144:2057] recipient: [115:140:2156] !Reboot 72057594037927937 (actor [115:56:2097]) rebooted! !Reboot 72057594037927937 (actor [115:56:2097]) tablet resolver refreshed! new actor is[115:143:2157] Leader for TabletID 72057594037927937 is [0:0:0] sender: [116:54:2057] recipient: [116:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [116:54:2057] recipient: [116:51:2095] Leader for TabletID 72057594037927937 is [116:56:2097] sender: [116:57:2057] recipient: [116:51:2095] Leader for TabletID 72057594037927937 is [116:56:2097] sender: [116:74:2057] recipient: [116:14:2061] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' is not created, Marker# PQ94" ErrorCode: UNKNOWN_TOPIC } 2025-03-31T16:22:21.665809Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-03-31T16:22:21.676024Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-03-31T16:22:21.676329Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] doesn't have tx info 2025-03-31T16:22:21.676471Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-31T16:22:21.676505Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-03-31T16:22:21.676595Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0 2025-03-31T16:22:21.676637Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:22:21.676689Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] doesn't have tx writes info 2025-03-31T16:22:21.678030Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:262:2254], now have 1 active actors on pipe 2025-03-31T16:22:21.678130Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-03-31T16:22:21.692179Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Config update version 1(current 0) received from actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-03-31T16:22:21.695865Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-03-31T16:22:21.695984Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:22:21.700842Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] Config applied version 1 actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-03-31T16:22:21.701041Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-03-31T16:22:21.702638Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-31T16:22:21.704071Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] bootstrapping 0 [2:270:2260] 2025-03-31T16:22:21.707651Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Initializing completed. 
2025-03-31T16:22:21.708847Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:270:2260] 2025-03-31T16:22:21.708931Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-31T16:22:21.708988Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-03-31T16:22:21.711915Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:273:2262], now have 1 active actors on pipe 2025-03-31T16:22:21.767345Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Handle TEvInterconnect::TEvNodeInfo 2025-03-31T16:22:21.771565Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-03-31T16:22:21.771913Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] doesn't have tx info 2025-03-31T16:22:21.771965Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-31T16:22:21.772005Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] no config, start with empty partitions and default config 2025-03-31T16:22:21.772045Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Txs.size=0, PlannedTxs.size=0 2025-03-31T16:22:21.772092Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:22:21.772147Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928137] doesn't have tx writes info 2025-03-31T16:22:21.772804Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] server connected, pipe [2:406:2361], now have 1 active actors on pipe 2025-03-31T16:22:21.772970Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Handle TEvPersQueue::TEvUpdateConfig 2025-03-31T16:22:21.773172Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Config update version 2(current 0) received from actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-03-31T16:22:21.776032Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-03-31T16:22:21.776158Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:22:21.776970Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928137] Config applied version 2 actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-03-31T16:22:21.777118Z node 2 :PERSQUEUE DEBUG: 
[rt3.dc1--topic2:0:Initializer] Start initializing step TInitConfigStep 2025-03-31T16:22:21.777533Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-31T16:22:21.777724Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928137, Partition: 0, State: StateInit] bootstrapping 0 [2:414:2367] 2025-03-31T16:22:21.779962Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:0:Initializer] Initializing completed. 2025-03-31T16:22:21.780034Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928137, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 0 generation 2 [2:414:2367] 2025-03-31T16:22:21.780085Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-31T16:22:21.780157Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-03-31T16:22:21.781002Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] server connected, pipe [2:417:2369], now have 1 active actors on pipe 2025-03-31T16:22:21.798304Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvInterconnect::TEvNodeInfo 2025-03-31T16:22:21.802708Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-03-31T16:22:21.803064Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] doesn't have tx info 2025-03-31T16:22:21.803153Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-31T16:22:21.803191Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] no config, start with empty partitions and default config 2025-03-31T16:22:21.803229Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Txs.size=0, PlannedTxs.size=0 2025-03-31T16:22:21.803307Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:22:21.803375Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928138] doesn't have tx writes info 2025-03-31T16:22:21.804050Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [2:466:2406], now have 1 active actors on pipe 2025-03-31T16:22:21.804165Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2025-03-31T16:22:21.804360Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Config update version 3(current 0) received from actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-03-31T16:22:21.807148Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-03-31T16:22:21.807284Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:22:21.808068Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928138] Config 
applied version 3 actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-03-31T16:22:21.808203Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitConfigStep 2025-03-31T16:22:21.808612Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-31T16:22:21.808815Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] bootstrapping 1 [2:474:2412] 2025-03-31T16:22:21.810726Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Initializing completed. 2025-03-31T16:22:21.810808Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 1 generation 2 [2:474:2412] 2025-03-31T16:22:21.810871Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-31T16:22:21.810907Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateIdle] Proc ... EBUG: [PQ: 72057594037928138] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-31T16:22:23.191582Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] no config, start with empty partitions and default config 2025-03-31T16:22:23.191626Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Txs.size=0, PlannedTxs.size=0 2025-03-31T16:22:23.191669Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:22:23.191732Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928138] doesn't have tx writes info 2025-03-31T16:22:23.192406Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [4:465:2405], now have 1 active actors on pipe 2025-03-31T16:22:23.192476Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2025-03-31T16:22:23.192683Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Config update version 11(current 0) received from actor [4:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 11 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-03-31T16:22:23.194871Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 11 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-03-31T16:22:23.194999Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:22:23.195644Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928138] Config applied version 11 actor [4:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 
LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 11 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-03-31T16:22:23.195798Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitConfigStep 2025-03-31T16:22:23.196149Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-31T16:22:23.196385Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] bootstrapping 1 [4:473:2411] 2025-03-31T16:22:23.198456Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Initializing completed. 2025-03-31T16:22:23.198528Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 1 generation 2 [4:473:2411] 2025-03-31T16:22:23.198585Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-31T16:22:23.198641Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateIdle] Process pending events. Count 0 2025-03-31T16:22:23.199409Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [4:476:2413], now have 1 active actors on pipe 2025-03-31T16:22:23.213887Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-03-31T16:22:23.217944Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-03-31T16:22:23.218272Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-03-31T16:22:23.218345Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-31T16:22:23.218391Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] no config, start with empty partitions and default config 2025-03-31T16:22:23.218437Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-03-31T16:22:23.218489Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:22:23.218561Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-03-31T16:22:23.219299Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [4:525:2450], now have 1 active actors on pipe 2025-03-31T16:22:23.219438Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-03-31T16:22:23.219621Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Config update version 12(current 0) received from actor [4:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 12 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-03-31T16:22:23.222011Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 12 Partitions { 
PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-03-31T16:22:23.222116Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:22:23.222535Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139] Config applied version 12 actor [4:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 12 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-03-31T16:22:23.222625Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-03-31T16:22:23.222933Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-31T16:22:23.223136Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [4:533:2456] 2025-03-31T16:22:23.224633Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-03-31T16:22:23.224694Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [4:533:2456] 2025-03-31T16:22:23.224749Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-31T16:22:23.224787Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. Count 0 2025-03-31T16:22:23.225416Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [4:536:2458], now have 1 active actors on pipe 2025-03-31T16:22:23.226320Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [4:542:2461], now have 1 active actors on pipe 2025-03-31T16:22:23.226535Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [4:543:2462], now have 1 active actors on pipe 2025-03-31T16:22:23.226691Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [4:544:2462], now have 1 active actors on pipe 2025-03-31T16:22:23.237770Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [4:552:2469], now have 1 active actors on pipe 2025-03-31T16:22:23.263443Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-03-31T16:22:23.267608Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-03-31T16:22:23.267966Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-03-31T16:22:23.268017Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-31T16:22:23.268167Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-03-31T16:22:23.268544Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:22:23.268592Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-03-31T16:22:23.268681Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-03-31T16:22:23.268903Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-31T16:22:23.269076Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [4:609:2514] 2025-03-31T16:22:23.270403Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2025-03-31T16:22:23.271431Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2025-03-31T16:22:23.271774Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2025-03-31T16:22:23.272098Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2025-03-31T16:22:23.272274Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2025-03-31T16:22:23.272314Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-03-31T16:22:23.272347Z node 4 :PERSQUEUE INFO: [rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-31T16:22:23.272377Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-03-31T16:22:23.272420Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [4:609:2514] 2025-03-31T16:22:23.272479Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-31T16:22:23.272517Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. 
Count 0 2025-03-31T16:22:23.273227Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server disconnected, pipe [4:543:2462] destroyed 2025-03-31T16:22:23.273280Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server disconnected, pipe [4:542:2461] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetPartitionOffsetsResult { TopicResult { Topic: "rt3.dc1--topic1" PartitionResult { Partition: 0 StartOffset: 0 EndOffset: 0 ErrorCode: OK WriteTimestampEstimateMS: 0 } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic2" PartitionResult { Partition: 1 StartOffset: 0 EndOffset: 0 ErrorCode: OK WriteTimestampEstimateMS: 0 } PartitionResult { Partition: 2 ErrorCode: INITIALIZING ErrorReason: "partition is not ready yet" } ErrorCode: OK } } } >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_OtherPartition_Test [GOOD] Test command err: 2025-03-31T16:21:02.174043Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488009459393987354:2275];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:21:02.174108Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:21:02.205290Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488009459847053862:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:21:02.206361Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:21:02.348095Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-31T16:21:02.357803Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0015c0/r3tmp/tmpa9A3wt/pdisk_1.dat 2025-03-31T16:21:02.559344Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:21:02.559978Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:21:02.563938Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:21:02.565043Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:21:02.616728Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:21:02.618692Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:21:02.618793Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 12023, node 1 2025-03-31T16:21:02.626294Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:21:02.626338Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:21:02.628739Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:21:02.763824Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: 
/home/runner/.ya/build/build_root/0l5c/0015c0/r3tmp/yandexMFootS.tmp 2025-03-31T16:21:02.763871Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/0l5c/0015c0/r3tmp/yandexMFootS.tmp 2025-03-31T16:21:02.764129Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/0l5c/0015c0/r3tmp/yandexMFootS.tmp 2025-03-31T16:21:02.764300Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:21:02.982910Z INFO: TTestServer started on Port 32148 GrpcPort 12023 TClient is connected to server localhost:32148 PQClient connected to localhost:12023 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:21:03.282233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:21:03.334951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-03-31T16:21:05.343336Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009472278890224:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:21:05.343433Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009472278890207:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:21:05.343638Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:21:05.347516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-03-31T16:21:05.375759Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009472278890260:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:21:05.376545Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:21:05.376941Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710662, at schemeshard: 72057594046644480 2025-03-31T16:21:05.377088Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488009472278890229:2347], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-03-31T16:21:05.619498Z node 1 :TX_PROXY ERROR: Actor# [1:7488009472278890306:2815] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:21:05.649715Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7488009472731956072:2314], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:21:05.649949Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MzRmYTRkNjgtYWVmODgyNDQtNDNhNDBhNDktNzZmOWMzZGE=, ActorId: [2:7488009472731956054:2308], ActorState: ExecuteState, TraceId: 01jqpg3bsncqgjtfsrbppsgxh1, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:21:05.652025Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488009472278890324:2353], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:21:05.657536Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T16:21:05.659527Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTMwNDc5Ni02YWQyN2VjZi02ZTZhNjY2ZC00M2Q1MTVkNg==, ActorId: [1:7488009472278890197:2341], ActorState: ExecuteState, TraceId: 01jqpg3bnna98hvmptmptqmymr, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:21:05.659909Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T16:21:05.699584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:21:05.784131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:21:05.871017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-03-31T16:21:06.150134Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jqpg3c8s4dxedcta6xa99m1r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTRjZDgwMDUtMjRmNzk2ZjYtOWViNTEzOWUtMjcxMGYxNTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. 
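The "=== Init DC" step above seeds `/Root/PQ/Config/V2/Cluster`, the table the failing KiReadTable lookups expect. The statement, shown in one piece for reference (values exactly as they appear in this run):

    --!syntax_v1
    UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight)
    VALUES ("dc1", "localhost",                true,  true, 1000),
           ("dc2", "dc2.logbroker.yandex.net", false, true, 1000);

Once this seed lands, the harness keeps retrying CheckClustersList until the cluster tracker can read the table, which is where the "Ok" further below comes from.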
Subcribe to ClusterTracker from [1:7488009476573857996:3099] 2025-03-31T16:21:07.173410Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488009459393987354:2275];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:21:07.173492Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:21:07.205110Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488009459847053862:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:21:07.205182Z node 2 :METADATA_ ... 1T16:22:09.741524Z node 10 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=10&id=NDU3ZWZkZjYtMTMxZTZkOTgtMjc5NTU2M2MtMTRlMGQwYjQ=, ActorId: [10:7488009748149687500:2316], ActorState: ExecuteState, TraceId: 01jqpg5ah89q5pp2hrp93a99vy, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:22:09.742069Z node 10 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-03-31T16:22:09.996966Z node 9 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jqpg5amp65srrgpzmzhqvw7m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=MjBkYTg2NWItOTRhYTE1MmYtZDRlOTlkMjAtMjEzYTU0NWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [9:7488009754015414416:3092] 2025-03-31T16:22:10.092565Z node 9 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[9:7488009732540576277:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:22:10.092650Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:22:10.103329Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7488009730969817894:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:22:10.103416Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2025-03-31T16:22:15.504386Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:1, at schemeshard: 72057594046644480 2025-03-31T16:22:16.288258Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-03-31T16:22:17.000704Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 2025-03-31T16:22:17.768037Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710694:0, at schemeshard: 72057594046644480 2025-03-31T16:22:18.477868Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710698:0, at schemeshard: 72057594046644480 2025-03-31T16:22:19.184434Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710703:0, at schemeshard: 72057594046644480 Run query: --!syntax_v1 UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES (16261273835729377752, "Root", "00415F536F757263655F3130", 1743438140119, 1743438140119, 0, 13); 2025-03-31T16:22:20.232232Z node 9 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:22:20.232264Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:22:20.302606Z node 9 :KQP_EXECUTER ERROR: TxId: 281474976710709. Ctx: { TraceId: 01jqpg5mqm0gke7f366f0reapx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=NGIyMzc1MWYtNzZjNTNiYWEtNzVmYThiNS01YjhiMWZjYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-31T16:22:20.324075Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2025-03-31T16:22:20.324111Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-03-31T16:22:20.324125Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-03-31T16:22:20.324153Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7488009796965088982:4025] (SourceId=A_Source_10, PreferedPartition=1) GetOwnershipFast Partition=1 TabletId=1001 2025-03-31T16:22:20.324341Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 269877760, Sender [9:7488009796965088983:4025], Recipient [9:7488009775490251378:3368]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 1001 Status: OK ServerId: [9:7488009796965088982:4025] Leader: 1 Dead: 0 Generation: 1 VersionInfo: } 2025-03-31T16:22:20.324455Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 271188557, Sender [9:7488009796965088982:4025], Recipient [9:7488009775490251378:3368]: NKikimrPQ.TEvCheckPartitionStatusRequest Partition: 1 SourceId: "A_Source_10" 2025-03-31T16:22:20.324556Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateOwnershipFast, received event# 271188558, Sender [9:7488009775490251378:3368], Recipient [9:7488009796965088982:4025]: NKikimrPQ.TEvCheckPartitionStatusResponse Status: Active 2025-03-31T16:22:20.324600Z node 9 :PQ_PARTITION_CHOOSER TRACE: TPartitionChooser [9:7488009796965088982:4025] (SourceId=A_Source_10, PreferedPartition=1) InitTable: SourceId=A_Source_10 TopicsAreFirstClassCitizen=1 UseSrcIdMetaMappingInFirstClass=1 2025-03-31T16:22:20.324688Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 65543, Sender [9:7488009796965088982:4025], Recipient [9:7488009775490251378:3368]: NActors::TEvents::TEvPoison 2025-03-31T16:22:20.324785Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateInitTable, received event# 277020685, Sender [9:7488009732540576272:2070], Recipient [9:7488009796965088982:4025]: NKikimr::NMetadata::NProvider::TEvManagerPrepared 2025-03-31T16:22:20.324823Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7488009796965088982:4025] (SourceId=A_Source_10, PreferedPartition=1) StartKqpSession 2025-03-31T16:22:20.328435Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateCreateKqpSession, received event# 271646728, Sender [9:7488009732540576498:2280], Recipient [9:7488009796965088982:4025]: NKikimrKqp.TEvCreateSessionResponse Error: "" Response { SessionId: 
"ydb://session/3?node_id=9&id=ZGYzNzkxNmUtNTgzNDdmODQtNzdiZjJjNWUtNzU2YjliNTA=" NodeId: 9 } YdbStatus: SUCCESS ResourceExhausted: false 2025-03-31T16:22:20.328479Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7488009796965088982:4025] (SourceId=A_Source_10, PreferedPartition=1) Select from the table 2025-03-31T16:22:20.538403Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateSelect, received event# 271646721, Sender [9:7488009732540576498:2280], Recipient [9:7488009796965088982:4025]: NKikimrKqp.TEvQueryResponse Response { SessionId: "ydb://session/3?node_id=9&id=ZGYzNzkxNmUtNTgzNDdmODQtNzdiZjJjNWUtNzU2YjliNTA=" PreparedQuery: "21ffcc9b-c46bca66-170ef5db-199686d3" QueryParameters { Name: "$Hash" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Topic" Type { Kind: Data Data { Scheme: 4608 } } } QueryParameters { Name: "$SourceId" Type { Kind: Data Data { Scheme: 4608 } } } TxMeta { id: "01jqpg5n3he9y28vraq37qnsv7" } YdbResults { columns { name: "Partition" type { optional_type { item { type_id: UINT32 } } } } columns { name: "CreateTime" type { optional_type { item { type_id: UINT64 } } } } columns { name: "AccessTime" type { optional_type { item { type_id: UINT64 } } } } columns { name: "SeqNo" type { optional_type { item { type_id: UINT64 } } } } rows { items { uint32_value: 0 } items { uint64_value: 1743438140119 } items { uint64_value: 1743438140119 } items { uint64_value: 13 } } } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 131 2025-03-31T16:22:20.538645Z node 9 :PQ_PARTITION_CHOOSER TRACE: TPartitionChooser [9:7488009796965088982:4025] (SourceId=A_Source_10, PreferedPartition=1) Selected from table PartitionId=0 SeqNo=13 2025-03-31T16:22:20.538693Z node 9 :PQ_PARTITION_CHOOSER TRACE: TPartitionChooser [9:7488009796965088982:4025] (SourceId=A_Source_10, PreferedPartition=1) OnPartitionChosen Received TEvChooseError: 2025-03-31T16:22:20.538752Z node 9 :PQ_PARTITION_CHOOSER INFO: TPartitionChooser [9:7488009796965088982:4025] (SourceId=A_Source_10, PreferedPartition=1) ReplyError: MessageGroupId A_Source_10 is already bound to PartitionGroupId 1, but client provided 2. MessageGroupId->PartitionGroupId binding cannot be changed, either use another MessageGroupId, specify PartitionGroupId 1, or do not specify PartitionGroupId at all. MessageGroupId A_Source_10 is already bound to PartitionGroupId 1, but client provided 2. MessageGroupId->PartitionGroupId binding cannot be changed, either use another MessageGroupId, specify PartitionGroupId 1, or do not specify PartitionGroupId at all. Run query: --!syntax_v1 SELECT Partition, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash = 16261273835729377752 AND Topic = "Root" AND ProducerId = "00415F536F757263655F3130" 2025-03-31T16:22:20.750417Z node 9 :KQP_EXECUTER ERROR: TxId: 281474976710712. Ctx: { TraceId: 01jqpg5n4s38kw4z7vrrepb5qf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=M2ZlNmFjMWQtYjk3Zjc5YmItNTA3YmNlMTEtYmU4Nzk4MTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:22:21.210733Z node 9 :KQP_EXECUTER ERROR: ActorId: [9:7488009801260056407:2710] TxId: 281474976710713. Ctx: { TraceId: 01jqpg5njqb49wfpe8qhyhke7f, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=NGVmNDIxMGUtMzc1MjkzOTUtZGEyMzYzMGUtNjY1Nzg0Mzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 10 2025-03-31T16:22:21.210960Z node 9 :KQP_COMPUTE ERROR: SelfId: [9:7488009801260056411:2710], TxId: 281474976710713, task: 2. Ctx: { TraceId : 01jqpg5njqb49wfpe8qhyhke7f. SessionId : ydb://session/3?node_id=9&id=NGVmNDIxMGUtMzc1MjkzOTUtZGEyMzYzMGUtNjY1Nzg0Mzc=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [9:7488009801260056407:2710], status: UNAVAILABLE, reason: {
: Error: Terminate execution } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "no path \'/Root/PQ/\', Marker# PQ17" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "no path \'Root/PQ\', Marker# PQ150" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' has no balancer, Marker# PQ193" ErrorCode: UNKNOWN_TOPIC } |90.7%| [TA] $(B)/ydb/core/keyvalue/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnFailedGetAllTopicsRequest |90.7%| [TA] {RESULT} $(B)/ydb/core/keyvalue/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond [GOOD] Test command err: Assert failed: Check response: { Status: 130 ErrorReason: "Timeout while waiting for response, may be just slow, Marker# PQ16" ErrorCode: ERROR } 2025-03-31T16:22:23.111354Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-03-31T16:22:23.115476Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-03-31T16:22:23.115755Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] doesn't have tx info 2025-03-31T16:22:23.115799Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-31T16:22:23.115830Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-03-31T16:22:23.115883Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0 2025-03-31T16:22:23.115938Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:22:23.115991Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] doesn't have tx writes info 2025-03-31T16:22:23.116572Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:262:2254], now have 1 active actors on pipe 2025-03-31T16:22:23.116625Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-03-31T16:22:23.135076Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Config update version 1(current 0) received from actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-03-31T16:22:23.138169Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-03-31T16:22:23.138389Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: 
reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:22:23.139412Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] Config applied version 1 actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-03-31T16:22:23.139606Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-03-31T16:22:23.139997Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-31T16:22:23.140244Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] bootstrapping 0 [2:270:2260] 2025-03-31T16:22:23.142026Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Initializing completed. 2025-03-31T16:22:23.142084Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:270:2260] 2025-03-31T16:22:23.142132Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-31T16:22:23.142170Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-03-31T16:22:23.143102Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:273:2262], now have 1 active actors on pipe 2025-03-31T16:22:23.199365Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-03-31T16:22:23.204914Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-03-31T16:22:23.205313Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-03-31T16:22:23.205374Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-31T16:22:23.205417Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] no config, start with empty partitions and default config 2025-03-31T16:22:23.205466Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-03-31T16:22:23.205524Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:22:23.205587Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-03-31T16:22:23.206333Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:406:2361], now have 1 active actors on pipe 2025-03-31T16:22:23.206498Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-03-31T16:22:23.206743Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Config update version 2(current 0) received from actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-03-31T16:22:23.212549Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-03-31T16:22:23.212728Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:22:23.213534Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139] Config applied version 2 actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-03-31T16:22:23.213696Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-03-31T16:22:23.214140Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-31T16:22:23.214354Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [2:414:2367] 2025-03-31T16:22:23.216526Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 
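The "Config update version N(current 0)" → "Apply new config" → "Config applied" sequence above is the PERSQUEUE tablet accepting a configuration pushed through TEvPersQueue::TEvUpdateConfig; the rt3.* topics in these tests are configured directly this way rather than through DDL. On an SQL-managed topic the same knobs — retention (LifetimeSeconds) and per-partition size (MaxSizeInPartition: 104857600, i.e. 100 MB) — would be set at creation time. A minimal sketch, assuming current YQL CREATE TOPIC syntax and setting names (none of which appear in this log):

    --!syntax_v1
    CREATE TOPIC `my_topic` (
        CONSUMER my_consumer
    ) WITH (
        min_active_partitions = 1,            -- one partition, as in the test config
        retention_period = Interval("PT1H"),  -- counterpart of LifetimeSeconds
        retention_storage_mb = 100            -- counterpart of MaxSizeInPartition
    );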
2025-03-31T16:22:23.216620Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [2:414:2367] 2025-03-31T16:22:23.216684Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-31T16:22:23.216731Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. Count 0 2025-03-31T16:22:23.217606Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:417:2369], now have 1 active actors on pipe 2025-03-31T16:22:23.218822Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:423:2372], now have 1 active actors on pipe 2025-03-31T16:22:23.220525Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-03-31T16:22:23.220966Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:425:2373], now have 1 active actors on pipe 2025-03-31T16:22:23.221258Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server disconnected, pipe [2:423:2372] destroyed 2025-03-31T16:22:23.221502Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-03-31T16:22:23.222439Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server disconnected, pipe [2:425:2373] destroyed 2025-03-31T16:22:23.786641Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-03-31T16:22:23.790670Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-03-31T16:22:23.791046Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] doesn't have tx info 2025-03-31T16:22:23.791121Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-31T16:22:23.791171Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-03-31T16:22:23.791215Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0 2025-03-31T16:22:23.791266Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:22:23.791333Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928037] doesn't have tx writes info 2025-03-31T16:22:23.792188Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [3:259:2251], now have 1 active actors on pipe 2025-03-31T16:22:23.792278Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-03-31T16:22:23.792482Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Config update version 3(current 0) received from actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 3 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-03-31T16:22:23.795218Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 3 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-03-31T16:22:23.795339Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:22:23.795975Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928037] Config applied version 3 actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 L ... txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 6 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-03-31T16:22:23.931456Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-03-31T16:22:23.931883Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-31T16:22:23.932139Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [3:531:2454] 2025-03-31T16:22:23.934325Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 
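Each later "Config update version N(current M)" entry above carries the whole serialized config, the version counter apparently telling the tablet which update is newest. The DDL-side counterpart of such an update is ALTER TOPIC; continuing the sketch above under the same assumptions (syntax and setting names are not taken from this log):

    --!syntax_v1
    ALTER TOPIC `my_topic`
        SET (retention_storage_mb = 200);  -- would reach the tablet as a new config version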
2025-03-31T16:22:23.934428Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [3:531:2454] 2025-03-31T16:22:23.934504Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-31T16:22:23.934556Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. Count 0 2025-03-31T16:22:23.935595Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:534:2456], now have 1 active actors on pipe 2025-03-31T16:22:23.936889Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [3:540:2459], now have 1 active actors on pipe 2025-03-31T16:22:23.937037Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [3:541:2460], now have 1 active actors on pipe 2025-03-31T16:22:23.937319Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-03-31T16:22:23.937526Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-03-31T16:22:23.937707Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:542:2460], now have 1 active actors on pipe 2025-03-31T16:22:23.937828Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-03-31T16:22:23.948998Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:550:2467], now have 1 active actors on pipe 2025-03-31T16:22:23.984663Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-03-31T16:22:23.988870Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-03-31T16:22:23.989298Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-03-31T16:22:23.989356Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-31T16:22:23.989501Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-03-31T16:22:23.990336Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:22:23.990411Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-03-31T16:22:23.990527Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-03-31T16:22:23.990891Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-31T16:22:23.991191Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [3:607:2512] 2025-03-31T16:22:23.993350Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2025-03-31T16:22:23.994886Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2025-03-31T16:22:23.995304Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2025-03-31T16:22:23.995692Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2025-03-31T16:22:23.995980Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2025-03-31T16:22:23.996029Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-03-31T16:22:23.996073Z node 3 :PERSQUEUE INFO: [rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-31T16:22:23.996111Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-03-31T16:22:23.996170Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [3:607:2512] 2025-03-31T16:22:23.996230Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-31T16:22:23.996298Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. 
Count 0 2025-03-31T16:22:23.997287Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server disconnected, pipe [3:541:2460] destroyed 2025-03-31T16:22:23.997553Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server disconnected, pipe [3:540:2459] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetPartitionStatusResult { TopicResult { Topic: "rt3.dc1--topic1" PartitionResult { Partition: 0 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 50000000 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 38 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 38 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 50000000 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 } Generation: 2 Cookie: 1 ScaleStatus: NORMAL } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic2" PartitionResult { Partition: 1 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 50000000 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 77 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 77 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 50000000 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 } Generation: 2 Cookie: 1 ScaleStatus: NORMAL } PartitionResult { Partition: 2 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 50000000 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 91 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 91 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 50000000 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 
0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 } Generation: 2 Cookie: 1 ScaleStatus: NORMAL } ErrorCode: OK } } } >> KqpExplain::AggGroupLimit [GOOD] >> KqpExplain::ComplexJoin >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnFailedGetAllTopicsRequest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnFailedGetAllTopicsRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnNoBalancerInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnEmptyTopicName >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnFailedGetAllTopicsRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::HandlesTimeout >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesTimeout >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnEmptyTopicName [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnDuplicatedTopicName >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnNoBalancerInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnFailedGetAllTopicsRequest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::HandlesTimeout [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesTimeout [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::SuccessfullyPassesResponsesFromTablets >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnDuplicatedTopicName [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnEmptyTopicName >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnFailedGetAllTopicsRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::SuccessfullyReplies >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnDuplicatedTopicName >> Normalizers::CleanEmptyPortionsNormalizer [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest [GOOD] >> 
TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnEmptyTopicName [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnNoBalancerInGetNodeRequest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnDuplicatedTopicName [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "no path \'/Root/PQ/\', Marker# PQ17" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' has no balancer, Marker# PQ193" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "TopicRequest must have Topic field." ErrorCode: BAD_REQUEST } Assert failed: Check response: { Status: 128 ErrorReason: "multiple TopicRequest for topic \'rt3.dc1--topic1\'" ErrorCode: BAD_REQUEST } >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::SuccessfullyReplies [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::SuccessfullyPassesResponsesFromTablets [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnDuplicatedTopicName [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnDuplicatedPartition >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnEmptyTopicName >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnEmptyTopicName [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "no path \'/Root/PQ/\', Marker# PQ17" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "no path \'Root/PQ\', Marker# PQ150" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' has no balancer, Marker# PQ193" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "TopicRequest must have Topic field." 
ErrorCode: BAD_REQUEST } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::SuccessfullyReplies [GOOD] Test command err: Assert failed: Check response: { Status: 130 ErrorReason: "Timeout while waiting for response, may be just slow, Marker# PQ16" ErrorCode: ERROR } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' is not created, Marker# PQ94" ErrorCode: UNKNOWN_TOPIC } >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnDuplicatedPartition [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnEmptyTopicName [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailesOnNotATopic ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::CleanEmptyPortionsNormalizer [GOOD] Test command err: 2025-03-31T16:22:18.058494Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:22:18.157544Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:22:18.182729Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:22:18.183004Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:22:18.191572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=EmptyPortionsCleaner; 2025-03-31T16:22:18.191807Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=NO_VALUE_OPTIONAL; 2025-03-31T16:22:18.191984Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:22:18.192212Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:22:18.192341Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:22:18.192465Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:22:18.192612Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:22:18.192725Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:22:18.192843Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:22:18.193001Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:22:18.193115Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:22:18.193238Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:22:18.193348Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:22:18.224821Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:22:18.225010Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=EmptyPortionsCleaner; 2025-03-31T16:22:18.225060Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=NO_VALUE_OPTIONAL;type=NO_VALUE_OPTIONAL; 2025-03-31T16:22:18.225356Z node 1 :TX_COLUMNSHARD CRIT: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_empty.cpp:286;tasks_for_remove=0; 2025-03-31T16:22:18.225476Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=EmptyPortionsCleaner;id=NO_VALUE_OPTIONAL; 2025-03-31T16:22:18.225547Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Granules;id=Granules; 2025-03-31T16:22:18.225591Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=1;type=Granules; 2025-03-31T16:22:18.225726Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:22:18.225827Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:22:18.225883Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:22:18.225958Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=2;type=Chunks; 2025-03-31T16:22:18.226067Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:22:18.226129Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:22:18.226168Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:22:18.226196Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=4;type=TablesCleaner; 2025-03-31T16:22:18.226407Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:22:18.226469Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:22:18.226514Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:22:18.226543Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=6;type=CleanGranuleId; 2025-03-31T16:22:18.226629Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:22:18.226679Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:22:18.226717Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:22:18.226764Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:22:18.226844Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:22:18.226882Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:22:18.226914Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:22:18.226975Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:22:18.227031Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:22:18.227062Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:22:18.227482Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=42; 2025-03-31T16:22:18.227583Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=39; 2025-03-31T16:22:18.227679Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=51; 2025-03-31T16:22:18.227758Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=34; 2025-03-31T16:22:18.227912Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:22:18.227960Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:22:18.228002Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:22:18.228206Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:22:18.228261Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:22:18.228290Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:22:18.228461Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:22:18.228510Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:22:18.228563Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;eve ... 
node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:498:2500];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:201;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-03-31T16:22:26.280797Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:498:2500];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:84;event=TEvTaskProcessedResult; 2025-03-31T16:22:26.280836Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:498:2500];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=merge.cpp:74;event=DoApply;interval_idx=0; 2025-03-31T16:22:26.280870Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:498:2500];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=2; 2025-03-31T16:22:26.280911Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:498:2500];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=20048;merger=0;interval_id=2; 2025-03-31T16:22:26.280947Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:498:2500];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=scanner.cpp:65;event=intervals_finished; 2025-03-31T16:22:26.281016Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:498:2500];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-03-31T16:22:26.281039Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:498:2500];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=1;count=20048;finished=1; 2025-03-31T16:22:26.281062Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:498:2500];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:201;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-03-31T16:22:26.281197Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:498:2500];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-31T16:22:26.281323Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:498:2500];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:1;records_count:20048;schema=key1: uint64 key2: uint64 field: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-03-31T16:22:26.281352Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:498:2500];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-31T16:22:26.281441Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:498:2500];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:232;stage=ready 
result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;);columns=3;rows=20048; 2025-03-31T16:22:26.281489Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:498:2500];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:252;stage=data_format;batch_size=2405760;num_rows=20048;batch_columns=key1,key2,field; 2025-03-31T16:22:26.281612Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:498:2500];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:496:2499];bytes=2405760;rows=20048;faults=0;finished=0;fault=0;schema=key1: uint64 key2: uint64 field: string; 2025-03-31T16:22:26.281730Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:498:2500];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-03-31T16:22:26.281809Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:498:2500];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-03-31T16:22:26.281926Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:498:2500];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-03-31T16:22:26.282868Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:498:2500];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-31T16:22:26.283005Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:498:2500];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-03-31T16:22:26.283132Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:498:2500];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-03-31T16:22:26.283174Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:498:2500] finished for tablet 9437184 2025-03-31T16:22:26.283607Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:498:2500];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:416;event=scan_finish;compute_actor_id=[1:496:2499];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.003},{"events":["f_ack","l_task_result"],"t":0.163},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.165}],"full":{"a":1743438146117544,"name":"_full_task","f":1743438146117544,"d_finished":0,"c":0,"l":1743438146283231,"d":165687},"events":[{"name":"bootstrap","f":1743438146117804,"d_finished":2805,"c":1,"l":1743438146120609,"d":2805},{"a":1743438146282840,"name":"ack","f":1743438146281179,"d_finished":771,"c":1,"l":1743438146281950,"d":1162},{"a":1743438146282812,"name":"processing","f":1743438146120704,"d_finished":78069,"c":9,"l":1743438146281952,"d":78488},{"name":"ProduceResults","f":1743438146119412,"d_finished":2126,"c":12,"l":1743438146283157,"d":2126},{"a":1743438146283163,"name":"Finish","f":1743438146283163,"d_finished":0,"c":0,"l":1743438146283231,"d":68},{"name":"task_result","f":1743438146120722,"d_finished":77153,"c":8,"l":1743438146281096,"d":77153}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-03-31T16:22:26.283699Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:498:2500];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:496:2499];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-31T16:22:26.284109Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:498:2500];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:371;event=scan_finished;compute_actor_id=[1:496:2499];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.003},{"events":["f_ack","l_task_result"],"t":0.163},{"events":["l_ProduceResults","f_Finish"],"t":0.165},{"events":["l_ack","l_processing","l_Finish"],"t":0.166}],"full":{"a":1743438146117544,"name":"_full_task","f":1743438146117544,"d_finished":0,"c":0,"l":1743438146283746,"d":166202},"events":[{"name":"bootstrap","f":1743438146117804,"d_finished":2805,"c":1,"l":1743438146120609,"d":2805},{"a":1743438146282840,"name":"ack","f":1743438146281179,"d_finished":771,"c":1,"l":1743438146281950,"d":1677},{"a":1743438146282812,"name":"processing","f":1743438146120704,"d_finished":78069,"c":9,"l":1743438146281952,"d":79003},{"name":"ProduceResults","f":1743438146119412,"d_finished":2126,"c":12,"l":1743438146283157,"d":2126},{"a":1743438146283163,"name":"Finish","f":1743438146283163,"d_finished":0,"c":0,"l":1743438146283746,"d":583},{"name":"task_result","f":1743438146120722,"d_finished":77153,"c":8,"l":1743438146281096,"d":77153}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-03-31T16:22:26.284183Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:498:2500];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-31T16:22:26.116873Z;index_granules=0;index_portions=1;index_batches=939;committed_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=2589280;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=2589280;selected_rows=0; 2025-03-31T16:22:26.284225Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:498:2500];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-31T16:22:26.284472Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:498:2500];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "no path \'/Root/PQ/\', Marker# PQ17" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "no path \'Root/PQ\', Marker# PQ150" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' has no balancer, Marker# PQ193" ErrorCode: UNKNOWN_TOPIC } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnDuplicatedPartition 
[GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "path \'Root/PQ\' has unknown/invalid root prefix \'Root\', Marker# PQ14" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'Root/PQ\' describe error, Status# LookupError, Marker# PQ1" ErrorCode: ERROR } Assert failed: Check response: { Status: 128 ErrorReason: "multiple TopicRequest for topic \'rt3.dc1--topic1\'" ErrorCode: BAD_REQUEST } Assert failed: Check response: { Status: 128 ErrorReason: "multiple partition 2 in TopicRequest for topic \'rt3.dc1--topic2\'" ErrorCode: BAD_REQUEST } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnEmptyTopicName [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "path \'Root/PQ\' has unknown/invalid root prefix \'Root\', Marker# PQ14" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'Root/PQ\' describe error, Status# LookupError, Marker# PQ1" ErrorCode: ERROR } Assert failed: Check response: { Status: 128 ErrorReason: "empty topic in GetTopicMetadata request" ErrorCode: BAD_REQUEST } >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailesOnNotATopic [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailesOnNotATopic [GOOD] Test command err: Assert failed: Check response: { Status: 130 ErrorReason: "Timeout while waiting for response, may be just slow, Marker# PQ16" ErrorCode: ERROR } 2025-03-31T16:22:26.688702Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-03-31T16:22:26.691496Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-03-31T16:22:26.691737Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] doesn't have tx info 2025-03-31T16:22:26.691771Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-31T16:22:26.691813Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-03-31T16:22:26.691849Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0 2025-03-31T16:22:26.691882Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:22:26.691931Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] doesn't have tx writes info 2025-03-31T16:22:26.692415Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:261:2253], now have 1 active actors on pipe 2025-03-31T16:22:26.692486Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-03-31T16:22:26.705961Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Config update version 1(current 0) received from actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-03-31T16:22:26.708671Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-03-31T16:22:26.708816Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:22:26.709556Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] Config applied version 1 actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-03-31T16:22:26.709703Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-03-31T16:22:26.710014Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-31T16:22:26.710242Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] bootstrapping 0 [2:269:2259] 2025-03-31T16:22:26.712295Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Initializing completed. 
2025-03-31T16:22:26.712364Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:269:2259] 2025-03-31T16:22:26.712406Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-31T16:22:26.712439Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-03-31T16:22:26.713191Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:272:2261], now have 1 active actors on pipe 2025-03-31T16:22:26.757135Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-03-31T16:22:26.761666Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-03-31T16:22:26.762018Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-03-31T16:22:26.762085Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-31T16:22:26.762124Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] no config, start with empty partitions and default config 2025-03-31T16:22:26.762175Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-03-31T16:22:26.762217Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:22:26.762274Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-03-31T16:22:26.762931Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:405:2360], now have 1 active actors on pipe 2025-03-31T16:22:26.763044Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-03-31T16:22:26.763227Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Config update version 2(current 0) received from actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-03-31T16:22:26.765278Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-03-31T16:22:26.765392Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:22:26.765985Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139] Config applied version 2 actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-03-31T16:22:26.766071Z node 2 :PERSQUEUE DEBUG: 
[rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-03-31T16:22:26.766334Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-31T16:22:26.766488Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [2:413:2366] 2025-03-31T16:22:26.767851Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-03-31T16:22:26.767911Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [2:413:2366] 2025-03-31T16:22:26.767953Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-31T16:22:26.768002Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. Count 0 2025-03-31T16:22:26.768548Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:416:2368], now have 1 active actors on pipe 2025-03-31T16:22:26.769781Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:424:2371], now have 1 active actors on pipe 2025-03-31T16:22:26.769938Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:426:2372], now have 1 active actors on pipe 2025-03-31T16:22:26.770106Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server disconnected, pipe [2:424:2371] destroyed 2025-03-31T16:22:26.770338Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server disconnected, pipe [2:426:2372] destroyed 2025-03-31T16:22:27.319040Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-03-31T16:22:27.322253Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-03-31T16:22:27.322561Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] doesn't have tx info 2025-03-31T16:22:27.322613Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-31T16:22:27.322651Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-03-31T16:22:27.322689Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0 2025-03-31T16:22:27.322735Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:22:27.322789Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928037] doesn't have tx writes info 2025-03-31T16:22:27.323444Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [3:259:2251], now have 1 active actors on pipe 2025-03-31T16:22:27.323514Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-03-31T16:22:27.323729Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Config update version 3(current 0) received from actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 3 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-03-31T16:22:27.325854Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 3 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-03-31T16:22:27.325949Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:22:27.326476Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928037] Config applied version 3 actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 3 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-03-31T16:22:27.326603Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-03-31T16:22:27.326850Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-31T16:22:27.326981Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] bootstrapping 0 [3:267:2257] 2025-03-31T16:22:27.328668Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Initializing c ... 
xId 0, ExecStep 0, ExecTxId 0 2025-03-31T16:22:27.413988Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] no config, start with empty partitions and default config 2025-03-31T16:22:27.414031Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Txs.size=0, PlannedTxs.size=0 2025-03-31T16:22:27.414079Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:22:27.414142Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928138] doesn't have tx writes info 2025-03-31T16:22:27.415011Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [3:463:2403], now have 1 active actors on pipe 2025-03-31T16:22:27.415147Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2025-03-31T16:22:27.415331Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Config update version 5(current 0) received from actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 5 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-03-31T16:22:27.418019Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 5 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-03-31T16:22:27.418161Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:22:27.419110Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928138] Config applied version 5 actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 5 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-03-31T16:22:27.419241Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitConfigStep 2025-03-31T16:22:27.419655Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-31T16:22:27.419967Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] bootstrapping 1 [3:471:2409] 2025-03-31T16:22:27.421974Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Initializing completed. 2025-03-31T16:22:27.422067Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 1 generation 2 [3:471:2409] 2025-03-31T16:22:27.422126Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-31T16:22:27.422175Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateIdle] Process pending events. 
Count 0 2025-03-31T16:22:27.423125Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [3:474:2411], now have 1 active actors on pipe 2025-03-31T16:22:27.440831Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-03-31T16:22:27.445150Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-03-31T16:22:27.445514Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-03-31T16:22:27.445566Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-31T16:22:27.445625Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] no config, start with empty partitions and default config 2025-03-31T16:22:27.445664Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-03-31T16:22:27.445709Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:22:27.445772Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-03-31T16:22:27.446511Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:523:2448], now have 1 active actors on pipe 2025-03-31T16:22:27.446588Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-03-31T16:22:27.446768Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Config update version 6(current 0) received from actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 6 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-03-31T16:22:27.449781Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 6 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-03-31T16:22:27.449938Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:22:27.450899Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139] Config applied version 6 actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 6 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-03-31T16:22:27.451051Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-03-31T16:22:27.451461Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-31T16:22:27.451651Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [3:531:2454] 2025-03-31T16:22:27.453858Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 
2025-03-31T16:22:27.453942Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [3:531:2454] 2025-03-31T16:22:27.454006Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-31T16:22:27.454058Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. Count 0 2025-03-31T16:22:27.455003Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:534:2456], now have 1 active actors on pipe 2025-03-31T16:22:27.456671Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [3:542:2459], now have 1 active actors on pipe 2025-03-31T16:22:27.456815Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [3:543:2460], now have 1 active actors on pipe 2025-03-31T16:22:27.456924Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:544:2460], now have 1 active actors on pipe 2025-03-31T16:22:27.467713Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:549:2464], now have 1 active actors on pipe 2025-03-31T16:22:27.488549Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-03-31T16:22:27.491431Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-03-31T16:22:27.491788Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-03-31T16:22:27.491835Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-31T16:22:27.491986Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-03-31T16:22:27.492827Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:22:27.492878Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-03-31T16:22:27.493001Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-03-31T16:22:27.493336Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-31T16:22:27.493581Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [3:606:2509] 2025-03-31T16:22:27.495573Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2025-03-31T16:22:27.496946Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2025-03-31T16:22:27.497282Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2025-03-31T16:22:27.497584Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2025-03-31T16:22:27.497847Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2025-03-31T16:22:27.497920Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-03-31T16:22:27.497961Z node 3 :PERSQUEUE INFO: [rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2025-03-31T16:22:27.497998Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-03-31T16:22:27.498049Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [3:606:2509] 2025-03-31T16:22:27.498102Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-31T16:22:27.498146Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. Count 0 2025-03-31T16:22:27.498987Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server disconnected, pipe [3:543:2460] destroyed 2025-03-31T16:22:27.499250Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server disconnected, pipe [3:542:2459] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetPartitionLocationsResult { TopicResult { Topic: "rt3.dc1--topic2" PartitionLocation { Partition: 1 Host: "::1" HostId: 3 ErrorCode: OK } PartitionLocation { Partition: 2 Host: "::1" HostId: 3 ErrorCode: OK } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic1" PartitionLocation { Partition: 0 Host: "::1" HostId: 3 ErrorCode: OK } ErrorCode: OK } } } Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnEmptyTopicName >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnEmptyTopicName [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnDuplicatedTopicName >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnDuplicatedTopicName [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnDuplicatedPartition ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] Test command err: 2025-03-31T16:22:28.865729Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-03-31T16:22:28.871074Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-03-31T16:22:28.871451Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037] doesn't have tx info 2025-03-31T16:22:28.871498Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-31T16:22:28.871537Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-03-31T16:22:28.871592Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0 2025-03-31T16:22:28.871641Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:22:28.871709Z node 1 :PERSQUEUE INFO: [PQ: 72057594037928037] doesn't have tx writes info 2025-03-31T16:22:28.872503Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [1:259:2251], now have 1 active actors on pipe 2025-03-31T16:22:28.872638Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-03-31T16:22:28.888054Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Config update version 1(current 0) received from actor [1:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-03-31T16:22:28.891414Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-03-31T16:22:28.891592Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:22:28.892767Z node 1 :PERSQUEUE INFO: [PQ: 72057594037928037] Config applied version 1 actor [1:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-03-31T16:22:28.892959Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-03-31T16:22:28.893483Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-31T16:22:28.893907Z node 1 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] bootstrapping 0 [1:267:2257] 2025-03-31T16:22:28.896801Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Initializing completed. 
2025-03-31T16:22:28.896895Z node 1 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [1:267:2257] 2025-03-31T16:22:28.896959Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-31T16:22:28.897008Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-03-31T16:22:28.897996Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [1:270:2259], now have 1 active actors on pipe 2025-03-31T16:22:28.961702Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Handle TEvInterconnect::TEvNodeInfo 2025-03-31T16:22:28.967054Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-03-31T16:22:28.967472Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137] doesn't have tx info 2025-03-31T16:22:28.967524Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-31T16:22:28.967568Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137] no config, start with empty partitions and default config 2025-03-31T16:22:28.967629Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Txs.size=0, PlannedTxs.size=0 2025-03-31T16:22:28.967683Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:22:28.967750Z node 1 :PERSQUEUE INFO: [PQ: 72057594037928137] doesn't have tx writes info 2025-03-31T16:22:28.968618Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137] server connected, pipe [1:405:2360], now have 1 active actors on pipe 2025-03-31T16:22:28.968686Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Handle TEvPersQueue::TEvUpdateConfig 2025-03-31T16:22:28.968879Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Config update version 2(current 0) received from actor [1:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-03-31T16:22:28.971532Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-03-31T16:22:28.971692Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:22:28.972693Z node 1 :PERSQUEUE INFO: [PQ: 72057594037928137] Config applied version 2 actor [1:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-03-31T16:22:28.972861Z node 1 :PERSQUEUE DEBUG: 
[rt3.dc1--topic2:0:Initializer] Start initializing step TInitConfigStep 2025-03-31T16:22:28.973314Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-31T16:22:28.973592Z node 1 :PERSQUEUE INFO: [PQ: 72057594037928137, Partition: 0, State: StateInit] bootstrapping 0 [1:413:2366] 2025-03-31T16:22:28.975955Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--topic2:0:Initializer] Initializing completed. 2025-03-31T16:22:28.976034Z node 1 :PERSQUEUE INFO: [PQ: 72057594037928137, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 0 generation 2 [1:413:2366] 2025-03-31T16:22:28.976137Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-31T16:22:28.976189Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-03-31T16:22:28.977229Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137] server connected, pipe [1:416:2368], now have 1 active actors on pipe 2025-03-31T16:22:29.005671Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvInterconnect::TEvNodeInfo 2025-03-31T16:22:29.011534Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-03-31T16:22:29.011904Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928138] doesn't have tx info 2025-03-31T16:22:29.011989Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928138] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-31T16:22:29.012044Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928138] no config, start with empty partitions and default config 2025-03-31T16:22:29.012092Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Txs.size=0, PlannedTxs.size=0 2025-03-31T16:22:29.012159Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:22:29.012260Z node 1 :PERSQUEUE INFO: [PQ: 72057594037928138] doesn't have tx writes info 2025-03-31T16:22:29.013051Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [1:465:2405], now have 1 active actors on pipe 2025-03-31T16:22:29.013222Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2025-03-31T16:22:29.013450Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Config update version 3(current 0) received from actor [1:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-03-31T16:22:29.017288Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-03-31T16:22:29.017434Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:22:29.018367Z node 1 :PERSQUEUE INFO: [PQ: 72057594037928138] Config 
applied version 3 actor [1:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-03-31T16:22:29.018530Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitConfigStep 2025-03-31T16:22:29.018997Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-31T16:22:29.019320Z node 1 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] bootstrapping 1 [1:473:2411] 2025-03-31T16:22:29.021636Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Initializing completed. 2025-03-31T16:22:29.021719Z node 1 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 1 generation 2 [1:473:2411] 2025-03-31T16:22:29.021829Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-31T16:22:29.021907Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateIdle] Process pending events. Count 0 2025-03-31T16:22:29.022868Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [1:476:2413], ... SQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2025-03-31T16:22:30.453472Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Config update version 11(current 0) received from actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 11 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-03-31T16:22:30.455961Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 11 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-03-31T16:22:30.456102Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:22:30.456742Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928138] Config applied version 11 actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 11 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-03-31T16:22:30.456865Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitConfigStep 2025-03-31T16:22:30.457226Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-31T16:22:30.457435Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] bootstrapping 1 [3:470:2408] 
2025-03-31T16:22:30.459574Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Initializing completed. 2025-03-31T16:22:30.459656Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 1 generation 2 [3:470:2408] 2025-03-31T16:22:30.459720Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-31T16:22:30.459778Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateIdle] Process pending events. Count 0 2025-03-31T16:22:30.460665Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [3:473:2410], now have 1 active actors on pipe 2025-03-31T16:22:30.477394Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-03-31T16:22:30.480778Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-03-31T16:22:30.481098Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-03-31T16:22:30.481156Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-31T16:22:30.481204Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] no config, start with empty partitions and default config 2025-03-31T16:22:30.481247Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-03-31T16:22:30.481297Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:22:30.481366Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-03-31T16:22:30.482065Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:522:2447], now have 1 active actors on pipe 2025-03-31T16:22:30.482138Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-03-31T16:22:30.482333Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Config update version 12(current 0) received from actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 12 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-03-31T16:22:30.484936Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 12 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-03-31T16:22:30.485074Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:22:30.485680Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139] Config applied version 12 actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 12 Partitions { PartitionId: 2 
} AllPartitions { PartitionId: 2 } 2025-03-31T16:22:30.485800Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-03-31T16:22:30.486171Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-31T16:22:30.486393Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [3:530:2453] 2025-03-31T16:22:30.488500Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-03-31T16:22:30.488573Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [3:530:2453] 2025-03-31T16:22:30.488651Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-31T16:22:30.488706Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. Count 0 2025-03-31T16:22:30.489451Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:533:2455], now have 1 active actors on pipe 2025-03-31T16:22:30.491269Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [3:542:2458], now have 1 active actors on pipe 2025-03-31T16:22:30.491835Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [3:545:2459], now have 1 active actors on pipe 2025-03-31T16:22:30.491903Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] server connected, pipe [3:544:2459], now have 1 active actors on pipe 2025-03-31T16:22:30.491997Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:546:2459], now have 1 active actors on pipe 2025-03-31T16:22:30.492765Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:559:2470], now have 1 active actors on pipe 2025-03-31T16:22:30.517543Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-03-31T16:22:30.520475Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-03-31T16:22:30.520855Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-03-31T16:22:30.520911Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-31T16:22:30.521086Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-03-31T16:22:30.521593Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:22:30.521647Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-03-31T16:22:30.521779Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-03-31T16:22:30.522167Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-31T16:22:30.522382Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [3:616:2515] 2025-03-31T16:22:30.524583Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2025-03-31T16:22:30.526117Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2025-03-31T16:22:30.526441Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2025-03-31T16:22:30.526755Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2025-03-31T16:22:30.526982Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2025-03-31T16:22:30.527031Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-03-31T16:22:30.527119Z node 3 :PERSQUEUE INFO: [rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-31T16:22:30.527166Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-03-31T16:22:30.527221Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [3:616:2515] 2025-03-31T16:22:30.527281Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-31T16:22:30.527338Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. 
Count 0
2025-03-31T16:22:30.528341Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] server disconnected, pipe [3:544:2459] destroyed
2025-03-31T16:22:30.528442Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server disconnected, pipe [3:545:2459] destroyed
2025-03-31T16:22:30.528482Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server disconnected, pipe [3:542:2458] destroyed
RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetReadSessionsInfoResult { TopicResult { Topic: "rt3.dc1--topic1" PartitionResult { Partition: 0 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 3 ErrorCode: OK } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic2" PartitionResult { Partition: 0 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 3 ErrorCode: OK } PartitionResult { Partition: 1 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 3 ErrorCode: OK } PartitionResult { Partition: 2 ErrorCode: INITIALIZING ErrorReason: "tablet for partition is not running" } ErrorCode: OK } } }
>> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnDuplicatedPartition [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnDuplicatedPartition [GOOD]
Test command err:
Assert failed: Check response: { Status: 128 ErrorReason: "topic \'Root/PQ\' describe error, Status# LookupError, Marker# PQ1" ErrorCode: ERROR }
Assert failed: Check response: { Status: 128 ErrorReason: "TopicRequest must have Topic field." ErrorCode: BAD_REQUEST }
Assert failed: Check response: { Status: 128 ErrorReason: "multiple TopicRequest for topic \'rt3.dc1--topic1\'" ErrorCode: BAD_REQUEST }
Assert failed: Check response: { Status: 128 ErrorReason: "multiple partition 2 in TopicRequest for topic \'rt3.dc1--topic2\'" ErrorCode: BAD_REQUEST }
>> DataStreams::TestNonChargeableUser
>> DataStreams::TestDeleteStream
>> DataStreams::TestControlPlaneAndMeteringData
>> DataStreams::TestGetShardIterator
>> DataStreams::TestGetRecordsStreamWithSingleShard
>> DataStreams::TestUpdateStorage
|90.8%| [TA] $(B)/ydb/core/client/server/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|90.8%| [TA] {RESULT} $(B)/ydb/core/client/server/ut/test-results/unittest/{meta.json ... results_accumulator.log}
>> DataStreams::TestUpdateStream
>> KqpExplain::ComplexJoin [GOOD]
>> DataStreams::TestPutRecordsOfAnauthorizedUser
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::ComplexJoin [GOOD]
Test command err:
2025-03-31T16:20:06.633328Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488009220813382778:2073];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:20:06.633432Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a65/r3tmp/tmpoLLLQC/pdisk_1.dat
2025-03-31T16:20:06.888587Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 14875, node 1
2025-03-31T16:20:06.970821Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:20:06.970923Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:20:06.973808Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-31T16:20:06.975895Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T16:20:06.975918Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T16:20:06.975925Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T16:20:06.976056Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
2025-03-31T16:20:07.023628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480
TClient is connected to server localhost:20201
2025-03-31T16:20:07.240037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
2025-03-31T16:20:07.254509Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488009223858901837:2071];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:20:07.254594Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Database/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
2025-03-31T16:20:07.272491Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:20:07.272558Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:20:07.274866Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2
2025-03-31T16:20:07.275746Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-31T16:20:07.303226Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2
2025-03-31T16:20:07.321794Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed
2025-03-31T16:20:07.322054Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed
2025-03-31T16:20:07.322170Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:20:07.322286Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:20:07.322352Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:20:07.322435Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:20:07.322528Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:20:07.322626Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:20:07.322706Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:20:07.373886Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:07.374003Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:07.377412Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:20:07.403774Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-31T16:20:07.403858Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-31T16:20:07.452939Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-31T16:20:07.452994Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-31T16:20:07.453243Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-31T16:20:07.453296Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-31T16:20:07.453333Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-31T16:20:07.453352Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-31T16:20:07.453399Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-31T16:20:07.453437Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-31T16:20:07.457369Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7488009223858902303:2242] 2025-03-31T16:20:07.457404Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-31T16:20:07.457469Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:20:07.458183Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-31T16:20:07.464131Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-31T16:20:07.464156Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-31T16:20:07.464196Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-31T16:20:07.469268Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-31T16:20:07.479919Z node 2 :TX_PROXY ERROR: Actor# [2:7488009223858902453:2341] txid# 281474976720657, issues: { message: "Schemeshard not available" severity: 1 } 2025-03-31T16:20:07.484379Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2025-03-31T16:20:07.486122Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-31T16:20:07.486163Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:7488009223858902545:2330], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-31T16:20:07.488170Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7488009223858902551:2384] 2025-03-31T16:20:07.488233Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:7488009223858902551:2384], schemeshard id = 72075186224037897 2025-03-31T16:20:07.566817Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-31T16:20:07.571434Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:1, at schemeshard: 72075186224037897 2025-03-31T16:20:07.575690Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720658 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-31T16:20:07.575732Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720658 2025-03-31T16:20:07.739293Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720658. Doublechecking... 2025-03-31T16:20:07.855857Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-31T16:20:09.463139Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009233698285911:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:09.463291Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:09.771155Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72075186224037897 2025-03-31T16:20:09.965067Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;self_id=[2:7488009232448837552:2355];tablet_id=72075186224037910;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:20:09.965363Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;self_id=[2:7488009232448837552:2355];tablet_id=72075186224037910;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:20:09.965659Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;self_id=[2:7488009232448837552:2355];tablet_id=72075186224037910;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:20:09.965808Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;self_id=[2:7488009232448837552:2355];tablet_id=72075186224037910;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:20:09.965944Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;self_id=[2:7488009232448837552:2355];tablet_id=72075186224037910;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:20:09.966056Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;self_id=[2:7488009232448837552:2355];tablet_id=72075186224037910;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:20:09.966136Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;self_id=[2:7488009232448837552:2355];tablet_id=72075186224037910;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:20:09.966303Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;self_id=[2:7488009232448837552:2355];tablet_id=72075186224037910;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:20:09.966426Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;self_id=[2:7488009232448837552:2355];tablet_id=72075186224037910;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:20:09.966534Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;self_id=[2:7488009232448837552:2355];tablet_id=72075186224037910;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;descri ... 
81474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:22:23.519235Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7488009788155884275:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:22:23.519358Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["Logs"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1},{"InternalOperatorId":1}],"GroupBy":"item.App","Aggregation":"{MAX(item.Message),MIN(item.Message)}","Name":"Aggregate","Phase":"Intermediate"},{"Inputs":[{"InternalOperatorId":2}],"E-Rows":"No estimate","Predicate":"item.Ts \u003E 1 AND item.Ts \u003C= 4 OR item.App == \"ydb\"","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["App (-∞, +∞)","Ts (-∞, +∞)","Host (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/Logs","E-Rows":"No estimate","Table":"Logs","ReadColumns":["App","Message","Ts"],"E-Cost":"No estimate"}],"Node Type":"Aggregate-Filter-TableFullScan"}],"Node Type":"HashShuffle","KeyColumns":["App"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/Logs","reads":[{"columns":["App","Message","Ts"],"scan_by":["App (-∞, +∞)","Ts (-∞, +∞)","Host (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":8,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["App (-∞, +∞)","Ts (-∞, +∞)","Host (-∞, +∞)"],"Name":"TableFullScan","Path":"\/Root\/Logs","E-Rows":"No estimate","Table":"Logs","ReadColumns":["App","Message","Ts"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"item.Ts \u003E 1 AND item.Ts \u003C= 4 OR item.App == \"ydb\"","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Operators":[{"GroupBy":"item.App","Aggregation":"{MAX(item.Message),MIN(item.Message)}","Name":"Aggregate","Phase":"Intermediate"}],"Node Type":"Aggregate"}],"Node Type":"HashShuffle (KeyColumns: [\"App\"])","PlanNodeType":"Connection"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 12043, MsgBus: 20371 2025-03-31T16:22:24.937047Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7488009814457999803:2061];send_to=[0:7307199536658146131:7762515]; 
2025-03-31T16:22:24.937178Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a65/r3tmp/tmpbfNF9l/pdisk_1.dat 2025-03-31T16:22:25.051029Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:22:25.083068Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:22:25.083185Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:22:25.085482Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12043, node 6 2025-03-31T16:22:25.132307Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:22:25.132332Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:22:25.132342Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:22:25.132488Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20371 TClient is connected to server localhost:20371 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:22:25.590368Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:22:25.603703Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:22:25.682169Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:22:25.866656Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:22:25.948592Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:22:28.662432Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488009831637870779:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:22:28.662581Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:22:28.708201Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:22:28.751316Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:22:28.787327Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:22:28.822515Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:22:28.862333Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:22:28.901456Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:22:28.949200Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488009831637871287:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:22:28.949353Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:22:28.949683Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488009831637871293:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:22:28.955355Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:22:28.967747Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7488009831637871295:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
2025-03-31T16:22:29.068068Z node 6 :TX_PROXY ERROR: Actor# [6:7488009835932838648:3451] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-31T16:22:29.937617Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7488009814457999803:2061];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:22:29.937710Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-31T16:22:30.125836Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:22:30.418358Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480
2025-03-31T16:22:30.457332Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480
>> DataStreams::TestDeleteStream [GOOD]
>> DataStreams::TestDeleteStreamWithEnforceFlag
>> DataStreams::TestGetShardIterator [GOOD]
>> DataStreams::TestGetRecordsWithoutPermission
>> DataStreams::TestGetRecordsStreamWithSingleShard [GOOD]
>> DataStreams::TestGetRecords1MBMessagesOneByOneByTS
>> DataStreams::TestUpdateStorage [GOOD]
>> DataStreams::TestStreamTimeRetention
>> DataStreams::TestNonChargeableUser [GOOD]
>> DataStreams::TestPutEmptyMessage
>> DataStreams::TestUpdateStream [GOOD]
>> DataStreams::Test_AutoPartitioning_Describe
>> DataStreams::TestStreamStorageRetention
>> DataStreams::TestControlPlaneAndMeteringData [GOOD]
>> DataStreams::ChangeBetweenRetentionModes
>> TColumnShardTestReadWrite::WriteStandaloneOverload [GOOD]
>> DataStreams::TestPutRecordsOfAnauthorizedUser [GOOD]
>> DataStreams::TestPutRecordsWithRead
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteStandaloneOverload [GOOD]
Test command err:
2025-03-31T16:22:10.661281Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0
2025-03-31T16:22:10.736431Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor;
2025-03-31T16:22:10.753823Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished;
2025-03-31T16:22:10.754068Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184
2025-03-31T16:22:10.762197Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0;
2025-03-31T16:22:10.762390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules;
2025-03-31T16:22:10.762647Z node 1
:TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:22:10.762781Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:22:10.762877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:22:10.763005Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:22:10.763129Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:22:10.763269Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:22:10.763401Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:22:10.763505Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:22:10.763606Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:22:10.763698Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:22:10.785346Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:22:10.785494Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:22:10.785610Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:22:10.785812Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:22:10.785929Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:22:10.785988Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:22:10.786027Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:22:10.786103Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:22:10.786155Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:22:10.786212Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:22:10.786251Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:22:10.786471Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:22:10.786545Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:22:10.786584Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:22:10.786615Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:22:10.786731Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:22:10.786803Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:22:10.786852Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:22:10.786879Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:22:10.786948Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:22:10.786981Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:22:10.787012Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:22:10.787070Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:22:10.787132Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:22:10.787162Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:22:10.787565Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=41; 2025-03-31T16:22:10.787648Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=35; 2025-03-31T16:22:10.787717Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=30; 2025-03-31T16:22:10.787806Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=43; 2025-03-31T16:22:10.787958Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:22:10.788033Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:22:10.788069Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:22:10.788301Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:22:10.788348Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:22:10.788377Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:22:10.788532Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:22:10.788576Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:22:10.788604Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-31T16:22:10.788805Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:22:10.788850Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:22:10.788885Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-31T16:22:10.789002Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T16:22:10.789039Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T16:22:10.789108Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... BlobManager at tablet 9437184 Save Batch GenStep: 2:4 Blob count: 2 2025-03-31T16:22:34.766077Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=7;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-03-31T16:22:34.768445Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 5 at tablet 9437184 2025-03-31T16:22:34.831955Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:5 Blob count: 2 2025-03-31T16:22:34.852308Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:5 Blob count: 2 2025-03-31T16:22:34.852478Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=8;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-03-31T16:22:34.880449Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 6 at tablet 9437184 2025-03-31T16:22:34.934510Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:6 Blob count: 2 2025-03-31T16:22:34.958055Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:6 Blob count: 2 2025-03-31T16:22:34.958213Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=9;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-03-31T16:22:34.981306Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 7 at tablet 9437184 2025-03-31T16:22:35.036295Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:7 Blob count: 2 2025-03-31T16:22:35.058971Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:7 Blob count: 2 2025-03-31T16:22:35.059150Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=10;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-03-31T16:22:35.061476Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 8 at tablet 9437184 2025-03-31T16:22:35.115299Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:8 Blob count: 2 2025-03-31T16:22:35.170750Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:8 Blob count: 2 2025-03-31T16:22:35.170912Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=11;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 
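The write path above repeats a send/overload/resend cycle: each TEvWrite lands, the blob batch is saved, and the driver prints RESEND TEvWrite until the shard finally accepts the write (the later CATCH TEvWrite, status OK marker). A minimal, hypothetical C++ sketch of that retry loop follows; the EWriteStatus enum and WriteWithResend helper are invented for illustration and are not YDB's actual TEvWrite API.

#include <chrono>
#include <functional>
#include <iostream>
#include <thread>

// Hypothetical status codes standing in for the shard's write-ack results
// seen in the trace (Overloaded triggers the "RESEND TEvWrite" path).
enum class EWriteStatus { Ok, Overloaded };

// Retry a write while the shard keeps reporting overload, mirroring the
// resend loop in the trace: send, observe status, resend until OK.
EWriteStatus WriteWithResend(const std::function<EWriteStatus()>& sendWrite,
                             int maxAttempts) {
    for (int attempt = 1; attempt <= maxAttempts; ++attempt) {
        EWriteStatus status = sendWrite();
        if (status == EWriteStatus::Ok) {
            std::cout << "CATCH TEvWrite, status OK (attempt " << attempt << ")\n";
            return status;
        }
        std::cout << "RESEND TEvWrite (attempt " << attempt << ")\n";
        // Back off a little before retrying so the shard can drain its queue.
        std::this_thread::sleep_for(std::chrono::milliseconds(50 * attempt));
    }
    return EWriteStatus::Overloaded;
}

int main() {
    int calls = 0;
    // Simulated shard: overloaded for the first writes, then accepts.
    auto fakeSend = [&calls]() {
        return ++calls < 18 ? EWriteStatus::Overloaded : EWriteStatus::Ok;
    };
    WriteWithResend(fakeSend, 25);
}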
2025-03-31T16:22:35.173638Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 9 at tablet 9437184 2025-03-31T16:22:35.229205Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:9 Blob count: 2 2025-03-31T16:22:35.263971Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:9 Blob count: 2 2025-03-31T16:22:35.264174Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=12;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-03-31T16:22:35.307790Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 10 at tablet 9437184 2025-03-31T16:22:35.370370Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:10 Blob count: 2 2025-03-31T16:22:35.527464Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:10 Blob count: 2 2025-03-31T16:22:35.527626Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=13;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-03-31T16:22:35.632398Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 11 at tablet 9437184 2025-03-31T16:22:35.685177Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:11 Blob count: 2 2025-03-31T16:22:35.707513Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:11 Blob count: 2 2025-03-31T16:22:35.707685Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=14;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-03-31T16:22:35.720677Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 12 at tablet 9437184 2025-03-31T16:22:35.770132Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:12 Blob count: 2 2025-03-31T16:22:35.790593Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:12 Blob count: 2 2025-03-31T16:22:35.790752Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=15;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-03-31T16:22:35.792872Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 13 at tablet 9437184 2025-03-31T16:22:35.837056Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:13 Blob count: 2 2025-03-31T16:22:35.885833Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:13 Blob count: 2 2025-03-31T16:22:35.886002Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=16;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-03-31T16:22:35.888082Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 14 at tablet 9437184 2025-03-31T16:22:35.940548Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:14 Blob count: 2 2025-03-31T16:22:35.963112Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch 
GenStep: 2:14 Blob count: 2 2025-03-31T16:22:35.963278Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=17;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-03-31T16:22:35.975477Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 15 at tablet 9437184 2025-03-31T16:22:36.025250Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:15 Blob count: 2 2025-03-31T16:22:36.045408Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:15 Blob count: 2 2025-03-31T16:22:36.045564Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=18;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-03-31T16:22:36.047757Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 16 at tablet 9437184 2025-03-31T16:22:36.098076Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:16 Blob count: 2 2025-03-31T16:22:36.133723Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:16 Blob count: 2 2025-03-31T16:22:36.133902Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=19;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-03-31T16:22:36.136231Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 17 at tablet 9437184 2025-03-31T16:22:36.187399Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:17 Blob count: 2 2025-03-31T16:22:36.228219Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:17 Blob count: 2 2025-03-31T16:22:36.228392Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=20;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-03-31T16:22:36.230647Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 18 at tablet 9437184 2025-03-31T16:22:36.280273Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:18 Blob count: 2 2025-03-31T16:22:36.299309Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:18 Blob count: 2 2025-03-31T16:22:36.299489Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=21;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-03-31T16:22:36.311393Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 19 at tablet 9437184 2025-03-31T16:22:36.353161Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:19 Blob count: 2 2025-03-31T16:22:36.372281Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:19 Blob count: 2 2025-03-31T16:22:36.372448Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=22;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-03-31T16:22:36.374486Z node 1 :TX_COLUMNSHARD DEBUG: 
Write (record) into pathId 1 writeId 20 at tablet 9437184 2025-03-31T16:22:36.450211Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:20 Blob count: 2 2025-03-31T16:22:36.629024Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:20 Blob count: 2 2025-03-31T16:22:36.629238Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=23;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-03-31T16:22:36.679767Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 21 at tablet 9437184 2025-03-31T16:22:36.722236Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:21 Blob count: 2 2025-03-31T16:22:36.749391Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:21 Blob count: 2 2025-03-31T16:22:36.749538Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=24;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:22:37.758836Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];fline=actor.cpp:22;event=flush_writing;size=6330728;count=1; CATCH TEvWrite, status OK 2025-03-31T16:22:37.826913Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 22 at tablet 9437184 2025-03-31T16:22:37.884275Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:22 Blob count: 2 2025-03-31T16:22:37.916372Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:22 Blob count: 2 2025-03-31T16:22:37.916591Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=25;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0;
>> TPQTest::TestWritePQ [GOOD]
>> TPQTest::TestWriteSplit
>> TPQTest::TestPQSmallRead [GOOD]
>> TPQTest::TestPQReadAhead
>> DataStreams::TestDeleteStreamWithEnforceFlag [GOOD]
>> DataStreams::TestDeleteStreamWithEnforceFlagFalse
>> TColumnShardTestReadWrite::CompactionGC [GOOD]
>> TPQTest::TestWriteSplit [GOOD]
>> TPQTest::TestWriteTimeLag
>> DataStreams::TestGetRecordsWithoutPermission [GOOD]
>> DataStreams::TestGetRecordsWithCount
>> DataStreams::TestStreamStorageRetention [GOOD]
>> DataStreams::TestStreamPagination
>> DataStreams::TestPutEmptyMessage [GOOD]
>> DataStreams::TestListStreamConsumers
>> DataStreams::ChangeBetweenRetentionModes [GOOD]
>> DataStreams::TestCreateExistingStream
>> DataShardTxOrder::RandomPoints_DelayData [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionGC [GOOD]
Test command err:
2025-03-31T16:21:41.912006Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:21:42.003223Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:21:42.020958Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:21:42.021220Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:21:42.029882Z node 1 :TX_COLUMNSHARD WARN:
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:21:42.030114Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:21:42.030367Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:21:42.030515Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:21:42.030622Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:21:42.030768Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:21:42.030878Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:21:42.030987Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:21:42.031129Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:21:42.031269Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:21:42.031404Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:21:42.031532Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:21:42.060249Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:21:42.060412Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:21:42.060484Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:21:42.060717Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:21:42.060886Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:21:42.060978Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:21:42.061025Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:21:42.061154Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:21:42.061247Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:21:42.061295Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:21:42.061326Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:21:42.061549Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:21:42.061641Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:21:42.061694Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:21:42.061726Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:21:42.061851Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:21:42.061921Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:21:42.061967Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:21:42.061997Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:21:42.062100Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:21:42.062141Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:21:42.062189Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 
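The normalizer_init / normalizer_finished / normalizer_switched events in this trace describe a chain of startup migration steps executed strictly in order during TTxUpdateSchema: each normalizer runs to completion, reports finished, and hands off to the next one by class name. A minimal, hypothetical C++ sketch of such a chain follows; the INormalizer interface and the two concrete classes here are invented for illustration and are not the actual NKikimr types.

#include <iostream>
#include <memory>
#include <string>
#include <vector>

// Hypothetical stand-in for a startup normalizer: one self-contained
// migration/repair step, identified by a class name as in the log events.
struct INormalizer {
    virtual ~INormalizer() = default;
    virtual std::string ClassName() const = 0;
    virtual void Apply() = 0;  // e.g. scan metadata, rewrite stale chunks
};

struct TGranulesNormalizer : INormalizer {
    std::string ClassName() const override { return "Granules"; }
    void Apply() override { /* "0 chunks found" in the trace above */ }
};

struct TChunksNormalizer : INormalizer {
    std::string ClassName() const override { return "Chunks"; }
    void Apply() override {}
};

// Run the registered normalizers strictly in sequence, emitting the same
// init -> finished -> switched lifecycle the TTxUpdateSchema trace shows.
void RunNormalizers(const std::vector<std::unique_ptr<INormalizer>>& chain) {
    for (size_t i = 0; i < chain.size(); ++i) {
        std::cout << "normalizer_init;type=" << chain[i]->ClassName() << "\n";
        chain[i]->Apply();
        std::cout << "normalizer_finished;description=CLASS_NAME="
                  << chain[i]->ClassName() << "\n";
        if (i + 1 < chain.size()) {
            std::cout << "normalizer_switched;description=CLASS_NAME="
                      << chain[i + 1]->ClassName() << "\n";
        }
    }
}

int main() {
    std::vector<std::unique_ptr<INormalizer>> chain;
    chain.push_back(std::make_unique<TGranulesNormalizer>());
    chain.push_back(std::make_unique<TChunksNormalizer>());
    RunNormalizers(chain);
}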
2025-03-31T16:21:42.062263Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:21:42.062304Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:21:42.062335Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:21:42.062779Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=59; 2025-03-31T16:21:42.062897Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=46; 2025-03-31T16:21:42.062987Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=36; 2025-03-31T16:21:42.063187Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=43; 2025-03-31T16:21:42.063421Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:21:42.063482Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:21:42.063519Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:21:42.063767Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:21:42.063827Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:21:42.063865Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:21:42.064053Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:21:42.064104Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:21:42.064134Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-31T16:21:42.064401Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:21:42.064451Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:21:42.064485Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-31T16:21:42.064631Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T16:21:42.064679Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T16:21:42.064732Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... p_indexation;reason=in_progress;count=1;insert_overload_size=5870200;indexing_debug={task_ids=5db9beba-e4c11f0-85111e70-5f0080f5,;}; 2025-03-31T16:22:40.813252Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:831;event=skip_compaction;reason=disabled; 2025-03-31T16:22:40.813309Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=0; 2025-03-31T16:22:40.813382Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-31T16:22:40.813442Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-31T16:22:40.813491Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-31T16:22:40.813576Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.414500s; 2025-03-31T16:22:40.813642Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-31T16:22:41.061468Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];fline=actor.cpp:22;event=flush_writing;size=4735248;count=1; 2025-03-31T16:22:41.106691Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 45 at tablet 9437184 2025-03-31T16:22:41.107007Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:89 Blob count: 1 2025-03-31T16:22:41.120061Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:89 Blob count: 1 2025-03-31T16:22:41.120288Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;local_tx_no=270;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:769;event=skip_indexation;reason=in_progress;count=1;insert_overload_size=5870200;indexing_debug={task_ids=5db9beba-e4c11f0-85111e70-5f0080f5,;}; 2025-03-31T16:22:41.126561Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:44:255:1:574112:0]; 2025-03-31T16:22:41.126726Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:44:255:2:592928:0]; GC for channel 2 deletes blobs: GC for channel 4 deletes blobs: GC for channel 3 deletes blobs: [9437184:2:85:3:0:5870200:0] Added portions: 151 152 2025-03-31T16:22:41.152160Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 9437184 2025-03-31T16:22:41.152505Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[271] (CS::INDEXATION) apply at tablet 9437184 2025-03-31T16:22:41.157144Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:88 Blob count: 2 2025-03-31T16:22:41.157326Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=92311612;raw_bytes=143732845;count=39;records=1462497} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=7381080;raw_bytes=7369506;count=2;records=75000} inactive {blob_bytes=0;raw_bytes=0;count=0;records=0} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-03-31T16:22:41.171847Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=5db9beba-e4c11f0-85111e70-5f0080f5;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; 2025-03-31T16:22:41.171931Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=5db9beba-e4c11f0-85111e70-5f0080f5;fline=with_appended.cpp:65;portions=75,76,;task_id=5db9beba-e4c11f0-85111e70-5f0080f5; 2025-03-31T16:22:41.172233Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=5db9beba-e4c11f0-85111e70-5f0080f5;fline=manager.cpp:15;event=unlock;process_id=CS::INDEXATION::5db9beba-e4c11f0-85111e70-5f0080f5; 2025-03-31T16:22:41.172306Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=5db9beba-e4c11f0-85111e70-5f0080f5;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-31T16:22:41.172378Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=5db9beba-e4c11f0-85111e70-5f0080f5;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:22:41.172436Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;task_id=5db9beba-e4c11f0-85111e70-5f0080f5;tablet_id=9437184;fline=columnshard_impl.cpp:831;event=skip_compaction;reason=disabled; 2025-03-31T16:22:41.172486Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=5db9beba-e4c11f0-85111e70-5f0080f5;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=0; 2025-03-31T16:22:41.172558Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=5db9beba-e4c11f0-85111e70-5f0080f5;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-31T16:22:41.172617Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=5db9beba-e4c11f0-85111e70-5f0080f5;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-31T16:22:41.172669Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;task_id=5db9beba-e4c11f0-85111e70-5f0080f5;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-31T16:22:41.172782Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=5db9beba-e4c11f0-85111e70-5f0080f5;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.403000s; 2025-03-31T16:22:41.172841Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=5db9beba-e4c11f0-85111e70-5f0080f5;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-31T16:22:41.173006Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:87:2:0:5870200:0] 2025-03-31T16:22:41.173070Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:88 Blob count: 2 2025-03-31T16:22:41.174058Z node 1 :TX_COLUMNSHARD DEBUG: fline=task.cpp:21;event=free_resources;task_id=45;external_task_id=5db9beba-e4c11f0-85111e70-5f0080f5;mem=5963210;cpu=0; 2025-03-31T16:22:41.174775Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:22:41.175689Z node 1 :TX_COLUMNSHARD DEBUG: PlanStep 6080043 at tablet 9437184, mediator 0 2025-03-31T16:22:41.175780Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[274] execute at tablet 9437184 2025-03-31T16:22:41.176154Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=1043;fline=abstract.h:83;progress_tx_id=1043;lock_id=1;broken=0; 2025-03-31T16:22:41.176390Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=1043;fline=tx_controller.cpp:211;event=finished_tx;tx_id=1043; 2025-03-31T16:22:41.189171Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[274] complete at tablet 9437184 2025-03-31T16:22:41.189306Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=abstract.h:93;progress_tx_id=1043;lock_id=1;broken=0; 2025-03-31T16:22:41.189470Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=5870200; 2025-03-31T16:22:41.189612Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=manager.cpp:10;event=lock;process_id=CS::INDEXATION::5e5c4982-e4c11f0-9e22c2b5-4fb616c5; 2025-03-31T16:22:41.189669Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=ro_controller.cpp:45;event=CS::INDEXATION;tablet_id=9437184; 2025-03-31T16:22:41.189787Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:754;event=indexation;bytes=5870200;blobs_count=1;max_limit=251658240;has_more=0;external_task_id=5e5c4982-e4c11f0-9e22c2b5-4fb616c5; 2025-03-31T16:22:41.189882Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:620;event=start_changes;type=CS::INDEXATION;task_id=5e5c4982-e4c11f0-9e22c2b5-4fb616c5; 2025-03-31T16:22:41.190054Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];ev_type=NKikimr::NOlap::NResourceBroker::NSubscribe::TEvStartTask;fline=actor.cpp:38;event=ask_resources;task=cpu=0;mem=5963210;external_task_id=5e5c4982-e4c11f0-9e22c2b5-4fb616c5;type=CS::INDEXATION;priority=0;; 2025-03-31T16:22:41.190320Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;parent=[1:139:2171];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=actor.cpp:29;event=result_resources;task_id=46;task=cpu=0;mem=5963210;external_task_id=5e5c4982-e4c11f0-9e22c2b5-4fb616c5;type=CS::INDEXATION;priority=0;; 2025-03-31T16:22:41.190374Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:9;event=resource_allocated;external_task_id=5e5c4982-e4c11f0-9e22c2b5-4fb616c5;mem=5963210;cpu=0; 2025-03-31T16:22:41.190425Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:40;event=allocate_resources;external_task_id=5e5c4982-e4c11f0-9e22c2b5-4fb616c5;task_id=46;mem=5963210;cpu=0; 2025-03-31T16:22:41.190619Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=5e5c4982-e4c11f0-9e22c2b5-4fb616c5;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=5e5c4982-e4c11f0-9e22c2b5-4fb616c5; Added portions: 153 154 2025-03-31T16:22:41.845971Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=5e5c4982-e4c11f0-9e22c2b5-4fb616c5;fline=actor.cpp:48;task=agents_waiting=0;additional_info=();; 2025-03-31T16:22:41.846145Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=2; Compactions happened: 14 Indexations happened: 31 Cleanups happened: 1 Old portions: 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 34 Cleaned up portions: 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 34 FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager;
>> DataStreams::Test_AutoPartitioning_Describe [GOOD]
>> DataStreams::Test_Crreate_AutoPartitioning_Disabled
>> TPQTest::TestWriteTimeLag [GOOD]
>> DataStreams::TestPutRecordsWithRead [GOOD]
>> DataStreams::TestPutRecordsCornerCases
------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::TestWriteTimeLag [GOOD]
Test command err:
Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:108:2057] recipient: [1:101:2135] 2025-03-31T16:21:05.863743Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:05.863829Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170]
Leader for TabletID 72057594037927938 is [1:153:2174] sender: [1:154:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:179:2057] recipient: [1:14:2061] 2025-03-31T16:21:05.880565Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:05.896870Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 7864320 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-03-31T16:21:05.897790Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:185:2198] 2025-03-31T16:21:05.900185Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:185:2198] 2025-03-31T16:21:05.901781Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:186:2199] 2025-03-31T16:21:05.902929Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:186:2199] 2025-03-31T16:21:05.921980Z node 1 :PERSQUEUE INFO: new Cookie default|1bd3bd60-49172645-11be21bd-3fb2f2cf_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-31T16:21:06.044660Z node 1 :PERSQUEUE INFO: new Cookie default|eb0de4a-83db7384-92789eef-c1b2293b_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-31T16:21:06.113623Z node 1 :PERSQUEUE INFO: new Cookie default|ea90d5d-bf6e542b-1e57c327-4707848d_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-31T16:21:06.150545Z node 1 :PERSQUEUE INFO: new Cookie default|f07fe99-459ca91e-95aad812-b2ff0e15_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-31T16:21:06.158525Z node 1 :PERSQUEUE INFO: new Cookie default|1c08e161-5ec02393-1d2241e6-49ebf27d_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-31T16:21:06.163832Z node 1 :PERSQUEUE INFO: new Cookie default|b950e937-eadbc51b-6ecc5ad3-3d593b03_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:103:2057] recipient: [2:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:103:2057] recipient: [2:101:2135] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:108:2057] recipient: [2:101:2135] 2025-03-31T16:21:06.646931Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:06.647011Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:149:2057] recipient: [2:147:2170] IGNORE Leader for TabletID 72057594037927938 is 
[0:0:0] sender: [2:149:2057] recipient: [2:147:2170] Leader for TabletID 72057594037927938 is [2:153:2174] sender: [2:154:2057] recipient: [2:147:2170] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:179:2057] recipient: [2:14:2061] 2025-03-31T16:21:06.665091Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:06.666069Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 2 actor [2:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 7864320 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2025-03-31T16:21:06.666682Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:185:2198] 2025-03-31T16:21:06.669235Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [2:185:2198] 2025-03-31T16:21:06.671146Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [2:186:2199] 2025-03-31T16:21:06.672969Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [2:186:2199] 2025-03-31T16:21:06.695782Z node 2 :PERSQUEUE INFO: new Cookie default|e030c9af-9883b4d4-1d20b933-88f017fd_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-31T16:21:06.804242Z node 2 :PERSQUEUE INFO: new Cookie default|53db36f2-d9c97e20-4ebfed3f-eeb2374a_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-31T16:21:06.870037Z node 2 :PERSQUEUE INFO: new Cookie default|1a408e3-c1100f1c-bae4e5d2-c026760_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-31T16:21:06.906417Z node 2 :PERSQUEUE INFO: new Cookie default|5995b8b0-8f05478d-dc53cf51-d69735f2_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-31T16:21:06.914011Z node 2 :PERSQUEUE INFO: new Cookie default|b595bf08-aa8fbfef-4f0b97fd-77276eaf_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default !Reboot 72057594037927937 (actor [2:107:2139]) on event NKikimr::TEvPersQueue::TEvRequest ! 
Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:295:2057] recipient: [2:99:2134] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:297:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:299:2057] recipient: [2:298:2295] Leader for TabletID 72057594037927937 is [2:300:2296] sender: [2:301:2057] recipient: [2:298:2295] 2025-03-31T16:21:06.954912Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:06.954978Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-31T16:21:06.955993Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:349:2337] 2025-03-31T16:21:06.958513Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [2:350:2338] 2025-03-31T16:21:06.969566Z node 2 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-31T16:21:06.969644Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [2:349:2337] 2025-03-31T16:21:06.970143Z node 2 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-31T16:21:06.970199Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [2:350:2338] !Reboot 72057594037927937 (actor [2:107:2139]) rebooted! !Reboot 72057594037927937 (actor [2:107:2139]) tablet resolver refreshed! 
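The !Reboot ... on event / rebooted! / tablet resolver refreshed! markers come from the harness restarting tablet 72057594037927937 mid-test and checking that partition state is rebuilt from persistent storage (note the partitions re-initialize with generation 3 after the restart). A minimal, hypothetical C++ sketch of that reboot-at-event test pattern follows; TFakeTablet is invented for illustration and is unrelated to the real tablet implementation.

#include <cassert>
#include <iostream>
#include <map>
#include <string>

// Hypothetical in-memory "tablet": volatile actors plus a persisted store
// that survives reboots, like the partition state reloaded in the trace.
struct TFakeTablet {
    std::map<std::string, int> persisted;  // survives Reboot()
    int generation = 2;

    void Write(const std::string& key, int value) { persisted[key] = value; }

    void Reboot() {
        // Volatile state is dropped; on restart the tablet re-reads
        // `persisted` and bumps its generation, as in
        // "init complete ... generation 3" above.
        ++generation;
        std::cout << "!Reboot on event TEvRequest!\n"
                  << "rebooted! generation=" << generation << "\n";
    }
};

// Reboot-at-event test pattern: do some work, restart the tablet at a
// chosen point, then verify persisted state is still readable.
int main() {
    TFakeTablet tablet;
    tablet.Write("offset/user", 42);
    tablet.Reboot();
    assert(tablet.persisted.at("offset/user") == 42);
    std::cout << "state preserved across reboot\n";
}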
new actor is[2:300:2296] Leader for TabletID 72057594037927937 is [2:300:2296] sender: [2:395:2057] recipient: [2:14:2061] 2025-03-31T16:21:08.151634Z node 2 :PERSQUEUE INFO: new Cookie default|f4e6b844-9b27f201-dadefb53-57441faf_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:103:2057] recipient: [3:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:103:2057] recipient: [3:101:2135] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:108:2057] recipient: [3:101:2135] 2025-03-31T16:21:08.465767Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:08.465834Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:149:2057] recipient: [3:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:149:2057] recipient: [3:147:2170] Leader for TabletID 72057594037927938 is [3:153:2174] sender: [3:154:2057] recipient: [3:147:2170] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:179:2057] recipient: [3:14:2061] 2025-03-31T16:21:08.482205Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:08.482708Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 3 actor [3:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 7864320 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 3 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 3 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 3 Important: false } 2025-03-31T16:21:08.483156Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [3:185:2198] 2025-03-31T16:21:08.484720Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [3:185:2198] 2025-03-31T16:21:08.485651Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [3:186:2199] 2025-03-31T16:21:08.486762Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [3:186:2199] 2025-03-31T16:21:08.508206Z node 3 :PERSQUEUE INFO: new Cookie default|f2d426df-db516590-e70b9774-a9e83cec_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' ... 
Q: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-03-31T16:22:43.420579Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 63(current 62) received from actor [60:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 1099511627776 LifetimeSeconds: 0 ImportantClientId: "another1" ImportantClientId: "important" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 63 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 60 ReadRuleGenerations: 60 ReadRuleGenerations: 62 ReadRuleGenerations: 61 ReadRuleGenerations: 63 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 60 Important: false } Consumers { Name: "aaa" Generation: 60 Important: false } Consumers { Name: "another1" Generation: 62 Important: true } Consumers { Name: "important" Generation: 61 Important: true } Consumers { Name: "another" Generation: 63 Important: false } 2025-03-31T16:22:43.424583Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 1099511627776 LifetimeSeconds: 0 ImportantClientId: "another1" ImportantClientId: "important" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 63 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 60 ReadRuleGenerations: 60 ReadRuleGenerations: 62 ReadRuleGenerations: 61 ReadRuleGenerations: 63 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 60 Important: false } Consumers { Name: "aaa" Generation: 60 Important: false } Consumers { Name: "another1" Generation: 62 Important: true } Consumers { Name: "important" Generation: 61 Important: true } Consumers { Name: "another" Generation: 63 Important: false } 2025-03-31T16:22:43.424728Z node 60 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:22:43.425053Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user another reinit with generation 63 done 2025-03-31T16:22:43.425405Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-31T16:22:43.425469Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-31T16:22:43.425527Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-31T16:22:43.425586Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-31T16:22:43.425633Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, 
State: StateIdle] m0000000000cuser 2025-03-31T16:22:43.425660Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uuser 2025-03-31T16:22:43.425690Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000caaa 2025-03-31T16:22:43.425719Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uaaa 2025-03-31T16:22:43.425745Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000canother1 2025-03-31T16:22:43.425771Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uanother1 2025-03-31T16:22:43.425798Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000canother 2025-03-31T16:22:43.425824Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uanother 2025-03-31T16:22:43.425850Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cimportant 2025-03-31T16:22:43.425876Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uimportant 2025-03-31T16:22:43.425905Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] _config_0 2025-03-31T16:22:43.425944Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-31T16:22:43.426001Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-03-31T16:22:43.426124Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user another reinit with generation 63 done 2025-03-31T16:22:43.426347Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] === DumpKeyValueRequest === 2025-03-31T16:22:43.426385Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- delete ---------------- 2025-03-31T16:22:43.426416Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- write ----------------- 2025-03-31T16:22:43.426445Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] i0000000001 2025-03-31T16:22:43.426471Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001cuser 2025-03-31T16:22:43.426497Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001uuser 2025-03-31T16:22:43.426522Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001caaa 2025-03-31T16:22:43.426550Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001uaaa 2025-03-31T16:22:43.426575Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001canother1 2025-03-31T16:22:43.426602Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001uanother1 2025-03-31T16:22:43.426629Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001canother 2025-03-31T16:22:43.426654Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001uanother 2025-03-31T16:22:43.426680Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001cimportant 2025-03-31T16:22:43.426707Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, 
Partition: 1, State: StateIdle] m0000000001uimportant 2025-03-31T16:22:43.426763Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] _config_1 2025-03-31T16:22:43.426794Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- rename ---------------- 2025-03-31T16:22:43.426824Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] =========================== 2025-03-31T16:22:43.426913Z node 60 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-03-31T16:22:43.427175Z node 60 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-03-31T16:22:43.436120Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-03-31T16:22:43.436585Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvPartitionConfigChanged 2025-03-31T16:22:43.437682Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-03-31T16:22:43.437846Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvPartitionConfigChanged 2025-03-31T16:22:43.438163Z node 60 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 63 actor [60:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 1099511627776 LifetimeSeconds: 0 ImportantClientId: "another1" ImportantClientId: "important" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 63 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 60 ReadRuleGenerations: 60 ReadRuleGenerations: 62 ReadRuleGenerations: 61 ReadRuleGenerations: 63 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 60 Important: false } Consumers { Name: "aaa" Generation: 60 Important: false } Consumers { Name: "another1" Generation: 62 Important: true } Consumers { Name: "important" Generation: 61 Important: true } Consumers { Name: "another" Generation: 63 Important: false } 2025-03-31T16:22:43.438922Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [60:617:2564], now have 1 active actors on pipe 2025-03-31T16:22:43.440398Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [60:620:2566], now have 1 active actors on pipe 2025-03-31T16:22:43.440652Z node 60 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-03-31T16:22:43.440760Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-03-31T16:22:43.440979Z node 60 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-03-31T16:22:43.441901Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [60:622:2568], now have 1 active actors on pipe 2025-03-31T16:22:43.442068Z node 60 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-03-31T16:22:43.442148Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message 
batch for topic 'topic' partition 0 2025-03-31T16:22:43.442305Z node 60 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-03-31T16:22:43.443052Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [60:624:2570], now have 1 active actors on pipe 2025-03-31T16:22:43.443372Z node 60 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-03-31T16:22:43.443460Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-03-31T16:22:43.443628Z node 60 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-03-31T16:22:43.444323Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [60:626:2572], now have 1 active actors on pipe 2025-03-31T16:22:43.444549Z node 60 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-03-31T16:22:43.444635Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-03-31T16:22:43.444824Z node 60 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 >> TSchemeShardLoginTest::UserLogin >> TSchemeShardLoginTest::RemoveGroup-StrictAclCheck-false ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_DelayData [GOOD] Test command err: 2025-03-31T16:19:20.170702Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:19:20.171956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:19:20.172033Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:19:20.180597Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:19:20.181052Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:131:2154] 2025-03-31T16:19:20.181318Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:19:20.192153Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:19:20.236051Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:19:20.236380Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:19:20.237943Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-03-31T16:19:20.238017Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-03-31T16:19:20.238068Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-03-31T16:19:20.238491Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:19:20.239108Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:19:20.239195Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:197:2154] in generation 2 2025-03-31T16:19:20.303674Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:19:20.335576Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-03-31T16:19:20.335711Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:19:20.335788Z node 1 :TX_DATASHARD DEBUG: Change sender created: at 
tablet: 9437184, actorId: [1:215:2213] 2025-03-31T16:19:20.335811Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-03-31T16:19:20.335835Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-03-31T16:19:20.335859Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:19:20.336044Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:20.336077Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:20.336279Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-03-31T16:19:20.336340Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-03-31T16:19:20.336371Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-31T16:19:20.336395Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:19:20.336417Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-03-31T16:19:20.336441Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-31T16:19:20.336466Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-31T16:19:20.336488Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-03-31T16:19:20.336510Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:19:20.336567Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:20.336602Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:20.336652Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-03-31T16:19:20.339279Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:131:2154]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nK\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\n \000Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-03-31T16:19:20.339340Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T16:19:20.339417Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-31T16:19:20.339557Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-03-31T16:19:20.339597Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-03-31T16:19:20.339665Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-03-31T16:19:20.339714Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-31T16:19:20.339754Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-03-31T16:19:20.339792Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 
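
[editorial note] The DumpKeyValueRequest blocks earlier in this log enumerate the keys PersQueue writes per partition: an info key i<partition>, paired per-consumer keys m<partition>c<consumer> / m<partition>u<consumer>, and a _config_<partition> blob. Below is a hypothetical helper that reproduces that naming, inferred purely from the dump; the real YDB key schema lives in the PersQueue sources and may differ.

#include <cstdio>
#include <string>

// Key layout inferred from the DumpKeyValueRequest output above.
// All names here are illustrative; YDB's actual schema may differ.
static std::string PadPartition(unsigned partition) {
    char buf[16];
    std::snprintf(buf, sizeof(buf), "%010u", partition);  // 10-digit zero pad, as in m0000000001...
    return buf;
}

std::string PartitionInfoKey(unsigned partition) {            // i0000000000
    return "i" + PadPartition(partition);
}

std::string ConsumerKey(unsigned partition, char kind, const std::string& consumer) {
    // kind 'c' and 'u' match the paired m...c<name> / m...u<name> keys in the dump.
    return "m" + PadPartition(partition) + kind + consumer;
}

std::string ConfigKey(unsigned partition) {                   // _config_0, _config_1
    return "_config_" + std::to_string(partition);
}

int main() {
    std::printf("%s\n", ConsumerKey(0, 'c', "user").c_str());      // m0000000000cuser
    std::printf("%s\n", ConsumerKey(1, 'u', "important").c_str()); // m0000000001uimportant
    std::printf("%s\n", PartitionInfoKey(1).c_str());              // i0000000001
    std::printf("%s\n", ConfigKey(0).c_str());                     // _config_0
}
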
2025-03-31T16:19:20.339823Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-03-31T16:19:20.340119Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-03-31T16:19:20.340187Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-03-31T16:19:20.340221Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-03-31T16:19:20.340250Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-03-31T16:19:20.340285Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-03-31T16:19:20.340314Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-03-31T16:19:20.340358Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-03-31T16:19:20.340389Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-03-31T16:19:20.340413Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-03-31T16:19:20.352432Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-03-31T16:19:20.352491Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-03-31T16:19:20.352524Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-03-31T16:19:20.352581Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-03-31T16:19:20.352661Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-03-31T16:19:20.353199Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:20.353250Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:20.353295Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-03-31T16:19:20.353430Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:131:2154]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-03-31T16:19:20.353466Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-31T16:19:20.353579Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-03-31T16:19:20.353618Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-31T16:19:20.353648Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-03-31T16:19:20.353682Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-03-31T16:19:20.357515Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-03-31T16:19:20.357588Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:19:20.357790Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient 
[1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:20.357827Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:20.357880Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-31T16:19:20.357914Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:19:20.357969Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-31T16:19:20.358010Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-03-31T16:19:20.358040Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-03-31T16:19:20.358087Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-31T16:19:20.358123Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-03-31T16:19:20.358154Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-03-31T16:19:20.358202Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-03-31T16:19:20.358372Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-03-31T16:19:20.358415Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-31T16:19:20.358455Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-03-31T16:19:20.358478Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-03-31T16:19:20.358500Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-03-31T16:19:20.358563Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-31T16:19:20.358584Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-03-31T16:19:20.358611Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-31T16:19:20.358637Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-03-31T16:19:20.358701Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-03-31T16:19:20.358733Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-03-31T16:19:20.358776Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-03-31T16:19:20.358827Z node 1 :TX_D ... 
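
[editorial note] The TX_DATASHARD TRACE lines above walk a transaction through named execution units (CheckSchemeTx, StoreSchemeTx, FinishPropose, WaitForPlan, PlanQueue, LoadTxDetails, ...), each reporting a status such as Executed, ExecutedNoMoreRestarts, DelayComplete, or "not ready". The following is a minimal sketch of that pattern as the log suggests it, not YDB's actual pipeline implementation.

#include <cstdio>
#include <functional>
#include <string>
#include <vector>

// Statuses mirroring the ones printed in the TRACE lines above.
enum class EStatus { Executed, DelayComplete, NotReady };

struct TUnit {
    std::string Name;
    std::function<EStatus()> Execute;
};

// Advance an operation through an ordered plan of units, stopping when a
// unit reports it is not ready (e.g. WaitForPlan until a plan step arrives).
void RunPlan(const std::vector<TUnit>& plan) {
    for (const auto& unit : plan) {
        std::printf("Trying to execute on unit %s\n", unit.Name.c_str());
        EStatus st = unit.Execute();
        if (st == EStatus::NotReady) {
            std::printf("not ready to execute on unit %s\n", unit.Name.c_str());
            return;  // operation parks here until an external event re-runs the plan
        }
        std::printf("executing on unit %s done\n", unit.Name.c_str());
    }
}

int main() {
    RunPlan({
        {"CheckSchemeTx", [] { return EStatus::Executed; }},
        {"StoreSchemeTx", [] { return EStatus::DelayComplete; }},  // completion deferred to commit
        {"FinishPropose", [] { return EStatus::DelayComplete; }},
        {"WaitForPlan",   [] { return EStatus::NotReady; }},       // waits for TEvPlanStep
    });
}
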
437184 txId 522 2025-03-31T16:22:42.809649Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:233:2226], Recipient [16:344:2312]: {TEvReadSet step# 1000005 txid# 523 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 17} 2025-03-31T16:22:42.809701Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:22:42.809742Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 523 2025-03-31T16:22:42.810013Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:233:2226], Recipient [16:344:2312]: {TEvReadSet step# 1000005 txid# 510 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2025-03-31T16:22:42.810066Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:22:42.810109Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 510 2025-03-31T16:22:42.810271Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:233:2226], Recipient [16:344:2312]: {TEvReadSet step# 1000005 txid# 524 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 18} 2025-03-31T16:22:42.810323Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:22:42.810364Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 524 2025-03-31T16:22:42.810735Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:233:2226], Recipient [16:344:2312]: {TEvReadSet step# 1000005 txid# 525 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 19} 2025-03-31T16:22:42.810789Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:22:42.810830Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 525 2025-03-31T16:22:42.811127Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:233:2226], Recipient [16:344:2312]: {TEvReadSet step# 1000005 txid# 526 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 20} 2025-03-31T16:22:42.811179Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:22:42.811222Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 526 2025-03-31T16:22:42.811508Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:233:2226], Recipient [16:344:2312]: {TEvReadSet step# 1000005 txid# 527 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 21} 2025-03-31T16:22:42.811561Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:22:42.811604Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 527 2025-03-31T16:22:42.811827Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:233:2226], Recipient [16:344:2312]: {TEvReadSet step# 1000005 txid# 528 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 22} 2025-03-31T16:22:42.811886Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:22:42.811925Z node 16 
:TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 528 2025-03-31T16:22:42.812190Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:233:2226], Recipient [16:344:2312]: {TEvReadSet step# 1000005 txid# 529 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 23} 2025-03-31T16:22:42.812241Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:22:42.812285Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 529 2025-03-31T16:22:42.812536Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:233:2226], Recipient [16:344:2312]: {TEvReadSet step# 1000005 txid# 511 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2025-03-31T16:22:42.812584Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:22:42.812630Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 511 2025-03-31T16:22:42.812906Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:233:2226], Recipient [16:344:2312]: {TEvReadSet step# 1000005 txid# 530 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 24} 2025-03-31T16:22:42.812958Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:22:42.812999Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 530 2025-03-31T16:22:42.813240Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:233:2226], Recipient [16:344:2312]: {TEvReadSet step# 1000005 txid# 531 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 25} 2025-03-31T16:22:42.813291Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:22:42.813340Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 531 2025-03-31T16:22:42.813653Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:233:2226], Recipient [16:344:2312]: {TEvReadSet step# 1000005 txid# 532 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 26} 2025-03-31T16:22:42.813706Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:22:42.813746Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 532 2025-03-31T16:22:42.813984Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:233:2226], Recipient [16:344:2312]: {TEvReadSet step# 1000005 txid# 533 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 27} 2025-03-31T16:22:42.814035Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:22:42.814076Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 533 2025-03-31T16:22:42.814362Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:233:2226], Recipient [16:344:2312]: {TEvReadSet step# 1000005 txid# 534 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 28} 2025-03-31T16:22:42.814417Z node 16 :TX_DATASHARD TRACE: StateWork, 
processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:22:42.814456Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 534 2025-03-31T16:22:42.814623Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:233:2226], Recipient [16:344:2312]: {TEvReadSet step# 1000005 txid# 535 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 29} 2025-03-31T16:22:42.814675Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:22:42.814726Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 535 2025-03-31T16:22:42.815141Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:233:2226], Recipient [16:344:2312]: {TEvReadSet step# 1000005 txid# 536 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 30} 2025-03-31T16:22:42.815195Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:22:42.815240Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 536 2025-03-31T16:22:42.815460Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:233:2226], Recipient [16:344:2312]: {TEvReadSet step# 1000005 txid# 512 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2025-03-31T16:22:42.815511Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:22:42.815548Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 512 2025-03-31T16:22:42.815864Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:233:2226], Recipient [16:344:2312]: {TEvReadSet step# 1000005 txid# 537 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 31} 2025-03-31T16:22:42.815913Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:22:42.815954Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 537 2025-03-31T16:22:42.816253Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:233:2226], Recipient [16:344:2312]: {TEvReadSet step# 1000005 txid# 514 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2025-03-31T16:22:42.816307Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:22:42.816345Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 514 2025-03-31T16:22:42.856844Z node 16 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:22:42.856928Z node 16 :TX_DATASHARD TRACE: Complete execution for [1000005:538] at 9437184 on unit CompleteOperation 2025-03-31T16:22:42.857010Z node 16 :TX_DATASHARD DEBUG: Complete [1000005 : 538] from 9437184 at tablet 9437184 send result to client [16:99:2134], exec latency: 1 ms, propose latency: 2 ms 2025-03-31T16:22:42.857094Z node 16 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 538 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 32} 2025-03-31T16:22:42.857145Z node 16 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 
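
[editorial note] Each TEvReadSetAck above carries (step, txid, TabletSource, TabletDest, Seqno), and the shard logs "Receive RS Ack ... txId N" as it retires the matching outstanding read set. Here is a hypothetical bookkeeping sketch of that retire-on-ack loop; the field names are copied from the log, while the real acknowledgement path in the datashard is more involved.

#include <cstdint>
#include <cstdio>
#include <map>
#include <tuple>

// Identity of an outstanding read set, as printed in the ack lines above.
struct TReadSetKey {
    uint64_t Step, TxId, Source, Dest;
    bool operator<(const TReadSetKey& o) const {
        return std::tie(Step, TxId, Source, Dest) < std::tie(o.Step, o.TxId, o.Source, o.Dest);
    }
};

class TReadSetTracker {
    std::map<TReadSetKey, uint64_t> Pending;  // key -> seqno still awaiting ack
public:
    void Sent(TReadSetKey key, uint64_t seqno) { Pending[key] = seqno; }
    void OnAck(TReadSetKey key, uint64_t seqno) {
        auto it = Pending.find(key);
        if (it != Pending.end() && it->second == seqno) {
            Pending.erase(it);  // retired: no further resend needed
            std::printf("Receive RS Ack txId %lu\n", (unsigned long)key.TxId);
        }
    }
    size_t Outstanding() const { return Pending.size(); }
};

int main() {
    TReadSetTracker t;
    TReadSetKey k{1000005, 538, 9437185, 9437184};
    t.Sent(k, 32);
    t.OnAck(k, 32);  // matches the final delayed ack in the log above
    std::printf("outstanding: %zu\n", t.Outstanding());
}
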
2025-03-31T16:22:42.857482Z node 16 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:22:42.857519Z node 16 :TX_DATASHARD TRACE: Complete execution for [1000005:539] at 9437184 on unit CompleteOperation 2025-03-31T16:22:42.857568Z node 16 :TX_DATASHARD DEBUG: Complete [1000005 : 539] from 9437184 at tablet 9437184 send result to client [16:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:22:42.857606Z node 16 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:22:42.857841Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:233:2226], Recipient [16:344:2312]: {TEvReadSet step# 1000005 txid# 538 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 32} 2025-03-31T16:22:42.857889Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:22:42.857930Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 538 expect 30 22 31 28 28 27 28 29 31 31 31 31 30 31 17 27 30 31 15 15 13 31 31 16 27 31 16 - - - 31 - actual 30 22 31 28 28 27 28 29 31 31 31 31 30 31 17 27 30 31 15 15 13 31 31 16 27 31 16 - - - 31 - interm 30 22 30 28 28 27 28 29 27 27 29 22 30 11 17 27 30 27 15 15 13 27 3 16 27 - 16 - - - - - >> TSchemeShardLoginTest::RemoveUser-StrictAclCheck-false >> DataStreams::TestStreamTimeRetention [GOOD] >> DataStreams::TestUnsupported >> TSchemeShardLoginTest::RemoveGroup-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveGroup-StrictAclCheck-true >> TSchemeShardLoginTest::UserLogin [GOOD] >> TSchemeShardLoginTest::TestExternalLoginWithIncorrectLdapDomain >> TSchemeShardLoginTest::RemoveGroup-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::DisableBuiltinAuthMechanism >> DataStreams::TestDeleteStreamWithEnforceFlagFalse [GOOD] >> DataStreams::TestGetRecords1MBMessagesOneByOneBySeqNo >> TSchemeShardLoginTest::RemoveUser-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveUser-StrictAclCheck-true >> TSchemeShardLoginTest::TestExternalLoginWithIncorrectLdapDomain [GOOD] >> TSchemeShardLoginTest::UserStayLockedOutIfEnterValidPassword >> TSchemeShardLoginTest::RemoveUser_Owner-StrictAclCheck-true >> DataStreams::TestListStreamConsumers [GOOD] >> DataStreams::TestListShards1Shard >> TSchemeShardLoginTest::RemoveUser-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::RemoveUser_Acl-StrictAclCheck-false >> TSchemeShardLoginTest::DisableBuiltinAuthMechanism [GOOD] >> TSchemeShardLoginTest::FailedLoginUserUnderNameOfGroup >> DataStreams::TestCreateExistingStream [GOOD] >> DataStreams::ListStreamsValidation >> TSchemeShardLoginTest::UserStayLockedOutIfEnterValidPassword [GOOD] >> TWebLoginService::AuditLogAdminLoginSuccess >> DataStreams::Test_Crreate_AutoPartitioning_Disabled [GOOD] >> DataStreams::TestStreamPagination [GOOD] >> DataStreams::TestShardPagination >> TSchemeShardLoginTest::RemoveUser_Acl-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveUser_Acl-StrictAclCheck-true >> TSchemeShardLoginTest::RemoveUser_Owner-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::TestExternalLogin >> TSchemeShardLoginTest::FailedLoginUserUnderNameOfGroup [GOOD] >> TSchemeShardLoginTest::FailedLoginWithInvalidUser >> TWebLoginService::AuditLogAdminLoginSuccess [GOOD] >> TSchemeShardLoginTest::TestExternalLogin [GOOD] >> TSchemeShardLoginTest::ResetFailedAttemptCount ------- [TM] {asan, default-linux-x86_64, release} 
ydb/services/datastreams/ut/unittest >> DataStreams::Test_Crreate_AutoPartitioning_Disabled [GOOD] Test command err: 2025-03-31T16:22:33.352656Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488009853186790209:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:22:33.352748Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001a14/r3tmp/tmpZUV9t8/pdisk_1.dat 2025-03-31T16:22:33.787369Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:22:33.802637Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:22:33.802763Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 27724, node 1 2025-03-31T16:22:33.813413Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:22:33.856745Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:22:33.877426Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:22:33.877473Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:22:33.877481Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:22:33.877672Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5635 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:22:34.176054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:22:34.272215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:5635 2025-03-31T16:22:34.480576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:22:34.839454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:22:35.070688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:22:38.166123Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488009873483374703:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:22:38.166244Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001a14/r3tmp/tmp6uR5v2/pdisk_1.dat 2025-03-31T16:22:38.451920Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:22:38.471000Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:22:38.471673Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:22:38.480310Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28200, node 4 2025-03-31T16:22:38.639872Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:22:38.639889Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:22:38.639894Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:22:38.640018Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24550 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:22:38.922862Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
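
[editorial note] Every test bootstrap in this block repeats the same HIVE node handshake: VolatileState goes Unknown -> Disconnected -> Connecting -> Connected. A small sketch that validates exactly that chain; the state names are taken from the log lines, and treating any other transition as invalid is this sketch's assumption, not Hive's documented contract.

#include <cstdio>

enum class EVolatileState { Unknown, Disconnected, Connecting, Connected };

// Allow only the forward transitions observed in the HIVE log lines above.
bool IsAllowed(EVolatileState from, EVolatileState to) {
    switch (from) {
        case EVolatileState::Unknown:      return to == EVolatileState::Disconnected;
        case EVolatileState::Disconnected: return to == EVolatileState::Connecting;
        case EVolatileState::Connecting:   return to == EVolatileState::Connected;
        case EVolatileState::Connected:    return false;  // assumption: terminal for this sketch
    }
    return false;
}

int main() {
    EVolatileState chain[] = {EVolatileState::Unknown, EVolatileState::Disconnected,
                              EVolatileState::Connecting, EVolatileState::Connected};
    for (int i = 0; i + 1 < 4; ++i)
        std::printf("step %d: %s\n", i, IsAllowed(chain[i], chain[i + 1]) ? "ok" : "INVALID");
}
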
2025-03-31T16:22:39.016638Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:24550 2025-03-31T16:22:39.259586Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... encryption_type: NONE records { sequence_number: "0" shard_id: "shard-000000" } records { sequence_number: "0" shard_id: "shard-000002" } records { sequence_number: "0" shard_id: "shard-000001" } records { sequence_number: "1" shard_id: "shard-000001" } records { sequence_number: "1" shard_id: "shard-000002" } records { sequence_number: "2" shard_id: "shard-000001" } records { sequence_number: "3" shard_id: "shard-000001" } records { sequence_number: "4" shard_id: "shard-000001" } records { sequence_number: "1" shard_id: "shard-000000" } records { sequence_number: "2" shard_id: "shard-000002" } records { sequence_number: "3" shard_id: "shard-000002" } records { sequence_number: "2" shard_id: "shard-000000" } records { sequence_number: "4" shard_id: "shard-000002" } records { sequence_number: "5" shard_id: "shard-000002" } records { sequence_number: "3" shard_id: "shard-000000" } records { sequence_number: "6" shard_id: "shard-000002" } records { sequence_number: "5" shard_id: "shard-000001" } records { sequence_number: "6" shard_id: "shard-000001" } records { sequence_number: "4" shard_id: "shard-000000" } records { sequence_number: "7" shard_id: "shard-000002" } records { sequence_number: "8" shard_id: "shard-000002" } records { sequence_number: "5" shard_id: "shard-000000" } records { sequence_number: "9" shard_id: "shard-000002" } records { sequence_number: "6" shard_id: "shard-000000" } records { sequence_number: "10" shard_id: "shard-000002" } records { sequence_number: "7" shard_id: "shard-000000" } records { sequence_number: "11" shard_id: "shard-000002" } records { sequence_number: "7" shard_id: "shard-000001" } records { sequence_number: "8" shard_id: "shard-000000" } records { sequence_number: "9" shard_id: "shard-000000" } ALTER_SCHEME: { Name: "test-topic" Split { Partition: 1 SplitBoundary: "a" } } 2025-03-31T16:22:40.504302Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 107:0, at schemeshard: 72057594046644480 2025-03-31T16:22:41.744834Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:22:41.846400Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:22:41.977728Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:22:42.199910Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:22:43.916274Z node 7 
:METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7488009896227880738:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:22:43.916371Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001a14/r3tmp/tmpf69Rv0/pdisk_1.dat 2025-03-31T16:22:44.092205Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:22:44.136692Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:22:44.136817Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:22:44.142129Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62789, node 7 2025-03-31T16:22:44.242101Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:22:44.242129Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:22:44.242139Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:22:44.242302Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16376 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:22:44.539423Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:22:44.629609Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:16376 2025-03-31T16:22:44.849178Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
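
[editorial note] The records dump in the unittest output above lists one sequence_number/shard_id pair per written record across shard-000000..shard-000002. A convenience sketch for tallying that distribution when reading such logs; the regex parsing is purely an aid for this dump format, not a ydb API.

#include <cstdio>
#include <map>
#include <regex>
#include <string>

// Tally how many records landed on each shard from output shaped like the
// 'records { sequence_number: "N" shard_id: "shard-00000K" }' dump above.
int main() {
    std::string dump =
        "records { sequence_number: \"0\" shard_id: \"shard-000000\" } "
        "records { sequence_number: \"0\" shard_id: \"shard-000002\" } "
        "records { sequence_number: \"0\" shard_id: \"shard-000001\" }";
    std::regex re("shard_id: \"(shard-\\d+)\"");
    std::map<std::string, int> perShard;
    for (std::sregex_iterator it(dump.begin(), dump.end(), re), end; it != end; ++it)
        ++perShard[(*it)[1].str()];
    for (const auto& [shard, n] : perShard)
        std::printf("%s: %d record(s)\n", shard.c_str(), n);
}
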
>> TSchemeShardLoginTest::RemoveUser_Acl-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::RemoveGroup_Owner-StrictAclCheck-true ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TWebLoginService::AuditLogAdminLoginSuccess [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:22:45.670092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:22:45.670217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:22:45.670257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:22:45.670295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:22:45.671407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:22:45.671491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:22:45.671600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:22:45.671703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:22:45.673291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:22:45.760832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:22:45.760892Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:22:45.765945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:22:45.766061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:22:45.766211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:22:45.774371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:22:45.774746Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:22:45.795167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:22:45.795649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:22:45.803758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:22:45.814938Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:22:45.815027Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 
2025-03-31T16:22:45.815271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:22:45.815363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:22:45.815426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:22:45.815596Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:22:45.822598Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:22:45.982939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:22:45.983789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:22:45.985172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:22:45.986356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:22:45.986476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:22:45.990375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:22:45.990510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:22:45.990763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:22:45.990830Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:22:45.990869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:22:45.990904Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:22:45.993076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:22:45.993156Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:22:45.993199Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:22:45.995390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:22:45.995446Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:22:45.995508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:22:45.995581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:22:46.006072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:22:46.008196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:22:46.008353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:22:46.009325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:22:46.009459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:22:46.009511Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:22:46.009717Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:22:46.009752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:22:46.009910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:22:46.009988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:22:46.012333Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:22:46.012383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:22:46.012544Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:22:46.012579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:22:46.012805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:22:46.012854Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:22:46.012935Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:22:46.012962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:22:46.012994Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:22:46.013032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:22:46.013063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:22:46.013123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:22:46.013157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:22:46.013183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:22:46.013235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:22:46.013265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:22:46.013291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:22:46.015465Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:22:46.015565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:22:46.015595Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:22:48.709455Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:22:48.709543Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:22:48.711796Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:22:48.711854Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:22:48.712130Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:22:48.712188Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:22:48.712600Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:22:48.712656Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:22:48.712829Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:22:48.712870Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:22:48.712915Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:22:48.712959Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:22:48.713007Z node 4 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:22:48.713059Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:22:48.713104Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:22:48.713144Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:22:48.713222Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:22:48.713267Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:22:48.713311Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:22:48.713852Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:22:48.713968Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:22:48.714016Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-03-31T16:22:48.714064Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-03-31T16:22:48.714111Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:22:48.714214Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-03-31T16:22:48.717400Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-03-31T16:22:48.717962Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-03-31T16:22:48.720968Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "user1" Password: "password1" } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:22:48.726279Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046678944 2025-03-31T16:22:48.726441Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-31T16:22:48.726501Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T16:22:48.726548Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-31T16:22:48.726589Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T16:22:48.726650Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:22:48.726744Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 
1/1, is published: false 2025-03-31T16:22:48.726789Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T16:22:48.726834Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-03-31T16:22:48.726872Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 1, subscribers: 0 2025-03-31T16:22:48.726917Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 4 2025-03-31T16:22:48.727269Z node 4 :TX_PROXY DEBUG: actor# [4:269:2260] Bootstrap 2025-03-31T16:22:48.749621Z node 4 :TX_PROXY DEBUG: actor# [4:269:2260] Become StateWork (SchemeCache [4:274:2265]) 2025-03-31T16:22:48.750862Z node 4 :TX_PROXY DEBUG: actor# [4:269:2260] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-31T16:22:48.756184Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSuccess TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:22:48.756327Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSuccess, operation: CREATE USER, path: /MyRoot 2025-03-31T16:22:48.756536Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:22:48.756592Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:22:48.756801Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:22:48.756848Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-03-31T16:22:48.757473Z node 4 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-03-31T16:22:48.757633Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T16:22:48.757733Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T16:22:48.757779Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-03-31T16:22:48.757827Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2025-03-31T16:22:48.757881Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:22:48.757995Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-03-31T16:22:48.760001Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 2025-03-31T16:22:48.760477Z node 4 :HTTP WARN: 127.0.0.1:0 POST /login 2025-03-31T16:22:48.771938Z node 4 
:FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Execute at schemeshard: 72057594046678944 2025-03-31T16:22:48.772013Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin RotateKeys at schemeshard: 72057594046678944 2025-03-31T16:22:48.920720Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Complete, result: Token: "eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQzNDgxMzY4LCJpYXQiOjE3NDM0MzgxNjgsInN1YiI6InVzZXIxIn0.V-wBeOmEU-qNBwM_N_mHzDk55by_Htn32V7qqe7JVOLSJg1rdP0Zwigyvd6PeuoFR9F3IBq2_s2UOkltFBbIYNMLDX3oo07Ajb2VUWMq6kenmZJraxs9TTYY_SdtJVWRP7UDGFdbFYFuWbAxy2O_Y90Cskww9bBy_8P-L9JZ48W_v3ltGtZfyh5xTM2ZDF-4F2zGqmqvpq8J4yLD9XzlBIhJfFjdERaYw3lEtfs7yZCJOvg6vrKLq6VB9UlqpDqhTEcIrmZVkvo3fKZZ83rkrVl307rYDi62vdORdI-RENU0XKcGHiHLs_ofT2MMLvyddxjcI0pz3KdyPokUMZssmg" SanitizedToken: "eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQzNDgxMzY4LCJpYXQiOjE3NDM0MzgxNjgsInN1YiI6InVzZXIxIn0.**" IsAdmin: true, at schemeshard: 72057594046678944 2025-03-31T16:22:48.920911Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:22:48.920963Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:22:48.921178Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:22:48.921226Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:206:2208], at schemeshard: 72057594046678944, txId: 0, path id: 1 2025-03-31T16:22:48.924294Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 0
AUDIT LOG buffer(3):
2025-03-31T16:22:48.701206Z: component=schemeshard, tx_id=1, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=ALTER DATABASE, paths=[//MyRoot], status=SUCCESS, detailed_status=StatusAccepted
2025-03-31T16:22:48.726127Z: component=schemeshard, tx_id=101, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=CREATE USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1
2025-03-31T16:22:48.921712Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=SUCCESS, login_user=user1, sanitized_token=eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQzNDgxMzY4LCJpYXQiOjE3NDM0MzgxNjgsInN1YiI6InVzZXIxIn0.**, login_user_level=admin
AUDIT LOG checked line:
2025-03-31T16:22:48.921712Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=SUCCESS, login_user=user1, sanitized_token=eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQzNDgxMzY4LCJpYXQiOjE3NDM0MzgxNjgsInN1YiI6InVzZXIxIn0.**, login_user_level=admin
>> TSchemeShardLoginTest::FailedLoginWithInvalidUser [GOOD]
>> TSchemeShardLoginTest::RemoveGroup_Owner-StrictAclCheck-true [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::FailedLoginWithInvalidUser [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is
[1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:22:45.671702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:22:45.671815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:22:45.671855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:22:45.671919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:22:45.671970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:22:45.672012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:22:45.672106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:22:45.672221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:22:45.673203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:22:45.759101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:22:45.759171Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:22:45.765820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:22:45.765960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:22:45.766133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:22:45.777217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:22:45.777465Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:22:45.795169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:22:45.795660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:22:45.802382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:22:45.810039Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:22:45.810126Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:22:45.810300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:22:45.810361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:22:45.810462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:22:45.810636Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, 
at schemeshard: 72057594046678944 2025-03-31T16:22:45.817841Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:22:45.982439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:22:45.983787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:22:45.984802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:22:45.986029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:22:45.986122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:22:45.989634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:22:45.989842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:22:45.990113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:22:45.990272Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:22:45.990336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:22:45.990380Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:22:45.993060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:22:45.993138Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:22:45.993205Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:22:45.995498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:22:45.995561Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:22:45.995627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:22:45.995693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:22:46.001602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 
2025-03-31T16:22:46.004219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:22:46.005410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:22:46.006910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:22:46.007065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:22:46.007169Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:22:46.008591Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:22:46.008679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:22:46.009003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:22:46.009120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:22:46.012377Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:22:46.012457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:22:46.012630Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:22:46.012688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:22:46.012974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:22:46.013035Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:22:46.013145Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:22:46.013193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:22:46.013244Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:22:46.013281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:22:46.013329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:22:46.013401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:22:46.013469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation 
id: 1:0 2025-03-31T16:22:46.013511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:22:46.013580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:22:46.013650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:22:46.013719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:22:46.015984Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:22:46.016152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:22:46.016205Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... tablet# 72057594046678944 2025-03-31T16:22:49.405017Z node 5 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:22:49.405107Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:22:49.405332Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:22:49.405428Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:22:49.408108Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:22:49.408174Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:22:49.408402Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:22:49.408457Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:22:49.408829Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:22:49.408885Z node 5 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:22:49.409018Z node 5 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:22:49.409067Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:22:49.409113Z node 5 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:22:49.409240Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:22:49.409286Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:22:49.409354Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:22:49.409396Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:22:49.409433Z node 5 
:FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:22:49.409522Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:22:49.409568Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:22:49.409611Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:22:49.410656Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:22:49.410781Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:22:49.410828Z node 5 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-03-31T16:22:49.410881Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-03-31T16:22:49.410934Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:22:49.411041Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-03-31T16:22:49.414117Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-03-31T16:22:49.414672Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 2025-03-31T16:22:49.415206Z node 5 :TX_PROXY DEBUG: actor# [5:269:2260] Bootstrap 2025-03-31T16:22:49.438530Z node 5 :TX_PROXY DEBUG: actor# [5:269:2260] Become StateWork (SchemeCache [5:274:2265]) 2025-03-31T16:22:49.439135Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:22:49.439371Z node 5 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 290us result status StatusSuccess 2025-03-31T16:22:49.439819Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 
PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:22:49.440201Z node 5 :TX_PROXY DEBUG: actor# [5:269:2260] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-31T16:22:49.442548Z node 5 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944 2025-03-31T16:22:49.443374Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Execute at schemeshard: 72057594046678944 2025-03-31T16:22:49.443427Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin RotateKeys at schemeshard: 72057594046678944 2025-03-31T16:22:49.589013Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Complete, result: Error: "Cannot find user: user1", at schemeshard: 72057594046678944 2025-03-31T16:22:49.589217Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:22:49.589282Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:22:49.589548Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:22:49.589615Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:206:2208], at schemeshard: 72057594046678944, txId: 0, path id: 1 2025-03-31T16:22:49.590205Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 0 2025-03-31T16:22:49.590666Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 
72057594046678944 2025-03-31T16:22:49.590875Z node 5 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 230us result status StatusSuccess 2025-03-31T16:22:49.591625Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { PublicKeys { KeyId: 1 KeyDataPEM: "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAubv0fxU4lrhbDQnm1R6i\noF7K5lHdBm+Ow91+bDRYWdQvJfLq+rKPSKTuCUlkNuhWpCgLnqIsLUKrYvw92+ub\nAa24qM36W97vFCqyE5W82zqwWvB4h+hVYKSRpej4s/tTB6Wf+VXKYfMXS0nQokFT\nb6Q/P9h9UohERvq0l8Zuy5uV2OPOvuCug40PLdBkYHLk64OwjQNMJBXATFMfogcx\n6K+dq0hBGI7UwMO7sxD8iJvmthm1q/6xTzAU13UUx+HtlmAUO0QQp5iYhb+qVRJT\ny/io+p9wB56p9gJMFMkV23cdGkNnpqSAzUxxo5eFKKPWjjKXZGr/QWsx7WUAf6cN\nuQIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1743524569583 } Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::RemoveGroup_Owner-StrictAclCheck-true [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:22:46.600618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:22:46.600722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:22:46.600781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:22:46.600831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:22:46.600885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:22:46.600930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE:
OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:22:46.601000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:22:46.601100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:22:46.601431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:22:46.695572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:22:46.695660Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:22:46.705253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:22:46.705388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:22:46.705564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:22:46.710820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:22:46.711029Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:22:46.711607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:22:46.711866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:22:46.713744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:22:46.714766Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:22:46.714841Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:22:46.715015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:22:46.715076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:22:46.715148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:22:46.715283Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:22:46.723382Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:22:46.836541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:22:46.836781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:22:46.836962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:22:46.837161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, 
opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:22:46.837214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:22:46.844192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:22:46.844391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:22:46.844621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:22:46.844689Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:22:46.844750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:22:46.844787Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:22:46.847966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:22:46.848036Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:22:46.848076Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:22:46.850006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:22:46.850054Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:22:46.850119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:22:46.850183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:22:46.853077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:22:46.854861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:22:46.854998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:22:46.855994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:22:46.856116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 
2025-03-31T16:22:46.856178Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:22:46.856439Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:22:46.856482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:22:46.856629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:22:46.856696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:22:46.858563Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:22:46.858603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:22:46.858742Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:22:46.858770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:22:46.858967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:22:46.859022Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:22:46.859131Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:22:46.859165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:22:46.859201Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:22:46.859226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:22:46.859253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:22:46.859296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:22:46.859325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:22:46.859354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:22:46.859413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:22:46.859454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:22:46.859485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:22:46.861324Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:22:46.861443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 
2025-03-31T16:22:46.861480Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 6678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T16:22:50.219768Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:22:50.219928Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:22:50.219981Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:206:2208], at schemeshard: 72057594046678944, txId: 105, path id: 2 2025-03-31T16:22:50.220034Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:206:2208], at schemeshard: 72057594046678944, txId: 105, path id: 1 2025-03-31T16:22:50.220692Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 105 2025-03-31T16:22:50.220831Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 105 2025-03-31T16:22:50.220879Z node 5 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 105 2025-03-31T16:22:50.220929Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-03-31T16:22:50.220979Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-31T16:22:50.221547Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 105 2025-03-31T16:22:50.221625Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 105 2025-03-31T16:22:50.221652Z node 5 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2025-03-31T16:22:50.221680Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2025-03-31T16:22:50.221710Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:22:50.221781Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2025-03-31T16:22:50.225932Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-03-31T16:22:50.226283Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 TestModificationResult got TxId: 105, wait until txId: 105 2025-03-31T16:22:50.226885Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Dir1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false 
ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:22:50.227101Z node 5 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Dir1" took 230us result status StatusSuccess 2025-03-31T16:22:50.227451Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Dir1" PathDescription { Self { Name: "Dir1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 106 2025-03-31T16:22:50.230628Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveGroup { Group: "group1" } } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:22:50.230857Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 1] name: MyRoot type: EPathTypeDir state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:22:50.230900Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:22:50.230946Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 2] name: Dir1 type: EPathTypeDir state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:22:50.230980Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T16:22:50.231230Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 106:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046678944 2025-03-31T16:22:50.231347Z node 5 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-03-31T16:22:50.231397Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-03-31T16:22:50.231445Z node 5 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-03-31T16:22:50.231482Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-03-31T16:22:50.231548Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:22:50.231619Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: 
false 2025-03-31T16:22:50.231664Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-03-31T16:22:50.231718Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 106:0 2025-03-31T16:22:50.231759Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 106, publications: 1, subscribers: 0 2025-03-31T16:22:50.231805Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 106, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2025-03-31T16:22:50.234424Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 106, response: Status: StatusSuccess TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:22:50.234568Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 106, database: /MyRoot, subject: , status: StatusSuccess, operation: REMOVE GROUP, path: /MyRoot 2025-03-31T16:22:50.234830Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:22:50.234870Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 106, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:22:50.235034Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:22:50.235075Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:206:2208], at schemeshard: 72057594046678944, txId: 106, path id: 1 2025-03-31T16:22:50.235538Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 106 2025-03-31T16:22:50.235637Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 106 2025-03-31T16:22:50.235669Z node 5 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 106 2025-03-31T16:22:50.235722Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-03-31T16:22:50.235764Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:22:50.235893Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 106, subscribers: 0 2025-03-31T16:22:50.237752Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 TestModificationResult got TxId: 106, wait until txId: 106 2025-03-31T16:22:50.238425Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:22:50.238654Z node 5 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 258us result status StatusSuccess 2025-03-31T16:22:50.239142Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 
72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 2 } ChildrenExist: true } Children { Name: "Dir1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TSchemeShardLoginTest::RemoveGroup_NonExisting-StrictAclCheck-false
>> DataStreams::TestPutRecordsCornerCases [GOOD]
>> DataStreams::TestPutRecords
>> DataStreams::TestUnsupported [GOOD]
>> TWebLoginService::AuditLogLoginSuccess
>> TSchemeShardLoginTest::RemoveGroup_NonExisting-StrictAclCheck-false [GOOD]
>> TSchemeShardLoginTest::RemoveGroup_NonExisting-StrictAclCheck-true
------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestUnsupported [GOOD]
Test command err: 2025-03-31T16:22:32.830867Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488009848977206638:2141];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:22:32.830939Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001a2c/r3tmp/tmpVj0tJm/pdisk_1.dat 2025-03-31T16:22:33.315629Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:22:33.315711Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:22:33.320618Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:22:33.368866Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23274, node 1 2025-03-31T16:22:33.446098Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:22:33.446131Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:22:33.500253Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:22:33.500280Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:22:33.500287Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T16:22:33.500428Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25703 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:22:34.028044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:22:34.128330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:25703 2025-03-31T16:22:34.324379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:22:34.582531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:22:37.598944Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488009870230465969:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:22:37.599053Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001a2c/r3tmp/tmpxWWecl/pdisk_1.dat 2025-03-31T16:22:37.836245Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24834, node 4 2025-03-31T16:22:37.942903Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:22:37.943002Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:22:37.979929Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:22:38.001001Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:22:38.001035Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:22:38.001048Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:22:38.001233Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61373 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:22:38.299585Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:22:38.393915Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:61373 2025-03-31T16:22:38.640850Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:22:38.871256Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:22:38.938223Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710662:0, at schemeshard: 72057594046644480 encryption_type: NONE records { sequence_number: "0" shard_id: "shard-000000" } records { sequence_number: "1" shard_id: "shard-000000" } records { sequence_number: "2" shard_id: "shard-000000" } records { sequence_number: "3" shard_id: "shard-000000" } records { sequence_number: "4" shard_id: "shard-000000" } records { sequence_number: "5" shard_id: "shard-000000" } records { sequence_number: "6" shard_id: "shard-000000" } records { sequence_number: "7" shard_id: "shard-000000" } records { sequence_number: "8" shard_id: "shard-000000" } records { sequence_number: "9" shard_id: "shard-000000" } records { sequence_number: "10" shard_id: "shard-000000" } records { sequence_number: "11" shard_id: "shard-000000" } records { sequence_number: "12" shard_id: "shard-000000" } records { sequence_number: "13" shard_id: "shard-000000" } records { sequence_number: "14" shard_id: "shard-000000" } records { sequence_number: "15" shard_id: "shard-000000" } records { sequence_number: "16" shard_id: "shard-000000" } records { sequence_number: "17" shard_id: "shard-000000" } records { sequence_number: "18" shard_id: "shard-000000" } records { sequence_number: "19" shard_id: "shard-000000" } records { sequence_number: "20" shard_id: "shard-000000" } records { sequence_number: "21" shard_id: "shard-000000" } records { sequence_number: "22" shard_id: "shard-000000" } records { sequence_number: "23" shard_id: "shard-000000" } records { sequence_number: "24" shard_id: "shard-000000" } records { sequence_number: "25" shard_id: "shard-000000" } records { sequence_number: "26" shard_id: "shard-000000" } records { sequence_number: "27" shard_id: "shard-000000" } records { sequence_number: "28" shard_id: "shard-000000" } records { sequence_number: "29" shard_id: "shard-000000" } encryption_type: NONE records { sequence_number: "30" shard_id: "shard-000000" } records { sequence_number: "31" shard_id: "shard-000000" } records { sequence_number: "32" shard_id: "shard-000000" } records { sequence_number: "33" shard_id: "shard-000000" } records { sequence_number: "34" shard_id: "shard-000000" } records { sequence_number: "35" shard_id: "shard-000000" } records { sequence_number: "36" shard_id: "shard-000000" } records { sequence_number: "37" shard_id: "shard-000000" } records { sequence_number: "38" shard_id: "shard-000000" } records { sequence_number: "39" shard_id: "shard-000000" } records { sequence_number: "40" shard_id: "shard-000000" } records { sequence_number: "41" shard_id: "shard-000000" } records { sequence_number: "42" shard_id: "shard-000000" } records { sequence_number: "43" shard_id: "shard-000000" } records { sequence_number: "44" shard_id: "shard-000000" } records { sequence_number: "45" shard_id: "shard-000000" } records { sequence_number: "46" shard_id: "shard-000000" } records { sequence_number: "47" shard_id: "shard-000000" } records { sequence_number: "48" shard_id: "shard-000000" } records { sequence_number: "49" shard_id: "shard-000000" } records { sequence_number: "50" shard_id: "shard-000000" } records { sequence_number: "51" shard_id: 
"shard-000000" } records { sequence_number: "52" shard_id: "shard-000000" } records { sequence_number: "53" shard_id: "shard-000000" } records { sequence_number: "54" shard_id: "shard-000000" } records { sequence_number: "55" shard_id: "shard-000000" } records { sequence_number: "56" shard_id: "shard-000000" } records { sequence_number: "57" shard_id: "shard-000000" } records { sequence_number: "58" shard_id: "shard-000000" } records { sequence_number: "59" shard_id: "shard-000000" } encryption_type: NONE records { sequence_number: "60" shard_id: "shard-000000" } records { sequence_number: "61" shard_id: "shard-000000" } records { sequence_number: "62" shard_id: "shard-000000" } records { sequence_number: "63" shard_id: "shard-000000" } records { sequence_number: "64" shard_id: "shard-000000" } records { sequence_number: "65" shard_id: "shard-000000" } records { sequence_number: "66" shard_id: "shard-000000" } records { sequence_number: "67" shard_id: "sh ... d_id: "shard-000000" } records { sequence_number: "69" shard_id: "shard-000000" } records { sequence_number: "70" shard_id: "shard-000000" } records { sequence_number: "71" shard_id: "shard-000000" } records { sequence_number: "72" shard_id: "shard-000000" } records { sequence_number: "73" shard_id: "shard-000000" } records { sequence_number: "74" shard_id: "shard-000000" } records { sequence_number: "75" shard_id: "shard-000000" } records { sequence_number: "76" shard_id: "shard-000000" } records { sequence_number: "77" shard_id: "shard-000000" } records { sequence_number: "78" shard_id: "shard-000000" } records { sequence_number: "79" shard_id: "shard-000000" } records { sequence_number: "80" shard_id: "shard-000000" } records { sequence_number: "81" shard_id: "shard-000000" } records { sequence_number: "82" shard_id: "shard-000000" } records { sequence_number: "83" shard_id: "shard-000000" } records { sequence_number: "84" shard_id: "shard-000000" } records { sequence_number: "85" shard_id: "shard-000000" } records { sequence_number: "86" shard_id: "shard-000000" } records { sequence_number: "87" shard_id: "shard-000000" } records { sequence_number: "88" shard_id: "shard-000000" } records { sequence_number: "89" shard_id: "shard-000000" } encryption_type: NONE records { sequence_number: "90" shard_id: "shard-000000" } records { sequence_number: "91" shard_id: "shard-000000" } records { sequence_number: "92" shard_id: "shard-000000" } records { sequence_number: "93" shard_id: "shard-000000" } records { sequence_number: "94" shard_id: "shard-000000" } records { sequence_number: "95" shard_id: "shard-000000" } records { sequence_number: "96" shard_id: "shard-000000" } records { sequence_number: "97" shard_id: "shard-000000" } records { sequence_number: "98" shard_id: "shard-000000" } records { sequence_number: "99" shard_id: "shard-000000" } records { sequence_number: "100" shard_id: "shard-000000" } records { sequence_number: "101" shard_id: "shard-000000" } records { sequence_number: "102" shard_id: "shard-000000" } records { sequence_number: "103" shard_id: "shard-000000" } records { sequence_number: "104" shard_id: "shard-000000" } records { sequence_number: "105" shard_id: "shard-000000" } records { sequence_number: "106" shard_id: "shard-000000" } records { sequence_number: "107" shard_id: "shard-000000" } records { sequence_number: "108" shard_id: "shard-000000" } records { sequence_number: "109" shard_id: "shard-000000" } records { sequence_number: "110" shard_id: "shard-000000" } records { sequence_number: "111" shard_id: 
"shard-000000" } records { sequence_number: "112" shard_id: "shard-000000" } records { sequence_number: "113" shard_id: "shard-000000" } records { sequence_number: "114" shard_id: "shard-000000" } records { sequence_number: "115" shard_id: "shard-000000" } records { sequence_number: "116" shard_id: "shard-000000" } records { sequence_number: "117" shard_id: "shard-000000" } records { sequence_number: "118" shard_id: "shard-000000" } records { sequence_number: "119" shard_id: "shard-000000" } 2025-03-31T16:22:42.595854Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7488009870230465969:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:22:42.595926Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; encryption_type: NONE records { sequence_number: "120" shard_id: "shard-000000" } records { sequence_number: "121" shard_id: "shard-000000" } records { sequence_number: "122" shard_id: "shard-000000" } records { sequence_number: "123" shard_id: "shard-000000" } records { sequence_number: "124" shard_id: "shard-000000" } records { sequence_number: "125" shard_id: "shard-000000" } records { sequence_number: "126" shard_id: "shard-000000" } records { sequence_number: "127" shard_id: "shard-000000" } records { sequence_number: "128" shard_id: "shard-000000" } records { sequence_number: "129" shard_id: "shard-000000" } records { sequence_number: "130" shard_id: "shard-000000" } records { sequence_number: "131" shard_id: "shard-000000" } records { sequence_number: "132" shard_id: "shard-000000" } records { sequence_number: "133" shard_id: "shard-000000" } records { sequence_number: "134" shard_id: "shard-000000" } records { sequence_number: "135" shard_id: "shard-000000" } records { sequence_number: "136" shard_id: "shard-000000" } records { sequence_number: "137" shard_id: "shard-000000" } records { sequence_number: "138" shard_id: "shard-000000" } records { sequence_number: "139" shard_id: "shard-000000" } records { sequence_number: "140" shard_id: "shard-000000" } records { sequence_number: "141" shard_id: "shard-000000" } records { sequence_number: "142" shard_id: "shard-000000" } records { sequence_number: "143" shard_id: "shard-000000" } records { sequence_number: "144" shard_id: "shard-000000" } records { sequence_number: "145" shard_id: "shard-000000" } records { sequence_number: "146" shard_id: "shard-000000" } records { sequence_number: "147" shard_id: "shard-000000" } records { sequence_number: "148" shard_id: "shard-000000" } records { sequence_number: "149" shard_id: "shard-000000" } Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestStreamTimeRetention","id":"used_storage-root-72075186224037888-1743438158823-2","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":0,"unit":"byte*second","start":1743438158,"finish":1743438158},"labels":{"datastreams_stream_name":"stream_TestStreamTimeRetention","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1743438158}' Got line from metering file data: 
'{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestStreamTimeRetention","id":"used_storage-root-72075186224037888-1743438158902-3","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":0,"unit":"byte*second","start":1743438158,"finish":1743438158},"labels":{"datastreams_stream_name":"stream_TestStreamTimeRetention","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1743438158}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestStreamTimeRetention","id":"used_storage-root-72075186224037888-1743438158972-4","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":2,"unit":"byte*second","start":1743438158,"finish":1743438160},"labels":{"datastreams_stream_name":"stream_TestStreamTimeRetention","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1743438160}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestStreamTimeRetention","id":"used_storage-root-72075186224037888-1743438160023-5","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":1,"unit":"byte*second","start":1743438160,"finish":1743438161},"labels":{"datastreams_stream_name":"stream_TestStreamTimeRetention","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1743438161}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestStreamTimeRetention","id":"used_storage-root-72075186224037888-1743438161043-6","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":1,"unit":"byte*second","start":1743438161,"finish":1743438162},"labels":{"datastreams_stream_name":"stream_TestStreamTimeRetention","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1743438162}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestStreamTimeRetention","id":"used_storage-root-72075186224037888-1743438162066-7","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":1,"unit":"byte*second","start":1743438162,"finish":1743438163},"labels":{"datastreams_stream_name":"stream_TestStreamTimeRetention","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1743438163}' 2025-03-31T16:22:46.648371Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7488009906421938156:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:22:46.648492Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001a2c/r3tmp/tmp1EqgHt/pdisk_1.dat 2025-03-31T16:22:46.771815Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:22:46.806928Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:22:46.806999Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:22:46.810227Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22097, node 7 2025-03-31T16:22:46.907995Z node 7 :NET_CLASSIFIER WARN: 
distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:22:46.908022Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:22:46.908031Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:22:46.908162Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29534 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:22:47.188485Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:22:47.255202Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:29534 2025-03-31T16:22:47.469362Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
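The "Got line from metering file" records quoted in the test output above are newline-delimited JSON, one record per line, each carrying a usage block (quantity, unit, start, finish) and a labels block with the stream name. A minimal offline-aggregation sketch follows; the file name is a placeholder, and only the fields actually visible in the log are assumed:

```python
import json
from collections import defaultdict

def summarize_metering(path):
    """Sum metered quantity per (stream, unit) from a JSONL metering file.

    Assumes one JSON record per line, shaped like the 'used_storage' /
    'reserved_resources' records quoted in the log above.
    """
    totals = defaultdict(int)
    with open(path) as f:
        for line in f:
            line = line.strip()
            if not line:
                continue
            rec = json.loads(line)
            stream = rec["labels"]["datastreams_stream_name"]
            usage = rec["usage"]
            # Keys like ("stream_TestStreamTimeRetention", "byte*second").
            totals[(stream, usage["unit"])] += usage["quantity"]
    return dict(totals)

if __name__ == "__main__":
    # "metering.jsonl" is hypothetical; the tests above write records
    # like the quoted ones to their own metering file.
    print(summarize_metering("metering.jsonl"))
```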
>> TSchemeShardLoginTest::BanUnbanUser
>> TWebLoginService::AuditLogLoginSuccess [GOOD]
>> TWebLoginService::AuditLogLoginBadPassword
>> TSchemeShardLoginTest::AddAccess_NonExisting-StrictAclCheck-false
>> TSchemeShardLoginTest::RemoveGroup_NonExisting-StrictAclCheck-true [GOOD]
>> TSchemeShardLoginTest::RemoveGroup_Owner-StrictAclCheck-false
>> TWebLoginService::AuditLogLoginBadPassword [GOOD]
>> TWebLoginService::AuditLogLdapLoginSuccess
>> DataStreams::ListStreamsValidation [GOOD]
>> DataStreams::TestListShards1Shard [GOOD]
>> TSchemeShardLoginTest::BanUnbanUser [GOOD]
>> TSchemeShardLoginTest::BanUserWithWaiting
>> TSchemeShardLoginTest::RemoveGroup_Owner-StrictAclCheck-false [GOOD]
>> TSchemeShardLoginTest::RemoveGroup_Acl-StrictAclCheck-false
>> DataStreams::TestShardPagination [GOOD]
>> TSchemeShardLoginTest::AddAccess_NonExisting-StrictAclCheck-false [GOOD]
>> TSchemeShardLoginTest::AddAccess_NonExisting-StrictAclCheck-true
>> TWebLoginService::AuditLogEmptySIDsLoginSuccess
>> TWebLoginService::AuditLogLdapLoginSuccess [GOOD]
>> TWebLoginService::AuditLogLogout
>> DataStreams::ListStreamsValidation [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::ListStreamsValidation [GOOD]
Test command err: 2025-03-31T16:22:32.780838Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488009845743322077:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:22:32.780900Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001a23/r3tmp/tmpp1nhuk/pdisk_1.dat 2025-03-31T16:22:33.260126Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:22:33.260217Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:22:33.268338Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:22:33.297943Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15029, node 1 2025-03-31T16:22:33.503751Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:22:33.503790Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:22:33.503800Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:22:33.503942Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13559 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:22:34.028725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:22:34.122021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:13559 2025-03-31T16:22:34.315436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:22:35.969531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:22:36.181260Z node 1 :CHANGE_EXCHANGE WARN: [CdcChangeSenderMain][72075186224037890:1][1:7488009862923192933:2371] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:6:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2025-03-31T16:22:36.348973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:22:36.535707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:22:36.560240Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037906 not found 2025-03-31T16:22:36.560393Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037897 not found 2025-03-31T16:22:36.560421Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037908 not found 2025-03-31T16:22:36.560435Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037905 not found 2025-03-31T16:22:36.560448Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037894 not found 2025-03-31T16:22:36.560464Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037903 not found 2025-03-31T16:22:36.564749Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037900 not found 2025-03-31T16:22:36.564773Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037896 not found 2025-03-31T16:22:36.566326Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037893 not found 2025-03-31T16:22:36.566368Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037901 not found 2025-03-31T16:22:36.566380Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037898 not found 2025-03-31T16:22:36.566391Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037895 not found 2025-03-31T16:22:36.566402Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037907 not found 2025-03-31T16:22:36.566416Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037902 not found 2025-03-31T16:22:36.566435Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037899 not found 2025-03-31T16:22:36.566458Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037904 not found 2025-03-31T16:22:36.581239Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,19) wasn't found 2025-03-31T16:22:36.581335Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,7) wasn't found 2025-03-31T16:22:36.581368Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,13) wasn't found 2025-03-31T16:22:36.581395Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,10) wasn't found 2025-03-31T16:22:36.581431Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,16) wasn't found 2025-03-31T16:22:36.581474Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,21) wasn't found 2025-03-31T16:22:36.581501Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,18) wasn't found 2025-03-31T16:22:36.581528Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,15) wasn't found 2025-03-31T16:22:36.581557Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,12) wasn't found 2025-03-31T16:22:36.581630Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,9) wasn't found 2025-03-31T16:22:36.581712Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,20) wasn't found 2025-03-31T16:22:36.581772Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,14) wasn't found 2025-03-31T16:22:36.581798Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,11) wasn't found 2025-03-31T16:22:36.582085Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,8) wasn't found 
2025-03-31T16:22:38.251013Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488009872261739617:2195];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:22:38.251284Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001a23/r3tmp/tmpEkSiEB/pdisk_1.dat 2025-03-31T16:22:38.442661Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:22:38.497145Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:22:38.497239Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:22:38.501466Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17631, node 4 2025-03-31T16:22:38.581459Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:22:38.581487Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:22:38.581498Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:22:38.581658Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1688 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:22:38.848900Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:22:38.920986Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:1688 2025-03-31T16:22:39.111707Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:22:39.380635Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:22:39.490628Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:22:39.579242Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:22:43.252138Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7488009893641530438:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:22:43.252227Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001a23/r3tmp/tmpgNR8WQ/pdisk_1.dat 2025-03-31T16:22:43.409311Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:22:43.444124Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:22:43.444211Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:22:43.447770Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29027, node 7 2025-03-31T16:22:43.564101Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:22:43.564128Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:22:43.564136Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:22:43.564290Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15848 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:22:43.843859Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:22:43.922591Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:15848 2025-03-31T16:22:44.138202Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:22:44.444302Z node 7 :TX_PROXY ERROR: Actor# [7:7488009897936499864:3454] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/stream_TestCreateExistingStream\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypePersQueueGroup, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:22:48.332240Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7488009915549244859:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:22:48.332312Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001a23/r3tmp/tmpX3ouaL/pdisk_1.dat 2025-03-31T16:22:48.491958Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:22:48.537388Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:22:48.537498Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:22:48.540649Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26188, node 10 2025-03-31T16:22:48.628332Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:22:48.628361Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:22:48.628376Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:22:48.628546Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14585 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:22:48.945961Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:22:49.020718Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:14585 2025-03-31T16:22:49.264785Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting...
>> TSchemeShardLoginTest::RemoveGroup_Acl-StrictAclCheck-false [GOOD]
>> TSchemeShardLoginTest::RemoveGroup_Acl-StrictAclCheck-true
>> TSchemeShardLoginTest::AddAccess_NonExisting-StrictAclCheck-true [GOOD]
>> TSchemeShardLoginTest::AddAccess_NonYdb-StrictAclCheck-false
------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestListShards1Shard [GOOD]
Test command err: 2025-03-31T16:22:32.813909Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488009847474306519:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:22:32.813954Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001a33/r3tmp/tmpFlERCy/pdisk_1.dat 2025-03-31T16:22:33.263712Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:22:33.266227Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:22:33.266309Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:22:33.274922Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6948, node 1 2025-03-31T16:22:33.500233Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:22:33.500267Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:22:33.500277Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:22:33.500458Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4375 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:22:34.036707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:22:34.205561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:4375 2025-03-31T16:22:34.397989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:22:34.791664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710661:0, at schemeshard: 72057594046644480 encryption_type: NONE records { sequence_number: "0" shard_id: "shard-000000" } records { sequence_number: "1" shard_id: "shard-000000" } records { sequence_number: "2" shard_id: "shard-000000" } records { sequence_number: "3" shard_id: "shard-000000" } records { sequence_number: "4" shard_id: "shard-000000" } records { sequence_number: "5" shard_id: "shard-000000" } records { sequence_number: "6" shard_id: "shard-000000" } records { sequence_number: "7" shard_id: "shard-000000" } records { sequence_number: "8" shard_id: "shard-000000" } records { sequence_number: "9" shard_id: "shard-000000" } records { sequence_number: "10" shard_id: "shard-000000" } records { sequence_number: "11" shard_id: "shard-000000" } records { sequence_number: "12" shard_id: "shard-000000" } records { sequence_number: "13" shard_id: "shard-000000" } records { sequence_number: "14" shard_id: "shard-000000" } records { sequence_number: "15" shard_id: "shard-000000" } records { sequence_number: "16" shard_id: "shard-000000" } records { sequence_number: "17" shard_id: "shard-000000" } records { sequence_number: "18" shard_id: "shard-000000" } records { sequence_number: "19" shard_id: "shard-000000" } records { sequence_number: "20" shard_id: "shard-000000" } records { sequence_number: "21" shard_id: "shard-000000" } records { sequence_number: "22" shard_id: "shard-000000" } records { sequence_number: "23" shard_id: "shard-000000" } records { sequence_number: "24" shard_id: "shard-000000" } records { sequence_number: "25" shard_id: "shard-000000" } records { sequence_number: "26" shard_id: "shard-000000" } records { sequence_number: "27" shard_id: "shard-000000" 
} records { sequence_number: "28" shard_id: "shard-000000" } records { sequence_number: "29" shard_id: "shard-000000" } 2025-03-31T16:22:34.909405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:22:35.029428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:22:35.047583Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-03-31T16:22:35.048693Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2025-03-31T16:22:35.048720Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2025-03-31T16:22:35.060150Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,2) wasn't found 2025-03-31T16:22:35.060210Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,3) wasn't found 2025-03-31T16:22:35.060237Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,1) wasn't found Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestNonChargeableUser","id":"reserved_resources-root-72075186224037888-1743438154653-1","schema":"yds.resources.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":0,"reserved_storage_bytes":90596966400},"usage":{"quantity":0,"unit":"second","start":1743438154,"finish":1743438154},"labels":{"datastreams_stream_name":"stream_TestNonChargeableUser","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1743438154}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestNonChargeableUser","id":"used_storage-root-72075186224037888-1743438154653-2","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":0,"unit":"byte*second","start":1743438154,"finish":1743438154},"labels":{"datastreams_stream_name":"stream_TestNonChargeableUser","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1743438154}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestNonChargeableUser","id":"reserved_resources-root-72075186224037888-1743438154975-3","schema":"yds.resources.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":0,"reserved_storage_bytes":90596966400},"usage":{"quantity":1,"unit":"second","start":1743438154,"finish":1743438155},"labels":{"datastreams_stream_name":"stream_TestNonChargeableUser","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1743438155}' Got line from metering file data: 
'{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestNonChargeableUser","id":"used_storage-root-72075186224037888-1743438154975-4","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":1,"unit":"byte*second","start":1743438154,"finish":1743438155},"labels":{"datastreams_stream_name":"stream_TestNonChargeableUser","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1743438155}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestNonChargeableUser","id":"reserved_resources-root-72075186224037890-1743438154975-5","schema":"yds.resources.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":0,"reserved_storage_bytes":90596966400},"usage":{"quantity":1,"unit":"second","start":1743438154,"finish":1743438155},"labels":{"datastreams_stream_name":"stream_TestNonChargeableUser","ydb_database":"root"},"version":"v1","source_id":"72075186224037890","source_wt":1743438155}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestNonChargeableUser","id":"used_storage-root-72075186224037890-1743438154975-6","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":1,"unit":"byte*second","start":1743438154,"finish":1743438155},"labels":{"datastreams_stream_name":"stream_TestNonChargeableUser","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037890","source_wt":1743438155}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestNonChargeableUser","id":"reserved_resources-root-72075186224037888-1743438154653-1","schema":"yds.resources.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":0,"reserved_storage_bytes":90596966400},"usage":{"quantity":0,"unit":"second","start":1743438154,"finish":1743438154},"labels":{"datastreams_stream_name":"stream_TestNonChargeableUser","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1743438154}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestNonChargeableUser","id":"used_storage-root-72075186224037888-1743438154653-2","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":0,"unit":"byte*second","start":1743438154,"finish":1743438154},"labels":{"datastreams_stream_name":"stream_TestNonChargeableUser","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1743438154}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestNonChargeableUser","id":"reserved_resources-root-72075186224037888-1743438154975-3","schema":"yds.resources.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":0,"reserved_storage_bytes":90596966400},"usage":{"quantity":1,"unit":"second","start":1743438154,"finish":1743438155},"labels":{"datastreams_stream_name":"stream_TestNonChargeableUser","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1743438155}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestNonChargeableUser","id":"used_storage-root-72075186224037888-1743438154975-4","schema":"ydb.serverless.v1","tags":{"ydb_size": ... 
4480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:22:43.628894Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:22:43.710465Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:10026 2025-03-31T16:22:43.936397Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... WARNING: All log messages before y_absl::InitializeLog() is called are written to STDERR E0000 00:00:1743438164.145528 485252 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1743438164.145789 485252 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1743438164.157183 485252 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1743438164.157282 485252 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1743438164.168324 485252 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1743438164.168464 485252 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1743438164.179656 485252 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1743438164.179752 485252 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2025-03-31T16:22:44.212234Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:22:44.281350Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, 
but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710662:0, at schemeshard: 72057594046644480 E0000 00:00:1743438164.356117 485252 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1743438164.356257 485252 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2025-03-31T16:22:44.371792Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710663:0, at schemeshard: 72057594046644480 E0000 00:00:1743438164.461173 485252 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1743438164.462392 485252 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2025-03-31T16:22:44.477245Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710664:0, at schemeshard: 72057594046644480 E0000 00:00:1743438164.573084 485252 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1743438164.573207 485252 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1743438164.597736 485252 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1743438164.597854 485252 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2025-03-31T16:22:44.650521Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:22:44.669415Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037892 not found 2025-03-31T16:22:44.669446Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037893 not found 2025-03-31T16:22:44.669460Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037890 not found 2025-03-31T16:22:44.669473Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037889 not found 2025-03-31T16:22:44.669492Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037891 not found 2025-03-31T16:22:44.669672Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037888 not found E0000 00:00:1743438164.697557 485252 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required 
fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1743438164.697685 485252 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2025-03-31T16:22:48.194371Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7488009916624538151:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:22:48.194448Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001a33/r3tmp/tmpTEs08Z/pdisk_1.dat 2025-03-31T16:22:48.404241Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:22:48.457391Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:22:48.457513Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:22:48.461137Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17072, node 10 2025-03-31T16:22:48.553575Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:22:48.553604Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:22:48.553633Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:22:48.553833Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1504 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:22:48.970839Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:22:49.046020Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:1504 2025-03-31T16:22:49.361033Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
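The repeated message_lite.cc errors throughout this run come from proto2 "required"-field semantics: NKikimrPQ.TYdsNextToken fails to parse whenever any of CreationTimestamp, MaxResults, AlreadyRead, or StreamArn is absent. A minimal sketch of that invariant, treating the token as a plain dict (the real type is a C++ protobuf message; this stand-in is illustrative only):

    # Required fields named verbatim in the parse errors above.
    REQUIRED_FIELDS = ("CreationTimestamp", "MaxResults", "AlreadyRead", "StreamArn")

    def next_token_is_parseable(fields: dict) -> bool:
        # proto2 semantics: a message missing any required field fails to
        # parse, which is exactly what message_lite.cc:131 reports here.
        return all(name in fields for name in REQUIRED_FIELDS)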
E0000 00:00:1743438169.551494 486765 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1743438169.560929 486765 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1743438169.568707 486765 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1743438169.576724 486765 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1743438169.585833 486765 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn >> TSchemeShardLoginTest::ResetFailedAttemptCount [GOOD] >> TSchemeShardLoginTest::ResetFailedAttemptCountAfterModifyUser >> TWebLoginService::AuditLogLogout [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestShardPagination [GOOD] Test command err: 2025-03-31T16:22:38.190871Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488009871246745637:2212];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:22:38.194652Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0019fa/r3tmp/tmpX5lOp2/pdisk_1.dat 2025-03-31T16:22:38.584448Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:22:38.611337Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:22:38.611469Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 29103, node 1 2025-03-31T16:22:38.685900Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:22:38.686105Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:22:38.686623Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:22:38.705994Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:22:38.706024Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:22:38.706032Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:22:38.706158Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24240 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:22:38.989903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:22:39.077057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:24240 2025-03-31T16:22:39.252974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting...
: Error: retention hours and storage megabytes must fit one of: { hours : [0, 24], storage : [0, 0]}, { hours : [0, 168], storage : [51200, 1048576]}, provided values: hours 168, storage 40960, code: 500080 2025-03-31T16:22:39.667071Z node 1 :TX_PROXY ERROR: Actor# [1:7488009875541714912:3445] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/stream_TestStreamStorageRetention\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypePersQueueGroup, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:22:42.831574Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488009891814757268:2143];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:22:42.832025Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0019fa/r3tmp/tmpYG8ytE/pdisk_1.dat 2025-03-31T16:22:43.027368Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:22:43.069061Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:22:43.069133Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:22:43.073243Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15624, node 4 2025-03-31T16:22:43.151246Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:22:43.151279Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:22:43.151301Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:22:43.151453Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10020 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:22:43.419641Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
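The retention error near the start of this block spells out the accepted (hours, storage) windows. A small sketch of that check, with the bounds copied verbatim from the error text (the function name and data layout are illustrative, not the actual implementation):

    # Allowed (retention hours, storage MB) windows, from the error message above.
    ALLOWED_WINDOWS = (
        ((0, 24), (0, 0)),
        ((0, 168), (51200, 1048576)),
    )

    def retention_is_valid(hours: int, storage_mb: int) -> bool:
        return any(
            lo_h <= hours <= hi_h and lo_s <= storage_mb <= hi_s
            for (lo_h, hi_h), (lo_s, hi_s) in ALLOWED_WINDOWS
        )

    # The rejected request: 168 h exceeds the 24 h cap of the first window,
    # and 40960 MB is below the 51200 MB floor of the second.
    assert not retention_is_valid(168, 40960)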
2025-03-31T16:22:43.489828Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:10020 2025-03-31T16:22:43.688742Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:22:49.109934Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7488009919707894824:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:22:49.110462Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0019fa/r3tmp/tmpRdoyD8/pdisk_1.dat 2025-03-31T16:22:49.291106Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:22:49.334971Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:22:49.335075Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:22:49.340694Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15118, node 7 2025-03-31T16:22:49.423153Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:22:49.423174Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:22:49.423182Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:22:49.423346Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2064 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:22:49.736793Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:22:49.804305Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:2064 2025-03-31T16:22:50.038346Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:22:50.059634Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 >> TWebLoginService::AuditLogEmptySIDsLoginSuccess [GOOD] >> TWebLoginService::AuditLogLdapLoginBadPassword >> TSchemeShardLoginTest::RemoveGroup_Acl-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::AddAccess_NonYdb-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::AddAccess_NonYdb-StrictAclCheck-true ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TWebLoginService::AuditLogLogout [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:111:2142] 2025-03-31T16:22:51.848274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:22:51.848368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:22:51.848405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:22:51.848444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:22:51.848501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:22:51.848532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:22:51.848597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:22:51.848694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:22:51.849059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:22:51.932273Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:22:51.932356Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:22:51.945452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:22:51.945765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:22:51.945933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:22:51.955351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxUpgradeSchema.Complete 2025-03-31T16:22:51.955630Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:22:51.956296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:22:51.956490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:22:51.958534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:22:51.959854Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:22:51.959909Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:22:51.960005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:22:51.960063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:22:51.960106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:22:51.960298Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:22:51.966773Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:240:2058] recipient: [1:15:2062] 2025-03-31T16:22:52.081080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:22:52.081290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:22:52.081441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:22:52.081610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:22:52.081667Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:22:52.083535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:22:52.083696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:22:52.083894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:22:52.083953Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:22:52.083978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:22:52.084003Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:22:52.085651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:22:52.085697Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:22:52.085729Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:22:52.087314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:22:52.087349Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:22:52.087372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:22:52.087415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:22:52.090082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:22:52.091736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:22:52.091891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:22:52.092656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:22:52.092758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969446 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:22:52.092806Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:22:52.093002Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:22:52.093040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:22:52.093172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:22:52.093242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:22:52.094759Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:22:52.094792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:22:52.094919Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:22:52.094951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:22:52.095235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:22:52.095267Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:22:52.095352Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:22:52.095376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:22:52.095401Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:22:52.095442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:22:52.095468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:22:52.095495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:22:52.095517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:22:52.095535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:22:52.095575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:22:52.095607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:22:52.095634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:22:52.096948Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:22:52.097034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:22:52.097062Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
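The "Change state for txid" transitions logged above (2 -> 3 -> 128 -> 240) trace a single suboperation through the schemeshard pipeline. Pairing each transition with the handler named on the adjacent log lines gives, roughly (the stage names paraphrase those log lines; the numeric codes are internal and this mapping is an observation, not a spec):

    # (from_state, to_state, handler) as observed in the log above.
    TXID_PROGRESSION = (
        (2, 3, "TCreateParts"),      # "no shards to create, do next state"
        (3, 128, "TConfigureParts"),
        (128, 240, "TPropose"),      # on TEvOperationPlan from the coordinator
        (240, None, "TDone"),        # operation done; publication to the scheme board follows
    )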
94046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:22:54.302048Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-03-31T16:22:54.302098Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-03-31T16:22:54.302139Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:22:54.302224Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-03-31T16:22:54.304990Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-03-31T16:22:54.305489Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-03-31T16:22:54.305949Z node 4 :TX_PROXY DEBUG: actor# [4:269:2260] Bootstrap 2025-03-31T16:22:54.324753Z node 4 :TX_PROXY DEBUG: actor# [4:269:2260] Become StateWork (SchemeCache [4:275:2266]) 2025-03-31T16:22:54.327696Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "user1" Password: "password1" } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:22:54.332927Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046678944 2025-03-31T16:22:54.333063Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-31T16:22:54.333102Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T16:22:54.333146Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-31T16:22:54.333181Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T16:22:54.333237Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:22:54.333295Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-03-31T16:22:54.333335Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T16:22:54.333376Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-03-31T16:22:54.333416Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 1, subscribers: 0 2025-03-31T16:22:54.333454Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 4 2025-03-31T16:22:54.334568Z node 4 :TX_PROXY DEBUG: actor# [4:269:2260] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-31T16:22:54.337197Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSuccess TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:22:54.337317Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSuccess, operation: CREATE USER, 
path: /MyRoot 2025-03-31T16:22:54.337637Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:22:54.337685Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:22:54.337858Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:22:54.337901Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-03-31T16:22:54.338660Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T16:22:54.338772Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T16:22:54.338811Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-03-31T16:22:54.338851Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2025-03-31T16:22:54.338898Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:22:54.338992Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-03-31T16:22:54.339423Z node 4 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-03-31T16:22:54.340972Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 2025-03-31T16:22:54.341395Z node 4 :HTTP WARN: 127.0.0.1:0 POST /login 2025-03-31T16:22:54.343211Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Execute at schemeshard: 72057594046678944 2025-03-31T16:22:54.343271Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin RotateKeys at schemeshard: 72057594046678944 2025-03-31T16:22:54.383467Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Complete, result: Token: "eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQzNDgxMzc0LCJpYXQiOjE3NDM0MzgxNzQsInN1YiI6InVzZXIxIn0.gTPd3rWwXluNCr-RHAn4D68-FfnQ2-zmfvAY9FsibNGvpxixDmHdo5DrPNstuE0IPPonJvGXqfyMwL3F66upqorCOHXUd65uYuopgt9XbJVHFYoqJ9YgArrbGVYwO1pTNK5bv-yJJyON4QZAIDxvllbLsSDfq692EOvHVVj1-D17uv44IoNxTbBdxuAfi6fC8hBcR3yWmUbVYixj-LTE_smG_EXCMRk1p7FqXbJsxwFgKnZrUlNtwKvr_t6JMRB8yTv0clSk9Xz_YwWB0GSEBFU0NwdnFJRzuISQ9Qe-oabr6fahwUXg77rR_pTHYa1XerZb6ApW3aKC0ciDejLYRQ" SanitizedToken: "eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQzNDgxMzc0LCJpYXQiOjE3NDM0MzgxNzQsInN1YiI6InVzZXIxIn0.**" IsAdmin: true, at schemeshard: 72057594046678944 2025-03-31T16:22:54.383602Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:22:54.383644Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 
72057594046678944, LocalPathId: 1] 2025-03-31T16:22:54.383826Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:22:54.383868Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:206:2208], at schemeshard: 72057594046678944, txId: 0, path id: 1 2025-03-31T16:22:54.385204Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 0 2025-03-31T16:22:54.385880Z node 4 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:22:54.386067Z node 4 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 209us result status StatusSuccess 2025-03-31T16:22:54.386479Z node 4 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { PublicKeys { KeyId: 1 KeyDataPEM: "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0R49Gjc6YzEyVkGBgkh+\nEpQo+a74VzHJQa6IFBKIsYFnqV2BN03yaJIaVrHJWpTXqyfpjTcnuGEAVqROm8/h\nCd9sAnriLTbICvMFunVrPNpKVeq9XQu80R6EB07TCRlMwU86ligld3ofQWCqYGDu\nsn5prZG7mCdn/HpRRwNOYrtZp+gILecR01zigdnRqFDkjAHHqurnP4PpZJ102jQd\nhwr4g9tPzDuIfeKcx2SEBK3pGEFSIvU0jklNVIfPiL3gk9DfIN9BT7Y9CAUfyEo1\nf4c6Uhwo4FWEjD2f2qEQnu8S942jB8v2iv2w+ZGQa04UNVrKRK0YUgA4Q8OwACCZ\nAQIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1743524574373 } Sids { Name: "user1" Type: USER } Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:22:54.389022Z node 4 :HTTP WARN: 127.0.0.1:0 POST /logout 2025-03-31T16:22:54.389092Z node 4 :HTTP ERROR: Logout: No ydb_session_id cookie 2025-03-31T16:22:54.389485Z node 4 :HTTP WARN: 127.0.0.1:0 POST /logout 2025-03-31T16:22:54.396291Z node 4 :TICKET_PARSER ERROR: Ticket **** (589A015B): Token is not in correct format 2025-03-31T16:22:54.396411Z node 4 :HTTP ERROR: Logout: Token is not in correct format 2025-03-31T16:22:54.396869Z node 4 :HTTP WARN: 127.0.0.1:0 POST /logout AUDIT LOG buffer(4): 2025-03-31T16:22:54.291037Z: component=schemeshard, 
tx_id=1, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=ALTER DATABASE, paths=[//MyRoot], status=SUCCESS, detailed_status=StatusAccepted 2025-03-31T16:22:54.332800Z: component=schemeshard, tx_id=101, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=CREATE USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1 2025-03-31T16:22:54.384284Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=SUCCESS, login_user=user1, sanitized_token=eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQzNDgxMzc0LCJpYXQiOjE3NDM0MzgxNzQsInN1YiI6InVzZXIxIn0.**, login_user_level=admin 2025-03-31T16:22:54.398040Z: component=web-login, remote_address=127.0.0.1, subject=user1, sanitized_token=eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQzNDgxMzc0LCJpYXQiOjE3NDM0MzgxNzQsInN1YiI6InVzZXIxIn0.**, operation=LOGOUT, status=SUCCESS AUDIT LOG checked line: 2025-03-31T16:22:54.398040Z: component=web-login, remote_address=127.0.0.1, subject=user1, sanitized_token=eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQzNDgxMzc0LCJpYXQiOjE3NDM0MzgxNzQsInN1YiI6InVzZXIxIn0.**, operation=LOGOUT, status=SUCCESS ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::RemoveGroup_Acl-StrictAclCheck-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:22:51.764305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:22:51.764382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:22:51.764410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:22:51.764460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:22:51.764527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:22:51.764568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:22:51.764643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:22:51.764724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:22:51.765002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:22:51.838622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:22:51.838675Z node 1 :IMPORT WARN: Table profiles were not loaded 
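The AUDIT LOG buffer above uses one '<timestamp>: key=value, key=value, ...' entry per event, and the login entries carry a sanitized JWT whose signature segment is masked as '**'. A sketch of both conventions as they appear in the log (helper names are illustrative, and the parser only claims to handle entries shaped like the ones shown):

    def parse_audit_entry(entry: str) -> dict:
        # '<timestamp>: key=value, key=value, ...' as in the buffer above.
        timestamp, _, rest = entry.partition(": ")
        fields = dict(kv.split("=", 1) for kv in rest.split(", "))
        fields["timestamp"] = timestamp
        return fields

    def sanitize_token(jwt: str) -> str:
        # Matches the SanitizedToken values in the log: header and payload
        # segments are kept, the signature is replaced with '**'.
        header, payload, _signature = jwt.split(".")
        return f"{header}.{payload}.**"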
2025-03-31T16:22:51.844573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:22:51.844730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:22:51.844891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:22:51.850055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:22:51.850277Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:22:51.850855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:22:51.851058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:22:51.853183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:22:51.854654Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:22:51.854723Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:22:51.854822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:22:51.854874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:22:51.854916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:22:51.855184Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:22:51.863349Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:22:52.014993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:22:52.015279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:22:52.015531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:22:52.015741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:22:52.015787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:22:52.018296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:22:52.018459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:22:52.018679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:22:52.018738Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:22:52.018772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:22:52.018804Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:22:52.021168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:22:52.021228Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:22:52.021269Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:22:52.023313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:22:52.023362Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:22:52.023403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:22:52.023465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:22:52.033591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:22:52.035952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:22:52.036141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:22:52.037220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:22:52.037371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:22:52.037425Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:22:52.037726Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:22:52.037780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:22:52.037957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:22:52.038037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:22:52.040536Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:22:52.040582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:22:52.040795Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:22:52.040852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:22:52.041075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:22:52.041118Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:22:52.041217Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:22:52.041251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:22:52.041291Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:22:52.041320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:22:52.041355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:22:52.041410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:22:52.041450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:22:52.041483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:22:52.041566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:22:52.041603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:22:52.041637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:22:52.043631Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:22:52.043780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:22:52.043866Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
6678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T16:22:54.714824Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:22:54.714990Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:22:54.715034Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:206:2208], at schemeshard: 72057594046678944, txId: 105, path id: 2 2025-03-31T16:22:54.715106Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:206:2208], at schemeshard: 72057594046678944, txId: 105, path id: 1 2025-03-31T16:22:54.715784Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 105 2025-03-31T16:22:54.715905Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 105 2025-03-31T16:22:54.715950Z node 5 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 105 2025-03-31T16:22:54.715996Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-03-31T16:22:54.716047Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-31T16:22:54.716591Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 105 2025-03-31T16:22:54.716672Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 105 2025-03-31T16:22:54.716700Z node 5 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2025-03-31T16:22:54.716728Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2025-03-31T16:22:54.716763Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:22:54.716852Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2025-03-31T16:22:54.720001Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-03-31T16:22:54.720393Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 TestModificationResult got TxId: 105, wait until txId: 105 2025-03-31T16:22:54.720973Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Dir1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 
2025-03-31T16:22:54.721169Z node 5 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Dir1" took 233us result status StatusSuccess 2025-03-31T16:22:54.721506Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Dir1" PathDescription { Self { Name: "Dir1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 2 EffectiveACLVersion: 2 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 106 2025-03-31T16:22:54.724652Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveGroup { Group: "group1" } } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:22:54.724876Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 1] name: MyRoot type: EPathTypeDir state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:22:54.724920Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:22:54.724972Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 2] name: Dir1 type: EPathTypeDir state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:22:54.725007Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T16:22:54.725231Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 106:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046678944 2025-03-31T16:22:54.725344Z node 5 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-03-31T16:22:54.725389Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-03-31T16:22:54.725432Z node 5 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-03-31T16:22:54.725472Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-03-31T16:22:54.725536Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:22:54.725622Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: false 2025-03-31T16:22:54.725669Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToDone TxId: 106 ready parts: 1/1 2025-03-31T16:22:54.725710Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 106:0 2025-03-31T16:22:54.725758Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 106, publications: 1, subscribers: 0 2025-03-31T16:22:54.725797Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 106, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2025-03-31T16:22:54.728506Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 106, response: Status: StatusSuccess TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:22:54.728640Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 106, database: /MyRoot, subject: , status: StatusSuccess, operation: REMOVE GROUP, path: /MyRoot 2025-03-31T16:22:54.728894Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:22:54.728948Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 106, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:22:54.729128Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:22:54.729183Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:206:2208], at schemeshard: 72057594046678944, txId: 106, path id: 1 2025-03-31T16:22:54.729778Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 106 2025-03-31T16:22:54.729898Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 106 2025-03-31T16:22:54.729941Z node 5 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 106 2025-03-31T16:22:54.729989Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-03-31T16:22:54.730042Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:22:54.730185Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 106, subscribers: 0 2025-03-31T16:22:54.732474Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 TestModificationResult got TxId: 106, wait until txId: 106 2025-03-31T16:22:54.733161Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:22:54.733351Z node 5 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 220us result status StatusSuccess 2025-03-31T16:22:54.733753Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 
5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 2 } ChildrenExist: true } Children { Name: "Dir1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TWebLoginService::AuditLogLdapLoginBadPassword [GOOD]
>> TWebLoginService::AuditLogLdapLoginBadUser
>> TSchemeShardLoginTest::AddAccess_NonYdb-StrictAclCheck-true [GOOD]
>> TSchemeShardLoginTest::AccountLockoutAndAutomaticallyUnlock
>> TSchemeShardLoginTest::ResetFailedAttemptCountAfterModifyUser [GOOD]
>> DataStreams::TestPutRecords [GOOD]
>> TWebLoginService::AuditLogLdapLoginBadUser [GOOD]
>> TWebLoginService::AuditLogLdapLoginBadBind
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::ResetFailedAttemptCountAfterModifyUser [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:22:48.162105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:22:48.162173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:22:48.162198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:22:48.162222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:22:48.162254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:22:48.162286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:22:48.162332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:22:48.162402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE:
[RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:22:48.162645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:22:48.247461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:22:48.247505Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:22:48.252627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:22:48.252795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:22:48.252962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:22:48.258537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:22:48.258764Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:22:48.259392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:22:48.259625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:22:48.265186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:22:48.266497Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:22:48.266573Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:22:48.266745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:22:48.266805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:22:48.266858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:22:48.267043Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:22:48.278804Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:22:48.408437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:22:48.408709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:22:48.408984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:22:48.409284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:22:48.409345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 
2025-03-31T16:22:48.412072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:22:48.412210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:22:48.412415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:22:48.412469Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:22:48.412511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:22:48.412545Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:22:48.414877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:22:48.414961Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:22:48.415003Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:22:48.416979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:22:48.417034Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:22:48.417094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:22:48.417165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:22:48.420933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:22:48.422981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:22:48.423157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:22:48.424261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:22:48.424428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:22:48.424490Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:22:48.424791Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:22:48.424846Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:22:48.425011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:22:48.425093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:22:48.427423Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:22:48.427463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:22:48.427621Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:22:48.427669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:22:48.427898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:22:48.427944Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:22:48.428043Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:22:48.428077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:22:48.428117Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:22:48.428145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:22:48.428183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:22:48.428251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:22:48.428292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:22:48.428321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:22:48.428379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:22:48.428417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:22:48.428450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:22:48.430380Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:22:48.430486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:22:48.430522Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
DbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:22:55.455347Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2025-03-31T16:22:55.455379Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-31T16:22:55.455413Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-03-31T16:22:55.455443Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 103, publications: 1, subscribers: 0 2025-03-31T16:22:55.455471Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2025-03-31T16:22:55.457767Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusSuccess TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:22:55.457870Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusSuccess, operation: MODIFY USER, path: /MyRoot 2025-03-31T16:22:55.458131Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:22:55.458190Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:22:55.458368Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:22:55.458420Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:360:2336], at schemeshard: 72057594046678944, txId: 103, path id: 1 2025-03-31T16:22:55.458965Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 103 2025-03-31T16:22:55.459100Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 103 2025-03-31T16:22:55.459147Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2025-03-31T16:22:55.459214Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2025-03-31T16:22:55.459278Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:22:55.459404Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2025-03-31T16:22:55.461496Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-03-31T16:22:55.462688Z node 4 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [4:311:2298] sender: [4:406:2058] recipient: [4:102:2137] Leader for TabletID 72057594046678944 is [4:311:2298] sender: [4:409:2058] recipient: [4:15:2062] Leader for TabletID 72057594046678944 is [4:311:2298] sender: [4:410:2058] recipient: [4:408:2379] Leader for TabletID 72057594046678944 is [4:411:2380] sender: [4:412:2058] recipient: 
[4:408:2379] 2025-03-31T16:22:55.500054Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:22:55.500154Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:22:55.500198Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:22:55.500240Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:22:55.500283Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:22:55.500311Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:22:55.500366Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:22:55.500433Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:22:55.500725Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:22:55.517450Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:22:55.518792Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:22:55.518968Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:22:55.519106Z node 4 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:22:55.519145Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:22:55.519662Z node 4 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:22:55.520352Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 1, at schemeshard: 72057594046678944 2025-03-31T16:22:55.520482Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:22:55.520560Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:22:55.520979Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:22:55.521051Z node 4 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-03-31T16:22:55.521207Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:22:55.521281Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:22:55.521351Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:22:55.521462Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:22:55.521543Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at 
schemeshard: 72057594046678944 2025-03-31T16:22:55.521661Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:22:55.521895Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:22:55.522006Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:22:55.522384Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:22:55.522467Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:22:55.522620Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:22:55.522710Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:22:55.522785Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:22:55.523047Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:22:55.523180Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:22:55.523304Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:22:55.523500Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-03-31T16:22:55.523716Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-03-31T16:22:55.523781Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-03-31T16:22:55.523840Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-03-31T16:22:55.532130Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:22:55.532224Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:22:55.532446Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:22:55.532504Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:22:55.532540Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:22:55.532606Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [4:411:2380] sender: [4:466:2058] recipient: [4:15:2062] 2025-03-31T16:22:55.565537Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Execute at schemeshard: 72057594046678944 2025-03-31T16:22:55.565605Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin RotateKeys at schemeshard: 72057594046678944 2025-03-31T16:22:55.663839Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Complete, result: Token: 
"eyJhbGciOiJQUzI1NiIsImtpZCI6IjMifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQzNDgxMzc1LCJpYXQiOjE3NDM0MzgxNzUsInN1YiI6InVzZXIxIn0.Tl2p0yN_PGzwRHCzl3oTbnP5enXorhw6DdiJyIMuEq32zz63FWmBCkAcM9VCUFu6CYT3UyFh0VX4aE77wwXQKyx8NM9h5Jt4Mz23fjctkFVzVifGmdeXhKIs1urCpsgxG6GvR8aVbpKtcPRznABIN66o4cdBRMTfBzOD1BjWgq330zVwwuhM0XIN0FPGoYYChqpAhwnItV4PosrgK4JSMtoMOVczVyau-7Iq4J9pRzpyYUmw-IKozFzZ8qEaVbZTrTvC0gkATE9CW0v1gVnu7XdzZPxDXddt-BHMJRWAaz0qzQB84QAaEWsqgvTr5_fkeznRRkWiK09MkH1kxAjxbg" SanitizedToken: "eyJhbGciOiJQUzI1NiIsImtpZCI6IjMifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQzNDgxMzc1LCJpYXQiOjE3NDM0MzgxNzUsInN1YiI6InVzZXIxIn0.**" IsAdmin: true, at schemeshard: 72057594046678944 2025-03-31T16:22:55.663976Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:22:55.664014Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:22:55.664261Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:22:55.664333Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:460:2418], at schemeshard: 72057594046678944, txId: 0, path id: 1 2025-03-31T16:22:55.664963Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 0 >> TSchemeShardLoginTest::RemoveUser_NonExisting-StrictAclCheck-false >> TWebLoginService::AuditLogLdapLoginBadBind [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestPutRecords [GOOD] Test command err: 2025-03-31T16:22:34.639410Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488009855449271021:2225];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:22:34.639968Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001a09/r3tmp/tmpsB6mbA/pdisk_1.dat 2025-03-31T16:22:35.058304Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:22:35.069038Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:22:35.069161Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:22:35.076577Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30226, node 1 2025-03-31T16:22:35.187535Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:22:35.187560Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:22:35.187569Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:22:35.187770Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19321 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:22:35.450615Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:22:35.538366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:19321 2025-03-31T16:22:35.714854Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:22:39.589875Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488009879725982119:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:22:39.591536Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001a09/r3tmp/tmp9CRUtS/pdisk_1.dat 2025-03-31T16:22:39.807006Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4200, node 4 2025-03-31T16:22:39.934927Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:22:39.935053Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:22:39.939184Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:22:39.940076Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:22:39.940102Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:22:39.940113Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:22:39.940310Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31208 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:22:40.249157Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:22:40.333557Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:31208 2025-03-31T16:22:40.577698Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:22:40.793120Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710661:0, at schemeshard: 72057594046644480 encryption_type: NONE sequence_number: "0" shard_id: "shard-000000" encryption_type: NONE records { sequence_number: "1" shard_id: "shard-000000" } records { sequence_number: "0" shard_id: "shard-000004" } records { sequence_number: "0" shard_id: "shard-000002" } records { sequence_number: "1" shard_id: "shard-000002" } records { sequence_number: "1" shard_id: "shard-000004" } records { sequence_number: "2" shard_id: "shard-000002" } records { sequence_number: "3" shard_id: "shard-000002" } records { sequence_number: "4" shard_id: "shard-000002" } records { sequence_number: "2" shard_id: "shard-000000" } records { sequence_number: "2" shard_id: "shard-000004" } records { sequence_number: "0" shard_id: "shard-000003" } records { sequence_number: "3" shard_id: "shard-000000" } records { sequence_number: "1" shard_id: "shard-000003" } records { sequence_number: "2" shard_id: "shard-000003" } records { sequence_number: "4" shard_id: "shard-000000" } records { sequence_number: "3" shard_id: "shard-000003" } records { sequence_number: "5" shard_id: "shard-000002" } records { sequence_number: "6" shard_id: "shard-000002" } records { sequence_number: "0" shard_id: "shard-000001" } records { sequence_number: "3" shard_id: "shard-000004" } records { sequence_number: "4" shard_id: "shard-000004" } records { sequence_number: "5" shard_id: "shard-000000" } records { sequence_number: "4" shard_id: "shard-000003" } records { sequence_number: "6" shard_id: "shard-000000" } records { sequence_number: "5" shard_id: "shard-000004" } records { sequence_number: "7" shard_id: "shard-000000" } records { sequence_number: "6" shard_id: "shard-000004" } 
records { sequence_number: "7" shard_id: "shard-000002" } records { sequence_number: "8" shard_id: "shard-000000" } records { sequence_number: "9" shard_id: "shard-000000" } 2025-03-31T16:22:40.861912Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:22:40.922613Z :INFO: [/Root/] [/Root/] [6dcb1f48-c5d9236c-df1ef08e-ee98176e] Starting read session 2025-03-31T16:22:40.922744Z :DEBUG: [/Root/] [/Root/] [6dcb1f48-c5d9236c-df1ef08e-ee98176e] Starting session to cluster null (localhost:4200) 2025-03-31T16:22:40.924791Z :DEBUG: [/Root/] [/Root/] [6dcb1f48-c5d9236c-df1ef08e-ee98176e] [null] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:22:40.925014Z :DEBUG: [/Root/] [/Root/] [6dcb1f48-c5d9236c-df1ef08e-ee98176e] [null] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:22:40.925090Z :DEBUG: [/Root/] [/Root/] [6dcb1f48-c5d9236c-df1ef08e-ee98176e] [null] Reconnecting session to cluster null in 0.000000s 2025-03-31T16:22:40.936745Z :DEBUG: [/Root/] [/Root/] [6dcb1f48-c5d9236c-df1ef08e-ee98176e] [null] Successfully connected. Initializing session 2025-03-31T16:22:40.937910Z node 4 :PQ_READ_PROXY DEBUG: new grpc connection 2025-03-31T16:22:40.937943Z node 4 :PQ_READ_PROXY DEBUG: new session created cookie 1 2025-03-31T16:22:40.948617Z node 4 :PQ_READ_PROXY DEBUG: session cookie 1 consumer session grpc read done: success# 1, data# { init_request { topics_read_settings { topic: "/Root/stream_TestPutRecordsWithRead" } read_only_original: true consumer: "user1" read_params { max_read_size: 104857600 } } } 2025-03-31T16:22:40.949040Z node 4 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_4_1_3992628957942520009_v1 read init: from# ipv6:[::1]:57038, request# { init_request { topics_read_settings { topic: "/Root/stream_TestPutRecordsWithRead" } read_only_original: true consumer: "user1" read_params { max_read_size: 104857600 } } } 2025-03-31T16:22:40.949337Z node 4 :PQ_READ_PROXY DEBUG: session cookie 1 consumer user1 session user1_4_1_3992628957942520009_v1 auth for : user1 2025-03-31T16:22:40.952058Z node 4 :PQ_READ_PROXY DEBUG: session cookie 1 consumer user1 session user1_4_1_3992628957942520009_v1 Handle describe topics response 2025-03-31T16:22:40.952267Z node 4 :PQ_READ_PROXY DEBUG: session cookie 1 consumer user1 session user1_4_1_3992628957942520009_v1 auth is DEAD 2025-03-31T16:22:40.952397Z node 4 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_4_1_3992628957942520009_v1 auth ok: topics# 1, initDone# 0 2025-03-31T16:22:40.953842Z node 4 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_4_1_3992628957942520009_v1 register session: topic# /Root/stream_TestPutRecordsWithRead 2025-03-31T16:22:40.955733Z :INFO: [/Root/] [/Root/] [6dcb1f48-c5d9236c-df1ef08e-ee98176e] [null] Server session id: user1_4_1_3992628957942520009_v1 2025-03-31T16:22:40.955937Z :DEBUG: [/Root/] [/Root/] [6dcb1f48-c5d9236c-df1ef08e-ee98176e] [null] After sending read request: ReadSizeBudget = 52428800, R ... 
itOffset 0 clientReadOffset 0 2025-03-31T16:22:50.084724Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_7_1_1244787524513928105_v1 Start reading TopicId: Topic /Root/stream_TestPutRecordsCornerCases in database: Root, partition 3(assignId:2) EndOffset 4 readOffset 0 committedOffset 0 clientCommitOffset 0 clientReadOffset 0 2025-03-31T16:22:50.188765Z :DEBUG: [/Root/] [/Root/] [c479ceac-d1392f4a-df3b92a7-505a147d] [null] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:22:50.188968Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2025-03-31T16:22:50.189020Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (3-3) 2025-03-31T16:22:50.189022Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (0-1) 2025-03-31T16:22:50.189077Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (4-4) 2025-03-31T16:22:50.189263Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (5-5) 2025-03-31T16:22:50.189181Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (6-6) 2025-03-31T16:22:50.189338Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (7-7) 2025-03-31T16:22:50.189380Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (8-8) 2025-03-31T16:22:50.190052Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {0, 0} (0-0) 2025-03-31T16:22:50.193751Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {1, 0} (1-1) 2025-03-31T16:22:50.196530Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {2, 0} (2-2) 2025-03-31T16:22:50.198449Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {3, 0} (3-3) 2025-03-31T16:22:50.205597Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {4, 0} (4-4) 2025-03-31T16:22:50.206599Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {5, 0} (5-5) 2025-03-31T16:22:50.208302Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {6, 0} (6-6) 2025-03-31T16:22:50.210176Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {7, 0} (7-7) 2025-03-31T16:22:50.219546Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {8, 0} (8-8) 2025-03-31T16:22:50.219623Z :DEBUG: [/Root/] [/Root/] [c479ceac-d1392f4a-df3b92a7-505a147d] [null] The application data is transferred to the client. Number of messages 9, size 8388611 bytes 2025-03-31T16:22:50.220152Z :DEBUG: [/Root/] [/Root/] [c479ceac-d1392f4a-df3b92a7-505a147d] [null] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:22:50.220356Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 4 (0-1) 2025-03-31T16:22:50.220371Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 0 (0-1) 2025-03-31T16:22:50.220469Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 3 (0-2) 2025-03-31T16:22:50.220527Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 3 (3-3) 2025-03-31T16:22:50.224555Z :DEBUG: [/Root/] Take Data. Partition 4. Read: {0, 0} (0-0) 2025-03-31T16:22:50.224599Z :DEBUG: [/Root/] Take Data. Partition 4. Read: {1, 0} (1-1) 2025-03-31T16:22:50.224623Z :DEBUG: [/Root/] [/Root/] [c479ceac-d1392f4a-df3b92a7-505a147d] [null] The application data is transferred to the client. Number of messages 2, size 0 bytes 2025-03-31T16:22:50.224782Z :DEBUG: [/Root/] Take Data. Partition 0. Read: {0, 0} (0-0) 2025-03-31T16:22:50.224803Z :DEBUG: [/Root/] Take Data. Partition 0. 
Read: {1, 0} (1-1) 2025-03-31T16:22:50.224818Z :DEBUG: [/Root/] [/Root/] [c479ceac-d1392f4a-df3b92a7-505a147d] [null] The application data is transferred to the client. Number of messages 2, size 0 bytes 2025-03-31T16:22:50.224897Z :DEBUG: [/Root/] Take Data. Partition 3. Read: {0, 0} (0-0) 2025-03-31T16:22:50.224916Z :DEBUG: [/Root/] Take Data. Partition 3. Read: {0, 1} (1-1) 2025-03-31T16:22:50.225625Z :DEBUG: [/Root/] Take Data. Partition 3. Read: {1, 0} (2-2) 2025-03-31T16:22:50.225655Z :DEBUG: [/Root/] Take Data. Partition 3. Read: {2, 0} (3-3) 2025-03-31T16:22:50.225680Z :DEBUG: [/Root/] [/Root/] [c479ceac-d1392f4a-df3b92a7-505a147d] [null] The application data is transferred to the client. Number of messages 4, size 1049088 bytes 2025-03-31T16:22:50.226180Z :INFO: [/Root/] [/Root/] [c479ceac-d1392f4a-df3b92a7-505a147d] Closing read session. Close timeout: 0.000000s 2025-03-31T16:22:50.226229Z :INFO: [/Root/] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): null:stream_TestPutRecordsCornerCases:3:5:3:0 null:stream_TestPutRecordsCornerCases:2:4:0:0 null:stream_TestPutRecordsCornerCases:0:3:1:0 null:stream_TestPutRecordsCornerCases:4:2:1:0 null:stream_TestPutRecordsCornerCases:1:1:8:0 2025-03-31T16:22:50.226279Z :INFO: [/Root/] [/Root/] [c479ceac-d1392f4a-df3b92a7-505a147d] Counters: { Errors: 0 CurrentSessionLifetimeMs: 175 BytesRead: 9437699 MessagesRead: 17 BytesReadCompressed: 9437699 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-31T16:22:50.226356Z :NOTICE: [/Root/] [/Root/] [c479ceac-d1392f4a-df3b92a7-505a147d] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-03-31T16:22:50.226392Z :DEBUG: [/Root/] [/Root/] [c479ceac-d1392f4a-df3b92a7-505a147d] [null] Abort session to cluster 2025-03-31T16:22:50.226831Z :NOTICE: [/Root/] [/Root/] [c479ceac-d1392f4a-df3b92a7-505a147d] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-03-31T16:22:50.229340Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_7_1_1244787524513928105_v1 grpc read failed 2025-03-31T16:22:50.229423Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_7_1_1244787524513928105_v1 grpc closed 2025-03-31T16:22:50.229519Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_7_1_1244787524513928105_v1 is DEAD 2025-03-31T16:22:51.581531Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7488009931172899722:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:22:51.581651Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001a09/r3tmp/tmpH1NLII/pdisk_1.dat 2025-03-31T16:22:51.724380Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:22:51.759297Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:22:51.759394Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:22:51.762830Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17315, node 10 2025-03-31T16:22:51.832948Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:22:51.832977Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:22:51.832995Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:22:51.833178Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21418 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:22:52.141238Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:22:52.278613Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:21418 2025-03-31T16:22:52.472757Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:22:52.768153Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715661:0, at schemeshard: 72057594046644480
: Error: Access for stream /Root/stream_TestPutRecords is denied for subject user2@builtin, code: 500018 2025-03-31T16:22:52.882539Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480
PutRecordsResponse = encryption_type: NONE records { sequence_number: "0" shard_id: "shard-000000" } records { sequence_number: "0" shard_id: "shard-000004" } records { sequence_number: "0" shard_id: "shard-000002" } records { sequence_number: "1" shard_id: "shard-000002" } records { sequence_number: "1" shard_id: "shard-000004" } records { sequence_number: "2" shard_id: "shard-000002" } records { sequence_number: "3" shard_id: "shard-000002" } records { sequence_number: "4" shard_id: "shard-000002" } records { sequence_number: "1" shard_id: "shard-000000" } records { sequence_number: "2" shard_id: "shard-000004" } records { sequence_number: "0" shard_id: "shard-000003" } records { sequence_number: "2" shard_id: "shard-000000" } records { sequence_number: "1" shard_id: "shard-000003" } records { sequence_number: "2" shard_id: "shard-000003" } records { sequence_number: "3" shard_id: "shard-000000" } records { sequence_number: "3" shard_id: "shard-000003" } records { sequence_number: "5" shard_id: "shard-000002" } records { sequence_number: "6" shard_id: "shard-000002" } records { sequence_number: "0" shard_id: "shard-000001" } records { sequence_number: "3" shard_id: "shard-000004" } records { sequence_number: "4" shard_id: "shard-000004" } records { sequence_number: "4" shard_id: "shard-000000" } records { sequence_number: "4" shard_id: "shard-000003" } records { sequence_number: "5" shard_id: "shard-000000" } records { sequence_number: "5" shard_id: "shard-000004" } records { sequence_number: "6" shard_id: "shard-000000" } records { sequence_number: "6" shard_id: "shard-000004" } records { sequence_number: "7" shard_id: "shard-000002" } records { sequence_number: "7" shard_id: "shard-000000" } records { sequence_number: "8" shard_id: "shard-000000" }
PutRecord response = encryption_type: NONE sequence_number: "7" shard_id: "shard-000004"
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TWebLoginService::AuditLogLdapLoginBadBind [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:111:2142] 2025-03-31T16:22:54.333814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:22:54.333909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:22:54.333952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:22:54.333991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:22:54.334092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-03-31T16:22:54.334126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:22:54.334188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:22:54.334289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:22:54.334602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:22:54.419787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:22:54.419853Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:22:54.431407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:22:54.431741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:22:54.431913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:22:54.440589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:22:54.440901Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:22:54.441523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:22:54.441740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:22:54.447810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:22:54.449284Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:22:54.449354Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:22:54.449450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:22:54.449520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:22:54.449565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:22:54.449790Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:22:54.456623Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:240:2058] recipient: [1:15:2062] 2025-03-31T16:22:54.592034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:22:54.592296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:22:54.592516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 
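The schemeshard bootstrap NOTICEs above report their settings as 'Name# value' pairs. Folding them into a dict makes it easier to diff the configuration between two runs; a small sketch keyed to the formatting shown (the sample line is copied from the output above):

    import re

    KV = re.compile(r"([\w ]+)# ([^,]+)")

    def parse_config(line):
        """Extract the `Name# value` pairs from a schemeshard config NOTICE."""
        return {k.strip(): v.strip() for k, v in KV.findall(line)}

    line = ("BackgroundCleaningQueue configured: Timeout# 15.000000s, "
            "Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10")
    print(parse_config(line))
    # -> {'Timeout': '15.000000s', 'Rate': '0',
    #     'WakeupInterval': '1.000000s', 'InflightLimit': '10'}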
2025-03-31T16:22:54.592729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:22:54.592799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:22:54.595240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:22:54.595500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:22:54.595751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:22:54.595818Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:22:54.595855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:22:54.595892Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:22:54.597874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:22:54.597942Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:22:54.597983Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:22:54.599898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:22:54.599949Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:22:54.599986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:22:54.600060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:22:54.608687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:22:54.610779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:22:54.610990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:22:54.611941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:22:54.612079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 4294969446 } } Step: 5000001 
MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:22:54.612147Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:22:54.612443Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:22:54.612511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:22:54.612701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:22:54.612819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:22:54.614995Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:22:54.615044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:22:54.615241Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:22:54.615296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:22:54.615638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:22:54.615680Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:22:54.615766Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:22:54.615797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:22:54.615835Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:22:54.615882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:22:54.615916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:22:54.615951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:22:54.615986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:22:54.616015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:22:54.616088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:22:54.616127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:22:54.616157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:22:54.618023Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:22:54.618136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 
Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:22:54.618176Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:22:56.689732Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:22:56.689885Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:22:56.689935Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:22:56.689980Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:22:56.690131Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:22:56.697178Z node 4 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [4:124:2150] sender: [4:239:2058] recipient: [4:15:2062] 2025-03-31T16:22:56.707352Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:22:56.707575Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:22:56.707762Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:22:56.707939Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:22:56.707985Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:22:56.710247Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:22:56.710400Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:22:56.710633Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:22:56.710685Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:22:56.710719Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:22:56.710752Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:22:56.712811Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:22:56.712886Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:22:56.712936Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change 
state for txid 1:0 3 -> 128 2025-03-31T16:22:56.714697Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:22:56.714742Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:22:56.714783Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:22:56.714828Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:22:56.714992Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:22:56.716828Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:22:56.716988Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:22:56.717947Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:22:56.718076Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 17179871341 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:22:56.718135Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:22:56.718368Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:22:56.718413Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:22:56.718572Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:22:56.718640Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:22:56.720659Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:22:56.720712Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:22:56.720937Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:22:56.720982Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:22:56.721301Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 
72057594046678944 2025-03-31T16:22:56.721341Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:22:56.721444Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:22:56.721481Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:22:56.721519Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:22:56.721557Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:22:56.721598Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:22:56.721639Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:22:56.721674Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:22:56.721701Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:22:56.721772Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:22:56.721809Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:22:56.721855Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:22:56.722282Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:22:56.722367Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:22:56.722402Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-03-31T16:22:56.722439Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-03-31T16:22:56.722479Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:22:56.722573Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-03-31T16:22:56.725638Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-03-31T16:22:56.726236Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 2025-03-31T16:22:56.727607Z node 4 :HTTP WARN: 127.0.0.1:0 POST /login 2025-03-31T16:22:56.727830Z node 4 :TX_PROXY DEBUG: actor# [4:269:2260] Bootstrap 2025-03-31T16:22:56.749494Z node 4 :TX_PROXY DEBUG: actor# [4:269:2260] Become StateWork (SchemeCache [4:278:2269]) 2025-03-31T16:22:56.749801Z node 4 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:21757, port: 21757 2025-03-31T16:22:56.749903Z node 4 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-31T16:22:56.753160Z node 4 :LDAP_AUTH_PROVIDER DEBUG: Could not perform initial LDAP bind for dn 
cn=robouser,dc=search,dc=yandex,dc=net on server ldap://localhost:21757. Invalid credentials
2025-03-31T16:22:56.753696Z node 4 :HTTP ERROR: Login fail for user1@ldap: Could not login via LDAP
2025-03-31T16:22:56.754113Z node 4 :TX_PROXY DEBUG: actor# [4:269:2260] HANDLE TEvClientConnected success connect from tablet# 72057594046447617
2025-03-31T16:22:56.756231Z node 4 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617
AUDIT LOG buffer(2):
2025-03-31T16:22:56.710361Z: component=schemeshard, tx_id=1, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=ALTER DATABASE, paths=[//MyRoot], status=SUCCESS, detailed_status=StatusAccepted
2025-03-31T16:22:56.753440Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=ERROR, detailed_status=UNAUTHORIZED, reason=Could not login via LDAP: Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldap://localhost:21757. Invalid credentials, login_user=user1@ldap, sanitized_token={none}
AUDIT LOG checked line:
2025-03-31T16:22:56.753440Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=ERROR, detailed_status=UNAUTHORIZED, reason=Could not login via LDAP: Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldap://localhost:21757. Invalid credentials, login_user=user1@ldap, sanitized_token={none}
>> TSchemeShardLoginTest::RemoveUser_NonExisting-StrictAclCheck-false [GOOD]
>> TSchemeShardLoginTest::RemoveUser_NonExisting-StrictAclCheck-true
>> TSchemeShardLoginTest::BanUserWithWaiting [GOOD]
>> TSchemeShardLoginTest::ChangeAcceptablePasswordParameters
>> TSchemeShardLoginTest::RemoveUser_NonExisting-StrictAclCheck-true [GOOD]
>> TSchemeShardLoginTest::RemoveUser_Groups-StrictAclCheck-false
>> TCdcStreamTests::VirtualTimestamps
>> TSchemeShardLoginTest::ChangeAcceptablePasswordParameters [GOOD]
>> TSchemeShardLoginTest::ChangeAccountLockoutParameters
>> TSchemeShardLoginTest::RemoveUser_Groups-StrictAclCheck-false [GOOD]
>> TSchemeShardLoginTest::RemoveUser_Groups-StrictAclCheck-true
>> TCdcStreamTests::Basic
>> DataStreams::TestGetRecords1MBMessagesOneByOneBySeqNo [GOOD]
>> TSchemeShardLoginTest::RemoveUser_Groups-StrictAclCheck-true [GOOD]
>> TSchemeShardLoginTest::RemoveUser_Owner-StrictAclCheck-false
>> TestKinesisHttpProxy::CreateStreamInIncorrectDb
>> TestYmqHttpProxy::TestGetQueueUrl
>> TSchemeShardLoginTest::AccountLockoutAndAutomaticallyUnlock [GOOD]
>> DataShardVolatile::DistributedWrite
>> TestYmqHttpProxy::TestCreateQueue
------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestGetRecords1MBMessagesOneByOneBySeqNo [GOOD]
Test command err:
2025-03-31T16:22:32.804226Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488009848217513792:2082];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:22:32.806933Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001a1e/r3tmp/tmpKvawTj/pdisk_1.dat
2025-03-31T16:22:33.258251Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:22:33.258373Z node 1 :HIVE
WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:22:33.267606Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:22:33.316535Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12268, node 1 2025-03-31T16:22:33.355376Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:22:33.355402Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:22:33.506504Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:22:33.506537Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:22:33.506548Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:22:33.506716Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20240 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:22:34.028721Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:22:34.131598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:20240 2025-03-31T16:22:34.336812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
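The AUDIT LOG lines in the TWebLoginService::AuditLogLdapLoginBadBind output above are comma-separated key=value records, but the reason= field itself contains commas (inside the LDAP dn and the error text), so a naive split(', ') mangles it. Splitting only where the next token is another key= pair is safer; a sketch assuming the field shape printed above:

    import re

    # Split on ", " only where the next token is another `key=` pair, so
    # commas inside reason=... (the LDAP dn, the error text) stay intact.
    FIELD = re.compile(r"(\w+)=(.*?)(?=, \w+=|$)")

    def parse_audit(line):
        return dict(FIELD.findall(line))

    rec = parse_audit(
        "component=grpc-login, remote_address=localhost, database=/MyRoot, "
        "operation=LOGIN, status=ERROR, detailed_status=UNAUTHORIZED, "
        "reason=Could not login via LDAP: Could not perform initial LDAP bind "
        "for dn cn=robouser,dc=search,dc=yandex,dc=net on server "
        "ldap://localhost:21757. Invalid credentials, login_user=user1@ldap, "
        "sanitized_token={none}"
    )
    assert rec["login_user"] == "user1@ldap"

The lookahead keeps everything between reason= and the next recognized key in one value, which is why the embedded dn survives intact.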
2025-03-31T16:22:34.345977Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-03-31T16:22:34.584855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:22:34.610472Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2025-03-31T16:22:34.612215Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037891 not found 2025-03-31T16:22:34.612237Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2025-03-31T16:22:34.612249Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-03-31T16:22:37.499805Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488009870287242005:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:22:37.499988Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001a1e/r3tmp/tmpmRiBUq/pdisk_1.dat 2025-03-31T16:22:37.744319Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6823, node 4 2025-03-31T16:22:37.863570Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:22:37.863665Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:22:37.879988Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:22:37.896349Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:22:37.896369Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:22:37.896376Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:22:37.896491Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22585 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
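The TClient::Ls responses in this output are text-format protobuf cut off mid-message ('... (TRUNCATED)'), so a strict text-proto parse would fail; scraping a few scalar fields positionally is usually enough for assertions. A rough sketch keyed to the fields visible above:

    import re

    FIELDS = ("Name", "PathType", "PathVersion", "Owner")

    def scrape_ls(resp):
        """Pull a few scalars out of a (possibly truncated) Ls response."""
        out = {}
        for f in FIELDS:
            m = re.search(rf'\b{f}: ("?[^"\s]+"?)', resp)
            if m:
                out[f] = m.group(1).strip('"')
        return out

    # On the responses above this yields e.g.
    # {'Name': 'Root', 'PathType': 'EPathTypeDir',
    #  'PathVersion': '2', 'Owner': 'root@builtin'}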
2025-03-31T16:22:38.196730Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:22:38.288637Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:22585 2025-03-31T16:22:38.548829Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:22:38.773791Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-31T16:22:38.857930Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:22:38.883989Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037889 not found 2025-03-31T16:22:38.884019Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037891 not found 2025-03-31T16:22:38.884054Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037890 not found 2025-03-31T16:22:38.884083Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037888 not found 2025-03-31T16:22:42.341477Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7488009892055393066:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:22:42.341896Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001a1e/r3tmp/tmpMaOljU/pdisk_1.dat 2025-03-31T16:22:42.526674Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:22:42.584865Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:22:42.585032Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:22:42.590777Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9892, node 7 2025-03-31T16:22:42.668675Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:22:42.668708Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:22:42.668715Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:22:42.668861Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12715 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:22:42.963738Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:22:43.038865Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:12715 2025-03-31T16:22:43.240311Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:22:43.445077Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:22:43.520686Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:22:43.575944Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:22:43.596188Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037891 not found 2025-03-31T16:22:43.596247Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037889 not found 2025-03-31T16:22:43.596286Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037890 not found 2025-03-31T16:22:43.596328Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037888 not found 2025-03-31T16:22:43.604966Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,2) wasn't found 2025-03-31T16:22:43.605040Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,4) wasn't found 2025-03-31T16:22:43.605075Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,3) wasn't found 2025-03-31T16:22:43.605133Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,1) wasn't found 2025-03-31T16:22:47.464216Z 
node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7488009912557385094:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:22:47.464432Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001a1e/r3tmp/tmpnWdG5P/pdisk_1.dat 2025-03-31T16:22:47.668955Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:22:47.706034Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:22:47.706129Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:22:47.709193Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11450, node 10 2025-03-31T16:22:47.793592Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:22:47.793626Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:22:47.793640Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:22:47.793806Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16909 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:22:48.124544Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:22:48.208218Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:16909 2025-03-31T16:22:48.477628Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
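Each node's setup above announces its endpoint with 'TServer::EnableGrpc on GrpcPort NNN, node N'. Collecting those into a map gives a quick node-to-port index for the run; a short sketch against that exact wording:

    import re

    PORT = re.compile(r"TServer::EnableGrpc on GrpcPort (\d+), node (\d+)")

    def grpc_ports(text):
        """node id -> gRPC port, from the EnableGrpc announcements."""
        return {int(node): int(port) for port, node in PORT.findall(text)}

    # The four runs above give {1: 12268, 4: 6823, 7: 9892, 10: 11450}.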
2025-03-31T16:22:52.464051Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7488009912557385094:2073];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:22:52.464185Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
>> TSchemeShardLoginTest::RemoveUser_Owner-StrictAclCheck-false [GOOD]
>> TCdcStreamTests::Basic [GOOD]
>> TCdcStreamTests::Attributes
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::AccountLockoutAndAutomaticallyUnlock [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141]
2025-03-31T16:22:53.175883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-03-31T16:22:53.175982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-31T16:22:53.176043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-03-31T16:22:53.176095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-03-31T16:22:53.176152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-03-31T16:22:53.176194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-03-31T16:22:53.176270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-31T16:22:53.176375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-03-31T16:22:53.176687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-03-31T16:22:53.272408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-31T16:22:53.272483Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:22:53.280654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-03-31T16:22:53.280798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-03-31T16:22:53.280972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-03-31T16:22:53.286365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-03-31T16:22:53.286601Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-03-31T16:22:53.287302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-03-31T16:22:53.287542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1],
at schemeshard: 72057594046678944 2025-03-31T16:22:53.289442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:22:53.290607Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:22:53.290664Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:22:53.290822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:22:53.290879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:22:53.290930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:22:53.291109Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:22:53.298199Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:22:53.429845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:22:53.430175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:22:53.430434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:22:53.430684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:22:53.430754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:22:53.434420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:22:53.434579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:22:53.434824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:22:53.434896Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:22:53.434956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:22:53.435005Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:22:53.437980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:22:53.438052Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:22:53.438098Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:22:53.440833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:22:53.440893Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:22:53.440956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:22:53.441023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:22:53.444944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:22:53.447263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:22:53.447452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:22:53.448302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:22:53.448442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:22:53.448521Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:22:53.448746Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:22:53.448819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:22:53.448993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:22:53.449098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:22:53.451140Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:22:53.451188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:22:53.451365Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:22:53.451404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:22:53.451642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at 
schemeshard: 72057594046678944 2025-03-31T16:22:53.451680Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:22:53.451775Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:22:53.451803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:22:53.451843Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:22:53.451869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:22:53.451946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:22:53.451997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:22:53.452039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:22:53.452066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:22:53.452122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:22:53.452161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:22:53.452191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:22:53.453945Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:22:53.454076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:22:53.454117Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
31T16:22:56.266591Z node 5 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-31T16:22:56.266641Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T16:22:56.266713Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:22:56.266794Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-03-31T16:22:56.266845Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T16:22:56.266890Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-03-31T16:22:56.266941Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 1, subscribers: 0 2025-03-31T16:22:56.266989Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 4 2025-03-31T16:22:56.269830Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSuccess TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:22:56.269992Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSuccess, operation: CREATE USER, path: /MyRoot 2025-03-31T16:22:56.270217Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:22:56.270266Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:22:56.270479Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:22:56.270535Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-03-31T16:22:56.271163Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T16:22:56.271299Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T16:22:56.271351Z node 5 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-03-31T16:22:56.271400Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2025-03-31T16:22:56.271451Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:22:56.271582Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-03-31T16:22:56.273706Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 2025-03-31T16:22:56.274146Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Execute at schemeshard: 72057594046678944 
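The output below (from TSchemeShardLoginTest::AccountLockoutAndAutomaticallyUnlock) shows four 'Invalid password' results followed by 'User user1 is not permitted to log in'; the test name indicates the lock later clears on its own, though that part of the output is not shown. A toy model of such failure-count lockout with timed unlock; the threshold matches the four failures seen here, while the unlock delay is a made-up illustration rather than YDB's configured AccountLockout value:

    import time

    class Lockout:
        """Toy failure-count lockout with automatic unlock. threshold=4
        mirrors the four failures below; unlock_after is a made-up
        illustration, not YDB's real AccountLockout setting."""

        def __init__(self, threshold=4, unlock_after=3.0):
            self.threshold = threshold
            self.unlock_after = unlock_after
            self.failures = 0
            self.locked_at = None

        def permitted(self, now=None):
            now = time.monotonic() if now is None else now
            if self.locked_at is None:
                return True
            if now - self.locked_at >= self.unlock_after:
                self.failures, self.locked_at = 0, None  # automatic unlock
                return True
            return False  # "User ... is not permitted to log in"

        def record_failure(self, now=None):
            now = time.monotonic() if now is None else now
            self.failures += 1
            if self.failures >= self.threshold:
                self.locked_at = now

Four record_failure() calls flip permitted() to False, and it turns True again once unlock_after elapses, matching the progression the log shows.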
2025-03-31T16:22:56.274192Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin RotateKeys at schemeshard: 72057594046678944 2025-03-31T16:22:56.383795Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Complete, result: Error: "Invalid password", at schemeshard: 72057594046678944 2025-03-31T16:22:56.383939Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:22:56.383995Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:22:56.384213Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:22:56.384267Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:206:2208], at schemeshard: 72057594046678944, txId: 0, path id: 1 2025-03-31T16:22:56.384821Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 0 2025-03-31T16:22:56.385127Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Execute at schemeshard: 72057594046678944 2025-03-31T16:22:56.391740Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Complete, result: Error: "Invalid password", at schemeshard: 72057594046678944 2025-03-31T16:22:56.392108Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Execute at schemeshard: 72057594046678944 2025-03-31T16:22:56.398763Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Complete, result: Error: "Invalid password", at schemeshard: 72057594046678944 2025-03-31T16:22:56.399192Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Execute at schemeshard: 72057594046678944 2025-03-31T16:22:56.410064Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Complete, result: Error: "Invalid password", at schemeshard: 72057594046678944 2025-03-31T16:22:56.410553Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Execute at schemeshard: 72057594046678944 2025-03-31T16:22:56.410696Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Complete, result: Error: "User user1 is not permitted to log in", at schemeshard: 72057594046678944 2025-03-31T16:22:56.411162Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Execute at schemeshard: 72057594046678944 2025-03-31T16:22:56.411278Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Complete, result: Error: "User user1 is not permitted to log in", at schemeshard: 72057594046678944 2025-03-31T16:22:56.411736Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:22:56.411946Z node 5 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 238us result status StatusSuccess 2025-03-31T16:22:56.412404Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 2 } ChildrenExist: false 
} DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { PublicKeys { KeyId: 1 KeyDataPEM: "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAoWCI0WRqDMSqzHKNGZLl\nBtnMjRruFtjl+6doTiwjYIt47Uyg8a/3TxzYxHhAhcBXZde09AvF/0G+5SrRcQqa\nQ7cE2D8W1DvUZ2tvaspmrVolH/cE9zdPg0NEjCj63c7MJkFEtAPVDvIhNpnr3+6p\nCNhPGm0cwpO5u01KVhfLoopB3HSmBDl1gJQ6xibhCrx/NOZDKrGFOnh7sg5S2QnJ\nXEIEcx1ZI+wEjenzUtgklpejwSSmp75OlGgSaIBekRns03LCx6PlA1RxidDe9FUe\nVScln/DLKioLV0OscECHoFyUVgtcKrYagPAJSzH5GCcQUNmv5hb0z2Xvw2EI15v6\nGQIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1743524576376 } Sids { Name: "user1" Type: USER } Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:23:00.413330Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Execute at schemeshard: 72057594046678944 2025-03-31T16:23:00.420517Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Complete, result: Error: "Invalid password", at schemeshard: 72057594046678944 2025-03-31T16:23:00.421136Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Execute at schemeshard: 72057594046678944 2025-03-31T16:23:00.430297Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Complete, result: Token: "eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQzNDgxMzgwLCJpYXQiOjE3NDM0MzgxODAsInN1YiI6InVzZXIxIn0.KwlioKbIjV1ZGDdjMKaWZkE0NF4bzClVvH2SfH3gO5JDhPFA5jNU5EAXKosqhZDP-I5ZuehRiXyhqYYWL2PlWnMkZQ6iIwcF544SCylvAmst4U4YxSmLdtHrjYXhwLdZ3qrkvzrbimGnrZa2f5WiTdC3l3JfjtUUWiV6AG8k8h0wtR6JtNJ37zR0HXhjPTEAIX8B-zzknnI6kr5QOyGh_mlQpDl8z52rSSeOnpwcKFN4VunpZrXO_Yn9BL7u9OaZTie2D4oKHeTenVu4kn3FDXvyhwm-JroUg-98byr1q6jVp3Kpg_uHN8DDRO8I7aOdP7RZqVv6T9lCLusLluqdIw" SanitizedToken: "eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQzNDgxMzgwLCJpYXQiOjE3NDM0MzgxODAsInN1YiI6InVzZXIxIn0.**" IsAdmin: true, at schemeshard: 72057594046678944 2025-03-31T16:23:00.430923Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:23:00.431185Z node 5 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 281us result status StatusSuccess 2025-03-31T16:23:00.431688Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 2 } ChildrenExist: false 
} DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { PublicKeys { KeyId: 1 KeyDataPEM: "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAoWCI0WRqDMSqzHKNGZLl\nBtnMjRruFtjl+6doTiwjYIt47Uyg8a/3TxzYxHhAhcBXZde09AvF/0G+5SrRcQqa\nQ7cE2D8W1DvUZ2tvaspmrVolH/cE9zdPg0NEjCj63c7MJkFEtAPVDvIhNpnr3+6p\nCNhPGm0cwpO5u01KVhfLoopB3HSmBDl1gJQ6xibhCrx/NOZDKrGFOnh7sg5S2QnJ\nXEIEcx1ZI+wEjenzUtgklpejwSSmp75OlGgSaIBekRns03LCx6PlA1RxidDe9FUe\nVScln/DLKioLV0OscECHoFyUVgtcKrYagPAJSzH5GCcQUNmv5hb0z2Xvw2EI15v6\nGQIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1743524576376 } Sids { Name: "user1" Type: USER } Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TCdcStreamTests::VirtualTimestamps [GOOD] >> TCdcStreamTests::ResolvedTimestamps ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::RemoveUser_Owner-StrictAclCheck-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:22:57.015884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:22:57.015956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:22:57.015989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:22:57.016032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:22:57.016074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:22:57.016106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:22:57.016155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:22:57.016242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:22:57.016531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:22:57.087463Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot 
subscribe to console configs 2025-03-31T16:22:57.087513Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:22:57.091952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:22:57.092097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:22:57.092273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:22:57.097371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:22:57.097585Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:22:57.098181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:22:57.098405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:22:57.100397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:22:57.101518Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:22:57.101584Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:22:57.101755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:22:57.101804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:22:57.101849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:22:57.102015Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:22:57.109116Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:22:57.252872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:22:57.253153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:22:57.253373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:22:57.253607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:22:57.253684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:22:57.256267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:22:57.256406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: 
//MyRoot 2025-03-31T16:22:57.256602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:22:57.256651Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:22:57.256681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:22:57.256710Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:22:57.258938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:22:57.259134Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:22:57.259193Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:22:57.261573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:22:57.261627Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:22:57.261665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:22:57.261722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:22:57.265619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:22:57.268077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:22:57.268275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:22:57.269731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:22:57.269912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:22:57.269986Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:22:57.270294Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:22:57.270368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:22:57.270585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:22:57.270693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant 
no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:22:57.273267Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:22:57.273313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:22:57.273503Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:22:57.273549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:22:57.273815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:22:57.273875Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:22:57.273965Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:22:57.274001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:22:57.274037Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:22:57.274058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:22:57.274084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:22:57.274128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:22:57.274157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:22:57.274179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:22:57.274244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:22:57.274269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:22:57.274289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:22:57.276200Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:22:57.276335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:22:57.276385Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 105:0, at schemeshard: 72057594046678944 2025-03-31T16:23:00.959556Z node 5 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-03-31T16:23:00.959597Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-03-31T16:23:00.959647Z node 5 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-03-31T16:23:00.959690Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-03-31T16:23:00.959757Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 0 2025-03-31T16:23:00.959827Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-31T16:23:00.959855Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2025-03-31T16:23:00.959901Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-03-31T16:23:00.959937Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-03-31T16:23:00.959971Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2025-03-31T16:23:00.960015Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 105, publications: 2, subscribers: 0 2025-03-31T16:23:00.960046Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 2], 7 2025-03-31T16:23:00.960075Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 3], 5 2025-03-31T16:23:00.962645Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 105, response: Status: StatusSuccess TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:23:00.962854Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 105, database: /MyRoot, subject: , status: StatusSuccess, operation: MODIFY ACL, path: /MyRoot/Dir1/DirSub1, set owner:user2 2025-03-31T16:23:00.963071Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:23:00.963152Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-03-31T16:23:00.963275Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T16:23:00.963396Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:23:00.963446Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:206:2208], at schemeshard: 72057594046678944, txId: 105, path id: 3 2025-03-31T16:23:00.963498Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:206:2208], at schemeshard: 72057594046678944, txId: 105, path id: 2 2025-03-31T16:23:00.964180Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 
LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 105 2025-03-31T16:23:00.964294Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 105 2025-03-31T16:23:00.964333Z node 5 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 105 2025-03-31T16:23:00.964371Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2025-03-31T16:23:00.964426Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-03-31T16:23:00.964877Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 105 2025-03-31T16:23:00.964933Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 105 2025-03-31T16:23:00.964966Z node 5 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2025-03-31T16:23:00.964990Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 7 2025-03-31T16:23:00.965018Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-31T16:23:00.965083Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2025-03-31T16:23:00.968008Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-03-31T16:23:00.968372Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 TestModificationResult got TxId: 105, wait until txId: 105 TestModificationResults wait txId: 106 2025-03-31T16:23:00.971066Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "user1" } } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:23:00.971578Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 106:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046678944 2025-03-31T16:23:00.971708Z node 5 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-03-31T16:23:00.971754Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-03-31T16:23:00.971804Z node 5 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-03-31T16:23:00.971848Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-03-31T16:23:00.971912Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:23:00.971978Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is 
published: false 2025-03-31T16:23:00.972027Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-03-31T16:23:00.972069Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 106:0 2025-03-31T16:23:00.972111Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 106, publications: 1, subscribers: 0 2025-03-31T16:23:00.972152Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 106, [OwnerId: 72057594046678944, LocalPathId: 1], 10 2025-03-31T16:23:00.975017Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 106, response: Status: StatusSuccess TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:23:00.975168Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 106, database: /MyRoot, subject: , status: StatusSuccess, operation: REMOVE USER, path: /MyRoot 2025-03-31T16:23:00.975447Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:23:00.975522Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 106, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:23:00.975765Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:23:00.975836Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:206:2208], at schemeshard: 72057594046678944, txId: 106, path id: 1 2025-03-31T16:23:00.976563Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 106 2025-03-31T16:23:00.976717Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 106 2025-03-31T16:23:00.976782Z node 5 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 106 2025-03-31T16:23:00.976865Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2025-03-31T16:23:00.976932Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:23:00.977076Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 106, subscribers: 0 2025-03-31T16:23:00.979323Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 TestModificationResult got TxId: 106, wait until txId: 106 2025-03-31T16:23:00.979929Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Dir1/DirSub1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:23:00.980154Z node 5 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Dir1/DirSub1" took 261us result status StatusSuccess 2025-03-31T16:23:00.980514Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Dir1/DirSub1" PathDescription { Self 
{ Name: "DirSub1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 103 CreateStep: 5000002 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "user2" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:23:00.981073Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Execute at schemeshard: 72057594046678944 2025-03-31T16:23:00.981209Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Complete, result: Error: "Cannot find user: user1", at schemeshard: 72057594046678944 >> TCdcStreamTests::Attributes [GOOD] >> TCdcStreamTests::DocApi >> TCdcStreamTests::ResolvedTimestamps [GOOD] >> TCdcStreamTests::RetentionPeriod >> KqpPg::TableDeleteAllData+useSink [GOOD] >> KqpPg::TableDeleteAllData-useSink >> TSchemeShardTTLTests::CreateTableShouldFailOnUnknownColumn >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_UnknownColumn >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-true >> TSchemeShardTTLTestsWithReboots::CreateTable >> TCdcStreamTests::DocApi [GOOD] >> TCdcStreamTests::DocApiNegative >> TCdcStreamTests::RetentionPeriod [GOOD] >> TCdcStreamTests::TopicPartitions >> TColumnShardTestReadWrite::CompactionGCFailingBs [GOOD] >> TSchemeShardTTLTests::CreateTableShouldFailOnUnknownColumn [GOOD] >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_UnknownColumn [GOOD] >> TCdcStreamTests::DocApiNegative [GOOD] >> TCdcStreamTests::Negative ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_UnknownColumn [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:23:03.805940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:23:03.806061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:23:03.806102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:23:03.806140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:23:03.807123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:23:03.807181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:23:03.807259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:23:03.807403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:23:03.808898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:23:03.906662Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:23:03.906731Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:23:03.912166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:23:03.912284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:23:03.912468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:23:03.918216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:23:03.918477Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:23:03.919168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:23:03.919397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:23:03.927041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:23:03.934716Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:23:03.934805Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:23:03.934964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:23:03.935021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:23:03.935072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:23:03.935238Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:23:03.944886Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:23:04.049778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:23:04.057216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:23:04.060658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for 
pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:23:04.061928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:23:04.062004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:23:04.065438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:23:04.065591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:23:04.065803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:23:04.065945Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:23:04.065988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:23:04.066025Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:23:04.067965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:23:04.068027Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:23:04.068058Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:23:04.069815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:23:04.069859Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:23:04.069903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:23:04.069969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:23:04.074230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:23:04.076244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:23:04.076441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:23:04.077368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:23:04.077506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 
72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:23:04.077543Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:23:04.079040Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:23:04.079123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:23:04.079308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:23:04.079386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:23:04.082078Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:23:04.082150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:23:04.082343Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:23:04.082403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:23:04.082674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:23:04.082711Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:23:04.082788Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:23:04.082836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:23:04.082868Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:23:04.082894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:23:04.082925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:23:04.082957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:23:04.082986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:23:04.083026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:23:04.083097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:23:04.083133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:23:04.083170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:23:04.084735Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:23:04.084820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, 
at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:23:04.084869Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-03-31T16:23:04.084911Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-03-31T16:23:04.084956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:23:04.085080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-03-31T16:23:04.087618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-03-31T16:23:04.090158Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-03-31T16:23:04.100633Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] Bootstrap 2025-03-31T16:23:04.118775Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] Become StateWork (SchemeCache [1:272:2263]) 2025-03-31T16:23:04.120893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "TTLEnabledTable" Schema { Columns { Name: "key" Type: "Uint64" NotNull: true } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" } TtlSettings { Enabled { ColumnName: "created_at" } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:23:04.121858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateColumnTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:23:04.124067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Incorrect ttl column - not found in scheme, at schemeshard: 72057594046678944 2025-03-31T16:23:04.126401Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-31T16:23:04.129355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Incorrect ttl column - not found in scheme" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:23:04.129538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Incorrect ttl column - not found in scheme, operation: CREATE COLUMN TABLE, path: /MyRoot/ 2025-03-31T16:23:04.130147Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnUnknownColumn [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: 
[1:110:2141] 2025-03-31T16:23:03.805968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:23:03.806093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:23:03.806141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:23:03.806173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:23:03.807132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:23:03.807204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:23:03.807285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:23:03.807428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:23:03.808863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:23:03.886130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:23:03.886191Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:23:03.892115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:23:03.892228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:23:03.892378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:23:03.899360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:23:03.899709Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:23:03.904274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:23:03.905344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:23:03.927300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:23:03.934682Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:23:03.934758Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:23:03.934915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:23:03.934979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:23:03.935055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:23:03.935209Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 
2025-03-31T16:23:03.944212Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:23:04.079208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:23:04.079437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:23:04.079722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:23:04.079963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:23:04.080021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:23:04.082308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:23:04.082448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:23:04.082637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:23:04.082699Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:23:04.082736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:23:04.082767Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:23:04.084828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:23:04.084907Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:23:04.084943Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:23:04.086744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:23:04.086788Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:23:04.086826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:23:04.086890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:23:04.090676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:23:04.092764Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-03-31T16:23:04.092962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-03-31T16:23:04.093934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-03-31T16:23:04.094072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-03-31T16:23:04.094132Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-03-31T16:23:04.094407Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-03-31T16:23:04.094457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-03-31T16:23:04.094616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-03-31T16:23:04.094697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-03-31T16:23:04.097171Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-31T16:23:04.097219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-31T16:23:04.097393Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-31T16:23:04.097446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1
2025-03-31T16:23:04.097676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:23:04.097721Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-03-31T16:23:04.097813Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-03-31T16:23:04.097846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-31T16:23:04.097883Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-03-31T16:23:04.097913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-31T16:23:04.097947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-03-31T16:23:04.097998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-31T16:23:04.098027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-03-31T16:23:04.098060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-03-31T16:23:04.098129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-03-31T16:23:04.098187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-03-31T16:23:04.098241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-03-31T16:23:04.099976Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-03-31T16:23:04.100067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-03-31T16:23:04.100095Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1
2025-03-31T16:23:04.100120Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3
2025-03-31T16:23:04.100149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-03-31T16:23:04.100258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0
2025-03-31T16:23:04.103321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1
2025-03-31T16:23:04.103936Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944
TestModificationResults wait txId: 101
2025-03-31T16:23:04.105009Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] Bootstrap
2025-03-31T16:23:04.121263Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] Become StateWork (SchemeCache [1:272:2263])
2025-03-31T16:23:04.123837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "created_at" } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-03-31T16:23:04.124271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944
2025-03-31T16:23:04.124412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "created_at" } }, at schemeshard: 72057594046678944
2025-03-31T16:23:04.126533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Cannot enable TTL on unknown column: 'created_at', at schemeshard: 72057594046678944
2025-03-31T16:23:04.127798Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617
2025-03-31T16:23:04.130354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Cannot enable TTL on unknown column: \'created_at\'" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944
2025-03-31T16:23:04.130514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Cannot enable TTL on unknown column: 'created_at', operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable
2025-03-31T16:23:04.131134Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617
TestModificationResult got TxId: 101, wait until txId: 101
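The txId 101 records above show schemeshard rejecting ESchemeOpCreateTable with StatusSchemeError because TTLSettings names 'created_at', which is not among the table's columns ('key', 'modified_at'). A rough YQL sketch of the same check, assuming YDB's documented TTL table option (hypothetical: the test drives schemeshard directly with the protobuf transaction shown above rather than YQL, and the interval below is an arbitrary placeholder, since the test sets no ExpireAfterSeconds):

-- Rejected, as in the log: the TTL column does not exist in the table
CREATE TABLE `/MyRoot/TTLEnabledTable` (
    key Uint64,
    modified_at Timestamp,
    PRIMARY KEY (key)
) WITH (TTL = Interval("PT1H") ON created_at);   -- "Cannot enable TTL on unknown column"

-- Accepted: TTL references an existing Timestamp column
CREATE TABLE `/MyRoot/TTLEnabledTable` (
    key Uint64,
    modified_at Timestamp,
    PRIMARY KEY (key)
) WITH (TTL = Interval("PT1H") ON modified_at);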
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionGCFailingBs [GOOD]
Test command err:
2025-03-31T16:22:02.133935Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0
2025-03-31T16:22:02.238671Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor;
2025-03-31T16:22:02.266703Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished;
2025-03-31T16:22:02.267050Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184
2025-03-31T16:22:02.276255Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0;
2025-03-31T16:22:02.276518Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules;
2025-03-31T16:22:02.276788Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks;
2025-03-31T16:22:02.276921Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner;
2025-03-31T16:22:02.277028Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId;
2025-03-31T16:22:02.277148Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup;
2025-03-31T16:22:02.277260Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer;
2025-03-31T16:22:02.277367Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks;
2025-03-31T16:22:02.277494Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks;
2025-03-31T16:22:02.277632Z node
1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:22:02.277767Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:22:02.277898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:22:02.311905Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:22:02.312073Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:22:02.312134Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:22:02.312411Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:22:02.312600Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:22:02.312681Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:22:02.312730Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:22:02.312839Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:22:02.312923Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:22:02.313008Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:22:02.313047Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:22:02.313293Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:22:02.313369Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:22:02.313414Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:22:02.313449Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 
2025-03-31T16:22:02.313600Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:22:02.313677Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:22:02.313731Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:22:02.313766Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:22:02.313848Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:22:02.313905Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:22:02.313945Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:22:02.314017Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:22:02.314063Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:22:02.314094Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:22:02.314574Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=61; 2025-03-31T16:22:02.314673Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=41; 2025-03-31T16:22:02.314781Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=51; 2025-03-31T16:22:02.314875Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=39; 2025-03-31T16:22:02.315048Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:22:02.315291Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:22:02.315354Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:22:02.315604Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:22:02.315664Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:22:02.315714Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:22:02.315898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:22:02.315944Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:22:02.315987Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-31T16:22:02.316223Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:22:02.316282Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:22:02.316316Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-31T16:22:02.316468Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T16:22:02.316510Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T16:22:02.316579Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
;event=skip_indexation;reason=in_progress;count=1;insert_overload_size=5870200;indexing_debug={task_ids=6aeee0ec-e4c11f0-8df82d9b-37eb653e,;}; 2025-03-31T16:23:02.873613Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:281:2294];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:831;event=skip_compaction;reason=disabled; 2025-03-31T16:23:02.873650Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:281:2294];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=0; 2025-03-31T16:23:02.873700Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:281:2294];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-31T16:23:02.873736Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:281:2294];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-31T16:23:02.873767Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:281:2294];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-31T16:23:02.873824Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:281:2294];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.408000s; 2025-03-31T16:23:02.873863Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:281:2294];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-31T16:23:03.068362Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:281:2294];fline=actor.cpp:22;event=flush_writing;size=4735248;count=1; 2025-03-31T16:23:03.105105Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 45 at tablet 9437184 2025-03-31T16:23:03.105382Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 3:87 Blob count: 1 2025-03-31T16:23:03.117793Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 3:87 Blob count: 1 2025-03-31T16:23:03.117949Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=263;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:769;event=skip_indexation;reason=in_progress;count=1;insert_overload_size=5870200;indexing_debug={task_ids=6aeee0ec-e4c11f0-8df82d9b-37eb653e,;}; 2025-03-31T16:23:03.122329Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:3:44:255:1:574112:0]; 2025-03-31T16:23:03.122418Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:3:44:255:2:592928:0]; GC for channel 3 deletes blobs: GC for channel 2 deletes blobs: GC for channel 4 deletes blobs: [9437184:3:83:4:0:5870200:0] Added portions: 151 152 2025-03-31T16:23:03.143311Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 9437184 2025-03-31T16:23:03.143624Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[264] (CS::INDEXATION) apply at tablet 9437184 2025-03-31T16:23:03.147046Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 
9437184 Save Batch GenStep: 3:86 Blob count: 2 2025-03-31T16:23:03.147227Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=92311612;raw_bytes=143732845;count=39;records=1462497} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=7381080;raw_bytes=7369506;count=2;records=75000} inactive {blob_bytes=0;raw_bytes=0;count=0;records=0} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-03-31T16:23:03.171688Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=6aeee0ec-e4c11f0-8df82d9b-37eb653e;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; 2025-03-31T16:23:03.171779Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=6aeee0ec-e4c11f0-8df82d9b-37eb653e;fline=with_appended.cpp:65;portions=75,76,;task_id=6aeee0ec-e4c11f0-8df82d9b-37eb653e; 2025-03-31T16:23:03.172021Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=6aeee0ec-e4c11f0-8df82d9b-37eb653e;fline=manager.cpp:15;event=unlock;process_id=CS::INDEXATION::6aeee0ec-e4c11f0-8df82d9b-37eb653e; 2025-03-31T16:23:03.172076Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=6aeee0ec-e4c11f0-8df82d9b-37eb653e;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-31T16:23:03.172134Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=6aeee0ec-e4c11f0-8df82d9b-37eb653e;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:23:03.172173Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;task_id=6aeee0ec-e4c11f0-8df82d9b-37eb653e;tablet_id=9437184;fline=columnshard_impl.cpp:831;event=skip_compaction;reason=disabled; 2025-03-31T16:23:03.172218Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=6aeee0ec-e4c11f0-8df82d9b-37eb653e;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=0; 2025-03-31T16:23:03.172277Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=6aeee0ec-e4c11f0-8df82d9b-37eb653e;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-31T16:23:03.172321Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=6aeee0ec-e4c11f0-8df82d9b-37eb653e;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-31T16:23:03.172362Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=6aeee0ec-e4c11f0-8df82d9b-37eb653e;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-31T16:23:03.172434Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=6aeee0ec-e4c11f0-8df82d9b-37eb653e;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.396500s; 2025-03-31T16:23:03.172483Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=6aeee0ec-e4c11f0-8df82d9b-37eb653e;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-31T16:23:03.172571Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:85:3:0:5870200:0] 2025-03-31T16:23:03.172622Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 3:86 Blob count: 2 2025-03-31T16:23:03.173768Z node 1 :TX_COLUMNSHARD DEBUG: fline=task.cpp:21;event=free_resources;task_id=45;external_task_id=6aeee0ec-e4c11f0-8df82d9b-37eb653e;mem=5963210;cpu=0; 
2025-03-31T16:23:03.174535Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:23:03.175426Z node 1 :TX_COLUMNSHARD DEBUG: PlanStep 6080043 at tablet 9437184, mediator 0 2025-03-31T16:23:03.175505Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[267] execute at tablet 9437184 2025-03-31T16:23:03.175816Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=1043;fline=abstract.h:83;progress_tx_id=1043;lock_id=1;broken=0; 2025-03-31T16:23:03.176009Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=1043;fline=tx_controller.cpp:211;event=finished_tx;tx_id=1043; 2025-03-31T16:23:03.188166Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[267] complete at tablet 9437184 2025-03-31T16:23:03.188262Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=abstract.h:93;progress_tx_id=1043;lock_id=1;broken=0; 2025-03-31T16:23:03.188344Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=5870200; 2025-03-31T16:23:03.188467Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=manager.cpp:10;event=lock;process_id=CS::INDEXATION::6b790c54-e4c11f0-8e00707b-3830036; 2025-03-31T16:23:03.188531Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=ro_controller.cpp:45;event=CS::INDEXATION;tablet_id=9437184; 2025-03-31T16:23:03.188611Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:754;event=indexation;bytes=5870200;blobs_count=1;max_limit=251658240;has_more=0;external_task_id=6b790c54-e4c11f0-8e00707b-3830036; 2025-03-31T16:23:03.188661Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:620;event=start_changes;type=CS::INDEXATION;task_id=6b790c54-e4c11f0-8e00707b-3830036; 2025-03-31T16:23:03.188769Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:281:2294];ev_type=NKikimr::NOlap::NResourceBroker::NSubscribe::TEvStartTask;fline=actor.cpp:38;event=ask_resources;task=cpu=0;mem=5963210;external_task_id=6b790c54-e4c11f0-8e00707b-3830036;type=CS::INDEXATION;priority=0;; 2025-03-31T16:23:03.188988Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:281:2294];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=actor.cpp:29;event=result_resources;task_id=46;task=cpu=0;mem=5963210;external_task_id=6b790c54-e4c11f0-8e00707b-3830036;type=CS::INDEXATION;priority=0;; 2025-03-31T16:23:03.189020Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:281:2294];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:9;event=resource_allocated;external_task_id=6b790c54-e4c11f0-8e00707b-3830036;mem=5963210;cpu=0; 2025-03-31T16:23:03.189051Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:281:2294];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:40;event=allocate_resources;external_task_id=6b790c54-e4c11f0-8e00707b-3830036;task_id=46;mem=5963210;cpu=0; 2025-03-31T16:23:03.189188Z node 1 :TX_COLUMNSHARD DEBUG: 
external_task_id=6b790c54-e4c11f0-8e00707b-3830036;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=6b790c54-e4c11f0-8e00707b-3830036; Added portions: 153 154 2025-03-31T16:23:03.716131Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=6b790c54-e4c11f0-8e00707b-3830036;fline=actor.cpp:48;task=agents_waiting=0;additional_info=();; 2025-03-31T16:23:03.716282Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:281:2294];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=2; Compactions happened: 14 Indexations happened: 31 Cleanups happened: 1 Old portions: 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 Cleaned up portions: 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> TCdcStreamTests::Negative [GOOD] >> TCdcStreamTests::DisableProtoSourceIdInfo >> TCdcStreamTests::TopicPartitions [GOOD] >> TCdcStreamTests::ReplicationAttribute |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TCdcStreamTests::DisableProtoSourceIdInfo [GOOD] >> TCdcStreamTests::CreateStream >> TSchemeShardTTLTests::CreateTableShouldSucceedOnIndexedTable >> TestKinesisHttpProxy::CreateStreamInIncorrectDb [GOOD] >> TestYmqHttpProxy::TestCreateQueue [GOOD] >> TestYmqHttpProxy::TestGetQueueUrl [GOOD] >> TCdcStreamTests::ReplicationAttribute [GOOD] >> TCdcStreamTests::RebootSchemeShard >> TPQTest::TestPQReadAhead [GOOD] >> TPQTest::TestPQCacheSizeManagement >> TestKinesisHttpProxy::CreateStreamWithInvalidName >> DataStreams::TestGetRecords1MBMessagesOneByOneByTS [GOOD] >> DataStreams::TestGetRecordsStreamWithMultipleShards >> TestYmqHttpProxy::TestGetQueueUrlOfNotExistingQueue >> TestYmqHttpProxy::TestCreateQueueWithBadQueueName >> TSchemeShardTTLTests::CreateTableShouldSucceedOnIndexedTable [GOOD] >> TSchemeShardTTLTests::CreateTableShouldFailOnBeforeEpochTTL >> TSchemeShardTTLTests::AlterTableShouldSuccessOnSimultaneousAddColumnAndEnableTTL >> TCdcStreamTests::CreateStream [GOOD] >> TCdcStreamTests::AlterStream >> DataShardVolatile::DistributedWrite [GOOD] >> DataShardVolatile::DistributedWriteBrokenLock ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldSucceedOnIndexedTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:23:07.252566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue 
configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:23:07.252656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:23:07.252683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:23:07.252706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:23:07.252736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:23:07.252768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:23:07.252826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:23:07.252961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:23:07.253277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:23:07.320760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:23:07.320821Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:23:07.325275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:23:07.325346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:23:07.325473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:23:07.329986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:23:07.330200Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:23:07.330769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:23:07.330933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:23:07.332671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:23:07.333704Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:23:07.333745Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:23:07.333846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:23:07.333877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:23:07.333903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:23:07.333997Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:23:07.339339Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for 
TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:23:07.431373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:23:07.431560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:23:07.431777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:23:07.432004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:23:07.432058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:23:07.434211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:23:07.434328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:23:07.434498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:23:07.434548Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:23:07.434582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:23:07.434613Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:23:07.436470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:23:07.436529Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:23:07.436563Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:23:07.437873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:23:07.437913Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:23:07.437949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:23:07.438008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:23:07.440641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:23:07.442408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 
72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:23:07.442553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:23:07.443428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:23:07.443543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:23:07.443598Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:23:07.443844Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:23:07.443898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:23:07.444023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:23:07.444089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:23:07.445923Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:23:07.445956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:23:07.446096Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:23:07.446151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:23:07.446322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:23:07.446352Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:23:07.446415Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:23:07.446435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:23:07.446470Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:23:07.446500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:23:07.446535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:23:07.446574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:23:07.446598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:23:07.446619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:23:07.446666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:23:07.446705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:23:07.446740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:23:07.448235Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:23:07.448327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:23:07.448355Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... esult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 1 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1324 } } 2025-03-31T16:23:07.698846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 323 RawX2: 4294969602 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-03-31T16:23:07.698889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409547, partId: 0 2025-03-31T16:23:07.699025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 323 RawX2: 4294969602 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-03-31T16:23:07.699104Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-03-31T16:23:07.699179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 323 RawX2: 4294969602 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-03-31T16:23:07.699255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-31T16:23:07.699300Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:23:07.699333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-03-31T16:23:07.699387Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2025-03-31T16:23:07.699794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 319 RawX2: 4294969599 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-03-31T16:23:07.699846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 
72075186233409546, partId: 2 2025-03-31T16:23:07.699941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:2, at schemeshard: 72057594046678944, message: Source { RawX1: 319 RawX2: 4294969599 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-03-31T16:23:07.699970Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-03-31T16:23:07.700018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 319 RawX2: 4294969599 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-03-31T16:23:07.700054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:2, shardIdx: 72057594046678944:2, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-31T16:23:07.700087Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:2, at schemeshard: 72057594046678944 2025-03-31T16:23:07.700116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:2, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-03-31T16:23:07.700149Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:2 129 -> 240 2025-03-31T16:23:07.703195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-31T16:23:07.703402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-31T16:23:07.706090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-31T16:23:07.706240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:23:07.706332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-31T16:23:07.706406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2025-03-31T16:23:07.706486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:23:07.706621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2025-03-31T16:23:07.707038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:23:07.707108Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-03-31T16:23:07.707222Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 2/3 2025-03-31T16:23:07.707265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2025-03-31T16:23:07.707319Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 2/3 2025-03-31T16:23:07.707351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2025-03-31T16:23:07.707383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 2/3, is published: true 2025-03-31T16:23:07.707655Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:2, at schemeshard: 72057594046678944 2025-03-31T16:23:07.707705Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:2 ProgressState 2025-03-31T16:23:07.707761Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:2 progress is 3/3 2025-03-31T16:23:07.707782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-03-31T16:23:07.707808Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:2 progress is 3/3 2025-03-31T16:23:07.707845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-03-31T16:23:07.707870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 3/3, is published: true 2025-03-31T16:23:07.707936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:377:2345] message: TxId: 101 2025-03-31T16:23:07.707977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-03-31T16:23:07.708020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-03-31T16:23:07.708051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-03-31T16:23:07.708168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-31T16:23:07.708208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:1 2025-03-31T16:23:07.708226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:1 2025-03-31T16:23:07.708250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-03-31T16:23:07.708270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:2 2025-03-31T16:23:07.708290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:2 2025-03-31T16:23:07.708329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-03-31T16:23:07.710574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-31T16:23:07.710625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:378:2346] TestWaitNotification: OK eventTxId 101 2025-03-31T16:23:07.711143Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:23:07.711432Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 291us result status StatusSuccess 2025-03-31T16:23:07.711930Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { 
GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByExpireAt" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "modified_at" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TCdcStreamTests::RebootSchemeShard [GOOD] >> TCdcStreamTests::StreamOnIndexTableNegative >> TSchemeShardTTLTests::CreateTableShouldFailOnBeforeEpochTTL [GOOD] >> TSchemeShardTTLTests::AlterTableShouldSuccessOnSimultaneousAddColumnAndEnableTTL [GOOD] >> TCdcStreamTests::AlterStream [GOOD] >> TCdcStreamTests::DropStream ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnBeforeEpochTTL [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:23:08.607705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:23:08.607825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:23:08.607862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:23:08.607898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
OperationsProcessing config: using default configuration 2025-03-31T16:23:08.607941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:23:08.607986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:23:08.608061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:23:08.608209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:23:08.608625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:23:08.702842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:23:08.702909Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:23:08.708530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:23:08.708639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:23:08.708836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:23:08.717584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:23:08.717854Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:23:08.718503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:23:08.718724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:23:08.720776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:23:08.722101Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:23:08.722159Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:23:08.722342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:23:08.722394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:23:08.722434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:23:08.722595Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:23:08.729827Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:23:08.853061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:23:08.853247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 
2025-03-31T16:23:08.853489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:23:08.853699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:23:08.853752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:23:08.856014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:23:08.856127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:23:08.856299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:23:08.856339Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:23:08.856361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:23:08.856383Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:23:08.858356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:23:08.858399Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:23:08.858431Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:23:08.860169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:23:08.860218Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:23:08.860260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:23:08.860326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:23:08.870034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:23:08.873069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:23:08.873240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:23:08.874046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:23:08.874161Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:23:08.874217Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:23:08.874444Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:23:08.874490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:23:08.874621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:23:08.874712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:23:08.876518Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:23:08.876560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:23:08.876690Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:23:08.876738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:23:08.876908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:23:08.876947Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:23:08.877019Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:23:08.877042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:23:08.877070Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:23:08.877092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:23:08.877120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:23:08.877153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:23:08.877175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:23:08.877199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:23:08.877260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:23:08.877299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:23:08.877333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:23:08.878719Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, 
cookie: 1 2025-03-31T16:23:08.878802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:23:08.878836Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-03-31T16:23:08.878862Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-03-31T16:23:08.878889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:23:08.878977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-03-31T16:23:08.881572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-03-31T16:23:08.881985Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 WARNING: All log messages before y_absl::InitializeLog() is called are written to STDERR W0000 00:00:1743438188.882998 491103 text_format.cc:398] Warning parsing text-format NKikimrSchemeOp.TTableDescription: 9:35: text format contains deprecated field "ExpireAfterSeconds" TestModificationResults wait txId: 101 2025-03-31T16:23:08.883444Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] Bootstrap 2025-03-31T16:23:08.896657Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] Become StateWork (SchemeCache [1:272:2263]) 2025-03-31T16:23:08.898681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3153600000 Tiers { ApplyAfterSeconds: 3153600000 Delete { } } } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:23:08.899028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:23:08.899189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3153600000 Tiers { ApplyAfterSeconds: 3153600000 Delete { } } } }, at schemeshard: 72057594046678944 2025-03-31T16:23:08.899558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: TTL should be less than 1743438188 seconds (20178 days, 55 years). The ttl behaviour is undefined before 1970., at schemeshard: 72057594046678944 2025-03-31T16:23:08.900560Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-31T16:23:08.902949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "TTL should be less than 1743438188 seconds (20178 days, 55 years). The ttl behaviour is undefined before 1970." 
TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:23:08.903118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: TTL should be less than 1743438188 seconds (20178 days, 55 years). The ttl behaviour is undefined before 1970., operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2025-03-31T16:23:08.903689Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldSuccessOnSimultaneousAddColumnAndEnableTTL [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-31T16:23:08.554084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:23:08.554189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:23:08.554227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:23:08.554256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:23:08.554301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:23:08.554328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:23:08.554405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:23:08.554469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:23:08.554773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:23:08.630498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:23:08.630558Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:23:08.638745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:23:08.639029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:23:08.639174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:23:08.646655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:23:08.646962Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:23:08.647587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:23:08.647944Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:23:08.649738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:23:08.650855Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:23:08.650909Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:23:08.650970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:23:08.651013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:23:08.651045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:23:08.651242Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:23:08.657680Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-03-31T16:23:08.782765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:23:08.782988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:23:08.783220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:23:08.783397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:23:08.783456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:23:08.785454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:23:08.785601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:23:08.785806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:23:08.785861Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:23:08.785892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:23:08.785919Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:23:08.787575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:23:08.787617Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts 
operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:23:08.787644Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:23:08.789131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:23:08.789166Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:23:08.789201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:23:08.789247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:23:08.792183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:23:08.793967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:23:08.794103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:23:08.795201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:23:08.795375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:23:08.795423Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:23:08.795696Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:23:08.795754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:23:08.795918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:23:08.795992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:23:08.798150Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:23:08.798195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:23:08.798368Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:23:08.798426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 
1 2025-03-31T16:23:08.798787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:23:08.798840Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:23:08.798936Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:23:08.798965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:23:08.799003Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:23:08.799036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:23:08.799067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:23:08.799137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:23:08.799172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:23:08.799200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:23:08.799263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:23:08.799300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:23:08.799349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:23:08.801241Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:23:08.801362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:23:08.801414Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2025-03-31T16:23:09.057450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:23:09.057601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:23:09.057651Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId# 102:0 HandleReply TEvOperationPlan, operationId: 102:0, stepId: 5000003, at schemeshard: 72057594046678944 2025-03-31T16:23:09.059116Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2025-03-31T16:23:09.059301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-03-31T16:23:09.066776Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:23:09.066855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T16:23:09.067136Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:23:09.067197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-03-31T16:23:09.067605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:23:09.067677Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2025-03-31T16:23:09.068263Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:23:09.068355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:23:09.068406Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-03-31T16:23:09.068447Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2025-03-31T16:23:09.068500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-31T16:23:09.068594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 102 2025-03-31T16:23:09.070292Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 2911 } } 2025-03-31T16:23:09.070333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-03-31T16:23:09.070436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 2911 } } 2025-03-31T16:23:09.070520Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 2911 } } 2025-03-31T16:23:09.071936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 309 RawX2: 4294969592 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-03-31T16:23:09.071983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-03-31T16:23:09.072123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 309 RawX2: 4294969592 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-03-31T16:23:09.072214Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-03-31T16:23:09.072305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 309 RawX2: 4294969592 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-03-31T16:23:09.072363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-31T16:23:09.072394Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:23:09.072428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-03-31T16:23:09.072475Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-03-31T16:23:09.074567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-31T16:23:09.076441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:23:09.076573Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:23:09.076855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:23:09.076915Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-03-31T16:23:09.077014Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-31T16:23:09.077046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T16:23:09.077104Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-31T16:23:09.077142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T16:23:09.077175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-03-31T16:23:09.077244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:337:2316] message: TxId: 102 2025-03-31T16:23:09.077297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T16:23:09.077347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-03-31T16:23:09.077384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-03-31T16:23:09.077491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-31T16:23:09.079319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-31T16:23:09.079370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:395:2367] TestWaitNotification: OK eventTxId 102 2025-03-31T16:23:09.079946Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:23:09.080182Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 237us result status StatusSuccess 2025-03-31T16:23:09.080623Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 
0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TCdcStreamTests::StreamOnIndexTableNegative [GOOD] >> TCdcStreamTests::StreamOnIndexTable |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> YdbYqlClient::SimpleColumnFamilies [GOOD] >> YdbYqlClient::TableKeyRangesSinglePartition >> TCdcStreamTests::StreamOnIndexTable [GOOD] >> TCdcStreamTests::StreamOnBuildingIndexTable >> TCdcStreamTests::DropStream [GOOD] >> TCdcStreamTests::AlterStreamImplShouldFail >> TPQTest::TestLowWatermark [GOOD] >> TPQTest::TestGetTimestamps >> TSchemeShardTTLTests::BuildIndexShouldSucceed >> DataStreams::TestGetRecordsStreamWithMultipleShards [GOOD] >> DataStreams::TestGetRecordsWithBigSeqno >> TestKinesisHttpProxy::CreateStreamWithInvalidName [GOOD] >> TestYmqHttpProxy::TestGetQueueUrlOfNotExistingQueue [GOOD] >> TCdcStreamTests::AlterStreamImplShouldFail [GOOD] >> TCdcStreamTests::DropStreamImplShouldFail >> TestYmqHttpProxy::TestCreateQueueWithBadQueueName [GOOD] >> TestKinesisHttpProxy::CreateStreamWithDifferentRetentions >> DataShardVolatile::DistributedWriteBrokenLock [GOOD] >> DataShardVolatile::DistributedWriteShardRestartBeforePlan+UseSink >> TCdcStreamTests::StreamOnBuildingIndexTable [GOOD] >> TCdcStreamWithInitialScanTests::InitialScanEnabled >> TestYmqHttpProxy::TestGetQueueUrlWithIAM >> TestYmqHttpProxy::TestCreateQueueWithEmptyName >> TSchemeShardTTLTests::BuildIndexShouldSucceed [GOOD] >> TPQTest::TestGetTimestamps [GOOD] >> TPQTest::TestPQCacheSizeManagement [GOOD] >> TCdcStreamTests::DropStreamImplShouldFail [GOOD] >> TCdcStreamTests::CopyTableShouldNotCopyStream ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::BuildIndexShouldSucceed [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:23:13.004494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 
600.000000s, MaxRate# 1 2025-03-31T16:23:13.004617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:23:13.004664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:23:13.004701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:23:13.004762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:23:13.004799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:23:13.004885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:23:13.005075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:23:13.005536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:23:13.089354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:23:13.089427Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:23:13.094518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:23:13.094652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:23:13.094823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:23:13.100654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:23:13.100916Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:23:13.101499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:23:13.101691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:23:13.103877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:23:13.104986Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:23:13.105041Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:23:13.105156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:23:13.105194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:23:13.105233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:23:13.105400Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:23:13.112529Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:23:13.229038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, 
message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:23:13.229286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:23:13.229564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:23:13.229868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:23:13.229931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:23:13.232858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:23:13.233068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:23:13.233292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:23:13.233351Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:23:13.233395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:23:13.233431Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:23:13.235865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:23:13.235945Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:23:13.235989Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:23:13.238315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:23:13.238382Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:23:13.238429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:23:13.238498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:23:13.242697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:23:13.245178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:23:13.245404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, 
tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:23:13.246479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:23:13.246656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:23:13.246713Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:23:13.246994Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:23:13.247109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:23:13.247295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:23:13.247392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:23:13.250337Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:23:13.250392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:23:13.250587Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:23:13.250652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:23:13.250895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:23:13.250944Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:23:13.251048Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:23:13.251106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:23:13.251147Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:23:13.251180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:23:13.251222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:23:13.251267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:23:13.251309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:23:13.251340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:23:13.251429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:23:13.251470Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:23:13.251527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:23:13.253697Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:23:13.253828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:23:13.253865Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... Type: EIndexTypeGlobal, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:384:2356], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-31T16:23:13.963780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710760:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710760 msg type: 269090816 2025-03-31T16:23:13.963936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710760, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710760 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710760 at step: 5000006 2025-03-31T16:23:13.964305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710760, at schemeshard: 72057594046678944 2025-03-31T16:23:13.964357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 0/1, is published: true 2025-03-31T16:23:13.964424Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710760, at schemeshard: 72057594046678944 2025-03-31T16:23:13.964643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000006, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:23:13.964808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710760 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000006 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:23:13.964883Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 HandleReply TEvOperationPlan: step# 5000006 2025-03-31T16:23:13.964954Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710760:0 128 -> 240 2025-03-31T16:23:13.968031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 281474976710760:0, at schemeshard: 72057594046678944 2025-03-31T16:23:13.968112Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710760:0 ProgressState 2025-03-31T16:23:13.968223Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1 2025-03-31T16:23:13.968265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-03-31T16:23:13.968312Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1 2025-03-31T16:23:13.968356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-03-31T16:23:13.968399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 1/1, is published: true 2025-03-31T16:23:13.968486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:128:2153] message: TxId: 281474976710760 2025-03-31T16:23:13.968547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-03-31T16:23:13.968620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710760:0 2025-03-31T16:23:13.968678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710760:0 2025-03-31T16:23:13.968776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 FAKE_COORDINATOR: Erasing txId 281474976710760 2025-03-31T16:23:13.971725Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2025-03-31T16:23:13.971829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710760 2025-03-31T16:23:13.971916Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfoId: 102 2025-03-31T16:23:13.972034Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:384:2356], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-31T16:23:13.974883Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-03-31T16:23:13.975031Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: 
EIndexTypeGlobal, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:384:2356], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-31T16:23:13.975149Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-03-31T16:23:13.977670Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-03-31T16:23:13.977792Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:384:2356], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-31T16:23:13.977880Z node 1 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2025-03-31T16:23:13.978098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-31T16:23:13.978159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:475:2436] TestWaitNotification: OK eventTxId 102 2025-03-31T16:23:13.978883Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:23:13.979264Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 401us result status StatusSuccess 2025-03-31T16:23:13.979998Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty 
Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByValue" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 3 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TCdcStreamWithInitialScanTests::InitialScanEnabled [GOOD] >> TCdcStreamWithInitialScanTests::InitialScanDisabled ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::TestGetTimestamps [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:108:2057] recipient: [1:101:2135] 2025-03-31T16:21:12.091466Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-03-31T16:21:12.095918Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-03-31T16:21:12.096248Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] doesn't have tx info 2025-03-31T16:21:12.096293Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-31T16:21:12.096326Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] no config, start with empty partitions and default config 2025-03-31T16:21:12.096368Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2025-03-31T16:21:12.096407Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:12.096491Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927938 is [1:153:2174] sender: [1:154:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:180:2057] recipient: [1:14:2061] 2025-03-31T16:21:12.113929Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:179:2194], now have 1 active actors on pipe 2025-03-31T16:21:12.114095Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-03-31T16:21:12.132994Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "user1" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "user1" Generation: 1 Important: true } 2025-03-31T16:21:12.135723Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "user1" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "user1" Generation: 1 Important: true } 2025-03-31T16:21:12.135904Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:12.136670Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "user1" 
LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "user1" Generation: 1 Important: true } 2025-03-31T16:21:12.136796Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitConfigStep 2025-03-31T16:21:12.137188Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-31T16:21:12.137533Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:186:2199] 2025-03-31T16:21:12.140072Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--asdfgs--topic:0:Initializer] Initializing completed. 2025-03-31T16:21:12.140142Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:186:2199] 2025-03-31T16:21:12.140191Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--asdfgs--topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-31T16:21:12.142508Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-03-31T16:21:12.142698Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit request with generation 1 2025-03-31T16:21:12.142751Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit with generation 1 done 2025-03-31T16:21:12.142792Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 reinit request with generation 1 2025-03-31T16:21:12.142829Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 reinit with generation 1 done 2025-03-31T16:21:12.142976Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-31T16:21:12.143015Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-31T16:21:12.143052Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-31T16:21:12.143123Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-31T16:21:12.143157Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cuser 2025-03-31T16:21:12.143183Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uuser 2025-03-31T16:21:12.143203Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cuser1 2025-03-31T16:21:12.143223Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uuser1 2025-03-31T16:21:12.143252Z node 1 :PERSQUEUE DEBUG: [PQ: 
72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-31T16:21:12.143307Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-03-31T16:21:12.143427Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-03-31T16:21:12.143462Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-03-31T16:21:12.143648Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-03-31T16:21:12.146514Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-03-31T16:21:12.146897Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:193:2204], now have 1 active actors on pipe 2025-03-31T16:21:12.149191Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:196:2206], now have 1 active actors on pipe 2025-03-31T16:21:12.149313Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2025-03-31T16:21:12.149359Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2025-03-31T16:21:12.150084Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client PART message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid0' SeqNo: 1 partNo : 0 messageNo: 0 size: 511957 2025-03-31T16:21:12.150558Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client PART message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid0' SeqNo: 1 partNo : 1 messageNo: 0 size: 511957 2025-03-31T16:21:12.151059Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client PART message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid0' SeqNo: 1 partNo : 2 messageNo: 0 size: 511957 2025-03-31T16:21:12.151590Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client PART message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid0' SeqNo: 1 partNo : 3 messageNo: 0 size: 511957 2025-03-31T16:21:12.151689Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client PART message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid0' SeqNo: 1 partNo : 4 messageNo: 0 size: 49324 2025-03-31T16:21:12.151729Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid0' SeqNo: 1 partNo : 4 messageNo: 0 size 49324 offset: 0 2025-03-31T16:21:12.151835Z node 1 :PERSQUEUE DEBUG: tablet 72057594037927937 topic 'rt3.dc1--asdfgs--topic' partition 0 error: new GetOwnership request needed for owner 2025-03-31T16:21:12.151997Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvError Cookie 1, Error new GetOwnership request needed for owner 2025-03-31T16:21:12.152043Z node 1 :PERSQUEUE DEBUG: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: new GetOwnership request needed for owner 2025-03-31T16:21:12.152428Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:198:2208], now have 1 active actors on pipe 2025-03-31T16:21:12.152481Z node 1 :PERSQUEUE DEBUG: 
Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2025-03-31T16:21:12.152516Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2025-03-31T16:21:12.152614Z node 1 :PERSQUEUE INFO: new Cookie default|8e353fd9-150480c2-a3e30b27-83e9630d_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-31T16:21:12.152735Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2025-03-31T16:21:12.152833Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-03-31T16:21:12.153156Z node 1 :PERSQUEUE DEBUG: [ ... eGenerations: 188 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 188 Important: false } Consumers { Name: "user1" Generation: 188 Important: false } 2025-03-31T16:23:13.235561Z node 72 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [72:185:2198] 2025-03-31T16:23:13.238504Z node 72 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [72:185:2198] 2025-03-31T16:23:13.241514Z node 72 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [72:186:2199] 2025-03-31T16:23:13.244018Z node 72 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [72:186:2199] 2025-03-31T16:23:13.254393Z node 72 :PERSQUEUE INFO: new Cookie default|942d8eb4-650a1066-9ec6eb9f-eeb8dc37_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-31T16:23:13.293709Z node 72 :PERSQUEUE INFO: new Cookie default|28f89f3a-979f77ee-62f2f4fa-2d3bc06_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [72:107:2139] sender: [72:280:2057] recipient: [72:99:2134] Leader for TabletID 72057594037927937 is [72:107:2139] sender: [72:283:2057] recipient: [72:14:2061] Leader for TabletID 72057594037927937 is [72:107:2139] sender: [72:284:2057] recipient: [72:282:2279] Leader for TabletID 72057594037927937 is [72:285:2280] sender: [72:286:2057] recipient: [72:282:2279] 2025-03-31T16:23:13.341838Z node 72 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:23:13.341921Z node 72 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-31T16:23:13.342635Z node 72 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [72:334:2321] 2025-03-31T16:23:13.345652Z node 72 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [72:335:2322] 2025-03-31T16:23:13.355953Z node 72 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-31T16:23:13.356043Z node 72 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [72:335:2322] 2025-03-31T16:23:13.358383Z node 72 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2025-03-31T16:23:13.358468Z node 72 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [72:334:2321] Leader for TabletID 72057594037927937 is [72:285:2280] sender: [72:362:2057] recipient: [72:14:2061] 2025-03-31T16:23:13.361459Z node 72 :PERSQUEUE INFO: new Cookie default|49c057cb-2fdd218c-f9ca8405-802903ec_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 100 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [72:177:2192] 2025-03-31T16:23:13.381804Z node 72 :PERSQUEUE INFO: Reading Timestamp failed for offset 50 ( 50 ) Status: 1 ErrorCode: OK PartitionResponse { CmdReadResult { MaxOffset: 104 Result { Offset: 100 Data: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" SourceId: "sourceid2" SeqNo: 1 WriteTimestampMS: 172800426 CreateTimestampMS: 100 UncompressedSize: 0 PartitionKey: "" ExplicitHash: "" } BlobsCachedSize: 1064 SizeLag: 11563 RealReadOffset: 50 WaitQuotaTimeMs: 0 ReadFromTimestampMs: 0 SizeEstimate: 1086 LastOffset: 100 EndOffset: 104 StartOffset: 1 } } Leader for TabletID 72057594037927937 is [0:0:0] sender: [73:103:2057] recipient: [73:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [73:103:2057] recipient: [73:101:2135] Leader for TabletID 72057594037927937 is [73:107:2139] sender: [73:108:2057] recipient: [73:101:2135] 2025-03-31T16:23:13.894611Z node 73 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:23:13.894700Z node 73 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [73:149:2057] recipient: [73:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [73:149:2057] recipient: [73:147:2170] Leader for TabletID 72057594037927938 is [73:153:2174] sender: [73:154:2057] recipient: [73:147:2170] Leader for TabletID 72057594037927937 is [73:107:2139] sender: [73:179:2057] recipient: [73:14:2061] 2025-03-31T16:23:13.919264Z node 73 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:23:13.920346Z node 73 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 189 actor [73:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 10 MaxSizeInPartition: 104857600 
LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 189 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 189 ReadRuleGenerations: 189 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 189 Important: false } Consumers { Name: "user1" Generation: 189 Important: false } 2025-03-31T16:23:13.921182Z node 73 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [73:185:2198] 2025-03-31T16:23:13.924387Z node 73 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [73:185:2198] 2025-03-31T16:23:13.927471Z node 73 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [73:186:2199] 2025-03-31T16:23:13.929816Z node 73 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [73:186:2199] 2025-03-31T16:23:13.941015Z node 73 :PERSQUEUE INFO: new Cookie default|908d61ee-b0571b5e-f15888a2-87000ab2_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-31T16:23:13.971292Z node 73 :PERSQUEUE INFO: new Cookie default|a92ba31a-fed6895b-e4930bf2-fd29c9f7_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [73:107:2139] sender: [73:280:2057] recipient: [73:99:2134] Leader for TabletID 72057594037927937 is [73:107:2139] sender: [73:283:2057] recipient: [73:14:2061] Leader for TabletID 72057594037927937 is [73:107:2139] sender: [73:284:2057] recipient: [73:282:2279] Leader for TabletID 72057594037927937 is [73:285:2280] sender: [73:286:2057] recipient: [73:282:2279] 2025-03-31T16:23:14.020935Z node 73 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:23:14.021010Z node 73 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-31T16:23:14.021912Z node 73 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [73:334:2321] 2025-03-31T16:23:14.025176Z node 73 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [73:335:2322] 2025-03-31T16:23:14.037867Z node 73 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-31T16:23:14.037940Z node 73 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [73:335:2322] 2025-03-31T16:23:14.039709Z node 73 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2025-03-31T16:23:14.039796Z node 73 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [73:334:2321] Leader for TabletID 72057594037927937 is [73:285:2280] sender: [73:364:2057] recipient: [73:14:2061] 2025-03-31T16:23:14.042714Z node 73 :PERSQUEUE INFO: new Cookie default|ff6bc36b-185882cc-c9150304-7bc090b_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 100 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [73:177:2192] 2025-03-31T16:23:14.063077Z node 73 :PERSQUEUE INFO: Reading Timestamp failed for offset 50 ( 50 ) Status: 1 ErrorCode: OK PartitionResponse { CmdReadResult { MaxOffset: 104 Result { Offset: 100 Data: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" SourceId: "sourceid2" SeqNo: 1 WriteTimestampMS: 172800426 CreateTimestampMS: 100 UncompressedSize: 0 PartitionKey: "" ExplicitHash: "" } BlobsCachedSize: 1064 SizeLag: 11563 RealReadOffset: 50 WaitQuotaTimeMs: 0 ReadFromTimestampMs: 0 SizeEstimate: 1086 LastOffset: 100 EndOffset: 104 StartOffset: 1 } } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::TestPQCacheSizeManagement [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:108:2057] recipient: [1:101:2135] 2025-03-31T16:21:09.612628Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:09.612710Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927938 is [1:153:2174] sender: [1:154:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:179:2057] recipient: [1:14:2061] 2025-03-31T16:21:09.630626Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:09.648579Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:177:2192] txId 
12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "aaa" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "aaa" Generation: 1 Important: true } 2025-03-31T16:21:09.649571Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:185:2198] 2025-03-31T16:21:09.651661Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:185:2198] 2025-03-31T16:21:09.654250Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:186:2199] 2025-03-31T16:21:09.655688Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:186:2199] 2025-03-31T16:21:09.666214Z node 1 :PERSQUEUE INFO: new Cookie default|a4f4d8d5-1feb7271-171347e0-ec18ac31_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 1 } Cookie: 123 } via pipe: [1:177:2192] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:103:2057] recipient: [2:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:103:2057] recipient: [2:101:2135] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:108:2057] recipient: [2:101:2135] 2025-03-31T16:21:10.061719Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:10.061793Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:149:2057] recipient: [2:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:149:2057] recipient: [2:147:2170] Leader for TabletID 72057594037927938 is [2:153:2174] sender: [2:154:2057] recipient: [2:147:2170] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:179:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:107:2139]) on event NKikimr::TEvPersQueue::TEvUpdateConfigBuilder ! Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:181:2057] recipient: [2:99:2134] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:184:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:185:2057] recipient: [2:183:2195] Leader for TabletID 72057594037927937 is [2:186:2196] sender: [2:187:2057] recipient: [2:183:2195] 2025-03-31T16:21:10.101962Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:10.102037Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info !Reboot 72057594037927937 (actor [2:107:2139]) rebooted! 
!Reboot 72057594037927937 (actor [2:107:2139]) tablet resolver refreshed! new actor is[2:186:2196] Leader for TabletID 72057594037927937 is [2:186:2196] sender: [2:260:2057] recipient: [2:14:2061] 2025-03-31T16:21:11.693872Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:11.694812Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 2 actor [2:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "aaa" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 ReadRuleGenerations: 2 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } Consumers { Name: "aaa" Generation: 2 Important: true } 2025-03-31T16:21:11.695847Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:266:2258] 2025-03-31T16:21:11.698280Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [2:266:2258] 2025-03-31T16:21:11.700828Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [2:267:2259] 2025-03-31T16:21:11.702637Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [2:267:2259] 2025-03-31T16:21:11.716151Z node 2 :PERSQUEUE INFO: new Cookie default|a4ab0c5c-8cdd547c-3367fe6-7141e46d_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 1 } Cookie: 123 } via pipe: [2:177:2192] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:103:2057] recipient: [3:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:103:2057] recipient: [3:101:2135] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:108:2057] recipient: [3:101:2135] 2025-03-31T16:21:12.078665Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:12.078745Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:149:2057] recipient: [3:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:149:2057] recipient: [3:147:2170] Leader for TabletID 72057594037927938 is [3:153:2174] sender: [3:154:2057] recipient: [3:147:2170] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:179:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:107:2139]) on event NKikimr::TEvKeyValue::TEvRequest ! 
Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:181:2057] recipient: [3:99:2134] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:184:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:185:2057] recipient: [3:183:2195] Leader for TabletID 72057594037927937 is [3:186:2196] sender: [3:187:2057] recipient: [3:183:2195] 2025-03-31T16:21:12.125529Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:12.125604Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info !Reboot 72057594037927937 (actor [3:107:2139]) rebooted! !Reboot 72057594037927937 (actor [3:107:2139]) tablet resolver refreshed! new actor is[3:186:2196] Leader for TabletID 72057594037927937 is [3:186:2196] sender: [3:260:2057] recipient: [3:14:2061] 2025-03-31T16:21:13.705038Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:13.705805Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 3 actor [3:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "aaa" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 3 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 3 ReadRuleGenerations: 3 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 3 Important: false } Consumers { Name: "aaa" Generation: 3 Important: true } 2025-03-31T16:21:13.706668Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [3:266:2258] 2025-03-31T16:21:13.709113Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [3:266:2258] 2025-03-31T16:21:13.711398Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [3:267:2259] 2025-03-31T16:21:13.713122Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [3:267:2259] 2025-03-31T16:21:13.724787Z node 3 :PERSQUEUE INFO: new Cookie default|68b54405-7d4e584e-7776d5a6-f421a5d2_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 1 } Cookie: 123 } via pipe: [3:177:2192] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:103:2057] recipient: [4:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:103:2057] recipient: [4:101:2135] Leader for TabletID 72057594037927937 is [4:107:2139] sender: [4:108:2057] recipient: [4:101:2135] 2025-03-31T16:21:14.084470Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:14.084547Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [4:149:2057] 
recipient: [4:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [4:149:2057] recipient: [4:147:2170] Leader for TabletID 72057594037927938 is [4:153:2174] sender: [4:154:2057] recipient: [4:147:2170] Leader for TabletID 72057594037927937 is ... node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [79:658:2551] 2025-03-31T16:23:13.489358Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [79:659:2552] 2025-03-31T16:23:13.499917Z node 79 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-31T16:23:13.499988Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 7 [79:659:2552] 2025-03-31T16:23:13.518132Z node 79 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-31T16:23:13.518237Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 7 [79:658:2551] 2025-03-31T16:23:13.558302Z node 79 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. Tablet '72057594037927937' partition 0 offset 0 partno 0 count 7 parts 16 size 8364507 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [79:177:2192] Leader for TabletID 72057594037927937 is [79:601:2502] sender: [79:689:2057] recipient: [79:14:2061] Leader for TabletID 72057594037927937 is [79:601:2502] sender: [79:692:2057] recipient: [79:99:2134] Leader for TabletID 72057594037927937 is [79:601:2502] sender: [79:695:2057] recipient: [79:694:2572] Leader for TabletID 72057594037927937 is [79:601:2502] sender: [79:696:2057] recipient: [79:14:2061] Leader for TabletID 72057594037927937 is [79:697:2573] sender: [79:698:2057] recipient: [79:694:2572] 2025-03-31T16:23:13.617041Z node 79 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:23:13.617110Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-31T16:23:13.617739Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [79:756:2624] 2025-03-31T16:23:13.620327Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [79:757:2625] 2025-03-31T16:23:13.637890Z node 79 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-31T16:23:13.637969Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 8 [79:757:2625] 2025-03-31T16:23:13.648539Z node 79 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-31T16:23:13.648631Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 8 [79:756:2624] 2025-03-31T16:23:13.680706Z node 79 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. 
Tablet '72057594037927937' partition 0 offset 0 partno 0 count 7 parts 16 size 8364507 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [79:177:2192] Leader for TabletID 72057594037927937 is [79:697:2573] sender: [79:789:2057] recipient: [79:14:2061] Leader for TabletID 72057594037927937 is [79:697:2573] sender: [79:792:2057] recipient: [79:99:2134] Leader for TabletID 72057594037927937 is [79:697:2573] sender: [79:795:2057] recipient: [79:14:2061] Leader for TabletID 72057594037927937 is [79:697:2573] sender: [79:796:2057] recipient: [79:794:2647] Leader for TabletID 72057594037927937 is [79:797:2648] sender: [79:798:2057] recipient: [79:794:2647] 2025-03-31T16:23:13.731418Z node 79 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:23:13.731473Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-31T16:23:13.732152Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [79:858:2701] 2025-03-31T16:23:13.734832Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [79:859:2702] 2025-03-31T16:23:13.744413Z node 79 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-31T16:23:13.744470Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 9 [79:859:2702] 2025-03-31T16:23:13.770569Z node 79 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-31T16:23:13.770643Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 9 [79:858:2701] 2025-03-31T16:23:13.801027Z node 79 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. 
Tablet '72057594037927937' partition 0 offset 0 partno 0 count 7 parts 16 size 8364507 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [79:177:2192] Leader for TabletID 72057594037927937 is [79:797:2648] sender: [79:891:2057] recipient: [79:14:2061] Leader for TabletID 72057594037927937 is [79:797:2648] sender: [79:894:2057] recipient: [79:99:2134] Leader for TabletID 72057594037927937 is [79:797:2648] sender: [79:897:2057] recipient: [79:896:2724] Leader for TabletID 72057594037927937 is [79:797:2648] sender: [79:898:2057] recipient: [79:14:2061] Leader for TabletID 72057594037927937 is [79:899:2725] sender: [79:900:2057] recipient: [79:896:2724] 2025-03-31T16:23:13.856593Z node 79 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:23:13.856661Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-31T16:23:13.857487Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [79:962:2780] 2025-03-31T16:23:13.860766Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [79:963:2781] 2025-03-31T16:23:13.872739Z node 79 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-31T16:23:13.872818Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 10 [79:963:2781] 2025-03-31T16:23:13.895831Z node 79 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-31T16:23:13.895935Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 10 [79:962:2780] 2025-03-31T16:23:13.939109Z node 79 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. 
Tablet '72057594037927937' partition 0 offset 0 partno 0 count 7 parts 16 size 8364507 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [79:177:2192] Leader for TabletID 72057594037927937 is [79:899:2725] sender: [79:993:2057] recipient: [79:14:2061] Leader for TabletID 72057594037927937 is [79:899:2725] sender: [79:996:2057] recipient: [79:99:2134] Leader for TabletID 72057594037927937 is [79:899:2725] sender: [79:999:2057] recipient: [79:14:2061] Leader for TabletID 72057594037927937 is [79:899:2725] sender: [79:1000:2057] recipient: [79:998:2801] Leader for TabletID 72057594037927937 is [79:1001:2802] sender: [79:1002:2057] recipient: [79:998:2801] 2025-03-31T16:23:14.003145Z node 79 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:23:14.003213Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-31T16:23:14.003939Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [79:1066:2859] 2025-03-31T16:23:14.006809Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [79:1067:2860] 2025-03-31T16:23:14.017607Z node 79 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-31T16:23:14.017682Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 11 [79:1067:2860] 2025-03-31T16:23:14.037062Z node 79 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-31T16:23:14.037154Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 11 [79:1066:2859] 2025-03-31T16:23:14.065540Z node 79 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. 
Tablet '72057594037927937' partition 0 offset 0 partno 0 count 7 parts 16 size 8364507 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [79:177:2192] Leader for TabletID 72057594037927937 is [79:1001:2802] sender: [79:1097:2057] recipient: [79:14:2061] Leader for TabletID 72057594037927937 is [79:1001:2802] sender: [79:1100:2057] recipient: [79:99:2134] Leader for TabletID 72057594037927937 is [79:1001:2802] sender: [79:1103:2057] recipient: [79:14:2061] Leader for TabletID 72057594037927937 is [79:1001:2802] sender: [79:1104:2057] recipient: [79:1102:2880] Leader for TabletID 72057594037927937 is [79:1105:2881] sender: [79:1106:2057] recipient: [79:1102:2880] 2025-03-31T16:23:14.132693Z node 79 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:23:14.132765Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-31T16:23:14.133500Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [79:1172:2940] 2025-03-31T16:23:14.136391Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [79:1173:2941] 2025-03-31T16:23:14.148942Z node 79 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-31T16:23:14.149022Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 12 [79:1173:2941] 2025-03-31T16:23:14.184568Z node 79 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-31T16:23:14.184681Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 12 [79:1172:2940] 2025-03-31T16:23:14.217630Z node 79 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. 
Tablet '72057594037927937' partition 0 offset 0 partno 0 count 7 parts 16 size 8364507
>> TColumnShardTestSchema::RebootOneColdTier
>> TPQTest::TestPartitionedBlobFails [GOOD]
>> TPQTest::TestReadAndDeleteConsumer
>> TCdcStreamWithInitialScanTests::InitialScanDisabled [GOOD]
>> TCdcStreamWithInitialScanTests::InitialScanProgress
>> TPersQueueCommonTest::TestWriteWithRateLimiterWithBlobsRateLimit [GOOD]
>> TPersQueueCommonTest::TestWriteWithRateLimiterWithUserPayloadRateLimit
>> TSchemeShardLoginTest::ChangeAccountLockoutParameters [GOOD]
>> TSchemeShardLoginTest::CheckThatLockedOutParametersIsRestoredFromLocalDb
>> TCdcStreamTests::CopyTableShouldNotCopyStream [GOOD]
>> TCdcStreamTests::MoveTableShouldFail
|91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest
|91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest
>> TColumnShardTestSchema::InternalTTL
>> TCdcStreamWithInitialScanTests::InitialScanProgress [GOOD]
>> TCdcStreamWithInitialScanTests::WithoutPqTransactions
>> YdbYqlClient::TableKeyRangesSinglePartition [GOOD]
>> DataStreams::TestGetRecordsWithBigSeqno [GOOD]
>> TCdcStreamTests::MoveTableShouldFail [GOOD]
>> TCdcStreamTests::CheckSchemeLimits
>> KqpPg::ValuesInsert-useSink [GOOD]
>> PgCatalog::PgType
>> TColumnShardTestSchema::HotTiers
>> TColumnShardTestSchema::ExternalTTL_Types
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:22:34.054963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:22:34.139565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:11116 2025-03-31T16:22:34.337605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:22:34.764127Z node 1 :PERSQUEUE ERROR: [PQ: 72075186224037888, Partition: 0, State: StateIdle] reading from too big offset - topic stream_TestGetRecordsStreamWithSingleShard partition 0 client $without_consumer EndOffset 30 offset 100000 2025-03-31T16:22:37.542703Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488009867943339505:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:22:37.542872Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001a18/r3tmp/tmp0cpB8M/pdisk_1.dat 2025-03-31T16:22:37.762451Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31536, node 4 2025-03-31T16:22:37.863876Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:22:37.863993Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:22:37.874687Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:22:37.907359Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:22:37.907383Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:22:37.907394Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:22:37.907571Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8784 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:22:38.220983Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:22:38.331989Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:8784 2025-03-31T16:22:38.553633Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:22:42.542959Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7488009867943339505:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:22:42.543055Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:22:52.740818Z node 4 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:22:52.740848Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:23:08.072020Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7488010000313868555:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:23:08.072128Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001a18/r3tmp/tmpDgHIC0/pdisk_1.dat 2025-03-31T16:23:08.220406Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:23:08.260548Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:23:08.260662Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:23:08.268174Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7208, node 7 2025-03-31T16:23:08.368421Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:23:08.368457Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:23:08.368467Z node 7 
:NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:23:08.368672Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26042 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:23:08.660402Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:23:08.726715Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:26042 2025-03-31T16:23:08.956894Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:23:12.887193Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7488010019363682674:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:23:12.887272Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001a18/r3tmp/tmpTbjzow/pdisk_1.dat 2025-03-31T16:23:13.027849Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:23:13.057219Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:23:13.057311Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:23:13.060009Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21068, node 10 2025-03-31T16:23:13.112698Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:23:13.112718Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:23:13.112724Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:23:13.112849Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29472 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:23:13.385554Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:23:13.455002Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:29472 2025-03-31T16:23:13.701393Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::TableKeyRangesSinglePartition [GOOD] Test command err: 2025-03-31T16:17:15.997563Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488008487446051224:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:15.997710Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d97/r3tmp/tmp7vl26Z/pdisk_1.dat 2025-03-31T16:17:16.320797Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4057, node 1 2025-03-31T16:17:16.349934Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:17:16.350116Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:17:16.366070Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:16.366309Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:16.403805Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:17:16.419314Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:16.419336Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:16.419343Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:16.419459Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14416 WaitRootIsUp 'Root'... 
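For orientation: the TClient::Ls request/response pairs in this log are scheme describes against the freshly started test cluster. Each response carries a serialized PathDescription with the Self entry for "Root", its children (the ".sys" system directory), and the domain description, truncated by the test harness. As a hedged illustration only, assuming the ydb Python SDK and reusing one of the localhost endpoints printed above, the same information can be fetched like this:

    import ydb

    # Endpoint and database are the ones printed in the log above; adjust as needed.
    driver = ydb.Driver(endpoint="grpc://localhost:14416", database="/Root")
    driver.wait(timeout=5)

    # Rough equivalent of the "Self" entry in the TClient::Ls response.
    entry = driver.scheme_client.describe_path("/Root")
    print(entry.name, entry.owner)  # Root root@builtin

    # Rough equivalent of the "Children" part (the ".sys" directory).
    for child in driver.scheme_client.list_directory("/Root").children:
        print(child.name)

    driver.stop()

This is a sketch under those assumptions, not the test harness's own mechanism; the in-tree tests use the C++ TClient from test_client.cpp directly.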
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:16.708763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:18.758811Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008500330954145:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:18.758903Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:19.053141Z node 1 :TX_PROXY ERROR: Actor# [1:7488008504625921470:2643] txid# 281474976710658, Access denied for badguy@builtin on path /Root, with access CreateTable 2025-03-31T16:17:19.053373Z node 1 :TX_PROXY ERROR: Actor# [1:7488008504625921470:2643] txid# 281474976710658, issues: { message: "Access denied for badguy@builtin on path /Root" issue_code: 200000 severity: 1 } 2025-03-31T16:17:19.145144Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488008504625921482:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:19.145229Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:19.161016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-31T16:17:21.034156Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488008510757682849:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:21.034220Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d97/r3tmp/tmp8dpelB/pdisk_1.dat 2025-03-31T16:17:21.186430Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:21.228933Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:21.229045Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:21.234832Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29127, node 4 2025-03-31T16:17:21.315216Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:21.315246Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:21.315253Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:21.315401Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63064 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:17:21.546821Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:17:24.184971Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488008523642585764:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:24.185116Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:24.198782Z node 4 :TX_PROXY ERROR: Actor# [4:7488008523642585785:2623] txid# 281474976715658, Access denied for badguy@builtin on path /Root, with access CreateTable 2025-03-31T16:17:24.198919Z node 4 :TX_PROXY ERROR: Actor# [4:7488008523642585785:2623] txid# 281474976715658, issues: { message: "Access denied for badguy@builtin on path /Root" issue_code: 200000 severity: 1 } 2025-03-31T16:17:24.279792Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488008523642585797:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:24.279847Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:17:24.289509Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-31T16:17:26.066441Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7488008531503155355:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:17:26.066512Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d97/r3tmp/tmpMMBaF6/pdisk_1.dat 2025-03-31T16:17:26.275810Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:17:26.318884Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:17:26.318977Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:17:26.322992Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10189, node 7 2025-03-31T16:17:26.425259Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:17:26.425285Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:17:26.425296Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:17:26.425460Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62044 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 Proce ... 976714643. Ctx: { TraceId: 01jqpg73545jn93by6xh6m4n7m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZjQwYjEyZDAtNTExN2Y5ZjktNjQzMTViNDktN2EzMzMxMjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:23:07.851570Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976714644. Ctx: { TraceId: 01jqpg73828bw6wq18323hxaye, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZjQwYjEyZDAtNTExN2Y5ZjktNjQzMTViNDktN2EzMzMxMjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:23:07.938546Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976714645. 
Ctx: { TraceId: 01jqpg73b10xs5npbm1g8ka36x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZjQwYjEyZDAtNTExN2Y5ZjktNjQzMTViNDktN2EzMzMxMjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:23:08.021589Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976714646. Ctx: { TraceId: 01jqpg73dgf4k552dr35jc603z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZjQwYjEyZDAtNTExN2Y5ZjktNjQzMTViNDktN2EzMzMxMjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:23:08.100440Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976714647. Ctx: { TraceId: 01jqpg73g51x8kpyddn7c4p3tk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZjQwYjEyZDAtNTExN2Y5ZjktNjQzMTViNDktN2EzMzMxMjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:23:08.180375Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976714648. Ctx: { TraceId: 01jqpg73jj4r7rjdqb42q7ce1x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZjQwYjEyZDAtNTExN2Y5ZjktNjQzMTViNDktN2EzMzMxMjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:23:08.283811Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976714649. Ctx: { TraceId: 01jqpg73nacts2mn079e9t7nyw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZjQwYjEyZDAtNTExN2Y5ZjktNjQzMTViNDktN2EzMzMxMjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:23:08.363530Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976714650. Ctx: { TraceId: 01jqpg73r96wdr679dtdfx6m3r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZjQwYjEyZDAtNTExN2Y5ZjktNjQzMTViNDktN2EzMzMxMjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:23:08.454479Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976714651. Ctx: { TraceId: 01jqpg73tz1dyxhh6accqynche, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZjQwYjEyZDAtNTExN2Y5ZjktNjQzMTViNDktN2EzMzMxMjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:23:08.539738Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976714652. Ctx: { TraceId: 01jqpg73xq29dnkt3pej89fe16, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZjQwYjEyZDAtNTExN2Y5ZjktNjQzMTViNDktN2EzMzMxMjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:23:08.640583Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976714653. Ctx: { TraceId: 01jqpg740h1rnr5y4d2pmhaend, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZjQwYjEyZDAtNTExN2Y5ZjktNjQzMTViNDktN2EzMzMxMjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:23:08.724351Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976714654. Ctx: { TraceId: 01jqpg743s3jrsstx89h6sv1zk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZjQwYjEyZDAtNTExN2Y5ZjktNjQzMTViNDktN2EzMzMxMjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:23:08.807763Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976714655. 
Ctx: { TraceId: 01jqpg74619myehte69tzx3pm2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZjQwYjEyZDAtNTExN2Y5ZjktNjQzMTViNDktN2EzMzMxMjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:23:08.910354Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976714656. Ctx: { TraceId: 01jqpg748pe4pr5c6xrkqtfabk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZjQwYjEyZDAtNTExN2Y5ZjktNjQzMTViNDktN2EzMzMxMjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:23:08.998226Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976714657. Ctx: { TraceId: 01jqpg74bx9t782vr67a4zq4ys, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZjQwYjEyZDAtNTExN2Y5ZjktNjQzMTViNDktN2EzMzMxMjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:23:09.088279Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976714658. Ctx: { TraceId: 01jqpg74em7rsn6zvkyx8tp9pk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZjQwYjEyZDAtNTExN2Y5ZjktNjQzMTViNDktN2EzMzMxMjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:23:09.170078Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976714659. Ctx: { TraceId: 01jqpg74he5dzdxzv9jpzxvewr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZjQwYjEyZDAtNTExN2Y5ZjktNjQzMTViNDktN2EzMzMxMjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:23:09.259015Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976714660. Ctx: { TraceId: 01jqpg74m00q7xk4tpexen9abk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZjQwYjEyZDAtNTExN2Y5ZjktNjQzMTViNDktN2EzMzMxMjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:23:09.341073Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976714661. Ctx: { TraceId: 01jqpg74pr3z6y8qgejp4z4h2a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZjQwYjEyZDAtNTExN2Y5ZjktNjQzMTViNDktN2EzMzMxMjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:23:09.423439Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976714662. Ctx: { TraceId: 01jqpg74sadn4h9n70g5dgf2xx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZjQwYjEyZDAtNTExN2Y5ZjktNjQzMTViNDktN2EzMzMxMjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:23:09.502190Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976714663. Ctx: { TraceId: 01jqpg74vz40jjb8vqdaaeydg5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZjQwYjEyZDAtNTExN2Y5ZjktNjQzMTViNDktN2EzMzMxMjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:23:09.594278Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976714664. Ctx: { TraceId: 01jqpg74yc60590qj34wmacz8e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZjQwYjEyZDAtNTExN2Y5ZjktNjQzMTViNDktN2EzMzMxMjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:23:09.687126Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976714665. 
Ctx: { TraceId: 01jqpg7518f3e39dq948v4ppxe, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZjQwYjEyZDAtNTExN2Y5ZjktNjQzMTViNDktN2EzMzMxMjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:23:09.778288Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976714666. Ctx: { TraceId: 01jqpg75444ep3hef3nawqybp7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZjQwYjEyZDAtNTExN2Y5ZjktNjQzMTViNDktN2EzMzMxMjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:23:09.860000Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976714667. Ctx: { TraceId: 01jqpg756zfpq6jdt015188f0s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZjQwYjEyZDAtNTExN2Y5ZjktNjQzMTViNDktN2EzMzMxMjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:23:09.940141Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976714668. Ctx: { TraceId: 01jqpg759h3derw0sxqtqpw40j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZjQwYjEyZDAtNTExN2Y5ZjktNjQzMTViNDktN2EzMzMxMjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:23:09.957596Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 9 2025-03-31T16:23:09.958589Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-03-31T16:23:12.114141Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7488010020253305991:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:23:12.114224Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d97/r3tmp/tmpG5JOPd/pdisk_1.dat 2025-03-31T16:23:12.230164Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:23:12.262964Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:23:12.263037Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:23:12.268378Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24541, node 10 2025-03-31T16:23:12.327620Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:23:12.327652Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:23:12.327663Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:23:12.327825Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1894 WaitRootIsUp 'Root'... 
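The KQP_EXECUTER entries above all end with "Database not set, use /Root": the test requests omit the Database field, so the executor falls back to the root database. A minimal sketch, assuming the ydb Python SDK, of issuing a query with the database carried explicitly so no fallback applies (endpoint taken from the log above; names illustrative):

    import ydb

    # The explicit database avoids the server-side fallback that the
    # executor logs as "Database not set, use /Root".
    driver = ydb.Driver(endpoint="grpc://localhost:1894", database="/Root")
    driver.wait(timeout=5)
    pool = ydb.SessionPool(driver)

    def run(session):
        # Any data query; the request now names the database explicitly.
        return session.transaction().execute("SELECT 1 AS one;", commit_tx=True)

    result_sets = pool.retry_operation_sync(run)
    print(result_sets[0].rows[0].one)  # -> 1

    pool.stop()
    driver.stop()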
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:23:12.582321Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:23:15.983417Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 >> TCdcStreamWithInitialScanTests::WithoutPqTransactions [GOOD] >> TCdcStreamWithInitialScanTests::WithPqTransactions >> TestYmqHttpProxy::TestCreateQueueWithEmptyName [GOOD] >> TestYmqHttpProxy::TestGetQueueUrlWithIAM [GOOD] >> TestKinesisHttpProxy::CreateStreamWithDifferentRetentions [GOOD] >> DataShardVolatile::DistributedWriteShardRestartBeforePlan+UseSink [GOOD] >> DataShardVolatile::DistributedWriteShardRestartBeforePlan-UseSink >> TestYmqHttpProxy::TestCreateQueueWithAllAttributes >> TCdcStreamTests::CheckSchemeLimits [GOOD] >> TCdcStreamTests::MeteringServerless >> TestYmqHttpProxy::TestGetQueueAttributes >> TestKinesisHttpProxy::CreateDeleteStream >> TColumnShardTestSchema::ColdCompactionSmoke >> TColumnShardTestSchema::RebootDrop >> TPQTest::TestReadAndDeleteConsumer [GOOD] >> TSchemeShardLoginTest::CheckThatLockedOutParametersIsRestoredFromLocalDb [GOOD] >> TCdcStreamWithInitialScanTests::WithPqTransactions [GOOD] >> TCdcStreamWithInitialScanTests::AlterStream ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::CheckThatLockedOutParametersIsRestoredFromLocalDb [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:22:52.859729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:22:52.859846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:22:52.859884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:22:52.859931Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:22:52.859977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:22:52.860024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:22:52.860086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:22:52.860173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:22:52.860487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:22:52.947433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:22:52.947494Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:22:52.955073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:22:52.955225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:22:52.955429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:22:52.962349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:22:52.962626Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:22:52.963206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:22:52.963477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:22:52.965732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:22:52.966787Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:22:52.966840Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:22:52.966949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:22:52.966990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:22:52.967028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:22:52.967194Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:22:52.974080Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:22:53.081262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:22:53.081527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 
72057594046678944 2025-03-31T16:22:53.081750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:22:53.081977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:22:53.082048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:22:53.084680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:22:53.084864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:22:53.085096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:22:53.085185Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:22:53.085224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:22:53.085255Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:22:53.087672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:22:53.087736Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:22:53.087771Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:22:53.089929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:22:53.089978Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:22:53.090014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:22:53.090076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:22:53.093787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:22:53.096208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:22:53.096392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:22:53.097533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 
2025-03-31T16:22:53.097673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:22:53.097726Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:22:53.098064Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:22:53.098121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:22:53.098287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:22:53.098365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:22:53.100941Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:22:53.100983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:22:53.101161Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:22:53.101198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:22:53.101427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:22:53.101473Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:22:53.101591Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:22:53.101624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:22:53.101662Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:22:53.101692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:22:53.101738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:22:53.101816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:22:53.101855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:22:53.101887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:22:53.101959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:22:53.101994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:22:53.102028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:22:53.104038Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 
PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:22:53.104153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:22:53.104186Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 0.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:23:19.184117Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:23:19.184178Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:23:19.184224Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:23:19.184272Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:23:19.184320Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:23:19.184390Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:23:19.184483Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:23:19.184896Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:23:19.206994Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:23:19.208613Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:23:19.208844Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:23:19.209030Z node 5 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:23:19.209079Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:23:19.209490Z node 5 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:23:19.210463Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 1, at schemeshard: 72057594046678944 2025-03-31T16:23:19.210601Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:23:19.210700Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:23:19.211097Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:23:19.211193Z node 5 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-03-31T16:23:19.211397Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:23:19.211488Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:23:19.211588Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:23:19.211699Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for 
TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:23:19.211769Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:23:19.211894Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:23:19.212195Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:23:19.212359Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:23:19.212755Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:23:19.212824Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:23:19.212986Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:23:19.213069Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:23:19.213162Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:23:19.213508Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:23:19.213631Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:23:19.213779Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:23:19.214075Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-03-31T16:23:19.214308Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-03-31T16:23:19.214383Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-03-31T16:23:19.214471Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-03-31T16:23:19.224087Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:23:19.224182Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:23:19.224548Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:23:19.224597Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:23:19.224651Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:23:19.225550Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [5:372:2341] sender: [5:429:2058] recipient: [5:15:2062] 2025-03-31T16:23:19.269092Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Execute at schemeshard: 72057594046678944 2025-03-31T16:23:19.269171Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin RotateKeys at schemeshard: 72057594046678944 2025-03-31T16:23:19.320848Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Complete, result: Error: 
"User user1 is not permitted to log in", at schemeshard: 72057594046678944 2025-03-31T16:23:19.321022Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:23:19.321070Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:23:19.321284Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:23:19.321335Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:422:2380], at schemeshard: 72057594046678944, txId: 0, path id: 1 2025-03-31T16:23:19.321976Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 0 2025-03-31T16:23:21.322915Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Execute at schemeshard: 72057594046678944 2025-03-31T16:23:21.336534Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Complete, result: Token: "eyJhbGciOiJQUzI1NiIsImtpZCI6IjMifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQzNDgxNDAxLCJpYXQiOjE3NDM0MzgyMDEsInN1YiI6InVzZXIxIn0.VG6oRTPshk3bcPcX9OIqKtfRoH6H8WuAHwaf_Eukkgy-fuWNxJiDz2A_jrYVSCYJ4fk9nGcv-vLyr9uf5_P4NOg1tx7Gyuge4sWchKIWv-SB11kS_mSEYcCLV-Xsk1hr6UloR6Co7ykcZwt2mji0hHpbhb2PXYqBmwFkkupLLn5nhR-Lcn-xCOChdGWf_vpUxNCoVgw-vI57BNtjXGPS8BkYeltzdRmKtoqNlLHX7WImAkxnNVbvVLVZHJQ2uXjG9nR24sO9WolckFtrK011dYqJlEGb2ZCoAcg-5-V_6n-4uZ9EItphWlkD4-y8hUjNlsBaVSIVwGw8PjlKiiqH_A" SanitizedToken: "eyJhbGciOiJQUzI1NiIsImtpZCI6IjMifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQzNDgxNDAxLCJpYXQiOjE3NDM0MzgyMDEsInN1YiI6InVzZXIxIn0.**" IsAdmin: true, at schemeshard: 72057594046678944 2025-03-31T16:23:21.337278Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:23:21.337558Z node 5 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 307us result status StatusSuccess 2025-03-31T16:23:21.338098Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 4 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 
1000000 SecurityState { PublicKeys { KeyId: 1 KeyDataPEM: "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA3Jxf53ToO/oI6TOOqUdA\nZS/G0aefk+PahNKWO29xA+nwRSKoV2V8XZcbph9zXKFdUUik+bz07IrEipkit8TL\nGMQiC712uLB3tPsWscsW6KmZGX0yGbs6PEFzasR+vEt1DslzTYCqXfC0UoddiReY\nO/c319i9JBgFS3QH6Eo/yRZRByKjIKrLPflbiG23sdNKqzb9KIhlzkX/iNX2AEXu\nERkcUcacJu5mu8BChKXNVavEDuC2PY0yS2/Nu++159KwyB+lkNJazNJ8aTMANARH\nuEG65V0hIiyMMw924u6rJI3tZrW+9m0PQQiftBVGItRHIP+sHQOmdrMhV3XJ1P29\nywIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1743524596936 } PublicKeys { KeyId: 2 KeyDataPEM: "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuhwxKKTHPMHn3BvsXrSn\nG51AgGlrBQGCNH6aT8UaW8KU/jtyCP9qy8iqRPOtawCrrM3fZy5qlA2J3dLxICGP\nIhXdLhYkpmHjNlVGBZfZBgbdUpjyTu3j+GhTB5x8lQaI95TqYQswKLRiZfGpb6vF\nGdYML/Iepp5wkPODJc8zs0LjKjshik3CYBNqzUsDond+lK10wjr3ffKOoRIoaoH4\naiTCoqfXyhvxCvqw6/8HfvMo3OoF1/ILlJIvD97L2NjaXUyIiC2smWEsCCU2WSiT\nwCi29zJZcLoeFjkhdgsd9SU+SPVwkN0ilMcWen9RPNkDWqwSD1mzk9xLdor3E0ev\n8wIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1743524597117 } PublicKeys { KeyId: 3 KeyDataPEM: "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0FTVPWTOQC2PFQt2C5aK\nbVNEYLyhk3xQBd9xQgYtn/iwJrFHrdRectZ5O/Q76pZ63N501FYketEvKiZGXhsS\nB8ozgHiVEqHzAlf94SERCqpliQc4Op883gkZJc3ahfGMhuVWnJopLZp6O0aroG8h\ny7ZDaYIz+sJgp+KDhJLKUkXNdXJPAB94f3ERZiT1gNM9Ejr1wkkKDuZobQyzsT9r\n32KMOEC7fQLwu18oCskXRGm1NpVzEWXfbKZP+q1RAPI1NzVDa/vY5/iHQi1ALcsq\nK8Oa6F/7W7iaLEQZlShV+GfF7gltZQh6nTftjfYKov+bxUIGRO70rfnPjH0BSkwj\naQIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1743524599317 } Sids { Name: "user1" Type: USER } Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::TestReadAndDeleteConsumer [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:108:2057] recipient: [1:101:2135] 2025-03-31T16:21:11.380950Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:11.381007Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927938 is [1:153:2174] sender: [1:154:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:179:2057] recipient: [1:14:2061] 2025-03-31T16:21:11.395952Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:11.413622Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "important_user" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { 
PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "important_user" Generation: 1 Important: true } 2025-03-31T16:21:11.414317Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:185:2198] 2025-03-31T16:21:11.416105Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:185:2198] 2025-03-31T16:21:11.425788Z node 1 :PERSQUEUE INFO: new Cookie default|b9202f99-436bf89d-a280f771-e7763dad_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user1" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [1:177:2192] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user2" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [1:177:2192] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:103:2057] recipient: [2:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:103:2057] recipient: [2:101:2135] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:108:2057] recipient: [2:101:2135] 2025-03-31T16:21:19.652035Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:19.652150Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:149:2057] recipient: [2:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:149:2057] recipient: [2:147:2170] Leader for TabletID 72057594037927938 is [2:153:2174] sender: [2:154:2057] recipient: [2:147:2170] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:179:2057] recipient: [2:14:2061] 2025-03-31T16:21:19.668728Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:19.669330Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 2 actor [2:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "important_user" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 2 ReadRuleGenerations: 2 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } Consumers { Name: "important_user" Generation: 2 Important: true } 2025-03-31T16:21:19.669898Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:185:2198] 2025-03-31T16:21:19.672306Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [2:185:2198] 2025-03-31T16:21:19.680878Z node 2 :PERSQUEUE INFO: new Cookie 
default|3e811adf-81ff0779-66fa9642-48fb82ee_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user1" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [2:177:2192] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user2" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [2:177:2192] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:103:2057] recipient: [3:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:103:2057] recipient: [3:101:2135] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:108:2057] recipient: [3:101:2135] 2025-03-31T16:21:27.753934Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:27.754011Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:149:2057] recipient: [3:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:149:2057] recipient: [3:147:2170] Leader for TabletID 72057594037927938 is [3:153:2174] sender: [3:154:2057] recipient: [3:147:2170] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:179:2057] recipient: [3:14:2061] 2025-03-31T16:21:27.775408Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:27.776004Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 3 actor [3:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "important_user" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 3 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 3 ReadRuleGenerations: 3 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 3 Important: false } Consumers { Name: "important_user" Generation: 3 Important: true } 2025-03-31T16:21:27.776563Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [3:185:2198] 2025-03-31T16:21:27.779195Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [3:185:2198] 2025-03-31T16:21:27.789748Z node 3 :PERSQUEUE INFO: new Cookie default|f7a683b5-762a0b4a-b7cfbab9-a278ac9d_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user1" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [3:177:2192] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user2" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [3:177:2192] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:103:2057] recipient: [4:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:103:2057] recipient: [4:101:2135] Leader for TabletID 72057594037927937 is [4:107:2139] 
sender: [4:108:2057] recipient: [4:101:2135] 2025-03-31T16:21:35.833668Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:35.833781Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [4:149:2057] recipient: [4:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [4:149:2057] recipient: [4:147:2170] Leader for TabletID 72057594037927938 is [4:153:2174] sender: [4:154:2057] recipient: [4:147:2170] Leader for TabletID 72057594037927937 is [4:107:2139] sender: [4:179:2057] recipient: [4:14:2061] 2025-03-31T16:21:35.855921Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:35.856584Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 4 actor [4:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "important_user" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 4 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 4 ReadRuleGenerations: 4 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 4 Important: false } Consumers { Name: "important_user" Generation: 4 Important: true } 2025-03-31T16:21:35.857324Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [4:185:2198] 2025-03-31T16:21:35.859935Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [4:185:2198] 2025-03-31T16:21:35.874007Z node 4 :PERSQUEUE INFO: new Cookie default|b344a61c-19f65d2f-312b620b-a283c548_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user1" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [4:177:2192] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user1" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [4:177:2192] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user2" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [4:177:2192] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user3 ... Config 2025-03-31T16:23:17.419633Z node 19 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-31T16:23:17.420171Z node 19 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [19:294:2285] 2025-03-31T16:23:17.443766Z node 19 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-31T16:23:17.443926Z node 19 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [19:294:2285] 2025-03-31T16:23:17.469759Z node 19 :PERSQUEUE WARN: PQ Cache (L2). 
Same blob insertion. Tablet '72057594037927937' partition 0 offset 0 partno 0 count 81 parts 0 size 8296398 Leader for TabletID 72057594037927937 is [19:245:2244] sender: [19:317:2057] recipient: [19:14:2061] 2025-03-31T16:23:17.480272Z node 19 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:23:17.486069Z node 19 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1001 actor [19:314:2298] txId 42 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 100 MaxSizeInPartition: 104857600 LifetimeSeconds: 172800 ImportantClientId: "user1" ImportantClientId: "user2" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 Version: 1001 LocalDC: true Topic: "topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 1000 AllPartitions { PartitionId: 0 } Consumers { Name: "user2" Generation: 1000 Important: true } Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:103:2057] recipient: [20:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:103:2057] recipient: [20:101:2135] Leader for TabletID 72057594037927937 is [20:107:2139] sender: [20:108:2057] recipient: [20:101:2135] 2025-03-31T16:23:18.070137Z node 20 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:23:18.070234Z node 20 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [20:149:2057] recipient: [20:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [20:149:2057] recipient: [20:147:2170] Leader for TabletID 72057594037927938 is [20:153:2174] sender: [20:154:2057] recipient: [20:147:2170] Leader for TabletID 72057594037927937 is [20:107:2139] sender: [20:179:2057] recipient: [20:14:2061] 2025-03-31T16:23:18.097889Z node 20 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:23:18.098811Z node 20 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1002 actor [20:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 100 MaxSizeInPartition: 104857600 LifetimeSeconds: 172800 ImportantClientId: "user1" ImportantClientId: "user2" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1002 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 1002 ReadRuleGenerations: 1002 ReadRuleGenerations: 1002 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1002 Important: false } Consumers { Name: "user1" Generation: 1002 Important: true } Consumers { Name: "user2" Generation: 1002 Important: true } 2025-03-31T16:23:18.099672Z node 20 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [20:185:2198] 2025-03-31T16:23:18.103380Z node 20 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [20:185:2198] 2025-03-31T16:23:18.119951Z node 20 :PERSQUEUE INFO: new Cookie default|cfbacc04-bb956e0-783e506f-11184647_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 
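Aside: the CmdRead requests repeated in the dumps above all use Offset: 0, Count: 2147483647, Bytes: 2147483647, i.e. INT32_MAX for both caps, which means "read the whole partition and let the tablet enforce its own limits". A minimal C++ sketch of that request shape, assuming hypothetical TCmdRead/TPartitionRequest stand-ins (not the actual YDB protobuf types; only the field values mirror the log):

// Minimal sketch of the "read everything" request shape seen in the log above.
// TCmdRead and TPartitionRequest are hypothetical stand-ins, not the actual
// YDB protobuf types; only the field values mirror the log records.
#include <cstdint>
#include <iostream>
#include <limits>
#include <string>

struct TCmdRead {
    std::string ClientId;   // consumer name, e.g. "user1"
    std::string SessionId;  // empty for a one-shot read
    uint64_t Offset = 0;    // start from the beginning of the partition
    int32_t Count = 0;      // max messages to return
    int32_t Bytes = 0;      // max payload bytes to return
};

struct TPartitionRequest {
    uint32_t Partition = 0;
    TCmdRead CmdRead;
    uint64_t Cookie = 0;    // echoed back so the sender can match the reply
};

int main() {
    // Both limits are INT32_MAX (2147483647), so the effective cap is whatever
    // the tablet enforces, not the request itself.
    TPartitionRequest req;
    req.Partition = 0;
    req.CmdRead = {"user1", "", 0,
                   std::numeric_limits<int32_t>::max(),
                   std::numeric_limits<int32_t>::max()};
    req.Cookie = 123;

    std::cout << "PartitionRequest { Partition: " << req.Partition
              << " CmdRead { ClientId: \"" << req.CmdRead.ClientId
              << "\" Offset: " << req.CmdRead.Offset
              << " Count: " << req.CmdRead.Count
              << " Bytes: " << req.CmdRead.Bytes
              << " } Cookie: " << req.Cookie << " }\n";
}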
2025-03-31T16:23:19.202721Z node 20 :PERSQUEUE NOTICE: Have to remove new data from cache. Topic rt3.dc1--asdfgs--topic, tablet id72057594037927937, cookie 0 2025-03-31T16:23:19.261587Z node 20 :PERSQUEUE NOTICE: Have to remove new data from cache. Topic rt3.dc1--asdfgs--topic, tablet id72057594037927937, cookie 0 Leader for TabletID 72057594037927937 is [20:107:2139] sender: [20:240:2057] recipient: [20:99:2134] Leader for TabletID 72057594037927937 is [20:107:2139] sender: [20:242:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [20:107:2139] sender: [20:244:2057] recipient: [20:243:2243] Leader for TabletID 72057594037927937 is [20:245:2244] sender: [20:246:2057] recipient: [20:243:2243] 2025-03-31T16:23:19.316494Z node 20 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:23:19.316583Z node 20 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-31T16:23:19.317381Z node 20 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [20:294:2285] 2025-03-31T16:23:19.346842Z node 20 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-31T16:23:19.347005Z node 20 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [20:294:2285] 2025-03-31T16:23:19.375772Z node 20 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. Tablet '72057594037927937' partition 0 offset 0 partno 0 count 81 parts 0 size 8296398 Leader for TabletID 72057594037927937 is [20:245:2244] sender: [20:317:2057] recipient: [20:14:2061] 2025-03-31T16:23:19.388932Z node 20 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:23:19.393554Z node 20 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1003 actor [20:314:2298] txId 42 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 100 MaxSizeInPartition: 104857600 LifetimeSeconds: 172800 ImportantClientId: "user1" ImportantClientId: "user2" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 Version: 1003 LocalDC: true Topic: "topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 1002 AllPartitions { PartitionId: 0 } Consumers { Name: "user2" Generation: 1002 Important: true } Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:103:2057] recipient: [21:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:103:2057] recipient: [21:101:2135] Leader for TabletID 72057594037927937 is [21:107:2139] sender: [21:108:2057] recipient: [21:101:2135] 2025-03-31T16:23:20.031400Z node 21 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:23:20.031468Z node 21 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [21:149:2057] recipient: [21:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [21:149:2057] recipient: [21:147:2170] Leader for TabletID 72057594037927938 is [21:153:2174] sender: [21:154:2057] recipient: [21:147:2170] Leader for TabletID 72057594037927937 is [21:107:2139] sender: [21:179:2057] recipient: [21:14:2061] 2025-03-31T16:23:20.049465Z node 21 :PERSQUEUE NOTICE: [PQ: 
72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:23:20.050146Z node 21 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1004 actor [21:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 100 MaxSizeInPartition: 104857600 LifetimeSeconds: 172800 ImportantClientId: "user1" ImportantClientId: "user2" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1004 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 1004 ReadRuleGenerations: 1004 ReadRuleGenerations: 1004 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1004 Important: false } Consumers { Name: "user1" Generation: 1004 Important: true } Consumers { Name: "user2" Generation: 1004 Important: true } 2025-03-31T16:23:20.050783Z node 21 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [21:185:2198] 2025-03-31T16:23:20.053739Z node 21 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [21:185:2198] 2025-03-31T16:23:20.067125Z node 21 :PERSQUEUE INFO: new Cookie default|89f3bd43-e0cb1f35-788032ac-83f7cd58_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-31T16:23:21.018156Z node 21 :PERSQUEUE NOTICE: Have to remove new data from cache. Topic rt3.dc1--asdfgs--topic, tablet id72057594037927937, cookie 0 2025-03-31T16:23:21.078831Z node 21 :PERSQUEUE NOTICE: Have to remove new data from cache. Topic rt3.dc1--asdfgs--topic, tablet id72057594037927937, cookie 0 Leader for TabletID 72057594037927937 is [21:107:2139] sender: [21:240:2057] recipient: [21:99:2134] Leader for TabletID 72057594037927937 is [21:107:2139] sender: [21:243:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [21:107:2139] sender: [21:244:2057] recipient: [21:242:2243] Leader for TabletID 72057594037927937 is [21:245:2244] sender: [21:246:2057] recipient: [21:242:2243] 2025-03-31T16:23:21.117061Z node 21 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:23:21.117125Z node 21 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-31T16:23:21.117658Z node 21 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [21:294:2285] 2025-03-31T16:23:21.144771Z node 21 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-31T16:23:21.144872Z node 21 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [21:294:2285] 2025-03-31T16:23:21.176237Z node 21 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. 
Tablet '72057594037927937' partition 0 offset 0 partno 0 count 81 parts 0 size 8296398 Leader for TabletID 72057594037927937 is [21:245:2244] sender: [21:317:2057] recipient: [21:14:2061] 2025-03-31T16:23:21.185704Z node 21 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:23:21.189736Z node 21 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1005 actor [21:314:2298] txId 42 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 100 MaxSizeInPartition: 104857600 LifetimeSeconds: 172800 ImportantClientId: "user1" ImportantClientId: "user2" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 Version: 1005 LocalDC: true Topic: "topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 1004 AllPartitions { PartitionId: 0 } Consumers { Name: "user2" Generation: 1004 Important: true } |91.0%| [TA] $(B)/ydb/core/tx/schemeshard/ut_login/test-results/unittest/{meta.json ... results_accumulator.log} |91.0%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_login/test-results/unittest/{meta.json ... results_accumulator.log} >> TCdcStreamWithInitialScanTests::AlterStream [GOOD] >> TCdcStreamWithInitialScanTests::DropStream >> TColumnShardTestSchema::OneTierExternalTtl >> TColumnShardTestSchema::RebootColdTiersWithStat >> PgCatalog::PgType [GOOD] >> PgCatalog::InformationSchema >> TCdcStreamWithInitialScanTests::DropStream [GOOD] >> TCdcStreamWithInitialScanTests::RacyAlterStreamAndRestart |91.0%| [TA] $(B)/ydb/services/ydb/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> DataStreams::TestGetRecordsWithCount [GOOD] >> DataStreams::TestInvalidRetentionCombinations >> DataShardVolatile::DistributedWriteShardRestartBeforePlan-UseSink [GOOD] >> DataShardVolatile::DistributedWriteShardRestartAfterExpectation+UseSink >> TColumnShardTestReadWrite::CompactionInGranule_PKInt32 [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime [GOOD] >> TestYmqHttpProxy::TestCreateQueueWithAllAttributes [GOOD] >> TestKinesisHttpProxy::CreateDeleteStream [GOOD] >> TestYmqHttpProxy::TestGetQueueAttributes [GOOD] >> TCdcStreamWithInitialScanTests::RacyAlterStreamAndRestart [GOOD] >> TCdcStreamWithInitialScanTests::MeteringServerless >> TestKinesisHttpProxy::CreateDeleteStreamWithConsumer >> TestYmqHttpProxy::TestDeleteQueue >> TestYmqHttpProxy::BillingRecordsForJsonApi >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-true [GOOD] |91.0%| [TA] {RESULT} $(B)/ydb/services/ydb/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime [GOOD] Test command err: 2025-03-31T16:21:27.532558Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:21:27.707967Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:21:27.733537Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:21:27.733821Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:21:27.744534Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:21:27.744784Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:21:27.745041Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:21:27.745174Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:21:27.745303Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:21:27.745449Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:21:27.745588Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:21:27.745714Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:21:27.745852Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:21:27.745982Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:21:27.746114Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:21:27.746231Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
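Aside: the TTxInitSchema block above registers a fixed sequence of normalizers (Granules, Chunks, TablesCleaner, CleanGranuleId, CleanInsertionDedup, GCCountersNormalizer, RestorePortionFromChunks, SyncPortionFromChunks, SyncMinSnapshotFromChunks, RestoreV1Chunks_V2, RestoreV2Chunks) that TTxUpdateSchema then runs strictly in order, each reporting "0 chunks found" before switching to the next. A hedged C++ sketch of that ordered startup-pass pattern; the INormalizer interface here is illustrative, not the YDB one:

// Sketch of an ordered chain of startup passes, each run once and each
// reporting what it found, mirroring the normalizer logs above.
#include <iostream>
#include <memory>
#include <string>
#include <vector>

struct INormalizer {
    virtual ~INormalizer() = default;
    virtual std::string Name() const = 0;
    virtual size_t Run() = 0;  // returns the number of chunks repaired
};

struct TNoopNormalizer : INormalizer {
    std::string ClassName;
    explicit TNoopNormalizer(std::string name) : ClassName(std::move(name)) {}
    std::string Name() const override { return ClassName; }
    size_t Run() override { return 0; }  // a healthy tablet finds nothing to fix
};

int main() {
    // Same registration order as in the TTxInitSchema log above.
    std::vector<std::unique_ptr<INormalizer>> chain;
    for (const char* name : {"Granules", "Chunks", "TablesCleaner",
                             "CleanGranuleId", "CleanInsertionDedup",
                             "GCCountersNormalizer", "RestorePortionFromChunks",
                             "SyncPortionFromChunks", "SyncMinSnapshotFromChunks",
                             "RestoreV1Chunks_V2", "RestoreV2Chunks"}) {
        chain.push_back(std::make_unique<TNoopNormalizer>(name));
    }
    for (const auto& pass : chain) {
        std::cout << "event=normalizer_finished;description=CLASS_NAME="
                  << pass->Name() << ";chunks_found=" << pass->Run() << ";\n";
    }
}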
2025-03-31T16:21:27.777447Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:21:27.777588Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:21:27.777640Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:21:27.777858Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:21:27.778006Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:21:27.778079Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:21:27.778126Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:21:27.778224Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:21:27.778285Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:21:27.778330Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:21:27.778368Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:21:27.778602Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:21:27.778689Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:21:27.778736Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:21:27.778792Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:21:27.778909Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:21:27.778978Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:21:27.779023Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:21:27.779054Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:21:27.779150Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:21:27.779193Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:21:27.779247Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:21:27.779314Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:21:27.779354Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:21:27.779383Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:21:27.779815Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=57; 2025-03-31T16:21:27.779908Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=37; 2025-03-31T16:21:27.779992Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=33; 2025-03-31T16:21:27.780106Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=46; 2025-03-31T16:21:27.780270Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:21:27.780338Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:21:27.780374Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:21:27.780595Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:21:27.780645Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:21:27.780679Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:21:27.780848Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 
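Aside: these TX_COLUMNSHARD payloads are semicolon-delimited key=value records (e.g. "fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1;"). A small C++ parser sketch; splitting on ';' and the first '=' is an assumption that holds for the records in this log, not a general YDB log grammar:

// Minimal parser for the semicolon-delimited key=value payloads above.
#include <iostream>
#include <map>
#include <sstream>
#include <string>

std::map<std::string, std::string> ParseFields(const std::string& payload) {
    std::map<std::string, std::string> fields;
    std::istringstream in(payload);
    std::string token;
    while (std::getline(in, token, ';')) {
        auto eq = token.find('=');
        if (eq == std::string::npos || token.empty()) continue;
        // Split on the FIRST '=' only, so nested values survive intact.
        fields[token.substr(0, eq)] = token.substr(eq + 1);
    }
    return fields;
}

int main() {
    const std::string line =
        "fline=abstract.cpp:62;event=normalizer_finished;"
        "description=CLASS_NAME=Granules;id=1;";
    for (const auto& [key, value] : ParseFields(line)) {
        std::cout << key << " -> " << value << "\n";
    }
    // "description=CLASS_NAME=Granules" keeps everything after the first '='
    // as the value, so the CLASS_NAME= prefix is preserved.
}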
2025-03-31T16:21:27.780907Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:21:27.780943Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-31T16:21:27.781159Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:21:27.781221Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:21:27.781261Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-31T16:21:27.781409Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T16:21:27.781457Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T16:21:27.781510Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 4Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:117:2768:0]; 2025-03-31T16:23:25.274668Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:118:2768:0]; 2025-03-31T16:23:25.274732Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:119:2768:0]; 2025-03-31T16:23:25.274797Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:120:2768:0]; 2025-03-31T16:23:25.274860Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:121:2768:0]; 2025-03-31T16:23:25.274926Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:122:2768:0]; 2025-03-31T16:23:25.274990Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:123:2768:0]; 2025-03-31T16:23:25.275056Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:124:2768:0]; 2025-03-31T16:23:25.275204Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:125:2768:0]; 2025-03-31T16:23:25.275276Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:126:2768:0]; 2025-03-31T16:23:25.275337Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:127:2768:0]; 2025-03-31T16:23:25.275402Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:128:2696:0]; 2025-03-31T16:23:25.275463Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:129:2696:0]; 2025-03-31T16:23:25.275527Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:130:2696:0]; 2025-03-31T16:23:25.275592Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:131:2696:0]; 2025-03-31T16:23:25.275652Z node 1 
:S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:132:8920:0]; 2025-03-31T16:23:25.275711Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:133:2776:0]; 2025-03-31T16:23:25.275773Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:134:2776:0]; 2025-03-31T16:23:25.275838Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:135:2776:0]; 2025-03-31T16:23:25.275903Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:136:2776:0]; 2025-03-31T16:23:25.275968Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:137:2776:0]; 2025-03-31T16:23:25.276027Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:138:2768:0]; 2025-03-31T16:23:25.276087Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:139:2768:0]; 2025-03-31T16:23:25.276147Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:140:2768:0]; 2025-03-31T16:23:25.276228Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:141:2768:0]; 2025-03-31T16:23:25.276291Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:142:2768:0]; 2025-03-31T16:23:25.276351Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:143:2768:0]; 2025-03-31T16:23:25.276409Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:144:2768:0]; 2025-03-31T16:23:25.276471Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:145:2768:0]; 2025-03-31T16:23:25.276527Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:146:2768:0]; 2025-03-31T16:23:25.276585Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:147:2768:0]; 2025-03-31T16:23:25.276641Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:148:2768:0]; 2025-03-31T16:23:25.276700Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:149:2768:0]; 2025-03-31T16:23:25.276760Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:150:2768:0]; 2025-03-31T16:23:25.276817Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:151:2768:0]; 2025-03-31T16:23:25.276878Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:152:2768:0]; 2025-03-31T16:23:25.276936Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:153:2768:0]; 2025-03-31T16:23:25.277016Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:154:2768:0]; 2025-03-31T16:23:25.277079Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:155:2768:0]; 2025-03-31T16:23:25.277142Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:156:2768:0]; 2025-03-31T16:23:25.277209Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:157:2768:0]; 2025-03-31T16:23:25.277270Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:158:2768:0]; 2025-03-31T16:23:25.277329Z node 1 :S3_WRAPPER 
DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:159:2768:0]; 2025-03-31T16:23:25.277386Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:160:2768:0]; 2025-03-31T16:23:25.277445Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:161:2768:0]; 2025-03-31T16:23:25.277507Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:162:2768:0]; 2025-03-31T16:23:25.277569Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:163:2768:0]; 2025-03-31T16:23:25.277633Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:164:2768:0]; 2025-03-31T16:23:25.277695Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:165:2768:0]; 2025-03-31T16:23:25.277756Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:166:2768:0]; 2025-03-31T16:23:25.277817Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:167:2768:0]; 2025-03-31T16:23:25.277875Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:168:2768:0]; 2025-03-31T16:23:25.277937Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:169:2768:0]; 2025-03-31T16:23:25.278002Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:170:2768:0]; 2025-03-31T16:23:25.278076Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:171:2768:0]; 2025-03-31T16:23:25.278141Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:172:2696:0]; 2025-03-31T16:23:25.278203Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:173:2696:0]; 2025-03-31T16:23:25.278259Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:174:2696:0]; 2025-03-31T16:23:25.278321Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:175:2696:0]; 2025-03-31T16:23:25.278381Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:176:8904:0]; 2025-03-31T16:23:25.864542Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 9437184 2025-03-31T16:23:25.865724Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[2] (CS::GENERAL) apply at tablet 9437184 2025-03-31T16:23:25.993536Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 4:1 Blob count: 747 2025-03-31T16:23:26.001936Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=2213112;raw_bytes=2475629;count=1;records=26059} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=7587944;raw_bytes=7088522;count=3;records=75200} inactive {blob_bytes=100101848;raw_bytes=103700153;count=42;records=1100341} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-03-31T16:23:26.358851Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=7783041e-e4c11f0-9b97a0f3-b47f3a8;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::GENERAL;success=1; 2025-03-31T16:23:26.358935Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=7783041e-e4c11f0-9b97a0f3-b47f3a8;fline=with_appended.cpp:65;portions=47,;task_id=7783041e-e4c11f0-9b97a0f3-b47f3a8; 2025-03-31T16:23:26.359812Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;task_id=7783041e-e4c11f0-9b97a0f3-b47f3a8;fline=manager.cpp:15;event=unlock;process_id=CS::GENERAL::7783041e-e4c11f0-9b97a0f3-b47f3a8; 2025-03-31T16:23:26.359909Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=7783041e-e4c11f0-9b97a0f3-b47f3a8;fline=granule.cpp:101;event=OnCompactionFinished;info=(granule:1;path_id:1;size:7587944;portions_count:47;); 2025-03-31T16:23:26.359961Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=7783041e-e4c11f0-9b97a0f3-b47f3a8;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-31T16:23:26.360035Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=7783041e-e4c11f0-9b97a0f3-b47f3a8;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:23:26.360105Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=7783041e-e4c11f0-9b97a0f3-b47f3a8;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=8; 2025-03-31T16:23:26.360178Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=7783041e-e4c11f0-9b97a0f3-b47f3a8;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=102; 2025-03-31T16:23:26.360226Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=7783041e-e4c11f0-9b97a0f3-b47f3a8;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=8;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-31T16:23:26.360286Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=7783041e-e4c11f0-9b97a0f3-b47f3a8;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-31T16:23:26.360330Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=7783041e-e4c11f0-9b97a0f3-b47f3a8;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-31T16:23:26.360418Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=7783041e-e4c11f0-9b97a0f3-b47f3a8;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.944000s; 2025-03-31T16:23:26.360472Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=7783041e-e4c11f0-9b97a0f3-b47f3a8;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-31T16:23:26.360702Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 4:1 Blob count: 747 2025-03-31T16:23:26.362504Z node 1 :TX_COLUMNSHARD DEBUG: fline=task.cpp:21;event=free_resources;task_id=39;external_task_id=7783041e-e4c11f0-9b97a0f3-b47f3a8;mem=11009198;cpu=0; 2025-03-31T16:23:26.364282Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKInt32 [GOOD] Test command err: 2025-03-31T16:21:27.532539Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:21:27.707982Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:21:27.732546Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:21:27.732808Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:21:27.744515Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:21:27.744773Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:21:27.745003Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:21:27.745112Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:21:27.745212Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:21:27.745341Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:21:27.745462Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:21:27.745585Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:21:27.745693Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:21:27.745799Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:21:27.745923Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:21:27.746032Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:21:27.776962Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:21:27.777112Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:21:27.777158Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 
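Aside: the PutObject IDs printed by the fake S3 storage throughout these dumps (e.g. [9437184:4:1:255:132:8920:0]) appear to follow the conventional TLogoBlobID layout [TabletId:Generation:Step:Channel:Cookie:BlobSize:PartId]; that field order is an assumption inferred from the values in this log (channel 255 for the tiering storage, sizes matching the 2696/2768/8920 payloads), not a verified contract. A decoding sketch under that assumption:

// Sketch decoding the blob IDs above, assuming the layout
// [TabletId:Generation:Step:Channel:Cookie:BlobSize:PartId].
#include <array>
#include <cstdint>
#include <iostream>
#include <sstream>
#include <string>

struct TBlobId {
    uint64_t TabletId = 0;
    uint32_t Generation = 0, Step = 0, Channel = 0, Cookie = 0, Size = 0, PartId = 0;
};

bool ParseBlobId(const std::string& text, TBlobId& out) {
    // Expected shape: "[9437184:4:1:255:132:8920:0]".
    if (text.size() < 2 || text.front() != '[' || text.back() != ']') return false;
    std::istringstream in(text.substr(1, text.size() - 2));
    std::array<uint64_t, 7> parts{};
    std::string field;
    for (auto& part : parts) {
        if (!std::getline(in, field, ':')) return false;
        part = std::stoull(field);  // throws on malformed input; fine for a sketch
    }
    out = {parts[0], static_cast<uint32_t>(parts[1]), static_cast<uint32_t>(parts[2]),
           static_cast<uint32_t>(parts[3]), static_cast<uint32_t>(parts[4]),
           static_cast<uint32_t>(parts[5]), static_cast<uint32_t>(parts[6])};
    return true;
}

int main() {
    TBlobId id;
    if (ParseBlobId("[9437184:4:1:255:132:8920:0]", id)) {
        std::cout << "tablet=" << id.TabletId << " gen=" << id.Generation
                  << " step=" << id.Step << " channel=" << id.Channel
                  << " cookie=" << id.Cookie << " size=" << id.Size
                  << " part=" << id.PartId << "\n";
    }
}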
2025-03-31T16:21:27.777393Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:21:27.777534Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:21:27.777601Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:21:27.777642Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:21:27.777726Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:21:27.777801Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:21:27.777853Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:21:27.777884Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:21:27.778102Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:21:27.778166Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:21:27.778202Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:21:27.778231Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:21:27.778335Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:21:27.778407Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:21:27.778465Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:21:27.778498Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:21:27.778568Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:21:27.778603Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:21:27.778634Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:21:27.778689Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:21:27.778725Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:21:27.778750Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:21:27.779169Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=45; 2025-03-31T16:21:27.779262Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=36; 2025-03-31T16:21:27.779347Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=37; 2025-03-31T16:21:27.779422Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=36; 2025-03-31T16:21:27.779589Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:21:27.779661Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:21:27.779715Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:21:27.779921Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:21:27.779976Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:21:27.780006Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:21:27.780179Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:21:27.780222Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:21:27.780251Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-31T16:21:27.780458Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:21:27.780502Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:21:27.780529Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-31T16:21:27.780648Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T16:21:27.780698Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T16:21:27.780750Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 5.217903Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:81:2696:0]; 2025-03-31T16:23:25.217967Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:82:8528:0]; 2025-03-31T16:23:25.218019Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:83:2776:0]; 2025-03-31T16:23:25.218073Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:84:2768:0]; 2025-03-31T16:23:25.218136Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:85:2768:0]; 2025-03-31T16:23:25.218203Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:86:2768:0]; 2025-03-31T16:23:25.218261Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:87:2768:0]; 2025-03-31T16:23:25.218317Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:88:2768:0]; 2025-03-31T16:23:25.218372Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:89:2768:0]; 2025-03-31T16:23:25.218424Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:90:2768:0]; 2025-03-31T16:23:25.218494Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:91:2768:0]; 2025-03-31T16:23:25.218574Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:92:2768:0]; 2025-03-31T16:23:25.218642Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:93:2768:0]; 2025-03-31T16:23:25.218696Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:94:2768:0]; 2025-03-31T16:23:25.218747Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:95:2768:0]; 2025-03-31T16:23:25.218799Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:96:2768:0]; 2025-03-31T16:23:25.218848Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:97:2768:0]; 2025-03-31T16:23:25.218930Z node 1 :S3_WRAPPER DEBUG: 
fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:98:2768:0]; 2025-03-31T16:23:25.219016Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:99:2768:0]; 2025-03-31T16:23:25.219103Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:100:2768:0]; 2025-03-31T16:23:25.219174Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:101:2768:0]; 2025-03-31T16:23:25.219238Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:102:2768:0]; 2025-03-31T16:23:25.219298Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:103:2768:0]; 2025-03-31T16:23:25.219358Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:104:2768:0]; 2025-03-31T16:23:25.219415Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:105:2768:0]; 2025-03-31T16:23:25.219482Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:106:2768:0]; 2025-03-31T16:23:25.219543Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:107:2768:0]; 2025-03-31T16:23:25.219609Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:108:2768:0]; 2025-03-31T16:23:25.219688Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:109:2768:0]; 2025-03-31T16:23:25.219745Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:110:2768:0]; 2025-03-31T16:23:25.219799Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:111:2768:0]; 2025-03-31T16:23:25.219850Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:112:2768:0]; 2025-03-31T16:23:25.219899Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:113:2768:0]; 2025-03-31T16:23:25.219950Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:114:2768:0]; 2025-03-31T16:23:25.220002Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:115:2768:0]; 2025-03-31T16:23:25.220071Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:116:2768:0]; 2025-03-31T16:23:25.220153Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:117:2768:0]; 2025-03-31T16:23:25.220235Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:118:2768:0]; 2025-03-31T16:23:25.220300Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:119:2696:0]; 2025-03-31T16:23:25.220351Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:120:2696:0]; 2025-03-31T16:23:25.220405Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:121:2696:0]; 2025-03-31T16:23:25.220456Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:122:2696:0]; 2025-03-31T16:23:25.220507Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:123:8528:0]; 2025-03-31T16:23:25.220571Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:124:2768:0]; 2025-03-31T16:23:25.220639Z node 1 :S3_WRAPPER DEBUG: 
fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:125:2768:0]; 2025-03-31T16:23:25.220725Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:126:2768:0]; 2025-03-31T16:23:25.220782Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:127:2768:0]; 2025-03-31T16:23:25.220838Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:128:2768:0]; 2025-03-31T16:23:25.220888Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:129:2768:0]; 2025-03-31T16:23:25.220941Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:130:2768:0]; 2025-03-31T16:23:25.221009Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:131:2768:0]; 2025-03-31T16:23:25.221067Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:132:2768:0]; 2025-03-31T16:23:25.221126Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:133:2768:0]; 2025-03-31T16:23:25.221184Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:134:2768:0]; 2025-03-31T16:23:25.221235Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:135:2768:0]; 2025-03-31T16:23:25.221283Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:136:2768:0]; 2025-03-31T16:23:25.221339Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:137:2768:0]; 2025-03-31T16:23:25.221387Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:138:2768:0]; 2025-03-31T16:23:25.221439Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:139:2768:0]; 2025-03-31T16:23:25.221501Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:140:2768:0]; 2025-03-31T16:23:25.221568Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:141:2768:0]; 2025-03-31T16:23:25.221623Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:142:2768:0]; 2025-03-31T16:23:25.221674Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:143:2768:0]; 2025-03-31T16:23:25.221751Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:144:2768:0]; 2025-03-31T16:23:25.221811Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:145:2768:0]; 2025-03-31T16:23:25.221863Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:146:2768:0]; 2025-03-31T16:23:25.221913Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:147:2768:0]; 2025-03-31T16:23:25.221973Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:148:2768:0]; 2025-03-31T16:23:25.222032Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:149:2768:0]; 2025-03-31T16:23:25.222085Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:150:2768:0]; 2025-03-31T16:23:25.222171Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:151:2768:0]; 2025-03-31T16:23:25.222220Z node 1 :S3_WRAPPER DEBUG: 
fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:152:2768:0]; 2025-03-31T16:23:25.222279Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:153:2768:0]; 2025-03-31T16:23:25.222329Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:154:2768:0]; 2025-03-31T16:23:25.222377Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:155:2768:0]; 2025-03-31T16:23:25.222439Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:156:2768:0]; 2025-03-31T16:23:25.222498Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:157:2768:0]; 2025-03-31T16:23:25.222553Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:158:2768:0]; 2025-03-31T16:23:25.222601Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:159:2768:0]; 2025-03-31T16:23:25.222653Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:160:2696:0]; 2025-03-31T16:23:25.222707Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:161:2696:0]; 2025-03-31T16:23:25.222755Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:162:2696:0]; 2025-03-31T16:23:25.222805Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:163:2696:0]; 2025-03-31T16:23:25.222867Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:164:8528:0]; 2025-03-31T16:23:25.711701Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 9437184 2025-03-31T16:23:25.712520Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[4] (CS::GENERAL) apply at tablet 9437184 2025-03-31T16:23:25.841667Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 4:2 Blob count: 692 2025-03-31T16:23:25.849258Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=2078720;raw_bytes=2324579;count=1;records=24469} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=7587944;raw_bytes=7088522;count=3;records=75200} inactive {blob_bytes=100419184;raw_bytes=104021253;count=42;records=1103721} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:23:03.805949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:23:03.806064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue 
configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:23:03.806110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:23:03.806148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:23:03.807102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:23:03.807171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:23:03.807248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:23:03.807418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:23:03.808896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:23:03.903444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:23:03.903515Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:23:03.909408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:23:03.909526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:23:03.909694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:23:03.915431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:23:03.915687Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:23:03.916419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:23:03.916645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:23:03.927049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:23:03.934716Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:23:03.934806Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:23:03.934932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:23:03.934973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:23:03.935025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:23:03.935218Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:23:03.944105Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:23:04.063854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { 
Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:23:04.064105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:23:04.064371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:23:04.064675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:23:04.064735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:23:04.067455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:23:04.067637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:23:04.067856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:23:04.067925Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:23:04.067961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:23:04.067995Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:23:04.069840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:23:04.069908Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:23:04.069950Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:23:04.071819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:23:04.071869Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:23:04.071902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:23:04.071953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:23:04.080620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:23:04.082275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:23:04.082426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: 
minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:23:04.083148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:23:04.083261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:23:04.083306Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:23:04.083504Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:23:04.083550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:23:04.083717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:23:04.083823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:23:04.085684Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:23:04.085721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:23:04.085854Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:23:04.085899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:23:04.086092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:23:04.086142Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:23:04.086238Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:23:04.086274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:23:04.086316Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:23:04.086348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:23:04.086389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:23:04.086438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:23:04.086474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:23:04.086505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:23:04.086603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:23:04.086648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:23:04.086697Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:23:04.094634Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:23:04.094778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:23:04.094822Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T16:23:28.153521Z node 28 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:23:28.153561Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [28:202:2204], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-03-31T16:23:28.153604Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [28:202:2204], at schemeshard: 72057594046678944, txId: 101, path id: 2 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 2025-03-31T16:23:28.154892Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:23:28.154943Z node 28 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 ProgressState at tablet: 72057594046678944 2025-03-31T16:23:28.156050Z node 28 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T16:23:28.156144Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T16:23:28.156170Z node 28 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-03-31T16:23:28.156199Z node 28 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-03-31T16:23:28.156230Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:23:28.157235Z node 28 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T16:23:28.157344Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T16:23:28.157372Z node 28 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-03-31T16:23:28.157644Z node 28 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-03-31T16:23:28.157685Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-31T16:23:28.157751Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2025-03-31T16:23:28.158837Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1351 } } 2025-03-31T16:23:28.158874Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-03-31T16:23:28.158980Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1351 } } 2025-03-31T16:23:28.159052Z node 28 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1351 } } FAKE_COORDINATOR: Erasing txId 101 2025-03-31T16:23:28.160932Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 120259086579 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-03-31T16:23:28.160977Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-03-31T16:23:28.161116Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 120259086579 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-03-31T16:23:28.161150Z node 28 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-03-31T16:23:28.161215Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 304 RawX2: 120259086579 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-03-31T16:23:28.161260Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-31T16:23:28.161301Z node 28 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:23:28.161333Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-03-31T16:23:28.161364Z node 28 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 
240 2025-03-31T16:23:28.164438Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-31T16:23:28.164625Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-31T16:23:28.164808Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:23:28.164910Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:23:28.165198Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:23:28.165235Z node 28 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-03-31T16:23:28.165333Z node 28 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-31T16:23:28.165359Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T16:23:28.165390Z node 28 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-31T16:23:28.165413Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T16:23:28.165441Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2025-03-31T16:23:28.165521Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [28:333:2312] message: TxId: 101 2025-03-31T16:23:28.165560Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T16:23:28.165613Z node 28 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-03-31T16:23:28.165638Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-03-31T16:23:28.165731Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-31T16:23:28.167304Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-31T16:23:28.167366Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [28:334:2313] TestWaitNotification: OK eventTxId 101 2025-03-31T16:23:28.167850Z node 28 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLTableWithpgint8Column_UNIT_NANOSECONDS" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:23:28.168026Z node 28 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLTableWithpgint8Column_UNIT_NANOSECONDS" took 206us result status StatusSuccess 2025-03-31T16:23:28.168560Z node 28 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLTableWithpgint8Column_UNIT_NANOSECONDS" PathDescription { Self { Name: "TTLTableWithpgint8Column_UNIT_NANOSECONDS" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 
EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLTableWithpgint8Column_UNIT_NANOSECONDS" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "pgint8" TypeId: 12288 Id: 2 NotNull: false TypeInfo { PgTypeId: 20 } IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 ColumnUnit: UNIT_NANOSECONDS Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TCdcStreamWithInitialScanTests::MeteringServerless [GOOD] >> TCdcStreamWithInitialScanTests::MeteringDedicated >> YdbSdkSessionsPool::StressTestAsync10 [GOOD] >> TColumnShardTestSchema::RebootExternalTTL >> TColumnShardTestSchema::DropWriteRace |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> PgCatalog::InformationSchema [GOOD] >> PgCatalog::CheckSetConfig >> DataStreams::TestInvalidRetentionCombinations [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::StressTestAsync10 [GOOD] Test command err: 2025-03-31T16:20:41.956124Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488009372843309645:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:41.956175Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0004fc/r3tmp/tmpFcZKpz/pdisk_1.dat 2025-03-31T16:20:42.294372Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15815, node 1 2025-03-31T16:20:42.337862Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:42.337988Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:42.342368Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:20:42.376936Z 
node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:42.376960Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:42.376968Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:42.377085Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11928 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:42.608734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:46.956073Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488009372843309645:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:46.956169Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:20:57.294339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:20:57.294377Z node 1 :IMPORT WARN: Table profiles were not loaded >> TPersQueueCommonTest::TestWriteWithRateLimiterWithUserPayloadRateLimit [GOOD] |91.0%| [TA] $(B)/ydb/services/ydb/sdk_sessions_pool_ut/test-results/unittest/{meta.json ... results_accumulator.log} |91.0%| [TA] {RESULT} $(B)/ydb/services/ydb/sdk_sessions_pool_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TColumnShardTestSchema::DropWriteRace [GOOD] >> DataShardVolatile::DistributedWriteShardRestartAfterExpectation+UseSink [GOOD] >> DataShardVolatile::DistributedWriteShardRestartAfterExpectation-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestInvalidRetentionCombinations [GOOD] Test command err: 2025-03-31T16:22:32.797256Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488009849589969452:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:22:32.797420Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001a25/r3tmp/tmpQRQpuH/pdisk_1.dat 2025-03-31T16:22:33.289153Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:22:33.289279Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:22:33.293607Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:22:33.348697Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8651, node 1 2025-03-31T16:22:33.386196Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:22:33.386231Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:22:33.501478Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:22:33.501504Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:22:33.501516Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:22:33.501735Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29138 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:22:34.028554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:22:34.121796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:29138 2025-03-31T16:22:34.325469Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:22:37.530071Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488009869204934468:2083];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:22:37.530314Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001a25/r3tmp/tmpo6ggU2/pdisk_1.dat 2025-03-31T16:22:37.718816Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:22:37.756143Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:22:37.756241Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:22:37.764171Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30526, node 4 2025-03-31T16:22:37.903997Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:22:37.904026Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:22:37.904034Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:22:37.904238Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17928 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:22:38.190027Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:22:38.269954Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:17928 2025-03-31T16:22:38.504860Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:22:38.775008Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-31T16:22:38.803927Z node 4 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [4:7488009873499902991:2838], for# user2@builtin, access# DescribeSchema 2025-03-31T16:22:38.813149Z node 4 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [4:7488009873499902994:2839], for# user2@builtin, access# DescribeSchema 2025-03-31T16:22:38.829294Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:22:42.670660Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7488009889243247003:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:22:42.672912Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001a25/r3tmp/tmpMBpcCv/pdisk_1.dat 2025-03-31T16:22:42.891618Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:22:42.918137Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:22:42.918244Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:22:42.921991Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2030, node 7 2025-03-31T16:22:43.007046Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:22:43.007138Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:22:43.007150Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:22:43.007343Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9478 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:22:43.316656Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:22:43.390762Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:9478 2025-03-31T16:22:43.604851Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:22:47.666566Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7488009889243247003:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:22:47.666681Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:22:57.849698Z node 7 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:22:57.849745Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:23:26.418474Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7488010078152430509:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:23:26.418661Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001a25/r3tmp/tmprFApvA/pdisk_1.dat 2025-03-31T16:23:26.603708Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:23:26.646098Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:23:26.646176Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:23:26.649476Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7422, node 10 2025-03-31T16:23:26.722031Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:23:26.722066Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 
2025-03-31T16:23:26.722078Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:23:26.722291Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15078 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:23:27.039005Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:23:27.120491Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:15078 2025-03-31T16:23:27.406245Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting...
: Error: retention hours and storage megabytes must fit one of: { hours : [0, 24], storage : [0, 0]}, { hours : [0, 168], storage : [51200, 1048576]}, provided values: hours 168, storage 10, code: 500080
: Error: retention hours and storage megabytes must fit one of: { hours : [0, 24], storage : [0, 0]}, { hours : [0, 168], storage : [51200, 1048576]}, provided values: hours 144, storage 0, code: 500080
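The two rejections above spell out the retention rule that DataStreams::TestInvalidRetentionCombinations exercises: the (hours, storage-MB) pair must land inside one of the two advertised range pairs, so (168, 10) and (144, 0) both fail. Below is a minimal C++ sketch of that check; the range constants are copied from the log, while the function name and everything else are illustrative assumptions, not YDB's actual validation code.

#include <cassert>
#include <cstdint>

// Sketch of the retention validation implied by the errors above.
// Allowed combinations, copied verbatim from the log:
//   { hours : [0, 24],  storage : [0, 0] }           -- time-based retention only
//   { hours : [0, 168], storage : [51200, 1048576] } -- storage-bounded retention
bool RetentionFits(uint64_t hours, uint64_t storageMb) {
    const bool timeBased = hours <= 24 && storageMb == 0;
    const bool storageBased = hours <= 168 && storageMb >= 51200 && storageMb <= 1048576;
    return timeBased || storageBased;  // otherwise the request fails (code: 500080 above)
}

int main() {
    assert(!RetentionFits(168, 10));   // first rejection: 10 MB is below the 51200 MB floor
    assert(!RetentionFits(144, 0));    // second rejection: 144 h exceeds the 24 h time-only cap
    assert(RetentionFits(24, 0));      // pure time-based retention is fine
    assert(RetentionFits(168, 51200)); // smallest valid storage-bounded combination
    return 0;
}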
: Error: write_speed per second in partition must have values from set {131072,524288,1048576}, got 130048, code: 500080
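The write_speed rejection above, together with its mirror on the next line (a value just over the top tier), is a plain set-membership test against three throughput tiers. A sketch under the same caveats as before; only the set {131072, 524288, 1048576} is taken from the log.

#include <cassert>
#include <cstdint>

// Sketch of the per-partition write-speed validation from the error above.
bool WriteSpeedAllowed(uint64_t bytesPerSecond) {
    return bytesPerSecond == 131072     // 128 KiB/s
        || bytesPerSecond == 524288     // 512 KiB/s
        || bytesPerSecond == 1048576;   // 1 MiB/s
}

int main() {
    assert(!WriteSpeedAllowed(130048));  // 127 KiB/s, rejected above
    assert(!WriteSpeedAllowed(1049600)); // 1025 KiB/s, rejected on the next line
    assert(WriteSpeedAllowed(524288));
    return 0;
}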
: Error: write_speed per second in partition must have values from set {131072,524288,1048576}, got 1049600, code: 500080 >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::DropWriteRace [GOOD] Test command err: 2025-03-31T16:23:30.957701Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:23:31.070550Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:23:31.088187Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:23:31.088599Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:23:31.097705Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:23:31.097942Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:23:31.098202Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:23:31.098343Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:23:31.098456Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:23:31.098585Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:23:31.098701Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:23:31.098839Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:23:31.098980Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:23:31.099142Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:23:31.099287Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:23:31.099405Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:23:31.136231Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:23:31.136488Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:23:31.136556Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:23:31.136756Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:23:31.136916Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:23:31.137058Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:23:31.137114Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:23:31.137208Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:23:31.137278Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:23:31.137324Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:23:31.137369Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:23:31.137570Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:23:31.137647Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:23:31.137711Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:23:31.137754Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:23:31.137858Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:23:31.137920Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:23:31.137978Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:23:31.138024Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:23:31.138101Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:23:31.138144Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:23:31.138183Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:23:31.138235Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:23:31.138279Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:23:31.138327Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:23:31.138758Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=49; 2025-03-31T16:23:31.138851Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=37; 2025-03-31T16:23:31.138954Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=54; 2025-03-31T16:23:31.139038Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=36; 2025-03-31T16:23:31.139339Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:23:31.139446Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:23:31.139493Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:23:31.139706Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:23:31.139757Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:23:31.139799Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:23:31.139975Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:23:31.140039Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:23:31.140078Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-31T16:23:31.140283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:23:31.140334Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:23:31.140369Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-31T16:23:31.140493Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T16:23:31.140535Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T16:23:31.140584Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
ECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=46; 2025-03-31T16:23:31.423448Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=4; 2025-03-31T16:23:31.423534Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=46; 2025-03-31T16:23:31.423574Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=5; 2025-03-31T16:23:31.423681Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=77; 2025-03-31T16:23:31.423735Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=20; 2025-03-31T16:23:31.423773Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=17; 2025-03-31T16:23:31.423798Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=3353; 2025-03-31T16:23:31.423910Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-03-31T16:23:31.423953Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-03-31T16:23:31.424028Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-03-31T16:23:31.424318Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-31T16:23:31.424362Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;fline=columnshard_impl.cpp:521;problem=Background activities cannot be started: no index at tablet; 2025-03-31T16:23:31.424609Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:23:31.424737Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-03-31T16:23:31.424801Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-03-31T16:23:31.424830Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-03-31T16:23:31.424852Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-03-31T16:23:31.424895Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-31T16:23:31.424955Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:521;problem=Background activities cannot be started: no index at tablet; 2025-03-31T16:23:31.697423Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=101;this=88923004797312;method=TTxController::StartProposeOnExecute;tx_info=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;fline=schema.h:36;event=sync_schema; 2025-03-31T16:23:31.710015Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;request_tx=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;this=88923004797312;op_tx=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;int_op_tx=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;int_this=89197881142080;fline=columnshard__propose_transaction.cpp:103;event=actual tx operator; 2025-03-31T16:23:31.710119Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;request_tx=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;this=88923004797312;op_tx=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;int_op_tx=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;int_this=89197881142080;method=TTxController::FinishProposeOnComplete;tx_id=101;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:99:2134]; 2025-03-31T16:23:31.710183Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;request_tx=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;this=88923004797312;op_tx=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;int_op_tx=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;int_this=89197881142080;method=TTxController::FinishProposeOnComplete;tx_id=101;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=101; 2025-03-31T16:23:31.710542Z node 1 :TX_COLUMNSHARD DEBUG: TTxNotifyTxCompletion.Execute at tablet 9437184 2025-03-31T16:23:31.710657Z node 1 :TX_COLUMNSHARD DEBUG: PlanStep 1000000001 at tablet 9437184, mediator 0 2025-03-31T16:23:31.710696Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[2] execute at tablet 9437184 2025-03-31T16:23:31.711004Z node 1 :TX_COLUMNSHARD DEBUG: EnsureTable for pathId: 1 ttl settings: { Version: 1 } at tablet 9437184 2025-03-31T16:23:31.716423Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=101;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=0; 2025-03-31T16:23:31.716556Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=101;fline=tables_manager.cpp:245;method=RegisterTable;path_id=1; 2025-03-31T16:23:31.716592Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=101;fline=column_engine.h:144;event=RegisterTable;path_id=1; 2025-03-31T16:23:31.722175Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=101;fline=column_engine_logs.cpp:488;event=OnTieringModified;path_id=1; 2025-03-31T16:23:31.722346Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=101;fline=tx_controller.cpp:211;event=finished_tx;tx_id=101; 2025-03-31T16:23:31.747520Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[2] complete at tablet 9437184 2025-03-31T16:23:31.747743Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6120;columns=10; 2025-03-31T16:23:31.764633Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];fline=actor.cpp:22;event=flush_writing;size=6120;count=1; 2025-03-31T16:23:31.767119Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 1 at tablet 9437184 2025-03-31T16:23:31.767499Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 1 2025-03-31T16:23:31.780005Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 1 2025-03-31T16:23:31.780188Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=4;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:23:31.806951Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;request_tx=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:2;;this=88923005124576;op_tx=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:2;;int_op_tx=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:2;;int_this=89197881219648;fline=columnshard__propose_transaction.cpp:103;event=actual tx operator; 2025-03-31T16:23:31.807068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;request_tx=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:2;;this=88923005124576;op_tx=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:2;;int_op_tx=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:2;;int_this=89197881219648;method=TTxController::FinishProposeOnComplete;tx_id=103;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:99:2134]; 2025-03-31T16:23:31.807144Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;request_tx=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:2;;this=88923005124576;op_tx=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:2;;int_op_tx=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:2;;int_this=89197881219648;method=TTxController::FinishProposeOnComplete;tx_id=103;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=103; 2025-03-31T16:23:31.807456Z node 1 :TX_COLUMNSHARD DEBUG: TTxNotifyTxCompletion.Execute at tablet 9437184 2025-03-31T16:23:31.807633Z node 1 :TX_COLUMNSHARD DEBUG: PlanStep 1000000002 at tablet 9437184, mediator 0 2025-03-31T16:23:31.807692Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[6] execute at tablet 
9437184 2025-03-31T16:23:31.807963Z node 1 :TX_COLUMNSHARD DEBUG: DropTable for pathId: 1 at tablet 9437184 2025-03-31T16:23:31.808081Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=103;fline=tx_controller.cpp:211;event=finished_tx;tx_id=103; 2025-03-31T16:23:31.821223Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[6] complete at tablet 9437184 2025-03-31T16:23:31.821374Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:23:31.821683Z node 1 :TX_COLUMNSHARD DEBUG: PlanStep 1000000003 at tablet 9437184, mediator 0 2025-03-31T16:23:31.821763Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[8] execute at tablet 9437184 2025-03-31T16:23:31.822017Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=102;fline=abstract.h:83;progress_tx_id=102;lock_id=1;broken=0; 2025-03-31T16:23:31.822149Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=102;commit_tx_id=102;commit_lock_id=1;fline=insert_table.cpp:50;event=abort_insertion;path_id=1;blob_range={ Blob: DS:0:[9437184:2:1:3:0:7080:0] Offset: 0 Size: 7080 }; 2025-03-31T16:23:31.822288Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=102;fline=tx_controller.cpp:211;event=finished_tx;tx_id=102; 2025-03-31T16:23:31.834598Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[8] complete at tablet 9437184 2025-03-31T16:23:31.834771Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=abstract.h:93;progress_tx_id=102;lock_id=1;broken=0; 2025-03-31T16:23:31.834911Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; >> TColumnShardTestSchema::HotTiersTtlWithStat |91.0%| [TA] $(B)/ydb/services/datastreams/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueCommonTest::TestWriteWithRateLimiterWithUserPayloadRateLimit [GOOD] Test command err: === Server->StartServer(false); 2025-03-31T16:23:16.604377Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010035658494969:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:23:16.604432Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:23:16.653678Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488010037335871244:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:23:16.653801Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:23:16.815001Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-31T16:23:16.823475Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000970/r3tmp/tmpDTlJ7H/pdisk_1.dat 2025-03-31T16:23:17.149669Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:23:17.150323Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:23:17.151710Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:23:17.151875Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:23:17.154029Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:23:17.161556Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:23:17.166605Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:23:17.166946Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8820, node 1 2025-03-31T16:23:17.390750Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/0l5c/000970/r3tmp/yandexTUsEYt.tmp 2025-03-31T16:23:17.390780Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/0l5c/000970/r3tmp/yandexTUsEYt.tmp 2025-03-31T16:23:17.392064Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/0l5c/000970/r3tmp/yandexTUsEYt.tmp 2025-03-31T16:23:17.392256Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:23:17.629145Z INFO: TTestServer started on Port 3433 GrpcPort 8820 TClient is connected to server localhost:3433 PQClient connected to localhost:8820 === TenantModeEnabled() = 1 === Init PQ - start server on port 8820 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:23:18.044100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-03-31T16:23:18.046388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:23:18.048445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-03-31T16:23:18.050675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-31T16:23:18.050785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:23:18.053779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-03-31T16:23:18.054928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-03-31T16:23:18.055164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:23:18.055253Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-03-31T16:23:18.055354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-03-31T16:23:18.055373Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-03-31T16:23:18.057187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:23:18.057215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2025-03-31T16:23:18.057234Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:23:18.059322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:23:18.059387Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-03-31T16:23:18.059411Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-03-31T16:23:18.061484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:23:18.061517Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:23:18.061549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-31T16:23:18.061573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-03-31T16:23:18.078111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:23:18.080318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-03-31T16:23:18.081586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-03-31T16:23:18.083421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743438198124, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-31T16:23:18.083632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743438198124 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-03-31T16:23:18.083685Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-31T16:23:18.084878Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-03-31T16:23:18.084958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-31T16:23:18.085207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-03-31T16:23:18.085291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, 
LocalPathId: 1], at schemeshard: 72057594046644480 2025-03-31T16:23:18.087298Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-03-31T16:23:18.087331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-03-31T16:23:18.087615Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-03-31T16:23:18.087645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7488010039953462926:2400], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1 2025-03-31T16:23:18.087702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:23:18.087728Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-03-31T16:23:18.087815Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-03-31T16:23:18.087841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-03-31T16:23:18.087859Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-03-31T16:23:18.087871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-03-31T16:23:18.087886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 1/1, is published: false 2025-03-31T16:23:18.087915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-03-31T16:23:18.087928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-03-31T16:23:18.087938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710657:0 2025-03-31T16:23:18.087975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 720575940466444 ... 
t = 10000 ms 2025-03-31T16:23:29.607666Z node 1 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 22 sessionId: 1236|359330c3-ccaaa33b-567cf6d3-c976561c_0 grpc read done: success: 1 data: write_request[data omitted] 2025-03-31T16:23:29.609219Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037899 (partition=0) Received event: NKikimr::NPQ::TEvPartitionWriter::TEvWriteRequest 2025-03-31T16:23:29.609723Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-03-31T16:23:29.609769Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899] got client message batch for topic 'PQ/account3/folder1/folder2/topic' partition 0 2025-03-31T16:23:29.609884Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 1 2025-03-31T16:23:29.609953Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037899 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-03-31T16:23:29.610102Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-03-31T16:23:29.610123Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899] got client message batch for topic 'PQ/account3/folder1/folder2/topic' partition 0 2025-03-31T16:23:29.610744Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899] got client PART message topic: PQ/account3/folder1/folder2/topic partition: 0 SourceId: '\0001236' SeqNo: 1 partNo : 0 messageNo: 1 size: 511961 2025-03-31T16:23:29.611279Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899] got client PART message topic: PQ/account3/folder1/folder2/topic partition: 0 SourceId: '\0001236' SeqNo: 1 partNo : 1 messageNo: 1 size: 511961 2025-03-31T16:23:29.611592Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899] got client PART message topic: PQ/account3/folder1/folder2/topic partition: 0 SourceId: '\0001236' SeqNo: 1 partNo : 2 messageNo: 1 size: 176151 2025-03-31T16:23:29.611626Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899] got client message topic: PQ/account3/folder1/folder2/topic partition: 0 SourceId: '\0001236' SeqNo: 1 partNo : 2 messageNo: 1 size 176151 offset: -1 2025-03-31T16:23:29.611769Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Send write quota request. Topic: "PQ/account3/folder1/folder2/topic". Partition: 0. Amount: 1200088. Cookie: 7 2025-03-31T16:23:30.320371Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 8] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2025-03-31T16:23:30.371737Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Got quota. Topic: "PQ/account3/folder1/folder2/topic". 
Partition: 0: Cookie: 7 2025-03-31T16:23:30.371939Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Topic 'PQ/account3/folder1/folder2/topic' partition 0 part blob processing sourceId '\0001236' seqNo 1 partNo 0 2025-03-31T16:23:30.372002Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Topic 'PQ/account3/folder1/folder2/topic' partition 0 part blob processing sourceId '\0001236' seqNo 1 partNo 1 2025-03-31T16:23:30.372028Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Topic 'PQ/account3/folder1/folder2/topic' partition 0 part blob processing sourceId '\0001236' seqNo 1 partNo 2 2025-03-31T16:23:30.383797Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Topic 'PQ/account3/folder1/folder2/topic' partition 0 part blob complete sourceId '\0001236' seqNo 1 partNo 2 FormedBlobsCount 0 NewHead: Offset 6 PartNo 0 PackedSize 1200285 count 1 nextOffset 7 batches 3 2025-03-31T16:23:30.384578Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Add new write blob: topic 'PQ/account3/folder1/folder2/topic' partition 0 compactOffset 6,1 HeadOffset 6 endOffset 6 curOffset 7 d0000000000_00000000000000000006_00000_0000000001_00002| size 1200275 WTime 1743438210384 2025-03-31T16:23:30.385586Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-31T16:23:30.385613Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-31T16:23:30.385625Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-03-31T16:23:30.385635Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-31T16:23:30.385644Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] m0000000000p1236 2025-03-31T16:23:30.385652Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] d0000000000_00000000000000000006_00000_0000000001_00002| 2025-03-31T16:23:30.385662Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] i0000000000 2025-03-31T16:23:30.385672Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-31T16:23:30.385684Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] =========================== 2025-03-31T16:23:30.385749Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-03-31T16:23:30.385803Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 6 partNo 0 count 1 size 1200275 2025-03-31T16:23:30.394084Z node 1 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 6 count 1 size 1200275 actorID [1:7488010082903138211:2558] 2025-03-31T16:23:30.394208Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 1200088 WriteNewSizeFromSupportivePartitions# 0 2025-03-31T16:23:30.394211Z node 1 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. Tablet '72075186224037899' partition 0 offset 6 partno 0 count 1 parts 2 size 1200275 2025-03-31T16:23:30.394242Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-03-31T16:23:30.394288Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Answering for message sourceid: '\0001236', Topic: 'PQ/account3/folder1/folder2/topic', Partition: 0, SeqNo: 1, partNo: 0, Offset: 6 is stored on disk 2025-03-31T16:23:30.394318Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-31T16:23:30.394342Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Answering for message sourceid: '\0001236', Topic: 'PQ/account3/folder1/folder2/topic', Partition: 0, SeqNo: 1, partNo: 1, Offset: 6 is stored on disk 2025-03-31T16:23:30.394382Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-31T16:23:30.394414Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Answering for message sourceid: '\0001236', Topic: 'PQ/account3/folder1/folder2/topic', Partition: 0, SeqNo: 1, partNo: 2, Offset: 6 is stored on disk 2025-03-31T16:23:30.394426Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 1 requestId: cookie: 1 2025-03-31T16:23:30.394500Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037899 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-03-31T16:23:30.395674Z :DEBUG: [] MessageGroupId [1236] SessionId [1236|359330c3-ccaaa33b-567cf6d3-c976561c_0] Write session got write response: sequence_numbers: 1 offsets: 6 already_written: false write_statistics { persist_duration_ms: 22 queued_in_partition_duration_ms: 760 throttled_on_partition_duration_ms: 759 } 2025-03-31T16:23:30.395727Z :DEBUG: [] MessageGroupId [1236] SessionId [1236|359330c3-ccaaa33b-567cf6d3-c976561c_0] Write session: acknoledged message 1 2025-03-31T16:23:30.420830Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-03-31T16:23:30.420972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 8 shard idx 72057594046644480:1 data size 0 row count 0 2025-03-31T16:23:30.422416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 8], pathId map=user, is column=0, is olap=0 2025-03-31T16:23:30.422459Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037888 followerId=0, pathId 8: RowCount 0, DataSize 0 2025-03-31T16:23:30.422673Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-03-31T16:23:30.460531Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488010095788040590:2632], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:23:30.460805Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODA5YmUxNzYtYzBiODg0ZjktYjViYzI1MC01NGFkMWY1NQ==, ActorId: [1:7488010095788040588:2631], ActorState: ExecuteState, TraceId: 01jqpg7sca1s815kqazxjgy7jy, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:23:30.461223Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T16:23:30.493518Z :INFO: [] MessageGroupId [1236] SessionId [1236|359330c3-ccaaa33b-567cf6d3-c976561c_0] Write session will now close 2025-03-31T16:23:30.493583Z :DEBUG: [] MessageGroupId [1236] SessionId [1236|359330c3-ccaaa33b-567cf6d3-c976561c_0] Write session: aborting 2025-03-31T16:23:30.494448Z :INFO: [] MessageGroupId [1236] SessionId [1236|359330c3-ccaaa33b-567cf6d3-c976561c_0] Write session: gracefully shut down, all writes complete 2025-03-31T16:23:30.494494Z :DEBUG: [] MessageGroupId [1236] SessionId [1236|359330c3-ccaaa33b-567cf6d3-c976561c_0] Write session: destroy 2025-03-31T16:23:30.494708Z node 1 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 22 sessionId: 1236|359330c3-ccaaa33b-567cf6d3-c976561c_0 grpc read done: success: 0 data: 2025-03-31T16:23:30.494737Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 22 sessionId: 1236|359330c3-ccaaa33b-567cf6d3-c976561c_0 grpc read failed 2025-03-31T16:23:30.494779Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 22 sessionId: 1236|359330c3-ccaaa33b-567cf6d3-c976561c_0 grpc closed 2025-03-31T16:23:30.494793Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 22 sessionId: 1236|359330c3-ccaaa33b-567cf6d3-c976561c_0 is DEAD DURATION 3.050316s 2025-03-31T16:23:30.495972Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037899 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-03-31T16:23:30.496163Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899] server disconnected, pipe [1:7488010091493073268:2626] destroyed 2025-03-31T16:23:30.496221Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] TPartition::DropOwner. |91.1%| [TA] {RESULT} $(B)/ydb/services/datastreams/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TColumnShardTestSchema::ColdTiersWithStat >> TColumnShardTestReadWrite::ReadGroupBy [GOOD] |91.1%| [TA] $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/test-results/unittest/{meta.json ... results_accumulator.log} |91.1%| [TA] {RESULT} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32 [GOOD] Test command err: 2025-03-31T16:21:31.758952Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:21:31.842052Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:21:31.862357Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:21:31.862633Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:21:31.870336Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:21:31.870499Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:21:31.870680Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:21:31.870772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:21:31.870835Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:21:31.870918Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:21:31.871012Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:21:31.871136Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:21:31.871247Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:21:31.871325Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:21:31.871413Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:21:31.871477Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2025-03-31T16:21:31.894505Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:21:31.894629Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:21:31.894673Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:21:31.894889Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:21:31.895021Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:21:31.895101Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:21:31.895145Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:21:31.895236Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:21:31.895306Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:21:31.895342Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:21:31.895368Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:21:31.895583Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:21:31.895653Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:21:31.895686Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:21:31.895711Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:21:31.895826Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:21:31.895894Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:21:31.895936Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:21:31.895966Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:21:31.896043Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:21:31.896086Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:21:31.896119Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:21:31.896189Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:21:31.896222Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:21:31.896267Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:21:31.896677Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=56; 2025-03-31T16:21:31.896754Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=31; 2025-03-31T16:21:31.896844Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=39; 2025-03-31T16:21:31.896925Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=38; 2025-03-31T16:21:31.897073Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:21:31.897154Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:21:31.897202Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:21:31.897397Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:21:31.897442Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:21:31.897479Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:21:31.897609Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 
2025-03-31T16:21:31.897648Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2;
2025-03-31T16:21:31.897677Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2;
2025-03-31T16:21:31.897864Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15;
2025-03-31T16:21:31.897910Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks;
2025-03-31T16:21:31.897937Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks;
2025-03-31T16:21:31.898055Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16;
2025-03-31T16:21:31.898091Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished;
2025-03-31T16:21:31.898142Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish;
... 1.002904Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:81:2696:0];
2025-03-31T16:23:31.002986Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:82:8528:0];
2025-03-31T16:23:31.003066Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:83:2776:0];
2025-03-31T16:23:31.003185Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:84:2768:0];
2025-03-31T16:23:31.003260Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:85:2768:0];
2025-03-31T16:23:31.003338Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:86:2768:0];
2025-03-31T16:23:31.003415Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:87:2768:0];
2025-03-31T16:23:31.003479Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:88:2768:0];
2025-03-31T16:23:31.003544Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:89:2768:0];
2025-03-31T16:23:31.003620Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:90:2768:0];
2025-03-31T16:23:31.003695Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:91:2768:0];
2025-03-31T16:23:31.003765Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:92:2768:0];
2025-03-31T16:23:31.003833Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:93:2768:0];
2025-03-31T16:23:31.003909Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:94:2768:0];
2025-03-31T16:23:31.003996Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:95:2768:0];
2025-03-31T16:23:31.004070Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:96:2768:0];
2025-03-31T16:23:31.004139Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:97:2768:0];
2025-03-31T16:23:31.004257Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:98:2768:0];
2025-03-31T16:23:31.004335Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:99:2768:0];
2025-03-31T16:23:31.004425Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:100:2768:0];
2025-03-31T16:23:31.004497Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:101:2768:0];
2025-03-31T16:23:31.004574Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:102:2768:0];
2025-03-31T16:23:31.004653Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:103:2768:0];
2025-03-31T16:23:31.004719Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:104:2768:0];
2025-03-31T16:23:31.004782Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:105:2768:0];
2025-03-31T16:23:31.004874Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:106:2768:0];
2025-03-31T16:23:31.004945Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:107:2768:0];
2025-03-31T16:23:31.005051Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:108:2768:0];
2025-03-31T16:23:31.005134Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:109:2768:0];
2025-03-31T16:23:31.005198Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:110:2768:0];
2025-03-31T16:23:31.005274Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:111:2768:0];
2025-03-31T16:23:31.005358Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:112:2768:0];
2025-03-31T16:23:31.005434Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:113:2768:0];
2025-03-31T16:23:31.005508Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:114:2768:0];
2025-03-31T16:23:31.005595Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:115:2768:0];
2025-03-31T16:23:31.005672Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:116:2768:0];
2025-03-31T16:23:31.005736Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:117:2768:0];
2025-03-31T16:23:31.005825Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:118:2768:0];
2025-03-31T16:23:31.005894Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:119:2696:0];
2025-03-31T16:23:31.005966Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:120:2696:0];
2025-03-31T16:23:31.006036Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:121:2696:0];
2025-03-31T16:23:31.006101Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:122:2696:0];
2025-03-31T16:23:31.006192Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:123:8528:0];
2025-03-31T16:23:31.006264Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:124:2768:0];
2025-03-31T16:23:31.006326Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:125:2768:0];
2025-03-31T16:23:31.006397Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:126:2768:0];
2025-03-31T16:23:31.006462Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:127:2768:0];
2025-03-31T16:23:31.006547Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:128:2768:0];
2025-03-31T16:23:31.006623Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:129:2768:0];
2025-03-31T16:23:31.006687Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:130:2768:0];
2025-03-31T16:23:31.006755Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:131:2768:0];
2025-03-31T16:23:31.006820Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:132:2768:0];
2025-03-31T16:23:31.006885Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:133:2768:0];
2025-03-31T16:23:31.006947Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:134:2768:0];
2025-03-31T16:23:31.007012Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:135:2768:0];
2025-03-31T16:23:31.007115Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:136:2768:0];
2025-03-31T16:23:31.007200Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:137:2768:0];
2025-03-31T16:23:31.007277Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:138:2768:0];
2025-03-31T16:23:31.007346Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:139:2768:0];
2025-03-31T16:23:31.007410Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:140:2768:0];
2025-03-31T16:23:31.007480Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:141:2768:0];
2025-03-31T16:23:31.007547Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:142:2768:0];
2025-03-31T16:23:31.007616Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:143:2768:0];
2025-03-31T16:23:31.007696Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:144:2768:0];
2025-03-31T16:23:31.007803Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:145:2768:0];
2025-03-31T16:23:31.007897Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:146:2768:0];
2025-03-31T16:23:31.007964Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:147:2768:0];
2025-03-31T16:23:31.008032Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:148:2768:0];
2025-03-31T16:23:31.008104Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:149:2768:0];
2025-03-31T16:23:31.008174Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:150:2768:0];
2025-03-31T16:23:31.008237Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:151:2768:0];
2025-03-31T16:23:31.008321Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:152:2768:0];
2025-03-31T16:23:31.008400Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:153:2768:0];
2025-03-31T16:23:31.008473Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:154:2768:0];
2025-03-31T16:23:31.008538Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:155:2768:0];
2025-03-31T16:23:31.008604Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:156:2768:0];
2025-03-31T16:23:31.008675Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:157:2768:0];
2025-03-31T16:23:31.008738Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:158:2768:0];
2025-03-31T16:23:31.008801Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:159:2768:0];
2025-03-31T16:23:31.008878Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:160:2696:0];
2025-03-31T16:23:31.008954Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:161:2696:0];
2025-03-31T16:23:31.009043Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:162:2696:0];
2025-03-31T16:23:31.009111Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:163:2696:0];
2025-03-31T16:23:31.009176Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:164:8528:0];
2025-03-31T16:23:31.598933Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 9437184
2025-03-31T16:23:31.600021Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[4] (CS::GENERAL) apply at tablet 9437184
2025-03-31T16:23:31.720985Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 4:2 Blob count: 692
2025-03-31T16:23:31.728042Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=2078720;raw_bytes=2324579;count=1;records=24469} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=7587944;raw_bytes=7088522;count=3;records=75200} inactive {blob_bytes=100419184;raw_bytes=104021253;count=42;records=1103721} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor with no CompleteReady flag;prev_reason=;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager;
>> TestKinesisHttpProxy::CreateDeleteStreamWithConsumer [GOOD]
>> TColumnShardTestSchema::RebootColdTiers
>> TColumnShardTestSchema::HotTiersAfterTtl
>> TColumnShardTestSchema::RebootOneTierExternalTtl
>> TestKinesisHttpProxy::CreateDeleteStreamWithConsumerWithFlag
|91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
>> TColumnShardTestReadWrite::ReadGroupBy [GOOD]
Test command err:
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8328;columns=19;
-- group by key: 0
2025-03-31T16:21:43.156774Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0
2025-03-31T16:21:43.254828Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor;
2025-03-31T16:21:43.274224Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished;
2025-03-31T16:21:43.274491Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184
2025-03-31T16:21:43.281909Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0;
2025-03-31T16:21:43.282100Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules;
2025-03-31T16:21:43.282313Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks;
2025-03-31T16:21:43.282411Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner;
2025-03-31T16:21:43.282502Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId;
2025-03-31T16:21:43.282571Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup;
2025-03-31T16:21:43.282669Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer;
2025-03-31T16:21:43.282778Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks;
2025-03-31T16:21:43.282854Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks;
2025-03-31T16:21:43.282934Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks;
2025-03-31T16:21:43.283003Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2;
2025-03-31T16:21:43.283130Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks;
2025-03-31T16:21:43.306180Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184
2025-03-31T16:21:43.306309Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules;
2025-03-31T16:21:43.306367Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules;
2025-03-31T16:21:43.306610Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found;
2025-03-31T16:21:43.306733Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1;
2025-03-31T16:21:43.306793Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks;
2025-03-31T16:21:43.306866Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks;
2025-03-31T16:21:43.306940Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found;
2025-03-31T16:21:43.306990Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2;
2025-03-31T16:21:43.307025Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner;
2025-03-31T16:21:43.307049Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner;
2025-03-31T16:21:43.307212Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found;
2025-03-31T16:21:43.307266Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4;
2025-03-31T16:21:43.307291Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId;
2025-03-31T16:21:43.307320Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId;
2025-03-31T16:21:43.307390Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found;
2025-03-31T16:21:43.307430Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6;
2025-03-31T16:21:43.307486Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup;
2025-03-31T16:21:43.307508Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup;
2025-03-31T16:21:43.307559Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8;
2025-03-31T16:21:43.307583Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer;
2025-03-31T16:21:43.307607Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer;
2025-03-31T16:21:43.307674Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9;
2025-03-31T16:21:43.307717Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks;
2025-03-31T16:21:43.307746Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks;
2025-03-31T16:21:43.308213Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=81;
2025-03-31T16:21:43.308331Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=44;
2025-03-31T16:21:43.308460Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=52;
2025-03-31T16:21:43.308565Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=47;
2025-03-31T16:21:43.308763Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10;
2025-03-31T16:21:43.308831Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks;
2025-03-31T16:21:43.308872Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks;
2025-03-31T16:21:43.309091Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11;
2025-03-31T16:21:43.309132Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks;
2025-03-31T16:21:43.309170Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks;
2025-03-31T16:21:43.309322Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13;
2025-03-31T16:21:43.309373Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2;
2025-03-31T16:21:43.309414Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2;
2025-03-31T16:21:43.309636Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15;
2025-03-31T16:21:43.309680Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks;
2025-03-31T16:21:43.309710Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks;
2025-03-31T16:21:43.309826Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16;
2025-03-31T16:21:43.309869Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished;
2025-03-31T16:21:43.309918Z node 1 :TX_COLUMNS ... HARD_SCAN DEBUG: SelfId=[54:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=merge.cpp:74;event=DoApply;interval_idx=0;
2025-03-31T16:23:33.383969Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=2052;
2025-03-31T16:23:33.384019Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=1;merger=0;interval_id=2052;
2025-03-31T16:23:33.384065Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=scanner.cpp:65;event=intervals_finished;
2025-03-31T16:23:33.384183Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;;);
2025-03-31T16:23:33.384227Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=1;count=1;finished=1;
2025-03-31T16:23:33.384269Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:201;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);;
2025-03-31T16:23:33.384869Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);;
2025-03-31T16:23:33.385059Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:1;records_count:1;schema=100: binary 101: binary 102: binary 103: uint64;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;;);
2025-03-31T16:23:33.385107Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1;
2025-03-31T16:23:33.385229Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:232;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;;);columns=4;rows=1;
2025-03-31T16:23:33.385305Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:252;stage=data_format;batch_size=26;num_rows=1;batch_columns=100,101,102,103;
2025-03-31T16:23:33.385423Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:366;event=send_data;compute_actor_id=[54:434:2452];bytes=26;rows=1;faults=0;finished=0;fault=0;schema=100: binary 101: binary 102: binary 103: uint64;
2025-03-31T16:23:33.385559Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;;);
2025-03-31T16:23:33.385693Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;;);
2025-03-31T16:23:33.385807Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;;);
2025-03-31T16:23:33.386118Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);;
2025-03-31T16:23:33.386244Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;;);
2025-03-31T16:23:33.386376Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;;);
2025-03-31T16:23:33.386421Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: Scan [54:435:2453] finished for tablet 9437184
2025-03-31T16:23:33.386967Z node 54 :TX_COLUMNSHARD_SCAN INFO: SelfId=[54:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:416;event=scan_finish;compute_actor_id=[54:434:2452];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.003},{"events":["l_task_result"],"t":0.015},{"events":["f_ack"],"t":0.016},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.017}],"full":{"a":1743438213368695,"name":"_full_task","f":1743438213368695,"d_finished":0,"c":0,"l":1743438213386475,"d":17780},"events":[{"name":"bootstrap","f":1743438213368920,"d_finished":2957,"c":1,"l":1743438213371877,"d":2957},{"a":1743438213386096,"name":"ack","f":1743438213384841,"d_finished":992,"c":1,"l":1743438213385833,"d":1371},{"a":1743438213386081,"name":"processing","f":1743438213372013,"d_finished":8446,"c":10,"l":1743438213385839,"d":8840},{"name":"ProduceResults","f":1743438213370495,"d_finished":3162,"c":13,"l":1743438213386403,"d":3162},{"a":1743438213386407,"name":"Finish","f":1743438213386407,"d_finished":0,"c":0,"l":1743438213386475,"d":68},{"name":"task_result","f":1743438213372036,"d_finished":7274,"c":9,"l":1743438213384329,"d":7274}],"id":"9437184::2052"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;;);
2025-03-31T16:23:33.387048Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=send_data;compute_actor_id=[54:434:2452];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=;
2025-03-31T16:23:33.387584Z node 54 :TX_COLUMNSHARD_SCAN INFO: SelfId=[54:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:371;event=scan_finished;compute_actor_id=[54:434:2452];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.003},{"events":["l_task_result"],"t":0.015},{"events":["f_ack"],"t":0.016},{"events":["l_ProduceResults","f_Finish"],"t":0.017},{"events":["l_ack","l_processing","l_Finish"],"t":0.018}],"full":{"a":1743438213368695,"name":"_full_task","f":1743438213368695,"d_finished":0,"c":0,"l":1743438213387110,"d":18415},"events":[{"name":"bootstrap","f":1743438213368920,"d_finished":2957,"c":1,"l":1743438213371877,"d":2957},{"a":1743438213386096,"name":"ack","f":1743438213384841,"d_finished":992,"c":1,"l":1743438213385833,"d":2006},{"a":1743438213386081,"name":"processing","f":1743438213372013,"d_finished":8446,"c":10,"l":1743438213385839,"d":9475},{"name":"ProduceResults","f":1743438213370495,"d_finished":3162,"c":13,"l":1743438213386403,"d":3162},{"a":1743438213386407,"name":"Finish","f":1743438213386407,"d_finished":0,"c":0,"l":1743438213387110,"d":703},{"name":"task_result","f":1743438213372036,"d_finished":7274,"c":9,"l":1743438213384329,"d":7274}],"id":"9437184::2052"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;;);
2025-03-31T16:23:33.387659Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-31T16:23:33.368085Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=4;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=16001;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=16001;selected_rows=0;
2025-03-31T16:23:33.387701Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor;
2025-03-31T16:23:33.388041Z node 54 :TX_COLUMNSHARD_SCAN INFO: SelfId=[54:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;;
>> KqpLimits::OutOfSpaceYQLUpsertFail+useSink [GOOD]
>> KqpLimits::OutOfSpaceYQLUpsertFail-useSink
>> TColumnShardTestSchema::ForgetWithLostAnswer
>> TCdcStreamWithInitialScanTests::MeteringDedicated [GOOD]
>> TColumnShardTestSchema::HotTiersWithStat
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_cdc_stream/unittest
>> TCdcStreamWithInitialScanTests::MeteringDedicated [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141]
2025-03-31T16:23:00.020032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-03-31T16:23:00.020291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-31T16:23:00.020363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-03-31T16:23:00.020414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-03-31T16:23:00.021631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-03-31T16:23:00.021720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-03-31T16:23:00.021809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-31T16:23:00.021904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-03-31T16:23:00.023444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-03-31T16:23:00.115422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-31T16:23:00.115479Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:23:00.120122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-03-31T16:23:00.120212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-03-31T16:23:00.120353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-03-31T16:23:00.125077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-03-31T16:23:00.125274Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-03-31T16:23:00.125870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-03-31T16:23:00.126106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-03-31T16:23:00.131523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-03-31T16:23:00.141121Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-31T16:23:00.141210Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-31T16:23:00.142333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-03-31T16:23:00.142388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-31T16:23:00.142438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-03-31T16:23:00.142574Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-03-31T16:23:00.148963Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062]
2025-03-31T16:23:00.284120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-03-31T16:23:00.284326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:23:00.284511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-03-31T16:23:00.284731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-03-31T16:23:00.284781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:23:00.286822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-03-31T16:23:00.286948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-03-31T16:23:00.287128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:23:00.287179Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-03-31T16:23:00.287222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-03-31T16:23:00.287261Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-03-31T16:23:00.289208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:23:00.289258Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-03-31T16:23:00.289291Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-03-31T16:23:00.290906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:23:00.290956Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:23:00.291013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-03-31T16:23:00.291067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-03-31T16:23:00.294776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-03-31T16:23:00.296651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-03-31T16:23:00.296852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-03-31T16:23:00.297828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-03-31T16:23:00.297957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-03-31T16:23:00.298012Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-03-31T16:23:00.298338Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-03-31T16:23:00.298389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-03-31T16:23:00.298562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-03-31T16:23:00.298631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-03-31T16:23:00.300742Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-31T16:23:00.300799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-31T16:23:00.300980Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-31T16:23:00.301016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1
2025-03-31T16:23:00.301218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:23:00.301280Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-03-31T16:23:00.301377Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-03-31T16:23:00.301407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-31T16:23:00.301442Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-03-31T16:23:00.301469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-31T16:23:00.301500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-03-31T16:23:00.301550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-31T16:23:00.301588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-03-31T16:23:00.301611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-03-31T16:23:00.301677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-03-31T16:23:00.301715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-03-31T16:23:00.301758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-03-31T16:23:00.303631Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-03-31T16:23:00.303747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-03-31T16:23:00.303784Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 7686Z node 19 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 2/3
2025-03-31T16:23:31.077706Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 2/3
2025-03-31T16:23:31.077729Z node 19 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 2/3
2025-03-31T16:23:31.077746Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 2/3
2025-03-31T16:23:31.077766Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 2/3, is published: false
2025-03-31T16:23:31.078117Z node 19 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72075186233409546, cookie: 281474976715657
2025-03-31T16:23:31.078250Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72075186233409546, cookie: 281474976715657
2025-03-31T16:23:31.078303Z node 19 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 281474976715657
2025-03-31T16:23:31.078367Z node 19 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 281474976715657, pathId: [OwnerId: 72075186233409546, LocalPathId: 3], version: 5
2025-03-31T16:23:31.078433Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 3] was 4
2025-03-31T16:23:31.078883Z node 19 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72075186233409546, cookie: 281474976715657
2025-03-31T16:23:31.078937Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72075186233409546, cookie: 281474976715657
2025-03-31T16:23:31.078959Z node 19 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 281474976715657
2025-03-31T16:23:31.078978Z node 19 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 281474976715657, pathId: [OwnerId: 72075186233409546, LocalPathId: 2], version: 5
2025-03-31T16:23:31.078998Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 6
2025-03-31T16:23:31.079063Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 2/3, is published: true
2025-03-31T16:23:31.084389Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976715657
2025-03-31T16:23:31.084606Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976715657
2025-03-31T16:23:31.098907Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72075186233409546, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409552 Status: COMPLETE TxId: 281474976715657 Step: 250 OrderId: 281474976715657 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72075186233409547 TxStats { PerShardStats { ShardId: 72075186233409552 CpuTimeUsec: 1180 } }
2025-03-31T16:23:31.098985Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715657, tablet: 72075186233409552, partId: 1
2025-03-31T16:23:31.099212Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715657:1, at schemeshard: 72075186233409546, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409552 Status: COMPLETE TxId: 281474976715657 Step: 250 OrderId: 281474976715657 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72075186233409547 TxStats { PerShardStats { ShardId: 72075186233409552 CpuTimeUsec: 1180 } }
2025-03-31T16:23:31.099415Z node 19 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72075186233409546, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409552 Status: COMPLETE TxId: 281474976715657 Step: 250 OrderId: 281474976715657 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72075186233409547 TxStats { PerShardStats { ShardId: 72075186233409552 CpuTimeUsec: 1180 } }
2025-03-31T16:23:31.100916Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72075186233409546, at schemeshard: 72075186233409546, message: Source { RawX1: 752 RawX2: 81604381265 } Origin: 72075186233409552 State: 2 TxId: 281474976715657 Step: 0 Generation: 2
2025-03-31T16:23:31.101043Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715657, tablet: 72075186233409552, partId: 1
2025-03-31T16:23:31.101332Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715657:1, at schemeshard: 72075186233409546, message: Source { RawX1: 752 RawX2: 81604381265 } Origin: 72075186233409552 State: 2 TxId: 281474976715657 Step: 0 Generation: 2
2025-03-31T16:23:31.101465Z node 19 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715657:1 HandleReply TEvSchemaChanged at tablet: 72075186233409546
2025-03-31T16:23:31.101652Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 281474976715657:1 HandleReply TEvSchemaChanged at tablet: 72075186233409546 message: Source { RawX1: 752 RawX2: 81604381265 } Origin: 72075186233409552 State: 2 TxId: 281474976715657 Step: 0 Generation: 2
2025-03-31T16:23:31.101770Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976715657:1, shardIdx: 72075186233409546:4, datashard: 72075186233409552, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72075186233409546
2025-03-31T16:23:31.101856Z node 19 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715657:1, at schemeshard: 72075186233409546
2025-03-31T16:23:31.101963Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976715657:1, datashard: 72075186233409552, at schemeshard: 72075186233409546
2025-03-31T16:23:31.102049Z node 19 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:1 129 -> 240
2025-03-31T16:23:31.106143Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715657:1, at schemeshard: 72075186233409546
2025-03-31T16:23:31.107787Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715657:1, at schemeshard: 72075186233409546
2025-03-31T16:23:31.108352Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:1, at schemeshard: 72075186233409546
2025-03-31T16:23:31.108425Z node 19 :FLAT_TX_SCHEMESHARD INFO: [72075186233409546] TDone opId# 281474976715657:1 ProgressState
2025-03-31T16:23:31.108692Z node 19 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:1 progress is 3/3
2025-03-31T16:23:31.108773Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 3/3
2025-03-31T16:23:31.108854Z node 19 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:1 progress is 3/3
2025-03-31T16:23:31.108919Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 3/3
2025-03-31T16:23:31.108993Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 3/3, is published: true
2025-03-31T16:23:31.109096Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 3/3
2025-03-31T16:23:31.109170Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0
2025-03-31T16:23:31.109230Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715657:0
2025-03-31T16:23:31.109342Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 3] was 3
2025-03-31T16:23:31.109410Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:1
2025-03-31T16:23:31.109434Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715657:1
2025-03-31T16:23:31.109526Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 5
2025-03-31T16:23:31.109565Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:2
2025-03-31T16:23:31.109589Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715657:2
2025-03-31T16:23:31.109621Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 4
2025-03-31T16:23:34.056659Z node 19 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 4 SchemeshardId: 72075186233409546, at schemeshard: 72075186233409546
2025-03-31T16:23:34.057126Z node 19 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186233409546 describe pathId 4 took 459us result status StatusNameConflict
2025-03-31T16:23:34.057382Z node 19 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusNameConflict Reason: "Check failed: path: \'/MyRoot/Shared/Table/Stream/streamImpl\', error: path is not a common path (id: [OwnerId: 72075186233409546, LocalPathId: 4], type: EPathTypePersQueueGroup, state: EPathStateNoChanges)" Path: "/MyRoot/Shared/Table/Stream/streamImpl" PathId: 4 LastExistedPrefixPath: "/MyRoot/Shared/Table/Stream/streamImpl" LastExistedPrefixPathId: 4 LastExistedPrefixDescription { Self { Name: "streamImpl" PathId: 4 SchemeshardId: 72075186233409546 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 106 CreateStep: 200 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeStreamImpl ChildrenExist: false BalancerTabletID: 72075186233409554 } } PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546
2025-03-31T16:23:36.694857Z node 19 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 4 SchemeshardId: 72075186233409546, at schemeshard: 72075186233409546
2025-03-31T16:23:36.695277Z node 19 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186233409546 describe pathId 4 took 459us result status StatusNameConflict
2025-03-31T16:23:36.695488Z node 19 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusNameConflict Reason: "Check failed: path: \'/MyRoot/Shared/Table/Stream/streamImpl\', error: path is not a common path (id: [OwnerId: 72075186233409546, LocalPathId: 4], type: EPathTypePersQueueGroup, state: EPathStateNoChanges)" Path: "/MyRoot/Shared/Table/Stream/streamImpl" PathId: 4 LastExistedPrefixPath: "/MyRoot/Shared/Table/Stream/streamImpl" LastExistedPrefixPathId: 4 LastExistedPrefixDescription { Self { Name: "streamImpl" PathId: 4 SchemeshardId: 72075186233409546 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 106 CreateStep: 200 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeStreamImpl ChildrenExist: false BalancerTabletID: 72075186233409554 } } PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546
>> TColumnShardTestSchema::RebootDrop [GOOD]
>> DataShardVolatile::DistributedWriteShardRestartAfterExpectation-UseSink [GOOD]
>> DataShardVolatile::DistributedWriteEarlierSnapshotNotBlocked
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest
>> TColumnShardTestSchema::RebootDrop [GOOD]
Test command err:
2025-03-31T16:23:21.083586Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0
2025-03-31T16:23:21.186115Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor;
2025-03-31T16:23:21.208626Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished;
2025-03-31T16:23:21.208971Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184
2025-03-31T16:23:21.218416Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0;
2025-03-31T16:23:21.218656Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules;
2025-03-31T16:23:21.218904Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks;
2025-03-31T16:23:21.219056Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner;
2025-03-31T16:23:21.219189Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId;
2025-03-31T16:23:21.219316Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup;
2025-03-31T16:23:21.219429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer;
2025-03-31T16:23:21.219580Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks;
2025-03-31T16:23:21.219699Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks;
2025-03-31T16:23:21.219815Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks;
2025-03-31T16:23:21.219945Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2;
2025-03-31T16:23:21.220064Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks;
2025-03-31T16:23:21.254450Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184
2025-03-31T16:23:21.254686Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules;
2025-03-31T16:23:21.254748Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules;
2025-03-31T16:23:21.254917Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found;
2025-03-31T16:23:21.255067Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1;
2025-03-31T16:23:21.255155Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks;
2025-03-31T16:23:21.255243Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks;
2025-03-31T16:23:21.255352Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found;
2025-03-31T16:23:21.255425Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2;
2025-03-31T16:23:21.255468Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner;
2025-03-31T16:23:21.255500Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner;
2025-03-31T16:23:21.255666Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found;
2025-03-31T16:23:21.255734Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4;
2025-03-31T16:23:21.255777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId;
2025-03-31T16:23:21.255809Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId;
2025-03-31T16:23:21.255928Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found;
2025-03-31T16:23:21.255990Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6;
2025-03-31T16:23:21.256044Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup;
2025-03-31T16:23:21.256103Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup;
2025-03-31T16:23:21.256179Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8;
2025-03-31T16:23:21.256215Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer;
2025-03-31T16:23:21.256244Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer;
2025-03-31T16:23:21.256288Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9;
2025-03-31T16:23:21.256328Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks;
2025-03-31T16:23:21.256371Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks;
2025-03-31T16:23:21.256739Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=42;
2025-03-31T16:23:21.256820Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=35;
2025-03-31T16:23:21.256883Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=29;
2025-03-31T16:23:21.256994Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=68;
2025-03-31T16:23:21.257186Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10;
2025-03-31T16:23:21.257284Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks;
2025-03-31T16:23:21.257321Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks;
2025-03-31T16:23:21.257548Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11;
2025-03-31T16:23:21.257594Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks;
2025-03-31T16:23:21.257632Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks;
2025-03-31T16:23:21.257805Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13;
2025-03-31T16:23:21.257858Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2;
2025-03-31T16:23:21.257894Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2;
2025-03-31T16:23:21.258105Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15;
2025-03-31T16:23:21.258159Z node 1 :TX_COLUMNSHARD WARN:
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:23:21.258191Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-31T16:23:21.258339Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T16:23:21.258386Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T16:23:21.258433Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:953:2954];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-03-31T16:23:37.569064Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-03-31T16:23:37.569101Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-03-31T16:23:37.569155Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:953:2954];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-31T16:23:37.569216Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:953:2954];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:23:37.569284Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:953:2954];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=1; 2025-03-31T16:23:37.569343Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:953:2954];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700004;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-03-31T16:23:37.569386Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:953:2954];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-31T16:23:37.569434Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:953:2954];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-31T16:23:37.569478Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:953:2954];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-31T16:23:37.569571Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:953:2954];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.999000s; 2025-03-31T16:23:37.569619Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:953:2954];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-31T16:23:37.570033Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:953:2954];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1452;size=7;path_id=1; 2025-03-31T16:23:37.570680Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:953:2954];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1503;stage=finished; 2025-03-31T16:23:37.747121Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000003:max} readable: {1000000004:max} at tablet 9437184 2025-03-31T16:23:37.747343Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-03-31T16:23:37.749370Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:953:2954];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000003:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 9 } } } ; 2025-03-31T16:23:37.749455Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:953:2954];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000003:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 9 } } } ; 2025-03-31T16:23:37.749993Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:953:2954];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000003:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"p":{"data":[{"name":"saved_at","id":9}]},"o":"9","t":"FetchOriginalData"},"w":2,"id":1},"2":{"p":{"i":"9","p":{"address":{"name":"saved_at","id":9}},"o":"9","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"9","t":"Projection"},"w":7,"id":0}}}; 2025-03-31T16:23:37.750118Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: tablet_id=9437184;self_id=[1:953:2954];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000003:max};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:136;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-03-31T16:23:37.751106Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: tablet_id=9437184;self_id=[1:953:2954];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000003:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:162;event=TTxScan started;actor_id=[1:1013:3006];trace_detailed=; 2025-03-31T16:23:37.752292Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: fline=context.cpp:84;ff_first=(column_ids=9;column_names=saved_at;);; 2025-03-31T16:23:37.752555Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: fline=context.cpp:99;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; 2025-03-31T16:23:37.753077Z node 1 
:TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1013:3006];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-31T16:23:37.753209Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1013:3006];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-31T16:23:37.753313Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1013:3006];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-31T16:23:37.753356Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1013:3006] finished for tablet 9437184 2025-03-31T16:23:37.753721Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:1013:3006];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:416;event=scan_finish;compute_actor_id=[1:1006:3000];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults"],"t":0.001},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.002}],"full":{"a":1743438217751032,"name":"_full_task","f":1743438217751032,"d_finished":0,"c":0,"l":1743438217753402,"d":2370},"events":[{"name":"bootstrap","f":1743438217751265,"d_finished":1438,"c":1,"l":1743438217752703,"d":1438},{"a":1743438217753036,"name":"ack","f":1743438217753036,"d_finished":0,"c":0,"l":1743438217753402,"d":366},{"a":1743438217753020,"name":"processing","f":1743438217753020,"d_finished":0,"c":0,"l":1743438217753402,"d":382},{"name":"ProduceResults","f":1743438217752685,"d_finished":248,"c":2,"l":1743438217753338,"d":248},{"a":1743438217753340,"name":"Finish","f":1743438217753340,"d_finished":0,"c":0,"l":1743438217753402,"d":62}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-31T16:23:37.753779Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1013:3006];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:1006:3000];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-31T16:23:37.754064Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:1013:3006];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:371;event=scan_finished;compute_actor_id=[1:1006:3000];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults"],"t":0.001},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.002}],"full":{"a":1743438217751032,"name":"_full_task","f":1743438217751032,"d_finished":0,"c":0,"l":1743438217753814,"d":2782},"events":[{"name":"bootstrap","f":1743438217751265,"d_finished":1438,"c":1,"l":1743438217752703,"d":1438},{"a":1743438217753036,"name":"ack","f":1743438217753036,"d_finished":0,"c":0,"l":1743438217753814,"d":778},{"a":1743438217753020,"name":"processing","f":1743438217753020,"d_finished":0,"c":0,"l":1743438217753814,"d":794},{"name":"ProduceResults","f":1743438217752685,"d_finished":248,"c":2,"l":1743438217753338,"d":248},{"a":1743438217753340,"name":"Finish","f":1743438217753340,"d_finished":0,"c":0,"l":1743438217753814,"d":474}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-31T16:23:37.754118Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1013:3006];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-31T16:23:37.750062Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-03-31T16:23:37.754159Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1013:3006];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-31T16:23:37.754238Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:1013:3006];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;
>> TestYmqHttpProxy::BillingRecordsForJsonApi [GOOD]
>> TColumnShardTestSchema::OneColdTier
>> TestYmqHttpProxy::TestChangeMessageVisibility
>> TestKinesisHttpProxy::CreateDeleteStreamWithConsumerWithFlag [GOOD]
>> TestKinesisHttpProxy::BadRequestUnknownMethod
>> TColumnShardTestSchema::InternalTTL [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::InternalTTL [GOOD]
Test command err:
2025-03-31T16:23:17.269446Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:23:17.455275Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:23:17.468615Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:102:2136], Recipient
[1:139:2171]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:23:17.471306Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:23:17.497493Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:23:17.498428Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:23:17.511136Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:23:17.511401Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:23:17.511671Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:23:17.511806Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:23:17.511921Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:23:17.512043Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:23:17.512185Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:23:17.512316Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:23:17.512452Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:23:17.512582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:23:17.512696Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:23:17.512824Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:23:17.541082Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:23:17.545982Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at 
tablet 9437184 2025-03-31T16:23:17.546156Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:23:17.546217Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:23:17.546470Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:23:17.548733Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:23:17.548860Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:23:17.548918Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:23:17.549079Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:23:17.549188Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:23:17.549241Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:23:17.549279Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:23:17.549496Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:23:17.549592Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:23:17.549656Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:23:17.549695Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:23:17.549792Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:23:17.549866Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:23:17.549922Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:23:17.549953Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:23:17.550033Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:23:17.550072Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:23:17.550104Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:23:17.550157Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:23:17.550196Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:23:17.550225Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:23:17.550650Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=52; 2025-03-31T16:23:17.550759Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=41; 2025-03-31T16:23:17.551541Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=724; 2025-03-31T16:23:17.551722Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=95; 2025-03-31T16:23:17.551942Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:23:17.552022Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:23:17.552063Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:23:17.552323Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:23:17.552373Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:23:17.552411Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:23:17.552575Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 
2025-03-31T16:23:17.552628Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:23:17.552679Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-31T16:23:17.552889Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:23:17.552953Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:23:17.552985Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-31T ... tamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-31T16:23:42.738826Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:1;records_count:71;schema=saved_at: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-31T16:23:42.738847Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-31T16:23:42.738870Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:201;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-03-31T16:23:42.738922Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-31T16:23:42.738979Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:1;records_count:71;schema=saved_at: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-31T16:23:42.739001Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-31T16:23:42.739049Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:232;stage=ready 
result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;);columns=1;rows=71; 2025-03-31T16:23:42.739099Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:252;stage=data_format;batch_size=568;num_rows=71;batch_columns=saved_at; 2025-03-31T16:23:42.739225Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:684:2700];bytes=568;rows=71;faults=0;finished=0;fault=0;schema=saved_at: timestamp[us]; 2025-03-31T16:23:42.739314Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-31T16:23:42.739396Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-31T16:23:42.739513Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-31T16:23:42.739607Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-31T16:23:42.739660Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-31T16:23:42.739726Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-31T16:23:42.739760Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:691:2707] finished for tablet 9437184 2025-03-31T16:23:42.740244Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:416;event=scan_finish;compute_actor_id=[1:684:2700];stats={"p":[{"events":["f_bootstrap"],"t":0.043},{"events":["f_ProduceResults"],"t":0.421},{"events":["l_bootstrap"],"t":0.684},{"events":["f_processing","f_task_result"],"t":0.706},{"events":["l_task_result"],"t":6.773},{"events":["f_ack"],"t":6.8},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":7.663}],"full":{"a":1743438215076374,"name":"_full_task","f":1743438215076374,"d_finished":0,"c":0,"l":1743438222739824,"d":7663450},"events":[{"name":"bootstrap","f":1743438215119603,"d_finished":641208,"c":1,"l":1743438215760811,"d":641208},{"a":1743438222739597,"name":"ack","f":1743438221876846,"d_finished":800258,"c":903,"l":1743438222739542,"d":800485},{"a":1743438222739589,"name":"processing","f":1743438215782721,"d_finished":3373107,"c":4515,"l":1743438222739544,"d":3373342},{"name":"ProduceResults","f":1743438215497378,"d_finished":1448605,"c":5420,"l":1743438222739743,"d":1448605},{"a":1743438222739745,"name":"Finish","f":1743438222739745,"d_finished":0,"c":0,"l":1743438222739824,"d":79},{"name":"task_result","f":1743438215782780,"d_finished":2491011,"c":3612,"l":1743438221850248,"d":2491011}],"id":"9437184::3"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-31T16:23:42.740309Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:684:2700];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-31T16:23:42.740761Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:371;event=scan_finished;compute_actor_id=[1:684:2700];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0.043},{"events":["f_ProduceResults"],"t":0.421},{"events":["l_bootstrap"],"t":0.684},{"events":["f_processing","f_task_result"],"t":0.706},{"events":["l_task_result"],"t":6.773},{"events":["f_ack"],"t":6.8},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":7.663}],"full":{"a":1743438215076374,"name":"_full_task","f":1743438215076374,"d_finished":0,"c":0,"l":1743438222740355,"d":7663981},"events":[{"name":"bootstrap","f":1743438215119603,"d_finished":641208,"c":1,"l":1743438215760811,"d":641208},{"a":1743438222739597,"name":"ack","f":1743438221876846,"d_finished":800258,"c":903,"l":1743438222739542,"d":801016},{"a":1743438222739589,"name":"processing","f":1743438215782721,"d_finished":3373107,"c":4515,"l":1743438222739544,"d":3373873},{"name":"ProduceResults","f":1743438215497378,"d_finished":1448605,"c":5420,"l":1743438222739743,"d":1448605},{"a":1743438222739745,"name":"Finish","f":1743438222739745,"d_finished":0,"c":0,"l":1743438222740355,"d":610},{"name":"task_result","f":1743438215782780,"d_finished":2491011,"c":3612,"l":1743438221850248,"d":2491011}],"id":"9437184::3"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-31T16:23:42.740851Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-31T16:23:35.026498Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=903;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=7037528;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=7037528;selected_rows=0; 2025-03-31T16:23:42.740907Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-31T16:23:42.741170Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager;
>> TestYmqHttpProxy::TestDeleteQueue [GOOD]
>> TestYmqHttpProxy::TestListDeadLetterSourceQueues
>> DataShardVolatile::DistributedWriteEarlierSnapshotNotBlocked [GOOD]
>> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenCommit+UseSink
|91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest
>> TestYmqHttpProxy::TestChangeMessageVisibility [GOOD]
>> TColumnShardTestReadWrite::CompactionInGranule_PKTimestamp [GOOD]
>> TColumnShardTestSchema::RebootHotTiersAfterTtl
>> KqpLimits::TooBigQuery+useSink [GOOD]
>> KqpLimits::TooBigQuery-useSink
>> TestYmqHttpProxy::TestChangeMessageVisibilityBatch
>> TColumnShardTestSchema::RebootExternalTTL [GOOD]
>> TestKinesisHttpProxy::BadRequestUnknownMethod [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKTimestamp [GOOD]
Test command err:
2025-03-31T16:21:29.122691Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:21:29.208799Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:21:29.226627Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:21:29.226936Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:21:29.234430Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:21:29.234677Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:21:29.234977Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:21:29.235127Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:21:29.235231Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:21:29.235330Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:21:29.235416Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:21:29.235530Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:21:29.235656Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:21:29.235762Z node 1
:TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:21:29.235909Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:21:29.236040Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:21:29.256539Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:21:29.256703Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:21:29.256745Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:21:29.256913Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:21:29.257045Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:21:29.257106Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:21:29.257142Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:21:29.257239Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:21:29.257298Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:21:29.257329Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:21:29.257350Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:21:29.257535Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:21:29.257601Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:21:29.257634Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:21:29.257655Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 
2025-03-31T16:21:29.257749Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:21:29.257797Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:21:29.257842Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:21:29.257863Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:21:29.257913Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:21:29.257940Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:21:29.257966Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:21:29.258023Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:21:29.258050Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:21:29.258074Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:21:29.258423Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=38; 2025-03-31T16:21:29.258512Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=40; 2025-03-31T16:21:29.258582Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=29; 2025-03-31T16:21:29.258649Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=30; 2025-03-31T16:21:29.258770Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:21:29.258810Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:21:29.258839Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:21:29.258995Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:21:29.259042Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:21:29.259063Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:21:29.259210Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:21:29.259245Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:21:29.259269Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-31T16:21:29.259458Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:21:29.259496Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:21:29.259530Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-31T16:21:29.259632Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T16:21:29.259671Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T16:21:29.259715Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
column_id:8;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:55;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:56;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:57;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:58;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:64;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:65;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:66;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:67;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:68;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:69;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:70;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:71;blob_range:[NO_BLOB:0:2672];;column_id:8;chunk_idx:72;blob_range:[NO_BLOB:0:2664];;column_id:8;chunk_idx:73;blob_range:[NO_BLOB:0:8448];;column_id:9;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:8;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:9;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:10;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:11;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:12;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:13;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:14;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:15;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:22;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:23;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:30;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:32;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:33;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:34;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:35;blob_range:[NO_BLOB:0:2664];;c
olumn_id:9;chunk_idx:36;blob_range:[NO_BLOB:0:8464];;column_id:9;chunk_idx:37;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:55;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:56;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:57;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:58;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:64;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:65;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:66;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:67;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:68;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:69;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:70;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:71;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:72;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:73;blob_range:[NO_BLOB:0:8448];;column_id:7;chunk_idx:0;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:1;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:2;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:3;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:4;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:5;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:6;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:7;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:8;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:9;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:10;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:11;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:12;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:13;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:14;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:15;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:16;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:17;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:18;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:19;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:20;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:21;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:22;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:23;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:24;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:25;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:26;blob_range:[NO_BLOB:0:9040];;column_id:7;chunk_idx:27;blob_range:[NO_BLOB:0:2760];;co
lumn_id:7;chunk_idx:28;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:29;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:30;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:31;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:32;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:33;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:34;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:35;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:36;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:37;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:38;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:39;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:40;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:41;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:42;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:43;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:44;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:45;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:46;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:47;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:48;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:49;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:50;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:51;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:52;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:53;blob_range:[NO_BLOB:0:9024];;column_id:5;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:8;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:9;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:10;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:11;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:12;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:13;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:14;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:15;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:16;blob_range:[NO_BLOB:0:9456];;column_id:5;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:23;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:24;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:30;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:32;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:33;blob_range:[NO_BLOB:0:9448];;;;switched=(portion_id:44;path_id:1;records_count:25002;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2167032;index_size:28;meta:((produced=INSERTED;)););(portion_id:48;path_id:1;records_count:25002;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2586528;in
dex_size:28;meta:((produced=SPLIT_COMPACTED;)););(portion_id:49;path_id:1;records_count:25002;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2167032;index_size:28;meta:((produced=INSERTED;)););;
2025-03-31T16:23:47.421323Z node 1 :TX_COLUMNSHARD INFO: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;tablet_id=9437184;parent_id=[1:5927:7919];fline=general_compaction.cpp:135;event=blobs_created;appended=1;switched=3;
2025-03-31T16:23:47.422782Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:5927:7919];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager;
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootExternalTTL [GOOD]
Test command err:
2025-03-31T16:23:30.873986Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0
2025-03-31T16:23:30.973241Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvBoot
2025-03-31T16:23:30.978122Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvRestored
2025-03-31T16:23:30.978582Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor;
2025-03-31T16:23:31.008603Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished;
2025-03-31T16:23:31.008954Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184
2025-03-31T16:23:31.018694Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0;
2025-03-31T16:23:31.018970Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules;
2025-03-31T16:23:31.020762Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks;
2025-03-31T16:23:31.020976Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner;
2025-03-31T16:23:31.021125Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId;
2025-03-31T16:23:31.021278Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup;
2025-03-31T16:23:31.021424Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer;
2025-03-31T16:23:31.021618Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks;
2025-03-31T16:23:31.021783Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks;
2025-03-31T16:23:31.021917Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks;
2025-03-31T16:23:31.022078Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2;
2025-03-31T16:23:31.022212Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks;
2025-03-31T16:23:31.053574Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvFollowerSyncComplete
2025-03-31T16:23:31.059865Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184
2025-03-31T16:23:31.060054Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules;
2025-03-31T16:23:31.060127Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules;
2025-03-31T16:23:31.060348Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found;
2025-03-31T16:23:31.060560Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1;
2025-03-31T16:23:31.060703Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks;
2025-03-31T16:23:31.060758Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks;
2025-03-31T16:23:31.060896Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found;
2025-03-31T16:23:31.060990Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2;
2025-03-31T16:23:31.061064Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner;
2025-03-31T16:23:31.061110Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner;
2025-03-31T16:23:31.061352Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found;
2025-03-31T16:23:31.061440Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4;
2025-03-31T16:23:31.061501Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId;
2025-03-31T16:23:31.061554Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId;
2025-03-31T16:23:31.061681Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found;
2025-03-31T16:23:31.061754Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6;
2025-03-31T16:23:31.061823Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup;
2025-03-31T16:23:31.061865Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup;
2025-03-31T16:23:31.061958Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8;
2025-03-31T16:23:31.062004Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer;
2025-03-31T16:23:31.062057Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer;
2025-03-31T16:23:31.062128Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9;
2025-03-31T16:23:31.062175Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks;
2025-03-31T16:23:31.062219Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks;
2025-03-31T16:23:31.062667Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=55;
2025-03-31T16:23:31.062776Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=43;
2025-03-31T16:23:31.062909Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=56;
2025-03-31T16:23:31.063031Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=51;
2025-03-31T16:23:31.063270Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10;
2025-03-31T16:23:31.063357Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks;
2025-03-31T16:23:31.063409Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks;
2025-03-31T16:23:31.063710Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11;
2025-03-31T16:23:31.063770Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks;
2025-03-31T16:23:31.063809Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks;
2025-03-31T16:23:31.064010Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13;
2025-03-31T16:23:31.064068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2;
2025-03-31T16:23:31.064104Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2;
2025-03-31T16:23:31.064313Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15;
2025-03-31T16:23:31.064360Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks;
2025-03-31T16:23:31.064393Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks;
2025-03-31T1 ...
=362872;rows=45359;faults=0;finished=0;fault=0;schema=saved_at: timestamp[us]; 2025-03-31T16:23:48.504826Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1036:3029];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:1;records_count:34641;schema=saved_at: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-31T16:23:48.504960Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1036:3029];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:1;records_count:34641;schema=saved_at: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-31T16:23:48.505000Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1036:3029];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-31T16:23:48.505047Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1036:3029];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:201;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-03-31T16:23:48.505223Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1036:3029];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-31T16:23:48.505376Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1036:3029];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:1;records_count:34641;schema=saved_at: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-31T16:23:48.505423Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1036:3029];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-31T16:23:48.505546Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1036:3029];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:232;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;);columns=1;rows=34641; 2025-03-31T16:23:48.505613Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:1036:3029];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:252;stage=data_format;batch_size=277128;num_rows=34641;batch_columns=saved_at; 2025-03-31T16:23:48.505775Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1036:3029];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:1031:3024];bytes=277128;rows=34641;faults=0;finished=0;fault=0;schema=saved_at: timestamp[us]; 2025-03-31T16:23:48.505913Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1036:3029];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-31T16:23:48.506020Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1036:3029];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-31T16:23:48.506136Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1036:3029];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-31T16:23:48.506314Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1036:3029];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-31T16:23:48.506423Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1036:3029];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-31T16:23:48.506522Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1036:3029];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-31T16:23:48.506575Z node 1 :TX_COLUMNSHARD_SCAN 
DEBUG: Scan [1:1036:3029] finished for tablet 9437184 2025-03-31T16:23:48.507147Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:1036:3029];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:416;event=scan_finish;compute_actor_id=[1:1031:3024];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.003},{"events":["f_processing","f_task_result"],"t":0.005},{"events":["l_task_result"],"t":0.558},{"events":["f_ack"],"t":0.559},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.561}],"full":{"a":1743438227944892,"name":"_full_task","f":1743438227944892,"d_finished":0,"c":0,"l":1743438228506650,"d":561758},"events":[{"name":"bootstrap","f":1743438227945364,"d_finished":3061,"c":1,"l":1743438227948425,"d":3061},{"a":1743438228506290,"name":"ack","f":1743438228504054,"d_finished":2000,"c":2,"l":1743438228506173,"d":2360},{"a":1743438228506273,"name":"processing","f":1743438227950854,"d_finished":130499,"c":18,"l":1743438228506177,"d":130876},{"name":"ProduceResults","f":1743438227946755,"d_finished":5121,"c":22,"l":1743438228506552,"d":5121},{"a":1743438228506557,"name":"Finish","f":1743438228506557,"d_finished":0,"c":0,"l":1743438228506650,"d":93},{"name":"task_result","f":1743438227950888,"d_finished":127978,"c":16,"l":1743438228503795,"d":127978}],"id":"9437184::3"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-31T16:23:48.507262Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1036:3029];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:1031:3024];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-31T16:23:48.507737Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:1036:3029];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:371;event=scan_finished;compute_actor_id=[1:1031:3024];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.003},{"events":["f_processing","f_task_result"],"t":0.005},{"events":["l_task_result"],"t":0.558},{"events":["f_ack"],"t":0.559},{"events":["l_ProduceResults","f_Finish"],"t":0.561},{"events":["l_ack","l_processing","l_Finish"],"t":0.562}],"full":{"a":1743438227944892,"name":"_full_task","f":1743438227944892,"d_finished":0,"c":0,"l":1743438228507310,"d":562418},"events":[{"name":"bootstrap","f":1743438227945364,"d_finished":3061,"c":1,"l":1743438227948425,"d":3061},{"a":1743438228506290,"name":"ack","f":1743438228504054,"d_finished":2000,"c":2,"l":1743438228506173,"d":3020},{"a":1743438228506273,"name":"processing","f":1743438227950854,"d_finished":130499,"c":18,"l":1743438228506177,"d":131536},{"name":"ProduceResults","f":1743438227946755,"d_finished":5121,"c":22,"l":1743438228506552,"d":5121},{"a":1743438228506557,"name":"Finish","f":1743438228506557,"d_finished":0,"c":0,"l":1743438228507310,"d":753},{"name":"task_result","f":1743438227950888,"d_finished":127978,"c":16,"l":1743438228503795,"d":127978}],"id":"9437184::3"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-31T16:23:48.507835Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1036:3029];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-31T16:23:47.943984Z;index_granules=0;index_portions=2;index_batches=1720;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=5265968;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=5265968;selected_rows=0; 2025-03-31T16:23:48.507883Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1036:3029];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-31T16:23:48.508189Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:1036:3029];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; >> HttpProxyInsideYdb::TestIfEnvVariableSet [GOOD] >> TColumnShardTestSchema::RebootForgetWithLostAnswer ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> HttpProxyInsideYdb::TestIfEnvVariableSet [GOOD] Test command err: 2025-03-31T16:23:00.845136Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488009965734389819:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:23:00.845845Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000f96/r3tmp/tmprNg6dM/pdisk_1.dat 2025-03-31T16:23:01.232231Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:23:01.262395Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:23:01.263248Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:23:01.273375Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3073, node 1 2025-03-31T16:23:01.419489Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:23:01.419552Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:23:01.419568Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:23:01.419731Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27731 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:23:01.908164Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:27731 waiting... 2025-03-31T16:23:02.131349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:23:02.136435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:23:02.151470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:23:02.296066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-03-31T16:23:02.339308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:23:02.382651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:23:02.418550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:23:02.453537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:23:02.491179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:23:02.519508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:23:02.551268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:23:02.583821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:23:03.710792Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009978619293142:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:23:03.710799Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009978619293154:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:23:03.710900Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:23:03.717801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480 2025-03-31T16:23:03.728180Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488009978619293156:2378], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2025-03-31T16:23:03.829062Z node 1 :TX_PROXY ERROR: Actor# [1:7488009978619293207:2913] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 18], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:23:04.568057Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01jqpg6z8r719r3rasp59aczvr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2I1MTBkOGEtZGZjYTIxY2ItNmE1YWFmODMtZWFjNTQyNGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:23:04.642076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:23:04.672023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:23:04.699599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:23:04.728501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:23:04.756455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:23:04.781688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:23:04.810725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:23:04.844790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:23:04.876799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:23:04.917167Z node 1 :HTTP INFO: Listening on http://127.0.0.1:61254 2025-03-31T16:23:05.843967Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488009965734389819:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:23:05.844053Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:23:05.919066Z node 1 :SQS INFO: Start SQS proxy service actor 2025-03-31T16:23:05.920120Z node 1 :SQS INFO: Start SQS service actor 2025-03-31T16:23:05.920257Z node 1 :SQS DEBUG: SQS service config: { EnableSqs: true YandexCloudMode: true EnableDeadLetterQueues: true } 2025-03-31T16:23:05.922351Z node 1 :SQS DEBUG: Enable scheme board scheme cache 2025-03-31T16:23:05.941704Z node 1 :SQS INFO: Request SQS users list 2025-03-31T16:23:05.941726Z node 1 :SQS NOTICE: [Node tracker] schedule describe tables after 0.000000s 2025-03-31T16:23:05.941746Z node 1 :SQS DEBUG: Request SQS queues list 2025-03-31T16:23:05.941807Z node 1 :SQS DEBUG: [Node tracker] bootstrap on node=1 2025-03-31T16:23:05.948512Z node 1 :SQS DEBUG: Request [] Starting executor actor for query(idx=GET_USER_SETTINGS_ID). Mode: COMPILE 2025-03-31T16:23:05.948536Z node 1 :SQS DEBUG: Request [] Starting executor actor for query(idx=GET_QUEUES_LIST_ID). Mode: COMPILE 2025-03-31T16:23:05.949660Z node 1 :SQS TRACE: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Compile program: ( (let fromUser (Parameter 'FROM_USER (DataType 'Utf8String))) (let fromQueue (Parameter 'FROM_QUEUE (DataType 'Utf8String))) (let batchSize (Parameter 'BATCH_SIZE (DataType 'Uint64))) (let queuesTable '/Root/SQS/.Queues) (let range '( '('Account fromUser (Void)) '('QueueName fromQueue (Void)))) (let queuesSelect '( 'Account 'Qu ... 
em { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-03-31T16:23:48.128391Z node 7 :SQS DEBUG: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Attempt 1 execution duration: 17ms 2025-03-31T16:23:48.128663Z node 7 :SQS TRACE: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976715685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-03-31T16:23:48.128706Z node 7 :SQS TRACE: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Minikql data response: {"settings": [], "truncated": false} 2025-03-31T16:23:48.128802Z node 7 :SQS DEBUG: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] execution duration: 18ms 2025-03-31T16:23:48.129145Z node 7 :SQS TRACE: Handle user settings: { Status: 48 TxId: 281474976715685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-03-31T16:23:48.134401Z node 7 :SQS TRACE: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] HandleResponse { Status: 48 TxId: 281474976715686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional 
Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-03-31T16:23:48.134441Z node 7 :SQS DEBUG: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Attempt 1 execution duration: 11ms 2025-03-31T16:23:48.134866Z node 7 :SQS TRACE: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976715686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-03-31T16:23:48.134900Z node 7 :SQS TRACE: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Minikql data response: {"queues": [], "truncated": false} 2025-03-31T16:23:48.135004Z node 7 :SQS DEBUG: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] execution duration: 12ms 2025-03-31T16:23:48.135530Z node 7 :SQS TRACE: Handle queues list: { Status: 48 TxId: 281474976715686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { 
Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-03-31T16:23:48.303070Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [7:7488010172802086666:2439]: Pool not found 2025-03-31T16:23:48.303326Z node 7 :SQS DEBUG: [monitoring] Report deletion queue data lag: 0.000000s, count: 0 2025-03-31T16:23:48.614594Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [7:7488010172802086700:2443]: Pool not found 2025-03-31T16:23:48.614760Z node 7 :SQS DEBUG: [cleanup removed queues] getting queues... 2025-03-31T16:23:48.617537Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TDatabaseFetcherActor] ActorId: [7:7488010172802086819:2462], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2025-03-31T16:23:48.617537Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488010172802086818:2461], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:23:48.617613Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:23:48.872015Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [7:7488010172802086816:2460]: Pool not found 2025-03-31T16:23:48.872383Z node 7 :SQS DEBUG: [cleanup removed queues] there are no queues to delete 2025-03-31T16:23:49.089033Z node 7 :HTTP DEBUG: (#37,[::1]:59854) incoming connection opened 2025-03-31T16:23:49.089161Z node 7 :HTTP DEBUG: (#37,[::1]:59854) -> (POST /Root) 2025-03-31T16:23:49.089306Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [78d1:de00:6050:0:60d1:de00:6050:0] request [UnknownMethodName] url [/Root] database [/Root] requestId: 35d1c55d-9b32b76b-b19f3e6-8da4d1d7 2025-03-31T16:23:49.089649Z node 7 :HTTP_PROXY INFO: http request [UnknownMethodName] requestId [35d1c55d-9b32b76b-b19f3e6-8da4d1d7] reply with status: UNSUPPORTED message: Missing method name UnknownMethodName 2025-03-31T16:23:49.089830Z node 7 :HTTP DEBUG: (#37,[::1]:59854) <- (400 InvalidAction) 2025-03-31T16:23:49.089896Z node 7 :HTTP DEBUG: (#37,[::1]:59854) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: kinesisApi.UnknownMethodName X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked 3 { } 0 2025-03-31T16:23:49.089937Z node 7 :HTTP DEBUG: (#37,[::1]:59854) Response: HTTP/1.1 400 InvalidAction Connection: close x-amzn-requestid: 35d1c55d-9b32b76b-b19f3e6-8da4d1d7 x-amz-crc32: 139748724 Content-Type: application/x-amz-json-1.1 Content-Length: 76 {"__type":"InvalidAction","message":"Missing method name UnknownMethodName"} 2025-03-31T16:23:49.090068Z node 7 :HTTP DEBUG: (#37,[::1]:59854) connection closed Http output full {"__type":"InvalidAction","message":"Missing method name UnknownMethodName"} 400 {"__type":"InvalidAction","message":"Missing method name UnknownMethodName"} >> TColumnShardTestSchema::OneTierExternalTtl [GOOD] >> TColumnShardTestSchema::EnableColdTiersAfterNoEviction >> TestYmqHttpProxy::TestListDeadLetterSourceQueues [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::OneTierExternalTtl [GOOD] Test command err: 2025-03-31T16:23:24.033540Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:23:24.124770Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:23:24.129473Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:23:24.129860Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:23:24.149514Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:23:24.149875Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:23:24.157537Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:23:24.157790Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:23:24.158056Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:23:24.158179Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:23:24.158295Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:23:24.158425Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:23:24.158558Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:23:24.158707Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:23:24.158825Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:23:24.158924Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:23:24.159062Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:23:24.159224Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:23:24.179767Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:23:24.188463Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:23:24.188637Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:23:24.188693Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:23:24.188896Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:23:24.189088Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:23:24.189214Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:23:24.189261Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:23:24.189367Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:23:24.189441Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:23:24.189517Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:23:24.189555Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:23:24.189800Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:23:24.189891Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:23:24.189945Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:23:24.189986Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:23:24.190062Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:23:24.190101Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:23:24.190129Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:23:24.190147Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:23:24.190193Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:23:24.190218Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:23:24.190236Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 
2025-03-31T16:23:24.190270Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:23:24.190296Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:23:24.190313Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:23:24.190662Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=37; 2025-03-31T16:23:24.190727Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=27; 2025-03-31T16:23:24.190815Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=47; 2025-03-31T16:23:24.190904Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=41; 2025-03-31T16:23:24.191221Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:23:24.191302Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:23:24.191340Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:23:24.191595Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:23:24.191651Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:23:24.191715Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:23:24.191888Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:23:24.191938Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:23:24.191971Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-31T16:23:24.192173Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:23:24.192220Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:23:24.192267Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-31T1 ... umn_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:23:50.731026Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:1;records_count:73;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:23:50.731060Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-31T16:23:50.731113Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:201;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-03-31T16:23:50.731221Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-31T16:23:50.731317Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:1;records_count:73;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:23:50.731352Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-31T16:23:50.731431Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:232;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;);columns=1;rows=73; 2025-03-31T16:23:50.731469Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:252;stage=data_format;batch_size=584;num_rows=73;batch_columns=timestamp; 2025-03-31T16:23:50.731594Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:673:2689];bytes=584;rows=73;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us]; 2025-03-31T16:23:50.731674Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:23:50.731787Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:23:50.731940Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:23:50.732055Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-31T16:23:50.732139Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:23:50.732222Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:23:50.732256Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:680:2696] finished for tablet 9437184 2025-03-31T16:23:50.732796Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:416;event=scan_finish;compute_actor_id=[1:673:2689];stats={"p":[{"events":["f_bootstrap"],"t":0.061},{"events":["f_ProduceResults"],"t":0.464},{"events":["l_bootstrap"],"t":0.713},{"events":["f_processing","f_task_result"],"t":0.733},{"events":["l_task_result"],"t":7.229},{"events":["f_ack"],"t":7.262},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":7.996}],"full":{"a":1743438222736107,"name":"_full_task","f":1743438222736107,"d_finished":0,"c":0,"l":1743438230732322,"d":7996215},"events":[{"name":"bootstrap","f":1743438222797195,"d_finished":651975,"c":1,"l":1743438223449170,"d":651975},{"a":1743438230732038,"name":"ack","f":1743438229998451,"d_finished":679650,"c":904,"l":1743438230731971,"d":679934},{"a":1743438230732025,"name":"processing","f":1743438223469516,"d_finished":3391535,"c":4520,"l":1743438230731973,"d":3391832},{"name":"ProduceResults","f":1743438223200417,"d_finished":1358688,"c":5426,"l":1743438230732241,"d":1358688},{"a":1743438230732244,"name":"Finish","f":1743438230732244,"d_finished":0,"c":0,"l":1743438230732322,"d":78},{"name":"task_result","f":1743438223469548,"d_finished":2630470,"c":3616,"l":1743438229965944,"d":2630470}],"id":"9437184::3"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:23:50.732890Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:673:2689];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-31T16:23:50.733391Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:371;event=scan_finished;compute_actor_id=[1:673:2689];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0.061},{"events":["f_ProduceResults"],"t":0.464},{"events":["l_bootstrap"],"t":0.713},{"events":["f_processing","f_task_result"],"t":0.733},{"events":["l_task_result"],"t":7.229},{"events":["f_ack"],"t":7.262},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":7.996}],"full":{"a":1743438222736107,"name":"_full_task","f":1743438222736107,"d_finished":0,"c":0,"l":1743438230732946,"d":7996839},"events":[{"name":"bootstrap","f":1743438222797195,"d_finished":651975,"c":1,"l":1743438223449170,"d":651975},{"a":1743438230732038,"name":"ack","f":1743438229998451,"d_finished":679650,"c":904,"l":1743438230731971,"d":680558},{"a":1743438230732025,"name":"processing","f":1743438223469516,"d_finished":3391535,"c":4520,"l":1743438230731973,"d":3392456},{"name":"ProduceResults","f":1743438223200417,"d_finished":1358688,"c":5426,"l":1743438230732241,"d":1358688},{"a":1743438230732244,"name":"Finish","f":1743438230732244,"d_finished":0,"c":0,"l":1743438230732946,"d":702},{"name":"task_result","f":1743438223469548,"d_finished":2630470,"c":3616,"l":1743438229965944,"d":2630470}],"id":"9437184::3"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:23:50.733491Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-31T16:23:42.676543Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=904;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=7049848;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=7049848;selected_rows=0; 2025-03-31T16:23:50.733547Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-31T16:23:50.733792Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> TColumnShardTestSchema::RebootHotTiersWithStat >> 
TestYmqHttpProxy::TestListQueueTags >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenCommit+UseSink [GOOD] >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenCommit-UseSink >> TColumnShardTestSchema::RebootOneTierExternalTtl [GOOD] |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> KqpPg::TableDeleteAllData-useSink [GOOD] >> KqpPg::PgUpdateCompoundKey+useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootOneTierExternalTtl [GOOD] Test command err: 2025-03-31T16:23:34.912117Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:23:35.007027Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:23:35.012112Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:23:35.012578Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:23:35.041395Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:23:35.041760Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:23:35.051450Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:23:35.051701Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:23:35.051993Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:23:35.052166Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:23:35.052289Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:23:35.052410Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:23:35.052559Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:23:35.052706Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:23:35.052858Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:23:35.052989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:23:35.053144Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:23:35.053281Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:23:35.081921Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:23:35.086030Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:23:35.086209Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:23:35.086313Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:23:35.086514Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:23:35.086669Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:23:35.086731Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:23:35.086770Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:23:35.086840Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:23:35.086885Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:23:35.086918Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:23:35.086944Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:23:35.087138Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:23:35.087201Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 
2025-03-31T16:23:35.087246Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:23:35.087275Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:23:35.087346Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:23:35.087387Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:23:35.087418Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:23:35.087437Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:23:35.087490Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:23:35.087516Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:23:35.087536Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:23:35.087569Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:23:35.087595Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:23:35.087641Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:23:35.087976Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=44; 2025-03-31T16:23:35.088076Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=40; 2025-03-31T16:23:35.088152Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=35; 2025-03-31T16:23:35.088236Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=37; 2025-03-31T16:23:35.088400Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:23:35.088455Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:23:35.088481Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:23:35.088668Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:23:35.088705Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:23:35.088724Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:23:35.088835Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:23:35.088866Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:23:35.088890Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-31T16:23:35.089029Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:23:35.089088Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:23:35.089138Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-31T1 ... 
1033:3026];bytes=350080;rows=43760;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us]; 2025-03-31T16:23:53.781023Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1038:3031];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:1;records_count:36240;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:23:53.781182Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1038:3031];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:1;records_count:36240;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:23:53.781229Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1038:3031];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-31T16:23:53.781278Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1038:3031];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:201;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-03-31T16:23:53.781459Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1038:3031];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-31T16:23:53.781617Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1038:3031];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:1;records_count:36240;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:23:53.781671Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1038:3031];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-31T16:23:53.781803Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1038:3031];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:232;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;);columns=1;rows=36240; 2025-03-31T16:23:53.781870Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:1038:3031];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:252;stage=data_format;batch_size=289920;num_rows=36240;batch_columns=timestamp; 2025-03-31T16:23:53.782058Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1038:3031];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:1033:3026];bytes=289920;rows=36240;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us]; 2025-03-31T16:23:53.782203Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1038:3031];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:23:53.782325Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1038:3031];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:23:53.782444Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1038:3031];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:23:53.782585Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1038:3031];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-31T16:23:53.782668Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1038:3031];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:23:53.782747Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1038:3031];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:23:53.782785Z node 1 
:TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1038:3031] finished for tablet 9437184 2025-03-31T16:23:53.783346Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:1038:3031];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:416;event=scan_finish;compute_actor_id=[1:1033:3026];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.005},{"events":["f_processing","f_task_result"],"t":0.008},{"events":["f_ack","l_task_result"],"t":0.788},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.791}],"full":{"a":1743438232991325,"name":"_full_task","f":1743438232991325,"d_finished":0,"c":0,"l":1743438233782851,"d":791526},"events":[{"name":"bootstrap","f":1743438232991917,"d_finished":4562,"c":1,"l":1743438232996479,"d":4562},{"a":1743438233782565,"name":"ack","f":1743438233780131,"d_finished":2225,"c":2,"l":1743438233782474,"d":2511},{"a":1743438233782550,"name":"processing","f":1743438232999356,"d_finished":243324,"c":16,"l":1743438233782477,"d":243625},{"name":"ProduceResults","f":1743438232994021,"d_finished":5422,"c":20,"l":1743438233782770,"d":5422},{"a":1743438233782773,"name":"Finish","f":1743438233782773,"d_finished":0,"c":0,"l":1743438233782851,"d":78},{"name":"task_result","f":1743438232999387,"d_finished":240599,"c":14,"l":1743438233779865,"d":240599}],"id":"9437184::3"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:23:53.783440Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1038:3031];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:1033:3026];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-31T16:23:53.783969Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:1038:3031];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:371;event=scan_finished;compute_actor_id=[1:1033:3026];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.005},{"events":["f_processing","f_task_result"],"t":0.008},{"events":["f_ack","l_task_result"],"t":0.788},{"events":["l_ProduceResults","f_Finish"],"t":0.791},{"events":["l_ack","l_processing","l_Finish"],"t":0.792}],"full":{"a":1743438232991325,"name":"_full_task","f":1743438232991325,"d_finished":0,"c":0,"l":1743438233783495,"d":792170},"events":[{"name":"bootstrap","f":1743438232991917,"d_finished":4562,"c":1,"l":1743438232996479,"d":4562},{"a":1743438233782565,"name":"ack","f":1743438233780131,"d_finished":2225,"c":2,"l":1743438233782474,"d":3155},{"a":1743438233782550,"name":"processing","f":1743438232999356,"d_finished":243324,"c":16,"l":1743438233782477,"d":244269},{"name":"ProduceResults","f":1743438232994021,"d_finished":5422,"c":20,"l":1743438233782770,"d":5422},{"a":1743438233782773,"name":"Finish","f":1743438233782773,"d_finished":0,"c":0,"l":1743438233783495,"d":722},{"name":"task_result","f":1743438232999387,"d_finished":240599,"c":14,"l":1743438233779865,"d":240599}],"id":"9437184::3"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:23:53.784064Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1038:3031];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-31T16:23:52.990039Z;index_granules=0;index_portions=2;index_batches=1721;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=5175704;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=5175704;selected_rows=0; 2025-03-31T16:23:53.784115Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1038:3031];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-31T16:23:53.784406Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:1038:3031];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TestYmqHttpProxy::TestChangeMessageVisibilityBatch [GOOD] >> TColumnShardTestSchema::ExternalTTL >> TColumnShardTestSchema::RebootExportAfterFail ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestYmqHttpProxy::TestChangeMessageVisibilityBatch [GOOD] Test command err: 2025-03-31T16:23:00.866831Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488009969737088890:2066];send_to=[0:7307199536658146131:7762515]; 
2025-03-31T16:23:00.866903Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000f93/r3tmp/tmp16Si6N/pdisk_1.dat 2025-03-31T16:23:01.244073Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:23:01.291520Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:23:01.291689Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 27177, node 1 2025-03-31T16:23:01.298308Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:23:01.419765Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:23:01.419810Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:23:01.419819Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:23:01.419942Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30813 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:23:01.906548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:30813 2025-03-31T16:23:02.099008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:23:02.105874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:23:02.128691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:23:02.278959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-03-31T16:23:02.316682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:23:02.353054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:23:02.389531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:23:02.425383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:23:02.454676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:23:02.490345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:23:02.522182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:23:02.553225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:23:03.896343Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009982621992216:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:23:03.896343Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009982621992208:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:23:03.896440Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:23:03.899475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480 2025-03-31T16:23:03.908923Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488009982621992222:2379], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2025-03-31T16:23:03.972039Z node 1 :TX_PROXY ERROR: Actor# [1:7488009982621992273:2916] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 18], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:23:04.567953Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01jqpg6zep8dwdwtxa0wcm8rmp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjAwOTI4ZWItNDliMWM5YjUtZWFhZDUyODYtNjMzYTc3MjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:23:04.642241Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:23:04.667666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:23:04.694906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:23:04.723266Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:23:04.754221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:23:04.783551Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:23:04.812830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-31T16:23:04.842448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:23:04.878626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 waiting... 
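
The NOT_FOUND warnings above are a benign startup race: the workload service looks up the default resource pool before ESchemeOpCreateResourcePool lands, retries after "completed, doublechecking", and TX_PROXY then reports the pool already exists. A sketch (match strings taken verbatim from the entries above; the helper itself is hypothetical) that confirms the race resolved:

    def pool_race_resolved(log_text: str) -> bool:
        saw_not_found = False
        for line in log_text.splitlines():
            if "Resource pool default not found" in line:
                saw_not_found = True
            # TX_PROXY reports the benign outcome: the pool already exists.
            elif saw_not_found and "error: path exist, request accepts it" in line:
                return True
        return not saw_not_found
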
2025-03-31T16:23:04.935399Z node 1 :HTTP INFO: Listening on http://127.0.0.1:65191 2025-03-31T16:23:05.866696Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488009969737088890:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:23:05.866820Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:23:05.937472Z node 1 :SQS INFO: Start SQS service actor 2025-03-31T16:23:05.937526Z node 1 :SQS INFO: Start SQS proxy service actor 2025-03-31T16:23:05.937625Z node 1 :SQS DEBUG: SQS service config: { EnableSqs: true YandexCloudMode: true EnableDeadLetterQueues: true } 2025-03-31T16:23:05.938943Z node 1 :SQS DEBUG: Enable scheme board scheme cache 2025-03-31T16:23:05.947699Z node 1 :HTTP INFO: Listening on http://[::]:3260 2025-03-31T16:23:05.956953Z node 1 :SQS NOTICE: [Node tracker] schedule describe tables after 0.000000s 2025-03-31T16:23:05.956968Z node 1 :SQS INFO: Request SQS users list 2025-03-31T16:23:05.956991Z node 1 :SQS DEBUG: Request SQS queues list 2025-03-31T16:23:05.957005Z node 1 :SQS DEBUG: [Node tracker] bootstrap on node=1 2025-03-31T16:23:05.961427Z node 1 :SQS DEBUG: Request [] Starting executor actor for query(idx=GET_QUEUES_LIST_ID). Mode: COMPILE 2025-03-31T16:23:05.961441Z node 1 :SQS DEBUG: Request [] Starting executor actor for query(idx=GET_USER_SETTINGS_ID). Mode: COMPILE 2025-03-31T16:23:05.961566Z node 1 :SQS TRACE: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Compile program: ( (let fromUser (Parameter 'FROM_USER (DataType 'Utf8String))) (let fromQueue (Parameter 'FROM_QUEUE (DataType 'Utf8String))) (let batchSize (Parameter 'BATCH_SIZE (DataType 'Uint64))) (let queuesTable '/Root/SQS/.Queues) (let range '( '('Account fromUser (Void)) '('QueueName fromQue ... ud4/000000000000000101v0/1]. ActiveMessageRequests: 1 2025-03-31T16:23:55.873049Z node 7 :SQS DEBUG: Request [1c216edb-b8ce0170-f27e54ae-9d4881a9] Sending execute request for query(idx=CHANGE_VISIBILITY_ID) to queue leader 2025-03-31T16:23:55.873088Z node 7 :SQS DEBUG: Request [1c216edb-b8ce0170-f27e54ae-9d4881a9] Executing compiled query(idx=CHANGE_VISIBILITY_ID) 2025-03-31T16:23:55.873218Z node 7 :SQS DEBUG: Request [1c216edb-b8ce0170-f27e54ae-9d4881a9] Starting executor actor for query(idx=CHANGE_VISIBILITY_ID). 
Mode: COMPILE_AND_EXEC 2025-03-31T16:23:55.873467Z node 7 :SQS TRACE: Request [1c216edb-b8ce0170-f27e54ae-9d4881a9] Query(idx=CHANGE_VISIBILITY_ID) Queue [cloud4/000000000000000101v0] Serializing params: {"QUEUE_ID_NUMBER": 2, "QUEUE_ID_NUMBER_HASH": 17472595041006102391, "SHARD": 1, "QUEUE_ID_NUMBER_AND_SHARD_HASH": 5923258363543965525, "NOW": 1743438235872, "GROUPS_READ_ATTEMPT_IDS_PERIOD": 300000, "KEYS": [{"LockTimestamp": 1743438235718, "Offset": 1, "NewVisibilityDeadline": 1743438236872}, {"LockTimestamp": 1743438235757, "Offset": 2, "NewVisibilityDeadline": 1743438237872}]} 2025-03-31T16:23:55.874142Z node 7 :SQS TRACE: Request [1c216edb-b8ce0170-f27e54ae-9d4881a9] Query(idx=CHANGE_VISIBILITY_ID) Queue [cloud4/000000000000000101v0] Execute program: { Transaction { MiniKQLTransaction { Mode: COMPILE_AND_EXEC Program { Bin: "O\034\014Exists*NewVisibilityDeadline\014Offset\006Arg\014Member\nFlags\010Name\010Args\016Payload\022Parameter\006And\032LockTimestamp$VisibilityDeadline\014Invoke\t\211\004\206\202?\000\206\202\030Extend\000\006\002?\000\t\211\004\202\203\005@\206\205\n\203\014\207\203\010\203\014\203\010?\020(ChangeConddCurrentVisibilityDeadline\002\006\n$SetResult\000\003?\006\014result\t\211\006?\024\206\205\006?\020?\020?\020.\006\n?\032?\0220MapParameter\000\t\351\000?\034\005\205\004\206\205\004\203\010\203\005@\026\032\203\005@\036\"\006\000?&\003?(\010KEYS\003&\000\t\251\000?\032\016\000\005?\022\t\211\004?\010\207\203\014?\010 Coalesce\000\t\211\004?<\207\203\014\207\203\014*\000\t\211\006?B\203\005@\203\010?\0146\000\003?J\026LessOrEqual\t\351\000?L\005\205\004\206\205\004\203\010\203\005@\026\032\203\005@\036\"\006\000?X\003?Z\006NOW\003&\000\t\211\004?\014\207\205\004\207\203\010?\014.2\203\004\022\000\t\211\n?n\203\005\004\200\205\004\203\004\203\004.2\213\010\203\010\203\010\203\004?\020\203\004$SelectRow\000\003?t \000\001\205\000\000\000\000\001\030\000\000\000\000\000\000\000?l\005?z\003?v\020\003?x\026\003\013?\202\t\351\000?|\005\205\004\206\205\004\203\010\203\005@\026\032\203\005@\036\"\006\000?\226\003?\230> TColumnShardTestSchema::RebootInternalTTL [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootInternalTTL [GOOD] Test command err: 2025-03-31T16:23:40.928750Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:23:41.017567Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:23:41.021835Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:23:41.022258Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:23:41.048316Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:23:41.048654Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:23:41.057919Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:23:41.058156Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:23:41.058429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:23:41.058568Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:23:41.058683Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:23:41.058814Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:23:41.058965Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:23:41.059141Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:23:41.059287Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:23:41.059408Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:23:41.059551Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:23:41.059673Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:23:41.088622Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:23:41.092627Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:23:41.092809Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:23:41.092864Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:23:41.093062Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:23:41.093228Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:23:41.093347Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:23:41.093395Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:23:41.093515Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:23:41.093606Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:23:41.093654Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:23:41.093690Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:23:41.093906Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:23:41.094005Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:23:41.094083Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:23:41.094120Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:23:41.094231Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:23:41.094295Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:23:41.094348Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:23:41.094381Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:23:41.094460Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:23:41.094502Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:23:41.094536Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 
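
The TTxUpdateSchema trace above walks a fixed normalizer chain (Granules, Chunks, TablesCleaner, CleanGranuleId, ...), emitting normalizer_switched when it advances and normalizer_finished when a stage completes. A small consistency check (hypothetical, built on the event strings visible above) that every normalizer switched to also finished:

    import re

    SWITCH_RE = re.compile(r"event=normalizer_switched;description=CLASS_NAME=(\w+)")
    FINISH_RE = re.compile(r"event=normalizer_finished;description=CLASS_NAME=(\w+)")

    def unfinished_normalizers(log_text: str) -> set:
        started = set(SWITCH_RE.findall(log_text))
        finished = set(FINISH_RE.findall(log_text))
        # An empty result means every normalizer the shard switched to also
        # reported normalizer_finished, as in the trace above.
        return started - finished
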
2025-03-31T16:23:41.094593Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:23:41.094637Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:23:41.094668Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:23:41.095147Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=96; 2025-03-31T16:23:41.095250Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=41; 2025-03-31T16:23:41.095354Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=49; 2025-03-31T16:23:41.095471Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=42; 2025-03-31T16:23:41.095706Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:23:41.095775Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:23:41.095827Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:23:41.096047Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:23:41.096100Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:23:41.096134Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:23:41.096309Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:23:41.096359Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:23:41.096394Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-31T16:23:41.096659Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:23:41.096709Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:23:41.096740Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-31T1 ... send_data;compute_actor_id=[1:1031:3024];bytes=362872;rows=45359;faults=0;finished=0;fault=0;schema=saved_at: timestamp[us]; 2025-03-31T16:23:58.103168Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1036:3029];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:1;records_count:34641;schema=saved_at: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-31T16:23:58.103333Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1036:3029];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:1;records_count:34641;schema=saved_at: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-31T16:23:58.103376Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1036:3029];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-31T16:23:58.103425Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1036:3029];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:201;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-03-31T16:23:58.103592Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1036:3029];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-31T16:23:58.103743Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1036:3029];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:1;records_count:34641;schema=saved_at: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-31T16:23:58.103795Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1036:3029];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-31T16:23:58.103925Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1036:3029];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:232;stage=ready 
result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;);columns=1;rows=34641; 2025-03-31T16:23:58.103990Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1036:3029];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:252;stage=data_format;batch_size=277128;num_rows=34641;batch_columns=saved_at; 2025-03-31T16:23:58.104164Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1036:3029];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:1031:3024];bytes=277128;rows=34641;faults=0;finished=0;fault=0;schema=saved_at: timestamp[us]; 2025-03-31T16:23:58.104310Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1036:3029];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-31T16:23:58.104430Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1036:3029];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-31T16:23:58.104548Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1036:3029];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-31T16:23:58.104680Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1036:3029];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-31T16:23:58.104756Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1036:3029];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-31T16:23:58.104821Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1036:3029];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-31T16:23:58.104854Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1036:3029] finished for tablet 9437184 2025-03-31T16:23:58.105379Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:1036:3029];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:416;event=scan_finish;compute_actor_id=[1:1031:3024];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.004},{"events":["f_processing","f_task_result"],"t":0.007},{"events":["f_ack","l_task_result"],"t":0.638},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.641}],"full":{"a":1743438237463479,"name":"_full_task","f":1743438237463479,"d_finished":0,"c":0,"l":1743438238104919,"d":641440},"events":[{"name":"bootstrap","f":1743438237464080,"d_finished":4058,"c":1,"l":1743438237468138,"d":4058},{"a":1743438238104661,"name":"ack","f":1743438238102327,"d_finished":2134,"c":2,"l":1743438238104576,"d":2392},{"a":1743438238104648,"name":"processing","f":1743438237470914,"d_finished":158811,"c":18,"l":1743438238104580,"d":159082},{"name":"ProduceResults","f":1743438237466058,"d_finished":5462,"c":22,"l":1743438238104838,"d":5462},{"a":1743438238104842,"name":"Finish","f":1743438238104842,"d_finished":0,"c":0,"l":1743438238104919,"d":77},{"name":"task_result","f":1743438237470942,"d_finished":156169,"c":16,"l":1743438238102091,"d":156169}],"id":"9437184::3"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-31T16:23:58.105471Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1036:3029];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:1031:3024];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-31T16:23:58.105961Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:1036:3029];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:371;event=scan_finished;compute_actor_id=[1:1031:3024];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.004},{"events":["f_processing","f_task_result"],"t":0.007},{"events":["f_ack","l_task_result"],"t":0.638},{"events":["l_ProduceResults","f_Finish"],"t":0.641},{"events":["l_ack","l_processing","l_Finish"],"t":0.642}],"full":{"a":1743438237463479,"name":"_full_task","f":1743438237463479,"d_finished":0,"c":0,"l":1743438238105522,"d":642043},"events":[{"name":"bootstrap","f":1743438237464080,"d_finished":4058,"c":1,"l":1743438237468138,"d":4058},{"a":1743438238104661,"name":"ack","f":1743438238102327,"d_finished":2134,"c":2,"l":1743438238104576,"d":2995},{"a":1743438238104648,"name":"processing","f":1743438237470914,"d_finished":158811,"c":18,"l":1743438238104580,"d":159685},{"name":"ProduceResults","f":1743438237466058,"d_finished":5462,"c":22,"l":1743438238104838,"d":5462},{"a":1743438238104842,"name":"Finish","f":1743438238104842,"d_finished":0,"c":0,"l":1743438238105522,"d":680},{"name":"task_result","f":1743438237470942,"d_finished":156169,"c":16,"l":1743438238102091,"d":156169}],"id":"9437184::3"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-31T16:23:58.106074Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1036:3029];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-31T16:23:57.462239Z;index_granules=0;index_portions=2;index_batches=1720;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=5265968;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=5265968;selected_rows=0; 2025-03-31T16:23:58.106123Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1036:3029];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-31T16:23:58.106441Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:1036:3029];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; >> TColumnShardTestSchema::InternalTTL_Types >> TestYmqHttpProxy::TestListQueueTags [GOOD] >> TColumnShardTestSchema::Drop >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenCommit-UseSink [GOOD] >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenAbort >> PgCatalog::CheckSetConfig [GOOD] >> PgCatalog::PgDatabase+useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestYmqHttpProxy::TestListQueueTags [GOOD] Test command err: 2025-03-31T16:23:00.845034Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488009967567291220:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:23:00.845716Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000f9e/r3tmp/tmpFfc3Iu/pdisk_1.dat 2025-03-31T16:23:01.231029Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:23:01.287611Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:23:01.287716Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:23:01.289798Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9104, node 1 2025-03-31T16:23:01.419599Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:23:01.419639Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:23:01.419652Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:23:01.419802Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3429 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:23:01.907117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:3429 2025-03-31T16:23:02.116351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:23:02.123007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:23:02.137580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
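
Each "Test command err" block repeats the same bootstrap sequence, so the quickest way to compare them is the wall-clock span between their first and last entries. A sketch (hypothetical helper for logs of this shape) using the microsecond-precision UTC stamps that prefix each entry:

    import re
    from datetime import datetime

    TS_RE = re.compile(r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{6}Z")

    def block_span_seconds(block: str) -> float:
        # Elapsed seconds between the earliest and latest timestamps in a block.
        stamps = [datetime.strptime(ts, "%Y-%m-%dT%H:%M:%S.%fZ")
                  for ts in TS_RE.findall(block)]
        if len(stamps) < 2:
            return 0.0
        return (max(stamps) - min(stamps)).total_seconds()
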
2025-03-31T16:23:02.256573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:23:02.295738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:23:02.339687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:23:02.393384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:23:02.425384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:23:02.456608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:23:02.483798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-03-31T16:23:02.514596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:23:02.543152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:23:04.001242Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009980452194560:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:23:04.001243Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009980452194550:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:23:04.001370Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:23:04.005012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480 2025-03-31T16:23:04.014586Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488009984747161860:2379], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2025-03-31T16:23:04.084725Z node 1 :TX_PROXY ERROR: Actor# [1:7488009984747161911:2916] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 18], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:23:04.567916Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01jqpg6zhzff2k2q00vpjaj7n1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDUzOTAxZTgtZWYwYzM4ZC04ZTkwMDMwNy05NDQ0YzhkYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:23:04.642757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:23:04.670592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:23:04.699525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:23:04.726608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:23:04.758266Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:23:04.790325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:23:04.818765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:23:04.848962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:23:04.884745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 waiting... 
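
Within one bootstrap block the schemeshard opIds (281474976710657 through ...684 above) advance monotonically; the counter restarts when the next test binary brings up its own schemeshard, so any such check only makes sense per block. A hypothetical per-block sanity check:

    import re

    OPID_RE = re.compile(r"opId: (\d+):\d+")

    def opids_nondecreasing(block: str) -> bool:
        # Apply to a single "Test command err" block, not the whole run.
        ids = [int(x) for x in OPID_RE.findall(block)]
        return all(a <= b for a, b in zip(ids, ids[1:]))
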
2025-03-31T16:23:04.937993Z node 1 :HTTP INFO: Listening on http://127.0.0.1:21094 2025-03-31T16:23:05.844199Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488009967567291220:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:23:05.844301Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:23:05.940600Z node 1 :SQS INFO: Start SQS service actor 2025-03-31T16:23:05.940649Z node 1 :SQS INFO: Start SQS proxy service actor 2025-03-31T16:23:05.940728Z node 1 :SQS DEBUG: SQS service config: { EnableSqs: true YandexCloudMode: true EnableDeadLetterQueues: true } 2025-03-31T16:23:05.941815Z node 1 :SQS DEBUG: Enable scheme board scheme cache 2025-03-31T16:23:05.947699Z node 1 :HTTP INFO: Listening on http://[::]:14762 2025-03-31T16:23:05.958131Z node 1 :SQS NOTICE: [Node tracker] schedule describe tables after 0.000000s 2025-03-31T16:23:05.958140Z node 1 :SQS INFO: Request SQS users list 2025-03-31T16:23:05.958163Z node 1 :SQS DEBUG: Request SQS queues list 2025-03-31T16:23:05.958177Z node 1 :SQS DEBUG: [Node tracker] bootstrap on node=1 2025-03-31T16:23:05.962896Z node 1 :SQS DEBUG: Request [] Starting executor actor for query(idx=GET_USER_SETTINGS_ID). Mode: COMPILE 2025-03-31T16:23:05.962919Z node 1 :SQS DEBUG: Request [] Starting executor actor for query(idx=GET_QUEUES_LIST_ID). Mode: COMPILE 2025-03-31T16:23:05.963029Z node 1 :SQS TRACE: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Compile program: ( (let fromUser (Parameter 'FROM_USER (DataType 'Utf8String))) (let fromQueue (Parameter 'FROM_QUEUE (DataType 'Utf8String))) (let batchSize (Parameter 'BATCH_SIZE (DataType 'Uint64))) (let queuesTable '/Root/SQS/.Queues) (let range '( '('Account fromUser (Void)) '('QueueName fromQueue ... 
ional Optional { Item { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 3 } } } } } } } Member { Name: "inflyVersion" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "messageCount" Type { Kind: Optional Optional { Item { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 3 } } } } } } } Member { Name: "readOffset" Type { Kind: Optional Optional { Item { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } Value { Struct { Optional { Optional { Uint64: 1743438239306 } } } Struct { Optional { } } Struct { Optional { Optional { Int64: 0 } } } Struct { Optional { Uint64: 0 } } Struct { Optional { Optional { Int64: 0 } } } Struct { Optional { Optional { Uint64: 0 } } } } } } 2025-03-31T16:23:59.568380Z node 7 :SQS DEBUG: Infly version for shard [cloud4/000000000000000101v0/2]: 0 2025-03-31T16:23:59.568419Z node 7 :SQS DEBUG: Request [] Sending executed reply 2025-03-31T16:23:59.568773Z node 7 :SQS TRACE: Infly load reply for shard [cloud4/000000000000000101v0/0]: { Status: 48 TxId: 281474976715714 Step: 1743438239606 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "createdTimestamp" Type { Kind: Optional Optional { Item { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } Member { Name: "infly" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "DelayDeadline" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Offset" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "RandomId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "ReceiveCount" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "VisibilityDeadline" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "inflyCount" Type { Kind: Optional Optional { Item { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 3 } } } } } } } Member { Name: "inflyVersion" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "messageCount" Type { Kind: Optional Optional { Item { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 3 } } } } } } } Member { Name: "readOffset" Type { Kind: Optional Optional { Item { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } Value { Struct { Optional { Optional { Uint64: 1743438239306 } } } Struct { Optional { } } Struct { Optional { Optional { Int64: 0 } } } Struct { Optional { Uint64: 0 } } Struct { Optional { Optional { Int64: 0 } } } Struct { Optional { Optional { Uint64: 0 } } } } } } 2025-03-31T16:23:59.568787Z node 7 :SQS DEBUG: Infly version for shard [cloud4/000000000000000101v0/0]: 0 2025-03-31T16:23:59.573805Z node 7 :SQS TRACE: Request [79139425-6589bac-d904e082-1fb46d0] Query(idx=INTERNAL_GET_QUEUE_ATTRIBUTES_ID) Queue [cloud4/000000000000000301v0] HandleResponse { Status: 48 TxId: 281474976715716 Step: 1743438239620 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "attrs" Type { Kind: Optional Optional { Item { Kind: Optional Optional { Item { Kind: Struct Struct { Member { Name: "ContentBasedDeduplication" Type { Kind: Optional 
Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "DelaySeconds" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "DlqArn" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "MaxReceiveCount" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "MaximumMessageSize" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "MessageRetentionPeriod" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "ReceiveMessageWaitTime" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "ShowDetailedCountersDeadline" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "VisibilityTimeout" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "queueExists" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "tags" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } Value { Struct { Optional { Optional { Struct { Optional { Bool: false } } Struct { Optional { Uint64: 0 } } Struct { Optional { Text: "" } } Struct { Optional { Text: "" } } Struct { Optional { Bool: true } } Struct { Optional { Uint64: 0 } } Struct { Optional { Uint64: 262144 } } Struct { Optional { Uint64: 345600000 } } Struct { Optional { Uint64: 0 } } Struct { } Struct { Optional { Uint64: 30000 } } } } } Struct { Optional { Bool: true } } Struct { Optional { Text: "{}" } } } } } 2025-03-31T16:23:59.573846Z node 7 :SQS DEBUG: Request [79139425-6589bac-d904e082-1fb46d0] Query(idx=INTERNAL_GET_QUEUE_ATTRIBUTES_ID) Queue [cloud4/000000000000000301v0] Attempt 1 execution duration: 18ms 2025-03-31T16:23:59.574347Z node 7 :SQS TRACE: Request [79139425-6589bac-d904e082-1fb46d0] Query(idx=INTERNAL_GET_QUEUE_ATTRIBUTES_ID) Queue [cloud4/000000000000000301v0] Sending mkql execution result: { Status: 48 TxId: 281474976715716 Step: 1743438239620 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "attrs" Type { Kind: Optional Optional { Item { Kind: Optional Optional { Item { Kind: Struct Struct { Member { Name: "ContentBasedDeduplication" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "DelaySeconds" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "DlqArn" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "MaxReceiveCount" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "MaximumMessageSize" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "MessageRetentionPeriod" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "ReceiveMessageWaitTime" Type { 
Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "ShowDetailedCountersDeadline" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "VisibilityTimeout" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "queueExists" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "tags" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } Value { Struct { Optional { Optional { Struct { Optional { Bool: false } } Struct { Optional { Uint64: 0 } } Struct { Optional { Text: "" } } Struct { Optional { Text: "" } } Struct { Optional { Bool: true } } Struct { Optional { Uint64: 0 } } Struct { Optional { Uint64: 262144 } } Struct { Optional { Uint64: 345600000 } } Struct { Optional { Uint64: 0 } } Struct { } Struct { Optional { Uint64: 30000 } } } } } Struct { Optional { Bool: true } } Struct { Optional { Text: "{}" } } } } } 2025-03-31T16:23:59.574442Z node 7 :SQS TRACE: Request [79139425-6589bac-d904e082-1fb46d0] Query(idx=INTERNAL_GET_QUEUE_ATTRIBUTES_ID) Queue [cloud4/000000000000000301v0] Minikql data response: {"attrs": {"ContentBasedDeduplication": false, "DelaySeconds": 0, "DlqArn": "", "DlqName": "", "FifoQueue": true, "MaxReceiveCount": 0, "MaximumMessageSize": 262144, "MessageRetentionPeriod": 345600000, "ReceiveMessageWaitTime": 0, "ShowDetailedCountersDeadline": null, "VisibilityTimeout": 30000}, "queueExists": true, "tags": "{}"} 2025-03-31T16:23:59.574593Z node 7 :SQS DEBUG: Request [79139425-6589bac-d904e082-1fb46d0] Query(idx=INTERNAL_GET_QUEUE_ATTRIBUTES_ID) Queue [cloud4/000000000000000301v0] execution duration: 20ms 2025-03-31T16:23:59.574609Z node 7 :SQS DEBUG: Request [79139425-6589bac-d904e082-1fb46d0] Sending executed reply 2025-03-31T16:23:59.574943Z node 7 :SQS DEBUG: Request [386e3917-d794858b-57282873-aab62beb] Get configuration duration: 11ms 2025-03-31T16:23:59.574969Z node 7 :SQS TRACE: Request [386e3917-d794858b-57282873-aab62beb] Got configuration. Root url: http://ghrun-tuhoqst4xe.auto.internal:8771, Shards: 1, Fail: 0 2025-03-31T16:23:59.574989Z node 7 :SQS TRACE: Request [386e3917-d794858b-57282873-aab62beb] DoRoutine 2025-03-31T16:23:59.575053Z node 7 :SQS TRACE: Request [386e3917-d794858b-57282873-aab62beb] SendReplyAndDie from action actor { ListQueueTags { RequestId: "386e3917-d794858b-57282873-aab62beb" } } 2025-03-31T16:23:59.575148Z node 7 :SQS TRACE: Request [386e3917-d794858b-57282873-aab62beb] Sending sqs response: { ListQueueTags { RequestId: "386e3917-d794858b-57282873-aab62beb" } RequestId: "386e3917-d794858b-57282873-aab62beb" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true } 2025-03-31T16:23:59.575228Z node 7 :SQS TRACE: Dec local leader ref for actor [7:7488010220129279637:3795]. 
Found: 1 2025-03-31T16:23:59.575248Z node 7 :SQS TRACE: HandleSqsResponse ListQueueTags { RequestId: "386e3917-d794858b-57282873-aab62beb" } RequestId: "386e3917-d794858b-57282873-aab62beb" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true 2025-03-31T16:23:59.575297Z node 7 :SQS TRACE: Sending answer to proxy actor [7:7488010220129279627:2559]: ListQueueTags { RequestId: "386e3917-d794858b-57282873-aab62beb" } RequestId: "386e3917-d794858b-57282873-aab62beb" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true 2025-03-31T16:23:59.575495Z node 7 :SQS TRACE: Request [386e3917-d794858b-57282873-aab62beb] HandleResponse: { ListQueueTags { RequestId: "386e3917-d794858b-57282873-aab62beb" } RequestId: "386e3917-d794858b-57282873-aab62beb" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true }, status: OK 2025-03-31T16:23:59.575584Z node 7 :SQS DEBUG: Request [386e3917-d794858b-57282873-aab62beb] Sending reply from proxy actor: { ListQueueTags { RequestId: "386e3917-d794858b-57282873-aab62beb" } RequestId: "386e3917-d794858b-57282873-aab62beb" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true } 2025-03-31T16:23:59.575719Z node 7 :HTTP_PROXY DEBUG: http request [ListQueueTags] requestId [386e3917-d794858b-57282873-aab62beb] Got succesfult GRPC response. 2025-03-31T16:23:59.575763Z node 7 :HTTP_PROXY INFO: http request [ListQueueTags] requestId [386e3917-d794858b-57282873-aab62beb] reply ok 2025-03-31T16:23:59.575835Z node 7 :HTTP_PROXY DEBUG: http request [ListQueueTags] requestId [386e3917-d794858b-57282873-aab62beb] Send metering event. HttpStatusCode: 200 IsFifo: 1 FolderId: folder4 RequestSizeInBytes: 530 ResponseSizeInBytes: 179 SourceAddress: 386c:3c00:6050:0:206c:3c00:6050:0 ResourceId: 000000000000000301v0 Action: ListQueueTags 2025-03-31T16:23:59.575921Z node 7 :HTTP DEBUG: (#37,[::1]:48178) <- (200 ) 2025-03-31T16:23:59.576060Z node 7 :HTTP DEBUG: (#37,[::1]:48178) connection closed Http output full {} |91.2%| [TA] $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/test-results/unittest/{meta.json ... results_accumulator.log} |91.2%| [TA] {RESULT} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpPg::PgUpdateCompoundKey+useSink [GOOD] >> KqpPg::PgUpdateCompoundKey-useSink |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootHotTiersTtlWithStat >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenAbort [GOOD] >> DataShardVolatile::DistributedWriteAsymmetricExecute >> TSchemeShardTTLTestsWithReboots::CreateTable [GOOD] >> PgCatalog::PgDatabase+useSink [GOOD] >> PgCatalog::PgDatabase-useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTestsWithReboots::CreateTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [1:135:2157] sender: [1:136:2058] recipient: [1:111:2142] 2025-03-31T16:23:04.023501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:23:04.023581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:23:04.023609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:23:04.023634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:23:04.023664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:23:04.023719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:23:04.023772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:23:04.023838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:23:04.024113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:23:04.093133Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: 
"Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-03-31T16:23:04.093182Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:169:2058] recipient: [1:15:2062] 2025-03-31T16:23:04.116980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:23:04.117160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:23:04.117343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:23:04.122029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:23:04.122318Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:23:04.122979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:23:04.123260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:23:04.128365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:23:04.130007Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:23:04.130073Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:23:04.130157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:23:04.130200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:23:04.130247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:23:04.130410Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:210:2058] recipient: [1:208:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:210:2058] recipient: [1:208:2209] Leader for TabletID 72057594037968897 is [1:214:2213] sender: [1:215:2058] recipient: [1:208:2209] 2025-03-31T16:23:04.137198Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:235:2058] recipient: [1:15:2062] 2025-03-31T16:23:04.287425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:23:04.287673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:23:04.287920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:23:04.288217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:23:04.288275Z node 1 
:FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:23:04.290925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:23:04.291065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:23:04.291313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:23:04.291400Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:23:04.291438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:23:04.291469Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:23:04.293600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:23:04.293673Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:23:04.293717Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:23:04.295782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:23:04.295840Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:23:04.295878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:23:04.295935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:23:04.300042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:23:04.302205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:23:04.302369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:135:2157] sender: [1:250:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:23:04.303506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:23:04.303654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 135 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:23:04.303702Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:23:04.303991Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:23:04.304050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:23:04.304226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:23:04.304304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:23:04.306615Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:23:04.306679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:23:04.306864Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:23:04.306900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:202:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:23:04.307315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:23:04.307370Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:23:04.307465Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:23:04.307513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:23:04.307562Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025 ... 
lPathId: 3] 2025-03-31T16:24:09.139499Z node 72 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:24:09.139547Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [72:205:2207], at schemeshard: 72057594046678944, txId: 1002, path id: 1 2025-03-31T16:24:09.139599Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [72:205:2207], at schemeshard: 72057594046678944, txId: 1002, path id: 3 2025-03-31T16:24:09.140439Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2025-03-31T16:24:09.140506Z node 72 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1002:0 ProgressState at tablet: 72057594046678944 2025-03-31T16:24:09.142199Z node 72 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 1002 2025-03-31T16:24:09.142311Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 1002 2025-03-31T16:24:09.142349Z node 72 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1002 2025-03-31T16:24:09.142392Z node 72 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-03-31T16:24:09.142435Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 FAKE_COORDINATOR: Erasing txId 1002 2025-03-31T16:24:09.144743Z node 72 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 1002 2025-03-31T16:24:09.144864Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 1002 2025-03-31T16:24:09.144922Z node 72 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1002 2025-03-31T16:24:09.144966Z node 72 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-03-31T16:24:09.145008Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-03-31T16:24:09.145113Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1002, ready parts: 0/1, is published: true 2025-03-31T16:24:09.147208Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1002 Step: 5000003 OrderId: 1002 ExecLatency: 1 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1287 } } 2025-03-31T16:24:09.147274Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1002, tablet: 72075186233409546, partId: 0 2025-03-31T16:24:09.147437Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationReply execute, operationId: 1002:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1002 Step: 5000003 OrderId: 1002 ExecLatency: 1 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1287 } } 2025-03-31T16:24:09.147547Z node 72 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1002 Step: 5000003 OrderId: 1002 ExecLatency: 1 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1287 } } 2025-03-31T16:24:09.148331Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 332 RawX2: 309237647631 } Origin: 72075186233409546 State: 2 TxId: 1002 Step: 0 Generation: 2 2025-03-31T16:24:09.148382Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1002, tablet: 72075186233409546, partId: 0 2025-03-31T16:24:09.148509Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1002:0, at schemeshard: 72057594046678944, message: Source { RawX1: 332 RawX2: 309237647631 } Origin: 72075186233409546 State: 2 TxId: 1002 Step: 0 Generation: 2 2025-03-31T16:24:09.148559Z node 72 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1002:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-03-31T16:24:09.148648Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1002:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 332 RawX2: 309237647631 } Origin: 72075186233409546 State: 2 TxId: 1002 Step: 0 Generation: 2 2025-03-31T16:24:09.148705Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1002:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-31T16:24:09.148744Z node 72 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1002:0, at schemeshard: 72057594046678944 2025-03-31T16:24:09.148783Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1002:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-03-31T16:24:09.148823Z node 72 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1002:0 129 -> 240 2025-03-31T16:24:09.153780Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2025-03-31T16:24:09.154067Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2025-03-31T16:24:09.154821Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1002:0, at schemeshard: 72057594046678944 2025-03-31T16:24:09.155013Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1002:0, at schemeshard: 72057594046678944 2025-03-31T16:24:09.155450Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2025-03-31T16:24:09.155504Z node 72 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1002:0 ProgressState 
2025-03-31T16:24:09.155616Z node 72 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1002:0 progress is 1/1 2025-03-31T16:24:09.155655Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2025-03-31T16:24:09.155701Z node 72 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1002:0 progress is 1/1 2025-03-31T16:24:09.155742Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2025-03-31T16:24:09.155782Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1002, ready parts: 1/1, is published: true 2025-03-31T16:24:09.155827Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2025-03-31T16:24:09.155870Z node 72 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1002:0 2025-03-31T16:24:09.155907Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1002:0 2025-03-31T16:24:09.156040Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 TestModificationResult got TxId: 1002, wait until txId: 1002 TestWaitNotification wait txId: 1002 2025-03-31T16:24:09.159534Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: send EvNotifyTxCompletion 2025-03-31T16:24:09.159596Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1002 2025-03-31T16:24:09.159982Z node 72 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1002, at schemeshard: 72057594046678944 2025-03-31T16:24:09.160085Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2025-03-31T16:24:09.160124Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [72:408:2381] TestWaitNotification: OK eventTxId 1002 2025-03-31T16:24:09.160597Z node 72 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:24:09.160832Z node 72 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 277us result status StatusSuccess 2025-03-31T16:24:09.161392Z node 72 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 
0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> KqpLimits::CancelAfterRwTx-useSink [GOOD] |91.2%| [TA] $(B)/ydb/core/tx/schemeshard/ut_ttl/test-results/unittest/{meta.json ... results_accumulator.log} |91.2%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_ttl/test-results/unittest/{meta.json ... results_accumulator.log} >> TColumnShardTestSchema::Drop [GOOD] >> KqpPg::PgUpdateCompoundKey-useSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::CancelAfterRwTx-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 16145, MsgBus: 15665 2025-03-31T16:20:13.589676Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488009249597741343:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:13.589802Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a16/r3tmp/tmpFHnXsa/pdisk_1.dat 2025-03-31T16:20:13.893324Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16145, node 1 2025-03-31T16:20:13.969519Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:13.969544Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:13.969553Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:13.969689Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:20:13.979017Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:13.979199Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:13.980799Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15665 TClient is connected to server localhost:15665 WaitRootIsUp 'Root'... 
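The TSchemeShardTTLTestsWithReboots::CreateTable output above ends with a DescribeScheme result whose TTLSettings block carries the actual assertion target: expiration is driven by the modified_at column with ExpireAfterSeconds: 3600, plus a single Delete tier applying after the same 3600 seconds. As a hedged sketch of how such a configuration is assembled in code, assuming YDB's generated protobuf classes under NKikimrSchemeOp (the message and accessor names below are inferred from the textual dump, not verified against the real .proto files):

    // Sketch only: mirrors TTLSettings { Enabled { ColumnName ... Tiers { ... } } }
    // exactly as printed in the DescribeScheme result above; field names assumed.
    #include <ydb/core/protos/flat_scheme_op.pb.h>  // assumed header location

    NKikimrSchemeOp::TTTLSettings MakeTtlSettings() {
        NKikimrSchemeOp::TTTLSettings ttl;
        auto* enabled = ttl.MutableEnabled();
        enabled->SetColumnName("modified_at");  // expiration column from the dump
        enabled->SetExpireAfterSeconds(3600);   // one hour, as in the test table
        auto* tier = enabled->AddTiers();
        tier->SetApplyAfterSeconds(3600);       // Tiers { ApplyAfterSeconds: 3600 ... }
        tier->MutableDelete();                  // Delete { } marks the tier action
        return ttl;
    }

With these settings a row whose modified_at is older than one hour becomes eligible for deletion, which is the behaviour the TTL test drives through tablet reboots.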
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:14.426104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:14.451907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:16.567863Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009262482644292:2355], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:16.567958Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009262482644300:2358], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:16.568027Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:16.573017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-31T16:20:16.584165Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488009262482644306:2359], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-31T16:20:16.642019Z node 1 :TX_PROXY ERROR: Actor# [1:7488009262482644359:2603] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:20:16.965288Z node 1 :KQP_COMPUTE WARN: fline=kqp_compute_actor_factory.cpp:40;problem=cannot_allocate_memory;tx_id=281474976710661;task_id=1;memory=1048576; 2025-03-31T16:20:16.965316Z node 1 :KQP_COMPUTE WARN: TxId: 281474976710661, task: 1. [Mem] memory 1048576 NOT granted 2025-03-31T16:20:16.981035Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488009262482644400:2368], TxId: 281474976710661, task: 1. Ctx: { TraceId : 01jqpg1w1n1gnm9ds3k5d9v7gc. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=OTg1MGI2YWMtNjQ5NWFjNzUtYjNhZjRhN2MtNDM4ODZhYw==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: OVERLOADED KIKIMR_PRECONDITION_FAILED: {
<main>: Error: Mkql memory limit exceeded, allocated by task 1: 10, host: ghrun-tuhoqst4xe, canAllocateExtraMemory: 1, memory manager details for current node: TxResourcesInfo { TxId: 281474976710661, Database: /Root, PoolId: default, MemoryPoolPercent: 100.00, tx initially granted memory: 20B, tx total memory allocations: 1MiB, tx largest successful memory allocation: 1MiB, tx last failed memory allocation: 1MiB, tx total execution units: 2, started at: 2025-03-31T16:20:16.963266Z }, code: 2029 }. 2025-03-31T16:20:16.981525Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488009262482644402:2369], TxId: 281474976710661, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=OTg1MGI2YWMtNjQ5NWFjNzUtYjNhZjRhN2MtNDM4ODZhYw==. TraceId : 01jqpg1w1n1gnm9ds3k5d9v7gc. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7488009262482644390:2352], status: OVERLOADED, reason: {
<main>: Error: Terminate execution } 2025-03-31T16:20:16.985457Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTg1MGI2YWMtNjQ5NWFjNzUtYjNhZjRhN2MtNDM4ODZhYw==, ActorId: [1:7488009262482644265:2352], ActorState: ExecuteState, TraceId: 01jqpg1w1n1gnm9ds3k5d9v7gc, Create QueryResponse for error on request, msg:
<main>: Error: Mkql memory limit exceeded, allocated by task 1: 10, host: ghrun-tuhoqst4xe, canAllocateExtraMemory: 1, memory manager details for current node: TxResourcesInfo { TxId: 281474976710661, Database: /Root, PoolId: default, MemoryPoolPercent: 100.00, tx initially granted memory: 20B, tx total memory allocations: 1MiB, tx largest successful memory allocation: 1MiB, tx last failed memory allocation: 1MiB, tx total execution units: 2, started at: 2025-03-31T16:20:16.963266Z } , code: 2029 Trying to start YDB, gRPC: 20729, MsgBus: 27089 2025-03-31T16:20:17.654595Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488009268054589339:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:17.655196Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a16/r3tmp/tmp2ve9Vu/pdisk_1.dat 2025-03-31T16:20:17.757122Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20729, node 2 2025-03-31T16:20:17.784949Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:17.785191Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:17.789238Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:20:17.814472Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:17.814495Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:17.814503Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:17.814608Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27089 TClient is connected to server localhost:27089 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:18.204193Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:18.222352Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting...
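The failure above is worth annotating: the compute actor could not get its 1MiB allocation (memory 1048576 NOT granted), the query terminates with OVERLOADED / Mkql memory limit exceeded, code 2029, and the session converts it into an error response. OVERLOADED is a retryable status, so a client is expected to back off and resubmit rather than fail hard. A minimal self-contained sketch of such a retry loop follows; the runOnce callable is a hypothetical stand-in for the real SDK query call, not an actual YDB API:

    #include <chrono>
    #include <functional>
    #include <random>
    #include <thread>

    enum class EStatus { Success, Overloaded, Other };

    // Retries a query while it fails with OVERLOADED, using exponential
    // backoff plus jitter; any other status is returned to the caller as-is.
    EStatus ExecuteWithRetry(const std::function<EStatus()>& runOnce, int maxAttempts = 5) {
        std::mt19937 rng{std::random_device{}()};
        std::uniform_int_distribution<int> jitterMs(0, 50);
        for (int attempt = 0; attempt < maxAttempts; ++attempt) {
            const EStatus st = runOnce();
            if (st != EStatus::Overloaded) {
                return st;  // success or a non-retryable error: stop immediately
            }
            // 100ms, 200ms, 400ms, ... plus jitter so competing retries spread out
            const auto delay = std::chrono::milliseconds(100 * (1 << attempt) + jitterMs(rng));
            std::this_thread::sleep_for(delay);
        }
        return EStatus::Overloaded;  // still overloaded after all attempts
    }

The later KqpLimits cases in this same log exercise the other side of that contract: CANCELLED with "Request canceled after Nms", where retrying is the caller's decision rather than an expectation.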
2025-03-31T16:20:20.426923Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488009280939492271:2358], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:20.426984Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488009280939492263:2355], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:20.427258Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:20.431393Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-31T16:20:20.446134Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488009280939492277:2359], DatabaseId: /Root, PoolId: default, Scheduled retr ... : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [4:7488010039611666433:2491], status: CANCELLED, reason: {
<main>: Error: Terminate execution } 2025-03-31T16:23:17.117202Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7488010039611666446:4926], TxId: 281474976710962, task: 7. Ctx: { SessionId : ydb://session/3?node_id=4&id=MTllMGM2NGYtYjFjNThlMmEtOTgzMWRkNTMtZDFhY2JiNzY=. CustomerSuppliedId : . TraceId : 01jqpg7c07ana91nf58stff4qa. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [4:7488010039611666433:2491], status: CANCELLED, reason: {
<main>: Error: Terminate execution } 2025-03-31T16:23:17.117512Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7488010039611666444:4924], TxId: 281474976710962, task: 5. Ctx: { CustomerSuppliedId : . TraceId : 01jqpg7c07ana91nf58stff4qa. SessionId : ydb://session/3?node_id=4&id=MTllMGM2NGYtYjFjNThlMmEtOTgzMWRkNTMtZDFhY2JiNzY=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [4:7488010039611666433:2491], status: CANCELLED, reason: {
<main>: Error: Terminate execution } 2025-03-31T16:23:17.117796Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7488010039611666440:4920], TxId: 281474976710962, task: 1. Ctx: { TraceId : 01jqpg7c07ana91nf58stff4qa. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=MTllMGM2NGYtYjFjNThlMmEtOTgzMWRkNTMtZDFhY2JiNzY=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [4:7488010039611666433:2491], status: CANCELLED, reason: {
<main>: Error: Terminate execution } 2025-03-31T16:23:17.118093Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7488010039611666441:4921], TxId: 281474976710962, task: 2. Ctx: { TraceId : 01jqpg7c07ana91nf58stff4qa. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=MTllMGM2NGYtYjFjNThlMmEtOTgzMWRkNTMtZDFhY2JiNzY=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [4:7488010039611666433:2491], status: CANCELLED, reason: {
<main>: Error: Terminate execution } 2025-03-31T16:23:17.118330Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7488010039611666443:4923], TxId: 281474976710962, task: 4. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=MTllMGM2NGYtYjFjNThlMmEtOTgzMWRkNTMtZDFhY2JiNzY=. TraceId : 01jqpg7c07ana91nf58stff4qa. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [4:7488010039611666433:2491], status: CANCELLED, reason: {
<main>: Error: Terminate execution } 2025-03-31T16:23:17.118540Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7488010039611666448:4928], TxId: 281474976710962, task: 9. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=MTllMGM2NGYtYjFjNThlMmEtOTgzMWRkNTMtZDFhY2JiNzY=. TraceId : 01jqpg7c07ana91nf58stff4qa. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [4:7488010039611666433:2491], status: CANCELLED, reason: {
<main>: Error: Terminate execution } 2025-03-31T16:23:17.119455Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MTllMGM2NGYtYjFjNThlMmEtOTgzMWRkNTMtZDFhY2JiNzY=, ActorId: [4:7488009790503551983:2491], ActorState: ExecuteState, TraceId: 01jqpg7c07ana91nf58stff4qa, Create QueryResponse for error on request, msg: 2025-03-31T16:23:20.760754Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MTllMGM2NGYtYjFjNThlMmEtOTgzMWRkNTMtZDFhY2JiNzY=, ActorId: [4:7488009790503551983:2491], ActorState: ExecuteState, TraceId: 01jqpg7fj57mv75gzcprtz6hat, Create QueryResponse for error on request, msg: 2025-03-31T16:23:21.269570Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MTllMGM2NGYtYjFjNThlMmEtOTgzMWRkNTMtZDFhY2JiNzY=, ActorId: [4:7488009790503551983:2491], ActorState: ExecuteState, TraceId: 01jqpg7g1x3hcd4f7ytgt659x8, Create QueryResponse for error on request, msg: 2025-03-31T16:23:22.862641Z node 4 :KQP_EXECUTER ERROR: ActorId: [4:7488010061086504045:2491] TxId: 281474976710992. Ctx: { TraceId: 01jqpg7hkh6madswqfq3y9700z, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=MTllMGM2NGYtYjFjNThlMmEtOTgzMWRkNTMtZDFhY2JiNzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. CANCELLED: [ {
<main>: Error: Request canceled after 378ms } {
<main>: Error: Cancelling after 380ms during execution } ] 2025-03-31T16:23:22.862831Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7488010061086504055:5147], TxId: 281474976710992, task: 3. Ctx: { TraceId : 01jqpg7hkh6madswqfq3y9700z. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=MTllMGM2NGYtYjFjNThlMmEtOTgzMWRkNTMtZDFhY2JiNzY=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [4:7488010061086504045:2491], status: CANCELLED, reason: {
<main>: Error: Terminate execution } 2025-03-31T16:23:22.863771Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7488010061086504052:5145], TxId: 281474976710992, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=MTllMGM2NGYtYjFjNThlMmEtOTgzMWRkNTMtZDFhY2JiNzY=. TraceId : 01jqpg7hkh6madswqfq3y9700z. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [4:7488010061086504045:2491], status: CANCELLED, reason: {
<main>: Error: Terminate execution } 2025-03-31T16:23:22.864279Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7488010061086504054:5146], TxId: 281474976710992, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=MTllMGM2NGYtYjFjNThlMmEtOTgzMWRkNTMtZDFhY2JiNzY=. TraceId : 01jqpg7hkh6madswqfq3y9700z. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [4:7488010061086504045:2491], status: CANCELLED, reason: {
<main>: Error: Terminate execution } 2025-03-31T16:23:22.864578Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7488010061086504057:5149], TxId: 281474976710992, task: 5. Ctx: { TraceId : 01jqpg7hkh6madswqfq3y9700z. SessionId : ydb://session/3?node_id=4&id=MTllMGM2NGYtYjFjNThlMmEtOTgzMWRkNTMtZDFhY2JiNzY=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [4:7488010061086504045:2491], status: CANCELLED, reason: {
<main>: Error: Terminate execution } 2025-03-31T16:23:22.864727Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7488010061086504059:5150], TxId: 281474976710992, task: 6. Ctx: { TraceId : 01jqpg7hkh6madswqfq3y9700z. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=MTllMGM2NGYtYjFjNThlMmEtOTgzMWRkNTMtZDFhY2JiNzY=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [4:7488010061086504045:2491], status: CANCELLED, reason: {
<main>: Error: Terminate execution } 2025-03-31T16:23:22.864810Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7488010061086504061:5151], TxId: 281474976710992, task: 7. Ctx: { SessionId : ydb://session/3?node_id=4&id=MTllMGM2NGYtYjFjNThlMmEtOTgzMWRkNTMtZDFhY2JiNzY=. TraceId : 01jqpg7hkh6madswqfq3y9700z. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [4:7488010061086504045:2491], status: CANCELLED, reason: {
<main>: Error: Terminate execution } 2025-03-31T16:23:22.864948Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7488010061086504062:5152], TxId: 281474976710992, task: 8. Ctx: { TraceId : 01jqpg7hkh6madswqfq3y9700z. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=MTllMGM2NGYtYjFjNThlMmEtOTgzMWRkNTMtZDFhY2JiNzY=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [4:7488010061086504045:2491], status: CANCELLED, reason: {
<main>: Error: Terminate execution } 2025-03-31T16:23:22.865032Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7488010061086504064:5153], TxId: 281474976710992, task: 9. Ctx: { SessionId : ydb://session/3?node_id=4&id=MTllMGM2NGYtYjFjNThlMmEtOTgzMWRkNTMtZDFhY2JiNzY=. TraceId : 01jqpg7hkh6madswqfq3y9700z. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [4:7488010061086504045:2491], status: CANCELLED, reason: {
<main>: Error: Terminate execution } 2025-03-31T16:23:22.865675Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MTllMGM2NGYtYjFjNThlMmEtOTgzMWRkNTMtZDFhY2JiNzY=, ActorId: [4:7488009790503551983:2491], ActorState: ExecuteState, TraceId: 01jqpg7hkh6madswqfq3y9700z, Create QueryResponse for error on request, msg: 2025-03-31T16:23:24.301280Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MTllMGM2NGYtYjFjNThlMmEtOTgzMWRkNTMtZDFhY2JiNzY=, ActorId: [4:7488009790503551983:2491], ActorState: ExecuteState, TraceId: 01jqpg7k09f6f60rxt7pb4n4wk, Create QueryResponse for error on request, msg: 2025-03-31T16:23:25.141521Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MTllMGM2NGYtYjFjNThlMmEtOTgzMWRkNTMtZDFhY2JiNzY=, ActorId: [4:7488009790503551983:2491], ActorState: ExecuteState, TraceId: 01jqpg7ktkakx2vcat3105njkr, Create QueryResponse for error on request, msg: 2025-03-31T16:23:27.116473Z node 4 :KQP_EXECUTER ERROR: ActorId: [4:7488010082561341410:2491] TxId: 281474976711015. Ctx: { TraceId: 01jqpg7nr37ddwhsgnefhdj5g9, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=MTllMGM2NGYtYjFjNThlMmEtOTgzMWRkNTMtZDFhY2JiNzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. CANCELLED: [ {
<main>: Error: Request canceled after 391ms } {
: Error: Cancelling after 391ms during execution } ] 2025-03-31T16:23:27.116735Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MTllMGM2NGYtYjFjNThlMmEtOTgzMWRkNTMtZDFhY2JiNzY=, ActorId: [4:7488009790503551983:2491], ActorState: ExecuteState, TraceId: 01jqpg7nr37ddwhsgnefhdj5g9, Create QueryResponse for error on request, msg: 2025-03-31T16:23:27.905143Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MTllMGM2NGYtYjFjNThlMmEtOTgzMWRkNTMtZDFhY2JiNzY=, ActorId: [4:7488009790503551983:2491], ActorState: ExecuteState, TraceId: 01jqpg7pgq4cs66bezz5c5xz0v, Create QueryResponse for error on request, msg: 2025-03-31T16:23:28.314799Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MTllMGM2NGYtYjFjNThlMmEtOTgzMWRkNTMtZDFhY2JiNzY=, ActorId: [4:7488009790503551983:2491], ActorState: ExecuteState, TraceId: 01jqpg7pxg2t9fw9jbfe4st4zv, Create QueryResponse for error on request, msg: 2025-03-31T16:23:30.758636Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MTllMGM2NGYtYjFjNThlMmEtOTgzMWRkNTMtZDFhY2JiNzY=, ActorId: [4:7488009790503551983:2491], ActorState: ExecuteState, TraceId: 01jqpg7s9n0rdbwsbjrba5t98q, Create QueryResponse for error on request, msg: 2025-03-31T16:23:31.592670Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MTllMGM2NGYtYjFjNThlMmEtOTgzMWRkNTMtZDFhY2JiNzY=, ActorId: [4:7488009790503551983:2491], ActorState: ExecuteState, TraceId: 01jqpg7t3n943rsw9nhk21r39h, Create QueryResponse for error on request, msg: 2025-03-31T16:23:35.337919Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MTllMGM2NGYtYjFjNThlMmEtOTgzMWRkNTMtZDFhY2JiNzY=, ActorId: [4:7488009790503551983:2491], ActorState: ExecuteState, TraceId: 01jqpg7xrbb4stts4rz56vexsa, Create QueryResponse for error on request, msg: 2025-03-31T16:23:55.262950Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MTllMGM2NGYtYjFjNThlMmEtOTgzMWRkNTMtZDFhY2JiNzY=, ActorId: [4:7488009790503551983:2491], ActorState: ExecuteState, TraceId: 01jqpg8h5dawhcrsnnpjtfs2fq, Create QueryResponse for error on request, msg: >> TColumnShardTestSchema::RebootOneTier ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::Drop [GOOD] Test command err: 2025-03-31T16:24:01.397772Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:24:01.492834Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:24:01.518084Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:24:01.518418Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:24:01.526883Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:24:01.527120Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:24:01.527395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:24:01.527542Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:24:01.527656Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:24:01.527790Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:24:01.527905Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:24:01.528035Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:24:01.528179Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:24:01.528310Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:24:01.528444Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:24:01.528547Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:24:01.558146Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:24:01.558349Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:24:01.558399Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:24:01.558583Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:24:01.558738Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:24:01.558802Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:24:01.558890Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:24:01.558990Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 
chunks found; 2025-03-31T16:24:01.559059Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:24:01.559130Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:24:01.559160Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:24:01.559337Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:24:01.559398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:24:01.559438Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:24:01.559471Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:24:01.559564Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:24:01.559616Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:24:01.559663Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:24:01.559703Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:24:01.559772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:24:01.559827Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:24:01.559859Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:24:01.559903Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:24:01.559959Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:24:01.559988Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:24:01.560350Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=44; 2025-03-31T16:24:01.560420Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=29; 2025-03-31T16:24:01.560507Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=40; 2025-03-31T16:24:01.560588Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=35; 2025-03-31T16:24:01.560757Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:24:01.560854Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:24:01.560894Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:24:01.561111Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:24:01.561169Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:24:01.561223Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:24:01.561384Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:24:01.561429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:24:01.561457Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-31T16:24:01.561617Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:24:01.561659Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:24:01.561687Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-31T16:24:01.561818Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T16:24:01.561860Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T16:24:01.561906Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 1T16:24:12.158145Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:122:2872:0]; 2025-03-31T16:24:12.158174Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:123:2872:0]; 2025-03-31T16:24:12.158202Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:124:2872:0]; 2025-03-31T16:24:12.158233Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:125:2872:0]; 2025-03-31T16:24:12.158263Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:126:2872:0]; 2025-03-31T16:24:12.158295Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:127:2872:0]; 2025-03-31T16:24:12.158328Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:128:2872:0]; 2025-03-31T16:24:12.158356Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:129:2872:0]; 2025-03-31T16:24:12.158384Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:130:2872:0]; 2025-03-31T16:24:12.158413Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:131:2864:0]; 2025-03-31T16:24:12.158442Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:132:2872:0]; 2025-03-31T16:24:12.158469Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:133:2872:0]; 2025-03-31T16:24:12.158496Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:134:2864:0]; 2025-03-31T16:24:12.158522Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:135:2864:0]; 2025-03-31T16:24:12.158549Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:136:2864:0]; 2025-03-31T16:24:12.158578Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:137:2856:0]; 2025-03-31T16:24:12.158613Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:138:2864:0]; 2025-03-31T16:24:12.158654Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:139:2864:0]; 2025-03-31T16:24:12.158699Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:140:2856:0]; 2025-03-31T16:24:12.158744Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:141:2856:0]; 2025-03-31T16:24:12.158787Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:142:2800:0]; 2025-03-31T16:24:12.158831Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:143:2752:0]; 2025-03-31T16:24:12.158876Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:144:2792:0]; 2025-03-31T16:24:12.158923Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:145:2792:0]; 2025-03-31T16:24:12.158962Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:146:2792:0]; 2025-03-31T16:24:12.158992Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:147:2784:0]; 2025-03-31T16:24:12.159023Z node 
1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:148:2784:0]; 2025-03-31T16:24:12.159053Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:149:2784:0]; 2025-03-31T16:24:12.159103Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:150:2784:0]; 2025-03-31T16:24:12.159137Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:151:2784:0]; 2025-03-31T16:24:12.159166Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:152:2776:0]; 2025-03-31T16:24:12.159196Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:153:2768:0]; 2025-03-31T16:24:12.159224Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:154:9448:0]; 2025-03-31T16:24:12.161975Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: fline=context.cpp:84;ff_first=(column_ids=9;column_names=saved_at;);; 2025-03-31T16:24:12.162204Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: fline=context.cpp:99;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; 2025-03-31T16:24:12.254040Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 9437184 2025-03-31T16:24:12.254569Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[22] (CS::GENERAL) apply at tablet 9437184 2025-03-31T16:24:12.274795Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;tablet_id=9437184;external_task_id=9402c75a-e4c11f0-93c5cb13-b523dcb1;fline=with_appended.cpp:24;event=skip_inserted_data;reason=table_removed;path_id=1; 2025-03-31T16:24:12.277002Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:7 Blob count: 464 2025-03-31T16:24:12.279852Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=10308;raw_bytes=8378;count=1;records=100} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=5601076;raw_bytes=7864534;count=3;records=80000} inactive {blob_bytes=5605344;raw_bytes=7864506;count=2;records=80000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-03-31T16:24:12.290209Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:660:2677];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-31T16:24:12.290415Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:660:2677];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-31T16:24:12.290542Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:660:2677];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-31T16:24:12.290593Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:660:2677] finished for tablet 9437184 2025-03-31T16:24:12.290977Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:660:2677];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:416;event=scan_finish;compute_actor_id=[1:650:2667];stats={"p":[{"events":["f_bootstrap"],"t":0.007},{"events":["l_bootstrap","f_ProduceResults"],"t":0.008},{"events":["f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.136}],"full":{"a":1743438252153852,"name":"_full_task","f":1743438252153852,"d_finished":0,"c":0,"l":1743438252290659,"d":136807},"events":[{"name":"bootstrap","f":1743438252161079,"d_finished":1253,"c":1,"l":1743438252162332,"d":1253},{"a":1743438252290166,"name":"ack","f":1743438252290166,"d_finished":0,"c":0,"l":1743438252290659,"d":493},{"a":1743438252290114,"name":"processing","f":1743438252290114,"d_finished":0,"c":0,"l":1743438252290659,"d":545},{"name":"ProduceResults","f":1743438252162316,"d_finished":309,"c":2,"l":1743438252290575,"d":309},{"a":1743438252290579,"name":"Finish","f":1743438252290579,"d_finished":0,"c":0,"l":1743438252290659,"d":80}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-31T16:24:12.291061Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:660:2677];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:650:2667];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-31T16:24:12.291469Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:660:2677];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:371;event=scan_finished;compute_actor_id=[1:650:2667];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0.007},{"events":["l_bootstrap","f_ProduceResults"],"t":0.008},{"events":["f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.136},{"events":["l_ack","l_processing","l_Finish"],"t":0.137}],"full":{"a":1743438252153852,"name":"_full_task","f":1743438252153852,"d_finished":0,"c":0,"l":1743438252291126,"d":137274},"events":[{"name":"bootstrap","f":1743438252161079,"d_finished":1253,"c":1,"l":1743438252162332,"d":1253},{"a":1743438252290166,"name":"ack","f":1743438252290166,"d_finished":0,"c":0,"l":1743438252291126,"d":960},{"a":1743438252290114,"name":"processing","f":1743438252290114,"d_finished":0,"c":0,"l":1743438252291126,"d":1012},{"name":"ProduceResults","f":1743438252162316,"d_finished":309,"c":2,"l":1743438252290575,"d":309},{"a":1743438252290579,"name":"Finish","f":1743438252290579,"d_finished":0,"c":0,"l":1743438252291126,"d":547}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-31T16:24:12.291553Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:660:2677];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-31T16:24:12.152955Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-03-31T16:24:12.291604Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:660:2677];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-31T16:24:12.291709Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:660:2677];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> TColumnShardTestReadWrite::CompactionInGranule_PKInt64 [GOOD] >> TColumnShardTestSchema::RebootOneColdTier [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::PgUpdateCompoundKey-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 4619, MsgBus: 12582 2025-03-31T16:20:35.996354Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488009345242230644:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:35.996507Z node 1 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012aa/r3tmp/tmp8yCgLi/pdisk_1.dat 2025-03-31T16:20:36.187357Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:20:36.204185Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:36.204301Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:36.207703Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4619, node 1 2025-03-31T16:20:36.271581Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:36.271615Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:36.271627Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:36.271783Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12582 TClient is connected to server localhost:12582 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:36.721111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 16 2025-03-31T16:20:38.737958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:20:38.916666Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg1000_b (key, value) VALUES ( '0'::int2, ARRAY ['false'::bool, 'false'::bool] ); 2025-03-31T16:20:38.951036Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009358127133299:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:38.951142Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:38.951708Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009358127133311:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:38.956630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-03-31T16:20:38.968382Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488009358127133313:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-03-31T16:20:39.053345Z node 1 :TX_PROXY ERROR: Actor# [1:7488009362422100663:2405] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } --!syntax_pg INSERT INTO Pg1000_b (key, value) VALUES ( '1'::int2, ARRAY ['true'::bool, 'true'::bool] ); 18 2025-03-31T16:20:39.432236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:20:39.486023Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg1002_b (key, value) VALUES ( '0'::int2, ARRAY ['0'::"char", '0'::"char"] ); --!syntax_pg INSERT INTO Pg1002_b (key, value) VALUES ( '1'::int2, ARRAY ['1'::"char", '1'::"char"] ); --!syntax_pg INSERT INTO Pg1002_b (key, value) VALUES ( '2'::int2, ARRAY ['2'::"char", '2'::"char"] ); 21 2025-03-31T16:20:39.915342Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:20:39.958887Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg1005_b (key, value) VALUES ( '0'::int2, ARRAY ['0'::int2, '0'::int2] ); --!syntax_pg INSERT INTO Pg1005_b (key, value) VALUES ( '1'::int2, ARRAY ['1'::int2, '1'::int2] ); --!syntax_pg INSERT INTO Pg1005_b (key, value) VALUES ( '2'::int2, ARRAY ['2'::int2, '2'::int2] ); 23 2025-03-31T16:20:40.360185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-31T16:20:40.411176Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg1007_b (key, value) VALUES ( '0'::int2, ARRAY ['0'::int4, '0'::int4] ); --!syntax_pg INSERT INTO Pg1007_b (key, value) VALUES ( '1'::int2, ARRAY ['1'::int4, '1'::int4] ); --!syntax_pg INSERT INTO Pg1007_b (key, value) VALUES ( '2'::int2, ARRAY ['2'::int4, '2'::int4] ); 20 2025-03-31T16:20:40.815153Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-31T16:20:40.853348Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg1016_b (key, value) VALUES ( '0'::int2, ARRAY ['0'::int8, '0'::int8] ); --!syntax_pg INSERT INTO Pg1016_b (key, value) VALUES ( '1'::int2, ARRAY ['1'::int8, '1'::int8] ); 2025-03-31T16:20:40.996559Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488009345242230644:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:40.996619Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; --!syntax_pg INSERT INTO Pg1016_b (key, value) VALUES ( '2'::int2, ARRAY ['2'::int8, '2'::int8] ); 700 2025-03-31T16:20:41.246255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself 
is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 2025-03-31T16:20:41.293415Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg1021_b (key, value) VALUES ( '0'::int2, ARRAY ['0.5'::float4, '0.5'::float4] ); --!syntax_pg INSERT INTO Pg1021_b (key, value) VALUES ( '1'::int2, ARRAY ['1.5'::float4, '1.5'::float4] ); --!syntax_pg INSERT INTO Pg1021_b (key, value) VALUES ( '2'::int2, ARRAY ['2.5'::float4, '2.5'::float4] ); 701 2025-03-31T16:20:41.697198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710695:0, at schemeshard: 72057594046644480 2025-03-31T16:20:41.734166Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg1022_b (key, value) VALUES ( '0'::int2, ARRAY ['0.5'::float8, '0.5'::float8] ); --!syntax_pg INSERT INTO Pg1022_b (key, value) VALUES ( '1'::int2, ARRAY ['1.5'::float8, '1.5'::float8] ); --!syntax_pg INSERT INTO Pg1022_b (key, value) VALUES ( '2'::int2, ARRAY ['2.5'::float8, '2.5'::float8] ); 25 2025-03-31T16:20:42.153611Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710701:0, at schemeshard: 72057594046644480 2025-03-31T16:20:42.191910Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg1009_b (key, value) VALUES ( '0'::int2, ARRAY ['text 0'::text, 'text 0'::text] ); --!syntax_pg INSERT INTO Pg1009_b (key, value) VALUES ( '1'::int2, ARRAY ['text 1'::text, 'text 1'::text] ); --!syntax_pg INSERT INTO Pg1009_b (key, value) VALUES ( '2'::int2, ARRAY ['text 2'::text, 'text 2'::text] ); 1042 2025-03-31T16:20:42.565009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710707:0, at schemeshard: 72057594046644480 2025-03-31T16:20:42.607806Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg1014_b (key, value) VALUES ( '0'::int2, ARRAY ['bpchar 0'::bpchar, 'bpchar 0'::bpchar] ); --!syntax_pg INSERT INTO Pg1014_b (key, value) VALUES ( '1'::int2, ARRAY ['bpchar 1'::bpchar, 'bpchar 1'::bpchar] ); --!syntax_pg INSERT INTO Pg1014_b (key, value) VALUES ( '2'::int2, ARRAY ['bpchar 2'::bpchar, 'bpchar 2'::bpchar] ); 1043 2025-03-31T16:20:42.983444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710713:0, at schemeshard: 72057594046644480 2025-03-31T16:20:43.02 ... PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
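(Aside) The bare integers printed between the INSERT batches above — 16, 18, 21, 23, 20, 700, 701, 25, 1042, 1043 — match the standard PostgreSQL element-type OIDs of the array types being exercised, and each Pg<N>_b table name matches the corresponding pg_type array-type OID (e.g. _bool = 1000). The OID values below are from PostgreSQL's catalog; treating them as the naming scheme of these test tables is an assumption. A minimal Python sketch of the mapping:

# Standard pg_type OIDs; pairing them with the Pg<N>_b tables above is
# an inference from the log, not something the test prints explicitly.
ELEMENT_TO_ARRAY_OID = {
    16: (1000, "bool"),      # Pg1000_b stores bool[]
    18: (1002, '"char"'),    # Pg1002_b stores "char"[]
    21: (1005, "int2"),      # Pg1005_b
    23: (1007, "int4"),      # Pg1007_b
    20: (1016, "int8"),      # Pg1016_b
    700: (1021, "float4"),   # Pg1021_b
    701: (1022, "float8"),   # Pg1022_b
    25: (1009, "text"),      # Pg1009_b
    1042: (1014, "bpchar"),  # Pg1014_b
}

for elem_oid, (array_oid, name) in ELEMENT_TO_ARRAY_OID.items():
    print(f"element oid {elem_oid} ({name}) -> table Pg{array_oid}_b")
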
2025-03-31T16:23:56.253350Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:23:56.266011Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:24:00.212275Z node 9 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[9:7488010205485823300:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:24:00.212393Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:24:01.107060Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [9:7488010231255627738:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:24:01.107311Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:24:01.120150Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:24:01.210547Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [9:7488010231255627843:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:24:01.210673Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:24:01.210765Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [9:7488010231255627849:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:24:01.215353Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-31T16:24:01.229097Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [9:7488010231255627851:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-31T16:24:01.291016Z node 9 :TX_PROXY ERROR: Actor# [9:7488010231255627906:2403] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:24:01.557327Z node 9 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [9:7488010231255627952:2362], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiWriteTable!
:1:1: Error: Cannot update primary key column: key1
:1:1: Error: Cannot update primary key column: key2 2025-03-31T16:24:01.557589Z node 9 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=9&id=ODRhM2Q5ZmUtZWI0ZWQ5NWEtNDBiYWE4OTMtNmFhY2RiN2I=, ActorId: [9:7488010231255627945:2358], ActorState: ExecuteState, TraceId: 01jqpg8qqqdgz8pz8p4m7xrqfb, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-03-31T16:24:01.569168Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 19530, MsgBus: 11340 2025-03-31T16:24:03.570891Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7488010238577311841:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:24:03.571007Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012aa/r3tmp/tmpvBoDqF/pdisk_1.dat 2025-03-31T16:24:03.768240Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:24:03.814755Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:24:03.814898Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:24:03.816536Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19530, node 10 2025-03-31T16:24:03.880717Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:24:03.880745Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:24:03.880758Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:24:03.880962Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11340 TClient is connected to server localhost:11340 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:24:04.879553Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
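The GENERIC_ERROR compile failure above (and the identical one for node 10 further below) is the PgUpdateCompoundKey scenario: KQP rejects any UPDATE that assigns to a primary-key column at type-annotation time (code 1030). A minimal sketch of the kind of statement that triggers it — assuming the ydb Python SDK, a local endpoint, and a hypothetical table pgtable whose compound primary key is (key1, key2); none of these names come from the log except key1/key2:

import ydb  # pip install ydb; assumes a reachable YDB instance

QUERY = """--!syntax_pg
UPDATE pgtable SET key1 = '1'::int2, key2 = '1'::int2 WHERE value = 'a';
"""

def main():
    driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")
    driver.wait(timeout=5)
    pool = ydb.SessionPool(driver)
    try:
        pool.retry_operation_sync(
            lambda session: session.transaction().execute(QUERY, commit_tx=True)
        )
    except ydb.issues.Error as err:
        # Expected: GENERIC_ERROR with "Type annotation, code: 1030" and
        # "Cannot update primary key column: key1 / key2", as in the log.
        print("rejected at compile time:", err)
    finally:
        pool.stop()
        driver.stop()

main()
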
2025-03-31T16:24:04.887271Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:24:08.571159Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7488010238577311841:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:24:08.571239Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:24:09.424069Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7488010264347116290:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:24:09.424239Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:24:09.450476Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:24:09.517621Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7488010264347116393:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:24:09.517750Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:24:09.517945Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7488010264347116399:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:24:09.523984Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-31T16:24:09.543724Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7488010264347116401:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-31T16:24:09.646333Z node 10 :TX_PROXY ERROR: Actor# [10:7488010264347116456:2404] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:24:10.183286Z node 10 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [10:7488010268642083821:2369], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiWriteTable!
:1:1: Error: Cannot update primary key column: key1
:1:1: Error: Cannot update primary key column: key2 2025-03-31T16:24:10.183671Z node 10 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=10&id=MmI2YTFhMjMtZTFlNTI2MTgtZWMyNWFlZDgtODRkZjRmOGY=, ActorId: [10:7488010268642083814:2365], ActorState: ExecuteState, TraceId: 01jqpg904aegftf06mzq6fpaqh, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-03-31T16:24:10.190829Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 waiting... >> TColumnShardTestSchema::RebootEnableColdTiersAfterTtl ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKInt64 [GOOD] Test command err: 2025-03-31T16:21:53.219867Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:21:53.318494Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:21:53.343187Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:21:53.343502Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:21:53.351444Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:21:53.351619Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:21:53.351806Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:21:53.351881Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:21:53.351942Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:21:53.352033Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:21:53.352153Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:21:53.352241Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:21:53.352307Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:21:53.352409Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:21:53.352496Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:21:53.352564Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:21:53.382054Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:21:53.382212Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:21:53.382273Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:21:53.382476Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:21:53.382641Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:21:53.382715Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:21:53.382762Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:21:53.382858Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:21:53.382934Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:21:53.382988Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:21:53.383019Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:21:53.383276Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:21:53.383353Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:21:53.383394Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:21:53.383426Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:21:53.383546Z node 1 
:TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:21:53.383618Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:21:53.383673Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:21:53.383712Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:21:53.383809Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:21:53.383850Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:21:53.383886Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:21:53.383953Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:21:53.384012Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:21:53.384049Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:21:53.384507Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=60; 2025-03-31T16:21:53.384608Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=44; 2025-03-31T16:21:53.384694Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=39; 2025-03-31T16:21:53.384775Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=34; 2025-03-31T16:21:53.384938Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:21:53.385000Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:21:53.385052Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:21:53.385264Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 
2025-03-31T16:21:53.385314Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:21:53.385348Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:21:53.385497Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:21:53.385535Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:21:53.385566Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-31T16:21:53.385785Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:21:53.385834Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:21:53.385865Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-31T16:21:53.386030Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T16:21:53.386082Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T16:21:53.386136Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
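The TX_COLUMNSHARD records above trace the schema-normalizer chain: each normalizer is registered during TTxInitSchema, then TTxUpdateSchema walks the chain with normalizer_init / normalizer_finished / normalizer_switched events until normalization_finished. A minimal sketch for recovering the executed order from the key=value; records; the sample string is an abbreviated excerpt of the log above:

import re

LOG = """
fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules;
fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1;
fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks;
fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2;
"""

# Collect (seq_id, normalizer type) pairs in the order they were started.
pipeline = []
for m in re.finditer(r"event=normalizer_init;[^\n]*?seq_id=(\d+);type=(\w+);", LOG):
    pipeline.append((int(m.group(1)), m.group(2)))

print(pipeline)  # [(1, 'Granules'), (2, 'Chunks')]
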
column_id:8;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:55;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:56;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:57;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:58;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:64;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:65;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:66;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:67;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:68;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:69;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:70;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:71;blob_range:[NO_BLOB:0:2672];;column_id:8;chunk_idx:72;blob_range:[NO_BLOB:0:2664];;column_id:8;chunk_idx:73;blob_range:[NO_BLOB:0:8448];;column_id:9;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:8;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:9;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:10;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:11;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:12;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:13;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:14;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:15;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:22;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:23;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:30;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:32;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:33;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:34;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:35;blob_range:[NO_BLOB:0:2664];;c
olumn_id:9;chunk_idx:36;blob_range:[NO_BLOB:0:8464];;column_id:9;chunk_idx:37;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:55;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:56;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:57;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:58;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:64;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:65;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:66;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:67;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:68;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:69;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:70;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:71;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:72;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:73;blob_range:[NO_BLOB:0:8448];;column_id:7;chunk_idx:0;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:1;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:2;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:3;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:4;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:5;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:6;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:7;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:8;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:9;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:10;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:11;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:12;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:13;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:14;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:15;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:16;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:17;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:18;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:19;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:20;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:21;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:22;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:23;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:24;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:25;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:26;blob_range:[NO_BLOB:0:9040];;column_id:7;chunk_idx:27;blob_range:[NO_BLOB:0:2760];;co
lumn_id:7;chunk_idx:28;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:29;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:30;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:31;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:32;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:33;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:34;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:35;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:36;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:37;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:38;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:39;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:40;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:41;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:42;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:43;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:44;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:45;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:46;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:47;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:48;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:49;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:50;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:51;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:52;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:53;blob_range:[NO_BLOB:0:9024];;column_id:5;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:8;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:9;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:10;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:11;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:12;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:13;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:14;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:15;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:16;blob_range:[NO_BLOB:0:9456];;column_id:5;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:23;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:24;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:30;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:32;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:33;blob_range:[NO_BLOB:0:9448];;;;switched=(portion_id:44;path_id:1;records_count:25002;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2167032;index_size:28;meta:((produced=INSERTED;)););(portion_id:48;path_id:1;records_count:25002;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2586528;in
dex_size:28;meta:((produced=SPLIT_COMPACTED;)););(portion_id:49;path_id:1;records_count:25002;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2167032;index_size:28;meta:((produced=INSERTED;)););; 2025-03-31T16:24:13.061695Z node 1 :TX_COLUMNSHARD INFO: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;tablet_id=9437184;parent_id=[1:5927:7919];fline=general_compaction.cpp:135;event=blobs_created;appended=1;switched=3; 2025-03-31T16:24:13.063322Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:5927:7919];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> TColumnShardTestSchema::RebootEnableColdTiersAfterNoEviction ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootOneColdTier [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=saved_at;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143438795.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=143438795.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123438795.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=123438795.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123437595.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=123437595.000000s;Name=;Codec=}; 2025-03-31T16:23:17.410886Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:23:17.511252Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:23:17.543745Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:23:17.544148Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:23:17.554331Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:23:17.554637Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 
2025-03-31T16:23:17.554967Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:23:17.555203Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:23:17.555394Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:23:17.555596Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:23:17.555762Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:23:17.555939Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:23:17.556114Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:23:17.556322Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:23:17.556494Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:23:17.556668Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:23:17.597197Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:23:17.597415Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:23:17.597522Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:23:17.597760Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:23:17.598024Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:23:17.598142Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:23:17.598208Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:23:17.598344Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:23:17.598452Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:23:17.598527Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:23:17.598571Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:23:17.598791Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:23:17.598892Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:23:17.598967Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:23:17.599012Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:23:17.599155Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:23:17.599238Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:23:17.599311Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:23:17.599366Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:23:17.599478Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:23:17.599534Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:23:17.599577Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:23:17.599657Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:23:17.599718Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:23:17.599776Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:23:17.600271Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=59; 2025-03-31T16:23:17.600388Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=44; 2025-03-31T16:23:17.600508Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=51; 2025-03-31T16:23:17.600669Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=93; 2025-03-31T16:23:17.600886Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:23:17.600994Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:23:17.601047Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:23:17.601333Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:23:17.601410Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:23:17.601462Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:23:17.601674Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:23:17.601756Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:23:17.601804Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-31T16:23:17.602090Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normaliz ... 
a.cpp:29;PRECHARGE:finishLoadingTime=9; 2025-03-31T16:24:13.611003Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=178; 2025-03-31T16:24:13.611034Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=20598; 2025-03-31T16:24:13.615216Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=4119; 2025-03-31T16:24:13.620234Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=4201; 2025-03-31T16:24:13.620327Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=5032; 2025-03-31T16:24:13.620464Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=74; 2025-03-31T16:24:13.620560Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=49; 2025-03-31T16:24:13.620681Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=68; 2025-03-31T16:24:13.620780Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=55; 2025-03-31T16:24:13.625601Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=4761; 2025-03-31T16:24:13.632256Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=6560; 2025-03-31T16:24:13.632359Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=29; 2025-03-31T16:24:13.632419Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=19; 2025-03-31T16:24:13.632458Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=8; 2025-03-31T16:24:13.632501Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=10; 2025-03-31T16:24:13.632563Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2025-03-31T16:24:13.632643Z node 1 
:TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=40; 2025-03-31T16:24:13.632689Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=7; 2025-03-31T16:24:13.632781Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=47; 2025-03-31T16:24:13.632826Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=7; 2025-03-31T16:24:13.632878Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=24; 2025-03-31T16:24:13.632955Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=45; 2025-03-31T16:24:13.633169Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=182; 2025-03-31T16:24:13.633202Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=48927; 2025-03-31T16:24:13.633338Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=20801572;raw_bytes=32169208;count=11;records=320000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-03-31T16:24:13.633451Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-03-31T16:24:13.633506Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-03-31T16:24:13.633557Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-03-31T16:24:13.641182Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=1; 2025-03-31T16:24:13.641319Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-31T16:24:13.641366Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:24:13.641437Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=3; 2025-03-31T16:24:13.641489Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700006;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-03-31T16:24:13.641524Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=3;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-31T16:24:13.641572Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-31T16:24:13.641609Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-31T16:24:13.641683Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-31T16:24:13.642217Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:24:13.642280Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;self_id=[1:1991:3891];tablet_id=9437184;parent=[1:1953:3860];fline=manager.cpp:82;event=ask_data;request=request_id=95;1={portions_count=11};; 2025-03-31T16:24:13.642773Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-03-31T16:24:13.643104Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-03-31T16:24:13.643130Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-03-31T16:24:13.643148Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-03-31T16:24:13.643179Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-31T16:24:13.643221Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:24:13.643266Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=3; 2025-03-31T16:24:13.643311Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700006;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-03-31T16:24:13.643342Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=3;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-31T16:24:13.643379Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-31T16:24:13.643407Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-31T16:24:13.643472Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-31T16:24:13.644499Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1452;size=11;path_id=1; 2025-03-31T16:24:13.645514Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1503;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 160000/10402332 160000/10402332 80000/5203504 0/0 |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> DataShardVolatile::DistributedWriteAsymmetricExecute [GOOD] >> DataShardVolatile::DistributedWriteThenDropTable >> TColumnShardTestReadWrite::CompactionInGranule_PKUtf8 [GOOD] >> TColumnShardTestSchema::HotTiersTtl |91.2%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/columnshard/ut_schema/unittest >> TCdcStreamTests::MeteringServerless [GOOD] >> TCdcStreamTests::MeteringDedicated >> PgCatalog::PgDatabase-useSink [GOOD] >> PgCatalog::PgRoles |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUtf8 [GOOD] Test command err: 2025-03-31T16:22:00.351043Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:22:00.450270Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:22:00.474791Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:22:00.475292Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:22:00.483674Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:22:00.483898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:22:00.484137Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:22:00.484255Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:22:00.484351Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:22:00.484508Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:22:00.484632Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:22:00.484772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:22:00.484881Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:22:00.484988Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:22:00.485104Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:22:00.485230Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:22:00.514734Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:22:00.514905Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:22:00.514960Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:22:00.515175Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:22:00.515318Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:22:00.515386Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:22:00.515427Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:22:00.515528Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:22:00.515606Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:22:00.515658Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:22:00.515692Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:22:00.515904Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:22:00.515984Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:22:00.516027Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:22:00.516053Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:22:00.516158Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:22:00.516242Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:22:00.516286Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:22:00.516330Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:22:00.516423Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:22:00.516460Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:22:00.516495Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:22:00.516554Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:22:00.516589Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:22:00.516616Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:22:00.517033Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=53; 2025-03-31T16:22:00.517134Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=47; 2025-03-31T16:22:00.517237Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=51; 2025-03-31T16:22:00.517319Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=35; 2025-03-31T16:22:00.517483Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:22:00.517539Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:22:00.517589Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:22:00.517785Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:22:00.517835Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:22:00.517863Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:22:00.518012Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:22:00.518057Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:22:00.518085Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-31T16:22:00.518311Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:22:00.518357Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:22:00.518385Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-31T16:22:00.518498Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T16:22:00.518544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T16:22:00.518600Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
lumn_id:8;chunk_idx:30;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:32;blob_range:[NO_BLOB:0:2672];;column_id:8;chunk_idx:33;blob_range:[NO_BLOB:0:2664];;column_id:8;chunk_idx:34;blob_range:[NO_BLOB:0:8352];;column_id:8;chunk_idx:35;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:36;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:37;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:55;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:56;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:57;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:58;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:64;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:65;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:66;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:67;blob_range:[NO_BLOB:0:2672];;column_id:8;chunk_idx:68;blob_range:[NO_BLOB:0:2664];;column_id:8;chunk_idx:69;blob_range:[NO_BLOB:0:8336];;column_id:9;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:8;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:9;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:10;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:11;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:12;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:13;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:14;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:15;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:20;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:21;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:22;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:23;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;col
umn_id:9;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:29;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:30;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:32;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:33;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:34;blob_range:[NO_BLOB:0:8352];;column_id:9;chunk_idx:35;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:36;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:37;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:55;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:56;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:57;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:58;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:64;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:65;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:66;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:67;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:68;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:69;blob_range:[NO_BLOB:0:8336];;column_id:7;chunk_idx:0;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:1;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:2;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:3;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:4;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:5;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:6;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:7;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:8;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:9;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:10;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:11;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:12;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:13;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:14;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:15;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:16;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:17;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:18;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:19;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:20;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:21;blob_range:[NO_BLOB:0:2752];;colu
mn_id:7;chunk_idx:22;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:23;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:24;blob_range:[NO_BLOB:0:10208];;column_id:7;chunk_idx:25;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:26;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:27;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:28;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:29;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:30;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:31;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:32;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:33;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:34;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:35;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:36;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:37;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:38;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:39;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:40;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:41;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:42;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:43;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:44;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:45;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:46;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:47;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:48;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:49;blob_range:[NO_BLOB:0:10208];;column_id:5;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:7;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:8;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:9;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:10;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:11;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:12;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:13;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:14;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:15;blob_range:[NO_BLOB:0:9400];;column_id:5;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:23;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:28;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:29;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:30;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:31;blob_range:[NO_BLOB:0:9392];;;;switched=(portion_id:44;path_id:1;records_count:23698;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2507632;index_size:20;meta:((produced=SPLIT_COMPACTED;)););(portion_id:46;path_id:1;records_count:23698;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:210
9896;index_size:20;meta:((produced=INSERTED;)););(portion_id:51;path_id:1;records_count:23698;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2109896;index_size:20;meta:((produced=INSERTED;)););; 2025-03-31T16:24:15.731095Z node 1 :TX_COLUMNSHARD INFO: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;tablet_id=9437184;parent_id=[1:6068:8060];fline=general_compaction.cpp:135;event=blobs_created;appended=1;switched=3; 2025-03-31T16:24:15.732722Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:6068:8060];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKString [GOOD] |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ForgetAfterFail >> TColumnShardTestSchema::RebootExportWithLostAnswer ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKString [GOOD] Test command err: 2025-03-31T16:21:59.060387Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:21:59.164931Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:21:59.187684Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:21:59.187978Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:21:59.195858Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:21:59.196061Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:21:59.196312Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:21:59.196460Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:21:59.196554Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 
2025-03-31T16:21:59.196676Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:21:59.196815Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:21:59.196951Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:21:59.197057Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:21:59.197155Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:21:59.197261Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:21:59.197388Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:21:59.232621Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:21:59.232772Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:21:59.232835Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:21:59.233030Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:21:59.233159Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:21:59.233225Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:21:59.233268Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:21:59.233359Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:21:59.233409Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:21:59.233438Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 
2025-03-31T16:21:59.233456Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:21:59.233670Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:21:59.233736Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:21:59.233782Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:21:59.233813Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:21:59.233926Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:21:59.233993Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:21:59.234038Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:21:59.234082Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:21:59.234165Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:21:59.234198Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:21:59.234230Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:21:59.234286Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:21:59.234320Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:21:59.234346Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:21:59.234762Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=55; 2025-03-31T16:21:59.234860Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=36; 2025-03-31T16:21:59.234966Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=35; 
2025-03-31T16:21:59.235057Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=44; 2025-03-31T16:21:59.235229Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:21:59.235293Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:21:59.235324Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:21:59.235514Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:21:59.235549Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:21:59.235582Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:21:59.235699Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:21:59.235727Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:21:59.235745Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-31T16:21:59.235909Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:21:59.235948Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:21:59.235967Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-31T16:21:59.236079Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T16:21:59.236122Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T16:21:59.236167Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
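A recurring shape in the records above: on tablet boot, TTxUpdateSchema walks an ordered chain of normalizers (Granules, Chunks, TablesCleaner, CleanGranuleId, and so on through RestoreV2Chunks), emitting normalizer_init, a "N chunks found" report, normalizer_finished, then normalizer_switched to the next entry, with last_saved_id marking where a restarted tablet should resume. Below is a minimal C++ sketch of that resumable chain; every name and the stubbed-out persistence are illustrative assumptions, not YDB's actual classes.

    #include <cstdio>
    #include <functional>
    #include <string>
    #include <vector>

    // One entry in the normalization chain. Ids are stable but may have gaps
    // (the log shows 1, 2, 4, 6, 8, 9, 10, 11, 13, 15, 16).
    struct TNormalizer {
        int Id;
        std::string Name;            // CLASS_NAME in the log records
        std::function<int()> Run;    // returns the number of chunks it found/fixed
    };

    void RunNormalizers(const std::vector<TNormalizer>& chain, int lastSavedId) {
        for (const auto& n : chain) {
            if (n.Id <= lastSavedId)
                continue;  // finished before a restart; skipped on resume
            std::printf("event=normalizer_init;seq_id=%d;type=%s\n",
                        n.Id, n.Name.c_str());
            const int chunks = n.Run();
            std::printf("normalizer=%s;message=%d chunks found\n",
                        n.Name.c_str(), chunks);
            std::printf("event=normalizer_finished;description=CLASS_NAME=%s;id=%d\n",
                        n.Name.c_str(), n.Id);
            lastSavedId = n.Id;  // the real tablet would persist this in its local db
        }
        std::printf("event=normalization_finished\n");
    }

    int main() {
        const std::vector<TNormalizer> chain = {
            {1, "Granules",      [] { return 0; }},
            {2, "Chunks",        [] { return 0; }},
            {4, "TablesCleaner", [] { return 0; }},
        };
        RunNormalizers(chain, /*lastSavedId=*/0);  // 0 = fresh start, as in the log
    }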
lumn_id:8;chunk_idx:30;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:32;blob_range:[NO_BLOB:0:2672];;column_id:8;chunk_idx:33;blob_range:[NO_BLOB:0:2664];;column_id:8;chunk_idx:34;blob_range:[NO_BLOB:0:8352];;column_id:8;chunk_idx:35;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:36;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:37;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:55;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:56;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:57;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:58;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:64;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:65;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:66;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:67;blob_range:[NO_BLOB:0:2672];;column_id:8;chunk_idx:68;blob_range:[NO_BLOB:0:2664];;column_id:8;chunk_idx:69;blob_range:[NO_BLOB:0:8336];;column_id:9;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:8;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:9;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:10;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:11;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:12;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:13;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:14;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:15;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:20;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:21;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:22;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:23;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;col
umn_id:9;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:29;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:30;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:32;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:33;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:34;blob_range:[NO_BLOB:0:8352];;column_id:9;chunk_idx:35;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:36;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:37;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:55;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:56;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:57;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:58;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:64;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:65;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:66;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:67;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:68;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:69;blob_range:[NO_BLOB:0:8336];;column_id:7;chunk_idx:0;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:1;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:2;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:3;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:4;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:5;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:6;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:7;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:8;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:9;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:10;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:11;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:12;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:13;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:14;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:15;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:16;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:17;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:18;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:19;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:20;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:21;blob_range:[NO_BLOB:0:2752];;colu
mn_id:7;chunk_idx:22;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:23;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:24;blob_range:[NO_BLOB:0:10208];;column_id:7;chunk_idx:25;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:26;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:27;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:28;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:29;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:30;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:31;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:32;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:33;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:34;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:35;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:36;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:37;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:38;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:39;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:40;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:41;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:42;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:43;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:44;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:45;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:46;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:47;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:48;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:49;blob_range:[NO_BLOB:0:10208];;column_id:5;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:7;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:8;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:9;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:10;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:11;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:12;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:13;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:14;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:15;blob_range:[NO_BLOB:0:9400];;column_id:5;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:23;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:28;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:29;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:30;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:31;blob_range:[NO_BLOB:0:9392];;;;switched=(portion_id:44;path_id:1;records_count:23698;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2507632;index_size:20;meta:((produced=SPLIT_COMPACTED;)););(portion_id:46;path_id:1;records_count:23698;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:210
9896;index_size:20;meta:((produced=INSERTED;)););(portion_id:51;path_id:1;records_count:23698;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2109896;index_size:20;meta:((produced=INSERTED;)););; 2025-03-31T16:24:18.189577Z node 1 :TX_COLUMNSHARD INFO: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;tablet_id=9437184;parent_id=[1:6067:8059];fline=general_compaction.cpp:135;event=blobs_created;appended=1;switched=3; 2025-03-31T16:24:18.190924Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:6067:8059];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> TColumnShardTestSchema::CreateTable >> TColumnShardTestSchema::ColdTiers >> TColumnShardTestSchema::CreateTable [GOOD] >> TColumnShardTestSchema::ExternalTTL [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::CreateTable [GOOD] Test command err: 2025-03-31T16:24:20.922800Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:24:21.007721Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:24:21.026384Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:24:21.026620Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:24:21.032840Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:24:21.032987Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:24:21.033180Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:24:21.033255Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:24:21.033358Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:24:21.033421Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:24:21.033518Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:24:21.033643Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:24:21.033772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:24:21.033897Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:24:21.034013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:24:21.034131Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:24:21.053789Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:24:21.053937Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:24:21.053978Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:24:21.054102Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:24:21.054249Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:24:21.054294Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:24:21.054339Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:24:21.054409Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:24:21.054457Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:24:21.054484Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:24:21.054502Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:24:21.054629Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:24:21.054672Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:24:21.054701Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:24:21.054718Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:24:21.054771Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:24:21.054805Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:24:21.054837Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:24:21.054860Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:24:21.054921Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:24:21.054956Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:24:21.054977Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:24:21.055021Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:24:21.055054Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:24:21.055109Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:24:21.055374Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=33; 2025-03-31T16:24:21.055439Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=30; 2025-03-31T16:24:21.055509Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=24; 2025-03-31T16:24:21.055574Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=30; 2025-03-31T16:24:21.055704Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:24:21.055786Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:24:21.055809Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:24:21.055926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:24:21.055948Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:24:21.055964Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:24:21.056117Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:24:21.056152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:24:21.056170Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-31T16:24:21.056295Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:24:21.056324Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:24:21.056346Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-31T16:24:21.056422Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T16:24:21.056450Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T16:24:21.056507Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
ame: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { Version: 1 } } } } 2025-03-31T16:24:22.068031Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:138:2170];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=118;this=88923004875264;method=TTxController::StartProposeOnExecute;tx_info=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=019:0;;fline=schema.h:36;event=sync_schema; 2025-03-31T16:24:22.080347Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;request_tx=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=019:0;;this=88923004875264;op_tx=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=019:0;;int_op_tx=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=019:0;;int_this=89197881219264;fline=columnshard__propose_transaction.cpp:103;event=actual tx operator; 2025-03-31T16:24:22.080428Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;request_tx=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=019:0;;this=88923004875264;op_tx=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=019:0;;int_op_tx=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=019:0;;int_this=89197881219264;method=TTxController::FinishProposeOnComplete;tx_id=118;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:168:2193]; 2025-03-31T16:24:22.080475Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;request_tx=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=019:0;;this=88923004875264;op_tx=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=019:0;;int_op_tx=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=019:0;;int_this=89197881219264;method=TTxController::FinishProposeOnComplete;tx_id=118;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=118; 2025-03-31T16:24:22.080857Z node 1 :TX_COLUMNSHARD DEBUG: TTxNotifyTxCompletion.Execute at tablet 9437184 2025-03-31T16:24:22.080990Z node 1 :TX_COLUMNSHARD DEBUG: PlanStep 1018 at tablet 9437184, mediator 0 2025-03-31T16:24:22.081068Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[34] execute at tablet 9437184 2025-03-31T16:24:22.081409Z node 1 :TX_COLUMNSHARD DEBUG: EnsureTable for pathId: 19 ttl settings: { Version: 1 } at tablet 9437184 2025-03-31T16:24:22.081477Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=118;fline=tables_manager.cpp:245;method=RegisterTable;path_id=19; 
2025-03-31T16:24:22.081520Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=118;fline=column_engine.h:144;event=RegisterTable;path_id=19; 2025-03-31T16:24:22.081894Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=118;fline=column_engine_logs.cpp:488;event=OnTieringModified;path_id=19; 2025-03-31T16:24:22.082021Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=118;fline=tx_controller.cpp:211;event=finished_tx;tx_id=118; 2025-03-31T16:24:22.093813Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[34] complete at tablet 9437184 2025-03-31T16:24:22.093932Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; CreateTable: { SeqNo { Generation: 20 } EnsureTables { Tables { PathId: 20 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "k0" TypeId: 4609 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { Version: 1 } } } } 2025-03-31T16:24:22.095223Z node 1 :TX_COLUMNSHARD_TX ERROR: tablet_id=9437184;self_id=[1:138:2170];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=119;this=88923004878400;method=TTxController::StartProposeOnExecute;tx_info=119:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=020:0;;fline=tx_controller.cpp:345;error=problem on start;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription; 2025-03-31T16:24:22.107400Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;request_tx=119:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=020:0;;this=88923004878400;op_tx=119:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=020:0;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:168:2193]; 2025-03-31T16:24:22.107478Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=9437184;request_tx=119:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=020:0;;this=88923004878400;op_tx=119:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=020:0;;fline=propose_tx.cpp:23;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription;tablet_id=9437184;tx_id=119; CreateTable: { SeqNo { Generation: 21 } EnsureTables { Tables { PathId: 21 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "k0" 
TypeId: 4610 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { Version: 1 } } } } 2025-03-31T16:24:22.108905Z node 1 :TX_COLUMNSHARD_TX ERROR: tablet_id=9437184;self_id=[1:138:2170];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=120;this=88923004879968;method=TTxController::StartProposeOnExecute;tx_info=120:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=021:0;;fline=tx_controller.cpp:345;error=problem on start;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription; 2025-03-31T16:24:22.121073Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;request_tx=120:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=021:0;;this=88923004879968;op_tx=120:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=021:0;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:168:2193]; 2025-03-31T16:24:22.121131Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=9437184;request_tx=120:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=021:0;;this=88923004879968;op_tx=120:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=021:0;;fline=propose_tx.cpp:23;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription;tablet_id=9437184;tx_id=120; CreateTable: { SeqNo { Generation: 22 } EnsureTables { Tables { PathId: 22 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "k0" TypeId: 4612 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } 
TtlSettings { Version: 1 } } } } 2025-03-31T16:24:22.122387Z node 1 :TX_COLUMNSHARD_TX ERROR: tablet_id=9437184;self_id=[1:138:2170];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=121;this=88923004881536;method=TTxController::StartProposeOnExecute;tx_info=121:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=022:0;;fline=tx_controller.cpp:345;error=problem on start;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription; 2025-03-31T16:24:22.134534Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;request_tx=121:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=022:0;;this=88923004881536;op_tx=121:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=022:0;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:168:2193]; 2025-03-31T16:24:22.134598Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=9437184;request_tx=121:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=022:0;;this=88923004881536;op_tx=121:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=022:0;;fline=propose_tx.cpp:23;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription;tablet_id=9437184;tx_id=121; >> PgCatalog::PgRoles [GOOD] >> PgCatalog::PgTables >> DataShardVolatile::DistributedWriteThenDropTable [GOOD] >> DataShardVolatile::DistributedWriteThenCopyTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ExternalTTL [GOOD] Test command err: 2025-03-31T16:23:57.121655Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:23:57.195939Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:23:57.199861Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:23:57.200259Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:23:57.219040Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:23:57.219358Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:23:57.226427Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:23:57.226603Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:23:57.226796Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:23:57.226903Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:23:57.226976Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:23:57.227110Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:23:57.227212Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:23:57.227301Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:23:57.227378Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:23:57.227461Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:23:57.227548Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:23:57.227636Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:23:57.245773Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:23:57.248838Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:23:57.248971Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:23:57.249038Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:23:57.249181Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:23:57.249380Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:23:57.249461Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:23:57.249506Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:23:57.249614Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:23:57.249667Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:23:57.249701Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:23:57.249724Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:23:57.249888Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:23:57.249942Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:23:57.249978Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:23:57.249999Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:23:57.250086Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:23:57.250128Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:23:57.250164Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:23:57.250184Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:23:57.250240Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:23:57.250267Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:23:57.250285Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:23:57.250319Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:23:57.250349Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:23:57.250368Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:23:57.250670Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=35; 2025-03-31T16:23:57.250760Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=35; 2025-03-31T16:23:57.250833Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=27; 2025-03-31T16:23:57.250931Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=49; 2025-03-31T16:23:57.251074Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:23:57.251171Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:23:57.251206Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:23:57.251391Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:23:57.251424Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:23:57.251445Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:23:57.251560Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:23:57.251599Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:23:57.251619Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-31T16:23:57.251755Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:23:57.251786Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:23:57.251805Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-31T1 ... 
ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-31T16:24:21.687221Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:1;records_count:71;schema=saved_at: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-31T16:24:21.687240Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-31T16:24:21.687261Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:201;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-03-31T16:24:21.687321Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-31T16:24:21.687375Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:1;records_count:71;schema=saved_at: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-31T16:24:21.687394Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-31T16:24:21.687441Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:232;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;);columns=1;rows=71; 2025-03-31T16:24:21.687465Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:252;stage=data_format;batch_size=568;num_rows=71;batch_columns=saved_at; 2025-03-31T16:24:21.687574Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:684:2700];bytes=568;rows=71;faults=0;finished=0;fault=0;schema=saved_at: timestamp[us]; 2025-03-31T16:24:21.687645Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-31T16:24:21.687710Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-31T16:24:21.687804Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-31T16:24:21.687876Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-31T16:24:21.687924Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-31T16:24:21.687970Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-31T16:24:21.688007Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:691:2707] finished for tablet 9437184 2025-03-31T16:24:21.688399Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:416;event=scan_finish;compute_actor_id=[1:684:2700];stats={"p":[{"events":["f_bootstrap"],"t":0.041},{"events":["f_ProduceResults"],"t":0.354},{"events":["l_bootstrap"],"t":0.526},{"events":["f_processing","f_task_result"],"t":0.54},{"events":["l_task_result"],"t":6.529},{"events":["f_ack"],"t":6.568},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":7.31}],"full":{"a":1743438254377432,"name":"_full_task","f":1743438254377432,"d_finished":0,"c":0,"l":1743438261688058,"d":7310626},"events":[{"name":"bootstrap","f":1743438254418625,"d_finished":484952,"c":1,"l":1743438254903577,"d":484952},{"a":1743438261687867,"name":"ack","f":1743438260946089,"d_finished":685500,"c":903,"l":1743438261687827,"d":685691},{"a":1743438261687860,"name":"processing","f":1743438254918233,"d_finished":3279932,"c":4515,"l":1743438261687828,"d":3280130},{"name":"ProduceResults","f":1743438254732373,"d_finished":1342261,"c":5420,"l":1743438261687987,"d":1342261},{"a":1743438261687988,"name":"Finish","f":1743438261687988,"d_finished":0,"c":0,"l":1743438261688058,"d":70},{"name":"task_result","f":1743438254918254,"d_finished":2512083,"c":3612,"l":1743438260906543,"d":2512083}],"id":"9437184::3"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-31T16:24:21.688461Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:684:2700];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-31T16:24:21.688803Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:371;event=scan_finished;compute_actor_id=[1:684:2700];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0.041},{"events":["f_ProduceResults"],"t":0.354},{"events":["l_bootstrap"],"t":0.526},{"events":["f_processing","f_task_result"],"t":0.54},{"events":["l_task_result"],"t":6.529},{"events":["f_ack"],"t":6.568},{"events":["l_ProduceResults","f_Finish"],"t":7.31},{"events":["l_ack","l_processing","l_Finish"],"t":7.311}],"full":{"a":1743438254377432,"name":"_full_task","f":1743438254377432,"d_finished":0,"c":0,"l":1743438261688501,"d":7311069},"events":[{"name":"bootstrap","f":1743438254418625,"d_finished":484952,"c":1,"l":1743438254903577,"d":484952},{"a":1743438261687867,"name":"ack","f":1743438260946089,"d_finished":685500,"c":903,"l":1743438261687827,"d":686134},{"a":1743438261687860,"name":"processing","f":1743438254918233,"d_finished":3279932,"c":4515,"l":1743438261687828,"d":3280573},{"name":"ProduceResults","f":1743438254732373,"d_finished":1342261,"c":5420,"l":1743438261687987,"d":1342261},{"a":1743438261687988,"name":"Finish","f":1743438261687988,"d_finished":0,"c":0,"l":1743438261688501,"d":513},{"name":"task_result","f":1743438254918254,"d_finished":2512083,"c":3612,"l":1743438260906543,"d":2512083}],"id":"9437184::3"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-31T16:24:21.688859Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-31T16:24:14.337939Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=903;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=7037528;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=7037528;selected_rows=0; 2025-03-31T16:24:21.688902Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-31T16:24:21.689090Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor with no CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor with no CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> TPQTest::TestSourceIdDropBySourceIdCount [GOOD] >>
TPQTest::TestStorageRetention >> TColumnShardTestSchema::ExportAfterFail >> TColumnShardTestSchema::ExportWithLostAnswer >> TColumnShardTestSchema::ForgetWithLostAnswer [GOOD] >> TPQTest::TestStorageRetention [GOOD] >> TPQTest::TestStatusWithMultipleConsumers ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ForgetWithLostAnswer [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=saved_at;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143438817.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=143438817.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123438817.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=123438817.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123437617.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=123437617.000000s;Name=;Codec=}; 2025-03-31T16:23:39.358793Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:23:39.458821Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:23:39.483135Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:23:39.483411Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:23:39.491329Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:23:39.491533Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:23:39.491784Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:23:39.491904Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:23:39.492024Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:23:39.492150Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:23:39.492252Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:23:39.492358Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:23:39.492477Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:23:39.492604Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:23:39.492710Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:23:39.492815Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:23:39.520908Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:23:39.521079Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:23:39.521144Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:23:39.521305Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:23:39.521453Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:23:39.521522Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:23:39.521565Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:23:39.521655Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:23:39.521715Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:23:39.521754Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:23:39.521784Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:23:39.521950Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:23:39.522017Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:23:39.522070Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:23:39.522100Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:23:39.522184Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:23:39.522241Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:23:39.522281Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:23:39.522310Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:23:39.522380Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:23:39.522418Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:23:39.522448Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:23:39.522496Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:23:39.522531Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:23:39.522565Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:23:39.522967Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=63; 2025-03-31T16:23:39.523059Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=45; 2025-03-31T16:23:39.523157Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=36; 2025-03-31T16:23:39.523238Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=42; 2025-03-31T16:23:39.523394Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:23:39.523466Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:23:39.523504Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:23:39.523713Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:23:39.523759Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:23:39.523788Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:23:39.523935Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:23:39.523984Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:23:39.524014Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-31T16:23:39.524178Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normaliz ... 
:"Finish","f":1743438265863014,"d_finished":0,"c":0,"l":1743438265863452,"d":438},{"name":"task_result","f":1743438265330295,"d_finished":196065,"c":28,"l":1743438265860420,"d":196065}],"id":"9437184::7"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1284:3291]->[1:1283:3290] 2025-03-31T16:24:25.863837Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1284:3291];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-31T16:24:25.321854Z;index_granules=0;index_portions=4;index_batches=1731;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=5203504;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=5203504;selected_rows=0; 2025-03-31T16:24:25.863868Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1284:3291];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-31T16:24:25.864056Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:1284:3291];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-03-31T16:24:25.865547Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 7 at tablet 9437184 2025-03-31T16:24:25.865767Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000006:max} readable: {1000000006:max} at tablet 9437184 2025-03-31T16:24:25.865891Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-03-31T16:24:25.866027Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000006:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-03-31T16:24:25.866074Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000006:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-03-31T16:24:25.866430Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000006:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2025-03-31T16:24:25.866499Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000006:max};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:136;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-03-31T16:24:25.866873Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000006:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:162;event=TTxScan started;actor_id=[1:1300:3307];trace_detailed=; 2025-03-31T16:24:25.867242Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;fline=context.cpp:84;ff_first=(column_ids=1;column_names=timestamp;);; 2025-03-31T16:24:25.867425Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;fline=context.cpp:99;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-03-31T16:24:25.867547Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:24:25.867687Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:24:25.867952Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-31T16:24:25.868046Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 
2025-03-31T16:24:25.868177Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:24:25.868224Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1300:3307] finished for tablet 9437184 2025-03-31T16:24:25.868553Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:416;event=scan_finish;compute_actor_id=[1:1299:3306];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0},{"events":["f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1743438265866823,"name":"_full_task","f":1743438265866823,"d_finished":0,"c":0,"l":1743438265868273,"d":1450},"events":[{"name":"bootstrap","f":1743438265866992,"d_finished":724,"c":1,"l":1743438265867716,"d":724},{"a":1743438265867932,"name":"ack","f":1743438265867932,"d_finished":0,"c":0,"l":1743438265868273,"d":341},{"a":1743438265867918,"name":"processing","f":1743438265867918,"d_finished":0,"c":0,"l":1743438265868273,"d":355},{"name":"ProduceResults","f":1743438265867492,"d_finished":455,"c":2,"l":1743438265868206,"d":455},{"a":1743438265868209,"name":"Finish","f":1743438265868209,"d_finished":0,"c":0,"l":1743438265868273,"d":64}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:24:25.868611Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:1299:3306];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-31T16:24:25.868914Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:371;event=scan_finished;compute_actor_id=[1:1299:3306];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0},{"events":["f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1743438265866823,"name":"_full_task","f":1743438265866823,"d_finished":0,"c":0,"l":1743438265868647,"d":1824},"events":[{"name":"bootstrap","f":1743438265866992,"d_finished":724,"c":1,"l":1743438265867716,"d":724},{"a":1743438265867932,"name":"ack","f":1743438265867932,"d_finished":0,"c":0,"l":1743438265868647,"d":715},{"a":1743438265867918,"name":"processing","f":1743438265867918,"d_finished":0,"c":0,"l":1743438265868647,"d":729},{"name":"ProduceResults","f":1743438265867492,"d_finished":455,"c":2,"l":1743438265868206,"d":455},{"a":1743438265868209,"name":"Finish","f":1743438265868209,"d_finished":0,"c":0,"l":1743438265868647,"d":438}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1300:3307]->[1:1299:3306] 2025-03-31T16:24:25.868982Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-31T16:24:25.866477Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-03-31T16:24:25.869032Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-31T16:24:25.869121Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 160000/10402332 160000/10402332 80000/5203504 0/0 >> TColumnShardTestSchema::OneColdTier [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt64 [GOOD] >> TPQTest::TestStatusWithMultipleConsumers [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::OneColdTier [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=saved_at;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143438819.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=143438819.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123438819.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=123438819.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123437619.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=123437619.000000s;Name=;Codec=}; 2025-03-31T16:23:41.487809Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:23:41.599237Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:23:41.623705Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:23:41.624013Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:23:41.632164Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:23:41.632392Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:23:41.632655Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:23:41.632788Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:23:41.632920Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:23:41.633092Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:23:41.633208Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:23:41.633320Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:23:41.633430Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:23:41.633590Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:23:41.633715Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:23:41.633823Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:23:41.663722Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:23:41.663868Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:23:41.663946Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:23:41.664142Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:23:41.664334Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:23:41.664406Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:23:41.664447Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:23:41.664546Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:23:41.664611Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:23:41.664660Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:23:41.664693Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:23:41.664878Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:23:41.664955Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:23:41.665008Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:23:41.665039Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:23:41.665159Z node 1 
:TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:23:41.665219Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:23:41.665265Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:23:41.665295Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:23:41.665364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:23:41.665403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:23:41.665437Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:23:41.665484Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:23:41.665522Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:23:41.665554Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:23:41.666007Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=80; 2025-03-31T16:23:41.666097Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=34; 2025-03-31T16:23:41.666172Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=34; 2025-03-31T16:23:41.666258Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=51; 2025-03-31T16:23:41.666431Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:23:41.666489Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:23:41.666523Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:23:41.666731Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 
2025-03-31T16:23:41.666788Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:23:41.666827Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:23:41.666992Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:23:41.667046Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:23:41.667098Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-31T16:23:41.667280Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normaliz ... l":1743438266657850,"d":921},{"name":"task_result","f":1743438266086831,"d_finished":219404,"c":28,"l":1743438266652179,"d":219404}],"id":"9437184::7"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1284:3291]->[1:1283:3290] 2025-03-31T16:24:26.658499Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1284:3291];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-31T16:24:26.075968Z;index_granules=0;index_portions=4;index_batches=1731;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=5203504;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=5203504;selected_rows=0; 2025-03-31T16:24:26.658543Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1284:3291];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-31T16:24:26.658814Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:1284:3291];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-03-31T16:24:26.660960Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 7 at tablet 9437184 2025-03-31T16:24:26.661403Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000006:max} readable: {1000000006:max} at tablet 9437184 2025-03-31T16:24:26.661551Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-03-31T16:24:26.661757Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000006:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-03-31T16:24:26.661847Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000006:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-03-31T16:24:26.662319Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000006:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2025-03-31T16:24:26.662403Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000006:max};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:136;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-03-31T16:24:26.662929Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000006:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:162;event=TTxScan started;actor_id=[1:1300:3307];trace_detailed=; 2025-03-31T16:24:26.663517Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;fline=context.cpp:84;ff_first=(column_ids=1;column_names=timestamp;);; 2025-03-31T16:24:26.663787Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;fline=context.cpp:99;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-03-31T16:24:26.664008Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:24:26.664182Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:24:26.664558Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-31T16:24:26.664695Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:24:26.664859Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:24:26.664931Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1300:3307] finished for tablet 9437184 2025-03-31T16:24:26.665572Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:416;event=scan_finish;compute_actor_id=[1:1299:3306];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","f_ProduceResults"],"t":0.001},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.002}],"full":{"a":1743438266662850,"name":"_full_task","f":1743438266662850,"d_finished":0,"c":0,"l":1743438266665018,"d":2168},"events":[{"name":"bootstrap","f":1743438266663149,"d_finished":1074,"c":1,"l":1743438266664223,"d":1074},{"a":1743438266664527,"name":"ack","f":1743438266664527,"d_finished":0,"c":0,"l":1743438266665018,"d":491},{"a":1743438266664501,"name":"processing","f":1743438266664501,"d_finished":0,"c":0,"l":1743438266665018,"d":517},{"name":"ProduceResults","f":1743438266663906,"d_finished":623,"c":2,"l":1743438266664906,"d":623},{"a":1743438266664912,"name":"Finish","f":1743438266664912,"d_finished":0,"c":0,"l":1743438266665018,"d":106}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:24:26.665682Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:1299:3306];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-31T16:24:26.666292Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:371;event=scan_finished;compute_actor_id=[1:1299:3306];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","f_ProduceResults"],"t":0.001},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.002}],"full":{"a":1743438266662850,"name":"_full_task","f":1743438266662850,"d_finished":0,"c":0,"l":1743438266665753,"d":2903},"events":[{"name":"bootstrap","f":1743438266663149,"d_finished":1074,"c":1,"l":1743438266664223,"d":1074},{"a":1743438266664527,"name":"ack","f":1743438266664527,"d_finished":0,"c":0,"l":1743438266665753,"d":1226},{"a":1743438266664501,"name":"processing","f":1743438266664501,"d_finished":0,"c":0,"l":1743438266665753,"d":1252},{"name":"ProduceResults","f":1743438266663906,"d_finished":623,"c":2,"l":1743438266664906,"d":623},{"a":1743438266664912,"name":"Finish","f":1743438266664912,"d_finished":0,"c":0,"l":1743438266665753,"d":841}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1300:3307]->[1:1299:3306] 2025-03-31T16:24:26.666436Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-31T16:24:26.662377Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-03-31T16:24:26.666511Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-31T16:24:26.666653Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 160000/10402332 160000/10402332 80000/5203504 0/0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt64 [GOOD] Test command err: 2025-03-31T16:22:07.230435Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:22:07.335862Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:22:07.361334Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:22:07.361687Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:22:07.370917Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:22:07.371194Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:22:07.371452Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:22:07.371571Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:22:07.371682Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:22:07.371834Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:22:07.371968Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:22:07.372087Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:22:07.372208Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:22:07.372326Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:22:07.372474Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:22:07.372590Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:22:07.402342Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:22:07.402506Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:22:07.402561Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 
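An aside for readers of these dumps: the scan actor's stats={...} blob above is plain JSON. The "full" section and each entry under "events" carry epoch-microsecond timestamps (a/f/l; e.g. "a":1743438266662850 is 2025-03-31T16:24:26.662850Z, matching the surrounding log times) plus d, the total microseconds spent, which the "p" section repeats in seconds. A minimal summarizer, written for this report rather than taken from the YDB tree, might look like:

import json

def stage_summary(stats_blob: str):
    # Summarize per-stage timings from a scan actor "stats={...}" blob.
    # "full"."d" is the whole-task duration in microseconds; each entry
    # under "events" carries d (total time spent) and c (invocation count).
    stats = json.loads(stats_blob)
    total_us = stats["full"]["d"] or 1
    return sorted(
        ((ev["name"], ev.get("c", 0), ev["d"], round(100.0 * ev["d"] / total_us, 1))
         for ev in stats["events"]),
        key=lambda row: row[2],
        reverse=True,
    )

Applied to the first dump above (total d=2168), this ranks bootstrap (1074 us, ~49.5%) ahead of ProduceResults (623), processing (517), ack (491) and Finish (106).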
2025-03-31T16:22:07.402768Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:22:07.402923Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:22:07.402991Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:22:07.403036Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:22:07.403162Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:22:07.403231Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:22:07.403274Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:22:07.403303Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:22:07.403564Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:22:07.403644Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:22:07.403685Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:22:07.403716Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:22:07.403839Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:22:07.403906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:22:07.403952Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:22:07.403984Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:22:07.404057Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:22:07.404093Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:22:07.404129Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:22:07.404221Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:22:07.404264Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:22:07.404299Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:22:07.404752Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=53; 2025-03-31T16:22:07.404855Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=45; 2025-03-31T16:22:07.404947Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=44; 2025-03-31T16:22:07.405051Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=54; 2025-03-31T16:22:07.405223Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:22:07.405296Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:22:07.405361Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:22:07.405577Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:22:07.405627Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:22:07.405670Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:22:07.405845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:22:07.405891Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:22:07.405925Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-31T16:22:07.406131Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:22:07.406179Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:22:07.406217Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-31T16:22:07.406359Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T16:22:07.406437Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T16:22:07.406492Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... column_id:8;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:55;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:56;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:57;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:58;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:64;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:65;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:66;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:67;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:68;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:69;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:70;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:71;blob_range:[NO_BLOB:0:2672];;column_id:8;chunk_idx:72;blob_range:[NO_BLOB:0:2664];;column_id:8;chunk_idx:73;blob_range:[NO_BLOB:0:8448];;column_id:9;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:8;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:9;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:10;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:11;blob_range:[NO_BLOB:0:2696];;co
lumn_id:9;chunk_idx:12;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:13;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:14;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:15;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:22;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:23;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:30;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:32;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:33;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:34;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:35;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:36;blob_range:[NO_BLOB:0:8464];;column_id:9;chunk_idx:37;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:55;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:56;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:57;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:58;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:64;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:65;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:66;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:67;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:68;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:69;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:70;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:71;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:72;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:73;blob_range:[NO_BLOB:0:8448];;column_id:7;chunk_idx:0;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:1;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:2;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:3;blob_range:[NO_BLOB:0:2760
];;column_id:7;chunk_idx:4;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:5;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:6;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:7;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:8;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:9;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:10;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:11;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:12;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:13;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:14;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:15;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:16;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:17;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:18;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:19;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:20;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:21;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:22;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:23;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:24;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:25;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:26;blob_range:[NO_BLOB:0:9040];;column_id:7;chunk_idx:27;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:28;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:29;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:30;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:31;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:32;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:33;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:34;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:35;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:36;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:37;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:38;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:39;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:40;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:41;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:42;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:43;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:44;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:45;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:46;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:47;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:48;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:49;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:50;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:51;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:52;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:53;blob_range:[NO_BLOB:0:9024];;column_id:5;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:8;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:9;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:10;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:11;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:12;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:13;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:14;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:15;blob_range:[NO_BLOB:0:2672];;colu
mn_id:5;chunk_idx:16;blob_range:[NO_BLOB:0:9456];;column_id:5;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:23;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:24;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:30;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:32;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:33;blob_range:[NO_BLOB:0:9448];;;;switched=(portion_id:44;path_id:1;records_count:25002;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2167032;index_size:28;meta:((produced=INSERTED;)););(portion_id:48;path_id:1;records_count:25002;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2586528;index_size:28;meta:((produced=SPLIT_COMPACTED;)););(portion_id:49;path_id:1;records_count:25002;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2167032;index_size:28;meta:((produced=INSERTED;)););; 2025-03-31T16:24:26.631057Z node 1 :TX_COLUMNSHARD INFO: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;tablet_id=9437184;parent_id=[1:5927:7919];fline=general_compaction.cpp:135;event=blobs_created;appended=1;switched=3; 2025-03-31T16:24:26.632517Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:5927:7919];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::TestStatusWithMultipleConsumers [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:108:2057] recipient: [1:101:2135] 2025-03-31T16:21:02.380217Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:02.380307Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] Leader for TabletID 
72057594037927938 is [1:153:2174] sender: [1:154:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:179:2057] recipient: [1:14:2061] 2025-03-31T16:21:02.408407Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:02.428786Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-03-31T16:21:02.432325Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:185:2198] 2025-03-31T16:21:02.434928Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:185:2198] 2025-03-31T16:21:02.440982Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:186:2199] 2025-03-31T16:21:02.442194Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:186:2199] 2025-03-31T16:21:02.448879Z node 1 :PERSQUEUE INFO: new Cookie owner1|2da19f60-b57a0823-a0cc0f0e-62c90adb_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner1 2025-03-31T16:21:02.449546Z node 1 :PERSQUEUE INFO: new Cookie owner2|3b588f09-75e0c598-6b22ab9-111e10f8_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner2 2025-03-31T16:21:02.466858Z node 1 :PERSQUEUE INFO: new Cookie owner1|8ab472ce-95e9d59d-a188be80-a049f44d_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner1 Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:103:2057] recipient: [2:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:103:2057] recipient: [2:101:2135] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:108:2057] recipient: [2:101:2135] 2025-03-31T16:21:02.827582Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:02.827642Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:149:2057] recipient: [2:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:149:2057] recipient: [2:147:2170] Leader for TabletID 72057594037927938 is [2:153:2174] sender: [2:154:2057] recipient: [2:147:2170] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:179:2057] recipient: [2:14:2061] 2025-03-31T16:21:02.841472Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:02.842575Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 2 actor [2:177:2192] txId 12345 config: CacheSize: 10485760 
PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2025-03-31T16:21:02.843221Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:185:2198] 2025-03-31T16:21:02.845739Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [2:185:2198] 2025-03-31T16:21:02.847405Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [2:186:2199] 2025-03-31T16:21:02.849320Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [2:186:2199] 2025-03-31T16:21:02.855127Z node 2 :PERSQUEUE INFO: new Cookie owner1|7174438d-355321da-3232b9cd-1fe51f7_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner1 2025-03-31T16:21:02.855464Z node 2 :PERSQUEUE INFO: new Cookie owner2|89d5c95b-250b86bf-ef127dc8-8d2658f3_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner2 2025-03-31T16:21:02.869153Z node 2 :PERSQUEUE INFO: new Cookie owner1|4a31e051-85e6a6e9-bfbffe3-7e387170_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner1 Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:103:2057] recipient: [3:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:103:2057] recipient: [3:101:2135] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:108:2057] recipient: [3:101:2135] 2025-03-31T16:21:03.222660Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:03.222731Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:149:2057] recipient: [3:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:149:2057] recipient: [3:147:2170] Leader for TabletID 72057594037927938 is [3:153:2174] sender: [3:154:2057] recipient: [3:147:2170] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:179:2057] recipient: [3:14:2061] 2025-03-31T16:21:03.240177Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:03.240961Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 3 actor [3:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 3 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 3 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 
0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 3 Important: false } 2025-03-31T16:21:03.241582Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [3:185:2198] 2025-03-31T16:21:03.243839Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [3:185:2198] 2025-03-31T16:21:03.245276Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [3:186:2199] 2025-03-31T16:21:03.247038Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [3:186:2199] 2025-03-31T16:21:03.253692Z node 3 :PERSQUEUE INFO: new Cookie owner1|2790ca-acc232c2-2f95bc02-5ac9cd36_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner1 2025-03-31T16:21:03.254132Z node 3 :PERSQUEUE INFO: new Cookie owner2|ba51f2de-c352a88-bb42008f-7d789b26_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner2 2025-03-31T16:21:03.271817Z node 3 :PERSQUEUE INFO: new Cookie owner1|d817b56e-5626e23c-46e72d87-795494a8_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner1 Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:103:2057] recipient: [4:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:103:2057] recipient: [4:101:2135] Leader for TabletID 72057594037927937 is [4:107:2139] sender: [4:108:2057] recipient: [4:101:2135] 2025-03-31T16:21:03.668666Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:03.668752Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [4:149:2057] recipient: [4:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [4:149:2057] recipient: [4:147:2170] Leader for TabletID 72057594037927938 is [4:153:2174] sender: [4:154:2057] recipient: [4:147:2170] Leader for TabletID 72057594037927937 is [4:107:2139] sender: [4:179:2057] recipient: [4:14:2061] 2025-03-31T16:21:03.685016Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:21:03.685961Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 4 actor [4:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 4 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 4 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 4 Important: false } 2025-03-31T16:21:03.686669Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [4:185:2198] 2025-03-31T16:21:03.688755Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [4:185:2198] 2025-03-31T16:21:03.690354Z node 4 
:PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [4:186:2199] 2025-03-31T16:21:03.692105Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [4:186:2199] 2025-03-31T16:21:03.698663Z node 4 :PERSQUEUE INFO: new Cookie default|cbf70d10-df276e9c-30eba425-f7fad12f_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner defa ... R INFO: [72057594037927938][rt3.dc1--topic] pipe [134:392:2386] connected; active server actors: 1 2025-03-31T16:24:27.127447Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:397:2391] connected; active server actors: 1 2025-03-31T16:24:27.129001Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:402:2396] connected; active server actors: 1 2025-03-31T16:24:27.130767Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:407:2401] connected; active server actors: 1 2025-03-31T16:24:27.132657Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:412:2406] connected; active server actors: 1 2025-03-31T16:24:27.134904Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:417:2411] connected; active server actors: 1 2025-03-31T16:24:27.137246Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:422:2416] connected; active server actors: 1 2025-03-31T16:24:27.139397Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:427:2421] connected; active server actors: 1 2025-03-31T16:24:27.141411Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:432:2426] connected; active server actors: 1 2025-03-31T16:24:27.143291Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:437:2431] connected; active server actors: 1 2025-03-31T16:24:27.145188Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:442:2436] connected; active server actors: 1 2025-03-31T16:24:27.147502Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:447:2441] connected; active server actors: 1 2025-03-31T16:24:27.150080Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:452:2446] connected; active server actors: 1 2025-03-31T16:24:27.152783Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:457:2451] connected; active server actors: 1 2025-03-31T16:24:27.155557Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:462:2456] connected; active server actors: 1 2025-03-31T16:24:27.158509Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:467:2461] connected; active server actors: 1 2025-03-31T16:24:27.161226Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:472:2466] connected; active server actors: 1 2025-03-31T16:24:27.163834Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:477:2471] connected; active server actors: 1 2025-03-31T16:24:27.166742Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:482:2476] connected; active server actors: 1 2025-03-31T16:24:27.169665Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe 
[134:487:2481] connected; active server actors: 1 2025-03-31T16:24:27.172526Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:492:2486] connected; active server actors: 1 2025-03-31T16:24:27.175454Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:497:2491] connected; active server actors: 1 2025-03-31T16:24:27.178282Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:502:2496] connected; active server actors: 1 2025-03-31T16:24:27.181006Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:507:2501] connected; active server actors: 1 2025-03-31T16:24:27.183646Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:512:2506] connected; active server actors: 1 2025-03-31T16:24:27.186089Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:517:2511] connected; active server actors: 1 2025-03-31T16:24:27.188575Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:522:2516] connected; active server actors: 1 2025-03-31T16:24:27.190961Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:527:2521] connected; active server actors: 1 2025-03-31T16:24:27.193427Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:532:2526] connected; active server actors: 1 2025-03-31T16:24:27.195790Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:537:2531] connected; active server actors: 1 2025-03-31T16:24:27.198140Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:542:2536] connected; active server actors: 1 2025-03-31T16:24:27.200614Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:547:2541] connected; active server actors: 1 2025-03-31T16:24:27.202961Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:552:2546] connected; active server actors: 1 2025-03-31T16:24:27.205416Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:557:2551] connected; active server actors: 1 2025-03-31T16:24:27.207768Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:562:2556] connected; active server actors: 1 2025-03-31T16:24:27.210061Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:567:2561] connected; active server actors: 1 2025-03-31T16:24:27.212324Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:572:2566] connected; active server actors: 1 2025-03-31T16:24:27.214555Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:577:2571] connected; active server actors: 1 2025-03-31T16:24:27.216832Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:582:2576] connected; active server actors: 1 2025-03-31T16:24:27.219045Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:587:2581] connected; active server actors: 1 2025-03-31T16:24:27.221318Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:592:2586] connected; active server actors: 1 2025-03-31T16:24:27.223605Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:597:2591] connected; active server actors: 1 
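The "Config applied version N ... config:" blobs in the PERSQUEUE output above are protobuf text format. With the YDB proto descriptors at hand they could be parsed properly; lacking those, a loose reader like the following sketch (an illustrative helper for these dumps, not YDB code, and not a real protobuf text parser) is enough to make them queryable:

import re

TOKEN = re.compile(r'"[^"]*"|[{}]|[^\s{}]+')

def parse_textproto(text: str):
    # Loose reader for 'Key: value' / 'Block { ... }' shaped config blobs.
    # Returns nested dicts; fields that repeat (e.g. PartitionIds) become lists.
    tokens = TOKEN.findall(text)
    pos = 0

    def parse_block():
        nonlocal pos
        msg = {}
        while pos < len(tokens) and tokens[pos] != "}":
            key = tokens[pos].rstrip(":")
            pos += 1
            if pos < len(tokens) and tokens[pos] == "{":
                pos += 1
                value = parse_block()
                pos += 1  # consume the closing '}'
            else:
                value = tokens[pos].strip('"')
                pos += 1
            if key in msg:
                if not isinstance(msg[key], list):
                    msg[key] = [msg[key]]
                msg[key].append(value)
            else:
                msg[key] = value
        return msg

    return parse_block()

On the blobs above, parse_textproto(blob)["PartitionConfig"]["MaxSizeInPartition"] yields '104857600', and the repeated PartitionIds field comes back as the list ['0', '1'].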
2025-03-31T16:24:27.225946Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:602:2596] connected; active server actors: 1 2025-03-31T16:24:27.228238Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:607:2601] connected; active server actors: 1 2025-03-31T16:24:27.230582Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:612:2606] connected; active server actors: 1 2025-03-31T16:24:27.232904Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:617:2611] connected; active server actors: 1 2025-03-31T16:24:27.235357Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:622:2616] connected; active server actors: 1 2025-03-31T16:24:27.237696Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:627:2621] connected; active server actors: 1 2025-03-31T16:24:27.240071Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:632:2626] connected; active server actors: 1 2025-03-31T16:24:27.242680Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:637:2631] connected; active server actors: 1 2025-03-31T16:24:27.245103Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:642:2636] connected; active server actors: 1 2025-03-31T16:24:27.247659Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:647:2641] connected; active server actors: 1 2025-03-31T16:24:27.250196Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:652:2646] connected; active server actors: 1 2025-03-31T16:24:27.252515Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:657:2651] connected; active server actors: 1 2025-03-31T16:24:27.254930Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:662:2656] connected; active server actors: 1 2025-03-31T16:24:27.257599Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:667:2661] connected; active server actors: 1 2025-03-31T16:24:27.260165Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:672:2666] connected; active server actors: 1 2025-03-31T16:24:27.262718Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:677:2671] connected; active server actors: 1 2025-03-31T16:24:27.265326Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:682:2676] connected; active server actors: 1 2025-03-31T16:24:27.267618Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:687:2681] connected; active server actors: 1 2025-03-31T16:24:27.269877Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:692:2686] connected; active server actors: 1 2025-03-31T16:24:27.271940Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:697:2691] connected; active server actors: 1 2025-03-31T16:24:27.276208Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:702:2696] connected; active server actors: 1 2025-03-31T16:24:27.278535Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:707:2701] connected; active server actors: 1 2025-03-31T16:24:27.280889Z node 134 
:PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:712:2706] connected; active server actors: 1 2025-03-31T16:24:27.283138Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:717:2711] connected; active server actors: 1 2025-03-31T16:24:27.284995Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:722:2716] connected; active server actors: 1 2025-03-31T16:24:27.287025Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:727:2721] connected; active server actors: 1 2025-03-31T16:24:27.289082Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:732:2726] connected; active server actors: 1 2025-03-31T16:24:27.291142Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:737:2731] connected; active server actors: 1 2025-03-31T16:24:27.292977Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:742:2736] connected; active server actors: 1 2025-03-31T16:24:27.294666Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:747:2741] connected; active server actors: 1 2025-03-31T16:24:27.296654Z node 134 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [134:752:2746], now have 1 active actors on pipe 2025-03-31T16:24:27.297630Z node 134 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [134:755:2749], now have 1 active actors on pipe 2025-03-31T16:24:27.298542Z node 134 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [134:758:2752], now have 1 active actors on pipe 2025-03-31T16:24:27.299469Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:761:2755] connected; active server actors: 1 >> TColumnShardTestSchema::RebootHotTiers >> TColumnShardTestSchema::RebootForgetAfterFail >> TColumnShardTestSchema::RebootHotTiersRevCompression >> TColumnShardTestSchema::RebootHotTiersTtl |91.3%| [TA] $(B)/ydb/core/persqueue/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TColumnShardTestSchema::HotTiersTtlWithStat [GOOD] >> DataShardVolatile::DistributedWriteThenCopyTable [GOOD] >> DataShardVolatile::DistributedWriteThenBulkUpsert |91.3%| [TA] {RESULT} $(B)/ydb/core/persqueue/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::HotTiersTtlWithStat [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143438815.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143438815.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=143438815.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143438815.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143438815.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=123438815.000000s;Name=;Codec=}; WaitEmptyAfter=1;Tiers={{Column=timestamp;EvictAfter=143438815.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143438815.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=123437615.000000s;Name=;Codec=}; 2025-03-31T16:23:36.226100Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:23:36.308160Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:23:36.332509Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:23:36.332784Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:23:36.340553Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:23:36.340756Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:23:36.340985Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:23:36.341116Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:23:36.341237Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 
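The WaitEmptyAfter=...;Tiers={{...}};TTL={...} header lines at the top of this test's output encode the tiering policy under test. A small extractor, again just a reading aid with the field names taken from the lines above:

import re

TIER_RE = re.compile(
    r"\{Column=(?P<column>[^;]+);"
    r"EvictAfter=(?P<evict>[\d.]+)s;"
    r"Name=(?P<name>[^;]*);"
    r"Codec=(?P<codec>[^;}]*)\}"
)

def parse_tiers(spec: str):
    # Return (name, column, evict_after_seconds, codec) per tier descriptor;
    # an empty Codec= field (the default codec) is reported as None.
    return [
        (m["name"], m["column"], float(m["evict"]), m["codec"] or None)
        for m in TIER_RE.finditer(spec)
    ]

For the second header above this yields ('tier0', 'timestamp', 143438815.0, None) and ('tier1', 'timestamp', 143438815.0, 'zstd'); note the TTL={...} clause has the same shape and is matched as well, with an empty Name.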
2025-03-31T16:23:36.341379Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:23:36.341478Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:23:36.341584Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:23:36.341690Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:23:36.341825Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:23:36.341942Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:23:36.342052Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:23:36.369492Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:23:36.369642Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:23:36.369696Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:23:36.369850Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:23:36.369989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:23:36.370050Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:23:36.370089Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:23:36.370170Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:23:36.370222Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:23:36.370260Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 
2025-03-31T16:23:36.370287Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:23:36.370441Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:23:36.370500Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:23:36.370540Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:23:36.370567Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:23:36.370651Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:23:36.370698Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:23:36.370739Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:23:36.370764Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:23:36.370823Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:23:36.370857Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:23:36.370890Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:23:36.370933Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:23:36.370968Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:23:36.370996Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:23:36.371364Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=41; 2025-03-31T16:23:36.371445Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=31; 2025-03-31T16:23:36.371511Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=32; 
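Almost every columnshard line here is a semicolon-delimited key=value payload after the severity marker. One parsing sketch (illustrative, not from the YDB tree; splitting on the first ": " is a loose heuristic for the severity separator) that copes with nested values such as description=CLASS_NAME=...:

def parse_payload(line: str) -> dict:
    # Keep everything after the first ": " (the severity separator), then
    # split the payload on ';'. partition() keeps nested '=' inside values,
    # so 'description=CLASS_NAME=TablesCleaner' maps to 'CLASS_NAME=TablesCleaner'.
    fields = {}
    payload = line.split(": ", 1)[-1]
    for token in payload.split(";"):
        token = token.strip()
        if "=" in token:
            key, _, value = token.partition("=")
            fields[key] = value
    return fields

Filtering for event=normalizer_init reproduces the registration order logged above (Granules=1, Chunks=2, TablesCleaner=4, CleanGranuleId=6, and so on through RestoreV2Chunks=16), and the common_data.cpp:29 lines give the loading-time counters (TablesLoadingTime=41, SchemaPresetLoadingTime=31, ...).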
2025-03-31T16:23:36.371585Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=36; 2025-03-31T16:23:36.371735Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:23:36.371812Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:23:36.371851Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:23:36.372046Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:23:36.372093Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:23:36.372126Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:23:36.372259Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchem ... e 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=6; 2025-03-31T16:24:30.174204Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700008;tx_id=18446744073709551615;;current_snapshot_ts=1000000002; 2025-03-31T16:24:30.174249Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=6;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-31T16:24:30.174298Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-31T16:24:30.174342Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-31T16:24:30.174440Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-31T16:24:30.174645Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000008:max} readable: {1000000008:max} at tablet 9437184 2025-03-31T16:24:30.174761Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 
2025-03-31T16:24:30.174903Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000008:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-03-31T16:24:30.174956Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000008:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-03-31T16:24:30.175372Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000008:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2025-03-31T16:24:30.175448Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000008:max};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:136;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-03-31T16:24:30.175866Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000008:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:162;event=TTxScan started;actor_id=[1:2013:4022];trace_detailed=; 2025-03-31T16:24:30.176317Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;fline=context.cpp:84;ff_first=(column_ids=1;column_names=timestamp;);; 2025-03-31T16:24:30.176530Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;fline=context.cpp:99;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-03-31T16:24:30.176682Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:24:30.176798Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:24:30.177077Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=3;SelfId=[1:2013:4022];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-31T16:24:30.177171Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:2013:4022];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:24:30.177280Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:2013:4022];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:24:30.177332Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:2013:4022] finished for tablet 9437184 2025-03-31T16:24:30.177746Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:2013:4022];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:416;event=scan_finish;compute_actor_id=[1:2012:4021];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1743438270175809,"name":"_full_task","f":1743438270175809,"d_finished":0,"c":0,"l":1743438270177396,"d":1587},"events":[{"name":"bootstrap","f":1743438270176060,"d_finished":765,"c":1,"l":1743438270176825,"d":765},{"a":1743438270177054,"name":"ack","f":1743438270177054,"d_finished":0,"c":0,"l":1743438270177396,"d":342},{"a":1743438270177040,"name":"processing","f":1743438270177040,"d_finished":0,"c":0,"l":1743438270177396,"d":356},{"name":"ProduceResults","f":1743438270176611,"d_finished":414,"c":2,"l":1743438270177302,"d":414},{"a":1743438270177305,"name":"Finish","f":1743438270177305,"d_finished":0,"c":0,"l":1743438270177396,"d":91}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:24:30.177823Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:2013:4022];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:2012:4021];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-31T16:24:30.178186Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=3;SelfId=[1:2013:4022];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:371;event=scan_finished;compute_actor_id=[1:2012:4021];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":1743438270175809,"name":"_full_task","f":1743438270175809,"d_finished":0,"c":0,"l":1743438270177865,"d":2056},"events":[{"name":"bootstrap","f":1743438270176060,"d_finished":765,"c":1,"l":1743438270176825,"d":765},{"a":1743438270177054,"name":"ack","f":1743438270177054,"d_finished":0,"c":0,"l":1743438270177865,"d":811},{"a":1743438270177040,"name":"processing","f":1743438270177040,"d_finished":0,"c":0,"l":1743438270177865,"d":825},{"name":"ProduceResults","f":1743438270176611,"d_finished":414,"c":2,"l":1743438270177302,"d":414},{"a":1743438270177305,"name":"Finish","f":1743438270177305,"d_finished":0,"c":0,"l":1743438270177865,"d":560}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:2013:4022]->[1:2012:4021] 2025-03-31T16:24:30.178263Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:2013:4022];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-31T16:24:30.175423Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-03-31T16:24:30.178301Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:2013:4022];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-31T16:24:30.178402Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:2013:4022];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 240000/15598728 160000/10402096 80000/5203352 0/0
>> TColumnShardTestSchema::ColdTiersWithStat [GOOD]
>> DataShardTxOrder::RandomPoints_DelayRS [GOOD]
>> TColumnShardTestSchema::OneTier
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ColdTiersWithStat [GOOD]
Test command err:
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143438814.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143438814.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=143438814.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123438814.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143438814.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=143438814.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123437614.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123438814.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=123438814.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123437614.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123437614.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=123437614.000000s;Name=;Codec=}; 2025-03-31T16:23:35.253583Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:23:35.357930Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:23:35.385414Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:23:35.385722Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:23:35.394566Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:23:35.394830Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:23:35.395109Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:23:35.395261Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:23:35.395395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:23:35.395511Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:23:35.395621Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:23:35.395733Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:23:35.395871Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:23:35.396013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:23:35.396127Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:23:35.396231Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:23:35.426531Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:23:35.426712Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:23:35.426772Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:23:35.426911Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:23:35.427067Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:23:35.427163Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:23:35.427205Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:23:35.427275Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:23:35.427338Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:23:35.427373Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:23:35.427403Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:23:35.427574Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:23:35.427651Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:23:35.427697Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:23:35.427728Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:23:35.427820Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:23:35.427868Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:23:35.427902Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:23:35.427921Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:23:35.427965Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:23:35.427988Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:23:35.428005Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:23:35.428036Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:23:35.428063Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:23:35.428084Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:23:35.428383Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=35; 2025-03-31T16:23:35.428470Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=35; 2025-03-31T16:23:35.428529Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=25; 2025-03-31T16:23:35.428590Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=30; 2025-03-31T16:23:35.428738Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:23:35.428787Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:23:35.428810Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:23:35.428960Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:23:35.428995Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:23:35.429014Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:23:35.429137Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:23:35.429170Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:4 ... Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=4; 2025-03-31T16:24:32.481811Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700007;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-03-31T16:24:32.481860Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-31T16:24:32.481917Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-31T16:24:32.481970Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-31T16:24:32.482069Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-31T16:24:32.482231Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000007:max} readable: {1000000007:max} at tablet 9437184 2025-03-31T16:24:32.482340Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 
9437184 2025-03-31T16:24:32.482469Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000007:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-03-31T16:24:32.482515Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000007:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-03-31T16:24:32.482891Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000007:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2025-03-31T16:24:32.482976Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000007:max};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:136;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-03-31T16:24:32.483369Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000007:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:162;event=TTxScan started;actor_id=[1:1392:3397];trace_detailed=; 2025-03-31T16:24:32.483749Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:84;ff_first=(column_ids=1;column_names=timestamp;);; 2025-03-31T16:24:32.483931Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:99;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-03-31T16:24:32.484084Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:24:32.484192Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:24:32.484429Z node 1 :TX_COLUMNSHARD_SCAN 
DEBUG: TEST_STEP=4;SelfId=[1:1392:3397];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-31T16:24:32.484527Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1392:3397];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:24:32.484631Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1392:3397];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:24:32.484662Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1392:3397] finished for tablet 9437184 2025-03-31T16:24:32.485026Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1392:3397];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:416;event=scan_finish;compute_actor_id=[1:1391:3396];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0},{"events":["f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1743438272483315,"name":"_full_task","f":1743438272483315,"d_finished":0,"c":0,"l":1743438272484729,"d":1414},"events":[{"name":"bootstrap","f":1743438272483511,"d_finished":706,"c":1,"l":1743438272484217,"d":706},{"a":1743438272484411,"name":"ack","f":1743438272484411,"d_finished":0,"c":0,"l":1743438272484729,"d":318},{"a":1743438272484398,"name":"processing","f":1743438272484398,"d_finished":0,"c":0,"l":1743438272484729,"d":331},{"name":"ProduceResults","f":1743438272484020,"d_finished":397,"c":2,"l":1743438272484651,"d":397},{"a":1743438272484653,"name":"Finish","f":1743438272484653,"d_finished":0,"c":0,"l":1743438272484729,"d":76}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:24:32.485094Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1392:3397];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:1391:3396];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-31T16:24:32.485399Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=4;SelfId=[1:1392:3397];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:371;event=scan_finished;compute_actor_id=[1:1391:3396];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0},{"events":["f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1743438272483315,"name":"_full_task","f":1743438272483315,"d_finished":0,"c":0,"l":1743438272485133,"d":1818},"events":[{"name":"bootstrap","f":1743438272483511,"d_finished":706,"c":1,"l":1743438272484217,"d":706},{"a":1743438272484411,"name":"ack","f":1743438272484411,"d_finished":0,"c":0,"l":1743438272485133,"d":722},{"a":1743438272484398,"name":"processing","f":1743438272484398,"d_finished":0,"c":0,"l":1743438272485133,"d":735},{"name":"ProduceResults","f":1743438272484020,"d_finished":397,"c":2,"l":1743438272484651,"d":397},{"a":1743438272484653,"name":"Finish","f":1743438272484653,"d_finished":0,"c":0,"l":1743438272485133,"d":480}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1392:3397]->[1:1391:3396] 2025-03-31T16:24:32.485467Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1392:3397];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-31T16:24:32.482952Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-03-31T16:24:32.485513Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1392:3397];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-31T16:24:32.485600Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1392:3397];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 160000/10402136 160000/10402136 160000/10402136 80000/5203584 0/0
>> TColumnShardTestSchema::RebootOneTier [GOOD]
>> TColumnShardTestSchema::HotTiers [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_DelayRS [GOOD]
Test command err:
2025-03-31T16:19:12.086358Z node 1 :TX_DATASHARD TRACE: StateInit, 
received event# 268828672, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:19:12.087847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:19:12.087896Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:19:12.096460Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:19:12.096887Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:131:2154] 2025-03-31T16:19:12.097138Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:19:12.110112Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:19:12.157753Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:19:12.158043Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:19:12.162194Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-03-31T16:19:12.162296Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-03-31T16:19:12.162344Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-03-31T16:19:12.164822Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:19:12.165479Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:19:12.165558Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:197:2154] in generation 2 2025-03-31T16:19:12.252050Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:19:12.286301Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-03-31T16:19:12.287527Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:19:12.287679Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-03-31T16:19:12.287716Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-03-31T16:19:12.287752Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-03-31T16:19:12.287782Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:19:12.288001Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:12.288721Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:12.289910Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-03-31T16:19:12.290010Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-03-31T16:19:12.290064Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-31T16:19:12.290100Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:19:12.290150Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-03-31T16:19:12.290181Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-31T16:19:12.290213Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-31T16:19:12.290240Z node 1 
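[Editor's note] The TRACE records above step one operation through the datashard's execution-unit pipeline (CheckSchemeTx, StoreSchemeTx, FinishPropose, WaitForPlan, then PlanQueue, LoadTxDetails and so on), reporting a status such as Executed or DelayComplete per unit. A small illustrative sketch, matching only the phrasing visible above, that reconstructs the per-operation pipeline from such a dump:

```python
import re
from collections import defaultdict

# Phrases exactly as they appear in the TRACE records above:
TRYING = re.compile(r"Trying to execute \[(?P<op>[\d:]+)\] at \d+ on unit (?P<unit>\w+)")
STATUS = re.compile(r"Execution status for \[(?P<op>[\d:]+)\] at \d+ is (?P<status>\w+)")

def unit_pipeline(lines):
    """Return {operation id: [(unit, status), ...]} in execution order."""
    pipeline = defaultdict(list)
    running = {}  # operation id -> unit it is currently executing on
    for line in lines:
        if m := TRYING.search(line):
            running[m["op"]] = m["unit"]
        elif m := STATUS.search(line):
            pipeline[m["op"]].append((running.get(m["op"], "?"), m["status"]))
    return dict(pipeline)

demo = [
    "... TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx",
    "... TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts",
]
print(unit_pipeline(demo))  # {'0:1': [('CheckSchemeTx', 'ExecutedNoMoreRestarts')]}
```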
:TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-03-31T16:19:12.290278Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:19:12.290386Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:12.290437Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:12.290487Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-03-31T16:19:12.294845Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:131:2154]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-03-31T16:19:12.294926Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T16:19:12.295010Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-31T16:19:12.295265Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-03-31T16:19:12.295316Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-03-31T16:19:12.295360Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-03-31T16:19:12.295410Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-31T16:19:12.295475Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-03-31T16:19:12.295521Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-03-31T16:19:12.295551Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-03-31T16:19:12.295808Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-03-31T16:19:12.295861Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-03-31T16:19:12.295894Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-03-31T16:19:12.295926Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-03-31T16:19:12.295967Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-03-31T16:19:12.295994Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-03-31T16:19:12.296024Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-03-31T16:19:12.296082Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-03-31T16:19:12.296112Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-03-31T16:19:12.311889Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-03-31T16:19:12.311957Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-03-31T16:19:12.312010Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit 
FinishPropose 2025-03-31T16:19:12.312091Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-03-31T16:19:12.312150Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-03-31T16:19:12.314880Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:12.314948Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:12.314994Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-03-31T16:19:12.315139Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:131:2154]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-03-31T16:19:12.315167Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-31T16:19:12.315325Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-03-31T16:19:12.315385Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-31T16:19:12.315418Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-03-31T16:19:12.315460Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-03-31T16:19:12.323237Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-03-31T16:19:12.323309Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:19:12.323523Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:12.323563Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:12.323610Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-31T16:19:12.323651Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:19:12.323682Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-31T16:19:12.323718Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-03-31T16:19:12.323750Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-03-31T16:19:12.323786Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-31T16:19:12.323819Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-03-31T16:19:12.323857Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-03-31T16:19:12.323893Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-03-31T16:19:12.324062Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-03-31T16:19:12.324138Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 
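[Editor's note] Completion records like the one above carry per-transaction timings ("exec latency: 0 ms, propose latency: 1 ms"). A throwaway sketch for aggregating them across a run; "ya_test.log" is a hypothetical capture of this output, not a file the test suite produces:

```python
import re

LATENCY = re.compile(r"exec latency: (\d+) ms, propose latency: (\d+) ms")

def latencies(lines):
    """Collect (exec_ms, propose_ms) pairs from completion records."""
    return [(int(e), int(p)) for line in lines for e, p in LATENCY.findall(line)]

# "ya_test.log" is a placeholder path for wherever this dump was saved.
with open("ya_test.log") as fh:
    pairs = latencies(fh)
if pairs:
    print(f"{len(pairs)} completions, "
          f"max propose latency {max(p for _, p in pairs)} ms")
```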
2025-03-31T16:19:12.324179Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-03-31T16:19:12.324206Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-03-31T16:19:12.324231Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-03-31T16:19:12.324293Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-31T16:19:12.324324Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-03-31T16:19:12.324353Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-31T16:19:12.324382Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-03-31T16:19:12.324466Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-03-31T16:19:12.324502Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-03-31T16:19:12.324532Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-03-31T16:19:12.324568Z node 1 :TX_DATA ... _DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:24:32.611979Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:24:32.612017Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:22] at 9437184 on unit CompleteOperation 2025-03-31T16:24:32.612065Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 22] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:24:32.612104Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:24:32.612324Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:24:32.612359Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:23] at 9437184 on unit CompleteOperation 2025-03-31T16:24:32.612402Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 23] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:24:32.612435Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:24:32.612618Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:24:32.612652Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:24] at 9437184 on unit CompleteOperation 2025-03-31T16:24:32.612699Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 24] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:24:32.612730Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:24:32.612895Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:24:32.612930Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:25] at 9437184 on unit CompleteOperation 2025-03-31T16:24:32.612973Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 25] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:24:32.613005Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:24:32.613223Z node 
32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:24:32.613263Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:26] at 9437184 on unit CompleteOperation 2025-03-31T16:24:32.613309Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 26] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:24:32.613362Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:24:32.613510Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:24:32.613540Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:27] at 9437184 on unit CompleteOperation 2025-03-31T16:24:32.613581Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 27] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:24:32.613614Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:24:32.613779Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:24:32.613815Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:28] at 9437184 on unit CompleteOperation 2025-03-31T16:24:32.613863Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 28] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:24:32.613895Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:24:32.614035Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:24:32.614069Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:29] at 9437184 on unit CompleteOperation 2025-03-31T16:24:32.614117Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 29] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:24:32.614151Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:24:32.614302Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:24:32.614335Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:30] at 9437184 on unit CompleteOperation 2025-03-31T16:24:32.614381Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 30] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:24:32.614413Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:24:32.614586Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:24:32.614618Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:31] at 9437184 on unit CompleteOperation 2025-03-31T16:24:32.614666Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 31] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:24:32.614701Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:24:32.614927Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:24:32.614960Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:32] at 9437184 on unit CompleteOperation 2025-03-31T16:24:32.615008Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 32] from 9437184 at tablet 9437184 send result to client 
[32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:24:32.615042Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:24:32.615284Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:24:32.615320Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:33] at 9437184 on unit CompleteOperation 2025-03-31T16:24:32.615368Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 33] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:24:32.615403Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:24:32.615609Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:24:32.615640Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:34] at 9437184 on unit CompleteOperation 2025-03-31T16:24:32.615687Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 34] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:24:32.615720Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:24:32.615870Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:24:32.615901Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:35] at 9437184 on unit CompleteOperation 2025-03-31T16:24:32.615946Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 35] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:24:32.711756Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:24:32.712169Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:24:32.712237Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:36] at 9437184 on unit CompleteOperation 2025-03-31T16:24:32.712314Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 36] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:24:32.712361Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:24:32.712570Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:24:32.712606Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:37] at 9437184 on unit CompleteOperation 2025-03-31T16:24:32.712658Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 37] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:24:32.712693Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:24:32.713030Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:231:2224], Recipient [32:341:2309]: {TEvReadSet step# 1000004 txid# 36 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 32} 2025-03-31T16:24:32.713081Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:24:32.713126Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 36 2025-03-31T16:24:32.713268Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:231:2224], Recipient [32:341:2309]: {TEvReadSet step# 1000004 txid# 5 TabletSource# 
9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2025-03-31T16:24:32.713303Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:24:32.713351Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 5 2025-03-31T16:24:32.713477Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:231:2224], Recipient [32:341:2309]: {TEvReadSet step# 1000004 txid# 8 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2025-03-31T16:24:32.713513Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:24:32.713545Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 8 2025-03-31T16:24:32.713608Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:231:2224], Recipient [32:341:2309]: {TEvReadSet step# 1000004 txid# 9 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2025-03-31T16:24:32.713642Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:24:32.713673Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 9 2025-03-31T16:24:32.713795Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:231:2224], Recipient [32:341:2309]: {TEvReadSet step# 1000004 txid# 10 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2025-03-31T16:24:32.713830Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:24:32.713864Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 10 2025-03-31T16:24:32.713934Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:231:2224], Recipient [32:341:2309]: {TEvReadSet step# 1000004 txid# 12 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2025-03-31T16:24:32.713969Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T16:24:32.714005Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 12 expect 31 31 31 30 31 27 31 28 17 31 16 30 30 27 26 30 20 17 20 28 - 28 16 26 27 17 - 20 - - - - actual 31 31 31 30 31 27 31 28 17 31 16 30 30 27 26 30 20 17 20 28 - 28 16 26 27 17 - 20 - - - - interm 5 6 - 2 3 5 4 4 4 3 2 - 5 6 5 - 5 4 6 - - - 6 - - - - - - - - - ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootOneTier [GOOD] Test command err: 2025-03-31T16:24:13.619974Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:24:13.700433Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:24:13.704271Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:24:13.704642Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:24:13.721097Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:24:13.721380Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:24:13.728504Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:24:13.728679Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:24:13.728864Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:24:13.728939Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:24:13.729036Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:24:13.729139Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:24:13.729224Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:24:13.729343Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:24:13.729433Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:24:13.729498Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:24:13.729565Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:24:13.729652Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:24:13.746327Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:24:13.749713Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:24:13.749928Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 
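The TxInitSchema/TTxUpdateSchema entries above follow a fixed pattern: every normalizer CLASS_NAME is registered first, then the chain runs them one at a time (normalizer_init, an "N chunks found" report, normalizer_finished, normalizer_switched to the next). A minimal Python sketch of that control flow follows; run_chain and work are illustrative names, not the actual NKikimr classes, and the real seq_ids in the log are sparse (1, 2, 4, 6, ...) rather than consecutive as here:

    # Sketch of a sequential normalizer chain, as suggested by the
    # normalizer_register / normalizer_init / normalizer_finished events above.
    # Illustrative only -- not the actual YDB implementation.
    from typing import Callable, List, Tuple

    def run_chain(normalizers: List[Tuple[str, Callable[[], int]]]) -> None:
        # Run each normalizer once, in registration order, logging progress
        # in the same key=value style as the tablet log.
        for seq_id, (name, work) in enumerate(normalizers, start=1):
            print(f"event=normalizer_init;seq_id={seq_id};type={name}")
            chunks = work()  # each normalizer reports how many chunks it touched
            print(f"normalizer={name};message={chunks} chunks found")
            print(f"event=normalizer_finished;description=CLASS_NAME={name};id={seq_id}")

    run_chain([("Granules", lambda: 0), ("Chunks", lambda: 0), ("TablesCleaner", lambda: 0)])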
2025-03-31T16:24:13.750005Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:24:13.750227Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:24:13.750434Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:24:13.750515Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:24:13.750567Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:24:13.750681Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:24:13.750750Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:24:13.750797Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:24:13.750828Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:24:13.751111Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:24:13.751225Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:24:13.751281Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:24:13.751316Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:24:13.751416Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:24:13.751476Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:24:13.751531Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:24:13.751561Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:24:13.751628Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 
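To audit a chain like the one above mechanically, the lifecycle can be recovered from the raw text with a regular expression over the key=value fields that literally appear in these lines (the pattern and helper name below are assumptions for illustration, not YDB tooling):

    import re

    # Matches the three lifecycle events and captures the normalizer name,
    # which appears either as type=<Name> or as description=CLASS_NAME=<Name>.
    EVENT_RE = re.compile(
        r"event=(normalizer_init|normalizer_finished|normalizer_switched)"
        r".*?(?:type=(\w+)|description=CLASS_NAME=(\w+))"
    )

    def normalizer_timeline(log_text: str):
        # Returns e.g. [('normalizer_init', 'Granules'),
        #               ('normalizer_finished', 'Granules'), ...]
        return [(m.group(1), m.group(2) or m.group(3))
                for m in EVENT_RE.finditer(log_text)]

A quick invariant to assert on the result: every name that appears with normalizer_init should later appear with normalizer_finished, in the same order.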
2025-03-31T16:24:13.751667Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:24:13.751696Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:24:13.751756Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:24:13.751802Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:24:13.751833Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:24:13.752291Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=64; 2025-03-31T16:24:13.752448Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=79; 2025-03-31T16:24:13.752535Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=37; 2025-03-31T16:24:13.752661Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=44; 2025-03-31T16:24:13.752876Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:24:13.752963Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:24:13.753005Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:24:13.753300Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:24:13.753357Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:24:13.753392Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:24:13.753573Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:24:13.753638Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:24:13.753682Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-31T16:24:13.753892Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:24:13.753937Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:24:13.753968Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-31T1 ... ished=0;fault=0;schema=timestamp: timestamp[us]; 2025-03-31T16:24:33.329196Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1037:3030];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:1;records_count:36240;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:24:33.329366Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1037:3030];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:1;records_count:36240;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:24:33.329421Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1037:3030];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-31T16:24:33.329472Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1037:3030];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:201;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-03-31T16:24:33.329645Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1037:3030];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-31T16:24:33.329814Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1037:3030];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:1;records_count:36240;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:24:33.329872Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1037:3030];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce 
result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-31T16:24:33.329999Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1037:3030];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:232;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;);columns=1;rows=36240; 2025-03-31T16:24:33.330075Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1037:3030];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:252;stage=data_format;batch_size=289920;num_rows=36240;batch_columns=timestamp; 2025-03-31T16:24:33.330263Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1037:3030];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:1032:3025];bytes=289920;rows=36240;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us]; 2025-03-31T16:24:33.330415Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1037:3030];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:24:33.330548Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1037:3030];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:24:33.330661Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1037:3030];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:24:33.330811Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1037:3030];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-31T16:24:33.330891Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1037:3030];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:24:33.330963Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1037:3030];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:24:33.330999Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1037:3030] finished for tablet 9437184 2025-03-31T16:24:33.331601Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:1037:3030];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:416;event=scan_finish;compute_actor_id=[1:1032:3025];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.004},{"events":["f_processing","f_task_result"],"t":0.007},{"events":["l_task_result"],"t":0.625},{"events":["f_ack"],"t":0.626},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.628}],"full":{"a":1743438272702397,"name":"_full_task","f":1743438272702397,"d_finished":0,"c":0,"l":1743438273331105,"d":628708},"events":[{"name":"bootstrap","f":1743438272702955,"d_finished":4276,"c":1,"l":1743438272707231,"d":4276},{"a":1743438273330792,"name":"ack","f":1743438273328527,"d_finished":2049,"c":2,"l":1743438273330690,"d":2362},{"a":1743438273330778,"name":"processing","f":1743438272709754,"d_finished":188816,"c":16,"l":1743438273330693,"d":189143},{"name":"ProduceResults","f":1743438272704972,"d_finished":5070,"c":20,"l":1743438273330983,"d":5070},{"a":1743438273330986,"name":"Finish","f":1743438273330986,"d_finished":0,"c":0,"l":1743438273331105,"d":119},{"name":"task_result","f":1743438272709785,"d_finished":186300,"c":14,"l":1743438273328350,"d":186300}],"id":"9437184::3"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:24:33.331710Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1037:3030];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:1032:3025];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-31T16:24:33.332230Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:1037:3030];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:371;event=scan_finished;compute_actor_id=[1:1032:3025];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.004},{"events":["f_processing","f_task_result"],"t":0.007},{"events":["l_task_result"],"t":0.625},{"events":["f_ack"],"t":0.626},{"events":["l_ProduceResults","f_Finish"],"t":0.628},{"events":["l_ack","l_processing","l_Finish"],"t":0.629}],"full":{"a":1743438272702397,"name":"_full_task","f":1743438272702397,"d_finished":0,"c":0,"l":1743438273331765,"d":629368},"events":[{"name":"bootstrap","f":1743438272702955,"d_finished":4276,"c":1,"l":1743438272707231,"d":4276},{"a":1743438273330792,"name":"ack","f":1743438273328527,"d_finished":2049,"c":2,"l":1743438273330690,"d":3022},{"a":1743438273330778,"name":"processing","f":1743438272709754,"d_finished":188816,"c":16,"l":1743438273330693,"d":189803},{"name":"ProduceResults","f":1743438272704972,"d_finished":5070,"c":20,"l":1743438273330983,"d":5070},{"a":1743438273330986,"name":"Finish","f":1743438273330986,"d_finished":0,"c":0,"l":1743438273331765,"d":779},{"name":"task_result","f":1743438272709785,"d_finished":186300,"c":14,"l":1743438273328350,"d":186300}],"id":"9437184::3"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:24:33.332329Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1037:3030];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-31T16:24:32.701168Z;index_granules=0;index_portions=2;index_batches=1721;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=5175704;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=5175704;selected_rows=0; 2025-03-31T16:24:33.332378Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1037:3030];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-31T16:24:33.332612Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:1037:3030];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::HotTiers [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; 
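The scan_finish/scan_finished records earlier in this output (e.g. the RebootOneTier scan) embed a JSON stats payload: "p" appears to mark event times relative to scan start in seconds, "full" brackets the whole task, and "events" carries per-stage counters with f/l timestamps and d durations in microseconds. A sketch that extracts the payload from one such line and totals time per stage; the stats={...};iterator= framing is taken from the lines above, while stage_durations is a made-up helper name:

    import json, re

    # The stats JSON sits between 'stats=' and the trailing ';iterator=' marker.
    STATS_RE = re.compile(r"stats=(\{.*?\});iterator=")

    def stage_durations(log_line: str) -> dict:
        # Return {stage name: 'd' duration in microseconds} for one scan record.
        m = STATS_RE.search(log_line)
        if not m:
            return {}
        stats = json.loads(m.group(1))
        return {ev["name"]: ev.get("d", 0) for ev in stats.get("events", [])}

Applied to the first RebootOneTier scan above this yields roughly {'bootstrap': 4276, 'ack': 2362, 'processing': 189143, 'ProduceResults': 5070, 'Finish': 119, 'task_result': 186300}, with processing and task_result dominating the scan's busy time.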
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143438801.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143438801.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=143438801.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123438801.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143438801.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=143438801.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123437601.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123438801.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=123438801.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123437601.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123437601.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=123437601.000000s;Name=;Codec=}; 2025-03-31T16:23:21.899669Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:23:21.987575Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:23:22.006591Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:23:22.006847Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:23:22.014928Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:23:22.015178Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:23:22.015432Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:23:22.015566Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:23:22.015690Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:23:22.015817Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:23:22.015932Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:23:22.016046Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:23:22.016177Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:23:22.016309Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:23:22.016417Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:23:22.016528Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:23:22.043701Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:23:22.043880Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:23:22.043962Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:23:22.044158Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:23:22.044415Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:23:22.044537Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:23:22.044583Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:23:22.044681Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:23:22.044748Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:23:22.044797Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:23:22.044830Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:23:22.045044Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:23:22.045124Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:23:22.045173Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:23:22.045208Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:23:22.045299Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:23:22.045356Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:23:22.045403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:23:22.045434Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:23:22.045511Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:23:22.045551Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:23:22.045583Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:23:22.045638Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:23:22.045679Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:23:22.045719Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:23:22.046138Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=52; 2025-03-31T16:23:22.046231Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=39; 2025-03-31T16:23:22.046314Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=43; 2025-03-31T16:23:22.046412Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=52; 2025-03-31T16:23:22.046632Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:23:22.046701Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:23:22.046742Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:23:22.046976Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:23:22.047034Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:23:22.047067Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=94 ... d=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=7; 2025-03-31T16:24:33.832376Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-03-31T16:24:33.832441Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=7;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-31T16:24:33.832506Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-31T16:24:33.832564Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-31T16:24:33.832685Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-31T16:24:33.832983Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000009:max} readable: {1000000009:max} at tablet 9437184 2025-03-31T16:24:33.833132Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-03-31T16:24:33.833323Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-03-31T16:24:33.833417Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-03-31T16:24:33.833960Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2025-03-31T16:24:33.834067Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:136;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-03-31T16:24:33.834647Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:162;event=TTxScan started;actor_id=[1:1984:3989];trace_detailed=; 2025-03-31T16:24:33.835161Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:84;ff_first=(column_ids=1;column_names=timestamp;);; 2025-03-31T16:24:33.835429Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:99;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-03-31T16:24:33.835645Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:24:33.835807Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:24:33.836265Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1984:3989];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-31T16:24:33.836401Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1984:3989];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:24:33.836558Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1984:3989];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:24:33.836620Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1984:3989] finished for tablet 9437184 2025-03-31T16:24:33.837146Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1984:3989];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:416;event=scan_finish;compute_actor_id=[1:1983:3988];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","f_processing"],"t":0.001},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.002}],"full":{"a":1743438273834562,"name":"_full_task","f":1743438273834562,"d_finished":0,"c":0,"l":1743438273836689,"d":2127},"events":[{"name":"bootstrap","f":1743438273834803,"d_finished":1034,"c":1,"l":1743438273835837,"d":1034},{"a":1743438273836237,"name":"ack","f":1743438273836237,"d_finished":0,"c":0,"l":1743438273836689,"d":452},{"a":1743438273836216,"name":"processing","f":1743438273836216,"d_finished":0,"c":0,"l":1743438273836689,"d":473},{"name":"ProduceResults","f":1743438273835539,"d_finished":586,"c":2,"l":1743438273836592,"d":586},{"a":1743438273836596,"name":"Finish","f":1743438273836596,"d_finished":0,"c":0,"l":1743438273836689,"d":93}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:24:33.837239Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1984:3989];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:1983:3988];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-31T16:24:33.837756Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=4;SelfId=[1:1984:3989];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:371;event=scan_finished;compute_actor_id=[1:1983:3988];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","f_processing"],"t":0.001},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.002}],"full":{"a":1743438273834562,"name":"_full_task","f":1743438273834562,"d_finished":0,"c":0,"l":1743438273837293,"d":2731},"events":[{"name":"bootstrap","f":1743438273834803,"d_finished":1034,"c":1,"l":1743438273835837,"d":1034},{"a":1743438273836237,"name":"ack","f":1743438273836237,"d_finished":0,"c":0,"l":1743438273837293,"d":1056},{"a":1743438273836216,"name":"processing","f":1743438273836216,"d_finished":0,"c":0,"l":1743438273837293,"d":1077},{"name":"ProduceResults","f":1743438273835539,"d_finished":586,"c":2,"l":1743438273836592,"d":586},{"a":1743438273836596,"name":"Finish","f":1743438273836596,"d_finished":0,"c":0,"l":1743438273837293,"d":697}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1984:3989]->[1:1983:3988] 2025-03-31T16:24:33.837867Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1984:3989];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-31T16:24:33.834034Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-03-31T16:24:33.837922Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1984:3989];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-31T16:24:33.838054Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1984:3989];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 240000/15598728 160000/10402096 160000/10402096 80000/5203544 0/0 >> TColumnShardTestSchema::RebootColdTiersWithStat [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootColdTiersWithStat [GOOD] Test command err: 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143438805.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143438805.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=143438805.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123438805.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143438805.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=143438805.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123437605.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123438805.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=123438805.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123437605.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123437605.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=123437605.000000s;Name=;Codec=}; 2025-03-31T16:23:26.153431Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:23:26.242003Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:23:26.258663Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:23:26.258940Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:23:26.266858Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:23:26.267049Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:23:26.267291Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:23:26.267395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:23:26.267496Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:23:26.267611Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:23:26.267680Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:23:26.267785Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:23:26.267905Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:23:26.268072Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:23:26.268187Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:23:26.268308Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:23:26.291934Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:23:26.292112Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:23:26.292200Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:23:26.292385Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:23:26.292597Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:23:26.292680Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:23:26.292729Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:23:26.292830Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:23:26.292974Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:23:26.293087Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:23:26.293126Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:23:26.293492Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:23:26.293695Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:23:26.293774Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:23:26.293830Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:23:26.293967Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:23:26.294040Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:23:26.294090Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:23:26.294122Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:23:26.294223Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:23:26.294267Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:23:26.294297Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:23:26.294358Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:23:26.294406Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:23:26.294451Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:23:26.294919Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=63; 2025-03-31T16:23:26.295025Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=43; 2025-03-31T16:23:26.295148Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=68; 2025-03-31T16:23:26.295242Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=45; 2025-03-31T16:23:26.295461Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:23:26.295534Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:23:26.295589Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:23:26.295807Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:23:26.295861Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:23:26.295892Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:23:26.296028Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:23:26.296080Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:4 ... _data.cpp:29;EXECUTE:finishLoadingTime=345; 2025-03-31T16:24:34.731216Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=31994; 2025-03-31T16:24:34.737469Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=6143; 2025-03-31T16:24:34.744370Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=5948; 2025-03-31T16:24:34.744478Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=6910; 2025-03-31T16:24:34.744645Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=99; 2025-03-31T16:24:34.744784Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=65; 2025-03-31T16:24:34.744919Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=83; 2025-03-31T16:24:34.745032Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=60; 2025-03-31T16:24:34.753451Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=8337; 2025-03-31T16:24:34.765525Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=11948; 2025-03-31T16:24:34.765669Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=40; 2025-03-31T16:24:34.765743Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=29; 2025-03-31T16:24:34.765791Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=7; 2025-03-31T16:24:34.765836Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=9; 2025-03-31T16:24:34.765883Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=8; 2025-03-31T16:24:34.765957Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=37; 2025-03-31T16:24:34.766004Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=7; 2025-03-31T16:24:34.766095Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=54; 2025-03-31T16:24:34.766147Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=10; 2025-03-31T16:24:34.766217Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=30; 2025-03-31T16:24:34.766317Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=57; 2025-03-31T16:24:34.766710Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=349; 2025-03-31T16:24:34.766755Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=75212; 2025-03-31T16:24:34.766897Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=31203592;raw_bytes=48253350;count=18;records=480000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-03-31T16:24:34.767006Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:2172:4052];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-03-31T16:24:34.767057Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2172:4052];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-03-31T16:24:34.767142Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2172:4052];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-03-31T16:24:34.786207Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2172:4052];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=1; 2025-03-31T16:24:34.786369Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-31T16:24:34.786429Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:24:34.786501Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=4; 2025-03-31T16:24:34.786562Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700007;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-03-31T16:24:34.786624Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-31T16:24:34.786689Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-31T16:24:34.786733Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-31T16:24:34.786827Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-31T16:24:34.787359Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:24:34.787442Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;self_id=[1:2212:4085];tablet_id=9437184;parent=[1:2172:4052];fline=manager.cpp:82;event=ask_data;request=request_id=128;1={portions_count=18};; 2025-03-31T16:24:34.788300Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2172:4052];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-03-31T16:24:34.788627Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2172:4052];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-03-31T16:24:34.788659Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
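The WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=...}} descriptors at the head of this transcript are the tiering/TTL configurations the schema test cycles through: EvictAfter is a threshold in seconds on the timestamp column, and the test steps those thresholds between runs for tier0 and tier1. For orientation only, a minimal sketch of a comparable delete-after policy in YQL, assuming the documented ALTER TABLE ... SET (TTL ...) clause; the table path and interval are illustrative, and the test applies its tiering config through its own harness rather than this DDL:

-- Illustrative YQL sketch (hypothetical table path); the test drives an
-- equivalent configuration programmatically, not via these statements.
ALTER TABLE `/Root/olapTable` SET (TTL = Interval("PT600S") ON `timestamp`);
-- Removing the policy again:
ALTER TABLE `/Root/olapTable` RESET (TTL);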
2025-03-31T16:24:34.788683Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-03-31T16:24:34.788723Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2172:4052];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-31T16:24:34.788778Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2172:4052];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:24:34.788836Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2172:4052];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=4; 2025-03-31T16:24:34.788895Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2172:4052];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700007;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-03-31T16:24:34.788947Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2172:4052];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-31T16:24:34.789001Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2172:4052];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-31T16:24:34.789036Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2172:4052];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-31T16:24:34.789137Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2172:4052];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-31T16:24:34.789967Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2172:4052];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1452;size=18;path_id=1; 2025-03-31T16:24:34.790997Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2172:4052];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1503;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 160000/10402136 160000/10402136 160000/10402136 80000/5203584 0/0 >> TColumnShardTestSchema::HotTiersRevCompression >> PgCatalog::PgTables [GOOD] >> TColumnShardTestSchema::EnableColdTiersAfterTtl ------- [TM] 
{asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> PgCatalog::PgTables [GOOD] Test command err: Trying to start YDB, gRPC: 7165, MsgBus: 12342 2025-03-31T16:20:34.318008Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488009340380414048:2134];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:34.318137Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012cf/r3tmp/tmpb3nxCn/pdisk_1.dat 2025-03-31T16:20:34.683924Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:20:34.722425Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:34.722489Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:20:34.724460Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7165, node 1 2025-03-31T16:20:34.873832Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:34.873854Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:34.873861Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:34.873969Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12342 TClient is connected to server localhost:12342 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:35.566398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 1042 2025-03-31T16:20:37.114865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce_pgbpchar_17472595041006102391_17823623939509273229' Typemod mismatch, got type pgbpchar for column value, type mod , but expected 2 --!syntax_pg INSERT INTO Coerce_pgbpchar_17472595041006102391_17823623939509273229 (key, value) VALUES ( '0'::int2, 'abcd'::bpchar ) 2025-03-31T16:20:37.258484Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009353265316625:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:37.258484Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009353265316633:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:37.258606Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:37.262186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-31T16:20:37.271598Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488009353265316639:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-31T16:20:37.369891Z node 1 :TX_PROXY ERROR: Actor# [1:7488009353265316693:2401] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:20:37.730803Z node 1 :TX_DATASHARD CRIT: Exception while executing KQP transaction [0:281474976710663] at 72075186224037888: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) 2025-03-31T16:20:37.732332Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976710663 at tablet 72075186224037888 status: EXEC_ERROR errors: UNKNOWN (Tx was terminated: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) ) | 2025-03-31T16:20:37.733646Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7488009353265316746:2338] TxId: 281474976710663. Ctx: { TraceId: 01jqpg2g889yhdcrz7y8vxc74s, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmUwZTEwYTAtNjM0MThiNWMtMTc5NDI1MTctZmFlOTYwNDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. EXEC_ERROR: [UNKNOWN] Tx was terminated: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) ; 2025-03-31T16:20:37.751693Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmUwZTEwYTAtNjM0MThiNWMtMTc5NDI1MTctZmFlOTYwNDE=, ActorId: [1:7488009353265316622:2338], ActorState: ExecuteState, TraceId: 01jqpg2g889yhdcrz7y8vxc74s, Create QueryResponse for error on request, msg:
: Error: Error executing transaction (ExecError): Execution failed
: Error: [UNKNOWN] Tx was terminated: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) 2025-03-31T16:20:37.774163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
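The Typemod mismatch and "value too long for type character(2)" failures above reflect standard PostgreSQL bpchar semantics surfacing through the pg-compatibility layer: a bare ::bpchar cast carries no length (typemod -1), while the target column was declared with a fixed limit, and PostgreSQL-style coercion rejects over-length values instead of truncating them. A minimal standalone sketch of the same semantics against plain PostgreSQL; the table and values here are hypothetical, not the generated Coerce_* tables the test creates:

-- Hypothetical plain-PostgreSQL reproduction of the semantics exercised above.
CREATE TABLE coerce_demo (key int2 PRIMARY KEY, value character(2));

INSERT INTO coerce_demo VALUES ('0'::int2, 'ab');    -- 2 chars: succeeds
INSERT INTO coerce_demo VALUES ('1'::int2, 'abcd');  -- ERROR: value too long for type character(2)

-- A bare bpchar cast attaches no length (typemod -1), which is the
-- "got type pgbpchar ... type mod , but expected 2" mismatch reported above.
SELECT 'abcd'::bpchar;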
: Error: Bulk upsert to table '/Root/Coerce__pgbpchar_17472595041006102391_5352544928909966465' Typemod mismatch, got type _pgbpchar for column value, type mod , but expected 2 --!syntax_pg INSERT INTO Coerce__pgbpchar_17472595041006102391_5352544928909966465 (key, value) VALUES ( '0'::int2, '{abcd,abcd}'::_bpchar ) 2025-03-31T16:20:38.017747Z node 1 :TX_DATASHARD CRIT: Exception while executing KQP transaction [0:281474976710668] at 72075186224037889: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) 2025-03-31T16:20:38.019048Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976710668 at tablet 72075186224037889 status: EXEC_ERROR errors: UNKNOWN (Tx was terminated: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) ) | 2025-03-31T16:20:38.019262Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7488009357560284177:2372] TxId: 281474976710668. Ctx: { TraceId: 01jqpg2gt82798t17pvfps414k, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjIzOTliNTEtNGZhNGE0MzUtYTAwOGU3NGUtZmQ5YjJhMmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. EXEC_ERROR: [UNKNOWN] Tx was terminated: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) ; 2025-03-31T16:20:38.019526Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjIzOTliNTEtNGZhNGE0MzUtYTAwOGU3NGUtZmQ5YjJhMmY=, ActorId: [1:7488009353265316837:2372], ActorState: ExecuteState, TraceId: 01jqpg2gt82798t17pvfps414k, Create QueryResponse for error on request, msg:
: Error: Error executing transaction (ExecError): Execution failed
: Error: [UNKNOWN] Tx was terminated: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) 1042 2025-03-31T16:20:38.037680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce_pgbpchar_2169371982377735806_17823623939509273229' Typemod mismatch, got type pgbpchar for column value, type mod , but expected 4 --!syntax_pg INSERT INTO Coerce_pgbpchar_2169371982377735806_17823623939509273229 (key, value) VALUES ( '0'::int2, 'abcd'::bpchar ) abcd 2025-03-31T16:20:38.461267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce__pgbpchar_2169371982377735806_5352544928909966465' Typemod mismatch, got type _pgbpchar for column value, type mod , but expected 4 --!syntax_pg INSERT INTO Coerce__pgbpchar_2169371982377735806_5352544928909966465 (key, value) VALUES ( '0'::int2, '{abcd,abcd}'::_bpchar ) {abcd,abcd} 1042 2025-03-31T16:20:38.917528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce_pgbpchar_10103374131519304989_17823623939509273229' Typemod mismatch, got type pgbpchar for column value, type mod , but expected 6 --!syntax_pg INSERT INTO Coerce_pgbpchar_10103374131519304989_17823623939509273229 (key, value) VALUES ( '0'::int2, 'abcd'::bpchar ) abcd 2025-03-31T16:20:39.272731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 Disconnected 2025-03-31T16:24:17.432904Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:24:17.438806Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23585, node 13 2025-03-31T16:24:17.496074Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:24:17.496105Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:24:17.496124Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:24:17.496321Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6180 TClient is connected to server localhost:6180 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:24:18.447901Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:24:22.203179Z node 13 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7488010299246725558:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:24:22.203305Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:24:22.294659Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7488010320721562697:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:24:22.294760Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7488010320721562712:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:24:22.294878Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:24:22.300144Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:24:22.313418Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7488010320721562719:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:24:22.385361Z node 13 :TX_PROXY ERROR: Actor# [13:7488010320721562770:2342] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 4912, MsgBus: 2439 2025-03-31T16:24:23.598819Z node 14 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[14:7488010323666572611:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:24:23.598959Z node 14 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012cf/r3tmp/tmpZGzbfv/pdisk_1.dat 2025-03-31T16:24:23.787234Z node 14 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:24:23.826350Z node 14 :HIVE WARN: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:24:23.826474Z node 14 :HIVE WARN: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:24:23.827862Z node 14 :HIVE WARN: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4912, node 14 2025-03-31T16:24:23.895897Z node 14 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:24:23.895929Z node 14 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:24:23.895946Z node 14 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:24:23.896154Z node 14 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2439 TClient is connected to server localhost:2439 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:24:24.649607Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:24:24.740426Z node 14 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:24:28.599059Z node 14 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[14:7488010323666572611:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:24:28.599207Z node 14 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:24:29.566185Z node 14 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7488010349436377054:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:24:29.566216Z node 14 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7488010349436377062:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:24:29.566325Z node 14 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:24:29.572368Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:24:29.585204Z node 14 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [14:7488010349436377068:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:24:29.655547Z node 14 :TX_PROXY ERROR: Actor# [14:7488010349436377121:2343] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:24:29.760834Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:24:29.809865Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:24:34.497732Z node 14 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 14, TabletId: 72075186224037888 not found 2025-03-31T16:24:34.538324Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:24:34.981060Z node 14 :KQP_COMPUTE ERROR: SelfId: [14:7488010370911214177:2452], TxId: 281474976710672, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jqpg9r25bwwzqc7pybsvgjtc. SessionId : ydb://session/3?node_id=14&id=YTE3NTNlMGUtZDFjNjE1ZDctOTI3YzE4Y2YtN2U5M2MwOTg=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED DEFAULT_ERROR: {
: Error: Terminate was called, reason(57): ERROR: invalid input syntax for type boolean: "pg_proc" }. 2025-03-31T16:24:34.981626Z node 14 :KQP_COMPUTE ERROR: SelfId: [14:7488010370911214178:2453], TxId: 281474976710672, task: 2. Ctx: { TraceId : 01jqpg9r25bwwzqc7pybsvgjtc. SessionId : ydb://session/3?node_id=14&id=YTE3NTNlMGUtZDFjNjE1ZDctOTI3YzE4Y2YtN2U5M2MwOTg=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [14:7488010370911214174:2448], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-03-31T16:24:34.982404Z node 14 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=14&id=YTE3NTNlMGUtZDFjNjE1ZDctOTI3YzE4Y2YtN2U5M2MwOTg=, ActorId: [14:7488010370911214163:2448], ActorState: ExecuteState, TraceId: 01jqpg9r25bwwzqc7pybsvgjtc, Create QueryResponse for error on request, msg: |91.3%| [TA] $(B)/ydb/core/kqp/ut/pg/test-results/unittest/{meta.json ... results_accumulator.log} |91.3%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/pg/test-results/unittest/{meta.json ... results_accumulator.log} >> DataShardVolatile::DistributedWriteThenBulkUpsert [GOOD] >> DataShardVolatile::DistributedWriteThenBulkUpsertWithCdc |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest >> YdbProxy::DescribePath >> YdbProxy::CreateTable >> TBlobStorageWardenTest::TestCreatePDiskAndGroup |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest >> YdbProxy::DescribePath [GOOD] >> YdbProxy::DescribeTable >> TBlobStorageWardenTest::TestCreatePDiskAndGroup [GOOD] >> YdbProxy::CreateTable [GOOD] >> YdbProxy::CreateCdcStream ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest >> TBlobStorageWardenTest::TestCreatePDiskAndGroup [GOOD] Test command err: 2025-03-31T16:24:41.807832Z node 1 :BS_SYNCLOG WARN: VDISK[3e000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:1:0] targetVDisk# [3e000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-31T16:24:41.822491Z node 1 :BS_SYNCLOG WARN: VDISK[3e000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:2:0] targetVDisk# [3e000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-31T16:24:41.833698Z node 1 :BS_SYNCLOG WARN: VDISK[3e000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:3:0] targetVDisk# [3e000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-31T16:24:41.834693Z node 1 :BS_SYNCLOG WARN: VDISK[3e000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:2:0] targetVDisk# [3e000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-31T16:24:41.843124Z node 1 :BS_SYNCLOG WARN: VDISK[3e000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:3:0] targetVDisk# [3e000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-31T16:24:41.848300Z node 1 :BS_SYNCLOG WARN: VDISK[3e000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:3:0] targetVDisk# [3e000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 Sending TEvPut Sending TEvGet Sending TEvVGet Sending TEvPut Sending TEvGet >> TColumnShardTestSchema::RebootColdTiers [GOOD] |91.4%| [TA] $(B)/ydb/core/mind/ut_fat/test-results/unittest/{meta.json ... results_accumulator.log} |91.4%| [TA] {RESULT} $(B)/ydb/core/mind/ut_fat/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TBalanceCoverageBuilderTest::TestComplexSplitWithDuplicates >> TBalanceCoverageBuilderTest::TestComplexSplitWithDuplicates [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootColdTiers [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143438815.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143438815.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=143438815.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123438815.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143438815.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=143438815.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123437615.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123438815.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=123438815.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123437615.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123437615.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=123437615.000000s;Name=;Codec=}; 2025-03-31T16:23:36.272464Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:23:36.349624Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:23:36.364638Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:23:36.364873Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:23:36.371129Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:23:36.371292Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:23:36.371472Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:23:36.371555Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:23:36.371636Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:23:36.371702Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:23:36.371777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:23:36.371848Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:23:36.371926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:23:36.372008Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:23:36.372083Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:23:36.372144Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:23:36.392407Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:23:36.392525Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:23:36.392596Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:23:36.392743Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:23:36.392893Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:23:36.392958Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:23:36.393007Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:23:36.393088Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:23:36.393139Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:23:36.393181Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:23:36.393210Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:23:36.393378Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:23:36.393451Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:23:36.393494Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:23:36.393524Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:23:36.393613Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:23:36.393667Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:23:36.393710Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:23:36.393747Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:23:36.393811Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:23:36.393843Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:23:36.393868Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:23:36.393913Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:23:36.393951Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:23:36.393984Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:23:36.394296Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=38; 2025-03-31T16:23:36.394362Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=29; 2025-03-31T16:23:36.394412Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=23; 2025-03-31T16:23:36.394478Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=28; 2025-03-31T16:23:36.394621Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:23:36.394666Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:23:36.394701Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:23:36.394875Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:23:36.394923Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:23:36.394953Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:23:36.395064Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:23:36.395117Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:4 ... on_data.cpp:29;EXECUTE:finishLoadingTime=240; 2025-03-31T16:24:44.003324Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=22686; 2025-03-31T16:24:44.007117Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=3699; 2025-03-31T16:24:44.012439Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=4458; 2025-03-31T16:24:44.012535Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=5345; 2025-03-31T16:24:44.012675Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=84; 2025-03-31T16:24:44.012765Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=52; 2025-03-31T16:24:44.012881Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=79; 2025-03-31T16:24:44.012990Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=68; 
2025-03-31T16:24:44.017604Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=4553; 2025-03-31T16:24:44.023938Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=6233; 2025-03-31T16:24:44.024063Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=48; 2025-03-31T16:24:44.024143Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=30; 2025-03-31T16:24:44.024184Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=8; 2025-03-31T16:24:44.024219Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=7; 2025-03-31T16:24:44.024251Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=5; 2025-03-31T16:24:44.024318Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=39; 2025-03-31T16:24:44.024356Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=4; 2025-03-31T16:24:44.024435Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=50; 2025-03-31T16:24:44.024474Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=5; 2025-03-31T16:24:44.024541Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=34; 2025-03-31T16:24:44.024616Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=42; 2025-03-31T16:24:44.024892Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=248; 2025-03-31T16:24:44.024927Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=50169; 2025-03-31T16:24:44.025040Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=31203592;raw_bytes=48253350;count=18;records=480000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-03-31T16:24:44.025137Z node 1 
:TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2172:4052];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-03-31T16:24:44.025178Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2172:4052];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-03-31T16:24:44.025231Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2172:4052];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-03-31T16:24:44.040133Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2172:4052];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=1; 2025-03-31T16:24:44.040268Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-31T16:24:44.040328Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:24:44.040390Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=4; 2025-03-31T16:24:44.040444Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700007;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-03-31T16:24:44.040486Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-31T16:24:44.040533Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-31T16:24:44.040569Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-31T16:24:44.040659Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-31T16:24:44.040984Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;self_id=[1:2212:4085];tablet_id=9437184;parent=[1:2172:4052];fline=manager.cpp:82;event=ask_data;request=request_id=128;1={portions_count=18};; 2025-03-31T16:24:44.041298Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:24:44.041766Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2172:4052];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-03-31T16:24:44.042056Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2172:4052];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-03-31T16:24:44.042090Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
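Consecutive records above are joined by single spaces rather than newlines, so recovering a per-tablet event flow (event=initialize_shard, event=EnqueueBackgroundActivities, event=StartCleanup, event=StartCleanupStop, ...) first means re-splitting the stream on the leading ISO-8601 timestamps. A sketch under that assumption; it knows nothing about YDB internals, only the shape of the text:

    import re

    # Each record begins with a timestamp like 2025-03-31T16:24:44.042090Z.
    STAMP = re.compile(r"(?=\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{6}Z)")

    def events(log_text: str, tablet: str = "9437184") -> list:
        out = []
        for rec in STAMP.split(log_text):
            if "tablet_id=" + tablet not in rec:
                continue
            m = re.search(r"event=([^;]+)", rec)
            if m:
                out.append(m.group(1))
        return out

Fed the TEST_STEP=4 slice above, this should reproduce the background loop in order — EnqueueBackgroundActivities, start_indexation_tasks, StartCleanup, StartCleanupStop — followed by the cleanup/ttl records whose skip_reason=no_changes explains why nothing was scheduled.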
2025-03-31T16:24:44.042115Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-03-31T16:24:44.042157Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2172:4052];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-31T16:24:44.042221Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2172:4052];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:24:44.042278Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2172:4052];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=4; 2025-03-31T16:24:44.042337Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2172:4052];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700007;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-03-31T16:24:44.042374Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2172:4052];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-31T16:24:44.042414Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2172:4052];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-31T16:24:44.042451Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2172:4052];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-31T16:24:44.042531Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2172:4052];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-31T16:24:44.043486Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2172:4052];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1452;size=18;path_id=1; 2025-03-31T16:24:44.044355Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2172:4052];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1503;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 160000/10402136 160000/10402136 160000/10402136 80000/5203584 0/0 >> TBalanceCoverageBuilderTest::TestZeroTracks [GOOD] |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> 
TBalanceCoverageBuilderTest::TestComplexSplitWithDuplicates [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::JsonStorageListingV1PDiskIdFilter 2025-03-31 16:24:42,318 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-03-31 16:24:42,719 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination: pid rss ref pdirt 339310 46.0M 46.0M 23.0M test_tool run_ut @/home/runner/.ya/build/build_root/0l5c/0015cd/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff/chunk4/testing_out_stuff/test_tool.args 340594 4.1G 4.1G 3.8G └─ ydb-core-viewer-ut --trace-path-append /home/runner/.ya/build/build_root/0l5c/0015cd/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff/chunk4/ytest.report.trace Test command err: 2025-03-31T16:15:05.009678Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:336:2378], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:15:05.010157Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:15:05.010219Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 17706, node 1 TClient is connected to server localhost:22540 2025-03-31T16:16:00.347349Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:3137:2433], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:16:00.349166Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:16:00.349403Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:16:00.351801Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:1290:2180], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:16:00.352148Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:2470:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:16:00.352941Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:3133:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:16:00.353864Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:16:00.354044Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:2464:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:16:00.354211Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:16:00.354248Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:16:00.354350Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:16:00.354411Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:16:00.355040Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:16:00.355139Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:16:00.355612Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:2467:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:16:00.356178Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:16:00.357120Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:16:00.357333Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:16:00.358200Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:2473:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:16:00.358632Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [10:2479:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:16:00.358957Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:16:00.359718Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:16:00.360102Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:16:00.360450Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:2476:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:16:00.360521Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:16:00.361328Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:16:00.361877Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-03-31T16:16:00.789812Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:16:00.979899Z node 2 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-03-31T16:16:01.010192Z node 2 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-03-31T16:16:01.500567Z node 2 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 11093, node 2 TClient is connected to server localhost:63806 2025-03-31T16:16:01.849237Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:16:01.849301Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:16:01.849341Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:16:01.849885Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:17:32.534185Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [11:3160:2436], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:17:32.538233Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:17:32.538417Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:17:32.540427Z node 17 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [17:1827:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:17:32.540631Z node 18 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [18:1830:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:17:32.542835Z node 15 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [15:3111:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:17:32.543002Z node 17 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:17:32.543113Z node 18 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:17:32.544462Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [12:3156:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:17:32.544834Z node 17 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:17:32.544893Z node 18 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:17:32.545750Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [16:1797:2245], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:17:32.546192Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:17:32.547141Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:17:32.547384Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [13:3163:2379], Scheduled retry for err ... not detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:21:40.330437Z node 37 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:21:40.333430Z node 35 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [35:3121:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:21:40.334640Z node 34 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [34:3118:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:21:40.334750Z node 35 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:21:40.335332Z node 35 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:21:40.335806Z node 34 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:21:40.336681Z node 34 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-03-31T16:21:40.883629Z node 29 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:21:41.151629Z node 29 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-03-31T16:21:41.182150Z node 29 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-03-31T16:21:42.287914Z node 29 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 3836, node 29 TClient is connected to server localhost:24970 2025-03-31T16:21:43.114901Z node 29 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:21:43.115043Z node 29 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:21:43.115170Z node 29 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:21:43.115885Z node 29 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:24:26.614583Z node 38 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [38:3119:2433], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:24:26.617378Z node 38 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:24:26.619167Z node 38 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:24:26.621046Z node 44 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [44:1745:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:24:26.621373Z node 45 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [45:1748:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:24:26.622753Z node 42 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [42:3128:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:24:26.623026Z node 43 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [43:3131:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:24:26.624120Z node 39 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [39:3115:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:24:26.624406Z node 40 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [40:3122:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:24:26.624670Z node 41 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [41:3125:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:24:26.625049Z node 44 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:24:26.625196Z node 45 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:24:26.626346Z node 42 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:24:26.626451Z node 43 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:24:26.626578Z node 44 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:24:26.626662Z node 45 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:24:26.626877Z node 46 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [46:1751:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:24:26.627813Z node 39 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:24:26.627921Z node 40 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:24:26.628074Z node 41 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:24:26.628204Z node 42 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:24:26.628272Z node 43 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:24:26.629009Z node 46 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:24:26.629754Z node 39 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:24:26.629837Z node 40 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:24:26.629900Z node 41 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:24:26.630181Z node 46 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-03-31T16:24:27.420865Z node 38 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:24:27.695961Z node 38 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-03-31T16:24:27.733504Z node 38 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-03-31T16:24:28.858371Z node 38 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 22081, node 38 TClient is connected to server localhost:24148 2025-03-31T16:24:29.645357Z node 38 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:24:29.645484Z node 38 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:24:29.645573Z node 38 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 
2025-03-31T16:24:29.646502Z node 38 :NET_CLASSIFIER ERROR: got bad distributable configuration Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8330113388/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/0l5c/0015cd/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff/chunk4/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1749, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8330113388/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/0l5c/0015cd/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff/chunk4/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout",), {}) |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestZeroTracks [GOOD] |91.4%| [TA] $(B)/ydb/core/viewer/ut/test-results/unittest/{meta.json ... results_accumulator.log} |91.4%| [TA] {RESULT} $(B)/ydb/core/viewer/ut/test-results/unittest/{meta.json ... results_accumulator.log} |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> YdbProxy::DescribeTable [GOOD] >> TBalanceCoverageBuilderTest::TestComplexSplit [GOOD] >> TBalanceCoverageBuilderTest::TestSplitWithMergeBack [GOOD] >> TBalanceCoverageBuilderTest::TestEmpty [GOOD] >> YdbProxy::CreateCdcStream [GOOD] |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestComplexSplit [GOOD] >> TColumnShardTestSchema::RebootForgetWithLostAnswer [GOOD] |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestSplitWithMergeBack [GOOD] |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestEmpty [GOOD] >> TBalanceCoverageBuilderTest::TestSimpleSplit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::DescribeTable [GOOD] Test command err: 2025-03-31T16:24:40.398155Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010395228088822:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:24:40.398268Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00155c/r3tmp/tmpfNb0Zy/pdisk_1.dat 2025-03-31T16:24:40.763697Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:24:40.792891Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Unknown -> Disconnected 2025-03-31T16:24:40.793006Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:24:40.794763Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16545 TServer::EnableGrpc on GrpcPort 26228, node 1 2025-03-31T16:24:41.030986Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:24:41.031061Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:24:41.031070Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:24:41.031217Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16545 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:24:41.498285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
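Back to the Viewer::JsonStorageListingV1PDiskIdFilter failure above: the traceback shows the contract enforced by yatest.common.process — wait/wait_for give the test binary a 600-second budget, dump the process tree, then raise TimeoutError, which run_test re-raises as ExecutionTimeoutError. A reduced stdlib-only equivalent of that behaviour (a sketch, not the actual harness code; the real wrapper also logs RSS per process before killing):

    import subprocess
    import sys

    def run_with_timeout(cmd, timeout_s=600):
        # Wait for the child; on overrun, kill it and raise, mirroring the
        # "600 second(s) wait timeout has expired" path in the traceback above.
        proc = subprocess.Popen(cmd)
        try:
            return proc.wait(timeout=timeout_s)
        except subprocess.TimeoutExpired:
            proc.kill()
            proc.wait()
            raise TimeoutError(
                f"{timeout_s} second(s) wait timeout has expired: {cmd}")

    if __name__ == "__main__":
        sys.exit(run_with_timeout(["sleep", "1"], timeout_s=5))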
2025-03-31T16:24:43.756493Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488010408337948815:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:24:43.756608Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00155c/r3tmp/tmpuKmTIS/pdisk_1.dat 2025-03-31T16:24:43.864301Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:24:43.899357Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:24:43.899456Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:24:43.901577Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22073 TServer::EnableGrpc on GrpcPort 5566, node 2 2025-03-31T16:24:44.099385Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:24:44.099411Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:24:44.099418Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:24:44.099556Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22073 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:24:44.375898Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
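For reference, the DescribeTable call this test exercises can also be issued against such a server from outside. A sketch assuming the current ydb-python-sdk surface (pip install ydb); the endpoint reuses the GrpcPort printed above for node 2, and /Root/my_table is a hypothetical path — the test creates its own table via ESchemeOpCreateTable:

    import ydb

    with ydb.Driver(endpoint="grpc://localhost:5566", database="/Root") as driver:
        driver.wait(timeout=5)
        with ydb.SessionPool(driver) as pool:
            desc = pool.retry_operation_sync(
                lambda session: session.describe_table("/Root/my_table"))
            print([column.name for column in desc.columns])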
2025-03-31T16:24:46.347829Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 >> DataShardVolatile::DistributedWriteThenBulkUpsertWithCdc [GOOD] >> DataShardVolatile::DistributedWriteLostPlanThenDrop |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestSimpleSplit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootForgetWithLostAnswer [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=saved_at;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143438831.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=143438831.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123438831.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=123438831.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123437631.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=123437631.000000s;Name=;Codec=}; 2025-03-31T16:23:53.081368Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:23:53.163931Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:23:53.186839Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:23:53.187137Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:23:53.195050Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:23:53.195324Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:23:53.195579Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:23:53.195708Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:23:53.195829Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:23:53.195957Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:23:53.196077Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:23:53.196192Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:23:53.196316Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:23:53.196426Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:23:53.196489Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:23:53.196567Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:23:53.222193Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:23:53.222351Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:23:53.222408Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:23:53.222578Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:23:53.222713Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:23:53.222768Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:23:53.222824Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:23:53.222894Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:23:53.222933Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:23:53.222960Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:23:53.222979Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:23:53.223136Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:23:53.223201Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:23:53.223228Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:23:53.223247Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:23:53.223312Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:23:53.223348Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:23:53.223377Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:23:53.223394Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:23:53.223432Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:23:53.223459Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:23:53.223488Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:23:53.223533Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:23:53.223570Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:23:53.223600Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:23:53.223927Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=41; 2025-03-31T16:23:53.224005Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=33; 2025-03-31T16:23:53.224074Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=30; 2025-03-31T16:23:53.224154Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=38; 2025-03-31T16:23:53.224312Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:23:53.224370Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:23:53.224405Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:23:53.224620Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:23:53.224672Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:23:53.224709Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:23:53.224865Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:23:53.224900Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:23:53.224918Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-31T16:23:53.225077Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normaliz ... 
cpp:29;PRECHARGE:finishLoadingTime=14; 2025-03-31T16:24:47.143882Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=237; 2025-03-31T16:24:47.143920Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=21231; 2025-03-31T16:24:47.147959Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=3963; 2025-03-31T16:24:47.154433Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=5523; 2025-03-31T16:24:47.154540Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=6492; 2025-03-31T16:24:47.154683Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=80; 2025-03-31T16:24:47.154779Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=51; 2025-03-31T16:24:47.154891Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=74; 2025-03-31T16:24:47.154993Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=55; 2025-03-31T16:24:47.159523Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=4458; 2025-03-31T16:24:47.166318Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=6689; 2025-03-31T16:24:47.166444Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=36; 2025-03-31T16:24:47.166516Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=25; 2025-03-31T16:24:47.166564Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=10; 2025-03-31T16:24:47.166607Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=11; 2025-03-31T16:24:47.166644Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2025-03-31T16:24:47.166710Z node 1 
:TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=36; 2025-03-31T16:24:47.166749Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=7; 2025-03-31T16:24:47.166830Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=50; 2025-03-31T16:24:47.166873Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=9; 2025-03-31T16:24:47.166935Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=29; 2025-03-31T16:24:47.167016Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=45; 2025-03-31T16:24:47.167308Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=250; 2025-03-31T16:24:47.167349Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=52825; 2025-03-31T16:24:47.167477Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=20801572;raw_bytes=32169208;count=11;records=320000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-03-31T16:24:47.167573Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-03-31T16:24:47.167620Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-03-31T16:24:47.167677Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-03-31T16:24:47.176179Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=1; 2025-03-31T16:24:47.176311Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-31T16:24:47.176363Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:24:47.176429Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=3; 2025-03-31T16:24:47.176484Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700006;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-03-31T16:24:47.176527Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=3;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-31T16:24:47.176577Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-31T16:24:47.176621Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-31T16:24:47.176709Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-31T16:24:47.177299Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:24:47.177370Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;self_id=[1:1991:3891];tablet_id=9437184;parent=[1:1953:3860];fline=manager.cpp:82;event=ask_data;request=request_id=95;1={portions_count=11};; 2025-03-31T16:24:47.177933Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-03-31T16:24:47.178233Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-03-31T16:24:47.178258Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
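Each `StartCleanupStop` snapshot above carries `tx_id=18446744073709551615`, which is exactly 2^64 − 1, the largest value a `uint64_t` can hold, so the bound reads as "every transaction up to this plan step" (that sentinel reading is an inference from the value itself, not from YDB documentation). A two-line check:

```cpp
#include <cstdint>
#include <iostream>
#include <limits>

int main() {
    // The tx_id printed in the StartCleanupStop events above.
    const uint64_t tx_id = 18446744073709551615ULL;
    static_assert(std::numeric_limits<uint64_t>::max() == 18446744073709551615ULL,
                  "2^64 - 1, the largest value a uint64_t can hold");
    std::cout << (tx_id == std::numeric_limits<uint64_t>::max()) << "\n"; // prints 1
}
```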
2025-03-31T16:24:47.178279Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-03-31T16:24:47.178313Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-31T16:24:47.178363Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:24:47.178411Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=3; 2025-03-31T16:24:47.178460Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700006;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-03-31T16:24:47.178498Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=3;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-31T16:24:47.178537Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-31T16:24:47.178571Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-31T16:24:47.178643Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-31T16:24:47.179838Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1452;size=11;path_id=1; 2025-03-31T16:24:47.180913Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1503;stage=finished;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184
160000/10402332 160000/10402332 80000/5203504 0/0
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::CreateCdcStream [GOOD]
Test command err:
2025-03-31T16:24:40.397408Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010396395828423:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:24:40.397485Z
node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001553/r3tmp/tmpTN3l90/pdisk_1.dat 2025-03-31T16:24:40.751253Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:24:40.790925Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:24:40.791055Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:24:40.794847Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22441 TServer::EnableGrpc on GrpcPort 12251, node 1 2025-03-31T16:24:41.031056Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:24:41.031100Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:24:41.031109Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:24:41.031264Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22441 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:24:41.490891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:24:43.354910Z node 1 :TX_PROXY ERROR: Actor# [1:7488010409280730960:2303] txid# 281474976710658, issues: { message: "Column key has wrong key type Float" severity: 1 } 2025-03-31T16:24:43.365288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-31T16:24:43.491649Z node 1 :TX_PROXY ERROR: Actor# [1:7488010409280731050:2364] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/table\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:24:44.120767Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488010413825458401:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:24:44.120873Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001553/r3tmp/tmpLvkPfG/pdisk_1.dat 2025-03-31T16:24:44.210700Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:24:44.249587Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:24:44.249717Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:24:44.251452Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2228 TServer::EnableGrpc on GrpcPort 18273, node 2 2025-03-31T16:24:44.439775Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:24:44.439809Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:24:44.439817Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:24:44.439937Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2228 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:24:44.770026Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
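The `Column key has wrong key type Float` error above is the test deliberately submitting an invalid schema: the proxy rejects the table because a floating-point column cannot serve as a key. A hedged sketch of that kind of validation gate; the allowed-type list here is illustrative, not YDB's actual rule table:

```cpp
#include <iostream>
#include <set>
#include <string>

// Illustrative key-type check modeled on the TX_PROXY error above;
// the concrete set of permitted key types is an assumption here.
bool IsValidKeyType(const std::string& type) {
    static const std::set<std::string> allowed = {
        "Uint64", "Int64", "Uint32", "Int32", "Utf8", "String", "Timestamp"};
    return allowed.count(type) > 0;
}

int main() {
    for (const std::string t : {"Uint64", "Float"}) {
        if (!IsValidKeyType(t)) {
            std::cout << "Column key has wrong key type " << t << "\n";
        }
    }
    // prints: Column key has wrong key type Float
}
```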
2025-03-31T16:24:46.700112Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:24:46.839592Z node 2 :CHANGE_EXCHANGE WARN: [CdcChangeSenderMain][72075186224037888:1][2:7488010422415393835:2340] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:4:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2025-03-31T16:24:46.910306Z node 2 :TX_PROXY ERROR: Actor# [2:7488010422415393901:2449] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/table/updates\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeCdcStream, state: EPathStateNoChanges)" severity: 1 }
>> TColumnShardTestSchema::HotTiersAfterTtl [GOOD]
>> TBalanceCoverageBuilderTest::TestOneSplit [GOOD]
>> TBalanceCoverageBuilderTest::TestSplitWithPartialMergeAll [GOOD]
|91.4%| [TA] $(B)/ydb/core/tx/replication/ydb_proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|91.4%| [TA] {RESULT} $(B)/ydb/core/tx/replication/ydb_proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestOneSplit [GOOD]
|91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestSplitWithPartialMergeAll [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::HotTiersAfterTtl [GOOD]
Test command err:
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10;
WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=143438817.000000s;Name=;Codec=};
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143438817.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143438817.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=143438817.000000s;Name=;Codec=};
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123438817.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143438817.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=143438817.000000s;Name=;Codec=};
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123437617.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123438817.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=123438817.000000s;Name=;Codec=};
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123437617.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123437617.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=123437617.000000s;Name=;Codec=}; 2025-03-31T16:23:37.602981Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:23:37.720301Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:23:37.752153Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:23:37.752544Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:23:37.762969Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:23:37.763315Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:23:37.763637Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:23:37.763855Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:23:37.764024Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:23:37.764226Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:23:37.764395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:23:37.764573Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:23:37.764764Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:23:37.764951Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:23:37.765138Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:23:37.765319Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:23:37.795335Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:23:37.795501Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:23:37.795580Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:23:37.795751Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:23:37.795942Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:23:37.796014Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:23:37.796057Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:23:37.796150Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:23:37.796216Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:23:37.796259Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:23:37.796293Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:23:37.796481Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:23:37.796557Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:23:37.796608Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:23:37.796645Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:23:37.796737Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:23:37.796793Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:23:37.796839Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:23:37.796871Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:23:37.796955Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:23:37.796996Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:23:37.797033Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:23:37.797103Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:23:37.797150Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:23:37.797183Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:23:37.797584Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=51; 2025-03-31T16:23:37.797669Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=37; 2025-03-31T16:23:37.797759Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=40; 2025-03-31T16:23:37.797852Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=49; 2025-03-31T16:23:37.798066Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:23:37.798147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:23:37.798193Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:23:37.798410Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:23:37.798457Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:23:37.798487Z node 1 :TX_COLUMNSHARD NOTICE: tabl ... 
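The `normalizer_switched` / `normalizer_init` / `normalizer_finished` triples above walk a fixed chain in ascending seq_id order (Granules=1, Chunks=2, TablesCleaner=4, CleanGranuleId=6, CleanInsertionDedup=8, GCCountersNormalizer=9, RestorePortionFromChunks=10, SyncPortionFromChunks=11, SyncMinSnapshotFromChunks=13, RestoreV1Chunks_V2=15); the log itself skips some ids. A toy driver that reproduces the event shape from a slice of that chain (the struct and loop are illustrative; this is not the YDB normalizer interface):

```cpp
#include <iostream>
#include <string>
#include <vector>

// Toy model of the sequential normalizer chain the log records:
// each normalizer is switched to, initialized with its seq_id,
// and reported finished before the next one starts.
struct Normalizer { int seq_id; std::string name; };

int main() {
    const std::vector<Normalizer> chain = {
        {8,  "CleanInsertionDedup"},
        {9,  "GCCountersNormalizer"},
        {10, "RestorePortionFromChunks"},
        {11, "SyncPortionFromChunks"},
    };
    for (const auto& n : chain) {
        std::cout << "event=normalizer_switched;description=CLASS_NAME=" << n.name << ";\n"
                  << "event=normalizer_init;seq_id=" << n.seq_id << ";type=" << n.name << ";\n"
                  << "event=normalizer_finished;description=CLASS_NAME="
                  << n.name << ";id=" << n.seq_id << ";\n";
    }
}
```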
D DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=6; 2025-03-31T16:24:48.573853Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-03-31T16:24:48.573919Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=6;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-31T16:24:48.573989Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-31T16:24:48.574045Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-31T16:24:48.574188Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-31T16:24:48.574506Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000009:max} readable: {1000000009:max} at tablet 9437184 2025-03-31T16:24:48.574652Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-03-31T16:24:48.574839Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-03-31T16:24:48.574910Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-03-31T16:24:48.575445Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2025-03-31T16:24:48.575561Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:136;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-03-31T16:24:48.576119Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:162;event=TTxScan started;actor_id=[1:1970:3975];trace_detailed=; 2025-03-31T16:24:48.576577Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:84;ff_first=(column_ids=1;column_names=timestamp;);; 2025-03-31T16:24:48.576832Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:99;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-03-31T16:24:48.577035Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:24:48.577185Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:24:48.577645Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1970:3975];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-31T16:24:48.577766Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1970:3975];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:24:48.577897Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1970:3975];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:24:48.577957Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1970:3975] finished for tablet 9437184 2025-03-31T16:24:48.578438Z node 1 
:TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1970:3975];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:416;event=scan_finish;compute_actor_id=[1:1969:3974];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1743438288576038,"name":"_full_task","f":1743438288576038,"d_finished":0,"c":0,"l":1743438288578026,"d":1988},"events":[{"name":"bootstrap","f":1743438288576270,"d_finished":948,"c":1,"l":1743438288577218,"d":948},{"a":1743438288577618,"name":"ack","f":1743438288577618,"d_finished":0,"c":0,"l":1743438288578026,"d":408},{"a":1743438288577595,"name":"processing","f":1743438288577595,"d_finished":0,"c":0,"l":1743438288578026,"d":431},{"name":"ProduceResults","f":1743438288576944,"d_finished":524,"c":2,"l":1743438288577933,"d":524},{"a":1743438288577936,"name":"Finish","f":1743438288577936,"d_finished":0,"c":0,"l":1743438288578026,"d":90}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:24:48.578531Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1970:3975];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:1969:3974];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-31T16:24:48.579012Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1970:3975];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:371;event=scan_finished;compute_actor_id=[1:1969:3974];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":1743438288576038,"name":"_full_task","f":1743438288576038,"d_finished":0,"c":0,"l":1743438288578582,"d":2544},"events":[{"name":"bootstrap","f":1743438288576270,"d_finished":948,"c":1,"l":1743438288577218,"d":948},{"a":1743438288577618,"name":"ack","f":1743438288577618,"d_finished":0,"c":0,"l":1743438288578582,"d":964},{"a":1743438288577595,"name":"processing","f":1743438288577595,"d_finished":0,"c":0,"l":1743438288578582,"d":987},{"name":"ProduceResults","f":1743438288576944,"d_finished":524,"c":2,"l":1743438288577933,"d":524},{"a":1743438288577936,"name":"Finish","f":1743438288577936,"d_finished":0,"c":0,"l":1743438288578582,"d":646}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1970:3975]->[1:1969:3974] 2025-03-31T16:24:48.579143Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=4;SelfId=[1:1970:3975];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-31T16:24:48.575526Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-03-31T16:24:48.579199Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1970:3975];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-31T16:24:48.579326Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1970:3975];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184
160000/10402096 160000/10402096 160000/10402096 80000/5203544 0/0
>> TBalanceCoverageBuilderTest::TestSplitWithPartialMergeOne [GOOD]
|91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestSplitWithPartialMergeOne [GOOD]
>> TColumnShardTestSchema::HotTiersWithStat [GOOD]
|91.5%| [TA] $(B)/ydb/core/tx/balance_coverage/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|91.5%| [TA] {RESULT} $(B)/ydb/core/tx/balance_coverage/ut/test-results/unittest/{meta.json ... results_accumulator.log}
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::HotTiersWithStat [GOOD]
Test command err:
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10;
WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=};
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143438820.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143438820.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=143438820.000000s;Name=;Codec=};
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123438820.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143438820.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=143438820.000000s;Name=;Codec=};
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123437620.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123438820.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=123438820.000000s;Name=;Codec=};
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123437620.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123437620.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=123437620.000000s;Name=;Codec=};
2025-03-31T16:23:40.506538Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:23:40.609644Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:23:40.636087Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:23:40.636410Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:23:40.645551Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:23:40.645850Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:23:40.646187Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:23:40.646410Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner;
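The WaitEmptyAfter/Tiers lines above record the boundary sweep this test drives: tier0's EvictAfter drops from 143438820 s to 123438820 s and then to 123437620 s, with tier1 and the table TTL following one step behind. The step sizes are easy to verify; this is plain arithmetic on the logged values, nothing here is a YDB API:

```cpp
#include <cstdint>
#include <iostream>

int main() {
    // EvictAfter boundaries for tier0, copied from the config lines above.
    const int64_t sweep[] = {143438820, 123438820, 123437620};
    for (int i = 1; i < 3; ++i) {
        std::cout << sweep[i - 1] << " -> " << sweep[i]
                  << " (pulled back by " << sweep[i - 1] - sweep[i] << " s)\n";
    }
    // prints: 143438820 -> 123438820 (pulled back by 20000000 s)
    //         123438820 -> 123437620 (pulled back by 1200 s)
}
```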
2025-03-31T16:23:40.646595Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:23:40.646790Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:23:40.646970Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:23:40.647177Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:23:40.647355Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:23:40.647567Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:23:40.647744Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:23:40.647921Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:23:40.686572Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:23:40.686790Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:23:40.686910Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:23:40.687153Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:23:40.687406Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:23:40.687519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:23:40.687582Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:23:40.687711Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:23:40.687805Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:23:40.687872Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:23:40.687919Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:23:40.688166Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:23:40.688266Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:23:40.688339Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:23:40.688397Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:23:40.688526Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:23:40.688614Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:23:40.688688Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:23:40.688733Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:23:40.688843Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:23:40.688905Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:23:40.688954Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:23:40.689035Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:23:40.689116Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:23:40.689170Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:23:40.689745Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=79; 2025-03-31T16:23:40.689874Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=51; 2025-03-31T16:23:40.690002Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=61; 2025-03-31T16:23:40.690166Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=94; 2025-03-31T16:23:40.690446Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:23:40.690541Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:23:40.690596Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:23:40.690899Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:23:40.690988Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:23:40.691034Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=94 ... D DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=7; 2025-03-31T16:24:50.489048Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-03-31T16:24:50.489100Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=7;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-31T16:24:50.489160Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-31T16:24:50.489209Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-31T16:24:50.489308Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-31T16:24:50.489606Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000009:max} readable: 
{1000000009:max} at tablet 9437184 2025-03-31T16:24:50.489731Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-03-31T16:24:50.489916Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-03-31T16:24:50.489976Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-03-31T16:24:50.490438Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2025-03-31T16:24:50.490540Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:136;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-03-31T16:24:50.491075Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:162;event=TTxScan started;actor_id=[1:1984:3989];trace_detailed=; 2025-03-31T16:24:50.491523Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:84;ff_first=(column_ids=1;column_names=timestamp;);; 2025-03-31T16:24:50.491737Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:99;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-03-31T16:24:50.491931Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:24:50.492057Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:195;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:24:50.492449Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1984:3989];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-31T16:24:50.492550Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1984:3989];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:24:50.492696Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1984:3989];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:24:50.492747Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1984:3989] finished for tablet 9437184 2025-03-31T16:24:50.493153Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1984:3989];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:416;event=scan_finish;compute_actor_id=[1:1983:3988];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1743438290490996,"name":"_full_task","f":1743438290490996,"d_finished":0,"c":0,"l":1743438290492805,"d":1809},"events":[{"name":"bootstrap","f":1743438290491251,"d_finished":836,"c":1,"l":1743438290492087,"d":836},{"a":1743438290492428,"name":"ack","f":1743438290492428,"d_finished":0,"c":0,"l":1743438290492805,"d":377},{"a":1743438290492411,"name":"processing","f":1743438290492411,"d_finished":0,"c":0,"l":1743438290492805,"d":394},{"name":"ProduceResults","f":1743438290491841,"d_finished":494,"c":2,"l":1743438290492725,"d":494},{"a":1743438290492728,"name":"Finish","f":1743438290492728,"d_finished":0,"c":0,"l":1743438290492805,"d":77}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:24:50.493229Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1984:3989];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:1983:3988];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-31T16:24:50.493607Z node 1 
:TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1984:3989];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:371;event=scan_finished;compute_actor_id=[1:1983:3988];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":1743438290490996,"name":"_full_task","f":1743438290490996,"d_finished":0,"c":0,"l":1743438290493271,"d":2275},"events":[{"name":"bootstrap","f":1743438290491251,"d_finished":836,"c":1,"l":1743438290492087,"d":836},{"a":1743438290492428,"name":"ack","f":1743438290492428,"d_finished":0,"c":0,"l":1743438290493271,"d":843},{"a":1743438290492411,"name":"processing","f":1743438290492411,"d_finished":0,"c":0,"l":1743438290493271,"d":860},{"name":"ProduceResults","f":1743438290491841,"d_finished":494,"c":2,"l":1743438290492725,"d":494},{"a":1743438290492728,"name":"Finish","f":1743438290492728,"d_finished":0,"c":0,"l":1743438290493271,"d":543}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1984:3989]->[1:1983:3988] 2025-03-31T16:24:50.493697Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1984:3989];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-31T16:24:50.490512Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-03-31T16:24:50.493755Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1984:3989];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-31T16:24:50.493873Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1984:3989];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 240000/15598728 160000/10402096 160000/10402096 80000/5203544 0/0 |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest 
|91.5%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest >> TableCreator::CreateTables >> TColumnShardTestSchema::RebootExportAfterFail [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Seconds >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberMicroSeconds >> EraseRowsTests::ConditionalEraseRowsShouldNotEraseModifiedRows >> DistributedEraseTests::ConditionalEraseRowsShouldNotErase >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberSeconds >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint32 >> EraseRowsTests::EraseRowsShouldSuccess >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64MilliSeconds ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootExportAfterFail [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=saved_at;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143438837.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123438837.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123437637.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; 2025-03-31T16:23:59.084092Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:23:59.180021Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:23:59.202900Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:23:59.203199Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:23:59.211021Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:23:59.211274Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:23:59.211509Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:23:59.211626Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:23:59.211746Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:23:59.211872Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:23:59.211981Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:23:59.212083Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:23:59.212212Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:23:59.212350Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:23:59.212466Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:23:59.212567Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:23:59.234990Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:23:59.235135Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:23:59.235173Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:23:59.235305Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:23:59.235416Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:23:59.235461Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:23:59.235490Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:23:59.235551Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:23:59.235597Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:23:59.235637Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:23:59.235662Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:23:59.235802Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:23:59.235853Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:23:59.235882Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:23:59.235901Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:23:59.235960Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:23:59.235996Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:23:59.236025Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:23:59.236043Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:23:59.236093Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:23:59.236116Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:23:59.236138Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:23:59.236167Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:23:59.236189Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:23:59.236211Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:23:59.236482Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=30; 2025-03-31T16:23:59.236538Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=28; 2025-03-31T16:23:59.236618Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=49; 2025-03-31T16:23:59.236722Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=44; 2025-03-31T16:23:59.236876Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:23:59.236929Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:23:59.236960Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:23:59.237135Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:23:59.237198Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:23:59.237220Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:23:59.237319Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:23:59.237349Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:23:59.237370Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-31T16:23:59.237498Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description= ... 
10; 2025-03-31T16:24:53.059740Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=251; 2025-03-31T16:24:53.059776Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=26590; 2025-03-31T16:24:53.064229Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=4374; 2025-03-31T16:24:53.069034Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=3914; 2025-03-31T16:24:53.069131Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=4797; 2025-03-31T16:24:53.069269Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=79; 2025-03-31T16:24:53.069356Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=46; 2025-03-31T16:24:53.069479Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=85; 2025-03-31T16:24:53.069570Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=53; 2025-03-31T16:24:53.074424Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=4790; 2025-03-31T16:24:53.081325Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=6794; 2025-03-31T16:24:53.081462Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=50; 2025-03-31T16:24:53.081537Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=28; 2025-03-31T16:24:53.081584Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=12; 2025-03-31T16:24:53.081626Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=11; 2025-03-31T16:24:53.081669Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=10; 2025-03-31T16:24:53.081741Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=40; 2025-03-31T16:24:53.081783Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=6; 2025-03-31T16:24:53.081889Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=65; 2025-03-31T16:24:53.081939Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=7; 2025-03-31T16:24:53.082001Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=28; 2025-03-31T16:24:53.082081Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=46; 2025-03-31T16:24:53.082278Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=167; 2025-03-31T16:24:53.082313Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=57682; 2025-03-31T16:24:53.082478Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=20801572;raw_bytes=32169208;count=11;records=320000} evicted {blob_bytes=10402524;raw_bytes=16084646;count=7;records=160000} at tablet 9437184 2025-03-31T16:24:53.082575Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1934:3842];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-03-31T16:24:53.082619Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1934:3842];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-03-31T16:24:53.082677Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1934:3842];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-03-31T16:24:53.090256Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1934:3842];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=1; 2025-03-31T16:24:53.090482Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-31T16:24:53.090549Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:24:53.090621Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=3; 2025-03-31T16:24:53.090672Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700006;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-03-31T16:24:53.090709Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=3;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-31T16:24:53.090751Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-31T16:24:53.090781Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-31T16:24:53.090862Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-31T16:24:53.091567Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:24:53.091642Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;self_id=[1:1972:3873];tablet_id=9437184;parent=[1:1934:3842];fline=manager.cpp:82;event=ask_data;request=request_id=108;1={portions_count=18};; 2025-03-31T16:24:53.092531Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1934:3842];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-03-31T16:24:53.093191Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1934:3842];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-03-31T16:24:53.093226Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-03-31T16:24:53.093255Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-03-31T16:24:53.093296Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1934:3842];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-31T16:24:53.093349Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1934:3842];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:24:53.093430Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1934:3842];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=3; 2025-03-31T16:24:53.093486Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1934:3842];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700006;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-03-31T16:24:53.093523Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1934:3842];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=3;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-31T16:24:53.093562Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1934:3842];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-31T16:24:53.093596Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1934:3842];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-31T16:24:53.093689Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1934:3842];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-31T16:24:53.094162Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1934:3842];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1452;size=18;path_id=1; 2025-03-31T16:24:53.095686Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1934:3842];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1503;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 160000/10402332 160000/10402332 160000/10402332 160000/10402524 >> DataShardVolatile::DistributedWriteLostPlanThenDrop [GOOD] >> DataShardVolatile::DistributedWriteLostPlanThenSplit >> EraseRowsTests::ConditionalEraseRowsShouldNotErase >> TableCreator::CreateTables [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest >> TableCreator::CreateTables [GOOD] Test command err: 
2025-03-31T16:24:53.150200Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010453363131256:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:24:53.150377Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000e02/r3tmp/tmpAd26gJ/pdisk_1.dat 2025-03-31T16:24:53.611971Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:24:53.645981Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:24:53.646631Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:24:53.652065Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28255 TServer::EnableGrpc on GrpcPort 11832, node 1 2025-03-31T16:24:53.994015Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:24:53.994063Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:24:53.994072Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:24:53.994233Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:24:54.342258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:24:54.381697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:24:54.383176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 |91.5%| [TA] $(B)/ydb/library/table_creator/ut/test-results/unittest/{meta.json ... results_accumulator.log} |91.5%| [TA] {RESULT} $(B)/ydb/library/table_creator/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TColumnShardTestSchema::OneTier [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt4Seconds ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::OneTier [GOOD] Test command err: 2025-03-31T16:24:33.418642Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:24:33.510050Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:24:33.514953Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:24:33.515397Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:24:33.540951Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:24:33.541313Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:24:33.549737Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:24:33.549968Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:24:33.550219Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:24:33.550331Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:24:33.550441Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:24:33.550549Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:24:33.550683Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:24:33.550812Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:24:33.550971Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:24:33.551098Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:24:33.551206Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:24:33.551335Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:24:33.578230Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:24:33.581852Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:24:33.582013Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:24:33.582068Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:24:33.582233Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:24:33.582429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:24:33.582559Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:24:33.582600Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:24:33.582700Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:24:33.582767Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:24:33.582808Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:24:33.582840Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:24:33.583039Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:24:33.583187Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:24:33.583237Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:24:33.583269Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:24:33.583368Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:24:33.583418Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:24:33.583467Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:24:33.583495Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:24:33.583561Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:24:33.583594Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:24:33.583619Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:24:33.583668Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:24:33.583704Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:24:33.583733Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:24:33.584102Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=44; 2025-03-31T16:24:33.584234Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=47; 2025-03-31T16:24:33.584323Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=34; 2025-03-31T16:24:33.584428Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=37; 2025-03-31T16:24:33.584677Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:24:33.584744Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:24:33.584779Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:24:33.584979Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:24:33.585027Z 
node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:24:33.585058Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:24:33.585216Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:24:33.585271Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:24:33.585306Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-31T16:24:33.585558Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:24:33.585608Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:24:33.585637Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-31T1 ... olumn_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:24:57.123992Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:1;records_count:73;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:24:57.124019Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-31T16:24:57.124052Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:201;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-03-31T16:24:57.124174Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-31T16:24:57.124260Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:1;records_count:73;schema=timestamp: 
timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:24:57.124318Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-31T16:24:57.124405Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:232;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;);columns=1;rows=73; 2025-03-31T16:24:57.124445Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:252;stage=data_format;batch_size=584;num_rows=73;batch_columns=timestamp; 2025-03-31T16:24:57.124571Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:673:2689];bytes=584;rows=73;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us]; 2025-03-31T16:24:57.124656Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:24:57.124742Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:24:57.124854Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:24:57.124951Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-31T16:24:57.125019Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:24:57.125087Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:24:57.125118Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:680:2696] finished for tablet 9437184 2025-03-31T16:24:57.125639Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:416;event=scan_finish;compute_actor_id=[1:673:2689];stats={"p":[{"events":["f_bootstrap"],"t":0.066},{"events":["f_ProduceResults"],"t":0.476},{"events":["l_bootstrap"],"t":0.725},{"events":["f_processing","f_task_result"],"t":0.746},{"events":["l_task_result"],"t":6.31},{"events":["f_ack"],"t":6.347},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":6.971}],"full":{"a":1743438290154047,"name":"_full_task","f":1743438290154047,"d_finished":0,"c":0,"l":1743438297125182,"d":6971135},"events":[{"name":"bootstrap","f":1743438290221016,"d_finished":658292,"c":1,"l":1743438290879308,"d":658292},{"a":1743438297124938,"name":"ack","f":1743438296501226,"d_finished":579523,"c":904,"l":1743438297124883,"d":579767},{"a":1743438297124929,"name":"processing","f":1743438290900659,"d_finished":2945827,"c":4520,"l":1743438297124885,"d":2946080},{"name":"ProduceResults","f":1743438290630951,"d_finished":1166535,"c":5426,"l":1743438297125105,"d":1166535},{"a":1743438297125106,"name":"Finish","f":1743438297125106,"d_finished":0,"c":0,"l":1743438297125182,"d":76},{"name":"task_result","f":1743438290900690,"d_finished":2290536,"c":3616,"l":1743438296464413,"d":2290536}],"id":"9437184::3"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:24:57.125727Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:673:2689];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-31T16:24:57.126154Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:371;event=scan_finished;compute_actor_id=[1:673:2689];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0.066},{"events":["f_ProduceResults"],"t":0.476},{"events":["l_bootstrap"],"t":0.725},{"events":["f_processing","f_task_result"],"t":0.746},{"events":["l_task_result"],"t":6.31},{"events":["f_ack"],"t":6.347},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":6.971}],"full":{"a":1743438290154047,"name":"_full_task","f":1743438290154047,"d_finished":0,"c":0,"l":1743438297125769,"d":6971722},"events":[{"name":"bootstrap","f":1743438290221016,"d_finished":658292,"c":1,"l":1743438290879308,"d":658292},{"a":1743438297124938,"name":"ack","f":1743438296501226,"d_finished":579523,"c":904,"l":1743438297124883,"d":580354},{"a":1743438297124929,"name":"processing","f":1743438290900659,"d_finished":2945827,"c":4520,"l":1743438297124885,"d":2946667},{"name":"ProduceResults","f":1743438290630951,"d_finished":1166535,"c":5426,"l":1743438297125105,"d":1166535},{"a":1743438297125106,"name":"Finish","f":1743438297125106,"d_finished":0,"c":0,"l":1743438297125769,"d":663},{"name":"task_result","f":1743438290900690,"d_finished":2290536,"c":3616,"l":1743438296464413,"d":2290536}],"id":"9437184::3"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:24:57.126232Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-31T16:24:50.093801Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=904;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=7049848;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=7049848;selected_rows=0; 2025-03-31T16:24:57.126278Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-31T16:24:57.126507Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint32 [GOOD] >> 
EraseRowsTests::ConditionalEraseRowsShouldEraseOnTimestamp64
>> EraseRowsTests::EraseRowsShouldSuccess [GOOD]
>> EraseRowsTests::EraseRowsShouldFailOnVariousErrors
>> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Seconds [GOOD]
>> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Milliseconds
>> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberMicroSeconds [GOOD]
>> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDate32
>> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64MilliSeconds [GOOD]
>> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64MicroSeconds
>> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberSeconds [GOOD]
>> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberMilliSeconds
>> EraseRowsTests::ConditionalEraseRowsShouldNotEraseModifiedRows [GOOD]
>> EraseRowsTests::EraseRowsFromReplicatedTable
>> DistributedEraseTests::ConditionalEraseRowsShouldNotErase [GOOD]
>> DistributedEraseTests::ConditionalEraseRowsShouldFailOnVariousErrors
>> DistributedEraseTests::ConditionalEraseRowsShouldErase
>> EraseRowsTests::ConditionalEraseRowsShouldNotErase [GOOD]
>> EraseRowsTests::ConditionalEraseRowsShouldFailOnVariousErrors
>> EraseRowsTests::ConditionalEraseRowsShouldEraseOnTimestamp64 [GOOD]
>> TColumnShardTestSchema::EnableColdTiersAfterNoEviction [GOOD]
>> EraseRowsTests::EraseRowsShouldFailOnVariousErrors [GOOD]
>> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt4Seconds [GOOD]
>> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Microseconds
>> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64MicroSeconds [GOOD]
>> EraseRowsTests::EraseRowsFromReplicatedTable [GOOD]

------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnTimestamp64 [GOOD]
Test command err:
2025-03-31T16:24:57.325816Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:24:57.326019Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:24:57.326141Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012c9/r3tmp/tmpR1Gmjh/pdisk_1.dat 2025-03-31T16:24:57.625372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:24:57.665635Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:24:57.703906Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:24:57.704021Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:24:57.715450Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:24:57.796959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:24:57.836513Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-31T16:24:57.836768Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:24:57.884706Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:24:57.884878Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:24:57.886649Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:24:57.886733Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:24:57.886786Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:24:57.887162Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:24:57.887309Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:24:57.887409Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-31T16:24:57.898150Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:24:57.926168Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T16:24:57.926390Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:24:57.926504Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-31T16:24:57.926540Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:24:57.926575Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T16:24:57.926611Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:24:57.927057Z node 1 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T16:24:57.927175Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T16:24:57.927291Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:24:57.927332Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:24:57.927368Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:24:57.927429Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:24:57.927533Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-31T16:24:57.927684Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:24:57.927927Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T16:24:57.928012Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T16:24:57.930083Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:24:57.940724Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:24:57.940844Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-31T16:24:58.089459Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-31T16:24:58.092787Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T16:24:58.092859Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:24:58.093305Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:24:58.093342Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:24:58.093392Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-31T16:24:58.093609Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-31T16:24:58.093723Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:24:58.094162Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:24:58.094230Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-31T16:24:58.095626Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-31T16:24:58.095952Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
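The scan_finish stats blobs earlier in this transcript (stats={"p":[...],"full":{...},"events":[...]}) appear to use monotonic microsecond timestamps: per event, "f"/"l" look like first/last occurrence, "c" a call count, and "d_finished" accumulated busy time; for "_full_task", "d" = "l" - "a" = 6971135 us, which matches the 6.971 s total in the "t" series. A minimal summarizing sketch in Python, assuming the blob has already been cut out of the log line (the stats_json literal below is a trimmed excerpt of the blob above, not the full thing, and the field meanings are inferred, not documented):

import json

# Trimmed excerpt of the scan stats shown above; field meanings are inferred.
stats_json = (
    '{"full":{"a":1743438290154047,"name":"_full_task",'
    '"l":1743438297125182,"d":6971135},'
    '"events":[{"name":"bootstrap","f":1743438290221016,'
    '"d_finished":658292,"c":1,"l":1743438290879308,"d":658292},'
    '{"name":"ProduceResults","f":1743438290630951,'
    '"d_finished":1166535,"c":5426,"l":1743438297125105,"d":1166535}]}'
)

stats = json.loads(stats_json)
total_us = stats["full"]["d"]  # lifetime of the whole scan task, in microseconds
print(f"total: {total_us / 1e6:.3f}s")
for ev in stats["events"]:
    share = ev["d_finished"] / total_us
    print(f'{ev["name"]:>14}: {ev["d_finished"] / 1e6:.3f}s over {ev["c"]} call(s), {share:.0%} of total')

Run on the excerpt, this prints a 6.971 s total with bootstrap at roughly 9% and ProduceResults at roughly 17% of the scan lifetime.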
2025-03-31T16:24:58.097363Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-03-31T16:24:58.097404Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:24:58.098034Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-03-31T16:24:58.098101Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:24:58.098755Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:24:58.098787Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:24:58.098827Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-31T16:24:58.098876Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-31T16:24:58.098922Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-31T16:24:58.099038Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:24:58.101605Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:24:58.102926Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-03-31T16:24:58.103059Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-03-31T16:24:58.103121Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-31T16:24:58.110167Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:24:58.110256Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:747:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:24:58.110325Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:24:58.114215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:24:58.119213Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:24:58.272254Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:24:58.276954Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:24:58.356916Z node 1 :TX_PROXY ERROR: Actor# [1:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:24:58.910226Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqpgaezw81qxa3c6bpmpt2ar, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjQ3YWZiY2ItMTA0ZWU3ZTItMWE2ZDZkZGUtZmIwMWNiOTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:24:58.951903Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:856:2687], serverId# [1:857:2688], sessionId# [0:0:0] 2025-03-31T16:24:58.954369Z node 1 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888 2025-03-31T16:24:58.954584Z node 1 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row count=4 ... 025-03-31T16:25:01.858421Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:01.858460Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:25:01.858496Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:01.858857Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:663:2568], serverId# [2:674:2575], sessionId# [0:0:0] 2025-03-31T16:25:01.858982Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:25:01.859197Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T16:25:01.859267Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T16:25:01.860889Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:01.871650Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:25:01.871775Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-31T16:25:02.019770Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:703:2593], serverId# [2:705:2595], sessionId# [0:0:0] 2025-03-31T16:25:02.020640Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T16:25:02.020695Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:02.020933Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:02.020963Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:25:02.020996Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-31T16:25:02.021197Z node 2 :TX_DATASHARD DEBUG: 
LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-31T16:25:02.021311Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:25:02.021772Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:02.021820Z node 2 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-31T16:25:02.022139Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-31T16:25:02.022422Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:02.023642Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-03-31T16:25:02.023681Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:02.024305Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-03-31T16:25:02.024363Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:02.024844Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:02.024884Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:25:02.024936Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-31T16:25:02.025004Z node 2 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:410:2405], exec latency: 0 ms, propose latency: 0 ms 2025-03-31T16:25:02.025063Z node 2 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-31T16:25:02.025135Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:02.026496Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:02.027929Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-03-31T16:25:02.028050Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-03-31T16:25:02.028102Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-31T16:25:02.034730Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:02.034823Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:748:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:02.034882Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:02.038629Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:25:02.043371Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:02.189055Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:02.191544Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:25:02.225916Z node 2 :TX_PROXY ERROR: Actor# [2:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:25:02.319694Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqpgajth8etq5axtz634yxfw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzU2YjVjNTAtZTZjZTM0MTktZmJiYzE0M2UtNzhjNjYxYjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:25:02.327417Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:856:2687], serverId# [2:857:2688], sessionId# [0:0:0] 2025-03-31T16:25:02.327849Z node 2 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888 2025-03-31T16:25:02.328053Z node 2 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row count=5 2025-03-31T16:25:02.338940Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:02.342685Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:864:2694], serverId# [2:865:2695], sessionId# [0:0:0] 2025-03-31T16:25:02.343494Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-03-31T16:25:02.354576Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-03-31T16:25:02.354660Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:02.354908Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-03-31T16:25:02.354949Z node 2 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 3, at: 72075186224037888 2025-03-31T16:25:02.355224Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:02.355274Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:02.355320Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:25:02.355381Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:02.355494Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:864:2694], serverId# [2:865:2695], sessionId# [0:0:0] 2025-03-31T16:25:02.356450Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:25:02.356805Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:25:02.356991Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:02.357032Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-03-31T16:25:02.357075Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 2025-03-31T16:25:02.357288Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-03-31T16:25:02.357356Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 
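Most other entries here follow a single line shape: ISO-8601 timestamp, "node <N>", a ":COMPONENT LEVEL:" pair, then the message. A throwaway sketch for slicing a transcript like this one by component and severity; the regex and the level alternation only cover what is visible in this log and are assumptions, not the authoritative YDB format:

import re
from collections import Counter

LINE = re.compile(
    r"(?P<ts>\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z) "
    r"node (?P<node>\d+) "
    r":(?P<component>\S+) (?P<level>TRACE|DEBUG|INFO|NOTICE|WARN|ERROR): "
)

def summarize(text: str) -> Counter:
    """Count log entries per (component, level); anything unmatched is ignored."""
    return Counter((m["component"], m["level"]) for m in LINE.finditer(text))

sample = ("2025-03-31T16:25:02.357983Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan "
          "2025-03-31T16:25:02.381506Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888")
print(summarize(sample))  # Counter({('TX_DATASHARD', 'DEBUG'): 2})

Using finditer rather than per-line matching means fused transcripts like this one, with several entries on a physical line, are still split correctly at the timestamp boundaries.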
2025-03-31T16:25:02.357983Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2025-03-31T16:25:02.358236Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-03-31T16:25:02.358353Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2025-03-31T16:25:02.358400Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2025-03-31T16:25:02.381506Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-03-31T16:25:02.381581Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715661, at: 72075186224037888 2025-03-31T16:25:02.381980Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:02.382017Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-03-31T16:25:02.382059Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2025-03-31T16:25:02.382193Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:02.382262Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:02.382303Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888

------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::EraseRowsShouldFailOnVariousErrors [GOOD]
Test command err:
2025-03-31T16:24:57.291577Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:24:57.291790Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:24:57.291928Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012ea/r3tmp/tmpkypNx5/pdisk_1.dat 2025-03-31T16:24:57.597370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:24:57.634181Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:24:57.671586Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:24:57.671676Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:24:57.683106Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:24:57.762417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:24:57.794908Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-31T16:24:57.795112Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:24:57.827768Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:24:57.827863Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:24:57.829191Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:24:57.829251Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:24:57.829285Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:24:57.829570Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:24:57.829681Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:24:57.829750Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-31T16:24:57.840310Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:24:57.856139Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T16:24:57.856311Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:24:57.856394Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-31T16:24:57.856424Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:24:57.856455Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T16:24:57.856480Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:24:57.856787Z node 1 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T16:24:57.856852Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T16:24:57.856943Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:24:57.856972Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:24:57.856999Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:24:57.857041Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:24:57.857117Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-31T16:24:57.857247Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:24:57.857464Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T16:24:57.857544Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T16:24:57.859191Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:24:57.869746Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:24:57.869842Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-31T16:24:58.017813Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-31T16:24:58.021547Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T16:24:58.021611Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:24:58.022024Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:24:58.022077Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:24:58.022124Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-31T16:24:58.022337Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-31T16:24:58.022467Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:24:58.022871Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:24:58.022921Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-31T16:24:58.024349Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-31T16:24:58.024736Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-03-31T16:24:58.026292Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-03-31T16:24:58.026328Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:24:58.027034Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-03-31T16:24:58.027144Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:24:58.027768Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:24:58.027820Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:24:58.027863Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-31T16:24:58.027933Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-31T16:24:58.027981Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-31T16:24:58.028062Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:24:58.030914Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:24:58.032527Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-03-31T16:24:58.032696Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-03-31T16:24:58.032744Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-31T16:24:58.040168Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:24:58.040254Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:747:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:24:58.040301Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:24:58.045447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:24:58.051284Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:24:58.199328Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:24:58.202746Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:24:58.279891Z node 1 :TX_PROXY ERROR: Actor# [1:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:24:58.910243Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqpgaexp1bwew9ner46k66ed, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGIwZmFhMDctODdjYzEwNDQtZDliNDhhYTctYjIzZmZjYTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:24:58.951961Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:856:2687], serverId# [1:857:2688], sessionId# [0:0:0] 2025-03-31T16:24:58.954404Z node 1 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888 2025-03-31T16:24:58.954577Z node 1 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row count=3 ... 37968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:25:02.220232Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:25:02.240084Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:667:2571] 2025-03-31T16:25:02.240279Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:25:02.277196Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:25:02.277293Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:25:02.278626Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:25:02.278703Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:25:02.278793Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:25:02.279116Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:25:02.279223Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:25:02.279287Z node 2 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [2:683:2571] in generation 1 2025-03-31T16:25:02.289906Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:25:02.289985Z node 2 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T16:25:02.290087Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:25:02.290155Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [2:685:2581] 2025-03-31T16:25:02.290188Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:25:02.290212Z node 2 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T16:25:02.290238Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:02.290582Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T16:25:02.290671Z node 2 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T16:25:02.290741Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:02.290770Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:02.290851Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:25:02.290912Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:02.291422Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:663:2568], serverId# [2:674:2575], sessionId# [0:0:0] 2025-03-31T16:25:02.291583Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:25:02.291762Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T16:25:02.291824Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T16:25:02.293144Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:02.303790Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:25:02.303920Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-31T16:25:02.452394Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:703:2593], serverId# [2:705:2595], sessionId# [0:0:0] 2025-03-31T16:25:02.453592Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T16:25:02.453656Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:02.454024Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:02.454083Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:25:02.454150Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-31T16:25:02.454419Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-31T16:25:02.454586Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:25:02.455221Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:02.455292Z node 2 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-31T16:25:02.455785Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-31T16:25:02.456230Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:02.457772Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 
72075186224037888 time 0 2025-03-31T16:25:02.457825Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:02.458656Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-03-31T16:25:02.458746Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:02.459324Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:02.459373Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:25:02.459427Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-31T16:25:02.459488Z node 2 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:410:2405], exec latency: 0 ms, propose latency: 0 ms 2025-03-31T16:25:02.459552Z node 2 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-31T16:25:02.459654Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:02.461134Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:02.463141Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-03-31T16:25:02.463349Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-03-31T16:25:02.463408Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-31T16:25:02.469716Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:739:2621], serverId# [2:740:2622], sessionId# [0:0:0] 2025-03-31T16:25:02.469888Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-03-31T16:25:02.491232Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-03-31T16:25:02.491319Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:02.491713Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:739:2621], serverId# [2:740:2622], sessionId# [0:0:0] 2025-03-31T16:25:02.493835Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:745:2627], serverId# [2:746:2628], sessionId# [0:0:0] 2025-03-31T16:25:02.493976Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-03-31T16:25:02.494236Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-03-31T16:25:02.494291Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:02.494535Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:745:2627], serverId# [2:746:2628], sessionId# [0:0:0] 2025-03-31T16:25:02.496485Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:750:2632], serverId# [2:751:2633], sessionId# [0:0:0] 2025-03-31T16:25:02.496644Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 
2025-03-31T16:25:02.496841Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-03-31T16:25:02.496934Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:02.497148Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:750:2632], serverId# [2:751:2633], sessionId# [0:0:0] 2025-03-31T16:25:02.498916Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:755:2637], serverId# [2:756:2638], sessionId# [0:0:0] 2025-03-31T16:25:02.499057Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-03-31T16:25:02.499255Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-03-31T16:25:02.499299Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:02.499507Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:755:2637], serverId# [2:756:2638], sessionId# [0:0:0] 2025-03-31T16:25:02.501234Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:760:2642], serverId# [2:761:2643], sessionId# [0:0:0] 2025-03-31T16:25:02.501414Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-03-31T16:25:02.501769Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-03-31T16:25:02.501821Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:02.502032Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:760:2642], serverId# [2:761:2643], sessionId# [0:0:0] 2025-03-31T16:25:02.503715Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:765:2647], serverId# [2:766:2648], sessionId# [0:0:0] 2025-03-31T16:25:02.503834Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-03-31T16:25:02.504005Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-03-31T16:25:02.504066Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:02.504303Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:765:2647], serverId# [2:766:2648], sessionId# [0:0:0]

------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::EnableColdTiersAfterNoEviction [GOOD]
Test command err:
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10;
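The FALLBACK_ACTOR_LOGGING records above (and continuing below) use a semicolon-delimited key=value shape instead of the timestamped one. A small parsing sketch, assuming top-level fields never themselves contain ';'; values with an internal '=', such as description=CLASS_NAME=Granules in the normalizer records further down, survive because only the first '=' splits:

def parse_fallback(record: str) -> dict:
    """Turn 'FALLBACK_ACTOR_LOGGING;k=v;...;' into a dict; naive, assumes no nested ';'."""
    fields = {}
    for part in record.strip().rstrip(";").split(";")[1:]:  # [1:] drops the marker token
        key, _, value = part.partition("=")
        fields[key] = value
    return fields

rec = "FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10;"
print(parse_fallback(rec))
# {'priority': 'DEBUG', 'component': '2100', 'fline': 'native.cpp:110', 'event': 'serialize', 'size': '9934960', 'columns': '10'}

Note this deliberately does not handle the nested-brace Tiers=/TTL= descriptors that follow; those would need a real recursive parser.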
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143438834.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143438834.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=143438834.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123438834.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143438834.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=143438834.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123437634.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123438834.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=123438834.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123437634.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123437634.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=123437634.000000s;Name=;Codec=}; 2025-03-31T16:23:54.774991Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:23:54.873865Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:23:54.901442Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:23:54.901732Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:23:54.910301Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:23:54.910509Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:23:54.910746Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:23:54.910884Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:23:54.911007Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:23:54.911141Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:23:54.911244Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:23:54.911353Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:23:54.911489Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:23:54.911626Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:23:54.911724Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:23:54.911833Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:23:54.942901Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:23:54.943069Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:23:54.943150Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:23:54.943320Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:23:54.943473Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:23:54.943544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:23:54.943594Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:23:54.943688Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:23:54.943755Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:23:54.943803Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:23:54.943836Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:23:54.944020Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:23:54.944099Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:23:54.944144Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:23:54.944177Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:23:54.944274Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:23:54.944335Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:23:54.944386Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:23:54.944423Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:23:54.944494Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:23:54.944532Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:23:54.944563Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:23:54.944611Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:23:54.944652Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:23:54.944686Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:23:54.945081Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=63; 2025-03-31T16:23:54.945183Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=53; 2025-03-31T16:23:54.945265Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=37; 2025-03-31T16:23:54.945346Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=40; 2025-03-31T16:23:54.945542Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:23:54.945617Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:23:54.945659Z node 
1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:23:54.945882Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:23:54.945939Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:23:54.945976Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TT ... Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=7; 2025-03-31T16:25:02.575124Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-03-31T16:25:02.575183Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=7;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-31T16:25:02.575241Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-31T16:25:02.575278Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-31T16:25:02.575361Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-31T16:25:02.575589Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000009:max} readable: {1000000009:max} at tablet 9437184 2025-03-31T16:25:02.575699Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-03-31T16:25:02.575838Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-03-31T16:25:02.575896Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-03-31T16:25:02.576252Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2025-03-31T16:25:02.576322Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:136;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-03-31T16:25:02.576699Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:162;event=TTxScan started;actor_id=[1:1984:3989];trace_detailed=; 2025-03-31T16:25:02.577056Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:84;ff_first=(column_ids=1;column_names=timestamp;);; 2025-03-31T16:25:02.577254Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:99;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-03-31T16:25:02.577386Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:25:02.577509Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:25:02.577849Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1984:3989];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-31T16:25:02.577948Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1984:3989];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 
2025-03-31T16:25:02.578067Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1984:3989];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:25:02.578100Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1984:3989] finished for tablet 9437184 2025-03-31T16:25:02.578448Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1984:3989];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:416;event=scan_finish;compute_actor_id=[1:1983:3988];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0},{"events":["f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1743438302576635,"name":"_full_task","f":1743438302576635,"d_finished":0,"c":0,"l":1743438302578149,"d":1514},"events":[{"name":"bootstrap","f":1743438302576799,"d_finished":732,"c":1,"l":1743438302577531,"d":732},{"a":1743438302577829,"name":"ack","f":1743438302577829,"d_finished":0,"c":0,"l":1743438302578149,"d":320},{"a":1743438302577814,"name":"processing","f":1743438302577814,"d_finished":0,"c":0,"l":1743438302578149,"d":335},{"name":"ProduceResults","f":1743438302577327,"d_finished":419,"c":2,"l":1743438302578088,"d":419},{"a":1743438302578090,"name":"Finish","f":1743438302578090,"d_finished":0,"c":0,"l":1743438302578149,"d":59}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:25:02.578512Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1984:3989];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:1983:3988];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-31T16:25:02.578799Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=4;SelfId=[1:1984:3989];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:371;event=scan_finished;compute_actor_id=[1:1983:3988];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0},{"events":["f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1743438302576635,"name":"_full_task","f":1743438302576635,"d_finished":0,"c":0,"l":1743438302578546,"d":1911},"events":[{"name":"bootstrap","f":1743438302576799,"d_finished":732,"c":1,"l":1743438302577531,"d":732},{"a":1743438302577829,"name":"ack","f":1743438302577829,"d_finished":0,"c":0,"l":1743438302578546,"d":717},{"a":1743438302577814,"name":"processing","f":1743438302577814,"d_finished":0,"c":0,"l":1743438302578546,"d":732},{"name":"ProduceResults","f":1743438302577327,"d_finished":419,"c":2,"l":1743438302578088,"d":419},{"a":1743438302578090,"name":"Finish","f":1743438302578090,"d_finished":0,"c":0,"l":1743438302578546,"d":456}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1984:3989]->[1:1983:3988] 2025-03-31T16:25:02.578870Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1984:3989];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-31T16:25:02.576301Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-03-31T16:25:02.578902Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1984:3989];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-31T16:25:02.578989Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1984:3989];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 240000/15598728 160000/10402096 160000/10402096 80000/5203544 0/0 >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Milliseconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgTimestamp >> DataShardVolatile::DistributedWriteLostPlanThenSplit [GOOD] >> DataShardVolatile::DistributedOutOfOrderFollowerConsistency >> 
EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberMilliSeconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberNanoSeconds >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDate32 [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDatetime64 >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnVariousErrors [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnSplit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64MicroSeconds [GOOD] Test command err: 2025-03-31T16:24:57.143754Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:24:57.144007Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:24:57.144181Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012c0/r3tmp/tmpolYbwi/pdisk_1.dat 2025-03-31T16:24:57.470232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:24:57.517520Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:24:57.562233Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:24:57.562368Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:24:57.574675Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:24:57.668896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:24:57.724736Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-31T16:24:57.724994Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:24:57.763463Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:24:57.763569Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:24:57.765923Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:24:57.766003Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:24:57.766043Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:24:57.767676Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:24:57.767852Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:24:57.767924Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-31T16:24:57.778599Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:24:57.807778Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T16:24:57.808988Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:24:57.809145Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-31T16:24:57.809180Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:24:57.809213Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T16:24:57.809248Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:24:57.810243Z node 1 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T16:24:57.810324Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T16:24:57.810409Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:24:57.810439Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:24:57.810512Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:24:57.810563Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:24:57.810626Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-31T16:24:57.810754Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:24:57.811661Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T16:24:57.811808Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T16:24:57.814766Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:24:57.825414Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:24:57.825530Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-31T16:24:57.975341Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-31T16:24:57.980533Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T16:24:57.980647Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:24:57.981325Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:24:57.981387Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:24:57.981515Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-31T16:24:57.981823Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-31T16:24:57.982030Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:24:57.982692Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:24:57.982823Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-31T16:24:57.993654Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-31T16:24:57.995344Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-03-31T16:24:57.999476Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-03-31T16:24:57.999547Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:24:58.000737Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-03-31T16:24:58.000841Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:24:58.001845Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:24:58.001898Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:24:58.001964Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-31T16:24:58.002034Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-31T16:24:58.002115Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-31T16:24:58.002316Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:24:58.006674Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:24:58.008929Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-03-31T16:24:58.009166Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-03-31T16:24:58.009241Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-31T16:24:58.034076Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:24:58.034221Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:747:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:24:58.034306Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:24:58.041987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:24:58.048658Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:24:58.199985Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:24:58.203383Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:24:58.266729Z node 1 :TX_PROXY ERROR: Actor# [1:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:24:58.910272Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqpgaexf63rgfmqc5frp7dpb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWNkNTc0NWUtNzQyMjA1YjUtNmFhNmNkZTEtOTgwNGQ4ZWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:24:58.951991Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:856:2687], serverId# [1:857:2688], sessionId# [0:0:0] 2025-03-31T16:24:58.954405Z node 1 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888 2025-03-31T16:24:58.954651Z node 1 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row count=4 ... 025-03-31T16:25:02.429315Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:02.429355Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:25:02.429397Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:02.429790Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:663:2568], serverId# [2:674:2575], sessionId# [0:0:0] 2025-03-31T16:25:02.429914Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:25:02.430127Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T16:25:02.430237Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T16:25:02.431921Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:02.442744Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:25:02.442851Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-31T16:25:02.591901Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:703:2593], serverId# [2:705:2595], sessionId# [0:0:0] 2025-03-31T16:25:02.593090Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T16:25:02.593146Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:02.593495Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:02.593560Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:25:02.593600Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-31T16:25:02.593839Z node 2 :TX_DATASHARD DEBUG: 
LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-31T16:25:02.593975Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:25:02.594589Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:02.594674Z node 2 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-31T16:25:02.595152Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-31T16:25:02.595548Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:02.597006Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-03-31T16:25:02.597059Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:02.598019Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-03-31T16:25:02.598089Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:02.598644Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:02.598701Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:25:02.598760Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-31T16:25:02.598831Z node 2 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:410:2405], exec latency: 0 ms, propose latency: 0 ms 2025-03-31T16:25:02.598887Z node 2 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-31T16:25:02.598979Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:02.600496Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:02.602430Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-03-31T16:25:02.602714Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-03-31T16:25:02.602804Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-31T16:25:02.611851Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:02.611973Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:748:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:02.612065Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:02.617194Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:25:02.623614Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:02.771611Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:02.774753Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:25:02.808687Z node 2 :TX_PROXY ERROR: Actor# [2:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:25:02.862635Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqpgakcjdyes5jjxyfs2ygwf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Mjc2NjkxN2UtOTNmYmYzNDEtZGVjZjU1MTEtNzBiOWNmMTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:25:02.864600Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:856:2687], serverId# [2:857:2688], sessionId# [0:0:0] 2025-03-31T16:25:02.864914Z node 2 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888 2025-03-31T16:25:02.865037Z node 2 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row count=4 2025-03-31T16:25:02.875805Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:02.879521Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:864:2694], serverId# [2:865:2695], sessionId# [0:0:0] 2025-03-31T16:25:02.880445Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-03-31T16:25:02.891426Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-03-31T16:25:02.891486Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:02.891664Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-03-31T16:25:02.891695Z node 2 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 3, at: 72075186224037888 2025-03-31T16:25:02.891863Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:02.891900Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:02.891960Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:25:02.892036Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:02.892131Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:864:2694], serverId# [2:865:2695], sessionId# [0:0:0] 2025-03-31T16:25:02.892814Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:25:02.893095Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:25:02.893233Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:02.893264Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-03-31T16:25:02.893296Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 2025-03-31T16:25:02.893526Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-03-31T16:25:02.893579Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 
2025-03-31T16:25:02.894057Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2025-03-31T16:25:02.894252Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-03-31T16:25:02.894342Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2025-03-31T16:25:02.894379Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2025-03-31T16:25:02.911953Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-03-31T16:25:02.912010Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715661, at: 72075186224037888 2025-03-31T16:25:02.912290Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:02.912324Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-03-31T16:25:02.912361Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2025-03-31T16:25:02.912445Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:02.912488Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:02.912521Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 >> TColumnShardTestSchema::ExternalTTL_Types [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::EraseRowsFromReplicatedTable [GOOD] Test command err: 2025-03-31T16:24:57.677265Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:24:57.677492Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:24:57.677620Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012d6/r3tmp/tmpt8oIie/pdisk_1.dat 2025-03-31T16:24:58.002276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:24:58.040071Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:24:58.077777Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:24:58.077931Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:24:58.089295Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:24:58.171733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:24:58.214172Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-31T16:24:58.214461Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:24:58.266126Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:24:58.266273Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:24:58.268208Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:24:58.268295Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:24:58.268378Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:24:58.268765Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:24:58.268931Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:24:58.269016Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-31T16:24:58.279864Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:24:58.306319Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T16:24:58.306579Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:24:58.306719Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-31T16:24:58.306759Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:24:58.306797Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T16:24:58.306838Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:24:58.307333Z node 1 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T16:24:58.307439Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T16:24:58.307574Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:24:58.307620Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:24:58.307676Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:24:58.307745Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:24:58.307824Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-31T16:24:58.307996Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:24:58.308258Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T16:24:58.308369Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T16:24:58.310639Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:24:58.321445Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:24:58.321595Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-31T16:24:58.471151Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-31T16:24:58.479902Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T16:24:58.480001Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:24:58.480575Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:24:58.480620Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:24:58.480700Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-31T16:24:58.480924Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-31T16:24:58.481081Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:24:58.481767Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:24:58.481869Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-31T16:24:58.483718Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-31T16:24:58.484104Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-03-31T16:24:58.486146Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-03-31T16:24:58.486187Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:24:58.487210Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-03-31T16:24:58.487302Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:24:58.488012Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:24:58.488048Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:24:58.488091Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-31T16:24:58.488141Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-31T16:24:58.488213Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-31T16:24:58.488308Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:24:58.491542Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:24:58.493757Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-03-31T16:24:58.494004Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-03-31T16:24:58.494072Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-31T16:24:58.503971Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:24:58.504080Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:747:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:24:58.504143Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:24:58.509803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:24:58.515832Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:24:58.665788Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:24:58.669556Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:24:58.741682Z node 1 :TX_PROXY ERROR: Actor# [1:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:24:59.106488Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqpgafc59t47z9vfakxfp973, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Mzk0ZmYwOTAtNWMwNzU0ZTUtYjU0YjU1ZjYtMjU2ZjIwZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:24:59.131599Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:856:2687], serverId# [1:857:2688], sessionId# [0:0:0] 2025-03-31T16:24:59.132150Z node 1 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888 2025-03-31T16:24:59.132368Z node 1 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row count=3 ... 3-31T16:24:59.242185Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:24:59.242231Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-03-31T16:24:59.242288Z node 1 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715662] at 72075186224037888 for WaitForStreamClearance 2025-03-31T16:24:59.242589Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-03-31T16:24:59.242652Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:24:59.243289Z node 1 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715662, MessageQuota: 1 2025-03-31T16:24:59.243529Z node 1 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715662, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-03-31T16:24:59.243664Z node 1 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715662, PendingAcks: 0 2025-03-31T16:24:59.243708Z node 1 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715662, MessageQuota: 0 2025-03-31T16:24:59.245744Z node 1 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-03-31T16:24:59.245797Z node 1 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715662, at: 72075186224037888 2025-03-31T16:24:59.245960Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:24:59.246001Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-03-31T16:24:59.246038Z node 1 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715662] at 72075186224037888 for ReadTableScan 2025-03-31T16:24:59.246167Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:24:59.246232Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:24:59.246276Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:02.500257Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:101:2147], 
Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:25:02.500773Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:25:02.500914Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012d6/r3tmp/tmp0FNu4V/pdisk_1.dat 2025-03-31T16:25:02.825798Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:25:02.855803Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:25:02.893003Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:02.893195Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:02.904804Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:25:02.987131Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:25:03.011037Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:667:2571] 2025-03-31T16:25:03.011343Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:25:03.066026Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:25:03.066184Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:25:03.068179Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:25:03.068283Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:25:03.068380Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:25:03.068794Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:25:03.068946Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:25:03.069046Z node 2 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [2:683:2571] in generation 1 2025-03-31T16:25:03.079849Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:25:03.079941Z node 2 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T16:25:03.080062Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:25:03.080168Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [2:685:2581] 2025-03-31T16:25:03.080209Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:25:03.080255Z node 2 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T16:25:03.080298Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:03.080694Z node 2 :TX_DATASHARD DEBUG: 
2025-03-31T16:25:03.080835Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888
2025-03-31T16:25:03.080960Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888
2025-03-31T16:25:03.081006Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0
2025-03-31T16:25:03.081049Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0
2025-03-31T16:25:03.081113Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888
2025-03-31T16:25:03.081561Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:663:2568], serverId# [2:674:2575], sessionId# [0:0:0]
2025-03-31T16:25:03.081701Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888
2025-03-31T16:25:03.081956Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1
2025-03-31T16:25:03.082050Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888
2025-03-31T16:25:03.083974Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888
2025-03-31T16:25:03.094661Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888
2025-03-31T16:25:03.094768Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme
2025-03-31T16:25:03.243237Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:703:2593], serverId# [2:705:2595], sessionId# [0:0:0]
2025-03-31T16:25:03.244535Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 }
2025-03-31T16:25:03.244606Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-03-31T16:25:03.244999Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888
2025-03-31T16:25:03.245057Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1
2025-03-31T16:25:03.245106Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888
2025-03-31T16:25:03.245353Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0
2025-03-31T16:25:03.245506Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1
2025-03-31T16:25:03.246116Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888
2025-03-31T16:25:03.246181Z node 2 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1
2025-03-31T16:25:03.246651Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2
2025-03-31T16:25:03.247073Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0
2025-03-31T16:25:03.248637Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0
2025-03-31T16:25:03.248692Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-03-31T16:25:03.249576Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000}
2025-03-31T16:25:03.249647Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888
2025-03-31T16:25:03.250119Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888
2025-03-31T16:25:03.250161Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888
2025-03-31T16:25:03.250207Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888
2025-03-31T16:25:03.250271Z node 2 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:410:2405], exec latency: 0 ms, propose latency: 0 ms
2025-03-31T16:25:03.250325Z node 2 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0
2025-03-31T16:25:03.250458Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-03-31T16:25:03.251878Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888
2025-03-31T16:25:03.253679Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000
2025-03-31T16:25:03.253869Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready
2025-03-31T16:25:03.253950Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888
2025-03-31T16:25:03.260680Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:739:2621], serverId# [2:740:2622], sessionId# [0:0:0]
2025-03-31T16:25:03.260816Z node 2 :TX_DATASHARD NOTICE: Rejecting erase request on datashard: tablet# 72075186224037888, error# Can't execute erase at replicated table
2025-03-31T16:25:03.261016Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:739:2621], serverId# [2:740:2622], sessionId# [0:0:0]
>> TColumnShardTestSchema::RebootHotTiersTtlWithStat [GOOD]
>> DistributedEraseTests::ConditionalEraseRowsShouldSuccessOnShardedIndex
>> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64Seconds
>> EraseRowsTests::ConditionalEraseRowsShouldFailOnVariousErrors [GOOD]
>> DistributedEraseTests::ConditionalEraseRowsShouldErase [GOOD]
>> DistributedEraseTests::ConditionalEraseRowsCheckLimits
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootHotTiersTtlWithStat [GOOD]
Test command err:
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10;
WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=};
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143438847.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143438847.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=143438847.000000s;Name=;Codec=};
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143438847.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143438847.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=123438847.000000s;Name=;Codec=};
WaitEmptyAfter=1;Tiers={{Column=timestamp;EvictAfter=143438847.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143438847.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=123437647.000000s;Name=;Codec=};
2025-03-31T16:24:08.166918Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0
2025-03-31T16:24:08.275384Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor;
2025-03-31T16:24:08.303763Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished;
2025-03-31T16:24:08.304114Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184
2025-03-31T16:24:08.313395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0;
2025-03-31T16:24:08.313649Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules;
2025-03-31T16:24:08.313938Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks;
2025-03-31T16:24:08.314094Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner;
2025-03-31T16:24:08.314244Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId;
2025-03-31T16:24:08.314416Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup;
2025-03-31T16:24:08.314546Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer;
2025-03-31T16:24:08.314676Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks;
2025-03-31T16:24:08.314846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks;
2025-03-31T16:24:08.315012Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks;
2025-03-31T16:24:08.315174Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2;
2025-03-31T16:24:08.315314Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks;
2025-03-31T16:24:08.346216Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184
2025-03-31T16:24:08.346445Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules;
2025-03-31T16:24:08.346569Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules;
2025-03-31T16:24:08.346806Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found;
2025-03-31T16:24:08.347030Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1;
2025-03-31T16:24:08.347156Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks;
2025-03-31T16:24:08.347235Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks;
2025-03-31T16:24:08.347387Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found;
2025-03-31T16:24:08.347483Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2;
2025-03-31T16:24:08.347558Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner;
2025-03-31T16:24:08.347612Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner;
2025-03-31T16:24:08.347850Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found;
2025-03-31T16:24:08.347990Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4;
2025-03-31T16:24:08.348059Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId;
2025-03-31T16:24:08.348122Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId;
2025-03-31T16:24:08.348268Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found;
2025-03-31T16:24:08.348362Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6;
2025-03-31T16:24:08.348431Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup;
2025-03-31T16:24:08.348474Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup;
2025-03-31T16:24:08.348581Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8;
2025-03-31T16:24:08.348677Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer;
2025-03-31T16:24:08.348727Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer;
2025-03-31T16:24:08.348805Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9;
2025-03-31T16:24:08.348870Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks;
2025-03-31T16:24:08.348931Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks;
2025-03-31T16:24:08.349470Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=63;
2025-03-31T16:24:08.349596Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=47;
2025-03-31T16:24:08.349708Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=47;
2025-03-31T16:24:08.349847Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=69;
2025-03-31T16:24:08.350081Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10;
2025-03-31T16:24:08.350171Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks;
2025-03-31T16:24:08.350236Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks;
2025-03-31T16:24:08.350519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11;
2025-03-31T16:24:08.350594Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks;
2025-03-31T16:24:08.350651Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks;
2025-03-31T16:24:08.350868Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchem ... ines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=320;
2025-03-31T16:25:04.789787Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=40470;
2025-03-31T16:25:04.797316Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=7452;
2025-03-31T16:25:04.805321Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=7102;
2025-03-31T16:25:04.805413Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=8010;
2025-03-31T16:25:04.805565Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=75;
2025-03-31T16:25:04.805662Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=59;
2025-03-31T16:25:04.805774Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=71;
2025-03-31T16:25:04.805900Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=73;
2025-03-31T16:25:04.813791Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=7822;
2025-03-31T16:25:04.825603Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=11701;
2025-03-31T16:25:04.825723Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=37;
2025-03-31T16:25:04.825790Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=25;
2025-03-31T16:25:04.825835Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=6;
2025-03-31T16:25:04.825898Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=8;
2025-03-31T16:25:04.825963Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=9;
2025-03-31T16:25:04.826037Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=38;
2025-03-31T16:25:04.826093Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=7;
2025-03-31T16:25:04.826171Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=46;
2025-03-31T16:25:04.826209Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=6;
2025-03-31T16:25:04.826260Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=23;
2025-03-31T16:25:04.826347Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=52;
2025-03-31T16:25:04.826596Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=202;
2025-03-31T16:25:04.826633Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=84251;
2025-03-31T16:25:04.826779Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=36397736;raw_bytes=56295575;count=22;records=560000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184
2025-03-31T16:25:04.826904Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:2549:4458];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork;
2025-03-31T16:25:04.826959Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:2549:4458];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive;
2025-03-31T16:25:04.827008Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2549:4458];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL;
2025-03-31T16:25:04.840703Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2549:4458];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=1;
2025-03-31T16:25:04.840846Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0;
2025-03-31T16:25:04.840900Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0;
2025-03-31T16:25:04.840993Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=6;
2025-03-31T16:25:04.841049Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700008;tx_id=18446744073709551615;;current_snapshot_ts=1000000002;
2025-03-31T16:25:04.841088Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=6;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000;
2025-03-31T16:25:04.841136Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes;
2025-03-31T16:25:04.841171Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes;
2025-03-31T16:25:04.841253Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes;
2025-03-31T16:25:04.841915Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0;
2025-03-31T16:25:04.842614Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;self_id=[1:2599:4501];tablet_id=9437184;parent=[1:2549:4458];fline=manager.cpp:82;event=ask_data;request=request_id=120;1={portions_count=22};;
2025-03-31T16:25:04.843219Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2549:4458];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184;
2025-03-31T16:25:04.844029Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2549:4458];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;
2025-03-31T16:25:04.844057Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats.
2025-03-31T16:25:04.844082Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184
2025-03-31T16:25:04.844128Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2549:4458];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0;
2025-03-31T16:25:04.844182Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2549:4458];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0;
2025-03-31T16:25:04.844245Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2549:4458];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=6;
2025-03-31T16:25:04.844298Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2549:4458];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700008;tx_id=18446744073709551615;;current_snapshot_ts=1000000002;
2025-03-31T16:25:04.844347Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2549:4458];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=6;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000;
2025-03-31T16:25:04.844390Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2549:4458];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes;
2025-03-31T16:25:04.844421Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2549:4458];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes;
2025-03-31T16:25:04.844492Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2549:4458];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes;
2025-03-31T16:25:04.845037Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:2549:4458];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1452;size=22;path_id=1;
2025-03-31T16:25:04.845939Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:2549:4458];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1503;stage=finished;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184
240000/15598728 160000/10402096 80000/5203352 0/0
>> TColumnShardTestSchema::ExportAfterFail [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ExternalTTL_Types [GOOD]
Test command err:
2025-03-31T16:23:19.009562Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0
2025-03-31T16:23:19.117310Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvBoot
2025-03-31T16:23:19.125093Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvRestored
2025-03-31T16:23:19.125520Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor;
2025-03-31T16:23:19.148516Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished;
2025-03-31T16:23:19.148871Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184
2025-03-31T16:23:19.156229Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0;
2025-03-31T16:23:19.156424Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules;
2025-03-31T16:23:19.156606Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks;
2025-03-31T16:23:19.156700Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner;
2025-03-31T16:23:19.156766Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId;
2025-03-31T16:23:19.156852Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup;
2025-03-31T16:23:19.156935Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer;
2025-03-31T16:23:19.157048Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks;
2025-03-31T16:23:19.157142Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks;
2025-03-31T16:23:19.157214Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks;
2025-03-31T16:23:19.157295Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2;
2025-03-31T16:23:19.157379Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks;
2025-03-31T16:23:19.180679Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvFollowerSyncComplete
2025-03-31T16:23:19.192636Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184
2025-03-31T16:23:19.192821Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules;
2025-03-31T16:23:19.192880Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules;
2025-03-31T16:23:19.193084Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found;
2025-03-31T16:23:19.193255Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1;
2025-03-31T16:23:19.193381Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks;
2025-03-31T16:23:19.193432Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks;
2025-03-31T16:23:19.193552Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found;
2025-03-31T16:23:19.193632Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2;
2025-03-31T16:23:19.193689Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner;
2025-03-31T16:23:19.193728Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner;
2025-03-31T16:23:19.193932Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found;
2025-03-31T16:23:19.194014Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4;
2025-03-31T16:23:19.194073Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId;
2025-03-31T16:23:19.194115Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId;
2025-03-31T16:23:19.194228Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found;
2025-03-31T16:23:19.194294Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6;
2025-03-31T16:23:19.194345Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup;
2025-03-31T16:23:19.194396Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup;
2025-03-31T16:23:19.194481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8;
2025-03-31T16:23:19.194521Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer;
2025-03-31T16:23:19.194554Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer;
2025-03-31T16:23:19.194608Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9;
2025-03-31T16:23:19.194647Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks;
2025-03-31T16:23:19.194702Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks;
2025-03-31T16:23:19.195161Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=74;
2025-03-31T16:23:19.195268Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=39;
2025-03-31T16:23:19.195413Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=88;
2025-03-31T16:23:19.195529Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=50;
2025-03-31T16:23:19.195751Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10;
2025-03-31T16:23:19.195826Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks;
2025-03-31T16:23:19.195881Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks;
2025-03-31T16:23:19.196108Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11;
2025-03-31T16:23:19.196167Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks;
2025-03-31T16:23:19.196198Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks;
2025-03-31T16:23:19.196366Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13;
2025-03-31T16:23:19.196412Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2;
2025-03-31T16:23:19.196443Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2;
2025-03-31T16:23:19.196651Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15;
2025-03-31T16:23:19.196694Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks;
2025-03-31T16:23:19.196723Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks;
2025-03-31T1 ... id;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;);
2025-03-31T16:25:03.648718Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:1;records_count:71;schema=saved_at: uint64;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;);
2025-03-31T16:25:03.648747Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1;
2025-03-31T16:25:03.648777Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:201;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);;
2025-03-31T16:25:03.648855Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);;
2025-03-31T16:25:03.648938Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:1;records_count:71;schema=saved_at: uint64;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;);
2025-03-31T16:25:03.648977Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1;
2025-03-31T16:25:03.649066Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:232;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;);columns=1;rows=71;
2025-03-31T16:25:03.649109Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:252;stage=data_format;batch_size=568;num_rows=71;batch_columns=saved_at;
2025-03-31T16:25:03.649257Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:366;event=send_data;compute_actor_id=[4:593:2609];bytes=568;rows=71;faults=0;finished=0;fault=0;schema=saved_at: uint64;
2025-03-31T16:25:03.649370Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;);
2025-03-31T16:25:03.649500Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;);
2025-03-31T16:25:03.649608Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;);
2025-03-31T16:25:03.649728Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);;
2025-03-31T16:25:03.649814Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;);
2025-03-31T16:25:03.649896Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;);
2025-03-31T16:25:03.649930Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: Scan [4:600:2616] finished for tablet 9437184
2025-03-31T16:25:03.650412Z node 4 :TX_COLUMNSHARD_SCAN INFO: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:416;event=scan_finish;compute_actor_id=[4:593:2609];stats={"p":[{"events":["f_bootstrap"],"t":0.058},{"events":["f_ProduceResults"],"t":0.429},{"events":["l_bootstrap"],"t":0.658},{"events":["f_processing","f_task_result"],"t":0.683},{"events":["l_task_result"],"t":7.44},{"events":["f_ack"],"t":7.474},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":8.192}],"full":{"a":1743438295457226,"name":"_full_task","f":1743438295457226,"d_finished":0,"c":0,"l":1743438303649986,"d":8192760},"events":[{"name":"bootstrap","f":1743438295515333,"d_finished":600245,"c":1,"l":1743438296115578,"d":600245},{"a":1743438303649710,"name":"ack","f":1743438302931805,"d_finished":668473,"c":903,"l":1743438303649635,"d":668749},{"a":1743438303649698,"name":"processing","f":1743438296141002,"d_finished":3525853,"c":4515,"l":1743438303649638,"d":3526141},{"name":"ProduceResults","f":1743438295886321,"d_finished":1376680,"c":5420,"l":1743438303649916,"d":1376680},{"a":1743438303649918,"name":"Finish","f":1743438303649918,"d_finished":0,"c":0,"l":1743438303649986,"d":68},{"name":"task_result","f":1743438296141030,"d_finished":2774672,"c":3612,"l":1743438302897401,"d":2774672}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;);
2025-03-31T16:25:03.650499Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=send_data;compute_actor_id=[4:593:2609];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=;
2025-03-31T16:25:03.650944Z node 4 :TX_COLUMNSHARD_SCAN INFO: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:371;event=scan_finished;compute_actor_id=[4:593:2609];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0.058},{"events":["f_ProduceResults"],"t":0.429},{"events":["l_bootstrap"],"t":0.658},{"events":["f_processing","f_task_result"],"t":0.683},{"events":["l_task_result"],"t":7.44},{"events":["f_ack"],"t":7.474},{"events":["l_ProduceResults","f_Finish"],"t":8.192},{"events":["l_ack","l_processing","l_Finish"],"t":8.193}],"full":{"a":1743438295457226,"name":"_full_task","f":1743438295457226,"d_finished":0,"c":0,"l":1743438303650545,"d":8193319},"events":[{"name":"bootstrap","f":1743438295515333,"d_finished":600245,"c":1,"l":1743438296115578,"d":600245},{"a":1743438303649710,"name":"ack","f":1743438302931805,"d_finished":668473,"c":903,"l":1743438303649635,"d":669308},{"a":1743438303649698,"name":"processing","f":1743438296141002,"d_finished":3525853,"c":4515,"l":1743438303649638,"d":3526700},{"name":"ProduceResults","f":1743438295886321,"d_finished":1376680,"c":5420,"l":1743438303649916,"d":1376680},{"a":1743438303649918,"name":"Finish","f":1743438303649918,"d_finished":0,"c":0,"l":1743438303650545,"d":627},{"name":"task_result","f":1743438296141030,"d_finished":2774672,"c":3612,"l":1743438302897401,"d":2774672}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;);
2025-03-31T16:25:03.651026Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-31T16:24:55.397213Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=903;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=7037528;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=7037528;selected_rows=0;
2025-03-31T16:25:03.651115Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor;
2025-03-31T16:25:03.651335Z node 4 :TX_COLUMNSHARD_SCAN INFO: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager;
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldFailOnVariousErrors [GOOD]
Test command err:
2025-03-31T16:24:58.920453Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-03-31T16:24:58.920731Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-31T16:24:58.920911Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012be/r3tmp/tmpe3r2HK/pdisk_1.dat
2025-03-31T16:24:59.321872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480
2025-03-31T16:24:59.364922Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:24:59.408093Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:24:59.408222Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:24:59.419775Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-31T16:24:59.502173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480
2025-03-31T16:24:59.551121Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570]
2025-03-31T16:24:59.551424Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute
2025-03-31T16:24:59.603780Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete
2025-03-31T16:24:59.603922Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute
2025-03-31T16:24:59.605755Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888
2025-03-31T16:24:59.605841Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888
2025-03-31T16:24:59.605902Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888
2025-03-31T16:24:59.606302Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete
2025-03-31T16:24:59.606458Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute
2025-03-31T16:24:59.606547Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1
2025-03-31T16:24:59.617342Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete
2025-03-31T16:24:59.642285Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888
2025-03-31T16:24:59.642532Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params
2025-03-31T16:24:59.642659Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580]
2025-03-31T16:24:59.642696Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888
2025-03-31T16:24:59.642732Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme
2025-03-31T16:24:59.642770Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-03-31T16:24:59.643225Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888
2025-03-31T16:24:59.643341Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888
2025-03-31T16:24:59.643463Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888
2025-03-31T16:24:59.643504Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0
2025-03-31T16:24:59.643541Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0
2025-03-31T16:24:59.643601Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888
2025-03-31T16:24:59.643683Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0]
2025-03-31T16:24:59.643844Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888
2025-03-31T16:24:59.644120Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1
2025-03-31T16:24:59.644232Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888
2025-03-31T16:24:59.646459Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888
2025-03-31T16:24:59.657151Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888
2025-03-31T16:24:59.657282Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme
2025-03-31T16:24:59.806771Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0]
2025-03-31T16:24:59.816848Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 }
2025-03-31T16:24:59.816935Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-03-31T16:24:59.817652Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888
2025-03-31T16:24:59.817702Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1
2025-03-31T16:24:59.817759Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888
2025-03-31T16:24:59.818056Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0
2025-03-31T16:24:59.818215Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1
2025-03-31T16:24:59.818791Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888
2025-03-31T16:24:59.818854Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1
2025-03-31T16:24:59.820965Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2
2025-03-31T16:24:59.821495Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0
2025-03-31T16:24:59.823527Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0
2025-03-31T16:24:59.823575Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-03-31T16:24:59.824566Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000}
2025-03-31T16:24:59.824669Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888
2025-03-31T16:24:59.825797Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888
2025-03-31T16:24:59.825843Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888
2025-03-31T16:24:59.825909Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888
2025-03-31T16:24:59.825972Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:409:2404], exec latency: 0 ms, propose latency: 0 ms
2025-03-31T16:24:59.826021Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0
2025-03-31T16:24:59.826130Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-03-31T16:24:59.830226Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888
2025-03-31T16:24:59.832295Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000
2025-03-31T16:24:59.832498Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready
2025-03-31T16:24:59.832578Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888
2025-03-31T16:24:59.847831Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:24:59.847959Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:747:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:24:59.848035Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:24:59.853834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:24:59.860951Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:00.011228Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:00.014709Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:25:00.076026Z node 1 :TX_PROXY ERROR: Actor# [1:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:25:00.469565Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqpgagp5ddz8gkbfqj6jc2fx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTEyNjYwMDktMmRiOGEwYTctYWVmNzU4YzYtMzM3NDJhN2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:25:00.481975Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:856:2687], serverId# [1:857:2688], sessionId# [0:0:0] 2025-03-31T16:25:00.482499Z node 1 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888 2025-03-31T16:25:00.482719Z node 1 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row count=3 ... 24037893 2025-03-31T16:25:05.073937Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037893, clientId# [2:1249:3031], serverId# [2:1250:3032], sessionId# [0:0:0] 2025-03-31T16:25:05.074170Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037893, clientId# [2:1249:3031], serverId# [2:1250:3032], sessionId# [0:0:0] 2025-03-31T16:25:05.075577Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037893, clientId# [2:1254:3036], serverId# [2:1255:3037], sessionId# [0:0:0] 2025-03-31T16:25:05.075774Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037893, clientId# [2:1254:3036], serverId# [2:1255:3037], sessionId# [0:0:0] 2025-03-31T16:25:05.077162Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037893, clientId# [2:1259:3041], serverId# [2:1260:3042], sessionId# [0:0:0] 2025-03-31T16:25:05.077368Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037893, clientId# [2:1259:3041], serverId# [2:1260:3042], sessionId# [0:0:0] 2025-03-31T16:25:05.079648Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:25:05.083526Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037893 2025-03-31T16:25:05.083634Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-03-31T16:25:05.083711Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-03-31T16:25:05.083803Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037892 2025-03-31T16:25:05.083866Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:05.084478Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037891 
2025-03-31T16:25:05.121308Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037894 actor [2:1283:3061] 2025-03-31T16:25:05.121501Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:25:05.128061Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:25:05.128151Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:25:05.129130Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037894 2025-03-31T16:25:05.129191Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037894 2025-03-31T16:25:05.129233Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037894 2025-03-31T16:25:05.129429Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:25:05.129533Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:25:05.129575Z node 2 :TX_DATASHARD DEBUG: DataShard 72075186224037894 persisting started state actor id [2:1299:3061] in generation 1 2025-03-31T16:25:05.150447Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:25:05.150502Z node 2 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037894 2025-03-31T16:25:05.150587Z node 2 :TX_DATASHARD DEBUG: 72075186224037894 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:25:05.150650Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037894, actorId: [2:1301:3071] 2025-03-31T16:25:05.150670Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037894 2025-03-31T16:25:05.162527Z node 2 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037894, state: WaitScheme 2025-03-31T16:25:05.162596Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-03-31T16:25:05.162947Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037894 2025-03-31T16:25:05.163010Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037894 2025-03-31T16:25:05.163289Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037894 2025-03-31T16:25:05.163313Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037894 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:05.163335Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037894 TxInFly 0 2025-03-31T16:25:05.163359Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037894 2025-03-31T16:25:05.163395Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037894, clientId# [2:1280:3059], serverId# [2:1290:3065], sessionId# [0:0:0] 2025-03-31T16:25:05.163458Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037894 2025-03-31T16:25:05.163584Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037894 txId 281474976715663 ssId 72057594046644480 seqNo 2:7 2025-03-31T16:25:05.163643Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715663 at tablet 72075186224037894 2025-03-31T16:25:05.163925Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037894 2025-03-31T16:25:05.174475Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037894 2025-03-31T16:25:05.174562Z node 2 :TX_DATASHARD DEBUG: 
72075186224037894 not sending time cast registration request in state WaitScheme 2025-03-31T16:25:05.314098Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037894, clientId# [2:1311:3081], serverId# [2:1313:3083], sessionId# [0:0:0] 2025-03-31T16:25:05.314912Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715663 at step 4000 at tablet 72075186224037894 { Transactions { TxId: 281474976715663 AckTo { RawX1: 0 RawX2: 0 } } Step: 4000 MediatorID: 72057594046382081 TabletID: 72075186224037894 } 2025-03-31T16:25:05.314976Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-03-31T16:25:05.315595Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037894 2025-03-31T16:25:05.315651Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037894 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:25:05.315700Z node 2 :TX_DATASHARD DEBUG: Found ready operation [4000:281474976715663] in PlanQueue unit at 72075186224037894 2025-03-31T16:25:05.315968Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037894 loaded tx from db 4000:281474976715663 keys extracted: 0 2025-03-31T16:25:05.316131Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037894 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:25:05.316847Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037894 2025-03-31T16:25:05.316922Z node 2 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037894 tableId# [OwnerId: 72057594046644480, LocalPathId: 8] schema version# 1 2025-03-31T16:25:05.317341Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037894 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-31T16:25:05.317758Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037894 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:05.319673Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037894 time 3500 2025-03-31T16:25:05.319730Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-03-31T16:25:05.320337Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037894 step# 4000} 2025-03-31T16:25:05.320411Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037894 2025-03-31T16:25:05.322169Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037894 2025-03-31T16:25:05.322227Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037894 2025-03-31T16:25:05.322275Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037894 2025-03-31T16:25:05.322345Z node 2 :TX_DATASHARD DEBUG: Complete [4000 : 281474976715663] from 72075186224037894 at tablet 72075186224037894 send result to client [2:410:2405], exec latency: 0 ms, propose latency: 0 ms 2025-03-31T16:25:05.322400Z node 2 :TX_DATASHARD INFO: 72075186224037894 Sending notify to schemeshard 72057594046644480 txId 281474976715663 state Ready TxInFly 0 2025-03-31T16:25:05.322488Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-03-31T16:25:05.323252Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037893 2025-03-31T16:25:05.323384Z node 2 :TX_DATASHARD DEBUG: 
Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-03-31T16:25:05.323452Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037894 2025-03-31T16:25:05.323551Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-03-31T16:25:05.323995Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037891 2025-03-31T16:25:05.324091Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037892 2025-03-31T16:25:05.324422Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:05.325880Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037894 coordinator 72057594046316545 last step 0 next step 4000 2025-03-31T16:25:05.326493Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715663 datashard 72075186224037894 state Ready 2025-03-31T16:25:05.326558Z node 2 :TX_DATASHARD DEBUG: 72075186224037894 Got TEvSchemaChangedResult from SS at 72075186224037894 2025-03-31T16:25:05.332578Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037894, clientId# [2:1340:3104], serverId# [2:1341:3105], sessionId# [0:0:0] 2025-03-31T16:25:05.332860Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037894, clientId# [2:1340:3104], serverId# [2:1341:3105], sessionId# [0:0:0] 2025-03-31T16:25:05.334662Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037894, clientId# [2:1345:3109], serverId# [2:1346:3110], sessionId# [0:0:0] 2025-03-31T16:25:05.334909Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037894, clientId# [2:1345:3109], serverId# [2:1346:3110], sessionId# [0:0:0] 2025-03-31T16:25:05.337186Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037894, clientId# [2:1350:3114], serverId# [2:1351:3115], sessionId# [0:0:0] 2025-03-31T16:25:05.337543Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037894, clientId# [2:1350:3114], serverId# [2:1351:3115], sessionId# [0:0:0] >> DistributedEraseTests::DistributedEraseTxShouldFailOnVariousErrors ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ExportAfterFail [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=saved_at;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143438864.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; 
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123438864.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123437664.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; 2025-03-31T16:24:26.422284Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:24:26.511227Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:24:26.536130Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:24:26.536416Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:24:26.544331Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:24:26.544548Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:24:26.544786Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:24:26.544906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:24:26.545031Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:24:26.545166Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:24:26.545279Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:24:26.545405Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:24:26.545516Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:24:26.545656Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:24:26.545778Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:24:26.545881Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:24:26.575186Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:24:26.575343Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:24:26.575396Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:24:26.575580Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:24:26.575740Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:24:26.575807Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:24:26.575848Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:24:26.575943Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:24:26.576001Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:24:26.576042Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:24:26.576073Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:24:26.576254Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:24:26.576324Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:24:26.576368Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:24:26.576398Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:24:26.576485Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:24:26.576539Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:24:26.576583Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:24:26.576614Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:24:26.576681Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:24:26.576718Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:24:26.576745Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:24:26.576791Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:24:26.576831Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:24:26.576864Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:24:26.577255Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=46; 2025-03-31T16:24:26.577357Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=54; 2025-03-31T16:24:26.577433Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=37; 2025-03-31T16:24:26.577516Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=40; 2025-03-31T16:24:26.577676Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:24:26.577745Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:24:26.577788Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:24:26.578003Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:24:26.578053Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:24:26.578083Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:24:26.578245Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:24:26.578293Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:24:26.578325Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-31T16:24:26.578499Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description= ... NSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1271:3280];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:1;records_count:14867;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:25:06.454932Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1271:3280];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-31T16:25:06.454955Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1271:3280];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:201;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-03-31T16:25:06.455028Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1271:3280];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-31T16:25:06.455122Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1271:3280];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:1;records_count:14867;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:25:06.455146Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1271:3280];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-31T16:25:06.455207Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1271:3280];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:232;stage=ready 
result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;);columns=1;rows=14867; 2025-03-31T16:25:06.455236Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1271:3280];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:252;stage=data_format;batch_size=118936;num_rows=14867;batch_columns=timestamp; 2025-03-31T16:25:06.455323Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1271:3280];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:1270:3279];bytes=118936;rows=14867;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us]; Got TEvKqpCompute::TEvScanData [1:1271:3280]->[1:1270:3279] 2025-03-31T16:25:06.455389Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1271:3280];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:25:06.455455Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1271:3280];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:25:06.455516Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1271:3280];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:25:06.455588Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1271:3280];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-31T16:25:06.455643Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1271:3280];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:25:06.455697Z node 1 
:TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1271:3280];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:25:06.455721Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1271:3280] finished for tablet 9437184 2025-03-31T16:25:06.456043Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:1271:3280];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:416;event=scan_finish;compute_actor_id=[1:1270:3279];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.005},{"events":["l_bootstrap"],"t":0.014},{"events":["f_processing","f_task_result"],"t":0.017},{"events":["l_task_result"],"t":0.721},{"events":["f_ack"],"t":0.722},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.726}],"full":{"a":1743438305729108,"name":"_full_task","f":1743438305729108,"d_finished":0,"c":0,"l":1743438306455755,"d":726647},"events":[{"name":"bootstrap","f":1743438305729396,"d_finished":14475,"c":1,"l":1743438305743871,"d":14475},{"a":1743438306455576,"name":"ack","f":1743438306451264,"d_finished":3923,"c":7,"l":1743438306455533,"d":4102},{"a":1743438306455569,"name":"processing","f":1743438305746279,"d_finished":283744,"c":56,"l":1743438306455534,"d":283930},{"name":"ProduceResults","f":1743438305734407,"d_finished":14684,"c":65,"l":1743438306455710,"d":14684},{"a":1743438306455712,"name":"Finish","f":1743438306455712,"d_finished":0,"c":0,"l":1743438306455755,"d":43},{"name":"task_result","f":1743438305746306,"d_finished":278582,"c":49,"l":1743438306451031,"d":278582}],"id":"9437184::7"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:25:06.456090Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1271:3280];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:1270:3279];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-31T16:25:06.456416Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=3;SelfId=[1:1271:3280];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:371;event=scan_finished;compute_actor_id=[1:1270:3279];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.005},{"events":["l_bootstrap"],"t":0.014},{"events":["f_processing","f_task_result"],"t":0.017},{"events":["l_task_result"],"t":0.721},{"events":["f_ack"],"t":0.722},{"events":["l_ProduceResults","f_Finish"],"t":0.726},{"events":["l_ack","l_processing","l_Finish"],"t":0.727}],"full":{"a":1743438305729108,"name":"_full_task","f":1743438305729108,"d_finished":0,"c":0,"l":1743438306456124,"d":727016},"events":[{"name":"bootstrap","f":1743438305729396,"d_finished":14475,"c":1,"l":1743438305743871,"d":14475},{"a":1743438306455576,"name":"ack","f":1743438306451264,"d_finished":3923,"c":7,"l":1743438306455533,"d":4471},{"a":1743438306455569,"name":"processing","f":1743438305746279,"d_finished":283744,"c":56,"l":1743438306455534,"d":284299},{"name":"ProduceResults","f":1743438305734407,"d_finished":14684,"c":65,"l":1743438306455710,"d":14684},{"a":1743438306455712,"name":"Finish","f":1743438306455712,"d_finished":0,"c":0,"l":1743438306456124,"d":412},{"name":"task_result","f":1743438305746306,"d_finished":278582,"c":49,"l":1743438306451031,"d":278582}],"id":"9437184::7"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1271:3280]->[1:1270:3279] 2025-03-31T16:25:06.456473Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1271:3280];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-31T16:25:05.728487Z;index_granules=0;index_portions=7;index_batches=1260;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=10402524;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=10402524;selected_rows=0; 2025-03-31T16:25:06.456498Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1271:3280];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-31T16:25:06.456663Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:1271:3280];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 160000/10402332 160000/10402332 160000/10402332 160000/10402524 >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Microseconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgDate >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberNanoSeconds [GOOD] >> 
EraseRowsTests::ConditionalEraseRowsShouldEraseOnDatetime64 [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgTimestamp [GOOD] |91.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberNanoSeconds [GOOD] Test command err: 2025-03-31T16:24:57.014534Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:24:57.014764Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:24:57.014934Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012fc/r3tmp/tmpU3pJlh/pdisk_1.dat 2025-03-31T16:24:57.473529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:24:57.522516Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:24:57.562261Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:24:57.562385Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:24:57.574717Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:24:57.668941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:24:57.731654Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-31T16:24:57.731940Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:24:57.782441Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:24:57.782594Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:24:57.784532Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:24:57.784620Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:24:57.784704Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:24:57.785122Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:24:57.785312Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:24:57.785408Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-31T16:24:57.796320Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:24:57.826670Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T16:24:57.826889Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:24:57.827007Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-31T16:24:57.827056Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:24:57.827104Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T16:24:57.827139Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:24:57.827533Z node 1 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T16:24:57.827611Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T16:24:57.827719Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:24:57.827753Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:24:57.827800Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:24:57.827854Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:24:57.827915Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-31T16:24:57.828065Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:24:57.828305Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T16:24:57.828396Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T16:24:57.830151Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:24:57.840778Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:24:57.840882Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-31T16:24:57.990914Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-31T16:24:57.996454Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T16:24:57.996560Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:24:57.997261Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:24:57.997317Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:24:57.997402Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-31T16:24:57.997723Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-31T16:24:57.997915Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:24:57.998582Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:24:57.998668Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-31T16:24:58.000998Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-31T16:24:58.001498Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-03-31T16:24:58.003800Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-03-31T16:24:58.003861Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:24:58.004882Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-03-31T16:24:58.004974Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:24:58.005887Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:24:58.005930Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:24:58.005988Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-31T16:24:58.006056Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-31T16:24:58.006142Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-31T16:24:58.006255Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:24:58.010363Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:24:58.012483Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-03-31T16:24:58.012716Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-03-31T16:24:58.012786Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-31T16:24:58.034090Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:24:58.034235Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:747:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:24:58.034313Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:24:58.042103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:24:58.047676Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:24:58.200730Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:24:58.204104Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:24:58.266998Z node 1 :TX_PROXY ERROR: Actor# [1:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:24:58.910315Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqpgaexf47efvyzfd14y57b1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjBlNzIyYzItODRlZmM0NWItNDFkNGQyZTItMTU2ZWNhODg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:24:58.951873Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:856:2687], serverId# [1:857:2688], sessionId# [0:0:0] 2025-03-31T16:24:58.954351Z node 1 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888 2025-03-31T16:24:58.954575Z node 1 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row count=7 ... 025-03-31T16:25:06.714373Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:06.714413Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:25:06.714443Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:06.714733Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:662:2567], serverId# [3:673:2574], sessionId# [0:0:0] 2025-03-31T16:25:06.714863Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:25:06.715036Z node 3 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T16:25:06.715123Z node 3 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T16:25:06.716514Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:06.727137Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:25:06.727250Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-31T16:25:06.875852Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:703:2593], serverId# [3:705:2595], sessionId# [0:0:0] 2025-03-31T16:25:06.876424Z node 3 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T16:25:06.876484Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:06.877394Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:06.877453Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:25:06.877531Z node 3 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-31T16:25:06.877830Z node 3 :TX_DATASHARD DEBUG: 
LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-31T16:25:06.878017Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:25:06.878851Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:06.878932Z node 3 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-31T16:25:06.879463Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-31T16:25:06.879928Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:06.881589Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-03-31T16:25:06.881657Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:06.882634Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-03-31T16:25:06.882736Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:06.883875Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:06.883924Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:25:06.883978Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-31T16:25:06.884056Z node 3 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-31T16:25:06.884116Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-31T16:25:06.884218Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:06.885244Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:06.887600Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-03-31T16:25:06.887687Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-31T16:25:06.888803Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-03-31T16:25:06.897684Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:06.897808Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:747:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:06.897886Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:06.903805Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:25:06.910951Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:07.061523Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:07.064971Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:25:07.100049Z node 3 :TX_PROXY ERROR: Actor# [3:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:25:07.192914Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqpgaqjfbycaqn3jh7ccqsg7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YmVjYjIxOC0yYzU2ODgyMi1kNzYyMjQzNS00N2NjNjhhNw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:25:07.196108Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:856:2687], serverId# [3:857:2688], sessionId# [0:0:0] 2025-03-31T16:25:07.196532Z node 3 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888 2025-03-31T16:25:07.196722Z node 3 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row count=4 2025-03-31T16:25:07.207845Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:07.213203Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:864:2694], serverId# [3:865:2695], sessionId# [0:0:0] 2025-03-31T16:25:07.214356Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-03-31T16:25:07.225844Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-03-31T16:25:07.225931Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:07.226185Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-03-31T16:25:07.226233Z node 3 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 3, at: 72075186224037888 2025-03-31T16:25:07.226587Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:07.226658Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:07.226719Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:25:07.226790Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:07.226892Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [3:864:2694], serverId# [3:865:2695], sessionId# [0:0:0] 2025-03-31T16:25:07.227997Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:25:07.228384Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:25:07.228638Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:07.228706Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-03-31T16:25:07.228758Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 2025-03-31T16:25:07.228991Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-03-31T16:25:07.229067Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 
2025-03-31T16:25:07.229762Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2025-03-31T16:25:07.230011Z node 3 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 37, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-03-31T16:25:07.230195Z node 3 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2025-03-31T16:25:07.230262Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2025-03-31T16:25:07.284912Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-03-31T16:25:07.284983Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715661, at: 72075186224037888 2025-03-31T16:25:07.285173Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:07.285216Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-03-31T16:25:07.285259Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2025-03-31T16:25:07.285400Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:07.285484Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:07.285541Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 >> TSchemeShardViewTest::AsyncDropSameView >> TSchemeShardViewTest::AsyncCreateDifferentViews >> TSchemeShardViewTest::ReadOnlyMode >> TSchemeShardViewTest::AsyncCreateSameView |91.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDatetime64 [GOOD] Test command err: 2025-03-31T16:24:57.014123Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:24:57.014430Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:24:57.014600Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012dc/r3tmp/tmpXxjsSz/pdisk_1.dat 2025-03-31T16:24:57.470027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:24:57.517499Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:24:57.562212Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:24:57.562319Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:24:57.574693Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:24:57.668858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:24:57.732246Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-31T16:24:57.732543Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:24:57.784602Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:24:57.784740Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:24:57.786755Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:24:57.786843Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:24:57.786907Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:24:57.787345Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:24:57.787513Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:24:57.787597Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-31T16:24:57.798437Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:24:57.835734Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T16:24:57.835960Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:24:57.836092Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-31T16:24:57.836133Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:24:57.836175Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T16:24:57.836215Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:24:57.836666Z node 1 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T16:24:57.836763Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T16:24:57.836892Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:24:57.836939Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:24:57.837012Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:24:57.837091Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:24:57.837182Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-31T16:24:57.837353Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:24:57.837701Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T16:24:57.837816Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T16:24:57.840062Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:24:57.850790Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:24:57.850932Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-31T16:24:58.000837Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-31T16:24:58.005219Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T16:24:58.005296Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:24:58.005888Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:24:58.005936Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:24:58.006025Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-31T16:24:58.006286Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-31T16:24:58.006458Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:24:58.006982Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:24:58.007062Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-31T16:24:58.009025Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-31T16:24:58.009488Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-03-31T16:24:58.011397Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-03-31T16:24:58.011451Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:24:58.012301Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-03-31T16:24:58.012389Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:24:58.013209Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:24:58.013251Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:24:58.013310Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-31T16:24:58.013369Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-31T16:24:58.013462Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-31T16:24:58.013633Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:24:58.017499Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:24:58.019353Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-03-31T16:24:58.019681Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-03-31T16:24:58.019749Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-31T16:24:58.034063Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:24:58.034192Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:747:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:24:58.034294Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:24:58.041984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:24:58.048389Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:24:58.199909Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:24:58.203473Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:24:58.298214Z node 1 :TX_PROXY ERROR: Actor# [1:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:24:58.910267Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqpgaexf5btv22748fy2sw2t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmQ3MzA0OWUtMmZlMmI4NDYtOWY1NTIwNWItMTkxYmE1MDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:24:58.951997Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:856:2687], serverId# [1:857:2688], sessionId# [0:0:0] 2025-03-31T16:24:58.954398Z node 1 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888 2025-03-31T16:24:58.954641Z node 1 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row count=8 ... 025-03-31T16:25:07.129875Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:07.129922Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:25:07.129973Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:07.130408Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:662:2567], serverId# [3:673:2574], sessionId# [0:0:0] 2025-03-31T16:25:07.130575Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:25:07.130836Z node 3 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T16:25:07.130936Z node 3 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T16:25:07.132967Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:07.143929Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:25:07.144106Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-31T16:25:07.295323Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:703:2593], serverId# [3:705:2595], sessionId# [0:0:0] 2025-03-31T16:25:07.295854Z node 3 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T16:25:07.295909Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:07.296650Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:07.296713Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:25:07.296764Z node 3 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-31T16:25:07.297020Z node 3 :TX_DATASHARD DEBUG: 
LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-31T16:25:07.297197Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:25:07.298007Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:07.298080Z node 3 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-31T16:25:07.298561Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-31T16:25:07.298960Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:07.300276Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-03-31T16:25:07.300334Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:07.301118Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-03-31T16:25:07.301192Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:07.302089Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:07.302132Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:25:07.302175Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-31T16:25:07.302247Z node 3 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-31T16:25:07.302297Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-31T16:25:07.302389Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:07.303279Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:07.305561Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-03-31T16:25:07.305642Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-31T16:25:07.306484Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-03-31T16:25:07.313831Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:07.313950Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:747:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:07.314042Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:07.318847Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:25:07.324855Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:07.472256Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:07.474922Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:25:07.509443Z node 3 :TX_PROXY ERROR: Actor# [3:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:25:07.583047Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqpgaqzg267bw3dep2m2mkej, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NGIwZGE5NjctYmEwZGMyOWItY2Y1ODA3NWMtYTdmZmQxMjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:25:07.585473Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:856:2687], serverId# [3:857:2688], sessionId# [0:0:0] 2025-03-31T16:25:07.585818Z node 3 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888 2025-03-31T16:25:07.585956Z node 3 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row count=5 2025-03-31T16:25:07.596836Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:07.601180Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:864:2694], serverId# [3:865:2695], sessionId# [0:0:0] 2025-03-31T16:25:07.602167Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-03-31T16:25:07.613362Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-03-31T16:25:07.613470Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:07.613705Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-03-31T16:25:07.613755Z node 3 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 3, at: 72075186224037888 2025-03-31T16:25:07.614007Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:07.614048Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:07.614097Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:25:07.614156Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:07.614227Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [3:864:2694], serverId# [3:865:2695], sessionId# [0:0:0] 2025-03-31T16:25:07.615124Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:25:07.615467Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:25:07.615643Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:07.615689Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-03-31T16:25:07.615734Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 2025-03-31T16:25:07.615941Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-03-31T16:25:07.615996Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 
2025-03-31T16:25:07.616455Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2025-03-31T16:25:07.616667Z node 3 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-03-31T16:25:07.616805Z node 3 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2025-03-31T16:25:07.616863Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2025-03-31T16:25:07.659549Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-03-31T16:25:07.659631Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715661, at: 72075186224037888 2025-03-31T16:25:07.659842Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:07.659886Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-03-31T16:25:07.659933Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2025-03-31T16:25:07.660085Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:07.660159Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:07.660210Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnSplit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgTimestamp [GOOD] Test command err: 2025-03-31T16:24:57.046741Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:24:57.046961Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:24:57.047127Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012e7/r3tmp/tmpdtkNqa/pdisk_1.dat 2025-03-31T16:24:57.469976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:24:57.521439Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:24:57.562295Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:24:57.562434Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:24:57.574786Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:24:57.668860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:24:57.734433Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-31T16:24:57.734668Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:24:57.780634Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:24:57.780759Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:24:57.782559Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:24:57.782653Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:24:57.782728Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:24:57.783127Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:24:57.783269Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:24:57.783362Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-31T16:24:57.794063Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:24:57.823890Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T16:24:57.824090Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:24:57.824204Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-31T16:24:57.824243Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:24:57.824277Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T16:24:57.824310Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:24:57.824728Z node 1 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T16:24:57.824848Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T16:24:57.824972Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:24:57.825010Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:24:57.825059Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:24:57.825114Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:24:57.825200Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-31T16:24:57.825406Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:24:57.825676Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T16:24:57.825766Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T16:24:57.827821Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:24:57.838473Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:24:57.838577Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-31T16:24:57.987925Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-31T16:24:58.000036Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T16:24:58.000119Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:24:58.000735Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:24:58.000790Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:24:58.000892Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-31T16:24:58.001199Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-31T16:24:58.001371Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:24:58.001985Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:24:58.002080Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-31T16:24:58.004263Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-31T16:24:58.004729Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-03-31T16:24:58.006860Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-03-31T16:24:58.006909Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:24:58.007801Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-03-31T16:24:58.007891Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:24:58.009279Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:24:58.009326Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:24:58.009390Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-31T16:24:58.009472Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-31T16:24:58.009543Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-31T16:24:58.009656Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:24:58.013956Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:24:58.016588Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-03-31T16:24:58.016838Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-03-31T16:24:58.016916Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-31T16:24:58.034046Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:24:58.034216Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:747:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:24:58.034293Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:24:58.041990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:24:58.048473Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:24:58.199601Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:24:58.202773Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:24:58.276227Z node 1 :TX_PROXY ERROR: Actor# [1:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:24:58.910242Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqpgaexfck6f0n56mk1kqrz2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGIwYTgzMzEtMzgwNDNiMDMtMTQ0ZTk5MTQtMTI5OGEyNDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:24:58.951945Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:856:2687], serverId# [1:857:2688], sessionId# [0:0:0] 2025-03-31T16:24:58.954367Z node 1 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888 2025-03-31T16:24:58.954600Z node 1 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row count=4 ... 025-03-31T16:25:07.572375Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:07.572419Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:25:07.572462Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:07.572809Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:662:2567], serverId# [3:673:2574], sessionId# [0:0:0] 2025-03-31T16:25:07.572924Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:25:07.573096Z node 3 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T16:25:07.573169Z node 3 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T16:25:07.574627Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:07.585290Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:25:07.585393Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-31T16:25:07.734713Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:703:2593], serverId# [3:705:2595], sessionId# [0:0:0] 2025-03-31T16:25:07.735262Z node 3 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T16:25:07.735336Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:07.736336Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:07.736383Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:25:07.736424Z node 3 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-31T16:25:07.736639Z node 3 :TX_DATASHARD DEBUG: 
LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-31T16:25:07.736751Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:25:07.737593Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:07.737685Z node 3 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-31T16:25:07.738173Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-31T16:25:07.738540Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:07.739808Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-03-31T16:25:07.739851Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:07.740501Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-03-31T16:25:07.740569Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:07.741487Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:07.741522Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:25:07.741580Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-31T16:25:07.741647Z node 3 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-31T16:25:07.741695Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-31T16:25:07.741780Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:07.742525Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:07.744257Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-03-31T16:25:07.744318Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-31T16:25:07.745116Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-03-31T16:25:07.752643Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:07.752725Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:747:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:07.752797Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:07.757081Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:25:07.763122Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:07.910653Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:07.913191Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:25:07.947251Z node 3 :TX_PROXY ERROR: Actor# [3:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:25:08.046081Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqpgard7ag71ttnkjh4g95m2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=OTgwMTlkNjEtNDFmMDE4ZWEtNjFkYTFkYjItYTM2ZmU4NTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:25:08.051203Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:856:2687], serverId# [3:857:2688], sessionId# [0:0:0] 2025-03-31T16:25:08.051548Z node 3 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888 2025-03-31T16:25:08.051684Z node 3 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row count=5 2025-03-31T16:25:08.062479Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:08.066613Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:864:2694], serverId# [3:865:2695], sessionId# [0:0:0] 2025-03-31T16:25:08.067724Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-03-31T16:25:08.078949Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-03-31T16:25:08.079028Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:08.079292Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-03-31T16:25:08.079335Z node 3 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 3, at: 72075186224037888 2025-03-31T16:25:08.079588Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:08.079631Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:08.079689Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:25:08.079746Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:08.079818Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [3:864:2694], serverId# [3:865:2695], sessionId# [0:0:0] 2025-03-31T16:25:08.080636Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:25:08.080938Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:25:08.081082Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:08.081118Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-03-31T16:25:08.081156Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 2025-03-31T16:25:08.081380Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-03-31T16:25:08.081437Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 
2025-03-31T16:25:08.081971Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2025-03-31T16:25:08.082224Z node 3 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 48, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-03-31T16:25:08.082395Z node 3 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2025-03-31T16:25:08.082441Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2025-03-31T16:25:08.119495Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-03-31T16:25:08.119551Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715661, at: 72075186224037888 2025-03-31T16:25:08.119688Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:08.119718Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-03-31T16:25:08.119752Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2025-03-31T16:25:08.119857Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:08.119903Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:08.119938Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64Seconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64NanoSeconds >> TSchemeShardViewTest::AsyncCreateDifferentViews [GOOD] >> TSchemeShardViewTest::AsyncDropSameView [GOOD] >> TSchemeShardViewTest::CreateView >> TSchemeShardViewTest::AsyncCreateSameView [GOOD] >> TSchemeShardViewTest::DropView ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnSplit [GOOD] Test command err: 2025-03-31T16:24:57.014626Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:24:57.014891Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:24:57.015019Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012d2/r3tmp/tmppsk0kp/pdisk_1.dat 2025-03-31T16:24:57.490036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:24:57.530072Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:24:57.569946Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:24:57.570077Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:24:57.581953Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:24:57.668950Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:24:57.747770Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:687:2586] 2025-03-31T16:24:57.748118Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:24:57.816565Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:24:57.816841Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:24:57.819038Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:24:57.819175Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:24:57.819256Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:24:57.819723Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:24:57.820179Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:24:57.820277Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:718:2586] in generation 1 2025-03-31T16:24:57.822401Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:691:2588] 2025-03-31T16:24:57.822675Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:24:57.835560Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:24:57.835747Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:24:57.837490Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-03-31T16:24:57.837561Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-03-31T16:24:57.837642Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-03-31T16:24:57.837980Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:24:57.838290Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:24:57.838359Z node 1 :TX_DATASHARD DEBUG: DataShard 
72075186224037889 persisting started state actor id [1:731:2588] in generation 1 2025-03-31T16:24:57.840063Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:695:2590] 2025-03-31T16:24:57.840299Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:24:57.852557Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:24:57.852685Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:24:57.854360Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2025-03-31T16:24:57.854443Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037890 2025-03-31T16:24:57.854518Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037890 2025-03-31T16:24:57.854859Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:24:57.855006Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:24:57.855104Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037890 persisting started state actor id [1:739:2590] in generation 1 2025-03-31T16:24:57.866390Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:24:57.906347Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-03-31T16:24:57.906594Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:24:57.906764Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:743:2617] 2025-03-31T16:24:57.906810Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-03-31T16:24:57.906854Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-03-31T16:24:57.906899Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-31T16:24:57.907385Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:24:57.907434Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037890 2025-03-31T16:24:57.907519Z node 1 :TX_DATASHARD DEBUG: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:24:57.907598Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037890, actorId: [1:744:2618] 2025-03-31T16:24:57.907633Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890 2025-03-31T16:24:57.907660Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2025-03-31T16:24:57.907690Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-03-31T16:24:57.908072Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:24:57.908117Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T16:24:57.908194Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:24:57.908273Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:745:2619] 2025-03-31T16:24:57.908300Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:24:57.908350Z node 1 :TX_DATASHARD INFO: 
Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T16:24:57.908386Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:24:57.908460Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-03-31T16:24:57.908581Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-03-31T16:24:57.908853Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-31T16:24:57.908910Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:24:57.908967Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-03-31T16:24:57.909034Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-03-31T16:24:57.909392Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:679:2582], serverId# [1:713:2602], sessionId# [0:0:0] 2025-03-31T16:24:57.909491Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037890 2025-03-31T16:24:57.909595Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037890 2025-03-31T16:24:57.909728Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T16:24:57.909793Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T16:24:57.909932Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-31T16:24:57.910300Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-03-31T16:24:57.910433Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-03-31T16:24:57.911004Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-03-31T16:24:57.911098Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:24:57.911148Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037890 TxInFly 0 2025-03-31T16:24:57.911194Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-03-31T16:24:57.911796Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:24:57.911838Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:24:57.911873Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:24:57.911938Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:24:57.912066Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:678:2581], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-31T16:24:57.912472Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:24:57.912682Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T16:24:57.912762Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T16:24:57.915498Z node 1 :TX_DATASHARD DEBUG: Discovered 
subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:24:57.915615Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-03-31T16:24:57.926696Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:24:57.926824Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-31T16:24:57.927610Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-03-31T16:24:57.927686Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-03-31T16:24:57.971879Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [1:680:2583], serverId# [1:762:2629], sessionId# [0:0:0] 2025-03-31T16:24:57.972106Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037890 2025-03-31T16:24:57.972379Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037890 txId 2 ... 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037891 2025-03-31T16:25:08.900649Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037892 2025-03-31T16:25:08.900804Z node 3 :TX_DATASHARD DEBUG: 72075186224037891 ack init split/merge destination OpId 281474976715663 2025-03-31T16:25:08.900858Z node 3 :TX_DATASHARD DEBUG: 72075186224037891 not sending time cast registration request in state SplitDstReceivingSnapshot 2025-03-31T16:25:08.901332Z node 3 :TX_DATASHARD DEBUG: 72075186224037892 ack init split/merge destination OpId 281474976715663 2025-03-31T16:25:08.901357Z node 3 :TX_DATASHARD DEBUG: 72075186224037892 not sending time cast registration request in state SplitDstReceivingSnapshot 2025-03-31T16:25:08.902258Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 received split OpId 281474976715663 at state Ready 2025-03-31T16:25:08.913080Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 starting snapshot for split OpId 281474976715663 2025-03-31T16:25:08.913416Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 CancelReadIterators#0 2025-03-31T16:25:08.918256Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037889, table# 3, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-31T16:25:08.918316Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037889, table# 3, finished edge# 0, front# 0 2025-03-31T16:25:08.919218Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037889, table# 4, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-31T16:25:08.919260Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037889, table# 4, finished edge# 0, front# 0 2025-03-31T16:25:08.920784Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037889, table# 7, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-31T16:25:08.920820Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037889, table# 7, finished edge# 0, front# 0 2025-03-31T16:25:08.947272Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037889, table# 8, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-31T16:25:08.947330Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 
72075186224037889, table# 8, finished edge# 0, front# 0 2025-03-31T16:25:08.948425Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037889, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-31T16:25:08.948474Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037889, table# 1001, finished edge# 0, front# 0 2025-03-31T16:25:08.948960Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 snapshot complete for split OpId 281474976715663 2025-03-31T16:25:08.949212Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 3 snapshot size is 12 total snapshot size is 12 for split OpId 281474976715663 2025-03-31T16:25:08.949281Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 4 snapshot size is 12 total snapshot size is 24 for split OpId 281474976715663 2025-03-31T16:25:08.949314Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 7 snapshot size is 12 total snapshot size is 36 for split OpId 281474976715663 2025-03-31T16:25:08.949349Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 8 snapshot size is 12 total snapshot size is 48 for split OpId 281474976715663 2025-03-31T16:25:08.949634Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 1001 snapshot size is 146 total snapshot size is 194 for split OpId 281474976715663 2025-03-31T16:25:08.949835Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 3 snapshot size is 12 total snapshot size is 206 for split OpId 281474976715663 2025-03-31T16:25:08.949871Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 4 snapshot size is 12 total snapshot size is 218 for split OpId 281474976715663 2025-03-31T16:25:08.949901Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 7 snapshot size is 12 total snapshot size is 230 for split OpId 281474976715663 2025-03-31T16:25:08.949930Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 8 snapshot size is 12 total snapshot size is 242 for split OpId 281474976715663 2025-03-31T16:25:08.950034Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 1001 snapshot size is 155 total snapshot size is 397 for split OpId 281474976715663 2025-03-31T16:25:08.950743Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 Sending snapshots from src for split OpId 281474976715663 2025-03-31T16:25:08.950953Z node 3 :TX_DATASHARD DEBUG: Sending snapshot for split opId 281474976715663 from datashard 72075186224037889 to datashard 72075186224037892 size 221 2025-03-31T16:25:08.951077Z node 3 :TX_DATASHARD DEBUG: Sending snapshot for split opId 281474976715663 from datashard 72075186224037889 to datashard 72075186224037891 size 215 2025-03-31T16:25:08.951440Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037891, clientId# [3:1234:2944], serverId# [3:1235:2945], sessionId# [0:0:0] 2025-03-31T16:25:08.951482Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037892, clientId# [3:1233:2943], serverId# [3:1236:2946], sessionId# [0:0:0] 2025-03-31T16:25:08.951645Z node 3 :TX_DATASHARD DEBUG: 72075186224037891 Received snapshot for split/merge TxId 281474976715663 from tabletId 72075186224037889 2025-03-31T16:25:08.954236Z node 3 :TX_DATASHARD DEBUG: 72075186224037892 Received snapshot for split/merge TxId 281474976715663 from tabletId 72075186224037889 2025-03-31T16:25:08.956049Z node 3 :TX_DATASHARD DEBUG: 72075186224037891 ack snapshot OpId 281474976715663 
2025-03-31T16:25:08.956176Z node 3 :TX_DATASHARD INFO: Switched to work state Ready tabletId 72075186224037891 2025-03-31T16:25:08.956274Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037891 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-31T16:25:08.956367Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037891 2025-03-31T16:25:08.956429Z node 3 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037891, actorId: [3:1239:2949] 2025-03-31T16:25:08.956458Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037891 2025-03-31T16:25:08.956496Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037891 2025-03-31T16:25:08.956526Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-03-31T16:25:08.956630Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 Received snapshot Ack from dst 72075186224037891 for split OpId 281474976715663 2025-03-31T16:25:08.957165Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037891 time 2000 2025-03-31T16:25:08.957205Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-03-31T16:25:08.957396Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037891 2025-03-31T16:25:08.957421Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:08.957447Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037891 TxInFly 0 2025-03-31T16:25:08.957490Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037891 2025-03-31T16:25:08.957615Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037891, clientId# [3:1234:2944], serverId# [3:1235:2945], sessionId# [0:0:0] 2025-03-31T16:25:08.957673Z node 3 :TX_DATASHARD DEBUG: 72075186224037892 ack snapshot OpId 281474976715663 2025-03-31T16:25:08.957741Z node 3 :TX_DATASHARD INFO: Switched to work state Ready tabletId 72075186224037892 2025-03-31T16:25:08.957791Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037892 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-31T16:25:08.957833Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037892 2025-03-31T16:25:08.957864Z node 3 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037892, actorId: [3:1241:2951] 2025-03-31T16:25:08.957880Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037892 2025-03-31T16:25:08.957909Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037892 2025-03-31T16:25:08.957927Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-03-31T16:25:08.958005Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 Received snapshot Ack from dst 72075186224037892 for split OpId 281474976715663 2025-03-31T16:25:08.958446Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037892 2025-03-31T16:25:08.958471Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:08.958490Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037892 TxInFly 0 2025-03-31T16:25:08.958508Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 
72075186224037892 2025-03-31T16:25:08.958650Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037891 coordinator 72057594046316545 last step 1500 next step 2000 2025-03-31T16:25:08.958693Z node 3 :TX_DATASHARD DEBUG: CheckMediatorStateRestored at 72075186224037891: waitStep# 2000 readStep# 2000 observedStep# 2000 2025-03-31T16:25:08.958777Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037892, clientId# [3:1233:2943], serverId# [3:1236:2946], sessionId# [0:0:0] 2025-03-31T16:25:08.958809Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037892 time 2000 2025-03-31T16:25:08.958828Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-03-31T16:25:08.959026Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037892 coordinator 72057594046316545 last step 1500 next step 2000 2025-03-31T16:25:08.959047Z node 3 :TX_DATASHARD DEBUG: CheckMediatorStateRestored at 72075186224037892: waitStep# 2000 readStep# 2000 observedStep# 2000 2025-03-31T16:25:08.980527Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 ack split to schemeshard 281474976715663 2025-03-31T16:25:08.984090Z node 3 :TX_DATASHARD DEBUG: Got TEvSplitPartitioningChanged: opId: 281474976715663, at datashard: 72075186224037889, state: SplitSrcWaitForPartitioningChanged 2025-03-31T16:25:08.987025Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037889 2025-03-31T16:25:08.987119Z node 3 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 4, at: 72075186224037889 2025-03-31T16:25:08.987513Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-31T16:25:08.987563Z node 3 :TX_DATASHARD INFO: Progress tx at non-ready tablet 72075186224037889 state 5 2025-03-31T16:25:08.987742Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037889, clientId# [3:1102:2841], serverId# [3:1103:2842], sessionId# [0:0:0] 2025-03-31T16:25:08.987910Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 ack split partitioning changed to schemeshard 281474976715663 2025-03-31T16:25:08.988000Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 in PreOffline state HasSharedBobs: 1 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-03-31T16:25:08.988056Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 >> TSchemeShardViewTest::ReadOnlyMode [GOOD] >> TSchemeShardViewTest::EmptyQueryText ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::AsyncCreateDifferentViews [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:25:09.665205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:25:09.665363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 
1.000000s, InflightLimit# 10 2025-03-31T16:25:09.665424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:25:09.665488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:25:09.665545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:25:09.665586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:25:09.665684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:25:09.665888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:25:09.666278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:25:09.747840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:25:09.747893Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:25:09.751831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:25:09.751907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:25:09.752034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:25:09.756402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:25:09.756592Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:25:09.757237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:25:09.757439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:25:09.758961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:25:09.759865Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:25:09.759916Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:25:09.760042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:25:09.760076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:25:09.760109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:25:09.760218Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:25:09.765814Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:25:09.868164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: 
"pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:25:09.868396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:25:09.868613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:25:09.868813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:25:09.868874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:25:09.871256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:25:09.871425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:25:09.871619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:25:09.871690Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:25:09.871731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:25:09.871775Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:25:09.873577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:25:09.873624Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:25:09.873666Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:25:09.875449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:25:09.875503Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:25:09.875568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:25:09.875640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:25:09.879320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:25:09.881283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:25:09.881505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to 
tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:25:09.882485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:25:09.882610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:25:09.882671Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:25:09.883861Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:25:09.883930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:25:09.884152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:25:09.884297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:25:09.887595Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:25:09.887690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:25:09.887873Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:25:09.887917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:25:09.888244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:25:09.888299Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:25:09.888389Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:25:09.888421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:25:09.888472Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:25:09.888511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:25:09.888548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:25:09.888587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:25:09.888622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:25:09.888667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:25:09.888732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:25:09.888769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:25:09.888805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 
72057594046678944, LocalPathId: 1], 3 2025-03-31T16:25:09.890986Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:25:09.891133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:25:09.891175Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 3-31T16:25:09.975279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-03-31T16:25:09.975312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2025-03-31T16:25:09.975338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-03-31T16:25:09.975775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-03-31T16:25:09.975809Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:25:09.975832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-03-31T16:25:09.975858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 3 2025-03-31T16:25:09.976369Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:25:09.976431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:25:09.976453Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-03-31T16:25:09.976476Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-03-31T16:25:09.976499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-31T16:25:09.977121Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:25:09.977181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:25:09.977203Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-03-31T16:25:09.977222Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-03-31T16:25:09.977240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for 
pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-03-31T16:25:09.977293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-03-31T16:25:09.979760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-31T16:25:09.980071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 101 2025-03-31T16:25:09.980345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-03-31T16:25:09.980376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2025-03-31T16:25:09.981597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-03-31T16:25:09.981626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2025-03-31T16:25:09.981706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-03-31T16:25:09.981730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-03-31T16:25:09.982344Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-03-31T16:25:09.982517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-31T16:25:09.982561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:332:2323] 2025-03-31T16:25:09.982792Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-03-31T16:25:09.982908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-31T16:25:09.982949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:332:2323] 2025-03-31T16:25:09.983068Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-03-31T16:25:09.983170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-03-31T16:25:09.983195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:332:2323] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 2025-03-31T16:25:09.983718Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDir" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:25:09.983976Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SomeDir" took 231us result status StatusSuccess 
2025-03-31T16:25:09.985513Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDir" PathDescription { Self { Name: "SomeDir" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: true } Children { Name: "FirstView" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "SecondView" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 103 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:25:09.987236Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDir/FirstView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:25:09.987472Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SomeDir/FirstView" took 251us result status StatusSuccess 2025-03-31T16:25:09.987811Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDir/FirstView" PathDescription { Self { Name: "FirstView" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ViewDescription { Name: "FirstView" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 QueryText: "First query" CapturedContext { } } } PathId: 
3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:25:09.988459Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDir/SecondView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:25:09.988633Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SomeDir/SecondView" took 188us result status StatusSuccess 2025-03-31T16:25:09.988916Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDir/SecondView" PathDescription { Self { Name: "SecondView" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 103 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ViewDescription { Name: "SecondView" PathId { OwnerId: 72057594046678944 LocalId: 4 } Version: 1 QueryText: "Second query" CapturedContext { } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TColumnShardTestSchema::HotTiersTtl [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::AsyncCreateSameView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:25:09.651361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:25:09.651482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:25:09.651525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:25:09.651563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:25:09.652192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:25:09.652258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:25:09.652342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:25:09.652424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:25:09.653542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:25:09.744771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:25:09.744823Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:25:09.749450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:25:09.749581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:25:09.749764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:25:09.754999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:25:09.755275Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:25:09.755918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:25:09.756155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:25:09.758256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:25:09.759442Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:25:09.759507Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:25:09.759686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:25:09.759754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:25:09.759807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:25:09.759982Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:25:09.766680Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:25:09.914019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:25:09.914305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:25:09.914555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:25:09.914815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:25:09.914874Z node 1 
:FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:25:09.917326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:25:09.917461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:25:09.917661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:25:09.917728Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:25:09.917763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:25:09.917797Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:25:09.919619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:25:09.919670Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:25:09.919719Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:25:09.921531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:25:09.921575Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:25:09.921626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:25:09.921687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:25:09.925509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:25:09.927236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:25:09.927453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:25:09.928504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:25:09.928640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:25:09.928699Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, 
operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:25:09.928994Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:25:09.929059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:25:09.929241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:25:09.929360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:25:09.931687Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:25:09.931755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:25:09.931945Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:25:09.931987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:25:09.932203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:25:09.932253Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:25:09.932350Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:25:09.932380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:25:09.932432Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:25:09.932465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:25:09.932496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:25:09.932531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:25:09.932563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:25:09.932610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:25:09.932671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:25:09.932707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:25:09.932738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:25:09.934629Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:25:09.934755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:25:09.934822Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
athStateCreate)" TxId: 103 SchemeshardId: 72057594046678944 PathId: 2 PathCreateTxId: 101, at schemeshard: 72057594046678944 2025-03-31T16:25:09.976026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusMultipleModifications, reason: Check failed: path: '/MyRoot/MyView', error: path exists but creating right now (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateCreate), operation: CREATE VIEW, path: /MyRoot/MyView 2025-03-31T16:25:09.976619Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-03-31T16:25:09.978072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2025-03-31T16:25:09.978208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 2025-03-31T16:25:09.978514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:25:09.978606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:25:09.978655Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateView::TPropose, opId: 101:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000002 2025-03-31T16:25:09.978766Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2025-03-31T16:25:09.978955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:25:09.979045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 FAKE_COORDINATOR: Erasing txId 101 2025-03-31T16:25:09.981070Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:25:09.981112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:25:09.981274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T16:25:09.981396Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:25:09.981444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-03-31T16:25:09.981501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-03-31T16:25:09.981709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 
2025-03-31T16:25:09.981755Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-03-31T16:25:09.981865Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-31T16:25:09.981897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T16:25:09.981933Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-31T16:25:09.981961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T16:25:09.982013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-03-31T16:25:09.982051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T16:25:09.982087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-03-31T16:25:09.982121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-03-31T16:25:09.982180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-31T16:25:09.982215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-03-31T16:25:09.982245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 4 2025-03-31T16:25:09.982300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-03-31T16:25:09.983047Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T16:25:09.983177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T16:25:09.983214Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-03-31T16:25:09.983262Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2025-03-31T16:25:09.983298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:25:09.984216Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T16:25:09.984305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T16:25:09.984347Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-03-31T16:25:09.984429Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-03-31T16:25:09.984468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 
72057594046678944, LocalPathId: 2] was 1 2025-03-31T16:25:09.984534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-03-31T16:25:09.987371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-31T16:25:09.988781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 101 2025-03-31T16:25:09.989118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-03-31T16:25:09.989162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2025-03-31T16:25:09.989270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-03-31T16:25:09.989294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2025-03-31T16:25:09.989337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-03-31T16:25:09.989376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-03-31T16:25:09.989924Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-03-31T16:25:09.990061Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-03-31T16:25:09.990099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-31T16:25:09.990136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:303:2294] 2025-03-31T16:25:09.990326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-31T16:25:09.990352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:303:2294] 2025-03-31T16:25:09.990541Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-03-31T16:25:09.990623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-03-31T16:25:09.990646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:303:2294] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 2025-03-31T16:25:09.991152Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:25:09.991372Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyView" took 236us result status StatusSuccess 2025-03-31T16:25:09.991712Z node 
1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyView" PathDescription { Self { Name: "MyView" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ViewDescription { Name: "MyView" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 QueryText: "Some query" CapturedContext { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> DistributedEraseTests::DistributedEraseTxShouldFailOnVariousErrors [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldErase ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::AsyncDropSameView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:25:09.651356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:25:09.651464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:25:09.651503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:25:09.651541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:25:09.652212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:25:09.652262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:25:09.652343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:25:09.652426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:25:09.653544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:25:09.744498Z node 1 
:FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:25:09.744571Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:25:09.749736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:25:09.749829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:25:09.749985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:25:09.755257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:25:09.755467Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:25:09.756122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:25:09.756328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:25:09.758260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:25:09.759490Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:25:09.759555Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:25:09.759731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:25:09.759775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:25:09.759820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:25:09.759960Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:25:09.766856Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:25:09.915909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:25:09.916190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:25:09.916413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:25:09.916681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:25:09.916748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:25:09.919316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:25:09.919456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, 
operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:25:09.919647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:25:09.919712Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:25:09.919747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:25:09.919779Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:25:09.921743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:25:09.921796Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:25:09.921845Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:25:09.923555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:25:09.923599Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:25:09.923646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:25:09.923705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:25:09.933492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:25:09.935490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:25:09.935713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:25:09.936760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:25:09.936892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:25:09.936961Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:25:09.937258Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:25:09.937314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:25:09.937513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:25:09.937628Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:25:09.939855Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:25:09.939922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:25:09.940091Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:25:09.940132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:25:09.940358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:25:09.940408Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:25:09.940506Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:25:09.940538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:25:09.940573Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:25:09.940602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:25:09.940636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:25:09.940680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:25:09.940714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:25:09.940763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:25:09.940838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:25:09.940877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:25:09.940908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:25:09.942951Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:25:09.943117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:25:09.943176Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
Id: 2 PathDropTxId: 102, at schemeshard: 72057594046678944 2025-03-31T16:25:10.004576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusMultipleModifications, reason: Check failed: path: '/MyRoot/MyView', error: path is being deleted right now (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateDrop), operation: DROP VIEW, path: /MyRoot/MyView 2025-03-31T16:25:10.004823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 104, response: Status: StatusMultipleModifications Reason: "Check failed: path: \'/MyRoot/MyView\', error: path is being deleted right now (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateDrop)" TxId: 104 SchemeshardId: 72057594046678944 PathId: 2 PathDropTxId: 102, at schemeshard: 72057594046678944 2025-03-31T16:25:10.004924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 104, database: /MyRoot, subject: , status: StatusMultipleModifications, reason: Check failed: path: '/MyRoot/MyView', error: path is being deleted right now (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateDrop), operation: DROP VIEW, path: /MyRoot/MyView 2025-03-31T16:25:10.005400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-03-31T16:25:10.005544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 2025-03-31T16:25:10.005926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:25:10.006025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:25:10.006074Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropView TPropose, opId: 102:0 HandleReply TEvOperationPlan, step: 5000003 2025-03-31T16:25:10.006205Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 240 2025-03-31T16:25:10.006388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:25:10.006466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-31T16:25:10.008671Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:25:10.008719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:25:10.008863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T16:25:10.009015Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 
2025-03-31T16:25:10.009054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-03-31T16:25:10.009106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 FAKE_COORDINATOR: Erasing txId 102 2025-03-31T16:25:10.009385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:25:10.009428Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-03-31T16:25:10.009540Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-31T16:25:10.009577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T16:25:10.009612Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-31T16:25:10.009643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T16:25:10.009680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-03-31T16:25:10.009735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T16:25:10.009774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-03-31T16:25:10.009803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-03-31T16:25:10.009875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-31T16:25:10.009912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-03-31T16:25:10.009944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-03-31T16:25:10.009999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-03-31T16:25:10.010778Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:25:10.010888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:25:10.010925Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-03-31T16:25:10.010959Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-03-31T16:25:10.010992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:25:10.012149Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:25:10.012241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: 
Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:25:10.012268Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-03-31T16:25:10.012295Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-03-31T16:25:10.012320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-31T16:25:10.012403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-03-31T16:25:10.012634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-31T16:25:10.012696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-31T16:25:10.012756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:25:10.015712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-31T16:25:10.017568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-31T16:25:10.017685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestModificationResults wait txId: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 103 2025-03-31T16:25:10.018010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-03-31T16:25:10.018055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 TestWaitNotification wait txId: 104 2025-03-31T16:25:10.018132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-03-31T16:25:10.018168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-03-31T16:25:10.018666Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-03-31T16:25:10.018784Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-03-31T16:25:10.018823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-03-31T16:25:10.018856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:327:2318] 2025-03-31T16:25:10.019001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-03-31T16:25:10.019044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:327:2318] 
TestWaitNotification: OK eventTxId 103 TestWaitNotification: OK eventTxId 104 2025-03-31T16:25:10.019611Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:25:10.019822Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyView" took 234us result status StatusPathDoesNotExist 2025-03-31T16:25:10.019991Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyView\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/MyView" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::ReadOnlyMode [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:25:09.651306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:25:09.651473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:25:09.651507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:25:09.651537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:25:09.652182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:25:09.652206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:25:09.652271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:25:09.652349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:25:09.653485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:25:09.724431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:25:09.724493Z node 1 :IMPORT WARN: Table profiles were not loaded 
2025-03-31T16:25:09.729507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:25:09.729611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:25:09.729778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:25:09.736875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:25:09.737194Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:25:09.740291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:25:09.741190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:25:09.746346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:25:09.754207Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:25:09.754307Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:25:09.754512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:25:09.754575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:25:09.754668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:25:09.754838Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:25:09.763418Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:25:09.861026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:25:09.862943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:25:09.863764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:25:09.864848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:25:09.864910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:25:09.867790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:25:09.867922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:25:09.868095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:25:09.868228Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:25:09.868260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:25:09.868289Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:25:09.870506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:25:09.870577Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:25:09.870659Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:25:09.872826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:25:09.872884Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:25:09.872935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:25:09.873003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:25:09.877660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:25:09.880108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:25:09.881033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:25:09.882287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:25:09.882459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:25:09.882525Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:25:09.883840Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:25:09.883921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:25:09.884136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:25:09.884292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:25:09.887610Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:25:09.887683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:25:09.887869Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:25:09.887931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:25:09.888216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:25:09.888270Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:25:09.888373Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:25:09.888407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:25:09.888451Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:25:09.888485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:25:09.888521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:25:09.888568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:25:09.888603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:25:09.888648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:25:09.888753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:25:09.888788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:25:09.888820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:25:09.890931Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:25:09.891124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:25:09.891172Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:25:10.173790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:25:10.173932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:25:10.174005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:25:10.174077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:25:10.174242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:25:10.174342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:25:10.174447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:25:10.174630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-03-31T16:25:10.174800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-03-31T16:25:10.174844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-03-31T16:25:10.174882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-03-31T16:25:10.181730Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:25:10.181816Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:25:10.182729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:25:10.182803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:25:10.182860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:25:10.184322Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 Leader for TabletID 72057594046678944 is [1:378:2347] sender: [1:435:2058] recipient: [1:15:2062] 2025-03-31T16:25:10.229937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateView CreateView { Name: "ThirdView" QueryText: "Some query" } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:25:10.230251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateView Propose, path: /MyRoot/ThirdView, opId: 103:0 2025-03-31T16:25:10.230328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [72057594046678944] TCreateView Propose, path: /MyRoot/ThirdView, opId: 103:0, viewDescription: Name: "ThirdView" QueryText: "Some query" 2025-03-31T16:25:10.230471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: ThirdView, child id: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 
2025-03-31T16:25:10.230547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 0 2025-03-31T16:25:10.230611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:25:10.233708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusAccepted TxId: 103 SchemeshardId: 72057594046678944 PathId: 3, at schemeshard: 72057594046678944 2025-03-31T16:25:10.233942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE VIEW, path: /MyRoot/ThirdView 2025-03-31T16:25:10.234167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-31T16:25:10.234224Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateView::TPropose, opId: 103:0 ProgressState 2025-03-31T16:25:10.234286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 103 ready parts: 1/1 2025-03-31T16:25:10.234434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 103 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:25:10.236889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2025-03-31T16:25:10.237139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000003 2025-03-31T16:25:10.237984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:25:10.238126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:25:10.238190Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateView::TPropose, opId: 103:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000003 2025-03-31T16:25:10.238350Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 128 -> 240 2025-03-31T16:25:10.238564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:25:10.238650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 FAKE_COORDINATOR: Erasing txId 103 2025-03-31T16:25:10.241412Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:25:10.241492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:25:10.241713Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-03-31T16:25:10.241836Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:25:10.241911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:428:2386], at schemeshard: 72057594046678944, txId: 103, path id: 1 2025-03-31T16:25:10.241967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:428:2386], at schemeshard: 72057594046678944, txId: 103, path id: 3 2025-03-31T16:25:10.242039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-31T16:25:10.242086Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-03-31T16:25:10.242194Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-03-31T16:25:10.242233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-31T16:25:10.242316Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-03-31T16:25:10.242375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-31T16:25:10.242420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2025-03-31T16:25:10.242466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-31T16:25:10.242504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-03-31T16:25:10.242541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-03-31T16:25:10.242618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-31T16:25:10.242679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 103, publications: 2, subscribers: 0 2025-03-31T16:25:10.242717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-03-31T16:25:10.242745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-03-31T16:25:10.243863Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2025-03-31T16:25:10.243974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2025-03-31T16:25:10.244011Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2025-03-31T16:25:10.244068Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-03-31T16:25:10.244121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-03-31T16:25:10.245123Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 
72057594046678944 Generation: 4 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 103 2025-03-31T16:25:10.245219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 103 2025-03-31T16:25:10.245252Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2025-03-31T16:25:10.245284Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-03-31T16:25:10.245332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-03-31T16:25:10.245443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2025-03-31T16:25:10.249993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-03-31T16:25:10.250572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103
>> TSchemeShardViewTest::CreateView [GOOD]
>> DistributedEraseTests::ConditionalEraseRowsShouldSuccessOnShardedIndex [GOOD]
>> DistributedEraseTests::ConditionalEraseRowsShouldNotEraseModifiedRows
>> TSchemeShardViewTest::DropView [GOOD]
>> TCdcStreamTests::MeteringDedicated [GOOD]
>> TCdcStreamTests::ChangeOwner
>> TSchemeShardViewTest::EmptyQueryText [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::CreateView [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:25:10.785521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:25:10.785601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:25:10.785627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:25:10.785653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:25:10.785688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:25:10.785708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:25:10.785763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:25:10.785824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s,
DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:25:10.786088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:25:10.848287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:25:10.848344Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:25:10.853692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:25:10.853802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:25:10.853991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:25:10.860351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:25:10.860628Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:25:10.861330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:25:10.861643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:25:10.864047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:25:10.865368Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:25:10.865507Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:25:10.865749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:25:10.865806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:25:10.865855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:25:10.866017Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:25:10.874111Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:25:10.989134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:25:10.989362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:25:10.989569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:25:10.989773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:25:10.989814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:25:10.991915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: 
StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:25:10.992017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:25:10.992159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:25:10.992234Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:25:10.992262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:25:10.992285Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:25:10.994032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:25:10.994075Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:25:10.994111Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:25:10.995709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:25:10.995745Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:25:10.995786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:25:10.995825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:25:10.998423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:25:11.000194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:25:11.000428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:25:11.001561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:25:11.001718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:25:11.001773Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:25:11.002056Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:25:11.002113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 
72057594046678944 2025-03-31T16:25:11.002291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:25:11.002384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:25:11.004938Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:25:11.005008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:25:11.005214Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:25:11.005265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:25:11.005509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:25:11.005556Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:25:11.005662Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:25:11.005695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:25:11.005742Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:25:11.005781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:25:11.005817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:25:11.005857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:25:11.005888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:25:11.005929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:25:11.006001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:25:11.006042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:25:11.006075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:25:11.008163Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:25:11.008307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:25:11.008348Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
e 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateView Propose, path: /MyRoot/MyView, opId: 100:0 2025-03-31T16:25:11.029433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [72057594046678944] TCreateView Propose, path: /MyRoot/MyView, opId: 100:0, viewDescription: Name: "MyView" QueryText: "Some query" 2025-03-31T16:25:11.029544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: MyView, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-31T16:25:11.029610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-03-31T16:25:11.029648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 100:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:25:11.030668Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-31T16:25:11.033624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 100, response: Status: StatusAccepted TxId: 100 SchemeshardId: 72057594046678944 PathId: 2, at schemeshard: 72057594046678944 2025-03-31T16:25:11.033787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 100, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE VIEW, path: /MyRoot/MyView 2025-03-31T16:25:11.034021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-03-31T16:25:11.034090Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateView::TPropose, opId: 100:0 ProgressState 2025-03-31T16:25:11.034155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 100 ready parts: 1/1 2025-03-31T16:25:11.034269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 100 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:25:11.035009Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-03-31T16:25:11.036453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2025-03-31T16:25:11.036609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002 2025-03-31T16:25:11.036944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:25:11.037062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:25:11.037132Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] 
TCreateView::TPropose, opId: 100:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000002 2025-03-31T16:25:11.037289Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 128 -> 240 2025-03-31T16:25:11.037452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:25:11.037533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 FAKE_COORDINATOR: Erasing txId 100 2025-03-31T16:25:11.039644Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:25:11.039709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:25:11.039877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T16:25:11.039981Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:25:11.040032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 100, path id: 1 2025-03-31T16:25:11.040075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 100, path id: 2 2025-03-31T16:25:11.040270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-03-31T16:25:11.040318Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 100:0 ProgressState 2025-03-31T16:25:11.040434Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-03-31T16:25:11.040493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-03-31T16:25:11.040533Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-03-31T16:25:11.040565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-03-31T16:25:11.040599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2025-03-31T16:25:11.040661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-03-31T16:25:11.040700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 100:0 2025-03-31T16:25:11.040730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 100:0 2025-03-31T16:25:11.040797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-31T16:25:11.040851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2025-03-31T16:25:11.040885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 4 2025-03-31T16:25:11.040915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-03-31T16:25:11.041727Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 
PathOwnerId: 72057594046678944, cookie: 100 2025-03-31T16:25:11.041825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 100 2025-03-31T16:25:11.041878Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2025-03-31T16:25:11.041912Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2025-03-31T16:25:11.041950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:25:11.042728Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 100 2025-03-31T16:25:11.042799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 100 2025-03-31T16:25:11.042825Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2025-03-31T16:25:11.042865Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-03-31T16:25:11.042893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-31T16:25:11.042969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2025-03-31T16:25:11.046733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-03-31T16:25:11.047121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 101 2025-03-31T16:25:11.047391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-03-31T16:25:11.047430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-03-31T16:25:11.047780Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-03-31T16:25:11.047866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-31T16:25:11.047903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:297:2288] TestWaitNotification: OK eventTxId 101 2025-03-31T16:25:11.048355Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:25:11.048561Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyView" took 213us result status StatusSuccess 2025-03-31T16:25:11.048908Z node 1 
:SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyView" PathDescription { Self { Name: "MyView" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ViewDescription { Name: "MyView" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 QueryText: "Some query" CapturedContext { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgDate [GOOD]
>> TSchemeShardViewTest::EmptyName
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::DropView [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:25:10.851028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:25:10.851145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:25:10.851173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:25:10.851212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:25:10.851246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:25:10.851287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:25:10.851344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:25:10.851403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:25:10.851694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:25:10.922524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console
configs 2025-03-31T16:25:10.922568Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:25:10.927211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:25:10.927320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:25:10.927489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:25:10.932328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:25:10.932514Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:25:10.933096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:25:10.933285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:25:10.935176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:25:10.936155Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:25:10.936212Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:25:10.936335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:25:10.936365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:25:10.936391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:25:10.936510Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:25:10.942472Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:25:11.031440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:25:11.031671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:25:11.031878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:25:11.032076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:25:11.032127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:25:11.034019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:25:11.034129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 
2025-03-31T16:25:11.034250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:25:11.034289Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:25:11.034317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:25:11.034340Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:25:11.035973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:25:11.036032Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:25:11.036084Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:25:11.037704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:25:11.037748Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:25:11.037786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:25:11.037827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:25:11.041443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:25:11.043709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:25:11.043983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:25:11.045417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:25:11.045612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:25:11.045698Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:25:11.046055Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:25:11.046142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:25:11.046361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:25:11.046490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no 
IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:25:11.048941Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:25:11.048996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:25:11.049127Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:25:11.049162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:25:11.049344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:25:11.049379Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:25:11.049483Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:25:11.049507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:25:11.049534Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:25:11.049555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:25:11.049581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:25:11.049623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:25:11.049651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:25:11.049680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:25:11.049736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:25:11.049764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:25:11.049785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:25:11.051300Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:25:11.051400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:25:11.051432Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
dReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ViewDescription { Name: "MyView" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 QueryText: "Some query" CapturedContext { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 102 2025-03-31T16:25:11.092015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpDropView Drop { Name: "MyView" } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:25:11.092167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TDropView Propose, opId: 102:0, path: /MyRoot/MyView 2025-03-31T16:25:11.092310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-03-31T16:25:11.092361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:25:11.094552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusAccepted TxId: 102 SchemeshardId: 72057594046678944 PathId: 2, at schemeshard: 72057594046678944 2025-03-31T16:25:11.094711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusAccepted, operation: DROP VIEW, path: /MyRoot/MyView 2025-03-31T16:25:11.094906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:25:11.094976Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropView TPropose, opId: 102:0 ProgressState 2025-03-31T16:25:11.095029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2025-03-31T16:25:11.095201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:25:11.096997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-03-31T16:25:11.097167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 2025-03-31T16:25:11.097498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:25:11.097586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:25:11.097637Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropView TPropose, opId: 102:0 HandleReply TEvOperationPlan, step: 5000003 2025-03-31T16:25:11.097789Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 240 2025-03-31T16:25:11.097966Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:25:11.098046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 FAKE_COORDINATOR: Erasing txId 102 2025-03-31T16:25:11.099897Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:25:11.099932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:25:11.100036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T16:25:11.100159Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:25:11.100192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-03-31T16:25:11.100229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-03-31T16:25:11.100387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:25:11.100418Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-03-31T16:25:11.100501Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-31T16:25:11.100525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T16:25:11.100552Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-31T16:25:11.100572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T16:25:11.100605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-03-31T16:25:11.100637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T16:25:11.100660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-03-31T16:25:11.100682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-03-31T16:25:11.100732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-31T16:25:11.100755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-03-31T16:25:11.100779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-03-31T16:25:11.100811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-03-31T16:25:11.101320Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:25:11.101388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 
Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:25:11.101415Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-03-31T16:25:11.101442Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-03-31T16:25:11.101489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:25:11.102661Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:25:11.102738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:25:11.102760Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-03-31T16:25:11.102779Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-03-31T16:25:11.102797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-31T16:25:11.102872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-03-31T16:25:11.103342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-31T16:25:11.103376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-31T16:25:11.103424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:25:11.105256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-31T16:25:11.107498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-31T16:25:11.107612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-03-31T16:25:11.107903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-03-31T16:25:11.107962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-03-31T16:25:11.108578Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-03-31T16:25:11.108715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-31T16:25:11.108765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: 
satisfy waiter [1:321:2312] TestWaitNotification: OK eventTxId 102 2025-03-31T16:25:11.109437Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:25:11.109695Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyView" took 274us result status StatusPathDoesNotExist 2025-03-31T16:25:11.109949Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyView\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/MyView" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::HotTiersTtl [GOOD]
Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143438859.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143438859.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=143438859.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143438859.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143438859.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=123438859.000000s;Name=;Codec=}; WaitEmptyAfter=1;Tiers={{Column=timestamp;EvictAfter=143438859.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143438859.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=123437659.000000s;Name=;Codec=}; 2025-03-31T16:24:19.888245Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:24:19.969359Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:24:19.982926Z node 1 :TX_COLUMNSHARD INFO:
tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:24:19.983158Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:24:19.988921Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:24:19.989069Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:24:19.989232Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:24:19.989317Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:24:19.989389Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:24:19.989467Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:24:19.989526Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:24:19.989585Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:24:19.989662Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:24:19.989734Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:24:19.989792Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:24:19.989847Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:24:20.006744Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:24:20.006891Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:24:20.006945Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 
2025-03-31T16:24:20.007133Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:24:20.007314Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:24:20.007390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:24:20.007432Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:24:20.007522Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:24:20.007574Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:24:20.007613Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:24:20.007641Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:24:20.007807Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:24:20.007881Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:24:20.007926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:24:20.007959Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:24:20.008047Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:24:20.008100Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:24:20.008144Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:24:20.008173Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:24:20.008237Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:24:20.008278Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:24:20.008310Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:24:20.008359Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:24:20.008398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:24:20.008428Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:24:20.008815Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=49; 2025-03-31T16:24:20.008902Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=35; 2025-03-31T16:24:20.008973Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=33; 2025-03-31T16:24:20.009053Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=40; 2025-03-31T16:24:20.009230Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:24:20.009313Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:24:20.009347Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:24:20.009545Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:24:20.009588Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:24:20.009618Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:24:20.009784Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchem ... 
31T16:25:10.512988Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=6; 2025-03-31T16:25:10.513071Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700008;tx_id=18446744073709551615;;current_snapshot_ts=1000000002; 2025-03-31T16:25:10.513118Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=6;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-31T16:25:10.513161Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-31T16:25:10.513203Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-31T16:25:10.513284Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-31T16:25:10.513507Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000008:max} readable: {1000000008:max} at tablet 9437184 2025-03-31T16:25:10.513607Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-03-31T16:25:10.513730Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000008:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-03-31T16:25:10.513774Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000008:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-03-31T16:25:10.514133Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000008:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2025-03-31T16:25:10.514211Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000008:max};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:136;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-03-31T16:25:10.514575Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000008:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:162;event=TTxScan started;actor_id=[1:2009:4018];trace_detailed=; 2025-03-31T16:25:10.514906Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;fline=context.cpp:84;ff_first=(column_ids=1;column_names=timestamp;);; 2025-03-31T16:25:10.515100Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;fline=context.cpp:99;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-03-31T16:25:10.515246Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:25:10.515349Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:25:10.515648Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:2009:4018];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-31T16:25:10.515734Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:2009:4018];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:25:10.515830Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:2009:4018];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:25:10.515862Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:2009:4018] finished for tablet 9437184 2025-03-31T16:25:10.516159Z node 1 
:TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:2009:4018];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:416;event=scan_finish;compute_actor_id=[1:2008:4017];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0},{"events":["f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1743438310514525,"name":"_full_task","f":1743438310514525,"d_finished":0,"c":0,"l":1743438310515908,"d":1383},"events":[{"name":"bootstrap","f":1743438310514679,"d_finished":695,"c":1,"l":1743438310515374,"d":695},{"a":1743438310515630,"name":"ack","f":1743438310515630,"d_finished":0,"c":0,"l":1743438310515908,"d":278},{"a":1743438310515617,"name":"processing","f":1743438310515617,"d_finished":0,"c":0,"l":1743438310515908,"d":291},{"name":"ProduceResults","f":1743438310515167,"d_finished":381,"c":2,"l":1743438310515848,"d":381},{"a":1743438310515850,"name":"Finish","f":1743438310515850,"d_finished":0,"c":0,"l":1743438310515908,"d":58}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:25:10.516215Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:2009:4018];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:2008:4017];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-31T16:25:10.516516Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:2009:4018];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:371;event=scan_finished;compute_actor_id=[1:2008:4017];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0},{"events":["f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1743438310514525,"name":"_full_task","f":1743438310514525,"d_finished":0,"c":0,"l":1743438310516248,"d":1723},"events":[{"name":"bootstrap","f":1743438310514679,"d_finished":695,"c":1,"l":1743438310515374,"d":695},{"a":1743438310515630,"name":"ack","f":1743438310515630,"d_finished":0,"c":0,"l":1743438310516248,"d":618},{"a":1743438310515617,"name":"processing","f":1743438310515617,"d_finished":0,"c":0,"l":1743438310516248,"d":631},{"name":"ProduceResults","f":1743438310515167,"d_finished":381,"c":2,"l":1743438310515848,"d":381},{"a":1743438310515850,"name":"Finish","f":1743438310515850,"d_finished":0,"c":0,"l":1743438310516248,"d":398}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:2009:4018]->[1:2008:4017] 2025-03-31T16:25:10.516588Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=3;SelfId=[1:2009:4018];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-31T16:25:10.514186Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-03-31T16:25:10.516622Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:2009:4018];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-31T16:25:10.516704Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:2009:4018];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 240000/15598728 160000/10402096 80000/5203352 0/0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::EmptyQueryText [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:25:11.006522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:25:11.006618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:25:11.006658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:25:11.006694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:25:11.006740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:25:11.006769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:25:11.006872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:25:11.006940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 
604800.000000s, IsManualStartup# false 2025-03-31T16:25:11.007182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:25:11.072393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:25:11.072433Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:25:11.076485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:25:11.076555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:25:11.076721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:25:11.081936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:25:11.082172Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:25:11.082826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:25:11.083103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:25:11.085150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:25:11.086332Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:25:11.086403Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:25:11.086544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:25:11.086577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:25:11.086618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:25:11.086771Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:25:11.094821Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:25:11.226296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:25:11.226565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:25:11.226817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:25:11.227055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:25:11.227126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:25:11.229867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 
PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:25:11.230034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:25:11.230234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:25:11.230294Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:25:11.230334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:25:11.230399Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:25:11.232793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:25:11.232865Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:25:11.232921Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:25:11.235165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:25:11.235229Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:25:11.235274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:25:11.235323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:25:11.239353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:25:11.241793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:25:11.242044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:25:11.243174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:25:11.243337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:25:11.243397Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:25:11.243704Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:25:11.243764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:25:11.243937Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:25:11.244035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:25:11.246542Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:25:11.246610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:25:11.246803Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:25:11.246856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:25:11.247127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:25:11.247175Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:25:11.247281Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:25:11.247316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:25:11.247354Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:25:11.247385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:25:11.247421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:25:11.247466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:25:11.247505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:25:11.247560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:25:11.247641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:25:11.247679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:25:11.247714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:25:11.249845Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:25:11.249970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:25:11.250006Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-03-31T16:25:11.250092Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-03-31T16:25:11.250147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:25:11.250323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-03-31T16:25:11.253918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-03-31T16:25:11.254461Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-03-31T16:25:11.255168Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] Bootstrap 2025-03-31T16:25:11.274832Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] Become StateWork (SchemeCache [1:272:2263]) 2025-03-31T16:25:11.277761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateView CreateView { Name: "MyView" QueryText: "" } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:25:11.278034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateView Propose, path: /MyRoot/MyView, opId: 101:0 2025-03-31T16:25:11.278097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [72057594046678944] TCreateView Propose, path: /MyRoot/MyView, opId: 101:0, viewDescription: Name: "MyView" QueryText: "" 2025-03-31T16:25:11.278217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: MyView, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-31T16:25:11.278291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-03-31T16:25:11.278344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:25:11.279793Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-31T16:25:11.282946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusAccepted TxId: 101 SchemeshardId: 72057594046678944 PathId: 2, at schemeshard: 72057594046678944 2025-03-31T16:25:11.283163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE VIEW, path: /MyRoot/MyView 2025-03-31T16:25:11.283431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:25:11.283486Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateView::TPropose, opId: 101:0 ProgressState 2025-03-31T16:25:11.283550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 101 ready parts: 1/1 2025-03-31T16:25:11.283676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 101 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:25:11.284548Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-03-31T16:25:11.286329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg 
operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2025-03-31T16:25:11.286481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 2025-03-31T16:25:11.286876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:25:11.286996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:25:11.287046Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateView::TPropose, opId: 101:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000002 2025-03-31T16:25:11.287255Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2025-03-31T16:25:11.287453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:25:11.287518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 FAKE_COORDINATOR: Erasing txId 101 2025-03-31T16:25:11.289999Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:25:11.290052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:25:11.290251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T16:25:11.290370Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:25:11.290411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-03-31T16:25:11.290458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-03-31T16:25:11.290670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:25:11.290739Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-03-31T16:25:11.290841Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-31T16:25:11.290921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T16:25:11.290966Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-31T16:25:11.290999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T16:25:11.291053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-03-31T16:25:11.291124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-31T16:25:11.291169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-03-31T16:25:11.291206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-03-31T16:25:11.291298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-31T16:25:11.291362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-03-31T16:25:11.291408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 4 2025-03-31T16:25:11.291442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-03-31T16:25:11.292312Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T16:25:11.292424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T16:25:11.292464Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-03-31T16:25:11.292503Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2025-03-31T16:25:11.292543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:25:11.293570Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T16:25:11.293664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-03-31T16:25:11.293697Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-03-31T16:25:11.293730Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-03-31T16:25:11.293760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-31T16:25:11.293836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-03-31T16:25:11.298436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-31T16:25:11.298852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 >> TColumnShardTestSchema::RebootExportWithLostAnswer [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::SimpleType [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::TTzDate [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::Optional [GOOD] >> 
ConvertMiniKQLTypeToYdbTypeTest::List [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::Struct [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::Dict [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::PgType [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleInt32 [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleInt64 [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleTzDate [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleTzDateTime [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleTzTimeStamp [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleDecimal [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleUuid [GOOD] >> ConvertMiniKQLValueToYdbValueTest::Void [GOOD] >> ConvertMiniKQLValueToYdbValueTest::Struct [GOOD] >> ConvertMiniKQLValueToYdbValueTest::Tuple [GOOD] >> ConvertMiniKQLValueToYdbValueTest::Variant [GOOD] >> ConvertTableDescription::StorageSettings >> CellsFromTupleTest::CellsFromTupleSuccess [GOOD] >> CellsFromTupleTest::CellsFromTupleSuccessPg >> ConvertMiniKQLTypeToYdbTypeTest::TTzDateTime [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::TTzTimeStamp [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::UuidType [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::VariantTuple ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgDate [GOOD] Test command err: 2025-03-31T16:25:01.278801Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:25:01.279163Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:25:01.279439Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012b9/r3tmp/tmpQvv8uK/pdisk_1.dat 2025-03-31T16:25:01.753772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:25:01.793375Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:25:01.830777Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:01.830913Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:01.842443Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:25:01.923903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:25:01.967429Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-31T16:25:01.967719Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:25:02.019446Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:25:02.019575Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:25:02.021337Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:25:02.021453Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:25:02.021524Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:25:02.021875Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:25:02.022014Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:25:02.022104Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-31T16:25:02.032923Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:25:02.061961Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T16:25:02.062149Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:25:02.062287Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-31T16:25:02.062335Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:25:02.062371Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T16:25:02.062404Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:02.062811Z node 1 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T16:25:02.062925Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T16:25:02.063066Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:02.063132Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:02.063171Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:25:02.063240Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:02.063320Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-31T16:25:02.063558Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:25:02.063785Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T16:25:02.063883Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T16:25:02.066021Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:02.076748Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:25:02.076889Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-31T16:25:02.225533Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-31T16:25:02.234010Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T16:25:02.234084Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:02.234591Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:02.234627Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:25:02.234680Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-31T16:25:02.234880Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-31T16:25:02.235014Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:25:02.235519Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:02.235571Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-31T16:25:02.237129Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-31T16:25:02.237477Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-03-31T16:25:02.239298Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-03-31T16:25:02.239341Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:02.240234Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-03-31T16:25:02.240343Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:02.241428Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:02.241491Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:25:02.241553Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-31T16:25:02.241617Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-31T16:25:02.241691Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-31T16:25:02.241798Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:02.244912Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:02.246501Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-03-31T16:25:02.246631Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-03-31T16:25:02.246677Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-31T16:25:02.254247Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:02.254393Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:747:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:02.254466Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:02.258336Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:25:02.262915Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:02.411643Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:02.415330Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:25:02.475994Z node 1 :TX_PROXY ERROR: Actor# [1:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:25:02.755986Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqpgak1c4bmseqtk18251jvc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWFjYmU3MDMtZmI1ZTYzOS01YmYyNTNhMS01ZWVjZTc2OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:25:02.761134Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:856:2687], serverId# [1:857:2688], sessionId# [0:0:0] 2025-03-31T16:25:02.761614Z node 1 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888 2025-03-31T16:25:02.761824Z node 1 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row count=4 ... 025-03-31T16:25:10.873930Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:10.874006Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:25:10.874083Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:10.874547Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:662:2567], serverId# [3:673:2574], sessionId# [0:0:0] 2025-03-31T16:25:10.874696Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:25:10.874923Z node 3 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T16:25:10.875009Z node 3 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T16:25:10.876993Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:10.887821Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:25:10.887947Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-31T16:25:11.038339Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:703:2593], serverId# [3:705:2595], sessionId# [0:0:0] 2025-03-31T16:25:11.038973Z node 3 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T16:25:11.039038Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:11.039895Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:11.039952Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:25:11.040022Z node 3 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-31T16:25:11.040264Z node 3 :TX_DATASHARD DEBUG: 
LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-31T16:25:11.040414Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:25:11.041102Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:11.041160Z node 3 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-31T16:25:11.041674Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-31T16:25:11.042146Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:11.043596Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-03-31T16:25:11.043649Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:11.044450Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-03-31T16:25:11.044539Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:11.045433Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:11.045487Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:25:11.045532Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-31T16:25:11.045593Z node 3 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-31T16:25:11.045645Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-31T16:25:11.045734Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:11.046571Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:11.048497Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-03-31T16:25:11.048576Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-31T16:25:11.049397Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-03-31T16:25:11.057176Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:11.057271Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:747:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:11.057336Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:11.062320Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:25:11.068853Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:11.220413Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:11.224398Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:25:11.259772Z node 3 :TX_PROXY ERROR: Actor# [3:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:25:11.365938Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqpgavmfefbx6e9y8k370pqt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YTA4ZjYzMGMtNDZjNWM1MzgtNzlmZjM2NTMtZDRmZGQ4NDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:25:11.368948Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:856:2687], serverId# [3:857:2688], sessionId# [0:0:0] 2025-03-31T16:25:11.369326Z node 3 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888 2025-03-31T16:25:11.369509Z node 3 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row count=5 2025-03-31T16:25:11.380528Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:11.385351Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:864:2694], serverId# [3:865:2695], sessionId# [0:0:0] 2025-03-31T16:25:11.386676Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-03-31T16:25:11.398166Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-03-31T16:25:11.398247Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:11.398492Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-03-31T16:25:11.398537Z node 3 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 3, at: 72075186224037888 2025-03-31T16:25:11.398857Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:11.398908Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:11.398960Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:25:11.399022Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:11.399131Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [3:864:2694], serverId# [3:865:2695], sessionId# [0:0:0] 2025-03-31T16:25:11.400143Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:25:11.400511Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:25:11.400725Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:11.400769Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-03-31T16:25:11.400812Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 2025-03-31T16:25:11.401043Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-03-31T16:25:11.401108Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 
2025-03-31T16:25:11.401750Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2025-03-31T16:25:11.402088Z node 3 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 43, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-03-31T16:25:11.402260Z node 3 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2025-03-31T16:25:11.402307Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2025-03-31T16:25:11.404312Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-03-31T16:25:11.404363Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715661, at: 72075186224037888 2025-03-31T16:25:11.404517Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:11.404551Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-03-31T16:25:11.404589Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2025-03-31T16:25:11.404713Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:11.404772Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:11.404817Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 >> ConvertYdbValueToMiniKQLValueTest::Void [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleUuidTypeMissmatch [GOOD] >> ConvertYdbValueToMiniKQLValueTest::Struct >> ConvertTableDescription::StorageSettings [GOOD] >> ConvertTableDescription::ColumnFamilies [GOOD] >> ConvertYdbPermissionNameToACLAttrs::SimpleConvertGood [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::VariantTuple [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::VariantStruct [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::Void [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::Tuple [GOOD] >> ConvertYdbValueToMiniKQLValueTest::Struct [GOOD] >> ConvertYdbValueToMiniKQLValueTest::Tuple [GOOD] >> ConvertYdbValueToMiniKQLValueTest::Variant [GOOD] >> ConvertYdbValueToMiniKQLValueTest::VariantIndexUnderflow [GOOD] >> TSchemeShardViewTest::EmptyName [GOOD] >> CellsFromTupleTest::CellsFromTupleSuccessPg [GOOD] >> CellsFromTupleTest::CellsFromTupleFails [GOOD] >> CellsFromTupleTest::CellsFromTupleFailsPg [GOOD] >> CompressionTests::Zstd [GOOD] >> CompressionTests::Unsupported [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::DecimalType [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleInt32 [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleTzDate [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleTzDateTime [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleTzTimeStamp [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleInt32TypeMissmatch [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleUuid [GOOD] >> TCdcStreamTests::ChangeOwner [GOOD] >> TCdcStreamTests::DropIndexWithStream |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLTypeToYdbTypeTest::PgType [GOOD] |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLTypeToYdbTypeTest::Tuple [GOOD] |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertYdbPermissionNameToACLAttrs::SimpleConvertGood [GOOD] |91.7%| 
[TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLValueToYdbValueTest::SimpleUuid [GOOD] |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertYdbValueToMiniKQLValueTest::VariantIndexUnderflow [GOOD] |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLTypeToYdbTypeTest::DecimalType [GOOD] |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertYdbValueToMiniKQLValueTest::SimpleUuid [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::EmptyName [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:25:12.316902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:25:12.317023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:25:12.317064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:25:12.317103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:25:12.317151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:25:12.317180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:25:12.317262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:25:12.317365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:25:12.317713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:25:12.414081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:25:12.414159Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:25:12.419529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:25:12.419650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:25:12.419828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:25:12.425845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:25:12.426120Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:25:12.426815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:25:12.427074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at 
schemeshard: 72057594046678944 2025-03-31T16:25:12.429464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:25:12.430818Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:25:12.430897Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:25:12.431122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:25:12.431181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:25:12.431234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:25:12.431418Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:25:12.439774Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:25:12.577126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:25:12.577440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:25:12.577721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:25:12.578025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:25:12.578099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:25:12.580772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:25:12.580940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:25:12.581165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:25:12.581228Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:25:12.581274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:25:12.581335Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:25:12.583894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:25:12.583973Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:25:12.584034Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
Change state for txid 1:0 3 -> 128 2025-03-31T16:25:12.586378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:25:12.586437Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:25:12.586512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:25:12.586574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:25:12.590595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:25:12.592637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:25:12.592827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:25:12.593647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:25:12.593766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:25:12.593822Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:25:12.594045Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:25:12.594084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:25:12.594223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:25:12.594305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:25:12.596506Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:25:12.596560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:25:12.596704Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:25:12.596745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:25:12.596955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at 
schemeshard: 72057594046678944 2025-03-31T16:25:12.596996Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:25:12.597073Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:25:12.597098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:25:12.597127Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:25:12.597150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:25:12.597178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:25:12.597212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:25:12.597239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:25:12.597273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:25:12.597332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:25:12.597361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:25:12.597386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:25:12.598950Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:25:12.599038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:25:12.599064Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-03-31T16:25:12.599142Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-03-31T16:25:12.599188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:25:12.599273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-03-31T16:25:12.601992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-03-31T16:25:12.602404Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-03-31T16:25:12.602906Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] Bootstrap 2025-03-31T16:25:12.617103Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] Become StateWork (SchemeCache [1:272:2263]) 2025-03-31T16:25:12.619040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateView CreateView { Name: "" QueryText: "Some query" } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:25:12.619231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateView 
Propose, path: /MyRoot/, opId: 101:0 2025-03-31T16:25:12.619288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [72057594046678944] TCreateView Propose, path: /MyRoot/, opId: 101:0, viewDescription: Name: "" QueryText: "Some query" 2025-03-31T16:25:12.619358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Check failed: path: '/MyRoot/', error: path part shouldn't be empty, at schemeshard: 72057594046678944 2025-03-31T16:25:12.620291Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-31T16:25:12.622536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Check failed: path: \'/MyRoot/\', error: path part shouldn\'t be empty" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:25:12.622683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Check failed: path: '/MyRoot/', error: path part shouldn't be empty, operation: CREATE VIEW, path: /MyRoot/ 2025-03-31T16:25:12.623107Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootExportWithLostAnswer [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=saved_at;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143438859.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=143438859.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123438859.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=123438859.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123437659.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=123437659.000000s;Name=;Codec=}; 2025-03-31T16:24:21.247003Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:24:21.325507Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:24:21.342020Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:24:21.342256Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:24:21.348546Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:24:21.348715Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 
2025-03-31T16:24:21.348907Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:24:21.348983Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:24:21.349067Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:24:21.349141Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:24:21.349213Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:24:21.349313Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:24:21.349395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:24:21.349474Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:24:21.349539Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:24:21.349596Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:24:21.367625Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:24:21.367735Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:24:21.367773Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:24:21.367922Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:24:21.368087Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:24:21.368156Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:24:21.368194Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:24:21.368267Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:24:21.368306Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:24:21.368335Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:24:21.368353Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:24:21.368472Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:24:21.368520Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:24:21.368545Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:24:21.368564Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:24:21.368628Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:24:21.368662Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:24:21.368689Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:24:21.368708Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:24:21.368753Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:24:21.368777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:24:21.368794Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:24:21.368823Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:24:21.368849Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:24:21.368869Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:24:21.369151Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=31; 2025-03-31T16:24:21.369214Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=27; 2025-03-31T16:24:21.369287Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=29; 2025-03-31T16:24:21.369346Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=29; 2025-03-31T16:24:21.369452Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:24:21.369486Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:24:21.369510Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:24:21.369643Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:24:21.369673Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:24:21.369693Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:24:21.369813Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:24:21.369843Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:24:21.369860Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-31T16:24:21.369968Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normaliz ... 
a.cpp:29;PRECHARGE:finishLoadingTime=13; 2025-03-31T16:25:12.234104Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=180; 2025-03-31T16:25:12.234136Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=22115; 2025-03-31T16:25:12.238599Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=4386; 2025-03-31T16:25:12.243187Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=3627; 2025-03-31T16:25:12.243293Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=4594; 2025-03-31T16:25:12.243437Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=81; 2025-03-31T16:25:12.243540Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=56; 2025-03-31T16:25:12.243655Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=73; 2025-03-31T16:25:12.243748Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=56; 2025-03-31T16:25:12.249372Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=5555; 2025-03-31T16:25:12.256254Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=6761; 2025-03-31T16:25:12.256390Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=35; 2025-03-31T16:25:12.256455Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=25; 2025-03-31T16:25:12.256492Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=6; 2025-03-31T16:25:12.256530Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=6; 2025-03-31T16:25:12.256565Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2025-03-31T16:25:12.256634Z node 1 
:TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=38; 2025-03-31T16:25:12.256673Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=5; 2025-03-31T16:25:12.256747Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=46; 2025-03-31T16:25:12.256785Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=6; 2025-03-31T16:25:12.256848Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=27; 2025-03-31T16:25:12.256948Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=50; 2025-03-31T16:25:12.257179Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=187; 2025-03-31T16:25:12.257213Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=51610; 2025-03-31T16:25:12.257347Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=20801572;raw_bytes=32169208;count=11;records=320000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-03-31T16:25:12.257444Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-03-31T16:25:12.257510Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-03-31T16:25:12.257567Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-03-31T16:25:12.265494Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=1; 2025-03-31T16:25:12.265655Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-31T16:25:12.265710Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:25:12.265787Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=3; 2025-03-31T16:25:12.265858Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700006;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-03-31T16:25:12.265897Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=3;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-31T16:25:12.265941Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-31T16:25:12.265975Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-31T16:25:12.266060Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-31T16:25:12.266731Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:25:12.266831Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;self_id=[1:1991:3891];tablet_id=9437184;parent=[1:1953:3860];fline=manager.cpp:82;event=ask_data;request=request_id=95;1={portions_count=11};; 2025-03-31T16:25:12.267475Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-03-31T16:25:12.267872Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-03-31T16:25:12.267911Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-03-31T16:25:12.267938Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-03-31T16:25:12.267989Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-31T16:25:12.268041Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:25:12.268094Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=3; 2025-03-31T16:25:12.268153Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700006;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-03-31T16:25:12.268190Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=3;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-31T16:25:12.268232Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-31T16:25:12.268268Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-31T16:25:12.268346Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-31T16:25:12.268882Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1452;size=11;path_id=1; 2025-03-31T16:25:12.270146Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1503;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 160000/10402332 160000/10402332 80000/5203504 0/0 |91.7%| [TA] $(B)/ydb/core/tx/schemeshard/ut_view/test-results/unittest/{meta.json ... results_accumulator.log} |91.7%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_view/test-results/unittest/{meta.json ... 
results_accumulator.log} >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64NanoSeconds [GOOD] >> DataShardVolatile::DistributedOutOfOrderFollowerConsistency [GOOD] >> DataShardVolatile::DistributedWriteRSNotAckedBeforeCommit >> TColumnShardTestSchema::ExportWithLostAnswer [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleBool [GOOD] >> ConvertMiniKQLValueToYdbValueTest::OptionalString [GOOD] >> ConvertMiniKQLValueToYdbValueTest::OptionalEmpty [GOOD] >> ConvertMiniKQLValueToYdbValueTest::OptionalOptionalEmpty [GOOD] >> ConvertMiniKQLValueToYdbValueTest::OptionalOptionalEmpty2 [GOOD] >> ConvertMiniKQLValueToYdbValueTest::List [GOOD] >> ConvertMiniKQLValueToYdbValueTest::Dict [GOOD] |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLValueToYdbValueTest::Dict [GOOD] >> TCdcStreamTests::DropIndexWithStream [GOOD] >> TCdcStreamTests::DropTableWithIndexWithStream >> EraseRowsTests::ConditionalEraseRowsShouldErase [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldBreakLocks |91.7%| [TA] $(B)/ydb/core/ydb_convert/ut/test-results/unittest/{meta.json ... results_accumulator.log} |91.7%| [TA] {RESULT} $(B)/ydb/core/ydb_convert/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64NanoSeconds [GOOD] Test command err: 2025-03-31T16:25:07.873166Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:25:07.873380Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:25:07.873543Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00125e/r3tmp/tmpOp6S8y/pdisk_1.dat 2025-03-31T16:25:08.165623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:25:08.199814Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:25:08.237274Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:08.237377Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:08.248554Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:25:08.328224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:25:08.359866Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-31T16:25:08.360140Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:25:08.400437Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:25:08.400539Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:25:08.401920Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:25:08.402004Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:25:08.402050Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:25:08.402413Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:25:08.402555Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:25:08.402639Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-31T16:25:08.413407Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:25:08.439696Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T16:25:08.439903Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:25:08.439992Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-31T16:25:08.440019Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:25:08.440046Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T16:25:08.440074Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:08.440414Z node 1 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T16:25:08.440487Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T16:25:08.440575Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:08.440613Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:08.440641Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:25:08.440689Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:08.440749Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-31T16:25:08.440892Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:25:08.441081Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T16:25:08.441161Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T16:25:08.443009Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:08.453648Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:25:08.453764Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-31T16:25:08.603161Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-31T16:25:08.608222Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T16:25:08.608313Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:08.608909Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:08.608956Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:25:08.609010Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-31T16:25:08.609537Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-31T16:25:08.609717Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:25:08.610382Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:08.610489Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-31T16:25:08.612793Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-31T16:25:08.613251Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-03-31T16:25:08.615365Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-03-31T16:25:08.615436Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:08.616364Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-03-31T16:25:08.616453Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:08.617436Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:08.617497Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:25:08.617554Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-31T16:25:08.617623Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-31T16:25:08.617693Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-31T16:25:08.617806Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:08.621849Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:08.624008Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-03-31T16:25:08.624221Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-03-31T16:25:08.624280Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-31T16:25:08.634445Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:08.634571Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:747:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:08.634644Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:08.640568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:25:08.646998Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:08.795568Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:08.798854Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:25:08.868958Z node 1 :TX_PROXY ERROR: Actor# [1:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:25:09.133689Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqpgas8r6p5w68ahtvyk3tbq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzMyYzczZDMtZDg1NzEzYTAtMmRjNjk4YTEtNDRlNDlhYTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:25:09.138977Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:856:2687], serverId# [1:857:2688], sessionId# [0:0:0] 2025-03-31T16:25:09.139397Z node 1 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888 2025-03-31T16:25:09.139572Z node 1 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row count=4 ... 025-03-31T16:25:13.180796Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:13.180839Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:25:13.180886Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:13.181322Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:663:2568], serverId# [2:674:2575], sessionId# [0:0:0] 2025-03-31T16:25:13.181506Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:25:13.181717Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T16:25:13.181818Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T16:25:13.183705Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:13.194427Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:25:13.194560Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-31T16:25:13.343960Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:703:2593], serverId# [2:705:2595], sessionId# [0:0:0] 2025-03-31T16:25:13.344897Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T16:25:13.344949Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:13.345191Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:13.345229Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:25:13.345266Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-31T16:25:13.345466Z node 2 :TX_DATASHARD DEBUG: 
LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-31T16:25:13.345578Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:25:13.346033Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:13.346094Z node 2 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-31T16:25:13.346469Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-31T16:25:13.346764Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:13.347959Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-03-31T16:25:13.348004Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:13.348718Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-03-31T16:25:13.348791Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:13.349171Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:13.349201Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:25:13.349249Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-31T16:25:13.349304Z node 2 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:410:2405], exec latency: 0 ms, propose latency: 0 ms 2025-03-31T16:25:13.349344Z node 2 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-31T16:25:13.349411Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:13.350542Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:13.351930Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-03-31T16:25:13.352134Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-03-31T16:25:13.352210Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-31T16:25:13.358985Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:13.359070Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:748:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:13.359157Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:13.363165Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:25:13.367829Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:13.517516Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:13.521365Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:25:13.556653Z node 2 :TX_PROXY ERROR: Actor# [2:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:25:13.635564Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqpgaxwd5v6s3hfbdrs6y6pt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MWFiODNkZmItYjUzZjA1NGMtYWUzZGNhYTAtYTk0YjA3N2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:25:13.638368Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:856:2687], serverId# [2:857:2688], sessionId# [0:0:0] 2025-03-31T16:25:13.638818Z node 2 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888 2025-03-31T16:25:13.639008Z node 2 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row count=4 2025-03-31T16:25:13.650132Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:13.655118Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:864:2694], serverId# [2:865:2695], sessionId# [0:0:0] 2025-03-31T16:25:13.656193Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-03-31T16:25:13.667521Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-03-31T16:25:13.667616Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:13.667866Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-03-31T16:25:13.667909Z node 2 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 3, at: 72075186224037888 2025-03-31T16:25:13.668157Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:13.668204Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:13.668278Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:25:13.668354Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:13.668477Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:864:2694], serverId# [2:865:2695], sessionId# [0:0:0] 2025-03-31T16:25:13.669470Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:25:13.669864Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:25:13.670066Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:13.670114Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-03-31T16:25:13.670160Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 2025-03-31T16:25:13.670399Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-03-31T16:25:13.670461Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 
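
The KQP_WORKLOAD_SERVICE warnings and TX_PROXY error around the default resource pool (seen identically on node 1 and node 2) describe a benign bootstrap race rather than a failure: several actors fetch the pool before it exists (NOT_FOUND), one of them creates it, a concurrent creator observes "path exist, request accepts it", and the winner schedules a "doublechecking" retry once the transaction completes. A minimal sketch of that idempotent ensure-then-recheck pattern, with hypothetical helpers standing in for the real scheme operations:

    // Illustrative sketch: idempotent "ensure default pool" with a bounded
    // retry, mirroring the NOT_FOUND -> create -> "path exist" -> recheck
    // pattern in the log. createPool/ensureDefaultPool are hypothetical.
    #include <iostream>

    enum class Status { Ok, NotFound, AlreadyExists };

    Status createPool(bool& storage) {              // stand-in for the scheme op
        if (storage) return Status::AlreadyExists;  // "path exist, request accepts it"
        storage = true;
        return Status::Ok;
    }

    bool ensureDefaultPool(bool& storage, int maxRetries = 3) {
        for (int attempt = 0; attempt < maxRetries; ++attempt) {
            if (storage) return true;               // fetch succeeded, pool visible
            std::cout << "pool not found, creating (attempt " << attempt << ")\n";
            Status st = createPool(storage);
            if (st == Status::Ok || st == Status::AlreadyExists) {
                // "Scheduled retry for error: Transaction ... completed,
                // doublechecking" -- re-read on the next pass to confirm.
                continue;
            }
        }
        return storage;
    }

    int main() {
        bool poolRow = false;  // simulated metadata row
        std::cout << (ensureDefaultPool(poolRow) ? "ready\n" : "failed\n");
    }

Either outcome of the race converges on the same state, which is why these lines are logged at WARN rather than failing the test.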
2025-03-31T16:25:13.671129Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2025-03-31T16:25:13.671381Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-03-31T16:25:13.671526Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2025-03-31T16:25:13.671580Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2025-03-31T16:25:13.698100Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-03-31T16:25:13.698180Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715661, at: 72075186224037888 2025-03-31T16:25:13.698611Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:13.698662Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-03-31T16:25:13.698706Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2025-03-31T16:25:13.698847Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:13.698920Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:13.698969Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ExportWithLostAnswer [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=saved_at;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143438865.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=143438865.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123438865.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=123438865.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123437665.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=123437665.000000s;Name=;Codec=}; 2025-03-31T16:24:27.587932Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:24:27.684155Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:24:27.709264Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:24:27.709588Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:24:27.718079Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 
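
The read-table scan at the start of this block shows per-message flow control: the scan receives a quota grant (MessageQuota: 1), sends one data message, which consumes the quota and leaves PendingAcks: 1 and MessageQuota: 0, waits for the stream data ack (PendingAcks: 0), and only then finishes. A minimal sender-side sketch of that quota ledger, assuming a one-message grant as in the log; the class is hypothetical, not YDB's scan actor:

    // Illustrative sketch of sender-side message-quota accounting: quota
    // grants permit sends, each send consumes quota and adds a pending ack,
    // and acks retire pending sends. Hypothetical, not a YDB API.
    #include <cassert>
    #include <cstdint>
    #include <iostream>

    class QuotaLedger {
    public:
        void Grant(uint64_t n) { quota_ += n; }    // "Got quota ... MessageQuota: 1"

        bool TrySend() {
            if (quota_ == 0) return false;         // must wait for another grant
            --quota_;
            ++pendingAcks_;                        // "PendingAcks: 1, MessageQuota: 0"
            return true;
        }

        void OnAck() {
            assert(pendingAcks_ > 0);
            --pendingAcks_;                        // "Got stream data ack ... PendingAcks: 0"
        }

        bool CanFinish() const { return pendingAcks_ == 0; }

    private:
        uint64_t quota_ = 0;
        uint64_t pendingAcks_ = 0;
    };

    int main() {
        QuotaLedger scan;
        scan.Grant(1);
        if (scan.TrySend()) std::cout << "sent one data message\n";
        scan.OnAck();
        if (scan.CanFinish()) std::cout << "finish scan\n";  // "Finish scan ... MessageQuota: 0"
    }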
2025-03-31T16:24:27.718288Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:24:27.718519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:24:27.718634Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:24:27.718757Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:24:27.718884Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:24:27.718984Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:24:27.719113Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:24:27.719239Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:24:27.719360Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:24:27.719466Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:24:27.719560Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:24:27.748506Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:24:27.748658Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:24:27.748720Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:24:27.748875Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:24:27.749050Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:24:27.749129Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:24:27.749178Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:24:27.749276Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:24:27.749355Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:24:27.749398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:24:27.749427Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:24:27.749598Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:24:27.749671Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:24:27.749715Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:24:27.749745Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:24:27.749826Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:24:27.749888Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:24:27.749932Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:24:27.749959Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:24:27.750026Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:24:27.750061Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:24:27.750087Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:24:27.750141Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 
2025-03-31T16:24:27.750177Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:24:27.750209Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:24:27.750602Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=64; 2025-03-31T16:24:27.750699Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=36; 2025-03-31T16:24:27.750770Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=34; 2025-03-31T16:24:27.750867Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=37; 2025-03-31T16:24:27.751025Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:24:27.751106Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:24:27.751141Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:24:27.751362Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:24:27.751414Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:24:27.751446Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:24:27.751617Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:24:27.751671Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:24:27.751713Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-31T16:24:27.751891Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normaliz ... 
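
The TX_COLUMNSHARD lines above walk a fixed, ordered chain of startup normalizers (Granules, Chunks, TablesCleaner, CleanGranuleId, CleanInsertionDedup, GCCountersNormalizer, RestorePortionFromChunks, SyncPortionFromChunks, SyncMinSnapshotFromChunks, RestoreV1Chunks_V2, ...), matching the reported normalizers_count=11: each one is switched to, executed (here finding "0 chunks found"), and marked finished before the next begins. A minimal sketch of such a sequential normalizer runner, with hypothetical types:

    // Illustrative sketch of a sequential normalizer chain like the one the
    // columnshard log walks through at TTxUpdateSchema: registered in a fixed
    // order, run once each, completion reported before the handoff. The
    // runner below is hypothetical, not YDB code.
    #include <functional>
    #include <iostream>
    #include <string>
    #include <vector>

    struct Normalizer {
        std::string name;
        std::function<int()> run;  // returns the number of repaired items
    };

    void runChain(const std::vector<Normalizer>& chain) {
        for (const Normalizer& n : chain) {
            std::cout << "normalizer_switched: " << n.name << "\n";
            int fixed = n.run();
            std::cout << "normalizer_finished: " << n.name
                      << "; " << fixed << " chunks found\n";
        }
    }

    int main() {
        std::vector<Normalizer> chain = {
            {"Granules",      [] { return 0; }},
            {"Chunks",        [] { return 0; }},
            {"TablesCleaner", [] { return 0; }},
            // ...the real log lists 11 normalizers in a fixed order.
        };
        runChain(chain);
    }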
"l":1743438313889543,"d":523},{"name":"task_result","f":1743438313400596,"d_finished":181201,"c":28,"l":1743438313886448,"d":181201}],"id":"9437184::7"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1284:3291]->[1:1283:3290] 2025-03-31T16:25:13.889942Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1284:3291];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-31T16:25:13.391822Z;index_granules=0;index_portions=4;index_batches=1731;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=5203504;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=5203504;selected_rows=0; 2025-03-31T16:25:13.889975Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1284:3291];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-31T16:25:13.890193Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:1284:3291];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-03-31T16:25:13.891763Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 7 at tablet 9437184 2025-03-31T16:25:13.891978Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000006:max} readable: {1000000006:max} at tablet 9437184 2025-03-31T16:25:13.892083Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-03-31T16:25:13.892218Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000006:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-03-31T16:25:13.892268Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000006:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-03-31T16:25:13.892650Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000006:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2025-03-31T16:25:13.892748Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000006:max};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:136;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-03-31T16:25:13.893278Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000006:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:162;event=TTxScan started;actor_id=[1:1300:3307];trace_detailed=; 2025-03-31T16:25:13.893850Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;fline=context.cpp:84;ff_first=(column_ids=1;column_names=timestamp;);; 2025-03-31T16:25:13.894119Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;fline=context.cpp:99;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-03-31T16:25:13.894345Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:25:13.894507Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:25:13.894885Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-31T16:25:13.895019Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 
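
TTxScan parses the pushed-down program "Command { Projection { Columns { Id: 1 } } }" into the three-node graph printed as JSON above: FetchOriginalData for column 1 ("timestamp") feeds AssembleOriginalData, which feeds the final Projection; the "w" fields read like per-node cost weights. A minimal reconstruction of that graph, using hypothetical stand-in types rather than YDB's program model:

    // Illustrative sketch of the parsed scan program shown in the log:
    // FetchOriginalData -> AssembleOriginalData -> Projection, mirroring the
    // JSON "edges"/"nodes" layout. Types here are hypothetical stand-ins.
    #include <iostream>
    #include <string>
    #include <vector>

    struct ProgramNode {
        int id;
        std::string op;          // "FetchOriginalData", "AssembleOriginalData", ...
        std::vector<int> inputs; // upstream node ids ("inputs":[{"from":...}])
        int weight;              // the "w" field; looks like a cost estimate
    };

    int main() {
        // Reconstructed from the log's program_parsed output for column id 1.
        std::vector<ProgramNode> nodes = {
            {1, "FetchOriginalData",    {},  2},
            {2, "AssembleOriginalData", {1}, 7},
            {0, "Projection",           {2}, 7},
        };
        for (const auto& n : nodes) {
            std::cout << n.op << " (id=" << n.id << ", w=" << n.weight << ")";
            if (!n.inputs.empty()) std::cout << " <- node " << n.inputs[0];
            std::cout << "\n";
        }
    }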
2025-03-31T16:25:13.895189Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:25:13.895245Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1300:3307] finished for tablet 9437184 2025-03-31T16:25:13.895798Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:416;event=scan_finish;compute_actor_id=[1:1299:3306];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","f_ProduceResults"],"t":0.001},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.002}],"full":{"a":1743438313893198,"name":"_full_task","f":1743438313893198,"d_finished":0,"c":0,"l":1743438313895323,"d":2125},"events":[{"name":"bootstrap","f":1743438313893505,"d_finished":1049,"c":1,"l":1743438313894554,"d":1049},{"a":1743438313894855,"name":"ack","f":1743438313894855,"d_finished":0,"c":0,"l":1743438313895323,"d":468},{"a":1743438313894829,"name":"processing","f":1743438313894829,"d_finished":0,"c":0,"l":1743438313895323,"d":494},{"name":"ProduceResults","f":1743438313894244,"d_finished":609,"c":2,"l":1743438313895225,"d":609},{"a":1743438313895231,"name":"Finish","f":1743438313895231,"d_finished":0,"c":0,"l":1743438313895323,"d":92}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:25:13.895900Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:1299:3306];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-31T16:25:13.896394Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:371;event=scan_finished;compute_actor_id=[1:1299:3306];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","f_ProduceResults"],"t":0.001},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.002}],"full":{"a":1743438313893198,"name":"_full_task","f":1743438313893198,"d_finished":0,"c":0,"l":1743438313895956,"d":2758},"events":[{"name":"bootstrap","f":1743438313893505,"d_finished":1049,"c":1,"l":1743438313894554,"d":1049},{"a":1743438313894855,"name":"ack","f":1743438313894855,"d_finished":0,"c":0,"l":1743438313895956,"d":1101},{"a":1743438313894829,"name":"processing","f":1743438313894829,"d_finished":0,"c":0,"l":1743438313895956,"d":1127},{"name":"ProduceResults","f":1743438313894244,"d_finished":609,"c":2,"l":1743438313895225,"d":609},{"a":1743438313895231,"name":"Finish","f":1743438313895231,"d_finished":0,"c":0,"l":1743438313895956,"d":725}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1300:3307]->[1:1299:3306] 2025-03-31T16:25:13.896516Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-31T16:25:13.892715Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-03-31T16:25:13.896576Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-31T16:25:13.896710Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 160000/10402332 160000/10402332 80000/5203504 0/0 >> TCdcStreamTests::DropTableWithIndexWithStream [GOOD] >> DistributedEraseTests::ConditionalEraseRowsCheckLimits [GOOD] >> DistributedEraseTests::ConditionalEraseRowsAsyncIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_cdc_stream/unittest >> TCdcStreamTests::DropTableWithIndexWithStream [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] 
recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:23:00.020011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:23:00.020242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:23:00.020289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:23:00.020332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:23:00.021621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:23:00.021696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:23:00.021787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:23:00.021887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:23:00.023473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:23:00.104314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:23:00.104376Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:23:00.111411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:23:00.111518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:23:00.111678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:23:00.119719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:23:00.120099Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:23:00.124935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:23:00.126176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:23:00.131459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:23:00.141106Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:23:00.141278Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:23:00.142308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:23:00.142364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:23:00.142440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:23:00.142583Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:23:00.148963Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:23:00.246623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:23:00.249241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:23:00.250637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:23:00.252602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:23:00.252704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:23:00.256282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:23:00.256448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:23:00.256678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:23:00.256760Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:23:00.256808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:23:00.256875Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:23:00.259388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:23:00.259471Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:23:00.259514Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:23:00.261821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:23:00.261872Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:23:00.261927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:23:00.261993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:23:00.266954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 
IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:23:00.269483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:23:00.270412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:23:00.271430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:23:00.271555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:23:00.271614Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:23:00.273672Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:23:00.273725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:23:00.274826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:23:00.274902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:23:00.277930Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:23:00.277993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:23:00.278204Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:23:00.278245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:23:00.278480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:23:00.278604Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:23:00.278725Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:23:00.278760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:23:00.278802Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:23:00.278847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:23:00.278892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:23:00.278951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:23:00.278994Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:23:00.279027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:23:00.279149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:23:00.279207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:23:00.279249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:23:00.281284Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:23:00.281416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:23:00.281454Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 4Z node 19 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-03-31T16:25:15.893593Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-03-31T16:25:15.893613Z node 19 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-03-31T16:25:15.894734Z node 19 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-03-31T16:25:15.894796Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-03-31T16:25:15.894816Z node 19 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-03-31T16:25:15.894976Z node 19 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-03-31T16:25:15.895020Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-03-31T16:25:15.895035Z node 19 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-03-31T16:25:15.895053Z node 19 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-03-31T16:25:15.895077Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 5 2025-03-31T16:25:15.895167Z node 19 :FLAT_TX_SCHEMESHARD 
DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 3/5, is published: true 2025-03-31T16:25:15.896139Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:2, at schemeshard: 72057594046678944 2025-03-31T16:25:15.896206Z node 19 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 103:2 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:25:15.896591Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-03-31T16:25:15.896798Z node 19 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:2 progress is 4/5 2025-03-31T16:25:15.896856Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 4/5 2025-03-31T16:25:15.896918Z node 19 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:2 progress is 4/5 2025-03-31T16:25:15.896979Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 4/5 2025-03-31T16:25:15.897044Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 4/5, is published: true 2025-03-31T16:25:15.898450Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-31T16:25:15.898495Z node 19 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 103:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:25:15.898734Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-31T16:25:15.898838Z node 19 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 5/5 2025-03-31T16:25:15.898863Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 5/5 2025-03-31T16:25:15.898897Z node 19 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 5/5 2025-03-31T16:25:15.898923Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 5/5 2025-03-31T16:25:15.898952Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 5/5, is published: true 2025-03-31T16:25:15.899032Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [19:380:2348] message: TxId: 103 2025-03-31T16:25:15.899125Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 5/5 2025-03-31T16:25:15.899209Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-03-31T16:25:15.899265Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-03-31T16:25:15.899392Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-31T16:25:15.899453Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:1 2025-03-31T16:25:15.899472Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:1 2025-03-31T16:25:15.899494Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-31T16:25:15.899512Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:2 2025-03-31T16:25:15.899529Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:2 2025-03-31T16:25:15.899556Z node 19 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-03-31T16:25:15.899575Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:3 2025-03-31T16:25:15.899592Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:3 2025-03-31T16:25:15.899612Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-03-31T16:25:15.899630Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:4 2025-03-31T16:25:15.899646Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:4 2025-03-31T16:25:15.899689Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 1 2025-03-31T16:25:15.900164Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-31T16:25:15.900236Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 6], at schemeshard: 72057594046678944 2025-03-31T16:25:15.900326Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-03-31T16:25:15.900398Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-03-31T16:25:15.900428Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-03-31T16:25:15.901411Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-03-31T16:25:15.901476Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-03-31T16:25:15.901545Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-03-31T16:25:15.901572Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-03-31T16:25:15.901610Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-03-31T16:25:15.904136Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-03-31T16:25:15.904445Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-03-31T16:25:15.904529Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [19:758:2661] 2025-03-31T16:25:15.904697Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 103 2025-03-31T16:25:15.905443Z node 19 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index/indexImplTable/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 
2025-03-31T16:25:15.905821Z node 19 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Index/indexImplTable/Stream" took 429us result status StatusPathDoesNotExist 2025-03-31T16:25:15.906029Z node 19 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/Index/indexImplTable/Stream\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table/Index/indexImplTable\' (id: [OwnerId: 72057594046678944, LocalPathId: 4])" Path: "/MyRoot/Table/Index/indexImplTable/Stream" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-03-31T16:25:15.906688Z node 19 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index/indexImplTable/Stream/streamImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-31T16:25:15.907010Z node 19 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Index/indexImplTable/Stream/streamImpl" took 358us result status StatusPathDoesNotExist 2025-03-31T16:25:15.907204Z node 19 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/Index/indexImplTable/Stream/streamImpl\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table/Index/indexImplTable\' (id: [OwnerId: 72057594046678944, LocalPathId: 4])" Path: "/MyRoot/Table/Index/indexImplTable/Stream/streamImpl" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |91.7%| [TA] $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/test-results/unittest/{meta.json ... results_accumulator.log} |91.7%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/test-results/unittest/{meta.json ... 
results_accumulator.log} >> SystemView::AuthGroups >> SystemView::StoragePoolsFields >> SystemView::AuthGroups_TableRange >> SystemView::ConcurrentScans >> DistributedEraseTests::ConditionalEraseRowsShouldNotEraseModifiedRows [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldNotFailOnMissingRows >> EraseRowsTests::ConditionalEraseRowsShouldBreakLocks [GOOD] |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |91.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |91.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |91.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |91.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest >> AsyncIndexChangeCollector::InsertSingleRow >> AsyncIndexChangeCollector::DeleteNothing >> CdcStreamChangeCollector::UpsertManyRows >> AsyncIndexChangeCollector::UpsertToSameKey >> CdcStreamChangeCollector::UpsertToSameKey >> AsyncIndexChangeCollector::UpsertSingleRow |91.8%| [TA] $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/test-results/unittest/{meta.json ... results_accumulator.log} |91.8%| [TA] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldBreakLocks [GOOD] Test command err: 2025-03-31T16:25:09.440564Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:25:09.440830Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:25:09.441000Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001250/r3tmp/tmpX0IWry/pdisk_1.dat 2025-03-31T16:25:09.757394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:25:09.801421Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:25:09.841361Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:09.841533Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:09.853154Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:25:09.937978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:25:10.001732Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:687:2586] 2025-03-31T16:25:10.002072Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:25:10.066629Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:25:10.066898Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:25:10.069060Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:25:10.069154Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:25:10.069263Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:25:10.069717Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:25:10.070179Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:25:10.070259Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:718:2586] in generation 1 2025-03-31T16:25:10.072245Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:691:2588] 2025-03-31T16:25:10.072482Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:25:10.084650Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:25:10.084848Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:25:10.086512Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-03-31T16:25:10.086606Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-03-31T16:25:10.086693Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-03-31T16:25:10.087034Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:25:10.087327Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:25:10.087424Z node 1 :TX_DATASHARD DEBUG: DataShard 
72075186224037889 persisting started state actor id [1:731:2588] in generation 1 2025-03-31T16:25:10.088986Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:695:2590] 2025-03-31T16:25:10.089216Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:25:10.100808Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:25:10.100946Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:25:10.102542Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2025-03-31T16:25:10.102654Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037890 2025-03-31T16:25:10.102714Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037890 2025-03-31T16:25:10.103057Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:25:10.103223Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:25:10.103297Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037890 persisting started state actor id [1:739:2590] in generation 1 2025-03-31T16:25:10.114489Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:25:10.145358Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-03-31T16:25:10.145540Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:25:10.145661Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:743:2617] 2025-03-31T16:25:10.145686Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-03-31T16:25:10.145713Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-03-31T16:25:10.145739Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-31T16:25:10.146068Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:25:10.146107Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037890 2025-03-31T16:25:10.146172Z node 1 :TX_DATASHARD DEBUG: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:25:10.146270Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037890, actorId: [1:744:2618] 2025-03-31T16:25:10.146296Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890 2025-03-31T16:25:10.146326Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2025-03-31T16:25:10.146345Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-03-31T16:25:10.146584Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:25:10.146605Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T16:25:10.146644Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:25:10.146693Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:745:2619] 2025-03-31T16:25:10.146712Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:25:10.146725Z node 1 :TX_DATASHARD INFO: 
Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T16:25:10.146740Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:10.146779Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-03-31T16:25:10.146851Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-03-31T16:25:10.146996Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-31T16:25:10.147029Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:10.147060Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-03-31T16:25:10.147117Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-03-31T16:25:10.147299Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:679:2582], serverId# [1:713:2602], sessionId# [0:0:0] 2025-03-31T16:25:10.147338Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037890 2025-03-31T16:25:10.147374Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037890 2025-03-31T16:25:10.147416Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T16:25:10.147449Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T16:25:10.147552Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-31T16:25:10.147763Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-03-31T16:25:10.147838Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-03-31T16:25:10.148230Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-03-31T16:25:10.148266Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:10.148287Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037890 TxInFly 0 2025-03-31T16:25:10.148311Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-03-31T16:25:10.148674Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:10.148696Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:10.148712Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:25:10.148734Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:10.148790Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:678:2581], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-31T16:25:10.149026Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:25:10.149155Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T16:25:10.149248Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T16:25:10.150746Z node 1 :TX_DATASHARD DEBUG: Discovered 
subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:10.150814Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-03-31T16:25:10.161554Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:25:10.161638Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-31T16:25:10.162114Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-03-31T16:25:10.162162Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-03-31T16:25:10.205087Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [1:680:2583], serverId# [1:762:2629], sessionId# [0:0:0] 2025-03-31T16:25:10.205285Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037890 2025-03-31T16:25:10.205547Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037890 txId 2 ... ode 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:17.587306Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:25:17.587369Z node 3 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-31T16:25:17.587558Z node 3 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-31T16:25:17.587678Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:25:17.588198Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:17.588258Z node 3 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-31T16:25:17.588588Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-31T16:25:17.588890Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:17.590267Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-03-31T16:25:17.590313Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:17.591099Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-03-31T16:25:17.591179Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:17.592149Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:17.592194Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:25:17.592234Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-31T16:25:17.592290Z node 3 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:409:2404], exec latency: 0 ms, propose latency: 0 ms 
2025-03-31T16:25:17.592330Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-31T16:25:17.592415Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:17.593061Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:17.594794Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-03-31T16:25:17.594855Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-31T16:25:17.595549Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-03-31T16:25:17.601736Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:17.601823Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:747:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:17.601870Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:17.606179Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:25:17.612222Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:17.758342Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:17.760834Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:25:17.795200Z node 3 :TX_PROXY ERROR: Actor# [3:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:25:17.856416Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqpgb2106gdabynxphw954z7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MWE3NTg4NTQtZGQ2Mjk1ZjgtMmNmNDVhY2UtYWIxYWU5YzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:25:17.858786Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:856:2687], serverId# [3:857:2688], sessionId# [0:0:0] 2025-03-31T16:25:17.859114Z node 3 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888 2025-03-31T16:25:17.859262Z node 3 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row count=3 2025-03-31T16:25:17.870152Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:18.063073Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqpgb29z8jjhc8bstva7bdrm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZDA5NWY4ZDAtYTI4MzJiNC0xNmFhYzgxOS0xNDU4MzZhYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:25:18.065811Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 281474976715661, counter# 0 for [OwnerId: 72057594046644480, LocalPathId: 2] { items { uint64_value: 0 } } 2025-03-31T16:25:18.072067Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:895:2718], serverId# [3:896:2719], sessionId# [0:0:0] 2025-03-31T16:25:18.072888Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-03-31T16:25:18.084020Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-03-31T16:25:18.084088Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:18.084142Z node 3 :TX_DATASHARD DEBUG: Waiting for PlanStep# 1501 from mediator time cast 2025-03-31T16:25:18.084765Z node 3 :TX_DATASHARD DEBUG: Notified by mediator time cast with PlanStep# 1501 at tablet 72075186224037888 2025-03-31T16:25:18.084817Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:18.084993Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-03-31T16:25:18.085026Z node 3 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 4, at: 72075186224037888 2025-03-31T16:25:18.085186Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:18.085220Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:18.085268Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:25:18.085312Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:18.085424Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [3:895:2718], serverId# [3:896:2719], sessionId# 
[0:0:0] 2025-03-31T16:25:18.126463Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqpgb2g55waqbpqpa33kkgvn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZDA5NWY4ZDAtYTI4MzJiNC0xNmFhYzgxOS0xNDU4MzZhYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:25:18.128850Z node 3 :TX_DATASHARD DEBUG: Executing write operation for [0:6] at 72075186224037888 2025-03-31T16:25:18.128963Z node 3 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=6; 2025-03-31T16:25:18.135289Z node 3 :TX_DATASHARD INFO: Write transaction 6 at 72075186224037888 has an error: Operation is aborting because locks are not valid 2025-03-31T16:25:18.135496Z node 3 :TX_DATASHARD ERROR: Prepare transaction failed. txid 6 at tablet 72075186224037888 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-03-31T16:25:18.135662Z node 3 :TX_DATASHARD ERROR: Errors while proposing transaction txid 6 at tablet 72075186224037888 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-03-31T16:25:18.135718Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:18.135893Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:917:2693], Table: `/Root/table-1` ([72057594046644480:2:1]), SessionActorId: [3:863:2693]Got LOCKS BROKEN for table `/Root/table-1`. ShardID=72075186224037888, Sink=[3:917:2693].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-03-31T16:25:18.136329Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:910:2693], SessionActorId: [3:863:2693], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/table-1`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[3:863:2693]. isRollback=0 2025-03-31T16:25:18.136649Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZDA5NWY4ZDAtYTI4MzJiNC0xNmFhYzgxOS0xNDU4MzZhYg==, ActorId: [3:863:2693], ActorState: ExecuteState, TraceId: 01jqpgb2g55waqbpqpa33kkgvn, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [3:911:2693] from: [3:910:2693] 2025-03-31T16:25:18.136785Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:911:2693] TxId: 281474976715662. Ctx: { TraceId: 01jqpgb2g55waqbpqpa33kkgvn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZDA5NWY4ZDAtYTI4MzJiNC0xNmFhYzgxOS0xNDU4MzZhYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/table-1`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-03-31T16:25:18.137690Z node 3 :TX_DATASHARD DEBUG: Executing write operation for [0:7] at 72075186224037888 2025-03-31T16:25:18.137786Z node 3 :TX_DATASHARD DEBUG: Skip empty write operation for [0:7] at 72075186224037888 2025-03-31T16:25:18.137970Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:18.138137Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZDA5NWY4ZDAtYTI4MzJiNC0xNmFhYzgxOS0xNDU4MzZhYg==, ActorId: [3:863:2693], ActorState: ExecuteState, TraceId: 01jqpgb2g55waqbpqpa33kkgvn, Create QueryResponse for error on request, msg: >> TColumnShardTestSchema::RebootHotTiersAfterTtl [GOOD] >> TColumnShardTestSchema::ColdTiers [GOOD] |91.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestZeroRange >> TTxLocatorTest::TestSignificantRequestWhenRunReserveTx >> TTxLocatorTest::TestImposibleSize |91.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestAllocateAllByPieces >> TTxLocatorTest::TestAllocateAll ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootHotTiersAfterTtl [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=143438831.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143438831.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143438831.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=143438831.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123438831.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143438831.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=143438831.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123437631.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123438831.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=123438831.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123437631.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123437631.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=123437631.000000s;Name=;Codec=}; 2025-03-31T16:23:52.048071Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:23:52.139659Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 
2025-03-31T16:23:52.165098Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:23:52.165423Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:23:52.173781Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:23:52.174021Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:23:52.174261Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:23:52.174395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:23:52.174536Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:23:52.174658Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:23:52.174761Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:23:52.174872Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:23:52.174991Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:23:52.175150Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:23:52.175271Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:23:52.175379Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:23:52.209841Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:23:52.210006Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:23:52.210091Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:23:52.210272Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:23:52.210454Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:23:52.210529Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:23:52.210570Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:23:52.210663Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:23:52.210724Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:23:52.210769Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:23:52.210805Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:23:52.210985Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:23:52.211060Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:23:52.211128Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:23:52.211162Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:23:52.211253Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:23:52.211307Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:23:52.211355Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:23:52.211388Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:23:52.211461Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:23:52.211496Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:23:52.211528Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:23:52.211586Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:23:52.211631Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:23:52.211666Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:23:52.212060Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=50; 2025-03-31T16:23:52.212149Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=42; 2025-03-31T16:23:52.212223Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=33; 2025-03-31T16:23:52.212335Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=53; 2025-03-31T16:23:52.212550Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:23:52.212612Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:23:52.212653Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:23:52.212845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:23:52.212891Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:23:52.212920Z node 1 :TX_COLUMNSHARD NOTICE: tabl ... 
p:29;EXECUTE:finishLoadingTime=567; 2025-03-31T16:25:19.715111Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=65620; 2025-03-31T16:25:19.727611Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=12399; 2025-03-31T16:25:19.741488Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=12698; 2025-03-31T16:25:19.741630Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=13894; 2025-03-31T16:25:19.741843Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=128; 2025-03-31T16:25:19.741990Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=78; 2025-03-31T16:25:19.742146Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=98; 2025-03-31T16:25:19.742273Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=78; 2025-03-31T16:25:19.759124Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=16746; 2025-03-31T16:25:19.779998Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=20744; 2025-03-31T16:25:19.780160Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=47; 2025-03-31T16:25:19.780276Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=53; 2025-03-31T16:25:19.780348Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=13; 2025-03-31T16:25:19.780404Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=13; 2025-03-31T16:25:19.780455Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=8; 2025-03-31T16:25:19.780540Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=45; 2025-03-31T16:25:19.780596Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=9; 2025-03-31T16:25:19.780697Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=60; 2025-03-31T16:25:19.780757Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=11; 2025-03-31T16:25:19.780838Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=37; 2025-03-31T16:25:19.780943Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=56; 2025-03-31T16:25:19.781364Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=370; 2025-03-31T16:25:19.781414Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=140636; 2025-03-31T16:25:19.781599Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=46800024;raw_bytes=72380025;count=29;records=720000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-03-31T16:25:19.781718Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2585:4466];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-03-31T16:25:19.781777Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2585:4466];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-03-31T16:25:19.781846Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2585:4466];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-03-31T16:25:19.803185Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2585:4466];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=1; 2025-03-31T16:25:19.803389Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-31T16:25:19.803458Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:25:19.803534Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=6; 2025-03-31T16:25:19.803602Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-03-31T16:25:19.803648Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=6;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-31T16:25:19.803700Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-31T16:25:19.803740Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-31T16:25:19.803841Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-31T16:25:19.804388Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:25:19.804482Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;self_id=[1:2625:4499];tablet_id=9437184;parent=[1:2585:4466];fline=manager.cpp:82;event=ask_data;request=request_id=151;1={portions_count=29};; 2025-03-31T16:25:19.805059Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2585:4466];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-03-31T16:25:19.805308Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2585:4466];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-03-31T16:25:19.805341Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-03-31T16:25:19.805388Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-03-31T16:25:19.805436Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2585:4466];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-31T16:25:19.805522Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2585:4466];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:25:19.805584Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2585:4466];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=6; 2025-03-31T16:25:19.805655Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2585:4466];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-03-31T16:25:19.805698Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2585:4466];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=6;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-31T16:25:19.805747Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2585:4466];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-31T16:25:19.805793Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:2585:4466];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-31T16:25:19.805892Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2585:4466];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-31T16:25:19.806520Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2585:4466];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1452;size=29;path_id=1; 2025-03-31T16:25:19.808079Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2585:4466];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1503;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 160000/10402096 160000/10402096 160000/10402096 80000/5203544 0/0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ColdTiers [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143438863.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143438863.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=143438863.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123438863.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143438863.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=143438863.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123437663.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123438863.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=123438863.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123437663.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123437663.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=123437663.000000s;Name=;Codec=}; 2025-03-31T16:24:23.322809Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:24:23.410673Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:24:23.434577Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:24:23.434870Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:24:23.442780Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:24:23.442988Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:24:23.443239Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:24:23.443377Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:24:23.443502Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:24:23.443612Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:24:23.443714Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:24:23.443821Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:24:23.443950Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:24:23.444079Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:24:23.444190Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:24:23.444291Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:24:23.468935Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:24:23.469100Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:24:23.469141Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 
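The trace above registers eleven normalizers (Granules, Chunks, TablesCleaner, CleanGranuleId, ...) during TTxInitSchema, and the entries that follow run them strictly in registration order: normalizer_init, an "N chunks found" message, normalizer_finished, then normalizer_switched to hand off to the next. A minimal C++ sketch of that ordered-pipeline pattern, with hypothetical names (this is not the actual TTxUpdateSchema code):

#include <cstdio>
#include <functional>
#include <string>
#include <vector>

// One entry in the pipeline: a class name as it appears in the log's
// CLASS_NAME= field, plus the work it performs (a no-op in this sketch).
struct Normalizer {
    std::string className;
    std::function<size_t()> run;  // returns the number of chunks it found
};

int main() {
    std::vector<Normalizer> pipeline;
    for (const char* name : {"Granules", "Chunks", "TablesCleaner"}) {
        pipeline.push_back({name, [] { return size_t(0); }});
        std::printf("event=normalizer_register;description=CLASS_NAME=%s;\n", name);
    }
    // Run in registration order; announce the switch to the next normalizer,
    // mirroring the init/finished/switched events in the trace above.
    for (size_t i = 0; i < pipeline.size(); ++i) {
        std::printf("event=normalizer_init;type=%s;\n", pipeline[i].className.c_str());
        std::printf("message=%zu chunks found;\n", pipeline[i].run());
        std::printf("event=normalizer_finished;description=CLASS_NAME=%s;\n",
                    pipeline[i].className.c_str());
        if (i + 1 < pipeline.size()) {
            std::printf("event=normalizer_switched;description=CLASS_NAME=%s;\n",
                        pipeline[i + 1].className.c_str());
        }
    }
    return 0;
}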
2025-03-31T16:24:23.469343Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:24:23.469494Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:24:23.469548Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:24:23.469589Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:24:23.469660Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:24:23.469705Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:24:23.469733Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:24:23.469751Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:24:23.469887Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:24:23.469944Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:24:23.469971Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:24:23.469996Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:24:23.470066Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:24:23.470105Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:24:23.470136Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:24:23.470153Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:24:23.470194Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:24:23.470216Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:24:23.470234Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:24:23.470263Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:24:23.470289Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:24:23.470318Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:24:23.470682Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=67; 2025-03-31T16:24:23.470764Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=33; 2025-03-31T16:24:23.470817Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=24; 2025-03-31T16:24:23.470876Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=29; 2025-03-31T16:24:23.471035Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:24:23.471120Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:24:23.471152Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:24:23.471299Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:24:23.471329Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:24:23.471348Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:24:23.471463Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:24:23.471493Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:4 ... 
=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=4; 2025-03-31T16:25:20.687934Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700007;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-03-31T16:25:20.687985Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-31T16:25:20.688061Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-31T16:25:20.688130Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-31T16:25:20.688272Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-31T16:25:20.688537Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000007:max} readable: {1000000007:max} at tablet 9437184 2025-03-31T16:25:20.688683Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-03-31T16:25:20.688825Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000007:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-03-31T16:25:20.688875Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000007:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-03-31T16:25:20.689223Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000007:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2025-03-31T16:25:20.689292Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000007:max};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:136;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-03-31T16:25:20.689766Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000007:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:162;event=TTxScan started;actor_id=[1:1392:3397];trace_detailed=; 2025-03-31T16:25:20.690262Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:84;ff_first=(column_ids=1;column_names=timestamp;);; 2025-03-31T16:25:20.690537Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:99;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-03-31T16:25:20.690768Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:25:20.690955Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:25:20.691457Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1392:3397];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-31T16:25:20.691601Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1392:3397];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:25:20.691771Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1392:3397];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:25:20.691843Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1392:3397] finished for tablet 9437184 2025-03-31T16:25:20.692451Z node 1 
:TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1392:3397];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:416;event=scan_finish;compute_actor_id=[1:1391:3396];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","f_processing"],"t":0.001},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.002}],"full":{"a":1743438320689694,"name":"_full_task","f":1743438320689694,"d_finished":0,"c":0,"l":1743438320691939,"d":2245},"events":[{"name":"bootstrap","f":1743438320689928,"d_finished":1078,"c":1,"l":1743438320691006,"d":1078},{"a":1743438320691429,"name":"ack","f":1743438320691429,"d_finished":0,"c":0,"l":1743438320691939,"d":510},{"a":1743438320691400,"name":"processing","f":1743438320691400,"d_finished":0,"c":0,"l":1743438320691939,"d":539},{"name":"ProduceResults","f":1743438320690670,"d_finished":649,"c":2,"l":1743438320691817,"d":649},{"a":1743438320691823,"name":"Finish","f":1743438320691823,"d_finished":0,"c":0,"l":1743438320691939,"d":116}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:25:20.692543Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1392:3397];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:1391:3396];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-31T16:25:20.693006Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1392:3397];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:371;event=scan_finished;compute_actor_id=[1:1391:3396];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","f_processing"],"t":0.001},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.002}],"full":{"a":1743438320689694,"name":"_full_task","f":1743438320689694,"d_finished":0,"c":0,"l":1743438320692590,"d":2896},"events":[{"name":"bootstrap","f":1743438320689928,"d_finished":1078,"c":1,"l":1743438320691006,"d":1078},{"a":1743438320691429,"name":"ack","f":1743438320691429,"d_finished":0,"c":0,"l":1743438320692590,"d":1161},{"a":1743438320691400,"name":"processing","f":1743438320691400,"d_finished":0,"c":0,"l":1743438320692590,"d":1190},{"name":"ProduceResults","f":1743438320690670,"d_finished":649,"c":2,"l":1743438320691817,"d":649},{"a":1743438320691823,"name":"Finish","f":1743438320691823,"d_finished":0,"c":0,"l":1743438320692590,"d":767}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1392:3397]->[1:1391:3396] 2025-03-31T16:25:20.693143Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=4;SelfId=[1:1392:3397];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-31T16:25:20.689269Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-03-31T16:25:20.693215Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1392:3397];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-31T16:25:20.693356Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1392:3397];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 160000/10402136 160000/10402136 160000/10402136 80000/5203584 0/0 >> TTxLocatorTest::TestAllocateAllByPieces [GOOD] >> TTxLocatorTest::TestAllocateAll [GOOD] >> TTxLocatorTest::TestZeroRange [GOOD] >> TTxLocatorTest::TestImposibleSize [GOOD] >> TTxLocatorTest::TestSignificantRequestWhenRunReserveTx [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestImposibleSize [GOOD] Test command err: 2025-03-31T16:25:21.679270Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-03-31T16:25:21.680713Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-03-31T16:25:21.683295Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. 
logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-03-31T16:25:21.698417Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.704340Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2025-03-31T16:25:21.726450Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.726656Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.726761Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.726851Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-03-31T16:25:21.726984Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.727116Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2025-03-31T16:25:21.727309Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2025-03-31T16:25:21.729000Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:70:2105] requested range size#281474976710656 2025-03-31T16:25:21.729919Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 0 Reserved from# 0 Reserved to# 0 2025-03-31T16:25:21.733845Z node 1 :TX_ALLOCATOR ERROR: tablet# 72057594046447617 Send to Sender# [1:70:2105] TEvAllocateResult status# IMPOSIBLE expected IMPOSIBLE 2025-03-31T16:25:21.734456Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:74:2108] requested range size#123456 2025-03-31T16:25:21.736557Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.736661Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.736774Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 123456 2025-03-31T16:25:21.736818Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:74:2108] TEvAllocateResult from# 0 to# 123456 expected SUCCESS 2025-03-31T16:25:21.737287Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:78:2112] requested range size#281474976587200 2025-03-31T16:25:21.738054Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 0 Reserved from# 123456 Reserved to# 0 2025-03-31T16:25:21.738102Z node 1 :TX_ALLOCATOR ERROR: tablet# 72057594046447617 Send to Sender# [1:78:2112] TEvAllocateResult status# IMPOSIBLE expected IMPOSIBLE 2025-03-31T16:25:21.738559Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:81:2115] requested range size#246912 2025-03-31T16:25:21.739379Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:1:24576:76:0] Status# OK StatusFlags# { Valid } 
ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.739459Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.739586Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 123456 Reserved to# 370368 2025-03-31T16:25:21.739627Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:81:2115] TEvAllocateResult from# 123456 to# 370368 expected SUCCESS 2025-03-31T16:25:21.740107Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:85:2119] requested range size#281474976340288 2025-03-31T16:25:21.740231Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 0 Reserved from# 370368 Reserved to# 0 2025-03-31T16:25:21.740278Z node 1 :TX_ALLOCATOR ERROR: tablet# 72057594046447617 Send to Sender# [1:85:2119] TEvAllocateResult status# IMPOSIBLE expected IMPOSIBLE ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestZeroRange [GOOD] Test command err: 2025-03-31T16:25:21.679418Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-03-31T16:25:21.680688Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-03-31T16:25:21.683331Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-03-31T16:25:21.698413Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.704309Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2025-03-31T16:25:21.726239Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.726406Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.726493Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.726570Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-03-31T16:25:21.726679Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.726761Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2025-03-31T16:25:21.727128Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! 
Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2025-03-31T16:25:21.728880Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:70:2105] requested range size#0 2025-03-31T16:25:21.736579Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.736648Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.736747Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 0 2025-03-31T16:25:21.736791Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:70:2105] TEvAllocateResult from# 0 to# 0 expected SUCCESS ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestAllocateAllByPieces [GOOD] Test command err: 2025-03-31T16:25:21.679593Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-03-31T16:25:21.680718Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-03-31T16:25:21.683294Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-03-31T16:25:21.698514Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.704344Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2025-03-31T16:25:21.725670Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.725859Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.725983Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.726062Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-03-31T16:25:21.726182Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.726278Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2025-03-31T16:25:21.727138Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! 
Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2025-03-31T16:25:21.728906Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:70:2105] requested range size#8796093022207 2025-03-31T16:25:21.736558Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.736625Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.736750Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 8796093022207 2025-03-31T16:25:21.736806Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:70:2105] TEvAllocateResult from# 0 to# 8796093022207 expected SUCCESS 2025-03-31T16:25:21.740658Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:75:2109] requested range size#8796093022207 2025-03-31T16:25:21.741108Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.741209Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.741326Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8796093022207 Reserved to# 17592186044414 2025-03-31T16:25:21.741371Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:75:2109] TEvAllocateResult from# 8796093022207 to# 17592186044414 expected SUCCESS 2025-03-31T16:25:21.741774Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:79:2113] requested range size#8796093022207 2025-03-31T16:25:21.742134Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:5:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.742212Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:5:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.742336Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 17592186044414 Reserved to# 26388279066621 2025-03-31T16:25:21.742379Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:79:2113] TEvAllocateResult from# 17592186044414 to# 26388279066621 expected SUCCESS 2025-03-31T16:25:21.742816Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:83:2117] requested range size#8796093022207 2025-03-31T16:25:21.743163Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:6:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.743229Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:6:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.743341Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 26388279066621 Reserved to# 35184372088828 2025-03-31T16:25:21.743405Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:83:2117] TEvAllocateResult from# 26388279066621 to# 35184372088828 expected 
SUCCESS 2025-03-31T16:25:21.743795Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:87:2121] requested range size#8796093022207 2025-03-31T16:25:21.744158Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:7:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.744227Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:7:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.744297Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 35184372088828 Reserved to# 43980465111035 2025-03-31T16:25:21.744349Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:87:2121] TEvAllocateResult from# 35184372088828 to# 43980465111035 expected SUCCESS 2025-03-31T16:25:21.744754Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:91:2125] requested range size#8796093022207 2025-03-31T16:25:21.745098Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:8:1:24576:74:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.745181Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:8:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.745256Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 43980465111035 Reserved to# 52776558133242 2025-03-31T16:25:21.745329Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:91:2125] TEvAllocateResult from# 43980465111035 to# 52776558133242 expected SUCCESS 2025-03-31T16:25:21.745753Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:95:2129] requested range size#8796093022207 2025-03-31T16:25:21.746052Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:9:1:24576:74:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.746157Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:9:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.746251Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 52776558133242 Reserved to# 61572651155449 2025-03-31T16:25:21.746308Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:95:2129] TEvAllocateResult from# 52776558133242 to# 61572651155449 expected SUCCESS 2025-03-31T16:25:21.746797Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:99:2133] requested range size#8796093022207 2025-03-31T16:25:21.747168Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:10:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.747235Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:10:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.747333Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 61572651155449 Reserved to# 70368744177656 2025-03-31T16:25:21.747388Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:99:2133] TEvAllocateResult from# 61572651155449 to# 70368744177656 expected SUCCESS 
2025-03-31T16:25:21.747949Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:103:2137] requested range size#8796093022207 2025-03-31T16:25:21.748377Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:11:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.748433Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:11:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.748542Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 70368744177656 Reserved to# 79164837199863 2025-03-31T16:25:21.748592Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:103:2137] TEvAllocateResult from# 70368744177656 to# 79164837199863 expected SUCCESS 2025-03-31T16:25:21.749033Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:107:2141] requested range size#8796093022207 2025-03-31T16:25:21.749385Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:12:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.749440Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:12:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.749554Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 79164837199863 Reserved to# 87960930222070 2025-03-31T16:25:21.749588Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:107:2141] TEvAllocateResult from# 79164837199863 to# 87960930222070 expected SUCCESS 2025-03-31T16:25:21.750105Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:111:2145] requested range size#8796093022207 2025-03-31T16:25:21.750431Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:13:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.750518Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:13:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.750609Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 87960930222070 Reserved to# 96757023244277 2025-03-31T16:25:21.750642Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:111:2145] TEvAllocateResult from# 87960930222070 to# 96757023244277 expected SUCCESS 2025-03-31T16:25:21.751160Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:115:2149] requested range size#8796093022207 2025-03-31T16:25:21.751462Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:14:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.751525Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:14:0:0:69:0] Status# OK StatusFla ... 
e 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:151:2185] requested range size#8796093022207 2025-03-31T16:25:21.761833Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:23:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.761900Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:23:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.761974Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 175921860444140 Reserved to# 184717953466347 2025-03-31T16:25:21.762004Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:151:2185] TEvAllocateResult from# 175921860444140 to# 184717953466347 expected SUCCESS 2025-03-31T16:25:21.762735Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:155:2189] requested range size#8796093022207 2025-03-31T16:25:21.763075Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:24:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.763157Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:24:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.763237Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 184717953466347 Reserved to# 193514046488554 2025-03-31T16:25:21.763272Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:155:2189] TEvAllocateResult from# 184717953466347 to# 193514046488554 expected SUCCESS 2025-03-31T16:25:21.763923Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:159:2193] requested range size#8796093022207 2025-03-31T16:25:21.764240Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:25:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.764326Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:25:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.764413Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 193514046488554 Reserved to# 202310139510761 2025-03-31T16:25:21.764459Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:159:2193] TEvAllocateResult from# 193514046488554 to# 202310139510761 expected SUCCESS 2025-03-31T16:25:21.765173Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:163:2197] requested range size#8796093022207 2025-03-31T16:25:21.765476Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:26:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.765564Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:26:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.765661Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 202310139510761 Reserved to# 211106232532968 2025-03-31T16:25:21.765698Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:163:2197] TEvAllocateResult from# 202310139510761 to# 211106232532968 expected SUCCESS 
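The TTxLocatorTest output here pins down the allocator's contract: the tx-id keyspace is [0, 281474976710655] (2^48 - 1), and each TTxReserve either hands out the next contiguous range (the "from#"/"to#" pair) or answers IMPOSIBLE once the request no longer fits. TestAllocateAllByPieces drains the space in 32 pieces of 8796093022207 ids (2^43 - 1) ending at 281474976710624, then a final 31-id piece, after which a 1-id request fails. A minimal C++ sketch that reproduces exactly those numbers (hypothetical code, not the actual TTxReserve implementation):

#include <cstdint>
#include <cstdio>

// Upper bound of the keyspace as it appears in the log: 2^48 - 1.
constexpr uint64_t kMaxTxId = 281474976710655ull;

struct RangeAllocator {
    uint64_t reservedTo = 0;  // the log's "Reserved to#" cursor
    // Returns true and fills the half-open range [from, to) on success;
    // false corresponds to the IMPOSIBLE status in the log.
    bool Reserve(uint64_t size, uint64_t& from, uint64_t& to) {
        if (size > kMaxTxId - reservedTo) return false;
        from = reservedTo;
        to = reservedTo + size;
        reservedTo = to;
        return true;
    }
};

int main() {
    RangeAllocator a;
    uint64_t from = 0, to = 0;
    for (int i = 0; i < 32; ++i) a.Reserve(8796093022207ull, from, to);
    std::printf("reserved to# %llu\n", (unsigned long long)a.reservedTo);  // 281474976710624
    a.Reserve(31, from, to);  // tops out at 281474976710655
    std::printf("size#1 -> %s\n",
                a.Reserve(1, from, to) ? "SUCCESS" : "IMPOSIBLE");  // IMPOSIBLE
    return 0;
}

The same check matches the other tests in this output: a 281474976710656-id request fails up front, and after 123456 ids are taken, 281474976587200 no longer fits while 246912 does (123456 + 246912 = 370368, the "to#" reported for that request).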
2025-03-31T16:25:21.766408Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:167:2201] requested range size#8796093022207 2025-03-31T16:25:21.766710Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:27:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.766785Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:27:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.766893Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 211106232532968 Reserved to# 219902325555175 2025-03-31T16:25:21.766936Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:167:2201] TEvAllocateResult from# 211106232532968 to# 219902325555175 expected SUCCESS 2025-03-31T16:25:21.767733Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:171:2205] requested range size#8796093022207 2025-03-31T16:25:21.768057Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:28:1:24576:75:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.768119Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:28:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.768198Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 219902325555175 Reserved to# 228698418577382 2025-03-31T16:25:21.768232Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:171:2205] TEvAllocateResult from# 219902325555175 to# 228698418577382 expected SUCCESS 2025-03-31T16:25:21.769008Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:175:2209] requested range size#8796093022207 2025-03-31T16:25:21.769349Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:29:1:24576:73:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.769417Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:29:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.769542Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 228698418577382 Reserved to# 237494511599589 2025-03-31T16:25:21.769583Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:175:2209] TEvAllocateResult from# 228698418577382 to# 237494511599589 expected SUCCESS 2025-03-31T16:25:21.770369Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:179:2213] requested range size#8796093022207 2025-03-31T16:25:21.770691Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:30:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.770769Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:30:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.770885Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 237494511599589 Reserved to# 246290604621796 2025-03-31T16:25:21.770942Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:179:2213] TEvAllocateResult from# 237494511599589 to# 246290604621796 expected 
SUCCESS 2025-03-31T16:25:21.771775Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:183:2217] requested range size#8796093022207 2025-03-31T16:25:21.772102Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:31:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.772182Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:31:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.772277Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 246290604621796 Reserved to# 255086697644003 2025-03-31T16:25:21.772314Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:183:2217] TEvAllocateResult from# 246290604621796 to# 255086697644003 expected SUCCESS 2025-03-31T16:25:21.773133Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:187:2221] requested range size#8796093022207 2025-03-31T16:25:21.773468Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:32:1:24576:75:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.773555Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:32:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.773658Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 255086697644003 Reserved to# 263882790666210 2025-03-31T16:25:21.773702Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:187:2221] TEvAllocateResult from# 255086697644003 to# 263882790666210 expected SUCCESS 2025-03-31T16:25:21.774596Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:191:2225] requested range size#8796093022207 2025-03-31T16:25:21.774975Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:33:1:24576:77:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.775044Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:33:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.775153Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 263882790666210 Reserved to# 272678883688417 2025-03-31T16:25:21.775205Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:191:2225] TEvAllocateResult from# 263882790666210 to# 272678883688417 expected SUCCESS 2025-03-31T16:25:21.776062Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:195:2229] requested range size#8796093022207 2025-03-31T16:25:21.776391Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:34:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.776467Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:34:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.776591Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 272678883688417 Reserved to# 281474976710624 2025-03-31T16:25:21.776631Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:195:2229] TEvAllocateResult from# 272678883688417 to# 281474976710624 
expected SUCCESS 2025-03-31T16:25:21.777535Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:199:2233] requested range size#31 2025-03-31T16:25:21.777847Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:35:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.777933Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:35:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.778029Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 281474976710624 Reserved to# 281474976710655 2025-03-31T16:25:21.778067Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:199:2233] TEvAllocateResult from# 281474976710624 to# 281474976710655 expected SUCCESS 2025-03-31T16:25:21.778956Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:203:2237] requested range size#1 2025-03-31T16:25:21.779076Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 0 Reserved from# 281474976710655 Reserved to# 0 2025-03-31T16:25:21.779139Z node 1 :TX_ALLOCATOR ERROR: tablet# 72057594046447617 Send to Sender# [1:203:2237] TEvAllocateResult status# IMPOSIBLE expected IMPOSIBLE ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestAllocateAll [GOOD] Test command err: 2025-03-31T16:25:21.681420Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-03-31T16:25:21.681828Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-03-31T16:25:21.683295Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-03-31T16:25:21.698414Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.704315Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2025-03-31T16:25:21.726069Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.726245Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.726345Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.726422Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-03-31T16:25:21.726538Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.726620Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2025-03-31T16:25:21.727121Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! 
Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2025-03-31T16:25:21.728881Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:70:2105] requested range size#281474976710655 2025-03-31T16:25:21.736488Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.736550Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.736653Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 281474976710655 2025-03-31T16:25:21.736690Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:70:2105] TEvAllocateResult from# 0 to# 281474976710655 expected SUCCESS 2025-03-31T16:25:21.740109Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:75:2109] requested range size#1 2025-03-31T16:25:21.740269Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 0 Reserved from# 281474976710655 Reserved to# 0 2025-03-31T16:25:21.740308Z node 1 :TX_ALLOCATOR ERROR: tablet# 72057594046447617 Send to Sender# [1:75:2109] TEvAllocateResult status# IMPOSIBLE expected IMPOSIBLE ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestSignificantRequestWhenRunReserveTx [GOOD] Test command err: 2025-03-31T16:25:21.679839Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-03-31T16:25:21.680708Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-03-31T16:25:21.683296Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-03-31T16:25:21.698409Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.704318Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2025-03-31T16:25:21.726108Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.726278Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.726373Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.726460Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-03-31T16:25:21.726562Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.726642Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2025-03-31T16:25:21.727113Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! 
Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2025-03-31T16:25:21.729729Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:80:2115] requested range size#100000 2025-03-31T16:25:21.736668Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:82:2117] requested range size#100000 2025-03-31T16:25:21.739340Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:84:2119] requested range size#100000 2025-03-31T16:25:21.739747Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:86:2121] requested range size#100000 2025-03-31T16:25:21.740166Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:88:2123] requested range size#100000 2025-03-31T16:25:21.740589Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:70:2105] requested range size#100000 2025-03-31T16:25:21.740792Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.740990Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.741039Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.741124Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:72:2107] requested range size#100000 2025-03-31T16:25:21.741406Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.741544Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:74:2109] requested range size#100000 2025-03-31T16:25:21.741701Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:5:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.741826Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:5:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.741987Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:6:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.742094Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:6:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.742190Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:76:2111] requested range size#100000 2025-03-31T16:25:21.742368Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:78:2113] requested range size#100000 2025-03-31T16:25:21.742543Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:7:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.742603Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:7:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.742678Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 100000 
2025-03-31T16:25:21.742715Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:80:2115] TEvAllocateResult from# 0 to# 100000 2025-03-31T16:25:21.742865Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 100000 Reserved to# 200000 2025-03-31T16:25:21.742891Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:82:2117] TEvAllocateResult from# 100000 to# 200000 2025-03-31T16:25:21.743011Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:8:1:24576:74:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.743141Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 200000 Reserved to# 300000 2025-03-31T16:25:21.743168Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:84:2119] TEvAllocateResult from# 200000 to# 300000 2025-03-31T16:25:21.743318Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:8:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.743378Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:9:1:24576:74:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.743444Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:9:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.743507Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 300000 Reserved to# 400000 2025-03-31T16:25:21.743533Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:86:2121] TEvAllocateResult from# 300000 to# 400000 2025-03-31T16:25:21.743682Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:10:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.743724Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 400000 Reserved to# 500000 2025-03-31T16:25:21.743746Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:88:2123] TEvAllocateResult from# 400000 to# 500000 2025-03-31T16:25:21.743831Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:10:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.743876Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 500000 Reserved to# 600000 2025-03-31T16:25:21.743896Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:70:2105] TEvAllocateResult from# 500000 to# 600000 2025-03-31T16:25:21.743993Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 600000 Reserved to# 700000 2025-03-31T16:25:21.744027Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:72:2107] TEvAllocateResult from# 600000 to# 700000 2025-03-31T16:25:21.744145Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:11:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.744214Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:11:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.744278Z node 1 :TX_ALLOCATOR DEBUG: tablet# 
72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 700000 Reserved to# 800000 2025-03-31T16:25:21.744302Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:74:2109] TEvAllocateResult from# 700000 to# 800000 2025-03-31T16:25:21.744432Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:12:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.744474Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 800000 Reserved to# 900000 2025-03-31T16:25:21.744498Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:76:2111] TEvAllocateResult from# 800000 to# 900000 2025-03-31T16:25:21.744586Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:12:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.744644Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 900000 Reserved to# 1000000 2025-03-31T16:25:21.744670Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:78:2113] TEvAllocateResult from# 900000 to# 1000000 expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS 2025-03-31T16:25:21.751699Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:117:2151] requested range size#100000 2025-03-31T16:25:21.751997Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:119:2153] requested range size#100000 2025-03-31T16:25:21.752478Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:121:2155] requested range size#100000 2025-03-31T16:25:21.752738Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:123:2157] requested range size#100000 2025-03-31T16:25:21.752890Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:13:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.753184Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:13:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.753289Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:125:2159] requested range size#100000 2025-03-31T16:25:21.753558Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:14:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.753661Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:14:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.753875Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:15:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.753959Z node 1 :TX_ALLOCATOR DEBUG: tablet# ... 
000 2025-03-31T16:25:21.817364Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:90:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.817423Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8300000 Reserved to# 8400000 2025-03-31T16:25:21.817445Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:397:2431] TEvAllocateResult from# 8300000 to# 8400000 2025-03-31T16:25:21.817609Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:90:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.817716Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8400000 Reserved to# 8500000 2025-03-31T16:25:21.817744Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:399:2433] TEvAllocateResult from# 8400000 to# 8500000 2025-03-31T16:25:21.817838Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:91:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.817892Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8500000 Reserved to# 8600000 2025-03-31T16:25:21.817914Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:401:2435] TEvAllocateResult from# 8500000 to# 8600000 2025-03-31T16:25:21.817979Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:91:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.818037Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8600000 Reserved to# 8700000 2025-03-31T16:25:21.818066Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:403:2437] TEvAllocateResult from# 8600000 to# 8700000 2025-03-31T16:25:21.818239Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:92:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.818316Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8700000 Reserved to# 8800000 2025-03-31T16:25:21.818341Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:405:2439] TEvAllocateResult from# 8700000 to# 8800000 2025-03-31T16:25:21.818452Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:92:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.818507Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8800000 Reserved to# 8900000 2025-03-31T16:25:21.818542Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:407:2441] TEvAllocateResult from# 8800000 to# 8900000 2025-03-31T16:25:21.818600Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8900000 Reserved to# 9000000 2025-03-31T16:25:21.818628Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:409:2443] TEvAllocateResult from# 8900000 to# 9000000 expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS 2025-03-31T16:25:21.822979Z node 1 :TX_ALLOCATOR DEBUG: tablet# 
72057594046447617 HANDLE TEvAllocate Sender# [1:431:2465] requested range size#100000 2025-03-31T16:25:21.823446Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:433:2467] requested range size#100000 2025-03-31T16:25:21.823911Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:435:2469] requested range size#100000 2025-03-31T16:25:21.824060Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:93:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.824251Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:93:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.824433Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:437:2471] requested range size#100000 2025-03-31T16:25:21.824716Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:94:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.824887Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:439:2473] requested range size#100000 2025-03-31T16:25:21.825024Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:94:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.825272Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:441:2475] requested range size#100000 2025-03-31T16:25:21.825582Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:95:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.825777Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:443:2477] requested range size#100000 2025-03-31T16:25:21.825912Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:95:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.826097Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:96:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.826196Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:445:2479] requested range size#100000 2025-03-31T16:25:21.826345Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:96:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.826552Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:97:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.826716Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:447:2481] requested range size#100000 2025-03-31T16:25:21.826856Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:97:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.827048Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:98:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.827220Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9000000 Reserved to# 9100000 2025-03-31T16:25:21.827254Z node 1 :TX_ALLOCATOR DEBUG: tablet# 
72057594046447617 Send to Sender# [1:431:2465] TEvAllocateResult from# 9000000 to# 9100000 2025-03-31T16:25:21.827338Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:98:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.827481Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:449:2483] requested range size#100000 2025-03-31T16:25:21.827629Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:99:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.827772Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9100000 Reserved to# 9200000 2025-03-31T16:25:21.827801Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:433:2467] TEvAllocateResult from# 9100000 to# 9200000 2025-03-31T16:25:21.827853Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:99:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.828001Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9200000 Reserved to# 9300000 2025-03-31T16:25:21.828034Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:435:2469] TEvAllocateResult from# 9200000 to# 9300000 2025-03-31T16:25:21.828083Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:100:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.828178Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9300000 Reserved to# 9400000 2025-03-31T16:25:21.828202Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:437:2471] TEvAllocateResult from# 9300000 to# 9400000 2025-03-31T16:25:21.828248Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:100:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.828384Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9400000 Reserved to# 9500000 2025-03-31T16:25:21.828412Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:439:2473] TEvAllocateResult from# 9400000 to# 9500000 2025-03-31T16:25:21.828457Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:101:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.828630Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9500000 Reserved to# 9600000 2025-03-31T16:25:21.828665Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:441:2475] TEvAllocateResult from# 9500000 to# 9600000 2025-03-31T16:25:21.828717Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:101:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.828832Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9600000 Reserved to# 9700000 2025-03-31T16:25:21.828857Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:443:2477] TEvAllocateResult from# 9600000 to# 9700000 2025-03-31T16:25:21.828905Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:102:1:24576:76:0] Status# 
OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.828970Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9700000 Reserved to# 9800000 2025-03-31T16:25:21.828993Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:445:2479] TEvAllocateResult from# 9700000 to# 9800000 2025-03-31T16:25:21.829125Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9800000 Reserved to# 9900000 2025-03-31T16:25:21.829159Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:447:2481] TEvAllocateResult from# 9800000 to# 9900000 2025-03-31T16:25:21.829227Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:102:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:21.829331Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9900000 Reserved to# 10000000 2025-03-31T16:25:21.829354Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:449:2483] TEvAllocateResult from# 9900000 to# 10000000 expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS
>> TTxLocatorTest::TestWithReboot
>> DistributedEraseTests::ConditionalEraseRowsAsyncIndex [GOOD]
>> TTxLocatorTest::TestWithReboot [GOOD]
>> DataShardVolatile::DistributedWriteRSNotAckedBeforeCommit [GOOD]
>> DataShardVolatile::DistributedUpsertRestartBeforePrepare+UseSink
>> SystemView::ConcurrentScans [GOOD]
>> SystemView::Describe
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestWithReboot [GOOD]
Test command err: 2025-03-31T16:25:23.038469Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-03-31T16:25:23.038903Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-03-31T16:25:23.039618Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry.
logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-03-31T16:25:23.041256Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:23.041681Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2025-03-31T16:25:23.049790Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:23.049952Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:23.050028Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:23.050088Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-03-31T16:25:23.050179Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:23.050239Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2025-03-31T16:25:23.050371Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2025-03-31T16:25:23.051763Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:80:2115] requested range size#100000 2025-03-31T16:25:23.052363Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:82:2117] requested range size#100000 2025-03-31T16:25:23.052643Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:84:2119] requested range size#100000 2025-03-31T16:25:23.052875Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:86:2121] requested range size#100000 2025-03-31T16:25:23.053214Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:88:2123] requested range size#100000 2025-03-31T16:25:23.053650Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:70:2105] requested range size#100000 2025-03-31T16:25:23.053842Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:23.053999Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:23.054044Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:23.054120Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:72:2107] requested range size#100000 2025-03-31T16:25:23.054361Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:23.054479Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:74:2109] requested range size#100000 2025-03-31T16:25:23.054627Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:5:1:24576:76:0] Status# OK StatusFlags# { Valid } 
ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:23.054757Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:5:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:23.054913Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:6:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:23.054996Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:6:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:23.055108Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:76:2111] requested range size#100000 2025-03-31T16:25:23.055281Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:78:2113] requested range size#100000 2025-03-31T16:25:23.055451Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:7:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:23.055512Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:7:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:23.055590Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 100000 2025-03-31T16:25:23.055631Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:80:2115] TEvAllocateResult from# 0 to# 100000 2025-03-31T16:25:23.055788Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 100000 Reserved to# 200000 2025-03-31T16:25:23.055813Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:82:2117] TEvAllocateResult from# 100000 to# 200000 2025-03-31T16:25:23.055891Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:8:1:24576:74:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:23.056000Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 200000 Reserved to# 300000 2025-03-31T16:25:23.056027Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:84:2119] TEvAllocateResult from# 200000 to# 300000 2025-03-31T16:25:23.056154Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:8:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:23.056228Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:9:1:24576:74:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:23.056297Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:9:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:23.056357Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 300000 Reserved to# 400000 2025-03-31T16:25:23.056377Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:86:2121] TEvAllocateResult from# 300000 to# 400000 2025-03-31T16:25:23.056506Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:10:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:23.056533Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 400000 Reserved to# 500000 
2025-03-31T16:25:23.056556Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:88:2123] TEvAllocateResult from# 400000 to# 500000 2025-03-31T16:25:23.056604Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:10:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:23.056632Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 500000 Reserved to# 600000 2025-03-31T16:25:23.056645Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:70:2105] TEvAllocateResult from# 500000 to# 600000 2025-03-31T16:25:23.056705Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 600000 Reserved to# 700000 2025-03-31T16:25:23.056720Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:72:2107] TEvAllocateResult from# 600000 to# 700000 2025-03-31T16:25:23.056817Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:11:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:23.056862Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:11:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:23.056916Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 700000 Reserved to# 800000 2025-03-31T16:25:23.056937Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:74:2109] TEvAllocateResult from# 700000 to# 800000 2025-03-31T16:25:23.057022Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:12:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:23.057059Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 800000 Reserved to# 900000 2025-03-31T16:25:23.057082Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:76:2111] TEvAllocateResult from# 800000 to# 900000 2025-03-31T16:25:23.057141Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:12:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:23.057225Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 900000 Reserved to# 1000000 2025-03-31T16:25:23.057249Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:78:2113] TEvAllocateResult from# 900000 to# 1000000 expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS 2025-03-31T16:25:23.061304Z node 1 :TABLET_MAIN NOTICE: Tablet: 72057594046447617 Type: TxAllocator, EReason: ReasonPill, SuggestedGeneration: 0, KnownGeneration: 2 Marker# TSYS31 2025-03-31T16:25:23.062395Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleStateStorageInfoResolve, KnownGeneration: 2 Promote Marker# TSYS16 2025-03-31T16:25:23.063266Z node 1 :TABLET_MAIN DEBUG: TabletId# 72057594046447617 TTabletReqRebuildHistoryGraph::ProcessKeyEntry, LastBlobID: [72057594046447617:2:12:0:0:71:0] Snap: 2:1 for 72057594046447617 Marker# TRRH04 2025-03-31T16:25:23.063353Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:2:12:0:0:71:0], refs: 
[[72057594046447617:2:12:1:24576:76:0],] for 72057594046447617 2025-03-31T16:25:23.063579Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:2:1:0:0:42:0], refs: [[72057594046447617:2:1:1:28672:35:0],] for 72057594046447617 2025-03-31T16:25:23.063642Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:2:2:0:0:71:0], refs: [[72057594046447617:2:2:1:8192:71:0],] for 72057594046447617 2025-03-31T16:25:23.063685Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:2:3:0:0:69:0], refs: [[72057594046447617:2:3:1:24576:70:0],] for 72057594046447617 2025-03-31T16:25:23.063721Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:2:4:0:0:71:0], refs: [[72057594046447617:2:4:1:24576:76:0],] for 720575940 ... TOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9100000 Reserved to# 9200000 2025-03-31T16:25:23.391878Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:617:2548] TEvAllocateResult from# 9100000 to# 9200000 2025-03-31T16:25:23.392023Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:6:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:23.392135Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9200000 Reserved to# 9300000 2025-03-31T16:25:23.392168Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:619:2550] TEvAllocateResult from# 9200000 to# 9300000 2025-03-31T16:25:23.392268Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:7:1:24576:78:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:23.392352Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:7:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:23.392466Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9300000 Reserved to# 9400000 2025-03-31T16:25:23.392492Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:621:2552] TEvAllocateResult from# 9300000 to# 9400000 2025-03-31T16:25:23.392630Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:8:1:24576:75:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:23.392707Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9400000 Reserved to# 9500000 2025-03-31T16:25:23.392737Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:623:2554] TEvAllocateResult from# 9400000 to# 9500000 2025-03-31T16:25:23.392809Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:8:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:23.392886Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9500000 Reserved to# 9600000 2025-03-31T16:25:23.392927Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:625:2556] TEvAllocateResult from# 9500000 to# 9600000 2025-03-31T16:25:23.393042Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# 
[72057594046447617:11:9:1:24576:78:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:23.393104Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9600000 Reserved to# 9700000 2025-03-31T16:25:23.393132Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:627:2558] TEvAllocateResult from# 9600000 to# 9700000 2025-03-31T16:25:23.393250Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:9:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:23.393353Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9700000 Reserved to# 9800000 2025-03-31T16:25:23.393399Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:629:2560] TEvAllocateResult from# 9700000 to# 9800000 2025-03-31T16:25:23.393506Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:10:1:24576:78:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:23.393623Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:10:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:23.393666Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:11:1:24576:72:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:23.393761Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9800000 Reserved to# 9900000 2025-03-31T16:25:23.393792Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:631:2562] TEvAllocateResult from# 9800000 to# 9900000 2025-03-31T16:25:23.393885Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:11:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:23.393957Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9900000 Reserved to# 10000000 2025-03-31T16:25:23.393997Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:633:2564] TEvAllocateResult from# 9900000 to# 10000000 expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS 2025-03-31T16:25:23.398445Z node 1 :TABLET_MAIN NOTICE: Tablet: 72057594046447617 Type: TxAllocator, EReason: ReasonPill, SuggestedGeneration: 0, KnownGeneration: 11 Marker# TSYS31 2025-03-31T16:25:23.399696Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleStateStorageInfoResolve, KnownGeneration: 11 Promote Marker# TSYS16 2025-03-31T16:25:23.400241Z node 1 :TABLET_MAIN DEBUG: TabletId# 72057594046447617 TTabletReqRebuildHistoryGraph::ProcessKeyEntry, LastBlobID: [72057594046447617:11:11:0:0:71:0] Snap: 11:1 for 72057594046447617 Marker# TRRH04 2025-03-31T16:25:23.400290Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:11:0:0:71:0], refs: [[72057594046447617:11:11:1:24576:72:0],] for 72057594046447617 2025-03-31T16:25:23.400402Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:1:0:0:42:0], refs: [[72057594046447617:11:1:1:28672:1483:0],] for 72057594046447617 2025-03-31T16:25:23.400433Z node 1 :TABLET_MAIN 
DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:2:0:0:69:0], refs: [[72057594046447617:11:2:1:24576:76:0],] for 72057594046447617 2025-03-31T16:25:23.400464Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:3:0:0:71:0], refs: [[72057594046447617:11:3:1:24576:78:0],] for 72057594046447617 2025-03-31T16:25:23.400495Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:4:0:0:71:0], refs: [[72057594046447617:11:4:1:24576:75:0],] for 72057594046447617 2025-03-31T16:25:23.400562Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:5:0:0:71:0], refs: [[72057594046447617:11:5:1:24576:78:0],] for 72057594046447617 2025-03-31T16:25:23.400609Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:6:0:0:71:0], refs: [[72057594046447617:11:6:1:24576:78:0],] for 72057594046447617 2025-03-31T16:25:23.400636Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:7:0:0:71:0], refs: [[72057594046447617:11:7:1:24576:78:0],] for 72057594046447617 2025-03-31T16:25:23.400661Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:8:0:0:71:0], refs: [[72057594046447617:11:8:1:24576:75:0],] for 72057594046447617 2025-03-31T16:25:23.400695Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:9:0:0:71:0], refs: [[72057594046447617:11:9:1:24576:78:0],] for 72057594046447617 2025-03-31T16:25:23.400731Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:10:0:0:71:0], refs: [[72057594046447617:11:10:1:24576:78:0],] for 72057594046447617 2025-03-31T16:25:23.400819Z node 1 :TABLET_MAIN DEBUG: TabletId# 72057594046447617 TTabletReqRebuildHistoryGraph::BuildHistory - Process generation 11 from 1 with 11 steps Marker# TRRH09 2025-03-31T16:25:23.400844Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - NOT A TAIL - References: [[72057594046447617:11:1:1:28672:1483:0],] for 72057594046447617 2025-03-31T16:25:23.400865Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - NOT A TAIL - References: [[72057594046447617:11:2:1:24576:76:0],] for 72057594046447617 2025-03-31T16:25:23.400884Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - NOT A TAIL - References: [[72057594046447617:11:3:1:24576:78:0],] for 72057594046447617 2025-03-31T16:25:23.400898Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - NOT A TAIL - References: [[72057594046447617:11:4:1:24576:75:0],] for 72057594046447617 2025-03-31T16:25:23.400926Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [[72057594046447617:11:5:1:24576:78:0],] for 72057594046447617, Gc+: [[72057594046447617:11:5:1:24576:78:0],] 2025-03-31T16:25:23.400952Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [[72057594046447617:11:6:1:24576:78:0],] for 72057594046447617, Gc+: [[72057594046447617:11:6:1:24576:78:0],] 2025-03-31T16:25:23.400982Z 
node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [[72057594046447617:11:7:1:24576:78:0],] for 72057594046447617, Gc+: [[72057594046447617:11:7:1:24576:78:0],] 2025-03-31T16:25:23.401011Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [[72057594046447617:11:8:1:24576:75:0],] for 72057594046447617, Gc+: [[72057594046447617:11:8:1:24576:75:0],] 2025-03-31T16:25:23.401098Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [[72057594046447617:11:9:1:24576:78:0],] for 72057594046447617, Gc+: [[72057594046447617:11:9:1:24576:78:0],] 2025-03-31T16:25:23.401132Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [[72057594046447617:11:10:1:24576:78:0],] for 72057594046447617, Gc+: [[72057594046447617:11:10:1:24576:78:0],] 2025-03-31T16:25:23.401167Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [[72057594046447617:11:11:1:24576:72:0],] for 72057594046447617, Gc+: [[72057594046447617:11:11:1:24576:72:0],] 2025-03-31T16:25:23.401365Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:12:0:0:0:0:0] Marker# TSYS01 2025-03-31T16:25:23.402276Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:12:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:23.404647Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2025-03-31T16:25:23.404813Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2025-03-31T16:25:23.405298Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! Generation: 12, Type: TxAllocator started in 0msec Marker# TSYS24 2025-03-31T16:25:23.405405Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:12:1:1:28672:1639:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:23.405449Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:12:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:23.405493Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 12:0 Marker# TSYS28
>> DistributedEraseTests::ConditionalEraseRowsShouldNotFailOnMissingRows [GOOD]
>> TTxLocatorTest::Boot
>> SystemView::StoragePoolsFields [GOOD]
>> SystemView::StoragePoolsRanges
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> DistributedEraseTests::ConditionalEraseRowsAsyncIndex [GOOD]
Test command err: 2025-03-31T16:25:03.250696Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:25:03.250954Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:25:03.251172Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012b7/r3tmp/tmpbZtUEV/pdisk_1.dat 2025-03-31T16:25:03.603239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:25:03.640687Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:25:03.679708Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:03.679855Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:03.691639Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:25:03.776350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:25:03.843077Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:687:2586] 2025-03-31T16:25:03.843437Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:25:03.914516Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:25:03.914826Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:25:03.917339Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:25:03.917440Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:25:03.917547Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:25:03.918019Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:25:03.918490Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:25:03.918587Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:718:2586] in generation 1 2025-03-31T16:25:03.920626Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:691:2588] 2025-03-31T16:25:03.920917Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:25:03.934334Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:25:03.934552Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:25:03.936393Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-03-31T16:25:03.936478Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-03-31T16:25:03.936561Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-03-31T16:25:03.936974Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:25:03.937337Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:25:03.937462Z node 1 :TX_DATASHARD DEBUG: DataShard 
72075186224037889 persisting started state actor id [1:731:2588] in generation 1 2025-03-31T16:25:03.939253Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:695:2590] 2025-03-31T16:25:03.939595Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:25:03.952418Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:25:03.952578Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:25:03.954245Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2025-03-31T16:25:03.954341Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037890 2025-03-31T16:25:03.954408Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037890 2025-03-31T16:25:03.954842Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:25:03.955006Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:25:03.955118Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037890 persisting started state actor id [1:739:2590] in generation 1 2025-03-31T16:25:03.966603Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:25:04.019275Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-03-31T16:25:04.019543Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:25:04.019715Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:743:2617] 2025-03-31T16:25:04.019765Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-03-31T16:25:04.019807Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-03-31T16:25:04.019849Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-31T16:25:04.020362Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:25:04.020419Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037890 2025-03-31T16:25:04.020532Z node 1 :TX_DATASHARD DEBUG: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:25:04.020630Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037890, actorId: [1:744:2618] 2025-03-31T16:25:04.020662Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890 2025-03-31T16:25:04.020689Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2025-03-31T16:25:04.020720Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-03-31T16:25:04.021115Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:25:04.021157Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T16:25:04.021247Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:25:04.021330Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:745:2619] 2025-03-31T16:25:04.021362Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:25:04.021388Z node 1 :TX_DATASHARD INFO: 
Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T16:25:04.021424Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:04.021558Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-03-31T16:25:04.021690Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-03-31T16:25:04.021993Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-31T16:25:04.022051Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:04.022099Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-03-31T16:25:04.022168Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-03-31T16:25:04.022484Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:679:2582], serverId# [1:713:2602], sessionId# [0:0:0] 2025-03-31T16:25:04.022592Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037890 2025-03-31T16:25:04.022680Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037890 2025-03-31T16:25:04.022804Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T16:25:04.022874Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T16:25:04.023019Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-31T16:25:04.023347Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-03-31T16:25:04.023481Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-03-31T16:25:04.024047Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-03-31T16:25:04.024113Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:04.024151Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037890 TxInFly 0 2025-03-31T16:25:04.024197Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-03-31T16:25:04.024855Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:04.024900Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:04.024931Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:25:04.024974Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:04.025095Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:678:2581], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-31T16:25:04.025569Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:25:04.025750Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T16:25:04.025840Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T16:25:04.028461Z node 1 :TX_DATASHARD DEBUG: Discovered 
subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:04.028596Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-03-31T16:25:04.039706Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:25:04.039834Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-31T16:25:04.040560Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-03-31T16:25:04.040653Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-03-31T16:25:04.084354Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [1:680:2583], serverId# [1:762:2629], sessionId# [0:0:0] 2025-03-31T16:25:04.084549Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037890 2025-03-31T16:25:04.084768Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037890 txId 2 ... 80, LocalPathId: 11] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037891 2025-03-31T16:25:22.646023Z node 3 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 5 Group: 0 Step: 2500 TxId: 281474976715666 PathId: [OwnerId: 72057594046644480, LocalPathId: 14] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 11] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037891 2025-03-31T16:25:22.646146Z node 3 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 6 Group: 0 Step: 2500 TxId: 281474976715666 PathId: [OwnerId: 72057594046644480, LocalPathId: 14] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 11] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037891 2025-03-31T16:25:22.646440Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:22.646632Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037893 2025-03-31T16:25:22.646674Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037893 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:25:22.646715Z node 3 :TX_DATASHARD DEBUG: Found ready operation [2500:281474976715666] in PlanQueue unit at 72075186224037893 2025-03-31T16:25:22.646924Z node 3 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037893 loaded tx from db 2500:281474976715666 keys extracted: 0 2025-03-31T16:25:22.647078Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037893 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:25:22.658184Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037893 step# 2500} 2025-03-31T16:25:22.658297Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037893 2025-03-31T16:25:22.669481Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037891 step# 2500} 2025-03-31T16:25:22.669600Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037891 2025-03-31T16:25:22.669657Z node 3 :TX_DATASHARD DEBUG: Send RS 2 at 72075186224037891 from 72075186224037891 to 72075186224037893 txId 281474976715666 2025-03-31T16:25:22.669716Z node 3 :TX_DATASHARD DEBUG: 
TTxProgressTransaction::Complete at 72075186224037891 2025-03-31T16:25:22.669782Z node 3 :TX_DATASHARD DEBUG: Complete [2500 : 281474976715666] from 72075186224037891 at tablet 72075186224037891 send result to client [3:1442:3073], exec latency: 0 ms, propose latency: 0 ms 2025-03-31T16:25:22.669916Z node 3 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037891, records: { Order: 4 PathId: [OwnerId: 72057594046644480, LocalPathId: 14] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 11] SchemaVersion: 1 }, { Order: 5 PathId: [OwnerId: 72057594046644480, LocalPathId: 14] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 11] SchemaVersion: 1 }, { Order: 6 PathId: [OwnerId: 72057594046644480, LocalPathId: 14] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 11] SchemaVersion: 1 } 2025-03-31T16:25:22.669990Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-03-31T16:25:22.670279Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1442:3073] HandlePlan TEvDataShard::TEvProposeTransactionResult: txId# 281474976715666, shard# 72075186224037891, status# 2 2025-03-31T16:25:22.670849Z node 3 :TX_DATASHARD INFO: TTxRequestChangeRecords Execute: at tablet# 72075186224037891 2025-03-31T16:25:22.671187Z node 3 :TX_DATASHARD DEBUG: Send 3 change records: to# [3:1241:2954], at tablet# 72075186224037891 2025-03-31T16:25:22.671267Z node 3 :TX_DATASHARD INFO: TTxRequestChangeRecords Complete: sent# 3, forgotten# 0, left# 0, at tablet# 72075186224037891 2025-03-31T16:25:22.671396Z node 3 :TX_DATASHARD DEBUG: Receive RS at 72075186224037893 source 72075186224037891 dest 72075186224037893 producer 72075186224037891 txId 281474976715666 2025-03-31T16:25:22.671495Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037893 got read set: {TEvReadSet step# 2500 txid# 281474976715666 TabletSource# 72075186224037891 TabletDest# 72075186224037893 SetTabletProducer# 72075186224037891 ReadSet.Size()# 19 Seqno# 2 Flags# 0} 2025-03-31T16:25:22.671577Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037893 2025-03-31T16:25:22.671897Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037893 2025-03-31T16:25:22.671941Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037893 active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:25:22.671996Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [2500:281474976715666] at 72075186224037893 for LoadAndWaitInRS 2025-03-31T16:25:22.672450Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037893 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:22.672890Z node 3 :TX_DATASHARD DEBUG: Handle TEvChangeExchange::TEvApplyRecords: origin# 72075186224037891, generation# 1, at tablet# 72075186224037892 2025-03-31T16:25:22.684694Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037893 2025-03-31T16:25:22.684815Z node 3 :TX_DATASHARD DEBUG: Complete [2500 : 281474976715666] from 72075186224037893 at tablet 72075186224037893 send result to client [3:1442:3073], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:25:22.684924Z node 3 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 72075186224037893 {TEvReadSet step# 2500 txid# 281474976715666 TabletSource# 72075186224037891 TabletDest# 72075186224037893 SetTabletConsumer# 72075186224037893 Flags# 0 Seqno# 2} 2025-03-31T16:25:22.685002Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at 
tablet# 72075186224037893 2025-03-31T16:25:22.685173Z node 3 :TX_DATASHARD INFO: TTxRemoveChangeRecords Execute: records# 3, at tablet# 72075186224037891 2025-03-31T16:25:22.685214Z node 3 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 4, at tablet: 72075186224037891 2025-03-31T16:25:22.685370Z node 3 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 5, at tablet: 72075186224037891 2025-03-31T16:25:22.685414Z node 3 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 6, at tablet: 72075186224037891 2025-03-31T16:25:22.685598Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1442:3073] HandlePlan TEvDataShard::TEvProposeTransactionResult: txId# 281474976715666, shard# 72075186224037893, status# 2 2025-03-31T16:25:22.685661Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1442:3073] Reply: txId# 281474976715666, status# OK, error# 2025-03-31T16:25:22.685865Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037891 source 72075186224037891 dest 72075186224037893 consumer 72075186224037893 txId 281474976715666 2025-03-31T16:25:22.686292Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037891 2025-03-31T16:25:22.686352Z node 3 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 4, at: 72075186224037891 2025-03-31T16:25:22.686487Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037891, clientId# [3:1436:3068], serverId# [3:1437:3069], sessionId# [0:0:0] 2025-03-31T16:25:22.686691Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037891 2025-03-31T16:25:22.686745Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:22.686788Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037891 TxInFly 0 2025-03-31T16:25:22.688253Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037893 2025-03-31T16:25:22.688785Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037893 2025-03-31T16:25:22.689046Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037893 2025-03-31T16:25:22.689113Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037893 active 1 active planned 0 immediate 1 planned 0 2025-03-31T16:25:22.689174Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715667] at 72075186224037893 for WaitForStreamClearance 2025-03-31T16:25:22.689484Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037893 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-03-31T16:25:22.689593Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037893 2025-03-31T16:25:22.690516Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037893, TxId: 281474976715667, MessageQuota: 1 2025-03-31T16:25:22.690673Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037893, TxId: 281474976715667, MessageQuota: 1 2025-03-31T16:25:22.737227Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037893 2025-03-31T16:25:22.737321Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715667, at: 72075186224037893 2025-03-31T16:25:22.737593Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037893 2025-03-31T16:25:22.737647Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037893 active 1 active planned 0 immediate 1 planned 0 2025-03-31T16:25:22.737702Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715667] at 72075186224037893 for ReadTableScan 
2025-03-31T16:25:22.737866Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037893 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:22.737949Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037893 2025-03-31T16:25:22.738017Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037893 2025-03-31T16:25:22.739519Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037892 2025-03-31T16:25:22.739871Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037892 2025-03-31T16:25:22.740074Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037892 2025-03-31T16:25:22.740118Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 active 1 active planned 0 immediate 1 planned 0 2025-03-31T16:25:22.740161Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715668] at 72075186224037892 for WaitForStreamClearance 2025-03-31T16:25:22.740378Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-03-31T16:25:22.740439Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037892 2025-03-31T16:25:22.741080Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037892, TxId: 281474976715668, MessageQuota: 1 2025-03-31T16:25:22.741217Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037892, TxId: 281474976715668, MessageQuota: 1 2025-03-31T16:25:22.742950Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037892 2025-03-31T16:25:22.743000Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715668, at: 72075186224037892 2025-03-31T16:25:22.743276Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037892 2025-03-31T16:25:22.743325Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 active 1 active planned 0 immediate 1 planned 0 2025-03-31T16:25:22.743371Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715668] at 72075186224037892 for ReadTableScan 2025-03-31T16:25:22.743505Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:22.743572Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037892 2025-03-31T16:25:22.743621Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 >> TTxLocatorTest::Boot [GOOD] >> AsyncIndexChangeCollector::InsertSingleRow [GOOD] >> AsyncIndexChangeCollector::InsertManyRows >> AsyncIndexChangeCollector::UpsertSingleRow [GOOD] >> AsyncIndexChangeCollector::UpsertManyRows >> AsyncIndexChangeCollector::DeleteNothing [GOOD] >> AsyncIndexChangeCollector::DeleteSingleRow >> AsyncIndexChangeCollector::UpsertToSameKey [GOOD] >> AsyncIndexChangeCollector::UpsertWithoutIndexedValue >> CdcStreamChangeCollector::UpsertToSameKey [GOOD] >> CdcStreamChangeCollector::UpsertToSameKeyWithImages >> CdcStreamChangeCollector::UpsertManyRows [GOOD] >> CdcStreamChangeCollector::UpsertIntoTwoStreams ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::Boot [GOOD] Test command err: 2025-03-31T16:25:24.300878Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-03-31T16:25:24.301243Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 
HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-03-31T16:25:24.301796Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-03-31T16:25:24.303134Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:24.303559Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2025-03-31T16:25:24.310771Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:24.310903Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:24.310971Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:24.311029Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-03-31T16:25:24.311129Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:25:24.311188Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2025-03-31T16:25:24.311314Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> DistributedEraseTests::ConditionalEraseRowsShouldNotFailOnMissingRows [GOOD] Test command err: 2025-03-31T16:25:07.646875Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:25:07.647070Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:25:07.647211Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012ac/r3tmp/tmp2GmgCZ/pdisk_1.dat 2025-03-31T16:25:07.943115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:25:07.985653Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:25:08.025081Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:08.025224Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:08.036667Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:25:08.116941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:25:08.157751Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:687:2586] 2025-03-31T16:25:08.157968Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:25:08.191031Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:25:08.191219Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:25:08.192601Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:25:08.192662Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:25:08.192716Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:25:08.192992Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:25:08.193341Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:25:08.193402Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:718:2586] in generation 1 2025-03-31T16:25:08.194592Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:691:2588] 2025-03-31T16:25:08.194742Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:25:08.201576Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:25:08.201712Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:25:08.202744Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-03-31T16:25:08.202792Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-03-31T16:25:08.202835Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-03-31T16:25:08.203040Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:25:08.203288Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:25:08.203340Z node 1 :TX_DATASHARD DEBUG: DataShard 
72075186224037889 persisting started state actor id [1:731:2588] in generation 1 2025-03-31T16:25:08.204448Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:695:2590] 2025-03-31T16:25:08.204571Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:25:08.211026Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:25:08.211138Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:25:08.212055Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2025-03-31T16:25:08.212102Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037890 2025-03-31T16:25:08.212138Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037890 2025-03-31T16:25:08.212375Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:25:08.212467Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:25:08.212527Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037890 persisting started state actor id [1:739:2590] in generation 1 2025-03-31T16:25:08.223523Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:25:08.257037Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-03-31T16:25:08.257200Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:25:08.257307Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:743:2617] 2025-03-31T16:25:08.257331Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-03-31T16:25:08.257354Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-03-31T16:25:08.257380Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-31T16:25:08.257690Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:25:08.257715Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037890 2025-03-31T16:25:08.257752Z node 1 :TX_DATASHARD DEBUG: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:25:08.257783Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037890, actorId: [1:744:2618] 2025-03-31T16:25:08.257797Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890 2025-03-31T16:25:08.257809Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2025-03-31T16:25:08.257824Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-03-31T16:25:08.258022Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:25:08.258042Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T16:25:08.258072Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:25:08.258121Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:745:2619] 2025-03-31T16:25:08.258136Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:25:08.258148Z node 1 :TX_DATASHARD INFO: 
Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T16:25:08.258171Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:08.258209Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-03-31T16:25:08.258275Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-03-31T16:25:08.258441Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-31T16:25:08.258472Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:08.258502Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-03-31T16:25:08.258533Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-03-31T16:25:08.258697Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:679:2582], serverId# [1:713:2602], sessionId# [0:0:0] 2025-03-31T16:25:08.258733Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037890 2025-03-31T16:25:08.258780Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037890 2025-03-31T16:25:08.258831Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T16:25:08.258869Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T16:25:08.258952Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-31T16:25:08.259161Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-03-31T16:25:08.259235Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-03-31T16:25:08.259660Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-03-31T16:25:08.259704Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:08.259727Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037890 TxInFly 0 2025-03-31T16:25:08.259749Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-03-31T16:25:08.260069Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:08.260098Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:08.260113Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:25:08.260133Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:08.260189Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:678:2581], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-31T16:25:08.260404Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:25:08.260515Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T16:25:08.260554Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T16:25:08.261892Z node 1 :TX_DATASHARD DEBUG: Discovered 
subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:08.261956Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-03-31T16:25:08.272754Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:25:08.272863Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-31T16:25:08.273437Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-03-31T16:25:08.273515Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-03-31T16:25:08.316501Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [1:680:2583], serverId# [1:762:2629], sessionId# [0:0:0] 2025-03-31T16:25:08.316634Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037890 2025-03-31T16:25:08.316825Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037890 txId 2 ... [2000:281474976715662] at 72075186224037888 for LoadAndWaitInRS 2025-03-31T16:25:23.550550Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:23.550706Z node 3 :TX_DATASHARD DEBUG: Receive RS at 72075186224037890 source 72075186224037889 dest 72075186224037890 producer 72075186224037889 txId 281474976715662 2025-03-31T16:25:23.550760Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037890 got read set: {TEvReadSet step# 2000 txid# 281474976715662 TabletSource# 72075186224037889 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037889 ReadSet.Size()# 19 Seqno# 6 Flags# 0} 2025-03-31T16:25:23.550811Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037890 2025-03-31T16:25:23.551005Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-03-31T16:25:23.551034Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:25:23.551125Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [2000:281474976715662] at 72075186224037890 for LoadAndWaitInRS 2025-03-31T16:25:23.551392Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:23.562567Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:23.562666Z node 3 :TX_DATASHARD DEBUG: Complete [2000 : 281474976715662] from 72075186224037888 at tablet 72075186224037888 send result to client [3:1107:2845], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:25:23.562751Z node 3 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 72075186224037888 {TEvReadSet step# 2000 txid# 281474976715662 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 5} 2025-03-31T16:25:23.562801Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:23.562921Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715662 2025-03-31T16:25:23.562982Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1107:2845] HandlePlan 
TEvDataShard::TEvProposeTransactionResult: txId# 281474976715662, shard# 72075186224037888, status# 2 2025-03-31T16:25:23.563107Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-03-31T16:25:23.563171Z node 3 :TX_DATASHARD DEBUG: Complete [2000 : 281474976715662] from 72075186224037890 at tablet 72075186224037890 send result to client [3:1107:2845], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:25:23.563234Z node 3 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 72075186224037890 {TEvReadSet step# 2000 txid# 281474976715662 TabletSource# 72075186224037889 TabletDest# 72075186224037890 SetTabletConsumer# 72075186224037890 Flags# 0 Seqno# 6} 2025-03-31T16:25:23.563261Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-03-31T16:25:23.563347Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1107:2845] HandlePlan TEvDataShard::TEvProposeTransactionResult: txId# 281474976715662, shard# 72075186224037890, status# 2 2025-03-31T16:25:23.563385Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1107:2845] Reply: txId# 281474976715662, status# OK, error# 2025-03-31T16:25:23.563511Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037890 consumer 72075186224037890 txId 281474976715662 2025-03-31T16:25:23.563696Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037889 2025-03-31T16:25:23.563742Z node 3 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 4, at: 72075186224037889 2025-03-31T16:25:23.564089Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-31T16:25:23.564130Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:23.564170Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-03-31T16:25:23.564237Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-03-31T16:25:23.564369Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037889, clientId# [3:1102:2841], serverId# [3:1103:2842], sessionId# [0:0:0] 2025-03-31T16:25:23.565598Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-31T16:25:23.565986Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-03-31T16:25:23.566226Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-31T16:25:23.566277Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 1 active planned 0 immediate 1 planned 0 2025-03-31T16:25:23.566350Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715664] at 72075186224037889 for WaitForStreamClearance 2025-03-31T16:25:23.566633Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-03-31T16:25:23.566709Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-03-31T16:25:23.567500Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037889, TxId: 281474976715664, MessageQuota: 1 2025-03-31T16:25:23.567768Z node 3 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037889, TxId: 281474976715664, Size: 70, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-03-31T16:25:23.567905Z node 3 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037889, TxId: 281474976715664, PendingAcks: 
0 2025-03-31T16:25:23.567985Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037889, TxId: 281474976715664, MessageQuota: 0 2025-03-31T16:25:23.569791Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037889 2025-03-31T16:25:23.569848Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715664, at: 72075186224037889 2025-03-31T16:25:23.570331Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-31T16:25:23.570371Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 1 active planned 0 immediate 1 planned 0 2025-03-31T16:25:23.570408Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715664] at 72075186224037889 for ReadTableScan 2025-03-31T16:25:23.570536Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:23.570593Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-03-31T16:25:23.570638Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-31T16:25:23.594416Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:25:23.594829Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:25:23.595061Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:23.595132Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-03-31T16:25:23.595182Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715665] at 72075186224037888 for WaitForStreamClearance 2025-03-31T16:25:23.595431Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-03-31T16:25:23.595505Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:23.596204Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715665, MessageQuota: 1 2025-03-31T16:25:23.596461Z node 3 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715665, Size: 35, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-03-31T16:25:23.596605Z node 3 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715665, PendingAcks: 0 2025-03-31T16:25:23.596659Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715665, MessageQuota: 0 2025-03-31T16:25:23.598390Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-03-31T16:25:23.598448Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715665, at: 72075186224037888 2025-03-31T16:25:23.598863Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:23.598902Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-03-31T16:25:23.598943Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715665] at 72075186224037888 for ReadTableScan 2025-03-31T16:25:23.599074Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:23.599155Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:23.599209Z node 3 :TX_DATASHARD DEBUG: 
[CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:23.635960Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037890 2025-03-31T16:25:23.636372Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037890 2025-03-31T16:25:23.636596Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-03-31T16:25:23.636644Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 0 immediate 1 planned 0 2025-03-31T16:25:23.636693Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715666] at 72075186224037890 for WaitForStreamClearance 2025-03-31T16:25:23.636935Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-03-31T16:25:23.637012Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-03-31T16:25:23.637734Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715666, MessageQuota: 1 2025-03-31T16:25:23.637980Z node 3 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037890, TxId: 281474976715666, Size: 35, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-03-31T16:25:23.638127Z node 3 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715666, PendingAcks: 0 2025-03-31T16:25:23.638181Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037890, TxId: 281474976715666, MessageQuota: 0 2025-03-31T16:25:23.640073Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037890 2025-03-31T16:25:23.640127Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715666, at: 72075186224037890 2025-03-31T16:25:23.640342Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-03-31T16:25:23.640383Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 0 immediate 1 planned 0 2025-03-31T16:25:23.640421Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715666] at 72075186224037890 for ReadTableScan 2025-03-31T16:25:23.640559Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:23.640621Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-03-31T16:25:23.640663Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 >> SystemView::AuthGroups [GOOD] >> SystemView::AuthGroups_Access >> SystemView::AuthGroups_TableRange [GOOD] >> SystemView::AuthOwners |91.9%| [TA] $(B)/ydb/core/tx/tx_allocator/ut/test-results/unittest/{meta.json ... results_accumulator.log} |91.9%| [TA] {RESULT} $(B)/ydb/core/tx/tx_allocator/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TColumnShardTestSchema::RebootHotTiersWithStat [GOOD] |91.9%| [TA] $(B)/ydb/core/tx/datashard/ut_erase_rows/test-results/unittest/{meta.json ... results_accumulator.log} |91.9%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_erase_rows/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TargetDiscoverer::SystemObjects >> TargetDiscoverer::Basic >> TargetDiscoverer::Dirs >> TargetDiscoverer::Negative ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootHotTiersWithStat [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143438835.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143438835.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=143438835.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123438835.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143438835.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=143438835.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123437635.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123438835.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=123438835.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123437635.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123437635.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=123437635.000000s;Name=;Codec=}; 2025-03-31T16:23:56.044120Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:23:56.137760Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:23:56.161043Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:23:56.161386Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:23:56.169619Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:23:56.169850Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:23:56.170089Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:23:56.170233Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:23:56.170361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:23:56.170478Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:23:56.170582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:23:56.170687Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:23:56.170815Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:23:56.170951Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:23:56.171057Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:23:56.171187Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:23:56.197767Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:23:56.197929Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:23:56.197984Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:23:56.198146Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:23:56.198336Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:23:56.198462Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:23:56.198505Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:23:56.198589Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 
2025-03-31T16:23:56.198647Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:23:56.198691Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:23:56.198719Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:23:56.198886Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:23:56.198956Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:23:56.199003Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:23:56.199031Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:23:56.199138Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:23:56.199200Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:23:56.199242Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:23:56.199269Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:23:56.199334Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:23:56.199369Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:23:56.199397Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:23:56.199447Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:23:56.199485Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:23:56.199521Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:23:56.199877Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=44; 2025-03-31T16:23:56.199965Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=41; 2025-03-31T16:23:56.200043Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=33; 2025-03-31T16:23:56.200116Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=34; 2025-03-31T16:23:56.200303Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:23:56.200363Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:23:56.200399Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:23:56.200604Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:23:56.200648Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:23:56.200675Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=94 ... :29;EXECUTE:finishLoadingTime=591; 2025-03-31T16:25:24.680667Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=63288; 2025-03-31T16:25:24.693997Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=13200; 2025-03-31T16:25:24.708313Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=13076; 2025-03-31T16:25:24.708443Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=14327; 2025-03-31T16:25:24.708632Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=109; 2025-03-31T16:25:24.708802Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=109; 2025-03-31T16:25:24.708979Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=115; 2025-03-31T16:25:24.709136Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=95; 
2025-03-31T16:25:24.724777Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=15541; 2025-03-31T16:25:24.744668Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=19739; 2025-03-31T16:25:24.744843Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=51; 2025-03-31T16:25:24.744940Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=37; 2025-03-31T16:25:24.744997Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=10; 2025-03-31T16:25:24.745052Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=10; 2025-03-31T16:25:24.745104Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=8; 2025-03-31T16:25:24.745196Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=48; 2025-03-31T16:25:24.745249Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=6; 2025-03-31T16:25:24.745352Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=59; 2025-03-31T16:25:24.745403Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=7; 2025-03-31T16:25:24.745477Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=35; 2025-03-31T16:25:24.745599Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=74; 2025-03-31T16:25:24.746044Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=394; 2025-03-31T16:25:24.746099Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=136285; 2025-03-31T16:25:24.746276Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=46800024;raw_bytes=72380025;count=29;records=720000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-03-31T16:25:24.746396Z 
node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-03-31T16:25:24.746460Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-03-31T16:25:24.746535Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-03-31T16:25:24.768663Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=1; 2025-03-31T16:25:24.768848Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-31T16:25:24.768918Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:25:24.768999Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=7; 2025-03-31T16:25:24.769073Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-03-31T16:25:24.769124Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=7;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-31T16:25:24.769179Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-31T16:25:24.769224Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-31T16:25:24.769330Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-31T16:25:24.769906Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:25:24.770001Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;self_id=[1:2654:4528];tablet_id=9437184;parent=[1:2612:4493];fline=manager.cpp:82;event=ask_data;request=request_id=155;1={portions_count=29};; 2025-03-31T16:25:24.771052Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-03-31T16:25:24.771456Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-03-31T16:25:24.771495Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-03-31T16:25:24.771526Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-03-31T16:25:24.771577Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-31T16:25:24.771651Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:25:24.771721Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=7; 2025-03-31T16:25:24.771796Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-03-31T16:25:24.771850Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=7;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-31T16:25:24.771906Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-31T16:25:24.771949Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-31T16:25:24.772054Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-31T16:25:24.773124Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1452;size=29;path_id=1; 2025-03-31T16:25:24.774949Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1503;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 240000/15598728 160000/10402096 160000/10402096 80000/5203544 0/0 >> TargetDiscoverer::IndexedTable >> TargetDiscoverer::InvalidCredentials >> TColumnShardTestSchema::RebootHotTiersTtl [GOOD] >> 
KqpQueryPerf::MultiRead-QueryService >> KqpQueryPerf::IndexDeleteOn+QueryService-UseSink >> KqpQueryPerf::IndexLookupJoin+EnableStreamLookup-QueryService >> KqpQueryPerf::IndexLookupJoin-EnableStreamLookup-QueryService >> KqpWorkload::KV >> KqpQueryPerf::IndexReplace+QueryService-UseSink >> SystemView::Describe [GOOD] >> SystemView::DescribeSystemFolder ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootHotTiersTtl [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143438872.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143438872.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=143438872.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143438872.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143438872.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=123438872.000000s;Name=;Codec=}; WaitEmptyAfter=1;Tiers={{Column=timestamp;EvictAfter=143438872.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143438872.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=123437672.000000s;Name=;Codec=}; 2025-03-31T16:24:32.764995Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:24:32.850487Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:24:32.873974Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:24:32.874278Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:24:32.881770Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:24:32.881967Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:24:32.882192Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:24:32.882304Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:24:32.882413Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:24:32.882530Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:24:32.882627Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:24:32.882722Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:24:32.882821Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:24:32.882940Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:24:32.883050Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:24:32.883166Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:24:32.912973Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:24:32.913141Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:24:32.913202Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:24:32.913392Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:24:32.913577Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:24:32.913651Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:24:32.913695Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:24:32.913791Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 
2025-03-31T16:24:32.913859Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:24:32.913904Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:24:32.913937Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:24:32.914122Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:24:32.914200Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:24:32.914250Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:24:32.914288Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:24:32.914384Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:24:32.914439Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:24:32.914488Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:24:32.914519Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:24:32.914587Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:24:32.914623Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:24:32.914656Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:24:32.914702Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:24:32.914741Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:24:32.914775Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:24:32.915171Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=50; 2025-03-31T16:24:32.915263Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=34; 2025-03-31T16:24:32.915333Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=32; 2025-03-31T16:24:32.915412Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=39; 2025-03-31T16:24:32.915573Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:24:32.915631Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:24:32.915666Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:24:32.915880Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:24:32.915937Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:24:32.915970Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:24:32.916135Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchem ... 
e=common_data.cpp:29;EXECUTE:finishLoadingTime=520; 2025-03-31T16:25:27.130298Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=55484; 2025-03-31T16:25:27.142811Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=12402; 2025-03-31T16:25:27.154701Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=10624; 2025-03-31T16:25:27.154825Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=11886; 2025-03-31T16:25:27.154986Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=91; 2025-03-31T16:25:27.155138Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=99; 2025-03-31T16:25:27.155294Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=101; 2025-03-31T16:25:27.155406Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=67; 2025-03-31T16:25:27.167478Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=11971; 2025-03-31T16:25:27.183772Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=16124; 2025-03-31T16:25:27.183944Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=47; 2025-03-31T16:25:27.184056Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=30; 2025-03-31T16:25:27.184123Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=11; 2025-03-31T16:25:27.184185Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=13; 2025-03-31T16:25:27.184244Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=10; 2025-03-31T16:25:27.184353Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=55; 2025-03-31T16:25:27.184430Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=8; 2025-03-31T16:25:27.184547Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=67; 2025-03-31T16:25:27.184604Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=9; 2025-03-31T16:25:27.184685Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=37; 2025-03-31T16:25:27.184799Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=66; 2025-03-31T16:25:27.185168Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=318; 2025-03-31T16:25:27.185221Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=118861; 2025-03-31T16:25:27.185416Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=36397736;raw_bytes=56295575;count=22;records=560000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-03-31T16:25:27.185566Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:2549:4458];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-03-31T16:25:27.185635Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:2549:4458];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-03-31T16:25:27.185710Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2549:4458];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-03-31T16:25:27.208562Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2549:4458];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=1; 2025-03-31T16:25:27.208765Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-31T16:25:27.208834Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:25:27.208917Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=6; 2025-03-31T16:25:27.208986Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700008;tx_id=18446744073709551615;;current_snapshot_ts=1000000002; 2025-03-31T16:25:27.209035Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=6;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-31T16:25:27.209086Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-31T16:25:27.209127Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-31T16:25:27.209256Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-31T16:25:27.210148Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:25:27.211123Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;self_id=[1:2599:4501];tablet_id=9437184;parent=[1:2549:4458];fline=manager.cpp:82;event=ask_data;request=request_id=120;1={portions_count=22};; 2025-03-31T16:25:27.211866Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2549:4458];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-03-31T16:25:27.218854Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2549:4458];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-03-31T16:25:27.218913Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-03-31T16:25:27.218943Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-03-31T16:25:27.218991Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2549:4458];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-31T16:25:27.219068Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2549:4458];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:25:27.219177Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2549:4458];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=6; 2025-03-31T16:25:27.219252Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2549:4458];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700008;tx_id=18446744073709551615;;current_snapshot_ts=1000000002; 2025-03-31T16:25:27.219301Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2549:4458];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=6;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-31T16:25:27.219361Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2549:4458];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-31T16:25:27.219407Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=3;tablet_id=9437184;self_id=[1:2549:4458];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-31T16:25:27.219517Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2549:4458];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-31T16:25:27.220313Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:2549:4458];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1452;size=22;path_id=1; 2025-03-31T16:25:27.221473Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:2549:4458];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1503;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 240000/15598728 160000/10402096 80000/5203352 0/0 >> AsyncIndexChangeCollector::UpsertManyRows [GOOD] >> AsyncIndexChangeCollector::MultiIndexedTableUpdateOneIndexedColumn >> AsyncIndexChangeCollector::InsertManyRows [GOOD] >> AsyncIndexChangeCollector::MultiIndexedTableInsertSingleRow >> AsyncIndexChangeCollector::DeleteSingleRow [GOOD] >> AsyncIndexChangeCollector::IndexedPrimaryKeyDeleteSingleRow >> AsyncIndexChangeCollector::UpsertWithoutIndexedValue [GOOD] >> CdcStreamChangeCollector::DeleteNothing >> CdcStreamChangeCollector::UpsertToSameKeyWithImages [GOOD] >> CdcStreamChangeCollector::UpsertModifyDelete >> CdcStreamChangeCollector::UpsertIntoTwoStreams [GOOD] >> CdcStreamChangeCollector::PageFaults >> TargetDiscoverer::InvalidCredentials [GOOD] >> TargetDiscoverer::Negative [GOOD] >> TargetDiscoverer::SystemObjects [GOOD] >> TargetDiscoverer::IndexedTable [GOOD] >> TargetDiscoverer::Dirs [GOOD] >> KqpQueryPerf::Replace-QueryService+UseSink >> TargetDiscoverer::Basic [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::Negative [GOOD] Test command err: 2025-03-31T16:25:26.813759Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010594518849055:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:26.813819Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00135d/r3tmp/tmpHVhuCD/pdisk_1.dat 2025-03-31T16:25:27.246515Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:27.246618Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:27.247863Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:25:27.254867Z node 1 :HIVE WARN: HIVE#72057594037968897 
Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14834 TServer::EnableGrpc on GrpcPort 18286, node 1 2025-03-31T16:25:27.622700Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:25:27.622723Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:25:27.622731Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:25:27.622848Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14834 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:25:28.127755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:28.250617Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: SCHEME_ERROR, issues: {
: Error: Path not found } } } 2025-03-31T16:25:28.250674Z node 1 :REPLICATION_CONTROLLER ERROR: [TargetDiscoverer][rid 1] Describe path failed: path# /Root/Table, status# SCHEME_ERROR, issues# {
: Error: Path not found } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::InvalidCredentials [GOOD] Test command err: 2025-03-31T16:25:26.814097Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010595265696515:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:26.814208Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00138b/r3tmp/tmprJWJGb/pdisk_1.dat 2025-03-31T16:25:27.246964Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:27.247075Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:27.250718Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:25:27.292633Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:4099 TServer::EnableGrpc on GrpcPort 15867, node 1 2025-03-31T16:25:27.621226Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:25:27.621249Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:25:27.621261Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:25:27.621377Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4099 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:25:28.130121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:28.183925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:28.824531Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: CLIENT_UNAUTHENTICATED, issues: {
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/src/client/types/credentials/login/login.cpp:217: Cannot find user: user } } } 2025-03-31T16:25:28.824614Z node 1 :REPLICATION_CONTROLLER ERROR: [TargetDiscoverer][rid 1] Describe path failed: path# /Root, status# CLIENT_UNAUTHENTICATED, issues# {
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/src/client/types/credentials/login/login.cpp:217: Cannot find user: user } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::SystemObjects [GOOD] Test command err: 2025-03-31T16:25:26.817867Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010594405283060:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:26.817942Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001384/r3tmp/tmp7Rx12S/pdisk_1.dat 2025-03-31T16:25:27.216555Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:25:27.256837Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:27.257006Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:27.271275Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23520 TServer::EnableGrpc on GrpcPort 11105, node 1 2025-03-31T16:25:27.623465Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:25:27.623494Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:25:27.623506Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:25:27.623660Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23520 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:25:28.154199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:28.183528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-03-31T16:25:28.384975Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-03-31T16:25:28.388777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:28.471794Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { name: Root, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1743438328212, tx_id: 1 } } } 2025-03-31T16:25:28.471829Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Describe path succeeded: path# /Root 2025-03-31T16:25:28.484004Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvListDirectoryResponse { Result: { children [{ name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743438328324, tx_id: 281474976710658 } }, { name: export-100500, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1743438328429, tx_id: 281474976710659 } }, { name: .sys, owner: , type: Directory, size_bytes: 0, created_at: { plan_step: 0, tx_id: 0 } }] } } 2025-03-31T16:25:28.484046Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Listing succeeded: path# /Root 2025-03-31T16:25:29.896084Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743438328324, tx_id: 281474976710658 } } } 2025-03-31T16:25:29.896119Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Describe table succeeded: path# /Root/Table 2025-03-31T16:25:29.896151Z node 1 :REPLICATION_CONTROLLER INFO: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Table, dstPath# /Root/Replicated/Table, kind# Table >> SystemView::StoragePoolsRanges [GOOD] >> SystemView::SystemViewFailOps ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::Dirs [GOOD] Test command err: 2025-03-31T16:25:26.813968Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010595707969476:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:26.814067Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00136f/r3tmp/tmpsrYC7g/pdisk_1.dat 2025-03-31T16:25:27.314951Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:25:27.320147Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:27.320231Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:27.322717Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22129 TServer::EnableGrpc on GrpcPort 12373, node 1 2025-03-31T16:25:27.621543Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use 
file: (empty maybe) 2025-03-31T16:25:27.621579Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:25:27.621589Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:25:27.621734Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22129 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:25:28.127978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:28.152917Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:28.164818Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-31T16:25:28.183598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:25:28.394304Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { name: Root, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1743438328191, tx_id: 1 } } } 2025-03-31T16:25:28.394345Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Describe path succeeded: path# /Root 2025-03-31T16:25:28.404559Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvListDirectoryResponse { Result: { children [{ name: Dir, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1743438328205, tx_id: 281474976710658 } }, { name: .sys, owner: , type: Directory, size_bytes: 0, created_at: { plan_step: 0, tx_id: 0 } }] } } 2025-03-31T16:25:28.404611Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Listing succeeded: path# /Root 2025-03-31T16:25:28.424281Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvListDirectoryResponse { Result: { children [{ name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743438328310, tx_id: 281474976710659 } }] } } 2025-03-31T16:25:28.424310Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Listing succeeded: path# /Root/Dir 2025-03-31T16:25:30.231598Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743438328310, tx_id: 281474976710659 } } } 2025-03-31T16:25:30.231629Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Describe table succeeded: path# /Root/Dir/Table 2025-03-31T16:25:30.231654Z node 1 :REPLICATION_CONTROLLER INFO: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Dir/Table, dstPath# /Root/Replicated/Dir/Table, kind# Table ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::Basic [GOOD] Test command err: 2025-03-31T16:25:26.818018Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010595733816559:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:26.818160Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00137f/r3tmp/tmp1GZIAa/pdisk_1.dat 2025-03-31T16:25:27.243416Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:25:27.291643Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:27.291743Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:27.296726Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:65129 TServer::EnableGrpc on GrpcPort 18913, node 1 2025-03-31T16:25:27.620466Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:25:27.620487Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:25:27.620496Z 
node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:25:27.622239Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65129 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:25:28.129937Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:28.187117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:28.386005Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { name: Root, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1743438328191, tx_id: 1 } } } 2025-03-31T16:25:28.386887Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Describe path succeeded: path# /Root 2025-03-31T16:25:28.400116Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvListDirectoryResponse { Result: { children [{ name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743438328310, tx_id: 281474976710658 } }, { name: .sys, owner: , type: Directory, size_bytes: 0, created_at: { plan_step: 0, tx_id: 0 } }] } } 2025-03-31T16:25:28.400158Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Listing succeeded: path# /Root 2025-03-31T16:25:30.163866Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743438328310, tx_id: 281474976710658 } } } 2025-03-31T16:25:30.163899Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Describe table succeeded: path# /Root/Table 2025-03-31T16:25:30.163933Z node 1 :REPLICATION_CONTROLLER INFO: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Table, dstPath# /Root/Replicated/Table, kind# Table ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::IndexedTable [GOOD] Test command err: 2025-03-31T16:25:26.816061Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010593645411600:2069];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:26.816117Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001373/r3tmp/tmpzQOsAh/pdisk_1.dat 2025-03-31T16:25:27.277831Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:25:27.283611Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:27.283744Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:27.287190Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:32141 TServer::EnableGrpc on GrpcPort 5831, node 1 2025-03-31T16:25:27.621394Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:25:27.621432Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:25:27.621445Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:25:27.621585Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32141 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:25:28.128043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:28.183433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:25:28.673937Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { name: Root, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1743438328191, tx_id: 1 } } } 2025-03-31T16:25:28.673971Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Describe path succeeded: path# /Root 2025-03-31T16:25:28.692069Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvListDirectoryResponse { Result: { children [{ name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743438328548, tx_id: 281474976710658 } }, { name: .sys, owner: , type: Directory, size_bytes: 0, created_at: { plan_step: 0, tx_id: 0 } }] } } 2025-03-31T16:25:28.692109Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Listing succeeded: path# /Root 2025-03-31T16:25:30.230123Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743438328548, tx_id: 281474976710658 } } } 2025-03-31T16:25:30.230163Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Describe table succeeded: path# /Root/Table 2025-03-31T16:25:30.230194Z node 1 :REPLICATION_CONTROLLER INFO: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Table, dstPath# /Root/Replicated/Table, kind# Table 2025-03-31T16:25:30.230314Z node 1 :REPLICATION_CONTROLLER INFO: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Table/Index, dstPath# /Root/Replicated/Table/Index/indexImplTable, kind# IndexTable |91.9%| [TA] $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/test-results/unittest/{meta.json ... results_accumulator.log} |91.9%| [TA] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/test-results/unittest/{meta.json ... 
results_accumulator.log} >> DataShardVolatile::DistributedUpsertRestartBeforePrepare+UseSink [GOOD] >> DataShardVolatile::DistributedUpsertRestartBeforePrepare-UseSink >> KqpQueryPerf::IndexInsert+QueryService-UseSink >> KqpQueryPerf::RangeRead+QueryService >> KqpQueryPerf::Upsert+QueryService-UseSink >> KqpQueryPerf::UpdateOn-QueryService+UseSink >> KqpQueryPerf::Update-QueryService-UseSink >> KqpQueryPerf::UpdateOn-QueryService-UseSink >> KqpQueryPerf::IndexLookupJoin+EnableStreamLookup-QueryService [GOOD] >> KqpQueryPerf::IndexLookupJoin+EnableStreamLookup+QueryService >> KqpQueryPerf::MultiRead-QueryService [GOOD] >> AsyncIndexChangeCollector::MultiIndexedTableUpdateOneIndexedColumn [GOOD] >> AsyncIndexChangeCollector::MultiIndexedTableReplaceSingleRow >> AsyncIndexChangeCollector::MultiIndexedTableInsertSingleRow [GOOD] >> AsyncIndexChangeCollector::IndexedPrimaryKeyInsertSingleRow >> AsyncIndexChangeCollector::IndexedPrimaryKeyDeleteSingleRow [GOOD] >> AsyncIndexChangeCollector::ImplicitlyUpdateCoveredColumn >> CdcStreamChangeCollector::DeleteNothing [GOOD] >> CdcStreamChangeCollector::DeleteSingleRow >> KqpQueryPerf::IndexLookupJoin-EnableStreamLookup-QueryService [GOOD] >> KqpQueryPerf::IndexLookupJoin-EnableStreamLookup+QueryService >> SystemView::AuthOwners [GOOD] >> SystemView::AuthOwners_Access >> CdcStreamChangeCollector::UpsertModifyDelete [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::MultiRead-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 6172, MsgBus: 62212 2025-03-31T16:25:28.136479Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010604039472284:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:28.136542Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000f32/r3tmp/tmpzUhucj/pdisk_1.dat 2025-03-31T16:25:28.598850Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:28.599009Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:28.599719Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:25:28.607168Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6172, node 1 2025-03-31T16:25:28.822494Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:25:28.822511Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:25:28.822518Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:25:28.822617Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62212 TClient is connected to server localhost:62212 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:25:29.700674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:29.719947Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:25:29.739118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:29.921316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:30.094797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:30.182882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:31.541355Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010616924375943:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:31.541511Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:31.980248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:25:32.014185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:25:32.048033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:25:32.078980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:25:32.158289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:25:32.195735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:25:32.262087Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010621219343755:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:32.262168Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:32.262327Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010621219343760:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:32.266021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:25:32.277427Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010621219343762:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:25:32.352095Z node 1 :TX_PROXY ERROR: Actor# [1:7488010621219343815:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:25:33.136824Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010604039472284:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:33.136891Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> SystemView::AuthGroups_Access [GOOD] >> SystemView::AuthGroups_ResultOrder ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> CdcStreamChangeCollector::UpsertModifyDelete [GOOD] Test command err: 2025-03-31T16:25:22.588083Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:25:22.588346Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:25:22.588539Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018b2/r3tmp/tmphzwNVg/pdisk_1.dat 2025-03-31T16:25:22.974475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:25:23.018075Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:25:23.022287Z node 1 :TABLET_SAUSAGECACHE NOTICE: Update config MemoryLimit: 33554432 2025-03-31T16:25:23.058141Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:23.058296Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:23.069768Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:25:23.151130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:25:23.195631Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-31T16:25:23.196046Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:25:23.233695Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:25:23.233807Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:25:23.235150Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:25:23.235218Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:25:23.235286Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:25:23.235607Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:25:23.235721Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:25:23.235797Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-31T16:25:23.246488Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:25:23.276510Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T16:25:23.276705Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:25:23.276793Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-31T16:25:23.276821Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:25:23.276850Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T16:25:23.276886Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at 
tablet# 72075186224037888 2025-03-31T16:25:23.277211Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T16:25:23.277299Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T16:25:23.277386Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:23.277416Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:23.277459Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:25:23.277494Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:23.277567Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-31T16:25:23.277715Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:25:23.277926Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T16:25:23.278005Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T16:25:23.279580Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:23.290170Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:25:23.290256Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-31T16:25:23.438510Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-31T16:25:23.443831Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T16:25:23.443924Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:23.444558Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:23.444621Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:25:23.444706Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-31T16:25:23.444986Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-31T16:25:23.445159Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:25:23.445787Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:23.445878Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-31T16:25:23.448119Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-31T16:25:23.448589Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp 
at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:23.450811Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-03-31T16:25:23.450866Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:23.451852Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-03-31T16:25:23.451935Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:23.452806Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:23.452850Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:25:23.452900Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-31T16:25:23.452963Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-31T16:25:23.453021Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-31T16:25:23.453144Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:23.457280Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:23.459439Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-03-31T16:25:23.459680Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-03-31T16:25:23.459754Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-31T16:25:23.477429Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:25:23.477605Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2025-03-31T16:25:23.477676Z node 1 :TX_DATASHARD INFO: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2025-03-31T16:25:23.477712Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2025-03-31T16:25:23.479070Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:23.504690Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:25:23.781623Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715658 at step 1500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T16:25:23.781686Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:23.781854Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:23.781897Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active 
planned 0 immediate 0 planned 1 2025-03-31T16:25:23.781943Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1500:281474976715658] in PlanQueue unit at 72075186224037888 2025-03-31T16:25:23.782164Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1500:281474976715658 keys extracted: 0 2025-03-31T16:25:23.782303Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:25:23.782656Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:23.784817Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:23.846621Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2025-03-31T16:25:23.846721Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:23.846760Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:23.846822Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tab ... rTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-03-31T16:25:33.317717Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:33.318679Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-03-31T16:25:33.318764Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:33.319894Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:33.319945Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:25:33.320018Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-31T16:25:33.320093Z node 3 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-31T16:25:33.320146Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-31T16:25:33.320259Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:33.321177Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:33.323548Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-03-31T16:25:33.323622Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-31T16:25:33.324683Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-03-31T16:25:33.336542Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:25:33.336731Z node 3 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2025-03-31T16:25:33.336789Z node 3 :TX_DATASHARD INFO: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 
2025-03-31T16:25:33.336827Z node 3 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2025-03-31T16:25:33.337303Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:33.405441Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:25:33.606376Z node 3 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715658 at step 1500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T16:25:33.606444Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:33.607488Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:33.607556Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:25:33.607618Z node 3 :TX_DATASHARD DEBUG: Found ready operation [1500:281474976715658] in PlanQueue unit at 72075186224037888 2025-03-31T16:25:33.607837Z node 3 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1500:281474976715658 keys extracted: 0 2025-03-31T16:25:33.608005Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:25:33.608344Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:33.609177Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:33.653717Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2025-03-31T16:25:33.653831Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:33.653876Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:33.653926Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:33.654009Z node 3 :TX_DATASHARD DEBUG: Complete [1500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [3:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-31T16:25:33.654078Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715658 state Ready TxInFly 0 2025-03-31T16:25:33.654196Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:33.656568Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715658 datashard 72075186224037888 state Ready 2025-03-31T16:25:33.656675Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-31T16:25:33.664457Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:886:2724], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:33.664585Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:33.664686Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:895:2729], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:33.670569Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-31T16:25:33.677630Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:33.836041Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:33.840048Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:900:2732], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-31T16:25:33.866627Z node 3 :TX_PROXY ERROR: Actor# [3:956:2769] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:25:33.930917Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqpgbhpyabrpyb3jfftyrh59, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ODgwYmFmYTUtNzkwODY1YjYtZjRkNjMyNWQtOTdjZWY4YTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:25:33.934064Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:987:2786], serverId# [3:988:2787], sessionId# [0:0:0] 2025-03-31T16:25:33.934540Z node 3 :TX_DATASHARD DEBUG: Executing write operation for [0:3] at 72075186224037888 2025-03-31T16:25:33.934863Z node 3 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 1 Group: 1743438333934746 Step: 2000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 34b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-03-31T16:25:33.935098Z node 3 :TX_DATASHARD DEBUG: Executed write operation for [0:3] at 72075186224037888, row count=1 2025-03-31T16:25:33.946328Z node 3 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 34 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2025-03-31T16:25:33.946437Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:34.006589Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqpgbj008r6a8vcxj32wr1a5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZDk3MGNkMjgtNWM4YjMzNGEtY2IyNzczNy1lM2E4YTNhOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:25:34.009059Z node 3 :TX_DATASHARD DEBUG: Executing write operation for [0:4] at 72075186224037888 2025-03-31T16:25:34.009396Z node 3 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 2 Group: 1743438334009294 Step: 2000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 50b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-03-31T16:25:34.009576Z node 3 :TX_DATASHARD DEBUG: Executed write operation for [0:4] at 72075186224037888, row count=1 2025-03-31T16:25:34.020655Z node 3 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 50 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2025-03-31T16:25:34.020731Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:34.107951Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715663. 
Ctx: { TraceId: 01jqpgbj29edx93rhay8qm6q4f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YzgyNTFmMi04ZWY2YThjLTY5YzFkNzQ5LTE5ZmYzMjcz, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:25:34.110340Z node 3 :TX_DATASHARD DEBUG: Executing write operation for [0:5] at 72075186224037888 2025-03-31T16:25:34.110703Z node 3 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 3 Group: 1743438334110576 Step: 2000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 34b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-03-31T16:25:34.110863Z node 3 :TX_DATASHARD DEBUG: Executed write operation for [0:5] at 72075186224037888, row count=1 2025-03-31T16:25:34.121928Z node 3 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 3 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 34 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2025-03-31T16:25:34.122010Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:34.124597Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:1034:2816], serverId# [3:1035:2817], sessionId# [0:0:0] 2025-03-31T16:25:34.132190Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:1036:2818], serverId# [3:1037:2819], sessionId# [0:0:0] >> KqpQueryPerf::IndexDeleteOn+QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexDeleteOn+QueryService+UseSink >> KqpQueryPerf::Replace-QueryService+UseSink [GOOD] >> KqpQueryPerf::IndexReplace+QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexReplace+QueryService+UseSink >> SystemView::DescribeSystemFolder [GOOD] >> SystemView::DescribeAccessDenied >> KqpQueryPerf::UpdateOn+QueryService-UseSink >> SystemView::SystemViewFailOps [GOOD] >> SystemView::TabletsFields ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Replace-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 7120, MsgBus: 7038 2025-03-31T16:25:31.056623Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010614897060003:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:31.056679Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000f19/r3tmp/tmpFzTv9j/pdisk_1.dat TServer::EnableGrpc on GrpcPort 7120, node 1 2025-03-31T16:25:31.422300Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:25:31.424916Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:25:31.458410Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:31.460291Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:31.462001Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:25:31.463417Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:25:31.481681Z node 1 
:NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:25:31.481828Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:25:31.481860Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:25:31.481976Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7038 TClient is connected to server localhost:7038 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:25:32.079533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:32.104454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:32.216427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:32.397939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:32.490175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:34.054179Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010627781963675:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:34.054309Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:34.344171Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:25:34.381625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:25:34.420910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:25:34.491269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:25:34.530961Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:25:34.569413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:25:34.667443Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010627781964193:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:34.667536Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:34.667799Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010627781964198:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:34.672383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:25:34.685126Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010627781964200:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:25:34.788536Z node 1 :TX_PROXY ERROR: Actor# [1:7488010627781964256:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpQueryPerf::IndexUpdateOn+QueryService-UseSink >> KqpQueryPerf::Upsert+QueryService-UseSink [GOOD] >> AsyncIndexChangeCollector::IndexedPrimaryKeyInsertSingleRow [GOOD] >> KqpQueryPerf::RangeRead+QueryService [GOOD] >> AsyncIndexChangeCollector::ImplicitlyUpdateCoveredColumn [GOOD] >> KqpQueryPerf::Insert+QueryService-UseSink >> KqpQueryPerf::IndexLookupJoin+EnableStreamLookup+QueryService [GOOD] >> AsyncIndexChangeCollector::MultiIndexedTableReplaceSingleRow [GOOD] >> KqpQueryPerf::UpdateOn-QueryService-UseSink [GOOD] >> KqpQueryPerf::UpdateOn-QueryService+UseSink [GOOD] >> KqpQueryPerf::Update-QueryService-UseSink [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKInt32_Reboot [GOOD] >> CdcStreamChangeCollector::DeleteSingleRow [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Upsert+QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 6614, MsgBus: 5914 2025-03-31T16:25:33.703297Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010624315076004:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:33.703483Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000f00/r3tmp/tmpTdnPJn/pdisk_1.dat 2025-03-31T16:25:34.034020Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6614, node 1 2025-03-31T16:25:34.088354Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:34.088462Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:34.091577Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:25:34.126168Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:25:34.126194Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:25:34.126202Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:25:34.126360Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5914 TClient is connected to server localhost:5914 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:25:34.680055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:34.695257Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:25:34.709655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:34.864874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:25:35.009705Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:25:35.082261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:36.787577Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010637199979657:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:36.787692Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:37.027451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:25:37.057239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:25:37.081307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:25:37.111701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:25:37.142459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:25:37.180160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:25:37.230157Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010641494947462:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:37.230232Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:37.230316Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010641494947467:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:37.234212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:25:37.246960Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010641494947469:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:25:37.306670Z node 1 :TX_PROXY ERROR: Actor# [1:7488010641494947523:3442] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:25:38.703067Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010624315076004:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:38.703167Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::RangeRead+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 22268, MsgBus: 28512 2025-03-31T16:25:33.557610Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010624470148468:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:33.557739Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000f0c/r3tmp/tmpEsEqSE/pdisk_1.dat 2025-03-31T16:25:33.937500Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22268, node 1 2025-03-31T16:25:33.980556Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:33.980683Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:33.982442Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:25:34.051659Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:25:34.051696Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:25:34.051704Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:25:34.051857Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28512 TClient is connected to server localhost:28512 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:25:34.570892Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:34.585358Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:25:34.600314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:34.729429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:34.912452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:34.977661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:36.744403Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010637355052129:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:36.744508Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:37.088507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:25:37.127476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:25:37.160957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:25:37.192668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:25:37.228946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:25:37.298375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:25:37.360052Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010641650019941:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:37.360133Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:37.360323Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010641650019946:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:37.364538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:25:37.379462Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010641650019948:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:25:37.464108Z node 1 :TX_PROXY ERROR: Actor# [1:7488010641650020003:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:25:38.557398Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010624470148468:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:38.557473Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> AsyncIndexChangeCollector::ImplicitlyUpdateCoveredColumn [GOOD] Test command err: 2025-03-31T16:25:22.292103Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:25:22.292459Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:25:22.292680Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00188a/r3tmp/tmpOuQNCS/pdisk_1.dat 2025-03-31T16:25:22.792205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:25:22.855727Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:25:22.902430Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:22.903240Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:22.916280Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:25:23.013468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:25:23.082057Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:676:2578] 2025-03-31T16:25:23.082290Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:25:23.122193Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:25:23.122364Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:25:23.125216Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:25:23.125305Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:25:23.125381Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:25:23.128028Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:25:23.128428Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:25:23.128503Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:703:2578] in generation 1 2025-03-31T16:25:23.129985Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:680:2580] 2025-03-31T16:25:23.130189Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:25:23.139544Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:25:23.139660Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:25:23.140961Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-03-31T16:25:23.141022Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-03-31T16:25:23.141055Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-03-31T16:25:23.141289Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:25:23.141404Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:25:23.141468Z node 1 :TX_DATASHARD DEBUG: DataShard 
72075186224037889 persisting started state actor id [1:711:2580] in generation 1 2025-03-31T16:25:23.152339Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:25:23.178400Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T16:25:23.180450Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:25:23.180641Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:714:2599] 2025-03-31T16:25:23.180708Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:25:23.180757Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T16:25:23.180799Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:23.181958Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:25:23.182019Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-03-31T16:25:23.182100Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:25:23.182186Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:715:2600] 2025-03-31T16:25:23.182220Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-03-31T16:25:23.182257Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-03-31T16:25:23.182299Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-31T16:25:23.182889Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T16:25:23.183016Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T16:25:23.184966Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:23.185036Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:23.185158Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:25:23.185225Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:23.185300Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-03-31T16:25:23.185385Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-03-31T16:25:23.185482Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:670:2574], serverId# [1:688:2584], sessionId# [0:0:0] 2025-03-31T16:25:23.185559Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-31T16:25:23.185588Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:23.185635Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-03-31T16:25:23.185684Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-03-31T16:25:23.185866Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 
2025-03-31T16:25:23.186255Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T16:25:23.186398Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T16:25:23.186979Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:671:2575], serverId# [1:701:2592], sessionId# [0:0:0] 2025-03-31T16:25:23.187201Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-31T16:25:23.187407Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-03-31T16:25:23.187471Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-03-31T16:25:23.189875Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:23.190058Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-03-31T16:25:23.200899Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:25:23.201021Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-31T16:25:23.201153Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-03-31T16:25:23.201179Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-03-31T16:25:23.353619Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:739:2618], serverId# [1:742:2621], sessionId# [0:0:0] 2025-03-31T16:25:23.353848Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:740:2619], serverId# [1:743:2622], sessionId# [0:0:0] 2025-03-31T16:25:23.358741Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037889 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2025-03-31T16:25:23.358826Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-31T16:25:23.359431Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T16:25:23.359469Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:23.359573Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-31T16:25:23.359613Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:25:23.359687Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037889 2025-03-31T16:25:23.359938Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-31T16:25:23.360125Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active 
planned 1 immediate 0 planned 1 2025-03-31T16:25:23.360322Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:23.360365Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:25:23.360394Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-31T16:25:23.360565Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-31T16:25:23.360647Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:25:23.360732Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-31T16:25:23.360813Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037889 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-31T16:25:23.364248Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037889 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-31T ... Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:38.550197Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:25:38.550228Z node 4 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-31T16:25:38.550401Z node 4 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-31T16:25:38.550502Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:25:38.550613Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-31T16:25:38.550687Z node 4 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037889 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-31T16:25:38.551266Z node 4 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037889 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-31T16:25:38.551753Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:38.553165Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:38.553230Z node 4 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 4] schema version# 1 2025-03-31T16:25:38.553614Z node 4 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-31T16:25:38.553927Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:38.556219Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037889 time 0 2025-03-31T16:25:38.556293Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-31T16:25:38.557264Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-03-31T16:25:38.557304Z node 4 
:TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:38.557368Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 1000} 2025-03-31T16:25:38.557441Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-03-31T16:25:38.558602Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-03-31T16:25:38.558682Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:38.560101Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-03-31T16:25:38.560164Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-03-31T16:25:38.560215Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037889 2025-03-31T16:25:38.560286Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037889 at tablet 72075186224037889 send result to client [4:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-31T16:25:38.560342Z node 4 :TX_DATASHARD INFO: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-31T16:25:38.560449Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-31T16:25:38.562025Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:38.562078Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:25:38.562115Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-31T16:25:38.562162Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-31T16:25:38.562200Z node 4 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-31T16:25:38.562262Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:38.563190Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:38.563272Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-03-31T16:25:38.566957Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037889 coordinator 72057594046316545 last step 0 next step 1000 2025-03-31T16:25:38.567433Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-03-31T16:25:38.567529Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037889 state Ready 2025-03-31T16:25:38.567601Z node 4 :TX_DATASHARD DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-03-31T16:25:38.568454Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-03-31T16:25:38.568503Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-31T16:25:38.578940Z node 4 
:KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:787:2658], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:38.579104Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:797:2663], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:38.579195Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:38.584935Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:25:38.592063Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:38.592194Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-03-31T16:25:38.752289Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:38.752420Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-03-31T16:25:38.755891Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:801:2666], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:25:38.794832Z node 4 :TX_PROXY ERROR: Actor# [4:879:2713] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:25:38.884077Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqpgbpgg31j1szxb7bcb2htc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=MmNlODM3N2YtOGQzMTYyMjEtNzU2YzM1Y2YtYTllODg5N2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:25:38.886851Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:948:2744], serverId# [4:949:2745], sessionId# [0:0:0] 2025-03-31T16:25:38.887358Z node 4 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037889 2025-03-31T16:25:38.887673Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 1 Group: 1743438338887571 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 42b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-03-31T16:25:38.887852Z node 4 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037889, row count=1 2025-03-31T16:25:38.900200Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 42 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 } 2025-03-31T16:25:38.900298Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-31T16:25:38.972340Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqpgbpts6v6nam4hgva0vvnk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=MjQ3MDMyNmYtYTM2MjVhOTktYzU0ZTY2NTEtNGFiZDkyZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-31T16:25:38.974562Z node 4 :TX_DATASHARD DEBUG: Executing write operation for [0:3] at 72075186224037889 2025-03-31T16:25:38.974856Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 2 Group: 1743438338974767 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-03-31T16:25:38.975036Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 3 Group: 1743438338974767 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 42b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-03-31T16:25:38.975275Z node 4 :TX_DATASHARD DEBUG: Executed write operation for [0:3] at 72075186224037889, row count=1 2025-03-31T16:25:38.986527Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 }, { Order: 3 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 42 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 } 2025-03-31T16:25:38.986630Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-31T16:25:38.993024Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:989:2776], serverId# [4:990:2777], sessionId# [0:0:0] 2025-03-31T16:25:38.999904Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:991:2778], serverId# [4:992:2779], sessionId# [0:0:0] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> AsyncIndexChangeCollector::IndexedPrimaryKeyInsertSingleRow [GOOD] Test command err: 2025-03-31T16:25:22.570206Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:25:22.570440Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:25:22.570585Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00189e/r3tmp/tmpCJ7oBG/pdisk_1.dat 2025-03-31T16:25:22.937155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:25:22.981824Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:25:23.029896Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:23.030057Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:23.041905Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:25:23.124914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:25:23.185753Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:676:2578] 2025-03-31T16:25:23.186032Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:25:23.238735Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:25:23.238892Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:25:23.240769Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:25:23.240854Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:25:23.240918Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:25:23.241333Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:25:23.241655Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:25:23.241750Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:703:2578] in generation 1 2025-03-31T16:25:23.243209Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:680:2580] 2025-03-31T16:25:23.243440Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:25:23.253316Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:25:23.253429Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:25:23.254938Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-03-31T16:25:23.255009Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-03-31T16:25:23.255061Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-03-31T16:25:23.255407Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:25:23.255549Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:25:23.255611Z node 1 :TX_DATASHARD DEBUG: DataShard 
72075186224037889 persisting started state actor id [1:711:2580] in generation 1 2025-03-31T16:25:23.266484Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:25:23.302300Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T16:25:23.302546Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:25:23.302684Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:714:2599] 2025-03-31T16:25:23.302744Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:25:23.302782Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T16:25:23.302822Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:23.303168Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:25:23.303211Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-03-31T16:25:23.303276Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:25:23.303334Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:715:2600] 2025-03-31T16:25:23.303359Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-03-31T16:25:23.303384Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-03-31T16:25:23.303409Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-31T16:25:23.303888Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T16:25:23.304008Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T16:25:23.304604Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:23.304657Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:23.304723Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:25:23.304778Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:23.304827Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-03-31T16:25:23.304888Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-03-31T16:25:23.304965Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:670:2574], serverId# [1:688:2584], sessionId# [0:0:0] 2025-03-31T16:25:23.305007Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-31T16:25:23.305033Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:23.305079Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-03-31T16:25:23.305118Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-03-31T16:25:23.305273Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 
2025-03-31T16:25:23.305527Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T16:25:23.305665Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T16:25:23.306102Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:671:2575], serverId# [1:701:2592], sessionId# [0:0:0] 2025-03-31T16:25:23.306243Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-31T16:25:23.306430Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-03-31T16:25:23.306485Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-03-31T16:25:23.308340Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:23.308495Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-03-31T16:25:23.319322Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:25:23.319415Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-31T16:25:23.319523Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-03-31T16:25:23.319550Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-03-31T16:25:23.469788Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:739:2618], serverId# [1:742:2621], sessionId# [0:0:0] 2025-03-31T16:25:23.469992Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:740:2619], serverId# [1:743:2622], sessionId# [0:0:0] 2025-03-31T16:25:23.485301Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037889 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2025-03-31T16:25:23.485421Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-31T16:25:23.486068Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T16:25:23.486122Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:23.486268Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-31T16:25:23.486321Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:25:23.486432Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037889 2025-03-31T16:25:23.486750Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-31T16:25:23.486968Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active 
planned 1 immediate 0 planned 1 2025-03-31T16:25:23.487204Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:23.487262Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:25:23.487296Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-31T16:25:23.487462Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-31T16:25:23.487560Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:25:23.487650Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-31T16:25:23.487731Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037889 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-31T16:25:23.489889Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037889 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-31T ... WaitScheme 2025-03-31T16:25:38.403888Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:739:2618], serverId# [4:742:2621], sessionId# [0:0:0] 2025-03-31T16:25:38.404073Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [4:740:2619], serverId# [4:743:2622], sessionId# [0:0:0] 2025-03-31T16:25:38.404708Z node 4 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037889 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2025-03-31T16:25:38.404767Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-31T16:25:38.405102Z node 4 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T16:25:38.405154Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:38.405276Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-31T16:25:38.405346Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:25:38.405399Z node 4 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037889 2025-03-31T16:25:38.405734Z node 4 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-31T16:25:38.405887Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:25:38.406132Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:38.406177Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:25:38.406208Z node 4 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-31T16:25:38.406380Z node 4 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 
loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-31T16:25:38.406475Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:25:38.406590Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-31T16:25:38.406654Z node 4 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037889 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-31T16:25:38.407260Z node 4 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037889 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-31T16:25:38.407774Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:38.409330Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:38.409413Z node 4 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 4] schema version# 1 2025-03-31T16:25:38.409793Z node 4 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-31T16:25:38.410116Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:38.412531Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037889 time 0 2025-03-31T16:25:38.412598Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-31T16:25:38.413669Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-03-31T16:25:38.413716Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:38.413786Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 1000} 2025-03-31T16:25:38.413886Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-03-31T16:25:38.415062Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-03-31T16:25:38.415193Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:38.416660Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-03-31T16:25:38.416726Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-03-31T16:25:38.416789Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037889 2025-03-31T16:25:38.416878Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037889 at tablet 72075186224037889 send result to client [4:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-31T16:25:38.416936Z node 4 :TX_DATASHARD INFO: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-31T16:25:38.417054Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-31T16:25:38.418674Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:38.418726Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:25:38.418779Z node 4 
:TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-31T16:25:38.418849Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-31T16:25:38.418898Z node 4 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-31T16:25:38.418971Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:38.419970Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:38.420046Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-03-31T16:25:38.423813Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037889 coordinator 72057594046316545 last step 0 next step 1000 2025-03-31T16:25:38.424274Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-03-31T16:25:38.424386Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037889 state Ready 2025-03-31T16:25:38.424465Z node 4 :TX_DATASHARD DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-03-31T16:25:38.425429Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-03-31T16:25:38.425485Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-31T16:25:38.439701Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:787:2658], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:38.439858Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:797:2663], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:38.439949Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:38.446147Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:25:38.453014Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:38.453143Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-03-31T16:25:38.613022Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:38.613129Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-03-31T16:25:38.616166Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:801:2666], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:25:38.651407Z node 4 :TX_PROXY ERROR: Actor# [4:879:2713] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:25:38.724631Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqpgbpc5d955wxe4hdxs60ef, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=ZmExZjAyMTEtNzk2OThjOWItZGIwZDkyNzUtMjhlYjRmMTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:25:38.727438Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:948:2744], serverId# [4:949:2745], sessionId# [0:0:0] 2025-03-31T16:25:38.727827Z node 4 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037889 2025-03-31T16:25:38.728082Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 1 Group: 1743438338727973 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-03-31T16:25:38.728250Z node 4 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037889, row count=1 2025-03-31T16:25:38.739242Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 } 2025-03-31T16:25:38.739359Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-31T16:25:38.744152Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:955:2750], serverId# [4:956:2751], sessionId# [0:0:0] 2025-03-31T16:25:38.749118Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:957:2752], serverId# [4:958:2753], sessionId# [0:0:0] >> KqpQueryPerf::IndexLookupJoin-EnableStreamLookup+QueryService [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexLookupJoin+EnableStreamLookup+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 13052, MsgBus: 22127 2025-03-31T16:25:28.130437Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010603191442189:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:28.130523Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000f36/r3tmp/tmpCRwQpt/pdisk_1.dat 2025-03-31T16:25:28.589035Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:28.589142Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:28.591207Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:25:28.591559Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Connecting -> Connected TServer::EnableGrpc on GrpcPort 13052, node 1 2025-03-31T16:25:28.819842Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:25:28.819869Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:25:28.819877Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:25:28.820034Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22127 TClient is connected to server localhost:22127 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:25:29.669828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:29.716384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:29.917524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:30.087262Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:30.174354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:31.387023Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010616076345850:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:31.387459Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:31.866208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:25:31.934771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:25:31.974604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:25:32.006650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:25:32.079499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:25:32.165879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:25:32.230630Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010620371313671:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:32.230737Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:32.230879Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010620371313676:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:32.235500Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:25:32.251117Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010620371313678:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:25:32.333803Z node 1 :TX_PROXY ERROR: Actor# [1:7488010620371313734:3458] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:25:33.132458Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010603191442189:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:33.132511Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 2756, MsgBus: 30043 2025-03-31T16:25:34.348367Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488010630273426176:2057];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:34.348783Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000f36/r3tmp/tmpOg0tRt/pdisk_1.dat 2025-03-31T16:25:34.502217Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:25:34.525817Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:34.526111Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:34.527129Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2756, node 2 2025-03-31T16:25:34.596778Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:25:34.596802Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:25:34.596810Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:25:34.596921Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30043 TClient is connected to server localhost:30043 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
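The sequence just above repeats in every test in this section: the workload service looks up the `default` resource pool, gets NOT_FOUND, proposes ESchemeOpCreateResourcePool, schedules a "doublechecking" retry, and a concurrent creator that loses the race receives "path exist, request accepts it" from TX_PROXY, which is treated as success. These WARN/ERROR records are expected bootstrap noise, not test failures. As a rough standalone illustration of that create-if-missing-with-retry pattern, a minimal C++ sketch follows; the enum, helper, and backoff values are invented for the example and are not YDB's TPoolCreatorActor code.

#include <chrono>
#include <functional>
#include <iostream>
#include <thread>

// Possible outcomes of one create attempt, mirroring the statuses in the
// log: NOT_FOUND on fetch, "path exist" when another creator won the race,
// and a transient "completed, doublechecking" state worth retrying.
enum class ECreateResult { Ok, AlreadyExists, TransientError };

// Hypothetical helper (not YDB code): ensure a resource exists, treating
// AlreadyExists as success and retrying transient errors with backoff.
bool EnsureExists(const std::function<ECreateResult()>& create,
                  int maxRetries = 5)
{
    using namespace std::chrono_literals;
    auto backoff = 10ms;
    for (int attempt = 0; attempt < maxRetries; ++attempt) {
        switch (create()) {
            case ECreateResult::Ok:
            case ECreateResult::AlreadyExists: // lost the race: pool already there
                return true;
            case ECreateResult::TransientError: // e.g. "Scheduled retry for error"
                std::this_thread::sleep_for(backoff);
                backoff *= 2; // exponential backoff between attempts
                break;
        }
    }
    return false;
}

int main() {
    int calls = 0;
    bool ok = EnsureExists([&] {
        return ++calls < 3 ? ECreateResult::TransientError
                           : ECreateResult::AlreadyExists;
    });
    std::cout << (ok ? "pool ready" : "gave up") << " after " << calls << " attempts\n";
}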
2025-03-31T16:25:35.041159Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:25:35.049107Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:25:35.058359Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:35.151684Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:25:35.307334Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-31T16:25:35.377207Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:37.427253Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010643158329823:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:37.427362Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:37.468186Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:25:37.505672Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:25:37.539104Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:25:37.584995Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:25:37.616695Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:25:37.662115Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:25:37.721922Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010643158330336:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:37.722001Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:37.722357Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010643158330341:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:37.726464Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:25:37.736324Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488010643158330343:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:25:37.797438Z node 2 :TX_PROXY ERROR: Actor# [2:7488010643158330396:3442] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::UpdateOn-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 63922, MsgBus: 23718 2025-03-31T16:25:33.867897Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010624760492327:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:33.867976Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ef3/r3tmp/tmpMtGpLJ/pdisk_1.dat 2025-03-31T16:25:34.224173Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63922, node 1 2025-03-31T16:25:34.242637Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:34.242723Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:34.247389Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:25:34.296398Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:25:34.296421Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:25:34.296427Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:25:34.296571Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23718 TClient is connected to server localhost:23718 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:25:34.850584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
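The METADATA_PROVIDER records in these blocks ("TTableExistsActor;event=timeout", "cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout") show an existence probe abandoned at a deadline rather than answered definitively: the caller distinguishes "exists", "missing", and "could not find out in time". A deadline-bounded probe of that general shape can be sketched with std::future; everything below is illustrative and is not the actual scheme-cache lookup.

#include <chrono>
#include <future>
#include <iostream>
#include <optional>
#include <thread>

// Generic sketch of a deadline-bounded existence check. nullopt models
// the "cannot detect path existence ... error=timeout" outcome.
std::optional<bool> ProbeExists(std::chrono::milliseconds deadline) {
    // The async task stands in for a scheme lookup that may be slow.
    auto fut = std::async(std::launch::async, [] {
        std::this_thread::sleep_for(std::chrono::milliseconds(50)); // simulated lookup
        return true;
    });
    if (fut.wait_for(deadline) == std::future_status::ready)
        return fut.get();  // definite answer within the deadline
    return std::nullopt;   // timed out; caller logs and moves on
    // Note: with std::async, the future's destructor still waits for the
    // task to finish; a real implementation would cancel it instead.
}

int main() {
    auto r = ProbeExists(std::chrono::milliseconds(10));
    std::cout << (r ? (*r ? "exists" : "missing") : "timeout") << "\n";
}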
2025-03-31T16:25:34.869399Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:25:34.891916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:35.066462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:35.245225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:35.330357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:37.466848Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010641940363267:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:37.466979Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:37.849000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:25:37.894040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:25:37.965365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:25:37.997424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:25:38.030661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:25:38.076827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:25:38.176638Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010646235331081:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:38.176704Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:38.176952Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010646235331086:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:38.181648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:25:38.194275Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010646235331088:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:25:38.256074Z node 1 :TX_PROXY ERROR: Actor# [1:7488010646235331143:3443] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:25:38.869249Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010624760492327:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:38.869443Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::UpdateOn-QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 2841, MsgBus: 23879 2025-03-31T16:25:34.227517Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010628457716574:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:34.227606Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000eeb/r3tmp/tmpL7oPnv/pdisk_1.dat 2025-03-31T16:25:34.660516Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:34.660650Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:34.664444Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:25:34.667458Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2841, node 1 2025-03-31T16:25:34.754119Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:25:34.754149Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:25:34.754160Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:25:34.754304Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23879 TClient is connected to server localhost:23879 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
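The NET_CLASSIFIER lines that open each server start trace a two-step fallback: try the distributable config ("empty, broken or outdated"), fall back to a file ("failed to initialize from file: (empty maybe)"), and log "got bad distributable configuration" when both are empty, after which the server simply runs without network classification. A minimal sketch of such a fallback chain, with both loaders stubbed out as assumptions:

#include <iostream>
#include <optional>
#include <string>

// Two hypothetical sources, standing in for the "distributable config"
// and the on-disk file that the NET_CLASSIFIER warnings mention.
std::optional<std::string> LoadDistributable() { return std::nullopt; } // empty/broken/outdated
std::optional<std::string> LoadFromFile()      { return std::nullopt; } // "(empty maybe)"

std::optional<std::string> LoadClassifierConfig() {
    if (auto cfg = LoadDistributable())
        return cfg;
    std::cerr << "WARN: distributable config is empty, broken or outdated, will use file\n";
    if (auto cfg = LoadFromFile())
        return cfg;
    std::cerr << "ERROR: got bad distributable configuration\n";
    return std::nullopt; // caller proceeds without network classification
}

int main() { return LoadClassifierConfig() ? 0 : 1; }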
2025-03-31T16:25:35.289029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:35.315962Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:25:35.326464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:35.466269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:35.629573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:35.700855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:37.355173Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010641342620222:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:37.355334Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:37.658872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:25:37.702734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:25:37.739973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:25:37.773547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:25:37.812729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:25:37.852548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:25:37.908490Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010641342620731:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:37.908562Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:37.908780Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010641342620736:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:37.912028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:25:37.922350Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010641342620738:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:25:37.997814Z node 1 :TX_PROXY ERROR: Actor# [1:7488010641342620791:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:25:39.228109Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010628457716574:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:39.228172Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> CdcStreamChangeCollector::PageFaults [GOOD] >> CdcStreamChangeCollector::OldImage ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Update-QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 11552, MsgBus: 27264 2025-03-31T16:25:34.042982Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010627628995928:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:34.043472Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ef4/r3tmp/tmp9137FI/pdisk_1.dat 2025-03-31T16:25:34.436913Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:25:34.450984Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:34.451122Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 11552, node 1 2025-03-31T16:25:34.467730Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:25:34.515690Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:25:34.515721Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:25:34.515727Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:25:34.515830Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27264 TClient is connected to server localhost:27264 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:25:35.126793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:35.155416Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:35.310002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:25:35.474512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:25:35.552245Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:37.241279Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010640513899593:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:37.241392Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:37.602151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:25:37.640698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:25:37.676686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:25:37.726607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:25:37.762476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:25:37.807628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:25:37.858213Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010640513900107:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:37.858282Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:37.858473Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010640513900112:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:37.862417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:25:37.876068Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010640513900114:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:25:37.963046Z node 1 :TX_PROXY ERROR: Actor# [1:7488010640513900167:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:25:39.043191Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010627628995928:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:39.043260Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> AsyncIndexChangeCollector::MultiIndexedTableReplaceSingleRow [GOOD] Test command err: 2025-03-31T16:25:22.483708Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:25:22.484009Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:25:22.484196Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001886/r3tmp/tmp1AXqEJ/pdisk_1.dat 2025-03-31T16:25:22.891533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:25:22.943138Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:25:22.983334Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:22.983476Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:22.994990Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:25:23.076712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:25:23.124962Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:676:2578] 2025-03-31T16:25:23.125243Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:25:23.168071Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:25:23.168227Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:25:23.169654Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:25:23.169715Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:25:23.169754Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:25:23.170075Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:25:23.170335Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:25:23.170388Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:703:2578] in generation 1 2025-03-31T16:25:23.171507Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:680:2580] 2025-03-31T16:25:23.171690Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:25:23.180223Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:25:23.180349Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:25:23.181851Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-03-31T16:25:23.181924Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-03-31T16:25:23.181974Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-03-31T16:25:23.182309Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:25:23.182438Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:25:23.182497Z node 1 :TX_DATASHARD DEBUG: DataShard 
72075186224037889 persisting started state actor id [1:711:2580] in generation 1 2025-03-31T16:25:23.193324Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:25:23.220109Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T16:25:23.220285Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:25:23.220382Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:714:2599] 2025-03-31T16:25:23.220439Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:25:23.220478Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T16:25:23.220513Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:23.220786Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:25:23.220816Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-03-31T16:25:23.220873Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:25:23.220928Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:715:2600] 2025-03-31T16:25:23.220949Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-03-31T16:25:23.220968Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-03-31T16:25:23.220983Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-31T16:25:23.221317Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T16:25:23.221407Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T16:25:23.221912Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:23.221960Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:23.222000Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:25:23.222032Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:23.222082Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-03-31T16:25:23.222140Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-03-31T16:25:23.222198Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:670:2574], serverId# [1:688:2584], sessionId# [0:0:0] 2025-03-31T16:25:23.222229Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-31T16:25:23.222245Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:23.222275Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-03-31T16:25:23.222300Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-03-31T16:25:23.222415Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 
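The datashard bootstrap traced above follows a small state machine: TxInitSchema and TTxInit complete, the shard switches to WaitScheme because processing params are missing, and the change sender cannot activate ("Cannot activate change sender ... state: WaitScheme") until the scheme transaction lands and the shard reports Ready. The sketch below reuses only the state names from the log; the transitions and the struct are simplified assumptions, not YDB's TDataShard.

#include <iostream>

// Minimal shard lifecycle: Init -> WaitScheme -> Ready.
enum class EShardState { Init, WaitScheme, Ready };

const char* Name(EShardState s) {
    switch (s) {
        case EShardState::Init:       return "Init";
        case EShardState::WaitScheme: return "WaitScheme";
        case EShardState::Ready:      return "Ready";
    }
    return "?";
}

struct TShard {
    EShardState State = EShardState::Init;

    void OnInitComplete() { State = EShardState::WaitScheme; } // no processing params yet
    void OnSchemeTxDone() { State = EShardState::Ready; }      // scheme tx applied

    // Mirrors "Cannot activate change sender ... state: WaitScheme".
    bool TryActivateChangeSender() const { return State == EShardState::Ready; }
};

int main() {
    TShard shard;
    shard.OnInitComplete();
    std::cout << std::boolalpha << Name(shard.State)
              << " activate=" << shard.TryActivateChangeSender() << "\n";
    shard.OnSchemeTxDone();
    std::cout << Name(shard.State)
              << " activate=" << shard.TryActivateChangeSender() << "\n";
}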
2025-03-31T16:25:23.222633Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T16:25:23.222728Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T16:25:23.223140Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:671:2575], serverId# [1:701:2592], sessionId# [0:0:0] 2025-03-31T16:25:23.223278Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-31T16:25:23.223435Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-03-31T16:25:23.223495Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-03-31T16:25:23.225135Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:23.225249Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-03-31T16:25:23.235984Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:25:23.236071Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-31T16:25:23.236189Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-03-31T16:25:23.236245Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-03-31T16:25:23.386107Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:739:2618], serverId# [1:742:2621], sessionId# [0:0:0] 2025-03-31T16:25:23.386284Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:740:2619], serverId# [1:743:2622], sessionId# [0:0:0] 2025-03-31T16:25:23.394882Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037889 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2025-03-31T16:25:23.394958Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-31T16:25:23.395520Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T16:25:23.395561Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:23.395669Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-31T16:25:23.395709Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:25:23.395770Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037889 2025-03-31T16:25:23.396057Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-31T16:25:23.396220Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active 
planned 1 immediate 0 planned 1 2025-03-31T16:25:23.396382Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:23.396414Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:25:23.396436Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-31T16:25:23.396558Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-31T16:25:23.396622Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:25:23.396695Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-31T16:25:23.396769Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037889 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-31T16:25:23.398554Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037889 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-31T ... cStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-03-31T16:25:38.936148Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:38.936294Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-03-31T16:25:38.936371Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-03-31T16:25:38.937019Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-03-31T16:25:38.937077Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-03-31T16:25:38.937109Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037889 2025-03-31T16:25:38.937170Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037889 at tablet 72075186224037889 send result to client [4:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-31T16:25:38.937238Z node 4 :TX_DATASHARD INFO: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-31T16:25:38.937324Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-31T16:25:38.937427Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037890 step# 1000} 2025-03-31T16:25:38.937471Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-03-31T16:25:38.941163Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-03-31T16:25:38.941227Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890 2025-03-31T16:25:38.941266Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037890 2025-03-31T16:25:38.941328Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037890 at tablet 72075186224037890 send result to client [4:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-31T16:25:38.941378Z node 4 :TX_DATASHARD INFO: 72075186224037890 Sending notify to schemeshard 
72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-31T16:25:38.941457Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-03-31T16:25:38.946746Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-03-31T16:25:38.947269Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-03-31T16:25:38.947352Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-31T16:25:38.947896Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037889 coordinator 72057594046316545 last step 0 next step 1000 2025-03-31T16:25:38.948427Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037890 coordinator 72057594046316545 last step 0 next step 1000 2025-03-31T16:25:38.948797Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037889 state Ready 2025-03-31T16:25:38.948864Z node 4 :TX_DATASHARD DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-03-31T16:25:38.949587Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037890 state Ready 2025-03-31T16:25:38.949658Z node 4 :TX_DATASHARD DEBUG: 72075186224037890 Got TEvSchemaChangedResult from SS at 72075186224037890 2025-03-31T16:25:38.962863Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:838:2697], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:38.962977Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:848:2702], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:38.963057Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:38.970908Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:25:38.980221Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:38.980373Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-03-31T16:25:38.980440Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-03-31T16:25:39.141164Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:39.141266Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-03-31T16:25:39.141317Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-03-31T16:25:39.144904Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:852:2705], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:25:39.181374Z node 4 :TX_PROXY ERROR: Actor# [4:934:2756] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:25:39.268891Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqpgbpwg8a9jm0z51h94t6rm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=Zjg0MzU1ZWItZGQ3NGJiNGEtMTQ4YzdmMDEtZjI5OGIzYTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:25:39.273886Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:1039:2799], serverId# [4:1040:2800], sessionId# [0:0:0] 2025-03-31T16:25:39.274330Z node 4 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037889 2025-03-31T16:25:39.274670Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 1 Group: 1743438339274555 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-03-31T16:25:39.274879Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 2 Group: 1743438339274555 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-03-31T16:25:39.274998Z node 4 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037889, row count=1 2025-03-31T16:25:39.288175Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 }, { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 } 2025-03-31T16:25:39.288276Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-31T16:25:39.368160Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqpgbq6y5petgv07n0n6q5gq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=YmMyMTBiOTMtM2VlMGMzZDktYzkzODQwNjMtZjVhMGIxOGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-31T16:25:39.370564Z node 4 :TX_DATASHARD DEBUG: Executing write operation for [0:3] at 72075186224037889 2025-03-31T16:25:39.370912Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 3 Group: 1743438339370795 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-03-31T16:25:39.371171Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 4 Group: 1743438339370795 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-03-31T16:25:39.371287Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 5 Group: 1743438339370795 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-03-31T16:25:39.371414Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 6 Group: 1743438339370795 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] Kind: AsyncIndex Source: Unspecified Body: 24b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-03-31T16:25:39.371501Z node 4 :TX_DATASHARD DEBUG: Executed write operation for [0:3] at 72075186224037889, row count=1 2025-03-31T16:25:39.383882Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 3 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 }, { Order: 4 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 }, { Order: 5 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 }, { Order: 6 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] BodySize: 24 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 } 2025-03-31T16:25:39.383963Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-31T16:25:39.388451Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:1089:2840], serverId# [4:1090:2841], sessionId# [0:0:0] 2025-03-31T16:25:39.393555Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:1091:2842], serverId# [4:1092:2843], sessionId# [0:0:0] >> KqpQueryPerf::IndexInsert+QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexInsert+QueryService+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> CdcStreamChangeCollector::DeleteSingleRow [GOOD] Test command err: 2025-03-31T16:25:22.689545Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:25:22.689811Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:25:22.689987Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001870/r3tmp/tmpGKuTBq/pdisk_1.dat 2025-03-31T16:25:23.091833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:25:23.133063Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:25:23.174109Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:23.174249Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:23.185982Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:25:23.269096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:25:23.322404Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:676:2578] 2025-03-31T16:25:23.322680Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:25:23.359100Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:25:23.359275Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:25:23.361132Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:25:23.361233Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:25:23.361305Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:25:23.361765Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:25:23.362119Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:25:23.362194Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:703:2578] in generation 1 2025-03-31T16:25:23.363755Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:680:2580] 2025-03-31T16:25:23.363971Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:25:23.374032Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:25:23.374129Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:25:23.375551Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-03-31T16:25:23.375661Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-03-31T16:25:23.375719Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-03-31T16:25:23.376005Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:25:23.376127Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:25:23.376186Z node 1 :TX_DATASHARD DEBUG: DataShard 
72075186224037889 persisting started state actor id [1:711:2580] in generation 1 2025-03-31T16:25:23.386907Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:25:23.414239Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T16:25:23.414404Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:25:23.414497Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:714:2599] 2025-03-31T16:25:23.414543Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:25:23.414577Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T16:25:23.414611Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:23.414848Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:25:23.414875Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-03-31T16:25:23.414909Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:25:23.414958Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:715:2600] 2025-03-31T16:25:23.414976Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-03-31T16:25:23.414993Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-03-31T16:25:23.415006Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-31T16:25:23.415376Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T16:25:23.415456Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T16:25:23.415891Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:23.415931Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:23.415969Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:25:23.416003Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:23.416034Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-03-31T16:25:23.416098Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-03-31T16:25:23.416194Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:670:2574], serverId# [1:688:2584], sessionId# [0:0:0] 2025-03-31T16:25:23.416230Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-31T16:25:23.416252Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:23.416285Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-03-31T16:25:23.416321Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-03-31T16:25:23.416435Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 
2025-03-31T16:25:23.416639Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T16:25:23.416726Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T16:25:23.417069Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:671:2575], serverId# [1:701:2592], sessionId# [0:0:0] 2025-03-31T16:25:23.417194Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-31T16:25:23.417355Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-03-31T16:25:23.417397Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-03-31T16:25:23.418813Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:23.418943Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-03-31T16:25:23.429774Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:25:23.429894Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-31T16:25:23.430038Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-03-31T16:25:23.430076Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-03-31T16:25:23.580148Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:739:2618], serverId# [1:742:2621], sessionId# [0:0:0] 2025-03-31T16:25:23.580324Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:740:2619], serverId# [1:743:2622], sessionId# [0:0:0] 2025-03-31T16:25:23.584648Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037889 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2025-03-31T16:25:23.584716Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-31T16:25:23.585091Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T16:25:23.585118Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:23.585198Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-31T16:25:23.585228Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:25:23.585269Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037889 2025-03-31T16:25:23.585501Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-31T16:25:23.585708Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active 
planned 1 immediate 0 planned 1 2025-03-31T16:25:23.585899Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:23.585962Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:25:23.585993Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-31T16:25:23.586143Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-31T16:25:23.586210Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:25:23.586272Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-31T16:25:23.586327Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037889 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-31T16:25:23.587944Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037889 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-31T ... DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:38.818425Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:25:38.818495Z node 4 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-31T16:25:38.818758Z node 4 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-31T16:25:38.818908Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:25:38.819956Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:38.820041Z node 4 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-31T16:25:38.820574Z node 4 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-31T16:25:38.820980Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:38.823036Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-03-31T16:25:38.823152Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:38.824215Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-03-31T16:25:38.824306Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:38.825306Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:38.825352Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:25:38.825413Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-31T16:25:38.825490Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:409:2404], exec latency: 0 ms, propose latency: 0 ms 
2025-03-31T16:25:38.825549Z node 4 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-31T16:25:38.825672Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:38.826909Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:38.829092Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-03-31T16:25:38.829334Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-03-31T16:25:38.829452Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-31T16:25:38.844271Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:25:38.844477Z node 4 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2025-03-31T16:25:38.844534Z node 4 :TX_DATASHARD INFO: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2025-03-31T16:25:38.844580Z node 4 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2025-03-31T16:25:38.845086Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:38.871119Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:25:39.096195Z node 4 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715658 at step 1500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T16:25:39.096287Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:39.096592Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:39.096677Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:25:39.096735Z node 4 :TX_DATASHARD DEBUG: Found ready operation [1500:281474976715658] in PlanQueue unit at 72075186224037888 2025-03-31T16:25:39.096953Z node 4 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1500:281474976715658 keys extracted: 0 2025-03-31T16:25:39.097101Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:25:39.097427Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:39.098316Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:39.140362Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2025-03-31T16:25:39.140483Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:39.140564Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:39.140622Z node 4 :TX_DATASHARD 
DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:39.140705Z node 4 :TX_DATASHARD DEBUG: Complete [1500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [4:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-31T16:25:39.140774Z node 4 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715658 state Ready TxInFly 0 2025-03-31T16:25:39.140896Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:39.143857Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715658 datashard 72075186224037888 state Ready 2025-03-31T16:25:39.143977Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-31T16:25:39.162693Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:887:2724], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:39.162808Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:897:2729], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:39.162894Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:39.169684Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-31T16:25:39.177817Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:39.344394Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:39.349106Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:901:2732], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-31T16:25:39.395033Z node 4 :TX_PROXY ERROR: Actor# [4:957:2769] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:25:39.468724Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqpgbq2r9zj11mwkc2evnjf4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NjMyM2Y3YjAtNTgwNzkyZTItMmU5ZjNmZDQtNzIxMDY5ZWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:25:39.472752Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [4:988:2786], serverId# [4:989:2787], sessionId# [0:0:0] 2025-03-31T16:25:39.473338Z node 4 :TX_DATASHARD DEBUG: Executing write operation for [0:3] at 72075186224037888 2025-03-31T16:25:39.473759Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 1 Group: 1743438339473607 Step: 2000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 34b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-03-31T16:25:39.474032Z node 4 :TX_DATASHARD DEBUG: Executed write operation for [0:3] at 72075186224037888, row count=1 2025-03-31T16:25:39.485559Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 34 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2025-03-31T16:25:39.485719Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:39.641222Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqpgbqd449wts9y341eq36xw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NTU1ZTRmZGItNGQyMWU5ZTEtZDg2YmNhZDgtNGIzNTNlNjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-31T16:25:39.644105Z node 4 :TX_DATASHARD DEBUG: Executing write operation for [0:4] at 72075186224037888 2025-03-31T16:25:39.644505Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 2 Group: 1743438339644369 Step: 2000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 34b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-03-31T16:25:39.644722Z node 4 :TX_DATASHARD DEBUG: Executed write operation for [0:4] at 72075186224037888, row count=1 2025-03-31T16:25:39.656257Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 34 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2025-03-31T16:25:39.656355Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:39.659992Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [4:1016:2805], serverId# [4:1017:2806], sessionId# [0:0:0] 2025-03-31T16:25:39.669650Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [4:1018:2807], serverId# [4:1019:2808], sessionId# [0:0:0] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKInt32_Reboot [GOOD] Test command err: 2025-03-31T16:21:27.532549Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:21:27.707958Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:21:27.730680Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:21:27.730908Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:21:27.744528Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:21:27.744753Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:21:27.745002Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:21:27.745111Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:21:27.745229Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:21:27.745338Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:21:27.745466Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:21:27.745585Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:21:27.745688Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:21:27.745799Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:21:27.745905Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:21:27.746005Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:21:27.776254Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:21:27.776394Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:21:27.776441Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:21:27.776728Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:21:27.776864Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:21:27.776934Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:21:27.776977Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:21:27.777055Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:21:27.777103Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:21:27.777133Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:21:27.777154Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:21:27.777318Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:21:27.777403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:21:27.777447Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:21:27.777471Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:21:27.777553Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:21:27.777604Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:21:27.777661Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:21:27.777686Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:21:27.777738Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:21:27.777764Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:21:27.777795Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:21:27.777844Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:21:27.777873Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:21:27.777894Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:21:27.778283Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=45; 2025-03-31T16:21:27.778350Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=27; 2025-03-31T16:21:27.779108Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=693; 2025-03-31T16:21:27.779206Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=40; 2025-03-31T16:21:27.779414Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:21:27.779498Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:21:27.779537Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:21:27.779780Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:21:27.779833Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:21:27.779865Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:21:27.780053Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:21:27.780116Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:21:27.780164Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-31T16:21:27.780326Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:21:27.780368Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:21:27.780397Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-31T16:21:27.780506Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T16:21:27.780552Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T16:21:27.780603Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
BLOB:0:2696];;column_id:9;chunk_idx:9;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:10;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:11;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:12;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:13;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:14;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:15;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:23;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:30;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:31;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:32;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:33;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:34;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:35;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:36;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:37;blob_range:[NO_BLOB:0:8568];;column_id:9;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:55;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:56;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:57;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:58;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:59;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:60;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:64;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:65;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:66;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:67;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:68;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:69;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:70;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:71;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:72;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:73;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:74;blob_ra
nge:[NO_BLOB:0:2664];;column_id:9;chunk_idx:75;blob_range:[NO_BLOB:0:8552];;column_id:7;chunk_idx:0;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:1;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:2;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:3;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:4;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:5;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:6;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:7;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:8;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:9;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:10;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:11;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:12;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:13;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:14;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:15;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:16;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:17;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:18;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:19;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:20;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:21;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:22;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:23;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:24;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:25;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:26;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:27;blob_range:[NO_BLOB:0:8488];;column_id:7;chunk_idx:28;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:29;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:30;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:31;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:32;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:33;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:34;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:35;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:36;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:37;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:38;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:39;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:40;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:41;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:42;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:43;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:44;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:45;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:46;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:47;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:48;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:49;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:50;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:51;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:52;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:53;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:54;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:55;blob_range:[NO_BLOB:0:8472];;column_id:1;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:8;blob_range:[NO_BL
OB:0:2688];;column_id:1;chunk_idx:9;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:10;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:11;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:12;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:13;blob_range:[NO_BLOB:0:2680];;column_id:1;chunk_idx:14;blob_range:[NO_BLOB:0:2680];;column_id:1;chunk_idx:15;blob_range:[NO_BLOB:0:2672];;column_id:1;chunk_idx:16;blob_range:[NO_BLOB:0:2672];;column_id:1;chunk_idx:17;blob_range:[NO_BLOB:0:8248];;column_id:1;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:23;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:24;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:25;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:30;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:1;chunk_idx:32;blob_range:[NO_BLOB:0:2680];;column_id:1;chunk_idx:33;blob_range:[NO_BLOB:0:2672];;column_id:1;chunk_idx:34;blob_range:[NO_BLOB:0:2672];;column_id:1;chunk_idx:35;blob_range:[NO_BLOB:0:8240];;column_id:5;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:8;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:9;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:10;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:11;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:12;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:13;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:14;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:15;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:16;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:17;blob_range:[NO_BLOB:0:8248];;column_id:5;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:23;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:24;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:25;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:30;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:32;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:33;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:34;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:35;blob_range:[NO_BLOB:0:8240];;;;switched=(portion_id:55;path_id:1;records_count:25666;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2117976;index_
size:24;meta:((produced=INSERTED;)););(portion_id:54;path_id:1;records_count:25666;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2549080;index_size:24;meta:((produced=SPLIT_COMPACTED;)););; 2025-03-31T16:25:39.722022Z node 1 :TX_COLUMNSHARD INFO: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;tablet_id=9437184;parent_id=[1:11195:12822];fline=general_compaction.cpp:135;event=blobs_created;appended=1;switched=2; 2025-03-31T16:25:39.725084Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:11195:12822];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexLookupJoin-EnableStreamLookup+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 25880, MsgBus: 22059 2025-03-31T16:25:28.131860Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010605355934830:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:28.131912Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000f20/r3tmp/tmp6w5QQI/pdisk_1.dat 2025-03-31T16:25:28.604335Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:25:28.606663Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:28.606810Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:28.619908Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25880, node 1 2025-03-31T16:25:28.816758Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:25:28.816779Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:25:28.816787Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:25:28.816918Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22059 TClient is connected to server localhost:22059 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:25:29.691262Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:29.727587Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:25:29.741191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:29.933921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:30.149318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:30.234868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:31.487697Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010618240838489:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:31.487817Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:31.869655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:25:31.901652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:25:31.930662Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:25:31.960087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:25:31.990303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:25:32.023698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:25:32.132038Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010622535806298:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:32.132143Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:32.132367Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010622535806303:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:32.142345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:25:32.155695Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010622535806305:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:25:32.248981Z node 1 :TX_PROXY ERROR: Actor# [1:7488010622535806362:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:25:33.132331Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010605355934830:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:33.140264Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 9993, MsgBus: 24851 2025-03-31T16:25:34.794039Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488010630050671521:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:34.794121Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000f20/r3tmp/tmpyWJNmH/pdisk_1.dat 2025-03-31T16:25:34.938386Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:25:34.962706Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:34.962784Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:34.964989Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9993, node 2 2025-03-31T16:25:35.022616Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:25:35.022641Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:25:35.022649Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:25:35.022771Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24851 TClient is connected to server localhost:24851 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:25:35.501952Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:35.509143Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:25:35.521291Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:35.597612Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:25:35.756306Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-31T16:25:35.822073Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:38.281407Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010647230542487:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:38.281513Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:38.322657Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:25:38.360264Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:25:38.394845Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:25:38.430566Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:25:38.472386Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:25:38.513074Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:25:38.562927Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010647230543001:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:38.563013Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:38.563267Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010647230543006:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:38.567573Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:25:38.578753Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488010647230543008:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:25:38.671417Z node 2 :TX_PROXY ERROR: Actor# [2:7488010647230543061:3450] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:25:39.799195Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488010630050671521:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:39.859405Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> SystemView::TabletsFields [GOOD] >> SystemView::TabletsFollowers >> KqpQueryPerf::IndexUpsert+QueryService-UseSink >> KqpQueryPerf::RangeLimitRead-QueryService >> KqpQueryPerf::Replace-QueryService-UseSink >> DataShardVolatile::DistributedUpsertRestartBeforePrepare-UseSink [GOOD] >> DataShardVolatile::DistributedUpsertRestartAfterPrepare+UseSink >> KqpQueryPerf::IndexDeleteOn-QueryService-UseSink >> KqpQueryPerf::IdxLookupJoin+QueryService >> KqpQueryPerf::KvRead+QueryService >> KqpQueryPerf::ComputeLength+QueryService >> KqpQueryPerf::RangeLimitRead+QueryService >> KqpQueryPerf::Update+QueryService-UseSink >> KqpQueryPerf::UpdateOn+QueryService-UseSink [GOOD] >> KqpQueryPerf::Upsert-QueryService-UseSink >> TColumnShardTestSchema::InternalTTL_Types [GOOD] >> KqpQueryPerf::Upsert+QueryService+UseSink >> KqpQueryPerf::DeleteOn+QueryService-UseSink >> KqpQueryPerf::IndexDeleteOn+QueryService+UseSink [GOOD] >> KqpQueryPerf::IndexReplace+QueryService+UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::UpdateOn+QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 16409, MsgBus: 25252 2025-03-31T16:25:37.502888Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010640316497250:2203];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:37.503349Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ed1/r3tmp/tmpbqjAaz/pdisk_1.dat 2025-03-31T16:25:37.933825Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:25:37.962375Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:37.962481Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:37.964190Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16409, node 1 2025-03-31T16:25:38.065483Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:25:38.065506Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:25:38.065516Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:25:38.065661Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25252 TClient is 
connected to server localhost:25252 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:25:38.691825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:38.713026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:38.834667Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:39.020213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:39.113343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:40.935774Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010653201400786:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:40.935900Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:41.282506Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:25:41.319479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:25:41.364696Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:25:41.402499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:25:41.428345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:25:41.467030Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:25:41.566468Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010657496368601:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:41.566552Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:41.566763Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010657496368606:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:41.571869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:25:41.594793Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010657496368608:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:25:41.651416Z node 1 :TX_PROXY ERROR: Actor# [1:7488010657496368662:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:25:42.502558Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010640316497250:2203];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:42.502643Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> SystemView::AuthGroups_ResultOrder [GOOD] >> SystemView::AuthGroupMembers >> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime_Reboot [GOOD] >> SystemView::AuthOwners_Access [GOOD] >> SystemView::AuthOwners_ResultOrder ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexDeleteOn+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 28680, MsgBus: 22115 2025-03-31T16:25:28.130413Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010602080348997:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:28.130868Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000f37/r3tmp/tmp1FI8in/pdisk_1.dat 2025-03-31T16:25:28.591148Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:28.591243Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:28.594443Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:25:28.595743Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28680, node 1 2025-03-31T16:25:28.816414Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:25:28.816445Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:25:28.816456Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:25:28.816611Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22115 TClient is connected to server localhost:22115 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:25:29.672971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:29.716205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:29.921346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:30.088191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:30.171438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:31.376636Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010614965252671:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:31.377339Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:31.869367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:25:31.900269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:25:31.928349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:25:31.960015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:25:31.989897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:25:32.024089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:25:32.131599Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010619260220482:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:32.131695Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:32.131914Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010619260220487:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:32.140213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:25:32.159594Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010619260220489:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:25:32.248658Z node 1 :TX_PROXY ERROR: Actor# [1:7488010619260220546:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:25:33.130453Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010602080348997:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:33.130539Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:25:33.381197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:25:33.423272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:25:33.496596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 6088, MsgBus: 62208 2025-03-31T16:25:36.301910Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488010636037648305:2088];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:36.302780Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000f37/r3tmp/tmp2ARLaX/pdisk_1.dat 2025-03-31T16:25:36.423842Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:25:36.436569Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:36.436654Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:36.439441Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6088, node 2 2025-03-31T16:25:36.494905Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:25:36.494928Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:25:36.494935Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:25:36.495047Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62208 TClient is connected to server localhost:62208 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:25:36.970132Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:36.980173Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:25:36.993586Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:37.056676Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:37.207410Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:37.287743Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:39.688126Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010648922551915:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:39.688210Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:39.730933Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:25:39.805472Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:25:39.853766Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:25:39.894793Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:25:39.981745Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:25:40.021582Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:25:40.079620Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010653217519729:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:40.079700Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:40.079915Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010653217519734:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:40.084457Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:25:40.097371Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488010653217519736:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:25:40.152938Z node 2 :TX_PROXY ERROR: Actor# [2:7488010653217519789:3442] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:25:41.167172Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:25:41.222421Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:25:41.273483Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T16:25:41.304307Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488010636037648305:2088];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:41.304438Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpQueryPerf::Insert+QueryService-UseSink [GOOD] >> KqpQueryPerf::Insert+QueryService+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexReplace+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 1068, MsgBus: 18010 2025-03-31T16:25:28.649359Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010603455652148:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:28.649927Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000f1b/r3tmp/tmpLEl6Bz/pdisk_1.dat 2025-03-31T16:25:29.072613Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:25:29.100679Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:29.100754Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:29.102360Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1068, node 1 2025-03-31T16:25:29.175872Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:25:29.175903Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:25:29.175913Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:25:29.175995Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18010 TClient is connected to server localhost:18010 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:25:29.797219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:29.815512Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:25:29.834048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:29.974013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:25:30.159244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:25:30.240132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:31.976485Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010616340555691:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:31.976667Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:32.303680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:25:32.336196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:25:32.366566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:25:32.400925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:25:32.474167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:25:32.545338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:25:32.598316Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010620635523506:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:32.598386Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:32.598537Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010620635523511:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:32.602491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:25:32.615778Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010620635523513:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:25:32.710705Z node 1 :TX_PROXY ERROR: Actor# [1:7488010620635523569:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:25:33.646227Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010603455652148:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:33.646298Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:25:33.782355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:25:33.813509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:25:33.889442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 13497, MsgBus: 10996 2025-03-31T16:25:36.900980Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488010636833520803:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:36.901058Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000f1b/r3tmp/tmpiVhyYQ/pdisk_1.dat 2025-03-31T16:25:37.013400Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:25:37.046721Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:37.046798Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:37.048570Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13497, node 2 2025-03-31T16:25:37.093655Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:25:37.093684Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:25:37.093691Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:25:37.093804Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10996 TClient is connected to server localhost:10996 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:25:37.593315Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:37.600625Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:25:37.614871Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:37.698519Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:37.876098Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:37.973986Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:40.075929Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010654013391760:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:40.076019Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:40.122492Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:25:40.162444Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:25:40.204616Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:25:40.249850Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:25:40.320112Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:25:40.393741Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:25:40.469403Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010654013392278:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:40.469497Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:40.469527Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010654013392283:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:40.472980Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:25:40.502196Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488010654013392285:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:25:40.566224Z node 2 :TX_PROXY ERROR: Actor# [2:7488010654013392341:3446] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:25:41.556046Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:25:41.643413Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-31T16:25:41.740501Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-31T16:25:41.901414Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488010636833520803:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:41.901480Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::InternalTTL_Types [GOOD] Test command err: 2025-03-31T16:23:59.803636Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:23:59.898634Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:23:59.903545Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:23:59.903999Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:23:59.930450Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:23:59.930983Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:23:59.939956Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:23:59.940198Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:23:59.940492Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:23:59.940625Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:23:59.940753Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:23:59.940878Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:23:59.941004Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:23:59.941147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:23:59.941313Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:23:59.941431Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:23:59.941569Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:23:59.941690Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:23:59.968633Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:23:59.972299Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:23:59.972485Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:23:59.972546Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:23:59.972727Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:23:59.972891Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:23:59.973017Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:23:59.973068Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:23:59.973216Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:23:59.973304Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:23:59.973357Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:23:59.973398Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:23:59.973604Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:23:59.973690Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:23:59.973779Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:23:59.973817Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:23:59.973930Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:23:59.973993Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:23:59.974059Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:23:59.974100Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:23:59.974182Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:23:59.974225Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:23:59.974257Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:23:59.974314Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:23:59.974361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:23:59.974395Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:23:59.974873Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=60; 2025-03-31T16:23:59.974975Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=38; 2025-03-31T16:23:59.975059Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=38; 2025-03-31T16:23:59.975207Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=41; 2025-03-31T16:23:59.975455Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:23:59.975522Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:23:59.975559Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:23:59.975759Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:23:59.975810Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:23:59.975842Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:23:59.976103Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:23:59.976161Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:23:59.976196Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-31T16:23:59.976413Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:23:59.976465Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:23:59.976500Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-31T1 ... 
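The TTxUpdateSchema block above walks an ordered chain of normalizers (normalizers_count=11), logging normalizer_init (abstract.cpp:154), a per-normalizer result ("0 chunks found"), normalizer_finished (abstract.cpp:62), and normalizer_switched (abstract.cpp:42) before moving to the next entry; note that the seq_ids are sparse (1, 2, 4, 6, 8, 9, 10, 11, 13, 15, 16). Below is a minimal sketch of such a sequential driver loop, assuming a simplified INormalizer interface; it only mirrors the event pattern in this log and is not YDB's actual NKikimr normalizer machinery.

// Hedged sketch: sequential normalizer driver producing the same
// init -> "N chunks found" -> finished -> switched event order as the log.
#include <iostream>
#include <memory>
#include <string>
#include <vector>

struct INormalizer {
    virtual ~INormalizer() = default;
    virtual std::string ClassName() const = 0;
    virtual size_t SeqId() const = 0;
    // Returns the number of chunks repaired; 0 means nothing to do.
    virtual size_t Normalize() = 0;
};

class TNoopNormalizer : public INormalizer {
public:
    TNoopNormalizer(std::string name, size_t seqId)
        : Name(std::move(name)), Seq(seqId) {}
    std::string ClassName() const override { return Name; }
    size_t SeqId() const override { return Seq; }
    size_t Normalize() override { return 0; }  // matches "0 chunks found"
private:
    std::string Name;
    size_t Seq;
};

// Runs normalizers strictly in registration order, emitting the three
// events seen in the tablet log for each one.
void RunNormalizers(const std::vector<std::unique_ptr<INormalizer>>& chain) {
    for (size_t i = 0; i < chain.size(); ++i) {
        const auto& n = *chain[i];
        std::cout << "event=normalizer_init;seq_id=" << n.SeqId()
                  << ";type=" << n.ClassName() << "\n";
        std::cout << "normalizer=" << n.ClassName() << ";message="
                  << n.Normalize() << " chunks found\n";
        std::cout << "event=normalizer_finished;description=CLASS_NAME="
                  << n.ClassName() << ";id=" << n.SeqId() << "\n";
        if (i + 1 < chain.size()) {
            std::cout << "event=normalizer_switched;description=CLASS_NAME="
                      << chain[i + 1]->ClassName() << "\n";
        }
    }
}

int main() {
    // The log registers 11 normalizers; three are enough to show the shape.
    std::vector<std::unique_ptr<INormalizer>> chain;
    chain.push_back(std::make_unique<TNoopNormalizer>("Granules", 1));
    chain.push_back(std::make_unique<TNoopNormalizer>("Chunks", 2));
    chain.push_back(std::make_unique<TNoopNormalizer>("TablesCleaner", 4));
    RunNormalizers(chain);
}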
=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-31T16:25:42.742225Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:1;records_count:71;schema=saved_at: uint64;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-31T16:25:42.742274Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-31T16:25:42.742324Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:201;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-03-31T16:25:42.742477Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-31T16:25:42.742606Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:1;records_count:71;schema=saved_at: uint64;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-31T16:25:42.742655Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-31T16:25:42.742774Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:232;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;);columns=1;rows=71; 2025-03-31T16:25:42.742844Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:252;stage=data_format;batch_size=568;num_rows=71;batch_columns=saved_at; 2025-03-31T16:25:42.743004Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:366;event=send_data;compute_actor_id=[4:593:2609];bytes=568;rows=71;faults=0;finished=0;fault=0;schema=saved_at: uint64; 2025-03-31T16:25:42.743127Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-31T16:25:42.743241Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-31T16:25:42.743356Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-31T16:25:42.743487Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-31T16:25:42.743578Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-31T16:25:42.743671Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-31T16:25:42.743711Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: Scan [4:600:2616] finished for tablet 9437184 2025-03-31T16:25:42.744236Z node 4 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:416;event=scan_finish;compute_actor_id=[4:593:2609];stats={"p":[{"events":["f_bootstrap"],"t":0.08},{"events":["f_ProduceResults"],"t":0.568},{"events":["l_bootstrap"],"t":0.848},{"events":["f_processing","f_task_result"],"t":0.892},{"events":["l_task_result"],"t":8.881},{"events":["f_ack"],"t":8.925},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":10.049}],"full":{"a":1743438332694162,"name":"_full_task","f":1743438332694162,"d_finished":0,"c":0,"l":1743438342743770,"d":10049608},"events":[{"name":"bootstrap","f":1743438332774608,"d_finished":767589,"c":1,"l":1743438333542197,"d":767589},{"a":1743438342743465,"name":"ack","f":1743438341619499,"d_finished":1043471,"c":903,"l":1743438342743383,"d":1043776},{"a":1743438342743452,"name":"processing","f":1743438333586672,"d_finished":4254498,"c":4515,"l":1743438342743386,"d":4254816},{"name":"ProduceResults","f":1743438333263031,"d_finished":1828044,"c":5420,"l":1743438342743694,"d":1828044},{"a":1743438342743696,"name":"Finish","f":1743438342743696,"d_finished":0,"c":0,"l":1743438342743770,"d":74},{"name":"task_result","f":1743438333586702,"d_finished":3116355,"c":3612,"l":1743438341575625,"d":3116355}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-31T16:25:42.744340Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=send_data;compute_actor_id=[4:593:2609];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-31T16:25:42.744911Z node 4 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:371;event=scan_finished;compute_actor_id=[4:593:2609];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0.08},{"events":["f_ProduceResults"],"t":0.568},{"events":["l_bootstrap"],"t":0.848},{"events":["f_processing","f_task_result"],"t":0.892},{"events":["l_task_result"],"t":8.881},{"events":["f_ack"],"t":8.925},{"events":["l_ProduceResults","f_Finish"],"t":10.049},{"events":["l_ack","l_processing","l_Finish"],"t":10.05}],"full":{"a":1743438332694162,"name":"_full_task","f":1743438332694162,"d_finished":0,"c":0,"l":1743438342744394,"d":10050232},"events":[{"name":"bootstrap","f":1743438332774608,"d_finished":767589,"c":1,"l":1743438333542197,"d":767589},{"a":1743438342743465,"name":"ack","f":1743438341619499,"d_finished":1043471,"c":903,"l":1743438342743383,"d":1044400},{"a":1743438342743452,"name":"processing","f":1743438333586672,"d_finished":4254498,"c":4515,"l":1743438342743386,"d":4255440},{"name":"ProduceResults","f":1743438333263031,"d_finished":1828044,"c":5420,"l":1743438342743694,"d":1828044},{"a":1743438342743696,"name":"Finish","f":1743438342743696,"d_finished":0,"c":0,"l":1743438342744394,"d":698},{"name":"task_result","f":1743438333586702,"d_finished":3116355,"c":3612,"l":1743438341575625,"d":3116355}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-31T16:25:42.745018Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-31T16:25:32.613779Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=903;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=7037528;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=7037528;selected_rows=0; 2025-03-31T16:25:42.745061Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-31T16:25:42.745337Z node 4 :TX_COLUMNSHARD_SCAN INFO: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> 
KqpQueryPerf::IndexUpdateOn+QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexUpdateOn+QueryService+UseSink >> KqpQueryPerf::Replace+QueryService+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime_Reboot [GOOD] Test command err: 2025-03-31T16:21:35.256848Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:21:35.357910Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:21:35.386101Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:21:35.386390Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:21:35.395518Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:21:35.395757Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:21:35.396022Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:21:35.396178Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:21:35.396309Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:21:35.396431Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:21:35.396636Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:21:35.396773Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:21:35.396899Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:21:35.397036Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:21:35.397152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:21:35.397267Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:21:35.429381Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:21:35.429553Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:21:35.429611Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:21:35.429815Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:21:35.429973Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:21:35.430049Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:21:35.430099Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:21:35.430205Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:21:35.430269Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:21:35.430334Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:21:35.430375Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:21:35.430622Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:21:35.430721Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:21:35.430772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:21:35.430807Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:21:35.430930Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:21:35.430996Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:21:35.431039Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:21:35.431069Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:21:35.431167Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:21:35.431214Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:21:35.431254Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:21:35.431336Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:21:35.431388Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:21:35.431419Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:21:35.431878Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=55; 2025-03-31T16:21:35.431992Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=56; 2025-03-31T16:21:35.432095Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=45; 2025-03-31T16:21:35.432196Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=55; 2025-03-31T16:21:35.432374Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:21:35.432472Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:21:35.432512Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:21:35.432726Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:21:35.432781Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:21:35.432814Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:21:35.433008Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:21:35.433063Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:21:35.433101Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-31T16:21:35.433343Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:21:35.433392Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:21:35.433428Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-31T16:21:35.433555Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T16:21:35.433616Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T16:21:35.433677Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
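For reference, the scan-finish stats blob printed earlier (stats={"p":[...],"full":{...},"events":[...]}) is internally consistent: the "f" and "l" fields are first/last event timestamps in microseconds since epoch, and for the "_full_task" entry the reported duration "d" equals l - f. The per-ack budget in the TEvScanDataAck lines, bytes=8388608, is 8 MiB. A small self-check of that arithmetic, with the constants copied from the log above (illustration only, not YDB code):

// Hedged sketch: verify the "_full_task" timing fields from the stats JSON.
#include <cassert>
#include <cstdint>
#include <iostream>

int main() {
    const int64_t f = 1743438332694162;  // first event, microseconds since epoch
    const int64_t l = 1743438342743770;  // last event, microseconds since epoch
    const int64_t d = 10049608;          // reported duration, microseconds
    assert(l - f == d);                  // holds for the values in this log
    std::cout << "full task took " << d / 1e6 << " s\n";  // ~10.05 s
    std::cout << "ack budget: " << (8388608 / (1024 * 1024)) << " MiB\n";  // 8
}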
BLOB:0:2696];;column_id:9;chunk_idx:9;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:10;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:11;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:12;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:13;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:14;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:15;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:23;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:30;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:31;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:32;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:33;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:34;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:35;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:36;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:37;blob_range:[NO_BLOB:0:8568];;column_id:9;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:55;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:56;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:57;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:58;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:59;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:60;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:64;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:65;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:66;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:67;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:68;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:69;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:70;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:71;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:72;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:73;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:74;blob_ra
nge:[NO_BLOB:0:2664];;column_id:9;chunk_idx:75;blob_range:[NO_BLOB:0:8552];;column_id:7;chunk_idx:0;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:1;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:2;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:3;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:4;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:5;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:6;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:7;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:8;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:9;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:10;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:11;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:12;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:13;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:14;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:15;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:16;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:17;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:18;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:19;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:20;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:21;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:22;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:23;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:24;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:25;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:26;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:27;blob_range:[NO_BLOB:0:8488];;column_id:7;chunk_idx:28;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:29;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:30;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:31;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:32;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:33;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:34;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:35;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:36;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:37;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:38;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:39;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:40;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:41;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:42;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:43;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:44;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:45;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:46;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:47;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:48;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:49;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:50;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:51;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:52;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:53;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:54;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:55;blob_range:[NO_BLOB:0:8472];;column_id:1;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:8;blob_range:[NO_BL
OB:0:2688];;column_id:1;chunk_idx:9;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:10;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:11;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:12;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:13;blob_range:[NO_BLOB:0:2680];;column_id:1;chunk_idx:14;blob_range:[NO_BLOB:0:2680];;column_id:1;chunk_idx:15;blob_range:[NO_BLOB:0:2672];;column_id:1;chunk_idx:16;blob_range:[NO_BLOB:0:2672];;column_id:1;chunk_idx:17;blob_range:[NO_BLOB:0:8248];;column_id:1;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:23;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:24;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:25;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:30;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:1;chunk_idx:32;blob_range:[NO_BLOB:0:2680];;column_id:1;chunk_idx:33;blob_range:[NO_BLOB:0:2672];;column_id:1;chunk_idx:34;blob_range:[NO_BLOB:0:2672];;column_id:1;chunk_idx:35;blob_range:[NO_BLOB:0:8240];;column_id:5;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:8;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:9;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:10;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:11;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:12;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:13;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:14;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:15;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:16;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:17;blob_range:[NO_BLOB:0:8248];;column_id:5;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:23;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:24;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:25;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:30;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:32;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:33;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:34;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:35;blob_range:[NO_BLOB:0:8240];;;;switched=(portion_id:55;path_id:1;records_count:25666;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2117976;index_
size:24;meta:((produced=INSERTED;)););(portion_id:54;path_id:1;records_count:25666;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2549080;index_size:24;meta:((produced=SPLIT_COMPACTED;)););; 2025-03-31T16:25:45.027665Z node 1 :TX_COLUMNSHARD INFO: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;tablet_id=9437184;parent_id=[1:11195:12822];fline=general_compaction.cpp:135;event=blobs_created;appended=1;switched=2; 2025-03-31T16:25:45.029417Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:11195:12822];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> CdcStreamChangeCollector::OldImage [GOOD] >> SystemView::TabletsFollowers [GOOD] >> SystemView::TabletsRanges >> SystemView::DescribeAccessDenied [GOOD] >> SystemView::CollectPreparedQueries >> TColumnShardTestSchema::RebootEnableColdTiersAfterNoEviction [GOOD] >> KqpQueryPerf::DeleteOn-QueryService-UseSink >> KqpQueryPerf::Upsert-QueryService+UseSink >> KqpQueryPerf::Delete-QueryService-UseSink >> TColumnShardTestSchema::RebootEnableColdTiersAfterTtl [GOOD] >> KqpQueryPerf::RangeLimitRead-QueryService [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> CdcStreamChangeCollector::OldImage [GOOD] Test command err: 2025-03-31T16:25:22.801693Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:25:22.801968Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:25:22.802162Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00189a/r3tmp/tmpTmwjY9/pdisk_1.dat 2025-03-31T16:25:23.201724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:25:23.238791Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:25:23.241919Z node 1 :TABLET_SAUSAGECACHE NOTICE: Update config MemoryLimit: 33554432 2025-03-31T16:25:23.277159Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:23.277336Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:23.288965Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:25:23.370724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:25:23.413542Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-31T16:25:23.413802Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:25:23.466973Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:25:23.467137Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:25:23.468985Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:25:23.469080Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:25:23.469149Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:25:23.469603Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:25:23.469742Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:25:23.469833Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-31T16:25:23.480648Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:25:23.511147Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T16:25:23.511379Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:25:23.511515Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-31T16:25:23.511564Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:25:23.511600Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T16:25:23.511656Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at 
tablet# 72075186224037888 2025-03-31T16:25:23.512098Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T16:25:23.512214Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T16:25:23.512318Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:23.512371Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:23.512442Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:25:23.512487Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:23.512584Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-31T16:25:23.512744Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:25:23.513012Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T16:25:23.513136Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T16:25:23.515298Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:23.525980Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:25:23.526067Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-31T16:25:23.674719Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-31T16:25:23.678267Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T16:25:23.678345Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:23.678809Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:23.678850Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:25:23.678908Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-31T16:25:23.679129Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-31T16:25:23.679266Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:25:23.679678Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:23.679770Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-31T16:25:23.681183Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-31T16:25:23.681502Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp 
at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:23.682968Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-03-31T16:25:23.683028Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:23.683750Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-03-31T16:25:23.683838Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:23.684544Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:23.684575Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:25:23.684615Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-31T16:25:23.684663Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-31T16:25:23.684706Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-31T16:25:23.684797Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:23.687778Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:23.689308Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-03-31T16:25:23.689490Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-03-31T16:25:23.689562Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-31T16:25:23.698863Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:25:23.698980Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2025-03-31T16:25:23.699035Z node 1 :TX_DATASHARD INFO: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2025-03-31T16:25:23.699097Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2025-03-31T16:25:23.699956Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:23.723497Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:25:23.952395Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715658 at step 1500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T16:25:23.952476Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:23.952674Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:23.952718Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active 
planned 0 immediate 0 planned 1 2025-03-31T16:25:23.952765Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1500:281474976715658] in PlanQueue unit at 72075186224037888 2025-03-31T16:25:23.952951Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1500:281474976715658 keys extracted: 0 2025-03-31T16:25:23.953082Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:25:23.953384Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:23.954171Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:24.016062Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2025-03-31T16:25:24.016196Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:24.016250Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:24.016295Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tab ... DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:45.970874Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:25:45.970929Z node 4 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-31T16:25:45.971531Z node 4 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-31T16:25:45.971703Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:25:45.972431Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:45.972520Z node 4 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-31T16:25:45.973047Z node 4 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-31T16:25:45.973534Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:45.976042Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-03-31T16:25:45.976123Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:45.977223Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-03-31T16:25:45.977324Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:45.978444Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:45.978496Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:25:45.978562Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-31T16:25:45.978638Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-31T16:25:45.978735Z node 4 
:TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-31T16:25:45.978861Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:45.980281Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:45.982767Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-03-31T16:25:45.983053Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-03-31T16:25:45.983169Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-31T16:25:45.998627Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:25:45.998846Z node 4 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2025-03-31T16:25:45.998906Z node 4 :TX_DATASHARD INFO: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2025-03-31T16:25:45.998948Z node 4 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2025-03-31T16:25:46.000536Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:46.025992Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:25:46.309682Z node 4 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715658 at step 1500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T16:25:46.309749Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:46.309980Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:46.310018Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:25:46.310065Z node 4 :TX_DATASHARD DEBUG: Found ready operation [1500:281474976715658] in PlanQueue unit at 72075186224037888 2025-03-31T16:25:46.310268Z node 4 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1500:281474976715658 keys extracted: 0 2025-03-31T16:25:46.310390Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:25:46.310688Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:25:46.311558Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:25:46.350740Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2025-03-31T16:25:46.350879Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:46.350924Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:25:46.350980Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit 
heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:46.351101Z node 4 :TX_DATASHARD DEBUG: Complete [1500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [4:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-31T16:25:46.351179Z node 4 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715658 state Ready TxInFly 0 2025-03-31T16:25:46.351282Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:46.353473Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715658 datashard 72075186224037888 state Ready 2025-03-31T16:25:46.353550Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-31T16:25:46.368357Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:887:2724], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:46.368473Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:897:2729], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:46.368560Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:46.375463Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-31T16:25:46.382817Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:46.551872Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:25:46.554853Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:901:2732], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-31T16:25:46.595100Z node 4 :TX_PROXY ERROR: Actor# [4:957:2769] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:25:46.671142Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqpgby3ybgzje470gvj8cja9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=Y2JmYzY3NTgtZTcyZjg0YjMtN2ViOTIyY2MtYjVhYzFkN2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:25:46.674454Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [4:988:2786], serverId# [4:989:2787], sessionId# [0:0:0] 2025-03-31T16:25:46.674963Z node 4 :TX_DATASHARD DEBUG: Executing write operation for [0:3] at 72075186224037888 2025-03-31T16:25:46.675276Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 1 Group: 1743438346675147 Step: 2000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 18b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-03-31T16:25:46.675467Z node 4 :TX_DATASHARD DEBUG: Executed write operation for [0:3] at 72075186224037888, row count=1 2025-03-31T16:25:46.687955Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 18 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2025-03-31T16:25:46.688062Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:46.774899Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqpgbye64z4q1rxn1wtw607t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NDU4NjgxNjItNmFmNjNlOTQtYmU0YTg2MTAtZTYxYTUyZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-31T16:25:46.777251Z node 4 :TX_DATASHARD DEBUG: Executing write operation for [0:4] at 72075186224037888 2025-03-31T16:25:46.777610Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 2 Group: 1743438346777484 Step: 2000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 40b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-03-31T16:25:46.777793Z node 4 :TX_DATASHARD DEBUG: Executed write operation for [0:4] at 72075186224037888, row count=1 2025-03-31T16:25:46.788988Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 40 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2025-03-31T16:25:46.789068Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:25:46.791837Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [4:1016:2805], serverId# [4:1017:2806], sessionId# [0:0:0] 2025-03-31T16:25:46.798750Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [4:1018:2807], serverId# [4:1019:2808], sessionId# [0:0:0] >> KqpQueryPerf::Replace-QueryService-UseSink [GOOD] >> KqpQueryPerf::IdxLookupJoinThreeWay+QueryService >> KqpQueryPerf::KvRead+QueryService [GOOD] >> KqpQueryPerf::KvRead-QueryService |92.0%| [TA] $(B)/ydb/core/tx/datashard/ut_change_collector/test-results/unittest/{meta.json ... results_accumulator.log} |92.0%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_collector/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootEnableColdTiersAfterNoEviction [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143438857.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143438857.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=143438857.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123438857.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143438857.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=143438857.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123437657.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123438857.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=123438857.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123437657.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123437657.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=123437657.000000s;Name=;Codec=}; 2025-03-31T16:24:17.911682Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:24:17.995837Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:24:18.011941Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:24:18.012184Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:24:18.019434Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:24:18.019611Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:24:18.019815Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:24:18.019908Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 
2025-03-31T16:24:18.020022Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:24:18.020116Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:24:18.020184Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:24:18.020252Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:24:18.020336Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:24:18.020422Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:24:18.020489Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:24:18.020560Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:24:18.039399Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:24:18.039536Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:24:18.039596Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:24:18.039723Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:24:18.039853Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:24:18.039910Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:24:18.039943Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:24:18.040006Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:24:18.040051Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:24:18.040079Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:24:18.040100Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:24:18.040231Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:24:18.040286Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:24:18.040318Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:24:18.040340Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:24:18.040411Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:24:18.040452Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:24:18.040483Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:24:18.040502Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:24:18.040553Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:24:18.040578Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:24:18.040598Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:24:18.040633Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:24:18.040660Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:24:18.040684Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:24:18.040994Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=34; 2025-03-31T16:24:18.041062Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=29; 2025-03-31T16:24:18.041131Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=31; 2025-03-31T16:24:18.041242Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=67; 2025-03-31T16:24:18.041455Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:24:18.041513Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:24:18.041546Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:24:18.041701Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:24:18.041739Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:24:18.041760Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TT ... pp:29;EXECUTE:finishLoadingTime=563; 2025-03-31T16:25:47.132887Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=61629; 2025-03-31T16:25:47.145528Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=12546; 2025-03-31T16:25:47.159311Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=12744; 2025-03-31T16:25:47.159423Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=13760; 2025-03-31T16:25:47.159616Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=117; 2025-03-31T16:25:47.159742Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=70; 2025-03-31T16:25:47.159910Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=115; 2025-03-31T16:25:47.160039Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=76; 2025-03-31T16:25:47.177062Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=16934; 2025-03-31T16:25:47.199514Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=22313; 2025-03-31T16:25:47.199775Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=45; 2025-03-31T16:25:47.199868Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=38; 2025-03-31T16:25:47.199923Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=9; 2025-03-31T16:25:47.199975Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=10; 2025-03-31T16:25:47.200029Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=8; 2025-03-31T16:25:47.200133Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=55; 2025-03-31T16:25:47.200189Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=7; 2025-03-31T16:25:47.200292Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=60; 2025-03-31T16:25:47.200345Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=8; 2025-03-31T16:25:47.200435Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=46; 2025-03-31T16:25:47.200558Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=67; 2025-03-31T16:25:47.200951Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=349; 2025-03-31T16:25:47.201081Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=143057; 2025-03-31T16:25:47.201252Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive 
{blob_bytes=46800024;raw_bytes=72380025;count=29;records=720000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-03-31T16:25:47.201381Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-03-31T16:25:47.201444Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-03-31T16:25:47.201515Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-03-31T16:25:47.221793Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=1; 2025-03-31T16:25:47.221981Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-31T16:25:47.222048Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:25:47.222126Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=7; 2025-03-31T16:25:47.222193Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-03-31T16:25:47.222242Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=7;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-31T16:25:47.222293Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-31T16:25:47.222338Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-31T16:25:47.222447Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-31T16:25:47.223026Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:25:47.223144Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;self_id=[1:2654:4528];tablet_id=9437184;parent=[1:2612:4493];fline=manager.cpp:82;event=ask_data;request=request_id=155;1={portions_count=29};; 2025-03-31T16:25:47.224178Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-03-31T16:25:47.224574Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-03-31T16:25:47.224608Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-03-31T16:25:47.224632Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-03-31T16:25:47.224677Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-31T16:25:47.224740Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:25:47.224800Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=7; 2025-03-31T16:25:47.224863Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-03-31T16:25:47.224912Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=7;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-31T16:25:47.224964Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-31T16:25:47.225005Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-31T16:25:47.225110Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-31T16:25:47.228144Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1452;size=29;path_id=1; 2025-03-31T16:25:47.229920Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1503;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 240000/15598728 160000/10402096 160000/10402096 80000/5203544 0/0 >> KqpQueryPerf::IndexInsert+QueryService+UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootEnableColdTiersAfterTtl [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=143438858.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143438858.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143438858.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=143438858.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123438858.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143438858.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=143438858.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123437658.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123438858.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=123438858.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123437658.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123437658.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=123437658.000000s;Name=;Codec=}; 2025-03-31T16:24:18.406199Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:24:18.499879Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:24:18.516902Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:24:18.517153Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:24:18.524094Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:24:18.524313Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:24:18.524567Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:24:18.524710Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:24:18.524835Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:24:18.524941Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:24:18.525041Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:24:18.525153Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:24:18.525303Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:24:18.525470Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:24:18.525580Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:24:18.525680Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:24:18.553228Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:24:18.553410Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:24:18.553499Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:24:18.553681Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:24:18.553856Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:24:18.553930Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:24:18.553976Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:24:18.554067Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:24:18.554130Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:24:18.554173Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:24:18.554206Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:24:18.554411Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:24:18.554495Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:24:18.554538Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:24:18.554574Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:24:18.554683Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:24:18.554736Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:24:18.554780Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:24:18.554810Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:24:18.554882Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:24:18.554920Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:24:18.554973Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:24:18.555025Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:24:18.555064Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:24:18.555122Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:24:18.555548Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=49; 2025-03-31T16:24:18.555640Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=37; 2025-03-31T16:24:18.555724Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=37; 2025-03-31T16:24:18.555820Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=53; 2025-03-31T16:24:18.556059Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:24:18.556142Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:24:18.556191Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:24:18.556406Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:24:18.556456Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:24:18.556495Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;pr ... .cpp:29;EXECUTE:finishLoadingTime=635; 2025-03-31T16:25:47.560341Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=59410; 2025-03-31T16:25:47.573831Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=13381; 2025-03-31T16:25:47.588038Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=13101; 2025-03-31T16:25:47.588177Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=14222; 2025-03-31T16:25:47.588376Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=124; 2025-03-31T16:25:47.588524Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=94; 2025-03-31T16:25:47.588680Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=95; 2025-03-31T16:25:47.588815Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=87; 2025-03-31T16:25:47.604061Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=15154; 2025-03-31T16:25:47.619620Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=15414; 2025-03-31T16:25:47.619750Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=33; 2025-03-31T16:25:47.619807Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=21; 2025-03-31T16:25:47.619843Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=5; 2025-03-31T16:25:47.619890Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=8; 2025-03-31T16:25:47.619934Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=8; 2025-03-31T16:25:47.619997Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=35; 2025-03-31T16:25:47.620032Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=5; 2025-03-31T16:25:47.620159Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=95; 2025-03-31T16:25:47.620203Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=5; 2025-03-31T16:25:47.620265Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=31; 2025-03-31T16:25:47.620345Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=44; 2025-03-31T16:25:47.620736Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=353; 2025-03-31T16:25:47.620785Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=126863; 2025-03-31T16:25:47.620968Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=46800024;raw_bytes=72380025;count=29;records=720000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-03-31T16:25:47.621100Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:2585:4466];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-03-31T16:25:47.621159Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2585:4466];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-03-31T16:25:47.621226Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2585:4466];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-03-31T16:25:47.640003Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2585:4466];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=1; 2025-03-31T16:25:47.640251Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-31T16:25:47.640347Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:25:47.640432Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=6; 2025-03-31T16:25:47.640503Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-03-31T16:25:47.640549Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=6;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-31T16:25:47.640600Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-31T16:25:47.640642Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-31T16:25:47.640740Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-31T16:25:47.641281Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:25:47.641367Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;self_id=[1:2625:4499];tablet_id=9437184;parent=[1:2585:4466];fline=manager.cpp:82;event=ask_data;request=request_id=151;1={portions_count=29};; 2025-03-31T16:25:47.642486Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2585:4466];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-03-31T16:25:47.642887Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2585:4466];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-03-31T16:25:47.642927Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-03-31T16:25:47.642954Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-03-31T16:25:47.643001Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2585:4466];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-31T16:25:47.643068Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2585:4466];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:25:47.643158Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2585:4466];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=6; 2025-03-31T16:25:47.643228Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2585:4466];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-03-31T16:25:47.643270Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2585:4466];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=6;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-31T16:25:47.643326Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2585:4466];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-31T16:25:47.643368Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2585:4466];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-31T16:25:47.643472Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2585:4466];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-31T16:25:47.645475Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2585:4466];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1452;size=29;path_id=1; 2025-03-31T16:25:47.647099Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2585:4466];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1503;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 160000/10402096 160000/10402096 160000/10402096 80000/5203544 0/0 >> KqpQueryPerf::ComputeLength+QueryService [GOOD] >> KqpQueryPerf::ComputeLength-QueryService >> KqpQueryPerf::RangeLimitRead+QueryService [GOOD] 
>> KqpQueryPerf::Upsert-QueryService-UseSink [GOOD] >> KqpQueryPerf::IdxLookupJoin+QueryService [GOOD] >> KqpQueryPerf::IdxLookupJoin-QueryService >> KqpQueryPerf::Update+QueryService-UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::RangeLimitRead-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 9454, MsgBus: 19255 2025-03-31T16:25:42.679931Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010665361388281:2078];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:42.684586Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000eaf/r3tmp/tmpL5dFxb/pdisk_1.dat 2025-03-31T16:25:43.088246Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:25:43.095059Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:43.095161Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:43.098005Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9454, node 1 2025-03-31T16:25:43.226050Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:25:43.226070Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:25:43.226090Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:25:43.226205Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19255 TClient is connected to server localhost:19255 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:25:43.963605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:43.999337Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:25:44.013260Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:25:44.199318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:44.399764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:44.477057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:46.003038Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010682541259215:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:25:46.003160Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:25:46.300781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-31T16:25:46.330268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-31T16:25:46.360750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-31T16:25:46.395399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-31T16:25:46.427027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-31T16:25:46.461775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-31T16:25:46.547454Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010682541259730:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:25:46.547543Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:25:46.547601Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010682541259735:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:25:46.551175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-31T16:25:46.564598Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010682541259737:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:25:46.639869Z node 1 :TX_PROXY ERROR: Actor# [1:7488010682541259792:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:25:47.678244Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010665361388281:2078];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:47.678308Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TColumnShardTestSchema::EnableColdTiersAfterTtl [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Replace-QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 3514, MsgBus: 10114 2025-03-31T16:25:42.930158Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010665535857676:2259];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:42.930230Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000eae/r3tmp/tmphVlILi/pdisk_1.dat TServer::EnableGrpc on GrpcPort 3514, node 1 2025-03-31T16:25:43.309050Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:25:43.309268Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:25:43.333340Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:43.334592Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:43.368043Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:25:43.379878Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:25:43.398807Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:25:43.398838Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:25:43.398845Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:25:43.398992Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10114 TClient is connected to server localhost:10114 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:25:43.928578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:43.963850Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:25:43.981703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:44.166446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:44.364552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:25:44.461703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:25:46.111693Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010682715728458:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:25:46.111853Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:25:46.389505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-31T16:25:46.420121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-31T16:25:46.449295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-31T16:25:46.483842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-31T16:25:46.514160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-31T16:25:46.547892Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-31T16:25:46.603546Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010682715728970:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:25:46.603650Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:25:46.603877Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010682715728975:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:25:46.611609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-31T16:25:46.623881Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010682715728977:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:25:46.699435Z node 1 :TX_PROXY ERROR: Actor# [1:7488010682715729031:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:25:47.923192Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010665535857676:2259];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:47.952407Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpQueryPerf::Upsert+QueryService+UseSink [GOOD] >> KqpQueryPerf::DeleteOn+QueryService-UseSink [GOOD] >> KqpQueryPerf::DeleteOn+QueryService+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexInsert+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 9900, MsgBus: 25527 2025-03-31T16:25:33.394991Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010626890205692:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:33.395100Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000f05/r3tmp/tmprylF45/pdisk_1.dat 2025-03-31T16:25:33.723441Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:25:33.728496Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:33.728607Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:33.731042Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9900, node 1 2025-03-31T16:25:33.800792Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:25:33.800828Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:25:33.800836Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:25:33.800970Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25527 TClient is connected to server localhost:25527 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:25:34.320204Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:34.351330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:34.509708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:34.694457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:34.783215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:36.490777Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010639775109355:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:25:36.490912Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:25:36.821316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-31T16:25:36.851482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-31T16:25:36.893048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-31T16:25:36.928808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-31T16:25:36.959475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-31T16:25:37.032406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-31T16:25:37.089165Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010644070077164:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:25:37.089227Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:25:37.089467Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010644070077169:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:25:37.093154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-31T16:25:37.107507Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010644070077171:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:25:37.191302Z node 1 :TX_PROXY ERROR: Actor# [1:7488010644070077225:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:25:38.306160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:25:38.348094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:25:38.396146Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010626890205692:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:38.396209Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:25:38.458013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 21186, MsgBus: 5767 2025-03-31T16:25:41.541209Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488010657693189317:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:41.541266Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000f05/r3tmp/tmphKtwJp/pdisk_1.dat 2025-03-31T16:25:41.679445Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:25:41.713383Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:41.713482Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:41.715258Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21186, node 2 2025-03-31T16:25:41.781174Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:25:41.781204Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:25:41.781211Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:25:41.781338Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5767 TClient is connected to server localhost:5767 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:25:42.206475Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:42.213536Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:25:42.221757Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:42.280246Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:25:42.463640Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-31T16:25:42.539790Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:44.977074Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010670578092985:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:25:44.977197Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:25:45.027143Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-03-31T16:25:45.084665Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-03-31T16:25:45.122401Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-03-31T16:25:45.189320Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-03-31T16:25:45.230123Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-03-31T16:25:45.268108Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-03-31T16:25:45.328440Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010674873060797:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:25:45.328516Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:25:45.328680Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010674873060802:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:25:45.332879Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480
2025-03-31T16:25:45.349844Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488010674873060804:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:25:45.420791Z node 2 :TX_PROXY ERROR: Actor# [2:7488010674873060858:3450] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:25:46.284250Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:25:46.327024Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-31T16:25:46.365687Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-31T16:25:46.542084Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488010657693189317:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:46.542173Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::RangeLimitRead+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 13209, MsgBus: 14991 2025-03-31T16:25:43.456266Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010668588185520:2134];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:43.475665Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000e97/r3tmp/tmp4A3rdn/pdisk_1.dat 2025-03-31T16:25:43.967563Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:25:43.972386Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:43.972489Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:43.976619Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13209, node 1 2025-03-31T16:25:44.081118Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:25:44.081154Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:25:44.081166Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:25:44.081319Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14991 TClient is connected to server localhost:14991 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:25:44.650651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:44.690252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:44.844828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:45.038884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:45.118885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:46.769397Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010681473089099:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:25:46.769550Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:25:47.192137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-31T16:25:47.241276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-31T16:25:47.314388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-31T16:25:47.385102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-31T16:25:47.423775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-31T16:25:47.468180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-31T16:25:47.544963Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010685768056917:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:25:47.545072Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:25:47.545187Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010685768056922:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:25:47.549250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-31T16:25:47.559066Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010685768056924:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:25:47.646888Z node 1 :TX_PROXY ERROR: Actor# [1:7488010685768056981:3460] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:25:48.456265Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010668588185520:2134];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:48.456365Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Upsert-QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 28610, MsgBus: 16949 2025-03-31T16:25:43.916130Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010668678280575:2219];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000e8c/r3tmp/tmpV1E6KX/pdisk_1.dat 2025-03-31T16:25:44.161193Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:25:44.345353Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:44.345506Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:44.349409Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:25:44.369822Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28610, node 1 2025-03-31T16:25:44.486110Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:25:44.486139Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:25:44.486162Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:25:44.486308Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16949 TClient is connected to server localhost:16949 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:25:45.048681Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:45.077911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:25:45.240802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-31T16:25:45.375865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:25:45.435583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:25:47.053098Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010685858151352:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:25:47.053213Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:25:47.397547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-31T16:25:47.429550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-31T16:25:47.466620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-31T16:25:47.496857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-31T16:25:47.530156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-31T16:25:47.572178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-31T16:25:47.658819Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010685858151870:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:25:47.658971Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:25:47.659411Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010685858151875:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:25:47.662319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-31T16:25:47.672542Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010685858151878:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:25:47.779072Z node 1 :TX_PROXY ERROR: Actor# [1:7488010685858151933:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:25:48.903288Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010668678280575:2219];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:48.903351Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpQueryPerf::Insert-QueryService-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Update+QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 24844, MsgBus: 25339 2025-03-31T16:25:43.573166Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010668696174714:2081];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:43.575387Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000e96/r3tmp/tmpXQRHVi/pdisk_1.dat 2025-03-31T16:25:44.116505Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:25:44.144609Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:44.144720Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:44.148360Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24844, node 1 2025-03-31T16:25:44.298180Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:25:44.298206Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:25:44.298215Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:25:44.298328Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25339 TClient is connected to server localhost:25339 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:25:44.876845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:44.899950Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:25:44.909254Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:45.057943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:25:45.207925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:45.279261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:25:46.965450Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010681581078338:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:25:46.965609Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:25:47.285215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-31T16:25:47.324271Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-31T16:25:47.364365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-31T16:25:47.400355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-31T16:25:47.441613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-31T16:25:47.477278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-31T16:25:47.549576Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010685876046146:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:25:47.549703Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010685876046151:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:25:47.549731Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:25:47.554286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-31T16:25:47.569010Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010685876046153:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:25:47.641342Z node 1 :TX_PROXY ERROR: Actor# [1:7488010685876046208:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:25:48.572808Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010668696174714:2081];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:48.572883Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::EnableColdTiersAfterTtl [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=143438880.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143438880.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143438880.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=143438880.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123438880.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143438880.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=143438880.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123437680.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123438880.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=123438880.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123437680.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123437680.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=123437680.000000s;Name=;Codec=}; 2025-03-31T16:24:40.549477Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:24:40.636623Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:24:40.651631Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:24:40.651896Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:24:40.658309Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:24:40.658473Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:24:40.658675Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:24:40.658780Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:24:40.658864Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:24:40.658944Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:24:40.659010Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:24:40.659108Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:24:40.659184Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:24:40.659287Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:24:40.659367Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:24:40.659434Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:24:40.677934Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:24:40.678064Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:24:40.678125Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:24:40.678269Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:24:40.678403Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:24:40.678472Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:24:40.678504Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:24:40.678568Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:24:40.678631Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:24:40.678660Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:24:40.678682Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:24:40.678809Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:24:40.678864Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:24:40.678892Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:24:40.678914Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:24:40.678983Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:24:40.679031Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:24:40.679070Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:24:40.679113Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:24:40.679163Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:24:40.679189Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:24:40.679208Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 
2025-03-31T16:24:40.679241Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:24:40.679272Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:24:40.679296Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:24:40.679613Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=33; 2025-03-31T16:24:40.679678Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=28; 2025-03-31T16:24:40.679742Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=31; 2025-03-31T16:24:40.679837Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=56; 2025-03-31T16:24:40.680016Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:24:40.680080Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:24:40.680116Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:24:40.680291Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:24:40.680337Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:24:40.680364Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;pr ... 
d=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=6; 2025-03-31T16:25:49.586107Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-03-31T16:25:49.586169Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=6;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-31T16:25:49.586235Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-31T16:25:49.586304Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-31T16:25:49.586587Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-31T16:25:49.586951Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000009:max} readable: {1000000009:max} at tablet 9437184 2025-03-31T16:25:49.587128Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-03-31T16:25:49.587331Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-03-31T16:25:49.587410Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-03-31T16:25:49.587953Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2025-03-31T16:25:49.588066Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:136;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-03-31T16:25:49.588707Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:162;event=TTxScan started;actor_id=[1:1970:3975];trace_detailed=; 2025-03-31T16:25:49.589235Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:84;ff_first=(column_ids=1;column_names=timestamp;);; 2025-03-31T16:25:49.589525Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:99;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-03-31T16:25:49.589771Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:25:49.589926Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:25:49.590475Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1970:3975];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-31T16:25:49.590617Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1970:3975];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:25:49.590780Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1970:3975];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:25:49.590838Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1970:3975] finished for tablet 9437184 2025-03-31T16:25:49.591446Z node 1 
:TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1970:3975];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:416;event=scan_finish;compute_actor_id=[1:1969:3974];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","f_ProduceResults"],"t":0.001},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.002}],"full":{"a":1743438349588618,"name":"_full_task","f":1743438349588618,"d_finished":0,"c":0,"l":1743438349590917,"d":2299},"events":[{"name":"bootstrap","f":1743438349588874,"d_finished":1093,"c":1,"l":1743438349589967,"d":1093},{"a":1743438349590440,"name":"ack","f":1743438349590440,"d_finished":0,"c":0,"l":1743438349590917,"d":477},{"a":1743438349590413,"name":"processing","f":1743438349590413,"d_finished":0,"c":0,"l":1743438349590917,"d":504},{"name":"ProduceResults","f":1743438349589667,"d_finished":595,"c":2,"l":1743438349590814,"d":595},{"a":1743438349590818,"name":"Finish","f":1743438349590818,"d_finished":0,"c":0,"l":1743438349590917,"d":99}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:25:49.591554Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1970:3975];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:1969:3974];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-31T16:25:49.592061Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1970:3975];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:371;event=scan_finished;compute_actor_id=[1:1969:3974];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","f_ProduceResults"],"t":0.001},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.002}],"full":{"a":1743438349588618,"name":"_full_task","f":1743438349588618,"d_finished":0,"c":0,"l":1743438349591615,"d":2997},"events":[{"name":"bootstrap","f":1743438349588874,"d_finished":1093,"c":1,"l":1743438349589967,"d":1093},{"a":1743438349590440,"name":"ack","f":1743438349590440,"d_finished":0,"c":0,"l":1743438349591615,"d":1175},{"a":1743438349590413,"name":"processing","f":1743438349590413,"d_finished":0,"c":0,"l":1743438349591615,"d":1202},{"name":"ProduceResults","f":1743438349589667,"d_finished":595,"c":2,"l":1743438349590814,"d":595},{"a":1743438349590818,"name":"Finish","f":1743438349590818,"d_finished":0,"c":0,"l":1743438349591615,"d":797}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1970:3975]->[1:1969:3974] 2025-03-31T16:25:49.592184Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=4;SelfId=[1:1970:3975];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-31T16:25:49.588030Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-03-31T16:25:49.592235Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1970:3975];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-31T16:25:49.592382Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1970:3975];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 160000/10402096 160000/10402096 160000/10402096 80000/5203544 0/0 >> KqpQueryPerf::IndexDeleteOn-QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexDeleteOn-QueryService+UseSink >> KqpQueryPerf::Insert+QueryService+UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Upsert+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 10295, MsgBus: 20705 2025-03-31T16:25:44.138180Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010673225713953:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:44.144016Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000e89/r3tmp/tmpkGZXpG/pdisk_1.dat 2025-03-31T16:25:44.553765Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:25:44.572833Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:44.572968Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:44.574475Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10295, node 1 2025-03-31T16:25:44.665803Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:25:44.665856Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:25:44.665870Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:25:44.665999Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server 
localhost:20705 TClient is connected to server localhost:20705 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:25:45.237334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:45.257289Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:25:45.274781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:45.414706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:45.541904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:45.610111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:47.430867Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010686110617628:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:47.430959Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:47.748842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:25:47.819717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:25:47.890682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:25:47.917841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:25:47.947072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:25:48.008591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:25:48.127269Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010690405585445:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:48.127352Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:48.129261Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010690405585450:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:48.134442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:25:48.145850Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010690405585452:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:25:48.251735Z node 1 :TX_PROXY ERROR: Actor# [1:7488010690405585509:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:25:49.135371Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010673225713953:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:49.135470Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpQueryPerf::IndexUpsert+QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexUpsert+QueryService+UseSink >> DataShardVolatile::DistributedUpsertRestartAfterPrepare+UseSink [GOOD] >> DataShardVolatile::DistributedUpsertRestartAfterPrepare-UseSink >> TColumnShardTestSchema::HotTiersRevCompression [GOOD] |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> KqpQueryPerf::UpdateOn+QueryService+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Insert+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 10460, MsgBus: 10713 2025-03-31T16:25:39.737788Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010649881379086:2129];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:39.737852Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000eba/r3tmp/tmpUnMena/pdisk_1.dat 2025-03-31T16:25:40.176987Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:25:40.211869Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:40.211984Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:40.213654Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10460, node 1 2025-03-31T16:25:40.327316Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:25:40.327339Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:25:40.327351Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:25:40.327495Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10713 TClient is connected to server localhost:10713 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:25:40.991037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:41.060627Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:25:41.082438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:25:41.224957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-31T16:25:41.416717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:41.511650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:43.377901Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010667061249978:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:43.378048Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:43.724447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:25:43.770406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:25:43.805682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:25:43.834691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:25:43.885536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:25:43.957812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:25:44.017972Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010671356217789:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:44.018051Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:44.018230Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010671356217794:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:44.021599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:25:44.031861Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010671356217796:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:25:44.108521Z node 1 :TX_PROXY ERROR: Actor# [1:7488010671356217849:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:25:44.738290Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010649881379086:2129];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:44.738351Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 62852, MsgBus: 26835 2025-03-31T16:25:46.219692Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488010679893225252:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:46.219805Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000eba/r3tmp/tmpTnq01q/pdisk_1.dat 2025-03-31T16:25:46.339136Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:25:46.368481Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:46.368572Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:46.373953Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62852, node 2 2025-03-31T16:25:46.429628Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:25:46.429649Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:25:46.429657Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:25:46.429764Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26835 TClient is connected to server localhost:26835 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:25:46.829178Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:46.838568Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:25:46.853513Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:46.940172Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:47.085579Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:47.151685Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:49.280464Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010692778128893:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:49.280572Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:49.325997Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:25:49.358241Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:25:49.423209Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:25:49.449644Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:25:49.487226Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:25:49.529084Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:25:49.590448Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010692778129407:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:49.590582Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:49.590916Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010692778129412:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:49.594317Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:25:49.605131Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488010692778129414:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:25:49.669819Z node 2 :TX_PROXY ERROR: Actor# [2:7488010692778129467:3442] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpQueryPerf::Replace+QueryService+UseSink [GOOD] >> KqpIndexLookupJoin::CheckCastUint32ToUint16+StreamLookupJoin-NotNull >> KqpJoin::JoinWithDuplicates ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::HotTiersRevCompression [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143438879.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143438879.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=143438879.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123438879.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143438879.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=143438879.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123437679.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123438879.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=123438879.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123437679.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123437679.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=123437679.000000s;Name=;Codec=}; 2025-03-31T16:24:39.768304Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:24:39.847970Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:24:39.866849Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:24:39.867126Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:24:39.873696Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:24:39.873872Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:24:39.874095Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:24:39.874242Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:24:39.874390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:24:39.874547Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:24:39.874667Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:24:39.874794Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:24:39.874921Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:24:39.875111Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:24:39.875251Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:24:39.875367Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:24:39.896216Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:24:39.896353Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:24:39.896418Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:24:39.896549Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:24:39.896705Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:24:39.896773Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:24:39.896810Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:24:39.896888Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:24:39.896936Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:24:39.896967Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:24:39.896989Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:24:39.897118Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:24:39.897167Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:24:39.897210Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:24:39.897233Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:24:39.897298Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:24:39.897338Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:24:39.897388Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:24:39.897409Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:24:39.897472Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:24:39.897498Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:24:39.897521Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:24:39.897554Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 
2025-03-31T16:24:39.897582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:24:39.897608Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:24:39.897953Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=48; 2025-03-31T16:24:39.898020Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=28; 2025-03-31T16:24:39.898081Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=29; 2025-03-31T16:24:39.898167Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=43; 2025-03-31T16:24:39.898355Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:24:39.898421Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:24:39.898460Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:24:39.898672Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:24:39.898734Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:24:39.898765Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=94 ... 
et_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=7; 2025-03-31T16:25:51.442636Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-03-31T16:25:51.442693Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=7;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-31T16:25:51.442757Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-31T16:25:51.442811Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-31T16:25:51.442939Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-31T16:25:51.443238Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000009:max} readable: {1000000009:max} at tablet 9437184 2025-03-31T16:25:51.443381Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-03-31T16:25:51.443581Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-03-31T16:25:51.443661Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-03-31T16:25:51.444157Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2025-03-31T16:25:51.444270Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:136;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-03-31T16:25:51.444814Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:162;event=TTxScan started;actor_id=[1:1984:3989];trace_detailed=; 2025-03-31T16:25:51.445288Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:84;ff_first=(column_ids=1;column_names=timestamp;);; 2025-03-31T16:25:51.445534Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:99;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-03-31T16:25:51.445739Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:25:51.445872Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:25:51.446367Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1984:3989];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-31T16:25:51.446497Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1984:3989];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:25:51.446629Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1984:3989];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:25:51.446677Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1984:3989] finished for tablet 9437184 2025-03-31T16:25:51.447204Z node 1 
:TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1984:3989];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:416;event=scan_finish;compute_actor_id=[1:1983:3988];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":1743438351444734,"name":"_full_task","f":1743438351444734,"d_finished":0,"c":0,"l":1743438351446750,"d":2016},"events":[{"name":"bootstrap","f":1743438351444964,"d_finished":947,"c":1,"l":1743438351445911,"d":947},{"a":1743438351446339,"name":"ack","f":1743438351446339,"d_finished":0,"c":0,"l":1743438351446750,"d":411},{"a":1743438351446315,"name":"processing","f":1743438351446315,"d_finished":0,"c":0,"l":1743438351446750,"d":435},{"name":"ProduceResults","f":1743438351445655,"d_finished":507,"c":2,"l":1743438351446657,"d":507},{"a":1743438351446660,"name":"Finish","f":1743438351446660,"d_finished":0,"c":0,"l":1743438351446750,"d":90}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:25:51.447305Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1984:3989];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:1983:3988];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-31T16:25:51.447761Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1984:3989];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:371;event=scan_finished;compute_actor_id=[1:1983:3988];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":1743438351444734,"name":"_full_task","f":1743438351444734,"d_finished":0,"c":0,"l":1743438351447362,"d":2628},"events":[{"name":"bootstrap","f":1743438351444964,"d_finished":947,"c":1,"l":1743438351445911,"d":947},{"a":1743438351446339,"name":"ack","f":1743438351446339,"d_finished":0,"c":0,"l":1743438351447362,"d":1023},{"a":1743438351446315,"name":"processing","f":1743438351446315,"d_finished":0,"c":0,"l":1743438351447362,"d":1047},{"name":"ProduceResults","f":1743438351445655,"d_finished":507,"c":2,"l":1743438351446657,"d":507},{"a":1743438351446660,"name":"Finish","f":1743438351446660,"d_finished":0,"c":0,"l":1743438351447362,"d":702}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1984:3989]->[1:1983:3988] 2025-03-31T16:25:51.447869Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=4;SelfId=[1:1984:3989];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-31T16:25:51.444227Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-03-31T16:25:51.447926Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1984:3989];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-31T16:25:51.448055Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1984:3989];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 240000/15598728 160000/10402096 160000/10402096 80000/5203544 0/0 >> KqpRanges::IsNotNullInJsonValue |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Replace+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 7807, MsgBus: 26118 2025-03-31T16:25:47.084287Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010683003068124:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:47.086956Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000e7e/r3tmp/tmpGN6tX1/pdisk_1.dat 2025-03-31T16:25:47.499506Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:25:47.515893Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:47.515972Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:47.517896Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7807, node 1 2025-03-31T16:25:47.615587Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:25:47.615607Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:25:47.615616Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:25:47.615723Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26118 TClient is connected to server 
localhost:26118 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:25:48.271774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:48.304152Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:25:48.313941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:48.470158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:25:48.657768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:25:48.741122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:50.436455Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010695887971634:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:50.436618Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:50.732182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:25:50.767495Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:25:50.805134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:25:50.836743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:25:50.868033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:25:50.907485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:25:50.992047Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010695887972148:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:50.992151Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:50.995111Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010695887972153:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:51.000103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:25:51.010658Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010695887972155:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:25:51.101704Z node 1 :TX_PROXY ERROR: Actor# [1:7488010700182939507:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:25:52.081323Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010683003068124:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:52.081407Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TPersQueueTest::SetupLockSession2 >> KqpQueryPerf::DeleteOn-QueryService-UseSink [GOOD] >> KqpQueryPerf::DeleteOn-QueryService+UseSink >> TPersQueueTest::BadTopic >> KqpQueryPerf::Delete-QueryService-UseSink [GOOD] >> KqpQueryPerf::Delete-QueryService+UseSink >> DemoTx::Scenario_1 >> KqpJoinOrder::GeneralPrioritiesBug2 >> TopicService::OneConsumer_TheRangesDoNotOverlap >> SystemView::CollectPreparedQueries [GOOD] >> SystemView::CollectScanQueries >> KqpQueryPerf::Upsert-QueryService+UseSink [GOOD] >> KqpIndexLookupJoin::SimpleInnerJoin+StreamLookup >> SystemView::TabletsRanges [GOOD] >> SystemView::TabletsRangesPredicateExtractDisabled >> KqpQueryPerf::KvRead-QueryService [GOOD] |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> KqpQueryPerf::IndexUpdateOn+QueryService+UseSink [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCH16 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Upsert-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 2188, MsgBus: 1066 2025-03-31T16:25:48.386899Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010688185289842:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:48.386925Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000e76/r3tmp/tmp7muLfH/pdisk_1.dat 2025-03-31T16:25:48.971494Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:25:48.976113Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:48.976209Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:48.979595Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2188, node 1 2025-03-31T16:25:49.099602Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:25:49.099625Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:25:49.099631Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:25:49.099750Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1066 TClient is connected to server localhost:1066 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:25:49.740423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:49.760542Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:49.779219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:25:49.942872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:25:50.110042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:25:50.183063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:51.988838Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010701070193510:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:51.988955Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:52.336556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:25:52.367868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:25:52.443868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:25:52.478177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:25:52.508515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:25:52.567145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:25:52.618115Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010705365161321:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:52.618182Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:52.618492Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010705365161326:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:52.622228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:25:52.632254Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010705365161328:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:25:52.705728Z node 1 :TX_PROXY ERROR: Actor# [1:7488010705365161382:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:25:53.387420Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010688185289842:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:53.387542Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpJoinOrder::CanonizedJoinOrderTPCH14 >> KqpQueryPerf::DeleteOn+QueryService+UseSink [GOOD] >> KqpQueryPerf::IdxLookupJoin-QueryService [GOOD] >> KqpQueryPerf::ComputeLength-QueryService [GOOD] >> KqpQueryPerf::IdxLookupJoinThreeWay+QueryService [GOOD] >> KqpQueryPerf::IdxLookupJoinThreeWay-QueryService ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::KvRead-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 5110, MsgBus: 18914 2025-03-31T16:25:43.380375Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010667720236852:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:43.386115Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ea2/r3tmp/tmpwoCiHF/pdisk_1.dat 2025-03-31T16:25:43.846831Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:25:43.849703Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:43.849805Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:43.854823Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5110, node 1 2025-03-31T16:25:43.955177Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:25:43.955204Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:25:43.955212Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:25:43.955338Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18914 TClient is connected to server localhost:18914 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:25:44.652058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:44.669398Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:25:44.682357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:44.807126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:45.001726Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:45.074891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:46.669179Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010680605140498:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:46.669291Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:46.984268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:25:47.024612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:25:47.057938Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:25:47.096109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:25:47.148539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:25:47.205667Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:25:47.262087Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010684900108309:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:47.262163Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:47.263338Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010684900108314:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:47.268495Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:25:47.281632Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010684900108316:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:25:47.340017Z node 1 :TX_PROXY ERROR: Actor# [1:7488010684900108368:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:25:48.373389Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010667720236852:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:48.373480Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 8210, MsgBus: 24279 2025-03-31T16:25:49.244075Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488010693588058559:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:49.244118Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ea2/r3tmp/tmpTyDuqG/pdisk_1.dat 2025-03-31T16:25:49.406664Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8210, node 2 2025-03-31T16:25:49.433014Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:49.433084Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:49.473302Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:25:49.538769Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:25:49.538794Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:25:49.538802Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:25:49.538956Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24279 TClient is connected to server localhost:24279 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:25:50.090294Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:25:50.120185Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:25:50.196747Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:50.361341Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:50.438518Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:52.568773Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010706472962221:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:52.568907Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:52.619246Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:25:52.656385Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:25:52.698825Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:25:52.733461Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:25:52.774590Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:25:52.850556Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:25:52.903334Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010706472962738:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:52.903435Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010706472962743:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:52.903457Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:52.907336Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:25:52.918650Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488010706472962745:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:25:52.980701Z node 2 :TX_PROXY ERROR: Actor# [2:7488010706472962797:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexUpdateOn+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 11283, MsgBus: 29488 2025-03-31T16:25:37.986860Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010642837604164:2266];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:37.986920Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ec5/r3tmp/tmpOaMMvK/pdisk_1.dat 2025-03-31T16:25:38.388488Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:25:38.421390Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:38.421497Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:38.423147Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11283, node 1 2025-03-31T16:25:38.474986Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:25:38.475012Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:25:38.475020Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:25:38.475142Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29488 TClient is connected to server localhost:29488 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:25:39.054414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:25:39.089236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:39.221487Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:39.404298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:25:39.473579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:25:41.325277Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010660017474930:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:41.325405Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:41.675153Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:25:41.708981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:25:41.739681Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:25:41.775642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:25:41.808518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:25:41.858303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:25:41.947203Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010660017475440:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:41.947388Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:41.947729Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010660017475445:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:41.952382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:25:41.970188Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010660017475447:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:25:42.060251Z node 1 :TX_PROXY ERROR: Actor# [1:7488010664312442800:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:25:42.975037Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010642837604164:2266];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:42.975137Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:25:43.157031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:25:43.195544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:25:43.272737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 14328, MsgBus: 63007 2025-03-31T16:25:46.642488Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488010682127866492:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:46.642554Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ec5/r3tmp/tmplb0gCG/pdisk_1.dat 2025-03-31T16:25:46.759685Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14328, node 2 2025-03-31T16:25:46.791122Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:46.791253Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:46.800661Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:25:46.819657Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:25:46.819679Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:25:46.819687Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:25:46.819804Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63007 TClient is connected to server localhost:63007 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:25:47.307378Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:47.323342Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:25:47.340017Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:47.422291Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:47.615930Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:47.696393Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:49.981931Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010695012770145:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:49.982047Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:50.049086Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:25:50.129152Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:25:50.164051Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:25:50.214702Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:25:50.269922Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:25:50.332313Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:25:50.393519Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010699307737954:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:50.393684Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:50.394015Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010699307737959:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:50.397779Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:25:50.411294Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488010699307737961:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:25:50.494071Z node 2 :TX_PROXY ERROR: Actor# [2:7488010699307738014:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:25:51.495819Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:25:51.579833Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-31T16:25:51.622087Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-31T16:25:51.642197Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488010682127866492:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:51.642252Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpSinkTx::OlapSnapshotROInteractive1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IdxLookupJoin-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 21558, MsgBus: 13772 2025-03-31T16:25:43.299591Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010668474517527:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:43.300005Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ea3/r3tmp/tmpwMkgjE/pdisk_1.dat 2025-03-31T16:25:43.803710Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:25:43.804720Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:43.804804Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:43.809338Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21558, node 1 2025-03-31T16:25:43.898374Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:25:43.898395Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:25:43.898408Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:25:43.898554Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13772 TClient is connected to server localhost:13772 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:25:44.526318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:44.543625Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:25:44.550019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:44.706466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:44.879580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:44.971300Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:46.572649Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010681359421056:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:46.572764Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:46.889779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:25:46.928940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:25:46.968109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:25:46.999361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:25:47.029646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:25:47.097331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:25:47.159439Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010685654388870:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:47.159525Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:47.159677Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010685654388875:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:47.162940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:25:47.172980Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010685654388877:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:25:47.267500Z node 1 :TX_PROXY ERROR: Actor# [1:7488010685654388932:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:25:48.288873Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010668474517527:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:48.288984Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 16974, MsgBus: 23885 2025-03-31T16:25:49.832560Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488010692584016632:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:49.832601Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ea3/r3tmp/tmpQWeaxO/pdisk_1.dat 2025-03-31T16:25:50.017300Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:25:50.052483Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:50.052576Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:50.057736Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16974, node 2 2025-03-31T16:25:50.162646Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:25:50.162668Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:25:50.162675Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:25:50.162782Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23885 TClient is connected to server localhost:23885 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-03-31T16:25:50.600786Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:25:50.617307Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:50.675598Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:25:50.850440Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-31T16:25:50.931173Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:53.155235Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010709763887577:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:53.155343Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:53.177559Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:25:53.212154Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:25:53.247612Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:25:53.282859Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:25:53.314352Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:25:53.345992Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:25:53.398701Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010709763888085:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:53.398801Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:53.398857Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010709763888090:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:53.403205Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:25:53.419839Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488010709763888092:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:25:53.491187Z node 2 :TX_PROXY ERROR: Actor# [2:7488010709763888146:3437] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:25:54.832750Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488010692584016632:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:54.832832Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::DeleteOn+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 14664, MsgBus: 61644 2025-03-31T16:25:44.220193Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010672323099198:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:44.220294Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000e8a/r3tmp/tmpArxSmL/pdisk_1.dat 2025-03-31T16:25:44.714147Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:25:44.719580Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:44.719694Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:44.723373Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14664, node 1 2025-03-31T16:25:44.814446Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:25:44.814478Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:25:44.814487Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:25:44.814616Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61644 TClient is connected to server localhost:61644 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:25:45.405682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:45.427325Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:25:45.441182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:45.590908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:45.766682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:45.838779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:47.609841Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010685208002841:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:47.610006Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:47.921958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:25:47.958753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:25:47.994422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:25:48.036363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:25:48.072122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:25:48.112877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:25:48.216898Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010689502970652:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:48.217015Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:48.217365Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010689502970657:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:48.221056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:25:48.232547Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010689502970659:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:25:48.319610Z node 1 :TX_PROXY ERROR: Actor# [1:7488010689502970716:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:25:49.225509Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010672323099198:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:49.227802Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 65246, MsgBus: 25622 2025-03-31T16:25:50.368669Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488010698271185596:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:50.368926Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000e8a/r3tmp/tmpf8BUmp/pdisk_1.dat 2025-03-31T16:25:50.507750Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:25:50.518780Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:50.518860Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:50.520459Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 65246, node 2 2025-03-31T16:25:50.587663Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:25:50.587700Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:25:50.587708Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:25:50.587817Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25622 TClient is connected to server localhost:25622 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:25:51.054517Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:51.063799Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:25:51.069798Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:51.148319Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:51.342054Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:51.431033Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:53.644196Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010711156089255:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:53.644285Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:53.680037Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:25:53.718520Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:25:53.758571Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:25:53.793789Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:25:53.829452Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:25:53.870238Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:25:53.957128Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010711156089772:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:53.957239Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:53.957828Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010711156089777:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:53.961701Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:25:53.973312Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488010711156089779:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:25:54.075887Z node 2 :TX_PROXY ERROR: Actor# [2:7488010715451057130:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpQueryPerf::Insert-QueryService-UseSink [GOOD] >> KqpQueryPerf::Insert-QueryService+UseSink >> SystemView::AuthGroupMembers [GOOD] >> SystemView::AuthGroupMembers_Access |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_transfer_writer/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::ComputeLength-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 9412, MsgBus: 22814 2025-03-31T16:25:43.430877Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010666413993780:2077];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:43.431928Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000e98/r3tmp/tmplJz11N/pdisk_1.dat 2025-03-31T16:25:43.960722Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:25:43.997828Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:43.997932Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:44.000364Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9412, node 1 2025-03-31T16:25:44.171726Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:25:44.171749Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:25:44.171756Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:25:44.171882Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22814 TClient is connected to server localhost:22814 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:25:44.867355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:44.884317Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:25:44.896771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:45.029369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:45.183576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:45.253576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:46.764122Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010679298897424:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:46.764244Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:47.126945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:25:47.163381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:25:47.191439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:25:47.224685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:25:47.261622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:25:47.301653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:25:47.355156Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010683593865229:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:47.355285Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:47.355664Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010683593865235:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:47.359690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:25:47.374824Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010683593865237:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:25:47.480072Z node 1 :TX_PROXY ERROR: Actor# [1:7488010683593865294:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:25:48.431192Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010666413993780:2077];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:48.431263Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 19029, MsgBus: 17303 2025-03-31T16:25:49.662843Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488010694946860371:2056];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:49.663102Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000e98/r3tmp/tmp3lCfSv/pdisk_1.dat 2025-03-31T16:25:49.844320Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19029, node 2 2025-03-31T16:25:49.884078Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:49.884185Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:49.890185Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:25:49.955878Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:25:49.955908Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:25:49.955917Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:25:49.956042Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17303 TClient is connected to server localhost:17303 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-03-31T16:25:50.408175Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:25:50.439994Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:50.498652Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:25:50.647293Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:25:50.722562Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:53.083284Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010712126731331:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:53.083364Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:53.136440Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:25:53.176505Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:25:53.215455Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:25:53.253798Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:25:53.297926Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:25:53.335401Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:25:53.390565Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010712126731840:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:53.390626Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010712126731845:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:53.390685Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:53.394538Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:25:53.429737Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488010712126731847:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:25:53.502311Z node 2 :TX_PROXY ERROR: Actor# [2:7488010712126731902:3442] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:25:54.662244Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488010694946860371:2056];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:54.662312Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> SystemView::AuthOwners_ResultOrder [GOOD] >> SystemView::AuthOwners_TableRange |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> KqpSqlIn::TableSource >> KqpQueryPerf::UpdateOn+QueryService+UseSink [GOOD] >> KqpJoin::IdxLookupSelf >> KqpJoin::JoinDupColumnRight |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> KqpJoin::IdxLookupPartialWithTempTable >> KqpIndexLookupJoin::CheckCastUint32ToUint16+StreamLookupJoin-NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastUint32ToUint16+StreamLookupJoin+NotNull ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::UpdateOn+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 28478, MsgBus: 20879 2025-03-31T16:25:52.889148Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010707219542653:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:52.889200Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000e61/r3tmp/tmpL0iPum/pdisk_1.dat 2025-03-31T16:25:53.286994Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:25:53.294709Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:53.294815Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 28478, node 1 2025-03-31T16:25:53.298225Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:25:53.379907Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:25:53.379940Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:25:53.379955Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:25:53.380122Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20879 TClient is connected to server localhost:20879 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:25:53.923069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:53.957662Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:54.120306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:54.299634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:54.376952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:56.080309Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010724399413618:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:56.080438Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:56.402577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:25:56.445987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:25:56.487831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:25:56.537005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:25:56.572673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:25:56.622935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:25:56.680796Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010724399414130:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:56.680876Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:56.681084Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010724399414135:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:56.684699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:25:56.695705Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010724399414137:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:25:56.764122Z node 1 :TX_PROXY ERROR: Actor# [1:7488010724399414190:3444] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:25:57.889436Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010707219542653:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:57.889554Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpQueryPerf::IndexDeleteOn-QueryService+UseSink [GOOD] >> KqpQueryPerf::DeleteOn-QueryService+UseSink [GOOD] >> TColumnShardTestSchema::RebootForgetAfterFail [GOOD] >> KqpJoin::JoinWithDuplicates [GOOD] >> KqpQueryPerf::IndexUpsert+QueryService+UseSink [GOOD] >> KqpIndexLookupJoin::CheckCastUint32ToUint16-StreamLookupJoin-NotNull >> KqpRanges::IsNotNullInJsonValue [GOOD] >> KqpRanges::DuplicateKeyPredicateLiteral >> KqpQueryPerf::Delete-QueryService+UseSink [GOOD] >> SystemView::TabletsRangesPredicateExtractDisabled [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexDeleteOn-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 27609, MsgBus: 30830 2025-03-31T16:25:43.064636Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010665957730178:2202];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:43.065318Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ea7/r3tmp/tmpDUo2lm/pdisk_1.dat 2025-03-31T16:25:43.493019Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:25:43.495866Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:43.495981Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:43.499656Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27609, node 1 2025-03-31T16:25:43.594384Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:25:43.594404Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:25:43.594409Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:25:43.594502Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30830 TClient is connected to server localhost:30830 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:25:44.129283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:44.155693Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:25:44.177617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:44.375568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:44.549717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:44.625885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:46.389975Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010678842633717:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:46.390122Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:46.686876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:25:46.719382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:25:46.786843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:25:46.823257Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:25:46.856215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:25:46.920185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:25:47.008646Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010683137601529:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:47.008733Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:47.009126Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010683137601534:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:47.012506Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:25:47.024378Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010683137601536:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:25:47.089600Z node 1 :TX_PROXY ERROR: Actor# [1:7488010683137601592:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:25:48.062128Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010665957730178:2202];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:48.062220Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:25:48.298638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:25:48.370036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:25:48.450900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 26032, MsgBus: 23254 2025-03-31T16:25:51.459806Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488010700395256676:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:51.460091Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ea7/r3tmp/tmpgiCPTf/pdisk_1.dat 2025-03-31T16:25:51.568423Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26032, node 2 2025-03-31T16:25:51.616292Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:51.616369Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:51.617663Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:25:51.664565Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:25:51.664586Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:25:51.664594Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:25:51.664713Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23254 TClient is connected to server localhost:23254 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:25:52.093283Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:52.101420Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:25:52.114092Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:52.198196Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:52.385329Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:25:52.465912Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-31T16:25:54.975225Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010713280160340:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:54.975321Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:55.023368Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:25:55.067407Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:25:55.113448Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:25:55.155138Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:25:55.199586Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:25:55.278890Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:25:55.331886Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010717575128153:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:55.331981Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:55.332265Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010717575128158:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:55.336285Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:25:55.346828Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488010717575128160:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:25:55.406739Z node 2 :TX_PROXY ERROR: Actor# [2:7488010717575128213:3449] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:25:56.462091Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488010700395256676:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:56.462149Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:25:56.546840Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:25:56.630487Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-31T16:25:56.674927Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::JoinWithDuplicates [GOOD] Test command err: Trying to start YDB, gRPC: 20086, MsgBus: 4770 2025-03-31T16:25:53.247537Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010709142261060:2174];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:53.247978Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001975/r3tmp/tmp3hxXwW/pdisk_1.dat 2025-03-31T16:25:53.636328Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20086, node 1 2025-03-31T16:25:53.698404Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:53.701276Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:53.709100Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:25:53.735436Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:25:53.735459Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:25:53.735471Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:25:53.735601Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4770 TClient is connected to server localhost:4770 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:25:54.252626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:54.287327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:54.434173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:54.615309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:54.707303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:56.376646Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010722027164614:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:56.376762Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:56.669750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:25:56.713605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:25:56.785435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:25:56.819823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:25:56.888625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:25:56.933331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:25:57.027662Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010726322132431:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:57.027717Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:57.027840Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010726322132436:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:57.032058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:25:57.043866Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-31T16:25:57.044306Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010726322132438:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:25:57.137538Z node 1 :TX_PROXY ERROR: Actor# [1:7488010726322132494:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:25:58.088738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:25:58.132137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:25:58.248423Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010709142261060:2174];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:58.248506Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::DeleteOn-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 12086, MsgBus: 23151 2025-03-31T16:25:48.151667Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010689728026763:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:48.191845Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000e7d/r3tmp/tmpRyjRoQ/pdisk_1.dat 2025-03-31T16:25:48.625557Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:48.625705Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:48.627446Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:25:48.651936Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12086, node 1 2025-03-31T16:25:48.763888Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:25:48.763909Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:25:48.763918Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:25:48.764057Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23151 TClient is connected to server localhost:23151 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:25:49.409254Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:49.443540Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:25:49.458369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:49.594441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:49.791054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:49.878908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:51.757255Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010702612930414:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:51.757413Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:52.129279Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:25:52.164545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:25:52.200752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:25:52.237504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:25:52.292231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:25:52.365517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:25:52.424602Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010706907898229:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:52.424733Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:52.425014Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010706907898234:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:52.429215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:25:52.444158Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010706907898236:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:25:52.508456Z node 1 :TX_PROXY ERROR: Actor# [1:7488010706907898289:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:25:53.154640Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010689728026763:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:53.154730Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 24899, MsgBus: 19580 2025-03-31T16:25:54.477356Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488010714247189678:2069];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:54.484686Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000e7d/r3tmp/tmpTXxUUS/pdisk_1.dat 2025-03-31T16:25:54.609042Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:25:54.620933Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:54.621023Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:54.622394Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24899, node 2 2025-03-31T16:25:54.671619Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:25:54.671642Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:25:54.671650Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:25:54.671763Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19580 TClient is connected to server localhost:19580 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:25:55.129316Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:55.146162Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:25:55.154926Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:55.229209Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:55.406695Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:55.490702Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:57.724688Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010727132093347:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:57.724783Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:57.771864Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:25:57.806103Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:25:57.838226Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:25:57.875895Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:25:57.916236Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:25:57.950179Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:25:58.034991Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010731427061157:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:58.035098Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:58.035492Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010731427061162:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:58.038793Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:25:58.050113Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488010731427061164:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:25:58.109186Z node 2 :TX_PROXY ERROR: Actor# [2:7488010731427061217:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexUpsert+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 17283, MsgBus: 8651 2025-03-31T16:25:42.575786Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010664776095478:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:42.575861Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000eb3/r3tmp/tmpBkZjnX/pdisk_1.dat 2025-03-31T16:25:42.960963Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17283, node 1 2025-03-31T16:25:42.993040Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:42.993149Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:43.007482Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:25:43.081897Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:25:43.081925Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:25:43.081934Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:25:43.082103Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8651 TClient is connected to server localhost:8651 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:25:43.687868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:25:43.709366Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:25:43.723359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:43.914246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:25:44.080845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:25:44.166301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:45.900079Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010677660999134:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:45.900208Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:46.198338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:25:46.228895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:25:46.270858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:25:46.299685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:25:46.330454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:25:46.363287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:25:46.467666Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010681955966948:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:46.467757Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:46.468073Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010681955966953:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:46.472849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:25:46.486868Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010681955966955:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:25:46.589711Z node 1 :TX_PROXY ERROR: Actor# [1:7488010681955967010:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:25:47.577002Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010664776095478:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:47.577639Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:25:47.682119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:25:47.761051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:25:47.817442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 15749, MsgBus: 23710 2025-03-31T16:25:51.635463Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488010704029520343:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:51.635510Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000eb3/r3tmp/tmpecuwDZ/pdisk_1.dat 2025-03-31T16:25:51.760760Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:25:51.785498Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:51.785588Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:51.787882Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15749, node 2 2025-03-31T16:25:51.839107Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:25:51.839130Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:25:51.839137Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:25:51.839271Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23710 TClient is connected to server localhost:23710 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:25:52.284761Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:25:52.298300Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:25:52.360421Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:52.557875Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:52.642709Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:54.988405Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010716914424002:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:25:54.988524Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:25:55.026032Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-03-31T16:25:55.067910Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-03-31T16:25:55.103746Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-03-31T16:25:55.132605Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-03-31T16:25:55.171532Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-03-31T16:25:55.229829Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-03-31T16:25:55.296809Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010721209391809:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:25:55.296912Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:25:55.296985Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010721209391815:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:25:55.301457Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480
2025-03-31T16:25:55.318352Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488010721209391817:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:25:55.406627Z node 2 :TX_PROXY ERROR: Actor# [2:7488010721209391872:3449] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:25:56.411744Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:25:56.459770Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-31T16:25:56.557236Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-31T16:25:56.639326Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488010704029520343:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:56.682479Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpNotNullColumns::ReplaceNotNull ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootForgetAfterFail [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=saved_at;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143438869.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123438869.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123437669.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; 2025-03-31T16:24:31.798612Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:24:31.889778Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:24:31.913520Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:24:31.913828Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:24:31.921639Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:24:31.921855Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:24:31.922098Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:24:31.922220Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:24:31.922337Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:24:31.922471Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:24:31.922575Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:24:31.922687Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:24:31.922797Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:24:31.922937Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:24:31.923051Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:24:31.923184Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:24:31.952102Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:24:31.952269Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:24:31.952326Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:24:31.952516Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:24:31.952695Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:24:31.952779Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:24:31.952825Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:24:31.952926Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:24:31.952990Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:24:31.953035Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:24:31.953065Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:24:31.953242Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:24:31.953332Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:24:31.953380Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:24:31.953412Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:24:31.953505Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:24:31.953558Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:24:31.953608Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:24:31.953638Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:24:31.953714Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:24:31.953754Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:24:31.953787Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:24:31.953837Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 
2025-03-31T16:24:31.953876Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:24:31.953914Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:24:31.954301Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=47; 2025-03-31T16:24:31.954387Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=35; 2025-03-31T16:24:31.954468Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=33; 2025-03-31T16:24:31.954548Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=41; 2025-03-31T16:24:31.954705Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:24:31.954764Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:24:31.954802Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:24:31.955024Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:24:31.955103Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:24:31.955149Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:24:31.955319Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:24:31.955376Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:24:31.955408Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-31T16:24:31.955614Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description= ... 
request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:46:8544:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:73:8560:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:70:9384:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:37:8648:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:4:8680:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:55:2768:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:60:8560:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:54:8328:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:59:8552:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:97:2768:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:29:8704:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:41:2848:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:18:8592:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:20:2840:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:28:8712:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve 
host name;storage_id=/cold;blob=[9437184:7:1:255:13:8656:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:96:8328:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:33:8680:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:50:8536:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:74:8544:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:80:8368:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:30:8704:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:63:8568:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:27:8680:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:56:9608:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:61:8552:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:32:8704:0];FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:58:8568:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:89:8552:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:87:8552:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:72:8560:0]; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:93:8368:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:79:8408:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:84:9608:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:66:8360:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:94:8360:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:36:8624:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:44:8560:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:45:8560:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:69:2768:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:49:8544:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:98:9384:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:64:8544:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:3:8680:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:77:8544:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:11:8672:0]; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:6:8680:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:35:8656:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:47:8544:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:81:8336:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:86:8568:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 160000/10402332 160000/10402332 0/0 160000/10402524 |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> DataShardVolatile::DistributedUpsertRestartAfterPrepare-UseSink [GOOD] >> DataShardVolatile::DistributedUpsertRestartAfterPlan ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Delete-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 4571, MsgBus: 2010 2025-03-31T16:25:48.466706Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010691393950858:2274];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:48.466757Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000e75/r3tmp/tmpw9zbG4/pdisk_1.dat 2025-03-31T16:25:48.923678Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:25:48.926464Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:48.926606Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:48.931546Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4571, node 1 2025-03-31T16:25:49.025320Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:25:49.025355Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:25:49.025375Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:25:49.025527Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2010 TClient is connected to server localhost:2010 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:25:49.698205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:49.716596Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:25:49.737550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:25:49.897799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:50.089353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:50.161751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:25:51.747614Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010704278854328:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:25:51.747934Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:25:52.023241Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-31T16:25:52.060897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-31T16:25:52.098193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-31T16:25:52.132896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-31T16:25:52.163187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-31T16:25:52.207583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-31T16:25:52.300538Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010708573822144:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:25:52.300619Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:25:52.300835Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010708573822149:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:25:52.305933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-31T16:25:52.322277Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010708573822151:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:25:52.421985Z node 1 :TX_PROXY ERROR: Actor# [1:7488010708573822206:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:25:53.468525Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010691393950858:2274];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:53.468833Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 64969, MsgBus: 62139 2025-03-31T16:25:54.452469Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488010713636497150:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:54.452527Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000e75/r3tmp/tmpKhPffj/pdisk_1.dat 2025-03-31T16:25:54.611890Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:25:54.639964Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:54.640044Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 64969, node 2 2025-03-31T16:25:54.645984Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:25:54.695555Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:25:54.695579Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:25:54.695586Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:25:54.695696Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62139 TClient is connected to server localhost:62139 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-03-31T16:25:55.154705Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
2025-03-31T16:25:55.170325Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480
2025-03-31T16:25:55.175318Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:25:55.229500Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:25:55.398904Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:25:55.513247Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:25:58.147573Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010730816368097:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:25:58.147668Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:25:58.191278Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-03-31T16:25:58.229747Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-03-31T16:25:58.274366Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-03-31T16:25:58.305696Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-03-31T16:25:58.337518Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-03-31T16:25:58.383053Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-03-31T16:25:58.480253Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010730816368612:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:25:58.480341Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:25:58.480465Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010730816368617:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:25:58.483411Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480
2025-03-31T16:25:58.496395Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488010730816368619:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:25:58.601474Z node 2 :TX_PROXY ERROR: Actor# [2:7488010730816368676:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:25:59.454036Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488010713636497150:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:59.456395Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> SystemView::CollectScanQueries [GOOD] >> SystemView::CollectScriptingQueries ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest >> SystemView::TabletsRangesPredicateExtractDisabled [GOOD] Test command err: 2025-03-31T16:25:17.762103Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010558120249184:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:17.762218Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0019f0/r3tmp/tmpEcUE97/pdisk_1.dat 2025-03-31T16:25:18.187569Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:25:18.191615Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:18.192418Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:18.222656Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18893, node 1 2025-03-31T16:25:18.395627Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:25:18.395653Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:25:18.395659Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:25:18.395748Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6234 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:25:18.908076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:25:20.959838Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010571005151719:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:25:20.960006Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:25:20.960348Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010571005151731:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:25:20.969407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480
2025-03-31T16:25:20.985686Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010571005151733:2332], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:25:21.061189Z node 1 :TX_PROXY ERROR: Actor# [1:7488010575300119080:2337] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:25:21.796581Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jqpgb3bz4g2887912m4abd6h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWU3NTg1MDAtNjU5M2EyNzktYWMyMmUxZjMtNWFmMjhjZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:25:21.839568Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7488010575300119117:2341], owner: [1:7488010575300119113:2339], scan id: 0, table id: [72057594046644480:1:0:ds_storage_pools] 2025-03-31T16:25:21.840231Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7488010575300119117:2341], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-03-31T16:25:21.842827Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7488010575300119117:2341], row count: 0, finished: 1 2025-03-31T16:25:21.842912Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7488010575300119117:2341], owner: [1:7488010575300119113:2339], scan id: 0, table id: [72057594046644480:1:0:ds_storage_pools] 2025-03-31T16:25:21.855144Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438321789, txId: 281474976710660] shutting down 2025-03-31T16:25:22.761994Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010558120249184:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:22.762084Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:25:22.964019Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01jqpgb75t16rv2trv4fctt8tk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDQ2NDc4YTItNjRkODkyNGMtMWE4Yjk3YmQtNWNlYWZiZjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-31T16:25:22.965977Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7488010579595086461:2356], owner: [1:7488010579595086457:2354], scan id: 0, table id: [72057594046644480:1:0:ds_storage_pools] 2025-03-31T16:25:22.966596Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7488010579595086461:2356], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-03-31T16:25:22.966794Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7488010579595086461:2356], row count: 0, finished: 1 2025-03-31T16:25:22.966837Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7488010579595086461:2356], owner: [1:7488010579595086457:2354], scan id: 0, table id: [72057594046644480:1:0:ds_storage_pools] 2025-03-31T16:25:22.970548Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438322963, txId: 281474976710662] shutting down 2025-03-31T16:25:24.054898Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jqpgb889ecccb3vhq9qvx99f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmY4OTE4MDktMjUwZjA0YmQtYmJjYmQ4MTgtZDc2YjFhNWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:25:24.056185Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7488010588185021096:2370], owner: [1:7488010588185021092:2368], scan id: 0, table id: [72057594046644480:1:0:ds_storage_pools] 2025-03-31T16:25:24.056673Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7488010588185021096:2370], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-03-31T16:25:24.065021Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7488010588185021096:2370], row count: 1, finished: 1 2025-03-31T16:25:24.065093Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7488010588185021096:2370], owner: [1:7488010588185021092:2368], scan id: 0, table id: [72057594046644480:1:0:ds_storage_pools] 2025-03-31T16:25:24.068114Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438324054, txId: 281474976710664] shutting down 2025-03-31T16:25:24.508216Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488010588178369995:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:24.508314Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0019f0/r3tmp/tmpm92eQW/pdisk_1.dat 2025-03-31T16:25:24.601565Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1706, node 2 2025-03-31T16:25:24.650744Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:24.650823Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:24.652502Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:25:24.659288Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or 
outdated, will use file: (empty maybe) 2025-03-31T16:25:24.659316Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:25:24.659324Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:25:24.659450Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23436 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:25:24.893627Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-0 ... , finished: 1 2025-03-31T16:25:53.392252Z node 10 :SYSTEM_VIEWS INFO: Scan finished, actor: [10:7488010712447107844:2422], owner: [10:7488010712447107841:2420], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-03-31T16:25:53.395890Z node 10 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438353383, txId: 281474976710673] shutting down 2025-03-31T16:25:53.568773Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710676. Ctx: { TraceId: 01jqpgc4zwf01wm8jgnk2nbn11, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=N2I0Y2JiZDUtOGUwNGE5MTAtZDJmYzBlYzMtMmU4ZGZmMDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:25:53.574763Z node 10 :SYSTEM_VIEWS INFO: Scan started, actor: [10:7488010712447107879:2433], owner: [10:7488010712447107876:2431], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-03-31T16:25:53.576170Z node 10 :SYSTEM_VIEWS INFO: Scan prepared, actor: [10:7488010712447107879:2433], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-03-31T16:25:53.576539Z node 10 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [10:7488010712447107879:2433], row count: 3, finished: 1 2025-03-31T16:25:53.576562Z node 10 :SYSTEM_VIEWS INFO: Scan finished, actor: [10:7488010712447107879:2433], owner: [10:7488010712447107876:2431], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-03-31T16:25:53.581323Z node 10 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438353567, txId: 281474976710675] shutting down 2025-03-31T16:25:53.736690Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710678. 
Ctx: { TraceId: 01jqpgc55t2d9ckbxxmkm7gqw2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ZWNmNzcxYTEtODM0NzI4MGQtNjdhYWIwYzAtMzc1ZmZiZmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:25:53.739473Z node 10 :SYSTEM_VIEWS INFO: Scan started, actor: [10:7488010712447107913:2443], owner: [10:7488010712447107910:2441], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-03-31T16:25:53.740314Z node 10 :SYSTEM_VIEWS INFO: Scan prepared, actor: [10:7488010712447107913:2443], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-03-31T16:25:53.740823Z node 10 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [10:7488010712447107913:2443], row count: 4, finished: 1 2025-03-31T16:25:53.740850Z node 10 :SYSTEM_VIEWS INFO: Scan finished, actor: [10:7488010712447107913:2443], owner: [10:7488010712447107910:2441], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-03-31T16:25:53.744379Z node 10 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438353734, txId: 281474976710677] shutting down 2025-03-31T16:25:53.899795Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710680. Ctx: { TraceId: 01jqpgc5arcg6gszcpy3mypw73, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ZTEzYTAxNTktMTViMjUxYTYtNTY0M2E0N2QtMzFjNjU0MGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:25:53.901681Z node 10 :SYSTEM_VIEWS INFO: Scan started, actor: [10:7488010712447107950:2454], owner: [10:7488010712447107946:2452], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-03-31T16:25:53.902518Z node 10 :SYSTEM_VIEWS INFO: Scan prepared, actor: [10:7488010712447107950:2454], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-03-31T16:25:53.903031Z node 10 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [10:7488010712447107950:2454], row count: 4, finished: 1 2025-03-31T16:25:53.903060Z node 10 :SYSTEM_VIEWS INFO: Scan finished, actor: [10:7488010712447107950:2454], owner: [10:7488010712447107946:2452], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-03-31T16:25:53.906809Z node 10 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438353898, txId: 281474976710679] shutting down 2025-03-31T16:25:54.963558Z node 11 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[11:7488010714722432059:2069];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:54.963848Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0019f0/r3tmp/tmpcdctqD/pdisk_1.dat 2025-03-31T16:25:55.170870Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:55.170946Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:55.172273Z node 11 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:25:55.189542Z 
node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28797, node 11 2025-03-31T16:25:55.259774Z node 11 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:25:55.259802Z node 11 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:25:55.259810Z node 11 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:25:55.259958Z node 11 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31637 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:25:55.687214Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:59.272864Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:25:59.390503Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7488010736197269451:2354], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:59.390588Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7488010736197269440:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:59.390814Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:59.396353Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-31T16:25:59.416406Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7488010736197269454:2355], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-31T16:25:59.470397Z node 11 :TX_PROXY ERROR: Actor# [11:7488010736197269505:2495] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:25:59.823515Z node 11 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jqpgcatr0xze9fkjpbawqx1g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=11&id=Nzc4MjVlODUtODBiODg1ZGUtN2E1ZWI3ODYtNzhiODI2MTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:25:59.828700Z node 11 :SYSTEM_VIEWS INFO: Scan started, actor: [11:7488010736197269553:2365], owner: [11:7488010736197269552:2364], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-03-31T16:25:59.832068Z node 11 :SYSTEM_VIEWS INFO: Scan prepared, actor: [11:7488010736197269553:2365], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-03-31T16:25:59.832763Z node 11 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [11:7488010736197269553:2365], row count: 4, finished: 1 2025-03-31T16:25:59.832795Z node 11 :SYSTEM_VIEWS INFO: Scan finished, actor: [11:7488010736197269553:2365], owner: [11:7488010736197269552:2364], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-03-31T16:25:59.837021Z node 11 :SYSTEM_VIEWS INFO: Scan started, actor: [11:7488010736197269559:2368], owner: [11:7488010736197269552:2364], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-03-31T16:25:59.843244Z node 11 :SYSTEM_VIEWS INFO: Scan prepared, actor: [11:7488010736197269559:2368], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-03-31T16:25:59.843793Z node 11 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [11:7488010736197269559:2368], row count: 4, finished: 1 2025-03-31T16:25:59.843827Z node 11 :SYSTEM_VIEWS INFO: Scan finished, actor: [11:7488010736197269559:2368], owner: [11:7488010736197269552:2364], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-03-31T16:25:59.849140Z node 11 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438359820, txId: 281474976710661] shutting down 2025-03-31T16:25:59.966530Z node 11 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7488010714722432059:2069];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:59.966619Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpIndexLookupJoin::SimpleInnerJoin+StreamLookup [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCH3 >> KqpQueryPerf::IdxLookupJoinThreeWay-QueryService [GOOD] >> KqpQueryPerf::Insert-QueryService+UseSink [GOOD] >> KqpIndexLookupJoin::CheckCastUtf8ToString+StreamLookupJoin-NotNull |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> TColumnShardTestSchema::RebootHotTiers [GOOD] >> KqpIndexLookupJoin::CheckCastInt32ToInt16+StreamLookupJoin-NotNull >> 
KqpJoinOrder::TPCDS34-ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::SimpleInnerJoin+StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 9685, MsgBus: 6803 2025-03-31T16:25:54.997362Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010714175081399:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:54.997491Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001973/r3tmp/tmpuPYLov/pdisk_1.dat 2025-03-31T16:25:55.493396Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:25:55.499136Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:55.499209Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:55.501438Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9685, node 1 2025-03-31T16:25:55.603909Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:25:55.603961Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:25:55.603972Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:25:55.604096Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6803 TClient is connected to server localhost:6803 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:25:56.164781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:25:56.195929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:25:56.346875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
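The repeated "Operation part proposed ok, but propose itself is undo unsafe" warnings above are emitted by the schemeshard once per CREATE TABLE that the test harness issues while setting up its fixtures; each "waiting..." is the harness blocking on that scheme transaction. A minimal sketch of issuing one equivalent scheme operation through the YDB C++ SDK follows; the endpoint, table name, and columns are illustrative assumptions, not values recovered from this log, and header paths vary between SDK versions.

    #include <ydb/public/sdk/cpp/client/ydb_driver/driver.h>
    #include <ydb/public/sdk/cpp/client/ydb_table/table.h>
    #include <iostream>

    int main() {
        // Assumption: a test server like the ones above, e.g. gRPC port 9685.
        auto config = NYdb::TDriverConfig()
            .SetEndpoint("grpc://localhost:9685")
            .SetDatabase("/Root");
        NYdb::TDriver driver(config);
        NYdb::NTable::TTableClient client(driver);
        auto session = client.CreateSession().GetValueSync().GetSession();

        // Each ESchemeOpCreateTable warning in the log corresponds to one
        // scheme transaction like this.
        auto status = session.ExecuteSchemeQuery(R"(
            CREATE TABLE `/Root/Join1` (
                Key Int32,
                Value Utf8,
                PRIMARY KEY (Key)
            );
        )").GetValueSync();
        if (!status.IsSuccess()) {
            std::cerr << status.GetIssues().ToString() << std::endl;
        }
        driver.Stop(true);
        return 0;
    }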
2025-03-31T16:25:56.514840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:56.594354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:58.434932Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010731354952343:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:58.435047Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:58.749758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:25:58.817332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:25:58.889953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:25:58.938197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:25:58.970339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:25:59.040706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:25:59.103809Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010735649920163:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:59.103906Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:59.104079Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010735649920168:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:59.108495Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:25:59.133184Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010735649920170:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:25:59.232007Z node 1 :TX_PROXY ERROR: Actor# [1:7488010735649920226:3457] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:26:00.000000Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010714175081399:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:00.000055Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:26:00.258157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:26:00.301898Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:26:00.390550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T16:26:00.430086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T16:26:00.484831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-31T16:26:00.569022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 >> TransferWriter::Write_ColumnTable |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> KqpNewEngine::ContainerRegistryCombiner ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Insert-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 2281, MsgBus: 13758 2025-03-31T16:25:51.116040Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010702997604659:2220];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:51.118560Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000e69/r3tmp/tmpLW34O0/pdisk_1.dat 2025-03-31T16:25:51.552890Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:25:51.561712Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:51.561814Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:51.563908Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 
TServer::EnableGrpc on GrpcPort 2281, node 1 2025-03-31T16:25:51.715033Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:25:51.715061Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:25:51.715067Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:25:51.715185Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13758 TClient is connected to server localhost:13758 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:25:52.317615Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:52.353908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:52.513750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:52.677336Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:52.756122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:54.399999Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010715882508143:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:54.400110Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:54.736580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:25:54.771340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:25:54.807206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:25:54.836813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:25:54.871902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:25:54.922510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:25:54.990531Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010715882508654:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:54.990597Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010715882508659:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:54.990627Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:54.999413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:25:55.009867Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010715882508661:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:25:55.094458Z node 1 :TX_PROXY ERROR: Actor# [1:7488010720177476012:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:25:56.106704Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010702997604659:2220];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:56.106779Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 16862, MsgBus: 62860 2025-03-31T16:25:57.491732Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488010726341838856:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:57.491777Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000e69/r3tmp/tmpcJk2tr/pdisk_1.dat 2025-03-31T16:25:57.626730Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16862, node 2 2025-03-31T16:25:57.649001Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:57.649076Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:57.653629Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:25:57.698369Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:25:57.698402Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:25:57.698414Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:25:57.698531Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62860 TClient is connected to server localhost:62860 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
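The WaitRootIsUp / TClient::Ls exchanges above are the harness polling until the scheme board serves the root directory. Outside the test framework the same readiness check can be approximated with the SDK scheme client; a sketch reusing the driver from the previous snippet (the API names are from the public C++ SDK, the polling policy is an assumption):

    #include <ydb/public/sdk/cpp/client/ydb_scheme/scheme.h>

    bool RootIsUp(NYdb::TDriver& driver) {
        NYdb::NScheme::TSchemeClient scheme(driver);
        auto res = scheme.DescribePath("/Root").GetValueSync();
        if (!res.IsSuccess()) {
            return false;  // scheme board not serving the root yet
        }
        // Mirrors the Ls response above: Name "Root", Owner "root@builtin",
        // PathType directory.
        return res.GetEntry().Type == NYdb::NScheme::ESchemeEntryType::Directory;
    }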
2025-03-31T16:25:58.209512Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:58.215925Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:25:58.226257Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:58.311725Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:58.468666Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:58.538157Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:00.999333Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010739226742504:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:00.999461Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:01.057429Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:26:01.098409Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:26:01.135755Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:26:01.165428Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:26:01.220700Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:26:01.309918Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:26:01.407100Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010743521710322:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:01.407204Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:01.407536Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010743521710327:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:01.410977Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:26:01.422178Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488010743521710329:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:26:01.501419Z node 2 :TX_PROXY ERROR: Actor# [2:7488010743521710385:3449] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:26:02.496419Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488010726341838856:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:02.496482Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IdxLookupJoinThreeWay-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 63955, MsgBus: 16187 2025-03-31T16:25:49.318109Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010694391764190:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:49.318312Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000e6b/r3tmp/tmpx75xxo/pdisk_1.dat 2025-03-31T16:25:49.806958Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63955, node 1 2025-03-31T16:25:49.825519Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:49.825620Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:49.828795Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:25:49.931098Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:25:49.931146Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:25:49.931155Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:25:49.931309Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16187 TClient is connected to server localhost:16187 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
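The "Resource pool default not found" warnings, the "Transaction ... completed, doublechecking" retry, and the TX_PROXY "path exist, request accepts it" error recur in every test block of this log: together they show the workload service lazily creating /Root/.metadata/workload_manager/pools/default on first query, and the final error is the benign loser of that creation race. A hedged sketch of declaring a pool explicitly instead (pool name and limits are illustrative; CREATE RESOURCE POOL assumes a YDB build with workload-manager support, which these tests evidently have):

    #include <ydb/public/sdk/cpp/client/ydb_table/table.h>

    void CreatePool(NYdb::NTable::TSession& session) {
        auto status = session.ExecuteSchemeQuery(R"(
            CREATE RESOURCE POOL test_pool WITH (
                CONCURRENT_QUERY_LIMIT = 10,
                QUEUE_SIZE = 100
            );
        )").GetValueSync();
        // A resource pool is a scheme object, so concurrent creation can
        // race exactly as in the log; "path exist, request accepts it"
        // means the second proposer treats the existing pool as success.
        (void)status;
    }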
2025-03-31T16:25:50.606705Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:50.617790Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:25:50.634964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:50.750502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:50.907625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:50.983043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:52.802012Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010707276667842:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:52.802111Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:53.221057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:25:53.254391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:25:53.342313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:25:53.375244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:25:53.426367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:25:53.471524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:25:53.520249Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010711571635652:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:53.520358Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:53.520707Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010711571635657:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:53.524568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:25:53.536159Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010711571635659:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:25:53.598009Z node 1 :TX_PROXY ERROR: Actor# [1:7488010711571635713:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:25:54.316558Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010694391764190:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:54.316632Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 10651, MsgBus: 61251 2025-03-31T16:25:56.408814Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488010721942024559:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:56.408981Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000e6b/r3tmp/tmpIbixAq/pdisk_1.dat 2025-03-31T16:25:56.605190Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:25:56.612211Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:56.612305Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:56.617769Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10651, node 2 2025-03-31T16:25:56.679603Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:25:56.679629Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:25:56.679636Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:25:56.679745Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61251 TClient is connected to server localhost:61251 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
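The SYSTEM_VIEWS scans near the top of this section (table ids ds_storage_pools and hive_tablets) are what a plain SELECT against the .sys directory turns into on the server side; the "row count: N" lines are the scan batches sent back. A sketch of triggering such a scan from a client, with session setup as in the first snippet (the view name is one actually scanned above; the transaction mode is an assumption):

    #include <ydb/public/sdk/cpp/client/ydb_table/table.h>
    #include <iostream>

    void DumpStoragePools(NYdb::NTable::TSession& session) {
        auto result = session.ExecuteDataQuery(
            R"(SELECT * FROM `.sys/ds_storage_pools`;)",
            NYdb::NTable::TTxControl::BeginTx(
                NYdb::NTable::TTxSettings::OnlineRO()).CommitTx()
        ).GetValueSync();
        if (result.IsSuccess()) {
            // Each returned row is one storage pool, matching the
            // "Sending scan batch ... row count" entries in the log.
            std::cout << result.GetResultSets().size() << " result set(s)\n";
        }
    }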
2025-03-31T16:25:57.134844Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:57.148341Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:57.225276Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:57.395062Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:57.475215Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:59.767241Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010734826928226:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:59.767361Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:59.820209Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:25:59.897779Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:25:59.978944Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:26:00.057378Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:26:00.106319Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:26:00.151743Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:26:00.239462Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010739121896045:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:00.239623Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:00.243526Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010739121896050:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:00.248475Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:26:00.272650Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488010739121896052:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:26:00.365084Z node 2 :TX_PROXY ERROR: Actor# [2:7488010739121896107:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:26:01.409556Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488010721942024559:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:01.409645Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootHotTiers [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143438872.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143438872.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=143438872.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123438872.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143438872.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=143438872.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123437672.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123438872.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=123438872.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123437672.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123437672.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=123437672.000000s;Name=;Codec=}; 2025-03-31T16:24:32.744030Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:24:32.818352Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:24:32.832656Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:24:32.832888Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:24:32.839459Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:24:32.839631Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:24:32.839803Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:24:32.839897Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:24:32.839974Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:24:32.840039Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:24:32.840096Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:24:32.840158Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:24:32.840222Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:24:32.840298Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:24:32.840384Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:24:32.840445Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:24:32.858788Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:24:32.858924Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:24:32.858963Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:24:32.859125Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:24:32.859253Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:24:32.859322Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:24:32.859362Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:24:32.859425Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:24:32.859463Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:24:32.859500Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:24:32.859532Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:24:32.859690Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:24:32.859763Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:24:32.859806Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:24:32.859841Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:24:32.859923Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:24:32.859982Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:24:32.860028Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:24:32.860060Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:24:32.860126Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:24:32.860161Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:24:32.860182Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 
2025-03-31T16:24:32.860212Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:24:32.860241Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:24:32.860262Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:24:32.860537Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=32; 2025-03-31T16:24:32.860595Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=25; 2025-03-31T16:24:32.860643Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=19; 2025-03-31T16:24:32.860698Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=27; 2025-03-31T16:24:32.860825Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:24:32.860886Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:24:32.860920Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:24:32.861065Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:24:32.861093Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:24:32.861112Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=94 ... 
cpp:29;EXECUTE:finishLoadingTime=515; 2025-03-31T16:26:02.800115Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=65727; 2025-03-31T16:26:02.812271Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=12064; 2025-03-31T16:26:02.825169Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=11809; 2025-03-31T16:26:02.825292Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=12908; 2025-03-31T16:26:02.825487Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=121; 2025-03-31T16:26:02.825612Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=69; 2025-03-31T16:26:02.825781Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=104; 2025-03-31T16:26:02.825930Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=95; 2025-03-31T16:26:02.841472Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=15459; 2025-03-31T16:26:02.855466Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=13867; 2025-03-31T16:26:02.855584Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=32; 2025-03-31T16:26:02.855652Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=27; 2025-03-31T16:26:02.855690Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=6; 2025-03-31T16:26:02.855731Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=8; 2025-03-31T16:26:02.855768Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2025-03-31T16:26:02.855838Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=38; 2025-03-31T16:26:02.855877Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=6; 2025-03-31T16:26:02.855953Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=46; 2025-03-31T16:26:02.855988Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=5; 2025-03-31T16:26:02.856037Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=22; 2025-03-31T16:26:02.856138Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=62; 2025-03-31T16:26:02.856538Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=348; 2025-03-31T16:26:02.856585Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=129675; 2025-03-31T16:26:02.856781Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=46800024;raw_bytes=72380025;count=29;records=720000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-03-31T16:26:02.856912Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-03-31T16:26:02.856971Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-03-31T16:26:02.857046Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-03-31T16:26:02.879301Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=1; 2025-03-31T16:26:02.879489Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-31T16:26:02.879556Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:26:02.879636Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=7; 2025-03-31T16:26:02.879705Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-03-31T16:26:02.879752Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=7;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-31T16:26:02.879802Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-31T16:26:02.879844Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-31T16:26:02.879947Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-31T16:26:02.880465Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:26:02.880841Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;self_id=[1:2654:4528];tablet_id=9437184;parent=[1:2612:4493];fline=manager.cpp:82;event=ask_data;request=request_id=155;1={portions_count=29};; 2025-03-31T16:26:02.881161Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-03-31T16:26:02.881454Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-03-31T16:26:02.881488Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-03-31T16:26:02.881514Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-03-31T16:26:02.881557Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-31T16:26:02.881617Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:26:02.881698Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=7; 2025-03-31T16:26:02.881762Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-03-31T16:26:02.881808Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=7;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-31T16:26:02.881859Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-31T16:26:02.881900Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-31T16:26:02.881994Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-31T16:26:02.882638Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1452;size=29;path_id=1; 2025-03-31T16:26:02.884701Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1503;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 240000/15598728 160000/10402096 160000/10402096 80000/5203544 0/0 >> TopicService::OneConsumer_TheRangesDoNotOverlap [GOOD] >> KqpIndexLookupJoin::LeftJoinRightNullFilter-StreamLookup >> KqpIndexLookupJoin::LeftSemi >> KqpIndexLookupJoin::CheckCastUint32ToUint16+StreamLookupJoin+NotNull [GOOD] |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> KqpIndexLookupJoin::CheckCastInt64ToUint64-StreamLookupJoin-NotNull >> TopicService::OneConsumer_TheRangesOverlap >> KqpRanges::DuplicateKeyPredicateLiteral [GOOD] >> KqpRanges::DuplicateKeyPredicateParam >> KqpJoin::RightTableKeyPredicate >> KqpJoin::IdxLookupPartialWithTempTable [GOOD] >> KqpJoin::IdxLookupSelf [GOOD] >> KqpJoin::JoinDupColumnRight [GOOD] >> TColumnShardTestSchema::RebootHotTiersRevCompression [GOOD] >> KqpNotNullColumns::ReplaceNotNull [GOOD] >> KqpNotNullColumns::ReplaceNotNullPg ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::CheckCastUint32ToUint16+StreamLookupJoin+NotNull [GOOD] Test command err: Trying to start YDB, gRPC: 30795, MsgBus: 15993 2025-03-31T16:25:53.162159Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010710456802331:2209];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:53.162449Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001976/r3tmp/tmpGys8fr/pdisk_1.dat 2025-03-31T16:25:53.529413Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30795, node 1 2025-03-31T16:25:53.572152Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:53.572277Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:53.573884Z node 1 :HIVE WARN: HIVE#72057594037968897 
Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:25:53.641143Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:25:53.641169Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:25:53.641182Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:25:53.641304Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15993 TClient is connected to server localhost:15993 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:25:54.162156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:54.196311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:54.349683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:54.542084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:54.614865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:56.436566Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010723341705832:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:56.436700Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:56.773374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:25:56.805213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:25:56.873130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:25:56.907565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:25:56.943248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:25:57.014603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:25:57.103480Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010727636673645:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:57.103573Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:57.103623Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010727636673650:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:57.107627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:25:57.122453Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010727636673652:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:25:57.212721Z node 1 :TX_PROXY ERROR: Actor# [1:7488010727636673710:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:25:58.074039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:25:58.128027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:25:58.157823Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010710456802331:2209];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:58.157959Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 30144, MsgBus: 29907 2025-03-31T16:25:59.530259Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488010734899980317:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:59.574166Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001976/r3tmp/tmpRITEZL/pdisk_1.dat 2025-03-31T16:25:59.713938Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:25:59.718611Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:59.719270Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:59.723268Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30144, node 2 2025-03-31T16:25:59.823596Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:25:59.823621Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:25:59.823628Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:25:59.823731Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29907 TClient is connected to server localhost:29907 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:26:00.236794Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:00.243905Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:26:00.248643Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:00.351039Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:00.637730Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:00.726966Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:02.763204Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010747784883955:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:02.763324Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:02.802394Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:26:02.832462Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:26:02.864606Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:26:02.901559Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:26:02.942550Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:26:02.993666Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:26:03.057267Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010752079851763:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:03.057368Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:03.057617Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010752079851768:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:03.061779Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:26:03.075914Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488010752079851770:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:26:03.133989Z node 2 :TX_PROXY ERROR: Actor# [2:7488010752079851823:3441] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:26:04.211402Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:26:04.295346Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-31T16:26:04.531062Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488010734899980317:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:04.531150Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpNewEngine::DeleteOn+UseSink >> TColumnShardTestReadWrite::CompactionInGranule_PKTimestamp_Reboot [GOOD] >> KqpIndexLookupJoin::CheckCastUint32ToUint16-StreamLookupJoin-NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastUint32ToUint16-StreamLookupJoin+NotNull >> DemoTx::Scenario_1 [GOOD] |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::IdxLookupPartialWithTempTable [GOOD] Test command err: Trying to start YDB, gRPC: 9605, MsgBus: 19461 2025-03-31T16:25:59.541771Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010738462669032:2069];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:59.546266Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00196b/r3tmp/tmpicuWXx/pdisk_1.dat 2025-03-31T16:25:59.928039Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:25:59.931227Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:59.931337Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:59.934869Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9605, node 1 2025-03-31T16:26:00.048624Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:26:00.048652Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:26:00.048666Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:26:00.048794Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19461 TClient is connected to server localhost:19461 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:26:00.769161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:26:00.813429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:26:00.998653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:01.181856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:01.275806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:03.197509Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010755642539981:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:03.197617Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:03.574966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:26:03.618944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:26:03.666520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:26:03.716678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:26:03.786049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:26:03.837159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:26:03.900496Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010755642540498:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:03.900556Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:03.900717Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010755642540503:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:03.905141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:26:03.918377Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010755642540505:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:26:04.004978Z node 1 :TX_PROXY ERROR: Actor# [1:7488010759937507857:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:26:04.547195Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010738462669032:2069];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:04.547262Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:26:05.188593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:26:05.268116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:26:05.318417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480
: Warning: Execution, code: 1060
:3:44: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001 >> KqpSqlIn::TableSource [GOOD] >> KqpSqlIn::SimpleKey_Negated ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::JoinDupColumnRight [GOOD] Test command err: Trying to start YDB, gRPC: 62129, MsgBus: 24160 2025-03-31T16:25:59.300546Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010738503692723:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:59.300619Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00196c/r3tmp/tmprOROGf/pdisk_1.dat 2025-03-31T16:25:59.820470Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:59.820561Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:59.827333Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:25:59.832098Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62129, node 1 2025-03-31T16:25:59.965862Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:25:59.965891Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:25:59.965899Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:25:59.966011Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24160 TClient is connected to server localhost:24160 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:26:00.782224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:00.815861Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:26:00.822185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:26:00.996873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:01.175653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:01.247116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:02.893991Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010751388596400:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:02.894131Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:03.248139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:26:03.303200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:26:03.346959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:26:03.383370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:26:03.470820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:26:03.519901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:26:03.581376Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010755683564214:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:03.581452Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:03.581675Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010755683564219:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:03.585528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:26:03.597162Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010755683564221:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:26:03.661070Z node 1 :TX_PROXY ERROR: Actor# [1:7488010755683564274:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:26:04.300080Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010738503692723:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:04.300170Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:26:04.765119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:26:04.808148Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:26:04.849978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::IdxLookupSelf [GOOD] Test command err: Trying to start YDB, gRPC: 2977, MsgBus: 8542 2025-03-31T16:25:59.256915Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010736275017972:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:59.257045Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00196d/r3tmp/tmpZ2XuG3/pdisk_1.dat 2025-03-31T16:25:59.771608Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:25:59.777044Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:59.777154Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:59.791674Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2977, node 1 2025-03-31T16:25:59.957889Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:25:59.957915Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:25:59.957922Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:25:59.958064Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8542 TClient is connected to server localhost:8542 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:26:00.612284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:00.656791Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:26:00.669732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:00.865093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:01.042730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:01.127035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:03.036229Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010753454888941:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:03.036379Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:03.369788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:26:03.406094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:26:03.442089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:26:03.478916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:26:03.527498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:26:03.605198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:26:03.705398Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010753454889464:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:03.705474Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:03.705827Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010753454889469:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:03.709679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:26:03.725918Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010753454889471:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:26:03.808408Z node 1 :TX_PROXY ERROR: Actor# [1:7488010753454889526:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:26:04.259215Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010736275017972:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:04.259285Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:26:04.976196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:26:05.060744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:26:05.131574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480
: Warning: Execution, code: 1060
:3:29: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootHotTiersRevCompression [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143438873.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143438873.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=143438873.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123438873.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143438873.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=143438873.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123437673.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123438873.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=123438873.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123437673.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123437673.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=123437673.000000s;Name=;Codec=}; 2025-03-31T16:24:34.026060Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:24:34.120568Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:24:34.148719Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:24:34.149028Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:24:34.157839Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:24:34.158086Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:24:34.158348Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:24:34.158499Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:24:34.158640Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:24:34.158765Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:24:34.158877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:24:34.158992Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:24:34.159163Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:24:34.159309Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:24:34.159425Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:24:34.159539Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:24:34.191406Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:24:34.191586Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:24:34.191650Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:24:34.191836Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:24:34.192007Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:24:34.192085Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:24:34.192133Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:24:34.192234Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 
2025-03-31T16:24:34.192302Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:24:34.192349Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:24:34.192383Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:24:34.192572Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:24:34.192659Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:24:34.192706Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:24:34.192744Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:24:34.192839Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:24:34.192904Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:24:34.192948Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:24:34.192980Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:24:34.193053Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:24:34.193092Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:24:34.193122Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:24:34.193173Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:24:34.193216Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:24:34.193253Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:24:34.193709Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=51; 2025-03-31T16:24:34.193802Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=38; 2025-03-31T16:24:34.193882Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=37; 2025-03-31T16:24:34.193969Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=42; 2025-03-31T16:24:34.194178Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:24:34.194267Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:24:34.194314Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:24:34.194523Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:24:34.194569Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:24:34.194600Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=94 ... pp:29;EXECUTE:finishLoadingTime=414; 2025-03-31T16:26:06.500483Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=59999; 2025-03-31T16:26:06.511993Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=11406; 2025-03-31T16:26:06.524981Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=11822; 2025-03-31T16:26:06.525120Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=12998; 2025-03-31T16:26:06.525323Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=122; 2025-03-31T16:26:06.525458Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=75; 2025-03-31T16:26:06.525665Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=131; 2025-03-31T16:26:06.525815Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=93; 
2025-03-31T16:26:06.540965Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=15061; 2025-03-31T16:26:06.569375Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=28262; 2025-03-31T16:26:06.569542Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=38; 2025-03-31T16:26:06.569633Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=37; 2025-03-31T16:26:06.569704Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=6; 2025-03-31T16:26:06.569752Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=11; 2025-03-31T16:26:06.569816Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=7; 2025-03-31T16:26:06.569910Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=46; 2025-03-31T16:26:06.569964Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=8; 2025-03-31T16:26:06.570078Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=69; 2025-03-31T16:26:06.570136Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=7; 2025-03-31T16:26:06.570211Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=34; 2025-03-31T16:26:06.570316Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=60; 2025-03-31T16:26:06.570745Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=382; 2025-03-31T16:26:06.570799Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=139236; 2025-03-31T16:26:06.570979Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=46800024;raw_bytes=72380025;count=29;records=720000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-03-31T16:26:06.571121Z 
node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-03-31T16:26:06.571187Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-03-31T16:26:06.571260Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-03-31T16:26:06.589147Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=1; 2025-03-31T16:26:06.589334Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-31T16:26:06.589418Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:26:06.589502Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=7; 2025-03-31T16:26:06.589568Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-03-31T16:26:06.589614Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=7;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-31T16:26:06.589687Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-31T16:26:06.589732Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-31T16:26:06.589841Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-31T16:26:06.590367Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:26:06.590461Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;self_id=[1:2654:4528];tablet_id=9437184;parent=[1:2612:4493];fline=manager.cpp:82;event=ask_data;request=request_id=155;1={portions_count=29};; 2025-03-31T16:26:06.591313Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-03-31T16:26:06.591723Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-03-31T16:26:06.591761Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-03-31T16:26:06.591788Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-03-31T16:26:06.591832Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-31T16:26:06.591899Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:26:06.591964Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=7; 2025-03-31T16:26:06.592036Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-03-31T16:26:06.592079Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=7;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-31T16:26:06.592133Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-31T16:26:06.592173Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-31T16:26:06.592279Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-31T16:26:06.592891Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1452;size=29;path_id=1; 2025-03-31T16:26:06.594412Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1503;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 240000/15598728 160000/10402096 160000/10402096 80000/5203544 0/0 >> DemoTx::Scenario_2 >> TPersQueueTest::BadTopic [GOOD] >> TPersQueueTest::CloseActiveWriteSessionOnClusterDisable >> KqpJoin::JoinLeftPureCross 
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKTimestamp_Reboot [GOOD] Test command err: 2025-03-31T16:21:33.091169Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:21:33.179149Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:21:33.198887Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:21:33.199172Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:21:33.207151Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:21:33.207368Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:21:33.207611Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:21:33.207720Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:21:33.207808Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:21:33.207894Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:21:33.207988Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:21:33.208066Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:21:33.208174Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:21:33.208254Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:21:33.208331Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:21:33.208388Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:21:33.236550Z node 
1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:21:33.236717Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:21:33.236762Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:21:33.236951Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:21:33.237084Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:21:33.237148Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:21:33.237187Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:21:33.237284Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:21:33.237352Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:21:33.237393Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:21:33.237423Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:21:33.237615Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:21:33.237724Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:21:33.237769Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:21:33.237800Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:21:33.237929Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:21:33.237989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:21:33.238068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:21:33.238100Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:21:33.238172Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:21:33.238213Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:21:33.238245Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:21:33.238300Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:21:33.238335Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:21:33.238360Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:21:33.238736Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=43; 2025-03-31T16:21:33.238808Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=24; 2025-03-31T16:21:33.238868Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=23; 2025-03-31T16:21:33.238939Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=27; 2025-03-31T16:21:33.239110Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:21:33.239180Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:21:33.239221Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:21:33.239416Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:21:33.239458Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:21:33.239485Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:21:33.239590Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 
2025-03-31T16:21:33.239633Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:21:33.239655Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-31T16:21:33.239817Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:21:33.239859Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:21:33.239878Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-31T16:21:33.239970Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T16:21:33.240015Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T16:21:33.240087Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... LOB:0:2696];;column_id:8;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:55;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:56;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:57;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:58;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:64;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:65;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:66;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:67;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:68;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:69;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:70;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:71;blob_range:[NO_BLOB:0:2672];;column_id:8;chunk_idx:72;blob_range:[NO_BLOB:0:2664];;column_id:8;chunk_idx:73;blob_range:[NO_BLOB:0:8448];;column_id:9;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:3;bl
ob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:8;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:9;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:10;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:11;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:12;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:13;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:14;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:15;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:22;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:23;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:30;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:32;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:33;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:34;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:35;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:36;blob_range:[NO_BLOB:0:8464];;column_id:9;chunk_idx:37;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:55;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:56;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:57;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:58;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:64;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:65;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:66;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:67;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:68;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:69
;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:70;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:71;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:72;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:73;blob_range:[NO_BLOB:0:8448];;column_id:7;chunk_idx:0;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:1;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:2;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:3;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:4;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:5;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:6;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:7;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:8;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:9;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:10;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:11;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:12;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:13;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:14;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:15;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:16;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:17;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:18;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:19;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:20;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:21;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:22;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:23;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:24;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:25;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:26;blob_range:[NO_BLOB:0:9040];;column_id:7;chunk_idx:27;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:28;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:29;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:30;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:31;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:32;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:33;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:34;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:35;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:36;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:37;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:38;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:39;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:40;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:41;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:42;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:43;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:44;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:45;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:46;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:47;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:48;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:49;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:50;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:51;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:52;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:53;blob_range:[NO_BLOB:0:9024];;column_id:5;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:7;blob_ran
ge:[NO_BLOB:0:2696];;column_id:5;chunk_idx:8;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:9;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:10;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:11;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:12;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:13;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:14;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:15;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:16;blob_range:[NO_BLOB:0:9456];;column_id:5;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:23;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:24;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:30;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:32;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:33;blob_range:[NO_BLOB:0:9448];;;;switched=(portion_id:61;path_id:1;records_count:25002;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2586528;index_size:28;meta:((produced=SPLIT_COMPACTED;)););(portion_id:57;path_id:1;records_count:25002;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2167032;index_size:28;meta:((produced=INSERTED;)););; 2025-03-31T16:26:06.955692Z node 1 :TX_COLUMNSHARD INFO: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;tablet_id=9437184;parent_id=[1:11489:13116];fline=general_compaction.cpp:135;event=blobs_created;appended=1;switched=2; 2025-03-31T16:26:06.958596Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:11489:13116];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> KqpJoinOrder::TestJoinOrderHintsComplex+ColumnStore >> SystemView::CollectScriptingQueries [GOOD] |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> KqpIndexLookupJoin::LeftSemiJoinWithLeftFilter+StreamLookup >> KqpJoin::RightTableIndexPredicate >> TPersQueueTest::SetupLockSession2 [GOOD] >> TPersQueueTest::SetupLockSession >> AnalyzeColumnshard::AnalyzeSameOperationId >> KqpIndexLookupJoin::CheckCastUtf8ToString+StreamLookupJoin-NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastUtf8ToString+StreamLookupJoin+NotNull >> KqpNewEngine::ContainerRegistryCombiner [GOOD] >> KqpNewEngine::DeferredEffects 
|92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> KqpIndexLookupJoin::CheckCastInt32ToInt16+StreamLookupJoin-NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastInt32ToInt16+StreamLookupJoin+NotNull >> KqpSnapshotRead::ReadOnlyTxCommitsOnConcurrentWrite-withSink >> KqpNotNullColumns::ReplaceNotNullPg [GOOD] >> KqpNotNullColumns::JoinRightTableWithNotNullColumns+StreamLookup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest >> SystemView::CollectScriptingQueries [GOOD] Test command err: 2025-03-31T16:25:17.901241Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010556314619892:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:17.901600Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0019e3/r3tmp/tmpBGCEDy/pdisk_1.dat 2025-03-31T16:25:18.218011Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32559, node 1 2025-03-31T16:25:18.240959Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:18.241068Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:18.245390Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:25:18.245414Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:25:18.245691Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:25:18.392603Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:25:18.392628Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:25:18.392669Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:25:18.392851Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13908 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:25:18.975476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:25:19.024678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:20.752324Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010569199522814:2355], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:20.752464Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010569199522827:2361], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:20.752515Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010569199522828:2362], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:20.752576Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:20.753297Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010569199522841:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:20.754307Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010569199522844:2369], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:20.754370Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010569199522848:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:20.754445Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:20.755384Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010569199522871:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:20.755463Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010569199522874:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:20.755563Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:20.758554Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010569199522896:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:20.758631Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010569199522898:2382], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:20.758676Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010569199522903:2385], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:20.758727Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:20.760426Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010569199522923:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:20.760489Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010569199522926:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:20.760531Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:20.761801Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010569199522940:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:20.761856Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010569199522943:2396], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:20.761900Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:20.762862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-03-31T16:25:20.762999Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010569199522955:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:20.763062Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010569199522960:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:20.763142Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:20.766109Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010569199522973:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:20.766182Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010569199522975:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:20.766222Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010569199522981:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:20.766288Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:20.767953Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010569199522992:2413], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: E ... 010715858760562:2126];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:59.853319Z node 22 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:26:00.265608Z node 22 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [22:7488010741628565020:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:00.265742Z node 22 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:00.266514Z node 22 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [22:7488010741628565032:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:00.272860Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-31T16:26:00.294868Z node 22 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [22:7488010741628565034:2339], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-31T16:26:00.394844Z node 22 :TX_PROXY ERROR: Actor# [22:7488010741628565085:2399] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:26:00.536833Z node 22 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqpgcbp7dy6pakpdmqpkj35b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=22&id=YWMzM2M2NWUtZTE2MDE1MTMtMmEyOGEwY2QtNjhkZDY1MGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:26:00.701979Z node 22 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqpgcbzq3bx0njfmt8j5jyrp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=22&id=NmRiZDEzOGQtODI0MmYxMC03YWJhNjNhYi02NmM0NTFjYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:26:00.733531Z node 22 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438360741, txId: 281474976715662] shutting down 2025-03-31T16:26:00.940826Z node 22 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jqpgcc5s28qqgn54qjvcxcgt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=22&id=NTI0YzhlOS02OGVmODZkNS0xNjE0ZmRjLTFjNGQ0Nzk4, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:26:00.943647Z node 22 :SYSTEM_VIEWS INFO: Scan started, actor: [22:7488010741628565209:2370], owner: [22:7488010741628565206:2368], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_read_bytes_one_minute] 2025-03-31T16:26:00.945468Z node 22 :SYSTEM_VIEWS INFO: Scan prepared, actor: [22:7488010741628565209:2370], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-03-31T16:26:00.946156Z node 22 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [22:7488010741628565209:2370], row count: 2, finished: 1 2025-03-31T16:26:00.946198Z node 22 :SYSTEM_VIEWS INFO: Scan finished, actor: [22:7488010741628565209:2370], owner: [22:7488010741628565206:2368], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_read_bytes_one_minute] 2025-03-31T16:26:00.950026Z node 22 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438360939, txId: 281474976715664] shutting down 2025-03-31T16:26:02.244656Z node 23 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[23:7488010750167257036:2125];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:02.244716Z node 23 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0019e3/r3tmp/tmpxWU5lJ/pdisk_1.dat 2025-03-31T16:26:02.504255Z node 23 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:26:02.526157Z node 23 :HIVE WARN: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:26:02.526279Z node 23 :HIVE WARN: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Disconnected -> 
Connecting 2025-03-31T16:26:02.528025Z node 23 :HIVE WARN: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12318, node 23 2025-03-31T16:26:02.651513Z node 23 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:26:02.651545Z node 23 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:26:02.651556Z node 23 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:26:02.651724Z node 23 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17309 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:26:03.083829Z node 23 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:03.091750Z node 23 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:26:03.106047Z node 23 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:07.245694Z node 23 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[23:7488010750167257036:2125];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:07.245790Z node 23 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:26:08.152679Z node 23 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [23:7488010775937061499:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:08.152778Z node 23 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:08.153081Z node 23 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [23:7488010775937061511:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:08.159232Z node 23 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-31T16:26:08.181089Z node 23 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [23:7488010775937061513:2339], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-31T16:26:08.238189Z node 23 :TX_PROXY ERROR: Actor# [23:7488010775937061567:2402] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:26:08.421459Z node 23 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jqpgckcpenx9berz6fvfwzkc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=23&id=YjM3M2I1ZGMtODViMzQ4Ny1hNjNiN2EyYi01NGQ0NGY2NA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:26:08.801143Z node 23 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01jqpgckvk7djdvypytvdv46az, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=23&id=N2NlOTc3ZDItODlkYzA0N2EtYTQ2ZGMyMTMtZDk4OTI4MzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:26:08.826672Z node 23 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438368847, txId: 281474976710662] shutting down 2025-03-31T16:26:09.114680Z node 23 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jqpgcm4bcgs6cxhqgx6cpb1j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=23&id=MzNhNDA1NjEtM2QwZWU3MTEtODc4Zjg5OGYtZjhmODRmMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:26:09.117195Z node 23 :SYSTEM_VIEWS INFO: Scan started, actor: [23:7488010780232028993:2373], owner: [23:7488010780232028989:2371], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_read_bytes_one_minute] 2025-03-31T16:26:09.143441Z node 23 :SYSTEM_VIEWS INFO: Scan prepared, actor: [23:7488010780232028993:2373], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-03-31T16:26:09.144244Z node 23 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [23:7488010780232028993:2373], row count: 2, finished: 1 2025-03-31T16:26:09.144326Z node 23 :SYSTEM_VIEWS INFO: Scan finished, actor: [23:7488010780232028993:2373], owner: [23:7488010780232028989:2371], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_read_bytes_one_minute] 2025-03-31T16:26:09.212169Z node 23 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438369112, txId: 281474976710664] shutting down >> TColumnShardTestReadWrite::CompactionInGranule_PKUtf8_Reboot [GOOD] |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_transfer_writer/unittest >> DataShardVolatile::DistributedUpsertRestartAfterPlan [GOOD] >> DataShardVolatile::CompactedVolatileChangesCommit >> KqpLimits::TooBigQuery-useSink [GOOD] >> KqpLimits::TooBigKey+useSink >> TGroupMapperTest::MonteCarlo [GOOD] >> KqpIndexLookupJoin::LeftJoinRightNullFilter-StreamLookup [GOOD] >> KqpRanges::DuplicateKeyPredicateParam [GOOD] >> KqpRanges::DuplicateKeyPredicateMixed >> KqpIndexLookupJoin::LeftSemi [GOOD] >> KqpNewEngine::DeleteOn+UseSink [GOOD] >> KqpNewEngine::DeleteOn-UseSink >> KqpNewEngine::StreamLookupWithView >> KqpIndexLookupJoin::CheckCastUint32ToUint16-StreamLookupJoin+NotNull [GOOD] >> SystemView::AuthGroupMembers_Access [GOOD] >> 
SystemView::AuthGroupMembers_ResultOrder ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUtf8_Reboot [GOOD] Test command err: 2025-03-31T16:21:34.362445Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:21:34.444656Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:21:34.463051Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:21:34.463363Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:21:34.470846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:21:34.471018Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:21:34.471232Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:21:34.471317Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:21:34.471416Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:21:34.471514Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:21:34.471606Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:21:34.471688Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:21:34.471766Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:21:34.471838Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:21:34.471916Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:21:34.472001Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:21:34.495132Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:21:34.495272Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:21:34.495313Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:21:34.495462Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:21:34.495585Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:21:34.495641Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:21:34.495679Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:21:34.495769Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:21:34.495854Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:21:34.495890Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:21:34.495918Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:21:34.496114Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:21:34.496197Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:21:34.496239Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:21:34.496264Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:21:34.496350Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:21:34.496400Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:21:34.496432Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:21:34.496452Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:21:34.496516Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:21:34.496543Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:21:34.496568Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:21:34.496615Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:21:34.496639Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:21:34.496662Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:21:34.497027Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=37; 2025-03-31T16:21:34.497101Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=30; 2025-03-31T16:21:34.497170Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=27; 2025-03-31T16:21:34.497241Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=35; 2025-03-31T16:21:34.497391Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:21:34.497432Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:21:34.497457Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:21:34.497605Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:21:34.497639Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:21:34.497661Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:21:34.497765Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:21:34.497800Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:21:34.497835Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-31T16:21:34.498026Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:21:34.498068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:21:34.498090Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-31T16:21:34.498203Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T16:21:34.498244Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T16:21:34.498294Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
B:0:2688];;column_id:8;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:29;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:30;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:32;blob_range:[NO_BLOB:0:2672];;column_id:8;chunk_idx:33;blob_range:[NO_BLOB:0:2664];;column_id:8;chunk_idx:34;blob_range:[NO_BLOB:0:8352];;column_id:8;chunk_idx:35;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:36;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:37;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:55;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:56;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:57;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:58;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:64;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:65;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:66;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:67;blob_range:[NO_BLOB:0:2672];;column_id:8;chunk_idx:68;blob_range:[NO_BLOB:0:2664];;column_id:8;chunk_idx:69;blob_range:[NO_BLOB:0:8336];;column_id:9;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:8;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:9;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:10;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:11;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:12;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:13;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:14;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:15;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:20;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:21;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:22;blob_range:[NO_BLOB
:0:2688];;column_id:9;chunk_idx:23;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:29;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:30;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:32;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:33;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:34;blob_range:[NO_BLOB:0:8352];;column_id:9;chunk_idx:35;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:36;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:37;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:55;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:56;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:57;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:58;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:64;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:65;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:66;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:67;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:68;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:69;blob_range:[NO_BLOB:0:8336];;column_id:7;chunk_idx:0;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:1;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:2;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:3;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:4;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:5;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:6;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:7;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:8;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:9;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:10;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:11;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:12;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:13;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:14;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:15;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:16;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:17;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:18;blob_range:[NO_BLOB:
0:2752];;column_id:7;chunk_idx:19;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:20;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:21;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:22;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:23;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:24;blob_range:[NO_BLOB:0:10208];;column_id:7;chunk_idx:25;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:26;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:27;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:28;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:29;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:30;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:31;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:32;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:33;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:34;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:35;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:36;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:37;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:38;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:39;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:40;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:41;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:42;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:43;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:44;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:45;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:46;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:47;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:48;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:49;blob_range:[NO_BLOB:0:10208];;column_id:5;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:7;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:8;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:9;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:10;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:11;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:12;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:13;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:14;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:15;blob_range:[NO_BLOB:0:9400];;column_id:5;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:23;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:28;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:29;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:30;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:31;blob_range:[NO_BLOB:0:9392];;;;switched=(portion_id:60;path_id:1;records_count:23698;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2507632;index_
size:20;meta:((produced=SPLIT_COMPACTED;)););(portion_id:56;path_id:1;records_count:23698;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2109896;index_size:20;meta:((produced=INSERTED;)););; 2025-03-31T16:26:12.423071Z node 1 :TX_COLUMNSHARD INFO: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;tablet_id=9437184;parent_id=[1:11572:13199];fline=general_compaction.cpp:135;event=blobs_created;appended=1;switched=2; 2025-03-31T16:26:12.425278Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:11572:13199];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> KqpJoin::RightTableKeyPredicate [GOOD] >> KqpIndexLookupJoin::CheckCastInt64ToUint64-StreamLookupJoin-NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastInt64ToUint64-StreamLookupJoin+NotNull |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::MonteCarlo [GOOD] |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftSemi [GOOD] Test command err: Trying to start YDB, gRPC: 28862, MsgBus: 24351 2025-03-31T16:26:06.103300Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010766885588825:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:06.111924Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001961/r3tmp/tmpdKux2s/pdisk_1.dat 2025-03-31T16:26:06.684978Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:26:06.685072Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:26:06.689832Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:26:06.713560Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28862, node 1 2025-03-31T16:26:06.925833Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:26:06.925852Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:26:06.925860Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:26:06.925965Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24351 TClient is connected to server localhost:24351 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:26:07.673815Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:07.725251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:07.931008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:08.196251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:08.289844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:10.042317Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010784065459784:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:10.042457Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:10.309899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:26:10.336682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:26:10.362712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:26:10.437064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:26:10.471966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:26:10.531264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:26:10.620148Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010784065460303:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:10.620239Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:10.620288Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010784065460308:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:10.623551Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:26:10.637433Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010784065460310:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:26:10.734969Z node 1 :TX_PROXY ERROR: Actor# [1:7488010784065460366:3458] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:26:11.099891Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010766885588825:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:11.099961Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:26:11.813139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:26:11.852621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:26:11.890881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T16:26:11.958418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T16:26:11.984212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-31T16:26:12.009815Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftJoinRightNullFilter-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 27413, MsgBus: 17987 2025-03-31T16:26:06.030718Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010766028031731:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:06.046996Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001962/r3tmp/tmpFoLmWy/pdisk_1.dat 2025-03-31T16:26:06.596818Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:26:06.598354Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:26:06.598424Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:26:06.603193Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27413, node 1 2025-03-31T16:26:06.855651Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use 
file: (empty maybe) 2025-03-31T16:26:06.855669Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:26:06.855681Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:26:06.855809Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17987 TClient is connected to server localhost:17987 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:26:07.638894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:07.669381Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:26:07.691659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:07.829547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:08.067070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:08.174237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:09.945061Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010778912935273:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:09.945217Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:10.369128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:26:10.399781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:26:10.435454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:26:10.498958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:26:10.543530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:26:10.576619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:26:10.622136Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010783207903083:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:10.622215Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:10.622293Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010783207903088:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:10.627833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:26:10.641457Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010783207903090:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:26:10.706814Z node 1 :TX_PROXY ERROR: Actor# [1:7488010783207903142:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:26:11.006590Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010766028031731:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:11.006658Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:26:11.628661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:26:11.695843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:26:11.732492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T16:26:11.780757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T16:26:11.819702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-31T16:26:11.868526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 >> TransferWriter::Write_ColumnTable [GOOD] |92.3%| [TA] $(B)/ydb/core/mind/bscontroller/ut/test-results/unittest/{meta.json ... results_accumulator.log} |92.3%| [TA] {RESULT} $(B)/ydb/core/mind/bscontroller/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::CheckCastUint32ToUint16-StreamLookupJoin+NotNull [GOOD] Test command err: Trying to start YDB, gRPC: 61226, MsgBus: 23412 2025-03-31T16:26:00.443714Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010742447607957:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:00.444338Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00196a/r3tmp/tmpiIZZ3N/pdisk_1.dat 2025-03-31T16:26:01.065111Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:26:01.065209Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:26:01.067850Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:26:01.071105Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61226, node 1 2025-03-31T16:26:01.146608Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:26:01.146631Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:26:01.146643Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:26:01.146761Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23412 TClient is connected to server localhost:23412 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:26:01.842312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:01.876128Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:26:01.891678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:26:02.101598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:02.353840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:02.452263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:04.136116Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010759627478794:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:04.136215Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:04.428716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:26:04.514913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:26:04.566280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:26:04.634304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:26:04.690517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:26:04.766070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:26:04.848652Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010759627479316:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:04.848764Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:04.849176Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010759627479321:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:04.853609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:26:04.871486Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010759627479323:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:26:04.924931Z node 1 :TX_PROXY ERROR: Actor# [1:7488010759627479376:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:26:05.417717Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010742447607957:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:05.417793Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:26:05.885194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:26:05.960761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 7255, MsgBus: 1328 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00196a/r3tmp/tmp4oVf3w/pdisk_1.dat 2025-03-31T16:26:08.165262Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:26:08.214105Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:26:08.215885Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:26:08.215965Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:26:08.218348Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7255, node 2 2025-03-31T16:26:08.391780Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:26:08.391803Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:26:08.391811Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:26:08.391921Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1328 TClient is connected to server localhost:1328 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:26:09.088550Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:09.096315Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:26:09.117513Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:09.198837Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:09.371696Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:09.474186Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:11.561697Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010788789320366:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:11.561790Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:11.604884Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:26:11.676828Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:26:11.727068Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:26:11.779790Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:26:11.825676Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:26:11.880500Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:26:11.946183Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010788789320881:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:11.946284Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010788789320886:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:11.946389Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:11.950050Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:26:11.962901Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488010788789320888:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:26:12.051268Z node 2 :TX_PROXY ERROR: Actor# [2:7488010793084288239:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:26:13.077399Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:26:13.136252Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::RightTableKeyPredicate [GOOD] Test command err: Trying to start YDB, gRPC: 8484, MsgBus: 9216 2025-03-31T16:26:07.155937Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010770461504525:2133];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:07.158086Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00195d/r3tmp/tmpepBTHO/pdisk_1.dat 2025-03-31T16:26:07.756066Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:26:07.774093Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:26:07.774188Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:26:07.776733Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8484, node 1 2025-03-31T16:26:08.013693Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:26:08.013712Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:26:08.013719Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:26:08.013807Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9216 TClient is connected to server localhost:9216 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:26:08.651853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:08.701992Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:26:08.716756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:08.863290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:09.062491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:09.158050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:11.004120Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010783346408130:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:11.004248Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:11.386085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:26:11.424883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:26:11.475323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:26:11.555254Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:26:11.616378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:26:11.696864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:26:11.775225Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010787641375947:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:11.775305Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:11.779924Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010787641375952:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:11.786972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:26:11.798690Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010787641375955:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:26:11.879297Z node 1 :TX_PROXY ERROR: Actor# [1:7488010787641376009:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:26:12.153815Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010770461504525:2133];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:12.153895Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:26:12.842773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 >> KqpJoin::JoinLeftPureCross [GOOD] >> SystemView::AuthOwners_TableRange [GOOD] >> SystemView::AuthPermissions ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_transfer_writer/unittest >> TransferWriter::Write_ColumnTable [GOOD] Test command err: 2025-03-31T16:26:04.256832Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010757356696091:2134];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:04.257432Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001ac4/r3tmp/tmpmj9MHV/pdisk_1.dat 2025-03-31T16:26:04.964261Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:26:05.045977Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:26:05.046078Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:26:05.055832Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8245 TServer::EnableGrpc on GrpcPort 9658, node 1 2025-03-31T16:26:05.854861Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:26:05.854890Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:26:05.854896Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:26:05.863293Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8245 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:26:06.888057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:06.967122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:08.116880Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488010770241598951:2314];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:26:08.117096Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488010770241598951:2314];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:26:08.117400Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488010770241598951:2314];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:26:08.117532Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488010770241598951:2314];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:26:08.117665Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488010770241598951:2314];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:26:08.117813Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488010770241598951:2314];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:26:08.117935Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488010770241598951:2314];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:26:08.118362Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488010770241598951:2314];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:26:08.118507Z 
node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488010770241598951:2314];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:26:08.118607Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488010770241598951:2314];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:26:08.118748Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488010770241598951:2314];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:26:08.118852Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488010770241598951:2314];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:26:08.159115Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037950;self_id=[1:7488010770241599096:2325];tablet_id=72075186224037950;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:26:08.159186Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037950;self_id=[1:7488010770241599096:2325];tablet_id=72075186224037950;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:26:08.159404Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037950;self_id=[1:7488010770241599096:2325];tablet_id=72075186224037950;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:26:08.159532Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037950;self_id=[1:7488010770241599096:2325];tablet_id=72075186224037950;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:26:08.159669Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037950;self_id=[1:7488010770241599096:2325];tablet_id=72075186224037950;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:26:08.159788Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037950;self_id=[1:7488010770241599096:2325];tablet_id=72075186224037950;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:26:08.159876Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037950;self_id=[1:7488010770241599096:2325];tablet_id=72075186224037950;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:26:08.160000Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037950;self_id=[1:7488010770241599096:2325];tablet_id=72075186224037950;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:26:08.160109Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037950;self_id=[1:7488010770241599096:2325];tablet_id=72075186224037950;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:26:08.160257Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037950;self_id=[1:7488010770241599096:2325];tablet_id=72075186224037950;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:26:08.160375Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037950;self_id=[1:7488010770241599096:2325];tablet_id=72075186224037950;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:26:08.160496Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037950;self_id=[1:7488010770241599096:2325];tablet_id=72075186224037950;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:26:08.198345Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037937;self_id=[1:7488010770241599085:2323];tablet_id=72075186224037937;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:26:08.198427Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037937;self_id=[1:7488010770241599085:2323];tablet_id=72075186224037937;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:26:08.198653Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037937;self_id=[1:7488010770241599085:2323];tablet_id=72075186224037937;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:26:08.198761Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037937;self_id=[1:7488010770241599085:2323];tablet_id=72075186224037937;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:26:08.198918Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037937;self_id=[1:7488010770241599085:2323];tablet_id=72075186224037937;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:26:08.199043Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037937;self_id=[1:7488010770241599085:2323];tablet_id=72075186224037937;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:26:08.199185Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037937;self_id=[1:7488010770241599085:2323];tablet_id=72075186224037937;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:26:08.199325Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037937;self_id=[1:7488010770241599085:2323];tablet_id=72075186224037937;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:26:08.199432Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037937;self_id=[1:7488010770241599085:2323];tablet_id=72075186224037937;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Sync ... 
_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-31T16:26:10.026737Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-31T16:26:10.032565Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-31T16:26:10.032680Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037911;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-31T16:26:10.038482Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037904;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-31T16:26:10.038533Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037938;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-31T16:26:10.044280Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037934;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-31T16:26:10.044376Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037917;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-31T16:26:10.049938Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037940;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-31T16:26:10.049946Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037941;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-31T16:26:10.055612Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037915;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-31T16:26:10.055613Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037924;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-31T16:26:10.061252Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037922;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-31T16:26:10.061454Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037926;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-31T16:26:10.066964Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-31T16:26:10.066967Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-31T16:26:10.072316Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224037949;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-31T16:26:10.072482Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-31T16:26:10.077961Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-31T16:26:10.078000Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037919;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-31T16:26:10.083896Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037942;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-31T16:26:10.083923Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037913;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-31T16:26:10.089698Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-31T16:26:10.089699Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037944;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-31T16:26:10.095339Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-31T16:26:10.095600Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037898;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-31T16:26:10.101186Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-31T16:26:10.101186Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037912;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-31T16:26:10.107062Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-31T16:26:10.107062Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-31T16:26:10.113167Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-31T16:26:10.113386Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037939;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-31T16:26:10.119392Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037935;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-31T16:26:10.125596Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-31T16:26:10.127317Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-31T16:26:10.131908Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037945;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-31T16:26:10.133144Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037937;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-31T16:26:10.138612Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037947;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-31T16:26:10.139250Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-31T16:26:10.144935Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-31T16:26:10.145432Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037951;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-31T16:26:10.151531Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037921;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-31T16:26:10.151966Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037925;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743438369988 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTi... 
(TRUNCATED) 2025-03-31T16:26:10.214737Z node 1 :REPLICATION_SERVICE DEBUG: [TransferWriter][1:7488010783126503104:3730] GetTableScheme: worker# [0:0:0] 2025-03-31T16:26:10.214793Z node 1 :REPLICATION_SERVICE DEBUG: [TransferWriter][1:7488010783126503104:3730] Handshake: worker# [1:7488010765946631221:2297] 2025-03-31T16:26:10.215141Z node 1 :REPLICATION_SERVICE DEBUG: [TransferWriter][1:7488010783126503104:3730] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindColumnTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-31T16:26:10.219225Z node 1 :REPLICATION_SERVICE DEBUG: [TransferWriter][1:7488010783126503104:3730] CompileTransferLambda: worker# [1:7488010765946631221:2297] 2025-03-31T16:26:14.096523Z node 1 :REPLICATION_SERVICE DEBUG: [TransferWriter][1:7488010783126503104:3730] Handle TEvPurecalcCompileResponse: result# |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> OlapEstimationRowsCorrectness::TPCH10 >> KqpIndexLookupJoin::InnerJoinOnlyRightColumn+StreamLookup >> KqpJoinOrder::FiveWayJoin+ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::JoinLeftPureCross [GOOD] Test command err: Trying to start YDB, gRPC: 29525, MsgBus: 28470 2025-03-31T16:26:09.607047Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010780652041456:2203];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:09.612030Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00195c/r3tmp/tmpUXO22a/pdisk_1.dat 2025-03-31T16:26:10.116393Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:26:10.123015Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:26:10.123142Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 29525, node 1 2025-03-31T16:26:10.124408Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:26:10.171944Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:26:10.171965Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:26:10.171972Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:26:10.173704Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28470 TClient is connected to server localhost:28470 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:26:10.681983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:26:10.701413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:26:10.861825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:11.054546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:11.130452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:13.059468Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010797831912262:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:13.059614Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:13.460592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:26:13.503751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:26:13.533302Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:26:13.562986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:26:13.639458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:26:13.693723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:26:13.750061Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010797831912780:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:13.750145Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:13.750440Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010797831912785:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:13.753966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:26:13.765622Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010797831912787:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:26:13.862003Z node 1 :TX_PROXY ERROR: Actor# [1:7488010797831912842:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:26:14.622491Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010780652041456:2203];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:14.622545Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpJoin::JoinConvert >> KqpIndexLookupJoin::CheckCastUtf8ToString+StreamLookupJoin+NotNull [GOOD] >> KqpSqlIn::SimpleKey_Negated [GOOD] >> KqpSqlIn::TupleParameter >> KqpSinkTx::OlapSnapshotROInteractive1 [GOOD] >> KqpSinkTx::OlapSnapshotROInteractive2 |92.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> KqpSqlIn::SecondaryIndex_PgKey >> KqpIndexLookupJoin::LeftSemiJoinWithLeftFilter+StreamLookup [GOOD] >> KqpNewEngine::DeferredEffects [GOOD] >> KqpNewEngine::Delete+UseSink >> KqpIndexLookupJoin::CheckCastInt32ToInt16+StreamLookupJoin+NotNull [GOOD] >> KqpJoin::RightTableIndexPredicate [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::CheckCastUtf8ToString+StreamLookupJoin+NotNull [GOOD] Test command err: Trying to start YDB, gRPC: 15828, MsgBus: 61255 2025-03-31T16:26:03.616586Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010754538823653:2133];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:03.616632Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001965/r3tmp/tmpC6XK4y/pdisk_1.dat 2025-03-31T16:26:04.116120Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:26:04.120878Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:26:04.120981Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:26:04.124649Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15828, node 1 2025-03-31T16:26:04.264172Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:26:04.264189Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:26:04.264203Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:26:04.264295Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61255 TClient is connected to server localhost:61255 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:26:05.130522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:05.145933Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:26:05.153552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:05.350688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:05.568692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:05.683610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:07.578946Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010771718694544:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:07.579075Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:07.992044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:26:08.037827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:26:08.112369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:26:08.153384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:26:08.192963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:26:08.278209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:26:08.340684Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010776013662363:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:08.340765Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:08.340956Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010776013662368:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:08.344486Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:26:08.356068Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-31T16:26:08.356240Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010776013662370:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:26:08.458695Z node 1 :TX_PROXY ERROR: Actor# [1:7488010776013662426:3458] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:26:08.634956Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010754538823653:2133];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:08.635005Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:26:09.521584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:26:09.610108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 64286, MsgBus: 25240 2025-03-31T16:26:11.162553Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488010789102069899:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:11.162587Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001965/r3tmp/tmpFzgJCS/pdisk_1.dat 2025-03-31T16:26:11.388468Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:26:11.401329Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:26:11.401425Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:26:11.404099Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64286, node 2 2025-03-31T16:26:11.527625Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:26:11.527650Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:26:11.527658Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:26:11.527765Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25240 TClient is connected to server localhost:25240 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:26:12.016630Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:12.021253Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:26:12.041854Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:12.128865Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:12.293137Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:12.360046Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:14.709407Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010801986973557:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:14.709502Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:14.777369Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:26:14.817605Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:26:14.855200Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:26:14.896862Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:26:14.972894Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:26:15.020187Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:26:15.091377Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010806281941367:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:15.091455Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:15.091725Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010806281941372:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:15.095255Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:26:15.112905Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488010806281941374:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:26:15.183917Z node 2 :TX_PROXY ERROR: Actor# [2:7488010806281941429:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:26:16.097620Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:26:16.167675Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488010789102069899:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:16.168316Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:26:16.196647Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 >> KqpJoinOrder::TPCDS94+ColumnStore >> KqpJoinOrder::CanonizedJoinOrderTPCH12 >> TopicService::OneConsumer_TheRangesOverlap [GOOD] |92.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> KqpNotNullColumns::JoinRightTableWithNotNullColumns+StreamLookup [GOOD] >> KqpNotNullColumns::JoinRightTableWithNotNullColumns-StreamLookup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftSemiJoinWithLeftFilter+StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 11238, MsgBus: 7747 2025-03-31T16:26:10.695385Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010781996694306:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:10.695508Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00195a/r3tmp/tmpnoYoEC/pdisk_1.dat 2025-03-31T16:26:11.227717Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:26:11.230084Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:26:11.230154Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:26:11.235058Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11238, node 1 2025-03-31T16:26:11.456139Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:26:11.456167Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:26:11.456177Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:26:11.456320Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7747 TClient is connected to server localhost:7747 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:26:12.140410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:12.159367Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:26:12.174001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:12.340288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:12.517903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:12.599160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:14.586101Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010799176565268:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:14.586218Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:14.885394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:26:14.921512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:26:14.954329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:26:14.998161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:26:15.033801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:26:15.107991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:26:15.211515Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010803471533088:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:15.211617Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:15.211932Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010803471533093:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:15.216652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:26:15.234200Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010803471533095:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:26:15.312300Z node 1 :TX_PROXY ERROR: Actor# [1:7488010803471533150:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:26:15.699987Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010781996694306:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:15.700106Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:26:16.459339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:26:16.500823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:26:16.566771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T16:26:16.611614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T16:26:16.684641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-31T16:26:16.734950Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::RightTableIndexPredicate [GOOD] Test command err: Trying to start YDB, gRPC: 26676, MsgBus: 3800 2025-03-31T16:26:10.791674Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010782217542116:2250];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:10.791986Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001959/r3tmp/tmpcHcHWX/pdisk_1.dat 2025-03-31T16:26:11.310777Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:26:11.310850Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:26:11.317316Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:26:11.347790Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26676, node 1 2025-03-31T16:26:11.501928Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2025-03-31T16:26:11.501954Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:26:11.501972Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:26:11.502107Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3800 TClient is connected to server localhost:3800 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:26:12.295171Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:12.310137Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:26:12.316556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:12.465777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:12.675566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:12.763001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:14.648405Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010799397412880:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:14.648530Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:14.973226Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:26:15.009131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:26:15.045547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:26:15.127686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:26:15.159227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:26:15.216648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:26:15.317302Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010803692380695:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:15.317367Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:15.317592Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010803692380700:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:15.321219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:26:15.338279Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010803692380702:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:26:15.437898Z node 1 :TX_PROXY ERROR: Actor# [1:7488010803692380758:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:26:15.795171Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010782217542116:2250];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:15.795232Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:26:16.402892Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 >> KqpJoinOrder::CanonizedJoinOrderTPCH1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::CheckCastInt32ToInt16+StreamLookupJoin+NotNull [GOOD] Test command err: Trying to start YDB, gRPC: 2096, MsgBus: 25651 2025-03-31T16:26:03.824177Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010755572320135:2079];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:03.826295Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001963/r3tmp/tmpmvVYY2/pdisk_1.dat 2025-03-31T16:26:04.421600Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:26:04.422100Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:26:04.426003Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:26:04.465466Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2096, node 1 2025-03-31T16:26:04.718434Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:26:04.718454Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:26:04.718463Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:26:04.718592Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25651 TClient is connected to server localhost:25651 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:26:05.442582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:05.463713Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:26:05.482995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:05.629803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:05.859164Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:05.964938Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:08.149526Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010777047158361:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:08.149670Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:08.443123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:26:08.485700Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:26:08.523869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:26:08.563321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:26:08.632462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:26:08.721133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:26:08.793590Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010777047158877:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:08.793700Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:08.794136Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010777047158882:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:08.798880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:26:08.822322Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010777047158884:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:26:08.824617Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010755572320135:2079];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:08.824702Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:26:08.915724Z node 1 :TX_PROXY ERROR: Actor# [1:7488010777047158942:3458] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:26:09.956435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:26:10.055343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 29359, MsgBus: 6627 2025-03-31T16:26:11.731860Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488010788499472612:2128];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:11.732808Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001963/r3tmp/tmpc9ZKUx/pdisk_1.dat 2025-03-31T16:26:11.982286Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:26:11.982358Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:26:11.982558Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:26:11.996912Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29359, node 2 2025-03-31T16:26:12.111687Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:26:12.111720Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:26:12.111728Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:26:12.111844Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6627 TClient is connected to server localhost:6627 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:26:12.672375Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:12.687524Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:12.702815Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:26:12.842523Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:13.032371Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:13.110200Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:15.702283Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010805679343495:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:15.702820Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:15.744228Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:26:15.780758Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:26:15.821136Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:26:15.854262Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:26:15.888770Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:26:15.970584Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:26:16.061025Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010809974311308:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:16.061124Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:16.061408Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010809974311313:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:16.065168Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:26:16.081372Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488010809974311315:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:26:16.177813Z node 2 :TX_PROXY ERROR: Actor# [2:7488010809974311370:3450] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:26:16.719170Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488010788499472612:2128];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:16.719423Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:26:17.138044Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:26:17.259210Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 >> KqpSnapshotRead::ReadOnlyTxCommitsOnConcurrentWrite-withSink [GOOD] >> KqpSnapshotRead::ReadOnlyTxWithIndexCommitsOnConcurrentWrite+withSink >> TopicService::DifferentConsumers_TheRangesOverlap >> KqpJoinOrder::TPCDS16-ColumnStore >> KqpNewEngine::DeleteOn-UseSink [GOOD] >> KqpNewEngine::DeleteWithBuiltin+UseSink |92.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_transfer_writer/unittest >> KqpRanges::DuplicateKeyPredicateMixed [GOOD] >> KqpRanges::DuplicateCompositeKeyPredicate |92.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> KqpIndexLookupJoin::CheckCastInt64ToUint64-StreamLookupJoin+NotNull [GOOD] >> KqpReturning::ReturningWorks+QueryService >> KqpJoinOrder::DatetimeConstantFold-ColumnStore >> KqpLimits::OutOfSpaceYQLUpsertFail-useSink [GOOD] >> KqpLimits::QSReplySize+useSink >> DemoTx::Scenario_2 [GOOD] >> KqpLimits::TooBigKey+useSink [GOOD] >> KqpLimits::TooBigKey-useSink >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt64_Reboot [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::CheckCastInt64ToUint64-StreamLookupJoin+NotNull [GOOD] Test command err: Trying to start YDB, gRPC: 22419, MsgBus: 26191 2025-03-31T16:26:07.002883Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010769742133760:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:07.005850Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00195e/r3tmp/tmpWWx6Dz/pdisk_1.dat 2025-03-31T16:26:07.631655Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:26:07.631743Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:26:07.635546Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:26:07.636653Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 
TServer::EnableGrpc on GrpcPort 22419, node 1 2025-03-31T16:26:07.866137Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:26:07.866169Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:26:07.866180Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:26:07.866319Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26191 TClient is connected to server localhost:26191 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:26:08.716769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:08.745942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:08.999304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:09.244357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:09.353572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:11.218546Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010786922004580:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:11.218657Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:11.532709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:26:11.576455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:26:11.631469Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:26:11.662921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:26:11.707185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:26:11.747349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:26:11.833898Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010786922005097:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:11.833963Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:11.834282Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010786922005102:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:11.837431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:26:11.848794Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010786922005104:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:26:11.949518Z node 1 :TX_PROXY ERROR: Actor# [1:7488010786922005159:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:26:12.001612Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010769742133760:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:12.001682Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:26:13.097464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:26:13.202065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 10563, MsgBus: 3557 2025-03-31T16:26:15.179286Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488010806854082048:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:15.179377Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00195e/r3tmp/tmpCbcC8H/pdisk_1.dat 2025-03-31T16:26:15.338503Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:26:15.364595Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:26:15.364673Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:26:15.366505Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10563, node 2 2025-03-31T16:26:15.422420Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:26:15.422444Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:26:15.422455Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:26:15.422605Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3557 TClient is connected to server localhost:3557 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:26:15.939598Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:15.947629Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:26:15.964097Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:16.048171Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:16.222276Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:16.298867Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:18.759259Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010819738985699:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:18.759370Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:18.824708Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:26:18.897340Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:26:18.933145Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:26:18.982963Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:26:19.031774Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:26:19.094159Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:26:19.149978Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010824033953510:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:19.150055Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:19.150238Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010824033953515:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:19.154204Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:26:19.169736Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488010824033953517:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:26:19.262405Z node 2 :TX_PROXY ERROR: Actor# [2:7488010824033953574:3446] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:26:20.164552Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488010806854082048:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:20.167612Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:26:20.267128Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:26:20.366977Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 >> KqpJoinOrder::TestJoinHint1+ColumnStore |92.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> DataShardVolatile::CompactedVolatileChangesCommit [GOOD] >> DataShardVolatile::CompactedVolatileChangesAbort >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32_Reboot [GOOD] >> KqpNewEngine::StreamLookupWithView [GOOD] >> KqpNewEngine::Truncated >> TPersQueueTest::CloseActiveWriteSessionOnClusterDisable [GOOD] >> TPersQueueTest::Cache >> DemoTx::Scenario_3 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt64_Reboot [GOOD] Test command err: 2025-03-31T16:21:42.808637Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:21:42.907413Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:21:42.928782Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:21:42.928980Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:21:42.935615Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:21:42.935807Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:21:42.936055Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:21:42.936177Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:21:42.936308Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:21:42.936458Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:21:42.936622Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:21:42.936720Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:21:42.936790Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:21:42.936870Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:21:42.936955Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:21:42.937029Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:21:42.973566Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:21:42.973711Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:21:42.973756Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:21:42.973947Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:21:42.974112Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:21:42.974183Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:21:42.974223Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:21:42.974325Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:21:42.974397Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:21:42.974449Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:21:42.974482Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:21:42.974680Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:21:42.974759Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:21:42.974820Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:21:42.974850Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:21:42.974968Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:21:42.975029Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:21:42.975068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:21:42.975130Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:21:42.975190Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:21:42.975224Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:21:42.975259Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:21:42.975320Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:21:42.975373Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:21:42.975411Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:21:42.975793Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=47; 2025-03-31T16:21:42.975984Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=143; 2025-03-31T16:21:42.976082Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=45; 2025-03-31T16:21:42.976174Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=46; 2025-03-31T16:21:42.976381Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:21:42.976455Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:21:42.976496Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:21:42.976704Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:21:42.976756Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:21:42.976800Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:21:42.976960Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:21:42.977005Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:21:42.977034Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-31T16:21:42.977234Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:21:42.977290Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:21:42.977322Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-31T16:21:42.977481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T16:21:42.977521Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T16:21:42.977586Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
LOB:0:2696];;column_id:8;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:55;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:56;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:57;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:58;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:64;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:65;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:66;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:67;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:68;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:69;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:70;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:71;blob_range:[NO_BLOB:0:2672];;column_id:8;chunk_idx:72;blob_range:[NO_BLOB:0:2664];;column_id:8;chunk_idx:73;blob_range:[NO_BLOB:0:8448];;column_id:9;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:8;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:9;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:10;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:11;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:12;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:13;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:14;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:15;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:22;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:23;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:30;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:32;blob_range:[NO_BL
OB:0:2680];;column_id:9;chunk_idx:33;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:34;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:35;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:36;blob_range:[NO_BLOB:0:8464];;column_id:9;chunk_idx:37;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:55;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:56;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:57;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:58;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:64;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:65;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:66;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:67;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:68;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:69;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:70;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:71;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:72;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:73;blob_range:[NO_BLOB:0:8448];;column_id:7;chunk_idx:0;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:1;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:2;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:3;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:4;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:5;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:6;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:7;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:8;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:9;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:10;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:11;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:12;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:13;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:14;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:15;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:16;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:17;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:18;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:19;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:20;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:21;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:22;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:23;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:24;blob_range:[NO_BLO
B:0:2744];;column_id:7;chunk_idx:25;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:26;blob_range:[NO_BLOB:0:9040];;column_id:7;chunk_idx:27;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:28;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:29;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:30;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:31;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:32;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:33;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:34;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:35;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:36;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:37;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:38;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:39;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:40;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:41;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:42;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:43;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:44;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:45;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:46;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:47;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:48;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:49;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:50;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:51;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:52;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:53;blob_range:[NO_BLOB:0:9024];;column_id:5;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:8;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:9;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:10;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:11;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:12;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:13;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:14;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:15;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:16;blob_range:[NO_BLOB:0:9456];;column_id:5;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:23;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:24;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:30;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:32;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:33;blob_range:[NO_BLOB:0:9448];;;;switched=(portion_id:61;path_id:1;records_count:25002;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2586528;index_
size:28;meta:((produced=SPLIT_COMPACTED;)););(portion_id:57;path_id:1;records_count:25002;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2167032;index_size:28;meta:((produced=INSERTED;)););; 2025-03-31T16:26:22.848688Z node 1 :TX_COLUMNSHARD INFO: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;tablet_id=9437184;parent_id=[1:11490:13117];fline=general_compaction.cpp:135;event=blobs_created;appended=1;switched=2; 2025-03-31T16:26:22.850977Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:11490:13117];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> KqpIndexLookupJoin::InnerJoinOnlyRightColumn+StreamLookup [GOOD] >> KqpIndexLookupJoin::InnerJoinOnlyRightColumn-StreamLookup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32_Reboot [GOOD] Test command err: 2025-03-31T16:22:06.028796Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:22:06.120718Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:22:06.145806Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:22:06.146086Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:22:06.154605Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:22:06.154755Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:22:06.154938Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:22:06.155019Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:22:06.155129Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:22:06.155218Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 
2025-03-31T16:22:06.155314Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:22:06.155387Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:22:06.155451Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:22:06.155526Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:22:06.155594Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:22:06.155683Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:22:06.178372Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:22:06.178490Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:22:06.178526Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:22:06.178712Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:22:06.178833Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:22:06.178884Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:22:06.178927Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:22:06.179013Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:22:06.179065Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:22:06.179126Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:22:06.179154Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:22:06.179382Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:22:06.179446Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:22:06.179480Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:22:06.179506Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:22:06.179626Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:22:06.179682Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:22:06.179724Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:22:06.179752Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:22:06.179833Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:22:06.179871Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:22:06.179908Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:22:06.179966Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:22:06.179995Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:22:06.180024Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:22:06.180440Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=46; 2025-03-31T16:22:06.180519Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=30; 2025-03-31T16:22:06.180583Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=25; 2025-03-31T16:22:06.180644Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=31; 2025-03-31T16:22:06.180799Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:22:06.180842Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:22:06.180893Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:22:06.181083Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:22:06.181126Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:22:06.181155Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:22:06.181324Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:22:06.181385Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:22:06.181418Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-31T16:22:06.181588Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:22:06.181617Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:22:06.181644Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-31T16:22:06.181741Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T16:22:06.181775Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T16:22:06.181817Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
BLOB:0:2696];;column_id:9;chunk_idx:9;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:10;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:11;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:12;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:13;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:14;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:15;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:23;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:30;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:31;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:32;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:33;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:34;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:35;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:36;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:37;blob_range:[NO_BLOB:0:8568];;column_id:9;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:55;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:56;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:57;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:58;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:59;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:60;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:64;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:65;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:66;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:67;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:68;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:69;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:70;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:71;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:72;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:73;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:74;blob_ra
nge:[NO_BLOB:0:2664];;column_id:9;chunk_idx:75;blob_range:[NO_BLOB:0:8552];; [serialized per-column chunk listing elided: repeated entries of the form column_id:<id>;chunk_idx:<n>;blob_range:[NO_BLOB:0:<size>] for column_ids 7, 1 and 5] ;;;;switched=(portion_id:55;path_id:1;records_count:25666;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2117976;index_
size:24;meta:((produced=INSERTED;)););(portion_id:54;path_id:1;records_count:25666;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2549080;index_size:24;meta:((produced=SPLIT_COMPACTED;)););; 2025-03-31T16:26:23.509520Z node 1 :TX_COLUMNSHARD INFO: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;tablet_id=9437184;parent_id=[1:11194:12821];fline=general_compaction.cpp:135;event=blobs_created;appended=1;switched=2; 2025-03-31T16:26:23.512708Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:11194:12821];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> KqpJoin::LeftJoinPushdownPredicate_Nulls >> KqpNewEngine::Delete+UseSink [GOOD] >> KqpNewEngine::Delete-UseSink >> KqpJoinOrder::TPCH12_100 >> KqpJoin::JoinConvert [GOOD] |92.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> KqpMergeCn::TopSortBy_Utf8_Limit2 >> KqpNotNullColumns::JoinRightTableWithNotNullColumns-StreamLookup [GOOD] >> KqpNotNullColumns::OptionalParametersDataQuery ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::JoinConvert [GOOD] Test command err: Trying to start YDB, gRPC: 6596, MsgBus: 16977 2025-03-31T16:26:17.605903Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010815250676286:2127];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:17.605972Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001950/r3tmp/tmpXGhE00/pdisk_1.dat 2025-03-31T16:26:18.268158Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:26:18.273148Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:26:18.273231Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:26:18.277084Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6596, node 1 2025-03-31T16:26:18.467631Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:26:18.467652Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:26:18.467667Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:26:18.467805Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16977 TClient is connected to server localhost:16977 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:26:19.321546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:19.348062Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:26:19.370022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:19.633272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:19.914961Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:20.022511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:22.164141Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010836725514503:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:22.164292Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:22.467230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:26:22.510164Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:26:22.561420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:26:22.607459Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010815250676286:2127];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:22.607739Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:26:22.615510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:26:22.663054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:26:22.743774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:26:22.855630Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010836725515027:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:22.855731Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:22.859416Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010836725515032:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:22.863872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:26:22.878109Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010836725515034:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:26:22.956240Z node 1 :TX_PROXY ERROR: Actor# [1:7488010836725515089:3458] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:26:24.065396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:26:24.152685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:26:24.202261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T16:26:24.604660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 >> KqpSqlIn::TupleParameter [GOOD] >> KqpSqlIn::TupleLiteral >> KqpNewEngine::DeleteWithBuiltin+UseSink [GOOD] >> KqpNewEngine::DeleteWithBuiltin-UseSink >> KqpJoinOrder::FiveWayJoinWithPredsAndEquiv+ColumnStore >> KqpSqlIn::SecondaryIndex_PgKey [GOOD] >> KqpSqlIn::SecondaryIndex_SimpleKey >> KqpJoin::IdxLookupLeftPredicate >> KqpSnapshotRead::ReadOnlyTxWithIndexCommitsOnConcurrentWrite+withSink [GOOD] >> KqpNewEngine::Truncated [GOOD] >> KqpNewEngine::StaleRO_Immediate >> TopicService::DifferentConsumers_TheRangesOverlap [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::ReadOnlyTxWithIndexCommitsOnConcurrentWrite+withSink [GOOD] Test command err: Trying to start YDB, gRPC: 1183, MsgBus: 23915 2025-03-31T16:26:12.531488Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010791052613570:2080];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:12.536073Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00128d/r3tmp/tmp3pKNgI/pdisk_1.dat 2025-03-31T16:26:13.046330Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:26:13.065078Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:26:13.065197Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:26:13.066938Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1183, node 1 2025-03-31T16:26:13.269808Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:26:13.269841Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:26:13.269851Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:26:13.270023Z node 1 :NET_CLASSIFIER ERROR: got bad distributable 
configuration TClient is connected to server localhost:23915 TClient is connected to server localhost:23915 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:26:13.965329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:13.985042Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:26:13.999864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:14.197549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:14.378209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:14.455274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:16.289208Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010808232484497:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:16.289317Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:16.612923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:26:16.668192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:26:16.710675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:26:16.765256Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:26:16.816775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:26:16.903024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:26:17.009118Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010812527452317:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:17.009224Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:17.009475Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010812527452322:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:17.014492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:26:17.032046Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010812527452324:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:26:17.137970Z node 1 :TX_PROXY ERROR: Actor# [1:7488010812527452379:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:26:17.531835Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010791052613570:2080];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:17.531943Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 32435, MsgBus: 2694 2025-03-31T16:26:20.470340Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488010825772097480:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:20.470388Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00128d/r3tmp/tmpWExWVM/pdisk_1.dat 2025-03-31T16:26:20.726640Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:26:20.742572Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:26:20.742660Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:26:20.744235Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32435, node 2 2025-03-31T16:26:20.816018Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:26:20.816044Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:26:20.816052Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:26:20.816173Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2694 TClient is connected to server localhost:2694 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:26:21.368378Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:21.391193Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:26:21.417823Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:21.535910Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:21.695017Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:21.777593Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:24.204087Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010842951968426:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:24.204156Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:24.254673Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:26:24.350024Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:26:24.425584Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:26:24.469639Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:26:24.520368Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:26:24.600724Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:26:24.692932Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010842951968950:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:24.693014Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:24.693485Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010842951968955:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:24.696865Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:26:24.708363Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488010842951968957:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:26:24.773141Z node 2 :TX_PROXY ERROR: Actor# [2:7488010842951969011:3451] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:26:25.471345Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488010825772097480:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:25.471431Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:26:25.917797Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:26:25.995461Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-31T16:26:26.068709Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 >> TopicService::UnknownConsumer >> KqpReturning::ReturningWorks+QueryService [GOOD] >> KqpReturning::ReturningWorks-QueryService >> KqpRanges::DuplicateCompositeKeyPredicate [GOOD] >> KqpRanges::DeleteNotFullScan-UseSink >> SystemView::AuthGroupMembers_ResultOrder [GOOD] >> SystemView::AuthGroupMembers_TableRange >> KqpIndexLookupJoin::InnerJoinOnlyRightColumn-StreamLookup [GOOD] >> KqpNewEngine::Delete-UseSink [GOOD] >> KqpNewEngine::DecimalColumn >> KqpJoin::LeftJoinPushdownPredicate_Nulls [GOOD] |92.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> KqpLimits::TooBigKey-useSink [GOOD] >> KqpMergeCn::TopSortBy_Utf8_Limit2 [GOOD] >> KqpNewEngine::Aggregate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::InnerJoinOnlyRightColumn-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 10338, MsgBus: 23318 2025-03-31T16:26:17.287199Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010815486960374:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:17.287284Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001954/r3tmp/tmpojlHsv/pdisk_1.dat 2025-03-31T16:26:17.816896Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:26:17.821166Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:26:17.821261Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:26:17.823447Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10338, node 1 2025-03-31T16:26:17.970142Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, 
broken or outdated, will use file: (empty maybe) 2025-03-31T16:26:17.970162Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:26:17.970169Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:26:17.970267Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23318 TClient is connected to server localhost:23318 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:26:18.616844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:18.647907Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:18.670166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:26:18.893574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:19.089926Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:19.178954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:21.193754Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010832666831342:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:21.193880Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:21.470173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:26:21.514951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:26:21.565169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:26:21.613760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:26:21.699762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:26:21.790727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:26:21.900346Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010832666831868:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:21.900423Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:21.900738Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010832666831873:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:21.904956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:26:21.920467Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010832666831875:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:26:21.992937Z node 1 :TX_PROXY ERROR: Actor# [1:7488010832666831929:3457] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:26:22.287218Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010815486960374:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:22.287290Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:26:23.404995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:26:23.471909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:26:23.512055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T16:26:23.551215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T16:26:23.602204Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-31T16:26:23.643724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 30940, MsgBus: 24099 2025-03-31T16:26:25.800917Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488010847619654162:2211];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:25.820115Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001954/r3tmp/tmpS7dzKF/pdisk_1.dat 2025-03-31T16:26:25.945455Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:26:25.989375Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:26:25.989463Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:26:25.996031Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30940, node 2 2025-03-31T16:26:26.167746Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:26:26.167776Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:26:26.167785Z node 2 
:NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:26:26.167912Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24099 TClient is connected to server localhost:24099 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:26:27.157637Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:27.197503Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:27.318648Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:27.592707Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:27.703527Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:30.184285Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010869094492244:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:30.184363Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:30.262242Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:26:30.341188Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:26:30.410274Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:26:30.460083Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:26:30.503684Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:26:30.555648Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:26:30.634347Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010869094492761:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:30.634441Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:30.634662Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010869094492766:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:30.639324Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:26:30.656332Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2025-03-31T16:26:30.656557Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488010869094492768:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:26:30.673399Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488010847619654162:2211];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:30.673476Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:26:30.729609Z node 2 :TX_PROXY ERROR: Actor# [2:7488010869094492823:3450] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:26:31.876078Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:26:31.957849Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-31T16:26:31.997770Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-31T16:26:32.098881Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-03-31T16:26:32.147184Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-03-31T16:26:32.178238Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::LeftJoinPushdownPredicate_Nulls [GOOD] Test command err: Trying to start YDB, gRPC: 6389, MsgBus: 2774 2025-03-31T16:26:25.990943Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010846656750555:2267];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:25.991020Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00193e/r3tmp/tmpOXCDeY/pdisk_1.dat 2025-03-31T16:26:26.610083Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:26:26.614363Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:26:26.618609Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:26:26.624378Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6389, node 1 2025-03-31T16:26:26.847558Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2025-03-31T16:26:26.847577Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:26:26.847584Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:26:26.847675Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2774 TClient is connected to server localhost:2774 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:26:27.798922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:27.812022Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:26:27.825366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:28.041948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:28.295116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:28.402567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:30.341846Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010868131588597:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:30.341939Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:30.594848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:26:30.634410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:26:30.668782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:26:30.695929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:26:30.738017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:26:30.816584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:26:30.876845Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010868131589114:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:30.877126Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:30.877381Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010868131589119:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:30.881273Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:26:30.895971Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010868131589121:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:26:30.985797Z node 1 :TX_PROXY ERROR: Actor# [1:7488010868131589176:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:26:30.995239Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010846656750555:2267];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:30.995652Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:26:32.148060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:26:32.179839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:26:32.216252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 >> SystemView::AuthPermissions [GOOD] >> SystemView::AuthPermissions_Access >> KqpNotNullColumns::OptionalParametersDataQuery [GOOD] >> KqpNotNullColumns::OptionalParametersScanQuery >> DataShardVolatile::CompactedVolatileChangesAbort [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCH20 >> KqpNewEngine::DeleteWithBuiltin-UseSink [GOOD] >> KqpNewEngine::DeleteON >> TPersQueueTest::SetupLockSession [GOOD] >> TPersQueueTest::StreamReadCreateAndDestroyMsgs >> KqpJoinOrder::GeneralPrioritiesBug2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::TooBigKey-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 18533, MsgBus: 5467 2025-03-31T16:20:13.090959Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488009252511013937:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:13.091058Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a21/r3tmp/tmpt7ATIt/pdisk_1.dat 2025-03-31T16:20:13.348872Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18533, node 1 2025-03-31T16:20:13.399858Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:20:13.399900Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:20:13.399917Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:20:13.400100Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:20:13.440843Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:20:13.440984Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2025-03-31T16:20:13.443124Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5467 TClient is connected to server localhost:5467 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:20:13.867603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:13.900676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:14.033836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:14.183036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:14.259898Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:15.989432Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009261100950320:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:15.989560Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:16.300992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:20:16.327559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:20:16.354053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:20:16.384358Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:20:16.413820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:20:16.445214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:20:16.486720Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009265395918125:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:16.486783Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009265395918130:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:16.486801Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:16.490477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:20:16.502602Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488009265395918132:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:20:16.579981Z node 1 :TX_PROXY ERROR: Actor# [1:7488009265395918187:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:20:17.282911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:18.090584Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488009252511013937:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:18.090642Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:20:28.349061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:20:28.349093Z node 1 :IMPORT WARN: Table profiles were not loaded Trying to start YDB, gRPC: 15038, MsgBus: 18042 2025-03-31T16:23:48.496183Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488010175182123688:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:23:48.496285Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a21/r3tmp/tmpMoNraz/pdisk_1.dat 2025-03-31T16:23:48.649208Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:23:48.682563Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:23:48.682730Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:23:48.684277Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15038, node 2 2025-03-31T16:23:48.741804Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:23:48.741841Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:23:48.741875Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:23:48.742044Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18042 TClient is connected to server localhost:18042 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:23:49.305798Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:23:49.327805Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:23:49.424201Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:23:49.645216Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchem ... 26:21.759481Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7488010832163477134:2502], Table: `/Root/Test` ([72057594046644480:9:1]), SessionActorId: [3:7488010832163477089:2502]Got BAD REQUEST for table `/Root/Test`. ShardID=72075186224037914, Sink=[3:7488010832163477134:2502].{
: Error: Cannot parse tx 3. BAD_ARGUMENT: Row key size of 2097156 bytes is larger than the allowed threshold 1049600 at tablet# 72075186224037914, code: 2017 } 2025-03-31T16:26:21.760135Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7488010832163477125:2502], SessionActorId: [3:7488010832163477089:2502], statusCode=BAD_REQUEST. Issue=
: Error: Bad request. Table: `/Root/Test`., code: 2017
: Error: Cannot parse tx 3. BAD_ARGUMENT: Row key size of 2097156 bytes is larger than the allowed threshold 1049600 at tablet# 72075186224037914, code: 2017 . sessionActorId=[3:7488010832163477089:2502]. isRollback=0 2025-03-31T16:26:21.920256Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NjllOGRiZTctNWRkNGRiMDQtZmY1YjIyNzYtMjZmOGE1MDQ=, ActorId: [3:7488010832163477089:2502], ActorState: ExecuteState, TraceId: 01jqpgd0g67q99ewqchdg8fq63, got TEvKqpBuffer::TEvError in ExecuteState, status: BAD_REQUEST send to: [3:7488010832163477126:2502] from: [3:7488010832163477125:2502] 2025-03-31T16:26:21.920417Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7488010832163477126:2502] TxId: 281474976710671. Ctx: { TraceId: 01jqpgd0g67q99ewqchdg8fq63, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NjllOGRiZTctNWRkNGRiMDQtZmY1YjIyNzYtMjZmOGE1MDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. BAD_REQUEST: {
: Error: Bad request. Table: `/Root/Test`., code: 2017 subissue: {
: Error: Cannot parse tx 3. BAD_ARGUMENT: Row key size of 2097156 bytes is larger than the allowed threshold 1049600 at tablet# 72075186224037914, code: 2017 } } 2025-03-31T16:26:21.921702Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NjllOGRiZTctNWRkNGRiMDQtZmY1YjIyNzYtMjZmOGE1MDQ=, ActorId: [3:7488010832163477089:2502], ActorState: ExecuteState, TraceId: 01jqpgd0g67q99ewqchdg8fq63, Create QueryResponse for error on request, msg:
: Error: Bad request. Table: `/Root/Test`., code: 2017
: Error: Cannot parse tx 3. BAD_ARGUMENT: Row key size of 2097156 bytes is larger than the allowed threshold 1049600 at tablet# 72075186224037914, code: 2017 Trying to start YDB, gRPC: 7855, MsgBus: 3346 2025-03-31T16:26:23.692667Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488010838704906026:2128];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:23.692716Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a21/r3tmp/tmpnvI089/pdisk_1.dat 2025-03-31T16:26:24.127664Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:26:24.155376Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:26:24.155568Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:26:24.157327Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7855, node 4 2025-03-31T16:26:24.347780Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:26:24.347806Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:26:24.347816Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:26:24.347993Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3346 TClient is connected to server localhost:3346 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:26:25.374184Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:25.390645Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:26:25.406458Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:26:25.576568Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:25.926903Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:26.060068Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:28.699241Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7488010838704906026:2128];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:28.699383Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:26:30.744340Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488010868769678813:2412], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:30.744460Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:30.833726Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:26:30.907618Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:26:31.005169Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:26:31.094640Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:26:31.162392Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:26:31.279253Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:26:31.391074Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488010873064646654:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:31.391190Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:31.392370Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488010873064646659:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:31.397043Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:26:31.422731Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7488010873064646661:2467], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:26:31.526377Z node 4 :TX_PROXY ERROR: Actor# [4:7488010873064646718:3464] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:26:33.904093Z node 4 :TX_DATASHARD ERROR: Operation [0:281474976715671] writes key of 2097156 bytes which exceeds limit 1049600 bytes at 72075186224037914 2025-03-31T16:26:33.904282Z node 4 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976715671 at tablet 72075186224037914 status: BAD_REQUEST errors: BAD_ARGUMENT (Operation [0:281474976715671] writes key of 2097156 bytes which exceeds limit 1049600 bytes at 72075186224037914) | 2025-03-31T16:26:33.941633Z node 4 :KQP_EXECUTER ERROR: ActorId: [4:7488010881654581609:2499] TxId: 281474976715671. Ctx: { TraceId: 01jqpgdcbvfazxd60t7fbft264, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=MTFjNWU0NzMtMjBiZWUxZTMtMmZlNDEwMDItN2ZjZGRiYjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. BAD_REQUEST: [BAD_ARGUMENT] Operation [0:281474976715671] writes key of 2097156 bytes which exceeds limit 1049600 bytes at 72075186224037914; 2025-03-31T16:26:33.941997Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MTFjNWU0NzMtMjBiZWUxZTMtMmZlNDEwMDItN2ZjZGRiYjU=, ActorId: [4:7488010881654581573:2499], ActorState: ExecuteState, TraceId: 01jqpgdcbvfazxd60t7fbft264, Create QueryResponse for error on request, msg:
: Error: Bad request., code: 2017
: Error: [BAD_ARGUMENT] Operation [0:281474976715671] writes key of 2097156 bytes which exceeds limit 1049600 bytes at 72075186224037914 >> KqpJoin::FullOuterJoinSizeCheck >> KqpTx::TooManyTx >> KqpNewEngine::StaleRO_Immediate [GOOD] >> KqpNewEngine::UnionAllPure ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_volatile/unittest >> DataShardVolatile::CompactedVolatileChangesAbort [GOOD] Test command err: 2025-03-31T16:23:04.005450Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:23:04.005708Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:23:04.005883Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d5c/r3tmp/tmp7SwTok/pdisk_1.dat 2025-03-31T16:23:04.493919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:23:04.541372Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:23:04.582880Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-31T16:23:04.585212Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:23:04.585799Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:23:04.585982Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-03-31T16:23:04.598769Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:23:04.780816Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-03-31T16:23:04.780923Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-03-31T16:23:04.782287Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:645:2553] 2025-03-31T16:23:04.906910Z node 1 :TX_PROXY DEBUG: Actor# [1:645:2553] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-03-31T16:23:04.907057Z node 1 :TX_PROXY DEBUG: Actor# [1:645:2553] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T16:23:04.908663Z node 1 :TX_PROXY DEBUG: Actor# [1:645:2553] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-03-31T16:23:04.908768Z node 1 :TX_PROXY DEBUG: Actor# [1:645:2553] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:23:04.909208Z node 1 :TX_PROXY DEBUG: Actor# [1:645:2553] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:23:04.909388Z node 1 :TX_PROXY DEBUG: Actor# [1:645:2553] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 1000 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T16:23:04.909533Z node 1 :TX_PROXY DEBUG: Actor# 
[1:645:2553] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-03-31T16:23:04.913402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:23:04.914448Z node 1 :TX_PROXY DEBUG: Actor# [1:645:2553] txid# 281474976715657 HANDLE EvClientConnected 2025-03-31T16:23:04.915157Z node 1 :TX_PROXY DEBUG: Actor# [1:645:2553] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-03-31T16:23:04.915256Z node 1 :TX_PROXY DEBUG: Actor# [1:645:2553] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-03-31T16:23:04.956017Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:661:2568], Recipient [1:670:2574]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:23:04.957183Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:661:2568], Recipient [1:670:2574]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:23:04.957758Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:670:2574] 2025-03-31T16:23:04.958049Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:23:04.971538Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:661:2568], Recipient [1:670:2574]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:23:05.004795Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:23:05.004954Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:23:05.027724Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:23:05.027837Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:23:05.027893Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:23:05.030296Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:23:05.030452Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:23:05.030521Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:686:2574] in generation 1 2025-03-31T16:23:05.041185Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:23:05.076440Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T16:23:05.077803Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:23:05.077941Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:688:2584] 2025-03-31T16:23:05.077972Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:23:05.077997Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T16:23:05.078026Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:23:05.078172Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:670:2574], Recipient [1:670:2574]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
2025-03-31T16:23:05.078940Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:23:05.080175Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T16:23:05.080290Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T16:23:05.080406Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:23:05.080432Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:23:05.080472Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-31T16:23:05.080498Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-31T16:23:05.080519Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-31T16:23:05.080541Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:23:05.080569Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:23:05.081755Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:675:2576], Recipient [1:670:2574]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:23:05.081787Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:23:05.081820Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2571], serverId# [1:675:2576], sessionId# [0:0:0] 2025-03-31T16:23:05.081891Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:675:2576] 2025-03-31T16:23:05.081923Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T16:23:05.082043Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:23:05.082374Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-31T16:23:05.082436Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T16:23:05.082569Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T16:23:05.082630Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-31T16:23:05.082668Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-31T16:23:05.082705Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-03-31T16:23:05.082734Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-31T16:23:05.082994Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-31T16:23:05.083028Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-31T16:23:05.083060Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-31T16:23:05.083123Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 
72075186224037888 on unit FinishPropose 2025-03-31T16:23:05.083169Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-31T16:23:05.083190Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-31T16:23:05.083213Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-31T16:23:05.083235Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-31T16:23:05.083252Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-31T16:23:05.084394Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:689:2585], Recipient [1:670:2574]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-31T16:23:05.084429Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:23:05.095206Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Compl ... nected at leader tablet# 72075186224037888, clientId# [26:981:2787], serverId# [26:982:2788], sessionId# [0:0:0] 2025-03-31T16:26:35.390585Z node 26 :PIPE_CLIENT DEBUG: TClient[72075186224037888] connected with status OK role: Leader [26:981:2787] 2025-03-31T16:26:35.390700Z node 26 :PIPE_CLIENT DEBUG: TClient[72075186224037888] send queued [26:981:2787] 2025-03-31T16:26:35.390790Z node 26 :PIPE_CLIENT DEBUG: TClient[72075186224037888] push event to server [26:981:2787] 2025-03-31T16:26:35.390919Z node 26 :PIPE_CLIENT DEBUG: TClient[72075186224037888] shutdown pipe due to pending shutdown request [26:981:2787] 2025-03-31T16:26:35.391016Z node 26 :PIPE_CLIENT DEBUG: TClient[72075186224037888] notify reset [26:981:2787] 2025-03-31T16:26:35.391326Z node 26 :TX_DATASHARD TRACE: StateWork, received event# 269553210, Sender [26:980:2786], Recipient [26:696:2585]: NKikimrTxDataShard.TEvCompactTable PathId { OwnerId: 72057594046644480 LocalId: 2 } CompactBorrowed: false 2025-03-31T16:26:35.391499Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:21} Tx{28, NKikimr::NDataShard::TDataShard::TTxCompactTable} queued, type NKikimr::NDataShard::TDataShard::TTxCompactTable 2025-03-31T16:26:35.391647Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:21} Tx{28, NKikimr::NDataShard::TDataShard::TTxCompactTable} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-31T16:26:35.391806Z node 26 :TABLET_EXECUTOR DEBUG: TCompactionLogic PrepareForceCompaction for 72075186224037888 table 1001, mode Full, forced state None, forced mode Full 2025-03-31T16:26:35.392026Z node 26 :TX_DATASHARD INFO: Started background compaction# 1 of 72075186224037888 tableId# 2 localTid# 1001, requested from [26:980:2786], partsCount# 0, memtableSize# 656, memtableWaste# 3952, memtableRows# 2 2025-03-31T16:26:35.392210Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:21} Tx{28, NKikimr::NDataShard::TDataShard::TTxCompactTable} hope 1 -> done Change{16, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-03-31T16:26:35.392366Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:21} Tx{28, NKikimr::NDataShard::TDataShard::TTxCompactTable} release 4194304b of static, Memory{0 dyn 0} 2025-03-31T16:26:35.392717Z node 26 :TABLET_EXECUTOR DEBUG: TGenCompactionStrategy PrepareCompaction for 72075186224037888: task 1, 
edge 9223372036854775807/0, generation 0 2025-03-31T16:26:35.392829Z node 26 :TABLET_EXECUTOR INFO: Leader{72075186224037888:1:21} starting compaction 2025-03-31T16:26:35.393261Z node 26 :TABLET_EXECUTOR INFO: Leader{72075186224037888:1:22} starting Scan{1 on 1001, Compact{72075186224037888.1.21, eph 1}} 2025-03-31T16:26:35.393429Z node 26 :TABLET_EXECUTOR INFO: Leader{72075186224037888:1:22} started compaction 1 2025-03-31T16:26:35.393524Z node 26 :TABLET_EXECUTOR DEBUG: TGenCompactionStrategy PrepareCompaction for 72075186224037888 started compaction 1 generation 0 ... blocking NKikimr::TEvBlobStorage::TEvPut from TABLET_REQ_WRITE_LOG to BS_PROXY_ACTOR 2025-03-31T16:26:35.397239Z node 26 :TABLET_EXECUTOR INFO: Leader{72075186224037888:1:22} Compact 1 on TGenCompactionParams{1001: gen 0 epoch +inf, 0 parts} step 21, product {tx status + 1 parts epoch 2} done 2025-03-31T16:26:35.397582Z node 26 :TABLET_EXECUTOR DEBUG: TGenCompactionStrategy CompactionFinished for 72075186224037888: compaction 1, generation 0 2025-03-31T16:26:35.397760Z node 26 :TABLET_EXECUTOR DEBUG: TGenCompactionStrategy CheckGeneration for 72075186224037888 generation 1, state Free, final id 0, final level 0 2025-03-31T16:26:35.397842Z node 26 :TABLET_EXECUTOR DEBUG: TGenCompactionStrategy CheckGeneration for 72075186224037888 generation 3, state Free, final id 0, final level 0 2025-03-31T16:26:35.398802Z node 26 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 1, ts 1970-01-01T00:00:01.537831Z 2025-03-31T16:26:35.399037Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:23} Tx{29, NKikimr::NDataShard::TDataShard::TTxPersistFullCompactionTs} queued, type NKikimr::NDataShard::TDataShard::TTxPersistFullCompactionTs 2025-03-31T16:26:35.399203Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:23} Tx{29, NKikimr::NDataShard::TDataShard::TTxPersistFullCompactionTs} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-31T16:26:35.399336Z node 26 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 1, front# 1 2025-03-31T16:26:35.399469Z node 26 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001 sending TEvCompactTableResult to# [26:980:2786]pathId# [OwnerId: 72057594046644480, LocalPathId: 2] 2025-03-31T16:26:35.400327Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:23} Tx{29, NKikimr::NDataShard::TDataShard::TTxPersistFullCompactionTs} hope 1 -> done Change{17, redo 83b alter 0b annex 0, ~{ 27 } -{ }, 0 gb} 2025-03-31T16:26:35.400500Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:23} Tx{29, NKikimr::NDataShard::TDataShard::TTxPersistFullCompactionTs} release 4194304b of static, Memory{0 dyn 0} ... blocking NKikimr::TEvBlobStorage::TEvPut from TABLET_REQ_WRITE_LOG to BS_PROXY_ACTOR ... blocking NKikimr::TEvBlobStorage::TEvPut from TABLET_REQ_WRITE_LOG to BS_PROXY_ACTOR ========= Starting an immediate read ========= 2025-03-31T16:26:35.600718Z node 26 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqpgde0pe7c99g4nwfwpq3ea, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=26&id=NmU3ZTYzOTMtOWZmOTFhNDEtOTFlNTdhMDQtOWIwNWM0ZTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-31T16:26:35.602639Z node 26 :PIPE_CLIENT DEBUG: TClient[72075186224037888] send [26:914:2733] 2025-03-31T16:26:35.602769Z node 26 :PIPE_CLIENT DEBUG: TClient[72075186224037888] push event to server [26:914:2733] 2025-03-31T16:26:35.603520Z node 26 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [26:1006:2794], Recipient [26:696:2585]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false KeysSize: 1 2025-03-31T16:26:35.603817Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:24} Tx{30, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} queued, type NKikimr::NDataShard::TDataShard::TTxReadViaPipeline 2025-03-31T16:26:35.604002Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:24} Tx{30, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-31T16:26:35.604207Z node 26 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-03-31T16:26:35.604321Z node 26 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v1538/281474976715662 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-03-31T16:26:35.604438Z node 26 :TX_DATASHARD TRACE: 72075186224037888 changed HEAD read to non-repeatable v1538/18446744073709551615 2025-03-31T16:26:35.604590Z node 26 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit CheckRead 2025-03-31T16:26:35.604813Z node 26 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-03-31T16:26:35.604924Z node 26 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit CheckRead 2025-03-31T16:26:35.605041Z node 26 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-03-31T16:26:35.605132Z node 26 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit BuildAndWaitDependencies 2025-03-31T16:26:35.605211Z node 26 :TX_DATASHARD TRACE: Activated operation [0:5] at 72075186224037888 2025-03-31T16:26:35.605304Z node 26 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-03-31T16:26:35.605336Z node 26 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-03-31T16:26:35.605364Z node 26 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit ExecuteRead 2025-03-31T16:26:35.605392Z node 26 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit ExecuteRead 2025-03-31T16:26:35.605586Z node 26 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false } 2025-03-31T16:26:35.605938Z node 26 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is DelayComplete 2025-03-31T16:26:35.606003Z node 26 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit ExecuteRead 2025-03-31T16:26:35.606103Z node 26 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit CompletedOperations 2025-03-31T16:26:35.606182Z node 26 :TX_DATASHARD TRACE: Trying to execute [0:5] at 
72075186224037888 on unit CompletedOperations 2025-03-31T16:26:35.606243Z node 26 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-03-31T16:26:35.606272Z node 26 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit CompletedOperations 2025-03-31T16:26:35.606329Z node 26 :TX_DATASHARD TRACE: Execution plan for [0:5] at 72075186224037888 has finished 2025-03-31T16:26:35.606438Z node 26 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-03-31T16:26:35.606633Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:24} Tx{30, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} hope 1 -> done Change{18, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-03-31T16:26:35.606804Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:24} Tx{30, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} release 4194304b of static, Memory{0 dyn 0} 2025-03-31T16:26:35.672771Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037889:1:21} commited cookie 8 for step 20 2025-03-31T16:26:35.698369Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037889:1:22} commited cookie 8 for step 21 2025-03-31T16:26:35.723620Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:18} Tx{20, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} queued, type NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep 2025-03-31T16:26:35.723848Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:18} Tx{20, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-31T16:26:35.724231Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:18} Tx{20, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} hope 1 -> done Change{12, redo 134b alter 0b annex 0, ~{ 2 } -{ }, 0 gb} 2025-03-31T16:26:35.724409Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:18} Tx{20, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2025-03-31T16:26:35.725058Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:19} commited cookie 1 for step 18 2025-03-31T16:26:35.725283Z node 26 :PIPE_CLIENT DEBUG: TClient[72057594046382081] send [26:552:2485] 2025-03-31T16:26:35.725381Z node 26 :PIPE_CLIENT DEBUG: TClient[72057594046382081] push event to server [26:552:2485] 2025-03-31T16:26:35.750260Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037889:1:23} commited cookie 8 for step 22 2025-03-31T16:26:35.794935Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037889:1:24} commited cookie 8 for step 23 2025-03-31T16:26:35.819674Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037889:1:25} commited cookie 8 for step 24 >> KqpJoin::IdxLookupLeftPredicate [GOOD] |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_transfer_writer/unittest |92.5%| [TA] $(B)/ydb/core/tx/datashard/ut_volatile/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::GeneralPrioritiesBug2 [GOOD] Test command err: Trying to start YDB, gRPC: 24960, MsgBus: 29681 2025-03-31T16:25:54.629836Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010715128151009:2200];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:54.631426Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001974/r3tmp/tmplJ2DG4/pdisk_1.dat 2025-03-31T16:25:54.995660Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:25:55.016153Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:55.016244Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:55.018227Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24960, node 1 2025-03-31T16:25:55.190867Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:25:55.190900Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:25:55.190907Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:25:55.191045Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29681 TClient is connected to server localhost:29681 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:25:55.845805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:55.871380Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:25:58.015039Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010732308020728:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:58.015063Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010732308020720:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:58.015167Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:58.019275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:25:58.035725Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010732308020734:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:25:58.143005Z node 1 :TX_PROXY ERROR: Actor# [1:7488010732308020785:2338] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:25:58.514829Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:25:58.659740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:25:58.696357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:25:58.736199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:25:58.772824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:25:58.923805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:25:58.963480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:25:58.995904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:25:59.033580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-31T16:25:59.062044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-03-31T16:25:59.133910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-03-31T16:25:59.188005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:25:59.258288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:25:59.601787Z node 1 :METADATA_PROVIDER ERROR: 
fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010715128151009:2200];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:59.601860Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:25:59.924802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-03-31T16:25:59.972984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T16:26:00.009257Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-31T16:26:00.051942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-31T16:26:00.091621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-31T16:26:00.131609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-31T16:26:00.172111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-31T16:26:00.202131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-31T16:26:00.230179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-03-31T16:26:00.305241Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-31T16:26:00.343711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-31T16:26:00.375989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-03-31T16:26:00.412674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-03-31T16:26:00.455795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-03-31T16:26:00.515014Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-03-31T16:26:00.586378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-03-31T16:26:00.621974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTa ... tablet_id=72075186224038444;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:29.783614Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038547;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:29.788980Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038426;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:29.788980Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038470;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:29.794954Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038476;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:29.794954Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038440;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:29.800760Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038527;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:29.800760Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038460;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:29.806497Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038482;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:29.807835Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038477;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:29.812995Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038448;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:29.813053Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038436;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:29.818785Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038503;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:29.818790Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038438;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:29.824599Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038478;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:29.824599Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038519;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:29.830527Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038432;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:29.830527Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038446;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:29.836259Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038474;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:29.836259Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038468;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:29.841815Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038486;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:29.842028Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038434;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:29.847695Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038430;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:29.850198Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038458;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:29.853500Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038652;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:29.855546Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038454;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:29.859101Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038525;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:29.860671Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:29.864564Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038456;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:29.866120Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038464;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:29.870178Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038480;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:29.871710Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038472;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:29.876254Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038442;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:29.876599Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038507;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:29.881886Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038484;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:29.883795Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038466;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:29.887333Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038428;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:29.890262Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038450;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:29.893342Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038529;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:29.896177Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038517;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:29.899319Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038549;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:29.901599Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038541;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:29.908204Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038462;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:29.912608Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038569;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:29.914621Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038452;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:29.918313Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038537;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:29.922940Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038561;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:26:30.039356Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpgcde48hmdb1hgf3tezt92", SessionId: ydb://session/3?node_id=1&id=YTU0MGEwNC02MTcyMzA2OC0zZTViZjU2MC1lZDM5OWIxNA==, Slow query, duration: 27.986555s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b
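For readability, the statement text logged (escaped) in the KQP_SLOW_LOG entry above is reproduced here with its \n escapes expanded. This is a verbatim rendering of the logged YQL DDL only; nothing new is introduced:

CREATE TABLE t1 (
 id1 Int32 NOT NULL,
 PRIMARY KEY (id1)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t2 (
 id2 Int64 NOT NULL,
 t1_id1 Int64 NOT NULL,
 -- random_field2 Int32
 PRIMARY KEY (id2)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t3 (
 id3 Int16 NOT NULL,
 -- random_field3 Int32
 PRIMARY KEY (id3)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);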
2025-03-31T16:26:30.292121Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:26:30.292612Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:26:30.293619Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716;
|92.5%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_volatile/test-results/unittest/{meta.json ... results_accumulator.log}
>> DemoTx::Scenario_3 [GOOD]
>> KqpSqlIn::SecondaryIndex_SimpleKey [GOOD]
>> KqpSqlIn::SecondaryIndex_SimpleKey_In_And
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::IdxLookupLeftPredicate [GOOD]
Test command err:
Trying to start YDB, gRPC: 9471, MsgBus: 1734
2025-03-31T16:26:31.195960Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010872516679851:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:31.199834Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001939/r3tmp/tmp85mkFY/pdisk_1.dat 2025-03-31T16:26:31.928675Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:26:31.928768Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:26:31.946077Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:26:31.953480Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9471, node 1 2025-03-31T16:26:32.215917Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:26:32.215945Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:26:32.215952Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:26:32.216086Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1734 TClient is connected to server localhost:1734 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:26:32.984410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:33.025939Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:26:33.043511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting...
2025-03-31T16:26:33.230106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:33.424350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:33.560498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:35.391063Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010889696550785:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:35.391335Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:35.768917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:26:35.814865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:26:35.858225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:26:35.912620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:26:35.960125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:26:36.009600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:26:36.095527Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010893991518594:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:36.095620Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:36.096016Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010893991518599:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:36.100207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:26:36.118090Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-31T16:26:36.118355Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010893991518601:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:26:36.198454Z node 1 :TX_PROXY ERROR: Actor# [1:7488010893991518657:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:26:36.198673Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010872516679851:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:36.198772Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:26:37.353031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:26:37.415074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:26:37.453590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480
>> KqpSqlIn::TupleLiteral [GOOD]
>> KqpSqlIn::TupleSelect
>> KqpRanges::WhereInSubquery
|92.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest
>> KqpSinkTx::OlapSnapshotROInteractive2 [GOOD]
>> KqpJoin::LeftJoinPushdownPredicate_NestedJoin
>> DemoTx::Scenario_4
>> KqpRanges::DeleteNotFullScan-UseSink [GOOD]
>> KqpNewEngine::Aggregate [GOOD]
>> KqpNewEngine::AggregateTuple
>> KqpNewEngine::DecimalColumn [GOOD]
>> KqpNewEngine::DecimalColumn35
>> KqpJoin::RightSemiJoin_SecondaryIndex
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::OlapSnapshotROInteractive2 [GOOD]
Test command err:
Trying to start YDB, gRPC: 3573, MsgBus: 13725
2025-03-31T16:25:56.990517Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010725814008916:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:56.990625Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001290/r3tmp/tmpGtnYb7/pdisk_1.dat 2025-03-31T16:25:57.407521Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:25:57.414700Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:57.414776Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 3573, node 1 2025-03-31T16:25:57.418662Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:25:57.473522Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:25:57.473550Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:25:57.473560Z node 1 :NET_CLASSIFIER WARN:
failed to initialize from file: (empty maybe) 2025-03-31T16:25:57.473702Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13725 TClient is connected to server localhost:13725 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:25:58.026134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:58.040397Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:26:00.115827Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010742993878774:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:00.115957Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010742993878766:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:00.116120Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:00.121357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:26:00.134656Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010742993878780:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:26:00.244780Z node 1 :TX_PROXY ERROR: Actor# [1:7488010742993878831:2337] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:26:00.688127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:26:00.889818Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488010742993879028:2346];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:26:00.889855Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488010742993879042:2351];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:26:00.890038Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488010742993879028:2346];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:26:00.890337Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488010742993879028:2346];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:26:00.890467Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488010742993879028:2346];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:26:00.890574Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488010742993879028:2346];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:26:00.890615Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488010742993879042:2351];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:26:00.890733Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488010742993879028:2346];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:26:00.890851Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488010742993879042:2351];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:26:00.890898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488010742993879028:2346];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:26:00.891015Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488010742993879042:2351];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 
2025-03-31T16:26:00.891036Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488010742993879028:2346];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:26:00.891177Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488010742993879042:2351];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:26:00.891301Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488010742993879042:2351];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:26:00.891405Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488010742993879042:2351];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:26:00.891512Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488010742993879042:2351];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:26:00.891659Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488010742993879042:2351];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:26:00.891699Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488010742993879028:2346];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:26:00.891852Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488010742993879042:2351];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:26:00.892014Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488010742993879028:2346];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:26:00.892031Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488010742993879042:2351];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:26:00.892148Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488010742993879028:2346];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:26:00.892162Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488010742993879042:2351];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:26:00.892265Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037894;self_id=[1:7488010742993879028:2346];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:26:00.945592Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488010742993879017:2344];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:26:00.945684Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488010742993879017:2344];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstr ... =NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037999;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:26:34.490938Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038018;self_id=[2:7488010860340725783:3264];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038018;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:26:34.498367Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038021;self_id=[2:7488010860340725737:3248];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038021;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:26:34.499347Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038021;self_id=[2:7488010860340725737:3248];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038021;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:26:34.509171Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038005;self_id=[2:7488010860340725679:3239];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038005;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:26:34.509451Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038076;self_id=[2:7488010860340727019:3373];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038076;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:26:34.509605Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038090;self_id=[2:7488010864635694373:3396];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038090;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:26:34.511947Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038005;self_id=[2:7488010860340725679:3239];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038005;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:26:34.512162Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038076;self_id=[2:7488010860340727019:3373];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038076;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:26:34.512281Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038090;self_id=[2:7488010864635694373:3396];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038090;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:26:34.517657Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038094;self_id=[2:7488010860340726994:3363];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038094;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 
2025-03-31T16:26:34.519194Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038094;self_id=[2:7488010860340726994:3363];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038094;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:26:34.528161Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038075;self_id=[2:7488010864635694343:3382];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038075;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:26:34.528478Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038078;self_id=[2:7488010860340726916:3352];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038078;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:26:34.531829Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038075;self_id=[2:7488010864635694343:3382];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038075;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:26:34.532061Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038078;self_id=[2:7488010860340726916:3352];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038078;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:26:34.532588Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038088;self_id=[2:7488010864635694353:3388];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038088;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:26:34.533179Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038088;self_id=[2:7488010864635694353:3388];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038088;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:26:34.535023Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038097;self_id=[2:7488010860340726892:3346];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038097;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:26:34.535319Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038092;self_id=[2:7488010860340726981:3357];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038092;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:26:34.536123Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038097;self_id=[2:7488010860340726892:3346];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038097;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:26:34.536266Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038092;self_id=[2:7488010860340726981:3357];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038092;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:26:34.546284Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038048;self_id=[2:7488010860340726769:3329];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038048;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:26:34.552781Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038048;self_id=[2:7488010860340726769:3329];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038048;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 
2025-03-31T16:26:34.563579Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038069;self_id=[2:7488010864635694534:3432];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038069;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:26:34.563824Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038069;self_id=[2:7488010864635694534:3432];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038069;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:26:34.564013Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038068;self_id=[2:7488010864635694540:3433];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038068;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:26:34.564149Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038068;self_id=[2:7488010864635694540:3433];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038068;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:26:34.653014Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038047;self_id=[2:7488010860340726889:3345];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038047;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:26:34.653324Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038047;self_id=[2:7488010860340726889:3345];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038047;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:26:34.666470Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038071;self_id=[2:7488010864635694688:3465];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038071;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:26:34.670505Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038026;self_id=[2:7488010860340725757:3258];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038026;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:26:34.670762Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038026;self_id=[2:7488010860340725757:3258];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038026;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:26:34.670934Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038067;self_id=[2:7488010864635694498:3415];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038067;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:26:34.671209Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038050;self_id=[2:7488010860340726778:3334];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038050;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:26:34.671400Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038019;self_id=[2:7488010860340725787:3266];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038019;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:26:34.673872Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038017;self_id=[2:7488010860340725815:3278];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038017;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:26:34.683403Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038082;self_id=[2:7488010860340726991:3362];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038082;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:26:34.723590Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038071;self_id=[2:7488010864635694688:3465];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038071;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:26:34.723590Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038067;self_id=[2:7488010864635694498:3415];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038067;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:26:34.723819Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038019;self_id=[2:7488010860340725787:3266];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038019;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:26:34.723819Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038050;self_id=[2:7488010860340726778:3334];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038050;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:26:34.723946Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038017;self_id=[2:7488010860340725815:3278];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038017;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:26:34.724006Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038082;self_id=[2:7488010860340726991:3362];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038082;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled;
WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0
>> KqpLimits::QSReplySize+useSink [GOOD]
>> KqpReturning::ReturningWorks-QueryService [GOOD]
>> KqpReturning::ReturningWorksIndexedDelete+QueryService
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpRanges::DeleteNotFullScan-UseSink [GOOD]
Test command err:
Trying to start YDB, gRPC: 27176, MsgBus: 7532
2025-03-31T16:25:53.361512Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010712419958465:2198];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:53.361744Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ac3/r3tmp/tmpLvpiC6/pdisk_1.dat 2025-03-31T16:25:53.791889Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:25:53.795476Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:53.795584Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:53.800185Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 27176, node 1 2025-03-31T16:25:53.921420Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:25:53.921443Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:25:53.921448Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:25:53.921552Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7532 TClient is connected to server localhost:7532 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:25:54.482836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:54.502673Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:25:54.513519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:54.678461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:54.855518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:25:54.935777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:25:56.676254Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010725304861997:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:56.676351Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:56.991716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:25:57.029380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:25:57.063510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:25:57.094026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:25:57.121540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:25:57.165703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:25:57.225686Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010729599829806:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:57.225760Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:57.225920Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010729599829811:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:57.229807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:25:57.241840Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010729599829813:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:25:57.299183Z node 1 :TX_PROXY ERROR: Actor# [1:7488010729599829866:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:25:58.198456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:25:58.355252Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010712419958465:2198];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:58.368387Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:25:58.458719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T16:25:58.632178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-31T16:25:58.787266Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-31T16:25:59.085741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 65498, MsgBus: 27573 2025-03-31T16:26:00.534208Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488010742484497756:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:00.534294Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ac3/r3tmp/tmpTe592v/pdisk_1.dat 2025-03-31T16:26:00.697892Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:26:00.735949Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:26:00.736039Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:26:00.737848Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 65498, node 2 2025-03-31T16:26:00.809655Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:26:00.809688Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:26:00.809696Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:26:00.809833Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27573 TClient is connected to 
server localhost:27573 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootI ... cess permissions } 2025-03-31T16:26:27.769849Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:27.770125Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7488010854856224434:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:27.775372Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:26:27.796980Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7488010854856224436:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:26:27.854676Z node 5 :TX_PROXY ERROR: Actor# [5:7488010854856224490:3452] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:26:29.165884Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:26:29.531262Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-31T16:26:29.757090Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-03-31T16:26:30.058595Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2025-03-31T16:26:30.516370Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 15336, MsgBus: 13175 2025-03-31T16:26:33.832837Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7488010881478721937:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:33.832884Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ac3/r3tmp/tmprizheX/pdisk_1.dat 2025-03-31T16:26:34.150298Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:26:34.157080Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:26:34.157190Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:26:34.160336Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15336, node 6 2025-03-31T16:26:34.306199Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:26:34.306228Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:26:34.306238Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:26:34.306398Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13175 TClient is connected to server localhost:13175 WaitRootIsUp 'Root'... 
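The WARN/ERROR signatures above recur for every node bootstrap in this run: the KQP_WORKLOAD_SERVICE resource-pool NOT_FOUND pair, the FLAT_TX_SCHEMESHARD "propose itself is undo unsafe" notices, and the empty NET_CLASSIFIER configuration. When triaging a log of this shape it helps to tally the known-repeating signatures and surface only the ERROR lines that match none of them. A minimal sketch in Python, assuming the run has been saved to a text file; the path and the signature list are illustrative, not taken from the test output:

import re
from collections import Counter

# Hypothetical path to the captured log; the signatures are the recurring
# benign patterns visible in this run, not an exhaustive list.
LOG_PATH = "ya_test.log"
SIGNATURES = [
    "Resource pool default not found",
    "propose itself is undo unsafe",
    "distributable config is empty, broken or outdated",
    "Table profiles were not loaded",
]

counts = Counter()
unmatched_errors = []
with open(LOG_PATH, encoding="utf-8", errors="replace") as fh:
    for line in fh:
        for sig in SIGNATURES:
            if sig in line:
                counts[sig] += 1
                break
        else:
            # ERROR-level lines outside the known-benign set deserve a look.
            if " ERROR: " in line:
                unmatched_errors.append(line.rstrip())

for sig, n in counts.most_common():
    print(f"{n:6d}  {sig}")
print(f"{len(unmatched_errors)} unmatched ERROR lines")

Run against a capture like this one, the unmatched bucket is what separates routine startup noise from an actual failure.
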
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:26:35.293800Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:35.302224Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:26:35.310819Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:35.410250Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:35.681244Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:35.796875Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:38.828393Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488010902953560189:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:38.828485Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:38.835343Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7488010881478721937:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:38.835417Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:26:38.942110Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:26:39.022035Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:26:39.069930Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:26:39.152530Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:26:39.230427Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:26:39.350390Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:26:39.459549Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488010907248528013:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:39.459667Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:39.460137Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488010907248528018:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:39.465372Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:26:39.499335Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7488010907248528020:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:26:39.562467Z node 6 :TX_PROXY ERROR: Actor# [6:7488010907248528076:3459] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } {"Plan":{"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["Join2"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Path":"\/Root\/Join2","Name":"Delete","Table":"Join2"},{"Inputs":[],"Iterator":"precompute_0_0","Name":"Iterator"}],"Node Type":"Delete-ConstantExpr","Stats":{"ComputeNodes":[{"Tasks":[{"NodeId":6,"FinishTimeMs":1743438401731,"TaskId":1,"Host":"ghrun-tuhoqst4xe","ComputeTimeUs":207}],"CpuTimeUs":1023}],"UseLlvm":"undefined","Tasks":1,"FinishedTasks":0,"PhysicalStageId":0,"StageDurationUs":0,"Table":[{"Path":"\/Root\/Join2"}],"BaseTimeMs":1743438401731,"NodesScanShards":[],"CpuTimeUs":{"Count":1,"Sum":1023,"Max":1023,"Min":1023}},"CTE Name":"precompute_0_0"}],"Node Type":"Effect"}],"Node Type":"Query","Stats":{"Compilation":{"FromCache":false,"DurationUs":472671,"CpuTimeUs":461690},"ProcessCpuTimeUs":2333,"TotalDurationUs":497562,"ResourcePoolId":"default","QueuedTimeUs":6099},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Operators":[{"A-SelfCpu":1.023,"A-Cpu":1.023,"Path":"\/Root\/Join2","Name":"Delete","Table":"Join2"}],"Node Type":"Delete"}],"Node Type":"Effect"}],"Node Type":"Query","PlanNodeType":"Query"}} >> KqpNewEngine::DeleteON [GOOD] >> KqpNewEngine::DeleteByKey >> TopicService::UnknownConsumer [GOOD] >> KqpTx::TooManyTx [GOOD] >> KqpTx::SnapshotROInteractive2 >> OlapEstimationRowsCorrectness::TPCH21 >> TColumnShardTestReadWrite::CompactionInGranule_PKInt64_Reboot [GOOD] >> KqpNotNullColumns::OptionalParametersScanQuery [GOOD] >> KqpNewEngine::UnionAllPure [GOOD] >> KqpNewEngine::StreamLookupForDataQuery+StreamLookupJoin >> KqpJoin::FullOuterJoinSizeCheck [GOOD] >> KqpIndexLookupJoin::CheckCastUint64ToInt64+StreamLookupJoin-NotNull >> TopicService::UnknownTopic ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKInt64_Reboot [GOOD] Test command err: 2025-03-31T16:22:02.819708Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:22:02.920711Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:22:02.944767Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:22:02.945031Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:22:02.953300Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:22:02.953515Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:22:02.953749Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:22:02.953875Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:22:02.953985Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:22:02.954101Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:22:02.954244Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:22:02.954358Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:22:02.954463Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:22:02.954589Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:22:02.954694Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:22:02.954815Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:22:02.983332Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:22:02.983492Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:22:02.983536Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:22:02.983720Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:22:02.983905Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:22:02.983970Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:22:02.984010Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:22:02.984117Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:22:02.984181Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:22:02.984218Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:22:02.984256Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:22:02.984464Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:22:02.984547Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:22:02.984593Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:22:02.984632Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:22:02.984774Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:22:02.984836Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:22:02.984893Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:22:02.984931Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:22:02.985001Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:22:02.985033Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:22:02.985062Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:22:02.985120Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:22:02.985168Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:22:02.985215Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:22:02.985586Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=46; 2025-03-31T16:22:02.985678Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=48; 2025-03-31T16:22:02.985750Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=31; 2025-03-31T16:22:02.985830Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=39; 2025-03-31T16:22:02.985973Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:22:02.986028Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:22:02.986061Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:22:02.986266Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:22:02.986320Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:22:02.986359Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:22:02.986539Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:22:02.986584Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:22:02.986611Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-31T16:22:02.986803Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:22:02.986861Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:22:02.986896Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-31T16:22:02.987023Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T16:22:02.987066Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T16:22:02.987129Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... LOB:0:2696];;column_id:8;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:55;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:56;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:57;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:58;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:64;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:65;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:66;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:67;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:68;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:69;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:70;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:71;blob_range:[NO_BLOB:0:2672];;column_id:8;chunk_idx:72;blob_range:[NO_BLOB:0:2664];;column_id:8;chunk_idx:73;blob_range:[NO_BLOB:0:8448];;column_id:9;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:8;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:9;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:10;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:11;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:12;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:13;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:14;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:15;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:9;
chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:22;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:23;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:30;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:32;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:33;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:34;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:35;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:36;blob_range:[NO_BLOB:0:8464];;column_id:9;chunk_idx:37;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:55;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:56;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:57;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:58;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:64;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:65;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:66;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:67;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:68;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:69;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:70;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:71;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:72;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:73;blob_range:[NO_BLOB:0:8448];;column_id:7;chunk_idx:0;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:1;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:2;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:3;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:4;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:5;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:6;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:7;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:8;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:9;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:10;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:11;blob_range:[NO_BLOB:0:2760];;column_id:7;c
hunk_idx:12;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:13;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:14;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:15;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:16;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:17;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:18;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:19;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:20;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:21;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:22;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:23;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:24;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:25;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:26;blob_range:[NO_BLOB:0:9040];;column_id:7;chunk_idx:27;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:28;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:29;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:30;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:31;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:32;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:33;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:34;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:35;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:36;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:37;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:38;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:39;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:40;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:41;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:42;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:43;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:44;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:45;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:46;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:47;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:48;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:49;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:50;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:51;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:52;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:53;blob_range:[NO_BLOB:0:9024];;column_id:5;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:8;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:9;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:10;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:11;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:12;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:13;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:14;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:15;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:16;blob_range:[NO_BLOB:0:9456];;column_id:5;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:23;blob_range:[NO_BLOB:0:2696];;column_id:5;ch
unk_idx:24;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:30;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:32;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:33;blob_range:[NO_BLOB:0:9448];;;;switched=(portion_id:61;path_id:1;records_count:25002;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2586528;index_size:28;meta:((produced=SPLIT_COMPACTED;)););(portion_id:57;path_id:1;records_count:25002;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2167032;index_size:28;meta:((produced=INSERTED;)););; 2025-03-31T16:26:45.558315Z node 1 :TX_COLUMNSHARD INFO: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;tablet_id=9437184;parent_id=[1:11490:13117];fline=general_compaction.cpp:135;event=blobs_created;appended=1;switched=2; 2025-03-31T16:26:45.560673Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:11490:13117];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNotNullColumns::OptionalParametersScanQuery [GOOD] Test command err: Trying to start YDB, gRPC: 14464, MsgBus: 62185 2025-03-31T16:26:01.623643Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010747327523942:2191];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:01.624453Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000abc/r3tmp/tmpriJAOF/pdisk_1.dat 2025-03-31T16:26:02.156800Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:26:02.156930Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:26:02.160047Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:26:02.188888Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14464, node 1 2025-03-31T16:26:02.201139Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:26:02.201286Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:26:02.363643Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: 
(empty maybe) 2025-03-31T16:26:02.363666Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:26:02.363673Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:26:02.363776Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62185 TClient is connected to server localhost:62185 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:26:03.115758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:03.134062Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:26:05.412280Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010764507393659:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:05.412441Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:05.876219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:26:06.056850Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010768802361061:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:06.056963Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:06.057352Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010768802361066:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:06.061518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-31T16:26:06.089013Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010768802361068:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-31T16:26:06.164218Z node 1 :TX_PROXY ERROR: Actor# [1:7488010768802361120:2401] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:26:06.459390Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488010768802361165:2357], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:1:14: Error: At function: KiWriteTable!
:1:14: Error: Missing not null column in input: Value. All not null columns should be initialized, code: 2032 2025-03-31T16:26:06.460685Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Y2EyOTZkMzctODVkMGQyMzEtZDlkNjZmY2ItY2I0ODY4ZmM=, ActorId: [1:7488010764507393631:2327], ActorState: ExecuteState, TraceId: 01jqpgchpb2p5addzgdjm0vknr, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: 2025-03-31T16:26:06.507266Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488010768802361174:2361], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:14: Error: At function: KiWriteTable!
:1:47: Error: Failed to convert type: Struct<'Key':Int32,'Value':Null> to Struct<'Key':Uint64?,'Value':String>
:1:47: Error: Failed to convert 'Value': Null to String
:1:47: Error: Failed to convert input columns types to scheme types, code: 2031 2025-03-31T16:26:06.508569Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Y2EyOTZkMzctODVkMGQyMzEtZDlkNjZmY2ItY2I0ODY4ZmM=, ActorId: [1:7488010764507393631:2327], ActorState: ExecuteState, TraceId: 01jqpgchrk9s0b1a67y62s1t1z, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-03-31T16:26:06.616278Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010747327523942:2191];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:06.616383Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 12905, MsgBus: 2646 2025-03-31T16:26:07.468256Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488010769251890942:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:07.468765Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000abc/r3tmp/tmpABAfFu/pdisk_1.dat 2025-03-31T16:26:07.751629Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:26:07.753986Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:26:07.754056Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:26:07.760090Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12905, node 2 2025-03-31T16:26:07.943661Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:26:07.943687Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:26:07.943696Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:26:07.943820Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2646 TClient is connected to server localhost:2646 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
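The two ReplyQueryCompileError entries above (BAD_REQUEST, then GENERIC_ERROR) show the not-null contract being enforced at type-annotation time: an upsert that omits the NOT NULL column Value is rejected with "Missing not null column in input", and binding an explicit Null into it fails the Struct conversion. The same check, mirrored client-side as a sketch; the schema literal and helper below are illustrative, not the KQP implementation:

# Client-side mirror of the not-null checks KQP applies at compile time
# in the errors above. The schema and rows are illustrative.
SCHEMA = {
    "Key":   {"type": "Uint64", "not_null": False},
    "Value": {"type": "String", "not_null": True},
}

def validate_upsert(row):
    issues = []
    for col, spec in SCHEMA.items():
        if spec["not_null"]:
            if col not in row:
                issues.append(f"Missing not null column in input: {col}")
            elif row[col] is None:
                issues.append(f"Failed to convert '{col}': Null to {spec['type']}")
    return issues

print(validate_upsert({"Key": 1}))                 # missing Value -> rejected
print(validate_upsert({"Key": 1, "Value": None}))  # Null into NOT NULL -> rejected
print(validate_upsert({"Key": 1, "Value": "ok"}))  # accepted -> []
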
2025-03-31T16:26:08.492468Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:08.499510Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:26:11.078972Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010786431760774:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:11.079030Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:11.096732Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:26:11.207512Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010786431760877:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access per ... at schemeshard: 72057594046644480 2025-03-31T16:26:33.103489Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:26:33.161101Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:26:33.257834Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:26:33.335254Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7488010882122002937:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:33.335364Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:33.339151Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7488010882122002942:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:33.343916Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:26:33.360013Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7488010882122002944:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:26:33.418749Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7488010860647164173:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:33.418835Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:26:33.443842Z node 5 :TX_PROXY ERROR: Actor# [5:7488010882122002999:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:26:34.594889Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 23804, MsgBus: 31046 2025-03-31T16:26:36.895563Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7488010895084534009:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:36.895627Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000abc/r3tmp/tmpEyZ7iF/pdisk_1.dat 2025-03-31T16:26:37.058095Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:26:37.090162Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:26:37.090273Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:26:37.092256Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23804, node 6 2025-03-31T16:26:37.183763Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:26:37.183792Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:26:37.183800Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:26:37.183952Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31046 TClient is connected to server localhost:31046 WaitRootIsUp 'Root'... 
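This "Scheduled retry ... doublechecking" WARN followed by the TX_PROXY "path exist, request accepts it" ERROR repeats for every node in the run and appears benign: the retried create of /Root/.metadata/workload_manager/pools/default finds the path already present and the proposal is accepted as idempotent. The general ensure-create pattern, sketched below; the exception names and retry policy are illustrative, not the YDB actor code:

import time

class AlreadyExists(Exception):
    """Stand-in for the 'path exist, request accepts it' outcome."""

def ensure_created(create, attempts=3, backoff=0.05):
    # Idempotent ensure-create: "already exists" counts as success,
    # anything else transient is retried with linear backoff.
    for attempt in range(attempts):
        try:
            create()
            return True
        except AlreadyExists:
            return True  # a concurrent creator won the race; done
        except Exception:
            if attempt + 1 == attempts:
                raise
            time.sleep(backoff * (attempt + 1))
    return False

calls = {"n": 0}
def flaky_create():
    # First attempt fails transiently; the retry then finds the path
    # already created, as in the log sequence above.
    calls["n"] += 1
    if calls["n"] == 1:
        raise RuntimeError("transient: doublechecking")
    raise AlreadyExists("path exist, request accepts it")

print(ensure_created(flaky_create))  # True

Under this pattern the ERROR-severity "path exist" issue is the success path of the second attempt, which is consistent with the surrounding tests still reporting [GOOD].
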
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-31T16:26:38.096853Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:26:38.104266Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:26:38.112570Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:38.194633Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:38.462085Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:38.604148Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:41.895301Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7488010895084534009:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:41.895374Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:26:42.244374Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488010920854339570:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:42.244520Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:42.326313Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:26:42.411463Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:26:42.478052Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:26:42.530494Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:26:42.585328Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:26:42.670052Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:26:42.773203Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488010920854340090:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:42.773348Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:42.773859Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488010920854340095:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:42.779035Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:26:42.803517Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7488010920854340097:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:26:42.866498Z node 6 :TX_PROXY ERROR: Actor# [6:7488010920854340152:3458] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:26:44.434890Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:26:44.958087Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438404960, txId: 281474976710673] shutting down 2025-03-31T16:26:45.336582Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438405359, txId: 281474976710675] shutting down 2025-03-31T16:26:45.613877Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438405646, txId: 281474976710677] shutting down >> TPersQueueTest::Cache [GOOD] >> TPersQueueTest::CacheHead ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::FullOuterJoinSizeCheck [GOOD] Test command err: Trying to start YDB, gRPC: 10804, MsgBus: 19439 2025-03-31T16:26:38.159305Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010902132903826:2185];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:38.159836Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001935/r3tmp/tmplz7kI2/pdisk_1.dat 2025-03-31T16:26:38.782120Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:26:38.825430Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:26:38.825505Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:26:38.832356Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10804, node 1 2025-03-31T16:26:39.055598Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:26:39.055619Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:26:39.055628Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:26:39.055722Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19439 TClient is connected to server localhost:19439 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:26:39.842540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:39.868345Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:26:39.892361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:40.073499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:40.322874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:26:40.441626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:26:42.536807Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010919312774652:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:42.536924Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:42.800730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:26:42.889258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:26:42.927775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:26:42.988330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:26:43.024093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:26:43.108375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:26:43.137403Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010902132903826:2185];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:43.139315Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:26:43.197494Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010923607742467:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:43.197558Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:43.198868Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010923607742473:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:43.202931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:26:43.222779Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010923607742475:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:26:43.315523Z node 1 :TX_PROXY ERROR: Actor# [1:7488010923607742532:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:26:44.467722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:26:44.508243Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:26:44.572577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 >> KqpJoinOrder::OltpJoinTypeHintCBOTurnOFF >> KqpRanges::WhereInSubquery [GOOD] >> KqpReturning::ReturningTwice >> KqpIndexLookupJoin::LeftJoinCustomColumnOrder+StreamLookup >> KqpNewEngine::AggregateTuple [GOOD] >> KqpNewEngine::AsyncIndexUpdate >> KqpIndexLookupJoin::LeftOnlyJoinValueColumn+StreamLookup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::QSReplySize+useSink [GOOD] Test command err: Trying to start YDB, gRPC: 16137, MsgBus: 4541 2025-03-31T16:19:58.494581Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488009184238822110:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:19:58.494697Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a89/r3tmp/tmpzGsIux/pdisk_1.dat 2025-03-31T16:19:58.830604Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:19:58.830695Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:19:58.835648Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:19:58.869604Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16137, node 1 2025-03-31T16:19:58.883521Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:19:58.883544Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:19:58.905670Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:19:58.905695Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:19:58.905708Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:19:58.905867Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4541 TClient is connected to server localhost:4541 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:19:59.305845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:59.339307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:59.455391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:59.590938Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:19:59.654080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:00.760955Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009192828759503:2410], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:00.761097Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:00.975521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:20:01.009549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:20:01.044374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:20:01.078468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:20:01.113662Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:20:01.153604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:20:01.202620Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009197123727686:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:01.202701Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:01.202788Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488009197123727691:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:20:01.206160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:20:01.219989Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488009197123727693:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:20:01.291263Z node 1 :TX_PROXY ERROR: Actor# [1:7488009197123727780:4845] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:20:01.868458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:20:03.494347Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488009184238822110:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:20:03.494407Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:20:13.869031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:20:13.869061Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:20:16.994758Z node 1 :TX_DATASHARD ERROR: CPU usage 77.2474 is higher than threshold of 60 in-flight Tx: 0 immediate Tx: 0 readIterators: 0 at datashard: 72075186224037919 table: [/Root/LargeTable] 2025-03-31T16:20:59.938906Z node 1 :HIVE ERROR: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923055352320}: tablet 72075186224037919 could not find a group for channel 1 pool /Root:test 2025-03-31T16:20:59.938959Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923055352320}: tablet 72075186224037919 wasn't changed 2025-03-31T16:20:59.938992Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923055352320}: tablet 72075186224037919 skipped channel 1 2025-03-31T16:20:59.965413Z node 1 :HIVE ERROR: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923055348512}: tablet 72075186224037919 could not find a group for channel 0 pool /Root:test 2025-03-31T16:20:59.965466Z node 1 :HIVE ERROR: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923055348512}: tablet 72075186224037919 could not find a group for channel 1 pool /Root:test 2025-03-31T16:20:59.965489Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923055348512}: tablet 72075186224037919 wasn't changed 2025-03-31T16:20:59.965512Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923055348512}: tablet 72075186224037919 skipped channel 0 2025-03-31T16:20:59.965556Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923055348512}: tablet 72075186224037919 skipped channel 1 2025-03-31T16:21:00.999408Z node 1 :HIVE ERROR: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923003574272}: tablet 72075186224037911 could not find a group for channel 0 pool /Root:test 2025-03-31T16:21:00.999464Z node 1 :HIVE ERROR: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923003574272}: tablet 72075186224037911 could not find a group for channel 1 pool /Root:test 2025-03-31T16:21:00.999482Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923003574272}: tablet 72075186224037911 wasn't changed 2025-03-31T16:21:00.999499Z node 1 :HIVE WARN: HIVE#72057594037968897 
THive::TTxUpdateTabletGroups::Execute{88923003574272}: tablet 72075186224037911 skipped channel 0 2025-03-31T16:21:00.999548Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923003574272}: tablet 72075186224037911 skipped channel 1 2025-03-31T16:21:01.036917Z node 1 :HIVE ERROR: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923016194208}: tablet 72075186224037912 could not find a group for channel 0 pool /Root:test 2025-03-31T16:21:01.036949Z node 1 :HIVE ERROR: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923016194208}: tablet 72075186224037912 could not find a group for channel 1 pool /Root:test 2025-03-31T16:21:01.036960Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923016194208}: tablet 72075186224037912 wasn't changed 2025-03-31T16:21:01.036969Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923016194208}: tablet 72075186224037912 skipped channel 0 2025-03-31T16:21:01.036985Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{889230161942 ... 5-03-31T16:26:19.687481Z node 3 :BS_SKELETON WARN: VDISK[82000000:_:0:0:0]: TDskSpaceTrackerActor: YELLOW ZONE Marker# BSVSOOST01 2025-03-31T16:26:19.687565Z node 3 :BS_SKELETON WARN: VDISK[0:_:0:0:0]: TDskSpaceTrackerActor: YELLOW ZONE Marker# BSVSOOST01 2025-03-31T16:26:20.688418Z node 3 :BS_SKELETON WARN: VDISK[0:_:0:0:0]: TDskSpaceTrackerActor: YELLOW ZONE Marker# BSVSOOST01 2025-03-31T16:26:20.688553Z node 3 :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: TDskSpaceTrackerActor: LIGHT_ORANGE ZONE Marker# BSVSOOST01 2025-03-31T16:26:20.800064Z node 3 :TX_DATASHARD ERROR: Cannot perform transaction: out of disk space at tablet 72075186224037890 txId 281474976710761 2025-03-31T16:26:20.800153Z node 3 :TX_DATASHARD ERROR: Prepare transaction failed. txid 281474976710761 at tablet 72075186224037890 errors: OUT_OF_SPACE (Cannot perform transaction: out of disk space at tablet 72075186224037890 txId 281474976710761) | 2025-03-31T16:26:20.800259Z node 3 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976710761 at tablet 72075186224037890 status: ERROR errors: OUT_OF_SPACE (Cannot perform transaction: out of disk space at tablet 72075186224037890 txId 281474976710761) | 2025-03-31T16:26:20.803337Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7488010827699615384:2338] TxId: 281474976710761. Ctx: { TraceId: 01jqpgcyt8cvdfzaj8b58k1tm6, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YjU1NmY5YTAtODMwMjY1ZDAtNjUyNWJkYWQtMmEzYzJiMDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ERROR: [OUT_OF_SPACE] Cannot perform transaction: out of disk space at tablet 72075186224037890 txId 281474976710761; 2025-03-31T16:26:20.804006Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YjU1NmY5YTAtODMwMjY1ZDAtNjUyNWJkYWQtMmEzYzJiMDg=, ActorId: [3:7488010170569606412:2338], ActorState: ExecuteState, TraceId: 01jqpgcyt8cvdfzaj8b58k1tm6, Create QueryResponse for error on request, msg: Got out of space. 
Successfully inserted 30 x 0 lines, each of size 1048576 bytes Trying to start YDB, gRPC: 28584, MsgBus: 2453 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a89/r3tmp/tmpFnOVyN/pdisk_1.dat 2025-03-31T16:26:22.881898Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:26:22.884928Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:26:22.923565Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:26:22.923670Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:26:22.925632Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28584, node 4 2025-03-31T16:26:23.031076Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:26:23.031127Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:26:23.031140Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:26:23.031314Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2453 TClient is connected to server localhost:2453 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:26:23.836750Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:23.846906Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:26:23.855580Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:23.970131Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:24.305411Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting...
2025-03-31T16:26:24.500069Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:26:27.214125Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488010856168664122:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:27.214234Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:27.293460Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:26:27.356673Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:26:27.408533Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:26:27.469076Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:26:27.549565Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:26:27.638067Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:26:27.782126Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488010856168664648:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:27.782280Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:27.783130Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488010856168664653:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:27.789405Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:26:27.815209Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7488010856168664655:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:26:27.905431Z node 4 :TX_PROXY ERROR: Actor# [4:7488010856168664713:3457] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:26:29.395040Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:37.859224Z node 4 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:26:37.859256Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:26:42.886235Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7488010912003240972:2666], SessionActorId: [4:7488010912003240954:2666], statusCode=PRECONDITION_FAILED. Issue=
: Error: Stream write queries aren't allowed., code: 2029 . sessionActorId=[4:7488010912003240954:2666]. isRollback=0 2025-03-31T16:26:43.014467Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MmUzZjUzMC0yZjk2YzRlOC1kNWU1MmJjNi0xZWEyMzNkNA==, ActorId: [4:7488010912003240954:2666], ActorState: ExecuteState, TraceId: 01jqpgdjsmfrtw0v72h3mqqc6g, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [4:7488010912003240973:2666] from: [4:7488010912003240972:2666] 2025-03-31T16:26:43.014598Z node 4 :KQP_EXECUTER ERROR: ActorId: [4:7488010912003240973:2666] TxId: 281474976710672. Ctx: { TraceId: 01jqpgdjsmfrtw0v72h3mqqc6g, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=MmUzZjUzMC0yZjk2YzRlOC1kNWU1MmJjNi0xZWEyMzNkNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Stream write queries aren't allowed., code: 2029 } 2025-03-31T16:26:43.014775Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7488010912003240981:2679], TxId: 281474976710672, task: 5. Ctx: { SessionId : ydb://session/3?node_id=4&id=MmUzZjUzMC0yZjk2YzRlOC1kNWU1MmJjNi0xZWEyMzNkNA==. CustomerSuppliedId : . TraceId : 01jqpgdjsmfrtw0v72h3mqqc6g. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [4:7488010912003240973:2666], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-03-31T16:26:43.017208Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MmUzZjUzMC0yZjk2YzRlOC1kNWU1MmJjNi0xZWEyMzNkNA==, ActorId: [4:7488010912003240954:2666], ActorState: ExecuteState, TraceId: 01jqpgdjsmfrtw0v72h3mqqc6g, Create QueryResponse for error on request, msg:
: Error: Stream write queries aren't allowed., code: 2029 >> KqpSqlIn::SecondaryIndex_SimpleKey_In_And [GOOD] >> KqpSqlIn::SecondaryIndex_SimpleKey_In_And_In >> KqpJoin::LeftJoinPushdownPredicate_NestedJoin [GOOD] >> KqpNewEngine::DecimalColumn35 [GOOD] >> KqpNewEngine::ComplexLookupLimit |92.5%| [TA] $(B)/ydb/core/kqp/ut/query/test-results/unittest/{meta.json ... results_accumulator.log} |92.5%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/query/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpTx::SnapshotROInteractive2 [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCDS78 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::LeftJoinPushdownPredicate_NestedJoin [GOOD] Test command err: Trying to start YDB, gRPC: 7735, MsgBus: 23403 2025-03-31T16:26:43.081767Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010925663609874:2219];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001934/r3tmp/tmpfHCPJh/pdisk_1.dat 2025-03-31T16:26:43.415553Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:26:43.680385Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:26:43.716994Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:26:43.717082Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:26:43.727917Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7735, node 1 2025-03-31T16:26:43.883352Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:26:43.883369Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:26:43.883405Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:26:43.883499Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23403 TClient is connected to server localhost:23403 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:26:44.870490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:26:44.909748Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:26:44.933437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:45.206224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:45.411850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:45.502744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:47.593051Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010942843480645:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:47.593145Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:48.065292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:26:48.075886Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010925663609874:2219];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:48.076556Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:26:48.109059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:26:48.190524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:26:48.238307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:26:48.322022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:26:48.403264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:26:48.502828Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010947138448472:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:48.502898Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:48.503250Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010947138448477:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:48.507696Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:26:48.522076Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010947138448479:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:26:48.597617Z node 1 :TX_PROXY ERROR: Actor# [1:7488010947138448536:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:26:49.626867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:26:49.694073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:26:49.728010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 >> DataShardWrite::WriteImmediateBadRequest >> KqpSqlIn::TupleSelect [GOOD] >> KqpSqlIn::TupleNotOnlyOfKeys >> KqpJoin::RightSemiJoin_SecondaryIndex [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::SnapshotROInteractive2 [GOOD] Test command err: Trying to start YDB, gRPC: 26956, MsgBus: 22490 2025-03-31T16:26:38.402905Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010906284727249:2239];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:38.404626Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00128a/r3tmp/tmppmq4uK/pdisk_1.dat 2025-03-31T16:26:39.124762Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:26:39.124852Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:26:39.164627Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:26:39.166162Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26956, node 1 2025-03-31T16:26:39.377967Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:26:39.377993Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:26:39.378015Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:26:39.378132Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22490 TClient is connected to server localhost:22490 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:26:40.174364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:26:40.249973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:26:40.524987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:40.817761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:40.951767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:42.919237Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010923464598014:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:42.919341Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:43.164745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:26:43.255043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:26:43.328425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:26:43.375204Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:26:43.402681Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010906284727249:2239];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:43.402746Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:26:43.430540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:26:43.481392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:26:43.555556Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010927759565830:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:43.555662Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:43.556093Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010927759565835:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:43.561297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:26:43.590525Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010927759565837:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:26:43.649345Z node 1 :TX_PROXY ERROR: Actor# [1:7488010927759565892:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:26:45.038119Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDk5Y2UwMGYtMTE4Nzk2YzktNWUwMzNhNjgtYzNkZWI5ODA=, ActorId: [1:7488010932054533488:2501], ActorState: ReadyState, TraceId: 01jqpgdqct2hd9c0drt9dbv52r, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:855: Too many transactions, current active: 2 MaxTxPerSession: 2 Trying to start YDB, gRPC: 28685, MsgBus: 2947 2025-03-31T16:26:45.959780Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488010936264800750:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:45.963533Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00128a/r3tmp/tmpL9ygp9/pdisk_1.dat 2025-03-31T16:26:46.232521Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:26:46.253876Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:26:46.253983Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:26:46.255620Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28685, node 2 2025-03-31T16:26:46.405037Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:26:46.405055Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:26:46.405062Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:26:46.405163Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2947 TClient is connected to server localhost:2947 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:26:47.040602Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:47.052192Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:26:47.067771Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:47.154810Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:47.339260Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:47.456887Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:50.037789Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010953444671560:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:50.037890Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:50.076198Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:26:50.119028Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:26:50.162113Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:26:50.225794Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:26:50.272190Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:26:50.363986Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:26:50.446786Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010957739639371:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:50.446881Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:50.447448Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010957739639376:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:50.451579Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:26:50.467723Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488010957739639378:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:26:50.559961Z node 2 :TX_PROXY ERROR: Actor# [2:7488010957739639433:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:26:50.952373Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488010936264800750:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:50.952424Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpIndexLookupJoin::CheckCastUint64ToInt64+StreamLookupJoin-NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastUint64ToInt64+StreamLookupJoin+NotNull >> KqpNewEngine::DeleteByKey [GOOD] >> KqpNewEngine::DeleteWithInputMultiConsumption+UseSink >> KqpReturning::ReturningWorksIndexedDelete+QueryService [GOOD] >> KqpReturning::ReturningWorksIndexedDelete-QueryService >> TPersQueueTest::StreamReadCreateAndDestroyMsgs [GOOD] >> TPersQueueTest::StreamReadCommitAndStatusMsgs >> KqpJoin::FullOuterJoinNotNullJoinKey ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::RightSemiJoin_SecondaryIndex [GOOD] Test command err: Trying to start YDB, gRPC: 10994, MsgBus: 19680 2025-03-31T16:26:43.863645Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010926525817694:2153];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:43.864075Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00192f/r3tmp/tmpM0jLMM/pdisk_1.dat 2025-03-31T16:26:44.619763Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:26:44.622633Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:26:44.622694Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:26:44.626661Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10994, node 1 2025-03-31T16:26:44.825896Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:26:44.825917Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:26:44.825923Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:26:44.826015Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19680 TClient is connected to server localhost:19680 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:26:45.574816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:45.599731Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:26:45.617982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:45.862327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:46.071377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:46.179427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:48.536078Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010948000655853:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:48.536190Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:48.863201Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010926525817694:2153];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:48.871590Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:26:48.970177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:26:49.029785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:26:49.079107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:26:49.115973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:26:49.203970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:26:49.286005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:26:49.403430Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010952295623674:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:49.403504Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:49.403924Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010952295623679:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:49.408519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:26:49.442580Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010952295623681:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:26:49.523355Z node 1 :TX_PROXY ERROR: Actor# [1:7488010952295623738:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:26:50.596498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:26:50.632732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:26:50.673799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T16:26:50.719862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T16:26:50.783959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-31T16:26:52.151434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480
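The long run of `Resource pool default not found or you don't have access permissions` warnings followed by `ESchemeOpCreateResourcePool` shows the workload service lazily creating `/Root/.metadata/workload_manager/pools/default` on first query; the later `path exist, request accepts it` issue is the benign race when several actors attempt that creation concurrently. A hedged sketch of declaring a pool explicitly instead is shown below; the pool name and limits are assumptions, and the accepted `CREATE RESOURCE POOL` options may vary between YDB versions:

```python
import ydb

driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")  # hypothetical
driver.wait(timeout=5)
pool = ydb.SessionPool(driver)

ddl = """
CREATE RESOURCE POOL demo_pool WITH (
    CONCURRENT_QUERY_LIMIT = 10,  -- illustrative limits, not from the log
    QUEUE_SIZE = 100
);
"""

# DDL goes through the scheme channel rather than a data transaction.
pool.retry_operation_sync(lambda session: session.execute_scheme(ddl))
pool.stop()
driver.stop()
```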
: Warning: Execution, code: 1060
:4:43: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001
: Warning: Execution, code: 1060
:3:53: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001 >> KqpNewEngine::StreamLookupForDataQuery+StreamLookupJoin [GOOD] >> KqpNewEngine::StreamLookupForDataQuery-StreamLookupJoin >> SystemView::AuthGroupMembers_TableRange [GOOD] >> KqpIndexLookupJoin::Inner+StreamLookup >> DemoTx::Scenario_4 [GOOD] >> KqpReturning::ReturningTwice [GOOD] >> KqpReturning::ReplaceSerial >> KqpIndexLookupJoin::LeftJoinCustomColumnOrder+StreamLookup [GOOD] >> KqpIndexLookupJoin::LeftJoinCustomColumnOrder-StreamLookup >> KqpJoinOrder::FiveWayJoinStatsOverride-ColumnStore >> KqpNewEngine::AsyncIndexUpdate [GOOD] >> KqpNewEngine::AutoChooseIndex >> TopicService::UnknownTopic [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest >> SystemView::AuthGroupMembers_TableRange [GOOD] Test command err: 2025-03-31T16:25:17.906965Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010557074528359:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:17.907228Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0019fc/r3tmp/tmpbTpTt1/pdisk_1.dat 2025-03-31T16:25:18.215380Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2324, node 1 2025-03-31T16:25:18.269458Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:18.269588Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:18.272447Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:25:18.392538Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:25:18.392564Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:25:18.392575Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:25:18.392709Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22154 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
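The `code: 8001` warnings earlier in this block ("Cost Based Optimizer could not be applied to this query: couldn't load statistics") mean join ordering fell back to heuristics because no table statistics could be loaded. One hedged remedy is sketched below, assuming the build under test accepts `ANALYZE` for statistics collection (version-dependent) and using a hypothetical table path:

```python
import ydb

driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")  # hypothetical
driver.wait(timeout=5)
pool = ydb.SessionPool(driver)

def analyze(session):
    # Assumption: ANALYZE is supported and accepted on the scheme channel in
    # this YDB version; it populates the statistics the CBO failed to load.
    session.execute_scheme("ANALYZE `/Root/sample_table`;")

pool.retry_operation_sync(analyze)
pool.stop()
driver.stop()
```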
2025-03-31T16:25:18.874525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:19.029543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateExtSubDomain SubDomain { Name: "Tenant1" } } TxId: 281474976710658 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-03-31T16:25:19.029816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateExtSubDomain Propose, path/Root/Tenant1, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:25:19.029919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: Tenant1, child id: [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-03-31T16:25:19.030077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 0 2025-03-31T16:25:19.030247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-03-31T16:25:19.030369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-31T16:25:19.030394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:25:19.030504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-03-31T16:25:19.030542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-03-31T16:25:19.033237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710658, response: Status: StatusAccepted TxId: 281474976710658 SchemeshardId: 72057594046644480 PathId: 2, at schemeshard: 72057594046644480 2025-03-31T16:25:19.033512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE DATABASE, path: /Root/Tenant1 2025-03-31T16:25:19.033747Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-03-31T16:25:19.033795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-03-31T16:25:19.033950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2025-03-31T16:25:19.034058Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-03-31T16:25:19.034086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7488010561369496262:2389], at schemeshard: 72057594046644480, txId: 281474976710658, path id: 1 2025-03-31T16:25:19.034134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to 
populator: [1:7488010561369496262:2389], at schemeshard: 72057594046644480, txId: 281474976710658, path id: 2 2025-03-31T16:25:19.034188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:25:19.034217Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:25:19.034234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710658:0, at tablet# 72057594046644480 2025-03-31T16:25:19.034257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710658 ready parts: 1/1 waiting... 2025-03-31T16:25:19.038615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710658 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:25:19.040616Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-03-31T16:25:19.040716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-03-31T16:25:19.040731Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-03-31T16:25:19.040748Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-03-31T16:25:19.040784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-03-31T16:25:19.041086Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-03-31T16:25:19.041175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-03-31T16:25:19.041190Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-03-31T16:25:19.041200Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2025-03-31T16:25:19.041216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-03-31T16:25:19.041272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710658, ready parts: 0/1, is published: true 2025-03-31T16:25:19.041416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-31T16:25:19.041434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710658, ready 
parts: 0/1, is published: true 2025-03-31T16:25:19.041466Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-31T16:25:19.041567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710658:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710658 msg type: 269090816 2025-03-31T16:25:19.041697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710658, partId: 4294967295, tablet: 72057594046316545 2025-03-31T16:25:19.042858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710658 2025-03-31T16:25:19.042922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710658 2025-03-31T16:25:19.043663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743438319091, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-31T16:25:19.043820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743438319091 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-03-31T16:25:19.043861Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710658:0, at tablet# 72057594046644480 2025-03-31T16:25:19.044032Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 240 2025-03-31T16:25:19.044072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710658:0, at tablet# 72057594046644480 2025-03-31T16:25:19.044192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-03-31T16:25:19.044229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-03-31T16:25:19.044260Z node 1 :FLAT_TX_SCHEMESHARD IN ... 
Scan started, actor: [31:7488010970138711937:2471], owner: [31:7488010970138711934:2469], scan id: 0, table id: [72057594046644480:1:0:auth_group_members] 2025-03-31T16:26:53.336221Z node 31 :SYSTEM_VIEWS INFO: Scan prepared, actor: [31:7488010970138711937:2471], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-03-31T16:26:53.336265Z node 31 :SYSTEM_VIEWS DEBUG: ProceedToScan, tenant name: /Root tenant owner: root@builtin subject sid: empty require admin access: 1 is admin: 1 2025-03-31T16:26:53.336347Z node 31 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:26:53.336866Z node 31 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [{ Sid: user3 },{ Sid: user2 },{ Sid: user1 }] Groups: [{ Sid: group1 Members: [user2,user1] },{ Sid: group3 Members: [user2,user1] },{ Sid: group2 Members: [user3,user2,user1] }] } Children [.metadata,Table0,Tenant1,Tenant2] }] } 2025-03-31T16:26:53.336933Z node 31 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [31:7488010970138711937:2471], row count: 1, finished: 1 2025-03-31T16:26:53.336961Z node 31 :SYSTEM_VIEWS INFO: Scan finished, actor: [31:7488010970138711937:2471], owner: [31:7488010970138711934:2469], scan id: 0, table id: [72057594046644480:1:0:auth_group_members] 2025-03-31T16:26:53.348716Z node 31 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438413316, txId: 281474976715697] shutting down 2025-03-31T16:26:53.349410Z node 31 :SYSTEM_VIEWS TRACE: Collect query stats: service id# [31:7488010888534330930:2079], database# , query hash# 10825990382896916327, cpu time# 340940 2025-03-31T16:26:53.547229Z node 32 :SYSTEM_VIEWS DEBUG: NSysView::TPartitionStatsCollector: TEvProcessOverloaded , top size by CPU # 0, top size by TLI # 0, time# 2025-03-31T16:26:53.546425Z 2025-03-31T16:26:53.555256Z node 35 :SYSTEM_VIEWS DEBUG: NSysView::TPartitionStatsCollector: TEvProcessOverloaded , top size by CPU # 0, top size by TLI # 0, time# 2025-03-31T16:26:53.554620Z 2025-03-31T16:26:53.613102Z node 31 :KQP_EXECUTER ERROR: TxId: 281474976715700. Ctx: { TraceId: 01jqpgdzhsebmr3n6apaw9bh62, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=31&id=YjA4NDdlNmEtMmJlNjAwZTktZWVjOTk5MGMtYjkzZDkwYmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-31T16:26:53.616953Z node 31 :SYSTEM_VIEWS INFO: Scan started, actor: [31:7488010970138711971:2480], owner: [31:7488010970138711968:2478], scan id: 0, table id: [72057594046644480:1:0:auth_group_members] 2025-03-31T16:26:53.647337Z node 31 :SYSTEM_VIEWS INFO: Scan prepared, actor: [31:7488010970138711971:2480], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-03-31T16:26:53.647386Z node 31 :SYSTEM_VIEWS DEBUG: ProceedToScan, tenant name: /Root tenant owner: root@builtin subject sid: empty require admin access: 1 is admin: 1 2025-03-31T16:26:53.647473Z node 31 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:26:53.653547Z node 31 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [{ Sid: user3 },{ Sid: user2 },{ Sid: user1 }] Groups: [{ Sid: group1 Members: [user2,user1] },{ Sid: group3 Members: [user2,user1] },{ Sid: group2 Members: [user3,user2,user1] }] } Children [.metadata,Table0,Tenant1,Tenant2] }] } 2025-03-31T16:26:53.653634Z node 31 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [31:7488010970138711971:2480], row count: 2, finished: 1 2025-03-31T16:26:53.653662Z node 31 :SYSTEM_VIEWS INFO: Scan finished, actor: [31:7488010970138711971:2480], owner: [31:7488010970138711968:2478], scan id: 0, table id: [72057594046644480:1:0:auth_group_members] 2025-03-31T16:26:53.658417Z node 31 :SYSTEM_VIEWS TRACE: Collect query stats: service id# [31:7488010888534330930:2079], database# , query hash# 12756478633923396544, cpu time# 267102 2025-03-31T16:26:53.659219Z node 31 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438413607, txId: 281474976715699] shutting down 2025-03-31T16:26:53.952827Z node 31 :KQP_EXECUTER ERROR: TxId: 281474976715702. Ctx: { TraceId: 01jqpgdzw1drrtt6bzfenje706, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=31&id=ZGRhYjk0ODctN2I4MzMwYzctNGUzNzY5YmItZTQyNjliYjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-31T16:26:53.955762Z node 31 :SYSTEM_VIEWS INFO: Scan started, actor: [31:7488010970138712013:2489], owner: [31:7488010970138712010:2487], scan id: 0, table id: [72057594046644480:1:0:auth_group_members] 2025-03-31T16:26:53.956873Z node 31 :SYSTEM_VIEWS INFO: Scan prepared, actor: [31:7488010970138712013:2489], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-03-31T16:26:53.956901Z node 31 :SYSTEM_VIEWS DEBUG: ProceedToScan, tenant name: /Root tenant owner: root@builtin subject sid: empty require admin access: 1 is admin: 1 2025-03-31T16:26:53.956977Z node 31 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:26:53.957309Z node 31 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [{ Sid: user3 },{ Sid: user2 },{ Sid: user1 }] Groups: [{ Sid: group1 Members: [user2,user1] },{ Sid: group3 Members: [user2,user1] },{ Sid: group2 Members: [user3,user2,user1] }] } Children [.metadata,Table0,Tenant1,Tenant2] }] } 2025-03-31T16:26:53.957361Z node 31 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [31:7488010970138712013:2489], row count: 1, finished: 1 2025-03-31T16:26:53.957385Z node 31 :SYSTEM_VIEWS INFO: Scan finished, actor: [31:7488010970138712013:2489], owner: [31:7488010970138712010:2487], scan id: 0, table id: [72057594046644480:1:0:auth_group_members] 2025-03-31T16:26:53.962913Z node 31 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438413940, txId: 281474976715701] shutting down 2025-03-31T16:26:53.964070Z node 31 :SYSTEM_VIEWS TRACE: Collect query stats: service id# [31:7488010888534330930:2079], database# , query hash# 11357838469093417614, cpu time# 225648 2025-03-31T16:26:54.027204Z node 31 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 35 2025-03-31T16:26:54.028154Z node 31 :HIVE WARN: HIVE#72057594037968897 Node(35, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-03-31T16:26:54.028325Z node 31 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 32 2025-03-31T16:26:54.028960Z node 31 :HIVE WARN: HIVE#72057594037968897 Node(32, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-03-31T16:26:54.051737Z node 31 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 34 2025-03-31T16:26:54.052774Z node 31 :HIVE WARN: HIVE#72057594037968897 Node(34, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-03-31T16:26:54.025427Z node 32 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 
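The scans above read the `auth_group_members` system view (table id `[72057594046644480:1:0:auth_group_members]`), returning the group membership visible in the navigate results (`group1`, `group2`, `group3`). Assuming the view is exposed under the usual `.sys` directory with `GroupSid`/`MemberSid` columns (both assumptions about this YDB version, not confirmed by the log), a client-side equivalent of such a scan might look like:

```python
import ydb

driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")  # hypothetical
driver.wait(timeout=5)
pool = ydb.SessionPool(driver)

def read_members(session):
    # Assumption: view path and column names follow the documented .sys layout.
    result = session.transaction(ydb.SerializableReadWrite()).execute(
        "SELECT GroupSid, MemberSid FROM `.sys/auth_group_members`;",
        commit_tx=True,
    )
    for row in result[0].rows:
        print(row.GroupSid, row.MemberSid)

pool.retry_operation_sync(read_members)
pool.stop()
driver.stop()
```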
2025-03-31T16:26:54.061033Z node 31 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[33:7488010905919523879:2107], Type=268959746 2025-03-31T16:26:54.061762Z node 35 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:26:54.077980Z node 31 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[33:7488010905919523879:2107], Type=268959746 2025-03-31T16:26:54.078334Z node 31 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[33:7488010905919523879:2107], Type=268959746 2025-03-31T16:26:54.078523Z node 31 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[33:7488010905919523879:2107], Type=268959746 2025-03-31T16:26:54.078701Z node 31 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[33:7488010905919523879:2107], Type=268959746 2025-03-31T16:26:54.078867Z node 31 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[33:7488010905919523879:2107], Type=268959746 2025-03-31T16:26:54.079114Z node 31 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 33 2025-03-31T16:26:54.079546Z node 31 :HIVE WARN: HIVE#72057594037968897 Node(33, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-03-31T16:26:54.080072Z node 31 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[34:7488010897772469410:2107], Type=268959746 2025-03-31T16:26:54.080114Z node 31 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[34:7488010897772469410:2107], Type=268959746 2025-03-31T16:26:54.080152Z node 31 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[34:7488010897772469410:2107], Type=268959746 2025-03-31T16:26:54.080204Z node 31 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[34:7488010897772469410:2107], Type=268959746 2025-03-31T16:26:54.080259Z node 31 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[34:7488010897772469410:2107], Type=268959746 2025-03-31T16:26:54.080300Z node 31 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[34:7488010897772469410:2107], Type=268959746 >> DemoTx::Scenario_5 >> KqpIndexLookupJoin::LeftOnlyJoinValueColumn+StreamLookup [GOOD] >> SystemView::AuthPermissions_Access [GOOD] >> KqpJoinOrder::TPCDS34-ColumnStore [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftOnlyJoinValueColumn+StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 65285, MsgBus: 9455 2025-03-31T16:26:51.772038Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010959815468194:2268];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:51.772179Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001927/r3tmp/tmprPtq5K/pdisk_1.dat 2025-03-31T16:26:52.387478Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:26:52.424230Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:26:52.424310Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:26:52.428008Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 65285, node 1 2025-03-31T16:26:52.675567Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:26:52.675588Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:26:52.675601Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:26:52.675733Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9455 TClient is connected to server localhost:9455 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:26:53.567533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:53.583967Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:26:53.610082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:53.864914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:54.053202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:54.205480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:56.511830Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010981290306231:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:56.511987Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:56.775371Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010959815468194:2268];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:56.775441Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:26:56.885389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:26:56.927560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:26:57.002969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:26:57.049492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:26:57.108322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:26:57.179980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:26:57.291303Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010985585274051:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:57.291383Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:57.291936Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010985585274056:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:57.296760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:26:57.311319Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010985585274058:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:26:57.482739Z node 1 :TX_PROXY ERROR: Actor# [1:7488010985585274113:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:26:58.924823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:26:58.950645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:26:58.987234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T16:26:59.071524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T16:26:59.144714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-31T16:26:59.180550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 >> DataShardWrite::WriteImmediateBadRequest [GOOD] >> DataShardWrite::WriteImmediateSeveralOperations >> TopicService::UseDoubleSlashInTopicPath >> KqpJoinOrder::TPCDS94-ColumnStore >> KqpWorkload::KV [GOOD] >> KqpNewEngine::ComplexLookupLimit [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCH8 >> KqpIndexLookupJoin::CheckCastUint64ToInt64+StreamLookupJoin+NotNull [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpWorkload::KV [GOOD] Test command err: Trying to start YDB, gRPC: 30630, MsgBus: 27694 2025-03-31T16:25:28.167147Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010601990674556:2268];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:28.167233Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000f40/r3tmp/tmpO6xSEs/pdisk_1.dat 2025-03-31T16:25:28.619750Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:25:28.626591Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:28.626700Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:28.630504Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30630, node 1 2025-03-31T16:25:28.819652Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:25:28.819675Z node 1 :NET_CLASSIFIER 
WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:25:28.819687Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:25:28.819801Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27694 TClient is connected to server localhost:27694 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:25:29.677801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:31.376635Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010614875576898:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:31.377307Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:31.868907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:25:32.441829Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010619170545848:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:32.441943Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010619170545853:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:32.441951Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:32.445699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-31T16:25:32.456437Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010619170545855:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-31T16:25:32.542879Z node 1 :TX_PROXY ERROR: Actor# [1:7488010619170545932:3406] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:25:33.167129Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010601990674556:2268];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:33.167229Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:25:43.603211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:25:43.603249Z node 1 :IMPORT WARN: Table profiles were not loaded took: 0.184231s took: 0.234786s took: 0.192072s took: 0.242278s took: 0.235246s took: 0.270433s took: 0.274863s took: 0.281629s took: 0.282669s took: 0.284208s took: 0.237030s took: 0.236275s took: 0.236616s took: 0.236996s took: 0.240861s took: 0.241275s took: 0.241381s took: 0.244250s took: 0.248516s took: 0.245048s took: 0.499805s took: 0.501063s took: 0.501700s took: 0.501983s took: 0.501835s took: 0.498242s took: 0.502288s took: 0.488400s took: 0.505064s took: 0.505209s took: 0.148445s took: 0.149247s took: 0.150804s took: 0.151340s took: 0.153794s took: 0.155962s took: 0.158823s took: 0.159886s took: 0.152720s took: 0.162186s took: 0.267391s took: 0.269825s took: 0.271579s took: 0.272076s took: 0.272302s took: 0.293232s took: 0.304139s took: 0.309725s took: 0.309171s took: 0.338295s 2025-03-31T16:27:01.588525Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037922 not found 2025-03-31T16:27:01.588599Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037917 not found 2025-03-31T16:27:01.588616Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037926 not found 2025-03-31T16:27:01.588631Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037909 not found 2025-03-31T16:27:01.588646Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037923 not found 2025-03-31T16:27:01.588666Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037908 not found 2025-03-31T16:27:01.588683Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037903 not found 2025-03-31T16:27:01.588700Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-03-31T16:27:01.588716Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037901 not found 2025-03-31T16:27:01.588730Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037927 not found 2025-03-31T16:27:01.588745Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037900 not found 2025-03-31T16:27:01.588761Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037911 not found 2025-03-31T16:27:01.588775Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037905 not found 2025-03-31T16:27:01.588788Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037904 not found 2025-03-31T16:27:01.588824Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2025-03-31T16:27:01.588841Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037916 not found 2025-03-31T16:27:01.588857Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037925 not found 2025-03-31T16:27:01.588873Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037913 not found 2025-03-31T16:27:01.588888Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037921 not found 2025-03-31T16:27:01.588902Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037895 not found 2025-03-31T16:27:01.588916Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037915 not found 2025-03-31T16:27:01.588931Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037897 not found 2025-03-31T16:27:01.667984Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037893 not found 2025-03-31T16:27:01.720617Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037891 not found 2025-03-31T16:27:01.720660Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037896 not found 2025-03-31T16:27:01.720676Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037902 not found 2025-03-31T16:27:01.720691Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037924 not found 2025-03-31T16:27:01.720708Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2025-03-31T16:27:01.720723Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037918 not found 2025-03-31T16:27:01.720760Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037920 not found 2025-03-31T16:27:01.720785Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037898 not found 2025-03-31T16:27:01.720801Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037914 not found 2025-03-31T16:27:01.720818Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037894 not found 2025-03-31T16:27:01.720834Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037907 not found 2025-03-31T16:27:01.720848Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037906 not found 2025-03-31T16:27:01.720871Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037919 not found 2025-03-31T16:27:01.720886Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037892 not found 2025-03-31T16:27:01.720902Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037912 not found 2025-03-31T16:27:01.720917Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037910 not found 2025-03-31T16:27:01.720934Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037899 not found ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS34-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 17807, MsgBus: 26414 2025-03-31T16:26:03.865349Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010753742902133:2191];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:03.865457Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001964/r3tmp/tmpUoAh1a/pdisk_1.dat 2025-03-31T16:26:04.623380Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:26:04.624156Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:26:04.624239Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:26:04.629823Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17807, node 1 2025-03-31T16:26:04.958513Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:26:04.958556Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:26:04.958578Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:26:04.958706Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26414 TClient is connected to server localhost:26414 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:26:05.658366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:05.683688Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:26:08.089716Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010775217739154:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:08.089828Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010775217739165:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:08.089894Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:08.095323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:26:08.110992Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010775217739168:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:26:08.184080Z node 1 :TX_PROXY ERROR: Actor# [1:7488010775217739219:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:26:08.519270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:26:08.666423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:26:08.712070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:26:08.752753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:26:08.829480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:26:08.867727Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010753742902133:2191];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:08.867930Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:26:09.026882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:26:09.108716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:26:09.149782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:26:09.188753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-31T16:26:09.222806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-03-31T16:26:09.262506Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-03-31T16:26:09.301151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 
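The NOT_FOUND / "doublechecking" / "path exist, request accepts it" sequence repeated throughout these runs is the workload service bootstrapping the default resource pool at /Root/.metadata/workload_manager/pools/default: TPoolFetcherActor misses, TPoolCreatorActor retries until the schemeshard transaction lands, and the follow-up create is then rejected because the path already exists. A minimal YQL sketch of what the pool creator converges on (the WITH parameters are illustrative assumptions, not values taken from this log):

CREATE RESOURCE POOL `default` WITH (
    CONCURRENT_QUERY_LIMIT = -1,  -- assumed: no limit on concurrent queries
    QUEUE_SIZE = -1               -- assumed: unbounded request queue
);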
2025-03-31T16:26:09.340654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:26:10.027839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-03-31T16:26:10.065894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T16:26:10.133213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-31T16:26:10.182991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-31T16:26:10.213875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-31T16:26:10.245476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-31T16:26:10.270728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-31T16:26:10.296260Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-31T16:26:10.324190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-03-31T16:26:10.361851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-31T16:26:10.394223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-31T16:26:10.462516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-03-31T16:26:10.534769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-03-31T16:26:10.570872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-03-31T16:26:10.603493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-03-31T16:26:10.651821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-03-31T16:26:10.687032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTa ... oller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:40.986691Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038647;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:40.986926Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038641;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:40.996399Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038659;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:41.001057Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038635;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:41.006137Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038631;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:41.016077Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038637;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:41.019160Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038643;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:41.022059Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038625;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:41.024104Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038470;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:41.027840Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038657;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:41.033763Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038616;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:41.034455Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038654;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:41.039577Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038602;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:41.042034Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038612;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:41.045358Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038610;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:41.047825Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038618;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:41.051559Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038590;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:41.054224Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038614;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:41.060996Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038600;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:41.066548Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038596;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:41.072227Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038608;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:41.077840Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038594;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:41.083397Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038630;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:41.088978Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038620;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:41.094696Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038658;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:41.100355Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038628;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:41.106420Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038604;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:41.112088Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038636;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:41.117674Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038646;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:41.123495Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038626;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:41.123495Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038634;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:41.128958Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038622;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:41.133783Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038638;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:41.135280Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038606;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:41.139622Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038652;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:41.141331Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038592;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:41.145614Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038632;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:41.146835Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038642;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:41.151866Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038656;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:41.152630Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038650;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:41.158869Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038644;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:41.160576Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038660;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:41.164812Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038640;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:41.166636Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038624;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:41.169833Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038648;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:26:41.268571Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpgcq575vjnfyxag89f32jr", SessionId: ydb://session/3?node_id=1&id=YTRkYTAyZWMtM2EyZmZjYS1kOTY0MWZhZS1jOTQ2NjJhMQ==, Slow query, duration: 29.260328s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:26:41.821466Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:26:41.821980Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:26:41.823167Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;self_id=[1:7488010882591948538:6022];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038629;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038170;receive=72075186224038331; 2025-03-31T16:26:41.823644Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest >> SystemView::AuthPermissions_Access [GOOD] Test command err: 2025-03-31T16:25:17.914045Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010555264652310:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:17.914181Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0019dd/r3tmp/tmpApAG7E/pdisk_1.dat 2025-03-31T16:25:18.223402Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62881, node 1 2025-03-31T16:25:18.251932Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:25:18.252082Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:25:18.271883Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:18.271991Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:18.274277Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:25:18.392566Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:25:18.392594Z node 1 :NET_CLASSIFIER WARN: 
will try to initialize from file: (empty maybe) 2025-03-31T16:25:18.392604Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:25:18.392720Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1998 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:25:18.874496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:19.043132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateExtSubDomain SubDomain { Name: "Tenant1" } } TxId: 281474976710658 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-03-31T16:25:19.043478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateExtSubDomain Propose, path/Root/Tenant1, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:25:19.043579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: Tenant1, child id: [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-03-31T16:25:19.043758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 0 2025-03-31T16:25:19.043950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-03-31T16:25:19.044105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-31T16:25:19.044166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:25:19.044253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-03-31T16:25:19.044335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-03-31T16:25:19.046928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710658, response: Status: StatusAccepted TxId: 281474976710658 SchemeshardId: 72057594046644480 
PathId: 2, at schemeshard: 72057594046644480 2025-03-31T16:25:19.047134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE DATABASE, path: /Root/Tenant1 2025-03-31T16:25:19.047370Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-03-31T16:25:19.047422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-03-31T16:25:19.047559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2025-03-31T16:25:19.047665Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-03-31T16:25:19.047702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7488010559559620244:2405], at schemeshard: 72057594046644480, txId: 281474976710658, path id: 1 2025-03-31T16:25:19.047731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7488010559559620244:2405], at schemeshard: 72057594046644480, txId: 281474976710658, path id: 2 2025-03-31T16:25:19.047795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:25:19.047843Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:25:19.047863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710658:0, at tablet# 72057594046644480 waiting...2025-03-31T16:25:19.047890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710658 ready parts: 1/1 2025-03-31T16:25:19.052233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710658 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:25:19.054281Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-03-31T16:25:19.054373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-03-31T16:25:19.054385Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-03-31T16:25:19.054406Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-03-31T16:25:19.054427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-03-31T16:25:19.054746Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 
72057594046644480, cookie: 281474976710658 2025-03-31T16:25:19.054799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-03-31T16:25:19.054806Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-03-31T16:25:19.054816Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2025-03-31T16:25:19.054826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-03-31T16:25:19.054881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710658, ready parts: 0/1, is published: true 2025-03-31T16:25:19.055554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-31T16:25:19.055579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710658, ready parts: 0/1, is published: true 2025-03-31T16:25:19.055602Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-31T16:25:19.055692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710658:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710658 msg type: 269090816 2025-03-31T16:25:19.055816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710658, partId: 4294967295, tablet: 72057594046316545 2025-03-31T16:25:19.057136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710658 2025-03-31T16:25:19.057196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710658 2025-03-31T16:25:19.058526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743438319105, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-31T16:25:19.058724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743438319105 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-03-31T16:25:19.058769Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710658:0, at tablet# 72057594046644480 2025-03-31T16:25:19.058961Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 240 2025-03-31T16:25:19.059023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710658:0, at tablet# 72057594046644480 2025-03-31T16:25:19.059198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-03-31T16:25:19.059241Z node 1 :FLAT_TX_SCHEMESHAR ... 
t: 0 ResultSet [{ Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:26:57.616280Z node 31 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [{ Sid: user6tenant1admin },{ Sid: user2 },{ Sid: user1rootadmin }] Groups: [] } Children [.metadata,Dir1,Dir2,Table0,Tenant1,Tenant2] }] } 2025-03-31T16:26:57.616401Z node 31 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [31:7488010984624784500:2458], row count: 4, finished: 0 2025-03-31T16:26:57.616592Z node 31 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:26:57.617254Z node 31 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata TableId: [72057594046644480:5:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Children [workload_manager] }] } 2025-03-31T16:26:57.617323Z node 31 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [31:7488010984624784500:2458], row count: 0, finished: 0 2025-03-31T16:26:57.617677Z node 31 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:26:57.623379Z node 32 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:26:57.623414Z node 32 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:26:57.635502Z node 31 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager TableId: [72057594046644480:6:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: 
(empty maybe) Users: [] Groups: [] } Children [pools] }] } 2025-03-31T16:26:57.635605Z node 31 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [31:7488010984624784500:2458], row count: 0, finished: 0 2025-03-31T16:26:57.636887Z node 31 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/pools TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:26:57.639519Z node 31 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/pools TableId: [72057594046644480:7:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Children [default] }] } 2025-03-31T16:26:57.639595Z node 31 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [31:7488010984624784500:2458], row count: 0, finished: 0 2025-03-31T16:26:57.640076Z node 31 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/pools/default TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:26:57.640357Z node 31 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/pools/default TableId: [72057594046644480:8:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindResourcePool DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-31T16:26:57.640470Z node 31 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [31:7488010984624784500:2458], row count: 6, finished: 0 2025-03-31T16:26:57.641871Z node 31 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:26:57.647634Z node 31 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir1 TableId: [72057594046644480:9:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 
72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Children [] }] } 2025-03-31T16:26:57.647726Z node 31 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [31:7488010984624784500:2458], row count: 1, finished: 0 2025-03-31T16:26:57.653620Z node 32 :SYSTEM_VIEWS DEBUG: NSysView::TPartitionStatsCollector: TEvProcessOverloaded , top size by CPU # 0, top size by TLI # 0, time# 2025-03-31T16:26:57.653517Z 2025-03-31T16:26:57.654317Z node 31 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir2 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:26:57.655023Z node 31 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir2 TableId: [72057594046644480:10:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Children [] }] } 2025-03-31T16:26:57.655123Z node 31 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [31:7488010984624784500:2458], row count: 0, finished: 0 2025-03-31T16:26:57.663660Z node 31 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:26:57.664354Z node 31 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table0 TableId: [72057594046644480:4:1] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-31T16:26:57.664411Z node 31 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [31:7488010984624784500:2458], row count: 0, finished: 0 2025-03-31T16:26:57.666670Z node 31 :SYSTEM_VIEWS INFO: Scan finished, actor: [31:7488010984624784500:2458], owner: [31:7488010984624784497:2456], scan id: 0, table id: [72057594046644480:1:0:auth_permissions] 2025-03-31T16:26:57.681756Z node 31 :SYSTEM_VIEWS TRACE: Collect query stats: service id# [31:7488010903020403475:2148], database# , query hash# 12107705915200741666, cpu time# 205845 2025-03-31T16:26:57.682415Z node 31 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438417599, txId: 281474976710692] shutting down 2025-03-31T16:26:57.762686Z node 35 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:26:57.757666Z node 31 
:HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 34 2025-03-31T16:26:57.758188Z node 31 :HIVE WARN: HIVE#72057594037968897 Node(34, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-03-31T16:26:57.759141Z node 31 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 35 2025-03-31T16:26:57.760153Z node 31 :HIVE WARN: HIVE#72057594037968897 Node(35, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-03-31T16:26:57.767178Z node 31 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 33 2025-03-31T16:26:57.767595Z node 31 :HIVE WARN: HIVE#72057594037968897 Node(33, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-03-31T16:26:57.767726Z node 31 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 32 2025-03-31T16:26:57.773184Z node 31 :HIVE WARN: HIVE#72057594037968897 Node(32, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-03-31T16:26:57.779814Z node 31 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[35:7488010909238148749:2107], Type=268959746 2025-03-31T16:26:57.772574Z node 32 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 >> KqpJoin::FullOuterJoinNotNullJoinKey [GOOD] |92.6%| [TA] $(B)/ydb/core/kqp/ut/perf/test-results/unittest/{meta.json ... results_accumulator.log} |92.6%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/perf/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpSqlIn::SecondaryIndex_SimpleKey_In_And_In [GOOD] >> KqpSqlIn::SecondaryIndex_ComplexKey_In_And_In ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::ComplexLookupLimit [GOOD] Test command err: Trying to start YDB, gRPC: 15675, MsgBus: 7868 2025-03-31T16:26:04.219342Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010757282070484:2274];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:04.219408Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ab7/r3tmp/tmpIRegPF/pdisk_1.dat 2025-03-31T16:26:04.845678Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:26:04.855428Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:26:04.855542Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:26:04.858055Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15675, node 1 2025-03-31T16:26:04.995949Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:26:04.995986Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:26:04.995997Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:26:04.996184Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7868 TClient is connected to server localhost:7868 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:26:05.815212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:08.298679Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010774461940114:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:08.298790Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:08.575532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:26:08.740729Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010774461940217:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:08.740819Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:08.747335Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010774461940222:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:08.752495Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-31T16:26:08.776140Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010774461940224:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-31T16:26:08.863853Z node 1 :TX_PROXY ERROR: Actor# [1:7488010774461940277:2402] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:26:09.215005Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010757282070484:2274];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:09.215134Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 19692, MsgBus: 4000 2025-03-31T16:26:11.264589Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488010787968089754:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:11.275633Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ab7/r3tmp/tmpeuIZnH/pdisk_1.dat 2025-03-31T16:26:11.573272Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:26:11.593382Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:26:11.593477Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:26:11.600460Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19692, node 2 2025-03-31T16:26:11.739573Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:26:11.739616Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:26:11.739626Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:26:11.739736Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4000 TClient is connected to server localhost:4000 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:26:12.247667Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:12.275464Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:26:12.320070Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:12.449720Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:12.636920Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:12.711015Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:15.147617Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010805147960694:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:15.147736Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:15.218537Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:26:15.273326Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:26:15.329532Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:26:15.407752Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:26:15.456040Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:26:15.514048Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:26:15.610089Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [T ... hemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:48.267229Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7488010924601547733:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:48.267338Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:26:48.711527Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488010946076385978:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:48.711662Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:48.771557Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:26:48.855275Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:26:48.913250Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:26:48.985985Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:26:49.088939Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:26:49.148150Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:26:49.256118Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488010950371353798:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:49.256256Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:49.256597Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488010950371353803:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:49.261969Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:26:49.283357Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7488010950371353805:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:26:49.359351Z node 6 :TX_PROXY ERROR: Actor# [6:7488010950371353862:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:26:50.974418Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 28471, MsgBus: 3937 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ab7/r3tmp/tmpHgKaZY/pdisk_1.dat 2025-03-31T16:26:53.434247Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:26:53.619054Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:26:53.666591Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:26:53.666708Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:26:53.677610Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28471, node 7 2025-03-31T16:26:53.879815Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:26:53.879844Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:26:53.879855Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:26:53.880029Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3937 TClient is connected to server localhost:3937 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-31T16:26:54.896301Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:26:54.915397Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:26:55.053753Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:55.344371Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:55.459848Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:58.844529Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488010988684773961:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:58.844652Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:59.015199Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:26:59.073227Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:26:59.168728Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:26:59.248957Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:26:59.354682Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:26:59.482651Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:26:59.597188Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488010992979741779:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:59.597301Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:59.598873Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488010992979741784:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:59.603684Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:26:59.632319Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7488010992979741786:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:26:59.727222Z node 7 :TX_PROXY ERROR: Actor# [7:7488010992979741841:3462] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:27:01.321705Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 |92.6%| [TA] $(B)/ydb/core/sys_view/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::CheckCastUint64ToInt64+StreamLookupJoin+NotNull [GOOD] Test command err: Trying to start YDB, gRPC: 1955, MsgBus: 3715 2025-03-31T16:26:47.629692Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010941789569733:2208];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:47.661978Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00192c/r3tmp/tmpOvLkFC/pdisk_1.dat 2025-03-31T16:26:48.449139Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:26:48.449221Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:26:48.466336Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:26:48.511857Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1955, node 1 2025-03-31T16:26:48.710407Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:26:48.710427Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:26:48.710435Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:26:48.710547Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3715 TClient is connected to server localhost:3715 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:26:49.504386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:49.550622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:49.774936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:26:49.960765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:50.091170Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:26:52.100450Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010963264407843:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:52.100547Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:52.453249Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:26:52.531617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:26:52.594228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:26:52.634031Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010941789569733:2208];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:52.634130Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:26:52.637011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:26:52.680600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:26:52.752341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:26:52.829812Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010963264408362:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:52.829903Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:52.830205Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010963264408367:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:52.834308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:26:52.848061Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-31T16:26:52.848841Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010963264408369:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:26:52.933329Z node 1 :TX_PROXY ERROR: Actor# [1:7488010963264408425:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:26:54.241908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:26:54.319133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 5736, MsgBus: 4734 2025-03-31T16:26:56.162688Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488010980665366192:2208];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:56.219797Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00192c/r3tmp/tmpoNrDX6/pdisk_1.dat 2025-03-31T16:26:56.343678Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:26:56.368100Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:26:56.368188Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:26:56.372881Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5736, node 2 2025-03-31T16:26:56.565009Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:26:56.565034Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:26:56.565042Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:26:56.565173Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4734 TClient is connected to server localhost:4734 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:26:57.424841Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:57.446076Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:26:57.465354Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:57.547229Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:57.743164Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:57.842853Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:00.603243Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010997845236987:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:00.603355Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:00.669848Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:27:00.708981Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:27:00.760600Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:27:00.812490Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:27:00.847585Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:27:00.910673Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:27:01.000300Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010997845237500:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:01.000426Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:01.000743Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488011002140204801:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:01.005083Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:27:01.024032Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2025-03-31T16:27:01.024845Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488011002140204803:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:27:01.103190Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488010980665366192:2208];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:27:01.103521Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:27:01.106209Z node 2 :TX_PROXY ERROR: Actor# [2:7488011002140204859:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:27:02.180966Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:27:02.329201Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 |92.6%| [TA] {RESULT} $(B)/ydb/core/sys_view/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::FullOuterJoinNotNullJoinKey [GOOD] Test command err: Trying to start YDB, gRPC: 17210, MsgBus: 13947 2025-03-31T16:26:57.338947Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010984867171258:2200];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:57.339409Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001925/r3tmp/tmpu0NJjU/pdisk_1.dat 2025-03-31T16:26:57.949225Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:26:57.970940Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:26:57.971021Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:26:57.975797Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17210, node 1 2025-03-31T16:26:58.097944Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:26:58.098664Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:26:58.098694Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:26:58.098835Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13947 TClient is connected to server localhost:13947 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:26:58.918368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:58.933855Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:26:58.938651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:59.173390Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:59.425193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:59.505319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:01.426977Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011002047042099:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:01.427119Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:01.746026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:27:01.827829Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:27:01.859897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:27:01.904140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:27:01.948093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:27:02.022437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:27:02.129274Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011006342009914:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:02.129341Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:02.129599Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011006342009919:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:02.133846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:27:02.160502Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011006342009921:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:27:02.242039Z node 1 :TX_PROXY ERROR: Actor# [1:7488011006342009977:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:27:02.307340Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010984867171258:2200];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:27:02.328836Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:27:03.580374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:27:03.619614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 >> KqpIndexLookupJoin::Inner+StreamLookup [GOOD] >> KqpIndexLookupJoin::Inner-StreamLookup >> KqpJoin::AllowJoinsForComplexPredicates+StreamLookup >> KqpIndexLookupJoin::LeftJoinOnlyRightColumn+StreamLookup >> KqpJoinOrder::TPCDS96+ColumnStore >> DataShardWrite::WriteImmediateSeveralOperations [GOOD] >> DataShardWrite::UpsertPreparedManyTables+Volatile >> KqpReturning::ReplaceSerial [GOOD] >> KqpReturning::ReturningSerial >> KqpNewEngine::DeleteWithInputMultiConsumption+UseSink [GOOD] >> KqpJoinOrder::FourWayJoinWithPredsAndEquivAndLeft+ColumnStore >> KqpIndexLookupJoin::SimpleInnerJoin-StreamLookup >> KqpNewEngine::StreamLookupForDataQuery-StreamLookupJoin [GOOD] >> KqpIndexLookupJoin::LeftJoinCustomColumnOrder-StreamLookup [GOOD] >> KqpJoinOrder::TPCDS90-ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::DeleteWithInputMultiConsumption+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 26761, MsgBus: 6525 2025-03-31T16:26:07.749362Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010771052752140:2273];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:07.749426Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ab3/r3tmp/tmpqcH5K1/pdisk_1.dat 2025-03-31T16:26:08.355528Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:26:08.361300Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:26:08.361396Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:26:08.364172Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26761, node 1 2025-03-31T16:26:08.462909Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:26:08.462935Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 
2025-03-31T16:26:08.462950Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:26:08.463076Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6525 TClient is connected to server localhost:6525 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:26:09.217149Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:09.233751Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:26:09.248749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:26:09.422377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-31T16:26:09.633732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:09.725227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:11.420178Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010788232622885:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:11.420298Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:11.807799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:26:11.848720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:26:11.930842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:26:11.973797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:26:12.018741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:26:12.139945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:26:12.195503Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010792527590699:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:12.195581Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:12.195932Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010792527590704:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:12.200126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:26:12.220744Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010792527590706:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:26:12.287647Z node 1 :TX_PROXY ERROR: Actor# [1:7488010792527590758:3444] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:26:12.751068Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010771052752140:2273];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:12.751203Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 32567, MsgBus: 1632 2025-03-31T16:26:14.517836Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488010801541199395:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:14.566354Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ab3/r3tmp/tmpkmKfcY/pdisk_1.dat 2025-03-31T16:26:14.709121Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:26:14.733296Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:26:14.733411Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:26:14.735392Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32567, node 2 2025-03-31T16:26:14.855747Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:26:14.855771Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:26:14.855780Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:26:14.855905Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1632 TClient is connected to server localhost:1632 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:26:15.452394Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:15.478050Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:15.538396Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:15.734237Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:15.835072Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTab ... line=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7488010933654093705:2146];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:50.592846Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:26:50.768297Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488010955128931856:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:50.768437Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:50.850095Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:26:50.888228Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:26:50.937185Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:26:50.986307Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:26:51.045947Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:26:51.130203Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:26:51.251504Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488010959423899670:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:51.251644Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:51.252074Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488010959423899675:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:51.257809Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:26:51.284202Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7488010959423899677:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:26:51.378835Z node 6 :TX_PROXY ERROR: Actor# [6:7488010959423899733:3456] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 3160, MsgBus: 19166 2025-03-31T16:26:56.273418Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7488010980171815936:2212];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ab3/r3tmp/tmpWkMh62/pdisk_1.dat 2025-03-31T16:26:56.552886Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:26:56.620216Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:26:56.642582Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:26:56.642880Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:26:56.645069Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3160, node 7 2025-03-31T16:26:56.890077Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:26:56.890106Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:26:56.890118Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:26:56.890632Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19166 TClient is connected to server localhost:19166 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:26:57.666645Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:57.698042Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:26:57.802567Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:58.068095Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:58.159956Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:01.274437Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7488010980171815936:2212];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:27:01.284624Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:27:02.395317Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488011005941621330:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:02.395483Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:02.453015Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:27:02.529995Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:27:02.612151Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:27:02.676601Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:27:02.741208Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:27:02.827550Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:27:02.912241Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488011005941621856:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:02.912364Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:02.912688Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488011005941621861:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:02.917894Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:27:02.936409Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7488011005941621863:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:27:03.032730Z node 7 :TX_PROXY ERROR: Actor# [7:7488011010236589222:3458] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpReturning::ReturningWorksIndexedDelete-QueryService [GOOD] >> KqpReturning::ReturningWorksIndexedDeleteV2+QueryService >> KqpSqlIn::TupleNotOnlyOfKeys [GOOD] >> KqpJoinOrder::DatetimeConstantFold-ColumnStore [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftJoinCustomColumnOrder-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 10888, MsgBus: 21171 2025-03-31T16:26:50.785896Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010955144840690:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:50.785948Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00192a/r3tmp/tmpYhOQow/pdisk_1.dat 2025-03-31T16:26:51.458375Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:26:51.470695Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:26:51.470793Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:26:51.477654Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10888, node 1 2025-03-31T16:26:51.749830Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:26:51.749851Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:26:51.749862Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:26:51.749958Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21171 TClient is connected to server localhost:21171 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:26:52.515918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:52.526703Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:26:52.545711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:52.803599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:53.054609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:53.163169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:55.473408Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010976619678961:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:55.473524Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:55.770963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:26:55.791210Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010955144840690:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:55.791304Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:26:55.838270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:26:55.916349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:26:55.971992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:26:56.027915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:26:56.113713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:26:56.203510Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010980914646776:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:56.203618Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:56.204076Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010980914646781:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:56.208570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:26:56.224080Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010980914646783:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:26:56.286758Z node 1 :TX_PROXY ERROR: Actor# [1:7488010980914646837:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:26:57.377378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:26:57.415917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:26:57.465239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T16:26:57.524802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T16:26:57.602727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-31T16:26:57.648311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 24861, MsgBus: 26126 2025-03-31T16:26:59.352304Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488010992394213296:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:59.352338Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00192a/r3tmp/tmpsB8H8d/pdisk_1.dat 2025-03-31T16:26:59.653449Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:26:59.653526Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:26:59.656649Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:26:59.658809Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24861, node 2 2025-03-31T16:26:59.815600Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:26:59.815622Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:26:59.815632Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:26:59.815742Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26126 TClient is connected to server localhost:26126 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:27:00.376262Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:00.383909Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:27:00.395427Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:00.511990Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:00.759975Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:27:00.858594Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:27:04.125929Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488011013869051537:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:04.126048Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:04.225185Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:27:04.312019Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:27:04.353299Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488010992394213296:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:27:04.353355Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:27:04.400054Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:27:04.476510Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:27:04.527800Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:27:04.680942Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:27:04.778839Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488011013869052071:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:04.778983Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:04.779440Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488011013869052076:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:04.783395Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:27:04.807535Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488011013869052078:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:27:04.866630Z node 2 :TX_PROXY ERROR: Actor# [2:7488011013869052134:3459] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:27:06.128314Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:27:06.186173Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:27:06.268942Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T16:27:06.333347Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T16:27:06.394935Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-31T16:27:06.457150Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::StreamLookupForDataQuery-StreamLookupJoin [GOOD] Test command err: Trying to start YDB, gRPC: 19336, MsgBus: 14215 2025-03-31T16:26:14.770825Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010802889271315:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:14.774710Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000aac/r3tmp/tmpGnGhAt/pdisk_1.dat 2025-03-31T16:26:15.253499Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:26:15.287020Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected TServer::EnableGrpc on GrpcPort 19336, node 1 2025-03-31T16:26:15.287299Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:26:15.307645Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:26:15.390649Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:26:15.390671Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:26:15.390683Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:26:15.390779Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14215 TClient is connected to server 
localhost:14215 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:26:16.047559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:18.060300Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010820069141154:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:18.060415Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:18.485548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:26:18.631942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-03-31T16:26:18.722140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-03-31T16:26:18.778792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715762:2, at schemeshard: 72057594046644480 2025-03-31T16:26:18.873305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715763:0, at schemeshard: 72057594046644480 2025-03-31T16:26:18.950899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:26:19.008076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715766:2, at schemeshard: 72057594046644480 2025-03-31T16:26:19.061253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715767:0, at schemeshard: 72057594046644480 2025-03-31T16:26:19.105527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715770:2, at schemeshard: 72057594046644480 2025-03-31T16:26:19.156900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715771:0, at schemeshard: 72057594046644480 2025-03-31T16:26:19.196866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715774:2, at schemeshard: 72057594046644480 2025-03-31T16:26:19.240900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715775:0, at schemeshard: 72057594046644480 2025-03-31T16:26:19.264785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:26:19.300983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715778:2, at schemeshard: 72057594046644480 2025-03-31T16:26:19.380927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715779:0, at schemeshard: 72057594046644480 2025-03-31T16:26:19.414226Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715782:2, at schemeshard: 72057594046644480 2025-03-31T16:26:19.503714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715783:0, at schemeshard: 72057594046644480 2025-03-31T16:26:19.567511Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010824364109806:2412], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:19.567606Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:19.772386Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010802889271315:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:19.772477Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:26:19.805960Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010824364110112:2449], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:19.806003Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:19.806243Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010824364110117:2452], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:19.809942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-03-31T16:26:19.826685Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010824364110119:2453], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-03-31T16:26:19.911102Z node 1 :TX_PROXY ERROR: Actor# [1:7488010824364110174:3529] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 23], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } [] Trying to start YDB, gRPC: 12018, MsgBus: 8539 2025-03-31T16:26:24.840189Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488010842377504521:2125];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:24.840214Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000aac/r3tmp/tmpyXsbWT/pdisk_1.dat 2025-03-31T16:26:25.134061Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:26:25.196970Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:26:25.197093Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:26:25.200481Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12018, node 2 2025-03-31T16:26:25.282666Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:26:25.282697Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:26:25.282704Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:26:25.282840Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8539 TClient is connected to server localhost:8539 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient ... ne=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7488010941481320124:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:52.084416Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:26:52.372848Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7488010962956158376:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:52.372978Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:52.506385Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:26:52.601946Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:26:52.664934Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:26:52.726684Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:26:52.803679Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:26:52.898429Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:26:52.975552Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7488010962956158892:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:52.975694Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:52.976044Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7488010962956158897:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:52.981412Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:26:52.998666Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7488010962956158899:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:26:53.092774Z node 5 :TX_PROXY ERROR: Actor# [5:7488010967251126251:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 22611, MsgBus: 25119 2025-03-31T16:26:57.617036Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7488010986089513597:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:57.617145Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000aac/r3tmp/tmpIhp2M4/pdisk_1.dat 2025-03-31T16:26:57.957300Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:26:57.969718Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:26:57.971226Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:26:57.976680Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22611, node 6 2025-03-31T16:26:58.070638Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:26:58.070666Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:26:58.070675Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:26:58.070841Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25119 TClient is connected to server localhost:25119 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:26:58.864959Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:58.907133Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:26:59.090523Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:59.401353Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:59.511493Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:02.623848Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7488010986089513597:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:27:02.623936Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:27:03.768523Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488011011859319149:2410], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:03.768649Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:03.807062Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:27:03.857554Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:27:03.918102Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:27:03.979033Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:27:04.130811Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:27:04.227607Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:27:04.355562Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488011016154286979:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:04.355714Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:04.355895Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488011016154286984:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:04.361520Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:27:04.386862Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7488011016154286987:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:27:04.488054Z node 6 :TX_PROXY ERROR: Actor# [6:7488011016154287047:3463] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpJoin::JoinLeftPureInner ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpSqlIn::TupleNotOnlyOfKeys [GOOD] Test command err: Trying to start YDB, gRPC: 2303, MsgBus: 26648 2025-03-31T16:25:58.420052Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010733901058480:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:58.420104Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ac1/r3tmp/tmpnAWqq0/pdisk_1.dat 2025-03-31T16:25:58.937865Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:58.937999Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:58.939497Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:25:58.943314Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2303, node 1 2025-03-31T16:25:59.026479Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:25:59.026516Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:25:59.026525Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:25:59.026685Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26648 TClient is connected to server localhost:26648 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:25:59.681649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:25:59.755302Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:25:59.782171Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:59.932346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:00.125835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:00.219424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:02.149043Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010751080929452:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:02.149172Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:02.520474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:26:02.621141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:26:02.665903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:26:02.745578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:26:02.789128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:26:02.845597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:26:02.946713Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010751080929977:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:02.946799Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:02.947027Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010751080929982:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:02.951959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:26:02.970642Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010751080929984:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:26:03.064324Z node 1 :TX_PROXY ERROR: Actor# [1:7488010755375897335:3457] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:26:03.420528Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010733901058480:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:03.420610Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:26:04.143260Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:26:04.194040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:26:04.251598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480
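Note on the sequence above: the NOT_FOUND fetch warnings, the "Scheduled retry for error" line, and the subsequent "path exist, request accepts it" issue are the workload manager's normal bootstrap of the default resource pool at /Root/.metadata/workload_manager/pools/default: the fetcher misses, the creator races the schemeshard transaction, retries after "doublechecking", and the second attempt finds the pool already materialized. For reference, such a pool can also be declared explicitly; the following is a minimal YQL sketch, assuming the CREATE RESOURCE POOL statement and these setting names are available in the build under test (the pool name and limit values are illustrative, not taken from this log):

-- Hypothetical pool definition; name, settings, and values are assumptions for illustration.
CREATE RESOURCE POOL test_pool WITH (
    CONCURRENT_QUERY_LIMIT = 10,  -- cap on concurrently executing queries in this pool
    QUEUE_SIZE = 100              -- queries beyond the cap wait in a queue up to this depth
);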
: Warning: Type annotation, code: 1030
:4:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:26: Warning: At function: Filter, At function: Coalesce
:5:49: Warning: At function: SqlIn
:5:49: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Execution, code: 1060
:4:21: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001
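The code-1108 warning above names its own fix: the AnsiInForEmptyOrNullableItemsCollections pragma switches IN to ANSI three-valued semantics when the right-hand collection is empty or contains nullable items. A minimal YQL sketch of applying it; only the pragma itself is taken from the warning, while the table path, column names, and values are hypothetical:

PRAGMA AnsiInForEmptyOrNullableItemsCollections;
-- Hypothetical query: under ANSI semantics, a NULL in the IN list makes a
-- non-matching row evaluate to NULL rather than FALSE (still filtered out by
-- WHERE); the warning flags that the legacy behavior may differ unexpectedly.
SELECT Key, Value
FROM `/Root/ExampleTable`          -- hypothetical path
WHERE Value IN ("a", "b", NULL);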
: Warning: Type annotation, code: 1030
:4:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:26: Warning: At function: Filter, At function: Coalesce
:5:49: Warning: At function: SqlIn
:5:49: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Execution, code: 1060
:4:21: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001 Trying to start YDB, gRPC: 19852, MsgBus: 61988 2025-03-31T16:26:08.698333Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488010773573263709:2128];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:08.698411Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ac1/r3tmp/tmpk1D3cc/pdisk_1.dat 2025-03-31T16:26:08.978622Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:26:08.982370Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:26:08.982461Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:26:08.985884Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19852, node 2 2025-03-31T16:26:09.107741Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:26:09.107766Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:26:09.107776Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:26:09.107930Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61988 TClient is connected to server localhost:61988 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFi ... 16:26:47.928487Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:26:47.964866Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7488010944788348465:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:26:48.049133Z node 5 :TX_PROXY ERROR: Actor# [5:7488010949083315816:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:26:49.701753Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:26:49.791006Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:26:49.988193Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480
: Warning: Type annotation, code: 1030
:5:17: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:6:22: Warning: At function: Filter, At function: Coalesce
:7:31: Warning: At function: SqlIn
:7:31: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Execution, code: 1060
:5:17: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001 Trying to start YDB, gRPC: 18120, MsgBus: 32568 2025-03-31T16:26:55.463620Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7488010978846515257:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:55.463675Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ac1/r3tmp/tmpoHfBva/pdisk_1.dat 2025-03-31T16:26:55.827263Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:26:55.848756Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:26:55.848871Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:26:55.852562Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18120, node 6 2025-03-31T16:26:56.068181Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:26:56.068209Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:26:56.068221Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:26:56.068394Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32568 TClient is connected to server localhost:32568 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:26:56.888007Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:56.907849Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:26:56.918309Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:57.084956Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:26:57.362157Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:57.479538Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:00.471234Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7488010978846515257:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:27:00.471341Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:27:01.052908Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488011000321353518:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:01.053055Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:01.086185Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:27:01.173443Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:27:01.270101Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:27:01.327615Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:27:01.421865Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:27:01.517824Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:27:01.631382Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488011004616321342:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:01.631541Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:01.631608Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488011004616321347:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:01.636144Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:27:01.652593Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7488011004616321349:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:27:01.745718Z node 6 :TX_PROXY ERROR: Actor# [6:7488011004616321404:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:27:03.197382Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:27:03.278338Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:27:03.373022Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480
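As on the other nodes, the retry resolves itself: the second creation attempt reports "path exist, request accepts it", i.e. the schemeshard accepts re-creating the already materialized pool instead of failing. The same re-run tolerance can be requested explicitly in YQL DDL; a minimal sketch, assuming IF NOT EXISTS is supported by the YDB version under test (the table path and columns are hypothetical):

-- Hypothetical table; IF NOT EXISTS makes a repeated run a no-op instead of an error.
CREATE TABLE IF NOT EXISTS `/Root/ExampleTable` (
    Key Uint64,
    Value Utf8,
    PRIMARY KEY (Key)
);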
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:6:26: Warning: At function: Filter, At function: Coalesce
:7:37: Warning: At function: SqlIn
:7:37: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:6:26: Warning: At function: Filter, At function: Coalesce
:7:37: Warning: At function: SqlIn
:7:37: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 >> KqpNewEngine::AutoChooseIndex [GOOD] >> KqpNewEngine::AutoChooseIndexOrderByLimit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::DatetimeConstantFold-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 21206, MsgBus: 3205 2025-03-31T16:26:22.599312Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010835881140083:2153];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:22.599420Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001945/r3tmp/tmpyMtSXL/pdisk_1.dat 2025-03-31T16:26:23.319847Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:26:23.357088Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:26:23.357174Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:26:23.372350Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21206, node 1 2025-03-31T16:26:23.588986Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:26:23.589006Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:26:23.589018Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:26:23.589132Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3205 TClient is connected to server localhost:3205 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:26:24.365560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:26:24.383334Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:26:26.722240Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010853061009852:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:26.722354Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:26.727190Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010853061009860:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:26.731719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:26:26.751321Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010853061009866:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:26:26.811695Z node 1 :TX_PROXY ERROR: Actor# [1:7488010853061009917:2342] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:26:27.203761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:26:27.390160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:26:27.457834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:26:27.525431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:26:27.539602Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010835881140083:2153];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:27.539663Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:26:27.576414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:26:27.778693Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:26:27.832513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:26:27.874061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:26:27.928831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-31T16:26:27.966222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-03-31T16:26:28.014454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-03-31T16:26:28.097281Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 
2025-03-31T16:26:28.156087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:26:28.892708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-03-31T16:26:28.939167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T16:26:28.973320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-31T16:26:29.009783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-31T16:26:29.079723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-31T16:26:29.154906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-31T16:26:29.227850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-31T16:26:29.259099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-31T16:26:29.299411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-03-31T16:26:29.332720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-31T16:26:29.370161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-31T16:26:29.407457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-03-31T16:26:29.443385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-03-31T16:26:29.474821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-03-31T16:26:29.542680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-03-31T16:26:29.579059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-03-31T16:26:29.617326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable ... oller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:03.973416Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038588;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:03.978785Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038432;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:03.984656Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038616;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:03.989293Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038494;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:03.989951Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038546;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:04.000040Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038590;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:04.003639Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038592;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:04.009559Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038500;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:04.012864Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038598;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:04.015345Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038530;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:04.021929Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038572;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:04.023947Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038568;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:04.032056Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038562;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:04.032105Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038518;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:04.038130Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038536;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:04.048897Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038596;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:04.048896Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038448;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:04.055385Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038564;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:04.060384Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038446;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:04.067190Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038550;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:04.072183Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038556;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:04.075800Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038470;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:04.077918Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038474;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:04.085069Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038618;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:04.089981Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038538;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:04.091854Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038532;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:04.098086Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038576;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:04.101393Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038582;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:04.103940Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038574;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:04.106937Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038498;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:04.109269Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038602;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:04.111901Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038606;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:04.115259Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038534;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:04.116838Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038608;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:04.120850Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038548;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:04.130720Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038594;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:04.133171Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038600;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:04.139716Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038610;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:04.145578Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038466;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:04.151411Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038570;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:04.152882Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038580;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:04.158091Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038528;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:04.163772Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038604;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:04.172063Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038614;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:04.172561Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038612;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:04.343419Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpgd9xe799n7vy4j9fyvcjt", SessionId: ydb://session/3?node_id=1&id=N2JiODEzY2QtNDcyZTQ0ZTYtNWQ1NjZmMDUtYTBkOWQzYjQ=, Slow query, duration: 33.128759s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:27:04.681610Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:27:04.682068Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:27:04.682697Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;self_id=[1:7488010934665408184:4969];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038331;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038170;receive=72075186224038629; 2025-03-31T16:27:04.683043Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> KqpJoin::FullOuterJoin >> OlapEstimationRowsCorrectness::TPCH2 >> DataShardWrite::UpsertPreparedManyTables+Volatile [GOOD] >> DataShardWrite::UpsertPreparedManyTables-Volatile >> KqpJoinOrder::ShuffleEliminationManyKeysJoinPredicate >> KqpJoin::ExclusionJoin >> TopicService::UseDoubleSlashInTopicPath [GOOD] >> KqpIndexLookupJoin::Inner-StreamLookup [GOOD] >> KqpIndexLookupJoin::LeftJoinOnlyRightColumn+StreamLookup [GOOD] >> TPersQueueTest::StreamReadCommitAndStatusMsgs [GOOD] >> TPersQueueTest::StreamReadManyUpdateTokenAndRead >> KqpIndexLookupJoin::SimpleInnerJoin-StreamLookup [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::Inner-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 6337, MsgBus: 19208 2025-03-31T16:26:58.349715Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010988934913099:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:58.377753Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001924/r3tmp/tmpTw84bJ/pdisk_1.dat 2025-03-31T16:26:58.943806Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:26:58.947279Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2025-03-31T16:26:58.947362Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:26:58.949722Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6337, node 1 2025-03-31T16:26:59.075527Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:26:59.075553Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:26:59.075560Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:26:59.075659Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19208 TClient is connected to server localhost:19208 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:26:59.769074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:59.801317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:00.015841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:00.203798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:00.283185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:02.133912Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011006114784039:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:02.134055Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:02.492643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:27:02.534978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:27:02.578637Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:27:02.614940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:27:02.654220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:27:02.722849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:27:02.819282Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011006114784560:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:02.819391Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:02.819709Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011006114784565:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:02.823984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:27:02.842971Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011006114784567:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:27:02.914844Z node 1 :TX_PROXY ERROR: Actor# [1:7488011006114784621:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:27:03.351239Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010988934913099:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:27:03.351306Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:27:04.365855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:27:04.446449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:27:04.482600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T16:27:04.521685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T16:27:04.563740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-31T16:27:04.615286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 15880, MsgBus: 24797 2025-03-31T16:27:06.903511Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488011023290730258:2178];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001924/r3tmp/tmp79RwlV/pdisk_1.dat 2025-03-31T16:27:06.983018Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:27:07.106684Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:27:07.144549Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:27:07.144626Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:27:07.152205Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15880, node 2 2025-03-31T16:27:07.327690Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:27:07.327714Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:27:07.327721Z node 2 
:NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:27:07.327844Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24797 TClient is connected to server localhost:24797 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:27:08.075224Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:08.083777Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:27:08.092183Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:08.211944Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:08.402009Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:08.482250Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:10.852298Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488011040470601065:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:27:10.852400Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:27:10.911552Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-03-31T16:27:10.972063Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-03-31T16:27:11.006831Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-03-31T16:27:11.096000Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-03-31T16:27:11.161878Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-03-31T16:27:11.263337Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-03-31T16:27:11.366374Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488011044765568882:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:27:11.366518Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:27:11.367047Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488011044765568887:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:27:11.378334Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480
2025-03-31T16:27:11.396121Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488011044765568889:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
2025-03-31T16:27:11.482180Z node 2 :TX_PROXY ERROR: Actor# [2:7488011044765568944:3449] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-31T16:27:11.893309Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488011023290730258:2178];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:27:11.893372Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-31T16:27:12.817912Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480
2025-03-31T16:27:12.878362Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480
2025-03-31T16:27:12.960063Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480
2025-03-31T16:27:13.041859Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480
2025-03-31T16:27:13.098342Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480
2025-03-31T16:27:13.152273Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480
>> DemoTx::Scenario_5 [GOOD]
>> TopicService::RelativePath
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftJoinOnlyRightColumn+StreamLookup [GOOD]
Test command err:
Trying to start YDB, gRPC: 13446, MsgBus: 6489
2025-03-31T16:27:07.207424Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011029522073291:2070];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:27:07.257266Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00191a/r3tmp/tmp6vdvcN/pdisk_1.dat
2025-03-31T16:27:07.857663Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:27:07.857734Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:27:07.863995Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-31T16:27:07.923629Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 13446, node 1
2025-03-31T16:27:08.107528Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T16:27:08.107546Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T16:27:08.107552Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T16:27:08.107646Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:6489
TClient is connected to server localhost:6489
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-31T16:27:08.885519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:27:08.924135Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-03-31T16:27:08.935123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:27:09.104411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:27:09.338177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:27:09.439071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:27:11.469699Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011046701944233:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:27:11.469905Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:27:11.928343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-31T16:27:11.968307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-31T16:27:12.042588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-31T16:27:12.089991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-31T16:27:12.128859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-31T16:27:12.200259Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011029522073291:2070];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:27:12.200304Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-31T16:27:12.213620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-31T16:27:12.323237Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011050996912048:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:27:12.323349Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:27:12.323725Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011050996912053:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:27:12.332411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-31T16:27:12.354704Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011050996912055:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-03-31T16:27:12.429921Z node 1 :TX_PROXY ERROR: Actor# [1:7488011050996912110:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-31T16:27:13.893931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-03-31T16:27:14.018291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
2025-03-31T16:27:14.089138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480
2025-03-31T16:27:14.175625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480
2025-03-31T16:27:14.268671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480
2025-03-31T16:27:14.334752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::SimpleInnerJoin-StreamLookup [GOOD]
Test command err:
Trying to start YDB, gRPC: 9469, MsgBus: 28534
2025-03-31T16:27:08.355654Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011033900809895:2062];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:27:08.355708Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001918/r3tmp/tmpHuR9hT/pdisk_1.dat
2025-03-31T16:27:09.096644Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:27:09.097971Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:27:09.098043Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:27:09.101954Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 9469, node 1
2025-03-31T16:27:09.317144Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T16:27:09.317163Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T16:27:09.317169Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T16:27:09.317256Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:28534
TClient is connected to server localhost:28534
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-31T16:27:10.208120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:27:10.235722Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-03-31T16:27:10.249413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:27:10.433439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:27:10.659393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:27:10.745077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:27:12.659959Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011051080680851:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:27:12.660074Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:27:12.987571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-31T16:27:13.033644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-31T16:27:13.101928Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-31T16:27:13.175349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-31T16:27:13.226230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-31T16:27:13.307195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-31T16:27:13.357797Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011033900809895:2062];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:27:13.357878Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-31T16:27:13.408952Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011055375648669:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:27:13.409075Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:27:13.409432Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011055375648674:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:27:13.413532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-31T16:27:13.430542Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011055375648676:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-03-31T16:27:13.511535Z node 1 :TX_PROXY ERROR: Actor# [1:7488011055375648730:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-31T16:27:14.778044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-03-31T16:27:14.824417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
2025-03-31T16:27:14.869369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480
2025-03-31T16:27:14.925032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480
2025-03-31T16:27:14.978349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480
2025-03-31T16:27:15.067866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480
>> DataShardWrite::UpsertPreparedManyTables-Volatile [GOOD]
>> DataShardWrite::UpsertPreparedNoTxCache+Volatile
>> KqpJoinOrder::TPCDS61-ColumnStore
>> TFstClassSrcIdPQTest::TestTableCreated
>> KqpSqlIn::SecondaryIndex_ComplexKey_In_And_In [GOOD]
>> KqpSqlIn::PhasesCount
>> KqpReturning::ReturningSerial [GOOD]
>> KqpReturning::ReturningColumnsOrder
>> KqpJoin::AllowJoinsForComplexPredicates+StreamLookup [GOOD]
>> KqpJoin::JoinLeftPureInner [GOOD]
>> KqpJoinOrder::CanonizedJoinOrderTPCH9
>> KqpIndexLookupJoin::LeftOnlyJoinValueColumn-StreamLookup
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::AllowJoinsForComplexPredicates+StreamLookup [GOOD]
Test command err:
Trying to start YDB, gRPC: 12216, MsgBus: 7960
2025-03-31T16:27:07.023603Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011023462544805:2064];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:27:07.023643Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00191d/r3tmp/tmpMxX2hy/pdisk_1.dat
2025-03-31T16:27:07.744853Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:27:07.744947Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:27:07.747328Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:27:07.763825Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 12216, node 1
2025-03-31T16:27:08.079688Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T16:27:08.079714Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T16:27:08.079724Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T16:27:08.079839Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:7960
TClient is connected to server localhost:7960
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-31T16:27:09.113947Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:27:09.163943Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-03-31T16:27:09.179368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:27:09.402968Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:27:09.599005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:27:09.695311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:27:11.975951Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011044937383042:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:27:11.976048Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:27:12.028849Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011023462544805:2064];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:27:12.028933Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-31T16:27:12.462537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-31T16:27:12.505461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-31T16:27:12.557258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-31T16:27:12.590912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-31T16:27:12.628142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-31T16:27:12.697242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-31T16:27:12.791426Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011049232350854:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:27:12.791550Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:27:12.795121Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011049232350860:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:27:12.800345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-31T16:27:12.816492Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011049232350862:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-03-31T16:27:12.882980Z node 1 :TX_PROXY ERROR: Actor# [1:7488011049232350916:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::JoinLeftPureInner [GOOD]
Test command err:
Trying to start YDB, gRPC: 10530, MsgBus: 12882
2025-03-31T16:27:11.875615Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011047384104456:2139];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:27:11.875959Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001915/r3tmp/tmpOMqsOW/pdisk_1.dat
2025-03-31T16:27:12.651513Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:27:12.685960Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:27:12.686046Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:27:12.688437Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 10530, node 1
2025-03-31T16:27:12.966954Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T16:27:12.966972Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T16:27:12.966979Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T16:27:12.967104Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:12882
TClient is connected to server localhost:12882
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-31T16:27:13.899329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:27:13.936068Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-03-31T16:27:13.969558Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:27:14.145521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:27:14.402379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:27:14.494544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:27:16.585916Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011068858942622:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:27:16.586038Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:27:16.839429Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011047384104456:2139];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:27:16.839487Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-31T16:27:16.960124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-31T16:27:17.011818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-31T16:27:17.058554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-31T16:27:17.101586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-31T16:27:17.148646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-31T16:27:17.214307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-31T16:27:17.317621Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011073153910434:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:27:17.317689Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:27:17.318154Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011073153910439:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:27:17.322314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-31T16:27:17.348559Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011073153910441:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-03-31T16:27:17.446463Z node 1 :TX_PROXY ERROR: Actor# [1:7488011073153910497:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
>> KqpJoin::FullOuterJoin [GOOD]
>> TPersQueueTest::CacheHead [GOOD]
>> TPersQueueTest::CheckACLForGrpcWrite
>> KqpReturning::ReturningWorksIndexedDeleteV2+QueryService [GOOD]
>> KqpReturning::ReturningTypes
>> KqpJoin::ExclusionJoin [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::FullOuterJoin [GOOD]
Test command err:
Trying to start YDB, gRPC: 1716, MsgBus: 12206
2025-03-31T16:27:13.042048Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011055307621852:2196];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:27:13.042534Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001914/r3tmp/tmpzAgnTH/pdisk_1.dat
2025-03-31T16:27:13.904814Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:27:13.904896Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:27:13.905047Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:27:13.931670Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 1716, node 1
2025-03-31T16:27:14.219561Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T16:27:14.219580Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T16:27:14.219585Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T16:27:14.219678Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:12206
TClient is connected to server localhost:12206
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-31T16:27:15.128729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:27:15.156244Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480
2025-03-31T16:27:15.177880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:27:15.386506Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:27:15.632935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:27:15.750316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:27:18.011250Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011055307621852:2196];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:27:18.011318Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-31T16:27:18.325156Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011076782459993:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:27:18.325262Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:27:18.712403Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-03-31T16:27:18.767690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-03-31T16:27:18.844690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-03-31T16:27:18.888845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-03-31T16:27:18.936077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-03-31T16:27:18.988536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-03-31T16:27:19.086189Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011081077427808:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:27:19.086252Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:27:19.086429Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011081077427813:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:27:19.090533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480
2025-03-31T16:27:19.106244Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011081077427815:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
2025-03-31T16:27:19.168726Z node 1 :TX_PROXY ERROR: Actor# [1:7488011081077427870:3461] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-31T16:27:20.503311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480
2025-03-31T16:27:20.551315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480
2025-03-31T16:27:20.608808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480
>> KqpFlipJoin::RightSemi_2
>> KqpNewEngine::AutoChooseIndexOrderByLimit [GOOD]
>> KqpNewEngine::AutoChooseIndexOrderByLambda
>> KqpJoinOrder::TestJoinHint1-ColumnStore
>> DataShardWrite::UpsertPreparedNoTxCache+Volatile [GOOD]
>> DataShardWrite::UpsertPreparedNoTxCache-Volatile
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::ExclusionJoin [GOOD]
Test command err:
Trying to start YDB, gRPC: 26068, MsgBus: 64222
2025-03-31T16:27:15.311750Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011064275434872:2071];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:27:15.312658Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00190f/r3tmp/tmp1sd4hp/pdisk_1.dat
2025-03-31T16:27:16.066828Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:27:16.068100Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:27:16.068190Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:27:16.084173Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 26068, node 1
2025-03-31T16:27:16.319734Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T16:27:16.319753Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T16:27:16.319778Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T16:27:16.324975Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:64222
TClient is connected to server localhost:64222
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-31T16:27:17.400190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:27:17.456281Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-03-31T16:27:17.473722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:27:17.669853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:27:17.870684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:27:17.957875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:27:19.927399Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011081455305824:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:27:19.927488Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:27:20.277663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-31T16:27:20.315202Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011064275434872:2071];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:27:20.315277Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-31T16:27:20.334983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-31T16:27:20.418516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-31T16:27:20.500284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-31T16:27:20.583161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-31T16:27:20.676347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-31T16:27:20.746906Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011085750273648:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:27:20.746990Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:27:20.747333Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011085750273653:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:20.751578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:27:20.766131Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011085750273655:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:27:20.830867Z node 1 :TX_PROXY ERROR: Actor# [1:7488011085750273707:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:27:22.372474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:27:22.456565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:27:22.511650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 >> KqpJoinOrder::TPCDS23-ColumnStore >> KqpIndexLookupJoin::SimpleLeftOnlyJoin-StreamLookup >> KqpReturning::ReturningColumnsOrder [GOOD] >> KqpReturning::Random >> KqpIndexLookupJoin::LeftOnlyJoinValueColumn-StreamLookup [GOOD] >> KqpJoinOrder::TPCDS16-ColumnStore [GOOD] >> DataShardWrite::UpsertPreparedNoTxCache-Volatile [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftOnlyJoinValueColumn-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 7726, MsgBus: 5252 2025-03-31T16:27:20.911886Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011085187774073:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:27:20.912298Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00190a/r3tmp/tmpL4qJxm/pdisk_1.dat 2025-03-31T16:27:21.701536Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:27:21.711963Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:27:21.712076Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:27:21.717449Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7726, node 1 2025-03-31T16:27:21.995808Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:27:21.995828Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:27:21.995836Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:27:21.995952Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5252 TClient is connected to server localhost:5252 WaitRootIsUp 'Root'... 
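[editor's note] The run above shows the workload-manager bootstrap pattern that repeats throughout these tests: TPoolFetcherActor reports NOT_FOUND for the `default` pool, a creator actor is scheduled, and a racing create then fails with "path exist, request accepts it", which the service treats as success. Below is a minimal standalone C++ sketch of that idempotent ensure-exists flow; all names (EnsureDefaultPool, FetchPool, CreatePool) are hypothetical and this is not the actual YDB actor code.

```cpp
#include <cstdio>
#include <mutex>
#include <set>
#include <string>

// Hypothetical status codes mirroring the log: NOT_FOUND on the first fetch,
// AlreadyExists ("path exist, request accepts it") on a racing create.
enum class EStatus { Success, NotFound, AlreadyExists };

std::mutex g_lock;
std::set<std::string> g_catalog;  // stand-in for the schemeshard path set

EStatus FetchPool(const std::string& path) {
    std::lock_guard<std::mutex> guard(g_lock);
    return g_catalog.count(path) ? EStatus::Success : EStatus::NotFound;
}

EStatus CreatePool(const std::string& path) {
    std::lock_guard<std::mutex> guard(g_lock);
    return g_catalog.insert(path).second ? EStatus::Success
                                         : EStatus::AlreadyExists;
}

// Idempotent ensure-exists: NOT_FOUND triggers a create, and a create that
// loses the race maps AlreadyExists back to success, as in the log above.
bool EnsureDefaultPool(const std::string& path) {
    if (FetchPool(path) == EStatus::Success)
        return true;
    std::printf("fetch %s: NOT_FOUND, scheduling create\n", path.c_str());
    EStatus st = CreatePool(path);
    if (st == EStatus::AlreadyExists)
        std::printf("create %s: path exist, request accepts it\n", path.c_str());
    return st == EStatus::Success || st == EStatus::AlreadyExists;
}

int main() {
    const std::string path = "/Root/.metadata/workload_manager/pools/default";
    EnsureDefaultPool(path);  // first caller creates the pool
    EnsureDefaultPool(path);  // second caller finds it already present
    return 0;
}
```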
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:27:23.053485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:23.068680Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:27:23.082589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:23.283408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:23.505681Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:27:23.586113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:27:25.624072Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011106662612191:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:25.624177Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:25.895827Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011085187774073:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:27:25.895878Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:27:25.940845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:27:26.009263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:27:26.102209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:27:26.140857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:27:26.178310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:27:26.231631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:27:26.330918Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011110957580009:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:26.330995Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:26.331784Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011110957580014:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:26.336413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:27:26.359369Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011110957580016:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:27:26.443027Z node 1 :TX_PROXY ERROR: Actor# [1:7488011110957580073:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:27:27.740118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:27:27.782533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:27:27.832119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T16:27:27.883768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T16:27:27.917826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-31T16:27:27.967964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 >> KqpSqlIn::PhasesCount [GOOD] >> TopicService::RelativePath [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::UpsertPreparedNoTxCache-Volatile [GOOD] Test command err: 2025-03-31T16:26:59.603683Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:26:59.604052Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:26:59.604262Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0011f8/r3tmp/tmpFf0X73/pdisk_1.dat 2025-03-31T16:27:00.676837Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:27:00.783073Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:27:00.857100Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:27:00.857291Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:27:00.878678Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:27:01.038437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:27:01.164856Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:27:01.166249Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:27:01.166998Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-31T16:27:01.167326Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:27:01.284509Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:27:01.285324Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:27:01.285472Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:27:01.291898Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:27:01.292033Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:27:01.292136Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:27:01.295731Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:27:01.295966Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:27:01.296116Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-31T16:27:01.309734Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:27:01.392536Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T16:27:01.399313Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:27:01.399596Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-31T16:27:01.399672Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:27:01.399716Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T16:27:01.399758Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:27:01.400000Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:27:01.407222Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:27:01.411772Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T16:27:01.411933Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T16:27:01.412129Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:27:01.412188Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:27:01.412252Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-31T16:27:01.412300Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-31T16:27:01.412339Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-31T16:27:01.412377Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:27:01.412433Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:27:01.412575Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:27:01.412642Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:27:01.412783Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-31T16:27:01.412920Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-03-31T16:27:01.412966Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T16:27:01.413106Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:27:01.413497Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-31T16:27:01.413583Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T16:27:01.413711Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T16:27:01.413797Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-31T16:27:01.413851Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-31T16:27:01.413914Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
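[editor's note] The TRACE lines above walk transaction 281474976715657 through the datashard execution-unit pipeline: each unit reports a status (Executed, DelayComplete, DelayCompleteNoMoreRestarts, NotReady); Executed advances straight to the next unit, the Delay* statuses defer their completion work to the Complete phase, and NotReady parks the operation (here in WaitForPlan until the plan step arrives). The toy C++ model below reproduces that loop with the unit names and statuses taken from the trace; the dispatch logic is a simplification, not the real TDataShard code.

```cpp
#include <cstdio>
#include <string>
#include <vector>

// Statuses as they appear in the TX_DATASHARD trace above.
enum class EStatus { Executed, DelayComplete, DelayCompleteNoMoreRestarts, NotReady };

struct TUnit {
    std::string Name;
    EStatus Result;  // what the unit reports for this operation
};

const char* ToString(EStatus s) {
    switch (s) {
        case EStatus::Executed: return "Executed";
        case EStatus::DelayComplete: return "DelayComplete";
        case EStatus::DelayCompleteNoMoreRestarts: return "DelayCompleteNoMoreRestarts";
        case EStatus::NotReady: return "NotReady";
    }
    return "?";
}

int main() {
    // Propose-side execution plan for the scheme tx in the trace.
    std::vector<TUnit> plan = {
        {"CheckSchemeTx", EStatus::Executed},
        {"StoreSchemeTx", EStatus::DelayCompleteNoMoreRestarts},
        {"FinishPropose", EStatus::DelayComplete},
        {"WaitForPlan",   EStatus::NotReady},  // parked until TEvPlanStep
    };
    std::vector<std::string> deferred;  // units finished later, in the Complete phase
    for (const TUnit& unit : plan) {
        std::printf("Trying to execute on unit %s -> %s\n",
                    unit.Name.c_str(), ToString(unit.Result));
        if (unit.Result == EStatus::NotReady)
            break;  // operation waits; the pipeline resumes on a new event
        if (unit.Result != EStatus::Executed)
            deferred.push_back(unit.Name);  // Delay*: complete later
    }
    // Matches "Complete execution ... on unit StoreSchemeTx / FinishPropose" above.
    for (const std::string& name : deferred)
        std::printf("Complete execution on unit %s\n", name.c_str());
    return 0;
}
```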
2025-03-31T16:27:01.413952Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-31T16:27:01.414394Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-31T16:27:01.414444Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-31T16:27:01.414485Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-31T16:27:01.414554Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-31T16:27:01.414657Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-31T16:27:01.414706Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-31T16:27:01.414741Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-31T16:27:01.414774Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-31T16:27:01.414800Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-31T16:27:01.421355Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-31T16:27:01.421435Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:27:01.435863Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:27:01.435962Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-31T16:27:01.436005Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-31T16:27:01.436075Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-03-31T16:27:01.436162Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-31T16:27:01.629486Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:27:01.629557Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:27:01.629597Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-31T16:27:01.630030Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:568:2495], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-03-31T16:27:01.630090Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-31T16:27:01.630242Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-31T16:27:01.630292Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-03-31T16:27:01.630338Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-03-31T16:27:01.630375Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-03-31T16:27:01.635009Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T16:27:01.639193Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:27:01.641912Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:27:01.641981Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:27:01.642059Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:27:0 ... 2985Z node 6 :TX_DATASHARD TRACE: Table /Root/table-1, shard: 72075186224037888, write point (Uint32 : 4) 2025-03-31T16:27:30.693017Z node 6 :TX_DATASHARD TRACE: -- AddWriteRange: (Uint32 : 4) table: [72057594046644480:2:1] 2025-03-31T16:27:30.693089Z node 6 :TX_DATASHARD DEBUG: LoadWriteDetails at 72075186224037888 loaded writeOp from db 1500:100 keys extracted: 3 2025-03-31T16:27:30.693142Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is Executed 2025-03-31T16:27:30.693175Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit LoadWriteDetails 2025-03-31T16:27:30.693209Z node 6 :TX_DATASHARD TRACE: Add [1500:100] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-03-31T16:27:30.693243Z node 6 :TX_DATASHARD TRACE: Trying to execute [1500:100] at 72075186224037888 on unit BuildAndWaitDependencies 2025-03-31T16:27:30.693359Z node 6 :TX_DATASHARD TRACE: Operation [1500:100] is the new logically complete end at 72075186224037888 2025-03-31T16:27:30.693410Z node 6 :TX_DATASHARD TRACE: Operation [1500:100] is the new logically incomplete end at 72075186224037888 2025-03-31T16:27:30.693462Z node 6 :TX_DATASHARD TRACE: Activated operation [1500:100] at 72075186224037888 2025-03-31T16:27:30.693542Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is Executed 2025-03-31T16:27:30.693578Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-03-31T16:27:30.693607Z node 6 :TX_DATASHARD TRACE: Add [1500:100] at 72075186224037888 to execution unit BuildWriteOutRS 2025-03-31T16:27:30.693640Z node 6 :TX_DATASHARD TRACE: Trying to execute [1500:100] at 72075186224037888 on unit BuildWriteOutRS 2025-03-31T16:27:30.693695Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is Executed 2025-03-31T16:27:30.693722Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit BuildWriteOutRS 2025-03-31T16:27:30.693746Z node 6 :TX_DATASHARD TRACE: Add [1500:100] at 72075186224037888 to execution unit StoreAndSendWriteOutRS 2025-03-31T16:27:30.693772Z node 6 :TX_DATASHARD TRACE: Trying to execute [1500:100] at 72075186224037888 on unit 
StoreAndSendWriteOutRS 2025-03-31T16:27:30.693817Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is Executed 2025-03-31T16:27:30.693845Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit StoreAndSendWriteOutRS 2025-03-31T16:27:30.693872Z node 6 :TX_DATASHARD TRACE: Add [1500:100] at 72075186224037888 to execution unit PrepareWriteTxInRS 2025-03-31T16:27:30.693899Z node 6 :TX_DATASHARD TRACE: Trying to execute [1500:100] at 72075186224037888 on unit PrepareWriteTxInRS 2025-03-31T16:27:30.693926Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is Executed 2025-03-31T16:27:30.693949Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit PrepareWriteTxInRS 2025-03-31T16:27:30.693971Z node 6 :TX_DATASHARD TRACE: Add [1500:100] at 72075186224037888 to execution unit LoadAndWaitInRS 2025-03-31T16:27:30.694028Z node 6 :TX_DATASHARD TRACE: Trying to execute [1500:100] at 72075186224037888 on unit LoadAndWaitInRS 2025-03-31T16:27:30.699014Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is Executed 2025-03-31T16:27:30.699134Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit LoadAndWaitInRS 2025-03-31T16:27:30.699171Z node 6 :TX_DATASHARD TRACE: Add [1500:100] at 72075186224037888 to execution unit ExecuteWrite 2025-03-31T16:27:30.699206Z node 6 :TX_DATASHARD TRACE: Trying to execute [1500:100] at 72075186224037888 on unit ExecuteWrite 2025-03-31T16:27:30.699248Z node 6 :TX_DATASHARD DEBUG: Executing write operation for [1500:100] at 72075186224037888 2025-03-31T16:27:30.699437Z node 6 :TX_DATASHARD DEBUG: Executed write operation for [1500:100] at 72075186224037888, row count=3 2025-03-31T16:27:30.699497Z node 6 :TX_DATASHARD TRACE: add locks to result: 0 2025-03-31T16:27:30.699573Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-31T16:27:30.699610Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit ExecuteWrite 2025-03-31T16:27:30.699651Z node 6 :TX_DATASHARD TRACE: Add [1500:100] at 72075186224037888 to execution unit CompleteWrite 2025-03-31T16:27:30.699692Z node 6 :TX_DATASHARD TRACE: Trying to execute [1500:100] at 72075186224037888 on unit CompleteWrite 2025-03-31T16:27:30.699929Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is DelayComplete 2025-03-31T16:27:30.699962Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit CompleteWrite 2025-03-31T16:27:30.700023Z node 6 :TX_DATASHARD TRACE: Add [1500:100] at 72075186224037888 to execution unit CompletedOperations 2025-03-31T16:27:30.700076Z node 6 :TX_DATASHARD TRACE: Trying to execute [1500:100] at 72075186224037888 on unit CompletedOperations 2025-03-31T16:27:30.700118Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is Executed 2025-03-31T16:27:30.700159Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit CompletedOperations 2025-03-31T16:27:30.700195Z node 6 :TX_DATASHARD TRACE: Execution plan for [1500:100] at 72075186224037888 has finished 2025-03-31T16:27:30.700244Z node 6 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
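[editor's note] The read-iterator trace below reports "rowCount# 3, bytes# 96, quota rows left# 18446744073709551612, quota bytes left# 18446744073709551519". Those quotas are plain unsigned 64-bit counters that start at 2^64 - 1 (effectively unlimited) and are decremented by what the iterator returned; a few lines of C++ reproduce the exact figures.

```cpp
#include <cstdint>
#include <cstdio>

int main() {
    // Quotas start effectively unlimited: 2^64 - 1 = 18446744073709551615.
    uint64_t quotaRows  = UINT64_MAX;
    uint64_t quotaBytes = UINT64_MAX;

    // The read in the trace returned 3 rows / 96 bytes (32 bytes per row here).
    const uint64_t rowCount = 3, byteCount = 96;
    quotaRows  -= rowCount;
    quotaBytes -= byteCount;

    // Prints 18446744073709551612 and 18446744073709551519, matching the
    // "quota rows left" / "quota bytes left" values in the log.
    std::printf("quota rows left# %llu, quota bytes left# %llu\n",
                (unsigned long long)quotaRows, (unsigned long long)quotaBytes);
    return 0;
}
```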
2025-03-31T16:27:30.700289Z node 6 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-03-31T16:27:30.700335Z node 6 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-31T16:27:30.700375Z node 6 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-31T16:27:30.711920Z node 6 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2025-03-31T16:27:30.712037Z node 6 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:27:30.712088Z node 6 :TX_DATASHARD TRACE: Complete execution for [1500:100] at 72075186224037888 on unit CompleteWrite 2025-03-31T16:27:30.712219Z node 6 :TX_DATASHARD DEBUG: Complete write [1500 : 100] from 72075186224037888 at tablet 72075186224037888 send result to client [6:593:2518] 2025-03-31T16:27:30.712285Z node 6 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:27:30.713638Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [6:767:2637], Recipient [6:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:27:30.713724Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:27:30.713801Z node 6 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [6:766:2636], serverId# [6:767:2637], sessionId# [0:0:0] 2025-03-31T16:27:30.713979Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269553169, Sender [6:765:2635], Recipient [6:666:2570]: NKikimrTxDataShard.TEvGetInfoRequest 2025-03-31T16:27:30.715413Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [6:770:2640], Recipient [6:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:27:30.715484Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:27:30.715544Z node 6 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [6:769:2639], serverId# [6:770:2640], sessionId# [0:0:0] 2025-03-31T16:27:30.715807Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [6:768:2638], Recipient [6:666:2570]: NKikimrTxDataShard.TEvRead ReadId: 1000 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2025-03-31T16:27:30.715923Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-03-31T16:27:30.715978Z node 6 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v1500/100 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-03-31T16:27:30.716033Z node 6 :TX_DATASHARD TRACE: 72075186224037888 changed HEAD read to non-repeatable v1500/18446744073709551615 2025-03-31T16:27:30.716105Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit CheckRead 2025-03-31T16:27:30.716209Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-03-31T16:27:30.716264Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit CheckRead 2025-03-31T16:27:30.716313Z node 6 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-03-31T16:27:30.716351Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit 
BuildAndWaitDependencies 2025-03-31T16:27:30.716423Z node 6 :TX_DATASHARD TRACE: Activated operation [0:3] at 72075186224037888 2025-03-31T16:27:30.716470Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-03-31T16:27:30.716494Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-03-31T16:27:30.716517Z node 6 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit ExecuteRead 2025-03-31T16:27:30.716541Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit ExecuteRead 2025-03-31T16:27:30.716652Z node 6 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 1000 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2025-03-31T16:27:30.716904Z node 6 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[6:768:2638], 1000} after executionsCount# 1 2025-03-31T16:27:30.716970Z node 6 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[6:768:2638], 1000} sends rowCount# 3, bytes# 96, quota rows left# 18446744073709551612, quota bytes left# 18446744073709551519, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-03-31T16:27:30.717053Z node 6 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[6:768:2638], 1000} finished in read 2025-03-31T16:27:30.717125Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-03-31T16:27:30.717151Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit ExecuteRead 2025-03-31T16:27:30.717174Z node 6 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit CompletedOperations 2025-03-31T16:27:30.717213Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit CompletedOperations 2025-03-31T16:27:30.717259Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-03-31T16:27:30.717295Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit CompletedOperations 2025-03-31T16:27:30.717326Z node 6 :TX_DATASHARD TRACE: Execution plan for [0:3] at 72075186224037888 has finished 2025-03-31T16:27:30.717369Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-03-31T16:27:30.717474Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS16-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 5729, MsgBus: 7265 2025-03-31T16:26:21.301799Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010832402569418:2267];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:21.301835Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001948/r3tmp/tmp4fsq34/pdisk_1.dat 2025-03-31T16:26:21.886413Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:26:21.915868Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:26:21.915966Z node 1 :HIVE WARN: HIVE#72057594037968897 
Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:26:21.921092Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5729, node 1 2025-03-31T16:26:22.067565Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:26:22.067607Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:26:22.067621Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:26:22.067724Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7265 TClient is connected to server localhost:7265 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:26:22.974786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:23.016376Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:26:25.141097Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010849582439056:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:25.141103Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010849582439068:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:25.141208Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:25.145112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:26:25.158163Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010849582439070:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:26:25.231437Z node 1 :TX_PROXY ERROR: Actor# [1:7488010849582439121:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:26:25.695142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:26:25.891456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:26:25.986587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:26:26.038229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:26:26.070099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:26:26.282112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:26:26.307808Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010832402569418:2267];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:26.307923Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:26:26.358464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:26:26.408208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:26:26.445065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-31T16:26:26.494990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-03-31T16:26:26.537446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-03-31T16:26:26.585304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 
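[editor's note] The schemeshard warnings throughout print operation ids like "281474976710668:3": the part before the colon is the transaction id and the suffix is the sub-operation (part) index within it, with ":0" for single-part operations such as most of the ESchemeOpCreateTable proposals above. A small illustrative parser for that notation follows; it is a sketch of the printed format only, not a YDB API.

```cpp
#include <cstdint>
#include <cstdio>
#include <string>

// OpId as printed in the schemeshard warnings: "<txId>:<subOperationIndex>".
struct TOpId {
    uint64_t TxId = 0;
    uint32_t PartId = 0;
};

// Parse e.g. "281474976710668:3" -> {281474976710668, 3}; false on malformed input.
bool ParseOpId(const std::string& text, TOpId& out) {
    size_t colon = text.find(':');
    if (colon == std::string::npos)
        return false;
    try {
        out.TxId = std::stoull(text.substr(0, colon));
        out.PartId = static_cast<uint32_t>(std::stoul(text.substr(colon + 1)));
    } catch (...) {
        return false;
    }
    return true;
}

int main() {
    TOpId op;
    // The resource-pool create above runs as part 3 of tx 281474976710668.
    if (ParseOpId("281474976710668:3", op))
        std::printf("txId=%llu part=%u\n", (unsigned long long)op.TxId, op.PartId);
    return 0;
}
```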
2025-03-31T16:26:26.687481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:26:27.435975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-03-31T16:26:27.473455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T16:26:27.528474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-31T16:26:27.580757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-31T16:26:27.637857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-31T16:26:27.699687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-31T16:26:27.791850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-31T16:26:27.843011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-31T16:26:27.917440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-03-31T16:26:27.976983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-31T16:26:28.030404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-31T16:26:28.105626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-03-31T16:26:28.146930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-03-31T16:26:28.194880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-03-31T16:26:28.247129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-03-31T16:26:28.279548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-03-31T16:26:28.347457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, ... p:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:01.745289Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038545;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:01.745690Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038615;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:01.751479Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038579;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:01.751623Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038567;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:01.757901Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:01.757903Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038617;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:01.767728Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038621;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:01.768249Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038656;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:01.773683Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038655;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:01.773897Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038628;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:01.780060Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038623;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:01.780060Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038645;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:01.786134Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038639;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:01.787111Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038648;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:01.793175Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038597;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:01.799462Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038630;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:01.802390Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038636;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:01.805806Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038620;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:01.808386Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038653;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:01.812097Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038585;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:01.814699Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038595;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:01.817853Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038431;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:01.820623Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038619;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:01.829670Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038637;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:01.840855Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038622;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:01.845325Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038647;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:01.846466Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038660;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:01.851641Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038626;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:01.853191Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038624;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:01.859860Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038631;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:01.862725Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038627;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:01.865676Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038644;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:01.871628Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038634;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:01.872662Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038625;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:01.878756Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038641;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:01.879005Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038651;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:01.884938Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038643;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:01.885296Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038638;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:02.011343Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpgd8k698p0sprb0f5amzkz", SessionId: ydb://session/3?node_id=1&id=ZGIxMjMwODUtNjNjOTAyOGItYzc3ZjA4NWQtNjg4MDAwZTk=, Slow query, duration: 32.147764s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:27:02.535267Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:27:02.535920Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:27:02.536473Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038170;self_id=[1:7488010879647217400:3022];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038170;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038629;receive=72075186224038331; 2025-03-31T16:27:02.536895Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:27:25.912070Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpgekjeckppttem4m4j8sfy", SessionId: ydb://session/3?node_id=1&id=ZGIxMjMwODUtNjNjOTAyOGItYzc3ZjA4NWQtNjg4MDAwZTk=, Slow query, duration: 12.040876s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "-- NB: Subquerys\n$orders_with_several_warehouses = (\n select cs_order_number\n from `/Root/test/ds/catalog_sales`\n group by cs_order_number\n having count(distinct cs_warehouse_sk) > 1\n);\n\n-- start query 1 in stream 0 using template query16.tpl and seed 171719422\nselect\n count(distinct cs1.cs_order_number) as `order count`\n ,sum(cs_ext_ship_cost) as `total shipping cost`\n ,sum(cs_net_profit) as `total net profit`\nfrom\n `/Root/test/ds/catalog_sales` cs1\n cross join `/Root/test/ds/date_dim`\n cross join `/Root/test/ds/customer_address`\n cross join `/Root/test/ds/call_center`\n left semi join $orders_with_several_warehouses cs2 on cs1.cs_order_number = cs2.cs_order_number\n left only join `/Root/test/ds/catalog_returns` cr1 on cs1.cs_order_number = cr1.cr_order_number\nwhere\n cast(d_date as date) between cast('1999-4-01' as date) and\n (cast('1999-4-01' as date) + DateTime::IntervalFromDays(60))\nand cs1.cs_ship_date_sk = d_date_sk\nand cs1.cs_ship_addr_sk = ca_address_sk\nand ca_state = 'IL'\nand cs1.cs_call_center_sk = cc_call_center_sk\nand cc_county in ('Richland County','Bronx County','Maverick County','Mesa County',\n 'Raleigh County'\n)\norder by `order count`\nlimit 100;\n", parameters: 0b ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpSqlIn::PhasesCount [GOOD] Test command err: Trying to start YDB, gRPC: 2035, MsgBus: 16289 2025-03-31T16:26:18.274365Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010817705366602:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:18.274400Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000aaa/r3tmp/tmpqBA62P/pdisk_1.dat 2025-03-31T16:26:18.839031Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:26:18.839348Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:26:18.848579Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:26:18.852678Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2035, node 1 2025-03-31T16:26:19.055722Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:26:19.055755Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:26:19.055763Z node 1 :NET_CLASSIFIER WARN: 
failed to initialize from file: (empty maybe) 2025-03-31T16:26:19.055869Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16289 TClient is connected to server localhost:16289 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:26:19.882561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:19.929857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:20.110306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:20.353733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:20.462619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:22.573203Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010834885237544:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:22.573315Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:22.900735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:26:22.941561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:26:22.979861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:26:23.052188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:26:23.092534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:26:23.139345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:26:23.240753Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010839180205356:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:23.240832Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:23.241305Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010839180205361:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:23.244560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:26:23.256566Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010839180205363:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:26:23.275256Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010817705366602:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:23.275350Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:26:23.323725Z node 1 :TX_PROXY ERROR: Actor# [1:7488010839180205419:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:26:24.590783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:26:24.709754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:26:24.810834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T16:26:24.973888Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480
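For readability, the two KQP_SLOW_LOG records earlier in this block (32.147764s and 12.040876s) carry their query text with escaped newlines; unescaped verbatim from the log, the first is the column-table DDL:

CREATE TABLE t1 (
    id1 Int32 NOT NULL,
    PRIMARY KEY (id1)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t2 (
    id2 Int64 NOT NULL,
    t1_id1 Int64 NOT NULL,
    -- random_field2 Int32
    PRIMARY KEY (id2)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t3 (
    id3 Int16 NOT NULL,
    -- random_field3 Int32
    PRIMARY KEY (id3)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

and the second is TPC-DS query 16:

-- NB: Subquerys
$orders_with_several_warehouses = (
    select cs_order_number
    from `/Root/test/ds/catalog_sales`
    group by cs_order_number
    having count(distinct cs_warehouse_sk) > 1
);

-- start query 1 in stream 0 using template query16.tpl and seed 171719422
select
    count(distinct cs1.cs_order_number) as `order count`
   ,sum(cs_ext_ship_cost) as `total shipping cost`
   ,sum(cs_net_profit) as `total net profit`
from
    `/Root/test/ds/catalog_sales` cs1
    cross join `/Root/test/ds/date_dim`
    cross join `/Root/test/ds/customer_address`
    cross join `/Root/test/ds/call_center`
    left semi join $orders_with_several_warehouses cs2 on cs1.cs_order_number = cs2.cs_order_number
    left only join `/Root/test/ds/catalog_returns` cr1 on cs1.cs_order_number = cr1.cr_order_number
where
    cast(d_date as date) between cast('1999-4-01' as date) and
        (cast('1999-4-01' as date) + DateTime::IntervalFromDays(60))
and cs1.cs_ship_date_sk = d_date_sk
and cs1.cs_ship_addr_sk = ca_address_sk
and ca_state = 'IL'
and cs1.cs_call_center_sk = cc_call_center_sk
and cc_county in ('Richland County','Bronx County','Maverick County','Mesa County',
                  'Raleigh County')
order by `order count`
limit 100;

The long run of per-tablet finished_tx WARN records preceding the first slow query is consistent with AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240: each CREATE TABLE spawns on the order of 240 column shards, and each shard appears to log its own TTxProgressTx completion.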
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:72: Warning: At function: Filter, At function: Coalesce
:5:84: Warning: At function: SqlIn
:5:84: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:72: Warning: At function: Filter, At function: Coalesce
:5:84: Warning: At function: SqlIn
:5:84: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 Trying to start YDB, gRPC: 3170, MsgBus: 29716 2025-03-31T16:26:30.398614Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488010868211552918:2126];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:30.398729Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000aaa/r3tmp/tmpETfD6m/pdisk_1.dat 2025-03-31T16:26:30.697739Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:26:30.711908Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:26:30.711994Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:26:30.714452Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3170, node 2 2025-03-31T16:26:30.904759Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:26:30.904797Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:26:30.904807Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:26:30.904947Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29716 TClient is connected to server localhost:29716 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ... 
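The warning 1108 repeated above names a pragma for ANSI IN semantics. A minimal YQL sketch of how it would be applied (only the pragma itself comes from the log; the table path and column name are hypothetical, not taken from the test):

-- Hypothetical query; the PRAGMA name is copied verbatim from warning 1108 above.
PRAGMA AnsiInForEmptyOrNullableItemsCollections;

SELECT id
FROM `/Root/SomeTable`                -- hypothetical table path
WHERE maybe_null_value IN (1, 2, 3);  -- with the pragma, IN over nullable arguments follows ANSI semantics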
urce pool default not found or you don't have access permissions } 2025-03-31T16:27:11.071125Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:27:11.130783Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:27:11.193319Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:27:11.272250Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:27:11.325580Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:27:11.418917Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:27:11.543593Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7488011047185446712:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:11.543715Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:11.544202Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7488011047185446717:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:11.549687Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:27:11.571508Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7488011047185446719:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:27:11.666869Z node 5 :TX_PROXY ERROR: Actor# [5:7488011047185446775:3454] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:27:13.181847Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:27:13.284609Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-31T16:27:13.402280Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 6031, MsgBus: 1817 2025-03-31T16:27:20.036825Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7488011082442117424:2143];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:27:20.158268Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000aaa/r3tmp/tmpsiinFY/pdisk_1.dat 2025-03-31T16:27:20.419650Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:27:20.421654Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:27:20.421762Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:27:20.428399Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6031, node 6 2025-03-31T16:27:20.703796Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:27:20.703826Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:27:20.703839Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:27:20.704026Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1817 TClient is connected to server localhost:1817 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:27:22.156650Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:22.165278Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:27:22.190119Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:22.370582Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:22.747966Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:27:22.948041Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-31T16:27:25.042114Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7488011082442117424:2143];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:27:25.042208Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:27:26.629012Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488011108211922896:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:26.629135Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:26.756082Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:27:26.847152Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:27:26.909267Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:27:26.997618Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:27:27.127851Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:27:27.233407Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:27:27.335282Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488011112506890717:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:27.335438Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:27.343313Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488011112506890722:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:27.351955Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:27:27.371920Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7488011112506890724:2467], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:27:27.442984Z node 6 :TX_PROXY ERROR: Actor# [6:7488011112506890780:3461] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpJoin::FullOuterJoin2 >> KqpJoinOrder::CanonizedJoinOrderTPCH14 [GOOD] >> KqpFlipJoin::RightSemi_2 [GOOD] >> KqpFlipJoin::RightSemi_3 >> KqpReturning::ReturningTypes [GOOD] >> TopicService::AccessRights >> KqpJoinOrder::CanonizedJoinOrderTPCH22 >> KqpJoinOrder::CanonizedJoinOrderLookupBug ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpReturning::ReturningTypes [GOOD] Test command err: Trying to start YDB, gRPC: 4861, MsgBus: 6657 2025-03-31T16:26:22.452294Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010837505579576:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:22.452334Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000aa6/r3tmp/tmpuMUQ7u/pdisk_1.dat 2025-03-31T16:26:23.321230Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:26:23.327602Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:26:23.327705Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:26:23.336320Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4861, node 1 2025-03-31T16:26:23.615761Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:26:23.615786Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:26:23.615793Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:26:23.615886Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6657 TClient is connected to server localhost:6657 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:26:24.678402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:24.707406Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:26:24.742050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:24.943166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:25.155021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:25.245350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:27.421963Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010858980417840:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:27.422100Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:27.455228Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010837505579576:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:27.455312Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:26:27.882117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:26:27.932368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:26:27.977433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:26:28.023288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:26:28.076803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:26:28.164227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:26:28.295413Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010863275385660:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:28.295496Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:28.295839Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010863275385665:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:28.300057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:26:28.318966Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-31T16:26:28.319815Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010863275385667:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:26:28.387629Z node 1 :TX_PROXY ERROR: Actor# [1:7488010863275385725:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:26:29.638219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:26:29.714139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:26:29.776654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 25787, MsgBus: 1936 2025-03-31T16:26:33.723899Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488010881324679174:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:33.724036Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000aa6/r3tmp/tmpNoGguv/pdisk_1.dat 2025-03-31T16:26:33.959562Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:26:33.972358Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:26:33.972450Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:26:33.976729Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25787, node 2 2025-03-31T16:26:34.154189Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:26:34.154214Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:26:34.154223Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:26:34.154367Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1936 TClient is connected to server localhost:1936 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:26:34.629253Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:34.636271Z node 2 :F ... 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:15.299846Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:27:15.378951Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:27:15.437505Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:27:15.517281Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:27:15.582423Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:27:15.681046Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:27:15.840718Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7488011062324764026:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:15.840882Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:15.841314Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7488011062324764032:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:15.847067Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:27:15.870716Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7488011062324764034:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:27:15.954489Z node 5 :TX_PROXY ERROR: Actor# [5:7488011062324764087:3456] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:27:17.675819Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:27:17.797420Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-31T16:27:17.889854Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 15096, MsgBus: 11540 2025-03-31T16:27:23.659250Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7488011097656726489:2193];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:27:23.773244Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000aa6/r3tmp/tmppEj8KR/pdisk_1.dat 2025-03-31T16:27:24.110610Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:27:24.113921Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:27:24.114034Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:27:24.117150Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15096, node 6 2025-03-31T16:27:24.338201Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:27:24.338244Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:27:24.338258Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:27:24.338477Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11540 TClient is connected to server localhost:11540 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:27:25.125466Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:25.147230Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:25.271247Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:25.533649Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:25.704775Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:28.655220Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7488011097656726489:2193];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:27:28.655370Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:27:29.428568Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488011123426531877:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:29.428667Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:29.530164Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:27:29.610361Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:27:29.678940Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:27:29.781484Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:27:29.905512Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:27:30.012880Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:27:30.128843Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488011127721499700:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:30.128992Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:30.129129Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488011127721499705:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:30.133932Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:27:30.148353Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7488011127721499707:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:27:30.231285Z node 6 :TX_PROXY ERROR: Actor# [6:7488011127721499765:3460] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpIndexLookupJoin::LeftSemiJoinWithLeftFilter-StreamLookup >> TPersQueueTest::StreamReadManyUpdateTokenAndRead [GOOD] >> TPersQueueTest::SetupWriteSession >> KqpJoinOrder::CanonizedJoinOrderTPCH16 [GOOD] >> KqpNewEngine::AutoChooseIndexOrderByLambda [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH14 [GOOD] Test command err: Trying to start YDB, gRPC: 14251, MsgBus: 2462 2025-03-31T16:25:56.006170Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010723162719120:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:56.006252Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00196e/r3tmp/tmpCR7UGU/pdisk_1.dat 2025-03-31T16:25:56.468617Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:25:56.484526Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:56.484602Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:56.488176Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14251, node 1 2025-03-31T16:25:56.603639Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:25:56.603663Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:25:56.603670Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:25:56.603828Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2462 TClient is connected to server localhost:2462 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:25:57.167517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:57.187783Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:25:59.408152Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010736047621678:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:59.408278Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:59.408470Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010736047621690:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:59.413062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:25:59.429139Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010736047621692:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:25:59.524415Z node 1 :TX_PROXY ERROR: Actor# [1:7488010736047621743:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:25:59.909164Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:26:00.138209Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488010740342589308:2353];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:26:00.138475Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488010740342589308:2353];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:26:00.138776Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488010740342589308:2353];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:26:00.138900Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488010740342589308:2353];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:26:00.139005Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488010740342589308:2353];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:26:00.139383Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488010740342589308:2353];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:26:00.139531Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488010740342589308:2353];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:26:00.139670Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488010740342589308:2353];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:26:00.139793Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488010740342589308:2353];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:26:00.139894Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488010740342589308:2353];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:26:00.139998Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037894;self_id=[1:7488010740342589308:2353];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:26:00.140100Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488010740342589308:2353];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:26:00.142382Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488010740342589323:2360];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:26:00.142436Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488010740342589323:2360];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:26:00.142646Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488010740342589323:2360];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:26:00.142774Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488010740342589323:2360];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:26:00.142879Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488010740342589323:2360];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:26:00.142972Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488010740342589323:2360];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:26:00.143061Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488010740342589323:2360];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:26:00.143179Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488010740342589323:2360];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:26:00.143276Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488010740342589323:2360];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:26:00.143398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488010740342589323:2360];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:26:00.143519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488010740342589323:2360];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:26:00.143633Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7488010740342589323:2360];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:26:00.178253Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7488010740342589310:2354];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:26:00.178321Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7488010740342589310:2354];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstra ... tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:18.091197Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039317;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:18.095914Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:18.099037Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039335;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:18.106469Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:18.107230Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039321;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:18.112614Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:18.118042Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039323;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:18.119962Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:18.123577Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:18.124810Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039329;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:18.130649Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039367;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:18.131118Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:18.137523Z node 1 
:TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039235;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:18.138077Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:18.143192Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:18.143191Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:18.148794Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:18.152164Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:18.154757Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:18.157866Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:18.160322Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:18.163818Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:18.166013Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:18.169596Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:18.172709Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:18.175854Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:18.178660Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:18.181791Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039327;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 
2025-03-31T16:27:18.184831Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:18.187989Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:18.191350Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:18.193883Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:18.197332Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:18.199970Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:18.203506Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:18.205887Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:18.209634Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:18.211259Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:18.216288Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:18.216368Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039322;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:18.222024Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:18.222440Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:18.228992Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:18.231988Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:18.295672Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039291;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:18.331552Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039233;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:18.451403Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpgdkxvd9fajpdv9t50gwd2", SessionId: ydb://session/3?node_id=1&id=YzgwOTMyZmItZjMzZjA0NmEtMzM1MTU1NmItNmE1ZjBlMGI=, Slow query, duration: 36.979782s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:27:18.730502Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:27:18.731194Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:27:18.733350Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> KqpJoinOrder::CanonizedJoinOrderTPCDS64_small >> KqpIndexLookupJoin::SimpleLeftOnlyJoin-StreamLookup [GOOD] >> TPersQueueTest::CheckACLForGrpcWrite [GOOD] >> TPersQueueTest::CheckACLForGrpcRead ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::SimpleLeftOnlyJoin-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 18784, MsgBus: 4832 2025-03-31T16:27:28.513018Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011120803803086:2128];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:27:28.513056Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001905/r3tmp/tmp9Vj144/pdisk_1.dat 2025-03-31T16:27:29.224097Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:27:29.227491Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:27:29.227587Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:27:29.230700Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18784, 
node 1 2025-03-31T16:27:29.559562Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:27:29.559585Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:27:29.559592Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:27:29.559707Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4832 TClient is connected to server localhost:4832 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:27:30.398646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:30.440720Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:27:30.465483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:30.832642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:31.039422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:31.150381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:33.488701Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011142278641272:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:33.488803Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:33.515206Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011120803803086:2128];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:27:33.515269Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:27:34.099528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:27:34.145992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:27:34.226660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:27:34.292036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:27:34.359762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:27:34.430096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:27:34.557310Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011146573609093:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:34.557384Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:34.557700Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011146573609098:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:34.561639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:27:34.587275Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011146573609100:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:27:34.683432Z node 1 :TX_PROXY ERROR: Actor# [1:7488011146573609157:3460] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:27:36.081906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:27:36.135426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:27:36.225825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T16:27:36.295985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T16:27:36.337130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-31T16:27:36.373801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::AutoChooseIndexOrderByLambda [GOOD] Test command err: Trying to start YDB, gRPC: 29965, MsgBus: 6929 2025-03-31T16:26:28.021922Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010860872382805:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:28.021984Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000aa0/r3tmp/tmpSpZqvq/pdisk_1.dat 2025-03-31T16:26:28.609801Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:26:28.644050Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:26:28.644523Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:26:28.648339Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29965, node 1 2025-03-31T16:26:28.843705Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:26:28.843728Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:26:28.843737Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:26:28.843851Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6929 TClient is connected to server localhost:6929 
WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:26:29.786465Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:29.817806Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:26:29.831240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:30.067378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:30.272122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:30.384096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:32.030657Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010878052253772:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:32.030781Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:32.447948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:26:32.492164Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:26:32.532678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:26:32.610057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:26:32.652959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:26:32.736698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:26:32.809778Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010878052254289:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:32.809858Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:32.810225Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010878052254294:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:32.815457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:26:32.829877Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010878052254296:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:26:32.915983Z node 1 :TX_PROXY ERROR: Actor# [1:7488010878052254352:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:26:33.023222Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010860872382805:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:33.023301Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:26:34.032016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:26:34.987895Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438395013, txId: 281474976710673] shutting down Trying to start YDB, gRPC: 30721, MsgBus: 26697 2025-03-31T16:26:35.827688Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488010893044869508:2082];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:35.828545Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000aa0/r3tmp/tmp3qL2Ay/pdisk_1.dat 2025-03-31T16:26:36.070880Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:26:36.081640Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:26:36.081745Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:26:36.088056Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30721, node 2 2025-03-31T16:26:36.259597Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:26:36.259863Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:26:36.259879Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:26:36.260010Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26697 TClient is connected to server localhost:26697 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:26:36.793575Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:36.813067Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:26:36.865135Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:36.925661Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operatio ... don't have access permissions } 2025-03-31T16:27:19.140782Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:19.237783Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:27:19.397290Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:27:19.458693Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:27:19.525443Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:27:19.574755Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:27:19.642089Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:27:19.772709Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488011079353361113:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:19.772811Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:19.773295Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488011079353361118:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:19.778963Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:27:19.805951Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7488011079353361120:2466], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:27:19.863222Z node 6 :TX_PROXY ERROR: Actor# [6:7488011079353361185:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:27:21.510593Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 10761, MsgBus: 62889 2025-03-31T16:27:24.860169Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7488011103674952531:2083];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:27:24.860353Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000aa0/r3tmp/tmpJuI0oB/pdisk_1.dat 2025-03-31T16:27:25.330394Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:27:25.331627Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:27:25.331735Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:27:25.338981Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10761, node 7 2025-03-31T16:27:25.535761Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:27:25.535789Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:27:25.535800Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:27:25.535969Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62889 TClient is connected to server localhost:62889 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:27:26.557840Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:27:26.575426Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:27:26.594997Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:26.774874Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:27.188605Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:27.310115Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:29.867229Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7488011103674952531:2083];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:27:29.867343Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:27:31.428743Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488011133739725365:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:31.428847Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:31.516964Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:27:31.591419Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:27:31.752095Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:27:31.849446Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:27:31.936142Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:27:32.044668Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:27:32.181124Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488011138034693200:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:32.181279Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:32.183349Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488011138034693205:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:32.267193Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:27:32.292215Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7488011138034693207:2467], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:27:32.376940Z node 7 :TX_PROXY ERROR: Actor# [7:7488011138034693274:3475] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:27:33.901952Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 >> KqpReturning::Random [GOOD] >> KqpJoin::RightSemiJoin_ComplexSecondaryIndex >> KqpJoinOrder::OltpJoinTypeHintCBOTurnOFF [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH16 [GOOD] Test command err: Trying to start YDB, gRPC: 28414, MsgBus: 25445 2025-03-31T16:25:55.570436Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010720343833361:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:55.572027Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00196f/r3tmp/tmpfRuLeW/pdisk_1.dat 2025-03-31T16:25:55.965434Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:25:55.983530Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:55.983611Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:55.985526Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28414, node 1 2025-03-31T16:25:56.058999Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:25:56.059019Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:25:56.059026Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:25:56.059265Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25445 TClient is connected to server localhost:25445 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:25:56.712564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:25:58.802403Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010733228735914:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:58.802504Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:58.802725Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010733228735926:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:58.806742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:25:58.819151Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-31T16:25:58.820118Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010733228735928:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:25:58.898967Z node 1 :TX_PROXY ERROR: Actor# [1:7488010733228735979:2337] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:25:59.287957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:25:59.548868Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488010737523703558:2359];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:25:59.549068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488010737523703558:2359];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:25:59.549352Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488010737523703558:2359];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:25:59.549448Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488010737523703558:2359];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:25:59.549517Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488010737523703558:2359];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:25:59.549751Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488010737523703558:2359];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:25:59.549883Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488010737523703558:2359];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:25:59.550099Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488010737523703558:2359];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:25:59.550228Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488010737523703558:2359];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:25:59.550375Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488010737523703558:2359];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:25:59.550481Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[1:7488010737523703558:2359];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:25:59.550602Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488010737523703558:2359];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:25:59.554279Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488010737523703548:2354];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:25:59.554349Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488010737523703548:2354];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:25:59.554566Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488010737523703548:2354];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:25:59.554686Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488010737523703548:2354];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:25:59.554824Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488010737523703548:2354];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:25:59.554947Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488010737523703548:2354];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:25:59.555047Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488010737523703548:2354];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:25:59.555188Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488010737523703548:2354];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:25:59.555316Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488010737523703548:2354];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:25:59.555426Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488010737523703548:2354];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:25:59.555536Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488010737523703548:2354];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:25:59.555666Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037895;self_id=[1:7488010737523703548:2354];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:25:59.606592Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488010737523703575:2361];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:25:59.606788Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488010737523703575:2361];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abs ... oller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:17.849335Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039271;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:17.850806Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039313;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:17.860698Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039339;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:17.865554Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039227;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:17.874343Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039327;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:17.878449Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039279;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:17.889109Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:17.892104Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:17.908998Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039265;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:17.917031Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:17.928324Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:17.937135Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039293;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:17.946524Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039261;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:17.956892Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039289;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:17.964617Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039241;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:17.973378Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039311;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:17.980274Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039317;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:17.987489Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039297;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:17.994897Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039303;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:18.001827Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039329;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:18.009757Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039239;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:18.016855Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039343;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:18.023379Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039358;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:18.030508Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:18.049476Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:18.058595Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039319;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:18.064150Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039333;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:18.069490Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039335;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:18.083797Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:18.084015Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:18.095470Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039367;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:18.101507Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:18.110512Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039337;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:18.117552Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039237;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:18.122436Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039307;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:18.125735Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:18.135720Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039345;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:18.137439Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039315;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:18.143613Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039341;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:18.147889Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:18.151149Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:18.161596Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039354;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:18.165458Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:18.177256Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:18.182068Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039356;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:18.437694Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpgdm414v67mj55sgmesg7y", SessionId: ydb://session/3?node_id=1&id=NTcxMmRhNDctOTNkMzc1MTctNmRjYTQ2MzQtYTVlNjI0ZA==, Slow query, duration: 36.771664s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:27:18.880498Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:27:18.880946Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:27:18.881740Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;self_id=[1:7488010926502300178:7643];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038933;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039392;receive=72075186224039094; 2025-03-31T16:27:18.882121Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> KqpJoinOrder::TestJoinOrderHintsComplex-ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpReturning::Random [GOOD] Test command err: Trying to start YDB, gRPC: 10618, MsgBus: 21013 2025-03-31T16:26:41.919855Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010916955100582:2198];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:41.919909Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a9f/r3tmp/tmpkaaPU1/pdisk_1.dat 2025-03-31T16:26:42.741726Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:26:42.779048Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:26:42.786167Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:26:42.796118Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10618, node 1 2025-03-31T16:26:43.070505Z node 1 :NET_CLASSIFIER 
WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:26:43.070530Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:26:43.070538Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:26:43.070650Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21013 TClient is connected to server localhost:21013 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:26:44.062390Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:44.114211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:44.285891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:44.544814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:44.693055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:46.836157Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010938429938692:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:46.836268Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:46.911257Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010916955100582:2198];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:46.911333Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:26:47.223556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:26:47.266829Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:26:47.314862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:26:47.373191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:26:47.421257Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:26:47.478876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:26:47.577257Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010942724906508:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:47.577416Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:47.578097Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010942724906514:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:47.582178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:26:47.599585Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010942724906516:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:26:47.686727Z node 1 :TX_PROXY ERROR: Actor# [1:7488010942724906571:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
: Warning: Type annotation, code: 1030
:4:13: Warning: At function: RemovePrefixMembers, At function: RemoveSystemMembers, At function: PersistableRepr, At function: SqlProject
:4:27: Warning: At function: Filter, At function: Coalesce
:4:50: Warning: At function: SqlIn
:4:50: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Execution, code: 1060
:4:13: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001 Trying to start YDB, gRPC: 12857, MsgBus: 17102 2025-03-31T16:26:50.666521Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488010954030172463:2208];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a9f/r3tmp/tmpzcfAqr/pdisk_1.dat 2025-03-31T16:26:50.700289Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:26:50.791873Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:26:50.813268Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:26:50.813351Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:26:50.817907Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12857, node 2 2025-03-31T16:26:50.935770Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:26:50.935792Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:26:50.935800Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:26:50.935917Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17102 TClient is connected to server localhost:17102 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:26:51.817497Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:51.827851Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:26:51.848156Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshar ... aseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:25.599699Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:25.654097Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:27:25.704878Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:27:25.754921Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:27:25.816702Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:27:25.877232Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:27:25.967477Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:27:26.088017Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7488011108517028144:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:26.088197Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:26.088600Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7488011108517028150:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:26.093649Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:27:26.133506Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7488011108517028152:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:27:26.212162Z node 5 :TX_PROXY ERROR: Actor# [5:7488011108517028206:3459] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:27:27.700169Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 [[[2];["321"]];[["111"];[2]]] Trying to start YDB, gRPC: 5476, MsgBus: 24707 2025-03-31T16:27:30.127165Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7488011127318602156:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:27:30.127237Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a9f/r3tmp/tmp9BSuVn/pdisk_1.dat 2025-03-31T16:27:30.519938Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:27:30.568082Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:27:30.568207Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:27:30.572172Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5476, node 6 2025-03-31T16:27:30.794670Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:27:30.794700Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:27:30.794712Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:27:30.794867Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24707 TClient is connected to server localhost:24707 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:27:31.984898Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:27:32.031323Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:32.205006Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:32.501137Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:32.656738Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:35.129593Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7488011127318602156:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:27:35.129677Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:27:36.076613Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488011153088407718:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:36.076732Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:36.143726Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:27:36.236032Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:27:36.326272Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:27:36.409174Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:27:36.495441Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:27:36.594584Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:27:36.690972Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488011153088408244:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:36.691115Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:36.691224Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488011153088408249:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:36.697170Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:27:36.723207Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7488011153088408251:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:27:36.814902Z node 6 :TX_PROXY ERROR: Actor# [6:7488011153088408306:3462] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:27:38.760864Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 >> KqpIndexLookupJoin::LeftJoinRightNullFilter+StreamLookup >> KqpJoinOrder::CanonizedJoinOrderTPCH3 [GOOD] >> KqpJoin::FullOuterJoin2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::OltpJoinTypeHintCBOTurnOFF [GOOD] Test command err: Trying to start YDB, gRPC: 20564, MsgBus: 29313 2025-03-31T16:26:50.627823Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010953930482910:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:50.628614Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00192b/r3tmp/tmpLZiE9V/pdisk_1.dat 2025-03-31T16:26:51.255671Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:26:51.255752Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:26:51.257869Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:26:51.306972Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20564, node 1 2025-03-31T16:26:51.611432Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:26:51.611459Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:26:51.611470Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:26:51.611581Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29313 TClient is connected to server localhost:29313 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:26:52.458033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:52.490513Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:26:54.945885Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010971110352628:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:54.945986Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:54.951820Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010971110352640:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:54.955922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:26:54.975290Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010971110352643:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:26:55.061402Z node 1 :TX_PROXY ERROR: Actor# [1:7488010975405319990:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:26:55.438776Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:26:55.582005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:26:55.583446Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010953930482910:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:55.583681Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:26:55.672123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:26:55.728432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:26:55.775391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:26:55.957680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:26:56.012421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:26:56.081839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:26:56.131411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-31T16:26:56.204563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-03-31T16:26:56.248804Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-03-31T16:26:56.291954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 
2025-03-31T16:26:56.337911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:26:57.216820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-03-31T16:26:57.282097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T16:26:57.349006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-31T16:26:57.402644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-31T16:26:57.444634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-31T16:26:57.526263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-31T16:26:57.564274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-31T16:26:57.599132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-31T16:26:57.649632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-03-31T16:26:57.683486Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-31T16:26:57.762895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-31T16:26:57.806956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-03-31T16:26:57.870976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-03-31T16:26:57.984492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-03-31T16:26:58.028743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-03-31T16:26:58.077123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-03-31T16:26:58.120442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTa ... oller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:32.915764Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038558;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:32.921403Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038606;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:32.922402Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038530;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:32.927513Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038522;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:32.932104Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038504;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:32.933870Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038526;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:32.943170Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038600;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:32.945385Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038572;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:32.949311Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038634;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:32.955962Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038478;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:32.956436Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038596;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:32.967127Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038554;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:32.971928Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038580;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:32.978946Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038632;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:32.981580Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038628;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:32.985584Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038592;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:32.988364Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038512;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:32.995560Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038608;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:33.002650Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038630;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:33.003761Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038536;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:33.009695Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038648;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:33.009702Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038574;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:33.016099Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038604;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:33.018000Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038624;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:33.024836Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038616;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:33.026245Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038556;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:33.031906Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038654;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:33.032171Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038612;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:33.038577Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038640;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:33.038810Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038614;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:33.046275Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038584;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:33.046536Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038626;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:33.052287Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038544;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:33.055684Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038638;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:33.058137Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038644;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:33.066318Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038658;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:33.069984Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038646;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:33.076712Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038564;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:33.082141Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038660;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:33.090006Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038588;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:33.096157Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038480;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:33.096572Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038650;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:33.108423Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038656;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:33.113878Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038652;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:33.114680Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038642;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:33.243367Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpge5jqazqm38zjnyyfc0c1", SessionId: ydb://session/3?node_id=1&id=NjU4MTFhNTctMTM4YjE1NjQtM2QwMjVhNWQtODY4N2U5MTI=, Slow query, duration: 33.699480s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:27:33.489565Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:27:33.490060Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:27:33.490686Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;self_id=[1:7488011052714748698:4588];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038331;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038170;receive=72075186224038629; 2025-03-31T16:27:33.491014Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> KqpIndexLookupJoin::SimpleLeftSemiJoin+StreamLookup >> KqpFlipJoin::RightSemi_3 [GOOD] >> KqpJoinOrder::FiveWayJoinWithConstantFoldOpt+ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::FullOuterJoin2 [GOOD] Test command err: Trying to start YDB, gRPC: 2542, MsgBus: 23244 2025-03-31T16:27:34.279322Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011145755602843:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:27:34.295729Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001903/r3tmp/tmp9yFQVA/pdisk_1.dat 2025-03-31T16:27:34.984634Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:27:34.991864Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:27:34.991935Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:27:34.997330Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2542, node 1 2025-03-31T16:27:35.315598Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:27:35.315618Z node 1 
:NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:27:35.315624Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:27:35.315710Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23244 TClient is connected to server localhost:23244 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:27:36.239561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:36.265712Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:27:36.275582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:36.480982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:36.676665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:36.768158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:38.767144Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011162935473664:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:38.767270Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:39.043076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:27:39.129623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:27:39.177571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:27:39.220860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:27:39.268356Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011145755602843:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:27:39.268509Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:27:39.299901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:27:39.359150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:27:39.438933Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011167230441480:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:39.439012Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:39.439599Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011167230441485:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:39.443895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:27:39.458796Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011167230441487:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:27:39.541365Z node 1 :TX_PROXY ERROR: Actor# [1:7488011167230441543:3457] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:27:40.884322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:27:40.939837Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:27:41.004394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpFlipJoin::RightSemi_3 [GOOD] Test command err: Trying to start YDB, gRPC: 26564, MsgBus: 7077 2025-03-31T16:27:24.662239Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011099766533431:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:27:24.662759Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001908/r3tmp/tmpdXShJ0/pdisk_1.dat 2025-03-31T16:27:25.547362Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:27:25.577596Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:27:25.577679Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:27:25.584280Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26564, node 1 2025-03-31T16:27:25.859869Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:27:25.859884Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:27:25.859899Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:27:25.860000Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7077 TClient is connected to server localhost:7077 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:27:26.875797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:26.897469Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:27:26.912148Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:27.186656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:27.420953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:27.524032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:29.615192Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011099766533431:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:27:29.615252Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:27:29.843779Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011121241371553:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:29.843912Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:30.180618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:27:30.263661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:27:30.329292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:27:30.370451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:27:30.411195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:27:30.494776Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:27:30.589290Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011125536339376:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:30.589368Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:30.589796Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011125536339381:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:30.594211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:27:30.609143Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011125536339383:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:27:30.693748Z node 1 :TX_PROXY ERROR: Actor# [1:7488011125536339439:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:27:31.891931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:27:31.933430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:27:32.013823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T16:27:32.066520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 31127, MsgBus: 16828 2025-03-31T16:27:34.494707Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488011144632691537:2207];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:27:34.534917Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001908/r3tmp/tmpTrtArA/pdisk_1.dat 2025-03-31T16:27:34.775788Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:27:34.805244Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:27:34.805328Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:27:34.808168Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31127, node 2 2025-03-31T16:27:34.986743Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:27:34.986773Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:27:34.986782Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:27:34.986909Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16828 TClient is connected to server localhost:16828 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:27:35.980669Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:35.993863Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:27:36.006114Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:36.147897Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:36.497614Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:36.635979Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:39.422395Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488011144632691537:2207];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:27:39.422450Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:27:39.588406Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488011166107529623:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:39.588504Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:39.643751Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:27:39.722801Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:27:39.766908Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:27:39.809975Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:27:39.889207Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:27:39.955189Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:27:40.065637Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488011170402497442:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:40.065747Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:40.071258Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488011170402497447:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:40.077635Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:27:40.092536Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488011170402497449:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:27:40.149637Z node 2 :TX_PROXY ERROR: Actor# [2:7488011170402497503:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:27:41.257496Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:27:41.332831Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:27:41.371418Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T16:27:41.414048Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 >> KqpFlipJoin::RightSemi_1 >> KqpIndexLookupJoin::LeftSemiJoinWithLeftFilter-StreamLookup [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH3 [GOOD] Test command err: Trying to start YDB, gRPC: 5177, MsgBus: 23392 2025-03-31T16:26:03.431191Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010753564421547:2057];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:03.431315Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001966/r3tmp/tmpnWDHhO/pdisk_1.dat 2025-03-31T16:26:03.880612Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:26:03.912751Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:26:03.912872Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 5177, node 1 2025-03-31T16:26:03.917168Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:26:04.103730Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:26:04.103757Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:26:04.103768Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:26:04.103875Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23392 TClient is connected to server localhost:23392 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:26:04.746190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:04.773553Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:26:06.936259Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010766449324111:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:06.936385Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010766449324119:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:06.936461Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:06.941783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:26:06.960790Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010766449324125:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:26:07.024118Z node 1 :TX_PROXY ERROR: Actor# [1:7488010770744291472:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:26:07.357962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:26:07.702366Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488010770744291726:2355];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:26:07.702643Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488010770744291726:2355];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:26:07.702933Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488010770744291726:2355];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:26:07.703074Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488010770744291726:2355];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:26:07.703757Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488010770744291726:2355];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:26:07.703894Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488010770744291726:2355];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:26:07.704017Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488010770744291726:2355];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:26:07.704193Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488010770744291726:2355];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:26:07.704314Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488010770744291726:2355];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:26:07.704424Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488010770744291726:2355];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:26:07.704559Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;self_id=[1:7488010770744291726:2355];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:26:07.704705Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488010770744291726:2355];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:26:07.708734Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488010770744291730:2357];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:26:07.708787Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488010770744291730:2357];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:26:07.709263Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488010770744291730:2357];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:26:07.709420Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488010770744291730:2357];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:26:07.709557Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488010770744291730:2357];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:26:07.709742Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488010770744291730:2357];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:26:07.709854Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488010770744291730:2357];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:26:07.709966Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488010770744291730:2357];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:26:07.710082Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488010770744291730:2357];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:26:07.710199Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488010770744291730:2357];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:26:07.710298Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488010770744291730:2357];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:26:07.710403Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7488010770744291730:2357];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:26:07.746903Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7488010770744291738:2361];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:26:07.746967Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7488010770744291738:2361];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstr ... ller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:24.685340Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039346;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:24.686268Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039265;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:24.692947Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039285;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:24.698854Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:24.702203Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039259;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:24.706078Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039279;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:24.709122Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039261;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:24.711846Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:24.716368Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039269;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:24.717550Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:24.723004Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039362;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:24.723005Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039360;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:24.729400Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039364;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:24.729420Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:24.735452Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039277;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:24.735668Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039283;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:24.741477Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:24.745553Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:24.748060Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039382;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:24.753437Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:24.754085Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039350;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:24.759852Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039352;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:24.761484Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:24.767594Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:24.768950Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:24.774588Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039320;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:24.780304Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039354;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:24.786889Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039356;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:24.786890Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:24.794346Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:24.798000Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039316;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:24.802763Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039318;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:24.806164Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:24.808680Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:24.815809Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039380;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:24.821714Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039378;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:24.829186Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:24.833376Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039348;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:24.840388Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:24.843723Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:24.849703Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:24.854670Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:24.856632Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:24.860912Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:24.946530Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039305;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:25.055314Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpgdvqm8zhr7cs81gvxn9sy", SessionId: ydb://session/3?node_id=1&id=YTdkNmU2ODEtNjNmMTVlOWYtNGMzZDlhM2YtYTFkMmE0ZTg=, Slow query, duration: 35.594790s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:27:25.519864Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:27:25.520290Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:27:25.521087Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;self_id=[1:7488011062802124821:11155];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039392;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039094;receive=72075186224038933; 2025-03-31T16:27:25.521483Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> TFstClassSrcIdPQTest::TestTableCreated [GOOD] >> TFstClassSrcIdPQTest::NoMapping >> KqpJoin::RightSemiJoin_FullScan ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftSemiJoinWithLeftFilter-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 4700, MsgBus: 19765 2025-03-31T16:27:36.333541Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011154310310417:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:27:36.343348Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018fd/r3tmp/tmpUgOqa3/pdisk_1.dat 2025-03-31T16:27:37.177178Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:27:37.177318Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:27:37.177542Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:27:37.182580Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> 
Connected TServer::EnableGrpc on GrpcPort 4700, node 1 2025-03-31T16:27:37.478547Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:27:37.478589Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:27:37.478604Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:27:37.478710Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19765 TClient is connected to server localhost:19765 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:27:38.437971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:38.455486Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:27:38.473518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:38.674022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:38.932676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:39.099794Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:41.311379Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011154310310417:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:27:41.311444Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:27:41.549046Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011175785148532:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:41.549156Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:41.967719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:27:42.065991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:27:42.119628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:27:42.172622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:27:42.262076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:27:42.333267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:27:42.472276Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011180080116348:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:42.472379Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:42.472670Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011180080116353:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:42.477273Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:27:42.496207Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011180080116355:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:27:42.587647Z node 1 :TX_PROXY ERROR: Actor# [1:7488011180080116415:3459] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:27:43.856792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:27:43.903270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:27:43.981398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T16:27:44.072590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T16:27:44.152867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-31T16:27:44.199382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 >> KqpJoin::RightSemiJoin_SimpleKey >> TopicService::AccessRights [GOOD] >> KqpJoinOrder::GeneralPrioritiesBug1 >> KqpJoinOrder::FiveWayJoinStatsOverride-ColumnStore [GOOD] >> KqpJoinOrder::TestJoinHint2+ColumnStore >> TopicService::ThereAreGapsInTheOffsetRanges >> KqpIndexLookupJoin::LeftJoinRightNullFilter+StreamLookup [GOOD] >> KqpJoin::RightSemiJoin_ComplexSecondaryIndex [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoinStatsOverride-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 17312, MsgBus: 61767 2025-03-31T16:26:59.587809Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010993320158880:2197];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:59.588391Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001923/r3tmp/tmpCnTSgZ/pdisk_1.dat 2025-03-31T16:27:00.247127Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:27:00.249799Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:27:00.249886Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:27:00.253689Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17312, node 1 2025-03-31T16:27:00.527576Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2025-03-31T16:27:00.527623Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:27:00.527638Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:27:00.527807Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61767 TClient is connected to server localhost:61767 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:27:01.635711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:01.668157Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:27:04.494414Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011014794995895:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:04.494531Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:04.494945Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011014794995907:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:04.503672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:27:04.522050Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-31T16:27:04.523541Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011014794995909:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:27:04.587235Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010993320158880:2197];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:27:04.587313Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:27:04.615645Z node 1 :TX_PROXY ERROR: Actor# [1:7488011014794995960:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:27:05.204784Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:27:05.329041Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:27:05.365375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:27:05.402681Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:27:05.436924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:27:05.690425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:27:05.721333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:27:05.771127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:27:05.808198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-31T16:27:05.883104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-03-31T16:27:05.927927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-03-31T16:27:05.968340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 
2025-03-31T16:27:06.009837Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:27:06.671940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-03-31T16:27:06.725424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T16:27:06.770984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-31T16:27:06.813944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-31T16:27:06.869035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-31T16:27:06.931630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-31T16:27:07.004213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-31T16:27:07.074866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-31T16:27:07.112380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-03-31T16:27:07.151983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-31T16:27:07.212118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-31T16:27:07.306019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-03-31T16:27:07.383935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-03-31T16:27:07.442314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-03-31T16:27:07.480182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-03-31T16:27:07.516881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2 ... oller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:41.031634Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038653;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:41.040677Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038568;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:41.045884Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038621;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:41.050925Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038602;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:41.056209Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038618;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:41.065502Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038604;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:41.070715Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038657;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:41.081109Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038632;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:41.081619Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038606;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:41.092956Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038626;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:41.094025Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038608;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:41.105474Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038562;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:41.106583Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038576;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:41.113978Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038622;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:41.117667Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038601;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:41.122755Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038582;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:41.124180Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038630;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:41.129009Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038655;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:41.130101Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038642;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:41.135546Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038652;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:41.136780Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038634;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:41.143006Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038636;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:41.145906Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038588;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:41.149993Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038638;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:41.156790Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038640;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:41.157125Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038654;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:41.162835Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038564;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:41.169145Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038646;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:41.175539Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038660;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:41.181460Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038628;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:41.187241Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038644;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:41.192689Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038610;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:41.198487Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038594;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:41.204130Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038620;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:41.210350Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038656;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:41.216599Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038616;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:41.225810Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038584;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:41.229729Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038578;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:41.238692Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038519;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:41.239246Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038600;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:41.248177Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038650;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:41.252695Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038596;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:41.266317Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038633;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:41.280271Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038658;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:41.280744Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038648;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:41.415611Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpgef1wd5efn4yr2s3zr0fp", SessionId: ydb://session/3?node_id=1&id=Mzk4ZGY4YjEtZGUxNjg5ODktYTMwNGU2NDYtOGQzOTJlZWU=, Slow query, duration: 32.170442s, 
status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:27:41.937348Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:27:41.937829Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;self_id=[1:7488011135054106415:5924];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038629;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038331;receive=72075186224038170; 2025-03-31T16:27:41.938176Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:27:41.938936Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> KqpJoinOrder::TestJoinOrderHintsComplex+ColumnStore [GOOD] >> KqpIndexLookupJoin::SimpleLeftSemiJoin+StreamLookup [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftJoinRightNullFilter+StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 3408, MsgBus: 20913 2025-03-31T16:27:43.300360Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011181388370081:2206];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:27:43.300865Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018f8/r3tmp/tmp2nImag/pdisk_1.dat 2025-03-31T16:27:44.096448Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:27:44.103852Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:27:44.103924Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:27:44.115858Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3408, node 1 2025-03-31T16:27:44.386256Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:27:44.386280Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:27:44.386291Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:27:44.386376Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20913 TClient is connected to server localhost:20913 WaitRootIsUp 'Root'... 
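Note: the two KQP_SLOW_LOG entries above (durations 35.594790s and 32.170442s) carry the same DDL as a single escaped string in their "text:" field. It is expanded below purely for readability, with the logged \n escapes applied and indentation approximated; nothing beyond the logged text is added:

CREATE TABLE t1 (
    id1 Int32 NOT NULL,
    PRIMARY KEY (id1)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t2 (
    id2 Int64 NOT NULL,
    t1_id1 Int64 NOT NULL,
    -- random_field2 Int32
    PRIMARY KEY (id2)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t3 (
    id3 Int16 NOT NULL,
    -- random_field3 Int32
    PRIMARY KEY (id3)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);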
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:27:45.185915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:45.232708Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:27:45.254252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:45.428024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:45.644824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:45.731710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:47.932836Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011198568240884:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:47.932926Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:48.295181Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011181388370081:2206];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:27:48.295249Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:27:48.351072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:27:48.400223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:27:48.442138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:27:48.480414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:27:48.516881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:27:48.577896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:27:48.661554Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011202863208697:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:48.661646Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:48.662059Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011202863208702:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:48.666714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:27:48.685701Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011202863208704:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:27:48.760021Z node 1 :TX_PROXY ERROR: Actor# [1:7488011202863208760:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:27:49.957863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:27:50.025543Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:27:50.085538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T16:27:50.178201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T16:27:50.241103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-31T16:27:50.302167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::RightSemiJoin_ComplexSecondaryIndex [GOOD] Test command err: Trying to start YDB, gRPC: 21836, MsgBus: 22904 2025-03-31T16:27:41.016530Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011175334640013:2277];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:27:41.016571Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018fa/r3tmp/tmpIgWxE4/pdisk_1.dat 2025-03-31T16:27:41.647564Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:27:41.647634Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:27:41.650900Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:27:41.711712Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21836, node 1 2025-03-31T16:27:41.927696Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:27:41.927721Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:27:41.927728Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:27:41.927821Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22904 TClient is connected to server 
localhost:22904 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:27:42.890685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:42.931892Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:43.222705Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:43.474623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:43.586905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:45.947600Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011192514510741:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:45.947742Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:46.022528Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011175334640013:2277];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:27:46.022599Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:27:46.303475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:27:46.341942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:27:46.374835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:27:46.423753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:27:46.503244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:27:46.552448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:27:46.669377Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011196809478561:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:46.669495Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:46.669949Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011196809478566:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:46.674671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:27:46.696010Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011196809478568:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:27:46.784338Z node 1 :TX_PROXY ERROR: Actor# [1:7488011196809478624:3459] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:27:48.136275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:27:48.190859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:27:48.251844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T16:27:48.286051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T16:27:48.322733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480
: Warning: Execution, code: 1060
:4:47: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001
: Warning: Execution, code: 1060
:3:57: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::SimpleLeftSemiJoin+StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 11941, MsgBus: 28463 2025-03-31T16:27:44.411569Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011185972222619:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:27:44.436083Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018f7/r3tmp/tmpGODx3l/pdisk_1.dat 2025-03-31T16:27:45.073703Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:27:45.073810Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:27:45.075719Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:27:45.077966Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11941, node 1 2025-03-31T16:27:45.302985Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:27:45.303007Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:27:45.303014Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:27:45.303148Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28463 TClient is connected to server localhost:28463 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:27:46.447682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:46.491290Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:27:46.514481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:27:46.722507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:47.021569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:47.114949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:49.191377Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011207447060874:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:49.191487Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:49.419190Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011185972222619:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:27:49.433275Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:27:49.505002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:27:49.540766Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:27:49.576444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:27:49.654905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:27:49.690442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:27:49.734855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:27:49.828219Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011207447061394:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:49.828302Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:49.828706Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011207447061399:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:49.833300Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:27:49.848917Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011207447061401:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:27:49.910080Z node 1 :TX_PROXY ERROR: Actor# [1:7488011207447061457:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:27:51.079448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:27:51.118528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:27:51.177241Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T16:27:51.257327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T16:27:51.300326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-31T16:27:51.341752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 >> KqpJoinOrder::FiveWayJoinWithConstantFoldOpt-ColumnStore >> KqpFlipJoin::RightSemi_1 [GOOD] >> KqpFlipJoin::RightOnly_3 >> KqpIndexLookupJoin::LeftSemiJoinWithDuplicatesInRightTable-StreamLookupJoin >> KqpJoinOrder::TestJoinOrderHintsManyHintTrees ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TestJoinOrderHintsComplex+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 31460, MsgBus: 28622 2025-03-31T16:26:10.407928Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010783755786861:2130];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:10.408725Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00195b/r3tmp/tmpAoKZOZ/pdisk_1.dat 2025-03-31T16:26:10.836360Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:26:10.843629Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:26:10.843727Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:26:10.845588Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31460, node 1 2025-03-31T16:26:10.942151Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:26:10.942174Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:26:10.942184Z 
node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:26:10.942319Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28622 TClient is connected to server localhost:28622 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:26:11.606498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:11.635920Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:26:13.674627Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010796640689347:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:13.674763Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:13.677943Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010796640689359:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:13.688090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:26:13.703978Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010796640689361:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:26:13.800103Z node 1 :TX_PROXY ERROR: Actor# [1:7488010796640689412:2337] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:26:14.145587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:26:14.469215Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7488010800935656954:2359];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:26:14.469456Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7488010800935656954:2359];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:26:14.469740Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7488010800935656954:2359];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:26:14.469865Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7488010800935656954:2359];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:26:14.469985Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7488010800935656954:2359];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:26:14.470117Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7488010800935656954:2359];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:26:14.470252Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7488010800935656954:2359];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:26:14.470391Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7488010800935656954:2359];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:26:14.470524Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7488010800935656954:2359];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:26:14.470660Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7488010800935656954:2359];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:26:14.470774Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037893;self_id=[1:7488010800935656954:2359];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:26:14.470896Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7488010800935656954:2359];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:26:14.474751Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488010800935656934:2349];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:26:14.474823Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488010800935656934:2349];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:26:14.475028Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488010800935656934:2349];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:26:14.484662Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488010800935656934:2349];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:26:14.484865Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488010800935656934:2349];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:26:14.484985Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488010800935656934:2349];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:26:14.485107Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488010800935656934:2349];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:26:14.485217Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488010800935656934:2349];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:26:14.485336Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488010800935656934:2349];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:26:14.485456Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488010800935656934:2349];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:26:14.485553Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488010800935656934:2349];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:26:14.485697Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037892;self_id=[1:7488010800935656934:2349];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:26:14.508421Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488010800935656958:2361];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:26:14.508492Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488010800935656958:2361];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abs ... 10714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:36.699707Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:36.701202Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:36.705550Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:36.707448Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:36.711717Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:36.712799Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:36.717885Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039303;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:36.719525Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:36.723964Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039291;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:36.725169Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:36.729379Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:36.730577Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:36.736170Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:36.736184Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:36.741739Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:36.742070Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039335;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:36.748177Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:36.748253Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039273;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:36.754622Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039297;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:36.754622Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039237;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:36.761246Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:36.761246Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039341;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:36.767893Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039360;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:36.767892Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:36.774305Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:36.774305Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039305;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:36.779990Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:36.781189Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039241;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:36.786304Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:36.788362Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:36.793215Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:36.800101Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039332;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:36.808902Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:36.814230Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:36.820206Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:36.825801Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:36.831029Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:36.836071Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:36.841261Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:36.847805Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:36.853520Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039307;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:36.853520Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:36.860393Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:36.867450Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:37.056829Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpge4ecc1z9xk7crybtfe23", SessionId: ydb://session/3?node_id=1&id=ZmVjYzRhZmEtYTZlMzAzOWQtYTAwYjY5YWQtNGJkOGRlZjI=, Slow query, duration: 38.675475s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:27:37.377819Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:27:37.378180Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:27:37.379071Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716;
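Note: the KQP_SLOW_LOG record above (duration 38.675475s) carries its query text with escaped newlines. Expanded for readability, it is exactly the following DDL, a transcription of the logged text and nothing more:

CREATE TABLE t1 (
    id1 Int32 NOT NULL,
    PRIMARY KEY (id1)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t2 (
    id2 Int64 NOT NULL,
    t1_id1 Int64 NOT NULL,
    -- random_field2 Int32
    PRIMARY KEY (id2)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t3 (
    id3 Int16 NOT NULL,
    -- random_field3 Int32
    PRIMARY KEY (id3)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

The "Unapplied hint" warnings printed below show the hint grammar used by the TestJoinOrderHintsComplex query. As a minimal sketch of how such hints are attached to a YQL query — the PRAGMA name is an assumption and does not appear in this log; the hint bodies are copied verbatim from the warnings below:

-- Sketch only: PRAGMA name assumed, hint text taken from the warnings.
PRAGMA ydb.OptimizerHints =
    'JoinOrder( (Unused1 Unused2) (Unused3 Unused4) )
     Rows(Unused # 10e8)
     Rows(R T # 1)';
-- These hints name tables that are absent from the final join tree,
-- so the optimizer reports each of them as unapplied (code 4534).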
: Warning: Execution, code: 1060
: Warning: Unapplied hint: JoinOrder( (Unused1 Unused2) (Unused3 Unused4) ), code: 4534
: Warning: Unapplied hint: Rows(Unused # 10e8), code: 4534
: Warning: Unapplied hint: Rows(R T # 1), code: 4534
: Warning: Execution, code: 1060
: Warning: Unapplied hint: JoinOrder( (Unused1 Unused2) (Unused3 Unused4) ), code: 4534
: Warning: Unapplied hint: Rows(Unused # 10e8), code: 4534
: Warning: Unapplied hint: Rows(R T # 1), code: 4534 >> KqpJoin::RightSemiJoin_KeyPrefix >> KqpJoin::RightSemiJoin_FullScan [GOOD] >> KqpJoinOrder::FourWayJoinWithPredsAndEquivAndLeft-ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::RightSemiJoin_FullScan [GOOD] Test command err: Trying to start YDB, gRPC: 23924, MsgBus: 13056 2025-03-31T16:27:47.716338Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011202118422097:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:27:47.716781Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018f4/r3tmp/tmpRIhtJi/pdisk_1.dat 2025-03-31T16:27:48.640759Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:27:48.646845Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:27:48.646947Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:27:48.656502Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23924, node 1 2025-03-31T16:27:48.899573Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:27:48.899607Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:27:48.899614Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:27:48.899706Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13056 TClient is connected to server localhost:13056 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:27:49.874426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:49.934319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:27:50.167984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:50.374517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:50.457887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:52.518650Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011223593260237:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:52.518743Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:52.695204Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011202118422097:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:27:52.695255Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:27:52.867448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:27:52.909569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:27:52.977875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:27:53.024273Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:27:53.068044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:27:53.142508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:27:53.226924Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011227888228051:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:53.227119Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:53.234157Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011227888228056:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:53.244402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:27:53.261669Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011227888228059:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:27:53.319239Z node 1 :TX_PROXY ERROR: Actor# [1:7488011227888228115:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:27:54.385027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:27:54.443405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:27:54.476029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T16:27:54.525404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T16:27:54.566751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480
: Warning: Execution, code: 1060
:4:39: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001
: Warning: Execution, code: 1060
:3:49: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001 >> KqpJoin::RightSemiJoin_SimpleKey [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCH12 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::RightSemiJoin_SimpleKey [GOOD] Test command err: Trying to start YDB, gRPC: 19495, MsgBus: 13034 2025-03-31T16:27:48.682459Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011206557471775:2202];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:27:48.682950Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018f3/r3tmp/tmp1hcF5q/pdisk_1.dat 2025-03-31T16:27:49.389909Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:27:49.399681Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:27:49.399754Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:27:49.405894Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19495, node 1 2025-03-31T16:27:49.607615Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:27:49.607638Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:27:49.607645Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:27:49.607747Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13034 TClient is connected to server localhost:13034 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:27:50.632469Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:50.699718Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:27:50.721628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:27:50.930751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:51.178786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:51.290318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:53.409235Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011228032309887:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:53.409335Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:53.680354Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011206557471775:2202];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:27:53.680412Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:27:53.851908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:27:53.901434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:27:53.935683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:27:53.978732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:27:54.008178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:27:54.095475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:27:54.196520Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011232327277706:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:54.196653Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:54.199644Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011232327277711:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:54.206302Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:27:54.235025Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011232327277713:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:27:54.322315Z node 1 :TX_PROXY ERROR: Actor# [1:7488011232327277769:3458] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:27:55.437223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:27:55.489112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:27:55.536457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T16:27:55.576155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T16:27:55.628141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480
: Warning: Execution, code: 1060
:4:39: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001
: Warning: Execution, code: 1060
:3:49: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001 >> TPersQueueTest::CheckACLForGrpcRead [GOOD] >> TPersQueueTest::CheckKillBalancer >> KqpIndexLookupJoin::SimpleLeftOnlyJoin+StreamLookup >> KqpJoinOrder::TestJoinHint1+ColumnStore [GOOD] >> KqpJoinOrder::FiveWayJoinWithComplexPreds+ColumnStore >> OlapEstimationRowsCorrectness::TPCH10 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH12 [GOOD] Test command err: Trying to start YDB, gRPC: 19913, MsgBus: 3077 2025-03-31T16:26:19.342058Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010823073280534:2201];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:19.342567Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00194d/r3tmp/tmpPUJbOu/pdisk_1.dat 2025-03-31T16:26:19.780546Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:26:19.780658Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:26:19.782381Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:26:19.792379Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19913, node 1 2025-03-31T16:26:19.926022Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:26:19.926049Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:26:19.926057Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:26:19.926181Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3077 TClient is connected to server localhost:3077 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:26:20.643008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:26:20.667715Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:26:23.100931Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010840253150237:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:23.101007Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010840253150245:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:23.101063Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:23.104490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:26:23.113364Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010840253150251:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:26:23.173915Z node 1 :TX_PROXY ERROR: Actor# [1:7488010840253150302:2342] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:26:23.467371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:26:23.849077Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488010840253150573:2354];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:26:23.849312Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488010840253150573:2354];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:26:23.849554Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488010840253150573:2354];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:26:23.849652Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488010840253150573:2354];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:26:23.849760Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488010840253150573:2354];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:26:23.849870Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488010840253150573:2354];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:26:23.850004Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488010840253150573:2354];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:26:23.850134Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488010840253150573:2354];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:26:23.850253Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488010840253150573:2354];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:26:23.850354Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488010840253150573:2354];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:26:23.850453Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7488010840253150573:2354];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:26:23.850556Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488010840253150573:2354];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:26:23.851806Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488010840253150583:2359];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:26:23.851853Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488010840253150583:2359];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:26:23.852023Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488010840253150583:2359];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:26:23.852126Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488010840253150583:2359];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:26:23.852218Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488010840253150583:2359];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:26:23.852311Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488010840253150583:2359];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:26:23.852394Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488010840253150583:2359];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:26:23.852484Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488010840253150583:2359];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:26:23.852575Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488010840253150583:2359];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:26:23.852675Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488010840253150583:2359];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:26:23.852800Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488010840253150583:2359];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:26:23.852884Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[1:7488010840253150583:2359];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:26:23.934835Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488010840253150603:2362];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:26:23.938925Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488010840253150577:2356];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstra ... oller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:43.332298Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039284;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:43.342168Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039231;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:43.346858Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039342;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:43.354264Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039203;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:43.362827Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039340;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:43.369115Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039195;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:43.375200Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039199;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:43.377317Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039197;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:43.381390Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039193;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:43.390316Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039187;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:43.403382Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039352;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:43.408610Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039272;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:43.414992Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039207;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:43.423654Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039358;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:43.428984Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039356;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:43.435445Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039225;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:43.454356Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039189;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:43.469440Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039205;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:43.489510Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039318;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:43.508135Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039201;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:43.518965Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039254;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:43.533395Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039211;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:43.539945Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039380;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:43.546586Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039346;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:43.552480Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039263;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:43.559065Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039376;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:43.565540Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039221;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:43.571552Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039261;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:43.577427Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039247;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:43.584114Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039223;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:43.590261Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039209;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:43.597147Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039374;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:43.603705Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039370;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:43.607719Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039255;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:43.610993Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039303;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:43.615156Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039229;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:43.618319Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039191;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:43.621908Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039185;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:43.710526Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:43.718051Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039321;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:43.722267Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039215;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:43.734112Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039249;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:43.737806Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:43.741850Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039271;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:27:43.797784Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039230;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:27:43.940523Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpgedgz01zbrxk9xshak24y", SessionId: ydb://session/3?node_id=1&id=OGZkY2U1ZTAtMWE2NDNlNzEtZTMxNDcwNDUtZmU2N2NjYmQ=, Slow query, duration: 36.260752s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b
2025-03-31T16:27:44.322324Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716;
2025-03-31T16:27:44.322729Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716;
2025-03-31T16:27:44.323489Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;self_id=[1:7488011050706586507:8202];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038933;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039392;receive=72075186224039094;
2025-03-31T16:27:44.323870Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716;
>> KqpIndexLookupJoin::LeftSemiJoinWithDuplicatesInRightTable-StreamLookupJoin [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftSemiJoinWithDuplicatesInRightTable-StreamLookupJoin [GOOD]
Test command err: Trying to start YDB, gRPC: 8635, MsgBus: 11419
2025-03-31T16:27:56.058184Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011240693853119:2273];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:27:56.058238Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018ef/r3tmp/tmpMnQlyC/pdisk_1.dat
2025-03-31T16:27:56.666895Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:27:56.666974Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:27:56.668276Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-31T16:27:56.713315Z node 1 :IMPORT WARN: Table profiles were not loaded
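For readability, the query text from the KQP_SLOW_LOG entry above with its \n escapes expanded into real line breaks (the statements are copied verbatim from the logged string; only the indentation is an assumption, since the log collapses leading whitespace; the same text recurs in the KqpJoinOrder::TestJoinHint1 slow-query entry later in this log):

CREATE TABLE t1 (
    id1 Int32 NOT NULL,
    PRIMARY KEY (id1)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t2 (
    id2 Int64 NOT NULL,
    t1_id1 Int64 NOT NULL,
    -- random_field2 Int32
    PRIMARY KEY (id2)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t3 (
    id3 Int16 NOT NULL,
    -- random_field3 Int32
    PRIMARY KEY (id3)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);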
TServer::EnableGrpc on GrpcPort 8635, node 1 2025-03-31T16:27:56.913436Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:27:56.913455Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:27:56.913462Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:27:56.913560Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11419 TClient is connected to server localhost:11419 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:27:57.909500Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:57.932731Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:27:57.945886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:58.147291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:58.363037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:58.461829Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:00.314375Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011257873723883:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:00.314564Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:00.674943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:28:00.710552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:28:00.759982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:28:00.831205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:28:00.871137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:28:00.946873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:28:01.039440Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011262168691702:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:01.039519Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:01.039976Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011262168691707:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:01.046420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:28:01.058404Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011240693853119:2273];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:01.058483Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:28:01.068136Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011262168691710:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-03-31T16:28:01.147485Z node 1 :TX_PROXY ERROR: Actor# [1:7488011262168691766:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-31T16:28:02.274867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-03-31T16:28:02.324360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
>> KqpFlipJoin::RightOnly_3 [GOOD]
>> KqpJoin::RightSemiJoin_ComplexSecondaryIndexPrefix
>> KqpJoinOrder::FiveWayJoin+ColumnStore [GOOD]
>> TopicService::ThereAreGapsInTheOffsetRanges [GOOD]
>> TPersQueueTest::SetupWriteSession [GOOD]
>> TPersQueueTest::StoreNoMoreThanXSourceIDs
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TestJoinHint1+ColumnStore [GOOD]
Test command err: Trying to start YDB, gRPC: 14923, MsgBus: 20601
2025-03-31T16:26:24.112670Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010845284711236:2209];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:26:24.112874Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00193f/r3tmp/tmp75p1sI/pdisk_1.dat
2025-03-31T16:26:24.755536Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:26:24.755624Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:26:24.758270Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-31T16:26:24.796929Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 14923, node 1
2025-03-31T16:26:25.139512Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T16:26:25.139537Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T16:26:25.139545Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T16:26:25.139649Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:20601
TClient is connected to server localhost:20601
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:26:26.180550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:26.212185Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:26:28.547036Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010862464580928:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:28.547140Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010862464580939:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:28.547208Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:28.551652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:26:28.567548Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010862464580942:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:26:28.627472Z node 1 :TX_PROXY ERROR: Actor# [1:7488010862464580993:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:26:28.976360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:26:29.101737Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010845284711236:2209];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:29.101813Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:26:29.292906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488010866759548557:2355];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:26:29.293151Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488010866759548557:2355];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:26:29.293354Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488010866759548546:2349];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:26:29.293398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488010866759548557:2355];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:26:29.293447Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488010866759548546:2349];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:26:29.293479Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488010866759548557:2355];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:26:29.293542Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488010866759548557:2355];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:26:29.293623Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488010866759548546:2349];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:26:29.293623Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488010866759548557:2355];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:26:29.293781Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037901;self_id=[1:7488010866759548557:2355];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:26:29.293807Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488010866759548546:2349];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:26:29.293907Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488010866759548557:2355];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:26:29.293920Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488010866759548546:2349];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:26:29.294007Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488010866759548546:2349];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:26:29.294018Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488010866759548557:2355];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:26:29.294132Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488010866759548557:2355];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:26:29.294149Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488010866759548546:2349];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:26:29.294267Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488010866759548557:2355];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:26:29.294276Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488010866759548546:2349];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:26:29.294419Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488010866759548546:2349];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:26:29.294540Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488010866759548557:2355];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:26:29.294633Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488010866759548546:2349];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:26:29.295399Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224037895;self_id=[1:7488010866759548546:2349];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:26:29.295556Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488010866759548546:2349];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:26:29.326401Z node 1 :T ... tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.452111Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039315;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.458815Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039321;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.459761Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039299;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.464907Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.466195Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039323;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.471136Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039285;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.471631Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039283;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.477258Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039249;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.481574Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039339;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.495747Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.495753Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.505096Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.505595Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039327;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.514360Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.518027Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.519769Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.525058Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.528623Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.537850Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.540334Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.546898Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039279;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.549365Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.558447Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.560038Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.568998Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.572111Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039281;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.576298Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039303;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.581477Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.586843Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039343;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.589133Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039293;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.597498Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039263;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.598394Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039333;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.603849Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039337;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.607403Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039301;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.610046Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039313;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.613104Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.615778Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.618537Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.621374Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.626997Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.628090Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039345;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.632926Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.634430Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.642870Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:27:51.647701Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:27:51.649269Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:27:51.882570Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpgekxvevd7m2s0dtmzb28h", SessionId: ydb://session/3?node_id=1&id=MzgyZTI5MTEtODA2Mzc1OWEtNzYwY2U4YmYtMjNmNGIzOGY=, Slow query, duration: 37.642673s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b
2025-03-31T16:27:52.241298Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716;
2025-03-31T16:27:52.241426Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716;
2025-03-31T16:27:52.242094Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716;
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> OlapEstimationRowsCorrectness::TPCH10 [GOOD]
Test command err: Trying to start YDB, gRPC: 14798, MsgBus: 63399
2025-03-31T16:26:16.995104Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010808538041596:2196];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:26:17.003437Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001956/r3tmp/tmpwcXqVb/pdisk_1.dat
2025-03-31T16:26:17.527832Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:26:17.546764Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:26:17.546869Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:26:17.550253Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 14798, node 1
2025-03-31T16:26:17.643613Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T16:26:17.643631Z node 1 :NET_CLASSIFIER WARN: will try
to initialize from file: (empty maybe) 2025-03-31T16:26:17.643637Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:26:17.643747Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63399 TClient is connected to server localhost:63399 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:26:18.374333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:18.393129Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:26:20.471804Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010825717911314:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:20.471946Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:20.472417Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010825717911326:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:20.476146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:26:20.486958Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010825717911328:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:26:20.575602Z node 1 :TX_PROXY ERROR: Actor# [1:7488010825717911379:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:26:20.942008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:26:21.247926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488010830012878897:2354];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:26:21.248168Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488010830012878897:2354];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:26:21.248448Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488010830012878897:2354];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:26:21.248570Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488010830012878897:2354];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:26:21.248676Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488010830012878897:2354];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:26:21.248831Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488010830012878897:2354];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:26:21.248932Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488010830012878897:2354];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:26:21.249030Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488010830012878897:2354];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:26:21.249039Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488010830012878931:2363];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:26:21.249088Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488010830012878931:2363];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:26:21.249174Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037901;self_id=[1:7488010830012878897:2354];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:26:21.249254Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488010830012878931:2363];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:26:21.249282Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488010830012878897:2354];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:26:21.249349Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488010830012878931:2363];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:26:21.249380Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488010830012878897:2354];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:26:21.249438Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488010830012878931:2363];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:26:21.249476Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488010830012878897:2354];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:26:21.249521Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488010830012878931:2363];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:26:21.249637Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488010830012878931:2363];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:26:21.249754Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488010830012878931:2363];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:26:21.249852Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488010830012878931:2363];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:26:21.249942Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488010830012878931:2363];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:26:21.250035Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488010830012878931:2363];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:26:21.250121Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037892;self_id=[1:7488010830012878931:2363];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:26:21.297113Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488010830012878891:2351];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:26:21.307236Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488010830012878891:2351];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abs ... oller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:44.129429Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039358;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:44.129429Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039364;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:44.173608Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039273;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:44.200553Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039317;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:44.208262Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039209;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:44.217232Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:44.217578Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039301;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:44.224371Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039307;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:44.268003Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:44.269148Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:44.276586Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:44.277983Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:44.284536Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:44.285903Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039343;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:44.291520Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:44.292744Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:44.298544Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:44.300114Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:44.305578Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:44.306882Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039335;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:44.312077Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:44.316884Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039341;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:44.317996Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:44.324266Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039241;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:44.324266Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:44.332244Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039339;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:44.336845Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:44.338157Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:44.344044Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:44.345909Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:44.350487Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:44.353314Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039329;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:44.357245Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039297;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:44.360485Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039327;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:44.364225Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039275;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:44.367761Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039325;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:44.371356Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039269;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:44.374773Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039313;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:44.378388Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039271;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:44.381972Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039305;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:44.385073Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039237;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:44.388896Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039303;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:44.391588Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039239;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:44.396196Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039323;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:44.399064Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:44.500870Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpgebkveg7j83k2yz7e1c9t", SessionId: ydb://session/3?node_id=1&id=Nzc3ODc2NmUtNDQ2NGU4ZjMtMjhmMTIyYWUtYTVlZjNmMWY=, Slow query, duration: 38.777014s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:27:44.813465Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:27:44.813873Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:27:44.814344Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;self_id=[1:7488011027581409961:7625];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038933;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039094;receive=72075186224039392; 2025-03-31T16:27:44.814771Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> TFstClassSrcIdPQTest::NoMapping [GOOD] >> TFstClassSrcIdPQTest::ProperPartitionSelected ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpFlipJoin::RightOnly_3 [GOOD] Test command err: Trying to start YDB, gRPC: 10984, MsgBus: 15947 2025-03-31T16:27:46.588902Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011195337081087:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:27:46.589010Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018f5/r3tmp/tmpUobz9i/pdisk_1.dat 2025-03-31T16:27:47.317112Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:27:47.350729Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:27:47.350831Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:27:47.364261Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10984, node 1 
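The slow query text from the KQP_SLOW_LOG record above, transcribed with its \n escapes expanded for readability (verbatim from the logged record; nothing added):

    CREATE TABLE t1 (
        id1 Int32 NOT NULL,
        PRIMARY KEY (id1)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t2 (
        id2 Int64 NOT NULL,
        t1_id1 Int64 NOT NULL,
        -- random_field2 Int32
        PRIMARY KEY (id2)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t3 (
        id3 Int16 NOT NULL,
        -- random_field3 Int32
        PRIMARY KEY (id3)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);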
2025-03-31T16:27:47.607593Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:27:47.607629Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:27:47.607636Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:27:47.607739Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15947 TClient is connected to server localhost:15947 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:27:48.625194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:48.659670Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:27:48.677715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:48.884236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:49.178438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:49.297683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:51.469062Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011216811919223:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:51.469163Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:51.523257Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011195337081087:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:27:51.523327Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:27:51.843113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:27:51.912042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:27:51.951828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:27:51.997058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:27:52.035952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:27:52.093489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:27:52.187718Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011221106887036:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:52.187805Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:52.188009Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011221106887041:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:52.192587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:27:52.215169Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011221106887043:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:27:52.311995Z node 1 :TX_PROXY ERROR: Actor# [1:7488011221106887099:3458] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:27:53.535519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:27:53.581834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:27:53.651697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T16:27:53.693281Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 22181, MsgBus: 11541 2025-03-31T16:27:55.902590Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488011233796489624:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:27:55.912778Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018f5/r3tmp/tmppzg0zA/pdisk_1.dat 2025-03-31T16:27:56.154866Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:27:56.194832Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:27:56.194914Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:27:56.198674Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22181, node 2 2025-03-31T16:27:56.379615Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:27:56.379637Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:27:56.379644Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:27:56.379757Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11541 TClient is connected to server localhost:11541 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:27:57.012374Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:57.097407Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:27:57.192256Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-31T16:27:57.486114Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:27:57.581929Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-31T16:28:00.459283Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488011255271327700:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:00.459396Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:00.527599Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:28:00.597380Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:28:00.668792Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:28:00.801604Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:28:00.897911Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:28:00.899294Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488011233796489624:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:00.899392Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:28:01.001304Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:28:01.083393Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488011259566295521:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:01.083486Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:01.084329Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488011259566295526:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:01.087986Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:28:01.106452Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488011259566295528:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:28:01.174011Z node 2 :TX_PROXY ERROR: Actor# [2:7488011259566295584:3450] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:28:02.574401Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:28:02.659538Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-31T16:28:02.703396Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-31T16:28:02.752850Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 >> KqpJoinOrder::FiveWayJoinWithComplexPreds-ColumnStore >> KqpJoin::RightSemiJoin_KeyPrefix [GOOD] >> TopicService::OnePartitionAndNoGapsInTheOffsets >> KqpJoinOrder::CanonizedJoinOrderTPCH11 >> KqpIndexLookupJoin::LeftJoinOnlyRightColumn-StreamLookup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoin+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 22211, MsgBus: 13434 2025-03-31T16:26:17.529242Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010813717596134:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:17.529653Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001951/r3tmp/tmpBv9k3Q/pdisk_1.dat 2025-03-31T16:26:18.061515Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:26:18.061653Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:26:18.092462Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:26:18.132060Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22211, node 1 2025-03-31T16:26:18.359804Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:26:18.359832Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:26:18.359840Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:26:18.359959Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13434 TClient is connected to server localhost:13434 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:26:19.118187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:21.221855Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010830897465848:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:21.221990Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:21.222281Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010830897465860:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:21.226126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:26:21.241067Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010830897465862:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:26:21.312456Z node 1 :TX_PROXY ERROR: Actor# [1:7488010830897465913:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:26:21.774630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:26:22.096336Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488010830897466141:2358];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:26:22.096569Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488010830897466141:2358];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:26:22.096821Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488010830897466141:2358];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:26:22.096939Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488010830897466141:2358];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:26:22.097036Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488010830897466141:2358];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:26:22.097127Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488010830897466141:2358];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:26:22.097236Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488010830897466141:2358];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:26:22.097340Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488010830897466141:2358];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:26:22.097453Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488010830897466141:2358];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:26:22.097552Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488010830897466141:2358];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:26:22.097647Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037895;self_id=[1:7488010830897466141:2358];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:26:22.097760Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488010830897466141:2358];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:26:22.100402Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488010830897466147:2361];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:26:22.100452Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488010830897466147:2361];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:26:22.100643Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488010830897466147:2361];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:26:22.100749Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488010830897466147:2361];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:26:22.100880Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488010830897466147:2361];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:26:22.100980Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488010830897466147:2361];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:26:22.101077Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488010830897466147:2361];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:26:22.101184Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488010830897466147:2361];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:26:22.101311Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488010830897466147:2361];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:26:22.101405Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488010830897466147:2361];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:26:22.101506Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488010830897466147:2361];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:26:22.101600Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037894;self_id=[1:7488010830897466147:2361];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:26:22.152282Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488010830897466137:2356];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:26:22.152366Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488010830897466137:2356];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:26:22.152472Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;sel ... ller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:47.848570Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039358;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:47.854114Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039312;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:47.855561Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:47.861021Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039334;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:47.862467Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:47.867756Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039330;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:47.868438Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039300;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:47.874427Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039218;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:47.874603Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039324;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:47.881194Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:47.887315Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039194;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:47.888593Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039252;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 
2025-03-31T16:27:47.897319Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:47.899894Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:47.905564Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039306;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:47.909505Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:47.920252Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:47.922091Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039236;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:47.931256Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039310;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:47.935222Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:47.941011Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039308;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:47.942874Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:47.952359Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039272;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:47.959784Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:47.965763Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:47.966595Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039354;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:47.972928Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:47.974638Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039380;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:47.979954Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039344;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:47.980533Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039268;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:47.985635Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039192;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:47.995275Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039234;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:48.005287Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039296;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:48.014827Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039326;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:48.033098Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:48.039031Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:48.053273Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:48.067788Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:48.067815Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:48.075440Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:48.079035Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039372;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:48.083672Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:48.090675Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:48.092403Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:48.261586Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039378;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:48.312293Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpgecw83z21221g7b12veyc", SessionId: ydb://session/3?node_id=1&id=Y2MwNWZlMWItMjY1NjNlNmUtM2I1MGE5MWUtMjU2Mzg2YTU=, Slow query, duration: 41.295351s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:27:48.637672Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:27:48.638151Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:27:48.638504Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;self_id=[1:7488011148725102530:11105];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039392;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038933;receive=72075186224039094; 2025-03-31T16:27:48.638833Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::RightSemiJoin_KeyPrefix [GOOD] Test command err: Trying to start YDB, gRPC: 14165, MsgBus: 23149 2025-03-31T16:27:57.301878Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011244380556788:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:27:57.305305Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018ed/r3tmp/tmpFzf1UT/pdisk_1.dat 2025-03-31T16:27:57.973247Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:27:57.973338Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:27:57.983892Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14165, node 1 2025-03-31T16:27:58.108135Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:27:58.249466Z node 1 :NET_CLASSIFIER WARN: 
distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:27:58.249485Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:27:58.249492Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:27:58.249586Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23149 TClient is connected to server localhost:23149 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:27:59.206578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:59.248608Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:27:59.269600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:59.476973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:59.728251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:59.825292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:02.033195Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011265855395025:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:02.033327Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:02.303543Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011244380556788:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:02.303606Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:28:02.392818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:28:02.432313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:28:02.518964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:28:02.561651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:28:02.602951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:28:02.684955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:28:02.790152Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011265855395549:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:02.790224Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:02.790403Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011265855395554:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:02.793956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:28:02.820223Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011265855395556:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:28:02.915016Z node 1 :TX_PROXY ERROR: Actor# [1:7488011265855395611:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:28:04.138132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:28:04.208798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:28:04.302548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T16:28:04.360647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T16:28:04.414796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480
: Warning: Execution, code: 1060
:4:39: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001
: Warning: Execution, code: 1060
:3:57: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001 >> KqpJoinOrder::TPCDS88-ColumnStore >> KqpJoinOrder::TPCDS90-ColumnStore [GOOD] >> TColumnShardTestSchema::ForgetAfterFail [GOOD] >> KqpIndexLookupJoin::SimpleLeftOnlyJoin+StreamLookup [GOOD] >> KqpJoin::RightSemiJoin_ComplexKey >> KqpJoinOrder::CanonizedJoinOrderTPCC ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ForgetAfterFail [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=saved_at;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143438859.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123438859.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123437659.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; 2025-03-31T16:24:20.702336Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:24:20.810651Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:24:20.836973Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:24:20.837261Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:24:20.845614Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:24:20.845826Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:24:20.846101Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:24:20.846236Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:24:20.846365Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:24:20.846504Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:24:20.846621Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:24:20.846730Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:24:20.846868Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:24:20.847007Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:24:20.847170Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:24:20.847282Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:24:20.878014Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:24:20.878142Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:24:20.878186Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:24:20.878316Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:24:20.878447Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:24:20.878493Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:24:20.878524Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:24:20.878589Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:24:20.878634Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:24:20.878662Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:24:20.878680Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:24:20.878819Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:24:20.878884Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:24:20.878915Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:24:20.878935Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:24:20.878998Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:24:20.879036Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:24:20.879061Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:24:20.879098Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:24:20.879145Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:24:20.879177Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:24:20.879205Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:24:20.879239Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:24:20.879273Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:24:20.879305Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:24:20.879610Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=39; 2025-03-31T16:24:20.879697Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=43; 2025-03-31T16:24:20.879760Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=28; 2025-03-31T16:24:20.879823Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=31; 2025-03-31T16:24:20.879947Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:24:20.879999Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:24:20.880031Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:24:20.880192Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:24:20.880225Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:24:20.880260Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:24:20.880364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:24:20.880398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:24:20.880427Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-31T16:24:20.880570Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description= ... 
estamp;);;;); 2025-03-31T16:28:10.507690Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1902:3876];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:1;records_count:14867;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:28:10.507738Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1902:3876];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-31T16:28:10.507777Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1902:3876];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:201;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-03-31T16:28:10.507905Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1902:3876];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-31T16:28:10.508015Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1902:3876];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:1;records_count:14867;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:28:10.508056Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1902:3876];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-31T16:28:10.508147Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1902:3876];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:232;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;);columns=1;rows=14867; 2025-03-31T16:28:10.508210Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1902:3876];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:252;stage=data_format;batch_size=118936;num_rows=14867;batch_columns=timestamp; 2025-03-31T16:28:10.508346Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1902:3876];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:1901:3875];bytes=118936;rows=14867;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us]; Got TEvKqpCompute::TEvScanData [1:1902:3876]->[1:1901:3875] 
2025-03-31T16:28:10.508448Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1902:3876];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:270;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:28:10.508554Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1902:3876];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:28:10.508659Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1902:3876];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:28:10.508783Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1902:3876];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-31T16:28:10.508865Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1902:3876];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:190;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:28:10.508950Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1902:3876];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:195;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:28:10.508987Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1902:3876] finished for tablet 9437184 2025-03-31T16:28:10.509481Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=3;SelfId=[1:1902:3876];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:416;event=scan_finish;compute_actor_id=[1:1901:3875];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.004},{"events":["l_bootstrap"],"t":0.011},{"events":["f_processing","f_task_result"],"t":0.015},{"events":["f_ack","l_task_result"],"t":1.112},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":1.12}],"full":{"a":1743438489388546,"name":"_full_task","f":1743438489388546,"d_finished":0,"c":0,"l":1743438490509046,"d":1120500},"events":[{"name":"bootstrap","f":1743438489388759,"d_finished":11355,"c":1,"l":1743438489400114,"d":11355},{"a":1743438490508766,"name":"ack","f":1743438490501305,"d_finished":6780,"c":7,"l":1743438490508688,"d":7060},{"a":1743438490508753,"name":"processing","f":1743438489403676,"d_finished":565203,"c":56,"l":1743438490508690,"d":565496},{"name":"ProduceResults","f":1743438489392925,"d_finished":17431,"c":65,"l":1743438490508970,"d":17431},{"a":1743438490508973,"name":"Finish","f":1743438490508973,"d_finished":0,"c":0,"l":1743438490509046,"d":73},{"name":"task_result","f":1743438489403701,"d_finished":557121,"c":49,"l":1743438490501087,"d":557121}],"id":"9437184::7"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-31T16:28:10.509562Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1902:3876];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=send_data;compute_actor_id=[1:1901:3875];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-31T16:28:10.510039Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=3;SelfId=[1:1902:3876];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:371;event=scan_finished;compute_actor_id=[1:1901:3875];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.004},{"events":["l_bootstrap"],"t":0.011},{"events":["f_processing","f_task_result"],"t":0.015},{"events":["f_ack","l_task_result"],"t":1.112},{"events":["l_ProduceResults","f_Finish"],"t":1.12},{"events":["l_ack","l_processing","l_Finish"],"t":1.121}],"full":{"a":1743438489388546,"name":"_full_task","f":1743438489388546,"d_finished":0,"c":0,"l":1743438490509605,"d":1121059},"events":[{"name":"bootstrap","f":1743438489388759,"d_finished":11355,"c":1,"l":1743438489400114,"d":11355},{"a":1743438490508766,"name":"ack","f":1743438490501305,"d_finished":6780,"c":7,"l":1743438490508688,"d":7619},{"a":1743438490508753,"name":"processing","f":1743438489403676,"d_finished":565203,"c":56,"l":1743438490508690,"d":566055},{"name":"ProduceResults","f":1743438489392925,"d_finished":17431,"c":65,"l":1743438490508970,"d":17431},{"a":1743438490508973,"name":"Finish","f":1743438490508973,"d_finished":0,"c":0,"l":1743438490509605,"d":632},{"name":"task_result","f":1743438489403701,"d_finished":557121,"c":49,"l":1743438490501087,"d":557121}],"id":"9437184::7"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1902:3876]->[1:1901:3875] 2025-03-31T16:28:10.510128Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1902:3876];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-31T16:28:09.388010Z;index_granules=0;index_portions=7;index_batches=1260;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=10402524;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=10402524;selected_rows=0; 2025-03-31T16:28:10.510171Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1902:3876];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-31T16:28:10.510442Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:1902:3876];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 160000/10402332 160000/10402332 0/0 160000/10402524 >> KqpJoinOrder::TPCDS94-ColumnStore [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCH1 [GOOD] >> KqpJoinOrder::TPCH12_100 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS90-ColumnStore [GOOD] Test command 
err: Trying to start YDB, gRPC: 23687, MsgBus: 1597 2025-03-31T16:27:09.284557Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011035512265563:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:27:09.321247Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001916/r3tmp/tmp9L7J0F/pdisk_1.dat 2025-03-31T16:27:09.906094Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:27:09.906206Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:27:09.910707Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:27:09.910901Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23687, node 1 2025-03-31T16:27:10.115280Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:27:10.115297Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:27:10.115315Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:27:10.115432Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1597 TClient is connected to server localhost:1597 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:27:10.942108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:10.985374Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:27:13.378862Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011052692135421:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:13.379238Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011052692135410:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:13.379304Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:13.383245Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:27:13.394567Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011052692135424:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:27:13.487554Z node 1 :TX_PROXY ERROR: Actor# [1:7488011052692135475:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:27:13.903516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:27:14.036703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:27:14.082121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:27:14.124717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:27:14.165731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:27:14.282145Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011035512265563:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:27:14.282208Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:27:14.427596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:27:14.476461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:27:14.536501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:27:14.588090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-31T16:27:14.646165Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-03-31T16:27:14.687375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-03-31T16:27:14.721805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 
2025-03-31T16:27:14.769154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:27:15.446812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-03-31T16:27:15.504658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T16:27:15.580677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-31T16:27:15.634520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-31T16:27:15.676493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-31T16:27:15.728137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-31T16:27:15.801003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-31T16:27:15.892086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-31T16:27:15.980561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-03-31T16:27:16.027916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-31T16:27:16.073260Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-31T16:27:16.144993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-03-31T16:27:16.219935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-03-31T16:27:16.268463Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-03-31T16:27:16.300166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-03-31T16:27:16.343788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-03-31T16:27:16.428069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable ... oller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.232228Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038643;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.234387Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038482;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.237654Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038530;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.239589Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.243221Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038653;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.244840Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038616;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.248337Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038434;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.250055Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038536;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.254216Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038651;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.256085Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038638;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.261361Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038614;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.261879Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038500;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.267023Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038661;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.267106Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038628;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.272357Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038474;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.272386Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038524;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.277894Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038649;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.277893Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038627;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.286981Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038462;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.292629Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038450;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.298377Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038470;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.300781Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038498;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.306796Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038445;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.312057Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038514;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.312075Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.318192Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038538;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.325487Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038432;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.330737Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038592;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.331777Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038657;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.337250Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038464;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.337255Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038570;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.342603Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038633;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.342712Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038443;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.348082Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038504;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.348205Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038441;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.353620Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038586;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.353620Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038558;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.358667Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038439;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.359207Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038635;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.364905Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038466;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.364998Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038446;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.372136Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.372256Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038456;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.377994Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038564;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.382503Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038612;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:51.547320Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpgeqxg55h20hqwexcexged", SessionId: ydb://session/3?node_id=1&id=MWRlYmY0MWUtZGM3MGVhNzgtZWM5NWU5MTQtMWRjYTc3Zjc=, Slow query, duration: 33.226823s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:27:52.154812Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:27:52.155428Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;self_id=[1:7488011087051880801:2980];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038170;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038629;receive=72075186224038331; 2025-03-31T16:27:52.155526Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:27:52.156227Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::SimpleLeftOnlyJoin+StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 21738, MsgBus: 64926 2025-03-31T16:28:01.924363Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011261686974825:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:01.924818Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018eb/r3tmp/tmpiqFVxQ/pdisk_1.dat 2025-03-31T16:28:02.734110Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:28:02.742928Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:28:02.743023Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:28:02.748054Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21738, node 1 2025-03-31T16:28:03.020193Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:28:03.020213Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:28:03.020219Z node 1 :NET_CLASSIFIER WARN: 
failed to initialize from file: (empty maybe) 2025-03-31T16:28:03.020313Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64926 TClient is connected to server localhost:64926 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:28:03.834726Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:03.879822Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:28:03.890088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:04.110704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:04.364860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:04.491387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:06.901975Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011283161812958:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:06.902087Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:06.908281Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011261686974825:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:06.908338Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:28:07.372128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:28:07.413996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:28:07.456562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:28:07.505387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:28:07.560423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:28:07.665487Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:28:07.749542Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011287456780774:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:07.749634Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:07.749912Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011287456780779:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:07.754016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:28:07.769495Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011287456780781:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:28:07.852844Z node 1 :TX_PROXY ERROR: Actor# [1:7488011287456780838:3459] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:28:09.097733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:28:09.145491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:28:09.204606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T16:28:09.247446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T16:28:09.292739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-31T16:28:09.340108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 >> KqpJoinOrder::TPCHRandomJoinViewJustWorks-ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS94-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 15075, MsgBus: 26313 2025-03-31T16:27:02.902842Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011005857236093:2139];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:27:02.908978Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001921/r3tmp/tmpBuLQYp/pdisk_1.dat 2025-03-31T16:27:03.582852Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:27:03.602080Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:27:03.602168Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:27:03.604365Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15075, node 1 2025-03-31T16:27:03.727752Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:27:03.727770Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:27:03.727779Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:27:03.727870Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26313 
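
Note on the recurring WorkloadService/TX_PROXY sequence above: on first use of a freshly started database, KQP fails to fetch the workload-manager pool "default" (NOT_FOUND), a TPoolCreatorActor creates it at /Root/.metadata/workload_manager/pools/default, the "Transaction ... completed, doublechecking" retry fires, and TX_PROXY then reports "path exist, request accepts it". This is the benign create-if-missing bootstrap of the default resource pool, repeated once per test database, not a test failure. Roughly, the pool the actor materializes corresponds to user-level DDL like the sketch below; this is an illustrative assumption (the pool name and option values are hypothetical, and the tests rely on the automatic bootstrap rather than issuing any such statement):

    -- Hedged sketch: approximate user-level equivalent of the automatic
    -- bootstrap of the workload-manager pool observed in the log above.
    -- Pool name and option values are illustrative assumptions, not taken
    -- from this log.
    CREATE RESOURCE POOL my_default_like_pool WITH (
        CONCURRENT_QUERY_LIMIT = -1, -- assumption: unlimited concurrency
        QUEUE_SIZE = -1              -- assumption: unbounded queue
    );
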
TClient is connected to server localhost:26313 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:27:04.572664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:06.748407Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011023037105856:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:06.748525Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:06.751193Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011023037105868:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:06.755612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:27:06.772801Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011023037105870:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:27:06.841266Z node 1 :TX_PROXY ERROR: Actor# [1:7488011023037105921:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:27:07.224032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:27:07.376864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:27:07.461115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:27:07.522960Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:27:07.557153Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:27:07.712472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:27:07.786484Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:27:07.828180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:27:07.875582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-31T16:27:07.903748Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011005857236093:2139];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:27:07.903804Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:27:07.907977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-03-31T16:27:07.946917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-03-31T16:27:08.037128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 
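
The long runs of FLAT_TX_SCHEMESHARD "Operation part proposed ok, but propose itself is undo unsafe" warnings here are emitted once per ESchemeOpCreateTable suboperation while the test creates its TPC-DS schema; the message only records that the propose step cannot be rolled back, and the tests complete with [GOOD] despite it, so it reads as expected setup noise. The -ColumnStore/+ColumnStore suffix in the test names (e.g. TPCDS94-ColumnStore) selects whether that schema is built from row-store or column-store tables; a minimal sketch of the difference, with a hypothetical table not taken from the actual TPC-DS DDL:

    -- Hedged sketch: STORE = COLUMN is what the +ColumnStore test variants
    -- use (cf. the CREATE TABLE statements quoted later in this log);
    -- omitting the clause yields the default row-store table used by the
    -- -ColumnStore variants.
    CREATE TABLE demo_sales (
        id     Int64 NOT NULL,
        amount Int64,
        PRIMARY KEY (id)
    ) WITH (STORE = COLUMN);
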
2025-03-31T16:27:08.096424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:27:08.845323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-03-31T16:27:08.898102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T16:27:08.940860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-31T16:27:09.047976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-31T16:27:09.138010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-31T16:27:09.181261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-31T16:27:09.215076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-31T16:27:09.258384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-31T16:27:09.295357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-03-31T16:27:09.369890Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-31T16:27:09.410077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-31T16:27:09.446946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-03-31T16:27:09.485636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-03-31T16:27:09.528927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-03-31T16:27:09.564591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-03-31T16:27:09.615673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-03-31T16:27:09.687736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 2025-03-31T16:27:09.725881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but p ... 76710714; 2025-03-31T16:27:45.327024Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038515;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:45.336788Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038469;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:45.341050Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038449;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:45.346525Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038425;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:45.350077Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038467;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:45.356800Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038513;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:45.361310Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038457;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:45.368803Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038431;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:45.373423Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038435;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:45.379411Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038487;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:45.380827Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038453;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:45.385232Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038459;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:45.391620Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038437;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:45.397674Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038451;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:45.403858Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038429;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:45.410473Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038461;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:45.414863Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038445;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:45.424744Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038441;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:45.429657Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038493;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:45.434669Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038463;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:45.444205Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038519;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:45.448761Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038481;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:45.458975Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038488;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:45.471653Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038497;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:45.481167Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038518;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:45.493717Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038489;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:45.495444Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038512;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:45.510347Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038526;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:45.515275Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038522;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:45.525169Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038524;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:45.528912Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038517;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:45.542486Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038443;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:45.548016Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038486;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:45.549215Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038477;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:45.555852Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038511;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:45.570719Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038510;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:45.573118Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038521;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:45.584330Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038475;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:45.603508Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038491;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:45.727580Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpgegyzdmt0bdkm65mmpe0p", SessionId: ydb://session/3?node_id=1&id=NDg4YTVkZDEtMzgxYjViYy1lZGFmZTYxZS0xMjMzNWI3ZA==, Slow query, duration: 34.527799s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:27:46.320006Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;self_id=[1:7488011057396851365:2975];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038170;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038331;receive=72075186224038629; 
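
For readability, the two statements quoted in this test's KQP_SLOW_LOG entries — the table-setup DDL in the entry just above (duration 34.527799s) and the TPC-DS Q94 text in the slow-query entry a few lines below (duration 11.490861s) — transcribed verbatim with their \n escapes expanded (indentation normalized, content unchanged):

    CREATE TABLE t1 (
        id1 Int32 NOT NULL,
        PRIMARY KEY (id1)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t2 (
        id2 Int64 NOT NULL,
        t1_id1 Int64 NOT NULL,
        -- random_field2 Int32
        PRIMARY KEY (id2)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t3 (
        id3 Int16 NOT NULL,
        -- random_field3 Int32
        PRIMARY KEY (id3)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    pragma TablePathPrefix = "/Root/test/ds/";

    -- NB: Subquerys
    $bla1 = (select ws_order_number
             from web_sales
             group by ws_order_number
             having COUNT(DISTINCT ws_warehouse_sk) > 1);

    -- start query 1 in stream 0 using template query94.tpl and seed 2031708268
    select
        count(distinct ws1.ws_order_number) as `order count`
       ,sum(ws_ext_ship_cost) as `total shipping cost`
       ,sum(ws_net_profit) as `total net profit`
    from
        web_sales ws1
        cross join date_dim
        cross join customer_address
        cross join web_site
        left semi join $bla1 bla1 on (ws1.ws_order_number = bla1.ws_order_number)
        left only join web_returns on (ws1.ws_order_number = web_returns.wr_order_number)
    where
        cast(d_date as date) between cast('1999-4-01' as date) and
            (cast('1999-4-01' as date) + DateTime::IntervalFromDays(60))
    and ws1.ws_ship_date_sk = d_date_sk
    and ws1.ws_ship_addr_sk = ca_address_sk
    and ca_state = 'NE'
    and ws1.ws_web_site_sk = web_site_sk
    and web_company_name = 'pri'
    order by `order count`
    limit 100;
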
2025-03-31T16:27:46.320111Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:27:46.320368Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:27:46.320805Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:28:08.219852Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpgfxdr4j41qesrk4ffrf0m", SessionId: ydb://session/3?node_id=1&id=NDg4YTVkZDEtMzgxYjViYy1lZGFmZTYxZS0xMjMzNWI3ZA==, Slow query, duration: 11.490861s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "pragma TablePathPrefix = \"/Root/test/ds/\";\n\n-- NB: Subquerys\n$bla1 = (select ws_order_number\n from web_sales\n group by ws_order_number\n having COUNT(DISTINCT ws_warehouse_sk) > 1);\n\n-- start query 1 in stream 0 using template query94.tpl and seed 2031708268\nselect\n count(distinct ws1.ws_order_number) as `order count`\n ,sum(ws_ext_ship_cost) as `total shipping cost`\n ,sum(ws_net_profit) as `total net profit`\nfrom\n web_sales ws1\n cross join date_dim\n cross join customer_address\n cross join web_site\n left semi join $bla1 bla1 on (ws1.ws_order_number = bla1.ws_order_number)\n left only join web_returns on (ws1.ws_order_number = web_returns.wr_order_number)\nwhere\n cast(d_date as date) between cast('1999-4-01' as date) and\n (cast('1999-4-01' as date) + DateTime::IntervalFromDays(60))\nand ws1.ws_ship_date_sk = d_date_sk\nand ws1.ws_ship_addr_sk = ca_address_sk\nand ca_state = 'NE'\nand ws1.ws_web_site_sk = web_site_sk\nand web_company_name = 'pri'\norder by `order count`\nlimit 100;\n", parameters: 0b >> KqpJoinOrder::TPCDS90+ColumnStore >> KqpJoin::JoinLeftPureExclusion ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH1 [GOOD] Test command err: Trying to start YDB, gRPC: 29202, MsgBus: 12127 2025-03-31T16:26:20.316104Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010828423877626:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:20.316661Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00194c/r3tmp/tmpLM9nsD/pdisk_1.dat 2025-03-31T16:26:20.919918Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:26:20.941605Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:26:20.941699Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:26:20.944508Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29202, node 1 2025-03-31T16:26:21.066624Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:26:21.066644Z node 1 :NET_CLASSIFIER WARN: will try to 
initialize from file: (empty maybe) 2025-03-31T16:26:21.066651Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:26:21.066754Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12127 TClient is connected to server localhost:12127 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:26:21.793232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:21.815965Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:26:24.024151Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010845603747338:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:24.024262Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:24.024662Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010845603747350:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:24.028742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:26:24.043073Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010845603747352:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:26:24.132553Z node 1 :TX_PROXY ERROR: Actor# [1:7488010845603747403:2340] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:26:24.471120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:26:24.844657Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488010845603747670:2354];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:26:24.844888Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488010845603747670:2354];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:26:24.845172Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488010845603747670:2354];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:26:24.845303Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488010845603747670:2354];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:26:24.845412Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488010845603747670:2354];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:26:24.845530Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488010845603747670:2354];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:26:24.845640Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488010845603747670:2354];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:26:24.845771Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488010845603747670:2354];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:26:24.845898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488010845603747670:2354];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:26:24.846004Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488010845603747670:2354];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:26:24.846105Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037903;self_id=[1:7488010845603747670:2354];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:26:24.846252Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488010845603747670:2354];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:26:24.850729Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488010845603747641:2349];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:26:24.851287Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488010845603747641:2349];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:26:24.851502Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488010845603747641:2349];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:26:24.851605Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488010845603747641:2349];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:26:24.851738Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488010845603747641:2349];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:26:24.851856Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488010845603747641:2349];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:26:24.851957Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488010845603747641:2349];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:26:24.852061Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488010845603747641:2349];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:26:24.852168Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488010845603747641:2349];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:26:24.852269Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488010845603747641:2349];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:26:24.852380Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488010845603747641:2349];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:26:24.852494Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037895;self_id=[1:7488010845603747641:2349];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:26:24.886375Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488010845603747672:2355];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:26:24.886437Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488010845603747672:2355];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abs ... -31T16:27:52.210349Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039325;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:52.221927Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:52.223738Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:52.238497Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:52.239269Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:52.250315Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:52.253922Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039345;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:52.265439Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:52.268827Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039287;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:52.280475Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:52.283829Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:52.294673Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:52.299617Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:52.305303Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:52.313124Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:52.314788Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:52.325290Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:52.329693Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:52.340112Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:52.344277Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:52.350415Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:52.354748Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:52.360710Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:52.369367Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:52.375831Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:52.382162Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:52.386363Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039341;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:52.396645Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:52.401440Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:52.411230Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:52.415990Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:52.423045Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:52.426158Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:52.436967Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:52.440246Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:52.449839Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:52.454674Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:52.466257Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:52.470304Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:52.481582Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:52.617610Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039255;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:52.667361Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpgehwq646dg3cf9wa05gqk", SessionId: ydb://session/3?node_id=1&id=MTYyOTc4MmQtYTlmMDYyOWQtZDVjM2ViNzEtYjQxZjNhZA==, Slow query, duration: 40.514481s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- 
random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:27:53.063319Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:27:53.063791Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:27:53.065274Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;self_id=[1:7488011116186733483:9302];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039094;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038933;receive=72075186224039392; 2025-03-31T16:27:53.065672Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:27:53.919392Z node 1 :KQP_YQL WARN: TraceId: 01jqpgft2j2t5n45akwdvt0qa3, SessionId: CompileActor 2025-03-31 16:27:53.891 WARN ydb-core-kqp-ut-join(pid=522890, tid=0x00007FEBD4997640) [KQP] kqp_opt_phy_olap_agg.cpp:50: Expected TCoMember callable to get column under aggregation. Got: Failed to render expression to pretty string: yql/essentials/ast/yql_expr.cpp:1973 BuildValueNode(): requirement ctx.AllowFreeArgs failed, message: Free arguments are not allowed 2025-03-31T16:27:56.089056Z node 1 :KQP_YQL WARN: TraceId: 01jqpgfw7b4r049cnf7hdefnre, SessionId: CompileActor 2025-03-31 16:27:56.088 WARN ydb-core-kqp-ut-join(pid=522890, tid=0x00007FEBD4997640) [KQP] kqp_opt_phy_olap_agg.cpp:50: Expected TCoMember callable to get column under aggregation. 
Got: Failed to render expression to pretty string: yql/essentials/ast/yql_expr.cpp:1973 BuildValueNode(): requirement ctx.AllowFreeArgs failed, message: Free arguments are not allowed ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCH12_100 [GOOD] Test command err: Trying to start YDB, gRPC: 25185, MsgBus: 20000 2025-03-31T16:26:26.427108Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010851072784758:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:26.427149Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00193d/r3tmp/tmpwOpz1q/pdisk_1.dat 2025-03-31T16:26:27.174917Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:26:27.186579Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:26:27.186650Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:26:27.188273Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25185, node 1 2025-03-31T16:26:27.437802Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:26:27.437827Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:26:27.437834Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:26:27.437956Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20000 TClient is connected to server localhost:20000 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:26:28.198103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:28.227947Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:26:30.801346Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010868252654601:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:30.801431Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:30.807215Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010868252654609:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:30.811928Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:26:30.821808Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010868252654615:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:26:30.896973Z node 1 :TX_PROXY ERROR: Actor# [1:7488010868252654666:2340] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:26:31.361520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:26:31.430149Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010851072784758:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:31.430224Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:26:31.803698Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488010872547622241:2358];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:26:31.804170Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488010872547622241:2358];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:26:31.804453Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488010872547622241:2358];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:26:31.804591Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488010872547622241:2358];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:26:31.804692Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488010872547622241:2358];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:26:31.804819Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488010872547622241:2358];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:26:31.804947Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488010872547622241:2358];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:26:31.805048Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488010872547622241:2358];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:26:31.805187Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488010872547622241:2358];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:26:31.805309Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;self_id=[1:7488010872547622241:2358];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:26:31.805432Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488010872547622241:2358];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:26:31.805535Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488010872547622241:2358];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:26:31.844057Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488010872547622265:2364];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:26:31.844135Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488010872547622265:2364];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:26:31.844406Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488010872547622265:2364];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:26:31.844529Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488010872547622265:2364];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:26:31.844632Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488010872547622265:2364];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:26:31.844751Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488010872547622265:2364];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:26:31.844854Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488010872547622265:2364];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:26:31.844965Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488010872547622265:2364];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:26:31.845063Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488010872547622265:2364];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:26:31.845170Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488010872547622265:2364];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:26:31.845271Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037894;self_id=[1:7488010872547622265:2364];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:26:31.845388Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488010872547622265:2364];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:26:31.914818Z node 1 :T ... tablet_id=72075186224039279;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:56.294146Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039266;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:56.294146Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039304;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:56.300033Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039313;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:56.300032Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039282;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:56.306259Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:56.311396Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039335;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:56.312685Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039309;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:56.318687Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039257;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:56.328296Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039294;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:56.332503Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039308;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:56.338109Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039376;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:56.346635Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039278;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:56.347351Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039284;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:56.357115Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:56.360798Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:56.366982Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039311;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:56.370563Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039325;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:56.376343Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039265;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:56.380689Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039254;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:56.382107Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039270;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:56.386654Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:56.387534Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:56.392122Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039273;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:56.392612Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:56.397719Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:56.399390Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039344;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:56.403262Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039370;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:56.404791Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:56.413054Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039380;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:56.414955Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039310;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:56.418688Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039318;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:56.420915Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:56.424742Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039341;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:56.430899Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039261;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:56.434146Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039263;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:56.440614Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:56.445293Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039350;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:56.454763Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039306;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:56.456934Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039298;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:56.464541Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039281;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:56.471485Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039339;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:56.478391Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039330;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:56.481471Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039305;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:56.492686Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039285;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:56.496486Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039280;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:56.504650Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039264;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:56.659105Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpgeqyr6xabjxeqb1nvpvf7", SessionId: ydb://session/3?node_id=1&id=OTAxMzI1YTYtNGY3OTRmODMtN2E2ZmM3NmMtMzhiMWM1NGY=, Slow query, duration: 38.298011s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:27:56.911511Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:27:56.911862Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:27:56.912398Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> KqpJoinOrder::FiveWayJoinWithPredsAndEquiv+ColumnStore [GOOD] >> KqpFlipJoin::Inner_3 >> KqpJoin::RightSemiJoin_ComplexSecondaryIndexPrefix [GOOD] >> KqpJoinOrder::TestJoinHint1-ColumnStore [GOOD] >> KqpJoinOrder::TPCDS95+ColumnStore >> KqpJoin::LeftJoinWithNull+StreamLookupJoin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::RightSemiJoin_ComplexSecondaryIndexPrefix [GOOD] Test command err: Trying to start YDB, gRPC: 28544, MsgBus: 8623 2025-03-31T16:28:05.995800Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011276828948053:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:05.995897Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018e9/r3tmp/tmpJijtVp/pdisk_1.dat 2025-03-31T16:28:06.746220Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:28:06.752584Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:28:06.752632Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:28:06.754638Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28544, node 1 2025-03-31T16:28:06.991713Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:28:06.991740Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:28:06.991747Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:28:06.991833Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8623 TClient is connected to server localhost:8623 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:28:07.838441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:07.867474Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:07.897301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:28:08.113744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:08.390634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:08.502434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:10.841086Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011298303786332:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:10.841272Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:10.995986Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011276828948053:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:10.996046Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:28:11.217272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:28:11.279863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:28:11.355596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:28:11.392445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:28:11.428351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:28:11.484090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:28:11.547986Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011302598754142:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:11.548069Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:11.548274Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011302598754147:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:11.552919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:28:11.570113Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011302598754149:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:28:11.651998Z node 1 :TX_PROXY ERROR: Actor# [1:7488011302598754203:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:28:12.947310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:28:13.019846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:28:13.079549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T16:28:13.138606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T16:28:13.228162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480
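The KQP_SLOW_LOG entry earlier in this output reports a 38-second DDL batch (duration: 38.298011s). For readability, this is its query text with the \n escapes expanded; the statements below are verbatim from the log:

CREATE TABLE t1 (
    id1 Int32 NOT NULL,
    PRIMARY KEY (id1)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t2 (
    id2 Int64 NOT NULL,
    t1_id1 Int64 NOT NULL,
    -- random_field2 Int32
    PRIMARY KEY (id2)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t3 (
    id3 Int16 NOT NULL,
    -- random_field3 Int32
    PRIMARY KEY (id3)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);
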
: Warning: Execution, code: 1060
:4:39: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001
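The code 8001 warning above means the cost-based optimizer ran without table statistics and fell back to the default join order. A minimal sketch of how statistics could be collected beforehand, assuming the ANALYZE statement available in recent YDB releases and hypothetical table paths (neither is taken from this log):

-- Hedged sketch: collect column-table statistics so the CBO can cost join orders.
-- The paths below are hypothetical examples.
ANALYZE `/Root/t1`;
ANALYZE `/Root/t2`;
ANALYZE `/Root/t3`;
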
: Warning: Execution, code: 1060
:3:49: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001 >> KqpIndexLookupJoin::LeftJoinOnlyRightColumn-StreamLookup [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoinWithPredsAndEquiv+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 4276, MsgBus: 15078 2025-03-31T16:26:30.231904Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010869049096152:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:30.233981Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00193c/r3tmp/tmpYM35NW/pdisk_1.dat 2025-03-31T16:26:30.729985Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:26:30.731165Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:26:30.731264Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:26:30.736048Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4276, node 1 2025-03-31T16:26:30.854834Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:26:30.854854Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:26:30.854861Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:26:30.854980Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15078 TClient is connected to server localhost:15078 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:26:31.632564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:34.319523Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010886228966000:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:34.319984Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010886228965992:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:34.320076Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:34.325871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:26:34.346326Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010886228966006:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:26:34.444429Z node 1 :TX_PROXY ERROR: Actor# [1:7488010886228966057:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:26:34.794150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:26:35.236218Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7488010890523933662:2362];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:26:35.236502Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7488010890523933662:2362];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:26:35.236817Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7488010890523933662:2362];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:26:35.236949Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7488010890523933662:2362];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:26:35.237061Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7488010890523933662:2362];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:26:35.237182Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7488010890523933662:2362];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:26:35.237310Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7488010890523933662:2362];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:26:35.237465Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7488010890523933662:2362];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:26:35.237591Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7488010890523933662:2362];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:26:35.237720Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7488010890523933662:2362];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:26:35.237831Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037897;self_id=[1:7488010890523933662:2362];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:26:35.237933Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7488010890523933662:2362];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:26:35.242147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7488010890523933629:2356];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:26:35.242204Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7488010890523933629:2356];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:26:35.242433Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7488010890523933629:2356];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:26:35.242536Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7488010890523933629:2356];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:26:35.242648Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7488010890523933629:2356];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:26:35.242754Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7488010890523933629:2356];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:26:35.242853Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7488010890523933629:2356];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:26:35.242976Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7488010890523933629:2356];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:26:35.243183Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7488010890523933629:2356];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:26:35.243291Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7488010890523933629:2356];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:26:35.243421Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7488010890523933629:2356];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:26:35.243540Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037893;self_id=[1:7488010890523933629:2356];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:26:35.325856Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488010890523933619:2351];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:26:35.325926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488010890523933619:2351];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:26:35.326157Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_ ... tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:58.290980Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039368;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:58.291468Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:58.297830Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039354;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:58.303924Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:58.310817Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039382;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:58.317409Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039360;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:58.323813Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:58.329314Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:58.334224Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:58.339148Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:58.340914Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:58.345877Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:58.347487Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:58.352183Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:58.354128Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:58.359851Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:58.361504Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039352;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:58.366439Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:58.369113Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:58.374132Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:58.377727Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:58.381997Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:58.384254Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:58.390621Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:58.393357Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:58.399696Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:58.403079Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:58.406025Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:58.410045Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:58.413048Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:58.416654Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:58.422660Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:58.429547Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:58.430188Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:58.436228Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:58.443054Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:58.443949Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:58.449339Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:58.450407Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:58.455298Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:58.458260Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:58.464958Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:58.466900Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:58.476874Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:58.481549Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:58.485685Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:58.751157Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpgesawcvd2dth99ky9b2bn", SessionId: ydb://session/3?node_id=1&id=ZjU1NGJmOTktNjU2YWFhMjktNmY2MjAxYWYtNjdiOThlZmY=, Slow query, duration: 38.978278s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:27:59.065627Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:27:59.065675Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:27:59.066379Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftJoinOnlyRightColumn-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 28596, MsgBus: 28808 2025-03-31T16:28:09.071652Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011296276636394:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:09.072038Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018e7/r3tmp/tmpHwDFQV/pdisk_1.dat 2025-03-31T16:28:09.804030Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:28:09.829359Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:28:09.829445Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:28:09.836569Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 
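The Slow query entry above (duration: 38.978278s) repeats the same DDL batch; most of that time goes into bootstrapping 3 × 240 column shards, one tablet per requested partition, which is especially costly under the {asan} build. A hedged sketch of the same schema with a smaller, hypothetical partition floor, shown only to illustrate the knob; the value 16 is an assumption, not from this log:

-- Hedged sketch: fewer minimum partitions means far fewer tablets to bootstrap
-- in a sanitizer build. 16 is a hypothetical value for illustration.
CREATE TABLE t1 (
    id1 Int32 NOT NULL,
    PRIMARY KEY (id1)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 16);
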
TServer::EnableGrpc on GrpcPort 28596, node 1 2025-03-31T16:28:10.091662Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:28:10.091681Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:28:10.091689Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:28:10.091780Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28808 TClient is connected to server localhost:28808 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:28:11.092721Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:11.112617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:11.293044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:11.566764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:11.707477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:14.042037Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011296276636394:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:14.042124Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:28:14.134422Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011317751474531:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:14.134520Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:14.656436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:28:14.703128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:28:14.789080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:28:14.850111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:28:14.904341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:28:14.957596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:28:15.047156Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011322046442347:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:15.047228Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:15.047452Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011322046442352:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:15.051580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:28:15.070575Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-31T16:28:15.071236Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011322046442354:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:28:15.138743Z node 1 :TX_PROXY ERROR: Actor# [1:7488011322046442413:3461] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:28:16.326074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:28:16.390668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:28:16.443655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T16:28:16.509044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T16:28:16.581289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-31T16:28:16.648101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TestJoinHint1-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 5113, MsgBus: 25776 2025-03-31T16:27:25.050701Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011100131150044:2204];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:27:25.051280Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001909/r3tmp/tmpZjlWiN/pdisk_1.dat 2025-03-31T16:27:25.814585Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:27:25.814703Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:27:25.820603Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5113, node 1 2025-03-31T16:27:25.885291Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:27:25.891225Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:27:25.891252Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:27:26.070244Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:27:26.070273Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:27:26.070282Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 
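Aside: the default-resource-pool churn above repeats a fixed bootstrap sequence in each test: TPoolFetcherActor reports NOT_FOUND, ESchemeOpCreateResourcePool is proposed, TPoolCreatorActor schedules a "doublechecking" retry, and TX_PROXY then logs a "path exist, request accepts it" error once the pool is already there. A minimal Python sketch to confirm that ordering in a saved copy of this output; the file name is hypothetical, and the markers are copied from the records above:

import re

# Timestamps look like 2025-03-31T16:28:15.047156Z in the records above.
TS = re.compile(r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z")

# One marker per stage of the default-pool bootstrap sequence.
STAGES = {
    "fetch_not_found": "Failed to fetch pool info",
    "create_pool": "ESchemeOpCreateResourcePool",
    "retry_doublecheck": "Scheduled retry for error",
    "path_exists_race": "error: path exist, request accepts it",
}

def pool_bootstrap_events(path):
    """Yield (timestamp, stage) pairs in file order from a saved log."""
    with open(path, encoding="utf-8", errors="replace") as fh:
        for line in fh:
            ts = TS.search(line)
            if not ts:
                continue
            for stage, marker in STAGES.items():
                if marker in line:
                    yield ts.group(0), stage

if __name__ == "__main__":
    # "ya_test_output.log" is a hypothetical path for the saved console output.
    for ts, stage in pool_bootstrap_events("ya_test_output.log"):
        print(ts, stage)
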
2025-03-31T16:27:26.070413Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25776 TClient is connected to server localhost:25776 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:27:27.321121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:29.626477Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011121605987054:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:29.626577Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011121605987046:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:29.626897Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:29.631597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:27:29.648353Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011121605987060:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:27:29.745255Z node 1 :TX_PROXY ERROR: Actor# [1:7488011121605987111:2343] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:27:30.032332Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011100131150044:2204];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:27:30.058452Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:27:30.125402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:27:30.264810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:27:30.303882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:27:30.341241Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:27:30.389229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:27:30.587112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:27:30.656709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:27:30.705040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:27:30.762374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-31T16:27:30.810603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-03-31T16:27:30.847914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-03-31T16:27:30.890335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 
2025-03-31T16:27:30.941418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:27:31.751686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-03-31T16:27:31.833175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T16:27:31.898478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-31T16:27:31.972061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-31T16:27:32.063687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-31T16:27:32.144007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-31T16:27:32.186147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-31T16:27:32.255978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-31T16:27:32.345821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-03-31T16:27:32.422747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-31T16:27:32.461146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-31T16:27:32.511709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-03-31T16:27:32.553115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-03-31T16:27:32.626947Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-03-31T16:27:32.664496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-03-31T16:27:32.744885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-03-31T16:27:32.795710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESche ... oller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:11.762636Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038480;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:11.766666Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038436;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:11.769237Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038658;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:11.773425Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038430;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:11.776138Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038478;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:11.779498Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038586;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:11.782113Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038452;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:11.785971Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038656;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:11.791910Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038441;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:11.792237Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038660;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:11.800549Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038613;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:11.803241Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038443;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:11.807225Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038611;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:11.816738Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038449;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:11.821197Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038466;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:11.826314Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038426;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:11.826414Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038621;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:11.831642Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038617;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:11.837588Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038619;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:11.839144Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038428;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:11.844344Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:11.844842Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038653;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:11.851003Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038447;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:11.856795Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038625;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:11.860768Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038445;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:11.863547Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038633;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:11.866423Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038635;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:11.869385Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038659;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:11.874929Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038637;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:11.881979Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038657;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:11.882529Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038661;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:11.887698Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038655;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:11.893257Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038645;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:11.896034Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038641;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:11.900036Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038651;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:11.916901Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038623;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:11.922844Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038609;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:11.928566Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038615;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:11.933760Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038627;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:11.938838Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038631;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:11.944615Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038649;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:11.949919Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038639;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:11.954816Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:11.960980Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038647;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:11.968351Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038643;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:12.092033Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpgf7pkbjabxfn3f19hbeaf", SessionId: ydb://session/3?node_id=1&id=ZmE3NzQwMDctNjE1NjQ1ZWItZDUyMzljM2EtZTZlNWJiZWI=, Slow query, duration: 37.608079s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:28:12.400338Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:28:12.400823Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:28:12.401384Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;self_id=[1:7488011160260699773:3045];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038170;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038331;receive=72075186224038629; 2025-03-31T16:28:12.401723Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> KqpJoin::ComplexJoin >> KqpIndexLookupJoin::LeftOnly-StreamLookup >> KqpJoin::RightSemiJoin_ComplexKey [GOOD] >> TopicService::OnePartitionAndNoGapsInTheOffsets [GOOD] >> KqpJoinOrder::FiveWayJoinWithPredsAndEquiv-ColumnStore >> KqpIndexLookupJoin::LeftOnly+StreamLookup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::RightSemiJoin_ComplexKey [GOOD] Test command err: Trying to start YDB, gRPC: 28138, MsgBus: 2440 2025-03-31T16:28:12.115781Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011308396205760:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:12.116175Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018e3/r3tmp/tmp36H3JB/pdisk_1.dat 2025-03-31T16:28:12.828617Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:28:12.828695Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:28:12.855059Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:28:12.856600Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28138, node 1 2025-03-31T16:28:13.180466Z node 1 
:NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:28:13.180489Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:28:13.180495Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:28:13.180581Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2440 TClient is connected to server localhost:2440 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:28:14.162121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:14.187862Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:28:14.199869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:14.393261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:14.626710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:14.730480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:16.620848Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011325576076590:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:16.620941Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:16.945075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:28:16.992454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:28:17.052978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:28:17.106339Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011308396205760:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:17.106909Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:28:17.122262Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:28:17.179588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:28:17.239176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:28:17.316551Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011329871044401:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:17.316620Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:17.316819Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011329871044406:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:17.321525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:28:17.338934Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011329871044408:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:28:17.414864Z node 1 :TX_PROXY ERROR: Actor# [1:7488011329871044464:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:28:18.453332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:28:18.507855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:28:18.566225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T16:28:18.654634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T16:28:18.725576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480
: Warning: Execution, code: 1060
:4:43: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001
: Warning: Execution, code: 1060
:3:56: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001 >> KqpJoinOrder::TPCDS94+ColumnStore [GOOD] >> KqpJoin::JoinLeftPureExclusion [GOOD] >> TopicService::MultiplePartitionsAndNoGapsInTheOffsets ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::JoinLeftPureExclusion [GOOD] Test command err: Trying to start YDB, gRPC: 15869, MsgBus: 7318 2025-03-31T16:28:15.779545Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011321192153036:2200];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:15.783894Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018de/r3tmp/tmpqyJL1Q/pdisk_1.dat 2025-03-31T16:28:16.501253Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:28:16.501347Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:28:16.506121Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:28:16.517589Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15869, node 1 2025-03-31T16:28:16.713010Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:28:16.713030Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:28:16.713037Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:28:16.713132Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7318 TClient is connected to server localhost:7318 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:28:17.555961Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
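Aside: the KQP_SLOW_LOG records in this output (e.g. the 37.608079s CREATE TABLE batch above) embed the query as a single text: "..." field with C-style escapes (\n, \", \\), which is hard to read inline. A minimal sketch for recovering the readable SQL plus the reported duration from a saved log; the file path is hypothetical and the regexes are derived from the record format visible here:

import codecs
import re

# KQP_SLOW_LOG records carry the query as: text: "..." with C-style escapes,
# all on one physical line of the saved log.
SLOW_TEXT = re.compile(r'KQP_SLOW_LOG.*?text: "((?:[^"\\]|\\.)*)"')
DURATION = re.compile(r"duration: ([0-9.]+)s")

def slow_queries(path):
    """Yield (duration_seconds_or_None, readable_sql) per KQP_SLOW_LOG record."""
    with open(path, encoding="utf-8", errors="replace") as fh:
        for line in fh:
            m = SLOW_TEXT.search(line)
            if not m:
                continue
            d = DURATION.search(line)
            # unicode_escape is fine here because the logged DDL is ASCII.
            sql = codecs.decode(m.group(1), "unicode_escape")
            yield (float(d.group(1)) if d else None), sql

Applied to the record above, this yields the three CREATE TABLE ... WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240) statements that the log attributes a ~37 s duration under the asan build.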
2025-03-31T16:28:17.583981Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:28:17.609258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:17.878092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:18.157575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:18.252116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:20.452702Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011342666991169:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:20.452796Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:20.755216Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011321192153036:2200];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:20.755310Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:28:20.771708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:28:20.869695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:28:20.933004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:28:20.972557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:28:21.016549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:28:21.064865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:28:21.129265Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011346961958979:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:21.129350Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:21.129582Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011346961958984:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:21.134055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:28:21.146940Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011346961958986:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:28:21.204330Z node 1 :TX_PROXY ERROR: Actor# [1:7488011346961959040:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpIndexLookupJoin::SimpleLeftJoin+StreamLookup >> TFstClassSrcIdPQTest::ProperPartitionSelected [GOOD] >> TPQCompatTest::DiscoverTopics >> KqpFlipJoin::Inner_3 [GOOD] >> KqpFlipJoin::LeftSemi_1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS94+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 23599, MsgBus: 11021 2025-03-31T16:26:19.132800Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010823995974464:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:19.132851Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00194f/r3tmp/tmpA3DQTp/pdisk_1.dat 2025-03-31T16:26:19.751263Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:26:19.769945Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:26:19.770050Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:26:19.775702Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23599, node 1 2025-03-31T16:26:19.921897Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:26:19.921930Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:26:19.921941Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:26:19.922083Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11021 TClient is connected to server localhost:11021 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
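Aside: by volume, much of this output is the FLAT_TX_SCHEMESHARD "Operation part proposed ok, but propose itself is undo unsafe" warning, emitted once per suboperation. A quick histogram by suboperation type quantifies the noise in a saved log (hypothetical file name; note that in this transcript several records can share one physical line, which findall tolerates):

import re
from collections import Counter

# e.g. "... undo unsafe, suboperation type: ESchemeOpCreateTable, opId: ..."
SUBOP = re.compile(r"undo unsafe, suboperation type: (\w+)")

def undo_unsafe_histogram(path):
    """Count schemeshard 'undo unsafe' warnings by suboperation type."""
    counts = Counter()
    with open(path, encoding="utf-8", errors="replace") as fh:
        for line in fh:
            counts.update(SUBOP.findall(line))
    return counts

if __name__ == "__main__":
    # "ya_test_output.log" is a hypothetical path for the saved console output.
    for subop, n in undo_unsafe_histogram("ya_test_output.log").most_common():
        print(f"{n:6d}  {subop}")
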
2025-03-31T16:26:20.823390Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:23.053916Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010841175844306:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:23.054068Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:23.054848Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010841175844318:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:23.063853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:26:23.074652Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010841175844320:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:26:23.163148Z node 1 :TX_PROXY ERROR: Actor# [1:7488010841175844371:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:26:23.581917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:26:23.886818Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488010841175844640:2356];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:26:23.886818Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488010841175844634:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:26:23.887013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488010841175844634:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:26:23.887339Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488010841175844640:2356];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:26:23.887346Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488010841175844634:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:26:23.887515Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488010841175844634:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:26:23.887574Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488010841175844640:2356];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:26:23.887662Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488010841175844634:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:26:23.887694Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488010841175844640:2356];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:26:23.887804Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488010841175844634:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:26:23.887813Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488010841175844640:2356];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 
2025-03-31T16:26:23.887917Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488010841175844640:2356];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:26:23.887929Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488010841175844634:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:26:23.888017Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488010841175844640:2356];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:26:23.888043Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488010841175844634:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:26:23.888120Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488010841175844640:2356];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:26:23.888209Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488010841175844634:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:26:23.888309Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488010841175844640:2356];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:26:23.888463Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488010841175844634:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:26:23.888512Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488010841175844640:2356];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:26:23.888701Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488010841175844634:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:26:23.888731Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488010841175844640:2356];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:26:23.889537Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488010841175844634:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:26:23.893776Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037896;self_id=[1:7488010841175844640:2356];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:26:23.944494Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7488010841175844642:2357];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:26:23.944563Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7488010841175844642:2357];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:26:23.944820Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;sel ... 76710714; 2025-03-31T16:27:46.497715Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039311;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:46.502498Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039236;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:46.503847Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039252;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:46.508966Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039348;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:46.508974Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039283;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:46.513931Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039285;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:46.515770Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039255;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:46.519056Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039288;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:46.523843Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039291;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:46.526676Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039230;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:46.528371Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039258;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:46.537359Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039239;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:46.538978Z node 1 
:TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039213;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:46.542971Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039323;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:46.550552Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039298;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:46.550693Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039225;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:46.556938Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039300;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:46.564186Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039215;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:46.570965Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039224;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:46.579318Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039330;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:46.586385Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039261;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:46.593779Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039271;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:46.600389Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039244;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:46.606733Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039293;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:46.613619Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039206;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:46.620097Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039262;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:46.625678Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039249;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:46.632269Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039211;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 
2025-03-31T16:27:46.638560Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039277;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:46.645817Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039247;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:46.649645Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039223;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:46.653124Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039296;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:46.657005Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039219;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:46.660005Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039270;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:46.666919Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039240;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:46.680657Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039259;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:46.683865Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039266;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:46.689894Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039242;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:46.690295Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039207;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:27:46.832544Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpgeg2tb7b9tmm8yecjv3m0", SessionId: ydb://session/3?node_id=1&id=YWYyMTJlMzUtYjRlZjExZGItMTAwMjQ3ZTUtZTIxMmQ2MQ==, Slow query, duration: 36.533856s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:27:47.416321Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:27:47.416819Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:27:47.417592Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;self_id=[1:7488011051629278941:7888];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038933;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039392;receive=72075186224039094; 2025-03-31T16:27:47.417986Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:28:14.305645Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpgfzq11q5hk9cmqppvpzfe", SessionId: ydb://session/3?node_id=1&id=YWYyMTJlMzUtYjRlZjExZGItMTAwMjQ3ZTUtZTIxMmQ2MQ==, Slow query, duration: 15.231236s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "pragma TablePathPrefix = \"/Root/test/ds/\";\n\n-- NB: Subquerys\n$bla1 = (select ws_order_number\n from web_sales\n group by ws_order_number\n having COUNT(DISTINCT ws_warehouse_sk) > 1);\n\n-- start query 1 in stream 0 using template query94.tpl and seed 2031708268\nselect\n count(distinct ws1.ws_order_number) as `order count`\n ,sum(ws_ext_ship_cost) as `total shipping cost`\n ,sum(ws_net_profit) as `total net profit`\nfrom\n web_sales ws1\n cross join date_dim\n cross join customer_address\n cross join web_site\n left semi join $bla1 bla1 on (ws1.ws_order_number = bla1.ws_order_number)\n left only join web_returns on (ws1.ws_order_number = web_returns.wr_order_number)\nwhere\n cast(d_date as date) between cast('1999-4-01' as date) and\n (cast('1999-4-01' as date) + DateTime::IntervalFromDays(60))\nand ws1.ws_ship_date_sk = d_date_sk\nand ws1.ws_ship_addr_sk = ca_address_sk\nand ca_state = 'NE'\nand ws1.ws_web_site_sk = web_site_sk\nand web_company_name = 'pri'\norder by `order count`\nlimit 100;\n", parameters: 0b >> KqpJoinOrder::CanonizedJoinOrderTPCH20 [GOOD] >> KqpJoin::LeftJoinWithNull+StreamLookupJoin [GOOD] >> KqpJoinOrder::TestJoinHint2-ColumnStore >> KqpJoinOrder::CanonizedJoinOrderLookupBug [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::LeftJoinWithNull+StreamLookupJoin [GOOD] Test command err: Trying to start YDB, gRPC: 8223, MsgBus: 64553 2025-03-31T16:28:19.303813Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011335924551936:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:19.303870Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018da/r3tmp/tmpG6F7tl/pdisk_1.dat 2025-03-31T16:28:20.114230Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:28:20.114969Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:28:20.115072Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:28:20.116639Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8223, node 1 2025-03-31T16:28:20.463586Z node 1 
:NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:28:20.463607Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:28:20.463615Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:28:20.463712Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64553 TClient is connected to server localhost:64553 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:28:21.453345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:21.491694Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:28:21.519157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:21.766510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:21.966621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:22.097990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:24.306070Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011335924551936:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:24.306119Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:28:24.389255Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011357399390168:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:24.389367Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:24.782185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:28:24.821996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:28:24.869321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:28:24.910658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:28:24.954806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:28:25.011656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:28:25.106673Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011361694357984:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:25.106780Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:25.107063Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011361694357989:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:25.110960Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:28:25.130635Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011361694357991:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:28:25.215044Z node 1 :TX_PROXY ERROR: Actor# [1:7488011361694358048:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:28:26.323896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:28:26.398612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:28:26.491713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 >> KqpJoinOrder::Chain65Nodes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderLookupBug [GOOD] Test command err: Trying to start YDB, gRPC: 26220, MsgBus: 3103 2025-03-31T16:27:36.316507Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011154209992392:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:27:36.356054Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001900/r3tmp/tmpstNdy7/pdisk_1.dat 2025-03-31T16:27:37.039928Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:27:37.043020Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:27:37.051167Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:27:37.054380Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26220, node 1 2025-03-31T16:27:37.415680Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:27:37.415699Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:27:37.415716Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:27:37.415821Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3103 TClient is connected to server localhost:3103 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:27:38.526503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:41.033071Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011175684829408:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:41.033207Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:41.033539Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011175684829420:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:41.037637Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:27:41.057293Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011175684829422:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:27:41.135281Z node 1 :TX_PROXY ERROR: Actor# [1:7488011175684829473:2342] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:27:41.320247Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011154209992392:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:27:41.320310Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:27:41.584310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:27:41.773384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:27:41.820797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:27:41.860321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:27:41.929081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:27:42.156453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:27:42.236646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:27:42.278638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:27:42.317451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-31T16:27:42.364196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-03-31T16:27:42.419161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-03-31T16:27:42.467361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 
2025-03-31T16:27:42.522273Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:27:43.260260Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-03-31T16:27:43.326220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T16:27:43.387191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-31T16:27:43.423424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-31T16:27:43.457936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-31T16:27:43.495163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-31T16:27:43.529293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-31T16:27:43.558033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-31T16:27:43.602467Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-03-31T16:27:43.643939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-31T16:27:43.701122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-31T16:27:43.758703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-03-31T16:27:43.807485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-03-31T16:27:43.850050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-03-31T16:27:43.907916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-03-31T16:27:43.974643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-03-31T16:27:44.046010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 2025-03-31T16:27:44.090329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but prop ... 10714; 2025-03-31T16:28:18.097601Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038431;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:18.108626Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038493;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:18.110357Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038559;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:18.115797Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038547;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:18.118381Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038581;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:18.121971Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038503;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:18.127736Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038577;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:18.133456Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038635;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:18.137056Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038573;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:18.147170Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038491;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:18.151396Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038599;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:18.156607Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038549;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:18.161229Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038571;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:18.166546Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038463;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:18.170596Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038551;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:18.180475Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038495;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:18.185129Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038501;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:18.190674Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038567;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:18.195075Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038653;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:18.204764Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038617;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:18.209231Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038565;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:18.214530Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038561;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:18.224056Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038499;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:18.228767Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038541;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:18.234233Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038519;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:18.236970Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038587;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:18.246588Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038475;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:18.247819Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038597;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:18.257131Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038461;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:18.260207Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038609;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:18.266726Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038537;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:18.270173Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038435;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:18.280531Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038497;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:18.284023Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038511;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:18.290225Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038583;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:18.294057Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038651;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:18.304976Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038604;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:18.307901Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038479;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:18.313836Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038531;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:18.319353Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038615;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:18.327598Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038535;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:18.328553Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038529;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:18.342029Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038633;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:18.531562Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpgfjg0ckj1t46s492qqfhz", SessionId: ydb://session/3?node_id=1&id=MjBmZmMzMDUtZTRjY2UyNmItNTY3MjA5YS1hMTMyNjNjNg==, Slow query, duration: 32.994514s, status: STATUS_CODE_UNSPECIFIED, 
user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:28:18.852345Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:28:18.852804Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:28:18.853125Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;self_id=[1:7488011291648972506:5846];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038629;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038170;receive=72075186224038331; 2025-03-31T16:28:18.853454Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716;
:3:9: Warning: Symbol $limit is not used, code: 4527
:2:9: Warning: Symbol $browserGroup is not used, code: 4527
:1:9: Warning: Symbol $quotaName is not used, code: 4527
:4:9: Warning: Symbol $offset is not used, code: 4527
:3:9: Warning: Symbol $limit is not used, code: 4527
:2:9: Warning: Symbol $browserGroup is not used, code: 4527
:1:9: Warning: Symbol $quotaName is not used, code: 4527
:4:9: Warning: Symbol $offset is not used, code: 4527 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH20 [GOOD] Test command err: Trying to start YDB, gRPC: 11360, MsgBus: 3128 2025-03-31T16:26:37.873578Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010901217032316:2208];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:37.882078Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001937/r3tmp/tmpPwojsr/pdisk_1.dat 2025-03-31T16:26:38.536200Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:26:38.544028Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:26:38.544129Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:26:38.554584Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11360, node 1 2025-03-31T16:26:38.882564Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:26:38.882584Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:26:38.882591Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:26:38.882696Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3128 TClient is connected to server localhost:3128 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:26:39.809803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:39.852089Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:26:42.089015Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010922691869293:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:42.089113Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:42.089306Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010922691869305:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:42.099485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:26:42.116945Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010922691869307:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:26:42.178496Z node 1 :TX_PROXY ERROR: Actor# [1:7488010922691869358:2340] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:26:42.526682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:26:42.805339Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488010922691869645:2359];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:26:42.805514Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488010922691869645:2359];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:26:42.805788Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488010922691869645:2359];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:26:42.805909Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488010922691869645:2359];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:26:42.806004Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488010922691869645:2359];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:26:42.806103Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488010922691869645:2359];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:26:42.806223Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488010922691869645:2359];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:26:42.806339Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488010922691869645:2359];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:26:42.806440Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488010922691869645:2359];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:26:42.806547Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488010922691869645:2359];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:26:42.806650Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037903;self_id=[1:7488010922691869645:2359];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:26:42.806759Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488010922691869645:2359];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:26:42.850764Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488010922691869649:2361];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:26:42.850827Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488010922691869649:2361];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:26:42.851027Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488010922691869649:2361];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:26:42.851146Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488010922691869649:2361];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:26:42.851278Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488010922691869649:2361];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:26:42.851382Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488010922691869649:2361];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:26:42.851486Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488010922691869649:2361];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:26:42.851599Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488010922691869649:2361];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:26:42.851703Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488010922691869649:2361];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:26:42.851799Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488010922691869649:2361];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:26:42.851885Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488010922691869649:2361];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:26:42.851979Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7488010922691869649:2361];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:26:42.879644Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488010922691869620:2351];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:26:42.879719Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488010922691869620:2351];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstra ... ller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:07.008331Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039307;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:07.009688Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:07.014014Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039317;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:07.015060Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039309;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:07.020944Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039301;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:07.021052Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039313;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:07.026915Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039339;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:07.027565Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:07.033491Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:07.033491Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:07.039727Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:07.039726Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:07.046180Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039285;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:07.046180Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039323;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:07.052376Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039315;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:07.052375Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:07.059564Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039305;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:07.061648Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:07.073264Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:07.077222Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:07.079753Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:07.085464Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:07.087421Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039335;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:07.096961Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:07.100037Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:07.106724Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:07.110543Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:07.117527Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:07.124296Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:07.126851Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039327;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:07.133104Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:07.133378Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:07.145252Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:07.147746Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:07.157141Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:07.164663Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:07.168386Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039283;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:07.181271Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039337;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:07.187991Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:07.187999Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039367;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:07.194955Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:07.200699Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:07.214930Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:07.216199Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:07.230846Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039297;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:07.504105Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpgf27na2hnfgf4sqjjvxve", SessionId: ydb://session/3?node_id=1&id=NTAyNTE4YzQtYTY5YWEwNGYtMjI1ODUxYTktODE1YWM4Zjk=, Slow query, duration: 38.617755s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:28:07.808842Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:28:07.809343Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:28:07.810170Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;self_id=[1:7488011219044665676:10727];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039094;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038933;receive=72075186224039392; 2025-03-31T16:28:07.810554Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> KqpIndexLookupJoin::LeftOnly-StreamLookup [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCH5 >> KqpIndexLookupJoin::LeftOnly+StreamLookup [GOOD] >> KqpJoin::ComplexJoin [GOOD] >> KqpJoin::JoinAggregate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftOnly-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 27273, MsgBus: 64864 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018d8/r3tmp/tmp5poNmL/pdisk_1.dat 2025-03-31T16:28:23.011951Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011349007848333:2206];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:23.012085Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:28:23.379935Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:28:23.380032Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:28:23.394292Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 
2025-03-31T16:28:23.434153Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27273, node 1 2025-03-31T16:28:23.734999Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:28:23.735017Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:28:23.735027Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:28:23.743256Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64864 TClient is connected to server localhost:64864 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:28:24.728247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:24.799277Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:28:24.817305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:25.081480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:25.310944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:25.404759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:27.364962Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011370482686428:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:27.365070Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:27.571184Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011349007848333:2206];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:27.571233Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:28:27.675332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:28:27.715884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:28:27.809045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:28:27.864345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:28:27.928490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:28:27.983343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:28:28.086292Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011374777654247:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:28.086391Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:28.086727Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011374777654253:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:28.090676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:28:28.105541Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-31T16:28:28.105949Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011374777654255:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:28:28.196069Z node 1 :TX_PROXY ERROR: Actor# [1:7488011374777654309:3458] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:28:29.243001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:28:29.285111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:28:29.329034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T16:28:29.379920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T16:28:29.446962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-31T16:28:29.484966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 >> KqpJoin::JoinLeftPureInnerConverted >> KqpJoinOrder::TestJoinOrderHintsComplex-ColumnStore [GOOD] >> KqpFlipJoin::LeftSemi_1 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::ComplexJoin [GOOD] Test command err: Trying to start YDB, gRPC: 12046, MsgBus: 9452 2025-03-31T16:28:21.888795Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011345858358652:2208];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:21.888849Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018d9/r3tmp/tmpOPSPMM/pdisk_1.dat 2025-03-31T16:28:22.569192Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:28:22.574913Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:28:22.575004Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:28:22.577262Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12046, node 1 2025-03-31T16:28:22.915635Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:28:22.915667Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:28:22.915675Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:28:22.915788Z node 1 :NET_CLASSIFIER ERROR: got bad 
distributable configuration TClient is connected to server localhost:9452 TClient is connected to server localhost:9452 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:28:24.094211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:24.197904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:28:24.425006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-31T16:28:24.678602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:24.811689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:26.875268Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011345858358652:2208];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:26.875332Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:28:26.961423Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011367333196742:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:26.961544Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:27.413207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:28:27.503817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:28:27.579490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:28:27.624058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:28:27.674742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:28:27.765005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:28:27.892666Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011371628164563:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:27.892748Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:27.892938Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011371628164568:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:27.897099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:28:27.917278Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011371628164570:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:28:27.992180Z node 1 :TX_PROXY ERROR: Actor# [1:7488011371628164627:3463] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:28:29.288418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:28:29.345282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:28:29.389534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T16:28:29.430418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T16:28:29.465369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftOnly+StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 31035, MsgBus: 65302 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018d5/r3tmp/tmpPXWujM/pdisk_1.dat 2025-03-31T16:28:24.550597Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:28:24.631988Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:28:24.632090Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:28:24.640985Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:28:24.685530Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31035, node 1 2025-03-31T16:28:24.928178Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:28:24.928198Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:28:24.928205Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:28:24.928327Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65302 TClient is connected to server localhost:65302 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:28:25.842564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:25.858732Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:25.879042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:28:26.145584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:26.403349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:26.533064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:28.852198Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011378569064118:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:28.852310Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:29.242068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:28:29.343829Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:28:29.436284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:28:29.482670Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:28:29.526119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:28:29.584990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:28:29.650278Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011382864031935:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:29.650381Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:29.650716Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011382864031940:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:29.654203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:28:29.669225Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-31T16:28:29.669478Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011382864031942:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:28:29.773631Z node 1 :TX_PROXY ERROR: Actor# [1:7488011382864031997:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:28:30.873099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:28:30.917988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:28:30.990505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T16:28:31.039445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T16:28:31.106063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-31T16:28:31.153461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 >> TPersQueueTest::CheckKillBalancer [GOOD] >> TPersQueueTest::CheckDeleteTopic >> KqpIndexLookupJoin::SimpleLeftJoin+StreamLookup [GOOD] >> KqpJoinOrder::DatetimeConstantFold+ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpFlipJoin::LeftSemi_1 [GOOD] Test command err: Trying to start YDB, gRPC: 2124, MsgBus: 14056 2025-03-31T16:28:17.782318Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011330209740357:2289];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:17.784357Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018dc/r3tmp/tmp6MAL5d/pdisk_1.dat 2025-03-31T16:28:18.471528Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:28:18.494108Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:28:18.494183Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:28:18.500499Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2124, node 1 2025-03-31T16:28:18.766942Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:28:18.766962Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:28:18.766980Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 
2025-03-31T16:28:18.767104Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14056 TClient is connected to server localhost:14056 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:28:19.647708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:19.691364Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:28:19.722725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:19.989955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:20.263530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:20.355469Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:22.349058Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011351684578358:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:22.349173Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:22.724227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:28:22.769503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:28:22.781931Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011330209740357:2289];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:22.781976Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:28:22.818484Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:28:22.859500Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:28:22.896452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:28:22.947586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:28:23.015423Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011355979546166:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:23.015512Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:23.015742Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011355979546171:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:23.022904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:28:23.036895Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-31T16:28:23.041208Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011355979546173:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:28:23.125298Z node 1 :TX_PROXY ERROR: Actor# [1:7488011355979546230:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:28:24.180301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:28:24.214817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:28:24.255432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T16:28:24.291722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 27199, MsgBus: 4975 2025-03-31T16:28:27.162789Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488011371043567337:2210];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018dc/r3tmp/tmpxHORa5/pdisk_1.dat 2025-03-31T16:28:27.216025Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:28:27.337784Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:28:27.337882Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:28:27.341069Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:28:27.341386Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27199, node 2 2025-03-31T16:28:27.515635Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:28:27.515661Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:28:27.515669Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:28:27.515776Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4975 TClient is connected to server localhost:4975 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:28:28.290294Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:28.297157Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:28.316591Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:28:28.434939Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:28.726689Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:28.816161Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:31.351693Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488011388223438111:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:31.351764Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:31.441513Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:28:31.501884Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:28:31.583938Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:28:31.639228Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:28:31.677387Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:28:31.745685Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:28:31.828028Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488011388223438626:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:31.828117Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:31.828450Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488011388223438631:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:31.833136Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:28:31.850968Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2025-03-31T16:28:31.851974Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488011388223438633:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:28:31.909097Z node 2 :TX_PROXY ERROR: Actor# [2:7488011388223438689:3446] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:28:32.071306Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488011371043567337:2210];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:32.077548Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:28:32.935206Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:28:32.974442Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-31T16:28:33.005386Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-31T16:28:33.055460Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::SimpleLeftJoin+StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 5898, MsgBus: 29378 2025-03-31T16:28:26.595020Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011368704890128:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:26.595075Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018d3/r3tmp/tmps6ohhi/pdisk_1.dat 2025-03-31T16:28:27.345839Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:28:27.348631Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:28:27.348691Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:28:27.354688Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5898, node 1 2025-03-31T16:28:27.443297Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:28:27.443321Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:28:27.443327Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:28:27.443441Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29378 TClient is connected to server localhost:29378 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:28:28.382584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:28.428507Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:28:28.449332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:28.625516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:28.846922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:28.945707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:31.041710Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011390179728385:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:31.041850Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:31.474717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:28:31.548887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:28:31.596551Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011368704890128:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:31.596606Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:28:31.639294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:28:31.680264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:28:31.717124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:28:31.806102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:28:31.904900Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011390179728908:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:31.904976Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:31.905111Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011390179728913:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:31.908606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:28:31.922914Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011390179728915:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:28:32.023109Z node 1 :TX_PROXY ERROR: Actor# [1:7488011394474696267:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:28:33.146214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:28:33.197388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:28:33.296719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T16:28:33.349838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T16:28:33.448559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-31T16:28:33.488565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TestJoinOrderHintsComplex-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 29639, MsgBus: 12124 2025-03-31T16:27:42.807676Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011178265442227:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:27:42.808077Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018f9/r3tmp/tmp7mk2Pe/pdisk_1.dat 2025-03-31T16:27:43.499988Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:27:43.502960Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:27:43.503067Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:27:43.512342Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29639, node 1 2025-03-31T16:27:43.800826Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:27:43.800848Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:27:43.800885Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:27:43.800981Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12124 TClient is connected to server 
localhost:12124 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:27:44.706150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:44.727802Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:27:46.992944Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011195445311939:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:46.993107Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:46.993433Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011195445311951:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:46.997523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:27:47.012816Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011195445311953:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:27:47.079600Z node 1 :TX_PROXY ERROR: Actor# [1:7488011199740279300:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:27:47.460943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:27:47.606431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:27:47.639251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:27:47.671748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:27:47.702452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:27:47.749540Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011178265442227:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:27:47.749600Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:27:47.916606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:27:47.976680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:27:48.018567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:27:48.072578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-31T16:27:48.110637Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-03-31T16:27:48.150754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-03-31T16:27:48.188282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 
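NOTE: the sequence above — TPoolFetcherActor NOT_FOUND, ESchemeOpCreateResourcePool, the "doublechecking" retry, and the TX_PROXY "path exist, request accepts it" error — is the workload manager lazily creating the built-in "default" resource pool on first query; the "path exist" message appears to be the benign race when the doublecheck re-proposes an already-created pool. The pool it materializes under /Root/.metadata/workload_manager/pools/default could equally be declared explicitly; a minimal sketch, assuming YDB's resource-pool DDL (the setting names and values below are illustrative assumptions, not taken from this log):

    CREATE RESOURCE POOL default WITH (
        CONCURRENT_QUERY_LIMIT = 10, -- illustrative value; assumed setting name
        QUEUE_SIZE = 100             -- illustrative value; assumed setting name
    );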
2025-03-31T16:27:48.219181Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:27:49.014073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-03-31T16:27:49.058705Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T16:27:49.141723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-31T16:27:49.193518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-31T16:27:49.248139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-31T16:27:49.332824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-31T16:27:49.381410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-31T16:27:49.453125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-31T16:27:49.538824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-03-31T16:27:49.602312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-31T16:27:49.688231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-31T16:27:49.727625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-03-31T16:27:49.799662Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-03-31T16:27:49.835981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-03-31T16:27:49.890404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-03-31T16:27:49.944667Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-03-31T16:27:49.977550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTa ... :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038653;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:26.397530Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038645;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:26.404064Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038604;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:26.404510Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038580;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:26.410997Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038602;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:26.417143Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038560;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:26.422404Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038554;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:26.423775Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038596;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:26.430524Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038562;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:26.441185Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038661;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:26.444620Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038578;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:26.450721Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038542;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:26.457951Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038585;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:26.464963Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038655;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 
2025-03-31T16:28:26.466627Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038572;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:26.473241Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038540;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:26.473871Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038612;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:26.481022Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038600;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:26.486492Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038588;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:26.486731Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038621;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:26.496510Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038563;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:26.500857Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038584;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:26.506584Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038614;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:26.511005Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038593;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:26.521311Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038594;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:26.525226Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038625;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:26.534679Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038591;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:26.534967Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038606;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:26.540657Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038615;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:26.546379Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038556;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:26.549045Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038564;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:26.552548Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038633;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:26.558964Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038610;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:26.562765Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038595;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:26.568872Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038550;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:26.571837Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038641;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:26.580330Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038587;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:26.586352Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038570;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:26.589652Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038627;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:26.593296Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038623;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:26.599065Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038592;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:26.614252Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038586;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:26.621441Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038574;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:26.791430Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpgfrbdcs12twp24d7ptsz0", SessionId: ydb://session/3?node_id=1&id=MTJhMWExZjItM2U0ZDYxYjctZTFhYWY3M2EtZjcxMTQ5ZjY=, Slow query, duration: 35.257567s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 
Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:28:27.258460Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:28:27.258884Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:28:27.259626Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;self_id=[1:7488011285639643938:4899];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038331;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038170;receive=72075186224038629; 2025-03-31T16:28:27.260020Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716;
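NOTE: the escaped statement text in the KQP_SLOW_LOG entry above (duration 35.257567s) unescapes to the following DDL; it is reproduced verbatim from the log, with only the \n escapes expanded for readability:

    CREATE TABLE t1 (
        id1 Int32 NOT NULL,
        PRIMARY KEY (id1)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t2 (
        id2 Int64 NOT NULL,
        t1_id1 Int64 NOT NULL,
        -- random_field2 Int32
        PRIMARY KEY (id2)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t3 (
        id3 Int16 NOT NULL,
        -- random_field3 Int32
        PRIMARY KEY (id3)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);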
: Warning: Execution, code: 1060
: Warning: Unapplied hint: JoinOrder( (Unused1 Unused2) (Unused3 Unused4) ), code: 4534
: Warning: Unapplied hint: Rows(Unused # 10e8), code: 4534
: Warning: Unapplied hint: Rows(R T # 1), code: 4534
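NOTE: the same four warnings repeat below for a second execution of the query. Hints of this shape are supplied to the KQP optimizer as a pragma; a minimal sketch, assuming the ydb.OptimizerHints pragma wrapper (the hint bodies are taken verbatim from the warnings above, but the actual query text is not shown in this log):

    PRAGMA ydb.OptimizerHints = '
        JoinOrder( (Unused1 Unused2) (Unused3 Unused4) )
        Rows(Unused # 10e8)
        Rows(R T # 1)
    ';

Code 4534 marks a hint as unapplied; here the relation names (Unused1 ... Unused4, Unused, R, T) presumably match no table in the optimized join graph, which is what leaves them unapplied.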
: Warning: Execution, code: 1060
: Warning: Unapplied hint: JoinOrder( (Unused1 Unused2) (Unused3 Unused4) ), code: 4534
: Warning: Unapplied hint: Rows(Unused # 10e8), code: 4534
: Warning: Unapplied hint: Rows(R T # 1), code: 4534
>> KqpJoinOrder::TPCDS87-ColumnStore
>> KqpJoinOrder::TPCDS95-ColumnStore
>> KqpJoinOrder::CanonizedJoinOrderTPCH7
>> KqpJoinOrder::TPCDS96-ColumnStore
>> KqpJoinOrder::FourWayJoinLeftFirst-ColumnStore
>> TopicService::MultiplePartitionsAndNoGapsInTheOffsets [GOOD]
>> OlapEstimationRowsCorrectness::TPCH21 [GOOD]
>> KqpJoinOrder::GeneralPrioritiesBug1 [GOOD]
>> TTopicYqlTest::DropTopicYql
>> KqpJoinOrder::TPCDS61-ColumnStore [GOOD]
>> KqpJoin::JoinLeftPureInnerConverted [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> OlapEstimationRowsCorrectness::TPCH21 [GOOD]
Test command err: Trying to start YDB, gRPC: 26379, MsgBus: 15524
2025-03-31T16:26:46.455348Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010939861323862:2201];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:26:46.467283Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00192e/r3tmp/tmpoYn0rQ/pdisk_1.dat
2025-03-31T16:26:47.218527Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:26:47.232131Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:26:47.232200Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:26:47.236342Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 26379, node 1
2025-03-31T16:26:47.471578Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T16:26:47.471602Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T16:26:47.471608Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T16:26:47.471697Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:15524
TClient is connected to server localhost:15524
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-31T16:26:48.280323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:26:51.062528Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010961336160862:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:51.062641Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:51.063062Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010961336160874:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:51.067273Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:26:51.087009Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010961336160876:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:26:51.184311Z node 1 :TX_PROXY ERROR: Actor# [1:7488010961336160927:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:26:51.448308Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010939861323862:2201];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:51.448371Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:26:51.574749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:26:51.847768Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488010961336161176:2360];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:26:51.847942Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488010961336161176:2360];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:26:51.848189Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488010961336161176:2360];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:26:51.848298Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488010961336161176:2360];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:26:51.848394Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488010961336161176:2360];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:26:51.848494Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488010961336161176:2360];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:26:51.848586Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488010961336161176:2360];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:26:51.848681Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488010961336161176:2360];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:26:51.848844Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488010961336161176:2360];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:26:51.848983Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037895;self_id=[1:7488010961336161176:2360];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:26:51.849085Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488010961336161176:2360];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:26:51.849178Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488010961336161176:2360];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:26:51.860368Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488010961336161174:2359];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:26:51.860438Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488010961336161174:2359];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:26:51.860655Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488010961336161174:2359];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:26:51.860753Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488010961336161174:2359];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:26:51.860851Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488010961336161174:2359];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:26:51.860993Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488010961336161174:2359];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:26:51.861099Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488010961336161174:2359];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:26:51.861217Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488010961336161174:2359];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:26:51.861330Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488010961336161174:2359];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:26:51.861455Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488010961336161174:2359];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:26:51.861558Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7488010961336161174:2359];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:26:51.861704Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488010961336161174:2359];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:26:51.904598Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488010961336161180:2362];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.c ... oller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:16.056334Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039294;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:16.060042Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039283;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:16.065940Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039282;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:16.067296Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039237;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:16.072095Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039248;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:16.076563Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039249;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:16.077727Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039280;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:16.083456Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039276;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:16.086318Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039246;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:16.096231Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039278;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:16.099768Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039277;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:16.106092Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039281;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:16.109687Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039251;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:16.120139Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039256;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:16.123491Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039243;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:16.130148Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039247;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:16.132599Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039259;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:16.142630Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039231;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:16.145402Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039244;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:16.157350Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039348;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:16.159923Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039279;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:16.170602Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039241;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:16.171716Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039318;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:16.182174Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039316;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:16.185312Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039252;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:16.196287Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039285;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:16.199565Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039263;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:16.206255Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039257;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:16.209586Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039254;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:16.225540Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039230;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:16.240318Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039233;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:16.251795Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039307;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:16.269698Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039260;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:16.288463Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039265;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:16.296561Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039313;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:16.316193Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039258;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:16.326844Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039301;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:16.342607Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039268;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:16.359009Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039305;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:16.360970Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039326;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:16.370046Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039323;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:16.374599Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039299;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:16.383060Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039309;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:16.391049Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039296;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:16.411517Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039332;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:16.724823Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpgfatzez2aqpazvx2cxb38", SessionId: ydb://session/3?node_id=1&id=NGI3YTAzNTEtNWVkN2RmNWEtNTVhNWNiNWQtZDE1NzFkYWU=, Slow query, duration: 39.028529s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:28:17.233604Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:28:17.234020Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:28:17.234510Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;self_id=[1:7488011176084562610:7895];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038933;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039094;receive=72075186224039392; 2025-03-31T16:28:17.234835Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::JoinLeftPureInnerConverted [GOOD] Test command err: Trying to start YDB, gRPC: 23480, MsgBus: 5647 2025-03-31T16:28:34.493616Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011403422248166:2136];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:34.493754Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018cc/r3tmp/tmpSpnCKI/pdisk_1.dat 2025-03-31T16:28:35.342087Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:28:35.342181Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:28:35.348070Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:28:35.395859Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23480, node 1 2025-03-31T16:28:35.695924Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken 
or outdated, will use file: (empty maybe) 2025-03-31T16:28:35.695948Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:28:35.695956Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:28:35.696070Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5647 TClient is connected to server localhost:5647 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:28:36.454265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:36.523658Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:28:36.553601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:36.745323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:37.008847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:37.107036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:39.080040Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011424897086333:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:39.080135Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:39.390725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:28:39.415895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:28:39.441941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:28:39.469774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:28:39.493412Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011403422248166:2136];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:39.493495Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:28:39.504405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:28:39.551649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:28:39.641527Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011424897086850:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:39.641593Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:39.641946Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011424897086855:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:39.645197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:28:39.655351Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011424897086857:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:28:39.734642Z node 1 :TX_PROXY ERROR: Actor# [1:7488011424897086913:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:28:41.065770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:28:41.122170Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:28:41.164167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::GeneralPrioritiesBug1 [GOOD] Test command err: Trying to start YDB, gRPC: 23704, MsgBus: 18985 2025-03-31T16:27:50.055432Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011213574762895:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:27:50.056355Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018f2/r3tmp/tmpgpDMU8/pdisk_1.dat 2025-03-31T16:27:50.678052Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:27:50.709330Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:27:50.709410Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:27:50.714985Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23704, node 1 2025-03-31T16:27:50.921733Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:27:50.921765Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:27:50.921784Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:27:50.921892Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18985 TClient is connected to server localhost:18985 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:27:51.784093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:51.803278Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:27:54.097995Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011230754632717:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:54.098109Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:54.098446Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011230754632729:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:54.102316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:27:54.132119Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011230754632731:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:27:54.191057Z node 1 :TX_PROXY ERROR: Actor# [1:7488011230754632782:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:27:54.623651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:27:54.747988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:27:54.846614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:27:54.897844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:27:54.935739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:27:55.059270Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011213574762895:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:27:55.059316Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:27:55.116514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:27:55.151354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:27:55.202892Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:27:55.238480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-31T16:27:55.283246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-03-31T16:27:55.320037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-03-31T16:27:55.345792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 
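
Note on the four messages that keep repeating above: the KQP_WORKLOAD_SERVICE / TX_PROXY sequence is the workload manager bootstrapping its default resource pool, not a test failure (the surrounding tests still finish [GOOD]). The pool fetcher first reports NOT_FOUND for PoolId default, an ESchemeOpCreateResourcePool suboperation then creates /Root/.metadata/workload_manager/pools/default, the creator actor schedules a "doublechecking" retry, and a racing creation attempt ends with the "path exist, request accepts it" issue, which the message itself marks as accepted. As a hedged sketch only, not taken from this log, a user-visible pool would be declared through YDB's resource-pool DDL roughly as follows; the pool name and both settings are assumptions for illustration:

    CREATE RESOURCE POOL test_pool WITH (  -- hypothetical name, not the system default pool from the log
        CONCURRENT_QUERY_LIMIT = 10,       -- assumed workload-manager setting
        QUEUE_SIZE = 100                   -- assumed workload-manager setting
    );
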
2025-03-31T16:27:55.374750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:27:56.137580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-03-31T16:27:56.174661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T16:27:56.206450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-31T16:27:56.238250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-31T16:27:56.269828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-31T16:27:56.302163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-31T16:27:56.331505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-31T16:27:56.362151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-31T16:27:56.390504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-03-31T16:27:56.435454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-31T16:27:56.472574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-31T16:27:56.529092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-03-31T16:27:56.612965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-03-31T16:27:56.681517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-03-31T16:27:56.719764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-03-31T16:27:56.756630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-03-31T16:27:56.792310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTa ... oller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.020609Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038563;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.025545Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038589;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.030915Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038567;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.032499Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038595;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.037646Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038611;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.048449Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038597;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.053069Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038617;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.058938Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038601;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.066616Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038619;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.073064Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038605;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.073710Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038587;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.079660Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038569;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.086101Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038557;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.089981Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038537;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.100483Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038543;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.105157Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038575;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.110168Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038613;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.126854Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038593;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.129491Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038621;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.141295Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038535;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.143829Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038607;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.149920Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038559;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.156766Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038599;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.162567Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038615;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.165980Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038443;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.226225Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038661;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.229544Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038649;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.236663Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038651;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.239424Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038657;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.242784Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038659;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.250320Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038655;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.250375Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.260633Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038647;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.263688Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038631;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.278358Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038653;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.285857Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038645;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.300876Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038643;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.304176Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038627;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.311791Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038633;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.316157Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038641;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.317837Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038635;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.328777Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038625;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.342713Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038623;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.348912Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038639;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.351706Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038637;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.435407Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpgfza3b5dw07584d330mj9", SessionId: ydb://session/3?node_id=1&id=NjM3OGZmNzQtNGE1ZDAzM2ItYTBiMjAyY2EtNDk3MTFmM2Y=, Slow query, duration: 35.775583s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:28:34.775342Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:28:34.775398Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:28:34.775938Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;self_id=[1:7488011256524442609:2795];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038170;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038629;receive=72075186224038331; 2025-03-31T16:28:34.776334Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS61-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 26005, MsgBus: 16590 2025-03-31T16:27:19.647324Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011081456236850:2200];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:27:19.647599Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00190d/r3tmp/tmpKQWkVF/pdisk_1.dat 2025-03-31T16:27:20.173571Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:27:20.173670Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:27:20.179517Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:27:20.181180Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26005, node 1 2025-03-31T16:27:20.427664Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:27:20.427686Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:27:20.427693Z node 1 :NET_CLASSIFIER WARN: failed to initialize 
from file: (empty maybe) 2025-03-31T16:27:20.427824Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16590 TClient is connected to server localhost:16590 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:27:21.738519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:21.757910Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:27:24.409326Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011102931073853:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:24.409461Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:24.415260Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011102931073865:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:24.423462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:27:24.439101Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-31T16:27:24.439380Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011102931073867:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:27:24.506931Z node 1 :TX_PROXY ERROR: Actor# [1:7488011102931073918:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:27:24.609066Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011081456236850:2200];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:27:24.609153Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:27:24.888105Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:27:25.003032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:27:25.079772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:27:25.127998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:27:25.177221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:27:25.392751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:27:25.430571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:27:25.480305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:27:25.520849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-31T16:27:25.570423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-03-31T16:27:25.617844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-03-31T16:27:25.666760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 
2025-03-31T16:27:25.729380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:27:26.560266Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-03-31T16:27:26.631456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T16:27:26.682782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-31T16:27:26.761724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-31T16:27:26.809906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-31T16:27:26.881521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-31T16:27:26.931579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-31T16:27:26.983701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-31T16:27:27.034922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-03-31T16:27:27.107941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-31T16:27:27.162047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-31T16:27:27.252757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-03-31T16:27:27.305023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-03-31T16:27:27.338174Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-03-31T16:27:27.368491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-03-31T16:27:27.443439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2 ... 6710714; 2025-03-31T16:28:04.906352Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038577;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:04.911899Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038539;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:04.917492Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038636;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:04.920699Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038593;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:04.925682Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038479;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:04.937622Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038620;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:04.942471Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038611;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:04.954270Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038595;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:04.957938Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038646;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:04.967777Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038613;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:04.971298Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038583;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:04.977014Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038624;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:04.981143Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038495;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:04.986939Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038638;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:04.988569Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038603;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:04.992894Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038465;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:04.994010Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038615;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:04.999789Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038501;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:05.000117Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038521;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:05.005962Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038547;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:05.006499Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038489;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:05.016089Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038555;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:05.016089Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038471;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:05.022519Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038565;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:05.028172Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038569;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:05.030360Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038619;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:05.055967Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038505;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:05.058151Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038457;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:05.062928Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038553;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:05.064978Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038647;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:05.070429Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038645;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:05.071324Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038641;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:05.076610Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038607;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:05.077814Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038523;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:05.083428Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038573;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:05.084229Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038601;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:05.090616Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038525;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:05.219441Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpgf2f76wthdzwycwkeyq2g", SessionId: ydb://session/3?node_id=1&id=MjllNDgzYWQtZDIxOWEzMTMtMWNjNDA3My1kZjBlNTYwOA==, Slow query, duration: 36.091632s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:28:05.864306Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:28:05.864653Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:28:05.865435Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;self_id=[1:7488011188830436847:4533];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038331;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038170;receive=72075186224038629; 2025-03-31T16:28:05.865733Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:28:37.012539Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpggmyya96q5aa5ynf7qphq", SessionId: 
ydb://session/3?node_id=1&id=MjllNDgzYWQtZDIxOWEzMTMtMWNjNDA3My1kZjBlNTYwOA==, Slow query, duration: 16.181476s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "pragma TablePathPrefix = \"/Root/test/ds/\";\n\n-- NB: Subquerys\n-- start query 1 in stream 0 using template query61.tpl and seed 1930872976\nselect promotions,total,cast(promotions as float)/cast(total as float)*100\nfrom\n (select sum(ss_ext_sales_price) promotions\n from store_sales\n cross join store\n cross join promotion\n cross join date_dim\n cross join customer\n cross join customer_address\n cross join item\n where ss_sold_date_sk = d_date_sk\n and ss_store_sk = s_store_sk\n and ss_promo_sk = p_promo_sk\n and ss_customer_sk= c_customer_sk\n and ca_address_sk = c_current_addr_sk\n and ss_item_sk = i_item_sk\n and ca_gmt_offset = -6\n and i_category = 'Sports'\n and (p_channel_dmail = 'Y' or p_channel_email = 'Y' or p_channel_tv = 'Y')\n and s_gmt_offset = -6\n and d_year = 2001\n and d_moy = 12) promotional_sales cross join\n (select sum(ss_ext_sales_price) total\n from store_sales\n cross join store\n cross join date_dim\n cross join customer\n cross join customer_address\n cross join item\n where ss_sold_date_sk = d_date_sk\n and ss_store_sk = s_store_sk\n and ss_customer_sk= c_customer_sk\n and ca_address_sk = c_current_addr_sk\n and ss_item_sk = i_item_sk\n and ca_gmt_offset = -6\n and i_category = 'Sports'\n and s_gmt_offset = -6\n and d_year = 2001\n and d_moy = 12) all_sales\norder by promotions, total\nlimit 100;\n", parameters: 0b >> KqpJoin::JoinAggregate [GOOD] >> KqpJoinOrder::ShuffleEliminationDifferentJoinPredicateKeyTypeCorrectness2 >> KqpJoinOrder::FiveWayJoinWithComplexPreds2-ColumnStore >> KqpFlipJoin::Right_3 >> KqpJoin::LeftJoinPushdownPredicate_Simple ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::JoinAggregate [GOOD] Test command err: Trying to start YDB, gRPC: 23690, MsgBus: 65269 2025-03-31T16:28:33.949389Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011399135051394:2069];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:33.952968Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018cd/r3tmp/tmpDFfnHy/pdisk_1.dat 2025-03-31T16:28:34.613773Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:28:34.613886Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:28:34.617100Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23690, node 1 2025-03-31T16:28:34.689982Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:28:34.690009Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:28:34.880140Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:28:34.956327Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:28:34.956351Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:28:34.956366Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 
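
For readability, the statements recorded in the two KQP_SLOW_LOG entries above (TraceId 01jqpgf2f76wthdzwycwkeyq2g, 36.091632s, and TraceId 01jqpggmyya96q5aa5ynf7qphq, 16.181476s) with their \n and \' escape sequences expanded; the text is otherwise verbatim from the log:

    CREATE TABLE t1 (
        id1 Int32 NOT NULL,
        PRIMARY KEY (id1)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t2 (
        id2 Int64 NOT NULL,
        t1_id1 Int64 NOT NULL,
        -- random_field2 Int32
        PRIMARY KEY (id2)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t3 (
        id3 Int16 NOT NULL,
        -- random_field3 Int32
        PRIMARY KEY (id3)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    pragma TablePathPrefix = "/Root/test/ds/";

    -- NB: Subquerys
    -- start query 1 in stream 0 using template query61.tpl and seed 1930872976
    select promotions,total,cast(promotions as float)/cast(total as float)*100
    from
      (select sum(ss_ext_sales_price) promotions
       from store_sales
       cross join store
       cross join promotion
       cross join date_dim
       cross join customer
       cross join customer_address
       cross join item
       where ss_sold_date_sk = d_date_sk
       and ss_store_sk = s_store_sk
       and ss_promo_sk = p_promo_sk
       and ss_customer_sk= c_customer_sk
       and ca_address_sk = c_current_addr_sk
       and ss_item_sk = i_item_sk
       and ca_gmt_offset = -6
       and i_category = 'Sports'
       and (p_channel_dmail = 'Y' or p_channel_email = 'Y' or p_channel_tv = 'Y')
       and s_gmt_offset = -6
       and d_year = 2001
       and d_moy = 12) promotional_sales cross join
      (select sum(ss_ext_sales_price) total
       from store_sales
       cross join store
       cross join date_dim
       cross join customer
       cross join customer_address
       cross join item
       where ss_sold_date_sk = d_date_sk
       and ss_store_sk = s_store_sk
       and ss_customer_sk= c_customer_sk
       and ca_address_sk = c_current_addr_sk
       and ss_item_sk = i_item_sk
       and ca_gmt_offset = -6
       and i_category = 'Sports'
       and s_gmt_offset = -6
       and d_year = 2001
       and d_moy = 12) all_sales
    order by promotions, total
    limit 100;
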
2025-03-31T16:28:34.956507Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65269 TClient is connected to server localhost:65269 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:28:35.822615Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:35.888042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:36.084302Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:36.406935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:36.515076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:38.849278Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011420609889633:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:38.849395Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:38.955193Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011399135051394:2069];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:38.966236Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:28:39.215899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:28:39.324850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:28:39.359453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:28:39.391012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:28:39.468514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:28:39.512839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:28:39.590198Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011424904857449:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:39.590307Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:39.590595Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011424904857454:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:39.594533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:28:39.639203Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011424904857456:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:28:39.694642Z node 1 :TX_PROXY ERROR: Actor# [1:7488011424904857513:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:28:40.804127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:28:40.864148Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:28:40.920262Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 >> KqpJoinOrder::FiveWayJoinWithConstantFoldOpt-ColumnStore [GOOD] >> OlapEstimationRowsCorrectness::TPCDS78 >> TPQCompatTest::DiscoverTopics [GOOD] >> TPQCompatTest::SetupLockSession >> KqpJoinOrder::FourWayJoinWithPredsAndEquivAndLeft-ColumnStore [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoinWithConstantFoldOpt-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 19412, MsgBus: 8145 2025-03-31T16:27:55.648679Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011232897338432:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:27:55.649403Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018f0/r3tmp/tmp3RTaxX/pdisk_1.dat 2025-03-31T16:27:56.466846Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:27:56.466934Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:27:56.520274Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:27:56.521771Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19412, node 1 2025-03-31T16:27:56.750049Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:27:56.750070Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:27:56.750078Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:27:56.750185Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8145 TClient is connected to server localhost:8145 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:27:57.835708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:00.140523Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011254372175580:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:00.140646Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:00.141416Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011254372175592:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:00.146073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:28:00.160997Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011254372175594:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:28:00.223629Z node 1 :TX_PROXY ERROR: Actor# [1:7488011254372175645:2341] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:28:00.628289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-31T16:28:00.655218Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011232897338432:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:00.655304Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:28:00.790373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-31T16:28:00.834477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:28:00.915247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:28:00.959605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:28:01.255386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:28:01.330784Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:28:01.414092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:28:01.484898Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-03-31T16:28:01.525259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480 2025-03-31T16:28:01.566628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-03-31T16:28:01.605326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 
2025-03-31T16:28:01.657217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-31T16:28:02.408185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:2, at schemeshard: 72057594046644480 2025-03-31T16:28:02.464345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-03-31T16:28:02.509428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-03-31T16:28:02.588816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-03-31T16:28:02.654625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2025-03-31T16:28:02.719609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480 2025-03-31T16:28:02.803447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480 2025-03-31T16:28:02.860834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2025-03-31T16:28:02.910056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:0, at schemeshard: 72057594046644480 2025-03-31T16:28:02.962870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480 2025-03-31T16:28:03.005503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715683:0, at schemeshard: 72057594046644480 2025-03-31T16:28:03.055945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715684:0, at schemeshard: 72057594046644480 2025-03-31T16:28:03.120303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715685:0, at schemeshard: 72057594046644480 2025-03-31T16:28:03.178726Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715686:0, at schemeshard: 72057594046644480 2025-03-31T16:28:03.217027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976715687:0, at schemeshard: 72057594046644480 2025-03-31T16:28:03.254412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715688:0, at schemeshard: 72057594046644480 2025-03-31T16:28:03.295126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715689:0, at schemeshard: 72057594046644480 2025-03-31T16:28:03.340946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but prop ... oller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:38.767991Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038468;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:38.776988Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038600;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:38.779409Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038548;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:38.786460Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038504;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:38.788292Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038637;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:38.796332Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038608;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:38.802087Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038536;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:38.815888Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038572;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:38.820761Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038470;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:38.826046Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038648;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:38.831332Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038590;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:38.836098Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038661;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:38.841495Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038586;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:38.846679Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038655;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:38.851717Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038646;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:38.857166Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038660;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:38.862656Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038652;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:38.868093Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038634;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:38.873750Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038627;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:38.889396Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038642;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:38.891988Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038636;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:38.901683Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038650;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:38.903359Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038617;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:38.912964Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038474;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:38.915711Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038656;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:38.922265Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038645;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:38.925012Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038601;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:38.934446Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038558;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:38.935499Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038659;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:38.944148Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038625;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:38.945422Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038635;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:38.953553Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038644;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:38.965903Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038620;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:38.971028Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038628;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:38.971582Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038624;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:38.980858Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038657;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:38.984618Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038658;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:38.990316Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038630;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:38.994094Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038638;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:39.003229Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038626;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:39.007846Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038640;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:39.013234Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038654;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:39.016561Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038632;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:39.022442Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038622;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:39.025929Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038651;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:39.199394Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpgg5h8ew4nqbsnbhj1597w", SessionId: ydb://session/3?node_id=1&id=ZDU4NmIzZWQtOGI5NTJmNGMtYmZiMGYyYzAtYWEzZGM0YjA=, Slow query, duration: 34.166298s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:28:39.593862Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976715716;tx_id=281474976715716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715716; 2025-03-31T16:28:39.594322Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;self_id=[1:7488011335976572587:4674];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038331;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038629;receive=72075186224038170; 2025-03-31T16:28:39.603616Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976715716;tx_id=281474976715716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715716; 2025-03-31T16:28:39.604864Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976715716;tx_id=281474976715716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715716; >> KqpJoinOrder::FourWayJoinWithPredsAndEquivAndLeft+ColumnStore [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FourWayJoinWithPredsAndEquivAndLeft-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 9475, MsgBus: 6894 2025-03-31T16:27:58.833203Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011247025420955:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:27:58.834486Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018ec/r3tmp/tmprDTWbS/pdisk_1.dat 2025-03-31T16:27:59.541026Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:27:59.541121Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:27:59.550123Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:27:59.552958Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9475, node 1 
2025-03-31T16:27:59.807619Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:27:59.807643Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:27:59.811239Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:27:59.823406Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6894 TClient is connected to server localhost:6894 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:28:00.754669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:00.772482Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:28:02.907378Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011264205290827:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:02.907745Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011264205290816:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:02.907814Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:02.911998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:28:02.926930Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011264205290830:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:28:03.031186Z node 1 :TX_PROXY ERROR: Actor# [1:7488011268500258177:2342] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:28:03.383130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:28:03.485978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:28:03.543812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:28:03.582421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:28:03.620282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:28:03.782451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:28:03.833579Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011247025420955:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:03.833663Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:28:03.865140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:28:03.926314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:28:03.982162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-31T16:28:04.037235Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-03-31T16:28:04.107143Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-03-31T16:28:04.153167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 
2025-03-31T16:28:04.206585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:28:04.999638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-03-31T16:28:05.049382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T16:28:05.116121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-31T16:28:05.176626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-31T16:28:05.214876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-31T16:28:05.274012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-31T16:28:05.307576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-31T16:28:05.359029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-31T16:28:05.419258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-03-31T16:28:05.465198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-31T16:28:05.551964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-31T16:28:05.604473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-03-31T16:28:05.668326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-03-31T16:28:05.720828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-03-31T16:28:05.766469Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-03-31T16:28:05.808375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-03-31T16:28:05.855745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, ... oller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:42.552568Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038558;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:42.553210Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038498;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:42.559217Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038470;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:42.559325Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038556;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:42.565911Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038518;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:42.566392Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038555;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:42.573765Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038536;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:42.576309Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038520;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:42.581233Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038538;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:42.583784Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038524;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:42.587962Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038522;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:42.590202Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038534;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:42.595105Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038500;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:42.598042Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038552;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:42.603670Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038512;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:42.605756Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038508;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:42.612558Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038504;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:42.619078Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038502;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:42.619967Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038490;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:42.626283Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038472;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:42.632335Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038564;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:42.632341Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038482;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:42.638697Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038464;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:42.643204Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038516;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:42.649725Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038484;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:42.650281Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038468;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:42.656237Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038476;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:42.657730Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038506;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:42.662423Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038550;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:42.664437Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038492;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:42.672545Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038528;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:42.678738Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038466;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:42.680686Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038474;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:42.685076Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038562;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:42.691771Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038488;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:42.698016Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038486;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:42.701256Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038548;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:42.720810Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038514;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:42.724773Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038568;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:42.726873Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038496;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:42.733376Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038560;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:42.734213Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038609;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:42.739655Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038510;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:42.751196Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038478;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:42.751703Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038566;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:42.859503Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpgg825bx5c2tpv55baar1c", SessionId: ydb://session/3?node_id=1&id=NWViZDg2MjgtZDEzYTQ4ODUtZDVhOTViODAtMmU1ODZjNDI=, Slow query, duration: 35.237773s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:28:43.200963Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:28:43.201408Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:28:43.201738Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;self_id=[1:7488011350104653977:4526];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038331;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038170;receive=72075186224038629; 2025-03-31T16:28:43.202515Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> KqpJoinOrder::ShuffleEliminationReuseShuffleTwoJoins ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FourWayJoinWithPredsAndEquivAndLeft+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 21215, MsgBus: 63369 2025-03-31T16:27:08.147633Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011032451349924:2205];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:27:08.147994Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001919/r3tmp/tmpIMc0Rk/pdisk_1.dat 2025-03-31T16:27:08.754056Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:27:08.761228Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:27:08.761316Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:27:08.765175Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21215, node 1 2025-03-31T16:27:08.895790Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:27:08.895814Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty 
maybe) 2025-03-31T16:27:08.895822Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:27:08.895972Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63369 TClient is connected to server localhost:63369 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:27:09.909861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:09.927847Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:27:12.021723Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011049631219604:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:12.021875Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:12.022124Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011049631219616:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:12.025783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:27:12.043921Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-31T16:27:12.044407Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011049631219618:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:27:12.104229Z node 1 :TX_PROXY ERROR: Actor# [1:7488011049631219669:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:27:12.536733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:27:12.794376Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488011049631219898:2359];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:27:12.794364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488011049631219880:2350];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:27:12.794543Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488011049631219898:2359];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:27:12.794556Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488011049631219880:2350];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:27:12.794790Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488011049631219898:2359];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:27:12.794930Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488011049631219880:2350];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:27:12.794951Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488011049631219898:2359];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:27:12.795051Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488011049631219898:2359];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:27:12.795071Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488011049631219880:2350];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:27:12.795187Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488011049631219898:2359];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:27:12.795192Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488011049631219880:2350];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 
2025-03-31T16:27:12.795316Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488011049631219898:2359];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:27:12.795536Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488011049631219880:2350];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:27:12.795637Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488011049631219898:2359];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:27:12.795657Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488011049631219880:2350];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:27:12.795763Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488011049631219898:2359];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:27:12.795768Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488011049631219880:2350];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:27:12.795872Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488011049631219898:2359];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:27:12.795873Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488011049631219880:2350];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:27:12.795977Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488011049631219898:2359];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:27:12.796151Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488011049631219880:2350];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:27:12.796251Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488011049631219880:2350];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:27:12.796256Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488011049631219898:2359];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:27:12.796369Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037903;self_id=[1:7488011049631219880:2350];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:27:12.843608Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488011049631219918:2362];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:27:12.843675Z no ... ller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:35.978736Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039348;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:35.979868Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039332;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:35.984847Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039288;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:35.985173Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039286;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:35.992772Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039352;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:35.992933Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039187;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:35.998332Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039333;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:35.999449Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039231;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:36.004644Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039326;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:36.005021Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039251;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:36.012052Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039338;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:36.017904Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039289;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:36.020306Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039344;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:36.029860Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039292;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:36.037102Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039241;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:36.042579Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039282;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:36.047652Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039294;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:36.049058Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039239;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:36.053106Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039376;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:36.054737Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039296;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:36.059129Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039366;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:36.065591Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039340;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:36.072124Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039336;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:36.077061Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039318;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:36.077889Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039259;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:36.083509Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039311;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:36.087231Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039320;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:36.097977Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039307;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:36.098199Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039273;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:36.106074Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039277;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:36.112444Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039269;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:36.115731Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039291;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:36.122636Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039305;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:36.130646Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039323;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:36.136299Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039293;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:36.141046Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039308;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:36.146779Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039316;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:36.150439Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039319;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:36.160712Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039317;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:36.164168Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039309;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:36.169575Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039275;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:36.173098Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039295;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:36.183055Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039271;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:36.185220Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039279;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:36.200389Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039306;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:36.444956Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpgfzhc2q5a09x4h689ea3z", SessionId: ydb://session/3?node_id=1&id=ZGRiY2E3NTYtNTU5MDJiMzAtNmJmN2RhOTMtNzcxNzhlNGM=, Slow query, duration: 37.551906s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:28:36.720596Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:28:36.721055Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:28:36.721200Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;self_id=[1:7488011363163888401:10979];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039392;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039094;receive=72075186224038933; 2025-03-31T16:28:36.721611Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> KqpJoinOrder::CanonizedJoinOrderTPCH18 >> KqpJoin::LeftJoinPushdownPredicate_Simple [GOOD] >> KqpJoinOrder::TPCDS23-ColumnStore [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::LeftJoinPushdownPredicate_Simple [GOOD] Test command err: Trying to start YDB, gRPC: 12340, MsgBus: 8660 2025-03-31T16:28:47.619007Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011459777948038:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:47.620034Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018bc/r3tmp/tmppfBcx2/pdisk_1.dat 2025-03-31T16:28:48.403237Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:28:48.449752Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:28:48.449861Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:28:48.458230Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12340, node 1 2025-03-31T16:28:48.772908Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:28:48.772930Z node 1 
:NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:28:48.772939Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:28:48.773030Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8660 TClient is connected to server localhost:8660 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:28:49.854636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:49.869012Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:49.878754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:28:50.053978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:50.238266Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:50.334941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:52.388766Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011481252786293:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:52.388863Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:52.619219Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011459777948038:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:52.619272Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:28:52.723054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:28:52.767415Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:28:52.859394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:28:52.898835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:28:52.944991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:28:53.008788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:28:53.111071Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011485547754111:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:53.111186Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:53.111842Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011485547754116:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:53.116180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:28:53.130883Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011485547754118:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:28:53.203617Z node 1 :TX_PROXY ERROR: Actor# [1:7488011485547754171:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:28:54.539764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:28:54.616026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:28:54.729362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 >> KqpFlipJoin::Right_3 [GOOD] >> KqpIndexLookupJoin::CheckAllKeyTypesCast >> OlapEstimationRowsCorrectness::TPCH2 [GOOD] >> KqpFlipJoin::Inner_1 >> KqpJoinOrder::ShuffleEliminationManyKeysJoinPredicate [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS23-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 28846, MsgBus: 18277 2025-03-31T16:27:27.327257Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011114384005440:2274];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:27:27.327437Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001906/r3tmp/tmpM8vDDG/pdisk_1.dat 2025-03-31T16:27:28.003383Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:27:28.007133Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:27:28.007209Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:27:28.014294Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28846, node 1 2025-03-31T16:27:28.363702Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:27:28.363732Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:27:28.363745Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:27:28.363860Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18277 TClient is connected to server localhost:18277 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:27:29.478842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:31.910428Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011131563875056:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:31.910588Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:31.910718Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011131563875068:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:31.916836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:27:31.934198Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011131563875070:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:27:32.033741Z node 1 :TX_PROXY ERROR: Actor# [1:7488011135858842417:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:27:32.319653Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011114384005440:2274];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:27:32.319963Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:27:32.436439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:27:32.553999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:27:32.627953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:27:32.700216Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:27:32.740506Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:27:32.918875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:27:32.955123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:27:33.030606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:27:33.067426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-31T16:27:33.104038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-03-31T16:27:33.138324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-03-31T16:27:33.178399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 
2025-03-31T16:27:33.251761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:27:34.034129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-03-31T16:27:34.121597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T16:27:34.196920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-31T16:27:34.252559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-31T16:27:34.294134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-31T16:27:34.374719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-31T16:27:34.424392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-31T16:27:34.475915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-31T16:27:34.514712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-03-31T16:27:34.551907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-31T16:27:34.593932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-31T16:27:34.669745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-03-31T16:27:34.744967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-03-31T16:27:34.786618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-03-31T16:27:34.829560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-03-31T16:27:34.871853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-03-31T16:27:34.902153Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 2025-03-31T16:27:34.935131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but p ... oller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:09.065238Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038603;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:09.076694Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038619;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:09.079893Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038660;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:09.090427Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038654;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:09.091349Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038616;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:09.101730Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038597;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:09.104805Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038610;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:09.116295Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038624;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:09.117480Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038593;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:09.129773Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038632;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:09.130880Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038595;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:09.141359Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038618;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:09.145770Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038622;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:09.155772Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038648;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:09.160126Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038646;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:09.166053Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038642;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:09.170509Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038626;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:09.180505Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038636;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:09.189143Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038628;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:09.194888Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038650;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:09.210744Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038649;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:09.217565Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038656;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:09.225333Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038658;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:09.240138Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038627;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:09.246377Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038651;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:09.249964Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038639;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:09.256885Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038557;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:09.262218Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038647;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:09.277201Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038640;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:09.284771Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038620;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:09.299549Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038644;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:09.311482Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038634;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:09.317870Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038652;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:09.321968Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038623;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:09.331034Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038653;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:09.334423Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038631;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:09.337139Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038638;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:09.340334Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038641;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:09.343503Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:09.346340Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038655;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:09.350260Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038625;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:09.353122Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038657;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:09.357219Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038633;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:09.358913Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038630;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:09.416326Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038659;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:09.556531Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpgf9mp2g1mjt2bvpx1wyfh", SessionId: ydb://session/3?node_id=1&id=Y2Q3MWZkMzUtYTk2YmVhYWItOWY0Mjg0YmQtYmIxZTgyMA==, Slow query, duration: 33.085136s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:28:10.098263Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:28:10.098615Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;self_id=[1:7488011260412920689:5993];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038629;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038331;receive=72075186224038170; 2025-03-31T16:28:10.098913Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:28:10.099412Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> KqpIndexLookupJoin::LeftJoinOnlyLeftColumn+StreamLookup >> KqpJoinOrder::FiveWayJoinWithComplexPreds-ColumnStore [GOOD] >> TTopicYqlTest::DropTopicYql [GOOD] >> TTopicYqlTest::CreateTopicYqlBackCompatibility ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> OlapEstimationRowsCorrectness::TPCH2 [GOOD] Test command err: Trying to start YDB, gRPC: 15540, MsgBus: 22271 2025-03-31T16:27:13.463348Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011055964009464:2276];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:27:13.463416Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001912/r3tmp/tmpn0Eup3/pdisk_1.dat 2025-03-31T16:27:14.175618Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:27:14.180159Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:27:14.187280Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:27:14.194433Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15540, node 1 2025-03-31T16:27:14.402926Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:27:14.402944Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:27:14.402952Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:27:14.403044Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22271 TClient is connected to server localhost:22271 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:27:15.250274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:17.534975Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011073143879088:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:17.535110Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:17.538966Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011073143879100:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:17.543369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:27:17.566106Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011073143879102:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:27:17.626882Z node 1 :TX_PROXY ERROR: Actor# [1:7488011073143879153:2340] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:27:17.961126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:27:18.244807Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488011077438846745:2363];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:27:18.245052Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488011077438846745:2363];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:27:18.245281Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488011077438846745:2363];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:27:18.245380Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488011077438846745:2363];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:27:18.245473Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488011077438846745:2363];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:27:18.245584Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488011077438846745:2363];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:27:18.245694Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488011077438846745:2363];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:27:18.245816Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488011077438846745:2363];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:27:18.245910Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488011077438846745:2363];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:27:18.246010Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488011077438846745:2363];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:27:18.246103Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037901;self_id=[1:7488011077438846745:2363];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:27:18.246193Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488011077438846745:2363];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:27:18.261994Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488011077438846723:2358];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:27:18.262058Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488011077438846723:2358];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:27:18.262271Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488011077438846723:2358];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:27:18.262404Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488011077438846723:2358];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:27:18.262541Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488011077438846723:2358];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:27:18.262656Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488011077438846723:2358];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:27:18.262743Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488011077438846723:2358];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:27:18.262846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488011077438846723:2358];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:27:18.262971Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488011077438846723:2358];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:27:18.267178Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488011077438846723:2358];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:27:18.267382Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488011077438846723:2358];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:27:18.267494Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;self_id=[1:7488011077438846723:2358];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:27:18.285890Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488011077438846729:2361];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:27:18.285948Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488011077438846729:2361];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:27:18.286145Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;sel ... oller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:38.674896Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039317;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:38.677237Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039291;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:38.686504Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039253;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:38.689690Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039217;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:38.699274Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039231;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:38.704019Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039269;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:38.717446Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:38.725221Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039199;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:38.735377Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039259;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:38.739493Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039209;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:38.745112Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:38.749620Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039243;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:38.763465Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039273;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:38.763750Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:38.773919Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039263;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:38.774169Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039315;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:38.782045Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:38.787585Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:38.788087Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039289;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:38.793234Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039261;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:38.797147Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039367;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:38.803862Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039305;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:38.809197Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:38.814703Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039323;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:38.818212Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039327;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:38.826515Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:38.833254Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039235;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:38.836064Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039325;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:38.843033Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039225;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:38.848229Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039345;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:38.853127Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039271;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:38.857378Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039279;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:38.863002Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039299;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:38.868788Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039333;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:38.874187Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039295;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:38.883807Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039319;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:38.887354Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039339;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:38.895070Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:38.900758Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:38.904075Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:38.908482Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:38.916386Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039311;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:38.922342Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:38.924866Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:39.022166Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:39.221952Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpgg5cvayygt75n55pfg655", SessionId: ydb://session/3?node_id=1&id=ZTE2NmYwYmItYmRlZTczM2EtNDFjMGQ4MDctM2UyMTdlMDY=, Slow query, duration: 34.330034s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:28:39.688500Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:28:39.688656Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;self_id=[1:7488011335136932174:9522];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039094;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038933;receive=72075186224039392; 2025-03-31T16:28:39.689248Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:28:39.689745Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> KqpJoinOrder::TPCHRandomJoinViewJustWorks+ColumnStore >> KqpJoinOrder::TPCDS96+ColumnStore [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::ShuffleEliminationManyKeysJoinPredicate [GOOD] Test command err: Trying to start YDB, gRPC: 9235, MsgBus: 4869 2025-03-31T16:27:14.271055Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011058820224249:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:27:14.280429Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001911/r3tmp/tmpWQoZrQ/pdisk_1.dat 2025-03-31T16:27:14.992616Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:27:15.005567Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:27:15.005655Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:27:15.019116Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9235, node 1 2025-03-31T16:27:15.287292Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:27:15.287315Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:27:15.287324Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:27:15.287424Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4869 TClient is connected to server localhost:4869 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:27:16.079182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:16.099685Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:27:18.347999Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011076000093959:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:18.348122Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:18.348513Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011076000093971:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:18.352685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:27:18.364803Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-31T16:27:18.365347Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011076000093973:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:27:18.466782Z node 1 :TX_PROXY ERROR: Actor# [1:7488011076000094024:2340] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:27:18.857102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:27:19.196144Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488011080295061596:2358];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:27:19.196393Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488011080295061596:2358];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:27:19.196695Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488011080295061596:2358];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:27:19.196841Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488011080295061596:2358];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:27:19.196966Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488011080295061596:2358];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:27:19.197073Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488011080295061596:2358];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:27:19.197205Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488011080295061596:2358];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:27:19.197329Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488011080295061596:2358];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:27:19.197451Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488011080295061596:2358];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:27:19.197564Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488011080295061596:2358];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:27:19.197706Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7488011080295061596:2358];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:27:19.197829Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488011080295061596:2358];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:27:19.207421Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011080295061594:2357];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:27:19.207537Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011080295061594:2357];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:27:19.207786Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011080295061594:2357];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:27:19.207915Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011080295061594:2357];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:27:19.208039Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011080295061594:2357];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:27:19.208161Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011080295061594:2357];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:27:19.208278Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011080295061594:2357];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:27:19.208392Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011080295061594:2357];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:27:19.208519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011080295061594:2357];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:27:19.208630Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011080295061594:2357];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:27:19.208733Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011080295061594:2357];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:27:19.208849Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7488011080295061594:2357];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:27:19.255765Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488011080295061602:2361];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:27:19.255827Z node 1 ... oller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:45.595365Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:45.597003Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039237;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:45.601011Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039253;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:45.614299Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039281;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:45.617521Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:45.620011Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039313;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:45.625386Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:45.627065Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039277;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:45.634874Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039245;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:45.637358Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039367;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:45.646444Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:45.647804Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:45.656855Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:45.660165Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039267;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:45.666035Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039335;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:45.669031Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:45.675515Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:45.682389Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039333;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:45.688466Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039243;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:45.692036Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039337;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:45.697430Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039273;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:45.700088Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039209;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:45.709937Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:45.717221Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039311;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:45.717445Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039309;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:45.722812Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039271;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:45.731362Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039275;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:45.737567Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039301;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:45.742920Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:45.744553Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039251;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:45.754702Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039339;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:45.757538Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039345;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:45.764858Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039255;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:45.769527Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:45.781171Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:45.784788Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039380;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:45.793780Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039217;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:45.800232Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039193;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:45.806451Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039319;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:45.810257Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039305;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:45.816125Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:45.824708Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:45.833361Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039185;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:45.834150Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:45.974369Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:46.182235Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpgg72me2a6ysmnqa79s1hz", SessionId: ydb://session/3?node_id=1&id=ZDAzNWI0ZGEtM2Q5MDZhZmItNGJjNDYwZDktODZjODAzZmM=, Slow query, duration: 39.569230s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:28:46.666622Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:28:46.667040Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:28:46.667583Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;self_id=[1:7488011350878047146:9290];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039094;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038933;receive=72075186224039392; 2025-03-31T16:28:46.667944Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoinWithComplexPreds-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 10913, MsgBus: 15122 2025-03-31T16:28:08.391850Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011291551699933:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:08.425940Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018e8/r3tmp/tmpuXoUmm/pdisk_1.dat 2025-03-31T16:28:09.252731Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:28:09.266628Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:28:09.266704Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:28:09.275963Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10913, node 1 2025-03-31T16:28:09.521509Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:28:09.521535Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:28:09.521542Z node 1 :NET_CLASSIFIER WARN: 
failed to initialize from file: (empty maybe) 2025-03-31T16:28:09.521638Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15122 TClient is connected to server localhost:15122 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:28:10.498902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:10.547376Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:28:12.930107Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011308731569778:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:12.930229Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:12.930614Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011308731569790:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:12.934911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:28:12.958599Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011308731569792:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:28:13.020672Z node 1 :TX_PROXY ERROR: Actor# [1:7488011313026537139:2340] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:28:13.395594Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011291551699933:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:13.395649Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:28:13.494754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:28:13.656347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:28:13.697948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:28:13.731813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:28:13.764541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:28:13.966072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:28:14.004228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:28:14.048654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:28:14.123550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-31T16:28:14.191150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-03-31T16:28:14.233875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-03-31T16:28:14.304447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 
2025-03-31T16:28:14.347032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:28:15.068792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-03-31T16:28:15.139177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T16:28:15.174439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-31T16:28:15.250382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-31T16:28:15.292739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-31T16:28:15.339986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-31T16:28:15.378647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-31T16:28:15.406184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-31T16:28:15.519539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-03-31T16:28:15.600283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-31T16:28:15.646296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-31T16:28:15.685402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-03-31T16:28:15.728084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-03-31T16:28:15.760966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-03-31T16:28:15.803103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-03-31T16:28:15.838497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-03-31T16:28:15.883632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTa ... tablet_id=72075186224038473;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:51.056147Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038461;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:51.062191Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038561;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:51.062424Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038613;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:51.068545Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038633;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:51.068693Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038563;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:51.076006Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038597;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:51.076006Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038641;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:51.082283Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038617;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:51.083384Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038601;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:51.088287Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038619;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:51.089370Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038625;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:51.094491Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038581;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:51.096726Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038589;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:51.100431Z node 1 
:TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038599;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:51.104040Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038577;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:51.105862Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038575;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:51.109589Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038591;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:51.114898Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038609;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:51.120922Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038557;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:51.124696Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038605;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:51.131376Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038635;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:51.132172Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038611;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:51.138191Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038579;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:51.141933Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038627;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:51.154133Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038543;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:51.155385Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038549;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:51.161694Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038547;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:51.167294Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038621;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:51.177562Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038637;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 
2025-03-31T16:28:51.179745Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038607;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:51.187490Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038603;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:51.193654Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038623;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:51.201528Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038631;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:51.207943Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:51.220850Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038639;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:51.226196Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038595;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:51.234917Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038649;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:51.240451Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038661;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:51.245009Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038651;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:51.250843Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038655;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:51.258920Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038659;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:51.261053Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038643;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:51.297131Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038645;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:51.307682Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038653;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:51.312853Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038647;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:51.322455Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038657;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:51.447696Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpgghrqdqbpmvmf5bgb85sn", SessionId: ydb://session/3?node_id=1&id=YzY5ZjljOGMtNzYxNjkzMmEtYzQwMDBmYzQtZTgwYjRmYTI=, Slow query, duration: 33.887469s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:28:51.832336Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:28:51.832407Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:28:51.833297Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> KqpJoinOrder::CanonizedJoinOrderTPCH8 [GOOD] >> KqpJoinOrder::TPCHRandomJoinViewJustWorks-ColumnStore [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCH2 >> TPersQueueTest::StoreNoMoreThanXSourceIDs [GOOD] >> TPersQueueTest::SetupWriteSessionOnDisabledCluster >> KqpJoinOrder::GeneralPrioritiesBug3 >> TPersQueueTest::CheckDeleteTopic [GOOD] >> TPersQueueTest::CheckDecompressionTasksWithoutSession >> OlapEstimationRowsCorrectness::TPCH5 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS96+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 23281, MsgBus: 10279 2025-03-31T16:27:07.235530Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011026951960440:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:27:07.246728Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00191b/r3tmp/tmpvYR0YC/pdisk_1.dat 2025-03-31T16:27:08.056162Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:27:08.056245Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:27:08.060978Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:27:08.090160Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23281, 
node 1 2025-03-31T16:27:08.331573Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:27:08.331599Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:27:08.331608Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:27:08.331717Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10279 TClient is connected to server localhost:10279 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:27:09.095442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:11.377346Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011044131830281:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:11.377465Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:11.379524Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011044131830293:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:11.382795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:27:11.407932Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011044131830295:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:27:11.472063Z node 1 :TX_PROXY ERROR: Actor# [1:7488011044131830346:2340] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:27:11.972699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:27:12.237356Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011026951960440:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:27:12.237419Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:27:12.321805Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011048426797902:2351];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:27:12.322029Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011048426797902:2351];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:27:12.322269Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011048426797902:2351];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:27:12.322374Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011048426797902:2351];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:27:12.322483Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011048426797902:2351];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:27:12.322630Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011048426797902:2351];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:27:12.322748Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011048426797902:2351];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:27:12.322866Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011048426797902:2351];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:27:12.322984Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011048426797902:2351];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:27:12.329498Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037895;self_id=[1:7488011048426797906:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:27:12.329550Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488011048426797906:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:27:12.329780Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488011048426797906:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:27:12.329887Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488011048426797906:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:27:12.329989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488011048426797906:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:27:12.330084Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488011048426797906:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:27:12.330172Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488011048426797906:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:27:12.330264Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488011048426797906:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:27:12.330359Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488011048426797906:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:27:12.330450Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488011048426797906:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:27:12.330579Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488011048426797906:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:27:12.330719Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488011048426797906:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:27:12.323078Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011048426797902:2351];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:27:12.335712Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[1:7488011048426797902:2351];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:27:12.335850Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011048426797902:2351];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:27:12.382960Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011048426797912:2356];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.c ... ller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:39.801896Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:39.801896Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:39.808329Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:39.808330Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:39.815259Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039217;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:39.815258Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039209;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:39.822396Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039229;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:39.822783Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:39.828672Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039247;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:39.828977Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:39.835645Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:39.835645Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:39.842644Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:39.844779Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:39.848728Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:39.852090Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:39.855431Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039378;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:39.858321Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039233;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:39.861371Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:39.864478Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039241;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:39.868248Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039227;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:39.871592Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039243;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:39.875433Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039203;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:39.878199Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039199;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:39.882662Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039251;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:39.885106Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:39.889079Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039235;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:39.890985Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039225;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:39.895635Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039195;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:39.897093Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039223;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:39.903461Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:39.904461Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:39.909270Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:39.913387Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039249;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:39.916216Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039213;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:39.920650Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:39.922773Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039191;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:39.928151Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039197;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:39.929929Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039239;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:39.936062Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039237;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:39.936331Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039245;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:39.947444Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039231;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:39.952708Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:39.962009Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039205;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:39.967370Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039207;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:40.219415Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpgg1nr231j5x7g2e34rqtt", SessionId: ydb://session/3?node_id=1&id=NGQ1Mjg5NDgtNjkxOTUxOWEtYmIxYWExMjgtOGFkYTZhYjQ=, Slow query, duration: 39.138296s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:28:40.989728Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:28:40.990174Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:28:40.990914Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;self_id=[1:7488011413499081207:12610];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039392;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039094;receive=72075186224038933; 2025-03-31T16:28:40.991265Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCHRandomJoinViewJustWorks-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 28406, MsgBus: 30839 2025-03-31T16:28:15.199645Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011319295325649:2200];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:15.200360Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018e1/r3tmp/tmphGLz0L/pdisk_1.dat 2025-03-31T16:28:15.993169Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:28:15.993244Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:28:16.002338Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:28:16.028110Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28406, node 1 2025-03-31T16:28:16.394909Z node 1 :NET_CLASSIFIER WARN: distributable 
config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:28:16.411326Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:28:16.411362Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:28:16.419661Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30839 TClient is connected to server localhost:30839 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:28:17.301162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:17.332487Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:28:19.627072Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011336475195363:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:19.627207Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011336475195352:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:19.627298Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:19.633335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:28:19.655935Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-31T16:28:19.656355Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011336475195366:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:28:19.766821Z node 1 :TX_PROXY ERROR: Actor# [1:7488011336475195417:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:28:20.199945Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011319295325649:2200];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:20.200021Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:28:20.292195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:28:20.427126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:28:20.474900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:28:20.516154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:28:20.559856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:28:20.753915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:28:20.788437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:28:20.821250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:28:20.851686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-31T16:28:20.882181Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-03-31T16:28:20.964012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-03-31T16:28:21.003224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 
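Three KQP_SLOW_LOG reports appear in this output (33.887469s and 39.138296s above, 33.955590s further below), all following the fixed "Slow query, duration: <seconds>s" wording. A minimal sketch for pulling trace ids and durations out of such a log, assuming that exact wording and one report per line; the input path is hypothetical:

import re

# Matches the slow-query reports in this log, e.g.
# KQP_SLOW_LOG WARN: TraceId: "01jqpgg1nr...", SessionId: ..., Slow query, duration: 39.138296s, ...
SLOW = re.compile(r'KQP_SLOW_LOG WARN: TraceId: "([^"]+)".*?Slow query, duration: ([0-9.]+)s')

def slow_queries(log_text: str):
    # Yield (trace_id, duration_seconds) for every report found.
    for trace_id, seconds in SLOW.findall(log_text):
        yield trace_id, float(seconds)

with open("ya_log.txt") as f:  # hypothetical input path
    reports = sorted(slow_queries(f.read()), key=lambda r: r[1], reverse=True)

for trace_id, seconds in reports:
    print(f"{seconds:10.3f}s  {trace_id}")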
2025-03-31T16:28:21.105463Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:28:21.939662Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-03-31T16:28:22.017963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T16:28:22.055462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-31T16:28:22.093738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-31T16:28:22.139777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-31T16:28:22.177908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-31T16:28:22.244231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-31T16:28:22.318974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-31T16:28:22.374152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-03-31T16:28:22.475965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-31T16:28:22.514647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-31T16:28:22.592166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-03-31T16:28:22.640524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-03-31T16:28:22.686470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-03-31T16:28:22.723071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-03-31T16:28:22.757060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2 ... oller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:58.009877Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038575;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:58.013122Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038605;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:58.017004Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038515;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:58.018931Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038551;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:58.022791Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038581;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:58.025340Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038521;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:58.032435Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038487;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:58.036180Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038565;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:58.041047Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038493;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:58.042782Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038559;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:58.046959Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038527;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:58.049390Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038549;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:58.054060Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038577;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:58.055004Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038535;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:58.059932Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038589;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:58.060406Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038591;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:58.065752Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038538;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:58.066108Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038523;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:58.071665Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038576;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:58.071961Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038593;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:58.078537Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038536;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:58.078537Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038580;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:58.084288Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038558;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:58.084304Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038598;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:58.090196Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038572;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:58.090220Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038588;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:58.096191Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038566;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:58.096191Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038590;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:58.101859Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038606;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:58.104691Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038582;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:58.105996Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038592;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:58.111056Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038542;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:58.115337Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038594;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:58.119363Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038560;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:58.124257Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038544;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:58.126500Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038526;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:58.129266Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038600;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:58.132037Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038574;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:58.135275Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038602;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:58.137692Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038604;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:58.142474Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038578;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:58.148340Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038568;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:58.153213Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038570;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:58.206513Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038540;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:58.212132Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038484;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:58.275488Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpggrbz08xg8dc5rc3kf6zf", SessionId: ydb://session/3?node_id=1&id=YWI0M2QzMDQtZjRkZDM0MGItZTkyNjI0NDUtY2QyNGUxY2I=, Slow query, duration: 33.955590s, 
status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:28:58.616838Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:28:58.617336Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:28:58.618628Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;self_id=[1:7488011456734305810:5934];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038629;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038170;receive=72075186224038331; 2025-03-31T16:28:58.619004Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH8 [GOOD] Test command err: Trying to start YDB, gRPC: 9409, MsgBus: 7495 2025-03-31T16:27:04.363363Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011013742889022:2069];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:27:04.364875Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00191e/r3tmp/tmpL4XQ0f/pdisk_1.dat 2025-03-31T16:27:05.050034Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:27:05.056042Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:27:05.056109Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:27:05.058436Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9409, node 1 2025-03-31T16:27:05.299643Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:27:05.299665Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:27:05.299672Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:27:05.299764Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7495 TClient is connected to server localhost:7495 WaitRootIsUp 'Root'... 
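The KQP_SLOW_LOG reports above embed their query text with \n escapes; unescaped, it is the same three-table, 240-partition column-store DDL in every report:

CREATE TABLE t1 (
    id1 Int32 NOT NULL,
    PRIMARY KEY (id1)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t2 (
    id2 Int64 NOT NULL,
    t1_id1 Int64 NOT NULL,
    -- random_field2 Int32
    PRIMARY KEY (id2)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t3 (
    id3 Int16 NOT NULL,
    -- random_field3 Int32
    PRIMARY KEY (id3)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);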
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:27:06.205646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:06.220751Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:27:08.621721Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011030922758863:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:08.621859Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:08.622119Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011030922758875:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:08.626198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:27:08.647292Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011030922758877:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:27:08.745938Z node 1 :TX_PROXY ERROR: Actor# [1:7488011030922758928:2340] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:27:09.181887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:27:09.359237Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011013742889022:2069];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:27:09.359307Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:27:09.532161Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488011035217726482:2358];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:27:09.532425Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488011035217726482:2358];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:27:09.532712Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488011035217726482:2358];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:27:09.532829Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488011035217726482:2358];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:27:09.536583Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488011035217726465:2349];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:27:09.536644Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488011035217726465:2349];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:27:09.536868Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488011035217726465:2349];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:27:09.536963Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488011035217726465:2349];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:27:09.537162Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488011035217726482:2358];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:27:09.537309Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7488011035217726482:2358];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:27:09.537419Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488011035217726482:2358];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:27:09.537585Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488011035217726482:2358];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:27:09.537716Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488011035217726482:2358];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:27:09.537844Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488011035217726482:2358];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:27:09.537944Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488011035217726482:2358];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:27:09.538067Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488011035217726482:2358];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:27:09.547781Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488011035217726465:2349];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:27:09.548021Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488011035217726465:2349];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:27:09.548123Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488011035217726465:2349];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:27:09.548225Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488011035217726465:2349];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:27:09.548333Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488011035217726465:2349];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:27:09.548460Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488011035217726465:2349];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:27:09.548568Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488011035217726465:2349];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:27:09.548663Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488011035217726465:2349];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:27:09.606840Z node 1 :TX_COL ... 10714; 2025-03-31T16:28:34.485284Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039360;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.485434Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.492052Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.494309Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039352;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.500943Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039348;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.503924Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.513402Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.516073Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039350;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.526138Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039356;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.529176Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039376;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.538895Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039380;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.539999Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.549756Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.553181Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.560433Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.561658Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.572148Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.575367Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.581510Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.587077Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039364;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.588405Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039372;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.593844Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039366;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.599036Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.599427Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039368;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.605212Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.609959Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.618107Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.620405Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.626560Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.634440Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.635053Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.641131Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.647623Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.647657Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.653805Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.664396Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.668209Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039362;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.678864Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:34.947066Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpgfy449gwgwcnxb49m8sk3", SessionId: ydb://session/3?node_id=1&id=ZDVjOWNjN2ItZWNmMmE0YjctOTg0ZWU4NDUtODQ0OTZmNWI=, Slow query, duration: 37.499411s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:28:35.294460Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:28:35.294990Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:28:35.295389Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038933;self_id=[1:7488011249966127148:7732];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038933;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039094;receive=72075186224039392; 2025-03-31T16:28:35.295744Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:28:53.820439Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpghb868nwtdt1n79esd578", SessionId: ydb://session/3?node_id=1&id=ZDVjOWNjN2ItZWNmMmE0YjctOTg0ZWU4NDUtODQ0OTZmNWI=, Slow query, duration: 10.165041s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "pragma TablePathPrefix = \"/Root/\";\n\n-- TPC-H/TPC-R National Market Share Query (Q8)\n-- TPC TPC-H Parameter Substitution (Version 2.17.2 build 0)\n-- using 1680793381 as a seed to the RNG\n\nselect\n o_year,\n sum(case\n when nation = 'MOZAMBIQUE' then volume\n else 0\n end) / sum(volume) as mkt_share\nfrom\n (\n select\n DateTime::GetYear(o_orderdate) as o_year,\n l_extendedprice * (1 - l_discount) as volume,\n n2.n_name as nation\n from\n part\n cross join supplier\n cross join lineitem\n cross join orders\n cross join customer\n cross join nation n1\n cross join nation n2\n cross join region\n where\n p_partkey = l_partkey\n and s_suppkey = l_suppkey\n and l_orderkey = o_orderkey\n and o_custkey = c_custkey\n and c_nationkey = n1.n_nationkey\n and n1.n_regionkey = r_regionkey\n and r_name = 'AFRICA'\n and s_nationkey = n2.n_nationkey\n and o_orderdate between date('1995-01-01') and date('1996-12-31')\n and p_type = 'ECONOMY PLATED COPPER'\n ) as all_nations\ngroup by\n o_year\norder by\n o_year;", parameters: 0b
>> KqpJoinOrder::CanonizedJoinOrderTPCC [GOOD]
>> KqpFlipJoin::Inner_1 [GOOD]
>> KqpFlipJoin::Inner_2
>> KqpIndexLookupJoin::SimpleLeftJoin-StreamLookup
>> KqpIndexLookupJoin::JoinWithComplexCondition+StreamLookupJoin
>> OlapEstimationRowsCorrectness::TPCH9
>> KqpIndexLookupJoin::LeftJoinOnlyLeftColumn+StreamLookup [GOOD]
>> KqpIndexLookupJoin::LeftJoinOnlyLeftColumn-StreamLookup

------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCC [GOOD]
Test command err:
Trying to start YDB, gRPC: 1928, MsgBus: 4138
2025-03-31T16:28:12.267697Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011309431900446:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:12.268269Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018e2/r3tmp/tmpZJCjac/pdisk_1.dat 2025-03-31T16:28:12.980600Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:28:13.007468Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:28:13.007542Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:28:13.009224Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1928, node 1 2025-03-31T16:28:13.348880Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or
outdated, will use file: (empty maybe) 2025-03-31T16:28:13.348898Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:28:13.348904Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:28:13.349006Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4138 TClient is connected to server localhost:4138 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:28:14.511012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:14.564883Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:28:16.815912Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011326611770166:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:16.816015Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:16.819766Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011326611770178:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:16.824081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:28:16.836376Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011326611770180:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:28:16.934764Z node 1 :TX_PROXY ERROR: Actor# [1:7488011326611770231:2342] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:28:17.235192Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011309431900446:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:17.235239Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:28:17.239384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-31T16:28:17.386834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-31T16:28:17.424358Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:28:17.451349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:28:17.486543Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:28:17.634158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:28:17.667594Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:28:17.706640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:28:17.782445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-03-31T16:28:17.822029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480 2025-03-31T16:28:17.855598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-03-31T16:28:17.896097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 
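Each ESchemeOpCreateTable proposal logged above and below corresponds to one CREATE TABLE statement issued by the test. For readability, the KQP_SLOW_LOG entry later in this test's output carries the DDL as an escaped string; with only the \n escapes expanded (indentation approximate), it reads:

CREATE TABLE t1 (
    id1 Int32 NOT NULL,
    PRIMARY KEY (id1)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t2 (
    id2 Int64 NOT NULL,
    t1_id1 Int64 NOT NULL,
    -- random_field2 Int32
    PRIMARY KEY (id2)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t3 (
    id3 Int16 NOT NULL,
    -- random_field3 Int32
    PRIMARY KEY (id3)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);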
2025-03-31T16:28:17.938264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-31T16:28:18.701760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:2, at schemeshard: 72057594046644480 2025-03-31T16:28:18.778097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-03-31T16:28:18.828575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-03-31T16:28:18.872440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-03-31T16:28:18.925928Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2025-03-31T16:28:18.972042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480 2025-03-31T16:28:19.049103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480 2025-03-31T16:28:19.116945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2025-03-31T16:28:19.163314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:0, at schemeshard: 72057594046644480 2025-03-31T16:28:19.210828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480 2025-03-31T16:28:19.261395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715683:0, at schemeshard: 72057594046644480 2025-03-31T16:28:19.316479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715684:0, at schemeshard: 72057594046644480 2025-03-31T16:28:19.357776Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715685:0, at schemeshard: 72057594046644480 2025-03-31T16:28:19.399554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715686:0, at schemeshard: 72057594046644480 2025-03-31T16:28:19.475545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976715687:0, at schemeshard: 72057594046644480 2025-03-31T16:28:19.515795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715688:0, at schemeshard: 72057594046644480 2025-03-31T16:28:19.553167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, ... oller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:58.887911Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038492;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:58.892170Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038534;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:58.897729Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038526;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:58.901774Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038478;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:58.910314Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038548;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:58.912833Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038528;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:58.923220Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038578;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:58.928127Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038524;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:58.938444Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038544;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:58.939365Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038484;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:58.948335Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038598;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:58.954348Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038600;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:58.962728Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038572;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:58.964059Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038592;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:58.973669Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038616;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:58.978346Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038536;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:58.984782Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038506;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:58.991510Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038568;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:58.995643Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038542;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:59.001511Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038450;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:59.005361Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038650;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:59.010412Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038618;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:59.019971Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038648;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:59.027555Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038576;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:59.031719Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038586;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:59.037475Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038594;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:59.041522Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038580;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:59.050587Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038661;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:59.051871Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038538;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:59.061677Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038657;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:59.065016Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038560;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:59.076316Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038654;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:59.091999Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038574;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:59.102569Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038658;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:59.116327Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038649;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:59.122298Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038653;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:59.128272Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038584;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:59.134359Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038659;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:59.140266Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038655;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:59.146399Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038647;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:59.152595Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038646;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:59.154454Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038651;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:59.160615Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038660;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:59.164978Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038652;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:59.173045Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038656;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-31T16:28:59.339414Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpggn8576afkphwjm9xpvw5", SessionId: ydb://session/3?node_id=1&id=MjQzZWRlYmYtZjFhNjQzYi0yZTQ1YzhmZS02ZGI2OWIwMw==, Slow query, duration: 38.213449s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:28:59.899961Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976715716;tx_id=281474976715716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715716; 2025-03-31T16:28:59.900415Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976715716;tx_id=281474976715716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715716; 2025-03-31T16:28:59.900975Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;self_id=[1:7488011356676547402:2813];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038170;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038629;receive=72075186224038331; 2025-03-31T16:28:59.901276Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976715716;tx_id=281474976715716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715716;
>> KqpJoinOrder::FiveWayJoinWithPredsAndEquiv-ColumnStore [GOOD]
>> KqpJoinOrder::CanonizedJoinOrderTPCH9 [GOOD]

------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPointsAndRanges
2025-03-31 16:29:07,076 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out
2025-03-31 16:29:07,275 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout.
Process tree before termination:
pid     rss    ref    pdirt
421744  46.2M  46.2M  23.2M  test_tool run_ut @/home/runner/.ya/build/build_root/0l5c/0011a1/ydb/core/tx/datashard/ut_order/test-results/unittest/testing_out_stuff/chunk28/testing_out_stuff/test_tool.args
423337  1.2G   1.2G   879M   └─ ydb-core-tx-datashard-ut_order --trace-path-append /home/runner/.ya/build/build_root/0l5c/0011a1/ydb/core/tx/datashard/ut_order/test-results/unittest/testing_out_stuff/c
Test command err:
2025-03-31T16:19:12.084433Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:19:12.086964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:19:12.087023Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:19:12.096328Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:19:12.096833Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:131:2154] 2025-03-31T16:19:12.097210Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:19:12.110135Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:19:12.163971Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:19:12.164409Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:19:12.166304Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-03-31T16:19:12.166391Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-03-31T16:19:12.166463Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-03-31T16:19:12.166925Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:19:12.167571Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:19:12.167664Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:197:2154] in generation 2 2025-03-31T16:19:12.261292Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:19:12.310838Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-03-31T16:19:12.311049Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:19:12.311186Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-03-31T16:19:12.311229Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-03-31T16:19:12.311265Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-03-31T16:19:12.311333Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:19:12.311568Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:12.311629Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:12.311945Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-03-31T16:19:12.312075Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-03-31T16:19:12.312144Z node 1
:TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-31T16:19:12.312184Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:19:12.312234Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-03-31T16:19:12.312270Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-31T16:19:12.312307Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-31T16:19:12.312336Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-03-31T16:19:12.312377Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:19:12.312480Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:12.312554Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:12.312613Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-03-31T16:19:12.315381Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:131:2154]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nK\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\n \000Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-03-31T16:19:12.315442Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T16:19:12.315528Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-31T16:19:12.315683Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-03-31T16:19:12.315765Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-03-31T16:19:12.315828Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-03-31T16:19:12.315878Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-31T16:19:12.315925Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-03-31T16:19:12.315976Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-03-31T16:19:12.316010Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-03-31T16:19:12.316368Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-03-31T16:19:12.316423Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-03-31T16:19:12.316465Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-03-31T16:19:12.316499Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-03-31T16:19:12.316546Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-03-31T16:19:12.316589Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-03-31T16:19:12.316626Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-03-31T16:19:12.316662Z 
node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-03-31T16:19:12.316690Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-03-31T16:19:12.329189Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-03-31T16:19:12.329267Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-03-31T16:19:12.329303Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-03-31T16:19:12.329376Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-03-31T16:19:12.329449Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-03-31T16:19:12.330031Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:12.330083Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:19:12.330139Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-03-31T16:19:12.330277Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:131:2154]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-03-31T16:19:12.330307Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-31T16:19:12.330440Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-03-31T16:19:12.330488Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-31T16:19:12.330528Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-03-31T16:19:12.330571Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-03-31T16:19:12.334497Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-03-31T16:19:12.334587Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:19:12.334839Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:12.334879Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:19:12.334951Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-31T16:19:12.335002Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:19:12.335039Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-31T16:19:12.335100Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-03-31T16:19:12.335143Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-03-31T16:19:12.335194Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-31T16:19:12.335230Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] 
at 9437184 executing on unit PlanQueue 2025-03-31T16:19:12.335267Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-03-31T16:19:12.335305Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-03-31T16:19:12.335515Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-03-31T16:19:12.335558Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-31T16:19:12.335597Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-03-31T16:19:12.335626Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-03-31T16:19:12.335653Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-03-31T16:19:12.335721Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-31T16:19:12.335748Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-03-31T16:19:12.335783Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-31T16:19:12.335821Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-03-31T16:19:12.335904Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-03-31T16:19:12.335944Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-03-31T16:19:12.335979Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-03-31T16:19:12.336017Z node 1 :TX_D ... 
TX_DATASHARD DEBUG: tx 47 released its data 2025-03-31T16:29:07.041425Z node 4 :TX_DATASHARD TRACE: Execution status for [1000004:47] at 9437185 is Continue 2025-03-31T16:29:07.042210Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 (dry run) active 8 active planned 8 immediate 0 planned 364 2025-03-31T16:29:07.042255Z node 4 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437185 2025-03-31T16:29:07.042285Z node 4 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437185 out-of-order limits exceeded 2025-03-31T16:29:07.042316Z node 4 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437185 2025-03-31T16:29:07.042354Z node 4 :TX_DATASHARD TRACE: Check active operation [1000004:40] at 9437185 on unit LoadAndWaitInRS 2025-03-31T16:29:07.042387Z node 4 :TX_DATASHARD TRACE: Active operation [1000004:40] at 9437185 is not ready for LoadAndWaitInRS 2025-03-31T16:29:07.042417Z node 4 :TX_DATASHARD TRACE: Check active operation [1000004:41] at 9437185 on unit BuildAndWaitDependencies 2025-03-31T16:29:07.042443Z node 4 :TX_DATASHARD TRACE: Active operation [1000004:41] at 9437185 is not ready for BuildAndWaitDependencies 2025-03-31T16:29:07.042468Z node 4 :TX_DATASHARD TRACE: Check active operation [1000004:42] at 9437185 on unit BuildAndWaitDependencies 2025-03-31T16:29:07.042493Z node 4 :TX_DATASHARD TRACE: Active operation [1000004:42] at 9437185 is not ready for BuildAndWaitDependencies 2025-03-31T16:29:07.042516Z node 4 :TX_DATASHARD TRACE: Check active operation [1000004:43] at 9437185 on unit BuildAndWaitDependencies 2025-03-31T16:29:07.042572Z node 4 :TX_DATASHARD TRACE: Active operation [1000004:43] at 9437185 is not ready for BuildAndWaitDependencies 2025-03-31T16:29:07.042602Z node 4 :TX_DATASHARD TRACE: Check active operation [1000004:44] at 9437185 on unit BuildAndWaitDependencies 2025-03-31T16:29:07.042626Z node 4 :TX_DATASHARD TRACE: Active operation [1000004:44] at 9437185 is not ready for BuildAndWaitDependencies 2025-03-31T16:29:07.042648Z node 4 :TX_DATASHARD TRACE: Check active operation [1000004:45] at 9437185 on unit BuildAndWaitDependencies 2025-03-31T16:29:07.042670Z node 4 :TX_DATASHARD TRACE: Active operation [1000004:45] at 9437185 is not ready for BuildAndWaitDependencies 2025-03-31T16:29:07.042693Z node 4 :TX_DATASHARD TRACE: Check active operation [1000004:46] at 9437185 on unit BuildAndWaitDependencies 2025-03-31T16:29:07.042714Z node 4 :TX_DATASHARD TRACE: Active operation [1000004:46] at 9437185 is not ready for BuildAndWaitDependencies 2025-03-31T16:29:07.042902Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437186 2025-03-31T16:29:07.137108Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437186 active 7 active planned 7 immediate 0 planned 364 2025-03-31T16:29:07.137216Z node 4 :TX_DATASHARD DEBUG: Return cached ready operation [1000004:47] at 9437186 2025-03-31T16:29:07.137259Z node 4 :TX_DATASHARD TRACE: Trying to execute [1000004:47] at 9437186 on unit PlanQueue 2025-03-31T16:29:07.137310Z node 4 :TX_DATASHARD TRACE: Execution status for [1000004:47] at 9437186 is Executed 2025-03-31T16:29:07.137351Z node 4 :TX_DATASHARD TRACE: Advance execution plan for [1000004:47] at 9437186 executing on unit PlanQueue 2025-03-31T16:29:07.137391Z node 4 :TX_DATASHARD TRACE: Add [1000004:47] at 9437186 to execution unit LoadTxDetails 2025-03-31T16:29:07.137432Z node 4 :TX_DATASHARD TRACE: Trying to execute [1000004:47] at 9437186 on unit LoadTxDetails 2025-03-31T16:29:07.139546Z node 4 :TX_DATASHARD DEBUG: LoadTxDetails at 9437186 loaded tx 
from db 1000004:47 keys extracted: 7 2025-03-31T16:29:07.139640Z node 4 :TX_DATASHARD TRACE: Execution status for [1000004:47] at 9437186 is Executed 2025-03-31T16:29:07.139687Z node 4 :TX_DATASHARD TRACE: Advance execution plan for [1000004:47] at 9437186 executing on unit LoadTxDetails 2025-03-31T16:29:07.139747Z node 4 :TX_DATASHARD TRACE: Add [1000004:47] at 9437186 to execution unit FinalizeDataTxPlan 2025-03-31T16:29:07.139786Z node 4 :TX_DATASHARD TRACE: Trying to execute [1000004:47] at 9437186 on unit FinalizeDataTxPlan 2025-03-31T16:29:07.139840Z node 4 :TX_DATASHARD TRACE: Execution status for [1000004:47] at 9437186 is Executed 2025-03-31T16:29:07.139866Z node 4 :TX_DATASHARD TRACE: Advance execution plan for [1000004:47] at 9437186 executing on unit FinalizeDataTxPlan 2025-03-31T16:29:07.139890Z node 4 :TX_DATASHARD TRACE: Add [1000004:47] at 9437186 to execution unit BuildAndWaitDependencies 2025-03-31T16:29:07.139919Z node 4 :TX_DATASHARD TRACE: Trying to execute [1000004:47] at 9437186 on unit BuildAndWaitDependencies 2025-03-31T16:29:07.140137Z node 4 :TX_DATASHARD TRACE: Activated operation [1000004:47] at 9437186 2025-03-31T16:29:07.140252Z node 4 :TX_DATASHARD INFO: TBuildAndWaitDependenciesUnit at 9437186 released data for tx 47 2025-03-31T16:29:07.140391Z node 4 :TX_DATASHARD DEBUG: tx 47 released its data 2025-03-31T16:29:07.140428Z node 4 :TX_DATASHARD TRACE: Execution status for [1000004:47] at 9437186 is Continue 2025-03-31T16:29:07.140461Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437186 (dry run) active 8 active planned 8 immediate 0 planned 364 2025-03-31T16:29:07.140491Z node 4 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437186 2025-03-31T16:29:07.140523Z node 4 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437186 out-of-order limits exceeded 2025-03-31T16:29:07.140553Z node 4 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437186 2025-03-31T16:29:07.140591Z node 4 :TX_DATASHARD TRACE: Check active operation [1000004:40] at 9437186 on unit LoadAndWaitInRS 2025-03-31T16:29:07.140623Z node 4 :TX_DATASHARD TRACE: Active operation [1000004:40] at 9437186 is not ready for LoadAndWaitInRS 2025-03-31T16:29:07.140670Z node 4 :TX_DATASHARD TRACE: Check active operation [1000004:41] at 9437186 on unit BuildAndWaitDependencies 2025-03-31T16:29:07.140698Z node 4 :TX_DATASHARD TRACE: Active operation [1000004:41] at 9437186 is not ready for BuildAndWaitDependencies 2025-03-31T16:29:07.140722Z node 4 :TX_DATASHARD TRACE: Check active operation [1000004:42] at 9437186 on unit BuildAndWaitDependencies 2025-03-31T16:29:07.140745Z node 4 :TX_DATASHARD TRACE: Active operation [1000004:42] at 9437186 is not ready for BuildAndWaitDependencies 2025-03-31T16:29:07.140768Z node 4 :TX_DATASHARD TRACE: Check active operation [1000004:43] at 9437186 on unit BuildAndWaitDependencies 2025-03-31T16:29:07.140791Z node 4 :TX_DATASHARD TRACE: Active operation [1000004:43] at 9437186 is not ready for BuildAndWaitDependencies 2025-03-31T16:29:07.140815Z node 4 :TX_DATASHARD TRACE: Check active operation [1000004:44] at 9437186 on unit BuildAndWaitDependencies 2025-03-31T16:29:07.140855Z node 4 :TX_DATASHARD TRACE: Active operation [1000004:44] at 9437186 is not ready for BuildAndWaitDependencies 2025-03-31T16:29:07.140882Z node 4 :TX_DATASHARD TRACE: Check active operation [1000004:45] at 9437186 on unit BuildAndWaitDependencies 2025-03-31T16:29:07.140905Z node 4 :TX_DATASHARD TRACE: Active operation [1000004:45] at 9437186 is not ready for BuildAndWaitDependencies 
2025-03-31T16:29:07.140928Z node 4 :TX_DATASHARD TRACE: Check active operation [1000004:46] at 9437186 on unit BuildAndWaitDependencies 2025-03-31T16:29:07.140950Z node 4 :TX_DATASHARD TRACE: Active operation [1000004:46] at 9437186 is not ready for BuildAndWaitDependencies 2025-03-31T16:29:07.141457Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-31T16:29:07.141504Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 7 active planned 7 immediate 0 planned 364 2025-03-31T16:29:07.141548Z node 4 :TX_DATASHARD DEBUG: Return cached ready operation [1000004:47] at 9437184 2025-03-31T16:29:07.141581Z node 4 :TX_DATASHARD TRACE: Trying to execute [1000004:47] at 9437184 on unit PlanQueue 2025-03-31T16:29:07.141618Z node 4 :TX_DATASHARD TRACE: Execution status for [1000004:47] at 9437184 is Executed 2025-03-31T16:29:07.141649Z node 4 :TX_DATASHARD TRACE: Advance execution plan for [1000004:47] at 9437184 executing on unit PlanQueue 2025-03-31T16:29:07.141681Z node 4 :TX_DATASHARD TRACE: Add [1000004:47] at 9437184 to execution unit LoadTxDetails 2025-03-31T16:29:07.141711Z node 4 :TX_DATASHARD TRACE: Trying to execute [1000004:47] at 9437184 on unit LoadTxDetails 2025-03-31T16:29:07.147600Z node 4 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000004:47 keys extracted: 7 2025-03-31T16:29:07.147697Z node 4 :TX_DATASHARD TRACE: Execution status for [1000004:47] at 9437184 is Executed 2025-03-31T16:29:07.147738Z node 4 :TX_DATASHARD TRACE: Advance execution plan for [1000004:47] at 9437184 executing on unit LoadTxDetails 2025-03-31T16:29:07.147775Z node 4 :TX_DATASHARD TRACE: Add [1000004:47] at 9437184 to execution unit FinalizeDataTxPlan 2025-03-31T16:29:07.147811Z node 4 :TX_DATASHARD TRACE: Trying to execute [1000004:47] at 9437184 on unit FinalizeDataTxPlan 2025-03-31T16:29:07.147878Z node 4 :TX_DATASHARD TRACE: Execution status for [1000004:47] at 9437184 is Executed 2025-03-31T16:29:07.147919Z node 4 :TX_DATASHARD TRACE: Advance execution plan for [1000004:47] at 9437184 executing on unit FinalizeDataTxPlan 2025-03-31T16:29:07.147968Z node 4 :TX_DATASHARD TRACE: Add [1000004:47] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-31T16:29:07.147996Z node 4 :TX_DATASHARD TRACE: Trying to execute [1000004:47] at 9437184 on unit BuildAndWaitDependencies 2025-03-31T16:29:07.148221Z node 4 :TX_DATASHARD TRACE: Activated operation [1000004:47] at 9437184 2025-03-31T16:29:07.148340Z node 4 :TX_DATASHARD INFO: TBuildAndWaitDependenciesUnit at 9437184 released data for tx 47 2025-03-31T16:29:07.148484Z node 4 :TX_DATASHARD DEBUG: tx 47 released its data 2025-03-31T16:29:07.148521Z node 4 :TX_DATASHARD TRACE: Execution status for [1000004:47] at 9437184 is Continue 2025-03-31T16:29:07.148550Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 8 active planned 8 immediate 0 planned 364 2025-03-31T16:29:07.148577Z node 4 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-31T16:29:07.148606Z node 4 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 out-of-order limits exceeded 2025-03-31T16:29:07.148632Z node 4 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-31T16:29:07.148669Z node 4 :TX_DATASHARD TRACE: Check active operation [1000004:40] at 9437184 on unit LoadAndWaitInRS 2025-03-31T16:29:07.148699Z node 4 :TX_DATASHARD TRACE: Active operation [1000004:40] at 9437184 is not ready for LoadAndWaitInRS Traceback (most recent call last): File 
"library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8330113388/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/0l5c/0011a1/ydb/core/tx/datashard/ut_order/test-results/unittest/testing_out_stuff/chunk28/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1749, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8330113388/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/0l5c/0011a1/ydb/core/tx/datashard/ut_order/test-results/unittest/testing_out_stuff/chunk28/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout",), {}) >> KqpJoinOrder::TestJoinOrderHintsSimple-ColumnStore >> TPQCompatTest::SetupLockSession [GOOD] >> TPQCompatTest::BadTopics ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoinWithPredsAndEquiv-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 3161, MsgBus: 24423 2025-03-31T16:28:23.299537Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011355674743760:2208];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018d6/r3tmp/tmpG8EUJj/pdisk_1.dat 2025-03-31T16:28:23.619232Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:28:24.086308Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:28:24.086382Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:28:24.092018Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:28:24.103188Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3161, node 1 2025-03-31T16:28:24.479756Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:28:24.479786Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:28:24.479796Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:28:24.479898Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24423 TClient is connected to server localhost:24423 WaitRootIsUp 'Root'... 
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-31T16:28:25.457374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:28:28.044926Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011377149580747:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:28.045054Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:28.047430Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011377149580759:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:28.051626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:28:28.066760Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011377149580761:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:28:28.157163Z node 1 :TX_PROXY ERROR: Actor# [1:7488011377149580812:2343] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:28:28.290497Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011355674743760:2208];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:28.290568Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:28:28.565606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:28:28.712884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:28:28.785732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:28:28.817639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:28:28.858434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:28:29.042119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:28:29.103604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:28:29.136212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:28:29.177161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-31T16:28:29.212178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-03-31T16:28:29.243370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-03-31T16:28:29.283267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 
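The NOT_FOUND / "Scheduled retry" / "path exist, request accepts it" sequence above is the usual create-if-missing race on the default resource pool: two sessions both miss the pool, both propose its creation, and the loser treats "path exist" as success. A generic sketch of that pattern follows; the names fetch, create, and AlreadyExistsError are hypothetical and are not YDB APIs.

class AlreadyExistsError(Exception):
    """Raised by `create` when a concurrent creator won the race."""


def ensure_exists(fetch, create):
    # Fast path: the object is already there.
    obj = fetch()
    if obj is not None:
        return obj
    # Slow path: create it, tolerating a concurrent creator.
    try:
        create()
    except AlreadyExistsError:
        pass  # "path exist, request accepts it" -- someone else created it first
    # Re-read, mirroring the "completed, doublechecking" step in the log.
    return fetch()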
2025-03-31T16:28:29.320547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:28:30.053307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-03-31T16:28:30.111073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T16:28:30.153731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-31T16:28:30.198682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-31T16:28:30.271348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-31T16:28:30.309070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-31T16:28:30.350373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-31T16:28:30.381055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-31T16:28:30.413163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-03-31T16:28:30.460162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-31T16:28:30.521868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-31T16:28:30.580086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-03-31T16:28:30.616583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-03-31T16:28:30.663784Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-03-31T16:28:30.750382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-03-31T16:28:30.786925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-03-31T16:28:30.827636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 2025-03-31T16:28:30.870703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but pro ... oller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:03.838127Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038550;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:03.840128Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038644;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:03.852760Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038542;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:03.854036Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038638;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:03.862512Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038654;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:03.862556Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038613;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:03.868689Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038588;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:03.871501Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038632;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:03.874459Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038576;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:03.876562Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038659;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:03.879590Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038624;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:03.882135Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038618;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:03.884808Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038582;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:03.887697Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038637;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:03.890605Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038494;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:03.892958Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038602;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:03.896050Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038610;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:03.898157Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038647;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:03.901892Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038572;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:03.903562Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038564;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:03.908112Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038611;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:03.908785Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038601;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:03.913846Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038580;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:03.919660Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038586;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:03.925217Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038652;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:03.926730Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038528;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:03.932567Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038634;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:03.938046Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038562;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:03.939746Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038560;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:03.945216Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038658;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:03.948397Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038649;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:03.954246Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038660;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:03.957885Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038524;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:03.963652Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038574;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:03.968518Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038620;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:03.968891Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038628;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:03.974276Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038622;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:03.978569Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038496;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:03.984912Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038650;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:03.988253Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038522;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:03.993390Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038635;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:03.994508Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038608;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:03.998875Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038490;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:04.008512Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038554;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:04.104855Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038631;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:04.147439Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpgh05y559tnjew9xbhy1vj", SessionId: ydb://session/3?node_id=1&id=ZWFkNjQ0MjctODNhZGRhN2MtNmJlOWQwY2QtYTgwYWY1ODg=, Slow query, duration: 31.827948s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:29:04.425106Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:29:04.425644Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:29:04.430302Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;self_id=[1:7488011497408691485:5941];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038629;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038331;receive=72075186224038170; 2025-03-31T16:29:04.430993Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> KqpJoinOrder::CanonizedJoinOrderTPCDS78 [GOOD] >> KqpFlipJoin::RightOnly_1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH9 [GOOD] Test command err: Trying to start YDB, gRPC: 1481, MsgBus: 4940 2025-03-31T16:27:20.668975Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011085849733195:2200];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:27:20.675686Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00190c/r3tmp/tmpddq05Q/pdisk_1.dat 2025-03-31T16:27:21.289496Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:27:21.319694Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:27:21.319793Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:27:21.328824Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1481, node 1 
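The KQP_SLOW_LOG entries in this output (above at 31.8s, and again below at 38.3s in the CanonizedJoinOrderTPCH9 block) record the same DDL as one escaped string. Unescaped for readability, the statement text reproduced verbatim from the log is:

CREATE TABLE t1 (
    id1 Int32 NOT NULL,
    PRIMARY KEY (id1)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t2 (
    id2 Int64 NOT NULL,
    t1_id1 Int64 NOT NULL,
    -- random_field2 Int32
    PRIMARY KEY (id2)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t3 (
    id3 Int16 NOT NULL,
    -- random_field3 Int32
    PRIMARY KEY (id3)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);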
2025-03-31T16:27:21.659344Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T16:27:21.659362Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T16:27:21.659368Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T16:27:21.659481Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:4940
TClient is connected to server localhost:4940
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-31T16:27:22.794446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:27:22.808634Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-03-31T16:27:25.042308Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011107324570196:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:25.042473Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:25.042980Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011107324570208:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:25.047984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:27:25.061743Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011107324570210:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:27:25.144164Z node 1 :TX_PROXY ERROR: Actor# [1:7488011107324570261:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:27:25.545750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:27:25.663300Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011085849733195:2200];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:27:25.663367Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:27:25.821617Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7488011107324570565:2359];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:27:25.821855Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7488011107324570565:2359];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:27:25.822123Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7488011107324570565:2359];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:27:25.822238Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7488011107324570565:2359];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:27:25.822345Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7488011107324570565:2359];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:27:25.822711Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7488011107324570565:2359];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:27:25.822830Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7488011107324570565:2359];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:27:25.822972Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7488011107324570565:2359];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:27:25.823102Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7488011107324570565:2359];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:27:25.823225Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037893;self_id=[1:7488011107324570565:2359];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:27:25.823343Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7488011107324570565:2359];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:27:25.823446Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7488011107324570565:2359];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:27:25.827211Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488011107324570523:2351];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:27:25.827283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488011107324570523:2351];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:27:25.828439Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488011107324570523:2351];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:27:25.828599Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488011107324570523:2351];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:27:25.828708Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488011107324570523:2351];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:27:25.828812Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488011107324570523:2351];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:27:25.828916Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488011107324570523:2351];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:27:25.829020Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488011107324570523:2351];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:27:25.829141Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488011107324570523:2351];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:27:25.829248Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488011107324570523:2351];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:27:25.829376Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7488011107324570523:2351];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:27:25.829496Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488011107324570523:2351];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:27:25.876246Z node 1 :TX_COL ... ller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:52.000223Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039209;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:52.002808Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039279;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:52.005594Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039229;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:52.008560Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039267;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:52.010827Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039231;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:52.014608Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039215;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:52.016455Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039287;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:52.022224Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039245;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:52.023491Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039283;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:52.028654Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039227;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:52.029590Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039291;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:52.037579Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039211;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:52.038605Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039266;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:52.051546Z node 1 :TX_COLUMNSHARD_TX 
WARN: tablet_id=72075186224039286;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:52.051899Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039263;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:52.058490Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039249;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:52.065080Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039269;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:52.067909Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039213;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:52.070754Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039265;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:52.073389Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039257;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:52.076173Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039235;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:52.079010Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039329;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:52.081339Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039315;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:52.097578Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039233;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:52.103801Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039255;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:52.108262Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:52.122334Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039251;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:52.124604Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039303;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:52.143298Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039272;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 
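Most of this output is one "event=finished_tx" line per column shard for the same transaction (tx_id 281474976710714 here). When scanning such storms it can help to collapse them to a tablet count per tx_id. Below is a small, hypothetical helper, not part of the test tooling, assuming only the key=value; layout visible above.

import re
from collections import defaultdict

# Matches the leading 'tablet_id=...;' and the first 'tx_id=...;' pair in
# TX_COLUMNSHARD_TX lines such as:
#   tablet_id=72075186224039229;tx_state=...;tx_id=281474976710714;...
LINE_RE = re.compile(r"tablet_id=(\d+);.*?tx_id=(\d+);")

def summarize_finished_tx(lines):
    # tx_id -> set of tablets that reported event=finished_tx for it
    tablets_per_tx = defaultdict(set)
    for line in lines:
        if "event=finished_tx" not in line:
            continue
        m = LINE_RE.search(line)
        if m:
            tablet_id, tx_id = m.groups()
            tablets_per_tx[tx_id].add(tablet_id)
    return {tx: len(tabs) for tx, tabs in tablets_per_tx.items()}

# Example (file name is illustrative):
#   summarize_finished_tx(open("ya_log.txt"))  ->  {"281474976710714": 123, ...}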
2025-03-31T16:28:52.147937Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039264;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:52.153560Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039241;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:52.157661Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039237;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:52.168146Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039354;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:52.168428Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039290;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:52.174628Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039221;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:52.175191Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039323;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:52.181875Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039219;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:52.186525Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039319;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:52.193072Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039327;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:52.196660Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039325;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:52.201615Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039317;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:52.202617Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039292;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:52.208841Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039296;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:52.210001Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039308;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:52.214757Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039299;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:52.399560Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpggecbbjy3bbrb3ew474zn", SessionId: ydb://session/3?node_id=1&id=NTg5MzNiYWMtZjMyMDBkNTgtOWJiZGI2ZjUtZDFkMzA5YmM=, Slow query, duration: 38.303980s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:28:52.732612Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:28:52.733104Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:28:52.733960Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;self_id=[1:7488011433742141880:11223];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039392;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038933;receive=72075186224039094; 2025-03-31T16:28:52.734362Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> AnalyzeColumnshard::AnalyzeSameOperationId [GOOD] >> KqpJoinOrder::TPCDS34+ColumnStore >> KqpFlipJoin::Inner_2 [GOOD] >> KqpIndexLookupJoin::SimpleLeftJoin-StreamLookup [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeSameOperationId [GOOD] Test command err: 2025-03-31T16:26:14.262676Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:26:14.262966Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:26:14.263042Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000e9e/r3tmp/tmp35VEWd/pdisk_1.dat 2025-03-31T16:26:14.760235Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7702, node 1 2025-03-31T16:26:15.072704Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:26:15.072765Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:26:15.072831Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:26:15.075825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:26:15.076335Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:26:15.179947Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:26:15.180199Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:26:15.200833Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:64341 2025-03-31T16:26:15.878268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:26:19.512181Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-31T16:26:19.555016Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:26:19.555168Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:26:19.593489Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:26:19.596394Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:26:19.904981Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:26:19.905766Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:26:19.906311Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:26:19.906443Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:26:19.906656Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:26:19.906741Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:26:19.906824Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:26:19.906930Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:26:19.907029Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:26:20.131290Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:26:20.131419Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:26:20.145500Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:26:20.305721Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:26:20.397373Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-31T16:26:20.397498Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-31T16:26:20.436353Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-31T16:26:20.479196Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-31T16:26:20.479448Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-31T16:26:20.479507Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-31T16:26:20.479567Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-31T16:26:20.479626Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-31T16:26:20.479679Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-31T16:26:20.479743Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-31T16:26:20.480485Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-31T16:26:20.518095Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-31T16:26:20.518217Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1876:2599], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-31T16:26:20.527457Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1889:2610] 2025-03-31T16:26:20.531963Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1908:2619] 2025-03-31T16:26:20.533371Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1908:2619], schemeshard id = 72075186224037897 2025-03-31T16:26:20.544406Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-31T16:26:20.560582Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-31T16:26:20.560646Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-31T16:26:20.560701Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-31T16:26:20.575985Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-31T16:26:20.581596Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-31T16:26:20.581759Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-31T16:26:20.843785Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-31T16:26:21.062403Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-31T16:26:21.138748Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-31T16:26:22.352694Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2242:3076], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:22.352849Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:22.392194Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-31T16:26:22.566725Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:26:22.567003Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:26:22.567334Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:26:22.567490Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:26:22.567680Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:26:22.567867Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:26:22.568155Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:26:22.568368Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:26:22.568618Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:26:22.568764Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:26:22.568898Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:26:22.569052Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2329:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:26:22.608549Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:26:22.608765Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;descr ... p traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-03-31T16:29:04.624600Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-31T16:29:04.630669Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-03-31T16:29:04.637058Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6986:5163], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:04.637156Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6997:5168], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:04.637229Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:04.669584Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897 2025-03-31T16:29:04.778451Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7000:5171], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-03-31T16:29:05.128273Z node 2 :TX_PROXY ERROR: Actor# [2:7096:5217] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:29:05.356955Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:7125:5232]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:29:05.357207Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-31T16:29:05.357316Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:7127:5234] 2025-03-31T16:29:05.357383Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:7127:5234] 2025-03-31T16:29:05.357743Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7128:5235] 2025-03-31T16:29:05.357883Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:7128:5235], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-03-31T16:29:05.357952Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-03-31T16:29:05.358147Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7127:5234], server id = [2:7128:5235], tablet id = 72075186224037894, status = OK 2025-03-31T16:29:05.358214Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-31T16:29:05.358299Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:7125:5232], StatRequests.size() = 1 2025-03-31T16:29:05.983683Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZjA2NTA3ZjYtZDJmMTc5YzYtNThjNzk4MmQtMzI2ZjVhNjE=, TxId: 2025-03-31T16:29:05.983777Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZjA2NTA3ZjYtZDJmMTc5YzYtNThjNzk4MmQtMzI2ZjVhNjE=, TxId: 2025-03-31T16:29:05.988069Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-31T16:29:06.012777Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-03-31T16:29:06.012861Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-03-31T16:29:06.062169Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-03-31T16:29:06.062259Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-03-31T16:29:06.163748Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:7127:5234], schemeshard count = 1 2025-03-31T16:29:07.420460Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-31T16:29:07.420566Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-03-31T16:29:07.428894Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-31T16:29:07.460437Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-31T16:29:07.461035Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-31T16:29:07.461094Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::ExecuteAnalyze. 
Table OperationId operationId, PathId [OwnerId: 72075186224037897, LocalPathId: 4], AnalyzedShards 1 2025-03-31T16:29:07.484370Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-31T16:29:07.495520Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableRequest::Complete. Send 1 events. ... blocking NKikimr::NStat::TEvStatistics::TEvAnalyzeTableResponse from TX_COLUMNSHARD_ACTOR to STATISTICS_AGGREGATOR ... waiting for TEvAnalyzeTableResponse (done) ... unblocking NKikimr::NStat::TEvStatistics::TEvAnalyzeTableResponse from TX_COLUMNSHARD_ACTOR to STATISTICS_AGGREGATOR 2025-03-31T16:29:07.497042Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Execute 2025-03-31T16:29:07.497163Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Execute. All shards are analyzed 2025-03-31T16:29:07.497754Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:2807:3226] , Record { OperationId: "operationId" Tables { PathId { OwnerId: 72075186224037897 LocalId: 4 } } Types: TYPE_COUNT_MIN_SKETCH } 2025-03-31T16:29:07.497822Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. Update existing force traversal. OperationId operationId , ReplyToActorId [1:2807:3226] 2025-03-31T16:29:07.523990Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-03-31T16:29:07.524075Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Complete 2025-03-31T16:29:09.050931Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:29:09.051102Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-03-31T16:29:09.051168Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-31T16:29:09.051868Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-31T16:29:09.072471Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-31T16:29:09.073052Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-31T16:29:09.073167Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-31T16:29:09.074298Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-03-31T16:29:09.093163Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-03-31T16:29:09.093512Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-03-31T16:29:09.094187Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7254:5311], server id = [2:7255:5312], tablet id = 72075186224037899, status = OK 2025-03-31T16:29:09.094332Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7254:5311], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T16:29:09.113221Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-03-31T16:29:09.113369Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-03-31T16:29:09.113672Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-03-31T16:29:09.113926Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-03-31T16:29:09.114212Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7254:5311], server id = [2:7255:5312], tablet id = 72075186224037899 2025-03-31T16:29:09.114260Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:29:09.114480Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-31T16:29:09.118082Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-03-31T16:29:09.201420Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7275:5331]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:29:09.201728Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-31T16:29:09.201822Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7275:5331], StatRequests.size() = 1 2025-03-31T16:29:09.425549Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=Y2E4NjJkZmYtZDc1MTIzNWUtNmFkOWNhMjctYjZjNjM3Mw==, TxId: 2025-03-31T16:29:09.425644Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=Y2E4NjJkZmYtZDc1MTIzNWUtNmFkOWNhMjctYjZjNjM3Mw==, TxId: 2025-03-31T16:29:09.426306Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-31T16:29:09.451523Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-31T16:29:09.451616Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2807:3226] 2025-03-31T16:29:10.131804Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 0 2025-03-31T16:29:10.131900Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-03-31T16:29:12.471791Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-31T16:29:12.472000Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-31T16:29:12.498675Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:29:15.408241Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:29:15.408322Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-31T16:29:16.795802Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-31T16:29:16.823203Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 0 2025-03-31T16:29:16.823294Z node 2 :STATISTICS DEBUG: Skip TEvStatisticsRequestTimeout >> KqpJoinOrder::FiveWayJoin-ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCDS78 [GOOD] Test command err: Trying to start YDB, gRPC: 30801, MsgBus: 1540 2025-03-31T16:26:54.834572Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010972590974658:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:54.851235Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001926/r3tmp/tmpZR2W1N/pdisk_1.dat 2025-03-31T16:26:55.449379Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:26:55.449517Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:26:55.456106Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:26:55.485299Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30801, node 1 2025-03-31T16:26:55.671609Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:26:55.671623Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:26:55.671628Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:26:55.671720Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1540 TClient is connected to server localhost:1540 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:26:56.688315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:26:56.715233Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:26:59.134786Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010994065811801:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:59.134896Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:59.135193Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010994065811813:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:26:59.139741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:26:59.157714Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010994065811815:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:26:59.263272Z node 1 :TX_PROXY ERROR: Actor# [1:7488010994065811866:2343] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:26:59.637008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:26:59.839760Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010972590974658:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:26:59.839825Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:26:59.912498Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488010994065812211:2364];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:26:59.912726Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488010994065812211:2364];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:26:59.912984Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488010994065812211:2364];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:26:59.913093Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488010994065812211:2364];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:26:59.913182Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488010994065812211:2364];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:26:59.913270Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488010994065812211:2364];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:26:59.913364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488010994065812211:2364];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:26:59.913457Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488010994065812211:2364];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:26:59.913552Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488010994065812211:2364];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:26:59.913650Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7488010994065812211:2364];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:26:59.913751Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488010994065812211:2364];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:26:59.913843Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488010994065812211:2364];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:26:59.915987Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7488010994065812160:2360];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:26:59.916054Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7488010994065812160:2360];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:26:59.916292Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7488010994065812160:2360];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:26:59.916440Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7488010994065812160:2360];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:26:59.916571Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7488010994065812160:2360];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:26:59.916698Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7488010994065812160:2360];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:26:59.916817Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7488010994065812160:2360];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:26:59.916947Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7488010994065812160:2360];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:26:59.917077Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7488010994065812160:2360];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:26:59.917216Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7488010994065812160:2360];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:26:59.917418Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037898;self_id=[1:7488010994065812160:2360];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:26:59.917570Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7488010994065812160:2360];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:26:59.957013Z node 1 :TX_C ... X WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:26.268818Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039350;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:26.270456Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039285;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:26.276659Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039252;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:26.279371Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039287;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:26.283750Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039325;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:26.285763Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039255;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:26.290436Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:26.292583Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039321;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:26.297094Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039372;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:26.300483Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:26.303713Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:26.307634Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:26.310714Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:26.314171Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039376;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:26.318275Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:26.320845Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:26.325288Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039281;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:26.328363Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039202;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:26.334420Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039249;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:26.341573Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:26.349458Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:26.358231Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:26.360239Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:26.377206Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:26.379558Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039253;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:26.386347Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039345;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:26.393485Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:26.394798Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:26.410503Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:26.626467Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpgfm1q6p1sv110vgr6tpch", SessionId: ydb://session/3?node_id=1&id=ZjhkZWJlOTAtNGE4NTIxMmQtMjE4MWQ0NmItZDNhNDc1ZWM=, Slow query, duration: 39.498221s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:28:27.358160Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:28:27.358673Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:28:27.359519Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;self_id=[1:7488011264648798134:9288];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039094;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039392;receive=72075186224038933; 2025-03-31T16:28:27.359912Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:29:05.953670Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpghcazcdm7qteqqx79asm8", SessionId: ydb://session/3?node_id=1&id=ZjhkZWJlOTAtNGE4NTIxMmQtMjE4MWQ0NmItZDNhNDc1ZWM=, Slow query, duration: 21.185035s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "PRAGMA TablePathPrefix='/Root/test/ds';\n\n-- NB: Subquerys\n\n$ws =\n\n (select date_dim.d_year AS ws_sold_year, web_sales.ws_item_sk ws_item_sk,\n\n web_sales.ws_bill_customer_sk ws_customer_sk,\n\n sum(ws_quantity) ws_qty,\n\n sum(ws_wholesale_cost) ws_wc,\n\n sum(ws_sales_price) ws_sp\n\n from web_sales as web_sales\n\n left join web_returns as web_returns on web_returns.wr_order_number=web_sales.ws_order_number and web_sales.ws_item_sk=web_returns.wr_item_sk\n\n join date_dim as date_dim on web_sales.ws_sold_date_sk = date_dim.d_date_sk\n\n where wr_order_number is null\n\n group by date_dim.d_year, web_sales.ws_item_sk, web_sales.ws_bill_customer_sk\n\n );\n\n$cs =\n\n (select date_dim.d_year AS cs_sold_year, catalog_sales.cs_item_sk cs_item_sk,\n\n catalog_sales.cs_bill_customer_sk cs_customer_sk,\n\n sum(cs_quantity) cs_qty,\n\n sum(cs_wholesale_cost) cs_wc,\n\n sum(cs_sales_price) cs_sp\n\n from catalog_sales as catalog_sales\n\n left join catalog_returns as catalog_returns on catalog_returns.cr_order_number=catalog_sales.cs_order_number and catalog_sales.cs_item_sk=catalog_returns.cr_item_sk\n\n join date_dim as date_dim on catalog_sales.cs_sold_date_sk = date_dim.d_date_sk\n\n where 
cr_order_number is null\n\n group by date_dim.d_year, catalog_sales.cs_item_sk, catalog_sales.cs_bill_customer_sk\n\n );\n\n$ss=\n\n (select date_dim.d_year AS ss_sold_year, store_sales.ss_item_sk ss_item_sk,\n\n store_sales.ss_customer_sk ss_customer_sk,\n\n sum(ss_quantity) ss_qty,\n\n sum(ss_wholesale_cost) ss_wc,\n\n sum(ss_sales_price) ss_sp\n\n from store_sales as store_sales\n\n left join store_returns as store_returns on store_returns.sr_ticket_number=store_sales.ss_ticket_number and store_sales.ss_item_sk=store_returns.sr_item_sk\n\n join date_dim as date_dim on store_sales.ss_sold_date_sk = date_dim.d_date_sk\n\n where sr_ticket_number is null\n\n group by date_dim.d_year, store_sales.ss_item_sk, store_sales.ss_customer_sk\n\n );\n\n-- start query 1 in stream 0 using template query78.tpl and seed 1819994127\n\n select\n\nss_sold_year, ss_item_sk, ss_customer_sk,\n\ncast(ss_qty as double)/(coalesce(ws_qty,0)+coalesce(cs_qty,0)) ratio,\n\nss_qty store_qty, ss_wc store_wholesale_cost, ss_sp store_sales_price,\n\ncoalesce(ws_qty,0)+coalesce(cs_qty,0) other_chan_qty,\n\ncoalesce(ws_wc,0)+coalesce(cs_wc,0) other_chan_wholesale_cost,\n\ncoalesce(ws_sp,0)+coalesce(cs_sp,0) other_chan_sales_price\n\nfrom $ss ss\n\nleft join $ws ws on (ws.ws_sold_year=ss.ss_sold_year and ws.ws_item_sk=ss.ss_item_sk and ws.ws_customer_sk=ss.ss_customer_sk)\n\nleft join $cs cs on (cs.cs_sold_year=ss.ss_sold_year and cs.cs_item_sk=ss.ss_item_sk and cs.cs_customer_sk=ss.ss_customer_sk)\n\nwhere (coalesce(ws_qty,0)>0 or coalesce(cs_qty, 0)>0) and ss_sold_year=2001\n\norder by\n\n ss_sold_year, ss_item_sk, ss_customer_sk,\n\n store_qty desc, store_wholesale_cost desc, store_sales_price desc,\n\n other_chan_qty,\n\n other_chan_wholesale_cost,\n\n other_chan_sales_price,\n\n ratio\n\nlimit 100;\n\n\n\n-- end query 1 in stream 0 using template query78.tpl", parameters: 0b ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpFlipJoin::Inner_2 [GOOD] Test command err: Trying to start YDB, gRPC: 1261, MsgBus: 28184 2025-03-31T16:28:59.527678Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011509690848160:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:59.531711Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018b7/r3tmp/tmptr1haw/pdisk_1.dat 2025-03-31T16:29:00.378768Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:29:00.378835Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:29:00.391430Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:29:00.393740Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1261, node 1 2025-03-31T16:29:00.703621Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:29:00.703640Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:29:00.703648Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:29:00.703767Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is 
connected to server localhost:28184 TClient is connected to server localhost:28184 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:29:01.786208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:29:01.845531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:29:02.066957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:29:02.286465Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:29:02.430480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:29:04.491434Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011509690848160:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:29:04.491527Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:29:04.523073Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011531165686278:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:04.523209Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:05.051566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:29:05.128622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:29:05.171651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:29:05.218769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:29:05.271375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:29:05.318639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:29:05.424943Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011535460654098:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:05.425011Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:05.425364Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011535460654103:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:05.429923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:29:05.448098Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011535460654105:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:29:05.551776Z node 1 :TX_PROXY ERROR: Actor# [1:7488011535460654165:3461] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:29:06.954402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:29:06.993831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:29:07.048224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T16:29:07.095774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 {"Plan":{"Plans":[{"PlanNodeId":10,"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":8,"Plans":[{"PlanNodeId":7,"Plans":[{"Tables":["FJ_Table_1"],"PlanNodeId":6,"Operators":[{"Scan":"Parallel","E-Size":"0","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/FJ_Table_1","ReadRangesPointPrefixLen":"0","E-Rows":"4","Table":"FJ_Table_1","ReadColumns":["Key","Value"],"E-Cost":"0"}],"Node Type":"TableFullScan"},{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["FJ_Table_2"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"0","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/FJ_Table_2","ReadRangesPointPrefixLen":"0","E-Rows":"2","Table":"FJ_Table_2","ReadColumns":["Fk1","Value"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Node 
Type":"Stage","Stats":{"ComputeNodes":[{"Tasks":[{"FinishTimeMs":1743438548281,"Host":"ghrun-tuhoqst4xe","OutputRows":2,"StartTimeMs":1743438548281,"IngressRows":2,"ComputeTimeUs":182,"NodeId":1,"OutputChannels":[{"ChannelId":1,"Rows":2,"DstStageId":1,"Bytes":24}],"TaskId":1,"OutputBytes":24}],"PeakMemoryUsageBytes":65536,"CpuTimeUs":1695}],"UseLlvm":"undefined","Output":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"LastMessageMs":{"Count":1,"Sum":8,"Max":8,"Min":8},"FirstMessageMs":{"Count":1,"Sum":8,"Max":8,"Min":8},"Bytes":{"Count":1,"Sum":24,"Max":24,"Min":24,"History":[10,24]}},"Name":"4","Push":{"WaitTimeUs":{"Count":1,"Sum":8225,"Max":8225,"Min":8225,"History":[10,8225]},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"Chunks":{"Count":1,"Sum":2,"Max":2,"Min":2},"ResumeMessageMs":{"Count":1,"Sum":8,"Max":8,"Min":8},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"LastMessageMs":{"Count":1,"Sum":8,"Max":8,"Min":8},"FirstMessageMs":{"Count":1,"Sum":8,"Max":8,"Min":8}}}],"MaxMemoryUsage":{"Count":1,"Sum":1048576,"Max":1048576,"Min":1048576,"History":[10,1048576]},"Tasks":1,"OutputRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"FinishedTasks":1,"IngressRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"PhysicalStageId":0,"StageDurationUs":0,"Table":[{"Path":"\/Root\/FJ_Table_2","ReadRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"ReadBytes":{"Count":1,"Sum":22,"Max":22,"Min":22}}],"BaseTimeMs":1743438548273,"OutputBytes":{"Count":1,"Sum":24,"Max":24,"Min":24},"CpuTimeUs":{"Count":1,"Sum":933,"Max":933,"Min":933,"History":[10,933]},"Ingress":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"LastMessageMs":{"Count":1,"Sum":8,"Max":8,"Min":8},"FirstMessageMs":{"Count":1,"Sum":8,"Max":8,"Min":8},"Bytes":{"Count":1,"Sum":64,"Max":64,"Min":64,"History":[10,64]}},"External":{},"Name":"KqpReadRangesSource","Ingress":{},"Push":{"LastMessageMs":{"Count":1,"Sum":8,"Max":8,"Min":8},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"ResumeMessageMs":{"Count":1,"Sum":8,"Max":8,"Min":8},"FirstMessageMs":{"Count":1,"Sum":8,"Max":8,"Min":8},"Bytes":{"Count":1,"Sum":64,"Max":64,"Min":64,"History":[10,64] ... 
ax":1,"Min":1},"WaitMessageMs":{"Count":1,"Max":11,"Min":3}}}],"CpuTimeUs":{"Count":1,"Sum":517,"Max":517,"Min":517,"History":[12,517]},"StageDurationUs":1000,"ResultRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"ResultBytes":{"Count":1,"Sum":38,"Max":38,"Min":38},"OutputBytes":{"Count":1,"Sum":38,"Max":38,"Min":38},"Input":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"LastMessageMs":{"Count":1,"Sum":11,"Max":11,"Min":11},"FirstMessageMs":{"Count":1,"Sum":11,"Max":11,"Min":11},"Bytes":{"Count":1,"Sum":38,"Max":38,"Min":38,"History":[12,38]}},"Name":"7","Push":{"LastMessageMs":{"Count":1,"Sum":10,"Max":10,"Min":10},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"ResumeMessageMs":{"Count":1,"Sum":10,"Max":10,"Min":10},"FirstMessageMs":{"Count":1,"Sum":10,"Max":10,"Min":10},"Bytes":{"Count":1,"Sum":38,"Max":38,"Min":38,"History":[12,38]},"PauseMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"WaitTimeUs":{"Count":1,"Sum":7864,"Max":7864,"Min":7864,"History":[12,7864]},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"WaitMessageMs":{"Count":1,"Max":10,"Min":3}}}],"InputRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"Tasks":1}}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"Compilation":{"FromCache":false,"DurationUs":627669,"CpuTimeUs":623312},"ProcessCpuTimeUs":353,"TotalDurationUs":676506,"ResourcePoolId":"default","QueuedTimeUs":0},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":7,"Operators":[{"Scan":"Parallel","E-Size":"0","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Path":"\/Root\/FJ_Table_1","ReadRangesPointPrefixLen":"0","E-Rows":"4","Table":"FJ_Table_1","ReadColumns":["Key","Value"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"4","Predicate":"Exist(item.Key)","Name":"Filter","E-Size":"0","E-Cost":"0"}],"Node Type":"Filter"},{"PlanNodeId":12,"Operators":[{"Scan":"Parallel","E-Size":"0","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Path":"\/Root\/FJ_Table_2","ReadRangesPointPrefixLen":"0","E-Rows":"2","Table":"FJ_Table_2","ReadColumns":["Fk1","Value"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"2","Condition":"t1.Key = t2.Fk1","Name":"InnerJoin (MapJoin)","E-Size":"0","E-Cost":"14.4"}],"Node Type":"InnerJoin (MapJoin)"}],"Operators":[{"A-Rows":2,"A-SelfCpu":6.976,"A-Cpu":6.976,"A-Size":38,"Name":"TopSort","Limit":"1001","TopSortBy":"[row.t1.Value,row.t2.Value]"}],"Node Type":"TopSort"}],"Operators":[{"A-Rows":2,"A-SelfCpu":0.517,"A-Cpu":7.493,"A-Size":38,"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","PlanNodeType":"Query"}} Trying to start YDB, gRPC: 30814, MsgBus: 8785 2025-03-31T16:29:09.599517Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488011552524479475:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:29:09.620110Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018b7/r3tmp/tmp89P0wr/pdisk_1.dat 2025-03-31T16:29:10.058575Z node 2 :IMPORT 
WARN: Table profiles were not loaded 2025-03-31T16:29:10.064451Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:29:10.064522Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:29:10.072148Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30814, node 2 2025-03-31T16:29:10.235597Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:29:10.235627Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:29:10.235637Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:29:10.235751Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8785 TClient is connected to server localhost:8785 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:29:10.805441Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:29:10.815540Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:29:10.830650Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:29:10.901950Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:29:11.108937Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:29:11.205119Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:29:14.080421Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488011573999317706:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:14.080509Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:14.149288Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:29:14.190245Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:29:14.224893Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:29:14.267602Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:29:14.303117Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:29:14.383361Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:29:14.488612Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488011573999318226:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:14.488694Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:14.488883Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488011573999318231:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:14.493325Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:29:14.511575Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488011573999318233:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:29:14.591804Z node 2 :TX_PROXY ERROR: Actor# [2:7488011573999318288:3446] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:29:14.603191Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488011552524479475:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:29:14.603268Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:29:15.872925Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:29:15.944307Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-31T16:29:16.057646Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-31T16:29:16.149024Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 >> KqpIndexLookupJoin::JoinWithComplexCondition+StreamLookupJoin [GOOD] >> KqpIndexLookupJoin::JoinWithComplexCondition-StreamLookupJoin >> KqpIndexLookupJoin::LeftJoinOnlyLeftColumn-StreamLookup [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::SimpleLeftJoin-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 1329, MsgBus: 6082 2025-03-31T16:29:09.661521Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011553111633975:2206];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:29:09.662118Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018ad/r3tmp/tmpFfUHL1/pdisk_1.dat 2025-03-31T16:29:10.518659Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:29:10.520028Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:29:10.520089Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:29:10.522794Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1329, node 1 2025-03-31T16:29:10.809754Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:29:10.809775Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:29:10.809782Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:29:10.809888Z node 1 
:NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6082 TClient is connected to server localhost:6082 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:29:11.779897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:29:11.806869Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:29:11.826568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:29:12.095373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:29:12.338716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:29:12.446147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:29:14.644180Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011553111633975:2206];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:29:14.644243Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:29:14.835236Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011574586472081:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:14.835369Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:15.195511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:29:15.232021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:29:15.279453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:29:15.311771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:29:15.359375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:29:15.441658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:29:15.519119Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011578881439894:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:15.519188Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:15.520767Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011578881439900:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:15.527174Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:29:15.551444Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011578881439902:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:29:15.619950Z node 1 :TX_PROXY ERROR: Actor# [1:7488011578881439958:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:29:16.928771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:29:16.978117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:29:17.021145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T16:29:17.108824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T16:29:17.175911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-31T16:29:17.270029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftJoinOnlyLeftColumn-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 21782, MsgBus: 18695 2025-03-31T16:29:01.483694Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011519794435417:2201];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:29:01.484225Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018b6/r3tmp/tmpqlAVfF/pdisk_1.dat 2025-03-31T16:29:02.227065Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:29:02.227183Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:29:02.232624Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:29:02.279570Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21782, node 1 2025-03-31T16:29:02.576151Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:29:02.576174Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:29:02.576183Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:29:02.576321Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18695 TClient is connected to server 
localhost:18695 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:29:03.550232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:29:03.590182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:29:03.874001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:29:04.125378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:29:04.231146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:29:06.464260Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011519794435417:2201];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:29:06.464323Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:29:06.804967Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011541269273545:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:06.805084Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:07.230497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:29:07.273927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:29:07.315712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:29:07.363014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:29:07.413017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:29:07.490596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:29:07.557009Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011545564241359:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:07.557086Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:07.557304Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011545564241364:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:07.561046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:29:07.575823Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011545564241366:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:29:07.646949Z node 1 :TX_PROXY ERROR: Actor# [1:7488011545564241421:3457] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:29:08.745467Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:29:08.827311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:29:08.881016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T16:29:08.932840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T16:29:09.021112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-31T16:29:09.075341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 6642, MsgBus: 12727 2025-03-31T16:29:11.327553Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488011561866140686:2081];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018b6/r3tmp/tmpw4JYni/pdisk_1.dat 2025-03-31T16:29:11.516379Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:29:11.723967Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:29:11.747369Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:29:11.747443Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:29:11.756192Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6642, node 2 2025-03-31T16:29:11.923648Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:29:11.923667Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:29:11.923674Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:29:11.923776Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12727 TClient is connected to server localhost:12727 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:29:12.793304Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:29:12.801364Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:29:12.821072Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:29:12.985594Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:29:13.194810Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:29:13.288906Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-31T16:29:16.155838Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488011583340978889:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:16.155924Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:16.218131Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:29:16.262332Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:29:16.299108Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:29:16.334783Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488011561866140686:2081];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:29:16.334964Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:29:16.342810Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:29:16.436494Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:29:16.499665Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:29:16.580696Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488011583340979406:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:16.580798Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:16.583677Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488011583340979411:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:16.592494Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:29:16.612117Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2025-03-31T16:29:16.615251Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488011583340979413:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:29:16.674711Z node 2 :TX_PROXY ERROR: Actor# [2:7488011583340979471:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:29:18.512626Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:29:18.571765Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-31T16:29:18.623366Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-31T16:29:18.709928Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-03-31T16:29:18.781815Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-03-31T16:29:18.866264Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 |93.0%| [TA] $(B)/ydb/core/tx/datashard/ut_order/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpJoinOrder::TestJoinHint2-ColumnStore [GOOD] >> KqpIndexLookupJoin::JoinWithSubquery+StreamLookup >> TTopicYqlTest::CreateTopicYqlBackCompatibility [GOOD] |93.0%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_order/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TestJoinHint2-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 12135, MsgBus: 16780 2025-03-31T16:28:28.813033Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011375233268866:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:28.836696Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018d2/r3tmp/tmpwWJ9uX/pdisk_1.dat 2025-03-31T16:28:29.670973Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:28:29.702236Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:28:29.702332Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:28:29.704509Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12135, node 1 2025-03-31T16:28:29.967624Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:28:29.967643Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:28:29.967650Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:28:29.967756Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16780 TClient is connected to server localhost:16780 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:28:30.869716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:33.073432Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011396708105874:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:33.073533Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:33.073827Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011396708105886:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:33.078316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:28:33.093496Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011396708105888:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:28:33.183143Z node 1 :TX_PROXY ERROR: Actor# [1:7488011396708105939:2340] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:28:33.683125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:28:33.814401Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011375233268866:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:33.814473Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:28:33.852035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:28:33.937803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:28:33.991313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:28:34.067751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:28:34.282509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:28:34.316544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:28:34.354173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:28:34.387130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-31T16:28:34.424797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-03-31T16:28:34.519795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-03-31T16:28:34.596410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 
2025-03-31T16:28:34.637569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:28:35.352488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-03-31T16:28:35.391077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T16:28:35.426488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-31T16:28:35.496160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-31T16:28:35.553707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-31T16:28:35.613357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-31T16:28:35.659696Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-31T16:28:35.699639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-31T16:28:35.736771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-03-31T16:28:35.778658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-31T16:28:35.830251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-31T16:28:35.907292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-03-31T16:28:35.978543Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-03-31T16:28:36.032443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-03-31T16:28:36.077412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-03-31T16:28:36.118665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-03-31T16:28:36.168157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 2025-03-31T16:28:36.215990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but p ... tablet_id=72075186224038567;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:16.048227Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038583;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:16.054247Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038519;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:16.054246Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038513;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:16.068826Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038565;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:16.073681Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038652;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:16.078991Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038559;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:16.082396Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038507;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:16.093254Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038575;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:16.099578Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038609;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:16.105651Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038585;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:16.106725Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038493;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:16.116816Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038543;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:16.120976Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038537;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:16.130259Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038587;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:16.131781Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038646;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:16.137670Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038648;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:16.142847Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038501;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:16.153719Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038485;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:16.160716Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038581;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:16.167019Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038577;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:16.171195Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038555;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:16.177180Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038499;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:16.179107Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038573;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:16.188616Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038617;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:16.193841Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038653;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:16.202579Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038489;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:16.206516Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038638;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:16.210746Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038642;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:16.216029Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038579;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:16.225124Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038660;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:16.230228Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038628;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:16.237876Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038541;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:16.241644Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038654;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:16.251453Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038622;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:16.255178Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038632;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:16.261119Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038523;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:16.267527Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038591;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:16.273524Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038641;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:16.279818Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038625;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:16.282982Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038634;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:16.296319Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038611;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:16.302305Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038658;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:16.310665Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038639;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:16.314488Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038643;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:16.323283Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038659;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:16.328914Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038649;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:16.587365Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpgh5n824361v5fkwp20954", SessionId: ydb://session/3?node_id=1&id=Y2JkODdiNGYtZjBkZWUyZTctNjMxMWJhYzktYjY4YzliZjI=, Slow query, duration: 38.657940s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:29:16.908236Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:29:16.908265Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:29:16.909854Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> KqpFlipJoin::RightOnly_1 [GOOD] >> KqpFlipJoin::RightOnly_2 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest >> TTopicYqlTest::CreateTopicYqlBackCompatibility [GOOD] Test command err: 2025-03-31T16:25:54.753265Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010717048103086:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:54.753366Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:25:54.844453Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488010714348120409:2210];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:54.845036Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:25:55.052742Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0009f7/r3tmp/tmpzA1g72/pdisk_1.dat 2025-03-31T16:25:55.078834Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-31T16:25:55.459924Z node 1 :IMPORT WARN: Table profiles were not loaded 
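For reference, the statement quoted in the KQP_SLOW_LOG record above (duration 38.657940s) arrives with its \n escapes folded into one line. Below is the same DDL expanded into a self-contained C++ raw-string constant; the constant name and the tiny main() are hypothetical illustrations, but the SQL text is verbatim from the log. Three column-store tables with AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240 apiece is also consistent with the wall of finished_tx warnings above it: each of the hundreds of column shards has to acknowledge the same schema transaction (281474976710714).

    #include <cstdio>

    // Hypothetical constant; the SQL is copied verbatim from the slow-query
    // record above, with the \n escapes expanded for readability.
    static constexpr const char* kSlowCreateTables = R"sql(
    CREATE TABLE t1 (
        id1 Int32 NOT NULL,
        PRIMARY KEY (id1)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t2 (
        id2 Int64 NOT NULL,
        t1_id1 Int64 NOT NULL,
        -- random_field2 Int32
        PRIMARY KEY (id2)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t3 (
        id3 Int16 NOT NULL,
        -- random_field3 Int32
        PRIMARY KEY (id3)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);
    )sql";

    int main() { std::puts(kSlowCreateTables); return 0; }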
2025-03-31T16:25:55.470492Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:55.470585Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:55.471769Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:55.471830Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:55.478851Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:25:55.478981Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:25:55.480665Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7160, node 1 2025-03-31T16:25:55.788022Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/0l5c/0009f7/r3tmp/yandex2sjAxF.tmp 2025-03-31T16:25:55.788053Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/0l5c/0009f7/r3tmp/yandex2sjAxF.tmp 2025-03-31T16:25:55.788223Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/0l5c/0009f7/r3tmp/yandex2sjAxF.tmp 2025-03-31T16:25:55.788347Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:25:55.856742Z INFO: TTestServer started on Port 21530 GrpcPort 7160 TClient is connected to server localhost:21530 PQClient connected to localhost:7160 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:25:56.158732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:25:56.219538Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-31T16:25:56.230826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-03-31T16:25:58.702756Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010734227973357:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:58.702776Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010734227973369:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:58.702893Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:58.705322Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010734227973375:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:58.705367Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:58.706932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-03-31T16:25:58.744244Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010734227973371:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-03-31T16:25:59.031252Z node 1 :TX_PROXY ERROR: Actor# [1:7488010734227973452:2758] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:25:59.066681Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:25:59.173090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:25:59.185222Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488010738522940762:2350], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:25:59.187409Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODljNzFiZjgtZGY5NGU1YjQtOWI0Y2YxMTAtNWVlOTdmMWM=, ActorId: [1:7488010734227973354:2337], ActorState: ExecuteState, TraceId: 01jqpgca3xdzyy6gxv9y0bcmtj, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:25:59.189466Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T16:25:59.323486Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-03-31T16:25:59.589237Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jqpgcawj3fhxhzbm85qm9d07, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTRiYzU5MC0zM2Y0MGY0Ni1iNTZmZmNmOC03NzkxNmZmNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7488010738522941228:3101] 2025-03-31T16:25:59.755615Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010717048103086:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:59.755730Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:25:59.843517Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488010714348120409:2210];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:59.843585Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2025-03-31T16:26:04.794509Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7488010721343070644:2138], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:26:04.795452Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7488010721343070644:2138], cacheItem# { Subscriber: { Subscriber: [1:7488010721343071113:2445] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:26:04.795578Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7488010759997778066:3329], recipient# [1:7488010759997778065:2451], result# { ErrorCount: 1 Databa ... : "rt3.dc1--legacy--topic1". Partition: 1 2025-03-31T16:29:20.093386Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvTxCommitDone Step 1743438560059, TxId 281474976720680, Partition 1 2025-03-31T16:29:20.095243Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037893] delete TxId 281474976720680 2025-03-31T16:29:20.093415Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Try execute txs with state EXECUTING 2025-03-31T16:29:20.093441Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976720680, State EXECUTING 2025-03-31T16:29:20.093470Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976720680 State EXECUTING FrontTxId 281474976720680 2025-03-31T16:29:20.093488Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Received 1, Expected 1 2025-03-31T16:29:20.093520Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId: 281474976720680 send TEvPersQueue::TEvProposeTransactionResult(COMPLETE) 2025-03-31T16:29:20.093555Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] complete TxId 281474976720680 2025-03-31T16:29:20.094121Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Apply new config PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 2 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 1 TopicName: "rt3.dc1--legacy--topic1" Version: 0 LocalDC: true RequireAuthWrite: true RequireAuthRead: true Producer: "legacy" Ident: "legacy" Topic: "topic1" DC: "dc1" FormatVersion: 0 Codecs { } TopicPath: "/Root/PQ/rt3.dc1--legacy--topic1" 
YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 1 KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Active CreateVersion: 1 TabletId: 0 } ReadRuleGenerations: 0 PartitionStrategy { MinPartitionCount: 2 MaxPartitionCount: 5 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } AllPartitions { PartitionId: 1 KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Active CreateVersion: 1 TabletId: 0 } Consumers { Name: "c1" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 } 2025-03-31T16:29:20.094247Z node 26 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-31T16:29:20.094391Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] delete partitions for TxId 281474976720680 2025-03-31T16:29:20.094416Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976720680, NewState EXECUTED 2025-03-31T16:29:20.094445Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976720680 moved from EXECUTING to EXECUTED 2025-03-31T16:29:20.094978Z node 26 :PERSQUEUE DEBUG: [TxId: 281474976720680] save tx TxId: 281474976720680 State: EXECUTED MinStep: 1743438560003 MaxStep: 18446744073709551615 Step: 1743438560059 Predicate: true Kind: KIND_CONFIG TabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 2 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 1 TopicName: "rt3.dc1--legacy--topic1" Version: 0 LocalDC: true RequireAuthWrite: true RequireAuthRead: true Producer: "legacy" Ident: "legacy" Topic: "topic1" DC: "dc1" FormatVersion: 0 Codecs { } TopicPath: "/Root/PQ/rt3.dc1--legacy--topic1" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 1 KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Active CreateVersion: 1 TabletId: 0 } ReadRuleGenerations: 0 PartitionStrategy { MinPartitionCount: 2 MaxPartitionCount: 5 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } AllPartitions { PartitionId: 1 KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Active CreateVersion: 1 TabletId: 0 } Consumers { Name: "c1" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 } } BootstrapConfig { } SourceActor { RawX1: 7488011527045826540 RawX2: 107374184597 } Partitions { Partition { PartitionId: 1 } } 2025-03-31T16:29:20.095668Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Send 
TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-03-31T16:29:20.116193Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-03-31T16:29:20.116239Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Try execute txs with state EXECUTED 2025-03-31T16:29:20.116263Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976720680, State EXECUTED 2025-03-31T16:29:20.116292Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976720680 State EXECUTED FrontTxId 281474976720680 2025-03-31T16:29:20.116314Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TPersQueue::SendEvReadSetAckToSenders 2025-03-31T16:29:20.116345Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976720680, NewState WAIT_RS_ACKS 2025-03-31T16:29:20.116369Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976720680 moved from EXECUTED to WAIT_RS_ACKS 2025-03-31T16:29:20.116403Z node 26 :PERSQUEUE DEBUG: [TxId: 281474976720680] PredicateAcks: 0/0 2025-03-31T16:29:20.116413Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] HaveAllRecipientsReceive 1, AllSupportivePartitionsHaveBeenDeleted 1 2025-03-31T16:29:20.116431Z node 26 :PERSQUEUE DEBUG: [TxId: 281474976720680] PredicateAcks: 0/0 2025-03-31T16:29:20.116455Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] add an TxId 281474976720680 to the list for deletion 2025-03-31T16:29:20.116483Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976720680, NewState DELETING 2025-03-31T16:29:20.116517Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] delete key for TxId 281474976720680 2025-03-31T16:29:20.116603Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-03-31T16:29:20.133450Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-03-31T16:29:20.133490Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Try execute txs with state DELETING 2025-03-31T16:29:20.133514Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976720680, State DELETING 2025-03-31T16:29:20.133539Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] delete TxId 281474976720680 TClient::Ls request: /Root/PQ/rt3.dc1--legacy--topic1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "rt3.dc1--legacy--topic1" PathId: 13 SchemeshardId: 72057594046644480 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 281474976720680 CreateStep: 1743438560059 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186224037894 } PersQueueGroup { Name: "rt3.dc1--legacy--topic1" PathId: 13 TotalGroupCount: 2 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 10... 
(TRUNCATED) === PATH DESCRIPTION: Name: "rt3.dc1--legacy--topic1" PathId: 13 TotalGroupCount: 2 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } LocalDC: true RequireAuthWrite: true RequireAuthRead: true Producer: "legacy" Ident: "legacy" Topic: "topic1" DC: "dc1" FormatVersion: 0 Codecs { } YdbDatabasePath: "/Root" PartitionStrategy { MinPartitionCount: 2 MaxPartitionCount: 5 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "c1" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } Partitions { PartitionId: 0 TabletId: 72075186224037893 KeyRange { ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186224037892 KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Active } AlterVersion: 1 BalancerTabletID: 72075186224037894 NextPartitionId: 2 2025-03-31T16:29:21.738998Z node 25 :KQP_EXECUTER ERROR: ActorId: [25:7488011604355240190:2518] TxId: 281474976720685. Ctx: { TraceId: 01jqpgjge87wx8ssetmph6cyjt, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=25&id=NmRlNmU5NGUtYzQzMTAxYWEtMzI3YzNmYi0zN2E2MDc2OA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 26 2025-03-31T16:29:21.739230Z node 25 :KQP_COMPUTE ERROR: SelfId: [25:7488011604355240194:2518], TxId: 281474976720685, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jqpgjge87wx8ssetmph6cyjt. SessionId : ydb://session/3?node_id=25&id=NmRlNmU5NGUtYzQzMTAxYWEtMzI3YzNmYi0zN2E2MDc2OA==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [25:7488011604355240190:2518], status: UNAVAILABLE, reason: {
: Error: Terminate execution } >> TVectorIndexTests::CreateTableMultiColumn |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::SplitBothWithReboots[TabletReboots] >> TAsyncIndexTests::MergeIndexWithReboots[TabletReboots] >> TAsyncIndexTests::CdcAndSplitWithReboots[TabletReboots] >> KqpJoinOrder::FiveWayJoinWithConstantFoldOpt+ColumnStore [GOOD] >> TAsyncIndexTests::MergeMainWithReboots[PipeResets] >> TPersQueueTest::CheckDecompressionTasksWithoutSession [GOOD] >> TPersQueueTest::Codecs_InitWriteSession_DefaultTopicSupportedCodecsInInitResponse >> KqpJoinOrder::CanonizedJoinOrderTPCH22 [GOOD] >> KqpJoinOrder::TestJoinHint2+ColumnStore [GOOD] |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::CreateTableMultiColumn [GOOD] >> KqpIndexLookupJoin::JoinWithComplexCondition-StreamLookupJoin [GOOD] >> TPersQueueTest::SetupWriteSessionOnDisabledCluster [GOOD] >> TPersQueueTest::SetupReadSession ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::CreateTableMultiColumn [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:29:30.521036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:29:30.521164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:29:30.521210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:29:30.521250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:29:30.521325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:29:30.521361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:29:30.521421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:29:30.521510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:29:30.521931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:29:30.614787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:29:30.614890Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:29:30.643440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:29:30.643542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:29:30.643692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 
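A note on the TTopicYqlTest::CreateTopicYqlBackCompatibility output above: the PERSQUEUE records for tablet 72075186224037892 walk TxId 281474976720680 through EXECUTING -> EXECUTED -> WAIT_RS_ACKS -> DELETING, and every hop is made durable with a key-value write (TEvKeyValue::TEvRequest, WRITE_TX_COOKIE) before the tablet proceeds. The sketch below reconstructs that lifecycle as read off the log; every type and name in it is an illustrative assumption, not an actual YDB class, and the persistence step is reduced to comments.

    #include <cstdint>
    #include <cstdio>

    enum class ETxState { Executing, Executed, WaitRsAcks, Deleting, Deleted };

    struct TTxObserved {
        uint64_t TxId;
        ETxState State;
        bool AllRecipientsAcked;     // "HaveAllRecipientsReceive 1" in the log
        bool SupportivePartsDeleted; // "AllSupportivePartitionsHaveBeenDeleted 1"
    };

    // Each transition below is followed in the log by a WRITE_TX_COOKIE
    // key-value write that persists the new state before the next step runs.
    ETxState Step(TTxObserved& tx) {
        switch (tx.State) {
            case ETxState::Executing:
                // all partitions reported commit ("Received 1, Expected 1")
                return tx.State = ETxState::Executed;
            case ETxState::Executed:
                // read-set acks sent (TPersQueue::SendEvReadSetAckToSenders)
                return tx.State = ETxState::WaitRsAcks;
            case ETxState::WaitRsAcks:
                // "PredicateAcks: 0/0": nothing outstanding, tx can be retired
                if (tx.AllRecipientsAcked && tx.SupportivePartsDeleted)
                    return tx.State = ETxState::Deleting;
                return tx.State;
            case ETxState::Deleting:
                // "delete key for TxId ...": state record removed from storage
                return tx.State = ETxState::Deleted;
            case ETxState::Deleted:
                return tx.State;
        }
        return tx.State; // unreachable
    }

    int main() {
        TTxObserved tx{281474976720680ULL, ETxState::Executing, true, true};
        while (tx.State != ETxState::Deleted)
            Step(tx);
        std::printf("tx %llu retired\n", static_cast<unsigned long long>(tx.TxId));
        return 0;
    }

Persisting the state before each transition presumably lets the tablet resume an in-flight transaction from the last WRITE_TX_COOKIE record after a restart; the log shows exactly one round trip per state ("Try execute txs with state ...", then the matching TEvResponse).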
2025-03-31T16:29:30.676309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:29:30.676580Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:29:30.677306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:30.677563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:29:30.680503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:30.696587Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:29:30.696682Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:30.696904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:29:30.696965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:29:30.697064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:29:30.697262Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:29:30.727222Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:29:30.921756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:29:30.922037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:30.922300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:29:30.922648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:29:30.922753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:30.926425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:30.926579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:29:30.926868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:30.926938Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:29:30.926973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState 
no shards to create, do next state 2025-03-31T16:29:30.927017Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:29:30.929585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:30.929642Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:29:30.929683Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:29:30.931464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:30.931514Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:30.931580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:30.931633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:29:30.935554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:29:30.937624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:29:30.937841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:29:30.938916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:30.939047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:29:30.939133Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:30.939401Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:29:30.939454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:30.939645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:29:30.939719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:29:30.943579Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:29:30.943636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 
72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:29:30.943841Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:30.943899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:29:30.944119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:30.944159Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:29:30.944274Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:29:30.944328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:29:30.944379Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:29:30.944411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:29:30.944474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:29:30.944541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:29:30.944587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:29:30.944617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:29:30.944687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:29:30.944724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:29:30.944756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:29:30.947009Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:29:30.947404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:29:30.947467Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
ateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "idx_vector" LocalPathId: 3 Type: EIndexTypeGlobalVectorKmeansTree State: EIndexStateReady KeyColumnNames: "embedding" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataColumnNames: "covered1" DataColumnNames: "covered2" DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { 
InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } VectorIndexKmeansTreeDescription { Settings { settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 1024 } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:29:31.464043Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/vectors/idx_vector/indexImplLevelTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-31T16:29:31.464369Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/vectors/idx_vector/indexImplLevelTable" took 311us result status StatusSuccess 2025-03-31T16:29:31.464864Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/vectors/idx_vector/indexImplLevelTable" PathDescription { Self { Name: "indexImplLevelTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 
UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplLevelTable" Columns { Name: "__ydb_parent" Type: "Uint64" TypeId: 4 Id: 1 NotNull: true IsBuildInProgress: false } Columns { Name: "__ydb_id" Type: "Uint64" TypeId: 4 Id: 2 NotNull: true IsBuildInProgress: false } Columns { Name: "__ydb_centroid" Type: "String" TypeId: 4097 Id: 3 NotNull: true IsBuildInProgress: false } KeyColumnNames: "__ydb_parent" KeyColumnNames: "__ydb_id" KeyColumnIds: 1 KeyColumnIds: 2 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:29:31.472017Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/vectors/idx_vector/indexImplPostingTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-31T16:29:31.472308Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/vectors/idx_vector/indexImplPostingTable" took 304us result status StatusSuccess 2025-03-31T16:29:31.472804Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/vectors/idx_vector/indexImplPostingTable" PathDescription { Self { Name: "indexImplPostingTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplPostingTable" Columns { Name: "__ydb_parent" Type: "Uint64" TypeId: 4 Id: 1 NotNull: true IsBuildInProgress: false } Columns { Name: "id1" Type: "String" TypeId: 4097 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "id2" Type: "String" TypeId: 4097 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "covered1" Type: "String" TypeId: 4097 Id: 4 NotNull: false IsBuildInProgress: false } Columns { Name: "covered2" Type: "String" TypeId: 4097 Id: 5 NotNull: false IsBuildInProgress: 
false } KeyColumnNames: "__ydb_parent" KeyColumnNames: "id1" KeyColumnNames: "id2" KeyColumnIds: 1 KeyColumnIds: 2 KeyColumnIds: 3 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpJoinOrder::CanonizedJoinOrderTPCDS64_small [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoinWithConstantFoldOpt+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 24221, MsgBus: 9816 2025-03-31T16:27:45.087493Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011191538773361:2203];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:27:45.087792Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018f6/r3tmp/tmpzlXlOV/pdisk_1.dat 2025-03-31T16:27:45.777806Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:27:45.780020Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:27:45.780093Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:27:45.786690Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24221, node 1 2025-03-31T16:27:46.067618Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:27:46.067638Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:27:46.067644Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:27:46.067738Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9816 TClient is connected to server localhost:9816 WaitRootIsUp 'Root'... 
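A note on the TVectorIndexTests::CreateTableMultiColumn describe output above: the vector index idx_vector (KMeans tree, metric DISTANCE_COSINE, 1024-dimensional float vectors) is backed by two hidden tables, indexImplLevelTable and indexImplPostingTable. The structs below sketch their row layouts as read off the column lists in the log; the struct and field names on the C++ side are illustrative assumptions, while the column names, key order, and YDB types in the comments are taken verbatim from the describe results.

    #include <cstdint>
    #include <string>

    // indexImplLevelTable: the KMeans tree itself.
    // Key columns in the describe output: (__ydb_parent, __ydb_id).
    struct TLevelRow {
        uint64_t Parent;      // __ydb_parent (Uint64): parent cluster id
        uint64_t Id;          // __ydb_id (Uint64): this cluster's id
        std::string Centroid; // __ydb_centroid (String): serialized centroid
    };

    // indexImplPostingTable: postings from leaf clusters to base-table rows.
    // Key columns: (__ydb_parent, id1, id2).
    struct TPostingRow {
        uint64_t Parent;      // __ydb_parent (Uint64): leaf cluster id
        std::string Id1;      // id1 (String): base-table primary key, part 1
        std::string Id2;      // id2 (String): base-table primary key, part 2
        std::string Covered1; // covered1 (String): covering data column
        std::string Covered2; // covered2 (String): covering data column
    };

    int main() {
        TLevelRow leaf{0, 1, "centroid-bytes"};
        TPostingRow posting{1, "id1-bytes", "id2-bytes", "c1", "c2"};
        return leaf.Id == posting.Parent ? 0 : 1; // leaf cluster links to its postings
    }

The split mirrors a classic KMeans-tree layout: the level table stores the cluster hierarchy (parent to child centroids), and the posting table maps each leaf cluster to base-table primary keys, carrying covered1/covered2 along so covered reads can be served from the index alone.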
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:27:47.288732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:47.332556Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:27:49.381985Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011208718643063:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:49.382069Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:49.382888Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011208718643075:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:49.387445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:27:49.403060Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011208718643077:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:27:49.495034Z node 1 :TX_PROXY ERROR: Actor# [1:7488011208718643128:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:27:49.865219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:27:50.032231Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011191538773361:2203];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:27:50.032279Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:27:50.117719Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011208718643386:2350];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:27:50.117922Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011208718643386:2350];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:27:50.118174Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011208718643386:2350];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:27:50.118275Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011208718643386:2350];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:27:50.118367Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011208718643386:2350];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:27:50.118463Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011208718643386:2350];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:27:50.118573Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011208718643386:2350];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:27:50.118706Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011208718643386:2350];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:27:50.118806Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011208718643386:2350];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:27:50.118905Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[1:7488011208718643386:2350];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:27:50.118998Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011208718643386:2350];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:27:50.119496Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011208718643386:2350];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:27:50.122497Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488011213013610698:2358];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:27:50.122558Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488011213013610698:2358];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:27:50.123461Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488011213013610698:2358];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:27:50.123618Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488011213013610698:2358];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:27:50.126324Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488011213013610698:2358];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:27:50.126484Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488011213013610698:2358];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:27:50.126593Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488011213013610698:2358];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:27:50.126704Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488011213013610698:2358];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:27:50.126845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488011213013610698:2358];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:27:50.126986Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488011213013610698:2358];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:27:50.127135Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037894;self_id=[1:7488011213013610698:2358];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:27:50.127239Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488011213013610698:2358];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:27:50.163897Z node 1 :TX_C ... oller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.496839Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039329;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.502844Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039341;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.512051Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039253;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.517933Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039259;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.526539Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039327;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.532690Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.535686Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039263;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.539836Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039267;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.541655Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.545963Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039317;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.552043Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.555982Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039257;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.558358Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.564672Z node 1 :TX_COLUMNSHARD_TX 
WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.568901Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.580038Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.583051Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.593913Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039345;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.596017Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.609371Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.616467Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.622712Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039319;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.629743Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.635964Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039295;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.642582Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039315;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.648815Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.664558Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.669995Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039367;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.684081Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039323;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 
2025-03-31T16:29:13.686518Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039211;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.693434Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.698839Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.707367Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.713923Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039335;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.718201Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039301;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.724670Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.730721Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.740975Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.746175Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.764099Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.773459Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.774911Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.788214Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.789525Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039265;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.794915Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:14.107373Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpgh3kq33dr99cectgh4cvm", SessionId: ydb://session/3?node_id=1&id=NWJkYzM5Y2QtMjVjNmI1MTAtMTY5MmQ5YmUtMmRlOWI1MTU=, Slow query, duration: 38.275501s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:29:14.502511Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:29:14.503028Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:29:14.504004Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;self_id=[1:7488011475006663708:9518];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039094;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038933;receive=72075186224039392; 2025-03-31T16:29:14.504401Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716;
>> KqpIndexLookupJoin::JoinWithSubquery+StreamLookup [GOOD]
>> KqpIndexLookupJoin::JoinWithSubquery-StreamLookup
>> TVectorIndexTests::CreateTableWithError
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::JoinWithComplexCondition-StreamLookupJoin [GOOD]
Test command err: Trying to start YDB, gRPC: 31850, MsgBus: 22822 2025-03-31T16:29:10.231720Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011556204611406:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:29:10.232519Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018aa/r3tmp/tmpPKz5Iz/pdisk_1.dat 2025-03-31T16:29:10.964529Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:29:10.975066Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:29:10.981688Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:29:10.984260Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31850, node 1 2025-03-31T16:29:11.227247Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
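For reference, this is the DDL batch reported by the KQP_SLOW_LOG entry above (duration 38.275501s; the same batch appears again later in this section with duration 41.196002s), with the escaped "text" field unfolded into plain YQL exactly as logged:

CREATE TABLE t1 (
    id1 Int32 NOT NULL,
    PRIMARY KEY (id1)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t2 (
    id2 Int64 NOT NULL,
    t1_id1 Int64 NOT NULL,
    -- random_field2 Int32
    PRIMARY KEY (id2)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t3 (
    id3 Int16 NOT NULL,
    -- random_field3 Int32
    PRIMARY KEY (id3)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);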
2025-03-31T16:29:11.227266Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:29:11.227276Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:29:11.227361Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22822 TClient is connected to server localhost:22822 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:29:12.362401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:29:12.433334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:29:12.747237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:29:13.063970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:29:13.169634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:29:15.227826Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011556204611406:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:29:15.227881Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:29:15.388693Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011577679449522:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:15.388796Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:15.714295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:29:15.747542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:29:15.786901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:29:15.819509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:29:15.892367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:29:15.970002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:29:16.022705Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011581974417336:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:16.022797Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:16.023141Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011581974417341:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:16.031215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:29:16.049332Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011581974417343:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:29:16.142704Z node 1 :TX_PROXY ERROR: Actor# [1:7488011581974417399:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:29:17.397127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:29:17.491897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 30929, MsgBus: 17606 2025-03-31T16:29:21.237357Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488011602138223423:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:29:21.237469Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018aa/r3tmp/tmpAnOki5/pdisk_1.dat 2025-03-31T16:29:21.366701Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:29:21.387186Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:29:21.387288Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:29:21.392257Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30929, node 2 2025-03-31T16:29:21.553696Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:29:21.553721Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:29:21.553728Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:29:21.553878Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17606 TClient is connected to server localhost:17606 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
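The recurring sequence above (pool fetch returns NOT_FOUND, ESchemeOpCreateResourcePool is proposed, the creator actor retries after "Transaction ... completed, doublechecking", and TX_PROXY then reports "path exist, request accepts it") is the workload manager lazily bootstrapping the built-in pool at /Root/.metadata/workload_manager/pools/default; the retry finding the path already present is the expected outcome, which is why these ERROR lines occur in tests that still finish [GOOD]. As a rough sketch of what a pool definition looks like in YQL (this assumes YDB's resource-pool DDL; the pool name and limit values below are illustrative, not taken from this run):

CREATE RESOURCE POOL sample_pool WITH (
    CONCURRENT_QUERY_LIMIT = 10, -- illustrative value, not from this log
    QUEUE_SIZE = 100             -- illustrative value, not from this log
);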
2025-03-31T16:29:22.241617Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:29:22.268439Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:29:22.293306Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:29:22.393711Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:29:22.610655Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:29:22.712906Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:29:25.440423Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488011619318094372:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:25.440532Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:25.551215Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:29:25.593386Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:29:25.648759Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:29:25.694005Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:29:25.737530Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:29:25.794720Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:29:25.887321Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488011619318094882:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:25.887458Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:25.895303Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488011619318094887:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:25.904176Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:29:25.935357Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488011619318094889:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:29:26.002126Z node 2 :TX_PROXY ERROR: Actor# [2:7488011619318094945:3442] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:29:26.238351Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488011602138223423:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:29:26.238498Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:29:27.285287Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:29:27.322505Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH22 [GOOD]
Test command err: Trying to start YDB, gRPC: 62762, MsgBus: 31001 2025-03-31T16:27:35.705465Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011150313263393:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:27:35.706030Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001901/r3tmp/tmpY1tCFD/pdisk_1.dat 2025-03-31T16:27:36.338925Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:27:36.345574Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:27:36.345649Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:27:36.356309Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62762, node 1 2025-03-31T16:27:36.714556Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:27:36.714577Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:27:36.714607Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:27:36.714740Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31001 TClient is connected to server localhost:31001 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:27:37.711340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:40.173622Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011171788100407:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:40.173733Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:40.174033Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011171788100419:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:40.178496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:27:40.197008Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011171788100421:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:27:40.257964Z node 1 :TX_PROXY ERROR: Actor# [1:7488011171788100472:2342] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:27:40.672314Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011150313263393:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:27:40.672372Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:27:40.851161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:27:41.183272Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011176083068000:2360];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:27:41.183483Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011176083068000:2360];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:27:41.183771Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011176083068000:2360];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:27:41.183925Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011176083068000:2360];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:27:41.184032Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011176083068000:2360];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:27:41.184154Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011176083068000:2360];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:27:41.184268Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011176083068000:2360];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:27:41.184382Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011176083068000:2360];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:27:41.184492Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011176083068000:2360];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:27:41.184597Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[1:7488011176083068000:2360];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:27:41.184728Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011176083068000:2360];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:27:41.184851Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011176083068000:2360];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:27:41.189653Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488011171788100688:2352];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:27:41.189724Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488011171788100688:2352];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:27:41.190011Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488011171788100688:2352];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:27:41.190119Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488011171788100688:2352];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:27:41.190219Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488011171788100688:2352];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:27:41.190318Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488011171788100688:2352];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:27:41.190413Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488011171788100688:2352];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:27:41.190511Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488011171788100688:2352];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:27:41.190619Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488011171788100688:2352];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:27:41.190722Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488011171788100688:2352];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:27:41.190826Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7488011171788100688:2352];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:27:41.190921Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488011171788100688:2352];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:27:41.280456Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488011171788100684:2350];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.c ... oller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:10.621285Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:10.631698Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:10.635522Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:10.641636Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:10.645327Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:10.648013Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:10.654354Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:10.663781Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:10.668494Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:10.674936Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039185;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:10.678792Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039213;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:10.689272Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039221;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:10.693248Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039233;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:10.703594Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039217;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:10.707839Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:10.713866Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:10.718228Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:10.728424Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:10.732322Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:10.738357Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:10.742086Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:10.751689Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039239;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:10.756361Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039211;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:10.761713Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:10.764187Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039209;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:10.773586Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:10.777425Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:10.787283Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:10.791202Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:10.796571Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:10.800774Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:10.806322Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:10.810569Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039276;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:10.820145Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039219;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:10.824251Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:10.829818Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:10.833954Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:10.844554Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:10.849954Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:10.850720Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:10.856083Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039237;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:10.865890Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:10.869576Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:10.881374Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:10.893671Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039241;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:11.279846Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpggy030g91nveqd5yszrsm", SessionId: ydb://session/3?node_id=1&id=YTZmZjQzMDQtNDg0NDcyMDItN2E0ZDI2OWEtM2E1MjY5Nw==, Slow query, duration: 41.196002s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:29:11.792658Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:29:11.798937Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:29:11.799481Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;self_id=[1:7488011455255988228:9380];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039094;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039392;receive=72075186224038933; 2025-03-31T16:29:11.799953Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716;
>> KqpJoinOrder::FourWayJoinLeftFirst-ColumnStore [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TestJoinHint2+ColumnStore [GOOD]
Test command err: Trying to start YDB, gRPC: 21793, MsgBus: 21697 2025-03-31T16:27:50.767279Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011214396096634:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:27:50.768081Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018f1/r3tmp/tmpWooXo5/pdisk_1.dat 2025-03-31T16:27:51.545890Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:27:51.545970Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:27:51.568201Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:27:51.642936Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21793, node 1 2025-03-31T16:27:51.851327Z node
1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:27:51.851377Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:27:51.851389Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:27:51.851502Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21697 TClient is connected to server localhost:21697 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:27:52.637035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:55.144075Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011235870933643:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:55.144084Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011235870933655:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:55.144195Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:55.148504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:27:55.159917Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011235870933657:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:27:55.251536Z node 1 :TX_PROXY ERROR: Actor# [1:7488011235870933708:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:27:55.579039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:27:55.711163Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011214396096634:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:27:55.711230Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:27:55.877919Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488011235870933951:2351];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:27:55.877919Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488011235870933953:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:27:55.878117Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488011235870933953:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:27:55.878125Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488011235870933951:2351];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:27:55.878403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488011235870933951:2351];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:27:55.878479Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488011235870933953:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:27:55.878536Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488011235870933951:2351];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:27:55.878584Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488011235870933953:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:27:55.878635Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488011235870933951:2351];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:27:55.878685Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037903;self_id=[1:7488011235870933953:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:27:55.878735Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488011235870933951:2351];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:27:55.878787Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488011235870933953:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:27:55.878881Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488011235870933951:2351];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:27:55.878901Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488011235870933953:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:27:55.879042Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488011235870933951:2351];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:27:55.879212Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488011235870933951:2351];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:27:55.879363Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488011235870933953:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:27:55.879384Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488011235870933951:2351];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:27:55.879480Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488011235870933953:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:27:55.879500Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488011235870933951:2351];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:27:55.879696Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488011235870933953:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:27:55.879742Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488011235870933951:2351];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:27:55.879797Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488011235870933953:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:27:55.879880Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488011235870933953:2352];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:27:55.939590Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488011235870933947:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.c ... oller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:18.295449Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039218;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:18.302473Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039260;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:18.314149Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039188;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:18.315584Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039206;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:18.326287Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039278;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:18.331322Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039246;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:18.340093Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039234;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:18.346128Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039280;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:18.351474Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039236;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:18.358475Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039286;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:18.363950Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039214;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:18.368972Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039228;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:18.370590Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039198;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:18.377214Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039312;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:18.381931Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039256;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:18.388761Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039252;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:18.394043Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039268;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:18.396157Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039298;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:18.401191Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039288;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:18.403363Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039300;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:18.408357Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039274;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:18.409584Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039272;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:18.416062Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039308;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:18.420153Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039282;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:18.426554Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039258;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:18.426602Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039244;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:18.437399Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039296;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:18.441972Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039220;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:18.453011Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039294;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:18.459964Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039194;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:18.461264Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039262;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:18.472305Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039276;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:18.475642Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039266;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:18.482215Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039222;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:18.485984Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039292;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:18.497495Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039248;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:18.500722Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039310;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:18.508589Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039284;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:18.510088Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039238;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:18.518645Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039306;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:18.520765Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039242;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:18.528367Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039250;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:18.529368Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039270;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:18.535635Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039290;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:18.542492Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039264;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:18.929662Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpgh8cq87rq0xbep9bvkz83", SessionId: ydb://session/3?node_id=1&id=MzdhNDZlYWMtNzM3ZmEzMTYtZWFmMmFiNy01ZDRmMDUzMA==, Slow query, duration: 38.201812s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:29:19.388668Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:29:19.389255Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:29:19.395495Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;self_id=[1:7488011442029399557:7603];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038933;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039392;receive=72075186224039094; 2025-03-31T16:29:19.395998Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> TVectorIndexTests::VectorKmeansTreePostingImplTable [GOOD] >> TVectorIndexTests::CreateTablePrefix |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::VectorKmeansTreePostingImplTable [GOOD] >> TVectorIndexTests::CreateTableWithError [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCDS64_small [GOOD] Test command err: Trying to start YDB, gRPC: 23929, MsgBus: 7969 2025-03-31T16:27:38.872669Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011161739671905:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:27:38.872724Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018fc/r3tmp/tmp8IqVCN/pdisk_1.dat 2025-03-31T16:27:39.684398Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:27:39.737220Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:27:39.737301Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:27:39.740528Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23929, node 1 2025-03-31T16:27:40.015163Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:27:40.015185Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:27:40.015194Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:27:40.015318Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7969 TClient is connected to server localhost:7969 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:27:41.112746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:41.171469Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:27:43.827440Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011183214509055:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:43.827549Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:43.828059Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011183214509067:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:27:43.835333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:27:43.853445Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011183214509069:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:27:43.873412Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011161739671905:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:27:43.873486Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:27:43.935000Z node 1 :TX_PROXY ERROR: Actor# [1:7488011183214509120:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:27:44.414763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:27:44.694420Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488011187509476640:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:27:44.703397Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488011187509476640:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:27:44.703778Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488011187509476640:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:27:44.703964Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488011187509476640:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:27:44.704110Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488011187509476640:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:27:44.704294Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488011187509476640:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:27:44.704459Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488011187509476640:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:27:44.706292Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011187509476642:2351];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:27:44.706337Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011187509476642:2351];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:27:44.706494Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7488011187509476642:2351];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:27:44.706609Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011187509476642:2351];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:27:44.706714Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011187509476642:2351];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:27:44.706800Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011187509476642:2351];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:27:44.706891Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011187509476642:2351];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:27:44.707037Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011187509476642:2351];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:27:44.707162Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011187509476642:2351];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:27:44.707273Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011187509476642:2351];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:27:44.707394Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011187509476642:2351];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:27:44.707499Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011187509476642:2351];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:27:44.727263Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488011187509476640:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:27:44.727438Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488011187509476640:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:27:44.727555Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488011187509476640:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:27:44.727665Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7488011187509476640:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:27:44.727763Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488011187509476640:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:27:44.787549Z node 1 :TX_C ... tablet_id=72075186224039295;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:12.938410Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039297;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:12.940486Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039289;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:12.949108Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039267;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:12.952299Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039291;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:12.962681Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039231;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:12.965634Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039301;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:12.977220Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039327;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:12.977393Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039303;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:12.993218Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039233;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:12.996327Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039329;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.006340Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039243;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.007785Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039299;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.020065Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039211;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.021583Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039321;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.034263Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039217;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.036558Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.043813Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039313;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.050220Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039339;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.053763Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039235;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.056489Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039253;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.062859Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039255;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.063598Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039275;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.069671Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039245;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.074584Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039309;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.085598Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039317;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.088708Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039273;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.097289Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039285;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.100599Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039261;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.110395Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039277;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.111283Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039287;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.118975Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039325;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.122393Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039311;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.132284Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039341;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.135181Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039307;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.143887Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039319;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.148722Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039333;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.156001Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039335;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.159672Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039293;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.165785Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039305;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.169187Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039343;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.177409Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039221;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.181380Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039337;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.192080Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039323;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.193234Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039279;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.204976Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039345;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.207299Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039201;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:13.431357Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpgh0kw52m2815jrhfm9mtn", SessionId: ydb://session/3?node_id=1&id=YzFiNmJmYTctMWI3NGQ5NjUtNzIyOWU0Yi0yYWMwYWY5Mw==, Slow query, duration: 40.666085s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:29:14.079975Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:29:14.080378Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:29:14.081770Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> TAsyncIndexTests::CdcAndSplitWithReboots[PipeResets] |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> KqpFlipJoin::RightOnly_2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::CreateTableWithError [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:29:34.854999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:29:34.855266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:29:34.855323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:29:34.855364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:29:34.855443Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:29:34.855490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:29:34.855563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:29:34.855675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:29:34.856106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:29:34.966070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:29:34.966145Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:29:34.971439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:29:34.971549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:29:34.971715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:29:34.984968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:29:34.985263Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:29:34.986092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:34.986368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:29:34.988756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:34.990233Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:29:34.990312Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:34.990565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:29:34.990628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:29:34.990687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:29:34.990928Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:29:35.001450Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:29:35.179768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:29:35.180058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:35.180393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason 
transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:29:35.180723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:29:35.180815Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:35.183777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:35.183982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:29:35.184245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:35.184331Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:29:35.184385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:29:35.184430Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:29:35.186957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:35.187036Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:29:35.187108Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:29:35.189571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:35.189632Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:35.189736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:35.189842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:29:35.194827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:29:35.197576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:29:35.197838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:29:35.199275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:35.199454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 
Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:29:35.199535Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:35.199883Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:29:35.199956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:35.200189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:29:35.200302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:29:35.203051Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:29:35.203124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:29:35.203322Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:35.203372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:29:35.203664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:35.203720Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:29:35.203827Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:29:35.203897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:29:35.203950Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:29:35.203983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:29:35.204024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:29:35.204100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:29:35.204157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:29:35.204195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:29:35.204283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:29:35.204334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:29:35.204377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:29:35.206918Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:29:35.207077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:29:35.207183Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-03-31T16:29:35.207218Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-03-31T16:29:35.207262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:29:35.207368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-03-31T16:29:35.211753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-03-31T16:29:35.212466Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-03-31T16:29:35.214963Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] Bootstrap 2025-03-31T16:29:35.245188Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] Become StateWork (SchemeCache [1:272:2263]) 2025-03-31T16:29:35.248033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateIndexedTable CreateIndexedTable { TableDescription { Name: "vectors" Columns { Name: "id" Type: "Uint64" } Columns { Name: "__ydb_parent" Type: "String" } KeyColumnNames: "id" } IndexDescription { Name: "idx_vector" KeyColumnNames: "__ydb_parent" Type: EIndexTypeGlobalVectorKmeansTree VectorIndexKmeansTreeDescription { Settings { settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 1024 } } } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:29:35.248484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTableIndex construct operation table path: /MyRoot/vectors domain path id: [OwnerId: 72057594046678944, LocalPathId: 1] domain path: /MyRoot shardsToCreate: 2 GetShardsInside: 0 MaxShards: 200000 2025-03-31T16:29:35.248632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 101:0, explain: index key column shouldn't have a reserved name, at schemeshard: 72057594046678944 2025-03-31T16:29:35.248672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusInvalidParameter, reason: index key column shouldn't have a reserved name, at schemeshard: 72057594046678944 2025-03-31T16:29:35.252849Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-31T16:29:35.264813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusInvalidParameter Reason: "index key column shouldn\'t have a reserved name" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:29:35.265050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: index key column shouldn't have a reserved name, operation: CREATE TABLE WITH INDEXES, path: /MyRoot/vectors 2025-03-31T16:29:35.265939Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 
txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2025-03-31T16:29:35.271040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateIndexedTable CreateIndexedTable { TableDescription { Name: "vectors" Columns { Name: "id" Type: "Uint64" } Columns { Name: "embedding" Type: "String" } KeyColumnNames: "id" } IndexDescription { Name: "idx_vector" KeyColumnNames: "embedding" Type: EIndexTypeGlobalVectorKmeansTree DataColumnNames: "id" VectorIndexKmeansTreeDescription { Settings { settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 1024 } } } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:29:35.271646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTableIndex construct operation table path: /MyRoot/vectors domain path id: [OwnerId: 72057594046678944, LocalPathId: 1] domain path: /MyRoot shardsToCreate: 2 GetShardsInside: 0 MaxShards: 200000 2025-03-31T16:29:35.271841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 102:0, explain: the same column can't be used as key and data column for one index, for example id, at schemeshard: 72057594046678944 2025-03-31T16:29:35.271899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusInvalidParameter, reason: the same column can't be used as key and data column for one index, for example id, at schemeshard: 72057594046678944 2025-03-31T16:29:35.280360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusInvalidParameter Reason: "the same column can\'t be used as key and data column for one index, for example id" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:29:35.280617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: the same column can't be used as key and data column for one index, for example id, operation: CREATE TABLE WITH INDEXES, path: /MyRoot/vectors TestModificationResult got TxId: 102, wait until txId: 102 |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FourWayJoinLeftFirst-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 7491, MsgBus: 26849 2025-03-31T16:28:40.104888Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011428533786663:2201];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:40.104938Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018c3/r3tmp/tmpLqP8kY/pdisk_1.dat 2025-03-31T16:28:41.083368Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:28:41.124165Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:28:41.124241Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:28:41.125285Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:28:41.133748Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7491, node 1 2025-03-31T16:28:41.387768Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:28:41.387789Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:28:41.387807Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:28:41.387940Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26849 TClient is connected to server localhost:26849 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:28:42.432385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:44.848013Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011445713656377:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:44.848156Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:44.855230Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011445713656389:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:44.863922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:28:44.876507Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-31T16:28:44.877409Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011445713656391:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:28:44.948702Z node 1 :TX_PROXY ERROR: Actor# [1:7488011445713656442:2340] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:28:45.074762Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011428533786663:2201];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:45.074836Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:28:45.305937Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:28:45.436271Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:28:45.469855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:28:45.506753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:28:45.548323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:28:45.871103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:28:45.961953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:28:46.042125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:28:46.104406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-31T16:28:46.180132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-03-31T16:28:46.235897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-03-31T16:28:46.275841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 
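For comparison with the two StatusInvalidParameter rejections logged earlier for txId 101 and txId 102 ("index key column shouldn't have a reserved name" and "the same column can't be used as key and data column for one index, for example id"), a variant of the txId 102 transaction that should satisfy both checks is sketched below. This is a hypothetical, untested reconstruction derived only from the transactions quoted in this log, not output from the test run: the index key column keeps the non-reserved name "embedding" (instead of the reserved "__ydb_parent" from txId 101), and the DataColumnNames entry that duplicated the key column "id" in txId 102 is dropped.

Transaction {
  WorkingDir: "/MyRoot"
  OperationType: ESchemeOpCreateIndexedTable
  CreateIndexedTable {
    TableDescription {
      Name: "vectors"
      Columns { Name: "id" Type: "Uint64" }
      Columns { Name: "embedding" Type: "String" }
      KeyColumnNames: "id"
    }
    IndexDescription {
      Name: "idx_vector"
      KeyColumnNames: "embedding"
      Type: EIndexTypeGlobalVectorKmeansTree
      VectorIndexKmeansTreeDescription {
        Settings { settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 1024 } }
      }
    }
  }
}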
2025-03-31T16:28:46.324474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:28:47.310814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-03-31T16:28:47.365586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T16:28:47.445913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-31T16:28:47.531946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-31T16:28:47.581324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-31T16:28:47.661749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-31T16:28:47.718043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-31T16:28:47.797971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-31T16:28:47.839029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-03-31T16:28:47.875024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-31T16:28:47.958183Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-31T16:28:48.002198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-03-31T16:28:48.042741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-03-31T16:28:48.079264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-03-31T16:28:48.159868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-03-31T16:28:48.202840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, ... oller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:25.417048Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038521;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:25.421049Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038507;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:25.422690Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038523;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:25.426995Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038508;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:25.427707Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038504;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:25.433040Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038526;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:25.433040Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038451;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:25.438815Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038449;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:25.438815Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038512;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:25.444517Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038491;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:25.444516Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038489;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:25.450267Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038540;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:25.450268Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038490;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:25.455888Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038485;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:25.455890Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038543;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:25.461809Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038488;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:25.461809Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038556;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:25.467422Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038469;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:25.467422Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038479;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:25.473057Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038550;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:25.473057Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038487;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:25.478893Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038483;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:25.478893Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038492;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:25.484495Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038524;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:25.484495Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038435;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:25.490429Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038453;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:25.490429Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038552;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:25.496280Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038455;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:25.496281Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038494;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:25.501872Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038481;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:25.501872Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038457;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:25.508016Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038459;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:25.508016Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038473;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:25.513781Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038475;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:25.513781Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038567;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:25.519709Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038548;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:25.519728Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038462;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:25.525589Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038502;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:25.525590Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038563;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:25.529782Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038565;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:25.535262Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038555;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:25.535267Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038549;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:25.540754Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038554;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:25.543800Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038577;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:25.552531Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038558;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:25.635653Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpghhjcerx2m5htbwrjmcnv", SessionId: ydb://session/3?node_id=1&id=YjgzZDdlZmUtZjBiNmQ2NWMtODgzYTE1ZDUtY2UyNmU0MzQ=, Slow query, duration: 35.508480s, 
status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:29:25.985310Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:29:25.985746Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;self_id=[1:7488011578857668417:5860];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038629;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038170;receive=72075186224038331; 2025-03-31T16:29:25.985772Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:29:25.986513Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> TVectorIndexTests::CreateTablePrefix [GOOD] >> KqpJoinOrder::TPCDS96-ColumnStore [GOOD] |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpFlipJoin::RightOnly_2 [GOOD] Test command err: Trying to start YDB, gRPC: 8369, MsgBus: 61001 2025-03-31T16:29:17.975819Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011586692171971:2268];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:29:17.975869Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018a4/r3tmp/tmpDlpSs5/pdisk_1.dat 2025-03-31T16:29:18.674230Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:29:18.674347Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:29:18.676594Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:29:18.749164Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8369, node 1 2025-03-31T16:29:19.003636Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:29:19.003661Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:29:19.003669Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:29:19.003807Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61001 TClient is connected to server 
localhost:61001 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:29:20.354029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:29:20.372388Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:29:20.378265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:29:20.609132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:29:20.909593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:29:21.006341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:29:22.991527Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011586692171971:2268];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:29:23.006040Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:29:23.088105Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011612461977323:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:23.088226Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:23.441414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:29:23.504438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:29:23.565163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:29:23.617510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:29:23.661826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:29:23.764220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:29:23.859532Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011612461977840:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:23.859659Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:23.860080Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011612461977845:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:23.864259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:29:23.891381Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011612461977847:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:29:23.982680Z node 1 :TX_PROXY ERROR: Actor# [1:7488011612461977902:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:29:25.309143Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:29:25.382693Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:29:25.458732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T16:29:25.521106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 6623, MsgBus: 16487 2025-03-31T16:29:27.336096Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488011629497336229:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:29:27.336130Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018a4/r3tmp/tmpaayFBi/pdisk_1.dat 2025-03-31T16:29:27.636047Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:29:27.658868Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:29:27.658961Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:29:27.660699Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6623, node 2 2025-03-31T16:29:27.867673Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:29:27.867694Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:29:27.867703Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:29:27.867819Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16487 TClient is connected to server localhost:16487 WaitRootIsUp 'Root'... 
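For readability, the statement text from the KQP_SLOW_LOG entry above (TraceId "01jqpghhjcerx2m5htbwrjmcnv", duration 35.508480s) with the \n escapes in the logged string expanded; this is a verbatim unescaping of the quoted query, not new DDL:

CREATE TABLE t1 (
    id1 Int32 NOT NULL,
    PRIMARY KEY (id1)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t2 (
    id2 Int64 NOT NULL,
    t1_id1 Int64 NOT NULL,
    -- random_field2 Int32
    PRIMARY KEY (id2)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t3 (
    id3 Int16 NOT NULL,
    -- random_field3 Int32
    PRIMARY KEY (id3)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);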
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:29:28.573258Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:29:28.591701Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:29:28.616403Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:29:28.696504Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:29:28.911100Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:29:28.999356Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:29:31.795259Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488011646677207189:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:31.795381Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:31.839344Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:29:31.880106Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:29:31.929577Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:29:31.968792Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:29:32.008190Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:29:32.067035Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:29:32.166736Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488011650972175003:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:32.166826Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:32.167076Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488011650972175008:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:32.171485Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:29:32.189164Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2025-03-31T16:29:32.189849Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488011650972175010:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:29:32.281826Z node 2 :TX_PROXY ERROR: Actor# [2:7488011650972175066:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:29:32.343180Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488011629497336229:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:29:32.343314Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:29:33.416942Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:29:33.477306Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-31T16:29:33.552073Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-31T16:29:33.639130Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::CreateTablePrefix [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:29:36.185003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:29:36.185123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:29:36.185163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:29:36.185221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:29:36.185297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:29:36.185331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:29:36.185391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:29:36.185476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
[RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:29:36.191401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:29:36.311330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:29:36.311398Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:29:36.324364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:29:36.324478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:29:36.324630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:29:36.335052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:29:36.335295Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:29:36.335947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:36.336169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:29:36.338149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:36.339277Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:29:36.339339Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:36.339543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:29:36.339603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:29:36.339646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:29:36.339820Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:29:36.347287Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:29:36.479685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:29:36.479918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:36.480131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:29:36.480377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:29:36.480469Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 
2025-03-31T16:29:36.482756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:36.482888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:29:36.483138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:36.483218Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:29:36.483250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:29:36.483282Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:29:36.485155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:36.485220Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:29:36.485253Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:29:36.486912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:36.486953Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:36.487007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:36.487067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:29:36.490739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:29:36.492672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:29:36.492883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:29:36.494088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:36.494224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:29:36.494286Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:36.494593Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:29:36.494650Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:36.494861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:29:36.494945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:29:36.496812Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:29:36.496853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:29:36.497048Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:36.497088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:29:36.497322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:36.497365Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:29:36.497450Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:29:36.497480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:29:36.497514Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:29:36.497567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:29:36.497604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:29:36.497656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:29:36.497690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:29:36.497715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:29:36.497771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:29:36.497821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:29:36.497851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:29:36.499729Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:29:36.499835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:29:36.499878Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
ration: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:29:37.124258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:29:37.124277Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-03-31T16:29:37.124400Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:29:37.124523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:29:37.124551Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-03-31T16:29:37.124574Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-03-31T16:29:37.124596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-03-31T16:29:37.124826Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:29:37.124900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:29:37.124920Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-03-31T16:29:37.124963Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2025-03-31T16:29:37.124987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-03-31T16:29:37.125234Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:29:37.125310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:29:37.125336Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-03-31T16:29:37.125361Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 18446744073709551615 2025-03-31T16:29:37.125400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, 
LocalPathId: 6] was 4 2025-03-31T16:29:37.125452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/5, is published: true 2025-03-31T16:29:37.129963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:3, at schemeshard: 72057594046678944 2025-03-31T16:29:37.130024Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 102:3 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:29:37.130357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-03-31T16:29:37.130473Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:3 progress is 2/5 2025-03-31T16:29:37.130504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 2/5 2025-03-31T16:29:37.130535Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:3 progress is 2/5 2025-03-31T16:29:37.130565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 2/5 2025-03-31T16:29:37.130609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 2/5, is published: true 2025-03-31T16:29:37.131756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:4, at schemeshard: 72057594046678944 2025-03-31T16:29:37.131796Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 102:4 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:29:37.132049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-03-31T16:29:37.132150Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:4 progress is 3/5 2025-03-31T16:29:37.132195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 3/5 2025-03-31T16:29:37.132223Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:4 progress is 3/5 2025-03-31T16:29:37.132244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 3/5 2025-03-31T16:29:37.132270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 3/5, is published: true 2025-03-31T16:29:37.133378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:2, at schemeshard: 72057594046678944 2025-03-31T16:29:37.133420Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 102:2 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:29:37.133564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-03-31T16:29:37.133647Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:2 progress is 4/5 2025-03-31T16:29:37.133668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 4/5 2025-03-31T16:29:37.133696Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:2 progress is 4/5 2025-03-31T16:29:37.133717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 4/5 2025-03-31T16:29:37.133740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 4/5, is published: true 2025-03-31T16:29:37.134016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-31T16:29:37.134209Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-31T16:29:37.134240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-31T16:29:37.134278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-31T16:29:37.134385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:29:37.134418Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:29:37.134576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-31T16:29:37.134657Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 5/5 2025-03-31T16:29:37.134680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 5/5 2025-03-31T16:29:37.134715Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 5/5 2025-03-31T16:29:37.134734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 5/5 2025-03-31T16:29:37.134828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 5/5, is published: true 2025-03-31T16:29:37.134895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:448:2394] message: TxId: 102 2025-03-31T16:29:37.134932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 5/5 2025-03-31T16:29:37.135002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-03-31T16:29:37.135036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-03-31T16:29:37.135154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-31T16:29:37.135196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:1 2025-03-31T16:29:37.135215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:1 2025-03-31T16:29:37.135241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-03-31T16:29:37.135258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:2 2025-03-31T16:29:37.135290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:2 2025-03-31T16:29:37.135344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-03-31T16:29:37.135376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:3 2025-03-31T16:29:37.135393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:3 2025-03-31T16:29:37.135424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-03-31T16:29:37.135444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:4 2025-03-31T16:29:37.135460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:4 2025-03-31T16:29:37.135491Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2025-03-31T16:29:37.135992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-31T16:29:37.136037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-31T16:29:37.136064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-31T16:29:37.136515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-31T16:29:37.136578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-31T16:29:37.136620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-31T16:29:37.139198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-31T16:29:37.139250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:591:2530] TestWaitNotification: OK eventTxId 102 |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::SplitIndexWithReboots[TabletReboots] |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::MergeBothWithReboots[PipeResets] |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS96-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 32494, MsgBus: 7496 2025-03-31T16:28:39.636406Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011425283146179:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:39.636815Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018c4/r3tmp/tmpUrFPyn/pdisk_1.dat 2025-03-31T16:28:40.373898Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:28:40.373984Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:28:40.382888Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:28:40.402823Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32494, node 1 2025-03-31T16:28:40.677660Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:28:40.677685Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:28:40.677692Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:28:40.677844Z node 1 
:NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7496 TClient is connected to server localhost:7496 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:28:41.453107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:41.523575Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:28:44.146900Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011446757983193:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:44.147033Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:44.147324Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011446757983205:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:44.154394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:28:44.173744Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011446757983207:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:28:44.254994Z node 1 :TX_PROXY ERROR: Actor# [1:7488011446757983258:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:28:44.616403Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:28:44.625476Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011425283146179:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:44.625528Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:28:44.736812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:28:44.778360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:28:44.833710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:28:44.886220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:28:45.115483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:28:45.152377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:28:45.202441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:28:45.257487Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-31T16:28:45.297024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-03-31T16:28:45.382057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-03-31T16:28:45.441651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 
2025-03-31T16:28:45.514983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:28:46.356089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-03-31T16:28:46.443412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T16:28:46.495319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-31T16:28:46.544472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-31T16:28:46.621479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-31T16:28:46.698443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-31T16:28:46.760318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-31T16:28:46.826268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-31T16:28:46.858994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-03-31T16:28:46.888149Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-31T16:28:46.926054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-31T16:28:46.961043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-03-31T16:28:47.038895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-03-31T16:28:47.095368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-03-31T16:28:47.130309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-03-31T16:28:47.179454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-03-31T16:28:47.257552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable ... tablet_id=72075186224038541;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.079621Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038571;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.082948Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038549;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.084429Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038579;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.088352Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038576;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.094072Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038574;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.098465Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038587;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.105419Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038581;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.107329Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038578;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.110521Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038595;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.112803Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038575;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.117880Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038570;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.121174Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038572;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.123056Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038557;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.126789Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038568;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.128747Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038553;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.134676Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038555;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.140500Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038517;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.146515Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038521;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.146992Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038566;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.158886Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038561;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.167144Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038515;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.172669Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038563;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.176145Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038547;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.182691Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038539;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.185987Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038559;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.197224Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038545;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.202925Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038533;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.210864Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038537;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.212451Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038535;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.219040Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038519;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.223632Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038525;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.225201Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038513;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.229466Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038509;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.230809Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038567;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.235681Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038511;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.236715Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038538;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.241492Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038529;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.242010Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038569;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.247396Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038531;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.247602Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038523;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.256907Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038536;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.259953Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038584;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.264869Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038507;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.264985Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038551;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.270248Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038505;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.271447Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038503;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.375318Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpghga746zc6kvegxr9c4ca", SessionId: ydb://session/3?node_id=1&id=MWY3OTY0MDAtODcwY2JlZmQtZjU4MDdjNWEtMjVlNTRjNWM=, Slow query, duration: 34.534722s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:29:23.985562Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:29:23.985563Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:29:23.986445Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::OnlineBuild |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TPQCompatTest::BadTopics [GOOD] >> TPQCompatTest::CommitOffsets >> TAsyncIndexTests::SplitBothWithReboots[PipeResets] |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::CreateTable |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::OnlineBuild [GOOD] |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> KqpIndexLookupJoin::JoinWithSubquery-StreamLookup [GOOD] >> KqpJoinOrder::FiveWayJoinWithComplexPreds2-ColumnStore [GOOD] >> KqpJoinOrder::ShuffleEliminationDifferentJoinPredicateKeyTypeCorrectness2 [GOOD] |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::OnlineBuild [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-31T16:29:41.297995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue 
configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:29:41.298103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:29:41.298150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:29:41.298205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:29:41.298256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:29:41.298282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:29:41.298345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:29:41.298435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:29:41.298747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:29:41.390338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:29:41.390400Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:29:41.400411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:29:41.400791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:29:41.401002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:29:41.408338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:29:41.408595Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:29:41.409365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:41.409726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:29:41.411626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:41.412887Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:29:41.412954Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:41.413057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:29:41.413120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:29:41.413169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:29:41.413406Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:29:41.420326Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for 
TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-03-31T16:29:41.566645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:29:41.566909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:41.567166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:29:41.567405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:29:41.567471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:41.569706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:41.569875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:29:41.570109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:41.570177Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:29:41.570217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:29:41.570253Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:29:41.572139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:41.572269Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:29:41.572321Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:29:41.574188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:41.574237Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:41.574279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:41.574344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:29:41.578517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:29:41.580564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 
72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:29:41.580765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:29:41.581854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:41.582013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:29:41.582072Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:41.582347Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:29:41.582399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:41.582577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:29:41.582680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:29:41.584456Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:29:41.584498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:29:41.584639Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:41.584674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:29:41.584983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:41.585022Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:29:41.585103Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:29:41.585128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:29:41.585158Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:29:41.585206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:29:41.585246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:29:41.585298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:29:41.585328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:29:41.585351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:29:41.585401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:29:41.585431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:29:41.585458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:29:41.586855Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:29:41.586964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:29:41.586998Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710760, database: /MyRoot, subject: , status: StatusAccepted, operation: DROP LOCK, path: /MyRoot/Table 2025-03-31T16:29:42.358236Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvModifySchemeTransactionResult: txId# 281474976710760, status# StatusAccepted 2025-03-31T16:29:42.358286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Message: Status: StatusAccepted TxId: 281474976710760 SchemeshardId: 72057594046678944 PathId: 2 2025-03-31T16:29:42.358418Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, BuildIndexId: 102, cookie: 102, txId: 281474976710760, status: StatusAccepted 2025-03-31T16:29:42.358549Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, buildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndex, IndexColumn: indexed, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:385:2357], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }}, record: Status: StatusAccepted TxId: 281474976710760 SchemeshardId: 72057594046678944 PathId: 2 2025-03-31T16:29:42.358840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2025-03-31T16:29:42.358891Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 ProgressState 2025-03-31T16:29:42.358938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710760 ready parts: 1/1 2025-03-31T16:29:42.359060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 281474976710760 MinStep: 0 MaxStep: 
18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:29:42.362084Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-03-31T16:29:42.362167Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndex, IndexColumn: indexed, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:385:2357], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-31T16:29:42.362594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710760:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710760 msg type: 269090816 2025-03-31T16:29:42.362716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710760, partId: 4294967295, tablet: 72057594046316545 2025-03-31T16:29:42.362905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710760, at schemeshard: 72057594046678944 2025-03-31T16:29:42.362936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 0/1, is published: true 2025-03-31T16:29:42.362992Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710760, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 281474976710760 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710760 at step: 5000006 2025-03-31T16:29:42.363300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000006, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:42.363394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710760 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000006 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:29:42.363464Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 HandleReply TEvOperationPlan: step# 5000006 2025-03-31T16:29:42.363513Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710760:0 128 -> 240 2025-03-31T16:29:42.365502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2025-03-31T16:29:42.365572Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710760:0 ProgressState 2025-03-31T16:29:42.365657Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done 
id#281474976710760:0 progress is 1/1 2025-03-31T16:29:42.365704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-03-31T16:29:42.365753Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1 2025-03-31T16:29:42.365810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-03-31T16:29:42.365870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 1/1, is published: true 2025-03-31T16:29:42.365937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:124:2150] message: TxId: 281474976710760 2025-03-31T16:29:42.365978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-03-31T16:29:42.366009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710760:0 2025-03-31T16:29:42.366037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710760:0 2025-03-31T16:29:42.366096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 FAKE_COORDINATOR: Erasing txId 281474976710760 2025-03-31T16:29:42.367988Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2025-03-31T16:29:42.368066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710760 2025-03-31T16:29:42.368126Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfoId: 102 2025-03-31T16:29:42.368217Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndex, IndexColumn: indexed, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:385:2357], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-31T16:29:42.369919Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-03-31T16:29:42.370009Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndex, IndexColumn: indexed, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:385:2357], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 
281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-31T16:29:42.370058Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-03-31T16:29:42.371749Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-03-31T16:29:42.371845Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndex, IndexColumn: indexed, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:385:2357], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-31T16:29:42.371881Z node 1 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2025-03-31T16:29:42.372008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-31T16:29:42.372052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:476:2437] TestWaitNotification: OK eventTxId 102 >> TAsyncIndexTests::CreateTable [GOOD] |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::JoinWithSubquery-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 6514, MsgBus: 15248 2025-03-31T16:29:24.300803Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011616725803632:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:29:24.310366Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00189c/r3tmp/tmpferIqw/pdisk_1.dat 2025-03-31T16:29:25.033837Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:29:25.038016Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:29:25.038092Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2025-03-31T16:29:25.041102Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6514, node 1 2025-03-31T16:29:25.299949Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:29:25.299971Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:29:25.299994Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:29:25.300091Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15248 TClient is connected to server localhost:15248 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:29:26.125985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:29:26.204287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:29:26.437724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:29:26.768203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:29:26.885078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:29:28.989624Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011633905674581:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:28.989752Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:29.279936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:29:29.307207Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011616725803632:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:29:29.307279Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:29:29.339175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:29:29.404684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:29:29.448779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:29:29.483572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:29:29.572199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:29:29.631547Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011638200642398:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:29.631606Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:29.635508Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011638200642403:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:29.639507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:29:29.657404Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011638200642405:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:29:29.754466Z node 1 :TX_PROXY ERROR: Actor# [1:7488011638200642461:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:29:30.967435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:29:31.042677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:29:31.084209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T16:29:31.133235Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T16:29:31.215285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-31T16:29:31.255614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 20043, MsgBus: 4619 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00189c/r3tmp/tmpO66Zq5/pdisk_1.dat 2025-03-31T16:29:33.935214Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:29:33.967313Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:29:33.982645Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:29:33.982713Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:29:33.992196Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20043, node 2 2025-03-31T16:29:34.155299Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:29:34.155321Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:29:34.155328Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:29:34.155444Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4619 TClient is connected to server localhost:4619 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:29:34.961430Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:29:34.972038Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:29:34.981914Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:29:35.076012Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:29:35.273662Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:29:35.387797Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:29:38.176177Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488011677885328989:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:38.176299Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:38.207245Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:29:38.255217Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:29:38.301174Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:29:38.345693Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:29:38.429403Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:29:38.521008Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:29:38.611321Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488011677885329508:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:38.611446Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:38.619320Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488011677885329513:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:38.625493Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:29:38.648154Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-31T16:29:38.648918Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488011677885329515:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:29:38.749965Z node 2 :TX_PROXY ERROR: Actor# [2:7488011677885329573:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:29:40.108137Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:29:40.157343Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:29:40.246129Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T16:29:40.322328Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T16:29:40.378725Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-31T16:29:40.433540Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::CreateTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:29:43.123651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:29:43.123742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:29:43.123782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:29:43.123812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:29:43.123878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:29:43.123906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:29:43.123960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:29:43.124037Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:29:43.124342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:29:43.213442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:29:43.213506Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:29:43.218010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:29:43.218118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:29:43.218280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:29:43.223132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:29:43.223335Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:29:43.224011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:43.224247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:29:43.225919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:43.226985Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:29:43.227042Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:43.227269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:29:43.227324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:29:43.227367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:29:43.227524Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:29:43.234150Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:29:43.362418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:29:43.362669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:43.362893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:29:43.363232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:29:43.363305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at 
schemeshard: 72057594046678944 2025-03-31T16:29:43.365369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:43.365511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:29:43.365733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:43.365830Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:29:43.365866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:29:43.365894Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:29:43.367518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:43.367562Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:29:43.367586Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:29:43.368939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:43.368970Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:43.369010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:43.369059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:29:43.371816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:29:43.373328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:29:43.373508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:29:43.374413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:43.374521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:29:43.374567Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:43.374754Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 
2025-03-31T16:29:43.374790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:43.374918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:29:43.374992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:29:43.376553Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:29:43.376586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:29:43.376730Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:43.376759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:29:43.376912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:43.376943Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:29:43.377020Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:29:43.377059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:29:43.377113Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:29:43.377139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:29:43.377163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:29:43.377204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:29:43.377229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:29:43.377249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:29:43.377298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:29:43.377321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:29:43.377341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:29:43.378770Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:29:43.378867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:29:43.378896Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
e.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:43.668499Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:29:43.668545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-03-31T16:29:43.668589Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2025-03-31T16:29:43.669237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 319 RawX2: 4294969599 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-03-31T16:29:43.669284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 2 2025-03-31T16:29:43.669383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:2, at schemeshard: 72057594046678944, message: Source { RawX1: 319 RawX2: 4294969599 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-03-31T16:29:43.669418Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-03-31T16:29:43.669503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 319 RawX2: 4294969599 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-03-31T16:29:43.669560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:2, shardIdx: 72057594046678944:2, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:43.669591Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:2, at schemeshard: 72057594046678944 2025-03-31T16:29:43.669618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:2, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-03-31T16:29:43.669654Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:2 129 -> 240 2025-03-31T16:29:43.677351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-31T16:29:43.678625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-31T16:29:43.683693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-31T16:29:43.683869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:29:43.684169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-31T16:29:43.684507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2025-03-31T16:29:43.684768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:29:43.685167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, 
operationId: 101:2, at schemeshard: 72057594046678944 2025-03-31T16:29:43.685394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:29:43.685457Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-03-31T16:29:43.685570Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 2/3 2025-03-31T16:29:43.685608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2025-03-31T16:29:43.685643Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 2/3 2025-03-31T16:29:43.685672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2025-03-31T16:29:43.685708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 2/3, is published: true 2025-03-31T16:29:43.686371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:2, at schemeshard: 72057594046678944 2025-03-31T16:29:43.686416Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:2 ProgressState 2025-03-31T16:29:43.686491Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:2 progress is 3/3 2025-03-31T16:29:43.686550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-03-31T16:29:43.686587Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:2 progress is 3/3 2025-03-31T16:29:43.686610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-03-31T16:29:43.686632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 3/3, is published: true 2025-03-31T16:29:43.686721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:377:2345] message: TxId: 101 2025-03-31T16:29:43.686776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-03-31T16:29:43.686818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-03-31T16:29:43.686851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-03-31T16:29:43.687000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-31T16:29:43.687050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:1 2025-03-31T16:29:43.687070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:1 2025-03-31T16:29:43.687248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-03-31T16:29:43.687279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:2 2025-03-31T16:29:43.687313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:2 2025-03-31T16:29:43.687364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-03-31T16:29:43.690647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-31T16:29:43.690703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:378:2346] TestWaitNotification: OK eventTxId 101 2025-03-31T16:29:43.691325Z 
node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-31T16:29:43.691652Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex" took 302us result status StatusSuccess 2025-03-31T16:29:43.692508Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex" PathDescription { Self { Name: "UserDefinedIndex" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "UserDefinedIndex" LocalPathId: 3 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 
8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> KqpJoinOrder::TestJoinOrderHintsManyHintTrees [GOOD] |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoinWithComplexPreds2-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 10637, MsgBus: 1275 2025-03-31T16:28:47.365479Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011459298433717:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:47.366146Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018be/r3tmp/tmplEgTd4/pdisk_1.dat 2025-03-31T16:28:48.078312Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:28:48.078444Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:28:48.088294Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:28:48.141794Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10637, node 1 2025-03-31T16:28:48.404365Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:28:48.404384Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:28:48.404392Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:28:48.404484Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1275 TClient is connected to server localhost:1275 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:28:49.364617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:49.408012Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:28:51.813458Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011476478303434:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:51.813576Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:51.814215Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011476478303446:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:51.822642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:28:51.835152Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011476478303448:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:28:51.928513Z node 1 :TX_PROXY ERROR: Actor# [1:7488011476478303499:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:28:52.343178Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011459298433717:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:52.343272Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:28:52.435168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:28:52.573747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:28:52.623880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:28:52.675231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:28:52.734632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:28:52.955972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:28:53.011146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:28:53.081268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:28:53.115265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-31T16:28:53.158282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-03-31T16:28:53.241355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-03-31T16:28:53.340448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 
2025-03-31T16:28:53.414675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:28:54.151924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-03-31T16:28:54.232112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T16:28:54.283760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-31T16:28:54.347362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-31T16:28:54.415609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-31T16:28:54.476284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-31T16:28:54.515436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-31T16:28:54.568984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-31T16:28:54.626434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-03-31T16:28:54.706894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-31T16:28:54.784936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-31T16:28:54.834943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-03-31T16:28:54.880352Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-03-31T16:28:54.958353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-03-31T16:28:54.999152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-03-31T16:28:55.071319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-03-31T16:28:55.117799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable ... tablet_id=72075186224038574;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:33.220374Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038603;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:33.224278Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038634;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:33.234567Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038527;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:33.235843Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038606;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:33.240292Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038577;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:33.241972Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038597;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:33.246405Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038557;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:33.249480Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038621;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:33.256438Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038598;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:33.262869Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038633;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:33.268814Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038631;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:33.269542Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038619;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:33.278679Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038589;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:33.281860Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038593;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:33.285127Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038591;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:33.291317Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038659;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:33.294526Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038571;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:33.298032Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038635;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:33.300104Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038535;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:33.306406Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038579;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:33.306892Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038599;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:33.315059Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038607;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:33.315979Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038623;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:33.334897Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038641;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:33.335449Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038611;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:33.341364Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038609;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:33.346933Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038655;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:33.351955Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038625;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:33.352017Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038615;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:33.358181Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038639;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:33.359864Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038657;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:33.364284Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038661;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:33.365349Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038617;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:33.370310Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038613;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:33.371074Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038585;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:33.378450Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038649;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:33.382464Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038581;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:33.384864Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038627;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:33.394195Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038601;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:33.397375Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038645;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:33.408525Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038643;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:33.409930Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038637;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:33.416193Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038647;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:33.422798Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038653;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:33.427055Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:33.439761Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038651;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:33.647223Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpghr8zct1f4jqqb4p6354p", SessionId: ydb://session/3?node_id=1&id=MWUwMWI3NGYtNDA0MDhhZi1lNzc3NjBlYi0xZjBkYWUxNQ==, Slow query, duration: 36.655359s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:29:33.977914Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:29:33.978341Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:29:33.979285Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> TAsyncIndexTests::MergeBothWithReboots[TabletReboots] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::ShuffleEliminationDifferentJoinPredicateKeyTypeCorrectness2 [GOOD] Test command err: Trying to start YDB, gRPC: 14273, MsgBus: 1995 2025-03-31T16:28:47.007783Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011456358133633:2289];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:47.007840Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018c1/r3tmp/tmpEpdrBo/pdisk_1.dat 2025-03-31T16:28:47.348761Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:28:47.348879Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:28:47.356191Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:28:47.362458Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14273, node 1 2025-03-31T16:28:47.538046Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:28:47.538075Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:28:47.538087Z node 1 :NET_CLASSIFIER WARN: 
failed to initialize from file: (empty maybe) 2025-03-31T16:28:47.538226Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1995 TClient is connected to server localhost:1995 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:28:48.490378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:48.519515Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:28:51.383400Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011473538003221:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:51.383499Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:51.387193Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011473538003233:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:51.393060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:28:51.409682Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011473538003235:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:28:51.478992Z node 1 :TX_PROXY ERROR: Actor# [1:7488011473538003286:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:28:51.823444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:28:51.942208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:28:52.005273Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:28:52.011304Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011456358133633:2289];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:52.011375Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:28:52.047316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:28:52.086790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:28:52.358538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:28:52.392510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:28:52.429269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:28:52.465600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-31T16:28:52.512629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-03-31T16:28:52.563981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-03-31T16:28:52.644917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 
2025-03-31T16:28:52.704179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:28:53.633277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-03-31T16:28:53.702299Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T16:28:53.816284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-31T16:28:53.911146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-31T16:28:53.954854Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-31T16:28:54.000371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-31T16:28:54.042874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-31T16:28:54.138991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-31T16:28:54.205738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-03-31T16:28:54.261337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-31T16:28:54.304720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-31T16:28:54.350127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-03-31T16:28:54.410985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-03-31T16:28:54.504561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-03-31T16:28:54.589054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-03-31T16:28:54.642388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-03-31T16:28:54.688582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable ... tablet_id=72075186224038521;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:32.219492Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038497;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:32.221095Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038523;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:32.226365Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038439;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:32.229090Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038493;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:32.236619Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038517;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:32.242308Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038525;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:32.253463Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038511;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:32.264319Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038489;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:32.270813Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038437;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:32.274075Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038495;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:32.280930Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038477;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:32.281047Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038453;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:32.286494Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038513;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:32.286494Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038483;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:32.292891Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038509;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:32.292986Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038445;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:32.298452Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038441;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:32.299357Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038455;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:32.303706Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038544;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:32.304490Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038481;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:32.308922Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038561;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:32.310207Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038479;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:32.314299Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038485;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:32.314900Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038447;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:32.319530Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038548;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:32.320428Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038461;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:32.324782Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038457;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:32.325595Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038463;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:32.329964Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038465;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:32.331276Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038449;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:32.335375Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038467;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:32.336347Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038567;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:32.340562Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038582;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:32.341450Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038541;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:32.345881Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038565;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:32.348206Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038552;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:32.351236Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038546;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:32.353545Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038469;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:32.357080Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038569;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:32.359865Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038487;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:32.363186Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038459;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:32.366088Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038550;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:32.368849Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038529;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:32.375149Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038559;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:32.375957Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038518;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:32.390544Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038502;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:32.515396Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpghr35fk0bmwq8d0y0atnc", SessionId: ydb://session/3?node_id=1&id=M2EzN2E3ODYtODU1MzhiZTYtMjA4YmQ3MTctYzM5Njk5MmE=, Slow query, duration: 35.709900s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:29:32.847928Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:29:32.847974Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:29:32.849070Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::CreateTableCoveredEmbedding |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::MergeIndexWithReboots[PipeResets] >> KqpJoinOrder::TPCDS87-ColumnStore [GOOD] |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TestJoinOrderHintsManyHintTrees [GOOD] Test command err: Trying to start YDB, gRPC: 64040, MsgBus: 62032 2025-03-31T16:27:56.619244Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011240036801146:2206];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:27:56.619583Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018ee/r3tmp/tmpI0jAPT/pdisk_1.dat 2025-03-31T16:27:57.224591Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Unknown -> Disconnected 2025-03-31T16:27:57.224704Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:27:57.230522Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:27:57.231581Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64040, node 1 2025-03-31T16:27:57.495585Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:27:57.495618Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:27:57.495625Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:27:57.495722Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62032 TClient is connected to server localhost:62032 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:27:58.380885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:27:58.412436Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:28:01.072596Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011261511638140:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:01.072719Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:01.075166Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011261511638147:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:01.079856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:28:01.090089Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011261511638154:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:28:01.178510Z node 1 :TX_PROXY ERROR: Actor# [1:7488011261511638205:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:28:01.607266Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011240036801146:2206];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:01.607327Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:28:01.636508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:28:01.905575Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488011261511638482:2358];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:28:01.905767Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488011261511638482:2358];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:28:01.906005Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011261511638486:2360];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:28:01.906048Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011261511638486:2360];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:28:01.906094Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488011261511638482:2358];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:28:01.906212Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011261511638486:2360];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:28:01.906241Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488011261511638482:2358];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:28:01.906305Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011261511638486:2360];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:28:01.906352Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488011261511638482:2358];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:28:01.906416Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7488011261511638486:2360];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:28:01.906487Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488011261511638482:2358];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:28:01.906533Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011261511638486:2360];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:28:01.906628Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488011261511638482:2358];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:28:01.906633Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011261511638486:2360];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:28:01.907322Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011261511638486:2360];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:28:01.907495Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011261511638486:2360];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:28:01.907669Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011261511638486:2360];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:28:01.907777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011261511638486:2360];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:28:01.907865Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011261511638486:2360];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:28:01.911192Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488011261511638482:2358];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:28:01.911353Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488011261511638482:2358];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:28:01.911492Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488011261511638482:2358];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:28:01.911606Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488011261511638482:2358];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:28:01.911738Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488011261511638482:2358];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:28:01.992160Z node 1 :T ... 0714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:29.415926Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039309;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:29.420103Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:29.425333Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039367;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:29.432180Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:29.438533Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039339;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:29.438600Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:29.445927Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039277;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:29.447776Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039333;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:29.453716Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039329;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:29.456055Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:29.460040Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039305;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:29.462233Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:29.466823Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 
2025-03-31T16:29:29.468782Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:29.473445Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:29.474718Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:29.479758Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:29.480609Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:29.486306Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:29.486306Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:29.492353Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:29.493934Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:29.497820Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:29.499729Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:29.503170Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:29.505845Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:29.508733Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:29.511695Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:29.514224Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:29.517612Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:29.519761Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:29.525573Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:29.527809Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:29.530822Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:29.533554Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:29.536624Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:29.540683Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:29.541816Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:29.552105Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:29.556987Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:29.564344Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:29.567332Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:29.571984Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:29.581107Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:29.793058Z 
node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpghfxk01q4qqea3kkqf9sk", SessionId: ydb://session/3?node_id=1&id=YzMwZmY3NjUtYWNiZmY0Ni1iZWQ0YmIyOS1iMDAyZjIxNg==, Slow query, duration: 41.357500s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:29:30.140634Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:29:30.140634Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:29:30.141309Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;self_id=[1:7488011596519144874:11353];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039392;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039094;receive=72075186224038933; 2025-03-31T16:29:30.141782Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716;
: Warning: Execution, code: 1060
: Warning: Unapplied hint: Rows(R T # 1), code: 4534
: Warning: Execution, code: 1060
: Warning: Unapplied hint: Rows(R T # 1), code: 4534
>> TVectorIndexTests::CreateTableCoveredEmbedding [GOOD]
|93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest
>> TAsyncIndexTests::Decimal
|93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest
|93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::CreateTableCoveredEmbedding [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140]
2025-03-31T16:29:47.378087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-03-31T16:29:47.378196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-31T16:29:47.378274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-03-31T16:29:47.378324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-03-31T16:29:47.378383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-03-31T16:29:47.378410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-03-31T16:29:47.378473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-31T16:29:47.378548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-03-31T16:29:47.378834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-03-31T16:29:47.482688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-31T16:29:47.482747Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:29:47.491631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-03-31T16:29:47.491974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-03-31T16:29:47.492140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-03-31T16:29:47.499326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-03-31T16:29:47.499540Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-03-31T16:29:47.500168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-03-31T16:29:47.500487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-03-31T16:29:47.502376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-03-31T16:29:47.503514Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-31T16:29:47.503572Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-31T16:29:47.503691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-03-31T16:29:47.503738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-31T16:29:47.503780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-03-31T16:29:47.503993Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-03-31T16:29:47.510407Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062]
2025-03-31T16:29:47.644229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-03-31T16:29:47.644445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:29:47.644652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-03-31T16:29:47.644879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-03-31T16:29:47.644943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:29:47.647954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-03-31T16:29:47.648096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-03-31T16:29:47.648334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:29:47.648389Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-03-31T16:29:47.648422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-03-31T16:29:47.648456Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-03-31T16:29:47.651028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:29:47.651123Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-03-31T16:29:47.651168Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-03-31T16:29:47.654383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:29:47.654446Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:29:47.654501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-03-31T16:29:47.654571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-03-31T16:29:47.658560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-03-31T16:29:47.661053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-03-31T16:29:47.661261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-03-31T16:29:47.662358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-03-31T16:29:47.662528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-03-31T16:29:47.662599Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-03-31T16:29:47.662886Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-03-31T16:29:47.662946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-03-31T16:29:47.663160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-03-31T16:29:47.663320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-03-31T16:29:47.665528Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-31T16:29:47.665587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-31T16:29:47.665800Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-31T16:29:47.665867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1
FAKE_COORDINATOR: Erasing txId 1
2025-03-31T16:29:47.666262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-31T16:29:47.666311Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-03-31T16:29:47.666417Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-03-31T16:29:47.666477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-31T16:29:47.666526Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-03-31T16:29:47.666558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-31T16:29:47.666601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-03-31T16:29:47.666659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-31T16:29:47.666700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-03-31T16:29:47.666731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-03-31T16:29:47.666804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-03-31T16:29:47.666848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-03-31T16:29:47.666911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-03-31T16:29:47.668972Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-03-31T16:29:47.669105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-03-31T16:29:47.669168Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ...
0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Children { Name: "indexImplPostingTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "idx_vector" LocalPathId: 3 Type: EIndexTypeGlobalVectorKmeansTree State: EIndexStateReady KeyColumnNames: "embedding" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataColumnNames: "embedding" DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 
DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } VectorIndexKmeansTreeDescription { Settings { settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 1024 } clusters: 4 levels: 5 } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:29:48.179913Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/vectors/idx_vector/indexImplLevelTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-31T16:29:48.180188Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/vectors/idx_vector/indexImplLevelTable" took 248us result status StatusSuccess 2025-03-31T16:29:48.180701Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/vectors/idx_vector/indexImplLevelTable" PathDescription { Self { Name: "indexImplLevelTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 
5000002 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplLevelTable" Columns { Name: "__ydb_parent" Type: "Uint64" TypeId: 4 Id: 1 NotNull: true IsBuildInProgress: false } Columns { Name: "__ydb_id" Type: "Uint64" TypeId: 4 Id: 2 NotNull: true IsBuildInProgress: false } Columns { Name: "__ydb_centroid" Type: "String" TypeId: 4097 Id: 3 NotNull: true IsBuildInProgress: false } KeyColumnNames: "__ydb_parent" KeyColumnNames: "__ydb_id" KeyColumnIds: 1 KeyColumnIds: 2 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:29:48.181489Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/vectors/idx_vector/indexImplPostingTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-31T16:29:48.181735Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/vectors/idx_vector/indexImplPostingTable" took 255us result status StatusSuccess 2025-03-31T16:29:48.182185Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/vectors/idx_vector/indexImplPostingTable" PathDescription { Self { Name: "indexImplPostingTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplPostingTable" Columns { Name: "__ydb_parent" Type: "Uint64" TypeId: 4 Id: 1 NotNull: true IsBuildInProgress: false } Columns { Name: "id" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "embedding" Type: "String" TypeId: 4097 Id: 3 
NotNull: false IsBuildInProgress: false } KeyColumnNames: "__ydb_parent" KeyColumnNames: "id" KeyColumnIds: 1 KeyColumnIds: 2 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS87-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 25098, MsgBus: 25914 2025-03-31T16:28:37.860059Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011414589261941:2138];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:37.860108Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018c9/r3tmp/tmpJCYXiK/pdisk_1.dat 2025-03-31T16:28:38.557930Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:28:38.558015Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:28:38.568078Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:28:38.662494Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25098, node 1 2025-03-31T16:28:38.867573Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:28:38.867597Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:28:38.867608Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:28:38.867767Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25914 TClient is connected to server localhost:25914 WaitRootIsUp 'Root'... 
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-31T16:28:39.553991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:28:39.575791Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-03-31T16:28:42.021217Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011436064098998:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:42.021370Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:42.025656Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011436064099010:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:42.039971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:28:42.057513Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011436064099012:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:28:42.151456Z node 1 :TX_PROXY ERROR: Actor# [1:7488011436064099063:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:28:42.553742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:28:42.687375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:28:42.721805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:28:42.770826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:28:42.837322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:28:42.863629Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011414589261941:2138];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:42.863684Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:28:43.064734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:28:43.146534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:28:43.189065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:28:43.231031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-31T16:28:43.308407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-03-31T16:28:43.360018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-03-31T16:28:43.443430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 
2025-03-31T16:28:43.507282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:28:44.183876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-03-31T16:28:44.236380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T16:28:44.307578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-31T16:28:44.378487Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-31T16:28:44.420919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-31T16:28:44.461354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-31T16:28:44.498317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-31T16:28:44.530174Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-31T16:28:44.560931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-03-31T16:28:44.605677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-31T16:28:44.692556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-31T16:28:44.737322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-03-31T16:28:44.807770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-03-31T16:28:44.851583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-03-31T16:28:44.895906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-03-31T16:28:44.949897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-03-31T16:28:45.000641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTa ... oller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.730367Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038599;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.732213Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038543;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.741731Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038425;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.743906Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038567;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.753289Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038545;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.755507Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038529;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.764992Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038573;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.767188Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038533;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.774745Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038509;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.776564Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038597;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.786158Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038515;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.788566Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038601;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.797861Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038557;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.799740Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038451;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.805242Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038525;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.809926Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038497;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.820608Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038549;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.826178Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038565;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.827631Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038541;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.832970Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038519;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.838406Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038513;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.847714Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038585;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.851979Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038581;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.852772Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038561;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.857949Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038591;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.861122Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038563;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.866487Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038555;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.871211Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038521;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.879775Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038657;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.881707Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038613;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.886769Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038547;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.891659Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038615;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.900243Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038607;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.902642Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038553;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.912080Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038589;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.915725Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038650;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.921281Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038630;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.927229Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038593;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.929267Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038559;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.935107Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038583;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.942057Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038635;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.948919Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038636;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.951781Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038577;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.958495Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038642;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.961561Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038517;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:29:24.151589Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpghe9fcnkr2kb3wcap0r87", SessionId: ydb://session/3?node_id=1&id=Njk5NGVmOGQtYTFlNzZmMWItZDI0ZjhlYWYtMzA5MTFjYzI=, Slow query, duration: 37.380033s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n    id1 Int32 NOT NULL,\n    PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n    id2 Int64 NOT NULL,\n    t1_id1 Int64 NOT NULL,\n    -- random_field2 Int32\n    PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n    id3 Int16 NOT NULL,\n    -- random_field3 Int32\n    PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b
2025-03-31T16:29:24.780649Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716;
2025-03-31T16:29:24.781066Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716;
2025-03-31T16:29:24.781690Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;self_id=[1:7488011569208111688:5933];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038629;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038170;receive=72075186224038331;
2025-03-31T16:29:24.782017Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716;
|93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest
|93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest
>> TAsyncIndexTests::MergeMainWithReboots[TabletReboots]
>> TAsyncIndexTests::Decimal [GOOD]
>> KqpJoinOrder::FiveWayJoinWithComplexPreds+ColumnStore [GOOD]
>> TPersQueueTest::Codecs_InitWriteSession_DefaultTopicSupportedCodecsInInitResponse [GOOD]
>> TPersQueueTest::Codecs_WriteMessageWithDefaultCodecs_MessagesAreAcknowledged
|93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest
>> KqpJoinOrder::TPCDS95-ColumnStore [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::Decimal [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140]
2025-03-31T16:29:49.265407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-03-31T16:29:49.265512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s,
InflightLimit# 10 2025-03-31T16:29:49.265558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:29:49.265607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:29:49.265662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:29:49.265691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:29:49.265755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:29:49.265854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:29:49.266155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:29:49.361726Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:29:49.361815Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:29:49.372587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:29:49.372907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:29:49.373082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:29:49.380683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:29:49.380941Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:29:49.381550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:49.381929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:29:49.383895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:49.385078Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:29:49.385131Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:49.385348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:29:49.385396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:29:49.385435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:29:49.385654Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:29:49.392066Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-03-31T16:29:49.517961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" 
Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:29:49.518211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:49.518419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:29:49.518634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:29:49.518709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:49.527916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:49.528062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:29:49.528268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:49.528330Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:29:49.528365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:29:49.528395Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:29:49.533903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:49.533979Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:29:49.534023Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:29:49.536222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:49.536277Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:49.536325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:49.536373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:29:49.551892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:29:49.555358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:29:49.555554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 
72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:29:49.556621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:49.556782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:29:49.556838Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:49.557107Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:29:49.557162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:49.557318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:29:49.557456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:29:49.560500Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:29:49.560582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:29:49.560754Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:49.560803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:29:49.561165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:49.561214Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:29:49.561310Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:29:49.561343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:29:49.561380Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:29:49.561415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:29:49.561462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:29:49.561517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:29:49.561552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:29:49.561582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:29:49.561636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:29:49.561685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:29:49.561714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 
72057594046678944, LocalPathId: 1], 3 2025-03-31T16:29:49.563619Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:29:49.563734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:29:49.563776Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... atency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1030 } } 2025-03-31T16:29:49.928794Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1030 } } 2025-03-31T16:29:49.930182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 325 RawX2: 4294969604 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-03-31T16:29:49.930221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409547, partId: 0 2025-03-31T16:29:49.930311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 325 RawX2: 4294969604 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-03-31T16:29:49.930357Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-03-31T16:29:49.930436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 325 RawX2: 4294969604 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-03-31T16:29:49.930484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:49.930515Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:29:49.930542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-03-31T16:29:49.930572Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2025-03-31T16:29:49.935133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-31T16:29:49.935582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-31T16:29:49.935906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-31T16:29:49.941978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2025-03-31T16:29:49.942139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-31T16:29:49.942325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2025-03-31T16:29:49.942684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:29:49.942803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:29:49.943924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:2, at schemeshard: 72057594046678944 2025-03-31T16:29:49.943983Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:2 ProgressState 2025-03-31T16:29:49.944089Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:2 progress is 2/3 2025-03-31T16:29:49.944143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2025-03-31T16:29:49.944182Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:2 progress is 2/3 2025-03-31T16:29:49.944220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2025-03-31T16:29:49.944312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 2/3, is published: true 2025-03-31T16:29:49.944736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:29:49.944774Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-03-31T16:29:49.944850Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 3/3 2025-03-31T16:29:49.944878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-03-31T16:29:49.944924Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 3/3 2025-03-31T16:29:49.944949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-03-31T16:29:49.944972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 3/3, is published: true 2025-03-31T16:29:49.945039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:377:2345] message: TxId: 101 2025-03-31T16:29:49.945087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-03-31T16:29:49.945129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-03-31T16:29:49.945161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-03-31T16:29:49.945307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-31T16:29:49.945362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:1 2025-03-31T16:29:49.945388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:1 2025-03-31T16:29:49.945417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-03-31T16:29:49.945437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:2 
2025-03-31T16:29:49.945472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:2 2025-03-31T16:29:49.945524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-03-31T16:29:49.950179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-31T16:29:49.950235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:378:2346] TestWaitNotification: OK eventTxId 101 2025-03-31T16:29:49.950756Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-31T16:29:49.951054Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex" took 293us result status StatusSuccess 2025-03-31T16:29:49.951911Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex" PathDescription { Self { Name: "UserDefinedIndex" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "UserDefinedIndex" LocalPathId: 3 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } 
ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TAsyncIndexTests::DropTableWithInflightChanges[TabletReboots] |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::CreateTable |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TUniqueIndexTests::CreateTable |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS95-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 18744, MsgBus: 32174 2025-03-31T16:28:37.935396Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011414740692666:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:37.935620Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018c7/r3tmp/tmpIPpKeZ/pdisk_1.dat 2025-03-31T16:28:38.676511Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:28:38.694244Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:28:38.694391Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:28:38.703283Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on 
GrpcPort 18744, node 1 2025-03-31T16:28:38.864730Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:28:38.864755Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:28:38.864762Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:28:38.864852Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32174 TClient is connected to server localhost:32174 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:28:39.818377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:39.839959Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:28:41.959064Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011431920562376:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:41.959264Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:41.959554Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011431920562388:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:41.963616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:28:41.979403Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011431920562390:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:28:42.041895Z node 1 :TX_PROXY ERROR: Actor# [1:7488011436215529737:2340] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:28:42.439745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:28:42.568560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:28:42.633327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:28:42.681840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:28:42.761277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:28:42.919563Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011414740692666:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:42.920041Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:28:42.950519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:28:42.986993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:28:43.027295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:28:43.099480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-31T16:28:43.156817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-03-31T16:28:43.196877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-03-31T16:28:43.242010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 
2025-03-31T16:28:43.276686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:28:44.052356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-03-31T16:28:44.140226Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T16:28:44.236907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-31T16:28:44.329903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-31T16:28:44.372578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-31T16:28:44.411880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-31T16:28:44.454132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-31T16:28:44.498272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-31T16:28:44.536437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-03-31T16:28:44.655180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-31T16:28:44.701759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-31T16:28:44.761152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-03-31T16:28:44.824941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-03-31T16:28:44.892058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-03-31T16:28:44.931411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-03-31T16:28:44.981551Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-03-31T16:28:45.024104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTa ... 81474976710714; 2025-03-31T16:29:23.923018Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038646;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.925585Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038633;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.930159Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038631;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.931341Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038643;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.939057Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038634;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.939319Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038652;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.945209Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038653;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.945271Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038637;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.951620Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038639;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.952357Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038660;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.958527Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038647;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.958581Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038640;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.964366Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.964366Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038619;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.969156Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038638;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.972299Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038611;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.975248Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038627;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.978284Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038609;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:23.992144Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038628;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:24.002301Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038626;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:24.008483Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038642;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:24.014050Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038615;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:24.020047Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038620;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:24.025565Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038621;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:24.031396Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038625;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:24.036848Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038607;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:24.042368Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038622;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:24.049229Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038591;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:24.054811Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038605;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:24.060791Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038624;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:24.066431Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038601;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:24.070079Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038623;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:24.072319Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038583;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:24.078716Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038617;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:24.083973Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038575;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:24.084457Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038577;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:24.097516Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038579;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:24.097524Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038547;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:24.211449Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpgheev85fe5d9m7d5y00sa", SessionId: ydb://session/3?node_id=1&id=N2Y2ODMyMzUtYjRkM2IxMzktZDkzOThkMC04MWVmOWU2Nw==, Slow query, duration: 37.271685s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:29:24.751305Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:29:24.751312Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:29:24.751849Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038629;self_id=[1:7488011569359541707:5781];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038629;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038170;receive=72075186224038331; 2025-03-31T16:29:24.752226Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:29:46.627730Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpgjybb9xqswxgqv51e1zbc", SessionId: ydb://session/3?node_id=1&id=N2Y2ODMyMzUtYjRkM2IxMzktZDkzOThkMC04MWVmOWU2Nw==, Slow query, duration: 10.645343s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "pragma TablePathPrefix = \"/Root/test/ds/\";\n-- NB: Subquerys\n$ws_wh =\n(select ws1.ws_order_number ws_order_number,ws1.ws_warehouse_sk wh1,ws2.ws_warehouse_sk wh2\n from web_sales ws1 cross join web_sales ws2\n where ws1.ws_order_number = ws2.ws_order_number\n and ws1.ws_warehouse_sk <> ws2.ws_warehouse_sk);\n-- start query 1 in stream 0 using template query95.tpl and seed 2031708268\n select\n count(distinct ws1.ws_order_number) as `order count`\n ,sum(ws_ext_ship_cost) as `total shipping cost`\n ,sum(ws_net_profit) as `total net profit`\nfrom\n web_sales ws1\n cross join date_dim\n cross join customer_address\n cross join web_site\nwhere\n cast(d_date as date) between cast('2002-4-01' as date) and\n (cast('2002-4-01' as date) + DateTime::IntervalFromDays(60))\nand ws1.ws_ship_date_sk = d_date_sk\nand ws1.ws_ship_addr_sk = ca_address_sk\nand ca_state = 'AL'\nand ws1.ws_web_site_sk = web_site_sk\nand web_company_name = 'pri'\nand ws1.ws_order_number in (select ws_order_number\n from $ws_wh)\nand ws1.ws_order_number in (select wr_order_number\n from web_returns cross join $ws_wh ws_wh\n where wr_order_number = ws_wh.ws_order_number)\norder by `order count`\nlimit 100;\n", parameters: 0b |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::CreateTable [GOOD] >> TUniqueIndexTests::CreateTable [GOOD] |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoinWithComplexPreds+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 28156, MsgBus: 11881 2025-03-31T16:28:03.339636Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011270177427464:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:03.340105Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018ea/r3tmp/tmpN6LZQv/pdisk_1.dat 2025-03-31T16:28:04.131511Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:28:04.131614Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:28:04.136423Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:28:04.209244Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28156, node 1 
2025-03-31T16:28:04.419771Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:28:04.419793Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:28:04.419805Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:28:04.419922Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11881 TClient is connected to server localhost:11881 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:28:05.638162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:05.670872Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:28:08.028468Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011291652264476:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:08.028536Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011291652264487:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:08.028645Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:08.039176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:28:08.058461Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011291652264490:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:28:08.127850Z node 1 :TX_PROXY ERROR: Actor# [1:7488011291652264541:2343] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:28:08.339211Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011270177427464:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:08.339294Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:28:08.580723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:28:08.862010Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011291652264774:2360];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:28:08.862038Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488011291652264778:2362];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:28:08.862218Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011291652264774:2360];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:28:08.862449Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011291652264774:2360];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:28:08.862595Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011291652264774:2360];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:28:08.862693Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011291652264774:2360];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:28:08.862722Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488011291652264778:2362];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:28:08.862796Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011291652264774:2360];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:28:08.862890Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011291652264774:2360];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:28:08.862913Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;self_id=[1:7488011291652264778:2362];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:28:08.863002Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011291652264774:2360];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:28:08.863063Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488011291652264778:2362];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:28:08.863186Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488011291652264778:2362];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:28:08.863271Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488011291652264778:2362];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:28:08.863359Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488011291652264778:2362];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:28:08.863448Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488011291652264778:2362];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:28:08.863545Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488011291652264778:2362];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:28:08.863682Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488011291652264778:2362];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:28:08.863737Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011291652264774:2360];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:28:08.863874Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488011291652264778:2362];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:28:08.863882Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011291652264774:2360];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:28:08.863983Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488011291652264778:2362];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:28:08.863983Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7488011291652264774:2360];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:28:08.864064Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011291652264774:2360];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:28:08.934177Z node 1 :T ... ller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:34.659972Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039275;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:34.665057Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039187;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:34.665057Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039238;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:34.670634Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039231;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:34.670634Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039199;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:34.676778Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039249;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:34.680049Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039234;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:34.684514Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039215;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:34.684541Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039213;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:34.690077Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039278;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:34.690080Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039298;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:34.697103Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039301;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:34.698391Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039310;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:34.703636Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039239;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:34.704310Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039232;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:34.710589Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039190;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:34.714671Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039287;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:34.721217Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039256;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:34.723961Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039235;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:34.728086Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039189;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:34.730285Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039332;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:34.734692Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039241;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:34.741660Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039267;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:34.746036Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039251;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:34.748173Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039205;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:34.754906Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039219;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:34.755984Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039261;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:34.761962Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039327;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:34.764467Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039303;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:34.775324Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039258;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:34.776456Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039257;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:34.782774Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039246;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:34.782774Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039325;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:34.789250Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039237;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:34.793578Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039295;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:34.795314Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039274;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:34.799798Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039284;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:34.805155Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039305;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:34.805155Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039259;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:34.811187Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039316;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:34.811187Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:34.817350Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039313;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:34.817350Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039247;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:34.823584Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039290;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:34.823583Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039322;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-31T16:29:35.008551Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpghq7g0mmm3d0jw285dtw4", SessionId: ydb://session/3?node_id=1&id=NDBjOWRmZjItNjc1MDRiYy01OGYyMjZkMy0yM2ZkMTczYQ==, Slow query, duration: 39.088117s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n    id1 Int32 NOT NULL,\n    PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n    id2 Int64 NOT NULL,\n    t1_id1 Int64 NOT NULL,\n    -- random_field2 Int32\n    PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n    id3 Int16 NOT NULL,\n    -- random_field3 Int32\n    PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b
2025-03-31T16:29:35.466828Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:29:35.467184Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;self_id=[1:7488011622364803519:11200];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039392;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039094;receive=72075186224038933; 2025-03-31T16:29:35.467536Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:29:35.468291Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716;
|93.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::CreateTable [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-31T16:29:52.030906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:29:52.031009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:29:52.031095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:29:52.031138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:29:52.031197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:29:52.031229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type
TxSplitTablePartition, limit 10000 2025-03-31T16:29:52.031291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:29:52.031389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:29:52.031758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:29:52.141394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:29:52.141478Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:29:52.155950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:29:52.156308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:29:52.156486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:29:52.172967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:29:52.173236Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:29:52.174016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:52.174398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:29:52.176550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:52.177892Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:29:52.177960Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:52.178069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:29:52.178118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:29:52.178155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:29:52.178415Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:29:52.185849Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-03-31T16:29:52.305154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:29:52.305440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:52.305687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:29:52.305964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose 
status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:29:52.306055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:52.308635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:52.308795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:29:52.309024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:52.309096Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:29:52.309132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:29:52.309169Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:29:52.311272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:52.311342Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:29:52.311380Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:29:52.313520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:52.313574Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:52.313623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:52.313674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:29:52.317678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:29:52.320031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:29:52.320246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:29:52.321423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:52.321604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 
2025-03-31T16:29:52.321677Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:52.321983Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:29:52.322052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:52.322260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:29:52.322353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:29:52.324646Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:29:52.324706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:29:52.324888Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:52.325119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:29:52.325515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:52.325562Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:29:52.325663Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:29:52.325707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:29:52.325837Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:29:52.325870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:29:52.325908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:29:52.325992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:29:52.326041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:29:52.326074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:29:52.326149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:29:52.326188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:29:52.326221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:29:52.328285Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:29:52.328420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 
2025-03-31T16:29:52.328475Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 6:29:52.837119Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-03-31T16:29:52.837150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-03-31T16:29:52.837669Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:29:52.837737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:29:52.837765Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-03-31T16:29:52.838090Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:29:52.838196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:29:52.838227Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-03-31T16:29:52.838254Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-03-31T16:29:52.838283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:29:52.838608Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:29:52.838674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:29:52.838700Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-03-31T16:29:52.839038Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:29:52.839161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:29:52.839193Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-03-31T16:29:52.840395Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 
18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:29:52.840469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:29:52.840496Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-03-31T16:29:52.840523Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-03-31T16:29:52.840572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-03-31T16:29:52.841812Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:29:52.841920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:29:52.841950Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-03-31T16:29:52.841984Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2025-03-31T16:29:52.842014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-03-31T16:29:52.842087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/4, is published: true 2025-03-31T16:29:52.845037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:3, at schemeshard: 72057594046678944 2025-03-31T16:29:52.845107Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 102:3 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:29:52.845505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-03-31T16:29:52.845643Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:3 progress is 2/4 2025-03-31T16:29:52.845680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 2/4 2025-03-31T16:29:52.845724Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:3 progress is 2/4 2025-03-31T16:29:52.845760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 2/4 2025-03-31T16:29:52.845832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 2/4, is published: true 2025-03-31T16:29:52.846460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-31T16:29:52.846829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:2, at schemeshard: 72057594046678944 2025-03-31T16:29:52.846886Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 102:2 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:29:52.847159Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-03-31T16:29:52.847258Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:2 progress is 3/4 2025-03-31T16:29:52.847311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 3/4 2025-03-31T16:29:52.847352Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:2 progress is 3/4 2025-03-31T16:29:52.847376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 3/4 2025-03-31T16:29:52.847405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 3/4, is published: true 2025-03-31T16:29:52.847999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-31T16:29:52.848058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-31T16:29:52.848158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-31T16:29:52.848194Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:29:52.848385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-31T16:29:52.848470Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 4/4 2025-03-31T16:29:52.848496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 4/4 2025-03-31T16:29:52.848527Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 4/4 2025-03-31T16:29:52.848569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 4/4 2025-03-31T16:29:52.848602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 4/4, is published: true 2025-03-31T16:29:52.848678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:415:2372] message: TxId: 102 2025-03-31T16:29:52.848739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 4/4 2025-03-31T16:29:52.848785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-03-31T16:29:52.848821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-03-31T16:29:52.849169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-31T16:29:52.849215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:1 2025-03-31T16:29:52.849237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:1 2025-03-31T16:29:52.849280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-03-31T16:29:52.849302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:2 2025-03-31T16:29:52.849323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:2 2025-03-31T16:29:52.849385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 
2025-03-31T16:29:52.849418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:3 2025-03-31T16:29:52.849439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:3 2025-03-31T16:29:52.849492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-03-31T16:29:52.850007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-31T16:29:52.850077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-31T16:29:52.850122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-31T16:29:52.850208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-31T16:29:52.852005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-31T16:29:52.861354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-31T16:29:52.861420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:534:2484] TestWaitNotification: OK eventTxId 102
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TUniqueIndexTests::CreateTable [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:29:52.691490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:29:52.691584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:29:52.691624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:29:52.691655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:29:52.691723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:29:52.691754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:29:52.691809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:29:52.691883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:29:52.692210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:29:52.781065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-31T16:29:52.781130Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:29:52.785838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:29:52.785968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:29:52.786131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:29:52.791360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:29:52.791567Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:29:52.792242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:52.792482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:29:52.794327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:52.795485Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:29:52.795551Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:52.795751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:29:52.795816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:29:52.795860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:29:52.796039Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:29:52.803279Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:29:52.940717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:29:52.946158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:52.946429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:29:52.946674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:29:52.946773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:52.957820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:52.957967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 
2025-03-31T16:29:52.958236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:52.958316Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:29:52.958354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:29:52.958386Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:29:52.960636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:52.960731Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:29:52.960771Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:29:52.962804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:52.962853Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:52.962909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:52.962977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:29:52.973861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:29:52.976242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:29:52.976495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:29:52.977531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:52.977676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:29:52.977736Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:52.978068Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:29:52.978123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:52.978333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:29:52.978426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no 
IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:29:52.980812Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:29:52.980861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:29:52.981169Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:52.981221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:29:52.981464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:52.981516Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:29:52.981610Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:29:52.981646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:29:52.981683Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:29:52.981733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:29:52.981785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:29:52.981836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:29:52.981876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:29:52.981902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:29:52.981968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:29:52.982003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:29:52.982037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:29:52.984136Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:29:52.984305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:29:52.984344Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
e.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:53.306264Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:29:53.306313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-03-31T16:29:53.306357Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2025-03-31T16:29:53.306732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 319 RawX2: 4294969599 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-03-31T16:29:53.306768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 2 2025-03-31T16:29:53.306860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:2, at schemeshard: 72057594046678944, message: Source { RawX1: 319 RawX2: 4294969599 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-03-31T16:29:53.306895Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-03-31T16:29:53.306964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 319 RawX2: 4294969599 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-03-31T16:29:53.307003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:2, shardIdx: 72057594046678944:2, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:53.307026Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:2, at schemeshard: 72057594046678944 2025-03-31T16:29:53.307049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:2, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-03-31T16:29:53.307075Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:2 129 -> 240 2025-03-31T16:29:53.320474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-31T16:29:53.320755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-31T16:29:53.328502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-31T16:29:53.328651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:29:53.328717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-31T16:29:53.328786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2025-03-31T16:29:53.328869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:29:53.328970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, 
operationId: 101:2, at schemeshard: 72057594046678944 2025-03-31T16:29:53.329317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:29:53.329368Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-03-31T16:29:53.329474Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 2/3 2025-03-31T16:29:53.329518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2025-03-31T16:29:53.329563Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 2/3 2025-03-31T16:29:53.329595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2025-03-31T16:29:53.329634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 2/3, is published: true 2025-03-31T16:29:53.329920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:2, at schemeshard: 72057594046678944 2025-03-31T16:29:53.329950Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:2 ProgressState 2025-03-31T16:29:53.330025Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:2 progress is 3/3 2025-03-31T16:29:53.330053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-03-31T16:29:53.330086Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:2 progress is 3/3 2025-03-31T16:29:53.330106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-03-31T16:29:53.330129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 3/3, is published: true 2025-03-31T16:29:53.330191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:377:2345] message: TxId: 101 2025-03-31T16:29:53.330253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-03-31T16:29:53.330295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-03-31T16:29:53.330325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-03-31T16:29:53.330474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-31T16:29:53.330524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:1 2025-03-31T16:29:53.330558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:1 2025-03-31T16:29:53.330587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-03-31T16:29:53.330609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:2 2025-03-31T16:29:53.330630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:2 2025-03-31T16:29:53.330673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-03-31T16:29:53.336426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-31T16:29:53.336518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:378:2346] TestWaitNotification: OK eventTxId 101 2025-03-31T16:29:53.337094Z 
node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-31T16:29:53.337376Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex" took 291us result status StatusSuccess 2025-03-31T16:29:53.338233Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex" PathDescription { Self { Name: "UserDefinedIndex" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "UserDefinedIndex" LocalPathId: 3 Type: EIndexTypeGlobalUnique State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 
8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
|93.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest
|93.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest
|93.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest
|93.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest
>> KqpJoinOrder::GeneralPrioritiesBug3 [GOOD]
>> TAsyncIndexTests::DropTableWithInflightChanges[PipeResets]
|93.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest
|93.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest
>> TAsyncIndexTests::CdcAndMergeWithReboots[PipeResets]
>> TAsyncIndexTests::SplitMainWithReboots[PipeResets]
>> TAsyncIndexTests::SplitIndexWithReboots[PipeResets]
>> TAsyncIndexTests::SplitMainWithReboots[TabletReboots]
|93.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest
>> TAsyncIndexTests::CdcAndMergeWithReboots[TabletReboots]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::GeneralPrioritiesBug3 [GOOD]
Test command err: Trying to start YDB, gRPC: 8950, MsgBus: 26220 2025-03-31T16:29:06.203715Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011538741887385:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:29:06.204108Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018b0/r3tmp/tmpWQg7Qh/pdisk_1.dat 2025-03-31T16:29:07.086487Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:29:07.098109Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:29:07.098183Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:29:07.120350Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8950, node 1 2025-03-31T16:29:07.432551Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T16:29:07.432572Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:29:07.432579Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:29:07.432683Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26220 TClient is connected to server localhost:26220 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:29:08.320542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:29:10.757144Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011555921757116:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:10.757242Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011555921757105:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:10.757383Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:10.761731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:29:10.777019Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-31T16:29:10.777677Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011555921757119:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:29:10.879210Z node 1 :TX_PROXY ERROR: Actor# [1:7488011555921757170:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:29:11.199628Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011538741887385:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:29:11.199966Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:29:11.251363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:29:11.472856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:29:11.537765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:29:11.593185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:29:11.640552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:29:11.830237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:29:11.890899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:29:11.927260Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:29:11.977027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-31T16:29:12.014240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-03-31T16:29:12.054422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-03-31T16:29:12.096359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 
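The pool-bootstrap sequence above — three "Failed to fetch pool ... NOT_FOUND" warnings, an ESchemeOpCreateResourcePool suboperation, a "doublechecking" retry, and finally "path exist, request accepts it" — is the workload manager lazily creating /Root/.metadata/workload_manager/pools/default on the first query; the warnings and the path-exists race are expected on a fresh database. Pools can also be declared explicitly. A minimal sketch with an assumed pool name and limits — CREATE RESOURCE POOL and both settings are YDB's documented DDL, but nothing below is taken from this log:

    -- Illustrative only: the pool name and limit values are assumptions.
    CREATE RESOURCE POOL test_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10,  -- max queries executing at once in this pool
        QUEUE_SIZE = 100              -- max queries allowed to wait for a slot
    );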
2025-03-31T16:29:12.142526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:29:12.993882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-03-31T16:29:13.034892Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T16:29:13.110986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-31T16:29:13.157762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-31T16:29:13.222002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-31T16:29:13.267855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-31T16:29:13.297436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-31T16:29:13.333073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-31T16:29:13.406102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-03-31T16:29:13.452847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-31T16:29:13.514355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-31T16:29:13.605869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-03-31T16:29:13.696809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-03-31T16:29:13.735710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-03-31T16:29:13.777863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-03-31T16:29:13.850561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-03-31T16:29:13.922029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTabl ... oller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:48.106175Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038611;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:48.108275Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038640;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:48.112889Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038523;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:48.116726Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038652;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:48.118311Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038615;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:48.127607Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038648;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:48.131762Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038559;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:48.136783Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038644;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:48.140792Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038571;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:48.145187Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038624;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:48.149298Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038569;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:48.154451Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038501;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:48.158260Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038650;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:48.173741Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038549;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:48.179628Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038660;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:48.186740Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038634;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:48.192286Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038613;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:48.202447Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038620;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:48.205105Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038656;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:48.216037Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038595;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:48.224666Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038636;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:48.230438Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038658;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:48.231151Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038631;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:48.237896Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038661;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:48.246515Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038619;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:48.250149Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038635;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:48.252271Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038626;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:48.258353Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038638;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:48.260052Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038653;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:48.266241Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038622;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:48.270709Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038633;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:48.272715Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038630;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:48.279683Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038642;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:48.280686Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038632;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:48.287269Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038654;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:48.288133Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038543;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:48.294665Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038625;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:48.295768Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:48.302475Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038647;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:48.302794Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038623;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:48.310104Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038637;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:48.310311Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038641;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:48.317521Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038646;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:48.317871Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038657;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:48.327056Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038651;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:48.435280Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpgjahh0r63y7gx03es47pe", SessionId: ydb://session/3?node_id=1&id=M2EzOTFkZGEtMzE2MGNmNzItNDQ3ZmYzYjctY2Y1YzFjODY=, Slow query, duration: 32.736952s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:29:48.697448Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:29:48.697941Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:29:48.698167Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;self_id=[1:7488011663295960745:5318];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038331;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038170;receive=72075186224038629; 2025-03-31T16:29:48.698744Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> TVectorIndexTests::CreateTablePrefixInvalidKeyType |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::CreateTablePrefixInvalidKeyType [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::CreateTablePrefixInvalidKeyType [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:29:59.145540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:29:59.145639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:29:59.145676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:29:59.145705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:29:59.145788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type 
TxMergeTablePartition, limit 10000 2025-03-31T16:29:59.145816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:29:59.145867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:29:59.145935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:29:59.146245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:29:59.241048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:29:59.241160Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:29:59.247453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:29:59.247573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:29:59.247766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:29:59.259929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:29:59.260183Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:29:59.260901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:59.261117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:29:59.263067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:59.264369Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:29:59.264448Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:59.264735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:29:59.264805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:29:59.264864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:29:59.265089Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:29:59.273917Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:29:59.415342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:29:59.415571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:59.415797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, 
LocalPathId: 1] was 0 2025-03-31T16:29:59.416037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:29:59.416109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:59.418776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:59.418935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:29:59.419171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:59.419237Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:29:59.419269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:29:59.419301Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:29:59.420996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:59.421048Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:29:59.421082Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:29:59.422693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:59.422742Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:59.422805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:59.422852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:29:59.426652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:29:59.428388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:29:59.428613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:29:59.429575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:59.429698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 
4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:29:59.429783Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:59.430074Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:29:59.430126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:59.430299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:29:59.430371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:29:59.432856Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:29:59.432898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:29:59.433087Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:59.433136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:29:59.433362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:59.433406Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:29:59.433495Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:29:59.433542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:29:59.433583Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:29:59.433610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:29:59.433646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:29:59.433695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:29:59.433729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:29:59.433755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:29:59.433833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:29:59.433868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:29:59.433898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:29:59.435876Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:29:59.436020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: 
Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:29:59.436063Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-03-31T16:29:59.436099Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-03-31T16:29:59.436141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:29:59.436230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-03-31T16:29:59.439419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-03-31T16:29:59.439945Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-03-31T16:29:59.441943Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] Bootstrap 2025-03-31T16:29:59.459236Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] Become StateWork (SchemeCache [1:272:2263]) 2025-03-31T16:29:59.461897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateIndexedTable CreateIndexedTable { TableDescription { Name: "vectors" Columns { Name: "id" Type: "Uint64" } Columns { Name: "embedding" Type: "String" } Columns { Name: "covered" Type: "String" } Columns { Name: "prefix" Type: "Float" } KeyColumnNames: "id" } IndexDescription { Name: "idx_vector" KeyColumnNames: "prefix" KeyColumnNames: "embedding" Type: EIndexTypeGlobalVectorKmeansTree DataColumnNames: "covered" VectorIndexKmeansTreeDescription { Settings { settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 1024 } clusters: 4 levels: 5 } } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:29:59.462317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTableIndex construct operation table path: /MyRoot/vectors domain path id: [OwnerId: 72057594046678944, LocalPathId: 1] domain path: /MyRoot shardsToCreate: 2 GetShardsInside: 0 MaxShards: 200000 2025-03-31T16:29:59.462608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 101:0, explain: Column 'prefix' has wrong key type Float for being key, at schemeshard: 72057594046678944 2025-03-31T16:29:59.462651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusInvalidParameter, reason: Column 'prefix' has wrong key type Float for being key, at schemeshard: 72057594046678944 2025-03-31T16:29:59.463890Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-31T16:29:59.471797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusInvalidParameter Reason: "Column \'prefix\' has wrong key type Float for being key" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:29:59.471975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Column 'prefix' has wrong key type Float for being key, operation: CREATE TABLE WITH INDEXES, path: /MyRoot/vectors 2025-03-31T16:29:59.472554Z node 1 
:TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-03-31T16:29:59.472786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-03-31T16:29:59.472831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-03-31T16:29:59.473230Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-03-31T16:29:59.473340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-31T16:29:59.473373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:282:2273] TestWaitNotification: OK eventTxId 101 2025-03-31T16:29:59.473903Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/vectors/idx_vector" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-31T16:29:59.474130Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/vectors/idx_vector" took 218us result status StatusPathDoesNotExist 2025-03-31T16:29:59.474293Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/vectors/idx_vector\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/vectors/idx_vector" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |93.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest |93.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::CopyPartUpload >> TS3WrapperTests::UploadUnknownPart >> TPQCompatTest::CommitOffsets [GOOD] >> TPQCompatTest::LongProducerAndLongMessageGroupId >> TS3WrapperTests::UploadUnknownPart [GOOD] >> TS3WrapperTests::CopyPartUpload [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::UploadUnknownPart [GOOD] Test command err: 2025-03-31T16:30:02.853640Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 98DA92A5-BAF8-47DE-B23D-3F0A73BC5496, request# UploadPart { Bucket: TEST Key: key UploadId: uploadId PartNumber: 1 } REQUEST: PUT /TEST/key?partNumber=1&uploadId=uploadId HTTP/1.1 HEADERS: Host: localhost:18496 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 4C5657E7-461E-4FDA-AB08-4601C7963025 amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /TEST/key / partNumber=1&uploadId=uploadId / 4 
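In the TVectorIndexTests::CreateTablePrefixInvalidKeyType trace above, schemeshard rejects the CreateIndexedTable proposal with StatusInvalidParameter because the prefixed vector index puts a Float column first in its key ("Column 'prefix' has wrong key type Float for being key"). Reconstructed from the protobuf in the propose message, the rejected schema corresponds roughly to the following YQL; the DDL syntax is an illustration of YDB's vector-index grammar, while the column types, key layout, and kmeans settings are taken from the log:

    CREATE TABLE `/MyRoot/vectors` (
        id Uint64,
        embedding String,
        covered String,
        prefix Float,   -- Float is not a valid index key column type: this triggers the rejection
        PRIMARY KEY (id),
        INDEX idx_vector GLOBAL USING vector_kmeans_tree
            ON (prefix, embedding) COVER (covered)
            WITH (distance = "cosine", vector_type = "float",
                  vector_dimension = 1024, clusters = 4, levels = 5)
    );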
2025-03-31T16:30:02.889095Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 98DA92A5-BAF8-47DE-B23D-3F0A73BC5496, response# ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::CopyPartUpload [GOOD] Test command err: 2025-03-31T16:30:02.846824Z node 1 :S3_WRAPPER NOTICE: Request: uuid# A96D70C6-C83D-44EE-BAA3-FA646811541C, request# PutObject { Bucket: TEST Key: key } REQUEST: PUT /TEST/key HTTP/1.1 HEADERS: Host: localhost:11328 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 4B6DB5BB-1A81-455C-BB7E-8DED932AAFEB amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /TEST/key / / 4 2025-03-31T16:30:02.888618Z node 1 :S3_WRAPPER NOTICE: Response: uuid# A96D70C6-C83D-44EE-BAA3-FA646811541C, response# PutObjectResult { ETag: 841a2d689ad86bd1611447453c22c6fc } 2025-03-31T16:30:02.889039Z node 1 :S3_WRAPPER NOTICE: Request: uuid# D1CBC757-7124-4FA0-8C3C-1DFC13951588, request# CreateMultipartUpload { Bucket: TEST Key: key1 } REQUEST: POST /TEST/key1?uploads HTTP/1.1 HEADERS: Host: localhost:11328 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: B895040D-5014-482C-B9FE-5F3B45DC4DDF amz-sdk-request: attempt=1 content-length: 0 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-storage-class: STANDARD S3_MOCK::HttpServeAction: 4 / /TEST/key1 / uploads= 2025-03-31T16:30:02.893644Z node 1 :S3_WRAPPER NOTICE: Response: uuid# D1CBC757-7124-4FA0-8C3C-1DFC13951588, response# CreateMultipartUploadResult { Bucket: Key: TEST/key1 UploadId: 1 } 2025-03-31T16:30:02.893997Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 5612924A-1155-453D-9AFA-D15526E5F7A7, request# UploadPartCopy { Bucket: TEST Key: key1 UploadId: 1 PartNumber: 1 } REQUEST: PUT /TEST/key1?partNumber=1&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:11328 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: AEA3B83E-D4B7-4134-9B5D-223251CCC1F1 amz-sdk-request: attempt=1 content-length: 0 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-copy-source: /TEST/key x-amz-copy-source-range: bytes=1-2 S3_MOCK::HttpServeWrite: /TEST/key1 / partNumber=1&uploadId=1 / 0 2025-03-31T16:30:02.897796Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 5612924A-1155-453D-9AFA-D15526E5F7A7, response# UploadPartCopyResult { } 2025-03-31T16:30:02.898429Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 8D776FA5-88B7-4CF0-836B-CC0A79E439D2, request# CompleteMultipartUpload { Bucket: TEST Key: key1 UploadId: 1 MultipartUpload: { Parts: [afc7e8a98f75755e513d9d5ead888e1d] } } REQUEST: POST /TEST/key1?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:11328 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 01B6AC81-51FE-4C95-A243-7488B46EAAAB amz-sdk-request: attempt=1 content-length: 235 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /TEST/key1 / uploadId=1 
2025-03-31T16:30:02.905538Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 8D776FA5-88B7-4CF0-836B-CC0A79E439D2, response# CompleteMultipartUploadResult { Bucket: Key: TEST/key1 ETag: afc7e8a98f75755e513d9d5ead888e1d } 2025-03-31T16:30:02.905908Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 5155BD9D-6B66-4658-9E00-E6F03D315D97, request# GetObject { Bucket: TEST Key: key1 Range: bytes=0-1 } REQUEST: GET /TEST/key1 HTTP/1.1 HEADERS: Host: localhost:11328 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: B91E610E-683C-4255-AEDE-125AD5510FC9 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-1 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /TEST/key1 / 2 2025-03-31T16:30:02.910956Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 5155BD9D-6B66-4658-9E00-E6F03D315D97, response# GetObjectResult { } >> KqpJoinOrder::CanonizedJoinOrderTPCH11 [GOOD] |93.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::HeadObject >> KqpJoinOrder::TestJoinOrderHintsSimple-ColumnStore [GOOD] >> TS3WrapperTests::HeadObject [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::HeadObject [GOOD] Test command err: 2025-03-31T16:30:04.633355Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 2BA5F2D2-6F4F-47FD-8664-B46D6D303DDF, request# PutObject { Bucket: TEST Key: key } REQUEST: PUT /TEST/key HTTP/1.1 HEADERS: Host: localhost:20362 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 259AEF1C-5517-4B46-BF21-5F8D5A0FF7B1 amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /TEST/key / / 4 2025-03-31T16:30:04.644984Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 2BA5F2D2-6F4F-47FD-8664-B46D6D303DDF, response# PutObjectResult { ETag: 841a2d689ad86bd1611447453c22c6fc } 2025-03-31T16:30:04.645810Z node 1 :S3_WRAPPER NOTICE: Request: uuid# D18877A2-3215-4994-A73D-4458E366FCB4, request# HeadObject { Bucket: TEST Key: key } REQUEST: HEAD /TEST/key HTTP/1.1 HEADERS: Host: localhost:20362 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 49FC584F-DB58-4A11-A5BA-69E139D797C5 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /TEST/key / 4 2025-03-31T16:30:04.654563Z node 1 :S3_WRAPPER NOTICE: Response: uuid# D18877A2-3215-4994-A73D-4458E366FCB4, response# HeadObjectResult { ETag: 841a2d689ad86bd1611447453c22c6fc ContentLength: 4 } >> TS3WrapperTests::HeadUnknownObject >> TS3WrapperTests::AbortMultipartUpload >> TS3WrapperTests::HeadUnknownObject [GOOD] >> TS3WrapperTests::AbortMultipartUpload [GOOD] >> TS3WrapperTests::GetObject >> TS3WrapperTests::GetObject [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::HeadUnknownObject [GOOD] Test command err: 2025-03-31T16:30:05.728322Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 1226F184-908F-4709-B16D-A1F89E4BA691, request# HeadObject { Bucket: TEST Key: key } REQUEST: HEAD /TEST/key HTTP/1.1 HEADERS: Host: 
localhost:2644 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 6A601AD2-F278-48D5-952B-C18C801203E7 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 2025-03-31T16:30:05.740044Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 1226F184-908F-4709-B16D-A1F89E4BA691, response# No response body. ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TestJoinOrderHintsSimple-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 2283, MsgBus: 23765 2025-03-31T16:29:15.281442Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011578593308747:2273];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:29:15.282016Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018a7/r3tmp/tmpVExsnX/pdisk_1.dat 2025-03-31T16:29:16.085581Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:29:16.100258Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:29:16.100336Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:29:16.105278Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2283, node 1 2025-03-31T16:29:16.439588Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:29:16.439606Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:29:16.439613Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:29:16.439708Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23765 TClient is connected to server localhost:23765 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:29:17.637393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
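The KqpJoinOrder suites in this run (GeneralPrioritiesBug3 above, TestJoinOrderHintsSimple here) exercise the cost-based optimizer's join ordering, which user queries can steer with optimizer hints. A minimal sketch of the hint pragma, assuming the JoinOrder/Rows hint grammar from YDB's optimizer documentation; the hint text and join conditions are illustrative and not taken from this log, with table and column names reused from the t1/t2/t3 schema in the sibling trace:

    -- Illustrative only: forces t1 JOIN t2 before joining t3 and overrides t1's row estimate.
    PRAGMA ydb.OptimizerHints = 'JoinOrder( (t1 t2) t3 ) Rows(t1 # 1000)';
    SELECT *
    FROM t1
    JOIN t2 ON t2.t1_id1 = t1.id1
    JOIN t3 ON t3.id3 = t1.id1;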
2025-03-31T16:29:20.131021Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011600068145673:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:20.131177Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:20.132893Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011600068145685:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:20.139751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:29:20.160970Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-31T16:29:20.162001Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011600068145687:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:29:20.239256Z node 1 :TX_PROXY ERROR: Actor# [1:7488011600068145738:2342] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:29:20.285474Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011578593308747:2273];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:29:20.302485Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:29:20.687800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:29:20.878227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:29:20.924487Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:29:20.992162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:29:21.026167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:29:21.235466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:29:21.310691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:29:21.367233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:29:21.406180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-31T16:29:21.480754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-03-31T16:29:21.557406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-03-31T16:29:21.606075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 
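Each "Operation part proposed ok, but propose itself is undo unsafe" warning around this point marks one ESchemeOpCreateTable suboperation from the suite's schema setup; the wording reads as a note that the suboperation cannot be rolled back once proposed, not as a failure. The KQP_SLOW_LOG entry in the GeneralPrioritiesBug3 trace earlier captures the shape of this setup DDL verbatim; unescaped for readability, its first two statements are:

    CREATE TABLE t1 (
        id1 Int32 NOT NULL,
        PRIMARY KEY (id1)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t2 (
        id2 Int64 NOT NULL,
        t1_id1 Int64 NOT NULL,
        -- random_field2 Int32
        PRIMARY KEY (id2)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);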
2025-03-31T16:29:21.647137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:29:22.464868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-03-31T16:29:22.522203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T16:29:22.561001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-31T16:29:22.601184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-31T16:29:22.652292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-31T16:29:22.699233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-31T16:29:22.772393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-31T16:29:22.818701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-31T16:29:22.881506Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-03-31T16:29:22.920215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-31T16:29:22.995745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-31T16:29:23.033367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-03-31T16:29:23.068304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-03-31T16:29:23.111825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-03-31T16:29:23.166679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-03-31T16:29:23.217540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-03-31T16:29:23.263316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTabl ... 10714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:58.878885Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038441;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:58.884101Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038469;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:58.890555Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038447;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:58.893259Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038429;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:58.900991Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038453;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:58.902965Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038445;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:58.907810Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038457;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:58.908713Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038634;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:58.914630Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038659;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:58.914889Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038626;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:58.922330Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038655;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:58.924562Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038650;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:58.928826Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038473;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:58.933094Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038660;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:58.935123Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038477;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:58.942361Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038646;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:58.942539Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038618;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:58.949528Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038463;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:58.953234Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038644;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:58.958579Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038487;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:58.964431Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038630;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:58.967538Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038622;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:58.970696Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038658;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:58.976699Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038620;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:58.979263Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038614;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:58.983283Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038642;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:58.985133Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038471;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:58.989913Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038632;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:58.991714Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038439;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:58.996450Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038529;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:58.998360Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038451;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:59.002937Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038628;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:59.009252Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038531;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:59.014751Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038640;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:59.020605Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038652;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:59.027467Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038517;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:59.033622Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038527;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:59.039843Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038624;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:59.045419Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038657;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:59.046133Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038616;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:59.051494Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038501;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:59.051788Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038499;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:59.057922Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038636;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:59.057936Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038638;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:59.147113Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpgjkjb8ggc0bsg4pqsrp1w", SessionId: ydb://session/3?node_id=1&id=ODdhOWMzMDctZjVlNjdjZDktMTFiMDk5NGEtZjQ3YWQ4NzQ=, Slow query, duration: 34.206857s, 
status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:29:59.433581Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:29:59.434077Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:29:59.434436Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;self_id=[1:7488011630132923209:2865];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038170;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038331;receive=72075186224038629; 2025-03-31T16:29:59.434841Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716;
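[editor's note] The KQP_SLOW_LOG record above embeds the offending statement with "\n" escapes: it is the three-table column-store DDL that the join-order tests create before running, and the AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240 setting is consistent with the long run of column-shard finished_tx records that precedes it. Below is a minimal sketch of issuing the same DDL with the YDB Python SDK; the endpoint, database path, and SDK calls are assumptions for illustration only, while the statements themselves are copied from the log record (whitespace normalized):

import ydb  # illustrative sketch; assumes the ydb Python SDK is installed

# Statements reconstructed from the KQP_SLOW_LOG record above.
DDL = """
CREATE TABLE t1 (
    id1 Int32 NOT NULL,
    PRIMARY KEY (id1)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);
CREATE TABLE t2 (
    id2 Int64 NOT NULL,
    t1_id1 Int64 NOT NULL,
    -- random_field2 Int32
    PRIMARY KEY (id2)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);
CREATE TABLE t3 (
    id3 Int16 NOT NULL,
    -- random_field3 Int32
    PRIMARY KEY (id3)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);
"""

driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")  # assumed values
driver.wait(timeout=5)
pool = ydb.SessionPool(driver)
for stmt in filter(None, (s.strip() for s in DDL.split(";"))):
    # each CREATE TABLE is sent as its own scheme query
    pool.retry_operation_sync(lambda session, q=stmt: session.execute_scheme(q))
pool.stop()
driver.stop()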
: Warning: Execution, code: 1060
: Warning: Unapplied hint: Rows(R T # 1), code: 4534
: Warning: Execution, code: 1060
: Warning: Unapplied hint: Rows(R T # 1), code: 4534 ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::AbortMultipartUpload [GOOD] Test command err: 2025-03-31T16:30:05.953322Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 0B4E3C49-CDDC-48F9-8BFD-069318C593F1, request# CreateMultipartUpload { Bucket: TEST Key: key } REQUEST: POST /TEST/key?uploads HTTP/1.1 HEADERS: Host: localhost:14735 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 1E551EAB-2A7E-4B63-A694-D27830FA1215 amz-sdk-request: attempt=1 content-length: 0 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-storage-class: STANDARD S3_MOCK::HttpServeAction: 4 / /TEST/key / uploads= 2025-03-31T16:30:05.963700Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 0B4E3C49-CDDC-48F9-8BFD-069318C593F1, response# CreateMultipartUploadResult { Bucket: Key: TEST/key UploadId: 1 } 2025-03-31T16:30:05.964264Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 05F125A0-77C3-4AFF-BF96-5499C5F9E4B0, request# AbortMultipartUpload { Bucket: TEST Key: key UploadId: 1 } REQUEST: DELETE /TEST/key?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:14735 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 01FCBAC2-C0D2-41E4-AF93-C9B1AC958BED amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 6 / /TEST/key / uploadId=1 2025-03-31T16:30:05.967734Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 05F125A0-77C3-4AFF-BF96-5499C5F9E4B0, response# AbortMultipartUploadResult { } 2025-03-31T16:30:05.968279Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 14DE2C10-CB8E-4470-9959-800B7B7D4CAD, request# HeadObject { Bucket: TEST Key: key } REQUEST: HEAD /TEST/key HTTP/1.1 HEADERS: Host: localhost:14735 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 7561F36B-7ACD-40C6-AAB7-90833B07102E amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 2025-03-31T16:30:05.971260Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 14DE2C10-CB8E-4470-9959-800B7B7D4CAD, response# No response body. 
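[editor's note] The AbortMultipartUpload test output above drives the S3 mock through a create-upload, abort, head sequence: POST /TEST/key?uploads, DELETE /TEST/key?uploadId=1, then a HEAD that returns no body because no completed object exists. The test itself uses aws-sdk-cpp, as the user-agent headers show; the sketch below replays the same exchange with boto3 purely for illustration, with dummy credentials and the mock endpoint taken from the Host header in the log:

import boto3
from botocore.exceptions import ClientError

# Assumed setup: boto3 against the test's S3 mock; credentials are dummies.
s3 = boto3.client(
    "s3",
    endpoint_url="http://localhost:14735",
    aws_access_key_id="dummy",
    aws_secret_access_key="dummy",
)

up = s3.create_multipart_upload(Bucket="TEST", Key="key")   # POST /TEST/key?uploads
s3.abort_multipart_upload(Bucket="TEST", Key="key",
                          UploadId=up["UploadId"])          # DELETE /TEST/key?uploadId=...
try:
    s3.head_object(Bucket="TEST", Key="key")                # HEAD /TEST/key
except ClientError:
    # The upload was aborted before completion, so no object exists;
    # this matches the "No response body." HEAD result in the log.
    pass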
------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::GetObject [GOOD] Test command err: 2025-03-31T16:30:06.345508Z node 1 :S3_WRAPPER NOTICE: Request: uuid# C41C42E7-8AB3-4F3A-B477-264013FC9C10, request# PutObject { Bucket: TEST Key: key } REQUEST: PUT /TEST/key HTTP/1.1 HEADERS: Host: localhost:5463 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 4D2C99F4-8E89-4960-B6CF-FD340556625C amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /TEST/key / / 4 2025-03-31T16:30:06.352775Z node 1 :S3_WRAPPER NOTICE: Response: uuid# C41C42E7-8AB3-4F3A-B477-264013FC9C10, response# PutObjectResult { ETag: 841a2d689ad86bd1611447453c22c6fc } 2025-03-31T16:30:06.353210Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 6C7E2BEE-FAC2-4B2E-91A0-D318E87A70FC, request# GetObject { Bucket: TEST Key: key Range: bytes=0-3 } REQUEST: GET /TEST/key HTTP/1.1 HEADERS: Host: localhost:5463 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: B320119C-B7D3-4BB5-8703-4F717D443E01 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-3 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /TEST/key / 4 2025-03-31T16:30:06.357024Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 6C7E2BEE-FAC2-4B2E-91A0-D318E87A70FC, response# GetObjectResult { } |93.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH11 [GOOD] Test command err: Trying to start YDB, gRPC: 6246, MsgBus: 4022 2025-03-31T16:28:08.823572Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011290253472876:2277];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:08.823655Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018e6/r3tmp/tmpNbR0Cc/pdisk_1.dat 2025-03-31T16:28:09.546122Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:28:09.597411Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:28:09.597501Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:28:09.620242Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6246, node 1 2025-03-31T16:28:09.934964Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:28:09.934981Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:28:09.934987Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:28:09.935114Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4022 TClient is connected to server localhost:4022 
WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:28:10.994366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:11.018279Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:28:13.314198Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011311728309782:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:13.314314Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:13.315191Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011311728309794:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:13.319472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:28:13.333281Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011311728309796:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:28:13.419833Z node 1 :TX_PROXY ERROR: Actor# [1:7488011311728309847:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:28:13.695997Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:28:13.832370Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011290253472876:2277];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:13.832432Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:28:14.100326Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011311728310105:2351];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:28:14.100516Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011311728310105:2351];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:28:14.100746Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011311728310105:2351];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:28:14.100844Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011311728310105:2351];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:28:14.100945Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011311728310105:2351];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:28:14.101039Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011311728310105:2351];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:28:14.101156Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011311728310105:2351];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:28:14.101271Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011311728310105:2351];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:28:14.101386Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011311728310105:2351];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:28:14.101496Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[1:7488011311728310105:2351];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:28:14.101601Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011311728310105:2351];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:28:14.101703Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011311728310105:2351];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:28:14.103836Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488011311728310103:2350];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:28:14.103915Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488011311728310103:2350];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:28:14.104098Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488011311728310103:2350];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:28:14.104230Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488011311728310103:2350];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:28:14.104364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488011311728310103:2350];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:28:14.104482Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488011311728310103:2350];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:28:14.104594Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488011311728310103:2350];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:28:14.104692Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488011311728310103:2350];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:28:14.104788Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488011311728310103:2350];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:28:14.104877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488011311728310103:2350];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:28:14.104986Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037902;self_id=[1:7488011311728310103:2350];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:28:14.105091Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488011311728310103:2350];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:28:14.149427Z node 1 :TX_COL ... oller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:42.245438Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039341;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:42.249143Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039337;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:42.252246Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:42.256883Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039325;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:42.260284Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:42.265406Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039329;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:42.267632Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:42.274537Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:42.277753Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:42.282681Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039275;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:42.289483Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039277;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:42.296522Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:42.303629Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:42.309972Z node 1 :TX_COLUMNSHARD_TX 
WARN: tablet_id=72075186224039271;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:42.316256Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:42.322706Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:42.329180Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:42.335536Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:42.342173Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:42.346964Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:42.350730Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:42.354177Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:42.359489Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:42.361861Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:42.370542Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:42.382955Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:42.386746Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:42.397628Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:42.398747Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 
2025-03-31T16:29:42.404364Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:42.410001Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:42.413452Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:42.421866Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:42.423711Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:42.436003Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:42.445419Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:42.452353Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:42.458175Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:42.460748Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:42.466620Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:42.469175Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:42.476182Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:42.482279Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:42.489019Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:42.570924Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039210;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:42.684687Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpghyfqa3rnehzwzgs4bpvz", SessionId: ydb://session/3?node_id=1&id=Y2U4M2FkMWMtYzQ3MTRmYjUtODgwOTdlNzQtNzkzNTZmNTk=, Slow query, duration: 39.332082s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:29:42.987531Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:29:42.989038Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;self_id=[1:7488011595196197909:9393];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039094;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038933;receive=72075186224039392; 2025-03-31T16:29:42.989451Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:29:42.991265Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> KqpJoinOrder::FiveWayJoin-ColumnStore [GOOD] >> TPersQueueTest::SetupReadSession [GOOD] >> TPersQueueTest::TestBigMessage >> TS3WrapperTests::GetUnknownObject >> TS3WrapperTests::CompleteUnknownUpload |93.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::GetUnknownObject [GOOD] >> TS3WrapperTests::CompleteUnknownUpload [GOOD] >> TS3WrapperTests::PutObject >> TS3WrapperTests::AbortUnknownUpload ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::GetUnknownObject [GOOD] Test command err: 2025-03-31T16:30:08.636261Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 7EF5ED27-10A8-4148-80FE-234DF0E726BD, request# GetObject { Bucket: TEST Key: key Range: bytes=0-3 } REQUEST: GET /TEST/key HTTP/1.1 HEADERS: Host: localhost:1679 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 9B7184C5-DCF2-435F-BD8A-D85EB433F417 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-3 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 2025-03-31T16:30:08.643368Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 7EF5ED27-10A8-4148-80FE-234DF0E726BD, response# No response body. 
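[editor's note] The GetObject test further up and the GetUnknownObject test immediately above follow the same pattern: a 4-byte PutObject followed by a ranged GET (range: bytes=0-3) succeeds, while a GET against a key that was never written returns no body. A boto3 sketch of both cases follows; it is an illustrative assumption rather than the test's actual aws-sdk-cpp code, and the 4-byte payload is arbitrary since the real bytes are not visible in the log:

import boto3
from botocore.exceptions import ClientError

s3 = boto3.client("s3", endpoint_url="http://localhost:5463",  # endpoint from the log
                  aws_access_key_id="dummy", aws_secret_access_key="dummy")

s3.put_object(Bucket="TEST", Key="key", Body=b"abcd")              # PUT /TEST/key (4 bytes)
part = s3.get_object(Bucket="TEST", Key="key", Range="bytes=0-3")  # ranged GET
assert part["Body"].read() == b"abcd"

try:
    s3.get_object(Bucket="TEST", Key="missing")  # hypothetical never-written key
except ClientError:
    pass  # corresponds to the "No response body." GetObject result above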
------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::CompleteUnknownUpload [GOOD] Test command err: 2025-03-31T16:30:08.705009Z node 1 :S3_WRAPPER NOTICE: Request: uuid# C0D9048C-10B6-441C-BB0D-9C1B8EE58990, request# CompleteMultipartUpload { Bucket: TEST Key: key UploadId: uploadId MultipartUpload: { Parts: [ETag] } } REQUEST: POST /TEST/key?uploadId=uploadId HTTP/1.1 HEADERS: Host: localhost:28054 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 7AF82BE3-E684-4B0E-91B4-97AAD6F87262 amz-sdk-request: attempt=1 content-length: 207 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /TEST/key / uploadId=uploadId 2025-03-31T16:30:08.713356Z node 1 :S3_WRAPPER NOTICE: Response: uuid# C0D9048C-10B6-441C-BB0D-9C1B8EE58990, response# >> TS3WrapperTests::PutObject [GOOD] >> TPersQueueTest::Codecs_WriteMessageWithDefaultCodecs_MessagesAreAcknowledged [GOOD] >> TPersQueueTest::Codecs_WriteMessageWithNonDefaultCodecThatHasToBeConfiguredAdditionally_SessionClosedWithBadRequestError >> TS3WrapperTests::AbortUnknownUpload [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::PutObject [GOOD] Test command err: 2025-03-31T16:30:09.257219Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 4C65537B-F22D-4A7B-A834-9B6BA7DCC431, request# PutObject { Bucket: TEST Key: key } REQUEST: PUT /TEST/key HTTP/1.1 HEADERS: Host: localhost:17690 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: A6E55E83-5E06-4652-B24F-3F442D171E20 amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /TEST/key / / 4 2025-03-31T16:30:09.264905Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 4C65537B-F22D-4A7B-A834-9B6BA7DCC431, response# PutObjectResult { ETag: 841a2d689ad86bd1611447453c22c6fc } |93.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoin-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 23902, MsgBus: 22167 2025-03-31T16:29:20.794506Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011600377773569:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:29:20.794533Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018a1/r3tmp/tmpDyXPYm/pdisk_1.dat 2025-03-31T16:29:21.536103Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:29:21.536222Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:29:21.539411Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:29:21.552961Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23902, node 1 
2025-03-31T16:29:21.829847Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:29:21.829869Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:29:21.829876Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:29:21.837256Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22167 TClient is connected to server localhost:22167 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:29:22.742473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:29:22.771607Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:29:25.298095Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011621852610718:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:25.298214Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:25.298694Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011621852610730:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:25.311632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:29:25.326511Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011621852610732:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:29:25.401296Z node 1 :TX_PROXY ERROR: Actor# [1:7488011621852610783:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:29:25.759171Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:29:25.798272Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011600377773569:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:29:25.798388Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:29:26.014198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:29:26.084617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:29:26.130503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:29:26.181442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:29:26.429769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:29:26.476948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:29:26.526546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:29:26.574728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-31T16:29:26.623911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-03-31T16:29:26.676323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-03-31T16:29:26.725461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 
2025-03-31T16:29:26.782645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:29:27.589061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-03-31T16:29:27.657910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T16:29:27.707113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-31T16:29:27.770195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-31T16:29:27.806526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-31T16:29:27.893341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-31T16:29:27.925377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-31T16:29:27.969042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-31T16:29:28.003944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-03-31T16:29:28.076013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-31T16:29:28.134401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-31T16:29:28.177223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-03-31T16:29:28.225657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-03-31T16:29:28.306645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-03-31T16:29:28.357918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-03-31T16:29:28.434246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-03-31T16:29:28.500836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTa ... tablet_id=72075186224038640;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.643324Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038622;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.651413Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038624;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.654888Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038535;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.659161Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038655;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.663761Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038551;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.665726Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038646;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.670692Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.670812Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038607;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.675735Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038523;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.678118Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038619;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.682539Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038599;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.685194Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038638;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.689931Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038595;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.691992Z node 1 
:TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038652;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.697323Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038597;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.698359Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038625;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.704579Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038635;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.704578Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038630;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.711330Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038658;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.711329Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038615;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.718140Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038623;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.718139Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038653;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.725023Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038621;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.725022Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038633;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.731824Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038657;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.731823Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038660;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.738585Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038620;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.738585Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038661;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.745333Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038626;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 
2025-03-31T16:30:00.745333Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038644;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.752033Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038639;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.752038Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038649;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.758651Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038641;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.758650Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038642;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.765437Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038643;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.765437Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038651;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.771943Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038645;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.771943Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038654;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.778243Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038659;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.778243Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038628;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.784762Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038647;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.784762Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038656;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.790962Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038650;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.791038Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038627;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.797292Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038631;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.797401Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038557;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.891433Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpgjrhb0hs07qv8zt4dstyh", SessionId: ydb://session/3?node_id=1&id=N2JhOThkMmQtNmJkNjM2YTMtNTZkNTcwOTgtNGMzNDE3N2E=, Slow query, duration: 30.863229s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:30:01.196964Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:30:01.196987Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:30:01.197733Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::AbortUnknownUpload [GOOD] Test command err: 2025-03-31T16:30:09.403813Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 999E4231-4A5B-4FC0-B758-F57B55B03F6C, request# AbortMultipartUpload { Bucket: TEST Key: key UploadId: uploadId } REQUEST: DELETE /TEST/key?uploadId=uploadId HTTP/1.1 HEADERS: Host: localhost:27120 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: B5974043-CFDB-4EE8-ABF1-EFC4B89DE8E5 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 6 / /TEST/key / uploadId=uploadId 2025-03-31T16:30:09.410633Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 999E4231-4A5B-4FC0-B758-F57B55B03F6C, response# >> KqpJoinOrder::TPCDS88-ColumnStore [GOOD] |93.5%| [TA] $(B)/ydb/core/wrappers/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.6%| [TA] {RESULT} $(B)/ydb/core/wrappers/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpJoinOrder::TPCDS90+ColumnStore [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS88-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 12002, MsgBus: 6143 2025-03-31T16:28:10.246961Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011298124006667:2138];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:10.246999Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018e5/r3tmp/tmpYygCiX/pdisk_1.dat 2025-03-31T16:28:11.002295Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:28:11.017003Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:28:11.017103Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:28:11.021225Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12002, node 1 2025-03-31T16:28:11.395303Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:28:11.395332Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:28:11.395339Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:28:11.395461Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6143 TClient is connected to server localhost:6143 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:28:12.458411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:14.845431Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011315303876428:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:14.845577Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:14.846237Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011315303876440:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:14.851212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:28:14.875078Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011315303876442:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: { <main>
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:28:14.983670Z node 1 :TX_PROXY ERROR: Actor# [1:7488011315303876493:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:28:15.253799Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011298124006667:2138];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:15.253881Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:28:15.451563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:28:15.652077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:28:15.735248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:28:15.778953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:28:15.828442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:28:16.018799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:28:16.071602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:28:16.120712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:28:16.170548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-31T16:28:16.214900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-03-31T16:28:16.257345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-03-31T16:28:16.321517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 
2025-03-31T16:28:16.360814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:28:17.080515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-03-31T16:28:17.136397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T16:28:17.195024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-31T16:28:17.247199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-31T16:28:17.327552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-31T16:28:17.398062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-31T16:28:17.443959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-31T16:28:17.489799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-31T16:28:17.526130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-03-31T16:28:17.572284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-31T16:28:17.654387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-31T16:28:17.693853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-03-31T16:28:17.725724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-03-31T16:28:17.780758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-03-31T16:28:17.827452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-03-31T16:28:17.858812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-03-31T16:28:17.931583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 2025-03-31T16:28:17.964778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but prop ... _state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:55.008484Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038634;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:55.017970Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038592;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:55.018145Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038589;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:55.024755Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038600;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:55.028877Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038608;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:55.041292Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038612;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:55.043317Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038554;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:55.047283Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038550;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:55.049854Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038633;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:55.055293Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038582;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:55.056645Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038654;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:55.067027Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038626;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:55.068348Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038590;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:55.078245Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038618;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:55.083067Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038586;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:55.092978Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038594;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:55.095951Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038640;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:55.185308Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038656;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:28:55.261826Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpggkd7ed0ds1q8rmnrcqnj", SessionId: ydb://session/3?node_id=1&id=M2Q2MmMzYWMtNmFkYTQwYmEtNDAwZmY0MGItZGExZmEwOTY=, Slow query, duration: 36.021818s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:28:55.879406Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:28:55.879502Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:28:55.879774Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;self_id=[1:7488011396908271440:4410];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038331;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038629;receive=72075186224038170; 2025-03-31T16:28:55.880115Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:30:06.316642Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpgjr2jcc97q8gdap6rcp91", SessionId: ydb://session/3?node_id=1&id=M2Q2MmMzYWMtNmFkYTQwYmEtNDAwZmY0MGItZGExZmEwOTY=, Slow query, duration: 36.761230s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "pragma TablePathPrefix = \"/Root/test/ds/\";\n-- NB: Subquerys\n-- start query 1 in stream 0 using template query88.tpl and seed 
318176889\nselect *\nfrom\n (select count(*) h8_30_to_9\n from store_sales cross join household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 8\n and time_dim.t_minute >= 30\n and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s1 cross join\n (select count(*) h9_to_9_30\n from store_sales cross join household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 9\n and time_dim.t_minute < 30\n and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s2 cross join\n (select count(*) h9_30_to_10\n from store_sales cross join household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 9\n and time_dim.t_minute >= 30\n and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s3 cross join\n (select count(*) h10_to_10_30\n from store_sales cross join household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 10\n and time_dim.t_minute < 30\n and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s4 cross join\n (select count(*) h10_30_to_11\n from store_sales cross join household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 10\n and time_dim.t_minute >= 30\n and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s5 cross join\n (select count(*) h11_to_11_30\n from store_sales cross join household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 11\n and 
time_dim.t_minute < 30\n and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s6 cross join\n (select count(*) h11_30_to_12\n from store_sales cross join household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 11\n and time_dim.t_minute >= 30\n and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s7 cross join\n (select count(*) h12_to_12_30\n from store_sales cross join household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 12\n and time_dim.t_minute < 30\n and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s8\n;", parameters: 0b >> TGRpcConsoleTest::SimpleConfigTest [GOOD] |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest >> KqpJoinOrder::DatetimeConstantFold+ColumnStore [GOOD] |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest >> TGRpcConsoleTest::SimpleConfigTest [GOOD] |93.6%| [TA] $(B)/ydb/services/dynamic_config/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.6%| [TA] {RESULT} $(B)/ydb/services/dynamic_config/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt32 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS90+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 10535, MsgBus: 29100 2025-03-31T16:28:15.613331Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011319914019891:2190];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:15.613418Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018df/r3tmp/tmp8aZtaJ/pdisk_1.dat 2025-03-31T16:28:16.432184Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:28:16.466715Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:28:16.466813Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:28:16.468666Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10535, node 1 2025-03-31T16:28:16.779618Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:28:16.779645Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:28:16.779653Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:28:16.784657Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29100 TClient is connected to server localhost:29100 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:28:17.947282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:20.322321Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011341388856921:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:20.322515Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011341388856910:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:20.322720Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:20.326626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:28:20.340584Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011341388856924:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: { <main>
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:28:20.402447Z node 1 :TX_PROXY ERROR: Actor# [1:7488011341388856975:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:28:20.619219Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011319914019891:2190];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:20.619369Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:28:20.834825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:28:21.136258Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488011341388857217:2353];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:28:21.137504Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488011341388857217:2353];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:28:21.137519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488011341388857222:2355];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:28:21.137555Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488011341388857222:2355];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:28:21.137781Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488011341388857222:2355];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:28:21.137910Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488011341388857222:2355];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:28:21.138008Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488011341388857222:2355];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:28:21.138108Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488011341388857222:2355];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:28:21.138224Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488011341388857222:2355];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:28:21.138347Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037895;self_id=[1:7488011341388857222:2355];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:28:21.138454Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488011341388857222:2355];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:28:21.138545Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488011341388857222:2355];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:28:21.138637Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488011341388857222:2355];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:28:21.138736Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488011341388857222:2355];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:28:21.140296Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488011341388857217:2353];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:28:21.140518Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488011341388857217:2353];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:28:21.140698Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488011341388857217:2353];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:28:21.140872Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488011341388857217:2353];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:28:21.141049Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488011341388857217:2353];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:28:21.141346Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488011341388857217:2353];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:28:21.141546Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488011341388857217:2353];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:28:21.141697Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488011341388857217:2353];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:28:21.142017Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037903;self_id=[1:7488011341388857217:2353];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:28:21.142202Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488011341388857217:2353];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:28:21.193151Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011341388857207:2350];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.c ... tablet_id=72075186224039380;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:46.209843Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:46.215870Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039364;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:46.216321Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039320;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:46.230609Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039328;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:46.234923Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039358;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:46.245826Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039337;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:46.248066Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039330;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:46.256841Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:46.261290Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039325;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:46.264315Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039321;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:46.267660Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039382;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:46.270512Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 
2025-03-31T16:29:46.277247Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:46.282345Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:46.283504Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:46.287905Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039346;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:46.288519Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:46.294096Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039354;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:46.294110Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:46.300279Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039336;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:46.300749Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039374;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:46.307192Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:46.313670Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039372;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:46.314095Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039370;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:46.324577Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:46.327219Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039339;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:46.333148Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:46.340976Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039341;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:46.342857Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:46.348326Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039313;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:46.351129Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039299;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:46.356197Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039333;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:46.357434Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:46.362895Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039324;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:46.365699Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:46.371735Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039342;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:46.377291Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:46.379857Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:46.387687Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:46.390712Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:46.401842Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:46.403033Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:46.411787Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:46.412692Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:46.419624Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:46.426264Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:46.631583Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpgj48w8y9j9v402a554gp2", SessionId: ydb://session/3?node_id=1&id=NmNjZGRkNWMtNGY1YmU3MmYtZGVjNzljMzEtMzc1ZThmZWU=, Slow query, duration: 37.354425s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:29:47.192463Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:29:47.192957Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:29:47.193861Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::SimpleEcho |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::WriteAndFinishWorks >> TGRpcStreamingTest::ClientDisconnects >> TGRpcStreamingTest::WritesDoneFromClient >> TGRpcStreamingTest::ReadFinish ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::DatetimeConstantFold+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 27779, MsgBus: 24806 2025-03-31T16:28:36.392516Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011412808937813:2213];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:36.399581Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018ca/r3tmp/tmpJiRGZP/pdisk_1.dat 2025-03-31T16:28:37.115751Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:28:37.115878Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Disconnected -> Connecting 2025-03-31T16:28:37.135125Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:28:37.197243Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27779, node 1 2025-03-31T16:28:37.393704Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:28:37.393725Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:28:37.393732Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:28:37.393851Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24806 TClient is connected to server localhost:24806 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:28:38.633300Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:38.659162Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:28:41.379747Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011412808937813:2213];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:41.380554Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:28:41.453454Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011434283774793:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:41.453590Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:41.454908Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011434283774805:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:41.459380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:28:41.481776Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011434283774807:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:28:41.580086Z node 1 :TX_PROXY ERROR: Actor# [1:7488011434283774859:2343] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:28:41.992241Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:28:42.320273Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011438578742444:2360];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:28:42.320704Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011438578742444:2360];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:28:42.321025Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011438578742444:2360];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:28:42.321193Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011438578742444:2360];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:28:42.321316Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011438578742444:2360];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:28:42.321448Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011438578742444:2360];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:28:42.321604Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011438578742444:2360];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:28:42.321962Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011438578742444:2360];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:28:42.322087Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011438578742444:2360];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:28:42.322211Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011438578742444:2360];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:28:42.322343Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[1:7488011438578742444:2360];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:28:42.322445Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011438578742444:2360];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:28:42.379988Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488011438578742418:2353];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:28:42.380050Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488011438578742418:2353];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:28:42.380292Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488011438578742418:2353];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:28:42.380416Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488011438578742418:2353];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:28:42.380538Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488011438578742418:2353];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:28:42.380645Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488011438578742418:2353];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:28:42.383749Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488011438578742418:2353];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:28:42.384012Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488011438578742418:2353];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:28:42.384129Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488011438578742418:2353];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:28:42.384257Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488011438578742418:2353];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:28:42.384377Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488011438578742418:2353];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:28:42.384487Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7488011438578742418:2353];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:28:42.414109Z node 1 :T ... oller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:03.059962Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039285;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:03.065943Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039239;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:03.070667Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:03.071713Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039317;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:03.076801Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039291;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:03.077584Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039313;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:03.084606Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039273;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:03.084625Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:03.093039Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039241;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:03.094825Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039259;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:03.099693Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:03.100394Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039333;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:03.106315Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:03.106535Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039283;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:03.112873Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:03.113736Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039303;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:03.119696Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:03.119787Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039277;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:03.125637Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:03.126382Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039319;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:03.131884Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039341;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:03.132271Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:03.137153Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039339;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:03.138340Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039309;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:03.143135Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039323;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:03.144376Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039245;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:03.149120Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039315;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:03.150751Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039225;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:03.155350Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:03.156961Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:03.161437Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:03.163068Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:03.168033Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:03.169836Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:03.174268Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039343;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:03.176567Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:03.180406Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:03.182991Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039335;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:03.187249Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:03.189643Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:03.193502Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:03.197227Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039345;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:03.199512Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039257;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:03.204043Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039305;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:03.205448Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039337;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:03.414304Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpgjs58f8gv7048evy76n5d", SessionId: ydb://session/3?node_id=1&id=NGI1ZDI5NjEtNzEzN2I3MzgtZTQ2ZjI3ZS04Mzk5ODdiNw==, Slow query, duration: 32.749721s, 
status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:30:03.737084Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:30:03.737305Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;self_id=[1:7488011657622110872:7954];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038933;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039392;receive=72075186224039094; 2025-03-31T16:30:03.737559Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:30:03.739126Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt32 [GOOD] Test command err: 2025-03-31T16:21:31.178083Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:21:31.264964Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:21:31.290020Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:21:31.290317Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:21:31.298670Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:21:31.298885Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:21:31.299140Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:21:31.299256Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:21:31.299370Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:21:31.299519Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:21:31.299684Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:21:31.299810Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:21:31.299925Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:21:31.300024Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:21:31.300157Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:21:31.300273Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:21:31.330055Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:21:31.330179Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:21:31.330215Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:21:31.330358Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:21:31.330479Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:21:31.330535Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:21:31.330566Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:21:31.330627Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:21:31.330665Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:21:31.330694Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:21:31.330749Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:21:31.330918Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:21:31.330974Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:21:31.331014Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:21:31.331033Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:21:31.331145Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:21:31.331217Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:21:31.331270Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:21:31.331298Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:21:31.331372Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:21:31.331413Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:21:31.331441Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:21:31.331493Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:21:31.331529Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:21:31.331559Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:21:31.331911Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=36; 2025-03-31T16:21:31.331975Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=26; 2025-03-31T16:21:31.332041Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=29; 2025-03-31T16:21:31.332103Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=30; 2025-03-31T16:21:31.332274Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:21:31.332329Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:21:31.332359Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:21:31.332533Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:21:31.332573Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:21:31.332599Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:21:31.332696Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:21:31.332724Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:21:31.332741Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-31T16:21:31.332981Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:21:31.333026Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:21:31.333063Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-31T16:21:31.333144Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T16:21:31.333177Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T16:21:31.333226Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
31T16:30:12.523889Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:10507:12468];process=Enqueue;ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1503;stage=finished; 2025-03-31T16:30:13.487825Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-03-31T16:30:13.487958Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=16; 2025-03-31T16:30:13.488611Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=556; 2025-03-31T16:30:13.488669Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=646; 2025-03-31T16:30:13.494917Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-03-31T16:30:13.495037Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=15; 2025-03-31T16:30:13.519998Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=24729; 2025-03-31T16:30:13.547447Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=25682; 2025-03-31T16:30:13.547610Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=27480; 2025-03-31T16:30:13.547848Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=147; 2025-03-31T16:30:13.548022Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=111; 2025-03-31T16:30:13.548263Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=181; 2025-03-31T16:30:13.548485Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=161; 2025-03-31T16:30:13.548783Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=226; 2025-03-31T16:30:13.548847Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=53642; 2025-03-31T16:30:13.555782Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-03-31T16:30:13.555902Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=15; 2025-03-31T16:30:13.561167Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=5131; 2025-03-31T16:30:13.599302Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=37987; 2025-03-31T16:30:13.599477Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=53; 2025-03-31T16:30:13.599591Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=53; 2025-03-31T16:30:13.599658Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=11; 2025-03-31T16:30:13.599714Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=13; 2025-03-31T16:30:13.599764Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=9; 2025-03-31T16:30:13.599850Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=48; 2025-03-31T16:30:13.599898Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=8; 2025-03-31T16:30:13.600011Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=73; 2025-03-31T16:30:13.600060Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=8; 2025-03-31T16:30:13.600127Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=29; 2025-03-31T16:30:13.600267Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=87; 2025-03-31T16:30:13.600385Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=67; 2025-03-31T16:30:13.600425Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=44459; 2025-03-31T16:30:13.600611Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=113961708;raw_bytes=176366876;count=47;records=1845000} inactive {blob_bytes=174125508;raw_bytes=270077548;count=81;records=2819164} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-03-31T16:30:13.601669Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:10507:12468];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-03-31T16:30:13.601758Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:10507:12468];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-03-31T16:30:13.601845Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:10507:12468];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-03-31T16:30:13.601900Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10507:12468];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=0; 2025-03-31T16:30:13.602119Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-31T16:30:13.602207Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:30:13.602486Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=20; 2025-03-31T16:30:13.602568Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2025-03-31T16:30:13.602615Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=20;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-31T16:30:13.602671Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-31T16:30:13.602715Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-31T16:30:13.602848Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-31T16:30:13.608415Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:30:13.614366Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10507:12468];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-03-31T16:30:13.615827Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10507:12468];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-03-31T16:30:13.615904Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-03-31T16:30:13.615950Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-03-31T16:30:13.616028Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10507:12468];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-31T16:30:13.616144Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10507:12468];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:30:13.616499Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10507:12468];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=20; 2025-03-31T16:30:13.616614Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10507:12468];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2025-03-31T16:30:13.616693Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10507:12468];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=20;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-31T16:30:13.616776Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10507:12468];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-31T16:30:13.616841Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10507:12468];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-31T16:30:13.617041Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10507:12468];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.999000s; 2025-03-31T16:30:13.617165Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10507:12468];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH5 [GOOD] >> TGRpcStreamingTest::WriteAndFinishWorks [GOOD] >> TGRpcStreamingTest::SimpleEcho [GOOD] >> TGRpcStreamingTest::ClientDisconnects [GOOD] >> TGRpcStreamingTest::WritesDoneFromClient [GOOD] >> TGRpcStreamingTest::ReadFinish [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::ClientDisconnects [GOOD] Test command err: 2025-03-31T16:30:15.982534Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011835543558804:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:15.990052Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/0l5c/0013bf/r3tmp/tmppuoQ40/pdisk_1.dat 2025-03-31T16:30:16.482427Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:30:16.501570Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:30:16.501676Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:30:16.512602Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:30:16.617644Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] stream accepted Name# Session ok# true peer# ipv6:[::1]:57676 2025-03-31T16:30:16.618063Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] facade attach Name# Session actor# [1:7488011839838526626:2256] peer# ipv6:[::1]:57676 2025-03-31T16:30:16.618850Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] stream done notification Name# Session ok# true peer# ipv6:[::1]:57676 2025-03-31T16:30:16.618954Z node 1 :GRPC_SERVER DEBUG: Received TEvNotifiedWhenDone 2025-03-31T16:30:16.619417Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] stream finished Name# Session ok# false peer# unknown grpc status# (1) message# Request abandoned 2025-03-31T16:30:16.619474Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] deregistering request Name# Session peer# unknown (finish done) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::WriteAndFinishWorks [GOOD] Test command err: 2025-03-31T16:30:15.966538Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011836692475332:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:15.966590Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00138c/r3tmp/tmpA9G01n/pdisk_1.dat 2025-03-31T16:30:16.457705Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:30:16.512071Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:30:16.512180Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:30:16.516119Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:30:16.622939Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] stream accepted Name# Session ok# true peer# ipv6:[::1]:40612 2025-03-31T16:30:16.623326Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] facade attach Name# Session actor# [1:7488011840987443163:2257] peer# ipv6:[::1]:40612 2025-03-31T16:30:16.623363Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] facade write Name# Session data# peer# ipv6:[::1]:40612 2025-03-31T16:30:16.623762Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] facade write Name# Session data# peer# ipv6:[::1]:40612 grpc status# (0) message# 2025-03-31T16:30:16.624373Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] write finished Name# Session ok# true peer# ipv6:[::1]:40612 2025-03-31T16:30:16.624490Z node 1 :GRPC_SERVER DEBUG: Received TEvWriteFinished, success = 1 2025-03-31T16:30:16.624844Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] write finished Name# Session ok# true peer# ipv6:[::1]:40612 2025-03-31T16:30:16.624848Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] stream done notification Name# 
Session ok# true peer# ipv6:[::1]:40612 2025-03-31T16:30:16.624879Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] stream finished Name# Session ok# true peer# ipv6:[::1]:40612 grpc status# (0) message# 2025-03-31T16:30:16.624911Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] deregistering request Name# Session peer# ipv6:[::1]:40612 (finish done) 2025-03-31T16:30:16.624913Z node 1 :GRPC_SERVER DEBUG: Received TEvWriteFinished, success = 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::SimpleEcho [GOOD] Test command err: 2025-03-31T16:30:15.972705Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011834860341956:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:15.972769Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001404/r3tmp/tmpA3UpsJ/pdisk_1.dat 2025-03-31T16:30:16.533938Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:30:16.558816Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:30:16.559201Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:30:16.568158Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:30:16.652842Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] stream accepted Name# Session ok# true peer# ipv6:[::1]:38220 2025-03-31T16:30:16.653192Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] facade attach Name# Session actor# [1:7488011839155309788:2258] peer# ipv6:[::1]:38220 2025-03-31T16:30:16.653227Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] facade read Name# Session peer# ipv6:[::1]:38220 2025-03-31T16:30:16.653597Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] read finished Name# Session ok# true data# peer# ipv6:[::1]:38220 2025-03-31T16:30:16.653732Z node 1 :GRPC_SERVER DEBUG: Received TEvReadFinished, success = 1 2025-03-31T16:30:16.653784Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] facade write Name# Session data# peer# ipv6:[::1]:38220 2025-03-31T16:30:16.653971Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] facade finish Name# Session peer# ipv6:[::1]:38220 grpc status# (0) message# 2025-03-31T16:30:16.654202Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] write finished Name# Session ok# true peer# ipv6:[::1]:38220 2025-03-31T16:30:16.654632Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] stream done notification Name# Session ok# true peer# ipv6:[::1]:38220 2025-03-31T16:30:16.654643Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] stream finished Name# Session ok# true peer# ipv6:[::1]:38220 grpc status# (0) message# 2025-03-31T16:30:16.654703Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] deregistering request Name# Session peer# ipv6:[::1]:38220 (finish done) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::WritesDoneFromClient [GOOD] Test command err: 2025-03-31T16:30:15.973588Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011836892298255:2264];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:15.973665Z node 1 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0013e1/r3tmp/tmpTiLk6e/pdisk_1.dat
2025-03-31T16:30:16.502760Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:30:16.508991Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:30:16.509114Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:30:16.514813Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-31T16:30:16.636838Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] stream accepted Name# Session ok# true peer# ipv6:[::1]:57220
2025-03-31T16:30:16.637224Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] facade attach Name# Session actor# [1:7488011841187265875:2258] peer# ipv6:[::1]:57220
2025-03-31T16:30:16.637268Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] facade read Name# Session peer# ipv6:[::1]:57220
2025-03-31T16:30:16.637594Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] read finished Name# Session ok# false data# peer# ipv6:[::1]:57220
2025-03-31T16:30:16.637667Z node 1 :GRPC_SERVER DEBUG: Received TEvReadFinished, success = 0
2025-03-31T16:30:16.637729Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] facade finish Name# Session peer# ipv6:[::1]:57220 grpc status# (9) message# Everything is A-OK
2025-03-31T16:30:16.638271Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] stream done notification Name# Session ok# true peer# ipv6:[::1]:57220
2025-03-31T16:30:16.638337Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] stream finished Name# Session ok# true peer# ipv6:[::1]:57220 grpc status# (9) message# Everything is A-OK
2025-03-31T16:30:16.638373Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] deregistering request Name# Session peer# ipv6:[::1]:57220 (finish done)
2025-03-31T16:30:16.638504Z node 1 :GRPC_SERVER DEBUG: Received TEvNotifiedWhenDone
------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::ReadFinish [GOOD]
Test command err:
2025-03-31T16:30:16.007214Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011836324662265:2262];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:30:16.007497Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0013f5/r3tmp/tmpuz8uuO/pdisk_1.dat
2025-03-31T16:30:16.610513Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:30:16.613387Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:30:16.613487Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:30:16.619078Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-31T16:30:16.732746Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] stream accepted Name# Session ok# true peer# ipv6:[::1]:59530
2025-03-31T16:30:16.733439Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] facade attach Name# Session actor# [1:7488011840619629885:2258] peer# ipv6:[::1]:59530
2025-03-31T16:30:16.733463Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] facade read Name# Session peer# ipv6:[::1]:59530
2025-03-31T16:30:16.733531Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] facade finish Name# Session peer# ipv6:[::1]:59530 grpc status# (0) message#
2025-03-31T16:30:16.735196Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] read finished Name# Session ok# false data# peer# ipv6:[::1]:59530
2025-03-31T16:30:16.735275Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] stream done notification Name# Session ok# true peer# ipv6:[::1]:59530
2025-03-31T16:30:16.735310Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] stream finished Name# Session ok# true peer# ipv6:[::1]:59530 grpc status# (0) message#
2025-03-31T16:30:16.735356Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] deregistering request Name# Session peer# ipv6:[::1]:59530 (finish done)
2025-03-31T16:30:16.735415Z node 1 :GRPC_SERVER DEBUG: Received TEvReadFinished, success = 0
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-37
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-31
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-1
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-1
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-2
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-43
>> KqpJoinOrder::TPCDS95+ColumnStore [GOOD]
|93.7%| [TA] $(B)/ydb/core/grpc_streaming/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|93.7%| [TA] {RESULT} $(B)/ydb/core/grpc_streaming/ut/test-results/unittest/{meta.json ... results_accumulator.log}
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH5 [GOOD]
Test command err:
Trying to start YDB, gRPC: 8225, MsgBus: 4959
2025-03-31T16:28:32.553626Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011392399039911:2199];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:28:32.553943Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018cf/r3tmp/tmp9QyJuE/pdisk_1.dat
2025-03-31T16:28:33.211297Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:28:33.211400Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:28:33.213106Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-31T16:28:33.223421Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 8225, node 1
2025-03-31T16:28:33.415612Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T16:28:33.415634Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T16:28:33.415645Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T16:28:33.415745Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:4959
TClient is connected to server localhost:4959
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-31T16:28:34.546216Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:28:34.569432Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-03-31T16:28:37.177955Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011413873876924:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:28:37.178102Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:28:37.178212Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011413873876935:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:28:37.188934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480
2025-03-31T16:28:37.217610Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011413873876938:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:28:37.312330Z node 1 :TX_PROXY ERROR: Actor# [1:7488011413873876989:2343] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:28:37.541242Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011392399039911:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:37.541304Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:28:37.753944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:28:38.107717Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488011413873877258:2354];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:28:38.107954Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488011413873877258:2354];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:28:38.108228Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488011413873877258:2354];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:28:38.108329Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488011413873877258:2354];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:28:38.108429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488011413873877258:2354];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:28:38.108533Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488011413873877258:2354];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:28:38.108624Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488011413873877258:2354];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:28:38.108725Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488011413873877258:2354];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:28:38.108865Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488011413873877258:2354];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:28:38.108979Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037902;self_id=[1:7488011413873877258:2354];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:28:38.109085Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488011413873877258:2354];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:28:38.109191Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488011413873877258:2354];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:28:38.171048Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488011413873877272:2361];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:28:38.175206Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488011413873877272:2361];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:28:38.175468Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488011413873877272:2361];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:28:38.175567Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488011413873877272:2361];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:28:38.175674Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488011413873877272:2361];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:28:38.175765Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488011413873877272:2361];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:28:38.175864Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488011413873877272:2361];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:28:38.175972Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488011413873877272:2361];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:28:38.176078Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488011413873877272:2361];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:28:38.176183Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488011413873877272:2361];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:28:38.176307Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7488011413873877272:2361];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:28:38.176431Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488011413873877272:2361];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:28:38.181653Z node 1 :TX_COL ... tablet_id=72075186224039291;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.833460Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.838758Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039345;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.839503Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039295;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.845079Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039367;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.845375Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039273;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.851862Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039321;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.855859Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.857973Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039257;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.861197Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039225;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.863805Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.870858Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.874937Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039221;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.876026Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.880578Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039329;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.881175Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.885813Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039289;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.886696Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.891453Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039269;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.892204Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.898516Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039315;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.898531Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.904499Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.904499Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.910073Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039301;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.910072Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.915744Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.915744Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039313;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.921369Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.921369Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.927155Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039223;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.927155Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.932677Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.932677Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039287;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.938346Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039333;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.938346Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.943947Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.943947Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.949831Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.949831Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.955779Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.955779Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.961466Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.962718Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.967487Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:00.968713Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:01.064241Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039237;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:01.110718Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpgjp5r5qnqccyz5p6qd69a", SessionId: ydb://session/3?node_id=1&id=NTAzYzJlY2ItMTcwNzQzODItODBlY2I3MGUtYTU4OWEzOWQ=, Slow query, duration: 33.499831s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:30:01.464381Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:30:01.464400Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:30:01.465067Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-25 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-25 >> TPQCompatTest::LongProducerAndLongMessageGroupId [GOOD] >> TPQCompatTest::ReadWriteSessions >> KqpJoinOrder::CanonizedJoinOrderTPCH7 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-25 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS95+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 4990, MsgBus: 19948 2025-03-31T16:28:18.924108Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011334302519027:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:18.925179Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018db/r3tmp/tmpkhqF7m/pdisk_1.dat 2025-03-31T16:28:19.588611Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:28:19.588699Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:28:19.597761Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Connecting -> Connected
2025-03-31T16:28:19.646995Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 4990, node 1
2025-03-31T16:28:19.847680Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T16:28:19.847702Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T16:28:19.847711Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T16:28:19.847834Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:19948
TClient is connected to server localhost:19948
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-31T16:28:20.817755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:28:20.848019Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-03-31T16:28:23.227567Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011355777356192:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:28:23.231677Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011355777356180:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:28:23.231841Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:28:23.232501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480
2025-03-31T16:28:23.255435Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011355777356194:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:28:23.323873Z node 1 :TX_PROXY ERROR: Actor# [1:7488011355777356245:2342] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:28:23.785252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:28:23.927254Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011334302519027:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:23.927341Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:28:24.040960Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488011355777356515:2355];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:28:24.041169Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488011355777356515:2355];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:28:24.041468Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488011355777356515:2355];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:28:24.041667Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488011355777356515:2355];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:28:24.041771Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488011355777356515:2355];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:28:24.041903Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488011355777356515:2355];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:28:24.042018Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488011355777356515:2355];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:28:24.042122Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011355777356507:2351];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:28:24.042150Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488011355777356515:2355];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:28:24.042173Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7488011355777356507:2351];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:28:24.042368Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488011355777356515:2355];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:28:24.042497Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488011355777356515:2355];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:28:24.042599Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488011355777356515:2355];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:28:24.042701Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488011355777356515:2355];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:28:24.047333Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011355777356507:2351];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:28:24.047482Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011355777356507:2351];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:28:24.047613Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011355777356507:2351];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:28:24.047737Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011355777356507:2351];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:28:24.047847Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011355777356507:2351];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:28:24.047952Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011355777356507:2351];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:28:24.048049Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011355777356507:2351];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:28:24.048164Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011355777356507:2351];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:28:24.048299Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7488011355777356507:2351];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:28:24.048432Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011355777356507:2351];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:28:24.112267Z node 1 :TX_ ... 81474976710714; 2025-03-31T16:29:47.493645Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:47.498873Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039336;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:47.499360Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:47.504077Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039335;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:47.504920Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:47.510819Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:47.511438Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:47.516692Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039348;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:47.520295Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:47.526504Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:47.528719Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:47.531946Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:47.536154Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:47.538933Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:47.541862Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039376;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:47.545270Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:47.548382Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:47.551216Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:47.553976Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:47.557480Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:47.561919Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:47.563905Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:47.568964Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:47.570976Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:47.576105Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:47.577468Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:47.583310Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:47.583794Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:47.589893Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:47.590369Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039201;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:47.595828Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039343;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:47.596582Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039374;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:47.601717Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:47.602842Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:47.607738Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:47.672148Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:47.673734Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:47.678189Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039285;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:29:47.797916Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpgj4d7a35ypwmfjve3qwkg", SessionId: ydb://session/3?node_id=1&id=NzI5NzJkNzgtODk4ZjZlZmQtYjIzMDc3MzEtYmU3M2YyMDQ=, Slow query, duration: 38.382052s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:29:48.427043Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:29:48.427845Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;self_id=[1:7488011561935821896:7653];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038933;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039392;receive=72075186224039094; 2025-03-31T16:29:48.428371Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:29:48.429073Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:30:12.952628Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpgkp4j57aevbx5n5qfgcgf", SessionId: ydb://session/3?node_id=1&id=NzI5NzJkNzgtODk4ZjZlZmQtYjIzMDc3MzEtYmU3M2YyMDQ=, Slow query, duration: 12.613219s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "pragma TablePathPrefix = \"/Root/test/ds/\";\n-- NB: Subquerys\n$ws_wh =\n(select ws1.ws_order_number ws_order_number,ws1.ws_warehouse_sk wh1,ws2.ws_warehouse_sk wh2\n from web_sales ws1 cross join web_sales ws2\n where ws1.ws_order_number = ws2.ws_order_number\n and ws1.ws_warehouse_sk <> ws2.ws_warehouse_sk);\n-- start query 1 in stream 0 using template query95.tpl and seed 2031708268\n select\n count(distinct ws1.ws_order_number) as `order count`\n ,sum(ws_ext_ship_cost) as `total shipping cost`\n ,sum(ws_net_profit) as `total net profit`\nfrom\n web_sales ws1\n cross join date_dim\n cross join customer_address\n cross join web_site\nwhere\n cast(d_date as date) between cast('2002-4-01' as date) and\n (cast('2002-4-01' as date) + DateTime::IntervalFromDays(60))\nand ws1.ws_ship_date_sk = d_date_sk\nand ws1.ws_ship_addr_sk = ca_address_sk\nand ca_state = 'AL'\nand ws1.ws_web_site_sk = web_site_sk\nand web_company_name = 'pri'\nand ws1.ws_order_number in (select ws_order_number\n from $ws_wh)\nand ws1.ws_order_number in (select wr_order_number\n from web_returns cross join $ws_wh ws_wh\n where wr_order_number = ws_wh.ws_order_number)\norder by `order count`\nlimit 100;\n", parameters: 0b >> TDatabaseResolverTests::DataStreams_Dedicated >> TDatabaseResolverTests::Ydb_Serverless_Timeout >> TDatabaseResolverTests::PostgreSQL >> TDatabaseResolverTests::PostgreSQL [GOOD] >> TDatabaseResolverTests::DataStreams_Dedicated [GOOD] >> TDatabaseResolverTests::ClickHouse_PermissionDenied >> TDatabaseResolverTests::Ydb_Serverless_Timeout [GOOD] >> TDatabaseResolverTests::PostgreSQL_PermissionDenied [GOOD] >> TDatabaseResolverTests::ClickHouse_PermissionDenied [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::Ydb_Serverless_Timeout [GOOD] Test command err: 2025-03-31T16:30:24.635075Z node 1 :FQ_DATABASE_RESOLVER ERROR: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed Ydb database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'ydbc.ydb.cloud.yandex.net:8789', url '/ydbc/cloud-prod/database?databaseId=etn021us5r9rhld1vgbh': Connection timeout ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::ClickHouse_PermissionDenied [GOOD] Test command err: 2025-03-31T16:30:24.725562Z node 2 :FQ_DATABASE_RESOLVER ERROR: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed ClickHouse database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'mdb.api.cloud.yandex.net:443', url '/managed-clickhouse/v1/clusters/etn021us5r9rhld1vgbh/hosts': you have no permission to resolve database id into database endpoint. 
Please check that your service account has role `managed-clickhouse.viewer`. ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::PostgreSQL_PermissionDenied [GOOD] Test command err: 2025-03-31T16:30:24.735030Z node 2 :FQ_DATABASE_RESOLVER ERROR: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed PostgreSQL database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'mdb.api.cloud.yandex.net:443', url '/managed-postgresql/v1/clusters/etn021us5r9rhld1vgbh/hosts': you have no permission to resolve database id into database endpoint. Please check that your service account has role `managed-postgresql.viewer`. >> TDatabaseResolverTests::ResolveTwoDataStreamsFirstError >> TDatabaseResolverTests::ResolveTwoDataStreamsFirstError [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH7 [GOOD] Test command err: Trying to start YDB, gRPC: 11740, MsgBus: 7057 2025-03-31T16:28:38.654538Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011419826003978:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:38.659610Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018c6/r3tmp/tmper2QVy/pdisk_1.dat 2025-03-31T16:28:39.506035Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:28:39.538140Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:28:39.538256Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:28:39.541665Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11740, node 1 2025-03-31T16:28:39.819637Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:28:39.819666Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:28:39.819676Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:28:39.819788Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7057 TClient is connected to server localhost:7057 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:28:40.900246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:40.936233Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:28:43.422770Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011441300840986:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:43.422877Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:43.423196Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011441300840998:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:43.427243Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:28:43.440289Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-31T16:28:43.441183Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011441300841000:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:28:43.507888Z node 1 :TX_PROXY ERROR: Actor# [1:7488011441300841051:2340] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:28:43.610226Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011419826003978:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:43.610296Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:28:43.817358Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:28:44.121186Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488011441300841276:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:28:44.121410Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488011441300841276:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:28:44.121680Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488011441300841276:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:28:44.121798Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488011441300841276:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:28:44.121941Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488011441300841276:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:28:44.122052Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488011441300841276:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:28:44.122117Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011445595808645:2358];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:28:44.122220Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488011441300841276:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:28:44.122349Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488011441300841276:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:28:44.122462Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7488011441300841276:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:28:44.122559Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488011441300841276:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:28:44.122651Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488011441300841276:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:28:44.122780Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488011441300841276:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:28:44.122966Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011445595808645:2358];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:28:44.123130Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011445595808645:2358];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:28:44.123223Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011445595808645:2358];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:28:44.123318Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011445595808645:2358];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:28:44.123435Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011445595808645:2358];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:28:44.123538Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011445595808645:2358];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:28:44.123650Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011445595808645:2358];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:28:44.123770Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011445595808645:2358];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:28:44.123883Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011445595808645:2358];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:28:44.123983Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[1:7488011445595808645:2358];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:28:44.124085Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011445595808645:2358];tablet_id=72075186224037900 ... ller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:04.383965Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039197;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:04.384036Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039229;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:04.390285Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039227;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:04.390439Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039217;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:04.396493Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039231;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:04.396497Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039193;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:04.402947Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039277;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:04.402949Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039311;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:04.410141Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039301;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:04.410154Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039259;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:04.416736Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039249;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:04.416748Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039225;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:04.423063Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039215;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:04.423164Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039295;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:04.429057Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039223;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:04.429062Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039251;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:04.435357Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039257;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:04.435365Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039267;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:04.441633Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039289;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:04.441652Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039283;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:04.448081Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039243;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:04.448088Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039303;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:04.454363Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039201;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:04.454671Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039265;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:04.460781Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039245;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:04.460816Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039261;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:04.466385Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039271;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:04.466873Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039281;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:04.473597Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039269;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:04.473637Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039279;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:04.479931Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039275;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:04.479945Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039237;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:04.486182Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039239;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:04.486187Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039247;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:04.492519Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039241;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:04.492537Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039307;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:04.498026Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039297;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:04.498394Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039299;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:04.504637Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039255;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:04.504646Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039273;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:04.511446Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039318;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:04.511466Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039320;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:04.518275Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039313;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:04.518395Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039195;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:04.524971Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039309;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:04.660910Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpgjvdg96qpjqxp9059ppwa", SessionId: ydb://session/3?node_id=1&id=ZGY4MDE5OGUtY2VjNWJjYTctM2E3YmNmMGEtYzZjMzIyNTg=, Slow query, duration: 31.683546s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:30:04.912412Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:30:04.913021Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;self_id=[1:7488011754833510694:11111];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039392;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038933;receive=72075186224039094; 2025-03-31T16:30:04.913394Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:30:04.914212Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-1 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-10 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-43 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-44 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-1 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-2 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-37 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-38 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-2 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-3 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-31 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-32 >> TAsyncIndexTests::DropTableWithInflightChanges[PipeResets] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::ResolveTwoDataStreamsFirstError [GOOD] Test command err: 2025-03-31T16:30:25.605759Z node 1 :FQ_DATABASE_RESOLVER ERROR: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed DataStreams database with id etn021us5r9rhld1vgb1 via HTTP request to: endpoint 'ydbc.ydb.cloud.yandex.net:8789', url 
'/ydbc/cloud-prod/database?databaseId=etn021us5r9rhld1vgb1': Status: 404 Response body: {"message":"Database not found"} >> TDatabaseResolverTests::Greenplum_MasterNode >> TDatabaseResolverTests::Greenplum_MasterNode [GOOD] >> TDatabaseResolverTests::Greenplum_PermissionDenied [GOOD] >> TDatabaseResolverTests::ClickHouseNative [GOOD] >> TDatabaseResolverTests::ClickHouseHttp >> TPersQueueTest::Codecs_WriteMessageWithNonDefaultCodecThatHasToBeConfiguredAdditionally_SessionClosedWithBadRequestError [GOOD] >> TPersQueueTest::CreateTopicWithMeteringMode ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::DropTableWithInflightChanges[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [1:135:2157] sender: [1:136:2058] recipient: [1:111:2142] 2025-03-31T16:29:56.293814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:29:56.293921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:29:56.293972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:29:56.294009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:29:56.294067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:29:56.294101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:29:56.294177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:29:56.294290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:29:56.294631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:29:56.389157Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" 
AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-03-31T16:29:56.389229Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:169:2058] recipient: [1:15:2062] 2025-03-31T16:29:56.408901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:29:56.409093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:29:56.409260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:29:56.414191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:29:56.414433Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:29:56.415071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:56.415319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:29:56.419796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:56.421304Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:29:56.421365Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:56.421482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:29:56.421535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:29:56.421579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:29:56.421702Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:210:2058] recipient: [1:208:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:210:2058] recipient: [1:208:2209] Leader for TabletID 72057594037968897 is [1:214:2213] sender: [1:215:2058] recipient: [1:208:2209] 2025-03-31T16:29:56.430382Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:235:2058] recipient: [1:15:2062] 2025-03-31T16:29:56.560987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:29:56.561290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:56.561523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:29:56.561825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 
2025-03-31T16:29:56.561889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:56.564562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:56.564715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:29:56.564966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:56.565037Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:29:56.565073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:29:56.565110Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:29:56.567128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:56.567190Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:29:56.567227Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:29:56.569212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:56.569259Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:56.569302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:56.569356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:29:56.573297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:29:56.575470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:29:56.575689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:135:2157] sender: [1:250:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:29:56.576603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:56.576725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 135 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 
72057594046678944 2025-03-31T16:29:56.576770Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:56.576972Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:29:56.577010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:56.577149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:29:56.577236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:29:56.578883Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:29:56.578922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:29:56.579056Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:56.579108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:202:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:29:56.579463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:56.579528Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:29:56.579654Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:29:56.579690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:29:56.579733Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:29:56.579793Z no ... 
perationId: 1003:2, at schemeshard: 72057594046678944 2025-03-31T16:30:25.709344Z node 26 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 1003:2 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:30:25.709535Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-03-31T16:30:25.709623Z node 26 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:2 progress is 2/3 2025-03-31T16:30:25.709648Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 2/3 2025-03-31T16:30:25.709675Z node 26 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:2 progress is 2/3 2025-03-31T16:30:25.709696Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 2/3 2025-03-31T16:30:25.709721Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 2/3, is published: false 2025-03-31T16:30:25.710942Z node 26 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-03-31T16:30:25.711007Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-03-31T16:30:25.711028Z node 26 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2025-03-31T16:30:25.712343Z node 26 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-03-31T16:30:25.712438Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-03-31T16:30:25.712469Z node 26 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2025-03-31T16:30:25.712502Z node 26 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2025-03-31T16:30:25.712540Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-03-31T16:30:25.712620Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 2/3, is published: true 2025-03-31T16:30:25.717054Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-03-31T16:30:25.717110Z node 26 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 1003:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:30:25.717323Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-03-31T16:30:25.717450Z node 26 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 3/3 2025-03-31T16:30:25.717486Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2025-03-31T16:30:25.717524Z 
node 26 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 3/3 2025-03-31T16:30:25.717555Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2025-03-31T16:30:25.717590Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 3/3, is published: true 2025-03-31T16:30:25.717619Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2025-03-31T16:30:25.717647Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2025-03-31T16:30:25.717670Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2025-03-31T16:30:25.717780Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-03-31T16:30:25.717820Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:1 2025-03-31T16:30:25.717846Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:1 2025-03-31T16:30:25.717871Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-03-31T16:30:25.717890Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:2 2025-03-31T16:30:25.717906Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:2 2025-03-31T16:30:25.717936Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-03-31T16:30:25.718575Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-03-31T16:30:25.719024Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-03-31T16:30:25.722163Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-03-31T16:30:25.722217Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-03-31T16:30:25.722368Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-03-31T16:30:25.724309Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-03-31T16:30:25.726004Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 343 RawX2: 111669152023 } TabletId: 72075186233409546 State: 4 2025-03-31T16:30:25.726066Z node 26 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409546, state: Offline, at schemeshard: 72057594046678944 2025-03-31T16:30:25.727628Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-03-31T16:30:25.728074Z node 26 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409546 2025-03-31T16:30:25.728249Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 
2025-03-31T16:30:25.728493Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 Forgetting tablet 72075186233409546 2025-03-31T16:30:25.731045Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-31T16:30:25.731115Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-03-31T16:30:25.731189Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-03-31T16:30:25.731234Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-03-31T16:30:25.731273Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-31T16:30:25.734368Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-03-31T16:30:25.734432Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409546 2025-03-31T16:30:25.734964Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2025-03-31T16:30:25.735224Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-03-31T16:30:25.735266Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-03-31T16:30:25.736294Z node 26 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-03-31T16:30:25.736376Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-03-31T16:30:25.736412Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [26:624:2550] 2025-03-31T16:30:25.741561Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 347 RawX2: 111669152026 } TabletId: 72075186233409547 State: 4 2025-03-31T16:30:25.741639Z node 26 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409547, state: Offline, at schemeshard: 72057594046678944 2025-03-31T16:30:25.743484Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-03-31T16:30:25.744002Z node 26 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409547 Forgetting tablet 72075186233409547 2025-03-31T16:30:25.744237Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-03-31T16:30:25.744481Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 
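[Editor's note] For orientation, the node-26 schemeshard trace around this point is the tail of TAsyncIndexTests::DropTableWithInflightChanges[PipeResets]: a table carrying an async secondary index is dropped while index changes are still in flight, so the drop runs as three suboperations (1003:0, 1003:1, 1003:2) that each reach 3/3 before the index shards are freed and their tablets (72075186233409546, 72075186233409547) are deleted. A minimal YQL sketch of the shape of the scenario being exercised — table, column, and index names here are illustrative assumptions, not taken from the test source:

CREATE TABLE `/MyRoot/TestTable` (
  Key Uint64,
  Value Utf8,
  PRIMARY KEY (Key),
  INDEX ValueIndex GLOBAL ASYNC ON (Value)  -- async index: changes are applied to index shards in the background
);
UPSERT INTO `/MyRoot/TestTable` (Key, Value) VALUES (1u, "a"u);  -- leaves async index changes in flight
DROP TABLE `/MyRoot/TestTable`;           -- drop must complete cleanly despite in-flight index writes

The [GOOD] verdict above matches what the trace shows: TTxCleanDroppedPaths persists the path removals and both index-shard tablets are forgotten and deleted.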
2025-03-31T16:30:25.745003Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-31T16:30:25.745049Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-03-31T16:30:25.745110Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:30:25.750332Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-03-31T16:30:25.750400Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409547 2025-03-31T16:30:25.750837Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1003 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2025-03-31T16:30:25.751200Z node 26 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2025-03-31T16:30:25.751277Z node 26 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 >> TDatabaseResolverTests::ClickHouseHttp [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-25 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-26 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-25 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-26 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::Greenplum_PermissionDenied [GOOD] Test command err: 2025-03-31T16:30:26.884274Z node 2 :FQ_DATABASE_RESOLVER ERROR: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed Greenplum database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'mdb.api.cloud.yandex.net:443', url '/managed-greenplum/v1/clusters/etn021us5r9rhld1vgbh/master-hosts': you have no permission to resolve database id into database endpoint. |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::ClickHouseHttp [GOOD] >> TDatabaseResolverTests::Ydb_Dedicated >> KqpJoinOrder::ShuffleEliminationReuseShuffleTwoJoins [GOOD] >> TDatabaseResolverTests::Ydb_Dedicated [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-25 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-26 |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::Ydb_Dedicated [GOOD] |93.7%| [TA] $(B)/ydb/core/fq/libs/actors/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.7%| [TA] {RESULT} $(B)/ydb/core/fq/libs/actors/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-10 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-11 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-44 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-45 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-3 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-4 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-38 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-39 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-2 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-3 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-32 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-33 >> KqpUniqueIndex::UpdateOnFkAlreadyExist >> KqpUniqueIndex::InsertNullInPk >> KqpMultishardIndex::DataColumnWriteNull >> KqpIndexes::SecondaryIndexOrderBy >> KqpUniqueIndex::UpdateOnFkSelectResultSameValue ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::ShuffleEliminationReuseShuffleTwoJoins [GOOD] Test command err: Trying to start YDB, gRPC: 65432, MsgBus: 29127 2025-03-31T16:28:55.548010Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011492090430104:2084];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:55.826993Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018ba/r3tmp/tmpBDLbk6/pdisk_1.dat 2025-03-31T16:28:56.226325Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:28:56.226391Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:28:56.231531Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:28:56.233559Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 65432, node 1 2025-03-31T16:28:56.491761Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:28:56.491780Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:28:56.491787Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:28:56.491871Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29127 TClient is connected to server localhost:29127 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:28:57.568195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:57.588852Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:28:59.755234Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011509270299910:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:59.755359Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:59.756247Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011509270299922:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:59.760220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:28:59.771522Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011509270299924:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:28:59.846177Z node 1 :TX_PROXY ERROR: Actor# [1:7488011509270299977:2340] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:29:00.197922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:29:00.528085Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7488011513565267551:2359];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:29:00.528263Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7488011513565267551:2359];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:29:00.528535Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7488011513565267551:2359];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:29:00.528654Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7488011513565267551:2359];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:29:00.528751Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7488011513565267551:2359];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:29:00.528873Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7488011513565267551:2359];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:29:00.528977Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7488011513565267551:2359];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:29:00.529076Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7488011513565267551:2359];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:29:00.529197Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7488011513565267551:2359];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:29:00.529301Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7488011513565267551:2359];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:29:00.529397Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037898;self_id=[1:7488011513565267551:2359];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:29:00.529495Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7488011513565267551:2359];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:29:00.536847Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488011513565267541:2354];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:29:00.536934Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488011513565267541:2354];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:29:00.537133Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488011513565267541:2354];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:29:00.537243Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488011513565267541:2354];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:29:00.537366Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488011513565267541:2354];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:29:00.537478Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488011513565267541:2354];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:29:00.537575Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488011513565267541:2354];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:29:00.537687Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488011513565267541:2354];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:29:00.537816Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488011513565267541:2354];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:29:00.537936Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488011513565267541:2354];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:29:00.538062Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488011513565267541:2354];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:29:00.538177Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037901;self_id=[1:7488011513565267541:2354];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:29:00.616920Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011513565267559:2363];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:29:00.616989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011513565267559:2363];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abs ... oller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:16.864384Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039329;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:16.864384Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:16.870519Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:16.870520Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:16.876611Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:16.876611Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:16.882781Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:16.882779Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039313;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:16.889326Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:16.889326Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039199;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:16.895735Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039227;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:16.895735Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039271;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:16.901858Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:16.901858Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039223;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:16.908946Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039295;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:16.908955Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039287;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:16.916775Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:16.916775Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:16.923914Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:16.923914Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:16.930752Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:16.930752Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039325;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:16.941147Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:16.941217Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:16.947805Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:16.947806Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039257;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:16.954711Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039317;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:16.954718Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039297;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:16.961601Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:16.961601Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039327;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:16.968688Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:16.968688Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039225;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:16.975515Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:16.975516Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039255;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:16.982036Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:16.982036Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:16.988522Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039219;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:16.988526Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039263;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:16.997201Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:17.006115Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:17.013566Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:17.021072Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:17.027718Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039335;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:17.035663Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:17.043828Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:17.193098Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpgka68ehm0psggtd0t0arp", SessionId: ydb://session/3?node_id=1&id=OGFkZjVmYjQtY2NhMTE2NTQtMzEzYjY2MTAtNjQ4ODliZTc=, Slow query, duration: 29.087453s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:30:17.477940Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:30:17.478117Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;self_id=[1:7488011771263352047:9374];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039094;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039392;receive=72075186224038933; 2025-03-31T16:30:17.478794Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:30:17.479185Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> KqpUniqueIndex::ReplaceFkAlreadyExist >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-26 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-27 >> KqpJoinOrder::TPCHRandomJoinViewJustWorks+ColumnStore [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-26 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-27 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-26 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-27 >> KqpIndexes::ForbidViewModification >> KqpJoinOrder::CanonizedJoinOrderTPCH18 [GOOD] >> TAsyncIndexTests::MergeMainWithReboots[PipeResets] [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-11 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-12 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-4 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-5 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCHRandomJoinViewJustWorks+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 5556, MsgBus: 63789 2025-03-31T16:29:03.191558Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011528375353886:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:29:03.192254Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018b4/r3tmp/tmpWO4X4q/pdisk_1.dat 2025-03-31T16:29:03.908243Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:29:03.908319Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:29:03.937545Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:29:03.938847Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5556, node 1 2025-03-31T16:29:04.275641Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:29:04.275678Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:29:04.275688Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:29:04.275798Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63789 TClient is connected to server localhost:63789 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:29:05.471829Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:29:05.496843Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:29:07.762621Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011545555223597:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:07.762783Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:07.763065Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011545555223609:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:07.771382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:29:07.784770Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-31T16:29:07.785014Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011545555223611:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:29:07.864859Z node 1 :TX_PROXY ERROR: Actor# [1:7488011545555223662:2340] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:29:08.187206Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011528375353886:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:29:08.225230Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:29:08.254847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:29:08.501858Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488011549850191188:2359];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:29:08.501858Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011549850191173:2352];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:29:08.502041Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011549850191173:2352];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:29:08.502104Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488011549850191188:2359];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:29:08.502316Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488011549850191188:2359];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:29:08.502327Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011549850191173:2352];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:29:08.502438Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011549850191173:2352];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:29:08.502486Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488011549850191188:2359];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:29:08.502590Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011549850191173:2352];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:29:08.502606Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7488011549850191188:2359];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:29:08.502708Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011549850191173:2352];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:29:08.502815Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011549850191173:2352];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:29:08.502846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488011549850191188:2359];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:29:08.502971Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011549850191173:2352];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:29:08.503030Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488011549850191188:2359];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:29:08.503113Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011549850191173:2352];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:29:08.503147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488011549850191188:2359];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:29:08.503214Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011549850191173:2352];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:29:08.503248Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488011549850191188:2359];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:29:08.503330Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011549850191173:2352];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:29:08.503362Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488011549850191188:2359];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:29:08.503463Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011549850191173:2352];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:29:08.503466Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488011549850191188:2359];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:29:08.503553Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488011549850191188:2359];tablet_id=7207518622403789 ... tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:21.332520Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:21.338148Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:21.338510Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039247;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:21.344352Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:21.344367Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039325;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:21.350536Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039293;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:21.350698Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039213;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:21.356304Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039299;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:21.357091Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:21.362073Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:21.363805Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:21.368115Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039343;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:21.369416Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039217;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:21.374111Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039333;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:21.375405Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039255;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:21.379956Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039303;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:21.381482Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:21.385909Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:21.387483Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039273;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:21.392299Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:21.393074Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:21.400940Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039337;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:21.401262Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039275;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:21.406629Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:21.407233Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039279;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:21.413222Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:21.413319Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039307;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:21.419140Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:21.419305Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039289;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:21.425043Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:21.425389Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:21.431294Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039285;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:21.431543Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:21.437933Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:21.438050Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039367;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:21.444516Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039321;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:21.444652Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:21.449440Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:21.450728Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039339;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:21.453194Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039253;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:21.457189Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:21.458254Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039317;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:21.463158Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:21.464199Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039341;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:21.469947Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039319;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:21.470403Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:21.644088Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpgkf847nxyj7s79tsgqkgm", SessionId: ydb://session/3?node_id=1&id=ZDljY2JmNC02YTc2YjE1Zi1kNGM0NGNiZi1hMGZlNTI0Yg==, Slow query, duration: 28.358884s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:30:21.917762Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:30:21.917767Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:30:21.918518Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> TPersQueueTest::TestBigMessage [GOOD] >> TPersQueueTest::SetMeteringMode >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-45 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-46 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::MergeMainWithReboots[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [1:135:2157] sender: [1:136:2058] recipient: [1:111:2142] 2025-03-31T16:29:30.507402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:29:30.507577Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:29:30.507628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:29:30.507670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:29:30.511832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:29:30.511910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:29:30.512023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:29:30.512119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:29:30.513915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:29:30.613771Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-03-31T16:29:30.613874Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:169:2058] recipient: [1:15:2062] 2025-03-31T16:29:30.646102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:29:30.646285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:29:30.646434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:29:30.655519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:29:30.655774Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:29:30.660127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:30.660354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:29:30.680666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:30.698038Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:29:30.698128Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:30.698269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:29:30.698335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:29:30.698393Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:29:30.698536Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:210:2058] recipient: [1:208:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:210:2058] recipient: [1:208:2209] Leader for TabletID 72057594037968897 is [1:214:2213] sender: [1:215:2058] recipient: [1:208:2209] 2025-03-31T16:29:30.708097Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:235:2058] recipient: [1:15:2062] 2025-03-31T16:29:30.872570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:29:30.874846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:30.883230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:29:30.887272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:29:30.887390Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:30.891119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:30.891304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:29:30.891546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:30.891713Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:29:30.891752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:29:30.891800Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:29:30.894386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:30.894473Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:29:30.894517Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:29:30.897029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:30.897091Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:30.897128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 
2025-03-31T16:29:30.897176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:29:30.909220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:29:30.911965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:29:30.923371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:135:2157] sender: [1:250:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:29:30.924734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:30.924883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 135 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:29:30.924966Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:30.935560Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:29:30.935652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:30.935874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:29:30.935988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:29:30.944337Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:29:30.944401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:29:30.944597Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:30.944642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:202:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:29:30.944993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:30.945044Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:29:30.945157Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:29:30.945197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:29:30.945266Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:29:30.945324Z no ... 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 
1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:30:33.410894Z node 30 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-31T16:30:33.411558Z node 30 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 685us result status StatusSuccess 2025-03-31T16:30:33.412518Z node 30 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 
ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:30:33.423641Z node 30 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409549:2][72075186233409546][30:845:2676] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-03-31T16:30:33.423761Z node 30 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409549:2][30:784:2676] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2025-03-31T16:30:33.423916Z node 30 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409549:2][72075186233409546][30:845:2676] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ 
Order: 1 Group: 1743438633398348 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1743438633398348 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1743438633398348 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2025-03-31T16:30:33.426546Z node 30 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409549:2][72075186233409546][30:845:2676] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3 2025-03-31T16:30:33.426642Z node 30 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409549:2][30:784:2676] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-39 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-40 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-3 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-4 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-33 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-34 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH18 [GOOD] Test command err: Trying to start YDB, gRPC: 5365, MsgBus: 22695 2025-03-31T16:28:56.937231Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011495870682527:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:56.937785Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018b9/r3tmp/tmpT6uSSh/pdisk_1.dat 2025-03-31T16:28:57.687052Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:28:57.689495Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:28:57.689755Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:28:57.696118Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5365, node 1 2025-03-31T16:28:58.064595Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:28:58.064615Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:28:58.064621Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:28:58.064717Z node 1 
:NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22695 TClient is connected to server localhost:22695 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:28:58.938062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:58.955889Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:29:01.754293Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011517345519542:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:01.754409Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:01.754544Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011517345519554:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:01.758922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:29:01.774520Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011517345519556:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:29:01.839506Z node 1 :TX_PROXY ERROR: Actor# [1:7488011517345519607:2340] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:29:01.906099Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011495870682527:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:29:01.906161Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:29:02.410327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:29:02.772956Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011521640487193:2363];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:29:02.773135Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011521640487193:2363];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:29:02.773354Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011521640487193:2363];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:29:02.773458Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011521640487193:2363];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:29:02.773548Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011521640487193:2363];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:29:02.773645Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011521640487193:2363];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:29:02.773750Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011521640487193:2363];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:29:02.773874Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011521640487193:2363];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:29:02.773980Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011521640487193:2363];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:29:02.774079Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[1:7488011521640487193:2363];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:29:02.774174Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011521640487193:2363];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:29:02.774263Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7488011521640487193:2363];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:29:02.775225Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488011521640487187:2360];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:29:02.775260Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488011521640487187:2360];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:29:02.775410Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488011521640487187:2360];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:29:02.775518Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488011521640487187:2360];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:29:02.775635Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488011521640487187:2360];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:29:02.775757Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488011521640487187:2360];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:29:02.775855Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488011521640487187:2360];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:29:02.775941Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488011521640487187:2360];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:29:02.776040Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488011521640487187:2360];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:29:02.776128Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488011521640487187:2360];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:29:02.776222Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037903;self_id=[1:7488011521640487187:2360];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:29:02.776304Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488011521640487187:2360];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:29:02.857712Z node 1 :TX_ ... tablet_id=72075186224039315;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:19.729071Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039275;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:19.734034Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039279;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:19.734343Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:19.739327Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039247;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:19.739327Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039295;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:19.744406Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039367;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:19.745231Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039249;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:19.749928Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039317;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:19.754160Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:19.755239Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039241;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:19.760410Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039265;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:19.760416Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039281;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:19.766117Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039285;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:19.766431Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039263;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:19.772557Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039337;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:19.772568Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:19.779180Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:19.779179Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039251;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:19.786109Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039319;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:19.786120Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:19.792219Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:19.792904Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:19.799008Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039345;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:19.799242Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039291;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:19.805721Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039271;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:19.805720Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:19.812312Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039307;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:19.812312Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:19.819709Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039259;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:19.819709Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:19.827276Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039215;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:19.827277Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039372;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:19.834366Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039341;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:19.834367Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039327;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:19.840368Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039321;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:19.840923Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:19.845467Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039287;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:19.846844Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:19.851119Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039309;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:19.853238Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:19.858495Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:19.859694Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:19.865387Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:19.866914Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:19.870942Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:19.874644Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:19.984208Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpgkc57dbvrs290g7ty0k73", SessionId: ydb://session/3?node_id=1&id=OGJjNjk4ZjItNThlMDg2ZWEtNTFlN2Q4MWEtN2NkZjc1Zg==, Slow query, duration: 29.864340s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:30:20.255508Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:30:20.255510Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:30:20.256327Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> KqpIndexMetadata::HandleNotReadyIndex >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-27 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-28 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-27 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-28 >> KqpIndexes::MultipleSecondaryIndex+UseSink >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-27 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-28 >> KqpIndexes::WriteWithParamsFieldOrder >> KqpUniqueIndex::InsertNullInPk [GOOD] >> KqpUniqueIndex::InsertNullInFk >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-5 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-6 >> KqpMultishardIndex::DataColumnWriteNull [GOOD] >> KqpMultishardIndex::DuplicateUpsert >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-12 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-13 >> KqpIndexes::ForbidViewModification [GOOD] >> KqpIndexes::IndexOr >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-46 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-47 >> KqpUniqueIndex::UpdateOnFkAlreadyExist [GOOD] >> KqpUniqueIndex::UpdateImplicitNullInComplexFk2 >> 
KqpUniqueIndex::ReplaceFkAlreadyExist [GOOD] >> KqpUniqueIndex::ReplaceFkDuplicate >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-4 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-5 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-40 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-41 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-34 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-35 >> KqpUniqueIndex::UpdateOnFkSelectResultSameValue [GOOD] >> KqpUniqueIndex::UpdateOnHidenChanges+DataColumn >> TPQCompatTest::ReadWriteSessions [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCH2 [GOOD] >> TAsyncIndexTests::SplitIndexWithReboots[PipeResets] [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-28 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-29 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-28 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-29 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-28 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-29 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::SplitIndexWithReboots[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [1:135:2157] sender: [1:136:2058] recipient: [1:111:2142] 2025-03-31T16:29:57.465673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:29:57.465793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:29:57.465839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:29:57.465872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:29:57.465931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:29:57.465961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:29:57.466037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, 
WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:29:57.466135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:29:57.466491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:29:57.562689Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-03-31T16:29:57.562787Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:169:2058] recipient: [1:15:2062] 2025-03-31T16:29:57.580283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:29:57.580434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:29:57.580597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:29:57.584694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:29:57.584923Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:29:57.585529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:57.585737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:29:57.590607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:57.592077Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:29:57.592136Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:57.592260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:29:57.592327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:29:57.592376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:29:57.592525Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:210:2058] recipient: [1:208:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:210:2058] recipient: [1:208:2209] Leader for TabletID 72057594037968897 is [1:214:2213] sender: [1:215:2058] recipient: [1:208:2209] 2025-03-31T16:29:57.599308Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:235:2058] recipient: [1:15:2062] 2025-03-31T16:29:57.742065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: 
ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:29:57.742306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:57.742502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:29:57.742741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:29:57.742802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:57.744890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:57.745007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:29:57.745169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:57.745227Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:29:57.745260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:29:57.745305Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:29:57.746934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:57.746997Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:29:57.747032Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:29:57.748686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:57.748728Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:57.748765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:57.748808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:29:57.752450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:29:57.754226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:29:57.754399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 
72057594046316545 is [1:135:2157] sender: [1:250:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:29:57.755316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:57.755434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 135 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:29:57.755488Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:57.755739Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:29:57.755788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:57.755957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:29:57.756038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:29:57.757830Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:29:57.757910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:29:57.758072Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:57.758110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:202:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:29:57.758429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:57.758492Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:29:57.758580Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:29:57.758611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:29:57.758646Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:29:57.758690Z no ... 
CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 
101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409547 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:30:40.819152Z node 22 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-31T16:30:40.819373Z node 22 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path 
"/MyRoot/Table/UserDefinedIndex/indexImplTable" took 276us result status StatusSuccess 2025-03-31T16:30:40.820443Z node 22 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 50 } } Tuple { } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\002\000\004\000\000\0002\000\000\000\000\000\000\200" 
IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest >> TPQCompatTest::ReadWriteSessions [GOOD] Test command err: 2025-03-31T16:25:54.728804Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010715553301825:2148];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:54.734159Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:25:54.846769Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488010714366594463:2143];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:54.850969Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:25:55.073990Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a18/r3tmp/tmpWZ7807/pdisk_1.dat 2025-03-31T16:25:55.111302Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-31T16:25:55.524826Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:25:55.527371Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:55.527460Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:55.529577Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:55.529666Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:55.537467Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:25:55.537584Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:25:55.538006Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62216, node 1 2025-03-31T16:25:55.731350Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/0l5c/000a18/r3tmp/yandexLQWU11.tmp 2025-03-31T16:25:55.731382Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/0l5c/000a18/r3tmp/yandexLQWU11.tmp 2025-03-31T16:25:55.731555Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/0l5c/000a18/r3tmp/yandexLQWU11.tmp 2025-03-31T16:25:55.731678Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:25:55.836875Z INFO: TTestServer started on Port 3356 GrpcPort 62216 TClient is connected to server localhost:3356 PQClient connected to localhost:62216 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:25:56.145217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:25:56.214593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-03-31T16:25:58.652516Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010732733172008:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:58.652637Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:58.652732Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010732733172028:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:58.657496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-03-31T16:25:58.681938Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488010732733172063:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:58.682039Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:58.718963Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488010732733172030:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-03-31T16:25:59.043185Z node 1 :TX_PROXY ERROR: Actor# [1:7488010732733172125:2766] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:25:59.068969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:25:59.069200Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7488010731546463976:2319], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:25:59.069441Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488010737028139435:2350], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:25:59.069523Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDE1NzNmMTMtN2ZjYjU3YWYtODdlMjNkMGQtYzYxNDk5MzI=, ActorId: [2:7488010731546463935:2312], ActorState: ExecuteState, TraceId: 01jqpgcabq81fmajsj9gwbt0z1, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:25:59.070989Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDc0MTY1N2EtNDUzYzFiYjgtY2ZhYzc1OWEtZjhiYTUwNjk=, ActorId: [1:7488010732733172005:2337], ActorState: ExecuteState, TraceId: 01jqpgca3tdn1p2sy3w7bnsd5x, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:25:59.071890Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T16:25:59.071988Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T16:25:59.175677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:25:59.317461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-03-31T16:25:59.557476Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jqpgcavp5cfc86wre08q3kcq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDczNGYzZjctODI0ZDBiYWYtNTI5ZjE1NWQtZWZjZjM5ZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. 
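The SCHEME_ERROR records above are expected at this point in the test: the PersQueue cluster tracker polls the legacy config table /Root/PQ/Config/V2/Cluster before the fixture has created it, so query compilation fails with "Cannot find table", and the "=== Init DC" step then seeds the table. A minimal YQL sketch of that initialization follows; the UPSERT is taken verbatim from the log above, while the CREATE TABLE schema (column types and primary key) is an assumption inferred from the inserted values.

-- Assumed schema for the legacy PQ cluster config table; only the
-- column names and inserted values are confirmed by the log above.
CREATE TABLE `/Root/PQ/Config/V2/Cluster` (
    name Utf8,
    balancer Utf8,
    local Bool,
    enabled Bool,
    weight Int64,
    PRIMARY KEY (name)
);

-- Verbatim from the log: one local and one remote DC.
UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight)
VALUES ("dc1", "localhost", true, true, 1000),
       ("dc2", "dc2.logbroker.yandex.net", false, true, 1000);

Once the table exists and is populated, the cluster tracker's next poll succeeds, which is what "=== CheckClustersList. Ok" later in the output confirms.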
Subcribe to ClusterTracker from [1:7488010737028139881:3098] 2025-03-31T16:25:59.728372Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010715553301825:2148];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:59.728464Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:25:59.850038Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488010714366594463:2143];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:59.850103Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-03-31T16:26:06.056326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModify ... pics 2025-03-31T16:30:38.188135Z node 27 :PQ_METACACHE DEBUG: SendSchemeCacheRequest 2025-03-31T16:30:38.188206Z node 27 :PQ_METACACHE DEBUG: send request for 1 topics, got 1 requests infly, db = "Root/LbCommunal" 2025-03-31T16:30:38.188840Z node 27 :PQ_METACACHE DEBUG: Handle SchemeCache response: result# { ErrorCount: 0 DatabaseName: Root/LbCommunal DomainOwnerId: 0 Instant: 12 ResultSet [{ Path: Root/LbCommunal/account/topic2-mirrored-from-dc2 TableId: [72057594046644480:18:0] RequestType: ByPath Operation: OpList RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindTopic DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-31T16:30:38.188992Z node 27 :PQ_METACACHE DEBUG: Got describe topics SC response 2025-03-31T16:30:38.189029Z node 27 :PQ_READ_PROXY DEBUG: session cookie 6 consumer shared/user session shared/user_27_6_16929600718554728493_v1 Handle describe topics response 2025-03-31T16:30:38.189252Z node 27 :PQ_READ_PROXY DEBUG: session cookie 6 consumer shared/user session shared/user_27_6_16929600718554728493_v1 auth is DEAD 2025-03-31T16:30:38.189279Z node 27 :PQ_READ_PROXY INFO: session cookie 6 consumer shared/user session shared/user_27_6_16929600718554728493_v1 auth ok: topics# 1, initDone# 0 2025-03-31T16:30:38.190557Z node 27 :PQ_READ_PROXY INFO: session cookie 6 consumer shared/user session shared/user_27_6_16929600718554728493_v1 register session: topic# rt3.dc2--account--topic2 ===Got response: status: SUCCESS init_response { session_id: "shared/user_27_6_16929600718554728493_v1" } 2025-03-31T16:30:38.190995Z node 27 :PERSQUEUE_READ_BALANCER INFO: [72075186224037899][topic2-mirrored-from-dc2] pipe [27:7488011936589475194:2665] connected; active server actors: 1 2025-03-31T16:30:38.191048Z node 27 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037899][topic2-mirrored-from-dc2] consumer "user" register session for pipe [27:7488011936589475194:2665] session shared/user_27_6_16929600718554728493_v1 2025-03-31T16:30:38.191124Z node 27 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037899][topic2-mirrored-from-dc2] consumer user register readable partition 0 2025-03-31T16:30:38.191221Z node 27 :PERSQUEUE_READ_BALANCER DEBUG: 
[72075186224037899][topic2-mirrored-from-dc2] consumer user family created family=1 (Status=Free, Partitions=[0]) 2025-03-31T16:30:38.191296Z node 27 :PERSQUEUE_READ_BALANCER INFO: [72075186224037899][topic2-mirrored-from-dc2] consumer user register reading session ReadingSession "shared/user_27_6_16929600718554728493_v1" (Sender=[27:7488011936589475191:2665], Pipe=[27:7488011936589475194:2665], Partitions=[], ActiveFamilyCount=0) 2025-03-31T16:30:38.191342Z node 27 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037899][topic2-mirrored-from-dc2] consumer user rebalancing was scheduled 2025-03-31T16:30:38.191437Z node 27 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037899][topic2-mirrored-from-dc2] consumer user balancing. Sessions=1, Families=1, UnradableFamilies=1 [1 (0), ], RequireBalancing=0 [] 2025-03-31T16:30:38.191542Z node 27 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037899][topic2-mirrored-from-dc2] consumer user balancing family=1 (Status=Free, Partitions=[0]) for ReadingSession "shared/user_27_6_16929600718554728493_v1" (Sender=[27:7488011936589475191:2665], Pipe=[27:7488011936589475194:2665], Partitions=[], ActiveFamilyCount=0) 2025-03-31T16:30:38.191642Z node 27 :PERSQUEUE_READ_BALANCER INFO: [72075186224037899][topic2-mirrored-from-dc2] consumer user family 1 status Active partitions [0] session "shared/user_27_6_16929600718554728493_v1" sender [27:7488011936589475191:2665] lock partition 0 for ReadingSession "shared/user_27_6_16929600718554728493_v1" (Sender=[27:7488011936589475191:2665], Pipe=[27:7488011936589475194:2665], Partitions=[], ActiveFamilyCount=1) generation 1 step 3 2025-03-31T16:30:38.191735Z node 27 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037899][topic2-mirrored-from-dc2] consumer user start rebalancing. familyCount=1, sessionCount=1, desiredFamilyCount=1, allowPlusOne=0 2025-03-31T16:30:38.191791Z node 27 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037899][topic2-mirrored-from-dc2] consumer user balancing duration: 0.000309s 2025-03-31T16:30:38.192418Z node 27 :PQ_READ_PROXY INFO: session cookie 6 consumer shared/user session shared/user_27_6_16929600718554728493_v1 assign: record# { Partition: 0 TabletId: 72075186224037898 Topic: "topic2-mirrored-from-dc2" Generation: 1 Step: 3 Session: "shared/user_27_6_16929600718554728493_v1" ClientId: "user" PipeClient { RawX1: 7488011936589475194 RawX2: 4503715591490153 } Path: "/Root/LbCommunal/account/topic2-mirrored-from-dc2" } 2025-03-31T16:30:38.192546Z node 27 :PQ_READ_PROXY INFO: session cookie 6 consumer shared/user session shared/user_27_6_16929600718554728493_v1 INITING TopicId: Topic topic2-mirrored-from-dc2 in dc dc2 in database: Root, partition 0(assignId:1) 2025-03-31T16:30:38.193142Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037898] server connected, pipe [27:7488011936589475197:2668], now have 1 active actors on pipe 2025-03-31T16:30:38.193443Z node 27 :PQ_READ_PROXY INFO: session cookie 6 consumer shared/user session shared/user_27_6_16929600718554728493_v1 TopicId: Topic topic2-mirrored-from-dc2 in dc dc2 in database: Root, partition 0(assignId:1) pipe restart attempt 0 pipe creation result: OK TabletId: 72075186224037898 Generation: 1, pipe: [27:7488011936589475197:2668] 2025-03-31T16:30:38.193621Z node 28 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic2-mirrored-from-dc2' requestId: 2025-03-31T16:30:38.193669Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037898] got client message batch for topic 'rt3.dc2--account--topic2' partition 0 2025-03-31T16:30:38.193735Z node 28 :PERSQUEUE DEBUG: [PQ: 
72075186224037898] Created session shared/user_27_6_16929600718554728493_v1 on pipe: [27:7488011936589475197:2668] 2025-03-31T16:30:38.193819Z node 28 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: shared/user_27_6_16929600718554728493_v1:1 with generation 1 2025-03-31T16:30:38.193936Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037898, Partition: 0, State: StateIdle] Topic 'rt3.dc2--account--topic2' partition 0 user user session is set to 0 (startOffset 0) session shared/user_27_6_16929600718554728493_v1 2025-03-31T16:30:38.194183Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037898, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-31T16:30:38.194241Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037898, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-31T16:30:38.194280Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037898, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-31T16:30:38.194327Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037898, Partition: 0, State: StateIdle] i0000000000 2025-03-31T16:30:38.194349Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037898, Partition: 0, State: StateIdle] m0000000000cuser 2025-03-31T16:30:38.194366Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037898, Partition: 0, State: StateIdle] m0000000000uuser 2025-03-31T16:30:38.194402Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037898, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-31T16:30:38.194443Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037898, Partition: 0, State: StateIdle] =========================== 2025-03-31T16:30:38.194507Z node 28 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-03-31T16:30:38.198185Z node 28 :PERSQUEUE DEBUG: Answer ok topic: 'topic2-mirrored-from-dc2' partition: 0 messageNo: 0 requestId: cookie: 18446744073709551615 2025-03-31T16:30:38.198223Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037898, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-03-31T16:30:38.198631Z node 27 :PQ_READ_PROXY DEBUG: session cookie 6 consumer shared/user session shared/user_27_6_16929600718554728493_v1 TopicId: Topic topic2-mirrored-from-dc2 in dc dc2 in database: Root, partition 0(assignId:1) initDone 0 event { CmdGetClientOffsetResult { Offset: 0 EndOffset: 0 WriteTimestampMS: 1743438638126 CreateTimestampMS: 1743438638126 SizeLag: 0 WriteTimestampEstimateMS: 0 } Cookie: 18446744073709551615 } 2025-03-31T16:30:38.198698Z node 27 :PQ_READ_PROXY INFO: session cookie 6 consumer shared/user session shared/user_27_6_16929600718554728493_v1 INIT DONE TopicId: Topic topic2-mirrored-from-dc2 in dc dc2 in database: Root, partition 0(assignId:1) EndOffset 0 readOffset 0 committedOffset 0 2025-03-31T16:30:38.198807Z node 27 :PQ_READ_PROXY DEBUG: session cookie 6 consumer shared/user session shared/user_27_6_16929600718554728493_v1 sending to client partition status ===Got response: status: SUCCESS start_partition_session_request { partition_session { partition_session_id: 1 path: "account/topic2-mirrored-from-dc2" } partition_offsets { } } 2025-03-31T16:30:38.200757Z node 27 :PQ_READ_PROXY DEBUG: session cookie 6 consumer shared/user session shared/user_27_6_16929600718554728493_v1 grpc read done: success# 0, data# { } 2025-03-31T16:30:38.200783Z node 27 :PQ_READ_PROXY INFO: session cookie 6 consumer shared/user session shared/user_27_6_16929600718554728493_v1 grpc read failed 2025-03-31T16:30:38.200810Z node 27 :PQ_READ_PROXY INFO: 
session cookie 6 consumer shared/user session shared/user_27_6_16929600718554728493_v1 grpc closed 2025-03-31T16:30:38.200855Z node 27 :PQ_READ_PROXY INFO: session cookie 6 consumer shared/user session shared/user_27_6_16929600718554728493_v1 is DEAD 2025-03-31T16:30:38.201404Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037898] Destroy direct read session shared/user_27_6_16929600718554728493_v1 2025-03-31T16:30:38.201487Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037898] server disconnected, pipe [27:7488011936589475197:2668] destroyed 2025-03-31T16:30:38.201524Z node 28 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_27_6_16929600718554728493_v1 2025-03-31T16:30:38.201635Z node 27 :PERSQUEUE_READ_BALANCER INFO: [72075186224037899][topic2-mirrored-from-dc2] pipe [27:7488011936589475194:2665] disconnected; active server actors: 1 2025-03-31T16:30:38.201676Z node 27 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037899][topic2-mirrored-from-dc2] pipe [27:7488011936589475194:2665] client user disconnected session shared/user_27_6_16929600718554728493_v1 2025-03-31T16:30:38.605977Z node 27 :PQ_METACACHE DEBUG: Check version rescan 2025-03-31T16:30:38.621462Z node 27 :PQ_METACACHE DEBUG: Got config version: 6 2025-03-31T16:30:38.630912Z node 27 :KQP_EXECUTER ERROR: ActorId: [27:7488011936589475249:2677] TxId: 281474976715705. Ctx: { TraceId: 01jqpgmvh34pqh2wb3vx7rvkhe, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=27&id=YjkxZTE5MjQtZjYzYTYyMDgtYjI2ODNiNjYtZGE3MGViMjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 28 2025-03-31T16:30:38.631176Z node 27 :KQP_COMPUTE ERROR: SelfId: [27:7488011936589475253:2677], TxId: 281474976715705, task: 2. Ctx: { SessionId : ydb://session/3?node_id=27&id=YjkxZTE5MjQtZjYzYTYyMDgtYjI2ODNiNjYtZGE3MGViMjA=. TraceId : 01jqpgmvh34pqh2wb3vx7rvkhe. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [27:7488011936589475249:2677], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2025-03-31T16:30:38.920167Z node 27 :PQ_METACACHE DEBUG: HandleClustersUpdate 2025-03-31T16:30:38.920198Z node 27 :PQ_METACACHE DEBUG: HandleClustersUpdate LocalCluster !LocalCluster.empty() >> KqpIndexes::SecondaryIndexOrderBy [GOOD] >> KqpIndexes::SecondaryIndexOrderBy2 >> TAsyncIndexTests::SplitMainWithReboots[PipeResets] [GOOD] >> TColumnShardTestReadWrite::CompactionSplitGranule_PKDatetime [GOOD] >> OlapEstimationRowsCorrectness::TPCH5 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::SplitMainWithReboots[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [1:135:2157] sender: [1:136:2058] recipient: [1:111:2142] 2025-03-31T16:29:57.149946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:29:57.150043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:29:57.150104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:29:57.150147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:29:57.150202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:29:57.150236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:29:57.150318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:29:57.150425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:29:57.150768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:29:57.247524Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: 
"MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-03-31T16:29:57.247613Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:169:2058] recipient: [1:15:2062] 2025-03-31T16:29:57.273371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:29:57.273578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:29:57.273804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:29:57.278604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:29:57.278925Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:29:57.279686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:57.279934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:29:57.285155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:57.286890Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:29:57.286966Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:57.287126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:29:57.287184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:29:57.287247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:29:57.287401Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:210:2058] recipient: [1:208:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:210:2058] recipient: [1:208:2209] Leader for TabletID 72057594037968897 is [1:214:2213] sender: [1:215:2058] recipient: [1:208:2209] 2025-03-31T16:29:57.294935Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:235:2058] recipient: [1:15:2062] 2025-03-31T16:29:57.458448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:29:57.458694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:57.458930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:29:57.459268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:29:57.459334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed 
ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:57.464855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:57.464994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:29:57.465140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:57.465194Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:29:57.465225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:29:57.465269Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:29:57.468926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:57.468999Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:29:57.469042Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:29:57.472211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:57.472274Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:57.472322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:57.472373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:29:57.476419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:29:57.479042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:29:57.479289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:135:2157] sender: [1:250:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:29:57.480412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:57.480556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 135 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:29:57.480619Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:57.480898Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:29:57.480958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:57.481165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:29:57.481291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:29:57.483692Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:29:57.483743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:29:57.483953Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:57.483999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:202:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:29:57.484342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:57.484394Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:29:57.484493Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:29:57.484528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:29:57.484573Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:29:57.484627Z no ... 
100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 50 } } } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\001\000\004\000\000\0002\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: 
false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:30:41.825074Z node 24 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-31T16:30:41.825298Z node 24 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 248us result status StatusSuccess 2025-03-31T16:30:41.825974Z node 24 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 
5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:30:41.839541Z node 24 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409546][24:803:2619] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-03-31T16:30:41.839642Z node 24 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][24:723:2619] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2025-03-31T16:30:41.839767Z node 24 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409546][24:803:2619] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1743438641810777 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, 
LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1743438641810777 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1743438641810777 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2025-03-31T16:30:41.841867Z node 24 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409546][24:803:2619] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3 2025-03-31T16:30:41.841967Z node 24 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][24:723:2619] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-13 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-14 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-6 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-7 >> KqpIndexes::UpdateIndexSubsetPk ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH2 [GOOD] Test command err: Trying to start YDB, gRPC: 31691, MsgBus: 28789 2025-03-31T16:29:05.525389Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011535029377679:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:29:05.525757Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018b3/r3tmp/tmp3PTcbP/pdisk_1.dat 2025-03-31T16:29:06.310534Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:29:06.312306Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:29:06.312385Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:29:06.316138Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31691, node 1 2025-03-31T16:29:06.700543Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:29:06.700561Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:29:06.700569Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:29:06.700685Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28789 TClient is connected to server localhost:28789 WaitRootIsUp 'Root'... 
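The describe-path dump that ends above comes from TAsyncIndexTests::SplitMainWithReboots: "UserDefinedIndex" is EIndexTypeGlobalAsync, its impl table is keyed on (indexed, key), and the CHANGE_EXCHANGE records (Order: 1..3, Kind: AsyncIndex, STATUS_OK) are the deferred propagation of main-table writes into that impl table. For reference, an index of this kind is declared in YQL as below; the table path and column name are taken from the dump, but the statement itself is a generic sketch rather than what the unit test literally executes.

-- Sketch: declare an async global index like the one in the dump.
-- With GLOBAL ASYNC, writes to the main table are acknowledged before
-- the index is updated; the change records are then shipped separately
-- by the change-exchange sender seen in the log above.
ALTER TABLE `/MyRoot/Table`
    ADD INDEX UserDefinedIndex GLOBAL ASYNC ON (indexed);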
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:29:07.742522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:29:10.119132Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011556504214690:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:10.119235Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:10.119744Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011556504214702:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:10.125964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:29:10.149897Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011556504214704:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:29:10.229437Z node 1 :TX_PROXY ERROR: Actor# [1:7488011556504214755:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:29:10.519207Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011535029377679:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:29:10.519282Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:29:10.737054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:29:11.103660Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011556504215003:2352];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:29:11.103901Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011556504215003:2352];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:29:11.104189Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011556504215003:2352];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:29:11.104312Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011556504215003:2352];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:29:11.104412Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011556504215003:2352];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:29:11.104533Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011556504215003:2352];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:29:11.104637Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011556504215003:2352];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:29:11.104784Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011556504215003:2352];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:29:11.104919Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011556504215003:2352];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:29:11.105029Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7488011556504215003:2352];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:29:11.105154Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011556504215003:2352];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:29:11.105263Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011556504215003:2352];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:29:11.161727Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488011556504215008:2354];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:29:11.161822Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488011556504215008:2354];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:29:11.162089Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488011556504215008:2354];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:29:11.162202Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488011556504215008:2354];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:29:11.162311Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488011556504215008:2354];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:29:11.162410Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488011556504215008:2354];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:29:11.162513Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488011556504215008:2354];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:29:11.162618Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488011556504215008:2354];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:29:11.162724Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488011556504215008:2354];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:29:11.162824Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488011556504215008:2354];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:29:11.162923Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7488011556504215008:2354];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:29:11.163018Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488011556504215008:2354];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:29:11.184894Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488011556504214999:2350];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.c ... tablet_id=72075186224039229;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:23.019495Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039321;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:23.019878Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039337;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:23.023832Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039281;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:23.024640Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039273;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:23.027849Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039345;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:23.029608Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039257;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:23.032142Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039283;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:23.035009Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039301;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:23.036722Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:23.040887Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039255;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:23.040944Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039235;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:23.045327Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039307;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 
2025-03-31T16:30:23.046630Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039325;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:23.050130Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039221;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:23.052422Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039239;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:23.054703Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039215;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:23.058493Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039343;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:23.059320Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039185;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:23.063174Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039323;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:23.064143Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039303;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:23.077373Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039339;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:23.079341Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:23.085962Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039327;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:23.091437Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039195;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:23.091731Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:23.096086Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039191;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:23.097036Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039313;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:23.100880Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:23.102878Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:23.105466Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:23.108457Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:23.110258Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039187;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:23.114146Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039249;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:23.114172Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039251;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:23.118668Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:23.120008Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039263;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:23.124763Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039367;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:23.125612Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:23.130423Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039253;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:23.131548Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039317;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:23.136870Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:23.137681Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:23.142656Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039309;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:23.144349Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039315;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:23.149495Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039207;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:23.149883Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:23.298489Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpgkhjfd9bdsdabc3m9stb4", SessionId: ydb://session/3?node_id=1&id=YmU4MjI5M2QtZTc5ZDUwNzYtZGQwZmFiMDctNGFjM2M5Ng==, Slow query, duration: 27.633958s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:30:23.622646Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:30:23.622754Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:30:23.623389Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-47 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-48 >> KqpIndexes::UpsertMultipleUniqIndexes >> OlapEstimationRowsCorrectness::TPCH9 [GOOD] >> KqpIndexes::MultipleSecondaryIndex+UseSink [GOOD] >> KqpIndexes::MultipleSecondaryIndex-UseSink >> KqpIndexes::WriteWithParamsFieldOrder [GOOD] >> KqpMultishardIndex::CheckPushTopSort >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-5 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-6 >> TColumnShardTestSchema::ColdCompactionSmoke [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-41 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-42 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKDatetime [GOOD] Test command err: 2025-03-31T16:21:58.255612Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:21:58.354400Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:21:58.375398Z node 1 :TX_COLUMNSHARD INFO: 
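The KQP_SLOW_LOG entry above embeds the offending statement as a single escaped string. Rendered with the \n escapes expanded (content taken verbatim from the log; only whitespace is reformatted for readability), the DDL that took 27.633958s is:

  CREATE TABLE t1 (
      id1 Int32 NOT NULL,
      PRIMARY KEY (id1)
  ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

  CREATE TABLE t2 (
      id2 Int64 NOT NULL,
      t1_id1 Int64 NOT NULL,
      -- random_field2 Int32
      PRIMARY KEY (id2)
  ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

  CREATE TABLE t3 (
      id3 Int16 NOT NULL,
      -- random_field3 Int32
      PRIMARY KEY (id3)
  ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);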
tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:21:58.375695Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:21:58.383606Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:21:58.383825Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:21:58.384075Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:21:58.384227Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:21:58.384387Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:21:58.384525Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:21:58.384664Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:21:58.384807Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:21:58.384937Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:21:58.385068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:21:58.385207Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:21:58.385323Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:21:58.415441Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:21:58.415609Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:21:58.415667Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 
2025-03-31T16:21:58.415902Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:21:58.416095Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:21:58.416182Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:21:58.416230Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:21:58.416392Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:21:58.416470Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:21:58.416535Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:21:58.416579Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:21:58.416807Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:21:58.416903Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:21:58.416954Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:21:58.416987Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:21:58.417107Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:21:58.417181Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:21:58.417228Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:21:58.417258Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:21:58.417334Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:21:58.417373Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:21:58.417412Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:21:58.417474Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:21:58.417524Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:21:58.417568Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:21:58.418023Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=56; 2025-03-31T16:21:58.418128Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=49; 2025-03-31T16:21:58.418229Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=44; 2025-03-31T16:21:58.418386Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=104; 2025-03-31T16:21:58.418583Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:21:58.418645Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:21:58.418686Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:21:58.418914Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:21:58.418970Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:21:58.419005Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:21:58.419214Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:21:58.419277Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:21:58.419336Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-31T16:21:58.419566Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:21:58.419617Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:21:58.419648Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-31T16:21:58.419774Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T16:21:58.419815Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T16:21:58.419873Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 3-31T16:30:40.616863Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:10501:12462];process=Enqueue;ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1503;stage=finished; 2025-03-31T16:30:41.532869Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-03-31T16:30:41.532994Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=17; 2025-03-31T16:30:41.533652Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=566; 2025-03-31T16:30:41.533727Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=656; 2025-03-31T16:30:41.542463Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-03-31T16:30:41.542588Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=16; 2025-03-31T16:30:41.567275Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=24566; 2025-03-31T16:30:41.591577Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=22745; 2025-03-31T16:30:41.591723Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=24326; 2025-03-31T16:30:41.591929Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=128; 2025-03-31T16:30:41.592083Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=100; 2025-03-31T16:30:41.592295Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=165; 2025-03-31T16:30:41.592481Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=136; 2025-03-31T16:30:41.592726Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=192; 2025-03-31T16:30:41.592777Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=50129; 2025-03-31T16:30:41.598595Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-03-31T16:30:41.598716Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=16; 2025-03-31T16:30:41.603907Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=5073; 2025-03-31T16:30:41.638918Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=34876; 2025-03-31T16:30:41.639065Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=44; 2025-03-31T16:30:41.639165Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=33; 2025-03-31T16:30:41.639207Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=7; 2025-03-31T16:30:41.639246Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=9; 2025-03-31T16:30:41.639284Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=7; 2025-03-31T16:30:41.639355Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=40; 2025-03-31T16:30:41.639395Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=7; 2025-03-31T16:30:41.639475Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=50; 2025-03-31T16:30:41.639527Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=8; 2025-03-31T16:30:41.639595Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=29; 2025-03-31T16:30:41.639694Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=62; 2025-03-31T16:30:41.639789Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=61; 2025-03-31T16:30:41.639831Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=41053; 2025-03-31T16:30:41.640002Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=113961708;raw_bytes=176366876;count=47;records=1845000} inactive {blob_bytes=174125508;raw_bytes=270077548;count=81;records=2819164} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-03-31T16:30:41.640900Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:10501:12462];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-03-31T16:30:41.640967Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:10501:12462];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-03-31T16:30:41.641054Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10501:12462];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-03-31T16:30:41.641106Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10501:12462];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=0; 2025-03-31T16:30:41.641301Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-31T16:30:41.641364Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:30:41.641569Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=20; 2025-03-31T16:30:41.641641Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2025-03-31T16:30:41.641745Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=20;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-31T16:30:41.641798Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-31T16:30:41.641838Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-31T16:30:41.641940Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-31T16:30:41.644519Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:30:41.647667Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10501:12462];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-03-31T16:30:41.649294Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:10501:12462];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-03-31T16:30:41.649336Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-03-31T16:30:41.649389Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-03-31T16:30:41.649465Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10501:12462];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-31T16:30:41.649552Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10501:12462];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:30:41.649629Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10501:12462];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=20; 2025-03-31T16:30:41.649728Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10501:12462];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2025-03-31T16:30:41.649778Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10501:12462];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=20;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-31T16:30:41.649845Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10501:12462];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-31T16:30:41.649888Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10501:12462];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-31T16:30:41.649975Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10501:12462];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=1.000000s; 2025-03-31T16:30:41.650026Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10501:12462];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; >> KqpMultishardIndex::SecondaryIndexSelectNull >> KqpIndexMetadata::HandleNotReadyIndex [GOOD] >> KqpIndexMetadata::TestNoReadFromMainTableBeforeJoin >> KqpMultishardIndex::DuplicateUpsert [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-35 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-36 >> KqpIndexes::CheckUpsertNonEquatableType+NotNull ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> OlapEstimationRowsCorrectness::TPCH5 [GOOD] Test command err: Trying to start YDB, gRPC: 21175, MsgBus: 26044 2025-03-31T16:29:06.791343Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011538008928892:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:29:06.799533Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018ae/r3tmp/tmpjYrtmn/pdisk_1.dat 2025-03-31T16:29:07.411617Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:29:07.411701Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:29:07.557425Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:29:07.559002Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21175, node 1 2025-03-31T16:29:07.908759Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:29:07.908781Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:29:07.908787Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:29:07.908895Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26044 TClient is connected to server localhost:26044 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:29:08.838596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:29:08.900361Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:29:11.699226Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011559483765901:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:11.699378Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:11.699905Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011559483765913:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:11.704520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:29:11.715768Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011538008928892:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:29:11.715925Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:29:11.734594Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-31T16:29:11.734877Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011559483765915:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:29:11.827461Z node 1 :TX_PROXY ERROR: Actor# [1:7488011559483765966:2340] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:29:12.414568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:29:12.689038Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011563778733540:2356];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:29:12.689038Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488011563778733553:2362];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:29:12.689196Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488011563778733553:2362];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:29:12.689427Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488011563778733553:2362];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:29:12.689496Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488011563778733553:2362];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:29:12.689557Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488011563778733553:2362];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:29:12.689631Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011563778733540:2356];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:29:12.689636Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488011563778733553:2362];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:29:12.689747Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488011563778733553:2362];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:29:12.689772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011563778733540:2356];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:29:12.689886Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011563778733540:2356];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 
2025-03-31T16:29:12.689911Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488011563778733553:2362];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:29:12.690053Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011563778733540:2356];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:29:12.690071Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488011563778733553:2362];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:29:12.690162Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011563778733540:2356];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:29:12.690180Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488011563778733553:2362];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:29:12.690285Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011563778733540:2356];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:29:12.690291Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488011563778733553:2362];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:29:12.690376Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011563778733540:2356];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:29:12.690398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488011563778733553:2362];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:29:12.690484Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011563778733540:2356];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:29:12.690588Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011563778733540:2356];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:29:12.690708Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011563778733540:2356];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:29:12.690817Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488011563778733540:2356];tablet_id=72075186224037 ... 
oller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:26.862019Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:26.867778Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039265;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:26.867946Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039339;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:26.872848Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:26.873664Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039241;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:26.877724Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:26.879569Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039329;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:26.882107Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039301;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:26.885364Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:26.886662Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:26.891071Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039382;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:26.891446Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:26.896036Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:26.896948Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039327;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:26.900690Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:26.902076Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039367;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:26.905246Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:26.906501Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:26.910262Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039273;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:26.912604Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:26.917011Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:26.918773Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039341;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:26.922002Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:26.925148Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:26.926734Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:26.931819Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:26.933169Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:26.938725Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:26.940394Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:26.945822Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:26.947463Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:26.952474Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:26.953944Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:26.959478Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:26.960507Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:26.966067Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:26.967939Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:26.972713Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:26.974068Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:26.979024Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:26.983987Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:26.990744Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:26.997411Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:27.004436Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:27.034871Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:27.139303Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpgkke41zky0z47wf7nsx6k", SessionId: ydb://session/3?node_id=1&id=NWQ0YWZmZmUtZWQ0Y2ZiMzgtZDU1MTRhYTItZTA3ZjJhNzI=, Slow query, duration: 29.566298s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 
(\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:30:27.398619Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:30:27.398619Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:30:27.398979Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;self_id=[1:7488011821476820922:9821];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039094;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038933;receive=72075186224039392; 2025-03-31T16:30:27.399418Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-29 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-30 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-29 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-30 >> KqpUniqueIndex::InsertNullInFk [GOOD] >> KqpIndexes::IndexOr [GOOD] >> KqpIndexes::IndexFilterPushDown >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-29 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-30 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpMultishardIndex::DuplicateUpsert [GOOD] Test command err: Trying to start YDB, gRPC: 16547, MsgBus: 25973 2025-03-31T16:30:30.898072Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011899244831277:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:30.898652Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000de0/r3tmp/tmpc2Hlg7/pdisk_1.dat 2025-03-31T16:30:31.335539Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:30:31.349042Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:30:31.349127Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:30:31.352929Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16547, node 1 2025-03-31T16:30:31.506377Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:30:31.506405Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:30:31.506413Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty 
maybe) 2025-03-31T16:30:31.506550Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25973 TClient is connected to server localhost:25973 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:30:32.251182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:32.271951Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:30:32.282581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:32.446081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:32.610821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:32.695855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:34.117115Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011916424702223:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:34.117328Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:34.601239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:30:34.670019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:30:34.695653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:30:34.725221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:30:34.756869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:30:34.826751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:30:34.914710Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011916424702742:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:34.915008Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:34.915354Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011916424702747:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:34.919156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:30:34.928594Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011916424702749:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:30:35.017115Z node 1 :TX_PROXY ERROR: Actor# [1:7488011920719670102:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:30:35.898493Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011899244831277:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:35.898568Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:30:36.039312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 65496, MsgBus: 15932 2025-03-31T16:30:38.881046Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488011936274897750:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:38.881094Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000de0/r3tmp/tmp1L1c6R/pdisk_1.dat 2025-03-31T16:30:39.016865Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:30:39.046570Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:30:39.046675Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 65496, node 2 2025-03-31T16:30:39.048770Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:30:39.088804Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:30:39.088828Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:30:39.088837Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:30:39.088942Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15932 TClient is connected to server localhost:15932 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:30:39.529428Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:39.540550Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:30:39.619669Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-31T16:30:39.772483Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:39.843430Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:42.069839Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488011953454768698:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:42.069951Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:42.103539Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:30:42.136324Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:30:42.172771Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:30:42.208017Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:30:42.241454Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:30:42.310886Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:30:42.352184Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488011953454769210:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:42.352264Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:42.352388Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488011953454769215:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:42.355628Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:30:42.364338Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488011953454769217:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:30:42.460896Z node 2 :TX_PROXY ERROR: Actor# [2:7488011953454769270:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:30:43.316628Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:43.881544Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488011936274897750:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:43.881592Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ColdCompactionSmoke [GOOD] Test command err: 2025-03-31T16:23:21.037832Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:23:21.145879Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:23:21.171980Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:23:21.172360Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:23:21.181070Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:23:21.181286Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:23:21.181572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:23:21.181703Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:23:21.181812Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:23:21.181929Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:23:21.182040Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:23:21.182170Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:23:21.182309Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:23:21.182417Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:23:21.182542Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:23:21.182656Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:23:21.214362Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:23:21.214596Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:23:21.214659Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:23:21.214887Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:23:21.215056Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:23:21.215173Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:23:21.215290Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:23:21.215397Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:23:21.215470Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:23:21.215520Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:23:21.215559Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:23:21.215780Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:23:21.215861Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:23:21.215914Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:23:21.215949Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:23:21.216046Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:23:21.216110Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:23:21.216197Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:23:21.216247Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:23:21.216328Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:23:21.216375Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:23:21.216433Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:23:21.216497Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:23:21.216549Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:23:21.216579Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:23:21.217049Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=51; 2025-03-31T16:23:21.217149Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=40; 2025-03-31T16:23:21.217236Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=35; 2025-03-31T16:23:21.217317Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=37; 2025-03-31T16:23:21.217576Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:23:21.217691Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:23:21.217747Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:23:21.217976Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:23:21.218027Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:23:21.218073Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:23:21.218260Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:23:21.218311Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:23:21.218346Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-31T16:23:21.218572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:23:21.218632Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:23:21.218667Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-31T16:23:21.218805Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T16:23:21.218849Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T16:23:21.218899Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
Object;id=[9437184:2:99:255:108:2776:0]; 2025-03-31T16:30:39.628425Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:109:2792:0]; 2025-03-31T16:30:39.628486Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:110:2776:0]; 2025-03-31T16:30:39.628557Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:111:2784:0]; 2025-03-31T16:30:39.628645Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:112:2760:0]; 2025-03-31T16:30:39.628712Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:113:2776:0]; 2025-03-31T16:30:39.628786Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:114:9568:0]; 2025-03-31T16:30:39.628857Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:115:2848:0]; 2025-03-31T16:30:39.628917Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:116:2840:0]; 2025-03-31T16:30:39.628985Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:117:2856:0]; 2025-03-31T16:30:39.629044Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:118:2856:0]; 2025-03-31T16:30:39.629130Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:119:2848:0]; 2025-03-31T16:30:39.629218Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:120:2840:0]; 2025-03-31T16:30:39.629293Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:121:2856:0]; 2025-03-31T16:30:39.629354Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:122:2848:0]; 2025-03-31T16:30:39.629416Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:123:2848:0]; 2025-03-31T16:30:39.629476Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:124:2856:0]; 2025-03-31T16:30:39.629537Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:125:2856:0]; 2025-03-31T16:30:39.629601Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:126:2848:0]; 2025-03-31T16:30:39.629688Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:127:2856:0]; 2025-03-31T16:30:39.629774Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:128:2832:0]; 2025-03-31T16:30:39.629836Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:129:2840:0]; 2025-03-31T16:30:39.629894Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:130:2848:0]; 2025-03-31T16:30:39.629955Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:131:2840:0]; 2025-03-31T16:30:39.630042Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:132:2840:0]; 2025-03-31T16:30:39.630130Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:133:2848:0]; 2025-03-31T16:30:39.630194Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:134:2848:0]; 2025-03-31T16:30:39.630271Z node 1 :S3_WRAPPER DEBUG: 
fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:135:2832:0]; 2025-03-31T16:30:39.630354Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:136:2848:0]; 2025-03-31T16:30:39.630426Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:137:2848:0]; 2025-03-31T16:30:39.630491Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:138:2840:0]; 2025-03-31T16:30:39.630555Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:139:2832:0]; 2025-03-31T16:30:39.630632Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:140:2840:0]; 2025-03-31T16:30:39.630705Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:141:2848:0]; 2025-03-31T16:30:39.630800Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:142:2848:0]; 2025-03-31T16:30:39.630874Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:143:2776:0]; 2025-03-31T16:30:39.630960Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:144:2792:0]; 2025-03-31T16:30:39.631042Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:145:2784:0]; 2025-03-31T16:30:39.631192Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:146:2784:0]; 2025-03-31T16:30:39.631261Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:147:2776:0]; 2025-03-31T16:30:39.631352Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:148:2792:0]; 2025-03-31T16:30:39.631439Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:149:2776:0]; 2025-03-31T16:30:39.631508Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:150:2768:0]; 2025-03-31T16:30:39.631574Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:151:2776:0]; 2025-03-31T16:30:39.631633Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:152:9576:0]; 2025-03-31T16:30:39.636586Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];ev_type=NKikimr::NOlap::NResourceBroker::NSubscribe::TEvStartTask;fline=actor.cpp:38;event=ask_resources;task=cpu=0;mem=2799958;external_task_id=7b867f5e-e4d11f0-8c79ef69-c9d54074;type=CS::INDEXATION;priority=0;; 2025-03-31T16:30:39.639414Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=actor.cpp:29;event=result_resources;task_id=100;task=cpu=0;mem=3571882;external_task_id=7b863364-e4d11f0-a668e500-fb20e334;type=CS::INDEXATION;priority=0;; 2025-03-31T16:30:39.639482Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:9;event=resource_allocated;external_task_id=7b863364-e4d11f0-a668e500-fb20e334;mem=3571882;cpu=0; 2025-03-31T16:30:39.639532Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:40;event=allocate_resources;external_task_id=7b863364-e4d11f0-a668e500-fb20e334;task_id=100;mem=3571882;cpu=0; 
2025-03-31T16:30:39.642343Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=7b863364-e4d11f0-a668e500-fb20e334;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=7b863364-e4d11f0-a668e500-fb20e334; 2025-03-31T16:30:41.463518Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=7b863364-e4d11f0-a668e500-fb20e334;fline=actor.cpp:48;task=agents_waiting=0;additional_info=();; 2025-03-31T16:30:41.465757Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=2; 2025-03-31T16:30:41.478115Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=actor.cpp:29;event=result_resources;task_id=101;task=cpu=0;mem=2799958;external_task_id=7b867f5e-e4d11f0-8c79ef69-c9d54074;type=CS::INDEXATION;priority=0;; 2025-03-31T16:30:41.478199Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:9;event=resource_allocated;external_task_id=7b867f5e-e4d11f0-8c79ef69-c9d54074;mem=2799958;cpu=0; 2025-03-31T16:30:41.478250Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:40;event=allocate_resources;external_task_id=7b867f5e-e4d11f0-8c79ef69-c9d54074;task_id=101;mem=2799958;cpu=0; 2025-03-31T16:30:41.483417Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=7b867f5e-e4d11f0-8c79ef69-c9d54074;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=7b867f5e-e4d11f0-8c79ef69-c9d54074; 2025-03-31T16:30:42.977559Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=7b867f5e-e4d11f0-8c79ef69-c9d54074;fline=actor.cpp:48;task=agents_waiting=0;additional_info=();; 2025-03-31T16:30:42.980944Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=2; 2025-03-31T16:30:43.528214Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 9437184 2025-03-31T16:30:43.528903Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[306] (CS::GENERAL) apply at tablet 9437184 2025-03-31T16:30:43.606699Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:125 Blob count: 422 2025-03-31T16:30:43.611640Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=3445356;raw_bytes=5239242;count=3;records=53332} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=5183316;raw_bytes=7864534;count=3;records=80000} inactive {blob_bytes=215061504;raw_bytes=326598142;count=144;records=3322060} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; 
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/hot' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/hot' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 >> KqpUniqueIndex::InsertFkAlreadyExist >> TAsyncIndexTests::MergeIndexWithReboots[PipeResets] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> OlapEstimationRowsCorrectness::TPCH9 [GOOD] Test command err: Trying to start YDB, gRPC: 15935, MsgBus: 21032 2025-03-31T16:29:10.795341Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011558173393814:2125];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:29:10.795393Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018a8/r3tmp/tmpQ1Qvuu/pdisk_1.dat 2025-03-31T16:29:11.626319Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:29:11.626401Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:29:11.634510Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:29:11.643315Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15935, node 1 2025-03-31T16:29:11.839927Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:29:11.839945Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:29:11.839954Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:29:11.840044Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21032 TClient is connected to server localhost:21032 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:29:12.671124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:29:12.706527Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:29:15.178990Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011579648230904:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:15.179268Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:15.179717Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011579648230916:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:15.183406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:29:15.194201Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011579648230918:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:29:15.283039Z node 1 :TX_PROXY ERROR: Actor# [1:7488011579648230969:2344] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:29:15.701854Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:29:15.795272Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011558173393814:2125];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:29:15.795361Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:29:16.023846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488011579648231251:2362];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:29:16.024093Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488011579648231251:2362];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:29:16.024384Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488011579648231251:2362];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:29:16.024513Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488011579648231251:2362];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:29:16.024644Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488011579648231251:2362];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:29:16.024803Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488011579648231251:2362];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:29:16.024919Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488011579648231251:2362];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:29:16.025026Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488011579648231251:2362];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:29:16.025133Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488011579648231251:2362];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:29:16.025232Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037902;self_id=[1:7488011579648231251:2362];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:29:16.025360Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488011579648231251:2362];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:29:16.025513Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488011579648231251:2362];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:29:16.030187Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7488011579648231230:2351];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:29:16.030247Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7488011579648231230:2351];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:29:16.030482Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7488011579648231230:2351];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:29:16.030592Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7488011579648231230:2351];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:29:16.030695Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7488011579648231230:2351];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:29:16.030788Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7488011579648231230:2351];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:29:16.030877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7488011579648231230:2351];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:29:16.030987Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7488011579648231230:2351];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:29:16.039821Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7488011579648231230:2351];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:29:16.040106Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7488011579648231230:2351];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:29:16.040231Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037893;self_id=[1:7488011579648231230:2351];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:29:16.040344Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7488011579648231230:2351];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:29:16.098206Z node 1 :T ... ller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:28.470875Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039317;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:28.470875Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039378;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:28.477362Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:28.477363Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039271;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:28.484488Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039333;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:28.484488Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:28.490970Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039321;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:28.490970Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:28.496853Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039269;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:28.496950Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039319;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:28.503333Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039303;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:28.503333Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:28.510365Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:28.510367Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039259;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:28.517312Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039360;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:28.517312Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039367;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:28.524493Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039279;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:28.525149Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:28.530927Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039380;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:28.531260Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:28.537106Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:28.537359Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039325;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:28.543017Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039309;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:28.543827Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:28.548921Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:28.550044Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:28.554515Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039295;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:28.556246Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:28.560678Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039376;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:28.567719Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:28.573268Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:28.573988Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:28.580948Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039339;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:28.581981Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:28.588394Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039307;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:28.589064Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:28.595054Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:28.595219Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:28.601928Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:28.602080Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:28.609464Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:28.609464Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:28.616703Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039356;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:28.616705Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039326;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:28.624082Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:28.756505Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpgkqk8fthvbgg6ajd765jf", SessionId: ydb://session/3?node_id=1&id=NzNhN2Y2Yy1iNzQxZTkyNy1hMTVkOThjMC05MjMzOTI0Zg==, Slow query, duration: 26.923508s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:30:29.117026Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:30:29.117112Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:30:29.117383Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;self_id=[1:7488011863116128354:10704];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039392;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039094;receive=72075186224038933; 2025-03-31T16:30:29.117711Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::InsertNullInFk [GOOD] Test command err: Trying to start YDB, gRPC: 11674, MsgBus: 1401 2025-03-31T16:30:30.893293Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011900484219554:2200];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:30.893359Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000de8/r3tmp/tmp1MXEWf/pdisk_1.dat 2025-03-31T16:30:31.332249Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:30:31.348877Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:30:31.349035Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:30:31.351848Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11674, node 1 2025-03-31T16:30:31.504207Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:30:31.504238Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:30:31.504248Z node 1 :NET_CLASSIFIER WARN: failed to initialize 
from file: (empty maybe) 2025-03-31T16:30:31.504395Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1401 TClient is connected to server localhost:1401 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:30:32.245701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:32.301218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:32.470147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:32.648790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:30:32.727493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:30:34.148032Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011917664090384:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:34.148142Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:34.598061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:30:34.630667Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:30:34.655971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:30:34.682221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:30:34.745390Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:30:34.779990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:30:34.863155Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011917664090901:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:34.863258Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:34.863368Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011917664090906:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:34.867586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:30:34.879167Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011917664090908:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:30:34.979567Z node 1 :TX_PROXY ERROR: Actor# [1:7488011917664090965:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:30:35.885237Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011900484219554:2200];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:35.885319Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:30:36.031060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:37.890947Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488011930548994413:2619], TxId: 281474976710679, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=NTg5MWQ2ZjgtM2Y5NGFlYWUtZDM4OTg1OGUtY2Y0NzQ1NzE=. TraceId : 01jqpgmtevb5tx0t9b0rn3sr3x. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-03-31T16:30:37.891394Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488011930548994415:2620], TxId: 281474976710679, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=NTg5MWQ2ZjgtM2Y5NGFlYWUtZDM4OTg1OGUtY2Y0NzQ1NzE=. TraceId : 01jqpgmtevb5tx0t9b0rn3sr3x. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7488011930548994410:2556], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-03-31T16:30:37.891703Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTg5MWQ2ZjgtM2Y5NGFlYWUtZDM4OTg1OGUtY2Y0NzQ1NzE=, ActorId: [1:7488011926254026622:2556], ActorState: ExecuteState, TraceId: 01jqpgmtevb5tx0t9b0rn3sr3x, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 17744, MsgBus: 30757 2025-03-31T16:30:38.740947Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488011936783324375:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:38.740990Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000de8/r3tmp/tmp7wdGK4/pdisk_1.dat 2025-03-31T16:30:38.841150Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17744, node 2 2025-03-31T16:30:38.876053Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:30:38.876130Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:30:38.878523Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:30:38.923694Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:30:38.923717Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:30:38.923725Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:30:38.923867Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30757 TClient is connected to server localhost:30757 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:30:39.378373Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:39.394655Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-03-31T16:30:39.475661Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-31T16:30:39.647417Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:39.722895Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:42.042260Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488011953963195327:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:42.042398Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:42.080164Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:30:42.112916Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:30:42.145935Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:30:42.178223Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:30:42.207767Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:30:42.241527Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:30:42.319835Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488011953963195838:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:42.320225Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:42.320257Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488011953963195843:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:42.323805Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:30:42.334289Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488011953963195845:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:30:42.405841Z node 2 :TX_PROXY ERROR: Actor# [2:7488011953963195900:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:30:43.234279Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:43.741233Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488011936783324375:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:43.741305Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpIndexes::SimpleVectorIndexOrderByCosineDistanceNullableLevel1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::MergeIndexWithReboots[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [1:135:2157] sender: [1:136:2058] recipient: [1:111:2142] 2025-03-31T16:29:47.605379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:29:47.605482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:29:47.605523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:29:47.605557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:29:47.605607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:29:47.605633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:29:47.605699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:29:47.605801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, 
InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:29:47.606108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:29:47.704279Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-03-31T16:29:47.704364Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:169:2058] recipient: [1:15:2062] 2025-03-31T16:29:47.750154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:29:47.750342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:29:47.750487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:29:47.763738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:29:47.763983Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:29:47.764601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:47.764831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:29:47.772145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:47.773701Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:29:47.773771Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:47.773922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:29:47.773973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:29:47.774010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:29:47.774146Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:210:2058] recipient: [1:208:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:210:2058] recipient: [1:208:2209] Leader for TabletID 72057594037968897 is [1:214:2213] sender: [1:215:2058] recipient: [1:208:2209] 2025-03-31T16:29:47.781220Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:235:2058] recipient: [1:15:2062] 2025-03-31T16:29:47.902390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 
1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:29:47.902639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:47.902841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:29:47.903191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:29:47.903255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:47.905500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:47.905624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:29:47.905822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:47.905898Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:29:47.905938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:29:47.905972Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:29:47.907763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:47.907813Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:29:47.907845Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:29:47.909694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:47.909737Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:47.909787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:47.909840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:29:47.913430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:29:47.915405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:29:47.915604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:135:2157] sender: [1:250:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 
State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:29:47.916551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:47.916680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 135 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:29:47.916729Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:47.916996Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:29:47.917045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:47.917211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:29:47.917310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:29:47.924011Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:29:47.924063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:29:47.924243Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:47.924283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:202:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:29:47.924583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:47.924627Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:29:47.924716Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:29:47.924748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:29:47.924789Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:29:47.924840Z no ... 
tusSuccess 2025-03-31T16:30:46.658721Z node 26 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 
67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:30:46.664071Z node 26 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { 
ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-31T16:30:46.664287Z node 26 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 249us result status StatusSuccess 2025-03-31T16:30:46.664976Z node 26 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 
1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpUniqueIndex::UpdateFkSameValue >> KqpUniqueIndex::UpdateImplicitNullInComplexFk2 [GOOD] >> KqpMultishardIndex::WriteIntoRenamingSyncIndex >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-14 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-25 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-7 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-8 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-48 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-1 >> KqpIndexes::UpdateDeletePlan+UseSink >> KqpUniqueIndex::UpdateOnHidenChanges+DataColumn [GOOD] >> KqpUniqueIndex::ReplaceFkDuplicate [GOOD] >> KqpUniqueIndex::ReplaceFkPartialColumnSet >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-6 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-7 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::UpdateImplicitNullInComplexFk2 [GOOD] Test command err: Trying to start YDB, gRPC: 16213, MsgBus: 11719 2025-03-31T16:30:30.874469Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011902379519544:2138];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:30.875454Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000de2/r3tmp/tmp22OJdZ/pdisk_1.dat 2025-03-31T16:30:31.343506Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:30:31.362678Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:30:31.362801Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:30:31.366174Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on 
GrpcPort 16213, node 1 2025-03-31T16:30:31.504005Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:30:31.504033Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:30:31.504040Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:30:31.504189Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11719 TClient is connected to server localhost:11719 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:30:32.245104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:32.296002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:32.430475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:32.598998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:30:32.673705Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:30:34.205372Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011919559390418:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:34.205497Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:34.598114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:30:34.629979Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:30:34.655797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:30:34.683207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:30:34.712589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:30:34.745102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:30:34.800880Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011919559390927:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:34.800954Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:34.801028Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011919559390932:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:34.809596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:30:34.819684Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011919559390934:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:30:34.911999Z node 1 :TX_PROXY ERROR: Actor# [1:7488011919559390990:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:30:35.883183Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011902379519544:2138];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:35.883304Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:30:36.033575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:37.891142Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqpgmt48cnfe6j9xt0dhd1fv, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjQ4YTM1NDItNWI4NTY3Y2EtYmY4ZWYxYjItZWRkYzg1ZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-03-31T16:30:37.902274Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjQ4YTM1NDItNWI4NTY3Y2EtYmY4ZWYxYjItZWRkYzg1ZA==, ActorId: [1:7488011928149326690:2556], ActorState: ExecuteState, TraceId: 01jqpgmt48cnfe6j9xt0dhd1fv, Create QueryResponse for error on request, msg: 2025-03-31T16:30:38.619679Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqpgmttr864jztme4n9178bb, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjQ4YTM1NDItNWI4NTY3Y2EtYmY4ZWYxYjItZWRkYzg1ZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 
2025-03-31T16:30:38.619963Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjQ4YTM1NDItNWI4NTY3Y2EtYmY4ZWYxYjItZWRkYzg1ZA==, ActorId: [1:7488011928149326690:2556], ActorState: ExecuteState, TraceId: 01jqpgmttr864jztme4n9178bb, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 23677, MsgBus: 16041 2025-03-31T16:30:39.364279Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488011939988736938:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:39.364331Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000de2/r3tmp/tmp5F8IzX/pdisk_1.dat 2025-03-31T16:30:39.484301Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:30:39.501225Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:30:39.501323Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:30:39.502823Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23677, node 2 2025-03-31T16:30:39.544194Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:30:39.544226Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:30:39.544233Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:30:39.544324Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16041 TClient is connected to server localhost:16041 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-31T16:30:39.968502Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:30:39.980336Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
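[Editor's note] Each failed query above surfaces twice: a KQP_EXECUTER ERROR from TKqpLiteralExecuter ("TKqpEnsure failed") and a matching KQP_SESSION WARN, and the two share the same TraceId, a 26-character lowercase ULID such as 01jqpgmt48cnfe6j9xt0dhd1fv. In these index suites the TKqpEnsure failure is most likely the expected negative path of the test (a precondition deliberately tripped), which is consistent with the suites still reporting [GOOD]. Below is a minimal sketch, assuming only that TraceId format, for regrouping such pairs when reading a saved log offline; the helper is illustrative and not part of the ya tooling:

import re
import sys
from collections import defaultdict

# A TraceId in these logs is a 26-character ULID, e.g. 01jqpgmt48cnfe6j9xt0dhd1fv.
TRACE_RE = re.compile(r"TraceId: ([0-9a-z]{26})")

def pair_by_trace(lines):
    """Group log lines mentioning the same TraceId, so a KQP_EXECUTER
    error and its KQP_SESSION follow-up can be read together."""
    groups = defaultdict(list)
    for line in lines:
        match = TRACE_RE.search(line)
        if match:
            groups[match.group(1)].append(line.rstrip())
    return groups

if __name__ == "__main__":
    with open(sys.argv[1], encoding="utf-8", errors="replace") as fh:
        for trace_id, entries in pair_by_trace(fh).items():
            print(trace_id)
            for entry in entries:
                print("  ", entry)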
2025-03-31T16:30:40.038486Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:40.203714Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:40.265772Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:42.557516Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488011952873640594:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:42.557626Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:42.606780Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:30:42.640970Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:30:42.707612Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:30:42.736220Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:30:42.768953Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:30:42.807770Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:30:42.856789Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488011952873641106:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:42.856879Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:42.856946Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488011952873641111:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:42.860309Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:30:42.870036Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488011952873641113:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:30:42.932467Z node 2 :TX_PROXY ERROR: Actor# [2:7488011952873641167:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:30:43.762252Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:44.364775Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488011939988736938:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:44.364943Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:30:46.052636Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqpgn1vm8d4na03etpkwqwbv, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjJjODhkMDUtMWVmNmFmNjItYzFhZDU5ZGQtMTkwYjdkNGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-03-31T16:30:46.052976Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZjJjODhkMDUtMWVmNmFmNjItYzFhZDU5ZGQtMTkwYjdkNGY=, ActorId: [2:7488011961463576850:2554], ActorState: ExecuteState, TraceId: 01jqpgn1vm8d4na03etpkwqwbv, Create QueryResponse for error on request, msg: 2025-03-31T16:30:47.472761Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqpgn2zw3ym1z0p5kffx1f37, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjJjODhkMDUtMWVmNmFmNjItYzFhZDU5ZGQtMTkwYjdkNGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 
2025-03-31T16:30:47.473046Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZjJjODhkMDUtMWVmNmFmNjItYzFhZDU5ZGQtMTkwYjdkNGY=, ActorId: [2:7488011961463576850:2554], ActorState: ExecuteState, TraceId: 01jqpgn2zw3ym1z0p5kffx1f37, Create QueryResponse for error on request, msg: >> KqpIndexes::PrefixedVectorIndexOrderByCosineDistanceNotNullableLevel1 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-42 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-43 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::UpdateOnHidenChanges+DataColumn [GOOD] Test command err: Trying to start YDB, gRPC: 29329, MsgBus: 64428 2025-03-31T16:30:30.875647Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011900053715680:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:30.875683Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000de1/r3tmp/tmpnoOQDq/pdisk_1.dat 2025-03-31T16:30:31.323527Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:30:31.335864Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:30:31.336587Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:30:31.347060Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29329, node 1 2025-03-31T16:30:31.503931Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:30:31.503953Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:30:31.503958Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:30:31.504063Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64428 TClient is connected to server localhost:64428 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:30:32.244236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
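[Editor's note] Most of the WARN/ERROR volume in this section is one-time startup chatter that every freshly started test node emits: NET_CLASSIFIER has no distributable config, metadata migrations time out, and the workload service logs "Resource pool default not found" until it lazily creates /Root/.metadata/workload_manager/pools/default; the later TX_PROXY "path exist, request accepts it" message appears to be that creation racing its own double-check. A rough triage filter follows, assuming these patterns stay benign; the pattern list is a judgment call distilled from this log, not an official classification:

import re
import sys

# Heuristic patterns observed in this run; adjust per log.
NOISE = re.compile(
    r"Resource pool default not found"
    r"|Failed to fetch pool"
    r"|path exist, request accepts it"
    r"|Table profiles were not loaded"
    r"|distributable config is empty"
    r"|scheme_cache_undelivered_message"
)
SIGNAL = re.compile(r"TKqpEnsure failed|VERIFY failed|\[FAIL\]|\[TIMEOUT\]")

def triage(path):
    for line in open(path, encoding="utf-8", errors="replace"):
        if SIGNAL.search(line):
            print("SIGNAL:", line.rstrip())
        elif not NOISE.search(line):
            print("OTHER :", line.rstrip())  # keep anything unclassified

if __name__ == "__main__":
    triage(sys.argv[1])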
2025-03-31T16:30:32.289976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:32.474785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:30:32.652252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:32.727461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:30:34.233528Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011917233586635:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:34.233636Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:34.598023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:30:34.630616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:30:34.656922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:30:34.684758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:30:34.720816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:30:34.793106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:30:34.848782Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011917233587150:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:34.848851Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:34.849048Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011917233587155:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:34.852523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:30:34.862896Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011917233587157:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:30:34.930309Z node 1 :TX_PROXY ERROR: Actor# [1:7488011917233587211:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:30:35.875934Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011900053715680:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:35.876010Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:30:36.029592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 1383, MsgBus: 3080 2025-03-31T16:30:40.470594Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488011944454017133:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:40.470662Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000de1/r3tmp/tmpCnzDVF/pdisk_1.dat 2025-03-31T16:30:40.573806Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1383, node 2 2025-03-31T16:30:40.607362Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:30:40.607496Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:30:40.608758Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:30:40.651849Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:30:40.651880Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:30:40.651889Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:30:40.652052Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3080 TClient is connected to server localhost:3080 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:30:41.086712Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:41.102766Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:41.191563Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:41.387275Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:41.478290Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:43.915319Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488011957338920782:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:43.915431Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:43.970530Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:30:44.006135Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:30:44.043812Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:30:44.085945Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:30:44.127954Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:30:44.168377Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:30:44.249204Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488011961633888594:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:44.249346Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:44.254818Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488011961633888600:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:44.258351Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:30:44.273534Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488011961633888602:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:30:44.372843Z node 2 :TX_PROXY ERROR: Actor# [2:7488011961633888657:3450] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:30:45.247767Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:45.472625Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488011944454017133:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:45.472909Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:30:47.154550Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqpgn31m3nvzb0jkjgtbm8de, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODJiNDdlZGItYjkzZDFkMy05ODYzYjhiNi05Yzc2YTY5, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-03-31T16:30:47.164175Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ODJiNDdlZGItYjkzZDFkMy05ODYzYjhiNi05Yzc2YTY5, ActorId: [2:7488011965928857051:2554], ActorState: ExecuteState, TraceId: 01jqpgn31m3nvzb0jkjgtbm8de, Create QueryResponse for error on request, msg: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::ReplaceFkDuplicate [GOOD] Test command err: Trying to start YDB, gRPC: 27672, MsgBus: 3480 2025-03-31T16:30:30.870027Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011899992438555:2133];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:30.870069Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000de6/r3tmp/tmpSXFS3y/pdisk_1.dat 2025-03-31T16:30:31.332529Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:30:31.337107Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:30:31.337188Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:30:31.347037Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27672, node 1 2025-03-31T16:30:31.503931Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:30:31.503952Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:30:31.503963Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:30:31.504082Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3480 TClient is connected to server localhost:3480 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:30:32.253193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:32.283047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:32.440615Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:32.607283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:32.686238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:34.071651Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011917172309438:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:34.071782Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:34.599384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:30:34.630729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:30:34.658087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:30:34.685015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:30:34.712354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:30:34.744413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:30:34.826757Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011917172309953:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:34.826884Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:34.827224Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011917172309958:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:34.831613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:30:34.845664Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011917172309960:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:30:34.926951Z node 1 :TX_PROXY ERROR: Actor# [1:7488011917172310017:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:30:35.870252Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011899992438555:2133];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:35.870356Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:30:36.034421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:37.891155Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqpgmt4fegmd4d8wgp6ay50e, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2RiMzljNDYtNWM0N2UzMTQtZjdkOTlmYjMtMzA2ODJjYWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-03-31T16:30:37.902843Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=M2RiMzljNDYtNWM0N2UzMTQtZjdkOTlmYjMtMzA2ODJjYWU=, ActorId: [1:7488011925762245722:2556], ActorState: ExecuteState, TraceId: 01jqpgmt4fegmd4d8wgp6ay50e, Create QueryResponse for error on request, msg: 2025-03-31T16:30:38.551301Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqpgmttpcysfynag2707r219, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2RiMzljNDYtNWM0N2UzMTQtZjdkOTlmYjMtMzA2ODJjYWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 
2025-03-31T16:30:38.551595Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=M2RiMzljNDYtNWM0N2UzMTQtZjdkOTlmYjMtMzA2ODJjYWU=, ActorId: [1:7488011925762245722:2556], ActorState: ExecuteState, TraceId: 01jqpgmttpcysfynag2707r219, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 29062, MsgBus: 23867 2025-03-31T16:30:39.347414Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488011938604196404:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:39.348486Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000de6/r3tmp/tmp0kTels/pdisk_1.dat 2025-03-31T16:30:39.442892Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29062, node 2 2025-03-31T16:30:39.486014Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:30:39.486117Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:30:39.494334Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:30:39.542259Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:30:39.542277Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:30:39.542286Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:30:39.542415Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23867 TClient is connected to server localhost:23867 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:30:39.987029Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:39.992977Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:30:40.004071Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
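[Editor's note] Interleaved with the per-test stderr, the runner prints verdict markers of the form ">> Suite::TestName [GOOD]" (a bare ">> Suite::TestName" announces a test that is only starting). A small sketch that tallies those verdicts from a saved log, assuming that marker format; the junit.xml produced by the run remains the authoritative source:

import re
import sys
from collections import Counter

# ">> KqpMultishardIndex::CheckPushTopSort [GOOD]" -> ("KqpMultishardIndex::CheckPushTopSort", "GOOD")
VERDICT_RE = re.compile(r">> (\S+) \[([A-Z]+)\]")

def summarize(path):
    text = open(path, encoding="utf-8", errors="replace").read()
    verdicts = VERDICT_RE.findall(text)
    return verdicts, Counter(status for _, status in verdicts)

if __name__ == "__main__":
    verdicts, by_status = summarize(sys.argv[1])
    for name, status in verdicts:
        if status != "GOOD":
            print(status, name)
    print(dict(by_status))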
2025-03-31T16:30:40.061141Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:40.204606Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:40.268108Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:42.544073Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488011951489100062:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:42.544192Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:42.592159Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:30:42.629032Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:30:42.660541Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:30:42.691226Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:30:42.725000Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:30:42.761547Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:30:42.822306Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488011951489100570:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:42.822380Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:42.822484Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488011951489100575:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:42.827236Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:30:42.837778Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488011951489100577:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:30:42.898923Z node 2 :TX_PROXY ERROR: Actor# [2:7488011951489100630:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:30:43.665831Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:44.345359Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488011938604196404:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:44.345429Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:30:45.707750Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqpgn1pf9sjdpc4kjhhwemjn, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjkwN2FmZDItOGEwYWI2YjUtYTUxMjMzZTQtZWM4NzlhM2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-03-31T16:30:45.708048Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MjkwN2FmZDItOGEwYWI2YjUtYTUxMjMzZTQtZWM4NzlhM2Y=, ActorId: [2:7488011955784068997:2554], ActorState: ExecuteState, TraceId: 01jqpgn1pf9sjdpc4kjhhwemjn, Create QueryResponse for error on request, msg: 2025-03-31T16:30:46.507398Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqpgn2ek4j9qqwvmx6ebg055, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjkwN2FmZDItOGEwYWI2YjUtYTUxMjMzZTQtZWM4NzlhM2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 
2025-03-31T16:30:46.507715Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MjkwN2FmZDItOGEwYWI2YjUtYTUxMjMzZTQtZWM4NzlhM2Y=, ActorId: [2:7488011955784068997:2554], ActorState: ExecuteState, TraceId: 01jqpgn2ek4j9qqwvmx6ebg055, Create QueryResponse for error on request, msg: >> KqpMultishardIndex::CheckPushTopSort [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-36 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-37 >> KqpIndexes::SecondaryIndexOrderBy2 [GOOD] >> KqpIndexes::SecondaryIndexReplace+UseSink >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-30 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-31 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-30 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-49 >> KqpIndexes::UpdateIndexSubsetPk [GOOD] >> KqpIndexes::UpdateOnReadColumns >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-30 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-31 >> KqpIndexes::UpsertWithNullKeysSimple >> KqpIndexes::MultipleSecondaryIndex-UseSink [GOOD] >> KqpIndexes::MultipleSecondaryIndexWithSameComulns+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpMultishardIndex::CheckPushTopSort [GOOD] Test command err: Trying to start YDB, gRPC: 11996, MsgBus: 7060 2025-03-31T16:30:38.207864Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011935386779811:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:38.211430Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dd5/r3tmp/tmpO0rq8v/pdisk_1.dat 2025-03-31T16:30:38.551548Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11996, node 1 2025-03-31T16:30:38.600424Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:30:38.601341Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:30:38.618151Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:30:38.655633Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:30:38.655662Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:30:38.655669Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:30:38.655790Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7060 TClient is connected to server localhost:7060 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:30:39.214769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:39.248136Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:30:39.260593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:39.401840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:39.581604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:39.666253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:41.361214Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011948271683491:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:41.361305Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:41.686899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:30:41.724134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:30:41.755352Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:30:41.794439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:30:41.834007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:30:41.873112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:30:41.926358Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011948271684002:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:41.926458Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:41.926797Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011948271684007:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:41.931444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:30:41.942432Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011948271684009:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:30:42.025980Z node 1 :TX_PROXY ERROR: Actor# [1:7488011952566651358:3444] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:30:42.914830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:30:43.205283Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011935386779811:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:43.205344Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:30:43.589013Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 7315, MsgBus: 12997 2025-03-31T16:30:44.552721Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488011962760729908:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:44.552769Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dd5/r3tmp/tmpwxlA5J/pdisk_1.dat 2025-03-31T16:30:44.657457Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:30:44.678209Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:30:44.678299Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:30:44.680664Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7315, node 2 2025-03-31T16:30:44.715400Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:30:44.715423Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:30:44.715432Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:30:44.715544Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12997 TClient is connected to server localhost:12997 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:30:45.166908Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:45.177515Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:30:45.193427Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:45.250884Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:45.402941Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:30:45.483214Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-31T16:30:47.341298Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488011975645633548:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:47.341378Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:47.382010Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:30:47.406786Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:30:47.433760Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:30:47.466313Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:30:47.495577Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:30:47.528726Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:30:47.613360Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488011975645634061:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:47.613443Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:47.613498Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488011975645634066:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:47.617738Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:30:47.632138Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488011975645634068:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:30:47.689315Z node 2 :TX_PROXY ERROR: Actor# [2:7488011975645634123:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:30:48.446772Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:49.553055Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488011962760729908:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:49.553118Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpIndexes::CheckUpsertNonEquatableType+NotNull [GOOD] >> KqpIndexes::CheckUpsertNonEquatableType-NotNull >> OlapEstimationRowsCorrectness::TPCDS78 [GOOD] >> KqpIndexes::NullInIndexTableNoDataRead >> KqpIndexes::SelectConcurentTX2 >> KqpIndexMetadata::TestNoReadFromMainTableBeforeJoin [GOOD] >> KqpIndexMetadata::HandleWriteOnlyIndex >> KqpMultishardIndex::SecondaryIndexSelectNull [GOOD] >> KqpMultishardIndex::SecondaryIndexSelect >> KqpIndexes::UpsertMultipleUniqIndexes [GOOD] >> KqpIndexes::UpsertNoIndexColumns >> KqpUniqueIndex::UpsertExplicitNullInComplexFk |93.8%| [TA] $(B)/ydb/core/tx/columnshard/ut_schema/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpUniqueIndex::InsertFkAlreadyExist [GOOD] >> KqpUniqueIndex::InsertFkDuplicate >> KqpIndexes::IndexFilterPushDown [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-8 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-9 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-1 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-2 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-25 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-26 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-7 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-43 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-8 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-44 >> KqpIndexes::UpdateDeletePlan+UseSink [GOOD] >> KqpIndexes::UpdateDeletePlan-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> OlapEstimationRowsCorrectness::TPCDS78 [GOOD] Test command err: Trying to start YDB, gRPC: 27024, MsgBus: 64285 2025-03-31T16:28:49.932528Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011468803622576:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:49.932555Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018bb/r3tmp/tmpKaQdJJ/pdisk_1.dat 2025-03-31T16:28:50.725423Z node 1 :IMPORT WARN: Table profiles were not loaded 
2025-03-31T16:28:50.740625Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:28:50.740712Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:28:50.748252Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27024, node 1 2025-03-31T16:28:50.989943Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:28:50.989973Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:28:50.989980Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:28:50.990083Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64285 TClient is connected to server localhost:64285 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:28:52.075901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:52.124382Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:28:54.501880Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011490278459735:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:54.501987Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:54.502365Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011490278459747:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:54.506384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:28:54.524618Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011490278459749:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:28:54.595304Z node 1 :TX_PROXY ERROR: Actor# [1:7488011490278459800:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:28:54.935295Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011468803622576:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:54.935350Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:28:55.316208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:28:55.716594Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488011494573427356:2356];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:28:55.716814Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488011494573427356:2356];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:28:55.717069Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488011494573427356:2356];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:28:55.717204Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488011494573427356:2356];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:28:55.717321Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488011494573427356:2356];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:28:55.717422Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488011494573427356:2356];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:28:55.717519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488011494573427356:2356];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:28:55.717624Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488011494573427356:2356];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:28:55.717753Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488011494573427356:2356];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:28:55.717877Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7488011494573427356:2356];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:28:55.717975Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488011494573427356:2356];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:28:55.718062Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488011494573427356:2356];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:28:55.740139Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488011494573427348:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:28:55.740262Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488011494573427348:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:28:55.740477Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488011494573427348:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:28:55.740574Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488011494573427348:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:28:55.740683Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488011494573427348:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:28:55.740779Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488011494573427348:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:28:55.740867Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488011494573427348:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:28:55.740971Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488011494573427348:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:28:55.741082Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488011494573427348:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:28:55.741189Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488011494573427348:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:28:55.741279Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;self_id=[1:7488011494573427348:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:28:55.741372Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7488011494573427348:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:28:55.800429Z node 1 :T ... tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:13.304204Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:13.308862Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:13.309546Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:13.315423Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:13.315620Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:13.322198Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:13.322199Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:13.328258Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:13.328377Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039364;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:13.334447Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:13.334450Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039283;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:13.340377Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039332;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:13.340636Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:13.346820Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039275;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:13.346820Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:13.352512Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:13.352653Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039281;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:13.358190Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:13.358564Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:13.363795Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:13.364268Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:13.369423Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:13.369854Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039227;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:13.375451Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039245;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:13.375500Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039291;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:13.381157Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039197;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:13.381273Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039257;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:13.387024Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:13.387308Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:13.393648Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:13.393833Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:13.399644Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:13.399823Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:13.405931Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039295;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:13.405935Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:13.411888Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:13.411942Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:13.417887Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:13.418391Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:13.424550Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:13.425211Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:13.432057Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:13.432057Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:13.445206Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:13.445234Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:13.451402Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:13.628578Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpgk55gdmqfptbh0qb3a7td", SessionId: ydb://session/3?node_id=1&id=YWMzMjk4YzUtYWRjM2UxMDgtOWMxOTBiOTUtOGU5NzE0NzM=, Slow query, duration: 30.667402s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:30:14.157796Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:30:14.157797Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:30:14.158461Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::IndexFilterPushDown [GOOD] Test command err: Trying to start YDB, gRPC: 64029, MsgBus: 7363 2025-03-31T16:30:32.690081Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011910922482135:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:32.690174Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ddb/r3tmp/tmp8D4ubZ/pdisk_1.dat 2025-03-31T16:30:33.060950Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:30:33.061048Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:30:33.085139Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64029, node 1 2025-03-31T16:30:33.102117Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:30:33.109295Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:30:33.109324Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:30:33.147646Z node 
1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:30:33.147676Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:30:33.147686Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:30:33.147797Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7363 TClient is connected to server localhost:7363 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:30:33.721539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:33.735454Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:30:33.746836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:33.906441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:34.069693Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:34.134143Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:35.807719Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011923807385808:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:35.807890Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:36.096718Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:30:36.137243Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:30:36.165368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:30:36.199677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:30:36.266571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:30:36.338679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:30:36.430145Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011928102353629:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:36.430235Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:36.430506Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011928102353634:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:36.435371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:30:36.447327Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011928102353636:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:30:36.517024Z node 1 :TX_PROXY ERROR: Actor# [1:7488011928102353691:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:30:37.426295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:30:37.690308Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011910922482135:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:37.690448Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:30:38.095078Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488011936692288869:2529], status: GENERIC_ERROR, issues:
:3:46: Error: mismatched input 'VIEW' expecting {'(', DEFAULT, DISCARD, FROM, PROCESS, REDUCE, SELECT, VALUES} 2025-03-31T16:30:38.095338Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTFiYzAyZjMtMTM3NGNlMTAtOTliNmUwNmItZjZjMGYzNzk=, ActorId: [1:7488011932397321276:2496], ActorState: ExecuteState, TraceId: 01jqpgmtzff7w6pkhrjfjr66vw, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-03-31T16:30:38.114720Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488011936692288876:2531], status: GENERIC_ERROR, issues:
:3:46: Error: mismatched input 'VIEW' expecting {'(', DEFAULT, DISCARD, FROM, PROCESS, REDUCE, SELECT, VALUES} 2025-03-31T16:30:38.114983Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTFiYzAyZjMtMTM3NGNlMTAtOTliNmUwNmItZjZjMGYzNzk=, ActorId: [1:7488011932397321276:2496], ActorState: ExecuteState, TraceId: 01jqpgmv0wdjbm18ynr0vfy7qd, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-03-31T16:30:38.130195Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488011936692288881:2533], status: GENERIC_ERROR, issues:
:3:41: Error: mismatched input 'VIEW' expecting {ON, SET} 2025-03-31T16:30:38.130422Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTFiYzAyZjMtMTM3NGNlMTAtOTliNmUwNmItZjZjMGYzNzk=, ActorId: [1:7488011932397321276:2496], ActorState: ExecuteState, TraceId: 01jqpgmv1b085fs3yq37p5re3x, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-03-31T16:30:38.147344Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488011936692288887:2535], status: GENERIC_ERROR, issues:
:3:46: Error: mismatched input 'VIEW' expecting {, ';'} 2025-03-31T16:30:38.147628Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTFiYzAyZjMtMTM3NGNlMTAtOTliNmUwNmItZjZjMGYzNzk=, ActorId: [1:7488011932397321276:2496], ActorState: ExecuteState, TraceId: 01jqpgmv1vcx6vmgsv3axkbh8h, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 26891, MsgBus: 8370 2025-03-31T16:30:38.957742Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488011932814646140:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:38.958168Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ddb/r3tmp/tmpz6DK5K/pdisk_1.dat 2025-03-31T16:30:39.085995Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:30:39.128679Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:30:39.128778Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:30:39.130408Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26891, node 2 2025-03-31T16:30:39.231565Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31 ... 4976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:30:42.199145Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:30:42.269304Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:30:42.308936Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488011949994517616:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:42.309001Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488011949994517621:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:42.309008Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:42.311980Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:30:42.321073Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488011949994517623:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:30:42.394282Z node 2 :TX_PROXY ERROR: Actor# [2:7488011949994517676:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:30:43.190681Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:30:43.267930Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-31T16:30:43.313147Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-31T16:30:43.958306Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488011932814646140:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:43.970357Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 12410, MsgBus: 15480 2025-03-31T16:30:46.270653Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488011969527602707:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:46.270690Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ddb/r3tmp/tmpEeEHrY/pdisk_1.dat 2025-03-31T16:30:46.408411Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:30:46.419422Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:30:46.419510Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:30:46.421619Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12410, node 3 2025-03-31T16:30:46.468812Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:30:46.468844Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:30:46.468852Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:30:46.468995Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15480 TClient is connected to server localhost:15480 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:30:46.973428Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:46.990968Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:47.078155Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:47.246429Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:47.333812Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:49.636322Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488011982412506372:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:49.636430Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:49.694743Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:30:49.728701Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:30:49.765466Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:30:49.842471Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:30:49.875716Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:30:49.917586Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:30:49.973879Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488011982412506887:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:49.973969Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:49.974208Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488011982412506892:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:49.977779Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:30:49.990040Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488011982412506894:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:30:50.064822Z node 3 :TX_PROXY ERROR: Actor# [3:7488011986707474243:3443] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:30:51.042969Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:30:51.119748Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:30:51.170429Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T16:30:51.271131Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488011969527602707:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:51.271204Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-37 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-31 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-38 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-32 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-31 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-49 [GOOD] >> KqpUniqueIndex::ReplaceFkPartialColumnSet [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-32 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-50 >> KqpUniqueIndex::UpdateFkAlreadyExist |93.8%| [TA] {RESULT} $(B)/ydb/core/tx/columnshard/ut_schema/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpUniqueIndex::UpdateFkSameValue [GOOD] >> KqpUniqueIndex::UpdateFkPkOverlap >> KqpMultishardIndex::SortedRangeReadDesc >> KqpMultishardIndex::DataColumnUpsertMixedSemantic >> KqpIndexes::CheckUpsertNonEquatableType-NotNull [GOOD] >> KqpIndexes::CreateTableWithExplicitAsyncIndexSQL >> KqpIndexes::UpsertWithNullKeysSimple [GOOD] >> KqpIndexes::UpsertWithNullKeysComplex >> KqpIndexes::SecondaryIndexReplace+UseSink [GOOD] >> KqpJoinOrder::TPCDS34+ColumnStore [GOOD] >> TPersQueueTest::CreateTopicWithMeteringMode [GOOD] >> TPersQueueTest::DefaultMeteringMode >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-9 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-15 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-26 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-27 >> KqpIndexes::SelectConcurentTX2 [GOOD] >> KqpIndexes::SelectFromAsyncIndexedTable >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-2 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-3 >> KqpIndexes::NullInIndexTableNoDataRead [GOOD] >> KqpIndexes::NullInIndexTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::SecondaryIndexReplace+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 12022, MsgBus: 16675 2025-03-31T16:30:30.868280Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011900784146827:2194];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:30.868329Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ddc/r3tmp/tmp2GPTZ6/pdisk_1.dat 2025-03-31T16:30:31.303794Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:30:31.347884Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:30:31.347974Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:30:31.356246Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12022, node 1 2025-03-31T16:30:31.504092Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:30:31.504114Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:30:31.504126Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:30:31.504253Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16675 TClient is connected to server localhost:16675 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:30:32.274457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:30:32.295529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:30:32.449393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:32.606699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:32.694020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:34.072949Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011917964017648:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:34.073075Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:34.598110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:30:34.672068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:30:34.705144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:30:34.735018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:30:34.765389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:30:34.840193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:30:34.890508Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011917964018165:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:34.890630Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:34.890727Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011917964018170:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:34.896239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:30:34.915912Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011917964018172:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:30:35.008139Z node 1 :TX_PROXY ERROR: Actor# [1:7488011922258985526:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:30:35.871399Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011900784146827:2194];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:35.871488Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:30:36.083351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 7068, MsgBus: 30924 2025-03-31T16:30:42.331970Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488011952731876107:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:42.332020Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ddc/r3tmp/tmp8ZT0Kd/pdisk_1.dat 2025-03-31T16:30:42.433701Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:30:42.488042Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:30:42.488100Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:30:42.489672Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7068, node 2 2025-03-31T16:30:42.530685Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:30:42.530706Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:30:42.530713Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:30:42.530840Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30924 TClient is connected to server localhost:30924 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:30:43.007279Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:43.021316Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:43.100660Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:30:43.292414Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-31T16:30:43.371236Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, bu ... ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:30:45.711517Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:30:45.771317Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:30:45.804717Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:30:45.843523Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:30:45.883320Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:30:45.943481Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488011965616780288:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:45.943580Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:45.943797Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488011965616780293:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:45.948162Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:30:45.961781Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488011965616780295:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:30:46.021242Z node 2 :TX_PROXY ERROR: Actor# [2:7488011969911747644:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:30:46.978335Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:30:47.332116Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488011952731876107:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:47.332183Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 9549, MsgBus: 8645 2025-03-31T16:30:50.948540Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488011985206174931:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:50.948597Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ddc/r3tmp/tmpxQPiZT/pdisk_1.dat 2025-03-31T16:30:51.089327Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:30:51.117626Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:30:51.117734Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:30:51.121358Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9549, node 3 2025-03-31T16:30:51.207514Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:30:51.207533Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:30:51.207540Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:30:51.207648Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8645 TClient is connected to server localhost:8645 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:30:51.756607Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:51.774645Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:51.840788Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:52.072188Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:52.159068Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:54.867135Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488012002386045868:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:54.867225Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:54.945353Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:30:54.981273Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:30:55.022645Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:30:55.106538Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:30:55.178172Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:30:55.220846Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:30:55.278215Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488012006681013685:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:55.278306Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488012006681013690:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:55.278333Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:55.282138Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:30:55.294668Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488012006681013692:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:30:55.369373Z node 3 :TX_PROXY ERROR: Actor# [3:7488012006681013746:3454] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:30:55.949054Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488011985206174931:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:55.949132Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:30:56.363295Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:30:57.288442Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:30:58.008757Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-8 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-9 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-44 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-45 >> KqpUniqueIndex::InsertFkDuplicate [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-38 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-39 >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt64 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-32 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-33 >> KqpIndexes::UniqAndNoUniqSecondaryIndex >> KqpIndexes::UpdateDeletePlan-UseSink [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-32 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-33 >> KqpUniqueIndex::UpsertExplicitNullInComplexFk [GOOD] >> KqpUniqueIndex::UpsertImplicitNullInComplexFk ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS34+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 22207, MsgBus: 15211 2025-03-31T16:29:18.937734Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011590676211892:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:29:18.956501Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018a2/r3tmp/tmpQmP7sH/pdisk_1.dat 2025-03-31T16:29:19.717736Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:29:19.771893Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:29:19.771998Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:29:19.780236Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 
TServer::EnableGrpc on GrpcPort 22207, node 1 2025-03-31T16:29:20.035631Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:29:20.035651Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:29:20.035659Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:29:20.035773Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15211 TClient is connected to server localhost:15211 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:29:21.232829Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:29:21.260153Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:29:23.470915Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011612151048898:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:23.471039Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:23.471547Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011612151048910:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:29:23.484368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:29:23.509738Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011612151048912:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:29:23.576551Z node 1 :TX_PROXY ERROR: Actor# [1:7488011612151048963:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:29:23.903295Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011590676211892:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:29:23.903356Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:29:24.081165Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:29:24.401210Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488011616446016500:2351];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:29:24.401399Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488011616446016500:2351];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:29:24.401650Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488011616446016500:2351];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:29:24.401763Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488011616446016500:2351];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:29:24.401877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488011616446016500:2351];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:29:24.401984Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488011616446016500:2351];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:29:24.402109Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488011616446016500:2351];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:29:24.402211Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488011616446016500:2351];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:29:24.402328Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488011616446016500:2351];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:29:24.402436Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037903;self_id=[1:7488011616446016500:2351];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:29:24.402561Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488011616446016500:2351];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:29:24.403285Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7488011616446016500:2351];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:29:24.405636Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488011616446016498:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:29:24.405680Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488011616446016498:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:29:24.405880Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488011616446016498:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:29:24.405981Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488011616446016498:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:29:24.406078Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488011616446016498:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:29:24.406166Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488011616446016498:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:29:24.406262Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488011616446016498:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:29:24.406404Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488011616446016498:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:29:24.406524Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488011616446016498:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:29:24.406623Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488011616446016498:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:29:24.406714Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7488011616446016498:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:29:24.406807Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488011616446016498:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:29:24.471462Z node 1 :T ... oller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:35.135215Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039243;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:35.135574Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:35.140672Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039305;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:35.141390Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039281;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:35.145907Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039315;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:35.146770Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:35.151189Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039279;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:35.152544Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:35.156188Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039309;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:35.158612Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:35.160806Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039303;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:35.164251Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:35.165246Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:35.170510Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:35.170578Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:35.175170Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039337;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:35.175986Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:35.179100Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039277;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:35.181238Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:35.182816Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039367;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:35.186372Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:35.186522Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:35.190348Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:35.191707Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:35.195209Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039311;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:35.197327Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:35.199237Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:35.203396Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:35.204391Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039245;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:35.208311Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:35.210857Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:35.213803Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:35.217655Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:35.218687Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:35.223519Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:35.223839Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:35.228472Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:35.229793Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:35.235866Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039329;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:35.236598Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:35.241786Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:35.242551Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:35.248267Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:35.248267Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:35.253913Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-31T16:30:35.394278Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqpgkx1m51nmfzr48v2hnea2", SessionId: ydb://session/3?node_id=1&id=NmU4YzdkMTQtZWM2MTk4MjgtZjY4Y2ZlM2QtZTNjNjNkYQ==, Slow query, duration: 27.981702s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-31T16:30:35.932175Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:30:35.932228Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-31T16:30:35.932610Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;self_id=[1:7488011809719581547:7852];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038933;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039094;receive=72075186224039392; 2025-03-31T16:30:35.933014Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-50 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-51 >> KqpIndexes::UpsertNoIndexColumns [GOOD] >> KqpIndexMetadata::HandleWriteOnlyIndex [GOOD] >> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt64 [GOOD] >> KqpMultishardIndex::SecondaryIndexSelect [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::InsertFkDuplicate [GOOD] Test command err: Trying to start YDB, gRPC: 5773, MsgBus: 10450 2025-03-31T16:30:46.831296Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011967606753147:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:46.833560Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dc9/r3tmp/tmpx4eZwu/pdisk_1.dat 2025-03-31T16:30:47.259516Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:30:47.297021Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:30:47.297133Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:30:47.298154Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> 
Connected TServer::EnableGrpc on GrpcPort 5773, node 1 2025-03-31T16:30:47.359581Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:30:47.359618Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:30:47.359625Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:30:47.359762Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10450 TClient is connected to server localhost:10450 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:30:47.904689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:47.926745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:48.051411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:48.207314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:48.284159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:50.072929Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011984786624098:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:30:50.073067Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:30:50.414774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-31T16:30:50.445027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-31T16:30:50.477675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-31T16:30:50.507831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-31T16:30:50.539500Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-31T16:30:50.576022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-31T16:30:50.658018Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011984786624612:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:30:50.658121Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:30:50.658509Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011984786624617:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:30:50.662467Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-31T16:30:50.673639Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011984786624619:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-03-31T16:30:50.745342Z node 1 :TX_PROXY ERROR: Actor# [1:7488011984786624674:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-31T16:30:51.642432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:30:51.831786Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011967606753147:2062];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:30:51.831952Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-31T16:30:53.080224Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488011997671528025:2599], TxId: 281474976710676, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZTE2YjNhZjItOTkwOGM0MDEtOWUzYjUwM2EtYTRlMDAxMzM=. TraceId : 01jqpgn98n4895bp70ahdgamxv. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }.
2025-03-31T16:30:53.080625Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488011997671528026:2600], TxId: 281474976710676, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=ZTE2YjNhZjItOTkwOGM0MDEtOWUzYjUwM2EtYTRlMDAxMzM=. CustomerSuppliedId : . TraceId : 01jqpgn98n4895bp70ahdgamxv. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7488011997671528022:2555], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution }
2025-03-31T16:30:53.080959Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTE2YjNhZjItOTkwOGM0MDEtOWUzYjUwM2EtYTRlMDAxMzM=, ActorId: [1:7488011989081593039:2555], ActorState: ExecuteState, TraceId: 01jqpgn98n4895bp70ahdgamxv, Create QueryResponse for error on request, msg:
Trying to start YDB, gRPC: 23006, MsgBus: 11114
2025-03-31T16:30:53.969823Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488011999402828021:2060];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:30:53.970211Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dc9/r3tmp/tmp6xiq9G/pdisk_1.dat
2025-03-31T16:30:54.095806Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:30:54.112252Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:30:54.112316Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
TServer::EnableGrpc on GrpcPort 23006, node 2
2025-03-31T16:30:54.113716Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-31T16:30:54.155639Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T16:30:54.155666Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T16:30:54.155674Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T16:30:54.155804Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:11114
TClient is connected to server localhost:11114
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
waiting...
2025-03-31T16:30:54.703658Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
2025-03-31T16:30:54.718218Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:30:54.780004Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:30:54.955031Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:30:55.072481Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:30:57.364137Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012016582698982:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:30:57.364219Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:30:57.415233Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-03-31T16:30:57.449868Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-03-31T16:30:57.500103Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-03-31T16:30:57.531714Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-03-31T16:30:57.563381Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-03-31T16:30:57.606118Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-03-31T16:30:57.693190Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012016582699499:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:30:57.693347Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:30:57.693580Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012016582699505:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:30:57.696911Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480
2025-03-31T16:30:57.708195Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488012016582699507:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
2025-03-31T16:30:57.805557Z node 2 :TX_PROXY ERROR: Actor# [2:7488012016582699562:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-31T16:30:58.655349Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:30:58.968936Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488011999402828021:2060];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:30:58.969011Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-31T16:31:00.281817Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7488012029467602937:2598], TxId: 281474976715676, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jqpgng4426h0ns8n9tzq0g75. SessionId : ydb://session/3?node_id=2&id=Mjc2YzkwMTYtZjI0Y2YyMTgtOTk3YTVkNDktODAyZmNkYTE=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Duplicated keys found., code: 2012 }.
2025-03-31T16:31:00.282521Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7488012029467602938:2599], TxId: 281474976715676, task: 2. Ctx: { SessionId : ydb://session/3?node_id=2&id=Mjc2YzkwMTYtZjI0Y2YyMTgtOTk3YTVkNDktODAyZmNkYTE=. TraceId : 01jqpgng4426h0ns8n9tzq0g75. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7488012029467602934:2554], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution }
2025-03-31T16:31:00.282868Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Mjc2YzkwMTYtZjI0Y2YyMTgtOTk3YTVkNDktODAyZmNkYTE=, ActorId: [2:7488012020877667931:2554], ActorState: ExecuteState, TraceId: 01jqpgng4426h0ns8n9tzq0g75, Create QueryResponse for error on request, msg:
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::UpdateDeletePlan-UseSink [GOOD]
Test command err:
Trying to start YDB, gRPC: 29359, MsgBus: 23224
2025-03-31T16:30:48.920917Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011978298669788:2198];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:30:48.921220Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dbe/r3tmp/tmpgqtCqS/pdisk_1.dat
2025-03-31T16:30:49.419795Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:30:49.423985Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:30:49.424089Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:30:49.427691Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 29359, node 1
2025-03-31T16:30:49.523551Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T16:30:49.523580Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T16:30:49.523595Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T16:30:49.523777Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:23224
TClient is connected to server localhost:23224
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-31T16:30:50.050142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:30:50.080944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:30:50.272251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:30:50.448513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:30:50.521663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:30:52.402027Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011995478540618:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:30:52.402153Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:30:52.751749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-31T16:30:52.786063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-31T16:30:52.858229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-31T16:30:52.893224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-31T16:30:52.939731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-31T16:30:53.011642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-31T16:30:53.060258Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011999773508434:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:30:53.060337Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:30:53.060762Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011999773508439:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:30:53.065007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-31T16:30:53.074082Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011999773508441:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-03-31T16:30:53.137602Z node 1 :TX_PROXY ERROR: Actor# [1:7488011999773508494:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-31T16:30:53.917528Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011978298669788:2198];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:30:53.918586Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-31T16:30:54.028811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
Trying to start YDB, gRPC: 22384, MsgBus: 64892
2025-03-31T16:30:55.569745Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012008398196086:2057];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:30:55.569811Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dbe/r3tmp/tmpvvGuox/pdisk_1.dat
2025-03-31T16:30:55.664073Z node 2 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 22384, node 2
2025-03-31T16:30:55.695666Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:30:55.695744Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:30:55.697142Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-31T16:30:55.728405Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T16:30:55.728429Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T16:30:55.728436Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T16:30:55.728560Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:64892
TClient is connected to server localhost:64892
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-31T16:30:56.144095Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:30:56.158953Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:30:56.214907Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:30:56.398221Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:30:56.482258Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:30:58.644319Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012021283099747:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:30:58.644421Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:30:58.700864Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-03-31T16:30:58.735301Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-03-31T16:30:58.783050Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-03-31T16:30:58.821541Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-03-31T16:30:58.857861Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-03-31T16:30:58.899554Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-03-31T16:30:58.981130Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012021283100266:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:30:58.981188Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:30:58.981331Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012021283100271:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:30:58.985105Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480
2025-03-31T16:30:58.995534Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488012021283100273:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
2025-03-31T16:30:59.065592Z node 2 :TX_PROXY ERROR: Actor# [2:7488012025578067624:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-31T16:31:00.093079Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480
2025-03-31T16:31:00.574849Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488012008398196086:2057];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:31:00.574922Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::UpsertNoIndexColumns [GOOD]
Test command err:
Trying to start YDB, gRPC: 1555, MsgBus: 17841
2025-03-31T16:30:44.135198Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011961737921408:2134];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:30:44.135340Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dcf/r3tmp/tmpkPVvDs/pdisk_1.dat
2025-03-31T16:30:44.512561Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 1555, node 1
2025-03-31T16:30:44.543957Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:30:44.544100Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:30:44.547612Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-31T16:30:44.621420Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T16:30:44.621448Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T16:30:44.621463Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T16:30:44.621614Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:17841
TClient is connected to server localhost:17841
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-31T16:30:45.197467Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:30:45.223971Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-03-31T16:30:45.236538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:30:45.389163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:30:45.542202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:30:45.604305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:30:47.374843Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011974622824986:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:30:47.375013Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:30:47.703822Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-31T16:30:47.731707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-31T16:30:47.760071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-31T16:30:47.787527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-31T16:30:47.817285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-31T16:30:47.853801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-31T16:30:47.895922Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011974622825494:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:30:47.896013Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:30:47.896052Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011974622825499:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:30:47.899040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-31T16:30:47.907728Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011974622825501:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-03-31T16:30:47.987407Z node 1 :TX_PROXY ERROR: Actor# [1:7488011974622825555:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-31T16:30:48.835506Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-03-31T16:30:49.131758Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011961737921408:2134];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:30:49.131835Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-31T16:30:50.132742Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-03-31T16:30:50.177688Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-03-31T16:30:50.218413Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-03-31T16:30:51.350848Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqpgn6vybbf258egtgnphand, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGQ2ZmIwNDYtYzYwNjMwZmEtZjJiNWI4OGQtMTk3YjNlZmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed.
2025-03-31T16:30:51.365088Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NGQ2ZmIwNDYtYzYwNjMwZmEtZjJiNWI4OGQtMTk3YjNlZmY=, ActorId: [1:7488011978917793140:2496], ActorState: ExecuteState, TraceId: 01jqpgn6vybbf258egtgnphand, Create QueryResponse for error on request, msg:
2025-03-31T16:30:51.395212Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-03-31T16:30:51.428502Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-03-31T16:30:51.451952Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-03-31T16:30:52.559257Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-03-31T16:30:52.602994Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-03-31T16:30:52.614454Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
Trying to start YDB, gRPC: 5638, MsgBus: 18333
2025-03-31T16:30:53.389578Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012001266581941:2124];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:30:53.389628Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dcf/r3tmp/tmppNNw9a/pdisk_1.dat
2025-03-31T16:30:53.569572Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:30:53.587551Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:30:53.587639Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:30:53.589344Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 5638, node 2
2025-03-31T16:30:53.644149Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T16:30:53.644174Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T16:30:53.644180Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T16:30:53.644305Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:18333
TClient is connected to server localhost:18333
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-31T16:30:54.117551Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:30:54.123369Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:30:54.135640Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:30:54.219163Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:30:54.427800Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
2025-03-31T16:30:54.507444Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:30:56.782339Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012014151485554:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:30:56.782463Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:30:56.824707Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-03-31T16:30:56.901916Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-03-31T16:30:56.935373Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-03-31T16:30:56.990410Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-03-31T16:30:57.064080Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-03-31T16:30:57.112838Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-03-31T16:30:57.166353Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012018446453365:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:30:57.166476Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:30:57.166669Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012018446453370:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:30:57.171424Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480
2025-03-31T16:30:57.187044Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488012018446453372:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
2025-03-31T16:30:57.269633Z node 2 :TX_PROXY ERROR: Actor# [2:7488012018446453425:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-31T16:30:58.260293Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480
2025-03-31T16:30:58.338329Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480
2025-03-31T16:30:58.388925Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480
2025-03-31T16:30:58.390974Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488012001266581941:2124];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:30:58.391034Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
>> KqpIndexes::CreateTableWithExplicitAsyncIndexSQL [GOOD]
>> KqpIndexes::SecondaryIndexUpdateOnUsingIndex
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexMetadata::HandleWriteOnlyIndex [GOOD]
Test command err:
Trying to start YDB, gRPC: 25728, MsgBus: 24047
2025-03-31T16:30:36.385065Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011927142729417:2060];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:30:36.385106Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dd7/r3tmp/tmpY494ej/pdisk_1.dat
2025-03-31T16:30:36.769838Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 25728, node 1
2025-03-31T16:30:36.785922Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:30:36.786034Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:30:36.799313Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-31T16:30:36.877859Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T16:30:36.877879Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T16:30:36.877898Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T16:30:36.878014Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:24047
TClient is connected to server localhost:24047
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-31T16:30:37.477414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:30:37.515524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:30:37.667557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:30:37.819361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:30:37.886117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:30:39.380638Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011940027633079:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:30:39.380793Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:30:39.722482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-31T16:30:39.758240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-31T16:30:39.792390Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-31T16:30:39.832414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-31T16:30:39.868630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-31T16:30:39.940274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-31T16:30:39.986009Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011940027633595:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:30:39.986100Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:30:39.986274Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011940027633600:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T16:30:39.990469Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-31T16:30:40.003640Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011940027633602:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-03-31T16:30:40.097335Z node 1 :TX_PROXY ERROR: Actor# [1:7488011944322600952:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-31T16:30:40.891052Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-03-31T16:30:41.385496Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011927142729417:2060];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:30:41.385599Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
Trying to start YDB, gRPC: 1028, MsgBus: 63879
2025-03-31T16:30:45.090731Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488011965430300439:2063];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:30:45.090782Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dd7/r3tmp/tmpWaMZ2w/pdisk_1.dat
2025-03-31T16:30:45.213509Z node 2 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 1028, node 2
2025-03-31T16:30:45.248504Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:30:45.248619Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:30:45.251571Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-31T16:30:45.291177Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T16:30:45.291209Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T16:30:45.291216Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T16:30:45.291368Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:63879
TClient is connected to server localhost:63879
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:30:45.761633Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:45.777660Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:45.851663Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:46.027369Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:46.109664Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, bu ... 
xternalPlanNodeId":11}],"Predicate":"Exist(item.id)","E-Cost":"No estimate","E-Size":"No estimate","Name":"Filter"}],"Plans":[{"E-Size":"No estimate","LookupKeyColumns":["b","id"],"Node Type":"TableLookup","PlanNodeId":11,"Path":"\/Root\/tg","Columns":["am","b","cur","id","pa_id","product","status","system_date","type"],"E-Rows":"No estimate","Plans":[{"PlanNodeId":10,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Iterator":"PartitionByKey","Name":"Iterator"},{"Inputs":[],"Input":"precompute_1_0","Name":"PartitionByKey"}],"Node Type":"ConstantExpr-Aggregate","CTE Name":"precompute_1_0"}],"Table":"tg","PlanNodeType":"Connection","E-Cost":"No estimate"}],"Node Type":"TopSort-Filter"}],"Node Type":"Merge","SortColumns":["system_date (Desc)","id (Desc)"],"PlanNodeType":"Connection"}],"Node Type":"Limit"}],"Node Type":"ResultSet_2","PlanNodeType":"ResultSet"},{"PlanNodeId":8,"Plans":[{"PlanNodeId":7,"Operators":[{"Inputs":[{"ExternalPlanNodeId":6}],"Limit":"11","Name":"Top","TopBy":"[row.b,row.pa_id,row.system_date,row.id]"}],"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Operators":[{"Inputs":[{"InternalOperatorId":1},{"InternalOperatorId":1},{"InternalOperatorId":1},{"InternalOperatorId":1}],"Limit":"11","Name":"Limit"},{"E-Rows":"No estimate","Inputs":[{"ExternalPlanNodeId":4}],"Predicate":"NOT If AND item.status != $status_1 AND item.am != $am_1","E-Cost":"No estimate","E-Size":"No estimate","Name":"Filter"}],"Plans":[{"Tables":["tg\/tg_index\/indexImplTable"],"PlanNodeId":4,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":"%kqp%tx_result_binding_0_0","Reverse":true,"Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/tg\/tg_index\/indexImplTable","E-Rows":"No estimate","ReadRangesPointPrefixLen":"2","ReadRangesKeys":["b","pa_id","system_date","id"],"Table":"tg\/tg_index\/indexImplTable","ReadColumns":["am","b","id","pa_id","status","system_date","type"],"E-Cost":"No estimate","ReadRangesExpectedSize":"3"}],"Node Type":"TableRangeScan"}],"Node Type":"Limit-Filter"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Top"}],"Subplan Name":"CTE precompute_1_0","Node Type":"Precompute_1","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"},{"PlanNodeId":2,"Plans":[{"PlanNodeId":1,"Node Type":"Stage"}],"Subplan Name":"CTE precompute_0_0","Node Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","PlanNodeType":"Query","Stats":{"ResourcePoolId":"default"}},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tg","reads":[{"lookup_by":["b","id"],"columns":["am","b","cur","id","pa_id","product","status","system_date","type"],"type":"Lookup"}]},{"name":"\/Root\/tg\/tg_index\/indexImplTable","reads":[{"columns":["am","b","id","pa_id","status","system_date","type"],"reverse":true,"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Operators":[{"Limit":"1001","Name":"Limit"}],"Plans":[{"PlanNodeId":4,"Operators":[{"Limit":"1001","Name":"TopSort","TopSortBy":"[row.system_date,row.id]"}],"Plans":[{"PlanNodeId":5,"Operators":[{"E-Rows":"No estimate","Predicate":"Exist(item.id)","E-Cost":"No estimate","E-Size":"No estimate","Name":"Filter"}],"Plans":[{"PlanNodeId":6,"Operators":[{"E-Rows":"No estimate","Columns":["am","b","cur","id","pa_id","product","status","system_date","type"],"Name":"TableLookup","E-Cost":"No estimate","E-Size":"No estimate","LookupKeyColumns":["b","id"],"Table":"tg"}],"Node 
Type":"TableLookup","PlanNodeType":"Connection"}],"Node Type":"Filter"}],"Node Type":"TopSort"}],"Node Type":"Limit"}],"Node Type":"ResultSet_2","PlanNodeType":"ResultSet"}],"Node Type":"Query","PlanNodeType":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":2}}} Trying to start YDB, gRPC: 6472, MsgBus: 14368 2025-03-31T16:30:52.557150Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488011994888889895:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:52.557203Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dd7/r3tmp/tmphNOEAC/pdisk_1.dat 2025-03-31T16:30:52.694905Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:30:52.722778Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:30:52.722871Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:30:52.724523Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6472, node 3 2025-03-31T16:30:52.779288Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:30:52.779322Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:30:52.779331Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:30:52.779471Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14368 TClient is connected to server localhost:14368 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:30:53.264895Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:53.284981Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-03-31T16:30:53.366449Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-31T16:30:53.576692Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:53.656242Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:56.132574Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488012012068760861:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:56.132702Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:56.181274Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:30:56.214236Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:30:56.246801Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:30:56.280897Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:30:56.313963Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:30:56.365861Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:30:56.437870Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488012012068761373:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:56.438000Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:56.438230Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488012012068761378:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:56.442175Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:30:56.456368Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488012012068761380:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:30:56.537925Z node 3 :TX_PROXY ERROR: Actor# [3:7488012012068761435:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:30:57.557494Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488011994888889895:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:57.557675Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:30:57.563444Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpMultishardIndex::SecondaryIndexSelect [GOOD] Test command err: Trying to start YDB, gRPC: 11444, MsgBus: 4447 2025-03-31T16:30:45.049579Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011964672210086:2172];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:45.055361Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dce/r3tmp/tmplNybJL/pdisk_1.dat 2025-03-31T16:30:45.428842Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:30:45.433368Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:30:45.433447Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:30:45.435200Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11444, node 1 2025-03-31T16:30:45.498590Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:30:45.498629Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:30:45.498637Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:30:45.498758Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4447 TClient is connected to server localhost:4447 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:30:45.970449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:45.985601Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:30:46.002337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:46.173822Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:46.371707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:46.462153Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:48.201960Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011977557113649:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:48.202093Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:48.507788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:30:48.545197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:30:48.577477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:30:48.612007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:30:48.646885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:30:48.683278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:30:48.728745Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011977557114157:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:48.728850Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:48.729051Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011977557114162:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:48.734238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:30:48.750236Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011977557114164:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:30:48.848487Z node 1 :TX_PROXY ERROR: Actor# [1:7488011977557114218:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:30:49.815459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:50.046705Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011964672210086:2172];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:50.046799Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 16624, MsgBus: 27796 2025-03-31T16:30:53.373728Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012001029673350:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:53.373791Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dce/r3tmp/tmpb4y6DU/pdisk_1.dat 2025-03-31T16:30:53.483535Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:30:53.518953Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:30:53.519038Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 16624, node 2 2025-03-31T16:30:53.521186Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:30:53.569650Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:30:53.569676Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:30:53.569704Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:30:53.569832Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27796 TClient is connected to server localhost:27796 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:30:54.037403Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:54.049496Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:30:54.065113Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:54.134964Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:54.299130Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:54.371665Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:56.662135Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012013914577009:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:56.662253Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:56.713746Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:30:56.746132Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:30:56.776211Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:30:56.809101Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:30:56.847745Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:30:56.923436Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:30:56.973996Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012013914577526:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:56.974106Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:56.974301Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012013914577531:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:56.978133Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:30:56.989553Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488012013914577533:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:30:57.085914Z node 2 :TX_PROXY ERROR: Actor# [2:7488012018209544883:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:30:57.992552Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:58.375721Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488012001029673350:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:58.375819Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpMultishardIndex::SortedRangeReadDesc [GOOD] >> KqpMultishardIndex::SortByPk >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-15 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-16 >> KqpIndexes::SimpleVectorIndexOrderByCosineDistanceNotNullableLevel1 >> KqpMultishardIndex::DataColumnUpsertMixedSemantic [GOOD] >> KqpMultishardIndex::DataColumnSelect ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::CreateTableWithExplicitAsyncIndexSQL [GOOD] Test command err: Trying to start YDB, gRPC: 61283, MsgBus: 62632 2025-03-31T16:30:45.938888Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011964251807282:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:45.938965Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dca/r3tmp/tmpU6Y3Xk/pdisk_1.dat 2025-03-31T16:30:46.331557Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:30:46.362696Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:30:46.362804Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 61283, node 1 2025-03-31T16:30:46.365143Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:30:46.419633Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:30:46.419653Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:30:46.419660Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:30:46.419765Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62632 TClient is connected to server localhost:62632 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:30:47.007467Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:47.034621Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:30:47.050137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:47.223665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:47.398186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:47.468917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:49.208271Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011981431678252:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:49.208403Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:49.560170Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:30:49.596435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:30:49.632318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:30:49.667160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:30:49.705839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:30:49.775354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:30:49.842378Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011981431678773:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:49.842444Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:49.842587Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011981431678778:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:49.846375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:30:49.856858Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011981431678780:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:30:49.917793Z node 1 :TX_PROXY ERROR: Actor# [1:7488011981431678832:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:30:50.779200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:30:50.936243Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011964251807282:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:50.973999Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:30:51.278974Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 31993, MsgBus: 13988 2025-03-31T16:30:52.010869Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488011995432932296:2188];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:52.089687Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dca/r3tmp/tmpStoS08/pdisk_1.dat 2025-03-31T16:30:52.175155Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31993, node 2 2025-03-31T16:30:52.203666Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:30:52.203755Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:30:52.205537Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:30:52.251648Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:30:52.251670Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:30:52.251678Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:30:52.251806Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13988 TClient is connected to server localhost:13988 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:30:52.656483Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:52.664003Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:30:52.682446Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:52.752557Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: E ... 664:0, at schemeshard: 72057594046644480 2025-03-31T16:30:55.381906Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:30:55.418585Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:30:55.501322Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:30:55.558113Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012008317836323:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:55.558194Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:55.558421Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012008317836328:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:55.561706Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:30:55.573446Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488012008317836330:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:30:55.629312Z node 2 :TX_PROXY ERROR: Actor# [2:7488012008317836382:3442] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:30:56.462350Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:30:57.006370Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488011995432932296:2188];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:57.006433Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:30:57.025595Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 21621, MsgBus: 20242 2025-03-31T16:30:57.887515Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488012015315427096:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:57.887589Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dca/r3tmp/tmpgY0otl/pdisk_1.dat 2025-03-31T16:30:57.988373Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:30:58.002415Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:30:58.002504Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:30:58.004017Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21621, node 3 2025-03-31T16:30:58.063354Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:30:58.063379Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:30:58.063387Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:30:58.063513Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20242 TClient is connected to server localhost:20242 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:30:58.504589Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:58.509603Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:30:58.517462Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:58.575989Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:58.721824Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:58.794850Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:01.125489Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488012032495298017:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:01.125581Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:01.168862Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:31:01.193578Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:31:01.221159Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:31:01.245969Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:31:01.272621Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:31:01.300109Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:31:01.380385Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488012032495298535:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:01.380464Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:01.380500Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488012032495298540:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:01.383200Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:31:01.392384Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488012032495298542:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:31:01.468064Z node 3 :TX_PROXY ERROR: Actor# [3:7488012032495298598:3441] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:31:02.448379Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:31:02.549580Z node 3 :TX_PROXY ERROR: Actor# [3:7488012036790266470:3809] txid# 281474976715672, issues: { message: "Check failed: path: \'/Root/TestTable\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:31:02.780209Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:02.887752Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488012015315427096:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:02.887883Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt64 [GOOD] Test command err: 2025-03-31T16:21:27.784652Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:21:27.869767Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:21:27.889101Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:21:27.889372Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:21:27.896167Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:21:27.896368Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:21:27.896565Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:21:27.896682Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:21:27.896774Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:21:27.896860Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:21:27.896947Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:21:27.897054Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:21:27.897135Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:21:27.897201Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:21:27.897317Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:21:27.897394Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:21:27.917001Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:21:27.917133Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:21:27.917175Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:21:27.917572Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:21:27.917710Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:21:27.917769Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:21:27.917804Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:21:27.917876Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:21:27.917940Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:21:27.917970Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:21:27.917989Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:21:27.918163Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:21:27.918228Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:21:27.918259Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:21:27.918278Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:21:27.918360Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:21:27.918413Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:21:27.918442Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:21:27.918461Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:21:27.918508Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:21:27.918534Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:21:27.918556Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:21:27.918596Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:21:27.918620Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:21:27.918656Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:21:27.919011Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=36; 2025-03-31T16:21:27.919101Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=34; 2025-03-31T16:21:27.919186Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=37; 2025-03-31T16:21:27.919246Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=29; 2025-03-31T16:21:27.919386Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:21:27.919427Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:21:27.919458Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:21:27.919610Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:21:27.919645Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:21:27.919675Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:21:27.919799Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:21:27.919832Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:21:27.919855Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-31T16:21:27.920016Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:21:27.920050Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:21:27.920069Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-31T16:21:27.920182Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T16:21:27.920213Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T16:21:27.920250Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:15898:17856];process=Enqueue;ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1503;stage=finished; 2025-03-31T16:31:00.771663Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-03-31T16:31:00.771785Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=16; 2025-03-31T16:31:00.772229Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=354; 2025-03-31T16:31:00.772282Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=435; 2025-03-31T16:31:00.780269Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-03-31T16:31:00.780384Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=16; 2025-03-31T16:31:00.793708Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=13185; 2025-03-31T16:31:00.812344Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=16970; 2025-03-31T16:31:00.812485Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=18658; 2025-03-31T16:31:00.812715Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=145; 2025-03-31T16:31:00.812884Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=110; 2025-03-31T16:31:00.813126Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=186; 2025-03-31T16:31:00.813337Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=158; 2025-03-31T16:31:00.813626Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=231; 2025-03-31T16:31:00.813696Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=33237; 2025-03-31T16:31:00.823594Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-03-31T16:31:00.823709Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=15; 2025-03-31T16:31:00.839063Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=15232; 2025-03-31T16:31:00.892473Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=53237; 2025-03-31T16:31:00.892637Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=49; 2025-03-31T16:31:00.892725Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=33; 2025-03-31T16:31:00.892781Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=10; 2025-03-31T16:31:00.892839Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=12; 2025-03-31T16:31:00.892895Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=9; 2025-03-31T16:31:00.892994Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=56; 2025-03-31T16:31:00.893052Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=10; 2025-03-31T16:31:00.893175Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=80; 2025-03-31T16:31:00.893231Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=8; 2025-03-31T16:31:00.893315Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=41; 2025-03-31T16:31:00.893434Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=72; 2025-03-31T16:31:00.893545Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=64; 2025-03-31T16:31:00.893597Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=69818; 2025-03-31T16:31:00.893835Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=2425692;raw_bytes=4011492;count=1;records=39328} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=111582272;raw_bytes=187115688;count=44;records=1805672} inactive {blob_bytes=184231896;raw_bytes=309107354;count=94;records=2982033} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-03-31T16:31:00.894690Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:15898:17856];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-03-31T16:31:00.894790Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:15898:17856];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-03-31T16:31:00.894896Z node 1 :TX_COLUMNSHARD 
DEBUG: tablet_id=9437184;self_id=[1:15898:17856];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-03-31T16:31:00.894966Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15898:17856];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=0; 2025-03-31T16:31:00.895246Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-31T16:31:00.895346Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:31:00.895613Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=22; 2025-03-31T16:31:00.895708Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2025-03-31T16:31:00.895772Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=22;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-31T16:31:00.895837Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-31T16:31:00.895895Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-31T16:31:00.896021Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-31T16:31:00.900120Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:31:00.905740Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15898:17856];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-03-31T16:31:00.908287Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15898:17856];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-03-31T16:31:00.908383Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-03-31T16:31:00.908427Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-03-31T16:31:00.908495Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15898:17856];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-31T16:31:00.908603Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15898:17856];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:31:00.908932Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15898:17856];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=22; 2025-03-31T16:31:00.909080Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15898:17856];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2025-03-31T16:31:00.909145Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15898:17856];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=22;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-31T16:31:00.909225Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15898:17856];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-31T16:31:00.909286Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15898:17856];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-31T16:31:00.909406Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15898:17856];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.999000s; 2025-03-31T16:31:00.909484Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15898:17856];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-27 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-28 >> KqpIndexes::CreateTableWithImplicitSyncIndexSQL >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-9 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-10 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-3 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-4 >> KqpUniqueIndex::InsertFkPartialColumnSet >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-45 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-46 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt64 [GOOD] Test command err: 2025-03-31T16:21:28.670388Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:21:28.769773Z node 
1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:21:28.792571Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:21:28.792864Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:21:28.800922Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:21:28.801148Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:21:28.801374Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:21:28.801488Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:21:28.801574Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:21:28.801713Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:21:28.801834Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:21:28.801963Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:21:28.802077Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:21:28.802184Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:21:28.802300Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:21:28.802409Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:21:28.826954Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:21:28.827130Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 
2025-03-31T16:21:28.827187Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:21:28.827394Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:21:28.827560Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:21:28.827618Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:21:28.827650Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:21:28.827734Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:21:28.827790Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:21:28.827846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:21:28.827871Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:21:28.828055Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:21:28.828152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:21:28.828186Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:21:28.828207Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:21:28.828315Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:21:28.828378Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:21:28.828430Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:21:28.828459Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:21:28.828532Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 
2025-03-31T16:21:28.828567Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:21:28.828596Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:21:28.828640Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:21:28.828667Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:21:28.828686Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:21:28.829062Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=50; 2025-03-31T16:21:28.829154Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=45; 2025-03-31T16:21:28.829232Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=29; 2025-03-31T16:21:28.829319Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=49; 2025-03-31T16:21:28.829499Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:21:28.829555Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:21:28.829592Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:21:28.829824Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:21:28.829878Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:21:28.829910Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:21:28.830081Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:21:28.830127Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:21:28.830155Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-31T16:21:28.830362Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:21:28.830419Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:21:28.830449Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-31T16:21:28.830579Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T16:21:28.830643Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T16:21:28.830697Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 5721Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:15719:17677];process=Enqueue;ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1503;stage=finished; 2025-03-31T16:31:01.562464Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-03-31T16:31:01.562582Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=15; 2025-03-31T16:31:01.563036Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=373; 2025-03-31T16:31:01.563109Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=455; 2025-03-31T16:31:01.570450Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-03-31T16:31:01.570561Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=14; 2025-03-31T16:31:01.582933Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=12249; 2025-03-31T16:31:01.596579Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=11983; 2025-03-31T16:31:01.596711Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=13655; 2025-03-31T16:31:01.596911Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=128; 2025-03-31T16:31:01.597053Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=91; 2025-03-31T16:31:01.597264Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=164; 2025-03-31T16:31:01.597462Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=146; 2025-03-31T16:31:01.597740Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=223; 2025-03-31T16:31:01.597793Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=27171; 2025-03-31T16:31:01.603112Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-03-31T16:31:01.603212Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=16; 2025-03-31T16:31:01.616932Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=13607; 2025-03-31T16:31:01.662339Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=45272; 2025-03-31T16:31:01.662472Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=36; 2025-03-31T16:31:01.662535Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=24; 2025-03-31T16:31:01.662576Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=7; 2025-03-31T16:31:01.662617Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=8; 2025-03-31T16:31:01.662656Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=8; 2025-03-31T16:31:01.662743Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=41; 2025-03-31T16:31:01.662782Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=7; 2025-03-31T16:31:01.662868Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=57; 2025-03-31T16:31:01.662909Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=6; 2025-03-31T16:31:01.662967Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=26; 2025-03-31T16:31:01.663077Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=74; 2025-03-31T16:31:01.663207Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=60; 2025-03-31T16:31:01.663256Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=59987; 2025-03-31T16:31:01.663429Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=2425692;raw_bytes=4011492;count=1;records=39328} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=111593408;raw_bytes=187115688;count=44;records=1805672} inactive {blob_bytes=178990352;raw_bytes=300417312;count=90;records=2897034} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-03-31T16:31:01.664077Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:15719:17677];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-03-31T16:31:01.664134Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:15719:17677];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-03-31T16:31:01.664226Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15719:17677];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-03-31T16:31:01.664288Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15719:17677];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=0; 2025-03-31T16:31:01.664460Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-31T16:31:01.664524Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:31:01.664707Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=22; 2025-03-31T16:31:01.664771Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2025-03-31T16:31:01.664908Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=22;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-31T16:31:01.664957Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-31T16:31:01.664998Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-31T16:31:01.665089Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-31T16:31:01.670221Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:31:01.676709Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15719:17677];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-03-31T16:31:01.678224Z node 1 :TX_COLUMNSHARD 
DEBUG: tablet_id=9437184;self_id=[1:15719:17677];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-03-31T16:31:01.678273Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-03-31T16:31:01.678301Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-03-31T16:31:01.678351Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15719:17677];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-31T16:31:01.678428Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15719:17677];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:31:01.678651Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15719:17677];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=22; 2025-03-31T16:31:01.678718Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15719:17677];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2025-03-31T16:31:01.678769Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15719:17677];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=22;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-31T16:31:01.678835Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15719:17677];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-31T16:31:01.678892Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15719:17677];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-31T16:31:01.678990Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15719:17677];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.999000s; 2025-03-31T16:31:01.679042Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15719:17677];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; >> KqpIndexes::SelectFromAsyncIndexedTable [GOOD] >> KqpIndexes::SelectFromIndexesAndFreeSpaceLogicDoesntTimeout >> KqpIndexes::MultipleSecondaryIndexWithSameComulns+UseSink [GOOD] >> KqpIndexes::DoUpsertWithoutIndexUpdate+UniqIndex-UseSink >> KqpIndexes::UpsertWithNullKeysComplex [GOOD] >> KqpIndexes::SecondaryIndexUpsert1DeleteUpdate >> KqpIndexes::UpsertWithoutExtraNullDelete+UseSink >> KqpIndexes::SelectConcurentTX >> TPersQueueTest::SetMeteringMode [GOOD] >> TPersQueueTest::TClusterTrackerTest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-33 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-34 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> 
KqpIndexes::MultipleSecondaryIndexWithSameComulns+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 23876, MsgBus: 18274 2025-03-31T16:30:36.840694Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011928479277150:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:36.840802Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dd6/r3tmp/tmpmCsZ4f/pdisk_1.dat 2025-03-31T16:30:37.277040Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:30:37.289975Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:30:37.290073Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:30:37.294852Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23876, node 1 2025-03-31T16:30:37.431719Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:30:37.431741Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:30:37.431750Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:30:37.431877Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18274 TClient is connected to server localhost:18274 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:30:37.950490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:37.970736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:38.124405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:30:38.284062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:38.362167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:40.075964Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011945659148106:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:40.076120Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:40.373461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:30:40.401175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:30:40.430005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:30:40.458980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:30:40.491200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:30:40.522107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:30:40.570029Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011945659148615:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:40.570111Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:40.570153Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011945659148620:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:40.573731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:30:40.583614Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011945659148622:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:30:40.648387Z node 1 :TX_PROXY ERROR: Actor# [1:7488011945659148675:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:30:41.565012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:30:41.841228Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011928479277150:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:41.841311Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:30:43.144016Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:30:43.159273Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:30:43.179181Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 10475, MsgBus: 18146 2025-03-31T16:30:44.112318Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488011959218652817:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:44.112472Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dd6/r3tmp/tmpD7vTt8/pdisk_1.dat 2025-03-31T16:30:44.240464Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:30:44.267162Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:30:44.267242Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:30:44.268422Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10475, node 2 2025-03-31T16:30:44.327170Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:30:44.327194Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:30:44.327203Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:30:44.327338Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18146 TClient is connected to server localhost:18146 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:30:44.756636Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:44.764962Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:30:44.771771Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:30:44.855064Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but pr ... es: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:47.906984Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488011972103556990:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:47.910341Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:30:47.920322Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488011972103556992:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:30:47.980825Z node 2 :TX_PROXY ERROR: Actor# [2:7488011972103557045:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:30:49.057038Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:30:49.120006Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488011959218652817:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:49.120057Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:30:50.872018Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 28128, MsgBus: 3470 2025-03-31T16:30:51.823919Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488011991295047511:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:51.823968Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dd6/r3tmp/tmpt8m1OC/pdisk_1.dat 2025-03-31T16:30:51.954791Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:30:51.983259Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:30:51.983341Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:30:51.984660Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28128, node 3 2025-03-31T16:30:52.102345Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:30:52.102367Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:30:52.102373Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:30:52.102477Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3470 TClient is connected to server localhost:3470 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:30:52.764094Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:52.783307Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:52.859300Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:53.028614Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:30:53.104039Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-31T16:30:55.624397Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488012008474918466:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:55.624552Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:55.666315Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:30:55.700319Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:30:55.735029Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:30:55.767894Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:30:55.803226Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:30:55.845431Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:30:55.903185Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488012008474918981:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:55.903283Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:55.903300Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488012008474918986:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:55.907906Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:30:55.917998Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488012008474918988:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:30:56.001963Z node 3 :TX_PROXY ERROR: Actor# [3:7488012008474919041:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:30:56.824461Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488011991295047511:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:56.824533Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:30:57.021471Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:30:58.080681Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:30:59.340210Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:30:59.357776Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:30:59.382249Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:00.440269Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:00.495308Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:01.095678Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:02.014638Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:02.043803Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:02.821484Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:02.846276Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:03.373932Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:03.393775Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:03.922338Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:03.940026Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:04.201385Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:04.232065Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:04.717325Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-39 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-40 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-33 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-34 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::UpsertWithNullKeysComplex [GOOD] Test command err: Trying to start YDB, gRPC: 64240, MsgBus: 22704 2025-03-31T16:30:51.626002Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011990373749342:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:51.626073Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000db8/r3tmp/tmp4zZACL/pdisk_1.dat 2025-03-31T16:30:52.100274Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:30:52.163343Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:30:52.163476Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 64240, node 1 2025-03-31T16:30:52.165521Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:30:52.242479Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:30:52.242502Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:30:52.242515Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:30:52.242668Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22704 TClient is connected to server localhost:22704 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:30:52.962086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:52.984559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:53.125736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:53.306077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-03-31T16:30:53.379498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:30:55.141776Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012007553620311:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:55.141869Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:55.433210Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:30:55.466544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:30:55.536387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:30:55.576232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:30:55.608363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:30:55.640083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:30:55.705996Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012007553620826:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:55.706098Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:55.706347Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012007553620831:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:55.710572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:30:55.722698Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012007553620833:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:30:55.785362Z node 1 :TX_PROXY ERROR: Actor# [1:7488012007553620886:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:30:56.627183Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011990373749342:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:56.627352Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:30:56.687835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:30:56.827809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-03-31T16:30:56.872131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-03-31T16:30:57.235215Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 22962, MsgBus: 30476 2025-03-31T16:30:58.760366Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012021284530524:2065];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000db8/r3tmp/tmpJWHBko/pdisk_1.dat 2025-03-31T16:30:58.819117Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:30:58.920062Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:30:58.937178Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:30:58.937265Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:30:58.939294Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22962, node 2 2025-03-31T16:30:58.976218Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:30:58.976236Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:30:58.976242Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:30:58.976328Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30476 TClient is connected to server localhost:30476 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:30:59.371650Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:59.386977Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:59.453016Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:30:59.603802Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:30:59.695056Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:01.875746Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012034169434175:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:01.875853Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:01.926763Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:31:01.958937Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:31:02.010417Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:31:02.053017Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:31:02.110276Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:31:02.149741Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:31:02.213090Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012038464401981:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:02.213159Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:02.213286Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012038464401986:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:02.216794Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:31:02.233545Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488012038464401988:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:31:02.321080Z node 2 :TX_PROXY ERROR: Actor# [2:7488012038464402044:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:31:03.209017Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:31:03.732672Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488012021284530524:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:03.732740Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:31:03.998434Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:04.016644Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-03-31T16:31:04.058011Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-03-31T16:31:05.215266Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:05.239478Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill >> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt32 [GOOD] >> KqpIndexes::MultipleModifications >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-51 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-52 >> TAsyncIndexTests::CdcAndSplitWithReboots[PipeResets] [GOOD] >> KqpIndexes::NullInIndexTable [GOOD] >> KqpIndexes::MultipleSecondaryIndexWithSameComulns-UseSink >> KqpUniqueIndex::UpdateFkPkOverlap [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::CdcAndSplitWithReboots[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [1:135:2157] sender: [1:136:2058] recipient: [1:111:2142] 2025-03-31T16:29:37.014113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, 
WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:29:37.014200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:29:37.014240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:29:37.014274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:29:37.014325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:29:37.014354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:29:37.014446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:29:37.014556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:29:37.014849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:29:37.134483Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-03-31T16:29:37.134564Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:169:2058] recipient: [1:15:2062] 2025-03-31T16:29:37.164860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:29:37.165074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:29:37.165242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:29:37.184297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:29:37.184574Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:29:37.185185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:37.185419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:29:37.190277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:37.191822Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:29:37.191886Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:37.192001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:29:37.192057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a 
bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:29:37.192096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:29:37.192250Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:210:2058] recipient: [1:208:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:210:2058] recipient: [1:208:2209] Leader for TabletID 72057594037968897 is [1:214:2213] sender: [1:215:2058] recipient: [1:208:2209] 2025-03-31T16:29:37.199119Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:235:2058] recipient: [1:15:2062] 2025-03-31T16:29:37.333605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:29:37.333839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:37.334047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:29:37.334250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:29:37.334297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:37.339799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:37.339941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:29:37.340215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:37.340282Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:29:37.340319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:29:37.340358Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:29:37.346388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:37.346454Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:29:37.346488Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:29:37.356793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:37.356861Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 
72057594046678944 2025-03-31T16:29:37.356900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:37.356945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:29:37.361280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:29:37.375909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:29:37.376168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:135:2157] sender: [1:250:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:29:37.377156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:37.377297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 135 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:29:37.377351Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:37.377641Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:29:37.377699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:37.378094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:29:37.378205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:29:37.381217Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:29:37.381277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:29:37.381474Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:37.381514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:202:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:29:37.381852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:37.381891Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:29:37.381978Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done 
id#1:0 progress is 1/1 2025-03-31T16:29:37.382007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:29:37.382049Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:29:37.382114Z no ... BrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 50 } } } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 6 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false 
AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\001\000\004\000\000\0002\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:31:07.190257Z node 34 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-31T16:31:07.190510Z node 34 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 279us result status StatusSuccess 2025-03-31T16:31:07.191655Z node 34 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 
ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:31:07.202785Z node 34 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409546][34:1083:2870] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-03-31T16:31:07.202883Z node 34 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409550:2][34:1041:2870] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2025-03-31T16:31:07.203036Z node 34 
:CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409546][34:1083:2870] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1743438667161110 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1743438667161110 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 },{ Order: 5 Group: 1743438667161110 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 }] } 2025-03-31T16:31:07.206245Z node 34 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409546][34:1083:2870] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 5 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 5 2025-03-31T16:31:07.206350Z node 34 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409550:2][34:1041:2870] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } >> KqpUniqueIndex::UpdateFkAlreadyExist [GOOD] >> KqpUniqueIndex::UpdateOnHidenChanges-DataColumn >> KqpIndexes::SecondaryIndexUsingInJoin-UseStreamJoin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::UpdateFkPkOverlap [GOOD] Test command err: Trying to start YDB, gRPC: 15133, MsgBus: 6025 2025-03-31T16:30:48.247606Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011976329629970:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:48.247661Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dc4/r3tmp/tmpvDVEA5/pdisk_1.dat 2025-03-31T16:30:48.589625Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15133, node 1 2025-03-31T16:30:48.643294Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:30:48.643405Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:30:48.645352Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:30:48.697191Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:30:48.697209Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:30:48.697217Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:30:48.697327Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6025 TClient is connected to server 
localhost:6025 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:30:49.353932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:49.376115Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:30:49.391440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:49.553590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:49.734755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:49.807038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:51.692844Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011989214533634:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:51.692967Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:52.034173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:30:52.069616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:30:52.142448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:30:52.178203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:30:52.212584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:30:52.294222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:30:52.347335Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011993509501450:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:52.347435Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:52.347647Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011993509501455:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:52.352532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:30:52.365581Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011993509501457:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:30:52.468636Z node 1 :TX_PROXY ERROR: Actor# [1:7488011993509501512:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:30:53.247993Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011976329629970:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:53.248083Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:30:53.325930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 29161, MsgBus: 22445 2025-03-31T16:30:57.473299Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012015049604879:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:57.473336Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dc4/r3tmp/tmpZcaJKl/pdisk_1.dat 2025-03-31T16:30:57.607234Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:30:57.624477Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:30:57.624568Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:30:57.626272Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29161, node 2 2025-03-31T16:30:57.669337Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:30:57.669367Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:30:57.669375Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:30:57.669508Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22445 TClient is connected to server localhost:22445 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:30:58.208479Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:58.235748Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:58.326549Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:58.511099Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:58.588568Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:00.996246Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012027934508543:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:00.996345Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:01.055421Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:31:01.084811Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:31:01.122194Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:31:01.160539Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:31:01.193558Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:31:01.226762Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:31:01.306773Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012032229476354:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:01.306862Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:01.307102Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012032229476359:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:01.310660Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:31:01.323294Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488012032229476361:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:31:01.406228Z node 2 :TX_PROXY ERROR: Actor# [2:7488012032229476416:3454] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:31:02.194283Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:02.475442Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488012015049604879:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:02.475538Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt32 [GOOD] Test command err: 2025-03-31T16:22:15.287740Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:22:15.376348Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:22:15.397635Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:22:15.397903Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:22:15.405808Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:22:15.406012Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:22:15.406241Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:22:15.406361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:22:15.406464Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:22:15.406632Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:22:15.406756Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:22:15.406896Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:22:15.407007Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:22:15.407127Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:22:15.407255Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:22:15.407382Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:22:15.433642Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:22:15.433810Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:22:15.433876Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:22:15.434069Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:22:15.434229Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:22:15.434298Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:22:15.434339Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:22:15.434434Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:22:15.434491Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:22:15.434529Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:22:15.434559Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:22:15.434795Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:22:15.434880Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:22:15.434926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:22:15.434974Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:22:15.435137Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:22:15.435198Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:22:15.435243Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:22:15.435275Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:22:15.435341Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:22:15.435376Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:22:15.435408Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:22:15.435466Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:22:15.435505Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:22:15.435531Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:22:15.435907Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=46; 2025-03-31T16:22:15.436001Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=44; 2025-03-31T16:22:15.436099Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=43; 2025-03-31T16:22:15.436184Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=35; 2025-03-31T16:22:15.436343Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:22:15.436396Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:22:15.436430Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:22:15.436655Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:22:15.436705Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:22:15.436756Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:22:15.436911Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:22:15.436956Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:22:15.436985Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-31T16:22:15.437223Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:22:15.437269Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:22:15.437302Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-31T16:22:15.437437Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T16:22:15.437476Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T16:22:15.437523Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
1T16:31:05.657390Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:10502:12463];process=Enqueue;ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1503;stage=finished; 2025-03-31T16:31:06.522689Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-03-31T16:31:06.522822Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=16; 2025-03-31T16:31:06.523523Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=608; 2025-03-31T16:31:06.523572Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=692; 2025-03-31T16:31:06.528853Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-03-31T16:31:06.528966Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=16; 2025-03-31T16:31:06.547298Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=18187; 2025-03-31T16:31:06.571427Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=22585; 2025-03-31T16:31:06.571578Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=24169; 2025-03-31T16:31:06.571797Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=138; 2025-03-31T16:31:06.571951Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=98; 2025-03-31T16:31:06.572179Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=172; 2025-03-31T16:31:06.572374Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=140; 2025-03-31T16:31:06.572689Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=250; 2025-03-31T16:31:06.572744Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=43699; 2025-03-31T16:31:06.577905Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-03-31T16:31:06.578010Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=14; 2025-03-31T16:31:06.582111Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=3989; 2025-03-31T16:31:06.619519Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=37269; 2025-03-31T16:31:06.619715Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=66; 2025-03-31T16:31:06.619818Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=36; 2025-03-31T16:31:06.619883Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=12; 2025-03-31T16:31:06.619943Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=12; 2025-03-31T16:31:06.620008Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=11; 2025-03-31T16:31:06.620128Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=67; 2025-03-31T16:31:06.620190Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=9; 2025-03-31T16:31:06.620317Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=74; 2025-03-31T16:31:06.620378Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=10; 2025-03-31T16:31:06.620492Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=62; 2025-03-31T16:31:06.620631Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=78; 2025-03-31T16:31:06.620749Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=69; 2025-03-31T16:31:06.620802Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=42728; 2025-03-31T16:31:06.621050Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=113961708;raw_bytes=176366876;count=47;records=1845000} inactive {blob_bytes=174125508;raw_bytes=270077548;count=81;records=2819164} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-03-31T16:31:06.622440Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:10502:12463];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-03-31T16:31:06.622554Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:10502:12463];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-03-31T16:31:06.622663Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:10502:12463];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-03-31T16:31:06.622738Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10502:12463];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=0; 2025-03-31T16:31:06.623024Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-31T16:31:06.623152Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:31:06.623454Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=20; 2025-03-31T16:31:06.623561Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2025-03-31T16:31:06.623630Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=20;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-31T16:31:06.623703Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-31T16:31:06.623758Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-31T16:31:06.623901Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-31T16:31:06.627597Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:31:06.631170Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10502:12463];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-03-31T16:31:06.633246Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10502:12463];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-03-31T16:31:06.633322Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-03-31T16:31:06.633365Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-03-31T16:31:06.633441Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10502:12463];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-31T16:31:06.633552Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10502:12463];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:31:06.633640Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10502:12463];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=20; 2025-03-31T16:31:06.633752Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10502:12463];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2025-03-31T16:31:06.633830Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10502:12463];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=20;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-31T16:31:06.633917Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10502:12463];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-31T16:31:06.633979Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10502:12463];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-31T16:31:06.634104Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10502:12463];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=1.000000s; 2025-03-31T16:31:06.634182Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10502:12463];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::UpdateFkAlreadyExist [GOOD] Test command err: Trying to start YDB, gRPC: 25352, MsgBus: 16649 2025-03-31T16:30:49.371275Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011982934842138:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:49.375420Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dbd/r3tmp/tmpMGYMPC/pdisk_1.dat 2025-03-31T16:30:49.814829Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:30:49.829069Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:30:49.829172Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 25352, node 1 2025-03-31T16:30:49.835152Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:30:49.908262Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:30:49.908299Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:30:49.908311Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:30:49.908440Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16649 TClient is connected to server localhost:16649 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:30:50.530212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:50.542642Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:50.549680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:30:50.694546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:50.840388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:50.926301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:52.696107Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011995819745663:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:52.696243Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:53.025531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:30:53.059264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:30:53.091338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:30:53.159601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:30:53.194614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:30:53.261531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:30:53.326027Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012000114713479:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:53.326157Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:53.326396Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012000114713484:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:53.330377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:30:53.341653Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012000114713486:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:30:53.407405Z node 1 :TX_PROXY ERROR: Actor# [1:7488012000114713541:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:30:54.274202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:54.317607Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011982934842138:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:54.317714Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 22168, MsgBus: 28842 2025-03-31T16:30:57.156534Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012015027197001:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:57.156587Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dbd/r3tmp/tmpX30l4r/pdisk_1.dat 2025-03-31T16:30:57.267510Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22168, node 2 2025-03-31T16:30:57.295563Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:30:57.295643Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:30:57.296636Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:30:57.333865Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:30:57.333893Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:30:57.333900Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:30:57.334031Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28842 TClient is connected to server localhost:28842 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:30:57.775913Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:57.792432Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:57.882253Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:58.042635Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:58.106920Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:00.361975Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012027912100649:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:00.362069Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:00.420372Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:31:00.491759Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:31:00.568325Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:31:00.607558Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:31:00.652271Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:31:00.696783Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:31:00.794179Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012027912101171:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:00.794263Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:00.794547Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012027912101176:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:00.798065Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:31:00.810926Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2025-03-31T16:31:00.811132Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488012027912101178:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:31:00.872066Z node 2 :TX_PROXY ERROR: Actor# [2:7488012027912101233:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:31:01.702246Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:02.156746Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488012015027197001:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:02.156814Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:31:03.727696Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqpgnk635s8z1r0gstfq09m9, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmM0MDgwMTUtN2NiNDZkMjUtNWZlODBmYjAtNDI0NDJmMzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-03-31T16:31:03.737904Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZmM0MDgwMTUtN2NiNDZkMjUtNWZlODBmYjAtNDI0NDJmMzc=, ActorId: [2:7488012032207069596:2554], ActorState: ExecuteState, TraceId: 01jqpgnk635s8z1r0gstfq09m9, Create QueryResponse for error on request, msg: 2025-03-31T16:31:04.634691Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqpgnm20808m7neeah12rtev, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmM0MDgwMTUtN2NiNDZkMjUtNWZlODBmYjAtNDI0NDJmMzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-03-31T16:31:04.634953Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZmM0MDgwMTUtN2NiNDZkMjUtNWZlODBmYjAtNDI0NDJmMzc=, ActorId: [2:7488012032207069596:2554], ActorState: ExecuteState, TraceId: 01jqpgnm20808m7neeah12rtev, Create QueryResponse for error on request, msg: 2025-03-31T16:31:04.659627Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqpgnmy52cqdqksyd4s100ey, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmM0MDgwMTUtN2NiNDZkMjUtNWZlODBmYjAtNDI0NDJmMzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-03-31T16:31:04.659895Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZmM0MDgwMTUtN2NiNDZkMjUtNWZlODBmYjAtNDI0NDJmMzc=, ActorId: [2:7488012032207069596:2554], ActorState: ExecuteState, TraceId: 01jqpgnmy52cqdqksyd4s100ey, Create QueryResponse for error on request, msg: 2025-03-31T16:31:05.639057Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqpgnmyv67eqv881y83n4hkj, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmM0MDgwMTUtN2NiNDZkMjUtNWZlODBmYjAtNDI0NDJmMzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 
2025-03-31T16:31:05.639332Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZmM0MDgwMTUtN2NiNDZkMjUtNWZlODBmYjAtNDI0NDJmMzc=, ActorId: [2:7488012032207069596:2554], ActorState: ExecuteState, TraceId: 01jqpgnmyv67eqv881y83n4hkj, Create QueryResponse for error on request, msg: >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-16 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-17 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-10 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-11 >> KqpIndexes::SecondaryIndexUpdateOnUsingIndex [GOOD] >> KqpIndexes::SecondaryIndexSelectUsingScripting >> KqpUniqueIndex::InsertNullInComplexFk >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-28 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-29 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-4 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-5 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-46 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-47 >> KqpIndexes::UniqAndNoUniqSecondaryIndex [GOOD] >> KqpIndexes::Uint8Index >> KqpIndexes::DoUpsertWithoutIndexUpdate-UniqIndex-UseSink >> KqpIndexes::CreateTableWithImplicitSyncIndexSQL [GOOD] >> KqpIndexes::CreateTableWithExplicitSyncIndexSQL >> KqpMultishardIndex::YqWorksFineAfterAlterIndexTableDirectly >> KqpMultishardIndex::SortByPk [GOOD] >> KqpIndexes::ExplainCollectFullDiagnostics >> KqpUniqueIndex::InsertFkPartialColumnSet [GOOD] >> KqpUniqueIndex::InsertFkPkOverlap >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-34 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-35 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-34 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-35 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-40 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-41 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpMultishardIndex::SortByPk [GOOD] Test command err: Trying to start YDB, gRPC: 26455, MsgBus: 28401 2025-03-31T16:30:57.579506Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012018558414090:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:57.579782Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dad/r3tmp/tmpZ4mNDn/pdisk_1.dat 2025-03-31T16:30:57.938013Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:30:57.940472Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:30:57.940596Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:30:57.946045Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26455, node 1 2025-03-31T16:30:58.050714Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty 
maybe) 2025-03-31T16:30:58.050731Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:30:58.050738Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:30:58.050813Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28401 TClient is connected to server localhost:28401 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:30:58.594563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:58.609908Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:30:58.618460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:58.789829Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:58.952733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:30:59.023319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-31T16:31:00.868462Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012031443317750:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:00.868608Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:01.200919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:31:01.232336Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:31:01.262507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:31:01.290534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:31:01.322642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:31:01.355858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:31:01.433406Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012035738285564:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:01.433488Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:01.433500Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012035738285569:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:01.436798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:31:01.448692Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012035738285571:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:31:01.551663Z node 1 :TX_PROXY ERROR: Actor# [1:7488012035738285626:3449] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:31:02.446628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:02.635512Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488012018558414090:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:02.649787Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 8509, MsgBus: 29621 2025-03-31T16:31:04.382065Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012045178033670:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:04.382229Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dad/r3tmp/tmplgrrOH/pdisk_1.dat 2025-03-31T16:31:04.471436Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8509, node 2 2025-03-31T16:31:04.525506Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:04.525613Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:04.529633Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:31:04.552552Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:04.552576Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:04.552585Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:04.552694Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29621 TClient is connected to server localhost:29621 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:31:05.000619Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:05.009234Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:31:05.022048Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:05.099384Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:05.241704Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:05.312736Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:07.707512Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012058062937338:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:07.707632Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:07.735386Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:31:07.791789Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:31:07.834718Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:31:07.872123Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:31:07.912171Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:31:07.956137Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:31:08.042066Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012062357905151:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:08.042160Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:08.042634Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012062357905156:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:08.046657Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:31:08.061525Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488012062357905158:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:31:08.153639Z node 2 :TX_PROXY ERROR: Actor# [2:7488012062357905213:3450] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:31:09.114132Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:09.383791Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488012045178033670:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:09.383861Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpUniqueIndex::UpsertImplicitNullInComplexFk [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-52 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-53 >> KqpIndexes::DoUpsertWithoutIndexUpdate+UniqIndex-UseSink [GOOD] >> KqpIndexes::DoUpsertWithoutIndexUpdate-UniqIndex+UseSink >> KqpIndexes::SelectConcurentTX [GOOD] >> KqpIndexes::SecondaryIndexWithPrimaryKeySameComulns+UseSink >> KqpMultishardIndex::DataColumnSelect [GOOD] >> KqpIndexes::SecondaryIndexUpsert1DeleteUpdate [GOOD] >> KqpIndexes::SecondaryIndexUpsert2Update >> KqpIndexes::MultipleModifications [GOOD] >> KqpIndexes::JoinWithNonPKColumnsInPredicate+UseStreamJoin >> KqpIndexes::UpsertWithoutExtraNullDelete+UseSink [GOOD] >> KqpIndexes::UpsertWithoutExtraNullDelete-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::UpsertImplicitNullInComplexFk [GOOD] Test command err: Trying to start YDB, gRPC: 22885, MsgBus: 8592 2025-03-31T16:30:53.818814Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011999717285154:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:53.820099Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000db2/r3tmp/tmpvYLfER/pdisk_1.dat 2025-03-31T16:30:54.189425Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22885, node 1 2025-03-31T16:30:54.221962Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:30:54.222614Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:30:54.232988Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:30:54.263762Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:30:54.263787Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:30:54.263794Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:30:54.263943Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 
TClient is connected to server localhost:8592 TClient is connected to server localhost:8592 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:30:54.889345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:54.921754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:55.082998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:30:55.238721Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:55.313250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:30:56.955804Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012012602188789:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:56.955930Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:57.254640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:30:57.323440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:30:57.356804Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:30:57.391877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:30:57.430540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:30:57.474477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:30:57.535508Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012016897156598:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:57.535622Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:57.535928Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012016897156603:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:57.540560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:30:57.552063Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012016897156605:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:30:57.622484Z node 1 :TX_PROXY ERROR: Actor# [1:7488012016897156658:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:30:58.538697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:58.818791Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011999717285154:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:58.818865Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 3910, MsgBus: 29466 2025-03-31T16:31:02.076959Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012038234543051:2268];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:02.077055Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000db2/r3tmp/tmpCztRKK/pdisk_1.dat 2025-03-31T16:31:02.196013Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:31:02.227710Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:02.227815Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:02.230410Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3910, node 2 2025-03-31T16:31:02.273393Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:02.273421Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:02.273429Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:02.273564Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29466 TClient is connected to server localhost:29466 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-31T16:31:02.756726Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:31:02.767443Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:31:02.780919Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:02.856605Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:03.025823Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:03.107531Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:05.368876Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012051119446489:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:05.368966Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:05.422624Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:31:05.463593Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:31:05.502736Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:31:05.546021Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:31:05.586119Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:31:05.624353Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:31:05.681687Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012051119447004:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:05.681780Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:05.681947Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012051119447010:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:05.685711Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:31:05.696546Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488012051119447012:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:31:05.755808Z node 2 :TX_PROXY ERROR: Actor# [2:7488012051119447065:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:31:06.670307Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:07.076819Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488012038234543051:2268];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:07.076874Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:31:12.788754Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqpgnvx36b9fq46bz4sb5ssy, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWFlZmNlNjMtNzY0NWZlNzYtZWNlMTZhZjAtNjhhZGExNGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-03-31T16:31:12.801210Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OWFlZmNlNjMtNzY0NWZlNzYtZWNlMTZhZjAtNjhhZGExNGU=, ActorId: [2:7488012055414415433:2554], ActorState: ExecuteState, TraceId: 01jqpgnvx36b9fq46bz4sb5ssy, Create QueryResponse for error on request, msg: >> KqpIndexes::PrefixedVectorIndexOrderByCosineSimilarityNullableLevel1 >> KqpIndexes::SelectFromIndexesAndFreeSpaceLogicDoesntTimeout [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpMultishardIndex::DataColumnSelect [GOOD] Test command err: Trying to start YDB, gRPC: 64219, MsgBus: 27114 2025-03-31T16:30:57.898437Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012017258775437:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:57.898597Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dac/r3tmp/tmpRB2BiO/pdisk_1.dat 2025-03-31T16:30:58.316248Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:30:58.324898Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:30:58.324984Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 64219, node 1 2025-03-31T16:30:58.328712Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:30:58.369912Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:30:58.369939Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:30:58.369947Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:30:58.370083Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected 
to server localhost:27114 TClient is connected to server localhost:27114 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:30:58.925042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:58.967242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:30:59.072329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-31T16:30:59.209437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:59.288523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:01.001100Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012030143679103:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:01.001216Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:01.283368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:31:01.352386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:31:01.382005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:31:01.411648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:31:01.437492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:31:01.504968Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:31:01.545368Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012034438646913:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:01.545412Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:01.545456Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012034438646918:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:01.548967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:31:01.557808Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012034438646920:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:31:01.647757Z node 1 :TX_PROXY ERROR: Actor# [1:7488012034438646973:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:31:02.576021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:02.898331Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488012017258775437:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:02.898384Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 21488, MsgBus: 13914 2025-03-31T16:31:04.841114Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012045470706238:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:04.841214Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dac/r3tmp/tmpDcD9Gv/pdisk_1.dat 2025-03-31T16:31:04.955173Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:31:04.991010Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:04.991117Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 21488, node 2 2025-03-31T16:31:04.995207Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:31:05.031645Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:05.031669Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:05.031677Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:05.031795Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13914 TClient is connected to server localhost:13914 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:31:05.506282Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:05.538374Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:05.625277Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:05.774781Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:05.841111Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:08.012467Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012062650577185:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:08.012543Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:08.094073Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:31:08.123061Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:31:08.159612Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:31:08.199713Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:31:08.240138Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:31:08.322133Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:31:08.379395Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012062650577700:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:08.379458Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:08.379697Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012062650577705:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:08.384264Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:31:08.398096Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488012062650577707:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:31:08.482508Z node 2 :TX_PROXY ERROR: Actor# [2:7488012062650577761:3441] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:31:09.374848Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:31:09.454301Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-31T16:31:09.496774Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-31T16:31:09.842961Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488012045470706238:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:09.843040Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:31:11.279931Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 waiting... >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-17 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-18 >> KqpIndexes::PrefixedVectorIndexOrderByCosineDistanceNotNullableLevel1 [GOOD] >> KqpIndexes::PrefixedVectorIndexOrderByCosineDistanceNullableLevel1 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-11 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-12 >> KqpIndexes::SecondaryIndexSelectUsingScripting [GOOD] >> KqpIndexes::SecondaryIndexReplace-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::SelectFromIndexesAndFreeSpaceLogicDoesntTimeout [GOOD] Test command err: Trying to start YDB, gRPC: 3335, MsgBus: 26797 2025-03-31T16:30:52.415547Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011996003843274:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:52.416747Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000db3/r3tmp/tmpMtbc0u/pdisk_1.dat 2025-03-31T16:30:52.897822Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:30:52.917019Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:30:52.917153Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:30:52.918843Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on 
GrpcPort 3335, node 1 2025-03-31T16:30:53.030908Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:30:53.030932Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:30:53.030938Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:30:53.031058Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26797 TClient is connected to server localhost:26797 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:30:53.567051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:53.591653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:53.731950Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:53.884135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:53.951634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:55.595504Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012008888746913:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:55.595682Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:55.906839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:30:55.944101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:30:55.973047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:30:56.003119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:30:56.029208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:30:56.064900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:30:56.110011Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012013183714719:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:56.110073Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:56.110164Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012013183714724:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:56.113482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:30:56.123838Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012013183714726:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:30:56.213820Z node 1 :TX_PROXY ERROR: Actor# [1:7488012013183714780:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:30:57.029423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:30:57.410753Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011996003843274:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:57.410840Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:30:58.799472Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976710678; 2025-03-31T16:30:58.810413Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488012021773650170:2496], Table: `/Root/TestTable/Index/indexImplTable` ([72057594046644480:18:1]), SessionActorId: [1:7488012017478682367:2496]Got LOCKS BROKEN for table `/Root/TestTable/Index/indexImplTable`. ShardID=72075186224037919, Sink=[1:7488012021773650170:2496].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-03-31T16:30:58.811042Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488012021773650140:2496], SessionActorId: [1:7488012017478682367:2496], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/TestTable/Index/indexImplTable`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[1:7488012017478682367:2496]. isRollback=0 2025-03-31T16:30:58.811374Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjlmNzY0NGItYjc2YTZiZjItNjViMzI1MWEtNDYwYWNkOWY=, ActorId: [1:7488012017478682367:2496], ActorState: ExecuteState, TraceId: 01jqpgnerkemvc0117f219h0h6, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [1:7488012021773650153:2496] from: [1:7488012021773650140:2496] 2025-03-31T16:30:58.811495Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7488012021773650153:2496] TxId: 281474976710678. Ctx: { TraceId: 01jqpgnerkemvc0117f219h0h6, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjlmNzY0NGItYjc2YTZiZjItNjViMzI1MWEtNDYwYWNkOWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/TestTable/Index/indexImplTable`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-03-31T16:30:58.812162Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_ABORTED;details=Distributed transaction aborted due to commit failure;tx_id=281474976710678; 2025-03-31T16:30:58.812336Z node 1 :TX_DATASHARD ERROR: Complete volatile write [1743438658843 : 281474976710678] from 72075186224037920 at tablet 72075186224037920, error: Status: STATUS_ABORTED Issues: { message: "Distributed transaction aborted due to commit failure" issue_code: 2011 severity: 1 } 2025-03-31T16:30:58.813523Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjlmNzY0NGItYjc2YTZiZjItNjViMzI1MWEtNDYwYWNkOWY=, ActorId: [1:7488012017478682367:2496], ActorState: ExecuteState, TraceId: 01jqpgnerkemvc0117f219h0h6, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 6381, MsgBus: 64723 2025-03-31T16:30:59.670231Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012024574388184:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:59.670272Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000db3/r3tmp/tmp2nBFHn/pdisk_1.dat 2025-03-31T16:30:59.776331Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6381, node 2 2025-03-31T16:30:59.808856Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:30:59.809006Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:30:59.819503Z node 2 :HIVE WARN: HIVE#72057594037 ... xistsActor;event=timeout;self_id=[2:7488012024574388184:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:04.670414Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:31:04.821192Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7488012046049227548:2496] TxId: 281474976715672. Ctx: { TraceId: 01jqpgnn3cf7bvrhkwxe6wt187, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjkyMWNiYjItOGQ0YTcxYi1hZTQwZGVjZC00M2NiZWQzZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Read operation can be performed on async index table: [72057594046644480:18:1] only with StaleRO isolation level 2025-03-31T16:31:04.821408Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MjkyMWNiYjItOGQ0YTcxYi1hZTQwZGVjZC00M2NiZWQzZg==, ActorId: [2:7488012046049227320:2496], ActorState: ExecuteState, TraceId: 01jqpgnn3cf7bvrhkwxe6wt187, Create QueryResponse for error on request, msg: 2025-03-31T16:31:04.839128Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7488012046049227561:2496] TxId: 281474976715674. Ctx: { TraceId: 01jqpgnn44bdhtheyzdfmw5mmn, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjkyMWNiYjItOGQ0YTcxYi1hZTQwZGVjZC00M2NiZWQzZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Read operation can be performed on async index table: [72057594046644480:18:1] only with StaleRO isolation level 2025-03-31T16:31:04.839302Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MjkyMWNiYjItOGQ0YTcxYi1hZTQwZGVjZC00M2NiZWQzZg==, ActorId: [2:7488012046049227320:2496], ActorState: ExecuteState, TraceId: 01jqpgnn44bdhtheyzdfmw5mmn, Create QueryResponse for error on request, msg: 2025-03-31T16:31:04.857594Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7488012046049227570:2496] TxId: 281474976715676. Ctx: { TraceId: 01jqpgnn4q5atgz259yp5me423, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjkyMWNiYjItOGQ0YTcxYi1hZTQwZGVjZC00M2NiZWQzZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Read operation can be performed on async index table: [72057594046644480:18:1] only with StaleRO isolation level 2025-03-31T16:31:04.857813Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MjkyMWNiYjItOGQ0YTcxYi1hZTQwZGVjZC00M2NiZWQzZg==, ActorId: [2:7488012046049227320:2496], ActorState: ExecuteState, TraceId: 01jqpgnn4q5atgz259yp5me423, Create QueryResponse for error on request, msg: 2025-03-31T16:31:04.872760Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7488012046049227579:2496] TxId: 281474976715678. Ctx: { TraceId: 01jqpgnn576ep4bbdj2cw2d59k, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjkyMWNiYjItOGQ0YTcxYi1hZTQwZGVjZC00M2NiZWQzZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Read operation can be performed on async index table: [72057594046644480:18:1] only with StaleRO isolation level 2025-03-31T16:31:04.872942Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MjkyMWNiYjItOGQ0YTcxYi1hZTQwZGVjZC00M2NiZWQzZg==, ActorId: [2:7488012046049227320:2496], ActorState: ExecuteState, TraceId: 01jqpgnn576ep4bbdj2cw2d59k, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 61980, MsgBus: 10269 2025-03-31T16:31:05.790990Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488012050517975396:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:05.791033Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000db3/r3tmp/tmpmJHfQl/pdisk_1.dat 2025-03-31T16:31:05.932790Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61980, node 3 2025-03-31T16:31:05.966132Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:05.966211Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:05.981176Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:31:06.044344Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:06.044369Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:06.044373Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:06.044480Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10269 TClient is connected to server localhost:10269 WaitRootIsUp 
'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:31:06.538008Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:06.553558Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:06.639767Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:06.795035Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:06.866458Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:09.536021Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488012067697846339:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:09.536118Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:09.582460Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:31:09.635120Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:31:09.674614Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:31:09.715940Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:31:09.752468Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:31:09.800746Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:31:09.858058Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488012067697846852:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:09.858178Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:09.858532Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488012067697846857:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:09.862566Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:31:09.873841Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488012067697846859:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:31:09.970936Z node 3 :TX_PROXY ERROR: Actor# [3:7488012067697846917:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:31:10.791888Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488012050517975396:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:10.791974Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:31:11.012074Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:31:11.056230Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-31T16:31:11.103410Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-29 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-30 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-47 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-48 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-5 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-6 >> KqpUniqueIndex::UpdateOnHidenChanges-DataColumn [GOOD] >> KqpUniqueIndex::UpdateOnNullInComplexFk >> KqpIndexes::SecondaryIndexUsingInJoin-UseStreamJoin [GOOD] >> KqpIndexes::SecondaryIndexUsingInJoin2+UseStreamJoin >> KqpIndexes::UniqIndexComplexPkComplexFkOverlap >> KqpIndexes::CreateTableWithExplicitSyncIndexSQL [GOOD] >> KqpIndexes::DeleteByIndex >> KqpIndexes::ExplainCollectFullDiagnostics [GOOD] >> KqpIndexes::ForbidDirectIndexTableCreation >> KqpUniqueIndex::InsertNullInComplexFk [GOOD] >> KqpUniqueIndex::InsertNullInComplexFkDuplicate >> KqpIndexes::Uint8Index [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-35 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-36 >> KqpIndexes::SimpleVectorIndexOrderByCosineDistanceNullableLevel1 [GOOD] >> KqpIndexes::SimpleVectorIndexOrderByCosineDistanceNullableLevel2 >> KqpIndexes::DoUpsertWithoutIndexUpdate-UniqIndex-UseSink [GOOD] >> KqpIndexes::DuplicateUpsertInterleave >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-35 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-36 >> KqpMultishardIndex::YqWorksFineAfterAlterIndexTableDirectly [GOOD] >> KqpUniqueIndex::InsertComplexFkPkOverlapDuplicate >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-41 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-42 >> TColumnShardTestReadWrite::CompactionSplitGranule_PKTimestamp [GOOD] >> 
SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-53 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-54 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::Uint8Index [GOOD] Test command err: Trying to start YDB, gRPC: 9289, MsgBus: 17373 2025-03-31T16:31:01.948339Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012032582973279:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:01.948423Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000da6/r3tmp/tmp22876V/pdisk_1.dat 2025-03-31T16:31:02.422676Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:31:02.427906Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:02.428000Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:02.431450Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9289, node 1 2025-03-31T16:31:02.524842Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:02.524864Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:02.524884Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:02.525040Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17373 TClient is connected to server localhost:17373 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:31:03.047204Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:03.079074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
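
For reference, the repeated "Read operation can be performed on async index table: [72057594046644480:18:1] only with StaleRO isolation level" rejections earlier in this log are the expected behavior when a query reads a secondary index that is maintained asynchronously: the index table can lag the main table, so YDB only serves such reads under stale read-only isolation. A minimal sketch, assuming the ydb Python SDK; the endpoint, table, index, and column names are placeholders, not taken from this log:

```python
# Minimal sketch, assuming the ydb Python SDK: reading a secondary
# index created with ASYNC requires a stale read-only transaction;
# the same read under SerializableReadWrite is rejected exactly as
# in the executer errors above. All names here are placeholders.
import ydb

driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")
driver.wait(timeout=5)
pool = ydb.SessionPool(driver)

def read_via_async_index(session):
    query = """
        SELECT Key, Value
        FROM TestTable VIEW Index    -- index table maintained asynchronously
        WHERE IndexedValue = "foo";
    """
    # StaleRO is the isolation level the executer error asks for.
    return session.transaction(ydb.StaleReadOnly()).execute(query, commit_tx=True)

result_sets = pool.retry_operation_sync(read_via_async_index)
pool.stop()
driver.stop()
```
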
2025-03-31T16:31:03.234203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:03.395127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:03.464964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:05.283216Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012049762844245:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:05.283371Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:05.596152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:31:05.630199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:31:05.663764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:31:05.689926Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:31:05.718374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:31:05.763915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:31:05.852931Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012049762844759:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:05.852986Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:05.853388Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012049762844764:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:05.858063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:31:05.871012Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012049762844767:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:31:05.926068Z node 1 :TX_PROXY ERROR: Actor# [1:7488012049762844821:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:31:06.851224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:31:06.948403Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488012032582973279:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:06.948497Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:31:08.941028Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqpgnr705gzdvsp94zc8b2hv, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzVkMGQ1NWMtMWU1Zjk4YzQtM2ZlZDQwOWItODI2NjFmMGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-03-31T16:31:08.959165Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NzVkMGQ1NWMtMWU1Zjk4YzQtM2ZlZDQwOWItODI2NjFmMGI=, ActorId: [1:7488012054057812410:2496], ActorState: ExecuteState, TraceId: 01jqpgnr705gzdvsp94zc8b2hv, Create QueryResponse for error on request, msg: 2025-03-31T16:31:09.373798Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488012066942714897:2584], TxId: 281474976710677, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=NzVkMGQ1NWMtMWU1Zjk4YzQtM2ZlZDQwOWItODI2NjFmMGI=. TraceId : 01jqpgns5be38c99nynd0h2zge. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-03-31T16:31:09.374648Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488012066942714898:2585], TxId: 281474976710677, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=NzVkMGQ1NWMtMWU1Zjk4YzQtM2ZlZDQwOWItODI2NjFmMGI=. TraceId : 01jqpgns5be38c99nynd0h2zge. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7488012066942714894:2496], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-03-31T16:31:09.375037Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NzVkMGQ1NWMtMWU1Zjk4YzQtM2ZlZDQwOWItODI2NjFmMGI=, ActorId: [1:7488012054057812410:2496], ActorState: ExecuteState, TraceId: 01jqpgns5be38c99nynd0h2zge, Create QueryResponse for error on request, msg: 2025-03-31T16:31:10.314173Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqpgnsja4zq5fcv1p7p1gwtv, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzVkMGQ1NWMtMWU1Zjk4YzQtM2ZlZDQwOWItODI2NjFmMGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-03-31T16:31:10.314361Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NzVkMGQ1NWMtMWU1Zjk4YzQtM2ZlZDQwOWItODI2NjFmMGI=, ActorId: [1:7488012054057812410:2496], ActorState: ExecuteState, TraceId: 01jqpgnsja4zq5fcv1p7p1gwtv, Create QueryResponse for error on request, msg: 2025-03-31T16:31:10.358230Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:11.084821Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:11.113559Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:11.141363Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 7192, MsgBus: 16922 2025-03-31T16:31:11.904343Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012075251387756:2217];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:11.908537Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000da6/r3tmp/tmp2BV1UI/pdisk_1.dat 2025-03-31T16:31:12.062189Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:31:12.091672Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:12.091772Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:12.093841Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7192, node 2 2025-03-31T16:31:12.180992Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:12.181025Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:12.181036Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:12.181210Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16922 TClient is connected to server localhost:16922 WaitRootIsUp 'Root'... 
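
Two distinct failure classes appear in this log: "Transaction locks invalidated ... code: 2001" (status ABORTED, an optimistic-lock conflict that is safe to retry) and the "Conflict with existing key., code: 2012" just above (PRECONDITION_FAILED via KIKIMR_CONSTRAINT_VIOLATION, a unique-index violation that re-running the same statement cannot fix). A hedged sketch of how a client would typically separate the two, assuming the ydb Python SDK; the query and names are illustrative:

```python
# Sketch only (ydb Python SDK assumed): retry ABORTED, which is how
# "Transaction locks invalidated ... code: 2001" reaches the client,
# but surface PRECONDITION_FAILED ("Conflict with existing key., code:
# 2012"), since repeating the statement is deterministic failure.
import ydb

def upsert_with_retry(pool, max_attempts=5):
    query = 'UPSERT INTO TestTable (Key, Value) VALUES (1u, "v");'  # placeholder
    session = pool.acquire()
    try:
        for _ in range(max_attempts):
            try:
                return session.transaction(ydb.SerializableReadWrite()).execute(
                    query, commit_tx=True)
            except ydb.issues.Aborted:
                continue  # optimistic-lock conflict: transient, retry
            except ydb.issues.PreconditionFailed:
                raise     # unique-index violation: do not retry
        raise RuntimeError("transaction still aborted after retries")
    finally:
        pool.release(session)
```
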
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:31:12.721145Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:12.736215Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:12.820375Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:13.046262Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:13.131830Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:15.578605Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012092431258576:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:15.578682Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:15.633609Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:31:15.670205Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:31:15.706195Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:31:15.738641Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:31:15.768748Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:31:15.811642Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:31:15.898367Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012092431259089:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:15.898452Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:15.898708Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012092431259094:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:15.902919Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:31:15.911324Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488012092431259096:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:31:16.013717Z node 2 :TX_PROXY ERROR: Actor# [2:7488012096726226450:3451] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:31:16.863194Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488012075251387756:2217];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:16.863284Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:31:16.911710Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:31:17.068563Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-03-31T16:31:17.133760Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-03-31T16:31:17.199731Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill >> KqpUniqueIndex::InsertFkPkOverlap [GOOD] >> KqpIndexes::DoUpsertWithoutIndexUpdate-UniqIndex+UseSink [GOOD] >> KqpIndexes::DoUpsertWithoutIndexUpdate+UniqIndex+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::InsertFkPkOverlap [GOOD] Test command err: Trying to start YDB, gRPC: 11156, MsgBus: 1960 2025-03-31T16:31:05.601732Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012051859717921:2268];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:05.601787Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d94/r3tmp/tmpS5su6L/pdisk_1.dat 2025-03-31T16:31:06.078095Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:31:06.103834Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:06.103941Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:06.105764Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11156, node 1 2025-03-31T16:31:06.187152Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:06.187184Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:06.187191Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:06.187351Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1960 TClient is connected to 
server localhost:1960 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:31:06.741652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:06.764562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:06.901113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:07.075186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:31:07.140025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:31:08.886283Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012064744621387:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:08.886421Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:09.235069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:31:09.270604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:31:09.301436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:31:09.329995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:31:09.365059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:31:09.399678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:31:09.466462Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012069039589194:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:09.466513Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:09.466787Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012069039589199:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:09.470244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:31:09.485561Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012069039589201:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:31:09.561269Z node 1 :TX_PROXY ERROR: Actor# [1:7488012069039589254:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:31:10.450049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:10.608835Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488012051859717921:2268];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:10.609278Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 27137, MsgBus: 16267 2025-03-31T16:31:12.775491Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012081753826167:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:12.775583Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d94/r3tmp/tmp8Tq2Sj/pdisk_1.dat 2025-03-31T16:31:12.878299Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27137, node 2 2025-03-31T16:31:12.944510Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:12.944621Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:12.962799Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:31:12.985282Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:12.985296Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:12.985301Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:12.985380Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16267 TClient is connected to server localhost:16267 WaitRootIsUp 'Root'... 
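
The "Resource pool default not found or you don't have access permissions" warnings that open every test here, followed by "Transaction ... completed, doublechecking" and a TX_PROXY "path exist, request accepts it" issue, are one bootstrap sequence: the workload service looks up `.metadata/workload_manager/pools/default` before anything has created it, schedules the creation, and a concurrent creator that finds the path already present treats that as success. The same idempotent-create pattern in client terms, as a sketch assuming the ydb Python SDK; the DDL text and message matching are assumptions for illustration, not YDB internals:

```python
# Illustrative sketch of "create, and treat 'path exist' as success",
# mirroring the TPoolCreatorActor sequence in the log. The DDL and the
# error-message check are assumptions for the example, not YDB internals.
import ydb

def ensure_exists(session, ddl):
    try:
        session.execute_scheme(ddl)
    except ydb.Error as e:
        # A concurrent creator may have won the race; the server-side
        # analogue is the "path exist, request accepts it" issue above.
        if "path exist" not in str(e):
            raise

# Usage with a hypothetical object:
# ensure_exists(session,
#     "CREATE TABLE `/Root/example` (Key Uint64, PRIMARY KEY (Key));")
```
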
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-31T16:31:13.405352Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:31:13.423944Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:13.507491Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:13.659348Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:13.757809Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:16.101181Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012098933697128:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:16.101255Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:16.149614Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:31:16.180063Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:31:16.249096Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:31:16.283608Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:31:16.351748Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:31:16.398035Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:31:16.447379Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012098933697646:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:16.447479Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:16.447961Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012098933697651:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:16.452428Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:31:16.480327Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488012098933697653:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:31:16.542108Z node 2 :TX_PROXY ERROR: Actor# [2:7488012098933697706:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:31:17.493848Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:17.776136Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488012081753826167:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:17.776189Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_data_erasure/unittest |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_data_erasure/unittest >> TestDataErasure::DataErasureRun3Cycles >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-18 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-19 >> KqpIndexes::SecondaryIndexUpsert2Update [GOOD] >> KqpIndexes::SecondaryIndexUsingInJoin+UseStreamJoin >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-12 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-13 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKTimestamp [GOOD] Test command err: 2025-03-31T16:22:04.615451Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:22:04.708499Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:22:04.733090Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:22:04.733384Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:22:04.741696Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:22:04.741915Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:22:04.742165Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:22:04.742284Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:22:04.742381Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:22:04.742524Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:22:04.742668Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:22:04.742796Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:22:04.742917Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:22:04.743019Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:22:04.743140Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:22:04.743264Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:22:04.777728Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:22:04.777873Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:22:04.777927Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:22:04.778127Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:22:04.778269Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:22:04.778350Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:22:04.778396Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:22:04.778492Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:22:04.778555Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:22:04.778594Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:22:04.778621Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:22:04.778875Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:22:04.778978Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:22:04.779023Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:22:04.779051Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:22:04.779203Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:22:04.779273Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:22:04.779320Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:22:04.779347Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:22:04.779422Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:22:04.779460Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:22:04.779495Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:22:04.779553Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:22:04.779589Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:22:04.779616Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:22:04.780001Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=49; 2025-03-31T16:22:04.780119Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=48; 2025-03-31T16:22:04.780209Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=36; 2025-03-31T16:22:04.780293Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=48; 2025-03-31T16:22:04.780497Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:22:04.780563Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:22:04.780603Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:22:04.780831Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:22:04.780878Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:22:04.780908Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:22:04.781108Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:22:04.781154Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:22:04.781189Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-31T16:22:04.781390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:22:04.781438Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:22:04.781466Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-31T16:22:04.781604Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T16:22:04.781666Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T16:22:04.781715Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:15722:17680];process=Enqueue;ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1503;stage=finished; 2025-03-31T16:31:17.355259Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-03-31T16:31:17.355365Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=14; 2025-03-31T16:31:17.355820Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=372; 2025-03-31T16:31:17.355864Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=447; 2025-03-31T16:31:17.360834Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-03-31T16:31:17.360953Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=16; 2025-03-31T16:31:17.371619Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=10548; 2025-03-31T16:31:17.384398Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=11517; 2025-03-31T16:31:17.384568Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=12836; 2025-03-31T16:31:17.384804Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=144; 2025-03-31T16:31:17.384972Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=104; 2025-03-31T16:31:17.385193Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=165; 2025-03-31T16:31:17.385402Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=145; 2025-03-31T16:31:17.385704Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=242; 2025-03-31T16:31:17.385762Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=24740; 2025-03-31T16:31:17.394457Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-03-31T16:31:17.394584Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=18; 2025-03-31T16:31:17.409907Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=15194; 2025-03-31T16:31:17.468487Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=58414; 2025-03-31T16:31:17.468714Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=53; 2025-03-31T16:31:17.468814Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=37; 2025-03-31T16:31:17.468874Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=11; 2025-03-31T16:31:17.468937Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=13; 2025-03-31T16:31:17.469001Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=12; 2025-03-31T16:31:17.469103Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=57; 2025-03-31T16:31:17.469162Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=10; 2025-03-31T16:31:17.469311Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=101; 2025-03-31T16:31:17.469373Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=8; 2025-03-31T16:31:17.469457Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=38; 2025-03-31T16:31:17.469589Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=85; 2025-03-31T16:31:17.469725Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=86; 2025-03-31T16:31:17.469779Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=75118; 2025-03-31T16:31:17.470042Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=2425692;raw_bytes=4011492;count=1;records=39328} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=111593408;raw_bytes=187115688;count=44;records=1805672} inactive {blob_bytes=178990352;raw_bytes=300417312;count=90;records=2897034} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-03-31T16:31:17.471008Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:15722:17680];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-03-31T16:31:17.471122Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:15722:17680];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-03-31T16:31:17.471227Z node 1 :TX_COLUMNSHARD 
DEBUG: tablet_id=9437184;self_id=[1:15722:17680];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-03-31T16:31:17.471296Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15722:17680];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=0; 2025-03-31T16:31:17.471598Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-31T16:31:17.471702Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:31:17.472000Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=22; 2025-03-31T16:31:17.472095Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2025-03-31T16:31:17.472158Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=22;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-31T16:31:17.472225Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-31T16:31:17.472281Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-31T16:31:17.472416Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-31T16:31:17.477634Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:31:17.486616Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15722:17680];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-03-31T16:31:17.490123Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15722:17680];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-03-31T16:31:17.490200Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
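
The TX_COLUMNSHARD records above trace the shard's background-maintenance loop: each wakeup enqueues background activities, StartCleanup walks the portion set under hard caps (max_portions=1000, max_chunks=500000), and the pass ends with skip_reason=no_changes when nothing is droppable. A minimal Python sketch of that gate, with hypothetical Portion/start_cleanup names (this is illustrative only, not YDB's implementation):

    # Hypothetical sketch of the "skip when no changes" cleanup gate seen in the
    # TX_COLUMNSHARD records above. Names and structure are illustrative only.
    from dataclasses import dataclass

    @dataclass
    class Portion:
        id: int
        chunks: int
        droppable: bool  # e.g. fully behind the cleanup snapshot

    MAX_PORTIONS = 1000   # cap per pass, mirrors max_portions=1000 in the log
    MAX_CHUNKS = 500000   # cap per pass, mirrors max_chunks=500000 in the log

    def start_cleanup(portions):
        """Collect at most MAX_PORTIONS/MAX_CHUNKS droppable portions; report skip otherwise."""
        batch, chunk_budget = [], MAX_CHUNKS
        for p in portions:
            if not p.droppable:
                continue
            if len(batch) >= MAX_PORTIONS or p.chunks > chunk_budget:
                break
            batch.append(p)
            chunk_budget -= p.chunks
        if not batch:
            # Corresponds to "background=cleanup;skip_reason=no_changes" above.
            print("background=cleanup;skip_reason=no_changes")
            return None
        return batch

    start_cleanup([Portion(1, 10, False), Portion(2, 5, False)])  # prints the skip line
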
2025-03-31T16:31:17.490242Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-03-31T16:31:17.490320Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15722:17680];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-31T16:31:17.490435Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15722:17680];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:31:17.490749Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15722:17680];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=22; 2025-03-31T16:31:17.490864Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15722:17680];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2025-03-31T16:31:17.490955Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15722:17680];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=22;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-31T16:31:17.491044Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15722:17680];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-31T16:31:17.491135Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15722:17680];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-31T16:31:17.491259Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15722:17680];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.999000s; 2025-03-31T16:31:17.491334Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15722:17680];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; >> KqpIndexes::MultipleSecondaryIndexWithSameComulns-UseSink [GOOD] |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_data_erasure/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-30 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-31 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-48 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-1 >> KqpIndexes::UpsertWithoutExtraNullDelete-UseSink [GOOD] |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_data_erasure/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-6 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-7 >> TestDataErasure::DataErasureManualLaunch3Cycles ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::MultipleSecondaryIndexWithSameComulns-UseSink [GOOD] Test command err: Trying to start 
YDB, gRPC: 21472, MsgBus: 16670 2025-03-31T16:30:52.347984Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011993651743604:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:52.348077Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000db7/r3tmp/tmpocPVHS/pdisk_1.dat 2025-03-31T16:30:52.781424Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:30:52.796673Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:30:52.796790Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:30:52.798915Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21472, node 1 2025-03-31T16:30:52.867593Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:30:52.867628Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:30:52.867637Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:30:52.867741Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16670 TClient is connected to server localhost:16670 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:30:53.389865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:53.404016Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:30:53.416986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:53.551157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:30:53.724940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:30:53.799675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:30:55.721870Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012006536647247:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:55.721987Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:56.038491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:30:56.069007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:30:56.097033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:30:56.124203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:30:56.155354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:30:56.226236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:30:56.267671Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012010831615058:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:56.267776Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:56.268133Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012010831615063:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:56.271868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:30:56.285802Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012010831615065:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:30:56.364753Z node 1 :TX_PROXY ERROR: Actor# [1:7488012010831615118:3443] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:30:57.348103Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011993651743604:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:57.348195Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:30:57.355616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:30:57.408943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:30:57.446785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 12137, MsgBus: 31305 2025-03-31T16:31:00.020928Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012028263082184:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:00.020987Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000db7/r3tmp/tmpajA5mU/pdisk_1.dat 2025-03-31T16:31:00.198799Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:31:00.211001Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:00.211366Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:00.212997Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12137, node 2 2025-03-31T16:31:00.297936Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:00.297962Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:00.297970Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:00.298099Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31305 TClient is connected to server localhost:31305 WaitRootIsUp 'Root'... 
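
The WARN/ERROR burst around /Root/.metadata/workload_manager/pools/default above is a bootstrap race rather than a failure: the pool fetcher gets NOT_FOUND, a creator proposes the pool, a concurrent creator is absorbed by "path exist, request accepts it", and the loser schedules a retry to double-check. A hedged sketch of that fetch-or-create-then-recheck pattern; the fetch/create callables are hypothetical stand-ins, not the YDB API:

    # Illustrative fetch-or-create with recheck, mirroring the workload-manager
    # bootstrap race in the log above. All callables are hypothetical stand-ins.
    import time

    class NotFound(Exception): pass
    class AlreadyExists(Exception): pass

    def fetch_or_create_pool(fetch, create, retries=3, backoff=0.1):
        for attempt in range(retries):
            try:
                return fetch()                   # "Failed to fetch pool ... NOT_FOUND"
            except NotFound:
                pass
            try:
                create()                         # propose ESchemeOpCreateResourcePool
            except AlreadyExists:
                pass                             # "path exist, request accepts it"
            time.sleep(backoff * (attempt + 1))  # "Scheduled retry ... doublechecking"
        return fetch()                           # final doublecheck; raises if still absent

    # Demo: first fetch misses, create wins, recheck succeeds.
    pool = {"exists": False}
    def fetch():
        if not pool["exists"]:
            raise NotFound()
        return pool
    def create():
        if pool["exists"]:
            raise AlreadyExists()
        pool["exists"] = True

    print(fetch_or_create_pool(fetch, create))
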
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:31:00.818623Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:00.824358Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:31:00.832690Z n ... 1:03.913159Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488012041147986371:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:31:03.972600Z node 2 :TX_PROXY ERROR: Actor# [2:7488012041147986424:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:31:04.915434Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:31:04.995507Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-31T16:31:05.036866Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488012028263082184:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:05.036920Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:31:05.066648Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 14241, MsgBus: 3215 2025-03-31T16:31:08.083663Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488012063289221099:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:08.084628Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000db7/r3tmp/tmpFtKNYr/pdisk_1.dat 2025-03-31T16:31:08.271425Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:31:08.275195Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:08.275304Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:08.277031Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14241, node 3 2025-03-31T16:31:08.403682Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:08.403711Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:08.403721Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:08.403865Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3215 TClient is connected to server localhost:3215 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:31:08.961800Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:08.974351Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:09.065445Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:09.293929Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:31:09.375377Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:31:11.661207Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488012076174124756:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:11.661357Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:11.708420Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:31:11.752083Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:31:11.791836Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:31:11.831549Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:31:11.875708Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:31:11.954375Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:31:12.038723Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488012080469092570:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:12.038808Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:12.038940Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488012080469092575:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:12.044049Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:31:12.058058Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488012080469092577:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:31:12.159401Z node 3 :TX_PROXY ERROR: Actor# [3:7488012080469092634:3457] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:31:13.083978Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488012063289221099:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:13.084045Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:31:13.389551Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:31:14.396736Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:14.413819Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:14.443182Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:15.793521Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:15.805847Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:15.837587Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:16.755180Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:16.773865Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:16.796473Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:17.456955Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:17.522514Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:19.236742Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:19.849970Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:19.879177Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:19.905067Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:20.414488Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:20.433172Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:20.842830Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:20.857700Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:21.352190Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:21.366665Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill >> KqpIndexes::SecondaryIndexReplace-UseSink [GOOD] >> KqpIndexes::ForbidDirectIndexTableCreation [GOOD] >> KqpIndexes::DuplicateUpsertInterleaveParams-UseSink >> TestDataErasure::SimpleDataErasureTest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::UpsertWithoutExtraNullDelete-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 9831, MsgBus: 14032 
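
The [GOOD] verdicts above come from KqpIndexes tests that exercise secondary-index maintenance on writes; UpsertWithoutExtraNullDelete specifically checks that no redundant index-row delete is issued when there is no previous indexed value. A toy sketch of that bookkeeping over plain dicts, purely illustrative of the invariant and not KQP's actual execution plan:

    # Toy model of secondary-index maintenance on upsert: delete the old index
    # row only when an old value actually exists. Plain dicts, illustration only.
    table = {}   # pk -> row dict
    index = {}   # indexed value -> set of pks

    def upsert(pk, row, indexed_col):
        old = table.get(pk)
        new_val = row.get(indexed_col)
        if old is not None:
            old_val = old.get(indexed_col)
            if old_val is not None and old_val != new_val:
                index[old_val].discard(pk)   # delete the old index row
                if not index[old_val]:
                    del index[old_val]
        if new_val is not None:
            index.setdefault(new_val, set()).add(pk)
        table[pk] = row

    upsert(1, {"name": "a"}, "name")
    upsert(1, {"name": "b"}, "name")    # old row exists: index row "a" deleted, "b" added
    upsert(2, {"name": None}, "name")   # no previous value: no extra index delete issued
    assert index == {"b": {1}}
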
2025-03-31T16:31:06.937593Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012054764037185:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:06.937657Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d81/r3tmp/tmpzCHCH7/pdisk_1.dat 2025-03-31T16:31:07.350427Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:31:07.398035Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:07.398139Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 9831, node 1 2025-03-31T16:31:07.402367Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:31:07.452976Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:07.453004Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:07.453022Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:07.453138Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14032 TClient is connected to server localhost:14032 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:31:08.150534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:08.211506Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:31:08.232438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:08.377437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:31:08.557591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:08.642195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:10.359669Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012071943908133:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:10.359783Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:10.676111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:31:10.745957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:31:10.773337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:31:10.845856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:31:10.879727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:31:10.930358Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:31:11.006932Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012076238875946:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:11.007123Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:11.007419Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012076238875952:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:11.011327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:31:11.024503Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012076238875954:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:31:11.120007Z node 1 :TX_PROXY ERROR: Actor# [1:7488012076238876010:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:31:11.937570Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488012054764037185:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:11.937627Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:31:11.976871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:31:12.732433Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:13.662196Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:13.979813Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:14.215395Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 23704, MsgBus: 15055 2025-03-31T16:31:15.024851Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012093302541670:2138];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:15.025363Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d81/r3tmp/tmp6hZnz8/pdisk_1.dat 2025-03-31T16:31:15.123964Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:31:15.144739Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:15.144817Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:15.148217Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23704, node 2 2025-03-31T16:31:15.189196Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:15.189222Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:15.189230Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:15.189365Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15055 TClient is connected to server localhost:15055 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:31:15.667001Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:15.672681Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:31:15.680294Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:15.760041Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:15.917065Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:16.002936Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:18.184116Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012106187445233:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:18.184201Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:18.226252Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:31:18.263168Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:31:18.302526Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:31:18.330643Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:31:18.363168Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:31:18.403171Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:31:18.489311Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012106187445746:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:18.489431Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:18.489761Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012106187445751:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:18.493784Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:31:18.509579Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488012106187445753:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:31:18.581391Z node 2 :TX_PROXY ERROR: Actor# [2:7488012106187445808:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:31:19.540994Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:31:20.025178Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488012093302541670:2138];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:20.025242Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:31:20.281888Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:21.484187Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:22.109901Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_data_erasure/unittest >> KqpUniqueIndex::InsertNullInComplexFkDuplicate [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-36 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-37 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-36 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-55 |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_data_erasure/unittest >> KqpIndexes::DuplicateUpsertInterleave [GOOD] >> KqpIndexes::DuplicateUpsertInterleaveParams+UseSink >> KqpIndexes::SecondaryIndexUsingInJoin2+UseStreamJoin [GOOD] >> KqpIndexes::SecondaryIndexUsingInJoin2-UseStreamJoin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::SecondaryIndexReplace-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 11269, MsgBus: 26996 2025-03-31T16:31:03.963665Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012043078893871:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:03.963853Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000da1/r3tmp/tmpAvGrDm/pdisk_1.dat 2025-03-31T16:31:04.346612Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11269, node 1 2025-03-31T16:31:04.366798Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:04.366953Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:04.368892Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:31:04.422112Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2025-03-31T16:31:04.422143Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:04.422151Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:04.422294Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26996 TClient is connected to server localhost:26996 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:31:04.950325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:04.973658Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:31:04.990724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:05.182692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:05.353867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:31:05.434164Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:31:07.216255Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012060258764857:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:07.216406Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:07.483810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:31:07.556649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:31:07.597101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:31:07.633140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:31:07.675981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:31:07.745395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:31:07.793903Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012060258765372:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:07.793977Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:07.794158Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012060258765377:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:07.797541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:31:07.810581Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012060258765379:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:31:07.874591Z node 1 :TX_PROXY ERROR: Actor# [1:7488012060258765431:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:31:08.706245Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:31:08.963354Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488012043078893871:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:08.963465Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:31:09.278913Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:10.054336Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 8185, MsgBus: 8840 2025-03-31T16:31:10.813332Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012071192584743:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:10.813374Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000da1/r3tmp/tmpWdHe99/pdisk_1.dat 2025-03-31T16:31:10.934892Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:31:10.945897Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:10.945997Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:10.947932Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8185, node 2 2025-03-31T16:31:10.995863Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:10.995884Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:10.995892Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:10.996004Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8840 TClient is connected to server localhost:8840 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:31:11.386873Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:11.396982Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:11.460001Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 720 ... osed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:31:14.105624Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:31:14.135604Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:31:14.161648Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:31:14.194309Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:31:14.250388Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012088372456209:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:14.250479Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:14.250651Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012088372456214:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:14.253395Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:31:14.261863Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488012088372456216:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:31:14.334511Z node 2 :TX_PROXY ERROR: Actor# [2:7488012088372456270:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:31:15.266069Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:31:15.813421Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488012071192584743:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:15.813493Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 7435, MsgBus: 15819 2025-03-31T16:31:16.535215Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488012096848632535:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:16.535294Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000da1/r3tmp/tmpyzwKq7/pdisk_1.dat 2025-03-31T16:31:16.665806Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7435, node 3 2025-03-31T16:31:16.679451Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:16.679548Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:16.681961Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:31:16.727293Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:16.727313Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:16.727319Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:16.727400Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15819 TClient is connected to server localhost:15819 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:31:17.174073Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:17.179602Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:31:17.193502Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:17.271426Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:17.434529Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:17.516322Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:19.868899Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488012109733536179:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:19.869009Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:19.937808Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:31:19.978430Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:31:20.055278Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:31:20.105582Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:31:20.146284Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:31:20.204606Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:31:20.266245Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488012114028503989:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:20.266327Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:20.266384Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488012114028503995:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:20.269867Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:31:20.279728Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488012114028503997:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:31:20.377929Z node 3 :TX_PROXY ERROR: Actor# [3:7488012114028504052:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:31:21.473854Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:31:21.535250Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488012096848632535:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:21.535319Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:31:22.244622Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:22.970880Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-42 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-43 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-54 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-55 >> KqpIndexes::SecondaryIndexWithPrimaryKeySameComulns+UseSink [GOOD] >> KqpIndexes::SecondaryIndexWithPrimaryKeySameComulns-UseSink >> KqpIndexes::DeleteByIndex [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::InsertNullInComplexFkDuplicate [GOOD] Test command err: Trying to start YDB, gRPC: 8641, MsgBus: 7582 2025-03-31T16:31:10.818184Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012070515666952:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:10.818617Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d61/r3tmp/tmpNwQAnq/pdisk_1.dat 2025-03-31T16:31:11.256212Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:11.256312Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:11.258772Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:31:11.276242Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8641, node 1 2025-03-31T16:31:11.366547Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:11.366580Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:11.366586Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:11.366675Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7582 TClient is connected to server localhost:7582 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:31:11.873002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:11.886386Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:31:11.893460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:12.026047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:12.214557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:12.301957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:14.053150Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012087695537927:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:14.053319Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:14.354174Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:31:14.384502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:31:14.411978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:31:14.437247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:31:14.468416Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:31:14.541429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:31:14.591371Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012087695538440:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:14.591449Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:14.591746Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012087695538445:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:14.595199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:31:14.605996Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012087695538447:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:31:14.689958Z node 1 :TX_PROXY ERROR: Actor# [1:7488012087695538500:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:31:15.538043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:15.815758Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488012070515666952:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:15.815810Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 20152, MsgBus: 18185 2025-03-31T16:31:18.425533Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012107012478321:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:18.425596Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d61/r3tmp/tmpdStJTC/pdisk_1.dat 2025-03-31T16:31:18.566876Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:31:18.581452Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:18.581541Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:18.586458Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20152, node 2 2025-03-31T16:31:18.644098Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:18.644121Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:18.644129Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:18.644237Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18185 TClient is connected to server localhost:18185 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:31:19.088548Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:19.109677Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:31:19.194266Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-31T16:31:19.354422Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:19.438618Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:21.619395Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012119897381973:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:21.619476Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:21.661370Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:31:21.696289Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:31:21.735157Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:31:21.773257Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:31:21.805354Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:31:21.845094Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:31:21.931364Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012119897382487:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:21.931435Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:21.931745Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012119897382492:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:21.934743Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:31:21.945041Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488012119897382494:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:31:22.005172Z node 2 :TX_PROXY ERROR: Actor# [2:7488012124192349843:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:31:22.882723Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:23.424599Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488012107012478321:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:23.424650Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpUniqueIndex::InsertComplexFkPkOverlapDuplicate [GOOD] >> TestDataErasure::DataErasureManualLaunch ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::DeleteByIndex [GOOD] Test command err: Trying to start YDB, gRPC: 64795, MsgBus: 65302 2025-03-31T16:31:05.180149Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012051572199918:2197];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:05.180758Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d9d/r3tmp/tmpV1VMVZ/pdisk_1.dat 2025-03-31T16:31:05.597395Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:31:05.612125Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:05.612247Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:05.616645Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64795, node 1 2025-03-31T16:31:05.703709Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:05.703734Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:05.703740Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:05.703858Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65302 TClient is connected to server localhost:65302 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:31:06.294733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:06.315478Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:06.329978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:31:06.497288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:06.665896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:06.741334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:08.521931Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012064457103448:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:08.522562Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:08.906103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:31:08.945147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:31:08.976841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:31:09.009305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:31:09.040017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:31:09.071774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:31:09.123577Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012068752071251:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:09.123691Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:09.123907Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012068752071256:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:09.128400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:31:09.142369Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012068752071258:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:31:09.223359Z node 1 :TX_PROXY ERROR: Actor# [1:7488012068752071314:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:31:10.129238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:31:10.171765Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488012051572199918:2197];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:10.171839Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:31:10.257863Z node 1 :TX_PROXY ERROR: Actor# [1:7488012073047039184:3807] txid# 281474976710672, issues: { message: "Check failed: path: \'/Root/TestTable\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:31:11.119364Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:11.165170Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 27014, MsgBus: 14466 2025-03-31T16:31:12.035909Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012081326736705:2074];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d9d/r3tmp/tmpiV5YW6/pdisk_1.dat 2025-03-31T16:31:12.085296Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:31:12.183673Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:31:12.187504Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:12.187593Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:12.190051Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27014, node 2 2025-03-31T16:31:12.234591Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:12.234615Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:12.234623Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:12.234739Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14466 TClient is connected to server localhost:14466 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:31:12.564088Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:12.579831Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo ... 057594046644480 2025-03-31T16:31:15.142291Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:31:15.214063Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:31:15.249039Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:31:15.288651Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:31:15.351391Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012094211640868:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:15.351476Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:15.351633Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012094211640873:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:15.355317Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:31:15.365031Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488012094211640875:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:31:15.450803Z node 2 :TX_PROXY ERROR: Actor# [2:7488012094211640929:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:31:16.332053Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:31:16.414024Z node 2 :TX_PROXY ERROR: Actor# [2:7488012098506608778:3795] txid# 281474976715672, issues: { message: "Check failed: path: \'/Root/TestTable\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:31:17.035156Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488012081326736705:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:17.035218Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 3012, MsgBus: 32065 2025-03-31T16:31:18.080571Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488012107107366629:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:18.080705Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d9d/r3tmp/tmpabELE4/pdisk_1.dat 2025-03-31T16:31:18.208585Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3012, node 3 2025-03-31T16:31:18.243243Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:18.243316Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:18.267888Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:31:18.311692Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:18.311730Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:18.311741Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:18.311880Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32065 TClient is connected to server localhost:32065 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:31:18.753036Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:18.760124Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:31:18.766477Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:31:18.839634Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-31T16:31:19.011384Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:19.098531Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:21.540158Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488012119992270275:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:21.540275Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:21.590019Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:31:21.642416Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:31:21.671404Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:31:21.704373Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:31:21.741661Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:31:21.780722Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:31:21.843132Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488012119992270786:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:21.843211Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:21.843434Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488012119992270791:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:21.847439Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:31:21.857931Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488012119992270793:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:31:21.910658Z node 3 :TX_PROXY ERROR: Actor# [3:7488012119992270846:3441] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:31:22.888185Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:31:23.080794Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488012107107366629:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:23.080864Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::InsertComplexFkPkOverlapDuplicate [GOOD] Test command err: Trying to start YDB, gRPC: 1571, MsgBus: 10527 2025-03-31T16:31:12.162452Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012082216371194:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:12.162647Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d4f/r3tmp/tmpddzaNR/pdisk_1.dat TServer::EnableGrpc on GrpcPort 1571, node 1 2025-03-31T16:31:12.595623Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:31:12.601352Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:12.601435Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:12.605033Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:31:12.652592Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:12.652614Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:12.652620Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:12.652722Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10527 TClient is connected to server localhost:10527 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:31:13.232974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:13.252788Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:31:13.269165Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:13.428445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:31:13.605820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:31:13.677649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:15.440495Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012095101274848:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:15.440641Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:15.744825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:31:15.781619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:31:15.816983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:31:15.893260Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:31:15.918791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:31:15.951887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:31:16.032549Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012099396242662:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:16.032627Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:16.032632Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012099396242667:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:16.035911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:31:16.047038Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012099396242669:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:31:16.104477Z node 1 :TX_PROXY ERROR: Actor# [1:7488012099396242722:3444] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:31:16.990327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:17.200385Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488012082216371194:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:17.202891Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:31:17.858689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710674:1, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 12320, MsgBus: 63105 2025-03-31T16:31:19.076922Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012111189871009:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:19.076970Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d4f/r3tmp/tmpcCpRnw/pdisk_1.dat 2025-03-31T16:31:19.233905Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:31:19.259702Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:19.259793Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:19.262063Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12320, node 2 2025-03-31T16:31:19.324803Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:19.324824Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:19.324836Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:19.324968Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63105 TClient is connected to server localhost:63105 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:31:19.885338Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:19.891894Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:31:19.901847Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:19.969134Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:20.139062Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:20.218995Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:22.311698Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012124074774649:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:22.311868Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:22.373260Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:31:22.414204Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:31:22.447646Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:31:22.481936Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:31:22.512228Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:31:22.588433Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:31:22.651239Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012124074775165:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:22.651323Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:22.651410Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012124074775170:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:22.655959Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:31:22.666754Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488012124074775172:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:31:22.733564Z node 2 :TX_PROXY ERROR: Actor# [2:7488012124074775224:3439] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:31:23.618996Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:24.078820Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488012111189871009:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:24.078901Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-13 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-14 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-19 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-20 >> KqpIndexes::JoinWithNonPKColumnsInPredicate+UseStreamJoin [GOOD] >> KqpIndexes::JoinWithNonPKColumnsInPredicate-UseStreamJoin >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-31 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-32 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-1 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-2 >> TestDataErasure::SimpleDataErasureTest [GOOD] |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::SingleChannel >> KqpIndexes::DoUpsertWithoutIndexUpdate+UniqIndex+UseSink [GOOD] >> ExternalBlobsMultipleChannels::WithCompaction >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-7 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-8 >> TestDataErasure::DataErasureManualLaunch3Cycles [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_data_erasure/unittest >> TestDataErasure::SimpleDataErasureTest [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:31:24.597388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:31:24.597559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:31:24.597599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 
2025-03-31T16:31:24.597654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:31:24.597702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:31:24.597749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:31:24.597817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:31:24.597905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:31:24.598269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:31:24.693304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:31:24.693363Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:31:24.698217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:31:24.698309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:31:24.698452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:31:24.703602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:31:24.703788Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:31:24.704431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:31:24.704644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:31:24.707216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:31:24.708362Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:31:24.708431Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:31:24.708597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:31:24.708644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:31:24.708681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:31:24.708857Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:31:24.716278Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:31:24.854340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:31:24.854535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, 
path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:31:24.854709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:31:24.854860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:31:24.854895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:31:24.857385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:31:24.857490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:31:24.857645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:31:24.857702Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:31:24.857728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:31:24.857750Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:31:24.859468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:31:24.859519Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:31:24.859545Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:31:24.861123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:31:24.861174Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:31:24.861211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:31:24.861246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:31:24.864419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:31:24.866062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:31:24.866225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:31:24.867039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at 
schemeshard: 72057594046678944 2025-03-31T16:31:24.867160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:31:24.867202Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:31:24.867443Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:31:24.867482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:31:24.867636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:31:24.867728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:31:24.869921Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:31:24.869966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:31:24.870144Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:31:24.870207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:31:24.870428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:31:24.870491Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:31:24.870582Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:31:24.870618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:31:24.870645Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:31:24.870667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:31:24.870693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:31:24.870719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:31:24.870755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:31:24.870780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:31:24.870843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:31:24.870876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:31:24.870906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:31:24.872796Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 
LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:31:24.872931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:31:24.872972Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:31:27.586104Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-03-31T16:31:27.586241Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877760, Sender [1:1983:3654], Recipient [1:830:2714]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046678944 Status: OK ServerId: [1:1984:3655] Leader: 1 Dead: 0 Generation: 3 VersionInfo: } 2025-03-31T16:31:27.586275Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-03-31T16:31:27.586324Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Handle TEvClientConnected, tabletId: 72057594046678944, status: OK, at schemeshard: 72075186233409551 2025-03-31T16:31:27.586455Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125514, Sender [1:830:2714], Recipient [1:290:2274]: NKikimrScheme.TEvTenantDataErasureResponse PathId { OwnerId: 72057594046678944 LocalId: 3 } Generation: 1 Status: COMPLETED 2025-03-31T16:31:27.586504Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvTenantDataErasureResponse 2025-03-31T16:31:27.586555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureTenant Execute at schemeshard: 72057594046678944 2025-03-31T16:31:27.586598Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] [Finished] Data erasure completed for pathId# [OwnerId: 72057594046678944, LocalPathId: 3] in# 62 ms, next wakeup# 599.938000s, rate# 0, in queue# 0 tenants, running# 0 tenants at schemeshard 72057594046678944 2025-03-31T16:31:27.586671Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Data erasure in tenants is completed. 
Send request to BS controller 2025-03-31T16:31:27.588109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureTenant Complete at schemeshard: 72057594046678944, NeedSendRequestToBSC# true 2025-03-31T16:31:27.588153Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] SendRequestToBSC: Generation# 1 2025-03-31T16:31:27.588481Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877760, Sender [1:1988:3659], Recipient [1:290:2274]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037932033 Status: OK ServerId: [1:1989:3660] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-03-31T16:31:27.588503Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-03-31T16:31:27.588536Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Handle TEvClientConnected, tabletId: 72057594037932033, status: OK, at schemeshard: 72057594046678944 2025-03-31T16:31:27.588693Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268637738, Sender [1:293:2276], Recipient [1:290:2274]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: false Progress10k: 0 2025-03-31T16:31:27.588740Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-03-31T16:31:27.588796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-03-31T16:31:27.588869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-03-31T16:31:27.588903Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxCompleteDataErasureBSC: Progress data shred in BSC 0 2025-03-31T16:31:27.588988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2025-03-31T16:31:27.589039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [RootDataErasureManager] ScheduleRequestToBSC: Interval# 1.000000s 2025-03-31T16:31:28.019980Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:453:2406]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:31:28.020094Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:31:28.020195Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:830:2714]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:31:28.020224Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:31:28.020279Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:31:28.020304Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:31:28.020369Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:830:2714], Recipient [1:830:2714]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:31:28.020393Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:31:28.020458Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:290:2274], Recipient [1:290:2274]: 
NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:31:28.020474Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:31:28.020518Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:453:2406], Recipient [1:453:2406]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:31:28.020546Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:31:28.072869Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-03-31T16:31:28.072955Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-03-31T16:31:28.073022Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] SendRequestToBSC: Generation# 1 2025-03-31T16:31:28.073263Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268637738, Sender [1:293:2276], Recipient [1:290:2274]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: false Progress10k: 5000 2025-03-31T16:31:28.073305Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-03-31T16:31:28.073339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-03-31T16:31:28.073405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-03-31T16:31:28.073443Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxCompleteDataErasureBSC: Progress data shred in BSC 5000 2025-03-31T16:31:28.073507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2025-03-31T16:31:28.073556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [RootDataErasureManager] ScheduleRequestToBSC: Interval# 1.000000s 2025-03-31T16:31:28.434854Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:31:28.434945Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:31:28.435012Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:453:2406]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:31:28.435036Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:31:28.435116Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:830:2714]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:31:28.435155Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:31:28.435235Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:453:2406], Recipient [1:453:2406]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:31:28.435260Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:31:28.435342Z node 1 :FLAT_TX_SCHEMESHARD TRACE: 
StateWork, received event# 271124999, Sender [1:830:2714], Recipient [1:830:2714]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:31:28.435370Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:31:28.435435Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:290:2274], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:31:28.435458Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:31:28.487194Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-03-31T16:31:28.487268Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-03-31T16:31:28.487312Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] SendRequestToBSC: Generation# 1 2025-03-31T16:31:28.487538Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268637738, Sender [1:293:2276], Recipient [1:290:2274]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: true Progress10k: 10000 2025-03-31T16:31:28.487573Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-03-31T16:31:28.487602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-03-31T16:31:28.487670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-03-31T16:31:28.487708Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxCompleteDataErasureBSC: Data shred in BSC is completed 2025-03-31T16:31:28.487776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [RootDataErasureManager] ScheduleDataErasureWakeup: Interval# 0.937000s 2025-03-31T16:31:28.487842Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Complete: Generation# 1, duration# 2 s 2025-03-31T16:31:28.500184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2025-03-31T16:31:28.500903Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:2010:3681], Recipient [1:290:2274]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:31:28.500973Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:31:28.501017Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-03-31T16:31:28.501197Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125519, Sender [1:272:2263], Recipient [1:290:2274]: NKikimrScheme.TEvDataErasureInfoRequest 2025-03-31T16:31:28.501233Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDataErasureInfoRequest 2025-03-31T16:31:28.501280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvDataErasureInfoRequest, at schemeshard: 72057594046678944 >> KqpIndexes::SecondaryIndexUsingInJoin+UseStreamJoin [GOOD] >> TestDataErasure::DataErasureRun3Cycles [GOOD] >> TestDataErasure::DataErasureManualLaunch [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::DoUpsertWithoutIndexUpdate+UniqIndex+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 18835, MsgBus: 63993 
2025-03-31T16:31:05.988606Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012049574918363:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:05.988663Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d90/r3tmp/tmpaUpjIw/pdisk_1.dat 2025-03-31T16:31:06.437817Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18835, node 1 2025-03-31T16:31:06.473258Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:06.473371Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:06.481158Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:31:06.542567Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:06.542588Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:06.542594Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:06.542715Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63993 TClient is connected to server localhost:63993 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:31:07.155938Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:31:07.189041Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:31:07.326188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:07.507868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:31:07.597566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:09.485773Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012066754789325:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:09.485875Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:09.800915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:31:09.830953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:31:09.860407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:31:09.893647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:31:09.930547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:31:09.970107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:31:10.035634Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012071049757132:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:10.035829Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:10.036193Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012071049757137:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:10.039947Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:31:10.050304Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012071049757139:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:31:10.154826Z node 1 :TX_PROXY ERROR: Actor# [1:7488012071049757194:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:31:10.991202Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488012049574918363:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:10.991306Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:31:11.265930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 query_phases { duration_us: 502 cpu_time_us: 502 } query_phases { duration_us: 1464 cpu_time_us: 1464 } query_phases { duration_us: 6159 table_access { name: "/Root/TestTable" partitions_count: 1 } cpu_time_us: 5904 affected_shards: 1 } query_phases { duration_us: 1074 cpu_time_us: 1074 } query_phases { duration_us: 5895 cpu_time_us: 5895 } query_phases { duration_us: 2582 table_access { name: "/Root/TestTable/Index/indexImplTable" } cpu_time_us: 3487 } query_phases { duration_us: 2644 cpu_time_us: 2644 } query_phases { duration_us: 6015 cpu_time_us: 6015 } query_phases { duration_us: 3769 cpu_time_us: 5347 } query_phases { duration_us: 6942 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 31 } partitions_count: 1 } table_access { name: "/Root/TestTable/Index/indexImplTable" updates { rows: 1 bytes: 24 } partitions_count: 1 } cpu_time_us: 3528 affected_shards: 2 } compilation { duration_us: 883171 cpu_time_us: 867715 } process_cpu_time_us: 19508 query_plan: "{\"Plan\":{\"Plans\":[{\"PlanNodeId\":46,\"Plans\":[{\"Tables\":[\"TestTable\"],\"PlanNodeId\":45,\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Path\":\"\\/Root\\/TestTable\",\"Name\":\"Upsert\",\"Table\":\"TestTable\"},{\"Inputs\":[],\"Iterator\":\"precompute_7_1\",\"Name\":\"Iterator\"}],\"Node Type\":\"Upsert-ConstantExpr\",\"Stats\":{\"ComputeNodes\":[{\"Tasks\":[{\"NodeId\":1,\"FinishTimeMs\":1743438672255,\"TaskId\":1,\"Host\":\"ghrun-tuhoqst4xe\",\"ComputeTimeUs\":106}],\"CpuTimeUs\":645}],\"UseLlvm\":\"undefined\",\"Tasks\":1,\"FinishedTasks\":0,\"PhysicalStageId\":0,\"StageDurationUs\":0,\"Table\":[{\"Path\":\"\\/Root\\/TestTable\"}],\"BaseTimeMs\":1743438672255,\"NodesScanShards\":[],\"CpuTimeUs\":{\"Count\":1,\"Sum\":645,\"Max\":645,\"Min\":645}},\"CTE Name\":\"precompute_7_1\"}],\"Node Type\":\"Effect\"},{\"PlanNodeId\":44,\"Plans\":[{\"Tables\":[\"TestTable\\/Index\\/indexImplTable\"],\"PlanNodeId\":43,\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Path\":\"\\/Root\\/TestTable\\/Index\\/indexImplTable\",\"Name\":\"Delete\",\"Table\":\"TestTable\\/Index\\/indexImplTable\"},{\"Inputs\":[],\"Iterator\":\"precompute_8_1\",\"Name\":\"Iterator\"}],\"Node Type\":\"Delete-ConstantExpr\",\"Stats\":{\"StageDurationUs\":0,\"PhysicalStageId\":1,\"BaseTimeMs\":1743438672255,\"FinishedTasks\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"},\"CTE Name\":\"precompute_8_1\"}],\"Node 
Type\":\"Effect\"},{\"PlanNodeId\":42,\"Plans\":[{\"Tables\":[\"TestTable\\/Index\\/indexImplTable\"],\"PlanNodeId\":41,\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Path\":\"\\/Root\\/TestTable\\/Index\\/indexImplTable\",\"Name\":\"Upsert\",\"Table\":\"TestTable\\/Index\\/indexImplTable\"},{\"Inputs\":[],\"Iterator\":\"precompute_8_0\",\"Name\":\"Iterator\"}],\"Node Type\":\"Upsert-ConstantExpr\",\"Stats\":{\"ComputeNodes\":[{\"Tasks\":[{\"NodeId\":1,\"FinishTimeMs\":1743438672256,\"TaskId\":2,\"Host\":\"ghrun-tuhoqst4xe\",\"ComputeTimeUs\":70}],\"CpuTimeUs\":452}],\"UseLlvm\":\"undefined\",\"Tasks\":1,\"FinishedTasks\":0,\"PhysicalStageId\":2,\"StageDurationUs\":0,\"Table\":[{\"Path\":\"\\/Root\\/TestTable\\/Index\\/indexImplTable\"}],\"BaseTimeMs\":1743438672255,\"NodesScanShards\":[],\"CpuTimeUs\":{\"Count\":1,\"Sum\":452,\"Max\":452,\"Min\":452}},\"CTE Name\":\"precompute_8_0\"}],\"Node Type\":\"Effect\"},{\"PlanNodeId\":39,\"Subplan Name\":\"CTE precompute_8_0\",\"Plans\":[{\"PlanNodeId\":38,\"Plans\":[{\"PlanNodeId\":37,\"Plans\":[{\"PlanNodeId\":36,\"Operators\":[{\"Inputs\":[{\"Other\":\"ConstantExpression\"},{\"Other\":\"ConstantExpression\"},{\"Other\":\"ConstantExpression\"},{\"Other\":\"ConstantEx ... 147 \'\"fk1\")) \'(\'\"fk3\" (Member $147 \'\"fk3\"))))) \'0 $138))\n (let $140 (OrderedFilter $137 (lambda \'($148) (And (Exists (Member $148 \'\"fk1\")) (Exists (Member $148 \'\"fk2\")) (Exists (Member $148 \'\"fk3\"))))))\n (let $141 (lambda \'($150) (Void)))\n (let $142 (ToDict $140 (lambda \'($149) (AsStruct \'(\'\"fk1\" (Member $149 \'\"fk1\")) \'(\'\"fk2\" (Member $149 \'\"fk2\")) \'(\'\"fk3\" (Member $149 \'\"fk3\")))) $141 $35))\n (let $143 (Variant (DictKeys $142) \'1 $138))\n (let $144 (Variant (== (Length $140) (Length $142)) \'2 $138))\n (let $145 (ToDict $137 (lambda \'($151) (AsStruct \'(\'\"Key\" (Member $151 \'\"Key\")))) $141 $35))\n (let $146 (Variant $145 \'\"3\" $138))\n (return (Iterator (AsList $139 $143 $144 $146)))\n))) \'(\'(\'\"_logical_id\" \'4308) \'(\'\"_id\" \'\"e7e02225-dd9249ce-798ce7da-c91393c2\"))))\n(let $59 (DqCnValue (TDqOutput $58 \'\"3\")))\n(let $60 (DqCnValue (TDqOutput $58 \'0)))\n(let $61 (DqCnValue (TDqOutput $58 \'2)))\n(let $62 (DqCnValue (TDqOutput $58 \'1)))\n(let $63 \'($59 $60 $61 $62))\n(let $64 (KqpTxResultBinding $54 \'\"3\" \'0))\n(let $65 (KqpPhysicalTx \'($58) $63 \'(\'($53 $64)) $3))\n(let $66 \'\"%kqp%tx_result_binding_4_3\")\n(let $67 (DqPhyStage \'() (lambda \'() (Iterator %kqp%tx_result_binding_4_3)) \'(\'(\'\"_logical_id\" \'4703) \'(\'\"_id\" \'\"e747f59-ea6e4956-7981f7f6-df4652ea\"))))\n(let $68 (KqpTable \'\"/Root/TestTable/Index/indexImplTable\" \'\"72057594046644480:18\" \'\"\" \'1))\n(let $69 (KqpCnStreamLookup (TDqOutput $67 \'0) $68 \'(\'\"Key\") $55 $24))\n(let $70 \'\"%kqp%tx_result_binding_4_0\")\n(let $71 (Bool \'false))\n(let $72 (DqPhyStage \'($69) (lambda \'($152) (Map (Filter (Take $152 (Uint64 \'1)) (lambda \'($153) (Not (Contains %kqp%tx_result_binding_4_0 $153)))) (lambda \'($154) $71))) \'(\'(\'\"_logical_id\" \'4691) \'(\'\"_id\" \'\"86f7f94c-1bcf34ab-682d4b53-bad1f607\"))))\n(let $73 (DqCnUnionAll (TDqOutput $72 \'0)))\n(let $74 (Bool \'true))\n(let $75 (DqPhyStage \'($73) (lambda \'($155) (block \'(\n (let $156 (lambda \'($157 $158) $71))\n (return (FromFlow (Condense (ToFlow $155) $74 $156 $156)))\n))) \'(\'(\'\"_logical_id\" \'4671) \'(\'\"_id\" \'\"78644a6a-69cb6ab4-555d9060-247d5a76\"))))\n(let $76 \'($67 $72 $75))\n(let $77 (DqCnValue (TDqOutput $75 \'0)))\n(let $78 
(KqpTxResultBinding $57 \'\"4\" \'0))\n(let $79 (KqpTxResultBinding $55 \'\"4\" \'\"3\"))\n(let $80 (KqpPhysicalTx $76 \'($77) \'(\'($70 $78) \'($66 $79)) $41))\n(let $81 \'\"%kqp%tx_result_binding_4_2\")\n(let $82 \'\"%kqp%tx_result_binding_5_0\")\n(let $83 \'\"%kqp%tx_result_binding_4_1\")\n(let $84 (DqPhyStage \'() (lambda \'() (block \'(\n (let $159 (KqpEnsure $74 %kqp%tx_result_binding_4_2 \'\"2012\" (Utf8 \'\"Duplicated keys found.\")))\n (let $160 (KqpEnsure $74 %kqp%tx_result_binding_5_0 \'\"2012\" (Utf8 \'\"Conflict with existing key.\")))\n (let $161 (If (And $159 $160) %kqp%tx_result_binding_4_1 (List $11)))\n (return (ToStream (Just (PartitionByKey $161 (lambda \'($162) (Member $162 \'\"Key\")) (Void) (Void) (lambda \'($163) (FlatMap $163 (lambda \'($164) (Last (ForwardList (Nth $164 \'1))))))))))\n))) \'(\'(\'\"_logical_id\" \'5033) \'(\'\"_id\" \'\"ffc2131c-23035a06-6103d57b-2ffb4683\"))))\n(let $85 (DqCnValue (TDqOutput $84 \'0)))\n(let $86 (KqpTxResultBinding $11 \'\"4\" \'1))\n(let $87 (KqpTxResultBinding $56 \'\"4\" \'2))\n(let $88 (KqpTxResultBinding $56 \'\"5\" \'0))\n(let $89 \'(\'($83 $86) \'($81 $87) \'($82 $88)))\n(let $90 (KqpPhysicalTx \'($84) \'($85) $89 $3))\n(let $91 \'\"%kqp%tx_result_binding_6_0\")\n(let $92 %kqp%tx_result_binding_6_0)\n(let $93 (DqPhyStage \'() (lambda \'() (Iterator (AsList (ToDict (FlatMap (Map $92 (lambda \'($165) (AsStruct \'(\'\"Key\" (Member $165 \'\"Key\")) \'(\'\"fk1\" (Member $165 \'\"fk1\")) \'(\'\"fk3\" (Member $165 \'\"fk3\"))))) (lambda \'($166) (block \'(\n (let $167 (AsStruct \'(\'\"Key\" (Member $166 \'\"Key\"))))\n (return (IfPresent (Lookup $46 $167) (lambda \'($168) (Just \'($167 $168 (Or (AggrNotEquals (Member $166 \'\"fk1\") (Member $168 \'\"fk1\")) (AggrNotEquals (Member $166 \'\"fk3\") (Member $168 \'\"fk3\")))))) (Nothing (OptionalType (TupleType $19 $44 $56)))))\n)))) (lambda \'($169) (Nth $169 \'0)) (lambda \'($170) \'((Nth $170 \'1) (Nth $170 \'2))) $35)))) \'(\'(\'\"_logical_id\" \'5184) \'(\'\"_id\" \'\"8f2c827c-ce4a1026-9725d04a-35ac8f4\"))))\n(let $94 (DqCnValue (TDqOutput $93 \'0)))\n(let $95 (KqpTxResultBinding $11 \'\"6\" \'0))\n(let $96 \'($91 $95))\n(let $97 (KqpPhysicalTx \'($93) \'($94) \'($51 $96) $3))\n(let $98 (DataSink \'\"KqpTableSink\" \'\"db\"))\n(let $99 (KqpTableSinkSettings $22 \'false \'\"upsert\" \'0 \'\"oltp\" \'false \'false \'()))\n(let $100 (DqPhyStage \'() (lambda \'() (Iterator $92)) \'(\'(\'\"_logical_id\" \'5732) \'(\'\"_id\" \'\"6654a4e9-1043422-bcdc5307-138ca1fa\")) \'((DqSink \'0 $98 $99))))\n(let $101 \'\"%kqp%tx_result_binding_7_0\")\n(let $102 (DictType $19 (TupleType $44 $56)))\n(let $103 %kqp%tx_result_binding_7_0)\n(let $104 \'(\'(\'\"_logical_id\" \'5760) \'(\'\"_id\" \'\"9b076787-ed1fdbca-d5568e2e-a4b89a93\") $30))\n(let $105 (DqPhyStage \'() (lambda \'() (block \'(\n (let $171 (lambda \'($173) (block \'(\n (let $174 (Nth $173 \'1))\n (let $175 (Nth $174 \'0))\n (return (Member (Nth $173 \'0) \'\"Key\") (Member $175 \'\"fk1\") (Member $175 \'\"fk2\") (Member $175 \'\"fk3\") (Nth $174 \'1))\n ))))\n (let $172 (lambda \'($181 $182 $183 $184 $185) $181 $182 $183 $184))\n (return (FromFlow (WideMap (WideFilter (ExpandMap (ToFlow (DictItems $103)) $171) (lambda \'($176 $177 $178 $179 $180) $180)) $172)))\n))) $104))\n(let $106 (DqCnUnionAll (TDqOutput $105 \'0)))\n(let $107 (lambda \'($186) (FromFlow (NarrowMap (ToFlow $186) $34))))\n(let $108 (KqpTableSinkSettings $68 \'false \'\"delete\" \'1 \'\"oltp\" \'false \'false \'()))\n(let $109 (DqPhyStage \'($106) $107 
\'(\'(\'\"_logical_id\" \'5746) \'(\'\"_id\" \'\"c96debbf-61f7c28d-e875352d-428b41a2\")) \'((DqSink \'0 $98 $108))))\n(let $110 \'(\'(\'\"_logical_id\" \'5812) \'(\'\"_id\" \'\"2e5f7920-da576136-d6f03fdb-9b489580\") $30))\n(let $111 (DqPhyStage \'() (lambda \'() (FromFlow (ExpandMap (FlatMap (Map (ToFlow $92) (lambda \'($187) (AsStruct \'(\'\"Key\" (Member $187 \'\"Key\")) \'(\'\"fk1\" (Member $187 \'\"fk1\")) \'(\'\"fk3\" (Member $187 \'\"fk3\"))))) (lambda \'($188) (block \'(\n (let $189 \'(\'\"Key\" (Member $188 \'\"Key\")))\n (let $190 \'(\'\"fk1\" (Member $188 \'\"fk1\")))\n (let $191 \'(\'\"fk3\" (Member $188 \'\"fk3\")))\n (return (IfPresent (Lookup $103 (AsStruct $189)) (lambda \'($192) (If (Nth $192 \'1) (Just (AsStruct $189 $190 \'(\'\"fk2\" (Member (Nth $192 \'0) \'\"fk2\")) $191)) (Nothing (OptionalType $29)))) (Just (AsStruct $189 $190 $47 $191))))\n)))) $26))) $110))\n(let $112 (DqCnUnionAll (TDqOutput $111 \'0)))\n(let $113 (KqpTableSinkSettings $68 \'false \'\"\" \'2 \'\"oltp\" \'false \'false \'()))\n(let $114 (DqPhyStage \'($112) $107 \'(\'(\'\"_logical_id\" \'5774) \'(\'\"_id\" \'\"2cef8e31-dc4d46e3-be38ac30-36d200fb\")) \'((DqSink \'0 $98 $113))))\n(let $115 \'($100 $105 $109 $111 $114))\n(let $116 (KqpTxResultBinding $102 \'\"7\" \'0))\n(let $117 (KqpPhysicalTx $115 \'() \'($96 \'($101 $116)) \'($40 \'(\'\"with_effects\"))))\n(let $118 \'($4 $17 $42 $52 $65 $80 $90 $97 $117))\n(return (KqpPhysicalQuery $118 \'() \'(\'(\'\"type\" \'\"data_query\"))))\n)\n" total_duration_us: 1130916 total_cpu_time_us: 1109645 query_meta: "{\"query_database\":\"/Root\",\"query_parameter_types\":{},\"table_metadata\":[\"{\\\"DoesExist\\\":true,\\\"Cluster\\\":\\\"db\\\",\\\"Name\\\":\\\"/Root/TestTable\\\",\\\"SysView\\\":\\\"\\\",\\\"PathId\\\":{\\\"OwnerId\\\":72057594046644480,\\\"TableId\\\":16},\\\"SchemaVersion\\\":1,\\\"Kind\\\":1,\\\"Columns\\\":[{\\\"Name\\\":\\\"Key\\\",\\\"Id\\\":1,\\\"Type\\\":\\\"String\\\",\\\"TypeId\\\":4097,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"Value\\\",\\\"Id\\\":5,\\\"Type\\\":\\\"String\\\",\\\"TypeId\\\":4097,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"fk1\\\",\\\"Id\\\":2,\\\"Type\\\":\\\"String\\\",\\\"TypeId\\\":4097,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"fk2\\\",\\\"Id\\\":3,\\\"Type\\\":\\\"Int32\\\",\\\"TypeId\\\":1,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"fk3\\\",\\\"Id\\\":4,\\\"Type\\\":\\\"Uint64\\\",\\\"TypeId\\\":4,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"Ta
bleId\\\":18446744073709551615}}],\\\"KeyColunmNames\\\":[\\\"Key\\\"],\\\"Indexes\\\":[{\\\"Name\\\":\\\"Index\\\",\\\"Type\\\":2,\\\"State\\\":1,\\\"SchemaVersion\\\":1,\\\"LocalPathId\\\":17,\\\"PathOwnerId\\\":8716544,\\\"KeyColumns\\\":[\\\"fk1\\\",\\\"fk2\\\",\\\"fk3\\\"]}],\\\"SecondaryGlobalIndexMetadata\\\":[{\\\"DoesExist\\\":true,\\\"Cluster\\\":\\\"db\\\",\\\"Name\\\":\\\"/Root/TestTable/Index/indexImplTable\\\",\\\"SysView\\\":\\\"\\\",\\\"PathId\\\":{\\\"OwnerId\\\":72057594046644480,\\\"TableId\\\":18},\\\"SchemaVersion\\\":1,\\\"Kind\\\":1,\\\"Columns\\\":[{\\\"Name\\\":\\\"Key\\\",\\\"Id\\\":4,\\\"Type\\\":\\\"String\\\",\\\"TypeId\\\":4097,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"fk1\\\",\\\"Id\\\":1,\\\"Type\\\":\\\"String\\\",\\\"TypeId\\\":4097,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"fk2\\\",\\\"Id\\\":2,\\\"Type\\\":\\\"Int32\\\",\\\"TypeId\\\":1,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"fk3\\\",\\\"Id\\\":3,\\\"Type\\\":\\\"Uint64\\\",\\\"TypeId\\\":4,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}}],\\\"KeyColunmNames\\\":[\\\"fk1\\\",\\\"fk2\\\",\\\"fk3\\\",\\\"Key\\\"],\\\"RecordsCount\\\":0,\\\"DataSize\\\":0,\\\"StatsLoaded\\\":false}],\\\"RecordsCount\\\":0,\\\"DataSize\\\":0,\\\"StatsLoaded\\\":false}\"],\"table_meta_serialization_type\":2,\"created_at\":\"1743438687\",\"query_type\":\"QUERY_TYPE_SQL_DML\",\"query_syntax\":\"1\",\"query_cluster\":\"db\",\"query_id\":\"48179c0a-80e411a9-df40a6bc-3e9866bc\",\"version\":\"1.0\"}" 2025-03-31T16:31:27.125022Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:28.245302Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_data_erasure/unittest >> TestDataErasure::DataErasureManualLaunch3Cycles [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:31:23.696647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:31:23.696780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:31:23.696844Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:31:23.696895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:31:23.696958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:31:23.697013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:31:23.697117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:31:23.697255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:31:23.697824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:31:23.820396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:31:23.820481Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:31:23.827398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:31:23.827517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:31:23.827688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:31:23.833987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:31:23.834145Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:31:23.834631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:31:23.834833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:31:23.836749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:31:23.837987Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:31:23.838056Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:31:23.838222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:31:23.838273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:31:23.838311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:31:23.838509Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:31:23.852704Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:31:23.988306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 
72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:31:23.988552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:31:23.988779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:31:23.989039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:31:23.989104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:31:23.994726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:31:23.994900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:31:23.995125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:31:23.995184Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:31:23.995242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:31:23.995277Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:31:23.997829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:31:23.997906Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:31:23.997948Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:31:23.999951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:31:24.000008Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:31:24.000060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:31:24.000121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:31:24.004132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:31:24.006691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:31:24.006921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 
2025-03-31T16:31:24.008131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:31:24.008296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:31:24.008371Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:31:24.008706Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:31:24.008771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:31:24.008977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:31:24.009079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:31:24.011878Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:31:24.011937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:31:24.012134Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:31:24.012179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:31:24.012418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:31:24.012482Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:31:24.012588Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:31:24.012625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:31:24.012663Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:31:24.012699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:31:24.012734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:31:24.012775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:31:24.012823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:31:24.012855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:31:24.012932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:31:24.012976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:31:24.013007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 
2025-03-31T16:31:24.016313Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:31:24.016491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:31:24.016533Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... ing# 1 shards at schemeshard 72075186233409551 2025-03-31T16:31:28.249018Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553241, Sender [1:1010:2857], Recipient [1:830:2714]: NKikimrTxDataShard.TEvForceDataCleanupResult DataCleanupGeneration: 3 TabletId: 72075186233409555 Status: OK 2025-03-31T16:31:28.249057Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvForceDataCleanupResult 2025-03-31T16:31:28.249099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureShard Execute at schemestard: 72075186233409551 2025-03-31T16:31:28.249149Z node 1 :FLAT_TX_SCHEMESHARD INFO: [TenantDataErasureManager] [Finished] Data erasure is completed for pathId# [OwnerId: 72075186233409551, LocalPathId: 2], datashard# 72075186233409555, shardIdx# 72075186233409551:5 in# 62 ms, next wakeup in# 10.813000s, rate# 1, in queue# 0 shards, running# 0 shards at schemeshard 72075186233409551 2025-03-31T16:31:28.249190Z node 1 :FLAT_TX_SCHEMESHARD INFO: [TenantDataErasureManager] Data erasure in shards is completed. Send response to root schemeshard 2025-03-31T16:31:28.249219Z node 1 :FLAT_TX_SCHEMESHARD INFO: [TenantDataErasureManager] Complete: Generation# 3 2025-03-31T16:31:28.252243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureShard Complete at schemestard: 72075186233409551, NeedResponseComplete# false 2025-03-31T16:31:28.252674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureShard Complete at schemestard: 72075186233409551, NeedResponseComplete# true 2025-03-31T16:31:28.252896Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125514, Sender [1:830:2714], Recipient [1:290:2274]: NKikimrScheme.TEvTenantDataErasureResponse PathId { OwnerId: 72057594046678944 LocalId: 3 } Generation: 3 Status: COMPLETED 2025-03-31T16:31:28.252940Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvTenantDataErasureResponse 2025-03-31T16:31:28.253002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureTenant Execute at schemeshard: 72057594046678944 2025-03-31T16:31:28.253080Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] [Finished] Data erasure completed for pathId# [OwnerId: 72057594046678944, LocalPathId: 3] in# 64 ms, next wakeup# 595.811000s, rate# 0, in queue# 0 tenants, running# 0 tenants at schemeshard 72057594046678944 2025-03-31T16:31:28.253155Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Data erasure in tenants is completed. 
Send request to BS controller 2025-03-31T16:31:28.254721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureTenant Complete at schemeshard: 72057594046678944, NeedSendRequestToBSC# true 2025-03-31T16:31:28.254777Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] SendRequestToBSC: Generation# 3 2025-03-31T16:31:28.255034Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268637738, Sender [1:293:2276], Recipient [1:290:2274]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 3 Completed: false Progress10k: 0 2025-03-31T16:31:28.255100Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-03-31T16:31:28.255141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-03-31T16:31:28.255212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-03-31T16:31:28.255250Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxCompleteDataErasureBSC: Progress data shred in BSC 0 2025-03-31T16:31:28.255312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2025-03-31T16:31:28.255386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [RootDataErasureManager] ScheduleRequestToBSC: Interval# 1.000000s 2025-03-31T16:31:28.717161Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:31:28.717247Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:31:28.717318Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:290:2274], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:31:28.717340Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:31:28.727748Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:453:2406]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:31:28.727815Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:31:28.727859Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:830:2714]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:31:28.727874Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:31:28.727916Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:830:2714], Recipient [1:830:2714]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:31:28.727933Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:31:28.727982Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:453:2406], Recipient [1:453:2406]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:31:28.727996Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:31:28.769377Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, 
received event# 271125517, Sender [0:0:0], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-03-31T16:31:28.769451Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-03-31T16:31:28.769494Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] SendRequestToBSC: Generation# 3 2025-03-31T16:31:28.769785Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268637738, Sender [1:293:2276], Recipient [1:290:2274]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 3 Completed: false Progress10k: 5000 2025-03-31T16:31:28.769820Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-03-31T16:31:28.769847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-03-31T16:31:28.769913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-03-31T16:31:28.769943Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxCompleteDataErasureBSC: Progress data shred in BSC 5000 2025-03-31T16:31:28.770002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2025-03-31T16:31:28.770042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [RootDataErasureManager] ScheduleRequestToBSC: Interval# 1.000000s 2025-03-31T16:31:29.126070Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:31:29.126140Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:31:29.126224Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:290:2274], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:31:29.126254Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:31:29.136678Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:453:2406]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:31:29.136747Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:31:29.136814Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:830:2714]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:31:29.136839Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:31:29.136900Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:453:2406], Recipient [1:453:2406]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:31:29.136928Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:31:29.136994Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:830:2714], Recipient [1:830:2714]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:31:29.137016Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event 
TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:31:29.178309Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-03-31T16:31:29.178385Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-03-31T16:31:29.178431Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] SendRequestToBSC: Generation# 3 2025-03-31T16:31:29.178694Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268637738, Sender [1:293:2276], Recipient [1:290:2274]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 3 Completed: true Progress10k: 10000 2025-03-31T16:31:29.178731Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-03-31T16:31:29.178759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-03-31T16:31:29.178827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-03-31T16:31:29.178856Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxCompleteDataErasureBSC: Data shred in BSC is completed 2025-03-31T16:31:29.178903Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Complete: Generation# 3, duration# 2 s 2025-03-31T16:31:29.181277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2025-03-31T16:31:29.181923Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:3550:4909], Recipient [1:290:2274]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:31:29.181982Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:31:29.182023Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-03-31T16:31:29.182179Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125519, Sender [1:2765:4280], Recipient [1:290:2274]: NKikimrScheme.TEvDataErasureInfoRequest 2025-03-31T16:31:29.182208Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDataErasureInfoRequest 2025-03-31T16:31:29.182243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvDataErasureInfoRequest, at schemeshard: 72057594046678944 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-55 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-56 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-37 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-38 >> KqpIndexes::UniqIndexComplexPkComplexFkOverlap [GOOD] >> KqpIndexes::UniqAndNoUniqSecondaryIndexWithCover ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_data_erasure/unittest >> TestDataErasure::DataErasureRun3Cycles [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:31:22.448301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, 
InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:31:22.448493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:31:22.448540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:31:22.448576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:31:22.449305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:31:22.449371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:31:22.449448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:31:22.449567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:31:22.451029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:31:22.551295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:31:22.551388Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:31:22.558707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:31:22.558828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:31:22.558983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:31:22.566573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:31:22.566923Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:31:22.569919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:31:22.570225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:31:22.575410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:31:22.583344Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:31:22.583421Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:31:22.584330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:31:22.584377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:31:22.584439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:31:22.584603Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:31:22.594748Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:31:22.724080Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:31:22.726231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:31:22.728526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:31:22.730193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:31:22.730303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:31:22.735492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:31:22.735682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:31:22.735914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:31:22.736091Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:31:22.736138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:31:22.736176Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:31:22.738785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:31:22.738873Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:31:22.738921Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:31:22.741410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:31:22.741473Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:31:22.741532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:31:22.741585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:31:22.746664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:31:22.751349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:31:22.752620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:31:22.754076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:31:22.754274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:31:22.754359Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:31:22.757667Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:31:22.757772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:31:22.758037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:31:22.758159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:31:22.761350Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:31:22.761400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:31:22.761603Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:31:22.761669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:31:22.761954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:31:22.762013Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:31:22.762117Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:31:22.762157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:31:22.762197Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:31:22.762243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:31:22.762286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:31:22.762331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:31:22.762388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:31:22.762422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:31:22.762496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 
2025-03-31T16:31:22.762545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:31:22.762579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:31:22.764879Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:31:22.765038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:31:22.765081Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... nt# 269553241, Sender [1:1005:2854], Recipient [1:830:2714]: NKikimrTxDataShard.TEvForceDataCleanupResult DataCleanupGeneration: 3 TabletId: 72075186233409554 Status: OK 2025-03-31T16:31:28.579334Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvForceDataCleanupResult 2025-03-31T16:31:28.579368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureShard Execute at schemestard: 72075186233409551 2025-03-31T16:31:28.579411Z node 1 :FLAT_TX_SCHEMESHARD INFO: [TenantDataErasureManager] [Finished] Data erasure is completed for pathId# [OwnerId: 72075186233409551, LocalPathId: 2], datashard# 72075186233409554, shardIdx# 72075186233409551:4 in# 66 ms, next wakeup in# 8.934000s, rate# 1, in queue# 0 shards, running# 0 shards at schemeshard 72075186233409551 2025-03-31T16:31:28.579453Z node 1 :FLAT_TX_SCHEMESHARD INFO: [TenantDataErasureManager] Data erasure in shards is completed. Send response to root schemeshard 2025-03-31T16:31:28.579477Z node 1 :FLAT_TX_SCHEMESHARD INFO: [TenantDataErasureManager] Complete: Generation# 3 2025-03-31T16:31:28.581825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureShard Complete at schemestard: 72075186233409551, NeedResponseComplete# false 2025-03-31T16:31:28.582094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureShard Complete at schemestard: 72075186233409551, NeedResponseComplete# true 2025-03-31T16:31:28.582272Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125514, Sender [1:830:2714], Recipient [1:290:2274]: NKikimrScheme.TEvTenantDataErasureResponse PathId { OwnerId: 72057594046678944 LocalId: 3 } Generation: 3 Status: COMPLETED 2025-03-31T16:31:28.582309Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvTenantDataErasureResponse 2025-03-31T16:31:28.582364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureTenant Execute at schemeshard: 72057594046678944 2025-03-31T16:31:28.582417Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] [Finished] Data erasure completed for pathId# [OwnerId: 72057594046678944, LocalPathId: 3] in# 68 ms, next wakeup# 593.932000s, rate# 0, in queue# 0 tenants, running# 0 tenants at schemeshard 72057594046678944 2025-03-31T16:31:28.582477Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Data erasure in tenants is completed. 
Send request to BS controller 2025-03-31T16:31:28.583912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureTenant Complete at schemeshard: 72057594046678944, NeedSendRequestToBSC# true 2025-03-31T16:31:28.583956Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] SendRequestToBSC: Generation# 3 2025-03-31T16:31:28.584141Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268637738, Sender [1:293:2276], Recipient [1:290:2274]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 3 Completed: false Progress10k: 0 2025-03-31T16:31:28.584176Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-03-31T16:31:28.584202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-03-31T16:31:28.584243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-03-31T16:31:28.584268Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxCompleteDataErasureBSC: Progress data shred in BSC 0 2025-03-31T16:31:28.584314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2025-03-31T16:31:28.584354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [RootDataErasureManager] ScheduleRequestToBSC: Interval# 1.000000s 2025-03-31T16:31:29.064694Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:830:2714]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:31:29.064761Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:31:29.064856Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:31:29.064876Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:31:29.064925Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:453:2406]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:31:29.064944Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:31:29.064987Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:453:2406], Recipient [1:453:2406]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:31:29.065007Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:31:29.065061Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:830:2714], Recipient [1:830:2714]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:31:29.065078Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:31:29.065126Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:290:2274], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:31:29.065143Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:31:29.096136Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, 
received event# 271125517, Sender [0:0:0], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-03-31T16:31:29.096219Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-03-31T16:31:29.096258Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] SendRequestToBSC: Generation# 3 2025-03-31T16:31:29.096536Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268637738, Sender [1:293:2276], Recipient [1:290:2274]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 3 Completed: false Progress10k: 5000 2025-03-31T16:31:29.096577Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-03-31T16:31:29.096609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-03-31T16:31:29.096701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-03-31T16:31:29.096736Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxCompleteDataErasureBSC: Progress data shred in BSC 5000 2025-03-31T16:31:29.096803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2025-03-31T16:31:29.096853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [RootDataErasureManager] ScheduleRequestToBSC: Interval# 1.000000s 2025-03-31T16:31:29.510353Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:830:2714]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:31:29.510476Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:31:29.510568Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:31:29.510600Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:31:29.510661Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:453:2406]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:31:29.510688Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:31:29.510748Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:290:2274], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:31:29.510794Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:31:29.510873Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:453:2406], Recipient [1:453:2406]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:31:29.510900Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:31:29.510963Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:830:2714], Recipient [1:830:2714]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:31:29.510990Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event 
TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:31:29.542053Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-03-31T16:31:29.542137Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-03-31T16:31:29.542177Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] SendRequestToBSC: Generation# 3 2025-03-31T16:31:29.542443Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268637738, Sender [1:293:2276], Recipient [1:290:2274]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 3 Completed: true Progress10k: 10000 2025-03-31T16:31:29.542478Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-03-31T16:31:29.542510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-03-31T16:31:29.542584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-03-31T16:31:29.542617Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxCompleteDataErasureBSC: Data shred in BSC is completed 2025-03-31T16:31:29.542664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [RootDataErasureManager] ScheduleDataErasureWakeup: Interval# 0.931000s 2025-03-31T16:31:29.542699Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Complete: Generation# 3, duration# 2 s 2025-03-31T16:31:29.552275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2025-03-31T16:31:29.553005Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:3593:4952], Recipient [1:290:2274]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:31:29.553068Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:31:29.553097Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-03-31T16:31:29.553237Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125519, Sender [1:272:2263], Recipient [1:290:2274]: NKikimrScheme.TEvDataErasureInfoRequest 2025-03-31T16:31:29.553273Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDataErasureInfoRequest 2025-03-31T16:31:29.553305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvDataErasureInfoRequest, at schemeshard: 72057594046678944 >> TPersQueueTest::TClusterTrackerTest [GOOD] >> TPersQueueTest::SrcIdCompatibility ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_data_erasure/unittest >> TestDataErasure::DataErasureManualLaunch [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:31:26.871727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:31:26.871844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, 
WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:31:26.871891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:31:26.871928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:31:26.871974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:31:26.872027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:31:26.872097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:31:26.872206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:31:26.872619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:31:26.953403Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:31:26.953499Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:31:26.961333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:31:26.961454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:31:26.961641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:31:26.967181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:31:26.967397Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:31:26.968103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:31:26.968356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:31:26.970509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:31:26.971641Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:31:26.971705Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:31:26.971840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:31:26.971878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:31:26.971909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:31:26.972064Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:31:26.979530Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:31:27.082119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } 
StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:31:27.082352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:31:27.082566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:31:27.082830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:31:27.082898Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:31:27.085057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:31:27.085180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:31:27.085335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:31:27.085385Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:31:27.085436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:31:27.085463Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:31:27.087262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:31:27.087322Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:31:27.087352Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:31:27.088879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:31:27.088937Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:31:27.088976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:31:27.089011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:31:27.091762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:31:27.093501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:31:27.093677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:31:27.094481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:31:27.094607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:31:27.094652Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:31:27.094881Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:31:27.094923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:31:27.095064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:31:27.095178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:31:27.097204Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:31:27.097251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:31:27.097447Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:31:27.097492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:31:27.097723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:31:27.097761Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:31:27.097839Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:31:27.097866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:31:27.097898Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:31:27.097924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:31:27.097954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:31:27.097996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:31:27.098051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:31:27.098086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:31:27.098166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:31:27.098204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:31:27.098242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:31:27.100003Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:31:27.100132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:31:27.100190Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 16:31:28.493278Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvTenantDataErasureResponse 2025-03-31T16:31:28.493359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureTenant Execute at schemeshard: 72057594046678944 2025-03-31T16:31:28.493491Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] [Finished] Data erasure completed for pathId# [OwnerId: 72057594046678944, LocalPathId: 3] in# 60 ms, next wakeup# 599.940000s, rate# 0, in queue# 0 tenants, running# 1 tenants at schemeshard 72057594046678944 2025-03-31T16:31:28.495442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureTenant Complete at schemeshard: 72057594046678944, NeedSendRequestToBSC# false 2025-03-31T16:31:28.509251Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553241, Sender [1:637:2554], Recipient [1:453:2406]: NKikimrTxDataShard.TEvForceDataCleanupResult DataCleanupGeneration: 1 TabletId: 72075186233409549 Status: OK 2025-03-31T16:31:28.509309Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvForceDataCleanupResult 2025-03-31T16:31:28.509358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureShard Execute at schemeshard: 72075186233409546 2025-03-31T16:31:28.509417Z node 1 :FLAT_TX_SCHEMESHARD INFO: [TenantDataErasureManager] [Finished] Data erasure is completed for pathId# [OwnerId: 72075186233409546, LocalPathId: 2], datashard# 72075186233409549, shardIdx# 72075186233409546:4 in# 62 ms, next wakeup in# 14.938000s, rate# 1, in queue# 0 shards, running# 1 shards at schemeshard 72075186233409546 2025-03-31T16:31:28.511438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureShard Complete at schemeshard: 72075186233409546, NeedResponseComplete# false 2025-03-31T16:31:28.533880Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553241, Sender [1:639:2555], Recipient [1:453:2406]: NKikimrTxDataShard.TEvForceDataCleanupResult DataCleanupGeneration: 1 TabletId: 72075186233409550 Status: OK 2025-03-31T16:31:28.533943Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvForceDataCleanupResult 2025-03-31T16:31:28.533990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureShard Execute at schemeshard: 72075186233409546 2025-03-31T16:31:28.534049Z node 1 :FLAT_TX_SCHEMESHARD INFO: [TenantDataErasureManager] [Finished] Data erasure is completed for pathId# [OwnerId: 72075186233409546, LocalPathId: 2], datashard# 72075186233409550, shardIdx# 72075186233409546:5 in# 64 ms, next wakeup in# 14.936000s, rate# 1, in queue# 0 shards, running# 0 shards at schemeshard 72075186233409546 2025-03-31T16:31:28.534104Z node 1 :FLAT_TX_SCHEMESHARD INFO: [TenantDataErasureManager] Data erasure in shards is completed. 
Send response to root schemeshard 2025-03-31T16:31:28.534124Z node 1 :FLAT_TX_SCHEMESHARD INFO: [TenantDataErasureManager] Complete: Generation# 1 2025-03-31T16:31:28.535962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureShard Complete at schemeshard: 72075186233409546, NeedResponseComplete# true 2025-03-31T16:31:28.536242Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:1906:3577], Recipient [1:290:2274]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:31:28.536274Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:31:28.536293Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-03-31T16:31:28.536419Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125514, Sender [1:453:2406], Recipient [1:290:2274]: NKikimrScheme.TEvTenantDataErasureResponse PathId { OwnerId: 72057594046678944 LocalId: 2 } Generation: 1 Status: COMPLETED 2025-03-31T16:31:28.536447Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvTenantDataErasureResponse 2025-03-31T16:31:28.536508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureTenant Execute at schemeshard: 72057594046678944 2025-03-31T16:31:28.536564Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] [Finished] Data erasure completed for pathId# [OwnerId: 72057594046678944, LocalPathId: 2] in# 65 ms, next wakeup# 599.935000s, rate# 0, in queue# 0 tenants, running# 0 tenants at schemeshard 72057594046678944 2025-03-31T16:31:28.536623Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Data erasure in tenants is completed. Send request to BS controller 2025-03-31T16:31:28.536825Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877760, Sender [1:1905:3576], Recipient [1:453:2406]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046678944 Status: OK ServerId: [1:1906:3577] Leader: 1 Dead: 0 Generation: 3 VersionInfo: } 2025-03-31T16:31:28.536860Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-03-31T16:31:28.536883Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Handle TEvClientConnected, tabletId: 72057594046678944, status: OK, at schemeshard: 72075186233409546 2025-03-31T16:31:28.538349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureTenant Complete at schemeshard: 72057594046678944, NeedSendRequestToBSC# true 2025-03-31T16:31:28.538413Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] SendRequestToBSC: Generation# 1 2025-03-31T16:31:28.538774Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877760, Sender [1:1910:3581], Recipient [1:290:2274]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037932033 Status: OK ServerId: [1:1911:3582] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-03-31T16:31:28.538807Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-03-31T16:31:28.538829Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Handle TEvClientConnected, tabletId: 72057594037932033, status: OK, at schemeshard: 72057594046678944 2025-03-31T16:31:28.538967Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268637738, Sender [1:293:2276], Recipient [1:290:2274]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: false Progress10k: 0 2025-03-31T16:31:28.538995Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event 
TEvBlobStorage::TEvControllerShredResponse 2025-03-31T16:31:28.539034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-03-31T16:31:28.539137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-03-31T16:31:28.539187Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxCompleteDataErasureBSC: Progress data shred in BSC 0 2025-03-31T16:31:28.539239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2025-03-31T16:31:28.539296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [RootDataErasureManager] ScheduleRequestToBSC: Interval# 1.000000s 2025-03-31T16:31:29.418484Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-03-31T16:31:29.418564Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-03-31T16:31:29.418618Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] SendRequestToBSC: Generation# 1 2025-03-31T16:31:29.418844Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:290:2274], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:31:29.418885Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:31:29.419049Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268637738, Sender [1:293:2276], Recipient [1:290:2274]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: false Progress10k: 5000 2025-03-31T16:31:29.419099Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-03-31T16:31:29.419128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-03-31T16:31:29.419219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-03-31T16:31:29.419255Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxCompleteDataErasureBSC: Progress data shred in BSC 5000 2025-03-31T16:31:29.419315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2025-03-31T16:31:29.419357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [RootDataErasureManager] ScheduleRequestToBSC: Interval# 1.000000s 2025-03-31T16:31:29.805310Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:31:29.805410Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:31:29.805586Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-03-31T16:31:29.805644Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-03-31T16:31:29.805672Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] SendRequestToBSC: Generation# 1 2025-03-31T16:31:29.805813Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:290:2274], Recipient [1:290:2274]: 
NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:31:29.805835Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:31:29.805971Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268637738, Sender [1:293:2276], Recipient [1:290:2274]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: true Progress10k: 10000 2025-03-31T16:31:29.805997Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-03-31T16:31:29.806026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-03-31T16:31:29.806090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-03-31T16:31:29.806119Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxCompleteDataErasureBSC: Data shred in BSC is completed 2025-03-31T16:31:29.806162Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Complete: Generation# 1, duration# 2 s 2025-03-31T16:31:29.809420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2025-03-31T16:31:29.810139Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:1992:3663], Recipient [1:290:2274]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:31:29.810203Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:31:29.810237Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-03-31T16:31:29.810329Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125519, Sender [1:272:2263], Recipient [1:290:2274]: NKikimrScheme.TEvDataErasureInfoRequest 2025-03-31T16:31:29.810359Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDataErasureInfoRequest 2025-03-31T16:31:29.810417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvDataErasureInfoRequest, at schemeshard: 72057594046678944 >> KqpIndexes::DuplicateUpsertInterleaveParams-UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::SecondaryIndexUsingInJoin+UseStreamJoin [GOOD] Test command err: Trying to start YDB, gRPC: 9466, MsgBus: 23841 2025-03-31T16:31:06.202934Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012054435616110:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:06.203056Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d82/r3tmp/tmpPnZ9wG/pdisk_1.dat 2025-03-31T16:31:06.604480Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:31:06.657387Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:06.657528Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 9466, node 1 2025-03-31T16:31:06.659427Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:31:06.727933Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or 
outdated, will use file: (empty maybe) 2025-03-31T16:31:06.728005Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:06.728026Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:06.728192Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23841 TClient is connected to server localhost:23841 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:31:07.286038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:07.299734Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:31:07.309547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:07.515462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:07.707379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:07.807891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:09.669171Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012067320519789:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:09.669323Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:10.013724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:31:10.046535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:31:10.074377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:31:10.102656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:31:10.134054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:31:10.174089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:31:10.225746Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012071615487599:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:10.225846Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:10.226075Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012071615487604:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:10.229577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:31:10.242703Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012071615487606:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:31:10.319069Z node 1 :TX_PROXY ERROR: Actor# [1:7488012071615487660:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:31:11.203680Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488012054435616110:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:11.203734Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:31:11.269915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:31:12.458079Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488012080205422964:2552], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:17: Error: At function: KiReadTable!
:2:17: Error: Cannot find table 'db.[/Root/TestTable/Index/indexImplTable]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:31:12.459577Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NGM1Y2NjMjMtNmY1MjAwYzAtZGMzZmM3MGUtNzYzY2EyYzk=, ActorId: [1:7488012075910455251:2497], ActorState: ExecuteState, TraceId: 01jqpgnwgy34fembykz5y40q20, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:31:12.508269Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488012080205422970:2555], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:17: Error: At function: KiReadTable!
:3:17: Error: Required global index not found, index name: WrongView, code: 2003 2025-03-31T16:31:12.509687Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NGM1Y2NjMjMtNmY1MjAwYzAtZGMzZmM3MGUtNzYzY2EyYzk=, ActorId: [1:7488012075910455251:2497], ActorState: ExecuteState, TraceId: 01jqpgnwjrf4sa6wcr5q96exh4, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:31:12.980030Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:13.673571Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:13.974443Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:14.003218Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 63661, MsgBus: 20248 2025-03-31T16:31:14.651475Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012090856445472:2130];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:14.654208Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d82/r3tmp/tmp6EhCEs/pdisk_1.dat 2025-03-31T16:31:14.820103Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:31:14.826494Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:14.826643Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:14.829957Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63661, node 2 2025-03-31T16:31:14.876674Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:14.876700Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:14.876706Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:14.876835Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20248 TClient is connected to server localhost:20248 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 ... 
roposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:31:18.156196Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:31:18.191313Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:31:18.259693Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:31:18.344393Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012108036316873:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:18.344486Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:18.344571Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012108036316878:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:18.348707Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:31:18.361386Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488012108036316880:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:31:18.428436Z node 2 :TX_PROXY ERROR: Actor# [2:7488012108036316935:3451] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:31:19.425218Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:31:19.649309Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488012090856445472:2130];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:19.649407Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:31:20.630173Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:20.981749Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 9637, MsgBus: 7120 2025-03-31T16:31:21.779013Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488012119101443554:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:21.779141Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d82/r3tmp/tmpo7sF3X/pdisk_1.dat 2025-03-31T16:31:21.892191Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:31:21.925037Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:21.925143Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:21.926996Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9637, node 3 2025-03-31T16:31:21.995160Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:21.995184Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:21.995191Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:21.995350Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7120 TClient is connected to server localhost:7120 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:31:22.557240Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:22.564394Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:31:22.578390Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:22.670387Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:22.894797Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:22.980404Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:25.368545Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488012136281314498:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:25.368619Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:25.416329Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:31:25.448425Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:31:25.480953Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:31:25.510974Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:31:25.542381Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:31:25.615237Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:31:25.667649Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488012136281315013:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:25.667735Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488012136281315018:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:25.667747Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:25.671867Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:31:25.682082Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488012136281315020:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:31:25.739241Z node 3 :TX_PROXY ERROR: Actor# [3:7488012136281315073:3441] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:31:26.779681Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:31:26.780597Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488012119101443554:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:26.780661Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:31:26.864812Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 |94.1%| [TA] $(B)/ydb/core/tx/schemeshard/ut_data_erasure/test-results/unittest/{meta.json ... results_accumulator.log} >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-43 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-44 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-55 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-56 |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest |94.1%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_data_erasure/test-results/unittest/{meta.json ... 
results_accumulator.log} |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest >> KqpIndexes::DuplicateUpsertInterleaveParams+UseSink [GOOD] |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::DuplicateUpsertInterleaveParams-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 10165, MsgBus: 25555 2025-03-31T16:31:12.152372Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012082722027418:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:12.153367Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d3f/r3tmp/tmpVQBvSF/pdisk_1.dat 2025-03-31T16:31:12.668840Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:31:12.675117Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:12.675226Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:12.677630Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10165, node 1 2025-03-31T16:31:12.757471Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:12.757504Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:12.757519Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:12.757620Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25555 TClient is connected to server localhost:25555 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:31:13.282604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:31:13.314913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:13.458346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:31:13.644994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:13.732128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:31:15.477107Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012095606931072:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:15.477251Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:15.797673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:31:15.831588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:31:15.861909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:31:15.928136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:31:15.956679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:31:16.028951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:31:16.072901Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012099901898887:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:16.073000Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:16.073216Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012099901898892:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:16.076572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:31:16.105426Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012099901898894:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:31:16.159632Z node 1 :TX_PROXY ERROR: Actor# [1:7488012099901898947:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:31:17.082682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:31:17.155326Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488012082722027418:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:17.159296Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 19176, MsgBus: 64724 2025-03-31T16:31:18.449389Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012104843415134:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:18.449433Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d3f/r3tmp/tmp9TcDDL/pdisk_1.dat 2025-03-31T16:31:18.563182Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19176, node 2 2025-03-31T16:31:18.629961Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:18.630055Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:18.646052Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:31:18.663641Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:18.663668Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:18.663675Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:18.663782Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64724 TClient is connected to server localhost:64724 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:31:19.156677Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:19.168704Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:31:19.172222Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:19.251458Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:19.410786Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, o ... Table, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:31:21.822839Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:31:21.900584Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:31:21.954351Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:31:21.999432Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012117728319312:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:21.999503Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:21.999547Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012117728319317:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:22.002689Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:31:22.010927Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488012117728319319:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:31:22.071044Z node 2 :TX_PROXY ERROR: Actor# [2:7488012122023286667:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:31:23.011259Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:31:23.060682Z node 2 :TX_PROXY ERROR: Actor# [2:7488012126318254322:3694] txid# 281474976715672, issues: { message: "Check failed: path: \'/Root/TestTable\', error: path is not a directory (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:31:23.078395Z node 2 :TX_PROXY ERROR: Actor# [2:7488012126318254336:3700] txid# 281474976715673, issues: { message: "Check failed: path: \'/Root/TestTable\', error: path is not a directory (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:31:23.093112Z node 2 :TX_PROXY ERROR: Actor# [2:7488012126318254343:3705] txid# 281474976715674, issues: { message: "Check failed: path: \'/Root/TestTable\', error: path is not a directory (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 18510, MsgBus: 27965 2025-03-31T16:31:23.987343Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488012126862537427:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:23.987436Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d3f/r3tmp/tmpijq54a/pdisk_1.dat 2025-03-31T16:31:24.084295Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18510, node 3 2025-03-31T16:31:24.132109Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:24.132242Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:24.134999Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:31:24.163629Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:24.163655Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:24.163664Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:24.163804Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27965 TClient is connected to server localhost:27965 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:31:24.621295Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:24.637459Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:24.715064Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:24.914743Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:25.001361Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:27.616412Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488012144042408401:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:27.616503Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:27.643190Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:31:27.673961Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:31:27.705613Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:31:27.737640Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:31:27.765604Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:31:27.798721Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:31:27.874954Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488012144042408917:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:27.875038Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488012144042408922:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:27.875043Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:27.878559Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:31:27.886790Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488012144042408924:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:31:27.968821Z node 3 :TX_PROXY ERROR: Actor# [3:7488012144042408979:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:31:28.987450Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488012126862537427:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:28.987503Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:31:29.097386Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:31:30.236847Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-14 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-15 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::DuplicateUpsertInterleaveParams+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 2728, MsgBus: 61545 2025-03-31T16:31:11.877359Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012077788586457:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:11.877436Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d51/r3tmp/tmpmgRLne/pdisk_1.dat 2025-03-31T16:31:12.268763Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:31:12.276474Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:12.276591Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:12.279174Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2728, node 1 2025-03-31T16:31:12.344209Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:12.344230Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:12.344250Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:12.344365Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61545 TClient is connected to server localhost:61545 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:31:12.920638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:12.947682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:13.103463Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:13.266496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:13.345857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:15.008684Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012094968457405:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:15.008877Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:15.340961Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:31:15.366385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:31:15.389133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:31:15.421697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:31:15.457751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:31:15.498880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:31:15.554769Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012094968457916:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:15.554863Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:15.555139Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012094968457921:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:15.559012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:31:15.571511Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012094968457923:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:31:15.647924Z node 1 :TX_PROXY ERROR: Actor# [1:7488012094968457976:3444] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:31:16.644189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:31:16.883200Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488012077788586457:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:16.883288Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; query_phases { duration_us: 938 cpu_time_us: 938 } query_phases { duration_us: 5934 table_access { name: "/Root/TestTable" partitions_count: 1 } cpu_time_us: 8164 affected_shards: 1 } query_phases { duration_us: 1902 cpu_time_us: 1902 } query_phases { duration_us: 3882 cpu_time_us: 5977 } query_phases { duration_us: 5446 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 31 } partitions_count: 1 } table_access { name: "/Root/TestTable/Index/indexImplTable" updates { rows: 1 bytes: 24 } partitions_count: 1 } cpu_time_us: 3552 affected_shards: 2 } compilation { duration_us: 635588 cpu_time_us: 624354 } process_cpu_time_us: 5677 query_plan: "{\"Plan\":{\"Plans\":[{\"PlanNodeId\":27,\"Plans\":[{\"Tables\":[\"TestTable\"],\"PlanNodeId\":26,\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Path\":\"\\/Root\\/TestTable\",\"Name\":\"Upsert\",\"Table\":\"TestTable\"},{\"Inputs\":[],\"Iterator\":\"precompute_1_1\",\"Name\":\"Iterator\"}],\"Node Type\":\"Upsert-ConstantExpr\",\"Stats\":{\"ComputeNodes\":[{\"Tasks\":[{\"NodeId\":1,\"FinishTimeMs\":1743438677368,\"TaskId\":1,\"Host\":\"ghrun-tuhoqst4xe\",\"ComputeTimeUs\":75}],\"CpuTimeUs\":583}],\"UseLlvm\":\"undefined\",\"Tasks\":1,\"FinishedTasks\":0,\"PhysicalStageId\":0,\"StageDurationUs\":0,\"Table\":[{\"Path\":\"\\/Root\\/TestTable\"}],\"BaseTimeMs\":1743438677368,\"NodesScanShards\":[],\"CpuTimeUs\":{\"Count\":1,\"Sum\":583,\"Max\":583,\"Min\":583}},\"CTE Name\":\"precompute_1_1\"}],\"Node Type\":\"Effect\"},{\"PlanNodeId\":25,\"Plans\":[{\"Tables\":[\"TestTable\\/Index\\/indexImplTable\"],\"PlanNodeId\":24,\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Path\":\"\\/Root\\/TestTable\\/Index\\/indexImplTable\",\"Name\":\"Delete\",\"Table\":\"TestTable\\/Index\\/indexImplTable\"},{\"Inputs\":[],\"Iterator\":\"precompute_3_1\",\"Name\":\"Iterator\"}],\"Node Type\":\"Delete-ConstantExpr\",\"Stats\":{\"StageDurationUs\":0,\"PhysicalStageId\":1,\"BaseTimeMs\":1743438677368,\"FinishedTasks\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"},\"CTE Name\":\"precompute_3_1\"}],\"Node Type\":\"Effect\"},{\"PlanNodeId\":23,\"Plans\":[{\"Tables\":[\"TestTable\\/Index\\/indexImplTable\"],\"PlanNodeId\":22,\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Path\":\"\\/Root\\/TestTable\\/Index\\/indexImplTable\",\"Name\":\"Upsert\",\"Table\":\"TestTable\\/Index\\/indexImplTable\"},{\"Inputs\":[],\"Iterator\":\"precompute_3_0\",\"Name\":\"Iterator\"}],\"Node 
Type\":\"Upsert-ConstantExpr\",\"Stats\":{\"ComputeNodes\":[{\"Tasks\":[{\"NodeId\":1,\"FinishTimeMs\":1743438677368,\"TaskId\":2,\"Host\":\"ghrun-tuhoqst4xe\",\"ComputeTimeUs\":84}],\"CpuTimeUs\":638}],\"UseLlvm\":\"undefined\",\"Tasks\":1,\"FinishedTasks\":0,\"PhysicalStageId\":2,\"StageDurationUs\":0,\"Table\":[{\"Path\":\"\\/Root\\/TestTable\\/Index\\/indexImplTable\"}],\"BaseTimeMs\":1743438677368,\"NodesScanShards\":[],\"CpuTimeUs\":{\"Count\":1,\"Sum\":638,\"Max\":638,\"Min\":638}},\"CTE Name\":\"precompute_3_0\"}],\"Node Type\":\"Effect\"},{\"PlanNodeId\":20,\"Subplan Name\":\"CTE precompute_3_0\",\"Plans\":[{\"PlanNodeId\":19,\"Plans\":[{\"PlanNodeId\":18,\"Plans\":[{\"PlanNodeId\":17,\"Operators\":[{\"Inputs\":[{\"Other\":\"ConstantExpression\"},{\"Other\":\"ConstantExpression\"},{\"Other\":\"ConstantExpression\"},{\"Other\":\"ConstantExpression\"}],\"Iterator\":\"FlatMap\",\"Name\":\"Iterator\"}],\"Node Type\":\"ConstantExpr\",\"Stats\":{\"ComputeNodes\":[{\"Tasks\":[{\"FinishTimeMs\":1743438677357,\"Host\":\"ghrun-tuhoqst4xe\",\"OutputRows\":1,\"ComputeTimeUs\":88,\"NodeId\":1,\"OutputChannels\":[{\"ChannelId\":1,\"Rows\":1,\"DstStageId\":2,\"Bytes\":29}],\"TaskId\":1,\"OutputBytes\":29} ... 057594046644480 2025-03-31T16:31:22.357425Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:31:22.398658Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:31:22.434170Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:31:22.505143Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:31:22.541359Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:31:22.635496Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012124546546967:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:22.635624Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:22.635873Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012124546546972:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:22.639885Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:31:22.651046Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488012124546546974:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:31:22.742465Z node 2 :TX_PROXY ERROR: Actor# [2:7488012124546547030:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:31:23.607763Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:31:23.979785Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488012107366675508:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:23.979849Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 31556, MsgBus: 62495 2025-03-31T16:31:25.096208Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488012137034073370:2057];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:25.096306Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d51/r3tmp/tmpS2LBlO/pdisk_1.dat 2025-03-31T16:31:25.226778Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:31:25.250073Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:25.250176Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:25.252534Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31556, node 3 2025-03-31T16:31:25.296303Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:25.296327Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:25.296335Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:25.296485Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62495 TClient is connected to server localhost:62495 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:31:25.776303Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:25.781530Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:31:25.795708Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:25.853345Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:26.014765Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:26.094471Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:28.370360Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488012149918977024:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:28.370467Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:28.419548Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:31:28.456200Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:31:28.495858Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:31:28.530950Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:31:28.562772Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:31:28.597775Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:31:28.686503Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488012149918977538:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:28.686607Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:28.686855Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488012149918977543:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:28.691122Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:31:28.702391Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488012149918977545:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:31:28.784864Z node 3 :TX_PROXY ERROR: Actor# [3:7488012149918977600:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:31:29.769729Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:31:30.096314Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488012137034073370:2057];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:30.096396Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:31:31.031516Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill >> KqpUserConstraint::KqpReadNull+UploadNull >> KqpMultishardIndex::WriteIntoRenamingSyncIndex [GOOD] >> KqpMultishardIndex::WriteIntoRenamingAsyncIndex >> KqpIndexes::SecondaryIndexUsingInJoin2-UseStreamJoin [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-20 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-21 >> KqpIndexes::SimpleVectorIndexOrderByCosineDistanceNotNullableLevel1 [GOOD] >> KqpIndexes::SimpleVectorIndexOrderByCosineDistanceNotNullableLevel2 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-2 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-3 >> TPersQueueTest::DefaultMeteringMode [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-32 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-33 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-8 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-9 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::SecondaryIndexUsingInJoin2-UseStreamJoin [GOOD] Test command err: Trying to start YDB, gRPC: 61798, MsgBus: 24251 2025-03-31T16:31:09.432203Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012066082055774:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:09.432288Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d63/r3tmp/tmpLKNf7L/pdisk_1.dat 2025-03-31T16:31:09.856576Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61798, node 1 2025-03-31T16:31:09.879364Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:09.879879Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:09.891596Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:31:09.919702Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, 
will use file: (empty maybe) 2025-03-31T16:31:09.919744Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:09.919752Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:09.919926Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24251 TClient is connected to server localhost:24251 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:31:10.416859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:10.467380Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:31:10.482663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:10.646279Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:10.815314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:10.907837Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:12.632362Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012078966959428:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:12.632487Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:12.993474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:31:13.029547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:31:13.060645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:31:13.098648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:31:13.129478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:31:13.164044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:31:13.249016Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012083261927238:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:13.249104Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:13.249167Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012083261927243:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:13.252617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:31:13.263313Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012083261927245:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:31:13.323355Z node 1 :TX_PROXY ERROR: Actor# [1:7488012083261927298:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:31:14.220417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:31:14.283013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:31:14.432676Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488012066082055774:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:14.432737Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 14106, MsgBus: 12050 2025-03-31T16:31:17.922107Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012103233828498:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:17.922166Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d63/r3tmp/tmpbFwkaD/pdisk_1.dat 2025-03-31T16:31:18.057363Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:31:18.085304Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:18.085390Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:18.088105Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14106, node 2 2025-03-31T16:31:18.133304Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:18.133329Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:18.133337Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:18.133462Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12050 TClient is connected to server localhost:12050 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:31:18.562155Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:18.569200Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:31:18.578143Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:1 ... undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:31:21.494305Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:31:21.534815Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:31:21.584203Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:31:21.633240Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012120413699964:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:21.633362Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:21.633590Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012120413699969:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:21.637604Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:31:21.652377Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488012120413699971:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:31:21.715063Z node 2 :TX_PROXY ERROR: Actor# [2:7488012120413700024:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:31:22.632820Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:31:22.708666Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-31T16:31:22.927181Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488012103233828498:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:22.927248Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 29889, MsgBus: 27702 2025-03-31T16:31:25.254012Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488012135555764781:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:25.254069Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d63/r3tmp/tmpCgZYjv/pdisk_1.dat 2025-03-31T16:31:25.450362Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:31:25.482117Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:25.482213Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:25.483787Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29889, node 3 2025-03-31T16:31:25.534365Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:25.534386Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:25.534394Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:25.534536Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27702 TClient is connected to server localhost:27702 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:31:26.067033Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:26.074756Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:31:26.084628Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:26.143153Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:26.331594Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:26.404332Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:28.672354Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488012148440668449:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:28.672453Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:28.711850Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:31:28.741288Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:31:28.770241Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:31:28.841425Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:31:28.880328Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:31:28.916739Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:31:28.966349Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488012148440668965:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:28.966435Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:28.966875Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488012148440668970:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:28.969964Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:31:28.979611Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488012148440668972:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:31:29.075769Z node 3 :TX_PROXY ERROR: Actor# [3:7488012152735636322:3450] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:31:30.102520Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:31:30.147359Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-31T16:31:30.254311Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488012135555764781:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:30.254400Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDontExistGroupAttribute >> LdapAuthProviderTest_LdapsScheme::LdapRefreshRemoveUserBad >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsDisableRequestToAD >> LdapAuthProviderTest::LdapServerIsUnavailable >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsFromAdLdapServer >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDontExistGroupAttribute >> LdapAuthProviderTest_nonSecure::LdapRefreshRemoveUserBad >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDefaultGroupAttributeGood ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest >> TPersQueueTest::DefaultMeteringMode [GOOD] Test command err: 2025-03-31T16:25:54.769266Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010716707103218:2279];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:54.771373Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:25:54.883631Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488010717014173346:2156];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:55.112409Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-31T16:25:55.158186Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a03/r3tmp/tmpkCD5MW/pdisk_1.dat 2025-03-31T16:25:55.164114Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-31T16:25:55.640507Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:25:55.664801Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:55.664893Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:55.666667Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: 
Unknown -> Disconnected 2025-03-31T16:25:55.666705Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:55.669555Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:25:55.671692Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:25:55.672791Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64399, node 1 2025-03-31T16:25:55.827797Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/0l5c/000a03/r3tmp/yandexqgSQYY.tmp 2025-03-31T16:25:55.827830Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/0l5c/000a03/r3tmp/yandexqgSQYY.tmp 2025-03-31T16:25:55.828075Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/0l5c/000a03/r3tmp/yandexqgSQYY.tmp 2025-03-31T16:25:55.828258Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:25:55.890260Z INFO: TTestServer started on Port 3482 GrpcPort 64399 TClient is connected to server localhost:3482 PQClient connected to localhost:64399 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:25:56.215449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:25:56.294574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-03-31T16:25:58.738890Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010734194042839:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:58.740539Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488010734194042828:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:58.740640Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:25:58.744971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-03-31T16:25:58.785454Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488010734194042842:2317], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-03-31T16:25:58.877186Z node 2 :TX_PROXY ERROR: Actor# [2:7488010734194042869:2179] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:25:59.204252Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488010733886973369:2345], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:25:59.205660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:25:59.205729Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YWY2ZDIzNS01NmJiYTg5MS03NWY1N2QyMy1kYTg5MjU4Mg==, ActorId: [1:7488010733886973341:2336], ActorState: ExecuteState, TraceId: 01jqpgcabvcj1xp1dw9rzktdht, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:25:59.223451Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T16:25:59.263313Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7488010734194042876:2321], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:25:59.265200Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZGE2ZjgyYy1kNWE5ZTQwMi04MDFkZjEyYS00ODlhZWRhZg==, ActorId: [2:7488010734194042826:2312], ActorState: ExecuteState, TraceId: 01jqpgca6gatgem8pyqcad6t2v, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:25:59.265692Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T16:25:59.315170Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:25:59.539569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-03-31T16:25:59.767969Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488010716707103218:2279];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:59.775609Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:25:59.853071Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jqpgcb41f7rzd4dq4d5cpr46, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWNlMTI3NzMtMWFkZTIxZGMtNmRlMmVjNWMtMmIyZTUxYWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:25:59.882550Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488010717014173346:2156];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:59.882623Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Subcribe to ClusterTracker from [1:7488010738181941128:3088] === CheckClustersList. 
Ok PQ Client: create topic: rt3.dc1--topic with 1 partitions CallPersQueueGRPC request to localhost:64399 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--topic" } } 2025-03-31T16:26:06.401254Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:64399 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20 ... 4267Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715672 State CALCULATING FrontTxId 281474976715672 2025-03-31T16:31:31.774303Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Received 1, Expected 1 2025-03-31T16:31:31.774346Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715672, NewState CALCULATED 2025-03-31T16:31:31.774386Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715672 moved from CALCULATING to CALCULATED 2025-03-31T16:31:31.774959Z node 30 :PERSQUEUE DEBUG: [TxId: 281474976715672] save tx TxId: 281474976715672 State: CALCULATED MinStep: 1743438691624 MaxStep: 18446744073709551615 Step: 1743438691792 Predicate: true Kind: KIND_CONFIG TabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "ttt" Version: 0 RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } TopicPath: "/Root/PQ/ttt" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } } BootstrapConfig { } SourceActor { RawX1: 7488012102055425710 RawX2: 124554053801 } Partitions { Partition { PartitionId: 0 } } 2025-03-31T16:31:31.775653Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-03-31T16:31:31.790363Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-03-31T16:31:31.790445Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Try execute txs with state CALCULATED 2025-03-31T16:31:31.790482Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715672, State CALCULATED 2025-03-31T16:31:31.790525Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715672 State CALCULATED FrontTxId 281474976715672 2025-03-31T16:31:31.790561Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715672, NewState WAIT_RS 2025-03-31T16:31:31.790606Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715672 moved from CALCULATED to WAIT_RS 
2025-03-31T16:31:31.790700Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Send TEvTxProcessing::TEvReadSet to 0 receivers. Wait TEvTxProcessing::TEvReadSet from 0 senders. 2025-03-31T16:31:31.790765Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] HaveParticipantsDecision 1 2025-03-31T16:31:31.790901Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715672, NewState EXECUTING 2025-03-31T16:31:31.790946Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715672 moved from WAIT_RS to EXECUTING 2025-03-31T16:31:31.790970Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1743438691792, TxId 281474976715672 2025-03-31T16:31:31.790984Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Received 0, Expected 1 2025-03-31T16:31:31.791526Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-31T16:31:31.791571Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-31T16:31:31.791617Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-31T16:31:31.791664Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] i0000000000 2025-03-31T16:31:31.791681Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] I0000000000 2025-03-31T16:31:31.791697Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] _config_0 2025-03-31T16:31:31.791759Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-31T16:31:31.791809Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] =========================== 2025-03-31T16:31:31.791862Z node 30 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2025-03-31T16:31:31.797550Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-03-31T16:31:31.797784Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvTxCommitDone Step 1743438691792, TxId 281474976715672, Partition 0 2025-03-31T16:31:31.797834Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Try execute txs with state EXECUTING 2025-03-31T16:31:31.797875Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715672, State EXECUTING 2025-03-31T16:31:31.797920Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715672 State EXECUTING FrontTxId 281474976715672 2025-03-31T16:31:31.797950Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Received 1, Expected 1 2025-03-31T16:31:31.798003Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId: 281474976715672 send TEvPersQueue::TEvProposeTransactionResult(COMPLETE) 2025-03-31T16:31:31.798079Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] complete TxId 281474976715672 2025-03-31T16:31:31.798564Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Apply new config PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "ttt" Version: 0 RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } TopicPath: "/Root/PQ/ttt" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } 2025-03-31T16:31:31.798675Z node 30 :PERSQUEUE NOTICE: [PQ: 72075186224037892] metering mode METERING_MODE_REQUEST_UNITS 2025-03-31T16:31:31.798829Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] delete partitions for TxId 281474976715672 2025-03-31T16:31:31.798865Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715672, NewState EXECUTED 2025-03-31T16:31:31.798905Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715672 moved from EXECUTING to EXECUTED 2025-03-31T16:31:31.799425Z node 30 :PERSQUEUE DEBUG: [TxId: 281474976715672] save tx TxId: 281474976715672 State: EXECUTED MinStep: 1743438691624 MaxStep: 18446744073709551615 Step: 1743438691792 Predicate: true Kind: KIND_CONFIG TabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { 
PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "ttt" Version: 0 RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } TopicPath: "/Root/PQ/ttt" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } } BootstrapConfig { } SourceActor { RawX1: 7488012102055425710 RawX2: 124554053801 } Partitions { Partition { PartitionId: 0 } } 2025-03-31T16:31:31.799776Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-03-31T16:31:31.815529Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-03-31T16:31:31.815604Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Try execute txs with state EXECUTED 2025-03-31T16:31:31.815637Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715672, State EXECUTED 2025-03-31T16:31:31.815683Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715672 State EXECUTED FrontTxId 281474976715672 2025-03-31T16:31:31.815724Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TPersQueue::SendEvReadSetAckToSenders 2025-03-31T16:31:31.815771Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715672, NewState WAIT_RS_ACKS 2025-03-31T16:31:31.815811Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715672 moved from EXECUTED to WAIT_RS_ACKS 2025-03-31T16:31:31.815865Z node 30 :PERSQUEUE DEBUG: [TxId: 281474976715672] PredicateAcks: 0/0 2025-03-31T16:31:31.815882Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] HaveAllRecipientsReceive 1, AllSupportivePartitionsHaveBeenDeleted 1 2025-03-31T16:31:31.815910Z node 30 :PERSQUEUE DEBUG: [TxId: 281474976715672] PredicateAcks: 0/0 2025-03-31T16:31:31.815947Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] add an TxId 281474976715672 to the list for deletion 2025-03-31T16:31:31.816017Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715672, NewState DELETING 2025-03-31T16:31:31.816070Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] delete key for TxId 281474976715672 2025-03-31T16:31:31.816194Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-03-31T16:31:31.823910Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-03-31T16:31:31.823976Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Try execute txs with state DELETING 2025-03-31T16:31:31.824008Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715672, State DELETING 2025-03-31T16:31:31.824047Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] delete TxId 281474976715672 2025-03-31T16:31:31.840418Z node 29 :PQ_READ_PROXY DEBUG: new Describe topic request 2025-03-31T16:31:31.840506Z node 29 :PQ_READ_PROXY DEBUG: TDescribeTopicActor for request operation_params { } path: "/Root/PQ/ttt" 2025-03-31T16:31:31.840623Z node 29 :PQ_READ_PROXY DEBUG: Describe topic actor for path /Root/PQ/ttt 2025-03-31T16:31:32.661634Z node 29 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 
2025-03-31T16:31:32.661673Z node 29 :IMPORT WARN: Table profiles were not loaded |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-56 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-57 |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest >> Worker::Basic >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-38 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-39 >> TAsyncIndexTests::SplitBothWithReboots[PipeResets] [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-44 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-45 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-56 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-57 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::SplitBothWithReboots[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [1:135:2157] sender: [1:136:2058] recipient: [1:111:2142] 2025-03-31T16:29:42.285304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:29:42.285400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:29:42.285464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:29:42.285497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:29:42.285552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:29:42.285583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:29:42.285675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:29:42.285804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 
604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:29:42.286129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:29:42.373939Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-03-31T16:29:42.374057Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:169:2058] recipient: [1:15:2062] 2025-03-31T16:29:42.395331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:29:42.395528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:29:42.395684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:29:42.399945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:29:42.400187Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:29:42.400848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:42.401085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:29:42.405663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:42.407338Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:29:42.407424Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:42.407546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:29:42.407592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:29:42.407634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:29:42.407780Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:210:2058] recipient: [1:208:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:210:2058] recipient: [1:208:2209] Leader for TabletID 72057594037968897 is [1:214:2213] sender: [1:215:2058] recipient: [1:208:2209] 2025-03-31T16:29:42.415699Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:235:2058] recipient: [1:15:2062] 2025-03-31T16:29:42.581136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at 
schemeshard: 72057594046678944 2025-03-31T16:29:42.581392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:42.581624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:29:42.581911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:29:42.581984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:42.588120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:42.588293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:29:42.588527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:42.588597Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:29:42.588629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:29:42.588661Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:29:42.590720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:42.590779Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:29:42.590817Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:29:42.592838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:42.592888Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:42.592928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:42.592973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:29:42.596807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:29:42.600801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:29:42.600999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:135:2157] sender: [1:250:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:29:42.602041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:42.602186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 135 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:29:42.602243Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:42.602526Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:29:42.602577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:42.602775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:29:42.602883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:29:42.605158Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:29:42.605211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:29:42.605406Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:42.605452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:202:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:29:42.605772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:42.605841Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:29:42.605935Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:29:42.605965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:29:42.606003Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:29:42.606055Z no ... 
nId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\001\000\004\000\000\0002\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:31:35.746482Z node 46 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: 
"/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-31T16:31:35.746717Z node 46 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 265us result status StatusSuccess 2025-03-31T16:31:35.747477Z node 46 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 
MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 50 } } Tuple { } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\002\000\004\000\000\0002\000\000\000\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:31:35.758806Z node 46 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409550][46:1036:2806] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-03-31T16:31:35.758910Z node 46 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409551][46:1037:2806] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-03-31T16:31:35.758996Z node 46 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][46:961:2806] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409550 } 2025-03-31T16:31:35.759111Z node 46 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][46:961:2806] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409551 } 2025-03-31T16:31:35.759249Z node 46 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409550][46:1036:2806] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1743438695732467 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1743438695732467 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2025-03-31T16:31:35.759402Z node 46 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409551][46:1037:2806] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ 
Order: 3 Group: 1743438695732467 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2025-03-31T16:31:35.768097Z node 46 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409550][46:1036:2806] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 2 2025-03-31T16:31:35.768247Z node 46 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][46:961:2806] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409550 } 2025-03-31T16:31:35.769240Z node 46 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409551][46:1037:2806] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3 2025-03-31T16:31:35.769361Z node 46 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][46:961:2806] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409551 } >> KqpIndexes::UpdateOnReadColumns [GOOD] |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-15 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-16 >> KqpIndexes::SecondaryIndexWithPrimaryKeySameComulns-UseSink [GOOD] >> KqpIndexLookupJoin::CheckAllKeyTypesCast [GOOD] >> KqpUniqueIndex::UpdateOnNullInComplexFk [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::UpdateOnReadColumns [GOOD] Test command err: Trying to start YDB, gRPC: 22106, MsgBus: 23600 2025-03-31T16:30:43.832240Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011956578865157:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:43.833079Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dd0/r3tmp/tmpA1bNiD/pdisk_1.dat 2025-03-31T16:30:44.222819Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:30:44.237532Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:30:44.237621Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:30:44.240524Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22106, node 1 2025-03-31T16:30:44.355239Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:30:44.355263Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:30:44.355269Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:30:44.355424Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23600 TClient 
is connected to server localhost:23600 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:30:44.968504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:44.999737Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:30:45.016081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:45.181272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:45.354310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:45.424135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:47.119340Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011973758736133:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:47.119499Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:47.474527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:30:47.503278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:30:47.531763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:30:47.568185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:30:47.599481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:30:47.668807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:30:47.709919Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011973758736647:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:47.710000Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:47.710040Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011973758736652:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:47.712941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:30:47.721649Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011973758736654:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:30:47.797791Z node 1 :TX_PROXY ERROR: Actor# [1:7488011973758736707:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:30:48.783283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:30:48.828355Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011956578865157:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:48.828536Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 30181, MsgBus: 4455 2025-03-31T16:30:51.504167Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488011989982515756:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:51.504227Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dd0/r3tmp/tmpdgdBPm/pdisk_1.dat 2025-03-31T16:30:51.592206Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30181, node 2 2025-03-31T16:30:51.634515Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:30:51.634636Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:30:51.655474Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:30:51.694146Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:30:51.694170Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:30:51.694178Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:30:51.694301Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4455 TClient is connected to server localhost:4455 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:30:52.156216Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:52.171875Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:52.259639Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:52.431585Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId ... 1474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:31:22.411547Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:31:22.458135Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:31:22.514589Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7488012124102618344:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:22.514673Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7488012124102618349:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:22.514676Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:22.519363Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:31:22.534383Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7488012124102618351:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:31:22.612552Z node 5 :TX_PROXY ERROR: Actor# [5:7488012124102618404:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:31:22.953919Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7488012102627779575:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:22.953999Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:31:23.889323Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:31:23.977796Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-31T16:31:24.028365Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 29162, MsgBus: 8850 2025-03-31T16:31:27.604475Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7488012146365809540:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:27.604542Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dd0/r3tmp/tmpZkAoHq/pdisk_1.dat 2025-03-31T16:31:27.773050Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:31:27.804178Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:27.804272Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:27.806554Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29162, node 6 2025-03-31T16:31:27.859819Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:27.859850Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:27.859861Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:27.860032Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8850 TClient is connected to server localhost:8850 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:31:28.469310Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:28.482635Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:28.563061Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:28.794733Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:28.881898Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:31.904565Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488012163545680503:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:31.904681Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:31.952289Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:31:31.996476Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:31:32.039443Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:31:32.083201Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:31:32.122689Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:31:32.193887Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:31:32.278463Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488012167840648318:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:32.278588Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:32.278692Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488012167840648323:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:32.282706Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:31:32.296704Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7488012167840648325:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:31:32.398793Z node 6 :TX_PROXY ERROR: Actor# [6:7488012167840648381:3454] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:31:32.604856Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7488012146365809540:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:32.604923Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:31:33.644245Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:31:33.719582Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-31T16:31:33.763683Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-21 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-22 >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDontExistGroupAttribute [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithInvalidRobotUserLoginBad >> LdapAuthProviderTest::LdapServerIsUnavailable [GOOD] >> LdapAuthProviderTest::LdapRequestWithEmptyHost >> KqpUserConstraint::KqpReadNull+UploadNull [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDefaultGroupAttributeGood [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDefaultGroupAttributeDisableNestedGroupsGood >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsDisableRequestToAD [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithRemovedUserCredentialsBad ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKString 2025-03-31 16:31:35,538 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-03-31 16:31:35,728 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. 
Process tree before termination: pid rss ref pdirt 468333 46.5M 46.6M 23.6M test_tool run_ut @/home/runner/.ya/build/build_root/0l5c/001a99/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/chunk28/testing_out_stuff/test_tool.args 468696 1.7G 1.7G 1.5G └─ ydb-core-tx-columnshard-ut_rw --trace-path-append /home/runner/.ya/build/build_root/0l5c/001a99/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/chu Test command err: 2025-03-31T16:21:36.914420Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T16:21:37.004541Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:21:37.028151Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:21:37.028479Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T16:21:37.035399Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:21:37.035578Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:21:37.035766Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:21:37.035874Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:21:37.035937Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:21:37.036011Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:21:37.036112Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:21:37.036225Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:21:37.036298Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:21:37.036366Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:21:37.036441Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:21:37.036506Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:21:37.055678Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T16:21:37.055847Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:21:37.055918Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:21:37.056158Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:21:37.056342Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:21:37.056452Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:21:37.056497Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:21:37.056594Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:21:37.056687Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:21:37.056731Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:21:37.056765Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:21:37.056985Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:21:37.057079Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:21:37.057125Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:21:37.057165Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T16:21:37.057277Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T16:21:37.057321Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:21:37.057348Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:21:37.057366Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T16:21:37.057442Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:21:37.057479Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:21:37.057510Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T16:21:37.057566Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:21:37.057600Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:21:37.057626Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T16:21:37.058029Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=48; 2025-03-31T16:21:37.058114Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=36; 2025-03-31T16:21:37.058188Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=28; 2025-03-31T16:21:37.058308Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=58; 2025-03-31T16:21:37.058484Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:21:37.058550Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:21:37.058596Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T16:21:37.058761Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:21:37.058811Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:21:37.058840Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T16:21:37.059022Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:21:37.059073Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:21:37.059127Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-31T16:21:37.059315Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:21:37.059350Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:21:37.059370Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-31T16:21:37.059455Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T16:21:37.059481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T16:21:37.059514Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
35.217277Z node 1 :S3_WRAPPER DEBUG: external_task_id=9c9e07a2-e4d11f0-a7b9e52b-9175e97e;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:82:255:226:2792:0];range=bytes=0-2791;object_exists=1; 2025-03-31T16:31:35.217359Z node 1 :S3_WRAPPER DEBUG: external_task_id=9c9e07a2-e4d11f0-a7b9e52b-9175e97e;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:84:255:18:2776:0];range=bytes=0-2775;object_exists=1; 2025-03-31T16:31:35.217442Z node 1 :S3_WRAPPER DEBUG: external_task_id=9c9e07a2-e4d11f0-a7b9e52b-9175e97e;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:84:255:63:2784:0];range=bytes=0-2783;object_exists=1; 2025-03-31T16:31:35.217562Z node 1 :S3_WRAPPER DEBUG: external_task_id=9c9e07a2-e4d11f0-a7b9e52b-9175e97e;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:82:255:16:2792:0];range=bytes=0-2791;object_exists=1; 2025-03-31T16:31:35.217699Z node 1 :S3_WRAPPER DEBUG: external_task_id=9c9e07a2-e4d11f0-a7b9e52b-9175e97e;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:81:255:73:2808:0];range=bytes=0-2807;object_exists=1; 2025-03-31T16:31:35.217790Z node 1 :S3_WRAPPER DEBUG: external_task_id=9c9e07a2-e4d11f0-a7b9e52b-9175e97e;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:82:255:118:2784:0];range=bytes=0-2783;object_exists=1; 2025-03-31T16:31:35.217879Z node 1 :S3_WRAPPER DEBUG: external_task_id=9c9e07a2-e4d11f0-a7b9e52b-9175e97e;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:81:255:4:2856:0];range=bytes=0-2855;object_exists=1; 2025-03-31T16:31:35.217961Z node 1 :S3_WRAPPER DEBUG: external_task_id=9c9e07a2-e4d11f0-a7b9e52b-9175e97e;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:81:255:113:2784:0];range=bytes=0-2783;object_exists=1; 2025-03-31T16:31:35.218047Z node 1 :S3_WRAPPER DEBUG: external_task_id=9c9e07a2-e4d11f0-a7b9e52b-9175e97e;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:82:255:145:2816:0];range=bytes=0-2815;object_exists=1; 2025-03-31T16:31:35.218143Z node 1 :S3_WRAPPER DEBUG: external_task_id=9c9e07a2-e4d11f0-a7b9e52b-9175e97e;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:82:255:193:2792:0];range=bytes=0-2791;object_exists=1; 2025-03-31T16:31:35.218228Z node 1 :S3_WRAPPER DEBUG: external_task_id=9c9e07a2-e4d11f0-a7b9e52b-9175e97e;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:81:255:42:8016:0];range=bytes=0-8015;object_exists=1; 2025-03-31T16:31:35.218331Z node 1 :S3_WRAPPER DEBUG: external_task_id=9c9e07a2-e4d11f0-a7b9e52b-9175e97e;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:82:255:142:2792:0];range=bytes=0-2791;object_exists=1; 2025-03-31T16:31:35.218419Z node 1 :S3_WRAPPER DEBUG: external_task_id=9c9e07a2-e4d11f0-a7b9e52b-9175e97e;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:82:255:120:2792:0];range=bytes=0-2791;object_exists=1; 2025-03-31T16:31:35.218507Z node 1 :S3_WRAPPER DEBUG: external_task_id=9c9e07a2-e4d11f0-a7b9e52b-9175e97e;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:81:255:217:2792:0];range=bytes=0-2791;object_exists=1; 2025-03-31T16:31:35.218593Z node 1 :S3_WRAPPER DEBUG: external_task_id=9c9e07a2-e4d11f0-a7b9e52b-9175e97e;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:81:255:176:2816:0];range=bytes=0-2815;object_exists=1; 2025-03-31T16:31:35.218684Z node 1 :S3_WRAPPER DEBUG: external_task_id=9c9e07a2-e4d11f0-a7b9e52b-9175e97e;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:81:255:158:2800:0];range=bytes=0-2799;object_exists=1; 2025-03-31T16:31:35.218773Z node 1 :S3_WRAPPER DEBUG: 
external_task_id=9c9e07a2-e4d11f0-a7b9e52b-9175e97e;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:84:255:54:2832:0];range=bytes=0-2831;object_exists=1; 2025-03-31T16:31:35.218860Z node 1 :S3_WRAPPER DEBUG: external_task_id=9c9e07a2-e4d11f0-a7b9e52b-9175e97e;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:82:255:77:2864:0];range=bytes=0-2863;object_exists=1; 2025-03-31T16:31:35.218945Z node 1 :S3_WRAPPER DEBUG: external_task_id=9c9e07a2-e4d11f0-a7b9e52b-9175e97e;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:81:255:49:2784:0];range=bytes=0-2783;object_exists=1; 2025-03-31T16:31:35.219108Z node 1 :S3_WRAPPER DEBUG: external_task_id=9c9e07a2-e4d11f0-a7b9e52b-9175e97e;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:81:255:125:2728:0];range=bytes=0-2727;object_exists=1; 2025-03-31T16:31:35.219219Z node 1 :S3_WRAPPER DEBUG: external_task_id=9c9e07a2-e4d11f0-a7b9e52b-9175e97e;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:82:255:191:2800:0];range=bytes=0-2799;object_exists=1; 2025-03-31T16:31:35.219309Z node 1 :S3_WRAPPER DEBUG: external_task_id=9c9e07a2-e4d11f0-a7b9e52b-9175e97e;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:82:255:167:2832:0];range=bytes=0-2831;object_exists=1; 2025-03-31T16:31:35.219381Z node 1 :S3_WRAPPER DEBUG: external_task_id=9c9e07a2-e4d11f0-a7b9e52b-9175e97e;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:84:255:150:2848:0];range=bytes=0-2847;object_exists=1; 2025-03-31T16:31:35.219443Z node 1 :S3_WRAPPER DEBUG: external_task_id=9c9e07a2-e4d11f0-a7b9e52b-9175e97e;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:84:255:16:2784:0];range=bytes=0-2783;object_exists=1; 2025-03-31T16:31:35.219504Z node 1 :S3_WRAPPER DEBUG: external_task_id=9c9e07a2-e4d11f0-a7b9e52b-9175e97e;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:84:255:44:2832:0];range=bytes=0-2831;object_exists=1; 2025-03-31T16:31:35.219589Z node 1 :S3_WRAPPER DEBUG: external_task_id=9c9e07a2-e4d11f0-a7b9e52b-9175e97e;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:81:255:194:2800:0];range=bytes=0-2799;object_exists=1; 2025-03-31T16:31:35.219689Z node 1 :S3_WRAPPER DEBUG: external_task_id=9c9e07a2-e4d11f0-a7b9e52b-9175e97e;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:84:255:23:2776:0];range=bytes=0-2775;object_exists=1; 2025-03-31T16:31:35.219773Z node 1 :S3_WRAPPER DEBUG: external_task_id=9c9e07a2-e4d11f0-a7b9e52b-9175e97e;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:81:255:152:2888:0];range=bytes=0-2887;object_exists=1; 2025-03-31T16:31:35.219858Z node 1 :S3_WRAPPER DEBUG: external_task_id=9c9e07a2-e4d11f0-a7b9e52b-9175e97e;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:81:255:46:2816:0];range=bytes=0-2815;object_exists=1; 2025-03-31T16:31:35.219940Z node 1 :S3_WRAPPER DEBUG: external_task_id=9c9e07a2-e4d11f0-a7b9e52b-9175e97e;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:82:255:1:2784:0];range=bytes=0-2783;object_exists=1; 2025-03-31T16:31:35.220026Z node 1 :S3_WRAPPER DEBUG: external_task_id=9c9e07a2-e4d11f0-a7b9e52b-9175e97e;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:84:255:138:2864:0];range=bytes=0-2863;object_exists=1; 2025-03-31T16:31:35.220137Z node 1 :S3_WRAPPER DEBUG: external_task_id=9c9e07a2-e4d11f0-a7b9e52b-9175e97e;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:84:255:141:2784:0];range=bytes=0-2783;object_exists=1; 2025-03-31T16:31:35.220228Z node 1 :S3_WRAPPER DEBUG: 
external_task_id=9c9e07a2-e4d11f0-a7b9e52b-9175e97e;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:82:255:73:2784:0];range=bytes=0-2783;object_exists=1; 2025-03-31T16:31:35.220324Z node 1 :S3_WRAPPER DEBUG: external_task_id=9c9e07a2-e4d11f0-a7b9e52b-9175e97e;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:81:255:233:2784:0];range=bytes=0-2783;object_exists=1; 2025-03-31T16:31:35.220413Z node 1 :S3_WRAPPER DEBUG: external_task_id=9c9e07a2-e4d11f0-a7b9e52b-9175e97e;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:83:255:1:2784:0];range=bytes=0-2783;object_exists=1; 2025-03-31T16:31:35.220501Z node 1 :S3_WRAPPER DEBUG: external_task_id=9c9e07a2-e4d11f0-a7b9e52b-9175e97e;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:84:255:86:2856:0];range=bytes=0-2855;object_exists=1; 2025-03-31T16:31:35.220611Z node 1 :S3_WRAPPER DEBUG: external_task_id=9c9e07a2-e4d11f0-a7b9e52b-9175e97e;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:81:255:179:2880:0];range=bytes=0-2879;object_exists=1; 2025-03-31T16:31:35.220703Z node 1 :S3_WRAPPER DEBUG: external_task_id=9c9e07a2-e4d11f0-a7b9e52b-9175e97e;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:82:255:208:2736:0];range=bytes=0-2735;object_exists=1; 2025-03-31T16:31:35.220796Z node 1 :S3_WRAPPER DEBUG: external_task_id=9c9e07a2-e4d11f0-a7b9e52b-9175e97e;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:84:255:132:2776:0];range=bytes=0-2775;object_exists=1; 2025-03-31T16:31:35.220882Z node 1 :S3_WRAPPER DEBUG: external_task_id=9c9e07a2-e4d11f0-a7b9e52b-9175e97e;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:81:255:43:2880:0];range=bytes=0-2879;object_exists=1; 2025-03-31T16:31:35.220971Z node 1 :S3_WRAPPER DEBUG: external_task_id=9c9e07a2-e4d11f0-a7b9e52b-9175e97e;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:81:255:24:2784:0];range=bytes=0-2783;object_exists=1; 2025-03-31T16:31:35.221062Z node 1 :S3_WRAPPER DEBUG: external_task_id=9c9e07a2-e4d11f0-a7b9e52b-9175e97e;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:81:255:181:2784:0];range=bytes=0-2783;object_exists=1; 2025-03-31T16:31:35.221155Z node 1 :S3_WRAPPER DEBUG: external_task_id=9c9e07a2-e4d11f0-a7b9e52b-9175e97e;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:81:255:59:2880:0];range=bytes=0-2879;object_exists=1; 2025-03-31T16:31:35.221238Z node 1 :S3_WRAPPER DEBUG: external_task_id=9c9e07a2-e4d11f0-a7b9e52b-9175e97e;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:82:255:151:2872:0];range=bytes=0-2871;object_exists=1; 2025-03-31T16:31:35.221321Z node 1 :S3_WRAPPER DEBUG: external_task_id=9c9e07a2-e4d11f0-a7b9e52b-9175e97e;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:82:255:192:2792:0];range=bytes=0-2791;object_exists=1; 2025-03-31T16:31:35.221405Z node 1 :S3_WRAPPER DEBUG: external_task_id=9c9e07a2-e4d11f0-a7b9e52b-9175e97e;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:84:255:137:2776:0];range=bytes=0-2775;object_exists=1; 2025-03-31T16:31:35.221489Z node 1 :S3_WRAPPER DEBUG: external_task_id=9c9e07a2-e4d11f0-a7b9e52b-9175e97e;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:82:255:157:2824:0];range=bytes=0-2823;object_exists=1; 2025-03-31T16:31:35.221570Z node 1 :S3_WRAPPER DEBUG: external_task_id=9c9e07a2-e4d11f0-a7b9e52b-9175e97e;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:82:255:179:2784:0];range=bytes=0-2783;object_exists=1; 2025-03-31T16:31:35.248744Z node 1 :TX_COLUMNSHARD DEBUG: 
external_task_id=9c9e07a2-e4d11f0-a7b9e52b-9175e97e;fline=actor.cpp:48;task=agents_waiting=2;additional_info=();; Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8330113388/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/0l5c/001a99/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/chunk28/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1749, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8330113388/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/0l5c/001a99/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/chunk28/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout",), {}) >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsFromAdLdapServer [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDefaultGroupAttributeGood >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDontExistGroupAttribute [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithInvalidRobotUserLoginBad ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::SecondaryIndexWithPrimaryKeySameComulns-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 6057, MsgBus: 18368 2025-03-31T16:31:06.929608Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012056666899773:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:06.929812Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d7f/r3tmp/tmpLl9r8W/pdisk_1.dat 2025-03-31T16:31:07.372127Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:07.372239Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:07.374237Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:31:07.379550Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6057, node 1 2025-03-31T16:31:07.475410Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:07.475432Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:07.475441Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:07.475553Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server 
localhost:18368 TClient is connected to server localhost:18368 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:31:08.199825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:08.220092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:31:08.372302Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-31T16:31:08.555230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:08.642592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:10.276074Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012073846770734:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:10.276170Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:10.599233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:31:10.641158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:31:10.671341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:31:10.705175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:31:10.778353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:31:10.810929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:31:10.864828Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012073846771251:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:10.864925Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:10.865445Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012073846771256:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:10.869155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:31:10.879998Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012073846771258:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:31:10.963417Z node 1 :TX_PROXY ERROR: Actor# [1:7488012073846771311:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:31:11.888952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:31:11.929894Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488012056666899773:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:11.929982Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 27172, MsgBus: 6196 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d7f/r3tmp/tmpm7Ve3q/pdisk_1.dat 2025-03-31T16:31:14.331205Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:31:14.337547Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27172, node 2 2025-03-31T16:31:14.376313Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:14.376436Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:14.381599Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:31:14.413707Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:14.413731Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:14.413738Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:14.413860Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6196 TClient is connected to server localhost:6196 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
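The 600-second failure in the traceback at the head of this section is raised by yatest's polling wait. Below is a minimal Python sketch of that pattern; only the names wait_for, truncate, MAX_MESSAGE_LEN, TimeoutError and ExecutionTimeoutError come from the traceback itself, while the bodies here are assumptions, not the real yatest.common.process code.

import time

MAX_MESSAGE_LEN = 1500  # assumption: the real limit lives in yatest.common.process


class TimeoutError(Exception):
    """Raised when a polled condition does not hold within the deadline."""


class ExecutionTimeoutError(Exception):
    """Wraps the timed-out process together with the original timeout message."""

    def __init__(self, process, message):
        super().__init__(message)
        self.process = process


def truncate(message, limit):
    # Keep the failure text bounded, mirroring truncate(message, MAX_MESSAGE_LEN)
    # in the traceback above.
    return message if len(message) <= limit else message[:limit] + "..."


def wait_for(check, timeout, fail_message, poll_interval=0.1):
    # Poll `check` until it returns True or `timeout` seconds elapse.
    deadline = time.time() + timeout
    while time.time() < deadline:
        if check():
            return
        time.sleep(poll_interval)
    raise TimeoutError(truncate(fail_message, MAX_MESSAGE_LEN))


def wait(process, timeout):
    # The outer wait() converts the low-level TimeoutError into an
    # ExecutionTimeoutError, producing the chained pair of exceptions
    # ("During handling of the above exception...") seen in the traceback.
    try:
        wait_for(
            lambda: process.poll() is not None,  # poll() as on subprocess.Popen
            timeout,
            "%d second(s) wait timeout has expired" % timeout,
        )
    except TimeoutError as e:
        raise ExecutionTimeoutError(process, str(e))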
2025-03-31T16:31:14.888075Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:14.894576Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:14.907502Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:31:14.990415Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:15.141818Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:15.231377Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation t ... es: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:17.865613Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012102532338378:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:17.869125Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:31:17.878388Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488012102532338380:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:31:17.977988Z node 2 :TX_PROXY ERROR: Actor# [2:7488012102532338433:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:31:18.952145Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:31:19.798494Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:19.838644Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:20.998398Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:21.782076Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:21.808372Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:21.852691Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:22.600712Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:22.684352Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:23.852718Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:24.245193Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 27088, MsgBus: 24820 2025-03-31T16:31:25.762081Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488012134784068626:2057];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:25.762200Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d7f/r3tmp/tmpuhoukw/pdisk_1.dat 2025-03-31T16:31:25.919266Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:31:25.956004Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:25.956083Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:25.957847Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27088, node 3 2025-03-31T16:31:26.003585Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:26.003612Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:26.003619Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:26.003789Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24820 TClient is connected to server localhost:24820 WaitRootIsUp 'Root'... 
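The recurring sequence a few records above — TPoolFetcherActor reporting NOT_FOUND for the default resource pool, TPoolCreatorActor scheduling a retry after "Transaction ... completed, doublechecking", then TX_PROXY failing with "path exist, request accepts it" — traces a benign startup race: the pool is created on first use, and a concurrent creator finds /Root/.metadata/workload_manager/pools/default already present. A hedged sketch of creating such a pool explicitly is below; CREATE RESOURCE POOL is YDB's workload-manager syntax, but the option names and values, and the execute_scheme() call, are assumptions rather than anything taken from this log.

# Hypothetical pre-creation of the pool the workload service looks up, so the
# first TPoolFetcherActor query would not return NOT_FOUND.
CREATE_DEFAULT_POOL = """
CREATE RESOURCE POOL default WITH (
    CONCURRENT_QUERY_LIMIT = 10,  -- assumed value
    QUEUE_SIZE = 100              -- assumed value
);
"""


def ensure_default_pool(session):
    # `session` is assumed to expose execute_scheme(), as table-service
    # sessions in the YDB Python SDK do.
    session.execute_scheme(CREATE_DEFAULT_POOL)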
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-31T16:31:26.469227Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:31:26.485400Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:26.545345Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:26.716452Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:26.795057Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:29.339723Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488012151963939586:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:29.339845Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:29.396108Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:31:29.448932Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:31:29.484988Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:31:29.518652Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:31:29.549642Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:31:29.610225Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:31:29.663410Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488012151963940095:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:29.663544Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:29.667374Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488012151963940100:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:29.674259Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:31:29.687111Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488012151963940102:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:31:29.771448Z node 3 :TX_PROXY ERROR: Actor# [3:7488012151963940157:3449] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:31:30.762277Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488012134784068626:2057];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:30.762350Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:31:30.790877Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:31:31.732924Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:31.759007Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:33.259735Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:33.273450Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:33.287537Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:34.023974Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:34.941442Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:34.962370Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:35.354491Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:35.369645Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:36.839229Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:37.162444Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:37.181828Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-3 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-10 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::CheckAllKeyTypesCast [GOOD] Test command err: Trying to start YDB, gRPC: 9763, MsgBus: 25528 2025-03-31T16:28:47.466709Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011457589192541:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:47.475591Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018bf/r3tmp/tmprFIFLm/pdisk_1.dat 2025-03-31T16:28:48.255481Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:28:48.276895Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:28:48.276991Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> 
Connecting 2025-03-31T16:28:48.283666Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9763, node 1 2025-03-31T16:28:48.644355Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:28:48.644383Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:28:48.644392Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:28:48.644501Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25528 TClient is connected to server localhost:25528 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:28:49.774090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:49.809602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:50.041797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:50.296549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:50.424615Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:52.455224Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011457589192541:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:52.455297Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:28:53.204024Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011483358997956:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:53.204150Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:53.632075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:28:53.676862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:28:53.726217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:28:53.780208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:28:53.818691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:28:53.901002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:28:54.019239Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011487653965774:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:54.019432Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:54.019878Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011487653965779:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:54.024310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:28:54.041243Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011487653965782:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:28:54.147884Z node 1 :TX_PROXY ERROR: Actor# [1:7488011487653965837:3460] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:28:55.576545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:28:55.634076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:28:55.683287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T16:28:55.777048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480
: Warning: Execution, code: 1060
:8:40: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001 Trying to start YDB, gRPC: 26164, MsgBus: 10972 2025-03-31T16:28:58.988694Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488011505099673355:2128];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:58.988738Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018bf/r3tmp/tmpbhAoBy/pdisk_1.dat 2025-03-31T16:28:59.254731Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:28:59.276415Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:28:59.276501Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:28:59.281569Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26164, node 2 2025-03-31T16:28:59.495296Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:28:59.495321Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:28:59.495330Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:28:59.495438Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10972 TClient is connected to server localhost:10972 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-31T16:29:00.381002Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, a ... in>:2:13: Error: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:3:36: Error: At function: EquiJoin
:3:36: Error: Cannot compare key columns (l.Key has type: Optional, r.Key has type: Optional) 2025-03-31T16:30:56.757462Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NTZlNTdhNGYtN2RmNjkwNzQtNmY4YjJhMWItZTA5Yjg0OTM=, ActorId: [2:7488011535164447008:2499], ActorState: ExecuteState, TraceId: 01jqpgnd6r21cm7fk86mdkgwzd, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-03-31T16:31:00.201182Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7488012029085698440:5930], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:3:36: Error: At function: EquiJoin
:3:36: Error: Cannot compare key columns (l.Key has type: Optional, r.Key has type: Optional) 2025-03-31T16:31:00.201432Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NTZlNTdhNGYtN2RmNjkwNzQtNmY4YjJhMWItZTA5Yjg0OTM=, ActorId: [2:7488011535164447008:2499], ActorState: ExecuteState, TraceId: 01jqpgnghv9g4b46j12f8wpywm, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-03-31T16:31:00.236425Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7488012029085698454:5936], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:3:36: Error: At function: EquiJoin
:3:36: Error: Cannot compare key columns (l.Key has type: Optional, r.Key has type: Optional) 2025-03-31T16:31:00.237021Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NTZlNTdhNGYtN2RmNjkwNzQtNmY4YjJhMWItZTA5Yjg0OTM=, ActorId: [2:7488011535164447008:2499], ActorState: ExecuteState, TraceId: 01jqpgngkj6ff5cftkhw0r0gn5, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-03-31T16:31:07.335911Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7488012059150470104:6087], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:3:36: Error: At function: EquiJoin
:3:36: Error: Cannot compare key columns (l.Key has type: Optional, r.Key has type: Optional) 2025-03-31T16:31:07.337624Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NTZlNTdhNGYtN2RmNjkwNzQtNmY4YjJhMWItZTA5Yjg0OTM=, ActorId: [2:7488011535164447008:2499], ActorState: ExecuteState, TraceId: 01jqpgnqhmbq7b79p1j6we8mfg, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-03-31T16:31:10.712720Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7488012072035372264:6164], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:3:36: Error: At function: EquiJoin
:3:36: Error: Cannot compare key columns (l.Key has type: Optional, r.Key has type: Optional) 2025-03-31T16:31:10.713698Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NTZlNTdhNGYtN2RmNjkwNzQtNmY4YjJhMWItZTA5Yjg0OTM=, ActorId: [2:7488011535164447008:2499], ActorState: ExecuteState, TraceId: 01jqpgntv7abhqfr884feqer3x, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-03-31T16:31:10.742685Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7488012072035372277:6170], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:3:36: Error: At function: EquiJoin
:3:36: Error: Cannot compare key columns (l.Key has type: Optional, r.Key has type: Optional) 2025-03-31T16:31:10.743236Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NTZlNTdhNGYtN2RmNjkwNzQtNmY4YjJhMWItZTA5Yjg0OTM=, ActorId: [2:7488011535164447008:2499], ActorState: ExecuteState, TraceId: 01jqpgntvz8htvy7ze7ka065a6, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-03-31T16:31:10.776731Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7488012072035372290:6176], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:3:36: Error: At function: EquiJoin
:3:36: Error: Cannot compare key columns (l.Key has type: Optional, r.Key has type: Optional) 2025-03-31T16:31:10.777127Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NTZlNTdhNGYtN2RmNjkwNzQtNmY4YjJhMWItZTA5Yjg0OTM=, ActorId: [2:7488011535164447008:2499], ActorState: ExecuteState, TraceId: 01jqpgntx03senfrf3a8d2249w, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-03-31T16:31:17.373413Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7488012102100143909:6323], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:3:34: Error: At function: EquiJoin
:3:34: Error: Cannot compare key columns (l.Key has type: Optional, r.Key has type: Optional) 2025-03-31T16:31:17.375345Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NTZlNTdhNGYtN2RmNjkwNzQtNmY4YjJhMWItZTA5Yjg0OTM=, ActorId: [2:7488011535164447008:2499], ActorState: ExecuteState, TraceId: 01jqpgp1b165bvr7nbkac70hze, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-03-31T16:31:17.414095Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7488012102100143923:6329], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:3:34: Error: At function: EquiJoin
:3:34: Error: Cannot compare key columns (l.Key has type: Optional, r.Key has type: Optional) 2025-03-31T16:31:17.415325Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NTZlNTdhNGYtN2RmNjkwNzQtNmY4YjJhMWItZTA5Yjg0OTM=, ActorId: [2:7488011535164447008:2499], ActorState: ExecuteState, TraceId: 01jqpgp1cbbmq6pejz0qsxp80n, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-03-31T16:31:20.981706Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7488012114985046103:6406], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:3:34: Error: At function: EquiJoin
:3:34: Error: Cannot compare key columns (l.Key has type: Optional, r.Key has type: Optional) 2025-03-31T16:31:20.983313Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NTZlNTdhNGYtN2RmNjkwNzQtNmY4YjJhMWItZTA5Yjg0OTM=, ActorId: [2:7488011535164447008:2499], ActorState: ExecuteState, TraceId: 01jqpgp4vv0bh8sjaed0q7xe27, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-03-31T16:31:21.032617Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7488012114985046119:6413], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:3:34: Error: At function: EquiJoin
:3:34: Error: Cannot compare key columns (l.Key has type: Optional, r.Key has type: Optional) 2025-03-31T16:31:21.032898Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NTZlNTdhNGYtN2RmNjkwNzQtNmY4YjJhMWItZTA5Yjg0OTM=, ActorId: [2:7488011535164447008:2499], ActorState: ExecuteState, TraceId: 01jqpgp4x47wswe1b26jsnq5rw, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-03-31T16:31:27.613299Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7488012145049817723:6560], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:3:34: Error: At function: EquiJoin
:3:34: Error: Cannot compare key columns (l.Key has type: Optional, r.Key has type: Optional) 2025-03-31T16:31:27.613581Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NTZlNTdhNGYtN2RmNjkwNzQtNmY4YjJhMWItZTA5Yjg0OTM=, ActorId: [2:7488011535164447008:2499], ActorState: ExecuteState, TraceId: 01jqpgpbb7087m19vgrx9qbqne, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-03-31T16:31:30.682046Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7488012157934719880:6636], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:3:34: Error: At function: EquiJoin
:3:34: Error: Cannot compare key columns (l.Key has type: Optional, r.Key has type: Optional) 2025-03-31T16:31:30.682382Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NTZlNTdhNGYtN2RmNjkwNzQtNmY4YjJhMWItZTA5Yjg0OTM=, ActorId: [2:7488011535164447008:2499], ActorState: ExecuteState, TraceId: 01jqpgpe9r057283j39mj0w4er, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-03-31T16:31:30.708055Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7488012157934719902:6642], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:3:34: Error: At function: EquiJoin
:3:34: Error: Cannot compare key columns (l.Key has type: Optional, r.Key has type: Optional) 2025-03-31T16:31:30.708297Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NTZlNTdhNGYtN2RmNjkwNzQtNmY4YjJhMWItZTA5Yjg0OTM=, ActorId: [2:7488011535164447008:2499], ActorState: ExecuteState, TraceId: 01jqpgpec042c1aq2th6ccvx0m, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-03-31T16:31:30.739169Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7488012157934719915:6648], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:3:34: Error: At function: EquiJoin
:3:34: Error: Cannot compare key columns (l.Key has type: Optional, r.Key has type: Optional) 2025-03-31T16:31:30.739421Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NTZlNTdhNGYtN2RmNjkwNzQtNmY4YjJhMWItZTA5Yjg0OTM=, ActorId: [2:7488011535164447008:2499], ActorState: ExecuteState, TraceId: 01jqpgpecw67313gssjhhj4gzx, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::UpdateOnNullInComplexFk [GOOD] Test command err: Trying to start YDB, gRPC: 30400, MsgBus: 24604 2025-03-31T16:31:09.149434Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012070073326929:2109];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:09.154215Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d70/r3tmp/tmpSHraDm/pdisk_1.dat 2025-03-31T16:31:09.589966Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:31:09.594725Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:09.594821Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:09.597770Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30400, node 1 2025-03-31T16:31:09.692686Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:09.692705Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:09.692712Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:09.692847Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24604 TClient is connected to server localhost:24604 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:31:10.177831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
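Every GENERIC_ERROR compilation in the long run above bottoms out in the same type-annotation failure: EquiJoin refuses join keys when the two Optional columns have incomparable inner types (the angle-bracketed inner types did not survive in this capture), and KqpIndexLookupJoin::CheckAllKeyTypesCast iterates over exactly such key-type pairs. An invented YQL pair showing the failing shape and the usual cast-based fix follows; table and column names, and the Uint64/Utf8 pairing, are illustrative only.

# Invented-for-illustration YQL reproducing this class of error: both keys are
# Optional, but over incomparable inner types (Uint64 vs Utf8 chosen here; the
# concrete pair behind each log entry is not recoverable from this capture).
BAD_JOIN = """
SELECT l.Value, r.Value
FROM left_table AS l
JOIN right_table AS r
  ON l.Key = r.Key;  -- l.Key: Optional<Uint64>, r.Key: Optional<Utf8>
"""

# Aligning the key types with an explicit CAST lets the join compile:
FIXED_JOIN = """
SELECT l.Value, r.Value
FROM left_table AS l
JOIN right_table AS r
  ON CAST(l.Key AS Utf8) = r.Key;
"""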
2025-03-31T16:31:10.195346Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:31:10.206414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:10.381491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:10.535888Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:10.615650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:12.442959Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012082958230530:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:12.443119Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:12.787316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:31:12.826739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:31:12.868304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:31:12.901135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:31:12.948412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:31:13.018940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:31:13.068527Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012087253198340:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:13.068607Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:13.068880Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012087253198345:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:13.072555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:31:13.083557Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012087253198347:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:31:13.162715Z node 1 :TX_PROXY ERROR: Actor# [1:7488012087253198399:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:31:14.006605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:14.179465Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488012070073326929:2109];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:14.180626Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:31:15.904734Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqpgnz5vdt97hebhez75yqbj, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWI3ODk3YjctMmMzOTliMS1jY2IyZTk5YS0yYWMyNDdiOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-03-31T16:31:15.912402Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZWI3ODk3YjctMmMzOTliMS1jY2IyZTk5YS0yYWMyNDdiOA==, ActorId: [1:7488012091548166765:2555], ActorState: ExecuteState, TraceId: 01jqpgnz5vdt97hebhez75yqbj, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 9888, MsgBus: 5947 2025-03-31T16:31:17.588367Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012102422156948:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:17.588413Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d70/r3tmp/tmpuMT973/pdisk_1.dat 2025-03-31T16:31:17.678863Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9888, node 2 2025-03-31T16:31:17.715507Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:17.715606Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:17.717809Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:31:17.742135Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:17.742160Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:17.742168Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:17.742290Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5947 TClient is connected to server localhost:5947 WaitRootIsUp 'Root'... 
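The "TKqpLiteralExecuter, TKqpEnsure failed" executer error above is the runtime precondition check that KqpUniqueIndex::UpdateOnNullInComplexFk appears designed to trip: a write that would duplicate a unique secondary-index key is rejected, and the session answers with an error QueryResponse instead of committing. A hypothetical schema and violating write of the shape such a test exercises is sketched below; all names and types are invented, and the index clause is assumed to follow YDB's GLOBAL UNIQUE SYNC syntax.

# Hypothetical table with a unique secondary index over a composite key.
SCHEMA = """
CREATE TABLE test_table (
    Key Uint64,
    Fk1 Uint64,
    Fk2 Utf8,
    Value Utf8,
    PRIMARY KEY (Key),
    INDEX uniq_fk GLOBAL UNIQUE SYNC ON (Fk1, Fk2)
);
"""

# The second row reuses the (Fk1, Fk2) pair of the first, so the uniqueness
# check fails at execution time -- the same precondition mechanism the log
# reports as TKqpEnsure failed.
VIOLATING_UPSERT = """
UPSERT INTO test_table (Key, Fk1, Fk2, Value) VALUES
    (1u, 10u, "a", "first"),
    (2u, 10u, "a", "second");
"""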
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:31:18.167375Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:18.187687Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:18.263450Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:18.395508Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:18.456616Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:20.884069Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012115307060589:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:20.884162Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:20.928018Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:31:20.982261Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:31:21.014984Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:31:21.052207Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:31:21.128912Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:31:21.186658Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:31:21.239317Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012119602028400:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:21.239417Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:21.239707Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012119602028405:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:21.243401Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:31:21.255345Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488012119602028407:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:31:21.350738Z node 2 :TX_PROXY ERROR: Actor# [2:7488012119602028463:3454] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:31:22.162343Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:22.591139Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488012102422156948:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:22.591208Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:31:27.900379Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqpgpaw466q6g2m6w0h2cd1d, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDM5ZThjZjQtNmQwYmMzNDktNzI0YmI1N2YtOWY4YzJiNjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-03-31T16:31:27.900582Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDM5ZThjZjQtNmQwYmMzNDktNzI0YmI1N2YtOWY4YzJiNjU=, ActorId: [2:7488012123896996856:2554], ActorState: ExecuteState, TraceId: 01jqpgpaw466q6g2m6w0h2cd1d, Create QueryResponse for error on request, msg: 2025-03-31T16:31:32.549334Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037927 not found 2025-03-31T16:31:32.549836Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037924 not found 2025-03-31T16:31:32.549861Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037933 not found 2025-03-31T16:31:32.553078Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037934 not found 2025-03-31T16:31:32.553110Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037926 not found 2025-03-31T16:31:32.553125Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037928 not found 2025-03-31T16:31:32.553172Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037929 not found 2025-03-31T16:31:32.553297Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037923 not found 2025-03-31T16:31:32.669776Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:31:32.669803Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:31:35.005264Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqpgphez3dqtk1y24475hvtf, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDM5ZThjZjQtNmQwYmMzNDktNzI0YmI1N2YtOWY4YzJiNjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 
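The KqpUserConstraint::KqpReadNull+UploadNull block that follows evidently exercises read-side NOT NULL validation: a NULL is uploaded into a NOT NULL column and the subsequent read is expected to fail with KIKIMR_CONSTRAINT_VIOLATION, code 2012, exactly as logged below. A minimal YQL sketch of that scenario, with table and column names assumed for illustration:

CREATE TABLE `/Root/Tmp` (
    Key Uint64 NOT NULL,
    Value String NOT NULL,  -- the test injects a NULL here, bypassing write-side checks
    PRIMARY KEY (Key)
);

-- Reading the poisoned column should abort the query with:
-- "Read from column index 1: got NULL from NOT NULL column, code: 2012"
SELECT Key, Value FROM `/Root/Tmp`;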
2025-03-31T16:31:35.005528Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDM5ZThjZjQtNmQwYmMzNDktNzI0YmI1N2YtOWY4YzJiNjU=, ActorId: [2:7488012123896996856:2554], ActorState: ExecuteState, TraceId: 01jqpgphez3dqtk1y24475hvtf, Create QueryResponse for error on request, msg: 2025-03-31T16:31:35.992786Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqpgpjn7amf2w56n3161c1r3, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDM5ZThjZjQtNmQwYmMzNDktNzI0YmI1N2YtOWY4YzJiNjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-03-31T16:31:35.993135Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDM5ZThjZjQtNmQwYmMzNDktNzI0YmI1N2YtOWY4YzJiNjU=, ActorId: [2:7488012123896996856:2554], ActorState: ExecuteState, TraceId: 01jqpgpjn7amf2w56n3161c1r3, Create QueryResponse for error on request, msg: 2025-03-31T16:31:37.515222Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037932 not found 2025-03-31T16:31:37.518057Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037931 not found 2025-03-31T16:31:37.518096Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037921 not found 2025-03-31T16:31:37.519833Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037922 not found 2025-03-31T16:31:37.524819Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037920 not found 2025-03-31T16:31:37.526985Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqpgpmzfdpyrfxh2f4wne366, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDM5ZThjZjQtNmQwYmMzNDktNzI0YmI1N2YtOWY4YzJiNjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-03-31T16:31:37.527355Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDM5ZThjZjQtNmQwYmMzNDktNzI0YmI1N2YtOWY4YzJiNjU=, ActorId: [2:7488012123896996856:2554], ActorState: ExecuteState, TraceId: 01jqpgpmzfdpyrfxh2f4wne366, Create QueryResponse for error on request, msg: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest >> KqpUserConstraint::KqpReadNull+UploadNull [GOOD] Test command err: 2025-03-31T16:31:36.095750Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:31:36.096102Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:31:36.096332Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000f1d/r3tmp/tmpeVw88D/pdisk_1.dat 2025-03-31T16:31:36.694258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:31:36.752629Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:31:36.805962Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:36.806928Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:36.819997Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:31:36.922788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:31:37.415232Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:865:2713], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:37.415377Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:874:2718], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:37.415450Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:37.426132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:31:37.579396Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:879:2721], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:31:37.657074Z node 1 :TX_PROXY ERROR: Actor# [1:961:2772] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:31:38.475180Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqpgpmy4bxbv2ewwft98k22n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjMxNmVjOWItOWRkZTAzZmUtYzg5MzZhNjMtOWU2N2M4NTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:38.523317Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:992:2793], TxId: 281474976715660, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=ZjMxNmVjOWItOWRkZTAzZmUtYzg5MzZhNjMtOWU2N2M4NTY=. TraceId : 01jqpgpmy4bxbv2ewwft98k22n. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Source[0] fatal error: {
: Fatal: Read from column index 1: got NULL from NOT NULL column, code: 2012 } 2025-03-31T16:31:38.526552Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:992:2793], TxId: 281474976715660, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=ZjMxNmVjOWItOWRkZTAzZmUtYzg5MzZhNjMtOWU2N2M4NTY=. TraceId : 01jqpgpmy4bxbv2ewwft98k22n. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. InternalError: INTERNAL_ERROR KIKIMR_CONSTRAINT_VIOLATION: {
: Fatal: Read from column index 1: got NULL from NOT NULL column, code: 2012 }. 2025-03-31T16:31:38.553491Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:993:2794], TxId: 281474976715660, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=ZjMxNmVjOWItOWRkZTAzZmUtYzg5MzZhNjMtOWU2N2M4NTY=. TraceId : 01jqpgpmy4bxbv2ewwft98k22n. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. InternalError: INTERNAL_ERROR DEFAULT_ERROR: {
: Error: Terminate execution }. 2025-03-31T16:31:38.563450Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjMxNmVjOWItOWRkZTAzZmUtYzg5MzZhNjMtOWU2N2M4NTY=, ActorId: [1:863:2711], ActorState: ExecuteState, TraceId: 01jqpgpmy4bxbv2ewwft98k22n, Create QueryResponse for error on request, msg: 2025-03-31T16:31:38.568677Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqpgpmy4bxbv2ewwft98k22n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjMxNmVjOWItOWRkZTAzZmUtYzg5MzZhNjMtOWU2N2M4NTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root |94.2%| [TA] $(B)/ydb/core/kqp/ut/data/test-results/unittest/{meta.json ... results_accumulator.log} |94.2%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/data/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpIndexes::UniqAndNoUniqSecondaryIndexWithCover [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-9 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-10 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-33 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-34 >> TTopicWriterTests::TestEnterMessage_OnlyDelimiters [GOOD] >> TTopicWriterTests::TestEnterMessage_SomeBinaryData [GOOD] >> TTopicReaderTests::TestRun_ReadOneMessage >> TTopicWriterTests::TestEnterMessage_EmptyInput [GOOD] >> TTopicWriterTests::TestEnterMessage_No_Base64_Transform [GOOD] >> TTopicWriterTests::TestEnterMessage_ZeroSymbol_Delimited [GOOD] >> TTopicWriterTests::TestEnterMessage_With_Base64_Transform_NewlineDelimited [GOOD] >> TTopicReaderTests::TestRun_ReadMoreMessagesThanLimit_Without_Wait_NewlineDelimited >> TTopicWriterTests::TestTopicWriterParams_No_Delimiter [GOOD] >> TTopicWriterTests::TestTopicWriterParams_InvalidDelimiter [GOOD] >> TTopicWriterTests::TestEnterMessage_1KiB_No_Delimiter [GOOD] >> TTopicWriterTests::TestEnterMessage_Custom_Delimiter_Delimited [GOOD] >> TTopicWriterTests::TestEnterMessage_With_Base64_Transform_Invalid_Encode [GOOD] >> TTopicWriterTests::TestEnterMessage_With_Base64_Transform [GOOD] >> ExternalBlobsMultipleChannels::SingleChannel [GOOD] |94.2%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_SomeBinaryData [GOOD] >> KqpIndexes::PrefixedVectorIndexOrderByCosineSimilarityNullableLevel1 [GOOD] >> KqpIndexes::PrefixedVectorIndexOrderByCosineSimilarityNullableLevel2 >> TTopicWriterTests::TestTopicWriterParams_Format_NewlineDelimited [GOOD] >> TTopicWriterTests::TestTopicWriterParams_Format_Concatenated [GOOD] |94.2%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_No_Base64_Transform [GOOD] |94.2%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestTopicWriterParams_InvalidDelimiter [GOOD] |94.2%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_With_Base64_Transform_NewlineDelimited [GOOD] |94.2%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_Custom_Delimiter_Delimited [GOOD] |94.2%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_With_Base64_Transform [GOOD] ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::UniqAndNoUniqSecondaryIndexWithCover [GOOD] Test command err: Trying to start YDB, gRPC: 8039, MsgBus: 62814 2025-03-31T16:31:18.016078Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012107983195569:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:18.016143Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d32/r3tmp/tmpsgq9nW/pdisk_1.dat 2025-03-31T16:31:18.418761Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:31:18.442120Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:18.442271Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:18.445041Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8039, node 1 2025-03-31T16:31:18.558211Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:18.558234Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:18.558241Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:18.558376Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62814 TClient is connected to server localhost:62814 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:31:19.174817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:19.196903Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:19.213597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:31:19.393283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
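This block belongs to KqpIndexes::UniqAndNoUniqSecondaryIndexWithCover, which combines a unique and a non-unique covered secondary index on one table; the KIKIMR_CONSTRAINT_VIOLATION "Conflict with existing key., code: 2012" entries further down are the expected rejections of rows duplicating a uniquely indexed value. A rough YQL sketch of the shape under test — table, index, and column names are assumed, as is the inline GLOBAL UNIQUE SYNC / COVER index syntax:

CREATE TABLE `/Root/TestTable` (
    Key Uint64,
    Value String,
    Payload String,
    PRIMARY KEY (Key),
    INDEX uniq_value GLOBAL UNIQUE SYNC ON (Value) COVER (Payload),  -- unique, covered
    INDEX by_payload GLOBAL SYNC ON (Payload) COVER (Value)          -- non-unique, covered
);

-- The second insert collides on the unique index and should fail with
-- PRECONDITION_FAILED: "Conflict with existing key., code: 2012"
INSERT INTO `/Root/TestTable` (Key, Value, Payload) VALUES (1u, "dup", "a");
INSERT INTO `/Root/TestTable` (Key, Value, Payload) VALUES (2u, "dup", "b");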
2025-03-31T16:31:19.560565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:19.643245Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:21.307724Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012120868099224:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:21.307848Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:21.662642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:31:21.693396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:31:21.726149Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:31:21.752769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:31:21.793735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:31:21.830039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:31:21.922421Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012120868099739:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:21.922495Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:21.922807Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012120868099744:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:21.926546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:31:21.942428Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012120868099746:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:31:22.017710Z node 1 :TX_PROXY ERROR: Actor# [1:7488012125163067096:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:31:22.974075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:31:23.016070Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488012107983195569:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:23.016172Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:31:24.063773Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:24.831051Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:24.847956Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:25.602313Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqpgp8nk55eqn5h1tvb82dfc, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzA0OTkwMzMtNmQ2Zjk4NmItNTUxZTNmYi0zMWZmODFiNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-03-31T16:31:25.621832Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzA0OTkwMzMtNmQ2Zjk4NmItNTUxZTNmYi0zMWZmODFiNA==, ActorId: [1:7488012125163067389:2496], ActorState: ExecuteState, TraceId: 01jqpgp8nk55eqn5h1tvb82dfc, Create QueryResponse for error on request, msg: 2025-03-31T16:31:26.433946Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:26.445729Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:27.295336Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqpgpa7j59jfydd7p0gm87qy, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzA0OTkwMzMtNmQ2Zjk4NmItNTUxZTNmYi0zMWZmODFiNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-03-31T16:31:27.295568Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzA0OTkwMzMtNmQ2Zjk4NmItNTUxZTNmYi0zMWZmODFiNA==, ActorId: [1:7488012125163067389:2496], ActorState: ExecuteState, TraceId: 01jqpgpa7j59jfydd7p0gm87qy, Create QueryResponse for error on request, msg: 2025-03-31T16:31:27.310852Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:27.355335Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:27.647994Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488012146637904707:2644], TxId: 281474976710701, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=MzA0OTkwMzMtNmQ2Zjk4NmItNTUxZTNmYi0zMWZmODFiNA==. CustomerSuppliedId : . TraceId : 01jqpgpb3z0rtwqddmt3s22tpr. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-03-31T16:31:27.648310Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488012146637904709:2645], TxId: 281474976710701, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=MzA0OTkwMzMtNmQ2Zjk4NmItNTUxZTNmYi0zMWZmODFiNA==. CustomerSuppliedId : . TraceId : 01jqpgpb3z0rtwqddmt3s22tpr. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7488012146637904704:2496], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-03-31T16:31:27.648660Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzA0OTkwMzMtNmQ2Zjk4NmItNTUxZTNmYi0zMWZmODFiNA==, ActorId: [1:7488012125163067389:2496], ActorState: ExecuteState, TraceId: 01jqpgpb3z0rtwqddmt3s22tpr, Create QueryResponse for error on request, msg: 2025-03-31T16:31:28.479248Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqpgpbd7a946pfv8rj7f9phw, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzA0OTkwMzMtNmQ2Zjk4NmItNTUxZTNmYi0zMWZmODFiNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralE ... : [0:0:0] TxId: 0. Ctx: { TraceId: 01jqpgpc752ss6qst194576y5w, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzA0OTkwMzMtNmQ2Zjk4NmItNTUxZTNmYi0zMWZmODFiNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-03-31T16:31:29.678027Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzA0OTkwMzMtNmQ2Zjk4NmItNTUxZTNmYi0zMWZmODFiNA==, ActorId: [1:7488012125163067389:2496], ActorState: ExecuteState, TraceId: 01jqpgpc752ss6qst194576y5w, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 21905, MsgBus: 8602 2025-03-31T16:31:30.663627Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012158755958825:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:30.663717Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d32/r3tmp/tmpPL5X9W/pdisk_1.dat 2025-03-31T16:31:30.782453Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:31:30.799443Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:30.799540Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:30.804159Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21905, node 2 2025-03-31T16:31:30.841991Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:30.842012Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:30.842022Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:30.842153Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8602 TClient is connected to server localhost:8602 WaitRootIsUp 'Root'... 
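The recurring startup sequence above — "Resource pool default not found or you don't have access permissions" (NOT_FOUND), a "doublechecking" retry, then a TX_PROXY error noting that /Root/.metadata/workload_manager/pools/default already exists and that the request accepts this — is the workload manager bootstrapping its built-in default pool, with several actors racing to create the same path; the sequence repeats below for node 2 and the tests still pass. For comparison, a pool can be declared explicitly; a sketch assuming the YQL resource-pool DDL, with setting names and values illustrative only:

CREATE RESOURCE POOL my_pool WITH (
    CONCURRENT_QUERY_LIMIT = 10,  -- assumed setting name
    QUEUE_SIZE = 100              -- assumed setting name
);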
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-31T16:31:31.316633Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:31:31.340952Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:31:31.421503Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:31.599912Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-31T16:31:31.693720Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:34.005815Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012175935829794:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:34.005920Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:34.047425Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:31:34.076562Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:31:34.102012Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:31:34.131955Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:31:34.162026Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:31:34.229870Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:31:34.272743Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012175935830308:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:34.272839Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:34.272911Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012175935830313:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:34.276424Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:31:34.286037Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488012175935830315:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:31:34.361015Z node 2 :TX_PROXY ERROR: Actor# [2:7488012175935830368:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:31:35.228596Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:31:35.663931Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488012158755958825:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:35.664003Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:31:37.296913Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqpgpkx87w69nqs1ajanzh90, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MzU1MDBmYTctY2U1NmNkMTAtZmViZjA1MDEtNDFiZTY4N2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-03-31T16:31:37.297229Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MzU1MDBmYTctY2U1NmNkMTAtZmViZjA1MDEtNDFiZTY4N2U=, ActorId: [2:7488012180230797955:2496], ActorState: ExecuteState, TraceId: 01jqpgpkx87w69nqs1ajanzh90, Create QueryResponse for error on request, msg: 2025-03-31T16:31:37.767172Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7488012188820733144:2582], TxId: 281474976715677, task: 1. Ctx: { TraceId : 01jqpgpmts3fp102jaykb81244. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MzU1MDBmYTctY2U1NmNkMTAtZmViZjA1MDEtNDFiZTY4N2U=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-03-31T16:31:37.767427Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7488012188820733146:2583], TxId: 281474976715677, task: 2. Ctx: { TraceId : 01jqpgpmts3fp102jaykb81244. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MzU1MDBmYTctY2U1NmNkMTAtZmViZjA1MDEtNDFiZTY4N2U=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7488012188820733141:2496], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-03-31T16:31:37.767637Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MzU1MDBmYTctY2U1NmNkMTAtZmViZjA1MDEtNDFiZTY4N2U=, ActorId: [2:7488012180230797955:2496], ActorState: ExecuteState, TraceId: 01jqpgpmts3fp102jaykb81244, Create QueryResponse for error on request, msg: 2025-03-31T16:31:38.550241Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqpgpn9f3171d2d07hngxkt5, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MzU1MDBmYTctY2U1NmNkMTAtZmViZjA1MDEtNDFiZTY4N2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-03-31T16:31:38.550488Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MzU1MDBmYTctY2U1NmNkMTAtZmViZjA1MDEtNDFiZTY4N2U=, ActorId: [2:7488012180230797955:2496], ActorState: ExecuteState, TraceId: 01jqpgpn9f3171d2d07hngxkt5, Create QueryResponse for error on request, msg: 2025-03-31T16:31:38.572134Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:38.585421Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:38.600304Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:39.652214Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:39.675031Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:31:39.697076Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-57 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-58 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-39 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-40 |94.2%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestTopicWriterParams_Format_Concatenated [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDefaultGroupAttributeGood [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDefaultGroupAttributeDisableNestedGroupsGood >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithInvalidRobotUserLoginBad [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithInvalidRobotUserPasswordBad >> ExternalBlobsMultipleChannels::WithCompaction [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDefaultGroupAttributeDisableNestedGroupsGood [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDefaultGroupAttributeGoodUseListOfHosts >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithInvalidRobotUserLoginBad [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithInvalidRobotUserPasswordBad >> LdapAuthProviderTest::LdapRequestWithEmptyHost [GOOD] >> LdapAuthProviderTest::LdapRequestWithEmptyBaseDn >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithRemovedUserCredentialsBad [GOOD] >> LdapAuthProviderTest_StartTls::LdapRefreshGroupsInfoGood ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::SingleChannel [GOOD] Test command err: 2025-03-31T16:31:32.536818Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:31:32.537100Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:31:32.537308Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001616/r3tmp/tmp40TRZ1/pdisk_1.dat 2025-03-31T16:31:33.026135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:31:33.069863Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:31:33.113047Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:33.114172Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:33.127802Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:31:33.235963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:31:33.611757Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:742:2623], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:33.612482Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:752:2628], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:33.612583Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:33.621005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:31:33.779113Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:756:2631], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:31:33.872735Z node 1 :TX_PROXY ERROR: Actor# [1:830:2674] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:31:34.562657Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqpgph776zb14ksqwjdh1zwf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTM0ZDI3MTQtYTY1Y2I3ZjAtN2EyOGYzMTMtYTE5ZmU3YjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:34.672739Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqpgpj6r72kvvsybg5dxmgq1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTIwOGIwNzgtY2ZlMTMxZTAtZGQyMTNhNjktZGQ1MzlkZmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:34.734958Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqpgpj8q2n93zmk699na7vhe, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGMwYjk0MWYtYzQxNzkyZGUtYTZhYmUwYjMtMTM2N2NjMDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:34.796527Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqpgpjan8q130zf100t259wr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzI5YzM5OGEtNGVjNjA4YmUtZDJlNGE4ZTgtMTBjNTYyNTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:34.858590Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jqpgpjckfn2en146vsfpbqpd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2IwYjk4NTAtMWNiMzZhNjktOWY3NWIxZjYtZTIwZDYwOTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:34.920607Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jqpgpjeh5j20ft7ywzjh941z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTAzMzRmNmMtMzIwNjBiOWQtYjllZDcxMzktNTVmOTE0ZmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:34.984584Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jqpgpjgf9kwc1bhb9h0dbdh9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Njg3ZTU4ZWMtYmI3NDZlYjYtZGUyN2ZiYzktYzE0ZGIzMmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:35.043287Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jqpgpjjff4zmngpchmdbjxvy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWM3ZDRlYjAtMzY5MTFmNGItZTc2MzM3MjQtM2YxN2QyOGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:35.100941Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jqpgpjma65qccp861424wdtp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDRmNmMzMzYtZDdkNDUwZDMtNzRmNGFiMTItNzZhOWEzN2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:35.152854Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715669. 
Ctx: { TraceId: 01jqpgpjp2229x6yjn3kqxqa7e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzcyZWVkNTUtZjZiYzYwNTMtY2ViN2UyYmQtZWNmM2VkM2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:35.208732Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01jqpgpjqnc8pzgsk8tdq6nweg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmQ3N2JmMWUtYjAxYjQ4MTQtNGM2MGRhMjEtMzYyNjc3Yjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:35.263127Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01jqpgpjsdb8rasa7yahc0bcq8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmEyODc3YWItMjE4ZTg1Y2YtYzFlZDA1NGYtYjRjZTI2MTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:35.309385Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jqpgpjv5aj7zfxbp9n6f5gje, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2JkNjAwMzUtM2EwNzc1Ni01OTdiYjM5My1mZTZjNDNhNw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:35.358383Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01jqpgpjwk1eva0vnatdbh426a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzY2MTVkY2QtYzM1NmFhOTUtOTczZDMyZTEtNzAzOGI4N2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:35.415267Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01jqpgpjy34wftqyvfp1wmbeh7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTQzNzcxNzktZjc0ZDFjYWQtNWYxYjQ4ZjMtODcxYWVjMDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:35.534240Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01jqpgpk10cb17r9nymntjq3ph, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTdlOWVkLTY5MzljODVmLTkwYjc2MTQ2LWQzYWFhN2Ux, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:35.618400Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715676. Ctx: { TraceId: 01jqpgpk3pdkebvg1ed9g4hj78, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Mjg1YWNjYjktODBiMzY2ZGItY2JjMzY4YTMtNDUzZjhmYjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:35.701207Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715677. Ctx: { TraceId: 01jqpgpk6adjdxncdtzyycqbc5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGJjODVmZDctY2Y2ZDc4MGItZDM5OGZhODgtYzZhNTg5NWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:35.776495Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715678. Ctx: { TraceId: 01jqpgpk8xa947brk6z8fjayzp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWYxMDU3ZDAtZjI1ZjllZmUtZDA2OGNhNWYtMmVlZDhkNw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:35.825338Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715679. 
Ctx: { TraceId: 01jqpgpkb59gj2h7exmf393gjy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWU3NGU4ZTYtMjU1YzQ1ZmYtNGRiNjU1ODEtNTcwMmI1Nzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:35.886979Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715680. Ctx: { TraceId: 01jqpgpkcqabxge7ttk2xjn5kn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGUzY2Q3OS0xZmU2MmI2Yi0yOTA1MGE4YS0yZjMwZDRiZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:35.952217Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715681. Ctx: { TraceId: 01jqpgpkepb2w8j1gjpzx49fxg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWIyMGZlNDctYWRkM2FhMjItODk1ZWQzMTQtYTA3MDdjNjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:36.011442Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715682. Ctx: { TraceId: 01jqpgpkgp870bse1eersxazs8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmJlNmIxMjItZjhkODViOWYtZjk4OGRiMmQtMzgwMjFjNmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:36.072154Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715683. Ctx: { TraceId: 01jqpgpkjg66pjpgz3cdy8xgc2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTc0YmRmZGYtZWJlZWM1Yy04MDAzNTNhNS05OWIwM2Nk, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:36.135546Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715684. Ctx: { TraceId: 01jqpgpkmee8dn1qh4e3h30crq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2Q4YmRmYzgtZThiMjVkM2MtMTg1NDdlMDQtOGRlYTkyMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:36.192864Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715685. Ctx: { TraceId: 01jqpgpkpeetv5fczx4s2eva80, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmVjOWNhYjUtYjUzNzAxZDItMTAwNGFiNjItZDVlNDRkNjY=, Cu ... 645817Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715725. Ctx: { TraceId: 01jqpgpp3acamxrd4cc43b5nc1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmRiZjlhM2MtOTYzZTAyNzQtZTc3MmE2NTctYWJjMTA1Mzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:38.697572Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715726. Ctx: { TraceId: 01jqpgpp4v5k2v9txywpznhfzn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWExNmQ3N2ItOGUyYjliOTMtMzU5YzgxZTUtZmQxODA3MzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:38.753240Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715727. Ctx: { TraceId: 01jqpgpp6fb355syxa09kvhsq4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzFmZGYzMzYtNjYzYzkxZGQtMTUyM2M4MGUtZjcwNmY3ZTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:38.806858Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715728. Ctx: { TraceId: 01jqpgpp889t99wh7mtc2zw3hf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGI0MmI2NTQtNTFhYjcxOTItYzlmOTE5Y2QtOGM5MGI0MDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-31T16:31:38.870928Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715729. Ctx: { TraceId: 01jqpgpp9y89bx75sj3azcy3ez, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTI4OWUxNTQtNWQzYjMzNmItZjQxZWYwZjgtZmNmYWJhMDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:38.957010Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715730. Ctx: { TraceId: 01jqpgppcp6pq21dbyny9h34bn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmQzYTg3NWQtZTliZjExMjYtMmYxMDhmODItYTkxOWJlNmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:39.017932Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715731. Ctx: { TraceId: 01jqpgppek7kppprwatm58nggk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWY1ZjQwM2ItMWU4MTk3OGQtNDliNDU1ZDctZDlkNTA0NzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:39.077444Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715732. Ctx: { TraceId: 01jqpgppggfmd9wg8k1myp1cab, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjA2ZTM5NGEtMmQwYzI5ZDgtMzcxOGRmNjQtMjk3OTQ0ZWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:39.140045Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715733. Ctx: { TraceId: 01jqpgppjd7hqevk6wyb1h8242, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmM0Yjc3MTYtZTEzNjVjNS00Y2VkZDRhZi0zYzU5MTE4Zg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:39.200087Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715734. Ctx: { TraceId: 01jqpgppm91f17vwp0s8hbrhwh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWRiZjQ2YmItZmQ5YmZkNzktNTk2MmYyM2YtY2E4OGE0M2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:39.261775Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715735. Ctx: { TraceId: 01jqpgppp7b1fmdt5ex0aexx74, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTE2OTg0NzEtNDI4ZGIzNjQtYjU3ODgyM2QtNmE3YTlhY2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:39.322144Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715736. Ctx: { TraceId: 01jqpgppr4anztxrpv2mkahvm0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjgxNGYzOTQtYzI4YmFmYzYtMTg3OGRhMmQtODYzZDEzZjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:39.384978Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715737. Ctx: { TraceId: 01jqpgppt1199ck7ztm85pqdz8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTI0ODA3ZjgtZjU0ZDU4YjktMTYzYTczN2ItNTBkM2JkMjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:39.444739Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715738. Ctx: { TraceId: 01jqpgppw03tx1k11dcp229z5s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTcyZDIxMzEtYWVmNGVjZWItMjMyMTk1ZWEtZTFlZTJlNGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:39.503469Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715739. 
Ctx: { TraceId: 01jqpgppxw1nepff0xn24pvkyc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjI5NjcwMWUtNGMwZDA0OTMtMWJlMTllYjgtZGExNWE3ZDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:39.557720Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715740. Ctx: { TraceId: 01jqpgppzn0t1jz3aw0zgmf6sf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWY0ZTczNzEtMzk0ODdjMDUtNDIzOTM4Y2ItNTM0MzhmMmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:39.619137Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715741. Ctx: { TraceId: 01jqpgpq1cb2c5aeppnh3e4fwv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmU3MjUzNWUtZmRmZWRiM2ItNjhhMWRmN2EtNjE2MjM5YzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:39.680851Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715742. Ctx: { TraceId: 01jqpgpq3ae8dg3b2a8hj3c67v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTkwNjRjYmMtYTQ1YTNkMTQtMmE0NzU5M2UtMTZmYzJmNjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:39.740414Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715743. Ctx: { TraceId: 01jqpgpq58afwc621rt57jy50m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjY1NWYyMjAtYTYwMjkzYWMtOThhNzY3OWQtODFmYjgxNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:39.799634Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715744. Ctx: { TraceId: 01jqpgpq7340mkmah8q8xqb9a2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWY0ZGY4ZTUtNWUwZWIwNjUtNjNjMDAwYzAtZjBiYTI4MWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:39.845477Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715745. Ctx: { TraceId: 01jqpgpq8xchkxg4ajymj82ztd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGUxMmM0NDQtMWNlZGEwMTItMTVmYzE4OGYtNTFmYmE3MDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:39.894299Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715746. Ctx: { TraceId: 01jqpgpqab1bvfr28vfwptysqa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTE3ZGIwMWMtNDliMDc5ZmMtZTJkODcyOWEtMjEwYWFiMjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:39.945529Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715747. Ctx: { TraceId: 01jqpgpqbw0z80rfpvba5wkxtz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzQ5MWM2MjItY2IzODZkZmYtZmE5ZGNhMzAtOTU2ZmU5YmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:39.995866Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715748. Ctx: { TraceId: 01jqpgpqdg6rk485ye1azw6bte, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWQ3YzBjN2MtNmE2MGQzNWQtZTZiZjBhNTAtNWFiMDNhYmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:40.064428Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715749. 
Ctx: { TraceId: 01jqpgpqf15m67pc84ktzntz5h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmNjN2QxYjItOWVjNWE3Y2QtZmMwMDI0MTAtNmRhOTc2ZGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:40.115878Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715750. Ctx: { TraceId: 01jqpgpqh691r7xmnwz9dt9c6w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2VjZjliNDgtM2QyZGJlNDUtOTczMWFlODMtZmY5MTU1Mzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:40.160827Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715751. Ctx: { TraceId: 01jqpgpqjtd3w4j09gnk3b4cgz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTZmYWVmZS00ZjA3ZjYwZi1mN2JmZjVlNC0yOWNkNmU2, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:40.209963Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715752. Ctx: { TraceId: 01jqpgpqm5eqrpj401kazmf92m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTAxMTgwMDgtZjI5ZmFkZmQtYjRlNzY1MWItNGQxZWIwNTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:40.258788Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715753. Ctx: { TraceId: 01jqpgpqnq54q7jxke67wxnren, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGE0MmNiMTAtYWU4YjNjYi1iOTcxNmNhNy01YzViMmQ2Mg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:40.310760Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715754. Ctx: { TraceId: 01jqpgpqq92gj6vtm3ydvzqgz1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDRkMWQ5YTEtMWJlODk1ZGUtYjZhNDIwMjctNmUyZTAxNTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:40.366711Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715755. Ctx: { TraceId: 01jqpgpqrx91spw5y3dzqwsrey, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGVjYTliYmUtNWNjYjkxMjItYmQ4MGY0ZGMtNDVmNTQ3MGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:40.417480Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715756. Ctx: { TraceId: 01jqpgpqtpd3m6q1tfgw4md1pp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTdmYTY1ZWUtMjQ0Y2JlOGQtNTUwYzY1ZTgtYjFlMDY3NWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:40.470275Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715757. Ctx: { TraceId: 01jqpgpqw7fk8r7fr503n9b4eb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjlmYTk2ODAtZWYxYzJhNDEtNWZmNTZlOGItMjE4NzhiMTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:40.536635Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715758. Ctx: { TraceId: 01jqpgpqxw1w16rffk0y8a5tjh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmZmZDRmYjYtNDU3Nzc3MGUtOWIzYTAwNTUtOWM2NjQ1ZTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:40.590330Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715759. 
Ctx: { TraceId: 01jqpgpqzz8k4zaqa72yqn4xfr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWQ1MzI3ZjMtMWFmMmEwZGItZWU0NjlmNDUtZGE5ZjA5ZmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:40.826706Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715760. Ctx: { TraceId: 01jqpgpr2289nabcbfqdbs5gy1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmUxYjZiYjEtYjkyYzAyMmMtZDQ5NGE4MWEtYjk3NGYzYmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
>> TTopicWriterTests::TestEnterMessage_1KiB_Newline_Delimiter [GOOD]
>> TTopicWriterTests::TestEnterMessage_1KiB_Newline_Delimited_With_Two_Delimiters_In_A_Row [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-45 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-46
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-57 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-58
|94.2%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_1KiB_Newline_Delimited_With_Two_Delimiters_In_A_Row [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::WithCompaction [GOOD]
Test command err: 2025-03-31T16:31:32.539209Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:31:32.539628Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:31:32.539850Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0015f9/r3tmp/tmpxhDiNB/pdisk_1.dat 2025-03-31T16:31:33.023404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:31:33.067730Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:31:33.113030Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:33.114190Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:33.127845Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:31:33.235897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:31:33.611621Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:742:2623], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:33.612468Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:752:2628], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:33.612561Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:33.620844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:31:33.784203Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:756:2631], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:31:33.856460Z node 1 :TX_PROXY ERROR: Actor# [1:830:2674] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:31:34.562641Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqpgph771g10javtqfrhp4ss, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTQ3OTY3YWYtYjMzMmQyZjMtZjQyNzI3MmEtOThlNDNhNzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:34.668006Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqpgpj6vezexx7t1jh779cnj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzNiNjdhY2EtMmMxOGM0OGItM2Q0NDYxNWUtOTI5OWI0OGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:34.734952Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqpgpj8x4r7742w1nwdfb8as, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjlhZTZiZWMtZDhiNDAxMjctNWIzMTNkOTctZDJhOTczZjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:34.800896Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqpgpjb006me9fm4fs62ekmx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzQzYzlkNTgtYTFjZDQ5NC1mOTQ5YmE2Yi00ZGU2NjJiYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:34.855161Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jqpgpjd18hsxws9m4mzw86gg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Nzc5MmRjODYtN2U3N2E1MzQtZjZkODY4NjktNDMzZjYxNWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:34.925969Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jqpgpjeq9fbkf5w9am62fj5m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2ZjZmYzZGEtOGU0YzRjMWQtZjJhZDIzYWYtZmRiMGU0M2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:34.993070Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jqpgpjgz0pscg7v03w01qk1w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2Y4MzhmMy01ZTFjYzZiMC1mM2UxODEyYS02YTIwY2Y1Nw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:35.062644Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jqpgpjk2ayesf0jeefdy7jt2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTZiMDBhNDItMmZhNTA1ZjAtMTAyNzg4MWUtOWFjZjZlNDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:35.122474Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jqpgpjn71f6g98zpsx6kec66, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWM1NmMxMDMtZDMyNDc0NmYtYjhlOWEyZDAtZjg3OGQxMzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:35.176373Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715669. 
Ctx: { TraceId: 01jqpgpjq27yf42zepd64tmmp0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzY3ZGM3ODEtYzcwZWViNTgtOWY4NDJiMzQtNTk4YTIzYjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:35.234959Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01jqpgpjrscnyjhehwpqmn187m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzY5NWUzNjktZmE4Zjc5OTMtYjE3MTY5MzktZDM2NDg0YmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:35.301484Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01jqpgpjtk8fy63nn82c57r5hf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODg5NmMxZDEtMTQyMjE3ODgtM2YxZDlkOGYtNmE0OTNmZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:35.368110Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jqpgpjwpbzbx5nbkrw706j2e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGZkYmQxN2UtNjQ3MmNlZmItYWI0YmU4YTctNzNlZTJlMTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:35.423847Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01jqpgpjyr5gea0kmm31dy6yzg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmYwZGExNDEtYmQ1NmRlNDItYzRkZDllNDItNTUyNTYyOGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:35.489454Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01jqpgpk0gf4w7ha57g2ke5v1m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yzg4OTgzNzYtMjIxZjA2M2ItMzBhNzE2ZmUtMjAxMjllNzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:35.557378Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01jqpgpk2k21wby2bjw1213mb8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmUwOTM2N2YtODhiMmVkNGUtYTI5Y2NkMzAtZmU4MTMzNjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:35.616764Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715676. Ctx: { TraceId: 01jqpgpk4p2fs9m8yg2gd65vjq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmVhNDMzNTctOTc5MGU5NmUtYTAwYmJjZTgtNzVjMWVjYTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:35.682869Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715677. Ctx: { TraceId: 01jqpgpk6jawg6xw6z4kp5ngxj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTlmYmI1MTctYjZlZTQzYmMtYjJhYTNlODMtNTNjZjg5NGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:35.746977Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715678. Ctx: { TraceId: 01jqpgpk8k7fe3sdz3nbj8e4m9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmE3NDU4MmQtMzdlNGY4MTYtMjE3Njc2Mi02ZmY5ZWYyYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:35.814641Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715679. 
Ctx: { TraceId: 01jqpgpkakd3aw0c9d6gbx5f0j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjdlYzg2MWYtNzM4ZWM3ODYtNDhmOTc2ZTYtZWMyNzljYzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:35.879458Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715680. Ctx: { TraceId: 01jqpgpkcq0qv40fxtmjfc0kkm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzVmNWY2NDYtODg0ODVhNmUtNGE2ZDc4ZTgtNWJiYTEzZjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:35.946904Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715681. Ctx: { TraceId: 01jqpgpkev2xh2tertg0fxe5ts, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTBlZjQwOGQtNzQzMTlkY2MtNjg2NTBiNDktOGJjMTMxOTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:36.016432Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715682. Ctx: { TraceId: 01jqpgpkgyfbcvtncphnc18rme, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzhjOGU2MWEtMzI3Njk4My1kNmUwYjU5My1hMzRjMzA4NA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:36.095451Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715683. Ctx: { TraceId: 01jqpgpkk69jzgvpx11dk6hsmv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjdlY2YyODQtNDU1ZmI0ZWMtMjM1MGFhNGYtZjNlYjY0ZGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:36.167820Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715684. Ctx: { TraceId: 01jqpgpknm1afppdbeakh7yrrn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzQzMTA2ZDktOGU5ZGFlN2YtMmI3ZmI3NS1mNWNhNTljYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:36.240566Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715685. Ctx: { TraceId: 01jqpgpkqv7mzn2bx211cj0m03, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTc1OTk2NDAtMjA0YzhkODAtZjUzZmVmZDUtMTM5ZjYw ... hM2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:39.079753Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715726. Ctx: { TraceId: 01jqpgppgv5sg6j1jcht2vt5cm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjdhMzYyMmEtMTVkNzU1ZjgtNDQ0NDkxNmUtMjhiMjQ5MzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:39.143723Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715727. Ctx: { TraceId: 01jqpgppjr40vwdsnv79z95r6h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2IzNGE4MTYtMmE0YmUxYzgtYzkxZWEzZGQtMzk4MTkzNGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:39.214520Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715728. Ctx: { TraceId: 01jqpgppms2je17dmj8mh2w16f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTIxYzhiMWEtOGMyOGVlMjQtZDY5YjQ3YmEtMTA2NDdkNzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:39.287445Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715729. 
Ctx: { TraceId: 01jqpgppq45a9bkh50w1z4y6a6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjY5ZjdiNDEtYWVhYjUzYTYtODI2YmEyZWMtNzlkODI3YTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:39.356074Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715730. Ctx: { TraceId: 01jqpgppsa5x7b5m5x6m6v8j9c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmYyZTU1NWEtYzQ2NzYxOWMtMzA4NGFkZS01ODU5MGZjOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:39.424434Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715731. Ctx: { TraceId: 01jqpgppve6f1j6dvb87baf1hd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjYwZThmYTQtOWU4YzRjMzAtNTQwNzdiYjctZTQ2NWI2YTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:39.489217Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715732. Ctx: { TraceId: 01jqpgppxh3xzryp4pb2gndcy4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDRhNDhmNDktZWE4OGJjNjItMWQ1NjViZGYtZDMxZWRjODQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:39.556694Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715733. Ctx: { TraceId: 01jqpgppzk0n0t7kt5xanwxwt7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjJjYmM3MmItODc5N2U1NTYtZWEyYmM2MWMtZDJiNGQxZTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:39.625488Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715734. Ctx: { TraceId: 01jqpgpq1peqw3jrvtb5jvsgp4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjcyNzc0YjYtMWU0YzcxNmQtYTFhM2NjMmMtZTVhODIxOWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:39.694229Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715735. Ctx: { TraceId: 01jqpgpq3vbpztdb5ra7p26d6k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzA2NTg4NTAtY2IwZmQ2ZDQtNGIxZjhiMGMtOGMxNTkyM2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:39.755144Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715736. Ctx: { TraceId: 01jqpgpq5zaz2x2v4j1fc3a1e6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWM3MTVmMDYtNjQ3ZWJjYWUtMThjNTNhNDgtODc0N2YyM2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:39.821326Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715737. Ctx: { TraceId: 01jqpgpq7x84cpv7sn2wxbg3rv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzZmMmRmZDctZDA4MTJjOTctMjcwMTNkYTktZmQ2NDhkZDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:39.886303Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715738. Ctx: { TraceId: 01jqpgpq9z5k6bsc2x1fjv5q4g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWU2YTNkZmQtZGZiODY0YmYtYjA5MzJhZjUtZmQyYmJmYmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:39.950558Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715739. 
Ctx: { TraceId: 01jqpgpqbzeqbrrt27v0g4pwzr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTY4YWRhZjgtYWUzMDhkNDUtZTE1YTI2ODAtMWVhNTNjODk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:40.007479Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715740. Ctx: { TraceId: 01jqpgpqdz8hk99zejapt53k5q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzlmYzgzMWQtNTJlYzVhNDUtZWQyNDdlOTMtZDY0YjBhZTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:40.068181Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715741. Ctx: { TraceId: 01jqpgpqfr57jbgr00sgjpykb7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTk0NzYyNTUtZDcwNTQzZTktYmJhNTJhNzctZWI5MjMwZTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:40.125433Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715742. Ctx: { TraceId: 01jqpgpqhm81zp9374xatbzq96, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2QyOWQyMTEtZTM4ZjlmNWQtZTQ2NTYzMjMtMzQwNTEzYTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:40.216602Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715743. Ctx: { TraceId: 01jqpgpqkf20xpz69fqb9x9er5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTFmYWY5N2QtZTdjZDg0YmItYjQ3ODljNzYtMjZlMzc3NjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:40.276317Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715744. Ctx: { TraceId: 01jqpgpqpa98ep0jc3aa0m2718, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmIyNGM2OC0yNTMwMDk5MS1lYjY3MGRmNC0xYjUxNTkyZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:40.331705Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715745. Ctx: { TraceId: 01jqpgpqr5bgz8vpce12szpthk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjE0OTFjMmItYmU3ZmQyZDYtOThkOWIxODUtNWRkMjM1NDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:40.395005Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715746. Ctx: { TraceId: 01jqpgpqswe0gytd62fbehx2k3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjVlZDQ4NWEtMmEzZGM2ODgtNzlhOTYxYzctZjliM2JkYmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:40.461959Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715747. Ctx: { TraceId: 01jqpgpqvxeand0nb5kgzfsx05, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjMzNmY2NzMtYTQ3MzVhNDItOTk1MThlZGItNjcwMzQwZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:40.528652Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715748. Ctx: { TraceId: 01jqpgpqxzf6kk9b2dtdnkes32, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTAwNTFiODUtZDZjZjk1NTAtMWZhODNmZmEtNzhiOTFhYmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:40.594611Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715749. 
Ctx: { TraceId: 01jqpgpr0270x1pfvx60f50ss4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjQ5MWM0NmYtODE1NDJiYTUtZGZhYjIwZjgtZDE5NjcyZTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:40.660958Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715750. Ctx: { TraceId: 01jqpgpr2455znbnze69h431h2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzVlMjZkMTQtMTRkZjAxYzAtNjAxOTY2YjktM2E4YTNkY2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:40.727534Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715751. Ctx: { TraceId: 01jqpgpr4639zhh8f9v5vxhqhh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzVlZjBjMjQtYzVmNGIzM2UtYjdiNjU4MmMtOWVhZGM0NWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:40.793786Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715752. Ctx: { TraceId: 01jqpgpr690xkncqbyp6jk77mj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjAyMjlkMDYtZmVmMDQwZTYtYmJmNjM2ZDYtYTg1NTBlNWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:40.857543Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715753. Ctx: { TraceId: 01jqpgpr8a10spdbamt1cjkgxj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTcxMGVjMGUtZTA3MjFhM2UtYWI4ZTM3ZjEtMWI1NjgwMjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:40.917101Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715754. Ctx: { TraceId: 01jqpgprabd1m1fa37v8htn3ah, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTkxYTMxNC01NDViOWM1OC0zZTA0MzE4NC02YTAzMjUzNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:40.991897Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715755. Ctx: { TraceId: 01jqpgprcmbapfarmnab61qb21, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDE5ZWQ2ZjktM2U2NTY3ZjAtMzJkNjRjMTYtMmQyYzYxYTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:41.045163Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715756. Ctx: { TraceId: 01jqpgpreg9jnj1krx7992p05q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjEyZGI4YmMtYmMzZjAyZjItNjc5MjIxMjEtNDdlMmJkNjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:41.104317Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715757. Ctx: { TraceId: 01jqpgprg6efr83nrcqaejnsjt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzFkYTIyOTgtNTYzOGIxMjUtOWE5NjMxYzEtZDljMjhkYjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:41.165270Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715758. Ctx: { TraceId: 01jqpgprj2f3bpmmdeb65w63yh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWM1YzdkNTMtMThlOGM4NjQtMmFmYWYxNzYtYjYzZjI1NDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:41.227205Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715759. 
Ctx: { TraceId: 01jqpgprkz62zxb9wjkj5hj5xb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWExN2VjNWQtZmVmMDdmMDgtZjUwY2VlYjgtMjU3Njg4ZGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:31:41.252061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 100:0, at schemeshard: 72057594046644480 2025-03-31T16:31:41.700531Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715760. Ctx: { TraceId: 01jqpgpryt8esr88me7hctvabb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2NkNzgyYzctMjU3ZGY1NTgtOTBmYWExNTQtNDI4MjM0YzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
>> KqpIndexes::JoinWithNonPKColumnsInPredicate-UseStreamJoin [GOOD]
|94.2%| [TA] $(B)/ydb/core/tx/datashard/ut_external_blobs/test-results/unittest/{meta.json ... results_accumulator.log}
|94.2%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_external_blobs/test-results/unittest/{meta.json ... results_accumulator.log}
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-16 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-17
>> TCacheTest::Attributes
>> TCacheTest::Recreate
>> TCacheTest::WatchRoot
>> TCacheTest::SystemView
>> TCacheTest::MigrationCommon
>> TCacheTest::RacyRecreateAndSync
>> TCacheTest::Navigate
>> TCacheTest::MigrationLostMessage
>> TCacheTest::CheckSystemViewAccess
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-22 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-23
>> Worker::Basic [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::JoinWithNonPKColumnsInPredicate-UseStreamJoin [GOOD]
Test command err: Trying to start YDB, gRPC: 18105, MsgBus: 8434 2025-03-31T16:31:07.828442Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012057953271117:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:07.828608Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d71/r3tmp/tmpy6ebJP/pdisk_1.dat 2025-03-31T16:31:08.352716Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:08.352814Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:08.357107Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:31:08.396801Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18105, node 1 2025-03-31T16:31:08.482791Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:08.482812Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:08.482818Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:08.482908Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8434 TClient is connected to server localhost:8434 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:31:09.035833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:09.056496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:31:09.190239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-31T16:31:09.382734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:31:09.460594Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:31:11.279926Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012075133142069:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:11.280031Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:11.585626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:31:11.626755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:31:11.659036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:31:11.686119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:31:11.725537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:31:11.800146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:31:11.885314Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012075133142590:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:11.885391Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:11.885680Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012075133142595:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:11.890050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:31:11.904388Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012075133142597:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:31:12.005718Z node 1 :TX_PROXY ERROR: Actor# [1:7488012079428109949:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:31:12.824312Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488012057953271117:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:12.824384Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:31:12.990422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 11890, MsgBus: 7353 2025-03-31T16:31:14.745299Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012088159697114:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:14.745391Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d71/r3tmp/tmpYUFQpj/pdisk_1.dat 2025-03-31T16:31:14.843895Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11890, node 2 2025-03-31T16:31:14.906824Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:14.906907Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:14.908257Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:31:14.957586Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:14.957609Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:14.957616Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:14.957738Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7353 TClient is connected to server localhost:7353 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-31T16:31:15.385495Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:31:15.404583Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:15.463015Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:15.633237Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:15.703418Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but pr ... ccess permissions } 2025-03-31T16:31:18.278546Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:31:18.288178Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488012105339568603:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:31:18.343899Z node 2 :TX_PROXY ERROR: Actor# [2:7488012105339568656:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:31:19.360125Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:1, at schemeshard: 72057594046644480 2025-03-31T16:31:19.438880Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-31T16:31:19.539652Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-31T16:31:19.651310Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-03-31T16:31:19.743839Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-03-31T16:31:19.747448Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488012088159697114:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:19.747518Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:31:19.824294Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 22588, MsgBus: 23349 2025-03-31T16:31:28.435306Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488012148111902188:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:28.435394Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d71/r3tmp/tmpflQp4i/pdisk_1.dat 2025-03-31T16:31:28.557910Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:31:28.585083Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:28.585187Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:28.586737Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22588, node 3 2025-03-31T16:31:28.627116Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:28.627155Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:28.627166Z node 3 
:NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:28.627330Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23349 TClient is connected to server localhost:23349 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:31:29.110190Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:29.126485Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:29.184835Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:29.391108Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:29.462519Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:32.057964Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488012165291773149:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:32.058074Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:32.105809Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:31:32.173843Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:31:32.205547Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:31:32.237308Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:31:32.269947Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:31:32.303870Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:31:32.348321Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488012165291773660:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:32.348420Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:32.348415Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488012165291773665:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:32.351663Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:31:32.360503Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488012165291773667:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:31:32.439211Z node 3 :TX_PROXY ERROR: Actor# [3:7488012165291773721:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:31:33.435345Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488012148111902188:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:33.435404Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:31:33.512686Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:1, at schemeshard: 72057594046644480 2025-03-31T16:31:33.561394Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-31T16:31:33.620774Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-31T16:31:33.657968Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-03-31T16:31:33.739575Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-03-31T16:31:33.796706Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480
>> TCacheTest::Recreate [GOOD]
>> TCacheTest::SysLocks
>> TCacheTest::SystemView [GOOD]
>> TCacheTest::TableSchemaVersion
>> TCacheTest::RacyRecreateAndSync [GOOD]
>> TCacheTest::RacyCreateAndSync
>> TCacheTest::Navigate [GOOD]
>> TCacheTest::PathBelongsToDomain
>> TCacheTest::Attributes [GOOD]
>> TCacheTest::CheckAccess
>> TCacheTest::CheckSystemViewAccess [GOOD]
>> TCacheTest::CookiesArePreserved
>> TCacheTest::WatchRoot [GOOD]
>> TCacheTestWithDrops::LookupErrorUponEviction
>> TCacheTest::RacyCreateAndSync [GOOD]
>> TCacheTest::PathBelongsToDomain [GOOD]
>> TCacheTest::CheckAccess [GOOD]
>> TCacheTest::CookiesArePreserved [GOOD]
>> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDefaultGroupAttributeDisableNestedGroupsGood [GOOD]
>> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDefaultGroupAttributeGoodUseListOfHosts
>> TCacheTest::SysLocks [GOOD]
>> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithInvalidRobotUserPasswordBad [GOOD]
>> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithRemovedUserCredentialsBad
>> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDefaultGroupAttributeGoodUseListOfHosts [GOOD]
>> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDontExistGroupAttribute
>> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithInvalidRobotUserPasswordBad [GOOD]
>> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithRemovedUserCredentialsBad
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest >> Worker::Basic [GOOD]
Test command err: 2025-03-31T16:31:36.592845Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012184348920725:2198];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:36.593287Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001648/r3tmp/tmprTtUa1/pdisk_1.dat 2025-03-31T16:31:37.046324Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:31:37.052661Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:37.053105Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:37.063954Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12420 TServer::EnableGrpc on GrpcPort 26573, node 1 2025-03-31T16:31:37.372756Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:37.372792Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:37.372802Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:37.372983Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12420 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:31:37.924726Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:38.146111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743438698260 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... (TRUNCATED) 2025-03-31T16:31:38.288254Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7488012192938856002:2421] Handshake: worker# [1:7488012192938856001:2421] 2025-03-31T16:31:38.288342Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7488012192938856003:2421] Handshake: worker# [1:7488012192938856001:2421] 2025-03-31T16:31:38.288652Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7488012192938856003:2421] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:3:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-31T16:31:38.288915Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7488012192938856003:2421] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 3] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-03-31T16:31:38.288947Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7488012192938856003:2421] Send handshake: worker# [1:7488012192938856001:2421] 2025-03-31T16:31:38.289016Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7488012192938856001:2421] Handle NKikimr::NReplication::NService::TEvWorker::TEvHandshake 2025-03-31T16:31:38.289064Z node 1 :REPLICATION_SERVICE INFO: [Worker][1:7488012192938856001:2421] Handshake with writer: sender# [1:7488012192938856003:2421] 2025-03-31T16:31:38.294192Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7488012192938856002:2421] Create read session: session# 
[1:7488012192938856006:2293] 2025-03-31T16:31:38.294282Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7488012192938856001:2421] Handle NKikimr::NReplication::NService::TEvWorker::TEvHandshake 2025-03-31T16:31:38.294298Z node 1 :REPLICATION_SERVICE INFO: [Worker][1:7488012192938856001:2421] Handshake with reader: sender# [1:7488012192938856002:2421] 2025-03-31T16:31:38.294340Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7488012192938856002:2421] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2025-03-31T16:31:38.362806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:2, at schemeshard: 72057594046644480 2025-03-31T16:31:39.341245Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012197233823494:2376], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:39.341351Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012197233823495:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:39.343840Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012197233823479:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:39.343987Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:39.346887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:2, at schemeshard: 72057594046644480 2025-03-31T16:31:39.356941Z node 1 :TX_PROXY ERROR: Actor# [1:7488012197233823501:2508] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-31T16:31:39.358746Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2025-03-31T16:31:39.358917Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012197233823498:2378], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-03-31T16:31:39.358984Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2025-03-31T16:31:39.359115Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012197233823499:2379], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-03-31T16:31:39.440228Z node 1 :TX_PROXY ERROR: Actor# [1:7488012197233823547:2538] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:31:39.450019Z node 1 :TX_PROXY ERROR: Actor# [1:7488012197233823565:2546] txid# 281474976710664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:31:40.657747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-03-31T16:31:41.118320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:31:41.592665Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488012184348920725:2198];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:41.592720Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:31:41.604317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-31T16:31:42.047068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-31T16:31:42.518049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-31T16:31:43.308722Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7488012192938856002:2421] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Codec: RAW Data: 36b Offset: 0 SeqNo: 1 CreateTime: 2025-03-31T16:31:43.281000Z MessageGroupId: producer ProducerId: producer }] } } 2025-03-31T16:31:43.308836Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7488012192938856001:2421] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Codec: RAW Data: 36b Offset: 0 SeqNo: 1 CreateTime: 2025-03-31T16:31:43.281000Z MessageGroupId: producer ProducerId: producer }] } 2025-03-31T16:31:43.308944Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7488012192938856003:2421] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Codec: RAW Data: 36b Offset: 0 SeqNo: 1 CreateTime: 2025-03-31T16:31:43.281000Z MessageGroupId: producer ProducerId: producer }] } 2025-03-31T16:31:43.309094Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7488012192938856003:2421] Handle 
NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 0 BodySize: 36 }] } 2025-03-31T16:31:43.310688Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:3:1][72075186224037890][1:7488012214413693461:2421] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2025-03-31T16:31:43.310765Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7488012192938856003:2421] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037890 } 2025-03-31T16:31:43.310867Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:3:1][72075186224037890][1:7488012214413693461:2421] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 0 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b }] } 2025-03-31T16:31:43.312807Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:3:1][72075186224037890][1:7488012214413693461:2421] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-03-31T16:31:43.312868Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7488012192938856003:2421] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037890 } 2025-03-31T16:31:43.312934Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7488012192938856003:2421] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [0] } 2025-03-31T16:31:43.313014Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7488012192938856001:2421] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2025-03-31T16:31:43.313073Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7488012192938856002:2421] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2025-03-31T16:31:43.443586Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7488012192938856002:2421] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Codec: RAW Data: 36b Offset: 1 SeqNo: 2 CreateTime: 2025-03-31T16:31:43.437000Z MessageGroupId: producer ProducerId: producer }] } } 2025-03-31T16:31:43.443654Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7488012192938856001:2421] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Codec: RAW Data: 36b Offset: 1 SeqNo: 2 CreateTime: 2025-03-31T16:31:43.437000Z MessageGroupId: producer ProducerId: producer }] } 2025-03-31T16:31:43.443710Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7488012192938856003:2421] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Codec: RAW Data: 36b Offset: 1 SeqNo: 2 CreateTime: 2025-03-31T16:31:43.437000Z MessageGroupId: producer ProducerId: producer }] } 2025-03-31T16:31:43.443812Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7488012192938856003:2421] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 36 }] } 2025-03-31T16:31:43.443908Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:3:1][72075186224037890][1:7488012214413693461:2421] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 
Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b }] } 2025-03-31T16:31:43.445383Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:3:1][72075186224037890][1:7488012214413693461:2421] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-03-31T16:31:43.445435Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7488012192938856003:2421] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037890 } 2025-03-31T16:31:43.445471Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7488012192938856003:2421] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1] } 2025-03-31T16:31:43.445517Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7488012192938856001:2421] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2025-03-31T16:31:43.445559Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7488012192938856002:2421] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2025-03-31T16:31:43.614889Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7488012192938856002:2421] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Codec: RAW Data: 36b Offset: 2 SeqNo: 3 CreateTime: 2025-03-31T16:31:43.589000Z MessageGroupId: producer ProducerId: producer }] } } 2025-03-31T16:31:43.614990Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7488012192938856001:2421] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Codec: RAW Data: 36b Offset: 2 SeqNo: 3 CreateTime: 2025-03-31T16:31:43.589000Z MessageGroupId: producer ProducerId: producer }] } 2025-03-31T16:31:43.615048Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7488012192938856003:2421] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Codec: RAW Data: 36b Offset: 2 SeqNo: 3 CreateTime: 2025-03-31T16:31:43.589000Z MessageGroupId: producer ProducerId: producer }] } 2025-03-31T16:31:43.615204Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7488012192938856003:2421] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 2 BodySize: 36 }] } 2025-03-31T16:31:43.615305Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:3:1][72075186224037890][1:7488012214413693461:2421] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 2 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b }] } 2025-03-31T16:31:43.616928Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:3:1][72075186224037890][1:7488012214413693461:2421] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-03-31T16:31:43.616997Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7488012192938856003:2421] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037890 } 2025-03-31T16:31:43.617035Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7488012192938856003:2421] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [2] } 
2025-03-31T16:31:43.617133Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7488012192938856001:2421] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2025-03-31T16:31:43.617183Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7488012192938856002:2421] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2025-03-31T16:31:43.727653Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7488012192938856002:2421] Handle NKikimr::NReplication::TEvYdbProxy::TEvTopicReaderGone { Result: { status: UNAVAILABLE, issues: {
: Error: PartitionSessionClosed { Partition session id: 1 Topic: "topic" Partition: 0 Reason: ConnectionLost } } } } 2025-03-31T16:31:43.727682Z node 1 :REPLICATION_SERVICE INFO: [RemoteTopicReader][/Root/topic][0][1:7488012192938856002:2421] Leave 2025-03-31T16:31:43.727727Z node 1 :REPLICATION_SERVICE INFO: [Worker][1:7488012192938856001:2421] Reader has gone: sender# [1:7488012192938856002:2421] 2025-03-31T16:31:43.727786Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7488012214413693640:2421] Handshake: worker# [1:7488012192938856001:2421] 2025-03-31T16:31:43.728796Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7488012214413693640:2421] Create read session: session# [1:7488012214413693642:2293] 2025-03-31T16:31:43.728833Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7488012192938856001:2421] Handle NKikimr::NReplication::NService::TEvWorker::TEvHandshake 2025-03-31T16:31:43.728844Z node 1 :REPLICATION_SERVICE INFO: [Worker][1:7488012192938856001:2421] Handshake with reader: sender# [1:7488012214413693640:2421] 2025-03-31T16:31:43.728871Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7488012214413693640:2421] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } >> TCacheTest::TableSchemaVersion [GOOD] >> TCacheTest::MigrationCommon [GOOD] >> TCacheTest::MigrationDeletedPathNavigate >> LdapAuthProviderTest::LdapRequestWithEmptyBaseDn [GOOD] >> LdapAuthProviderTest::LdapRequestWithEmptyBindDn >> KqpIndexes::PrefixedVectorIndexOrderByCosineDistanceNullableLevel1 [GOOD] >> KqpIndexes::PrefixedVectorIndexOrderByCosineDistanceNotNullableLevel2 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-10 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-11 >> TCacheTest::MigrationLostMessage [GOOD] >> TCacheTest::MigrationUndo ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::CheckAccess [GOOD] Test command err: 2025-03-31T16:31:44.596150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:31:44.596224Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-03-31T16:31:44.795621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-03-31T16:31:44.822164Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 2025-03-31T16:31:45.219052Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:31:45.219144Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 
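The Worker::Basic trace above exercises one fixed message cycle: the worker handshakes with a topic reader and a table writer, polls the reader, hands each record batch to the writer, drops a record only after the writer reports STATUS_OK (TEvRemoveRecords), and re-creates the reader after TEvTopicReaderGone (PartitionSessionClosed/ConnectionLost). Below is a minimal plain-C++ sketch of that cycle; every type and name in it is a hypothetical stand-in for illustration, not YDB's actual actor or event API.

// Hypothetical sketch of the poll -> apply -> ack -> re-poll cycle from the
// Worker::Basic trace above. None of these types are YDB's real classes.
#include <cstdint>
#include <deque>
#include <iostream>
#include <optional>
#include <string>

struct Record { uint64_t Offset; std::string Body; };

// Stands in for RemoteTopicReader; a session can be lost mid-stream.
class Reader {
public:
    Reader(std::deque<Record>* topic, bool failOnce) : Topic_(topic), FailOnce_(failOnce) {}
    // nullopt models TEvTopicReaderGone (e.g. PartitionSessionClosed).
    std::optional<Record> Poll() {
        if (FailOnce_ && !Topic_->empty() && Topic_->front().Offset == 2) {
            FailOnce_ = false;
            return std::nullopt;                 // session lost before offset 2
        }
        if (Topic_->empty()) return std::nullopt;
        Record r = Topic_->front();
        Topic_->pop_front();
        return r;
    }
private:
    std::deque<Record>* Topic_;
    bool FailOnce_;
};

// Stands in for LocalTableWriter; success mirrors STATUS_OK in the trace.
bool ApplyToTable(const Record& r) {
    std::cout << "applied offset=" << r.Offset << "\n";
    return true;
}

int main() {
    std::deque<Record> topic = {{0, "36b-body"}, {1, "36b-body"}, {2, "36b-body"}};
    bool failOnce = true;
    // Outer loop: re-create the reader after it is gone, as the worker does
    // after "Reader has gone" in the log, until the topic is drained.
    while (!topic.empty()) {
        Reader reader(&topic, failOnce);
        failOnce = false;
        while (auto rec = reader.Poll()) {       // the TEvPoll / TEvData cycle
            if (ApplyToTable(*rec)) {
                // ack: drop the record (TEvRemoveRecords) and poll again
            }
        }
        if (!topic.empty()) std::cout << "reader gone; re-creating session\n";
    }
}

The design point the sketch tries to capture is that removal happens only after a successful apply, so a lost read session never drops an unacknowledged offset; the new session simply resumes from the undelivered records.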
2025-03-31T16:31:45.273402Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2025-03-31T16:31:45.287328Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 102:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 2025-03-31T16:31:45.291318Z node 2 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [2:197:2187], for# user1@builtin, access# DescribeSchema 2025-03-31T16:31:45.291865Z node 2 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [2:201:2191], for# user1@builtin, access# DescribeSchema ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::RacyCreateAndSync [GOOD] Test command err: 2025-03-31T16:31:44.596923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:31:44.596977Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-03-31T16:31:44.811639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-03-31T16:31:44.831671Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-03-31T16:31:44.834349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 102:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-03-31T16:31:44.882942Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown 
transaction, txId: 102, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 FAKE_COORDINATOR: Add transaction: 103 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000004 FAKE_COORDINATOR: Erasing txId 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-03-31T16:31:44.905005Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 103 2025-03-31T16:31:45.234116Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:31:45.234189Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-03-31T16:31:45.291709Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-03-31T16:31:45.304210Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::CookiesArePreserved [GOOD] Test command err: 2025-03-31T16:31:44.602435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:31:44.602490Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-03-31T16:31:44.785203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 2025-03-31T16:31:44.805706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-03-31T16:31:44.815621Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 
72057594046678944 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-03-31T16:31:44.816798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 102:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 2025-03-31T16:31:44.822817Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:205:2195], for# user1@builtin, access# DescribeSchema 2025-03-31T16:31:44.823672Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:211:2201], for# user1@builtin, access# 2025-03-31T16:31:45.237507Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:31:45.237576Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-03-31T16:31:45.290752Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 2025-03-31T16:31:45.296527Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-03-31T16:31:45.301299Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestModificationResult got TxId: 102, wait until txId: 102 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::SysLocks [GOOD] Test command err: 2025-03-31T16:31:44.601912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:31:44.601989Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-03-31T16:31:44.797438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 
72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-03-31T16:31:44.816476Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-03-31T16:31:44.818270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 102:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-03-31T16:31:44.852857Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 FAKE_COORDINATOR: Add transaction: 103 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000004 FAKE_COORDINATOR: Erasing txId 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-03-31T16:31:44.865372Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 103 2025-03-31T16:31:45.196378Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:31:45.196449Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-03-31T16:31:45.245939Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 |94.3%| [TA] $(B)/ydb/core/tx/replication/service/ut_worker/test-results/unittest/{meta.json ... results_accumulator.log} |94.3%| [TA] {RESULT} $(B)/ydb/core/tx/replication/service/ut_worker/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::PathBelongsToDomain [GOOD] Test command err: 2025-03-31T16:31:44.598956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:31:44.599010Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-03-31T16:31:44.811063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-03-31T16:31:44.835880Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 2025-03-31T16:31:45.219463Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:31:45.219526Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-03-31T16:31:45.276819Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 2025-03-31T16:31:45.283440Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-03-31T16:31:45.290574Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestModificationResult got TxId: 102, wait until txId: 102 2025-03-31T16:31:45.301120Z node 2 :TX_PROXY_SCHEME_CACHE WARN: Path does not belong to the specified domain: self# [2:226:2204], domain# [OwnerId: 72057594046678944, LocalPathId: 1], path's domain# [OwnerId: 72057594046678944, LocalPathId: 2] 
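The KQP_WORKLOAD_SERVICE lines earlier in this section show several actors racing to bootstrap /Root/.metadata/workload_manager/pools/default: the fetcher reports NOT_FOUND, multiple creators propose the pool, the losers receive "path exists but creating right now" or "path exist, request accepts it", and a scheduled "doublechecking" retry converges everyone on success. The following is a minimal sketch of that create-if-absent convergence pattern, assuming a toy in-memory scheme shard; it is not YDB's TPoolCreatorActor.

// Hypothetical sketch of "create default pool, tolerate losing the race".
#include <iostream>
#include <mutex>
#include <set>
#include <string>
#include <thread>
#include <vector>

enum class EStatus { Ok, AlreadyExists, Retry };

class SchemeShard {                 // toy stand-in for the scheme shard
public:
    EStatus CreatePath(const std::string& path) {
        std::lock_guard<std::mutex> g(Mu_);
        return Paths_.insert(path).second ? EStatus::Ok : EStatus::AlreadyExists;
    }
private:
    std::mutex Mu_;
    std::set<std::string> Paths_;
};

// Every racer converges on "pool exists": the winner gets Ok, the losers
// get AlreadyExists and accept it, as "path exist, request accepts it" does.
void EnsureDefaultPool(SchemeShard& ss, int actorId) {
    const std::string path = "/Root/.metadata/workload_manager/pools/default";
    for (int attempt = 0; attempt < 3; ++attempt) {   // bounded doublecheck retry
        switch (ss.CreatePath(path)) {
            case EStatus::Ok:
                std::cout << "actor " << actorId << ": created pool\n";
                return;
            case EStatus::AlreadyExists:
                std::cout << "actor " << actorId << ": pool exists, accepting\n";
                return;
            case EStatus::Retry:
                break;                                 // transient, try again
        }
    }
}

int main() {
    SchemeShard ss;
    std::vector<std::thread> actors;
    for (int i = 0; i < 3; ++i)
        actors.emplace_back([&ss, i] { EnsureDefaultPool(ss, i); });
    for (auto& t : actors) t.join();
}

Treating AlreadyExists as success is what makes the bootstrap idempotent: the warnings in the trace are expected noise from losing racers, not failures.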
2025-03-31T16:31:45.301397Z node 2 :TX_PROXY_SCHEME_CACHE WARN: Path does not belong to the specified domain: self# [2:228:2206], domain# [OwnerId: 72057594046678944, LocalPathId: 1], path's domain# [OwnerId: 72057594046678944, LocalPathId: 2] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::TableSchemaVersion [GOOD] Test command err: 2025-03-31T16:31:44.603584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:31:44.603653Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-03-31T16:31:44.786921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 2025-03-31T16:31:45.200524Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:31:45.200572Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-03-31T16:31:45.249274Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 2025-03-31T16:31:45.292027Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 101:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-03-31T16:31:45.461225Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 102:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestWaitNotification: OK eventTxId 102 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-34 [GOOD] >> 
SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-35 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-10 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-11 >> TCacheTest::MigrationUndo [GOOD] >> AutoConfig::GetServicePoolsWith3CPUs [GOOD] >> AutoConfig::GetASPoolsWith3CPUs [GOOD] |94.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetServicePoolsWith4AndMoreCPUs [GOOD] |94.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest |94.3%| [TA] $(B)/ydb/core/tx/columnshard/ut_rw/test-results/unittest/{meta.json ... results_accumulator.log} >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-58 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-59 |94.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetASPoolsWith3CPUs [GOOD] |94.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetServicePoolsWith3CPUs [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapRefreshRemoveUserBad [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapRefreshGroupsInfoWithError >> AutoConfig::GetASPoolsWith2CPUs [GOOD] >> AutoConfig::GetServicePoolsWith2CPUs [GOOD] >> LdapAuthProviderTest_nonSecure::LdapRefreshRemoveUserBad [GOOD] >> AutoConfig::GetASPoolsith1CPU [GOOD] >> LdapAuthProviderTest_nonSecure::LdapRefreshGroupsInfoWithError >> AutoConfig::GetServicePoolsWith1CPU [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-40 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-41 |94.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetServicePoolsWith4AndMoreCPUs [GOOD] |94.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetASPoolsWith2CPUs [GOOD] |94.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetServicePoolsWith2CPUs [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDefaultGroupAttributeGoodUseListOfHosts [GOOD] >> TPersQueueTest::SrcIdCompatibility [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-46 [GOOD] >> DataShardSnapshots::VolatileSnapshotSplit >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithRemovedUserCredentialsBad [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-58 [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDontExistGroupAttribute [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithRemovedUserCredentialsBad [GOOD] >> LdapAuthProviderTest::LdapRequestWithEmptyBindDn [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithCustomGroupAttributeGood >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-47 >> LdapAuthProviderTest_LdapsScheme::LdapRefreshGroupsInfoGood >> LdapAuthProviderTest::LdapRequestWithEmptyBindPassword >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithInvalidRobotUserLoginBad >> LdapAuthProviderTest_nonSecure::LdapRefreshGroupsInfoGood >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-59 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::MigrationUndo [GOOD] Test command err: 2025-03-31T16:31:44.598788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console 
configs 2025-03-31T16:31:44.598851Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-03-31T16:31:44.789438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 Leader for TabletID 72057594046678944 is [1:70:2109] sender: [1:175:2067] recipient: [1:46:2093] Leader for TabletID 72057594046678944 is [1:70:2109] sender: [1:178:2067] recipient: [1:24:2071] Leader for TabletID 72057594046678944 is [1:70:2109] sender: [1:179:2067] recipient: [1:177:2171] Leader for TabletID 72057594046678944 is [1:180:2172] sender: [1:181:2067] recipient: [1:177:2171] 2025-03-31T16:31:44.851073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:31:44.851155Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 101 Leader for TabletID 72057594046678944 is [1:180:2172] sender: [1:211:2067] recipient: [1:24:2071] 2025-03-31T16:31:44.881886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2025-03-31T16:31:44.888839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 102:0, at schemeshard: 72057594046678944 Leader for TabletID 72075186233409546 is [0:0:0] sender: [1:247:2067] recipient: [1:238:2213] IGNORE Leader for TabletID 72075186233409546 is [0:0:0] sender: [1:247:2067] recipient: [1:238:2213] Leader for TabletID 72075186233409546 is [0:0:0] sender: [1:248:2067] recipient: [1:24:2071] IGNORE Leader for TabletID 72075186233409546 is [0:0:0] sender: [1:248:2067] recipient: [1:24:2071] Leader for TabletID 72075186233409547 is [0:0:0] sender: [1:249:2067] recipient: [1:243:2217] IGNORE Leader for TabletID 72075186233409547 is [0:0:0] sender: [1:249:2067] recipient: [1:243:2217] Leader for TabletID 72075186233409547 is [0:0:0] sender: [1:252:2067] recipient: [1:24:2071] IGNORE Leader for TabletID 72075186233409547 is [0:0:0] sender: [1:252:2067] recipient: [1:24:2071] Leader for TabletID 72075186233409546 is [1:251:2219] sender: [1:253:2067] recipient: [1:238:2213] Leader for TabletID 72075186233409547 is [1:255:2221] sender: [1:256:2067] recipient: [1:243:2217] TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 TestWaitNotification wait txId: 102 2025-03-31T16:31:44.927794Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 Leader for TabletID 72075186233409546 is [1:251:2219] sender: [1:289:2067] recipient: [1:24:2071] Leader for TabletID 72075186233409547 is [1:255:2221] 
sender: [1:290:2067] recipient: [1:24:2071] FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestModificationResults wait txId: 104 2025-03-31T16:31:45.036240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 104:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 103 TestWaitNotification wait txId: 104 Leader for TabletID 72075186233409548 is [0:0:0] sender: [1:341:2067] recipient: [1:336:2285] IGNORE Leader for TabletID 72075186233409548 is [0:0:0] sender: [1:341:2067] recipient: [1:336:2285] Leader for TabletID 72075186233409548 is [0:0:0] sender: [1:343:2067] recipient: [1:24:2071] IGNORE Leader for TabletID 72075186233409548 is [0:0:0] sender: [1:343:2067] recipient: [1:24:2071] Leader for TabletID 72075186233409548 is [1:344:2289] sender: [1:345:2067] recipient: [1:336:2285] TestWaitNotification: OK eventTxId 103 Leader for TabletID 72075186233409548 is [1:344:2289] sender: [1:362:2067] recipient: [1:24:2071] TestWaitNotification: OK eventTxId 104 TestModificationResults wait txId: 105 2025-03-31T16:31:45.262057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpUpgradeSubDomain, opId: 105:0, at schemeshard: 72057594046678944 Leader for TabletID 72075186233409549 is [0:0:0] sender: [1:420:2067] recipient: [1:416:2333] IGNORE Leader for TabletID 72075186233409549 is [0:0:0] sender: [1:420:2067] recipient: [1:416:2333] Leader for TabletID 72075186233409549 is [0:0:0] sender: [1:421:2067] recipient: [1:24:2071] IGNORE Leader for TabletID 72075186233409549 is [0:0:0] sender: [1:421:2067] recipient: [1:24:2071] Leader for TabletID 72075186233409549 is [1:423:2337] sender: [1:424:2067] recipient: [1:416:2333] Leader for TabletID 72075186233409549 is [1:423:2337] sender: [1:425:2067] recipient: [1:24:2071] 2025-03-31T16:31:45.309732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:31:45.309799Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 TestWaitNotification: OK eventTxId 105 { Path: Root/USER_0/DirA TableId: [72057594046678944:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 SchemeShard: 72075186233409549 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } TestModificationResults wait txId: 106 2025-03-31T16:31:45.340194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Mark as Migrated path id [OwnerId: 72057594046678944, LocalPathId: 3] 2025-03-31T16:31:45.340251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Mark as Migrated path id [OwnerId: 72057594046678944, LocalPathId: 4] 2025-03-31T16:31:45.340591Z node 1 :FLAT_TX_SCHEMESHARD ERROR: TWait 
ProgressState, dependent transaction: 106, parent transaction: 105, at schemeshard: 72057594046678944 2025-03-31T16:31:45.340702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpUpgradeSubDomainDecision, opId: 106:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-03-31T16:31:45.361880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 2025-03-31T16:31:45.362558Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 106 { Path: Root/USER_0/DirA TableId: [72057594046678944:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 SchemeShard: 72075186233409549 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } TestModificationResults wait txId: 107 TestModificationResult got TxId: 107, wait until txId: 107 TestWaitNotification wait txId: 107 skipDeleteNotification path: /Root/USER_0/DirA/Table1 pathId: [OwnerId: 72057594046678944, LocalPathId: 4] Strong: 1 TestWaitNotification: OK eventTxId 107 TestModificationResults wait txId: 108 2025-03-31T16:31:45.404198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 108:0, at schemeshard: 72075186233409549 TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 skipDeleteNotification path: /Root/USER_0/DirA pathId: [OwnerId: 72057594046678944, LocalPathId: 3] Strong: 1 TestWaitNotification: OK eventTxId 108 TestModificationResults wait txId: 109 skipDeleteNotification path: /Root/USER_0/DirA pathId: [OwnerId: 72057594046678944, LocalPathId: 3] Strong: 1 TestModificationResult got TxId: 109, wait until txId: 109 TestWaitNotification wait txId: 109 TestWaitNotification: OK eventTxId 109 TestModificationResults wait txId: 110 2025-03-31T16:31:45.500316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 110:0, at schemeshard: 72075186233409549 Leader for TabletID 72075186233409550 is [0:0:0] sender: [1:623:2067] recipient: [1:618:2502] IGNORE Leader for TabletID 72075186233409550 is [0:0:0] sender: [1:623:2067] recipient: [1:618:2502] Leader for TabletID 72075186233409550 is [0:0:0] sender: [1:626:2067] recipient: [1:24:2071] IGNORE Leader for TabletID 72075186233409550 is [0:0:0] sender: [1:626:2067] recipient: [1:24:2071] Leader for TabletID 72075186233409550 is [1:627:2506] sender: [1:628:2067] recipient: [1:618:2502] Leader for TabletID 72075186233409550 is [1:627:2506] sender: [1:629:2067] recipient: [1:24:2071] TestModificationResult got TxId: 110, wait until txId: 110 TestWaitNotification wait txId: 110 TestWaitNotification: OK eventTxId 110 2025-03-31T16:31:46.008806Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:31:46.008863Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 
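The FAKE_COORDINATOR lines repeated throughout these cache-test traces narrate one sequencing rule: a transaction is added at a plan step no earlier than its minStep, FrontStep advances monotonically to that step, the plan is sent to the tablet, and the txId is erased. A toy model of that rule follows, assuming made-up class and method names; it is only an illustration of the monotone step assignment visible in the log, not YDB's coordinator code.

// Toy model of the FAKE_COORDINATOR hand-off: plan step >= minStep,
// FrontStep only moves forward, tx erased once its plan is sent.
#include <algorithm>
#include <cstdint>
#include <iostream>
#include <map>

class ToyCoordinator {
public:
    uint64_t Plan(uint64_t txId, uint64_t minStep) {
        // Each tx is planned no earlier than minStep and strictly after
        // the last planned step, so FrontStep never moves backwards.
        const uint64_t step = std::max(minStep, FrontStep_ + 1);
        Pending_[txId] = step;
        std::cout << "Add transaction: " << txId << " at step: " << step << "\n"
                  << "advance: minStep" << minStep
                  << " State->FrontStep: " << FrontStep_ << "\n";
        FrontStep_ = step;
        std::cout << "Send Plan for txId: " << txId << " at step: " << step << "\n"
                  << "Erasing txId " << txId << "\n";
        Pending_.erase(txId);
        return step;
    }

private:
    uint64_t FrontStep_ = 0;
    std::map<uint64_t, uint64_t> Pending_;
};

int main() {
    ToyCoordinator coord;
    coord.Plan(1,   5000001);  // -> step 5000001
    coord.Plan(101, 5000002);  // -> step 5000002
    coord.Plan(102, 5000002);  // -> step 5000003: FrontStep already past minStep
}

This mirrors the pattern in the traces where consecutive transactions land on steps 5000001, 5000002, 5000003 even when two of them share the same minStep.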
2025-03-31T16:31:46.059806Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 2025-03-31T16:31:46.064293Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 65543, Sender [2:174:2170], Recipient [2:70:2109]: NActors::TEvents::TEvPoison 2025-03-31T16:31:46.064750Z node 2 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [2:70:2109] sender: [2:175:2067] recipient: [2:46:2093] Leader for TabletID 72057594046678944 is [2:70:2109] sender: [2:178:2067] recipient: [2:24:2071] Leader for TabletID 72057594046678944 is [2:70:2109] sender: [2:179:2067] recipient: [2:177:2171] Leader for TabletID 72057594046678944 is [2:180:2172] sender: [2:181:2067] recipient: [2:177:2171] 2025-03-31T16:31:46.070080Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateInit, received ev ... : 2] was 2 2025-03-31T16:31:46.606362Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TTxInit for Shards, read: 72057594046678944:2, tabletId: 72075186233409547, PathId: [OwnerId: 72057594046678944, LocalPathId: 2], TabletType: Mediator, at schemeshard: 72057594046678944 2025-03-31T16:31:46.606382Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-31T16:31:46.606403Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TTxInit for Shards, read: 72057594046678944:3, tabletId: 72075186233409548, PathId: [OwnerId: 72057594046678944, LocalPathId: 4], TabletType: DataShard, at schemeshard: 72057594046678944 2025-03-31T16:31:46.606420Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-03-31T16:31:46.606508Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 1, at schemeshard: 72057594046678944 2025-03-31T16:31:46.606640Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:31:46.606796Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 3, at schemeshard: 72057594046678944 2025-03-31T16:31:46.607026Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:31:46.607172Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:31:46.607469Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:31:46.607522Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:31:46.607668Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:31:46.607777Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:31:46.607843Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:31:46.608020Z node 2 
:FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:31:46.608086Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:31:46.608242Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:31:46.608420Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-03-31T16:31:46.608566Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-03-31T16:31:46.608605Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-03-31T16:31:46.608653Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-03-31T16:31:46.608881Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-03-31T16:31:46.610248Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-31T16:31:46.610395Z node 2 :FLAT_TX_SCHEMESHARD TRACE: [RootDataErasureManager] Stop 2025-03-31T16:31:46.611454Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435082, Sender [2:516:2402], Recipient [2:516:2402]: NKikimr::NSchemeShard::TEvPrivate::TEvServerlessStorageBilling 2025-03-31T16:31:46.611500Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvServerlessStorageBilling 2025-03-31T16:31:46.612017Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:31:46.612081Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:31:46.612287Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:31:46.612340Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:31:46.612389Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:31:46.612420Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-31T16:31:46.612581Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274399233, Sender [2:532:2402], Recipient [2:516:2402]: NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2025-03-31T16:31:46.612617Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxAllocatorClient::TEvAllocateResult 2025-03-31T16:31:46.612648Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:31:46.624265Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:160:2157], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:31:46.624461Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:160:2157], cacheItem# { Subscriber: { Subscriber: [2:382:2319] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess 
Kind: 8 TableKind: 0 Created: 1 CreateStep: 5000002 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] DomainId: [OwnerId: 72057594046678944, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:31:46.624778Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:544:2419], recipient# [2:543:2418], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/USER_0 TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } { Path: Root/USER_0 TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-31T16:31:46.625304Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:160:2157], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/USER_0/DirA TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:31:46.625434Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:160:2157], cacheItem# { Subscriber: { Subscriber: [2:391:2322] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 200 PathId: [OwnerId: 72057594046678944, LocalPathId: 3] DomainId: [OwnerId: 72057594046678944, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/USER_0/DirA TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:31:46.625697Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:546:2421], recipient# [2:545:2420], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/USER_0/DirA TableId: [72057594046678944:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 
Mediators: 72075186233409547 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } { Path: Root/USER_0/DirA TableId: [72057594046678944:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-31T16:31:46.626181Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:160:2157], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/USER_0/DirA/Table1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:31:46.626312Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:160:2157], cacheItem# { Subscriber: { Subscriber: [2:400:2325] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 250 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] DomainId: [OwnerId: 72057594046678944, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { Path: Root/USER_0/DirA/Table1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:31:46.626521Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:548:2423], recipient# [2:547:2422], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/USER_0/DirA/Table1 TableId: [72057594046678944:4:1] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } |94.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetServicePoolsWith1CPU [GOOD] |94.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest |94.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetASPoolsith1CPU [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-23 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-17 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-24 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-18 >> ColumnShardTiers::TieringUsage >> S3SettingsConversion::Basic [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-11 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-12 >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithCustomGroupAttributeGood [GOOD] >> 
LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsUseInvalidSearchFilterBad |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |94.4%| [TA] {RESULT} $(B)/ydb/core/tx/columnshard/ut_rw/test-results/unittest/{meta.json ... results_accumulator.log} |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest >> TPersQueueTest::SrcIdCompatibility [GOOD] Test command err: === Start server === Server->StartServer(false); 2025-03-31T16:25:54.823480Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488010716687480305:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:54.823543Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:25:54.922635Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488010717007500124:2186];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:25:54.922865Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:25:55.195137Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-31T16:25:55.210079Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a25/r3tmp/tmp18ZXkG/pdisk_1.dat 2025-03-31T16:25:55.607019Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:25:55.634094Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:55.634197Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:55.636647Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:25:55.636851Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:25:55.638573Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:25:55.644656Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:25:55.645886Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29834, node 1 2025-03-31T16:25:55.883340Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: 
/home/runner/.ya/build/build_root/0l5c/000a25/r3tmp/yandexNPR4uc.tmp 2025-03-31T16:25:55.883367Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/0l5c/000a25/r3tmp/yandexNPR4uc.tmp 2025-03-31T16:25:55.883518Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/0l5c/000a25/r3tmp/yandexNPR4uc.tmp 2025-03-31T16:25:55.883684Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:25:55.944302Z INFO: TTestServer started on Port 15385 GrpcPort 29834 TClient is connected to server localhost:15385 PQClient connected to localhost:29834 === TenantModeEnabled() = 0 === Init PQ - start server on port 29834 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:25:56.325572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-03-31T16:25:56.325809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:25:56.326025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-03-31T16:25:56.326240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-31T16:25:56.326284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:25:56.335804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-03-31T16:25:56.335941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-03-31T16:25:56.336150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:25:56.336191Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-03-31T16:25:56.336214Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-03-31T16:25:56.336227Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 2025-03-31T16:25:56.340465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:25:56.340507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2025-03-31T16:25:56.340530Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:25:56.344633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:25:56.344683Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-03-31T16:25:56.344702Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-03-31T16:25:56.347775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:25:56.347813Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:25:56.347854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-31T16:25:56.347891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-03-31T16:25:56.352844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:25:56.356171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-03-31T16:25:56.356330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-03-31T16:25:56.361709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743438356401, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-31T16:25:56.361869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743438356401 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-03-31T16:25:56.361898Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-31T16:25:56.362526Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-03-31T16:25:56.362568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-31T16:25:56.362796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount 
reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-03-31T16:25:56.362870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-03-31T16:25:56.365351Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-03-31T16:25:56.365391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-03-31T16:25:56.365589Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-03-31T16:25:56.365622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7488010720982448237:2387], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1 2025-03-31T16:25:56.365680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:25:56.365701Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-03-31T16:25:56.365816Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-03-31T16:25:56.365828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-03-31T16:25:56.365848Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-03-31T16:25:56.365855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-03-31T16:25:56.365887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 1/1, is published: false 2025-03-31T16:25:56.365910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-03-31T16:25:56.365923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-03-31T16:25:56.365940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710657:0 2025-03-31T16:25:56.365982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [O ... n: 7 expectedGeneration: (NULL) 2025-03-31T16:31:46.391992Z node 27 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037910 (partition=7) TEvClientConnected Status OK, TabletId: 72075186224037910, NodeId 27, Generation: 1 2025-03-31T16:31:46.391995Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910] server connected, pipe [27:7488012225095682489:2733], now have 1 active actors on pipe 2025-03-31T16:31:46.392025Z node 27 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--account--topic100' requestId: 2025-03-31T16:31:46.392056Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910] got client message batch for topic 'rt3.dc1--account--topic100' partition 7 2025-03-31T16:31:46.392177Z node 27 :PERSQUEUE INFO: new Cookie test-src-id-compat2|79b7c525-df74829f-b658c942-febeeab0_0 generated for partition 7 topic 'rt3.dc1--account--topic100' owner test-src-id-compat2 2025-03-31T16:31:46.392345Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] TPartition::ReplyOwnerOk. 
Partition: 7 2025-03-31T16:31:46.392467Z node 27 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--account--topic100' partition: 7 messageNo: 0 requestId: cookie: 0 2025-03-31T16:31:46.392614Z node 27 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--account--topic100' requestId: 2025-03-31T16:31:46.392644Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910] got client message batch for topic 'rt3.dc1--account--topic100' partition 7 2025-03-31T16:31:46.392741Z node 27 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--account--topic100' partition: 7 messageNo: 0 requestId: cookie: 0 2025-03-31T16:31:46.392925Z node 27 :PQ_WRITE_PROXY INFO: session inited cookie: 5 partition: 7 MaxSeqNo: 0 sessionId: test-src-id-compat2|79b7c525-df74829f-b658c942-febeeab0_0 2025-03-31T16:31:46.393725Z :INFO: [] MessageGroupId [test-src-id-compat2] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1743438706393 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-31T16:31:46.393913Z :INFO: [] MessageGroupId [test-src-id-compat2] SessionId [] Write session established. Init response: session_id: "test-src-id-compat2|79b7c525-df74829f-b658c942-febeeab0_0" topic: "account/topic100" cluster: "dc1" partition_id: 7 supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2025-03-31T16:31:46.394224Z :DEBUG: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|79b7c525-df74829f-b658c942-febeeab0_0] Write 1 messages with Id from 1 to 1 2025-03-31T16:31:46.394796Z :DEBUG: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|79b7c525-df74829f-b658c942-febeeab0_0] Write session: try to update token 2025-03-31T16:31:46.394862Z :DEBUG: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|79b7c525-df74829f-b658c942-febeeab0_0] Send 1 message(s) (0 left), first sequence number is 1 2025-03-31T16:31:46.395390Z node 27 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 5 sessionId: test-src-id-compat2|79b7c525-df74829f-b658c942-febeeab0_0 grpc read done: success: 1 data: write_request[data omitted] 2025-03-31T16:31:46.395699Z node 27 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037910 (partition=7) Received event: NKikimr::NPQ::TEvPartitionWriter::TEvWriteRequest 2025-03-31T16:31:46.395861Z node 27 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--account--topic100' requestId: 2025-03-31T16:31:46.395909Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910] got client message batch for topic 'rt3.dc1--account--topic100' partition 7 2025-03-31T16:31:46.396030Z node 27 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--account--topic100' partition: 7 messageNo: 0 requestId: cookie: 1 2025-03-31T16:31:46.396102Z node 27 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037910 (partition=7) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-03-31T16:31:46.396247Z node 27 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--account--topic100' requestId: 2025-03-31T16:31:46.396272Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910] got client message batch for topic 'rt3.dc1--account--topic100' partition 7 2025-03-31T16:31:46.396339Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910] got client message topic: rt3.dc1--account--topic100 partition: 7 SourceId: '\0test-src-id-compat2' SeqNo: 1 partNo : 0 messageNo: 1 size 102 offset: -1 2025-03-31T16:31:46.396647Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] 
Topic 'rt3.dc1--account--topic100' partition 7 part blob processing sourceId '\0test-src-id-compat2' seqNo 1 partNo 0 2025-03-31T16:31:46.397492Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] Topic 'rt3.dc1--account--topic100' partition 7 part blob complete sourceId '\0test-src-id-compat2' seqNo 1 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 189 count 1 nextOffset 1 batches 1 2025-03-31T16:31:46.398542Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] Add new write blob: topic 'rt3.dc1--account--topic100' partition 7 compactOffset 0,1 HeadOffset 0 endOffset 0 curOffset 1 d0000000007_00000000000000000000_00000_0000000001_00000| size 177 WTime 1743438706398 2025-03-31T16:31:46.398749Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] === DumpKeyValueRequest === 2025-03-31T16:31:46.398786Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] --- delete ---------------- 2025-03-31T16:31:46.398815Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] [x0000000007, x0000000008) 2025-03-31T16:31:46.398850Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] --- write ----------------- 2025-03-31T16:31:46.398885Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] m0000000007ptest-src-id-compat2 2025-03-31T16:31:46.398907Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] d0000000007_00000000000000000000_00000_0000000001_00000| 2025-03-31T16:31:46.398921Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] i0000000007 2025-03-31T16:31:46.398955Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] --- rename ---------------- 2025-03-31T16:31:46.398990Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] =========================== 2025-03-31T16:31:46.399060Z node 27 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-03-31T16:31:46.399246Z node 27 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 7 offset 0 partNo 0 count 1 size 177 2025-03-31T16:31:46.402203Z node 27 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 7 offset 0 count 1 size 177 actorID [27:7488012216505746404:2506] 2025-03-31T16:31:46.402339Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 122 WriteNewSizeFromSupportivePartitions# 0 2025-03-31T16:31:46.402366Z node 27 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. Tablet '72075186224037910' partition 7 offset 0 partno 0 count 1 parts 0 size 177 2025-03-31T16:31:46.402398Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] TPartition::ReplyWrite. 
Partition: 7 2025-03-31T16:31:46.402473Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] Answering for message sourceid: '\0test-src-id-compat2', Topic: 'rt3.dc1--account--topic100', Partition: 7, SeqNo: 1, partNo: 0, Offset: 0 is stored on disk 2025-03-31T16:31:46.402479Z node 27 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--account--topic100' partition: 7 messageNo: 1 requestId: cookie: 1 2025-03-31T16:31:46.402599Z node 27 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037910 (partition=7) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-03-31T16:31:46.402708Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] Topic 'rt3.dc1--account--topic100' partition 7 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-03-31T16:31:46.402740Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] Topic 'rt3.dc1--account--topic100' partition 7 user user send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2025-03-31T16:31:46.402876Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] read cookie 0 Topic 'rt3.dc1--account--topic100' partition 7 user user offset 0 count 1 size 1024000 endOffset 1 max time lag 0ms effective offset 0 2025-03-31T16:31:46.402910Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 1 2025-03-31T16:31:46.402960Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] Reading cookie 0. All data is from uncompacted head. 2025-03-31T16:31:46.402989Z node 27 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-03-31T16:31:46.403048Z node 27 :PERSQUEUE DEBUG: Topic 'rt3.dc1--account--topic100' partition 7 user user readTimeStamp done, result 1743438706396 queuesize 0 startOffset 0 2025-03-31T16:31:46.403379Z :DEBUG: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|79b7c525-df74829f-b658c942-febeeab0_0] Write session got write response: sequence_numbers: 1 offsets: 0 already_written: false partition_id: 7 write_statistics { persist_duration_ms: 4 queued_in_partition_duration_ms: 1 } 2025-03-31T16:31:46.403442Z :DEBUG: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|79b7c525-df74829f-b658c942-febeeab0_0] Write session: acknoledged message 1 2025-03-31T16:31:46.403733Z :INFO: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|79b7c525-df74829f-b658c942-febeeab0_0] Write session: close. 
Timeout = 0 ms 2025-03-31T16:31:46.403826Z :INFO: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|79b7c525-df74829f-b658c942-febeeab0_0] Write session will now close 2025-03-31T16:31:46.403899Z :DEBUG: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|79b7c525-df74829f-b658c942-febeeab0_0] Write session: aborting 2025-03-31T16:31:46.404551Z :INFO: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|79b7c525-df74829f-b658c942-febeeab0_0] Write session: gracefully shut down, all writes complete 2025-03-31T16:31:46.404626Z :DEBUG: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|79b7c525-df74829f-b658c942-febeeab0_0] Write session: destroy 2025-03-31T16:31:46.405556Z node 27 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 5 sessionId: test-src-id-compat2|79b7c525-df74829f-b658c942-febeeab0_0 grpc read done: success: 0 data: 2025-03-31T16:31:46.405603Z node 27 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: test-src-id-compat2|79b7c525-df74829f-b658c942-febeeab0_0 grpc read failed 2025-03-31T16:31:46.405652Z node 27 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: test-src-id-compat2|79b7c525-df74829f-b658c942-febeeab0_0 grpc closed 2025-03-31T16:31:46.405685Z node 27 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: test-src-id-compat2|79b7c525-df74829f-b658c942-febeeab0_0 is DEAD 2025-03-31T16:31:46.407029Z node 27 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037910 (partition=7) Received event: NActors::TEvents::TEvPoison 2025-03-31T16:31:46.407218Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910] server disconnected, pipe [27:7488012225095682489:2733] destroyed 2025-03-31T16:31:46.407272Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] TPartition::DropOwner. |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> S3SettingsConversion::Basic [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-11 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-12 |94.4%| [TA] $(B)/ydb/core/driver_lib/run/ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.4%| [TA] {RESULT} $(B)/ydb/core/driver_lib/run/ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.4%| [TA] $(B)/ydb/core/tx/replication/service/ut_json_change_record/test-results/unittest/{meta.json ... results_accumulator.log} >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-35 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-36 |94.4%| [TA] {RESULT} $(B)/ydb/core/tx/replication/service/ut_json_change_record/test-results/unittest/{meta.json ... 
results_accumulator.log} >> LdapAuthProviderTest::LdapRequestWithEmptyBindPassword [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsDisableRequestToAD >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithInvalidRobotUserLoginBad [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithInvalidRobotUserPasswordBad >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-59 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-60 |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |94.5%| [TA] $(B)/ydb/services/persqueue_v1/ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> S3SettingsConversion::FoldersStrictStyle [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-41 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-42 |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> S3SettingsConversion::FoldersStrictStyle [GOOD] >> LdapAuthProviderTest_StartTls::LdapRefreshGroupsInfoGood [GOOD] >> LdapAuthProviderTest_StartTls::LdapRefreshGroupsInfoDisableNestedGroupsGood |94.5%| [TA] {RESULT} $(B)/ydb/services/persqueue_v1/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-47 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-48 >> TTopicReaderTests::TestRun_ReadOneMessage [GOOD] >> TTopicReaderTests::TestRun_ReadTwoMessages_With_Limit_1 >> DataShardSnapshots::VolatileSnapshotSplit [GOOD] >> DataShardSnapshots::VolatileSnapshotMerge >> KqpMultishardIndex::WriteIntoRenamingAsyncIndex [GOOD] |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::DSConfigs >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-59 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-60 |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsUseInvalidSearchFilterBad [GOOD] >> ColumnShardTiers::DSConfigsWithQueryServiceDdl >> TTopicReaderTests::TestRun_ReadMoreMessagesThanLimit_Without_Wait_NewlineDelimited [GOOD] >> TTopicReaderTests::TestRun_ReadMoreMessagesThanLimit_Without_Wait_NoDelimiter ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpMultishardIndex::WriteIntoRenamingAsyncIndex [GOOD] Test command err: Trying to start YDB, gRPC: 63592, MsgBus: 11956 2025-03-31T16:30:48.548814Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011978272444895:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:48.549094Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dc3/r3tmp/tmpFomtHI/pdisk_1.dat 2025-03-31T16:30:48.944967Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63592, node 1 2025-03-31T16:30:48.986758Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:30:48.986880Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:30:48.993440Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:30:49.059684Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:30:49.059712Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:30:49.059719Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:30:49.059846Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11956 TClient is connected to server 
localhost:11956 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:30:49.667862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:49.684117Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:30:49.693900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:49.859651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:50.033541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:50.115114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:51.876880Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011991157348533:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:51.877013Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:52.235750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:30:52.274319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:30:52.308613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:30:52.378565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:30:52.414563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:30:52.453773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:30:52.502255Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011995452316342:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:52.502326Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:52.502404Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011995452316347:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:52.505907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:30:52.514989Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011995452316349:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:30:52.599636Z node 1 :TX_PROXY ERROR: Actor# [1:7488011995452316404:3441] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:30:53.540576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:30:53.549541Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011978272444895:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:53.549725Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; waiting... 2025-03-31T16:30:54.929397Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710696. Error resolving keys for entry: { TableId: [OwnerId: 72057594046644480, LocalPathId: 18] Access: 4 SyncVersion: false Status: PathErrorNotExist Kind: KindUnknown PartitionsCount: 0 DomainInfo From: (Uint32 : NULL, Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2025-03-31T16:30:54.942387Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTBiZDc2ODQtNmViYzU2YzEtODA3OGI1MzYtMjk4ODk0Mzk=, ActorId: [1:7488011999747284792:2557], ActorState: ExecuteState, TraceId: 01jqpgnbde5pg0jyffcw320raj, Create QueryResponse for error on request, msg: 2025-03-31T16:31:03.891816Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037926 not found 2025-03-31T16:31:03.893499Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037929 not found 2025-03-31T16:31:03.893526Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037928 not found 2025-03-31T16:31:03.904690Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_WRONG_SHARD_STATE;details=Rejecting data TxId 281474976711178 because datashard 72075186224037927: is in a pre/offline state assuming this is due to a finished split (wrong shard state);tx_id=281474976711178; 2025-03-31T16:31:03.905052Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488012042696969757:2557], Table: `/Root/MultiShardIndexed/index_new/indexImplTable` ([72057594046644480:20:2]), SessionActorId: [1:7488011999747284792:2557]Got WRONG SHARD STATE for table `/Root/MultiShardIndexed/index_new/indexImplTable`. ShardID=72075186224037927, Sink=[1:7488012042696969757:2557].{
: Error: Rejecting data TxId 281474976711178 because datashard 72075186224037927: is in a pre/offline state assuming this is due to a finished split (wrong shard state), code: 2029 } 2025-03-31T16:31:03.905629Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488012042696969701:2557], SessionActorId: [1:7488011999747284792:2557], statusCode=UNAVAILABLE. Issue=
: Error: Wrong shard state. Table `/Root/MultiShardIndexed/index_new/indexImplTable`., code: 2005
: Error: Rejecting data TxId 281474976711178 because datashard 72075186224037927: is in a pre/offline state assuming this is due to a finished split (wrong shard state), code: 2029 . sessionActorId=[1:7488011999747284792:2557]. isRollback=0 2025-03-31T16:31:03.906060Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTBiZDc2ODQtNmViYzU2YzEtODA3OGI1MzYtMjk4ODk0Mzk=, ActorId: [1:7488011999747284792:2557], ActorState: ExecuteState, TraceId: 01jqpgnm5xdtt85eyczzmvkkt7, got TEvKqpBuffer::TEvError in ExecuteState, status: UNAVAILABLE send to: [1:7488012042696969729:2557] from: [1:7488012042696969701:2557] 2025-03-31T16:31:03.906144Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7488012042696969729:2557] TxId: 281474976711178. Ctx: { TraceId: 01jqpgnm5xdtt85eyczzmvkkt7, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTBiZDc2ODQtNmViYzU2YzEtODA3OGI1MzYtMjk4ODk0Mzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNAVAILABLE: {
: Error: Wrong shard state. Table `/Root/MultiShardIndexed/index_new/indexImplTable`., code: 2005 subissue: {
: Error: Rejecting data TxId 281474976711178 because datashard 72075186224037927: is in a pre/offline state assuming this is due to a finished split (wrong shard state), code: 2029 } } 2025-03-31T16:31:03.908803Z node 1 :KQP_SESSION WARN: Sess ... 16:31:15.134138Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037919 not found 2025-03-31T16:31:15.167666Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037941 not found 2025-03-31T16:31:15.167706Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037942 not found 2025-03-31T16:31:20.151141Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037940 not found 2025-03-31T16:31:20.151239Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037944 not found 2025-03-31T16:31:20.151260Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037921 not found 2025-03-31T16:31:25.317656Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037945 not found 2025-03-31T16:31:25.321203Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037946 not found Trying to start YDB, gRPC: 2255, MsgBus: 10238 2025-03-31T16:31:33.361891Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012171623611630:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:33.361984Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dc3/r3tmp/tmpydA58C/pdisk_1.dat 2025-03-31T16:31:33.448117Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2255, node 2 2025-03-31T16:31:33.493328Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:33.493453Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:33.495399Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:31:33.522982Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:33.523007Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:33.523014Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:33.523175Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10238 TClient is connected to server localhost:10238 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:31:33.892010Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:33.909132Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:33.981081Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:34.130686Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:34.194509Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:36.394891Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012184508515303:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:36.394994Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:36.436090Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:31:36.474227Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:31:36.509601Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:31:36.562987Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:31:36.603305Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:31:36.643838Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:31:36.701900Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012184508515812:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:36.702011Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:36.702295Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012184508515817:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:36.706172Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:31:36.724809Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488012184508515819:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:31:36.819735Z node 2 :TX_PROXY ERROR: Actor# [2:7488012184508515875:3449] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:31:37.738648Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:38.362082Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488012171623611630:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:38.362145Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:31:48.079871Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037924 not found 2025-03-31T16:31:48.079913Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037922 not found 2025-03-31T16:31:48.080603Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037925 not found 2025-03-31T16:31:48.080647Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037923 not found 2025-03-31T16:31:48.094249Z node 2 :CHANGE_EXCHANGE ERROR: [TableChangeSenderShard][72075186224037937:1][72075186224037927][2:7488012193098452234:2628] Apply status: status# 2, reason# 7 2025-03-31T16:31:48.101339Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037920 not found 2025-03-31T16:31:48.103504Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037921 not found 2025-03-31T16:31:48.106364Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037928 not found 2025-03-31T16:31:48.106406Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037929 not found 2025-03-31T16:31:48.126146Z node 2 :CHANGE_EXCHANGE ERROR: [TableChangeSenderShard][72075186224037937:1][72075186224037927][2:7488012236048135327:2628] Handshake status: status# 2, reason# 7 2025-03-31T16:31:48.428015Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:31:48.428052Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:31:53.099828Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037930 not found 2025-03-31T16:31:53.099870Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037932 not found 2025-03-31T16:31:53.099887Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037933 not found 2025-03-31T16:31:53.099904Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037934 not found 2025-03-31T16:31:53.099920Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle 
TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037931 not found >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithInvalidRobotUserPasswordBad [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsDisableRequestToAD [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-18 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-19 |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-24 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-49 |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsUseInvalidSearchFilterBad [GOOD] Test command err: 2025-03-31T16:31:35.775330Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012179920317470:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:35.776751Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001763/r3tmp/tmp59uU1F/pdisk_1.dat 2025-03-31T16:31:36.203785Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:31:36.247990Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:36.248109Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:36.250281Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6308, node 1 2025-03-31T16:31:36.420117Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:36.420152Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:36.420164Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:36.420303Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:31:36.738533Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-31T16:31:36.742052Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-31T16:31:36.742110Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:31:36.743712Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:1902, port: 1902 2025-03-31T16:31:36.743816Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-31T16:31:36.799734Z node 1 :LDAP_AUTH_PROVIDER DEBUG: 
search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-31T16:31:36.843506Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-03-31T16:31:36.888566Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****Dsrw (D682FFD3) () has now valid token of ldapuser@ldap 2025-03-31T16:31:39.118619Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012196690489195:2222];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:39.122706Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001763/r3tmp/tmpjQVd9a/pdisk_1.dat 2025-03-31T16:31:39.284464Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:31:39.296826Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:39.296904Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:39.298554Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12230, node 2 2025-03-31T16:31:39.365695Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:39.365721Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:39.365829Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:39.365963Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:31:39.459664Z node 2 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-31T16:31:39.462991Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-31T16:31:39.463026Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:31:39.463855Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:7409, port: 7409 2025-03-31T16:31:39.463960Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-31T16:31:39.519673Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-31T16:31:39.563420Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-03-31T16:31:39.564103Z node 2 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-03-31T16:31:39.564178Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-31T16:31:39.607506Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: 
(|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-31T16:31:39.655479Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-31T16:31:39.656609Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****fXoQ (79982E5C) () has now valid token of ldapuser@ldap 2025-03-31T16:31:42.302705Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488012210793118935:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:42.302779Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001763/r3tmp/tmpsqP0s7/pdisk_1.dat 2025-03-31T16:31:42.445900Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:31:42.459808Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:42.459893Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:42.461445Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24423, node 3 2025-03-31T16:31:42.530998Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:42.531022Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:42.531030Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:42.531180Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:31:42.784893Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-31T16:31:42.788204Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-31T16:31:42.788238Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:31:42.788907Z node 3 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:5660, port: 5660 2025-03-31T16:31:42.788960Z node 3 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-31T16:31:42.847679Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-31T16:31:42.895985Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****6RxQ (BD0098EA) () has now valid token of ldapuser@ldap 2025-03-31T16:31:45.503217Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488012222782388653:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:45.503295Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001763/r3tmp/tmpHxpKOl/pdisk_1.dat 2025-03-31T16:31:45.622341Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:31:45.656588Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) 
VolatileState: Unknown -> Disconnected 2025-03-31T16:31:45.656701Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:45.658493Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19112, node 4 2025-03-31T16:31:45.694732Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:45.694769Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:45.694780Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:45.694938Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:31:45.831098Z node 4 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-31T16:31:45.831476Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-31T16:31:45.831494Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:31:45.832201Z node 4 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://qqq:26480 ldaps://localhost:26480 ldaps://localhost:11111, port: 26480 2025-03-31T16:31:45.832294Z node 4 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-31T16:31:45.891660Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-31T16:31:45.937537Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-03-31T16:31:45.939912Z node 4 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-03-31T16:31:45.939976Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-31T16:31:45.984323Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-31T16:31:46.030006Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-31T16:31:46.031276Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****l3mw (DF5DE207) () has now valid token of ldapuser@ldap 2025-03-31T16:31:48.602748Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7488012235585975512:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:48.602853Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001763/r3tmp/tmplEhk8q/pdisk_1.dat 2025-03-31T16:31:48.692072Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26505, node 5 2025-03-31T16:31:48.734501Z node 5 :HIVE 
WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:48.734611Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:48.736335Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:31:48.754607Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:48.754637Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:48.754643Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:48.754737Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:31:48.840078Z node 5 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-31T16:31:48.844013Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-31T16:31:48.844043Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:31:48.844782Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:18358, port: 18358 2025-03-31T16:31:48.844884Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-31T16:31:48.895597Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: groupDN 2025-03-31T16:31:48.939446Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-03-31T16:31:48.940012Z node 5 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-03-31T16:31:48.940084Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2025-03-31T16:31:48.987425Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2025-03-31T16:31:49.031461Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2025-03-31T16:31:49.032514Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****1aZA (CE5F5CA2) () has now valid token of ldapuser@ldap 2025-03-31T16:31:51.452218Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7488012250319826986:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:51.454615Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001763/r3tmp/tmpiwzYLG/pdisk_1.dat 2025-03-31T16:31:51.600240Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:31:51.615685Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2025-03-31T16:31:51.615790Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:51.617690Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9061, node 6 2025-03-31T16:31:51.659259Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:51.659282Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:51.659289Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:51.659424Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:31:51.811065Z node 6 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-31T16:31:51.814633Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-31T16:31:51.814671Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:31:51.815517Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:62678, port: 62678 2025-03-31T16:31:51.815625Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-31T16:31:51.871683Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: &(uid=ldapuser)(), attributes: memberOf 2025-03-31T16:31:51.871778Z node 6 :LDAP_AUTH_PROVIDER DEBUG: Could not perform search for filter &(uid=ldapuser)() on server ldaps://localhost:62678. Bad search filter 2025-03-31T16:31:51.872320Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****9iOQ (97686A11) () has now permanent error message 'Could not login via LDAP (Could not perform search for filter &(uid=ldapuser)() on server ldaps://localhost:62678. 
Bad search filter)' |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> S3SettingsConversion::StyleDeduction [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithInvalidRobotUserPasswordBad [GOOD] Test command err: 2025-03-31T16:31:35.777135Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012178874177219:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:35.777755Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001720/r3tmp/tmpYnJk75/pdisk_1.dat 2025-03-31T16:31:36.219137Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:36.219929Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:36.223603Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:31:36.243338Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16558, node 1 2025-03-31T16:31:36.416102Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:36.416123Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:36.416147Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:36.416251Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:31:36.719285Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-31T16:31:36.724492Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-31T16:31:36.724539Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:31:36.726096Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:12390, port: 12390 2025-03-31T16:31:36.726194Z node 1 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-03-31T16:31:36.740578Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-31T16:31:36.783793Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-31T16:31:36.827516Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-03-31T16:31:36.828133Z node 1 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-03-31T16:31:36.828179Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-31T16:31:36.871532Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: 
(|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-31T16:31:36.918685Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-31T16:31:36.920410Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****B0Rg (96BBCA65) () has now valid token of ldapuser@ldap 2025-03-31T16:31:39.135199Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012195640477737:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:39.145269Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001720/r3tmp/tmpPmYHWu/pdisk_1.dat 2025-03-31T16:31:39.299829Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5889, node 2 2025-03-31T16:31:39.317390Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:39.317462Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:39.321361Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:31:39.376822Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:39.376849Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:39.376859Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:39.377014Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:31:39.454749Z node 2 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-31T16:31:39.455069Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-31T16:31:39.455107Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:31:39.455851Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:3540, port: 3540 2025-03-31T16:31:39.455937Z node 2 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-03-31T16:31:39.466378Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-31T16:31:39.511587Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-31T16:31:39.555981Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****a1qA (F2B82C74) () has now valid token of ldapuser@ldap 2025-03-31T16:31:42.307393Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488012209654708451:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:42.308268Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001720/r3tmp/tmpdZYH84/pdisk_1.dat 2025-03-31T16:31:42.490183Z node 3 :IMPORT WARN: Table profiles were not loaded 
2025-03-31T16:31:42.506922Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:42.507014Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:42.508359Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7528, node 3 2025-03-31T16:31:42.551400Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:42.551426Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:42.551435Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:42.551607Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:31:42.868332Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-31T16:31:42.871855Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-31T16:31:42.871898Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:31:42.872623Z node 3 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://qqq:14956 ldap://localhost:14956 ldap://localhost:11111, port: 14956 2025-03-31T16:31:42.872715Z node 3 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-03-31T16:31:42.890759Z node 3 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-31T16:31:42.939684Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-31T16:31:42.987438Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-03-31T16:31:42.987992Z node 3 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-03-31T16:31:42.988046Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-31T16:31:43.031514Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-31T16:31:43.075430Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-31T16:31:43.076449Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****Exwg (3FB00869) () has now valid token of ldapuser@ldap 2025-03-31T16:31:45.584111Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488012224108319129:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:45.584186Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001720/r3tmp/tmpdmXUto/pdisk_1.dat 
2025-03-31T16:31:45.713566Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:31:45.742006Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:45.742105Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:45.743629Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20130, node 4 2025-03-31T16:31:45.789636Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:45.789667Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:45.789682Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:45.789817Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:31:45.874204Z node 4 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-31T16:31:45.877493Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-31T16:31:45.877528Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:31:45.878336Z node 4 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:10189, port: 10189 2025-03-31T16:31:45.878454Z node 4 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-03-31T16:31:45.889189Z node 4 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-31T16:31:45.935586Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: groupDN 2025-03-31T16:31:45.983919Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****IB0Q (A4FDE348) () has now valid token of ldapuser@ldap 2025-03-31T16:31:48.934399Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7488012236904602132:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:48.934486Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001720/r3tmp/tmpxv8N67/pdisk_1.dat 2025-03-31T16:31:49.014571Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30158, node 5 2025-03-31T16:31:49.063882Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:49.063949Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:49.065401Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:31:49.084534Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:49.084558Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:49.084567Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:49.084963Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:31:49.257724Z node 5 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-31T16:31:49.261436Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db 
/Root, request db , token db /Root, DomainLoginOnly 1 2025-03-31T16:31:49.261479Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:31:49.262242Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:8603, port: 8603 2025-03-31T16:31:49.262333Z node 5 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-03-31T16:31:49.272710Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=invalidRobouser,dc=search,dc=yandex,dc=net 2025-03-31T16:31:49.319582Z node 5 :LDAP_AUTH_PROVIDER DEBUG: Could not perform initial LDAP bind for dn cn=invalidRobouser,dc=search,dc=yandex,dc=net on server ldap://localhost:8603. Invalid credentials 2025-03-31T16:31:49.320113Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****NiUQ (C19A42E3) () has now permanent error message 'Could not login via LDAP (Could not perform initial LDAP bind for dn cn=invalidRobouser,dc=search,dc=yandex,dc=net on server ldap://localhost:8603. Invalid credentials)' 2025-03-31T16:31:52.391779Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7488012254612473954:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:52.391984Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001720/r3tmp/tmp14GzCn/pdisk_1.dat 2025-03-31T16:31:52.523618Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:31:52.553492Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:52.553610Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:52.557074Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4853, node 6 2025-03-31T16:31:52.603714Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:52.603744Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:52.603753Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:52.603886Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:31:52.715880Z node 6 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-31T16:31:52.718668Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-31T16:31:52.718702Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:31:52.719450Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:30763, port: 30763 2025-03-31T16:31:52.719542Z node 6 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-03-31T16:31:52.729815Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-31T16:31:52.775616Z node 6 :LDAP_AUTH_PROVIDER DEBUG: Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldap://localhost:30763. 
Invalid credentials 2025-03-31T16:31:52.776154Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****WE6A (C0A5276A) () has now permanent error message 'Could not login via LDAP (Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldap://localhost:30763. Invalid credentials)' ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsDisableRequestToAD [GOOD] Test command err: 2025-03-31T16:31:35.783210Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012181485727142:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:35.783293Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001741/r3tmp/tmp608SUt/pdisk_1.dat 2025-03-31T16:31:36.237767Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:36.237889Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:36.240004Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:31:36.274163Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23407, node 1 2025-03-31T16:31:36.415738Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:36.415770Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:36.415785Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:36.415913Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:31:36.711311Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-31T16:31:36.712946Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-31T16:31:36.712990Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:31:36.714584Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://unavailablehost:21366, port: 21366 2025-03-31T16:31:36.714681Z node 1 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-03-31T16:31:36.719339Z node 1 :LDAP_AUTH_PROVIDER DEBUG: Could not start TLS. Can't contact LDAP server 2025-03-31T16:31:36.719932Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****Z0GQ (1723FECD) () has now retryable error message 'Could not login via LDAP (Could not start TLS. Can't contact LDAP server)' 2025-03-31T16:31:36.720253Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-31T16:31:36.720276Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:31:36.721162Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://unavailablehost:21366, port: 21366 2025-03-31T16:31:36.721251Z node 1 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-03-31T16:31:36.725387Z node 1 :LDAP_AUTH_PROVIDER DEBUG: Could not start TLS. 
Can't contact LDAP server 2025-03-31T16:31:36.725549Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****Z0GQ (1723FECD) () has now retryable error message 'Could not login via LDAP (Could not start TLS. Can't contact LDAP server)' 2025-03-31T16:31:39.058299Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012195465977617:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:39.058467Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001741/r3tmp/tmp4zotYl/pdisk_1.dat 2025-03-31T16:31:39.196287Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:31:39.209076Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:39.209149Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:39.210827Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 65154, node 2 2025-03-31T16:31:39.301314Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:39.301333Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:39.301339Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:39.301457Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:31:39.446381Z node 2 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-31T16:31:39.449252Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-31T16:31:39.449275Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:31:39.449936Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****0_Xw (74F439BB) () has now permanent error message 'Could not login via LDAP (List of ldap server hosts is empty)' 2025-03-31T16:31:42.395683Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488012208369160703:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:42.395718Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001741/r3tmp/tmpYZZ7Yj/pdisk_1.dat 2025-03-31T16:31:42.535995Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:31:42.554142Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:42.554226Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 62545, node 3 2025-03-31T16:31:42.555453Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:31:42.584495Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:42.584516Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:42.584522Z node 3 
:NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:42.584675Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:31:42.848597Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-31T16:31:42.851404Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-31T16:31:42.851433Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:31:42.852171Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****7TKA (CA634376) () has now permanent error message 'Could not login via LDAP (Parameter BaseDn is empty)' 2025-03-31T16:31:45.752024Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488012221935642421:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:45.752665Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001741/r3tmp/tmpadTZA4/pdisk_1.dat 2025-03-31T16:31:45.898326Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:31:45.917954Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:45.918074Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:45.919802Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7614, node 4 2025-03-31T16:31:45.959473Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:45.959522Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:45.959531Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:45.959686Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:31:46.227223Z node 4 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-31T16:31:46.229836Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-31T16:31:46.229877Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:31:46.230676Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****UtPg (5B8F4E73) () has now permanent error message 'Could not login via LDAP (Parameter BindDn is empty)' 2025-03-31T16:31:49.073309Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7488012241819841983:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:49.073426Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001741/r3tmp/tmpk44b7H/pdisk_1.dat 2025-03-31T16:31:49.153958Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15450, node 5 2025-03-31T16:31:49.204603Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:49.204692Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) 
VolatileState: Disconnected -> Connecting 2025-03-31T16:31:49.206338Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:31:49.208543Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:49.208562Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:49.208568Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:49.208700Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:31:49.284173Z node 5 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-31T16:31:49.288045Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-31T16:31:49.288081Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:31:49.288919Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****OkwQ (3E17D8F7) () has now permanent error message 'Could not login via LDAP (Parameter BindPassword is empty)' 2025-03-31T16:31:52.233826Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7488012253662294180:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:52.233898Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001741/r3tmp/tmp3Qvo90/pdisk_1.dat 2025-03-31T16:31:52.343771Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:31:52.367167Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:52.367240Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:52.368739Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21830, node 6 2025-03-31T16:31:52.431703Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:52.431727Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:52.431735Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:52.431872Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:31:52.619236Z node 6 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-31T16:31:52.622589Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-31T16:31:52.622622Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:31:52.623452Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:29984, port: 29984 2025-03-31T16:31:52.623551Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-31T16:31:52.679685Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-31T16:31:52.724055Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****Op5w (8107E27C) () has now valid token of ldapuser@ldap |94.6%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/tiering/ut/unittest >> S3SettingsConversion::StyleDeduction [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-12 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-13 |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> S3SettingsConversion::FoldersStyleDeduction [GOOD] |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-12 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-13 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-36 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-37 |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> S3SettingsConversion::Port [GOOD] |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> S3SettingsConversion::FoldersStyleDeduction [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapRefreshGroupsInfoWithError [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsFromAdLdapServer |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> S3SettingsConversion::Port [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-60 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-61 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-42 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-61 >> LdapAuthProviderTest_nonSecure::LdapRefreshGroupsInfoWithError [GOOD] >> TLdapUtilsSearchFilterCreatorTest::GetDefaultFilter [GOOD] >> TLdapUtilsSearchFilterCreatorTest::GetFilterWithOneLoginPlaceholder [GOOD] >> TLdapUtilsSearchFilterCreatorTest::GetFilterWithSearchAttribute [GOOD] >> TLdapUtilsSearchFilterCreatorTest::GetFilterWithFewLoginPlaceholders [GOOD] >> ColumnShardTiers::DSConfigsStub ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> TLdapUtilsSearchFilterCreatorTest::GetFilterWithFewLoginPlaceholders [GOOD] Test command err: 2025-03-31T16:31:35.776404Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012180290280899:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:35.776494Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001748/r3tmp/tmpcScSUx/pdisk_1.dat 2025-03-31T16:31:36.214955Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:31:36.252217Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:36.252326Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:36.258992Z node 1 :HIVE WARN: HIVE#72057594037968897 
Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24936, node 1 2025-03-31T16:31:36.416209Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:36.416244Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:36.416258Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:36.416407Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:31:36.705921Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-31T16:31:36.708162Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-31T16:31:36.708202Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:31:36.709075Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:28839, port: 28839 2025-03-31T16:31:36.709953Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-31T16:31:36.720097Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-31T16:31:36.772599Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-03-31T16:31:36.773123Z node 1 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-03-31T16:31:36.773204Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-31T16:31:36.819475Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-31T16:31:36.863385Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-31T16:31:36.865269Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****tTIA (FB23DA6D) () has now valid token of ldapuser@ldap 2025-03-31T16:31:40.776606Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488012180290280899:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:40.776682Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:31:40.867041Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****tTIA (FB23DA6D) 2025-03-31T16:31:40.867188Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:28839, port: 28839 2025-03-31T16:31:40.867294Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-31T16:31:40.870341Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-31T16:31:40.870674Z node 1 :LDAP_AUTH_PROVIDER DEBUG: LDAP user 
ldapuser does not exist. LDAP search for filter uid=ldapuser on server ldap://localhost:28839 return no entries 2025-03-31T16:31:40.870916Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****tTIA (FB23DA6D) () has now permanent error message 'Could not login via LDAP (LDAP user ldapuser does not exist. LDAP search for filter uid=ldapuser on server ldap://localhost:28839 return no entries)' 2025-03-31T16:31:45.869621Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****tTIA (FB23DA6D) 2025-03-31T16:31:47.469947Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012232062590233:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:47.470007Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001748/r3tmp/tmpDsxdnJ/pdisk_1.dat 2025-03-31T16:31:47.592862Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28460, node 2 2025-03-31T16:31:47.631134Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:47.631241Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:47.642086Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:31:47.678170Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:47.678198Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:47.678204Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:47.678286Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:31:48.063223Z node 2 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-31T16:31:48.065397Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-31T16:31:48.065430Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:31:48.066069Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:19654, port: 19654 2025-03-31T16:31:48.066173Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-31T16:31:48.069839Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-31T16:31:48.070192Z node 2 :LDAP_AUTH_PROVIDER DEBUG: Could not perform search for filter uid=ldapuser on server ldap://localhost:19654. Server is busy 2025-03-31T16:31:48.070429Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****12hg (85FEC175) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldap://localhost:19654. 
Server is busy)' 2025-03-31T16:31:48.070878Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-31T16:31:48.070903Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:31:48.071693Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:19654, port: 19654 2025-03-31T16:31:48.071782Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-31T16:31:48.074554Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-31T16:31:48.074894Z node 2 :LDAP_AUTH_PROVIDER DEBUG: Could not perform search for filter uid=ldapuser on server ldap://localhost:19654. Server is busy 2025-03-31T16:31:48.075049Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****12hg (85FEC175) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldap://localhost:19654. Server is busy)' 2025-03-31T16:31:49.476952Z node 2 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****12hg (85FEC175) 2025-03-31T16:31:49.477238Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-31T16:31:49.477266Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:31:49.478431Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:19654, port: 19654 2025-03-31T16:31:49.478545Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-31T16:31:49.481993Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-31T16:31:49.482471Z node 2 :LDAP_AUTH_PROVIDER DEBUG: Could not perform search for filter uid=ldapuser on server ldap://localhost:19654. Server is busy 2025-03-31T16:31:49.482844Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****12hg (85FEC175) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldap://localhost:19654. 
Server is busy)' 2025-03-31T16:31:52.469613Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488012232062590233:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:52.469711Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:31:52.487453Z node 2 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****12hg (85FEC175) 2025-03-31T16:31:52.487708Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-31T16:31:52.487733Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:31:52.488413Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:19654, port: 19654 2025-03-31T16:31:52.488492Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-31T16:31:52.491610Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-31T16:31:52.539210Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-03-31T16:31:52.539647Z node 2 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-03-31T16:31:52.539685Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-31T16:31:52.583401Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-31T16:31:52.631356Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-31T16:31:52.632085Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****12hg (85FEC175) () has now valid token of ldapuser@ldap 2025-03-31T16:31:57.491222Z node 2 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****12hg (85FEC175) 2025-03-31T16:31:57.491299Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:19654, port: 19654 2025-03-31T16:31:57.491368Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-31T16:31:57.494683Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-31T16:31:57.535441Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-03-31T16:31:57.535844Z node 2 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-03-31T16:31:57.535896Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: 
(|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-31T16:31:57.579495Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-31T16:31:57.623393Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-31T16:31:57.624212Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****12hg (85FEC175) () has now valid token of ldapuser@ldap >> DataShardSnapshots::VolatileSnapshotMerge [GOOD] >> DataShardSnapshots::VolatileSnapshotAndLocalMKQLUpdate >> LdapAuthProviderTest_nonSecure::LdapRefreshGroupsInfoGood [GOOD] >> LdapAuthProviderTest_nonSecure::LdapRefreshGroupsInfoDisableNestedGroupsGood >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-48 [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant >> LdapAuthProviderTest_LdapsScheme::LdapRefreshGroupsInfoGood [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapRefreshGroupsInfoDisableNestedGroupsGood >> ReadOnlyVDisk::TestGetWithMustRestoreFirst |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestStorageLoad >> ReadOnlyVDisk::TestReads >> ReadOnlyVDisk::TestSync >> ReadOnlyVDisk::TestDiscover >> ReadOnlyVDisk::TestWrites >> ReadOnlyVDisk::TestGarbageCollect >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-60 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-61 >> TOosLogicTests::RenderHtml [GOOD] >> TVPatchTests::FindingPartsWhenError >> TVPatchTests::FullPatchTest [GOOD] >> TVPatchTests::FullPatchTestSpecialCase1 [GOOD] >> TVPatchTests::PatchPartFastXorDiffDisorder >> TVPatchTests::FindingPartsWhenSeveralPartsExist >> LdapAuthProviderTest_StartTls::LdapFetchGroupsFromAdLdapServer [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsDisableRequestToAD >> TVPatchTests::PatchPartFastXorDiffDisorder [GOOD] >> TVPatchTests::FindingPartsWhenError [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-19 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-20 >> TVPatchTests::FindingPartsWhenSeveralPartsExist [GOOD] >> TVPatchTests::FindingPartsWithTimeout >> TVPatchTests::FindingPartsWhenPartsAreDontExist >> TVPatchTests::PatchPartGetError |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::FullPatchTestSpecialCase1 [GOOD] >> TVPatchTests::PatchPartFastXorDiffBeyoundBlob >> TVPatchTests::FindingPartsWithTimeout [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::PatchPartFastXorDiffDisorder [GOOD] Test command err: Recv 65537 2025-03-31T16:32:01.040820Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# 
[1:2:3:4:6:100:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-03-31T16:32:01.043562Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:100:0] Status# OK ResultSize# 1 2025-03-31T16:32:01.043627Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:100:0] FoundParts# [5] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchXorDiff 2025-03-31T16:32:01.043844Z node 1 :BS_VDISK_PATCH INFO: {BSVSP13@skeleton_vpatch_actor.cpp:674} [0:1:0:0:0] TEvVPatch: received xor diff; OriginalBlobId# [1:2:3:4:6:100:0] PatchedBlobId# [1:3:3:4:6:100:0] FromPart# 4 ToPart# 0 HasBuffer# no ReceivedXorDiffCount# 1/0 Send NKikimr::TEvBlobStorage::TEvVPatchXorDiffResult 2025-03-31T16:32:01.045074Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2025-03-31T16:32:01.045249Z node 1 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: send patch result; OriginalBlobId# [1:2:3:4:6:100:0] PatchedBlobId# [1:3:3:4:6:100:0] OriginalPartId# 0 PatchedPartId# 0 Status# ERROR ErrorReason# [XorDiff from datapart] the start of the diff at index 0 righter than the start of the diff at index 1; PrevDiffStart# 2 DiffStart# 0 Send NKikimr::TEvBlobStorage::TEvVPatchResult Recv NKikimr::TEvVPatchDyingConfirm ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::FindingPartsWhenError [GOOD] Test command err: Recv 65537 2025-03-31T16:32:01.040835Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-03-31T16:32:01.044118Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# ERROR ResultSize# 1 2025-03-31T16:32:01.044213Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [] Status# ERROR Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts 2025-03-31T16:32:01.044320Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm >> TVPatchTests::FindingPartsWhenPartsAreDontExist [GOOD] >> TVPatchTests::FindingPartsWhenOnlyOnePartExists >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-49 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-50 >> TVPatchTests::PatchPartGetError [GOOD] >> TVPatchTests::PatchPartFastXorDiffBeyoundBlob [GOOD] >> TVPatchTests::FullPatchTestXorDiffFasterVGetResult [GOOD] >> TVPatchTests::FindingPartsWhenOnlyOnePartExists [GOOD] >> KqpIndexes::SimpleVectorIndexOrderByCosineDistanceNullableLevel2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::FindingPartsWithTimeout [GOOD] Test command err: Recv 65537 2025-03-31T16:32:01.040752Z node 1 
:BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-03-31T16:32:01.043476Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1 2025-03-31T16:32:01.043544Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [1 2] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2025-03-31T16:32:01.043719Z node 1 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:577} [0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorReceiver# no ParityPart# no ForceEnd# yes 2025-03-31T16:32:01.045011Z node 1 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: received force end; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# OK ErrorReason# Send NKikimr::TEvBlobStorage::TEvVPatchResult 2025-03-31T16:32:01.045100Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm Recv 65537 2025-03-31T16:32:01.303020Z node 2 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NActors::TEvents::TEvWakeup 2025-03-31T16:32:01.313506Z node 2 :BS_VDISK_PATCH ERROR: {BSVSP11@skeleton_vpatch_actor.cpp:734} [0:1:0:0:0] TEvVPatch: the vpatch actor died due to a deadline, before receiving diff; 2025-03-31T16:32:01.313590Z node 2 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [] Status# ERROR Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts 2025-03-31T16:32:01.313661Z node 2 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::PatchPartGetError [GOOD] Test command err: Recv 65537 2025-03-31T16:32:01.472006Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-03-31T16:32:01.473152Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1 2025-03-31T16:32:01.473215Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [1] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2025-03-31T16:32:01.473415Z node 1 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:577} [0:1:0:0:0] TEvVPatch: 
received diff; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorReceiver# no ParityPart# no ForceEnd# no 2025-03-31T16:32:01.473468Z node 1 :BS_VDISK_PATCH INFO: {BSVSP05@skeleton_vpatch_actor.cpp:246} [0:1:0:0:0] TEvVPatch: send vGet for pulling part data; OriginalBlobId# [1:2:3:4:6:10:0] PullingPart# 1 Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-03-31T16:32:01.473671Z node 1 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: send patch result; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# ERROR ErrorReason# Recieve not OK status from VGetResult, received status# ERROR Send NKikimr::TEvBlobStorage::TEvVPatchResult 2025-03-31T16:32:01.473746Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::FullPatchTestXorDiffFasterVGetResult [GOOD] Test command err: Recv 65537 2025-03-31T16:32:01.495287Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:100:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-03-31T16:32:01.496244Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:100:0] Status# OK ResultSize# 1 2025-03-31T16:32:01.496299Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:100:0] FoundParts# [5] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchXorDiff 2025-03-31T16:32:01.496458Z node 1 :BS_VDISK_PATCH INFO: {BSVSP13@skeleton_vpatch_actor.cpp:674} [0:1:0:0:0] TEvVPatch: received xor diff; OriginalBlobId# [1:2:3:4:6:100:0] PatchedBlobId# [1:3:3:4:6:100:0] FromPart# 4 ToPart# 0 HasBuffer# no ReceivedXorDiffCount# 1/0 Send NKikimr::TEvBlobStorage::TEvVPatchXorDiffResult 2025-03-31T16:32:01.496520Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2025-03-31T16:32:01.496635Z node 1 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: send patch result; OriginalBlobId# [1:2:3:4:6:100:0] PatchedBlobId# [1:3:3:4:6:100:0] OriginalPartId# 0 PatchedPartId# 0 Status# ERROR ErrorReason# The diff at index 0 went beyound the blob part; DiffStart# 100 DiffEnd# 96 BlobPartSize# 32 Send NKikimr::TEvBlobStorage::TEvVPatchResult Recv NKikimr::TEvVPatchDyingConfirm ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::FindingPartsWhenOnlyOnePartExists [GOOD] Test command err: Recv 65537 2025-03-31T16:32:01.416793Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-03-31T16:32:01.417857Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} 
[0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1 2025-03-31T16:32:01.417916Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts 2025-03-31T16:32:01.418006Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm Recv 65537 2025-03-31T16:32:01.684644Z node 2 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-03-31T16:32:01.684879Z node 2 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1 2025-03-31T16:32:01.684916Z node 2 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [1] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2025-03-31T16:32:01.685070Z node 2 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:577} [0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorReceiver# no ParityPart# no ForceEnd# yes 2025-03-31T16:32:01.685158Z node 2 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: received force end; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# OK ErrorReason# Send NKikimr::TEvBlobStorage::TEvVPatchResult 2025-03-31T16:32:01.685209Z node 2 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-13 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-14 |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-13 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-14 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::SimpleVectorIndexOrderByCosineDistanceNullableLevel2 [GOOD] Test command err: Trying to start YDB, gRPC: 5195, MsgBus: 15991 2025-03-31T16:30:47.726927Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011971875326518:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:47.727001Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dc5/r3tmp/tmppTCGoR/pdisk_1.dat 2025-03-31T16:30:48.067939Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5195, node 1 2025-03-31T16:30:48.097126Z node 1 :HIVE 
WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:30:48.097384Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:30:48.102663Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:30:48.133284Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:30:48.133334Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:30:48.133349Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:30:48.133517Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15991 TClient is connected to server localhost:15991 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:30:48.698108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:48.719413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:48.853836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:49.027398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:49.098092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:50.779064Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011984760230174:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:50.779187Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:51.009069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:30:51.045208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:30:51.077064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:30:51.100572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:30:51.126818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:30:51.162908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:30:51.214097Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011989055197979:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:51.214239Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:51.214322Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011989055197984:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:51.217783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:30:51.227595Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011989055197986:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:30:51.324169Z node 1 :TX_PROXY ERROR: Actor# [1:7488011989055198040:3443] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:30:52.208880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:30:52.450654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-03-31T16:30:52.536168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-03-31T16:30:52.727193Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011971875326518:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:52.727280Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:31:03.059398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:31:03.059433Z node 1 :IMPORT WARN: Table profiles were not loaded Trying to start YDB, gRPC: 32603, MsgBus: 11710 2025-03-31T16:31:18.878698Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012105969422339:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:18.878811Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dc5/r3tmp/tmpdeIniA/pdisk_1.dat 2025-03-31T16:31:19.025851Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:31:19.037680Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:19.048227Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:19.052487Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32603, node 2 2025-03-31T16:31:19.111688Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:19.111712Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:19.111720Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:19.111848Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11710 TClient is connected to server localhost:11710 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-31T16:31:19.576263Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:31:19.597202Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:19.685198Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:19.879899Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:20.010194Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:22.627774Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012123149293304:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:22.627843Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:22.685323Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:31:22.730693Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:31:22.765190Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:31:22.838366Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:31:22.870711Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:31:22.950521Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:31:22.999909Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012123149293826:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:23.000004Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:23.000267Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012123149293831:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:23.003836Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:31:23.015150Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488012127444261129:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:31:23.102022Z node 2 :TX_PROXY ERROR: Actor# [2:7488012127444261183:3451] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:31:23.881667Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488012105969422339:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:23.881933Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:31:24.297755Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:31:24.602252Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-03-31T16:31:24.660854Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-03-31T16:31:24.718482Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710760:0, at schemeshard: 72057594046644480 2025-03-31T16:31:24.765094Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710763:0, at schemeshard: 72057594046644480 2025-03-31T16:31:24.808450Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037926 not found 2025-03-31T16:31:24.808496Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037925 not found 2025-03-31T16:31:24.808516Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037924 not found 2025-03-31T16:31:33.989778Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:31:33.989810Z node 2 :IMPORT WARN: Table profiles were not loaded |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> ReadOnlyVDisk::TestGetWithMustRestoreFirst [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-37 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-38 |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-61 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-62 >> TVPatchTests::PatchPartOk >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-61 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-62 |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> 
TAsyncIndexTests::MergeBothWithReboots[PipeResets] [GOOD] >> TVPatchTests::PatchPartOk [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestGetWithMustRestoreFirst [GOOD] Test command err: RandomSeed# 1528567425575371224 === Trying to put and get a blob === SEND TEvPut with key [1:1:0:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #0 to read-only === Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Write 10 blobs, expect some VDisks refuse parts but writes go through === SEND TEvPut with key [1:1:1:0:0:32768:0] 2025-03-31T16:32:01.600469Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5308:698] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:131072:0] 2025-03-31T16:32:01.605292Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5308:698] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:3:0:0:32768:0] 2025-03-31T16:32:01.610179Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5308:698] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:4:0:0:131072:0] 2025-03-31T16:32:01.612860Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5308:698] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:5:0:0:32768:0] TEvPutResult: TEvPutResult {Id# [1:1:5:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:6:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:6:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:7:0:0:32768:0] 2025-03-31T16:32:01.620687Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5308:698] TEvPutResult: TEvPutResult {Id# [1:1:7:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:8:0:0:131072:0] 2025-03-31T16:32:01.623405Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5308:698] TEvPutResult: TEvPutResult {Id# [1:1:8:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:9:0:0:32768:0] 2025-03-31T16:32:01.626233Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5308:698] TEvPutResult: TEvPutResult {Id# [1:1:9:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:10:0:0:131072:0] 2025-03-31T16:32:01.628980Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5308:698] TEvPutResult: TEvPutResult {Id# [1:1:10:0:0:131072:0] Status# OK 
StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 11 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:6:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:6:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:7:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:7:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:8:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:8:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:9:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:9:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:10:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:10:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Put 2 more VDisks to read-only === Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] === Write 10 more blobs, expect errors === SEND TEvPut with key [1:1:11:0:0:32768:0] 2025-03-31T16:32:03.442670Z 1 00h05m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5308:698] 2025-03-31T16:32:03.442761Z 3 00h05m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5322:712] 2025-03-31T16:32:03.442870Z 2 00h05m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5315:705] 2025-03-31T16:32:03.443593Z 1 00h05m30.160512s :BS_PROXY_PUT ERROR: [c1415650e69a0f16] Result# TEvPutResult {Id# [1:1:11:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:11:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# SUUUUU } { OrderNumber# 6 Situations# USUUUU } { OrderNumber# 7 Situations# UUSUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# UUUSUU } { OrderNumber# 4 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} GroupId# 2181038080 Marker# BPP12 
TEvPutResult: TEvPutResult {Id# [1:1:11:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:11:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# SUUUUU } { OrderNumber# 6 Situations# USUUUU } { OrderNumber# 7 Situations# UUSUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# UUUSUU } { OrderNumber# 4 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:12:0:0:131072:0] 2025-03-31T16:32:03.444873Z 1 00h05m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5308:698] 2025-03-31T16:32:03.445190Z 2 00h05m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5315:705] 2025-03-31T16:32:03.446105Z 3 00h05m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5322:712] TEvPutResult: TEvPutResult {Id# [1:1:12:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:12:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 4 Situations# SUUUUU } { OrderNumber# 5 Situations# USUUUU } { OrderNumber# 6 Situations# UUSUUU } { OrderNumber# 7 Situations# UUUSUU } { OrderNumber# 0 Situations# UUUUEU } { OrderNumber# 1 Situations# UUUUUE } { OrderNumber# 2 Situations# UUUUEU } { OrderNumber# 3 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:13:0:0:32768:0] 2025-03-31T16:32:03.447521Z 1 00h05m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5308:698] 2025-03-31T16:32:03.448132Z 2 00h05m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5315:705] 2025-03-31T16:32:03.448921Z 3 00h05m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5322:712] TEvPutResult: TEvPutResult {Id# [1:1:13:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:13:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 3 Situations# PUUUUU } { OrderNumber# 4 Situations# UPUUUU } { OrderNumber# 5 Situations# UUPUUU } { OrderNumber# 6 Situations# UUUPUU } { OrderNumber# 7 Situations# UUUUPU } { OrderNumber# 0 Situations# UUUUUE } { OrderNumber# 1 Situations# 
UUUUUE } { OrderNumber# 2 Situations# UUUUUE } ] " ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:14:0:0:131072:0] 2025-03-31T16:32:03.450091Z 3 00h05m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5322:712] 2025-03-31T16:32:03.451121Z 1 00h05m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5308:698] 2025-03-31T16:32:03.451702Z 2 00h05m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5315:705] TEvPutResult: TEvPutResult {Id# [1:1:14:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:14:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 2 Situations# EUUUUU } { OrderNumber# 3 Situations# UPUUUU } { OrderNumber# 4 Situations# UUPUUU } { OrderNumber# 5 Situations# UUUPUU } { OrderNumber# 6 Situations# UUUUPU } { OrderNumber# 7 Situations# UUUUUP } { OrderNumber# 0 Situations# EUUUUU } { OrderNumber# 1 Situations# EUUUUU } ] " ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:15:0:0:32768:0] 2025-03-31T16:32:03.452824Z 3 00h05m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5322:712] 2025-03-31T16:32:03.452907Z 2 00h05m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5315:705] 2025-03-31T16:32:03.453682Z 1 00h05m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5308:698] TEvPutResult: TEvPutResult {Id# [1:1:15:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:15:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 1 Situations# EUUUUU } { OrderNumber# 2 Situations# UEUUUU } { OrderNumber# 3 Situations# UUSUUU } { OrderNumber# 4 Situations# UUUSUU } { OrderNumber# 5 Situations# UUUUSU } { OrderNumber# 6 Situations# UUUUUS } { OrderNumber# 7 Situations# USUUUU } { OrderNumber# 0 Situations# EUUUUU } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:16:0:0:131072:0] 2025-03-31T16:32:03.455334Z 3 00h05m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5322:712] 2025-03-31T16:32:03.455422Z 2 00h05m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5315:705] 2025-03-31T16:32:03.456350Z 1 00h05m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5308:698] TEvPutResult: TEvPutResult {Id# [1:1:16:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:16:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] 
NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 1 Situations# EUUUUU } { OrderNumber# 2 Situations# UEUUUU } { OrderNumber# 3 Situations# UUSUUU } { OrderNumber# 4 Situations# UUUSUU } { OrderNumber# 5 Situations# UUUUSU } { OrderNumber# 6 Situations# UUUUUS } { OrderNumber# 7 Situations# USUUUU } { OrderNumber# 0 Situations# EUUUUU } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:17:0:0:32768:0] 2025-03-31T16:32:03.457924Z 1 00h05m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5308:698] 2025-03-31T16:32:03.458132Z 3 00h05m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5322:712] 2025-03-31T16:32:03.458189Z 2 00h05m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5315:705] TEvPutResult: TEvPutResult {Id# [1:1:17:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:17:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 0 Situations# EUUUUU } { OrderNumber# 1 Situations# UEUUUU } { OrderNumber# 2 Situations# UUEUUU } { OrderNumber# 3 Situations# UUUSUU } { OrderNumber# 4 Situations# UUUUSU } { OrderNumber# 5 Situations# UUUUUS } { OrderNumber# 6 Situations# SUUUUU } { OrderNumber# 7 Situations# UUSUUU } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:18:0:0:131072:0] 2025-03-31T16:32:03.460158Z 1 00h05m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5308:698] 2025-03-31T16:32:03.460357Z 2 00h05m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5315:705] 2025-03-31T16:32:03.460473Z 3 00h05m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5322:712] TEvPutResult: TEvPutResult {Id# [1:1:18:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:18:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 7 Situations# SUUUUU } { OrderNumber# 0 Situations# UEUUUU } { OrderNumber# 1 Situations# UUEUUU } { OrderNumber# 2 Situations# UUUEUU } { OrderNumber# 3 Situations# UUUUSU } { OrderNumber# 4 Situations# UUUUUS } { OrderNumber# 5 Situations# USUUUU } { OrderNumber# 6 Situations# UUSUUU } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:19:0:0:32768:0] 2025-03-31T16:32:03.462703Z 1 00h05m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5308:698] 
2025-03-31T16:32:03.462912Z 3 00h05m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5322:712] 2025-03-31T16:32:03.462984Z 2 00h05m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5315:705] TEvPutResult: TEvPutResult {Id# [1:1:19:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:19:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 6 Situations# SUUUUU } { OrderNumber# 7 Situations# USUUUU } { OrderNumber# 0 Situations# UUEUUU } { OrderNumber# 1 Situations# UUUEUU } { OrderNumber# 2 Situations# UUUUEU } { OrderNumber# 3 Situations# UUUUUS } { OrderNumber# 4 Situations# UUSUUU } { OrderNumber# 5 Situations# UUUUSU } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:20:0:0:131072:0] 2025-03-31T16:32:03.464777Z 1 00h05m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5308:698] 2025-03-31T16:32:03.464883Z 3 00h05m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5322:712] 2025-03-31T16:32:03.464997Z 2 00h05m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5315:705] TEvPutResult: TEvPutResult {Id# [1:1:20:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:20:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# SUUUUU } { OrderNumber# 6 Situations# USUUUU } { OrderNumber# 7 Situations# UUSUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# UUUSUU } { OrderNumber# 4 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvGet with key [1:1:11:0:0:32768:0] 2025-03-31T16:32:03.470131Z 1 00h05m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5309:699] 2025-03-31T16:32:03.470320Z 2 00h05m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5316:706] 2025-03-31T16:32:03.470382Z 3 00h05m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5323:713] 2025-03-31T16:32:03.470983Z 1 00h05m30.160512s :BS_PROXY_GET ERROR: [f38b56e71ed38ba4] Response# TEvGetResult {Status# ERROR ResponseSz# 1 {[1:1:11:0:0:32768:0] ERROR Size# 0 RequestedSize# 32768} ErrorReason# "TStrategyBase saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:11:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] 
} { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# PUUUUU } { OrderNumber# 6 Situations# UPUUUU } { OrderNumber# 7 Situations# UUPUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# AAAPAA } { OrderNumber# 4 Situations# AAAAAA } ] "} Marker# BPG29
2025-03-31T16:32:03.471132Z 2 00h05m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5316:706]
2025-03-31T16:32:03.471206Z 3 00h05m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5323:713]
TEvGetResult: TEvGetResult {Status# ERROR ResponseSz# 1 {[1:1:11:0:0:32768:0] ERROR Size# 0 RequestedSize# 32768} ErrorReason# "TStrategyBase saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:11:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# PUUUUU } { OrderNumber# 6 Situations# UPUUUU } { OrderNumber# 7 Situations# UUPUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# AAAPAA } { OrderNumber# 4 Situations# AAAAAA } ] "}
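Note the asymmetry this test exercises: plain reads kept succeeding with three read-only VDisks, yet this TEvGet fails with the same EBS_DISINTEGRATED verdict a put would receive. That is consistent with the test name, TestGetWithMustRestoreFirst: a get carrying the MustRestoreFirst flag must be able to rewrite any missing or unrecovered parts before answering, and with 3 of 8 domains read-only that restore write is impossible. A hedged sketch of the decision (the helper names are ours, not the proxy's; the real logic lives in TStrategyBase and is more involved):

// Illustrative only -- hypothetical names, simplified control flow.
enum class EReply { Ok, Error };

struct TGetSettings {
    bool MustRestoreFirst = false;  // caller wants missing parts rewritten
};

EReply JudgeGet(const TGetSettings& get, bool dataReadable, bool groupWritable) {
    if (!dataReadable)
        return EReply::Error;             // cannot reconstruct the blob at all
    if (get.MustRestoreFirst && !groupWritable)
        return EReply::Error;             // restoring would need a put; with
                                          // 3/8 domains read-only the proxy
                                          // answers ERROR, as in the log above
    return EReply::Ok;                    // plain gets still succeed
}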
>> LdapAuthProviderTest_StartTls::LdapFetchGroupsDisableRequestToAD [GOOD]
>> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithCustomGroupAttributeGood
------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::PatchPartOk [GOOD]
Test command err: Recv 65537
2025-03-31T16:32:03.938024Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z
Send NKikimr::TEvBlobStorage::TEvVGet
Recv NKikimr::TEvBlobStorage::TEvVGetResult
2025-03-31T16:32:03.939077Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1
2025-03-31T16:32:03.939165Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [1] Status# OK
Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts
Recv NKikimr::TEvBlobStorage::TEvVPatchDiff
2025-03-31T16:32:03.939364Z node 1 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:577} [0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorReceiver# no ParityPart# no ForceEnd# no
2025-03-31T16:32:03.939423Z node 1 :BS_VDISK_PATCH INFO: {BSVSP05@skeleton_vpatch_actor.cpp:246} [0:1:0:0:0] TEvVPatch: send vGet for pulling part data; OriginalBlobId# [1:2:3:4:6:10:0] PullingPart# 1
Send NKikimr::TEvBlobStorage::TEvVGet
Recv NKikimr::TEvBlobStorage::TEvVGetResult
2025-03-31T16:32:03.939625Z node 1 :BS_VDISK_PATCH INFO: {BSVSP08@skeleton_vpatch_actor.cpp:383} [0:1:0:0:0] TEvVPatch: received part data; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 DataParts# 4 ReceivedBlobId# [1:2:3:4:6:10:1] Status# OK ResultSize# 1 ParityPart# no
2025-03-31T16:32:03.953842Z node 1 :BS_VDISK_PATCH INFO: {BSVSP14@skeleton_vpatch_actor.cpp:462} [0:1:0:0:0] TEvVPatch: send xor diffs; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorDiffCount# 0
2025-03-31T16:32:03.953964Z node 1 :BS_VDISK_PATCH INFO: {BSVSP15@skeleton_vpatch_actor.cpp:502} [0:1:0:0:0] TEvVPatch: send vPut; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 ReceivedXorDiffs# 0 ExpectedXorDiffs# 0
Send NKikimr::TEvBlobStorage::TEvVPut
Recv NKikimr::TEvBlobStorage::TEvVPutResult
2025-03-31T16:32:03.955120Z node 1 :BS_VDISK_PATCH INFO: {BSVSP10@skeleton_vpatch_actor.cpp:627} [0:1:0:0:0] TEvVPatch: received put result; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# OK
2025-03-31T16:32:03.955181Z node 1 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: send patch result; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# OK ErrorReason#
Send NKikimr::TEvBlobStorage::TEvVPatchResult
2025-03-31T16:32:03.955291Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest
Recv NKikimr::TEvVPatchDyingConfirm
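Read top to bottom, the PatchPartOk trace is one complete pass through the VPatch actor's happy path, and the BSVSP markers make the sequence easy to follow. A compact summary of that flow (step names paraphrase the log; this is a reading aid, not the actor's real state enum):

// Condensed from the BSVSP markers in the trace above
// (skeleton_vpatch_actor.cpp); comments cite the observed values.
enum class EVPatchStep {
    Bootstrap,       // BSVSP03: actor starts for OriginalBlobId [1:2:3:4:6:10:0]
    FindParts,       // TEvVGet index -> BSVSP06 parts index -> BSVSP04 FoundParts# [1]
    ReceiveDiff,     // BSVSP09: TEvVPatchDiff maps OriginalPartId# 1 to PatchedPartId# 1
    PullPartData,    // BSVSP05 send vGet -> BSVSP08 received part data, DataParts# 4
    SendXorDiffs,    // BSVSP14: XorDiffCount# 0 here (data part, no parity fan-out)
    PutPatchedPart,  // BSVSP15 send vPut -> BSVSP10 put result Status# OK
    ReplyAndDie,     // BSVSP07 TEvVPatchResult, then BSVSP17 NotifySkeletonAboutDying
};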
|94.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest
>> DataShardSnapshots::VolatileSnapshotAndLocalMKQLUpdate [GOOD]
>> DataShardSnapshots::VolatileSnapshotReadTable
|94.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest
|94.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest
>> LdapAuthProviderTest_StartTls::LdapRefreshGroupsInfoDisableNestedGroupsGood [GOOD]
>> LdapAuthProviderTest_StartTls::LdapRefreshRemoveUserBad
>> ExternalIndex::Simple
|94.8%| [TA] $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|94.8%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/test-results/unittest/{meta.json ... results_accumulator.log}
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::MergeBothWithReboots[PipeResets] [GOOD]
Test command err:
=========== RUN: Trace ===========
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141]
IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141]
Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142]
IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142]
Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:131:2058] recipient: [1:109:2141]
Leader for TabletID 72057594046316545 is [1:135:2157] sender: [1:136:2058] recipient: [1:111:2142]
2025-03-31T16:29:40.488182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-03-31T16:29:40.488282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-31T16:29:40.488340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s
2025-03-31T16:29:40.488394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-03-31T16:29:40.488447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-03-31T16:29:40.488482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-03-31T16:29:40.488555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-31T16:29:40.488666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-03-31T16:29:40.489004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-03-31T16:29:40.607670Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" }
2025-03-31T16:29:40.607784Z node 1 :IMPORT WARN: Table profiles were not loaded
Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:169:2058] recipient: [1:15:2062]
2025-03-31T16:29:40.627452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-03-31T16:29:40.627659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG:
TTxUpgradeSchema.Execute 2025-03-31T16:29:40.627851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:29:40.633420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:29:40.633798Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:29:40.634579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:40.634849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:29:40.640400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:40.642151Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:29:40.642224Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:40.642368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:29:40.642422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:29:40.642469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:29:40.642628Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:210:2058] recipient: [1:208:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:210:2058] recipient: [1:208:2209] Leader for TabletID 72057594037968897 is [1:214:2213] sender: [1:215:2058] recipient: [1:208:2209] 2025-03-31T16:29:40.651035Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:235:2058] recipient: [1:15:2062] 2025-03-31T16:29:40.796181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:29:40.796466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:40.796710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:29:40.796971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:29:40.797042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:40.804264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:40.804451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, 
operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:29:40.804681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:40.804752Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:29:40.804794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:29:40.804831Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:29:40.809706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:40.809824Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:29:40.809872Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:29:40.813274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:40.813344Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:40.813393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:40.813457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:29:40.817725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:29:40.820934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:29:40.821191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:135:2157] sender: [1:250:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:29:40.822304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:40.822449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 135 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:29:40.822526Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:40.822851Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:29:40.822916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:40.823135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for 
pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:29:40.823249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:29:40.826558Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:29:40.826609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:29:40.826799Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:40.826841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:202:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:29:40.827236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:40.827296Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:29:40.827401Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:29:40.827443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:29:40.827484Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:29:40.827548Z no ... Size: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 
SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:32:03.659285Z node 54 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-31T16:32:03.659575Z node 54 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 321us result status StatusSuccess 2025-03-31T16:32:03.660432Z node 54 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 
SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } 
TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
2025-03-31T16:32:03.671557Z node 54 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409551][54:1126:2890] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0
2025-03-31T16:32:03.671691Z node 54 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409550:2][54:1072:2890] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409551 }
2025-03-31T16:32:03.671857Z node 54 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409551][54:1126:2890] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1743438723647226 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1743438723647226 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1743438723647226 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] }
2025-03-31T16:32:03.674284Z node 54 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409551][54:1126:2890] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3
2025-03-31T16:32:03.674392Z node 54 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409550:2][54:1072:2890] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409551 }
>> ReadOnlyVDisk::TestDiscover [GOOD]
|94.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest
|94.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest
>> ReadOnlyVDisk::TestWrites [GOOD]
|94.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestDiscover [GOOD]
Test command err: RandomSeed# 16311318338783516788
SEND TEvPut with key
[1:1:0:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:1:0:0:32768:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 3 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] SEND TEvPut with key [1:1:3:0:0:32768:0] 2025-03-31T16:32:01.612102Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5309:698] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] SEND TEvPut with key [1:1:4:0:0:131072:0] 2025-03-31T16:32:01.876962Z 1 00h02m00.100000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5309:698] 2025-03-31T16:32:01.878103Z 2 00h02m00.100000s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5316:705] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] SEND TEvPut with key [1:1:5:0:0:32768:0] 2025-03-31T16:32:02.143586Z 3 00h02m30.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5323:712] 2025-03-31T16:32:02.144486Z 1 00h02m30.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5309:698] 2025-03-31T16:32:02.145131Z 2 00h02m30.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5316:705] 2025-03-31T16:32:02.145424Z 1 00h02m30.110512s :BS_PROXY_PUT ERROR: [ed04577209d21826] Result# TEvPutResult {Id# [1:1:5:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:5:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 2 Situations# EUUUUU } { OrderNumber# 3 Situations# UPUUUU } { OrderNumber# 4 Situations# UUPUUU } { OrderNumber# 5 Situations# UUUPUU } { OrderNumber# 6 Situations# UUUUPU } { OrderNumber# 7 Situations# UUUUUP } { OrderNumber# 0 Situations# EUUUUU } { OrderNumber# 1 Situations# EUUUUU } ] " ApproximateFreeSpaceShare# 0.999988} GroupId# 2181038080 Marker# BPP12 TEvPutResult: TEvPutResult {Id# [1:1:5:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# 
"TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:5:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 2 Situations# EUUUUU } { OrderNumber# 3 Situations# UPUUUU } { OrderNumber# 4 Situations# UUPUUU } { OrderNumber# 5 Situations# UUUPUU } { OrderNumber# 6 Situations# UUUUPU } { OrderNumber# 7 Situations# UUUUUP } { OrderNumber# 0 Situations# EUUUUU } { OrderNumber# 1 Situations# EUUUUU } ] " ApproximateFreeSpaceShare# 0.999988} === Putting VDisk #3 to read-only === Setting VDisk read-only to 1 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] === Read all 6 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} === Putting VDisk #4 to read-only === Setting VDisk read-only to 1 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] === Read all 6 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} === Putting VDisk #5 to read-only === Setting VDisk read-only to 1 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] === Read all 6 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK 
ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}}
SEND TEvGet with key [1:1:3:0:0:32768:0]
TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}}
SEND TEvGet with key [1:1:4:0:0:131072:0]
TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}}
SEND TEvGet with key [1:1:5:0:0:32768:0]
TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}}
=== Putting VDisk #6 to read-only ===
Setting VDisk read-only to 1 for position 6
Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0]
=== Read all 6 blob(s) ===
SEND TEvGet with key [1:1:0:0:0:131072:0]
TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}}
SEND TEvGet with key [1:1:1:0:0:32768:0]
TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}}
SEND TEvGet with key [1:1:2:0:0:131072:0]
TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}}
SEND TEvGet with key [1:1:3:0:0:32768:0]
TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}}
SEND TEvGet with key [1:1:4:0:0:131072:0]
TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}}
SEND TEvGet with key [1:1:5:0:0:32768:0]
TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}}
=== Putting VDisk #0 to normal ===
Setting VDisk read-only to 0 for position 0
Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0]
=== Putting VDisk #1 to normal ===
Setting VDisk read-only to 0 for position 1
Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0]
=== Putting VDisk #2 to normal ===
Setting VDisk read-only to 0 for position 2
Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0]
=== Putting VDisk #3 to normal ===
Setting VDisk read-only to 0 for position 3
Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0]
=== Putting VDisk #4 to normal ===
Setting VDisk read-only to 0 for position 4
Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0]
=== Putting VDisk #5 to normal ===
Setting VDisk read-only to 0 for position 5
Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0]
=== Putting VDisk #6 to normal ===
Setting VDisk read-only to 0 for position 6
Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0]
SEND TEvPut with key [1:1:6:0:0:131072:0]
TEvPutResult: TEvPutResult {Id# [1:1:6:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988}
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-61 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-62
------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestWrites [GOOD]
Test command err: RandomSeed# 3833048515169831679
=== Trying to put and get a blob ===
SEND TEvPut with key [1:1:0:0:0:131072:0]
TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988}
=== Read all 1 blob(s) ===
SEND TEvGet with key [1:1:0:0:0:131072:0]
TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}}
=== Putting VDisk #0 to read-only ===
Setting VDisk read-only to 1 for position 0
Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Write 10 blobs, expect some VDisks refuse parts but writes go through === SEND TEvPut with key [1:1:1:0:0:32768:0] 2025-03-31T16:32:01.639621Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5309:698] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:131072:0] 2025-03-31T16:32:01.644767Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5309:698] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:3:0:0:32768:0] 2025-03-31T16:32:01.650357Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5309:698] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:4:0:0:131072:0] 2025-03-31T16:32:01.653485Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5309:698] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:5:0:0:32768:0] TEvPutResult: TEvPutResult {Id# [1:1:5:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:6:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:6:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:7:0:0:32768:0] 2025-03-31T16:32:01.662262Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5309:698] TEvPutResult: TEvPutResult {Id# [1:1:7:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:8:0:0:131072:0] 2025-03-31T16:32:01.665268Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5309:698] TEvPutResult: TEvPutResult {Id# [1:1:8:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:9:0:0:32768:0] 2025-03-31T16:32:01.668429Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5309:698] TEvPutResult: TEvPutResult {Id# [1:1:9:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:10:0:0:131072:0] 2025-03-31T16:32:01.671406Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5309:698] TEvPutResult: TEvPutResult {Id# [1:1:10:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 11 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# 
OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:6:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:6:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:7:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:7:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:8:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:8:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:9:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:9:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:10:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:10:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Put 2 more VDisks to read-only === Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] === Write 10 more blobs, expect errors === SEND TEvPut with key [1:1:11:0:0:32768:0] 2025-03-31T16:32:02.762467Z 1 00h03m30.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5309:698] 2025-03-31T16:32:02.762565Z 3 00h03m30.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5323:712] 2025-03-31T16:32:02.762666Z 2 00h03m30.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5316:705] 2025-03-31T16:32:02.763420Z 1 00h03m30.110512s :BS_PROXY_PUT ERROR: [71ad210f7231a68b] Result# TEvPutResult {Id# [1:1:11:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:11:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# SUUUUU } { OrderNumber# 6 Situations# USUUUU } { OrderNumber# 7 Situations# UUSUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# UUUSUU } { OrderNumber# 4 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} GroupId# 2181038080 Marker# BPP12 TEvPutResult: TEvPutResult {Id# [1:1:11:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:11:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# SUUUUU } { OrderNumber# 6 Situations# USUUUU } { OrderNumber# 7 Situations# UUSUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# 
UUUUUE } { OrderNumber# 3 Situations# UUUSUU } { OrderNumber# 4 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:12:0:0:131072:0] 2025-03-31T16:32:02.764792Z 1 00h03m30.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5309:698] 2025-03-31T16:32:02.764907Z 2 00h03m30.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5316:705] 2025-03-31T16:32:02.765792Z 3 00h03m30.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5323:712] TEvPutResult: TEvPutResult {Id# [1:1:12:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:12:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 4 Situations# SUUUUU } { OrderNumber# 5 Situations# USUUUU } { OrderNumber# 6 Situations# UUSUUU } { OrderNumber# 7 Situations# UUUSUU } { OrderNumber# 0 Situations# UUUUEU } { OrderNumber# 1 Situations# UUUUUE } { OrderNumber# 2 Situations# UUUUEU } { OrderNumber# 3 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:13:0:0:32768:0] 2025-03-31T16:32:02.767281Z 1 00h03m30.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5309:698] 2025-03-31T16:32:02.768003Z 2 00h03m30.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5316:705] 2025-03-31T16:32:02.768756Z 3 00h03m30.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5323:712] TEvPutResult: TEvPutResult {Id# [1:1:13:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:13:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 3 Situations# PUUUUU } { OrderNumber# 4 Situations# UPUUUU } { OrderNumber# 5 Situations# UUPUUU } { OrderNumber# 6 Situations# UUUPUU } { OrderNumber# 7 Situations# UUUUPU } { OrderNumber# 0 Situations# UUUUUE } { OrderNumber# 1 Situations# UUUUUE } { OrderNumber# 2 Situations# UUUUUE } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:14:0:0:131072:0] 2025-03-31T16:32:02.769895Z 3 00h03m30.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5323:712] 2025-03-31T16:32:02.770879Z 1 00h03m30.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5309:698] 2025-03-31T16:32:02.771413Z 2 00h03m30.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5316:705] TEvPutResult: TEvPutResult {Id# [1:1:14:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:14:0:0:131072:0] Reported ErrorReasons# [ { 
OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 2 Situations# EUUUUU } { OrderNumber# 3 Situations# UPUUUU } { OrderNumber# 4 Situations# UUPUUU } { OrderNumber# 5 Situations# UUUPUU } { OrderNumber# 6 Situations# UUUUPU } { OrderNumber# 7 Situations# UUUUUP } { OrderNumber# 0 Situations# EUUUUU } { OrderNumber# 1 Situations# EUUUUU } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:15:0:0:32768:0] 2025-03-31T16:32:02.772446Z 3 00h03m30.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sende ... 2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:6:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:6:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:7:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:7:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:8:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:8:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:9:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:9:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:10:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:10:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:11:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:11:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:12:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:12:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:13:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:13:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:14:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:14:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:15:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:15:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:16:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:16:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:17:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:17:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:18:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:18:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:19:0:0:32768:0] TEvGetResult: TEvGetResult {Status# 
OK ResponseSz# 1 {[1:1:19:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:20:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:20:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #0 === Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Write 10 blobs, expect some VDisks refuse parts but the writes still go through === SEND TEvPut with key [1:1:21:0:0:32768:0] 2025-03-31T16:32:05.207114Z 3 00h08m00.161536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5323:712] 2025-03-31T16:32:05.207297Z 2 00h08m00.161536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5316:705] TEvPutResult: TEvPutResult {Id# [1:1:21:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:22:0:0:131072:0] 2025-03-31T16:32:05.210878Z 2 00h08m00.161536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5316:705] 2025-03-31T16:32:05.212376Z 3 00h08m00.161536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5323:712] TEvPutResult: TEvPutResult {Id# [1:1:22:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:23:0:0:32768:0] TEvPutResult: TEvPutResult {Id# [1:1:23:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:24:0:0:131072:0] 2025-03-31T16:32:05.217035Z 3 00h08m00.161536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5323:712] TEvPutResult: TEvPutResult {Id# [1:1:24:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:25:0:0:32768:0] 2025-03-31T16:32:05.220055Z 3 00h08m00.161536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5323:712] 2025-03-31T16:32:05.220156Z 2 00h08m00.161536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5316:705] TEvPutResult: TEvPutResult {Id# [1:1:25:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:26:0:0:131072:0] 2025-03-31T16:32:05.223046Z 3 00h08m00.161536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5323:712] 2025-03-31T16:32:05.223176Z 2 00h08m00.161536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5316:705] TEvPutResult: TEvPutResult {Id# [1:1:26:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:27:0:0:32768:0] 2025-03-31T16:32:05.226320Z 3 00h08m00.161536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5323:712] 2025-03-31T16:32:05.226423Z 2 00h08m00.161536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5316:705] TEvPutResult: TEvPutResult {Id# [1:1:27:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:28:0:0:131072:0] 2025-03-31T16:32:05.229344Z 2 00h08m00.161536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5316:705] 2025-03-31T16:32:05.229626Z 3 00h08m00.161536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5323:712] TEvPutResult: TEvPutResult {Id# [1:1:28:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut 
with key [1:1:29:0:0:32768:0] 2025-03-31T16:32:05.232643Z 3 00h08m00.161536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5323:712] 2025-03-31T16:32:05.232763Z 2 00h08m00.161536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5316:705] TEvPutResult: TEvPutResult {Id# [1:1:29:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:30:0:0:131072:0] 2025-03-31T16:32:05.235537Z 3 00h08m00.161536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5323:712] 2025-03-31T16:32:05.235699Z 2 00h08m00.161536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5316:705] TEvPutResult: TEvPutResult {Id# [1:1:30:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} === Read all 31 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:6:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:6:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:7:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:7:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:8:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:8:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:9:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:9:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:10:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:10:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:11:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:11:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:12:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:12:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:13:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:13:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:14:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:14:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:15:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:15:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:16:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:16:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet 
with key [1:1:17:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:17:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:18:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:18:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:19:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:19:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:20:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:20:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:21:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:21:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:22:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:22:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:23:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:23:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:24:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:24:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:25:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:25:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:26:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:26:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:27:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:27:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:28:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:28:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:29:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:29:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:30:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:30:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-20 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-21 |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> KqpRanges::UpdateWhereInNoFullScan+UseSink >> TTopicReaderTests::TestRun_ReadTwoMessages_With_Limit_1 [GOOD] >> TTopicReaderTests::TestRun_Read_Less_Messages_Than_Sent >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-StrictAclCheck >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-50 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-51 >> TAsyncIndexTests::DropTableWithInflightChanges[TabletReboots] [GOOD] |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_transfer_writer/unittest >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithCustomGroupAttributeGood [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsUseInvalidSearchFilterBad >> KqpNewEngine::MultiSelect |94.9%| 
[TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> KqpNewEngine::DuplicatedResults >> DataShardWrite::ExecSQLUpsertPrepared-EvWrite-Volatile ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::DropTableWithInflightChanges[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [1:135:2157] sender: [1:136:2058] recipient: [1:111:2142] 2025-03-31T16:29:51.513256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:29:51.513349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:29:51.513391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:29:51.513423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:29:51.513480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:29:51.513509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:29:51.513601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:29:51.513696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:29:51.514078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:29:51.603000Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-03-31T16:29:51.603072Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:169:2058] recipient: 
[1:15:2062] 2025-03-31T16:29:51.629825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:29:51.630107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:29:51.630269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:29:51.634812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:29:51.635104Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:29:51.635742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:51.635989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:29:51.640755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:51.642358Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:29:51.642427Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:51.642556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:29:51.642606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:29:51.642647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:29:51.642785Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:210:2058] recipient: [1:208:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:210:2058] recipient: [1:208:2209] Leader for TabletID 72057594037968897 is [1:214:2213] sender: [1:215:2058] recipient: [1:208:2209] 2025-03-31T16:29:51.649935Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:235:2058] recipient: [1:15:2062] 2025-03-31T16:29:51.761746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:29:51.762006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:51.762204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:29:51.762444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:29:51.762495Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:51.764597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 
PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:51.764727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:29:51.764905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:51.764983Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:29:51.765020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:29:51.765050Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:29:51.766831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:51.766879Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:29:51.766913Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:29:51.768659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:51.768706Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:51.768746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:51.768784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:29:51.772508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:29:51.774348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:29:51.774514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:135:2157] sender: [1:250:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:29:51.775572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:51.775719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 135 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:29:51.775778Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:51.776037Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:29:51.776094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, 
operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:51.776267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:29:51.776367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:29:51.778755Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:29:51.778804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:29:51.779021Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:51.779072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:202:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:29:51.779430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:51.779487Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:29:51.779585Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:29:51.779623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:29:51.779663Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:29:51.779711Z no ... 927024Z node 114 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 1003:2 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:32:06.927307Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-03-31T16:32:06.927421Z node 114 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:2 progress is 2/3 2025-03-31T16:32:06.927449Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 2/3 2025-03-31T16:32:06.927479Z node 114 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:2 progress is 2/3 2025-03-31T16:32:06.927505Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 2/3 2025-03-31T16:32:06.927537Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 2/3, is published: false 2025-03-31T16:32:06.929101Z node 114 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-03-31T16:32:06.929204Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-03-31T16:32:06.929237Z node 114 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2025-03-31T16:32:06.930328Z node 114 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 
2025-03-31T16:32:06.933867Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-03-31T16:32:06.933930Z node 114 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2025-03-31T16:32:06.933969Z node 114 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2025-03-31T16:32:06.934001Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-03-31T16:32:06.934083Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 2/3, is published: true 2025-03-31T16:32:06.935330Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-03-31T16:32:06.935378Z node 114 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 1003:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:32:06.935601Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-03-31T16:32:06.935701Z node 114 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 3/3 2025-03-31T16:32:06.935729Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2025-03-31T16:32:06.935764Z node 114 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 3/3 2025-03-31T16:32:06.935790Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2025-03-31T16:32:06.935820Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 3/3, is published: true 2025-03-31T16:32:06.935848Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2025-03-31T16:32:06.935882Z node 114 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2025-03-31T16:32:06.935905Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2025-03-31T16:32:06.935984Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-03-31T16:32:06.936019Z node 114 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:1 2025-03-31T16:32:06.936039Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:1 2025-03-31T16:32:06.936065Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-03-31T16:32:06.936087Z node 114 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:2 2025-03-31T16:32:06.936106Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:2 2025-03-31T16:32:06.936139Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-03-31T16:32:06.936682Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-03-31T16:32:06.936834Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-03-31T16:32:06.939692Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-03-31T16:32:06.939760Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-03-31T16:32:06.941607Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-03-31T16:32:06.941839Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-03-31T16:32:06.946624Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 345 RawX2: 489626274073 } TabletId: 72075186233409546 State: 4 2025-03-31T16:32:06.946727Z node 114 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409546, state: Offline, at schemeshard: 72057594046678944 2025-03-31T16:32:06.948945Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-03-31T16:32:06.949510Z node 114 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409546 2025-03-31T16:32:06.949736Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-03-31T16:32:06.950034Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 Forgetting tablet 72075186233409546 2025-03-31T16:32:06.953361Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-31T16:32:06.953424Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-03-31T16:32:06.953499Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-03-31T16:32:06.953563Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-03-31T16:32:06.953611Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-31T16:32:06.956499Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-03-31T16:32:06.956574Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409546 2025-03-31T16:32:06.957937Z node 114 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2025-03-31T16:32:06.958215Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-03-31T16:32:06.958261Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: tests -- 
TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-03-31T16:32:06.959250Z node 114 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-03-31T16:32:06.959348Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-03-31T16:32:06.959385Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [114:625:2551] 2025-03-31T16:32:06.965520Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 348 RawX2: 489626274075 } TabletId: 72075186233409547 State: 4 2025-03-31T16:32:06.965625Z node 114 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409547, state: Offline, at schemeshard: 72057594046678944 2025-03-31T16:32:06.967576Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-03-31T16:32:06.968199Z node 114 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409547 2025-03-31T16:32:06.968402Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-03-31T16:32:06.968668Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 Forgetting tablet 72075186233409547 2025-03-31T16:32:06.971843Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-31T16:32:06.971894Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-03-31T16:32:06.971964Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:32:06.975243Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-03-31T16:32:06.975312Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409547 2025-03-31T16:32:06.975611Z node 114 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1003 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2025-03-31T16:32:06.975997Z node 114 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2025-03-31T16:32:06.976070Z node 114 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-14 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-15 |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TKesusTest::TestQuoterAccountResourcesBurst >> THDRRQuoterResourceTreeRuntimeTest::TestUpdateResourceSessions [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestStopConsuming [GOOD] >> 
THDRRQuoterResourceTreeRuntimeTest::TestUpdateConsumptionState [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestUpdateConsumptionStateAfterAllResourceAllocated [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestVeryBigWeights [GOOD] >> TKesusTest::TestAcquireUpgrade >> THDRRQuoterResourceTreeRuntimeTest::TestWeights [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestWeightsChange [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestVerySmallSpeed [GOOD] >> TKesusTest::TestAcquireBeforeTimeoutViaRelease >> THDRRQuoterResourceTreeRuntimeTest::TestAllocateResource [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestAllocationGranularity [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestAmountIsLessThanEpsilon [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestActiveSessionDisconnectsAndThenConnectsAgain [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestActiveMultiresourceSessionDisconnectsAndThenConnectsAgain [GOOD] >> TTopicReaderTests::TestRun_ReadMoreMessagesThanLimit_Without_Wait_NoDelimiter [GOOD] >> TTopicReaderTests::TestRun_ReadMessages_Output_Base64 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-14 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-15 >> KqpSinkTx::SnapshotROInteractive2 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-62 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-63 |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> THDRRQuoterResourceTreeRuntimeTest::TestVeryBigWeights [GOOD] |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> THDRRQuoterResourceTreeRuntimeTest::TestActiveMultiresourceSessionDisconnectsAndThenConnectsAgain [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-62 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-63 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-38 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-39 >> ReadOnlyVDisk::TestReads [GOOD] >> TKesusTest::TestAcquireUpgrade [GOOD] >> TKesusTest::TestAcquireTimeout >> TKesusTest::TestAcquireBeforeTimeoutViaRelease [GOOD] >> TKesusTest::TestAcquireBeforeTimeoutViaModeChange >> ReadOnlyVDisk::TestGarbageCollect [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestReads [GOOD] Test command err: RandomSeed# 8664196620194317151 === Trying to put and get a blob === SEND TEvPut with key [1:1:0:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #0 to read-only === Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #1 to read-only === Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 
131072}} === Putting VDisk #2 to read-only === Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #3 to read-only === Setting VDisk read-only to 1 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #4 to read-only === Setting VDisk read-only to 1 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #5 to read-only === Setting VDisk read-only to 1 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #6 to read-only === Setting VDisk read-only to 1 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #0 === Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #1 === Setting VDisk read-only to 0 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #2 === Setting VDisk read-only to 0 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #3 === Setting VDisk read-only to 0 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #4 === Setting VDisk read-only to 0 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #5 === Setting VDisk read-only to 0 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to 
normal VDisk #6 === Setting VDisk read-only to 0 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_transfer_writer/unittest >> TKesusTest::TestAcquireBeforeTimeoutViaModeChange [GOOD] >> KqpNewEngine::LocksSingleShard ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestGarbageCollect [GOOD] Test command err: RandomSeed# 223585838696870472 SEND TEvPut with key [1:1:0:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:1:0:0:32768:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 2 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:1:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:1:0] NODATA Size# 0}} Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] SEND TEvPut with key [1:1:2:0:0:131072:0] 2025-03-31T16:32:01.939385Z 1 00h01m40.100000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5310:698] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} 2025-03-31T16:32:01.944649Z 1 00h01m40.100000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5310:698] SEND TEvGet with key [1:1:2:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:1:0] NODATA Size# 0}} Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] SEND TEvPut with key [1:1:3:0:0:32768:0] 2025-03-31T16:32:02.820966Z 1 00h03m20.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5310:698] 2025-03-31T16:32:02.821898Z 2 00h03m20.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5317:705] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} 2025-03-31T16:32:03.272681Z 1 00h04m20.161024s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5310:698] 2025-03-31T16:32:03.272882Z 2 00h04m20.161024s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5317:705] SEND TEvGet with key [1:1:3:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:1:0] NODATA Size# 0}} Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] SEND TEvPut with key [1:1:4:0:0:131072:0] 2025-03-31T16:32:03.585484Z 1 00h05m00.200000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5310:698] 2025-03-31T16:32:03.586693Z 2 00h05m00.200000s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5317:705] 2025-03-31T16:32:03.587799Z 3 00h05m00.200000s 
:BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5324:712] 2025-03-31T16:32:03.588113Z 1 00h05m00.200000s :BS_PROXY_PUT ERROR: [c1c500ffc76d12b0] Result# TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:4:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 3 Situations# PUUUUU } { OrderNumber# 4 Situations# UPUUUU } { OrderNumber# 5 Situations# UUPUUU } { OrderNumber# 6 Situations# UUUPUU } { OrderNumber# 7 Situations# UUUUPU } { OrderNumber# 0 Situations# UUUUUE } { OrderNumber# 1 Situations# UUUUUE } { OrderNumber# 2 Situations# UUUUUE } ] " ApproximateFreeSpaceShare# 0.999988} GroupId# 2181038080 Marker# BPP12 TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:4:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 3 Situations# PUUUUU } { OrderNumber# 4 Situations# UPUUUU } { OrderNumber# 5 Situations# UUPUUU } { OrderNumber# 6 Situations# UUUPUU } { OrderNumber# 7 Situations# UUUUPU } { OrderNumber# 0 Situations# UUUUUE } { OrderNumber# 1 Situations# UUUUUE } { OrderNumber# 2 Situations# UUUUUE } ] " ApproximateFreeSpaceShare# 0.999988} 2025-03-31T16:32:04.034403Z 1 00h06m00.210512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5310:698] 2025-03-31T16:32:04.034623Z 2 00h06m00.210512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5317:705] 2025-03-31T16:32:04.034683Z 3 00h06m00.210512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5324:712] === Putting VDisk #3 to read-only === Setting VDisk read-only to 1 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] 2025-03-31T16:32:04.823987Z 1 00h07m40.260512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5310:698] 2025-03-31T16:32:04.824181Z 2 00h07m40.260512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5317:705] 2025-03-31T16:32:04.824241Z 3 00h07m40.260512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5324:712] 2025-03-31T16:32:04.824295Z 4 00h07m40.260512s :BS_SKELETON ERROR: VDISK[82000000:_:0:3:0]: Unavailable in read-only Sender# [1:5331:719] === Putting VDisk #4 to read-only === Setting VDisk read-only to 1 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] 2025-03-31T16:32:05.148207Z 1 00h08m20.262048s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5310:698] 2025-03-31T16:32:05.148407Z 2 00h08m20.262048s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5317:705] 
2025-03-31T16:32:05.148469Z 3 00h08m20.262048s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5324:712] 2025-03-31T16:32:05.148520Z 4 00h08m20.262048s :BS_SKELETON ERROR: VDISK[82000000:_:0:3:0]: Unavailable in read-only Sender# [1:5331:719] 2025-03-31T16:32:05.148571Z 5 00h08m20.262048s :BS_SKELETON ERROR: VDISK[82000000:_:0:4:0]: Unavailable in read-only Sender# [1:5338:726] === Putting VDisk #5 to read-only === Setting VDisk read-only to 1 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] 2025-03-31T16:32:05.423215Z 1 00h09m00.310512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5310:698] 2025-03-31T16:32:05.423417Z 2 00h09m00.310512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5317:705] 2025-03-31T16:32:05.423482Z 3 00h09m00.310512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5324:712] 2025-03-31T16:32:05.423537Z 4 00h09m00.310512s :BS_SKELETON ERROR: VDISK[82000000:_:0:3:0]: Unavailable in read-only Sender# [1:5331:719] 2025-03-31T16:32:05.423591Z 5 00h09m00.310512s :BS_SKELETON ERROR: VDISK[82000000:_:0:4:0]: Unavailable in read-only Sender# [1:5338:726] 2025-03-31T16:32:05.423647Z 6 00h09m00.310512s :BS_SKELETON ERROR: VDISK[82000000:_:0:5:0]: Unavailable in read-only Sender# [1:5345:733] === Putting VDisk #6 to read-only === Setting VDisk read-only to 1 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] 2025-03-31T16:32:05.661822Z 1 00h09m40.312048s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5310:698] 2025-03-31T16:32:05.662038Z 2 00h09m40.312048s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5317:705] 2025-03-31T16:32:05.662103Z 3 00h09m40.312048s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5324:712] 2025-03-31T16:32:05.662157Z 4 00h09m40.312048s :BS_SKELETON ERROR: VDISK[82000000:_:0:3:0]: Unavailable in read-only Sender# [1:5331:719] 2025-03-31T16:32:05.662212Z 5 00h09m40.312048s :BS_SKELETON ERROR: VDISK[82000000:_:0:4:0]: Unavailable in read-only Sender# [1:5338:726] 2025-03-31T16:32:05.662268Z 6 00h09m40.312048s :BS_SKELETON ERROR: VDISK[82000000:_:0:5:0]: Unavailable in read-only Sender# [1:5345:733] 2025-03-31T16:32:05.662322Z 7 00h09m40.312048s :BS_SKELETON ERROR: VDISK[82000000:_:0:6:0]: Unavailable in read-only Sender# [1:5352:740] === Putting VDisk #0 to normal === Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] 2025-03-31T16:32:05.934378Z 2 00h10m20.360512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5317:705] 2025-03-31T16:32:05.934488Z 3 00h10m20.360512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5324:712] 2025-03-31T16:32:05.934547Z 4 00h10m20.360512s :BS_SKELETON ERROR: VDISK[82000000:_:0:3:0]: Unavailable in read-only Sender# [1:5331:719] 2025-03-31T16:32:05.934602Z 5 00h10m20.360512s :BS_SKELETON ERROR: VDISK[82000000:_:0:4:0]: Unavailable in read-only Sender# [1:5338:726] 2025-03-31T16:32:05.934657Z 6 00h10m20.360512s :BS_SKELETON ERROR: VDISK[82000000:_:0:5:0]: Unavailable in read-only Sender# [1:5345:733] 2025-03-31T16:32:05.934710Z 7 00h10m20.360512s :BS_SKELETON ERROR: VDISK[82000000:_:0:6:0]: Unavailable in read-only Sender# [1:5352:740] === Putting VDisk #1 to normal === Setting VDisk read-only to 0 for position 1 Invoking SetVDiskReadOnly for 
vdisk [82000000:1:0:1:0] 2025-03-31T16:32:06.246421Z 3 00h11m00.400000s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5324:712] 2025-03-31T16:32:06.246516Z 4 00h11m00.400000s :BS_SKELETON ERROR: VDISK[82000000:_:0:3:0]: Unavailable in read-only Sender# [1:5331:719] 2025-03-31T16:32:06.246574Z 5 00h11m00.400000s :BS_SKELETON ERROR: VDISK[82000000:_:0:4:0]: Unavailable in read-only Sender# [1:5338:726] 2025-03-31T16:32:06.246631Z 6 00h11m00.400000s :BS_SKELETON ERROR: VDISK[82000000:_:0:5:0]: Unavailable in read-only Sender# [1:5345:733] 2025-03-31T16:32:06.246680Z 7 00h11m00.400000s :BS_SKELETON ERROR: VDISK[82000000:_:0:6:0]: Unavailable in read-only Sender# [1:5352:740] === Putting VDisk #2 to normal === Setting VDisk read-only to 0 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] 2025-03-31T16:32:06.589604Z 4 00h11m40.410512s :BS_SKELETON ERROR: VDISK[82000000:_:0:3:0]: Unavailable in read-only Sender# [1:5331:719] 2025-03-31T16:32:06.589699Z 5 00h11m40.410512s :BS_SKELETON ERROR: VDISK[82000000:_:0:4:0]: Unavailable in read-only Sender# [1:5338:726] 2025-03-31T16:32:06.589755Z 6 00h11m40.410512s :BS_SKELETON ERROR: VDISK[82000000:_:0:5:0]: Unavailable in read-only Sender# [1:5345:733] 2025-03-31T16:32:06.589810Z 7 00h11m40.410512s :BS_SKELETON ERROR: VDISK[82000000:_:0:6:0]: Unavailable in read-only Sender# [1:5352:740] === Putting VDisk #3 to normal === Setting VDisk read-only to 0 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] 2025-03-31T16:32:06.986043Z 5 00h12m20.450512s :BS_SKELETON ERROR: VDISK[82000000:_:0:4:0]: Unavailable in read-only Sender# [1:5338:726] 2025-03-31T16:32:06.986133Z 6 00h12m20.450512s :BS_SKELETON ERROR: VDISK[82000000:_:0:5:0]: Unavailable in read-only Sender# [1:5345:733] 2025-03-31T16:32:06.986187Z 7 00h12m20.450512s :BS_SKELETON ERROR: VDISK[82000000:_:0:6:0]: Unavailable in read-only Sender# [1:5352:740] Setting VDisk read-only to 0 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] 2025-03-31T16:32:08.069730Z 6 00h14m00.461536s :BS_SKELETON ERROR: VDISK[82000000:_:0:5:0]: Unavailable in read-only Sender# [1:5345:733] 2025-03-31T16:32:08.069831Z 7 00h14m00.461536s :BS_SKELETON ERROR: VDISK[82000000:_:0:6:0]: Unavailable in read-only Sender# [1:5352:740] SEND TEvGet with key [1:1:3:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:1:0] NODATA Size# 0}} Setting VDisk read-only to 0 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] 2025-03-31T16:32:08.470125Z 7 00h14m40.500000s :BS_SKELETON ERROR: VDISK[82000000:_:0:6:0]: Unavailable in read-only Sender# [1:5352:740] SEND TEvGet with key [1:1:3:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:1:0] NODATA Size# 0}} Setting VDisk read-only to 0 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] SEND TEvGet with key [1:1:3:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:1:0] NODATA Size# 0}} SEND TEvPut with key [1:1:4:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999976} SEND TEvGet with key [1:1:4:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:1:0] NODATA Size# 0}} >> LdapAuthProviderTest_nonSecure::LdapRefreshGroupsInfoDisableNestedGroupsGood [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsUseInvalidSearchFilterBad [GOOD] |94.9%| [TA] 
$(B)/ydb/core/tx/replication/service/ut_transfer_writer/test-results/unittest/{meta.json ... results_accumulator.log} |94.9%| [TA] {RESULT} $(B)/ydb/core/tx/replication/service/ut_transfer_writer/test-results/unittest/{meta.json ... results_accumulator.log} >> DataShardSnapshots::VolatileSnapshotReadTable [GOOD] >> DataShardSnapshots::VolatileSnapshotRefreshDiscard >> TKesusTest::TestAcquireSemaphoreTimeout >> TKesusTest::TestAttachNewSessions ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAcquireBeforeTimeoutViaModeChange [GOOD] Test command err: 2025-03-31T16:32:09.520790Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-31T16:32:09.520951Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-31T16:32:09.545098Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-31T16:32:09.545228Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-31T16:32:09.573657Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-31T16:32:09.575473Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:130:2156], cookie=11718480051727774258, session=0, seqNo=0) 2025-03-31T16:32:09.575677Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-03-31T16:32:09.589887Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:130:2156], cookie=11718480051727774258, session=1) 2025-03-31T16:32:09.590210Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:131:2157], cookie=15673357991309762966, session=0, seqNo=0) 2025-03-31T16:32:09.590345Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-03-31T16:32:09.603700Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:131:2157], cookie=15673357991309762966, session=2) 2025-03-31T16:32:09.604889Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:130:2156], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-03-31T16:32:09.605071Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-03-31T16:32:09.605170Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-03-31T16:32:09.617041Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:130:2156], cookie=111) 2025-03-31T16:32:09.617338Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:130:2156], cookie=112, session=1, semaphore="Lock2" count=1) 2025-03-31T16:32:09.617483Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 2 "Lock2" 2025-03-31T16:32:09.617647Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 1 2025-03-31T16:32:09.629816Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:130:2156], cookie=112) 2025-03-31T16:32:09.630245Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[1:130:2156], cookie=333, name="Lock1") 2025-03-31T16:32:09.630342Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 1 "Lock1" owner link 2025-03-31T16:32:09.630527Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:131:2157], cookie=222, 
session=2, semaphore="Lock1" count=1) 2025-03-31T16:32:09.630640Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 3 "Lock1" 2025-03-31T16:32:09.630712Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 3 "Lock1" queue: next order #3 session 2 2025-03-31T16:32:09.630830Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:131:2157], cookie=223, session=2, semaphore="Lock2" count=18446744073709551615) 2025-03-31T16:32:09.643055Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[1:130:2156], cookie=333) 2025-03-31T16:32:09.643181Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:131:2157], cookie=222) 2025-03-31T16:32:09.643238Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:131:2157], cookie=223) 2025-03-31T16:32:09.643544Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[1:130:2156], cookie=334, name="Lock2") 2025-03-31T16:32:09.643652Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 2 "Lock2" owner link 2025-03-31T16:32:09.643724Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2025-03-31T16:32:09.655766Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[1:130:2156], cookie=334) 2025-03-31T16:32:09.656464Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:159:2183], cookie=18185971011399793568, name="Lock1") 2025-03-31T16:32:09.656575Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:159:2183], cookie=18185971011399793568) 2025-03-31T16:32:09.657010Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:162:2186], cookie=13550124237895385102, name="Lock2") 2025-03-31T16:32:09.657089Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:162:2186], cookie=13550124237895385102) 2025-03-31T16:32:09.675330Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-31T16:32:09.675438Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-31T16:32:09.675997Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-31T16:32:09.676530Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-31T16:32:09.694650Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-31T16:32:09.694839Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2025-03-31T16:32:09.694885Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 3 "Lock1" queue: next order #3 session 2 2025-03-31T16:32:09.695240Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:202:2216], cookie=18386313520519767025, name="Lock1") 2025-03-31T16:32:09.695337Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:202:2216], cookie=18386313520519767025) 2025-03-31T16:32:09.695860Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:210:2223], cookie=17968236941877624550, name="Lock2") 2025-03-31T16:32:09.695917Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:210:2223], cookie=17968236941877624550) 
2025-03-31T16:32:10.083052Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-31T16:32:10.083182Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-31T16:32:10.101826Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-31T16:32:10.101962Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-31T16:32:10.118417Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-31T16:32:10.119282Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:130:2156], cookie=15939487290401704206, session=0, seqNo=0) 2025-03-31T16:32:10.119438Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-03-31T16:32:10.142311Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:130:2156], cookie=15939487290401704206, session=1) 2025-03-31T16:32:10.142650Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:131:2157], cookie=11979677548776372975, session=0, seqNo=0) 2025-03-31T16:32:10.142799Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-03-31T16:32:10.155214Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:131:2157], cookie=11979677548776372975, session=2) 2025-03-31T16:32:10.156390Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:130:2156], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-03-31T16:32:10.156558Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-03-31T16:32:10.156693Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-03-31T16:32:10.169102Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:130:2156], cookie=111) 2025-03-31T16:32:10.169463Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:130:2156], cookie=112, session=1, semaphore="Lock2" count=1) 2025-03-31T16:32:10.169622Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 2 "Lock2" 2025-03-31T16:32:10.169706Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 1 2025-03-31T16:32:10.181921Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:130:2156], cookie=112) 2025-03-31T16:32:10.182343Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:130:2156], cookie=333, session=1, semaphore="Lock1" count=1) 2025-03-31T16:32:10.182613Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:131:2157], cookie=222, session=2, semaphore="Lock1" count=1) 2025-03-31T16:32:10.182712Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #3 session 2 2025-03-31T16:32:10.182828Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:131:2157], cookie=223, session=2, semaphore="Lock2" count=18446744073709551615) 2025-03-31T16:32:10.195226Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:130:2156], cookie=333) 2025-03-31T16:32:10.195321Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:131:2157], cookie=222) 2025-03-31T16:32:10.195355Z node 2 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:131:2157], cookie=223) 2025-03-31T16:32:10.196025Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:157:2181], cookie=12855498004827122041, name="Lock1") 2025-03-31T16:32:10.196131Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:157:2181], cookie=12855498004827122041) 2025-03-31T16:32:10.196649Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:160:2184], cookie=12098677354043625498, name="Lock2") 2025-03-31T16:32:10.196732Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:160:2184], cookie=12098677354043625498) 2025-03-31T16:32:10.197164Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:163:2187], cookie=3641846834349703775, name="Lock1") 2025-03-31T16:32:10.197237Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:163:2187], cookie=3641846834349703775) 2025-03-31T16:32:10.197775Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:166:2190], cookie=17043997212675339236, name="Lock2") 2025-03-31T16:32:10.197853Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:166:2190], cookie=17043997212675339236) 2025-03-31T16:32:10.198126Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:131:2157], cookie=444, session=2, semaphore="Lock2" count=1) 2025-03-31T16:32:10.198267Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2025-03-31T16:32:10.210524Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:131:2157], cookie=444) 2025-03-31T16:32:10.211270Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:171:2195], cookie=8577563173563248871, name="Lock2") 2025-03-31T16:32:10.211372Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:171:2195], cookie=8577563173563248871) 2025-03-31T16:32:10.211937Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:174:2198], cookie=6777922523388577091, name="Lock2") 2025-03-31T16:32:10.212031Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:174:2198], cookie=6777922523388577091) 2025-03-31T16:32:10.226621Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-31T16:32:10.226722Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-31T16:32:10.227389Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-31T16:32:10.228203Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-31T16:32:10.276283Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-31T16:32:10.276471Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-03-31T16:32:10.276522Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #3 session 2 2025-03-31T16:32:10.276564Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 1 2025-03-31T16:32:10.276592Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 
2025-03-31T16:32:10.276941Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:214:2228], cookie=10724555555196456840, name="Lock1") 2025-03-31T16:32:10.277033Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:214:2228], cookie=10724555555196456840) 2025-03-31T16:32:10.277838Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:222:2235], cookie=12768222630063294147, name="Lock2") 2025-03-31T16:32:10.277915Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:222:2235], cookie=12768222630063294147) >> LdapAuthProviderTest_LdapsScheme::LdapRefreshGroupsInfoDisableNestedGroupsGood [GOOD] >> TKesusTest::TestQuoterAccountResourcesBurst [GOOD] >> TKesusTest::TestQuoterAccountResourcesAggregateClients >> TKesusTest::TestAttachNewSessions [GOOD] >> TKesusTest::TestAttachMissingSession >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-21 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-22 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_nonSecure::LdapRefreshGroupsInfoDisableNestedGroupsGood [GOOD] Test command err: 2025-03-31T16:31:35.792850Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012181224268756:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:35.792921Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00171a/r3tmp/tmpnZO4rw/pdisk_1.dat 2025-03-31T16:31:36.239444Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:31:36.247722Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:36.247811Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:36.249098Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1291, node 1 2025-03-31T16:31:36.414734Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:36.414763Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:36.414780Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:36.414911Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:31:36.723291Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-31T16:31:36.724885Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-31T16:31:36.724909Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:31:36.725891Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:62216, port: 62216 2025-03-31T16:31:36.726513Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-31T16:31:36.732255Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: groupDN 
2025-03-31T16:31:36.789787Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****n2gg (9D3EECFF) () has now valid token of ldapuser@ldap 2025-03-31T16:31:38.969867Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012194305031721:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:38.969966Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00171a/r3tmp/tmpKykCRA/pdisk_1.dat 2025-03-31T16:31:39.078074Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15046, node 2 2025-03-31T16:31:39.112711Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:39.112817Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:39.114803Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:31:39.162239Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:39.162264Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:39.162271Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:39.162396Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:31:39.324267Z node 2 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-31T16:31:39.325098Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-31T16:31:39.325128Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:31:39.325865Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:18777, port: 18777 2025-03-31T16:31:39.325947Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=invalidRobouser,dc=search,dc=yandex,dc=net 2025-03-31T16:31:39.330171Z node 2 :LDAP_AUTH_PROVIDER DEBUG: Could not perform initial LDAP bind for dn cn=invalidRobouser,dc=search,dc=yandex,dc=net on server ldap://localhost:18777. Invalid credentials 2025-03-31T16:31:39.330438Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****PoKg (A735379E) () has now permanent error message 'Could not login via LDAP (Could not perform initial LDAP bind for dn cn=invalidRobouser,dc=search,dc=yandex,dc=net on server ldap://localhost:18777. 
Invalid credentials)' 2025-03-31T16:31:42.350087Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488012210421546082:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:42.350169Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00171a/r3tmp/tmp867Y6K/pdisk_1.dat 2025-03-31T16:31:42.505312Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:31:42.528483Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:42.528557Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:42.533495Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3886, node 3 2025-03-31T16:31:42.564113Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:42.564134Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:42.564139Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:42.564264Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:31:42.687212Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-31T16:31:42.688954Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-31T16:31:42.688973Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:31:42.689431Z node 3 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:8167, port: 8167 2025-03-31T16:31:42.689481Z node 3 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-31T16:31:42.692425Z node 3 :LDAP_AUTH_PROVIDER DEBUG: Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldap://localhost:8167. Invalid credentials 2025-03-31T16:31:42.692622Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****TtsA (8EE43BD5) () has now permanent error message 'Could not login via LDAP (Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldap://localhost:8167. 
Invalid credentials)' 2025-03-31T16:31:45.657223Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488012223668258273:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:45.657539Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00171a/r3tmp/tmpiPXV4r/pdisk_1.dat 2025-03-31T16:31:45.760182Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3177, node 4 2025-03-31T16:31:45.791592Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:45.791697Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:45.793502Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:31:45.833402Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:45.833428Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:45.833435Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:45.833571Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:31:46.157342Z node 4 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-31T16:31:46.163866Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-31T16:31:46.163901Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:31:46.165764Z node 4 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:11180, port: 11180 2025-03-31T16:31:46.165874Z node 4 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-31T16:31:46.169977Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-31T16:31:46.170358Z node 4 :LDAP_AUTH_PROVIDER DEBUG: LDAP user ldapuser does not exist. LDAP search for filter uid=ldapuser on server ldap://localhost:11180 return no entries 2025-03-31T16:31:46.170605Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****yKkQ (21896ED4) () has now permanent error message 'Could not login via LDAP (LDAP user ldapuser does not exist. 
LDAP search for filter uid=ldapuser on server ldap://localhost:11180 return no entries)' 2025-03-31T16:31:48.989914Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7488012235115365569:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:48.989978Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00171a/r3tmp/tmpJ3Ydso/pdisk_1.dat 2025-03-31T16:31:49.080683Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29584, node 5 2025-03-31T16:31:49.119925Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:49.120041Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:49.121771Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:31:49.138775Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:49.138794Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:49.138802Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:49.138945Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:31:49.223144Z node 5 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-31T16:31:49.226007Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-31T16:31:49.226038Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:31:49.226664Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:17648, port: 17648 2025-03-31T16:31:49.226752Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-31T16:31:49.229895Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-31T16:31:49.271462Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-03-31T16:31:49.272090Z node 5 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-03-31T16:31:49.272171Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-31T16:31:49.315412Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-31T16:31:49.359407Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-31T16:31:49.360126Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****d7tQ 
(ED021466) () has now valid token of ldapuser@ldap 2025-03-31T16:31:53.990478Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7488012235115365569:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:53.990564Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:31:53.997690Z node 5 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****d7tQ (ED021466) 2025-03-31T16:31:53.997776Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:17648, port: 17648 2025-03-31T16:31:53.997847Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-31T16:31:54.001901Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-31T16:31:54.047341Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-03-31T16:31:54.047773Z node 5 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-03-31T16:31:54.047811Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-31T16:31:54.095401Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-31T16:31:54.143382Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-31T16:31:54.144161Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****d7tQ (ED021466) () has now valid token of ldapuser@ldap 2025-03-31T16:31:59.899398Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7488012282850279767:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:59.899490Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00171a/r3tmp/tmpJ6HIaC/pdisk_1.dat 2025-03-31T16:31:59.981647Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2324, node 6 2025-03-31T16:32:00.028633Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:00.028731Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:32:00.030160Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:32:00.041831Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:32:00.041858Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:32:00.041868Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:32:00.041986Z node 6 :NET_CLASSIFIER ERROR: got bad distributable 
configuration 2025-03-31T16:32:00.151793Z node 6 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-31T16:32:00.154312Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-31T16:32:00.154340Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:32:00.154904Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:28376, port: 28376 2025-03-31T16:32:00.154986Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-31T16:32:00.160124Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-31T16:32:00.207661Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****eygA (7BFD2301) () has now valid token of ldapuser@ldap 2025-03-31T16:32:03.907438Z node 6 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****eygA (7BFD2301) 2025-03-31T16:32:03.907657Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:28376, port: 28376 2025-03-31T16:32:03.907809Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-31T16:32:03.911967Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-31T16:32:03.955765Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****eygA (7BFD2301) () has now valid token of ldapuser@ldap 2025-03-31T16:32:04.899123Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7488012282850279767:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:04.899198Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:32:06.911784Z node 6 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****eygA (7BFD2301) 2025-03-31T16:32:06.912115Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:28376, port: 28376 2025-03-31T16:32:06.912214Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-31T16:32:06.916989Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-31T16:32:06.959661Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****eygA (7BFD2301) () has now valid token of ldapuser@ldap 2025-03-31T16:32:09.912648Z node 6 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****eygA (7BFD2301) 2025-03-31T16:32:09.912926Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:28376, port: 28376 2025-03-31T16:32:09.913018Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-31T16:32:09.920204Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-31T16:32:09.963663Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****eygA (7BFD2301) () has now valid token of ldapuser@ldap >> TKesusTest::TestAttachMissingSession [GOOD] >> TKesusTest::TestAttachOldGeneration >> THDRRQuoterResourceTreeRuntimeTest::TestHierarchicalQuotas [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestHangDefence [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestMoreStrongChildLimit [GOOD] >> 
THDRRQuoterResourceTreeRuntimeTest::TestInactiveSessionDisconnectsAndThenConnectsAgain [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestInactiveMultiresourceSessionDisconnectsAndThenConnectsAgain [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_StartTls::LdapFetchGroupsUseInvalidSearchFilterBad [GOOD] Test command err: 2025-03-31T16:31:35.775162Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012180577245579:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:35.775330Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001754/r3tmp/tmphCT1th/pdisk_1.dat 2025-03-31T16:31:36.254535Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:31:36.259120Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:36.259220Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:36.262705Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4175, node 1 2025-03-31T16:31:36.415309Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:36.415342Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:36.415349Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:36.415491Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:31:36.694522Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-31T16:31:36.698678Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-31T16:31:36.698728Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:31:36.700245Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:17309, port: 17309 2025-03-31T16:31:36.700323Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-31T16:31:36.767623Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-31T16:31:36.812033Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-03-31T16:31:36.812531Z node 1 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-03-31T16:31:36.812585Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-31T16:31:36.855741Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), 
attributes: memberOf 2025-03-31T16:31:36.903716Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-31T16:31:36.905648Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****oPOg (AAD0D158) () has now valid token of ldapuser@ldap 2025-03-31T16:31:40.775518Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488012180577245579:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:40.775607Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:31:40.906685Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****oPOg (AAD0D158) 2025-03-31T16:31:40.906871Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:17309, port: 17309 2025-03-31T16:31:40.906953Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-31T16:31:40.959683Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-31T16:31:40.960148Z node 1 :LDAP_AUTH_PROVIDER DEBUG: LDAP user ldapuser does not exist. LDAP search for filter uid=ldapuser on server ldaps://localhost:17309 return no entries 2025-03-31T16:31:40.960731Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****oPOg (AAD0D158) () has now permanent error message 'Could not login via LDAP (LDAP user ldapuser does not exist. LDAP search for filter uid=ldapuser on server ldaps://localhost:17309 return no entries)' 2025-03-31T16:31:45.909554Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****oPOg (AAD0D158) 2025-03-31T16:31:47.439014Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012231526055596:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:47.439075Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001754/r3tmp/tmpXyVPbi/pdisk_1.dat 2025-03-31T16:31:47.549075Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4462, node 2 2025-03-31T16:31:47.587809Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:47.587921Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:47.592684Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:31:47.631413Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:47.631445Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:47.631453Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:47.631617Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:31:47.710383Z node 2 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-31T16:31:47.713097Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 
2025-03-31T16:31:47.713129Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:31:47.713874Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:1862, port: 1862 2025-03-31T16:31:47.713945Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-31T16:31:47.771724Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-31T16:31:47.772198Z node 2 :LDAP_AUTH_PROVIDER DEBUG: Could not perform search for filter uid=ldapuser on server ldaps://localhost:1862. Server is busy 2025-03-31T16:31:47.772738Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****Fnew (7CFEB064) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldaps://localhost:1862. Server is busy)' 2025-03-31T16:31:47.773027Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-31T16:31:47.773045Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:31:47.773668Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:1862, port: 1862 2025-03-31T16:31:47.773739Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-31T16:31:47.827629Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-31T16:31:47.828166Z node 2 :LDAP_AUTH_PROVIDER DEBUG: Could not perform search for filter uid=ldapuser on server ldaps://localhost:1862. Server is busy 2025-03-31T16:31:47.828601Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****Fnew (7CFEB064) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldaps://localhost:1862. Server is busy)' 2025-03-31T16:31:50.447266Z node 2 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****Fnew (7CFEB064) 2025-03-31T16:31:50.447609Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-31T16:31:50.447644Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:31:50.448902Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:1862, port: 1862 2025-03-31T16:31:50.449010Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-31T16:31:50.499905Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-31T16:31:50.500405Z node 2 :LDAP_AUTH_PROVIDER DEBUG: Could not perform search for filter uid=ldapuser on server ldaps://localhost:1862. Server is busy 2025-03-31T16:31:50.500807Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****Fnew (7CFEB064) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldaps://localhost:1862. 
Server is busy)' 2025-03-31T16:31:52.439427Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488012231526055596:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:52.439509Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:31:54.452643Z node 2 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****Fnew (7CFEB064) 2025-03-31T16:31:54.452946Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-31T16:31:54.452972Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:31:54.453766Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:1862, port: 1862 2025-03-31T16:31:54.453820Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-31T16:31:54.507525Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-31T16:31:54.551468Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-03-31T16:31:54.552171Z node 2 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-03-31T16:31:54.552219Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-31T16:31:54.599462Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-31T16:31:54.643499Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-31T16:31:54.644592Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****Fnew (7CFEB064) () has now valid token of ldapuser@ldap 2025-03-31T16:31:58.307830Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488012277138166234:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:58.307895Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001754/r3tmp/tmpvgX3MC/pdisk_1.dat 2025-03-31T16:31:58.382755Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1532, node 3 2025-03-31T16:31:58.434265Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:58.434370Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:58.436039Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:31:58.438549Z node 3 :NET_CLASSIFIER WARN: 
distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:58.438566Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:58.438579Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:58.438689Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:31:58.623462Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-31T16:31:58.626025Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-31T16:31:58.626050Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:31:58.626576Z node 3 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:12316, port: 12316 2025-03-31T16:31:58.626656Z node 3 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-03-31T16:31:58.636193Z node 3 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-31T16:31:58.679513Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-31T16:31:58.723403Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-03-31T16:31:58.767972Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****mwxg (20CBC837) () has now valid token of ldapuser@ldap 2025-03-31T16:32:01.262716Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488012290608073010:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:01.262774Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001754/r3tmp/tmpeBHEOp/pdisk_1.dat 2025-03-31T16:32:01.367166Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:32:01.400505Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:01.400611Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 26933, node 4 2025-03-31T16:32:01.405041Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:32:01.439588Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:32:01.439608Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:32:01.439614Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:32:01.439764Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:32:01.531963Z node 4 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-31T16:32:01.535295Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-31T16:32:01.535326Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:32:01.536262Z node 4 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:5017, port: 5017 2025-03-31T16:32:01.536360Z node 4 :LDAP_AUTH_PROVIDER DEBUG: 
start TLS 2025-03-31T16:32:01.545583Z node 4 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-31T16:32:01.587551Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-31T16:32:01.632063Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****Rgpg (72F4EB3F) () has now valid token of ldapuser@ldap 2025-03-31T16:32:04.394268Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7488012302681882062:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:04.394342Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001754/r3tmp/tmplNKa8L/pdisk_1.dat 2025-03-31T16:32:04.524249Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:32:04.542401Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:04.542493Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:32:04.544297Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19280, node 5 2025-03-31T16:32:04.582521Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:32:04.582545Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:32:04.582553Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:32:04.582694Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:32:04.720273Z node 5 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-31T16:32:04.722794Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-31T16:32:04.722843Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:32:04.723580Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:11316, port: 11316 2025-03-31T16:32:04.723682Z node 5 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-03-31T16:32:04.733260Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-31T16:32:04.775595Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: groupDN 2025-03-31T16:32:04.823480Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-03-31T16:32:04.824091Z node 5 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-03-31T16:32:04.824144Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2025-03-31T16:32:04.867491Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: 
(|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2025-03-31T16:32:04.911447Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2025-03-31T16:32:04.912473Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****E8nQ (2737EC7F) () has now valid token of ldapuser@ldap 2025-03-31T16:32:07.481357Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7488012319148613471:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:07.481406Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001754/r3tmp/tmpFDTYrn/pdisk_1.dat 2025-03-31T16:32:07.572921Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62913, node 6 2025-03-31T16:32:07.616740Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:07.616910Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:32:07.618377Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:32:07.636564Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:32:07.636588Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:32:07.636593Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:32:07.636702Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:32:07.762718Z node 6 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-31T16:32:07.765395Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-31T16:32:07.765423Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:32:07.766178Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:24619, port: 24619 2025-03-31T16:32:07.766285Z node 6 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-03-31T16:32:07.774899Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-31T16:32:07.819641Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: &(uid=ldapuser)(), attributes: memberOf 2025-03-31T16:32:07.819739Z node 6 :LDAP_AUTH_PROVIDER DEBUG: Could not perform search for filter &(uid=ldapuser)() on server ldap://localhost:24619. Bad search filter 2025-03-31T16:32:07.820246Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****w38A (1B09FFA9) () has now permanent error message 'Could not login via LDAP (Could not perform search for filter &(uid=ldapuser)() on server ldap://localhost:24619. 
Bad search filter)' >> TKesusTest::TestKesusConfig >> KqpIndexes::SimpleVectorIndexOrderByCosineDistanceNotNullableLevel2 [GOOD] >> KqpRanges::IsNullPartial ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_LdapsScheme::LdapRefreshGroupsInfoDisableNestedGroupsGood [GOOD] Test command err: 2025-03-31T16:31:35.775875Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012180693624858:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:35.775946Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00170c/r3tmp/tmpaC7J5l/pdisk_1.dat 2025-03-31T16:31:36.235006Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:31:36.239724Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:36.239818Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:36.249284Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9177, node 1 2025-03-31T16:31:36.418377Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:36.418412Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:36.418422Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:36.418526Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:31:36.687807Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-31T16:31:36.690152Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-31T16:31:36.690188Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:31:36.699602Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:13415, port: 13415 2025-03-31T16:31:36.699690Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-31T16:31:36.765406Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: groupDN 2025-03-31T16:31:36.811817Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****rGqg (AF76D38B) () has now valid token of ldapuser@ldap 2025-03-31T16:31:39.208542Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012195831668881:2210];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:39.235878Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00170c/r3tmp/tmpCAqDwH/pdisk_1.dat 2025-03-31T16:31:39.336912Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:31:39.363272Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:39.363353Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) 
VolatileState: Disconnected -> Connecting 2025-03-31T16:31:39.364972Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13517, node 2 2025-03-31T16:31:39.414867Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:39.414898Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:39.414910Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:39.415043Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:31:39.561536Z node 2 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-31T16:31:39.565316Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-31T16:31:39.565346Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:31:39.566063Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:1620, port: 1620 2025-03-31T16:31:39.566130Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=invalidRobouser,dc=search,dc=yandex,dc=net 2025-03-31T16:31:39.623570Z node 2 :LDAP_AUTH_PROVIDER DEBUG: Could not perform initial LDAP bind for dn cn=invalidRobouser,dc=search,dc=yandex,dc=net on server ldaps://localhost:1620. Invalid credentials 2025-03-31T16:31:39.624075Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****ONDw (082B6542) () has now permanent error message 'Could not login via LDAP (Could not perform initial LDAP bind for dn cn=invalidRobouser,dc=search,dc=yandex,dc=net on server ldaps://localhost:1620. Invalid credentials)' 2025-03-31T16:31:42.274822Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488012210314128269:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:42.274892Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00170c/r3tmp/tmpB74uNT/pdisk_1.dat 2025-03-31T16:31:42.385154Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2210, node 3 2025-03-31T16:31:42.407495Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:42.407574Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:42.409369Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:31:42.447654Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:42.447677Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:42.447685Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:42.447805Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:31:42.523226Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-31T16:31:42.525735Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-31T16:31:42.525753Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, target database 
candidates(1): /Root 2025-03-31T16:31:42.526433Z node 3 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:25350, port: 25350 2025-03-31T16:31:42.526491Z node 3 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-31T16:31:42.579632Z node 3 :LDAP_AUTH_PROVIDER DEBUG: Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldaps://localhost:25350. Invalid credentials 2025-03-31T16:31:42.580051Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****2osA (B8687605) () has now permanent error message 'Could not login via LDAP (Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldaps://localhost:25350. Invalid credentials)' 2025-03-31T16:31:45.562076Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488012224183999970:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:45.562168Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00170c/r3tmp/tmp8aDz84/pdisk_1.dat 2025-03-31T16:31:45.678803Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19076, node 4 2025-03-31T16:31:45.713170Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:45.713256Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:45.723515Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:31:45.744803Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:45.744836Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:45.744845Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:45.745004Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:31:45.895091Z node 4 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-31T16:31:45.899044Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-31T16:31:45.899076Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:31:45.899846Z node 4 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:26449, port: 26449 2025-03-31T16:31:45.899925Z node 4 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-31T16:31:45.955687Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-31T16:31:45.956235Z node 4 :LDAP_AUTH_PROVIDER DEBUG: LDAP user ldapuser does not exist. LDAP search for filter uid=ldapuser on server ldaps://localhost:26449 return no entries 2025-03-31T16:31:45.956829Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****6A3A (44013DAB) () has now permanent error message 'Could not login via LDAP (LDAP user ldapuser does not exist. 
LDAP search for filter uid=ldapuser on server ldaps://localhost:26449 return no entries)' 2025-03-31T16:31:48.895255Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7488012237049083586:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:48.895354Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00170c/r3tmp/tmp8haSdZ/pdisk_1.dat 2025-03-31T16:31:48.993600Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1898, node 5 2025-03-31T16:31:49.036665Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:49.036751Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:49.038449Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:31:49.069047Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:49.069067Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:49.069075Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:49.069179Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:31:49.149687Z node 5 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-31T16:31:49.153211Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-31T16:31:49.153244Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:31:49.154007Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:23871, port: 23871 2025-03-31T16:31:49.154088Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-31T16:31:49.207749Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-31T16:31:49.251459Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-03-31T16:31:49.252100Z node 5 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-03-31T16:31:49.252162Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-31T16:31:49.299486Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-31T16:31:49.343415Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-31T16:31:49.344704Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****im8w 
(E2BCCAF2) () has now valid token of ldapuser@ldap 2025-03-31T16:31:53.895722Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7488012237049083586:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:53.895820Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:31:54.907321Z node 5 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****im8w (E2BCCAF2) 2025-03-31T16:31:54.907443Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:23871, port: 23871 2025-03-31T16:31:54.907520Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-31T16:31:54.963582Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-31T16:31:55.011423Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-03-31T16:31:55.012032Z node 5 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-03-31T16:31:55.012081Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-31T16:31:55.059423Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-31T16:31:55.107461Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-31T16:31:55.108559Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****im8w (E2BCCAF2) () has now valid token of ldapuser@ldap 2025-03-31T16:31:57.909822Z node 5 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****im8w (E2BCCAF2) 2025-03-31T16:31:57.909949Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:23871, port: 23871 2025-03-31T16:31:57.910048Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-31T16:31:57.963730Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-31T16:31:58.011437Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-03-31T16:31:58.011965Z node 5 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-03-31T16:31:58.011996Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-31T16:31:58.059451Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-31T16:31:58.103419Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: 
(|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-31T16:31:58.104507Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****im8w (E2BCCAF2) () has now valid token of ldapuser@ldap 2025-03-31T16:32:00.058897Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7488012287165715326:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:00.058970Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00170c/r3tmp/tmpt0JXH2/pdisk_1.dat 2025-03-31T16:32:00.151237Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18188, node 6 2025-03-31T16:32:00.185164Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:00.185268Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:32:00.188262Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:32:00.223871Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:32:00.223900Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:32:00.223937Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:32:00.224083Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:32:00.437803Z node 6 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-31T16:32:00.441202Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-31T16:32:00.441241Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:32:00.441867Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:9167, port: 9167 2025-03-31T16:32:00.441950Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-31T16:32:00.499647Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-31T16:32:00.544024Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****nuYg (C73C2B75) () has now valid token of ldapuser@ldap 2025-03-31T16:32:04.063479Z node 6 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****nuYg (C73C2B75) 2025-03-31T16:32:04.063733Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:9167, port: 9167 2025-03-31T16:32:04.063856Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-31T16:32:04.115623Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-31T16:32:04.160004Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****nuYg (C73C2B75) () has now valid token of ldapuser@ldap 2025-03-31T16:32:05.059274Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7488012287165715326:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:05.059346Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:32:09.065717Z node 6 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****nuYg (C73C2B75) 2025-03-31T16:32:09.065827Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:9167, port: 9167 2025-03-31T16:32:09.065946Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-31T16:32:09.119577Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-31T16:32:09.163951Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****nuYg (C73C2B75) () has now valid token of ldapuser@ldap |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> THDRRQuoterResourceTreeRuntimeTest::TestInactiveMultiresourceSessionDisconnectsAndThenConnectsAgain [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-62 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-63 >> TKesusTest::TestAttachOldGeneration [GOOD] >> TKesusTest::TestAttachFastPath >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-51 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-52 >> TCacheTest::MigrationDeletedPathNavigate [GOOD] >> KqpNewEngine::MultiSelect [GOOD] >> KqpNewEngine::MultiOutput >> TKesusTest::TestAttachFastPath [GOOD] >> TKesusTest::TestAttachFastPathBlocked >> TKesusTest::TestKesusConfig [GOOD] >> TKesusTest::TestLockNotFound >> TKesusTest::TestAttachFastPathBlocked [GOOD] |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest >> TKesusTest::TestLockNotFound [GOOD] >> TKesusTest::TestDeleteSemaphore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::MigrationDeletedPathNavigate [GOOD] Test command err: 2025-03-31T16:31:44.600361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:31:44.600428Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-03-31T16:31:44.800681Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 2025-03-31T16:31:44.817069Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 65543, Sender [1:174:2170], Recipient [1:70:2109]: NActors::TEvents::TEvPoison 2025-03-31T16:31:44.817861Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [1:70:2109] sender: [1:175:2067] recipient: [1:46:2093] Leader for TabletID 72057594046678944 is [1:70:2109] sender: [1:178:2067] recipient: [1:24:2071] Leader for TabletID 72057594046678944 is [1:70:2109] sender: [1:179:2067] recipient: [1:177:2171] Leader for TabletID 72057594046678944 is [1:180:2172] sender: [1:181:2067] recipient: [1:177:2171] 2025-03-31T16:31:44.824909Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateInit, received event# 268828672, Sender [1:177:2171], Recipient [1:180:2172]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:31:44.826126Z node 1 :FLAT_TX_SCHEMESHARD TRACE: 
StateInit, received event# 268828673, Sender [1:177:2171], Recipient [1:180:2172]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:31:44.826268Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateInit, received event# 268828684, Sender [1:177:2171], Recipient [1:180:2172]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:31:44.844293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:31:44.844405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:31:44.844448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:31:44.844495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:31:44.844544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:31:44.844573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:31:44.844668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:31:44.844765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:31:44.845175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:31:44.865419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:31:44.866810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:31:44.866976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:31:44.867213Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateInit, received event# 65542, Sender [1:7238242728502259555:7369577], Recipient [1:180:2172]: TSystem::Undelivered 2025-03-31T16:31:44.867254Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateInit, processing event TEvents::TEvUndelivered 2025-03-31T16:31:44.867287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:31:44.867322Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:31:44.867570Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [RootDataErasureManager] Clear operation queue and active pipes 2025-03-31T16:31:44.867617Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:31:44.868313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 1, at schemeshard: 72057594046678944 2025-03-31T16:31:44.868447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:31:44.868517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:31:44.868954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:31:44.869037Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, 
NumberDataErasureTenantsInRunning# 0 2025-03-31T16:31:44.869287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:31:44.869399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:31:44.869481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:31:44.869611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:31:44.869718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:31:44.869876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:31:44.870220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:31:44.870340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:31:44.870708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:31:44.870792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:31:44.870939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:31:44.871045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:31:44.871166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:31:44.871369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:31:44.871460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:31:44.871604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:31:44.871869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-03-31T16:31:44.872050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-03-31T16:31:44.872123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-03-31T16:31:44.872186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-03-31T16:31:44.872446Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-03-31T16:31:44.873982Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-31T16:31:44.874167Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [RootDataErasureManager] Stop 2025-03-31T16:31:44.874752Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435082, Sender [1:180:2172], Recipient [1:180:2172]: NKikimr::NSchemeShard::TEvPrivate::TEvServerlessStorageBilling 2025-03-31T16:31:44.874802Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvServerlessStorageBilling 2025-03-31T16:31:44.875572Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:31:44.875632Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:31:44.875777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:31:44.875844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:31:44.875889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:31:44.875924Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-31T16:31:44.876248Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274399233, Sender [1:196:2172], Recipient [1:180:2172]: NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2025-03-31T16:31:44.876290Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxAllocatorClient::TEvAllocateResult 2025-03-31T16:31:44.876330Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 Leader for TabletID 72057594046678944 is [1:180:2172] sender: [1:211:2067] recipient: [1:24:2071] 2025-03-31T16:31:44.898430Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:210:2189], Recipient [1:180:2172]: {TEvModifySchemeTransaction txid# 101 TabletId# 72057594046678944} 2025-03-31T16:31:44.898553Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-03-31T16:31:44.995634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateSubDomain SubDomain { Name: "USER_0" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:31:44.995952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /Root/USER_0, opId: 101:0, at schemeshard: 72057594046678944 2025-03-31T16:31:44.996075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: Root, child name: USER_0, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-31T16:31:44.996284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-03-31T16:31:44.996482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-31T16:31:44.996583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:31:44.996615Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 101:0, at schemeshard: 7205759404667894 ... 
72057594046678944, status: OK, at schemeshard: 72075186233409549 2025-03-31T16:31:45.509857Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125003, Sender [1:423:2337], Recipient [1:491:2383]: NKikimrScheme.TEvSyncTenantSchemeShard DomainSchemeShard: 72057594046678944 DomainPathId: 2 TabletID: 72075186233409549 Generation: 2 EffectiveACLVersion: 0 SubdomainVersion: 3 UserAttributesVersion: 1 TenantHive: 18446744073709551615 TenantSysViewProcessor: 18446744073709551615 TenantRootACL: "" TenantStatisticsAggregator: 18446744073709551615 TenantGraphShard: 18446744073709551615 2025-03-31T16:31:45.509961Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvSyncTenantSchemeShard 2025-03-31T16:31:45.510050Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvSyncTenantSchemeShard, at schemeshard: 72057594046678944, msg: DomainSchemeShard: 72057594046678944 DomainPathId: 2 TabletID: 72075186233409549 Generation: 2 EffectiveACLVersion: 0 SubdomainVersion: 3 UserAttributesVersion: 1 TenantHive: 18446744073709551615 TenantSysViewProcessor: 18446744073709551615 TenantRootACL: "" TenantStatisticsAggregator: 18446744073709551615 TenantGraphShard: 18446744073709551615 2025-03-31T16:31:45.510143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoExecute, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-31T16:31:45.510184Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-03-31T16:31:45.510288Z node 1 :FLAT_TX_SCHEMESHARD INFO: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2], Generation: 2, ActorId:[1:423:2337], EffectiveACLVersion: 0, SubdomainVersion: 3, UserAttributesVersion: 1, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 3, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 2025-03-31T16:31:45.510375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-31T16:31:45.510418Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 { Path: Root/USER_0/DirA TableId: [72057594046678944:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 SchemeShard: 72075186233409549 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-31T16:31:45.843628Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:31:45.843701Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-03-31T16:31:45.894283Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add 
transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 Leader for TabletID 72057594046678944 is [2:70:2109] sender: [2:175:2067] recipient: [2:46:2093] Leader for TabletID 72057594046678944 is [2:70:2109] sender: [2:178:2067] recipient: [2:24:2071] Leader for TabletID 72057594046678944 is [2:70:2109] sender: [2:179:2067] recipient: [2:177:2171] Leader for TabletID 72057594046678944 is [2:180:2172] sender: [2:181:2067] recipient: [2:177:2171] 2025-03-31T16:31:45.942297Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:31:45.942361Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 101 Leader for TabletID 72057594046678944 is [2:180:2172] sender: [2:211:2067] recipient: [2:24:2071] 2025-03-31T16:31:45.970144Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2025-03-31T16:31:45.977239Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 102:0, at schemeshard: 72057594046678944 Leader for TabletID 72075186233409546 is [0:0:0] sender: [2:247:2067] recipient: [2:238:2213] IGNORE Leader for TabletID 72075186233409546 is [0:0:0] sender: [2:247:2067] recipient: [2:238:2213] Leader for TabletID 72075186233409547 is [0:0:0] sender: [2:248:2067] recipient: [2:240:2215] IGNORE Leader for TabletID 72075186233409547 is [0:0:0] sender: [2:248:2067] recipient: [2:240:2215] Leader for TabletID 72075186233409546 is [0:0:0] sender: [2:250:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409546 is [0:0:0] sender: [2:250:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409547 is [0:0:0] sender: [2:253:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409547 is [0:0:0] sender: [2:253:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409546 is [2:251:2219] sender: [2:255:2067] recipient: [2:238:2213] Leader for TabletID 72075186233409547 is [2:254:2221] sender: [2:256:2067] recipient: [2:240:2215] TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 TestWaitNotification wait txId: 102 2025-03-31T16:31:45.996843Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 Leader for TabletID 72075186233409547 is [2:254:2221] sender: [2:289:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409546 is [2:251:2219] sender: [2:290:2067] recipient: [2:24:2071] FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestWaitNotification: OK eventTxId 102 TestModificationResults wait 
txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestModificationResults wait txId: 104 2025-03-31T16:31:46.042859Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 104:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 103 TestWaitNotification wait txId: 104 Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:340:2067] recipient: [2:336:2285] IGNORE Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:340:2067] recipient: [2:336:2285] Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:341:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:341:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409548 is [2:343:2289] sender: [2:344:2067] recipient: [2:336:2285] Leader for TabletID 72075186233409548 is [2:343:2289] sender: [2:345:2067] recipient: [2:24:2071] TestWaitNotification: OK eventTxId 103 TestWaitNotification: OK eventTxId 104 TestModificationResults wait txId: 105 2025-03-31T16:31:46.183426Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpUpgradeSubDomain, opId: 105:0, at schemeshard: 72057594046678944 Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:420:2067] recipient: [2:415:2333] IGNORE Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:420:2067] recipient: [2:415:2333] Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:422:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:422:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409549 is [2:423:2337] sender: [2:424:2067] recipient: [2:415:2333] 2025-03-31T16:31:46.230736Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:31:46.230803Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 Leader for TabletID 72075186233409549 is [2:423:2337] sender: [2:451:2067] recipient: [2:24:2071] TestWaitNotification: OK eventTxId 105 TestModificationResults wait txId: 106 2025-03-31T16:31:46.287281Z node 2 :FLAT_TX_SCHEMESHARD WARN: Mark as Migrated path id [OwnerId: 72057594046678944, LocalPathId: 3] 2025-03-31T16:31:46.287347Z node 2 :FLAT_TX_SCHEMESHARD WARN: Mark as Migrated path id [OwnerId: 72057594046678944, LocalPathId: 4] 2025-03-31T16:31:46.287714Z node 2 :FLAT_TX_SCHEMESHARD ERROR: TWait ProgressState, dependent transaction: 106, parent transaction: 105, at schemeshard: 72057594046678944 2025-03-31T16:31:46.287849Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpUpgradeSubDomainDecision, opId: 106:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-03-31T16:31:46.306002Z node 2 :FLAT_TX_SCHEMESHARD WARN: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 2025-03-31T16:31:46.306439Z node 2 :FLAT_TX_SCHEMESHARD WARN: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 106 TestModificationResults wait txId: 107 TestModificationResult got TxId: 107, wait until txId: 107 TestWaitNotification wait txId: 107 TestWaitNotification: OK eventTxId 107 
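The LDAP_AUTH_PROVIDER traces earlier in this output show how nested group membership is resolved when the server does not expand it: an initial subtree search for the user requests the membership attribute (memberOf or groupDN), a second search uses the Active Directory matching-rule-in-chain OID 1.2.840.113556.1.4.1941, and the provider then falls back to a level-by-level tree traversal, batching all parent-group lookups of one level into a single (|(entryDn=...)(entryDn=...)) disjunction. A minimal sketch of that per-level filter construction (a hypothetical helper for illustration, not the actual ydb/core/security implementation; real code must also escape DN values per RFC 4515):

    #include <cstdio>
    #include <string>
    #include <vector>

    // Builds one traversal-level filter of the shape seen in the traces:
    //   (|(entryDn=<dn1>)(entryDn=<dn2>)...)
    // The caller runs a subtree search with this filter, requesting the
    // membership attribute, and repeats with the newly discovered parent
    // DNs until a level yields no new groups.
    std::string BuildParentGroupFilter(const std::vector<std::string>& groupDns) {
        std::string filter = "(|";
        for (const std::string& dn : groupDns) {
            filter += "(entryDn=" + dn + ")";  // NB: production code must RFC 4515-escape dn
        }
        filter += ")";
        return filter;
    }

    int main() {
        // The two group DNs discovered in the first traversal step above:
        std::vector<std::string> level = {
            "cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net",
            "cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net",
        };
        // Prints the same filter that appears in the 16:31:54.552219Z trace line.
        std::printf("%s\n", BuildParentGroupFilter(level).c_str());
        return 0;
    }

Each traversal level shrinks as DNs converge toward the root group (here ending at cn=people,ou=groups,...), which is why the logged filters get shorter on every iteration.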
TestModificationResults wait txId: 108 2025-03-31T16:31:46.349720Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 108:0, at schemeshard: 72075186233409549 Leader for TabletID 72075186233409550 is [0:0:0] sender: [2:555:2067] recipient: [2:552:2441] IGNORE Leader for TabletID 72075186233409550 is [0:0:0] sender: [2:555:2067] recipient: [2:552:2441] Leader for TabletID 72075186233409550 is [0:0:0] sender: [2:557:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409550 is [0:0:0] sender: [2:557:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409550 is [2:559:2445] sender: [2:560:2067] recipient: [2:552:2441] Leader for TabletID 72075186233409550 is [2:559:2445] sender: [2:561:2067] recipient: [2:24:2071] TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 Forgetting tablet 72075186233409548 TestWaitNotification: OK eventTxId 108 2025-03-31T16:31:48.714811Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:31:48.714886Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:31:48.778478Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:31:48.778548Z node 2 :IMPORT WARN: Table profiles were not loaded >> KqpRanges::UpdateWhereInNoFullScan+UseSink [GOOD] >> KqpRanges::UpdateWhereInNoFullScan-UseSink >> KqpNewEngine::DuplicatedResults [GOOD] >> KqpNewEngine::FlatmapLambdaMutiusedConnections >> TKesusTest::TestReleaseLockFailure ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::SimpleVectorIndexOrderByCosineDistanceNotNullableLevel2 [GOOD] Test command err: Trying to start YDB, gRPC: 17850, MsgBus: 23120 2025-03-31T16:31:04.966586Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012046369252059:2125];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:04.966627Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d9f/r3tmp/tmpIFFawX/pdisk_1.dat 2025-03-31T16:31:05.427273Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:31:05.439866Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:05.439994Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:05.442759Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17850, node 1 2025-03-31T16:31:05.523787Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:05.523815Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:05.523829Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:05.523954Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23120 TClient is connected to server localhost:23120 WaitRootIsUp 'Root'... 
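One of the permanent-error cases earlier in this output comes from a deliberately malformed filter: &(uid=ldapuser)() is rejected by the server ("Bad search filter") because RFC 4515 requires the AND expression itself to be parenthesized and forbids the empty item "()"; a well-formed conjunction looks like (&(uid=ldapuser)(objectClass=person)). A small standalone check illustrating those two violations (a narrow heuristic written for this sketch, not a full RFC 4515 parser and not YDB code):

    #include <cassert>
    #include <string>

    // Rejects the two problems in the logged filter: a top-level expression
    // that is not parenthesized, and the empty item "()".
    bool LooksLikeValidLdapFilter(const std::string& f) {
        if (f.empty() || f.front() != '(' || f.back() != ')') {
            return false;            // must be one parenthesized expression
        }
        int depth = 0;
        char prev = '\0';
        for (char c : f) {
            if (c == '(') {
                ++depth;
            } else if (c == ')') {
                if (depth == 0 || prev == '(') {
                    return false;    // unbalanced, or empty item "()"
                }
                --depth;
            }
            prev = c;
        }
        return depth == 0;
    }

    int main() {
        assert(!LooksLikeValidLdapFilter("&(uid=ldapuser)()"));  // the logged failure
        assert(LooksLikeValidLdapFilter("(&(uid=ldapuser)(objectClass=person))"));
        return 0;
    }

The test asserts only that such a filter produces a permanent (non-retryable) ticket error rather than a retry loop, which is what the TICKET_PARSER line records.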
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:31:06.156436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:06.175408Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:31:06.186142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:31:06.355184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:06.514143Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:31:06.570766Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:08.385367Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012063549122951:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:08.385517Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:08.707825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:31:08.741860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:31:08.773977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:31:08.803495Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:31:08.839530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:31:08.907670Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:31:08.994079Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012063549123471:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:08.994173Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:08.994358Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012063549123476:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:08.998639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:31:09.012224Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012063549123478:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:31:09.083188Z node 1 :TX_PROXY ERROR: Actor# [1:7488012067844090831:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:31:09.963023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:31:09.967234Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488012046369252059:2125];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:09.967293Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:31:10.175494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-03-31T16:31:10.233554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-03-31T16:31:20.413830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:31:20.413860Z node 1 :IMPORT WARN: Table profiles were not loaded Trying to start YDB, gRPC: 6557, MsgBus: 4958 2025-03-31T16:31:33.761646Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012171974919293:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:33.761722Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d9f/r3tmp/tmpQWFn3g/pdisk_1.dat 2025-03-31T16:31:33.916533Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:31:33.919175Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:33.919261Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:33.920567Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6557, node 2 2025-03-31T16:31:33.972269Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:33.972296Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:33.972304Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:33.972498Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4958 TClient is connected to server localhost:4958 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:31:34.382203Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:34.399225Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:34.473018Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:34.606064Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:34.674363Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:37.039280Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012189154790242:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:37.039405Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:37.067926Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:31:37.102349Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:31:37.132346Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:31:37.166521Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:31:37.198685Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:31:37.242123Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:31:37.299549Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012189154790756:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:37.299643Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:37.299787Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012189154790761:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:37.303919Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:31:37.316574Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488012189154790763:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:31:37.409516Z node 2 :TX_PROXY ERROR: Actor# [2:7488012189154790818:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:31:38.437331Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:31:38.659875Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-03-31T16:31:38.747942Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-03-31T16:31:38.765284Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488012171974919293:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:38.767148Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:31:38.829387Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710760:0, at schemeshard: 72057594046644480 2025-03-31T16:31:38.868667Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710763:0, at schemeshard: 72057594046644480 2025-03-31T16:31:38.900465Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037926 not found 2025-03-31T16:31:38.900499Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037925 not found 2025-03-31T16:31:38.900513Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037924 not found 2025-03-31T16:31:48.869701Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:31:48.869736Z node 2 :IMPORT WARN: Table profiles were not loaded >> KqpSqlIn::SimpleKey ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAttachFastPathBlocked [GOOD] Test command err: 2025-03-31T16:32:11.230258Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-31T16:32:11.230384Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-31T16:32:11.248743Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-31T16:32:11.248861Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-31T16:32:11.274304Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-31T16:32:11.274844Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:130:2156], cookie=271765683837600198, session=0, seqNo=0) 2025-03-31T16:32:11.275040Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] 
Created new session 1 2025-03-31T16:32:11.286980Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:130:2156], cookie=271765683837600198, session=1) 2025-03-31T16:32:11.287360Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:130:2156], cookie=12076831376938599514, session=0, seqNo=0) 2025-03-31T16:32:11.287467Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-03-31T16:32:11.299545Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:130:2156], cookie=12076831376938599514, session=2) 2025-03-31T16:32:11.638810Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-31T16:32:11.638910Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-31T16:32:11.657004Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-31T16:32:11.657125Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-31T16:32:11.671600Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-31T16:32:11.672030Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:130:2156], cookie=7861977219223781497, session=1, seqNo=0) 2025-03-31T16:32:11.694768Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:130:2156], cookie=7861977219223781497, session=1) 2025-03-31T16:32:12.024837Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-31T16:32:12.024928Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-31T16:32:12.036673Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-31T16:32:12.036870Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-31T16:32:12.050207Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-31T16:32:12.050992Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:132:2158], cookie=11430089944198564633, session=0, seqNo=0) 2025-03-31T16:32:12.051160Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-03-31T16:32:12.073642Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:132:2158], cookie=11430089944198564633, session=1) 2025-03-31T16:32:12.454804Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-31T16:32:12.454907Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-31T16:32:12.473627Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-31T16:32:12.473786Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-31T16:32:12.487850Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-31T16:32:12.488161Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Execute (sender=[4:132:2158], cookie=14960354155901148113, path="") 2025-03-31T16:32:12.511032Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Complete (sender=[4:132:2158], cookie=14960354155901148113, status=SUCCESS) 2025-03-31T16:32:12.512029Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:141:2165], cookie=3009445915593381155, session=0, seqNo=0) 2025-03-31T16:32:12.512157Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-03-31T16:32:12.524331Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:141:2165], 
cookie=3009445915593381155, session=1) 2025-03-31T16:32:12.525100Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:142:2166], cookie=111, session=0, seqNo=0) 2025-03-31T16:32:12.525225Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-03-31T16:32:12.525399Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Fast-path attach session=1 to sender=[4:142:2166], cookie=222, seqNo=0 2025-03-31T16:32:12.537679Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:142:2166], cookie=111, session=2) 2025-03-31T16:32:12.855986Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-31T16:32:12.856084Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-31T16:32:12.875972Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-31T16:32:12.876313Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-31T16:32:12.900950Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-31T16:32:12.901288Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Execute (sender=[5:132:2158], cookie=15725542495081746252, path="") 2025-03-31T16:32:12.913359Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Complete (sender=[5:132:2158], cookie=15725542495081746252, status=SUCCESS) 2025-03-31T16:32:12.914379Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:141:2165], cookie=16611624188565047412, session=0, seqNo=0) 2025-03-31T16:32:12.914512Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-03-31T16:32:12.926466Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:141:2165], cookie=16611624188565047412, session=1) 2025-03-31T16:32:12.927266Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:141:2165], cookie=123, session=1, semaphore="Lock1" count=18446744073709551615) 2025-03-31T16:32:12.927422Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-03-31T16:32:12.927510Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-03-31T16:32:12.927839Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:142:2166], cookie=111, session=0, seqNo=0) 2025-03-31T16:32:12.927915Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-03-31T16:32:12.928020Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:142:2166], cookie=222, session=1, seqNo=0) 2025-03-31T16:32:12.939924Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:141:2165], cookie=123) 2025-03-31T16:32:12.940016Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:142:2166], cookie=111, session=2) 2025-03-31T16:32:12.940066Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:142:2166], cookie=222, session=1) >> TCacheTestWithDrops::LookupErrorUponEviction [GOOD] >> TKesusTest::TestSessionDetach >> TKesusTest::TestDeleteSemaphore [GOOD] >> TKesusTest::TestDescribeSemaphoreWatches >> TKesusTest::TestQuoterAccountResourcesAggregateClients [GOOD] >> TKesusTest::TestQuoterAccountResourcesAggregateResources >> DataShardWrite::ExecSQLUpsertPrepared-EvWrite-Volatile [GOOD] >> DataShardWrite::ExecSQLUpsertPrepared+EvWrite-Volatile >> 
KqpRanges::UpdateMulti >> TKesusTest::TestSessionDetach [GOOD] >> TKesusTest::TestSessionDetachFutureId ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTestWithDrops::LookupErrorUponEviction [GOOD] Test command err: 2025-03-31T16:31:44.608645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:31:44.608708Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-03-31T16:31:44.796035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-03-31T16:31:44.819213Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-03-31T16:31:44.821227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 102:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-03-31T16:31:44.865840Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 2025-03-31T16:31:45.397852Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:31:45.397913Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-03-31T16:31:45.450253Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 >> TKesusTest::TestReleaseLockFailure [GOOD] >> TKesusTest::TestReleaseSemaphore >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-15 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-16 |95.0%| [TA] $(B)/ydb/core/tx/scheme_board/ut_cache/test-results/unittest/{meta.json ... results_accumulator.log} |95.0%| [TA] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_cache/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TKesusTest::TestReleaseSemaphore [GOOD] >> TKesusTest::TestSemaphoreData >> TKesusTest::TestSessionDetachFutureId [GOOD] >> TKesusTest::TestSessionDestroy >> THDRRQuoterResourceTreeRuntimeTest::TestCreateInactiveSession [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestDeleteResourceSessions [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestDistributeResourcesBetweenConsumers [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestEffectiveProps [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestDeleteResourceWithActiveChildren [GOOD] >> TKesusTest::TestSessionTimeoutAfterDetach >> IcbAsActorTests::TestHttpPostReaction >> TKesusTest::TestSessionDestroy [GOOD] >> TKesusTest::TestSessionStealing >> IcbAsActorTests::TestHttpPostReaction [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-63 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-64 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-15 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-16 |95.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> THDRRQuoterResourceTreeRuntimeTest::TestDeleteResourceWithActiveChildren [GOOD] >> TKesusTest::TestSemaphoreData [GOOD] >> TKesusTest::TestSemaphoreReleaseReacquire >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-StrictAclCheck [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-DomainLoginOnly >> KqpTx::CommitRequired >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-63 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-64 >> KqpIndexes::PrefixedVectorIndexOrderByCosineSimilarityNullableLevel2 [GOOD] >> KqpIndexes::SecondaryIndexInsert1 >> TKesusTest::TestSessionStealing [GOOD] >> TKesusTest::TestSessionStealingAnyKey >> TKesusTest::TestDescribeSemaphoreWatches [GOOD] >> TKesusTest::TestGetQuoterResourceCounters >> KqpNewEngine::LocksSingleShard [GOOD] >> KqpNewEngine::LocksMultiShard |95.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest >> IcbAsActorTests::TestHttpPostReaction [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-39 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-40 >> KqpSort::TopSortParameter >> TKesusTest::TestSessionStealingAnyKey [GOOD] >> TKesusTest::TestSemaphoreReleaseReacquire [GOOD] >> TKesusTest::TestSemaphoreSessionFailures >> TKesusTest::TestQuoterAccountResourcesAggregateResources [GOOD] >> TKesusTest::TestQuoterAccountLabels >> LdapAuthProviderTest_StartTls::LdapRefreshRemoveUserBad [GOOD] >> LdapAuthProviderTest_StartTls::LdapRefreshGroupsInfoWithError >> TAsyncIndexTests::CdcAndMergeWithReboots[PipeResets] [GOOD] >> TKesusTest::TestGetQuoterResourceCounters [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestSessionStealingAnyKey [GOOD] Test command err: 2025-03-31T16:32:13.890707Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-31T16:32:13.890843Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-31T16:32:13.908466Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-31T16:32:13.908586Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-31T16:32:13.933809Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-31T16:32:13.934398Z node 1 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxSessionAttach::Execute (sender=[1:130:2156], cookie=5560715596370620241, session=0, seqNo=0) 2025-03-31T16:32:13.934611Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-03-31T16:32:13.946695Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:130:2156], cookie=5560715596370620241, session=1) 2025-03-31T16:32:13.948533Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDetach::Execute (sender=[1:130:2156], cookie=6233141593263121154, session=2) 2025-03-31T16:32:13.948623Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDetach::Complete (sender=[1:130:2156], cookie=6233141593263121154) 2025-03-31T16:32:13.949189Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Fast-path detach session=1 from sender=[1:130:2156], cookie=2731537670301713460 2025-03-31T16:32:13.949982Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:130:2156], cookie=16494261062807632338, session=1, seqNo=0) 2025-03-31T16:32:13.962231Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:130:2156], cookie=16494261062807632338, session=1) 2025-03-31T16:32:13.962648Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:130:2156], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-03-31T16:32:13.962830Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-03-31T16:32:13.962933Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-03-31T16:32:13.963141Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDetach::Execute (sender=[1:130:2156], cookie=4490674075326437167, session=1) 2025-03-31T16:32:13.973516Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2025-03-31T16:32:13.973621Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 2025-03-31T16:32:13.973675Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 1 "Lock1" owner link 2025-03-31T16:32:13.985923Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:130:2156], cookie=111) 2025-03-31T16:32:13.986015Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDetach::Complete (sender=[1:130:2156], cookie=4490674075326437167) 2025-03-31T16:32:13.986091Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2025-03-31T16:32:14.304716Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-31T16:32:14.304818Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-31T16:32:14.322928Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-31T16:32:14.323046Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-31T16:32:14.337975Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-31T16:32:14.338284Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Execute (sender=[2:130:2156], cookie=11807615720795416867, path="") 2025-03-31T16:32:14.361246Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Complete (sender=[2:130:2156], cookie=11807615720795416867, status=SUCCESS) 2025-03-31T16:32:14.361969Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:139:2163], cookie=111, session=0, seqNo=0) 2025-03-31T16:32:14.362109Z node 2 
:KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-03-31T16:32:14.362268Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDetach::Execute (sender=[2:139:2163], cookie=16604782348297904396, session=1) 2025-03-31T16:32:14.372598Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2025-03-31T16:32:14.372661Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 2025-03-31T16:32:14.384413Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:139:2163], cookie=111, session=1) 2025-03-31T16:32:14.384488Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDetach::Complete (sender=[2:139:2163], cookie=16604782348297904396) 2025-03-31T16:32:14.384537Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2025-03-31T16:32:14.711356Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-31T16:32:14.711459Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-31T16:32:14.730513Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-31T16:32:14.730792Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-31T16:32:14.744840Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-31T16:32:14.745332Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:132:2158], cookie=4594696035219164258, session=0, seqNo=0) 2025-03-31T16:32:14.745480Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-03-31T16:32:14.768431Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:132:2158], cookie=4594696035219164258, session=1) 2025-03-31T16:32:14.769218Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDestroy::Execute (sender=[3:132:2158], cookie=1813054288039068514, session=1) 2025-03-31T16:32:14.769331Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 2025-03-31T16:32:14.781661Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDestroy::Complete (sender=[3:132:2158], cookie=1813054288039068514) 2025-03-31T16:32:14.782519Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[3:149:2173], cookie=6871166108857841988) 2025-03-31T16:32:14.782587Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[3:149:2173], cookie=6871166108857841988) 2025-03-31T16:32:14.783196Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:152:2176], cookie=14118111409873936827, session=0, seqNo=0) 2025-03-31T16:32:14.783325Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-03-31T16:32:14.795498Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:152:2176], cookie=14118111409873936827, session=2) 2025-03-31T16:32:14.796693Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDestroy::Execute (sender=[3:132:2158], cookie=11663827784036120633, session=2) 2025-03-31T16:32:14.796797Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 2 2025-03-31T16:32:14.809008Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDestroy::Complete (sender=[3:132:2158], cookie=11663827784036120633) 2025-03-31T16:32:15.126203Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-31T16:32:15.126316Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 
2025-03-31T16:32:15.142592Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-31T16:32:15.142760Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-31T16:32:15.157175Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-31T16:32:15.158086Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:132:2158], cookie=12345, session=0, seqNo=0) 2025-03-31T16:32:15.158251Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-03-31T16:32:15.181059Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:132:2158], cookie=12345, session=1) 2025-03-31T16:32:15.181836Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:138:2163], cookie=23456, session=1, seqNo=0) 2025-03-31T16:32:15.194190Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:138:2163], cookie=23456, session=1) 2025-03-31T16:32:15.583747Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-31T16:32:15.583847Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-31T16:32:15.606496Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-31T16:32:15.606831Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-31T16:32:15.632026Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-31T16:32:15.632930Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:132:2158], cookie=12345, session=0, seqNo=0) 2025-03-31T16:32:15.633083Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-03-31T16:32:15.647794Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:132:2158], cookie=12345, session=1) 2025-03-31T16:32:15.648573Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:139:2163], cookie=23456, session=1, seqNo=0) 2025-03-31T16:32:15.661763Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:139:2163], cookie=23456, session=1) >> TKesusTest::TestAcquireWaiterDowngrade >> TKesusTest::TestSemaphoreSessionFailures [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestGetQuoterResourceCounters [GOOD] Test command err: 2025-03-31T16:32:12.319476Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-31T16:32:12.319576Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-31T16:32:12.332440Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-31T16:32:12.332546Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-31T16:32:12.358563Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-31T16:32:12.359001Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Execute (sender=[1:130:2156], cookie=9736710812774649784, path="/foo/bar/baz") 2025-03-31T16:32:12.373577Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Complete (sender=[1:130:2156], cookie=9736710812774649784, status=SUCCESS) 2025-03-31T16:32:12.374181Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigGet::Execute (sender=[1:139:2163], cookie=11623361440873908274) 2025-03-31T16:32:12.386575Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigGet::Complete (sender=[1:139:2163], cookie=11623361440873908274) 
2025-03-31T16:32:12.387277Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Execute (sender=[1:144:2168], cookie=12448310664255125524, path="/foo/bar/baz") 2025-03-31T16:32:12.399783Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Complete (sender=[1:144:2168], cookie=12448310664255125524, status=SUCCESS) 2025-03-31T16:32:12.400413Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigGet::Execute (sender=[1:149:2173], cookie=15986444119998110362) 2025-03-31T16:32:12.412799Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigGet::Complete (sender=[1:149:2173], cookie=15986444119998110362) 2025-03-31T16:32:12.424381Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-31T16:32:12.424504Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-31T16:32:12.425121Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-31T16:32:12.425768Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-31T16:32:12.462235Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-31T16:32:12.462565Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigGet::Execute (sender=[1:191:2205], cookie=575493255310839960) 2025-03-31T16:32:12.487833Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigGet::Complete (sender=[1:191:2205], cookie=575493255310839960) 2025-03-31T16:32:12.488515Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Execute (sender=[1:199:2212], cookie=4966787060879431976, path="/foo/bar/baz") 2025-03-31T16:32:12.500706Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Complete (sender=[1:199:2212], cookie=4966787060879431976, status=SUCCESS) 2025-03-31T16:32:12.501380Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Execute (sender=[1:204:2217], cookie=9310817595322187053, path="/foo/bar/baz") 2025-03-31T16:32:12.501467Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Complete (sender=[1:204:2217], cookie=9310817595322187053, status=PRECONDITION_FAILED) 2025-03-31T16:32:12.935809Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-31T16:32:12.935914Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-31T16:32:12.950056Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-31T16:32:12.950204Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-31T16:32:12.965215Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-31T16:32:12.965482Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:130:2156], cookie=13901890757910971842, name="Lock1") 2025-03-31T16:32:12.965568Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:130:2156], cookie=13901890757910971842) 2025-03-31T16:32:13.334358Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-31T16:32:13.334459Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-31T16:32:13.348980Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-31T16:32:13.349188Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-31T16:32:13.363283Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-31T16:32:13.363800Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:132:2158], cookie=15667030878082304950, session=0, seqNo=0) 
2025-03-31T16:32:13.363951Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-03-31T16:32:13.386695Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:132:2158], cookie=15667030878082304950, session=1) 2025-03-31T16:32:13.387036Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[3:132:2158], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-03-31T16:32:13.387242Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-03-31T16:32:13.387340Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-03-31T16:32:13.399468Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[3:132:2158], cookie=111) 2025-03-31T16:32:13.400085Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[3:143:2167], cookie=11824474799543468841, name="Lock1", force=0) 2025-03-31T16:32:13.412363Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[3:143:2167], cookie=11824474799543468841) 2025-03-31T16:32:13.413002Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[3:148:2172], cookie=8106682140746491368, name="Sem1", force=0) 2025-03-31T16:32:13.425329Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[3:148:2172], cookie=8106682140746491368) 2025-03-31T16:32:13.425933Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[3:153:2177], cookie=11423406596012279453, name="Sem1", limit=42) 2025-03-31T16:32:13.426073Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 2 "Sem1" 2025-03-31T16:32:13.438122Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[3:153:2177], cookie=11423406596012279453) 2025-03-31T16:32:13.438683Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[3:158:2182], cookie=15772202368553982969, name="Sem1", force=0) 2025-03-31T16:32:13.438787Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Deleting semaphore 2 "Sem1" 2025-03-31T16:32:13.450760Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[3:158:2182], cookie=15772202368553982969) 2025-03-31T16:32:13.451307Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[3:163:2187], cookie=11393896757267094454, name="Sem1", force=0) 2025-03-31T16:32:13.463577Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[3:163:2187], cookie=11393896757267094454) 2025-03-31T16:32:13.782041Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-31T16:32:13.782153Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-31T16:32:13.801993Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-31T16:32:13.802150Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-31T16:32:13.816373Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-31T16:32:13.816882Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:132:2158], cookie=14498571487913900713, session=0, seqNo=0) 2025-03-31T16:32:13.817037Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-03-31T16:32:13.839691Z node 4 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxSessionAttach::Complete (sender=[4:132:2158], cookie=14498571487913900713, session=1) 2025-03-31T16:32:13.839998Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:132:2158], cookie=6194092931385209394, session=0, seqNo=0) 2025-03-31T16:32:13.840130Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-03-31T16:32:13.852273Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:132:2158], cookie=6194092931385209394, session=2) 2025-03-31T16:32:13.852533Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Fast-path detach session=2 from sender=[4:132:2158], cookie=9111727265849016181 2025-03-31T16:32:13.853028Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[4:144:2168], cookie=3401625104643320634, name="Sem1", limit=3) 2025-03-31T16:32:13.853173Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 1 "Sem1" 2025-03-31T16:32:13.865262Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[4:144:2168], cookie=3401625104643320634) 2025-03-31T16:32:13.865580Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:132:2158], cookie=112, name="Sem1") 2025-03-31T16:32:13.865668Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:132:2158], cookie=112) 2025-03-31T16:32:13.865903Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:132:2158], cookie=113, name="Sem1") 2025-03-31T16:32:13.865961Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:132:2158], cookie=113) 2025-03-31T16:32:13.866151Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:132:2158], cookie=14132742252836468371, session=2, seqNo=0) 2025-03-31T16:32:13.878141Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:132:2158], cookie=14132742252836468371, session=2) 2025-03-31T16:32:13.878478Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:132:2158], cookie=114, name="Sem1") 2025-03-31T16:32:13.878565Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:132:2158], cookie=114) 2025-03-31T16:32:13.879887Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:132:2158], cookie=115, name="Sem1") 2025-03-31T16:32:13.879962Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:132:2158], cookie=115) 2025-03-31T16:32:13.880356Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[4:151:2175], cookie=5632266817254989996, name="Sem1") 2025-03-31T16:32:13.892290Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[4:151:2175], cookie=5632266817254989996) 2025-03-31T16:32:13.892576Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:132:2158], cookie=116, session=1, semaphore="Sem1" count=1) 2025-03-31T16:32:13.892689Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2025-03-31T16:32:13.906883Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:132:2158], cookie=116) 2025-03-31T16:32:13.907409Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:132:2158], 
cookie=117, session=2, semaphore="Sem1" count=2) 2025-03-31T16:32:13.907567Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #2 session 2 2025-03-31T16:32:13.919566Z node 4 :KESUS_TABLET DEBUG: [720575 ... 4.854470Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 2 "Sem2" 2025-03-31T16:32:14.866959Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[4:202:2220], cookie=2216471405081455540) 2025-03-31T16:32:14.867394Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:132:2158], cookie=126, session=1, semaphore="Sem2" count=3) 2025-03-31T16:32:14.867553Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Sem2" queue: next order #5 session 1 2025-03-31T16:32:14.879920Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:132:2158], cookie=126) 2025-03-31T16:32:14.880205Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:132:2158], cookie=127, name="Sem2") 2025-03-31T16:32:14.880289Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:132:2158], cookie=127) 2025-03-31T16:32:14.880547Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:132:2158], cookie=128, session=1, semaphore="Sem2" count=3) 2025-03-31T16:32:14.892515Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:132:2158], cookie=128) 2025-03-31T16:32:15.233719Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:15.246206Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:15.256886Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:132:2158], cookie=129, session=1, semaphore="Sem2" count=2) 2025-03-31T16:32:15.269347Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:132:2158], cookie=129) 2025-03-31T16:32:15.269811Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:132:2158], cookie=130, name="Sem2") 2025-03-31T16:32:15.269920Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:132:2158], cookie=130) 2025-03-31T16:32:15.270200Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:132:2158], cookie=131, session=1, semaphore="Sem2" count=1) 2025-03-31T16:32:15.282432Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:132:2158], cookie=131) 2025-03-31T16:32:15.282854Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:132:2158], cookie=132, name="Sem2") 2025-03-31T16:32:15.282936Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:132:2158], cookie=132) 2025-03-31T16:32:15.283242Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:132:2158], cookie=133, name="Sem2") 2025-03-31T16:32:15.283319Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:132:2158], cookie=133) 2025-03-31T16:32:15.583408Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-31T16:32:15.583498Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-31T16:32:15.600156Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxInitSchema::Complete 2025-03-31T16:32:15.600285Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-31T16:32:15.625570Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-31T16:32:15.630179Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:132:2158], cookie=67789429060217245, path="/Root1", config={ MaxUnitsPerSecond: 1000 }) 2025-03-31T16:32:15.630386Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root1" 2025-03-31T16:32:15.645029Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:132:2158], cookie=67789429060217245) 2025-03-31T16:32:15.645658Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:141:2165], cookie=1433099384117747261, path="/Root1/Res", config={ }) 2025-03-31T16:32:15.645938Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root1/Res" 2025-03-31T16:32:15.661622Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:141:2165], cookie=1433099384117747261) 2025-03-31T16:32:15.662271Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:146:2170], cookie=13499299190398749285, path="/Root2", config={ MaxUnitsPerSecond: 1000 }) 2025-03-31T16:32:15.662456Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 3 "Root2" 2025-03-31T16:32:15.675099Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:146:2170], cookie=13499299190398749285) 2025-03-31T16:32:15.675718Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:151:2175], cookie=15502166713029011358, path="/Root2/Res", config={ }) 2025-03-31T16:32:15.675953Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 4 "Root2/Res" 2025-03-31T16:32:15.691946Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:151:2175], cookie=15502166713029011358) 2025-03-31T16:32:15.692587Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:156:2180], cookie=13901265751723072870, path="/Root2/Res/Subres", config={ }) 2025-03-31T16:32:15.692831Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 5 "Root2/Res/Subres" 2025-03-31T16:32:15.705145Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:156:2180], cookie=13901265751723072870) 2025-03-31T16:32:15.706421Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:161:2185]. Cookie: 9300788522030707488. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root1/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 1000 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-03-31T16:32:15.706484Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:161:2185], cookie=9300788522030707488) 2025-03-31T16:32:15.748598Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:161:2185]. Cookie: 0. 
Data: { ResourcesInfo { ResourceId: 2 Amount: 100 StateNotification { Status: SUCCESS } } } 2025-03-31T16:32:15.790385Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:161:2185]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 100 StateNotification { Status: SUCCESS } } } 2025-03-31T16:32:15.821556Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:161:2185]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 100 StateNotification { Status: SUCCESS } } } 2025-03-31T16:32:15.822247Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvGetQuoterResourceCountersResult to [5:168:2189]. Cookie: 3710603226905896219. Data: { ResourceCounters { ResourcePath: "Root2/Res" } ResourceCounters { ResourcePath: "Root2/Res/Subres" } ResourceCounters { ResourcePath: "Root2" } ResourceCounters { ResourcePath: "Root1/Res" Allocated: 300 } ResourceCounters { ResourcePath: "Root1" Allocated: 300 } } 2025-03-31T16:32:15.823152Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:171:2192]. Cookie: 13521461869556484387. Data: { Results { ResourceId: 5 Error { Status: SUCCESS } EffectiveProps { ResourceId: 5 ResourcePath: "Root2/Res/Subres" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 1000 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-03-31T16:32:15.823210Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:171:2192], cookie=13521461869556484387) 2025-03-31T16:32:15.859514Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:171:2192]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 5 Amount: 100 StateNotification { Status: SUCCESS } } } 2025-03-31T16:32:15.902650Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:171:2192]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 5 Amount: 100 StateNotification { Status: SUCCESS } } } 2025-03-31T16:32:15.903385Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvGetQuoterResourceCountersResult to [5:176:2196]. Cookie: 1930602597323882217. Data: { ResourceCounters { ResourcePath: "Root2/Res" Allocated: 200 } ResourceCounters { ResourcePath: "Root2/Res/Subres" Allocated: 200 } ResourceCounters { ResourcePath: "Root2" Allocated: 200 } ResourceCounters { ResourcePath: "Root1/Res" Allocated: 300 } ResourceCounters { ResourcePath: "Root1" Allocated: 300 } } 2025-03-31T16:32:15.904188Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:161:2185]. Cookie: 4267089926995834872. 
Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root1/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 1000 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-03-31T16:32:15.904248Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:161:2185], cookie=4267089926995834872) 2025-03-31T16:32:15.904945Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:171:2192]. Cookie: 1322751370410403029. Data: { Results { ResourceId: 5 Error { Status: SUCCESS } EffectiveProps { ResourceId: 5 ResourcePath: "Root2/Res/Subres" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 1000 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-03-31T16:32:15.904996Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:171:2192], cookie=1322751370410403029) 2025-03-31T16:32:15.936094Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:161:2185]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 20 StateNotification { Status: SUCCESS } } } 2025-03-31T16:32:15.936212Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:171:2192]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 5 Amount: 50 StateNotification { Status: SUCCESS } } } 2025-03-31T16:32:15.936995Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvGetQuoterResourceCountersResult to [5:183:2203]. Cookie: 15389212525443759160. 
Data: { ResourceCounters { ResourcePath: "Root2/Res" Allocated: 250 } ResourceCounters { ResourcePath: "Root2/Res/Subres" Allocated: 250 } ResourceCounters { ResourcePath: "Root2" Allocated: 250 } ResourceCounters { ResourcePath: "Root1/Res" Allocated: 320 } ResourceCounters { ResourcePath: "Root1" Allocated: 320 } } >> TKesusTest::TestQuoterResourceDescribe >> DataShardSnapshots::VolatileSnapshotRefreshDiscard [GOOD] >> DataShardSnapshots::VolatileSnapshotTimeout >> TKesusTest::TestAcquireWaiterDowngrade [GOOD] >> TKesusTest::TestAcquireWaiterUpgrade ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestSemaphoreSessionFailures [GOOD] Test command err: 2025-03-31T16:32:13.658852Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-31T16:32:13.659186Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-31T16:32:13.676423Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-31T16:32:13.676550Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-31T16:32:13.702390Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-31T16:32:13.702981Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:130:2156], cookie=17414219933330474318, session=0, seqNo=0) 2025-03-31T16:32:13.703180Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-03-31T16:32:13.717098Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:130:2156], cookie=17414219933330474318, session=1) 2025-03-31T16:32:13.717443Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:130:2156], cookie=13423454182822540672, session=0, seqNo=0) 2025-03-31T16:32:13.717598Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-03-31T16:32:13.731321Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:130:2156], cookie=13423454182822540672, session=2) 2025-03-31T16:32:13.731718Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[1:130:2156], cookie=111, name="Lock1") 2025-03-31T16:32:13.743959Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[1:130:2156], cookie=111) 2025-03-31T16:32:13.744327Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:130:2156], cookie=222, session=1, semaphore="Lock1" count=18446744073709551615) 2025-03-31T16:32:13.744517Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-03-31T16:32:13.744682Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-03-31T16:32:13.759830Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:130:2156], cookie=222) 2025-03-31T16:32:13.760062Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[1:130:2156], cookie=333, name="Lock1") 2025-03-31T16:32:13.776137Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[1:130:2156], cookie=333) 2025-03-31T16:32:14.304804Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-31T16:32:14.304898Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-31T16:32:14.317332Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 
2025-03-31T16:32:14.317439Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-31T16:32:14.332857Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-31T16:32:14.333391Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:130:2156], cookie=3354834298801789227, session=0, seqNo=0) 2025-03-31T16:32:14.333567Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-03-31T16:32:14.358247Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:130:2156], cookie=3354834298801789227, session=1) 2025-03-31T16:32:14.358620Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:130:2156], cookie=15004048034889201229, session=0, seqNo=0) 2025-03-31T16:32:14.358754Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-03-31T16:32:14.370922Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:130:2156], cookie=15004048034889201229, session=2) 2025-03-31T16:32:14.371600Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[2:141:2165], cookie=4732601899844812923, name="Sem1", limit=1) 2025-03-31T16:32:14.371747Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 1 "Sem1" 2025-03-31T16:32:14.383952Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[2:141:2165], cookie=4732601899844812923) 2025-03-31T16:32:14.384321Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:130:2156], cookie=111, session=1, semaphore="Sem1" count=1) 2025-03-31T16:32:14.384470Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2025-03-31T16:32:14.384676Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:130:2156], cookie=222, session=2, semaphore="Sem1" count=1) 2025-03-31T16:32:14.397455Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:130:2156], cookie=111) 2025-03-31T16:32:14.397581Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:130:2156], cookie=222) 2025-03-31T16:32:14.398201Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:149:2173], cookie=15732984599029790050, name="Sem1") 2025-03-31T16:32:14.398318Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:149:2173], cookie=15732984599029790050) 2025-03-31T16:32:14.398844Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:152:2176], cookie=652883626675805813, name="Sem1") 2025-03-31T16:32:14.398943Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:152:2176], cookie=652883626675805813) 2025-03-31T16:32:14.399236Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[2:130:2156], cookie=333, name="Sem1") 2025-03-31T16:32:14.399358Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 2 / semaphore 1 "Sem1" waiter link 2025-03-31T16:32:14.411625Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[2:130:2156], cookie=333) 2025-03-31T16:32:14.412329Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:157:2181], cookie=10075915594880710906, name="Sem1") 2025-03-31T16:32:14.412419Z node 2 :KESUS_TABLET 
DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:157:2181], cookie=10075915594880710906) 2025-03-31T16:32:14.412934Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:160:2184], cookie=15325818177134275644, name="Sem1") 2025-03-31T16:32:14.413006Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:160:2184], cookie=15325818177134275644) 2025-03-31T16:32:14.413294Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[2:130:2156], cookie=444, name="Sem1") 2025-03-31T16:32:14.413398Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 1 "Sem1" owner link 2025-03-31T16:32:14.425810Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[2:130:2156], cookie=444) 2025-03-31T16:32:14.426532Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:165:2189], cookie=301512159113050046, name="Sem1") 2025-03-31T16:32:14.426632Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:165:2189], cookie=301512159113050046) 2025-03-31T16:32:14.427196Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:168:2192], cookie=4404480158367583188, name="Sem1") 2025-03-31T16:32:14.427258Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:168:2192], cookie=4404480158367583188) 2025-03-31T16:32:14.723057Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-31T16:32:14.723191Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-31T16:32:14.737490Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-31T16:32:14.737744Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-31T16:32:14.751845Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-31T16:32:14.752131Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[3:132:2158], cookie=9439337537590455005, name="Sem1", limit=1) 2025-03-31T16:32:14.752268Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 1 "Sem1" 2025-03-31T16:32:14.774513Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[3:132:2158], cookie=9439337537590455005) 2025-03-31T16:32:14.775025Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[3:141:2165], cookie=7693458091529383485, name="Sem2", limit=1) 2025-03-31T16:32:14.775180Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 2 "Sem2" 2025-03-31T16:32:14.787317Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[3:141:2165], cookie=7693458091529383485) 2025-03-31T16:32:14.787749Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:146:2170], cookie=4257492824087370129, name="Sem1") 2025-03-31T16:32:14.787812Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:146:2170], cookie=4257492824087370129) 2025-03-31T16:32:14.788146Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:149:2173], cookie=3027472985753020663, name="Sem2") 2025-03-31T16:32:14.788206Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:149:2173], cookie=3027472985753020663) 2025-03-31T16:32:14.798013Z node 3 
:KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-31T16:32:14.798155Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-31T16:32:14.798685Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-31T16:32:14.799146Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-31T16:32:14.835530Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-31T16:32:14.835906Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:189:2203], cookie=12335892403615300627, name="Sem1") 2025-03-31T16:32:14.836002Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:189:2203], cookie=12335892403615300627) 2025-03-31T16:32:14.836574Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:195:2208], cookie=3882528812413506892, name="Sem2") 2025-03-31T16:32:14.836645Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:195:2208], cookie=3882528812413506892) 2025-03-31T16:32:14.837175Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[3:198:2211], cookie=8130118502832354268, name="Sem1", limit=1) 2025-03-31T16:32:14.859988Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[3:198:2211], cookie=8130118502832354268) 2025-03-31T16:32:14.860692Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[3:203:2216], cookie=7307703765042671551, name="Sem2", limit=1) 2025-03-31T16:32:14.873117Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[3:203:2216], cookie=7307703765042671551) 2025-03-31T16:32:14.873848Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:208:2221], cookie=15855245347744335194, name="Sem1") 2025-03-31T16:32:14.873939Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:208:2221], cookie=1585524534 ... 
4 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 9 "Sem1" 2025-03-31T16:32:15.593142Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[4:224:2247], cookie=16155879132551625553) 2025-03-31T16:32:15.593865Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[4:229:2252], cookie=671726452807108483, name="Sem1", force=0) 2025-03-31T16:32:15.593956Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting semaphore 9 "Sem1" 2025-03-31T16:32:15.606156Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[4:229:2252], cookie=671726452807108483) 2025-03-31T16:32:15.606847Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[4:234:2257], cookie=4617082346081285241, name="Sem1", limit=1) 2025-03-31T16:32:15.606972Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 10 "Sem1" 2025-03-31T16:32:15.619574Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[4:234:2257], cookie=4617082346081285241) 2025-03-31T16:32:15.620276Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[4:239:2262], cookie=4896446515982362535, name="Sem1", force=0) 2025-03-31T16:32:15.620369Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting semaphore 10 "Sem1" 2025-03-31T16:32:15.632644Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[4:239:2262], cookie=4896446515982362535) 2025-03-31T16:32:15.633472Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[4:244:2267], cookie=11947400488739233085, name="Sem1", limit=1) 2025-03-31T16:32:15.633633Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 11 "Sem1" 2025-03-31T16:32:15.648920Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[4:244:2267], cookie=11947400488739233085) 2025-03-31T16:32:15.649543Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:132:2158], cookie=111, session=1, semaphore="Sem1" count=1) 2025-03-31T16:32:15.649734Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 11 "Sem1" queue: next order #1 session 1 2025-03-31T16:32:15.663428Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:132:2158], cookie=111) 2025-03-31T16:32:15.664056Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:132:2158], cookie=222, session=2, semaphore="Sem1" count=1) 2025-03-31T16:32:15.693529Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:132:2158], cookie=222) 2025-03-31T16:32:15.694144Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[4:132:2158], cookie=333, name="Sem1") 2025-03-31T16:32:15.694274Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 2 / semaphore 11 "Sem1" waiter link 2025-03-31T16:32:15.706495Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[4:132:2158], cookie=333) 2025-03-31T16:32:15.707150Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:132:2158], cookie=444, session=2, semaphore="Sem1" count=1) 2025-03-31T16:32:15.719687Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:132:2158], cookie=444) 2025-03-31T16:32:15.720297Z node 4 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxSemaphoreRelease::Execute (sender=[4:132:2158], cookie=555, name="Sem1") 2025-03-31T16:32:15.720418Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 11 "Sem1" owner link 2025-03-31T16:32:15.720489Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 11 "Sem1" queue: next order #3 session 2 2025-03-31T16:32:15.733155Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[4:132:2158], cookie=555) 2025-03-31T16:32:16.145616Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-31T16:32:16.145727Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-31T16:32:16.165350Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-31T16:32:16.165592Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-31T16:32:16.189081Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-31T16:32:16.189627Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:132:2158], cookie=15173085993444956783, session=0, seqNo=0) 2025-03-31T16:32:16.189784Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-03-31T16:32:16.201873Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:132:2158], cookie=15173085993444956783, session=1) 2025-03-31T16:32:16.202236Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:132:2158], cookie=112, name="Sem1", limit=5) 2025-03-31T16:32:16.202378Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 1 "Sem1" 2025-03-31T16:32:16.216025Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:132:2158], cookie=112) 2025-03-31T16:32:16.216380Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[5:132:2158], cookie=113, name="Sem1") 2025-03-31T16:32:16.228726Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[5:132:2158], cookie=113) 2025-03-31T16:32:16.229083Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:132:2158], cookie=114, name="Sem1", force=0) 2025-03-31T16:32:16.229180Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Deleting semaphore 1 "Sem1" 2025-03-31T16:32:16.241458Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:132:2158], cookie=114) 2025-03-31T16:32:16.241776Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Fast-path detach session=1 from sender=[5:132:2158], cookie=15481489958018146614 2025-03-31T16:32:16.242041Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:132:2158], cookie=115, name="Sem1", limit=5) 2025-03-31T16:32:16.255879Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:132:2158], cookie=115) 2025-03-31T16:32:16.256214Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[5:132:2158], cookie=116, name="Sem1") 2025-03-31T16:32:16.268944Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[5:132:2158], cookie=116) 2025-03-31T16:32:16.269275Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:132:2158], cookie=117, name="Sem1", force=0) 2025-03-31T16:32:16.281593Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:132:2158], 
cookie=117) 2025-03-31T16:32:16.281960Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:132:2158], cookie=118, session=1, semaphore="Sem1" count=1) 2025-03-31T16:32:16.294395Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:132:2158], cookie=118) 2025-03-31T16:32:16.294759Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[5:132:2158], cookie=119, name="Sem1") 2025-03-31T16:32:16.306944Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[5:132:2158], cookie=119) 2025-03-31T16:32:16.307333Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:132:2158], cookie=120, name="Sem1") 2025-03-31T16:32:16.307420Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:132:2158], cookie=120) 2025-03-31T16:32:16.307683Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDestroy::Execute (sender=[5:132:2158], cookie=11010555101203424537, session=1) 2025-03-31T16:32:16.307781Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 2025-03-31T16:32:16.320191Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDestroy::Complete (sender=[5:132:2158], cookie=11010555101203424537) 2025-03-31T16:32:16.320525Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:132:2158], cookie=121, name="Sem1", limit=5) 2025-03-31T16:32:16.332837Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:132:2158], cookie=121) 2025-03-31T16:32:16.333212Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[5:132:2158], cookie=122, name="Sem1") 2025-03-31T16:32:16.345597Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[5:132:2158], cookie=122) 2025-03-31T16:32:16.345937Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:132:2158], cookie=123, name="Sem1", force=0) 2025-03-31T16:32:16.358375Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:132:2158], cookie=123) 2025-03-31T16:32:16.358697Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:132:2158], cookie=124, session=1, semaphore="Sem1" count=1) 2025-03-31T16:32:16.371290Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:132:2158], cookie=124) 2025-03-31T16:32:16.371621Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[5:132:2158], cookie=125, name="Sem1") 2025-03-31T16:32:16.395134Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[5:132:2158], cookie=125) 2025-03-31T16:32:16.395511Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:132:2158], cookie=126, name="Sem1") 2025-03-31T16:32:16.395602Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:132:2158], cookie=126) 2025-03-31T16:32:16.396213Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:132:2158], cookie=127, name="Sem1", limit=5) 2025-03-31T16:32:16.396293Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:132:2158], cookie=127) 2025-03-31T16:32:16.396520Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[5:132:2158], 
cookie=128, name="Sem1") 2025-03-31T16:32:16.396584Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[5:132:2158], cookie=128) 2025-03-31T16:32:16.396782Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:132:2158], cookie=129, name="Sem1", force=0) 2025-03-31T16:32:16.396838Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:132:2158], cookie=129) 2025-03-31T16:32:16.397107Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:132:2158], cookie=130, session=1, semaphore="Sem1" count=1) 2025-03-31T16:32:16.397174Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:132:2158], cookie=130) 2025-03-31T16:32:16.397371Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[5:132:2158], cookie=131, name="Sem1") 2025-03-31T16:32:16.397429Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[5:132:2158], cookie=131) 2025-03-31T16:32:16.397644Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:132:2158], cookie=132, name="Sem1") 2025-03-31T16:32:16.397707Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:132:2158], cookie=132) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::CdcAndMergeWithReboots[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [1:135:2157] sender: [1:136:2058] recipient: [1:111:2142] 2025-03-31T16:29:56.898933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:29:56.899042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:29:56.899108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:29:56.899140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:29:56.899198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:29:56.899224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:29:56.899317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:29:56.899397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:29:56.899728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:29:57.018359Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-03-31T16:29:57.018427Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:169:2058] recipient: [1:15:2062] 2025-03-31T16:29:57.052405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:29:57.052590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:29:57.052756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:29:57.064222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:29:57.064497Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:29:57.065255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:57.065493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:29:57.071526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:57.073394Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:29:57.073475Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:57.073618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:29:57.073692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:29:57.073744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:29:57.073922Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:210:2058] recipient: [1:208:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:210:2058] recipient: [1:208:2209] Leader for TabletID 72057594037968897 is [1:214:2213] sender: [1:215:2058] recipient: [1:208:2209] 2025-03-31T16:29:57.082415Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:235:2058] recipient: [1:15:2062] 2025-03-31T16:29:57.242600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:29:57.242827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:57.243034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:29:57.243332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:29:57.243389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:57.255984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:57.256140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:29:57.256350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:57.256415Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:29:57.256451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:29:57.256481Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:29:57.261031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:57.261105Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:29:57.261146Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:29:57.267861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:57.267912Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:57.267955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:57.268011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:29:57.271987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:29:57.281154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:29:57.281364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, 
TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:135:2157] sender: [1:250:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:29:57.282352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:57.282483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 135 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:29:57.282537Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:57.282810Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:29:57.282862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:57.283035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:29:57.283146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:29:57.292151Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:29:57.292205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:29:57.292375Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:57.292415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:202:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:29:57.292729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:57.292777Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:29:57.292863Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:29:57.292894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:29:57.292931Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:29:57.292984Z no ... 
nd_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 6 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false 
DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:32:15.573134Z node 54 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-31T16:32:15.573431Z node 54 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 336us result status StatusSuccess 2025-03-31T16:32:15.574374Z node 54 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 
ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:32:15.595816Z node 54 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409551:2][72075186233409546][54:1184:2967] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-03-31T16:32:15.595944Z node 54 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409551:2][54:1153:2967] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2025-03-31T16:32:15.596115Z node 54 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409551:2][72075186233409546][54:1184:2967] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1743438735534643 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, 
LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1743438735534643 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 },{ Order: 5 Group: 1743438735534643 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 }] }
2025-03-31T16:32:15.598703Z node 54 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409551:2][72075186233409546][54:1184:2967] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 5 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 5
2025-03-31T16:32:15.598808Z node 54 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409551:2][54:1153:2967] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 }
>> TKesusTest::TestAcquireWaiterUpgrade [GOOD]
>> TKesusTest::TestAcquireWaiterChangeTimeoutToZero
>> TKesusTest::TestQuoterResourceDescribe [GOOD]
>> TKesusTest::TestQuoterResourceCreation
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-22 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-23
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-63 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-64
>> TKesusTest::TestQuoterAccountLabels [GOOD]
>> TKesusTest::TestPassesUpdatedPropsToSession
>> TKesusTest::TestAcquireWaiterChangeTimeoutToZero [GOOD]
>> TKesusTest::TestAcquireWaiterRelease
>> KqpNewEngine::MultiOutput [GOOD]
>> KqpNewEngine::MultiStatement
>> TKesusTest::TestRegisterProxy
>> KqpExtractPredicateLookup::SimpleRange
>> KqpRanges::IsNullPartial [GOOD]
>> KqpRanges::LiteralOr
>> TKesusTest::TestQuoterResourceCreation [GOOD]
>> TKesusTest::TestQuoterResourceModification
>> TKesusTest::TestPassesUpdatedPropsToSession [GOOD]
>> TKesusTest::TestAcquireWaiterRelease [GOOD]
>> TKesusTest::TestAllocatesResources
>> ReadOnlyVDisk::TestSync [GOOD]
>> TKesusTest::TestAcquireLocks
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-52 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-53
>> TKesusTest::TestRegisterProxy [GOOD]
>> TKesusTest::TestRegisterProxyBadGeneration
>> DataShardWrite::ExecSQLUpsertPrepared+EvWrite-Volatile [GOOD]
>> DataShardWrite::ExecSQLUpsertPrepared-EvWrite+Volatile
>> KqpNewEngine::FlatmapLambdaMutiusedConnections [GOOD]
>> KqpNewEngine::EmptyMapWithBroadcast
>> TKesusTest::TestRegisterProxyBadGeneration [GOOD]
>> TKesusTest::TestRegisterProxyFromDeadActor
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestPassesUpdatedPropsToSession [GOOD]
Test command err:
2025-03-31T16:32:09.520782Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937
2025-03-31T16:32:09.521016Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute
2025-03-31T16:32:09.545090Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete
2025-03-31T16:32:09.545264Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute
2025-03-31T16:32:09.573397Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete
2025-03-31T16:32:09.585793Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:130:2156], cookie=13268034812291581507, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 })
2025-03-31T16:32:09.586757Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root"
2025-03-31T16:32:09.598929Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:130:2156], cookie=13268034812291581507)
2025-03-31T16:32:09.599606Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:140:2164], cookie=2400562252173923006, path="/Root/Res", config={ })
2025-03-31T16:32:09.599826Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root/Res"
2025-03-31T16:32:09.612196Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:140:2164], cookie=2400562252173923006)
2025-03-31T16:32:09.615843Z node 1 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [1:145:2169]. Cookie: 10350567254912803627. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { BillingPeriodSec: 2 } OnDemand { BillingPeriodSec: 2 } Overshoot { BillingPeriodSec: 2 } } } } ProtocolVersion: 1 }
2025-03-31T16:32:09.615948Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[1:145:2169], cookie=10350567254912803627)
2025-03-31T16:32:09.616699Z node 1 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [1:145:2169]. Cookie: 10717342178558930919. Data: { ResourcesInfo { ResourceId: 2 AcceptedUs: 28000 } }
2025-03-31T16:32:09.616756Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[1:145:2169], cookie=10717342178558930919)
2025-03-31T16:32:11.638855Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937
2025-03-31T16:32:11.638945Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute
2025-03-31T16:32:11.657017Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete
2025-03-31T16:32:11.657125Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute
2025-03-31T16:32:11.671651Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete
2025-03-31T16:32:11.672061Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[2:130:2156], cookie=13829395761479686117, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 })
2025-03-31T16:32:11.672359Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root"
2025-03-31T16:32:11.694884Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[2:130:2156], cookie=13829395761479686117)
2025-03-31T16:32:11.695730Z node 2 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [2:140:2164]. Cookie: 17215901929172460887. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 }
2025-03-31T16:32:11.695784Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[2:140:2164], cookie=17215901929172460887)
2025-03-31T16:32:11.696317Z node 2 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [2:140:2164]. Cookie: 9054671404055296390. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 }
2025-03-31T16:32:11.696369Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[2:140:2164], cookie=9054671404055296390)
2025-03-31T16:32:11.696837Z node 2 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [2:140:2164]. Cookie: 17007456038163252116. Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 1016500 } }
2025-03-31T16:32:11.696880Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[2:140:2164], cookie=17007456038163252116)
2025-03-31T16:32:11.697274Z node 2 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [2:140:2164]. Cookie: 9938796420617784766. Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 1016500 } }
2025-03-31T16:32:11.697317Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[2:140:2164], cookie=9938796420617784766)
2025-03-31T16:32:13.900606Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937
2025-03-31T16:32:13.900740Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute
2025-03-31T16:32:13.919801Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete
2025-03-31T16:32:13.920075Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute
2025-03-31T16:32:13.937132Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete
2025-03-31T16:32:13.937564Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:132:2158], cookie=17705906995430431260, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 })
2025-03-31T16:32:13.937842Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root"
2025-03-31T16:32:13.963889Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:132:2158], cookie=17705906995430431260)
2025-03-31T16:32:13.964478Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:142:2166], cookie=14596551984422939939, path="/Root/Res1", config={ })
2025-03-31T16:32:13.964763Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root/Res1"
2025-03-31T16:32:13.977051Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:142:2166], cookie=14596551984422939939)
2025-03-31T16:32:13.977758Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:147:2171], cookie=15666363296657084782, path="/Root/Res2", config={ })
2025-03-31T16:32:13.977980Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 3 "Root/Res2"
2025-03-31T16:32:13.991529Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:147:2171], cookie=15666363296657084782)
2025-03-31T16:32:13.992517Z node 3 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [3:152:2176]. Cookie: 16393838029705915774. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res1" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { BillingPeriodSec: 2 } OnDemand { BillingPeriodSec: 2 } Overshoot { BillingPeriodSec: 2 } } } } ProtocolVersion: 1 }
2025-03-31T16:32:13.992585Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[3:152:2176], cookie=16393838029705915774)
2025-03-31T16:32:13.993230Z node 3 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [3:152:2176]. Cookie: 6199068290881073033. Data: { Results { ResourceId: 3 Error { Status: SUCCESS } EffectiveProps { ResourceId: 3 ResourcePath: "Root/Res2" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { BillingPeriodSec: 2 } OnDemand { BillingPeriodSec: 2 } Overshoot { BillingPeriodSec: 2 } } } } ProtocolVersion: 1 }
2025-03-31T16:32:13.993290Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[3:152:2176], cookie=6199068290881073033)
2025-03-31T16:32:13.993847Z node 3 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [3:152:2176]. Cookie: 8301151160994049858. Data: { ResourcesInfo { ResourceId: 2 AcceptedUs: 1020500 } ResourcesInfo { ResourceId: 3 AcceptedUs: 1020500 } }
2025-03-31T16:32:13.993918Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[3:152:2176], cookie=8301151160994049858)
2025-03-31T16:32:16.157337Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937
2025-03-31T16:32:16.157440Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute
2025-03-31T16:32:16.177048Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete
2025-03-31T16:32:16.177192Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute
2025-03-31T16:32:16.191161Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete
2025-03-31T16:32:16.191555Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:132:2158], cookie=13007137497026929795, path="/Root", config={ MaxUnitsPerSecond: 100 PrefetchCoefficient: 300 })
2025-03-31T16:32:16.191858Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root"
2025-03-31T16:32:16.217230Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:132:2158], cookie=13007137497026929795)
2025-03-31T16:32:16.218444Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [4:142:2166]. Cookie: 5950936926702334615. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 300 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { Enabled: true BillingPeriodSec: 2 Labels { key: "k1" value: "v1" } Labels { key: "k2" value: "v2" } } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 }
2025-03-31T16:32:16.218515Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[4:142:2166], cookie=5950936926702334615)
2025-03-31T16:32:16.219022Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [4:142:2166]. Cookie: 9273689041226306222. Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 27500 } }
2025-03-31T16:32:16.219074Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[4:142:2166], cookie=9273689041226306222)
2025-03-31T16:32:18.434910Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937
2025-03-31T16:32:18.435013Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute
2025-03-31T16:32:18.456941Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete
2025-03-31T16:32:18.457289Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute
2025-03-31T16:32:18.484610Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete
2025-03-31T16:32:18.485031Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:132:2158], cookie=14216912571084982486, path="/Root", config={ MaxUnitsPerSecond: 100 })
2025-03-31T16:32:18.485206Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root"
2025-03-31T16:32:18.496942Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:132:2158], cookie=14216912571084982486)
2025-03-31T16:32:18.497650Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:141:2165], cookie=17724934449017055678, path="/Root/Res", config={ })
2025-03-31T16:32:18.497906Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root/Res"
2025-03-31T16:32:18.509923Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:141:2165], cookie=17724934449017055678)
2025-03-31T16:32:18.510627Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:146:2170]. Cookie: 707022709555426715. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 }
2025-03-31T16:32:18.510669Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:146:2170], cookie=707022709555426715)
2025-03-31T16:32:18.511140Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceUpdate::Execute (sender=[5:150:2174], cookie=7414760444771133152, id=0, path="/Root", config={ MaxUnitsPerSecond: 150 })
2025-03-31T16:32:18.511285Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Updated quoter resource 1 "Root"
2025-03-31T16:32:18.511460Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:146:2170]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 150 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } StateNotification { Status: SUCCESS } } }
2025-03-31T16:32:18.523391Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceUpdate::Complete (sender=[5:150:2174], cookie=7414760444771133152)
2025-03-31T16:32:18.523981Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvUpdateConsumptionStateAck to [5:146:2170]. Cookie: 3494833027783372862. Data: { }
2025-03-31T16:32:18.524026Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Update quoter resources consumption state (sender=[5:146:2170], cookie=3494833027783372862)
>> TKesusTest::TestAllocatesResources [GOOD]
>> KqpRanges::UpdateMulti [GOOD]
>> KqpRanges::UpdateWhereInBigLiteralList
>> TKesusTest::TestQuoterResourceModification [GOOD]
>> TKesusTest::TestQuoterResourceDeletion
------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestSync [GOOD]
Test command err:
RandomSeed# 5891975330225757889
Setting VDisk read-only to 1 for position 0
Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0]
Setting VDisk read-only to 1 for position 1
Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0]
SEND TEvPut with key [1:1:0:0:0:131072:0]
2025-03-31T16:32:01.968376Z 1 00h02m00.100000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:8809:940]
2025-03-31T16:32:01.968839Z 2 00h02m00.100000s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:8816:947]
TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988}
Setting VDisk read-only to 0 for position 0
Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0]
Setting VDisk read-only to 0 for position 1
Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0]
Setting VDisk read-only to 1 for position 1
Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0]
Setting VDisk read-only to 1 for position 2
Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0]
SEND TEvPut with key [1:1:1:0:0:32768:0]
2025-03-31T16:32:04.012342Z 3 00h06m00.210512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:8823:954]
2025-03-31T16:32:04.012525Z 2 00h06m00.210512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:8816:947]
TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988}
Setting VDisk read-only to 0 for position 1
Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0]
Setting VDisk read-only to 0 for position 2
Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0]
Setting VDisk read-only to 1 for position 2
Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0]
Setting VDisk read-only to 1 for position 3
Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0]
SEND TEvPut with key [1:1:2:0:0:131072:0]
TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988}
Setting VDisk read-only to 0 for position 2
Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0]
Setting VDisk read-only to 0 for position 3
Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0]
Setting VDisk read-only to 1 for position 3
Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0]
Setting VDisk read-only to 1 for position 4
Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0]
SEND TEvPut with key [1:1:3:0:0:32768:0]
2025-03-31T16:32:08.619190Z 5 00h14m00.361536s :BS_SKELETON ERROR: VDISK[82000000:_:0:4:0]: Unavailable in read-only Sender# [1:8837:968]
2025-03-31T16:32:08.619268Z 4 00h14m00.361536s :BS_SKELETON ERROR: VDISK[82000000:_:0:3:0]: Unavailable in read-only Sender# [1:8830:961]
TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988}
Setting VDisk read-only to 0 for position 3
Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0]
Setting VDisk read-only to 0 for position 4
Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0]
Setting VDisk read-only to 1 for position 4
Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0]
Setting VDisk read-only to 1 for position 5
Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0]
SEND TEvPut with key [1:1:4:0:0:131072:0]
2025-03-31T16:32:10.895429Z 6 00h18m00.412560s :BS_SKELETON ERROR: VDISK[82000000:_:0:5:0]: Unavailable in read-only Sender# [1:8844:975]
2025-03-31T16:32:10.895526Z 5 00h18m00.412560s :BS_SKELETON ERROR: VDISK[82000000:_:0:4:0]: Unavailable in read-only Sender# [1:8837:968]
TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988}
Setting VDisk read-only to 0 for position 4
Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0]
Setting VDisk read-only to 0 for position 5
Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0]
Setting VDisk read-only to 1 for position 5
Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0]
Setting VDisk read-only to 1 for position 6
Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0]
SEND TEvPut with key [1:1:5:0:0:32768:0]
2025-03-31T16:32:13.435826Z 7 00h22m00.500000s :BS_SKELETON ERROR: VDISK[82000000:_:0:6:0]: Unavailable in read-only Sender# [1:8851:982]
2025-03-31T16:32:13.435942Z 6 00h22m00.500000s :BS_SKELETON ERROR: VDISK[82000000:_:0:5:0]: Unavailable in read-only Sender# [1:8844:975]
TEvPutResult: TEvPutResult {Id# [1:1:5:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988}
Setting VDisk read-only to 0 for position 5
Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0]
Setting VDisk read-only to 0 for position 6
Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0]
Setting VDisk read-only to 1 for position 6
Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0]
Setting VDisk read-only to 1 for position 0
Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0]
SEND TEvPut with key [1:1:6:0:0:131072:0]
2025-03-31T16:32:16.274773Z 7 00h26m00.561536s :BS_SKELETON ERROR: VDISK[82000000:_:0:6:0]: Unavailable in read-only Sender# [1:8851:982]
TEvPutResult: TEvPutResult {Id# [1:1:6:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988}
Setting VDisk read-only to 0 for position 6
Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0]
Setting VDisk read-only to 0 for position 0
Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0]
=== Read all 7 blob(s) ===
SEND TEvGet with key [1:1:0:0:0:131072:0]
TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}}
SEND TEvGet with key [1:1:1:0:0:32768:0]
TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}}
SEND TEvGet with key [1:1:2:0:0:131072:0]
TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}}
SEND TEvGet with key [1:1:3:0:0:32768:0]
TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}}
SEND TEvGet with key [1:1:4:0:0:131072:0]
TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}}
SEND TEvGet with key [1:1:5:0:0:32768:0]
TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}}
SEND TEvGet with key [1:1:6:0:0:131072:0]
TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:6:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}}
|95.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest
>> TKesusTest::TestRegisterProxyFromDeadActor [GOOD]
>> TKesusTest::TestRegisterProxyLinkFailure
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAllocatesResources [GOOD]
Test command err:
2025-03-31T16:32:16.932992Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937
2025-03-31T16:32:16.933123Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute
2025-03-31T16:32:16.951628Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete
2025-03-31T16:32:16.951754Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute
2025-03-31T16:32:16.977559Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete
2025-03-31T16:32:16.978116Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:130:2156], cookie=13749893099953117813, session=0, seqNo=0)
2025-03-31T16:32:16.978296Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1
2025-03-31T16:32:16.990558Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:130:2156], cookie=13749893099953117813, session=1)
2025-03-31T16:32:16.990893Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:130:2156], cookie=11353494765970078343, session=0, seqNo=0)
2025-03-31T16:32:16.991015Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2
2025-03-31T16:32:17.003289Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:130:2156], cookie=11353494765970078343, session=2)
2025-03-31T16:32:17.003651Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:130:2156], cookie=111, session=1, semaphore="Lock1" count=1)
2025-03-31T16:32:17.003854Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1"
2025-03-31T16:32:17.003949Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1
2025-03-31T16:32:17.016315Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:130:2156], cookie=111)
2025-03-31T16:32:17.016667Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:130:2156], cookie=222, session=2, semaphore="Lock1" count=18446744073709551615)
2025-03-31T16:32:17.017053Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:130:2156], cookie=333, session=2, semaphore="Lock1" count=1)
2025-03-31T16:32:17.017150Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #2 session 2
2025-03-31T16:32:17.029246Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:130:2156], cookie=222)
2025-03-31T16:32:17.029342Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:130:2156], cookie=333)
2025-03-31T16:32:17.029964Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:146:2170], cookie=6247370101486095586, name="Lock1")
2025-03-31T16:32:17.030077Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:146:2170], cookie=6247370101486095586)
2025-03-31T16:32:17.321503Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937
2025-03-31T16:32:17.321691Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute
2025-03-31T16:32:17.340414Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete
2025-03-31T16:32:17.340552Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute
2025-03-31T16:32:17.355398Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete
2025-03-31T16:32:17.355922Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:130:2156], cookie=6400606566938623909, session=0, seqNo=0)
2025-03-31T16:32:17.356099Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1
2025-03-31T16:32:17.378883Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:130:2156], cookie=6400606566938623909, session=1)
2025-03-31T16:32:17.379239Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:130:2156], cookie=10133922959165192895, session=0, seqNo=0)
2025-03-31T16:32:17.379380Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2
2025-03-31T16:32:17.391533Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:130:2156], cookie=10133922959165192895, session=2)
2025-03-31T16:32:17.391896Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:130:2156], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615)
2025-03-31T16:32:17.392022Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1"
2025-03-31T16:32:17.392095Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1
2025-03-31T16:32:17.404471Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:130:2156], cookie=111)
2025-03-31T16:32:17.404835Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:130:2156], cookie=222, session=2, semaphore="Lock1" count=1)
2025-03-31T16:32:17.405182Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:130:2156], cookie=333, session=2, semaphore="Lock1" count=18446744073709551615)
2025-03-31T16:32:17.417427Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:130:2156], cookie=222)
2025-03-31T16:32:17.417529Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:130:2156], cookie=333)
2025-03-31T16:32:17.418141Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:146:2170], cookie=14789534064172757935, name="Lock1")
2025-03-31T16:32:17.418252Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:146:2170], cookie=14789534064172757935)
2025-03-31T16:32:17.418815Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:149:2173], cookie=17016477466302376890, name="Lock1")
2025-03-31T16:32:17.418897Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:149:2173], cookie=17016477466302376890)
2025-03-31T16:32:17.759444Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937
2025-03-31T16:32:17.759554Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute
2025-03-31T16:32:17.774574Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete
2025-03-31T16:32:17.774815Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute
2025-03-31T16:32:17.788628Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete
2025-03-31T16:32:17.789051Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:132:2158], cookie=17050636209362239890, session=0, seqNo=0)
2025-03-31T16:32:17.789172Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1
2025-03-31T16:32:17.811992Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:132:2158], cookie=17050636209362239890, session=1)
2025-03-31T16:32:17.812324Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:132:2158], cookie=6959805967467657073, session=0, seqNo=0)
2025-03-31T16:32:17.812466Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2
2025-03-31T16:32:17.824519Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:132:2158], cookie=6959805967467657073, session=2)
2025-03-31T16:32:17.825308Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[3:132:2158], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615)
2025-03-31T16:32:17.825454Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1"
2025-03-31T16:32:17.825551Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1
2025-03-31T16:32:17.837653Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[3:132:2158], cookie=111)
2025-03-31T16:32:17.837965Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[3:132:2158], cookie=222, session=2, semaphore="Lock1" count=1)
2025-03-31T16:32:17.838307Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[3:132:2158], cookie=333, session=2, semaphore="Lock1" count=1)
2025-03-31T16:32:17.838382Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 2 / semaphore 1 "Lock1" waiter link
2025-03-31T16:32:17.850373Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[3:132:2158], cookie=222)
2025-03-31T16:32:17.850452Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[3:132:2158], cookie=333)
2025-03-31T16:32:17.851025Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:151:2175], cookie=6573087957677520669, name="Lock1")
2025-03-31T16:32:17.851134Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:151:2175], cookie=6573087957677520669)
2025-03-31T16:32:17.851551Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:154:2178], cookie=5554904384128712091, name="Lock1")
2025-03-31T16:32:17.851600Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:154:2178], cookie=5554904384128712091)
2025-03-31T16:32:17.863475Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937
2025-03-31T16:32:17.863590Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute
2025-03-31T16:32:17.864105Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete
2025-03-31T16:32:17.864616Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute
2025-03-31T16:32:17.911353Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete
2025-03-31T16:32:17.911517Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1
2025-03-31T16:32:17.911829Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:194:2208], cookie=17460440455005547412, name="Lock1")
2025-03-31T16:32:17.911902Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:194:2208], cookie=17460440455005547412)
2025-03-31T16:32:17.912337Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:202:2215], cookie=3691539651382200130, name="Lock1")
2025-03-31T16:32:17.912396Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:202:2215], cookie=3691539651382200130)
2025-03-31T16:32:18.398303Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937
2025-03-31T16:32:18.398421Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute
2025-03-31T16:32:18.417991Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete
2025-03-31T16:32:18.418155Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute
2025-03-31T16:32:18.436730Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete
2025-03-31T16:32:18.437218Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:132:2158], cookie=21672802673057110, session=0, seqNo=0)
2025-03-31T16:32:18.437358Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1
2025-03-31T16:32:18.464176Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:132:2158], cookie=21672802673057110, session=1)
2025-03-31T16:32:18.464501Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:132:2158], cookie=18004712542156547302, session=0, seqNo=0)
2025-03-31T16:32:18.464652Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2
2025-03-31T16:32:18.478694Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:132:2158], cookie=18004712542156547302, session=2)
2025-03-31T16:32:18.479008Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:132:2158], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615)
2025-03-31T16:32:18.479172Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1"
2025-03-31T16:32:18.479282Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1
2025-03-31T16:32:18.493054Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:132:2158], cookie=111)
2025-03-31T16:32:18.493435Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:132:2158], cookie=222, session=2, semaphore="Lock1" count=1)
2025-03-31T16:32:18.493836Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[4:132:2158], cookie=333, name="Lock1")
2025-03-31T16:32:18.493939Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 2 / semaphore 1 "Lock1" waiter link
2025-03-31T16:32:18.506098Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:132:2158], cookie=222)
2025-03-31T16:32:18.506186Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[4:132:2158], cookie=333)
2025-03-31T16:32:18.829900Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937
2025-03-31T16:32:18.830003Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute
2025-03-31T16:32:18.844203Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete
2025-03-31T16:32:18.844444Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute
2025-03-31T16:32:18.868232Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete
2025-03-31T16:32:18.873103Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:132:2158], cookie=14128094990329003272, path="/Root", config={ MaxUnitsPerSecond: 100 })
2025-03-31T16:32:18.873325Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root"
2025-03-31T16:32:18.885244Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:132:2158], cookie=14128094990329003272)
2025-03-31T16:32:18.885871Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:141:2165], cookie=1577605117953467055, path="/Root/Res", config={ })
2025-03-31T16:32:18.886084Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root/Res"
2025-03-31T16:32:18.898005Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:141:2165], cookie=1577605117953467055)
2025-03-31T16:32:18.899668Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:146:2170]. Cookie: 18426140694290943. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 }
2025-03-31T16:32:18.899739Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:146:2170], cookie=18426140694290943)
2025-03-31T16:32:18.900286Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvUpdateConsumptionStateAck to [5:146:2170]. Cookie: 12626166249641494357. Data: { }
2025-03-31T16:32:18.900332Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Update quoter resources consumption state (sender=[5:146:2170], cookie=12626166249641494357)
2025-03-31T16:32:18.941834Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:146:2170]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } }
2025-03-31T16:32:18.983489Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:146:2170]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } }
2025-03-31T16:32:19.014687Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:146:2170]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } }
2025-03-31T16:32:19.045691Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:146:2170]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } }
2025-03-31T16:32:19.087285Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:146:2170]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } }
|95.0%| [TA] $(B)/ydb/core/control/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|95.0%| [TA] {RESULT} $(B)/ydb/core/control/ut/test-results/unittest/{meta.json ... results_accumulator.log}
>> TKesusTest::TestQuoterResourceDeletion [GOOD]
>> TKesusTest::TestQuoterSubscribeOnResource
>> KqpRanges::UpdateWhereInNoFullScan-UseSink [GOOD]
>> KqpRanges::UpdateWhereInWithNull
>> TTopicReaderTests::TestRun_Read_Less_Messages_Than_Sent [GOOD]
>> KqpTx::CommitRequired [GOOD]
>> KqpTx::CommitPrepared
>> TKesusTest::TestRegisterProxyLinkFailure [GOOD]
>> TKesusTest::TestRegisterProxyLinkFailureRace
>> TKesusTest::TestQuoterSubscribeOnResource [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-16 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-17
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-64 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-65
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-16 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-17
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestQuoterSubscribeOnResource [GOOD]
Test command err:
2025-03-31T16:32:17.301211Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937
2025-03-31T16:32:17.301322Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute
2025-03-31T16:32:17.314093Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete
2025-03-31T16:32:17.314214Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute
2025-03-31T16:32:17.338774Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete
2025-03-31T16:32:17.346228Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:130:2156], cookie=4070596786089504608, path="/Root", config={ MaxUnitsPerSecond: 100500 MaxBurstSizeCoefficient: 1.5 })
2025-03-31T16:32:17.346448Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root"
2025-03-31T16:32:17.358501Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:130:2156], cookie=4070596786089504608)
2025-03-31T16:32:17.359191Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:139:2163], cookie=3454086360330926053, path="/Root/Folder", config={ MaxUnitsPerSecond: 100500 MaxBurstSizeCoefficient: 1.5 })
2025-03-31T16:32:17.359378Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root/Folder"
2025-03-31T16:32:17.371334Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:139:2163], cookie=3454086360330926053)
2025-03-31T16:32:17.371911Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:144:2168], cookie=457014503075819922, path="/Root/Q1", config={ MaxUnitsPerSecond: 10 })
2025-03-31T16:32:17.372109Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 3 "Root/Q1"
2025-03-31T16:32:17.384268Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:144:2168], cookie=457014503075819922)
2025-03-31T16:32:17.384944Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:149:2173], cookie=9128508868823496037, path="/Root/Folder/Q1", config={ MaxUnitsPerSecond: 10 })
2025-03-31T16:32:17.385160Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 4 "Root/Folder/Q1"
2025-03-31T16:32:17.397462Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:149:2173], cookie=9128508868823496037)
2025-03-31T16:32:17.398094Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:154:2178], cookie=9427035281989977601, path="/Root/Folder/Q2", config={ MaxUnitsPerSecond: 10 })
2025-03-31T16:32:17.398296Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 5 "Root/Folder/Q2"
2025-03-31T16:32:17.410563Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:154:2178], cookie=9427035281989977601)
2025-03-31T16:32:17.411287Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:159:2183], cookie=5090434270664699442, path="/Root/Folder/Q3", config={ MaxUnitsPerSecond: 10 })
2025-03-31T16:32:17.411506Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 6 "Root/Folder/Q3"
2025-03-31T16:32:17.423673Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:159:2183], cookie=5090434270664699442)
2025-03-31T16:32:17.424362Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:164:2188], cookie=584072539093144745, path="/Root2", config={ MaxUnitsPerSecond: 100500 MaxBurstSizeCoefficient: 1.5 })
2025-03-31T16:32:17.424524Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 7 "Root2"
2025-03-31T16:32:17.436873Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:164:2188], cookie=584072539093144745)
2025-03-31T16:32:17.437590Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:169:2193], cookie=11674094524133324742, path="/Root2/Q", config={ MaxUnitsPerSecond: 10 })
2025-03-31T16:32:17.437792Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 8 "Root2/Q"
2025-03-31T16:32:17.450162Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:169:2193], cookie=11674094524133324742)
2025-03-31T16:32:17.450987Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:174:2198], cookie=9914249602422738567, ids=[100], paths=[], recursive=0)
2025-03-31T16:32:17.451098Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:174:2198], cookie=9914249602422738567)
2025-03-31T16:32:17.451642Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:177:2201], cookie=11477216800124991179, ids=[], paths=[Nonexistent/Path], recursive=0)
2025-03-31T16:32:17.451746Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:177:2201], cookie=11477216800124991179)
2025-03-31T16:32:17.452277Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:180:2204], cookie=14377707465940544805, ids=[], paths=[/Root, ], recursive=0)
2025-03-31T16:32:17.452370Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:180:2204], cookie=14377707465940544805)
2025-03-31T16:32:17.452902Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:183:2207], cookie=4171199852064173131, ids=[1, 1], paths=[], recursive=0)
2025-03-31T16:32:17.452962Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:183:2207], cookie=4171199852064173131)
2025-03-31T16:32:17.453443Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:186:2210], cookie=14441273877066215017, ids=[], paths=[/Root2/Q, /Root2/Q], recursive=0)
2025-03-31T16:32:17.453534Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:186:2210], cookie=14441273877066215017)
2025-03-31T16:32:17.454109Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:189:2213], cookie=8014263527100350331, ids=[], paths=[], recursive=1)
2025-03-31T16:32:17.454186Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:189:2213], cookie=8014263527100350331)
2025-03-31T16:32:17.454926Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:192:2216], cookie=11999537994104887376, ids=[], paths=[], recursive=0)
2025-03-31T16:32:17.455011Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:192:2216], cookie=11999537994104887376)
2025-03-31T16:32:17.455643Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:195:2219], cookie=9165478477773432144, ids=[3, 2], paths=[], recursive=1)
2025-03-31T16:32:17.455736Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:195:2219], cookie=9165478477773432144)
2025-03-31T16:32:17.456336Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:198:2222], cookie=1709782792700613886, ids=[3, 2], paths=[], recursive=0)
2025-03-31T16:32:17.456428Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:198:2222], cookie=1709782792700613886)
2025-03-31T16:32:17.457036Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:201:2225], cookie=4968579942573020066, ids=[], paths=[Root2/], recursive=1)
2025-03-31T16:32:17.457103Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:201:2225], cookie=4968579942573020066)
2025-03-31T16:32:17.457674Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:204:2228], cookie=3783605930200649277, ids=[], paths=[Root2/], recursive=0)
2025-03-31T16:32:17.457733Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:204:2228], cookie=3783605930200649277)
2025-03-31T16:32:17.476714Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937
2025-03-31T16:32:17.476790Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute
2025-03-31T16:32:17.477240Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete
2025-03-31T16:32:17.477749Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute
2025-03-31T16:32:17.514714Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete
2025-03-31T16:32:17.515113Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:244:2258], cookie=16503650087153319380, ids=[100], paths=[], recursive=0)
2025-03-31T16:32:17.515223Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:244:2258], cookie=16503650087153319380)
2025-03-31T16:32:17.516000Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:250:2263], cookie=4510336630014282068, ids=[], paths=[Nonexistent/Path], recursive=0)
2025-03-31T16:32:17.516097Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:250:2263], cookie=4510336630014282068)
2025-03-31T16:32:17.516791Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:253:2266], cookie=16575076630893860682, ids=[], paths=[/Root, ], recursive=0)
2025-03-31T16:32:17.516875Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:253:2266], cookie=16575076630893860682)
2025-03-31T16:32:17.517564Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:256:2269], cookie=6689716598300175496, ids=[1, 1], paths=[], recursive=0)
2025-03-31T16:32:17.517643Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:256:2269], cookie=6689716598300175496)
2025-03-31T16:32:17.518320Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:259:2272], cookie=15904313206354387171, ids=[], paths=[/Root2/Q, /Root2/Q], recursive=0)
2025-03-31T16:32:17.518392Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:259:2272], cookie=15904313206354387171)
2025-03-31T16:32:17.519117Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:262:2275], cookie=10281850544502418968, ids=[], paths=[], recursive=1)
2025-03-31T16:32:17.519188Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:262:2275], cookie=10281850544502418968)
2025-03-31T16:32:17.519929Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:265:2278], cookie=7951614311335314957, ids=[], paths=[], recursive=0)
2025-03-31T16:32:17.519988Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:265:2278], cookie=7951614311335314957)
2025-03-31T16:32:17.520669Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:268:2281], cookie=9081513645309067188, ids=[3, 2], paths=[], recursive=1)
2025-03-31T16:32:17.520750Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:268:2281], cookie=9081513645309067188)
2025-03-31T16:32:17.521557Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:271:2284], cookie=889702686248879795, ids=[3, 2], paths=[], recursive=0)
2025-03- ...
_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-31T16:32:19.597145Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-31T16:32:19.597602Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:132:2158], cookie=3403278100637948077, path="/Root", config={ MaxUnitsPerSecond: 1 }) 2025-03-31T16:32:19.597820Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-03-31T16:32:19.623502Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:132:2158], cookie=3403278100637948077) 2025-03-31T16:32:19.624132Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:141:2165], cookie=18179360265996835820, path="/Root/Q", config={ }) 2025-03-31T16:32:19.624376Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root/Q" 2025-03-31T16:32:19.636777Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:141:2165], cookie=18179360265996835820) 2025-03-31T16:32:19.637457Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:146:2170], cookie=3052411198784315113, path="/Root/Folder", config={ }) 2025-03-31T16:32:19.637723Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 3 "Root/Folder" 2025-03-31T16:32:19.650086Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:146:2170], cookie=3052411198784315113) 2025-03-31T16:32:19.650713Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:151:2175], cookie=14478629895214763663, path="/Root/Folder/Q1", config={ }) 2025-03-31T16:32:19.650954Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 4 "Root/Folder/Q1" 2025-03-31T16:32:19.663258Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:151:2175], cookie=14478629895214763663) 2025-03-31T16:32:19.663895Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:156:2180], cookie=4781372058258421255, ids=[], paths=[], recursive=1) 2025-03-31T16:32:19.663998Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:156:2180], cookie=4781372058258421255) 2025-03-31T16:32:19.664988Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:162:2186], cookie=6320591691413621949, ids=[], paths=[], recursive=1) 2025-03-31T16:32:19.665079Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:162:2186], cookie=6320591691413621949) 2025-03-31T16:32:19.666095Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:168:2192], cookie=7388475503986108330, ids=[], paths=[], recursive=1) 2025-03-31T16:32:19.666178Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:168:2192], cookie=7388475503986108330) 2025-03-31T16:32:19.666669Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:171:2195], cookie=6793506640082972665, id=0, path="/Root/Folder/NonexistingRes") 2025-03-31T16:32:19.666771Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:171:2195], cookie=6793506640082972665) 2025-03-31T16:32:19.667324Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute 
(sender=[4:174:2198], cookie=3812506954250513247, ids=[], paths=[], recursive=1) 2025-03-31T16:32:19.667403Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:174:2198], cookie=3812506954250513247) 2025-03-31T16:32:19.667879Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:177:2201], cookie=10843463010120911520, id=100, path="") 2025-03-31T16:32:19.667940Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:177:2201], cookie=10843463010120911520) 2025-03-31T16:32:19.668412Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:180:2204], cookie=16244961771353061261, ids=[], paths=[], recursive=1) 2025-03-31T16:32:19.668487Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:180:2204], cookie=16244961771353061261) 2025-03-31T16:32:19.669003Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:183:2207], cookie=6389023895914941113, id=3, path="") 2025-03-31T16:32:19.669073Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:183:2207], cookie=6389023895914941113) 2025-03-31T16:32:19.669673Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:186:2210], cookie=16947463460316288226, ids=[], paths=[], recursive=1) 2025-03-31T16:32:19.669762Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:186:2210], cookie=16947463460316288226) 2025-03-31T16:32:19.670286Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:189:2213], cookie=7816953539425883548, id=0, path="/Root/Folder/Q1") 2025-03-31T16:32:19.670415Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleted quoter resource 4 "Root/Folder/Q1" 2025-03-31T16:32:19.682634Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:189:2213], cookie=7816953539425883548) 2025-03-31T16:32:19.683490Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:194:2218], cookie=13080435858311945052, ids=[], paths=[], recursive=1) 2025-03-31T16:32:19.683592Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:194:2218], cookie=13080435858311945052) 2025-03-31T16:32:19.697339Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-31T16:32:19.697462Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-31T16:32:19.698127Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-31T16:32:19.698786Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-31T16:32:19.736485Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-31T16:32:19.736864Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:234:2248], cookie=5150898162822005126, ids=[], paths=[], recursive=1) 2025-03-31T16:32:19.736966Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:234:2248], cookie=5150898162822005126) 2025-03-31T16:32:19.737711Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:240:2253], cookie=9282896463618814258, id=3, path="") 2025-03-31T16:32:19.737878Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleted quoter 
resource 3 "Root/Folder" 2025-03-31T16:32:19.750334Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:240:2253], cookie=9282896463618814258) 2025-03-31T16:32:19.751147Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:245:2258], cookie=10266740514982060265, ids=[], paths=[], recursive=1) 2025-03-31T16:32:19.751252Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:245:2258], cookie=10266740514982060265) 2025-03-31T16:32:19.764860Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-31T16:32:19.764976Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-31T16:32:19.765675Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-31T16:32:19.766343Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-31T16:32:19.804096Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-31T16:32:19.804481Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:285:2288], cookie=10869952677099901643, ids=[], paths=[], recursive=1) 2025-03-31T16:32:19.804574Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:285:2288], cookie=10869952677099901643) 2025-03-31T16:32:20.191524Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-31T16:32:20.191646Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-31T16:32:20.212250Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-31T16:32:20.212567Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-31T16:32:20.236332Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-31T16:32:20.236643Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:132:2158], cookie=3135930188048102341, path="/Q1", config={ MaxUnitsPerSecond: 10 }) 2025-03-31T16:32:20.236793Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Q1" 2025-03-31T16:32:20.248632Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:132:2158], cookie=3135930188048102341) 2025-03-31T16:32:20.249204Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:141:2165], cookie=8105629956637335802, path="/Q2", config={ MaxUnitsPerSecond: 10 }) 2025-03-31T16:32:20.249402Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Q2" 2025-03-31T16:32:20.261756Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:141:2165], cookie=8105629956637335802) 2025-03-31T16:32:20.265737Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:146:2170]. Cookie: 377787912522053371. 
Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Q1" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 10 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 }
2025-03-31T16:32:20.265810Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:146:2170], cookie=377787912522053371)
2025-03-31T16:32:20.266566Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:146:2170]. Cookie: 12997888119435547370. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Q1" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 10 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Q2" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 10 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } Results { Error { Status: NOT_FOUND Issues { message: "Resource \"/Q3\" doesn\'t exist." } } } ProtocolVersion: 1 }
2025-03-31T16:32:20.266608Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:146:2170], cookie=12997888119435547370)
>> TKesusTest::TestUnregisterProxy
>> KqpNewEngine::LocksMultiShard [GOOD]
>> KqpNewEngine::LocksMultiShardOk
>> KqpSinkTx::SnapshotROInteractive2 [GOOD]
>> KqpSnapshotIsolation::TConflictReadWriteOlap
>> TKesusTest::TestAttachOutOfSequence
>> KqpNewEngine::DeleteWithInputMultiConsumption-UseSink
>> KqpSort::TopSortParameter [GOOD]
>> KqpSort::TopSortExpr
>> TKesusTest::TestRegisterProxyLinkFailureRace [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-64 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-65
>> TKesusTest::TestUnregisterProxy [GOOD]
>> TKesusTest::TestUnregisterProxyBadGeneration
>> TKesusTest::TestAttachOutOfSequence [GOOD]
>> TKesusTest::TestAttachOutOfSequenceInTx
>> KqpNewEngine::BlindWrite
>> KqpSqlIn::SimpleKey [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicReaderTests::TestRun_Read_Less_Messages_Than_Sent [GOOD]
Test command err:
=== Starting PQ server ===
Server->StartServer(false);
2025-03-31T16:31:41.209196Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012203791080074:2075];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:31:41.209264Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
2025-03-31T16:31:41.267928Z node 2 :METADATA_PROVIDER WARN:
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012205703889101:2144];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:41.268519Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:31:41.391907Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-31T16:31:41.402022Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000776/r3tmp/tmp2ZlotU/pdisk_1.dat 2025-03-31T16:31:41.731349Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:31:41.740796Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:41.740903Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:41.741799Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:41.741877Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:41.753877Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:31:41.754092Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:31:41.762936Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61528, node 1 2025-03-31T16:31:41.891466Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/0l5c/000776/r3tmp/yandexi4R7md.tmp 2025-03-31T16:31:41.891523Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/0l5c/000776/r3tmp/yandexi4R7md.tmp 2025-03-31T16:31:41.892236Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/0l5c/000776/r3tmp/yandexi4R7md.tmp 2025-03-31T16:31:41.892392Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:31:42.115044Z INFO: TTestServer started on Port 14496 GrpcPort 61528 TClient is connected to server localhost:14496 PQClient connected to localhost:61528 === TenantModeEnabled() = 0 === Init PQ - start server on port 61528 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
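The KESUS_TABLET quoter trace further up (TTxQuoterResourceAdd, then TEvSubscribeOnResourcesResult carrying HierarchicalDRRResourceConfig) is the tablet-side half of the rate-limiter API: resources form a path tree under a coordination node, the root carries MaxUnitsPerSecond, and children inherit the effective DRR properties (MaxBurstSizeCoefficient: 1, Weight: 1 in the traces). A minimal client-side sketch of the same tree, assuming the in-tree C++ SDK rate-limiter client (NYdb::NRateLimiter); the coordination-node path "/Root/kesus" is hypothetical and the method names are from memory, so they should be checked against the SDK headers:

#include <ydb/public/sdk/cpp/client/ydb_rate_limiter/rate_limiter.h>

using namespace NYdb;
using namespace NYdb::NRateLimiter;

// Sketch: recreate the resource tree seen in the trace. "/Root/kesus" is a
// placeholder coordination node; the resource paths match the log.
void CreateQuoterTree(TDriver& driver) {
    TRateLimiterClient client(driver);
    const TString kesus = "/Root/kesus";

    // The root resource must define the rate: config={ MaxUnitsPerSecond: 1 }.
    client.CreateResource(kesus, "Root",
        TCreateResourceSettings().MaxUnitsPerSecond(1)).GetValueSync();

    // Children may omit the rate and inherit the effective DRR properties.
    client.CreateResource(kesus, "Root/Folder").GetValueSync();
    client.CreateResource(kesus, "Root/Folder/Q1").GetValueSync();
}

Deleting "Root/Folder" while "Root/Folder/Q1" still exists is exercised by the trace as well: the tablet answers the delete without removing the subtree implicitly.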
2025-03-31T16:31:42.554055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-03-31T16:31:42.554305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:31:42.554570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-03-31T16:31:42.554860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-31T16:31:42.554909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:31:42.558227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-03-31T16:31:42.558371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-03-31T16:31:42.558604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:31:42.558662Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-03-31T16:31:42.558724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-03-31T16:31:42.558752Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
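The ESchemeOpAlterSubDomain operation above steps through SchemeShard's sub-operation state machine; the trace prints the numeric transitions 2 -> 3 (TCreateParts, no shards to create), 3 -> 128 (TConfigureParts), and, once the coordinator plans the step, 128 -> 240 (TPropose -> TDone). A toy C++ rendering of that progression; the numeric codes are taken from the "Change state for txid" lines, while the enum labels are only inferred from the surrounding messages and are not YDB's real identifiers:

#include <cstdio>

// Illustrative only: the numeric codes match the "Change state for txid"
// lines in the trace; the labels are guesses from the surrounding
// TTxOperationProgress messages, not YDB's actual enum.
enum class ESubOpState : int {
    CreateParts    = 2,   // TCreateParts ProgressState
    ConfigureParts = 3,   // NSubDomainState::TConfigureParts
    Propose        = 128, // NSubDomainState::TPropose, waits for TEvOperationPlan
    Done           = 240, // TDone, operation parts 1/1 ready
};

ESubOpState Next(ESubOpState s) {
    switch (s) {
        case ESubOpState::CreateParts:    return ESubOpState::ConfigureParts;
        case ESubOpState::ConfigureParts: return ESubOpState::Propose;
        case ESubOpState::Propose:        return ESubOpState::Done; // on plan step
        case ESubOpState::Done:           return ESubOpState::Done;
    }
    return s;
}

int main() {
    for (auto s = ESubOpState::CreateParts; s != ESubOpState::Done; s = Next(s))
        std::printf("Change state: %d -> %d\n", (int)s, (int)Next(s));
}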
2025-03-31T16:31:42.561238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:31:42.561277Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-03-31T16:31:42.561296Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-03-31T16:31:42.563287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:31:42.563319Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:31:42.563352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-31T16:31:42.563428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-03-31T16:31:42.567284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:31:42.567702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:31:42.567728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2025-03-31T16:31:42.567752Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:31:42.569405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-03-31T16:31:42.569589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-03-31T16:31:42.573135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743438702614, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-31T16:31:42.573276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743438702614 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-03-31T16:31:42.573309Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-31T16:31:42.573529Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-03-31T16:31:42.573562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-31T16:31:42.573739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-03-31T16:31:42.573767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, 
LocalPathId: 1], at schemeshard: 72057594046644480 2025-03-31T16:31:42.575448Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-03-31T16:31:42.575488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-03-31T16:31:42.576036Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-03-31T16:31:42.576071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7488012203791080722:2391], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1 2025-03-31T16:31:42.576128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:31:42.576150Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-03-31T16:31:42.576259Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-03-31T16:31:42.576269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-03-31T16:31:42.576297Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-03-31T16:31:42.576305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-03-31T16:31:42.576318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 1/1, is published: false 2025-03-31T16:31:42.576334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-03-31T16:31:42.576353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-03-31T16:31:42.576362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710657:0 2025-03-31T16:31:42.576407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pat ... 322Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037898][rt3.dc1--topic1] consumer cli balancing family=1 (Status=Free, Partitions=[0]) for ReadingSession "shared/cli_5_1_2020305431878009368_v1" (Sender=[5:7488012363861857706:2636], Pipe=[5:7488012363861857711:2636], Partitions=[], ActiveFamilyCount=0) 2025-03-31T16:32:18.202397Z node 6 :PERSQUEUE_READ_BALANCER INFO: [72075186224037898][rt3.dc1--topic1] consumer cli family 1 status Active partitions [0] session "shared/cli_5_1_2020305431878009368_v1" sender [5:7488012363861857706:2636] lock partition 0 for ReadingSession "shared/cli_5_1_2020305431878009368_v1" (Sender=[5:7488012363861857706:2636], Pipe=[5:7488012363861857711:2636], Partitions=[], ActiveFamilyCount=1) generation 1 step 1 2025-03-31T16:32:18.202458Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037898][rt3.dc1--topic1] consumer cli start rebalancing. 
familyCount=1, sessionCount=1, desiredFamilyCount=1, allowPlusOne=0 2025-03-31T16:32:18.202505Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037898][rt3.dc1--topic1] consumer cli balancing duration: 0.000209s 2025-03-31T16:32:18.203676Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_2020305431878009368_v1 assign: record# { Partition: 0 TabletId: 72075186224037897 Topic: "rt3.dc1--topic1" Generation: 1 Step: 1 Session: "shared/cli_5_1_2020305431878009368_v1" ClientId: "cli" PipeClient { RawX1: 7488012363861857711 RawX2: 4503621102209612 } Path: "/Root/PQ/rt3.dc1--topic1" } 2025-03-31T16:32:18.203875Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_2020305431878009368_v1 INITING TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) 2025-03-31T16:32:18.204152Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_2020305431878009368_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) pipe restart attempt 0 pipe creation result: OK TabletId: 72075186224037897 Generation: 1, pipe: [5:7488012363861857713:2639] 2025-03-31T16:32:18.204220Z node 5 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: shared/cli_5_1_2020305431878009368_v1:1 with generation 1 2025-03-31T16:32:18.208837Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_2020305431878009368_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 0 event { CmdGetClientOffsetResult { Offset: 0 EndOffset: 3 SizeLag: 409 WriteTimestampEstimateMS: 1743438738190 } Cookie: 18446744073709551615 } 2025-03-31T16:32:18.208889Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_2020305431878009368_v1 INIT DONE TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 readOffset 0 committedOffset 0 2025-03-31T16:32:18.208964Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_2020305431878009368_v1 sending to client partition status 2025-03-31T16:32:18.209757Z :INFO: [] [] [f7512230-c14cce6a-2584aae2-50b8c7b2] [] Confirm partition stream create. Partition stream id: 1. Cluster: "-". Topic: "/topic1". Partition: 0. 
Read offset: (NULL) 2025-03-31T16:32:18.210251Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_2020305431878009368_v1 grpc read done: success# 1, data# { start_partition_session_response { partition_session_id: 1 } } 2025-03-31T16:32:18.210408Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_2020305431878009368_v1 got StartRead from client: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 0, commitOffset# (empty maybe) 2025-03-31T16:32:18.210473Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_2020305431878009368_v1 Start reading TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 readOffset 0 committedOffset 0 clientCommitOffset (empty maybe) clientReadOffset 0 2025-03-31T16:32:18.210504Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_2020305431878009368_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) ready for read with readOffset 0 endOffset 3 2025-03-31T16:32:18.210569Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_2020305431878009368_v1 partition ready for read: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 0, endOffset# 3, WTime# 0, sizeLag# 409 2025-03-31T16:32:18.210594Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_2020305431878009368_v1TEvPartitionReady. Aval parts: 1 2025-03-31T16:32:18.210643Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_2020305431878009368_v1 performing read request: guid# ce5f7d3e-26d21b66-9b3e5bde-2c5c4518, from# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), count# 3, size# 490, partitionsAsked# 1, maxTimeLag# 0ms 2025-03-31T16:32:18.210729Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_2020305431878009368_v1 READ FROM TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1)maxCount 3 maxSize 490 maxTimeLagMs 0 readTimestampMs 0 readOffset 0 EndOffset 3 ClientCommitOffset 0 committedOffset 0 Guid ce5f7d3e-26d21b66-9b3e5bde-2c5c4518 2025-03-31T16:32:18.212067Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_2020305431878009368_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 3 Result { Offset: 0 Data: "... 79 bytes ..." SourceId: "\000source1" SeqNo: 1 WriteTimestampMS: 1743438738088 CreateTimestampMS: 1743438738086 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 1 Data: "... 79 bytes ..." SourceId: "\000source1" SeqNo: 2 WriteTimestampMS: 1743438738093 CreateTimestampMS: 1743438738087 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 2 Data: "... 79 bytes ..." 
SourceId: "\000source1" SeqNo: 3 WriteTimestampMS: 1743438738127 CreateTimestampMS: 1743438738087 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 43 RealReadOffset: 2 WaitQuotaTimeMs: 0 EndOffset: 3 StartOffset: 0 } Cookie: 0 } 2025-03-31T16:32:18.212275Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_2020305431878009368_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset3 2025-03-31T16:32:18.212346Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_2020305431878009368_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 ReadOffset 3 ReadGuid ce5f7d3e-26d21b66-9b3e5bde-2c5c4518 has messages 1 2025-03-31T16:32:18.212451Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_2020305431878009368_v1 read done: guid# ce5f7d3e-26d21b66-9b3e5bde-2c5c4518, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), size# 484 2025-03-31T16:32:18.212496Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_2020305431878009368_v1 response to read: guid# ce5f7d3e-26d21b66-9b3e5bde-2c5c4518 2025-03-31T16:32:18.212703Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_2020305431878009368_v1 Process answer. Aval parts: 0 2025-03-31T16:32:18.213058Z :DEBUG: [] [] [f7512230-c14cce6a-2584aae2-50b8c7b2] [] Got ReadResponse, serverBytesSize = 484, now ReadSizeBudget = 0, ReadSizeServerDelta = 52428316 2025-03-31T16:32:18.213217Z :DEBUG: [] [] [f7512230-c14cce6a-2584aae2-50b8c7b2] [] In ContinueReadingDataImpl, ReadSizeBudget = 0, ReadSizeServerDelta = 52428316 2025-03-31T16:32:18.213483Z :DEBUG: [] Decompression task done. Partition/PartitionSessionId: 1 (0-2) 2025-03-31T16:32:18.213580Z :DEBUG: [] [] [f7512230-c14cce6a-2584aae2-50b8c7b2] [] Returning serverBytesSize = 484 to budget 2025-03-31T16:32:18.213619Z :DEBUG: [] [] [f7512230-c14cce6a-2584aae2-50b8c7b2] [] In ContinueReadingDataImpl, ReadSizeBudget = 484, ReadSizeServerDelta = 52428316 2025-03-31T16:32:18.213979Z :DEBUG: [] [] [f7512230-c14cce6a-2584aae2-50b8c7b2] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2025-03-31T16:32:18.214164Z :DEBUG: [] Take Data. Partition 0. Read: {0, 0} (0-0) 2025-03-31T16:32:18.214231Z :DEBUG: [] Take Data. Partition 0. Read: {1, 0} (1-1) 2025-03-31T16:32:18.214265Z :DEBUG: [] Take Data. Partition 0. Read: {2, 0} (2-2) 2025-03-31T16:32:18.214312Z :DEBUG: [] [] [f7512230-c14cce6a-2584aae2-50b8c7b2] [] The application data is transferred to the client. Number of messages 3, size 24 bytes 2025-03-31T16:32:18.214253Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_2020305431878009368_v1 grpc read done: success# 1, data# { read_request { bytes_size: 484 } } 2025-03-31T16:32:18.214366Z :DEBUG: [] [] [f7512230-c14cce6a-2584aae2-50b8c7b2] [] Returning serverBytesSize = 0 to budget 2025-03-31T16:32:18.214405Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_2020305431878009368_v1 got read request: guid# 4db92e6-a64865f6-b7ec1e06-a76063eb 2025-03-31T16:32:18.214592Z :INFO: [] [] [f7512230-c14cce6a-2584aae2-50b8c7b2] Closing read session. 
Close timeout: 0.000000s 2025-03-31T16:32:18.214646Z :INFO: [] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:/topic1:0:1:2:0 2025-03-31T16:32:18.214691Z :INFO: [] [] [f7512230-c14cce6a-2584aae2-50b8c7b2] Counters: { Errors: 0 CurrentSessionLifetimeMs: 23 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 24 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-31T16:32:18.214794Z :NOTICE: [] [] [f7512230-c14cce6a-2584aae2-50b8c7b2] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-03-31T16:32:18.214832Z :DEBUG: [] [] [f7512230-c14cce6a-2584aae2-50b8c7b2] [] Abort session to cluster 2025-03-31T16:32:18.215374Z :NOTICE: [] [] [f7512230-c14cce6a-2584aae2-50b8c7b2] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " }
2025-03-31T16:32:18.216456Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_2020305431878009368_v1 grpc read done: success# 0, data# { }
2025-03-31T16:32:18.216485Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_2020305431878009368_v1 grpc read failed
2025-03-31T16:32:18.216511Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_2020305431878009368_v1 grpc closed
2025-03-31T16:32:18.216552Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_2020305431878009368_v1 is DEAD
2025-03-31T16:32:18.217225Z node 5 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/cli_5_1_2020305431878009368_v1
2025-03-31T16:32:18.217822Z node 6 :PERSQUEUE_READ_BALANCER INFO: [72075186224037898][rt3.dc1--topic1] pipe [5:7488012363861857711:2636] disconnected; active server actors: 1
2025-03-31T16:32:18.217865Z node 6 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037898][rt3.dc1--topic1] pipe [5:7488012363861857711:2636] client cli disconnected session shared/cli_5_1_2020305431878009368_v1
>> KqpSqlIn::SelectNotAllElements
>> TKesusTest::TestUnregisterProxyBadGeneration [GOOD]
>> TKesusTest::TestSessionTimeoutAfterUnregister
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestRegisterProxyLinkFailureRace [GOOD]
Test command err:
2025-03-31T16:32:18.508010Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937
2025-03-31T16:32:18.508138Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute
2025-03-31T16:32:18.520071Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete
2025-03-31T16:32:18.520164Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute
2025-03-31T16:32:18.534430Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete
2025-03-31T16:32:18.931585Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937
2025-03-31T16:32:18.931694Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute
2025-03-31T16:32:18.950457Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete
2025-03-31T16:32:18.951415Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute
2025-03-31T16:32:18.966055Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete
2025-03-31T16:32:19.393958Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937
2025-03-31T16:32:19.394068Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute
2025-03-31T16:32:19.412238Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete
2025-03-31T16:32:19.412540Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute
2025-03-31T16:32:19.426744Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete
2025-03-31T16:32:19.837830Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937
2025-03-31T16:32:19.837937Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute
2025-03-31T16:32:19.857076Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete
2025-03-31T16:32:19.857234Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute
2025-03-31T16:32:19.871692Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete
2025-03-31T16:32:19.873946Z node 4 :PIPE_SERVER ERROR: [72057594037927937] NodeDisconnected NodeId# 5
2025-03-31T16:32:19.874610Z node 4 :KESUS_TABLET TRACE: Got
TEvServerDisconnected([4:187:2159]) 2025-03-31T16:32:20.490363Z node 6 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-31T16:32:20.490451Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-31T16:32:20.504731Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-31T16:32:20.504861Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute ... waiting for register request 2025-03-31T16:32:20.529216Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete ... blocking NKikimr::NKesus::TEvKesus::TEvRegisterProxy from TEST_ACTOR_RUNTIME to KESUS_TABLET_ACTOR ... waiting for register request (done) ... unblocking NKikimr::NKesus::TEvKesus::TEvRegisterProxy from TEST_ACTOR_RUNTIME to KESUS_TABLET_ACTOR 2025-03-31T16:32:20.529890Z node 6 :PIPE_SERVER ERROR: [72057594037927937] NodeDisconnected NodeId# 7 2025-03-31T16:32:20.529946Z node 6 :KESUS_TABLET TRACE: Got TEvServerDisconnected([6:187:2159]) >> TKesusTest::TestAttachOutOfSequenceInTx [GOOD] >> TKesusTest::TestAttachThenReRegister >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-40 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-41 >> TTopicReaderTests::TestRun_ReadMessages_Output_Base64 [GOOD] >> TKesusTest::TestAttachThenReRegister [GOOD] >> TKesusTest::TestAttachTimeoutTooBig >> KqpIndexes::PrefixedVectorIndexOrderByCosineDistanceNotNullableLevel2 [GOOD] >> TKesusTest::TestAttachTimeoutTooBig [GOOD] >> TKesusTest::TestCreateSemaphore >> KqpIndexes::SecondaryIndexInsert1 [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-DomainLoginOnly [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-DomainLoginOnly-StrictAclCheck >> TKesusTest::TestCreateSemaphore [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-23 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-24 >> KqpNewEngine::MultiStatement [GOOD] >> KqpNewEngine::MultiStatementMixPure >> AnalyzeColumnshard::AnalyzeRebootColumnShard >> DataShardWrite::ExecSQLUpsertPrepared-EvWrite+Volatile [GOOD] >> DataShardWrite::ExecSQLUpsertPrepared+EvWrite+Volatile >> TKesusTest::TestQuoterHDRRParametersValidation ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestCreateSemaphore [GOOD] Test command err: 2025-03-31T16:32:21.248184Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-31T16:32:21.248302Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-31T16:32:21.261742Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-31T16:32:21.261821Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-31T16:32:21.288461Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-31T16:32:21.289368Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:130:2156], cookie=11172787065391541808, session=0, seqNo=222) 2025-03-31T16:32:21.289557Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-03-31T16:32:21.301599Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:130:2156], cookie=11172787065391541808, session=1) 2025-03-31T16:32:21.301904Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:131:2157], cookie=18308878056813133348, session=1, seqNo=111) 2025-03-31T16:32:21.313903Z node 1 
:KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:131:2157], cookie=18308878056813133348, session=1) 2025-03-31T16:32:21.660258Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-31T16:32:21.660362Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-31T16:32:21.679639Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-31T16:32:21.679761Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-31T16:32:21.696935Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-31T16:32:21.697381Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:130:2156], cookie=111, session=0, seqNo=42) 2025-03-31T16:32:21.697566Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-03-31T16:32:21.697763Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:130:2156], cookie=222, session=1, seqNo=41) 2025-03-31T16:32:21.724640Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:130:2156], cookie=111, session=1) 2025-03-31T16:32:21.724759Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:130:2156], cookie=222, session=1) 2025-03-31T16:32:22.099589Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-31T16:32:22.099714Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-31T16:32:22.118020Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-31T16:32:22.118279Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-31T16:32:22.134539Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-31T16:32:22.135031Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:132:2158], cookie=12790577277655192945, session=0, seqNo=0) 2025-03-31T16:32:22.135191Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-03-31T16:32:22.157814Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:132:2158], cookie=12790577277655192945, session=1) 2025-03-31T16:32:22.159466Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[3:149:2173], cookie=14008119409045326570) 2025-03-31T16:32:22.159562Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[3:149:2173], cookie=14008119409045326570) 2025-03-31T16:32:22.543812Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-31T16:32:22.543903Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-31T16:32:22.559590Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-31T16:32:22.559721Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-31T16:32:22.573249Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-31T16:32:22.968216Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-31T16:32:22.968324Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-31T16:32:22.988172Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-31T16:32:22.988479Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-31T16:32:23.012023Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-31T16:32:23.012421Z node 5 :KESUS_TABLET 
DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:132:2158], cookie=2349582058466148821, session=0, seqNo=0) 2025-03-31T16:32:23.012541Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-03-31T16:32:23.024239Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:132:2158], cookie=2349582058466148821, session=1) 2025-03-31T16:32:23.024552Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:132:2158], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-03-31T16:32:23.024736Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-03-31T16:32:23.024834Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-03-31T16:32:23.036720Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:132:2158], cookie=111) 2025-03-31T16:32:23.037393Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:146:2170], cookie=17435553880742390568, name="Sem1", limit=42) 2025-03-31T16:32:23.037495Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 2 "Sem1" 2025-03-31T16:32:23.049484Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:146:2170], cookie=17435553880742390568) 2025-03-31T16:32:23.050016Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:151:2175], cookie=14818395304580184106, name="Sem1", limit=42) 2025-03-31T16:32:23.062179Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:151:2175], cookie=14818395304580184106) 2025-03-31T16:32:23.062724Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:156:2180], cookie=16542739871303890330, name="Sem1", limit=51) 2025-03-31T16:32:23.074751Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:156:2180], cookie=16542739871303890330) 2025-03-31T16:32:23.075305Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:161:2185], cookie=12260426505724811795, name="Lock1", limit=42) 2025-03-31T16:32:23.087590Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:161:2185], cookie=12260426505724811795) 2025-03-31T16:32:23.088225Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:166:2190], cookie=4639527538047703877, name="Lock1", limit=18446744073709551615) 2025-03-31T16:32:23.100411Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:166:2190], cookie=4639527538047703877) 2025-03-31T16:32:23.101085Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:171:2195], cookie=12463949122580632075, name="Sem1") 2025-03-31T16:32:23.101188Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:171:2195], cookie=12463949122580632075) 2025-03-31T16:32:23.101771Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:174:2198], cookie=16265197888290860876, name="Sem2") 2025-03-31T16:32:23.101927Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:174:2198], cookie=16265197888290860876) 2025-03-31T16:32:23.113413Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-31T16:32:23.113520Z node 
5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute
2025-03-31T16:32:23.114013Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete
2025-03-31T16:32:23.114534Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute
2025-03-31T16:32:23.161948Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete
2025-03-31T16:32:23.162095Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1
2025-03-31T16:32:23.162420Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:214:2228], cookie=17319928972608116940, name="Sem1")
2025-03-31T16:32:23.162498Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:214:2228], cookie=17319928972608116940)
2025-03-31T16:32:23.163058Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:221:2234], cookie=13187124837641747333, name="Sem2")
2025-03-31T16:32:23.163150Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:221:2234], cookie=13187124837641747333)
------- [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicReaderTests::TestRun_ReadMessages_Output_Base64 [GOOD]
Test command err:
=== Starting PQ server ===
Server->StartServer(false);
2025-03-31T16:31:41.209366Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012204477376961:2075];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:31:41.209427Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
2025-03-31T16:31:41.258720Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012207035778504:2073];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:31:41.258833Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0006fa/r3tmp/tmpUo5Kul/pdisk_1.dat
2025-03-31T16:31:41.441302Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created
2025-03-31T16:31:41.439452Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created
2025-03-31T16:31:41.696055Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:31:41.696492Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:31:41.699168Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:31:41.699266Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:31:41.702947Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-31T16:31:41.704172Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2
2025-03-31T16:31:41.705119Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-31T16:31:41.735985Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 29220, node 1
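Before the TTopicReaderTests log continues: the TKesusTest::TestCreateSemaphore trace that ends just above the header shows sessions attaching, an ephemeral semaphore ("Lock1") appearing through an acquire with count=18446744073709551615, and explicit TTxSemaphoreCreate calls, where repeat creates of "Sem1" with the same or a different limit are answered without creating a second resource. From a client, the equivalent flow goes through the coordination SDK; a minimal sketch, assuming the in-tree C++ client (NYdb::NCoordination), with the "/Root/kesus" path hypothetical and method names from memory:

#include <ydb/public/sdk/cpp/client/ydb_coordination/coordination.h>

using namespace NYdb;
using namespace NYdb::NCoordination;

// Sketch only: mirrors the semaphore operations visible in the trace above.
void SemaphoreDemo(TDriver& driver) {
    TClient client(driver);
    auto session = client.StartSession("/Root/kesus").GetValueSync().GetResult();

    // Explicit semaphore with limit=42, as in TTxSemaphoreCreate for "Sem1".
    session.CreateSemaphore("Sem1", /*limit=*/42).GetValueSync();

    // An exclusive acquire (count == 2^64 - 1) on a missing name creates an
    // ephemeral semaphore, which is how "Lock1" appears in the trace.
    session.AcquireSemaphore("Lock1",
        TAcquireSemaphoreSettings()
            .Count(18446744073709551615ull)
            .Ephemeral(true)).GetValueSync();
}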
2025-03-31T16:31:41.773073Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:31:41.773760Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:31:41.891209Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/0l5c/0006fa/r3tmp/yandexdtBa5C.tmp 2025-03-31T16:31:41.891252Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/0l5c/0006fa/r3tmp/yandexdtBa5C.tmp 2025-03-31T16:31:41.892232Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/0l5c/0006fa/r3tmp/yandexdtBa5C.tmp 2025-03-31T16:31:41.892447Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:31:42.114942Z INFO: TTestServer started on Port 26680 GrpcPort 29220 TClient is connected to server localhost:26680 PQClient connected to localhost:29220 === TenantModeEnabled() = 0 === Init PQ - start server on port 29220 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
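The TClient::Ls request/response pairs in these startup blocks are raw SchemeShard describes; the same PathDescription is available through the public scheme client. A small sketch, assuming the in-tree C++ SDK (NYdb::NScheme), names from memory:

#include <util/stream/output.h>
#include <ydb/public/sdk/cpp/client/ydb_scheme/scheme.h>

using namespace NYdb;
using namespace NYdb::NScheme;

// Sketch: fetch the PathDescription that TClient::Ls prints for "/Root".
void DescribeRoot(TDriver& driver) {
    TSchemeClient client(driver);
    auto result = client.DescribePath("/Root").GetValueSync();
    if (result.IsSuccess()) {
        const TSchemeEntry& entry = result.GetEntry();
        // The trace shows Owner: "root@builtin" and an empty ACL.
        Cout << entry.Name << " owned by " << entry.Owner << Endl;
    }
}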
2025-03-31T16:31:42.508812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-03-31T16:31:42.511833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:31:42.512930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-03-31T16:31:42.518483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-31T16:31:42.518621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:31:42.529354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-03-31T16:31:42.531295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-03-31T16:31:42.531531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:31:42.531575Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-03-31T16:31:42.531662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-03-31T16:31:42.531674Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-03-31T16:31:42.534701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:31:42.534746Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-03-31T16:31:42.534761Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-03-31T16:31:42.537095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:31:42.537139Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:31:42.537188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-31T16:31:42.537224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-03-31T16:31:42.543336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:31:42.543773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:31:42.543795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2025-03-31T16:31:42.543823Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:31:42.547854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-03-31T16:31:42.551207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-03-31T16:31:42.551484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743438702593, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-31T16:31:42.551656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743438702593 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-03-31T16:31:42.551712Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-31T16:31:42.555549Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-03-31T16:31:42.555603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-31T16:31:42.555854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-03-31T16:31:42.555926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, 
LocalPathId: 1], at schemeshard: 72057594046644480 2025-03-31T16:31:42.560901Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-03-31T16:31:42.560931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-03-31T16:31:42.561142Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-03-31T16:31:42.561171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7488012204477377690:2438], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1 2025-03-31T16:31:42.561212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:31:42.561294Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-03-31T16:31:42.561401Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-03-31T16:31:42.561415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-03-31T16:31:42.561763Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-03-31T16:31:42.561819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-03-31T16:31:42.561846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 1/1, is published: false 2025-03-31T16:31:42.561862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-03-31T16:31:42.561876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-03-31T16:31:42.561891Z node 1 :FLAT_TX_SCHEMESHA ... 
consumer shared/cli session shared/cli_5_1_5605982857107585471_v1 assign: record# { Partition: 0 TabletId: 72075186224037897 Topic: "rt3.dc1--topic1" Generation: 1 Step: 1 Session: "shared/cli_5_1_5605982857107585471_v1" ClientId: "cli" PipeClient { RawX1: 7488012372193333516 RawX2: 4503621102209575 } Path: "/Root/PQ/rt3.dc1--topic1" } 2025-03-31T16:32:20.467201Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_5605982857107585471_v1 grpc read done: success# 1, data# { read_request { bytes_size: 52428800 } } 2025-03-31T16:32:20.467210Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_5605982857107585471_v1 INITING TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) 2025-03-31T16:32:20.467373Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_5605982857107585471_v1 got read request: guid# 4a964df6-68371afa-e61f6794-b541bd22 2025-03-31T16:32:20.467936Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_5605982857107585471_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) pipe restart attempt 0 pipe creation result: OK TabletId: 72075186224037897 Generation: 1, pipe: [5:7488012372193333519:2602] 2025-03-31T16:32:20.468154Z node 6 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: shared/cli_5_1_5605982857107585471_v1:1 with generation 1 2025-03-31T16:32:20.472437Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_5605982857107585471_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 0 event { CmdGetClientOffsetResult { Offset: 0 EndOffset: 3 SizeLag: 409 WriteTimestampEstimateMS: 1743438740455 } Cookie: 18446744073709551615 } 2025-03-31T16:32:20.472497Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_5605982857107585471_v1 INIT DONE TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 readOffset 0 committedOffset 0 2025-03-31T16:32:20.472623Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_5605982857107585471_v1 sending to client partition status 2025-03-31T16:32:20.473457Z :INFO: [] [] [e8bb9b2f-85c4842e-92e8376a-4d74822d] [] Confirm partition stream create. Partition stream id: 1. Cluster: "-". Topic: "/topic1". Partition: 0. 
Read offset: (NULL) 2025-03-31T16:32:20.474086Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_5605982857107585471_v1 grpc read done: success# 1, data# { start_partition_session_response { partition_session_id: 1 } } 2025-03-31T16:32:20.474264Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_5605982857107585471_v1 got StartRead from client: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 0, commitOffset# (empty maybe) 2025-03-31T16:32:20.474329Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_5605982857107585471_v1 Start reading TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 readOffset 0 committedOffset 0 clientCommitOffset (empty maybe) clientReadOffset 0 2025-03-31T16:32:20.474366Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_5605982857107585471_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) ready for read with readOffset 0 endOffset 3 2025-03-31T16:32:20.474458Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_5605982857107585471_v1 partition ready for read: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 0, endOffset# 3, WTime# 0, sizeLag# 409 2025-03-31T16:32:20.474483Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_5605982857107585471_v1TEvPartitionReady. Aval parts: 1 2025-03-31T16:32:20.474535Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_5605982857107585471_v1 performing read request: guid# 1d2e8009-d121fe81-b9af082b-4c752f59, from# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), count# 3, size# 490, partitionsAsked# 1, maxTimeLag# 0ms 2025-03-31T16:32:20.474636Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_5605982857107585471_v1 READ FROM TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1)maxCount 3 maxSize 490 maxTimeLagMs 0 readTimestampMs 0 readOffset 0 EndOffset 3 ClientCommitOffset 0 committedOffset 0 Guid 1d2e8009-d121fe81-b9af082b-4c752f59 2025-03-31T16:32:20.476159Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_5605982857107585471_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 3 Result { Offset: 0 Data: "... 79 bytes ..." SourceId: "\000source1" SeqNo: 1 WriteTimestampMS: 1743438740352 CreateTimestampMS: 1743438740350 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 1 Data: "... 79 bytes ..." SourceId: "\000source1" SeqNo: 2 WriteTimestampMS: 1743438740358 CreateTimestampMS: 1743438740350 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 2 Data: "... 79 bytes ..." 
SourceId: "\000source1" SeqNo: 3 WriteTimestampMS: 1743438740416 CreateTimestampMS: 1743438740350 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 43 RealReadOffset: 2 WaitQuotaTimeMs: 0 EndOffset: 3 StartOffset: 0 } Cookie: 0 } 2025-03-31T16:32:20.476378Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_5605982857107585471_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset3 2025-03-31T16:32:20.476427Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_5605982857107585471_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 ReadOffset 3 ReadGuid 1d2e8009-d121fe81-b9af082b-4c752f59 has messages 1 2025-03-31T16:32:20.476560Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_5605982857107585471_v1 read done: guid# 1d2e8009-d121fe81-b9af082b-4c752f59, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), size# 490 2025-03-31T16:32:20.476607Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_5605982857107585471_v1 response to read: guid# 1d2e8009-d121fe81-b9af082b-4c752f59 2025-03-31T16:32:20.476870Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_5605982857107585471_v1 Process answer. Aval parts: 0 2025-03-31T16:32:20.477258Z :DEBUG: [] [] [e8bb9b2f-85c4842e-92e8376a-4d74822d] [] Got ReadResponse, serverBytesSize = 490, now ReadSizeBudget = 0, ReadSizeServerDelta = 52428310 2025-03-31T16:32:20.477401Z :DEBUG: [] [] [e8bb9b2f-85c4842e-92e8376a-4d74822d] [] In ContinueReadingDataImpl, ReadSizeBudget = 0, ReadSizeServerDelta = 52428310 2025-03-31T16:32:20.477690Z :DEBUG: [] Decompression task done. Partition/PartitionSessionId: 1 (0-2) 2025-03-31T16:32:20.477752Z :DEBUG: [] [] [e8bb9b2f-85c4842e-92e8376a-4d74822d] [] Returning serverBytesSize = 490 to budget 2025-03-31T16:32:20.477808Z :DEBUG: [] [] [e8bb9b2f-85c4842e-92e8376a-4d74822d] [] In ContinueReadingDataImpl, ReadSizeBudget = 490, ReadSizeServerDelta = 52428310 2025-03-31T16:32:20.478088Z :DEBUG: [] [] [e8bb9b2f-85c4842e-92e8376a-4d74822d] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2025-03-31T16:32:20.478250Z :DEBUG: [] Take Data. Partition 0. Read: {0, 0} (0-0) 2025-03-31T16:32:20.478317Z :DEBUG: [] Take Data. Partition 0. Read: {1, 0} (1-1) 2025-03-31T16:32:20.478351Z :DEBUG: [] Take Data. Partition 0. Read: {2, 0} (2-2) 2025-03-31T16:32:20.478362Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_5605982857107585471_v1 grpc read done: success# 1, data# { read_request { bytes_size: 490 } } 2025-03-31T16:32:20.478398Z :DEBUG: [] [] [e8bb9b2f-85c4842e-92e8376a-4d74822d] [] The application data is transferred to the client. Number of messages 3, size 24 bytes 2025-03-31T16:32:20.478454Z :DEBUG: [] [] [e8bb9b2f-85c4842e-92e8376a-4d74822d] [] Returning serverBytesSize = 0 to budget 2025-03-31T16:32:20.478460Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_5605982857107585471_v1 got read request: guid# d0c9d70b-cac6cc19-143d2c2c-21760e79 2025-03-31T16:32:20.478670Z :INFO: [] [] [e8bb9b2f-85c4842e-92e8376a-4d74822d] Closing read session. 
Close timeout: 0.000000s 2025-03-31T16:32:20.478721Z :INFO: [] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:/topic1:0:1:2:0 2025-03-31T16:32:20.478767Z :INFO: [] [] [e8bb9b2f-85c4842e-92e8376a-4d74822d] Counters: { Errors: 0 CurrentSessionLifetimeMs: 26 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 24 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-31T16:32:20.478869Z :NOTICE: [] [] [e8bb9b2f-85c4842e-92e8376a-4d74822d] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
<main>: Error: Close with zero timeout " } 2025-03-31T16:32:20.478911Z :DEBUG: [] [] [e8bb9b2f-85c4842e-92e8376a-4d74822d] [] Abort session to cluster 2025-03-31T16:32:20.479772Z :NOTICE: [] [] [e8bb9b2f-85c4842e-92e8376a-4d74822d] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
<main>: Error: Aborted " } 2025-03-31T16:32:20.479808Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_5605982857107585471_v1 grpc read done: success# 0, data# { } 2025-03-31T16:32:20.479835Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_5605982857107585471_v1 grpc read failed 2025-03-31T16:32:20.479854Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_5605982857107585471_v1 grpc closed 2025-03-31T16:32:20.479889Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_5605982857107585471_v1 is DEAD 2025-03-31T16:32:20.480284Z node 6 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/cli_5_1_5605982857107585471_v1 2025-03-31T16:32:20.483473Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037898][rt3.dc1--topic1] pipe [5:7488012372193333516:2599] disconnected; active server actors: 1 2025-03-31T16:32:20.483516Z node 5 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037898][rt3.dc1--topic1] pipe [5:7488012372193333516:2599] client cli disconnected session shared/cli_5_1_5605982857107585471_v1 2025-03-31T16:32:21.125321Z node 5 :KQP_COMPUTE WARN: TxId: 281474976710696, task: 1, CA Id [5:7488012376488300867:2606]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 0 2025-03-31T16:32:21.158929Z node 5 :KQP_COMPUTE WARN: TxId: 281474976710696, task: 1, CA Id [5:7488012376488300867:2606]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 1 2025-03-31T16:32:21.210078Z node 5 :KQP_COMPUTE WARN: TxId: 281474976710696, task: 1, CA Id [5:7488012376488300867:2606]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 1 2025-03-31T16:32:21.276489Z node 5 :KQP_COMPUTE WARN: TxId: 281474976710696, task: 1, CA Id [5:7488012376488300867:2606]. 
Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::PrefixedVectorIndexOrderByCosineDistanceNotNullableLevel2 [GOOD] Test command err: Trying to start YDB, gRPC: 16759, MsgBus: 9919 2025-03-31T16:30:49.839660Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011980158282576:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:49.839720Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000dbc/r3tmp/tmp4aPOqC/pdisk_1.dat 2025-03-31T16:30:50.184803Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16759, node 1 2025-03-31T16:30:50.246492Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:30:50.251327Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:30:50.283502Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:30:50.319307Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:30:50.319331Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:30:50.319343Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:30:50.319461Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9919 TClient is connected to server localhost:9919 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:30:50.927125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:50.967621Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:30:50.989838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:30:51.152910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:51.320104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:51.387165Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:30:53.305237Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011997338153535:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:53.305376Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:53.602242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:30:53.641957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:30:53.675658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:30:53.704589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:30:53.737730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:30:53.776139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:30:53.859336Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011997338154051:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:53.859431Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:53.859944Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011997338154056:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:53.864197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:30:53.879643Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011997338154058:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:30:53.967479Z node 1 :TX_PROXY ERROR: Actor# [1:7488011997338154115:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:30:54.839744Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011980158282576:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:54.839810Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:30:54.971064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:30:55.316281Z node 1 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_CREATE_INDEX_BUILD: DoExecute TxId: 281474976710673 DatabaseName: "/Root" Settings { source_path: "/Root/TestTable" index { name: "index" index_columns: "user" index_columns: "emb" global_vector_kmeans_tree_index { vector_settings { settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_UINT8 vector_dimension: 2 } clusters: 2 levels: 1 } } } } 2025-03-31T16:30:55.317414Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710673 2025-03-31T16:30:55.317525Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Locking, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:7488012005928089237:2521], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-31T16:30:55.317667Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, BuildIndexId: 281474976710673, txId# 281474976715757 2025-03-31T16:30:55.317742Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, buildInfo: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Locking, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:7488012005928089237:2521], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, 
InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-31T16:30:55.318067Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710673 2025-03-31T16:30:55.318107Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Locking, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:7488012005928089237:2521], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, Loc ... e: id# 281474976710673 2025-03-31T16:31:52.079901Z node 3 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [3:7488012249420174634:2520], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 60, upload bytes: 1500, read rows: 69, read bytes: 2322 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-31T16:31:52.080042Z node 3 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, BuildIndexId: 281474976710673, txId# 281474976715766 2025-03-31T16:31:52.080143Z node 3 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, buildInfo: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [3:7488012249420174634:2520], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 60, upload bytes: 1500, read rows: 69, read bytes: 2322 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read 
bytes: 0 }} 2025-03-31T16:31:52.080538Z node 3 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710673 2025-03-31T16:31:52.080606Z node 3 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [3:7488012249420174634:2520], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976715766, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 60, upload bytes: 1500, read rows: 69, read bytes: 2322 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-31T16:31:52.081783Z node 3 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, BuildIndexId: 281474976710673, cookie: 281474976710673, txId: 281474976715766, status: StatusAccepted 2025-03-31T16:31:52.081934Z node 3 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, buildInfo: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [3:7488012249420174634:2520], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976715766, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 60, upload bytes: 1500, read rows: 69, read bytes: 2322 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }}, record: Status: StatusAccepted TxId: 281474976715766 SchemeshardId: 72057594046644480 PathId: 16 2025-03-31T16:31:52.083070Z node 3 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710673 2025-03-31T16:31:52.083169Z node 3 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [3:7488012249420174634:2520], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, 
LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976715766, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 60, upload bytes: 1500, read rows: 69, read bytes: 2322 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-31T16:31:52.086736Z node 3 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976715766, buildInfoId: 281474976710673 2025-03-31T16:31:52.086824Z node 3 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976715766, buildInfo: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [3:7488012249420174634:2520], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976715766, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 60, upload bytes: 1500, read rows: 69, read bytes: 2322 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-31T16:31:52.089055Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037928 not found 2025-03-31T16:31:52.089102Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037927 not found 2025-03-31T16:31:52.089119Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037926 not found 2025-03-31T16:31:52.089138Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037929 not found 2025-03-31T16:31:52.089624Z node 3 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710673 2025-03-31T16:31:52.089705Z node 3 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [3:7488012249420174634:2520], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976715766, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, 
ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 60, upload bytes: 1500, read rows: 69, read bytes: 2322 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-31T16:31:52.089746Z node 3 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-03-31T16:31:52.090135Z node 3 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710673 2025-03-31T16:31:52.090209Z node 3 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [3:7488012249420174634:2520], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976715766, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 60, upload bytes: 1500, read rows: 69, read bytes: 2322 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-31T16:31:52.090609Z node 3 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/Root" IndexBuildId: 281474976710673 2025-03-31T16:31:52.090844Z node 3 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 281474976710673 State: STATE_DONE Settings { source_path: "/Root/TestTable" index { name: "index" index_columns: "user" index_columns: "emb" global_vector_kmeans_tree_index { } } max_batch_rows: 50000 max_batch_bytes: 8388608 max_shards_in_flight: 32 max_retries_upload_batch: 50 } Progress: 100 } 2025-03-31T16:32:01.000224Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:32:01.000274Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:32:01.798836Z node 3 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TTxBilling, id# 281474976710673 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::SecondaryIndexInsert1 [GOOD] Test command err: Trying to start YDB, gRPC: 32079, MsgBus: 20525 2025-03-31T16:31:15.447755Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012095174561453:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:15.447837Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d37/r3tmp/tmpgZiflm/pdisk_1.dat 2025-03-31T16:31:15.851683Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32079, node 1 2025-03-31T16:31:15.884511Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:15.884591Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Disconnected -> Connecting 2025-03-31T16:31:15.885557Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:31:15.955434Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:15.955458Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:15.955472Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:15.956283Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20525 TClient is connected to server localhost:20525 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:31:16.474104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:16.486723Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:31:16.492235Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:16.622374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:16.789216Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:16.857972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:31:18.698487Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012108059465113:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:18.698659Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:19.001676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:31:19.035543Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:31:19.066726Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:31:19.097226Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:31:19.138346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:31:19.212048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:31:19.274079Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012112354432928:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:19.274171Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:19.274381Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012112354432933:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:31:19.278203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:31:19.288988Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012112354432935:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:31:19.355192Z node 1 :TX_PROXY ERROR: Actor# [1:7488012112354432987:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:31:20.304517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:31:20.448082Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488012095174561453:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:20.448163Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:31:20.712524Z node 1 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_CREATE_INDEX_BUILD: DoExecute TxId: 281474976710673 DatabaseName: "/Root" Settings { source_path: "/Root/TestTable" index { name: "index" index_columns: "user" index_columns: "emb" global_vector_kmeans_tree_index { vector_settings { settings { metric: SIMILARITY_COSINE vector_type: VECTOR_TYPE_UINT8 vector_dimension: 2 } clusters: 2 levels: 1 } } } } 2025-03-31T16:31:20.713581Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710673 2025-03-31T16:31:20.713696Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Locking, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:7488012116649400821:2520], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-31T16:31:20.713839Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, BuildIndexId: 281474976710673, txId# 281474976715757 2025-03-31T16:31:20.713914Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, buildInfo: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Locking, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:7488012116649400821:2520], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, 
InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-31T16:31:20.714220Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710673 2025-03-31T16:31:20.714256Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Locking, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:7488012116649400821:2520], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0 ... tes: 1500, read rows: 69, read bytes: 2322 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-31T16:31:47.305476Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710673 2025-03-31T16:31:47.305559Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7488012227478742394:2520], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976715766, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 60, upload bytes: 1500, read rows: 69, read bytes: 2322 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-31T16:31:47.305606Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-03-31T16:31:47.305853Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710673 2025-03-31T16:31:47.305901Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7488012227478742394:2520], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, 
SnapshotStepId: 0, ApplyTxId: 281474976715765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976715766, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 60, upload bytes: 1500, read rows: 69, read bytes: 2322 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-31T16:31:47.306145Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/Root" IndexBuildId: 281474976710673 2025-03-31T16:31:47.306330Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 281474976710673 State: STATE_DONE Settings { source_path: "/Root/TestTable" index { name: "index" index_columns: "user" index_columns: "emb" global_vector_kmeans_tree_index { } } max_batch_rows: 50000 max_batch_bytes: 8388608 max_shards_in_flight: 32 max_retries_upload_batch: 50 } Progress: 100 } 2025-03-31T16:31:56.576305Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:31:56.576334Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:31:56.973217Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TTxBilling, id# 281474976710673 Trying to start YDB, gRPC: 10782, MsgBus: 26232 2025-03-31T16:32:15.464495Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488012350323014999:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:15.464575Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000d37/r3tmp/tmpuyxHRO/pdisk_1.dat 2025-03-31T16:32:15.573678Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:32:15.600882Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:15.600988Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:32:15.602247Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10782, node 3 2025-03-31T16:32:15.681988Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:32:15.682018Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:32:15.682029Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:32:15.682200Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26232 TClient is connected to server localhost:26232 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:32:16.155403Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:16.168812Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:16.256865Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:16.475120Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:16.577275Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:19.728201Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488012367502885965:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:19.728290Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:19.778602Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:32:19.811211Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:32:19.847492Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:32:19.884069Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:32:19.918304Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:32:19.989797Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:32:20.035999Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488012371797853773:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:20.036080Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:20.036153Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488012371797853778:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:20.041300Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:32:20.052074Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488012371797853780:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:32:20.116786Z node 3 :TX_PROXY ERROR: Actor# [3:7488012371797853833:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:32:20.464713Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488012350323014999:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:20.464800Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:32:21.247249Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:32:21.793623Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill >> TKesusTest::TestQuoterHDRRParametersValidation [GOOD] >> TKesusTest::TestQuoterAccountResourcesOnDemand |95.1%| [TA] $(B)/ydb/public/lib/ydb_cli/topic/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TInterconnectTest::TestNotifyUndelivered >> KqpTx::CommitPrepared [GOOD] |95.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> KqpNewEngine::EmptyMapWithBroadcast [GOOD] >> KqpNewEngine::FlatMapLambdaInnerPrecompute >> TraverseColumnShard::TraverseColumnTableHiveDistributionAbsentNodes >> KqpLocks::TwoPhaseTx >> KqpRanges::UpdateWhereInBigLiteralList [GOOD] >> KqpRanges::UpdateWhereInBigLiteralListPrefix >> TInterconnectTest::TestNotifyUndelivered [GOOD] >> TInterconnectTest::TestNotifyUndeliveredOnMissedActor >> KqpSort::TopSortExpr [GOOD] >> KqpSort::TopSortExprPk >> TInterconnectTest::TestNotifyUndeliveredOnMissedActor [GOOD] >> TInterconnectTest::TestPreSerializedBlobEventUpToMebibytes >> KqpNewEngine::DeleteWithInputMultiConsumption-UseSink [GOOD] >> KqpNewEngine::DeleteWithInputMultiConsumptionLimit+UseSink >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-17 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-18 >> KqpNewEngine::LocksMultiShardOk [GOOD] >> KqpNewEngine::LocksNoMutations >> TKesusTest::TestQuoterAccountResourcesOnDemand [GOOD] >> TKesusTest::TestQuoterAccountResourcesPaced ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::CommitPrepared [GOOD] Test command err: Trying to start YDB, gRPC: 25827, MsgBus: 64281 2025-03-31T16:32:15.332872Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012353447901222:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:15.333178Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001285/r3tmp/tmpcGfbJd/pdisk_1.dat 2025-03-31T16:32:15.671910Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:32:15.675462Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:15.675615Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:32:15.680454Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25827, node 1 2025-03-31T16:32:15.732777Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:32:15.732797Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:32:15.732804Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:32:15.732961Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64281 TClient is connected to server localhost:64281 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:32:16.226708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:16.261736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:16.394781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:16.563665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:16.637436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:18.113352Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012366332804854:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:18.113448Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:18.373196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:32:18.401472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:32:18.430545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:32:18.464006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:32:18.496625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:32:18.567034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:32:18.614169Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012366332805371:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:18.614257Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:18.614341Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012366332805376:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:18.618317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:32:18.628808Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012366332805378:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:32:18.715463Z node 1 :TX_PROXY ERROR: Actor# [1:7488012366332805431:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 18918, MsgBus: 22119 2025-03-31T16:32:20.301367Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012372777925868:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:20.301538Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001285/r3tmp/tmpOstNwZ/pdisk_1.dat 2025-03-31T16:32:20.402358Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:32:20.426042Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:20.426140Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:32:20.428408Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18918, node 2 2025-03-31T16:32:20.458916Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:32:20.458940Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:32:20.458947Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:32:20.459104Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22119 TClient is connected to server localhost:22119 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:32:20.824085Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:20.841263Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:32:20.886734Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:20.999198Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:21.059656Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:23.124714Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012385662829518:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:23.124802Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:23.167110Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:32:23.193838Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:32:23.236501Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:32:23.262681Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:32:23.292301Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:32:23.327058Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:32:23.368092Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012385662830028:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:23.368179Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:23.368185Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012385662830033:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:23.371680Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:32:23.381071Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488012385662830035:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:32:23.444333Z node 2 :TX_PROXY ERROR: Actor# [2:7488012385662830088:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-65 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-66 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-17 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-18 >> TInterconnectTest::TestPreSerializedBlobEventUpToMebibytes [GOOD] >> TInterconnectTest::TestPingPongThroughSubChannel >> KqpRanges::UpdateWhereInWithNull [GOOD] >> KqpRanges::UpdateWhereInMultipleUpdate >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-65 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-66 >> KqpNewEngine::BlindWrite [GOOD] >> KqpNewEngine::BlindWriteParameters >> KqpAgg::AggWithLookup >> TInterconnectTest::TestPingPongThroughSubChannel [GOOD] >> LdapAuthProviderTest_StartTls::LdapRefreshGroupsInfoWithError [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-41 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-42 |95.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestPingPongThroughSubChannel [GOOD] >> TestProtocols::TestConnectProtocol ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_StartTls::LdapRefreshGroupsInfoWithError [GOOD] Test command err: 2025-03-31T16:31:35.807249Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012179448905480:2081];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:35.808605Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001728/r3tmp/tmp4E3h5g/pdisk_1.dat 2025-03-31T16:31:36.245392Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:31:36.269716Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:36.269856Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:36.271526Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9630, node 1 2025-03-31T16:31:36.415230Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:36.415255Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:36.415261Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:36.415330Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:31:36.844522Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-31T16:31:36.848134Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, 
DomainLoginOnly 1 2025-03-31T16:31:36.848176Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:31:36.849060Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:19198, port: 19198 2025-03-31T16:31:36.849860Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-31T16:31:36.855911Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-31T16:31:36.906316Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****CViw (5AC652F4) () has now valid token of ldapuser@ldap 2025-03-31T16:31:39.092251Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012197226867397:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:39.092369Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001728/r3tmp/tmpMMyiex/pdisk_1.dat 2025-03-31T16:31:39.248197Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:31:39.268500Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:39.268563Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:39.270010Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26667, node 2 2025-03-31T16:31:39.335132Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:39.335166Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:39.335173Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:39.335286Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:31:39.423403Z node 2 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-31T16:31:39.425968Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-31T16:31:39.426013Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:31:39.426873Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:6792, port: 6792 2025-03-31T16:31:39.426970Z node 2 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-03-31T16:31:39.438900Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-31T16:31:39.487630Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-31T16:31:39.488145Z node 2 :LDAP_AUTH_PROVIDER DEBUG: LDAP user ldapuser does not exist. LDAP search for filter uid=ldapuser on server ldap://localhost:6792 return no entries 2025-03-31T16:31:39.488652Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****taFw (5DF36BB5) () has now permanent error message 'Could not login via LDAP (LDAP user ldapuser does not exist. 
LDAP search for filter uid=ldapuser on server ldap://localhost:6792 return no entries)' 2025-03-31T16:31:42.709306Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488012209589153719:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:42.709398Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001728/r3tmp/tmphNupWf/pdisk_1.dat 2025-03-31T16:31:42.815458Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:31:42.838980Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:42.839060Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:42.841060Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11358, node 3 2025-03-31T16:31:42.891310Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:42.891337Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:42.891346Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:42.891503Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:31:43.155504Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-31T16:31:43.158751Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-31T16:31:43.158796Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:31:43.159502Z node 3 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:18272, port: 18272 2025-03-31T16:31:43.159570Z node 3 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-03-31T16:31:43.173736Z node 3 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-31T16:31:43.215919Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-31T16:31:43.263458Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-03-31T16:31:43.264063Z node 3 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-03-31T16:31:43.264115Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-31T16:31:43.311471Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-31T16:31:43.355439Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 
2025-03-31T16:31:43.356611Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****YfrA (147BB92D) () has now valid token of ldapuser@ldap 2025-03-31T16:31:46.720189Z node 3 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****YfrA (147BB92D) 2025-03-31T16:31:46.720293Z node 3 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:18272, port: 18272 2025-03-31T16:31:46.720373Z node 3 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-03-31T16:31:46.734890Z node 3 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-31T16:31:46.779517Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-31T16:31:46.823413Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-03-31T16:31:46.823986Z node 3 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-03-31T16:31:46.824028Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-31T16:31:46.867433Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-31T16:31:46.912321Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-31T16:31:46.913256Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****YfrA (147BB92D) () has now valid token of ldapuser@ldap 2025-03-31T16:31:47.709393Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488012209589153719:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:31:47.709490Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:31:51.722962Z node 3 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****YfrA (147BB92D) 2025-03-31T16:31:51.723111Z node 3 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:18272, port: 18272 2025-03-31T16:31:51.723184Z node 3 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-03-31T16:31:51.744605Z node 3 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-31T16:31:51.791696Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-31T16:31:51.835494Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-03-31T16:31:51.836201Z node 3 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-03-31T16:31:51.836237Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-31T16:31:51.883426Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: bas ... 
VIDER DEBUG: Try to get nested groups - tree traversal 2025-03-31T16:32:05.452016Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-31T16:32:05.499425Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-31T16:32:05.547397Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-31T16:32:05.548456Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****RVXQ (402FB064) () has now valid token of ldapuser@ldap 2025-03-31T16:32:10.035203Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7488012309659226022:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:10.035292Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:32:10.043616Z node 5 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****RVXQ (402FB064) 2025-03-31T16:32:10.043710Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:8245, port: 8245 2025-03-31T16:32:10.043789Z node 5 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-03-31T16:32:10.056857Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-31T16:32:10.103672Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-31T16:32:10.104312Z node 5 :LDAP_AUTH_PROVIDER DEBUG: LDAP user ldapuser does not exist. LDAP search for filter uid=ldapuser on server ldap://localhost:8245 return no entries 2025-03-31T16:32:10.104813Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****RVXQ (402FB064) () has now permanent error message 'Could not login via LDAP (LDAP user ldapuser does not exist. 
LDAP search for filter uid=ldapuser on server ldap://localhost:8245 return no entries)' 2025-03-31T16:32:14.046174Z node 5 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****RVXQ (402FB064) 2025-03-31T16:32:16.249740Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7488012356911165389:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:16.249813Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001728/r3tmp/tmpV7Z3uH/pdisk_1.dat 2025-03-31T16:32:16.394740Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:32:16.413629Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:16.413729Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:32:16.417929Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1879, node 6 2025-03-31T16:32:16.463648Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:32:16.463674Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:32:16.463689Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:32:16.463823Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:32:16.647284Z node 6 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-31T16:32:16.649181Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-31T16:32:16.649221Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:32:16.649878Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:20432, port: 20432 2025-03-31T16:32:16.649930Z node 6 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-03-31T16:32:16.665504Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-31T16:32:16.712788Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-31T16:32:16.713343Z node 6 :LDAP_AUTH_PROVIDER DEBUG: Could not perform search for filter uid=ldapuser on server ldap://localhost:20432. Server is busy 2025-03-31T16:32:16.713847Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****qbag (91969653) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldap://localhost:20432. 
Server is busy)' 2025-03-31T16:32:16.714239Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-31T16:32:16.714266Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:32:16.715264Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:20432, port: 20432 2025-03-31T16:32:16.715357Z node 6 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-03-31T16:32:16.727458Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-31T16:32:16.775710Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-31T16:32:16.776706Z node 6 :LDAP_AUTH_PROVIDER DEBUG: Could not perform search for filter uid=ldapuser on server ldap://localhost:20432. Server is busy 2025-03-31T16:32:16.777085Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****qbag (91969653) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldap://localhost:20432. Server is busy)' 2025-03-31T16:32:18.253266Z node 6 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****qbag (91969653) 2025-03-31T16:32:18.253661Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-31T16:32:18.253685Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:32:18.254968Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:20432, port: 20432 2025-03-31T16:32:18.255098Z node 6 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-03-31T16:32:18.265643Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-31T16:32:18.311615Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-31T16:32:18.312105Z node 6 :LDAP_AUTH_PROVIDER DEBUG: Could not perform search for filter uid=ldapuser on server ldap://localhost:20432. Server is busy 2025-03-31T16:32:18.312493Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****qbag (91969653) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldap://localhost:20432. 
Server is busy)' 2025-03-31T16:32:21.249973Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7488012356911165389:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:21.250059Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:32:22.256563Z node 6 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****qbag (91969653) 2025-03-31T16:32:22.256826Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-31T16:32:22.256850Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-31T16:32:22.257497Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:20432, port: 20432 2025-03-31T16:32:22.257608Z node 6 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-03-31T16:32:22.273349Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-31T16:32:22.315591Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-31T16:32:22.363703Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-03-31T16:32:22.364399Z node 6 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-03-31T16:32:22.364477Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-31T16:32:22.407501Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-31T16:32:22.455520Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-31T16:32:22.456781Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****qbag (91969653) () has now valid token of ldapuser@ldap 2025-03-31T16:32:25.258415Z node 6 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****qbag (91969653) 2025-03-31T16:32:25.258548Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:20432, port: 20432 2025-03-31T16:32:25.258647Z node 6 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-03-31T16:32:25.270926Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-31T16:32:25.315593Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-31T16:32:25.363434Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-03-31T16:32:25.363998Z node 6 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-03-31T16:32:25.364051Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, 
filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-31T16:32:25.407464Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-31T16:32:25.455468Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-31T16:32:25.456574Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****qbag (91969653) () has now valid token of ldapuser@ldap >> TestProtocols::TestConnectProtocol [GOOD] >> TestProtocols::TestHTTPCollected >> KqpSqlIn::SelectNotAllElements [GOOD] >> KqpSqlIn::SimpleKey_In_And_In >> TKesusTest::TestQuoterAccountResourcesPaced [GOOD] >> TKesusTest::TestQuoterAccountResourcesDeduplicateClient >> TestProtocols::TestHTTPCollected [GOOD] >> TInterconnectTest::TestTraceIdPassThrough >> DataShardWrite::ExecSQLUpsertPrepared+EvWrite+Volatile [GOOD] >> DataShardWrite::InsertImmediate |95.1%| [TA] $(B)/ydb/core/security/ldap_auth_provider/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.1%| [TA] {RESULT} $(B)/ydb/core/security/ldap_auth_provider/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TInterconnectTest::TestBlobEvent >> GenericFederatedQuery::TestFailsOnIncorrectScriptExecutionFetchToken >> TInterconnectTest::TestTraceIdPassThrough [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-24 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-49 >> TInterconnectTest::TestBlobEvent [GOOD] >> TInterconnectTest::TestBlobEvent220Bytes >> KqpNewEngine::MultiStatementMixPure [GOOD] >> KqpNewEngine::MultiEffectsOnSameTable |95.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestTraceIdPassThrough [GOOD] >> KqpNewEngine::PkSelect1 >> TInterconnectTest::TestBlobEvent220Bytes [GOOD] >> TInterconnectTest::TestAddressResolve >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-54 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-55 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-65 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-66 >> TInterconnectTest::TestAddressResolve [GOOD] >> TInterconnectTest::OldNbs >> KqpRanges::LiteralOrCompisite [GOOD] >> KqpRanges::LiteralOrCompisiteCollision >> TKesusTest::TestQuoterAccountResourcesDeduplicateClient [GOOD] >> TKesusTest::TestQuoterAccountResourcesForgetClient >> KqpLocks::TwoPhaseTx [GOOD] >> KqpLocks::MixedTxFail-useSink >> TInterconnectTest::OldFormat >> TInterconnectTest::OldNbs [GOOD] >> TInterconnectTest::OldFormat [GOOD] >> TInterconnectTest::OldFormatSuppressVersionCheckOnNew >> KqpSort::TopSortExprPk [GOOD] >> KqpSort::TopSortTableExpr >> KqpNewEngine::LocksNoMutations [GOOD] >> KqpNewEngine::LocksNoMutationsSharded >> KqpNewEngine::DeleteWithInputMultiConsumptionLimit+UseSink [GOOD] >> KqpNewEngine::DeleteWithInputMultiConsumptionLimit-UseSink >> KqpNewEngine::BlindWriteParameters [GOOD] >> KqpNewEngine::BlindWriteListParameter |95.1%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::OldNbs [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-DomainLoginOnly-StrictAclCheck [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant >> TInterconnectTest::OldFormatSuppressVersionCheckOnNew [GOOD] >> TInterconnectTest::OldFormatSuppressVersionCheckOnOld >> GenericFederatedQuery::TestFailsOnIncorrectScriptExecutionFetchToken [GOOD] >> ReadOnlyVDisk::TestStorageLoad [GOOD] >> DataShardSnapshots::VolatileSnapshotTimeout [GOOD] >> DataShardSnapshots::VolatileSnapshotTimeoutRefresh >> KqpRanges::UpdateWhereInBigLiteralListPrefix [GOOD] >> KqpRanges::UpdateWhereInFullScan+UseSink >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-66 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-67 >> TInterconnectTest::OldFormatSuppressVersionCheckOnOld [GOOD] >> TInterconnectTest::OldFormatSuppressVersionCheck >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-18 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-19 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-66 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-67 >> KqpNewEngine::FlatMapLambdaInnerPrecompute [GOOD] >> KqpNewEngine::DqSourceLiteralRange >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-18 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-19 >> TKesusTest::TestAcquireTimeout [GOOD] >> TKesusTest::TestAcquireSharedBlocked ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestStorageLoad [GOOD] Test command err: RandomSeed# 414775959206691400 Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] 2025-03-31T16:32:06.437266Z 1 00h02m38.100000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:06.440912Z 1 00h02m38.100000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:06.444704Z 1 00h02m38.100000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:06.450506Z 1 00h02m38.100000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:06.453002Z 1 00h02m38.100000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:06.479686Z 1 00h02m38.200000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:06.505043Z 1 00h02m38.300000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:06.560465Z 1 00h02m38.500000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:06.575267Z 1 00h02m38.600000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:06.860839Z 1 00h02m38.800000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:06.871145Z 1 00h02m38.900000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:06.925693Z 1 00h02m39.100000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# 
[1:5311:701] 2025-03-31T16:32:06.928412Z 1 00h02m39.100000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:06.942816Z 1 00h02m39.200000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:06.965146Z 1 00h02m39.400000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:06.975106Z 1 00h02m39.500000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:07.305158Z 1 00h02m39.700000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:07.319257Z 1 00h02m39.800000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:07.423164Z 1 00h02m40.000000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:07.529905Z 1 00h02m40.100000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:07.530421Z 1 00h02m40.100000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:07.543779Z 1 00h02m40.200000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:07.554565Z 1 00h02m40.300000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:07.566535Z 1 00h02m40.400000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:07.578503Z 1 00h02m40.500000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:07.830911Z 1 00h02m40.600000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:07.841685Z 1 00h02m40.700000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:07.874498Z 1 00h02m40.800000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:07.888643Z 1 00h02m40.900000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:08.018706Z 1 00h02m41.000000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:08.030511Z 1 00h02m41.100000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:08.033251Z 1 00h02m41.100000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:08.055842Z 1 00h02m41.300000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:08.066607Z 1 00h02m41.400000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:08.085683Z 1 00h02m41.600000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:08.126530Z 1 00h02m41.700000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:08.156178Z 1 00h02m41.900000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:08.387266Z 1 00h02m42.000000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: 
Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:08.428476Z 1 00h02m42.100000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:08.431102Z 1 00h02m42.100000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:08.455322Z 1 00h02m42.300000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:08.485031Z 1 00h02m42.400000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:08.747280Z 1 00h02m42.600000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:08.759362Z 1 00h02m42.700000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:08.779602Z 1 00h02m42.900000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:08.792675Z 1 00h02m43.000000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:08.845237Z 1 00h02m43.100000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:08.847224Z 1 00h02m43.100000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:08.862332Z 1 00h02m43.200000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:08.968720Z 1 00h02m43.300000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:09.143747Z 1 00h02m43.400000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:09.206039Z 1 00h02m43.500000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:09.216256Z 1 00h02m43.600000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:09.277385Z 1 00h02m43.700000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:09.290144Z 1 00h02m43.800000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:09.301329Z 1 00h02m43.900000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:09.378574Z 1 00h02m44.000000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:09.497599Z 1 00h02m44.100000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:09.509485Z 1 00h02m44.200000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:09.523003Z 1 00h02m44.300000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:09.548589Z 1 00h02m44.500000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:09.784631Z 1 00h02m44.600000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:09.814130Z 1 00h02m44.800000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:09.876082Z 1 00h02m44.900000s 
:BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:09.943336Z 1 00h02m45.100000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:09.961515Z 1 00h02m45.200000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:09.974905Z 1 00h02m45.300000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:10.128701Z 1 00h02m45.500000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:10.326972Z 1 00h02m45.600000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:10.357599Z 1 00h02m45.800000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:10.371289Z 1 00h02m45.900000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:10.519702Z 1 00h02m46.100000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:10.519875Z 1 00h02m46.100000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:10.523127Z 1 00h02m46.100000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] 2025-03-31T16:32:12.228515Z 1 00h05m16.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:12.228712Z 2 00h05m16.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5318:708] 2025-03-31T16:32:12.232649Z 1 00h05m16.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:12.232874Z 2 00h05m16.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5318:708] 2025-03-31T16:32:12.237844Z 1 00h05m16.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:12.238038Z 2 00h05m16.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5318:708] 2025-03-31T16:32:12.244677Z 1 00h05m16.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:12.245116Z 2 00h05m16.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5318:708] 2025-03-31T16:32:12.245364Z 1 00h05m16.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:12.246681Z 2 00h05m16.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5318:708] 2025-03-31T16:32:12.264614Z 1 00h05m16.260512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:12.264828Z 2 00h05m16.26 ... 
25-03-31T16:32:18.822686Z 1 00h10m24.261536s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-31T16:32:18.822886Z 2 00h10m24.261536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5318:708] 2025-03-31T16:32:18.822946Z 3 00h10m24.261536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5325:715] 2025-03-31T16:32:18.822995Z 4 00h10m24.261536s :BS_SKELETON ERROR: VDISK[82000000:_:0:3:0]: Unavailable in read-only Sender# [1:5332:722] 2025-03-31T16:32:18.823051Z 5 00h10m24.261536s :BS_SKELETON ERROR: VDISK[82000000:_:0:4:0]: Unavailable in read-only Sender# [1:5339:729] 2025-03-31T16:32:18.823119Z 6 00h10m24.261536s :BS_SKELETON ERROR: VDISK[82000000:_:0:5:0]: Unavailable in read-only Sender# [1:5346:736] 2025-03-31T16:32:18.823171Z 7 00h10m24.261536s :BS_SKELETON ERROR: VDISK[82000000:_:0:6:0]: Unavailable in read-only Sender# [1:5353:743] 2025-03-31T16:32:18.823223Z 8 00h10m24.261536s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-31T16:32:18.823672Z 1 00h10m24.261536s :BS_LOAD_TEST ERROR: TabletId# 1 Generation# 4 recieved not OK, msg# TEvBlockResult {Status# ERROR ErrorReason# "Status# ERROR From# [82000000:1:0:2:0] NodeId# 3 QuorumTracker# {Erroneous# 00000111 Successful# 00000000}"} Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] Setting VDisk read-only to 0 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] Setting VDisk read-only to 0 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] Setting VDisk read-only to 0 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] Setting VDisk read-only to 0 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] Setting VDisk read-only to 0 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] Setting VDisk read-only to 0 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] 2025-03-31T16:32:21.967342Z 8 00h20m54.412560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-31T16:32:21.970625Z 8 00h20m54.412560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-31T16:32:21.981092Z 8 00h20m54.412560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-31T16:32:21.988050Z 8 00h20m54.412560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-31T16:32:21.989543Z 8 00h20m54.412560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-31T16:32:22.014853Z 8 00h20m54.512560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-31T16:32:22.036554Z 8 00h20m54.612560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-31T16:32:22.085982Z 8 00h20m54.812560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-31T16:32:22.147139Z 8 00h20m54.912560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-31T16:32:22.564824Z 8 00h20m55.112560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-31T16:32:22.577934Z 8 00h20m55.212560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in 
read-only Sender# [1:5360:750] 2025-03-31T16:32:22.607983Z 8 00h20m55.412560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-31T16:32:22.609200Z 8 00h20m55.412560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-31T16:32:22.623669Z 8 00h20m55.512560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-31T16:32:22.649218Z 8 00h20m55.712560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-31T16:32:22.857010Z 8 00h20m55.812560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-31T16:32:22.968346Z 8 00h20m56.012560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-31T16:32:23.090016Z 8 00h20m56.112560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-31T16:32:23.128528Z 8 00h20m56.312560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-31T16:32:23.159448Z 8 00h20m56.412560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-31T16:32:23.161067Z 8 00h20m56.412560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-31T16:32:23.176214Z 8 00h20m56.512560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-31T16:32:23.278356Z 8 00h20m56.712560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-31T16:32:23.293372Z 8 00h20m56.812560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-31T16:32:23.508248Z 8 00h20m56.912560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-31T16:32:23.517749Z 8 00h20m57.012560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-31T16:32:23.642592Z 8 00h20m57.112560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-31T16:32:23.718633Z 8 00h20m57.212560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-31T16:32:23.740386Z 8 00h20m57.312560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-31T16:32:23.798655Z 8 00h20m57.412560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-31T16:32:23.799835Z 8 00h20m57.412560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-31T16:32:24.081948Z 8 00h20m57.612560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-31T16:32:24.098865Z 8 00h20m57.712560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-31T16:32:24.246394Z 8 00h20m57.912560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-31T16:32:24.263617Z 8 00h20m58.012560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-31T16:32:24.294399Z 8 00h20m58.212560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-31T16:32:24.412336Z 8 00h20m58.312560s :BS_SKELETON ERROR: 
VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-31T16:32:24.428022Z 8 00h20m58.412560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-31T16:32:24.428826Z 8 00h20m58.412560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-31T16:32:24.457837Z 8 00h20m58.612560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-31T16:32:24.469235Z 8 00h20m58.712560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-31T16:32:24.553015Z 8 00h20m58.912560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-31T16:32:24.570226Z 8 00h20m59.012560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-31T16:32:24.595947Z 8 00h20m59.212560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-31T16:32:24.608373Z 8 00h20m59.312560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-31T16:32:24.623593Z 8 00h20m59.412560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-31T16:32:24.624623Z 8 00h20m59.412560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-31T16:32:24.663888Z 8 00h20m59.612560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-31T16:32:24.756797Z 8 00h20m59.712560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-31T16:32:24.922862Z 8 00h20m59.912560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-31T16:32:24.998709Z 8 00h21m00.012560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-31T16:32:25.027048Z 8 00h21m00.212560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-31T16:32:25.043282Z 8 00h21m00.312560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-31T16:32:25.118763Z 8 00h21m00.412560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-31T16:32:25.119736Z 8 00h21m00.412560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-31T16:32:25.134320Z 8 00h21m00.512560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-31T16:32:25.151708Z 8 00h21m00.612560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-31T16:32:25.162802Z 8 00h21m00.712560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-31T16:32:25.376602Z 8 00h21m00.812560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-31T16:32:25.389540Z 8 00h21m00.912560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-31T16:32:25.485700Z 8 00h21m01.112560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-31T16:32:25.575359Z 8 00h21m01.212560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-31T16:32:25.680430Z 8 
00h21m01.412560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-31T16:32:25.698636Z 8 00h21m01.512560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-31T16:32:25.712044Z 8 00h21m01.612560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-31T16:32:25.740084Z 8 00h21m01.812560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-31T16:32:25.779738Z 8 00h21m01.912560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-31T16:32:25.809294Z 8 00h21m02.112560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-31T16:32:25.822183Z 8 00h21m02.212560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-31T16:32:25.867034Z 8 00h21m02.412560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-31T16:32:25.867494Z 8 00h21m02.412560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-31T16:32:25.870358Z 8 00h21m02.412560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] >> TInterconnectTest::OldFormatSuppressVersionCheck [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::TestFailsOnIncorrectScriptExecutionFetchToken [GOOD] Test command err: Trying to start YDB, gRPC: 21194, MsgBus: 65260 2025-03-31T16:32:29.374511Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012410679991001:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:29.375186Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000f27/r3tmp/tmpJ5lOOU/pdisk_1.dat 2025-03-31T16:32:29.699631Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:32:29.742860Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:29.743728Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:32:29.746805Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21194, node 1 2025-03-31T16:32:29.875940Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:32:29.875971Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:32:29.875978Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:32:29.876079Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65260 TClient is connected to server localhost:65260 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:32:30.525164Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... >> KqpAgg::AggWithLookup [GOOD] >> KqpAgg::AggWithSelfLookup >> TInterconnectTest::TestBlobEvent220BytesPreSerialized |95.1%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/test-results/unittest/{meta.json ... results_accumulator.log} |95.2%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/test-results/unittest/{meta.json ... results_accumulator.log} >> TKesusTest::TestAcquireSharedBlocked [GOOD] >> TKesusTest::TestAcquireTimeoutAfterReboot >> DataShardWrite::InsertImmediate [GOOD] >> DataShardWrite::ImmediateAndPlannedCommittedOpsRace ------- [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::OldFormatSuppressVersionCheck [GOOD] Test command err: 2025-03-31T16:32:31.690129Z node 4 :INTERCONNECT WARN: Handshake [4:20:2056] [node 3] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default 2025-03-31T16:32:32.175463Z node 5 :INTERCONNECT WARN: Handshake [5:18:2057] [node 6] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default 2025-03-31T16:32:32.650180Z node 8 :INTERCONNECT WARN: Handshake [8:20:2056] [node 7] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default 2025-03-31T16:32:32.652705Z node 7 :INTERCONNECT WARN: Handshake [7:18:2057] [node 8] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-42 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-43 >> TInterconnectTest::TestBlobEvent220BytesPreSerialized [GOOD] >> TInterconnectTest::TestBlobEventDifferentSizes >> TKesusTest::TestAcquireSemaphoreTimeout [GOOD] >> TKesusTest::TestAcquireSemaphoreTimeoutTooBig >> TInterconnectTest::TestBlobEventDifferentSizes [GOOD] >> TInterconnectTest::TestBlobEventDifferentSizesPreSerialized >> TKesusTest::TestAcquireSemaphoreTimeoutTooBig [GOOD] >> TKesusTest::TestAcquireSemaphoreTimeoutInfinite >> KqpNewEngine::PkSelect1 [GOOD] >> KqpNewEngine::PkSelect2 >> GenericFederatedQuery::PostgreSQLOnPremSelectConstant >> KqpReturning::ReturningWorksIndexedDeleteV2-QueryService >> TInterconnectTest::TestBlobEventDifferentSizesPreSerialized [GOOD] >> TInterconnectTest::TestBlobEventDifferentSizesPreSerializedAndRaw 
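The TEvBlockResult failure recorded earlier in this trace ("Status# ERROR ... QuorumTracker# {Erroneous# 00000111 Successful# 00000000}") is the expected outcome of the read-only test: by that point the trace shows three of the eight VDisks of group 82000000 (positions 0, 1 and 2) in read-only mode, so a Block write can no longer assemble a quorum. Below is a minimal sketch of that quorum arithmetic only, assuming an 8-replica group that tolerates two failures and a hypothetical quorumSize parameter; the real QuorumTracker lives in ydb/core/blobstorage and tracks considerably more per-replica state.

#include <bitset>
#include <cstdio>

// Sketch only: the two bitmasks are copied straight from the log record
// ("Erroneous# 00000111 Successful# 00000000"); bit i corresponds to the
// replica at position i of the group.
bool BlockCanStillSucceed(const std::bitset<8>& erroneous,
                          const std::bitset<8>& successful,
                          size_t quorumSize) {  // hypothetical parameter
    // Replicas that have not replied yet may still count towards the quorum.
    const size_t pending = 8 - erroneous.count() - successful.count();
    return successful.count() + pending >= quorumSize;
}

int main() {
    std::bitset<8> erroneous("00000111");   // replicas 0,1,2 are read-only
    std::bitset<8> successful("00000000");  // no replica has confirmed yet
    // Assuming the group tolerates two failures, six confirmations are needed.
    std::printf("quorum reachable: %s\n",
                BlockCanStillSucceed(erroneous, successful, 6) ? "yes" : "no");
    return 0;
}

With the values from the log this prints "quorum reachable: no", which is exactly why the load test reports "recieved not OK" with Status# ERROR rather than retrying.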
>> KqpRanges::UpdateWhereInMultipleUpdate [GOOD] >> KqpRanges::ValidatePredicates >> TKesusTest::TestAcquireSemaphoreTimeoutInfinite [GOOD] >> TKesusTest::TestAcquireSemaphoreRebootTimeout >> TActorActivity::Basic [GOOD] >> ActorBootstrapped::TestBootstrapped >> ActorBootstrapped::TestBootstrapped [GOOD] >> ActorBootstrapped::TestBootstrappedParent [GOOD] >> TActorTracker::Basic >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-49 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-50 >> TInterconnectTest::TestBlobEventDifferentSizesPreSerializedAndRaw [GOOD] >> TActorTracker::Basic [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TActorTracker::Basic [GOOD] Test command err: ASYNC_DESTROYER |95.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestBlobEventDifferentSizesPreSerializedAndRaw [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-55 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-56 >> KqpNewEngine::MultiEffectsOnSameTable [GOOD] >> KqpNewEngine::MultiUsagePrecompute >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-66 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-67 >> KqpSqlIn::SimpleKey_In_And_In [GOOD] >> KqpSqlIn::SecondaryIndex_TupleParameter >> TestProtocols::TestResolveProtocol >> KqpNewEngine::BlindWriteListParameter [GOOD] >> KqpNewEngine::BatchUpload >> KqpNewEngine::DeleteWithInputMultiConsumptionLimit-UseSink [GOOD] >> KqpNewEngine::DependentSelect >> KqpRanges::NullInKey >> KqpLocks::MixedTxFail-useSink [GOOD] >> KqpLocksTricky::TestNoLocksIssue+withSink >> TestProtocols::TestResolveProtocol [GOOD] >> TestProtocols::TestHTTPCollectedVerySlow >> TKesusTest::TestQuoterAccountResourcesForgetClient [GOOD] >> TKesusTest::TestSessionTimeoutAfterDetach [GOOD] >> TKesusTest::TestSessionTimeoutAfterReboot >> KqpRanges::LiteralOrCompisiteCollision [GOOD] >> KqpRanges::MergeRanges >> KqpSort::TopSortTableExpr [GOOD] >> KqpSort::TopSortResults >> KqpNewEngine::LocksNoMutationsSharded [GOOD] >> KqpNewEngine::MultiEffects >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-67 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-68 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestQuoterAccountResourcesForgetClient [GOOD] Test command err: 2025-03-31T16:32:23.981353Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-31T16:32:23.981493Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-31T16:32:23.997802Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-31T16:32:23.997913Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-31T16:32:24.022607Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-31T16:32:24.028265Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:130:2156], cookie=12924050469593785420, path="/Res", config={ MaxUnitsPerSecond: -100 }) 2025-03-31T16:32:24.028496Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:130:2156], cookie=12924050469593785420) 2025-03-31T16:32:24.029157Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:137:2161], cookie=17399643301595339889, 
path="/ResWithoutMaxUnitsPerSecond", config={ }) 2025-03-31T16:32:24.029266Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:137:2161], cookie=17399643301595339889) 2025-03-31T16:32:24.029785Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:140:2164], cookie=14614311952214148957, path="/ResWithMaxUnitsPerSecond", config={ MaxUnitsPerSecond: 1 }) 2025-03-31T16:32:24.029972Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "ResWithMaxUnitsPerSecond" 2025-03-31T16:32:24.042302Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:140:2164], cookie=14614311952214148957) 2025-03-31T16:32:24.042934Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:145:2169], cookie=18259231047952241973, path="/ResWithMaxUnitsPerSecond/ChildWithoutMaxUnitsPerSecond", config={ }) 2025-03-31T16:32:24.043197Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "ResWithMaxUnitsPerSecond/ChildWithoutMaxUnitsPerSecond" 2025-03-31T16:32:24.055331Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:145:2169], cookie=18259231047952241973) 2025-03-31T16:32:24.425653Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-31T16:32:24.425757Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-31T16:32:24.444465Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-31T16:32:24.444599Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-31T16:32:24.461146Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-31T16:32:24.461624Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[2:130:2156], cookie=15107425536322344823, path="/Root", config={ MaxUnitsPerSecond: 100 PrefetchCoefficient: 300 }) 2025-03-31T16:32:24.462021Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-03-31T16:32:24.484831Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[2:130:2156], cookie=15107425536322344823) 2025-03-31T16:32:24.485485Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[2:140:2164], cookie=16008574201412550709, path="/Root/Res", config={ }) 2025-03-31T16:32:24.485732Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root/Res" 2025-03-31T16:32:24.498039Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[2:140:2164], cookie=16008574201412550709) 2025-03-31T16:32:24.500300Z node 2 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [2:145:2169]. Cookie: 9756042472467195267. 
Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 300 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 2 Version: "version" Schema: "schema" CloudId: "cloud" FolderId: "folder" ResourceId: "resource" SourceId: "source" Tags { key: "key" value: "value" } } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-03-31T16:32:24.500401Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[2:145:2169], cookie=9756042472467195267) 2025-03-31T16:32:24.500957Z node 2 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [2:145:2169]. Cookie: 9112524201297039513. Data: { ResourcesInfo { ResourceId: 2 AcceptedUs: 28000 } } 2025-03-31T16:32:24.501004Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[2:145:2169], cookie=9112524201297039513) 2025-03-31T16:32:26.628909Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-31T16:32:26.628988Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-31T16:32:26.646931Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-31T16:32:26.647229Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-31T16:32:26.661139Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-31T16:32:26.661572Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:132:2158], cookie=17833403655745817099, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2025-03-31T16:32:26.661898Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-03-31T16:32:26.684427Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:132:2158], cookie=17833403655745817099) 2025-03-31T16:32:26.684999Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:142:2166], cookie=9666388743708249529, path="/Root/Res", config={ }) 2025-03-31T16:32:26.685197Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root/Res" 2025-03-31T16:32:26.697268Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:142:2166], cookie=9666388743708249529) 2025-03-31T16:32:26.698093Z node 3 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [3:147:2171]. Cookie: 10194162769427003760. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { BillingPeriodSec: 2 } OnDemand { BillingPeriodSec: 2 } Overshoot { BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-03-31T16:32:26.698157Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[3:147:2171], cookie=10194162769427003760) 2025-03-31T16:32:26.698632Z node 3 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [3:147:2171]. 
Cookie: 17111473493517063083. Data: { ResourcesInfo { ResourceId: 2 AcceptedUs: 1019000 } } 2025-03-31T16:32:26.698667Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[3:147:2171], cookie=17111473493517063083) 2025-03-31T16:32:28.862415Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-31T16:32:28.862519Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-31T16:32:28.878240Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-31T16:32:28.878397Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-31T16:32:28.892220Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-31T16:32:28.892557Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:132:2158], cookie=10334155088682939342, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2025-03-31T16:32:28.892792Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-03-31T16:32:28.915396Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:132:2158], cookie=10334155088682939342) 2025-03-31T16:32:28.916205Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [4:142:2166]. Cookie: 4684825395744528181. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-03-31T16:32:28.916265Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[4:142:2166], cookie=4684825395744528181) 2025-03-31T16:32:28.916788Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [4:142:2166]. Cookie: 6806706855759017717. Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 1017500 } } 2025-03-31T16:32:28.916836Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[4:142:2166], cookie=6806706855759017717) 2025-03-31T16:32:28.917248Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [4:142:2166]. Cookie: 11165647966115750555. 
Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 1017500 } } 2025-03-31T16:32:28.917295Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[4:142:2166], cookie=11165647966115750555) 2025-03-31T16:32:31.073697Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-31T16:32:31.073801Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-31T16:32:31.089040Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-31T16:32:31.089343Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-31T16:32:31.113422Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-31T16:32:31.113853Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:132:2158], cookie=11164305960417699954, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2025-03-31T16:32:31.114162Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-03-31T16:32:31.126267Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:132:2158], cookie=11164305960417699954) 2025-03-31T16:32:31.127165Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:142:2166]. Cookie: 15029747588705860804. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-03-31T16:32:31.127227Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:142:2166], cookie=15029747588705860804) 2025-03-31T16:32:31.127732Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [5:142:2166]. Cookie: 13256146679992895727. Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 3000000 } } 2025-03-31T16:32:31.127785Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[5:142:2166], cookie=13256146679992895727) 2025-03-31T16:32:33.524565Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:173:2190]. Cookie: 18441596833277211514. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-03-31T16:32:33.524623Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:173:2190], cookie=18441596833277211514) 2025-03-31T16:32:33.525043Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [5:173:2190]. Cookie: 4850081489760324085. 
Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 9000000 } } 2025-03-31T16:32:33.525099Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[5:173:2190], cookie=4850081489760324085) 2025-03-31T16:32:35.538698Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:200:2216]. Cookie: 5562445073427233259. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-03-31T16:32:35.538783Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:200:2216], cookie=5562445073427233259) 2025-03-31T16:32:35.539405Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [5:200:2216]. Cookie: 554412482660086650. Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 15000000 } } 2025-03-31T16:32:35.539469Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[5:200:2216], cookie=554412482660086650) >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-19 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-20 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-19 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-20 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-67 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-68 >> KqpRanges::UpdateWhereInFullScan+UseSink [GOOD] >> KqpRanges::UpdateWhereInFullScan-UseSink >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-43 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-44 >> KqpNewEngine::DqSourceLiteralRange [GOOD] >> KqpNewEngine::DqSourceSequentialLimit >> DataShardWrite::ImmediateAndPlannedCommittedOpsRace [GOOD] >> DataShardWrite::PreparedDistributedWritePageFault >> KqpNewEngine::PkSelect2 [GOOD] >> KqpNewEngine::PkRangeSelect2 >> TInterconnectTest::TestSimplePingPong >> KqpAgg::AggWithSelfLookup [GOOD] >> KqpAgg::AggWithSelfLookup2 >> TestProtocols::TestHTTPCollectedVerySlow [GOOD] >> TestProtocols::TestHTTPRequest >> TInterconnectTest::TestSimplePingPong [GOOD] >> TInterconnectTest::TestSubscribeByFlag >> TestProtocols::TestHTTPRequest [GOOD] >> TInterconnectTest::TestSubscribeByFlag [GOOD] >> TInterconnectTest::TestReconnect >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-50 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-51 |95.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TestProtocols::TestHTTPRequest [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-StrictAclCheck >> TInterconnectTest::TestReconnect [GOOD] >> TInterconnectTest::TestSubscribeAndUnsubsribeByEvent >> KqpReturning::ReturningWorksIndexedDeleteV2-QueryService [GOOD] >> KqpReturning::ReturningWorksIndexedInsert+QueryService >> 
TInterconnectTest::TestSubscribeAndUnsubsribeByEvent [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-56 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-57 >> TInterconnectTest::TestConnectAndDisconnect >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-67 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-68 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestSubscribeAndUnsubsribeByEvent [GOOD] Test command err: 2025-03-31T16:32:40.618726Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP77 @201 (null) -> PendingActivation 2025-03-31T16:32:40.618815Z node 5 :INTERCONNECT INFO: Proxy [5:1:2048] [node 6] ICP01 ready to work 2025-03-31T16:32:40.619607Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP77 @201 (null) -> PendingActivation 2025-03-31T16:32:40.619647Z node 6 :INTERCONNECT INFO: Proxy [6:9:2048] [node 5] ICP01 ready to work 2025-03-31T16:32:40.619931Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP77 @99 PendingActivation -> PendingNodeInfo 2025-03-31T16:32:40.620934Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP02 configured for host ::1:15933 2025-03-31T16:32:40.621061Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP77 @483 PendingNodeInfo -> PendingConnection 2025-03-31T16:32:40.621440Z node 5 :INTERCONNECT DEBUG: Handshake [5:19:2057] [node 6] ICH01 starting outgoing handshake 2025-03-31T16:32:40.621574Z node 5 :INTERCONNECT DEBUG: ICR04 Host: ::1, RESOLVED address 2025-03-31T16:32:40.622219Z node 5 :INTERCONNECT DEBUG: Handshake [5:19:2057] [node 6] ICH05 connected to peer 2025-03-31T16:32:40.622691Z node 6 :INTERCONNECT DEBUG: ICListener: [0:0:0] ICL04 Accepted from: ::1:33104 2025-03-31T16:32:40.623192Z node 6 :INTERCONNECT DEBUG: Handshake [6:21:2057] [node 0] ICH02 starting incoming handshake 2025-03-31T16:32:40.627207Z node 5 :INTERCONNECT DEBUG: Handshake [5:19:2057] [node 6] ICH07 SendExBlock ExRequest Protocol: 2 ProgramPID: 626016 ProgramStartTime: 5564939959400 Serial: 2069374000 ReceiverNodeId: 6 SenderActorId: "[5:2069374000:0]" SenderHostName: "::1" ReceiverHostName: "::1" UUID: "Cluster for process with id: 626016" ClusterUUIDs { ClusterUUID: "Cluster for process with id: 626016" AcceptUUID: "Cluster for process with id: 626016" } RequestModernFrame: true RequestAuthOnly: false RequestExtendedTraceFmt: true RequestExternalDataChannel: true HandshakeId: "g\377\346\320z\303d\261XT\205\234\031q\256]V\021\204\004\266\222a\035L\316~\250g\213i\242" RequestXxhash: true RequestXdcShuffle: true 2025-03-31T16:32:40.627846Z node 6 :INTERCONNECT DEBUG: Handshake [6:21:2057] [node 5] ICH07 ReceiveExBlock ExRequest Protocol: 2 ProgramPID: 626016 ProgramStartTime: 5564939959400 Serial: 2069374000 ReceiverNodeId: 6 SenderActorId: "[5:2069374000:0]" SenderHostName: "::1" ReceiverHostName: "::1" UUID: "Cluster for process with id: 626016" ClusterUUIDs { ClusterUUID: "Cluster for process with id: 626016" AcceptUUID: "Cluster for process with id: 626016" } RequestModernFrame: true RequestAuthOnly: false RequestExtendedTraceFmt: true RequestExternalDataChannel: true HandshakeId: "g\377\346\320z\303d\261XT\205\234\031q\256]V\021\204\004\266\222a\035L\316~\250g\213i\242" RequestXxhash: true RequestXdcShuffle: true 2025-03-31T16:32:40.627912Z node 6 :INTERCONNECT WARN: Handshake [6:21:2057] [node 5] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be 
validated, accepting by default 2025-03-31T16:32:40.628273Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP77 @99 PendingActivation -> PendingNodeInfo 2025-03-31T16:32:40.629248Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP02 configured for host ::1:30281 2025-03-31T16:32:40.629292Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP17 incoming handshake (actor [6:21:2057]) 2025-03-31T16:32:40.629337Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP77 @483 PendingNodeInfo -> PendingConnection 2025-03-31T16:32:40.629387Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP07 issued incoming handshake reply 2025-03-31T16:32:40.629434Z node 6 :INTERCONNECT INFO: Proxy [6:9:2048] [node 5] ICP08 No active sessions, becoming PendingConnection 2025-03-31T16:32:40.629493Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP77 @220 PendingConnection -> PendingConnection 2025-03-31T16:32:40.629829Z node 6 :INTERCONNECT DEBUG: Handshake [6:21:2057] [node 5] ICH07 SendExBlock ExReply Success { Protocol: 2 ProgramPID: 626016 ProgramStartTime: 5564954959720 Serial: 150573959 SenderActorId: "[6:150573959:0]" ClusterUUIDs { ClusterUUID: "Cluster for process with id: 626016" AcceptUUID: "Cluster for process with id: 626016" } StartEncryption: false UseModernFrame: true AuthOnly: false UseExtendedTraceFmt: true UseExternalDataChannel: true UseXxhash: true UseXdcShuffle: true } 2025-03-31T16:32:40.630290Z node 5 :INTERCONNECT DEBUG: Handshake [5:19:2057] [node 6] ICH07 ReceiveExBlock ExReply Success { Protocol: 2 ProgramPID: 626016 ProgramStartTime: 5564954959720 Serial: 150573959 SenderActorId: "[6:150573959:0]" ClusterUUIDs { ClusterUUID: "Cluster for process with id: 626016" AcceptUUID: "Cluster for process with id: 626016" } StartEncryption: false UseModernFrame: true AuthOnly: false UseExtendedTraceFmt: true UseExternalDataChannel: true UseXxhash: true UseXdcShuffle: true } 2025-03-31T16:32:40.630356Z node 5 :INTERCONNECT WARN: Handshake [5:19:2057] [node 6] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default 2025-03-31T16:32:40.630486Z node 5 :INTERCONNECT DEBUG: ICR04 Host: ::1, RESOLVED address 2025-03-31T16:32:40.631065Z node 5 :INTERCONNECT DEBUG: Handshake [5:19:2057] [node 6] ICH07 SendExBlock ExternalDataChannelParams HandshakeId: "g\377\346\320z\303d\261XT\205\234\031q\256]V\021\204\004\266\222a\035L\316~\250g\213i\242" 2025-03-31T16:32:40.631174Z node 5 :INTERCONNECT INFO: Handshake [5:19:2057] [node 6] ICH04 handshake succeeded 2025-03-31T16:32:40.631443Z node 5 :INTERCONNECT INFO: Proxy [5:1:2048] [node 6] ICP20 outgoing handshake succeeded 2025-03-31T16:32:40.631481Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP052 dropped outgoing handshake: [5:19:2057] poison: false 2025-03-31T16:32:40.631523Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP77 @350 PendingConnection -> StateWork 2025-03-31T16:32:40.631662Z node 5 :INTERCONNECT INFO: Proxy [5:1:2048] [node 6] ICP22 created new session: [5:23:2048] 2025-03-31T16:32:40.631718Z node 5 :INTERCONNECT_SESSION INFO: Session [5:23:2048] [node 6] ICS09 handshake done sender: [5:19:2057] self: [5:2069374000:0] peer: [6:150573959:0] socket: 24 2025-03-31T16:32:40.631759Z node 5 :INTERCONNECT_SESSION INFO: Session [5:23:2048] [node 6] ICS10 traffic start 2025-03-31T16:32:40.631837Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:23:2048] [node 6] ICS11 registering socket in PollerActor 2025-03-31T16:32:40.631890Z node 5 
:INTERCONNECT_SESSION DEBUG: Session [5:23:2048] [node 6] ICS23 confirm count: 0 2025-03-31T16:32:40.631944Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:23:2048] [node 6] ICS06 rewind SendQueue size# 0 LastConfirmed# 0 NextSerial# 1 2025-03-31T16:32:40.631986Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:23:2048] [node 6] ICS23 confirm count: 0 2025-03-31T16:32:40.632037Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:23:2048] [node 6] ICS04 subscribe for session state for [5:17:2056] 2025-03-31T16:32:40.632117Z node 6 :INTERCONNECT DEBUG: ICListener: [0:0:0] ICL04 Accepted from: ::1:33108 2025-03-31T16:32:40.632469Z node 6 :INTERCONNECT DEBUG: Handshake [6:25:2058] [node 0] ICH02 starting incoming handshake 2025-03-31T16:32:40.632578Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:24:2048] [node 6] ICIS01 InputSession created 2025-03-31T16:32:40.633391Z node 6 :INTERCONNECT INFO: Handshake [6:21:2057] [node 5] ICH04 handshake succeeded 2025-03-31T16:32:40.633576Z node 6 :INTERCONNECT INFO: Proxy [6:9:2048] [node 5] ICP19 incoming handshake succeeded 2025-03-31T16:32:40.633622Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP111 dropped incoming handshake: [6:21:2057] poison: false 2025-03-31T16:32:40.633654Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP77 @350 PendingConnection -> StateWork 2025-03-31T16:32:40.633735Z node 6 :INTERCONNECT INFO: Proxy [6:9:2048] [node 5] ICP22 created new session: [6:26:2048] 2025-03-31T16:32:40.633768Z node 6 :INTERCONNECT_SESSION INFO: Session [6:26:2048] [node 5] ICS09 handshake done sender: [6:21:2057] self: [6:150573959:0] peer: [5:2069374000:0] socket: 25 2025-03-31T16:32:40.654734Z node 6 :INTERCONNECT_SESSION INFO: Session [6:26:2048] [node 5] ICS10 traffic start 2025-03-31T16:32:40.654893Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS11 registering socket in PollerActor 2025-03-31T16:32:40.654954Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS23 confirm count: 0 2025-03-31T16:32:40.655007Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS06 rewind SendQueue size# 0 LastConfirmed# 0 NextSerial# 1 2025-03-31T16:32:40.655058Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS23 confirm count: 0 2025-03-31T16:32:40.655224Z node 6 :INTERCONNECT ERROR: Proxy [6:9:2048] [node 5] ICP35 event processing took too much time PendingConnection:273(21542665ns) {HandleHandshakeStatus:328(21540680ns) {DropIncomingHandshake:436(37062ns) DropOutgoingHandshake:451(334ns) SwitchToState:173(22934ns) ProcessPendingSessionEvents:494(721ns)}} 2025-03-31T16:32:40.655297Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:24:2048] [node 6] ICIS02 ReceiveData called 2025-03-31T16:32:40.655403Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:24:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err# 2025-03-31T16:32:40.655569Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:27:2048] [node 5] ICIS01 InputSession created 2025-03-31T16:32:40.655641Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:27:2048] [node 5] ICIS02 ReceiveData called 2025-03-31T16:32:40.655699Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:27:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err# 2025-03-31T16:32:40.655750Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:24:2048] [node 6] ICIS02 ReceiveData called 2025-03-31T16:32:40.655793Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:24:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err# 2025-03-31T16:32:40.655920Z node 
5 :INTERCONNECT_SESSION DEBUG: InputSession [5:24:2048] [node 6] ICIS02 ReceiveData called 2025-03-31T16:32:40.655958Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:24:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err# 2025-03-31T16:32:40.655997Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:27:2048] [node 5] ICIS02 ReceiveData called 2025-03-31T16:32:40.656025Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:27:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err# 2025-03-31T16:32:40.656066Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:27:2048] [node 5] ICIS02 ReceiveData called 2025-03-31T16:32:40.656093Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:27:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err# 2025-03-31T16:32:40.656132Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:23:2048] [node 6] ICS23 confirm count: 0 2025-03-31T16:32:40.656173Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:23:2048] [node 6] ICS23 confirm count: 0 2025-03-31T16:32:40.656270Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS23 confirm count: 0 2025-03-31T16:32:40.656297Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS23 confirm count: 0 2025-03-31T16:32:40.656332Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:23:2048] [node 6] ICS23 confirm count: 0 2025-03-31T16:32:40.656364Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:23:2048] [node 6] ICS23 confirm count: 0 2025-03-31T16:32:40.656439Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS23 confirm count: 0 2025-03-31T16:32:40.656484Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS23 confirm count: 0 2025-03-31T16:32:40.656595Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:23:2048] [node 6] ICS02 send event from: [5:17:2056] to: [6:18:2056] 2025-03-31T16:32:40.656787Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:23:2048] [node 6] ICS22 outgoing packet Serial ... 
ke [5:29:2058] [node 6] ICH01 starting outgoing handshake 2025-03-31T16:32:40.664682Z node 6 :INTERCONNECT DEBUG: ICR04 Host: ::1, RESOLVED address 2025-03-31T16:32:40.665100Z node 5 :INTERCONNECT DEBUG: ICR04 Host: ::1, RESOLVED address 2025-03-31T16:32:40.665196Z node 5 :INTERCONNECT DEBUG: ICListener: [0:0:0] ICL04 Accepted from: ::1:51258 2025-03-31T16:32:40.665488Z node 6 :INTERCONNECT DEBUG: Handshake [6:28:2059] [node 5] ICH05 connected to peer 2025-03-31T16:32:40.665605Z node 6 :INTERCONNECT DEBUG: Handshake [6:28:2059] [node 5] ICH07 SendExBlock ExRequest HandshakeId: "?\363]V\004\024\311\211#\373\002,9\354\021\243\354~&\365\212\271\272q]\362\254\324\264\017\270\276" 2025-03-31T16:32:40.665890Z node 5 :INTERCONNECT DEBUG: Handshake [5:32:2059] [node 0] ICH02 starting incoming handshake 2025-03-31T16:32:40.666474Z node 6 :INTERCONNECT DEBUG: ICListener: [0:0:0] ICL04 Accepted from: ::1:33122 2025-03-31T16:32:40.666760Z node 6 :INTERCONNECT DEBUG: Handshake [6:33:2060] [node 0] ICH02 starting incoming handshake 2025-03-31T16:32:40.667003Z node 5 :INTERCONNECT DEBUG: Handshake [5:29:2058] [node 6] ICH05 connected to peer 2025-03-31T16:32:40.667114Z node 5 :INTERCONNECT DEBUG: Handshake [5:29:2058] [node 6] ICH07 SendExBlock ExRequest HandshakeId: "\t\203V\251E\304.6\r\372f5\266\021\213I\201^\212\r\r\255\350$ \"\247/\351k\343\314" 2025-03-31T16:32:40.667429Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP09 (actor [5:32:2059]) from: [6:150573959:0] for: [5:2069374000:0] 2025-03-31T16:32:40.755844Z node 5 :INTERCONNECT_SESSION INFO: Session [5:23:2048] [node 6] ICS08 incoming handshake Self# [6:150573959:0] Peer# [5:2069374000:0] Counter# 1 LastInputSerial# 1 2025-03-31T16:32:40.755969Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP06 reply for incoming handshake (actor [5:32:2059]) is held 2025-03-31T16:32:40.756125Z node 5 :INTERCONNECT ERROR: Proxy [5:1:2048] [node 6] ICP35 event processing took too much time StateWork:288(88603511ns) {IncomingHandshake:228(88600973ns) {DropIncomingHandshake:436(3392ns) IssueIncomingHandshakeReply:192(81793ns)}} 2025-03-31T16:32:40.756229Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP09 (actor [6:33:2060]) from: [5:2069374000:0] for: [6:150573959:0] 2025-03-31T16:32:40.756301Z node 6 :INTERCONNECT_SESSION INFO: Session [6:26:2048] [node 5] ICS08 incoming handshake Self# [5:2069374000:0] Peer# [6:150573959:0] Counter# 1 LastInputSerial# 1 2025-03-31T16:32:40.756356Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP07 issued incoming handshake reply 2025-03-31T16:32:40.757074Z node 5 :INTERCONNECT DEBUG: ICR04 Host: ::1, RESOLVED address 2025-03-31T16:32:40.757633Z node 5 :INTERCONNECT DEBUG: Handshake [5:29:2058] [node 6] ICH07 SendExBlock ExternalDataChannelParams HandshakeId: "\t\203V\251E\304.6\r\372f5\266\021\213I\201^\212\r\r\255\350$ \"\247/\351k\343\314" 2025-03-31T16:32:40.757707Z node 5 :INTERCONNECT INFO: Handshake [5:29:2058] [node 6] ICH04 handshake succeeded 2025-03-31T16:32:40.757933Z node 5 :INTERCONNECT INFO: Proxy [5:1:2048] [node 6] ICP20 outgoing handshake succeeded 2025-03-31T16:32:40.757977Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP111 dropped incoming handshake: [5:32:2059] poison: true 2025-03-31T16:32:40.758015Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP052 dropped outgoing handshake: [5:29:2058] poison: false 2025-03-31T16:32:40.758047Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP77 @350 StateWork -> StateWork 2025-03-31T16:32:40.758081Z node 
5 :INTERCONNECT_SESSION INFO: Session [5:23:2048] [node 6] ICS09 handshake done sender: [5:29:2058] self: [5:2069374000:0] peer: [6:150573959:0] socket: 30 2025-03-31T16:32:40.758125Z node 5 :INTERCONNECT_SESSION INFO: Session [5:23:2048] [node 6] ICS10 traffic start 2025-03-31T16:32:40.758196Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:23:2048] [node 6] ICS11 registering socket in PollerActor 2025-03-31T16:32:40.758239Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:23:2048] [node 6] ICS23 confirm count: 1 2025-03-31T16:32:40.758271Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:23:2048] [node 6] ICS06 rewind SendQueue size# 0 LastConfirmed# 1 NextSerial# 2 2025-03-31T16:32:40.758331Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:23:2048] [node 6] ICS22 outgoing packet Serial# 2 Confirm# 1 DataSize# 84 InflightDataAmount# 84 2025-03-31T16:32:40.758403Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:23:2048] [node 6] ICS23 confirm count: 1 2025-03-31T16:32:40.758471Z node 6 :INTERCONNECT DEBUG: ICListener: [0:0:0] ICL04 Accepted from: ::1:33132 2025-03-31T16:32:40.758823Z node 6 :INTERCONNECT DEBUG: Handshake [6:36:2061] [node 0] ICH02 starting incoming handshake 2025-03-31T16:32:40.759398Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:35:2048] [node 6] ICIS01 InputSession created 2025-03-31T16:32:40.760631Z node 6 :INTERCONNECT NOTICE: Proxy [6:9:2048] [node 5] ICP25 outgoing handshake failed, temporary: 0 explanation: outgoing handshake Peer# ::1(::1:30281) Socket error# connection unexpectedly closed state# ReceiveResponse processed# 0 remain# 52 incoming: [6:33:2060] held: no 2025-03-31T16:32:40.760699Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP052 dropped outgoing handshake: [6:28:2059] poison: false 2025-03-31T16:32:40.760742Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP28 other handshake is still going on 2025-03-31T16:32:40.760836Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:35:2048] [node 6] ICIS02 ReceiveData called 2025-03-31T16:32:40.760922Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:35:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err# 2025-03-31T16:32:40.761223Z node 6 :INTERCONNECT INFO: Handshake [6:33:2060] [node 5] ICH04 handshake succeeded 2025-03-31T16:32:40.761470Z node 6 :INTERCONNECT INFO: Proxy [6:9:2048] [node 5] ICP19 incoming handshake succeeded 2025-03-31T16:32:40.761508Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP111 dropped incoming handshake: [6:33:2060] poison: false 2025-03-31T16:32:40.761555Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP77 @350 StateWork -> StateWork 2025-03-31T16:32:40.761594Z node 6 :INTERCONNECT_SESSION INFO: Session [6:26:2048] [node 5] ICS09 handshake done sender: [6:33:2060] self: [6:150573959:0] peer: [5:2069374000:0] socket: 31 2025-03-31T16:32:40.761620Z node 6 :INTERCONNECT_SESSION INFO: Session [6:26:2048] [node 5] ICS10 traffic start 2025-03-31T16:32:40.761676Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS11 registering socket in PollerActor 2025-03-31T16:32:40.761708Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS23 confirm count: 1 2025-03-31T16:32:40.761737Z node 6 :INTERCONNECT_SESSION DEBUG: OutputChannel 0 [node 5] ICOCH98 Dropping confirmed messages 2025-03-31T16:32:40.761789Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS24 exit InflightDataAmount: 0 bytes droppedDataAmount: 84 bytes dropped 1 packets 2025-03-31T16:32:40.761827Z node 6 :INTERCONNECT_SESSION DEBUG: Session 
[6:26:2048] [node 5] ICS06 rewind SendQueue size# 0 LastConfirmed# 1 NextSerial# 2 2025-03-31T16:32:40.761860Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS23 confirm count: 1 2025-03-31T16:32:40.761903Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:35:2048] [node 6] ICIS02 ReceiveData called 2025-03-31T16:32:40.761943Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:35:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err# 2025-03-31T16:32:40.762019Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:37:2048] [node 5] ICIS01 InputSession created 2025-03-31T16:32:40.762183Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:37:2048] [node 5] ICIS02 ReceiveData called 2025-03-31T16:32:40.762229Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:37:2048] [node 5] ICIS12 Read recvres# 106 num# 1 err# 2025-03-31T16:32:40.762306Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:37:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err# 2025-03-31T16:32:40.762332Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:37:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err# 2025-03-31T16:32:40.762363Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:35:2048] [node 6] ICIS02 ReceiveData called 2025-03-31T16:32:40.762388Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:35:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err# 2025-03-31T16:32:40.762751Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:23:2048] [node 6] ICS23 confirm count: 1 2025-03-31T16:32:40.762815Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:23:2048] [node 6] ICS23 confirm count: 1 2025-03-31T16:32:40.762884Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:37:2048] [node 5] ICIS02 ReceiveData called 2025-03-31T16:32:40.762926Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:37:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err# 2025-03-31T16:32:40.762969Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:37:2048] [node 5] ICIS02 ReceiveData called 2025-03-31T16:32:40.763034Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:37:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err# 2025-03-31T16:32:40.763198Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:23:2048] [node 6] ICS23 confirm count: 1 2025-03-31T16:32:40.763226Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:23:2048] [node 6] ICS23 confirm count: 1 2025-03-31T16:32:40.763262Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS23 confirm count: 1 2025-03-31T16:32:40.763284Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS23 confirm count: 1 2025-03-31T16:32:40.763341Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS02 send event from: [6:18:2056] to: [5:17:2056] 2025-03-31T16:32:40.763427Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS22 outgoing packet Serial# 2 Confirm# 2 DataSize# 84 InflightDataAmount# 84 2025-03-31T16:32:40.763535Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS23 confirm count: 1 2025-03-31T16:32:40.763576Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:35:2048] [node 6] ICIS02 ReceiveData called 2025-03-31T16:32:40.763616Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:35:2048] [node 6] ICIS12 Read recvres# 106 num# 1 err# 2025-03-31T16:32:40.763688Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:35:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err# 2025-03-31T16:32:40.763748Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS23 confirm count: 1 
2025-03-31T16:32:40.763776Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS23 confirm count: 1 2025-03-31T16:32:40.857085Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:23:2048] [node 6] ICS23 confirm count: 2 2025-03-31T16:32:40.857185Z node 5 :INTERCONNECT_SESSION DEBUG: OutputChannel 0 [node 6] ICOCH98 Dropping confirmed messages 2025-03-31T16:32:40.857263Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:23:2048] [node 6] ICS24 exit InflightDataAmount: 0 bytes droppedDataAmount: 84 bytes dropped 1 packets 2025-03-31T16:32:40.857337Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:23:2048] [node 6] ICS23 confirm count: 2 2025-03-31T16:32:40.857482Z node 5 :INTERCONNECT_SESSION INFO: Session [5:23:2048] [node 6] ICS01 socket: 30 reason# 2025-03-31T16:32:40.857550Z node 5 :INTERCONNECT INFO: Proxy [5:1:2048] [node 6] ICP30 unregister session Session# [5:23:2048] VirtualId# [5:2069374000:0] 2025-03-31T16:32:40.857604Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP77 @201 StateWork -> PendingActivation 2025-03-31T16:32:40.857650Z node 5 :INTERCONNECT_SESSION INFO: Session [5:23:2048] [node 6] ICS25 shutdown socket, reason# 2025-03-31T16:32:40.857787Z node 5 :INTERCONNECT_SESSION DEBUG: OutputChannel 0 [node 6] ICOCH89 Notifying about Undelivered messages! NotYetConfirmed size: 0, Queue size: 0 >> TInterconnectTest::TestConnectAndDisconnect [GOOD] >> TInterconnectTest::TestBlobEventPreSerialized >> KqpNewEngine::BatchUpload [GOOD] >> KqpNewEngine::BrokenLocksAtROTx >> KqpNewEngine::MultiUsagePrecompute [GOOD] >> KqpNewEngine::MultiUsageInnerConnection >> TInterconnectTest::TestBlobEventPreSerialized [GOOD] >> TInterconnectTest::TestBlobEventUpToMebibytes >> KqpNewEngine::DependentSelect [GOOD] >> KqpNewEngine::DqSourceCount >> KqpRanges::NullInKey [GOOD] >> KqpRanges::NullInKeySuffix >> TInterconnectTest::TestBlobEventUpToMebibytes [GOOD] >> TInterconnectTest::TestBlobEventsThroughSubChannels >> GenericFederatedQuery::PostgreSQLOnPremSelectConstant [GOOD] >> GenericFederatedQuery::PostgreSQLSelectCount >> TInterconnectTest::TestBlobEventsThroughSubChannels [GOOD] >> KqpSinkMvcc::OltpNamedStatementNoSink >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-68 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-69 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-20 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-21 >> KqpNewEngine::MultiEffects [GOOD] >> KqpNewEngine::LookupColumns |95.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestBlobEventsThroughSubChannels [GOOD] >> TKesusTest::TestSessionTimeoutAfterUnregister [GOOD] >> TKesusTest::TestStopResourceAllocationWhenPipeDestroyed >> KqpRanges::MergeRanges [GOOD] >> KqpRanges::Like >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-68 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-69 >> DataShardWrite::PreparedDistributedWritePageFault [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-20 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-21 >> KqpNewEngine::PkRangeSelect2 [GOOD] >> KqpNewEngine::PkRangeSelect3 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-44 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-45 >> KqpSnapshotIsolation::TConflictReadWriteOlap [FAIL] >>
TKesusTest::TestStopResourceAllocationWhenPipeDestroyed [GOOD] >> KqpNewEngine::DqSourceSequentialLimit [GOOD] >> KqpNewEngine::DqSourceLocksEffects >> TKesusTest::TestAcquireLocks [GOOD] >> TKesusTest::TestAcquireRepeat ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestStopResourceAllocationWhenPipeDestroyed [GOOD] Test command err: 2025-03-31T16:32:21.169656Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-31T16:32:21.169767Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-31T16:32:21.183686Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-31T16:32:21.183773Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-31T16:32:21.213031Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-31T16:32:21.582375Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-31T16:32:21.582471Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-31T16:32:21.599995Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-31T16:32:21.600087Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-31T16:32:21.614974Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-31T16:32:22.044403Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-31T16:32:22.044522Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-31T16:32:22.063276Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-31T16:32:22.063595Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-31T16:32:22.077643Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-31T16:32:22.078203Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:132:2158], cookie=14191024765968423645, session=0, seqNo=0) 2025-03-31T16:32:22.078397Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-03-31T16:32:22.101955Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:132:2158], cookie=14191024765968423645, session=1) 2025-03-31T16:32:22.102606Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[3:141:2165], cookie=15633467053285586622) 2025-03-31T16:32:22.102705Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[3:141:2165], cookie=15633467053285586622) 2025-03-31T16:32:22.507879Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:22.520647Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:22.871504Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:22.883972Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:23.223115Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:23.235155Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:23.575288Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:23.587416Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:23.951339Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:23.963621Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxSelfCheck::Complete 2025-03-31T16:32:24.304747Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:24.317172Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:24.649107Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:24.661520Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:25.002412Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:25.014627Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:25.345868Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:25.358100Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:25.752242Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:25.764050Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:26.114237Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:26.126484Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:26.476507Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:26.488635Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:26.844140Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:26.856489Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:27.207390Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:27.220970Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:27.616715Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:27.629027Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:27.980065Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:27.992253Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:28.345743Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:28.357779Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:28.707905Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:28.720701Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:29.071609Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:29.083841Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:29.456833Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:29.469048Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:29.831322Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:29.843848Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:30.200596Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:30.212840Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:30.569180Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:30.582127Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxSelfCheck::Complete 2025-03-31T16:32:30.935662Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:30.948088Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:31.309895Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:31.322293Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:31.684687Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:31.697356Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:32.050447Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:32.062500Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:32.426953Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:32.440027Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:32.791659Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:32.807954Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:33.205909Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:33.227323Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:33.581931Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:33.597857Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:33.952560Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:33.964980Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:34.306319Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:34.318731Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:34.670686Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:34.683000Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:35.045210Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:35.059794Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:35.420511Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:35.433005Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:35.783732Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:35.795976Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:36.148923Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:36.161374Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:36.515924Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:36.527921Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:36.912182Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:36.924160Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:37.275821Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:37.288100Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxSelfCheck::Complete 2025-03-31T16:32:37.650463Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:37.662834Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:38.016573Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:38.028907Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:38.381417Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:38.394009Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:38.779289Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:38.791455Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:39.149182Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:39.166023Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:39.534502Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:39.546918Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:39.901231Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:39.913677Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:40.267169Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:40.279411Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:40.644408Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:40.657109Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:40.999035Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:41.011018Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:41.363443Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:41.375874Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:41.718350Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:41.730642Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:42.072212Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:42.084516Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:42.439776Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:42.452109Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:42.794490Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:42.806783Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:43.139489Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:43.152854Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:43.506112Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:43.518470Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:43.860525Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:43.872607Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxSelfCheck::Complete 2025-03-31T16:32:44.270491Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2025-03-31T16:32:44.270616Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 2025-03-31T16:32:44.284067Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2025-03-31T16:32:44.295297Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[3:472:2479], cookie=6732283968794532814) 2025-03-31T16:32:44.295428Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[3:472:2479], cookie=6732283968794532814) 2025-03-31T16:32:44.784785Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-31T16:32:44.784904Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-31T16:32:44.805627Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-31T16:32:44.805805Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-31T16:32:44.820490Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-31T16:32:44.826405Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:132:2158], cookie=13667735388066466816, path="Root", config={ MaxUnitsPerSecond: 100 }) 2025-03-31T16:32:44.826666Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-03-31T16:32:44.851483Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:132:2158], cookie=13667735388066466816) 2025-03-31T16:32:44.853530Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [4:141:2165]. Cookie: 0. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-03-31T16:32:44.853613Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[4:141:2165], cookie=0) 2025-03-31T16:32:44.853940Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [4:143:2167]. Cookie: 0. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-03-31T16:32:44.853977Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[4:143:2167], cookie=0) 2025-03-31T16:32:44.896787Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [4:143:2167]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 5 StateNotification { Status: SUCCESS } } } 2025-03-31T16:32:44.896886Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [4:141:2165]. Cookie: 0. 
Data: { ResourcesInfo { ResourceId: 1 Amount: 5 StateNotification { Status: SUCCESS } } } 2025-03-31T16:32:44.897135Z node 4 :KESUS_TABLET TRACE: Got TEvServerDisconnected([4:146:2170]) 2025-03-31T16:32:44.897263Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [4:143:2167]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 StateNotification { Status: SESSION_EXPIRED Issues { message: "Disconnected." } } } } 2025-03-31T16:32:44.939644Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [4:141:2165]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 10 StateNotification { Status: SUCCESS } } } >> KqpSqlIn::SecondaryIndex_TupleParameter [GOOD] >> KqpSqlIn::SecondaryIndex_TupleLiteral ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::PreparedDistributedWritePageFault [GOOD] Test command err: 2025-03-31T16:32:10.943429Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:32:10.943717Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:32:10.943880Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0011f1/r3tmp/tmpLPELOd/pdisk_1.dat 2025-03-31T16:32:11.343290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:32:11.389058Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:32:11.431762Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:11.431917Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:32:11.443422Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:32:11.525850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:32:11.558619Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:32:11.559478Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:32:11.559892Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-31T16:32:11.560111Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:32:11.593589Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:32:11.594153Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:32:11.594245Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:32:11.595613Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:32:11.595687Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:32:11.595734Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:32:11.596017Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:32:11.596130Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:32:11.596193Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-31T16:32:11.606916Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:32:11.635377Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T16:32:11.635555Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:32:11.635660Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-31T16:32:11.635687Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:32:11.635720Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T16:32:11.635758Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:32:11.635968Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:32:11.636016Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:32:11.636228Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T16:32:11.636310Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T16:32:11.636418Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:32:11.636455Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:32:11.636511Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-31T16:32:11.636550Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-31T16:32:11.636578Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-31T16:32:11.636609Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:32:11.636645Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:32:11.636740Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:32:11.636767Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:32:11.636813Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-31T16:32:11.636951Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-03-31T16:32:11.636989Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T16:32:11.637075Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:32:11.637274Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-31T16:32:11.637325Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T16:32:11.637416Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T16:32:11.637451Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-31T16:32:11.637478Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-31T16:32:11.637503Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-03-31T16:32:11.637546Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-31T16:32:11.637755Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-31T16:32:11.637791Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-31T16:32:11.637819Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-31T16:32:11.637841Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-31T16:32:11.637881Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-31T16:32:11.637914Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-31T16:32:11.637950Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-31T16:32:11.637989Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-31T16:32:11.638009Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-31T16:32:11.639562Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-31T16:32:11.639617Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:32:11.650418Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:32:11.650512Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-31T16:32:11.650554Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-31T16:32:11.650601Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-03-31T16:32:11.650677Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-31T16:32:11.800499Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:32:11.800559Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:32:11.800595Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-31T16:32:11.800916Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:568:2495], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-03-31T16:32:11.800959Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-31T16:32:11.801089Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-31T16:32:11.801132Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-03-31T16:32:11.801183Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-03-31T16:32:11.801224Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-03-31T16:32:11.809008Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T16:32:11.809084Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:32:11.809685Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:32:11.809717Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:32:11.809757Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:32:1 ... 75186224037888 is Executed 2025-03-31T16:32:44.273792Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit WaitForPlan 2025-03-31T16:32:44.273848Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit PlanQueue 2025-03-31T16:32:44.274061Z node 7 :TX_DATASHARD DEBUG: Planned transaction txId 1234567890011 at step 3500 at tablet 72075186224037888 { Transactions { TxId: 1234567890011 AckTo { RawX1: 0 RawX2: 0 } } Step: 3500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T16:32:44.274138Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:32:44.274502Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [7:810:2667], Recipient [7:810:2667]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:32:44.274552Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:32:44.274628Z node 7 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:32:44.274682Z node 7 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:32:44.274727Z node 7 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-03-31T16:32:44.274785Z node 7 :TX_DATASHARD DEBUG: Found ready operation [3500:1234567890011] in PlanQueue unit at 72075186224037888 2025-03-31T16:32:44.274846Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit PlanQueue 2025-03-31T16:32:44.274898Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2025-03-31T16:32:44.274951Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit PlanQueue 2025-03-31T16:32:44.275013Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit LoadWriteDetails 2025-03-31T16:32:44.275070Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit LoadTxDetails 2025-03-31T16:32:44.275444Z node 7 :TX_DATASHARD TRACE: Parsing write transaction for 1234567890011 at 
72075186224037888, record: Operations { Type: OPERATION_UPSERT TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } ColumnIds: 1 ColumnIds: 2 PayloadIndex: 0 PayloadFormat: FORMAT_CELLVEC } TxId: 1234567890011 TxMode: MODE_PREPARE Locks { Op: Commit } 2025-03-31T16:32:44.275545Z node 7 :TX_DATASHARD TRACE: Table /Root/table, shard: 72075186224037888, write point (Int32 : 1) 2025-03-31T16:32:44.275623Z node 7 :TX_DATASHARD TRACE: -- AddWriteRange: (Int32 : 1) table: [72057594046644480:2:1] 2025-03-31T16:32:44.275721Z node 7 :TX_DATASHARD DEBUG: LoadWriteDetails at 72075186224037888 loaded writeOp from db 3500:1234567890011 keys extracted: 1 2025-03-31T16:32:44.275784Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2025-03-31T16:32:44.275814Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit LoadWriteDetails 2025-03-31T16:32:44.275841Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-03-31T16:32:44.275870Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit BuildAndWaitDependencies 2025-03-31T16:32:44.275941Z node 7 :TX_DATASHARD TRACE: Operation [3500:1234567890011] is the new logically complete end at 72075186224037888 2025-03-31T16:32:44.275992Z node 7 :TX_DATASHARD TRACE: Operation [3500:1234567890011] is the new logically incomplete end at 72075186224037888 2025-03-31T16:32:44.276042Z node 7 :TX_DATASHARD TRACE: Activated operation [3500:1234567890011] at 72075186224037888 2025-03-31T16:32:44.276088Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2025-03-31T16:32:44.276113Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-03-31T16:32:44.276151Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit BuildWriteOutRS 2025-03-31T16:32:44.276187Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit BuildWriteOutRS 2025-03-31T16:32:44.276241Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2025-03-31T16:32:44.276267Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit BuildWriteOutRS 2025-03-31T16:32:44.276291Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit StoreAndSendWriteOutRS 2025-03-31T16:32:44.276315Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit StoreAndSendWriteOutRS 2025-03-31T16:32:44.276343Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2025-03-31T16:32:44.276387Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit StoreAndSendWriteOutRS 2025-03-31T16:32:44.276413Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit PrepareWriteTxInRS 2025-03-31T16:32:44.276456Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit PrepareWriteTxInRS 2025-03-31T16:32:44.276530Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2025-03-31T16:32:44.276555Z node 7 
:TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit PrepareWriteTxInRS 2025-03-31T16:32:44.276579Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit LoadAndWaitInRS 2025-03-31T16:32:44.276604Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit LoadAndWaitInRS 2025-03-31T16:32:44.276632Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2025-03-31T16:32:44.276666Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit LoadAndWaitInRS 2025-03-31T16:32:44.276697Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit ExecuteWrite 2025-03-31T16:32:44.276736Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit ExecuteWrite 2025-03-31T16:32:44.276772Z node 7 :TX_DATASHARD DEBUG: Executing write operation for [3500:1234567890011] at 72075186224037888 2025-03-31T16:32:44.277142Z node 7 :TX_DATASHARD TRACE: Tablet 72075186224037888 is not ready for [3500:1234567890011] execution 2025-03-31T16:32:44.277227Z node 7 :TX_DATASHARD DEBUG: tx 1234567890011 at 72075186224037888 released its data 2025-03-31T16:32:44.277283Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Restart 2025-03-31T16:32:44.277321Z node 7 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:32:44.277378Z node 7 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-03-31T16:32:44.277431Z node 7 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-31T16:32:44.277498Z node 7 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-31T16:32:44.278060Z node 7 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:32:44.278124Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit ExecuteWrite 2025-03-31T16:32:44.278190Z node 7 :TX_DATASHARD DEBUG: Executing write operation for [3500:1234567890011] at 72075186224037888 2025-03-31T16:32:44.278524Z node 7 :TX_DATASHARD TRACE: Parsing write transaction for 1234567890011 at 72075186224037888, record: Operations { Type: OPERATION_UPSERT TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } ColumnIds: 1 ColumnIds: 2 PayloadIndex: 0 PayloadFormat: FORMAT_CELLVEC } TxId: 1234567890011 TxMode: MODE_PREPARE Locks { Op: Commit } 2025-03-31T16:32:44.278620Z node 7 :TX_DATASHARD TRACE: Table /Root/table, shard: 72075186224037888, write point (Int32 : 1) 2025-03-31T16:32:44.278688Z node 7 :TX_DATASHARD TRACE: -- AddWriteRange: (Int32 : 1) table: [72057594046644480:2:1] 2025-03-31T16:32:44.278786Z node 7 :TX_DATASHARD DEBUG: tx 1234567890011 at 72075186224037888 restored its data 2025-03-31T16:32:44.278972Z node 7 :TX_DATASHARD DEBUG: Executed write operation for [3500:1234567890011] at 72075186224037888, row count=1 2025-03-31T16:32:44.279037Z node 7 :TX_DATASHARD TRACE: Lock 1234567890001 marked broken at v{min} 2025-03-31T16:32:44.279163Z node 7 :TX_DATASHARD TRACE: add locks to result: 0 2025-03-31T16:32:44.279245Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-31T16:32:44.279301Z node 7 
:TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit ExecuteWrite 2025-03-31T16:32:44.279351Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit CompleteWrite 2025-03-31T16:32:44.279397Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit CompleteWrite 2025-03-31T16:32:44.279672Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is DelayComplete 2025-03-31T16:32:44.279714Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit CompleteWrite 2025-03-31T16:32:44.279767Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit CompletedOperations 2025-03-31T16:32:44.279814Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit CompletedOperations 2025-03-31T16:32:44.279853Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2025-03-31T16:32:44.279891Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit CompletedOperations 2025-03-31T16:32:44.279936Z node 7 :TX_DATASHARD TRACE: Execution plan for [3500:1234567890011] at 72075186224037888 has finished 2025-03-31T16:32:44.279982Z node 7 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:32:44.280029Z node 7 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-03-31T16:32:44.280090Z node 7 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-31T16:32:44.280148Z node 7 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-31T16:32:44.280635Z node 7 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 3500} 2025-03-31T16:32:44.281613Z node 7 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:32:44.281676Z node 7 :TX_DATASHARD TRACE: Complete execution for [3500:1234567890011] at 72075186224037888 on unit CompleteWrite 2025-03-31T16:32:44.281754Z node 7 :TX_DATASHARD DEBUG: Complete write [3500 : 1234567890011] from 72075186224037888 at tablet 72075186224037888 send result to client [7:803:2661] 2025-03-31T16:32:44.281840Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 >> GenericFederatedQuery::YdbFilterPushdown >> TKesusTest::TestAcquireRepeat [GOOD] >> TKesusTest::TestAcquireDowngrade >> TAsyncIndexTests::SplitIndexWithReboots[TabletReboots] [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-51 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-52 |95.2%| [TA] $(B)/ydb/core/tx/datashard/ut_write/test-results/unittest/{meta.json ... results_accumulator.log} |95.2%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_write/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpAgg::AggWithSelfLookup2 [GOOD] >> KqpAgg::AggWithHop >> KqpRanges::UpdateWhereInFullScan-UseSink [GOOD] >> KqpRanges::ScanKeyPrefix >> TKesusTest::TestAcquireDowngrade [GOOD] >> TKesusTest::TestAcquireBeforeTimeoutViaSessionTimeout >> KqpSort::TopSortResults [GOOD] >> KqpSort::TopParameterFilter ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::SplitIndexWithReboots[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [1:135:2157] sender: [1:136:2058] recipient: [1:111:2142] 2025-03-31T16:29:39.473704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:29:39.473824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:29:39.473868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:29:39.473901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:29:39.473955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:29:39.473985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:29:39.474053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:29:39.474150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:29:39.474436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:29:39.567810Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 
2025-03-31T16:29:39.567897Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:169:2058] recipient: [1:15:2062] 2025-03-31T16:29:39.598918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:29:39.599125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:29:39.599313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:29:39.603982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:29:39.604285Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:29:39.604964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:39.605229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:29:39.610249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:39.611913Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:29:39.611987Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:39.612108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:29:39.612178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:29:39.612227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:29:39.612370Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:210:2058] recipient: [1:208:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:210:2058] recipient: [1:208:2209] Leader for TabletID 72057594037968897 is [1:214:2213] sender: [1:215:2058] recipient: [1:208:2209] 2025-03-31T16:29:39.619802Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:235:2058] recipient: [1:15:2062] 2025-03-31T16:29:39.749044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:29:39.749280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:39.749475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:29:39.749686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:29:39.749740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 
2025-03-31T16:29:39.754661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:39.754827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:29:39.755067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:39.755150Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:29:39.755187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:29:39.755219Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:29:39.763964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:39.764046Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:29:39.764085Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:29:39.770742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:39.770814Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:39.770853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:39.770904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:29:39.774971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:29:39.780023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:29:39.780294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:135:2157] sender: [1:250:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:29:39.781384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:39.781522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 135 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:29:39.781574Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:39.781917Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:29:39.781977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:39.782164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:29:39.782267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:29:39.784636Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:29:39.784680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:29:39.784859Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:39.784913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:202:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:29:39.785220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:39.785260Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:29:39.785352Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:29:39.785387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:29:39.785428Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:29:39.785476Z no ... 
CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 
101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409547 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:32:46.125414Z node 93 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-31T16:32:46.125731Z node 93 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path 
"/MyRoot/Table/UserDefinedIndex/indexImplTable" took 361us result status StatusSuccess 2025-03-31T16:32:46.126715Z node 93 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 50 } } Tuple { } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\002\000\004\000\000\0002\000\000\000\000\000\000\200" 
IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpNewEngine::JoinWithParams >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-57 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-58 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-68 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-69 >> TInterconnectTest::TestManyEvents >> KqpReturning::ReturningWorksIndexedInsert+QueryService [GOOD] >> KqpReturning::ReturningWorksIndexedInsert-QueryService ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TConflictReadWriteOlap [FAIL] Test command err: Trying to start YDB, gRPC: 64262, MsgBus: 10845 2025-03-31T16:32:09.553015Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012325395218806:2200];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:09.554067Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001288/r3tmp/tmparQnyz/pdisk_1.dat 2025-03-31T16:32:09.867226Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:32:09.888770Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:09.888896Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:32:09.891679Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64262, node 1 2025-03-31T16:32:09.983851Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:32:09.983876Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:32:09.983883Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:32:09.984022Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 
TClient is connected to server localhost:10845 TClient is connected to server localhost:10845 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:32:10.471459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:12.215385Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012338280121218:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:12.215504Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012338280121211:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:12.215950Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:12.219848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:32:12.228863Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012338280121225:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:32:12.330958Z node 1 :TX_PROXY ERROR: Actor# [1:7488012338280121276:2335] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:32:12.638985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-31T16:32:12.780938Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-31T16:32:13.650622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:32:14.544998Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488012325395218806:2200];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:14.545161Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 25699, MsgBus: 15986 2025-03-31T16:32:21.072067Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012377581054616:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:21.072143Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001288/r3tmp/tmpwUFROf/pdisk_1.dat 2025-03-31T16:32:21.169939Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25699, node 2 2025-03-31T16:32:21.207238Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:21.207330Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:32:21.223457Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:32:21.255941Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:32:21.255964Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:32:21.255972Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:32:21.256094Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15986 TClient is connected to server localhost:15986 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-31T16:32:21.707978Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:32:24.012044Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012390465957162:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:24.012113Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012390465957148:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:24.012373Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:24.015351Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:32:24.022413Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488012390465957177:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:32:24.079076Z node 2 :TX_PROXY ERROR: Actor# [2:7488012390465957228:2336] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:32:24.123685Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-31T16:32:24.272617Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7488012390465957413:2344];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:32:24.272625Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[2:7488012390465957415:2345];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:32:24.272852Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[2:7488012390465957415:2345];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:32:24.273169Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[2:7488012390465957415:2345];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:32:24.273293Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[2:7488012390465957415:2345];tablet_i ... 
6224038008;self_id=[2:7488012411940799844:3315];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038008;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:32:43.393940Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038008;self_id=[2:7488012411940799844:3315];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038008;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:32:43.394681Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038042;self_id=[2:7488012411940799807:3308];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038042;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:32:43.394916Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038042;self_id=[2:7488012411940799807:3308];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038042;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:32:43.395688Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038021;self_id=[2:7488012411940799802:3305];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038021;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:32:43.395898Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038021;self_id=[2:7488012411940799802:3305];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038021;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:32:43.398216Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038026;self_id=[2:7488012407645832427:3270];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038026;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:32:43.398446Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038026;self_id=[2:7488012407645832427:3270];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038026;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:32:43.400256Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038049;self_id=[2:7488012407645832303:3227];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038049;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:32:43.400435Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038049;self_id=[2:7488012407645832303:3227];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038049;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:32:43.406951Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038023;self_id=[2:7488012411940799934:3320];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038023;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:32:43.407243Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038023;self_id=[2:7488012411940799934:3320];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038023;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:32:43.414677Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038060;self_id=[2:7488012407645832462:3289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038060;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:32:43.415008Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038060;self_id=[2:7488012407645832462:3289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038060;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:32:43.415266Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038040;self_id=[2:7488012411940799833:3314];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038040;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:32:43.415451Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038040;self_id=[2:7488012411940799833:3314];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038040;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:32:43.415498Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038024;self_id=[2:7488012407645832472:3294];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038024;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:32:43.415592Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038038;self_id=[2:7488012407645832481:3298];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038038;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:32:43.415645Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038024;self_id=[2:7488012407645832472:3294];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038024;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:32:43.415736Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038038;self_id=[2:7488012407645832481:3298];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038038;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:32:43.416713Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038059;self_id=[2:7488012407645832500:3302];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038059;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:32:43.416944Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038059;self_id=[2:7488012407645832500:3302];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038059;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:32:43.417164Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038045;self_id=[2:7488012407645832460:3288];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038045;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:32:43.417323Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038045;self_id=[2:7488012407645832460:3288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038045;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:32:43.417711Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038039;self_id=[2:7488012411940799947:3323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038039;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:32:43.417886Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038039;self_id=[2:7488012411940799947:3323];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038039;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:32:43.418147Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038043;self_id=[2:7488012411940799938:3321];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038043;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:32:43.418311Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038043;self_id=[2:7488012411940799938:3321];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038043;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:32:43.423323Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038011;self_id=[2:7488012411940799930:3318];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038011;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:32:43.425930Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038003;self_id=[2:7488012411940799986:3325];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038003;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:32:43.426039Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038011;self_id=[2:7488012411940799930:3318];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038011;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:32:43.426194Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038003;self_id=[2:7488012411940799986:3325];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038003;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:32:43.737757Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037992;self_id=[2:7488012394760926003:2485];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037992;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:32:43.738090Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037992;self_id=[2:7488012394760926003:2485];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037992;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:32:43.763844Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037934;self_id=[2:7488012394760926290:2551];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037934;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:32:43.764144Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037934;self_id=[2:7488012394760926290:2551];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037934;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:146, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictReadWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (ABORTED != SUCCESS)
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: tablet lock have another internal generation counter: 18446744073709551615 != 0, code: 2001 , with diff: (ABORT|SUCC)E(D|SS)
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191FE23B
1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x196C634F
2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:146: DoExecute @ 0x18DD15F8
3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18D0BACA
4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:172: Execute_ @ 0x18DB8CCA
5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18DBEF27
6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27
7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27
8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18DBEF27
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18DBEF27
10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196FD375
11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196FD375
12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196FD375
13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x196CCEC8
14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18DBE0F3
15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x196CE795
16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196F78EC
17. ??:0: ?? @ 0x7F2AADD52D8F
18. ??:0: ?? @ 0x7F2AADD52E3F
19. ??:0: ?? @ 0x163B4028
>> DataShardSnapshots::VolatileSnapshotTimeoutRefresh [GOOD]
>> DataShardSnapshots::VolatileSnapshotCleanupOnReboot
>> KqpLocksTricky::TestNoLocksIssue+withSink [GOOD]
>> KqpNewEngine::BrokenLocksAtROTx [GOOD]
>> KqpNewEngine::BrokenLocksAtROTxSharded
>> KqpRanges::NullInKeySuffix [GOOD]
>> KqpRanges::NullInPredicate
>> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-StrictAclCheck [GOOD]
>> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-DomainLoginOnly
>> KqpNewEngine::DqSourceCount [GOOD]
>> KqpNewEngine::DqSource
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-69 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-70
>> TKesusTest::TestAcquireSemaphoreRebootTimeout [GOOD]
>> TKesusTest::TestAcquireSemaphoreViaDecrease
>> TInterconnectTest::TestManyEvents [GOOD]
>> TInterconnectTest::TestCrossConnect
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpLocksTricky::TestNoLocksIssue+withSink [GOOD]
Test command err:
Trying to start YDB, gRPC: 5189, MsgBus: 15430
2025-03-31T16:32:25.716239Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012396470232560:2062];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:32:25.716300Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001282/r3tmp/tmpT2JWYs/pdisk_1.dat
2025-03-31T16:32:26.031032Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 5189, node 1
2025-03-31T16:32:26.074437Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T16:32:26.074463Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T16:32:26.074478Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T16:32:26.074694Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
2025-03-31T16:32:26.090136Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:32:26.090332Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:32:26.092052Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:15430
TClient is connected to server localhost:15430
WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:32:26.580727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:26.605988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:26.753766Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:26.913383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:26.993666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:28.520471Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012409355136239:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:28.520543Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:28.847890Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:32:28.871059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:32:28.895827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:32:28.918402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:32:28.940975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:32:28.970268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:32:29.007780Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012413650104045:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:29.007853Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:29.007932Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012413650104050:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:29.011177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:32:29.019405Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012413650104052:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:32:29.112544Z node 1 :TX_PROXY ERROR: Actor# [1:7488012413650104107:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:32:30.421942Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976710675; 2025-03-31T16:32:30.432799Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488012417945071745:2496], Table: `/Root/Test` ([72057594046644480:9:1]), SessionActorId: [1:7488012413650104397:2496]Got LOCKS BROKEN for table `/Root/Test`. ShardID=72075186224037914, Sink=[1:7488012417945071745:2496].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-03-31T16:32:30.433506Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488012417945071738:2496], SessionActorId: [1:7488012413650104397:2496], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[1:7488012413650104397:2496]. isRollback=0 2025-03-31T16:32:30.433760Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzhhMGEzYWQtM2NlMTM1ZDMtNDRiYjQzMjktYmM2NDkzODM=, ActorId: [1:7488012413650104397:2496], ActorState: ExecuteState, TraceId: 01jqpgr8pk5xsq865py6srwn71, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [1:7488012417945071826:2496] from: [1:7488012417945071738:2496] 2025-03-31T16:32:30.433859Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7488012417945071826:2496] TxId: 281474976710675. Ctx: { TraceId: 01jqpgr8pk5xsq865py6srwn71, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzhhMGEzYWQtM2NlMTM1ZDMtNDRiYjQzMjktYmM2NDkzODM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-03-31T16:32:30.434033Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzhhMGEzYWQtM2NlMTM1ZDMtNDRiYjQzMjktYmM2NDkzODM=, ActorId: [1:7488012413650104397:2496], ActorState: ExecuteState, TraceId: 01jqpgr8pk5xsq865py6srwn71, Create QueryResponse for error on request, msg:
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 Trying to start YDB, gRPC: 7878, MsgBus: 19087 2025-03-31T16:32:31.157787Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012418811540103:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:31.157882Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001282/r3tmp/tmp295Qfh/pdisk_1.dat 2025-03-31T16:32:31.244880Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7878, node 2 2025-03-31T16:32:31.288909Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:31.288988Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:32:31.290551Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:32:31.297884Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:32:31.297908Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:32:31.297918Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:32:31.298038Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19087 TClient is connected to server localhost:19087 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty ... 
6715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-31T16:32:36.017376Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037915;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-31T16:32:36.019414Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037949;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-31T16:32:36.020775Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-31T16:32:36.022899Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037943;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-31T16:32:36.158035Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488012418811540103:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:36.158101Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:32:36.247209Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=281474976715664;tx_id=281474976715664;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715664; 2025-03-31T16:32:36.247214Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=281474976715664;tx_id=281474976715664;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715664; 2025-03-31T16:32:36.247873Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037947;tx_state=TTxProgressTx::Execute;tx_current=281474976715664;tx_id=281474976715664;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715664; 2025-03-31T16:32:36.248007Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=281474976715664;tx_id=281474976715664;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715664; 2025-03-31T16:32:36.701173Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZjEwZWQ0Y2MtODNhNTdkNjMtZGM4NmM2NjMtYmVmNmYzNDQ=, ActorId: [2:7488012440286380310:2817], ActorState: ExecuteState, TraceId: 01jqpgrervfssjdv8ygvprp1yc, Create QueryResponse for error on request, msg: 2025-03-31T16:32:36.701815Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037936;tx_state=TTxProgressTx::Execute;tx_current=281474976715670;tx_id=281474976715670;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715670; 2025-03-31T16:32:36.703020Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037936;self_id=[2:7488012431696443514:2374];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037936;event=TEvWrite;fline=manager.cpp:116;event=abort;tx_id=281474976715665;problem=finished; 2025-03-31T16:32:36.703342Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037936;tx_state=TTxProgressTx::Complete;fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=lock invalidated;tx_id=281474976715670; 2025-03-31T16:32:36.703437Z node 2 :TX_COLUMNSHARD_TX WARN: fline=manager.cpp:134;event=abort;tx_id=281474976715665;problem=finished; Trying to start YDB, gRPC: 61948, MsgBus: 
3594
2025-03-31T16:32:40.889192Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:304:2347], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:32:40.889469Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:32:40.889559Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001282/r3tmp/tmpmhhIS9/pdisk_1.dat TServer::EnableGrpc on GrpcPort 61948, node 3 2025-03-31T16:32:41.339929Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:32:41.343018Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:32:41.343104Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:32:41.343157Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:32:41.343610Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:32:41.379421Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:41.379543Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:32:41.391048Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3594 TClient is connected to server localhost:3594 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:32:41.703099Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:41.811581Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:42.118436Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:32:42.540763Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:42.832018Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:43.371342Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1807:3403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:43.371526Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:43.392976Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:32:43.616600Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:32:43.858525Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:32:44.113543Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:32:44.365807Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:32:44.696813Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:32:44.991025Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:2396:3856], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:44.991183Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:44.991561Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:2401:3861], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:44.998565Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:32:45.159786Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:2403:3863], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:32:45.206482Z node 3 :TX_PROXY ERROR: Actor# [3:2466:3907] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:32:46.184794Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:32:46.438805Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-31T16:32:46.774818Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 >> KqpNotNullColumns::InsertNotNullPkPg+useSink >> KqpNewEngine::PkRangeSelect3 [GOOD] >> KqpNewEngine::PkRangeSelect4 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-69 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-70 >> TKesusTest::TestAcquireSemaphoreViaDecrease [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-21 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-22 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-21 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-22 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-45 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-46 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAcquireSemaphoreViaDecrease [GOOD] Test command err: 2025-03-31T16:32:11.270949Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-31T16:32:11.271074Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-31T16:32:11.286457Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-31T16:32:11.286586Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-31T16:32:11.312127Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-31T16:32:11.312760Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:130:2156], cookie=11865375920580177407, session=0, seqNo=0) 2025-03-31T16:32:11.312950Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-03-31T16:32:11.325408Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:130:2156], cookie=11865375920580177407, session=1) 2025-03-31T16:32:11.325794Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:130:2156], cookie=12300919035948730036, session=0, seqNo=0) 2025-03-31T16:32:11.325948Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-03-31T16:32:11.338288Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:130:2156], cookie=12300919035948730036, session=2) 2025-03-31T16:32:11.338863Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[1:141:2165], cookie=10380860524989803764, 
name="Sem1", limit=1) 2025-03-31T16:32:11.339005Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 1 "Sem1" 2025-03-31T16:32:11.351473Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[1:141:2165], cookie=10380860524989803764) 2025-03-31T16:32:11.351823Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:130:2156], cookie=111, session=1, semaphore="Sem1" count=1) 2025-03-31T16:32:11.352014Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2025-03-31T16:32:11.352242Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:130:2156], cookie=222, session=2, semaphore="Sem1" count=1) 2025-03-31T16:32:11.364648Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:130:2156], cookie=111) 2025-03-31T16:32:11.364746Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:130:2156], cookie=222) 2025-03-31T16:32:11.365321Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:149:2173], cookie=11795597923891533886, name="Sem1") 2025-03-31T16:32:11.365432Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:149:2173], cookie=11795597923891533886) 2025-03-31T16:32:11.366985Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:152:2176], cookie=17850889538754879508, name="Sem1") 2025-03-31T16:32:11.367068Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:152:2176], cookie=17850889538754879508) 2025-03-31T16:32:11.760617Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:11.773039Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:12.123848Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:12.136167Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:12.487457Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:12.499730Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:12.843397Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:12.855627Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:13.200331Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:13.212558Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:13.554571Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:13.567753Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:13.909980Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:13.921845Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:14.278845Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:14.292494Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:14.637259Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:14.649456Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:15.032335Z node 1 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:15.044654Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:15.413601Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:15.430215Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:15.785881Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:15.798778Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:16.159547Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:16.175892Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:16.541255Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:16.556155Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:16.954604Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:16.966865Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:17.333172Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:17.345292Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:17.696681Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:17.709145Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:18.060124Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:18.072557Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:18.434812Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:18.448238Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:18.821089Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:18.833058Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:19.197878Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:19.211353Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:19.565699Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:19.579964Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:19.927809Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:19.940102Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:20.297527Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:20.309694Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:20.669848Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:20.681945Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:21.042746Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:21.059703Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:21.425404Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:21.437835Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:21.790525Z node 1 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:21.807978Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:22.151099Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:22.163417Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:22.558562Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:22.570800Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:22.921645Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:22.933613Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:23.295488Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:23.307786Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:23.659179Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:23.671497Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:24.012759Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:24.025085Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:24.387764Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:24.400284Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:24.753541Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:24.765760Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:25.127862Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:25.140182Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:25.492385Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:25.508985Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:25.862438Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:25.875071Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:26.274595Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:26.286967Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:26.639652Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:26.651900Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:27.008003Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:27.024269Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:27.383268Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:27.400275Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:27.752512Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:27.764669Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:28.170067Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:2 ... 
2057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:41.466639Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:41.820522Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:41.832640Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:42.183917Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:42.196164Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:42.572095Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:42.584290Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:42.935433Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:42.947480Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:43.298336Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:43.310695Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:43.661347Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:43.673041Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:44.023194Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:44.035350Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:44.401546Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:44.413532Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:44.768962Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:44.781177Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:45.128587Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:45.141344Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:45.517324Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:45.529784Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:45.885103Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:45.897519Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:46.307699Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:46.320230Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:46.681567Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:46.693671Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:47.057021Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:47.083928Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:47.445285Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:47.457641Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:47.809004Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:47.820996Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:48.175491Z node 4 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:48.187883Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:48.553028Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:48.565541Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:48.919178Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:48.931131Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:49.285717Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:49.297916Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:49.641424Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:49.653675Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:50.005643Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2025-03-31T16:32:50.005731Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 2025-03-31T16:32:50.005792Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 1 "Sem1" owner link 2025-03-31T16:32:50.018017Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2025-03-31T16:32:50.028821Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:411:2411], cookie=11727507351046493083, name="Sem1") 2025-03-31T16:32:50.028909Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:411:2411], cookie=11727507351046493083) 2025-03-31T16:32:50.376375Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-31T16:32:50.376492Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-31T16:32:50.398709Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-31T16:32:50.399044Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-31T16:32:50.427968Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-31T16:32:50.428526Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:132:2158], cookie=4442678071371468837, session=0, seqNo=0) 2025-03-31T16:32:50.428686Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-03-31T16:32:50.440885Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:132:2158], cookie=4442678071371468837, session=1) 2025-03-31T16:32:50.441231Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:132:2158], cookie=5241917251602537786, session=0, seqNo=0) 2025-03-31T16:32:50.441468Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-03-31T16:32:50.453735Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:132:2158], cookie=5241917251602537786, session=2) 2025-03-31T16:32:50.454081Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:132:2158], cookie=5028336539021409776, session=0, seqNo=0) 2025-03-31T16:32:50.454205Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 3 2025-03-31T16:32:50.466584Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:132:2158], cookie=5028336539021409776, session=3) 2025-03-31T16:32:50.467300Z node 5 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:145:2169], cookie=13723501504890656857, name="Sem1", limit=3) 2025-03-31T16:32:50.467449Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 1 "Sem1" 2025-03-31T16:32:50.481340Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:145:2169], cookie=13723501504890656857) 2025-03-31T16:32:50.481747Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:132:2158], cookie=111, session=1, semaphore="Sem1" count=2) 2025-03-31T16:32:50.481906Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2025-03-31T16:32:50.482087Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:132:2158], cookie=222, session=2, semaphore="Sem1" count=1) 2025-03-31T16:32:50.482153Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #2 session 2 2025-03-31T16:32:50.482241Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:132:2158], cookie=333, session=3, semaphore="Sem1" count=1) 2025-03-31T16:32:50.494736Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:132:2158], cookie=111) 2025-03-31T16:32:50.494828Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:132:2158], cookie=222) 2025-03-31T16:32:50.494862Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:132:2158], cookie=333) 2025-03-31T16:32:50.495559Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:153:2177], cookie=16348014015975095139, name="Sem1") 2025-03-31T16:32:50.495664Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:153:2177], cookie=16348014015975095139) 2025-03-31T16:32:50.496146Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:156:2180], cookie=7560428376389303894, name="Sem1") 2025-03-31T16:32:50.496231Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:156:2180], cookie=7560428376389303894) 2025-03-31T16:32:50.496533Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:132:2158], cookie=444, session=1, semaphore="Sem1" count=1) 2025-03-31T16:32:50.496662Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #3 session 3 2025-03-31T16:32:50.510126Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:132:2158], cookie=444) 2025-03-31T16:32:50.510793Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:161:2185], cookie=3135720225172897340, name="Sem1") 2025-03-31T16:32:50.510902Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:161:2185], cookie=3135720225172897340) 2025-03-31T16:32:50.511454Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:164:2188], cookie=5881096221305613443, name="Sem1") 2025-03-31T16:32:50.511537Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:164:2188], cookie=5881096221305613443) 2025-03-31T16:32:50.522043Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-31T16:32:50.522124Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxInitSchema::Execute 2025-03-31T16:32:50.522651Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-31T16:32:50.522885Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-31T16:32:50.561451Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-31T16:32:50.561616Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2025-03-31T16:32:50.561654Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #2 session 2 2025-03-31T16:32:50.561675Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #3 session 3 2025-03-31T16:32:50.562052Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:204:2218], cookie=8454611801462059098, name="Sem1") 2025-03-31T16:32:50.562143Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:204:2218], cookie=8454611801462059098) 2025-03-31T16:32:50.562630Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:213:2226], cookie=11528252310922300809, name="Sem1") 2025-03-31T16:32:50.562688Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:213:2226], cookie=11528252310922300809) >> TTabletPipeTest::TestConnectReject >> KqpNewEngine::MultiUsageInnerConnection [GOOD] >> KqpNewEngine::LookupColumns [GOOD] >> KqpRanges::Like [GOOD] >> TTabletResolver::TabletResolvePriority [GOOD] >> TTabletPipeTest::TestConnectReject [GOOD] >> TTabletPipeTest::TestClientDisconnectAfterPipeOpen |95.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletResolver::TabletResolvePriority [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-52 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-53 >> TResourceBroker::TestQueueWithConfigure >> TTabletPipeTest::TestClientDisconnectAfterPipeOpen [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::MultiUsageInnerConnection [GOOD] Test command err: Trying to start YDB, gRPC: 19968, MsgBus: 22944 2025-03-31T16:32:07.523819Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012318418662808:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:07.523928Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a95/r3tmp/tmpBxBGA0/pdisk_1.dat 2025-03-31T16:32:07.839248Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19968, node 1 2025-03-31T16:32:07.917513Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:07.917683Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:32:07.919368Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:32:07.923586Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:32:07.923608Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 
2025-03-31T16:32:07.923614Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:32:07.923727Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22944 TClient is connected to server localhost:22944 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:32:08.363225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:08.386633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:08.510902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:08.657071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:08.732129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:10.132977Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012331303566475:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:10.133099Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:10.396716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:32:10.421885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:32:10.446388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:32:10.472403Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:32:10.513453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:32:10.546518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:32:10.623542Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012331303566993:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:10.623628Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:10.623685Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012331303566998:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:10.627348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:32:10.636672Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012331303567000:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:32:10.722071Z node 1 :TX_PROXY ERROR: Actor# [1:7488012331303567055:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 21213, MsgBus: 15346 2025-03-31T16:32:12.678487Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012337527853646:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:12.678601Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a95/r3tmp/tmpZMCVhh/pdisk_1.dat 2025-03-31T16:32:12.783255Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21213, node 2 2025-03-31T16:32:12.808359Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:12.808483Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:32:12.810603Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:32:12.831424Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:32:12.831452Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:32:12.831461Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:32:12.831578Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15346 TClient is connected to server localhost:15346 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:32:13.197426Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:13.207065Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:32:13.250418Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:13.394903Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:13.472769Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:15.885954Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012350412757320:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:15.886085Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: { ... WorkloadService] [TPoolFetcherActor] ActorId: [6:7488012454506311910:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:39.871528Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:39.924318Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:32:39.958170Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:32:39.990987Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:32:40.023421Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:32:40.057174Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:32:40.094519Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:32:40.147420Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488012458801279718:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:40.147559Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:40.147833Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488012458801279723:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:40.152027Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:32:40.162851Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7488012458801279725:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:32:40.225022Z node 6 :TX_PROXY ERROR: Actor# [6:7488012458801279779:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:32:41.262573Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7488012441621408235:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:41.262669Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 10280, MsgBus: 17964 2025-03-31T16:32:43.147773Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7488012473493957844:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:43.147851Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a95/r3tmp/tmpHRyGRR/pdisk_1.dat 2025-03-31T16:32:43.270027Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:32:43.297882Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:43.297968Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:32:43.299454Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10280, node 7 2025-03-31T16:32:43.348691Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:32:43.348729Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:32:43.348737Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:32:43.348887Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17964 TClient is connected to server localhost:17964 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:32:43.841549Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:43.860347Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:43.941404Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:44.101862Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:44.182845Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:47.293789Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488012490673828818:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:47.293977Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:47.346028Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:32:47.382333Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:32:47.418471Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:32:47.454668Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:32:47.493007Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:32:47.535211Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:32:47.616587Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488012490673829333:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:47.616667Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:47.616836Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488012490673829338:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:47.621578Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:32:47.633273Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7488012490673829340:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:32:47.715904Z node 7 :TX_PROXY ERROR: Actor# [7:7488012490673829395:3451] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:32:48.147921Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7488012473493957844:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:48.148019Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpNewEngine::JoinWithParams [GOOD] >> KqpNewEngine::JoinIdxLookupWithPredicate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpRanges::Like [GOOD] Test command err: Trying to start YDB, gRPC: 30572, MsgBus: 18655 2025-03-31T16:32:12.184660Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012339972877413:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:12.184736Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a83/r3tmp/tmpadx87R/pdisk_1.dat 2025-03-31T16:32:12.444346Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30572, node 1 2025-03-31T16:32:12.508257Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:32:12.508289Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:32:12.508299Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:32:12.508471Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:32:12.520798Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:12.520916Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:32:12.522789Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18655 TClient is connected to server localhost:18655 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:32:12.979948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:12.997378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:13.143211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:13.301274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:13.367623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:15.062439Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012352857781071:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:15.062551Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:15.388991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:32:15.419431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:32:15.447196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:32:15.476157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:32:15.505200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:32:15.578883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:32:15.662608Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012352857781593:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:15.662713Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:15.663046Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012352857781598:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:15.666902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:32:15.676894Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012352857781600:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:32:15.745385Z node 1 :TX_PROXY ERROR: Actor# [1:7488012352857781654:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:32:16.560805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:32:16.762937Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T16:32:16.945746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-31T16:32:17.048049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-31T16:32:17.184663Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488012339972877413:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:17.184738Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:32:17.285692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 10674, MsgBus: 7664 2025-03-31T16:32:18.421228Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012364386428025:2209];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:18.421392Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a83/r3tmp/tmpvFKw54/pdisk_1.dat 2025-03-31T16:32:18.497963Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:32:18.519704Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:18.519790Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:32:18.521784Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10674, node 2 2025-03-31T16:32:18.566989Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:32:18.567021Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:32:18.567028Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:32:18.567160Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7664 TClient is connected to 
server localhost:7664 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:32:18.985476Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation ty ... suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:32:41.582472Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:32:41.614264Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:32:41.648891Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:32:41.687468Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:32:41.763926Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:32:41.836703Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7488012463569592736:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:41.836832Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:41.836836Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7488012463569592741:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:41.839870Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:32:41.849059Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7488012463569592743:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:32:41.950649Z node 5 :TX_PROXY ERROR: Actor# [5:7488012463569592799:3451] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:32:42.928191Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:32:43.110734Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7488012450684688537:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:43.110811Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:32:43.529895Z node 5 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438763556, txId: 281474976715673] shutting down Trying to start YDB, gRPC: 23707, MsgBus: 6750 2025-03-31T16:32:44.789950Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7488012475440565453:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:44.790011Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a83/r3tmp/tmp0uHGpr/pdisk_1.dat 2025-03-31T16:32:44.910741Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:32:44.941398Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:44.941542Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:32:44.944234Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23707, node 6 2025-03-31T16:32:45.007724Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:32:45.007752Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:32:45.007761Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:32:45.007920Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6750 TClient is connected to server localhost:6750 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:32:45.613174Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:45.631133Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:45.700377Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:45.892194Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:45.974547Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:48.746271Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488012492620436408:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:48.746379Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:48.806972Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:32:48.842915Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:32:48.877096Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:32:48.912624Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:32:48.952636Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:32:49.028773Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:32:49.108387Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488012496915404223:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:49.108489Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:49.108508Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488012496915404228:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:49.112172Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:32:49.122501Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7488012496915404230:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:32:49.215951Z node 6 :TX_PROXY ERROR: Actor# [6:7488012496915404285:3450] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:32:49.790360Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7488012475440565453:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:49.790434Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:32:50.315398Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::LookupColumns [GOOD]
Test command err:
Trying to start YDB, gRPC: 19196, MsgBus: 5988 2025-03-31T16:32:10.656032Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012327982554713:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:10.656083Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a8e/r3tmp/tmpZKXmxD/pdisk_1.dat 2025-03-31T16:32:10.982047Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19196, node 1 2025-03-31T16:32:11.050789Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:11.050930Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:32:11.052909Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:32:11.053597Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:32:11.053627Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:32:11.053638Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:32:11.053818Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5988 TClient is connected to server localhost:5988 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:32:11.488976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:32:11.511420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:32:11.628612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:11.770187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:11.840055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:13.202778Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012340867458358:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:13.202961Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:13.501835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:32:13.553817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:32:13.580997Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:32:13.614444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:32:13.642227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:32:13.704514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:32:13.748568Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012340867458872:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:13.748617Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:13.748785Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012340867458877:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:13.753410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:32:13.763234Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012340867458879:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:32:13.818520Z node 1 :TX_PROXY ERROR: Actor# [1:7488012340867458932:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 17694, MsgBus: 6059 2025-03-31T16:32:15.494105Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012353300541864:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:15.494187Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a8e/r3tmp/tmpj4rAdE/pdisk_1.dat 2025-03-31T16:32:15.644783Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17694, node 2 2025-03-31T16:32:15.665999Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:15.666116Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:32:15.668412Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:32:15.706552Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:32:15.706580Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:32:15.706588Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:32:15.706726Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6059 TClient is connected to server localhost:6059 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:32:16.163863Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:16.181414Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:32:16.238724Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:16.416350Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:16.493388Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:18.919035Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012366185445544:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:18.919165Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:42.000147Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:42.054146Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:32:42.087254Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:32:42.122193Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:32:42.163424Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:32:42.194952Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:32:42.229069Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:32:42.273274Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488012469531584107:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:42.273386Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:42.273399Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488012469531584112:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:42.277105Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:32:42.292780Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7488012469531584114:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:32:42.358158Z node 6 :TX_PROXY ERROR: Actor# [6:7488012469531584167:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:32:43.118001Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7488012452351712624:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:43.118103Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 1731, MsgBus: 21505 2025-03-31T16:32:44.571778Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7488012477743526798:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:44.571867Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a8e/r3tmp/tmpt0nFf5/pdisk_1.dat 2025-03-31T16:32:44.682013Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:32:44.712365Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:44.712492Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:32:44.714156Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1731, node 7 2025-03-31T16:32:44.785644Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:32:44.785669Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:32:44.785678Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:32:44.785788Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21505 TClient is connected to server localhost:21505 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:32:45.379975Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:45.400002Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:45.475043Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:45.662782Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:45.743833Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:48.911281Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488012494923397763:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:48.911429Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:48.967239Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:32:49.038063Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:32:49.073460Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:32:49.112363Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:32:49.149676Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:32:49.221695Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:32:49.303275Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488012499218365581:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:49.303381Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:49.303471Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488012499218365586:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:49.307310Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:32:49.318856Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7488012499218365588:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:32:49.386408Z node 7 :TX_PROXY ERROR: Actor# [7:7488012499218365643:3456] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:32:49.571783Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7488012477743526798:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:49.571875Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
>> KqpRanges::ScanKeyPrefix [GOOD]
>> GenericFederatedQuery::PostgreSQLSelectCount [GOOD]
>> TResourceBroker::TestQueueWithConfigure [GOOD]
>> TResourceBroker::TestOverusageDifferentResources
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestClientDisconnectAfterPipeOpen [GOOD]
Test command err:
2025-03-31T16:32:52.478671Z node 3 :PIPE_SERVER ERROR: [9437185] NodeDisconnected NodeId# 2
>> KqpNewEngine::DqSourceLocksEffects [GOOD]
>> TResourceBroker::TestOverusageDifferentResources [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBroker::TestOverusageDifferentResources [GOOD]
Test command err:
2025-03-31T16:32:53.048397Z node 1 :RESOURCE_BROKER ERROR: Configure result: Success: false Message: "task \'compaction1\' uses unknown queue \'queue_default1\'" 2025-03-31T16:32:53.048633Z node 1 :RESOURCE_BROKER ERROR: Configure result: Success: false Message: "task \'unknown\' is required" 2025-03-31T16:32:53.048795Z node 1 :RESOURCE_BROKER ERROR: Configure result: Success: false Message: "task \'unknown\' uses unknown queue \'queue_default\'"
>> GenericFederatedQuery::YdbSelectCount
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-58 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-59
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-69 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-70
>> KqpSort::TopParameterFilter [GOOD]
------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::PostgreSQLSelectCount [GOOD]
Test command err:
Trying to start YDB, gRPC: 19602, MsgBus: 28736 2025-03-31T16:32:34.804429Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012431516087455:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:34.804531Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000f22/r3tmp/tmpK85moi/pdisk_1.dat 2025-03-31T16:32:35.090691Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19602, node 1 2025-03-31T16:32:35.167444Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:32:35.167467Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T16:32:35.167496Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:32:35.167643Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:32:35.180748Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:35.180892Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:32:35.182847Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28736 TClient is connected to server localhost:28736 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:32:35.638134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:37.341804Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012444400990010:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:37.342858Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:37.816790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-03-31T16:32:37.955765Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012444400990134:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:37.955866Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:37.955956Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012444400990139:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:37.960539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-03-31T16:32:37.968240Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012444400990141:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-31T16:32:38.038314Z node 1 :TX_PROXY ERROR: Actor# [1:7488012448695957477:2399] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:32:39.079925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:32:39.474214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:1, at schemeshard: 72057594046644480 2025-03-31T16:32:39.804097Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488012431516087455:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:39.804187Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:32:39.892843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:32:40.343677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-31T16:32:40.805869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-31T16:32:41.265842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-03-31T16:32:41.303978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-03-31T16:32:42.978193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710702:0, at schemeshard: 72057594046644480 Call DescribeTable. 
data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT }
CRAB Expected: data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT }
CRAB Actual: data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT }
DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } columns { name: "col2" type { type_id: DOUBLE } } } error { status: SUCCESS }
Call ListSplits. selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } from { table: "example_1" } }
CRAB Expected: selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } from { table: "example_1" } }
CRAB Actual: selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } from { table: "example_1" } }
ListSplits result. GRpcStatusCode: 0
Call ReadSplits. splits { select { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL
CRAB Expected: splits { select { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL
CRAB Actual: splits { select { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL
ReadSplits result ...
gRPC: 13908, MsgBus: 27940 2025-03-31T16:32:43.958773Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012473781056510:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:43.958871Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000f22/r3tmp/tmpREK8xz/pdisk_1.dat 2025-03-31T16:32:44.052299Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13908, node 2 2025-03-31T16:32:44.092804Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:44.092897Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:32:44.095684Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:32:44.116025Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:32:44.116055Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:32:44.116063Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:32:44.116174Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27940 TClient is connected to server localhost:27940 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:32:44.583245Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:46.799122Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012486665959055:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:46.799229Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:46.820461Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:2, at schemeshard: 72057594046644480 2025-03-31T16:32:46.863994Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012486665959176:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:46.864077Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:46.864123Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012486665959181:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:46.867969Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:2, at schemeshard: 72057594046644480 2025-03-31T16:32:46.880284Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488012486665959183:2347], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-31T16:32:46.981232Z node 2 :TX_PROXY ERROR: Actor# [2:7488012486665959232:2397] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:32:47.426201Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:32:47.884225Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:1, at schemeshard: 72057594046644480 2025-03-31T16:32:48.308093Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-31T16:32:48.759363Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-03-31T16:32:48.959164Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488012473781056510:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:48.959237Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:32:49.216076Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715679:0, at schemeshard: 72057594046644480 2025-03-31T16:32:49.657280Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-03-31T16:32:49.689152Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-03-31T16:32:51.535609Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715701:0, at schemeshard: 72057594046644480 Call DescribeTable. 
data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } columns { name: "col2" type { type_id: DOUBLE } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. splits { select { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Expected: splits { select { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Actual: splits { select { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. 
GRpcStatusCode: 0 >> TTabletCountersAggregator::IntegralPercentileAggregationRegularNoOverflowCheck >> KqpMergeCn::TopSortBy_PK_Uint64_Limit3 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpRanges::ScanKeyPrefix [GOOD] Test command err: Trying to start YDB, gRPC: 26887, MsgBus: 14284 2025-03-31T16:32:13.974347Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012342992550272:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:13.974498Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a77/r3tmp/tmp8mLawT/pdisk_1.dat 2025-03-31T16:32:14.253690Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26887, node 1 2025-03-31T16:32:14.325864Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:14.326043Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:32:14.330723Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:32:14.356589Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:32:14.356616Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:32:14.356628Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:32:14.356764Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14284 TClient is connected to server localhost:14284 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:32:14.825701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:14.845292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:14.957035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:32:15.103453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:15.179728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:16.750250Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012355877453964:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:16.750382Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:17.055353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:32:17.086999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:32:17.122014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:32:17.192133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:32:17.223219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:32:17.254368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:32:17.298916Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012360172421776:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:17.298998Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:17.299004Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012360172421781:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:17.302468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:32:17.312352Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012360172421783:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:32:17.389988Z node 1 :TX_PROXY ERROR: Actor# [1:7488012360172421836:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 16018, MsgBus: 5766 2025-03-31T16:32:19.324977Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012366584928132:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:19.325065Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a77/r3tmp/tmp193ME1/pdisk_1.dat 2025-03-31T16:32:19.435948Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16018, node 2 2025-03-31T16:32:19.459211Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:19.459318Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:32:19.460918Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:32:19.505983Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:32:19.506005Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:32:19.506012Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:32:19.506122Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5766 TClient is connected to server localhost:5766 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:32:19.916277Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:19.931924Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:32:19.980517Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:20.142721Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:20.206455Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:22.221036Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012379469831774:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:22.221126Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: { : Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:43.334134Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7488012469803745132:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:43.334164Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:43.339164Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:32:43.351864Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7488012469803745134:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:32:43.415290Z node 5 :TX_PROXY ERROR: Actor# [5:7488012469803745189:3452] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:32:44.232802Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7488012452623873632:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:44.232878Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:32:44.524863Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:32:44.805746Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-31T16:32:45.050233Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-03-31T16:32:45.222490Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2025-03-31T16:32:45.584467Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480
: Warning: Type annotation, code: 1030
:1:44: Warning: At function: Coalesce
:1:58: Warning: At function: SqlIn
:1:58: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 Trying to start YDB, gRPC: 61425, MsgBus: 21821 2025-03-31T16:32:46.928161Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7488012485522185057:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:46.928280Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a77/r3tmp/tmpLKemCA/pdisk_1.dat 2025-03-31T16:32:47.083410Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:32:47.116614Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:47.116738Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:32:47.118124Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61425, node 6 2025-03-31T16:32:47.177212Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:32:47.177238Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:32:47.177247Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:32:47.177401Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21821 TClient is connected to server localhost:21821 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:32:47.775614Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:47.793378Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:47.885232Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:32:48.058983Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:48.134940Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:50.571277Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488012502702056027:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:50.571420Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:50.629196Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:32:50.662473Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:32:50.697995Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:32:50.736900Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:32:50.775914Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:32:50.823470Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:32:50.879896Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488012502702056539:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:50.880073Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:50.880453Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488012502702056544:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:50.903035Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:32:50.919896Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7488012502702056546:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:32:51.013379Z node 6 :TX_PROXY ERROR: Actor# [6:7488012506997023898:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:32:51.928211Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7488012485522185057:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:51.928303Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TTabletPipeTest::TestTwoNodesAndRebootOfProducer >> KqpNotNullColumns::InsertNotNullPkPg+useSink [GOOD] >> KqpNotNullColumns::InsertNotNullPkPg-useSink >> TTabletCountersAggregator::IntegralPercentileAggregationRegularNoOverflowCheck [GOOD] >> TTabletCountersPercentile::SingleBucket [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::DqSourceLocksEffects [GOOD] Test command err: Trying to start YDB, gRPC: 25779, MsgBus: 14915 2025-03-31T16:32:08.113051Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012323139511979:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:08.113190Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a94/r3tmp/tmpLERLdj/pdisk_1.dat 2025-03-31T16:32:08.405207Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25779, node 1 2025-03-31T16:32:08.461737Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:08.461843Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:32:08.463858Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:32:08.482530Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:32:08.482547Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:32:08.482557Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:32:08.482673Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14915 TClient is connected to server localhost:14915 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:32:08.921466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:08.943965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:09.049836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:09.203330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:09.267259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:11.026598Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012336024415648:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:11.026715Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:11.364513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:32:11.391266Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:32:11.417425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:32:11.442789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:32:11.470755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:32:11.503533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:32:11.543009Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012336024416158:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:11.543128Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012336024416163:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:11.543155Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:11.546583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:32:11.556017Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012336024416165:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:32:11.638278Z node 1 :TX_PROXY ERROR: Actor# [1:7488012336024416220:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 16763, MsgBus: 5978 2025-03-31T16:32:13.493616Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012343128455678:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:13.493713Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a94/r3tmp/tmplglY7v/pdisk_1.dat 2025-03-31T16:32:13.597568Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16763, node 2 2025-03-31T16:32:13.631924Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:13.632020Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:32:13.633070Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:32:13.684164Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:32:13.684185Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:32:13.684194Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:32:13.684321Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5978 TClient is connected to server localhost:5978 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:32:14.032806Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:14.050428Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:32:14.128582Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:32:14.273404Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:32:14.346570Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:16.416669Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012356013359348:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:16.416754Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: { : Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:32:43.622647Z node 6 :TX_PROXY ERROR: Actor# [6:7488012473590411803:3454] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:32:44.317412Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7488012456410540246:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:44.317506Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 20630, MsgBus: 7524 2025-03-31T16:32:45.934036Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7488012479517259660:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:45.934112Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a94/r3tmp/tmpdX6myB/pdisk_1.dat 2025-03-31T16:32:46.084117Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:32:46.127583Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:46.127698Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:32:46.129384Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20630, node 7 2025-03-31T16:32:46.180392Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:32:46.180412Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:32:46.180423Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:32:46.180591Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7524 TClient is connected to server localhost:7524 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:32:46.785093Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:46.803397Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:46.883449Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:47.081493Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:47.171433Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:50.326349Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488012500992097920:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:50.326481Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:50.385418Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:32:50.425832Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:32:50.466343Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:32:50.502869Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:32:50.537296Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:32:50.579970Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:32:50.663992Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488012500992098437:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:50.664121Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:50.664335Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488012500992098442:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:50.668355Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:32:50.680941Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7488012500992098445:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:32:50.738134Z node 7 :TX_PROXY ERROR: Actor# [7:7488012500992098497:3451] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:32:50.934163Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7488012479517259660:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:50.934262Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:32:52.353736Z node 7 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=5; 2025-03-31T16:32:52.353989Z node 7 :TX_DATASHARD ERROR: Prepare transaction failed. txid 5 at tablet 72075186224037888 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-03-31T16:32:52.354157Z node 7 :TX_DATASHARD ERROR: Errors while proposing transaction txid 5 at tablet 72075186224037888 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-03-31T16:32:52.354359Z node 7 :KQP_COMPUTE ERROR: SelfId: [7:7488012509582033523:2498], Table: `/Root/TwoShard` ([72057594046644480:2:1]), SessionActorId: [7:7488012505287066097:2498]Got LOCKS BROKEN for table `/Root/TwoShard`. ShardID=72075186224037888, Sink=[7:7488012509582033523:2498].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-03-31T16:32:52.355066Z node 7 :KQP_COMPUTE ERROR: SelfId: [7:7488012509582033515:2498], SessionActorId: [7:7488012505287066097:2498], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/TwoShard`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[7:7488012505287066097:2498]. isRollback=0 2025-03-31T16:32:52.355416Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=YjU4MTQ3YTktNjVhMWJjMmUtZTRkZTdiYzItMjA0NDg2Yg==, ActorId: [7:7488012505287066097:2498], ActorState: ExecuteState, TraceId: 01jqpgry1yf7t6w7s4p5ghn5nv, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [7:7488012509582033516:2498] from: [7:7488012509582033515:2498] 2025-03-31T16:32:52.355537Z node 7 :KQP_EXECUTER ERROR: ActorId: [7:7488012509582033516:2498] TxId: 281474976715673. Ctx: { TraceId: 01jqpgry1yf7t6w7s4p5ghn5nv, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=YjU4MTQ3YTktNjVhMWJjMmUtZTRkZTdiYzItMjA0NDg2Yg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/TwoShard`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-03-31T16:32:52.356823Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=YjU4MTQ3YTktNjVhMWJjMmUtZTRkZTdiYzItMjA0NDg2Yg==, ActorId: [7:7488012505287066097:2498], ActorState: ExecuteState, TraceId: 01jqpgry1yf7t6w7s4p5ghn5nv, Create QueryResponse for error on request, msg:
: Error: Transaction locks invalidated. Table: `/Root/TwoShard`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 >> GenericFederatedQuery::YdbFilterPushdown [GOOD] >> KqpNotNullColumns::UpsertNotNullPk |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletCountersPercentile::SingleBucket [GOOD] >> KqpSqlIn::SecondaryIndex_TupleLiteral [GOOD] >> KqpSqlIn::SecondaryIndex_TupleSelect >> TFlatMetrics::MaximumValue1 [GOOD] >> TFlatMetrics::MaximumValue2 [GOOD] >> TTabletPipeTest::TestTwoNodesAndRebootOfProducer [GOOD] >> KqpNewEngine::BrokenLocksAtROTxSharded [GOOD] >> KqpNewEngine::BrokenLocksOnUpdate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpSort::TopParameterFilter [GOOD] Test command err: Trying to start YDB, gRPC: 1541, MsgBus: 20793 2025-03-31T16:32:15.885399Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012352092302960:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:15.885496Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a71/r3tmp/tmp1LTWxX/pdisk_1.dat 2025-03-31T16:32:16.234830Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:32:16.273757Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:16.273886Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:32:16.276323Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1541, node 1 2025-03-31T16:32:16.334797Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:32:16.334836Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:32:16.334844Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:32:16.335011Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20793 TClient is connected to server localhost:20793 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:32:16.840299Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
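The ABORTED status with "Transaction locks invalidated" (code 2001) traced above is YDB's optimistic-locking conflict signal: a concurrent writer broke the transaction's locks, and the client is expected to rerun the whole transaction. A minimal retry sketch with the `ydb` Python SDK is below; the endpoint and column names are illustrative placeholders (the `/Root/TwoShard` table name is taken from the log), and `retry_operation_sync` treats ABORTED as retryable by default.

```python
import ydb

driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")  # endpoint placeholder
driver.wait(timeout=5)
pool = ydb.SessionPool(driver)

def transfer(session):
    # The whole read-modify-write runs inside one transaction; if another writer
    # invalidates our locks, YDB aborts with code 2001 and we re-enter here.
    tx = session.transaction(ydb.SerializableReadWrite())
    rs = tx.execute("SELECT Value1 FROM `/Root/TwoShard` WHERE Key = 1;")
    value = rs[0].rows[0].Value1 if rs[0].rows else 0  # column name illustrative
    tx.execute(
        "UPSERT INTO `/Root/TwoShard` (Key, Value1) VALUES (1, {});".format(value + 1),
        commit_tx=True,
    )

pool.retry_operation_sync(transfer)  # reruns the callee on lock-invalidation aborts
```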
2025-03-31T16:32:16.864970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:16.999847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:17.161347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:17.232818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:18.817037Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012364977206631:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:18.817140Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:19.115033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:32:19.147413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:32:19.172451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:32:19.201696Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:32:19.231601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:32:19.265490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:32:19.318189Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012369272174440:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:19.318282Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:19.318491Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012369272174445:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:19.322068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:32:19.332448Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012369272174447:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:32:19.425064Z node 1 :TX_PROXY ERROR: Actor# [1:7488012369272174501:3449] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 28696, MsgBus: 18375 2025-03-31T16:32:21.201142Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012379098295593:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:21.201206Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a71/r3tmp/tmpI3esWw/pdisk_1.dat 2025-03-31T16:32:21.292115Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:32:21.310687Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:21.310752Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:32:21.312201Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28696, node 2 2025-03-31T16:32:21.366642Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:32:21.366660Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:32:21.366669Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:32:21.366776Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18375 TClient is connected to server localhost:18375 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:32:21.762666Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
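Note: the recurring "Resource pool default not found or you don't have access permissions" warnings, the "Transaction ... completed, doublechecking" retry, and the subsequent "path exist, request accepts it" error form the normal first-query bootstrap of the workload service: each fresh database lazily creates the built-in pool under /Root/.metadata/workload_manager/pools/default, and when several actors race to create it, the losers simply observe the already-existing path, which is benign. For reference, a pool like this can also be declared explicitly; a hedged sketch follows (the settings and their values are illustrative assumptions, and the tests above rely on auto-creation instead):

CREATE RESOURCE POOL my_pool WITH (
    CONCURRENT_QUERY_LIMIT = 10, -- illustrative value, an assumption
    QUEUE_SIZE = 100             -- illustrative value, an assumption
);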
2025-03-31T16:32:21.768823Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:32:21.774049Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:21.839690Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:22.002664Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:22.072305Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:23.962764Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012387688231925:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31 ... eOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:32:41.882628Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7488012465224794413:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:41.882732Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7488012465224794418:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:41.882750Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:41.886801Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:32:41.896365Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7488012465224794420:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:32:41.950531Z node 5 :TX_PROXY ERROR: Actor# [5:7488012465224794473:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:32:42.981741Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:32:43.130103Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7488012452339890241:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:43.130186Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 23223, MsgBus: 23629 2025-03-31T16:32:47.494499Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7488012487769804514:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:47.494600Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a71/r3tmp/tmpNSsc3h/pdisk_1.dat 2025-03-31T16:32:47.615925Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:32:47.651039Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:47.651177Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:32:47.652814Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23223, node 6 2025-03-31T16:32:47.702658Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:32:47.702686Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:32:47.702698Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:32:47.702871Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23629 TClient is connected to server localhost:23629 WaitRootIsUp 'Root'... 
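Note: the physical plan printed near the end of this test's output (the S-expression beginning with (declare $limit (DataType 'Uint64))) is the compiled form of a parameterized filter-plus-limit query over /Root/TwoShard: the WideFilter stage keeps rows with Value2 != $value, and the Take is bounded by (Min (Uint64 '"1001") $limit). A sketch of the query shape that compiles to such a plan (the test's exact text is an assumption; table, column, and parameter names are taken from the plan):

DECLARE $limit AS Uint64;
DECLARE $value AS Int32;

-- Rows are filtered on Value2 and the result is capped by $limit,
-- matching the WideFilter and Take stages in the plan below.
SELECT Key, Value1, Value2
FROM TwoShard
WHERE Value2 != $value
LIMIT $limit;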
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:32:48.284284Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:48.301809Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:48.373194Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:48.587413Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:48.671534Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:51.462419Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488012504949675474:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:51.462542Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:51.543825Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:32:51.587800Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:32:51.623911Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:32:51.654523Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:32:51.686200Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:32:51.722957Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:32:51.770307Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488012504949675988:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:51.770418Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:51.770467Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488012504949675993:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:51.774725Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:32:51.784435Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7488012504949675995:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:32:51.878837Z node 6 :TX_PROXY ERROR: Actor# [6:7488012504949676049:3446] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:32:52.494587Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7488012487769804514:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:52.494664Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ( (declare $limit (DataType 'Uint64)) (declare $value (DataType 'Int32)) (let $1 (KqpTable '"/Root/TwoShard" '"72057594046644480:2" '"" '1)) (let $2 '('"Key" '"Value1" '"Value2")) (let $3 (KqpRowsSourceSettings $1 $2 '() (Void) '())) (let $4 (DataType 'Int32)) (let $5 (Min (Uint64 '"1001") $limit)) (let $6 (StructType '('"Key" (OptionalType (DataType 'Uint32))) '('"Value1" (OptionalType (DataType 'String))) '('"Value2" (OptionalType $4)))) (let $7 '('('"_logical_id" '497) '('"_id" '"99def84b-afb1ef76-7f8032d7-8ac7bda0") '('"_wide_channels" $6))) (let $8 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $3)) (lambda '($12) (block '( (let $13 (lambda '($16) (block '( (let $17 (Member $16 '"Value2")) (return (Member $16 '"Key") (Member $16 '"Value1") $17 (Coalesce (!= $17 $value) (Bool 'false))) )))) (let $14 (WideFilter (ExpandMap (ToFlow $12) $13) (lambda '($18 $19 $20 $21) $21) $5)) (let $15 (lambda '($22 $23 $24 $25) $22 $23 $24)) (return (FromFlow (WideMap $14 $15))) ))) $7)) (let $9 (DqCnUnionAll (TDqOutput $8 '"0"))) (let $10 (DqPhyStage '($9) (lambda '($26) (FromFlow (NarrowMap (Take (ToFlow $26) $5) (lambda '($27 $28 $29) (AsStruct '('"Key" $27) '('"Value1" $28) '('"Value2" $29)))))) '('('"_logical_id" '510) '('"_id" '"ac33261d-338d4e61-139be359-e0bab68d")))) (let $11 (DqCnResult (TDqOutput $10 '"0") '())) (return (KqpPhysicalQuery '((KqpPhysicalTx '($8 $10) '($11) '('('"$limit") '('"$value")) '('('"type" '"data")))) '((KqpTxResultBinding (ListType $6) '"0" '"0")) '('('"type" '"data_query")))) ) |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestTwoNodesAndRebootOfProducer [GOOD] |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TFlatMetrics::MaximumValue2 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-70 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-71 >> TResourceBrokerConfig::UpdateTasks [GOOD] >> TResourceBrokerInstant::Test >> TAsyncIndexTests::SplitMainWithReboots[TabletReboots] [GOOD] >> KqpNewEngine::PkRangeSelect4 [GOOD] >> KqpNewEngine::PrecomputeKey ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::YdbFilterPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 16589, MsgBus: 18015 2025-03-31T16:32:46.326964Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012483001930044:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:46.327101Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000f1c/r3tmp/tmpBga9PQ/pdisk_1.dat 2025-03-31T16:32:46.660612Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16589, node 1 2025-03-31T16:32:46.713796Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:32:46.713826Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:32:46.713834Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:32:46.713973Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:32:46.722576Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:46.722702Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:32:46.724446Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18015 TClient is connected to server localhost:18015 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:32:47.228503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:49.283502Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012495886832598:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:49.283643Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:49.505986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-03-31T16:32:49.626199Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012495886832721:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:49.626296Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:49.626416Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012495886832726:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:49.629314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-03-31T16:32:49.637128Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012495886832728:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-31T16:32:49.709934Z node 1 :TX_PROXY ERROR: Actor# [1:7488012495886832768:2398] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:32:50.324106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:32:50.730893Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:1, at schemeshard: 72057594046644480 2025-03-31T16:32:51.156310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:32:51.326843Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488012483001930044:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:51.326925Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:32:51.625923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-31T16:32:51.997941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-31T16:32:52.359705Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-03-31T16:32:52.392512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-03-31T16:32:53.953872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710702:0, at schemeshard: 72057594046644480 Call DescribeTable. data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. 
GRpcStatusCode: 0 schema { columns { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } columns { name: "data_column" type { optional_type { item { type_id: STRING } } } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Expected: splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Actual: splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. 
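Note: the DescribeTable, ListSplits, and ReadSplits exchange above (its gRPC status code follows) is the connector protocol behind YdbFilterPushdown: the WHERE predicate travels inside the ReadSplits request as a typed filter (comparison with operation EQ on filtered_column against int32 42) with filtering: FILTERING_OPTIONAL, meaning the external source may apply the filter itself or return unfiltered rows for YDB to re-filter. A sketch of the kind of federated query that produces this exchange (the external data source binding name is an assumption; the table, columns, and constant come from the request dump):

SELECT data_column
FROM external_ydb_source.example_1  -- binding name is an assumption
WHERE filtered_column = 42;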
GRpcStatusCode: 0 >> DataShardSnapshots::VolatileSnapshotCleanupOnReboot [GOOD] >> DataShardSnapshots::VolatileSnapshotCleanupOnFinish >> KqpJoinOrder::Chain65Nodes [GOOD] >> TKesusTest::TestAcquireTimeoutAfterReboot [GOOD] >> TKesusTest::TestAcquireSemaphoreViaRelease >> AnalyzeColumnshard::AnalyzeRebootSaInAggregate >> KqpSinkMvcc::OltpNamedStatementNoSink [GOOD] >> KqpSinkMvcc::OltpNamedStatement >> KqpNewEngine::DqSource [GOOD] >> KqpNewEngine::DqSourceLimit >> KqpRanges::NullInPredicate [GOOD] >> KqpRanges::NullInPredicateRow >> TResourceBrokerInstant::Test [GOOD] >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite2-withSink >> TKesusTest::TestAcquireSemaphoreViaRelease [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-46 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-47 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-70 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-71 |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBrokerInstant::Test [GOOD] >> GenericFederatedQuery::PostgreSQLOnPremSelectAll >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-22 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-23 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::SplitMainWithReboots[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [1:135:2157] sender: [1:136:2058] recipient: [1:111:2142] 2025-03-31T16:29:57.512990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:29:57.513072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:29:57.513113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:29:57.513144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:29:57.513212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:29:57.513242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:29:57.513332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: 
Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:29:57.513437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:29:57.513716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:29:57.594996Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-03-31T16:29:57.595116Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:169:2058] recipient: [1:15:2062] 2025-03-31T16:29:57.613320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:29:57.613462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:29:57.613621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:29:57.618476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:29:57.618737Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:29:57.619451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:57.619684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:29:57.624343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:57.625937Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:29:57.626004Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:57.626122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:29:57.626169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:29:57.626215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:29:57.626365Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:210:2058] recipient: [1:208:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:210:2058] recipient: [1:208:2209] Leader for TabletID 72057594037968897 is [1:214:2213] sender: [1:215:2058] recipient: [1:208:2209] 2025-03-31T16:29:57.633022Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:235:2058] recipient: [1:15:2062] 2025-03-31T16:29:57.772679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { 
WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:29:57.772917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:57.773128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:29:57.773353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:29:57.773409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:57.775535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:57.775681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:29:57.775862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:57.775944Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:29:57.775977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:29:57.776008Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:29:57.777883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:57.777948Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:29:57.777991Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:29:57.779741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:57.779782Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:57.779817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:57.779862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:29:57.783435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:29:57.785195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:29:57.785360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 
72057594046316545 Leader for TabletID 72057594046316545 is [1:135:2157] sender: [1:250:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:29:57.786328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:57.786444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 135 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:29:57.786508Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:57.786767Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:29:57.786821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:57.786988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:29:57.787096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:29:57.788960Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:29:57.788999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:29:57.789138Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:57.789174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:202:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:29:57.789470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:57.789511Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:29:57.789593Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:29:57.789629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:29:57.789665Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:29:57.789713Z no ... 
100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 50 } } } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\001\000\004\000\000\0002\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: 
false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:32:55.585309Z node 93 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-31T16:32:55.585620Z node 93 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 347us result status StatusSuccess 2025-03-31T16:32:55.586518Z node 93 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 
5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:32:55.597712Z node 93 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409546][93:804:2622] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-03-31T16:32:55.597823Z node 93 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][93:727:2622] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2025-03-31T16:32:55.597987Z node 93 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409546][93:804:2622] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1743438775567011 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, 
LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1743438775567011 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1743438775567011 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2025-03-31T16:32:55.600499Z node 93 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409546][93:804:2622] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3 2025-03-31T16:32:55.600598Z node 93 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][93:727:2622] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 }
>> KqpReturning::ReturningWorksIndexedInsert-QueryService [GOOD]
>> KqpReturning::ReturningWorksIndexedReplace+QueryService
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-22 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-23
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAcquireSemaphoreViaRelease [GOOD]
Test command err: 2025-03-31T16:32:09.520807Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-31T16:32:09.520980Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-31T16:32:09.547746Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-31T16:32:09.547847Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-31T16:32:09.574090Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-31T16:32:09.575143Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:130:2156], cookie=6992947707315685581, session=0, seqNo=0) 2025-03-31T16:32:09.575380Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-03-31T16:32:09.588185Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:130:2156], cookie=6992947707315685581, session=1) 2025-03-31T16:32:09.590019Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:130:2156], cookie=111, session=1, semaphore="Lock1" count=1) 2025-03-31T16:32:09.591340Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-03-31T16:32:09.591466Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-03-31T16:32:09.603575Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:130:2156], cookie=111) 2025-03-31T16:32:09.603872Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:130:2156], cookie=222, session=1, semaphore="Lock1" count=18446744073709551615) 2025-03-31T16:32:09.616985Z node 1 :KESUS_TABLET DEBUG: [72057594037927937]
TTxSemaphoreAcquire::Complete (sender=[1:130:2156], cookie=222) 2025-03-31T16:32:09.617598Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:146:2170], cookie=16397735763853829776, name="Lock1") 2025-03-31T16:32:09.618691Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:146:2170], cookie=16397735763853829776) 2025-03-31T16:32:10.045222Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-31T16:32:10.045316Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-31T16:32:10.062977Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-31T16:32:10.063117Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-31T16:32:10.078777Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-31T16:32:10.079639Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:130:2156], cookie=10776825160425651896, session=0, seqNo=0) 2025-03-31T16:32:10.079804Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-03-31T16:32:10.102219Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:130:2156], cookie=10776825160425651896, session=1) 2025-03-31T16:32:10.102488Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:131:2157], cookie=18091615210454732630, session=0, seqNo=0) 2025-03-31T16:32:10.102596Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-03-31T16:32:10.114777Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:131:2157], cookie=18091615210454732630, session=2) 2025-03-31T16:32:10.115942Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:130:2156], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-03-31T16:32:10.116098Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-03-31T16:32:10.116188Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-03-31T16:32:10.129347Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:130:2156], cookie=111) 2025-03-31T16:32:10.129737Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:130:2156], cookie=112, session=1, semaphore="Lock2" count=1) 2025-03-31T16:32:10.129893Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 2 "Lock2" 2025-03-31T16:32:10.129981Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 1 2025-03-31T16:32:10.142616Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:130:2156], cookie=112) 2025-03-31T16:32:10.143062Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:131:2157], cookie=222, session=2, semaphore="Lock1" count=1) 2025-03-31T16:32:10.143334Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:131:2157], cookie=223, session=2, semaphore="Lock2" count=18446744073709551615) 2025-03-31T16:32:10.159835Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:131:2157], cookie=222) 2025-03-31T16:32:10.159907Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:131:2157], 
cookie=223) 2025-03-31T16:32:10.160269Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:131:2157], cookie=333, session=2, semaphore="Lock1" count=1) 2025-03-31T16:32:10.160601Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:131:2157], cookie=334, session=2, semaphore="Lock2" count=18446744073709551615) 2025-03-31T16:32:10.173669Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:131:2157], cookie=333) 2025-03-31T16:32:10.173744Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:131:2157], cookie=334) 2025-03-31T16:32:10.570968Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:10.584584Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:10.925553Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:10.937870Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:11.289435Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:11.301169Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:11.641454Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:11.653611Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:12.004151Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:12.016301Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:12.360575Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:12.372887Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:12.704362Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:12.716385Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:13.055778Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:13.067918Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:13.407350Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:13.419358Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:13.780912Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:13.793069Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:14.166690Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:14.178854Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:14.536094Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:14.549011Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:14.899015Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:14.910797Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:15.259947Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:15.273682Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:15.648152Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:15.663890Z node 
2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:16.027502Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:16.044071Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:16.411893Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:16.431100Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:16.799168Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:16.818977Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:17.183050Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:17.195166Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:17.557912Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:17.570101Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:17.931301Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:17.943287Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:18.294467Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:18.307793Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:18.665019Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:18.680703Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:19.033399Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:19.046370Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:19.413819Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:19.426229Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:19.779361Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:19.791621Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:20.158145Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:20.171901Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:20.522631Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:20.534659Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:20.885371Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:20.897527Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:21.246293Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreTimeout::Execute (session=2, semaphore=2) 2025-03-31T16:32:21.246502Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 2 / semaphore 2 "Lock2" waiter link 2025-03-31T16:32:21.259895Z node 2 :KESUS_TABLET DEBUG: ... 
te 2025-03-31T16:32:46.439968Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:46.803693Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:46.815941Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:47.182189Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:47.194310Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:47.545682Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:47.557915Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:47.901137Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:47.914257Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:48.266713Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:48.278862Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:48.652700Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:48.664965Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:49.026142Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:49.038407Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:49.391057Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:49.403275Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:49.745632Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:49.757926Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:50.109697Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:50.122016Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:50.498052Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:50.509988Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:50.877311Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:50.892034Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:51.247115Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:51.261780Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:51.631468Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:51.643899Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:51.996793Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:52.009089Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:52.362905Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:52.375136Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:52.726770Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:52.738941Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:53.092670Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 
2025-03-31T16:32:53.105160Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:53.437397Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:53.449638Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:53.791793Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:53.804041Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:54.146114Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:54.158408Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:54.503479Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:54.515688Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:54.877364Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:54.889946Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:55.224608Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:55.237140Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:55.581692Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:55.593836Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:56.031122Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreTimeout::Execute (session=2, semaphore=1) 2025-03-31T16:32:56.031230Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 2 / semaphore 1 "Lock1" waiter link 2025-03-31T16:32:56.043470Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreTimeout::Complete (session=2, semaphore=1) 2025-03-31T16:32:56.064879Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[4:537:2533], cookie=12829176635489890784) 2025-03-31T16:32:56.065017Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[4:537:2533], cookie=12829176635489890784) 2025-03-31T16:32:56.065653Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[4:540:2536], cookie=12008934105066089520) 2025-03-31T16:32:56.065735Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[4:540:2536], cookie=12008934105066089520) 2025-03-31T16:32:56.066359Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:543:2539], cookie=7892438063532461262, name="Lock1") 2025-03-31T16:32:56.066447Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:543:2539], cookie=7892438063532461262) 2025-03-31T16:32:56.067111Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:546:2542], cookie=9300146407913581589, name="Lock1") 2025-03-31T16:32:56.067200Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:546:2542], cookie=9300146407913581589) 2025-03-31T16:32:56.485621Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-31T16:32:56.485739Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-31T16:32:56.507864Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-31T16:32:56.508229Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 
2025-03-31T16:32:56.533237Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-31T16:32:56.533833Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:132:2158], cookie=353248909303630861, session=0, seqNo=0) 2025-03-31T16:32:56.534006Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-03-31T16:32:56.546485Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:132:2158], cookie=353248909303630861, session=1) 2025-03-31T16:32:56.546841Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:132:2158], cookie=8025044646088169399, session=0, seqNo=0) 2025-03-31T16:32:56.546980Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-03-31T16:32:56.559595Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:132:2158], cookie=8025044646088169399, session=2) 2025-03-31T16:32:56.559969Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:132:2158], cookie=4301588720079285139, session=0, seqNo=0) 2025-03-31T16:32:56.560133Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 3 2025-03-31T16:32:56.573478Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:132:2158], cookie=4301588720079285139, session=3) 2025-03-31T16:32:56.574201Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:145:2169], cookie=6914324868601542047, name="Sem1", limit=3) 2025-03-31T16:32:56.574380Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 1 "Sem1" 2025-03-31T16:32:56.586668Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:145:2169], cookie=6914324868601542047) 2025-03-31T16:32:56.587008Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:132:2158], cookie=111, session=1, semaphore="Sem1" count=2) 2025-03-31T16:32:56.587193Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2025-03-31T16:32:56.587388Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:132:2158], cookie=222, session=2, semaphore="Sem1" count=2) 2025-03-31T16:32:56.587600Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:132:2158], cookie=333, session=3, semaphore="Sem1" count=1) 2025-03-31T16:32:56.599976Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:132:2158], cookie=111) 2025-03-31T16:32:56.600071Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:132:2158], cookie=222) 2025-03-31T16:32:56.600110Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:132:2158], cookie=333) 2025-03-31T16:32:56.600821Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:154:2178], cookie=1666868695451491285, name="Sem1") 2025-03-31T16:32:56.600919Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:154:2178], cookie=1666868695451491285) 2025-03-31T16:32:56.601427Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:157:2181], cookie=4790844661765572406, name="Sem1") 2025-03-31T16:32:56.601528Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:157:2181], 
cookie=4790844661765572406) 2025-03-31T16:32:56.601808Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[5:132:2158], cookie=444, name="Sem1") 2025-03-31T16:32:56.601918Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 1 "Sem1" owner link 2025-03-31T16:32:56.601993Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #2 session 2 2025-03-31T16:32:56.602053Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #3 session 3 2025-03-31T16:32:56.614467Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[5:132:2158], cookie=444) 2025-03-31T16:32:56.615296Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:162:2186], cookie=15718424148594778237, name="Sem1") 2025-03-31T16:32:56.615403Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:162:2186], cookie=15718424148594778237) 2025-03-31T16:32:56.615977Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:165:2189], cookie=13035846546052863657, name="Sem1") 2025-03-31T16:32:56.616059Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:165:2189], cookie=13035846546052863657)
>> KqpRanges::IsNull
>> TPipeTrackerTest::TestSimpleAdd [GOOD]
>> TResourceBroker::TestAutoTaskId
>> TResourceBroker::TestRealUsage
>> KqpNotNullColumns::CreateTableWithDisabledNotNullDataColumns
>> TResourceBroker::TestAutoTaskId [GOOD]
>> TResourceBroker::TestRealUsage [GOOD]
>> TResourceBroker::TestRandomQueue
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::Chain65Nodes [GOOD]
Test command err: Trying to start YDB, gRPC: 3457, MsgBus: 26915 2025-03-31T16:28:30.387434Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011385509985131:2195];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:30.435143Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0018d0/r3tmp/tmpDJBgGh/pdisk_1.dat 2025-03-31T16:28:31.245868Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:28:31.264833Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:28:31.264913Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:28:31.271757Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3457, node 1 2025-03-31T16:28:31.546195Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:28:31.546219Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:28:31.546226Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:28:31.546324Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26915 TClient is connected to server localhost:26915 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:28:32.566588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:28:32.611915Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:28:35.097301Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011406984822142:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:35.097414Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:35.381470Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488011385509985131:2195];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:28:35.398803Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:28:35.441520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:28:35.647434Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011406984822247:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:35.647494Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:35.660614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-31T16:28:35.747441Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011406984822325:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:35.747534Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:35.760145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:28:35.819469Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011406984822400:2360], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:35.819550Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:35.840640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:28:35.903805Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011406984822476:2369], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:35.903886Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:35.924929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:28:35.987528Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011406984822555:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:35.987599Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:36.004716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:28:36.092231Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011411279789930:2388], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:36.092292Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:36.104897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:28:36.164272Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011411279790007:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:36.164332Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:36.176537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:28:36.247269Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011411279790085:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:36.247357Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:36.264932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:28:36.319306Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011411279790164:2415], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:36.319382Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:36.328789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:28:36.419098Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011411279790247:2425], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:36.419212Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access perm ... ult, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:39.957151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710710:0, at schemeshard: 72057594046644480 2025-03-31T16:28:40.011834Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011428459662918:2822], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:40.011916Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:40.024967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710711:0, at schemeshard: 72057594046644480 2025-03-31T16:28:40.077757Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011428459662999:2832], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:40.077829Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:40.095122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710712:0, at schemeshard: 72057594046644480 2025-03-31T16:28:40.163452Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011428459663080:2841], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:40.163514Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:40.171998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710713:0, at schemeshard: 72057594046644480 2025-03-31T16:28:40.231145Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011428459663159:2850], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:40.231222Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:40.242332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710714:0, at schemeshard: 72057594046644480 2025-03-31T16:28:40.299889Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011428459663240:2859], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:40.299952Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:40.306664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710715:0, at schemeshard: 72057594046644480 2025-03-31T16:28:40.411503Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011428459663326:2868], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:40.411570Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:40.426033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710716:0, at schemeshard: 72057594046644480 2025-03-31T16:28:40.492291Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011428459663409:2878], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:40.492361Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:40.505004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710717:0, at schemeshard: 72057594046644480 2025-03-31T16:28:40.563449Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011428459663492:2888], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:40.563510Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:40.571316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710718:0, at schemeshard: 72057594046644480 2025-03-31T16:28:40.699546Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011428459663577:2897], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:40.699621Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:40.715036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710719:0, at schemeshard: 72057594046644480 2025-03-31T16:28:40.771326Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011428459663657:2906], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:40.771393Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:40.781456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710720:0, at schemeshard: 72057594046644480 2025-03-31T16:28:40.851626Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011428459663737:2915], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:40.851712Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:40.864723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710721:0, at schemeshard: 72057594046644480 2025-03-31T16:28:40.927258Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011428459663819:2924], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:40.927343Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:40.945476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710722:0, at schemeshard: 72057594046644480 2025-03-31T16:28:41.026384Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011432754631198:2933], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:41.026447Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:41.026651Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011432754631203:2936], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:28:41.030252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710723:3, at schemeshard: 72057594046644480 2025-03-31T16:28:41.043331Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488011432754631205:2937], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710723 completed, doublechecking } 2025-03-31T16:28:41.131326Z node 1 :TX_PROXY ERROR: Actor# [1:7488011432754631265:5804] txid# 281474976710724, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 70], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:28:46.242719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:28:46.242749Z node 1 :IMPORT WARN: Table profiles were not loaded
: Warning: Execution, code: 1060
: Warning: Cost Based Optimizer could not be applied to this query: Enumeration is too large, use PRAGMA MaxDPHypDPTableSize='4294967295' to disable the limitation, code: 8000
>> TResourceBroker::TestRandomQueue [GOOD]
>> TTabletCountersAggregator::IntegralPercentileAggregationRegularCheckSingleTablet
|95.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBroker::TestAutoTaskId [GOOD]
>> TKesusTest::TestAcquireBeforeTimeoutViaSessionTimeout [GOOD]
>> TKesusTest::TestAcquireSemaphore
>> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-DomainLoginOnly [GOOD]
>> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-DomainLoginOnly-StrictAclCheck
>> KqpNewEngine::JoinIdxLookupWithPredicate [GOOD]
>> KqpNewEngine::JoinPure
>> TResourceBrokerConfig::UpdateQueues [GOOD]
>> TResourceBrokerConfig::UpdateResourceLimit [GOOD]
>> TResourceBroker::TestErrors
>> TTabletCountersAggregator::IntegralPercentileAggregationRegularCheckSingleTablet [GOOD]
>> TTabletCountersAggregator::IntegralPercentileAggregationRegular
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-53 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-54
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBroker::TestRandomQueue [GOOD]
Test command err: 2025-03-31T16:32:58.100905Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-1 (1 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.100983Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-1 (1 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.101043Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-2 (2 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.101085Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-3 (3 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.101124Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-4 (4 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.101160Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-5 (5 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.101231Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-8 (8 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.101279Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-9 (9 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.101338Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-10 (10 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.101389Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-11 (11 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.101571Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-19 (19 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.101755Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-29 (29 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.101796Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-30 (30 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.101878Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-34 (34 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.101929Z node 2 :RESOURCE_BROKER ERROR:
Assigning waiting task 'task-36 (36 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.101982Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-37 (37 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.102234Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-49 (49 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.102292Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-51 (51 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.102359Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-53 (53 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.102464Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-58 (58 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.102532Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-60 (60 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.102618Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-64 (64 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.102848Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-74 (74 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.102908Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-76 (76 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.102958Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-78 (78 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.103026Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-81 (81 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.103221Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-84 (84 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.103427Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-93 (93 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.103466Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-94 (94 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.103629Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-101 (101 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.103728Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-106 (106 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.103834Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-111 (111 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.103986Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-120 (120 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.104035Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-122 (122 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.104192Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-129 (129 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.104303Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-134 (134 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.104337Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-135 (135 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.104525Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting 
task 'task-145 (145 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.104567Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-146 (146 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.104608Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-147 (147 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.104660Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-149 (149 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.104740Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-153 (153 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.104895Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-161 (161 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.105108Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-171 (171 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.105156Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-172 (172 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.105259Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-177 (177 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.105493Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-189 (189 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.105579Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-193 (193 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.105642Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-195 (195 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.105728Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-199 (199 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.105832Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-203 (203 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.105884Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-205 (205 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.105982Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-210 (210 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.106054Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-212 (212 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.106115Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-213 (213 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.106157Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-214 (214 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.106190Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-215 (215 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.106360Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-223 (223 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.106540Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-228 (228 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.106644Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-233 (233 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.106782Z node 2 :RESOURCE_BROKER ERROR: 
Assigning waiting task 'task-240 (240 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.106875Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-244 (244 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.107018Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-251 (251 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.107438Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-271 (271 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.107483Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-272 (272 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.107577Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-276 (276 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.107652Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-279 (279 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.107756Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-283 (283 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.107834Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-286 (286 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.107873Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-287 (287 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.107996Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-293 (293 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.108084Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-297 (297 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.108143Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-299 (299 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.108212Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-302 (302 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.108247Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-303 (303 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.108300Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-305 (305 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.108337Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-306 (306 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.108464Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-310 (310 by [2:99:2134])' of unknown type 'wrong' to ... 
3-31T16:32:58.148593Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-469 (469 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.148633Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-473 (473 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.148720Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-506 (506 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.148756Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-508 (508 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.148796Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-514 (514 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.148842Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-561 (561 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.148869Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-580 (580 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.148908Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-587 (587 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.148962Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-603 (603 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.149029Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-649 (649 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.149062Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-651 (651 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.149138Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-665 (665 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.149190Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-681 (681 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.149236Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-686 (686 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.149308Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-710 (710 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.149367Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-730 (730 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.149437Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-736 (736 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.149507Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-758 (758 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.149573Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-764 (764 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.149656Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-793 (793 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.149701Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-808 (808 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.149747Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-819 (819 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.149797Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-825 (825 by [2:99:2134])' of unknown type 'wrong' to default queue 
2025-03-31T16:32:58.149892Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-829 (829 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.149937Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-835 (835 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.150014Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-845 (845 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.150067Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-866 (866 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.150106Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-867 (867 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.150165Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-890 (890 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.150207Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-902 (902 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.150245Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-913 (913 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.150282Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-918 (918 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.150382Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-934 (934 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.150429Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-938 (938 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.150508Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-977 (977 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.150636Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-11 (11 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.150682Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-36 (36 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.150757Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-81 (81 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.150852Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-120 (120 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.150888Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-122 (122 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.150944Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-149 (149 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.150996Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-153 (153 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.151053Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-161 (161 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.151109Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-172 (172 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.151256Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-223 (223 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.151311Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-272 (272 by [2:99:2134])' of unknown type 'wrong' to default queue 
2025-03-31T16:32:58.151387Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-303 (303 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.151430Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-310 (310 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.151499Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-321 (321 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.151566Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-325 (325 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.151668Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-330 (330 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.151712Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-343 (343 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.151748Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-351 (351 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.151781Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-363 (363 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.151830Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-380 (380 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.151855Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-402 (402 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.151907Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-429 (429 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.151967Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-491 (491 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.152100Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-528 (528 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.152154Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-536 (536 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.152205Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-542 (542 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.152245Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-568 (568 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.152319Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-607 (607 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.152358Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-615 (615 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.152414Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-633 (633 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.152502Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-659 (659 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.152589Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-701 (701 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.152638Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-703 (703 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.152699Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-715 (715 by [2:99:2134])' of unknown type 'wrong' to default queue 
2025-03-31T16:32:58.152779Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-768 (768 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.152821Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-807 (807 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.152913Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-860 (860 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.152972Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-863 (863 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.153032Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-882 (882 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.153099Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-915 (915 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.153152Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-923 (923 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.153193Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-937 (937 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-31T16:32:58.153285Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-996 (996 by [2:99:2134])' of unknown type 'wrong' to default queue >> TTabletCountersAggregator::IntegralPercentileAggregationRegular [GOOD] |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBrokerConfig::UpdateResourceLimit [GOOD] >> TKesusTest::TestAcquireSemaphore [GOOD] >> TResourceBroker::TestErrors [GOOD] >> TResourceBroker::TestExecutionStat >> KqpNotNullColumns::UpsertNotNullPk [GOOD] >> KqpNotNullColumns::UpsertNotNullPkPg >> KqpNotNullColumns::InsertNotNullPkPg-useSink [GOOD] >> KqpNotNullColumns::JoinBothTablesWithNotNullPk+StreamLookup >> TTabletPipeTest::TestKillClientBeforServerIdKnown |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletCountersAggregator::IntegralPercentileAggregationRegular [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAcquireSemaphore [GOOD] Test command err: 2025-03-31T16:32:18.965168Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-31T16:32:18.965283Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-31T16:32:18.981204Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-31T16:32:18.981319Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-31T16:32:19.009279Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-31T16:32:19.009892Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:130:2156], cookie=7849179319813273231, session=0, seqNo=0) 2025-03-31T16:32:19.010062Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-03-31T16:32:19.022395Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:130:2156], cookie=7849179319813273231, session=1) 2025-03-31T16:32:19.022696Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:130:2156], cookie=2333389702392996431, session=0, seqNo=0) 2025-03-31T16:32:19.022828Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-03-31T16:32:19.035018Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete 
(sender=[1:130:2156], cookie=2333389702392996431, session=2) 2025-03-31T16:32:19.035888Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:130:2156], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-03-31T16:32:19.036029Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-03-31T16:32:19.036141Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-03-31T16:32:19.036295Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:130:2156], cookie=222, session=2, semaphore="Lock2" count=1) 2025-03-31T16:32:19.036342Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 2 "Lock2" 2025-03-31T16:32:19.036371Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 2 2025-03-31T16:32:19.036429Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:130:2156], cookie=333, session=1, semaphore="Lock2" count=1) 2025-03-31T16:32:19.036465Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #3 session 1 2025-03-31T16:32:19.051922Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:130:2156], cookie=111) 2025-03-31T16:32:19.052036Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:130:2156], cookie=222) 2025-03-31T16:32:19.052066Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:130:2156], cookie=333) 2025-03-31T16:32:19.052688Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:146:2170], cookie=17874600807932397811, name="Lock1") 2025-03-31T16:32:19.052793Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:146:2170], cookie=17874600807932397811) 2025-03-31T16:32:19.053229Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:149:2173], cookie=11155643090049630948, name="Lock2") 2025-03-31T16:32:19.053296Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:149:2173], cookie=11155643090049630948) 2025-03-31T16:32:19.067123Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-31T16:32:19.067240Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-31T16:32:19.067857Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-31T16:32:19.068514Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-31T16:32:19.109991Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-31T16:32:19.110148Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-03-31T16:32:19.110231Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 2 2025-03-31T16:32:19.110284Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #3 session 1 2025-03-31T16:32:19.110622Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:189:2203], cookie=4758595000998180234, name="Lock1") 2025-03-31T16:32:19.110773Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:189:2203], 
cookie=4758595000998180234) 2025-03-31T16:32:19.111345Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:197:2210], cookie=18247660719733563378, name="Lock2") 2025-03-31T16:32:19.111500Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:197:2210], cookie=18247660719733563378) 2025-03-31T16:32:19.556313Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:19.570250Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:19.927838Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:19.943872Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:20.277802Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:20.290256Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:20.631132Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:20.643067Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:21.004624Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:21.016609Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:21.360812Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:21.374079Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:21.705684Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:21.719888Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:22.059498Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:22.071735Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:22.423645Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:22.435990Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:22.829563Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:22.841939Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:23.202675Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:23.214960Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:23.575992Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:23.588207Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:23.950417Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:23.962489Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:24.327785Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:24.344021Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:24.727902Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:24.740317Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:25.091173Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:25.104538Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:25.455733Z node 1 
:KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:25.468012Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:25.821793Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:25.834082Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:26.188806Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:26.201253Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:26.596344Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:26.611870Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:26.954997Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:26.967312Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:27.326665Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:27.339471Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:27.704205Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:27.719241Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:28.076230Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:28.088531Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:28.460114Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:28.472294Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:28.823945Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:28.840216Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:29.191062Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:29.203368Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:29.544372Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:29.556504Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:29.913636Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:29.925790Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:30.311295Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:363:2365], cookie=17045000532081909670, name="Lock1") 2025-03-31T16:32:30.311411Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:363:2365], cookie=17045000532081909670) 2025-03-31T16:32:30.311956Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:366:2368], cookie=5292767188715428756, name="Lock2") 2025-03-31T16:32:30.312044Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:366:2368], cookie=5292767188715428756) 2025-03-31T16:32:30.365063Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:30.377285Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:30.735517Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:30.747776Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxSelfCheck::Complete 2025-03-31T16:32:31.091516Z node 1 :KESUS_TABLET DEBUG: [720575 ... 3-31T16:32:50.815987Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:51.183469Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:51.195876Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:51.553252Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:51.568563Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:51.924005Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:51.940217Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:52.294565Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:52.306955Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:52.681141Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:52.693477Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:53.057231Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:53.070784Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:53.423580Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:53.435813Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:53.787011Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:53.799321Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:54.150379Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:54.162611Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:54.525498Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:54.539761Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:54.918741Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:54.931002Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:55.286114Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:55.298453Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:55.650292Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:55.662040Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:56.013106Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:56.026503Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:56.381128Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:56.393313Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:56.760493Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:56.772644Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:57.131163Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:57.143140Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 
2025-03-31T16:32:57.516898Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:57.531919Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:57.887645Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:57.903829Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:58.254339Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2025-03-31T16:32:58.254469Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 2025-03-31T16:32:58.254540Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 1 "Lock1" owner link 2025-03-31T16:32:58.254637Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #3 session 2 2025-03-31T16:32:58.254698Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 2 "Lock2" owner link 2025-03-31T16:32:58.254749Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2025-03-31T16:32:58.266837Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2025-03-31T16:32:58.267665Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:331:2344], cookie=621853484601700367, name="Lock1") 2025-03-31T16:32:58.267764Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:331:2344], cookie=621853484601700367) 2025-03-31T16:32:58.268301Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:334:2347], cookie=17913025218356959095, name="Lock2") 2025-03-31T16:32:58.268368Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:334:2347], cookie=17913025218356959095) 2025-03-31T16:32:58.268836Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[4:337:2350], cookie=17568617313797264122) 2025-03-31T16:32:58.268918Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[4:337:2350], cookie=17568617313797264122) 2025-03-31T16:32:58.285327Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-31T16:32:58.285450Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-31T16:32:58.286088Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-31T16:32:58.286759Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-31T16:32:58.334552Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-31T16:32:58.334691Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #3 session 2 2025-03-31T16:32:58.334746Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2025-03-31T16:32:58.335078Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[4:377:2380], cookie=18058809621192853882) 2025-03-31T16:32:58.335172Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[4:377:2380], cookie=18058809621192853882) 2025-03-31T16:32:58.335844Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:384:2386], cookie=17941347643040691717, name="Lock1") 2025-03-31T16:32:58.335919Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxSemaphoreDescribe::Complete (sender=[4:384:2386], cookie=17941347643040691717) 2025-03-31T16:32:58.336445Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:387:2389], cookie=14187681441053075853, name="Lock2") 2025-03-31T16:32:58.336510Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:387:2389], cookie=14187681441053075853) 2025-03-31T16:32:58.739953Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-31T16:32:58.740057Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-31T16:32:58.761469Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-31T16:32:58.761744Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-31T16:32:58.786082Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-31T16:32:58.786626Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:132:2158], cookie=10013382028533784915, session=0, seqNo=0) 2025-03-31T16:32:58.786794Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-03-31T16:32:58.799034Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:132:2158], cookie=10013382028533784915, session=1) 2025-03-31T16:32:58.799408Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:132:2158], cookie=6993578548827272698, session=0, seqNo=0) 2025-03-31T16:32:58.799550Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-03-31T16:32:58.812028Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:132:2158], cookie=6993578548827272698, session=2) 2025-03-31T16:32:58.812396Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:132:2158], cookie=111, session=1, semaphore="Sem1" count=1) 2025-03-31T16:32:58.827961Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:132:2158], cookie=111) 2025-03-31T16:32:58.828625Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:145:2169], cookie=7407320175342453692, name="Sem1", limit=1) 2025-03-31T16:32:58.828787Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 1 "Sem1" 2025-03-31T16:32:58.841239Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:145:2169], cookie=7407320175342453692) 2025-03-31T16:32:58.841757Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:132:2158], cookie=333, session=1, semaphore="Sem1" count=100500) 2025-03-31T16:32:58.854059Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:132:2158], cookie=333) 2025-03-31T16:32:58.854436Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:132:2158], cookie=222, session=1, semaphore="Sem1" count=1) 2025-03-31T16:32:58.854602Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2025-03-31T16:32:58.854794Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:132:2158], cookie=333, session=2, semaphore="Sem1" count=1) 2025-03-31T16:32:58.867007Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:132:2158], cookie=222) 2025-03-31T16:32:58.867169Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxSemaphoreAcquire::Complete (sender=[5:132:2158], cookie=333) 2025-03-31T16:32:58.867787Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:155:2179], cookie=9063279924160482311, name="Sem1") 2025-03-31T16:32:58.867880Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:155:2179], cookie=9063279924160482311) 2025-03-31T16:32:58.868362Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:158:2182], cookie=3035199018345261186, name="Sem1") 2025-03-31T16:32:58.868436Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:158:2182], cookie=3035199018345261186) 2025-03-31T16:32:58.868876Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:161:2185], cookie=15523123841199425023, name="Sem1", force=0) 2025-03-31T16:32:58.881317Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:161:2185], cookie=15523123841199425023) 2025-03-31T16:32:58.881942Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:166:2190], cookie=4023436048835347458, name="Sem1", force=1) 2025-03-31T16:32:58.882030Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Deleting semaphore 1 "Sem1" 2025-03-31T16:32:58.894736Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:166:2190], cookie=4023436048835347458) >> TResourceBroker::TestExecutionStat [GOOD] >> TTabletPipeTest::TestSendBeforeBootTarget >> TTabletPipeTest::TestKillClientBeforServerIdKnown [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-59 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-60 |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBroker::TestExecutionStat [GOOD] >> TTabletCountersPercentile::WithoutZero [GOOD] >> TTabletCountersPercentile::StartFromZero [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-70 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-71 >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamedSingleBucket ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestKillClientBeforServerIdKnown [GOOD] Test command err: 2025-03-31T16:32:59.733566Z node 1 :PIPE_SERVER DEBUG: [9437185] Detach 2025-03-31T16:32:59.751684Z node 1 :PIPE_SERVER DEBUG: [9437185] Activate 2025-03-31T16:32:59.758199Z node 1 :PIPE_SERVER DEBUG: [9437185] Activate 2025-03-31T16:32:59.761181Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] ::Bootstrap [1:128:2154] 2025-03-31T16:32:59.761245Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] lookup [1:128:2154] 2025-03-31T16:32:59.761544Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] forward result local node, try to connect [1:128:2154] 2025-03-31T16:32:59.761617Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185]::SendEvent [1:128:2154] 2025-03-31T16:32:59.761697Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] poison pill while connecting [1:128:2154] 2025-03-31T16:32:59.761736Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] connect failed [1:128:2154] 2025-03-31T16:32:59.761825Z node 1 :PIPE_SERVER DEBUG: [9437185] Accept Connect Originator# [1:128:2154] 2025-03-31T16:32:59.761962Z node 1 :PIPE_SERVER INFO: [9437185] Undelivered Target# [1:128:2154] Type# 269877249 Reason# ActorUnknown 2025-03-31T16:32:59.762091Z node 1 :PIPE_CLIENT DEBUG: 
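
The TKesusTest trace above exercises a fixed ordering contract: each TTxSemaphoreAcquire enqueues a request with a monotonically growing order number, at most `limit` acquires own the semaphore at once, and when a session times out its owner links are deleted and the queue is reprocessed so the next waiter is promoted ("Processing semaphore ... queue: next order #N session S"). Below is a minimal, self-contained C++ model of that ordering behaviour; all type and member names here are invented for illustration and are not the actual Kesus tablet classes.

// Hypothetical model of the semaphore ordering visible in the Kesus log above.
// Not the real Kesus implementation: names and structure are illustrative only.
#include <cstdint>
#include <deque>
#include <iostream>
#include <map>
#include <string>

struct TAcquire {
    uint64_t Order;    // "next order #N" in the log
    uint64_t Session;  // owning/waiting session id
    uint64_t Count;    // resource units requested
};

class TSemaphoreModel {
public:
    TSemaphoreModel(std::string name, uint64_t limit)
        : Name(std::move(name)), Limit(limit) {}

    // TTxSemaphoreAcquire: queue the request, then reprocess the queue.
    void Acquire(uint64_t session, uint64_t count) {
        Waiters.push_back({++LastOrder, session, count});
        Process();
    }

    // Session timeout: "Deleting session N / semaphore ... owner link",
    // followed by queue reprocessing that promotes the next waiter.
    void DropSession(uint64_t session) {
        for (auto it = Owners.begin(); it != Owners.end();) {
            if (it->second.Session == session) {
                Used -= it->second.Count;
                it = Owners.erase(it);
            } else {
                ++it;
            }
        }
        Process();
    }

private:
    void Process() {
        // "Processing semaphore <name> queue: next order #N session S"
        while (!Waiters.empty() && Used + Waiters.front().Count <= Limit) {
            TAcquire a = Waiters.front();
            Waiters.pop_front();
            Used += a.Count;
            Owners[a.Order] = a;
            std::cout << "Processing semaphore \"" << Name
                      << "\" queue: next order #" << a.Order
                      << " session " << a.Session << "\n";
        }
    }

    std::string Name;
    uint64_t Limit;
    uint64_t Used = 0;
    uint64_t LastOrder = 0;
    std::deque<TAcquire> Waiters;          // pending acquires, FIFO by order
    std::map<uint64_t, TAcquire> Owners;   // order number -> current owner
};

int main() {
    TSemaphoreModel lock1("Lock1", /*limit=*/1);
    lock1.Acquire(/*session=*/1, /*count=*/1);  // order #1 becomes owner
    lock1.Acquire(/*session=*/2, /*count=*/1);  // order #2 waits behind session 1
    lock1.DropSession(1);                       // timeout: order #2 is promoted
}

Run against the log, this reproduces the visible sequence for "Lock1": session 1 owns order #1, session 2 queues behind it, and the TTxSessionTimeout for session 1 triggers "next order #3 session 2"-style promotion of the waiter.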
TClient[9437185] ::Bootstrap [1:131:2156] 2025-03-31T16:32:59.762110Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] lookup [1:131:2156] 2025-03-31T16:32:59.762166Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] forward result local node, try to connect [1:131:2156] 2025-03-31T16:32:59.762191Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185]::SendEvent [1:131:2156] 2025-03-31T16:32:59.762230Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] poison pill while connecting [1:131:2156] 2025-03-31T16:32:59.762246Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] connect failed [1:131:2156] 2025-03-31T16:32:59.762276Z node 1 :PIPE_SERVER DEBUG: [9437185] Accept Connect Originator# [1:131:2156] 2025-03-31T16:32:59.762338Z node 1 :PIPE_SERVER INFO: [9437185] Undelivered Target# [1:131:2156] Type# 269877249 Reason# ActorUnknown 2025-03-31T16:32:59.762415Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] ::Bootstrap [1:133:2158] 2025-03-31T16:32:59.762430Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] lookup [1:133:2158] 2025-03-31T16:32:59.762467Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] forward result local node, try to connect [1:133:2158] 2025-03-31T16:32:59.762486Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185]::SendEvent [1:133:2158] 2025-03-31T16:32:59.762504Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] poison pill while connecting [1:133:2158] 2025-03-31T16:32:59.762547Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] connect failed [1:133:2158] 2025-03-31T16:32:59.762612Z node 1 :PIPE_SERVER DEBUG: [9437185] Accept Connect Originator# [1:133:2158] 2025-03-31T16:32:59.762723Z node 1 :PIPE_SERVER INFO: [9437185] Undelivered Target# [1:133:2158] Type# 269877249 Reason# ActorUnknown >> KqpMergeCn::TopSortBy_PK_Uint64_Limit3 [GOOD] >> KqpMergeCn::TopSortBy_Int32_Limit3 >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamedSingleBucket [GOOD] >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamedNoOverflowCheck |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletCountersPercentile::StartFromZero [GOOD] >> TPipeCacheTest::TestIdleRefresh >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamedNoOverflowCheck [GOOD] >> TResourceBrokerInstant::TestErrors |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamedNoOverflowCheck [GOOD] >> TPipeCacheTest::TestIdleRefresh [GOOD] >> TPipeCacheTest::TestTabletNode >> TResourceBrokerInstant::TestErrors [GOOD] >> TResourceBrokerInstant::TestMerge >> KqpNotNullColumns::CreateTableWithDisabledNotNullDataColumns [GOOD] >> KqpNotNullColumns::AlterAddNotNullColumnPg >> TTabletPipeTest::TestPipeWithVersionInfo >> TPipeCacheTest::TestTabletNode [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-71 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-72 >> TTabletResolver::NodeProblem >> TResourceBrokerInstant::TestMerge [GOOD] >> TTabletPipeTest::TestPipeWithVersionInfo [GOOD] >> TTabletPipeTest::TestSendBeforeBootTarget [GOOD] >> TFlatMetrics::TimeSeriesKV2 [GOOD] >> TPipeCacheTest::TestAutoConnect |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TPipeCacheTest::TestTabletNode [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBrokerInstant::TestMerge [GOOD] Test command err: 2025-03-31T16:33:01.423343Z node 1 :RESOURCE_BROKER ERROR: FinishTaskInstant failed for task 2: cannot finish unknown task 
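
The two RESOURCE_BROKER errors in this run are both bookkeeping guards. A task submitted with an unconfigured type (the deliberate type 'wrong' used by the test) is still accepted but routed to the default queue, with an ERROR line per task; finishing a task id the broker has never seen is rejected outright ("cannot finish unknown task"). A rough standalone sketch of both checks follows, under invented names rather than the actual NKikimr resource-broker interface.

// Hypothetical sketch of the two broker guards seen in this log; not the
// real resource-broker API, just the observable routing/rejection logic.
#include <cstdint>
#include <iostream>
#include <set>
#include <string>
#include <unordered_map>

class TBrokerModel {
public:
    explicit TBrokerModel(std::set<std::string> knownTypes)
        : KnownTypes(std::move(knownTypes)) {}

    void SubmitTask(uint64_t id, const std::string& type) {
        std::string queue = "queue_" + type;
        if (!KnownTypes.count(type)) {
            // "Assigning waiting task 'task-N' of unknown type 'wrong'
            //  to default queue" -- accepted, but demoted to the fallback queue.
            std::cerr << "ERROR: Assigning waiting task 'task-" << id
                      << "' of unknown type '" << type
                      << "' to default queue\n";
            queue = "queue_default";
        }
        TaskQueue[id] = queue;
    }

    bool FinishTask(uint64_t id) {
        auto it = TaskQueue.find(id);
        if (it == TaskQueue.end()) {
            // "FinishTaskInstant failed for task N: cannot finish unknown task"
            std::cerr << "ERROR: FinishTaskInstant failed for task " << id
                      << ": cannot finish unknown task\n";
            return false;
        }
        TaskQueue.erase(it);
        return true;
    }

private:
    std::set<std::string> KnownTypes;                     // configured task types
    std::unordered_map<uint64_t, std::string> TaskQueue;  // task id -> queue name
};

int main() {
    TBrokerModel broker({"compaction", "scan"});
    broker.SubmitTask(36, "wrong");  // routed to default queue, logged as ERROR
    broker.FinishTask(36);           // ok
    broker.FinishTask(2);            // fails: task 2 was never submitted
}

Note the design choice the test verifies: an unknown task type is an ERROR in the log but not a hard failure (the task still runs in the default queue), whereas an unknown task id on finish is a hard failure, since there is no allocation to release.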
|95.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestPipeWithVersionInfo [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestSendBeforeBootTarget [GOOD] Test command err: Leader for TabletID 9437184 is [0:0:0] sender: [1:104:2057] recipient: [1:102:2136] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [1:104:2057] recipient: [1:102:2136] Leader for TabletID 9437184 is [1:108:2140] sender: [1:109:2057] recipient: [1:102:2136] Leader for TabletID 9437184 is [1:108:2140] sender: [1:128:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [0:0:0] sender: [1:163:2057] recipient: [1:161:2168] IGNORE Leader for TabletID 9437185 is [0:0:0] sender: [1:163:2057] recipient: [1:161:2168] Leader for TabletID 9437185 is [1:167:2172] sender: [1:168:2057] recipient: [1:161:2168] Leader for TabletID 9437185 is [1:167:2172] sender: [1:187:2057] recipient: [1:14:2061] >> KqpNewEngine::BrokenLocksOnUpdate [GOOD] >> TTabletResolver::NodeProblem [GOOD] >> KqpNewEngine::PrecomputeKey [GOOD] >> KqpNewEngine::PrimaryView >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-47 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-48 >> TResourceBroker::TestOverusage >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-71 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-72 >> GenericFederatedQuery::YdbSelectCount [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-23 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-24 >> KqpRanges::NullInPredicateRow [GOOD] >> KqpRanges::NoFullScanAtScanQuery ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletResolver::NodeProblem [GOOD] Test command err: 2025-03-31T16:33:02.297025Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 123 entry.State: StInit ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2025-03-31T16:33:02.297300Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 123 entry.State: StInitResolve success: true ev: {EvInfo Status: 0 TabletID: 123 Cookie: 0 CurrentLeader: [1:207:2136] CurrentLeaderTablet: [1:208:2137] CurrentGeneration: 1 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2025-03-31T16:33:02.297338Z node 1 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 123 followers: 0 2025-03-31T16:33:02.297423Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [1:207:2136] 2025-03-31T16:33:02.297631Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 234 entry.State: StInit ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2025-03-31T16:33:02.297908Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 234 entry.State: StInitResolve success: true ev: {EvInfo Status: 0 TabletID: 234 Cookie: 0 CurrentLeader: [1:213:2140] CurrentLeaderTablet: [1:214:2141] CurrentGeneration: 1 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {0, 6, 9}} 2025-03-31T16:33:02.297944Z node 1 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 234 followers: 0 2025-03-31T16:33:02.297985Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: 
[1:213:2140] 2025-03-31T16:33:02.299403Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 123 entry.State: StNormal ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2025-03-31T16:33:02.299480Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [1:207:2136] 2025-03-31T16:33:02.299653Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 234 entry.State: StNormal ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2025-03-31T16:33:02.299707Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [1:213:2140] 2025-03-31T16:33:02.299893Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvNodeProblem nodeId: 1 max(problemEpoch): 2 2025-03-31T16:33:02.299954Z node 1 :TABLET_RESOLVER DEBUG: Delayed invalidation of tabletId: 123 leader: [1:207:2136] by NodeId 2025-03-31T16:33:02.300023Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 123 entry.State: StProblemResolve ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2025-03-31T16:33:02.300241Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 123 entry.State: StProblemResolve success: true ev: {EvInfo Status: 0 TabletID: 123 Cookie: 0 CurrentLeader: [2:223:2094] CurrentLeaderTablet: [2:224:2095] CurrentGeneration: 2 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2025-03-31T16:33:02.300279Z node 1 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 123 followers: 0 2025-03-31T16:33:02.300322Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [2:223:2094] 2025-03-31T16:33:02.300548Z node 1 :TABLET_RESOLVER DEBUG: Delayed invalidation of tabletId: 234 leader: [1:213:2140] by NodeId 2025-03-31T16:33:02.300602Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 234 entry.State: StProblemResolve ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2025-03-31T16:33:02.300804Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 234 entry.State: StProblemResolve success: true ev: {EvInfo Status: 0 TabletID: 234 Cookie: 0 CurrentLeader: [2:229:2096] CurrentLeaderTablet: [2:230:2097] CurrentGeneration: 2 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2025-03-31T16:33:02.300852Z node 1 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 234 followers: 0 2025-03-31T16:33:02.300895Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [2:229:2096] 2025-03-31T16:33:02.302414Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvNodeProblem nodeId: 2 max(problemEpoch): 2 2025-03-31T16:33:02.302475Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 123 entry.State: StNormal ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2025-03-31T16:33:02.302530Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [2:223:2094] 2025-03-31T16:33:02.302760Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 234 entry.State: StNormal ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2025-03-31T16:33:02.302812Z 
node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [2:229:2096] 2025-03-31T16:33:02.303013Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvNodeProblem nodeId: 2 max(problemEpoch): 4 2025-03-31T16:33:02.303054Z node 1 :TABLET_RESOLVER DEBUG: Delayed invalidation of tabletId: 123 leader: [2:223:2094] by NodeId 2025-03-31T16:33:02.303126Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 123 entry.State: StProblemResolve ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2025-03-31T16:33:02.303357Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 123 entry.State: StProblemResolve success: true ev: {EvInfo Status: 0 TabletID: 123 Cookie: 0 CurrentLeader: [3:241:2094] CurrentLeaderTablet: [3:242:2095] CurrentGeneration: 3 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2025-03-31T16:33:02.303393Z node 1 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 123 followers: 0 2025-03-31T16:33:02.303434Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [3:241:2094] 2025-03-31T16:33:02.303712Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 234 entry.State: StNormal ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2025-03-31T16:33:02.303765Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [2:229:2096] 2025-03-31T16:33:02.303979Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvNodeProblem nodeId: 2 max(problemEpoch): 5 2025-03-31T16:33:02.304030Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 123 entry.State: StNormal ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2025-03-31T16:33:02.304070Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [3:241:2094] 2025-03-31T16:33:02.304293Z node 1 :TABLET_RESOLVER DEBUG: Delayed invalidation of tabletId: 234 leader: [2:229:2096] by NodeId 2025-03-31T16:33:02.304398Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 234 entry.State: StProblemResolve ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2025-03-31T16:33:02.304583Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 234 entry.State: StProblemResolve success: true ev: {EvInfo Status: 0 TabletID: 234 Cookie: 0 CurrentLeader: [3:247:2096] CurrentLeaderTablet: [3:248:2097] CurrentGeneration: 3 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2025-03-31T16:33:02.304615Z node 1 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 234 followers: 0 2025-03-31T16:33:02.304664Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [3:247:2096] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-23 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-24 >> KqpNewEngine::DqSourceLimit [GOOD] >> TResourceBroker::TestOverusage [GOOD] >> TResourceBroker::TestNotifyActorDied >> KqpNotNullColumns::UpsertNotNullPkPg [GOOD] >> KqpNotNullColumns::UpsertNotNullPg >> 
DataShardSnapshots::VolatileSnapshotCleanupOnFinish [GOOD] >> DataShardSnapshots::VolatileSnapshotRenameTimeout >> KqpExtractPredicateLookup::SimpleRange [GOOD] >> KqpExtractPredicateLookup::PointJoin >> TResourceBroker::TestNotifyActorDied [GOOD] >> TPipeCacheTest::TestAutoConnect [GOOD] >> KqpRanges::IsNull [GOOD] >> KqpRanges::IsNotNullSecondComponent >> TTabletPipeTest::TestTwoNodes >> TTabletPipeTest::TestSendAfterReboot ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::BrokenLocksOnUpdate [GOOD] Test command err: Trying to start YDB, gRPC: 61617, MsgBus: 11244 2025-03-31T16:32:21.700926Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012375349245521:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:21.701059Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a53/r3tmp/tmp6bOvNM/pdisk_1.dat 2025-03-31T16:32:22.033220Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61617, node 1 2025-03-31T16:32:22.043487Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:32:22.071646Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:22.075353Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:32:22.077026Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:32:22.101533Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:32:22.101559Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:32:22.101573Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:32:22.102511Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11244 TClient is connected to server localhost:11244 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:32:22.608920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:32:22.623671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:22.757133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:22.897239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:22.971195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:24.621279Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012388234149192:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:24.621395Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:24.926883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:32:24.949615Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:32:24.973237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:32:25.036395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:32:25.060000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:32:25.091856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:32:25.130477Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012392529116999:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:25.130527Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:25.130529Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012392529117004:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:25.133244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:32:25.151131Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012392529117006:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:32:25.244247Z node 1 :TX_PROXY ERROR: Actor# [1:7488012392529117060:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 3356, MsgBus: 27573 2025-03-31T16:32:27.113461Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012404097104968:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:27.113589Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a53/r3tmp/tmpwLVxY1/pdisk_1.dat 2025-03-31T16:32:27.240210Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:32:27.268392Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:27.268486Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:32:27.273084Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3356, node 2 2025-03-31T16:32:27.337792Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:32:27.337813Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:32:27.337822Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:32:27.337941Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27573 TClient is connected to server localhost:27573 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:32:27.777473Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:27.797847Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:32:27.872152Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:28.013183Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:28.098377Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:30.009919Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012416982008640:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:30.010028Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [S ... to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:53.379398Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:32:53.389856Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7488012513818343739:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:32:53.466704Z node 6 :TX_PROXY ERROR: Actor# [6:7488012513818343794:3450] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:32:54.495686Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7488012496638472255:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:54.495775Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 62770, MsgBus: 18098 2025-03-31T16:32:55.703073Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7488012523069077494:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:55.703168Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a53/r3tmp/tmpTvHwWL/pdisk_1.dat 2025-03-31T16:32:55.829967Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:32:55.858154Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:55.858273Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:32:55.859800Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62770, node 7 2025-03-31T16:32:55.908807Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:32:55.908838Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:32:55.908847Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:32:55.909018Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18098 TClient is connected to server localhost:18098 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-03-31T16:32:56.490257Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:32:56.502910Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:56.586900Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:56.862311Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:56.956327Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:59.830762Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488012540248948459:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:59.830902Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:59.891667Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:32:59.935631Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:32:59.977106Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:33:00.014672Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:33:00.053179Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:33:00.100423Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:33:00.183486Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488012544543916271:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:00.183599Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:00.183706Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488012544543916276:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:00.188035Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:33:00.199629Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7488012544543916278:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:33:00.256327Z node 7 :TX_PROXY ERROR: Actor# [7:7488012544543916330:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:33:00.703182Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7488012523069077494:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:00.703252Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:33:01.665235Z node 7 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976715673; 2025-03-31T16:33:01.694568Z node 7 :KQP_COMPUTE ERROR: SelfId: [7:7488012548838883965:2497], SessionActorId: [7:7488012548838883926:2497], Got LOCKS BROKEN for table. ShardID=72075186224037889, Sink=[7:7488012548838883965:2497].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-03-31T16:33:01.695412Z node 7 :KQP_COMPUTE ERROR: SelfId: [7:7488012548838883965:2497], SessionActorId: [7:7488012548838883926:2497], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/TwoShard`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[7:7488012548838883926:2497]. isRollback=0 2025-03-31T16:33:01.695783Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=ZDU4MzQyZDEtMmYzM2I4YmMtM2VlMzE0M2ItMTg5ZTY5NWQ=, ActorId: [7:7488012548838883926:2497], ActorState: ExecuteState, TraceId: 01jqpgs76w0558wwfdzvcwkpgp, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [7:7488012548838884016:2497] from: [7:7488012548838883965:2497] 2025-03-31T16:33:01.695917Z node 7 :KQP_EXECUTER ERROR: ActorId: [7:7488012548838884016:2497] TxId: 281474976715673. Ctx: { TraceId: 01jqpgs76w0558wwfdzvcwkpgp, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZDU4MzQyZDEtMmYzM2I4YmMtM2VlMzE0M2ItMTg5ZTY5NWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/TwoShard`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-03-31T16:33:01.696597Z node 7 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_ABORTED;details=Distributed transaction aborted due to commit failure;tx_id=281474976715673; 2025-03-31T16:33:01.696841Z node 7 :TX_DATASHARD ERROR: Complete volatile write [1743438781714 : 281474976715673] from 72075186224037888 at tablet 72075186224037888, error: Status: STATUS_ABORTED Issues: { message: "Distributed transaction aborted due to commit failure" issue_code: 2011 severity: 1 } 2025-03-31T16:33:01.697347Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=ZDU4MzQyZDEtMmYzM2I4YmMtM2VlMzE0M2ItMTg5ZTY5NWQ=, ActorId: [7:7488012548838883926:2497], ActorState: ExecuteState, TraceId: 01jqpgs76w0558wwfdzvcwkpgp, Create QueryResponse for error on request, msg: >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamed >> TBlockBlobStorageTest::DelayedErrorsNotIgnored >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite2-withSink [GOOD] >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite3+withSink |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBroker::TestNotifyActorDied [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::YdbSelectCount [GOOD] Test command err: Trying to start YDB, gRPC: 2187, MsgBus: 29120 2025-03-31T16:32:54.300078Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012517621345701:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:54.300218Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000f12/r3tmp/tmp5ldKqq/pdisk_1.dat 2025-03-31T16:32:54.568362Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2187, node 1 2025-03-31T16:32:54.645764Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:54.646446Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:32:54.648625Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:32:54.663782Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:32:54.663815Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:32:54.663823Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:32:54.663952Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29120 TClient is connected to server localhost:29120 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:32:55.153385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:55.175165Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:32:57.033679Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012530506248253:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:57.033845Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:57.327152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-03-31T16:32:57.455830Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012530506248377:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:57.455901Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:57.455989Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012530506248382:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:57.459382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-03-31T16:32:57.470364Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012530506248384:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-31T16:32:57.546084Z node 1 :TX_PROXY ERROR: Actor# [1:7488012530506248424:2399] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:32:58.168291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:32:58.601020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:1, at schemeshard: 72057594046644480 2025-03-31T16:32:59.020047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:32:59.300419Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488012517621345701:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:59.300490Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:32:59.471570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T16:32:59.935129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-31T16:33:00.405058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-03-31T16:33:00.440516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-03-31T16:33:02.130058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710702:0, at schemeshard: 72057594046644480 Call DescribeTable. data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. 
GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } columns { name: "col2" type { type_id: DOUBLE } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Expected: splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Actual: splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. 
GRpcStatusCode: 0 >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamed [GOOD] >> TTabletCountersAggregator::ColumnShardCounters |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TPipeCacheTest::TestAutoConnect [GOOD] >> TKesusTest::TestSessionTimeoutAfterReboot [GOOD] >> TKesusTest::TestSessionStealingSameKey >> TTabletPipeTest::TestTwoNodes [GOOD] >> KqpNewEngine::JoinPure [GOOD] >> KqpNewEngine::JoinPureUncomparableKeys >> TTabletPipeTest::TestSendAfterReboot [GOOD] >> TTabletCountersAggregator::ColumnShardCounters [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::DqSourceLimit [GOOD] Test command err: Trying to start YDB, gRPC: 10489, MsgBus: 14865 2025-03-31T16:32:21.103969Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012375772634455:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:21.104310Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a60/r3tmp/tmpCEuW04/pdisk_1.dat 2025-03-31T16:32:21.404264Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10489, node 1 2025-03-31T16:32:21.453716Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:21.453867Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:32:21.455528Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:32:21.461167Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:32:21.461190Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:32:21.461197Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:32:21.461354Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14865 TClient is connected to server localhost:14865 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:32:21.954554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:32:21.985238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:22.103054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:22.270100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:22.340510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:23.709964Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012384362570824:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:23.710077Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:24.018383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:32:24.044735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:32:24.071468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:32:24.099734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:32:24.127549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:32:24.158824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:32:24.202280Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012388657538631:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:24.202371Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:24.202418Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012388657538636:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:24.205478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:32:24.214346Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012388657538638:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:32:24.287165Z node 1 :TX_PROXY ERROR: Actor# [1:7488012388657538692:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 9136, MsgBus: 8230 2025-03-31T16:32:26.480428Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012398375179865:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:26.480516Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a60/r3tmp/tmpIkg6VS/pdisk_1.dat 2025-03-31T16:32:26.567688Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9136, node 2 2025-03-31T16:32:26.607689Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:26.607782Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:32:26.609084Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:32:26.625968Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:32:26.625995Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:32:26.626003Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:32:26.626142Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8230 TClient is connected to server localhost:8230 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:32:27.028284Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:27.047453Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:32:27.101293Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:27.259535Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:27.343021Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:29.496271Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012411260083519:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:29.496371Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: { : Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:53.599110Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:53.669948Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:32:53.706890Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:32:53.744274Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:32:53.816140Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:32:53.852557Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:32:53.890705Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:32:53.938902Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488012515767595136:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:53.938999Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:53.939012Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488012515767595141:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:53.944088Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:32:53.955844Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7488012515767595143:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:32:54.016787Z node 6 :TX_PROXY ERROR: Actor# [6:7488012520062562491:3446] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:32:54.668201Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7488012498587723648:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:54.668285Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 64816, MsgBus: 18832 2025-03-31T16:32:56.515278Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7488012526285011613:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:56.515401Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a60/r3tmp/tmppcCcj0/pdisk_1.dat 2025-03-31T16:32:56.633360Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:32:56.659815Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:56.659930Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:32:56.661600Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64816, node 7 2025-03-31T16:32:56.712561Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:32:56.712586Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:32:56.712597Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:32:56.712740Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18832 TClient is connected to server localhost:18832 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:32:57.334295Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:57.355814Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:32:57.498182Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-31T16:32:57.761294Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:57.854950Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:00.533743Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488012543464882567:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:00.533880Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:00.602182Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:33:00.641755Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:33:00.680245Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:33:00.718693Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:33:00.757363Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:33:00.801632Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:33:00.886860Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488012543464883081:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:00.886974Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:00.887205Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488012543464883086:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:00.891588Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:33:00.904689Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7488012543464883088:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:33:00.992721Z node 7 :TX_PROXY ERROR: Actor# [7:7488012543464883143:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:33:01.515586Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7488012526285011613:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:01.515661Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TTabletLabeledCountersAggregator::SimpleAggregation >> TKesusTest::TestSessionStealingSameKey [GOOD] >> TKesusTest::TestSessionStealingDifferentKey |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestTwoNodes [GOOD] >> TTabletLabeledCountersAggregator::SimpleAggregation [GOOD] >> TTabletLabeledCountersAggregator::Version3Aggregation |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletCountersAggregator::ColumnShardCounters [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-54 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestSendAfterReboot [GOOD] Test command err: Leader for TabletID 9437184 is [0:0:0] sender: [1:108:2057] recipient: [1:104:2137] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [1:108:2057] recipient: [1:104:2137] Leader for TabletID 9437185 is [0:0:0] sender: [1:109:2057] recipient: [1:105:2138] IGNORE Leader for TabletID 9437185 is [0:0:0] sender: [1:109:2057] recipient: [1:105:2138] Leader for TabletID 9437184 is [1:116:2145] sender: [1:117:2057] recipient: [1:104:2137] Leader for TabletID 9437185 is [1:120:2147] sender: [1:122:2057] recipient: [1:105:2138] Leader for TabletID 9437184 is [1:116:2145] sender: [1:156:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:120:2147] sender: [1:158:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:120:2147] sender: [1:161:2057] recipient: [1:101:2136] Leader for TabletID 9437185 is [1:120:2147] sender: [1:162:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:120:2147] sender: [1:165:2057] recipient: [1:164:2176] Leader for TabletID 9437185 is [1:166:2177] sender: [1:167:2057] recipient: [1:164:2176] Leader for TabletID 9437185 is [1:166:2177] sender: [1:195:2057] recipient: [1:14:2061] Leader for TabletID 9437184 is [1:116:2145] sender: [1:198:2057] recipient: [1:100:2135] Leader for TabletID 9437184 is [1:116:2145] sender: [1:200:2057] recipient: [1:14:2061] Leader for TabletID 9437184 is [1:116:2145] sender: [1:202:2057] recipient: [1:201:2200] Leader for TabletID 9437184 is [1:203:2201] sender: [1:204:2057] recipient: [1:201:2200] Leader for TabletID 9437184 is [1:203:2201] sender: [1:232:2057] recipient: [1:14:2061] >> TTabletLabeledCountersAggregator::Version3Aggregation [GOOD] >> TKesusTest::TestSessionStealingDifferentKey [GOOD] >> KqpNotNullColumns::JoinBothTablesWithNotNullPk+StreamLookup [GOOD] >> KqpNotNullColumns::JoinBothTablesWithNotNullPk-StreamLookup >> TTabletPipeTest::TestSendAfterOpenUsingTabletWithoutAcceptor >> KqpSqlIn::SecondaryIndex_TupleSelect [GOOD] >> 
TTabletPipeTest::TestShutdown >> TPipeTrackerTest::TestAddSameTabletTwice [GOOD] >> TPipeTrackerTest::TestAddTwoTablets [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletLabeledCountersAggregator::Version3Aggregation [GOOD] Test command err: { LabeledCountersByGroup { Group: "aba/caba/daba|man" LabeledCounter { Value: 13 AggregateFunc: EAF_SUM Type: CT_SIMPLE NameId: 0 } Delimiter: "|" } LabeledCountersByGroup { Group: "cons/aaa|1|aba/caba/daba|man" LabeledCounter { Value: 13 AggregateFunc: EAF_SUM Type: CT_SIMPLE NameId: 0 } Delimiter: "|" } CounterNames: "value1" } >> TFlatMetrics::MaximumValue3 [GOOD] >> TFlatMetrics::MaximumValue4 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestSessionStealingDifferentKey [GOOD] Test command err: 2025-03-31T16:32:15.052099Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-31T16:32:15.052257Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-31T16:32:15.069419Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-31T16:32:15.069604Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-31T16:32:15.094703Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-31T16:32:15.095304Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:130:2156], cookie=15366949434785112803, session=0, seqNo=0) 2025-03-31T16:32:15.095472Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-03-31T16:32:15.107709Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:130:2156], cookie=15366949434785112803, session=1) 2025-03-31T16:32:15.108544Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Fast-path detach session=1 from sender=[1:130:2156], cookie=5784641410527325370 2025-03-31T16:32:15.109046Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[1:143:2167], cookie=10435785983778848553) 2025-03-31T16:32:15.109151Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[1:143:2167], cookie=10435785983778848553) 2025-03-31T16:32:15.506333Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:15.518871Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:15.889469Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:15.901762Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:16.278367Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:16.303467Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:16.672149Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:16.685123Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:17.050271Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:17.062602Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:17.404636Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:17.417050Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-31T16:32:17.747762Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-31T16:32:17.759770Z node 1 
:KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete ... (repeated periodic TTxSelfCheck::Execute / TTxSelfCheck::Complete DEBUG entries from nodes 1 and 2, 2025-03-31T16:32:18 through 2025-03-31T16:33:04, elided) ... 2025-03-31T16:33:04.342617Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2025-03-31T16:33:04.342709Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 2025-03-31T16:33:04.355110Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2025-03-31T16:33:04.366191Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[2:570:2564], cookie=9732961646212616787) 2025-03-31T16:33:04.366298Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[2:570:2564], cookie=9732961646212616787) 2025-03-31T16:33:04.820921Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-31T16:33:04.821026Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-31T16:33:04.835002Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-31T16:33:04.835262Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-31T16:33:04.849576Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-31T16:33:04.850363Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:132:2158], cookie=12345, session=0, seqNo=0) 2025-03-31T16:33:04.850521Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-03-31T16:33:04.873809Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:132:2158], cookie=12345, session=1) 2025-03-31T16:33:04.874690Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:138:2163], cookie=23456,
session=1, seqNo=0) 2025-03-31T16:33:04.886975Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:138:2163], cookie=23456, session=1) 2025-03-31T16:33:05.240313Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-31T16:33:05.240419Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-31T16:33:05.258857Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-31T16:33:05.259014Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-31T16:33:05.273287Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-31T16:33:05.273969Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:132:2158], cookie=12345, session=0, seqNo=0) 2025-03-31T16:33:05.274093Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-03-31T16:33:05.296923Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:132:2158], cookie=12345, session=1) 2025-03-31T16:33:05.297693Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:138:2163], cookie=23456, session=1, seqNo=0) 2025-03-31T16:33:05.310035Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:138:2163], cookie=23456, session=1) >> TTabletPipeTest::TestSendAfterOpenUsingTabletWithoutAcceptor [GOOD] >> TTabletPipeTest::TestShutdown [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-60 [GOOD] >> TResourceBroker::TestCounters |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TPipeTrackerTest::TestAddTwoTablets [GOOD] |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TFlatMetrics::MaximumValue4 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-54 [GOOD] Test command err: Starting YDB, grpc: 15828, msgbus: 28667 2025-03-31T16:30:21.049765Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011860005026289:2069];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:21.049912Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001596/r3tmp/tmpcbMoj8/pdisk_1.dat 2025-03-31T16:30:21.534394Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:30:21.553760Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:30:21.553874Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:30:21.568935Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15828, node 1 2025-03-31T16:30:21.791671Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:30:21.791692Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:30:21.791699Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:30:21.791889Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient 
is connected to server localhost:28667 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-03-31T16:30:22.218734Z node 1 :TX_PROXY DEBUG: actor# [1:7488011860005026571:2133] Handle TEvNavigate describe path dc-1 2025-03-31T16:30:22.218800Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011864299994377:2457] HANDLE EvNavigateScheme dc-1 2025-03-31T16:30:22.227621Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011864299994377:2457] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-31T16:30:22.270738Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011864299994377:2457] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-03-31T16:30:22.289942Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011864299994377:2457] Handle TEvDescribeSchemeResult Forward to# [1:7488011864299994376:2456] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-03-31T16:30:22.337031Z node 1 :TX_PROXY DEBUG: actor# [1:7488011860005026571:2133] Handle TEvProposeTransaction 2025-03-31T16:30:22.337066Z node 1 :TX_PROXY DEBUG: actor# [1:7488011860005026571:2133] TxId# 281474976710657 ProcessProposeTransaction 2025-03-31T16:30:22.337553Z node 1 :TX_PROXY DEBUG: actor# [1:7488011860005026571:2133] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7488011864299994385:2464] 2025-03-31T16:30:22.434231Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011864299994385:2464] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-31T16:30:22.434326Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011864299994385:2464] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T16:30:22.434350Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011864299994385:2464] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T16:30:22.434491Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011864299994385:2464] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:30:22.434987Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011864299994385:2464] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:30:22.435241Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011864299994385:2464] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-03-31T16:30:22.435314Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011864299994385:2464] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-03-31T16:30:22.435488Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011864299994385:2464] txid# 281474976710657 HANDLE EvClientConnected 2025-03-31T16:30:22.436361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:30:22.445657Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011864299994385:2464] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-03-31T16:30:22.445723Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011864299994385:2464] txid# 281474976710657 SEND to# [1:7488011864299994384:2463] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 
2025-03-31T16:30:22.475586Z node 1 :TX_PROXY DEBUG: actor# [1:7488011860005026571:2133] Handle TEvProposeTransaction 2025-03-31T16:30:22.475612Z node 1 :TX_PROXY DEBUG: actor# [1:7488011860005026571:2133] TxId# 281474976710658 ProcessProposeTransaction 2025-03-31T16:30:22.475652Z node 1 :TX_PROXY DEBUG: actor# [1:7488011860005026571:2133] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7488011864299994427:2502] 2025-03-31T16:30:22.478147Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011864299994427:2502] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-31T16:30:22.478228Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011864299994427:2502] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T16:30:22.478253Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011864299994427:2502] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T16:30:22.478299Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011864299994427:2502] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:30:22.478542Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011864299994427:2502] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:30:22.478659Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011864299994427:2502] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T16:30:22.478695Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011864299994427:2502] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-03-31T16:30:22.478837Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011864299994427:2502] txid# 281474976710658 HANDLE EvClientConnected 2025-03-31T16:30:22.479340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:30:22.481433Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011864299994427:2502] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-03-31T16:30:22.481478Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011864299994427:2502] txid# 281474976710658 SEND to# [1:7488011864299994426:2501] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-03-31T16:30:22.567258Z node 1 :TX_PROXY DEBUG: actor# [1:7488011860005026571:2133] Handle TEvProposeTransaction 2025-03-31T16:30:22.567296Z node 1 :TX_PROXY DEBUG: actor# [1:7488011860005026571:2133] TxId# 281474976710659 ProcessProposeTransaction 2025-03-31T16:30:22.567379Z node 1 :TX_PROXY DEBUG: actor# [1:7488011860005026571:2133] Cookie# 0 userReqId# "" txid# 281474976710659 SEND to# [1:7488011864299994446:2513] 2025-03-31T16:30:22.570032Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011864299994446:2513] txid# 281474976710659 
Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\033\010\001\022\027\032\025cluster_admin@builtin\n#\010\000\022\037\010\001\020\200\200\002\032\025cluster_admin@builtin \000\n\"\010\000\022\036\010\001\020\200\010\032\025cluster_admin@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:50924" 2025-03-31T16:30:22.570094Z node 1 :TX_PROXY DEBUG: ... LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T16:33:03.685046Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012557650413682:2530] txid# 281474976715660 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715660 TabletId# 72057594046644480} 2025-03-31T16:33:03.685195Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012557650413682:2530] txid# 281474976715660 HANDLE EvClientConnected 2025-03-31T16:33:03.686793Z node 59 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-03-31T16:33:03.690948Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012557650413682:2530] txid# 281474976715660 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715660} 2025-03-31T16:33:03.690999Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012557650413682:2530] txid# 281474976715660 SEND to# [59:7488012557650413681:2342] Source {TEvProposeTransactionStatus txid# 281474976715660 Status# 53} 2025-03-31T16:33:03.712964Z node 59 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [59:7488012557650413681:2342], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-03-31T16:33:03.786727Z node 59 :TX_PROXY DEBUG: actor# [59:7488012540470543728:2111] Handle TEvProposeTransaction 2025-03-31T16:33:03.786770Z node 59 :TX_PROXY DEBUG: actor# [59:7488012540470543728:2111] TxId# 281474976715661 ProcessProposeTransaction 2025-03-31T16:33:03.786833Z node 59 :TX_PROXY DEBUG: actor# [59:7488012540470543728:2111] Cookie# 0 userReqId# "" txid# 281474976715661 SEND to# [59:7488012557650413754:2582] 2025-03-31T16:33:03.790075Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012557650413754:2582] txid# 281474976715661 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003\n#\010\000\022\037\010\001\020\377\377\003\032\025cluster_admin@builtin \003\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/dc-1" 2025-03-31T16:33:03.790155Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012557650413754:2582] txid# 281474976715661 Bootstrap, UserSID: metadata@system CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T16:33:03.790182Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012557650413754:2582] txid# 281474976715661 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 0 2025-03-31T16:33:03.790787Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012557650413754:2582] txid# 281474976715661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-03-31T16:33:03.790894Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012557650413754:2582] txid# 281474976715661 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:33:03.791190Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012557650413754:2582] txid# 281474976715661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:33:03.791399Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012557650413754:2582] HANDLE EvNavigateKeySetResult, txid# 281474976715661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T16:33:03.791468Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012557650413754:2582] txid# 281474976715661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715661 TabletId# 72057594046644480} 2025-03-31T16:33:03.791618Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012557650413754:2582] txid# 281474976715661 HANDLE EvClientConnected 2025-03-31T16:33:03.794722Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012557650413754:2582] txid# 281474976715661 Status StatusAlreadyExists HANDLE 
{TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715661 Reason# Check failed: path: '/dc-1/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)} 2025-03-31T16:33:03.794911Z node 59 :TX_PROXY ERROR: Actor# [59:7488012557650413754:2582] txid# 281474976715661, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:33:03.794956Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012557650413754:2582] txid# 281474976715661 SEND to# [59:7488012557650413681:2342] Source {TEvProposeTransactionStatus txid# 281474976715661 Status# 48} 2025-03-31T16:33:03.812074Z node 59 :TX_PROXY DEBUG: actor# [59:7488012540470543728:2111] Handle TEvProposeTransaction 2025-03-31T16:33:03.812113Z node 59 :TX_PROXY DEBUG: actor# [59:7488012540470543728:2111] TxId# 281474976715662 ProcessProposeTransaction 2025-03-31T16:33:03.812169Z node 59 :TX_PROXY DEBUG: actor# [59:7488012540470543728:2111] Cookie# 0 userReqId# "" txid# 281474976715662 SEND to# [59:7488012557650413777:2593] 2025-03-31T16:33:03.814847Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012557650413777:2593] txid# 281474976715662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:60194" 2025-03-31T16:33:03.814936Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012557650413777:2593] txid# 281474976715662 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T16:33:03.814957Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012557650413777:2593] txid# 281474976715662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T16:33:03.815015Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012557650413777:2593] txid# 281474976715662 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:33:03.815458Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012557650413777:2593] txid# 281474976715662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:33:03.815621Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012557650413777:2593] HANDLE EvNavigateKeySetResult, txid# 281474976715662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T16:33:03.815690Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012557650413777:2593] txid# 281474976715662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715662 TabletId# 72057594046644480} 2025-03-31T16:33:03.815874Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012557650413777:2593] txid# 281474976715662 HANDLE EvClientConnected 2025-03-31T16:33:03.824508Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012557650413777:2593] txid# 281474976715662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715662} 
2025-03-31T16:33:03.824576Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012557650413777:2593] txid# 281474976715662 SEND to# [59:7488012557650413776:2334] Source {TEvProposeTransactionStatus txid# 281474976715662 Status# 48} 2025-03-31T16:33:03.897147Z node 59 :TX_PROXY DEBUG: actor# [59:7488012540470543728:2111] Handle TEvProposeTransaction 2025-03-31T16:33:03.897194Z node 59 :TX_PROXY DEBUG: actor# [59:7488012540470543728:2111] TxId# 281474976715663 ProcessProposeTransaction 2025-03-31T16:33:03.897269Z node 59 :TX_PROXY DEBUG: actor# [59:7488012540470543728:2111] Cookie# 0 userReqId# "" txid# 281474976715663 SEND to# [59:7488012557650413810:2607] 2025-03-31T16:33:03.900076Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012557650413810:2607] txid# 281474976715663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\025cluster_admin@builtin\022\030\022\026\n\024all-users@well-known\032\025cluster_admin@builtin\"\007Builtin*\027clus****ltin (2AB0E265)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:60222" 2025-03-31T16:33:03.900167Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012557650413810:2607] txid# 281474976715663 Bootstrap, UserSID: cluster_admin@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T16:33:03.900192Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012557650413810:2607] txid# 281474976715663 Bootstrap, UserSID: cluster_admin@builtin IsClusterAdministrator: 1 2025-03-31T16:33:03.900246Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012557650413810:2607] txid# 281474976715663 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:33:03.900668Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012557650413810:2607] txid# 281474976715663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:33:03.900752Z node 59 :TX_PROXY ERROR: Actor# [59:7488012557650413810:2607] txid# 281474976715663, Access denied for cluster_admin@builtin on path /dc-1, with access AlterSchema 2025-03-31T16:33:03.900883Z node 59 :TX_PROXY ERROR: Actor# [59:7488012557650413810:2607] txid# 281474976715663, issues: { message: "Access denied for cluster_admin@builtin on path /dc-1" issue_code: 200000 severity: 1 } 2025-03-31T16:33:03.900918Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012557650413810:2607] txid# 281474976715663 SEND to# [59:7488012557650413809:2352] Source {TEvProposeTransactionStatus Status# 5} 2025-03-31T16:33:03.901200Z node 59 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=59&id=YTJjMGEwYjAtODc1ZDQ0MjMtODEyY2JmM2YtMzlmNjcyMjM=, ActorId: [59:7488012557650413795:2352], ActorState: ExecuteState, TraceId: 01jqpgs9c855k65e1e3jqpv1gp, Create QueryResponse for error on request, msg: 2025-03-31T16:33:03.901641Z node 59 :TX_PROXY DEBUG: actor# [59:7488012540470543728:2111] Handle TEvExecuteKqpTransaction 2025-03-31T16:33:03.901658Z node 59 :TX_PROXY DEBUG: actor# [59:7488012540470543728:2111] TxId# 281474976715664 ProcessProposeKqpTransaction 2025-03-31T16:33:04.054139Z node 59 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[59:7488012540470543575:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:04.054220Z node 59 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; >> KqpMergeCn::TopSortBy_Int32_Limit3 [GOOD] >> KqpMergeCn::TopSortBy_Float_Limit4 
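Note on the ESchemeOpCreateResourcePool operations above: they show the workload manager provisioning the built-in default pool, and the property keys recorded in the scheme transaction (concurrent_query_limit, queue_size, database_load_cpu_threshold, and so on) are the same settings exposed through YQL DDL. A minimal sketch of declaring such a pool by hand, assuming a YDB build with workload-manager DDL enabled; the pool name and values below are illustrative, not taken from this run:

    -- Property names mirror the keys in the CreateResourcePool record above;
    -- in the default pool they are all -1, which disables the corresponding limit.
    CREATE RESOURCE POOL example_pool WITH (
        CONCURRENT_QUERY_LIMIT = 20,        -- max queries running concurrently in the pool
        QUEUE_SIZE = 1000,                  -- max queries waiting for admission
        DATABASE_LOAD_CPU_THRESHOLD = 80    -- queue new queries above this database CPU load, percent
    );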
|95.5%| [TA] $(B)/ydb/core/kesus/tablet/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.5%| [TA] {RESULT} $(B)/ydb/core/kesus/tablet/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestSendAfterOpenUsingTabletWithoutAcceptor [GOOD] >> TTabletPipeTest::TestOpen >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-71 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-72 >> TResourceBroker::TestCounters [GOOD] >> TResourceBroker::TestChangeTaskType >> TBlockBlobStorageTest::DelayedErrorsNotIgnored [GOOD] >> TFlatMetrics::DecayingAverageAvg [GOOD] >> TTabletPipeTest::TestSendWithoutWaitOpenToWrongTablet |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestShutdown [GOOD] >> TTabletPipeTest::TestRebootUsingTabletWithoutAcceptor >> KqpNotNullColumns::AlterAddNotNullColumnPg [GOOD] >> KqpNotNullColumns::AlterDropNotNullColumn >> KqpReturning::ReturningWorksIndexedReplace+QueryService [GOOD] >> KqpReturning::ReturningWorksIndexedOperationsWithDefault+QueryService >> TTabletPipeTest::TestOpen [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpSqlIn::SecondaryIndex_TupleSelect [GOOD] Test command err: Trying to start YDB, gRPC: 16518, MsgBus: 2651 2025-03-31T16:32:13.623965Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012341074004645:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:13.624075Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a7d/r3tmp/tmpWI7OFN/pdisk_1.dat 2025-03-31T16:32:13.951968Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16518, node 1 2025-03-31T16:32:14.038235Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:14.038401Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:32:14.040418Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:32:14.046410Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:32:14.046438Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:32:14.046445Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:32:14.046575Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2651 TClient is connected to server localhost:2651 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:32:14.571046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:14.589887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:14.713002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:14.867964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:14.942827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:16.363562Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012353958908315:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:16.363692Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:16.682778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:32:16.709919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:32:16.741446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:32:16.767981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:32:16.797383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:32:16.867061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:32:16.911200Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012353958908829:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:16.911277Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:16.911571Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012353958908834:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:16.916338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:32:16.928400Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012353958908836:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:32:17.026577Z node 1 :TX_PROXY ERROR: Actor# [1:7488012358253876185:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:32:17.858403Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:32:17.929893Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:32:17.966115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T16:32:18.625931Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488012341074004645:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:18.626017Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
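[editor's note] The sequence above — repeated NOT_FOUND for pool "default", an ESchemeOpCreateResourcePool, a "doublechecking" retry, and finally a benign "path exist, request accepts it" — is the workload service lazily creating the default resource pool, with concurrent creators racing; the errors are expected noise, not test failures. For orientation only, a hedged YQL sketch of what such a pool definition looks like, assuming YDB's CREATE RESOURCE POOL ... WITH syntax; the property keys and the -1/0 values are copied from the CreateResourcePool record that appears later in this log, and the pool name is hypothetical:

    -- Sketch only: mirrors the properties carried by this log's
    -- CreateResourcePool record; -1 apparently means "no limit".
    CREATE RESOURCE POOL sample_pool WITH (   -- hypothetical name
        CONCURRENT_QUERY_LIMIT = -1,
        QUEUE_SIZE = -1,
        DATABASE_LOAD_CPU_THRESHOLD = -1,
        QUERY_CANCEL_AFTER_SECONDS = 0,
        QUERY_CPU_LIMIT_PERCENT_PER_NODE = -1,
        QUERY_MEMORY_LIMIT_PERCENT_PER_NODE = -1,
        RESOURCE_WEIGHT = -1,
        TOTAL_CPU_LIMIT_PERCENT_PER_NODE = -1
    );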
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:44: Warning: At function: Filter, At function: Coalesce
:5:67: Warning: At function: SqlIn
:5:67: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
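[editor's note] The warning stanza above (code 1108) recurs verbatim for each query execution in this run. As a minimal YQL sketch of the compiler's suggestion — the table and column names here are hypothetical, not taken from the test:

    PRAGMA AnsiInForEmptyOrNullableItemsCollections;

    -- The pragma switches IN over empty or nullable collections to
    -- ANSI SQL semantics and silences warning code 1108.
    SELECT Key, Value
    FROM `/Root/SampleTable`        -- hypothetical table
    WHERE Value IN (1, 2, 3)        -- Value assumed Optional<Int32>
    ORDER BY Key;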
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:44: Warning: At function: Filter, At function: Coalesce
:5:67: Warning: At function: SqlIn
:5:67: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:44: Warning: At function: Filter, At function: Coalesce
:5:67: Warning: At function: SqlIn
:5:67: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:44: Warning: At function: Filter, At function: Coalesce
:5:67: Warning: At function: SqlIn
:5:67: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 Trying to start YDB, gRPC: 27268, MsgBus: 15380 2025-03-31T16:32:21.652219Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012377529890547:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:21.652319Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a7d/r3tmp/tmps6a2AC/pdisk_1.dat 2025-03-31T16:32:21.765576Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27268, node 2 2025-03-31T16:32:21.800900Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:21.800985Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:32:21.805184Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:32:21.855358Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:32:21.855382Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:32:21.855390Z node 2 :NET_CLA ... cheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:32:50.178374Z node 5 :TX_PROXY ERROR: Actor# [5:7488012503304571495:3451] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:32:51.029178Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7488012486124699950:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:51.029256Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:32:51.240489Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:32:51.280697Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-31T16:32:51.333992Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480
: Warning: Type annotation, code: 1030
:4:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:60: Warning: At function: Filter, At function: Coalesce
:6:33: Warning: At function: SqlIn
:6:33: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Type annotation, code: 1030
:4:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:60: Warning: At function: Filter, At function: Coalesce
:6:33: Warning: At function: SqlIn
:6:33: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 Trying to start YDB, gRPC: 18215, MsgBus: 64829 2025-03-31T16:32:55.558174Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7488012522060760633:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:55.558246Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a7d/r3tmp/tmpxBspfr/pdisk_1.dat 2025-03-31T16:32:55.678655Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:32:55.693272Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:55.693444Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:32:55.696627Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18215, node 6 2025-03-31T16:32:55.743845Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:32:55.743877Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:32:55.743885Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:32:55.744015Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64829 TClient is connected to server localhost:64829 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-31T16:32:56.292799Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:32:56.300165Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:32:56.318476Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:32:56.404597Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:56.670562Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:56.750414Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:59.679756Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488012539240631584:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:59.679855Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:59.741662Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:32:59.780020Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:32:59.819434Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:32:59.854020Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:32:59.893247Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:33:00.030900Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:33:00.083294Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488012543535599395:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:00.083360Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:00.083394Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488012543535599400:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:00.087816Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:33:00.100759Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7488012543535599402:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:33:00.200343Z node 6 :TX_PROXY ERROR: Actor# [6:7488012543535599458:3450] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:33:00.558515Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7488012522060760633:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:00.558601Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:33:01.430779Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:33:01.471598Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-31T16:33:01.513695Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480
: Warning: Type annotation, code: 1030
:5:17: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:6:56: Warning: At function: Filter, At function: Coalesce
:7:29: Warning: At function: SqlIn
:7:29: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Execution, code: 1060
:5:17: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001 >> TResourceBroker::TestChangeTaskType [GOOD] >> KqpNotNullColumns::UpsertNotNullPg [GOOD] >> KqpRanges::DateKeyPredicate >> TTabletPipeTest::TestSendWithoutWaitOpenToWrongTablet [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TFlatMetrics::DecayingAverageAvg [GOOD] Test command err: ... waiting for all block results ... passing block result OK for [1:101:2135] ... blocking block result NO_GROUP for [1:102:2135] ... blocking block result NO_GROUP for [1:103:2135] ... blocking block result NO_GROUP for [1:104:2135] >> TTabletPipeTest::TestPipeConnectToHint ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-60 [GOOD] Test command err: Starting YDB, grpc: 15594, msgbus: 18353 2025-03-31T16:30:21.045416Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011860431858262:2209];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:21.045846Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0015c6/r3tmp/tmpAhhnPm/pdisk_1.dat 2025-03-31T16:30:21.552373Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:30:21.552482Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:30:21.562388Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:30:21.568854Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15594, node 1 2025-03-31T16:30:21.788501Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:30:21.788527Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:30:21.788538Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:30:21.788672Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18353 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-03-31T16:30:22.206684Z node 1 :TX_PROXY DEBUG: actor# [1:7488011860431858358:2116] Handle TEvNavigate describe path dc-1 2025-03-31T16:30:22.206740Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011864726826182:2454] HANDLE EvNavigateScheme dc-1 2025-03-31T16:30:22.223608Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011864726826182:2454] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-31T16:30:22.303882Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011864726826182:2454] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-03-31T16:30:22.322147Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011864726826182:2454] Handle TEvDescribeSchemeResult Forward to# [1:7488011864726826181:2453] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-03-31T16:30:22.351488Z node 1 :TX_PROXY DEBUG: actor# [1:7488011860431858358:2116] Handle TEvProposeTransaction 2025-03-31T16:30:22.351519Z node 1 :TX_PROXY DEBUG: actor# [1:7488011860431858358:2116] TxId# 281474976710657 ProcessProposeTransaction 2025-03-31T16:30:22.351664Z node 1 :TX_PROXY DEBUG: actor# [1:7488011860431858358:2116] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7488011864726826193:2461] 2025-03-31T16:30:22.480943Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011864726826193:2461] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-31T16:30:22.481023Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011864726826193:2461] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T16:30:22.481050Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011864726826193:2461] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T16:30:22.481107Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011864726826193:2461] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:30:22.481418Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011864726826193:2461] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:30:22.481565Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011864726826193:2461] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-03-31T16:30:22.481637Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011864726826193:2461] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-03-31T16:30:22.481780Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011864726826193:2461] txid# 281474976710657 HANDLE EvClientConnected 2025-03-31T16:30:22.482609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:30:22.488079Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011864726826193:2461] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-03-31T16:30:22.488138Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011864726826193:2461] txid# 281474976710657 SEND to# [1:7488011864726826192:2460] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 
2025-03-31T16:30:22.506831Z node 1 :TX_PROXY DEBUG: actor# [1:7488011860431858358:2116] Handle TEvProposeTransaction 2025-03-31T16:30:22.506860Z node 1 :TX_PROXY DEBUG: actor# [1:7488011860431858358:2116] TxId# 281474976710658 ProcessProposeTransaction 2025-03-31T16:30:22.506887Z node 1 :TX_PROXY DEBUG: actor# [1:7488011860431858358:2116] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7488011864726826235:2499] 2025-03-31T16:30:22.509186Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011864726826235:2499] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-31T16:30:22.509249Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011864726826235:2499] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T16:30:22.509264Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011864726826235:2499] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T16:30:22.509313Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011864726826235:2499] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:30:22.509574Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011864726826235:2499] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:30:22.509673Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011864726826235:2499] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T16:30:22.509762Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011864726826235:2499] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-03-31T16:30:22.509889Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011864726826235:2499] txid# 281474976710658 HANDLE EvClientConnected 2025-03-31T16:30:22.510355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:30:22.513067Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011864726826235:2499] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-03-31T16:30:22.513128Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011864726826235:2499] txid# 281474976710658 SEND to# [1:7488011864726826234:2498] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-03-31T16:30:22.600634Z node 1 :TX_PROXY DEBUG: actor# [1:7488011860431858358:2116] Handle TEvProposeTransaction 2025-03-31T16:30:22.600668Z node 1 :TX_PROXY DEBUG: actor# [1:7488011860431858358:2116] TxId# 281474976710659 ProcessProposeTransaction 2025-03-31T16:30:22.600713Z node 1 :TX_PROXY DEBUG: actor# [1:7488011860431858358:2116] Cookie# 0 userReqId# "" txid# 281474976710659 SEND to# [1:7488011864726826253:2509] 2025-03-31T16:30:22.603288Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011864726826253:2509] txid# 281474976710659 
Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\033\010\001\022\027\032\025cluster_admin@builtin\n#\010\000\022\037\010\001\020\200\200\002\032\025cluster_admin@builtin \000\n\"\010\000\022\036\010\001\020\200\010\032\025cluster_admin@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:45718" 2025-03-31T16:30:22.603358Z node 1 :TX_PROXY DEBUG: ... 7488012560447277445:2578] txid# 281474976715661 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-03-31T16:33:04.938874Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012560447277445:2578] txid# 281474976715661 HandleResolveDatabase, UserSID: metadata@system CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-03-31T16:33:04.939426Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012560447277445:2578] txid# 281474976715661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-03-31T16:33:04.939541Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012560447277445:2578] txid# 281474976715661 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:33:04.939913Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012560447277445:2578] txid# 281474976715661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:33:04.940121Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012560447277445:2578] HANDLE EvNavigateKeySetResult, txid# 281474976715661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T16:33:04.940188Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012560447277445:2578] txid# 281474976715661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715661 TabletId# 72057594046644480} 2025-03-31T16:33:04.940429Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012560447277445:2578] txid# 281474976715661 HANDLE EvClientConnected 2025-03-31T16:33:04.944365Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012560447277445:2578] txid# 281474976715661 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715661 Reason# Check failed: path: '/dc-1/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)} 2025-03-31T16:33:04.944516Z node 59 :TX_PROXY ERROR: Actor# [59:7488012560447277445:2578] txid# 281474976715661, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:33:04.944550Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012560447277445:2578] txid# 281474976715661 SEND to# [59:7488012560447277375:2342] Source {TEvProposeTransactionStatus txid# 281474976715661 Status# 48} 2025-03-31T16:33:04.959337Z node 59 :TX_PROXY DEBUG: actor# [59:7488012543267407417:2112] Handle TEvProposeTransaction 2025-03-31T16:33:04.959376Z node 59 :TX_PROXY DEBUG: actor# [59:7488012543267407417:2112] TxId# 
281474976715662 ProcessProposeTransaction 2025-03-31T16:33:04.959433Z node 59 :TX_PROXY DEBUG: actor# [59:7488012543267407417:2112] Cookie# 0 userReqId# "" txid# 281474976715662 SEND to# [59:7488012560447277469:2590] 2025-03-31T16:33:04.962592Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012560447277469:2590] txid# 281474976715662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:53096" 2025-03-31T16:33:04.962678Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012560447277469:2590] txid# 281474976715662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-31T16:33:04.962701Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012560447277469:2590] txid# 281474976715662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T16:33:04.962754Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012560447277469:2590] txid# 281474976715662 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:33:04.963127Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012560447277469:2590] txid# 281474976715662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:33:04.963271Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012560447277469:2590] HANDLE EvNavigateKeySetResult, txid# 281474976715662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T16:33:04.963334Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012560447277469:2590] txid# 281474976715662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715662 TabletId# 72057594046644480} 2025-03-31T16:33:04.963489Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012560447277469:2590] txid# 281474976715662 HANDLE EvClientConnected 2025-03-31T16:33:04.971568Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012560447277469:2590] txid# 281474976715662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715662} 2025-03-31T16:33:04.971635Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012560447277469:2590] txid# 281474976715662 SEND to# [59:7488012560447277468:2335] Source {TEvProposeTransactionStatus txid# 281474976715662 Status# 48} 2025-03-31T16:33:04.985177Z node 59 :TX_PROXY DEBUG: actor# [59:7488012543267407417:2112] Handle TEvProposeTransaction 2025-03-31T16:33:04.985231Z node 59 :TX_PROXY DEBUG: actor# [59:7488012543267407417:2112] TxId# 281474976715663 ProcessProposeTransaction 2025-03-31T16:33:04.985293Z node 59 :TX_PROXY DEBUG: actor# [59:7488012543267407417:2112] Cookie# 0 userReqId# "" txid# 281474976715663 SEND to# [59:7488012560447277482:2599] 2025-03-31T16:33:04.988125Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012560447277482:2599] txid# 281474976715663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "" NewOwner: "db_admin@builtin" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: 
"" PeerName: "ipv6:[::1]:53096" 2025-03-31T16:33:04.988212Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012560447277482:2599] txid# 281474976715663 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-31T16:33:04.988236Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012560447277482:2599] txid# 281474976715663 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T16:33:04.988295Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012560447277482:2599] txid# 281474976715663 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:33:04.988737Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012560447277482:2599] txid# 281474976715663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:33:04.988894Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012560447277482:2599] HANDLE EvNavigateKeySetResult, txid# 281474976715663 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T16:33:04.988972Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012560447277482:2599] txid# 281474976715663 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715663 TabletId# 72057594046644480} 2025-03-31T16:33:04.989165Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012560447277482:2599] txid# 281474976715663 HANDLE EvClientConnected 2025-03-31T16:33:04.989871Z node 59 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:33:04.992819Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012560447277482:2599] txid# 281474976715663 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715663} 2025-03-31T16:33:04.992882Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012560447277482:2599] txid# 281474976715663 SEND to# [59:7488012560447277481:2347] Source {TEvProposeTransactionStatus txid# 281474976715663 Status# 48} 2025-03-31T16:33:05.045838Z node 59 :TX_PROXY DEBUG: actor# [59:7488012543267407417:2112] Handle TEvProposeTransaction 2025-03-31T16:33:05.045874Z node 59 :TX_PROXY DEBUG: actor# [59:7488012543267407417:2112] TxId# 281474976715664 ProcessProposeTransaction 2025-03-31T16:33:05.045927Z node 59 :TX_PROXY DEBUG: actor# [59:7488012543267407417:2112] Cookie# 0 userReqId# "" txid# 281474976715664 SEND to# [59:7488012564742244810:2613] 2025-03-31T16:33:05.048675Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012564742244810:2613] txid# 281474976715664 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\020db_admin@builtin\022\030\022\026\n\024all-users@well-known\032\020db_admin@builtin\"\007Builtin*\027db_a****ltin (DEFA2CD5)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:53130" 2025-03-31T16:33:05.048751Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012564742244810:2613] txid# 281474976715664 Bootstrap, UserSID: db_admin@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-31T16:33:05.048774Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012564742244810:2613] txid# 281474976715664 Bootstrap, UserSID: db_admin@builtin IsClusterAdministrator: 0 2025-03-31T16:33:05.048928Z node 59 :TX_PROXY DEBUG: Actor# 
[59:7488012564742244810:2613] txid# 281474976715664 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-03-31T16:33:05.048961Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012564742244810:2613] txid# 281474976715664 HandleResolveDatabase, UserSID: db_admin@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 1 DatabaseOwner: db_admin@builtin 2025-03-31T16:33:05.049009Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012564742244810:2613] txid# 281474976715664 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:33:05.049336Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012564742244810:2613] txid# 281474976715664 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:33:05.049458Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012564742244810:2613] HANDLE EvNavigateKeySetResult, txid# 281474976715664 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T16:33:05.049514Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012564742244810:2613] txid# 281474976715664 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715664 TabletId# 72057594046644480} 2025-03-31T16:33:05.049673Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012564742244810:2613] txid# 281474976715664 HANDLE EvClientConnected 2025-03-31T16:33:05.052394Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012564742244810:2613] txid# 281474976715664 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715664} 2025-03-31T16:33:05.052452Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012564742244810:2613] txid# 281474976715664 SEND to# [59:7488012564742244809:2353] Source {TEvProposeTransactionStatus txid# 281474976715664 Status# 48} >> TTabletPipeTest::TestRewriteSameNode >> TTabletPipeTest::TestRebootUsingTabletWithoutAcceptor [GOOD] |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBroker::TestChangeTaskType [GOOD] |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestSendWithoutWaitOpenToWrongTablet [GOOD] |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestOpen [GOOD] >> TTabletPipeTest::TestPipeConnectToHint [GOOD] >> BootstrapperTest::LoneBootstrapper >> TFlatMetrics::TimeSeriesAvg4 [GOOD] >> TFlatMetrics::TimeSeriesKV [GOOD] >> TPipeTrackerTest::TestShareTablet [GOOD] >> TPipeTrackerTest::TestIdempotentAttachDetach [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestRebootUsingTabletWithoutAcceptor [GOOD] Test command err: Leader for TabletID 9437184 is [0:0:0] sender: [1:108:2057] recipient: [1:104:2137] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [1:108:2057] recipient: [1:104:2137] Leader for TabletID 9437185 is [0:0:0] sender: [1:109:2057] recipient: [1:105:2138] IGNORE Leader for TabletID 9437185 is [0:0:0] sender: [1:109:2057] recipient: [1:105:2138] Leader for TabletID 9437184 is [1:116:2145] sender: [1:117:2057] recipient: [1:104:2137] Leader for TabletID 9437185 is [1:120:2147] sender: [1:122:2057] recipient: [1:105:2138] Leader for TabletID 9437184 is [1:116:2145] sender: [1:156:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:120:2147] sender: [1:158:2057] 
recipient: [1:14:2061] Leader for TabletID 9437185 is [1:120:2147] sender: [1:161:2057] recipient: [1:101:2136] Leader for TabletID 9437185 is [1:120:2147] sender: [1:163:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:120:2147] sender: [1:165:2057] recipient: [1:164:2176] Leader for TabletID 9437185 is [1:166:2177] sender: [1:167:2057] recipient: [1:164:2176] Leader for TabletID 9437185 is [1:166:2177] sender: [1:195:2057] recipient: [1:14:2061] Leader for TabletID 9437184 is [1:116:2145] sender: [1:198:2057] recipient: [1:100:2135] Leader for TabletID 9437184 is [1:116:2145] sender: [1:200:2057] recipient: [1:14:2061] Leader for TabletID 9437184 is [1:116:2145] sender: [1:202:2057] recipient: [1:201:2200] Leader for TabletID 9437184 is [1:203:2201] sender: [1:204:2057] recipient: [1:201:2200] Leader for TabletID 9437184 is [1:203:2201] sender: [1:232:2057] recipient: [1:14:2061] >> GenericFederatedQuery::PostgreSQLOnPremSelectAll [GOOD] >> GenericFederatedQuery::PostgreSQLFilterPushdown >> TTabletLabeledCountersAggregator::HeavyAggregation >> BootstrapperTest::KeepExistingTablet >> TTabletPipeTest::TestRewriteSameNode [GOOD] >> BootstrapperTest::LoneBootstrapper [GOOD] >> BootstrapperTest::MultipleBootstrappers |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestPipeConnectToHint [GOOD] |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TFlatMetrics::TimeSeriesKV [GOOD] |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TPipeTrackerTest::TestIdempotentAttachDetach [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-DomainLoginOnly-StrictAclCheck [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-72 [GOOD] >> BootstrapperTest::RestartUnavailableTablet >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-48 [GOOD] >> TResourceBroker::TestResubmitTask |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestRewriteSameNode [GOOD] >> KqpSinkMvcc::OltpNamedStatement [GOOD] >> TTabletPipeTest::TestSendWithoutWaitOpen >> TResourceBroker::TestResubmitTask [GOOD] >> TResourceBroker::TestUpdateCookie >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-72 [GOOD] >> TTabletPipeTest::TestSendWithoutWaitOpen [GOOD] >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite3+withSink [GOOD] >> BootstrapperTest::RestartUnavailableTablet [GOOD] >> BootstrapperTest::UnavailableStateStorage >> BootstrapperTest::KeepExistingTablet [GOOD] >> BootstrapperTest::DuplicateNodes >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-24 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-72 [GOOD] Test command err: Starting YDB, grpc: 13093, msgbus: 32149 2025-03-31T16:30:22.336375Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011867555437612:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:22.336531Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
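[editor's note] The ESchemeOpAlterLogin records a few chunks above (CreateUser { User: "targetuser" Password: "passwd" }, then RemoveUser { User: "targetuser" MissingOk: false }) correspond to YDB's user-management DDL. A minimal sketch of the client-side YQL that produces such scheme operations, assuming the standard CREATE USER / DROP USER statements, with the user name taken from the log:

    -- Produces an ESchemeOpAlterLogin with a CreateUser record:
    CREATE USER targetuser PASSWORD 'passwd';

    -- Produces an ESchemeOpAlterLogin with a RemoveUser record
    -- (MissingOk: false, i.e. it fails if the user is absent):
    DROP USER targetuser;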
/home/runner/.ya/build/build_root/0l5c/00158e/r3tmp/tmp5BEHKi/pdisk_1.dat 2025-03-31T16:30:22.833635Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:30:22.833751Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:30:22.838317Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:30:22.886340Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13093, node 1 2025-03-31T16:30:22.932986Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-03-31T16:30:23.001323Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:30:23.001346Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:30:23.001353Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:30:23.001499Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32149 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-03-31T16:30:23.281669Z node 1 :TX_PROXY DEBUG: actor# [1:7488011867555437832:2113] Handle TEvNavigate describe path dc-1 2025-03-31T16:30:23.281777Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011871850405634:2435] HANDLE EvNavigateScheme dc-1 2025-03-31T16:30:23.283846Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011871850405634:2435] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-31T16:30:23.327344Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011871850405634:2435] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-03-31T16:30:23.338440Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011871850405634:2435] Handle TEvDescribeSchemeResult Forward to# [1:7488011871850405633:2434] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 
PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:30:23.382875Z node 1 :TX_PROXY DEBUG: actor# [1:7488011867555437832:2113] Handle TEvProposeTransaction 2025-03-31T16:30:23.382908Z node 1 :TX_PROXY DEBUG: actor# [1:7488011867555437832:2113] TxId# 281474976710657 ProcessProposeTransaction 2025-03-31T16:30:23.383039Z node 1 :TX_PROXY DEBUG: actor# [1:7488011867555437832:2113] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7488011871850405658:2445] 2025-03-31T16:30:23.492937Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011871850405658:2445] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-31T16:30:23.493026Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011871850405658:2445] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T16:30:23.493054Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011871850405658:2445] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T16:30:23.493136Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011871850405658:2445] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:30:23.493493Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011871850405658:2445] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:30:23.493652Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011871850405658:2445] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-03-31T16:30:23.493713Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011871850405658:2445] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-03-31T16:30:23.493896Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011871850405658:2445] txid# 281474976710657 HANDLE EvClientConnected 2025-03-31T16:30:23.494745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:30:23.505300Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011871850405658:2445] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-03-31T16:30:23.505369Z node 1 :TX_PROXY DEBUG: Actor# 
[1:7488011871850405658:2445] txid# 281474976710657 SEND to# [1:7488011871850405657:2444] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 2025-03-31T16:30:23.535424Z node 1 :TX_PROXY DEBUG: actor# [1:7488011867555437832:2113] Handle TEvProposeTransaction 2025-03-31T16:30:23.535454Z node 1 :TX_PROXY DEBUG: actor# [1:7488011867555437832:2113] TxId# 281474976710658 ProcessProposeTransaction 2025-03-31T16:30:23.535509Z node 1 :TX_PROXY DEBUG: actor# [1:7488011867555437832:2113] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7488011871850405703:2486] 2025-03-31T16:30:23.538211Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011871850405703:2486] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-31T16:30:23.538290Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011871850405703:2486] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T16:30:23.538309Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011871850405703:2486] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T16:30:23.538357Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011871850405703:2486] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:30:23.538728Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011871850405703:2486] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:30:23.538838Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011871850405703:2486] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T16:30:23.538899Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011871850405703:2486] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-03-31T16:30:23.539115Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011871850405703:2486] txid# 281474976710658 HANDLE EvClientConnected 2025-03-31T16:30:23.539635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:30:23.543113Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011871850405703:2486] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-03-31T16:30:23.543161Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011871850405703:2486] txid# 281474976710658 SEND to# [1:7488011871850405702:2485] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-03-31T16:30:23.590168Z node 1 :TX_PROXY DEBUG: actor# [1:7488011867555437832:2113] Handle TEvProposeTransaction 2025-03-31T16:30:23.590203Z node 1 :TX_PROXY DEBUG: actor# [1:7488011867555437832:2113] TxId# 281474976710659 ProcessProposeTransaction 2025-03-31T16:30:23.590246Z node 1 :TX_PROXY DEBUG: actor# [1:7488011867555437832:2113] Cookie# 0 userReqId# "" 
txid# 281474976710659 SEND to# [1:7488011871850405721:2496] 2025-03-31T16:30:23.592987Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011871850405721:2496] txid# 281474976710659 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\033\010\001\022\027\032\025cluster_admin@builtin\n#\010\000\022\037\010\001\020\200\200\002\032\025cluster_admin@builtin \000\n\"\010\000\022\036\010\001\020\200\010\032\025cluster_admin@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestTyp ... Actor# [59:7488012570598865839:2535] txid# 281474976715660 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715660} 2025-03-31T16:33:06.886655Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012570598865839:2535] txid# 281474976715660 SEND to# [59:7488012570598865838:2342] Source {TEvProposeTransactionStatus txid# 281474976715660 Status# 53} 2025-03-31T16:33:06.910934Z node 59 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [59:7488012570598865838:2342], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-03-31T16:33:06.986996Z node 59 :TX_PROXY DEBUG: actor# [59:7488012553418995949:2113] Handle TEvProposeTransaction 2025-03-31T16:33:06.987042Z node 59 :TX_PROXY DEBUG: actor# [59:7488012553418995949:2113] TxId# 281474976715661 ProcessProposeTransaction 2025-03-31T16:33:06.987117Z node 59 :TX_PROXY DEBUG: actor# [59:7488012553418995949:2113] Cookie# 0 userReqId# "" txid# 281474976715661 SEND to# [59:7488012570598865915:2591] 2025-03-31T16:33:06.990033Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012570598865915:2591] txid# 281474976715661 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/dc-1" 2025-03-31T16:33:06.990100Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012570598865915:2591] txid# 281474976715661 Bootstrap, UserSID: metadata@system CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-31T16:33:06.990123Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012570598865915:2591] txid# 281474976715661 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 0 2025-03-31T16:33:06.990271Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012570598865915:2591] txid# 281474976715661 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-03-31T16:33:06.990306Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012570598865915:2591] txid# 281474976715661 HandleResolveDatabase, UserSID: metadata@system CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-03-31T16:33:06.990865Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012570598865915:2591] txid# 281474976715661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-03-31T16:33:06.990961Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012570598865915:2591] txid# 281474976715661 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:33:06.991232Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012570598865915:2591] txid# 281474976715661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:33:06.991386Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012570598865915:2591] HANDLE EvNavigateKeySetResult, txid# 281474976715661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T16:33:06.991438Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012570598865915:2591] txid# 281474976715661 SEND to# 72057594046644480 
shardToRequest {TEvModifySchemeTransaction txid# 281474976715661 TabletId# 72057594046644480} 2025-03-31T16:33:06.991586Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012570598865915:2591] txid# 281474976715661 HANDLE EvClientConnected 2025-03-31T16:33:06.995354Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012570598865915:2591] txid# 281474976715661 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715661 Reason# Check failed: path: '/dc-1/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)} 2025-03-31T16:33:06.995507Z node 59 :TX_PROXY ERROR: Actor# [59:7488012570598865915:2591] txid# 281474976715661, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:33:06.995553Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012570598865915:2591] txid# 281474976715661 SEND to# [59:7488012570598865838:2342] Source {TEvProposeTransactionStatus txid# 281474976715661 Status# 48} 2025-03-31T16:33:07.008405Z node 59 :TX_PROXY DEBUG: actor# [59:7488012553418995949:2113] Handle TEvProposeTransaction 2025-03-31T16:33:07.008442Z node 59 :TX_PROXY DEBUG: actor# [59:7488012553418995949:2113] TxId# 281474976715662 ProcessProposeTransaction 2025-03-31T16:33:07.008507Z node 59 :TX_PROXY DEBUG: actor# [59:7488012553418995949:2113] Cookie# 0 userReqId# "" txid# 281474976715662 SEND to# [59:7488012574893833235:2603] 2025-03-31T16:33:07.011320Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012574893833235:2603] txid# 281474976715662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:33624" 2025-03-31T16:33:07.011401Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012574893833235:2603] txid# 281474976715662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-31T16:33:07.011426Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012574893833235:2603] txid# 281474976715662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T16:33:07.011481Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012574893833235:2603] txid# 281474976715662 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:33:07.011862Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012574893833235:2603] txid# 281474976715662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:33:07.011993Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012574893833235:2603] HANDLE EvNavigateKeySetResult, txid# 281474976715662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T16:33:07.012056Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012574893833235:2603] txid# 281474976715662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715662 
TabletId# 72057594046644480} 2025-03-31T16:33:07.012207Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012574893833235:2603] txid# 281474976715662 HANDLE EvClientConnected 2025-03-31T16:33:07.020585Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012574893833235:2603] txid# 281474976715662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715662} 2025-03-31T16:33:07.020657Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012574893833235:2603] txid# 281474976715662 SEND to# [59:7488012574893833234:2335] Source {TEvProposeTransactionStatus txid# 281474976715662 Status# 48} 2025-03-31T16:33:07.090372Z node 59 :TX_PROXY DEBUG: actor# [59:7488012553418995949:2113] Handle TEvProposeTransaction 2025-03-31T16:33:07.090406Z node 59 :TX_PROXY DEBUG: actor# [59:7488012553418995949:2113] TxId# 281474976715663 ProcessProposeTransaction 2025-03-31T16:33:07.090457Z node 59 :TX_PROXY DEBUG: actor# [59:7488012553418995949:2113] Cookie# 0 userReqId# "" txid# 281474976715663 SEND to# [59:7488012574893833268:2617] 2025-03-31T16:33:07.093167Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012574893833268:2617] txid# 281474976715663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\024ordinaryuser@builtin\022\030\022\026\n\024all-users@well-known\032\024ordinaryuser@builtin\"\007Builtin*\027ordi****ltin (32520BBF)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:33648" 2025-03-31T16:33:07.093237Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012574893833268:2617] txid# 281474976715663 Bootstrap, UserSID: ordinaryuser@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-31T16:33:07.093261Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012574893833268:2617] txid# 281474976715663 Bootstrap, UserSID: ordinaryuser@builtin IsClusterAdministrator: 0 2025-03-31T16:33:07.093769Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012574893833268:2617] txid# 281474976715663 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-03-31T16:33:07.093809Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012574893833268:2617] txid# 281474976715663 HandleResolveDatabase, UserSID: ordinaryuser@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-03-31T16:33:07.093860Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012574893833268:2617] txid# 281474976715663 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:33:07.094128Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012574893833268:2617] txid# 281474976715663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:33:07.094156Z node 59 :TX_PROXY ERROR: Actor# [59:7488012574893833268:2617] txid# 281474976715663, Access denied for ordinaryuser@builtin, attempt to manage user 2025-03-31T16:33:07.094255Z node 59 :TX_PROXY ERROR: Actor# [59:7488012574893833268:2617] txid# 281474976715663, issues: { message: "Access denied for ordinaryuser@builtin" issue_code: 200000 severity: 1 } 2025-03-31T16:33:07.094286Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012574893833268:2617] txid# 281474976715663 SEND to# [59:7488012574893833267:2352] Source {TEvProposeTransactionStatus Status# 5} 2025-03-31T16:33:07.094894Z node 59 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=59&id=ZGY0MTgyODctOTJiMGE2OWMtYThkOWU2MGYtNGZiNmUxZTg=, ActorId: [59:7488012574893833253:2352], ActorState: ExecuteState, 
TraceId: 01jqpgscg02dtv7qjd34y6ye5d, Create QueryResponse for error on request, msg: 2025-03-31T16:33:07.095239Z node 59 :TX_PROXY DEBUG: actor# [59:7488012553418995949:2113] Handle TEvExecuteKqpTransaction 2025-03-31T16:33:07.095260Z node 59 :TX_PROXY DEBUG: actor# [59:7488012553418995949:2113] TxId# 281474976715664 ProcessProposeKqpTransaction 2025-03-31T16:33:07.278601Z node 59 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[59:7488012553418995727:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:07.278693Z node 59 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-24 [GOOD] >> TResourceBroker::TestUpdateCookie [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-48 [GOOD] Test command err: Starting YDB, grpc: 21403, msgbus: 27549 2025-03-31T16:30:21.028116Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011863077836747:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:21.028198Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0015a7/r3tmp/tmpH49DvQ/pdisk_1.dat 2025-03-31T16:30:21.637894Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:30:21.638003Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:30:21.638767Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:30:21.644576Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21403, node 1 2025-03-31T16:30:21.795719Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:30:21.795741Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:30:21.795760Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:30:21.795882Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27549 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-03-31T16:30:22.207841Z node 1 :TX_PROXY DEBUG: actor# [1:7488011863077836997:2124] Handle TEvNavigate describe path dc-1 2025-03-31T16:30:22.207894Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011867372804802:2447] HANDLE EvNavigateScheme dc-1 2025-03-31T16:30:22.224069Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011867372804802:2447] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-31T16:30:22.270874Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011867372804802:2447] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-03-31T16:30:22.296751Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011867372804802:2447] Handle TEvDescribeSchemeResult Forward to# [1:7488011867372804801:2446] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-03-31T16:30:22.336180Z node 1 :TX_PROXY DEBUG: actor# [1:7488011863077836997:2124] Handle TEvProposeTransaction 2025-03-31T16:30:22.336332Z node 1 :TX_PROXY DEBUG: actor# [1:7488011863077836997:2124] TxId# 281474976710657 ProcessProposeTransaction 2025-03-31T16:30:22.337404Z node 1 :TX_PROXY DEBUG: actor# [1:7488011863077836997:2124] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7488011867372804813:2454] 2025-03-31T16:30:22.493987Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011867372804813:2454] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-31T16:30:22.494073Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011867372804813:2454] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T16:30:22.494092Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011867372804813:2454] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T16:30:22.494163Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011867372804813:2454] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:30:22.494458Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011867372804813:2454] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:30:22.494682Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011867372804813:2454] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-03-31T16:30:22.494749Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011867372804813:2454] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-03-31T16:30:22.495029Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011867372804813:2454] txid# 281474976710657 HANDLE EvClientConnected 2025-03-31T16:30:22.495916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:30:22.500781Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011867372804813:2454] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-03-31T16:30:22.500854Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011867372804813:2454] txid# 281474976710657 SEND to# [1:7488011867372804812:2453] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 
2025-03-31T16:30:22.555642Z node 1 :TX_PROXY DEBUG: actor# [1:7488011863077836997:2124] Handle TEvProposeTransaction 2025-03-31T16:30:22.555673Z node 1 :TX_PROXY DEBUG: actor# [1:7488011863077836997:2124] TxId# 281474976710658 ProcessProposeTransaction 2025-03-31T16:30:22.555707Z node 1 :TX_PROXY DEBUG: actor# [1:7488011863077836997:2124] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7488011867372804858:2495] 2025-03-31T16:30:22.557487Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011867372804858:2495] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-31T16:30:22.557535Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011867372804858:2495] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T16:30:22.557545Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011867372804858:2495] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T16:30:22.557585Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011867372804858:2495] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:30:22.557776Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011867372804858:2495] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:30:22.557832Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011867372804858:2495] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T16:30:22.557869Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011867372804858:2495] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-03-31T16:30:22.557994Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011867372804858:2495] txid# 281474976710658 HANDLE EvClientConnected 2025-03-31T16:30:22.558383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:30:22.565861Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011867372804858:2495] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-03-31T16:30:22.565911Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011867372804858:2495] txid# 281474976710658 SEND to# [1:7488011867372804857:2494] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-03-31T16:30:22.625999Z node 1 :TX_PROXY DEBUG: actor# [1:7488011863077836997:2124] Handle TEvProposeTransaction 2025-03-31T16:30:22.626031Z node 1 :TX_PROXY DEBUG: actor# [1:7488011863077836997:2124] TxId# 281474976710659 ProcessProposeTransaction 2025-03-31T16:30:22.626067Z node 1 :TX_PROXY DEBUG: actor# [1:7488011863077836997:2124] Cookie# 0 userReqId# "" txid# 281474976710659 SEND to# [1:7488011867372804876:2505] 2025-03-31T16:30:22.628584Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011867372804876:2505] txid# 281474976710659 
Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\033\010\001\022\027\032\025cluster_admin@builtin\n#\010\000\022\037\010\001\020\200\200\002\032\025cluster_admin@builtin \000\n\"\010\000\022\036\010\001\020\200\010\032\025cluster_admin@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:48532" 2025-03-31T16:30:22.628632Z node 1 :TX_PROXY DEBUG: ... dPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:49988" 2025-03-31T16:33:07.280859Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012574588955848:2593] txid# 281474976715661 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-31T16:33:07.280882Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012574588955848:2593] txid# 281474976715661 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T16:33:07.280929Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012574588955848:2593] txid# 281474976715661 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:33:07.281282Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012574588955848:2593] txid# 281474976715661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:33:07.281423Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012574588955848:2593] HANDLE EvNavigateKeySetResult, txid# 281474976715661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T16:33:07.281491Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012574588955848:2593] txid# 281474976715661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715661 TabletId# 72057594046644480} 2025-03-31T16:33:07.281646Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012574588955848:2593] txid# 281474976715661 HANDLE EvClientConnected 2025-03-31T16:33:07.288271Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012574588955848:2593] txid# 281474976715661 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715661} 2025-03-31T16:33:07.288330Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012574588955848:2593] txid# 281474976715661 SEND to# [59:7488012574588955847:2334] Source {TEvProposeTransactionStatus txid# 281474976715661 Status# 48} 2025-03-31T16:33:07.398387Z node 59 :TX_PROXY DEBUG: actor# [59:7488012557409085874:2113] Handle TEvProposeTransaction 2025-03-31T16:33:07.398427Z node 59 :TX_PROXY DEBUG: actor# [59:7488012557409085874:2113] TxId# 281474976715662 ProcessProposeTransaction 2025-03-31T16:33:07.398489Z node 59 :TX_PROXY DEBUG: actor# [59:7488012557409085874:2113] Cookie# 0 userReqId# "" txid# 281474976715662 SEND to# [59:7488012574588955871:2607] 2025-03-31T16:33:07.401044Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012574588955871:2607] txid# 281474976715662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\022\010\001\022\016\032\014ordinaryuser\n\032\010\000\022\026\010\001\020\200\200\002\032\014ordinaryuser \000" } } } 
UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:49998" 2025-03-31T16:33:07.401130Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012574588955871:2607] txid# 281474976715662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-31T16:33:07.401155Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012574588955871:2607] txid# 281474976715662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T16:33:07.401212Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012574588955871:2607] txid# 281474976715662 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:33:07.401602Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012574588955871:2607] txid# 281474976715662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:33:07.401753Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012574588955871:2607] HANDLE EvNavigateKeySetResult, txid# 281474976715662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T16:33:07.401831Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012574588955871:2607] txid# 281474976715662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715662 TabletId# 72057594046644480} 2025-03-31T16:33:07.402018Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012574588955871:2607] txid# 281474976715662 HANDLE EvClientConnected 2025-03-31T16:33:07.402585Z node 59 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:33:07.405777Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012574588955871:2607] txid# 281474976715662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715662} 2025-03-31T16:33:07.405845Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012574588955871:2607] txid# 281474976715662 SEND to# [59:7488012574588955870:2347] Source {TEvProposeTransactionStatus txid# 281474976715662 Status# 48} 2025-03-31T16:33:07.440865Z node 59 :TX_PROXY DEBUG: actor# [59:7488012557409085874:2113] Handle TEvProposeTransaction 2025-03-31T16:33:07.440910Z node 59 :TX_PROXY DEBUG: actor# [59:7488012557409085874:2113] TxId# 281474976715663 ProcessProposeTransaction 2025-03-31T16:33:07.440971Z node 59 :TX_PROXY DEBUG: actor# [59:7488012557409085874:2113] Cookie# 0 userReqId# "" txid# 281474976715663 SEND to# [59:7488012574588955904:2626] 2025-03-31T16:33:07.443790Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012574588955904:2626] txid# 281474976715663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:50006" 2025-03-31T16:33:07.443877Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012574588955904:2626] txid# 281474976715663 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-31T16:33:07.443902Z node 59 :TX_PROXY DEBUG: Actor# 
[59:7488012574588955904:2626] txid# 281474976715663 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T16:33:07.443951Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012574588955904:2626] txid# 281474976715663 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:33:07.444356Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012574588955904:2626] txid# 281474976715663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:33:07.444488Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012574588955904:2626] HANDLE EvNavigateKeySetResult, txid# 281474976715663 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T16:33:07.444555Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012574588955904:2626] txid# 281474976715663 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715663 TabletId# 72057594046644480} 2025-03-31T16:33:07.444759Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012574588955904:2626] txid# 281474976715663 HANDLE EvClientConnected 2025-03-31T16:33:07.452533Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012574588955904:2626] txid# 281474976715663 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715663} 2025-03-31T16:33:07.452596Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012574588955904:2626] txid# 281474976715663 SEND to# [59:7488012574588955903:2349] Source {TEvProposeTransactionStatus txid# 281474976715663 Status# 48} 2025-03-31T16:33:07.493760Z node 59 :TX_PROXY DEBUG: actor# [59:7488012557409085874:2113] Handle TEvProposeTransaction 2025-03-31T16:33:07.493814Z node 59 :TX_PROXY DEBUG: actor# [59:7488012557409085874:2113] TxId# 281474976715664 ProcessProposeTransaction 2025-03-31T16:33:07.493881Z node 59 :TX_PROXY DEBUG: actor# [59:7488012557409085874:2113] Cookie# 0 userReqId# "" txid# 281474976715664 SEND to# [59:7488012574588955932:2638] 2025-03-31T16:33:07.496502Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012574588955932:2638] txid# 281474976715664 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { ModifyUser { User: "targetuser" Password: "passwd" IsHashedPassword: false } } } } UserToken: "\n\014ordinaryuser\022\030\022\026\n\024all-users@well-known\032\334\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzQ4MTk4NywiaWF0IjoxNzQzNDM4Nzg3LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.Earg5l_vEX0r5_Yq78SpJaAvEZ9PzfUUsWmVZxrf97JmwTIcLi436NAZ9W85dSrgqmucf3ONN3LIYZtjQGLBZLwj6xH3lnoyMyxSzq9WEFXTWpGE5TQoDhK-RLBNP_BiVh1gpihR1ShiUPsM4cSlu-sIpxf-f7Sqv9Vrj19KVCwal-iOQbpBI0wcaIr7UahucBLgEWwKmkQlpfGMyq1stYHvAONGtyU4c_kuR7YAw-yq6bfbs1smieLL5OFw0HSrvdmY5IcmP6_-3Lr_8BY5rHAbOmRooqR71cGeOqH2g_RAwZSqwTi-g1IY8rp-g2E561_eVq4yqswSKsAMVNrAwQ\"\005Login*\210\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzQ4MTk4NywiaWF0IjoxNzQzNDM4Nzg3LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.**" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:50030" 2025-03-31T16:33:07.496592Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012574588955932:2638] txid# 281474976715664 Bootstrap, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-31T16:33:07.496616Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012574588955932:2638] txid# 281474976715664 Bootstrap, UserSID: ordinaryuser 
IsClusterAdministrator: 0 2025-03-31T16:33:07.496788Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012574588955932:2638] txid# 281474976715664 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-03-31T16:33:07.496821Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012574588955932:2638] txid# 281474976715664 HandleResolveDatabase, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-03-31T16:33:07.496870Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012574588955932:2638] txid# 281474976715664 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:33:07.497185Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012574588955932:2638] txid# 281474976715664 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:33:07.497228Z node 59 :TX_PROXY ERROR: Actor# [59:7488012574588955932:2638] txid# 281474976715664, Access denied for ordinaryuser, attempt to manage user 2025-03-31T16:33:07.497350Z node 59 :TX_PROXY ERROR: Actor# [59:7488012574588955932:2638] txid# 281474976715664, issues: { message: "Access denied for ordinaryuser" issue_code: 200000 severity: 1 } 2025-03-31T16:33:07.497394Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012574588955932:2638] txid# 281474976715664 SEND to# [59:7488012574588955931:2361] Source {TEvProposeTransactionStatus Status# 5} 2025-03-31T16:33:07.497591Z node 59 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=59&id=OGNmZTk0MTUtNGIxZTE3NWUtODM0Zjg2OC1hYWI0NTE5Nw==, ActorId: [59:7488012574588955922:2361], ActorState: ExecuteState, TraceId: 01jqpgscws37rhc75rgve1xgdr, Create QueryResponse for error on request, msg: 2025-03-31T16:33:07.497807Z node 59 :TX_PROXY DEBUG: actor# [59:7488012557409085874:2113] Handle TEvExecuteKqpTransaction 2025-03-31T16:33:07.497832Z node 59 :TX_PROXY DEBUG: actor# [59:7488012557409085874:2113] TxId# 281474976715665 ProcessProposeKqpTransaction |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestSendWithoutWaitOpen [GOOD] >> KqpRanges::IsNotNullSecondComponent [GOOD] >> KqpRanges::IsNullInValue >> TFlatMetrics::TimeSeriesAvg16x60 [GOOD] >> TFlatMetrics::TimeSeriesAvg16Signed [GOOD] |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBroker::TestUpdateCookie [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::OltpNamedStatement [GOOD] Test command err: Trying to start YDB, gRPC: 21327, MsgBus: 24578 2025-03-31T16:32:44.155779Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012474868557823:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:44.155933Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00127f/r3tmp/tmp0hC0e8/pdisk_1.dat 2025-03-31T16:32:44.445458Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21327, node 1 2025-03-31T16:32:44.502907Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:32:44.502943Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:32:44.502956Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty 
maybe) 2025-03-31T16:32:44.503075Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:32:44.528092Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:44.528210Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:32:44.530513Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24578 TClient is connected to server localhost:24578 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:32:44.983593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:45.011873Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:32:46.970550Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012483458493066:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:46.970645Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012483458493085:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:46.970701Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:46.975849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:32:46.991430Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012483458493095:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:32:47.052010Z node 1 :TX_PROXY ERROR: Actor# [1:7488012487753460442:2336] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:32:47.350427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:32:47.452991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:32:48.322338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:32:49.155420Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488012474868557823:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:49.155492Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 29022, MsgBus: 24294 2025-03-31T16:32:56.393667Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012528603204922:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:56.393725Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00127f/r3tmp/tmpVqX5o2/pdisk_1.dat 2025-03-31T16:32:56.501293Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29022, node 2 2025-03-31T16:32:56.536003Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:56.536093Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:32:56.537664Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:32:56.591063Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:32:56.591106Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:32:56.591115Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:32:56.591232Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24294 TClient is connected to server localhost:24294 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:32:57.013121Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:57.020353Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:32:59.344999Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012541488107464:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:59.345063Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012541488107443:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:59.345350Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:59.348541Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:32:59.358184Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488012541488107481:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:32:59.425586Z node 2 :TX_PROXY ERROR: Actor# [2:7488012541488107535:2336] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:32:59.462952Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-31T16:32:59.508096Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-31T16:33:00.770802Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:33:01.561490Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488012528603204922:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:01.572587Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-72 [GOOD] Test command err: Starting YDB, grpc: 15867, msgbus: 9144 2025-03-31T16:30:23.627556Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011870297740822:2269];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:23.627619Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001588/r3tmp/tmp5NAB11/pdisk_1.dat 2025-03-31T16:30:24.137251Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:30:24.148174Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:30:24.148291Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:30:24.153438Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15867, node 1 2025-03-31T16:30:24.271707Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:30:24.271731Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:30:24.271741Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:30:24.271860Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9144 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-03-31T16:30:24.483864Z node 1 :TX_PROXY DEBUG: actor# [1:7488011870297740837:2110] Handle TEvNavigate describe path dc-1 2025-03-31T16:30:24.484029Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011874592708656:2444] HANDLE EvNavigateScheme dc-1 2025-03-31T16:30:24.485772Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011874592708656:2444] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-31T16:30:24.515512Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011874592708656:2444] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-03-31T16:30:24.529047Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011874592708656:2444] Handle TEvDescribeSchemeResult Forward to# [1:7488011874592708655:2443] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-03-31T16:30:24.547105Z node 1 :TX_PROXY DEBUG: actor# [1:7488011870297740837:2110] Handle TEvProposeTransaction 2025-03-31T16:30:24.547207Z node 1 :TX_PROXY DEBUG: actor# [1:7488011870297740837:2110] TxId# 281474976710657 ProcessProposeTransaction 2025-03-31T16:30:24.547321Z node 1 :TX_PROXY DEBUG: actor# [1:7488011870297740837:2110] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7488011874592708669:2450] 2025-03-31T16:30:24.644162Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011874592708669:2450] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-31T16:30:24.644255Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011874592708669:2450] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T16:30:24.644274Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011874592708669:2450] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T16:30:24.644375Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011874592708669:2450] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:30:24.644687Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011874592708669:2450] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:30:24.644827Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011874592708669:2450] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-03-31T16:30:24.644883Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011874592708669:2450] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-03-31T16:30:24.645107Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011874592708669:2450] txid# 281474976710657 HANDLE EvClientConnected 2025-03-31T16:30:24.645924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:30:24.648206Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011874592708669:2450] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-03-31T16:30:24.648273Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011874592708669:2450] txid# 281474976710657 SEND to# [1:7488011874592708668:2449] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 
2025-03-31T16:30:24.663125Z node 1 :TX_PROXY DEBUG: actor# [1:7488011870297740837:2110] Handle TEvProposeTransaction 2025-03-31T16:30:24.663165Z node 1 :TX_PROXY DEBUG: actor# [1:7488011870297740837:2110] TxId# 281474976710658 ProcessProposeTransaction 2025-03-31T16:30:24.663203Z node 1 :TX_PROXY DEBUG: actor# [1:7488011870297740837:2110] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7488011874592708719:2489] 2025-03-31T16:30:24.665708Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011874592708719:2489] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-31T16:30:24.665792Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011874592708719:2489] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T16:30:24.665808Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011874592708719:2489] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T16:30:24.665876Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011874592708719:2489] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:30:24.666155Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011874592708719:2489] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:30:24.666270Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011874592708719:2489] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T16:30:24.666319Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011874592708719:2489] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-03-31T16:30:24.666451Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011874592708719:2489] txid# 281474976710658 HANDLE EvClientConnected 2025-03-31T16:30:24.666954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:30:24.669555Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011874592708719:2489] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-03-31T16:30:24.669619Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011874592708719:2489] txid# 281474976710658 SEND to# [1:7488011874592708718:2488] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-03-31T16:30:24.701063Z node 1 :TX_PROXY DEBUG: actor# [1:7488011870297740837:2110] Handle TEvProposeTransaction 2025-03-31T16:30:24.701098Z node 1 :TX_PROXY DEBUG: actor# [1:7488011870297740837:2110] TxId# 281474976710659 ProcessProposeTransaction 2025-03-31T16:30:24.701142Z node 1 :TX_PROXY DEBUG: actor# [1:7488011870297740837:2110] Cookie# 0 userReqId# "" txid# 281474976710659 SEND to# [1:7488011874592708737:2499] 2025-03-31T16:30:24.711410Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011874592708737:2499] txid# 281474976710659 
Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\033\010\001\022\027\032\025cluster_admin@builtin\n#\010\000\022\037\010\001\020\200\200\002\032\025cluster_admin@builtin \000\n\"\010\000\022\036\010\001\020\200\010\032\025cluster_admin@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:42114" 2025-03-31T16:30:24.711477Z node 1 :TX_PROXY DEBUG: A ... 15494Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012573997804302:2528] txid# 281474976715660 HANDLE EvClientConnected 2025-03-31T16:33:07.716734Z node 59 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-03-31T16:33:07.719226Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012573997804302:2528] txid# 281474976715660 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715660} 2025-03-31T16:33:07.719287Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012573997804302:2528] txid# 281474976715660 SEND to# [59:7488012573997804301:2342] Source {TEvProposeTransactionStatus txid# 281474976715660 Status# 53} 2025-03-31T16:33:07.731748Z node 59 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [59:7488012573997804301:2342], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-03-31T16:33:07.790922Z node 59 :TX_PROXY DEBUG: actor# [59:7488012556817934214:2111] Handle TEvProposeTransaction 2025-03-31T16:33:07.790969Z node 59 :TX_PROXY DEBUG: actor# [59:7488012556817934214:2111] TxId# 281474976715661 ProcessProposeTransaction 2025-03-31T16:33:07.791034Z node 59 :TX_PROXY DEBUG: actor# [59:7488012556817934214:2111] Cookie# 0 userReqId# "" txid# 281474976715661 SEND to# [59:7488012573997804379:2585] 2025-03-31T16:33:07.793370Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012573997804379:2585] txid# 281474976715661 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/dc-1" 2025-03-31T16:33:07.793447Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012573997804379:2585] txid# 281474976715661 Bootstrap, UserSID: metadata@system CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-31T16:33:07.793465Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012573997804379:2585] txid# 281474976715661 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 0 2025-03-31T16:33:07.793646Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012573997804379:2585] txid# 281474976715661 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-03-31T16:33:07.793699Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012573997804379:2585] txid# 281474976715661 HandleResolveDatabase, UserSID: metadata@system CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-03-31T16:33:07.794162Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012573997804379:2585] txid# 281474976715661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-03-31T16:33:07.794262Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012573997804379:2585] txid# 281474976715661 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:33:07.794489Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012573997804379:2585] txid# 281474976715661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:33:07.794641Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012573997804379:2585] HANDLE EvNavigateKeySetResult, txid# 281474976715661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T16:33:07.794700Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012573997804379:2585] txid# 281474976715661 SEND to# 72057594046644480 
shardToRequest {TEvModifySchemeTransaction txid# 281474976715661 TabletId# 72057594046644480} 2025-03-31T16:33:07.794858Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012573997804379:2585] txid# 281474976715661 HANDLE EvClientConnected 2025-03-31T16:33:07.798040Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012573997804379:2585] txid# 281474976715661 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715661 Reason# Check failed: path: '/dc-1/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)} 2025-03-31T16:33:07.798197Z node 59 :TX_PROXY ERROR: Actor# [59:7488012573997804379:2585] txid# 281474976715661, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:33:07.798244Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012573997804379:2585] txid# 281474976715661 SEND to# [59:7488012573997804301:2342] Source {TEvProposeTransactionStatus txid# 281474976715661 Status# 48} 2025-03-31T16:33:07.813886Z node 59 :TX_PROXY DEBUG: actor# [59:7488012556817934214:2111] Handle TEvProposeTransaction 2025-03-31T16:33:07.813926Z node 59 :TX_PROXY DEBUG: actor# [59:7488012556817934214:2111] TxId# 281474976715662 ProcessProposeTransaction 2025-03-31T16:33:07.814010Z node 59 :TX_PROXY DEBUG: actor# [59:7488012556817934214:2111] Cookie# 0 userReqId# "" txid# 281474976715662 SEND to# [59:7488012573997804403:2597] 2025-03-31T16:33:07.816065Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012573997804403:2597] txid# 281474976715662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:60124" 2025-03-31T16:33:07.816126Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012573997804403:2597] txid# 281474976715662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-31T16:33:07.816150Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012573997804403:2597] txid# 281474976715662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T16:33:07.816194Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012573997804403:2597] txid# 281474976715662 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:33:07.816475Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012573997804403:2597] txid# 281474976715662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:33:07.816554Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012573997804403:2597] HANDLE EvNavigateKeySetResult, txid# 281474976715662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T16:33:07.816601Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012573997804403:2597] txid# 281474976715662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715662 
TabletId# 72057594046644480} 2025-03-31T16:33:07.816754Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012573997804403:2597] txid# 281474976715662 HANDLE EvClientConnected 2025-03-31T16:33:07.824945Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012573997804403:2597] txid# 281474976715662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715662} 2025-03-31T16:33:07.825008Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012573997804403:2597] txid# 281474976715662 SEND to# [59:7488012573997804402:2335] Source {TEvProposeTransactionStatus txid# 281474976715662 Status# 48} 2025-03-31T16:33:07.864905Z node 59 :TX_PROXY DEBUG: actor# [59:7488012556817934214:2111] Handle TEvProposeTransaction 2025-03-31T16:33:07.864944Z node 59 :TX_PROXY DEBUG: actor# [59:7488012556817934214:2111] TxId# 281474976715663 ProcessProposeTransaction 2025-03-31T16:33:07.865003Z node 59 :TX_PROXY DEBUG: actor# [59:7488012556817934214:2111] Cookie# 0 userReqId# "" txid# 281474976715663 SEND to# [59:7488012573997804436:2611] 2025-03-31T16:33:07.867804Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012573997804436:2611] txid# 281474976715663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\024ordinaryuser@builtin\022\030\022\026\n\024all-users@well-known\032\024ordinaryuser@builtin\"\007Builtin*\027ordi****ltin (32520BBF)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:60154" 2025-03-31T16:33:07.867897Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012573997804436:2611] txid# 281474976715663 Bootstrap, UserSID: ordinaryuser@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-31T16:33:07.867923Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012573997804436:2611] txid# 281474976715663 Bootstrap, UserSID: ordinaryuser@builtin IsClusterAdministrator: 0 2025-03-31T16:33:07.868119Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012573997804436:2611] txid# 281474976715663 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-03-31T16:33:07.868159Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012573997804436:2611] txid# 281474976715663 HandleResolveDatabase, UserSID: ordinaryuser@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-03-31T16:33:07.868218Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012573997804436:2611] txid# 281474976715663 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:33:07.868538Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012573997804436:2611] txid# 281474976715663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:33:07.868577Z node 59 :TX_PROXY ERROR: Actor# [59:7488012573997804436:2611] txid# 281474976715663, Access denied for ordinaryuser@builtin, attempt to manage user 2025-03-31T16:33:07.868681Z node 59 :TX_PROXY ERROR: Actor# [59:7488012573997804436:2611] txid# 281474976715663, issues: { message: "Access denied for ordinaryuser@builtin" issue_code: 200000 severity: 1 } 2025-03-31T16:33:07.868733Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012573997804436:2611] txid# 281474976715663 SEND to# [59:7488012573997804435:2352] Source {TEvProposeTransactionStatus Status# 5} 2025-03-31T16:33:07.868940Z node 59 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=59&id=MWY3ZjkzZTktZmU4MzM3MzctMjFlMGYzMTQtYWQyNzljZTY=, ActorId: [59:7488012573997804421:2352], ActorState: ExecuteState, 
TraceId: 01jqpgsd8aebavqqm5021mts2p, Create QueryResponse for error on request, msg: 2025-03-31T16:33:07.869208Z node 59 :TX_PROXY DEBUG: actor# [59:7488012556817934214:2111] Handle TEvExecuteKqpTransaction 2025-03-31T16:33:07.869237Z node 59 :TX_PROXY DEBUG: actor# [59:7488012556817934214:2111] TxId# 281474976715664 ProcessProposeKqpTransaction >> KqpNotNullColumns::AlterDropNotNullColumn [GOOD] >> KqpNotNullColumns::CreateIndexedTableWithDisabledNotNullDataColumns |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TFlatMetrics::TimeSeriesAvg16Signed [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-24 [GOOD] Test command err: Starting YDB, grpc: 1383, msgbus: 19040 2025-03-31T16:30:21.092319Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011860296927853:2211];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:21.098644Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0015b2/r3tmp/tmpHjyb1D/pdisk_1.dat 2025-03-31T16:30:21.619153Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:30:21.619255Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:30:21.626189Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:30:21.663297Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1383, node 1 2025-03-31T16:30:21.700177Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-03-31T16:30:21.700395Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-03-31T16:30:21.788403Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:30:21.788424Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:30:21.788436Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:30:21.788559Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19040 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-03-31T16:30:22.207156Z node 1 :TX_PROXY DEBUG: actor# [1:7488011860296927933:2114] Handle TEvNavigate describe path dc-1 2025-03-31T16:30:22.207203Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011864591895769:2452] HANDLE EvNavigateScheme dc-1 2025-03-31T16:30:22.227331Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011864591895769:2452] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-31T16:30:22.287345Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011864591895769:2452] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-03-31T16:30:22.300747Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011864591895769:2452] Handle TEvDescribeSchemeResult Forward to# [1:7488011864591895768:2451] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-03-31T16:30:22.336132Z node 1 :TX_PROXY DEBUG: actor# [1:7488011860296927933:2114] Handle TEvProposeTransaction 2025-03-31T16:30:22.336162Z node 1 :TX_PROXY DEBUG: actor# [1:7488011860296927933:2114] TxId# 281474976710657 ProcessProposeTransaction 2025-03-31T16:30:22.337321Z node 1 :TX_PROXY DEBUG: actor# [1:7488011860296927933:2114] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7488011864591895777:2459] 2025-03-31T16:30:22.437630Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011864591895777:2459] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-31T16:30:22.437715Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011864591895777:2459] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-03-31T16:30:22.437753Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011864591895777:2459] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T16:30:22.437811Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011864591895777:2459] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:30:22.438105Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011864591895777:2459] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:30:22.438259Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011864591895777:2459] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-03-31T16:30:22.438315Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011864591895777:2459] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-03-31T16:30:22.438455Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011864591895777:2459] txid# 281474976710657 HANDLE EvClientConnected 2025-03-31T16:30:22.439886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:30:22.446063Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011864591895777:2459] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-03-31T16:30:22.446137Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011864591895777:2459] txid# 281474976710657 SEND to# [1:7488011864591895776:2458] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 
2025-03-31T16:30:22.480968Z node 1 :TX_PROXY DEBUG: actor# [1:7488011860296927933:2114] Handle TEvProposeTransaction 2025-03-31T16:30:22.481003Z node 1 :TX_PROXY DEBUG: actor# [1:7488011860296927933:2114] TxId# 281474976710658 ProcessProposeTransaction 2025-03-31T16:30:22.481032Z node 1 :TX_PROXY DEBUG: actor# [1:7488011860296927933:2114] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7488011864591895825:2500] 2025-03-31T16:30:22.483493Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011864591895825:2500] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-31T16:30:22.483564Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011864591895825:2500] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-03-31T16:30:22.483611Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011864591895825:2500] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T16:30:22.483674Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011864591895825:2500] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:30:22.483964Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011864591895825:2500] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:30:22.484036Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011864591895825:2500] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T16:30:22.484096Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011864591895825:2500] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-03-31T16:30:22.484221Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011864591895825:2500] txid# 281474976710658 HANDLE EvClientConnected 2025-03-31T16:30:22.485121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:30:22.487741Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011864591895825:2500] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-03-31T16:30:22.487794Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011864591895825:2500] txid# 281474976710658 SEND to# [1:7488011864591895824:2499] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-03-31T16:30:22.564330Z node 1 :TX_PROXY DEBUG: actor# [1:7488011860296927933:2114] Handle TEvProposeTransaction 2025-03-31T16:30:22.564361Z node 1 :TX_PROXY DEBUG: actor# [1:7488011860296927933:2114] TxId# 281474976710659 ProcessProposeTransaction 2025-03-31T16:30:22.564401Z node 1 :TX_PROXY DEBUG: actor# [1:7488011860296927933:2114] Cookie# 0 userReqId# "" txid# 281474976710659 SEND to# [1:7488011864591895843:2510] 2025-03-31T16:30:22.566815Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011864591895843:2510] txid# 281474976710659 
Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\001\022\026\032\024ordinaryuser@builtin\n\"\010\000\022\036\010\001\020\200\200\002\032\024ordinaryuser@builtin \000\n!\010\000\022\035\010\001\020\200\010\032\024ordinaryuser@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\ ... LE EvNavigateKeySetResult, txid# 281474976715660 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T16:33:08.008644Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012577542886048:2577] txid# 281474976715660 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715660 TabletId# 72057594046644480} 2025-03-31T16:33:08.008810Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012577542886048:2577] txid# 281474976715660 HANDLE EvClientConnected 2025-03-31T16:33:08.012018Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012577542886048:2577] txid# 281474976715660 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715660 Reason# Check failed: path: '/dc-1/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)} 2025-03-31T16:33:08.012155Z node 59 :TX_PROXY ERROR: Actor# [59:7488012577542886048:2577] txid# 281474976715660, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:33:08.012198Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012577542886048:2577] txid# 281474976715660 SEND to# [59:7488012573247918676:2341] Source {TEvProposeTransactionStatus txid# 281474976715660 Status# 48} 2025-03-31T16:33:08.028949Z node 59 :TX_PROXY DEBUG: actor# [59:7488012556068048675:2113] Handle TEvProposeTransaction 2025-03-31T16:33:08.028986Z node 59 :TX_PROXY DEBUG: actor# [59:7488012556068048675:2113] TxId# 281474976715661 ProcessProposeTransaction 2025-03-31T16:33:08.029039Z node 59 :TX_PROXY DEBUG: actor# [59:7488012556068048675:2113] Cookie# 0 userReqId# "" txid# 281474976715661 SEND to# [59:7488012577542886072:2589] 2025-03-31T16:33:08.031824Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012577542886072:2589] txid# 281474976715661 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "ordinaryuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:54402" 2025-03-31T16:33:08.031904Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012577542886072:2589] txid# 281474976715661 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-31T16:33:08.031927Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012577542886072:2589] txid# 281474976715661 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T16:33:08.031978Z node 59 :TX_PROXY DEBUG: Actor# 
[59:7488012577542886072:2589] txid# 281474976715661 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:33:08.032281Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012577542886072:2589] txid# 281474976715661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:33:08.032413Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012577542886072:2589] HANDLE EvNavigateKeySetResult, txid# 281474976715661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T16:33:08.032485Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012577542886072:2589] txid# 281474976715661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715661 TabletId# 72057594046644480} 2025-03-31T16:33:08.032696Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012577542886072:2589] txid# 281474976715661 HANDLE EvClientConnected 2025-03-31T16:33:08.040962Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012577542886072:2589] txid# 281474976715661 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715661} 2025-03-31T16:33:08.041025Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012577542886072:2589] txid# 281474976715661 SEND to# [59:7488012577542886071:2334] Source {TEvProposeTransactionStatus txid# 281474976715661 Status# 48} 2025-03-31T16:33:08.305484Z node 59 :TX_PROXY DEBUG: actor# [59:7488012556068048675:2113] Handle TEvProposeTransaction 2025-03-31T16:33:08.305526Z node 59 :TX_PROXY DEBUG: actor# [59:7488012556068048675:2113] TxId# 281474976715662 ProcessProposeTransaction 2025-03-31T16:33:08.305583Z node 59 :TX_PROXY DEBUG: actor# [59:7488012556068048675:2113] Cookie# 0 userReqId# "" txid# 281474976715662 SEND to# [59:7488012577542886092:2603] 2025-03-31T16:33:08.308422Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012577542886092:2603] txid# 281474976715662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\022\010\001\022\016\032\014ordinaryuser\n\032\010\000\022\026\010\001\020\200\200\002\032\014ordinaryuser \000" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:54410" 2025-03-31T16:33:08.308511Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012577542886092:2603] txid# 281474976715662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-31T16:33:08.308537Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012577542886092:2603] txid# 281474976715662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T16:33:08.308595Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012577542886092:2603] txid# 281474976715662 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:33:08.308962Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012577542886092:2603] txid# 281474976715662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:33:08.309076Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012577542886092:2603] HANDLE EvNavigateKeySetResult, txid# 281474976715662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 
72057594046382081 RedirectRequired# true 2025-03-31T16:33:08.309140Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012577542886092:2603] txid# 281474976715662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715662 TabletId# 72057594046644480} 2025-03-31T16:33:08.309308Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012577542886092:2603] txid# 281474976715662 HANDLE EvClientConnected 2025-03-31T16:33:08.309781Z node 59 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:33:08.314854Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012577542886092:2603] txid# 281474976715662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715662} 2025-03-31T16:33:08.314925Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012577542886092:2603] txid# 281474976715662 SEND to# [59:7488012577542886091:2347] Source {TEvProposeTransactionStatus txid# 281474976715662 Status# 48} 2025-03-31T16:33:08.367587Z node 59 :TX_PROXY DEBUG: actor# [59:7488012556068048675:2113] Handle TEvProposeTransaction 2025-03-31T16:33:08.367622Z node 59 :TX_PROXY DEBUG: actor# [59:7488012556068048675:2113] TxId# 281474976715663 ProcessProposeTransaction 2025-03-31T16:33:08.367680Z node 59 :TX_PROXY DEBUG: actor# [59:7488012556068048675:2113] Cookie# 0 userReqId# "" txid# 281474976715663 SEND to# [59:7488012577542886130:2623] 2025-03-31T16:33:08.369784Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012577542886130:2623] txid# 281474976715663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014ordinaryuser\022\030\022\026\n\024all-users@well-known\032\334\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzQ4MTk4OCwiaWF0IjoxNzQzNDM4Nzg4LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.GaXWGh43y2KaeuPMiLQUaNzISpbxfyKnIo_cOhX9XOnx1j3AMUeU20nX85NMi4W7lqhASuizGqUvoMJ-8T5qIBoZL-Gbz3JD7QS-jMeC9MOPRjTLHtKVv56yAG94ulFVTEoB8M2mOe0nvOMFwCI8P1dL31ueZ_BDccN49g50nv5QvTNDmFe-miqvu6PqQGsmWYROUoDDNIFXeI_BQAg6U5cMSduLb1bf1skCdZm5N9uBToVpjITWnrsWYDV8PqBSxfaUBhuyaYYV_T4nNI5XSNKZREp8cGiu8QnaFo_GU4fZcr6rwP8vMo3_MaNuOn6Fmj8-GV-5EcUmRcl2HgLCjw\"\005Login*\210\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzQ4MTk4OCwiaWF0IjoxNzQzNDM4Nzg4LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.**" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:54434" 2025-03-31T16:33:08.369851Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012577542886130:2623] txid# 281474976715663 Bootstrap, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-31T16:33:08.369868Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012577542886130:2623] txid# 281474976715663 Bootstrap, UserSID: ordinaryuser IsClusterAdministrator: 0 2025-03-31T16:33:08.370077Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012577542886130:2623] txid# 281474976715663 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-03-31T16:33:08.370124Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012577542886130:2623] txid# 281474976715663 HandleResolveDatabase, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-03-31T16:33:08.370176Z node 59 :TX_PROXY DEBUG: Actor# 
[59:7488012577542886130:2623] txid# 281474976715663 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:33:08.370490Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012577542886130:2623] txid# 281474976715663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:33:08.370527Z node 59 :TX_PROXY ERROR: Actor# [59:7488012577542886130:2623] txid# 281474976715663, Access denied for ordinaryuser, attempt to manage user 2025-03-31T16:33:08.370648Z node 59 :TX_PROXY ERROR: Actor# [59:7488012577542886130:2623] txid# 281474976715663, issues: { message: "Access denied for ordinaryuser" issue_code: 200000 severity: 1 } 2025-03-31T16:33:08.370688Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012577542886130:2623] txid# 281474976715663 SEND to# [59:7488012577542886129:2353] Source {TEvProposeTransactionStatus Status# 5} 2025-03-31T16:33:08.370874Z node 59 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=59&id=MjNkN2JiNi00NzZjNTcyZS1jMDU4YjcyZC02Nzk5YmIyNg==, ActorId: [59:7488012577542886115:2353], ActorState: ExecuteState, TraceId: 01jqpgsdqyd4d33rb791rsqjwt, Create QueryResponse for error on request, msg: 2025-03-31T16:33:08.371033Z node 59 :TX_PROXY DEBUG: actor# [59:7488012556068048675:2113] Handle TEvExecuteKqpTransaction 2025-03-31T16:33:08.371058Z node 59 :TX_PROXY DEBUG: actor# [59:7488012556068048675:2113] TxId# 281474976715664 ProcessProposeKqpTransaction 2025-03-31T16:33:08.564848Z node 59 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[59:7488012556068048585:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:08.564946Z node 59 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; >> BootstrapperTest::UnavailableStateStorage [GOOD] >> KqpNewEngine::JoinPureUncomparableKeys [GOOD] >> KqpNewEngine::JoinProjectMulti ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-24 [GOOD] Test command err: Starting YDB, grpc: 23104, msgbus: 24202 2025-03-31T16:30:21.035569Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011863972962200:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:21.041727Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0015be/r3tmp/tmpkkWSsq/pdisk_1.dat 2025-03-31T16:30:21.575687Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:30:21.575794Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:30:21.583901Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:30:21.599447Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23104, node 1 2025-03-31T16:30:21.791647Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:30:21.791669Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:30:21.791676Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty 
maybe) 2025-03-31T16:30:21.791812Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24202 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-03-31T16:30:22.239477Z node 1 :TX_PROXY DEBUG: actor# [1:7488011863972962432:2113] Handle TEvNavigate describe path dc-1 2025-03-31T16:30:22.239636Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011868267930276:2458] HANDLE EvNavigateScheme dc-1 2025-03-31T16:30:22.240364Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011868267930276:2458] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-31T16:30:22.283883Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011868267930276:2458] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-03-31T16:30:22.298283Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011868267930276:2458] Handle TEvDescribeSchemeResult Forward to# [1:7488011868267930275:2457] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-03-31T16:30:22.337828Z node 1 :TX_PROXY DEBUG: actor# [1:7488011863972962432:2113] Handle TEvProposeTransaction 2025-03-31T16:30:22.337881Z node 1 :TX_PROXY DEBUG: actor# [1:7488011863972962432:2113] TxId# 281474976710657 ProcessProposeTransaction 2025-03-31T16:30:22.338014Z node 1 :TX_PROXY DEBUG: actor# [1:7488011863972962432:2113] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7488011868267930284:2465] 2025-03-31T16:30:22.445905Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011868267930284:2465] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-31T16:30:22.445975Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011868267930284:2465] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T16:30:22.446006Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011868267930284:2465] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T16:30:22.446070Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011868267930284:2465] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:30:22.446412Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011868267930284:2465] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:30:22.446610Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011868267930284:2465] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-03-31T16:30:22.446716Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011868267930284:2465] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-03-31T16:30:22.446873Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011868267930284:2465] txid# 281474976710657 HANDLE EvClientConnected 2025-03-31T16:30:22.447876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:30:22.452112Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011868267930284:2465] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-03-31T16:30:22.452196Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011868267930284:2465] txid# 281474976710657 SEND to# [1:7488011868267930283:2464] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 
2025-03-31T16:30:22.480307Z node 1 :TX_PROXY DEBUG: actor# [1:7488011863972962432:2113] Handle TEvProposeTransaction 2025-03-31T16:30:22.480333Z node 1 :TX_PROXY DEBUG: actor# [1:7488011863972962432:2113] TxId# 281474976710658 ProcessProposeTransaction 2025-03-31T16:30:22.480365Z node 1 :TX_PROXY DEBUG: actor# [1:7488011863972962432:2113] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7488011868267930329:2503] 2025-03-31T16:30:22.482169Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011868267930329:2503] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-31T16:30:22.482240Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011868267930329:2503] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T16:30:22.482257Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011868267930329:2503] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T16:30:22.482304Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011868267930329:2503] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:30:22.482583Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011868267930329:2503] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:30:22.482721Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011868267930329:2503] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T16:30:22.482792Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011868267930329:2503] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-03-31T16:30:22.482925Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011868267930329:2503] txid# 281474976710658 HANDLE EvClientConnected 2025-03-31T16:30:22.483414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:30:22.485141Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011868267930329:2503] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-03-31T16:30:22.485181Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011868267930329:2503] txid# 281474976710658 SEND to# [1:7488011868267930328:2502] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-03-31T16:30:22.567537Z node 1 :TX_PROXY DEBUG: actor# [1:7488011863972962432:2113] Handle TEvProposeTransaction 2025-03-31T16:30:22.567567Z node 1 :TX_PROXY DEBUG: actor# [1:7488011863972962432:2113] TxId# 281474976710659 ProcessProposeTransaction 2025-03-31T16:30:22.567619Z node 1 :TX_PROXY DEBUG: actor# [1:7488011863972962432:2113] Cookie# 0 userReqId# "" txid# 281474976710659 SEND to# [1:7488011868267930347:2513] 2025-03-31T16:30:22.570153Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011868267930347:2513] txid# 281474976710659 
Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\026\010\001\022\022\032\020db_admin@builtin\n\036\010\000\022\032\010\001\020\200\200\002\032\020db_admin@builtin \000" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:52960" 2025-03-31T16:30:22.570216Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011868267930347:2513] txid# 281474976710659 Bootstrap, UserSID: ro ... e 59 :TX_PROXY DEBUG: Actor# [59:7488012576753987279:2597] txid# 281474976710660 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:33:07.990432Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012576753987279:2597] txid# 281474976710660 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:33:07.990537Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012576753987279:2597] HANDLE EvNavigateKeySetResult, txid# 281474976710660 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T16:33:07.990579Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012576753987279:2597] txid# 281474976710660 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710660 TabletId# 72057594046644480} 2025-03-31T16:33:07.990716Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012576753987279:2597] txid# 281474976710660 HANDLE EvClientConnected 2025-03-31T16:33:07.993059Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012576753987279:2597] txid# 281474976710660 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/dc-1/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)} 2025-03-31T16:33:07.993167Z node 59 :TX_PROXY ERROR: Actor# [59:7488012576753987279:2597] txid# 281474976710660, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:33:07.993194Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012576753987279:2597] txid# 281474976710660 SEND to# [59:7488012576753987200:2341] Source {TEvProposeTransactionStatus txid# 281474976710660 Status# 48} 2025-03-31T16:33:08.008701Z node 59 :TX_PROXY DEBUG: actor# [59:7488012559574117120:2113] Handle TEvProposeTransaction 2025-03-31T16:33:08.008743Z node 59 :TX_PROXY DEBUG: actor# [59:7488012559574117120:2113] TxId# 281474976710661 ProcessProposeTransaction 2025-03-31T16:33:08.008793Z node 59 :TX_PROXY DEBUG: actor# [59:7488012559574117120:2113] Cookie# 0 userReqId# "" txid# 281474976710661 SEND to# [59:7488012581048954599:2609] 2025-03-31T16:33:08.011836Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012581048954599:2609] txid# 281474976710661 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "ordinaryuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: 
"\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:33270" 2025-03-31T16:33:08.011916Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012581048954599:2609] txid# 281474976710661 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-31T16:33:08.011940Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012581048954599:2609] txid# 281474976710661 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T16:33:08.012018Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012581048954599:2609] txid# 281474976710661 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:33:08.012547Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012581048954599:2609] txid# 281474976710661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:33:08.012666Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012581048954599:2609] HANDLE EvNavigateKeySetResult, txid# 281474976710661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T16:33:08.012734Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012581048954599:2609] txid# 281474976710661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710661 TabletId# 72057594046644480} 2025-03-31T16:33:08.012920Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012581048954599:2609] txid# 281474976710661 HANDLE EvClientConnected 2025-03-31T16:33:08.020531Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012581048954599:2609] txid# 281474976710661 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710661} 2025-03-31T16:33:08.020601Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012581048954599:2609] txid# 281474976710661 SEND to# [59:7488012581048954598:2334] Source {TEvProposeTransactionStatus txid# 281474976710661 Status# 48} 2025-03-31T16:33:08.103522Z node 59 :TX_PROXY DEBUG: actor# [59:7488012559574117120:2113] Handle TEvProposeTransaction 2025-03-31T16:33:08.103564Z node 59 :TX_PROXY DEBUG: actor# [59:7488012559574117120:2113] TxId# 281474976710662 ProcessProposeTransaction 2025-03-31T16:33:08.103616Z node 59 :TX_PROXY DEBUG: actor# [59:7488012559574117120:2113] Cookie# 0 userReqId# "" txid# 281474976710662 SEND to# [59:7488012581048954619:2623] 2025-03-31T16:33:08.106386Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012581048954619:2623] txid# 281474976710662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\022\010\001\022\016\032\014ordinaryuser\n\032\010\000\022\026\010\001\020\200\200\002\032\014ordinaryuser \000" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:33272" 2025-03-31T16:33:08.106482Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012581048954619:2623] txid# 281474976710662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-31T16:33:08.106508Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012581048954619:2623] txid# 281474976710662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T16:33:08.106565Z node 59 :TX_PROXY DEBUG: Actor# 
[59:7488012581048954619:2623] txid# 281474976710662 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:33:08.106966Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012581048954619:2623] txid# 281474976710662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:33:08.107148Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012581048954619:2623] HANDLE EvNavigateKeySetResult, txid# 281474976710662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T16:33:08.107221Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012581048954619:2623] txid# 281474976710662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710662 TabletId# 72057594046644480} 2025-03-31T16:33:08.107440Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012581048954619:2623] txid# 281474976710662 HANDLE EvClientConnected 2025-03-31T16:33:08.107994Z node 59 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:33:08.110645Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012581048954619:2623] txid# 281474976710662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710662} 2025-03-31T16:33:08.110713Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012581048954619:2623] txid# 281474976710662 SEND to# [59:7488012581048954618:2347] Source {TEvProposeTransactionStatus txid# 281474976710662 Status# 48} 2025-03-31T16:33:08.152459Z node 59 :TX_PROXY DEBUG: actor# [59:7488012559574117120:2113] Handle TEvProposeTransaction 2025-03-31T16:33:08.152509Z node 59 :TX_PROXY DEBUG: actor# [59:7488012559574117120:2113] TxId# 281474976710663 ProcessProposeTransaction 2025-03-31T16:33:08.152574Z node 59 :TX_PROXY DEBUG: actor# [59:7488012559574117120:2113] Cookie# 0 userReqId# "" txid# 281474976710663 SEND to# [59:7488012581048954657:2643] 2025-03-31T16:33:08.155440Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012581048954657:2643] txid# 281474976710663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014ordinaryuser\022\030\022\026\n\024all-users@well-known\032\334\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzQ4MTk4OCwiaWF0IjoxNzQzNDM4Nzg4LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.dILtBIlv7rTmQPydicUHFYl0DCQMOwqs_vi1V5GeoTmCijR9-2Ij-MVAm7GxemfrJITUABAgNVoNyyOE5-Wde5F95vDjhgaQTtm3n0Bsh5SHIOudToinmrp0T-ti7tinI1UkCg7vB6TMxhQp6NY-d3YV2M-4gk8uJtM0qmeOlhbUxL1eV271y0f3smXw7edV_lkNRzETvlcWPNVyo8yTqNpXCKlMQ7dKvdR04B2HazYb4LIm3tvFyjf8xZSo3wFB0H0e7wU2rEDehhEi3mac31kHuxrW-DR8edmC3YueZxTRmGrtFCta9xKwae3lvDB2pIqDmLy85MRADinWFU9LwQ\"\005Login*\210\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzQ4MTk4OCwiaWF0IjoxNzQzNDM4Nzg4LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.**" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:33302" 2025-03-31T16:33:08.155538Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012581048954657:2643] txid# 281474976710663 Bootstrap, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-31T16:33:08.155562Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012581048954657:2643] 
txid# 281474976710663 Bootstrap, UserSID: ordinaryuser IsClusterAdministrator: 0 2025-03-31T16:33:08.155744Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012581048954657:2643] txid# 281474976710663 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-03-31T16:33:08.155781Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012581048954657:2643] txid# 281474976710663 HandleResolveDatabase, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-03-31T16:33:08.155825Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012581048954657:2643] txid# 281474976710663 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:33:08.156098Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012581048954657:2643] txid# 281474976710663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:33:08.156138Z node 59 :TX_PROXY ERROR: Actor# [59:7488012581048954657:2643] txid# 281474976710663, Access denied for ordinaryuser, attempt to manage user 2025-03-31T16:33:08.156241Z node 59 :TX_PROXY ERROR: Actor# [59:7488012581048954657:2643] txid# 281474976710663, issues: { message: "Access denied for ordinaryuser" issue_code: 200000 severity: 1 } 2025-03-31T16:33:08.156271Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012581048954657:2643] txid# 281474976710663 SEND to# [59:7488012581048954656:2353] Source {TEvProposeTransactionStatus Status# 5} 2025-03-31T16:33:08.156609Z node 59 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=59&id=ODc4MTk5YzctNGMxNjRjMGYtNDhhODVhOTQtYzRjZmZjMTk=, ActorId: [59:7488012581048954642:2353], ActorState: ExecuteState, TraceId: 01jqpgsdh73xyh5a4vbktw6c47, Create QueryResponse for error on request, msg: 2025-03-31T16:33:08.156838Z node 59 :TX_PROXY DEBUG: actor# [59:7488012559574117120:2113] Handle TEvExecuteKqpTransaction 2025-03-31T16:33:08.156859Z node 59 :TX_PROXY DEBUG: actor# [59:7488012559574117120:2113] TxId# 281474976710664 ProcessProposeKqpTransaction ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite3+withSink [GOOD] Test command err: Trying to start YDB, gRPC: 25391, MsgBus: 8746 2025-03-31T16:32:56.651270Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012526884019444:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:56.651334Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00127c/r3tmp/tmpNve9WQ/pdisk_1.dat 2025-03-31T16:32:57.008734Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:32:57.048370Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:57.048466Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:32:57.050168Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25391, node 1 2025-03-31T16:32:57.087343Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:32:57.087374Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:32:57.087386Z node 1 
:NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:32:57.087502Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8746 TClient is connected to server localhost:8746 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:32:57.605959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:57.630836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:57.780287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:57.951648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:58.032280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:59.558939Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012539768923112:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:59.559056Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:59.806884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:32:59.832566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:32:59.859905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:32:59.885729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:32:59.920449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:32:59.956621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:33:00.001351Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012544063890924:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:00.001471Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:00.001545Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012544063890929:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:00.004988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:33:00.013997Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012544063890931:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:33:00.072003Z node 1 :TX_PROXY ERROR: Actor# [1:7488012544063890984:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:33:01.651345Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488012526884019444:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:01.651468Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:33:03.506873Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OWE1OGQxZC1iNzYyYWQ4Mi05NjZlZTdjZS0xOGUxMGM4Yw==, ActorId: [1:7488012544063891242:2488], ActorState: ExecuteState, TraceId: 01jqpgs8sw24gxz2vpb7rm1wac, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken Trying to start YDB, gRPC: 62416, MsgBus: 2743 2025-03-31T16:33:04.268598Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012564066960689:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:04.268664Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00127c/r3tmp/tmpEgSMi7/pdisk_1.dat 2025-03-31T16:33:04.370682Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62416, node 2 2025-03-31T16:33:04.399594Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:04.399694Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:04.401530Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:33:04.436135Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:04.436156Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:04.436164Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:04.436334Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2743 TClient is connected to server localhost:2743 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:33:04.828383Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:04.838027Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:04.889733Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:05.040846Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:05.103803Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:07.040708Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012576951864347:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:07.040800Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:07.081608Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:33:07.114402Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:33:07.145365Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:33:07.180063Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:33:07.216874Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:33:07.286188Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:33:07.324337Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012576951864862:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:07.324404Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:07.324527Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012576951864867:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:07.327711Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:33:07.337178Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488012576951864869:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:33:07.406266Z node 2 :TX_PROXY ERROR: Actor# [2:7488012576951864922:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:33:08.756525Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZTgyODBiMjItNGI3MjhkYWUtY2I0ZGMxMWUtNmVkZTEzYzI=, ActorId: [2:7488012581246832501:2495], ActorState: ExecuteState, TraceId: 01jqpgse257g9gghrff6mhbyew, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken >> BootstrapperTest::DuplicateNodes [GOOD] >> KqpAgg::GroupByLimit [GOOD] >> KqpExtractPredicateLookup::OverflowLookup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> BootstrapperTest::UnavailableStateStorage [GOOD] Test command err: ... waiting for pipe to connect ... waiting for blocked connect attempt ... blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR ... waiting for blocked connect attempt (done) ... disconnecting nodes 2 <-> 1 ... waiting for pipe to disconnect ... waiting for pipe to connect ... waiting for pipe to connect ... waiting for multiple state storage lookup attempts 2025-03-31T16:33:09.666449Z node 6 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 0} for [4:3:2050]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 1} for [4:6:2053]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 2} for [4:9:2056]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA 2025-03-31T16:33:09.667064Z node 6 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: ERROR, leader: [0:0:0] 2025-03-31T16:33:09.667130Z node 6 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, state storage unavailable, sleeping for 0.148014s 2025-03-31T16:33:09.782410Z node 6 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 0} for [4:3:2050]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 1} for [4:6:2053]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 2} for [4:9:2056]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA ... waiting for multiple state storage lookup attempts (done) >> KqpNotNullColumns::JoinBothTablesWithNotNullPk-StreamLookup [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> BootstrapperTest::DuplicateNodes [GOOD] Test command err: ... waiting for pipe to connect ... sleeping (original instance should be preserved) ... waiting for original instance to stop ... waiting for original instance to stop (done) ... 
waiting for pipe to connect 2025-03-31T16:33:09.636624Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-03-31T16:33:09.636746Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-03-31T16:33:09.637394Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: NODATA, leader: [0:0:0] 2025-03-31T16:33:09.637438Z node 5 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 12552810490399048506 2025-03-31T16:33:09.637543Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: NODATA, leader: [0:0:0] 2025-03-31T16:33:09.637566Z node 4 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 15249746964198841502 2025-03-31T16:33:09.638406Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 5 state: FREE 2025-03-31T16:33:09.638475Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lost round, wait for 0.139961s 2025-03-31T16:33:09.638683Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 4 state: FREE 2025-03-31T16:33:09.638714Z node 5 :BOOTSTRAPPER NOTICE: tablet: 9437184, type: Dummy, boot 2025-03-31T16:33:09.795540Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-03-31T16:33:09.795979Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [5:212:2095] 2025-03-31T16:33:09.796336Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: OK 2025-03-31T16:33:09.796375Z node 4 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, connected to leader, waiting >> KqpNotNullColumns::JoinLeftTableWithNotNullPk+StreamLookup >> KqpMergeCn::TopSortBy_Float_Limit4 [GOOD] >> KqpMergeCn::TopSortBy_String_Limit3 >> ReadAttributesUtils::AttributesGatheringEmpry [GOOD] >> ReadAttributesUtils::AttributesGatheringFilter [GOOD] >> ReadAttributesUtils::AttributesGatheringRecursive [GOOD] >> KikimrProvider::TestFillAuthPropertiesNone [GOOD] >> KikimrProvider::TestFillAuthPropertiesServiceAccount [GOOD] >> KikimrProvider::TestFillAuthPropertiesMdbBasic [GOOD] >> KikimrIcGateway::TestCreateSameExternalTable >> KikimrIcGateway::TestLoadExternalTable >> KikimrProvider::TestFillAuthPropertiesBasic [GOOD] >> KikimrProvider::TestFillAuthPropertiesAws [GOOD] >> KikimrProvider::AlterTableAddIndexWithTableSettings [GOOD] >> KikimrIcGateway::TestLoadBasicSecretValueFromExternalDataSourceMetadata >> KikimrIcGateway::TestCreateExternalTable >> KikimrIcGateway::TestLoadTableMetadata >> KqpNewEngine::PrimaryView [GOOD] >> KikimrIcGateway::TestListPath >> BootstrapperTest::MultipleBootstrappers [GOOD] >> ReadAttributesUtils::ReplaceAttributesEmpty [GOOD] >> ReadAttributesUtils::ReplaceAttributesFilter [GOOD] |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrProvider::AlterTableAddIndexWithTableSettings [GOOD] |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> ReadAttributesUtils::AttributesGatheringRecursive [GOOD] |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrProvider::TestFillAuthPropertiesMdbBasic [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-72 [GOOD] |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> ReadAttributesUtils::ReplaceAttributesFilter [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> 
BootstrapperTest::MultipleBootstrappers [GOOD] Test command err: ... waiting for pipe to connect ... stopping current instance ... waiting for pipe to disconnect ... waiting for pipe to connect ... sleeping for 2 seconds 2025-03-31T16:33:08.524186Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-03-31T16:33:08.524274Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-03-31T16:33:08.524355Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-03-31T16:33:08.525333Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: NODATA, leader: [0:0:0] 2025-03-31T16:33:08.525394Z node 5 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 15249746964198841502 2025-03-31T16:33:08.525583Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: NODATA, leader: [0:0:0] 2025-03-31T16:33:08.525609Z node 3 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 838756400823690829 2025-03-31T16:33:08.525799Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: NODATA, leader: [0:0:0] 2025-03-31T16:33:08.525821Z node 4 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 2303809724928703835 2025-03-31T16:33:08.526868Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 3 state: FREE 2025-03-31T16:33:08.527048Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 4 state: FREE 2025-03-31T16:33:08.527077Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lost round, wait for 0.149198s 2025-03-31T16:33:08.527192Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 5 state: FREE 2025-03-31T16:33:08.527311Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 3 state: FREE 2025-03-31T16:33:08.527375Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 4 state: FREE 2025-03-31T16:33:08.527400Z node 3 :BOOTSTRAPPER NOTICE: tablet: 9437184, type: Dummy, boot 2025-03-31T16:33:08.527573Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 5 state: FREE 2025-03-31T16:33:08.527607Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lost round, wait for 0.190190s 2025-03-31T16:33:08.696823Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-03-31T16:33:08.697487Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [3:274:2096] 2025-03-31T16:33:08.697980Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: OK 2025-03-31T16:33:08.698026Z node 5 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, connected to leader, waiting 2025-03-31T16:33:08.740464Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-03-31T16:33:08.741099Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [3:274:2096] 2025-03-31T16:33:08.741552Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: OK 2025-03-31T16:33:08.741588Z node 4 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, connected to leader, waiting ... waiting for pipe to connect ... tablet initially started on node 3 (idx 1) in gen 2 ... disconnecting other nodes ... 
sleeping for 2 seconds (tablet expected to survive) 2025-03-31T16:33:09.505673Z node 3 :PIPE_SERVER ERROR: [9437184] NodeDisconnected NodeId# 5 2025-03-31T16:33:09.505773Z node 3 :PIPE_SERVER ERROR: [9437184] NodeDisconnected NodeId# 4 2025-03-31T16:33:09.506051Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, disconnected 2025-03-31T16:33:09.506092Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-03-31T16:33:09.506132Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, disconnected 2025-03-31T16:33:09.506154Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-03-31T16:33:09.508008Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [3:274:2096] 2025-03-31T16:33:09.508210Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [3:274:2096] 2025-03-31T16:33:09.509392Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: OK 2025-03-31T16:33:09.509436Z node 4 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, connected to leader, waiting 2025-03-31T16:33:09.509695Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: OK 2025-03-31T16:33:09.509720Z node 5 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, connected to leader, waiting ... disconnecting other nodes (new tablet connections fail) ... sleeping for 2 seconds (tablet expected to survive) 2025-03-31T16:33:10.206372Z node 3 :PIPE_SERVER ERROR: [9437184] NodeDisconnected NodeId# 4 2025-03-31T16:33:10.206446Z node 3 :PIPE_SERVER ERROR: [9437184] NodeDisconnected NodeId# 5 2025-03-31T16:33:10.206557Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, disconnected 2025-03-31T16:33:10.206596Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-03-31T16:33:10.206641Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, disconnected 2025-03-31T16:33:10.206662Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-03-31T16:33:10.207527Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [3:274:2096] 2025-03-31T16:33:10.207668Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [3:274:2096] ... disconnecting nodes 1 <-> 2 (tablet connect attempt) ... blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR ... disconnecting nodes 1 <-> 3 (tablet connect attempt) ... blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR 2025-03-31T16:33:10.208237Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: ERROR 2025-03-31T16:33:10.208281Z node 4 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 6528562917658346564 2025-03-31T16:33:10.208437Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: ERROR 2025-03-31T16:33:10.208460Z node 5 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 16349739802483488852 2025-03-31T16:33:10.208955Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 3 state: OWNER 2025-03-31T16:33:10.208995Z node 4 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, become watch on node 3 (owner) 2025-03-31T16:33:10.209225Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 3 state: OWNER 2025-03-31T16:33:10.209248Z node 5 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, become watch on node 3 (owner) ... 
disconnect other nodes (new owner expected) ... sleeping for 2 seconds (new tablet expected to start once) 2025-03-31T16:33:10.905895Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, disconnected from 3, round 16045690984833335029 2025-03-31T16:33:10.905950Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-03-31T16:33:10.906001Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, disconnected from 3, round 16045690984833335029 2025-03-31T16:33:10.906024Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-03-31T16:33:10.906766Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [3:274:2096] 2025-03-31T16:33:10.906837Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [3:274:2096] ... disconnecting nodes 1 <-> 2 (tablet connect attempt) ... blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR ... disconnecting nodes 1 <-> 3 (tablet connect attempt) ... blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR 2025-03-31T16:33:10.907322Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: ERROR 2025-03-31T16:33:10.907354Z node 4 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 13164802727073798053 2025-03-31T16:33:10.907596Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: ERROR 2025-03-31T16:33:10.907619Z node 5 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 10171326560769670008 ... disconnecting nodes 1 <-> 2 (bootstrap watch attempt) ... blocking NKikimr::TEvBootstrapper::TEvWatch from TABLET_BOOTSTRAPPER to TABLET_BOOTSTRAPPER ... disconnecting nodes 1 <-> 3 (bootstrap watch attempt) ... 
blocking NKikimr::TEvBootstrapper::TEvWatch from TABLET_BOOTSTRAPPER to TABLET_BOOTSTRAPPER 2025-03-31T16:33:10.908037Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, disconnected from 3, round 16045690984833335031 2025-03-31T16:33:10.908087Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 3 state: DISCONNECTED 2025-03-31T16:33:10.908133Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 5 state: FREE 2025-03-31T16:33:10.908160Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lost round, wait for 0.116418s 2025-03-31T16:33:10.908197Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, disconnected from 3, round 16045690984833335031 2025-03-31T16:33:10.908226Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 3 state: DISCONNECTED 2025-03-31T16:33:10.908280Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 4 state: FREE 2025-03-31T16:33:10.908296Z node 5 :BOOTSTRAPPER NOTICE: tablet: 9437184, type: Dummy, boot 2025-03-31T16:33:10.910112Z node 3 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, tablet dead 2025-03-31T16:33:10.910188Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-03-31T16:33:10.913114Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [5:394:2096] 2025-03-31T16:33:10.926380Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: OK 2025-03-31T16:33:10.926422Z node 3 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, connected to leader, waiting 2025-03-31T16:33:10.991281Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-03-31T16:33:10.991820Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [5:394:2096] 2025-03-31T16:33:10.992239Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: OK 2025-03-31T16:33:10.992270Z node 4 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, connected to leader, waiting ... waiting for pipe to connect ... disconnecting nodes 1 <-> 0 (tablet connect attempt) ... 
blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to >> KqpRanges::DateKeyPredicate [GOOD] >> KqpRanges::DeleteNotFullScan+UseSink >> TInterconnectTest::TestCrossConnect [GOOD] >> TInterconnectTest::TestManyEventsWithReconnect ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::PrimaryView [GOOD] Test command err: Trying to start YDB, gRPC: 23445, MsgBus: 62051 2025-03-31T16:32:30.025903Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012415987993526:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:30.026030Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0009f5/r3tmp/tmpuC2RSJ/pdisk_1.dat 2025-03-31T16:32:30.319198Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23445, node 1 2025-03-31T16:32:30.392111Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:32:30.392138Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:32:30.392163Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:32:30.392286Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:32:30.403341Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:30.403480Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:32:30.405040Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:62051 TClient is connected to server localhost:62051 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:32:30.828334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:30.854038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:32:30.993401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:31.144523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:31.205374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:32.537773Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012424577929908:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:32.537853Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:32.801194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:32:32.827702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:32:32.858712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:32:32.887477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:32:32.929380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:32:32.972094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:32:33.036384Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012428872897717:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:33.036486Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:33.036736Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012428872897722:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:33.040809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:32:33.050125Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012428872897724:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:32:33.151308Z node 1 :TX_PROXY ERROR: Actor# [1:7488012428872897781:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 15124, MsgBus: 26311 2025-03-31T16:32:34.707606Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012432386945769:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:34.707755Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0009f5/r3tmp/tmptRvMVd/pdisk_1.dat 2025-03-31T16:32:34.832900Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:32:34.854338Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:34.854405Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:32:34.855993Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15124, node 2 2025-03-31T16:32:34.899658Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:32:34.899682Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:32:34.899689Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:32:34.899812Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26311 TClient is connected to server localhost:26311 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:32:35.316150Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:35.332710Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:32:35.405814Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:35.554986Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:35.632303Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:37.535537Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012445271849438:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:37.535619Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: { ... 4976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:32:59.807592Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:32:59.841880Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:32:59.912729Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:32:59.980877Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:33:00.054985Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:33:00.106558Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488012546485035652:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:00.106628Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488012546485035657:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:00.106645Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:00.110370Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:33:00.123222Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7488012546485035659:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:33:00.223390Z node 6 :TX_PROXY ERROR: Actor# [6:7488012546485035715:3453] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:33:01.177776Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7488012529305164167:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:01.178404Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 61572, MsgBus: 11029 2025-03-31T16:33:02.907077Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7488012551569026535:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:02.907200Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0009f5/r3tmp/tmp8G02oZ/pdisk_1.dat 2025-03-31T16:33:03.044529Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:33:03.076810Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:03.076926Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:03.078805Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61572, node 7 2025-03-31T16:33:03.127326Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:03.127351Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:03.127367Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:03.127521Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11029 TClient is connected to server localhost:11029 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:33:03.726474Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:03.738812Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:03.866103Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:04.109299Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:04.192736Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:07.218340Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488012573043864806:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:07.218553Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:07.270374Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:33:07.309345Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:33:07.347908Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:33:07.384736Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:33:07.422323Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:33:07.463026Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:33:07.511730Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488012573043865320:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:07.511835Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:07.511852Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488012573043865325:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:07.516974Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:33:07.530121Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7488012573043865327:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:33:07.624746Z node 7 :TX_PROXY ERROR: Actor# [7:7488012573043865380:3450] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:33:07.907270Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7488012551569026535:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:07.907357Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:33:08.618709Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:33:08.661899Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-31T16:33:08.741718Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 >> KqpReturning::ReturningWorksIndexedOperationsWithDefault+QueryService [GOOD] >> KqpReturning::ReturningWorksIndexedOperationsWithDefault-QueryService ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-72 [GOOD] Test command err: Starting YDB, grpc: 16919, msgbus: 2402 2025-03-31T16:30:22.362534Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011865910458397:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:30:22.364237Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001595/r3tmp/tmpR6gF4b/pdisk_1.dat 2025-03-31T16:30:22.922756Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:30:22.937653Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:30:22.938475Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:30:22.948352Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16919, node 1 2025-03-31T16:30:23.047708Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:30:23.047733Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:30:23.047745Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:30:23.047855Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2402 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-03-31T16:30:23.261899Z node 1 :TX_PROXY DEBUG: actor# [1:7488011865910458623:2115] Handle TEvNavigate describe path dc-1 2025-03-31T16:30:23.261968Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011870205426432:2444] HANDLE EvNavigateScheme dc-1 2025-03-31T16:30:23.263062Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011870205426432:2444] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-31T16:30:23.317810Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011870205426432:2444] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-03-31T16:30:23.333109Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011870205426432:2444] Handle TEvDescribeSchemeResult Forward to# [1:7488011870205426431:2443] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-03-31T16:30:23.359640Z node 1 :TX_PROXY DEBUG: actor# [1:7488011865910458623:2115] Handle TEvProposeTransaction 2025-03-31T16:30:23.359670Z node 1 :TX_PROXY DEBUG: actor# [1:7488011865910458623:2115] TxId# 281474976710657 ProcessProposeTransaction 2025-03-31T16:30:23.359798Z node 1 :TX_PROXY DEBUG: actor# [1:7488011865910458623:2115] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7488011870205426447:2452] 2025-03-31T16:30:23.432621Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011870205426447:2452] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-31T16:30:23.432717Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011870205426447:2452] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T16:30:23.432737Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011870205426447:2452] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T16:30:23.432805Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011870205426447:2452] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:30:23.433378Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011870205426447:2452] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:30:23.433573Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011870205426447:2452] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-03-31T16:30:23.433651Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011870205426447:2452] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-03-31T16:30:23.433863Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011870205426447:2452] txid# 281474976710657 HANDLE EvClientConnected 2025-03-31T16:30:23.434807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:30:23.441149Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011870205426447:2452] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-03-31T16:30:23.441234Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011870205426447:2452] txid# 281474976710657 SEND to# [1:7488011870205426446:2451] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 
2025-03-31T16:30:23.465847Z node 1 :TX_PROXY DEBUG: actor# [1:7488011865910458623:2115] Handle TEvProposeTransaction 2025-03-31T16:30:23.465873Z node 1 :TX_PROXY DEBUG: actor# [1:7488011865910458623:2115] TxId# 281474976710658 ProcessProposeTransaction 2025-03-31T16:30:23.465905Z node 1 :TX_PROXY DEBUG: actor# [1:7488011865910458623:2115] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7488011870205426495:2489] 2025-03-31T16:30:23.468256Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011870205426495:2489] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-31T16:30:23.468299Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011870205426495:2489] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T16:30:23.468314Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011870205426495:2489] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T16:30:23.468374Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011870205426495:2489] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:30:23.468644Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011870205426495:2489] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:30:23.468726Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011870205426495:2489] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T16:30:23.468783Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011870205426495:2489] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-03-31T16:30:23.468911Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011870205426495:2489] txid# 281474976710658 HANDLE EvClientConnected 2025-03-31T16:30:23.469328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:30:23.472211Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011870205426495:2489] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-03-31T16:30:23.472240Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011870205426495:2489] txid# 281474976710658 SEND to# [1:7488011870205426494:2488] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-03-31T16:30:25.609081Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011878795361185:2339], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:25.609204Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011878795361174:2336], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:25.609314Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:25.609483Z node 1 :TX_PROXY DEBUG: actor# [1:7488011865910458623:2115] Handle TEvProposeTransaction 2025-03-31T16:30:25.609501Z node 1 :TX_PROXY DEBUG: actor# [1:7488011865910458623:2115] TxId# 281474976710659 ProcessProposeTransaction 2025-03-31T1 ... wd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:54262" 2025-03-31T16:33:11.087023Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012593266368566:2593] txid# 281474976715661 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-31T16:33:11.087045Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012593266368566:2593] txid# 281474976715661 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T16:33:11.087118Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012593266368566:2593] txid# 281474976715661 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:33:11.087489Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012593266368566:2593] txid# 281474976715661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:33:11.087601Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012593266368566:2593] HANDLE EvNavigateKeySetResult, txid# 281474976715661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T16:33:11.087662Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012593266368566:2593] txid# 281474976715661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715661 TabletId# 72057594046644480} 2025-03-31T16:33:11.087837Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012593266368566:2593] txid# 281474976715661 HANDLE EvClientConnected 2025-03-31T16:33:11.094432Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012593266368566:2593] txid# 281474976715661 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715661} 2025-03-31T16:33:11.094480Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012593266368566:2593] txid# 281474976715661 SEND to# [59:7488012593266368565:2334] Source {TEvProposeTransactionStatus txid# 281474976715661 Status# 48} 2025-03-31T16:33:11.132602Z node 59 :TX_PROXY DEBUG: actor# [59:7488012571791531292:2113] Handle TEvProposeTransaction 2025-03-31T16:33:11.132635Z node 59 :TX_PROXY DEBUG: actor# [59:7488012571791531292:2113] TxId# 281474976715662 ProcessProposeTransaction 2025-03-31T16:33:11.132679Z node 59 :TX_PROXY DEBUG: actor# [59:7488012571791531292:2113] Cookie# 0 userReqId# "" txid# 281474976715662 SEND to# [59:7488012593266368586:2607] 2025-03-31T16:33:11.135313Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012593266368586:2607] txid# 281474976715662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\022\010\001\022\016\032\014ordinaryuser\n\032\010\000\022\026\010\001\020\200\200\002\032\014ordinaryuser \000" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:54270" 2025-03-31T16:33:11.135390Z node 59 
:TX_PROXY DEBUG: Actor# [59:7488012593266368586:2607] txid# 281474976715662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-31T16:33:11.135415Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012593266368586:2607] txid# 281474976715662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T16:33:11.135471Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012593266368586:2607] txid# 281474976715662 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:33:11.135746Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012593266368586:2607] txid# 281474976715662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:33:11.135857Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012593266368586:2607] HANDLE EvNavigateKeySetResult, txid# 281474976715662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T16:33:11.135922Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012593266368586:2607] txid# 281474976715662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715662 TabletId# 72057594046644480} 2025-03-31T16:33:11.136103Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012593266368586:2607] txid# 281474976715662 HANDLE EvClientConnected 2025-03-31T16:33:11.136489Z node 59 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:33:11.138647Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012593266368586:2607] txid# 281474976715662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715662} 2025-03-31T16:33:11.138697Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012593266368586:2607] txid# 281474976715662 SEND to# [59:7488012593266368585:2347] Source {TEvProposeTransactionStatus txid# 281474976715662 Status# 48} 2025-03-31T16:33:11.171317Z node 59 :TX_PROXY DEBUG: actor# [59:7488012571791531292:2113] Handle TEvProposeTransaction 2025-03-31T16:33:11.171345Z node 59 :TX_PROXY DEBUG: actor# [59:7488012571791531292:2113] TxId# 281474976715663 ProcessProposeTransaction 2025-03-31T16:33:11.171379Z node 59 :TX_PROXY DEBUG: actor# [59:7488012571791531292:2113] Cookie# 0 userReqId# "" txid# 281474976715663 SEND to# [59:7488012593266368623:2630] 2025-03-31T16:33:11.173954Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012593266368623:2630] txid# 281474976715663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:54288" 2025-03-31T16:33:11.174033Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012593266368623:2630] txid# 281474976715663 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-31T16:33:11.174055Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012593266368623:2630] txid# 281474976715663 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T16:33:11.174105Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012593266368623:2630] txid# 281474976715663 
TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:33:11.174425Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012593266368623:2630] txid# 281474976715663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:33:11.174534Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012593266368623:2630] HANDLE EvNavigateKeySetResult, txid# 281474976715663 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T16:33:11.174595Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012593266368623:2630] txid# 281474976715663 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715663 TabletId# 72057594046644480} 2025-03-31T16:33:11.174758Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012593266368623:2630] txid# 281474976715663 HANDLE EvClientConnected 2025-03-31T16:33:11.181910Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012593266368623:2630] txid# 281474976715663 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715663} 2025-03-31T16:33:11.181970Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012593266368623:2630] txid# 281474976715663 SEND to# [59:7488012593266368622:2349] Source {TEvProposeTransactionStatus txid# 281474976715663 Status# 48} 2025-03-31T16:33:11.222446Z node 59 :TX_PROXY DEBUG: actor# [59:7488012571791531292:2113] Handle TEvProposeTransaction 2025-03-31T16:33:11.222484Z node 59 :TX_PROXY DEBUG: actor# [59:7488012571791531292:2113] TxId# 281474976715664 ProcessProposeTransaction 2025-03-31T16:33:11.222548Z node 59 :TX_PROXY DEBUG: actor# [59:7488012571791531292:2113] Cookie# 0 userReqId# "" txid# 281474976715664 SEND to# [59:7488012593266368651:2642] 2025-03-31T16:33:11.225031Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012593266368651:2642] txid# 281474976715664 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\014ordinaryuser\022\030\022\026\n\024all-users@well-known\032\334\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzQ4MTk5MSwiaWF0IjoxNzQzNDM4NzkxLCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.10HIDW2REJBelCeiTRo5pILP3VE6RCg9zHj48Q8wtgywCPsl_MT9dWEb-XFeGbACoGnVja_zkR-1MXdEtsp2QyP-3EJbyNr31G3HvUPDtEdX4XAJCGxOR5_A_lHPPKurE4S92-Mkzual9jGD3mbnqWVbuXJh8scDaMvIgR-fhowvkYPqBaSrdcgIFq1JosF1JO-AEs4eT4GzPUFTwKDXAHtdARIaM_5QilaolxT8dOj_cVQRKNCK75pcHN3DyybwnMmnIUU6kmARJ4Fmdh5U5uyWdUTH_u-OlXe2PTwmBXSNbTAc9gMHgQfCaJw0UdkywIvw5X5PvUMINmuzy8EB1Q\"\005Login*\210\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzQ4MTk5MSwiaWF0IjoxNzQzNDM4NzkxLCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.**" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:54310" 2025-03-31T16:33:11.225113Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012593266368651:2642] txid# 281474976715664 Bootstrap, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-31T16:33:11.225137Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012593266368651:2642] txid# 281474976715664 Bootstrap, UserSID: ordinaryuser IsClusterAdministrator: 0 2025-03-31T16:33:11.225312Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012593266368651:2642] txid# 281474976715664 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-03-31T16:33:11.225350Z node 59 :TX_PROXY DEBUG: Actor# 
[59:7488012593266368651:2642] txid# 281474976715664 HandleResolveDatabase, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-03-31T16:33:11.225428Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012593266368651:2642] txid# 281474976715664 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:33:11.225735Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012593266368651:2642] txid# 281474976715664 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:33:11.225762Z node 59 :TX_PROXY ERROR: Actor# [59:7488012593266368651:2642] txid# 281474976715664, Access denied for ordinaryuser, attempt to manage user 2025-03-31T16:33:11.225830Z node 59 :TX_PROXY ERROR: Actor# [59:7488012593266368651:2642] txid# 281474976715664, issues: { message: "Access denied for ordinaryuser" issue_code: 200000 severity: 1 } 2025-03-31T16:33:11.225848Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012593266368651:2642] txid# 281474976715664 SEND to# [59:7488012593266368650:2361] Source {TEvProposeTransactionStatus Status# 5} 2025-03-31T16:33:11.226116Z node 59 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=59&id=NzllZTM0OTYtZTZlZmQ1MTctODg4MzcxNjgtNGRiYWExMmY=, ActorId: [59:7488012593266368641:2361], ActorState: ExecuteState, TraceId: 01jqpgsgh84k9y6xf5qsrmrcgd, Create QueryResponse for error on request, msg: 2025-03-31T16:33:11.226363Z node 59 :TX_PROXY DEBUG: actor# [59:7488012571791531292:2113] Handle TEvExecuteKqpTransaction 2025-03-31T16:33:11.226390Z node 59 :TX_PROXY DEBUG: actor# [59:7488012571791531292:2113] TxId# 281474976715665 ProcessProposeKqpTransaction >> TInterconnectTest::TestManyEventsWithReconnect [GOOD] >> TInterconnectTest::TestEventWithPayloadSerialization >> TTabletLabeledCountersAggregator::HeavyAggregation [GOOD] >> TTabletLabeledCountersAggregator::DbAggregation >> TSchemeShardTopicSplitMergeTest::MargePartitions >> TSchemeShardTopicSplitMergeTest::SplitTwoPartitions >> TSchemeShardTopicSplitMergeTest::MargeUnorderedPartitions >> KqpNotNullColumns::CreateIndexedTableWithDisabledNotNullDataColumns [GOOD] >> KqpNotNullColumns::Describe >> TSchemeShardTopicSplitMergeTest::SplitWithWrongPartition >> TSchemeShardTopicSplitMergeTest::Boot >> TSchemeShardTopicSplitMergeTest::SplitWithOnePartition >> TSchemeShardTopicSplitMergeTest::CreateTopicWithOnePartition >> TSchemeShardTopicSplitMergeTest::SplitWithManyPartition >> TTabletLabeledCountersAggregator::DbAggregation [GOOD] >> TInterconnectTest::TestEventWithPayloadSerialization [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestEventWithPayloadSerialization [GOOD] Test command err: Starting iteration 0 Starting iteration 1 Starting iteration 2 Starting iteration 3 Starting iteration 4 Starting iteration 5 Starting iteration 6 Starting iteration 7 Starting iteration 8 Starting iteration 9 Starting iteration 10 Starting iteration 11 Starting iteration 12 Starting iteration 13 Starting iteration 14 Starting iteration 15 Starting iteration 16 Starting iteration 17 Starting iteration 18 Starting iteration 19 Starting iteration 20 Starting iteration 21 Starting iteration 22 Starting iteration 23 Starting iteration 24 Starting iteration 25 Starting iteration 26 Starting iteration 27 Starting iteration 28 Starting iteration 29 Starting iteration 30 Starting iteration 31 Starting iteration 32 Starting iteration 33 Starting iteration 34 Starting iteration 35 
Starting iteration 36 Starting iteration 37 Starting iteration 38 Starting iteration 39 Starting iteration 40 Starting iteration 41 Starting iteration 42 Starting iteration 43 Starting iteration 44 Starting iteration 45 Starting iteration 46 Starting iteration 47 Starting iteration 48 Starting iteration 49 0 0 0 1 0 3 0 7 0 15 0 31 0 63 0 127 0 255 0 511 0 1023 0 2047 0 4095 0 8191 0 16383 0 32767 0 65535 1 0 1 1 1 3 1 7 1 15 1 31 1 63 1 127 1 255 1 511 1 1023 1 2047 1 4095 1 8191 1 16383 1 32767 1 65535 3 0 3 1 3 3 3 7 3 15 3 31 3 63 3 127 3 255 3 511 3 1023 3 2047 3 4095 3 8191 3 16383 3 32767 3 65535 7 0 7 1 7 3 7 7 7 15 7 31 7 63 7 127 7 255 7 511 7 1023 7 2047 7 4095 7 8191 7 16383 7 32767 7 65535 15 0 15 1 15 3 15 7 15 15 15 31 15 63 15 127 15 255 15 511 15 1023 15 2047 15 4095 15 8191 15 16383 15 32767 15 65535 31 0 31 1 31 3 31 7 31 15 31 31 31 63 31 127 31 255 31 511 31 1023 31 2047 31 4095 31 8191 31 16383 31 32767 31 65535 63 0 63 1 63 3 63 7 63 15 63 31 63 63 63 127 63 255 63 511 63 1023 63 2047 63 4095 63 8191 63 16383 63 32767 63 65535 127 0 127 1 127 3 127 7 127 15 127 31 127 63 127 127 127 255 127 511 127 1023 127 2047 127 4095 127 8191 127 16383 127 32767 127 65535 255 0 255 1 255 3 255 7 255 15 255 31 255 63 255 127 255 255 255 511 255 1023 255 2047 255 4095 255 8191 255 16383 255 32767 255 65535 511 0 511 1 511 3 511 7 511 15 511 31 511 63 511 127 511 255 511 511 511 1023 511 2047 511 4095 511 8191 511 16383 511 32767 511 65535 1023 0 1023 1 1023 3 1023 7 1023 15 1023 31 1023 63 1023 127 1023 255 1023 511 1023 1023 1023 2047 1023 4095 1023 8191 1023 16383 1023 32767 1023 65535 2047 0 2047 1 2047 3 2047 7 2047 15 2047 31 2047 63 2047 127 2047 255 2047 511 2047 1023 2047 2047 2047 4095 2047 8191 2047 16383 2047 32767 2047 65535 4095 0 4095 1 4095 3 4095 7 4095 15 4095 31 4095 63 4095 127 4095 255 4095 511 4095 1023 4095 2047 4095 4095 4095 8191 4095 16383 4095 32767 4095 65535 8191 0 8191 1 8191 3 8191 7 8191 15 8191 31 8191 63 8191 127 8191 255 8191 511 8191 1023 8191 2047 8191 4095 8191 8191 8191 16383 8191 32767 8191 65535 16383 0 16383 1 16383 3 16383 7 16383 15 16383 31 16383 63 16383 127 16383 255 16383 511 16383 1023 16383 2047 16383 4095 16383 8191 16383 16383 16383 32767 16383 65535 32767 0 32767 1 32767 3 32767 7 32767 15 32767 31 32767 63 32767 127 32767 255 32767 511 32767 1023 32767 2047 32767 4095 32767 8191 32767 16383 32767 32767 32767 65535 65535 0 65535 1 65535 3 65535 7 65535 15 65535 31 65535 63 65535 127 65535 255 65535 511 65535 1023 65535 2047 65535 4095 65535 8191 65535 16383 65535 32767 65535 65535 >> KikimrIcGateway::TestCreateExternalTable [GOOD] >> KikimrIcGateway::TestCreateResourcePool >> TSchemeShardTopicSplitMergeTest::Boot [GOOD] >> TSchemeShardTopicSplitMergeTest::CreateTopicWithManyPartition >> KikimrIcGateway::TestCreateSameExternalTable [GOOD] >> KikimrIcGateway::TestDropExternalTable |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TBSV::CleanupDroppedVolumesOnRestart |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletLabeledCountersAggregator::DbAggregation [GOOD] Test command err: 2025-03-31T16:33:08.125459Z node 1 :TABLET_AGGREGATOR INFO: aggregator new request V2 
[1:7:2054] 2025-03-31T16:33:08.125690Z node 1 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [1:7:2054] self [1:8:2055] worker 0 2025-03-31T16:33:08.125744Z node 1 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [1:7:2054] self [1:9:2056] worker 1 2025-03-31T16:33:08.125764Z node 1 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [1:7:2054] self [1:10:2057] worker 2 2025-03-31T16:33:08.125780Z node 1 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [1:7:2054] self [1:11:2058] worker 3 2025-03-31T16:33:08.125793Z node 1 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [1:7:2054] self [1:12:2059] worker 4 2025-03-31T16:33:08.125817Z node 1 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [1:7:2054] self [1:13:2060] worker 5 2025-03-31T16:33:08.125845Z node 1 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [1:7:2054] self [1:14:2061] worker 6 2025-03-31T16:33:08.125862Z node 1 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [1:7:2054] self [1:15:2062] worker 7 2025-03-31T16:33:08.125875Z node 1 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [1:7:2054] self [1:16:2063] worker 8 2025-03-31T16:33:08.125899Z node 1 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [1:7:2054] self [1:17:2064] worker 9 Sending message to [1:9:2056] from [1:7:2054] id 1 Sending message to [1:10:2057] from [1:7:2054] id 2 Sending message to [1:11:2058] from [1:7:2054] id 3 Sending message to [1:12:2059] from [1:7:2054] id 4 Sending message to [1:13:2060] from [1:7:2054] id 5 Sending message to [1:14:2061] from [1:7:2054] id 6 Sending message to [1:15:2062] from [1:7:2054] id 7 Sending message to [1:16:2063] from [1:7:2054] id 8 Sending message to [1:17:2064] from [1:7:2054] id 9 Sending message to [1:8:2055] from [1:7:2054] id 10 2025-03-31T16:33:08.824714Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor request to node 5 [1:13:2060] 2025-03-31T16:33:08.824821Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor request to node 6 [1:14:2061] 2025-03-31T16:33:08.824876Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor request to node 7 [1:15:2062] 2025-03-31T16:33:08.824922Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor request to node 8 [1:16:2063] 2025-03-31T16:33:08.824980Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor request to node 9 [1:17:2064] 2025-03-31T16:33:08.825280Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor request to node 10 [1:8:2055] 2025-03-31T16:33:08.825326Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor request to node 1 [1:9:2056] 2025-03-31T16:33:08.825387Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor request to node 2 [1:10:2057] 2025-03-31T16:33:08.825458Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor request to node 3 [1:11:2058] 2025-03-31T16:33:08.825526Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor request to node 4 [1:12:2059] 2025-03-31T16:33:08.825583Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 6 [1:14:2061] 2025-03-31T16:33:08.826954Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 6 [1:14:2061] 2025-03-31T16:33:08.856179Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:14:2061] Initiator [1:7:2054] 2025-03-31T16:33:08.871093Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 7 [1:15:2062] 2025-03-31T16:33:08.872486Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 7 [1:15:2062] 2025-03-31T16:33:08.894883Z node 1 :TABLET_AGGREGATOR INFO: 
aggregator request processed [1:15:2062] Initiator [1:7:2054] 2025-03-31T16:33:08.910090Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 8 [1:16:2063] 2025-03-31T16:33:08.911235Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 8 [1:16:2063] 2025-03-31T16:33:08.940826Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:16:2063] Initiator [1:7:2054] 2025-03-31T16:33:08.963138Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 9 [1:17:2064] 2025-03-31T16:33:08.964584Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 9 [1:17:2064] 2025-03-31T16:33:08.985913Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:17:2064] Initiator [1:7:2054] 2025-03-31T16:33:09.006526Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 6 [1:7:2054] 2025-03-31T16:33:09.006650Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 6 [1:7:2054] 2025-03-31T16:33:09.011841Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 10 [1:8:2055] 2025-03-31T16:33:09.013400Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 10 [1:8:2055] 2025-03-31T16:33:09.036149Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:8:2055] Initiator [1:7:2054] 2025-03-31T16:33:09.054035Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 1 [1:9:2056] 2025-03-31T16:33:09.055018Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 1 [1:9:2056] 2025-03-31T16:33:09.076340Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:9:2056] Initiator [1:7:2054] 2025-03-31T16:33:09.094196Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 2 [1:10:2057] 2025-03-31T16:33:09.095669Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 2 [1:10:2057] 2025-03-31T16:33:09.128486Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:10:2057] Initiator [1:7:2054] 2025-03-31T16:33:09.151383Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 3 [1:11:2058] 2025-03-31T16:33:09.152783Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 3 [1:11:2058] 2025-03-31T16:33:09.179036Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:11:2058] Initiator [1:7:2054] 2025-03-31T16:33:09.201358Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 4 [1:12:2059] 2025-03-31T16:33:09.202780Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 4 [1:12:2059] 2025-03-31T16:33:09.232061Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:12:2059] Initiator [1:7:2054] 2025-03-31T16:33:09.245217Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 5 [1:13:2060] 2025-03-31T16:33:09.246200Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 5 [1:13:2060] 2025-03-31T16:33:09.271154Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:13:2060] Initiator [1:7:2054] 2025-03-31T16:33:09.291831Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 7 [1:7:2054] 2025-03-31T16:33:09.291946Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 7 [1:7:2054] 2025-03-31T16:33:09.296099Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 8 [1:7:2054] 2025-03-31T16:33:09.296187Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 8 [1:7:2054] 2025-03-31T16:33:09.299811Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 
9 [1:7:2054] 2025-03-31T16:33:09.299894Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 9 [1:7:2054] 2025-03-31T16:33:09.304212Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 0 [1:7:2054] 2025-03-31T16:33:09.304299Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 0 [1:7:2054] 2025-03-31T16:33:09.307733Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 1 [1:7:2054] 2025-03-31T16:33:09.307816Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 1 [1:7:2054] 2025-03-31T16:33:09.311182Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 2 [1:7:2054] 2025-03-31T16:33:09.311262Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 2 [1:7:2054] 2025-03-31T16:33:09.316768Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 3 [1:7:2054] 2025-03-31T16:33:09.316857Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 3 [1:7:2054] 2025-03-31T16:33:09.321124Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 4 [1:7:2054] 2025-03-31T16:33:09.321210Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 4 [1:7:2054] 2025-03-31T16:33:09.325129Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 5 [1:7:2054] 2025-03-31T16:33:09.325265Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 5 [1:7:2054] 2025-03-31T16:33:09.329902Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:7:2054] Initiator [1:6:2053] TEST 2 10 duration 1.322068s 2025-03-31T16:33:09.593114Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 [2:7:2054] 2025-03-31T16:33:09.593580Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:8:2055] worker 0 2025-03-31T16:33:09.593611Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:9:2056] worker 1 2025-03-31T16:33:09.593627Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:10:2057] worker 2 2025-03-31T16:33:09.593644Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:11:2058] worker 3 2025-03-31T16:33:09.593664Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:12:2059] worker 4 2025-03-31T16:33:09.593688Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:13:2060] worker 5 2025-03-31T16:33:09.593710Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:14:2061] worker 6 2025-03-31T16:33:09.593747Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:15:2062] worker 7 2025-03-31T16:33:09.593795Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:16:2063] worker 8 2025-03-31T16:33:09.593867Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:17:2064] worker 9 2025-03-31T16:33:09.593905Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:18:2065] worker 10 2025-03-31T16:33:09.593929Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:19:2066] worker 11 2025-03-31T16:33:09.593952Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:20:2067] worker 12 2025-03-31T16:33:09.593973Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:21:2068] worker 13 
2025-03-31T16:33:09.593995Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:22:2069] worker 14 2025-03-31T16:33:09.594027Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:23:2070] worker 15 2025-03-31T16:33:09.594077Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:24:2071] worker 16 2025-03-31T16:33:09.594101Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:25:2072] worker 17 2025-03-31T16:33:09.594125Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:26:2073] worker 18 2025-03-31T16:33:09.594147Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:27:2074] worker 19 Sending message to [2:9:2056] from [2:7:2054] id 1 Sending message to [2:10:2057] from [2:7:2054] id 2 Sending message to [2:11:2058] from [2:7:2054] id 3 Sending message to [2:12:2059] from [2:7:2054] id 4 Sending message to [2:13:2060] from [2:7:2054] id 5 Sending message to [2:14:2061] from [2:7:2054] id 6 Sending message to [2:15:2062] from [2:7:2054] id 7 Sending message to [2:16:2063] from [2:7:2054] id 8 Sending message to [2:17:2064] from [2:7: ... response node 17 [2:7:2054] 2025-03-31T16:33:10.490739Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 18 [2:7:2054] 2025-03-31T16:33:10.490751Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 18 [2:7:2054] 2025-03-31T16:33:10.490778Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 19 [2:7:2054] 2025-03-31T16:33:10.490814Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 19 [2:7:2054] 2025-03-31T16:33:10.490834Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 0 [2:7:2054] 2025-03-31T16:33:10.490849Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 0 [2:7:2054] 2025-03-31T16:33:10.490877Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 11 [2:7:2054] 2025-03-31T16:33:10.490888Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 11 [2:7:2054] 2025-03-31T16:33:10.490953Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 12 [2:7:2054] 2025-03-31T16:33:10.490969Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 12 [2:7:2054] 2025-03-31T16:33:10.490988Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 6 [2:7:2054] 2025-03-31T16:33:10.491045Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 6 [2:7:2054] 2025-03-31T16:33:10.494150Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 7 [2:7:2054] 2025-03-31T16:33:10.494238Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 7 [2:7:2054] 2025-03-31T16:33:10.497808Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 8 [2:7:2054] 2025-03-31T16:33:10.497918Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 8 [2:7:2054] 2025-03-31T16:33:10.501237Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 9 [2:7:2054] 2025-03-31T16:33:10.501321Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 9 [2:7:2054] 2025-03-31T16:33:10.505072Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 10 [2:7:2054] 2025-03-31T16:33:10.505156Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 10 [2:7:2054] 2025-03-31T16:33:10.508655Z node 2 :TABLET_AGGREGATOR INFO: 
aggregator actor got response node 1 [2:7:2054] 2025-03-31T16:33:10.508735Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 1 [2:7:2054] 2025-03-31T16:33:10.512122Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 2 [2:7:2054] 2025-03-31T16:33:10.512212Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 2 [2:7:2054] 2025-03-31T16:33:10.517002Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 3 [2:7:2054] 2025-03-31T16:33:10.517089Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 3 [2:7:2054] 2025-03-31T16:33:10.520431Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 4 [2:7:2054] 2025-03-31T16:33:10.520520Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 4 [2:7:2054] 2025-03-31T16:33:10.523968Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 5 [2:7:2054] 2025-03-31T16:33:10.524045Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 5 [2:7:2054] 2025-03-31T16:33:10.527499Z node 2 :TABLET_AGGREGATOR INFO: aggregator request processed [2:7:2054] Initiator [2:6:2053] TEST 2 20 duration 1.036892s 2025-03-31T16:33:10.654652Z node 3 :TABLET_AGGREGATOR INFO: aggregator new request V2 [3:7:2054] 2025-03-31T16:33:10.654730Z node 3 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [3:7:2054] self [3:8:2055] worker 0 Sending message to [3:8:2055] from [3:7:2054] id 1 Sending message to [3:8:2055] from [3:7:2054] id 2 Sending message to [3:8:2055] from [3:7:2054] id 3 Sending message to [3:8:2055] from [3:7:2054] id 4 Sending message to [3:8:2055] from [3:7:2054] id 5 Sending message to [3:8:2055] from [3:7:2054] id 6 Sending message to [3:8:2055] from [3:7:2054] id 7 Sending message to [3:8:2055] from [3:7:2054] id 8 Sending message to [3:8:2055] from [3:7:2054] id 9 Sending message to [3:8:2055] from [3:7:2054] id 10 2025-03-31T16:33:11.184963Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 1 [3:8:2055] 2025-03-31T16:33:11.185015Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 2 [3:8:2055] 2025-03-31T16:33:11.185059Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 3 [3:8:2055] 2025-03-31T16:33:11.185157Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 4 [3:8:2055] 2025-03-31T16:33:11.185217Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 5 [3:8:2055] 2025-03-31T16:33:11.185264Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 6 [3:8:2055] 2025-03-31T16:33:11.185314Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 7 [3:8:2055] 2025-03-31T16:33:11.185344Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 8 [3:8:2055] 2025-03-31T16:33:11.185388Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 9 [3:8:2055] 2025-03-31T16:33:11.185417Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 10 [3:8:2055] 2025-03-31T16:33:11.185779Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 1 [3:8:2055] 2025-03-31T16:33:11.187119Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 1 [3:8:2055] 2025-03-31T16:33:11.210739Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 2 [3:8:2055] 2025-03-31T16:33:11.212043Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 2 [3:8:2055] 2025-03-31T16:33:11.236745Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 3 [3:8:2055] 
2025-03-31T16:33:11.237870Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 3 [3:8:2055] 2025-03-31T16:33:11.269435Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 4 [3:8:2055] 2025-03-31T16:33:11.270651Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 4 [3:8:2055] 2025-03-31T16:33:11.296520Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 5 [3:8:2055] 2025-03-31T16:33:11.297601Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 5 [3:8:2055] 2025-03-31T16:33:11.327637Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 6 [3:8:2055] 2025-03-31T16:33:11.328702Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 6 [3:8:2055] 2025-03-31T16:33:11.353310Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 7 [3:8:2055] 2025-03-31T16:33:11.354407Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 7 [3:8:2055] 2025-03-31T16:33:11.379409Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 8 [3:8:2055] 2025-03-31T16:33:11.380451Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 8 [3:8:2055] 2025-03-31T16:33:11.402712Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 9 [3:8:2055] 2025-03-31T16:33:11.403877Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 9 [3:8:2055] 2025-03-31T16:33:11.429334Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 10 [3:8:2055] 2025-03-31T16:33:11.431028Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 10 [3:8:2055] 2025-03-31T16:33:11.481402Z node 3 :TABLET_AGGREGATOR INFO: aggregator request processed [3:8:2055] Initiator [3:7:2054] 2025-03-31T16:33:11.690924Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 0 [3:7:2054] 2025-03-31T16:33:11.691577Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 0 [3:7:2054] 2025-03-31T16:33:11.734187Z node 3 :TABLET_AGGREGATOR INFO: aggregator request processed [3:7:2054] Initiator [3:6:2053] TEST 2 1 duration 1.213237s 2025-03-31T16:33:11.945795Z node 4 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [4:6:2053] self [4:7:2054] worker 0 Sending message to [4:7:2054] from [4:7:2054] id 1 Sending message to [4:7:2054] from [4:7:2054] id 2 Sending message to [4:7:2054] from [4:7:2054] id 3 Sending message to [4:7:2054] from [4:7:2054] id 4 Sending message to [4:7:2054] from [4:7:2054] id 5 Sending message to [4:7:2054] from [4:7:2054] id 6 Sending message to [4:7:2054] from [4:7:2054] id 7 Sending message to [4:7:2054] from [4:7:2054] id 8 Sending message to [4:7:2054] from [4:7:2054] id 9 Sending message to [4:7:2054] from [4:7:2054] id 10 2025-03-31T16:33:12.792335Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 1 [4:7:2054] 2025-03-31T16:33:12.792405Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 2 [4:7:2054] 2025-03-31T16:33:12.792434Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 3 [4:7:2054] 2025-03-31T16:33:12.792461Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 4 [4:7:2054] 2025-03-31T16:33:12.792562Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 5 [4:7:2054] 2025-03-31T16:33:12.792607Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 6 [4:7:2054] 2025-03-31T16:33:12.792647Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 7 [4:7:2054] 2025-03-31T16:33:12.792687Z node 4 
:TABLET_AGGREGATOR INFO: aggregator actor request to node 8 [4:7:2054] 2025-03-31T16:33:12.792723Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 9 [4:7:2054] 2025-03-31T16:33:12.792767Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 10 [4:7:2054] 2025-03-31T16:33:12.793157Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 1 [4:7:2054] 2025-03-31T16:33:12.795287Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 1 [4:7:2054] 2025-03-31T16:33:12.833570Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 2 [4:7:2054] 2025-03-31T16:33:12.835542Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 2 [4:7:2054] 2025-03-31T16:33:12.876491Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 3 [4:7:2054] 2025-03-31T16:33:12.878270Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 3 [4:7:2054] 2025-03-31T16:33:12.922099Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 4 [4:7:2054] 2025-03-31T16:33:12.923957Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 4 [4:7:2054] 2025-03-31T16:33:12.963473Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 5 [4:7:2054] 2025-03-31T16:33:12.965449Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 5 [4:7:2054] 2025-03-31T16:33:13.022174Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 6 [4:7:2054] 2025-03-31T16:33:13.024172Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 6 [4:7:2054] 2025-03-31T16:33:13.061868Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 7 [4:7:2054] 2025-03-31T16:33:13.063724Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 7 [4:7:2054] 2025-03-31T16:33:13.104234Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 8 [4:7:2054] 2025-03-31T16:33:13.105995Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 8 [4:7:2054] 2025-03-31T16:33:13.144552Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 9 [4:7:2054] 2025-03-31T16:33:13.146639Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 9 [4:7:2054] 2025-03-31T16:33:13.188141Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 10 [4:7:2054] 2025-03-31T16:33:13.190086Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 10 [4:7:2054] 2025-03-31T16:33:13.259738Z node 4 :TABLET_AGGREGATOR INFO: aggregator request processed [4:7:2054] Initiator [4:6:2053] TEST 2 1 duration 1.779056s |95.7%| [TA] $(B)/ydb/core/actorlib_impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.7%| [TA] {RESULT} $(B)/ydb/core/actorlib_impl/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TBSV::ShardsNotLeftInShardsToDelete >> TSchemeShardTopicSplitMergeTest::SplitWithWrongPartition [GOOD] >> TSchemeShardTopicSplitMergeTest::SplitTwoPartitions [GOOD] >> TSchemeShardTopicSplitMergeTest::SplitInactivePartition >> TSchemeShardTopicSplitMergeTest::MargeUnorderedPartitions [GOOD] >> TSchemeShardTopicSplitMergeTest::MargePartitions2 >> TSchemeShardTopicSplitMergeTest::CreateTopicWithOnePartition [GOOD] >> TSchemeShardTopicSplitMergeTest::DisableSplitMerge >> TSchemeShardTopicSplitMergeTest::SplitWithManyPartition [GOOD] >> TSchemeShardTopicSplitMergeTest::SplitWithOnePartition [GOOD] >> KqpRanges::IsNullInValue [GOOD] >> KqpRanges::IsNullInJsonValue |95.7%| [TA] $(B)/ydb/core/tablet/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.7%| [TA] {RESULT} $(B)/ydb/core/tablet/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardTopicSplitMergeTest::MargePartitions [GOOD] >> TSchemeShardTopicSplitMergeTest::MargeNotAdjacentRangePartitions >> GenericFederatedQuery::PostgreSQLFilterPushdown [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::SplitWithWrongPartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:33:14.737385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:33:14.737467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:33:14.737506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:33:14.737551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:33:14.738197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:33:14.738232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:33:14.738308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:33:14.738389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:33:14.739489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:33:14.825569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:33:14.825616Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:33:14.829662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:33:14.829744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:33:14.829906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:33:14.834556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:33:14.834740Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:33:14.835332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:14.835530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:33:14.837123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:14.838096Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:33:14.838149Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:14.838305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:33:14.838343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:33:14.838382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:33:14.838517Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:33:14.844701Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:33:14.979686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:33:14.979947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:14.980146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:33:14.980350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:33:14.980398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:14.982831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:14.982971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:33:14.983165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:14.983226Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:33:14.983264Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:33:14.983304Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:33:14.985321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:14.985389Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:33:14.985441Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:33:14.987265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:14.987312Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:14.987370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:14.987417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:33:14.991209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:33:14.993191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:33:14.993485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:33:14.994495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:14.994629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:33:14.994680Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:14.994979Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:33:14.995036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:14.995219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:33:14.995305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:33:14.997661Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:33:14.997719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:33:14.997899Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:14.997936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:33:14.998152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:14.998206Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:33:14.998359Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:33:14.998391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:14.998428Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:33:14.998456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:14.998491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:33:14.998529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:14.998562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:33:14.998641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:33:14.998711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:33:14.998747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:33:14.998782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:33:15.000858Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:33:15.000965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:33:15.001007Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
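The TTxPublishToSchemeBoard / TEvUpdateAck exchange above follows a fixed pattern: when an operation reaches TDone, each affected path is published at a bumped version, and the transaction counts as fully published only once every (pathId, version) pair has been acknowledged ("Publication still in progress, tx: 1, publications: 1"). A minimal sketch of that bookkeeping, with invented names that do not correspond to the real schemeshard types:

```cpp
// Illustrative sketch only: models the "publications: N" bookkeeping visible
// in the log, not YDB's actual TTxPublishToSchemeBoard implementation.
#include <cstdint>
#include <iostream>
#include <map>

struct PublicationTracker {
    // Pending (pathId -> expected version) entries for one transaction.
    std::map<uint64_t, uint64_t> pending;

    // Returns true once every path has been acknowledged at (or past) the
    // version recorded when the operation completed.
    bool Ack(uint64_t pathId, uint64_t version) {
        auto it = pending.find(pathId);
        if (it != pending.end() && version >= it->second) {
            pending.erase(it); // "AckPublish ... pathId [...], version: V"
        }
        return pending.empty(); // "Publication complete, notify & remove"
    }
};

int main() {
    PublicationTracker tx104;
    tx104.pending = {{2, 5}, {3, 2}}; // "publications: 2" as in the log below
    std::cout << std::boolalpha;
    std::cout << tx104.Ack(2, 5) << "\n"; // false: one publication in flight
    std::cout << tx104.Ack(3, 2) << "\n"; // true: notify subscribers now
}
```

The `>=` version check is an assumption; the log only ever shows exact-version acknowledgements.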
57594046678944 message# TabletId: 72075186233409548 TxId: 104 Status: OK 2025-03-31T16:33:15.345408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionAttachResult CollectPQConfigChanged: false 2025-03-31T16:33:15.345447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 can't persist state: ShardsInProgress is not empty, remain: 1 2025-03-31T16:33:15.347563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-03-31T16:33:15.347822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-03-31T16:33:15.347863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-03-31T16:33:15.348267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 104, at schemeshard: 72057594046678944 2025-03-31T16:33:15.348315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-03-31T16:33:15.348366Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 104, at schemeshard: 72057594046678944 2025-03-31T16:33:15.382408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 150, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:15.382548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 AckTo { RawX1: 0 RawX2: 0 } } Step: 150 MediatorID: 72075186233409547 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:33:15.382618Z node 1 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 104:0 HandleReply TEvOperationPlan, step: 150, at tablet: 72057594046678944 2025-03-31T16:33:15.382698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 can't persist state: ShardsInProgress is not empty, remain: 1 2025-03-31T16:33:15.411548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409548, partId: 0 2025-03-31T16:33:15.411673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 104 Step: 150 2025-03-31T16:33:15.411730Z node 1 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 104 Step: 150 2025-03-31T16:33:15.411779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:15.411814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-03-31T16:33:15.411938Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 128 -> 240 2025-03-31T16:33:15.412075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-31T16:33:15.412118Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-03-31T16:33:15.414055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-31T16:33:15.414189Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:33:15.414225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T16:33:15.414343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-03-31T16:33:15.414511Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:15.414577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 104, path id: 2 2025-03-31T16:33:15.414644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 104, path id: 3 2025-03-31T16:33:15.415312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-31T16:33:15.415351Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2025-03-31T16:33:15.415425Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-03-31T16:33:15.415447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-31T16:33:15.415483Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-03-31T16:33:15.415517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-31T16:33:15.415544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-03-31T16:33:15.415583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-31T16:33:15.415611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-03-31T16:33:15.415630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-03-31T16:33:15.415737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-03-31T16:33:15.415766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 104, publications: 2, subscribers: 1 2025-03-31T16:33:15.415798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2025-03-31T16:33:15.415820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-03-31T16:33:15.416656Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2025-03-31T16:33:15.416761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 
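The `tests -- TTxNotificationSubscriber` lines around this point show the completion handshake: a test sends EvNotifyTxCompletion for a txId and is answered either once the transaction finishes ("satisfy waiter") or immediately if the schemeshard no longer knows the transaction, as happens for txId 105 further down ("NotifyTxCompletion, unknown transaction"). A hedged sketch of that subscribe/notify pattern; the class and method names here are illustrative, not YDB's:

```cpp
// Sketch of the wait-notification pattern in these tests: a waiter subscribes
// to a txId; if the tx is already gone it is satisfied immediately, otherwise
// it is parked until completion. All names are invented for illustration.
#include <cstdint>
#include <functional>
#include <iostream>
#include <map>
#include <set>
#include <vector>

class TxNotifier {
    std::set<uint64_t> inFlight;
    std::map<uint64_t, std::vector<std::function<void()>>> waiters;

public:
    void Begin(uint64_t txId) { inFlight.insert(txId); }

    void Subscribe(uint64_t txId, std::function<void()> cb) {
        if (!inFlight.count(txId)) {
            cb(); // unknown or already-finished tx: satisfy waiter at once
        } else {
            waiters[txId].push_back(std::move(cb)); // park until completion
        }
    }

    void Complete(uint64_t txId) {
        inFlight.erase(txId);
        for (auto& cb : waiters[txId]) cb(); // "satisfy waiter [...]"
        waiters.erase(txId);
    }
};

int main() {
    TxNotifier n;
    n.Begin(104);
    n.Subscribe(104, [] { std::cout << "tx 104 done\n"; }); // parked
    n.Complete(104);                                        // fires the waiter
    n.Subscribe(105, [] { std::cout << "tx 105 done\n"; }); // immediate
}
```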
2025-03-31T16:33:15.416805Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2025-03-31T16:33:15.416839Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-03-31T16:33:15.416871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-03-31T16:33:15.417925Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2025-03-31T16:33:15.417980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2025-03-31T16:33:15.417997Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-03-31T16:33:15.418025Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-03-31T16:33:15.418051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-03-31T16:33:15.418138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 1 2025-03-31T16:33:15.418167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:404:2371] 2025-03-31T16:33:15.421260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-03-31T16:33:15.423253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-03-31T16:33:15.423332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-03-31T16:33:15.423390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:545:2479] TestWaitNotification: OK eventTxId 104 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 7 SplitBoundary: "W" } TestModificationResults wait txId: 105 2025-03-31T16:33:15.435975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 7 SplitBoundary: "W" } } } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:33:15.436128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 105:0, at schemeshard: 72057594046678944 2025-03-31T16:33:15.436289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 105:1, propose status:StatusInvalidParameter, reason: Splitting partition does not exists: 7, at schemeshard: 72057594046678944 2025-03-31T16:33:15.440214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 105, response: Status: StatusInvalidParameter Reason: "Splitting partition does not exists: 7" 
TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:33:15.440350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 105, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Splitting partition does not exists: 7, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-03-31T16:33:15.440599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-03-31T16:33:15.440666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-03-31T16:33:15.441095Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-03-31T16:33:15.441215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-03-31T16:33:15.441252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [1:639:2562] TestWaitNotification: OK eventTxId 105 >> TSchemeShardTopicSplitMergeTest::CreateTopicWithManyPartition [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::SplitWithOnePartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:33:14.737096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:33:14.737251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:33:14.737282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:33:14.737305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:33:14.738177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:33:14.738203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:33:14.738279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:33:14.738356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:33:14.739532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:33:14.811214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:33:14.811258Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:33:14.815381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 
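The txId 105 rejection just above is plain input validation: the AlterPersQueueGroup request asks to split partition 7, the topic has no such partition, and the proposal fails fast with StatusInvalidParameter before any tablet is touched. A small sketch that mirrors only this observable behavior (the error text, grammar included, is copied verbatim from the log; the inactive-partition branch is an assumption suggested by the SplitInactivePartition test name, not something this log demonstrates):

```cpp
// Minimal sketch of the validation that produces StatusInvalidParameter above.
// Mirrors observable behavior only; the real check lives inside TAlterPQ.
#include <cstdint>
#include <iostream>
#include <map>
#include <string>

enum class EStatus { Accepted, InvalidParameter };

struct TPartition { bool Active = true; };

EStatus ValidateSplit(const std::map<uint32_t, TPartition>& partitions,
                      uint32_t partitionId, std::string& reason) {
    auto it = partitions.find(partitionId);
    if (it == partitions.end()) {
        // Error text matches the log verbatim, including its grammar.
        reason = "Splitting partition does not exists: " + std::to_string(partitionId);
        return EStatus::InvalidParameter;
    }
    if (!it->second.Active) { // assumed branch, cf. SplitInactivePartition
        reason = "Splitting partition is not active: " + std::to_string(partitionId);
        return EStatus::InvalidParameter;
    }
    return EStatus::Accepted;
}

int main() {
    std::map<uint32_t, TPartition> topic1 = {{0, {}}, {1, {}}, {2, {}}};
    std::string reason;
    if (ValidateSplit(topic1, 7, reason) == EStatus::InvalidParameter)
        std::cout << reason << "\n"; // "Splitting partition does not exists: 7"
}
```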
2025-03-31T16:33:14.815501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:33:14.815693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:33:14.820508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:33:14.820691Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:33:14.824261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:14.824517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:33:14.829045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:14.835950Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:33:14.836025Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:14.836214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:33:14.836276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:33:14.836338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:33:14.836490Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:33:14.843935Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:33:14.984321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:33:14.984595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:14.984771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:33:14.985043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:33:14.985113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:14.987432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:14.987587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:33:14.987766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:14.987836Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:33:14.987879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:33:14.987926Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:33:14.989841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:14.989903Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:33:14.989983Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:33:14.991783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:14.991831Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:14.991873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:14.991921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:33:14.995376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:33:14.997509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:33:14.997719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:33:14.998646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:14.998762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:33:14.998812Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:14.999055Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:33:14.999120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:14.999289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:33:14.999357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:33:15.001411Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:33:15.001496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:33:15.001655Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:15.001698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:33:15.001974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:15.002035Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:33:15.002147Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:33:15.002185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:15.002228Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:33:15.002269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:15.002299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:33:15.002330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:15.002359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:33:15.002404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:33:15.002461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:33:15.002500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:33:15.002531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:33:15.004174Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:33:15.004318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:33:15.004357Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
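Every operation in these logs steps through the same numeric sub-operation states via the `Change state for txid` lines: 2 -> 3 -> 128 -> 240. The numbers come straight from the log; the labels below are inferred from the neighbouring ProgressState messages (TCreateParts, TConfigureParts, TPropose, TDone) and should be read as a sketch, not the schemeshard's actual enum:

```cpp
// Sketch of the numeric sub-operation states seen in "Change state for txid"
// lines. Labels are inferred; the values 2, 3, 128, 240 come from the log.
#include <iostream>

enum class ETxState {
    CreateParts    = 2,   // "TCreateParts ... ProgressState"
    ConfigureParts = 3,   // "TConfigureParts ... ProgressState"
    Propose        = 128, // "TPropose ProgressState", waits for the plan step
    Done           = 240, // "TDone ... ProgressState": publish and notify
};

ETxState Next(ETxState s) {
    switch (s) {
        case ETxState::CreateParts:    return ETxState::ConfigureParts;
        case ETxState::ConfigureParts: return ETxState::Propose;
        case ETxState::Propose:        return ETxState::Done;
        case ETxState::Done:           return ETxState::Done;
    }
    return ETxState::Done; // unreachable; keeps compilers quiet
}

int main() {
    for (ETxState s = ETxState::CreateParts;; s = Next(s)) {
        std::cout << static_cast<int>(s) << "\n"; // prints 2, 3, 128, 240
        if (s == ETxState::Done) break;
    }
}
```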
} } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:33:15.544049Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:737:2058] recipient: [1:102:2137] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:740:2058] recipient: [1:15:2062] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:741:2058] recipient: [1:739:2653] Leader for TabletID 72057594046678944 is [1:742:2654] sender: [1:743:2058] recipient: [1:739:2653] 2025-03-31T16:33:15.582111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:33:15.582223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:33:15.582258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:33:15.582287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:33:15.582316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:33:15.582342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:33:15.582415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:33:15.582498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:33:15.582797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:33:15.597823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:33:15.598988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:33:15.599200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:33:15.599357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:33:15.599386Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:33:15.599552Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:33:15.600169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 3, at schemeshard: 72057594046678944 2025-03-31T16:33:15.600263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: USER_1, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-31T16:33:15.600295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 2], parent name: USER_1, child name: Topic1, child id: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-03-31T16:33:15.600354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at 
schemeshard: 72057594046678944 2025-03-31T16:33:15.600410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:15.600564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-31T16:33:15.600764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:15.600833Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-03-31T16:33:15.600981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:15.601039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:15.601137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 4, at schemeshard: 72057594046678944 2025-03-31T16:33:15.601176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-31T16:33:15.601219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-31T16:33:15.601235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 0 2025-03-31T16:33:15.601257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-03-31T16:33:15.601331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:15.601429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:15.601594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 14, at schemeshard: 72057594046678944 2025-03-31T16:33:15.601728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-31T16:33:15.601998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:15.602091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:15.602405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:15.602471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:15.602670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:15.602757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:15.602824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:15.602965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:15.603037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 
72057594046678944 2025-03-31T16:33:15.603213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:15.603408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:15.603597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:15.603646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:15.603694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:15.609921Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:33:15.609981Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:15.610991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:33:15.611058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:33:15.611128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:33:15.612848Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:742:2654] sender: [1:801:2058] recipient: [1:15:2062] 2025-03-31T16:33:15.676765Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-31T16:33:15.677025Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 268us result status StatusSuccess 2025-03-31T16:33:15.677677Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 Status: Inactive ChildPartitionIds: 1 ChildPartitionIds: 2 } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { ToBound: "\177" } Status: Active ParentPartitionIds: 0 } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\177" } Status: Active ParentPartitionIds: 0 } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } 
YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { ToBound: "\177" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { FromBound: "\177" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::SplitWithManyPartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:33:14.737113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:33:14.737197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:33:14.737232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:33:14.737261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:33:14.738160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:33:14.738212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:33:14.738281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:33:14.738350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:33:14.739464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:33:14.812180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:33:14.812225Z node 1 :IMPORT WARN: Table profiles were 
not loaded 2025-03-31T16:33:14.816261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:33:14.816331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:33:14.816446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:33:14.820664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:33:14.820824Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:33:14.824257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:14.824527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:33:14.828994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:14.835888Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:33:14.835954Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:14.836135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:33:14.836179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:33:14.836222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:33:14.836342Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:33:14.843993Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:33:14.955919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:33:14.957310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:14.958366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:33:14.959549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:33:14.959624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:14.962669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:14.962799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:33:14.962992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:14.963183Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:33:14.963276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:33:14.963322Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:33:14.965449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:14.965497Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:33:14.965542Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:33:14.967183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:14.967237Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:14.967287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:14.967338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:33:14.971369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:33:14.973224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:33:14.974105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:33:14.975193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:14.975351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:33:14.975409Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:14.976623Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:33:14.976694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:14.976894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:33:14.976985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:33:14.979233Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:33:14.979295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:33:14.979442Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:14.979479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:33:14.979690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:14.979732Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:33:14.979820Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:33:14.979860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:14.979906Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:33:14.979934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:14.979966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:33:14.980004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:14.980034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:33:14.980080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:33:14.980150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:33:14.980190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:33:14.980223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:33:14.982166Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:33:14.982272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:33:14.982307Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
y Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 5 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Inactive ChildPartitionIds: 3 ChildPartitionIds: 4 } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } Partitions { PartitionId: 3 TabletId: 72075186233409548 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\177" } Status: Active ParentPartitionIds: 1 } Partitions { PartitionId: 4 TabletId: 72075186233409548 KeyRange { FromBound: "\177" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active ParentPartitionIds: 1 } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 5 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 5 NextPartitionId: 5 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } } Partitions { PartitionId: 3 GroupId: 4 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 1 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\177" } } Partitions { PartitionId: 4 GroupId: 5 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 1 KeyRange { FromBound: "\177" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 5 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:33:15.558834Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with 
owners number: 0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:757:2058] recipient: [1:102:2137] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:760:2058] recipient: [1:15:2062] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:761:2058] recipient: [1:759:2667] Leader for TabletID 72057594046678944 is [1:762:2668] sender: [1:763:2058] recipient: [1:759:2667] 2025-03-31T16:33:15.608576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:33:15.608683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:33:15.608734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:33:15.608773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:33:15.608818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:33:15.608848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:33:15.608918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:33:15.608998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:33:15.609403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:33:15.628692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:33:15.630271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:33:15.630447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:33:15.630598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:33:15.630642Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:33:15.630842Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:33:15.631665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 3, at schemeshard: 72057594046678944 2025-03-31T16:33:15.631784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: USER_1, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-31T16:33:15.631834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 2], parent name: USER_1, child name: Topic1, child id: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-03-31T16:33:15.631913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:15.631990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 
72057594046678944 2025-03-31T16:33:15.632299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-31T16:33:15.632604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:15.632727Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-03-31T16:33:15.632967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:15.633069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:15.633189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 4, at schemeshard: 72057594046678944 2025-03-31T16:33:15.633248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-31T16:33:15.633290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-31T16:33:15.633319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 0 2025-03-31T16:33:15.633340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-03-31T16:33:15.633493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:15.633601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:15.633852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 14, at schemeshard: 72057594046678944 2025-03-31T16:33:15.634072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-31T16:33:15.634537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:15.634655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:15.635133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:15.635231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:15.635516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:15.635617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:15.635743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:15.635974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:15.636081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:15.636326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 
2025-03-31T16:33:15.636599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:15.636787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:15.636849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:15.636917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:15.646681Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:33:15.646763Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:15.646895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:33:15.646948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:33:15.646989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:33:15.647631Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 >> KqpNewEngine::JoinProjectMulti [GOOD] >> KqpNewEngine::JoinMultiConsumer >> KikimrIcGateway::TestListPath [GOOD] >> KikimrIcGateway::TestDropTable >> KikimrIcGateway::TestLoadTableMetadata [GOOD] >> KikimrIcGateway::TestLoadTokenSecretValueFromExternalDataSourceMetadata >> TSchemeShardTopicSplitMergeTest::SplitInactivePartition [GOOD] >> TSchemeShardTopicSplitMergeTest::MargePartitions2 [GOOD] >> TBSV::ShardsNotLeftInShardsToDelete [GOOD] >> TSchemeShardTopicSplitMergeTest::MargeNotAdjacentRangePartitions [GOOD] >> TBSV::CleanupDroppedVolumesOnRestart [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::CreateTopicWithManyPartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:33:14.737142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:33:14.737253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:33:14.737310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:33:14.737346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:33:14.738195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:33:14.738263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:33:14.738336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, 
WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:33:14.738452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:33:14.739503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:33:14.832175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:33:14.832234Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:33:14.836294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:33:14.836376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:33:14.836495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:33:14.840940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:33:14.841123Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:33:14.841692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:14.841867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:33:14.843386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:14.844297Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:33:14.844343Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:14.844461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:33:14.844498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:33:14.844533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:33:14.844638Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:33:14.851199Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:33:14.999848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:33:15.000129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:15.000377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:33:15.000601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:33:15.000663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:15.003104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:15.003268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:33:15.003456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:15.003509Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:33:15.003575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:33:15.003626Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:33:15.005656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:15.005737Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:33:15.005780Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:33:15.007660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:15.007714Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:15.007767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:15.007822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:33:15.012118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:33:15.014127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:33:15.014332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:33:15.015451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:15.015581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:33:15.015645Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:15.015904Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:33:15.015957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:15.016147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:33:15.016233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:33:15.018329Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:33:15.018385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:33:15.018556Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:15.018614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:33:15.018824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:15.018874Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:33:15.018971Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:33:15.019011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:15.019050Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:33:15.019106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:15.019149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:33:15.019190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:15.019244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:33:15.019281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:33:15.019340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:33:15.019375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:33:15.019412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:33:15.021552Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:33:15.021704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:33:15.021762Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
678944 is [2:123:2149] sender: [2:636:2058] recipient: [2:15:2062] Leader for TabletID 72057594046678944 is [2:123:2149] sender: [2:637:2058] recipient: [2:635:2559] Leader for TabletID 72057594046678944 is [2:638:2560] sender: [2:639:2058] recipient: [2:635:2559] 2025-03-31T16:33:16.092438Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:33:16.092556Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:33:16.092602Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:33:16.092643Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:33:16.092684Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:33:16.092717Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:33:16.092779Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:33:16.092872Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:33:16.093201Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:33:16.112461Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:33:16.114002Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:33:16.114206Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:33:16.114372Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:33:16.114412Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:33:16.114592Z node 2 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:33:16.115416Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 3, at schemeshard: 72057594046678944 2025-03-31T16:33:16.115513Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: USER_1, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-31T16:33:16.115562Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 2], parent name: USER_1, child name: Topic1, child id: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-03-31T16:33:16.115654Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:16.115729Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:16.115977Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 
72057594046678944, LocalPathId: 2] was 1 2025-03-31T16:33:16.116263Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:16.116368Z node 2 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-03-31T16:33:16.116571Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:16.116672Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:16.116792Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 4, at schemeshard: 72057594046678944 2025-03-31T16:33:16.116836Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-31T16:33:16.116875Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-31T16:33:16.116902Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 0 2025-03-31T16:33:16.116924Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-03-31T16:33:16.117022Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:16.117108Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:16.117346Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 14, at schemeshard: 72057594046678944 2025-03-31T16:33:16.117563Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-31T16:33:16.117899Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:16.118014Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:16.118421Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:16.118502Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:16.118736Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:16.118826Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:16.118935Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:16.119158Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:16.119248Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:16.119470Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:16.119695Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 
2025-03-31T16:33:16.119879Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:16.119944Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:16.119996Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:16.126943Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:33:16.127023Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:16.128002Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:33:16.128067Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:33:16.128127Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:33:16.130453Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [2:638:2560] sender: [2:698:2058] recipient: [2:15:2062] 2025-03-31T16:33:16.184256Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-31T16:33:16.184522Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 299us result status StatusSuccess 2025-03-31T16:33:16.185174Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 1024 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 1 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 1024 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 
GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KikimrIcGateway::TestLoadExternalTable [GOOD] >> KikimrIcGateway::TestLoadServiceAccountSecretValueFromExternalDataSourceMetadata ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TBSV::ShardsNotLeftInShardsToDelete [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:33:16.141386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:33:16.141505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:33:16.141547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:33:16.141585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:33:16.142712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:33:16.142759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:33:16.142854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:33:16.142935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:33:16.144258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:33:16.243163Z node 1 
:FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:33:16.243224Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:33:16.247966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:33:16.248070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:33:16.248228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:33:16.253909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:33:16.254202Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:33:16.257625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:16.257929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:33:16.263655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:16.269603Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:33:16.269682Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:16.269849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:33:16.269914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:33:16.269968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:33:16.270122Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:33:16.278101Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:33:16.409596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:33:16.410925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:16.411911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:33:16.413005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:33:16.413065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:16.416166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:16.416282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, 
operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:33:16.416474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:16.416578Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:33:16.416634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:33:16.416675Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:33:16.418623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:16.418672Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:33:16.418713Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:33:16.420555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:16.420611Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:16.420672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:16.420734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:33:16.424894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:33:16.426796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:33:16.427616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:33:16.428480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:16.428590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:33:16.428640Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:16.429827Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:33:16.429879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:16.430079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:33:16.430141Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:33:16.432759Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:33:16.432817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:33:16.432961Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:16.433013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:33:16.433178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:16.433215Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:33:16.433289Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:33:16.433317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:16.433345Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:33:16.433396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:16.433431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:33:16.433472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:16.433505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:33:16.433540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:33:16.433595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:33:16.433623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:33:16.433663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:33:16.435212Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:33:16.435328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:33:16.435379Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
cated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:16.530622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-31T16:33:16.530833Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-03-31T16:33:16.531415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-03-31T16:33:16.531553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409546 Forgetting tablet 72075186233409547 2025-03-31T16:33:16.532533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-31T16:33:16.533527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-03-31T16:33:16.533663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 2025-03-31T16:33:16.534395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:16.534519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:33:16.534585Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropBlockStoreVolume TPropose, operationId: 102:0 HandleReply TEvOperationPlan, step: 5000003, at schemeshard: 72057594046678944 2025-03-31T16:33:16.534687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-31T16:33:16.534820Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-31T16:33:16.534856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T16:33:16.534903Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-31T16:33:16.534940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T16:33:16.535009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:33:16.535064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-31T16:33:16.535141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-03-31T16:33:16.535194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-31T16:33:16.535229Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-03-31T16:33:16.535257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-03-31T16:33:16.535359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-31T16:33:16.535410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-03-31T16:33:16.535447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-03-31T16:33:16.535475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-03-31T16:33:16.537302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-03-31T16:33:16.537378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-03-31T16:33:16.537704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-03-31T16:33:16.537760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-03-31T16:33:16.539257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-03-31T16:33:16.539300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-03-31T16:33:16.539430Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:33:16.539459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:33:16.539587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T16:33:16.539700Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:16.539746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-03-31T16:33:16.539789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 FAKE_COORDINATOR: Erasing txId 102 2025-03-31T16:33:16.540259Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:33:16.540357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:33:16.540407Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-03-31T16:33:16.540444Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-03-31T16:33:16.540482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-31T16:33:16.540805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-31T16:33:16.540848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-31T16:33:16.540927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:33:16.541520Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:33:16.541596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:33:16.541622Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-03-31T16:33:16.541647Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-03-31T16:33:16.541672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:33:16.541739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-03-31T16:33:16.542399Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 2025-03-31T16:33:16.542888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:16.543071Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 2025-03-31T16:33:16.543488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-03-31T16:33:16.545073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-31T16:33:16.546753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-03-31T16:33:16.547020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-31T16:33:16.547229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-03-31T16:33:16.548007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-03-31T16:33:16.548381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-03-31T16:33:16.548435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- 
TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-03-31T16:33:16.548917Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-03-31T16:33:16.549006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-31T16:33:16.549037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:388:2368] TestWaitNotification: OK eventTxId 102 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2025-03-31T16:33:16.549494Z node 1 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2025-03-31T16:33:16.549583Z node 1 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 { Type { Kind: Struct Struct { Member { Name: "ShardsToDelete" Type { Kind: Optional Optional { Item { Kind: Struct Struct { Member { Name: "List" Type { Kind: List List { Item { Kind: Struct Struct { Member { Name: "ShardIdx" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } Member { Name: "Truncated" Type { Kind: Data Data { Scheme: 6 } } } } } } } } } } Value { Struct { Optional { Struct { } Struct { Bool: false } } } } }
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::SplitInactivePartition [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:33:14.737122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:33:14.737225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:33:14.737267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:33:14.737302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:33:14.738183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:33:14.738257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:33:14.738346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:33:14.738442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:33:14.739499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:33:14.831561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:33:14.831622Z node 1
:IMPORT WARN: Table profiles were not loaded 2025-03-31T16:33:14.836015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:33:14.836119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:33:14.836275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:33:14.840965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:33:14.841117Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:33:14.841677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:14.841876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:33:14.843377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:14.844294Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:33:14.844340Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:14.844470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:33:14.844510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:33:14.844549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:33:14.844638Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:33:14.851032Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:33:15.001778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:33:15.001971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:15.002171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:33:15.002352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:33:15.002406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:15.004300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:15.004408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:33:15.004597Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:15.004679Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:33:15.004714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:33:15.004780Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:33:15.006578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:15.006655Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:33:15.006692Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:33:15.008181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:15.008237Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:15.008284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:15.008332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:33:15.011040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:33:15.012610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:33:15.012772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:33:15.013552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:15.013653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:33:15.013697Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:15.013919Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:33:15.013959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:15.014091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:33:15.014157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 
72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:33:15.015820Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:33:15.015854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:33:15.015985Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:15.016025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:33:15.016251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:15.016286Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:33:15.016367Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:33:15.016394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:15.016425Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:33:15.016457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:15.016494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:33:15.016529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:15.016581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:33:15.016634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:33:15.016694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:33:15.016725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:33:15.016750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:33:15.018280Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:33:15.018394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:33:15.018459Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
2057594046678944 2025-03-31T16:33:16.451632Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose ProgressState, operationId: 105:0, at schemeshard: 72057594046678944 2025-03-31T16:33:16.451726Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 105 ready parts: 1/1 2025-03-31T16:33:16.451878Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72075186233409546 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409548 Flags: 2 } ExecLevel: 0 TxId: 105 MinStep: 151 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72075186233409546 2025-03-31T16:33:16.453293Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 105:0 from tablet: 72057594046678944 to tablet: 72075186233409548 cookie: 72057594046678944:3 msg type: 269550082 2025-03-31T16:33:16.453393Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 105:4294967295 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 0:105 msg type: 269090816 2025-03-31T16:33:16.453468Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 105, partId: 4294967295, tablet: 72075186233409546 2025-03-31T16:33:16.453709Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409548, partId: 0 2025-03-31T16:33:16.453853Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: TabletId: 72075186233409548 TxId: 105 Status: OK 2025-03-31T16:33:16.453929Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionAttachResult triggers early, at schemeshard: 72057594046678944 message# TabletId: 72075186233409548 TxId: 105 Status: OK 2025-03-31T16:33:16.453977Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionAttachResult CollectPQConfigChanged: false 2025-03-31T16:33:16.454023Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 105:0 can't persist state: ShardsInProgress is not empty, remain: 1 2025-03-31T16:33:16.455563Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-03-31T16:33:16.455817Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-03-31T16:33:16.455857Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-03-31T16:33:16.456243Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 105, at schemeshard: 72057594046678944 2025-03-31T16:33:16.456288Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 0/1, is published: true 2025-03-31T16:33:16.456351Z node 2 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 105, at schemeshard: 72057594046678944 2025-03-31T16:33:16.490114Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 200, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:16.490267Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 105 AckTo { RawX1: 0 RawX2: 0 } } Step: 200 MediatorID: 72075186233409547 TabletID: 72057594046678944, at 
schemeshard: 72057594046678944 2025-03-31T16:33:16.490342Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 105:0 HandleReply TEvOperationPlan, step: 200, at tablet: 72057594046678944 2025-03-31T16:33:16.490405Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 105:0 can't persist state: ShardsInProgress is not empty, remain: 1 2025-03-31T16:33:16.518873Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409548, partId: 0 2025-03-31T16:33:16.519045Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 200 2025-03-31T16:33:16.519151Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 200 2025-03-31T16:33:16.519210Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 105:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:16.519271Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-03-31T16:33:16.519444Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:0 128 -> 240 2025-03-31T16:33:16.519632Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-03-31T16:33:16.521995Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2025-03-31T16:33:16.522374Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:33:16.522424Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-03-31T16:33:16.522725Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:16.522776Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 105, path id: 3 2025-03-31T16:33:16.523461Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2025-03-31T16:33:16.523521Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 105:0 ProgressState 2025-03-31T16:33:16.523644Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-03-31T16:33:16.523684Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-03-31T16:33:16.523724Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-03-31T16:33:16.523763Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-03-31T16:33:16.523803Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2025-03-31T16:33:16.523854Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-03-31T16:33:16.523897Z node 2 
:FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2025-03-31T16:33:16.523936Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2025-03-31T16:33:16.524073Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-03-31T16:33:16.524115Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 105, publications: 1, subscribers: 1 2025-03-31T16:33:16.524150Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2025-03-31T16:33:16.524819Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2025-03-31T16:33:16.524924Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2025-03-31T16:33:16.524967Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2025-03-31T16:33:16.525010Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-03-31T16:33:16.525054Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-03-31T16:33:16.525141Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 1 2025-03-31T16:33:16.525199Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [2:407:2375] 2025-03-31T16:33:16.530478Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-03-31T16:33:16.530593Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-03-31T16:33:16.530639Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:664:2589] TestWaitNotification: OK eventTxId 105 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "W" } TestModificationResults wait txId: 106 2025-03-31T16:33:16.540125Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "W" } } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:33:16.540376Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 106:0, at schemeshard: 72057594046678944 2025-03-31T16:33:16.540578Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 106:1, propose status:StatusInvalidParameter, reason: Invalid partition status: 2, at schemeshard: 72057594046678944 2025-03-31T16:33:16.542890Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 106, response: Status: StatusInvalidParameter Reason: "Invalid partition status: 2" TxId: 106 SchemeshardId: 
72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:33:16.543071Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 106, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Invalid partition status: 2, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-03-31T16:33:16.543403Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2025-03-31T16:33:16.543447Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2025-03-31T16:33:16.543868Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2025-03-31T16:33:16.543968Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-03-31T16:33:16.544008Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [2:755:2668] TestWaitNotification: OK eventTxId 106
------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::PostgreSQLFilterPushdown [GOOD]
Test command err: Trying to start YDB, gRPC: 1315, MsgBus: 23590 2025-03-31T16:32:57.116949Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012533526418801:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:57.116997Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000f08/r3tmp/tmpcF5Nlu/pdisk_1.dat 2025-03-31T16:32:57.508626Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1315, node 1 2025-03-31T16:32:57.544846Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:57.544982Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:32:57.559444Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:32:57.624195Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:32:57.624222Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:32:57.624234Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:32:57.624385Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23590 TClient is connected to server localhost:23590 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:32:58.154494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:00.069392Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012546411321349:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:00.069518Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:00.300348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-03-31T16:33:00.420477Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012546411321472:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:00.420543Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:00.420656Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012546411321477:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:00.423499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-03-31T16:33:00.433675Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012546411321479:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-31T16:33:00.492299Z node 1 :TX_PROXY ERROR: Actor# [1:7488012546411321519:2398] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:33:01.082969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:33:01.477238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:1, at schemeshard: 72057594046644480 2025-03-31T16:33:01.948778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:33:02.117244Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488012533526418801:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:02.117315Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:33:02.418583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-31T16:33:02.820850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-31T16:33:03.252796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-03-31T16:33:03.287838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-03-31T16:33:05.003355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710702:0, at schemeshard: 72057594046644480 2025-03-31T16:33:05.031432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710703:0, at schemeshard: 72057594046644480 2025-03-31T16:33:05.032976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710704:0, at schemeshard: 72057594046644480 2025-03-31T16:33:05.034037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710705:0, at schemeshard: 72057594046644480 Call DescribeTable. 
data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. splits { select { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { items { column { name: "col1" type { type_id: UINT16 } } } } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Expected: splits { select { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } ... ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-31T16:33:08.813200Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:33:11.095914Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012592493633499:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:11.096019Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:11.097886Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:2, at schemeshard: 72057594046644480 2025-03-31T16:33:11.129975Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012592493633615:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:11.130034Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:11.130163Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012592493633620:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:11.133842Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:2, at schemeshard: 72057594046644480 2025-03-31T16:33:11.143238Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488012592493633622:2347], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-31T16:33:11.221867Z node 2 :TX_PROXY ERROR: Actor# [2:7488012592493633683:2401] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:33:11.632253Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:33:11.994219Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:1, at schemeshard: 72057594046644480 2025-03-31T16:33:12.373129Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:33:12.895477Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-03-31T16:33:13.197970Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488012579608730950:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:13.198051Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:33:13.326665Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715679:0, at schemeshard: 72057594046644480 2025-03-31T16:33:13.777535Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-03-31T16:33:13.809285Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-03-31T16:33:15.512775Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715702:0, at schemeshard: 72057594046644480 Call DescribeTable. 
data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } columns { name: "data_column" type { optional_type { item { type_id: STRING } } } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. 
splits { select { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Expected: splits { select { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Actual: splits { select { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. 
GRpcStatusCode: 0
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::MargePartitions2 [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:33:14.737118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:33:14.737204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:33:14.737233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:33:14.737260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:33:14.738180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:33:14.738222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:33:14.738288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:33:14.738363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:33:14.739503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:33:14.807908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:33:14.807964Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:33:14.813439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:33:14.813537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:33:14.813712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:33:14.820529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:33:14.820691Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:33:14.824263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:14.824517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:33:14.829093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:14.835770Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:33:14.835824Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:14.835976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-03-31T16:33:14.836026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:33:14.836079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:33:14.836184Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:33:14.844343Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:33:14.964598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:33:14.964838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:14.965036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:33:14.965253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:33:14.965303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:14.967645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:14.967910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:33:14.968095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:14.968164Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:33:14.968203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:33:14.968246Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:33:14.970120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:14.970185Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:33:14.970254Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:33:14.971973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:14.972031Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:14.972080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 
2025-03-31T16:33:14.972128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:33:14.980897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:33:14.982751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:33:14.982934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:33:14.983983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:14.984108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:33:14.984174Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:14.984435Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:33:14.984507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:14.984660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:33:14.984745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:33:14.986930Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:33:14.986993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:33:14.987187Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:14.987234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:33:14.987481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:14.987528Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:33:14.987644Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:33:14.987681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:14.987717Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:33:14.987747Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:14.987803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:33:14.987845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:14.987896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:33:14.987931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:33:14.987997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:33:14.988042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:33:14.988090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:33:14.990117Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:33:14.990220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:33:14.990258Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... iber for txId 105: send EvNotifyTxCompletion 2025-03-31T16:33:16.454388Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-03-31T16:33:16.454663Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 105, at schemeshard: 72057594046678944 2025-03-31T16:33:16.454706Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 0/1, is published: true 2025-03-31T16:33:16.454745Z node 2 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 105, at schemeshard: 72057594046678944 2025-03-31T16:33:16.488327Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 200, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:16.488471Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 105 AckTo { RawX1: 0 RawX2: 0 } } Step: 200 MediatorID: 72075186233409547 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:33:16.488529Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 105:0 HandleReply TEvOperationPlan, step: 200, at tablet: 72057594046678944 2025-03-31T16:33:16.488583Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 105:0 can't persist state: ShardsInProgress is not empty, remain: 1 2025-03-31T16:33:16.520337Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409548, partId: 0 2025-03-31T16:33:16.520523Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 200 2025-03-31T16:33:16.520604Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 
105 Step: 200 2025-03-31T16:33:16.520656Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 105:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:16.520694Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-03-31T16:33:16.520865Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:0 128 -> 240 2025-03-31T16:33:16.521047Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-03-31T16:33:16.523615Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2025-03-31T16:33:16.523834Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:33:16.523878Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-03-31T16:33:16.524139Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:16.524183Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 105, path id: 3 2025-03-31T16:33:16.524282Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2025-03-31T16:33:16.524328Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 105:0 ProgressState 2025-03-31T16:33:16.524426Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-03-31T16:33:16.524464Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-03-31T16:33:16.524511Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-03-31T16:33:16.524544Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-03-31T16:33:16.524584Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2025-03-31T16:33:16.524624Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-03-31T16:33:16.524667Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2025-03-31T16:33:16.524702Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2025-03-31T16:33:16.524843Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-03-31T16:33:16.524886Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 105, publications: 1, subscribers: 1 2025-03-31T16:33:16.524920Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2025-03-31T16:33:16.526727Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2025-03-31T16:33:16.526828Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2025-03-31T16:33:16.526867Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2025-03-31T16:33:16.526904Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-03-31T16:33:16.526942Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-03-31T16:33:16.527025Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 1 2025-03-31T16:33:16.527064Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [2:407:2375] 2025-03-31T16:33:16.530733Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-03-31T16:33:16.530862Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-03-31T16:33:16.530901Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:672:2595] TestWaitNotification: OK eventTxId 105 2025-03-31T16:33:16.537873Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-31T16:33:16.538113Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 273us result status StatusSuccess 2025-03-31T16:33:16.538785Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 5 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\375" } Status: Inactive ChildPartitionIds: 4 } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\375" ToBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } Status: Inactive ChildPartitionIds: 4 } Partitions { PartitionId: 3 TabletId: 72075186233409548 KeyRange { 
FromBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } Status: Active } Partitions { PartitionId: 4 TabletId: 72075186233409548 KeyRange { FromBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" ToBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } Status: Active ParentPartitionIds: 1 ParentPartitionIds: 2 } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 5 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 5 NextPartitionId: 5 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive KeyRange { FromBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\375" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\375" ToBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } } Partitions { PartitionId: 3 GroupId: 4 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { ToBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } } Partitions { PartitionId: 4 GroupId: 5 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 1 ParentPartitionIds: 2 KeyRange { FromBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" ToBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 5 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |95.7%| [TA] $(B)/ydb/core/kqp/ut/join/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TBSV::CleanupDroppedVolumesOnRestart [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:33:16.141360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:33:16.141605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:33:16.141660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:33:16.141697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:33:16.142674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:33:16.142716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:33:16.142823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:33:16.142908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:33:16.144168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:33:16.241192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:33:16.241253Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:33:16.246674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:33:16.246771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:33:16.246933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:33:16.253902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:33:16.254196Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:33:16.257635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:16.257988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:33:16.263641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:16.269559Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:33:16.269641Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:16.269804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 
2025-03-31T16:33:16.269852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:33:16.269954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:33:16.270120Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:33:16.278087Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:33:16.410089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:33:16.410913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:16.411915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:33:16.413047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:33:16.413124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:16.416173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:16.416285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:33:16.416474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:16.416539Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:33:16.416577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:33:16.416607Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:33:16.418630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:16.418683Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:33:16.418731Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:33:16.420555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:16.420607Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:16.420662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 
2025-03-31T16:33:16.420710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:33:16.424537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:33:16.426667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:33:16.427609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:33:16.428414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:16.428511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:33:16.428552Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:16.429850Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:33:16.429900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:16.430047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:33:16.430112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:33:16.432789Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:33:16.432864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:33:16.433030Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:16.433080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:33:16.433304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:16.433361Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:33:16.433452Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:33:16.433483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:16.433522Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:33:16.433552Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:16.433616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:33:16.433653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:16.433686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:33:16.433732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:33:16.433795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:33:16.433831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:33:16.433860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:33:16.435358Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:33:16.435440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:33:16.435468Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... -31T16:33:16.615943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:16.615985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:16.616036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:16.622024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-31T16:33:16.623376Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:33:16.624469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:33:16.624539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:33:16.624961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-31T16:33:16.625015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-31T16:33:16.625084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:33:16.625515Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:33:16.627674Z node 1 :FLAT_TX_SCHEMESHARD WARN: TTxCleanBlockStoreVolumes Complete, done PersistRemoveBlockStoreVolume for 1 volumes, left 0, at schemeshard: 72057594046678944 2025-03-31T16:33:16.627730Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 
2025-03-31T16:33:16.627763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:33:16.628596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:399:2375] sender: [1:465:2058] recipient: [1:15:2062] 2025-03-31T16:33:16.681201Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/BSVolume" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:33:16.681383Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/BSVolume" took 195us result status StatusPathDoesNotExist 2025-03-31T16:33:16.681515Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/BSVolume\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/BSVolume" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-03-31T16:33:16.682220Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [1:399:2375] sender: [1:466:2058] recipient: [1:102:2137] Leader for TabletID 72057594046678944 is [1:399:2375] sender: [1:469:2058] recipient: [1:15:2062] Leader for TabletID 72057594046678944 is [1:399:2375] sender: [1:470:2058] recipient: [1:468:2428] Leader for TabletID 72057594046678944 is [1:471:2429] sender: [1:472:2058] recipient: [1:468:2428] 2025-03-31T16:33:16.717101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:33:16.717219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:33:16.717265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:33:16.717300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:33:16.717333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:33:16.717398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:33:16.717455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:33:16.717530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, 
DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:33:16.717805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:33:16.734408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:33:16.735753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:33:16.735919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:33:16.736058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:33:16.736092Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:33:16.736377Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:33:16.737104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:16.737226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:16.737292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:16.737677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:16.737775Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-03-31T16:33:16.737958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:16.738056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:16.738158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:16.738316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:16.738425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:16.738604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:16.738868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:16.738975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:16.739392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:16.739475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:16.739681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:16.739790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:16.739921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:16.740079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:16.740167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for 
CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:16.740304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:16.740538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:16.740685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:16.740727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:16.740785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:16.748299Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:33:16.748380Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:16.748563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:33:16.748612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:33:16.748649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:33:16.748785Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:471:2429] sender: [1:530:2058] recipient: [1:15:2062] 2025-03-31T16:33:16.781242Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/BSVolume" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:33:16.781477Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/BSVolume" took 264us result status StatusPathDoesNotExist 2025-03-31T16:33:16.781632Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/BSVolume\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/BSVolume" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::MargeNotAdjacentRangePartitions [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:33:14.737108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact 
single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:33:14.737214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:33:14.737261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:33:14.737297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:33:14.738177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:33:14.738218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:33:14.738286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:33:14.738364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:33:14.739464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:33:14.808007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:33:14.808070Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:33:14.813157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:33:14.813229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:33:14.813410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:33:14.820376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:33:14.820774Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:33:14.824269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:14.824521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:33:14.829021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:14.835759Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:33:14.835815Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:14.835928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:33:14.835961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:33:14.836030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:33:14.836200Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:33:14.843906Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] 
sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:33:14.965501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:33:14.965665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:14.965786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:33:14.965919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:33:14.965962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:14.967899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:14.967985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:33:14.968106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:14.968158Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:33:14.968226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:33:14.968251Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:33:14.969853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:14.969902Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:33:14.969931Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:33:14.971383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:14.971420Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:14.971464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:14.971502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:33:14.974253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:33:14.976051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 
269090816 2025-03-31T16:33:14.976217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:33:14.977093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:14.977194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:33:14.977230Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:14.977435Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:33:14.977473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:14.977590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:33:14.977639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:33:14.979199Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:33:14.979232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:33:14.979364Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:14.979394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:33:14.979549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:14.979599Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:33:14.979682Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:33:14.979705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:14.979730Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:33:14.979750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:14.979786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:33:14.979817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:14.979849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:33:14.979874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:33:14.979920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove 
txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:33:14.979945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:33:14.979966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:33:14.981396Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:33:14.981479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:33:14.981505Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... essage# TabletId: 72075186233409548 TxId: 104 Status: OK 2025-03-31T16:33:16.582602Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionAttachResult CollectPQConfigChanged: false 2025-03-31T16:33:16.582638Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 can't persist state: ShardsInProgress is not empty, remain: 1 2025-03-31T16:33:16.583819Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-03-31T16:33:16.583992Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-03-31T16:33:16.584029Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-03-31T16:33:16.584326Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 104, at schemeshard: 72057594046678944 2025-03-31T16:33:16.584360Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-03-31T16:33:16.584401Z node 2 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 104, at schemeshard: 72057594046678944 2025-03-31T16:33:16.619050Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 150, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:16.619235Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 AckTo { RawX1: 0 RawX2: 0 } } Step: 150 MediatorID: 72075186233409547 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:33:16.619309Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 104:0 HandleReply TEvOperationPlan, step: 150, at tablet: 72057594046678944 2025-03-31T16:33:16.619407Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 can't persist state: ShardsInProgress is not empty, remain: 1 2025-03-31T16:33:16.649036Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409548, partId: 0 2025-03-31T16:33:16.649209Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 104 Step: 150 2025-03-31T16:33:16.649291Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult triggers early, at 
schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 104 Step: 150 2025-03-31T16:33:16.649371Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:16.649415Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-03-31T16:33:16.649617Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 128 -> 240 2025-03-31T16:33:16.649818Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-31T16:33:16.649879Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-03-31T16:33:16.652517Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-31T16:33:16.652778Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:33:16.652821Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T16:33:16.652987Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-03-31T16:33:16.653139Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:16.653176Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:205:2207], at schemeshard: 72057594046678944, txId: 104, path id: 2 2025-03-31T16:33:16.653225Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:205:2207], at schemeshard: 72057594046678944, txId: 104, path id: 3 2025-03-31T16:33:16.653562Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-31T16:33:16.653611Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2025-03-31T16:33:16.653711Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-03-31T16:33:16.653743Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-31T16:33:16.653778Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-03-31T16:33:16.653810Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-31T16:33:16.653848Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-03-31T16:33:16.653886Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-31T16:33:16.653926Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-03-31T16:33:16.653954Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-03-31T16:33:16.654101Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 
2025-03-31T16:33:16.654144Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 104, publications: 2, subscribers: 1 2025-03-31T16:33:16.654170Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2025-03-31T16:33:16.654196Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-03-31T16:33:16.655237Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2025-03-31T16:33:16.655361Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2025-03-31T16:33:16.655414Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2025-03-31T16:33:16.655464Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-03-31T16:33:16.655514Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-03-31T16:33:16.656697Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2025-03-31T16:33:16.656762Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2025-03-31T16:33:16.656783Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-03-31T16:33:16.656803Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-03-31T16:33:16.656825Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-03-31T16:33:16.656893Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 1 2025-03-31T16:33:16.656936Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [2:408:2376] 2025-03-31T16:33:16.660213Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-03-31T16:33:16.661573Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-03-31T16:33:16.661663Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-03-31T16:33:16.661699Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [2:542:2479] TestWaitNotification: OK eventTxId 104 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Merge { Partition: 0 AdjacentPartition: 2 } TestModificationResults wait txId: 105 2025-03-31T16:33:16.669796Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Merge { Partition: 0 AdjacentPartition: 2 } } } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:33:16.670010Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 105:0, at schemeshard: 72057594046678944 2025-03-31T16:33:16.670168Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 105:1, propose status:StatusInvalidParameter, reason: You cannot merge non-contiguous partitions, at schemeshard: 72057594046678944 2025-03-31T16:33:16.671950Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 105, response: Status: StatusInvalidParameter Reason: "You cannot merge non-contiguous partitions" TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:33:16.672096Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 105, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: You cannot merge non-contiguous partitions, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-03-31T16:33:16.672311Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-03-31T16:33:16.672342Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-03-31T16:33:16.672648Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-03-31T16:33:16.672721Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-03-31T16:33:16.672750Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:636:2562] TestWaitNotification: OK eventTxId 105 >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-StrictAclCheck >> KqpMergeCn::TopSortBy_String_Limit3 [GOOD] >> KqpMergeCn::TopSortBy_Timestamp_Limit2 >> TSchemeShardTopicSplitMergeTest::DisableSplitMerge [GOOD] >> KqpNotNullColumns::JoinLeftTableWithNotNullPk+StreamLookup [GOOD] >> KqpNotNullColumns::JoinLeftTableWithNotNullPk-StreamLookup >> KqpNotNullColumns::Describe [GOOD] >> KqpNotNullColumns::CreateTableWithNotNullColumns >> KikimrIcGateway::TestDropExternalTable [GOOD] >> KikimrIcGateway::TestDropExternalDataSource |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/runtime/unittest |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/runtime/unittest >> KqpScanSpilling::SelfJoinQueryService >> KqpScanSpilling::SpillingInRuntimeNodes-EnabledSpilling >> KqpScanSpilling::SpillingInRuntimeNodes+EnabledSpilling >> KqpScanSpilling::SelfJoin >> KqpScanLogs::WideCombine >> KqpScanSpilling::HandleErrorsCorrectly >> KqpScanLogs::GraceJoin >> KikimrIcGateway::TestCreateResourcePool [GOOD] >> KikimrIcGateway::TestALterResourcePool ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::DisableSplitMerge [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for 
TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:33:14.737635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:33:14.737725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:33:14.737767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:33:14.737801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:33:14.738216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:33:14.738284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:33:14.738354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:33:14.738460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:33:14.739506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:33:14.825297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:33:14.825352Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:33:14.829260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:33:14.829334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:33:14.829499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:33:14.833737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:33:14.833902Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:33:14.834458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:14.834647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:33:14.836516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:14.837629Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:33:14.837689Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:14.837855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:33:14.837900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:33:14.837954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxServerlessStorageBilling.Complete 2025-03-31T16:33:14.838084Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:33:14.845110Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:33:15.004180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:33:15.004447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:15.004696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:33:15.004945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:33:15.005000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:15.007244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:15.007408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:33:15.007599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:15.007672Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:33:15.007712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:33:15.007751Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:33:15.009855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:15.009937Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:33:15.009981Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:33:15.011838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:15.011891Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:15.011948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:15.011997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:33:15.015876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } 
ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:33:15.017905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:33:15.018114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:33:15.019122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:15.019269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:33:15.019346Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:15.019650Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:33:15.019707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:15.019873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:33:15.019965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:33:15.022628Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:33:15.022682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:33:15.022872Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:15.022915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:33:15.023229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:15.023281Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:33:15.023391Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:33:15.023428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:15.023469Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:33:15.023501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:15.023539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:33:15.023578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready 
parts: 1/1 2025-03-31T16:33:15.023635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:33:15.023679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:33:15.023743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:33:15.023795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:33:15.023841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:33:15.025546Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:33:15.025653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:33:15.025685Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... arts: 1/1 2025-03-31T16:33:17.181550Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-03-31T16:33:17.181593Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-03-31T16:33:17.181630Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2025-03-31T16:33:17.181670Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-03-31T16:33:17.181707Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2025-03-31T16:33:17.181738Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2025-03-31T16:33:17.181870Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-03-31T16:33:17.181914Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 105, publications: 1, subscribers: 0 2025-03-31T16:33:17.181944Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2025-03-31T16:33:17.182534Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2025-03-31T16:33:17.182618Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2025-03-31T16:33:17.182652Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2025-03-31T16:33:17.182688Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-03-31T16:33:17.182725Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-03-31T16:33:17.182808Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 
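The "Change state for txid" lines in this bootstrap trace use numeric codes: 2 -> 3, 3 -> 128, 128 -> 240. The handler names printed alongside them suggest the mapping sketched below; the values come straight from the log, but the symbolic names are inferred, so treat this as an illustration rather than the authoritative schemeshard definition:

// Sketch: suboperation states as they appear in the trace. Values come
// from the "Change state for txid N:0 A -> B" lines; the name-to-value
// pairing is inferred from the surrounding handler messages.
enum class ETxState : int {
    CreateParts    = 2,   // "TCreateParts opId# 1:0 ProgressState"
    ConfigureParts = 3,   // "NSubDomainState::TConfigureParts ... ProgressState"
    Propose        = 128, // "NSubDomainState::TPropose ProgressState"
    Done           = 240, // "TDone opId# 1:0 ProgressState"
};

// Tx 1 above walks exactly this chain: CreateParts has no shards to
// create, ConfigureParts immediately advances to Propose, and handling
// TEvOperationPlan from the coordinator moves the operation to Done.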
2025-03-31T16:33:17.186670Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-03-31T16:33:17.193076Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-03-31T16:33:17.193131Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-03-31T16:33:17.193533Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-03-31T16:33:17.193649Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-03-31T16:33:17.193692Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:750:2665] TestWaitNotification: OK eventTxId 105 2025-03-31T16:33:17.754254Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 3 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:33:17.754626Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 3 took 394us result status StatusSuccess 2025-03-31T16:33:17.755343Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 Status: Inactive ChildPartitionIds: 1 ChildPartitionIds: 2 } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { ToBound: "\010" } Status: Active ParentPartitionIds: 0 } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\010" } Status: Active ParentPartitionIds: 0 } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { ToBound: "\010" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { FromBound: "\010" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 
PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:33:17.828333Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-31T16:33:17.828635Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 351us result status StatusSuccess 2025-03-31T16:33:17.829302Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 Status: Inactive ChildPartitionIds: 1 ChildPartitionIds: 2 } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { ToBound: "\010" } Status: Active ParentPartitionIds: 0 } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\010" } Status: Active ParentPartitionIds: 0 } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { ToBound: "\010" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { FromBound: "\010" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 
72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } PartitionStrategy { PartitionStrategyType: DISABLED } } TestModificationResults wait txId: 106 2025-03-31T16:33:17.832774Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } PartitionStrategy { PartitionStrategyType: DISABLED } } } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:33:17.832985Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 106:0, at schemeshard: 72057594046678944 2025-03-31T16:33:17.833124Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 106:1, propose status:StatusInvalidParameter, reason: Can`t disable auto partitioning., at schemeshard: 72057594046678944 2025-03-31T16:33:17.835869Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 106, response: Status: StatusInvalidParameter Reason: "Can`t disable auto partitioning." TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:33:17.836034Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 106, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Can`t disable auto partitioning., operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-03-31T16:33:17.836362Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2025-03-31T16:33:17.836400Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2025-03-31T16:33:17.836903Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2025-03-31T16:33:17.837007Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-03-31T16:33:17.837045Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [2:765:2679] TestWaitNotification: OK eventTxId 106 >> KqpRanges::DeleteNotFullScan+UseSink [GOOD] >> KqpRanges::CastKeyBounds >> KqpRanges::ValidatePredicates [GOOD] >> KqpRanges::ValidatePredicatesDataQuery >> TAsyncIndexTests::MergeIndexWithReboots[TabletReboots] [GOOD] >> DataShardSnapshots::VolatileSnapshotRenameTimeout [GOOD] >> KikimrIcGateway::TestDropTable [GOOD] >> TSchemeshardCompactionQueueTest::EnqueueBelowSearchHeightThreshold [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldRequestCompactionsSchemeshardRestart >> DataShardSnapshots::UncommittedWriteRestartDuringCommit >> KikimrIcGateway::TestDropResourcePool >> TSchemeshardCompactionQueueTest::EnqueueBelowRowDeletesThreshold [GOOD] >> TSchemeshardCompactionQueueTest::CheckOrderWhenAllQueues [GOOD] >> TSchemeshardBackgroundCompactionTest::ShouldNotCompactServerless >> TSchemeshardCompactionQueueTest::UpdateBelowThreshold [GOOD] >> KqpReturning::ReturningWorksIndexedOperationsWithDefault-QueryService [GOOD] >> 
TSchemeshardCompactionQueueTest::ShouldNotEnqueueSinglePartedShardWithMemData [GOOD] >> TSchemeshardCompactionQueueTest::ShouldPopWhenOnlyLastCompactionQueue [GOOD] >> KqpNotNullColumns::CreateTableWithNotNullColumns [GOOD] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldNotCompactBorrowedAfterSplitMergeWhenDisabled >> TSchemeshardCompactionQueueTest::UpdateWithEmptyShard [GOOD] >> KikimrIcGateway::TestLoadBasicSecretValueFromExternalDataSourceMetadata [GOOD] >> KikimrIcGateway::TestALterResourcePool [GOOD] >> TSchemeshardCompactionQueueTest::ShouldNotEnqueueEmptyShard [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotCompactBackups >> KikimrIcGateway::TestDropExternalDataSource [GOOD] >> KikimrIcGateway::TestLoadAwsSecretValueFromExternalDataSourceMetadata >> TSchemeshardCompactionQueueTest::RemoveLastShardFromSubQueues [GOOD] >> IndexBuildTest::BaseCase >> KqpRanges::IsNullInJsonValue [GOOD] >> KqpRanges::IsNotNullInValue >> VectorIndexBuildTest::VectorIndexDescriptionIsPersisted |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardCompactionQueueTest::CheckOrderWhenAllQueues [GOOD] |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardCompactionQueueTest::UpdateWithEmptyShard [GOOD] |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardCompactionQueueTest::ShouldPopWhenOnlyLastCompactionQueue [GOOD] |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardCompactionQueueTest::RemoveLastShardFromSubQueues [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::MergeIndexWithReboots[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [1:135:2157] sender: [1:136:2058] recipient: [1:111:2142] 2025-03-31T16:29:30.507819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:29:30.507931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:29:30.507986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:29:30.508028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 
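Each schemeshard in these traces prints its background-queue settings at startup (BackgroundCompactionQueue, BorrowedCompactionQueue, BackgroundCleaningQueue, and the RootDataErasureManager). A small sketch collecting those knobs, with field names mirroring the log tags and defaults taken from the node 1 line above; this is an illustration of the parameters, not the real configuration proto:

#include <chrono>

// Illustrative only. Mirrors lines such as:
// "BackgroundCompactionQueue configured: Timeout# 600.000000s, compact
//  single parted# no, Rate# 1, WakeupInterval# 60.000000s,
//  RoundInterval# 172800.000000s, InflightLimit# 1, ..."
struct TBackgroundQueueConfig {
    std::chrono::duration<double> Timeout{600.0};          // Timeout# 600.000000s
    double Rate = 1;                                       // Rate# 1
    std::chrono::duration<double> WakeupInterval{60.0};    // WakeupInterval# 60.000000s
    std::chrono::duration<double> RoundInterval{172800.0}; // RoundInterval# 172800.000000s (2 days)
    int InflightLimit = 1;                                 // InflightLimit# 1
    bool CompactSinglePartedShards = false;                // "compact single parted# no"
};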
2025-03-31T16:29:30.511839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:29:30.511913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:29:30.512039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:29:30.512146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:29:30.513911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:29:30.613763Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-03-31T16:29:30.613869Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:169:2058] recipient: [1:15:2062] 2025-03-31T16:29:30.641762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:29:30.641950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:29:30.642101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:29:30.656161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:29:30.656458Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:29:30.658798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:30.659198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:29:30.686549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:30.702578Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:29:30.702674Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:30.702869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:29:30.702944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:29:30.703017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:29:30.703221Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:210:2058] recipient: [1:208:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:210:2058] recipient: [1:208:2209] Leader for TabletID 72057594037968897 is 
[1:214:2213] sender: [1:215:2058] recipient: [1:208:2209] 2025-03-31T16:29:30.721290Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:235:2058] recipient: [1:15:2062] 2025-03-31T16:29:30.922040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:29:30.922273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:30.922474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:29:30.922731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:29:30.922792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:30.928210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:30.928352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:29:30.928591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:30.928686Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:29:30.928725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:29:30.928771Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:29:30.930977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:30.931061Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:29:30.931128Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:29:30.933183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:30.933241Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:30.933285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:30.933333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:29:30.937513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 
72057594046316545 2025-03-31T16:29:30.939683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:29:30.939876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:135:2157] sender: [1:250:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:29:30.940967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:30.941099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 135 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:29:30.941155Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:30.941427Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:29:30.941485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:30.941696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:29:30.941822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:29:30.944205Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:29:30.944253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:29:30.944419Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:30.944466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:202:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:29:30.944813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:30.944867Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:29:30.945008Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:29:30.945043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:29:30.945084Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:29:30.945142Z no ... 
uccess 2025-03-31T16:33:19.249865Z node 104 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 
ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:33:19.255006Z node 104 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: 
true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-31T16:33:19.255274Z node 104 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 304us result status StatusSuccess 2025-03-31T16:33:19.255911Z node 104 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } 
TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestDropExternalDataSource [GOOD] Test command err: Trying to start YDB, gRPC: 25711, MsgBus: 19321 2025-03-31T16:33:12.295879Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012597908042548:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:12.295972Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ac4/r3tmp/tmpGzJoZD/pdisk_1.dat 2025-03-31T16:33:12.636516Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25711, node 1 2025-03-31T16:33:12.687248Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:12.687387Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:12.688948Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:33:12.799158Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:12.799195Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:12.799202Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:12.799337Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19321 TClient is connected to server localhost:19321 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:33:13.422258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:13.449903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-03-31T16:33:13.473970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-31T16:33:13.493164Z node 1 :TX_PROXY ERROR: Actor# [1:7488012602203010542:2343] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/f1/f2/external_table\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeExternalTable, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:33:13.493265Z node 1 :KQP_GATEWAY ERROR: Unexpected error on scheme request, TxId: 281474976710660, ProxyStatus: ExecComplete, SchemeShardReason: Check failed: path: '/Root/f1/f2/external_table', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeExternalTable, state: EPathStateNoChanges)
: Error: Scheme operation failed, status: ExecComplete, reason: Check failed: path: '/Root/f1/f2/external_table', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeExternalTable, state: EPathStateNoChanges) Trying to start YDB, gRPC: 25848, MsgBus: 9673 2025-03-31T16:33:15.373479Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012608014559357:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:15.373581Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ac4/r3tmp/tmpJM5ecG/pdisk_1.dat 2025-03-31T16:33:15.477064Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25848, node 2 2025-03-31T16:33:15.513150Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:15.513239Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:15.515855Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:33:15.540044Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:15.540072Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:15.540078Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:15.540202Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9673 TClient is connected to server localhost:9673 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:33:15.947579Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:33:15.972364Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715658:2, at schemeshard: 72057594046644480 2025-03-31T16:33:15.991123Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 2841, MsgBus: 13069 2025-03-31T16:33:18.196872Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488012623540427392:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:18.196927Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ac4/r3tmp/tmpHbzREy/pdisk_1.dat 2025-03-31T16:33:18.314053Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:33:18.345135Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:18.345230Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 2841, node 3 2025-03-31T16:33:18.348695Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:33:18.373794Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:18.373821Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:18.373830Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:18.373951Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13069 TClient is connected to server localhost:13069 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-03-31T16:33:18.776429Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:33:18.796241Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710658:2, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpReturning::ReturningWorksIndexedOperationsWithDefault-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 19668, MsgBus: 24285 2025-03-31T16:32:34.971435Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012434084243360:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:34.971512Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0009ed/r3tmp/tmp3rupNe/pdisk_1.dat 2025-03-31T16:32:35.264540Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19668, node 1 2025-03-31T16:32:35.333234Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:32:35.333254Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:32:35.333264Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:32:35.333362Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:32:35.359951Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:35.360120Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:32:35.361915Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24285 TClient is connected to server localhost:24285 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:32:35.803277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:32:35.814790Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:32:35.822700Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:35.925846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:36.066289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:36.122565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:37.482933Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012446969147013:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:37.483063Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:37.792277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:32:37.817838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:32:37.846802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:32:37.873769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:32:37.902490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:32:37.973893Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:32:38.007191Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012451264114822:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:38.007276Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:38.007386Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012451264114827:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:38.010400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:32:38.017433Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012451264114829:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:32:38.095711Z node 1 :TX_PROXY ERROR: Actor# [1:7488012451264114885:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:32:38.821198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:32:38.893386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-31T16:32:38.927532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-31T16:32:39.971617Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488012434084243360:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:39.971697Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 12866, MsgBus: 4138 2025-03-31T16:32:41.654929Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012461737341634:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:41.655011Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0009ed/r3tmp/tmp0fsNz2/pdisk_1.dat 2025-03-31T16:32:41.744432Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12866, node 2 2025-03-31T16:32:41.805135Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:41.805207Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:32:41.806937Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:32:41.808729Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:32:41.808756Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:32:41.808770Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:32:41.808883Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4138 TClient is connected to server localhost:4138 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:32:42.181338Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:42.197472Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, a ... not found or you don't have access permissions } 2025-03-31T16:33:10.592916Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:33:10.623709Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:33:10.655589Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:33:10.690003Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:33:10.726444Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:33:10.759898Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:33:10.805416Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7488012588794248828:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:10.805503Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7488012588794248833:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:10.805507Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:10.808899Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:33:10.818932Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7488012588794248835:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:33:10.877601Z node 5 :TX_PROXY ERROR: Actor# [5:7488012588794248888:3446] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:33:11.755365Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:33:11.957805Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7488012571614377368:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:11.957890Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 23673, MsgBus: 64994 2025-03-31T16:33:13.718836Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7488012600407196730:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:13.718949Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0009ed/r3tmp/tmp2QWkms/pdisk_1.dat 2025-03-31T16:33:13.848782Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:33:13.867127Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:13.867232Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:13.869143Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23673, node 6 2025-03-31T16:33:13.917594Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:13.917620Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:13.917629Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:13.917801Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64994 TClient is connected to server localhost:64994 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:33:14.470786Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:14.490111Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:14.565312Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:14.792245Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:14.872188Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:17.478040Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488012617587067703:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:17.478191Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:17.521760Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:33:17.556666Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:33:17.590536Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:33:17.627905Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:33:17.666103Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:33:17.703641Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:33:17.796785Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488012617587068217:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:17.796883Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:17.796894Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488012617587068222:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:17.801037Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:33:17.811936Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7488012617587068224:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:33:17.883829Z node 6 :TX_PROXY ERROR: Actor# [6:7488012617587068278:3454] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:33:18.720095Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7488012600407196730:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:18.720194Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:33:18.924578Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 >> KqpNewEngine::JoinMultiConsumer [GOOD] >> KqpNewEngine::JoinSameKey ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestALterResourcePool [GOOD] Test command err: Trying to start YDB, gRPC: 22841, MsgBus: 7539 2025-03-31T16:33:12.295695Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012595645230958:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:12.295729Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a74/r3tmp/tmpMPFhay/pdisk_1.dat 2025-03-31T16:33:12.626422Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22841, node 1 2025-03-31T16:33:12.677656Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:12.677724Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:12.679545Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:33:12.799667Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:12.799691Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:12.799698Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:12.799796Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7539 TClient is connected to server localhost:7539 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:33:13.464349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:13.497178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-03-31T16:33:13.513068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 12726, MsgBus: 24528 2025-03-31T16:33:15.339470Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012607382474750:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:15.339566Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a74/r3tmp/tmpN2CMmk/pdisk_1.dat 2025-03-31T16:33:15.457521Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12726, node 2 2025-03-31T16:33:15.472215Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:15.472437Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:15.474749Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:33:15.505406Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:15.505428Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:15.505439Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:15.505573Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24528 TClient is connected to server localhost:24528 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:33:15.964010Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:15.979792Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 32248, MsgBus: 8116 2025-03-31T16:33:18.614556Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488012623198819913:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:18.614644Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a74/r3tmp/tmpEjUzaQ/pdisk_1.dat 2025-03-31T16:33:18.698648Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32248, node 3 2025-03-31T16:33:18.750160Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:18.750255Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:18.752049Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:33:18.763608Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:18.763634Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:18.763641Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:18.763766Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8116 TClient is connected to server localhost:8116 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:33:19.170893Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:19.192623Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:33:19.210138Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterResourcePool, opId: 281474976715659:0, at schemeshard: 72057594046644480 |95.8%| [TA] $(B)/ydb/core/tx/schemeshard/ut_bsvolume/test-results/unittest/{meta.json ... results_accumulator.log} |95.8%| [TA] $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/test-results/unittest/{meta.json ... results_accumulator.log} >> KikimrIcGateway::TestDropResourcePool [GOOD] |95.8%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TAsyncIndexTests::MergeMainWithReboots[TabletReboots] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNotNullColumns::CreateTableWithNotNullColumns [GOOD] Test command err: Trying to start YDB, gRPC: 10696, MsgBus: 27085 2025-03-31T16:32:57.897558Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012533300759816:2069];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:57.897642Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0008e1/r3tmp/tmplaXKrh/pdisk_1.dat 2025-03-31T16:32:58.256214Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10696, node 1 2025-03-31T16:32:58.311434Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:58.311582Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:32:58.313097Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:32:58.330857Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:32:58.330895Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:32:58.330906Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:32:58.331031Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27085 TClient is connected to server localhost:27085 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:32:58.829441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:00.858245Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012546185662351:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:00.858364Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:01.089624Z node 1 :TX_PROXY ERROR: Actor# [1:7488012550480629671:2306] txid# 281474976710658, issues: { message: "It is not allowed to create not null data column: Value" severity: 1 } 2025-03-31T16:33:01.110711Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012550480629679:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:01.110807Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:01.127827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 15990, MsgBus: 14255 2025-03-31T16:33:01.775522Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012549630826298:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:01.775588Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0008e1/r3tmp/tmpuoqSrf/pdisk_1.dat 2025-03-31T16:33:01.873519Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15990, node 2 2025-03-31T16:33:01.904071Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:01.904168Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:01.905846Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:33:01.926423Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:01.926449Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:01.926457Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:01.926571Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14255 TClient is connected to server localhost:14255 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:33:02.273744Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:02.289201Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:33:02.345158Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:02.539986Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:02.614600Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:04.800212Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012562515729949:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:04.800296Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:04.863900Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:33:04.893758Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:33:04.927110Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:33:04.968268Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:33:04.996411Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:33:05.066906Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:33:05.145492Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012566810697759:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:05.145575Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:05.145634Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012566810697764:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:05.149277Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:33:05.158132Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488012566810697766:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:33:05.241473Z node 2 :TX_PROXY ERROR: Actor# [2:7488012566810697821:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request acc ... ermissions } 2025-03-31T16:33:09.746683Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:09.760066Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 30242, MsgBus: 24677 2025-03-31T16:33:10.413629Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488012585998159306:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:10.413781Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0008e1/r3tmp/tmpvqCPMB/pdisk_1.dat 2025-03-31T16:33:10.532142Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:33:10.549098Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:10.549199Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:10.550729Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30242, node 4 2025-03-31T16:33:10.591517Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:10.591543Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:10.591549Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:10.591633Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24677 TClient is connected to server localhost:24677 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:33:11.026951Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:13.310269Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488012598883061840:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:13.310360Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:13.320191Z node 4 :TX_PROXY ERROR: Actor# [4:7488012598883061861:2304] txid# 281474976715658, issues: { message: "It is not allowed to create not null data column: Value" severity: 1 } 2025-03-31T16:33:13.330786Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488012598883061869:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:13.330860Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:13.354354Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 27719, MsgBus: 63912 2025-03-31T16:33:14.235277Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7488012605887053544:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:14.235360Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0008e1/r3tmp/tmp5TAeB6/pdisk_1.dat 2025-03-31T16:33:14.332282Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:33:14.382517Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:14.382590Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:14.384632Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27719, node 5 2025-03-31T16:33:14.423514Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:14.423539Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:14.423549Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:14.423700Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63912 TClient is connected to server localhost:63912 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:33:14.867061Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:17.479239Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7488012618771956079:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:17.479436Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:17.489600Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 9693, MsgBus: 19847 2025-03-31T16:33:18.211005Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7488012621740544929:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:18.211130Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0008e1/r3tmp/tmpW5DKeY/pdisk_1.dat 2025-03-31T16:33:18.340887Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:33:18.371231Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:18.371334Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:18.372703Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9693, node 6 2025-03-31T16:33:18.418628Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:18.418656Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:18.418666Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:18.418801Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19847 TClient is connected to server localhost:19847 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:33:19.002440Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:21.682547Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 |95.8%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/test-results/unittest/{meta.json ... 
results_accumulator.log} >> IndexBuildTest::CheckLimitWithDroppedIndex >> IndexBuildTest::CancellationNotEnoughRetries >> IndexBuildTest::RejectsCreate >> VectorIndexBuildTest::BaseCase >> KqpScanSpilling::SelfJoin [GOOD] >> KqpScanSpilling::SelfJoinQueryService [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestDropResourcePool [GOOD] Test command err: Trying to start YDB, gRPC: 21680, MsgBus: 10014 2025-03-31T16:33:12.321989Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012596212760859:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:12.322306Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a98/r3tmp/tmpxS72jM/pdisk_1.dat 2025-03-31T16:33:12.644404Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21680, node 1 2025-03-31T16:33:12.696694Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:12.696761Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:12.698428Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:33:12.799122Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:12.799148Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:12.799156Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:12.799272Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10014 TClient is connected to server localhost:10014 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:33:13.415201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:15.125028Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012609097663455:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:15.125139Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:15.399802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:33:15.515900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:33:15.546184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:33:15.573269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:33:15.620291Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012609097663770:2365], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:15.620374Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012609097663775:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:15.620430Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:15.625490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710664:3, at schemeshard: 72057594046644480 2025-03-31T16:33:15.635773Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012609097663777:2369], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710664 completed, doublechecking } 2025-03-31T16:33:15.693066Z node 1 :TX_PROXY ERROR: Actor# [1:7488012609097663829:2567] txid# 281474976710665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 11], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 31990, MsgBus: 22209 2025-03-31T16:33:16.754788Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012612417402850:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:16.754897Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a98/r3tmp/tmprXHbBE/pdisk_1.dat 2025-03-31T16:33:16.885583Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:33:16.916860Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:16.916950Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 31990, node 2 2025-03-31T16:33:16.918452Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:33:16.951207Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:16.951233Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:16.951241Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:16.951375Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22209 TClient is connected to server localhost:22209 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:33:17.363029Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:19.424107Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012625302305439:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:19.424188Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:19.446585Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:33:19.523314Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:33:19.556030Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:33:19.585372Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:33:19.630291Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012625302305752:2364], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:19.630346Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:19.630479Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012625302305757:2367], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:19.634324Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710664:3, at schemeshard: 72057594046644480 2025-03-31T16:33:19.647184Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488012625302305759:2368], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710664 completed, doublechecking } 2025-03-31T16:33:19.721975Z node 2 :TX_PROXY ERROR: Actor# [2:7488012625302305812:2565] txid# 281474976710665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 11], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:33:19.920360Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found
: Info: Success, code: 4 Trying to start YDB, gRPC: 15873, MsgBus: 24931 2025-03-31T16:33:20.635990Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488012629400741431:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:20.636106Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a98/r3tmp/tmpc6r9o0/pdisk_1.dat 2025-03-31T16:33:20.751248Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15873, node 3 2025-03-31T16:33:20.781659Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:20.781930Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:20.783513Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:33:20.802223Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:20.802249Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:20.802257Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:20.802372Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24931 TClient is connected to server localhost:24931 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:33:21.204287Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:33:21.226151Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 >> KqpMergeCn::TopSortBy_Timestamp_Limit2 [GOOD] >> KqpMergeCn::TopSortBy_Interval_Limit3 >> KqpScanSpilling::SpillingPragmaParseError >> KqpNotNullColumns::JoinLeftTableWithNotNullPk-StreamLookup [GOOD] >> KqpRanges::CastKeyBounds [GOOD] >> KikimrIcGateway::TestLoadServiceAccountSecretValueFromExternalDataSourceMetadata [GOOD] >> IndexBuildTest::CheckLimitWithDroppedIndex [GOOD] >> KikimrIcGateway::TestLoadMdbBasicSecretValueFromExternalDataSourceMetadata >> IndexBuildTest::DropIndex >> VectorIndexBuildTest::VectorIndexDescriptionIsPersisted [GOOD] >> IndexBuildTest::RejectsCreate [GOOD] >> KikimrIcGateway::TestLoadTokenSecretValueFromExternalDataSourceMetadata [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCreateClean >> KqpScanArrowInChanels::AllTypesColumns >> IndexBuildTest::RejectsDropIndex >> KikimrIcGateway::TestSecretsExistingValidation >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-StrictAclCheck [GOOD] >> KqpScanArrowFormat::AggregateCountStar >> IndexBuildTest::DropIndex [GOOD] >> IndexBuildTest::RejectsDropIndex [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly >> DataShardSnapshots::UncommittedWriteRestartDuringCommit [GOOD] >> DataShardSnapshots::UncommittedWriteRestartDuringCommitThenBulkErase ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::MergeMainWithReboots[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [1:135:2157] sender: [1:136:2058] recipient: [1:111:2142] 2025-03-31T16:29:50.888900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:29:50.888985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:29:50.889026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:29:50.889065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:29:50.889118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 
2025-03-31T16:29:50.889146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:29:50.889228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:29:50.889341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:29:50.889652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:29:50.979436Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-03-31T16:29:50.979517Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:169:2058] recipient: [1:15:2062] 2025-03-31T16:29:50.996710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:29:50.996910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:29:50.997073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:29:51.001390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:29:51.001676Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:29:51.002340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:51.002577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:29:51.007716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:51.009329Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:29:51.009395Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:51.009532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:29:51.009581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:29:51.009621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:29:51.009760Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:210:2058] recipient: [1:208:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:210:2058] recipient: [1:208:2209] Leader for TabletID 72057594037968897 is [1:214:2213] sender: [1:215:2058] recipient: [1:208:2209] 2025-03-31T16:29:51.017154Z node 1 :HIVE INFO: [72057594037968897] started, 
primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:235:2058] recipient: [1:15:2062] 2025-03-31T16:29:51.157036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:29:51.157289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:51.157501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:29:51.157729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:29:51.157804Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:51.160200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:51.160344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:29:51.160546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:51.160604Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:29:51.160656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:29:51.160685Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:29:51.162607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:51.162670Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:29:51.162706Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:29:51.164572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:51.164618Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:51.164652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:51.164701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:29:51.174580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:29:51.176864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from 
tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:29:51.177058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:135:2157] sender: [1:250:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:29:51.178179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:51.178317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 135 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:29:51.178382Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:51.178631Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:29:51.178683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:51.178872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:29:51.178967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:29:51.181249Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:29:51.181293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:29:51.181489Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:51.181535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:202:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:29:51.181902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:51.181950Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:29:51.182046Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:29:51.182078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:29:51.182121Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:29:51.182177Z no ... 
ct: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false 
DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:33:23.431344Z node 107 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-31T16:33:23.431640Z node 107 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 328us result status StatusSuccess 2025-03-31T16:33:23.432487Z node 107 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 
ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:33:23.443533Z node 107 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409549:2][72075186233409546][107:848:2680] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-03-31T16:33:23.443636Z node 107 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409549:2][107:788:2680] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2025-03-31T16:33:23.443762Z node 107 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409549:2][72075186233409546][107:848:2680] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1743438803418711 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, 
LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1743438803418711 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1743438803418711 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2025-03-31T16:33:23.446398Z node 107 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409549:2][72075186233409546][107:848:2680] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3 2025-03-31T16:33:23.446528Z node 107 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409549:2][107:788:2680] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } |95.8%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/join/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/runtime/unittest >> KqpScanSpilling::SelfJoin [GOOD] Test command err: cwd: /home/runner/.ya/build/build_root/0l5c/000a5b/ydb/core/kqp/ut/runtime/test-results/unittest/testing_out_stuff/chunk3 Trying to start YDB, gRPC: 12452, MsgBus: 7470 2025-03-31T16:33:18.501275Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012622421365046:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:18.501363Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a5b/r3tmp/tmpd71qx3/pdisk_1.dat 2025-03-31T16:33:18.906919Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:33:18.910987Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:18.911130Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:18.913895Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12452, node 1 2025-03-31T16:33:19.061687Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:19.061711Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:19.061722Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:19.061832Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7470 TClient is connected to server localhost:7470 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:33:19.718522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:19.740642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:19.861768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:20.004560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:20.067012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:21.414129Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012635306268718:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:21.414233Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:21.685406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:33:21.708897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:33:21.738706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:33:21.803958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:33:21.831398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:33:21.860119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:33:21.894684Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012635306269230:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:21.894831Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:21.895131Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012635306269235:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:21.899193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:33:21.908388Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012635306269237:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:33:21.994115Z node 1 :TX_PROXY ERROR: Actor# [1:7488012635306269290:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:33:22.875042Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:148;event=channel_info;ch_size=50;ch_count=1;ch_limit=50;inputs=0;input_channels_count=0; 2025-03-31T16:33:22.875212Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7488012639601236899:2496], TxId: 281474976710671, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=NjQ4M2E4N2MtNDEyNWExOTctM2YzMzViODItNzM5ZmNkNTg=. TraceId : 01jqpgsvvx6fby6tbw0j70emk0. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Start compute actor [1:7488012639601236899:2496], task: 1 2025-03-31T16:33:22.875234Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7488012639601236899:2496], TxId: 281474976710671, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=NjQ4M2E4N2MtNDEyNWExOTctM2YzMzViODItNzM5ZmNkNTg=. TraceId : 01jqpgsvvx6fby6tbw0j70emk0. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Set execution timeout 299.938784s 2025-03-31T16:33:22.878426Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7488012639601236899:2496], TxId: 281474976710671, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=NjQ4M2E4N2MtNDEyNWExOTctM2YzMzViODItNzM5ZmNkNTg=. TraceId : 01jqpgsvvx6fby6tbw0j70emk0. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Create sink for output 0 { Sink { Type: "KqpTableSink" Settings { type_url: "type.googleapis.com/NKikimrKqp.TKqpTableSinkSettings" value: "\032\036\n\016/Root/KeyValue\020\200\202\224\204\200\200\200\200\001\030\006(\001\"\t\n\003Key\020\001 \004*\t\n\003Key\020\001 \004*\014\n\005Value\020\002 \201 0\217\200\200\200\200\200@8\001H\000R\022\t\237\033\021\221\322\303\352g\021\300\t\000\000\001\000\020\000X\000`\000h\000h\001x\000" } } } 2025-03-31T16:33:22.878545Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7488012639601236899:2496], TxId: 281474976710671, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=NjQ4M2E4N2MtNDEyNWExOTctM2YzMzViODItNzM5ZmNkNTg=. TraceId : 01jqpgsvvx6fby6tbw0j70emk0. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646926 2025-03-31T16:33:22.878596Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7488012639601236899:2496], TxId: 281474976710671, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=NjQ4M2E4N2MtNDEyNWExOTctM2YzMzViODItNzM5ZmNkNTg=. TraceId : 01jqpgsvvx6fby6tbw0j70emk0. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Received channels info: 2025-03-31T16:33:22.878655Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7488012639601236899:2496], TxId: 281474976710671, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=NjQ4M2E4N2MtNDEyNWExOTctM2YzMzViODItNzM5ZmNkNTg=. TraceId : 01jqpgsvvx6fby6tbw0j70emk0. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. About to drain async output 0. 
FreeSpace: 67108864, allowedOvercommit: 0, toSend: 67108864, finished: 0 2025-03-31T16:33:22.878762Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710671, task: 1. Add data: 32 / 32 2025-03-31T16:33:22.878800Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710671, task: 1. Send data=32, closed=1, bufferActorId=[1:7488012639601236895:2496] 2025-03-31T16:33:22.878832Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7488012639601236899:2496], TxId: 281474976710671, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=NjQ4M2E4N2MtNDEyNWExOTctM2YzMzViODItNzM5ZmNkNTg=. TraceId : 01jqpgsvvx6fby6tbw0j70emk0. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Drain async output 0. Free space decreased: -9223372036787666944, sent data from buffer: 32 2025-03-31T16:33:22.878848Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710671, task: 1. Tasks execution finished 2025-03-31T16:33:22.878857Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7488012639601236899:2496], TxId: 281474976710671, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=NjQ4M2E4N2MtNDEyNWExOTctM2YzMzViODItNzM5ZmNkNTg=. TraceId : 01jqpgsvvx6fby6tbw0j70emk0. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. Po ... 496:2550], TxId: 281474976710682, task: 4. Ctx: { TraceId : 01jqpgswdq7zw3cyzym1w4bq61. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZjJkNjc0ZjUtOWNjNjdjMWItMjY1NWE1Y2QtYTU2OWQ5Mjg=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-03-31T16:33:23.923859Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7488012643896204496:2550], TxId: 281474976710682, task: 4. Ctx: { TraceId : 01jqpgswdq7zw3cyzym1w4bq61. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZjJkNjc0ZjUtOWNjNjdjMWItMjY1NWE1Y2QtYTU2OWQ5Mjg=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-03-31T16:33:23.923926Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7488012643896204496:2550], TxId: 281474976710682, task: 4. Ctx: { TraceId : 01jqpgswdq7zw3cyzym1w4bq61. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZjJkNjc0ZjUtOWNjNjdjMWItMjY1NWE1Y2QtYTU2OWQ5Mjg=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-03-31T16:33:23.924198Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7488012643896204495:2549], TxId: 281474976710682, task: 3. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZjJkNjc0ZjUtOWNjNjdjMWItMjY1NWE1Y2QtYTU2OWQ5Mjg=. TraceId : 01jqpgswdq7zw3cyzym1w4bq61. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646927 2025-03-31T16:33:23.924222Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7488012643896204495:2549], TxId: 281474976710682, task: 3. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZjJkNjc0ZjUtOWNjNjdjMWItMjY1NWE1Y2QtYTU2OWQ5Mjg=. TraceId : 01jqpgswdq7zw3cyzym1w4bq61. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646922 2025-03-31T16:33:23.924256Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7488012643896204495:2549], TxId: 281474976710682, task: 3. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZjJkNjc0ZjUtOWNjNjdjMWItMjY1NWE1Y2QtYTU2OWQ5Mjg=. TraceId : 01jqpgswdq7zw3cyzym1w4bq61. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. 
Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-03-31T16:33:23.924411Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7488012643896204495:2549], TxId: 281474976710682, task: 3. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZjJkNjc0ZjUtOWNjNjdjMWItMjY1NWE1Y2QtYTU2OWQ5Mjg=. TraceId : 01jqpgswdq7zw3cyzym1w4bq61. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Send stats to executor actor [1:7488012643896204487:2541] TaskId: 3 Stats: CpuTimeUs: 35373 DurationUs: 20000 Tasks { TaskId: 3 StageId: 2 CpuTimeUs: 31990 FinishTimeMs: 1743438803924 InputRows: 20 InputBytes: 560 OutputRows: 10 OutputBytes: 500 ComputeCpuTimeUs: 13125 BuildCpuTimeUs: 18865 WaitOutputTimeUs: 5186 WaitInputTimeUs: 173 HostName: "ghrun-tuhoqst4xe" NodeId: 1 StartTimeMs: 1743438803904 CreateTimeMs: 1743438803882 } MaxMemoryUsage: 104857600 2025-03-31T16:33:23.924652Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7488012643896204496:2550], TxId: 281474976710682, task: 4. Ctx: { TraceId : 01jqpgswdq7zw3cyzym1w4bq61. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZjJkNjc0ZjUtOWNjNjdjMWItMjY1NWE1Y2QtYTU2OWQ5Mjg=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646923 2025-03-31T16:33:23.924679Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710682, task: 4. Finish input channelId: 4, from: [1:7488012643896204495:2549] 2025-03-31T16:33:23.924702Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7488012643896204496:2550], TxId: 281474976710682, task: 4. Ctx: { TraceId : 01jqpgswdq7zw3cyzym1w4bq61. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZjJkNjc0ZjUtOWNjNjdjMWItMjY1NWE1Y2QtYTU2OWQ5Mjg=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-03-31T16:33:23.924790Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7488012643896204495:2549], TxId: 281474976710682, task: 3. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZjJkNjc0ZjUtOWNjNjdjMWItMjY1NWE1Y2QtYTU2OWQ5Mjg=. TraceId : 01jqpgswdq7zw3cyzym1w4bq61. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646927 2025-03-31T16:33:23.924822Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7488012643896204495:2549], TxId: 281474976710682, task: 3. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZjJkNjc0ZjUtOWNjNjdjMWItMjY1NWE1Y2QtYTU2OWQ5Mjg=. TraceId : 01jqpgswdq7zw3cyzym1w4bq61. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646922 2025-03-31T16:33:23.924854Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710682, task: 3. Tasks execution finished, don't wait for ack delivery in input channelId: 2, seqNo: [10] 2025-03-31T16:33:23.924865Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710682, task: 3. Tasks execution finished, don't wait for ack delivery in input channelId: 3, seqNo: [10] 2025-03-31T16:33:23.924873Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710682, task: 3. Tasks execution finished 2025-03-31T16:33:23.924885Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7488012643896204495:2549], TxId: 281474976710682, task: 3. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZjJkNjc0ZjUtOWNjNjdjMWItMjY1NWE1Y2QtYTU2OWQ5Mjg=. TraceId : 01jqpgswdq7zw3cyzym1w4bq61. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Compute state finished. 
All channels and sinks finished 2025-03-31T16:33:23.924958Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710682, task: 3. pass away 2025-03-31T16:33:23.925045Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976710682;task_id=3;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-03-31T16:33:23.925290Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7488012643896204496:2550], TxId: 281474976710682, task: 4. Ctx: { TraceId : 01jqpgswdq7zw3cyzym1w4bq61. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZjJkNjc0ZjUtOWNjNjdjMWItMjY1NWE1Y2QtYTU2OWQ5Mjg=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-03-31T16:33:23.925365Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7488012643896204496:2550], TxId: 281474976710682, task: 4. Ctx: { TraceId : 01jqpgswdq7zw3cyzym1w4bq61. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZjJkNjc0ZjUtOWNjNjdjMWItMjY1NWE1Y2QtYTU2OWQ5Mjg=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-03-31T16:33:23.925899Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7488012643896204496:2550], TxId: 281474976710682, task: 4. Ctx: { TraceId : 01jqpgswdq7zw3cyzym1w4bq61. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZjJkNjc0ZjUtOWNjNjdjMWItMjY1NWE1Y2QtYTU2OWQ5Mjg=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-03-31T16:33:23.925951Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7488012643896204496:2550], TxId: 281474976710682, task: 4. Ctx: { TraceId : 01jqpgswdq7zw3cyzym1w4bq61. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZjJkNjc0ZjUtOWNjNjdjMWItMjY1NWE1Y2QtYTU2OWQ5Mjg=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-03-31T16:33:23.926040Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7488012643896204496:2550], TxId: 281474976710682, task: 4. Ctx: { TraceId : 01jqpgswdq7zw3cyzym1w4bq61. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZjJkNjc0ZjUtOWNjNjdjMWItMjY1NWE1Y2QtYTU2OWQ5Mjg=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-03-31T16:33:23.926100Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7488012643896204496:2550], TxId: 281474976710682, task: 4. Ctx: { TraceId : 01jqpgswdq7zw3cyzym1w4bq61. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZjJkNjc0ZjUtOWNjNjdjMWItMjY1NWE1Y2QtYTU2OWQ5Mjg=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-03-31T16:33:23.926347Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7488012643896204496:2550], TxId: 281474976710682, task: 4. Ctx: { TraceId : 01jqpgswdq7zw3cyzym1w4bq61. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZjJkNjc0ZjUtOWNjNjdjMWItMjY1NWE1Y2QtYTU2OWQ5Mjg=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-03-31T16:33:23.926372Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7488012643896204496:2550], TxId: 281474976710682, task: 4. Ctx: { TraceId : 01jqpgswdq7zw3cyzym1w4bq61. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZjJkNjc0ZjUtOWNjNjdjMWItMjY1NWE1Y2QtYTU2OWQ5Mjg=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-03-31T16:33:23.926521Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7488012643896204496:2550], TxId: 281474976710682, task: 4. Ctx: { TraceId : 01jqpgswdq7zw3cyzym1w4bq61. CustomerSuppliedId : . 
SessionId : ydb://session/3?node_id=1&id=ZjJkNjc0ZjUtOWNjNjdjMWItMjY1NWE1Y2QtYTU2OWQ5Mjg=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-03-31T16:33:23.926604Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7488012643896204496:2550], TxId: 281474976710682, task: 4. Ctx: { TraceId : 01jqpgswdq7zw3cyzym1w4bq61. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZjJkNjc0ZjUtOWNjNjdjMWItMjY1NWE1Y2QtYTU2OWQ5Mjg=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-03-31T16:33:23.926945Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7488012643896204496:2550], TxId: 281474976710682, task: 4. Ctx: { TraceId : 01jqpgswdq7zw3cyzym1w4bq61. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZjJkNjc0ZjUtOWNjNjdjMWItMjY1NWE1Y2QtYTU2OWQ5Mjg=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-03-31T16:33:23.927003Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710682, task: 4. Tasks execution finished, don't wait for ack delivery in input channelId: 4, seqNo: [11] 2025-03-31T16:33:23.927019Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710682, task: 4. Tasks execution finished 2025-03-31T16:33:23.927035Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7488012643896204496:2550], TxId: 281474976710682, task: 4. Ctx: { TraceId : 01jqpgswdq7zw3cyzym1w4bq61. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZjJkNjc0ZjUtOWNjNjdjMWItMjY1NWE1Y2QtYTU2OWQ5Mjg=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Compute state finished. All channels and sinks finished 2025-03-31T16:33:23.927124Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710682, task: 4. pass away 2025-03-31T16:33:23.927215Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976710682;task_id=4;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-03-31T16:33:23.930735Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438803904, txId: 281474976710681] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/runtime/unittest >> KqpScanSpilling::SelfJoinQueryService [GOOD] Test command err: cwd: /home/runner/.ya/build/build_root/0l5c/000a0b/ydb/core/kqp/ut/runtime/test-results/unittest/testing_out_stuff/chunk4 Trying to start YDB, gRPC: 19624, MsgBus: 20439 2025-03-31T16:33:18.507312Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012620695715931:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:18.507388Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a0b/r3tmp/tmp7ZR4nM/pdisk_1.dat 2025-03-31T16:33:18.886803Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:33:18.901699Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:18.901938Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:18.904726Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19624, node 1 2025-03-31T16:33:19.061603Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:19.061629Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:19.061636Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:19.061769Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20439 TClient is connected to server localhost:20439 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:33:19.686050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:19.719948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:33:19.858279Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:19.994625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:33:20.053211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:33:21.223150Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012633580619606:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:21.223285Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:21.685814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:33:21.709701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:33:21.736035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:33:21.762985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:33:21.791523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:33:21.820749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:33:21.875931Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012633580620117:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:21.876010Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:21.876877Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012633580620122:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:21.881708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:33:21.895113Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012633580620124:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:33:21.963868Z node 1 :TX_PROXY ERROR: Actor# [1:7488012633580620178:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:33:23.501999Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488012620695715931:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:23.502079Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ( (let $1 (KqpTable '"/Root/KeyValue" '"72057594046644480:6" '"" '1)) (let $2 (KqpRowsSourceSettings $1 '('"Key" '"Value") '() (Void) '())) (let $3 (OptionalType (DataType 'Uint64))) (let $4 (DataType 'String)) (let $5 (OptionalType $4)) (let $6 (StructType '('"Key" $3) '('"Value" $5))) (let $7 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $2)) (lambda '($21) (block '( (let $22 (lambda '($23) (block '( (let $24 (VariantType (TupleType $6 $6))) (let $25 (Variant $23 '0 $24)) (let $26 (Variant $23 '1 $24)) (return $25 $26) )))) (return (FromFlow (MultiMap (ToFlow $21) $22))) ))) '('('"_logical_id" '706) '('"_id" '"d5c5a0bd-e9104af5-cf9d92c-f12125c0")))) (let $8 (DqCnUnionAll (TDqOutput $7 '1))) (let $9 '('('"_logical_id" '551) '('"_id" '"c23acb27-8c0b91ab-747358ba-ed08f694") '('"_wide_channels" $6))) (let $10 (DqPhyStage '($8) (lambda '($27) (block '( (let $28 (lambda '($29) (Member $29 '"Key") (Member $29 '"Value"))) (return (FromFlow (ExpandMap (ToFlow $27) $28))) ))) $9)) (let $11 (DqCnMap (TDqOutput $7 '0))) (let $12 (DqCnBroadcast (TDqOutput $10 '0))) (let $13 (StructType '('"t1.Key" $3) '('"t1.Value" $5) '('"t2.Key" $3) '('"t2.Value" $5))) (let $14 '('('"_logical_id" '621) '('"_id" '"cfcc1928-77431b98-9878a7f4-8d00be59") '('"_wide_channels" $13))) (let $15 (DqPhyStage '($11 $12) (lambda '($30 $31) (block '( (let $32 '('Many 'Hashed 'Compact)) (let $33 (SqueezeToDict (NarrowFlatMap (WideFilter (ToFlow $31) (lambda '($36 $37) (Exists $37))) (lambda '($38 $39) (IfPresent $39 (lambda '($40) (Just '($40 (AsStruct '('"Key" $38) '('"Value" $39))))) (Nothing (OptionalType (TupleType $4 $6)))))) (lambda '($41) (Nth $41 '0)) (lambda '($42) (Nth $42 '1)) $32)) (let $34 (Sort (FlatMap $33 (lambda '($43) (block '( (let $44 '('"Value")) (let $45 '('"Key" '"t1.Key" '"Value" '"t1.Value")) (let $46 '('"Key" '"t2.Key" '"Value" '"t2.Value")) (return (MapJoinCore (OrderedFilter (ToFlow $30) (lambda '($47) (Exists (Member $47 '"Value")))) $43 'Inner $44 $44 $45 $46 '('"t1.Value") '('"t2.Value"))) )))) (Bool 'true) (lambda '($48) (Member $48 '"t1.Key")))) (let $35 (lambda '($49) (Member $49 '"t1.Key") (Member $49 '"t1.Value") (Member $49 '"t2.Key") (Member $49 '"t2.Value"))) (return (FromFlow (ExpandMap $34 $35))) ))) $14)) (let $16 (DqCnMerge (TDqOutput $15 '0) '('('0 '"Asc")))) (let $17 (DqPhyStage '($16) (lambda '($50) (FromFlow (NarrowMap (ToFlow $50) (lambda '($51 $52 $53 $54) (AsStruct '('"t1.Key" $51) '('"t1.Value" $52) '('"t2.Key" $53) '('"t2.Value" $54)))))) '('('"_logical_id" '633) '('"_id" '"828e9f12-951451ee-b5e28fd2-3c025245")))) (let $18 '($7 $10 $15 $17)) (let $19 '('"t1.Key" '"t1.Value" '"t2.Key" '"t2.Value")) (let $20 (DqCnResult (TDqOutput $17 '0) $19)) (return 
(KqpPhysicalQuery '((KqpPhysicalTx $18 '($20) '() '('('"type" '"generic")))) '((KqpTxResultBinding (ListType $13) '0 '0)) '('('"type" '"query")))) ) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> VectorIndexBuildTest::VectorIndexDescriptionIsPersisted [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:33:23.302953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:33:23.303063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:33:23.303122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:33:23.303161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:33:23.304419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:33:23.304493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:33:23.304582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:33:23.304679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:33:23.305945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:33:23.380812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:33:23.380906Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:33:23.387042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:33:23.387185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:33:23.387366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:33:23.394926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:33:23.396254Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:33:23.400454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:23.401663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:33:23.415332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:23.430302Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:33:23.430412Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at 
schemeshard: 72057594046678944 2025-03-31T16:33:23.430608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:33:23.430659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:33:23.430749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:33:23.430936Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:33:23.439439Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:33:23.562982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:33:23.565009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:23.566124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:33:23.567610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:33:23.567710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:23.570576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:23.570754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:33:23.570942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:23.571003Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:33:23.571031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:33:23.571064Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:33:23.572803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:23.572875Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:33:23.572910Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:33:23.574375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:23.574418Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:23.574475Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:23.574531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:33:23.578360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:33:23.580343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:33:23.580551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:33:23.583268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:23.583426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:33:23.583513Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:23.584884Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:33:23.584943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:23.585915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:33:23.585999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:33:23.588524Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:33:23.588587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:33:23.588797Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:23.588851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:33:23.589118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:23.589282Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:33:23.589400Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:33:23.589439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 
2025-03-31T16:33:23.589480Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:33:23.589559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:23.589606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:33:23.589649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:23.589714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:33:23.589752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:33:23.589821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:33:23.589868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:33:23.589903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:33:23.592095Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:33:23.592284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:33:23.592329Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... athId: 3] was 2 2025-03-31T16:33:26.860374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:26.860605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:26.860648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:26.860881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:26.860964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:26.861072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:26.861221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:26.861275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:26.861544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:26.861778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:26.861929Z node 1 :BUILD_INDEX DEBUG: AddShardStatus id# 102 shard 72057594046678944:11 range { From: -inf, To: inf } 2025-03-31T16:33:26.861984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:26.862019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:26.862056Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-03-31T16:33:26.866416Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-03-31T16:33:26.866496Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: by_embedding, IndexColumn: embedding, DataColumns: covered, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [0:0:0], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976720765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976720766, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-31T16:33:26.866530Z node 1 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 0 2025-03-31T16:33:26.870188Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:33:26.870265Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:26.870733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:33:26.870784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:33:26.870826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:33:26.871477Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:4163:5624] sender: [1:4225:2058] recipient: [1:15:2062] 2025-03-31T16:33:26.914131Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/vectors/by_embedding" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-31T16:33:26.914322Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/vectors/by_embedding" took 227us result status StatusSuccess 2025-03-31T16:33:26.915076Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/vectors/by_embedding" PathDescription { Self { Name: "by_embedding" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 TableIndexVersion: 2 } ChildrenExist: true } Children { Name: 
"indexImplLevelTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Children { Name: "indexImplPostingTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "by_embedding" LocalPathId: 3 Type: EIndexTypeGlobalVectorKmeansTree State: EIndexStateReady KeyColumnNames: "embedding" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataColumnNames: "covered" DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 
DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 3 MaxPartitionsCount: 3 } } } IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 3 MaxPartitionsCount: 3 } } } VectorIndexKmeansTreeDescription { Settings { settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 1024 } clusters: 4 levels: 5 } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNotNullColumns::JoinLeftTableWithNotNullPk-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 25707, MsgBus: 27737 2025-03-31T16:32:50.726750Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012502176415957:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:50.726797Z node 1 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00096c/r3tmp/tmp9YcL1D/pdisk_1.dat 2025-03-31T16:32:51.045405Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25707, node 1 2025-03-31T16:32:51.098844Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:51.098959Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:32:51.100633Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:32:51.114777Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:32:51.114803Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:32:51.114814Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:32:51.114947Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27737 TClient is connected to server localhost:27737 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:32:51.621739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:53.622347Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012515061318503:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:53.622490Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:53.679136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:32:53.791844Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012515061318608:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:53.791927Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:53.792044Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012515061318613:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:53.795648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-31T16:32:53.805163Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012515061318615:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-31T16:32:53.879423Z node 1 :TX_PROXY ERROR: Actor# [1:7488012515061318666:2396] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:32:54.050579Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488012519356286017:2328], TxId: 281474976710662, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jqpgrzq2anb20a0y1dqhme1x. SessionId : ydb://session/3?node_id=1&id=YTBhZDBmZTMtMjYzOTc2MDUtYjFiYzBjNTAtMjQyNjliOTA=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: BAD_REQUEST KIKIMR_BAD_COLUMN_TYPE: {
<main>: Error: Tried to insert NULL value into NOT NULL column: key, code: 2031 }. 2025-03-31T16:32:54.051181Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTBhZDBmZTMtMjYzOTc2MDUtYjFiYzBjNTAtMjQyNjliOTA=, ActorId: [1:7488012515061318500:2328], ActorState: ExecuteState, TraceId: 01jqpgrzq2anb20a0y1dqhme1x, Create QueryResponse for error on request, msg: 2025-03-31T16:32:54.128017Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488012519356286034:2328], TxId: 281474976710663, task: 1. Ctx: { TraceId : 01jqpgrzsa7harq9nr2x3nnjh7. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=YTBhZDBmZTMtMjYzOTc2MDUtYjFiYzBjNTAtMjQyNjliOTA=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: BAD_REQUEST KIKIMR_BAD_COLUMN_TYPE: {
<main>: Error: Tried to insert NULL value into NOT NULL column: key, code: 2031 }. 2025-03-31T16:32:54.128404Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTBhZDBmZTMtMjYzOTc2MDUtYjFiYzBjNTAtMjQyNjliOTA=, ActorId: [1:7488012515061318500:2328], ActorState: ExecuteState, TraceId: 01jqpgrzsa7harq9nr2x3nnjh7, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 1179, MsgBus: 8736 2025-03-31T16:32:54.821853Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012520562727110:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:54.821947Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00096c/r3tmp/tmpIJKDxM/pdisk_1.dat 2025-03-31T16:32:54.908626Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1179, node 2 2025-03-31T16:32:54.956684Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:54.956777Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:32:54.958293Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:32:54.976310Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:32:54.976340Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:32:54.976348Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:32:54.976461Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8736 TClient is connected to server localhost:8736 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:32:55.373026Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:57.828361Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012533447629655:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:57.828473Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:57.842042Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:32:57.898404Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012533447629757:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:57.898517Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:57.898916Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012533447629762:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:57.903303Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025- ... 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:33:14.932701Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:33:14.965145Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:33:15.007439Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:33:15.037044Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:33:15.076917Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7488012609427123075:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:15.076999Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7488012609427123080:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:15.077003Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:15.080321Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:33:15.089473Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7488012609427123082:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:33:15.150833Z node 5 :TX_PROXY ERROR: Actor# [5:7488012609427123135:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:33:16.008018Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:33:16.136142Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-31T16:33:16.345478Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7488012592247251596:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:16.345553Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 5024, MsgBus: 7234 2025-03-31T16:33:18.138405Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7488012623951980267:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:18.138474Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00096c/r3tmp/tmpk4nTKz/pdisk_1.dat 2025-03-31T16:33:18.228090Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5024, node 6 2025-03-31T16:33:18.268510Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:18.268619Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:18.270471Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:33:18.302890Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:18.302917Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:18.302925Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:18.303062Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7234 TClient is connected to server localhost:7234 WaitRootIsUp 'Root'... 
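A note on the recurring NOT_FOUND warnings throughout this trace: on a freshly created database the workload service first looks up the built-in resource pool at /Root/.metadata/workload_manager/pools/default, logs "Resource pool default not found or you don't have access permissions", and then creates the pool itself; the later TX_PROXY "path exist, request accepts it" error is the benign race between concurrent TPoolCreatorActor retries, which the request explicitly tolerates. A minimal YQL sketch of the equivalent explicit operation follows; the pool name and setting are illustrative assumptions, not values taken from this log:

    -- Hedged sketch: roughly what the workload service does automatically for
    -- the pool named "default". The name and limit below are assumed examples.
    CREATE RESOURCE POOL example_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10
    );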
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-31T16:33:18.860131Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:33:18.882407Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:18.940098Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:19.136277Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:19.222549Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:21.701097Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488012636836883937:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:21.701207Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:21.745594Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:33:21.779813Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:33:21.813636Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:33:21.845855Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:33:21.882204Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:33:21.919937Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:33:21.963141Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488012636836884445:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:21.963251Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:21.963300Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488012636836884450:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:21.967577Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:33:21.979188Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7488012636836884452:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:33:22.059518Z node 6 :TX_PROXY ERROR: Actor# [6:7488012641131851802:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:33:23.028655Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:33:23.138641Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7488012623951980267:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:23.138722Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:33:23.167863Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpRanges::CastKeyBounds [GOOD] Test command err: Trying to start YDB, gRPC: 25900, MsgBus: 27782 2025-03-31T16:32:55.216898Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012525394795507:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:55.217022Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000918/r3tmp/tmpnXEKzU/pdisk_1.dat 2025-03-31T16:32:55.528672Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25900, node 1 2025-03-31T16:32:55.605850Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:55.605966Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:32:55.607600Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:32:55.611211Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:32:55.611244Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:32:55.611265Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:32:55.611384Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27782 TClient is connected to server localhost:27782 WaitRootIsUp 'Root'... 
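The KQP_COMPILE_ACTOR errors a few lines below (codes 2029 and 2031) come from writes against a table whose NOT NULL primary key is either omitted or set to NULL: omitting the key yields "Missing key column in input" (2029), while an explicit NULL fails the Struct<'Key':Null,'Value':String> to Struct<'Key':Uint64,'Value':String?> conversion (2031). A minimal hedged repro in YQL, with the table shape inferred from that signature rather than from the test's actual DDL:

    -- Sketch only: column types inferred from the error messages in this log.
    CREATE TABLE TestUpsertNotNullPk (
        Key Uint64 NOT NULL,
        Value String,
        PRIMARY KEY (Key)
    );
    UPSERT INTO TestUpsertNotNullPk (Value) VALUES ("no key");       -- code 2029: key column missing
    UPSERT INTO TestUpsertNotNullPk (Key, Value) VALUES (NULL, "x"); -- code 2031: NULL -> Uint64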
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:32:56.069880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:58.074070Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012538279698056:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:58.074165Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:58.307673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:32:58.409721Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012538279698160:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:58.409806Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:58.409974Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012538279698165:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:58.414193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-31T16:32:58.423655Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012538279698167:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-31T16:32:58.511478Z node 1 :TX_PROXY ERROR: Actor# [1:7488012538279698218:2395] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:32:58.660919Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488012538279698261:2355], status: PRECONDITION_FAILED, issues:
: Error: Type annotation, code: 1030
:1:13: Error: At function: KiWriteTable!
:1:13: Error: Missing key column in input: Key for table: /Root/TestUpsertNotNullPk, code: 2029 2025-03-31T16:32:58.661133Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NGNlYjJmZWEtOWUzY2UyYTgtY2ZmNDNhMWQtODE3OTRlYjY=, ActorId: [1:7488012538279698038:2328], ActorState: ExecuteState, TraceId: 01jqpgs48n1hgcbkwedfahstr8, ReplyQueryCompileError, status PRECONDITION_FAILED remove tx with tx_id: 2025-03-31T16:32:58.684388Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488012538279698270:2359], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:13: Error: At function: KiWriteTable!
:1:47: Error: Failed to convert type: Struct<'Key':Null,'Value':String> to Struct<'Key':Uint64,'Value':String?>
:1:47: Error: Failed to convert 'Key': Null to Uint64
:1:47: Error: Failed to convert input columns types to scheme types, code: 2031 2025-03-31T16:32:58.685658Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NGNlYjJmZWEtOWUzY2UyYTgtY2ZmNDNhMWQtODE3OTRlYjY=, ActorId: [1:7488012538279698038:2328], ActorState: ExecuteState, TraceId: 01jqpgs49bf1w8wgy2eyjthjj6, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 23527, MsgBus: 11936 2025-03-31T16:32:59.355719Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012540805590535:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:59.355878Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000918/r3tmp/tmpEzCGbK/pdisk_1.dat 2025-03-31T16:32:59.440666Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23527, node 2 2025-03-31T16:32:59.488660Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:59.488792Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:32:59.499357Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:32:59.499379Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:32:59.499386Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:32:59.499491Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:32:59.517592Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11936 TClient is connected to server localhost:11936 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:32:59.955791Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:02.489570Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012553690493082:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:02.489655Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:02.499251Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:33:02.534913Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012553690493182:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:02.534988Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:02.535127Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012553690493187:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:02.539220Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-31T16:33:02.552421Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] ... /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:17.147505Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7488012617118209729:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:17.147526Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:17.151333Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:33:17.162134Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7488012617118209731:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:33:17.230405Z node 5 :TX_PROXY ERROR: Actor# [5:7488012617118209784:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:33:18.242244Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7488012599938338252:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:18.242326Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["Join2"],"PlanNodeId":1,"Operators":[{"Inputs":[],"Path":"\/Root\/Join2","Name":"Delete","Table":"Join2","SinkType":"KqpTableSink"}],"Node Type":"Stage-Sink","Stats":{"ComputeNodes":[{"Tasks":[{"EgressRows":3,"NodeId":5,"FinishTimeMs":1743438798431,"EgressBytes":39,"TaskId":1,"Host":"ghrun-tuhoqst4xe","ComputeTimeUs":287}],"PeakMemoryUsageBytes":196608,"CpuTimeUs":2630}],"UseLlvm":"undefined","MaxMemoryUsage":{"Count":1,"Sum":1048576,"Max":1048576,"Min":1048576,"History":[1,1048576]},"Tasks":1,"FinishedTasks":1,"Egress":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":3,"Max":3,"Min":3},"Bytes":{"Count":1,"Sum":39,"Max":39,"Min":39,"History":[1,39]}},"Name":"KqpTableSink","Egress":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Splits":{"Count":1,"Sum":1,"Max":1,"Min":1},"LastMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"FirstMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"Bytes":{"Count":1,"Sum":39,"Max":39,"Min":39}},"Push":{"Chunks":{"Count":1,"Sum":3,"Max":3,"Min":3},"Rows":{"Count":1,"Sum":3,"Max":3,"Min":3},"Bytes":{"Count":1,"Sum":39,"Max":39,"Min":39,"History":[1,39]}}}],"PhysicalStageId":0,"StageDurationUs":0,"EgressRows":{"Count":1,"Sum":3,"Max":3,"Min":3},"BaseTimeMs":1743438798430,"EgressBytes":{"Count":1,"Sum":39,"Max":39,"Min":39},"CpuTimeUs":{"Count":1,"Sum":2198,"Max":2198,"Min":2198,"History":[1,2198]}}}],"Node Type":"Sink"}],"Node Type":"Query","Stats":{"Compilation":{"FromCache":false,"DurationUs":331648,"CpuTimeUs":328408},"ProcessCpuTimeUs":470,"TotalDurationUs":342473,"ResourcePoolId":"default","QueuedTimeUs":472},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Operators":[{"A-SelfCpu":2.198,"A-Cpu":2.198,"Path":"\/Root\/Join2","Name":"Delete","Table":"Join2","SinkType":"KqpTableSink"}],"Node Type":"Delete"}],"Node Type":"Sink"}],"Node Type":"Query","PlanNodeType":"Query"}} Trying to start YDB, gRPC: 26082, MsgBus: 5021 2025-03-31T16:33:19.374591Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7488012625807512860:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:19.374669Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000918/r3tmp/tmp59FuWI/pdisk_1.dat 2025-03-31T16:33:19.504863Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:33:19.536600Z node 6 :HIVE WARN: 
HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:19.536712Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:19.538368Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26082, node 6 2025-03-31T16:33:19.582504Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:19.582532Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:19.582539Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:19.582641Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5021 TClient is connected to server localhost:5021 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:33:20.073225Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:20.090683Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:33:20.163790Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-31T16:33:20.332935Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:20.397159Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:23.047456Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488012642987383844:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:23.047561Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:23.106050Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:33:23.142798Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:33:23.172061Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:33:23.201103Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:33:23.232944Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:33:23.269205Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:33:23.310702Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488012642987384351:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:23.310795Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488012642987384356:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:23.310820Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:23.314584Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:33:23.327121Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7488012642987384358:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:33:23.414094Z node 6 :TX_PROXY ERROR: Actor# [6:7488012642987384414:3446] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:33:24.374951Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7488012625807512860:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:24.375028Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::DropIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:33:24.594482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:33:24.594564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:33:24.594595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:33:24.594621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:33:24.594656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:33:24.594687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:33:24.594729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:33:24.594810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:33:24.595131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:33:24.662282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:33:24.662336Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:33:24.666077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:33:24.666151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:33:24.666279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:33:24.670371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:33:24.670555Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:33:24.671046Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:24.671246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:33:24.672650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:24.673623Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:33:24.673669Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:24.673795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:33:24.673832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:33:24.673860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:33:24.673973Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:33:24.679141Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:33:24.767076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:33:24.767280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:24.767440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:33:24.767603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:33:24.767645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:24.769526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:24.769645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:33:24.769825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:24.769884Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:33:24.769912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:33:24.769936Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:33:24.771494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:24.771546Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:33:24.771576Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:33:24.772815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:24.772853Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:24.772890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:24.772937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:33:24.780707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:33:24.782215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:33:24.782351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:33:24.783119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:24.783228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:33:24.783279Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:24.783482Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:33:24.783521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:24.783642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:33:24.783708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:33:24.785158Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:33:24.785207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:33:24.785351Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 
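For orientation in these FLAT_TX_SCHEMESHARD traces: each suboperation walks a fixed state machine, visible in the "Change state for txid" lines as 2 -> 3 -> 128 -> 240 (TCreateParts, TConfigureParts, TPropose to the coordinator, TDone), and the TTxPublishToSchemeBoard / TEvUpdateAck pairs that follow are scheme-board publications raising each path's version. The unit test drives the schemeshard tablet directly, but the user-level operation behind IndexBuildTest::DropIndex is the ordinary YQL statement sketched below; the table and index names are illustrative assumptions, not taken from this log:

    -- Hedged equivalent of the schema change this test exercises.
    ALTER TABLE `/MyRoot/Table` DROP INDEX ValueIndex;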
2025-03-31T16:33:24.785382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:33:24.785538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:24.785575Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:33:24.785638Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:33:24.785662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:24.785688Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:33:24.785715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:24.785747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:33:24.785786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:24.785812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:33:24.785833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:33:24.785884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:33:24.785908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:33:24.785930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:33:24.787246Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:33:24.787340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:33:24.787368Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
09550, at schemeshard: 72057594046678944 2025-03-31T16:33:27.213480Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:0 129 -> 240 2025-03-31T16:33:27.213984Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:1, at schemeshard: 72057594046678944 2025-03-31T16:33:27.214020Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 105:1 ProgressState 2025-03-31T16:33:27.214123Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:1 progress is 1/3 2025-03-31T16:33:27.214161Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/3 2025-03-31T16:33:27.214199Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:1 progress is 1/3 2025-03-31T16:33:27.214230Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/3 2025-03-31T16:33:27.214271Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/3, is published: false 2025-03-31T16:33:27.214477Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:2, at schemeshard: 72057594046678944 2025-03-31T16:33:27.214508Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 105:2 ProgressState at tablet: 72057594046678944 2025-03-31T16:33:27.214558Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 105:2, at schemeshard: 72057594046678944 2025-03-31T16:33:27.214589Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 105:2, datashard: 72075186233409549, at schemeshard: 72057594046678944 2025-03-31T16:33:27.214615Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:2 129 -> 240 2025-03-31T16:33:27.215142Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-03-31T16:33:27.215240Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-03-31T16:33:27.215278Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2025-03-31T16:33:27.215320Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 18446744073709551615 2025-03-31T16:33:27.215363Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 5 2025-03-31T16:33:27.215868Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 8 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-03-31T16:33:27.215926Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 8 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-03-31T16:33:27.215949Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2025-03-31T16:33:27.215971Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 
72057594046678944, LocalPathId: 8], version: 18446744073709551615 2025-03-31T16:33:27.215996Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 3 2025-03-31T16:33:27.217426Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 105 2025-03-31T16:33:27.217494Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 105 2025-03-31T16:33:27.217518Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2025-03-31T16:33:27.217544Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 15 2025-03-31T16:33:27.217569Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:33:27.218598Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-03-31T16:33:27.218683Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-03-31T16:33:27.218707Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2025-03-31T16:33:27.219250Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 8 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-03-31T16:33:27.219313Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 8 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-03-31T16:33:27.219335Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2025-03-31T16:33:27.220431Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2025-03-31T16:33:27.220489Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 105:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:33:27.220732Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 4 2025-03-31T16:33:27.220860Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 2/3 2025-03-31T16:33:27.220899Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 2/3 2025-03-31T16:33:27.220937Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 2/3 2025-03-31T16:33:27.221037Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 2/3 2025-03-31T16:33:27.221075Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 2/3, is 
published: false 2025-03-31T16:33:27.221407Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 9 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-03-31T16:33:27.221471Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 9 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-03-31T16:33:27.221493Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2025-03-31T16:33:27.221519Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 9], version: 18446744073709551615 2025-03-31T16:33:27.221546Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 4 2025-03-31T16:33:27.221636Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 2/3, is published: true 2025-03-31T16:33:27.223542Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:2, at schemeshard: 72057594046678944 2025-03-31T16:33:27.223592Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 105:2 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:33:27.223792Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 3 2025-03-31T16:33:27.223886Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:2 progress is 3/3 2025-03-31T16:33:27.223913Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 3/3 2025-03-31T16:33:27.223948Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:2 progress is 3/3 2025-03-31T16:33:27.223968Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 3/3 2025-03-31T16:33:27.223994Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 3/3, is published: true 2025-03-31T16:33:27.224062Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:415:2372] message: TxId: 105 2025-03-31T16:33:27.224107Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 3/3 2025-03-31T16:33:27.224172Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2025-03-31T16:33:27.224207Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2025-03-31T16:33:27.224306Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2025-03-31T16:33:27.224346Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:1 2025-03-31T16:33:27.224366Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:1 2025-03-31T16:33:27.224394Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 2 2025-03-31T16:33:27.224414Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:2 2025-03-31T16:33:27.224431Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:2 2025-03-31T16:33:27.224467Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 2 2025-03-31T16:33:27.224962Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-03-31T16:33:27.225132Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-03-31T16:33:27.226325Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-03-31T16:33:27.226378Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-03-31T16:33:27.226423Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-03-31T16:33:27.228223Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-03-31T16:33:27.228396Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-03-31T16:33:27.228443Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:941:2865] TestWaitNotification: OK eventTxId 105 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::RejectsDropIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:33:24.801373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:33:24.801467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:33:24.801505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:33:24.801537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:33:24.801584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:33:24.801625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:33:24.801680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:33:24.801759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:33:24.802096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:33:24.890084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:33:24.890150Z node 1 :IMPORT WARN: Table profiles were not loaded 
2025-03-31T16:33:24.894626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:33:24.894721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:33:24.894871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:33:24.899459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:33:24.899644Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:33:24.900138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:24.900311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:33:24.901743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:24.902695Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:33:24.902739Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:24.902840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:33:24.902868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:33:24.902893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:33:24.902997Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:33:24.908419Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:33:25.044279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:33:25.044507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:25.044726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:33:25.044980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:33:25.045042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:25.047433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:25.047567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:33:25.047746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:25.047806Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:33:25.047835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:33:25.047862Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:33:25.049765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:25.049814Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:33:25.049845Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:33:25.051655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:25.051698Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:25.051747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:25.051788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:33:25.060590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:33:25.062672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:33:25.062843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:33:25.063817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:25.063945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:33:25.064000Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:25.064232Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:33:25.064272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:25.064408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:33:25.064501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:33:25.066452Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:33:25.066498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:33:25.066660Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:25.066697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:33:25.066908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:25.066948Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:33:25.067027Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:33:25.067053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:25.067109Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:33:25.067135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:25.067165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:33:25.067218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:25.067250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:33:25.067279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:33:25.067338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:33:25.067373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:33:25.067399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:33:25.069012Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:33:25.069114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:33:25.069161Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 107 at step: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72075186233409547 for txId: 107 at step: 5000004 2025-03-31T16:33:27.411405Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:27.411485Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 107 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 8589936749 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:33:27.411527Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TPropose operationId# 107:0 HandleReply TEvOperationPlan, step: 5000004, at schemeshard: 72057594046678944 2025-03-31T16:33:27.411577Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 128 -> 136 2025-03-31T16:33:27.414471Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2025-03-31T16:33:27.414541Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TWaitRenamedPathPublication operationId: 107:0 ProgressState, operation type: TxDropTable, at tablet# 72057594046678944 2025-03-31T16:33:27.414596Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TWaitRenamedPathPublication operationId: 107:0 ProgressState, no renaming has been detected for this operation 2025-03-31T16:33:27.414632Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 136 -> 137 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000004 2025-03-31T16:33:27.415996Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 107 Step: 5000004 OrderId: 107 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 473 } } 2025-03-31T16:33:27.416040Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 107, tablet: 72075186233409547, partId: 0 2025-03-31T16:33:27.416142Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 107:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 107 Step: 5000004 OrderId: 107 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 473 } } 2025-03-31T16:33:27.416231Z node 2 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 107 Step: 5000004 OrderId: 107 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 473 } } FAKE_COORDINATOR: Erasing txId 107 2025-03-31T16:33:27.416911Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 327 RawX2: 8589936901 } Origin: 72075186233409547 State: 5 TxId: 107 Step: 0 Generation: 2 2025-03-31T16:33:27.416946Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 107, tablet: 72075186233409547, partId: 0 2025-03-31T16:33:27.417040Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationReply execute, operationId: 107:0, at schemeshard: 72057594046678944, message: Source { RawX1: 327 RawX2: 8589936901 } Origin: 72075186233409547 State: 5 TxId: 107 Step: 0 Generation: 2 2025-03-31T16:33:27.417094Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 107:0 HandleReply TEvDataShard::TEvSchemaChanged, save it, at schemeshard: 72057594046678944 2025-03-31T16:33:27.417503Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2025-03-31T16:33:27.417547Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 107:0 ProgressState, operation type: TxDropTable, at tablet# 72057594046678944 2025-03-31T16:33:27.417582Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 107:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2025-03-31T16:33:27.417613Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 107, done: 0, blocked: 1 2025-03-31T16:33:27.417672Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 107:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 107 Name: RenamePathBarrier }, at tablet# 72057594046678944 2025-03-31T16:33:27.417751Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 137 -> 129 2025-03-31T16:33:27.417839Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:33:27.417880Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-31T16:33:27.418947Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2025-03-31T16:33:27.419806Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2025-03-31T16:33:27.419984Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:33:27.420024Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 107, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:33:27.420162Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 107, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T16:33:27.420289Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:27.420331Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 107, path id: 1 2025-03-31T16:33:27.420378Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 107, path id: 2 2025-03-31T16:33:27.420643Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2025-03-31T16:33:27.420682Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 107:0 ProgressState at tablet: 72057594046678944 2025-03-31T16:33:27.420740Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 107:0, at schemeshard: 72057594046678944 2025-03-31T16:33:27.420767Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 107:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-03-31T16:33:27.420797Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 129 -> 240 2025-03-31T16:33:27.421233Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 107 2025-03-31T16:33:27.421307Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 107 2025-03-31T16:33:27.421363Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 107 2025-03-31T16:33:27.421407Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-03-31T16:33:27.421446Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:33:27.421881Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 107 2025-03-31T16:33:27.421934Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 107 2025-03-31T16:33:27.421952Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 107 2025-03-31T16:33:27.421968Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-03-31T16:33:27.421986Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-03-31T16:33:27.422032Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 0/1, is published: true 2025-03-31T16:33:27.424386Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2025-03-31T16:33:27.424446Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 107:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:33:27.424698Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-31T16:33:27.424821Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#107:0 progress is 1/1 2025-03-31T16:33:27.424862Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-03-31T16:33:27.424898Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#107:0 progress is 1/1 2025-03-31T16:33:27.424931Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-03-31T16:33:27.424969Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 1/1, is published: true 2025-03-31T16:33:27.425036Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send 
TEvNotifyTxCompletionResult to actorId: [2:375:2343] message: TxId: 107 2025-03-31T16:33:27.425080Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-03-31T16:33:27.425116Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 107:0 2025-03-31T16:33:27.425147Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 107:0 2025-03-31T16:33:27.425234Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-31T16:33:27.426213Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2025-03-31T16:33:27.426483Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2025-03-31T16:33:27.427482Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2025-03-31T16:33:27.427518Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [2:580:2540] TestWaitNotification: OK eventTxId 107 >> KqpRanges::IsNotNullInValue [GOOD] >> KqpRanges::IsNotNullInJsonValue2 >> KqpRanges::NoFullScanAtScanQuery [GOOD] >> KqpRanges::NoFullScanAtDNFPredicate >> KqpScanSpilling::SpillingPragmaParseError [GOOD] >> KqpNewEngine::JoinSameKey [GOOD] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldCompactBorrowedBeforeSplit >> IndexBuildTest::MergeIndexTableShardsOnlyWhenReady >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestCreateCleanWithRetry >> KqpScanArrowInChanels::AggregateNoColumn ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::JoinSameKey [GOOD] Test command err: Trying to start YDB, gRPC: 6088, MsgBus: 23143 2025-03-31T16:32:47.732442Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012489777189583:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:47.732590Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00099d/r3tmp/tmpRGM4Bj/pdisk_1.dat TServer::EnableGrpc on GrpcPort 6088, node 1 2025-03-31T16:32:48.035764Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:32:48.036287Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:32:48.056294Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:32:48.079231Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:32:48.079264Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:32:48.079276Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:32:48.079410Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:32:48.088371Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:48.088510Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:32:48.090704Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient 
is connected to server localhost:23143 TClient is connected to server localhost:23143 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:32:48.585102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:48.611163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:48.736657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:48.869842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:48.935051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:50.586383Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012502662093251:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:50.586501Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:50.918099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:32:50.947350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:32:50.971285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:32:51.000327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:32:51.029049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:32:51.070565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:32:51.113981Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012506957061060:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:51.114057Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:51.114257Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012506957061065:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:51.117228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:32:51.126124Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012506957061067:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:32:51.197961Z node 1 :TX_PROXY ERROR: Actor# [1:7488012506957061121:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 25094, MsgBus: 17834 2025-03-31T16:32:53.131711Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012512682161640:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:53.131752Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00099d/r3tmp/tmpgvTJXa/pdisk_1.dat 2025-03-31T16:32:53.222784Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25094, node 2 2025-03-31T16:32:53.261724Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:53.261819Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:32:53.263218Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:32:53.284816Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:32:53.284833Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:32:53.284838Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:32:53.284912Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17834 TClient is connected to server localhost:17834 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:32:53.688709Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:53.702282Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:32:53.742062Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:53.878528Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:53.962064Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:55.854417Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012521272098002:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permission ... : [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488012629570802693:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:20.072978Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:20.128621Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:33:20.162440Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:33:20.198126Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:33:20.235980Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:33:20.269731Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:33:20.321324Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:33:20.400093Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488012629570803211:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:20.400213Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:20.400245Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488012629570803216:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:20.403820Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:33:20.413485Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7488012629570803218:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:33:20.505403Z node 6 :TX_PROXY ERROR: Actor# [6:7488012629570803273:3453] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:33:21.616529Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7488012612390931747:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:21.616596Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 29759, MsgBus: 4210 2025-03-31T16:33:23.249536Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7488012642743587315:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:23.249604Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00099d/r3tmp/tmpAYB2Gt/pdisk_1.dat 2025-03-31T16:33:23.382594Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:33:23.417183Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:23.417313Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:23.418930Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29759, node 7 2025-03-31T16:33:23.469212Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:23.469239Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:23.469251Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:23.469406Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4210 TClient is connected to server localhost:4210 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:33:24.045485Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:24.064075Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:24.140079Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:24.348494Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:24.431420Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:26.986296Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488012655628490985:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:26.986375Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:27.035684Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:33:27.074606Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:33:27.112328Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:33:27.148890Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:33:27.180878Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:33:27.214468Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:33:27.255981Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488012659923458792:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:27.256073Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488012659923458797:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:27.256086Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:27.261440Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:33:27.278034Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7488012659923458799:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:33:27.348341Z node 7 :TX_PROXY ERROR: Actor# [7:7488012659923458855:3451] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:33:28.250049Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7488012642743587315:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:28.250135Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/runtime/unittest >> KqpScanSpilling::SpillingPragmaParseError [GOOD] Test command err: cwd: /home/runner/.ya/build/build_root/0l5c/000920/ydb/core/kqp/ut/runtime/test-results/unittest/testing_out_stuff/chunk7 Trying to start YDB, gRPC: 27756, MsgBus: 22109 2025-03-31T16:33:25.663013Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012650638510457:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:25.663166Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000920/r3tmp/tmpRCPhKS/pdisk_1.dat 2025-03-31T16:33:25.936696Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27756, node 1 2025-03-31T16:33:25.983426Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:25.983447Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:25.983454Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:25.983601Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:33:26.002101Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:26.002254Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:26.003920Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22109 TClient is connected to server localhost:22109 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:33:26.376658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:26.399365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:26.530135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:26.654644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:26.730310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:27.903431Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012659228446838:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:27.903551Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:28.145094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:33:28.175732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:33:28.202634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:33:28.233750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:33:28.258089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:33:28.286477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:33:28.362605Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012663523414646:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:28.362667Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:28.362771Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012663523414651:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:28.366476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:33:28.374216Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012663523414653:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:33:28.456591Z node 1 :TX_PROXY ERROR: Actor# [1:7488012663523414709:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:33:29.312809Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488012667818382301:2499], status: GENERIC_ERROR, issues:
: Error: Pre type annotation, code: 1020
:3:40: Error: Bad "EnableSpillingNodes" setting for "$all" cluster: (yexception) tools/enum_parser/enum_serialization_runtime/enum_runtime.cpp:70: Key 'GraceJoin1' not found in enum NYql::NDq::EEnabledSpillingNodes. Valid options are: 'None', 'GraceJoin', 'Aggregation', 'All'. 2025-03-31T16:33:29.313096Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjE2NzE3NWMtZGYyMDNiNmQtZDNmNWUyOC00ODVhODM4, ActorId: [1:7488012667818382294:2495], ActorState: ExecuteState, TraceId: 01jqpgt24y05q8wg7g0ta7m2a7, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: >> KikimrIcGateway::TestLoadAwsSecretValueFromExternalDataSourceMetadata [GOOD] >> KikimrIcGateway::TestLoadDataSourceProperties >> KqpMergeCn::TopSortBy_Interval_Limit3 [GOOD] >> KqpMergeCn::TopSortBy_Decimal_Limit5 >> KikimrIcGateway::TestSecretsExistingValidation [GOOD] |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestSecretsExistingValidation [GOOD] Test command err: Trying to start YDB, gRPC: 64077, MsgBus: 27655 2025-03-31T16:33:12.295852Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012596458058801:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:12.295900Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ab8/r3tmp/tmpuII6Zi/pdisk_1.dat 2025-03-31T16:33:12.629118Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64077, node 1 2025-03-31T16:33:12.686193Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:12.686381Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:12.688092Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:33:12.799189Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:12.799248Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:12.799262Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:12.799385Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27655 TClient is connected to server localhost:27655 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:33:13.433693Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:14.923161Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012605047994098:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:14.923266Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:15.399900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:33:15.496351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:33:15.562145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:33:15.590067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:33:15.628172Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012609342961712:2365], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:15.628256Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:15.628326Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012609342961717:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:15.631814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710664:3, at schemeshard: 72057594046644480 2025-03-31T16:33:15.641139Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012609342961719:2369], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710664 completed, doublechecking } 2025-03-31T16:33:15.725063Z node 1 :TX_PROXY ERROR: Actor# [1:7488012609342961771:2569] txid# 281474976710665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 11], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 20234, MsgBus: 26625 2025-03-31T16:33:16.706744Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012612611370645:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:16.706967Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ab8/r3tmp/tmpRW6FDM/pdisk_1.dat 2025-03-31T16:33:16.841882Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:33:16.869539Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:16.869626Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:16.871661Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20234, node 2 2025-03-31T16:33:16.910748Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:16.910778Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:16.910786Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:16.910921Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26625 TClient is connected to server localhost:26625 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:33:17.261547Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:17.277311Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
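The warning sequence that repeats throughout these runs — TPoolFetcherActor reporting NOT_FOUND for the default pool, TPoolCreatorActor scheduling a "doublechecking" retry, and TX_PROXY logging "path exist, request accepts it" — is a create-if-missing bootstrap: each fresh database lazily creates /Root/.metadata/workload_manager/pools/default, and a creator that loses the race treats "already exists" as success. A minimal sketch of that idempotent-create pattern (hypothetical names, not the actual actor code):

#include <iostream>
#include <set>
#include <string>

enum class ECreateResult { Created, AlreadyExists };

// Hypothetical stand-in for the schemeshard's path registry.
std::set<std::string> existingPaths;

// Idempotent create: racing callers both end up with a usable pool.
ECreateResult EnsureDefaultPool(const std::string& path) {
    const bool inserted = existingPaths.insert(path).second;
    return inserted ? ECreateResult::Created
                    : ECreateResult::AlreadyExists;  // maps to "path exist, request accepts it"
}

int main() {
    const std::string pool = "/Root/.metadata/workload_manager/pools/default";
    EnsureDefaultPool(pool);  // first creator wins
    if (EnsureDefaultPool(pool) == ECreateResult::AlreadyExists) {
        std::cout << "pool already present; treated as success\n";  // the losing creator proceeds
    }
    return 0;
}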
2025-03-31T16:33:17.351352Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:17.468129Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:17.522948Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:19.858874Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012625496274305:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:19.858955Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:19.916359Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:33:19.946026Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:33:19.973953Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:33:20.004337Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:33:20.070597Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:33:20.140332Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:33:20.177434Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012629791242118:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:20.177502Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:20.177513Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012629791242123:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:20.180843Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:33:20.193770Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488012629791242125:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:33:20.258145Z node 2 :TX_PROXY ERROR: Actor# [2:7488012629791242177:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:33:20.985540Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:1, at schemeshard: 72057594046644480 2025-03-31T16:33:21.464309Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-03-31T16:33:21.707163Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488012612611370645:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:21.707248Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:33:21.838675Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:1, at schemeshard: 72057594046644480 2025-03-31T16:33:22.269215Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480 2025-03-31T16:33:22.600890Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715685:0, at schemeshard: 72057594046644480 2025-03-31T16:33:23.015697Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715692:0, at schemeshard: 72057594046644480 2025-03-31T16:33:23.401286Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-03-31T16:33:23.434816Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-03-31T16:33:25.100887Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715708:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 23781, MsgBus: 14093 2025-03-31T16:33:26.977259Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488012654901612537:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:26.977360Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ab8/r3tmp/tmpjgReZE/pdisk_1.dat 2025-03-31T16:33:27.082667Z node 3 :IMPORT WARN: Table profiles were not loaded 
TServer::EnableGrpc on GrpcPort 23781, node 3 2025-03-31T16:33:27.109037Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:27.109148Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:27.111048Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:33:27.129339Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:27.129359Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:27.129368Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:27.129494Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14093 TClient is connected to server localhost:14093 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:33:27.519746Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:27.535767Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:27.607729Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:27.735441Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:27.795120Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:29.916344Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488012667786516210:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:29.916427Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:29.949862Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:33:29.979590Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:33:30.013730Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:33:30.045763Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:33:30.078642Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:33:30.123045Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:33:30.163103Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488012672081484017:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:30.163189Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488012672081484022:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:30.163193Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:30.166680Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:33:30.176232Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488012672081484024:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:33:30.248606Z node 3 :TX_PROXY ERROR: Actor# [3:7488012672081484078:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> StreamCreator::WithResolvedTimestamps |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> TSchemeshardBackgroundCompactionTest::ShouldNotCompactServerless [GOOD] >> TSchemeshardBackgroundCompactionTest::ShouldCompactServerless |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> KqpScanArrowInChanels::AllTypesColumns [GOOD] >> KqpScanArrowInChanels::SingleKey >> KqpScanArrowFormat::AggregateCountStar [GOOD] >> KqpScanArrowFormat::AggregateByColumn |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> IndexBuildTest::BaseCase [GOOD] >> IndexBuildTest::CancelBuild |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> DataShardSnapshots::UncommittedWriteRestartDuringCommitThenBulkErase [GOOD] >> DataShardSnapshots::UncommittedChangesRenameTable-UseSink |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldRequestCompactionsSchemeshardRestart [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldRequestCompactionsConfigRequest |96.0%| [TM] {asan, default-linux-x86_64, release} 
ydb/services/metadata/initializer/ut/unittest |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> KqpRanges::IsNotNullInJsonValue2 [GOOD] >> Initializer::Simple >> KikimrIcGateway::TestLoadMdbBasicSecretValueFromExternalDataSourceMetadata [GOOD] >> StreamCreator::WithResolvedTimestamps [GOOD] >> KqpRanges::NoFullScanAtDNFPredicate [GOOD] |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> StreamCreator::WithResolvedTimestamps [GOOD] Test command err: 2025-03-31T16:33:33.241768Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012687770384059:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:33.241821Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000fc6/r3tmp/tmpT0aerA/pdisk_1.dat 2025-03-31T16:33:33.623005Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:33:33.664100Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:33.664180Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:33.667405Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:64753 TServer::EnableGrpc on GrpcPort 16231, node 1 2025-03-31T16:33:33.991513Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:33.991536Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:33.991542Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:33.991643Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64753 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:33:34.495711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:34.538708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743438814656 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743438814572 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743438814656 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: ".sys" PathId: 1844... 
(TRUNCATED) 2025-03-31T16:33:34.702242Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-31T16:33:34.702449Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-31T16:33:34.702470Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-03-31T16:33:34.703118Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-03-31T16:33:35.904713Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743438814656, tx_id: 281474976710658 } } } 2025-03-31T16:33:35.906090Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-03-31T16:33:35.907926Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-31T16:33:35.908666Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2025-03-31T16:33:35.908689Z node 1 :REPLICATION_CONTROLLER DEBUG: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 2025-03-31T16:33:35.931354Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2025-03-31T16:33:35.931383Z node 1 :REPLICATION_CONTROLLER INFO: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 3] 2025-03-31T16:33:35.931931Z node 1 :REPLICATION_CONTROLLER TRACE: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::NController::TEvPrivate::TEvAllowCreateStream 2025-03-31T16:33:36.024687Z node 1 :CHANGE_EXCHANGE WARN: [CdcChangeSenderMain][72075186224037888:1][1:7488012700655286865:2346] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:5:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2025-03-31T16:33:36.065602Z node 1 :REPLICATION_CONTROLLER 
TRACE: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvAlterTableResponse { Result: { status: SUCCESS, issues: } } 2025-03-31T16:33:36.065632Z node 1 :REPLICATION_CONTROLLER INFO: [StreamCreator][rid 1][tid 1] Success: issues# 2025-03-31T16:33:36.082753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:33:36.096397Z node 1 :REPLICATION_CONTROLLER TRACE: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvAlterTopicResponse { Result: { status: SUCCESS, issues: } } 2025-03-31T16:33:36.096424Z node 1 :REPLICATION_CONTROLLER INFO: [StreamCreator][rid 1][tid 1] Success: issues# TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743438814656 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyC... (TRUNCATED) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestLoadMdbBasicSecretValueFromExternalDataSourceMetadata [GOOD] Test command err: Trying to start YDB, gRPC: 1758, MsgBus: 14701 2025-03-31T16:33:12.295698Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012596177261124:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:12.295731Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a78/r3tmp/tmpnKrkbD/pdisk_1.dat 2025-03-31T16:33:12.624126Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1758, node 1 2025-03-31T16:33:12.686402Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:12.686484Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:12.688169Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:33:12.799123Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:12.799144Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:12.799151Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:12.799276Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14701 TClient is connected to server localhost:14701 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:33:13.415441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:13.444391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:13.605179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:13.737735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:13.813962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:14.957233Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012604767197502:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:14.957384Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:15.399453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:33:15.449793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:33:15.475431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:33:15.500401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:33:15.532857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:33:15.600037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:33:15.641860Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012609062165314:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:15.641931Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012609062165319:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:15.641949Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:15.645369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:33:15.654431Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012609062165321:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:33:15.718569Z node 1 :TX_PROXY ERROR: Actor# [1:7488012609062165374:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:33:16.490411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:33:16.495354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 22303, MsgBus: 3146 2025-03-31T16:33:17.196116Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012617735913140:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:17.196171Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a78/r3tmp/tmp05vVTD/pdisk_1.dat 2025-03-31T16:33:17.266584Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22303, node 2 2025-03-31T16:33:17.316909Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:17.316940Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:17.316947Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:17.317062Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:33:17.320401Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:17.320468Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:17.322193Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3146 TClient is connected to server localhost:3146 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:33:17.646628Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:17.664054Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:17.735416Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:17.873526Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:17.923471Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31 ... eExistsActor;event=timeout;self_id=[2:7488012617735913140:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:22.196406Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:33:22.505717Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715684:0, at schemeshard: 72057594046644480 2025-03-31T16:33:22.993045Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715687:0, at schemeshard: 72057594046644480 2025-03-31T16:33:23.399449Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715692:0, at schemeshard: 72057594046644480 2025-03-31T16:33:23.771113Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-03-31T16:33:23.802010Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-03-31T16:33:25.323470Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715713:0, at schemeshard: 72057594046644480 2025-03-31T16:33:25.330045Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976715714:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 31903, MsgBus: 12206 2025-03-31T16:33:26.040525Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488012656364762502:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:26.040583Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a78/r3tmp/tmpij08nn/pdisk_1.dat 2025-03-31T16:33:26.122840Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31903, node 3 2025-03-31T16:33:26.168784Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:26.168882Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:26.170372Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:33:26.172809Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:26.172831Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:26.172837Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:26.172933Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12206 TClient is connected to server localhost:12206 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:33:26.533513Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:26.540945Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:26.592611Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:26.733100Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:26.791280Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:33:28.702902Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488012664954698863:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:28.703012Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:28.748568Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:33:28.780591Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:33:28.812920Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:33:28.843546Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:33:28.875444Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:33:28.909517Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:33:28.955168Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488012664954699370:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:28.955276Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:28.955359Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488012664954699375:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:28.958681Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:33:28.967135Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488012664954699377:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:33:29.060733Z node 3 :TX_PROXY ERROR: Actor# [3:7488012669249666727:3442] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:33:29.799014Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:1, at schemeshard: 72057594046644480 2025-03-31T16:33:30.215371Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-03-31T16:33:30.632965Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:1, at schemeshard: 72057594046644480 2025-03-31T16:33:31.040593Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488012656364762502:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:31.040648Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:33:31.046340Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480 2025-03-31T16:33:31.504261Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715687:0, at schemeshard: 72057594046644480 2025-03-31T16:33:31.898812Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715690:0, at schemeshard: 72057594046644480 2025-03-31T16:33:32.280480Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-03-31T16:33:32.308333Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-03-31T16:33:35.152178Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715726:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpRanges::IsNotNullInJsonValue2 [GOOD] Test command err: Trying to start YDB, gRPC: 19393, MsgBus: 15461 2025-03-31T16:32:57.582417Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012533957345588:2241];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:57.582530Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/0l5c/0008fb/r3tmp/tmp3tJzwy/pdisk_1.dat 2025-03-31T16:32:57.981680Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:32:57.987294Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:57.987428Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:32:57.989442Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19393, node 1 2025-03-31T16:32:58.060589Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:32:58.060609Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:32:58.060615Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:32:58.060729Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15461 TClient is connected to server localhost:15461 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:32:58.539294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:58.567699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:58.682617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:58.833740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:58.902531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:00.594172Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012546842249063:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:00.594317Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:00.972888Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:33:01.000022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:33:01.025073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:33:01.051520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:33:01.081017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:33:01.148150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:33:01.189999Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012551137216876:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:01.190071Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:01.190349Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012551137216881:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:01.193726Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:33:01.203361Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012551137216883:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:33:01.303040Z node 1 :TX_PROXY ERROR: Actor# [1:7488012551137216936:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:33:02.123456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:33:02.296894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T16:33:02.449482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-31T16:33:02.566538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-31T16:33:02.605269Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488012533957345588:2241];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:02.607748Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:33:02.808428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 18964, MsgBus: 32569 2025-03-31T16:33:04.024273Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012563174934653:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:04.024396Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0008fb/r3tmp/tmpkbFn61/pdisk_1.dat 2025-03-31T16:33:04.132535Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18964, node 2 2025-03-31T16:33:04.169184Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:04.169279Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:04.174809Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:33:04.215676Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:04.215698Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:04.215706Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:04.215823Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32569 TClient is connected to 
server localhost:32569 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:33:04.603173Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation ... iled to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:26.337864Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7488012654731818560:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:26.341064Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:33:26.350161Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7488012654731818562:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:33:26.414387Z node 5 :TX_PROXY ERROR: Actor# [5:7488012654731818616:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:33:27.360467Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:33:27.582341Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-31T16:33:27.697501Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7488012637551947077:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:27.697563Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:33:27.729820Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-03-31T16:33:27.885875Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2025-03-31T16:33:28.173018Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 15721, MsgBus: 10322 2025-03-31T16:33:29.367828Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7488012669581364501:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:29.367976Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0008fb/r3tmp/tmppa6D3d/pdisk_1.dat 2025-03-31T16:33:29.473288Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:33:29.501296Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:29.501424Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:29.503164Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15721, node 6 2025-03-31T16:33:29.549199Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:29.549228Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:29.549236Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:29.549392Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10322 TClient is connected to 
server localhost:10322 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:33:30.123375Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:30.141284Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:30.252270Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:30.456099Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:30.533764Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:32.712513Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488012682466268178:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:32.712672Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:32.757272Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:33:32.788307Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:33:32.818151Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:33:32.848106Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:33:32.879059Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:33:32.915438Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:33:32.998053Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488012682466268690:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:32.998127Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488012682466268695:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:32.998193Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:33.002390Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:33:33.018686Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7488012682466268697:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:33:33.119305Z node 6 :TX_PROXY ERROR: Actor# [6:7488012686761236048:3451] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:33:34.119307Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:33:34.367914Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7488012669581364501:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:34.367979Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:33:34.370900Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-31T16:33:34.523897Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-03-31T16:33:34.701338Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2025-03-31T16:33:34.997394Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 >> KqpExtractPredicateLookup::OverflowLookup [GOOD] >> KqpExtractPredicateLookup::ComplexRange >> KqpScanArrowInChanels::AggregateNoColumn [GOOD] >> KqpScanArrowInChanels::AggregateNoColumnNoRemaps >> IndexBuildTest::CancelBuild [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly [GOOD] >> KqpScanArrowInChanels::SingleKey [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly-StrictAclCheck >> KqpScanArrowInChanels::JoinWithParams |96.1%| [TA] $(B)/ydb/core/tx/replication/controller/ut_stream_creator/test-results/unittest/{meta.json ... results_accumulator.log} |96.1%| [TA] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpRanges::NoFullScanAtDNFPredicate [GOOD] Test command err: Trying to start YDB, gRPC: 9866, MsgBus: 3155 2025-03-31T16:32:37.695846Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012446981817604:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:37.696032Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0009de/r3tmp/tmp44NJKm/pdisk_1.dat 2025-03-31T16:32:37.976793Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9866, node 1 2025-03-31T16:32:38.055980Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:38.056181Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:32:38.058395Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:32:38.060075Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:32:38.060098Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:32:38.060107Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:32:38.060231Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3155 TClient is connected to server localhost:3155 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:32:38.540782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:38.556706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:38.652739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:32:38.804876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:38.882030Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:40.342275Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012459866721266:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:40.342412Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:40.642764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:32:40.668540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:32:40.696046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:32:40.721904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:32:40.746861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:32:40.774281Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:32:40.808450Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012459866721774:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:40.808537Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:40.808776Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012459866721779:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:40.812442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:32:40.822348Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012459866721781:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:32:40.907975Z node 1 :TX_PROXY ERROR: Actor# [1:7488012459866721835:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:32:41.719061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:32:41.892160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T16:32:42.034058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-31T16:32:42.133736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-31T16:32:42.347675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-31T16:32:42.699882Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488012446981817604:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:42.700002Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 8865, MsgBus: 26114 2025-03-31T16:32:43.466274Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012473271316539:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:43.466351Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0009de/r3tmp/tmp0zOTMB/pdisk_1.dat 2025-03-31T16:32:43.544166Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8865, node 2 2025-03-31T16:32:43.593782Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:43.593864Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:32:43.595209Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:32:43.615293Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:32:43.615312Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:32:43.615321Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:32:43.615430Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26114 TClient is connected to 
server localhost:26114 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:32:44.003325Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ... e 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7488012656785506318:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:26.794886Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:26.794892Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7488012656785506323:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:26.798619Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:33:26.807282Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [8:7488012656785506325:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:33:26.896354Z node 8 :TX_PROXY ERROR: Actor# [8:7488012656785506380:3449] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:33:27.651810Z node 8 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[8:7488012639605634824:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:27.651931Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:33:27.746031Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:33:28.578779Z node 8 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438808580, txId: 281474976715673] shutting down ---------QUERY---------- --!syntax_v1 SELECT * FROM `/Root/TableWithIntKey` WHERE Key1 IN (1, 2, 100, 101, 102, 200, 201, 201, 1000, 1001, 1002, 2000, 2001, 2002) AND (Key1 > 2000) ORDER BY Key1; ---------RESULT--------- [[[2001];#];[[2002];[2]]] ------------------------ Trying to start YDB, gRPC: 30999, MsgBus: 2822 2025-03-31T16:33:29.439577Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7488012669418424136:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:29.439693Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0009de/r3tmp/tmpRM4j56/pdisk_1.dat 2025-03-31T16:33:29.529811Z node 9 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 309992025-03-31T16:33:29.625690Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected , node 9 2025-03-31T16:33:29.625800Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:29.627319Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:33:29.670496Z node 9 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:29.670528Z node 9 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:29.670541Z node 9 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:29.670725Z node 9 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2822 TClient is connected to server localhost:2822 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:33:30.250686Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:30.269085Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:30.331171Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:30.540070Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:30.624022Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:33.423860Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [9:7488012686598295086:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:33.423965Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:33.479836Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:33:33.520250Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:33:33.558606Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:33:33.597174Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:33:33.640403Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:33:33.696371Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:33:33.744733Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [9:7488012686598295599:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:33.744828Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [9:7488012686598295604:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:33.744862Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:33.749517Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:33:33.761629Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [9:7488012686598295606:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:33:33.830760Z node 9 :TX_PROXY ERROR: Actor# [9:7488012686598295659:3449] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:33:34.439938Z node 9 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[9:7488012669418424136:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:34.440030Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:33:34.835264Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:33:35.516108Z node 9 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438815552, txId: 281474976715673] shutting down ---------QUERY---------- --!syntax_v1 SELECT Value FROM `/Root/TestDNF` WHERE Key1 = 1 AND (Key2 = 100 OR Key2 = 300) ORDER BY Value; ---------RESULT--------- [[[5u]];[[9u]]] ------------------------ 2025-03-31T16:33:35.968985Z node 9 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438816007, txId: 281474976715675] shutting down ---------QUERY---------- --!syntax_v1 SELECT Value FROM `/Root/TestDNF` WHERE Key1 = 1 AND Key2 IN (100, 300, 400) ORDER BY Value; ---------RESULT--------- [[[5u]];[[9u]];[[10u]]] ------------------------ |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::CancelBuild [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:33:23.302945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:33:23.303043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:33:23.303077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:33:23.303121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:33:23.304427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:33:23.304492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:33:23.304559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, 
InflightLimit# 10 2025-03-31T16:33:23.304637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:33:23.305944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:33:23.397124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:33:23.397188Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:33:23.402308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:33:23.402398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:33:23.402571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:33:23.408584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:33:23.408839Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:33:23.409630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:23.409902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:33:23.415385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:23.430267Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:33:23.430479Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:23.430686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:33:23.430743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:33:23.430797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:33:23.430954Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:33:23.439309Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:33:23.566884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:33:23.567115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:23.567313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:33:23.567595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:33:23.567661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation 
type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:23.570352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:23.570474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:33:23.570656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:23.570724Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:33:23.570777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:33:23.570824Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:33:23.572652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:23.572704Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:33:23.572735Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:33:23.574155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:23.574191Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:23.574242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:23.574296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:33:23.585131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:33:23.587327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:33:23.587503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:33:23.588478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:23.588611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:33:23.588675Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:23.588974Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
Change state for txid 1:0 128 -> 240 2025-03-31T16:33:23.589021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:23.589178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:33:23.589241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:33:23.591223Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:33:23.591262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:33:23.591398Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:23.591453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:33:23.591643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:23.591693Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:33:23.591793Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:33:23.591829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:23.591878Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:33:23.591913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:23.591953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:33:23.592024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:23.592071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:33:23.592104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:33:23.592180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:33:23.592249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:33:23.592284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:33:23.594119Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:33:23.594217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:33:23.594257Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710760 at step: 5000006 2025-03-31T16:33:37.445896Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000006, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:37.445972Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710760 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 8589936749 } } Step: 5000006 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:33:37.446014Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 HandleReply TEvOperationPlan: step# 5000006 2025-03-31T16:33:37.446051Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710760:0 128 -> 240 2025-03-31T16:33:37.447584Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2025-03-31T16:33:37.447623Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710760:0 ProgressState 2025-03-31T16:33:37.447701Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1 2025-03-31T16:33:37.447727Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-03-31T16:33:37.447764Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1 2025-03-31T16:33:37.447789Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-03-31T16:33:37.447814Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 1/1, is published: true 2025-03-31T16:33:37.447868Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:123:2149] message: TxId: 281474976710760 2025-03-31T16:33:37.447898Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-03-31T16:33:37.447923Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710760:0 2025-03-31T16:33:37.447945Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710760:0 2025-03-31T16:33:37.447990Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 13 FAKE_COORDINATOR: Erasing txId 281474976710760 2025-03-31T16:33:37.449693Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2025-03-31T16:33:37.449744Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710760 2025-03-31T16:33:37.449794Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfoId: 102 2025-03-31T16:33:37.449852Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: index1, IndexColumn: index, State: Cancellation_Unlocking, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [2:1164:3018], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, 
AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-31T16:33:37.451337Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-03-31T16:33:37.451405Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: index1, IndexColumn: index, State: Cancellation_Unlocking, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [2:1164:3018], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-31T16:33:37.451442Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Cancellation_Unlocking to Cancelled 2025-03-31T16:33:37.452752Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-03-31T16:33:37.452826Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: index1, IndexColumn: index, State: Cancelled, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [2:1164:3018], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-31T16:33:37.452870Z node 2 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2025-03-31T16:33:37.453022Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-31T16:33:37.453062Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:1262:3105] TestWaitNotification: OK 
eventTxId 102 2025-03-31T16:33:37.455126Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot" IndexBuildId: 102 2025-03-31T16:33:37.455334Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 102 State: STATE_CANCELLED Settings { source_path: "/MyRoot/Table" index { name: "index1" index_columns: "index" global_index { } } max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 } Progress: 0 } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 102 State: STATE_CANCELLED Settings { source_path: "/MyRoot/Table" index { name: "index1" index_columns: "index" global_index { } } max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 } Progress: 0 } 2025-03-31T16:33:37.457657Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:33:37.457859Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 219us result status StatusSuccess 2025-03-31T16:33:37.458210Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 3 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 10 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 11 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 
2025-03-31T16:33:37.460040Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/index1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-31T16:33:37.460221Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/index1" took 206us result status StatusPathDoesNotExist 2025-03-31T16:33:37.460373Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/index1\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeTableIndex, state: EPathStateNotExist), drop stepId: 5000005, drop txId: 281474976710759" Path: "/MyRoot/Table/index1" PathId: 3 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> KqpMergeCn::TopSortBy_Decimal_Limit5 [GOOD] |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> KqpScanArrowFormat::AggregateByColumn [GOOD] >> KqpScanArrowFormat::AggregateNoColumn >> VectorIndexBuildTest::BaseCase [GOOD] |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |96.2%| [TM] {asan, default-linux-x86_64, release} 
ydb/services/metadata/initializer/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpMergeCn::TopSortBy_Decimal_Limit5 [GOOD] Test command err: Trying to start YDB, gRPC: 4536, MsgBus: 15490 2025-03-31T16:32:54.693912Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012517062763585:2240];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:54.693977Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000937/r3tmp/tmpiIBWaN/pdisk_1.dat 2025-03-31T16:32:55.054356Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:32:55.059176Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:55.059274Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:32:55.061254Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4536, node 1 2025-03-31T16:32:55.122304Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:32:55.122334Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:32:55.122343Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:32:55.122481Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15490 TClient is connected to server localhost:15490 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:32:55.640352Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:55.661243Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:55.758499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:32:55.907383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:55.986925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:57.550442Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012529947667068:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:57.550535Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:57.862383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:32:57.893920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:32:57.929146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:32:57.996907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:32:58.032730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:32:58.066148Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:32:58.114189Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012534242634877:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:58.114267Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:58.114354Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012534242634882:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:58.118126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:32:58.128102Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012534242634884:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:32:58.208615Z node 1 :TX_PROXY ERROR: Actor# [1:7488012534242634937:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:32:59.124426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:32:59.692116Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488012517062763585:2240];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:59.692202Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:32:59.895708Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438779922, txId: 281474976710673] shutting down Trying to start YDB, gRPC: 7111, MsgBus: 11668 2025-03-31T16:33:00.700359Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012545411787986:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:00.700498Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000937/r3tmp/tmpK55atb/pdisk_1.dat 2025-03-31T16:33:00.795233Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7111, node 2 2025-03-31T16:33:00.831782Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:00.831864Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:00.836444Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:33:00.882986Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:00.883010Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:00.883025Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:00.883157Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11668 TClient is connected to server localhost:11668 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:33:01.275201Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:01.282638Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:01.351624Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:01.505669Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpC ... oposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:33:28.424773Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:33:28.460173Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:33:28.492663Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:33:28.529078Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:33:28.606771Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488012666410761241:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:28.606884Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:28.606936Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488012666410761246:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:28.609924Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:33:28.617592Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7488012666410761248:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:33:28.720023Z node 6 :TX_PROXY ERROR: Actor# [6:7488012666410761303:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:33:29.667143Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:33:29.895099Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7488012649230889775:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:29.895190Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:33:30.594701Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438810631, txId: 281474976715673] shutting down Trying to start YDB, gRPC: 21883, MsgBus: 27396 2025-03-31T16:33:31.508206Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7488012677846275446:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:31.508292Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000937/r3tmp/tmp1cQLXx/pdisk_1.dat 2025-03-31T16:33:31.651726Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:33:31.678846Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:31.678934Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:31.680355Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21883, node 7 2025-03-31T16:33:31.731059Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:31.731101Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:31.731111Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:31.731266Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27396 TClient is connected to server localhost:27396 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:33:32.302363Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:32.314882Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:32.389614Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:32.557187Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:32.634419Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:35.294014Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488012695026146415:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:35.294142Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:35.373816Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:33:35.411485Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:33:35.450446Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:33:35.486691Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:33:35.526912Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:33:35.564500Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:33:35.643103Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488012695026146931:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:35.643202Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:35.643265Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488012695026146936:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:35.647661Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:33:35.660286Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7488012695026146938:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:33:35.722519Z node 7 :TX_PROXY ERROR: Actor# [7:7488012695026146991:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:33:36.508617Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7488012677846275446:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:36.508714Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:33:36.900425Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:33:37.857409Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438817890, txId: 281474976715673] shutting down |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> VectorIndexBuildTest::BaseCase [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:33:24.787829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:33:24.787915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:33:24.787945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:33:24.787970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:33:24.788005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:33:24.788049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:33:24.788093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:33:24.788166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:33:24.788456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:33:24.851665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:33:24.851719Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:33:24.855848Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:33:24.855930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:33:24.856076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:33:24.860522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:33:24.860739Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:33:24.861312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:24.861507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:33:24.863039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:24.864078Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:33:24.864129Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:24.864254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:33:24.864298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:33:24.864329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:33:24.864475Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:33:24.870139Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:33:24.981416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:33:24.981673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:24.981880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:33:24.982155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:33:24.982210Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:24.984492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:24.984659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:33:24.984872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 
72057594046678944 2025-03-31T16:33:24.984941Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:33:24.984979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:33:24.985016Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:33:24.986877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:24.986966Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:33:24.987005Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:33:24.988817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:24.988866Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:24.988920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:24.988986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:33:25.004113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:33:25.006650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:33:25.006891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:33:25.008122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:25.008290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:33:25.008366Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:25.008677Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:33:25.008732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:25.008914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:33:25.008994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: 
Erasing txId 1 2025-03-31T16:33:25.011219Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:33:25.011262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:33:25.011395Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:25.011444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:33:25.011635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:25.011671Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:33:25.011758Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:33:25.011782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:25.011816Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:33:25.011858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:25.011902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:33:25.011935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:25.011975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:33:25.012003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:33:25.012061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:33:25.012092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:33:25.012116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:33:25.020342Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:33:25.020498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:33:25.020550Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
54536Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvStateChangedResult 2025-03-31T16:33:39.154566Z node 1 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186233409568 state Offline 2025-03-31T16:33:39.154894Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72075186233409561 ShardLocalIdx: 7 TxId_Deprecated: 7 TabletID: 72075186233409567 2025-03-31T16:33:39.155265Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268829696, Sender [1:4445:6115], Recipient [1:4455:6123]: NKikimr::TEvTablet::TEvTabletDead 2025-03-31T16:33:39.155596Z node 1 :TX_DATASHARD INFO: OnTabletDead: 72075186233409567 2025-03-31T16:33:39.155749Z node 1 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186233409567 2025-03-31T16:33:39.157414Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [1:4886:6528], Recipient [1:4578:6234]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72075186233409561 ClientId: [1:4886:6528] ServerId: [1:4888:6530] } 2025-03-31T16:33:39.157464Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed Forgetting tablet 72075186233409567 2025-03-31T16:33:39.158227Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72075186233409561 ShardLocalIdx: 8 TxId_Deprecated: 8 TabletID: 72075186233409568 2025-03-31T16:33:39.158538Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268829696, Sender [1:4569:6227], Recipient [1:4578:6234]: NKikimr::TEvTablet::TEvTabletDead 2025-03-31T16:33:39.158849Z node 1 :TX_DATASHARD INFO: OnTabletDead: 72075186233409568 2025-03-31T16:33:39.158978Z node 1 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186233409568 2025-03-31T16:33:39.160502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 7 ShardOwnerId: 72075186233409561 ShardLocalIdx: 7, at schemeshard: 72075186233409561 2025-03-31T16:33:39.160840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72075186233409561, LocalPathId: 6] was 1 Forgetting tablet 72075186233409568 2025-03-31T16:33:39.162011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 8 ShardOwnerId: 72075186233409561 ShardLocalIdx: 8, at schemeshard: 72075186233409561 2025-03-31T16:33:39.162255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72075186233409561, LocalPathId: 7] was 1 2025-03-31T16:33:39.162760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 2 paths in candidate queue, at schemeshard: 72075186233409561 2025-03-31T16:33:39.162820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72075186233409561, LocalPathId: 7], at schemeshard: 72075186233409561 2025-03-31T16:33:39.162923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72075186233409561, LocalPathId: 3] was 5 2025-03-31T16:33:39.162976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72075186233409561, LocalPathId: 6], at schemeshard: 72075186233409561 2025-03-31T16:33:39.163011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72075186233409561, LocalPathId: 3] was 4 2025-03-31T16:33:39.166317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 
72075186233409561:7 2025-03-31T16:33:39.166386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72075186233409561:7 tabletId 72075186233409567 2025-03-31T16:33:39.168479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72075186233409561:8 2025-03-31T16:33:39.168567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72075186233409561:8 tabletId 72075186233409568 2025-03-31T16:33:39.168927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72075186233409561 2025-03-31T16:33:39.229936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1900, transactions count in step: 1, at schemeshard: 72075186233409561 2025-03-31T16:33:39.230109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976735762 AckTo { RawX1: 0 RawX2: 0 } } Step: 1900 MediatorID: 72075186233409563 TabletID: 72075186233409561, at schemeshard: 72075186233409561 2025-03-31T16:33:39.230181Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72075186233409561] TDropLock TPropose opId# 281474976735762:0 HandleReply TEvOperationPlan: step# 1900 2025-03-31T16:33:39.230235Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976735762:0 128 -> 240 2025-03-31T16:33:39.233558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976735762:0, at schemeshard: 72075186233409561 2025-03-31T16:33:39.233634Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72075186233409561] TDone opId# 281474976735762:0 ProgressState 2025-03-31T16:33:39.233732Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976735762:0 progress is 1/1 2025-03-31T16:33:39.233767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976735762 ready parts: 1/1 2025-03-31T16:33:39.233807Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976735762:0 progress is 1/1 2025-03-31T16:33:39.233838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976735762 ready parts: 1/1 2025-03-31T16:33:39.233873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976735762, ready parts: 1/1, is published: true 2025-03-31T16:33:39.233950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:3315:5054] message: TxId: 281474976735762 2025-03-31T16:33:39.233998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976735762 ready parts: 1/1 2025-03-31T16:33:39.234035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976735762:0 2025-03-31T16:33:39.234068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976735762:0 2025-03-31T16:33:39.234169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409561, LocalPathId: 2] was 4 2025-03-31T16:33:39.238308Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976735762 2025-03-31T16:33:39.238410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976735762 2025-03-31T16:33:39.238489Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976735762, buildInfoId: 115 2025-03-31T16:33:39.238578Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976735762, buildInfo: TBuildInfo{ 
IndexBuildId: 115, Uid: , DomainPathId: [OwnerId: 72075186233409561, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409561, LocalPathId: 2], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: embedding, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:4189:5882], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976735757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976735758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976735761, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976735762, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-31T16:33:39.242671Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 115 2025-03-31T16:33:39.242779Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 115, Uid: , DomainPathId: [OwnerId: 72075186233409561, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409561, LocalPathId: 2], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: embedding, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:4189:5882], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976735757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976735758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976735761, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976735762, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-31T16:33:39.242843Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-03-31T16:33:39.245710Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 115 2025-03-31T16:33:39.245824Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 115, Uid: , DomainPathId: [OwnerId: 72075186233409561, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409561, LocalPathId: 2], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: embedding, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:4189:5882], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976735757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976735758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976735761, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976735762, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-31T16:33:39.245871Z node 1 :BUILD_INDEX TRACE: 
TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 115, subscribers count# 1 2025-03-31T16:33:39.246038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 115: got EvNotifyTxCompletionResult 2025-03-31T16:33:39.246093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 115: satisfy waiter [1:4327:5998] TestWaitNotification: OK eventTxId 115 2025-03-31T16:33:39.255842Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/CommonDB" IndexBuildId: 115 2025-03-31T16:33:39.256158Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 115 State: STATE_DONE Settings { source_path: "/MyRoot/CommonDB/Table" index { name: "index1" index_columns: "embedding" global_vector_kmeans_tree_index { } } max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 } Progress: 100 } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 115 State: STATE_DONE Settings { source_path: "/MyRoot/CommonDB/Table" index { name: "index1" index_columns: "embedding" global_vector_kmeans_tree_index { } } max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 } Progress: 100 } |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> DataShardSnapshots::UncommittedChangesRenameTable-UseSink [GOOD] >> KqpRanges::ValidatePredicatesDataQuery [GOOD] >> KqpScanArrowInChanels::JoinWithParams [GOOD] >> KikimrIcGateway::TestLoadDataSourceProperties [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/arrow/unittest >> KqpScanArrowInChanels::JoinWithParams [GOOD] Test command err: Trying to start YDB, gRPC: 61166, MsgBus: 8500 2025-03-31T16:33:27.891180Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012661566955310:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:27.891817Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0017e1/r3tmp/tmpJPy2LI/pdisk_1.dat 2025-03-31T16:33:28.246596Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:33:28.294122Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:28.294218Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 61166, node 1 2025-03-31T16:33:28.296022Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:33:28.426834Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:28.426860Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:28.426871Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:28.426983Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 
TClient is connected to server localhost:8500 TClient is connected to server localhost:8500 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:33:29.073056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:29.101875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:29.238263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:29.352484Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:29.404337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:30.420751Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012674451858982:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:30.420860Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:31.064206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:33:31.089190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:33:31.113440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:33:31.136545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:33:31.163121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:33:31.193221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:33:31.268405Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012678746826792:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:31.268493Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:31.268554Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012678746826797:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:31.272205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:33:31.280603Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012678746826799:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:33:31.376254Z node 1 :TX_PROXY ERROR: Actor# [1:7488012678746826857:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:33:32.297254Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:33:32.691509Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438812703, txId: 281474976710673] shutting down 864000000000 Trying to start YDB, gRPC: 27080, MsgBus: 13612 2025-03-31T16:33:33.392727Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012687343472348:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:33.392782Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0017e1/r3tmp/tmphyUhj0/pdisk_1.dat 2025-03-31T16:33:33.468373Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27080, node 2 2025-03-31T16:33:33.520182Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:33.520310Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:33.522505Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:33.522530Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:33.522538Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:33.522659Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:33:33.532753Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13612 TClient is connected to server localhost:13612 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:33:33.935200Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:33.953387Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:34.000467Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:34.128462Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:34.207724Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:36.080014Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012700228376017:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:36.080091Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:36.118347Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:33:36.145139Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:33:36.174404Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:33:36.204851Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:33:36.236455Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:33:36.267873Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:33:36.324629Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012700228376527:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:36.324715Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:36.324843Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012700228376532:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:36.328599Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:33:36.338944Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488012700228376534:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:33:36.429226Z node 2 :TX_PROXY ERROR: Actor# [2:7488012700228376590:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:33:37.251658Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438817288, txId: 281474976715671] shutting down Trying to start YDB, gRPC: 5051, MsgBus: 7422 2025-03-31T16:33:37.988142Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488012705352474248:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:37.988209Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0017e1/r3tmp/tmpFsc3dO/pdisk_1.dat 2025-03-31T16:33:38.099769Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5051, node 3 2025-03-31T16:33:38.134846Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:38.134916Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:38.136423Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:33:38.159247Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:38.159271Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:38.159278Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:38.159366Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7422 TClient is connected to server localhost:7422 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:33:38.541132Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:33:38.558029Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:38.611017Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:38.753181Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:38.820985Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:40.744381Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488012718237377923:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:40.744490Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:40.760606Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:33:40.783924Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:33:40.809962Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:33:40.836315Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:33:40.861934Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:33:40.889258Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:33:40.924688Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488012718237378432:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:40.924764Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:40.924781Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488012718237378437:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:40.927519Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:33:40.936140Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488012718237378439:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:33:41.000152Z node 3 :TX_PROXY ERROR: Actor# [3:7488012718237378493:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:33:41.827600Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438821852, txId: 281474976715671] shutting down 2025-03-31T16:33:41.964540Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438822006, txId: 281474976715673] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::UncommittedChangesRenameTable-UseSink [GOOD] Test command err: 2025-03-31T16:31:50.867713Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:31:50.868056Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:31:50.868271Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001352/r3tmp/tmpllHQXZ/pdisk_1.dat 2025-03-31T16:31:51.300553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:31:51.346072Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:31:51.388198Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-31T16:31:51.389949Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:51.390839Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:51.391030Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-03-31T16:31:51.403616Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:31:51.489396Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-03-31T16:31:51.489504Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-03-31T16:31:51.490434Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-03-31T16:31:51.620040Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-03-31T16:31:51.620140Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T16:31:51.622683Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-03-31T16:31:51.622806Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:31:51.623200Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:31:51.623413Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T16:31:51.623601Z node 1 :TX_PROXY DEBUG: Actor# 
[1:641:2549] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-03-31T16:31:51.623914Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvClientConnected 2025-03-31T16:31:51.627053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:31:51.628461Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-03-31T16:31:51.628529Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-03-31T16:31:51.677992Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:31:51.679061Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:31:51.687467Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-31T16:31:51.687765Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:31:51.737994Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:31:51.738776Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:31:51.738928Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:31:51.741521Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:31:51.741629Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:31:51.741710Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:31:51.746384Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:31:51.746571Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:31:51.746673Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-31T16:31:51.757557Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:31:51.806535Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T16:31:51.808211Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:31:51.808384Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-31T16:31:51.808415Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:31:51.808460Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T16:31:51.808508Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:31:51.808745Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
2025-03-31T16:31:51.808820Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:31:51.809919Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T16:31:51.810051Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T16:31:51.810178Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:31:51.810232Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:31:51.812059Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-31T16:31:51.812131Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-31T16:31:51.812172Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-31T16:31:51.812207Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:31:51.812273Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:31:51.812455Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:31:51.812534Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:31:51.812592Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-31T16:31:51.812710Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-03-31T16:31:51.812756Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T16:31:51.812870Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:31:51.813207Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-31T16:31:51.813273Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T16:31:51.813361Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T16:31:51.813483Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-31T16:31:51.813538Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-31T16:31:51.813596Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-03-31T16:31:51.813635Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-31T16:31:51.813930Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-31T16:31:51.813981Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-31T16:31:51.814021Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-31T16:31:51.814052Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 
72075186224037888 on unit FinishPropose 2025-03-31T16:31:51.814102Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-31T16:31:51.814144Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-31T16:31:51.814192Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-31T16:31:51.814235Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-31T16:31:51.814283Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-31T16:31:51.817183Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-31T16:31:51.817240Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:31:51.827877Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Comple ... teState, TraceId: 01jqpgte0gd8tms35qxjyya930, Created new KQP executer: [13:993:2684] isRollback: 1 2025-03-31T16:33:41.482437Z node 13 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=13&id=N2Y4N2QxZmUtYWY1ZTU0ZS1iNTI5MmQ1Yy02OWFiYzI0, ActorId: [13:839:2684], ActorState: ExecuteState, TraceId: 01jqpgte0gd8tms35qxjyya930, Cleanup start, isFinal: 0 CleanupCtx: 1 TransactionsToBeAborted.size(): 1 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-03-31T16:33:41.482993Z node 13 :KQP_EXECUTER DEBUG: TxId: 281474976715665. Resolved key sets: 0 2025-03-31T16:33:41.483098Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jqpgte0gd8tms35qxjyya930, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=N2Y4N2QxZmUtYWY1ZTU0ZS1iNTI5MmQ1Yy02OWFiYzI0, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:33:41.483131Z node 13 :KQP_EXECUTER DEBUG: TxId: 281474976715665. Ctx: { TraceId: 01jqpgte0gd8tms35qxjyya930, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=N2Y4N2QxZmUtYWY1ZTU0ZS1iNTI5MmQ1Yy02OWFiYzI0, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 1, snapshot: {0, 0} 2025-03-31T16:33:41.483230Z node 13 :KQP_EXECUTER DEBUG: ActorId: [13:993:2684] TxId: 281474976715665. Ctx: { TraceId: 01jqpgte0gd8tms35qxjyya930, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=N2Y4N2QxZmUtYWY1ZTU0ZS1iNTI5MmQ1Yy02OWFiYzI0, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. State: WaitResolveState, Executing KQP transaction on shard: 72075186224037888, tasks: [], lockTxId: (empty maybe), locks: Locks { LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 2 HasWrites: true } Op: Rollback, immediate: 1 2025-03-31T16:33:41.483303Z node 13 :KQP_EXECUTER DEBUG: ActorId: [13:993:2684] TxId: 281474976715665. Ctx: { TraceId: 01jqpgte0gd8tms35qxjyya930, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=N2Y4N2QxZmUtYWY1ZTU0ZS1iNTI5MmQ1Yy02OWFiYzI0, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
ExecuteDatashardTransaction traceId.verbosity: 0 2025-03-31T16:33:41.483353Z node 13 :KQP_EXECUTER INFO: ActorId: [13:993:2684] TxId: 281474976715665. Ctx: { TraceId: 01jqpgte0gd8tms35qxjyya930, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=N2Y4N2QxZmUtYWY1ZTU0ZS1iNTI5MmQ1Yy02OWFiYzI0, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: 1, datashardTxs: 1, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-03-31T16:33:41.483383Z node 13 :KQP_EXECUTER DEBUG: ActorId: [13:993:2684] TxId: 281474976715665. Ctx: { TraceId: 01jqpgte0gd8tms35qxjyya930, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=N2Y4N2QxZmUtYWY1ZTU0ZS1iNTI5MmQ1Yy02OWFiYzI0, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, datashard 72075186224037888 not finished yet: Executing 2025-03-31T16:33:41.483431Z node 13 :KQP_EXECUTER DEBUG: ActorId: [13:993:2684] TxId: 281474976715665. Ctx: { TraceId: 01jqpgte0gd8tms35qxjyya930, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=N2Y4N2QxZmUtYWY1ZTU0ZS1iNTI5MmQ1Yy02OWFiYzI0, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, waiting for 0 compute actor(s) and 1 datashard(s): DS 72075186224037888 (Executing), 2025-03-31T16:33:41.483464Z node 13 :KQP_EXECUTER DEBUG: ActorId: [13:993:2684] TxId: 281474976715665. Ctx: { TraceId: 01jqpgte0gd8tms35qxjyya930, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=N2Y4N2QxZmUtYWY1ZTU0ZS1iNTI5MmQ1Yy02OWFiYzI0, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, immediate tx, become ExecuteState 2025-03-31T16:33:41.483662Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [13:993:2684], Recipient [13:962:2778]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 993 RawX2: 55834577532 } TxBody: " \0018\001j3\010\001\032\'\n#\t\215\023\000\000\000\000\001\000\021\000\000\001\000\000\020\000\001\030\001 \000)\000\001\205\000\000\000\000\0010\0028\001 \003\"\006\020\0020\000@\n\220\001\000" TxId: 281474976715665 ExecLevel: 0 Flags: 8 2025-03-31T16:33:41.483697Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T16:33:41.483795Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 2146435074, Sender [13:962:2778], Recipient [13:962:2778]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2025-03-31T16:33:41.483827Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2025-03-31T16:33:41.483880Z node 13 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:33:41.484010Z node 13 :TX_DATASHARD TRACE: -- AddWriteRange: (Uint64 : 281474976715661, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 2) table: [1:997:0] 2025-03-31T16:33:41.484071Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit CheckDataTx 2025-03-31T16:33:41.484111Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2025-03-31T16:33:41.484134Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit CheckDataTx 2025-03-31T16:33:41.484157Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 
to execution unit BuildAndWaitDependencies 2025-03-31T16:33:41.484180Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit BuildAndWaitDependencies 2025-03-31T16:33:41.484223Z node 13 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v500/281474976715663 IncompleteEdge# v{min} UnprotectedReadEdge# v400/18446744073709551615 ImmediateWriteEdge# v400/18446744073709551615 ImmediateWriteEdgeReplied# v1000/18446744073709551615 2025-03-31T16:33:41.484261Z node 13 :TX_DATASHARD TRACE: Activated operation [0:281474976715665] at 72075186224037888 2025-03-31T16:33:41.484286Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2025-03-31T16:33:41.484305Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-03-31T16:33:41.484323Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit ExecuteKqpDataTx 2025-03-31T16:33:41.484342Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit ExecuteKqpDataTx 2025-03-31T16:33:41.484397Z node 13 :TX_DATASHARD TRACE: Operation [0:281474976715665] (execute_kqp_data_tx) at 72075186224037888 set memory limit 4193448 2025-03-31T16:33:41.484508Z node 13 :TX_DATASHARD TRACE: KqpEraseLock LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 2 HasWrites: true 2025-03-31T16:33:41.484590Z node 13 :TX_DATASHARD TRACE: add locks to result: 0 2025-03-31T16:33:41.484647Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2025-03-31T16:33:41.484674Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit ExecuteKqpDataTx 2025-03-31T16:33:41.484698Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit FinishPropose 2025-03-31T16:33:41.484717Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit FinishPropose 2025-03-31T16:33:41.484759Z node 13 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715665 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-03-31T16:33:41.484852Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is DelayComplete 2025-03-31T16:33:41.484872Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit FinishPropose 2025-03-31T16:33:41.484893Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit CompletedOperations 2025-03-31T16:33:41.484917Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit CompletedOperations 2025-03-31T16:33:41.484950Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2025-03-31T16:33:41.484980Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit CompletedOperations 2025-03-31T16:33:41.485002Z node 13 :TX_DATASHARD TRACE: Execution plan for [0:281474976715665] at 72075186224037888 has finished 2025-03-31T16:33:41.485055Z node 13 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:33:41.485085Z node 13 
:TX_DATASHARD TRACE: Complete execution for [0:281474976715665] at 72075186224037888 on unit FinishPropose 2025-03-31T16:33:41.485127Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:33:41.485301Z node 13 :KQP_EXECUTER DEBUG: ActorId: [13:993:2684] TxId: 281474976715665. Ctx: { TraceId: 01jqpgte0gd8tms35qxjyya930, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=N2Y4N2QxZmUtYWY1ZTU0ZS1iNTI5MmQ1Yy02OWFiYzI0, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Got propose result, shard: 72075186224037888, status: COMPLETE, error: 2025-03-31T16:33:41.485432Z node 13 :KQP_EXECUTER DEBUG: ActorId: [13:993:2684] TxId: 281474976715665. Ctx: { TraceId: 01jqpgte0gd8tms35qxjyya930, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=N2Y4N2QxZmUtYWY1ZTU0ZS1iNTI5MmQ1Yy02OWFiYzI0, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-03-31T16:33:41.485522Z node 13 :KQP_EXECUTER DEBUG: ActorId: [13:993:2684] TxId: 281474976715665. Ctx: { TraceId: 01jqpgte0gd8tms35qxjyya930, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=N2Y4N2QxZmUtYWY1ZTU0ZS1iNTI5MmQ1Yy02OWFiYzI0, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-03-31T16:33:41.485642Z node 13 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=13&id=N2Y4N2QxZmUtYWY1ZTU0ZS1iNTI5MmQ1Yy02OWFiYzI0, ActorId: [13:839:2684], ActorState: CleanupState, TraceId: 01jqpgte0gd8tms35qxjyya930, EndCleanup, isFinal: 0 2025-03-31T16:33:41.485803Z node 13 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=13&id=N2Y4N2QxZmUtYWY1ZTU0ZS1iNTI5MmQ1Yy02OWFiYzI0, ActorId: [13:839:2684], ActorState: CleanupState, TraceId: 01jqpgte0gd8tms35qxjyya930, Sent query response back to proxy, proxyRequestId: 8, proxyId: [13:57:2104] 2025-03-31T16:33:41.742558Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [13:1009:2811], Recipient [13:962:2778]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:33:41.742669Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:33:41.742750Z node 13 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [13:1008:2810], serverId# [13:1009:2811], sessionId# [0:0:0] 2025-03-31T16:33:41.742886Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269553224, Sender [13:593:2518], Recipient [13:962:2778]: NKikimr::TEvDataShard::TEvGetOpenTxs ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpRanges::ValidatePredicatesDataQuery [GOOD] Test command err: Trying to start YDB, gRPC: 20807, MsgBus: 3924 2025-03-31T16:32:07.050584Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012317088355757:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:07.050708Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a9d/r3tmp/tmptuUw9n/pdisk_1.dat 2025-03-31T16:32:07.345635Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20807, node 1 2025-03-31T16:32:07.417465Z node 1 :HIVE 
WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:07.417598Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:32:07.419917Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:32:07.436681Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:32:07.436727Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:32:07.436736Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:32:07.436855Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3924 TClient is connected to server localhost:3924 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:32:07.875162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:07.888332Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:32:07.904027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:32:08.034425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-31T16:32:08.176776Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:08.242373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:10.117601Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012329973259429:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:10.117727Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:10.406289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:32:10.431976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:32:10.457821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:32:10.481813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:32:10.508447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:32:10.536999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:32:10.574726Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012329973259938:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:10.574803Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012329973259943:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:10.574822Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:10.578093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:32:10.588574Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012329973259945:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:32:10.677993Z node 1 :TX_PROXY ERROR: Actor# [1:7488012329973260000:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:32:11.426561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:32:11.606200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T16:32:11.748769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-31T16:32:11.882333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-31T16:32:12.050398Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488012317088355757:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:12.050502Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:32:12.125251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480
: Warning: Type annotation, code: 1030
:1:44: Warning: At function: Coalesce
:1:58: Warning: At function: SqlIn
:1:58: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 Trying to start YDB, gRPC: 27883, MsgBus: 5225 2025-03-31T16:32:13.395877Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012341161093294:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:13.395996Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a9d/r3tmp/tmpwRHQq9/pdisk_1.dat 2025-03-31T16:32:13.495605Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27883, node 2 2025-03-31T16:32:13.536262Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:13.536350Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:32:13.539376Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:32:13.586638Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:32:13.586673Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:32:13.586678Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:32:13.586795Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5225 TClient is connected to server localhost:5225 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardI ... 
007Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438796890, txId: 281474976715785] shutting down 2025-03-31T16:33:17.096564Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438797135, txId: 281474976715787] shutting down EXPECTED: [[[1u]];[[2u]];[[3u]];[[4u]];[[5u]];[[6u]];[[7u]];[[8u]];[[9u]];[[10u]];[[11u]];[[12u]];[[13u]];[[14u]];[[15u]];[[16u]];[[17u]];[[18u]];[[19u]];[[20u]]] RECEIVED: [[[1u]];[[2u]];[[3u]];[[4u]];[[5u]];[[6u]];[[7u]];[[8u]];[[9u]];[[10u]];[[11u]];[[12u]];[[13u]];[[14u]];[[15u]];[[16u]];[[17u]];[[18u]];[[19u]];[[20u]]] Execute query SELECT `Value` FROM `/Root/TestPredicates` WHERE Key1 < 9000 OR Key3 IS NULL ORDER BY `Value`; 2025-03-31T16:33:17.321659Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438797359, txId: 281474976715789] shutting down 2025-03-31T16:33:17.565439Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438797597, txId: 281474976715791] shutting down EXPECTED: [[[1u]];[[2u]];[[3u]];[[4u]];[[5u]];[[6u]];[[7u]];[[8u]];[[9u]];[[10u]];[[11u]];[[12u]];[[13u]];[[14u]];[[15u]];[[16u]];[[17u]]] RECEIVED: [[[1u]];[[2u]];[[3u]];[[4u]];[[5u]];[[6u]];[[7u]];[[8u]];[[9u]];[[10u]];[[11u]];[[12u]];[[13u]];[[14u]];[[15u]];[[16u]];[[17u]]] Execute query SELECT `Value` FROM `/Root/TestPredicates` WHERE Value = 20 ORDER BY `Value`; 2025-03-31T16:33:17.749014Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438797786, txId: 281474976715793] shutting down 2025-03-31T16:33:17.929551Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438797968, txId: 281474976715795] shutting down EXPECTED: [[[20u]]] RECEIVED: [[[20u]]] Execute query SELECT `Value` FROM `/Root/TestPredicates` WHERE (Key1 <= 1000) OR (Key1 > 2000 AND Key1 < 5000) OR (Key1 >= 8000) ORDER BY `Value`; 2025-03-31T16:33:18.402715Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438798437, txId: 281474976715797] shutting down 2025-03-31T16:33:18.883869Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438798920, txId: 281474976715799] shutting down EXPECTED: [[[1u]];[[2u]];[[3u]];[[4u]];[[5u]];[[11u]];[[12u]];[[16u]];[[17u]];[[18u]];[[19u]];[[20u]]] RECEIVED: [[[1u]];[[2u]];[[3u]];[[4u]];[[5u]];[[11u]];[[12u]];[[16u]];[[17u]];[[18u]];[[19u]];[[20u]]] Execute query SELECT `Value` FROM `/Root/TestPredicates` WHERE Key1 < NULL ORDER BY `Value`; 2025-03-31T16:33:19.005392Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438798999, txId: 281474976715801] shutting down 2025-03-31T16:33:19.123246Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438799119, txId: 281474976715803] shutting down EXPECTED: [] RECEIVED: [] 2025-03-31T16:33:19.170790Z node 6 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 6, TabletId: 72075186224037920 not found 2025-03-31T16:33:19.170838Z node 6 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 6, TabletId: 72075186224037919 not found 2025-03-31T16:33:19.173245Z node 6 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 6, TabletId: 72075186224037921 not found Trying to start YDB, gRPC: 
12819, MsgBus: 25388 2025-03-31T16:33:20.201733Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7488012630057782777:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:20.201837Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a9d/r3tmp/tmpJ0dCSl/pdisk_1.dat 2025-03-31T16:33:20.348146Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:33:20.391011Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:20.391136Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:20.393248Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12819, node 7 2025-03-31T16:33:20.450202Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:20.450243Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:20.450255Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:20.450435Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25388 TClient is connected to server localhost:25388 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:33:21.085412Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:21.102956Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:21.206372Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:21.409053Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:33:21.490513Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:24.753811Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488012647237653748:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:24.753957Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:24.812077Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:33:24.852811Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:33:24.891214Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:33:24.931371Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:33:24.972412Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:33:25.028053Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:33:25.106947Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488012651532621558:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:25.107029Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488012651532621563:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:25.107067Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:25.110584Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:33:25.121561Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7488012651532621565:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:33:25.200794Z node 7 :TX_PROXY ERROR: Actor# [7:7488012651532621620:3457] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:33:25.201852Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7488012630057782777:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:25.201912Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:33:26.280555Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:33:35.334329Z node 7 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:33:35.334367Z node 7 :IMPORT WARN: Table profiles were not loaded ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestLoadDataSourceProperties [GOOD] Test command err: Trying to start YDB, gRPC: 1544, MsgBus: 27418 2025-03-31T16:33:12.295881Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012595456222048:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:12.295925Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ad3/r3tmp/tmpw6SBml/pdisk_1.dat 2025-03-31T16:33:12.631183Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1544, node 1 2025-03-31T16:33:12.699800Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:12.699915Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:12.701600Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:33:12.799195Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:12.799225Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:12.799241Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:12.799359Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27418 TClient is connected to server localhost:27418 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:33:13.427270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:13.448703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:13.599737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:13.750242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:13.826478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:14.943953Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012604046158413:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:14.944068Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:15.399461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:33:15.432242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:33:15.461288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:33:15.488914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:33:15.518633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:33:15.585093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:33:15.624439Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012608341126226:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:15.624528Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012608341126231:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:15.624532Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:15.627985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:33:15.636647Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012608341126233:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:33:15.708047Z node 1 :TX_PROXY ERROR: Actor# [1:7488012608341126286:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:33:16.609985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:1, at schemeshard: 72057594046644480 2025-03-31T16:33:17.295975Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488012595456222048:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:17.296034Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:33:17.390787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-31T16:33:17.741417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:1, at schemeshard: 72057594046644480 2025-03-31T16:33:18.147286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-31T16:33:18.558294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710689:0, at schemeshard: 72057594046644480 2025-03-31T16:33:18.959712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710692:0, at schemeshard: 72057594046644480 2025-03-31T16:33:19.418143Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-03-31T16:33:19.463575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-03-31T16:33:21.048586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710711:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 4842, MsgBus: 11545 2025-03-31T16:33:21.792597Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012634209430557:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:21.792686Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ad3/r3tmp/tmpo1L5Fq/pdisk_1.dat 2025-03-31T16:33:21.880074Z node 2 :IMPORT WARN: Table profiles were not loaded 
TServer::EnableGrpc on GrpcPort 4842, node 2 2025-03-31T16:33:21.921981Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:21.922068Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:21.923521Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:33:21.937459Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:21.937486Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:21.937491Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:21.937597Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11545 TClient is connected to server localhost:11545 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPath ... but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:1, at schemeshard: 72057594046644480 2025-03-31T16:33:26.635325Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480 2025-03-31T16:33:26.793065Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488012634209430557:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:26.793138Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:33:27.031528Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715687:0, at schemeshard: 72057594046644480 2025-03-31T16:33:27.348753Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715690:0, at schemeshard: 72057594046644480 2025-03-31T16:33:27.728980Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-03-31T16:33:27.754755Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-03-31T16:33:30.316291Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715723:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 23373, MsgBus: 61735 2025-03-31T16:33:31.109592Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488012678212820456:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:31.109693Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ad3/r3tmp/tmpKcJKb0/pdisk_1.dat 2025-03-31T16:33:31.219672Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:33:31.250770Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:31.250866Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:31.252147Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23373, node 3 2025-03-31T16:33:31.291707Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:31.291749Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:31.291757Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:31.291895Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61735 TClient is connected to server localhost:61735 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:33:31.656173Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:31.673654Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:31.724075Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:31.857172Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:31.921693Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:33:34.422337Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488012691097724121:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:34.422446Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:34.454726Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:33:34.484568Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:33:34.514018Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:33:34.541746Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:33:34.569984Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:33:34.604258Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:33:34.645973Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488012691097724628:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:34.646021Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488012691097724633:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:34.646062Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:34.649831Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:33:34.659516Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488012691097724635:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:33:34.740628Z node 3 :TX_PROXY ERROR: Actor# [3:7488012691097724690:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:33:35.469845Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:1, at schemeshard: 72057594046644480 2025-03-31T16:33:35.946475Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-03-31T16:33:36.109977Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488012678212820456:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:36.110077Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:33:36.496666Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:1, at schemeshard: 72057594046644480 2025-03-31T16:33:37.137156Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480 2025-03-31T16:33:37.772044Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715689:0, at schemeshard: 72057594046644480 2025-03-31T16:33:38.311360Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715692:0, at schemeshard: 72057594046644480 2025-03-31T16:33:38.878872Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-03-31T16:33:38.915447Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-03-31T16:33:41.753583Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715722:0, at schemeshard: 72057594046644480 >> KqpScanArrowInChanels::AggregateNoColumnNoRemaps [GOOD] >> KqpScanArrowInChanels::AggregateWithFunction >> TKeyValueTracingTest::WriteHuge >> TKeyValueTracingTest::ReadHuge |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest |96.3%| [TA] $(B)/ydb/core/kqp/provider/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.3%| [TA] {RESULT} $(B)/ydb/core/kqp/provider/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TSchemeshardBackgroundCompactionTest::ShouldCompactServerless [GOOD] >> TSchemeshardBackgroundCompactionTest::ShouldNotCompactServerlessAfterDisable |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TSchemeShardAuditSettings::CreateExtSubdomain >> TSchemeShardAuditSettings::AlterExtSubdomain-ExternalSchemeShard-true >> TSchemeShardAuditSettings::CreateSubdomain >> ApplyClusterEndpointTest::NoPorts [GOOD] >> ApplyClusterEndpointTest::PortFromCds [GOOD] >> ApplyClusterEndpointTest::PortFromDriver [GOOD] >> BasicUsage::MaxByteSizeEqualZero >> ReadSessionImplTest::DecompressRaw [GOOD] >> ReadSessionImplTest::DecompressGzip [GOOD] >> ReadSessionImplTest::DecompressZstd [GOOD] >> ReadSessionImplTest::DecompressRawEmptyMessage [GOOD] >> ReadSessionImplTest::DecompressGzipEmptyMessage [GOOD] >> ReadSessionImplTest::DecompressWithSynchronousExecutor [GOOD] >> ReadSessionImplTest::DataReceivedCallbackReal >> TSchemeShardAuditSettings::AlterSubdomain >> Compression::WriteRAW |96.3%| [TA] $(B)/ydb/core/tx/datashard/ut_snapshot/test-results/unittest/{meta.json ... results_accumulator.log} >> ReadSessionImplTest::ReconnectOnTmpError [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithAsyncCompression >> ReadSessionImplTest::ReconnectOnTmpErrorAndThenTimeout [GOOD] >> ReadSessionImplTest::ReconnectOnTimeout [GOOD] >> ReadSessionImplTest::ReconnectOnTimeoutAndThenCreate [GOOD] >> ReadSessionImplTest::ReconnectsAfterFailure [GOOD] >> ReadSessionImplTest::SimpleDataHandlers >> ReadSessionImplTest::SimpleDataHandlers [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithCommit |96.3%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_snapshot/test-results/unittest/{meta.json ... results_accumulator.log} >> ReadSessionImplTest::SimpleDataHandlersWithCommit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::SimpleDataHandlersWithCommit [GOOD] Test command err: 2025-03-31T16:33:44.407030Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:33:44.407164Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:33:44.407186Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T16:33:44.407653Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-03-31T16:33:44.407719Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:33:44.407747Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:33:44.408732Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.006716s 2025-03-31T16:33:44.409340Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-03-31T16:33:44.409737Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-03-31T16:33:44.409832Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:33:44.410807Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:33:44.410829Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:33:44.410846Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T16:33:44.411178Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-03-31T16:33:44.411209Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:33:44.411229Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:33:44.411283Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.009714s 2025-03-31T16:33:44.411632Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-31T16:33:44.411993Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-03-31T16:33:44.412085Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:33:44.412874Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:33:44.412899Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:33:44.412925Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T16:33:44.413245Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2025-03-31T16:33:44.413299Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:33:44.413312Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:33:44.413375Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.243407s 2025-03-31T16:33:44.413758Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-31T16:33:44.414096Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-03-31T16:33:44.414287Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:33:44.414995Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:33:44.415012Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:33:44.415028Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T16:33:44.415444Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2025-03-31T16:33:44.415484Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:33:44.415504Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:33:44.415562Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.217249s 2025-03-31T16:33:44.415983Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-31T16:33:44.416445Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-03-31T16:33:44.416502Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:33:44.417401Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:33:44.417425Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:33:44.417459Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T16:33:44.417708Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-31T16:33:44.418626Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-31T16:33:44.427207Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:33:44.427620Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TRANSPORT_UNAVAILABLE. Description:
: Error: GRpc error: (14): 2025-03-31T16:33:44.427658Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:33:44.427675Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:33:44.427733Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.172176s 2025-03-31T16:33:44.428021Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2025-03-31T16:33:44.429260Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:33:44.429295Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:33:44.429330Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T16:33:44.429588Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-31T16:33:44.429953Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-31T16:33:44.430081Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:33:44.430459Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-31T16:33:44.531382Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:33:44.531644Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-03-31T16:33:44.531755Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-31T16:33:44.531825Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2025-03-31T16:33:44.531926Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2025-03-31T16:33:44.632282Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2025-03-31T16:33:44.632472Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2025-03-31T16:33:44.633707Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:33:44.633732Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:33:44.633753Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T16:33:44.634076Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-31T16:33:44.634507Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-31T16:33:44.634645Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:33:44.635116Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-31T16:33:44.737756Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:33:44.739499Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-03-31T16:33:44.739590Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-31T16:33:44.739631Z :DEBUG: Take Data. Partition 1. 
Read: {1, 0} (2-2) 2025-03-31T16:33:44.739870Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 2025-03-31T16:33:44.739996Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2025-03-31T16:33:44.740088Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2025-03-31T16:33:44.740239Z :DEBUG: [db] [sessionid] [cluster] Committed response: { cookies { assign_id: 1 partition_cookie: 1 } } 2025-03-31T16:33:44.740418Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster >> GenericFederatedQuery::YdbManagedSelectAll >> KqpNewEngine::SimpleUpsertSelect >> KqpNotNullColumns::UpdateNotNullPk >> ColumnShardTiers::DSConfigsStub [GOOD] >> KqpTx::DeferredEffects >> KqpScanArrowFormat::AggregateNoColumn [GOOD] >> KqpScanArrowFormat::AggregateEmptySum >> TSchemeShardAuditSettings::CreateExtSubdomain [GOOD] >> TSchemeShardAuditSettings::CreateSubdomain [GOOD] >> GenericFederatedQuery::TestFailsOnIncorrectScriptExecutionOperationId >> KqpSort::ReverseOptimized >> KqpNewEngine::PureExpr |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TSchemeShardAuditSettings::CreateExtSubdomain [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:33:44.881808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:33:44.881901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:33:44.881931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:33:44.881956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:33:44.882824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:33:44.882857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:33:44.882984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:33:44.883062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:33:44.884118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:33:44.974382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:33:44.974436Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:33:44.978948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TxInitSchema.Complete 2025-03-31T16:33:44.979032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:33:44.979220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:33:44.983522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:33:44.983749Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:33:44.984332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:44.984555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:33:44.986360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:44.991078Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:33:44.991164Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:44.992364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:33:44.992427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:33:44.992480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:33:44.992626Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:33:44.999116Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:33:45.154839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:33:45.155042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:45.156166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:33:45.157135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:33:45.157221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:45.159907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:45.160024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:33:45.160204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
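The CreateExtSubdomain trace above moves the sub-operation through the numeric states 2 -> 3 -> 128 -> 240, which the log labels TCreateParts, TConfigureParts, TPropose and TDone. The sketch below is a reduced, hypothetical model of that progression built only from what the trace shows: the numeric values come from the "Change state for txid" lines, while the enum and function names are illustrative, not the real schemeshard declarations.

```cpp
// Illustrative only: a reduced model of the sub-operation state machine walked
// by the trace above. Values are taken from the log ("Change state for txid
// 1:0 2 -> 3", "3 -> 128", "128 -> 240"); names are hypothetical.
#include <cstdio>

enum ETxState : int {
    CreateParts    = 2,   // "TCreateParts opId# 1:0 ProgressState"
    ConfigureParts = 3,   // "NSubDomainState::TConfigureParts ... ProgressState"
    Propose        = 128, // "NSubDomainState::TPropose ProgressState"
    Done           = 240, // "TDone opId# 1:0 ProgressState"
};

// Each ProgressState call either does work or, when there is nothing to do
// (e.g. "no shards to create, do next state"), advances to the next state.
ETxState Advance(ETxState s) {
    switch (s) {
        case CreateParts:    return ConfigureParts; // 2 -> 3
        case ConfigureParts: return Propose;        // 3 -> 128
        case Propose:        return Done;           // 128 -> 240, on TEvOperationPlan
        case Done:           return Done;
    }
    return s;
}

int main() {
    for (ETxState s = CreateParts; s != Done; s = Advance(s))
        std::printf("Change state %d -> %d\n", s, Advance(s));
}
```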
2025-03-31T16:33:45.160336Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:33:45.160371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:33:45.160402Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:33:45.162150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:45.162199Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:33:45.162247Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:33:45.163782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:45.163832Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:45.163883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:45.163936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:33:45.168529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:33:45.170329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:33:45.171373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:33:45.172460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:45.172580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:33:45.172642Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:45.173837Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:33:45.173912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:45.174124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:33:45.174209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 
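From "TTxPublishToSchemeBoard Send, to populator" onward, the trace tracks which (pathId, version) pairs have been announced to the scheme board and which TEvUpdateAck messages have come back. A minimal sketch of that bookkeeping, assuming the semantics suggested by the "Publication details", "Handle TEvUpdateAck" and "AckPublish ... version" lines; all names here are illustrative, not the real schemeshard types.

```cpp
// Hypothetical model: an operation finishes publishing once every
// (pathId, version) it announced has been acknowledged with an equal
// or newer version.
#include <cstdint>
#include <map>
#include <utility>

struct TPublicationTracker {
    // (txId, pathId) -> version that still awaits a TEvUpdateAck
    std::map<std::pair<uint64_t, uint64_t>, uint64_t> Pending;

    void Publish(uint64_t txId, uint64_t pathId, uint64_t version) {
        Pending[{txId, pathId}] = version; // "Publication details: tx: 1, [...], 3"
    }

    // Returns true when the ack retires the pending entry.
    bool Ack(uint64_t txId, uint64_t pathId, uint64_t ackedVersion) {
        auto it = Pending.find({txId, pathId});
        if (it == Pending.end() || ackedVersion < it->second)
            return false;  // ignore unknown or stale acks
        Pending.erase(it); // "AckPublish ... version: 3"
        return true;
    }

    // "Publication still in progress" / "Publication in-flight, count: N"
    bool InFlight(uint64_t txId) const {
        for (const auto& [key, version] : Pending)
            if (key.first == txId) return true;
        return false;
    }
};
```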
2025-03-31T16:33:45.176692Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:33:45.176748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:33:45.176899Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:45.176942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:33:45.177165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:45.177206Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:33:45.177310Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:33:45.177343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:45.177390Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:33:45.177423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:45.177457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:33:45.177494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:45.177524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:33:45.177566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:33:45.177624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:33:45.177658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:33:45.177701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:33:45.184677Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:33:45.184814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:33:45.184865Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 26 PathOwnerId: 72057594046678944, cookie: 112 2025-03-31T16:33:45.444073Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 112 2025-03-31T16:33:45.444095Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 26 2025-03-31T16:33:45.444122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:33:45.444944Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 3 PathOwnerId: 72057594046678944, cookie: 112 2025-03-31T16:33:45.445009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 3 PathOwnerId: 72057594046678944, cookie: 112 2025-03-31T16:33:45.445046Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 112 2025-03-31T16:33:45.445085Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 3 2025-03-31T16:33:45.445112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2025-03-31T16:33:45.445160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 112, ready parts: 0/1, is published: true 2025-03-31T16:33:45.446510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 112:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:112 msg type: 269090816 2025-03-31T16:33:45.446605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 112, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 112 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 112 at step: 5000013 2025-03-31T16:33:45.447258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000013, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:45.447337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 112 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000013 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:33:45.447383Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropExtSubdomain TPropose, operationId: 112:0 HandleReply TEvOperationPlan, step: 5000013, at schemeshard: 72057594046678944 2025-03-31T16:33:45.447426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 7] name: USER_0 type: EPathTypeExtSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 112 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:33:45.447443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 7] 2025-03-31T16:33:45.447461Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 112:0 128 -> 134 2025-03-31T16:33:45.448603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-03-31T16:33:45.449846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-03-31T16:33:45.450894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 112:0, at schemeshard: 72057594046678944 2025-03-31T16:33:45.450951Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropExtSubdomain TDeleteExternalShards, operationId: 112:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:33:45.451050Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 112:0 134 -> 135 2025-03-31T16:33:45.451227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:33:45.451280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 2 FAKE_COORDINATOR: Erasing txId 112 2025-03-31T16:33:45.452831Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:33:45.452872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:33:45.453007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 7] 2025-03-31T16:33:45.453118Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:45.453165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 112, path id: 1 2025-03-31T16:33:45.453200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 112, path id: 7 2025-03-31T16:33:45.453284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 112:0, at schemeshard: 72057594046678944 2025-03-31T16:33:45.453316Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 112:0 ProgressState 2025-03-31T16:33:45.453355Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 112:0 135 -> 240 2025-03-31T16:33:45.454373Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 27 PathOwnerId: 72057594046678944, cookie: 112 2025-03-31T16:33:45.454457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 27 PathOwnerId: 72057594046678944, cookie: 112 2025-03-31T16:33:45.454480Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 112 2025-03-31T16:33:45.454556Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 27 2025-03-31T16:33:45.454593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:33:45.454953Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 
LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 112 2025-03-31T16:33:45.455013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 112 2025-03-31T16:33:45.455035Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 112 2025-03-31T16:33:45.455059Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 18446744073709551615 2025-03-31T16:33:45.455104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2025-03-31T16:33:45.455159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 112, ready parts: 0/1, is published: true 2025-03-31T16:33:45.457476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 112:0, at schemeshard: 72057594046678944 2025-03-31T16:33:45.457522Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 112:0 ProgressState 2025-03-31T16:33:45.457587Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#112:0 progress is 1/1 2025-03-31T16:33:45.457632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2025-03-31T16:33:45.457665Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#112:0 progress is 1/1 2025-03-31T16:33:45.457689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2025-03-31T16:33:45.457722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 112, ready parts: 1/1, is published: true 2025-03-31T16:33:45.457764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2025-03-31T16:33:45.457794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 112:0 2025-03-31T16:33:45.457815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 112:0 2025-03-31T16:33:45.457860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 2 2025-03-31T16:33:45.458413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-31T16:33:45.458447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2025-03-31T16:33:45.458505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 1 2025-03-31T16:33:45.459179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-31T16:33:45.459217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2025-03-31T16:33:45.459279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:33:45.459736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 
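The dropped path (LocalPathId: 7) is published with Version: 18446744073709551615, which is the maximum ui64 value (2^64 - 1). A sketch under the assumption, suggested by the trace, that this value acts as a "dropped" tombstone; the helper below is hypothetical, not a real schemeshard API.

```cpp
// Sketch: reading the max-ui64 publication version as a drop tombstone.
#include <cassert>
#include <cstdint>
#include <limits>

constexpr uint64_t DroppedPathVersion = std::numeric_limits<uint64_t>::max();

bool IsDroppedPath(uint64_t publishedVersion) {
    return publishedVersion == DroppedPathVersion;
}

int main() {
    assert(DroppedPathVersion == 18446744073709551615ULL); // value from the log
    assert(IsDroppedPath(18446744073709551615ULL));        // LocalPathId: 7 above
    assert(!IsDroppedPath(27));                            // LocalPathId: 1 above
}
```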
2025-03-31T16:33:45.459894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-03-31T16:33:45.461886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-03-31T16:33:45.461986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 112, wait until txId: 112 TestWaitNotification wait txId: 112 2025-03-31T16:33:45.462297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 112: send EvNotifyTxCompletion 2025-03-31T16:33:45.462390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 112 2025-03-31T16:33:45.462897Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 112, at schemeshard: 72057594046678944 2025-03-31T16:33:45.462966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 112: got EvNotifyTxCompletionResult 2025-03-31T16:33:45.462992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 112: satisfy waiter [1:651:2642] TestWaitNotification: OK eventTxId 112
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TSchemeShardAuditSettings::CreateSubdomain [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:33:44.882003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:33:44.882100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:33:44.882130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:33:44.882153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:33:44.882832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:33:44.882857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:33:44.882934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:33:44.883022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:33:44.884158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:33:44.974379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:33:44.974445Z node 1 :IMPORT WARN:
Table profiles were not loaded 2025-03-31T16:33:44.978744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:33:44.978806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:33:44.978939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:33:44.983586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:33:44.983768Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:33:44.984378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:44.984570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:33:44.986323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:44.991026Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:33:44.991135Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:44.992362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:33:44.992420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:33:44.992529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:33:44.992651Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:33:44.998521Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:33:45.160283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:33:45.160490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:45.160694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:33:45.160895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:33:45.160951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:45.162937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:45.163094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:33:45.163254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:45.163316Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:33:45.163348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:33:45.163377Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:33:45.165008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:45.165059Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:33:45.165122Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:33:45.170263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:45.170322Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:45.170375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:45.170427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:33:45.173608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:33:45.175458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:33:45.175610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:33:45.176321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:45.176428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:33:45.176469Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:45.176659Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:33:45.176695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:45.176822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:33:45.176896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, 
LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:33:45.178511Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:33:45.178567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:33:45.178668Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:45.178710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:33:45.178879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:45.178933Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:33:45.179021Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:33:45.179056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:45.179118Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:33:45.179146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:45.179214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:33:45.179247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:45.179278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:33:45.179328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:33:45.179385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:33:45.179417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:33:45.179458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:33:45.181120Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:33:45.181252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:33:45.181303Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
e TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 26 PathOwnerId: 72057594046678944, cookie: 112 2025-03-31T16:33:45.453779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 26 PathOwnerId: 72057594046678944, cookie: 112 2025-03-31T16:33:45.453822Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 112 2025-03-31T16:33:45.453853Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 26 2025-03-31T16:33:45.453882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:33:45.454709Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 3 PathOwnerId: 72057594046678944, cookie: 112 2025-03-31T16:33:45.454790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 3 PathOwnerId: 72057594046678944, cookie: 112 2025-03-31T16:33:45.454820Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 112 2025-03-31T16:33:45.454847Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 3 2025-03-31T16:33:45.454890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2025-03-31T16:33:45.454954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 112, ready parts: 0/1, is published: true 2025-03-31T16:33:45.456960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 112:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:112 msg type: 269090816 2025-03-31T16:33:45.457111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 112, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 112 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 112 at step: 5000013 2025-03-31T16:33:45.457867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000013, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:45.457961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 112 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000013 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:33:45.458002Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropForceUnsafe TPropose, operationId: 112:0 HandleReply TEvOperationPlan, step: 5000013, at schemeshard: 72057594046678944 2025-03-31T16:33:45.458040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 7] name: USER_0 type: EPathTypeSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 112 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:33:45.458063Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 7] 2025-03-31T16:33:45.458157Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 112:0 128 -> 130 2025-03-31T16:33:45.458316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:33:45.458383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 2 2025-03-31T16:33:45.459786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-03-31T16:33:45.459957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 FAKE_COORDINATOR: Erasing txId 112 2025-03-31T16:33:45.461395Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:33:45.461428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:33:45.461521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 7] 2025-03-31T16:33:45.461607Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:45.461655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 112, path id: 1 2025-03-31T16:33:45.461715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 112, path id: 7 2025-03-31T16:33:45.461778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 112:0, at schemeshard: 72057594046678944 2025-03-31T16:33:45.461815Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 112:0 ProgressState 2025-03-31T16:33:45.461872Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#112:0 progress is 1/1 2025-03-31T16:33:45.461894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2025-03-31T16:33:45.461921Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#112:0 progress is 1/1 2025-03-31T16:33:45.461942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2025-03-31T16:33:45.461977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 112, ready parts: 1/1, is published: false 2025-03-31T16:33:45.462008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2025-03-31T16:33:45.462034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 112:0 2025-03-31T16:33:45.462055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 112:0 2025-03-31T16:33:45.462107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2025-03-31T16:33:45.462133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 112, publications: 2, subscribers: 0 2025-03-31T16:33:45.462157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 112, [OwnerId: 72057594046678944, LocalPathId: 1], 27 
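The drop trace keeps two separate completion conditions: the parts counter ("ready parts: 1/1") and the publication flag ("is published: false"), and waiters are only satisfied once both hold ("Publication complete, notify & remove"). A small illustrative model of that gate, with hypothetical names rather than the real schemeshard types:

```cpp
// Hypothetical model of the two gates checked before an operation notifies
// its subscribers: all parts done AND all scheme-board publications acked.
#include <cstdio>

struct TOperationProgress {
    int ReadyParts = 0;
    int TotalParts = 1;
    int PendingPublications = 0; // "publications: 2, subscribers: 0"

    bool IsReadyToDone() const { return ReadyParts == TotalParts; }
    bool IsPublished()   const { return PendingPublications == 0; }

    // Gate for "satisfy waiter" / "notify & remove".
    bool IsReadyToNotify() const { return IsReadyToDone() && IsPublished(); }
};

int main() {
    TOperationProgress op;
    op.ReadyParts = 1;          // "Part operation is done ... progress is 1/1"
    op.PendingPublications = 2; // paths 1 and 7 announced, acks outstanding
    std::printf("ready parts: %d/%d, is published: %s\n",
                op.ReadyParts, op.TotalParts, op.IsPublished() ? "true" : "false");
    op.PendingPublications = 0; // both TEvUpdateAck messages arrived
    if (op.IsReadyToNotify())
        std::printf("Publication complete, notify & remove\n");
}
```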
2025-03-31T16:33:45.462191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 112, [OwnerId: 72057594046678944, LocalPathId: 7], 18446744073709551615 2025-03-31T16:33:45.463244Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 27 PathOwnerId: 72057594046678944, cookie: 112 2025-03-31T16:33:45.463340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 27 PathOwnerId: 72057594046678944, cookie: 112 2025-03-31T16:33:45.463371Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 112 2025-03-31T16:33:45.463398Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 27 2025-03-31T16:33:45.463422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:33:45.464344Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 112 2025-03-31T16:33:45.464419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 112 2025-03-31T16:33:45.464453Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 112 2025-03-31T16:33:45.464500Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 18446744073709551615 2025-03-31T16:33:45.464532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 2 2025-03-31T16:33:45.464611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 112, subscribers: 0 2025-03-31T16:33:45.464839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-31T16:33:45.464875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2025-03-31T16:33:45.464962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 1 2025-03-31T16:33:45.465438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-31T16:33:45.465464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2025-03-31T16:33:45.465496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:33:45.466753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-03-31T16:33:45.468266Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-03-31T16:33:45.468408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-03-31T16:33:45.468971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 112, wait until txId: 112 TestWaitNotification wait txId: 112 2025-03-31T16:33:45.469216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 112: send EvNotifyTxCompletion 2025-03-31T16:33:45.469238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 112 2025-03-31T16:33:45.469608Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 112, at schemeshard: 72057594046678944 2025-03-31T16:33:45.469668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 112: got EvNotifyTxCompletionResult 2025-03-31T16:33:45.469698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 112: satisfy waiter [1:661:2652] TestWaitNotification: OK eventTxId 112
>> TKeyValueTracingTest::WriteHuge [FAIL]
>> TKeyValueTracingTest::ReadHuge [FAIL]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::DSConfigsStub [GOOD]
Test command err: 2025-03-31T16:32:01.401328Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:32:01.401510Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:32:01.401665Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001aef/r3tmp/tmpuJNdpl/pdisk_1.dat TServer::EnableGrpc on GrpcPort 6623, node 1 TClient is connected to server localhost:8183 2025-03-31T16:32:01.989989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:32:02.057717Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:32:02.058410Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:32:02.058460Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:32:02.058511Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:32:02.058983Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:32:02.095741Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:02.095910Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:32:02.107469Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:32:02.234239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715657:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715657 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-03-31T16:32:02.377644Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:32:02.377898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:32:02.378169Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:32:02.378257Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:32:02.378346Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:32:02.378443Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:32:02.378604Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:32:02.378714Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:32:02.378814Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:32:02.378897Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:32:02.378983Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:32:02.379125Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:32:02.400190Z node 1 :TX_TIERING INFO: fline=manager.cpp:128;event=start_subscribing_metadata; 2025-03-31T16:32:02.426698Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:749:2629];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:32:02.426820Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:749:2629];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:32:02.427131Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:749:2629];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:32:02.427286Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:749:2629];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:32:02.427381Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:749:2629];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:32:02.427465Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:749:2629];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:32:02.427555Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:749:2629];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:32:02.427647Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:749:2629];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:32:02.427719Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:749:2629];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:32:02.427802Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:749:2629];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:32:02.427877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:749:2629];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:32:02.427944Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:749:2629];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:32:02.431569Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:32:02.431686Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:32:02.431849Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:32:02.431928Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:32:02.432165Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:32:02.432219Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:32:02.432371Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:32:02.432436Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:32:02.432526Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:32:02.432597Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:32:02.432661Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:32:02.432705Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:32:02.434323Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:32:02.434400Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:32:02.434595Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:32:02.434640Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;de ... -03-31T16:32:59.221055Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=1; 2025-03-31T16:32:59.221103Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier1' started at tablet 0 2025-03-31T16:32:59.221140Z node 1 :TX_TIERING DEBUG: manager.cpp:142 :Restarting tier '/Root/tier2' at tablet 0 2025-03-31T16:32:59.221191Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 0 2025-03-31T16:32:59.221254Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier2' started at tablet 0 2025-03-31T16:32:59.221302Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=1}{id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; Initialization finished
REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;EXPECTATION=0;WAITING=1 2025-03-31T16:33:10.389151Z node 1 :TX_PROXY ERROR: Actor# [1:3704:4840] txid# 281474976715753, issues: { message: "Other entities depend on this data source, please remove them at the beginning: /Root/olapStore/olapTable" severity: 1 }
REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;RESULT=
: Error: Execution, code: 1060
:1:27: Error: Executing DROP OBJECT EXTERNAL_DATA_SOURCE
: Error:
: Error: Other entities depend on this data source, please remove them at the beginning: /Root/olapStore/olapTable, code: 2003 , code: 2003 ;EXPECTATION=0
FINISHED_REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;EXPECTATION=0;WAITING=1
REQUEST=DROP TABLE `/Root/olapStore/olapTable`;EXPECTATION=1;WAITING=1
2025-03-31T16:33:21.559157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropTable, opId: 281474976715764:0, at schemeshard: 72057594046644480
2025-03-31T16:33:22.291402Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715764;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715764;
2025-03-31T16:33:22.291771Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715764;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715764;
2025-03-31T16:33:22.292188Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715764;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715764;
REQUEST=DROP TABLE `/Root/olapStore/olapTable`;RESULT=
: Info: Execution, code: 1060
:1:12: Info: Executing DROP TABLE
: Info: Success, code: 4 ;EXPECTATION=1
FINISHED_REQUEST=DROP TABLE `/Root/olapStore/olapTable`;EXPECTATION=1;WAITING=1
REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;EXPECTATION=1;WAITING=1
2025-03-31T16:33:32.718969Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1;
2025-03-31T16:33:32.719357Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1;
2025-03-31T16:33:32.719423Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1;
2025-03-31T16:33:32.719486Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1;
2025-03-31T16:33:32.719658Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1;
2025-03-31T16:33:32.719708Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=0;
2025-03-31T16:33:32.719761Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 0
2025-03-31T16:33:32.719809Z node 1 :TX_TIERING DEBUG: manager.cpp:142 :Restarting tier '/Root/tier2' at tablet 0
2025-03-31T16:33:32.719843Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 0
2025-03-31T16:33:32.719918Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier2' started at tablet 0
2025-03-31T16:33:32.719976Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;};
2025-03-31T16:33:32.720089Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1;
2025-03-31T16:33:32.720110Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=72075186224037888;has_config=0;
2025-03-31T16:33:32.720144Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037888
2025-03-31T16:33:32.720167Z node 1 :TX_TIERING DEBUG: manager.cpp:142 :Restarting tier '/Root/tier2' at tablet 72075186224037888
2025-03-31T16:33:32.720184Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 72075186224037888
2025-03-31T16:33:32.720207Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier2' started at tablet 72075186224037888
2025-03-31T16:33:32.720243Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;};
2025-03-31T16:33:32.720270Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1;
2025-03-31T16:33:32.720292Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=72075186224037889;has_config=0;
2025-03-31T16:33:32.720321Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037889
2025-03-31T16:33:32.720347Z node 1 :TX_TIERING DEBUG: manager.cpp:142 :Restarting tier '/Root/tier2' at tablet 72075186224037889
2025-03-31T16:33:32.720369Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 72075186224037889
2025-03-31T16:33:32.720400Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier2' started at tablet 72075186224037889
2025-03-31T16:33:32.720443Z node 1 :TX_TIERING DEBUG:
fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-03-31T16:33:32.720467Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-03-31T16:33:32.720484Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=72075186224037890;has_config=0; 2025-03-31T16:33:32.720503Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037890 2025-03-31T16:33:32.720521Z node 1 :TX_TIERING DEBUG: manager.cpp:142 :Restarting tier '/Root/tier2' at tablet 72075186224037890 2025-03-31T16:33:32.720535Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 72075186224037890 2025-03-31T16:33:32.720555Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier2' started at tablet 72075186224037890 2025-03-31T16:33:32.720580Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-03-31T16:33:32.720868Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037888;self_id=[1:746:2626];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:242;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; 2025-03-31T16:33:32.720956Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037889;self_id=[1:749:2629];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:242;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; 2025-03-31T16:33:32.721038Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037890;self_id=[1:757:2635];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:242;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;EXPECTATION=1;WAITING=1 REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;EXPECTATION=1;WAITING=1 2025-03-31T16:33:43.882278Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2025-03-31T16:33:43.882594Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2025-03-31T16:33:43.882634Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier2;tablet=0;has_config=0; 2025-03-31T16:33:43.882682Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 0 2025-03-31T16:33:43.882746Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-03-31T16:33:43.882793Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2025-03-31T16:33:43.882816Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2025-03-31T16:33:43.882838Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2025-03-31T16:33:43.882894Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2025-03-31T16:33:43.882912Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier2;tablet=72075186224037888;has_config=0; 2025-03-31T16:33:43.882933Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped 
at tablet 72075186224037888
2025-03-31T16:33:43.882961Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;};
2025-03-31T16:33:43.882980Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier2;
2025-03-31T16:33:43.882994Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier2;tablet=72075186224037889;has_config=0;
2025-03-31T16:33:43.883011Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 72075186224037889
2025-03-31T16:33:43.883037Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;};
2025-03-31T16:33:43.883057Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier2;
2025-03-31T16:33:43.883072Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier2;tablet=72075186224037890;has_config=0;
2025-03-31T16:33:43.883117Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 72075186224037890
2025-03-31T16:33:43.883154Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;};
2025-03-31T16:33:43.883380Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037888;self_id=[1:746:2626];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:242;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0;
2025-03-31T16:33:43.883481Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037889;self_id=[1:749:2629];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:242;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0;
2025-03-31T16:33:43.883520Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037890;self_id=[1:757:2635];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:242;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0;
REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;RESULT=;EXPECTATION=1
FINISHED_REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;EXPECTATION=1;WAITING=1
>> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotCompactBackups [GOOD]
>> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotCompactBorrowed
>> KqpNewEngine::PkRangeSelect1
>> GenericFederatedQuery::ClickHouseManagedSelectConstant
>> TSchemeShardAuditSettings::AlterSubdomain [GOOD]
>> KqpSqlIn::KeySuffix
>> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly-StrictAclCheck [GOOD]
|96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest
>> TraverseColumnShard::TraverseColumnTableRebootSaTabletInAggregate
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TSchemeShardAuditSettings::AlterSubdomain [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141]
Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141]
2025-03-31T16:33:44.881825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-03-31T16:33:44.882021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-31T16:33:44.882060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-03-31T16:33:44.882089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-03-31T16:33:44.882845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-03-31T16:33:44.882887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-03-31T16:33:44.882985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-31T16:33:44.883065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-03-31T16:33:44.884146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-03-31T16:33:44.960557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-31T16:33:44.960623Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:33:44.966708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-03-31T16:33:44.966815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-03-31T16:33:44.967030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-03-31T16:33:44.974489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-03-31T16:33:44.974747Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-03-31T16:33:44.978108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-03-31T16:33:44.978474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-03-31T16:33:44.984102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-03-31T16:33:44.991111Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-31T16:33:44.991192Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-31T16:33:44.992379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-03-31T16:33:44.992441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-31T16:33:44.992516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-03-31T16:33:44.992662Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-03-31T16:33:44.999430Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062]
2025-03-31T16:33:45.171972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:33:45.172165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:45.172369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:33:45.172564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:33:45.172610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:45.174989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:45.175197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:33:45.175378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:45.175459Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:33:45.175499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:33:45.175534Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:33:45.177519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:45.177578Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:33:45.177634Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:33:45.179443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:45.179495Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:45.179556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:45.179624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:33:45.183258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:33:45.184922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:33:45.185143Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:33:45.186054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:45.186188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:33:45.186225Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:45.186510Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:33:45.186568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:45.186756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:33:45.186830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:33:45.188609Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:33:45.188660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:33:45.188808Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:45.188846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:33:45.189104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:45.189153Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:33:45.189252Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:33:45.189305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:45.189343Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:33:45.189379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:45.189416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:33:45.189446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:45.189474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:33:45.189518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:33:45.189571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:33:45.189632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:33:45.189673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:33:45.191555Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:33:45.191672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:33:45.191705Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... hard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 102 PathOwnerId: 72057594046678944, cookie: 175 2025-03-31T16:33:47.138788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 102 PathOwnerId: 72057594046678944, cookie: 175 2025-03-31T16:33:47.138817Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 2025-03-31T16:33:47.138855Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 102 2025-03-31T16:33:47.138888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:33:47.139891Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 4 PathOwnerId: 72057594046678944, cookie: 175 2025-03-31T16:33:47.139998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 4 PathOwnerId: 72057594046678944, cookie: 175 2025-03-31T16:33:47.140034Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 2025-03-31T16:33:47.140065Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 26], version: 4 2025-03-31T16:33:47.140095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 3 2025-03-31T16:33:47.140174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 175, ready parts: 0/1, is published: true 2025-03-31T16:33:47.142542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 175:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:175 msg type: 269090816 2025-03-31T16:33:47.142655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 175, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 175 at step: 5000076 FAKE_COORDINATOR: advance: minStep5000076 State->FrontStep: 5000075 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 175 at step: 5000076 2025-03-31T16:33:47.143295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 
5000076, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:47.143401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 175 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000076 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:33:47.143446Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropForceUnsafe TPropose, operationId: 175:0 HandleReply TEvOperationPlan, step: 5000076, at schemeshard: 72057594046678944 2025-03-31T16:33:47.143488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 26] name: USER_0 type: EPathTypeSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 175 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:33:47.143520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 26] 2025-03-31T16:33:47.143613Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 175:0 128 -> 130 2025-03-31T16:33:47.143789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:33:47.143846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 2 2025-03-31T16:33:47.144267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2025-03-31T16:33:47.146094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 FAKE_COORDINATOR: Erasing txId 175 2025-03-31T16:33:47.147471Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:33:47.147511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 175, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:33:47.147632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 175, path id: [OwnerId: 72057594046678944, LocalPathId: 26] 2025-03-31T16:33:47.147754Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:47.147788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 175, path id: 1 2025-03-31T16:33:47.147823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 175, path id: 26 2025-03-31T16:33:47.147945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 175:0, at schemeshard: 72057594046678944 2025-03-31T16:33:47.147989Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 175:0 ProgressState 2025-03-31T16:33:47.148061Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#175:0 progress is 1/1 2025-03-31T16:33:47.148094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2025-03-31T16:33:47.148129Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#175:0 progress is 1/1 2025-03-31T16:33:47.148157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2025-03-31T16:33:47.148190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToNotify, TxId: 175, ready parts: 1/1, is published: false 2025-03-31T16:33:47.148232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2025-03-31T16:33:47.148276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 175:0 2025-03-31T16:33:47.148303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 175:0 2025-03-31T16:33:47.148374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 3 2025-03-31T16:33:47.148405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 175, publications: 2, subscribers: 0 2025-03-31T16:33:47.148433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 175, [OwnerId: 72057594046678944, LocalPathId: 1], 103 2025-03-31T16:33:47.148477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 175, [OwnerId: 72057594046678944, LocalPathId: 26], 18446744073709551615 2025-03-31T16:33:47.149661Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 103 PathOwnerId: 72057594046678944, cookie: 175 2025-03-31T16:33:47.149739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 103 PathOwnerId: 72057594046678944, cookie: 175 2025-03-31T16:33:47.149768Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 175 2025-03-31T16:33:47.149797Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 103 2025-03-31T16:33:47.149830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:33:47.151057Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 175 2025-03-31T16:33:47.151168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 175 2025-03-31T16:33:47.151199Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 175 2025-03-31T16:33:47.151227Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 26], version: 18446744073709551615 2025-03-31T16:33:47.151258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 2 2025-03-31T16:33:47.151347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 175, subscribers: 0 2025-03-31T16:33:47.151937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-31T16:33:47.151981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 26], at schemeshard: 
72057594046678944
2025-03-31T16:33:47.152067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 1
2025-03-31T16:33:47.152434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944
2025-03-31T16:33:47.152473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 26], at schemeshard: 72057594046678944
2025-03-31T16:33:47.152532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-03-31T16:33:47.154206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175
2025-03-31T16:33:47.156399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175
2025-03-31T16:33:47.156517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944
2025-03-31T16:33:47.156583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944
TestModificationResult got TxId: 175, wait until txId: 175
TestWaitNotification wait txId: 175
2025-03-31T16:33:47.157979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 175: send EvNotifyTxCompletion
2025-03-31T16:33:47.158020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 175
2025-03-31T16:33:47.159501Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 175, at schemeshard: 72057594046678944
2025-03-31T16:33:47.159619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 175: got EvNotifyTxCompletionResult
2025-03-31T16:33:47.159666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 175: satisfy waiter [1:2470:4461]
TestWaitNotification: OK eventTxId 175
>> KqpLocks::Invalidate
>> GenericFederatedQuery::TestFailsOnIncorrectScriptExecutionOperationId [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly-StrictAclCheck [GOOD]
Test command err:
Starting YDB, grpc: 20312, msgbus: 20354
2025-03-31T16:30:21.048822Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488011861357900723:2079];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:30:21.049587Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0015d1/r3tmp/tmpmlOO2a/pdisk_1.dat
2025-03-31T16:30:21.536511Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:30:21.562330Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:30:21.562425Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:30:21.588466Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState:
Connecting -> Connected TServer::EnableGrpc on GrpcPort 20312, node 1 2025-03-31T16:30:21.792754Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:30:21.792773Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:30:21.792785Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:30:21.792896Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20354 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-03-31T16:30:22.231958Z node 1 :TX_PROXY DEBUG: actor# [1:7488011861357900945:2117] Handle TEvNavigate describe path dc-1 2025-03-31T16:30:22.232010Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011865652868767:2451] HANDLE EvNavigateScheme dc-1 2025-03-31T16:30:22.232430Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011865652868767:2451] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-31T16:30:22.290648Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011865652868767:2451] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-03-31T16:30:22.304040Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011865652868767:2451] Handle TEvDescribeSchemeResult Forward to# [1:7488011865652868766:2450] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 
Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:30:22.337554Z node 1 :TX_PROXY DEBUG: actor# [1:7488011861357900945:2117] Handle TEvProposeTransaction 2025-03-31T16:30:22.337580Z node 1 :TX_PROXY DEBUG: actor# [1:7488011861357900945:2117] TxId# 281474976710657 ProcessProposeTransaction 2025-03-31T16:30:22.337711Z node 1 :TX_PROXY DEBUG: actor# [1:7488011861357900945:2117] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7488011865652868778:2458] 2025-03-31T16:30:22.420506Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011865652868778:2458] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-31T16:30:22.422186Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011865652868778:2458] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-03-31T16:30:22.422230Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011865652868778:2458] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T16:30:22.422374Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011865652868778:2458] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:30:22.422717Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011865652868778:2458] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:30:22.423033Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011865652868778:2458] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-03-31T16:30:22.423133Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011865652868778:2458] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-03-31T16:30:22.423283Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011865652868778:2458] txid# 281474976710657 HANDLE EvClientConnected 2025-03-31T16:30:22.435305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:30:22.452491Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011865652868778:2458] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-03-31T16:30:22.452565Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011865652868778:2458] txid# 281474976710657 SEND to# [1:7488011865652868777:2457] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 
2025-03-31T16:30:22.503718Z node 1 :TX_PROXY DEBUG: actor# [1:7488011861357900945:2117] Handle TEvProposeTransaction 2025-03-31T16:30:22.503740Z node 1 :TX_PROXY DEBUG: actor# [1:7488011861357900945:2117] TxId# 281474976710658 ProcessProposeTransaction 2025-03-31T16:30:22.503759Z node 1 :TX_PROXY DEBUG: actor# [1:7488011861357900945:2117] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7488011865652868821:2497] 2025-03-31T16:30:22.505412Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011865652868821:2497] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-31T16:30:22.505446Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011865652868821:2497] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-03-31T16:30:22.505472Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011865652868821:2497] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T16:30:22.505509Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011865652868821:2497] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:30:22.505700Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011865652868821:2497] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:30:22.505774Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011865652868821:2497] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T16:30:22.505809Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011865652868821:2497] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-03-31T16:30:22.505899Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011865652868821:2497] txid# 281474976710658 HANDLE EvClientConnected 2025-03-31T16:30:22.506327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:30:22.508759Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011865652868821:2497] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-03-31T16:30:22.508811Z node 1 :TX_PROXY DEBUG: Actor# [1:7488011865652868821:2497] txid# 281474976710658 SEND to# [1:7488011865652868820:2496] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-03-31T16:30:24.248894Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011874242803492:2336], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:24.249007Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:24.255381Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488011874242803504:2339], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:30:24.257955Z node 1 :TX_PROXY DEBUG: actor# [1:7488011861357900945:2117] Handle TEvProposeTransaction 2025-03-31T16:30:24.257994Z node 1 :TX_PROXY DEBUG: actor# [1:7488011861357900945:2117] TxId# 281474976710659 ProcessProposeTransaction 2025-03-31 ... 5 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-03-31T16:33:45.365413Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012737838335946:2862] txid# 281474976710665 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T16:33:45.365527Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012737838335946:2862] txid# 281474976710665 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:33:45.365989Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012737838335946:2862] txid# 281474976710665 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:33:45.366176Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012737838335946:2862] HANDLE EvNavigateKeySetResult, txid# 281474976710665 shardToRequest# 72075186224037891 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 2] DomainInfo.Params# Version: 3 PlanResolution: 50 Coordinators: 72075186224037890 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037889 SchemeShard: 72075186224037891 Hive: 72075186224037888 RedirectRequired# true 2025-03-31T16:33:45.366287Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012737838335946:2862] txid# 281474976710665 SEND to# 72075186224037891 shardToRequest {TEvModifySchemeTransaction txid# 281474976710665 TabletId# 72075186224037891} 2025-03-31T16:33:45.366992Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012737838335946:2862] txid# 281474976710665 HANDLE EvClientConnected 2025-03-31T16:33:45.370019Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/dc-1" OperationType: ESchemeOpModifyACL ModifyACL { Name: "tenant-db" DiffACL: "\n\022\010\001\022\016\032\014clusteradmin\n\031\010\000\022\025\010\001\020\200\004\032\014clusteradmin \003" } } TxId: 281474976710665 TabletId: 72075186224037891 Owner: "root@builtin" UserToken: "***" PeerName: "ipv6:[::1]:57990" , at schemeshard: 72075186224037891 2025-03-31T16:33:45.370465Z node 60 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /dc-1/tenant-db, operationId: 281474976710665:0, at schemeshard: 72075186224037891 2025-03-31T16:33:45.370694Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72075186224037891, LocalPathId: 1] name: dc-1/tenant-db type: EPathTypeSubDomain state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72075186224037891, LocalPathId: 1] 2025-03-31T16:33:45.370718Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72075186224037891, LocalPathId: 1] 2025-03-31T16:33:45.370964Z node 60 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710665:1, propose status:StatusSuccess, reason: , at schemeshard: 72075186224037891 2025-03-31T16:33:45.371000Z node 60 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72075186224037891 2025-03-31T16:33:45.371146Z node 60 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710665:0 progress is 1/1 2025-03-31T16:33:45.371169Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710665 ready parts: 1/1 2025-03-31T16:33:45.371208Z node 60 
:FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710665:0 progress is 1/1 2025-03-31T16:33:45.371231Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710665 ready parts: 1/1 2025-03-31T16:33:45.371290Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186224037891, LocalPathId: 1] was 4 2025-03-31T16:33:45.371370Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710665, ready parts: 1/1, is published: false 2025-03-31T16:33:45.371404Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72075186224037891, LocalPathId: 1], at schemeshard: 72075186224037891 2025-03-31T16:33:45.371423Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710665 ready parts: 1/1 2025-03-31T16:33:45.371442Z node 60 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710665:0 2025-03-31T16:33:45.371469Z node 60 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710665, publications: 1, subscribers: 0 2025-03-31T16:33:45.371491Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710665, [OwnerId: 72075186224037891, LocalPathId: 1], 9 2025-03-31T16:33:45.374891Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710665, response: Status: StatusSuccess TxId: 281474976710665 SchemeshardId: 72075186224037891, at schemeshard: 72075186224037891 2025-03-31T16:33:45.375260Z node 60 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710665, subject: root@builtin, status: StatusSuccess, operation: MODIFY ACL, path: /dc-1/tenant-db, add access: +(DS):clusteradmin, remove access: -():clusteradmin:- 2025-03-31T16:33:45.375598Z node 60 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186224037891 2025-03-31T16:33:45.375599Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012737838335946:2862] txid# 281474976710665 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710665} 2025-03-31T16:33:45.375638Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186224037891, txId: 281474976710665, path id: [OwnerId: 72075186224037891, LocalPathId: 1] 2025-03-31T16:33:45.375711Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012737838335946:2862] txid# 281474976710665 SEND to# [59:7488012737838335945:2359] Source {TEvProposeTransactionStatus txid# 281474976710665 Status# 48} 2025-03-31T16:33:45.375989Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186224037891, txId: 281474976710665, path id: [OwnerId: 72075186224037891, LocalPathId: 1] 2025-03-31T16:33:45.376103Z node 60 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186224037891 2025-03-31T16:33:45.376147Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [60:7488012712571783498:2308], at schemeshard: 72075186224037891, txId: 281474976710665, path id: 1 2025-03-31T16:33:45.376180Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [60:7488012712571783498:2308], at schemeshard: 72075186224037891, txId: 281474976710665, path id: 1 TEST clusteradmin triggers auth on tenant 2025-03-31T16:33:45.377134Z node 60 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186224037891, msg: Owner: 72075186224037891 Generation: 1 
LocalPathId: 1 Version: 9 PathOwnerId: 72075186224037891, cookie: 281474976710665 2025-03-31T16:33:45.377331Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186224037891, msg: Owner: 72075186224037891 Generation: 1 LocalPathId: 1 Version: 9 PathOwnerId: 72075186224037891, cookie: 281474976710665 2025-03-31T16:33:45.377365Z node 60 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72075186224037891, txId: 281474976710665 2025-03-31T16:33:45.377394Z node 60 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186224037891, txId: 281474976710665, pathId: [OwnerId: 72075186224037891, LocalPathId: 1], version: 9 2025-03-31T16:33:45.377435Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186224037891, LocalPathId: 1] was 5 2025-03-31T16:33:45.377566Z node 60 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72075186224037891, txId: 281474976710665, subscribers: 0 2025-03-31T16:33:45.380274Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976710665 TClient is connected to server localhost:25948 TClient::Ls request: /dc-1/tenant-db 2025-03-31T16:33:45.981457Z node 59 :TX_PROXY DEBUG: actor# [59:7488012707773563682:2113] Handle TEvNavigate describe path /dc-1/tenant-db 2025-03-31T16:33:45.981558Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012737838335959:2871] HANDLE EvNavigateScheme /dc-1/tenant-db 2025-03-31T16:33:45.982156Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012737838335959:2871] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-31T16:33:45.982334Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012737838335959:2871] SEND to# 72075186224037891 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1/tenant-db" Options { ReturnBoundaries: false ShowPrivateTable: true ReturnRangeKey: false } 2025-03-31T16:33:45.985228Z node 59 :TX_PROXY DEBUG: Actor# [59:7488012737838335959:2871] Handle TEvDescribeSchemeResult Forward to# [59:7488012737838335958:2870] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 0 Record# Status: StatusSuccess Path: "/dc-1/tenant-db" PathDescription { Self { Name: "dc-1/tenant-db" PathId: 1 SchemeshardId: 72075186224037891 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\n\025\010\001\020\200\004\032\014clusteradmin \003" EffectiveACL: "\n\030\010\001\020\377\377\003\032\014root@builtin \003(\001\n\025\010\001\020\200\004\032\014clusteradmin \003" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 1 EffectiveACLVersion: 2 UserAttrsVersion: 1 ChildrenVersion: 2 SubDomainVersion: 3 SecurityStateVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 2 ProcessingParams { Version: 3 PlanResolution: 50 Coordinators: 72075186224037890 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037889 SchemeShard: 72075186224037891 Hive: 72075186224037888 } DomainKey { SchemeShard: 72057594046644480 PathId: 2 } StoragePools { Name: "name_tenant-db_kind_tenant-db" Kind: "tenant-db" } StoragePools { Name: "name_tenant-db_kind_test" Kind: "test" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 2 } DiskSpaceUsage { Tables { 
TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Sids { Name: "tenantuser" Type: USER } Audience: "/dc-1/tenant-db" } } } PathId: 1 PathOwnerId: 72075186224037891 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1/tenant-db" PathId: 1 SchemeshardId: 72075186224037891 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\n\025\010\001\020\200\004\032\014clusteradmin \003" EffectiveACL: "\n\030\010\001\020\377\377\003\032\014root@builtin \003(\001\n\025\010\001\020\200\004\032\014clusteradmin \003" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 1 EffectiveACLVersion: 2 UserAttrsVersion: 1 ChildrenVersion: 2 SubDomainVersion: 3 SecurityStateVersion: 1 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72075186224037891 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 184467440737095... (TRUNCATED) 2025-03-31T16:33:46.032837Z node 59 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 60 2025-03-31T16:33:46.033538Z node 59 :HIVE WARN: HIVE#72057594037968897 Node(60, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-03-31T16:33:46.035043Z node 60 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 >> KqpKv::ReadRows_SpecificKey >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldRequestCompactionsConfigRequest [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotRequestCompactionsAfterDisable >> KqpNewEngine::SimpleUpsertSelect [GOOD] >> KqpNewEngine::ShuffleWrite >> KqpScanArrowInChanels::AggregateWithFunction [GOOD] >> KqpNotNullColumns::UpdateNotNullPk [GOOD] >> KqpNotNullColumns::UpdateNotNullPkPg >> KqpScanArrowInChanels::AggregateEmptySum ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::TestFailsOnIncorrectScriptExecutionOperationId [GOOD] Test command err: Trying to start YDB, gRPC: 4694, MsgBus: 10191 2025-03-31T16:33:45.970374Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012739471430337:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:45.971418Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000efa/r3tmp/tmpgPotaz/pdisk_1.dat 2025-03-31T16:33:46.285648Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4694, node 1 2025-03-31T16:33:46.337471Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:46.337613Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:46.343600Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:33:46.356007Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:46.356039Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty 
maybe) 2025-03-31T16:33:46.356048Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:46.356206Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10191 TClient is connected to server localhost:10191 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:33:46.841119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... |96.3%| [TA] $(B)/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/{meta.json ... results_accumulator.log} |96.4%| [TA] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardAuditSettings::AlterExtSubdomain-ExternalSchemeShard-true [GOOD] >> KqpNewEngine::PureExpr [GOOD] >> KqpNewEngine::PrunePartitionsByLiteral >> KqpNotNullColumns::ReplaceNotNullPk >> KqpSort::ReverseOptimized [GOOD] >> KqpSort::ReverseOptimizedWithPredicate >> KqpTx::DeferredEffects [GOOD] >> KqpTx::CommitStats ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TSchemeShardAuditSettings::AlterExtSubdomain-ExternalSchemeShard-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:33:44.881809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:33:44.881919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:33:44.881959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:33:44.881989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:33:44.882827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:33:44.882883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 
10000 2025-03-31T16:33:44.882977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:33:44.883056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:33:44.884142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:33:44.965599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:33:44.965647Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:33:44.972925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:33:44.973059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:33:44.973286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:33:44.979211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:33:44.979474Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:33:44.980233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:44.980493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:33:44.984442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:44.991061Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:33:44.991151Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:44.992360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:33:44.992424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:33:44.992476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:33:44.992634Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:33:44.999202Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:33:45.153658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:33:45.155025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:45.156176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:33:45.157147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at 
schemeshard: 72057594046678944 2025-03-31T16:33:45.157209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:45.159905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:45.160027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:33:45.160200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:45.160275Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:33:45.160309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:33:45.160340Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:33:45.162287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:45.162345Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:33:45.162400Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:33:45.164143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:45.164193Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:45.164243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:45.164301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:33:45.168523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:33:45.170411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:33:45.171376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:33:45.172405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:45.172523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:33:45.172579Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:45.175421Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:33:45.175485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:45.175650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:33:45.175752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:33:45.177893Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:33:45.177951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:33:45.178085Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:45.178122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:33:45.178337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:45.178378Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:33:45.178460Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:33:45.178494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:45.178527Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:33:45.178555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:45.178588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:33:45.178632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:45.178666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:33:45.178711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:33:45.178766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:33:45.178814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:33:45.178846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:33:45.180735Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:33:45.180840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:33:45.180876Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: Publication in-flight, co ... 233409618 TxId: 175 } 2025-03-31T16:33:50.319323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free owner tablets reply, message: Status: ALREADY Owner: 72075186233409618 TxId: 175 Origin: 72057594037968897, at schemeshard: 72057594046678944 2025-03-31T16:33:50.319424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 175:0, at schemeshard: 72057594046678944, message: Status: ALREADY Owner: 72075186233409618 TxId: 175 Origin: 72057594037968897 2025-03-31T16:33:50.319463Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropExtSubdomain TDeleteExternalShards, operationId: 175:0 HandleReply TDeleteExternalShards, Status: ALREADY, from Hive: 72057594037968897, Owner: 72075186233409618, at schemeshard: 72057594046678944 2025-03-31T16:33:50.319563Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 175:0 134 -> 135 2025-03-31T16:33:50.319703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:33:50.319752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 5 2025-03-31T16:33:50.321310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 175:0, at schemeshard: 72057594046678944 2025-03-31T16:33:50.321443Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:33:50.321470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 175, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:33:50.321583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 175, path id: [OwnerId: 72057594046678944, LocalPathId: 26] 2025-03-31T16:33:50.321661Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:50.321683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 175, path id: 1 2025-03-31T16:33:50.321708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 175, path id: 26 2025-03-31T16:33:50.321936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 175:0, at schemeshard: 72057594046678944 2025-03-31T16:33:50.321970Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 175:0 ProgressState 2025-03-31T16:33:50.322002Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 175:0 135 -> 240 2025-03-31T16:33:50.322592Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 103 PathOwnerId: 72057594046678944, cookie: 175 2025-03-31T16:33:50.322644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 103 PathOwnerId: 72057594046678944, cookie: 175 2025-03-31T16:33:50.322662Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 2025-03-31T16:33:50.322682Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 103 
2025-03-31T16:33:50.322703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:33:50.323227Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 175 2025-03-31T16:33:50.323283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 175 2025-03-31T16:33:50.323303Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 2025-03-31T16:33:50.323332Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 26], version: 18446744073709551615 2025-03-31T16:33:50.323355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 6 2025-03-31T16:33:50.323404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 175, ready parts: 0/1, is published: true 2025-03-31T16:33:50.325259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:74 hive 72057594037968897 at ss 72057594046678944 2025-03-31T16:33:50.325304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:73 hive 72057594037968897 at ss 72057594046678944 2025-03-31T16:33:50.325329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:75 hive 72057594037968897 at ss 72057594046678944 2025-03-31T16:33:50.325468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 175:0, at schemeshard: 72057594046678944 2025-03-31T16:33:50.325505Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 175:0 ProgressState 2025-03-31T16:33:50.325580Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#175:0 progress is 1/1 2025-03-31T16:33:50.325608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2025-03-31T16:33:50.325636Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#175:0 progress is 1/1 2025-03-31T16:33:50.325659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2025-03-31T16:33:50.325685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 175, ready parts: 1/1, is published: true 2025-03-31T16:33:50.325716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2025-03-31T16:33:50.325743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 175:0 2025-03-31T16:33:50.325772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 175:0 2025-03-31T16:33:50.325907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 5 2025-03-31T16:33:50.327269Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 74 TxId_Deprecated: 74 TabletID: 72075186233409619 Forgetting tablet 72075186233409619 2025-03-31T16:33:50.327909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 74 ShardOwnerId: 72057594046678944 ShardLocalIdx: 74, at 
schemeshard: 72057594046678944 2025-03-31T16:33:50.328072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 4 2025-03-31T16:33:50.328593Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 73 TxId_Deprecated: 73 TabletID: 72075186233409618 2025-03-31T16:33:50.329620Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:33:50.331875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 73 ShardOwnerId: 72057594046678944 ShardLocalIdx: 73, at schemeshard: 72057594046678944 2025-03-31T16:33:50.332075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 3 2025-03-31T16:33:50.332353Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 75 TxId_Deprecated: 75 TabletID: 72075186233409620 Forgetting tablet 72075186233409618 2025-03-31T16:33:50.333888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 75 ShardOwnerId: 72057594046678944 ShardLocalIdx: 75, at schemeshard: 72057594046678944 2025-03-31T16:33:50.334041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 2 Forgetting tablet 72075186233409620 2025-03-31T16:33:50.334625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-31T16:33:50.334667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 26], at schemeshard: 72057594046678944 2025-03-31T16:33:50.334760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 1 2025-03-31T16:33:50.335126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2025-03-31T16:33:50.335361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2025-03-31T16:33:50.335447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-31T16:33:50.335469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 26], at schemeshard: 72057594046678944 2025-03-31T16:33:50.335517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:33:50.338765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:74 2025-03-31T16:33:50.338803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:74 tabletId 72075186233409619 2025-03-31T16:33:50.338954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:73 2025-03-31T16:33:50.338973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:73 tabletId 72075186233409618 2025-03-31T16:33:50.339016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:75 2025-03-31T16:33:50.339040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to 
deleted shardIdx 72057594046678944:75 tabletId 72075186233409620 2025-03-31T16:33:50.339279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-03-31T16:33:50.339379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 175, wait until txId: 175 TestWaitNotification wait txId: 175 2025-03-31T16:33:50.340417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 175: send EvNotifyTxCompletion 2025-03-31T16:33:50.340445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 175 2025-03-31T16:33:50.341659Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 175, at schemeshard: 72057594046678944 2025-03-31T16:33:50.341743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 175: got EvNotifyTxCompletionResult 2025-03-31T16:33:50.341769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 175: satisfy waiter [1:6731:7711] TestWaitNotification: OK eventTxId 175 >> KqpScanArrowFormat::AggregateEmptySum [GOOD] >> KqpNewEngine::PkRangeSelect1 [GOOD] >> KqpNewEngine::OnlineRO_Consistent |96.4%| [TA] $(B)/ydb/core/tx/schemeshard/ut_auditsettings/test-results/unittest/{meta.json ... results_accumulator.log} |96.4%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/arrow/unittest >> KqpScanArrowFormat::AggregateEmptySum [GOOD] Test command err: Trying to start YDB, gRPC: 3027, MsgBus: 23114 2025-03-31T16:33:27.893215Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012661718922849:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:27.893662Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001700/r3tmp/tmp6307M2/pdisk_1.dat 2025-03-31T16:33:28.243028Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:33:28.283996Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:28.284368Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:28.286129Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3027, node 1 2025-03-31T16:33:28.426867Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:28.426888Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:28.426895Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:28.427023Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23114 TClient is connected to server localhost:23114 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:33:29.073141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:29.108153Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:29.250811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:29.383939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:29.439294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:30.510376Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012674603826376:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:30.510504Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:31.064206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:33:31.089196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:33:31.113556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:33:31.136430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:33:31.163149Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:33:31.191133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:33:31.244752Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012678898794182:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:31.244822Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012678898794187:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:31.244823Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:31.249947Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:33:31.258869Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012678898794189:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:33:31.317434Z node 1 :TX_PROXY ERROR: Actor# [1:7488012678898794243:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:33:33.127347Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488012661718922849:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:33.128107Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:33:33.464133Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438812766, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 14507, MsgBus: 5308 2025-03-31T16:33:34.125805Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012692620047342:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:34.125854Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001700/r3tmp/tmpada4pp/pdisk_1.dat 2025-03-31T16:33:34.216315Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14507, node 2 2025-03-31T16:33:34.254644Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:34.254749Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:34.256421Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:33:34.270368Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:34.270400Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:34.270408Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:34.270551Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5308 TClient is connected to server localhost:5308 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:33:34.607793Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:34.625168Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:34.720053Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:34.860233Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:34.922473Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ... :33:41.969962Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:33:41.992210Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:33:42.013879Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:33:42.034519Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:33:42.054823Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:33:42.077110Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:33:42.106141Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488012725440687390:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:42.106225Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:42.106230Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488012725440687395:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:42.108808Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:33:42.114631Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488012725440687397:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:33:42.213320Z node 3 :TX_PROXY ERROR: Actor# [3:7488012725440687451:3439] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:33:44.651301Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488012712555783232:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:44.652641Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:33:45.091567Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438823574, txId: 281474976715671] shutting down Trying to start YDB, gRPC: 18387, MsgBus: 28831 2025-03-31T16:33:45.870773Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488012739967505779:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:45.870829Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001700/r3tmp/tmpnDCkWE/pdisk_1.dat 2025-03-31T16:33:45.969471Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18387, node 4 2025-03-31T16:33:46.010588Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:46.010716Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:46.011936Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:33:46.036564Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:46.036586Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:46.036594Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:46.036720Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28831 TClient is connected to server localhost:28831 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:33:46.470753Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:46.476267Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:33:46.490042Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:46.556417Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:46.758291Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:46.820894Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:49.237277Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488012757147376732:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:49.237393Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:49.297873Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:33:49.327471Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:33:49.358838Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:33:49.393921Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:33:49.425431Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:33:49.458762Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:33:49.539611Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488012757147377243:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:49.539696Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488012757147377248:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:49.539718Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:49.543263Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:33:49.555760Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7488012757147377250:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:33:49.648804Z node 4 :TX_PROXY ERROR: Actor# [4:7488012757147377305:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:33:51.051429Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7488012739967505779:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:51.052273Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:33:51.098188Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438830882, txId: 281474976715671] shutting down >> YdbTableSplit::SplitByLoadWithReadsMultipleSplitsWithData >> KqpKv::ReadRows_SpecificKey [GOOD] >> KqpKv::ReadRows_UnknownTable >> YdbTableSplit::SplitByLoadWithReads >> KqpNotNullColumns::UpdateNotNullPkPg [GOOD] >> KqpNotNullColumns::UpdateNotNull >> KqpLocks::Invalidate [GOOD] >> KqpLocks::InvalidateOnCommit >> KqpNewEngine::ShuffleWrite [GOOD] >> KqpNewEngine::StaleRO >> KqpNotNullColumns::ReplaceNotNullPk [GOOD] >> KqpNotNullColumns::ReplaceNotNullPkPg >> ColumnShardTiers::DSConfigs [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::ReadHuge [FAIL] Test command err: equal assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:124, void TestOneRead(TString, TString): env.WilsonUploader->Traces.size() == 1 TBackTrace::Capture()+28 (0x1013BF9C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x105F7E30) TestOneRead(TBasicString>, TBasicString>)+4828 (0xFD8444C) NTestSuiteTKeyValueTracingTest::TTestCaseReadHuge::Execute_(NUnitTest::TTestContext&)+318 (0xFD8B01E) std::__y1::__function::__func, void ()>::operator()()+280 (0xFD9E7F8) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x10625D96) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x105FE9A9) NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()+1204 (0xFD9D6A4) NUnitTest::TTestFactory::Execute()+2438 (0x10600276) NUnitTest::RunMain(int, char**)+5213 (0x1062030D) ??+0 (0x7F5A20AC9D90) __libc_start_main+128 (0x7F5A20AC9E40) _start+41 (0xD71A029) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::WriteHuge [FAIL] Test command err: assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:103, void TestOneWrite(TString, TVector &&): (env.WilsonUploader->Traces.size() == 1) failed: (2 != 1) TBackTrace::Capture()+28 (0x1013BF9C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x105F7E30) TestOneWrite(TBasicString>, TVector>, std::__y1::allocator>>>&&)+4253 (0xFD7EA7D) NTestSuiteTKeyValueTracingTest::TTestCaseWriteHuge::Execute_(NUnitTest::TTestContext&)+216 (0xFD8A8B8) std::__y1::__function::__func, void ()>::operator()()+280 (0xFD9E7F8) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x10625D96) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x105FE9A9) 
NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()+1204 (0xFD9D6A4) NUnitTest::TTestFactory::Execute()+2438 (0x10600276) NUnitTest::RunMain(int, char**)+5213 (0x1062030D) ??+0 (0x7F44C822BD90) __libc_start_main+128 (0x7F44C822BE40) _start+41 (0xD71A029) >> ColumnShardTiers::DSConfigsWithQueryServiceDdl [GOOD] >> KqpNewEngine::InShardsWrite |96.4%| [TA] $(B)/ydb/core/keyvalue/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log} |96.4%| [TA] {RESULT} $(B)/ydb/core/keyvalue/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log} >> GenericFederatedQuery::YdbManagedSelectAll [GOOD] >> KqpSqlIn::KeySuffix [GOOD] >> KqpSqlIn::KeySuffix_OnlyTail ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::DSConfigs [GOOD] Test command err: 2025-03-31T16:31:57.243118Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:31:57.243334Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:31:57.243476Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001b47/r3tmp/tmpeAxi9Z/pdisk_1.dat TServer::EnableGrpc on GrpcPort 29715, node 1 TClient is connected to server localhost:28258 2025-03-31T16:31:57.788324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:31:57.819806Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:31:57.820290Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:57.820323Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:57.820352Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:57.820696Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:31:57.855295Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:57.855426Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:57.866869Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Initialization finished REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;EXPECTATION=1;WAITING=1 2025-03-31T16:32:09.733755Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:751:2629], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:09.733932Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:09.737217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:2, at schemeshard: 72057594046644480 2025-03-31T16:32:09.943038Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:868:2707], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:09.943313Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:09.943558Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:873:2712], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:09.946980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:2, at schemeshard: 72057594046644480 2025-03-31T16:32:10.077012Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:875:2714], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:32:10.346583Z node 1 :TX_PROXY ERROR: Actor# [1:971:2781] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:32:10.857951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:32:11.309832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:1, at schemeshard: 72057594046644480 2025-03-31T16:32:12.050091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-03-31T16:32:12.836263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-31T16:32:13.263494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-03-31T16:32:14.249995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-03-31T16:32:14.555206Z node 1 :TX_DATASHARD NOTICE: Starting TBuildIndexScan BuildIndexId: 281474976715679 TabletId: 72075186224037889 OwnerId: 72057594046644480 PathId: 9 TargetName: "/Root/.metadata/secrets/values/index_by_secret_id/indexImplTable" IndexColumns: "secretId" IndexColumns: "ownerUserId" KeyRange { From: "\002\000\000\000\000\200\000\000\000\200" To: "" FromInclusive: true ToInclusive: false } SnapshotTxId: 281474976710758 SnapshotStep: 13000 MaxBatchRows: 50000 MaxBatchBytes: 8388608 SeqNoGeneration: 2 SeqNoRound: 1 MaxRetries: 50 row version v13000/281474976710758 2025-03-31T16:32:14.557281Z node 1 :TX_DATASHARD NOTICE: FinishTBuildIndexScan: datashard: 72075186224037889, requested range: [(Utf8 : NULL, Utf8 : NULL) ; ()), last acked point: ()Stats { RowsSent: 0 BytesSent: 0 }Status { Code: SUCCESS Issues:
: Error: Shard or requested range is empty } BuildIndexId: 281474976715679 TabletId: 72075186224037889 Status: DONE UploadStatus: SUCCESS Issues { message: "Shard or requested range is empty" severity: 1 } RequestSeqNoGeneration: 2 RequestSeqNoRound: 1 2025-03-31T16:32:14.559948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;RESULT=;EXPECTATION=1 FINISHED_REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;EXPECTATION=1;WAITING=1 REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/abc1", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;EXPECTATION=1;WAITING=1 2025-03-31T16:32:29.288183Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715702:0, at schemeshard: 72057594046644480 REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/abc1", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;RESULT=;EXPECTATION=1 2025-03-31T16:32:30.941000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:32:30.941079Z node 1 :IMPORT WARN: Table profiles were not loaded FINISHED_REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/abc1", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;EXPECTATION=1;WAITING=1 2025-03-31T16:32:31.306560Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:215;event=skip_tier_manager_start;tier=/Root/tier1;has_secrets=1;tier_config=0; 2025-03-31T16:32:31.306651Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:196;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2025-03-31T16:32:31.306713Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=0};SECRETS={}; 2025-03-31T16:32:31.306843Z node 1 :TX_TIERING INFO: fline=manager.cpp:128;event=start_subscribing_metadata; 2025-03-31T16:32:31.307017Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:154;event=watch_scheme_objects;names=/Root/tier1; 2025-03-31T16:32:31.307411Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:62;event=TEvRefreshSubscriberData;snapshot=secrets; 2025-03-31T16:32:31.307464Z node 1 :TX_TIERING INFO: fline=manager.cpp:271;event=update_secrets;tablet=0; 2025-03-31T16:32:31.307519Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:196;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2025-03-31T16:32:31.307583Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-03-31T16:32:31.309237Z node 1 :TX_TIERING DEBUG: 
fline=fetcher.h:111;component=TSchemeObjectWatcher;event=NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult;path=Root/tier1; 2025-03-31T16:32:31.310611Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:140;event=object_fetched;path=/Root/tier1; 2025-03-31T16:32:31.310796Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:75;component=tiering_manager;event=object_updated;path=/Root/tier1; 2025-03-31T16:32:31.310954Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=1; 2025-03-31T16:32:31.311051Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier1' started at tablet 0 2025-03-31T16:32:31.311138Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier2` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/abc2", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME ... :Tier '/Root/tier2' stopped at tablet 0 2025-03-31T16:33:30.898415Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier2' started at tablet 0 2025-03-31T16:33:30.898452Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=1}{id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;EXPECTATION=1;WAITING=1 2025-03-31T16:33:41.895733Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2025-03-31T16:33:41.895823Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2025-03-31T16:33:41.895851Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2025-03-31T16:33:41.895883Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2025-03-31T16:33:41.896129Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2025-03-31T16:33:41.896437Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2025-03-31T16:33:41.896483Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier2;tablet=72075186224037892;has_config=0; 2025-03-31T16:33:41.896542Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 72075186224037892 2025-03-31T16:33:41.896601Z node 1 :TX_TIERING DEBUG: manager.cpp:142 :Restarting tier '/Root/tier1' at tablet 72075186224037892 2025-03-31T16:33:41.896643Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037892 2025-03-31T16:33:41.896708Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier1' started at tablet 72075186224037892 2025-03-31T16:33:41.896813Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-03-31T16:33:41.896869Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2025-03-31T16:33:41.896907Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier2;tablet=72075186224037893;has_config=0; 2025-03-31T16:33:41.896944Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 72075186224037893 2025-03-31T16:33:41.896982Z 
node 1 :TX_TIERING DEBUG: manager.cpp:142 :Restarting tier '/Root/tier1' at tablet 72075186224037893 2025-03-31T16:33:41.897011Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037893 2025-03-31T16:33:41.897051Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier1' started at tablet 72075186224037893 2025-03-31T16:33:41.897098Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-03-31T16:33:41.897219Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2025-03-31T16:33:41.897240Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier2;tablet=72075186224037894;has_config=0; 2025-03-31T16:33:41.897264Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 72075186224037894 2025-03-31T16:33:41.897302Z node 1 :TX_TIERING DEBUG: manager.cpp:142 :Restarting tier '/Root/tier1' at tablet 72075186224037894 2025-03-31T16:33:41.897323Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037894 2025-03-31T16:33:41.897348Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier1' started at tablet 72075186224037894 2025-03-31T16:33:41.897376Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-03-31T16:33:41.897448Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2025-03-31T16:33:41.897467Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier2;tablet=0;has_config=0; 2025-03-31T16:33:41.897489Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 0 2025-03-31T16:33:41.897513Z node 1 :TX_TIERING DEBUG: manager.cpp:142 :Restarting tier '/Root/tier1' at tablet 0 2025-03-31T16:33:41.897536Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 0 2025-03-31T16:33:41.897569Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier1' started at tablet 0 2025-03-31T16:33:41.897606Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-03-31T16:33:41.898365Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2025-03-31T16:33:41.898394Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier2;tablet=0;has_config=0; 2025-03-31T16:33:41.898419Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 0 2025-03-31T16:33:41.898445Z node 1 :TX_TIERING DEBUG: manager.cpp:142 :Restarting tier '/Root/tier1' at tablet 0 2025-03-31T16:33:41.898466Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 0 2025-03-31T16:33:41.898496Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier1' started at tablet 0 2025-03-31T16:33:41.898525Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-03-31T16:33:41.899200Z node 1 :TX_TIERING DEBUG: 
tablet_id=72075186224037892;self_id=[1:3156:4420];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:242;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; 2025-03-31T16:33:41.899284Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037893;self_id=[1:3160:4423];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:242;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; 2025-03-31T16:33:41.899336Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037894;self_id=[1:3166:4429];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:242;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;EXPECTATION=1;WAITING=1 REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;EXPECTATION=1;WAITING=1 2025-03-31T16:33:52.861452Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-03-31T16:33:52.861520Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-03-31T16:33:52.861554Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-03-31T16:33:52.861589Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-03-31T16:33:52.861787Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-03-31T16:33:52.862084Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-03-31T16:33:52.862291Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-03-31T16:33:52.862324Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=0; 2025-03-31T16:33:52.862368Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 0 2025-03-31T16:33:52.862431Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-03-31T16:33:52.862466Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-03-31T16:33:52.862483Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=0; 2025-03-31T16:33:52.862501Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 0 2025-03-31T16:33:52.862524Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-03-31T16:33:52.862787Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-03-31T16:33:52.862819Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=0; 2025-03-31T16:33:52.862852Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 0 2025-03-31T16:33:52.862897Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-03-31T16:33:52.863016Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-03-31T16:33:52.863039Z node 1 :TX_TIERING INFO: 
fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=72075186224037892;has_config=0; 2025-03-31T16:33:52.863059Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037892 2025-03-31T16:33:52.863119Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-03-31T16:33:52.863149Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-03-31T16:33:52.863171Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=72075186224037893;has_config=0; 2025-03-31T16:33:52.863190Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037893 2025-03-31T16:33:52.863217Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-03-31T16:33:52.863315Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-03-31T16:33:52.863334Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=72075186224037894;has_config=0; 2025-03-31T16:33:52.863353Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037894 2025-03-31T16:33:52.863378Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-03-31T16:33:52.863828Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037892;self_id=[1:3156:4420];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:242;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; 2025-03-31T16:33:52.863886Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037893;self_id=[1:3160:4423];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:242;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; 2025-03-31T16:33:52.863924Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037894;self_id=[1:3166:4429];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:242;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;EXPECTATION=1;WAITING=1 >> KqpNewEngine::PrunePartitionsByLiteral [GOOD] >> KqpNewEngine::PrunePartitionsByExpr >> KqpTx::CommitStats [GOOD] >> KqpSort::ReverseOptimizedWithPredicate [GOOD] >> KqpSort::ReverseMixedOrderNotOptimized ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::DSConfigsWithQueryServiceDdl [GOOD] Test command err: 2025-03-31T16:31:57.731265Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:31:57.731595Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:31:57.731811Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001b3e/r3tmp/tmplCzXno/pdisk_1.dat TServer::EnableGrpc on GrpcPort 12656, node 1 TClient is connected to server localhost:24097 2025-03-31T16:31:58.274862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:31:58.314706Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:31:58.315273Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:58.315308Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:58.315336Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:58.315729Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:31:58.350856Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:58.351006Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:58.362430Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Initialization finished REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;EXPECTATION=1;WAITING=1 2025-03-31T16:32:10.008754Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:748:2628], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:10.008845Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:758:2633], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:10.008895Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:10.015374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-03-31T16:32:10.034292Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:762:2636], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-03-31T16:32:10.092237Z node 1 :TX_PROXY ERROR: Actor# [1:813:2668] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:32:10.335327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:1, at schemeshard: 72057594046644480 2025-03-31T16:32:11.137713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:32:11.567316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:1, at schemeshard: 72057594046644480 2025-03-31T16:32:12.282079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-03-31T16:32:12.924477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-31T16:32:13.332069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-03-31T16:32:14.280260Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-03-31T16:32:14.597149Z node 1 :TX_DATASHARD NOTICE: Starting TBuildIndexScan BuildIndexId: 281474976715679 TabletId: 72075186224037889 OwnerId: 72057594046644480 PathId: 9 TargetName: "/Root/.metadata/secrets/values/index_by_secret_id/indexImplTable" IndexColumns: "secretId" IndexColumns: "ownerUserId" KeyRange { From: "\002\000\000\000\000\200\000\000\000\200" To: "" FromInclusive: true ToInclusive: false } SnapshotTxId: 281474976710758 SnapshotStep: 13000 MaxBatchRows: 50000 MaxBatchBytes: 8388608 SeqNoGeneration: 2 SeqNoRound: 1 MaxRetries: 50 row version v13000/281474976710758 2025-03-31T16:32:14.600398Z node 1 :TX_DATASHARD NOTICE: FinishTBuildIndexScan: datashard: 72075186224037889, requested range: [(Utf8 : NULL, Utf8 : NULL) ; ()), last acked point: ()Stats { RowsSent: 0 BytesSent: 0 }Status { Code: SUCCESS Issues:
: Error: Shard or requested range is empty } BuildIndexId: 281474976715679 TabletId: 72075186224037889 Status: DONE UploadStatus: SUCCESS Issues { message: "Shard or requested range is empty" severity: 1 } RequestSeqNoGeneration: 2 RequestSeqNoRound: 1 2025-03-31T16:32:14.604462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;RESULT=;EXPECTATION=1 FINISHED_REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;EXPECTATION=1;WAITING=1 REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/abc1", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;EXPECTATION=1;WAITING=1 2025-03-31T16:32:30.229707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715702:0, at schemeshard: 72057594046644480 REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/abc1", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;RESULT=;EXPECTATION=1 2025-03-31T16:32:31.087880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:32:31.087939Z node 1 :IMPORT WARN: Table profiles were not loaded FINISHED_REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/abc1", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;EXPECTATION=1;WAITING=1 2025-03-31T16:32:31.398927Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:215;event=skip_tier_manager_start;tier=/Root/tier1;has_secrets=1;tier_config=0; 2025-03-31T16:32:31.399014Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:196;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2025-03-31T16:32:31.399057Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=0};SECRETS={}; 2025-03-31T16:32:31.399154Z node 1 :TX_TIERING INFO: fline=manager.cpp:128;event=start_subscribing_metadata; 2025-03-31T16:32:31.399336Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:154;event=watch_scheme_objects;names=/Root/tier1; 2025-03-31T16:32:31.399562Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:62;event=TEvRefreshSubscriberData;snapshot=secrets; 2025-03-31T16:32:31.399617Z node 1 :TX_TIERING INFO: fline=manager.cpp:271;event=update_secrets;tablet=0; 2025-03-31T16:32:31.399662Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:196;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2025-03-31T16:32:31.399721Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-03-31T16:32:31.401201Z node 1 :TX_TIERING DEBUG: 
fline=fetcher.h:111;component=TSchemeObjectWatcher;event=NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult;path=Root/tier1; 2025-03-31T16:32:31.402464Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:140;event=object_fetched;path=/Root/tier1; 2025-03-31T16:32:31.402601Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:75;component=tiering_manager;event=object_updated;path=/Root/tier1; 2025-03-31T16:32:31.402748Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=1; 2025-03-31T16:32:31.402830Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier1' started at tablet 0 2025-03-31T16:32:31.402886Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier2` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/abc2", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;EXPECTATION=1;WAITING=1 2025-03-31T16:32:42.813430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715717:0, at schemeshard: 72057594046644480 REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier2` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/abc2", AUTH_METHOD="AWS", ... :Tier '/Root/tier2' stopped at tablet 0 2025-03-31T16:33:30.939949Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier2' started at tablet 0 2025-03-31T16:33:30.939986Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=1}{id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;EXPECTATION=1;WAITING=1 2025-03-31T16:33:41.903012Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2025-03-31T16:33:41.903107Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2025-03-31T16:33:41.903140Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2025-03-31T16:33:41.903175Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2025-03-31T16:33:41.904212Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2025-03-31T16:33:41.904517Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2025-03-31T16:33:41.904575Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier2;tablet=72075186224037892;has_config=0; 2025-03-31T16:33:41.904628Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 72075186224037892 2025-03-31T16:33:41.904681Z node 1 :TX_TIERING DEBUG: manager.cpp:142 :Restarting tier '/Root/tier1' at tablet 72075186224037892 2025-03-31T16:33:41.904721Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037892 2025-03-31T16:33:41.904777Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier1' started at tablet 72075186224037892 2025-03-31T16:33:41.904842Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 
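
The TX_TIERING trace above repeats one small state machine: on event=object_deleted the manager clears that tier's config (update_tier_config with has_config=0), stops the tier, restarts the surviving tiers on every affected tablet, and finally emits a configs_updated snapshot of TIERS and SECRETS. A minimal self-contained sketch of that flow follows; the class and method names (TierManagerSketch, OnObjectDeleted, UpsertTier) are assumptions inferred from the log lines, not the actual YDB tiering-manager sources.

    // Illustrative sketch only: mirrors the object_deleted -> update_tier_config ->
    // stop/restart -> configs_updated sequence visible in the TX_TIERING log above.
    // TierManagerSketch and its members are assumed names, not the YDB API.
    #include <iostream>
    #include <map>
    #include <string>

    class TierManagerSketch {
    public:
        void UpsertTier(const std::string& path) {
            HasConfig_[path] = true;                   // update_tier_config: has_config=1
            std::cout << "Tier '" << path << "' started\n";
        }

        void OnObjectDeleted(const std::string& path) {
            std::cout << "event=object_deleted;name=" << path << "\n";
            HasConfig_[path] = false;                  // update_tier_config: has_config=0
            std::cout << "Tier '" << path << "' stopped\n";
            HasConfig_.erase(path);
            for (const auto& [tier, hasConfig] : HasConfig_) {
                // surviving tiers are bounced, as in the "Restarting tier" log lines
                std::cout << "Restarting tier '" << tier
                          << "' (has_config=" << hasConfig << ")\n";
            }
            std::cout << "event=configs_updated;tiers=" << HasConfig_.size() << "\n";
        }

    private:
        std::map<std::string, bool> HasConfig_;        // tier path -> has_config flag
    };

    int main() {
        TierManagerSketch m;
        m.UpsertTier("/Root/tier1");
        m.UpsertTier("/Root/tier2");
        m.OnObjectDeleted("/Root/tier2");              // tier1 survives and restarts
        m.OnObjectDeleted("/Root/tier1");              // TIERS= ends up empty
    }

Dropping tier2 first and tier1 second reproduces the order of the DROP EXTERNAL DATA SOURCE requests in the trace, where each drop leaves the remaining tier restarted with its config intact until the final drop empties TIERS=.
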
2025-03-31T16:33:41.904881Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2025-03-31T16:33:41.904906Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier2;tablet=72075186224037893;has_config=0; 2025-03-31T16:33:41.904938Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 72075186224037893 2025-03-31T16:33:41.904970Z node 1 :TX_TIERING DEBUG: manager.cpp:142 :Restarting tier '/Root/tier1' at tablet 72075186224037893 2025-03-31T16:33:41.904991Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037893 2025-03-31T16:33:41.905020Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier1' started at tablet 72075186224037893 2025-03-31T16:33:41.905048Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-03-31T16:33:41.905162Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2025-03-31T16:33:41.905184Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier2;tablet=72075186224037894;has_config=0; 2025-03-31T16:33:41.905207Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 72075186224037894 2025-03-31T16:33:41.905226Z node 1 :TX_TIERING DEBUG: manager.cpp:142 :Restarting tier '/Root/tier1' at tablet 72075186224037894 2025-03-31T16:33:41.905244Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037894 2025-03-31T16:33:41.905267Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier1' started at tablet 72075186224037894 2025-03-31T16:33:41.905314Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-03-31T16:33:41.905400Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2025-03-31T16:33:41.905418Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier2;tablet=0;has_config=0; 2025-03-31T16:33:41.905440Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 0 2025-03-31T16:33:41.905468Z node 1 :TX_TIERING DEBUG: manager.cpp:142 :Restarting tier '/Root/tier1' at tablet 0 2025-03-31T16:33:41.905493Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 0 2025-03-31T16:33:41.905520Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier1' started at tablet 0 2025-03-31T16:33:41.905544Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-03-31T16:33:41.905948Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2025-03-31T16:33:41.905973Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier2;tablet=0;has_config=0; 2025-03-31T16:33:41.906007Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 0 2025-03-31T16:33:41.906043Z node 1 :TX_TIERING DEBUG: manager.cpp:142 :Restarting tier '/Root/tier1' at tablet 0 2025-03-31T16:33:41.906071Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' 
stopped at tablet 0 2025-03-31T16:33:41.906100Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier1' started at tablet 0 2025-03-31T16:33:41.906127Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-03-31T16:33:41.906399Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037892;self_id=[1:3151:4422];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:242;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; 2025-03-31T16:33:41.906471Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037893;self_id=[1:3157:4425];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:242;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; 2025-03-31T16:33:41.906519Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037894;self_id=[1:3165:4431];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:242;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;EXPECTATION=1;WAITING=1 REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;EXPECTATION=1;WAITING=1 2025-03-31T16:33:52.897720Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-03-31T16:33:52.898305Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-03-31T16:33:52.898839Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-03-31T16:33:52.898891Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-03-31T16:33:52.898923Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-03-31T16:33:52.898989Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-03-31T16:33:52.899265Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-03-31T16:33:52.899314Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=0; 2025-03-31T16:33:52.899377Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 0 2025-03-31T16:33:52.899453Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-03-31T16:33:52.899540Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-03-31T16:33:52.899563Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=72075186224037892;has_config=0; 2025-03-31T16:33:52.899589Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037892 2025-03-31T16:33:52.899625Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-03-31T16:33:52.899646Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-03-31T16:33:52.899665Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=72075186224037893;has_config=0; 2025-03-31T16:33:52.899686Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' 
stopped at tablet 72075186224037893 2025-03-31T16:33:52.899715Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-03-31T16:33:52.899739Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-03-31T16:33:52.899758Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=72075186224037894;has_config=0; 2025-03-31T16:33:52.899776Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037894 2025-03-31T16:33:52.899806Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-03-31T16:33:52.899872Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-03-31T16:33:52.899900Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=0; 2025-03-31T16:33:52.899923Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 0 2025-03-31T16:33:52.899953Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-03-31T16:33:52.900135Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-03-31T16:33:52.900154Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=0; 2025-03-31T16:33:52.900177Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 0 2025-03-31T16:33:52.900203Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-03-31T16:33:52.900432Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037892;self_id=[1:3151:4422];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:242;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; 2025-03-31T16:33:52.900502Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037893;self_id=[1:3157:4425];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:242;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; 2025-03-31T16:33:52.900545Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037894;self_id=[1:3165:4431];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:242;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;EXPECTATION=1;WAITING=1 >> KqpScanArrowInChanels::AggregateEmptySum [GOOD] >> KqpSnapshotRead::TestReadOnly-withSink >> GenericFederatedQuery::ClickHouseManagedSelectConstant [GOOD] >> GenericFederatedQuery::ClickHouseSelectCount >> YdbTableSplit::SplitByLoadWithNonEmptyRangeReads >> KqpNewEngine::OnlineRO_Consistent [GOOD] >> KqpNewEngine::OnlineRO_Inconsistent ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::YdbManagedSelectAll [GOOD] Test command err: Trying to start YDB, gRPC: 4741, MsgBus: 10667 2025-03-31T16:33:45.408828Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012738091099434:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:45.409016Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000f06/r3tmp/tmpdeNGMo/pdisk_1.dat 2025-03-31T16:33:45.687474Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4741, node 1 2025-03-31T16:33:45.752066Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:45.752092Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:45.752098Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:45.752233Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:33:45.778047Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:45.778204Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:45.779943Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10667 TClient is connected to server localhost:10667 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:33:46.246853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:48.086442Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012750976001990:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:48.086582Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:48.326426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-03-31T16:33:48.427543Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012750976002114:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:48.427675Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:48.427838Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012750976002123:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:48.427900Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012750976002124:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:48.427937Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012750976002127:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:48.427971Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:48.430591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:2, at schemeshard: 72057594046644480 2025-03-31T16:33:48.433754Z node 1 :TX_PROXY ERROR: Actor# [1:7488012750976002132:2377] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-31T16:33:48.438691Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012750976002131:2355], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-03-31T16:33:48.438700Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012750976002130:2354], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-03-31T16:33:48.498211Z node 1 :TX_PROXY ERROR: Actor# [1:7488012750976002179:2408] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:33:48.536681Z node 1 :TX_PROXY ERROR: Actor# [1:7488012750976002197:2416] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:33:49.017464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:33:49.417744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:1, at schemeshard: 72057594046644480 2025-03-31T16:33:49.765747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T16:33:50.134140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-31T16:33:50.408439Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488012738091099434:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:50.408528Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:33:50.512748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-03-31T16:33:50.937130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-03-31T16:33:50.966370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-03-31T16:33:52.551017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710702:0, at schemeshard: 72057594046644480 2025-03-31T16:33:52.569385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710703:0, at schemeshard: 72057594046644480 2025-03-31T16:33:52.570624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710704:0, at schemeshard: 72057594046644480 2025-03-31T16:33:52.571720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation 
part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710705:0, at schemeshard: 72057594046644480 Call DescribeTable. data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { items { column { name: "col1" type { type_id: UINT16 } } } } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Expected: splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { items { column { name: "col1" type { type_id: UINT16 } } } } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Actual: splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { items { column { name: "col1" type { type_id: UINT16 } } } } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. 
GRpcStatusCode: 0 2025-03-31T16:33:55.059963Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438835096, txId: 281474976710752] shutting down >> KqpKv::ReadRows_UnknownTable [GOOD] >> KqpKv::ReadRows_NonExistentKeys ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::CommitStats [GOOD] Test command err: Trying to start YDB, gRPC: 26469, MsgBus: 11531 2025-03-31T16:33:45.828854Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012736943404232:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:45.828903Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001279/r3tmp/tmp028gIO/pdisk_1.dat 2025-03-31T16:33:46.158113Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26469, node 1 2025-03-31T16:33:46.202254Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:46.202583Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:46.208771Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:33:46.235671Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:46.235695Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:46.235702Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:46.235818Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11531 TClient is connected to server localhost:11531 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:33:46.735407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:46.751704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:33:46.878684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:47.032829Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:47.102223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:48.793316Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012749828307905:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:48.793433Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:49.065622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:33:49.093317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:33:49.118812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:33:49.143795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:33:49.171569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:33:49.239155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:33:49.276784Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012754123275716:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:49.276986Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:49.277392Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012754123275721:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:49.280973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:33:49.288635Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012754123275723:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:33:49.359464Z node 1 :TX_PROXY ERROR: Actor# [1:7488012754123275777:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 20877, MsgBus: 15351 2025-03-31T16:33:51.219683Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012765948014560:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:51.219747Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001279/r3tmp/tmpqj4DXT/pdisk_1.dat 2025-03-31T16:33:51.289856Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20877, node 2 2025-03-31T16:33:51.345050Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:51.345109Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:51.346294Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:33:51.346944Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:51.346962Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:51.346968Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:51.347055Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15351 TClient is connected to server localhost:15351 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:33:51.753938Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:51.762479Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:33:51.834084Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:51.975418Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:52.056542Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:54.042901Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012778832918218:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:54.042998Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:54.088789Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:33:54.124504Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:33:54.157294Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:33:54.191791Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:33:54.225534Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:33:54.264114Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:33:54.301788Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012778832918729:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:54.301864Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:54.301914Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012778832918734:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:54.305097Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:33:54.315920Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488012778832918736:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:33:54.370109Z node 2 :TX_PROXY ERROR: Actor# [2:7488012778832918789:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/arrow/unittest >> KqpScanArrowInChanels::AggregateEmptySum [GOOD] Test command err: Trying to start YDB, gRPC: 27530, MsgBus: 4808 2025-03-31T16:33:31.106476Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012679728515698:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:31.106564Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0016f6/r3tmp/tmpt6D7u3/pdisk_1.dat 2025-03-31T16:33:31.376658Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27530, node 1 2025-03-31T16:33:31.442111Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:31.442222Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:31.443898Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:33:31.448864Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:31.448889Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:31.448898Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:31.449053Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4808 TClient is connected to server localhost:4808 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:33:31.943933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:33:31.966907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:32.073018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:32.199274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:32.251282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:33.595771Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012688318452076:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:33.595891Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:33.844572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:33:33.871244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:33:33.895323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:33:33.924592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:33:33.951581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:33:34.017334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:33:34.052165Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012692613419885:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:34.052180Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012692613419890:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:34.052291Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:34.055254Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:33:34.063491Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012692613419892:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:33:34.150826Z node 1 :TX_PROXY ERROR: Actor# [1:7488012692613419946:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:33:36.186149Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488012679728515698:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:36.679381Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:33:37.199479Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438815671, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 12868, MsgBus: 17747 2025-03-31T16:33:37.833823Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012702430047574:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:37.833895Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0016f6/r3tmp/tmpZeMvUb/pdisk_1.dat 2025-03-31T16:33:37.915947Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12868, node 2 2025-03-31T16:33:37.964855Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:37.964919Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:37.966568Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:33:37.973897Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:37.973929Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:37.973938Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:37.974049Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17747 TClient is connected to server localhost:17747 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:33:38.304215Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:38.312659Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:38.383697Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:38.523624Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:38.597666Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type ... pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:47.165001Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:33:47.197612Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:33:47.233902Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:33:47.264854Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:33:47.295510Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:33:47.330160Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:33:47.406864Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488012746662146185:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:47.406952Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:47.407162Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488012746662146190:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:47.410038Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:33:47.418410Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488012746662146192:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:33:47.515673Z node 3 :TX_PROXY ERROR: Actor# [3:7488012746662146247:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:33:49.191627Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488012733777242010:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:49.191720Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:33:49.257305Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438828901, txId: 281474976715671] shutting down Trying to start YDB, gRPC: 8041, MsgBus: 12013 2025-03-31T16:33:50.016674Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488012761298330259:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:50.016726Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0016f6/r3tmp/tmp7HiFbY/pdisk_1.dat 2025-03-31T16:33:50.106719Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8041, node 4 2025-03-31T16:33:50.149367Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:50.149505Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:50.151198Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:33:50.170220Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:50.170250Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:50.170257Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:50.170383Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12013 TClient is connected to server localhost:12013 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:33:50.616760Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:50.633740Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:50.686639Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:50.832201Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:50.905484Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:53.179686Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488012774183233925:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:53.179814Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:53.253832Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:33:53.291755Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:33:53.324729Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:33:53.361933Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:33:53.395218Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:33:53.429634Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:33:53.473309Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488012774183234432:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:53.473391Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:53.473452Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488012774183234437:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:53.477039Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:33:53.488138Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7488012774183234439:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:33:53.557918Z node 4 :TX_PROXY ERROR: Actor# [4:7488012774183234492:3442] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:33:55.017144Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7488012761298330259:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:55.017254Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:33:55.413973Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438835131, txId: 281474976715671] shutting down >> BasicUsage::MaxByteSizeEqualZero [GOOD] >> BasicUsage::TSimpleWriteSession_AutoSeqNo_BasicUsage >> Compression::WriteRAW [GOOD] >> Compression::WriteGZIP >> KqpNotNullColumns::UpdateNotNull [GOOD] >> KqpNotNullColumns::UpdateNotNullPg >> BasicUsage::WriteAndReadSomeMessagesWithAsyncCompression [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression |96.4%| [TA] $(B)/ydb/core/kqp/ut/arrow/test-results/unittest/{meta.json ... results_accumulator.log} |96.4%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/arrow/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCreateClean [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestReboot >> KqpNotNullColumns::ReplaceNotNullPkPg [GOOD] >> KqpNotNullColumns::SelectNotNullColumns >> YdbTableSplit::SplitByLoadWithUpdates >> TableCreation::MultipleTablesCreation >> KqpProxy::CalcPeerStats [GOOD] >> KqpProxy::CreatesScriptExecutionsTable >> ScriptExecutionsTest::RunCheckLeaseStatus >> TableCreation::SimpleTableCreation >> KqpLocks::InvalidateOnCommit [GOOD] >> KqpLocks::MixedTxFail+useSink >> KqpProxy::PingNotExistedSession >> IndexBuildTest::MergeIndexTableShardsOnlyWhenReady [GOOD] >> IndexBuildTest::RejectsCancel >> KqpNewEngine::InShardsWrite [GOOD] >> KqpNewEngine::Join >> KqpKv::ReadRows_NonExistentKeys [GOOD] >> KqpKv::ReadRows_NotFullPK >> KqpNewEngine::StaleRO [GOOD] >> KqpNewEngine::SqlInFromCompact >> KqpExtractPredicateLookup::PointJoin [GOOD] >> KqpExtractPredicateLookup::SqlInJoin >> KqpSort::ReverseMixedOrderNotOptimized [GOOD] >> KqpSort::ReverseRangeOptimized >> KqpNewEngine::PrunePartitionsByExpr [GOOD] >> KqpNewEngine::PruneWritePartitions+UseSink >> KqpNewEngine::OnlineRO_Inconsistent [GOOD] >> KqpNewEngine::Nondeterministic >> KqpNotNullColumns::UpdateNotNullPg [GOOD] >> KqpNotNullColumns::UpdateOnNotNull >> KqpSnapshotRead::TestReadOnly-withSink [GOOD] >> KqpSnapshotRead::TestSnapshotExpiration+withSink >> KqpNotNullColumns::SelectNotNullColumns [GOOD] >> KqpNotNullColumns::SecondaryKeyWithNotNullColumn >> KqpSqlIn::KeySuffix_OnlyTail [GOOD] >> KqpSqlIn::KeySuffix_NotPointPrefix >> KqpProxy::PingNotExistedSession [GOOD] >> ScriptExecutionsTest::AttemptToUpdateDeletedLease >> TableCreation::SimpleTableCreation [GOOD] >> TableCreation::SimpleUpdateTable >> TableCreation::MultipleTablesCreation [GOOD] >> TableCreation::CreateOldTable >> KqpKv::ReadRows_NotFullPK [GOOD] >> KqpKv::ReadRows_SpecificReturnValue >> 
IndexBuildTest::RejectsCancel [GOOD] >> ScriptExecutionsTest::RunCheckLeaseStatus [GOOD] >> ScriptExecutionsTest::UpdatesLeaseAfterExpiring >> KqpNewEngine::Join [GOOD] >> KqpNewEngine::JoinIdxLookup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::RejectsCancel [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:33:31.211358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:33:31.211454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:33:31.211489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:33:31.211517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:33:31.211563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:33:31.211607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:33:31.211671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:33:31.211789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:33:31.212153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:33:31.286364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:33:31.286431Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:33:31.291585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:33:31.291692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:33:31.291872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:33:31.297695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:33:31.297931Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:33:31.298581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:31.298817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:33:31.300932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:31.302203Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:33:31.302269Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:31.302445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:33:31.302494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:33:31.302535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:33:31.302700Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:33:31.310292Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:33:31.459788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:33:31.460079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:31.460284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:33:31.460503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:33:31.460562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:31.463024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:31.463205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:33:31.463434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:31.463513Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:33:31.463550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:33:31.463588Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:33:31.465648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:31.465703Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:33:31.465741Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:33:31.467584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:31.467630Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 
2025-03-31T16:33:31.467687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:31.467736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:33:31.472112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:33:31.474448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:33:31.474655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:33:31.475833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:31.476019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:33:31.476096Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:31.476385Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:33:31.476448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:31.476628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:33:31.476763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:33:31.479184Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:33:31.479255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:33:31.479458Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:31.479507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:33:31.479760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:31.479810Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:33:31.479902Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:33:31.479935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 
ready parts: 1/1 2025-03-31T16:33:31.479974Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:33:31.480003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:31.480040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:33:31.480105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:31.480151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:33:31.480184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:33:31.480252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:33:31.480294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:33:31.480327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:33:31.482711Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:33:31.482859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:33:31.482907Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, coun ... OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: index1, IndexColumn: index, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:1162:3017], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 1818, read rows: 101, read bytes: 1818 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-31T16:34:05.479371Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-03-31T16:34:05.480912Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-03-31T16:34:05.480984Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: index1, IndexColumn: index, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:1162:3017], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, 
InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 1818, read rows: 101, read bytes: 1818 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-31T16:34:05.481026Z node 2 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2025-03-31T16:34:05.481215Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-31T16:34:05.481266Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:1258:3102] TestWaitNotification: OK eventTxId 102 2025-03-31T16:34:05.483833Z node 2 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_CANCEL_INDEX_BUILD: DoExecute TxId: 105 DatabaseName: "/MyRoot" IndexBuildId: 102 2025-03-31T16:34:05.483989Z node 2 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_CANCEL_INDEX_BUILD: Reply TxId: 105 Status: PRECONDITION_FAILED Issues { message: "Index build process with id <102> has been finished already" severity: 1 } BUILDINDEX RESPONSE CANCEL: NKikimrIndexBuilder.TEvCancelResponse TxId: 105 Status: PRECONDITION_FAILED Issues { message: "Index build process with id <102> has been finished already" severity: 1 } 2025-03-31T16:34:05.486443Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot" IndexBuildId: 102 2025-03-31T16:34:05.486675Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 102 State: STATE_DONE Settings { source_path: "/MyRoot/Table" index { name: "index1" index_columns: "index" global_index { } } max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 } Progress: 100 } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 102 State: STATE_DONE Settings { source_path: "/MyRoot/Table" index { name: "index1" index_columns: "index" global_index { } } max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 } Progress: 100 } 2025-03-31T16:34:05.489239Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:34:05.489504Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 315us result status StatusSuccess 2025-03-31T16:34:05.489948Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index" Type: 
"Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "index1" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "index" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 3 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 10 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 11 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:34:05.492474Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/index1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-31T16:34:05.492784Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/index1" took 329us result status StatusSuccess 2025-03-31T16:34:05.493568Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/index1" PathDescription { Self { Name: "index1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 2 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateAlter Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 11 ShardsLimit: 200000 ResourcesDomainKey { 
SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "index1" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "index" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "index" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpLocks::MixedTxFail+useSink [GOOD] >> KqpProxy::CreatesScriptExecutionsTable [GOOD] >> KqpProxy::DatabasesCacheForServerless >> KqpNotNullColumns::UpdateOnNotNull [GOOD] >> KqpNotNullColumns::UpdateOnNotNullPg >> KqpNewEngine::PruneWritePartitions+UseSink [GOOD] >> KqpNewEngine::PruneWritePartitions-UseSink >> KqpSort::ReverseRangeOptimized [GOOD] >> KqpSort::ReverseRangeLimitOptimized |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> 
KqpNewEngine::SqlInFromCompact [GOOD] >> KqpNewEngine::SqlInAsScalar ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpLocks::MixedTxFail+useSink [GOOD] Test command err: Trying to start YDB, gRPC: 15055, MsgBus: 24065 2025-03-31T16:33:48.450340Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012750440909631:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:48.450478Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001276/r3tmp/tmpFMX1iY/pdisk_1.dat 2025-03-31T16:33:48.770827Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15055, node 1 2025-03-31T16:33:48.820890Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:48.820916Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:48.820921Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:48.821026Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:33:48.823256Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:48.823353Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:48.825038Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24065 TClient is connected to server localhost:24065 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:33:49.310973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:49.338895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:49.476072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:33:49.602031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:49.666004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:51.164002Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012763325813309:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:51.164125Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:51.480567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:33:51.510059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:33:51.537667Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:33:51.564583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:33:51.591535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:33:51.659684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:33:51.699924Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012763325813824:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:51.700002Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012763325813829:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:51.700006Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:51.703756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:33:51.713796Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012763325813831:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:33:51.789495Z node 1 :TX_PROXY ERROR: Actor# [1:7488012763325813885:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:33:52.805704Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=5; 2025-03-31T16:33:52.805898Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 5 at tablet 72075186224037914 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-03-31T16:33:52.806043Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 5 at tablet 72075186224037914 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-03-31T16:33:52.806249Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488012767620781569:2495], Table: `/Root/Test` ([72057594046644480:9:1]), SessionActorId: [1:7488012767620781465:2495]Got LOCKS BROKEN for table `/Root/Test`. ShardID=72075186224037914, Sink=[1:7488012767620781569:2495].{
<main>: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-03-31T16:33:52.806661Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488012767620781555:2495], SessionActorId: [1:7488012767620781465:2495], statusCode=ABORTED. Issue=
<main>: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
<main>: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[1:7488012767620781465:2495]. isRollback=0 2025-03-31T16:33:52.806910Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDQxYzQ3OS00MjY5M2NjNy1lNjE5MTk5YS1kMmFjNjFlZA==, ActorId: [1:7488012767620781465:2495], ActorState: ExecuteState, TraceId: 01jqpgts350gykwtxcd2dw5p7n, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [1:7488012767620781556:2495] from: [1:7488012767620781555:2495] 2025-03-31T16:33:52.806987Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7488012767620781556:2495] TxId: 281474976710672. Ctx: { TraceId: 01jqpgts350gykwtxcd2dw5p7n, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDQxYzQ3OS00MjY5M2NjNy1lNjE5MTk5YS1kMmFjNjFlZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
<main>: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001 subissue: {
<main>: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-03-31T16:33:52.808516Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDQxYzQ3OS00MjY5M2NjNy1lNjE5MTk5YS1kMmFjNjFlZA==, ActorId: [1:7488012767620781465:2495], ActorState: ExecuteState, TraceId: 01jqpgts350gykwtxcd2dw5p7n, Create QueryResponse for error on request, msg:
<main>: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
<main>: Error: Operation is aborting because locks are not valid, code: 2001 Trying to start YDB, gRPC: 4777, MsgBus: 11729 2025-03-31T16:33:53.602232Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012772084416704:2057];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:53.602341Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001276/r3tmp/tmpXq6U4i/pdisk_1.dat 2025-03-31T16:33:53.716069Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4777, node 2 2025-03-31T16:33:53.753714Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:53.753799Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:53.755552Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:33:53.791344Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:53.791386Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:53.791396Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:53.791523Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11729 TClient is connec ... 1;event=finished_tx;tx_id=281474976715661; 2025-03-31T16:34:05.464262Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-31T16:34:05.470858Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037940;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-31T16:34:05.471167Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037916;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-31T16:34:05.476436Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-31T16:34:05.477738Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037921;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-31T16:34:05.482051Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-31T16:34:05.484473Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037912;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-31T16:34:05.488425Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037925;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-31T16:34:05.491301Z node 3 :TX_COLUMNSHARD_TX
WARN: tablet_id=72075186224037942;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-31T16:34:05.494327Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037945;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-31T16:34:05.498174Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037922;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-31T16:34:05.499412Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037943;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-31T16:34:05.502949Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-31T16:34:05.506201Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037941;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-31T16:34:05.508289Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-31T16:34:05.511526Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037920;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-31T16:34:05.514718Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-31T16:34:05.517034Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-31T16:34:05.519208Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037934;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-31T16:34:05.523430Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-31T16:34:05.525461Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037918;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-31T16:34:05.529634Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037932;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-31T16:34:05.530880Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037946;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-31T16:34:05.535809Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037951;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 
2025-03-31T16:34:05.535842Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037936;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-31T16:34:05.541444Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037926;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-31T16:34:05.542249Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037950;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-31T16:34:05.548463Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-31T16:34:05.548613Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037924;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-31T16:34:05.556501Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037944;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-31T16:34:05.823770Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037947;tx_state=TTxProgressTx::Execute;tx_current=281474976715664;tx_id=281474976715664;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715664; 2025-03-31T16:34:05.824016Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=281474976715664;tx_id=281474976715664;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715664; 2025-03-31T16:34:05.824288Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=281474976715664;tx_id=281474976715664;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715664; 2025-03-31T16:34:05.824396Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;self_id=[3:7488012814108925662:2410];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037899;local_tx_no=13;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037903,72075186224037907;receive=72075186224037947; 2025-03-31T16:34:05.824457Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;self_id=[3:7488012814108925662:2410];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037899;local_tx_no=14;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037903,72075186224037907;receive=72075186224037947; 2025-03-31T16:34:05.824568Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;self_id=[3:7488012814108925662:2410];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037899;local_tx_no=16;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037903;receive=72075186224037907; 2025-03-31T16:34:05.824616Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;self_id=[3:7488012814108925662:2410];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037899;local_tx_no=17;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037903;receive=72075186224037907; 2025-03-31T16:34:05.827037Z node 3 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=281474976715664;tx_id=281474976715664;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715664; 2025-03-31T16:34:06.151996Z node 3 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976715668; 2025-03-31T16:34:06.153188Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7488012826993829567:2817], SessionActorId: [3:7488012822698862213:2817], Got LOCKS BROKEN for table. ShardID=72075186224037888, Sink=[3:7488012826993829567:2817].{
<main>: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-03-31T16:34:06.153367Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7488012826993829567:2817], SessionActorId: [3:7488012822698862213:2817], statusCode=ABORTED. Issue=
<main>: Error: Transaction locks invalidated. Table: `/Root/DataShard`., code: 2001
<main>: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[3:7488012822698862213:2817]. isRollback=0 2025-03-31T16:34:06.153558Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTVhNWRlZWMtN2Q0MDVlY2YtNDQzNzMxOWUtM2U2YTU5ODU=, ActorId: [3:7488012822698862213:2817], ActorState: ExecuteState, TraceId: 01jqpgv63he1h4n0ez10qq9yt7, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [3:7488012826993829595:2817] from: [3:7488012826993829567:2817] 2025-03-31T16:34:06.153635Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7488012826993829595:2817] TxId: 281474976715668. Ctx: { TraceId: 01jqpgv63he1h4n0ez10qq9yt7, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NTVhNWRlZWMtN2Q0MDVlY2YtNDQzNzMxOWUtM2U2YTU5ODU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
<main>: Error: Transaction locks invalidated. Table: `/Root/DataShard`., code: 2001 subissue: {
<main>: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-03-31T16:34:06.153814Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTVhNWRlZWMtN2Q0MDVlY2YtNDQzNzMxOWUtM2U2YTU5ODU=, ActorId: [3:7488012822698862213:2817], ActorState: ExecuteState, TraceId: 01jqpgv63he1h4n0ez10qq9yt7, Create QueryResponse for error on request, msg: 2025-03-31T16:34:06.154027Z node 3 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037936;self_id=[3:7488012814108925344:2358];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037936;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:34:06.156396Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037936;tx_state=TTxProgressTx::Execute;tx_current=281474976715668;tx_id=281474976715668;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715668; 2025-03-31T16:34:06.161147Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037936;tx_state=TTxProgressTx::Complete;fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=lock invalidated;tx_id=281474976715668; >> KqpNotNullColumns::SecondaryKeyWithNotNullColumn [GOOD] >> KqpNotNullColumns::SecondaryIndexWithNotNullDataColumn >> GenericFederatedQuery::ClickHouseSelectCount [GOOD] >> TableCreation::SimpleUpdateTable [GOOD] >> TableCreation::CreateOldTable [GOOD] >> KqpNewEngine::Nondeterministic [GOOD] >> KqpNewEngine::OrderedScalarContext >> KqpKv::ReadRows_SpecificReturnValue [GOOD] >> KqpKv::ReadRows_PgValue ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> TableCreation::SimpleUpdateTable [GOOD] Test command err: 2025-03-31T16:33:59.198050Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012797895392295:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:59.198192Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000c85/r3tmp/tmpsz7ASa/pdisk_1.dat 2025-03-31T16:33:59.552154Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:33:59.615372Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:59.615575Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:59.620912Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20072 TServer::EnableGrpc on GrpcPort 7171, node 1 2025-03-31T16:33:59.818548Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:59.818583Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:59.818592Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:59.818712Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:34:00.144385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:02.073700Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-03-31T16:34:02.078084Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-03-31T16:34:02.079320Z node 1 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-03-31T16:34:02.079358Z node 1 :KQP_PROXY DEBUG: Updated table service config. 2025-03-31T16:34:02.079387Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-03-31T16:34:02.079426Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-03-31T16:34:02.079489Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T16:34:02.079526Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T16:34:02.079775Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T16:34:02.079806Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T16:34:02.082994Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Describe result: PathErrorUnknown 2025-03-31T16:34:02.083016Z node 1 :KQP_PROXY NOTICE: Table script_execution_leases updater. Creating table 2025-03-31T16:34:02.083059Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Full table path:/dc-1/.metadata/script_execution_leases 2025-03-31T16:34:02.083070Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Describe result: PathErrorUnknown 2025-03-31T16:34:02.083078Z node 1 :KQP_PROXY NOTICE: Table script_executions updater. Creating table 2025-03-31T16:34:02.083148Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Full table path:/dc-1/.metadata/script_executions 2025-03-31T16:34:02.083216Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Describe result: PathErrorUnknown 2025-03-31T16:34:02.083224Z node 1 :KQP_PROXY NOTICE: Table result_sets updater. Creating table 2025-03-31T16:34:02.083517Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. 
Full table path:/dc-1/.metadata/result_sets 2025-03-31T16:34:02.097019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:1, at schemeshard: 72057594046644480 2025-03-31T16:34:02.098571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:34:02.100268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-31T16:34:02.106524Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 4 } 2025-03-31T16:34:02.106524Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 } 2025-03-31T16:34:02.106585Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Subscribe on create table tx: 281474976710660 2025-03-31T16:34:02.106586Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Subscribe on create table tx: 281474976710658 2025-03-31T16:34:02.106670Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 5 } 2025-03-31T16:34:02.106690Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Subscribe on create table tx: 281474976710659 2025-03-31T16:34:02.231908Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Request: create. Transaction completed: 281474976710658. Doublechecking... 2025-03-31T16:34:02.269162Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Request: create. Transaction completed: 281474976710660. Doublechecking... 2025-03-31T16:34:02.272099Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Request: create. Transaction completed: 281474976710659. Doublechecking... 2025-03-31T16:34:02.295430Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Column diff is empty, finishing 2025-03-31T16:34:02.318483Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Column diff is empty, finishing 2025-03-31T16:34:02.324410Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Column diff is empty, finishing 2025-03-31T16:34:02.324857Z node 1 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: fb1b7575-f58052f1-4ff3e816-712112a5, Bootstrap. 
Database: /dc-1 2025-03-31T16:34:02.342335Z node 1 :KQP_PROXY DEBUG: Request has 18445000634867.209318s seconds to be completed 2025-03-31T16:34:02.345750Z node 1 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=1&id=Y2NkZmY4OGUtZTE0MzU1YjItMjU5ZTYxYzUtYTRjYzRlZmI=, workerId: [1:7488012810780295064:2333], database: /dc-1, longSession: 1, local sessions count: 1 2025-03-31T16:34:02.345913Z node 1 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-03-31T16:34:02.346751Z node 1 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: fb1b7575-f58052f1-4ff3e816-712112a5, RunDataQuery: -- TCreateScriptOperationQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $run_script_actor_id AS Text; DECLARE $execution_status AS Int32; DECLARE $execution_mode AS Int32; DECLARE $query_text AS Text; DECLARE $syntax AS Int32; DECLARE $meta AS JsonDocument; DECLARE $lease_duration AS Interval; DECLARE $execution_meta_ttl AS Interval; UPSERT INTO `.metadata/script_executions` (database, execution_id, run_script_actor_id, execution_status, execution_mode, start_ts, query_text, syntax, meta, expire_at) VALUES ($database, $execution_id, $run_script_actor_id, $execution_status, $execution_mode, CurrentUtcTimestamp(), $query_text, $syntax, $meta, CurrentUtcTimestamp() + $execution_meta_ttl); UPSERT INTO `.metadata/script_execution_leases` (database, execution_id, lease_deadline, lease_generation, expire_at) VALUES ($database, $execution_id, CurrentUtcTimestamp() + $lease_duration, 1, CurrentUtcTimestamp() + $execution_meta_ttl); 2025-03-31T16:34:02.347189Z node 1 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=1&id=Y2NkZmY4OGUtZTE0MzU1YjItMjU5ZTYxYzUtYTRjYzRlZmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [1:7488012810780295064:2333] 2025-03-31T16:34:02.347228Z node 1 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 3 timeout: 300.000000s actor id: [1:7488012810780295066:2465] 2025-03-31T16:34:02.348976Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012810780295067:2335], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:02.348976Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012810780295079:2338], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:02.349059Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:02.356531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:2, at schemeshard: 72057594046644480 2025-03-31T16:34:02.365609Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012810780295081:2339], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710661 completed, doublechecking } 2025-03-31T16:34:02.438457Z node 1 :TX_PROXY ERROR: Actor# [1:7488012810780295123:2497] txid# 281474976710662, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges) ... :2335], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:07.966210Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:07.966493Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012832917250381:2338], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:07.969341Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:2, at schemeshard: 72057594046644480 2025-03-31T16:34:07.976772Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488012832917250383:2339], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715661 completed, doublechecking } 2025-03-31T16:34:08.052937Z node 2 :TX_PROXY ERROR: Actor# [2:7488012837212217721:2497] txid# 281474976715662, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:34:08.225124Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 3, sender: [2:7488012832917250366:2334], selfId: [2:7488012820032347794:2264], source: [2:7488012832917250365:2333] 2025-03-31T16:34:08.225328Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: 5e99082e-563316b3-aa023a6b-4927cef7, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MmI3MGY2YjktZTRiYTIwYTAtZTk5N2ViOTYtZDkzOTk4Nzc=, TxId: 2025-03-31T16:34:08.225354Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: 5e99082e-563316b3-aa023a6b-4927cef7, Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MmI3MGY2YjktZTRiYTIwYTAtZTk5N2ViOTYtZDkzOTk4Nzc=, TxId: 2025-03-31T16:34:08.225365Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] Create script execution operation. ExecutionId: 5e99082e-563316b3-aa023a6b-4927cef7. Result: SUCCESS. Issues: 2025-03-31T16:34:08.227228Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=NTY1ZjljN2MtNGVmNjMyYzAtYzY0Zjg1NTItZDk4ZDk2MGE=, workerId: [2:7488012837212217780:2351], database: dc-1, longSession: 1, local sessions count: 2 2025-03-31T16:34:08.227365Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-03-31T16:34:08.227670Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=NTY1ZjljN2MtNGVmNjMyYzAtYzY0Zjg1NTItZDk4ZDk2MGE=, CurrentExecutionId: 5e99082e-563316b3-aa023a6b-4927cef7, CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 604800.000000s timeout: 604800.000000s cancelAfter: 0.000000s. Send request to target, requestId: 5, targetId: [2:7488012837212217780:2351] 2025-03-31T16:34:08.227704Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 5 timeout: 604800.000000s actor id: [2:7488012837212217782:2530] 2025-03-31T16:34:08.227747Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=MmI3MGY2YjktZTRiYTIwYTAtZTk5N2ViOTYtZDkzOTk4Nzc=, workerId: [2:7488012832917250365:2333], local sessions count: 1 2025-03-31T16:34:08.238132Z node 2 :KQP_PROXY DEBUG: TraceId: "01jqpgv87dfw5fj41ta39f32k8", Request has 18445000634861.313508s seconds to be completed 2025-03-31T16:34:08.239720Z node 2 :KQP_PROXY DEBUG: TraceId: "01jqpgv87dfw5fj41ta39f32k8", Created new session, sessionId: ydb://session/3?node_id=2&id=MzEzOWU5YjctZTI5MjMyZjItODU3YTA1MmUtZTNkOTY0Zg==, workerId: [2:7488012837212217796:2361], database: /dc-1, longSession: 1, local sessions count: 2 2025-03-31T16:34:08.239862Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 01jqpgv87dfw5fj41ta39f32k8 2025-03-31T16:34:08.244102Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Describe result: PathErrorUnknown 2025-03-31T16:34:08.244126Z node 2 :KQP_PROXY NOTICE: Table test_table updater. Creating table 2025-03-31T16:34:08.244157Z node 2 :KQP_PROXY DEBUG: Table test_table updater. 
Full table path:/dc-1/.test/test_table 2025-03-31T16:34:08.246507Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:1, at schemeshard: 72057594046644480 2025-03-31T16:34:08.247423Z node 2 :KQP_PROXY DEBUG: Table test_table updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976715664 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 10 } 2025-03-31T16:34:08.247452Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Subscribe on create table tx: 281474976715664 2025-03-31T16:34:08.273753Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Request: create. Transaction completed: 281474976715664. Doublechecking... 2025-03-31T16:34:08.291929Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 5e99082e-563316b3-aa023a6b-4927cef7, Bootstrap. Database: /dc-1 2025-03-31T16:34:08.292649Z node 2 :KQP_PROXY DEBUG: Request has 18445000634861.258982s seconds to be completed 2025-03-31T16:34:08.294275Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=OTM0NmZiMmQtNjBmOWRiODUtYmEyN2M4ODUtYmEyNGY4YzA=, workerId: [2:7488012837212217886:2366], database: /dc-1, longSession: 1, local sessions count: 3 2025-03-31T16:34:08.294398Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-03-31T16:34:08.294465Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 5, sender: [2:7488012832917250362:2462], selfId: [2:7488012820032347794:2264], source: [2:7488012837212217780:2351] 2025-03-31T16:34:08.294734Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 5e99082e-563316b3-aa023a6b-4927cef7, RunDataQuery: -- TSaveScriptExecutionResultMetaQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $result_set_metas AS JsonDocument; UPDATE `.metadata/script_executions` SET result_set_metas = $result_set_metas WHERE database = $database AND execution_id = $execution_id; 2025-03-31T16:34:08.295076Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=OTM0NmZiMmQtNjBmOWRiODUtYmEyN2M4ODUtYmEyNGY4YzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 8, targetId: [2:7488012837212217886:2366] 2025-03-31T16:34:08.295140Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 8 timeout: 300.000000s actor id: [2:7488012837212217888:2593] 2025-03-31T16:34:08.345748Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-03-31T16:34:08.346421Z node 2 :KQP_PROXY NOTICE: Table test_table updater. Adding columns. New columns: col4, col5. Existing columns: col1, col2, col3 2025-03-31T16:34:08.346496Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Full table path:/dc-1/.test/test_table 2025-03-31T16:34:08.348111Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:34:08.349282Z node 2 :KQP_PROXY DEBUG: Table test_table updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976715666 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 } 2025-03-31T16:34:08.349299Z node 2 :KQP_PROXY DEBUG: Table test_table updater. 
Subscribe on create table tx: 281474976715666 2025-03-31T16:34:08.359366Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Request: alter. Transaction completed: 281474976715666. Doublechecking... 2025-03-31T16:34:08.430703Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-03-31T16:34:08.453952Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 8, sender: [2:7488012837212217887:2367], selfId: [2:7488012820032347794:2264], source: [2:7488012837212217886:2366] 2025-03-31T16:34:08.454128Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 5e99082e-563316b3-aa023a6b-4927cef7, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=OTM0NmZiMmQtNjBmOWRiODUtYmEyN2M4ODUtYmEyNGY4YzA=, TxId: 2025-03-31T16:34:08.454201Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 5e99082e-563316b3-aa023a6b-4927cef7, Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OTM0NmZiMmQtNjBmOWRiODUtYmEyN2M4ODUtYmEyNGY4YzA=, TxId: 2025-03-31T16:34:08.454446Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] [TSaveScriptExecutionResultActor] ExecutionId: 5e99082e-563316b3-aa023a6b-4927cef7, start saving rows range [0; 1) 2025-03-31T16:34:08.454520Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=OTM0NmZiMmQtNjBmOWRiODUtYmEyN2M4ODUtYmEyNGY4YzA=, workerId: [2:7488012837212217886:2366], local sessions count: 2 2025-03-31T16:34:08.454555Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: 5e99082e-563316b3-aa023a6b-4927cef7, Bootstrap. Database: /dc-1 2025-03-31T16:34:08.454690Z node 2 :KQP_PROXY DEBUG: Request has 18445000634861.096936s seconds to be completed 2025-03-31T16:34:08.456573Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=MzA4NzcyYmYtZDc4ODdlZTAtNTNjMTk3YTgtYjliMzU2YjA=, workerId: [2:7488012837212217952:2377], database: /dc-1, longSession: 1, local sessions count: 3 2025-03-31T16:34:08.456693Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-03-31T16:34:08.457081Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: 5e99082e-563316b3-aa023a6b-4927cef7, RunDataQuery: -- TSaveScriptExecutionResultQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $result_set_id AS Int32; DECLARE $expire_at AS Optional<Timestamp>; DECLARE $items AS List<Struct<row_id:Int64,result_set:String,accumulated_size:Int64>>; UPSERT INTO `.metadata/result_sets` SELECT $database as database, $execution_id as execution_id, $result_set_id as result_set_id, T.row_id as row_id, $expire_at as expire_at, T.result_set as result_set, T.accumulated_size as accumulated_size FROM AS_TABLE($items) AS T; 2025-03-31T16:34:08.457510Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=MzA4NzcyYmYtZDc4ODdlZTAtNTNjMTk3YTgtYjliMzU2YjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 10, targetId: [2:7488012837212217952:2377] 2025-03-31T16:34:08.457550Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 10 timeout: 300.000000s actor id: [2:7488012837212217954:2637] 2025-03-31T16:34:08.460673Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=MzEzOWU5YjctZTI5MjMyZjItODU3YTA1MmUtZTNkOTY0Zg==, workerId: [2:7488012837212217796:2361], local sessions count: 2 >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestCreateCleanWithRetry [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestCreateCleanManyTables ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> TableCreation::CreateOldTable [GOOD] Test command err: 2025-03-31T16:33:58.909974Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012792158281811:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:58.910107Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000c96/r3tmp/tmp2O7KD9/pdisk_1.dat 2025-03-31T16:33:59.356834Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:33:59.384112Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:59.386529Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:59.395485Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6315 TServer::EnableGrpc on GrpcPort 26094, node 1 2025-03-31T16:33:59.791315Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:59.791344Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:59.791355Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:59.791495Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:34:00.136599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:34:01.966488Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-03-31T16:34:01.971822Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-03-31T16:34:01.983677Z node 1 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-03-31T16:34:01.983722Z node 1 :KQP_PROXY DEBUG: Updated table service config. 2025-03-31T16:34:01.983774Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-03-31T16:34:01.983842Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-03-31T16:34:01.983928Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T16:34:01.984003Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T16:34:01.988386Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Describe result: PathErrorUnknown 2025-03-31T16:34:01.988399Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Describe result: PathErrorUnknown 2025-03-31T16:34:01.988411Z node 1 :KQP_PROXY NOTICE: Table result_sets updater. Creating table 2025-03-31T16:34:01.988419Z node 1 :KQP_PROXY NOTICE: Table script_execution_leases updater. Creating table 2025-03-31T16:34:01.988448Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Full table path:/dc-1/.metadata/result_sets 2025-03-31T16:34:01.988586Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Describe result: PathErrorUnknown 2025-03-31T16:34:01.988594Z node 1 :KQP_PROXY NOTICE: Table script_executions updater. Creating table 2025-03-31T16:34:01.988608Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Full table path:/dc-1/.metadata/script_executions 2025-03-31T16:34:01.989160Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T16:34:01.989251Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T16:34:01.990286Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Full table path:/dc-1/.metadata/script_execution_leases 2025-03-31T16:34:01.997061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:1, at schemeshard: 72057594046644480 2025-03-31T16:34:02.001560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-31T16:34:02.003240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:34:02.025943Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 } 2025-03-31T16:34:02.026012Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Subscribe on create table tx: 281474976710658 2025-03-31T16:34:02.026146Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 5 } 2025-03-31T16:34:02.026163Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Subscribe on create table tx: 281474976710660 2025-03-31T16:34:02.031204Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 4 } 2025-03-31T16:34:02.031237Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Subscribe on create table tx: 281474976710659 2025-03-31T16:34:02.155049Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Request: create. Transaction completed: 281474976710658. Doublechecking... 2025-03-31T16:34:02.189276Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Request: create. Transaction completed: 281474976710660. Doublechecking... 2025-03-31T16:34:02.202109Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Request: create. Transaction completed: 281474976710659. Doublechecking... 2025-03-31T16:34:02.228680Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Column diff is empty, finishing 2025-03-31T16:34:02.253333Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Column diff is empty, finishing 2025-03-31T16:34:02.262510Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Column diff is empty, finishing 2025-03-31T16:34:02.265304Z node 1 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: d773e164-dfaae1d3-f12bfd02-7510dc51, Bootstrap. Database: /dc-1 2025-03-31T16:34:02.299382Z node 1 :KQP_PROXY DEBUG: Request has 18445000634867.252269s seconds to be completed 2025-03-31T16:34:02.334855Z node 1 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=1&id=Y2M5MDdlOGQtN2YyNDk5ZTctMTZiZDMzNjAtM2NiZTg3Zjg=, workerId: [1:7488012809338151876:2333], database: /dc-1, longSession: 1, local sessions count: 1 2025-03-31T16:34:02.335218Z node 1 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-03-31T16:34:02.341110Z node 1 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: d773e164-dfaae1d3-f12bfd02-7510dc51, RunDataQuery: -- TCreateScriptOperationQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $run_script_actor_id AS Text; DECLARE $execution_status AS Int32; DECLARE $execution_mode AS Int32; DECLARE $query_text AS Text; DECLARE $syntax AS Int32; DECLARE $meta AS JsonDocument; DECLARE $lease_duration AS Interval; DECLARE $execution_meta_ttl AS Interval; UPSERT INTO `.metadata/script_executions` (database, execution_id, run_script_actor_id, execution_status, execution_mode, start_ts, query_text, syntax, meta, expire_at) VALUES ($database, $execution_id, $run_script_actor_id, $execution_status, $execution_mode, CurrentUtcTimestamp(), $query_text, $syntax, $meta, CurrentUtcTimestamp() + $execution_meta_ttl); UPSERT INTO `.metadata/script_execution_leases` (database, execution_id, lease_deadline, lease_generation, expire_at) VALUES ($database, $execution_id, CurrentUtcTimestamp() + $lease_duration, 1, CurrentUtcTimestamp() + $execution_meta_ttl); 2025-03-31T16:34:02.345697Z node 1 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=1&id=Y2M5MDdlOGQtN2YyNDk5ZTctMTZiZDMzNjAtM2NiZTg3Zjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [1:7488012809338151876:2333] 2025-03-31T16:34:02.345753Z node 1 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 3 timeout: 300.000000s actor id: [1:7488012809338151879:2469] 2025-03-31T16:34:02.348295Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012809338151878:2335], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:02.348392Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:02.348574Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012809338151891:2338], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:02.355213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:2, at schemeshard: 72057594046644480 2025-03-31T16:34:02.365750Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012809338151893:2339], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-03-31T16:34:02.431458Z node 1 :TX_PROXY ERROR: Actor# [1:7488012809338151935:2501] txid# 281474976710662, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges) ... UPSERT INTO `.metadata/script_execution_leases` (database, execution_id, lease_deadline, lease_generation, expire_at) VALUES ($database, $execution_id, CurrentUtcTimestamp() + $lease_duration, 1, CurrentUtcTimestamp() + $execution_meta_ttl); 2025-03-31T16:34:07.960360Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=NDhkN2ZjY2EtN2MxNjIyY2MtY2M0YzA5NGUtZTFhNWJkYTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [2:7488012832836688587:2333] 2025-03-31T16:34:07.960400Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 3 timeout: 300.000000s actor id: [2:7488012832836688589:2464] 2025-03-31T16:34:07.961857Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012832836688590:2335], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:07.961858Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012832836688598:2338], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:07.961928Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:07.964854Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:2, at schemeshard: 72057594046644480 2025-03-31T16:34:07.974286Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488012832836688604:2339], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-03-31T16:34:08.067193Z node 2 :TX_PROXY ERROR: Actor# [2:7488012837131655943:2497] txid# 281474976715662, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:34:08.214152Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 3, sender: [2:7488012832836688588:2334], selfId: [2:7488012819951786035:2272], source: [2:7488012832836688587:2333] 2025-03-31T16:34:08.214427Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: 96630dd1-eb926289-f97c8570-1d3dcc95, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NDhkN2ZjY2EtN2MxNjIyY2MtY2M0YzA5NGUtZTFhNWJkYTM=, TxId: 2025-03-31T16:34:08.214442Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: 96630dd1-eb926289-f97c8570-1d3dcc95, Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NDhkN2ZjY2EtN2MxNjIyY2MtY2M0YzA5NGUtZTFhNWJkYTM=, TxId: 2025-03-31T16:34:08.214450Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] Create script execution operation. ExecutionId: 96630dd1-eb926289-f97c8570-1d3dcc95. Result: SUCCESS. Issues: 2025-03-31T16:34:08.216665Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=MjM2NGM0NWQtOWFlMTAyMmEtNDdlOTVmNDQtMWJmMzhkNjg=, workerId: [2:7488012837131656002:2351], database: dc-1, longSession: 1, local sessions count: 2 2025-03-31T16:34:08.216791Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-03-31T16:34:08.217002Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=MjM2NGM0NWQtOWFlMTAyMmEtNDdlOTVmNDQtMWJmMzhkNjg=, CurrentExecutionId: 96630dd1-eb926289-f97c8570-1d3dcc95, CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 604800.000000s timeout: 604800.000000s cancelAfter: 0.000000s. Send request to target, requestId: 5, targetId: [2:7488012837131656002:2351] 2025-03-31T16:34:08.217031Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 5 timeout: 604800.000000s actor id: [2:7488012837131656004:2530] 2025-03-31T16:34:08.217087Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=NDhkN2ZjY2EtN2MxNjIyY2MtY2M0YzA5NGUtZTFhNWJkYTM=, workerId: [2:7488012832836688587:2333], local sessions count: 1 2025-03-31T16:34:08.231290Z node 2 :KQP_PROXY DEBUG: TraceId: "01jqpgv8765jvwy52ajhpy13s5", Request has 18445000634861.320355s seconds to be completed 2025-03-31T16:34:08.233250Z node 2 :KQP_PROXY DEBUG: TraceId: "01jqpgv8765jvwy52ajhpy13s5", Created new session, sessionId: ydb://session/3?node_id=2&id=M2U0NzA2ZTgtNjczNWE0NWMtMzM3MzY1NTYtZWQ2NGZkZTA=, workerId: [2:7488012837131656018:2361], database: /dc-1, longSession: 1, local sessions count: 2 2025-03-31T16:34:08.233381Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 01jqpgv8765jvwy52ajhpy13s5 2025-03-31T16:34:08.235747Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Describe result: PathErrorUnknown 2025-03-31T16:34:08.235770Z node 2 :KQP_PROXY NOTICE: Table test_table updater. Creating table 2025-03-31T16:34:08.235803Z node 2 :KQP_PROXY DEBUG: Table test_table updater. 
Full table path:/dc-1/.test/test_table 2025-03-31T16:34:08.238859Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:1, at schemeshard: 72057594046644480 2025-03-31T16:34:08.240072Z node 2 :KQP_PROXY DEBUG: Table test_table updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976715664 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 10 } 2025-03-31T16:34:08.240107Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Subscribe on create table tx: 281474976715664 2025-03-31T16:34:08.279621Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Request: create. Transaction completed: 281474976715664. Doublechecking... 2025-03-31T16:34:08.291759Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 96630dd1-eb926289-f97c8570-1d3dcc95, Bootstrap. Database: /dc-1 2025-03-31T16:34:08.291935Z node 2 :KQP_PROXY DEBUG: Request has 18445000634861.259702s seconds to be completed 2025-03-31T16:34:08.293964Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=MWFkYjIzMDgtN2MwZjU2MDAtYjlkOTE5YjMtOTJhNzFmODY=, workerId: [2:7488012837131656109:2366], database: /dc-1, longSession: 1, local sessions count: 3 2025-03-31T16:34:08.294123Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-03-31T16:34:08.294217Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 5, sender: [2:7488012832836688584:2462], selfId: [2:7488012819951786035:2272], source: [2:7488012837131656002:2351] 2025-03-31T16:34:08.294401Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 96630dd1-eb926289-f97c8570-1d3dcc95, RunDataQuery: -- TSaveScriptExecutionResultMetaQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $result_set_metas AS JsonDocument; UPDATE `.metadata/script_executions` SET result_set_metas = $result_set_metas WHERE database = $database AND execution_id = $execution_id; 2025-03-31T16:34:08.294682Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=MWFkYjIzMDgtN2MwZjU2MDAtYjlkOTE5YjMtOTJhNzFmODY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 8, targetId: [2:7488012837131656109:2366] 2025-03-31T16:34:08.294716Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 8 timeout: 300.000000s actor id: [2:7488012837131656111:2593] 2025-03-31T16:34:08.348486Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-03-31T16:34:08.348915Z node 2 :KQP_PROXY DEBUG: Table test_table updater. 
Column diff is empty, finishing 2025-03-31T16:34:08.367310Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=M2U0NzA2ZTgtNjczNWE0NWMtMzM3MzY1NTYtZWQ2NGZkZTA=, workerId: [2:7488012837131656018:2361], local sessions count: 2 2025-03-31T16:34:08.464581Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 8, sender: [2:7488012837131656110:2367], selfId: [2:7488012819951786035:2272], source: [2:7488012837131656109:2366] 2025-03-31T16:34:08.464799Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 96630dd1-eb926289-f97c8570-1d3dcc95, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MWFkYjIzMDgtN2MwZjU2MDAtYjlkOTE5YjMtOTJhNzFmODY=, TxId: 2025-03-31T16:34:08.464834Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 96630dd1-eb926289-f97c8570-1d3dcc95, Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MWFkYjIzMDgtN2MwZjU2MDAtYjlkOTE5YjMtOTJhNzFmODY=, TxId: 2025-03-31T16:34:08.465018Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] [TSaveScriptExecutionResultActor] ExecutionId: 96630dd1-eb926289-f97c8570-1d3dcc95, start saving rows range [0; 1) 2025-03-31T16:34:08.465114Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: 96630dd1-eb926289-f97c8570-1d3dcc95, Bootstrap. Database: /dc-1 2025-03-31T16:34:08.465554Z node 2 :KQP_PROXY DEBUG: Request has 18445000634861.086079s seconds to be completed 2025-03-31T16:34:08.467449Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=YmJiYzI2ZTMtOWJjN2ZkZC1kMWIwYmMwZC1kNjU0NWMwOQ==, workerId: [2:7488012837131656152:2377], database: /dc-1, longSession: 1, local sessions count: 3 2025-03-31T16:34:08.467712Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-03-31T16:34:08.467793Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=MWFkYjIzMDgtN2MwZjU2MDAtYjlkOTE5YjMtOTJhNzFmODY=, workerId: [2:7488012837131656109:2366], local sessions count: 2 2025-03-31T16:34:08.468166Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: 96630dd1-eb926289-f97c8570-1d3dcc95, RunDataQuery: -- TSaveScriptExecutionResultQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $result_set_id AS Int32; DECLARE $expire_at AS Optional; DECLARE $items AS List>; UPSERT INTO `.metadata/result_sets` SELECT $database as database, $execution_id as execution_id, $result_set_id as result_set_id, T.row_id as row_id, $expire_at as expire_at, T.result_set as result_set, T.accumulated_size as accumulated_size FROM AS_TABLE($items) AS T; 2025-03-31T16:34:08.468624Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=YmJiYzI2ZTMtOWJjN2ZkZC1kMWIwYmMwZC1kNjU0NWMwOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 10, targetId: [2:7488012837131656152:2377] 2025-03-31T16:34:08.468662Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 10 timeout: 300.000000s actor id: [2:7488012837131656154:2614] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::ClickHouseSelectCount [GOOD] Test command err: Trying to start YDB, gRPC: 1091, MsgBus: 7153 2025-03-31T16:33:47.566614Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012746657362582:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:47.566752Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ef6/r3tmp/tmpnjlweM/pdisk_1.dat 2025-03-31T16:33:47.841930Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1091, node 1 2025-03-31T16:33:47.919678Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:47.919701Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:47.919708Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:47.919805Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:33:47.923522Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:47.923767Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:47.925954Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7153 TClient is connected to server localhost:7153 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:33:48.374934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:50.104604Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012759542265136:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:50.104734Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:50.318353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-03-31T16:33:50.427175Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012759542265257:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:50.427264Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:50.427353Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012759542265262:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:50.430089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-03-31T16:33:50.437172Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012759542265264:2347], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-31T16:33:50.501866Z node 1 :TX_PROXY ERROR: Actor# [1:7488012759542265304:2397] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:33:50.983877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:33:51.394277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:1, at schemeshard: 72057594046644480 2025-03-31T16:33:51.836166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T16:33:52.192384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-31T16:33:52.526213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-31T16:33:52.566683Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488012746657362582:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:52.566741Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:33:52.946431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-03-31T16:33:52.980648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-03-31T16:33:55.772856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710712:0, at schemeshard: 72057594046644480 Call DescribeTable. 
data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } columns { name: "col2" type { type_id: DOUBLE } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. splits { select { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Expected: splits { select { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Actual: splits { select { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. 
GRpcStatusCode: 0 Trying to start YDB, gRPC: 15330, MsgBus: 24906 2025-03-31T16:33:56.772025Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012783623924702:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:56.772115Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ef6/r3tmp/tmpyuKij1/pdisk_1.dat 2025-03-31T16:33:56.884491Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:33:56.907058Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:56.907158Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 15330, node 2 2025-03-31T16:33:56.910279Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:33:56.939759Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:56.939782Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:56.939789Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:56.939902Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24906 TClient is connected to server localhost:24906 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:33:57.341979Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:59.807294Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012796508827249:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:59.807425Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:59.830306Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:2, at schemeshard: 72057594046644480 2025-03-31T16:33:59.881266Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012796508827367:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:59.881361Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012796508827372:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:59.881369Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:59.885919Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:2, at schemeshard: 72057594046644480 2025-03-31T16:33:59.897493Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488012796508827374:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-31T16:33:59.975233Z node 2 :TX_PROXY ERROR: Actor# [2:7488012796508827415:2395] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:34:00.492518Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:34:01.038016Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:1, at schemeshard: 72057594046644480 2025-03-31T16:34:01.602502Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:34:01.775273Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488012783623924702:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:01.775421Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:34:02.192676Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-03-31T16:34:02.699419Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715679:0, at schemeshard: 72057594046644480 2025-03-31T16:34:03.282994Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-03-31T16:34:03.329516Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-03-31T16:34:06.717213Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715719:0, at schemeshard: 72057594046644480 Call DescribeTable. 
data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } columns { name: "col2" type { type_id: DOUBLE } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. splits { select { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Expected: splits { select { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Actual: splits { select { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. GRpcStatusCode: 0 >> KqpTx::ExplicitTcl >> YdbTableSplit::SplitByLoadWithDeletes >> Compression::WriteGZIP [GOOD] >> Compression::WriteZSTD |96.5%| [TA] $(B)/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.5%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> BasicUsage::TSimpleWriteSession_AutoSeqNo_BasicUsage [GOOD] >> BasicUsage::TWriteSession_AutoBatching [GOOD] >> BasicUsage::TWriteSession_BatchingProducesContinueTokens [GOOD] >> BasicUsage::BrokenCredentialsProvider >> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithNoCompression >> KqpSqlIn::KeySuffix_NotPointPrefix [GOOD] >> KqpSqlIn::KeyTypeMissmatch_Int >> ScriptExecutionsTest::AttemptToUpdateDeletedLease [GOOD] >> KqpProxy::PassErrroViaSessionActor >> TableCreation::ConcurrentTableCreationWithDifferentVersions >> YdbTableSplit::RenameTablesAndSplit >> ScriptExecutionsTest::UpdatesLeaseAfterExpiring [GOOD] >> KqpNewEngine::JoinIdxLookup [GOOD] >> KqpNewEngine::ItemsLimit >> IndexBuildTest::CancellationNotEnoughRetries [GOOD] >> IndexBuildTest::CancellationNoTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> ScriptExecutionsTest::AttemptToUpdateDeletedLease [GOOD] Test command err: 2025-03-31T16:33:59.862322Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012797438680588:2225];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:59.864591Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000c7a/r3tmp/tmpadX0Nd/pdisk_1.dat 2025-03-31T16:34:00.324074Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:34:00.356805Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:00.356935Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:00.362273Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22442, node 1 2025-03-31T16:34:00.505304Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:00.505332Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:00.505361Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:00.505507Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25158 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:34:00.843598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:03.099952Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-03-31T16:34:03.101184Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /Root, empty 2025-03-31T16:34:03.102774Z node 1 :KQP_PROXY DEBUG: Received ping session request, request_id: 2, sender: [1:7488012801733648658:2318], trace_id: 01jqpgv13p5aa34t4jb6h69s2t 2025-03-31T16:34:03.102984Z node 1 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 2 timeout: 5.000000s actor id: [0:0:0] 2025-03-31T16:34:03.103035Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T16:34:03.103071Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T16:34:03.103167Z node 1 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-03-31T16:34:03.103188Z node 1 :KQP_PROXY DEBUG: Updated table service config. 2025-03-31T16:34:03.103214Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-03-31T16:34:03.103424Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T16:34:03.103475Z node 1 :KQP_PROXY DEBUG: Session not found, targetId: [2:8678280833929343339:121] requestId: 2 2025-03-31T16:34:03.105765Z node 1 :KQP_PROXY DEBUG: TraceId: "01jqpgv13p5aa34t4jb6h69s2t", Forwarded response to sender actor, requestId: 2, sender: [1:7488012801733648658:2318], selfId: [1:7488012797438680661:2281], source: [1:7488012797438680661:2281] 2025-03-31T16:34:04.503372Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488012821404265026:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:04.503490Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000c7a/r3tmp/tmpJCpUIg/pdisk_1.dat 2025-03-31T16:34:04.666014Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:34:04.697311Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:04.697424Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:04.698927Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10519 TServer::EnableGrpc on GrpcPort 9253, node 4 2025-03-31T16:34:04.953324Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:04.953353Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:04.953361Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:04.953489Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:34:05.026618Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:07.631765Z node 4 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-03-31T16:34:07.632774Z node 4 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-03-31T16:34:07.639684Z node 4 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-03-31T16:34:07.639725Z node 4 :KQP_PROXY DEBUG: Updated table service config. 2025-03-31T16:34:07.639746Z node 4 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-03-31T16:34:07.639791Z node 4 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-03-31T16:34:07.639849Z node 4 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T16:34:07.639895Z node 4 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T16:34:07.647970Z node 4 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T16:34:07.648035Z node 4 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T16:34:07.650562Z node 4 :KQP_PROXY DEBUG: Table script_execution_leases updater. Describe result: PathErrorUnknown 2025-03-31T16:34:07.650580Z node 4 :KQP_PROXY NOTICE: Table script_execution_leases updater. Creating table 2025-03-31T16:34:07.650618Z node 4 :KQP_PROXY DEBUG: Table script_execution_leases updater. Full table path:/dc-1/.metadata/script_execution_leases 2025-03-31T16:34:07.650923Z node 4 :KQP_PROXY DEBUG: Table result_sets updater. Describe result: PathErrorUnknown 2025-03-31T16:34:07.650928Z node 4 :KQP_PROXY NOTICE: Table result_sets updater. Creating table 2025-03-31T16:34:07.650944Z node 4 :KQP_PROXY DEBUG: Table result_sets updater. Full table path:/dc-1/.metadata/result_sets 2025-03-31T16:34:07.654843Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:1, at schemeshard: 72057594046644480 2025-03-31T16:34:07.656229Z node 4 :KQP_PROXY DEBUG: Table script_executions updater. Describe result: PathErrorUnknown 2025-03-31T16:34:07.656244Z node 4 :KQP_PROXY NOTICE: Table script_executions updater. 
Creating table 2025-03-31T16:34:07.656270Z node 4 :KQP_PROXY DEBUG: Table script_executions updater. Full table path:/dc-1/.metadata/script_executions 2025-03-31T16:34:07.665146Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:34:07.672733Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-31T16:34:07.678083Z node 4 :KQP_PROXY DEBUG: Table result_sets updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976715659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 } 2025-03-31T16:34:07.678127Z node 4 :KQP_PROXY DEBUG: Table result_sets updater. Subscribe on create table tx: 281474976715659 2025-03-31T16:34:07.678193Z node 4 :KQP_PROXY DEBUG: Table script_execution_leases updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 4 } 2025-03-31T16:34:07.678202Z node 4 :KQP_PROXY DEBUG: Table script_execution_leases updater. Subscribe on create table tx: 281474976715658 2025-03-31T16:34:07.679435Z node 4 :KQP_PROXY DEBUG: Table script_executions updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976715660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 5 } 2025-03-31T16:34:07.679474Z node 4 :KQP_PROXY DEBUG: Table script_executions updater. Subscribe on create table tx: 281474976715660 2025-03-31T16:34:07.796776Z node 4 :KQP_PROXY DEBUG: Table result_sets updater. Request: create. Transaction completed: 281474976715659. Doublechecking... 2025-03-31T16:34:07.825155Z node 4 :KQP_PROXY DEBUG: Table script_executions updater. Request: create. Transaction completed: 281474976715660. Doublechecking... 2025-03-31T16:34:07.871727Z node 4 :KQP_PROXY DEBUG: Table script_execution_leases updater. Request: create. Transaction completed: 281474976715658. Doublechecking... 2025-03-31T16:34:07.889305Z node 4 :KQP_PROXY DEBUG: Table script_executions updater. Column diff is empty, finishing 2025-03-31T16:34:07.894822Z node 4 ... ode 4 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-03-31T16:34:10.761153Z node 4 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: 4308c172-7f2b4175-d945851-f1efc912, RunDataQuery: -- TSaveScriptFinalStatusActor::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; SELECT operation_status, finalization_status, meta, customer_supplied_id, user_token, script_sinks, script_secret_names FROM `.metadata/script_executions` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); SELECT lease_generation FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); 2025-03-31T16:34:10.761462Z node 4 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=4&id=ZmI5YjYyODktOWE5ZTllMmYtYTFlMzZmNjItMTljMGVjNjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 20, targetId: [4:7488012847174070107:2446] 2025-03-31T16:34:10.761500Z node 4 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 20 timeout: 300.000000s actor id: [4:7488012847174070109:2666] 2025-03-31T16:34:10.769761Z node 4 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 20, sender: [4:7488012847174070108:2447], selfId: [4:7488012821404265237:2274], source: [4:7488012847174070107:2446] 2025-03-31T16:34:10.770315Z node 4 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: 4308c172-7f2b4175-d945851-f1efc912, State: Get operation info, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=4&id=ZmI5YjYyODktOWE5ZTllMmYtYTFlMzZmNjItMTljMGVjNjg=, TxId: 01jqpgvap975p9gqdkfx5amykz 2025-03-31T16:34:10.770835Z node 4 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: 4308c172-7f2b4175-d945851-f1efc912, State: Get operation info, RunDataQuery: -- TSaveScriptFinalStatusActor::FinishScriptExecution DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $operation_status AS Int32; DECLARE $execution_status AS Int32; DECLARE $finalization_status AS Int32; DECLARE $issues AS JsonDocument; DECLARE $plan AS JsonDocument; DECLARE $stats AS JsonDocument; DECLARE $ast AS Optional; DECLARE $ast_compressed AS Optional; DECLARE $ast_compression_method AS Optional; DECLARE $operation_ttl AS Interval; DECLARE $customer_supplied_id AS Text; DECLARE $user_token AS Text; DECLARE $script_sinks AS Optional; DECLARE $script_secret_names AS Optional; DECLARE $applicate_script_external_effect_required AS Bool; UPDATE `.metadata/script_executions` SET operation_status = $operation_status, execution_status = $execution_status, finalization_status = IF($applicate_script_external_effect_required, $finalization_status, NULL), issues = $issues, plan = $plan, end_ts = CurrentUtcTimestamp(), stats = $stats, ast = $ast, ast_compressed = $ast_compressed, ast_compression_method = $ast_compression_method, expire_at = IF($operation_ttl > CAST(0 AS Interval), CurrentUtcTimestamp() + $operation_ttl, NULL), customer_supplied_id = IF($applicate_script_external_effect_required, $customer_supplied_id, NULL), user_token = IF($applicate_script_external_effect_required, $user_token, NULL), script_sinks = IF($applicate_script_external_effect_required, $script_sinks, NULL), script_secret_names = IF($applicate_script_external_effect_required, $script_secret_names, NULL) WHERE database = $database AND execution_id = $execution_id; DELETE FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id; 2025-03-31T16:34:10.771391Z node 4 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=4&id=ZmI5YjYyODktOWE5ZTllMmYtYTFlMzZmNjItMTljMGVjNjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
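The FinishScriptExecution statement inside the record above is logged as one flattened line; the same YQL is repeated below, reformatted purely for readability. Everything is verbatim from the log except the layout; note that the log prints several declarations as bare Optional with the inner type missing (apparently truncated during log capture), and those are left exactly as they appear.

    -- TSaveScriptFinalStatusActor::FinishScriptExecution (reformatted from the log record above)
    DECLARE $database AS Text;
    DECLARE $execution_id AS Text;
    DECLARE $operation_status AS Int32;
    DECLARE $execution_status AS Int32;
    DECLARE $finalization_status AS Int32;
    DECLARE $issues AS JsonDocument;
    DECLARE $plan AS JsonDocument;
    DECLARE $stats AS JsonDocument;
    DECLARE $ast AS Optional;                    -- inner type missing in the captured log
    DECLARE $ast_compressed AS Optional;         -- inner type missing in the captured log
    DECLARE $ast_compression_method AS Optional; -- inner type missing in the captured log
    DECLARE $operation_ttl AS Interval;
    DECLARE $customer_supplied_id AS Text;
    DECLARE $user_token AS Text;
    DECLARE $script_sinks AS Optional;           -- inner type missing in the captured log
    DECLARE $script_secret_names AS Optional;    -- inner type missing in the captured log
    DECLARE $applicate_script_external_effect_required AS Bool;

    UPDATE `.metadata/script_executions`
    SET operation_status = $operation_status,
        execution_status = $execution_status,
        finalization_status = IF($applicate_script_external_effect_required, $finalization_status, NULL),
        issues = $issues,
        plan = $plan,
        end_ts = CurrentUtcTimestamp(),
        stats = $stats,
        ast = $ast,
        ast_compressed = $ast_compressed,
        ast_compression_method = $ast_compression_method,
        expire_at = IF($operation_ttl > CAST(0 AS Interval), CurrentUtcTimestamp() + $operation_ttl, NULL),
        customer_supplied_id = IF($applicate_script_external_effect_required, $customer_supplied_id, NULL),
        user_token = IF($applicate_script_external_effect_required, $user_token, NULL),
        script_sinks = IF($applicate_script_external_effect_required, $script_sinks, NULL),
        script_secret_names = IF($applicate_script_external_effect_required, $script_secret_names, NULL)
    WHERE database = $database AND execution_id = $execution_id;

    DELETE FROM `.metadata/script_execution_leases`
    WHERE database = $database AND execution_id = $execution_id;

The trailing DELETE is what removes the lease row once the operation reaches a final status.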
Send request to target, requestId: 21, targetId: [4:7488012847174070107:2446] 2025-03-31T16:34:10.771430Z node 4 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 21 timeout: 300.000000s actor id: [4:7488012847174070130:2671] 2025-03-31T16:34:10.785987Z node 4 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 21, sender: [4:7488012847174070129:2453], selfId: [4:7488012821404265237:2274], source: [4:7488012847174070107:2446] 2025-03-31T16:34:10.786404Z node 4 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: 4308c172-7f2b4175-d945851-f1efc912, State: Update final status, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=4&id=ZmI5YjYyODktOWE5ZTllMmYtYTFlMzZmNjItMTljMGVjNjg=, TxId: 2025-03-31T16:34:10.786490Z node 4 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: 4308c172-7f2b4175-d945851-f1efc912, State: Update final status, Finish with SUCCESS, SessionId: ydb://session/3?node_id=4&id=ZmI5YjYyODktOWE5ZTllMmYtYTFlMzZmNjItMTljMGVjNjg=, TxId: 2025-03-31T16:34:10.786605Z node 4 :KQP_PROXY DEBUG: [ScriptExecutions] Finish script execution operation. ExecutionId: 4308c172-7f2b4175-d945851-f1efc912. UNAVAILABLE. Issues: {
: Error: Lease expired } 2025-03-31T16:34:10.786969Z node 4 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=4&id=ZmI5YjYyODktOWE5ZTllMmYtYTFlMzZmNjItMTljMGVjNjg=, workerId: [4:7488012847174070107:2446], local sessions count: 1 2025-03-31T16:34:10.788898Z node 4 :KQP_PROXY DEBUG: [ScriptExecutions] [TCheckLeaseStatusActor] ExecutionId: 4308c172-7f2b4175-d945851-f1efc912, successfully finalized script execution operation 2025-03-31T16:34:10.788931Z node 4 :KQP_PROXY DEBUG: [ScriptExecutions] [TCheckLeaseStatusActor] ExecutionId: 4308c172-7f2b4175-d945851-f1efc912, reply success 2025-03-31T16:34:10.794864Z node 4 :KQP_PROXY DEBUG: Ctx: { TraceId: 01jqpgvaqa4ykhrpmh0wsqv40d, Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=4&id=N2RlNzQ5ZDYtM2Y2Njc2MS1mNzIxOGRkOS05MjNhZmQxMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 22, targetId: [4:7488012838584135229:2362] 2025-03-31T16:34:10.794900Z node 4 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 22 timeout: 300.000000s actor id: [4:7488012847174070155:2678] 2025-03-31T16:34:11.256144Z node 4 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T16:34:11.266423Z node 4 :KQP_PROXY DEBUG: TraceId: "01jqpgvaqa4ykhrpmh0wsqv40d", Forwarded response to sender actor, requestId: 22, sender: [4:7488012847174070154:2458], selfId: [4:7488012821404265237:2274], source: [4:7488012838584135229:2362] 2025-03-31T16:34:11.275474Z node 4 :KQP_PROXY DEBUG: [TQueryBase] [TScriptLeaseUpdater] TraceId: 4308c172-7f2b4175-d945851-f1efc912, Bootstrap. Database: /dc-1 2025-03-31T16:34:11.275618Z node 4 :KQP_PROXY DEBUG: Request has 18445000634858.276007s seconds to be completed 2025-03-31T16:34:11.277451Z node 4 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=4&id=MzkxMzJlYzgtZWEyODg5ZTUtMmQ0ZTU3ZjAtODI4ODQxNmM=, workerId: [4:7488012851469037500:2470], database: /dc-1, longSession: 1, local sessions count: 2 2025-03-31T16:34:11.277604Z node 4 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-03-31T16:34:11.277978Z node 4 :KQP_PROXY DEBUG: [TQueryBase] [TScriptLeaseUpdater] TraceId: 4308c172-7f2b4175-d945851-f1efc912, RunDataQuery: -- TScriptLeaseUpdater::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; SELECT lease_deadline FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); 2025-03-31T16:34:11.278344Z node 4 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=4&id=MzkxMzJlYzgtZWEyODg5ZTUtMmQ0ZTU3ZjAtODI4ODQxNmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
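The TScriptLeaseUpdater::OnRunQuery lookup in the record above, reformatted from the flattened log line for readability (the text is verbatim; only whitespace was added):

    -- TScriptLeaseUpdater::OnRunQuery (reformatted from the log record above)
    DECLARE $database AS Text;
    DECLARE $execution_id AS Text;

    SELECT lease_deadline
    FROM `.metadata/script_execution_leases`
    WHERE database = $database
      AND execution_id = $execution_id
      AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL);

This lookup returning no rows is consistent with the "No such execution" BAD_REQUEST that follows: the finalization above has already deleted the lease row for this execution.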
Send request to target, requestId: 24, targetId: [4:7488012851469037500:2470] 2025-03-31T16:34:11.278398Z node 4 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 24 timeout: 300.000000s actor id: [4:7488012851469037502:2703] 2025-03-31T16:34:11.475876Z node 4 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 24, sender: [4:7488012851469037501:2471], selfId: [4:7488012821404265237:2274], source: [4:7488012851469037500:2470] 2025-03-31T16:34:11.476479Z node 4 :KQP_PROXY DEBUG: [TQueryBase] [TScriptLeaseUpdater] TraceId: 4308c172-7f2b4175-d945851-f1efc912, State: Get lease info, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=4&id=MzkxMzJlYzgtZWEyODg5ZTUtMmQ0ZTU3ZjAtODI4ODQxNmM=, TxId: 01jqpgvbcd7jge94p89ptwfk7b 2025-03-31T16:34:11.476594Z node 4 :KQP_PROXY WARN: [TQueryBase] [TScriptLeaseUpdater] TraceId: 4308c172-7f2b4175-d945851-f1efc912, State: Get lease info, Finish with BAD_REQUEST, Issues: {
: Error: No such execution }, SessionId: ydb://session/3?node_id=4&id=MzkxMzJlYzgtZWEyODg5ZTUtMmQ0ZTU3ZjAtODI4ODQxNmM=, TxId: 01jqpgvbcd7jge94p89ptwfk7b 2025-03-31T16:34:11.476640Z node 4 :KQP_PROXY DEBUG: [TQueryBase] [TScriptLeaseUpdater] TraceId: 4308c172-7f2b4175-d945851-f1efc912, State: Get lease info, Rollback transaction: 01jqpgvbcd7jge94p89ptwfk7b 2025-03-31T16:34:11.478756Z node 4 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=4&id=MzkxMzJlYzgtZWEyODg5ZTUtMmQ0ZTU3ZjAtODI4ODQxNmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 25, targetId: [4:7488012851469037500:2470] 2025-03-31T16:34:11.478796Z node 4 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 25 timeout: 600.000000s actor id: [4:7488012851469037528:2715] 2025-03-31T16:34:11.483713Z node 4 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 25, sender: [4:7488012851469037527:2478], selfId: [4:7488012821404265237:2274], source: [4:7488012851469037500:2470] 2025-03-31T16:34:11.483841Z node 4 :KQP_PROXY DEBUG: [TQueryBase] [TScriptLeaseUpdater] TraceId: 4308c172-7f2b4175-d945851-f1efc912, State: Get lease info, RollbackTransactionResult: SUCCESS. Issues: 2025-03-31T16:34:11.485461Z node 4 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=4&id=MzkxMzJlYzgtZWEyODg5ZTUtMmQ0ZTU3ZjAtODI4ODQxNmM=, workerId: [4:7488012851469037500:2470], local sessions count: 1 2025-03-31T16:34:11.491461Z node 4 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=4&id=N2RlNzQ5ZDYtM2Y2Njc2MS1mNzIxOGRkOS05MjNhZmQxMw==, workerId: [4:7488012838584135229:2362], local sessions count: 0 >> ReadSessionImplTest::DataReceivedCallbackReal [GOOD] >> ReadSessionImplTest::DataReceivedCallback >> KqpNotNullColumns::UpdateOnNotNullPg [GOOD] >> IndexBuildTest::CancellationNoTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> ScriptExecutionsTest::UpdatesLeaseAfterExpiring [GOOD] Test command err: 2025-03-31T16:33:58.912380Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012791807596351:2069];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:58.912433Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000c87/r3tmp/tmpEEE4TO/pdisk_1.dat 2025-03-31T16:33:59.351910Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:33:59.397001Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:59.397200Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:59.404804Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2352 TServer::EnableGrpc on GrpcPort 8337, node 1 2025-03-31T16:33:59.791732Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:59.791755Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:59.791763Z node 1 :NET_CLASSIFIER WARN: failed to initialize 
from file: (empty maybe) 2025-03-31T16:33:59.791868Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:34:00.135740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:01.985363Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-03-31T16:34:01.987249Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-03-31T16:34:01.992041Z node 1 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-03-31T16:34:01.992075Z node 1 :KQP_PROXY DEBUG: Updated table service config. 2025-03-31T16:34:01.992166Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-03-31T16:34:01.992274Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-03-31T16:34:01.992342Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T16:34:01.992385Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T16:34:01.992458Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T16:34:01.993475Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Describe result: PathErrorUnknown 2025-03-31T16:34:01.993477Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T16:34:01.993495Z node 1 :KQP_PROXY NOTICE: Table script_execution_leases updater. Creating table 2025-03-31T16:34:01.993527Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Full table path:/dc-1/.metadata/script_execution_leases 2025-03-31T16:34:01.993710Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Describe result: PathErrorUnknown 2025-03-31T16:34:01.993720Z node 1 :KQP_PROXY NOTICE: Table script_executions updater. Creating table 2025-03-31T16:34:01.993760Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Full table path:/dc-1/.metadata/script_executions 2025-03-31T16:34:01.994076Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Describe result: PathErrorUnknown 2025-03-31T16:34:01.994086Z node 1 :KQP_PROXY NOTICE: Table result_sets updater. Creating table 2025-03-31T16:34:01.994099Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. 
Full table path:/dc-1/.metadata/result_sets 2025-03-31T16:34:01.999465Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:1, at schemeshard: 72057594046644480 2025-03-31T16:34:02.002003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:34:02.003244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:34:02.008343Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 } 2025-03-31T16:34:02.008404Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Subscribe on create table tx: 281474976710659 2025-03-31T16:34:02.009328Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 4 } 2025-03-31T16:34:02.009386Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Subscribe on create table tx: 281474976710660 2025-03-31T16:34:02.009439Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 5 } 2025-03-31T16:34:02.009447Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Subscribe on create table tx: 281474976710658 2025-03-31T16:34:02.144944Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Request: create. Transaction completed: 281474976710659. Doublechecking... 2025-03-31T16:34:02.177122Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Request: create. Transaction completed: 281474976710660. Doublechecking... 2025-03-31T16:34:02.178447Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Request: create. Transaction completed: 281474976710658. Doublechecking... 2025-03-31T16:34:02.231308Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Column diff is empty, finishing 2025-03-31T16:34:02.246408Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Column diff is empty, finishing 2025-03-31T16:34:02.275823Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Column diff is empty, finishing 2025-03-31T16:34:02.276557Z node 1 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: 39f8d1ec-839064c3-46b50fb5-6eb73759, Bootstrap. 
Database: /dc-1 2025-03-31T16:34:02.288281Z node 1 :KQP_PROXY DEBUG: Request has 18445000634867.263381s seconds to be completed 2025-03-31T16:34:02.334822Z node 1 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=1&id=ZTEzY2M3NGYtNzdkZGExN2MtYzU3N2Y2YzItMzVmOTdkMGM=, workerId: [1:7488012808987466421:2333], database: /dc-1, longSession: 1, local sessions count: 1 2025-03-31T16:34:02.335044Z node 1 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-03-31T16:34:02.341062Z node 1 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: 39f8d1ec-839064c3-46b50fb5-6eb73759, RunDataQuery: -- TCreateScriptOperationQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $run_script_actor_id AS Text; DECLARE $execution_status AS Int32; DECLARE $execution_mode AS Int32; DECLARE $query_text AS Text; DECLARE $syntax AS Int32; DECLARE $meta AS JsonDocument; DECLARE $lease_duration AS Interval; DECLARE $execution_meta_ttl AS Interval; UPSERT INTO `.metadata/script_executions` (database, execution_id, run_script_actor_id, execution_status, execution_mode, start_ts, query_text, syntax, meta, expire_at) VALUES ($database, $execution_id, $run_script_actor_id, $execution_status, $execution_mode, CurrentUtcTimestamp(), $query_text, $syntax, $meta, CurrentUtcTimestamp() + $execution_meta_ttl); UPSERT INTO `.metadata/script_execution_leases` (database, execution_id, lease_deadline, lease_generation, expire_at) VALUES ($database, $execution_id, CurrentUtcTimestamp() + $lease_duration, 1, CurrentUtcTimestamp() + $execution_meta_ttl); 2025-03-31T16:34:02.345142Z node 1 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=1&id=ZTEzY2M3NGYtNzdkZGExN2MtYzU3N2Y2YzItMzVmOTdkMGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [1:7488012808987466421:2333] 2025-03-31T16:34:02.345190Z node 1 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 3 timeout: 300.000000s actor id: [1:7488012808987466426:2472] 2025-03-31T16:34:02.348281Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012808987466425:2335], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:02.348396Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:02.349815Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012808987466438:2338], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:02.356069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:2, at schemeshard: 72057594046644480 2025-03-31T16:34:02.367227Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2025-03-31T16:34:02.367615Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012808987466440:2339], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-03-31T16:34:02.469280Z node 1 :TX_PROXY ERROR: Actor# [1:7488012808987466481:2503] txid# 281474976710662, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/poo ... mer_supplied_id, NULL), user_token = IF($applicate_script_external_effect_required, $user_token, NULL), script_sinks = IF($applicate_script_external_effect_required, $script_sinks, NULL), script_secret_names = IF($applicate_script_external_effect_required, $script_secret_names, NULL) WHERE database = $database AND execution_id = $execution_id; DELETE FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id; 2025-03-31T16:34:11.011531Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=ZGRhNTNmOTYtOTAzNmMxYTktMTMzODgyMmYtNzNkMjA2MzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 16, targetId: [2:7488012843700620487:2395] 2025-03-31T16:34:11.011565Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 16 timeout: 300.000000s actor id: [2:7488012847995587852:2610] 2025-03-31T16:34:11.078075Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TScriptLeaseUpdater] TraceId: ad71d083-923867dd-d88afaf2-b683141f, Bootstrap. Database: /dc-1 2025-03-31T16:34:11.078242Z node 2 :KQP_PROXY DEBUG: Request has 18445000634858.473398s seconds to be completed 2025-03-31T16:34:11.079826Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=YTcwMWY2ZGEtNzY4Y2MzMDktZmE1OGY4MzQtOWQwNDZiMjU=, workerId: [2:7488012847995587860:2416], database: /dc-1, longSession: 1, local sessions count: 4 2025-03-31T16:34:11.079975Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-03-31T16:34:11.081165Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TScriptLeaseUpdater] TraceId: ad71d083-923867dd-d88afaf2-b683141f, RunDataQuery: -- TScriptLeaseUpdater::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; SELECT lease_deadline FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); 2025-03-31T16:34:11.082252Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=YTcwMWY2ZGEtNzY4Y2MzMDktZmE1OGY4MzQtOWQwNDZiMjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
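For reference, the TCreateScriptOperationQuery::OnRunQuery statement logged earlier in this test (the RunDataQuery record at 2025-03-31T16:34:02.341062Z) writes both metadata rows together; it is reformatted here from the flattened log line, content verbatim:

    -- TCreateScriptOperationQuery::OnRunQuery (reformatted from the earlier log record)
    DECLARE $database AS Text;
    DECLARE $execution_id AS Text;
    DECLARE $run_script_actor_id AS Text;
    DECLARE $execution_status AS Int32;
    DECLARE $execution_mode AS Int32;
    DECLARE $query_text AS Text;
    DECLARE $syntax AS Int32;
    DECLARE $meta AS JsonDocument;
    DECLARE $lease_duration AS Interval;
    DECLARE $execution_meta_ttl AS Interval;

    UPSERT INTO `.metadata/script_executions`
        (database, execution_id, run_script_actor_id, execution_status, execution_mode,
         start_ts, query_text, syntax, meta, expire_at)
    VALUES ($database, $execution_id, $run_script_actor_id, $execution_status, $execution_mode,
            CurrentUtcTimestamp(), $query_text, $syntax, $meta,
            CurrentUtcTimestamp() + $execution_meta_ttl);

    UPSERT INTO `.metadata/script_execution_leases`
        (database, execution_id, lease_deadline, lease_generation, expire_at)
    VALUES ($database, $execution_id, CurrentUtcTimestamp() + $lease_duration, 1,
            CurrentUtcTimestamp() + $execution_meta_ttl);

The lease-extension path exercised next (TScriptLeaseUpdater::OnGetLeaseInfo, in the following records) then pushes lease_deadline forward by $lease_duration with a plain UPDATE on the same lease table.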
Send request to target, requestId: 18, targetId: [2:7488012847995587860:2416] 2025-03-31T16:34:11.082278Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 18 timeout: 300.000000s actor id: [2:7488012847995587868:2618] 2025-03-31T16:34:11.302955Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 18, sender: [2:7488012847995587863:2419], selfId: [2:7488012826520750380:2268], source: [2:7488012847995587860:2416] 2025-03-31T16:34:11.303296Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TScriptLeaseUpdater] TraceId: ad71d083-923867dd-d88afaf2-b683141f, State: Get lease info, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YTcwMWY2ZGEtNzY4Y2MzMDktZmE1OGY4MzQtOWQwNDZiMjU=, TxId: 01jqpgvb7047tamxexqgvd0140 2025-03-31T16:34:11.303504Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TScriptLeaseUpdater] TraceId: ad71d083-923867dd-d88afaf2-b683141f, State: Get lease info, RunDataQuery: -- TScriptLeaseUpdater::OnGetLeaseInfo DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $lease_duration AS Interval; UPDATE `.metadata/script_execution_leases` SET lease_deadline=(CurrentUtcTimestamp() + $lease_duration) WHERE database = $database AND execution_id = $execution_id; 2025-03-31T16:34:11.303795Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=YTcwMWY2ZGEtNzY4Y2MzMDktZmE1OGY4MzQtOWQwNDZiMjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 19, targetId: [2:7488012847995587860:2416] 2025-03-31T16:34:11.303823Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 19 timeout: 300.000000s actor id: [2:7488012847995587898:2634] 2025-03-31T16:34:11.449835Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488012826520750160:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:11.453141Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:34:11.461816Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 19, sender: [2:7488012847995587897:2428], selfId: [2:7488012826520750380:2268], source: [2:7488012847995587860:2416] 2025-03-31T16:34:11.462016Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TScriptLeaseUpdater] TraceId: ad71d083-923867dd-d88afaf2-b683141f, State: Update lease, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YTcwMWY2ZGEtNzY4Y2MzMDktZmE1OGY4MzQtOWQwNDZiMjU=, TxId: 2025-03-31T16:34:11.462075Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TScriptLeaseUpdater] TraceId: ad71d083-923867dd-d88afaf2-b683141f, State: Update lease, Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YTcwMWY2ZGEtNzY4Y2MzMDktZmE1OGY4MzQtOWQwNDZiMjU=, TxId: 2025-03-31T16:34:11.462814Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=YTcwMWY2ZGEtNzY4Y2MzMDktZmE1OGY4MzQtOWQwNDZiMjU=, workerId: [2:7488012847995587860:2416], local sessions count: 3 2025-03-31T16:34:11.477025Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: 01jqpgvbck6pw3ctq0awhgpwbk, Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=Nzc2ZjBjMTYtODFiZjY5M2UtZjg2NWYzNTktMTkxNDU3OWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 20, targetId: [2:7488012843700620375:2361] 2025-03-31T16:34:11.477084Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 20 timeout: 300.000000s actor id: [2:7488012847995587939:2646] 2025-03-31T16:34:11.486467Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 16, sender: [2:7488012847995587851:2413], selfId: [2:7488012826520750380:2268], source: [2:7488012843700620487:2395] 2025-03-31T16:34:11.487371Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: 4c5e60d0-568faf9d-24b3b356-65326812, State: Update final status, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZGRhNTNmOTYtOTAzNmMxYTktMTMzODgyMmYtNzNkMjA2MzA=, TxId: 2025-03-31T16:34:11.487449Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: 4c5e60d0-568faf9d-24b3b356-65326812, State: Update final status, Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZGRhNTNmOTYtOTAzNmMxYTktMTMzODgyMmYtNzNkMjA2MzA=, TxId: 2025-03-31T16:34:11.487461Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] Finish script execution operation. ExecutionId: 4c5e60d0-568faf9d-24b3b356-65326812. SUCCESS. Issues: 2025-03-31T16:34:11.488045Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=ZGRhNTNmOTYtOTAzNmMxYTktMTMzODgyMmYtNzNkMjA2MzA=, workerId: [2:7488012843700620487:2395], local sessions count: 2 2025-03-31T16:34:11.488209Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=MjUxOGZjNGMtOWVhMDkyNy1kODJiMDc3ZC1hMmIwZjY1, workerId: [2:7488012843700620359:2351], local sessions count: 1 2025-03-31T16:34:11.980379Z node 2 :KQP_PROXY DEBUG: TraceId: "01jqpgvbck6pw3ctq0awhgpwbk", Forwarded response to sender actor, requestId: 20, sender: [2:7488012847995587931:2436], selfId: [2:7488012826520750380:2268], source: [2:7488012843700620375:2361] 2025-03-31T16:34:11.987446Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] [TCheckLeaseStatusActor] ExecutionId: ad71d083-923867dd-d88afaf2-b683141f, Bootstrap. Start TCheckLeaseStatusQueryActor 2025-03-31T16:34:11.987548Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TCheckLeaseStatusQueryActor] TraceId: ad71d083-923867dd-d88afaf2-b683141f, Bootstrap. 
Database: /dc-1 2025-03-31T16:34:11.987759Z node 2 :KQP_PROXY DEBUG: Request has 18445000634857.563884s seconds to be completed 2025-03-31T16:34:11.989672Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=MmVmZjc2ZTktZDliYWRjMDctODFkYjljYTItYTRkZWU3ZDk=, workerId: [2:7488012847995588002:2453], database: /dc-1, longSession: 1, local sessions count: 2 2025-03-31T16:34:11.989799Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-03-31T16:34:11.990190Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TCheckLeaseStatusQueryActor] TraceId: ad71d083-923867dd-d88afaf2-b683141f, RunDataQuery: -- TCheckLeaseStatusQueryActor::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; SELECT operation_status, execution_status, finalization_status, issues, run_script_actor_id FROM `.metadata/script_executions` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); SELECT lease_deadline FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); 2025-03-31T16:34:11.990591Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=MmVmZjc2ZTktZDliYWRjMDctODFkYjljYTItYTRkZWU3ZDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 22, targetId: [2:7488012847995588002:2453] 2025-03-31T16:34:11.990645Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 22 timeout: 300.000000s actor id: [2:7488012847995588004:2677] 2025-03-31T16:34:12.085602Z node 2 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T16:34:12.417126Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 22, sender: [2:7488012847995588003:2454], selfId: [2:7488012826520750380:2268], source: [2:7488012847995588002:2453] 2025-03-31T16:34:12.417715Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TCheckLeaseStatusQueryActor] TraceId: ad71d083-923867dd-d88afaf2-b683141f, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MmVmZjc2ZTktZDliYWRjMDctODFkYjljYTItYTRkZWU3ZDk=, TxId: 2025-03-31T16:34:12.417863Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TCheckLeaseStatusQueryActor] TraceId: ad71d083-923867dd-d88afaf2-b683141f, Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MmVmZjc2ZTktZDliYWRjMDctODFkYjljYTItYTRkZWU3ZDk=, TxId: 2025-03-31T16:34:12.418007Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] [TCheckLeaseStatusActor] ExecutionId: ad71d083-923867dd-d88afaf2-b683141f, reply success 2025-03-31T16:34:12.418293Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=MmVmZjc2ZTktZDliYWRjMDctODFkYjljYTItYTRkZWU3ZDk=, workerId: [2:7488012847995588002:2453], local sessions count: 1 2025-03-31T16:34:12.438439Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=Nzc2ZjBjMTYtODFiZjY5M2UtZjg2NWYzNTktMTkxNDU3OWY=, workerId: [2:7488012843700620375:2361], local sessions count: 0 >> KqpNotNullColumns::SecondaryIndexWithNotNullDataColumn [GOOD] >> KqpNotNullColumns::SecondaryIndexWithNotNullDataColumnPg ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNotNullColumns::UpdateOnNotNullPg [GOOD] Test command err: Trying to start YDB, gRPC: 
29924, MsgBus: 63556 2025-03-31T16:33:45.504524Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012737231039313:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:45.504612Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0008a7/r3tmp/tmpdi0DmA/pdisk_1.dat 2025-03-31T16:33:45.847128Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29924, node 1 2025-03-31T16:33:45.908339Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:45.908522Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:45.910827Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:33:45.925145Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:45.925177Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:45.925187Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:45.925346Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63556 TClient is connected to server localhost:63556 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:33:46.419903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:46.440122Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:33:48.461775Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012750115941867:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:48.461912Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:48.763117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:33:48.871228Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012750115941972:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:48.871328Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:48.871409Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012750115941977:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:48.874964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-31T16:33:48.883970Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012750115941979:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-31T16:33:48.985733Z node 1 :TX_PROXY ERROR: Actor# [1:7488012750115942030:2396] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:33:49.280312Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488012754410909392:2361], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:61: Error: At function: KiUpdateTable!
:1:61: Error: Cannot update primary key column: Key 2025-03-31T16:33:49.280578Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OGUzYjE4MmEtMjg5NzBlZjMtZDY3MzJkNy1jNDQwZTA0Mw==, ActorId: [1:7488012750115941849:2328], ActorState: ExecuteState, TraceId: 01jqpgtnp0324j8m4r13w7t8a8, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-03-31T16:33:49.301060Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488012754410909401:2365], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:63: Error: At function: KiUpdateTable!
:1:63: Error: Cannot update primary key column: Key 2025-03-31T16:33:49.301294Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OGUzYjE4MmEtMjg5NzBlZjMtZDY3MzJkNy1jNDQwZTA0Mw==, ActorId: [1:7488012750115941849:2328], ActorState: ExecuteState, TraceId: 01jqpgtnq62h2129nr8db4pjhd, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 8191, MsgBus: 13345 2025-03-31T16:33:49.966213Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012756650239058:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:49.966300Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0008a7/r3tmp/tmpWTEnjV/pdisk_1.dat 2025-03-31T16:33:50.091227Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8191, node 2 2025-03-31T16:33:50.137039Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:50.137189Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:50.138953Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:33:50.157649Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:50.157674Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:50.157681Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:50.157771Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13345 TClient is connected to server localhost:13345 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:33:50.543298Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:52.601013Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012769535141603:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:52.601180Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:52.609129Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:33:52.647571Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012769535141703:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:52.647673Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012769535141708:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:52.647679Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:52.651266Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-31T16:33:52.659399Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488012769535141710:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: ... e 5 2025-03-31T16:34:03.278302Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:03.278327Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:03.278335Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:03.278452Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20417 TClient is connected to server localhost:20417 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:34:03.823419Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:06.782012Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7488012827914793522:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:06.782125Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:06.805549Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:34:06.906505Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7488012827914793624:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:06.906636Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:06.911139Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7488012827914793629:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:06.916854Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-31T16:34:06.935112Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7488012827914793631:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-31T16:34:06.992564Z node 5 :TX_PROXY ERROR: Actor# [5:7488012827914793682:2393] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:34:07.227359Z node 5 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [5:7488012832209761041:2360], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:63: Error: At function: KiWriteTable!
:1:45: Error: Failed to convert type: Struct<'Key':Int32,'Value':Null> to Struct<'Key':Uint64?,'Value':String>
:1:45: Error: Failed to convert 'Value': Null to String
:1:45: Error: Failed to convert input columns types to scheme types, code: 2031 2025-03-31T16:34:07.229367Z node 5 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=5&id=MTExODc1Y2MtNmFkN2FhNy04ZmQ1NmJmNC1mNTIyNTMzYw==, ActorId: [5:7488012827914793519:2329], ActorState: ExecuteState, TraceId: 01jqpgv778eg4vzzhr88c9ke6s, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 4089, MsgBus: 18130 2025-03-31T16:34:08.008133Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7488012838084877861:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:08.008423Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0008a7/r3tmp/tmp6o3yFT/pdisk_1.dat 2025-03-31T16:34:08.136693Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:34:08.152168Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:08.152270Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:08.153766Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4089, node 6 2025-03-31T16:34:08.203694Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:08.203721Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:08.203731Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:08.203871Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18130 TClient is connected to server localhost:18130 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:34:08.788312Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:12.439768Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488012855264747676:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:12.439923Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:12.470016Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:34:12.539451Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488012855264747777:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:12.539572Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:12.539677Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488012855264747782:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:12.549477Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-31T16:34:12.561899Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7488012855264747784:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-31T16:34:12.626824Z node 6 :TX_PROXY ERROR: Actor# [6:7488012855264747835:2395] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:34:13.002741Z node 6 :KQP_COMPUTE ERROR: SelfId: [6:7488012859559715205:2329], TxId: 281474976715663, task: 1. Ctx: { SessionId : ydb://session/3?node_id=6&id=MWU3MjdhZTEtYzlmMzk1NTAtOTdjOTUyZjktYTc4MTlmYjU=. CustomerSuppliedId : . TraceId : 01jqpgvcs1cq63wvwtb859x1aj. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: BAD_REQUEST KIKIMR_BAD_COLUMN_TYPE: {
: Error: Tried to insert NULL value into NOT NULL column: Value, code: 2031 }. 2025-03-31T16:34:13.003191Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=MWU3MjdhZTEtYzlmMzk1NTAtOTdjOTUyZjktYTc4MTlmYjU=, ActorId: [6:7488012855264747649:2329], ActorState: ExecuteState, TraceId: 01jqpgvcs1cq63wvwtb859x1aj, Create QueryResponse for error on request, msg: 2025-03-31T16:34:13.009010Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7488012838084877861:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:13.009198Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::CancellationNoTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:33:24.700771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:33:24.700857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:33:24.700892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:33:24.700920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:33:24.700955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:33:24.700987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:33:24.701029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:33:24.701100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:33:24.701375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:33:24.771407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:33:24.771458Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:33:24.775257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:33:24.775339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:33:24.775468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:33:24.779159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:33:24.779315Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:33:24.779823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been 
configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:24.779984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:33:24.781402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:24.782308Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:33:24.782354Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:24.782462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:33:24.782495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:33:24.782525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:33:24.782621Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:33:24.787788Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:33:24.873046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:33:24.873253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:24.873454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:33:24.873635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:33:24.873677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:24.875610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:24.875722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:33:24.875897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:24.875949Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:33:24.875978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:33:24.876005Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:33:24.877576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
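The "Change state for txid 1:0" transitions in this block (2 -> 3 above, then 3 -> 128 and 128 -> 240 just below) walk a single schemeshard suboperation through its progress stages. As a reading aid only, here is a minimal C++ sketch of that progression: the enum and function names (ETxState, NextState) are illustrative, the stage names mirror what the log itself prints (TCreateParts, NSubDomainState::TConfigureParts, NSubDomainState::TPropose, TDone), and the numeric values are read directly off the transitions in this log rather than taken from the YDB headers.

#include <cstdint>
#include <cstdio>

// Illustrative sketch only. Values come from the "Change state for txid"
// lines in this log (2 -> 3 -> 128 -> 240); the names mirror the stages
// the log prints (TCreateParts, TConfigureParts, TPropose, TDone).
enum class ETxState : uint8_t {
    CreateParts    = 2,   // "no shards to create, do next state"
    ConfigureParts = 3,   // NSubDomainState::TConfigureParts ProgressState
    Propose        = 128, // NSubDomainState::TPropose, planned by the coordinator
    Done           = 240, // TDone ProgressState, then publication to the scheme board
};

static ETxState NextState(ETxState s) {
    switch (s) {
        case ETxState::CreateParts:    return ETxState::ConfigureParts;
        case ETxState::ConfigureParts: return ETxState::Propose;
        case ETxState::Propose:        return ETxState::Done;
        default:                       return s;
    }
}

int main() {
    for (ETxState s = ETxState::CreateParts; s != ETxState::Done;) {
        ETxState next = NextState(s);
        std::printf("Change state for txid 1:0 %u -> %u\n",
                    static_cast<unsigned>(s), static_cast<unsigned>(next));
        s = next;
    }
    return 0;
}

Run as written, this reproduces the three state-change lines seen in the trace, which makes the otherwise opaque 2/3/128/240 numbers easier to follow in the rest of this block.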
2025-03-31T16:33:24.877621Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:33:24.877649Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:33:24.878866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:24.878901Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:24.878949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:24.878987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:33:24.881478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:33:24.882940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:33:24.883106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:33:24.883873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:24.883970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:33:24.884025Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:24.884255Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:33:24.884297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:24.884427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:33:24.884484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:33:24.886197Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:33:24.886242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:33:24.886376Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:24.886441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:33:24.886629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:24.886668Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:33:24.886742Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:33:24.886769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:24.886798Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:33:24.886822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:24.886847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:33:24.886887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:24.886913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:33:24.886935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:33:24.886990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:33:24.887017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:33:24.887038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:33:24.888474Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:33:24.888564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:33:24.888594Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
less db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:34:13.883568Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:34:13.883713Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:34:13.895465Z node 2 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [2:123:2149] sender: [2:239:2058] recipient: [2:15:2062] 2025-03-31T16:34:13.911334Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:34:13.911606Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:34:13.911868Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:34:13.912112Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:34:13.912169Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:34:13.915365Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:34:13.915485Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:34:13.915710Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:34:13.915772Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:34:13.915823Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:34:13.915860Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:34:13.920693Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:34:13.920793Z node 2 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:34:13.920840Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:34:13.924002Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:34:13.924081Z node 2 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:34:13.924133Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:34:13.924194Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:34:13.924367Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:34:13.926467Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:34:13.926655Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:34:13.927765Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:34:13.927914Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 8589936749 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:34:13.927967Z node 2 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:34:13.928260Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:34:13.928322Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:34:13.928516Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:34:13.928601Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:34:13.931024Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:34:13.931105Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:34:13.931333Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:34:13.931392Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:34:13.931868Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:34:13.931924Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:34:13.932044Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:34:13.932087Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:34:13.932133Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:34:13.932178Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:34:13.932223Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:34:13.932271Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:34:13.932311Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:34:13.932350Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:34:13.932438Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:34:13.932486Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:34:13.932528Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:34:13.933203Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:34:13.933319Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:34:13.933366Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-03-31T16:34:13.933411Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-03-31T16:34:13.933461Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:34:13.933577Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-03-31T16:34:13.937690Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-03-31T16:34:13.938285Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 2025-03-31T16:34:13.939042Z node 2 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_CREATE_INDEX_BUILD: DoExecute TxId: 101 DatabaseName: "/MyRoot" Settings { source_path: "/MyRoot/Table" index { name: "index1" index_columns: "index" global_index { settings { } } } max_batch_rows: 2 max_shards_in_flight: 2 } 2025-03-31T16:34:13.939312Z node 2 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_CREATE_INDEX_BUILD: Reply TxId: 101 Status: BAD_REQUEST Issues { message: "Check failed: path: \'/MyRoot/Table\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" severity: 1 } SchemeStatus: 2 2025-03-31T16:34:13.939490Z node 2 :TX_PROXY DEBUG: actor# [2:269:2260] Bootstrap 2025-03-31T16:34:13.963107Z node 2 :TX_PROXY DEBUG: actor# [2:269:2260] Become StateWork (SchemeCache [2:274:2265]) 2025-03-31T16:34:13.963775Z node 2 :TX_PROXY DEBUG: actor# [2:269:2260] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-31T16:34:13.966362Z node 2 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 BUILDINDEX RESPONSE CREATE: NKikimrIndexBuilder.TEvCreateResponse TxId: 101 Status: BAD_REQUEST Issues { message: "Check 
failed: path: \'/MyRoot/Table\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" severity: 1 } SchemeStatus: 2 TestWaitNotification wait txId: 101 2025-03-31T16:34:13.966781Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-03-31T16:34:13.966836Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-03-31T16:34:13.967337Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-03-31T16:34:13.967455Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-31T16:34:13.967499Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:281:2272] TestWaitNotification: OK eventTxId 101 2025-03-31T16:34:13.967999Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_LIST_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot" PageSize: 100 PageToken: "" 2025-03-31T16:34:13.968110Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_LIST_INDEX_BUILD: Reply Status: SUCCESS NextPageToken: "0" BUILDINDEX RESPONSE LIST: NKikimrIndexBuilder.TEvListResponse Status: SUCCESS NextPageToken: "0" >> ColumnShardTiers::TieringUsage [GOOD] >> KqpNewEngine::PruneWritePartitions-UseSink [GOOD] >> KqpNewEngine::PruneEffectPartitions+UseSink >> KqpSort::ReverseRangeLimitOptimized [GOOD] >> KqpSort::TopParameter >> KqpProxy::PassErrroViaSessionActor [GOOD] >> KqpProxy::NodeDisconnectedTest >> ReadSessionImplTest::DataReceivedCallback [GOOD] >> KqpProxy::InvalidSessionID >> KqpTx::ExplicitTcl [GOOD] >> KqpTx::EmptyTxOnCommit >> KqpNewEngine::OrderedScalarContext [GOOD] >> KqpNewEngine::PagingNoPredicateExtract >> KqpNewEngine::SqlInAsScalar [GOOD] >> KqpNewEngine::SequentialReadsPragma-Enabled ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::TieringUsage [GOOD] Test command err: 2025-03-31T16:31:53.956815Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:31:53.957102Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:31:53.957278Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001b81/r3tmp/tmpwp9lBw/pdisk_1.dat TServer::EnableGrpc on GrpcPort 16339, node 1 TClient is connected to server localhost:62449 2025-03-31T16:31:54.842864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:31:54.896510Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:31:54.897988Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:31:54.898044Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:31:54.898083Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:31:54.899290Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:31:54.937135Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:31:54.937979Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:31:54.950763Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;EXPECTATION=1;WAITING=1 2025-03-31T16:32:05.478593Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:679:2569], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:05.478737Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:05.596549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:2, at schemeshard: 72057594046644480 2025-03-31T16:32:05.929134Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:822:2661], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:05.929257Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:05.929631Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:827:2666], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:05.935773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:2, at schemeshard: 72057594046644480 2025-03-31T16:32:06.065007Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:829:2668], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:32:06.592897Z node 1 :TX_PROXY ERROR: Actor# [1:924:2734] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:32:07.239229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:32:07.661229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:1, at schemeshard: 72057594046644480 2025-03-31T16:32:08.385995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-03-31T16:32:09.057214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-31T16:32:09.556728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-03-31T16:32:11.082939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-03-31T16:32:11.396367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;RESULT=;EXPECTATION=1 FINISHED_REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;EXPECTATION=1;WAITING=1 REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/fake", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;EXPECTATION=1;WAITING=1 2025-03-31T16:32:26.276114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715702:0, at schemeshard: 72057594046644480 REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/fake", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;RESULT=;EXPECTATION=1 FINISHED_REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/fake", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;EXPECTATION=1;WAITING=1 REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier2` WITH ( 
SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/fake", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;EXPECTATION=1;WAITING=1 2025-03-31T16:32:38.075748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715709:0, at schemeshard: 72057594046644480 REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier2` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/fake", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;RESULT=;EXPECTATION=1 FINISHED_REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier2` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/fake", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;EXPECTATION=1;WAITING=1 2025-03-31T16:32:40.702658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715726:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715726 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 15 2025-03-31T16:32:40.949297Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037892;self_id=[1:2931:4243];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:32:40.973320Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037892;self_id=[1:2931:4243];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:32:40.973684Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037892 2025-03-31T16:32:40.981667Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:2931:4243];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:32:40.981889Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:2931:4243];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:32:40.982179Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:2931:4243];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:32:40.982298Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:2931:4243];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:32:40.982375Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:2931:4243];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:32:40.982480Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:2931:4243];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:32:40.982566Z node ... 
6:34:14.849823Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;task_id=fbc8525a-e4d11f0-82cd0b0f-c7f68efe;tablet_id=72075186224037893;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-31T16:34:14.849922Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 72075186224037893 Delete Blob DS:2181038080:[72075186224037893:1:13:15:0:1136:0] 2025-03-31T16:34:14.849970Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 72075186224037893 Delete Blob DS:2181038080:[72075186224037893:1:12:14:0:1520:0] 2025-03-31T16:34:14.850015Z node 1 :TX_COLUMNSHARD DEBUG: fline=task.cpp:21;event=free_resources;task_id=19;external_task_id=fbc8525a-e4d11f0-82cd0b0f-c7f68efe;mem=504;cpu=0; 2025-03-31T16:34:14.850091Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T16:34:14.850205Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;parent=[1:2941:4246];ev_type=NKikimr::NOlap::NResourceBroker::NSubscribe::TEvStartTask;fline=actor.cpp:38;event=ask_resources;task=cpu=0;mem=504;external_task_id=fbd0665c-e4d11f0-b4f14be7-6fd6c1c0;type=CS::TTL;priority=0;; 2025-03-31T16:34:14.850840Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037893;self_id=[1:2941:4246];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;path_id=16;fline=storage.cpp:87;event=granule_compaction_weight;priority=(10,19999998864); 2025-03-31T16:34:14.850923Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037893;self_id=[1:2941:4246];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;path_id=16;fline=optimizer.h:894;stop_instant=NO_VALUE_OPTIONAL;size=2656;next=;count=2;info={bytes=1136;count=1;records=1};event=start_optimization;stop_point=;main_portion=19; 2025-03-31T16:34:14.854706Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;self_id=[1:2941:4246];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=manager.cpp:10;event=lock;process_id=CS::GENERAL::fbd09f14-e4d11f0-9e211ca3-8b154c09; 2025-03-31T16:34:14.854952Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;parent=[1:2941:4246];ev_type=NKikimr::NOlap::NResourceBroker::NSubscribe::TEvStartTask;fline=actor.cpp:38;event=ask_resources;task=cpu=0;mem=5382;external_task_id=fbd09f14-e4d11f0-9e211ca3-8b154c09;type=CS::GENERAL;priority=0;; 2025-03-31T16:34:14.855743Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;self_id=[1:2941:4246];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:67;event=granule_locked;path_id=16;lock_id=CS::GENERAL::fbd09f14-e4d11f0-9e211ca3-8b154c09; 2025-03-31T16:34:14.855783Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;self_id=[1:2941:4246];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:82;event=no_granules; 2025-03-31T16:34:14.855816Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;self_id=[1:2941:4246];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=column_engine_logs.cpp:206;event=no granules for start compaction; 2025-03-31T16:34:14.855843Z node 1 :TX_COLUMNSHARD DEBUG: Compaction not started: cannot prepare compaction at tablet 72075186224037893 2025-03-31T16:34:14.855917Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037893;parent=[1:2941:4246];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=actor.cpp:29;event=result_resources;task_id=20;task=cpu=0;mem=504;external_task_id=fbd0665c-e4d11f0-b4f14be7-6fd6c1c0;type=CS::TTL;priority=0;; 2025-03-31T16:34:14.855952Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;parent=[1:2941:4246];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:9;event=resource_allocated;external_task_id=fbd0665c-e4d11f0-b4f14be7-6fd6c1c0;mem=504;cpu=0; 2025-03-31T16:34:14.855988Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;parent=[1:2941:4246];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:40;event=allocate_resources;external_task_id=fbd0665c-e4d11f0-b4f14be7-6fd6c1c0;task_id=20;mem=504;cpu=0; 2025-03-31T16:34:14.856256Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:2985:4274];tablet_id=72075186224037893;parent=[1:2941:4246];fline=manager.cpp:82;event=ask_data;request=request_id=41;16={portions_count=2};; 2025-03-31T16:34:14.856444Z node 1 :TX_COLUMNSHARD DEBUG: self_id=[1:2985:4274];tablet_id=72075186224037893;parent=[1:2941:4246];fline=columnshard_impl.cpp:1035;background=cleanup;changes_info=type=CS::CLEANUP::PORTIONS;details=(drop 2 portions(portion_id:18;path_id:16;records_count:1;min_schema_snapshot:(plan_step=9500;tx_id=281474976715729;);schema_version:1;level:0;column_size:1136;index_size:0;meta:((produced=INSERTED;));remove_snapshot:(plan_step=1734800595500;tx_id=18446744073709551615;);)(portion_id:17;path_id:16;records_count:1;min_schema_snapshot:(plan_step=9500;tx_id=281474976715729;);schema_version:1;level:0;column_size:1520;index_size:0;meta:((produced=SPLIT_COMPACTED;));remove_snapshot:(plan_step=1734800595500;tx_id=18446744073709551615;);));; 2025-03-31T16:34:14.856579Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 72075186224037893 2025-03-31T16:34:14.856726Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[45] (CS::CLEANUP::PORTIONS) apply at tablet 72075186224037893 2025-03-31T16:34:14.857257Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=4040;raw_bytes=58699;count=2;records=52} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=109040;raw_bytes=3549871;count=2;records=2959} inactive {blob_bytes=2656;raw_bytes=2178;count=2;records=2} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 72075186224037893 2025-03-31T16:34:14.857496Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;parent=[1:2941:4246];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=actor.cpp:29;event=result_resources;task_id=21;task=cpu=0;mem=5382;external_task_id=fbd09f14-e4d11f0-9e211ca3-8b154c09;type=CS::GENERAL;priority=0;; 2025-03-31T16:34:14.857541Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;parent=[1:2941:4246];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:9;event=resource_allocated;external_task_id=fbd09f14-e4d11f0-9e211ca3-8b154c09;mem=5382;cpu=0; 2025-03-31T16:34:14.857577Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;parent=[1:2941:4246];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:40;event=allocate_resources;external_task_id=fbd09f14-e4d11f0-9e211ca3-8b154c09;task_id=21;mem=5382;cpu=0; 2025-03-31T16:34:14.857756Z node 1 :TX_COLUMNSHARD INFO: 
self_id=[1:2985:4274];tablet_id=72075186224037893;parent=[1:2941:4246];fline=manager.cpp:82;event=ask_data;request=request_id=42;16={portions_count=2};; 2025-03-31T16:34:14.857914Z node 1 :TX_COLUMNSHARD DEBUG: self_id=[1:2985:4274];tablet_id=72075186224037893;parent=[1:2941:4246];fline=columnshard_impl.cpp:881;event=compaction;external_task_id=fbd09f14-e4d11f0-9e211ca3-8b154c09; 2025-03-31T16:34:14.857977Z node 1 :TX_COLUMNSHARD DEBUG: self_id=[1:2985:4274];tablet_id=72075186224037893;parent=[1:2941:4246];fline=columnshard_impl.cpp:620;event=start_changes;type=CS::GENERAL;task_id=fbd09f14-e4d11f0-9e211ca3-8b154c09; 2025-03-31T16:34:14.858697Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=fbd09f14-e4d11f0-9e211ca3-8b154c09;fline=actor.cpp:48;task=agents_waiting=1;additional_info=();; 2025-03-31T16:34:14.858979Z node 1 :TX_COLUMNSHARD DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=fbd09f14-e4d11f0-9e211ca3-8b154c09; 2025-03-31T16:34:14.864445Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;parent_id=[1:2941:4246];fline=general_compaction.cpp:133;event=blobs_created_diff;appended=0;;column_id:5;chunk_idx:0;blob_range:[NO_BLOB:0:264];;column_id:3;chunk_idx:0;blob_range:[NO_BLOB:264:256];;column_id:2;chunk_idx:0;blob_range:[NO_BLOB:520:232];;column_id:4294967040;chunk_idx:0;blob_range:[NO_BLOB:752:192];;column_id:1;chunk_idx:0;blob_range:[NO_BLOB:944:192];;column_id:4294967041;chunk_idx:0;blob_range:[NO_BLOB:1136:192];;column_id:4;chunk_idx:0;blob_range:[NO_BLOB:1328:192];;;;switched=(portion_id:20;path_id:16;records_count:1;min_schema_snapshot:(plan_step=9500;tx_id=281474976715729;);schema_version:1;level:0;column_size:1136;index_size:0;meta:((produced=INSERTED;)););(portion_id:19;path_id:16;records_count:1;min_schema_snapshot:(plan_step=9500;tx_id=281474976715729;);schema_version:1;level:0;column_size:1520;index_size:0;meta:((produced=SPLIT_COMPACTED;)););; 2025-03-31T16:34:14.864509Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037893;parent_id=[1:2941:4246];fline=general_compaction.cpp:135;event=blobs_created;appended=1;switched=2; 2025-03-31T16:34:14.864674Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;self_id=[1:2941:4246];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; 2025-03-31T16:34:14.865028Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 72075186224037893 2025-03-31T16:34:14.865247Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[47] (CS::GENERAL) apply at tablet 72075186224037893 2025-03-31T16:34:14.866488Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 72075186224037893 Save Batch GenStep: 1:18 Blob count: 1 2025-03-31T16:34:14.866607Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=4040;raw_bytes=58699;count=2;records=52} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=109040;raw_bytes=3549871;count=2;records=2959} inactive {blob_bytes=2656;raw_bytes=2178;count=2;records=2} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 72075186224037893 Cleaning waiting... 
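The two identical "Index: tables 1 inserted {...} compacted {...} s-compacted {...} inactive {...} evicted {...}" summaries above aggregate column-shard portions into five buckets, each tracked by blob bytes, raw bytes, portion count, and record count. A rough self-contained sketch of that bookkeeping follows; the TStats type is invented for illustration, the bucket names and counter names mirror the log, and the classification rule (a portion whose remove_snapshot is set lands in "inactive") plus the per-portion raw_bytes split (1089 + 1089 = 2178, since only the total appears in the dump) are assumptions inferred from the portion dumps printed nearby, not taken from the column-shard code.

#include <cstdint>
#include <cstdio>
#include <map>
#include <string>

// Sketch of the bucket accounting behind the
// "Index: tables 1 inserted {...} ... inactive {...} evicted {...}" lines.
// Bucket and counter names mirror the log; the per-portion raw_bytes split
// is an assumption, since only the bucket totals appear in the dump.
struct TStats {
    uint64_t BlobBytes = 0, RawBytes = 0, Count = 0, Records = 0;
    void Add(uint64_t blob, uint64_t raw, uint64_t records) {
        BlobBytes += blob; RawBytes += raw; Records += records; ++Count;
    }
};

int main() {
    std::map<std::string, TStats> index;
    // The two portions switched out by the compaction above (column_size 1136
    // and 1520, one record each) end up in the "inactive" bucket once their
    // remove_snapshot is set.
    index["inactive"].Add(1136, 1089, 1);
    index["inactive"].Add(1520, 1089, 1);
    for (const auto& [bucket, s] : index) {
        std::printf("%s {blob_bytes=%llu;raw_bytes=%llu;count=%llu;records=%llu}\n",
                    bucket.c_str(),
                    (unsigned long long)s.BlobBytes, (unsigned long long)s.RawBytes,
                    (unsigned long long)s.Count, (unsigned long long)s.Records);
    }
    return 0;
}

Under those assumptions this prints "inactive {blob_bytes=2656;raw_bytes=2178;count=2;records=2}", matching the inactive bucket in the summaries above.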
Fake storage clean FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037892 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 72075186224037892 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 72075186224037892 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037892 FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037893 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 72075186224037893 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 72075186224037893 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037893 |96.5%| [TA] $(B)/ydb/core/tx/schemeshard/ut_index_build/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpSnapshotRead::TestReadOnly+withSink ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::DataReceivedCallback [GOOD] Test command err: 2025-03-31T16:33:44.309523Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:33:44.309573Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:33:44.309680Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T16:33:44.311033Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-31T16:33:44.325187Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-31T16:33:44.325436Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:33:44.327473Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-31T16:33:44.330625Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:33:44.330766Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-03-31T16:33:44.330895Z :DEBUG: Take Data. 
Partition 1. Read: {0, 0} (1-1) 2025-03-31T16:33:44.330951Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2025-03-31T16:33:44.331721Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:33:44.331745Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:33:44.331787Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T16:33:44.332114Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-31T16:33:44.332769Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-31T16:33:44.332881Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:33:44.333023Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-31T16:33:44.333431Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:33:44.333532Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-03-31T16:33:44.333632Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-31T16:33:44.333671Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2025-03-31T16:33:44.334589Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:33:44.334613Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:33:44.334633Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T16:33:44.334887Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-31T16:33:44.335418Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-31T16:33:44.335538Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:33:44.335658Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-31T16:33:44.336265Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:33:44.336412Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-03-31T16:33:44.336507Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-31T16:33:44.336544Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2025-03-31T16:33:44.337254Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:33:44.337302Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:33:44.337316Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T16:33:44.337563Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-03-31T16:33:44.338092Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-31T16:33:44.338453Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:33:44.338638Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-31T16:33:44.347892Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:33:44.356785Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-03-31T16:33:44.356947Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-31T16:33:44.357005Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2025-03-31T16:33:44.359510Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:33:44.359537Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:33:44.359558Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T16:33:44.359898Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-31T16:33:44.361722Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-31T16:33:44.361935Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:33:44.362172Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-31T16:33:44.362538Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:33:44.362656Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-03-31T16:33:44.362750Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-31T16:33:44.362778Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2025-03-31T16:33:44.363292Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:33:44.363308Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:33:44.363325Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T16:33:44.363594Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-31T16:33:44.364269Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-31T16:33:44.364373Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:33:44.364578Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-31T16:33:44.364965Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:33:44.365108Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-03-31T16:33:44.365199Z :DEBUG: Take Data. Partition 1. 
Read: {0, 0} (1-1) 2025-03-31T16:33:44.365237Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2025-03-31T16:33:44.366045Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:33:44.366064Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:33:44.366082Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T16:33:44.366353Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-31T16:33:44.366944Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-31T16:33:44.367065Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:33:44.367276Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-31T16:33:44.367908Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:33:44.368037Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-03-31T16:33:44.368111Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-31T16:33:44.368138Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2025-03-31T16:33:44.368860Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:33:44.368880Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:33:44.368907Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T16:33:44.369181Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-31T16:33:44.369637Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-31T16:33:44.369726Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:33:44.369918Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-31T16:33:44.371240Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:33:44.371573Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-03-31T16:33:44.371647Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-31T16:33:44.371674Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. 
Number of messages 1, size 3 bytes 2025-03-31T16:33:44.377760Z :ReadSession INFO: Random seed for debugging is 1743438824377729 2025-03-31T16:33:44.685834Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012733546525262:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:44.686496Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:33:44.734015Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012732351917405:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:44.734066Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error ... pv6:[::1]:56434" UncompressedSize: 8 Meta: { "logtype": "unknown", "ident": "unknown", "server": "ipv6:[::1]:56434" } } } 2025-03-31T16:34:02.495898Z :DEBUG: [/Root] [/Root] [b6ee8181-d68b42cc-38569edc-46596d3b] [dc1] Commit offsets [2, 3). Partition stream id: 1 2025-03-31T16:34:02.496208Z :DEBUG: [/Root] [/Root] [b6ee8181-d68b42cc-38569edc-46596d3b] [dc1] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-03-31T16:34:02.498312Z :DEBUG: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|853f6dd1-95e558ac-5a4d516d-6cd14ede_0] Write session got write response: sequence_numbers: 3 offsets: 2 already_written: false write_statistics { persist_duration_ms: 1 } 2025-03-31T16:34:02.498393Z :DEBUG: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|853f6dd1-95e558ac-5a4d516d-6cd14ede_0] Write session: acknoledged message 1 2025-03-31T16:34:02.499295Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_13103498943929209815_v1 grpc read done: success# 1, data# { commit { cookies { assign_id: 1 partition_cookie: 3 } } } 2025-03-31T16:34:02.499493Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_13103498943929209815_v1 commit request from client for 3 in TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) 2025-03-31T16:34:02.499524Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_13103498943929209815_v1 commit request from 3 to 3 in TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) 2025-03-31T16:34:02.499563Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_13103498943929209815_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) committing to position 3 prev 2 end 3 by cookie 3 2025-03-31T16:34:02.499723Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-03-31T16:34:02.499749Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-03-31T16:34:02.499845Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user offset is set to 3 (startOffset 0) session shared/user_1_1_13103498943929209815_v1 2025-03-31T16:34:02.499938Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 
2025-03-31T16:34:02.499952Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-31T16:34:02.499967Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-31T16:34:02.499980Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] i0000000000 2025-03-31T16:34:02.499991Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] m0000000000cuser 2025-03-31T16:34:02.500002Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] m0000000000uuser 2025-03-31T16:34:02.500017Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-31T16:34:02.500032Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] =========================== 2025-03-31T16:34:02.500057Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-03-31T16:34:02.504586Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp for offset 3 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-03-31T16:34:02.504654Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-03-31T16:34:02.504669Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 3 2025-03-31T16:34:02.504767Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_13103498943929209815_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { Cookie: 3 } 2025-03-31T16:34:02.504807Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_13103498943929209815_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) commit done to position 3 endOffset 3 with cookie 3 2025-03-31T16:34:02.504828Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_13103498943929209815_v1 replying for commits: assignId# 1, from# 3, to# 3, offset# 3 2025-03-31T16:34:02.505459Z :DEBUG: [/Root] [/Root] [b6ee8181-d68b42cc-38569edc-46596d3b] [dc1] Committed response: { cookies { assign_id: 1 partition_cookie: 3 } } 2025-03-31T16:34:02.585729Z :INFO: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|853f6dd1-95e558ac-5a4d516d-6cd14ede_0] Write session will now close 2025-03-31T16:34:02.585829Z :DEBUG: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|853f6dd1-95e558ac-5a4d516d-6cd14ede_0] Write session: aborting 2025-03-31T16:34:02.586435Z :INFO: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|853f6dd1-95e558ac-5a4d516d-6cd14ede_0] Write session: gracefully shut down, all writes complete 2025-03-31T16:34:02.586488Z :DEBUG: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|853f6dd1-95e558ac-5a4d516d-6cd14ede_0] Write session: destroy 2025-03-31T16:34:02.587258Z node 1 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: test-message-group-id|853f6dd1-95e558ac-5a4d516d-6cd14ede_0 grpc read done: success: 0 data: 2025-03-31T16:34:02.587289Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: 
test-message-group-id|853f6dd1-95e558ac-5a4d516d-6cd14ede_0 grpc read failed 2025-03-31T16:34:02.587322Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message-group-id|853f6dd1-95e558ac-5a4d516d-6cd14ede_0 grpc closed 2025-03-31T16:34:02.587336Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message-group-id|853f6dd1-95e558ac-5a4d516d-6cd14ede_0 is DEAD 2025-03-31T16:34:02.587904Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-03-31T16:34:02.589415Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [1:7488012810855939955:2665] destroyed 2025-03-31T16:34:02.589459Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 2025-03-31T16:34:05.190187Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_13103498943929209815_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 5 from offset3 2025-03-31T16:34:12.489143Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_13103498943929209815_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 6 from offset3 2025-03-31T16:34:12.589533Z :INFO: [/Root] [/Root] [b6ee8181-d68b42cc-38569edc-46596d3b] Closing read session. Close timeout: 0.000000s 2025-03-31T16:34:12.589633Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:3 2025-03-31T16:34:12.589682Z :INFO: [/Root] [/Root] [b6ee8181-d68b42cc-38569edc-46596d3b] Counters: { Errors: 0 CurrentSessionLifetimeMs: 16474 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 24 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-31T16:34:12.589798Z :NOTICE: [/Root] [/Root] [b6ee8181-d68b42cc-38569edc-46596d3b] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-03-31T16:34:12.589844Z :DEBUG: [/Root] [/Root] [b6ee8181-d68b42cc-38569edc-46596d3b] [dc1] Abort session to cluster 2025-03-31T16:34:12.590395Z :NOTICE: [/Root] [/Root] [b6ee8181-d68b42cc-38569edc-46596d3b] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-03-31T16:34:12.599943Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_13103498943929209815_v1 grpc read done: success# 0, data# { } 2025-03-31T16:34:12.600006Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_1_1_13103498943929209815_v1 grpc read failed 2025-03-31T16:34:12.600049Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_1_1_13103498943929209815_v1 grpc closed 2025-03-31T16:34:12.600099Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_1_1_13103498943929209815_v1 is DEAD 2025-03-31T16:34:12.605960Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_1_1_13103498943929209815_v1 2025-03-31T16:34:12.606020Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [1:7488012785086135480:2553] destroyed 2025-03-31T16:34:12.606095Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_1_1_13103498943929209815_v1 2025-03-31T16:34:12.606548Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [1:7488012785086135478:2550] disconnected; active server actors: 1 2025-03-31T16:34:12.606597Z node 2 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [1:7488012785086135478:2550] client user disconnected session shared/user_1_1_13103498943929209815_v1 2025-03-31T16:34:13.918006Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:34:13.918050Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:34:13.918086Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-31T16:34:13.918580Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-31T16:34:13.919130Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-31T16:34:13.919327Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:34:13.919583Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-31T16:34:13.920349Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-03-31T16:34:13.920810Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-03-31T16:34:13.923448Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2025-03-31T16:34:13.923550Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-03-31T16:34:13.923604Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-31T16:34:13.923678Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (2-2) 2025-03-31T16:34:13.923877Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-03-31T16:34:13.923935Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. 
Number of messages 1, size 8 bytes >> TableCreation::ConcurrentTableCreationWithDifferentVersions [GOOD] >> TableCreation::ConcurrentUpdateTable |96.5%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index_build/test-results/unittest/{meta.json ... results_accumulator.log} >> YdbTableSplit::MergeByNoLoadAfterSplit |96.5%| [TA] $(B)/ydb/core/tx/tiering/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.5%| [TA] {RESULT} $(B)/ydb/core/tx/tiering/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/executer_actor/ut/unittest |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/executer_actor/ut/unittest |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/executer_actor/ut/unittest >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldNotCompactBorrowedAfterSplitMergeWhenDisabled [GOOD] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldHandleDataShardReboot >> KqpSnapshotRead::TestSnapshotExpiration+withSink [GOOD] >> KqpProxy::InvalidSessionID [GOOD] >> KqpProxy::LoadedMetadataAfterCompilationTimeout |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/executer_actor/ut/unittest >> KqpKv::ReadRows_PgValue [GOOD] >> KqpKv::ReadRows_PgKey ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::TestSnapshotExpiration+withSink [GOOD] Test command err: Trying to start YDB, gRPC: 18626, MsgBus: 28874 2025-03-31T16:33:56.718109Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012784702068016:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:56.718442Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001273/r3tmp/tmpNYtPSO/pdisk_1.dat 2025-03-31T16:33:57.031753Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18626, node 1 2025-03-31T16:33:57.098584Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:57.098728Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:57.100819Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:33:57.113962Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:57.113998Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:57.114007Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:57.114164Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28874 TClient is connected to server localhost:28874 WaitRootIsUp 'Root'... 
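The read-session traces above repeatedly report ReadSizeBudget = 52428800 (50 MiB) and ReadSizeServerDelta = 0: the client advertises a byte budget for in-flight server reads, re-announces it after every read request, and the delta tracks how much of the grant the server still holds. Below is a minimal self-contained C++ sketch of that bookkeeping; the class name, the exact budget/delta semantics, and the 50 MiB default are assumptions drawn from the log, not the actual YDB SDK implementation.

#include <cstdint>
#include <iostream>

// Sketch of the client-side flow-control bookkeeping visible in the log.
// Assumption: the server may send at most Budget bytes the client has not
// yet released; values here mirror the traced defaults.
class TReadBudget {
public:
    explicit TReadBudget(std::int64_t maxBytes) : Budget(maxBytes) {}

    // Server delivered a batch of `bytes`: the grant moves to "in flight".
    void OnServerMessage(std::int64_t bytes) {
        Budget -= bytes;
        ServerDelta += bytes;
    }

    // Application consumed the data: release the grant back to the server.
    void OnDataConsumed(std::int64_t bytes) {
        ServerDelta -= bytes;
        Budget += bytes;
    }

    void Print(const char* stage) const {
        std::cout << stage << ": ReadSizeBudget = " << Budget
                  << ", ReadSizeServerDelta = " << ServerDelta << "\n";
    }

private:
    std::int64_t Budget = 0;       // bytes the client is still willing to accept
    std::int64_t ServerDelta = 0;  // bytes granted to the server, not yet released
};

int main() {
    TReadBudget budget(52428800);   // 50 MiB, as in the traces
    budget.Print("After sending read request");
    budget.OnServerMessage(8);      // "Number of messages 1, size 8 bytes"
    budget.OnDataConsumed(8);       // data transferred to the client
    budget.Print("In Reconnect");   // back to 52428800 / 0, as logged
}

The test messages are tiny relative to the grant, which is why every cycle in the trace returns to the same 52428800/0 pair.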
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:33:57.511259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:57.530543Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:57.648755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:57.800111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:57.872552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:59.648461Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012797586971671:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:59.649155Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:59.980592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:34:00.017571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:34:00.052777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:34:00.085618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:34:00.119632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:34:00.153511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:34:00.208402Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012801881939479:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:00.208481Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:00.208667Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012801881939484:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:00.212017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:34:00.221002Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012801881939486:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:34:00.317629Z node 1 :TX_PROXY ERROR: Actor# [1:7488012801881939540:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:34:01.718444Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488012784702068016:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:01.718506Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 12256, MsgBus: 15384 2025-03-31T16:34:03.099215Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012817070448275:2204];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:03.125575Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001273/r3tmp/tmpTiNhbE/pdisk_1.dat 2025-03-31T16:34:03.198856Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12256, node 2 2025-03-31T16:34:03.237511Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:03.237607Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:03.238979Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:34:03.279443Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:03.279464Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:03.279472Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:03.279584Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15384 TClient is connected to server localhost:15384 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
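The recurring sequence in the test setup above (a "Resource pool default not found" warning, then "Scheduled retry for error: Transaction ... completed, doublechecking", then a TX_PROXY error "path exist, request accepts it") is a bootstrap race: several actors try to create .metadata/workload_manager/pools/default, one wins, and the losers accept the existing path after re-checking. A hedged C++ sketch of that create-if-missing pattern follows; the in-memory set is a stand-in for the schemeshard, not the real implementation.

#include <iostream>
#include <mutex>
#include <set>
#include <string>
#include <thread>
#include <vector>

// Toy "schemeshard": create fails if the path already exists, as in the
// "Check failed: path ... error: path exist" TX_PROXY message above.
class TToyScheme {
public:
    bool Create(const std::string& path) {
        std::lock_guard<std::mutex> g(M);
        return Paths.insert(path).second;   // false => path exist
    }
    bool Exists(const std::string& path) {
        std::lock_guard<std::mutex> g(M);
        return Paths.count(path) != 0;
    }
private:
    std::mutex M;
    std::set<std::string> Paths;
};

// Idempotent bootstrap: try to create, and on "path exist" doublecheck
// that someone else finished the job, then report success either way.
void EnsureDefaultPool(TToyScheme& scheme, int actorId) {
    const std::string path = ".metadata/workload_manager/pools/default";
    if (scheme.Create(path)) {
        std::cout << "actor " << actorId << ": created pool\n";
    } else if (scheme.Exists(path)) {       // the race loser's doublecheck
        std::cout << "actor " << actorId << ": path exist, accepting it\n";
    }
}

int main() {
    TToyScheme scheme;
    std::vector<std::thread> actors;
    for (int i = 0; i < 3; ++i)
        actors.emplace_back(EnsureDefaultPool, std::ref(scheme), i);
    for (auto& t : actors) t.join();
}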
2025-03-31T16:34:03.811173Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:03.820147Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:34:03.835954Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:34:03.907443Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:04.103103Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-31T16:34:04.174747Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:06.319380Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012829955351786:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:06.319479Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:06.363182Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:34:06.402043Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:34:06.480571Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:34:06.515384Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:34:06.551410Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:34:06.587644Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:34:06.654122Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012829955352301:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:06.654227Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:06.654581Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012829955352306:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:06.658517Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:34:06.669443Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488012829955352308:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:34:06.723758Z node 2 :TX_PROXY ERROR: Actor# [2:7488012829955352361:3446] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:34:08.054728Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488012817070448275:2204];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:08.054813Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:34:18.190521Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:34:18.190555Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:34:19.480612Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7488012885789928122:2641], TxId: 281474976715681, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jqpgvjzc5t1ceg9y34zz1s1r. SessionId : ydb://session/3?node_id=2&id=MWQ0ZWMzNDgtYjJkY2I2ZDUtYTliMDZjZWQtNGIxNGU4YWY=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Source[0] fatal error: {
: Error: Read request aborted subissue: {
: Error: Table id 2 has no snapshot at v1743438847584/18446744073709551615 shard 72075186224037888 with lowWatermark v1743438848000/18446744073709551615 (node# 2 state# Ready) } } 2025-03-31T16:34:19.481035Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7488012885789928122:2641], TxId: 281474976715681, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jqpgvjzc5t1ceg9y34zz1s1r. SessionId : ydb://session/3?node_id=2&id=MWQ0ZWMzNDgtYjJkY2I2ZDUtYTliMDZjZWQtNGIxNGU4YWY=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: ABORTED DEFAULT_ERROR: {
: Error: Read request aborted subissue: {
: Error: Table id 2 has no snapshot at v1743438847584/18446744073709551615 shard 72075186224037888 with lowWatermark v1743438848000/18446744073709551615 (node# 2 state# Ready) } }. 2025-03-31T16:34:19.481402Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7488012885789928123:2642], TxId: 281474976715681, task: 2. Ctx: { TraceId : 01jqpgvjzc5t1ceg9y34zz1s1r. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MWQ0ZWMzNDgtYjJkY2I2ZDUtYTliMDZjZWQtNGIxNGU4YWY=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7488012885789928118:2496], status: ABORTED, reason: {
: Error: Terminate execution } 2025-03-31T16:34:19.481696Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MWQ0ZWMzNDgtYjJkY2I2ZDUtYTliMDZjZWQtNGIxNGU4YWY=, ActorId: [2:7488012834250319950:2496], ActorState: ExecuteState, TraceId: 01jqpgvjzc5t1ceg9y34zz1s1r, Create QueryResponse for error on request, msg: >> KqpSqlIn::KeyTypeMissmatch_Int [GOOD] >> KqpSqlIn::KeyTypeMissmatch_Str |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/executer_actor/ut/unittest >> KqpTx::EmptyTxOnCommit [GOOD] |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |96.6%| [TA] $(B)/ydb/core/kqp/executer_actor/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.6%| [TA] {RESULT} $(B)/ydb/core/kqp/executer_actor/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TableCreation::ConcurrentUpdateTable [GOOD] >> YdbTableSplit::SplitByLoadWithReads [GOOD] >> KqpNotNullColumns::SecondaryIndexWithNotNullDataColumnPg [GOOD] >> KqpNewEngine::PruneEffectPartitions+UseSink [GOOD] >> KqpNewEngine::PruneEffectPartitions-UseSink |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> KqpSnapshotRead::TestReadOnly+withSink [GOOD] >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite3-withSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::EmptyTxOnCommit [GOOD] Test command err: Trying to start YDB, gRPC: 6064, MsgBus: 12560 2025-03-31T16:34:10.638573Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012843952428178:2187];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:10.638967Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001270/r3tmp/tmpLm6Knn/pdisk_1.dat 2025-03-31T16:34:11.012506Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6064, node 1 2025-03-31T16:34:11.090530Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:11.090654Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:11.094764Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:34:11.115144Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:11.115168Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:11.115180Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:11.115374Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12560 TClient is connected to server localhost:12560 WaitRootIsUp 'Root'... 
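The ABORTED read above is a snapshot-expiration failure: the query's MVCC snapshot v1743438847584/18446744073709551615 is older than the shard's low watermark v1743438848000/18446744073709551615, so shard 72075186224037888 no longer retains that version and must reject the read. The versions print as step/txId pairs compared lexicographically; a minimal sketch of the check, using the values from the message (illustrative only):

#include <cstdint>
#include <iostream>
#include <tuple>

// MVCC version as printed in the log: v<step>/<txId>.
struct TRowVersion {
    std::uint64_t Step;
    std::uint64_t TxId;
    bool operator<(const TRowVersion& o) const {
        return std::tie(Step, TxId) < std::tie(o.Step, o.TxId);
    }
};

int main() {
    // Values from the error message above.
    TRowVersion snapshot{1743438847584ULL, 18446744073709551615ULL};
    TRowVersion lowWatermark{1743438848000ULL, 18446744073709551615ULL};

    // A read is only valid if its snapshot is at or above the watermark;
    // otherwise the shard no longer holds that version and must abort.
    if (snapshot < lowWatermark) {
        std::cout << "Table has no snapshot at v" << snapshot.Step
                  << " with lowWatermark v" << lowWatermark.Step << "\n";
    }
}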
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:34:11.687676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:11.704718Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:34:11.717762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:11.884708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:12.069693Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:12.147923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:13.983628Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012856837331716:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:13.983752Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:14.344053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:34:14.416316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:34:14.452683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:34:14.498340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:34:14.538708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:34:14.586454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:34:14.678937Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012861132299533:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:14.679006Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:14.679067Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012861132299538:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:14.683556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:34:14.694513Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012861132299540:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:34:14.783041Z node 1 :TX_PROXY ERROR: Actor# [1:7488012861132299595:3452] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:34:15.637761Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488012843952428178:2187];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:15.637879Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:34:16.101850Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YWQ3YjFmNDEtNDQ4NjRjNDUtNDMzNTNhOS1iMmJjYTg0YQ==, ActorId: [1:7488012865427267181:2496], ActorState: ReadyState, TraceId: 01jqpgvfwy5pt03xmgc031tgbc, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 4944, MsgBus: 64276 2025-03-31T16:34:16.989949Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012871179285643:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:16.990003Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001270/r3tmp/tmpJqnoHq/pdisk_1.dat 2025-03-31T16:34:17.177958Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:34:17.206749Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:17.206805Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:17.211407Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4944, node 2 2025-03-31T16:34:17.267659Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:17.267689Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:17.267699Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:17.267869Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64276 TClient is connected to server localhost:64276 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:34:17.769249Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:17.780242Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:17.836582Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:18.016062Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:34:18.087220Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-31T16:34:20.223269Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012888359156585:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:20.223416Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:20.264573Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:34:20.301926Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:34:20.335626Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:34:20.367352Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:34:20.399651Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:34:20.470705Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:34:20.529166Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012888359157102:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:20.529244Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:20.529303Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012888359157107:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:20.532687Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:34:20.547282Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488012888359157109:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:34:20.649642Z node 2 :TX_PROXY ERROR: Actor# [2:7488012888359157164:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> TableCreation::ConcurrentUpdateTable [GOOD] Test command err: 2025-03-31T16:34:12.497370Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012856084577899:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:12.497596Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000c6e/r3tmp/tmplha4gC/pdisk_1.dat 2025-03-31T16:34:12.904506Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:12.904596Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:12.906948Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:34:12.911027Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22823 TServer::EnableGrpc on GrpcPort 14109, node 1 2025-03-31T16:34:13.169114Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:13.169140Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:13.169147Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:13.169277Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:34:13.309793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:34:15.460102Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-03-31T16:34:15.462874Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-03-31T16:34:15.468823Z node 1 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-03-31T16:34:15.468856Z node 1 :KQP_PROXY DEBUG: Updated table service config. 2025-03-31T16:34:15.468889Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-03-31T16:34:15.468927Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-03-31T16:34:15.469031Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T16:34:15.469067Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T16:34:15.469205Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T16:34:15.469263Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T16:34:15.470847Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Describe result: PathErrorUnknown 2025-03-31T16:34:15.470864Z node 1 :KQP_PROXY NOTICE: Table script_executions updater. Creating table 2025-03-31T16:34:15.470884Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Full table path:/dc-1/.metadata/script_executions 2025-03-31T16:34:15.471007Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Describe result: PathErrorUnknown 2025-03-31T16:34:15.471017Z node 1 :KQP_PROXY NOTICE: Table script_execution_leases updater. Creating table 2025-03-31T16:34:15.471030Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Full table path:/dc-1/.metadata/script_execution_leases 2025-03-31T16:34:15.471282Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Describe result: PathErrorUnknown 2025-03-31T16:34:15.471297Z node 1 :KQP_PROXY NOTICE: Table result_sets updater. Creating table 2025-03-31T16:34:15.471308Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Full table path:/dc-1/.metadata/result_sets 2025-03-31T16:34:15.475400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:1, at schemeshard: 72057594046644480 2025-03-31T16:34:15.478100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:34:15.480329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:34:15.487927Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 } 2025-03-31T16:34:15.487989Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Subscribe on create table tx: 281474976710659 2025-03-31T16:34:15.488250Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 5 } 2025-03-31T16:34:15.488271Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Subscribe on create table tx: 281474976710660 2025-03-31T16:34:15.488573Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 4 } 2025-03-31T16:34:15.488594Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Subscribe on create table tx: 281474976710658 2025-03-31T16:34:15.612022Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Request: create. Transaction completed: 281474976710659. Doublechecking... 2025-03-31T16:34:15.653223Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Request: create. Transaction completed: 281474976710660. Doublechecking... 2025-03-31T16:34:15.660831Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Request: create. Transaction completed: 281474976710658. Doublechecking... 2025-03-31T16:34:15.708525Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Column diff is empty, finishing 2025-03-31T16:34:15.721862Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Column diff is empty, finishing 2025-03-31T16:34:15.722043Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Column diff is empty, finishing 2025-03-31T16:34:15.723076Z node 1 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: 91f6bf52-bdd73db2-56fa53e-3b7f8554, Bootstrap. Database: /dc-1 2025-03-31T16:34:15.734478Z node 1 :KQP_PROXY DEBUG: Request has 18445000634853.817170s seconds to be completed 2025-03-31T16:34:15.737877Z node 1 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=1&id=ZjY4Zjk5NjItZDk0NWRlNzUtNDVkYzFiMmYtZGUxYzIwNjQ=, workerId: [1:7488012868969480683:2333], database: /dc-1, longSession: 1, local sessions count: 1 2025-03-31T16:34:15.738018Z node 1 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-03-31T16:34:15.739149Z node 1 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: 91f6bf52-bdd73db2-56fa53e-3b7f8554, RunDataQuery: -- TCreateScriptOperationQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $run_script_actor_id AS Text; DECLARE $execution_status AS Int32; DECLARE $execution_mode AS Int32; DECLARE $query_text AS Text; DECLARE $syntax AS Int32; DECLARE $meta AS JsonDocument; DECLARE $lease_duration AS Interval; DECLARE $execution_meta_ttl AS Interval; UPSERT INTO `.metadata/script_executions` (database, execution_id, run_script_actor_id, execution_status, execution_mode, start_ts, query_text, syntax, meta, expire_at) VALUES ($database, $execution_id, $run_script_actor_id, $execution_status, $execution_mode, CurrentUtcTimestamp(), $query_text, $syntax, $meta, CurrentUtcTimestamp() + $execution_meta_ttl); UPSERT INTO `.metadata/script_execution_leases` (database, execution_id, lease_deadline, lease_generation, expire_at) VALUES ($database, $execution_id, CurrentUtcTimestamp() + $lease_duration, 1, CurrentUtcTimestamp() + $execution_meta_ttl); 2025-03-31T16:34:15.739718Z node 1 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=1&id=ZjY4Zjk5NjItZDk0NWRlNzUtNDVkYzFiMmYtZGUxYzIwNjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 3, targetId: [1:7488012868969480683:2333] 2025-03-31T16:34:15.739775Z node 1 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 3 timeout: 300.000000s actor id: [1:7488012868969480685:2468] 2025-03-31T16:34:15.741800Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012868969480686:2335], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:15.741891Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:15.741975Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012868969480697:2338], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:15.745440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:2, at schemeshard: 72057594046644480 2025-03-31T16:34:15.754847Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012868969480700:2339], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-03-31T16:34:15.813847Z node 1 :TX_PROXY ERROR: Actor# [1:7488012868969480740:2498] txid# 281474976710662, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" ... thStateAlter)" severity: 1 } SchemeShardStatus: 8 SchemeShardReason: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" SchemeShardTabletId: 72057594046644480 } 2025-03-31T16:34:21.284055Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Unable to subscribe to concurrent transaction, falling back 2025-03-31T16:34:21.284120Z node 2 :KQP_PROXY DEBUG: Table test_table updater. TEvProposeTransactionStatus: { Status: 52 TxId: 281474976715673 Issues { message: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" severity: 1 } SchemeShardStatus: 8 SchemeShardReason: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" SchemeShardTabletId: 72057594046644480 } 2025-03-31T16:34:21.284125Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Unable to subscribe to concurrent transaction, falling back 2025-03-31T16:34:21.284126Z node 2 :KQP_PROXY DEBUG: Table test_table updater. TEvProposeTransactionStatus: { Status: 52 TxId: 281474976715675 Issues { message: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" severity: 1 } SchemeShardStatus: 8 SchemeShardReason: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" SchemeShardTabletId: 72057594046644480 } 2025-03-31T16:34:21.284133Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Unable to subscribe to concurrent transaction, falling back 2025-03-31T16:34:21.284191Z node 2 :KQP_PROXY DEBUG: Table test_table updater. TEvProposeTransactionStatus: { Status: 52 TxId: 281474976715669 Issues { message: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" severity: 1 } SchemeShardStatus: 8 SchemeShardReason: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" SchemeShardTabletId: 72057594046644480 } 2025-03-31T16:34:21.284197Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Unable to subscribe to concurrent transaction, falling back 2025-03-31T16:34:21.285075Z node 2 :TX_PROXY ERROR: Actor# [2:7488012892273965375:2637] txid# 281474976715672, issues: { message: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" severity: 1 } 2025-03-31T16:34:21.285223Z node 2 :KQP_PROXY DEBUG: Table test_table updater. 
TEvProposeTransactionStatus: { Status: 52 TxId: 281474976715672 Issues { message: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" severity: 1 } SchemeShardStatus: 8 SchemeShardReason: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" SchemeShardTabletId: 72057594046644480 } 2025-03-31T16:34:21.285238Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Unable to subscribe to concurrent transaction, falling back 2025-03-31T16:34:21.295315Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Request: alter. Transaction completed: 281474976715666. Doublechecking... 2025-03-31T16:34:21.326831Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 8, sender: [2:7488012892273965303:2366], selfId: [2:7488012875094095238:2266], source: [2:7488012892273965301:2364] 2025-03-31T16:34:21.326997Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 72e0c17f-9107d10-f42dd436-2b15cfaa, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NWQ0MzhjYmMtNzEzNDNkNzktNTBkZTQ4NGYtM2VjMTllMTI=, TxId: 2025-03-31T16:34:21.327025Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 72e0c17f-9107d10-f42dd436-2b15cfaa, Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NWQ0MzhjYmMtNzEzNDNkNzktNTBkZTQ4NGYtM2VjMTllMTI=, TxId: 2025-03-31T16:34:21.327181Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] [TSaveScriptExecutionResultActor] ExecutionId: 72e0c17f-9107d10-f42dd436-2b15cfaa, start saving rows range [0; 1) 2025-03-31T16:34:21.327248Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: 72e0c17f-9107d10-f42dd436-2b15cfaa, Bootstrap. Database: /dc-1 2025-03-31T16:34:21.327517Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=NWQ0MzhjYmMtNzEzNDNkNzktNTBkZTQ4NGYtM2VjMTllMTI=, workerId: [2:7488012892273965301:2364], local sessions count: 2 2025-03-31T16:34:21.327551Z node 2 :KQP_PROXY DEBUG: Request has 18445000634848.224074s seconds to be completed 2025-03-31T16:34:21.329053Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=OTE2ODVkMmItYWE4MDI2Yy1mMDI0ZjgwMi05NGQ3ODNlYg==, workerId: [2:7488012892273965458:2376], database: /dc-1, longSession: 1, local sessions count: 3 2025-03-31T16:34:21.329167Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-03-31T16:34:21.329414Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: 72e0c17f-9107d10-f42dd436-2b15cfaa, RunDataQuery: -- TSaveScriptExecutionResultQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $result_set_id AS Int32; DECLARE $expire_at AS Optional; DECLARE $items AS List>; UPSERT INTO `.metadata/result_sets` SELECT $database as database, $execution_id as execution_id, $result_set_id as result_set_id, T.row_id as row_id, $expire_at as expire_at, T.result_set as result_set, T.accumulated_size as accumulated_size FROM AS_TABLE($items) AS T; 2025-03-31T16:34:21.329732Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=OTE2ODVkMmItYWE4MDI2Yy1mMDI0ZjgwMi05NGQ3ODNlYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
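The RunDataQuery text above lost its generic type parameters to angle-bracket stripping ("AS Optional;" and "AS List>;"). A plausible reconstruction of TSaveScriptExecutionResultQuery::OnRunQuery is sketched below; the struct fields follow the query's own SELECT list, but every type width is an assumption, not confirmed from source:

    -- Sketch, not verbatim source: stripped type parameters reconstructed by hand.
    DECLARE $database AS Text;
    DECLARE $execution_id AS Text;
    DECLARE $result_set_id AS Int32;
    DECLARE $expire_at AS Optional<Timestamp>;  -- assumed: compared against CurrentUtcTimestamp() later in the log
    DECLARE $items AS List<Struct<
        'row_id': Int64,                        -- assumed width
        'result_set': String,
        'accumulated_size': Int64>>;            -- assumed width
    UPSERT INTO `.metadata/result_sets`
    SELECT $database AS database, $execution_id AS execution_id,
           $result_set_id AS result_set_id, T.row_id AS row_id,
           $expire_at AS expire_at, T.result_set AS result_set,
           T.accumulated_size AS accumulated_size
    FROM AS_TABLE($items) AS T;

The neighbouring "Request has 18445000634848.224074s seconds to be completed" is an unbounded deadline in disguise: 2^64 microseconds is 18446744073709.55 s, and subtracting the unix time of these runs (1743438861 s at 2025-03-31T16:34:21Z) yields exactly the logged figure, so the deadline is the maximum representable instant rather than a real timeout.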
TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 10, targetId: [2:7488012892273965458:2376] 2025-03-31T16:34:21.329767Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 10 timeout: 300.000000s actor id: [2:7488012892273965460:2695] 2025-03-31T16:34:21.342909Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-03-31T16:34:21.343379Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-03-31T16:34:21.348528Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-03-31T16:34:21.363398Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-03-31T16:34:21.365965Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-03-31T16:34:21.366062Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-03-31T16:34:21.366101Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-03-31T16:34:21.369618Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-03-31T16:34:21.369618Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-03-31T16:34:21.377652Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-03-31T16:34:21.397822Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=NzQ2MDgyOWQtNzgyNjA3MzYtNmQwYzEwODEtMTI1Y2UwNGY=, workerId: [2:7488012892273965235:2361], local sessions count: 2 2025-03-31T16:34:21.452991Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 10, sender: [2:7488012892273965459:2377], selfId: [2:7488012875094095238:2266], source: [2:7488012892273965458:2376] 2025-03-31T16:34:21.453456Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: 72e0c17f-9107d10-f42dd436-2b15cfaa, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=OTE2ODVkMmItYWE4MDI2Yy1mMDI0ZjgwMi05NGQ3ODNlYg==, TxId: 2025-03-31T16:34:21.453483Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: 72e0c17f-9107d10-f42dd436-2b15cfaa, Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OTE2ODVkMmItYWE4MDI2Yy1mMDI0ZjgwMi05NGQ3ODNlYg==, TxId: 2025-03-31T16:34:21.453589Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] [TSaveScriptExecutionResultActor] ExecutionId: 72e0c17f-9107d10-f42dd436-2b15cfaa, result part successfully saved 2025-03-31T16:34:21.453603Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] [TSaveScriptExecutionResultActor] ExecutionId: 72e0c17f-9107d10-f42dd436-2b15cfaa, reply SUCCESS, issues: 2025-03-31T16:34:21.453780Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: 72e0c17f-9107d10-f42dd436-2b15cfaa, Bootstrap. 
Database: /dc-1 2025-03-31T16:34:21.453824Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=OTE2ODVkMmItYWE4MDI2Yy1mMDI0ZjgwMi05NGQ3ODNlYg==, workerId: [2:7488012892273965458:2376], local sessions count: 1 2025-03-31T16:34:21.453901Z node 2 :KQP_PROXY DEBUG: Request has 18445000634848.097724s seconds to be completed 2025-03-31T16:34:21.455485Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=ZThiODkxZjYtZmQ5NWNjY2MtNDJkYzA2ZTEtN2UyNzA1MTI=, workerId: [2:7488012892273965504:2388], database: /dc-1, longSession: 1, local sessions count: 2 2025-03-31T16:34:21.455582Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-03-31T16:34:21.455707Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: 72e0c17f-9107d10-f42dd436-2b15cfaa, RunDataQuery: -- TSaveScriptFinalStatusActor::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; SELECT operation_status, finalization_status, meta, customer_supplied_id, user_token, script_sinks, script_secret_names FROM `.metadata/script_executions` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); SELECT lease_generation FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); 2025-03-31T16:34:21.455951Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=ZThiODkxZjYtZmQ5NWNjY2MtNDJkYzA2ZTEtN2UyNzA1MTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 12, targetId: [2:7488012892273965504:2388] 2025-03-31T16:34:21.455973Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 12 timeout: 300.000000s actor id: [2:7488012892273965506:2720] >> KqpSort::TopParameter [GOOD] >> AnalyzeColumnshard::AnalyzeRebootSaBeforeSave >> AnalyzeDatashard::AnalyzeTwoTables ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::SplitByLoadWithReads [GOOD] Test command err: 2025-03-31T16:33:53.663715Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012772945204012:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:53.663891Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00179f/r3tmp/tmpXDQJxT/pdisk_1.dat 2025-03-31T16:33:54.086377Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:33:54.087480Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:54.087611Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:54.096376Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30394, node 1 2025-03-31T16:33:54.339581Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:54.339610Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 
2025-03-31T16:33:54.339618Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:54.339777Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21949 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:33:54.858350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:21949 2025-03-31T16:33:56.587826Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012785830106948:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:56.588649Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:56.983277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:33:57.146505Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012790125074423:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:57.146595Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:57.175038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743438837112 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) Table has 1 shards TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743438837112 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) 2025-03-31T16:33:57.232727Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012790125074540:2385], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:57.232802Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:57.233053Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012790125074561:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:57.233067Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012790125074562:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:57.233104Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012790125074563:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:57.233179Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012790125074564:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:57.233281Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012790125074565:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:57.233324Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012790125074566:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:57.234364Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012790125074590:2413], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:57.234432Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012790125074587:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:57.234518Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:57.235596Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012790125074617:2416], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:57.235654Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012790125074619:2418], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:57.235712Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:57.236748Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012790125074633:2421], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:57.236803Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012790125074635:2423], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:57.236859Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:57.237579Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012790125074654:2426], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don' ... INFO: Part operation is done id#281474976715657:0 progress is 1/1 2025-03-31T16:34:17.236930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2025-03-31T16:34:17.239279Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unable to activate 281474976715657:0 2025-03-31T16:34:17.240221Z node 1 :TX_DATASHARD INFO: 72075186224037888 Reporting state Offline to schemeshard 72057594046644480 2025-03-31T16:34:17.240597Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037888, state: Offline, at schemeshard: 72057594046644480 TClient::Ls request: /Root/Foo 2025-03-31T16:34:17.244852Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976726761. Ctx: { TraceId: 01jqpgvh0v4jswnsmecewkyhjj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGZjMzNjYjQtM2ZkMGU1MDMtYmQwYTAzMzctZmFiYjEzZWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:17.245788Z node 1 :TX_DATASHARD INFO: OnTabletStop: 72075186224037888 reason = ReasonStop 2025-03-31T16:34:17.246375Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-03-31T16:34:17.246446Z node 1 :TX_DATASHARD INFO: OnTabletDead: 72075186224037888 2025-03-31T16:34:17.247419Z node 1 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037888 2025-03-31T16:34:17.248279Z node 1 :KQP_COMPUTE WARN: TxId: 281474976726760, task: 1, CA Id [1:7488012876024532577:2365]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 0 2025-03-31T16:34:17.248312Z node 1 :KQP_COMPUTE WARN: TxId: 281474976726756, task: 1, CA Id [1:7488012876024532495:2371]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 0 2025-03-31T16:34:17.248367Z node 1 :KQP_COMPUTE WARN: TxId: 281474976726757, task: 1, CA Id [1:7488012876024532527:2364]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 0 2025-03-31T16:34:17.249684Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976726762. Ctx: { TraceId: 01jqpgvh0wcs4tmzjvy3c0vrxs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDM5NTRlYzAtYTE0ZTNmYTctMTE3YTE5ZmItYzY1YTkxZmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:17.250775Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976726764. Ctx: { TraceId: 01jqpgvh0x9gwfa2222hfgjp94, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmJiMmRhZjAtZjkxNzE1NWItNWI0MzkyYTMtNjZmOTVkYjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:17.251124Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976726763. Ctx: { TraceId: 01jqpgvh0ydx1epfy239b0jtwq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTk4NDJkZC00MzJkZGU5Ny1lMGI3OTAyMi05NmJkZDAyZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls response: 2025-03-31T16:34:17.252840Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976726765. Ctx: { TraceId: 01jqpgvh0y4kemgc5hfg6gs5a8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjA0ZTQ3OGQtZTg4ZjYzM2QtZTE2YjczNGQtMzFmMzJlMWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:17.252940Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976726766. 
Ctx: { TraceId: 01jqpgvh0y22cwbc519kjar2cp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWE2NTJmZTktYjMyNmUyZC02M2EwMjk4Yy0yMzg0ZWYyNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743438837112 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) 2025-03-31T16:34:17.256002Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976726767. Ctx: { TraceId: 01jqpgvh1312w646v4mvbf8pky, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGQ5ODVjNjgtODVkOTc3OTUtMWU1YzdlNTktZjc4MjNhOWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:17.256616Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976726768. Ctx: { TraceId: 01jqpgvh14fnjbcy4wwnztd7tz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGZjMzNjYjQtM2ZkMGU1MDMtYmQwYTAzMzctZmFiYjEzZWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:17.259617Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976726769. Ctx: { TraceId: 01jqpgvh19ed3n01gqn8yfe8z1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDM5NTRlYzAtYTE0ZTNmYTctMTE3YTE5ZmItYzY1YTkxZmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:17.282900Z node 1 :KQP_COMPUTE WARN: TxId: 281474976726757, task: 1, CA Id [1:7488012876024532527:2364]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-03-31T16:34:17.282972Z node 1 :KQP_COMPUTE WARN: TxId: 281474976726756, task: 1, CA Id [1:7488012876024532495:2371]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-03-31T16:34:17.283001Z node 1 :KQP_COMPUTE WARN: TxId: 281474976726760, task: 1, CA Id [1:7488012876024532577:2365]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-03-31T16:34:17.325542Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 2 2025-03-31T16:34:17.325735Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037889 followerId=0, pathId 2: RowCount 0, DataSize 0 2025-03-31T16:34:17.325868Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037890 followerId=0, pathId 2: RowCount 0, DataSize 0 2025-03-31T16:34:17.326014Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-03-31T16:34:17.356643Z node 1 :KQP_COMPUTE WARN: TxId: 281474976726757, task: 1, CA Id [1:7488012876024532527:2364]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-03-31T16:34:17.356711Z node 1 :KQP_COMPUTE WARN: TxId: 281474976726756, task: 1, CA Id [1:7488012876024532495:2371]. 
Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-03-31T16:34:17.394266Z node 1 :KQP_COMPUTE WARN: TxId: 281474976726760, task: 1, CA Id [1:7488012876024532577:2365]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-03-31T16:34:17.476427Z node 1 :KQP_COMPUTE WARN: TxId: 281474976726757, task: 1, CA Id [1:7488012876024532527:2364]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-03-31T16:34:17.531171Z node 1 :KQP_COMPUTE WARN: TxId: 281474976726756, task: 1, CA Id [1:7488012876024532495:2371]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-03-31T16:34:17.582377Z node 1 :KQP_COMPUTE WARN: TxId: 281474976726760, task: 1, CA Id [1:7488012876024532577:2365]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-03-31T16:34:17.743197Z node 1 :KQP_COMPUTE WARN: TxId: 281474976726757, task: 1, CA Id [1:7488012876024532527:2364]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-03-31T16:34:17.743209Z node 1 :KQP_COMPUTE WARN: TxId: 281474976726756, task: 1, CA Id [1:7488012876024532495:2371]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-03-31T16:34:17.877825Z node 1 :KQP_COMPUTE WARN: TxId: 281474976726760, task: 1, CA Id [1:7488012876024532577:2365]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-03-31T16:34:18.143193Z node 1 :KQP_COMPUTE WARN: TxId: 281474976726756, task: 1, CA Id [1:7488012876024532495:2371]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-03-31T16:34:18.339147Z node 1 :KQP_COMPUTE WARN: TxId: 281474976726757, task: 1, CA Id [1:7488012876024532527:2364]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-03-31T16:34:18.412357Z node 1 :KQP_COMPUTE WARN: TxId: 281474976726760, task: 1, CA Id [1:7488012876024532577:2365]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-03-31T16:34:18.878727Z node 1 :KQP_COMPUTE WARN: TxId: 281474976726757, task: 1, CA Id [1:7488012876024532527:2364]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-03-31T16:34:18.946794Z node 1 :KQP_COMPUTE WARN: TxId: 281474976726756, task: 1, CA Id [1:7488012876024532495:2371]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-03-31T16:34:18.946816Z node 1 :KQP_COMPUTE WARN: TxId: 281474976726760, task: 1, CA Id [1:7488012876024532577:2365]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-03-31T16:34:19.543166Z node 1 :KQP_COMPUTE WARN: TxId: 281474976726757, task: 1, CA Id [1:7488012876024532527:2364]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-03-31T16:34:19.905749Z node 1 :KQP_COMPUTE WARN: TxId: 281474976726756, task: 1, CA Id [1:7488012876024532495:2371]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-03-31T16:34:19.905794Z node 1 :KQP_COMPUTE WARN: TxId: 281474976726760, task: 1, CA Id [1:7488012876024532577:2365]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-03-31T16:34:20.472116Z node 1 :KQP_COMPUTE WARN: TxId: 281474976726757, task: 1, CA Id [1:7488012876024532527:2364]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-03-31T16:34:20.472142Z node 1 :KQP_COMPUTE WARN: TxId: 281474976726756, task: 1, CA Id [1:7488012876024532495:2371]. 
Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-03-31T16:34:20.472264Z node 1 :KQP_COMPUTE WARN: TxId: 281474976726760, task: 1, CA Id [1:7488012876024532577:2365]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743438837112 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) Table has 2 shards ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNotNullColumns::SecondaryIndexWithNotNullDataColumnPg [GOOD] Test command err: Trying to start YDB, gRPC: 24046, MsgBus: 29429 2025-03-31T16:33:51.011413Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012762809024735:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:51.011481Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00083b/r3tmp/tmpY10ZjV/pdisk_1.dat 2025-03-31T16:33:51.287008Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24046, node 1 2025-03-31T16:33:51.361578Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:51.361609Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:51.361617Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:51.361732Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:33:51.401153Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:51.401315Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:51.403277Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29429 TClient is connected to server localhost:29429 WaitRootIsUp 'Root'... 
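The SplitByLoadWithReads run above starts with "Table has 1 shards" and ends with "Table has 2 shards": sustained reads pushed datashard 72075186224037888 to split, after which the repeated EvDeliveryProblem warnings are just compute actors retrying against the retired tablet. A minimal sketch of a table configured to behave this way, using the documented YQL partitioning settings (the column set and key are assumptions based on the truncated Ls output):

    -- Sketch: load-based auto-splitting, as exercised by the test above.
    CREATE TABLE Foo (
        NameHash Uint32,
        Name     Utf8,
        Version  Uint64,                -- the Ls output truncates at "Versio..."; type assumed
        PRIMARY KEY (NameHash, Name)    -- assumed key
    ) WITH (
        AUTO_PARTITIONING_BY_LOAD = ENABLED,
        AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1
    );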
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:33:51.857673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:53.492031Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012771398959988:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:53.492183Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:53.748701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:33:53.867859Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012771398960091:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:53.867950Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:53.868059Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012771398960096:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:53.871777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-31T16:33:53.881057Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012771398960098:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-31T16:33:53.971068Z node 1 :TX_PROXY ERROR: Actor# [1:7488012771398960149:2393] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:33:54.123662Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488012775693927490:2355], status: PRECONDITION_FAILED, issues:
: Error: Type annotation, code: 1030
:1:14: Error: At function: KiWriteTable!
:1:14: Error: Missing key column in input: Key for table: /Root/TestReplaceNotNullPk, code: 2029 2025-03-31T16:33:54.123999Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDRhNTBlZGYtYjA2N2E4MDktZTIzOTY1NC1kOGI1ODdkYg==, ActorId: [1:7488012771398959985:2328], ActorState: ExecuteState, TraceId: 01jqpgttdp35fm70b17b3rgya1, ReplyQueryCompileError, status PRECONDITION_FAILED remove tx with tx_id: 2025-03-31T16:33:54.147102Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488012775693927499:2359], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:14: Error: At function: KiWriteTable!
:1:49: Error: Failed to convert type: Struct<'Key':Null,'Value':String> to Struct<'Key':Uint64,'Value':String?>
:1:49: Error: Failed to convert 'Key': Null to Uint64
:1:49: Error: Failed to convert input columns types to scheme types, code: 2031 2025-03-31T16:33:54.147344Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDRhNTBlZGYtYjA2N2E4MDktZTIzOTY1NC1kOGI1ODdkYg==, ActorId: [1:7488012771398959985:2328], ActorState: ExecuteState, TraceId: 01jqpgttem7tnc2msdx962zfvq, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 21892, MsgBus: 24027 2025-03-31T16:33:54.731152Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012777728311303:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:54.731207Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00083b/r3tmp/tmpu0vkUe/pdisk_1.dat 2025-03-31T16:33:54.881756Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:33:54.903052Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:54.903146Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:54.905425Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21892, node 2 2025-03-31T16:33:54.982618Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:54.982639Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:54.982648Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:54.982742Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24027 TClient is connected to server localhost:24027 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:33:55.325662Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:57.542108Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012790613213829:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:57.542214Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:57.557256Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:33:57.595845Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012790613213929:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:57.595940Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012790613213934:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:57.595940Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:57.600291Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-31T16:33:57.608521Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService ... 037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22300, node 6 2025-03-31T16:34:15.457460Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:15.457489Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:15.457501Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:15.457671Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13977 TClient is connected to server localhost:13977 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:34:16.053842Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:16.059785Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:34:19.307777Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488012883428174541:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:19.308081Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:19.322479Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:34:19.415615Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488012883428174692:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:19.415722Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:19.415760Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488012883428174698:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:19.420142Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-31T16:34:19.430851Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7488012883428174700:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-31T16:34:19.522854Z node 6 :TX_PROXY ERROR: Actor# [6:7488012883428174751:2432] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:34:20.224265Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7488012866248304692:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:20.224340Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:34:20.589312Z node 6 :KQP_COMPUTE ERROR: SelfId: [6:7488012887723142211:2382], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01jqpgvkx9evnqg1b76sx0nz8j. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=6&id=ZDRhYjJlZDEtNDQxYjM5NzUtZjNjNzlhZjMtNjNjMTY1ZWI=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: BAD_REQUEST KIKIMR_BAD_COLUMN_TYPE: {
: Error: Tried to insert NULL value into NOT NULL column: Index1, code: 2031 }. 2025-03-31T16:34:20.589963Z node 6 :KQP_COMPUTE ERROR: SelfId: [6:7488012887723142214:2384], TxId: 281474976715663, task: 3. Ctx: { TraceId : 01jqpgvkx9evnqg1b76sx0nz8j. SessionId : ydb://session/3?node_id=6&id=ZDRhYjJlZDEtNDQxYjM5NzUtZjNjNzlhZjMtNjNjMTY1ZWI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [6:7488012887723142207:2330], status: BAD_REQUEST, reason: {
: Error: Terminate execution } 2025-03-31T16:34:20.589963Z node 6 :KQP_COMPUTE ERROR: SelfId: [6:7488012887723142213:2383], TxId: 281474976715663, task: 2. Ctx: { TraceId : 01jqpgvkx9evnqg1b76sx0nz8j. SessionId : ydb://session/3?node_id=6&id=ZDRhYjJlZDEtNDQxYjM5NzUtZjNjNzlhZjMtNjNjMTY1ZWI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [6:7488012887723142207:2330], status: BAD_REQUEST, reason: {
: Error: Terminate execution } 2025-03-31T16:34:20.590337Z node 6 :KQP_COMPUTE ERROR: SelfId: [6:7488012887723142215:2385], TxId: 281474976715663, task: 4. Ctx: { TraceId : 01jqpgvkx9evnqg1b76sx0nz8j. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=6&id=ZDRhYjJlZDEtNDQxYjM5NzUtZjNjNzlhZjMtNjNjMTY1ZWI=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [6:7488012887723142207:2330], status: BAD_REQUEST, reason: {
: Error: Terminate execution } 2025-03-31T16:34:20.592922Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=ZDRhYjJlZDEtNDQxYjM5NzUtZjNjNzlhZjMtNjNjMTY1ZWI=, ActorId: [6:7488012883428174536:2330], ActorState: ExecuteState, TraceId: 01jqpgvkx9evnqg1b76sx0nz8j, Create QueryResponse for error on request, msg: 2025-03-31T16:34:20.628726Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-31T16:34:20.652019Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:7488012887723142238:2389], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:1:13: Error: At function: KiWriteTable!
:1:13: Error: Missing not null column in input: Index1. All not null columns should be initialized, code: 2032 2025-03-31T16:34:20.654058Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=ZDRhYjJlZDEtNDQxYjM5NzUtZjNjNzlhZjMtNjNjMTY1ZWI=, ActorId: [6:7488012883428174536:2330], ActorState: ExecuteState, TraceId: 01jqpgvmat2cc5s671nfnm4936, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: 2025-03-31T16:34:20.690346Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:7488012887723142257:2397], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:1:13: Error: At function: KiWriteTable!
:1:13: Error: Missing not null column in input: Index1. All not null columns should be initialized, code: 2032 2025-03-31T16:34:20.692457Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=ZDRhYjJlZDEtNDQxYjM5NzUtZjNjNzlhZjMtNjNjMTY1ZWI=, ActorId: [6:7488012883428174536:2330], ActorState: ExecuteState, TraceId: 01jqpgvmbqagm10sff31962bvf, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: 2025-03-31T16:34:20.717383Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:7488012887723142277:2405], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:1:14: Error: At function: KiWriteTable!
:1:14: Error: Missing not null column in input: Index1. All not null columns should be initialized, code: 2032 2025-03-31T16:34:20.717652Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=ZDRhYjJlZDEtNDQxYjM5NzUtZjNjNzlhZjMtNjNjMTY1ZWI=, ActorId: [6:7488012883428174536:2330], ActorState: ExecuteState, TraceId: 01jqpgvmcveak4at7xqnb25kdt, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: 2025-03-31T16:34:21.090388Z node 6 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TKqpLiteralExecuter, TKqpEnsure failed. 2025-03-31T16:34:21.105043Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:7488012887723142296:2413], status: BAD_REQUEST, issues:
: Error: Execution, code: 1060
: Error: Tried to insert NULL value into NOT NULL column: Index1, code: 2031 2025-03-31T16:34:21.105307Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=ZDRhYjJlZDEtNDQxYjM5NzUtZjNjNzlhZjMtNjNjMTY1ZWI=, ActorId: [6:7488012883428174536:2330], ActorState: ExecuteState, TraceId: 01jqpgvmdq15drsx7v9nt13v44, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: 2025-03-31T16:34:21.678111Z node 6 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TKqpLiteralExecuter, TKqpEnsure failed. 2025-03-31T16:34:21.684445Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:7488012892018109619:2422], status: BAD_REQUEST, issues:
: Error: Execution, code: 1060
: Error: Tried to insert NULL value into NOT NULL column: Index1, code: 2031 2025-03-31T16:34:21.685591Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=ZDRhYjJlZDEtNDQxYjM5NzUtZjNjNzlhZjMtNjNjMTY1ZWI=, ActorId: [6:7488012883428174536:2330], ActorState: ExecuteState, TraceId: 01jqpgvmss3ks962hd580wy9dq, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: 2025-03-31T16:34:22.358078Z node 6 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TKqpLiteralExecuter, TKqpEnsure failed. 2025-03-31T16:34:22.364536Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:7488012892018109655:2434], status: BAD_REQUEST, issues:
: Error: Execution, code: 1060
: Error: Tried to insert NULL value into NOT NULL column: Index1, code: 2031 2025-03-31T16:34:22.364871Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=ZDRhYjJlZDEtNDQxYjM5NzUtZjNjNzlhZjMtNjNjMTY1ZWI=, ActorId: [6:7488012883428174536:2330], ActorState: ExecuteState, TraceId: 01jqpgvnc07bw6gkwdfs3d6p86, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: >> KqpNewEngine::SequentialReadsPragma-Enabled [GOOD] >> KqpNewEngine::PagingNoPredicateExtract [GOOD] >> KqpNewEngine::MultipleBroadcastJoin >> DataShardReadTableSnapshots::ReadTableSnapshot ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpSort::TopParameter [GOOD] Test command err: Trying to start YDB, gRPC: 30191, MsgBus: 2387 2025-03-31T16:33:46.064298Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012742994602363:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:46.064348Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00088d/r3tmp/tmpbnaZgl/pdisk_1.dat 2025-03-31T16:33:46.352331Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30191, node 1 2025-03-31T16:33:46.427129Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:46.427320Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:46.428988Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:33:46.431046Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:46.431067Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:46.431074Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:46.431213Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2387 TClient is connected to server localhost:2387 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:33:46.969184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
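Back in the SecondaryIndexWithNotNullDataColumnPg output, the recurring failures fall into two buckets: compile-time "Missing not null column in input: Index1" (code 2032) when a write statement omits the column entirely, and runtime KIKIMR_BAD_COLUMN_TYPE "Tried to insert NULL value into NOT NULL column" (code 2031) when an explicit NULL reaches TKqpEnsure. A minimal sketch of the first bucket, with table and column names chosen for illustration rather than taken from the test source:

    -- Hypothetical schema: Index1 is a NOT NULL data column.
    CREATE TABLE TestNotNull (
        Key    Uint64 NOT NULL,
        Index1 Utf8   NOT NULL,
        Value  Utf8,
        PRIMARY KEY (Key)
    );

    -- Rejected at compile time with code 2032: the input row never
    -- initializes Index1, so KiWriteTable reports the missing column.
    UPSERT INTO TestNotNull (Key, Value) VALUES (1u, "one");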
2025-03-31T16:33:46.989578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:47.140180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:47.274649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:47.335719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:48.722925Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012751584538730:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:48.723076Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:48.965750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:33:48.995784Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:33:49.022507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:33:49.051863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:33:49.083862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:33:49.117997Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:33:49.156792Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012755879506538:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:49.156870Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012755879506543:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:49.156884Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:49.160320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:33:49.170114Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012755879506545:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:33:49.251972Z node 1 :TX_PROXY ERROR: Actor# [1:7488012755879506600:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 12571, MsgBus: 18558 2025-03-31T16:33:51.041997Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012762451080605:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:51.042088Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00088d/r3tmp/tmpHVTnKj/pdisk_1.dat 2025-03-31T16:33:51.123117Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12571, node 2 2025-03-31T16:33:51.175348Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:51.175446Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:51.176468Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:51.176489Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:51.176498Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:51.176625Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:33:51.176913Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18558 TClient is connected to server localhost:18558 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:33:51.501177Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:51.518918Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:33:51.593972Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:51.729828Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:51.797407Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:53.652078Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012771041016980:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:53.652212Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: { : Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:12.818144Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:12.883841Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:34:12.936311Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:34:12.983990Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:34:13.032346Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:34:13.078080Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:34:13.120228Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:34:13.207070Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7488012860074976454:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:13.207219Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:13.207551Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7488012860074976459:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:13.212780Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:34:13.225607Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7488012860074976461:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:34:13.319290Z node 5 :TX_PROXY ERROR: Actor# [5:7488012860074976517:3449] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:34:13.662442Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7488012838600137680:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:13.662532Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 18686, MsgBus: 26009 2025-03-31T16:34:16.016695Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7488012870763722058:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:16.016782Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00088d/r3tmp/tmpEHAcey/pdisk_1.dat 2025-03-31T16:34:16.144904Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:34:16.224693Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:16.224810Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:16.226688Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18686, node 6 2025-03-31T16:34:16.319826Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:16.319857Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:16.319871Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:16.320024Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26009 TClient is connected to server localhost:26009 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:34:16.933497Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:16.942370Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:34:16.962069Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:17.092928Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:17.324925Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:17.438675Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:20.446634Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488012887943593019:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:20.446756Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:20.517580Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:34:20.558234Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:34:20.596649Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:34:20.638580Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:34:20.680472Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:34:20.734145Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:34:20.780936Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488012887943593531:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:20.781029Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:20.781216Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488012887943593536:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:20.785895Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:34:20.797543Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7488012887943593538:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:34:20.863460Z node 6 :TX_PROXY ERROR: Actor# [6:7488012887943593591:3449] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:34:21.017022Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7488012870763722058:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:21.017130Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> Compression::WriteZSTD [GOOD] >> Compression::WriteWithMixedCodecs >> KqpSinkTx::SnapshotRO >> KqpNewEngine::ItemsLimit [GOOD] >> KqpNewEngine::JoinDictWithPure >> YdbTableSplit::SplitByLoadWithUpdates [GOOD] >> DataShardReadTableSnapshots::ReadTableSplitBefore >> KqpProxy::DatabasesCacheForServerless [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithNoCompression [GOOD] >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessages >> BasicUsage::BrokenCredentialsProvider [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::SequentialReadsPragma-Enabled [GOOD] Test command err: Trying to start YDB, gRPC: 15517, MsgBus: 64544 2025-03-31T16:33:45.594976Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012736936358226:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:45.595176Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0008ba/r3tmp/tmpLcpkJ7/pdisk_1.dat 2025-03-31T16:33:45.896718Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15517, node 1 2025-03-31T16:33:45.969494Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:45.969637Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:45.971054Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:33:45.973316Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:45.973340Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:45.973371Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:45.973507Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64544 TClient is connected to server localhost:64544 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:33:46.460314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:48.465500Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012749821260776:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:48.465595Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:48.756850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:33:48.862029Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012749821260880:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:48.862082Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:48.862152Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012749821260885:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:48.865815Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-31T16:33:48.875701Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012749821260887:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-31T16:33:48.967105Z node 1 :TX_PROXY ERROR: Actor# [1:7488012749821260938:2394] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 19114, MsgBus: 5530 2025-03-31T16:33:49.783947Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012757066779810:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:49.784087Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0008ba/r3tmp/tmpQssYn2/pdisk_1.dat 2025-03-31T16:33:49.898075Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:33:49.923535Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:49.923623Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:49.925088Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19114, node 2 2025-03-31T16:33:49.965627Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:49.965652Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:49.965660Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:49.965784Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5530 TClient is connected to server localhost:5530 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:33:50.331311Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:50.347008Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:33:50.420167Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:50.575124Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:50.642300Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:52.491699Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012769951683472:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:52.491794Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:52.533978Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:33:52.556954Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:33:52.582582Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:33:52.609295Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:33:52.639729Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:33:52.674677Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:33:52.710724Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012769951683983:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:52.710800Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:52.710803Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadSe ... : [WorkloadService] [TPoolFetcherActor] ActorId: [5:7488012856732932231:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:13.089082Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:13.146047Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:34:13.188915Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:34:13.235629Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:34:13.274831Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:34:13.318742Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:34:13.365069Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:34:13.422573Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7488012856732932743:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:13.422688Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:13.422893Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7488012856732932748:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:13.427118Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:34:13.437784Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7488012856732932750:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:34:13.535272Z node 5 :TX_PROXY ERROR: Actor# [5:7488012856732932806:3449] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:34:13.997331Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7488012835258094000:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:13.997393Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 22851, MsgBus: 6589 2025-03-31T16:34:17.360305Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7488012875638934092:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:17.360573Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0008ba/r3tmp/tmpuzIOsB/pdisk_1.dat 2025-03-31T16:34:17.523446Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:34:17.523657Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:17.523753Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:17.540647Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22851, node 6 2025-03-31T16:34:17.627872Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:17.627908Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:17.627921Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:17.628089Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6589 TClient is connected to server localhost:6589 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:34:18.316168Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:18.334783Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:18.422574Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:18.636649Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:18.732688Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:21.635461Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488012892818805015:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:21.635583Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:21.702601Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:34:21.745035Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:34:21.800739Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:34:21.855455Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:34:21.906435Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:34:21.961127Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:34:22.095241Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488012897113772828:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:22.095443Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:22.096117Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488012897113772833:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:22.100776Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:34:22.113152Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7488012897113772835:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:34:22.182870Z node 6 :TX_PROXY ERROR: Actor# [6:7488012897113772889:3449] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:34:22.362217Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7488012875638934092:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:22.362306Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> DataShardReadTableSnapshots::ReadTableDropColumnLatePropose >> KqpKv::ReadRows_PgKey [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::SplitByLoadWithUpdates [GOOD] Test command err: 2025-03-31T16:33:58.939053Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012792857917649:2078];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:58.939114Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001779/r3tmp/tmpGqs9gh/pdisk_1.dat 2025-03-31T16:33:59.350064Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:33:59.364069Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:59.364205Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:59.373043Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25610, node 1 2025-03-31T16:33:59.565231Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:59.565277Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:59.565296Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:59.565448Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28091 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:33:59.842522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:28091 2025-03-31T16:34:02.237968Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012810037787859:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:02.238085Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:02.482223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:34:02.672275Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012810037788043:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:02.672356Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:02.692779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743438842600 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) Table has 1 shards TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743438842600 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) 2025-03-31T16:34:02.813794Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012810037788141:2381], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:02.813860Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:02.814146Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012810037788146:2384], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:02.820154Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012810037788163:2388], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:02.820326Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012810037788171:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:02.820360Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012810037788173:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:02.822233Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012810037788174:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:02.822363Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:02.828547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata, operationId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:34:02.828685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-31T16:34:02.828699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata/workload_manager, operationId: 281474976710660:1, at schemeshard: 72057594046644480 2025-03-31T16:34:02.828792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-31T16:34:02.828806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata/workload_manager/pools, operationId: 281474976710660:2, at schemeshard: 72057594046644480 2025-03-31T16:34:02.828842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-31T16:34:02.828892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976710660:3, path# /Root/.metadata/workload_manager/pools/default 2025-03-31T16:34:02.829085Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710660:3 1 -> 128 2025-03-31T16:34:02.829318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:4, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-31T16:34:02.829333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-03-31T16:34:02.830146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976710661:0, path# /Root/.metadata/workload_manager/pools/default 2025-03-31T16:34:02.830263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710661:1, propose status:StatusMultipleModifications, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate), at schemeshard: 72057594046644480 2025-03-31T16:34:02.831326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976710662:0, path# /Root/.metadata/workload_manager/pools/default 2025-03-31T16:34:02.831429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710662:1, propose status:StatusMultipleModifications, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathSt ... /Root 2025-03-31T16:34:22.768438Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976722817. Ctx: { TraceId: 01jqpgvpd60db8ckgr4qrwez8c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDJiMjRmODAtOWJiMzAxZmUtNzc4ZDNjOWUtNTFiMDk5OTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:22.770173Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976722818. 
Ctx: { TraceId: 01jqpgvpd682w9ndgyphy6phpp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmZiNGZiZGQtZGYyNDVkNGUtYmNhODYzZjktNDM1MTEzZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:22.775785Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976722819. Ctx: { TraceId: 01jqpgvpd9f8d6ms2n6xpgy4f5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmU1YzUyYzUtNTBmN2VkZDktYTk2ODUxYWQtMmMzOWNiZTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:22.783489Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976722820. Ctx: { TraceId: 01jqpgvpdbda75qzs8jk1vcgyx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmFiYTk1MDMtOThlYTk0ZTEtMzAzZTZkMTQtZDBiM2M5YTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:22.791716Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976722821. Ctx: { TraceId: 01jqpgvpdt4ak8p3ggd85vakz6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWYzYmRiZDctMjhmODI4YTEtNGQ4MDhhNzQtNTA1OTg1NTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:22.797391Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976722822. Ctx: { TraceId: 01jqpgvpdzakyw59gdnxw8w4ba, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2E0MTQxNTktNTM0NDQyMWUtODllYWVhZDMtNWExNGVlMTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:22.798038Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976722823. Ctx: { TraceId: 01jqpgvpdvepfezajnc4nppywk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Nzg5MDZiYzQtOTEyYTdkMy1kMGQxZjNlYS1iY2U0YjBiOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:22.799542Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976722824. Ctx: { TraceId: 01jqpgvpe6dh7mr6geq22rk76d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGM2Nzc0OTQtZDJkOTA4MzEtM2NlYjIyNmQtZjU2Yzk5ZGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:22.799977Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976722826. Ctx: { TraceId: 01jqpgvpe3efhqqmyrez3pwx01, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmZiNGZiZGQtZGYyNDVkNGUtYmNhODYzZjktNDM1MTEzZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:22.801402Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976722825. Ctx: { TraceId: 01jqpgvpe8d37pnfhhnaesq682, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmQ2YzZhNjAtNTU0ZWE1MzMtNGUxNDQ1YTQtNWI0MmI4MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:22.804366Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976722827. Ctx: { TraceId: 01jqpgvpejdgkva7ss124agw12, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDJiMjRmODAtOWJiMzAxZmUtNzc4ZDNjOWUtNTFiMDk5OTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:22.806828Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976722828. 
Ctx: { TraceId: 01jqpgvpej73rf653wefmkrp59, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmU1YzUyYzUtNTBmN2VkZDktYTk2ODUxYWQtMmMzOWNiZTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:22.810064Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976722829. Ctx: { TraceId: 01jqpgvpejey53mha0tv506h9g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTZlODVmOTMtZWViNDEyOC02M2VjZjdlOC00NzU0N2I1Zg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:22.814293Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976722830. Ctx: { TraceId: 01jqpgvper04zdqsa0rvay40ja, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWYzYmRiZDctMjhmODI4YTEtNGQ4MDhhNzQtNTA1OTg1NTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:22.815892Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976722831. Ctx: { TraceId: 01jqpgvpej2a3n1nmkh43sdqwh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmFiYTk1MDMtOThlYTk0ZTEtMzAzZTZkMTQtZDBiM2M5YTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:22.818288Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976722832. Ctx: { TraceId: 01jqpgvpey55wcs2tgf6phj7vf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmZiNGZiZGQtZGYyNDVkNGUtYmNhODYzZjktNDM1MTEzZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:22.820769Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976722833. Ctx: { TraceId: 01jqpgvpew1hm2qq7jh2f88qra, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Nzg5MDZiYzQtOTEyYTdkMy1kMGQxZjNlYS1iY2U0YjBiOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:22.822370Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976722835. Ctx: { TraceId: 01jqpgvpexad9kacq5zey6azzp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGM2Nzc0OTQtZDJkOTA4MzEtM2NlYjIyNmQtZjU2Yzk5ZGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:22.824453Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976722834. Ctx: { TraceId: 01jqpgvpex75v0vamp78jy3bjc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2E0MTQxNTktNTM0NDQyMWUtODllYWVhZDMtNWExNGVlMTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls request: /Root/Foo 2025-03-31T16:34:22.828641Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976722836. Ctx: { TraceId: 01jqpgvpez1bfj31fxa5b50v4a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmQ2YzZhNjAtNTU0ZWE1MzMtNGUxNDQ1YTQtNWI0MmI4MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743438842600 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) 2025-03-31T16:34:22.831415Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976722837. Ctx: { TraceId: 01jqpgvpf3524y1v30e2968yvh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDJiMjRmODAtOWJiMzAxZmUtNzc4ZDNjOWUtNTFiMDk5OTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:22.834006Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976722838. Ctx: { TraceId: 01jqpgvpf36gwqhywjjyec80v0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmU1YzUyYzUtNTBmN2VkZDktYTk2ODUxYWQtMmMzOWNiZTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:22.835691Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976722839. Ctx: { TraceId: 01jqpgvpf793ht9w5s66hrzh30, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTZlODVmOTMtZWViNDEyOC02M2VjZjdlOC00NzU0N2I1Zg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:22.843032Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976722840. Ctx: { TraceId: 01jqpgvpfb8hj3p88cwzdvq963, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWYzYmRiZDctMjhmODI4YTEtNGQ4MDhhNzQtNTA1OTg1NTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:22.844958Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976722841. Ctx: { TraceId: 01jqpgvpfb66d89s40yh2j61gj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmFiYTk1MDMtOThlYTk0ZTEtMzAzZTZkMTQtZDBiM2M5YTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:22.848430Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976722842. Ctx: { TraceId: 01jqpgvpfkentmfw4ykntfhmj3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmZiNGZiZGQtZGYyNDVkNGUtYmNhODYzZjktNDM1MTEzZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743438842600 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) 2025-03-31T16:34:23.821347Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037890 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 256734 rowCount 3833 cpuUsage 0 2025-03-31T16:34:23.821411Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037889 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 216579 rowCount 3314 cpuUsage 0 Table has 2 shards 2025-03-31T16:34:23.923175Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 2 2025-03-31T16:34:23.923329Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037890 followerId=0, pathId 2: RowCount 3833, DataSize 256734 2025-03-31T16:34:23.923462Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037889 followerId=0, pathId 2: RowCount 3314, DataSize 216579 2025-03-31T16:34:23.925904Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 >> KqpProxy::NodeDisconnectedTest [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestCreateCleanManyTables [GOOD] >> TSchemeshardBackgroundCleaningTest::CreateTableInTemp ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> KqpProxy::DatabasesCacheForServerless [GOOD] Test command err: 2025-03-31T16:33:59.209058Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012799304362185:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:59.209092Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:33:59.238323Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012797652339207:2210];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:59.246361Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488012796945511973:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:59.246515Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:33:59.283628Z node 4 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488012799760083753:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:59.283688Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:33:59.348856Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7488012798769115665:2204];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:59.348892Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ca9/r3tmp/tmpcY9DWK/pdisk_1.dat 2025-03-31T16:33:59.946514Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:34:00.268326Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:34:00.270956Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:34:00.286289Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:34:00.356143Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:34:00.365363Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:00.365492Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:00.366607Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:00.366676Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:00.366955Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:00.367056Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:00.368740Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:00.368826Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:00.368952Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:00.368992Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:00.373293Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-03-31T16:34:00.373339Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 
Cookie 2 2025-03-31T16:34:00.373411Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2025-03-31T16:34:00.373424Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-03-31T16:34:00.373542Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:34:00.374943Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:34:00.375363Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:34:00.375695Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:34:00.375911Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:34:00.380105Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:3389 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:34:00.803778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:03.931837Z node 3 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-03-31T16:34:03.944745Z node 3 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-03-31T16:34:03.946658Z node 3 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-03-31T16:34:03.946696Z node 3 :KQP_PROXY DEBUG: Updated table service config. 2025-03-31T16:34:03.946724Z node 3 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-03-31T16:34:03.946761Z node 3 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-03-31T16:34:03.946843Z node 3 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T16:34:03.946916Z node 3 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T16:34:03.946988Z node 3 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T16:34:03.947509Z node 3 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T16:34:03.953403Z node 3 :KQP_PROXY DEBUG: Table script_executions updater. 
Describe result: PathErrorUnknown 2025-03-31T16:34:03.953424Z node 3 :KQP_PROXY NOTICE: Table script_executions updater. Creating table 2025-03-31T16:34:03.953469Z node 3 :KQP_PROXY DEBUG: Table script_executions updater. Full table path:/dc-1/.metadata/script_executions 2025-03-31T16:34:03.953664Z node 3 :KQP_PROXY DEBUG: Table result_sets updater. Describe result: PathErrorUnknown 2025-03-31T16:34:03.953674Z node 3 :KQP_PROXY NOTICE: Table result_sets updater. Creating table 2025-03-31T16:34:03.953690Z node 3 :KQP_PROXY DEBUG: Table result_sets updater. Full table path:/dc-1/.metadata/result_sets 2025-03-31T16:34:03.955104Z node 3 :KQP_PROXY DEBUG: Table script_execution_leases updater. Describe result: PathErrorUnknown 2025-03-31T16:34:03.955122Z node 3 :KQP_PROXY NOTICE: Table script_execution_leases updater. Creating table 2025-03-31T16:34:03.955162Z node 3 :KQP_PROXY DEBUG: Table script_execution_leases updater. Full table path:/dc-1/.metadata/script_execution_leases 2025-03-31T16:34:03.958081Z node 5 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-03-31T16:34:03.963049Z node 5 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-03-31T16:34:03.966515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725657:1, at schemeshard: 72057594046644480 2025-03-31T16:34:03.967969Z node 5 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-03-31T16:34:03.968011Z node 5 :KQP_PROXY DEBUG: Updated table service config. 2025-03-31T16:34:03.968028Z node 5 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-03-31T16:34:03.968065Z node 5 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-03-31T16:34:03.968333Z node 5 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T16:34:03.968624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725658:0, at schemeshard: 72057594046644480 2025-03-31T16:34:03.979397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725659:0, at schemeshard: 72057594046644480 2025-03-31T16:34:03.970336Z node 5 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T16:34:03.975263Z node 5 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T16:34:03.975308Z node 5 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T16:34:03.982091Z node 5 :KQP_PROXY DEBUG: Table script_execution_leases updater. Describe result: PathErrorUnknown 2025-03-31T16:34:03.982123Z node 5 :KQP_PROXY NOTICE: Table script_execution_leases updater. Creating table 2025-03-31T16:34:03.982173Z node 5 :KQP_PROXY DEBUG: Table script_execution_leases updater. Full table path:/dc-1/.metadata/script_execution_leases 2025-03-31T16:34:03.983275Z node 2 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-03-31T16:34:03.985915Z node 5 :KQP_PROXY DEBUG: Table script_executions updater. Describe result: PathErrorUnknown 2025-03-31T16:34:03.985931Z node 5 :KQP_PROXY NOTICE: Table script_executions updater. 
Creating table 2025-03-31T16:34:03.985974Z node 5 :KQP_PROXY DEBUG: Table script_executions updater. Full table path:/dc-1/.metadata/scri ... e] [Service] Cleanup completed, tables exists: 0 2025-03-31T16:34:12.406242Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:34:12.511772Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:12.511862Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:12.517638Z node 6 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 8 Cookie 8 2025-03-31T16:34:12.520165Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:34:12.678452Z node 8 :HIVE WARN: HIVE#72075186224037888 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:12.678596Z node 8 :HIVE WARN: HIVE#72075186224037888 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:12.678870Z node 8 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:34:12.679059Z node 8 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:34:12.679216Z node 8 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:34:12.679366Z node 8 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:34:12.679598Z node 8 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:34:12.679666Z node 8 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:34:12.679773Z node 8 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:34:12.679867Z node 8 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:34:12.679980Z node 8 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:34:12.683832Z node 8 :HIVE WARN: HIVE#72075186224037888 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:34:12.832742Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:34:12.843251Z node 8 :STATISTICS WARN: [72075186224037894] TTxInit::Complete. 
EnableColumnStatistics=false 2025-03-31T16:34:12.972487Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:34:12.996248Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7488012855085889161:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:12.997095Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/test-shared/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:34:13.033848Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:13.034020Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:13.040305Z node 6 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 7 Cookie 7 2025-03-31T16:34:13.042797Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:34:13.095772Z node 7 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:34:13.096025Z node 7 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:34:13.096099Z node 7 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:34:13.096190Z node 7 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:34:13.096300Z node 7 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:34:13.096374Z node 7 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:34:13.096482Z node 7 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:34:13.096566Z node 7 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:34:13.096646Z node 7 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:34:13.133364Z node 7 :HIVE WARN: HIVE#72075186224038889 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:13.133483Z node 7 :HIVE WARN: HIVE#72075186224038889 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:13.143034Z node 7 :HIVE WARN: HIVE#72075186224038889 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:34:13.346756Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:34:13.349277Z node 7 :STATISTICS WARN: [72075186224038895] TTxInit::Complete. 
EnableColumnStatistics=false 2025-03-31T16:34:13.359875Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7488012836660903931:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:13.360039Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:34:13.521712Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:34:13.673208Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:34:13.749922Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [7:7488012859380857405:2537], Database: /Root/test-serverless, Start database fetching 2025-03-31T16:34:13.750101Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [7:7488012859380857405:2537], Database: /Root/test-serverless, Database info successfully fetched, serverless: 1 2025-03-31T16:34:16.848677Z node 8 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-03-31T16:34:16.848936Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [8:7488012871139057603:2346], Start check tables existence, number paths: 2 2025-03-31T16:34:16.849404Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-03-31T16:34:16.849427Z node 8 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools were enabled 2025-03-31T16:34:16.852804Z node 8 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, node count: 3 2025-03-31T16:34:16.852893Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [8:7488012871139057603:2346], Describe table /Root/test-dedicated/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-03-31T16:34:16.852965Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [8:7488012871139057603:2346], Describe table /Root/test-dedicated/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-03-31T16:34:16.853017Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [8:7488012871139057603:2346], Successfully finished 2025-03-31T16:34:16.853076Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-03-31T16:34:17.177050Z node 7 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-03-31T16:34:17.177113Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [7:7488012876560726703:2372], Start check tables existence, number paths: 2 2025-03-31T16:34:17.177352Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-03-31T16:34:17.177374Z node 7 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools were enabled 2025-03-31T16:34:17.183359Z node 7 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, node count: 3 2025-03-31T16:34:17.183501Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [7:7488012876560726703:2372], Describe table /Root/test-shared/.metadata/workload_manager/delayed_requests
status PathErrorUnknown 2025-03-31T16:34:17.183589Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [7:7488012876560726703:2372], Describe table /Root/test-shared/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-03-31T16:34:17.183642Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [7:7488012876560726703:2372], Successfully finished 2025-03-31T16:34:17.183843Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-03-31T16:34:17.996557Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7488012855085889161:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:17.996681Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/test-shared/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:34:23.704416Z node 6 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:34:23.704456Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:34:23.759577Z node 6 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 7 2025-03-31T16:34:23.760091Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-03-31T16:34:23.760270Z node 6 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 8 2025-03-31T16:34:23.760428Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-03-31T16:34:23.760664Z node 8 :HIVE WARN: HIVE#72075186224037888 THive::TTxStatus(status=2 node=Connected) - killing node 8 2025-03-31T16:34:23.799877Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=NjQwNzk0ODItN2JlYTVkNTYtNDIwNmY2YzItZTllMWMwOGM=, ActorId: [6:7488012853840773946:2334], ActorState: ReadyState, Session closed due to explicit close event 2025-03-31T16:34:23.799923Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=NjQwNzk0ODItN2JlYTVkNTYtNDIwNmY2YzItZTllMWMwOGM=, ActorId: [6:7488012853840773946:2334], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-03-31T16:34:23.799952Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=NjQwNzk0ODItN2JlYTVkNTYtNDIwNmY2YzItZTllMWMwOGM=, ActorId: [6:7488012853840773946:2334], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-03-31T16:34:23.799976Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=NjQwNzk0ODItN2JlYTVkNTYtNDIwNmY2YzItZTllMWMwOGM=, ActorId: [6:7488012853840773946:2334], ActorState: unknown state, Cleanup temp tables: 0 2025-03-31T16:34:23.800033Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=NjQwNzk0ODItN2JlYTVkNTYtNDIwNmY2YzItZTllMWMwOGM=, ActorId: [6:7488012853840773946:2334], ActorState: unknown state, Session actor destroyed ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> BasicUsage::BrokenCredentialsProvider [GOOD] Test command err: 2025-03-31T16:33:44.357754Z :MaxByteSizeEqualZero INFO: Random seed for debugging is 1743438824357725 2025-03-31T16:33:44.686029Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012733009744987:2072];send_to=[0:7307199536658146131:7762515]; 
2025-03-31T16:33:44.686235Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:33:44.724573Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012731675150135:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:44.724772Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:33:44.873938Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-31T16:33:44.874151Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0011af/r3tmp/tmpEPEwK4/pdisk_1.dat 2025-03-31T16:33:45.114522Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:33:45.126389Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:45.126476Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:45.127468Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:45.127534Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:45.140263Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:33:45.140418Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:33:45.141074Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13008, node 1 2025-03-31T16:33:45.259032Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/0l5c/0011af/r3tmp/yandexNXkxjG.tmp 2025-03-31T16:33:45.259108Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/0l5c/0011af/r3tmp/yandexNXkxjG.tmp 2025-03-31T16:33:45.259840Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/0l5c/0011af/r3tmp/yandexNXkxjG.tmp 2025-03-31T16:33:45.260007Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:33:45.456623Z INFO: TTestServer started on Port 25274 GrpcPort 13008 TClient is connected to server localhost:25274 PQClient connected to localhost:13008 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:33:45.715851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-03-31T16:33:47.658050Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012745894647888:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:47.658209Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012745894647900:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:47.658230Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:47.661748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-03-31T16:33:47.664884Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012745894647936:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:47.664953Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:47.682637Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012745894647902:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-03-31T16:33:47.957163Z node 1 :TX_PROXY ERROR: Actor# [1:7488012745894647978:2668] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:33:47.986627Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7488012744560052363:2314], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:33:47.986872Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZjllNzM0MzktOGY5N2FjZWItNDRiZTVmNWYtN2U2N2NhOWE=, ActorId: [2:7488012744560052335:2307], ActorState: ExecuteState, TraceId: 01jqpgtm6m32d0wbhzrh6ana15, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:33:47.987579Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488012745894648003:2348], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:33:47.988682Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YWUxMzE3NzQtMmJhMjdkYmUtYzM4ODk5OC1lZmUzYWYzNg==, ActorId: [1:7488012745894647885:2335], ActorState: ExecuteState, TraceId: 01jqpgtm36588ymhdqsznkxqr0, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:33:47.988652Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T16:33:47.988992Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T16:33:48.042622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:33:48.178730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:33:48.297879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:13008", true, true, 1000); 2025-03-31T16:33:48.587373Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jqpgtmtd48aknfjhasxmhjdh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Mjc4ZjUxMi02NGIyMTkyLTdkZjUxNTM4LWI4YTNlNDhh, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. 
Subscribe to ClusterTracker from [1:7488012750189615700:2971] 2025-03-31T16:33:49.685934Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488012733009744987:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:49.686012Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:33:49.724313Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488012731675150135:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:49.724390Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-03-31T16:33:54.460237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 waiting... PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:13008 M ... T16:34:24.273005Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: describe result for acl check 2025-03-31T16:34:24.273192Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2025-03-31T16:34:24.273228Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-03-31T16:34:24.273245Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2025-03-31T16:34:24.273269Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7488012905303613270:2523] (SourceId=src, PreferedPartition=(NULL)) StartKqpSession 2025-03-31T16:34:24.276426Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7488012905303613270:2523] (SourceId=src, PreferedPartition=(NULL)) Select from the table 2025-03-31T16:34:24.450522Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7488012905303613270:2523] (SourceId=src, PreferedPartition=(NULL)) RequestPQRB 2025-03-31T16:34:24.450899Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [5:7488012905303613320:2523] connected; active server actors: 1 2025-03-31T16:34:24.451008Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7488012905303613270:2523] (SourceId=src, PreferedPartition=(NULL)) Received partition 0 from PQRB for SourceId=src 2025-03-31T16:34:24.451030Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser
[5:7488012905303613270:2523] (SourceId=src, PreferedPartition=(NULL)) Update the table 2025-03-31T16:34:24.453256Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [5:7488012905303613320:2523] disconnected; active server actors: 1 2025-03-31T16:34:24.453293Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [5:7488012905303613320:2523] disconnected no session 2025-03-31T16:34:24.579434Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [5:7488012905303613346:2523], now have 1 active actors on pipe 2025-03-31T16:34:24.579773Z node 6 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-03-31T16:34:24.579814Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-03-31T16:34:24.579905Z node 6 :PERSQUEUE INFO: new Cookie src|e7eff394-e72b00d0-aac983d0-aad870e5_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2025-03-31T16:34:24.580014Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2025-03-31T16:34:24.580106Z node 6 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-03-31T16:34:24.580813Z node 6 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-03-31T16:34:24.580856Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-03-31T16:34:24.580959Z node 6 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-03-31T16:34:24.582578Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1743438864582 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-31T16:34:24.582717Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|e7eff394-e72b00d0-aac983d0-aad870e5_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2025-03-31T16:34:24.583017Z :INFO: [] MessageGroupId [src] SessionId [src|e7eff394-e72b00d0-aac983d0-aad870e5_0] Write session: close. Timeout = 0 ms 2025-03-31T16:34:24.583064Z :INFO: [] MessageGroupId [src] SessionId [src|e7eff394-e72b00d0-aac983d0-aad870e5_0] Write session will now close 2025-03-31T16:34:24.583121Z :DEBUG: [] MessageGroupId [src] SessionId [src|e7eff394-e72b00d0-aac983d0-aad870e5_0] Write session: aborting 2025-03-31T16:34:24.578120Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7488012905303613270:2523] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2025-03-31T16:34:24.578154Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7488012905303613270:2523] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2025-03-31T16:34:24.578171Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7488012905303613270:2523] (SourceId=src, PreferedPartition=(NULL)) Start idle 2025-03-31T16:34:24.578198Z node 5 :PQ_WRITE_PROXY DEBUG: ProceedPartition. 
session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2025-03-31T16:34:24.579597Z node 5 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 6, Generation: 1 2025-03-31T16:34:24.581474Z node 5 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|e7eff394-e72b00d0-aac983d0-aad870e5_0 2025-03-31T16:34:24.583837Z :INFO: [] MessageGroupId [src] SessionId [src|e7eff394-e72b00d0-aac983d0-aad870e5_0] Write session: gracefully shut down, all writes complete 2025-03-31T16:34:24.583881Z :DEBUG: [] MessageGroupId [src] SessionId [src|e7eff394-e72b00d0-aac983d0-aad870e5_0] Write session is aborting and will not restart 2025-03-31T16:34:24.583943Z :DEBUG: [] MessageGroupId [src] SessionId [src|e7eff394-e72b00d0-aac983d0-aad870e5_0] Write session: destroy 2025-03-31T16:34:24.584072Z node 5 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|e7eff394-e72b00d0-aac983d0-aad870e5_0 grpc read done: success: 0 data: 2025-03-31T16:34:24.584101Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|e7eff394-e72b00d0-aac983d0-aad870e5_0 grpc read failed 2025-03-31T16:34:24.584562Z node 5 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 1 sessionId: src|e7eff394-e72b00d0-aac983d0-aad870e5_0 2025-03-31T16:34:24.584593Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|e7eff394-e72b00d0-aac983d0-aad870e5_0 is DEAD 2025-03-31T16:34:24.585063Z node 5 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-03-31T16:34:24.585946Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [5:7488012905303613346:2523] destroyed 2025-03-31T16:34:24.586049Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 2025-03-31T16:34:24.605318Z :INFO: [/Root] [/Root] [5fcd70b6-e4d216ed-887ba600-96fd322c] Starting read session 2025-03-31T16:34:24.605363Z :DEBUG: [/Root] [/Root] [5fcd70b6-e4d216ed-887ba600-96fd322c] Starting session to cluster null (localhost:17356) 2025-03-31T16:34:24.606840Z :DEBUG: [/Root] [/Root] [5fcd70b6-e4d216ed-887ba600-96fd322c] [null] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:34:24.606885Z :DEBUG: [/Root] [/Root] [5fcd70b6-e4d216ed-887ba600-96fd322c] [null] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:34:24.606955Z :DEBUG: [/Root] [/Root] [5fcd70b6-e4d216ed-887ba600-96fd322c] [null] Reconnecting session to cluster null in 0.000000s 2025-03-31T16:34:24.608097Z :ERROR: [/Root] [/Root] [5fcd70b6-e4d216ed-887ba600-96fd322c] [null] Got error. Status: CLIENT_UNAUTHENTICATED. Description:
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/src/client/persqueue_public/ut/basic_usage_ut.cpp:451: exception during creation 2025-03-31T16:34:24.608150Z :DEBUG: [/Root] [/Root] [5fcd70b6-e4d216ed-887ba600-96fd322c] [null] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:34:24.608191Z :DEBUG: [/Root] [/Root] [5fcd70b6-e4d216ed-887ba600-96fd322c] [null] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-31T16:34:24.608311Z :INFO: [/Root] [/Root] [5fcd70b6-e4d216ed-887ba600-96fd322c] [null] Closing session to cluster: SessionClosed { Status: CLIENT_UNAUTHENTICATED Issues: "
: Error: Failed to establish connection to server "" ( cluster null). Attempts done: 1
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/src/client/persqueue_public/ut/basic_usage_ut.cpp:451: exception during creation " } Get event on client 2025-03-31T16:34:24.608488Z :NOTICE: [/Root] [/Root] [5fcd70b6-e4d216ed-887ba600-96fd322c] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-03-31T16:34:24.608519Z :DEBUG: [/Root] [/Root] [5fcd70b6-e4d216ed-887ba600-96fd322c] [null] Abort session to cluster Got close event: SessionClosed { Status: CLIENT_UNAUTHENTICATED Issues: "
: Error: Failed to establish connection to server "" ( cluster null). Attempts done: 1
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/src/client/persqueue_public/ut/basic_usage_ut.cpp:451: exception during creation " }2025-03-31T16:34:24.608579Z :INFO: [/Root] [/Root] [5fcd70b6-e4d216ed-887ba600-96fd322c] Closing read session. Close timeout: 0.000000s 2025-03-31T16:34:24.608607Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-03-31T16:34:24.608640Z :INFO: [/Root] [/Root] [5fcd70b6-e4d216ed-887ba600-96fd322c] Counters: { Errors: 1 CurrentSessionLifetimeMs: 3 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-31T16:34:24.608700Z :NOTICE: [/Root] [/Root] [5fcd70b6-e4d216ed-887ba600-96fd322c] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-03-31T16:34:24.875834Z node 5 :KQP_COMPUTE WARN: TxId: 281474976715689, task: 1, CA Id [5:7488012905303613373:2542]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 0 2025-03-31T16:34:24.909618Z node 5 :KQP_COMPUTE WARN: TxId: 281474976715689, task: 1, CA Id [5:7488012905303613373:2542]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-03-31T16:34:24.962457Z node 5 :KQP_COMPUTE WARN: TxId: 281474976715689, task: 1, CA Id [5:7488012905303613373:2542]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-03-31T16:34:25.017230Z node 5 :KQP_COMPUTE WARN: TxId: 281474976715689, task: 1, CA Id [5:7488012905303613373:2542]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-03-31T16:34:25.118136Z node 5 :KQP_COMPUTE WARN: TxId: 281474976715689, task: 1, CA Id [5:7488012905303613373:2542]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-03-31T16:34:25.261845Z node 5 :KQP_COMPUTE WARN: TxId: 281474976715689, task: 1, CA Id [5:7488012905303613373:2542]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpKv::ReadRows_PgKey [GOOD] Test command err: Trying to start YDB, gRPC: 29352, MsgBus: 19086 2025-03-31T16:33:49.730336Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012756100023829:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:49.730404Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00085b/r3tmp/tmppbXyV7/pdisk_1.dat 2025-03-31T16:33:50.013147Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29352, node 1 2025-03-31T16:33:50.081110Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:50.081216Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:50.081745Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:50.081781Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:50.081792Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:50.081924Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:33:50.082939Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19086 TClient is connected to server localhost:19086 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:33:50.504106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:52.286495Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012768984926371:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:52.286626Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:52.491125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 IsSuccess(): 1 GetStatus(): SUCCESS Trying to start YDB, gRPC: 14499, MsgBus: 21918 2025-03-31T16:33:53.228351Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012772951365756:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:53.228470Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00085b/r3tmp/tmp8XLxPC/pdisk_1.dat 2025-03-31T16:33:53.311511Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14499, node 2 2025-03-31T16:33:53.358901Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:53.358995Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:53.360445Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:33:53.375830Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:53.375848Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:53.375854Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:53.375937Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21918 TClient is connected to server localhost:21918 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-31T16:33:53.761008Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:33:56.126653Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012785836268300:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:56.126714Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:56.144203Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:33:56.197576Z node 2 :RPC_REQUEST ERROR: TReadRowsRPC ReplyWithError: Unknown table '/Root/WrongTable' Trying to start YDB, gRPC: 3697, MsgBus: 18571 2025-03-31T16:33:56.961671Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488012784797878868:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:56.961740Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00085b/r3tmp/tmpK5MwRw/pdisk_1.dat 2025-03-31T16:33:57.057748Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3697, node 3 2025-03-31T16:33:57.091393Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:57.091481Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:57.093346Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:33:57.119661Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:57.119683Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:57.119692Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:57.119800Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18571 TClient is connected to server localhost:18571 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:33:57.494126Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:59.876980Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488012797682781403:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:59.877085Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:59.893246Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 IsSuccess(): 1 GetStatus(): SUCCESS [] IsSuccess(): 1 GetStatus(): SUCCESS 2025-03-31T16:34:00.019723Z node 3 :RPC_REQUEST ERROR: TReadRowsRPC ReplyWithError: no keys are found in request's proto Trying to start YDB, gRPC: 20770, MsgBus: 3635 2025-03-31T16:34:00.780631Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488012801691794655:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:00.780711Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp ... unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710722:0, at schemeshard: 72057594046644480 2025-03-31T16:34:17.832760Z node 6 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 6, TabletId: 72075186224037920 not found 2025-03-31T16:34:17.836266Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710724:0, at schemeshard: 72057594046644480 2025-03-31T16:34:17.918710Z node 6 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 6, TabletId: 72075186224037921 not found 2025-03-31T16:34:17.924201Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710726:0, at schemeshard: 72057594046644480 2025-03-31T16:34:18.038217Z node 6 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 6, TabletId: 72075186224037922 not found 2025-03-31T16:34:18.047309Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710728:0, at schemeshard: 72057594046644480 2025-03-31T16:34:18.149725Z node 6 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 6, TabletId: 72075186224037923 not found 2025-03-31T16:34:18.154732Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710730:0, at schemeshard: 72057594046644480 2025-03-31T16:34:18.242145Z node 6 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 6, TabletId: 72075186224037924 not found 2025-03-31T16:34:18.248191Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710732:0, at schemeshard: 72057594046644480 2025-03-31T16:34:18.343405Z node 6 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 6, TabletId: 72075186224037925 not found 2025-03-31T16:34:18.351377Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710734:0, at schemeshard: 72057594046644480 2025-03-31T16:34:18.447264Z node 6 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 6, TabletId: 72075186224037926 not found 2025-03-31T16:34:18.452266Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, 
opId: 281474976710736:0, at schemeshard: 72057594046644480 2025-03-31T16:34:18.538370Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710738:0, at schemeshard: 72057594046644480 2025-03-31T16:34:18.541580Z node 6 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 6, TabletId: 72075186224037927 not found 2025-03-31T16:34:18.664139Z node 6 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 6, TabletId: 72075186224037928 not found 2025-03-31T16:34:18.668315Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710740:0, at schemeshard: 72057594046644480 2025-03-31T16:34:18.764265Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710742:0, at schemeshard: 72057594046644480 2025-03-31T16:34:18.770351Z node 6 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 6, TabletId: 72075186224037929 not found 2025-03-31T16:34:18.884363Z node 6 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 6, TabletId: 72075186224037930 not found 2025-03-31T16:34:18.888868Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710744:0, at schemeshard: 72057594046644480 2025-03-31T16:34:18.980622Z node 6 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 6, TabletId: 72075186224037931 not found 2025-03-31T16:34:18.985597Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710746:0, at schemeshard: 72057594046644480 2025-03-31T16:34:19.090519Z node 6 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 6, TabletId: 72075186224037932 not found 2025-03-31T16:34:19.093990Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710748:0, at schemeshard: 72057594046644480 2025-03-31T16:34:19.180696Z node 6 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 6, TabletId: 72075186224037933 not found 2025-03-31T16:34:19.182863Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710750:0, at schemeshard: 72057594046644480 2025-03-31T16:34:19.305701Z node 6 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 6, TabletId: 72075186224037934 not found 2025-03-31T16:34:19.311373Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710752:0, at schemeshard: 72057594046644480 2025-03-31T16:34:19.436669Z node 6 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 6, TabletId: 72075186224037935 not found 2025-03-31T16:34:19.439837Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710754:0, at schemeshard: 72057594046644480 2025-03-31T16:34:19.530530Z node 6 :HIVE WARN: HIVE#72057594037968897 Handle 
TEvLocal::TEvTabletStatus from node 6, TabletId: 72075186224037936 not found 2025-03-31T16:34:19.534708Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710756:0, at schemeshard: 72057594046644480 2025-03-31T16:34:19.626724Z node 6 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 6, TabletId: 72075186224037937 not found 2025-03-31T16:34:19.630268Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710758:0, at schemeshard: 72057594046644480 2025-03-31T16:34:19.728715Z node 6 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 6, TabletId: 72075186224037938 not found 2025-03-31T16:34:19.732648Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710760:0, at schemeshard: 72057594046644480 2025-03-31T16:34:19.831643Z node 6 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 6, TabletId: 72075186224037939 not found Trying to start YDB, gRPC: 20407, MsgBus: 14806 2025-03-31T16:34:20.884876Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7488012888331098615:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:20.884954Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00085b/r3tmp/tmptvsrNQ/pdisk_1.dat 2025-03-31T16:34:21.014367Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:34:21.053714Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:21.053825Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:21.059272Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20407, node 7 2025-03-31T16:34:21.122502Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:21.122529Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:21.122541Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:21.122708Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14806 TClient is connected to server localhost:14806 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:34:21.801223Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:25.351658Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:34:25.503686Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037888 not found 2025-03-31T16:34:25.517618Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-31T16:34:25.631028Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037889 not found 2025-03-31T16:34:25.640952Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:34:25.743439Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037890 not found 2025-03-31T16:34:25.885249Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7488012888331098615:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:25.885346Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> KqpProxy::NodeDisconnectedTest [GOOD] Test command err: 2025-03-31T16:34:12.386920Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012856086068979:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:12.387159Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000c72/r3tmp/tmpdu8QHc/pdisk_1.dat 2025-03-31T16:34:12.845591Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:12.845671Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:12.848222Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:34:12.849805Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:23258 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:34:13.155212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:13.173721Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:34:15.393700Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-03-31T16:34:15.396719Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-03-31T16:34:15.404976Z node 1 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=1&id=N2Q5YTIyZWMtY2Q1ZDcwOGYtYTE2ODBlZGUtYjFiYjY1MDY=, workerId: [1:7488012868970971478:2310], database: , longSession: 0, local sessions count: 1 2025-03-31T16:34:15.405030Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-03-31T16:34:15.405244Z node 1 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: , DatabaseId: , SessionId: ydb://session/3?node_id=1&id=N2Q5YTIyZWMtY2Q1ZDcwOGYtYTE2ODBlZGUtYjFiYjY1MDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 0.010000s timeout: 0.010000s cancelAfter: 0.000000s. Send request to target, requestId: 2, targetId: [1:7488012868970971478:2310] 2025-03-31T16:34:15.405276Z node 1 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 2 timeout: 0.010000s actor id: [0:0:0] 2025-03-31T16:34:15.405338Z node 1 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-03-31T16:34:15.405389Z node 1 :KQP_PROXY DEBUG: Updated table service config. 
2025-03-31T16:34:15.405419Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-03-31T16:34:15.405462Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-03-31T16:34:15.405531Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T16:34:15.405589Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T16:34:15.405744Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T16:34:15.406009Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=N2Q5YTIyZWMtY2Q1ZDcwOGYtYTE2ODBlZGUtYjFiYjY1MDY=, ActorId: [1:7488012868970971478:2310], ActorState: ReadyState, Reply query error, msg:
: Error: SomeUniqTextForUt proxyRequestId: 2 2025-03-31T16:34:15.406187Z node 1 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 2, sender: [1:7488012860381036830:2282], selfId: [1:7488012856086069217:2278], source: [1:7488012868970971478:2310] 2025-03-31T16:34:15.406749Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T16:34:15.406789Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T16:34:15.409324Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012868970971479:2311], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:15.409427Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:15.416197Z node 1 :KQP_PROXY DEBUG: Handle TEvPrivate::TEvOnRequestTimeout(2) 2025-03-31T16:34:15.416222Z node 1 :KQP_PROXY DEBUG: Invalid request info while on request timeout handle. RequestId: 2 2025-03-31T16:34:22.478437Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:698:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:34:22.478888Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:34:22.479277Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:34:22.479835Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:695:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:34:22.480247Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:34:22.480542Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000c72/r3tmp/tmpgNptul/pdisk_1.dat 2025-03-31T16:34:22.847572Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:13467 KQP PROXY1 [2:8678280833929343339:121] KQP PROXY2 [3:8678280833929343339:121] SENDER [2:1142:2688] 2025-03-31T16:34:23.200249Z node 3 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=3&id=M2JiYzZkNi0zOTc5NzZiNy1lZTBmNzEwMi02YjYwMWY4OQ==, workerId: [3:1143:2375], database: , longSession: 1, local sessions count: 1 2025-03-31T16:34:23.200453Z node 3 :KQP_PROXY DEBUG: Received create session request, trace_id: Created session ydb://session/3?node_id=3&id=M2JiYzZkNi0zOTc5NzZiNy1lZTBmNzEwMi02YjYwMWY4OQ== 2025-03-31T16:34:23.201251Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: , DatabaseId: , SessionId: ydb://session/3?node_id=3&id=M2JiYzZkNi0zOTc5NzZiNy1lZTBmNzEwMi02YjYwMWY4OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 0.001000s timeout: 0.001000s cancelAfter: 0.000000s. Send request to target, requestId: 2, targetId: [3:8678280833929343339:121] 2025-03-31T16:34:23.201336Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 2 timeout: 0.001000s actor id: [0:0:0] 2025-03-31T16:34:23.201981Z node 3 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: , DatabaseId: , SessionId: ydb://session/3?node_id=3&id=M2JiYzZkNi0zOTc5NzZiNy1lZTBmNzEwMi02YjYwMWY4OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 0.001000s timeout: 0.001000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [3:1143:2375] 2025-03-31T16:34:23.202044Z node 3 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 3 timeout: 0.001000s actor id: [0:0:0] 2025-03-31T16:34:23.558083Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1145:2376], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:23.558277Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1144:2689], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:23.558473Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:23.558638Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:23.582043Z node 3 :KQP_PROXY DEBUG: Handle TEvPrivate::TEvOnRequestTimeout(3) 2025-03-31T16:34:23.582172Z node 3 :KQP_PROXY DEBUG: Reply timeout: requestId 3 sessionId: ydb://session/3?node_id=3&id=M2JiYzZkNi0zOTc5NzZiNy1lZTBmNzEwMi02YjYwMWY4OQ== status: TIMEOUT round: 0 2025-03-31T16:34:23.582375Z node 2 :KQP_PROXY DEBUG: Handle TEvPrivate::TEvOnRequestTimeout(2) 2025-03-31T16:34:23.582413Z node 2 :KQP_PROXY DEBUG: Reply timeout: requestId 2 sessionId: ydb://session/3?node_id=3&id=M2JiYzZkNi0zOTc5NzZiNy1lZTBmNzEwMi02YjYwMWY4OQ== status: TIMEOUT round: 0 2025-03-31T16:34:23.582551Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 2, sender: [2:1142:2688], selfId: [2:206:2171], source: [2:206:2171] 2025-03-31T16:34:23.582741Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=M2JiYzZkNi0zOTc5NzZiNy1lZTBmNzEwMi02YjYwMWY4OQ==, ActorId: [3:1143:2375], ActorState: ExecuteState, TraceId: 01jqpgvpv2f6hjag67x971daxp, Create QueryResponse for error on request, msg: 2025-03-31T16:34:23.585797Z node 3 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 3, sender: [2:206:2171], selfId: [3:236:2127], source: [3:1143:2375] 2025-03-31T16:34:23.586063Z node 2 :KQP_PROXY ERROR: Unknown sender for proxy response, requestId: 2 2025-03-31T16:34:23.589162Z node 3 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=3&id=YTkyZGRiYWMtMzViNjQ4MTQtMzE4Mjc4MjQtZTU4ZjE1NTc=, workerId: [3:1166:2380], database: , longSession: 1, local sessions count: 2 2025-03-31T16:34:23.589357Z node 3 :KQP_PROXY D ... DEBUG: Created new session, sessionId: ydb://session/3?node_id=3&id=Njk1MmEzNzUtNmFkMjJiM2UtMzM4OWZmOWEtYWRkMTc1ZGM=, workerId: [3:1412:2518], database: , longSession: 1, local sessions count: 56 2025-03-31T16:34:25.192041Z node 3 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-03-31T16:34:25.192423Z node 2 :KQP_PROXY DEBUG: Received ping session request, request_id: 57, sender: [2:1142:2688], trace_id: 2025-03-31T16:34:25.192620Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 57 timeout: 0.001000s actor id: [0:0:0] 2025-03-31T16:34:25.203069Z node 2 :KQP_PROXY DEBUG: Handle TEvPrivate::TEvOnRequestTimeout(57) 2025-03-31T16:34:25.203164Z node 2 :KQP_PROXY DEBUG: Reply timeout: requestId 57 sessionId: ydb://session/3?node_id=3&id=Njk1MmEzNzUtNmFkMjJiM2UtMzM4OWZmOWEtYWRkMTc1ZGM= status: TIMEOUT round: 0 2025-03-31T16:34:25.203312Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 57, sender: [2:1142:2688], selfId: [2:206:2171], source: [2:206:2171] 2025-03-31T16:34:25.205217Z node 3 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=3&id=ODg2ZDEzZjUtN2QwMjc3NjYtNTlhOWZjNTUtODYxMzRmMzE=, workerId: [3:1413:2519], database: , longSession: 1, local sessions count: 57 2025-03-31T16:34:25.205385Z node 3 :KQP_PROXY DEBUG: Received create session request, trace_id: Created session ydb://session/3?node_id=3&id=ODg2ZDEzZjUtN2QwMjc3NjYtNTlhOWZjNTUtODYxMzRmMzE= 2025-03-31T16:34:25.205986Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: , DatabaseId: , SessionId: ydb://session/3?node_id=3&id=ODg2ZDEzZjUtN2QwMjc3NjYtNTlhOWZjNTUtODYxMzRmMzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 0.001000s timeout: 0.001000s cancelAfter: 0.000000s. 
Send request to target, requestId: 58, targetId: [3:8678280833929343339:121] 2025-03-31T16:34:25.206033Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 58 timeout: 0.001000s actor id: [0:0:0] 2025-03-31T16:34:25.206319Z node 3 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: , DatabaseId: , SessionId: ydb://session/3?node_id=3&id=ODg2ZDEzZjUtN2QwMjc3NjYtNTlhOWZjNTUtODYxMzRmMzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 0.001000s timeout: 0.001000s cancelAfter: 0.000000s. Send request to target, requestId: 87, targetId: [3:1413:2519] 2025-03-31T16:34:25.206377Z node 3 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 87 timeout: 0.001000s actor id: [0:0:0] 2025-03-31T16:34:25.234054Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1415:2520], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:25.234201Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1414:2751], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:25.234316Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:25.234358Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:25.244836Z node 3 :KQP_PROXY DEBUG: Handle TEvPrivate::TEvOnRequestTimeout(87) 2025-03-31T16:34:25.244910Z node 3 :KQP_PROXY DEBUG: Reply timeout: requestId 87 sessionId: ydb://session/3?node_id=3&id=ODg2ZDEzZjUtN2QwMjc3NjYtNTlhOWZjNTUtODYxMzRmMzE= status: TIMEOUT round: 0 2025-03-31T16:34:25.244982Z node 2 :KQP_PROXY DEBUG: Handle TEvPrivate::TEvOnRequestTimeout(58) 2025-03-31T16:34:25.245012Z node 2 :KQP_PROXY DEBUG: Reply timeout: requestId 58 sessionId: ydb://session/3?node_id=3&id=ODg2ZDEzZjUtN2QwMjc3NjYtNTlhOWZjNTUtODYxMzRmMzE= status: TIMEOUT round: 0 2025-03-31T16:34:25.245171Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ODg2ZDEzZjUtN2QwMjc3NjYtNTlhOWZjNTUtODYxMzRmMzE=, ActorId: [3:1413:2519], ActorState: ExecuteState, TraceId: 01jqpgvrsp86m1t44vser012eg, Create QueryResponse for error on request, msg: 2025-03-31T16:34:25.245269Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 58, sender: [2:1142:2688], selfId: [2:206:2171], source: [2:206:2171] 2025-03-31T16:34:25.246949Z node 3 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 87, sender: [2:206:2171], selfId: [3:236:2127], source: [3:1413:2519] 2025-03-31T16:34:25.247115Z node 2 :KQP_PROXY ERROR: Unknown sender for proxy response, requestId: 58 2025-03-31T16:34:25.248671Z node 3 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=3&id=YTc1YTM2ZDEtYzliMGI3M2EtYzAyZTFhNzgtMWY4YmZhYWE=, workerId: [3:1420:2523], database: , longSession: 1, local sessions count: 58 2025-03-31T16:34:25.248834Z node 3 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-03-31T16:34:25.249119Z node 2 :KQP_PROXY DEBUG: Received ping session request, request_id: 59, sender: [2:1142:2688], trace_id: 2025-03-31T16:34:25.249216Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 59 timeout: 0.001000s actor id: [0:0:0] 2025-03-31T16:34:25.249324Z node 3 :KQP_PROXY DEBUG: Received ping session request, has local session: ydb://session/3?node_id=3&id=YTc1YTM2ZDEtYzliMGI3M2EtYzAyZTFhNzgtMWY4YmZhYWE=, rpc ctrl: [0:0:0], sameNode: 0, trace_id: 2025-03-31T16:34:25.249427Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 59, sender: [2:1142:2688], selfId: [2:206:2171], source: [3:236:2127] 2025-03-31T16:34:25.250780Z node 3 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=3&id=ODIwZjNhNGYtYzEwNDJhNTItZTRhNmE3NjMtOTMwZGVhZTY=, workerId: [3:1421:2524], database: , longSession: 1, local sessions count: 59 2025-03-31T16:34:25.251017Z node 3 :KQP_PROXY DEBUG: Received create session request, trace_id: Created session ydb://session/3?node_id=3&id=ODIwZjNhNGYtYzEwNDJhNTItZTRhNmE3NjMtOTMwZGVhZTY= 2025-03-31T16:34:25.251631Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: , DatabaseId: , SessionId: ydb://session/3?node_id=3&id=ODIwZjNhNGYtYzEwNDJhNTItZTRhNmE3NjMtOTMwZGVhZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 0.001000s timeout: 0.001000s cancelAfter: 0.000000s. 
Send request to target, requestId: 60, targetId: [3:8678280833929343339:121] 2025-03-31T16:34:25.251684Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 60 timeout: 0.001000s actor id: [0:0:0] 2025-03-31T16:34:25.252249Z node 3 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: , DatabaseId: , SessionId: ydb://session/3?node_id=3&id=ODIwZjNhNGYtYzEwNDJhNTItZTRhNmE3NjMtOTMwZGVhZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 0.001000s timeout: 0.001000s cancelAfter: 0.000000s. Send request to target, requestId: 90, targetId: [3:1421:2524] 2025-03-31T16:34:25.252288Z node 3 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 90 timeout: 0.001000s actor id: [0:0:0] 2025-03-31T16:34:25.253543Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1422:2753], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:25.253739Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:25.278583Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1424:2525], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:25.278774Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:25.293154Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:25.293289Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:25.297685Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:25.297825Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:25.309047Z node 3 :KQP_PROXY DEBUG: Handle TEvPrivate::TEvOnRequestTimeout(90) 2025-03-31T16:34:25.309174Z node 3 :KQP_PROXY DEBUG: Reply timeout: requestId 90 sessionId: ydb://session/3?node_id=3&id=ODIwZjNhNGYtYzEwNDJhNTItZTRhNmE3NjMtOTMwZGVhZTY= status: TIMEOUT round: 0 2025-03-31T16:34:25.309329Z node 2 :KQP_PROXY DEBUG: Handle TEvPrivate::TEvOnRequestTimeout(59) 2025-03-31T16:34:25.309361Z node 2 :KQP_PROXY DEBUG: Invalid request info while on request timeout handle. RequestId: 59 2025-03-31T16:34:25.309416Z node 2 :KQP_PROXY DEBUG: Handle TEvPrivate::TEvOnRequestTimeout(60) 2025-03-31T16:34:25.309476Z node 2 :KQP_PROXY DEBUG: Reply timeout: requestId 60 sessionId: ydb://session/3?node_id=3&id=ODIwZjNhNGYtYzEwNDJhNTItZTRhNmE3NjMtOTMwZGVhZTY= status: TIMEOUT round: 0 2025-03-31T16:34:25.309621Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ODIwZjNhNGYtYzEwNDJhNTItZTRhNmE3NjMtOTMwZGVhZTY=, ActorId: [3:1421:2524], ActorState: ExecuteState, TraceId: 01jqpgvrv4dteh73nd8fwmcv48, Create QueryResponse for error on request, msg: 2025-03-31T16:34:25.311478Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 60, sender: [2:1142:2688], selfId: [2:206:2171], source: [2:206:2171] 2025-03-31T16:34:25.311698Z node 3 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 90, sender: [2:206:2171], selfId: [3:236:2127], source: [3:1421:2524] 2025-03-31T16:34:25.311863Z node 2 :KQP_PROXY ERROR: Unknown sender for proxy response, requestId: 60 2025-03-31T16:34:25.313699Z node 3 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=3&id=ZTBkNDQ2ZWYtN2RlOTg5ODItODQ0NzIzM2ItNWY1NGQzMjE=, workerId: [3:1441:2528], database: , longSession: 1, local sessions count: 60 2025-03-31T16:34:25.313852Z node 3 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-03-31T16:34:25.314307Z node 2 :KQP_PROXY DEBUG: Received ping session request, request_id: 61, sender: [2:1142:2688], trace_id: 2025-03-31T16:34:25.314415Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 61 timeout: 0.001000s actor id: [0:0:0] 2025-03-31T16:34:25.330196Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-03-31T16:34:25.330889Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:34:25.331357Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:34:25.341833Z node 2 :KQP_PROXY DEBUG: Handle TEvPrivate::TEvOnRequestTimeout(61) 2025-03-31T16:34:25.341912Z node 2 :KQP_PROXY DEBUG: Reply timeout: requestId 61 sessionId: ydb://session/3?node_id=3&id=ZTBkNDQ2ZWYtN2RlOTg5ODItODQ0NzIzM2ItNWY1NGQzMjE= status: TIMEOUT round: 0 2025-03-31T16:34:25.342059Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 61, sender: [2:1142:2688], selfId: [2:206:2171], source: 
[2:206:2171] >> KqpTx::CommitRoTx >> KqpSqlIn::KeyTypeMissmatch_Str [GOOD] >> KqpSqlIn::InWithCast >> DataShardReadTableSnapshots::ReadTableSplitNewTxIdResolveResultReorder >> KqpQueryService::TableSink_HtapComplex+withOltpSink >> KqpDocumentApi::RestrictWrite >> KqpQueryService::TableSink_OltpLiteralUpsert >> KqpQueryService::ExecuteCollectMeta >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite3-withSink [GOOD] >> KqpQueryService::TableSink_OlapUpdate >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldHandleDataShardReboot [GOOD] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldNotCompactAfterDrop >> KqpQueryService::ExecuteQueryPure ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite3-withSink [GOOD] Test command err: Trying to start YDB, gRPC: 25966, MsgBus: 9674 2025-03-31T16:34:17.385977Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012877050831668:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:17.386154Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00126d/r3tmp/tmpJxnszy/pdisk_1.dat 2025-03-31T16:34:17.854050Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:34:17.885897Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:17.886032Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:17.887918Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25966, node 1 2025-03-31T16:34:17.943460Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:17.943488Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:17.943499Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:17.943655Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9674 TClient is connected to server localhost:9674 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:34:18.494147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:18.516804Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:34:18.533770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:18.685214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:18.852774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:18.928736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:20.465383Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012889935735332:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:20.465518Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:20.786857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:34:20.814268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:34:20.845082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:34:20.872752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:34:20.900607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:34:20.930241Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:34:20.972815Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012889935735839:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:20.972897Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:20.972995Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012889935735844:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:20.976572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:34:21.002837Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012889935735846:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:34:21.069396Z node 1 :TX_PROXY ERROR: Actor# [1:7488012894230703197:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:34:22.386661Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488012877050831668:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:22.386841Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 21755, MsgBus: 16858 2025-03-31T16:34:23.477644Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012902160742583:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:23.477700Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00126d/r3tmp/tmpS55JjE/pdisk_1.dat 2025-03-31T16:34:23.585619Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:34:23.615049Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:23.615157Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 21755, node 2 2025-03-31T16:34:23.616781Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:34:23.659773Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:23.659795Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:23.659803Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:23.659916Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16858 TClient is connected to server localhost:16858 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:34:24.124324Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:24.132641Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:24.141263Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:34:24.228027Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:24.466175Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:24.557675Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:26.683780Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012915045646243:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:26.683878Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:26.736684Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:34:26.789459Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:34:26.831009Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:34:26.865992Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:34:26.902779Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:34:26.938554Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:34:26.990365Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012915045646753:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:26.990473Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:26.990790Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012915045646758:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:26.994029Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:34:27.005450Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488012915045646760:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:34:27.074292Z node 2 :TX_PROXY ERROR: Actor# [2:7488012919340614110:3450] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:34:28.478201Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488012902160742583:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:28.478308Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:34:28.739247Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MzFmY2NhYTgtZjUwZGVkYmQtYjQ0OTk4MGItZTM3OWNhZDk=, ActorId: [2:7488012923635581663:2488], ActorState: ExecuteState, TraceId: 01jqpgvw59cqhc7deyv9c3krr5, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken
>> DataShardReadTableSnapshots::ReadTableDropColumn
>> KqpNewEngine::PruneEffectPartitions-UseSink [GOOD]
>> DataShardReadTableSnapshots::ReadTableSnapshot [GOOD]
>> DataShardReadTableSnapshots::ReadTableSplitAfter
>> DataShardReadTableSnapshots::ReadTableSplitBefore [GOOD]
>> DataShardReadTableSnapshots::ReadTableSplitFinished
>> DataShardReadTableSnapshots::ReadTableDropColumnLatePropose [GOOD]
>> DataShardReadTableSnapshots::ReadTableMaxRows
>> KqpProxy::LoadedMetadataAfterCompilationTimeout [GOOD]
>> KqpProxy::ExecuteScriptFailsWithoutFeatureFlag
>> KqpQueryService::SessionFromPoolError
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::PruneEffectPartitions-UseSink [GOOD]
Test command err: Trying to start YDB, gRPC: 20906, MsgBus: 20051 2025-03-31T16:33:46.052043Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012743756165746:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:46.052454Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000889/r3tmp/tmpFuHZNz/pdisk_1.dat 2025-03-31T16:33:46.359067Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20906, node 1 2025-03-31T16:33:46.429160Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:46.429316Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:46.431036Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:33:46.436668Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:46.436699Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:46.436738Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:46.436888Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20051 TClient is connected to server localhost:20051 WaitRootIsUp 
'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:33:46.918863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:46.944549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:47.085941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:47.246421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:47.315117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:48.716577Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012752346102111:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:48.716711Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:48.970520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:33:48.996265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:33:49.021503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:33:49.048980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:33:49.080346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:33:49.148338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:33:49.185352Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012756641069923:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:49.185415Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012756641069928:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:49.185466Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:49.188389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:33:49.198345Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012756641069930:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:33:49.301721Z node 1 :TX_PROXY ERROR: Actor# [1:7488012756641069983:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 24215, MsgBus: 18501 2025-03-31T16:33:50.941510Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012758087408015:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:50.941570Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000889/r3tmp/tmpiqASzW/pdisk_1.dat 2025-03-31T16:33:51.009675Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24215, node 2 2025-03-31T16:33:51.065621Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:51.065660Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:51.065672Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:51.065837Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:33:51.068481Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:51.068653Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:51.070311Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18501 TClient is connected to server localhost:18501 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:33:51.354344Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:51.362768Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:33:51.406463Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:51.556066Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:51.631559Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:53.478224Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012770972311652:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:53.478319Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: { ...
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:19.886769Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:19.929222Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:34:19.973562Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:34:20.024147Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:34:20.095370Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:34:20.168871Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:34:20.213515Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:34:20.270571Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488012889510769678:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:20.270677Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:20.270684Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488012889510769683:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:20.277439Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:34:20.290093Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7488012889510769685:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:34:20.370854Z node 6 :TX_PROXY ERROR: Actor# [6:7488012889510769739:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:34:21.001424Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7488012872330898201:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:21.001516Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 29186, MsgBus: 17613 2025-03-31T16:34:23.383639Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7488012901275411826:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:23.383785Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000889/r3tmp/tmpLYawYj/pdisk_1.dat 2025-03-31T16:34:23.544371Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:34:23.585826Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:23.585967Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:23.587876Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29186, node 7 2025-03-31T16:34:23.643613Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:23.643646Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:23.643657Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:23.643805Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17613 TClient is connected to server localhost:17613 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:34:24.292039Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:24.303188Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:34:24.312669Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:24.417895Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:24.614961Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:24.724365Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:27.668187Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488012918455282778:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:27.668318Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:27.728719Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:34:27.773448Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:34:27.824484Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:34:27.894969Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:34:27.937956Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:34:27.982909Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:34:28.034992Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488012922750250585:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:28.035139Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:28.035455Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488012922750250590:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:28.039352Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:34:28.049404Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7488012922750250592:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:34:28.121708Z node 7 :TX_PROXY ERROR: Actor# [7:7488012922750250646:3449] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:34:28.382908Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7488012901275411826:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:28.382983Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
>> KqpNewEngine::JoinDictWithPure [GOOD]
>> KqpNewEngine::IdxLookupExtractMembers
>> KqpTx::CommitRoTx [GOOD]
>> KqpTx::CommitRoTx_TLI
>> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotCompactBorrowed [GOOD]
>> TSchemeshardBackgroundCompactionTest::SchemeshardShouldHandleCompactionTimeouts
>> DataShardReadTableSnapshots::ReadTableSplitNewTxIdResolveResultReorder [GOOD]
>> DataShardReadTableSnapshots::ReadTableUUID
>> KqpDocumentApi::RestrictWriteExplicitPrepare
>> TSchemeshardBackgroundCompactionTest::ShouldNotCompactServerlessAfterDisable [GOOD]
>> KqpQueryService::TableSink_OltpLiteralUpsert [GOOD]
>> KqpQueryService::TableSink_OltpInsert
>> KqpNewEngine::MultipleBroadcastJoin [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardBackgroundCompactionTest::ShouldNotCompactServerlessAfterDisable [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:33:21.624728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:33:21.624854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:33:21.624902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:33:21.624934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:33:21.625570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:33:21.625613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:33:21.625685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:33:21.625769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:33:21.627327Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:33:21.715979Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:33:21.716059Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:33:21.721237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:33:21.721360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:33:21.721615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:33:21.727256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:33:21.727499Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:33:21.731513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:21.732557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:33:21.738483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:21.747595Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:33:21.747686Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:21.747873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:33:21.747920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:33:21.747962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:33:21.748159Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:33:21.755920Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:33:21.865006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:33:21.866567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:21.867641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:33:21.868759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:33:21.868841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:21.871792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:21.871929Z node 
1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:33:21.872160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:21.872316Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:33:21.872362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:33:21.872394Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:33:21.874520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:21.874564Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:33:21.874589Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:33:21.876220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:21.876254Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:21.876297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:21.876350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:33:21.880318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:33:21.882122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:33:21.882290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:33:21.884582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:21.884731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:33:21.884784Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:21.886084Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:33:21.886145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:21.886346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:33:21.886420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:33:21.888520Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:33:21.888559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:33:21.888719Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:21.888753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:33:21.888969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:21.889013Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:33:21.889098Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:33:21.889130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:21.889169Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:33:21.889198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:21.889251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:33:21.889310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:21.889357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:33:21.889383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:33:21.889444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:33:21.889479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:33:21.889506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:33:21.892167Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:33:21.892271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:33:21.892299Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
onalErase DoExecute: at schemeshard: 72057594046678944 2025-03-31T16:34:34.271040Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-03-31T16:34:34.326866Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:350:2328]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:34:34.326949Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:34:34.327028Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:350:2328], Recipient [3:350:2328]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:34:34.327060Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:34:34.352092Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435095, Sender [0:0:0], Recipient [3:350:2328]: NKikimr::NSchemeShard::TEvPrivate::TEvSendBaseStatsToSA 2025-03-31T16:34:34.352184Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvSendBaseStatsToSA 2025-03-31T16:34:34.352295Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435076, Sender [0:0:0], Recipient [3:350:2328]: NKikimr::NSchemeShard::TEvPrivate::TEvRunConditionalErase 2025-03-31T16:34:34.352347Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvRunConditionalErase 2025-03-31T16:34:34.352396Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409546 2025-03-31T16:34:34.352481Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409546 2025-03-31T16:34:34.352560Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409546 2025-03-31T16:34:34.466652Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [3:754:2640]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2025-03-31T16:34:34.466725Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2025-03-31T16:34:34.466823Z node 3 :TX_DATASHARD TRACE: No cleanup at 72075186233409552 outdated step 200 last cleanup 0 2025-03-31T16:34:34.466902Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186233409552 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:34:34.466937Z node 3 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186233409552 2025-03-31T16:34:34.466968Z node 3 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186233409552 has no attached operations 2025-03-31T16:34:34.467016Z node 3 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186233409552 2025-03-31T16:34:34.467228Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:754:2640]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-03-31T16:34:34.467391Z node 3 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186233409552, FollowerId 0, tableId 2 2025-03-31T16:34:34.467709Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [3:754:2640], Recipient [3:884:2742]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409552 TableLocalId: 2 Generation: 2 Round: 10 TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 
ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 29 Memory: 119352 } ShardState: 2 UserTablePartOwners: 72075186233409552 NodeId: 3 StartTime: 114 TableOwnerId: 72075186233409549 FollowerId: 0 2025-03-31T16:34:34.467746Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-03-31T16:34:34.467789Z node 3 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409552 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0029 2025-03-31T16:34:34.467892Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409552 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 2] raw table stats: DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-03-31T16:34:34.467934Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-03-31T16:34:34.478328Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [3:756:2641]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2025-03-31T16:34:34.478423Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2025-03-31T16:34:34.478541Z node 3 :TX_DATASHARD TRACE: No cleanup at 72075186233409553 outdated step 200 last cleanup 0 2025-03-31T16:34:34.478629Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186233409553 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:34:34.478662Z node 3 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186233409553 2025-03-31T16:34:34.478690Z node 3 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186233409553 has no attached operations 2025-03-31T16:34:34.478719Z node 3 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186233409553 2025-03-31T16:34:34.478887Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:756:2641]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-03-31T16:34:34.479020Z node 3 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186233409553, FollowerId 0, tableId 2 2025-03-31T16:34:34.479446Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [3:756:2641], Recipient [3:884:2742]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409553 TableLocalId: 2 Generation: 2 Round: 10 TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 
RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 27 Memory: 119352 } ShardState: 2 UserTablePartOwners: 72075186233409553 NodeId: 3 StartTime: 114 TableOwnerId: 72075186233409549 FollowerId: 0 2025-03-31T16:34:34.479500Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-03-31T16:34:34.479551Z node 3 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409553 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0027 2025-03-31T16:34:34.479650Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409553 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 2] raw table stats: DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-03-31T16:34:34.492231Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:884:2742]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:34:34.492291Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:34:34.492342Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:884:2742], Recipient [3:884:2742]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:34:34.492363Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:34:34.506880Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435095, Sender [0:0:0], Recipient [3:884:2742]: NKikimr::NSchemeShard::TEvPrivate::TEvSendBaseStatsToSA 2025-03-31T16:34:34.506954Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvSendBaseStatsToSA 2025-03-31T16:34:34.507275Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435076, Sender [0:0:0], Recipient [3:884:2742]: NKikimr::NSchemeShard::TEvPrivate::TEvRunConditionalErase 2025-03-31T16:34:34.507321Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvRunConditionalErase 2025-03-31T16:34:34.507367Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409549 2025-03-31T16:34:34.507437Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409549 2025-03-31T16:34:34.507500Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409549 2025-03-31T16:34:34.507625Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269746180, Sender [3:2029:3848], Recipient [3:884:2742]: NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult 2025-03-31T16:34:34.507666Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxProxySchemeCache::TEvNavigateKeySetResult 2025-03-31T16:34:34.529116Z node 3 :TX_DATASHARD TRACE: 
StateWork, received event# 269877761, Sender [3:2032:3851], Recipient [3:754:2640]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:34:34.529179Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:34:34.529221Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186233409552, clientId# [3:2031:3850], serverId# [3:2032:3851], sessionId# [0:0:0] 2025-03-31T16:34:34.529369Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553213, Sender [3:2030:3849], Recipient [3:754:2640]: NKikimrTxDataShard.TEvGetCompactTableStats PathId { OwnerId: 72075186233409549 LocalId: 2 } 2025-03-31T16:34:34.529938Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [3:2035:3854], Recipient [3:756:2641]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:34:34.529971Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:34:34.530004Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186233409553, clientId# [3:2034:3853], serverId# [3:2035:3854], sessionId# [0:0:0] 2025-03-31T16:34:34.530104Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553213, Sender [3:2033:3852], Recipient [3:756:2641]: NKikimrTxDataShard.TEvGetCompactTableStats PathId { OwnerId: 72075186233409549 LocalId: 2 }
>> KqpDocumentApi::RestrictWrite [GOOD]
>> KqpDocumentApi::AllowRead
>> KqpQueryService::ExecuteQueryPure [GOOD]
>> KqpQueryService::ExecuteQueryPg
>> KqpQueryService::ExecuteCollectMeta [GOOD]
>> KqpQueryService::ExecuteQuery
>> DataShardReadTableSnapshots::ReadTableSplitAfter [GOOD]
>> DataShardReadTableSnapshots::ReadTableDropColumn [GOOD]
>> DataShardReadTableSnapshots::CorruptedDyNumber
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::MultipleBroadcastJoin [GOOD]
Test command err: Trying to start YDB, gRPC: 12915, MsgBus: 20094 2025-03-31T16:33:47.135250Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012748721811743:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:47.135372Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000870/r3tmp/tmpvihWKx/pdisk_1.dat 2025-03-31T16:33:47.429754Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12915, node 1 2025-03-31T16:33:47.499154Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:47.499186Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:47.499197Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:47.499332Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:33:47.518655Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:47.518809Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:47.520676Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20094 TClient is connected to server localhost:20094 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:33:47.976542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:48.010362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:48.111773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:48.244675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:48.321519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:49.900858Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012757311748115:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:49.900974Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:50.170275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:33:50.196540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:33:50.221663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:33:50.244023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:33:50.267935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:33:50.294565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:33:50.329119Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012761606715922:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:50.329165Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:50.329274Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012761606715927:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:50.332147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:33:50.339808Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012761606715929:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:33:50.417694Z node 1 :TX_PROXY ERROR: Actor# [1:7488012761606715983:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 18679, MsgBus: 10262 2025-03-31T16:33:51.879755Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012764149244279:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:51.879861Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000870/r3tmp/tmpTCVGG9/pdisk_1.dat 2025-03-31T16:33:52.013492Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:33:52.031240Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:52.031323Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:52.033014Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18679, node 2 2025-03-31T16:33:52.066330Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:52.066352Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:52.066357Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:52.066442Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10262 TClient is connected to server localhost:10262 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:33:52.412968Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:52.420710Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:33:52.460217Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:52.577051Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:52.639692Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:54.623230Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012777034147934:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:54.623340Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: { ... 6715662:0, at schemeshard: 72057594046644480 2025-03-31T16:34:21.426396Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:34:21.477571Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:34:21.527041Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:34:21.570314Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:34:21.619736Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:34:21.675461Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488012892678386385:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:21.675588Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:21.678169Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488012892678386390:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:21.684009Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:34:21.697569Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7488012892678386392:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:34:21.776908Z node 6 :TX_PROXY ERROR: Actor# [6:7488012892678386446:3446] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:34:22.215210Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7488012875498514917:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:22.215311Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 11221, MsgBus: 22288 2025-03-31T16:34:25.003520Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7488012911218291529:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:25.003591Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000870/r3tmp/tmpDWcy6u/pdisk_1.dat 2025-03-31T16:34:25.148066Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:34:25.180016Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:25.180120Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:25.183027Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11221, node 7 2025-03-31T16:34:25.246425Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:25.246455Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:25.246465Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:25.246611Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22288 TClient is connected to server localhost:22288 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-03-31T16:34:26.022061Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:34:26.041746Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:26.124982Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:26.384027Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:26.483294Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:29.641294Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488012928398162457:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:29.641464Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:29.688227Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:34:29.741695Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:34:29.799908Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:34:29.866857Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:34:29.913851Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:34:29.986872Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:34:30.015645Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7488012911218291529:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:30.018849Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:34:30.063118Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488012932693130269:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:30.063255Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:30.063541Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488012932693130274:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:30.068877Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:34:30.088811Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7488012932693130276:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:34:30.178158Z node 7 :TX_PROXY ERROR: Actor# [7:7488012932693130332:3454] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:34:31.411644Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:34:31.460680Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-31T16:34:31.578835Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 [] >> DataShardReadTableSnapshots::ReadTableSplitFinished [GOOD] >> DataShardReadTableSnapshots::ReadTableMaxRows [GOOD] >> KqpSqlIn::InWithCast [GOOD] >> KqpProxy::ExecuteScriptFailsWithoutFeatureFlag [GOOD] |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_table/unittest >> DataShardReadTableSnapshots::ReadTableSplitAfter [GOOD] Test command err: 2025-03-31T16:34:28.244519Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:34:28.244779Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:34:28.244984Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00138a/r3tmp/tmpXIzAwh/pdisk_1.dat 2025-03-31T16:34:28.772696Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:34:28.827350Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:34:28.871630Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-31T16:34:28.873935Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:28.874287Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:28.874465Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-03-31T16:34:28.887442Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:34:28.981038Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-03-31T16:34:28.981169Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-03-31T16:34:28.982366Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-03-31T16:34:29.102310Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-03-31T16:34:29.102417Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T16:34:29.103030Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-03-31T16:34:29.103139Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:34:29.103380Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:34:29.103603Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T16:34:29.103765Z node 1 :TX_PROXY DEBUG: Actor# 
[1:641:2549] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-03-31T16:34:29.104031Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvClientConnected 2025-03-31T16:34:29.108894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:34:29.111995Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-03-31T16:34:29.112257Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-03-31T16:34:29.164474Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:34:29.166037Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:34:29.166870Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-31T16:34:29.167237Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:34:29.231972Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:34:29.232840Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:34:29.232989Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:34:29.235874Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:34:29.235971Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:34:29.236052Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:34:29.239425Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:34:29.239655Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:34:29.239754Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-31T16:34:29.251278Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:34:29.305158Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T16:34:29.306798Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:34:29.307040Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-31T16:34:29.307098Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:34:29.307169Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T16:34:29.307215Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:34:29.307503Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
2025-03-31T16:34:29.307572Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:34:29.310601Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T16:34:29.310734Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T16:34:29.310880Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:34:29.310928Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:34:29.311070Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-31T16:34:29.311141Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-31T16:34:29.311181Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-31T16:34:29.311217Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:34:29.311274Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:34:29.311415Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:34:29.311482Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:34:29.311534Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-31T16:34:29.311655Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-03-31T16:34:29.311717Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T16:34:29.311841Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:34:29.312272Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-31T16:34:29.312335Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T16:34:29.312479Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T16:34:29.312561Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-31T16:34:29.312622Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-31T16:34:29.312666Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-03-31T16:34:29.312702Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-31T16:34:29.313003Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-31T16:34:29.313044Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-31T16:34:29.313084Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-31T16:34:29.313143Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 
72075186224037888 on unit FinishPropose 2025-03-31T16:34:29.313223Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-31T16:34:29.313272Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-31T16:34:29.313310Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-31T16:34:29.313343Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-31T16:34:29.313390Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-31T16:34:29.316567Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-31T16:34:29.316653Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:34:29.327587Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Comple ... 90 has no attached operations 2025-03-31T16:34:35.982645Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2025-03-31T16:34:35.982708Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-03-31T16:34:35.983227Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287428, Sender [2:1002:2799], Recipient [2:864:2693]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715662 ShardId: 72075186224037890 2025-03-31T16:34:35.983265Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Received TEvStreamQuotaRequest from ShardId# 72075186224037890 2025-03-31T16:34:35.983303Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Reserving quota 1 messages for ShardId# 72075186224037890 2025-03-31T16:34:35.983388Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715662, MessageQuota: 1 2025-03-31T16:34:35.983646Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [2:1002:2799], Recipient [2:905:2724]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-03-31T16:34:35.983682Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2025-03-31T16:34:35.983811Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037890, TxId: 281474976715662, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-03-31T16:34:35.983934Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269550080, Sender [2:1002:2799], Recipient [2:864:2693]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037890 Status: RESPONSE_DATA TxId: 281474976715662 TxResult: "\n\016\n\003key\022\007\252\006\004\n\002\010\002\n\020\n\005value\022\007\252\006\004\n\002\010\002\030\001\022\016b\005\035\003\000\000\000b\005\035!\000\000\000" RowOffsets: 36 ApiVersion: 1 DataSeqNo: 1 DataLastKey: "\001\000\004\000\000\000\003\000\000\000" 2025-03-31T16:34:35.983967Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Received stream data from ShardId# 72075186224037890 2025-03-31T16:34:35.984017Z node 2 :TX_PROXY TRACE: [ReadTable [2:864:2693] TxId# 281474976715661] Sending TEvStreamDataAck to [2:1002:2799] ShardId# 72075186224037890 
2025-03-31T16:34:35.984106Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287428, Sender [2:1002:2799], Recipient [2:864:2693]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715662 ShardId: 72075186224037890 2025-03-31T16:34:35.984132Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Received TEvStreamQuotaRequest from ShardId# 72075186224037890 2025-03-31T16:34:35.984202Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715662, PendingAcks: 0 2025-03-31T16:34:35.984537Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287941, Sender [2:863:2693], Recipient [2:864:2693]: NKikimrTx.TEvStreamQuotaResponse TxId: 281474976715661 MessageSizeLimit: 1 ReservedMessages: 1 2025-03-31T16:34:35.984573Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2025-03-31T16:34:35.984606Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Reserving quota 1 messages for ShardId# 72075186224037890 2025-03-31T16:34:35.984648Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715662, MessageQuota: 1 2025-03-31T16:34:35.984736Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037890, TxId: 281474976715662, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-03-31T16:34:35.984866Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269550080, Sender [2:1002:2799], Recipient [2:864:2693]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037890 Status: RESPONSE_DATA TxId: 281474976715662 TxResult: "\n\016\n\003key\022\007\252\006\004\n\002\010\002\n\020\n\005value\022\007\252\006\004\n\002\010\002\030\001\022\016b\005\035\004\000\000\000b\005\035,\000\000\000" RowOffsets: 36 ApiVersion: 1 DataSeqNo: 2 DataLastKey: "\001\000\004\000\000\000\004\000\000\000" 2025-03-31T16:34:35.984896Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Received stream data from ShardId# 72075186224037890 2025-03-31T16:34:35.984924Z node 2 :TX_PROXY TRACE: [ReadTable [2:864:2693] TxId# 281474976715661] Sending TEvStreamDataAck to [2:1002:2799] ShardId# 72075186224037890 2025-03-31T16:34:35.984977Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715662, PendingAcks: 0 2025-03-31T16:34:35.985042Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287428, Sender [2:1002:2799], Recipient [2:864:2693]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715662 ShardId: 72075186224037890 2025-03-31T16:34:35.985072Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Received TEvStreamQuotaRequest from ShardId# 72075186224037890 2025-03-31T16:34:35.985341Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287941, Sender [2:863:2693], Recipient [2:864:2693]: NKikimrTx.TEvStreamQuotaResponse TxId: 281474976715661 MessageSizeLimit: 1 ReservedMessages: 1 2025-03-31T16:34:35.985370Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2025-03-31T16:34:35.985394Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Reserving quota 1 messages for ShardId# 72075186224037890 2025-03-31T16:34:35.985433Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715662, MessageQuota: 1 
2025-03-31T16:34:35.985488Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037890, TxId: 281474976715662, MessageQuota: 1 2025-03-31T16:34:35.985638Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037890 2025-03-31T16:34:35.985669Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715662, at: 72075186224037890 2025-03-31T16:34:35.985799Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287429, Sender [2:1002:2799], Recipient [2:864:2693]: NKikimrTx.TEvStreamQuotaRelease TxId: 281474976715662 ShardId: 72075186224037890 2025-03-31T16:34:35.985831Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Received TEvStreamQuotaRelease from ShardId# 72075186224037890 2025-03-31T16:34:35.985860Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Released quota 1 reserved messages from ShardId# 72075186224037890 2025-03-31T16:34:35.985937Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:905:2724], Recipient [2:905:2724]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:34:35.985966Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:34:35.986016Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-03-31T16:34:35.986048Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 0 immediate 1 planned 0 2025-03-31T16:34:35.986082Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715662] at 72075186224037890 for ReadTableScan 2025-03-31T16:34:35.986110Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037890 on unit ReadTableScan 2025-03-31T16:34:35.986141Z node 2 :TX_DATASHARD TRACE: ReadTable scan complete for [0:281474976715662] at 72075186224037890 error: , IsFatalError: 0 2025-03-31T16:34:35.986180Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037890 is Executed 2025-03-31T16:34:35.986209Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037890 executing on unit ReadTableScan 2025-03-31T16:34:35.986237Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715662] at 72075186224037890 to execution unit FinishPropose 2025-03-31T16:34:35.986266Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037890 on unit FinishPropose 2025-03-31T16:34:35.986302Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037890 is DelayComplete 2025-03-31T16:34:35.986327Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037890 executing on unit FinishPropose 2025-03-31T16:34:35.986355Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715662] at 72075186224037890 to execution unit CompletedOperations 2025-03-31T16:34:35.986384Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037890 on unit CompletedOperations 2025-03-31T16:34:35.986429Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037890 is Executed 2025-03-31T16:34:35.986454Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037890 executing on unit CompletedOperations 2025-03-31T16:34:35.986477Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:281474976715662] at 72075186224037890 has finished 2025-03-31T16:34:35.986504Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry 
run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:34:35.986530Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037890 2025-03-31T16:34:35.986559Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037890 has no attached operations 2025-03-31T16:34:35.986587Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2025-03-31T16:34:35.986641Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-03-31T16:34:35.986674Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715662] at 72075186224037890 on unit FinishPropose 2025-03-31T16:34:35.986713Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715662 at tablet 72075186224037890 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-03-31T16:34:35.986779Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-03-31T16:34:35.987000Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269550080, Sender [2:905:2724], Recipient [2:864:2693]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037890 Status: COMPLETE TxId: 281474976715662 Step: 0 OrderId: 281474976715662 ExecLatency: 0 ProposeLatency: 0 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037890 CpuTimeUsec: 333 } } 2025-03-31T16:34:35.987034Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Received stream complete from ShardId# 72075186224037890 2025-03-31T16:34:35.987157Z node 2 :TX_PROXY INFO: [ReadTable [2:864:2693] TxId# 281474976715661] RESPONSE Status# ExecComplete prepare time: 0.014277s execute time: 0.340357s total time: 0.354634s 2025-03-31T16:34:35.987504Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:864:2693], Recipient [2:667:2571]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715661 2025-03-31T16:34:35.987728Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:864:2693], Recipient [2:901:2722]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715661 2025-03-31T16:34:35.987971Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:864:2693], Recipient [2:905:2724]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715661 >> YdbTableSplit::SplitByLoadWithDeletes [GOOD] >> KqpSinkTx::SnapshotRO [GOOD] >> KqpSinkTx::SnapshotROInteractive1 >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotRequestCompactionsAfterDisable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_table/unittest >> DataShardReadTableSnapshots::ReadTableSplitFinished [GOOD] Test command err: 2025-03-31T16:34:28.483248Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:34:28.483523Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:34:28.483722Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001383/r3tmp/tmpuqPWC6/pdisk_1.dat 2025-03-31T16:34:28.843200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:34:28.884378Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:34:28.923492Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-31T16:34:28.924544Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:28.924680Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:28.924864Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-03-31T16:34:28.936329Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:34:29.018840Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-03-31T16:34:29.018902Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-03-31T16:34:29.019106Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-03-31T16:34:29.187533Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-03-31T16:34:29.187647Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T16:34:29.188318Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-03-31T16:34:29.188432Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:34:29.188782Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:34:29.188976Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T16:34:29.189079Z node 1 :TX_PROXY DEBUG: Actor# 
[1:641:2549] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-03-31T16:34:29.189433Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvClientConnected 2025-03-31T16:34:29.191155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:34:29.192341Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-03-31T16:34:29.192427Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-03-31T16:34:29.226313Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:34:29.227749Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:34:29.228305Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-31T16:34:29.228593Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:34:29.280504Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:34:29.281434Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:34:29.281577Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:34:29.283652Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:34:29.283742Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:34:29.283810Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:34:29.284270Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:34:29.284453Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:34:29.284549Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-31T16:34:29.295518Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:34:29.340996Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T16:34:29.341282Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:34:29.341449Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-31T16:34:29.341488Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:34:29.341553Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T16:34:29.341592Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:34:29.341860Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
2025-03-31T16:34:29.341910Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:34:29.342238Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T16:34:29.342350Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T16:34:29.342489Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:34:29.342536Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:34:29.342595Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-31T16:34:29.342639Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-31T16:34:29.342681Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-31T16:34:29.342716Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:34:29.342762Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:34:29.342863Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:34:29.342913Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:34:29.342975Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-31T16:34:29.343106Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-03-31T16:34:29.343264Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T16:34:29.343410Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:34:29.343695Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-31T16:34:29.343752Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T16:34:29.343891Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T16:34:29.343951Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-31T16:34:29.344006Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-31T16:34:29.344053Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-03-31T16:34:29.344090Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-31T16:34:29.344408Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-31T16:34:29.344454Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-31T16:34:29.344496Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-31T16:34:29.344534Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 
72075186224037888 on unit FinishPropose 2025-03-31T16:34:29.344589Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-31T16:34:29.344641Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-31T16:34:29.344697Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-31T16:34:29.344753Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-31T16:34:29.344784Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-31T16:34:29.346728Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-31T16:34:29.346809Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:34:29.357755Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Comple ... 186224037896 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-03-31T16:34:36.512986Z node 2 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037896 2025-03-31T16:34:36.513027Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037896 has no attached operations 2025-03-31T16:34:36.513099Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037896 2025-03-31T16:34:36.513161Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037896 2025-03-31T16:34:36.513625Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [2:1364:3077], Recipient [2:1263:2998]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-03-31T16:34:36.513660Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2025-03-31T16:34:36.513735Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287428, Sender [2:1364:3077], Recipient [2:1084:2857]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715664 ShardId: 72075186224037896 2025-03-31T16:34:36.513767Z node 2 :TX_PROXY DEBUG: [ReadTable [2:1084:2857] TxId# 281474976715663] Received TEvStreamQuotaRequest from ShardId# 72075186224037896 2025-03-31T16:34:36.513799Z node 2 :TX_PROXY DEBUG: [ReadTable [2:1084:2857] TxId# 281474976715663] Reserving quota 1 messages for ShardId# 72075186224037896 2025-03-31T16:34:36.513868Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037896, TxId: 281474976715664, MessageQuota: 1 2025-03-31T16:34:36.514204Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037896, TxId: 281474976715664, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-03-31T16:34:36.514337Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269550080, Sender [2:1364:3077], Recipient [2:1084:2857]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037896 Status: RESPONSE_DATA TxId: 281474976715664 TxResult: "\n\016\n\003key\022\007\252\006\004\n\002\010\002\n\020\n\005value\022\007\252\006\004\n\002\010\002\030\001\022\016b\005\035\006\000\000\000b\005\035B\000\000\000" RowOffsets: 36 ApiVersion: 1 DataSeqNo: 1 DataLastKey: "\001\000\004\000\000\000\006\000\000\000" 2025-03-31T16:34:36.514370Z node 2 :TX_PROXY DEBUG: [ReadTable [2:1084:2857] 
TxId# 281474976715663] Received stream data from ShardId# 72075186224037896 2025-03-31T16:34:36.514400Z node 2 :TX_PROXY TRACE: [ReadTable [2:1084:2857] TxId# 281474976715663] Sending TEvStreamDataAck to [2:1364:3077] ShardId# 72075186224037896 2025-03-31T16:34:36.514489Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287428, Sender [2:1364:3077], Recipient [2:1084:2857]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715664 ShardId: 72075186224037896 2025-03-31T16:34:36.514512Z node 2 :TX_PROXY DEBUG: [ReadTable [2:1084:2857] TxId# 281474976715663] Received TEvStreamQuotaRequest from ShardId# 72075186224037896 2025-03-31T16:34:36.514573Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037896, TxId: 281474976715664, PendingAcks: 0 2025-03-31T16:34:36.514917Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287941, Sender [2:1083:2857], Recipient [2:1084:2857]: NKikimrTx.TEvStreamQuotaResponse TxId: 281474976715663 MessageSizeLimit: 1 ReservedMessages: 1 2025-03-31T16:34:36.514952Z node 2 :TX_PROXY DEBUG: [ReadTable [2:1084:2857] TxId# 281474976715663] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2025-03-31T16:34:36.514996Z node 2 :TX_PROXY DEBUG: [ReadTable [2:1084:2857] TxId# 281474976715663] Reserving quota 1 messages for ShardId# 72075186224037896 2025-03-31T16:34:36.515048Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037896, TxId: 281474976715664, MessageQuota: 1 2025-03-31T16:34:36.519243Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037896, TxId: 281474976715664, MessageQuota: 1 2025-03-31T16:34:36.519508Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037896 2025-03-31T16:34:36.531160Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715664, at: 72075186224037896 2025-03-31T16:34:36.531498Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287429, Sender [2:1364:3077], Recipient [2:1084:2857]: NKikimrTx.TEvStreamQuotaRelease TxId: 281474976715664 ShardId: 72075186224037896 2025-03-31T16:34:36.531542Z node 2 :TX_PROXY DEBUG: [ReadTable [2:1084:2857] TxId# 281474976715663] Received TEvStreamQuotaRelease from ShardId# 72075186224037896 2025-03-31T16:34:36.531574Z node 2 :TX_PROXY DEBUG: [ReadTable [2:1084:2857] TxId# 281474976715663] Released quota 1 reserved messages from ShardId# 72075186224037896 2025-03-31T16:34:36.531704Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:1263:2998], Recipient [2:1263:2998]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:34:36.531741Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:34:36.531805Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037896 2025-03-31T16:34:36.531841Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037896 active 1 active planned 0 immediate 1 planned 0 2025-03-31T16:34:36.531877Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715664] at 72075186224037896 for ReadTableScan 2025-03-31T16:34:36.531905Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037896 on unit ReadTableScan 2025-03-31T16:34:36.531940Z node 2 :TX_DATASHARD TRACE: ReadTable scan complete for [0:281474976715664] at 72075186224037896 error: , IsFatalError: 0 2025-03-31T16:34:36.531986Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037896 is Executed 
2025-03-31T16:34:36.532017Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037896 executing on unit ReadTableScan 2025-03-31T16:34:36.532046Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715664] at 72075186224037896 to execution unit FinishPropose 2025-03-31T16:34:36.532075Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037896 on unit FinishPropose 2025-03-31T16:34:36.532109Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037896 is DelayComplete 2025-03-31T16:34:36.532135Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037896 executing on unit FinishPropose 2025-03-31T16:34:36.532160Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715664] at 72075186224037896 to execution unit CompletedOperations 2025-03-31T16:34:36.532188Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037896 on unit CompletedOperations 2025-03-31T16:34:36.532229Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037896 is Executed 2025-03-31T16:34:36.532251Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037896 executing on unit CompletedOperations 2025-03-31T16:34:36.532276Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:281474976715664] at 72075186224037896 has finished 2025-03-31T16:34:36.532305Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037896 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:34:36.532334Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037896 2025-03-31T16:34:36.532360Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037896 has no attached operations 2025-03-31T16:34:36.532393Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037896 2025-03-31T16:34:36.532457Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037896 2025-03-31T16:34:36.532488Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715664] at 72075186224037896 on unit FinishPropose 2025-03-31T16:34:36.532523Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715664 at tablet 72075186224037896 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-03-31T16:34:36.532598Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037896 2025-03-31T16:34:36.532845Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269550080, Sender [2:1263:2998], Recipient [2:1084:2857]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037896 Status: COMPLETE TxId: 281474976715664 Step: 0 OrderId: 281474976715664 ExecLatency: 0 ProposeLatency: 0 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037896 CpuTimeUsec: 289 } } 2025-03-31T16:34:36.532884Z node 2 :TX_PROXY DEBUG: [ReadTable [2:1084:2857] TxId# 281474976715663] Received stream complete from ShardId# 72075186224037896 2025-03-31T16:34:36.532942Z node 2 :TX_PROXY INFO: [ReadTable [2:1084:2857] TxId# 281474976715663] RESPONSE Status# ExecComplete prepare time: 0.016492s execute time: 0.599417s total time: 0.615909s 2025-03-31T16:34:36.533377Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:1084:2857], Recipient [2:885:2709]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 
281474976715663 2025-03-31T16:34:36.533601Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:1084:2857], Recipient [2:996:2791]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715663 2025-03-31T16:34:36.533856Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:1084:2857], Recipient [2:998:2793]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715663 2025-03-31T16:34:36.534264Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:1084:2857], Recipient [2:1259:2996]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715663 2025-03-31T16:34:36.534567Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:1084:2857], Recipient [2:1263:2998]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715663 2025-03-31T16:34:36.534813Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [2:1367:3080], Recipient [2:1149:2914]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:34:36.534846Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:34:36.534881Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037893, clientId# [2:1365:3078], serverId# [2:1367:3080], sessionId# [0:0:0] 2025-03-31T16:34:36.534958Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:1084:2857], Recipient [2:1149:2914]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715663 2025-03-31T16:34:36.535133Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [2:1368:3081], Recipient [2:1153:2916]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:34:36.535164Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:34:36.535190Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037894, clientId# [2:1366:3079], serverId# [2:1368:3081], sessionId# [0:0:0] 2025-03-31T16:34:36.535299Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:1084:2857], Recipient [2:1153:2916]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715663 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_table/unittest >> DataShardReadTableSnapshots::ReadTableMaxRows [GOOD] Test command err: 2025-03-31T16:34:29.322768Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:34:29.323008Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:34:29.323209Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001379/r3tmp/tmpWr6c7B/pdisk_1.dat 2025-03-31T16:34:29.721128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:34:29.762081Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:34:29.805849Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-31T16:34:29.806947Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:29.807127Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:29.807328Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-03-31T16:34:29.818764Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:34:29.904092Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-03-31T16:34:29.904194Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-03-31T16:34:29.904365Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-03-31T16:34:30.039764Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-03-31T16:34:30.039861Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T16:34:30.040359Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-03-31T16:34:30.040453Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:34:30.040777Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:34:30.040997Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T16:34:30.041127Z node 1 :TX_PROXY DEBUG: Actor# 
[1:641:2549] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-03-31T16:34:30.041513Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvClientConnected 2025-03-31T16:34:30.043428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:34:30.044650Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-03-31T16:34:30.044766Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-03-31T16:34:30.077916Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:34:30.079244Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:34:30.079776Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-31T16:34:30.080038Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:34:30.138446Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:34:30.142395Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:34:30.142581Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:34:30.144484Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:34:30.144618Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:34:30.144684Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:34:30.145128Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:34:30.145291Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:34:30.145389Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-31T16:34:30.157685Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:34:30.201749Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T16:34:30.202032Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:34:30.202205Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-31T16:34:30.202245Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:34:30.202319Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T16:34:30.202386Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:34:30.202646Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
2025-03-31T16:34:30.202696Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:34:30.203040Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T16:34:30.203175Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T16:34:30.203326Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:34:30.203371Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:34:30.203424Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-31T16:34:30.203480Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-31T16:34:30.203514Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-31T16:34:30.203544Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:34:30.203601Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:34:30.203748Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:34:30.203852Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:34:30.203900Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-31T16:34:30.204019Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-03-31T16:34:30.204079Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T16:34:30.204201Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:34:30.204471Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-31T16:34:30.204528Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T16:34:30.204668Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T16:34:30.204745Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-31T16:34:30.204811Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-31T16:34:30.204852Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-03-31T16:34:30.204904Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-31T16:34:30.205259Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-31T16:34:30.205301Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-31T16:34:30.205340Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-31T16:34:30.205374Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 
72075186224037888 on unit FinishPropose 2025-03-31T16:34:30.205429Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-31T16:34:30.205478Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-31T16:34:30.205540Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-31T16:34:30.205574Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-31T16:34:30.205600Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-31T16:34:30.207819Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-31T16:34:30.207895Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:34:30.219867Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Comple ... planned 0 immediate 1 planned 0 2025-03-31T16:34:36.561666Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715663] at 72075186224037890 for WaitForStreamClearance 2025-03-31T16:34:36.561698Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715663] at 72075186224037890 on unit WaitForStreamClearance 2025-03-31T16:34:36.561734Z node 2 :TX_DATASHARD TRACE: Got stream clearance for [0:281474976715663] at 72075186224037890 2025-03-31T16:34:36.561778Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715663] at 72075186224037890 is Executed 2025-03-31T16:34:36.561808Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715663] at 72075186224037890 executing on unit WaitForStreamClearance 2025-03-31T16:34:36.561836Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715663] at 72075186224037890 to execution unit ReadTableScan 2025-03-31T16:34:36.561865Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715663] at 72075186224037890 on unit ReadTableScan 2025-03-31T16:34:36.562101Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715663] at 72075186224037890 is Continue 2025-03-31T16:34:36.562132Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-03-31T16:34:36.562161Z node 2 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037890 2025-03-31T16:34:36.562200Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037890 has no attached operations 2025-03-31T16:34:36.562236Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2025-03-31T16:34:36.562303Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-03-31T16:34:36.562837Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [2:1004:2803], Recipient [2:889:2711]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-03-31T16:34:36.562881Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2025-03-31T16:34:36.563010Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287428, Sender [2:1004:2803], Recipient [2:974:2775]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715663 ShardId: 72075186224037890 2025-03-31T16:34:36.563044Z node 2 :TX_PROXY DEBUG: 
[ReadTable [2:974:2775] TxId# 281474976715662] Received TEvStreamQuotaRequest from ShardId# 72075186224037890 2025-03-31T16:34:36.563099Z node 2 :TX_PROXY DEBUG: [ReadTable [2:974:2775] TxId# 281474976715662] Reserving quota 1 messages for ShardId# 72075186224037890 ... observed row limit of 2 rows at [2:1004:2803] 2025-03-31T16:34:36.563249Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715663, MessageQuota: 1 2025-03-31T16:34:36.563658Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037890, TxId: 281474976715663, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-03-31T16:34:36.563847Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269550080, Sender [2:1004:2803], Recipient [2:974:2775]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037890 Status: RESPONSE_DATA TxId: 281474976715663 TxResult: "\n\016\n\003key\022\007\252\006\004\n\002\010\002\n\020\n\005value\022\007\252\006\004\n\002\010\002\030\001\022\016b\005\035\004\000\000\000b\005\035,\000\000\000" RowOffsets: 36 ApiVersion: 1 DataSeqNo: 1 DataLastKey: "\001\000\004\000\000\000\004\000\000\000" 2025-03-31T16:34:36.563886Z node 2 :TX_PROXY DEBUG: [ReadTable [2:974:2775] TxId# 281474976715662] Received stream data from ShardId# 72075186224037890 2025-03-31T16:34:36.563914Z node 2 :TX_PROXY TRACE: [ReadTable [2:974:2775] TxId# 281474976715662] Sending TEvStreamDataAck to [2:1004:2803] ShardId# 72075186224037890 2025-03-31T16:34:36.563972Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715663, PendingAcks: 0 2025-03-31T16:34:36.564051Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287428, Sender [2:1004:2803], Recipient [2:974:2775]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715663 ShardId: 72075186224037890 2025-03-31T16:34:36.564077Z node 2 :TX_PROXY DEBUG: [ReadTable [2:974:2775] TxId# 281474976715662] Received TEvStreamQuotaRequest from ShardId# 72075186224037890 2025-03-31T16:34:36.564429Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287941, Sender [2:973:2775], Recipient [2:974:2775]: NKikimrTx.TEvStreamQuotaResponse TxId: 281474976715662 MessageSizeLimit: 1 ReservedMessages: 1 2025-03-31T16:34:36.564463Z node 2 :TX_PROXY DEBUG: [ReadTable [2:974:2775] TxId# 281474976715662] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2025-03-31T16:34:36.564524Z node 2 :TX_PROXY DEBUG: [ReadTable [2:974:2775] TxId# 281474976715662] Reserving quota 1 messages for ShardId# 72075186224037890 ... 
observed row limit of 1 rows at [2:1004:2803] 2025-03-31T16:34:36.564586Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715663, MessageQuota: 1 2025-03-31T16:34:36.564662Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037890, TxId: 281474976715663, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-03-31T16:34:36.564802Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269550080, Sender [2:1004:2803], Recipient [2:974:2775]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037890 Status: RESPONSE_DATA TxId: 281474976715663 TxResult: "\n\016\n\003key\022\007\252\006\004\n\002\010\002\n\020\n\005value\022\007\252\006\004\n\002\010\002\030\001\022\016b\005\035\005\000\000\000b\005\0357\000\000\000" RowOffsets: 36 ApiVersion: 1 DataSeqNo: 2 DataLastKey: "\001\000\004\000\000\000\005\000\000\000" 2025-03-31T16:34:36.564834Z node 2 :TX_PROXY DEBUG: [ReadTable [2:974:2775] TxId# 281474976715662] Received stream data from ShardId# 72075186224037890 2025-03-31T16:34:36.564860Z node 2 :TX_PROXY TRACE: [ReadTable [2:974:2775] TxId# 281474976715662] Sending TEvStreamDataAck to [2:1004:2803] ShardId# 72075186224037890 2025-03-31T16:34:36.564931Z node 2 :TX_PROXY INFO: [ReadTable [2:974:2775] TxId# 281474976715662] RESPONSE Status# ExecComplete prepare time: 0.016456s execute time: 0.180084s total time: 0.196540s 2025-03-31T16:34:36.565133Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715663, PendingAcks: 0 2025-03-31T16:34:36.565176Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037890, TxId: 281474976715663, MessageQuota: 0 2025-03-31T16:34:36.565381Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037890 2025-03-31T16:34:36.565414Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715663, at: 72075186224037890 2025-03-31T16:34:36.565778Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:974:2775], Recipient [2:885:2709]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715662 2025-03-31T16:34:36.566146Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:889:2711], Recipient [2:889:2711]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:34:36.566185Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:34:36.566233Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-03-31T16:34:36.566265Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 0 immediate 1 planned 0 2025-03-31T16:34:36.566307Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715663] at 72075186224037890 for ReadTableScan 2025-03-31T16:34:36.566337Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715663] at 72075186224037890 on unit ReadTableScan 2025-03-31T16:34:36.566371Z node 2 :TX_DATASHARD TRACE: ReadTable scan complete for [0:281474976715663] at 72075186224037890 error: , IsFatalError: 0 2025-03-31T16:34:36.566414Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715663] at 72075186224037890 is Executed 2025-03-31T16:34:36.566444Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715663] at 72075186224037890 executing on unit ReadTableScan 2025-03-31T16:34:36.566471Z node 2 :TX_DATASHARD TRACE: Add 
[0:281474976715663] at 72075186224037890 to execution unit FinishPropose 2025-03-31T16:34:36.566498Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715663] at 72075186224037890 on unit FinishPropose 2025-03-31T16:34:36.566552Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715663] at 72075186224037890 is DelayComplete 2025-03-31T16:34:36.566583Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715663] at 72075186224037890 executing on unit FinishPropose 2025-03-31T16:34:36.566611Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715663] at 72075186224037890 to execution unit CompletedOperations 2025-03-31T16:34:36.566641Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715663] at 72075186224037890 on unit CompletedOperations 2025-03-31T16:34:36.566688Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715663] at 72075186224037890 is Executed 2025-03-31T16:34:36.566711Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715663] at 72075186224037890 executing on unit CompletedOperations 2025-03-31T16:34:36.566736Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:281474976715663] at 72075186224037890 has finished 2025-03-31T16:34:36.566767Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:34:36.566795Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037890 2025-03-31T16:34:36.566835Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037890 has no attached operations 2025-03-31T16:34:36.566863Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2025-03-31T16:34:36.566919Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-03-31T16:34:36.566948Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715663] at 72075186224037890 on unit FinishPropose 2025-03-31T16:34:36.566984Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715663 at tablet 72075186224037890 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-03-31T16:34:36.567068Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-03-31T16:34:36.568934Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269549569, Sender [2:974:2775], Recipient [2:889:2711]: NKikimrTxDataShard.TEvCancelTransactionProposal TxId: 281474976715663 2025-03-31T16:34:36.568988Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvCancelTransactionProposal 2025-03-31T16:34:36.569034Z node 2 :TX_DATASHARD DEBUG: Got TEvDataShard::TEvCancelTransactionProposal 72075186224037890 txId 281474976715663 2025-03-31T16:34:36.569112Z node 2 :TX_DATASHARD DEBUG: Start TTxCancelTransactionProposal at tablet 72075186224037890 txId 281474976715663 2025-03-31T16:34:36.569338Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287431, Sender [2:974:2775], Recipient [2:889:2711]: NKikimrTx.TEvInterruptTransaction TxId: 281474976715663 2025-03-31T16:34:36.569378Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvInterruptTransaction 2025-03-31T16:34:36.569484Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:974:2775], Recipient [2:889:2711]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715662 >> KqpQueryService::SessionFromPoolError [GOOD] >> 
KqpQueryService::SessionFromPoolSuccess ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotRequestCompactionsAfterDisable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:33:21.624505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:33:21.624602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:33:21.624642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:33:21.624675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:33:21.625559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:33:21.625604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:33:21.625702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:33:21.625788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:33:21.627349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:33:21.713662Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:33:21.713723Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:33:21.718313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:33:21.718393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:33:21.718539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:33:21.725876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:33:21.726497Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:33:21.731503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:21.732550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:33:21.738725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:21.747604Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:33:21.747662Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:21.747779Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:33:21.747815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:33:21.747840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:33:21.748037Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:33:21.755099Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:33:21.864683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:33:21.866603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:21.867653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:33:21.868765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:33:21.868840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:21.871839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:21.872025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:33:21.872230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:21.872281Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:33:21.872336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:33:21.872368Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:33:21.874180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:21.874226Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:33:21.874254Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:33:21.876078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:21.876139Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:21.876181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, 
operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:21.876230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:33:21.881085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:33:21.882953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:33:21.883147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:33:21.884528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:21.884650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:33:21.884710Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:21.886094Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:33:21.886172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:21.886355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:33:21.886439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:33:21.888399Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:33:21.888443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:33:21.888603Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:21.888644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:33:21.888864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:21.888921Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:33:21.889013Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:33:21.889042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:21.889079Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 
progress is 1/1 2025-03-31T16:33:21.889107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:21.889141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:33:21.889178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:21.889211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:33:21.889248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:33:21.889308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:33:21.889369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:33:21.889410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:33:21.892328Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:33:21.892435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:33:21.892474Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 73880Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvSendBaseStatsToSA 2025-03-31T16:34:37.173985Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435076, Sender [0:0:0], Recipient [3:125:2151]: NKikimr::NSchemeShard::TEvPrivate::TEvRunConditionalErase 2025-03-31T16:34:37.174021Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvRunConditionalErase 2025-03-31T16:34:37.174059Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2025-03-31T16:34:37.174150Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-03-31T16:34:37.174241Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-03-31T16:34:37.227881Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [3:318:2302]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2025-03-31T16:34:37.227948Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2025-03-31T16:34:37.228016Z node 3 :TX_DATASHARD TRACE: No cleanup at 72075186233409546 outdated step 5000002 last cleanup 0 2025-03-31T16:34:37.228070Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186233409546 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:34:37.228106Z node 3 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186233409546 2025-03-31T16:34:37.228135Z node 3 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186233409546 has no attached operations 2025-03-31T16:34:37.228164Z node 3 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186233409546 2025-03-31T16:34:37.228270Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:318:2302]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 
2025-03-31T16:34:37.228416Z node 3 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186233409546, FollowerId 0, tableId 2 2025-03-31T16:34:37.228774Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [3:318:2302], Recipient [3:125:2151]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409546 TableLocalId: 2 Generation: 2 Round: 10 TableStats { DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 29 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 26 Memory: 124232 Storage: 14156 } ShardState: 2 UserTablePartOwners: 72075186233409546 NodeId: 3 StartTime: 41 TableOwnerId: 72057594046678944 FollowerId: 0 2025-03-31T16:34:37.228814Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-03-31T16:34:37.228862Z node 3 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 13940 rowCount 100 cpuUsage 0.0026 2025-03-31T16:34:37.229101Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] raw table stats: DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 29 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-03-31T16:34:37.229144Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-03-31T16:34:37.239622Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [3:326:2307]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2025-03-31T16:34:37.239694Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2025-03-31T16:34:37.239785Z node 3 :TX_DATASHARD TRACE: No cleanup at 72075186233409547 outdated step 5000002 last cleanup 0 2025-03-31T16:34:37.239855Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186233409547 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:34:37.239890Z node 3 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186233409547 2025-03-31T16:34:37.239920Z node 3 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186233409547 has no attached operations 2025-03-31T16:34:37.239948Z node 3 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186233409547 2025-03-31T16:34:37.240082Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:326:2307]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-03-31T16:34:37.240190Z node 3 :TX_DATASHARD TRACE: 
TEvPeriodicTableStats from datashard 72075186233409547, FollowerId 0, tableId 2 2025-03-31T16:34:37.240506Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [3:326:2307], Recipient [3:125:2151]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409547 TableLocalId: 2 Generation: 2 Round: 10 TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 27 Memory: 119352 } ShardState: 2 UserTablePartOwners: 72075186233409547 NodeId: 3 StartTime: 41 TableOwnerId: 72057594046678944 FollowerId: 0 2025-03-31T16:34:37.240557Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-03-31T16:34:37.240607Z node 3 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0027 2025-03-31T16:34:37.240711Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] raw table stats: DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-03-31T16:34:37.283275Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435091, Sender [0:0:0], Recipient [3:125:2151]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-03-31T16:34:37.283358Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-03-31T16:34:37.283389Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2025-03-31T16:34:37.283455Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 2 2025-03-31T16:34:37.283487Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 2 2025-03-31T16:34:37.283580Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 13940 row count 100 2025-03-31T16:34:37.283643Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Simple, is column=0, is olap=0 2025-03-31T16:34:37.283687Z node 3 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 2: RowCount 100, DataSize 13940 2025-03-31T16:34:37.283754Z node 3 :FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Updated shard# 72057594046678944:1 with partCount# 1, rowCount# 100, searchHeight# 1, 
lastFullCompaction# 1970-01-01T00:00:29.000000Z at schemeshard 72057594046678944 2025-03-31T16:34:37.283850Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:2 data size 0 row count 0 2025-03-31T16:34:37.283884Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Simple, is column=0, is olap=0 2025-03-31T16:34:37.283910Z node 3 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409547 followerId=0, pathId 2: RowCount 0, DataSize 0 2025-03-31T16:34:37.283945Z node 3 :FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Updated shard# 72057594046678944:2 with partCount# 0, rowCount# 0, searchHeight# 0, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72057594046678944 2025-03-31T16:34:37.284029Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-31T16:34:37.294495Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435091, Sender [0:0:0], Recipient [3:125:2151]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-03-31T16:34:37.294561Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-03-31T16:34:37.294599Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-03-31T16:34:37.328446Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [3:1334:3255], Recipient [3:318:2302]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:34:37.328524Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:34:37.328578Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186233409546, clientId# [3:1333:3254], serverId# [3:1334:3255], sessionId# [0:0:0] 2025-03-31T16:34:37.328803Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553213, Sender [3:1332:3253], Recipient [3:318:2302]: NKikimrTxDataShard.TEvGetCompactTableStats PathId { OwnerId: 72057594046678944 LocalId: 2 } 2025-03-31T16:34:37.331245Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [3:1337:3258], Recipient [3:326:2307]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:34:37.331302Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:34:37.331339Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186233409547, clientId# [3:1336:3257], serverId# [3:1337:3258], sessionId# [0:0:0] 2025-03-31T16:34:37.331522Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553213, Sender [3:1335:3256], Recipient [3:326:2307]: NKikimrTxDataShard.TEvGetCompactTableStats PathId { OwnerId: 72057594046678944 LocalId: 2 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> KqpProxy::ExecuteScriptFailsWithoutFeatureFlag [GOOD] Test command err: 2025-03-31T16:34:16.676098Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012871187047189:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:16.676160Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000c60/r3tmp/tmpKyYJ8u/pdisk_1.dat 2025-03-31T16:34:17.067529Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:34:17.077413Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:17.077518Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:17.080405Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19234 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:34:17.391676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:19.680891Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-03-31T16:34:19.682204Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-03-31T16:34:19.683694Z node 1 :KQP_PROXY WARN: Failed to parse session id: ydb://session/1?id=ZjY5NWRlM2EtYWMyYjA5YWEtNzQ0MTVlYTMtM2Q4ZDgzOWQ=&node_id=1234&node_id=12345 2025-03-31T16:34:19.688216Z node 1 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-03-31T16:34:19.688259Z node 1 :KQP_PROXY DEBUG: Updated table service config. 
2025-03-31T16:34:19.688293Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-03-31T16:34:19.688349Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-03-31T16:34:19.688421Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T16:34:19.688623Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T16:34:19.688709Z node 1 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 2, sender: [1:7488012875482015047:2282], selfId: [1:7488012871187047436:2278], source: [1:7488012871187047436:2278] 2025-03-31T16:34:19.688730Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T16:34:19.688784Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T16:34:19.689330Z node 1 :KQP_PROXY WARN: Failed to parse session id: unknown://session/1?id=ZjY5NWRlM2EtYWMyYjA5YWEtNzQ0MTVlYTMtM2Q4ZDgzOWQ=&node_id=1234&node_id=12345 2025-03-31T16:34:19.689457Z node 1 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 3, sender: [1:7488012875482015047:2282], selfId: [1:7488012871187047436:2278], source: [1:7488012871187047436:2278] 2025-03-31T16:34:19.690234Z node 1 :KQP_PROXY WARN: Failed to parse session id: ydb://session/1?id=ZjY5NWRlM2EtYWMyYjA5YWEtNzQ0MTVlYTMtM2Q4ZDgzOWQ=&node_id=eqweq 2025-03-31T16:34:19.690339Z node 1 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 4, sender: [1:7488012875482015047:2282], selfId: [1:7488012871187047436:2278], source: [1:7488012871187047436:2278] 2025-03-31T16:34:19.692089Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012884071949677:2309], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:19.692456Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:23.296231Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:101:2147], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:34:23.296670Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:34:23.296789Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000c60/r3tmp/tmpohwYrz/pdisk_1.dat 2025-03-31T16:34:23.630035Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:34:23.662562Z node 2 :KQP_PROXY DEBUG: Updated table service config. 2025-03-31T16:34:23.662630Z node 2 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-03-31T16:34:23.662763Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:34:23.688009Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:92:2139], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: Root/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:34:23.690007Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [2:92:2139], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /Root/.metadata/workload_manager/delayed_requests PathId: Strong: 1 } 2025-03-31T16:34:23.690197Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [2:92:2139], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /Root/.metadata/workload_manager/delayed_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [2:611:2533] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-31T16:34:23.690351Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:92:2139], cacheItem# { Subscriber: { Subscriber: [2:611:2533] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:34:23.690512Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [2:92:2139], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /Root/.metadata/workload_manager/running_requests PathId: Strong: 1 } 
2025-03-31T16:34:23.690591Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [2:92:2139], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /Root/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [2:612:2534] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-31T16:34:23.690665Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:92:2139], cacheItem# { Subscriber: { Subscriber: [2:612:2534] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-31T16:34:23.690873Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:625:2535], recipient# [2:101:2147], result# { ErrorCount: 2 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: Root/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-31T16:34:23.703596Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:23.703774Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:23.716632Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:34:23.797658Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [2:92:2139], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /Root PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 500 ParentPathId: 1 PathState: EPathStateAlter Owner: "root@builtin" ACL: "" Effective ... 
tem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NKqp::TSchedulerActor Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to TICKET_PARSER_ACTOR Captured TEvents::TSystem::Wakeup to KQP_COMPILE_COMPUTATION_PATTERN_SERVICE Captured TEvents::TSystem::Wakeup to KQP_NODE_SERVICE Captured TEvents::TSystem::Wakeup to PROXY_SCHEME_CACHE Captured TEvents::TSystem::Wakeup to PROXY_SCHEME_CACHE Captured TEvents::TSystem::Wakeup to PROXY_SCHEME_CACHE Captured TEvents::TSystem::Wakeup to PROXY_SCHEME_CACHE Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_RESPONSIVENESS_PINGER Captured TEvents::TSystem::Wakeup to TABLET_RESPONSIVENESS_PINGER Captured TEvents::TSystem::Wakeup to NKikimr::NKqp::TSchedulerActor Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NKqp::TSchedulerActor Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to TABLET_COUNTERS_AGGREGATOR Captured TEvents::TSystem::Wakeup to BSC_STAT_PROCESSOR Captured TEvents::TSystem::Wakeup to TICKET_PARSER_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NIcNodeCache::TIcNodeCacheServiceActor Captured TEvents::TSystem::Wakeup to KQP_COMPILE_COMPUTATION_PATTERN_SERVICE Captured TEvents::TSystem::Wakeup to KQP_NODE_SERVICE Captured TEvents::TSystem::Wakeup to PROXY_SCHEME_CACHE Captured TEvents::TSystem::Wakeup to PROXY_SCHEME_CACHE Captured TEvents::TSystem::Wakeup to PROXY_SCHEME_CACHE Captured TEvents::TSystem::Wakeup to PROXY_SCHEME_CACHE Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup 
to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_RESPONSIVENESS_PINGER Captured TEvents::TSystem::Wakeup to TABLET_RESPONSIVENESS_PINGER Captured TEvents::TSystem::Wakeup to NKikimr::NKqp::TSchedulerActor Captured TEvents::TSystem::Wakeup to NKikimr::NBsController::TBlobStorageController::TSelfHealActor Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NKqp::TSchedulerActor Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to TICKET_PARSER_ACTOR Captured TEvents::TSystem::Wakeup to KQP_COMPILE_COMPUTATION_PATTERN_SERVICE Captured TEvents::TSystem::Wakeup to KQP_NODE_SERVICE Captured TEvents::TSystem::Wakeup to PROXY_SCHEME_CACHE Captured TEvents::TSystem::Wakeup to PROXY_SCHEME_CACHE Captured TEvents::TSystem::Wakeup to PROXY_SCHEME_CACHE Captured TEvents::TSystem::Wakeup to PROXY_SCHEME_CACHE Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to NKikimr::NKqp::TSchedulerActor Captured TEvents::TSystem::Wakeup to TABLET_RESPONSIVENESS_PINGER Captured TEvents::TSystem::Wakeup to TABLET_RESPONSIVENESS_PINGER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NKqp::TSchedulerActor Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR 2025-03-31T16:34:31.862515Z node 2 :KQP_PROXY DEBUG: Handle TEvPrivate::TEvOnRequestTimeout(20) 2025-03-31T16:34:31.862608Z node 2 :KQP_PROXY DEBUG: Reply timeout: requestId 20 sessionId: ydb://session/3?node_id=2&id=YzI5NGM0NDAtYjc2NTM2Yy1kNGI1NGNmZS03ZWEyMTE1Zg== status: TIMEOUT round: 0 Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER 2025-03-31T16:34:31.862828Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YzI5NGM0NDAtYjc2NTM2Yy1kNGI1NGNmZS03ZWEyMTE1Zg==, ActorId: [2:1134:2937], ActorState: ExecuteState, TraceId: 01jqpgvyec372x9qhk4bny783w, Create QueryResponse for 
error on request, msg: Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR 2025-03-31T16:34:31.863069Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 20, sender: [2:594:2519], selfId: [2:57:2104], source: [2:1134:2937] Send scheduled event back 2025-03-31T16:34:31.863213Z node 2 :KQP_COMPILE_ACTOR NOTICE: Compilation timeout, self: [2:1136:2939], cluster: db, database: , text: "SELECT * FROM `/Root/Table`;", startTime: 2025-03-31T16:34:30.988999Z 2025-03-31T16:34:31.863289Z node 2 :KQP_COMPILE_ACTOR DEBUG: Send response, self: [2:1136:2939], owner: [2:86:2133], status: TIMEOUT, issues:
: Error: Query compilation timed out. , uid: de400da9-faf860df-3accecd8-ed79cbe Send captured event back Send captured event back Send captured event back Send captured event back Send captured event back 2025-03-31T16:34:32.839350Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488012941000357707:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:32.839412Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000c60/r3tmp/tmpnkNxAz/pdisk_1.dat 2025-03-31T16:34:32.988564Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:34:33.007020Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:33.007120Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:33.011409Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13134, node 3 2025-03-31T16:34:33.096879Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:33.096917Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:33.096934Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:33.097114Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15938 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:34:33.321292Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:36.206572Z node 3 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-03-31T16:34:36.220433Z node 3 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /Root, empty 2025-03-31T16:34:36.221165Z node 3 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-03-31T16:34:36.221191Z node 3 :KQP_PROXY DEBUG: Updated table service config. 
2025-03-31T16:34:36.221214Z node 3 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-03-31T16:34:36.221250Z node 3 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /Root, empty 2025-03-31T16:34:36.221360Z node 3 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T16:34:36.221409Z node 3 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T16:34:36.221432Z node 3 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T16:34:36.228306Z node 3 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 >> KqpSnapshotIsolation::TConflictWriteOltp ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpSqlIn::InWithCast [GOOD] Test command err: Trying to start YDB, gRPC: 23533, MsgBus: 65392 2025-03-31T16:33:48.120280Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012752506259217:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:48.120409Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00085f/r3tmp/tmp6GSixR/pdisk_1.dat 2025-03-31T16:33:48.391507Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23533, node 1 2025-03-31T16:33:48.449413Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:48.449445Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:48.449452Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:48.449595Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:33:48.475364Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:48.475517Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:48.477176Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:65392 TClient is connected to server localhost:65392 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:33:48.871145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:48.889428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:48.999578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:49.127296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:49.186012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:50.668266Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012761096195587:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:50.668343Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:50.982477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:33:51.008745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:33:51.034820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:33:51.059440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:33:51.103563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:33:51.131187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:33:51.165194Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012765391163394:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:51.165268Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:51.165348Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012765391163399:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:51.168381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:33:51.175513Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012765391163401:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:33:51.269627Z node 1 :TX_PROXY ERROR: Actor# [1:7488012765391163456:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:33:51.988771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:33:52.060362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:33:52.106021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T16:33:53.120271Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488012752506259217:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:53.151385Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:6:26: Warning: At function: Filter, At function: Coalesce
:7:49: Warning: At function: And
:7:41: Warning: At function: SqlIn
:7:41: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:6:26: Warning: At function: Filter, At function: Coalesce
:7:49: Warning: At function: And
:7:41: Warning: At function: SqlIn
:7:41: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:6:26: Warning: At function: Filter, At function: Coalesce
:7:49: Warning: At function: And
:7:41: Warning: At function: SqlIn
:7:41: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:6:26: Warning: At function: Filter, At function: Coalesce
:7:49: Warning: At function: And
:7:41: Warning: At function: SqlIn
:7:41: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 Trying to start YDB, gRPC: 20433, MsgBus: 15225 2025-03-31T16:33:55.864308Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012780965706838:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:55.864446Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00085f/r3tmp/tmpMLhqki/pdisk_1.dat 2025-03-31T16:33:55.964308Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20433, node 2 2025-03-31T16:33:56.001732Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:56.001819Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:56.003471Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:33:56.020885Z node 2 :NET_CLASSI ... uboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:34:25.768141Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:34:25.843680Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:34:25.889524Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:34:25.988646Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:34:26.036917Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:34:26.100304Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7488012914802011675:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:26.100413Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:26.100869Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7488012914802011680:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:26.105376Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:34:26.114765Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7488012914802011682:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:34:26.189831Z node 5 :TX_PROXY ERROR: Actor# [5:7488012914802011735:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:34:26.610696Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7488012893327172896:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:26.610807Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
: Warning: Type annotation, code: 1030
:4:17: Warning: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
:5:22: Warning: At function: Filter, At function: Coalesce
:6:23: Warning: At function: SqlIn
:6:23: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 Trying to start YDB, gRPC: 20813, MsgBus: 4676 2025-03-31T16:34:28.886184Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7488012922031062485:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:28.886274Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00085f/r3tmp/tmpmkL11s/pdisk_1.dat 2025-03-31T16:34:29.016716Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:34:29.033040Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:29.033180Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:29.034748Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20813, node 6 2025-03-31T16:34:29.106046Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:29.106084Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:29.106095Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:29.106278Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4676 TClient is connected to server localhost:4676 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:34:29.902217Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:29.910493Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:34:29.923240Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:34:30.020666Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:30.283653Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:30.427810Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:33.656574Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488012943505900725:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:33.656711Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:33.719016Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:34:33.772293Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:34:33.834616Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:34:33.881570Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:34:33.886499Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7488012922031062485:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:33.886594Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:34:33.926248Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:34:33.970814Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:34:34.032881Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488012947800868534:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:34.033033Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:34.033381Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488012947800868539:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:34.038297Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:34:34.059390Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7488012947800868541:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:34:34.155850Z node 6 :TX_PROXY ERROR: Actor# [6:7488012947800868598:3453] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |96.7%| [TA] $(B)/ydb/core/kqp/proxy_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.7%| [TA] {RESULT} $(B)/ydb/core/kqp/proxy_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::SplitByLoadWithDeletes [GOOD] Test command err: 2025-03-31T16:34:11.228975Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012848398017995:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:11.229025Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00175b/r3tmp/tmptsihrX/pdisk_1.dat 2025-03-31T16:34:11.641002Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:34:11.661353Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:11.661462Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:11.670994Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6146, node 1 2025-03-31T16:34:11.780015Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:11.780043Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:11.780055Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:11.780183Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27305 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:34:12.087172Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
TClient is connected to server localhost:27305 2025-03-31T16:34:14.374015Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012861282920909:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:14.374151Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:14.641018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:34:14.844877Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012861282921094:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:14.844966Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:14.863920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743438854780 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) Table has 1 shards TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743438854780 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) 2025-03-31T16:34:14.999950Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012861282921193:2382], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:15.000058Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:15.000547Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012865577888494:2385], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:15.000984Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012865577888498:2387], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:15.001049Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:15.001670Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012865577888502:2390], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:15.004741Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012865577888512:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:15.004856Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012865577888520:2395], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:15.004922Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012865577888521:2396], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:15.004970Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:15.005344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata, operationId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:34:15.005598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710661:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-31T16:34:15.005640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata/workload_manager, operationId: 281474976710661:1, at schemeshard: 72057594046644480 2025-03-31T16:34:15.005778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710661:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-31T16:34:15.005804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata/workload_manager/pools, operationId: 281474976710661:2, at schemeshard: 72057594046644480 2025-03-31T16:34:15.005908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710661:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-31T16:34:15.006004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976710661:3, path# /Root/.metadata/workload_manager/pools/default 2025-03-31T16:34:15.006341Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710661:3 1 -> 128 2025-03-31T16:34:15.006364Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012865577888548:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:15.006457Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:15.006532Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012865577888550:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:15.006974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710661:4, propose status:StatusAcce ... t 2025-03-31T16:34:34.923740Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976722935. Ctx: { TraceId: 01jqpgw28ybzv261pvswtd1dk5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDIwZTdmZGUtNDk4YjdjNDYtNzEwMDU0YzItNjYyZjhkOTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:34.925145Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976722930. Ctx: { TraceId: 01jqpgw28z9mt5cscpsh1nvkrh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjRkZGE5MmQtODcwYjRjMWQtOGM5YWUxYjctMjdjMzk2YWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:34.932320Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976722936. Ctx: { TraceId: 01jqpgw2915vsx9ft1dr2djpc9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDQwZWFlNDEtOWMxOGVkMDItNjNiYWY0N2EtNTM0NjIwNDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:34.945713Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976722937. Ctx: { TraceId: 01jqpgw29p7fmy5dwzvn1y2e78, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGQ0OTNiNGItNjMzZWY5MzMtNWQ4ZDllZDAtNDUwMzYzZmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:34.947267Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976722938. Ctx: { TraceId: 01jqpgw29pcg7ke57gjkcms5m5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjVjN2ZmMmItZjg3ZjU5YmItZDEwNmMxMmUtYWQ3N2VmMjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:34.950341Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976722940. Ctx: { TraceId: 01jqpgw29xdpd3a2sdnxv17jrx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDRmNzEwNGUtNjVhZTgyNjgtOTg4NTk2Ni1jYzVkMTU0NQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:34.950413Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976722939. Ctx: { TraceId: 01jqpgw29vfpmazg5rt7kfxx99, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGU1YzMyZGYtMWQ0ZTMxNmItNjZjZjllNGItOWYwZWE5MWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:34.952696Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976722941. Ctx: { TraceId: 01jqpgw2a5frgzkqn2pwv6tahv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTBjNWMzZS1jNmIyYzU1Zi0yNThkZGU0OS1jM2U5YTNjOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:34.952973Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976722942. Ctx: { TraceId: 01jqpgw2a45w5xwfx5d9pvh43p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWE2NzdhMGEtMjA3MmI3NzYtYzQyNGEwZjctNmU1NmI3ZWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:34.955783Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976722943. Ctx: { TraceId: 01jqpgw2a6c1aebbgcxhhcrh3y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDIwZTdmZGUtNDk4YjdjNDYtNzEwMDU0YzItNjYyZjhkOTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-31T16:34:34.960935Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976722944. Ctx: { TraceId: 01jqpgw2ab5489e8yw317efrjc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDQwZWFlNDEtOWMxOGVkMDItNjNiYWY0N2EtNTM0NjIwNDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:34.961706Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976722945. Ctx: { TraceId: 01jqpgw2ab6fjtf3p3810s6hnq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjRkZGE5MmQtODcwYjRjMWQtOGM5YWUxYjctMjdjMzk2YWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:34.962880Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976722946. Ctx: { TraceId: 01jqpgw2ab7778n22h4egebn5p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTFmN2ZmZTItZjA3ODFmOGItNTE0NzZkZTgtODlhNDE0M2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:34.965418Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976722947. Ctx: { TraceId: 01jqpgw2ag2b4mk24h17f8447x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGQ0OTNiNGItNjMzZWY5MzMtNWQ4ZDllZDAtNDUwMzYzZmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:34.965638Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976722948. Ctx: { TraceId: 01jqpgw2aj0h9t3p9xfdz0vxa7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDRmNzEwNGUtNjVhZTgyNjgtOTg4NTk2Ni1jYzVkMTU0NQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:34.966862Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976722949. Ctx: { TraceId: 01jqpgw2ajc2dvzsg10w95wap8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGU1YzMyZGYtMWQ0ZTMxNmItNjZjZjllNGItOWYwZWE5MWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls request: /Root/Foo 2025-03-31T16:34:34.970945Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976722951. Ctx: { TraceId: 01jqpgw2ar1ktcgajyfsapd5s5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWE2NzdhMGEtMjA3MmI3NzYtYzQyNGEwZjctNmU1NmI3ZWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:34.972091Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976722950. Ctx: { TraceId: 01jqpgw2ap2mggyreppgqfkjmz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjVjN2ZmMmItZjg3ZjU5YmItZDEwNmMxMmUtYWQ3N2VmMjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:34.972263Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976722952. Ctx: { TraceId: 01jqpgw2ar2nzs4j9x0rhytgxt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTBjNWMzZS1jNmIyYzU1Zi0yNThkZGU0OS1jM2U5YTNjOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743438854780 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) 2025-03-31T16:34:34.978838Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976722953. Ctx: { TraceId: 01jqpgw2aybq6eepssx9jfz02n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDQwZWFlNDEtOWMxOGVkMDItNjNiYWY0N2EtNTM0NjIwNDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:34.979419Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976722954. Ctx: { TraceId: 01jqpgw2ata4nzckpgqrfqwxy1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDIwZTdmZGUtNDk4YjdjNDYtNzEwMDU0YzItNjYyZjhkOTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:34.984829Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976722955. Ctx: { TraceId: 01jqpgw2b155dc6pq7rd7sdmgr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjRkZGE5MmQtODcwYjRjMWQtOGM5YWUxYjctMjdjMzk2YWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:34.986674Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976722956. Ctx: { TraceId: 01jqpgw2b32sk3a0yzd15cqct6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTFmN2ZmZTItZjA3ODFmOGItNTE0NzZkZTgtODlhNDE0M2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:34.988074Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976722957. Ctx: { TraceId: 01jqpgw2b3696ya04j2ahta320, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDRmNzEwNGUtNjVhZTgyNjgtOTg4NTk2Ni1jYzVkMTU0NQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-31T16:34:34.990361Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037889 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743438854780 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) 2025-03-31T16:34:35.090583Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-03-31T16:34:35.090761Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037889 followerId=0, pathId 2: RowCount 0, DataSize 0 2025-03-31T16:34:35.091041Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-03-31T16:34:35.256308Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037889 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 640 rowCount 2 cpuUsage 0 2025-03-31T16:34:35.260938Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037890 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 17536 rowCount 202 cpuUsage 0 2025-03-31T16:34:35.356271Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 2 2025-03-31T16:34:35.356474Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037889 followerId=0, pathId 2: RowCount 2, DataSize 640 2025-03-31T16:34:35.356615Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037890 followerId=0, pathId 2: RowCount 202, DataSize 17536 2025-03-31T16:34:35.356867Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 Table has 2 shards >> DataShardReadTableSnapshots::ReadTableUUID [GOOD] >> KqpSinkLocks::EmptyRange >> KqpQueryService::TableSink_ReplaceFromSelectLargeOlap >> KqpTx::CommitRoTx_TLI [GOOD] >> KqpQueryService::TableSink_HtapComplex+withOltpSink [GOOD] >> KqpQueryService::TableSink_HtapComplex-withOltpSink >> KqpSnapshotIsolation::TConflictReadWriteOltp >> KqpQueryService::PeriodicTaskInSessionPoolSessionCloseByIdle >> TSchemeshardBackgroundCleaningTest::CreateTableInTemp [GOOD] >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessages [GOOD] >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessagesNoClusterDiscovery >> KqpService::CloseSessionsWithLoad ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_table/unittest >> DataShardReadTableSnapshots::ReadTableUUID [GOOD] Test command err: 
2025-03-31T16:34:31.893684Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:34:31.893911Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:34:31.894071Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001375/r3tmp/tmp5CVnOI/pdisk_1.dat 2025-03-31T16:34:32.250082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:34:32.298547Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:34:32.340842Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-31T16:34:32.341749Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:32.341865Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:32.341985Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-03-31T16:34:32.353320Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:34:32.436649Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-03-31T16:34:32.436723Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-03-31T16:34:32.436879Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-03-31T16:34:32.541567Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-03-31T16:34:32.541645Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T16:34:32.542153Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-03-31T16:34:32.542233Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:34:32.542447Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:34:32.542638Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T16:34:32.542721Z node 1 :TX_PROXY DEBUG: Actor# 
[1:641:2549] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-03-31T16:34:32.542986Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvClientConnected 2025-03-31T16:34:32.544398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:34:32.545465Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-03-31T16:34:32.545552Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-03-31T16:34:32.585333Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:34:32.586459Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:34:32.587122Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-31T16:34:32.587371Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:34:32.634016Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:34:32.634820Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:34:32.634930Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:34:32.636625Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:34:32.636707Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:34:32.636762Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:34:32.637143Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:34:32.637284Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:34:32.637380Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-31T16:34:32.648124Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:34:32.678574Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T16:34:32.678801Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:34:32.678950Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-31T16:34:32.678992Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:34:32.679053Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T16:34:32.679106Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:34:32.679319Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
2025-03-31T16:34:32.679362Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:34:32.679624Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T16:34:32.679717Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T16:34:32.679831Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:34:32.679868Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:34:32.679909Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-31T16:34:32.679942Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-31T16:34:32.679972Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-31T16:34:32.679999Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:34:32.680033Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:34:32.680125Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:34:32.680169Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:34:32.680212Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-31T16:34:32.680312Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-03-31T16:34:32.680358Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T16:34:32.680484Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:34:32.680694Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-31T16:34:32.680737Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T16:34:32.680843Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T16:34:32.680902Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-31T16:34:32.680950Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-31T16:34:32.680990Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-03-31T16:34:32.681016Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-31T16:34:32.681298Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-31T16:34:32.681331Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-31T16:34:32.681366Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-31T16:34:32.681402Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 
72075186224037888 on unit FinishPropose 2025-03-31T16:34:32.681466Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-31T16:34:32.681520Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-31T16:34:32.681565Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-31T16:34:32.681594Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-31T16:34:32.681617Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-31T16:34:32.683234Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-31T16:34:32.683300Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:34:32.694086Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Comple ... _DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearancePending 2025-03-31T16:34:38.658720Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287940, Sender [2:864:2693], Recipient [2:667:2571]: NKikimrTx.TEvStreamClearanceResponse TxId: 281474976715662 Cleared: true 2025-03-31T16:34:38.658764Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearanceResponse 2025-03-31T16:34:38.658855Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:667:2571], Recipient [2:667:2571]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:34:38.658880Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:34:38.658935Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:34:38.658965Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-03-31T16:34:38.659009Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715662] at 72075186224037888 for WaitForStreamClearance 2025-03-31T16:34:38.659045Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037888 on unit WaitForStreamClearance 2025-03-31T16:34:38.659103Z node 2 :TX_DATASHARD TRACE: Got stream clearance for [0:281474976715662] at 72075186224037888 2025-03-31T16:34:38.659138Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037888 is Executed 2025-03-31T16:34:38.659168Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037888 executing on unit WaitForStreamClearance 2025-03-31T16:34:38.659194Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715662] at 72075186224037888 to execution unit ReadTableScan 2025-03-31T16:34:38.659220Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037888 on unit ReadTableScan 2025-03-31T16:34:38.659431Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037888 is Continue 2025-03-31T16:34:38.659465Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-03-31T16:34:38.659491Z node 2 :TX_DATASHARD TRACE: Check 
candidate unit PlanQueue at 72075186224037888 2025-03-31T16:34:38.659520Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-31T16:34:38.659544Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-31T16:34:38.659600Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:34:38.660132Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [2:876:2704], Recipient [2:667:2571]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-03-31T16:34:38.660171Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2025-03-31T16:34:38.660232Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Received TEvStreamQuotaRequest from ShardId# 72075186224037888 2025-03-31T16:34:38.660462Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2025-03-31T16:34:38.660531Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Reserving quota 1 messages for ShardId# 72075186224037888 2025-03-31T16:34:38.660611Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715662, MessageQuota: 1 2025-03-31T16:34:38.660781Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715662, Size: 38, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-03-31T16:34:38.660885Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Received stream data from ShardId# 72075186224037888 2025-03-31T16:34:38.660953Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715662, PendingAcks: 0 2025-03-31T16:34:38.661002Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Received TEvStreamQuotaRequest from ShardId# 72075186224037888 2025-03-31T16:34:38.661396Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2025-03-31T16:34:38.661427Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Reserving quota 1 messages for ShardId# 72075186224037888 2025-03-31T16:34:38.661466Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715662, MessageQuota: 1 2025-03-31T16:34:38.661527Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715662, Size: 38, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-03-31T16:34:38.661578Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Received stream data from ShardId# 72075186224037888 2025-03-31T16:34:38.661625Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715662, PendingAcks: 0 2025-03-31T16:34:38.661650Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Received TEvStreamQuotaRequest from ShardId# 72075186224037888 2025-03-31T16:34:38.661844Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2025-03-31T16:34:38.661870Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Reserving quota 1 messages for ShardId# 72075186224037888 2025-03-31T16:34:38.661926Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 
72075186224037888, TxId: 281474976715662, MessageQuota: 1 2025-03-31T16:34:38.661984Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715662, Size: 38, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-03-31T16:34:38.662032Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Received stream data from ShardId# 72075186224037888 2025-03-31T16:34:38.662083Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715662, PendingAcks: 0 2025-03-31T16:34:38.662124Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Received TEvStreamQuotaRequest from ShardId# 72075186224037888 2025-03-31T16:34:38.662327Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2025-03-31T16:34:38.662353Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Reserving quota 1 messages for ShardId# 72075186224037888 2025-03-31T16:34:38.662385Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715662, MessageQuota: 1 2025-03-31T16:34:38.662445Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715662, MessageQuota: 1 2025-03-31T16:34:38.662594Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-03-31T16:34:38.662623Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715662, at: 72075186224037888 2025-03-31T16:34:38.662672Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Received TEvStreamQuotaRelease from ShardId# 72075186224037888 2025-03-31T16:34:38.662702Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Released quota 1 reserved messages from ShardId# 72075186224037888 2025-03-31T16:34:38.662838Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:667:2571], Recipient [2:667:2571]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:34:38.662917Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:34:38.662994Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:34:38.663038Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-03-31T16:34:38.663104Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715662] at 72075186224037888 for ReadTableScan 2025-03-31T16:34:38.663143Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037888 on unit ReadTableScan 2025-03-31T16:34:38.663185Z node 2 :TX_DATASHARD TRACE: ReadTable scan complete for [0:281474976715662] at 72075186224037888 error: , IsFatalError: 0 2025-03-31T16:34:38.663232Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037888 is Executed 2025-03-31T16:34:38.663275Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037888 executing on unit ReadTableScan 2025-03-31T16:34:38.663311Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715662] at 72075186224037888 to execution unit FinishPropose 2025-03-31T16:34:38.663344Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037888 on unit FinishPropose 2025-03-31T16:34:38.663381Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037888 is DelayComplete 
2025-03-31T16:34:38.663415Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037888 executing on unit FinishPropose 2025-03-31T16:34:38.663449Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715662] at 72075186224037888 to execution unit CompletedOperations 2025-03-31T16:34:38.663482Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037888 on unit CompletedOperations 2025-03-31T16:34:38.663522Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037888 is Executed 2025-03-31T16:34:38.663545Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037888 executing on unit CompletedOperations 2025-03-31T16:34:38.663570Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:281474976715662] at 72075186224037888 has finished 2025-03-31T16:34:38.663605Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:34:38.663636Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-03-31T16:34:38.663670Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-31T16:34:38.663723Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-31T16:34:38.663799Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:34:38.663839Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715662] at 72075186224037888 on unit FinishPropose 2025-03-31T16:34:38.663877Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715662 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-03-31T16:34:38.663952Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:34:38.664109Z node 2 :TX_PROXY DEBUG: [ReadTable [2:864:2693] TxId# 281474976715661] Received stream complete from ShardId# 72075186224037888 2025-03-31T16:34:38.664197Z node 2 :TX_PROXY INFO: [ReadTable [2:864:2693] TxId# 281474976715661] RESPONSE Status# ExecComplete prepare time: 0.015866s execute time: 0.108153s total time: 0.124019s 2025-03-31T16:34:38.664528Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:864:2693], Recipient [2:667:2571]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715661 >> KqpDocumentApi::RestrictWriteExplicitPrepare [GOOD] >> KqpDocumentApi::Scripting >> KqpQueryService::ReplaceIntoWithDefaultValue >> KqpSinkLocks::EmptyRangeOlap >> Compression::WriteWithMixedCodecs [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithAbort ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::CommitRoTx_TLI [GOOD] Test command err: Trying to start YDB, gRPC: 19401, MsgBus: 5244 2025-03-31T16:34:28.590415Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012921604937777:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:28.590481Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001266/r3tmp/tmplAPOE9/pdisk_1.dat 2025-03-31T16:34:28.902155Z node 1 
:IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19401, node 1 2025-03-31T16:34:28.963743Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:28.963871Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:28.964711Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:28.964737Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:28.964745Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:28.964846Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:34:28.965909Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5244 TClient is connected to server localhost:5244 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:34:29.478091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:29.504961Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:29.654400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:29.881603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:29.967153Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:31.824044Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012934489841445:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:31.824202Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:32.144790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:34:32.178455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:34:32.211993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:34:32.241158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:34:32.270537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:34:32.340500Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:34:32.384509Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012938784809257:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:32.384592Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:32.384793Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012938784809262:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:32.388675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:34:32.399290Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012938784809264:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:34:32.487417Z node 1 :TX_PROXY ERROR: Actor# [1:7488012938784809317:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:34:33.590849Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488012921604937777:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:33.590931Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 25770, MsgBus: 4031 2025-03-31T16:34:34.232205Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012949837465049:2080];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:34.232339Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001266/r3tmp/tmp1AyYLM/pdisk_1.dat 2025-03-31T16:34:34.331960Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:34:34.356875Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:34.356972Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:34.362408Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25770, node 2 2025-03-31T16:34:34.398403Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:34.398422Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:34.398427Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:34.398516Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4031 TClient is connected to server localhost:4031 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:34:34.851234Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:34.874392Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:34.960203Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:35.110481Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:35.174521Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:37.382421Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012962722368658:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:37.382519Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:37.428749Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:34:37.461034Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:34:37.493389Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:34:37.526462Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:34:37.560713Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:34:37.635865Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:34:37.715968Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012962722369177:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:37.716042Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:37.716154Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012962722369182:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:37.719402Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:34:37.729676Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488012962722369184:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:34:37.784696Z node 2 :TX_PROXY ERROR: Actor# [2:7488012962722369236:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_background_cleaning/unittest >> TSchemeshardBackgroundCleaningTest::CreateTableInTemp [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:224:2060] recipient: [1:218:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:224:2060] recipient: [1:218:2142] Leader for TabletID 72057594046678944 is [1:235:2153] sender: [1:236:2060] recipient: [1:218:2142] 2025-03-31T16:33:31.374206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:33:31.374275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:33:31.374303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:33:31.374328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:33:31.374420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:33:31.374445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:33:31.374484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:33:31.374555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:33:31.374881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:33:31.438167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:33:31.438232Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:33:31.452433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:33:31.452588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:33:31.452733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:33:31.459105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:33:31.459323Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:33:31.459952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:31.460228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:33:31.464058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:31.465422Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:33:31.465495Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:31.465650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:33:31.465690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:33:31.465726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:33:31.465913Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:33:31.472425Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:235:2153] sender: [1:348:2060] recipient: [1:17:2064] 2025-03-31T16:33:31.598842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:33:31.599107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:31.599325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:33:31.599558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:33:31.599623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:31.602175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:31.602310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:33:31.602504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:31.602555Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:33:31.602594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:33:31.602629Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:33:31.604820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:31.604879Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:33:31.604936Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:33:31.606947Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:31.606999Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:31.607049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:31.607119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:33:31.617696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:33:31.620119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:33:31.620349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:33:31.621468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:31.621625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 243 RawX2: 4294969454 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:33:31.621674Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:31.621958Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:33:31.622009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:31.622185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:33:31.622269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:33:31.624764Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:33:31.624813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:33:31.624990Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:31.625040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:315:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:33:31.625120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:31.625159Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
[72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:33:31.625252Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:33:31.625283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:31.625339Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:33:31.625370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:31.625414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:33:31.625458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:31.625509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:33:31.625541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:33:31.625610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:33:31.625650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:33:31.625694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:33:31.628042Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:33:31.628165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:33:31.628199Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
er [7:235:2153], Recipient [7:974:2743]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 104 2025-03-31T16:34:39.580530Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2025-03-31T16:34:39.580577Z node 7 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 104 datashard 72075186233409550 state Ready 2025-03-31T16:34:39.580655Z node 7 :TX_DATASHARD DEBUG: 72075186233409550 Got TEvSchemaChangedResult from SS at 72075186233409550 2025-03-31T16:34:39.580977Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [7:235:2153], Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-03-31T16:34:39.581031Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-03-31T16:34:39.581115Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:2, at schemeshard: 72057594046678944 2025-03-31T16:34:39.581175Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:2 ProgressState 2025-03-31T16:34:39.581275Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-03-31T16:34:39.581303Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:2 progress is 2/3 2025-03-31T16:34:39.581334Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 2/3 2025-03-31T16:34:39.581372Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:2 progress is 2/3 2025-03-31T16:34:39.581412Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 2/3 2025-03-31T16:34:39.581475Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 2/3, is published: true 2025-03-31T16:34:39.581684Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-03-31T16:34:39.581712Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-31T16:34:39.581818Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-31T16:34:39.581845Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-31T16:34:39.582004Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-31T16:34:39.582029Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-31T16:34:39.582049Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 104:0 2025-03-31T16:34:39.582099Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:978:2746] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 104 at schemeshard: 72057594046678944 2025-03-31T16:34:39.582168Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269552132, Sender [7:235:2153], Recipient [7:978:2746]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 104 2025-03-31T16:34:39.582191Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2025-03-31T16:34:39.582224Z node 7 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 104 datashard 72075186233409551 state Ready 2025-03-31T16:34:39.582258Z node 7 :TX_DATASHARD DEBUG: 72075186233409551 Got TEvSchemaChangedResult from SS at 72075186233409551 2025-03-31T16:34:39.582449Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [7:235:2153], 
Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-03-31T16:34:39.582490Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-03-31T16:34:39.582536Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-31T16:34:39.582562Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2025-03-31T16:34:39.582607Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-03-31T16:34:39.582625Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 3/3 2025-03-31T16:34:39.582643Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2025-03-31T16:34:39.582666Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 3/3 2025-03-31T16:34:39.582683Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2025-03-31T16:34:39.582706Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 3/3, is published: true 2025-03-31T16:34:39.582764Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [7:578:2404] message: TxId: 104 2025-03-31T16:34:39.582805Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2025-03-31T16:34:39.582838Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-03-31T16:34:39.582878Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-03-31T16:34:39.582974Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 4 2025-03-31T16:34:39.583002Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:1 2025-03-31T16:34:39.583016Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:1 2025-03-31T16:34:39.583034Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 11] was 3 2025-03-31T16:34:39.583059Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:2 2025-03-31T16:34:39.583077Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:2 2025-03-31T16:34:39.583164Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 12] was 3 2025-03-31T16:34:39.586659Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-31T16:34:39.586810Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-31T16:34:39.586927Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:578:2404] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 104 at schemeshard: 72057594046678944 2025-03-31T16:34:39.587130Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-03-31T16:34:39.587179Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [7:1032:2785] 2025-03-31T16:34:39.587445Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:1034:2787], Recipient [7:235:2153]: NKikimr::TEvTabletPipe::TEvServerDisconnected 
2025-03-31T16:34:39.587488Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-03-31T16:34:39.587516Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 104 TestModificationResults wait txId: 105 2025-03-31T16:34:39.588528Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [8:552:2102], Recipient [7:235:2153] 2025-03-31T16:34:39.588579Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-03-31T16:34:39.591023Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/tmp" OperationType: ESchemeOpCreateIndexedTable CreateIndexedTable { TableDescription { Name: "NotTempTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "value" Type: "Utf8" } KeyColumnNames: "key" } IndexDescription { Name: "ValueIndex" KeyColumnNames: "value" } } AllowCreateInTempDir: false } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:34:39.591513Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 105:0, explain: Check failed: path: '/MyRoot/tmp', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeDir, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2025-03-31T16:34:39.591569Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 105:1, propose status:StatusPreconditionFailed, reason: Check failed: path: '/MyRoot/tmp', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeDir, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2025-03-31T16:34:39.593235Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-03-31T16:34:39.596098Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 105, response: Status: StatusPreconditionFailed Reason: "Check failed: path: \'/MyRoot/tmp\', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeDir, state: EPathStateNoChanges)" TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:34:39.596315Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 105, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Check failed: path: '/MyRoot/tmp', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeDir, state: EPathStateNoChanges), operation: CREATE TABLE WITH INDEXES, path: /MyRoot/tmp/NotTempTable 2025-03-31T16:34:39.596369Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-03-31T16:34:39.596823Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-03-31T16:34:39.596871Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-03-31T16:34:39.597319Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [7:1102:2855], Recipient [7:235:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:34:39.597414Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:34:39.597453Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 
72057594046678944 2025-03-31T16:34:39.597611Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [7:578:2404], Recipient [7:235:2153]: NKikimrScheme.TEvNotifyTxCompletion TxId: 105 2025-03-31T16:34:39.597646Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-03-31T16:34:39.597721Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-03-31T16:34:39.597864Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-03-31T16:34:39.597923Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [7:1100:2853] 2025-03-31T16:34:39.598138Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:1102:2855], Recipient [7:235:2153]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-03-31T16:34:39.598173Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-03-31T16:34:39.598228Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 105 >> KqpTx::LocksAbortOnCommit >> KqpNewEngine::IdxLookupExtractMembers [GOOD] >> KqpNewEngine::FullScanCount >> KqpDocumentApi::AllowRead [GOOD] >> KqpDocumentApi::RestrictAlter >> DataShardReadTableSnapshots::CorruptedDyNumber [GOOD] >> KqpQueryService::ExecuteQuery [GOOD] >> KqpQueryService::ExecuteDDLStatusCodeSchemeError >> KqpQueryService::ExecuteQueryPg [GOOD] >> KqpQueryService::ExecuteQueryPgTableSelect >> KqpQueryService::TableSink_OlapUpdate [GOOD] >> KqpQueryService::TableSink_OlapOrder >> KqpQueryService::StreamExecuteQuery ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_table/unittest >> DataShardReadTableSnapshots::CorruptedDyNumber [GOOD] Test command err: 2025-03-31T16:34:34.025339Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:34:34.025567Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:34:34.025756Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001363/r3tmp/tmpAUCEHg/pdisk_1.dat 2025-03-31T16:34:34.398440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:34:34.456416Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:34:34.495990Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-31T16:34:34.497048Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:34.497200Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:34.497384Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-03-31T16:34:34.508701Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:34:34.593162Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-03-31T16:34:34.593259Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-03-31T16:34:34.593417Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-03-31T16:34:34.727657Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-03-31T16:34:34.727748Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T16:34:34.728357Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-03-31T16:34:34.728459Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:34:34.728777Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:34:34.728977Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T16:34:34.729080Z node 1 :TX_PROXY DEBUG: Actor# 
[1:641:2549] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-03-31T16:34:34.729387Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvClientConnected 2025-03-31T16:34:34.730878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:34:34.731932Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-03-31T16:34:34.732028Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-03-31T16:34:34.763730Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:34:34.764903Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:34:34.765372Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-31T16:34:34.765589Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:34:34.813613Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:34:34.814394Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:34:34.814517Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:34:34.816286Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:34:34.816357Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:34:34.816413Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:34:34.816779Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:34:34.816913Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:34:34.816986Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-31T16:34:34.827729Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:34:34.865862Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T16:34:34.866061Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:34:34.866179Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-31T16:34:34.866224Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:34:34.866284Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T16:34:34.866334Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:34:34.866536Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
2025-03-31T16:34:34.866576Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:34:34.866825Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T16:34:34.866964Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T16:34:34.867104Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:34:34.867145Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:34:34.867188Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-31T16:34:34.867227Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-31T16:34:34.867258Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-31T16:34:34.867285Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:34:34.867330Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:34:34.867436Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:34:34.867492Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:34:34.867535Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-31T16:34:34.867638Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-03-31T16:34:34.867689Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T16:34:34.867798Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:34:34.868010Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-31T16:34:34.868060Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T16:34:34.868180Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T16:34:34.868233Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-31T16:34:34.868287Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-31T16:34:34.868330Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-03-31T16:34:34.868362Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-31T16:34:34.868666Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-31T16:34:34.868707Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-31T16:34:34.868743Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-31T16:34:34.868774Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 
72075186224037888 on unit FinishPropose 2025-03-31T16:34:34.868817Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-31T16:34:34.868863Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-31T16:34:34.868913Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-31T16:34:34.868946Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-31T16:34:34.868968Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-31T16:34:34.870651Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-31T16:34:34.870722Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:34:34.883946Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Comple ... 281474976715659] at 72075186224037888 is Executed 2025-03-31T16:34:40.933621Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715659] at 72075186224037888 executing on unit MakeScanSnapshot 2025-03-31T16:34:40.933644Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715659] at 72075186224037888 to execution unit WaitForStreamClearance 2025-03-31T16:34:40.933672Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715659] at 72075186224037888 on unit WaitForStreamClearance 2025-03-31T16:34:40.933736Z node 2 :TX_DATASHARD TRACE: Requested stream clearance from [2:745:2626] for [0:281474976715659] at 72075186224037888 2025-03-31T16:34:40.933869Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715659] at 72075186224037888 is Continue 2025-03-31T16:34:40.933934Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:34:40.934093Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287427, Sender [2:667:2571], Recipient [2:745:2626]: NKikimrTx.TEvStreamClearanceRequest TxId: 281474976715659 ShardId: 72075186224037888 KeyRange { From: "\001\000\000\000\000\200" To: "" FromInclusive: true ToInclusive: false } 2025-03-31T16:34:40.934133Z node 2 :TX_PROXY DEBUG: [ReadTable [2:745:2626] TxId# 281474976715658] Received TEvStreamClearanceRequest from ShardId# 72075186224037888 2025-03-31T16:34:40.934188Z node 2 :TX_PROXY DEBUG: [ReadTable [2:745:2626] TxId# 281474976715658] Sending TEvStreamClearanceResponse to [2:667:2571] ShardId# 72075186224037888 2025-03-31T16:34:40.934686Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287942, Sender [2:745:2626], Recipient [2:667:2571]: NKikimrTx.TEvStreamClearancePending TxId: 281474976715659 2025-03-31T16:34:40.934732Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearancePending 2025-03-31T16:34:40.935881Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287940, Sender [2:745:2626], Recipient [2:667:2571]: NKikimrTx.TEvStreamClearanceResponse TxId: 281474976715659 Cleared: true 2025-03-31T16:34:40.938579Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearanceResponse 2025-03-31T16:34:40.938830Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:667:2571], Recipient 
[2:667:2571]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:34:40.938883Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:34:40.939172Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:34:40.939219Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-03-31T16:34:40.939271Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715659] at 72075186224037888 for WaitForStreamClearance 2025-03-31T16:34:40.939327Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715659] at 72075186224037888 on unit WaitForStreamClearance 2025-03-31T16:34:40.939404Z node 2 :TX_DATASHARD TRACE: Got stream clearance for [0:281474976715659] at 72075186224037888 2025-03-31T16:34:40.939528Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715659] at 72075186224037888 is Executed 2025-03-31T16:34:40.939565Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715659] at 72075186224037888 executing on unit WaitForStreamClearance 2025-03-31T16:34:40.939602Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715659] at 72075186224037888 to execution unit ReadTableScan 2025-03-31T16:34:40.939634Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715659] at 72075186224037888 on unit ReadTableScan 2025-03-31T16:34:40.940059Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715659] at 72075186224037888 is Continue 2025-03-31T16:34:40.940154Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-03-31T16:34:40.940231Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-03-31T16:34:40.940265Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-31T16:34:40.940295Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-31T16:34:40.940368Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:34:40.940993Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [2:779:2647], Recipient [2:667:2571]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-03-31T16:34:40.941038Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2025-03-31T16:34:40.941180Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287428, Sender [2:779:2647], Recipient [2:745:2626]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715659 ShardId: 72075186224037888 2025-03-31T16:34:40.941241Z node 2 :TX_PROXY DEBUG: [ReadTable [2:745:2626] TxId# 281474976715658] Received TEvStreamQuotaRequest from ShardId# 72075186224037888 2025-03-31T16:34:40.941658Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287941, Sender [2:744:2626], Recipient [2:745:2626]: NKikimrTx.TEvStreamQuotaResponse TxId: 281474976715658 MessageSizeLimit: 1 ReservedMessages: 1 2025-03-31T16:34:40.941730Z node 2 :TX_PROXY DEBUG: [ReadTable [2:745:2626] TxId# 281474976715658] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2025-03-31T16:34:40.941782Z node 2 :TX_PROXY DEBUG: [ReadTable [2:745:2626] TxId# 281474976715658] Reserving quota 1 messages for ShardId# 72075186224037888 2025-03-31T16:34:40.941852Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 
72075186224037888, TxId: 281474976715659, MessageQuota: 1 2025-03-31T16:34:40.941977Z node 2 :TX_DATASHARD ERROR: Got scan fatal error: Invalid DyNumber binary representation 2025-03-31T16:34:40.942026Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715659, MessageQuota: 1 2025-03-31T16:34:40.942208Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-03-31T16:34:40.942249Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715659, at: 72075186224037888 2025-03-31T16:34:40.942369Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287429, Sender [2:779:2647], Recipient [2:745:2626]: NKikimrTx.TEvStreamQuotaRelease TxId: 281474976715659 ShardId: 72075186224037888 2025-03-31T16:34:40.942402Z node 2 :TX_PROXY DEBUG: [ReadTable [2:745:2626] TxId# 281474976715658] Received TEvStreamQuotaRelease from ShardId# 72075186224037888 2025-03-31T16:34:40.942448Z node 2 :TX_PROXY DEBUG: [ReadTable [2:745:2626] TxId# 281474976715658] Released quota 1 reserved messages from ShardId# 72075186224037888 2025-03-31T16:34:40.942623Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:667:2571], Recipient [2:667:2571]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:34:40.942677Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:34:40.942749Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:34:40.942795Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-03-31T16:34:40.942859Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715659] at 72075186224037888 for ReadTableScan 2025-03-31T16:34:40.942901Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715659] at 72075186224037888 on unit ReadTableScan 2025-03-31T16:34:40.942953Z node 2 :TX_DATASHARD TRACE: ReadTable scan complete for [0:281474976715659] at 72075186224037888 error: Invalid DyNumber binary representation, IsFatalError: 1 2025-03-31T16:34:40.943018Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715659] at 72075186224037888 is Executed 2025-03-31T16:34:40.943060Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715659] at 72075186224037888 executing on unit ReadTableScan 2025-03-31T16:34:40.943367Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715659] at 72075186224037888 to execution unit FinishPropose 2025-03-31T16:34:40.943421Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715659] at 72075186224037888 on unit FinishPropose 2025-03-31T16:34:40.943471Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715659] at 72075186224037888 is DelayComplete 2025-03-31T16:34:40.943552Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715659] at 72075186224037888 executing on unit FinishPropose 2025-03-31T16:34:40.943591Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715659] at 72075186224037888 to execution unit CompletedOperations 2025-03-31T16:34:40.943650Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715659] at 72075186224037888 on unit CompletedOperations 2025-03-31T16:34:40.943705Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715659] at 72075186224037888 is Executed 2025-03-31T16:34:40.943730Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715659] at 72075186224037888 executing on unit CompletedOperations 
2025-03-31T16:34:40.943757Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:281474976715659] at 72075186224037888 has finished 2025-03-31T16:34:40.943798Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:34:40.943836Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-03-31T16:34:40.943886Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-31T16:34:40.943924Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-31T16:34:40.944022Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:34:40.944084Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715659] at 72075186224037888 on unit FinishPropose 2025-03-31T16:34:40.944216Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715659 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: EXEC_ERROR 2025-03-31T16:34:40.944401Z node 2 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976715659 at tablet 72075186224037888 status: EXEC_ERROR errors: PROGRAM_ERROR (Invalid DyNumber binary representation) | 2025-03-31T16:34:40.944606Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:34:40.944942Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269550080, Sender [2:667:2571], Recipient [2:745:2626]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037888 Status: EXEC_ERROR Error { Kind: PROGRAM_ERROR Reason: "Invalid DyNumber binary representation" } TxId: 281474976715659 Step: 0 OrderId: 281474976715659 ExecLatency: 0 ProposeLatency: 0 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 857 } } 2025-03-31T16:34:40.945003Z node 2 :TX_PROXY DEBUG: [ReadTable [2:745:2626] TxId# 281474976715658] Received TEvProposeTransactionResult Status# EXEC_ERROR ShardId# 72075186224037888 2025-03-31T16:34:40.945099Z node 2 :TX_PROXY ERROR: [ReadTable [2:745:2626] TxId# 281474976715658] RESPONSE Status# ExecError shard: 72075186224037888 table: /Root/Table 2025-03-31T16:34:40.945495Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:745:2626], Recipient [2:667:2571]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1500 TxId: 281474976715658 >> KqpService::Shutdown >> KqpSinkMvcc::ReadOnlyTxCommitsOnConcurrentWrite |96.7%| [TA] $(B)/ydb/core/tx/datashard/ut_read_table/test-results/unittest/{meta.json ... results_accumulator.log} |96.7%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_table/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldNotCompactAfterDrop [GOOD] >> YdbTableSplit::SplitByLoadWithReadsMultipleSplitsWithData [GOOD] >> TAsyncIndexTests::CdcAndSplitWithReboots[TabletReboots] [GOOD] >> KqpQueryServiceScripts::ExecuteScript >> KqpQueryService::TableSink_OltpInsert [GOOD] >> KqpQueryService::TableSink_OltpInteractive ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldNotCompactAfterDrop [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:33:21.624281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:33:21.624364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:33:21.624415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:33:21.624457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:33:21.625537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:33:21.625597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:33:21.625724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:33:21.625797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:33:21.627381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:33:21.715870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:33:21.715930Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:33:21.720778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:33:21.720874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:33:21.721031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:33:21.726301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:33:21.726500Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:33:21.731511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:21.732560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:33:21.738466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 
72057594046678944 2025-03-31T16:33:21.747655Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:33:21.747741Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:21.747951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:33:21.748008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:33:21.748051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:33:21.748207Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:33:21.755605Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:33:21.890005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:33:21.890284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:21.890499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:33:21.890736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:33:21.890793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:21.893422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:21.893570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:33:21.893785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:21.893856Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:33:21.893895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:33:21.893949Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:33:21.896029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:21.896095Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:33:21.896135Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:33:21.898124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:21.898178Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:21.898229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:21.898291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:33:21.902329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:33:21.904311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:33:21.904521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:33:21.905763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:21.905935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:33:21.906011Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:21.906360Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:33:21.906426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:21.906571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:33:21.906645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:33:21.908776Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:33:21.908816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:33:21.908961Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:21.908989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:33:21.909182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:21.909224Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 
ProgressState 2025-03-31T16:33:21.909330Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:33:21.909378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:21.909434Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:33:21.909461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:21.909497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:33:21.909538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:21.909591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:33:21.909620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:33:21.909671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:33:21.909702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:33:21.909724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:33:21.911516Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:33:21.911654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:33:21.911702Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
e set wakeup after delta# 0 seconds 2025-03-31T16:34:42.446309Z node 3 :FLAT_TX_SCHEMESHARD INFO: [BackgroundCompaction] [Finished] Compaction completed for pathId# [OwnerId: 72057594046678944, LocalPathId: 2], datashard# 72075186233409546, shardIdx# 72057594046678944:1 in# 4 ms, with status# 0, next wakeup in# 0.996000s, rate# 1, in queue# 1 shards, waiting after compaction# 0 shards, running# 0 shards at schemeshard 72057594046678944 2025-03-31T16:34:42.449107Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [3:299:2288], Recipient [3:309:2296]: NKikimr::TEvTablet::TEvFollowerGcApplied 2025-03-31T16:34:42.463580Z node 3 :TX_DATASHARD DEBUG: Updated last full compaction of tablet# 72075186233409546, tableId# 2, last full compaction# 1970-01-01T00:00:19.152000Z 2025-03-31T16:34:42.516044Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435091, Sender [0:0:0], Recipient [3:125:2151]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-03-31T16:34:42.516136Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-03-31T16:34:42.516188Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 2025-03-31T16:34:42.516274Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-03-31T16:34:42.516321Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2025-03-31T16:34:42.516453Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 13940 row count 100 2025-03-31T16:34:42.516546Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Simple, is column=0, is olap=0 2025-03-31T16:34:42.516613Z node 3 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 2: RowCount 100, DataSize 13940 2025-03-31T16:34:42.516704Z node 3 :FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Updated shard# 72057594046678944:1 with partCount# 1, rowCount# 100, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:18.000000Z at schemeshard 72057594046678944 2025-03-31T16:34:42.516837Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-31T16:34:42.527471Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435091, Sender [0:0:0], Recipient [3:125:2151]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-03-31T16:34:42.527566Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-03-31T16:34:42.527604Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-03-31T16:34:42.902857Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:309:2296]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-03-31T16:34:42.903120Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:125:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:34:42.903174Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:34:42.903308Z node 3 :FLAT_TX_SCHEMESHARD 
TRACE: StateWork, received event# 271124999, Sender [3:125:2151], Recipient [3:125:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:34:42.903343Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:34:42.945216Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Operation queue wakeup 2025-03-31T16:34:42.945346Z node 3 :FLAT_TX_SCHEMESHARD INFO: [BackgroundCompaction] [Start] Compacting for pathId# [OwnerId: 72057594046678944, LocalPathId: 2], datashard# 72075186233409546, compactionInfo# {72057594046678944:1, SH# 1, Rows# 100, Deletes# 0, Compaction# 1970-01-01T00:00:18.000000Z}, next wakeup in# 0.000000s, rate# 1, in queue# 1 shards, waiting after compaction# 0 shards, running# 0 shards at schemeshard 72057594046678944 2025-03-31T16:34:42.945507Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Operation queue set wakeup after delta# 30 seconds 2025-03-31T16:34:42.945748Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553210, Sender [3:125:2151], Recipient [3:309:2296]: NKikimrTxDataShard.TEvCompactTable PathId { OwnerId: 72057594046678944 LocalId: 2 } CompactSinglePartedShards: true 2025-03-31T16:34:42.945941Z node 3 :TX_DATASHARD INFO: Started background compaction# 7 of 72075186233409546 tableId# 2 localTid# 1001, requested from [3:125:2151], partsCount# 1, memtableSize# 0, memtableWaste# 0, memtableRows# 0 2025-03-31T16:34:42.947060Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186233409546, table# 1001, finished edge# 6, ts 1970-01-01T00:00:19.152000Z 2025-03-31T16:34:42.947170Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186233409546, table# 1001, finished edge# 6, front# 7 2025-03-31T16:34:42.958860Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435080, Sender [3:1265:3202], Recipient [3:309:2296]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvAsyncTableStats 2025-03-31T16:34:42.958977Z node 3 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186233409546, FollowerId 0, tableId 2 2025-03-31T16:34:42.960415Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [3:309:2296], Recipient [3:125:2151]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409546 TableLocalId: 2 Generation: 2 Round: 5 TableStats { DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 19 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 20988 Memory: 124232 Storage: 14156 } ShardState: 2 UserTablePartOwners: 72075186233409546 NodeId: 3 StartTime: 40 TableOwnerId: 72057594046678944 FollowerId: 0 2025-03-31T16:34:42.960474Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-03-31T16:34:42.960525Z node 3 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 13940 rowCount 100 cpuUsage 2.0988 2025-03-31T16:34:42.960632Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 
72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] raw table stats: DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 19 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-03-31T16:34:42.960674Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-03-31T16:34:42.964715Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [3:299:2288], Recipient [3:309:2296]: NKikimr::TEvTablet::TEvFollowerGcApplied 2025-03-31T16:34:42.970777Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186233409546, table# 1001, finished edge# 7, ts 1970-01-01T00:00:20.152000Z 2025-03-31T16:34:42.970886Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186233409546, table# 1001, finished edge# 7, front# 7 2025-03-31T16:34:42.970930Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186233409546, table# 1001 sending TEvCompactTableResult to# [3:125:2151]pathId# [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T16:34:42.971307Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553211, Sender [3:309:2296], Recipient [3:125:2151]: NKikimrTxDataShard.TEvCompactTableResult TabletId: 72075186233409546 PathId { OwnerId: 72057594046678944 LocalId: 2 } Status: OK 2025-03-31T16:34:42.971352Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvCompactTableResult 2025-03-31T16:34:42.971424Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Operation queue set wakeup after delta# 0 seconds 2025-03-31T16:34:42.971483Z node 3 :FLAT_TX_SCHEMESHARD INFO: [BackgroundCompaction] [Finished] Compaction completed for pathId# [OwnerId: 72057594046678944, LocalPathId: 2], datashard# 72075186233409546, shardIdx# 72057594046678944:1 in# 4 ms, with status# 0, next wakeup in# 0.996000s, rate# 1, in queue# 1 shards, waiting after compaction# 0 shards, running# 0 shards at schemeshard 72057594046678944 2025-03-31T16:34:42.974800Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [3:299:2288], Recipient [3:309:2296]: NKikimr::TEvTablet::TEvFollowerGcApplied 2025-03-31T16:34:42.987443Z node 3 :TX_DATASHARD DEBUG: Updated last full compaction of tablet# 72075186233409546, tableId# 2, last full compaction# 1970-01-01T00:00:20.152000Z 2025-03-31T16:34:43.041317Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435091, Sender [0:0:0], Recipient [3:125:2151]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-03-31T16:34:43.041396Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-03-31T16:34:43.041428Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 2025-03-31T16:34:43.041517Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-03-31T16:34:43.041549Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2025-03-31T16:34:43.041697Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 
data size 13940 row count 100 2025-03-31T16:34:43.041781Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Simple, is column=0, is olap=0 2025-03-31T16:34:43.041828Z node 3 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 2: RowCount 100, DataSize 13940 2025-03-31T16:34:43.045378Z node 3 :FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Updated shard# 72057594046678944:1 with partCount# 1, rowCount# 100, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:19.000000Z at schemeshard 72057594046678944 2025-03-31T16:34:43.045633Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-31T16:34:43.056340Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435091, Sender [0:0:0], Recipient [3:125:2151]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-03-31T16:34:43.056413Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-03-31T16:34:43.056449Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 >> KqpQueryService::SessionFromPoolSuccess [GOOD] >> KqpQueryService::ReturnAndCloseSameTime >> KqpQueryService::ReplaceIntoWithDefaultValue [GOOD] >> KqpQueryService::ReadManyShardsRange ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::CdcAndSplitWithReboots[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [1:135:2157] sender: [1:136:2058] recipient: [1:111:2142] 2025-03-31T16:29:30.507390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:29:30.508168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:29:30.508235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:29:30.508282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:29:30.511952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:29:30.512049Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:29:30.512168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:29:30.512303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:29:30.513958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:29:30.614490Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-03-31T16:29:30.614590Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:169:2058] recipient: [1:15:2062] 2025-03-31T16:29:30.652964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:29:30.653182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:29:30.653371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:29:30.658677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:29:30.658951Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:29:30.659787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:30.660010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:29:30.680522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:30.698807Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:29:30.698868Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:30.699000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:29:30.699046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:29:30.699097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:29:30.699251Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:210:2058] recipient: [1:208:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:210:2058] recipient: [1:208:2209] Leader for TabletID 72057594037968897 is [1:214:2213] sender: [1:215:2058] recipient: [1:208:2209] 2025-03-31T16:29:30.723701Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader 
for TabletID 72057594046678944 is [1:124:2150] sender: [1:235:2058] recipient: [1:15:2062] 2025-03-31T16:29:30.898829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:29:30.899237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:30.899516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:29:30.899792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:29:30.899862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:30.903004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:30.903259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:29:30.903504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:30.903580Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:29:30.903622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:29:30.903665Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:29:30.907193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:30.907263Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:29:30.907303Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:29:30.912147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:30.912225Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:30.912271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:30.912338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:29:30.917031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:29:30.919659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to 
tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:29:30.923458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:135:2157] sender: [1:250:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:29:30.924798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:30.924949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 135 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:29:30.925012Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:30.935463Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:29:30.935572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:30.935878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:29:30.935994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:29:30.948163Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:29:30.948211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:29:30.948395Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:30.948441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:202:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:29:30.948816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:30.948867Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:29:30.948969Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:29:30.949006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:29:30.949046Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:29:30.949104Z no ... 
DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 50 } } } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 6 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\001\000\004\000\000\0002\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 
72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:34:43.132194Z node 118 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-31T16:34:43.133831Z node 118 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 1.64ms result status StatusSuccess 2025-03-31T16:34:43.134727Z node 118 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 
ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:34:43.146348Z node 118 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409546][118:1093:2886] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-03-31T16:34:43.146452Z node 118 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409550:2][118:1057:2886] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2025-03-31T16:34:43.146606Z node 118 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409546][118:1093:2886] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1743438883098334 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, 
LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1743438883098334 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 },{ Order: 5 Group: 1743438883098334 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 }] } 2025-03-31T16:34:43.149360Z node 118 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409546][118:1093:2886] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 5 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 5 2025-03-31T16:34:43.149469Z node 118 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409550:2][118:1057:2886] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::SplitByLoadWithReadsMultipleSplitsWithData [GOOD] Test command err: 2025-03-31T16:33:53.659471Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012770901440919:2140];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:53.659522Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0017b6/r3tmp/tmpGkNZa5/pdisk_1.dat 2025-03-31T16:33:54.087652Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:54.087779Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:54.099495Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:33:54.102696Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15006, node 1 2025-03-31T16:33:54.339413Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:54.339446Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:54.339453Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:54.339597Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11873 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:33:54.865380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:11873 2025-03-31T16:33:56.485824Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012783786343785:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:56.486455Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:56.983281Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:33:57.169234Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012788081311288:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:57.169357Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:57.169491Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012788081311305:2383], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:57.169520Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012788081311309:2387], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:57.169652Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012788081311311:2388], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:57.169701Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012788081311312:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:57.169809Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012788081311313:2390], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:57.169951Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012788081311314:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:57.171695Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012788081311337:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:57.171774Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012788081311339:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:57.171839Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:57.172479Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012788081311364:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:57.174075Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012788081311385:2410], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:57.174398Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012788081311383:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:57.174474Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:57.175198Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012788081311396:2414], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:57.175769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710664:3, at schemeshard: 72057594046644480 2025-03-31T16:33:57.183444Z node 1 :TX_PROXY ERROR: Actor# [1:7488012788081311333:2769] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-31T16:33:57.183686Z node 1 :TX_PROXY ERROR: Actor# [1:7488012788081311334:2770] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-31T16:33:57.184256Z node 1 :TX_PROXY ERROR: Actor# [1:7488012788081311323:2763] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-31T16:33:57.185663Z node 1 :TX_PROXY ERROR: Actor# [1:7488012788081311332:2768] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-31T16:33:57.185890Z node 1 :TX_PROXY ERROR: Actor# [1:7488012788081311365:2776] txid# 281474976710665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-31T16:33:57.186066Z node 1 :TX_PROXY ERROR: Actor# [1:7488012788081311376:2785] txid# 281474976710666, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-31T16:33:57.186142Z node 1 :TX_PROXY ERROR: Actor# [1:7488012788081311325:2764] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-31T16:33:57.187275Z node 1 :TX_PROXY ERROR: Actor# [1:7488012788081311400:2797] txid# 281474976710667, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-31T16:33:57.187468Z node 1 :TX_PROXY ERROR: Actor# [1:7488012788081311410:2805] txid# 281474976710668, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: 
EPathTypeResourcePool, state: ... n/3?node_id=1&id=ZjE2YjdkMzMtZmRiYWFmNzQtZjM2ZTExM2UtYjM5YTA4NzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:40.690157Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976739058. Ctx: { TraceId: 01jqpgw7xb4rkb8q5f6bzx2hgr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2M3YzgxZTAtYjQ0OWIzYzAtMTU1YjI0NmItOWQ0MjM3ZjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:40.690640Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976739059. Ctx: { TraceId: 01jqpgw7xcfn7zzcn4bp4z5fpn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjYyOTBhNTgtZmVmNTg4MTEtZTgzZDA3ZTktNmExYTM2ZmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:40.693536Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976739061. Ctx: { TraceId: 01jqpgw7xf4z4ysrhd3v130tw4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWY1NzZmY2MtOWY0NzI2NWQtZWQ2NWNjMmMtOTNlN2JmZTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:40.694428Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976739062. Ctx: { TraceId: 01jqpgw7xf56hmjpeqf2xfhhwm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTRlMzI1ZjctNmEwYWExYTctNDE4MDNhYy00M2YzY2Q4YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:40.696140Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976739063. Ctx: { TraceId: 01jqpgw7xmc10j4mfncyzgnw1j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGYxOGU3NzktYWFkNTljMjAtM2E0YmQwODgtNThiY2Y1MjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:40.696682Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976739064. Ctx: { TraceId: 01jqpgw7xm1fh687xqwgqbnxmf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzNkZDNjYzctZGQ0YjE0NTYtYTc4NTViYWQtYzMzNDg4MGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:40.697729Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976739065. Ctx: { TraceId: 01jqpgw7xn8m7j1ra8t0cdkef1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDYxYmYwNDItYjA4YmUwNDAtNTQ0YTg3ZWEtNWE2MjBiYjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:40.700444Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976739066. Ctx: { TraceId: 01jqpgw7xrcdrcx378b177a7e1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjNiMzQ3OC1mMmE0ZmJmNS0yYzI1NjE4Zi1kY2Q4ZWVjZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:40.700512Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976739067. Ctx: { TraceId: 01jqpgw7xr4er9qbwnbxh8sdtn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWI4MzNiMTUtMmJmODZiYzQtMWQ2ZjliNDMtNzE0MmRkNzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:40.706215Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976739069. Ctx: { TraceId: 01jqpgw7xzae4mhb3sp1qm1685, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjE2YjdkMzMtZmRiYWFmNzQtZjM2ZTExM2UtYjM5YTA4NzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-31T16:34:40.706232Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976739068. Ctx: { TraceId: 01jqpgw7xz84zr6tqfp1x4cyq4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2M3YzgxZTAtYjQ0OWIzYzAtMTU1YjI0NmItOWQ0MjM3ZjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:40.706980Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976739070. Ctx: { TraceId: 01jqpgw7y07s6hsggdef866k0p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTRlMzI1ZjctNmEwYWExYTctNDE4MDNhYy00M2YzY2Q4YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:40.707014Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976739072. Ctx: { TraceId: 01jqpgw7y01t116p9yzsptj703, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGYxOGU3NzktYWFkNTljMjAtM2E0YmQwODgtNThiY2Y1MjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:40.707618Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976739071. Ctx: { TraceId: 01jqpgw7xz6p1yt8gfm7dy9p7v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjYyOTBhNTgtZmVmNTg4MTEtZTgzZDA3ZTktNmExYTM2ZmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:40.713535Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976739073. Ctx: { TraceId: 01jqpgw7y7fqp82kfmekdyggkz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWY1NzZmY2MtOWY0NzI2NWQtZWQ2NWNjMmMtOTNlN2JmZTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:40.713605Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976739074. Ctx: { TraceId: 01jqpgw7y771kxp81f15cxbb0h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzNkZDNjYzctZGQ0YjE0NTYtYTc4NTViYWQtYzMzNDg4MGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:40.714255Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976739075. Ctx: { TraceId: 01jqpgw7y7ehj4y2vs9zxh2y4c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjNiMzQ3OC1mMmE0ZmJmNS0yYzI1NjE4Zi1kY2Q4ZWVjZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:40.716090Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976739076. Ctx: { TraceId: 01jqpgw7y79w7bjbthtqcerqc8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDYxYmYwNDItYjA4YmUwNDAtNTQ0YTg3ZWEtNWE2MjBiYjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:40.718732Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976739077. Ctx: { TraceId: 01jqpgw7yb9p1ncw8fqfz1e5r0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWI4MzNiMTUtMmJmODZiYzQtMWQ2ZjliNDMtNzE0MmRkNzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:40.720225Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976739078. Ctx: { TraceId: 01jqpgw7yddcfhhwh004xasyg9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGYxOGU3NzktYWFkNTljMjAtM2E0YmQwODgtNThiY2Y1MjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:40.720763Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976739079. 
Ctx: { TraceId: 01jqpgw7ye664y84s1a0a13237, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTRlMzI1ZjctNmEwYWExYTctNDE4MDNhYy00M2YzY2Q4YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:40.721112Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976739080. Ctx: { TraceId: 01jqpgw7ydfm412ced3m7cr2sf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2M3YzgxZTAtYjQ0OWIzYzAtMTU1YjI0NmItOWQ0MjM3ZjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:40.721256Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976739081. Ctx: { TraceId: 01jqpgw7yd4kh03r8819c5yh5q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjE2YjdkMzMtZmRiYWFmNzQtZjM2ZTExM2UtYjM5YTA4NzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls request: /Root/Foo 2025-03-31T16:34:40.725713Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976739082. Ctx: { TraceId: 01jqpgw7yj02qjweb0qq3xxjxp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjYyOTBhNTgtZmVmNTg4MTEtZTgzZDA3ZTktNmExYTM2ZmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:40.728506Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976739083. Ctx: { TraceId: 01jqpgw7yp8wweextqx5ktwb5n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWY1NzZmY2MtOWY0NzI2NWQtZWQ2NWNjMmMtOTNlN2JmZTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:40.728533Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976739084. Ctx: { TraceId: 01jqpgw7ypec49t4r8z074spvv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzNkZDNjYzctZGQ0YjE0NTYtYTc4NTViYWQtYzMzNDg4MGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743438837112 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 4 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) 2025-03-31T16:34:40.733897Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976739085. Ctx: { TraceId: 01jqpgw7yv1hy7v2yxwgd9wmtw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDYxYmYwNDItYjA4YmUwNDAtNTQ0YTg3ZWEtNWE2MjBiYjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:40.733952Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976739086. Ctx: { TraceId: 01jqpgw7yv3xw1qj1pxpctxg1n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjNiMzQ3OC1mMmE0ZmJmNS0yYzI1NjE4Zi1kY2Q4ZWVjZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:40.735242Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976739087. 
Ctx: { TraceId: 01jqpgw7ywea387t47d40z5v19, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWI4MzNiMTUtMmJmODZiYzQtMWQ2ZjliNDMtNzE0MmRkNzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743438837112 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 4 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) Table has 4 shards >> KqpQueryService::ExecuteDDLStatusCodeSchemeError [GOOD] >> KqpDocumentApi::Scripting [GOOD] >> KqpQueryService::AlterTable_DropNotNull_Valid >> KqpQueryService::ExecuteQueryPgTableSelect [GOOD] >> KqpQueryService::CreateTempTable |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ExecuteDDLStatusCodeSchemeError [GOOD] Test command err: Trying to start YDB, gRPC: 29504, MsgBus: 10973 2025-03-31T16:34:29.687366Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012927964539991:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:29.688005Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0014f0/r3tmp/tmpHqPEDy/pdisk_1.dat 2025-03-31T16:34:30.193497Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:30.193599Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:30.197488Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:34:30.203238Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29504, node 1 2025-03-31T16:34:30.486159Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:30.486193Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:30.486201Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:30.486338Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10973 TClient is connected to server localhost:10973 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:34:31.318929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:31.343851Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:34:31.366593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:31.498439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:31.666052Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:31.784029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:33.077124Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012945144410938:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:33.077233Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:33.609019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:34:33.641880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:34:33.674211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:34:33.710053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:34:33.745409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:34:33.782944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:34:33.835383Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012945144411452:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:33.835566Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:33.835914Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012945144411458:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:33.840126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:34:33.854731Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012945144411460:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:34:33.912636Z node 1 :TX_PROXY ERROR: Actor# [1:7488012945144411513:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:34:34.688022Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488012927964539991:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:34.688285Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 31124, MsgBus: 3633 2025-03-31T16:34:35.886145Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012953291907852:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:35.886208Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0014f0/r3tmp/tmpzklv0c/pdisk_1.dat 2025-03-31T16:34:36.006370Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31124, node 2 2025-03-31T16:34:36.039150Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:36.039244Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:36.043937Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:34:36.094101Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:36.094132Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:36.094140Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:36.094251Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3633 TClient is connected to server localhost:3633 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:34:36.538304Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:36.545546Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:34:36.550942Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:36.621068Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:36.764317Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:36.852309Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:39.085071Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012970471778807:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:39.085251Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:39.129900Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:34:39.169402Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:34:39.202849Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:34:39.271347Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:34:39.308579Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:34:39.378826Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:34:39.458814Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012970471779329:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:39.458953Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:39.459217Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012970471779334:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:39.462465Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:34:39.474082Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488012970471779336:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:34:39.537230Z node 2 :TX_PROXY ERROR: Actor# [2:7488012970471779390:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 20207, MsgBus: 13134 2025-03-31T16:34:41.468623Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488012979816887842:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:41.468714Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0014f0/r3tmp/tmplM239q/pdisk_1.dat 2025-03-31T16:34:41.633426Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:34:41.650429Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:41.650514Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:41.651825Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20207, node 3 2025-03-31T16:34:41.698378Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:41.698401Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:41.698422Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:41.698528Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13134 TClient is connected to server localhost:13134 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:34:42.202007Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:34:42.211618Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:34:44.912597Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488012992701790366:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:44.912719Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:44.916763Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488012992701790400:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:44.928724Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:34:44.944068Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488012992701790402:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:34:45.030128Z node 3 :TX_PROXY ERROR: Actor# [3:7488012996996757749:2335] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:34:45.065820Z node 3 :TX_PROXY ERROR: Actor# [3:7488012996996757780:2346] txid# 281474976715660, issues: { message: "Type \'TzTimestamp\' specified for column \'payload\' is not supported by storage" severity: 1 } 2025-03-31T16:34:45.078182Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=Y2ZhZGU0OTktMjJkODcwOTYtMmFhODU4MDEtNmUyMmZmOTc=, ActorId: [3:7488012992701790363:2329], ActorState: ExecuteState, TraceId: 01jqpgw9dn50jeev4vez7pcd5e, Create QueryResponse for error on request, msg: >> AnalyzeColumnshard::AnalyzeStatus >> KqpDocumentApi::RestrictAlter [GOOD] >> KqpDocumentApi::RestrictDrop >> KqpTx::LocksAbortOnCommit [GOOD] >> KqpTx::MixEnginesOldNew ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ExecuteQueryPgTableSelect [GOOD] Test command err: Trying to start YDB, gRPC: 25440, MsgBus: 15563 2025-03-31T16:34:29.939833Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012926111717216:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:29.939919Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0014d2/r3tmp/tmpQQ9Css/pdisk_1.dat TServer::EnableGrpc on GrpcPort 25440, node 1 2025-03-31T16:34:30.441471Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:34:30.441504Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:34:30.444269Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:34:30.449657Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:30.449750Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:30.453953Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:34:30.507686Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:30.507711Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:30.507718Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:30.507838Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15563 TClient is connected to server localhost:15563 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:34:31.372716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:31.393904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:31.536103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:31.706943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:31.785803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:33.456462Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012943291588181:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:33.456711Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:33.785648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:34:33.814540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:34:33.882857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:34:33.915938Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:34:33.983404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:34:34.053069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:34:34.116456Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012947586555999:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:34.116536Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:34.116600Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012947586556004:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:34.120406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:34:34.130561Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012947586556006:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:34:34.214800Z node 1 :TX_PROXY ERROR: Actor# [1:7488012947586556059:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:34:34.955581Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488012926111717216:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:34.955677Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 13351, MsgBus: 15826 2025-03-31T16:34:35.968860Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012951872233669:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:35.968946Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0014d2/r3tmp/tmpnrCN5L/pdisk_1.dat 2025-03-31T16:34:36.079643Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13351, node 2 2025-03-31T16:34:36.127950Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:36.128057Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:36.132897Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:34:36.195697Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:36.195722Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:36.195730Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:36.195853Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15826 TClient is connected to server localhost:15826 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:34:36.649604Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:36.674327Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:34:36.699956Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:36.762093Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:36.910339Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:37.025996Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:39.291602Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012969052104630:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:39.291709Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:39.339380Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:34:39.374315Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:34:39.409609Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:34:39.443952Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:34:39.484690Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:34:39.554991Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:34:39.612097Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012969052105148:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:39.612211Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:39.612454Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012969052105153:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:39.616740Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:34:39.628253Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488012969052105155:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:34:39.705406Z node 2 :TX_PROXY ERROR: Actor# [2:7488012969052105208:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:34:40.968012Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488012951872233669:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:40.968103Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 1144, MsgBus: 3671 2025-03-31T16:34:41.693187Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488012978589287511:2214];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:41.721917Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0014d2/r3tmp/tmpCxNfEW/pdisk_1.dat 2025-03-31T16:34:41.827937Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:34:41.852719Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:41.852806Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:41.854693Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1144, node 3 2025-03-31T16:34:41.931122Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:41.931147Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:41.931155Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:41.931271Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3671 TClient is connected to server localhost:3671 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-03-31T16:34:42.412821Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:34:45.128054Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488012995769157172:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:45.128175Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:45.152139Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:34:45.235379Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488012995769157275:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:45.235475Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:45.238767Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488012995769157280:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:45.247221Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-31T16:34:45.262305Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488012995769157282:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-31T16:34:45.345174Z node 3 :TX_PROXY ERROR: Actor# [3:7488012995769157333:2393] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpQueryService::StreamExecuteQuery [GOOD] >> KqpQueryService::StreamExecuteCollectMeta >> KqpSinkTx::ExplicitTcl >> KqpSinkLocks::TInvalidate >> Initializer::Simple [GOOD] >> KqpQueryService::TableSink_Htap+withOltpSink >> KqpSnapshotIsolation::TConflictWriteOltp [FAIL] >> KqpSnapshotIsolation::TConflictWriteOlap >> KqpSinkTx::SnapshotROInteractive1 [GOOD] >> KqpQueryService::ReadManyShardsRange [GOOD] >> KqpQueryService::ReadManyRangesAndPoints >> KqpQueryService::TableSink_HtapComplex-withOltpSink [GOOD] >> KqpQueryService::TableSink_HtapInteractive+withOltpSink ------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> Initializer::Simple [GOOD] Test command err: 2025-03-31T16:33:39.062241Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:33:39.062485Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:33:39.062663Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001c3b/r3tmp/tmp6JD9sl/pdisk_1.dat TServer::EnableGrpc on GrpcPort 15112, node 1 TClient is connected to server localhost:11910 2025-03-31T16:33:39.941722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:33:40.010863Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:33:40.012441Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:40.012514Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:40.012551Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:40.013816Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:33:40.060992Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:40.061592Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:40.074337Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:33:50.216800Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:678:2569], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:50.216946Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:688:2574], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:50.217096Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:50.230376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-03-31T16:33:50.323388Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:692:2577], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-03-31T16:33:50.437432Z node 1 :TX_PROXY ERROR: Actor# [1:764:2618] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:33:50.808113Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:775:2628], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:33:50.815290Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODhhMmQ4YTMtNmRjMTkxOTAtMTI0OTFjOGMtODA0MTU1Yg==, ActorId: [1:674:2566], ActorState: ExecuteState, TraceId: 01jqpgtpm115mj47w1w9sckg1d, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: REQUEST=SELECT * FROM `/Root/.metadata/test`;RESULT=
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 ;EXPECTATION=0 REQUEST=SELECT * FROM `/Root/.metadata/test`;EXPECTATION=0 2025-03-31T16:33:50.916013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:1, at schemeshard: 72057594046644480 2025-03-31T16:33:52.474948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:33:52.843844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:33:53.631822Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715670:0, at schemeshard: 72057594046644480 Initialization finished 2025-03-31T16:34:04.766647Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01jqpgv4pc66npp99h0qab4av0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGU0MmZiN2EtYmZkNDBmYzUtZmQ5MTVkMWEtOTRkNzdhOWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=SELECT * FROM `/Root/.metadata/test`;RESULT=;EXPECTATION=1 REQUEST=SELECT * FROM `/Root/.metadata/test`;EXPECTATION=1 REQUEST=DROP TABLE `/Root/.metadata/test`;EXPECTATION=0;WAITING=1 2025-03-31T16:34:16.181924Z node 1 :TX_PROXY ERROR: Actor# [1:1372:3068] txid# 281474976715678, Access denied for root@builtin on path /Root/.metadata/test, with access RemoveSchema 2025-03-31T16:34:16.182067Z node 1 :TX_PROXY ERROR: Actor# [1:1372:3068] txid# 281474976715678, issues: { message: "Access denied for root@builtin on path /Root/.metadata/test" issue_code: 200000 severity: 1 } REQUEST=DROP TABLE `/Root/.metadata/test`;RESULT=
: Error: Execution, code: 1060
:1:12: Error: Executing DROP TABLE
: Error: Access denied., code: 2018
: Error: Access denied for root@builtin on path /Root/.metadata/test, code: 200000 ;EXPECTATION=0 FINISHED_REQUEST=DROP TABLE `/Root/.metadata/test`;EXPECTATION=0;WAITING=1 2025-03-31T16:34:26.818515Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715681. Ctx: { TraceId: 01jqpgvt8f46htfr7ndxbjhb7z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTMwNTdkMzMtNzRhMmMyMzYtOWQ5ZWJiMmQtNjFlNGU1N2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=SELECT * FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT * FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 REQUEST=DELETE FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=0;WAITING=1 REQUEST=DELETE FROM `/Root/.metadata/initialization/migrations`;RESULT=
: Fatal: ydb/core/kqp/host/kqp_host.cpp:977 ExecuteDataQuery(): requirement false failed, message: Unexpected query type for execute script action: Ddl, code: 1 ;EXPECTATION=0 FINISHED_REQUEST=DELETE FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=0;WAITING=1 REQUEST=DROP TABLE `/Root/.metadata/initialization/migrations`;EXPECTATION=0;WAITING=1 2025-03-31T16:34:48.134981Z node 1 :TX_PROXY ERROR: Actor# [1:1571:3217] txid# 281474976715686, Access denied for root@builtin on path /Root/.metadata/initialization/migrations, with access RemoveSchema 2025-03-31T16:34:48.135212Z node 1 :TX_PROXY ERROR: Actor# [1:1571:3217] txid# 281474976715686, issues: { message: "Access denied for root@builtin on path /Root/.metadata/initialization/migrations" issue_code: 200000 severity: 1 } REQUEST=DROP TABLE `/Root/.metadata/initialization/migrations`;RESULT=
: Error: Execution, code: 1060
:1:12: Error: Executing DROP TABLE
: Error: Access denied., code: 2018
: Error: Access denied for root@builtin on path /Root/.metadata/initialization/migrations, code: 200000 ;EXPECTATION=0 FINISHED_REQUEST=DROP TABLE `/Root/.metadata/initialization/migrations`;EXPECTATION=0;WAITING=1 >> KqpQueryService::TableSink_OltpInteractive [GOOD] >> KqpService::Shutdown [GOOD] >> KqpService::SessionBusyRetryOperationSync >> KqpNewEngine::FullScanCount [GOOD] >> KqpQueryService::AlterTable_DropNotNull_Valid [GOOD] >> KqpQueryService::AlterCdcTopic |96.8%| [TA] $(B)/ydb/services/metadata/initializer/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.8%| [TA] {RESULT} $(B)/ydb/services/metadata/initializer/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpSnapshotIsolation::TConflictReadWriteOltp [FAIL] >> KqpSnapshotIsolation::TConflictReadWriteOltpNoSink >> KqpSinkLocks::EmptyRange [GOOD] >> KqpSinkLocks::EmptyRangeAlreadyBroken ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::SnapshotROInteractive1 [GOOD] Test command err: Trying to start YDB, gRPC: 12204, MsgBus: 10253 2025-03-31T16:34:25.332533Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012908797161099:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:25.332592Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001268/r3tmp/tmpUS42WD/pdisk_1.dat 2025-03-31T16:34:25.671864Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:25.671997Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:25.673693Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12204, node 1 2025-03-31T16:34:25.701840Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:34:25.701959Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:34:25.704043Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:34:25.783457Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:25.783525Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:25.783538Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:25.783719Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10253 TClient is connected to server localhost:10253 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:34:26.264031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:28.331423Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012921682063631:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:28.331544Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012921682063657:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:28.331617Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:28.337045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:34:28.349870Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012921682063660:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:34:28.420991Z node 1 :TX_PROXY ERROR: Actor# [1:7488012921682063713:2337] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:34:28.759601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:34:28.883152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:34:29.913805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:34:30.659605Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488012908797161099:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:30.705947Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:34:31.906681Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YzkzZjI3OTgtNzE5YTcyMjctYWNjNTIzMTYtZTc1ZTBjNDg=, ActorId: [1:7488012934566974151:2969], ActorState: ExecuteState, TraceId: 01jqpgvz8y52y8snfqwqmr73k5, Create QueryResponse for error on request, msg:
:3:29: Error: Operation 'Upsert' can't be performed in read only transaction, code: 2008 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 65086, MsgBus: 13294 2025-03-31T16:34:37.841006Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012961685418274:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:37.841075Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001268/r3tmp/tmpX8qqe0/pdisk_1.dat 2025-03-31T16:34:37.970359Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 65086, node 2 2025-03-31T16:34:38.000388Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:38.000471Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:38.007254Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:34:38.051885Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:38.051910Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:38.051917Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:38.052077Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13294 TClient is connected to server localhost:13294 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:34:38.533911Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:41.182396Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012978865288090:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
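The KQP_SESSION warning just above ends with issue code 2008: the session tried to run an Upsert inside a transaction that was opened in a read-only mode, which YDB rejects. The transaction mode is chosen on the client, not in YQL; a minimal sketch of selecting a read-write mode with the Python SDK's table client (endpoint, database and table are hypothetical stand-ins, since the test above drives an in-process server):

    import ydb

    driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")  # hypothetical
    driver.wait(timeout=5)
    pool = ydb.SessionPool(driver)

    def do_upsert(session):
        # A read-only mode such as ydb.OnlineReadOnly() would fail here with
        # "Operation 'Upsert' can't be performed in read only transaction"
        # (code 2008); writes require a read-write transaction mode.
        tx = session.transaction(ydb.SerializableReadWrite())
        tx.execute('UPSERT INTO `/Root/KV` (Key, Value) VALUES (1u, "one");',
                   commit_tx=True)

    pool.retry_operation_sync(do_upsert)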
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:41.182496Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:41.182661Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012978865288125:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:41.191472Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:34:41.203812Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488012978865288128:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:34:41.274116Z node 2 :TX_PROXY ERROR: Actor# [2:7488012978865288179:2336] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:34:41.318853Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-31T16:34:41.372087Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-31T16:34:42.579934Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:34:43.353925Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488012961685418274:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:43.451464Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> KqpQueryService::CreateTempTable [GOOD] >> KqpQueryService::CreateAndDropTopic ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_OltpInteractive [GOOD] Test command err: Trying to start YDB, gRPC: 5610, MsgBus: 15948 2025-03-31T16:34:29.697526Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012927444963806:2190];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:29.697575Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0014e1/r3tmp/tmpT8FcCY/pdisk_1.dat 2025-03-31T16:34:30.195928Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:34:30.231903Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:30.232018Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:30.249637Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5610, node 1 2025-03-31T16:34:30.467955Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:30.467976Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:30.467982Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:30.468101Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15948 TClient is connected to server localhost:15948 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:34:31.316970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:33.024575Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012944624833532:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:33.024699Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:33.609227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:34:33.791399Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012944624833639:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:33.791504Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:33.791686Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012944624833644:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:33.803374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-31T16:34:33.818297Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012944624833646:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-31T16:34:33.882729Z node 1 :TX_PROXY ERROR: Actor# [1:7488012944624833698:2402] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 61831, MsgBus: 5928 2025-03-31T16:34:35.259065Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012951206653792:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:35.259234Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0014e1/r3tmp/tmp1B7Jhw/pdisk_1.dat 2025-03-31T16:34:35.350893Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61831, node 2 2025-03-31T16:34:35.390426Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:35.390526Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:35.392724Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:34:35.419638Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:35.419675Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:35.419683Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:35.419796Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5928 TClient is connected to server localhost:5928 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:34:35.816182Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:38.192348Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012964091556338:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:38.192475Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:38.222167Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:34:38.269947Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012964091556439:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:38.270018Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:38.270193Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012964091556446:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:38.274286Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-31T16:34:38.283071Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488012964091556448:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-31T16:34:38.366346Z node 2 :TX_PROXY ERROR: Actor# [2:7488012964091556499:2393] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:34:38.759557Z node 2 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=4; 2025-03-31T16:34:38.786565Z node 2 :TX_DATASHARD ERROR: Prepare transaction failed. txid 4 at tablet 72075186224037888 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-03-31T16:34:38.786732Z node 2 :TX_DATASHARD ERROR: Errors while proposing transaction txid 4 at tablet 72075186224037888 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-03-31T16:34:38.786979Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7488012964091556586:2366], Table: `/Root/DataShard` ([72057594046644480:2:1]), SessionActorId: [2:7488012964091556569:2366]Got CONSTRAINT VIOLATION for table `/Root/DataShard`. ShardID=72075186224037888, Sink=[2:7488012964091556586:2366].{
: Error: Duplicate keys have been found., code: 2012 } 2025-03-31T16:34:38.787689Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7488012964091556579:2366], SessionActorId: [2:7488012964091556569:2366], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/DataShard`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[2:7488012964091556569:2366]. isRollback=0 2025-03-31T16:34:38.787938Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MTQ4OWE2NC00MmI4MGE0MS05NDhmNjZiNS05NTc5MGQ4OA==, ActorId: [2:7488012964091556569:2366], ActorState: ExecuteState, TraceId: 01jqpgw5z60p4eryj87jw1sj2e, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [2:7488012964091556580:2366] from: [2:7488012964091556579:2366] 2025-03-31T16:34:38.788025Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7488012964091556580:2366] TxId: 281474976715663. Ctx: { TraceId: 01jqpgw5z60p4eryj87jw1sj2e, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTQ4OWE2NC00MmI4MGE0MS05NDhmNjZiNS05NTc5MGQ4OA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/DataShard`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-03-31T16:34:38.788814Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MTQ4OWE2NC00MmI4MGE0MS05NDhmNjZiNS05NTc5MGQ4OA==, ActorId: [2:7488012964091556569:2366], ActorState: ExecuteState, TraceId: 01jqpgw5z60p4eryj87jw1sj2e, Create QueryResponse for error on request, msg: WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2025-03-31T16:34:40.259369Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488012951206653792:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:40.259460Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 8629, MsgBus: 27383 2025-03-31T16:34:44.562234Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488012989634891217:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:44.562317Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0014e1/r3tmp/tmpIKETRi/pdisk_1.dat 2025-03-31T16:34:44.736408Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:34:44.769273Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:44.769369Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:44.770913Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8629, node 3 2025-03-31T16:34:44.851733Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:44.851767Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:44.851776Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:44.851906Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27383 TClient is connected to server localhost:27383 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
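The PRECONDITION_FAILED block above carries issue code 2012, "Duplicate keys have been found": INSERT semantics enforce primary-key uniqueness in the OLTP write sink, so a batch that writes the same key twice (or collides with an existing row) aborts the whole transaction. A rough sketch of the distinction, assuming hypothetical columns for `/Root/DataShard` (the real schema is not shown in the log):

    import ydb

    driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")  # hypothetical
    driver.wait(timeout=5)
    pool = ydb.SessionPool(driver)

    def write(session):
        tx = session.transaction(ydb.SerializableReadWrite())
        # INSERT enforces key uniqueness: the repeated key 1u is rejected with
        # CONSTRAINT_VIOLATION / code 2012, matching the log above. An UPSERT
        # of the same rows would succeed, since UPSERT blindly overwrites.
        tx.execute(
            'INSERT INTO `/Root/DataShard` (Key, Value) VALUES (1u, "a"), (1u, "b");',
            commit_tx=True,
        )

    pool.retry_operation_sync(write)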
2025-03-31T16:34:45.433534Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:45.443587Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:34:48.107368Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013006814761049:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:48.107474Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:48.144019Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:34:48.342115Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-31T16:34:48.621301Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013006814762426:2444], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:48.621397Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:48.627574Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013006814762430:2446], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:48.627644Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:48.627905Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013006814762435:2449], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:48.631419Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-03-31T16:34:48.645754Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488013006814762437:2450], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-03-31T16:34:48.744375Z node 3 :TX_PROXY ERROR: Actor# [3:7488013006814762521:3249] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:34:49.571559Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488012989634891217:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:49.573237Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpQueryService::TableSink_OltpReplace+HasSecondaryIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::FullScanCount [GOOD] Test command err: Trying to start YDB, gRPC: 26174, MsgBus: 62811 2025-03-31T16:33:55.162822Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012781264610948:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:55.162963Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000827/r3tmp/tmpYX7UC3/pdisk_1.dat 2025-03-31T16:33:55.464147Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26174, node 1 2025-03-31T16:33:55.537451Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:55.537590Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:55.539793Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:33:55.549400Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:55.549428Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:55.549609Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:55.549790Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62811 TClient is connected to server localhost:62811 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:33:56.032493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:56.044237Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:33:56.051569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:56.201697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:56.351817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:56.418881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:57.949445Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012789854547321:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:57.949539Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:58.332076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:33:58.368563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:33:58.396226Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:33:58.424009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:33:58.454935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:33:58.486757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:33:58.528274Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012794149515126:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:58.528347Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:58.528427Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012794149515131:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:58.532766Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:33:58.542778Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012794149515133:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:33:58.626350Z node 1 :TX_PROXY ERROR: Actor# [1:7488012794149515188:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 21891, MsgBus: 28301 2025-03-31T16:34:00.565400Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012802568694578:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:00.565521Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000827/r3tmp/tmphpgoJr/pdisk_1.dat 2025-03-31T16:34:00.708505Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21891, node 2 2025-03-31T16:34:00.738687Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:00.738800Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:00.740581Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:34:00.810087Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:00.810108Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:00.810116Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:00.810204Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28301 TClient is connected to server localhost:28301 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:34:01.260929Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:34:01.268437Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:34:01.278489Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:01.348085Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:01.516546Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:01.608271Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:03.804589Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012815453598231:2406], DatabaseId: /Root, ... hemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:34:37.845242Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:34:37.895206Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:34:37.983745Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:34:38.049706Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488012964189503601:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:38.049798Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:38.050036Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7488012964189503606:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:38.054806Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:34:38.072169Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7488012964189503608:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:34:38.146475Z node 6 :TX_PROXY ERROR: Actor# [6:7488012964189503664:3452] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:34:38.270025Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7488012942714664854:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:38.270130Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 16696, MsgBus: 11944 2025-03-31T16:34:41.283345Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7488012976893962103:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:41.283523Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000827/r3tmp/tmptwz4de/pdisk_1.dat 2025-03-31T16:34:41.463886Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:34:41.483265Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:41.483388Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:41.485012Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16696, node 7 2025-03-31T16:34:41.587834Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:41.587869Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:41.587882Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:41.588058Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11944 TClient is connected to server localhost:11944 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
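The NOT_FOUND warnings and the ESchemeOpCreateResourcePool / "path exist, request accepts it" sequence above are the workload manager bootstrapping /Root/.metadata/workload_manager/pools/default: several actors race to create the pool, one wins, and the others find the path already present, so these warnings are expected start-up noise rather than test failures. Pools can also be created explicitly; a speculative sketch (the pool name and WITH parameters are illustrative and do not appear in the log):

    import ydb

    driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")  # hypothetical
    driver.wait(timeout=5)
    pool = ydb.SessionPool(driver)

    def create_pool(session):
        # Speculative DDL: parameter names follow YDB's workload-manager
        # documentation and are not taken from the log above.
        session.execute_scheme(
            "CREATE RESOURCE POOL demo_pool WITH ("
            "  CONCURRENT_QUERY_LIMIT = 10,"
            "  QUEUE_SIZE = 100"
            ");"
        )

    pool.retry_operation_sync(create_pool)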
2025-03-31T16:34:42.304958Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:42.313833Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:34:42.318895Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:42.411629Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:42.646581Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:42.739020Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:45.988938Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488012994073833071:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:45.989097Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:46.056307Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:34:46.102790Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:34:46.145886Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:34:46.194070Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:34:46.250698Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:34:46.279556Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7488012976893962103:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:46.279747Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:34:46.345833Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:34:46.402438Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488012998368800887:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:46.402547Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:46.402585Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7488012998368800892:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:46.408643Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:34:46.420798Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7488012998368800894:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:34:46.515932Z node 7 :TX_PROXY ERROR: Actor# [7:7488012998368800951:3462] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
: Warning: Type annotation, code: 1030
:3:17: Warning: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
:3:33: Warning: At function: Filter, At function: Coalesce
:3:58: Warning: At function: SqlIn
:3:58: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Type annotation, code: 1030
:3:17: Warning: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
:3:33: Warning: At function: Filter, At function: Coalesce
:3:58: Warning: At function: SqlIn
:3:58: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 >> KqpExtractPredicateLookup::ComplexRange [GOOD] >> KqpQueryServiceScripts::ExecuteScript [GOOD] >> KqpQueryServiceScripts::ExecuteMultiScript >> KqpTx::MixEnginesOldNew [GOOD] >> KqpQueryService::StreamExecuteCollectMeta [GOOD] >> KqpQueryService::ShowCreateTableOnView >> KqpTx::RollbackTx >> KqpQueryServiceScripts::ExecuteScriptWithWorkloadManager >> KqpDocumentApi::RestrictDrop [GOOD] >> KqpQueryService::TableSink_HtapInteractive-withOltpSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::MixEnginesOldNew [GOOD] Test command err: Trying to start YDB, gRPC: 4882, MsgBus: 1745 2025-03-31T16:34:41.196211Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012979453196594:2195];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:41.198551Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012e6/r3tmp/tmpUL8XWO/pdisk_1.dat 2025-03-31T16:34:41.567890Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:34:41.577997Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:41.578144Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 4882, node 1 2025-03-31T16:34:41.603799Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:34:41.663597Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:41.663625Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:41.663639Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:41.663765Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1745 TClient is connected to server localhost:1745 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:34:42.233880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
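The repeated warning block above (code 1108) is emitted while compiling the FullScanCount queries: IN over nullable arguments can evaluate to NULL instead of false, so the compiler points at the SqlIn callable and suggests the ANSI pragma by name. A minimal sketch of applying the suggested pragma; only the pragma itself is taken from the log, while the endpoint, table and predicate are hypothetical:

    import ydb

    driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")  # hypothetical
    driver.wait(timeout=5)
    pool = ydb.SessionPool(driver)

    QUERY = """
    PRAGMA AnsiInForEmptyOrNullableItemsCollections;
    SELECT * FROM `/Root/KV`           -- hypothetical table
    WHERE Value IN ("a", "b", NULL);   -- nullable items: warns without the pragma
    """

    def run(session):
        # With the pragma, IN follows ANSI semantics for empty or nullable
        # item collections instead of producing the surprising result the
        # warning above describes.
        return session.transaction(ydb.SerializableReadWrite()).execute(
            QUERY, commit_tx=True)

    pool.retry_operation_sync(run)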
2025-03-31T16:34:42.252678Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:34:42.267851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:42.418097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:42.603317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:42.686917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:44.327702Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012992338100148:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:44.327837Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:44.659464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:34:44.707909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:34:44.740138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:34:44.773036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:34:44.813738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:34:44.865983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:34:44.928357Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012992338100658:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:44.928442Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:44.928790Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012992338100663:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:44.933046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:34:44.947366Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012992338100665:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:34:45.002227Z node 1 :TX_PROXY ERROR: Actor# [1:7488012992338100718:3444] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:34:46.189499Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488012979453196594:2195];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:46.189583Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:34:46.668722Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODliOTExNmYtZGU5MTlmNjQtNDQ4MGNiNDctM2M3OGIzYjk=, ActorId: [1:7488013000928035571:2488], ActorState: ExecuteState, TraceId: 01jqpgwdm24dfjqvne4bxx7tnk, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken 2025-03-31T16:34:46.677660Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODliOTExNmYtZGU5MTlmNjQtNDQ4MGNiNDctM2M3OGIzYjk=, ActorId: [1:7488013000928035571:2488], ActorState: ReadyState, TraceId: 01jqpgwdrnen5qge0qg4vdx8ye, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 20680, MsgBus: 13197 2025-03-31T16:34:47.475621Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488013004334953209:2057];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:47.475695Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012e6/r3tmp/tmpY6IlxA/pdisk_1.dat 2025-03-31T16:34:47.591802Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20680, node 2 2025-03-31T16:34:47.629505Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:47.629584Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:47.631906Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:34:47.654525Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:47.654545Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:47.654550Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:47.654634Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13197 TClient is connected to server localhost:13197 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:34:48.054257Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:48.071850Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:48.153157Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:48.341638Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:48.411684Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:50.563267Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013017219856872:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:50.563346Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:50.608277Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:34:50.643741Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:34:50.675067Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:34:50.704801Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:34:50.731861Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:34:50.769050Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:34:50.825802Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013017219857385:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:50.825886Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:50.826268Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013017219857390:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:50.830234Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:34:50.840680Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488013017219857392:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:34:50.898887Z node 2 :TX_PROXY ERROR: Actor# [2:7488013017219857446:3446] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpExtractPredicateLookup::ComplexRange [GOOD] Test command err: Trying to start YDB, gRPC: 3921, MsgBus: 24220 2025-03-31T16:32:27.185152Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012401670241669:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:27.185519Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0009fc/r3tmp/tmpPHSvOu/pdisk_1.dat 2025-03-31T16:32:27.552608Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3921, node 1 2025-03-31T16:32:27.592825Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:27.592990Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:32:27.595101Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:32:27.600244Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:32:27.600283Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:32:27.600296Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:32:27.600448Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24220 TClient is connected to server localhost:24220 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:32:28.045163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:32:28.071478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:28.190524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:28.329823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:28.406264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:29.915673Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012410260178031:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:29.915788Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:30.274503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:32:30.301383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:32:30.327097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:32:30.353999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:32:30.384152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:32:30.417636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:32:30.455604Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012414555145838:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:30.455706Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:30.455893Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012414555145843:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:30.459597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:32:30.469413Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012414555145845:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:32:30.542103Z node 1 :TX_PROXY ERROR: Actor# [1:7488012414555145899:3440] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:32:32.178869Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488012401670241669:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:32.178952Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 28694, MsgBus: 63146 2025-03-31T16:32:33.382184Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012428410181759:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:33.382253Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0009fc/r3tmp/tmpEIDf9b/pdisk_1.dat 2025-03-31T16:32:33.469709Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28694, node 2 2025-03-31T16:32:33.510785Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:33.510877Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:32:33.511868Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:32:33.529145Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:32:33.529168Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:32:33.529175Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:32:33.529281Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63146 TClient is connected to server localhost:63146 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:32:33.967641Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:33.974618Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:32:33.984087Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:34.028531Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:34.166459Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:34.216198Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreat ... e, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-31T16:34:33.860557Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-03-31T16:34:33.922802Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-03-31T16:34:34.011747Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-03-31T16:34:34.069191Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2025-03-31T16:34:34.125873Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480 2025-03-31T16:34:34.182774Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480 2025-03-31T16:34:34.246572Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 10983, MsgBus: 25924 2025-03-31T16:34:38.527675Z node 14 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[14:7488012966470916622:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:38.527838Z node 14 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0009fc/r3tmp/tmpu3kZHv/pdisk_1.dat 
2025-03-31T16:34:38.751568Z node 14 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:34:38.773104Z node 14 :HIVE WARN: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:38.773215Z node 14 :HIVE WARN: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:38.775138Z node 14 :HIVE WARN: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10983, node 14 2025-03-31T16:34:38.853781Z node 14 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:38.853809Z node 14 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:38.853823Z node 14 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:38.854037Z node 14 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25924 TClient is connected to server localhost:25924 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:34:39.964518Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:39.987304Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:40.082787Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:34:40.383892Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-31T16:34:40.515752Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:34:43.531364Z node 14 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[14:7488012966470916622:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:43.531472Z node 14 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:34:45.198771Z node 14 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7488012996535689465:2410], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:45.198929Z node 14 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:45.261498Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:34:45.345750Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:34:45.418203Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:34:45.486782Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:34:45.555050Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:34:45.649929Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:34:45.765108Z node 14 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7488012996535690005:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:45.765327Z node 14 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:45.765711Z node 14 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7488012996535690011:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:45.773636Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:34:45.793016Z node 14 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [14:7488012996535690013:2466], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:34:45.876754Z node 14 :TX_PROXY ERROR: Actor# [14:7488012996535690067:3471] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:34:47.657025Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:34:47.730618Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-31T16:34:47.786637Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-31T16:34:47.845626Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-03-31T16:34:47.941187Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-03-31T16:34:48.020736Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-03-31T16:34:48.084022Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2025-03-31T16:34:48.145116Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480 2025-03-31T16:34:48.234577Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480 2025-03-31T16:34:48.296565Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 >> KqpQueryService::ReadManyRangesAndPoints [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpDocumentApi::RestrictDrop [GOOD] Test command err: Trying to start YDB, gRPC: 19851, MsgBus: 11285 2025-03-31T16:34:29.689450Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012928783338613:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:29.689495Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0014e0/r3tmp/tmpAOKUAK/pdisk_1.dat 2025-03-31T16:34:30.189120Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Unknown -> Disconnected 2025-03-31T16:34:30.195241Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:30.197080Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:34:30.259647Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19851, node 1 2025-03-31T16:34:30.473510Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:30.473549Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:30.473560Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:30.473693Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11285 TClient is connected to server localhost:11285 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:34:31.317219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:31.359242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:31.525978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:31.686619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:31.783258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:33.011241Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012945963209583:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:33.011390Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:33.609174Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:34:33.641689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:34:33.672176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:34:33.708489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:34:33.742985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:34:33.813570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:34:33.923458Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012945963210106:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:33.923536Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:33.923567Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012945963210111:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:33.926746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:34:33.940017Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012945963210113:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:34:34.011328Z node 1 :TX_PROXY ERROR: Actor# [1:7488012950258177466:3458] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:34:34.689627Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488012928783338613:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:34.689737Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:34:34.878207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:34:34.986763Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488012950258177827:2507], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:25: Error: At function: KiWriteTable!
:2:25: Error: Document API table cannot be modified from YQL query: /Root/DocumentApiTest, code: 2008 2025-03-31T16:34:34.987041Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTJiNmZlNDQtNWRkMGU2YjctMTgyYjg0NDctN2JjZTlmZTM=, ActorId: [1:7488012950258177754:2497], ActorState: ExecuteState, TraceId: 01jqpgw2ab8x3yv3py9pxn292s, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Type annotation, code: 1030
:2:25: Error: At function: KiWriteTable!
:2:25: Error: Document API table cannot be modified from YQL query: /Root/DocumentApiTest, code: 2008 Trying to start YDB, gRPC: 2322, MsgBus: 18896 2025-03-31T16:34:35.879886Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012954308311698:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:35.879977Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0014e0/r3tmp/tmpMJtlYy/pdisk_1.dat 2025-03-31T16:34:35.998484Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:34:36.008625Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:36.008721Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:36.010173Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2322, node 2 2025-03-31T16:34:36.077489Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:36.077514Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:36.077521Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:36.077625Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18896 TClient is connected to server localhost:18896 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:34:36.476724Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSub ... 
missions } 2025-03-31T16:34:44.895698Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:34:44.932324Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:34:44.974061Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:34:45.062324Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:34:45.114919Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:34:45.163661Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:34:45.228615Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488012995611673882:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:45.228757Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:45.229116Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488012995611673887:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:45.233342Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:34:45.245778Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488012995611673889:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:34:45.308182Z node 3 :TX_PROXY ERROR: Actor# [3:7488012995611673942:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:34:46.294981Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480
: Error: Type annotation, code: 1030
:2:61: Error: At function: KiAlterTable!
:2:61: Error: Document API table cannot be modified from YQL query: /Root/DocumentApiTest, code: 2008 2025-03-31T16:34:46.372055Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-31T16:34:46.413383Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488012978431802566:2212];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:46.413489Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 28126, MsgBus: 6096 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0014e0/r3tmp/tmpnuIzYd/pdisk_1.dat 2025-03-31T16:34:47.558429Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:34:47.559954Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:34:47.590308Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:47.590405Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:47.592083Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28126, node 4 2025-03-31T16:34:47.635762Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:47.635791Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:47.635799Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:47.635916Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6096 TClient is connected to server localhost:6096 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:34:48.091838Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:48.116641Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:34:48.202144Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:48.375165Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:48.451594Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:51.176423Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488013022114484214:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:51.176529Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:51.230125Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:34:51.305016Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:34:51.344069Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:34:51.384734Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:34:51.431792Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:34:51.486556Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:34:51.550380Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488013022114484728:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:51.550492Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:51.550685Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488013022114484733:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:51.554513Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:34:51.574501Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7488013022114484735:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:34:51.676399Z node 4 :TX_PROXY ERROR: Actor# [4:7488013022114484792:3451] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:34:53.024626Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480
: Error: Type annotation, code: 1030
:2:24: Error: At function: KiDropTable!
:2:24: Error: Document API table cannot be modified from YQL query: /Root/DocumentApiTest, code: 2008 >> KqpSinkMvcc::ReadOnlyTxCommitsOnConcurrentWrite [GOOD] >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite1 >> PersQueueSdkReadSessionTest::ReadSessionWithAbort [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithClose >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessagesNoClusterDiscovery [GOOD] >> BasicUsage::TWriteSession_WriteEncoded >> KqpQueryService::CreateAndDropTopic [GOOD] >> KqpQueryService::CreateAndAlterTopic >> KqpQueryServiceScripts::ExecuteScriptWithUnspecifiedMode >> KqpQueryService::Write ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ReadManyRangesAndPoints [GOOD] Test command err: Trying to start YDB, gRPC: 6270, MsgBus: 6377 2025-03-31T16:34:40.663189Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012975494814554:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:40.663300Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0014b3/r3tmp/tmpGUe7g0/pdisk_1.dat 2025-03-31T16:34:41.018482Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:34:41.049356Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:41.049456Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 6270, node 1 2025-03-31T16:34:41.052340Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:34:41.121406Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:41.121429Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:41.121434Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:41.121566Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6377 TClient is connected to server localhost:6377 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:34:41.766651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T16:34:43.808094Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012988379717114:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:43.808318Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012988379717102:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:43.808484Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:43.814326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:34:43.824173Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012988379717117:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:34:43.915943Z node 1 :TX_PROXY ERROR: Actor# [1:7488012988379717168:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:34:44.275618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:1, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 23176, MsgBus: 20585 2025-03-31T16:34:45.341859Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012993732893309:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:45.341998Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0014b3/r3tmp/tmpwSPZPO/pdisk_1.dat 2025-03-31T16:34:45.510780Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:34:45.525030Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:45.525154Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:45.526706Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23176, node 2 2025-03-31T16:34:45.583657Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:45.583681Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:45.583690Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:45.583814Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20585 TClient is connected to server localhost:20585 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:34:46.039138Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:34:48.671666Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013006617795858:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:48.671778Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:48.708653Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:34:48.775650Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013006617796074:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:48.775751Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:48.776058Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013006617796079:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:48.779517Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-31T16:34:48.788937Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488013006617796081:2354], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-31T16:34:48.874576Z node 2 :TX_PROXY ERROR: Actor# [2:7488013006617796132:2469] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 19907, MsgBus: 6315 2025-03-31T16:34:50.263712Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488013018437019448:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:50.264675Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0014b3/r3tmp/tmpODuwsn/pdisk_1.dat 2025-03-31T16:34:50.428297Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:34:50.454962Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:50.455036Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:50.456865Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19907, node 3 2025-03-31T16:34:50.510039Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:50.510057Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:50.510063Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:50.510166Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6315 TClient is connected to server localhost:6315 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:34:51.005370Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:34:51.011210Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:34:53.860006Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013031321921979:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:53.860101Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:53.871960Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:34:53.999803Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013031321922440:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:53.999922Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:54.000763Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013031321922445:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:54.005617Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-31T16:34:54.017159Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488013035616889743:2372], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking }
2025-03-31T16:34:54.113001Z node 3 :TX_PROXY ERROR: Actor# [3:7488013035616889794:2616] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
>> TSchemeshardBorrowedCompactionTest::SchemeshardShouldCompactBorrowedBeforeSplit [GOOD]
>> TSchemeshardBorrowedCompactionTest::SchemeshardShouldCompactBorrowedAfterSplitMerge
>> KqpQueryService::TableSink_HtapInteractive+withOltpSink [GOOD]
>> KqpQueryService::PeriodicTaskInSessionPoolSessionCloseByIdle [GOOD]
>> KqpQueryService::ReadDatashardAndColumnshard
>> KqpService::SessionBusy
>> KqpQueryService::AlterCdcTopic [GOOD]
>> KqpService::SessionBusyRetryOperationSync [GOOD]
>> KqpService::SwitchCache+UseCache
>> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestReboot [GOOD]
>> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCleanIndex
>> YdbTableSplit::SplitByLoadWithNonEmptyRangeReads [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_HtapInteractive+withOltpSink [GOOD]
Test command err:
Trying to start YDB, gRPC: 30509, MsgBus: 15334
2025-03-31T16:34:29.735320Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012928035981611:2195];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:34:29.738754Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0014e9/r3tmp/tmpLxztSu/pdisk_1.dat
2025-03-31T16:34:30.256988Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:34:30.258998Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:34:30.260460Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:34:30.268983Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 30509, node 1
2025-03-31T16:34:30.467881Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T16:34:30.467904Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T16:34:30.467916Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T16:34:30.468101Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:15334
TClient is connected to server localhost:15334
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:34:31.316742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:33.011257Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012945215851324:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:33.011393Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:33.618508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:34:33.822526Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488012945215851502:2341];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:34:33.822526Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488012945215851496:2338];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:34:33.822766Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488012945215851496:2338];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:34:33.823060Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488012945215851496:2338];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:34:33.823217Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488012945215851496:2338];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:34:33.823290Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488012945215851502:2341];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:34:33.823363Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488012945215851496:2338];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:34:33.823511Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488012945215851502:2341];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:34:33.823522Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488012945215851496:2338];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:34:33.823646Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488012945215851502:2341];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:34:33.823666Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488012945215851496:2338];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:34:33.823758Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488012945215851502:2341];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:34:33.823806Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7488012945215851496:2338];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:34:33.823856Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488012945215851502:2341];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:34:33.823922Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488012945215851496:2338];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:34:33.823949Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488012945215851502:2341];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:34:33.824045Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488012945215851496:2338];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:34:33.824091Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488012945215851502:2341];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:34:33.824196Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488012945215851496:2338];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:34:33.824227Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488012945215851502:2341];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:34:33.824747Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488012945215851496:2338];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:34:33.824799Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488012945215851502:2341];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:34:33.824939Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488012945215851502:2341];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:34:33.825046Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488012945215851502:2341];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:34:33.862159Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488012945215851506:2343];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:34:33.862376Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037896;self_id=[1:7488012945215851506:2343];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:34:33.862595Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488012945215851506:2343];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:34:33.862722Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488012945215851506:2343];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:34:33.862832Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488012945215851506:2343];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:34:33.862936Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488012945215851506:2343];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:34:33.863027Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488012945215851506:2343];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11; ... line=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:34:54.615555Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[3:7488013032794379367:2337];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:34:54.616144Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:34:54.616209Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:34:54.616362Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:34:54.616392Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:34:54.616556Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:34:54.616583Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:34:54.616680Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T16:34:54.616706Z node 3 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T16:34:54.634925Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:34:54.634998Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:34:54.635430Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:34:54.635484Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:34:54.635703Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:34:54.635740Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:34:54.635852Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:34:54.635893Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:34:54.635972Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:34:54.636007Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:34:54.636060Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:34:54.636097Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:34:54.636863Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:34:54.636916Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:34:54.637127Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:34:54.637165Z node 3 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:34:54.637314Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:34:54.637372Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:34:54.637591Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:34:54.637631Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:34:54.637752Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T16:34:54.637793Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T16:34:54.681513Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-31T16:34:54.681701Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-31T16:34:54.688389Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-31T16:34:54.688895Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-31T16:34:54.695426Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-31T16:34:54.696299Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-31T16:34:54.701323Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-31T16:34:54.704108Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-31T16:34:54.708201Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-31T16:34:54.712118Z node 3 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-31T16:34:54.718689Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-31T16:34:54.801562Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013032794379824:2413], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:54.801664Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013032794379829:2416], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:54.801668Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:54.805823Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-03-31T16:34:54.817107Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488013032794379831:2417], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking }
2025-03-31T16:34:54.919502Z node 3 :TX_PROXY ERROR: Actor# [3:7488013032794379882:2660] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-31T16:34:55.116694Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=281474976715664;tx_id=281474976715664;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715664;
2025-03-31T16:34:55.323704Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=281474976715667;tx_id=281474976715667;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715667;
2025-03-31T16:34:55.379694Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488013015614509415:2071];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:34:55.379800Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
>> KqpSnapshotIsolation::TConflictReadWriteOltpNoSink [FAIL]
>> KqpQueryService::DdlUser
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::AlterCdcTopic [GOOD]
Test command err:
Trying to start YDB, gRPC: 1926, MsgBus: 29820
2025-03-31T16:34:34.921019Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012949407015384:2061];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:34:34.921851Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0014c4/r3tmp/tmpPAPHFE/pdisk_1.dat
2025-03-31T16:34:35.275588Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 1926, node 1
2025-03-31T16:34:35.340240Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:34:35.340341Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:34:35.342589Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-31T16:34:35.344633Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T16:34:35.344664Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T16:34:35.344699Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T16:34:35.344835Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:29820
TClient is connected to server localhost:29820
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:34:35.837044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:34:35.862053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:34:35.994597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:36.151173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:36.233329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:37.973362Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012962291919054:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:37.973593Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:38.356396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:34:38.390525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:34:38.422664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:34:38.459529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:34:38.487548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:34:38.519715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:34:38.625554Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012966586886863:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:38.625665Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:38.626070Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012966586886868:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:38.631023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:34:38.643412Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012966586886870:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:34:38.742239Z node 1 :TX_PROXY ERROR: Actor# [1:7488012966586886924:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:34:39.611040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:34:39.732260Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488012970881854580:2506], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:3:25: Error: At function: KiWriteTable!
:3:25: Error: Document API table cannot be modified from YQL query: /Root/DocumentApiTest, code: 2008 2025-03-31T16:34:39.732536Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Y2Y3ZmIyYTEtMTRmNTNlY2YtNTRiZmJhNzItN2M3NzcxYTM=, ActorId: [1:7488012970881854512:2496], ActorState: ExecuteState, TraceId: 01jqpgw6x5c4qknjcbwfb73twh, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Type annotation, code: 1030
:3:25: Error: At function: KiWriteTable!
:3:25: Error: Document API table cannot be modified from YQL query: /Root/DocumentApiTest, code: 2008 2025-03-31T16:34:39.920377Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488012949407015384:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:39.920482Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 15269, MsgBus: 24088 2025-03-31T16:34:40.577292Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012973960223760:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:40.577341Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0014c4/r3tmp/tmpiwYW4r/pdisk_1.dat 2025-03-31T16:34:40.703879Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15269, node 2 2025-03-31T16:34:40.741993Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:40.742079Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:40.763353Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:34:40.831645Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:40.831672Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:40.831679Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:40.831818Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24088 TClient is connected to server localhost:24088 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:34:41.245262Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSub ... , Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:49.390459Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:49.393860Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:34:49.404955Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488013011794940347:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:34:49.472223Z node 3 :TX_PROXY ERROR: Actor# [3:7488013011794940398:2334] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:34:49.503159Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:1, at schemeshard: 72057594046644480 2025-03-31T16:34:49.708942Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7488013011794940551:2355], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:30: Error: At function: KiWriteTable!
:2:65: Error: Failed to convert type: Struct<'id':Int32,'val':Null> to Struct<'id':Int32,'val':Int32>
:2:65: Error: Failed to convert 'val': Null to Int32
:2:65: Error: Failed to convert input columns types to scheme types, code: 2031 2025-03-31T16:34:49.710376Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ODQ3NzY3ZWYtMjY5M2MxYjItMjgwMDFmYzctY2Q4MTBlMGQ=, ActorId: [3:7488013011794940549:2354], ActorState: ExecuteState, TraceId: 01jqpgwgpxebtrx813s9hg237r, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-03-31T16:34:49.737783Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 23131, MsgBus: 15024 2025-03-31T16:34:50.926144Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488013018081556738:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:50.926213Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0014c4/r3tmp/tmpxVUjwQ/pdisk_1.dat 2025-03-31T16:34:51.081614Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:51.081717Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:51.097499Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:34:51.097727Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23131, node 4 2025-03-31T16:34:51.170828Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:51.170857Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:51.170867Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:51.171003Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15024 TClient is connected to server localhost:15024 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:34:51.757135Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-03-31T16:34:51.771077Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:34:51.866564Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:52.072747Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:52.174387Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:54.619391Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488013035261427689:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:54.619481Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:54.673789Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:34:54.708876Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:34:54.747194Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:34:54.784572Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:34:54.829479Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:34:54.903989Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:34:55.037751Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488013039556395508:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:55.037828Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488013039556395514:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:55.037861Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:55.041646Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:34:55.056963Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7488013039556395516:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:34:55.160488Z node 4 :TX_PROXY ERROR: Actor# [4:7488013039556395573:3457] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:34:55.923918Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7488013018081556738:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:55.924001Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:34:56.102426Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:34:56.249165Z node 4 :CHANGE_EXCHANGE WARN: [CdcChangeSenderMain][72075186224037919:1][4:7488013043851363365:2523] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:18:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2025-03-31T16:34:56.349122Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715673:0, at schemeshard: 72057594046644480 Query failed, status: BAD_REQUEST:
: Error: Cannot change partition count. Use split/merge instead, code: 2017 2025-03-31T16:34:56.399181Z node 4 :TX_PROXY ERROR: Actor# [4:7488013043851363521:3872] txid# 281474976715674, issues: { message: "Cannot change partition count. Use split/merge instead" severity: 1 } 2025-03-31T16:34:56.399447Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=YTFkNGYxNTUtMjdmNjVhNzYtYmQxNTNjODQtZWIyNjc3ZTU=, ActorId: [4:7488013043851363435:2534], ActorState: ExecuteState, TraceId: 01jqpgwq7x7xmcnfhpzs27j6fg, Create QueryResponse for error on request, msg: >> KqpQueryService::TableSink_OltpReplace+HasSecondaryIndex [GOOD] >> KqpQueryService::TableSink_OltpReplace-HasSecondaryIndex >> KqpQueryService::ShowCreateTableOnView [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::SplitByLoadWithNonEmptyRangeReads [GOOD] Test command err: 2025-03-31T16:33:56.936182Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012786123914767:2275];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:56.936458Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00179a/r3tmp/tmpcmX1gn/pdisk_1.dat 2025-03-31T16:33:57.273119Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:33:57.316989Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:57.317130Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 29408, node 1 2025-03-31T16:33:57.339287Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:33:57.356436Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:57.356477Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:57.356488Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:57.356617Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15870 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:33:57.580939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:15870 2025-03-31T16:33:59.724906Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012799008817465:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:59.725057Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:59.966052Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:34:00.153070Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012803303784934:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:00.153137Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:00.153196Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012803303784939:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:00.156390Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-31T16:34:00.187680Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012803303784941:2354], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-31T16:34:00.284035Z node 1 :TX_PROXY ERROR: Actor# [1:7488012803303785020:2809] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:34:00.409923Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jqpgv0ar5yxgcea8am7sxqft, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjA5NjFjYzctZjczMWExOTktOGFjNWNkMDctNDc5ZTQwNzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:00.442571Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jqpgv0kq494php9qqa50n6kd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjA5NjFjYzctZjczMWExOTktOGFjNWNkMDctNDc5ZTQwNzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:00.461843Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01jqpgv0mb2pk3fd8wahgwa021, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjA5NjFjYzctZjczMWExOTktOGFjNWNkMDctNDc5ZTQwNzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:00.479122Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01jqpgv0mwd1vb4aw8ak9rdv8e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjA5NjFjYzctZjczMWExOTktOGFjNWNkMDctNDc5ZTQwNzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:00.496126Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jqpgv0ne9xj156gx6bvwkzet, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjA5NjFjYzctZjczMWExOTktOGFjNWNkMDctNDc5ZTQwNzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:00.510732Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jqpgv0nw4pgza09s21ncwf5h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjA5NjFjYzctZjczMWExOTktOGFjNWNkMDctNDc5ZTQwNzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:00.531553Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jqpgv0phc1dx21e90k11jr9j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjA5NjFjYzctZjczMWExOTktOGFjNWNkMDctNDc5ZTQwNzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:00.545809Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710668. Ctx: { TraceId: 01jqpgv0q05mjt5zhev169a6bm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjA5NjFjYzctZjczMWExOTktOGFjNWNkMDctNDc5ZTQwNzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:00.571357Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710669. Ctx: { TraceId: 01jqpgv0qs8qe5w9yt0dnaf0tv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjA5NjFjYzctZjczMWExOTktOGFjNWNkMDctNDc5ZTQwNzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:00.595274Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710670. 
Ctx: { TraceId: 01jqpgv0rheh9y8s2syx76p9t4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjA5NjFjYzctZjczMWExOTktOGFjNWNkMDctNDc5ZTQwNzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:00.619895Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710671. Ctx: { TraceId: 01jqpgv0s89x2dhsqgayrax8ye, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjA5NjFjYzctZjczMWExOTktOGFjNWNkMDctNDc5ZTQwNzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:00.649965Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710672. Ctx: { TraceId: 01jqpgv0t7fmrde96rv7t4x0hd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjA5NjFjYzctZjczMWExOTktOGFjNWNkMDctNDc5ZTQwNzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:00.669855Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710673. Ctx: { TraceId: 01jqpgv0tvbwdgdv882abbcn6e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjA5NjFjYzctZjczMWExOTktOGFjNWNkMDctNDc5ZTQwNzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:00.701788Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710674. Ctx: { TraceId: 01jqpgv0vvfxvhjs506wt0eycr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjA5NjFjYzctZjczMWExOTktOGFjNWNkMDctNDc5ZTQwNzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:00.732724Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01jqpgv0ws5apd7nvd0p3n1g09, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjA5NjFjYzctZjczMWExOTktOGFjNWNkMDctNDc5ZTQwNzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:00.745867Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710676. Ctx: { TraceId: 01jqpgv0x84qvn5152befpjpx6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjA5NjFjYzctZjczMWExOTktOGFjNWNkMDctNDc5ZTQwNzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:00.765703Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710677. Ctx: { TraceId: 01jqpgv0xv46s15d3jxddj8qaq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjA5NjFjYzctZjczMWExOTktOGFjNWNkMDctNDc5ZTQwNzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:00.785755Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710678. Ctx: { TraceId: 01jqpgv0yg2qebvdj1rdavnze4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjA5NjFjYzctZjczMWExOTktOGFjNWNkMDctNDc5ZTQwNzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:00.799905Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710679. Ctx: { TraceId: 01jqpgv0yydahyxckw07qwy9ns, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjA5NjFjYzctZjczMWExOTktOGFjNWNkMDctNDc5ZTQwNzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:00.817842Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710680. Ctx: { TraceId: 01jqpgv0zg9axats07r9jvg5wq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node ... t, use /Root 2025-03-31T16:34:55.050337Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976724977. 
Ctx: { TraceId: 01jqpgwny709s459ps1bymqas2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2EzZWY4NWItMzEzMmQ0ODYtZTY4MjBjMGQtZTM5NzRlYWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:55.055935Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976724979. Ctx: { TraceId: 01jqpgwny9bgtqtecekc5dkhzg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGFlNzFkMDAtOTExZmY3NGQtNDBjZjExN2QtODdiNGZhNmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:55.056503Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976724980. Ctx: { TraceId: 01jqpgwny9eat26wgz1bqtm2bw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDllZTI1NjMtOTNmMDdiMGMtZjgxNWIzYzYtOTYyNWUxYjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:55.056921Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976724981. Ctx: { TraceId: 01jqpgwny9dp83wtm6hdj8w0pw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzM3MDEzMmUtYzFmZDZhNjEtMzk0YThhNjctYzAwMjA2Zg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:55.057566Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976724982. Ctx: { TraceId: 01jqpgwny96fx68ammgq8d4kd1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWY0M2I0YTUtZTllZGQzMzYtYmE1ZTNiZDQtYWY0MmQxMjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:55.057988Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976724983. Ctx: { TraceId: 01jqpgwnyd7b2yehtwys8kaq9f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTE1OGUyN2MtYWI2YWRlZDItNTU0ZTM5MzQtNDQzYjBjYzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:55.065367Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976724984. Ctx: { TraceId: 01jqpgwnyjejxg13dvpe962ect, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2UwOGQxMWItZDVjYzA5ODQtNDg2Mjk1ZC01YWIwZjUzMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:55.068105Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976724985. Ctx: { TraceId: 01jqpgwnyhezqfmrc5z7rm968q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmMxYTBhNzYtNGE0NWJjYWUtMTZiMzgxN2YtNWY2OGJlOTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:55.068455Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976724986. Ctx: { TraceId: 01jqpgwnyhdh2jac1gdfx9knbv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODZmMjI5N2QtZDJmZjc2MjItZTExN2NiY2MtZTRkMWE0ODE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:55.072621Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976724987. Ctx: { TraceId: 01jqpgwnyqaexvqcnre1vhxzc7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWIyNmY4YWQtMTNhYjM0My03YzYwYThlNS1lODM5NDBlOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:55.072991Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976724988. 
Ctx: { TraceId: 01jqpgwnywb8vdbntknz2zkyms, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWY0M2I0YTUtZTllZGQzMzYtYmE1ZTNiZDQtYWY0MmQxMjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:55.075100Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976724989. Ctx: { TraceId: 01jqpgwnyq5ja4282dq3vkw19m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2EzZWY4NWItMzEzMmQ0ODYtZTY4MjBjMGQtZTM5NzRlYWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:55.076336Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976724990. Ctx: { TraceId: 01jqpgwnz1e2geehtdtrt8esq2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGFlNzFkMDAtOTExZmY3NGQtNDBjZjExN2QtODdiNGZhNmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:55.078813Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976724991. Ctx: { TraceId: 01jqpgwnz12w4fdbt7t3arqhg5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTE1OGUyN2MtYWI2YWRlZDItNTU0ZTM5MzQtNDQzYjBjYzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:55.080848Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976724992. Ctx: { TraceId: 01jqpgwnz6dwyx4zk9vqbjefem, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDllZTI1NjMtOTNmMDdiMGMtZjgxNWIzYzYtOTYyNWUxYjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:55.084542Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976724994. Ctx: { TraceId: 01jqpgwnz7fcsdww1m7r2bhpjz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmMxYTBhNzYtNGE0NWJjYWUtMTZiMzgxN2YtNWY2OGJlOTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:55.085711Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976724997. Ctx: { TraceId: 01jqpgwnz93cmvqtdzb1g4t0bn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWY0M2I0YTUtZTllZGQzMzYtYmE1ZTNiZDQtYWY0MmQxMjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:55.086468Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976724993. Ctx: { TraceId: 01jqpgwnz77hqwjrpsd1xxaf6w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2UwOGQxMWItZDVjYzA5ODQtNDg2Mjk1ZC01YWIwZjUzMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:55.086966Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976724995. Ctx: { TraceId: 01jqpgwnz7060vfrjrf2tzyk5w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzM3MDEzMmUtYzFmZDZhNjEtMzk0YThhNjctYzAwMjA2Zg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:55.088630Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976724996. Ctx: { TraceId: 01jqpgwnz903z7z1zeefjj2c5x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODZmMjI5N2QtZDJmZjc2MjItZTExN2NiY2MtZTRkMWE0ODE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls request: /Root/Foo 2025-03-31T16:34:55.092391Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976724998. 
Ctx: { TraceId: 01jqpgwnza3y5hwt8vfk9c8m75, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWIyNmY4YWQtMTNhYjM0My03YzYwYThlNS1lODM5NDBlOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls response: 2025-03-31T16:34:55.095230Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976724999. Ctx: { TraceId: 01jqpgwnzccyzeh6dygwgnyy58, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2EzZWY4NWItMzEzMmQ0ODYtZTY4MjBjMGQtZTM5NzRlYWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743438840094 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) 2025-03-31T16:34:55.096594Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976725000. Ctx: { TraceId: 01jqpgwnzj4v4mpqrn5yqf19xh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDllZTI1NjMtOTNmMDdiMGMtZjgxNWIzYzYtOTYyNWUxYjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:55.097265Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976725001. Ctx: { TraceId: 01jqpgwnzje2n3141bag3fy53s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTE1OGUyN2MtYWI2YWRlZDItNTU0ZTM5MzQtNDQzYjBjYzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:55.099608Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976725002. Ctx: { TraceId: 01jqpgwnzb4hj93tzednp1e0b3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGFlNzFkMDAtOTExZmY3NGQtNDBjZjExN2QtODdiNGZhNmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:34:55.105731Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976725003. Ctx: { TraceId: 01jqpgwnzt87xhssq0qw62hs8r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWY0M2I0YTUtZTllZGQzMzYtYmE1ZTNiZDQtYWY0MmQxMjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-31T16:34:55.180903Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037890 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 66146 rowCount 1076 cpuUsage 0 2025-03-31T16:34:55.181100Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037889 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 56672 rowCount 924 cpuUsage 0 2025-03-31T16:34:55.281107Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 2 2025-03-31T16:34:55.281281Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037890 followerId=0, pathId 2: RowCount 1076, DataSize 66146 2025-03-31T16:34:55.281437Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037889 followerId=0, pathId 2: RowCount 924, DataSize 56672 2025-03-31T16:34:55.281616Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743438840094 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... 
(TRUNCATED) Table has 2 shards >> KqpSinkLocks::EmptyRangeOlap [GOOD] >> KqpSinkLocks::EmptyRangeAlreadyBrokenOlap >> KqpTx::RollbackTx [GOOD] >> KqpTx::RollbackTx2 >> KqpQueryServiceScripts::ExecuteScriptWithParameters >> KqpQueryService::TableSink_Olap_Replace >> KqpQueryService::TableSink_HtapInteractive-withOltpSink [GOOD] >> KqpQueryService::TableSink_OlapInsert ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ShowCreateTableOnView [GOOD] Test command err: Trying to start YDB, gRPC: 13752, MsgBus: 18175 2025-03-31T16:34:42.517794Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012982693903951:2158];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:42.519398Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0014ab/r3tmp/tmpCne2P0/pdisk_1.dat 2025-03-31T16:34:42.917301Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13752, node 1 2025-03-31T16:34:42.958483Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:42.958574Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:42.963284Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:34:43.051759Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:43.051780Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:43.051790Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:43.051913Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18175 TClient is connected to server localhost:18175 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:34:43.553509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:34:43.580659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:43.739023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:34:43.893734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:34:43.968613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:45.796047Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012995578807511:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:45.796154Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:46.149676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:34:46.178704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:34:46.221419Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:34:46.290091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:34:46.324041Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:34:46.366707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:34:46.411868Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012999873775323:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:46.411944Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:46.412148Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012999873775328:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:46.415697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:34:46.423743Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012999873775330:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:34:46.503355Z node 1 :TX_PROXY ERROR: Actor# [1:7488012999873775383:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:34:47.517790Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488012982693903951:2158];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:47.517866Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 21844, MsgBus: 8543 2025-03-31T16:34:48.351821Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488013008942356462:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:48.351881Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0014ab/r3tmp/tmp2ixBNU/pdisk_1.dat 2025-03-31T16:34:48.480924Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:34:48.494818Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:48.494911Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:48.496489Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21844, node 2 2025-03-31T16:34:48.549642Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:48.549660Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:48.549667Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:48.549776Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8543 TClient is connected to server localhost:8543 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:34:48.932453Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:48.946867Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:49.008644Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:49.195791Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:49.274708Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:51.264835Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [ ... X_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:34:51.421615Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:34:51.453695Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:34:51.487639Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:34:51.524827Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:34:51.583282Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013021827260607:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:51.583460Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:51.586536Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013021827260612:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:51.590192Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:34:51.600140Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488013021827260615:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:34:51.687502Z node 2 :TX_PROXY ERROR: Actor# [2:7488013021827260668:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 23773, MsgBus: 1061 2025-03-31T16:34:53.649359Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488013028401725106:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:53.649460Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0014ab/r3tmp/tmp820HKR/pdisk_1.dat 2025-03-31T16:34:53.773261Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:34:53.796355Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:53.796438Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:53.798217Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23773, node 3 2025-03-31T16:34:53.851040Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:53.851063Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:53.851071Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:53.851223Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1061 TClient is connected to server localhost:1061 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:34:54.332339Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:34:54.339893Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:34:54.348547Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:54.455506Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:54.613453Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:54.715204Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:57.044007Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013045581596061:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:57.044090Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:57.094117Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:34:57.131955Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:34:57.204082Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:34:57.276456Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:34:57.346929Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:34:57.384136Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:34:57.475585Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013045581596584:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:57.475703Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:57.476039Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013045581596589:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:57.480533Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:34:57.491836Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488013045581596591:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:34:57.549427Z node 3 :TX_PROXY ERROR: Actor# [3:7488013045581596646:3450] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:34:58.589193Z node 3 :SYSTEM_VIEWS ERROR: Scan error, actor: [3:7488013049876564303:2510], owner: [3:7488013049876564300:2508], scan id: 0, table id: [1:0:0:show_create], error: Invalid path type 2025-03-31T16:34:58.590289Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7488013049876564301:2509], TxId: 281474976715672, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=3&id=MjgzNDEyYzQtNjlhNDAxZTAtODljMDE1YTAtOTI2OWVjYmI=. TraceId : 01jqpgws992b25kdx0jbta1tfs. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [3:7488013049876564297:2496], status: SCHEME_ERROR, reason: {
: Error: Terminate execution } 2025-03-31T16:34:58.600891Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MjgzNDEyYzQtNjlhNDAxZTAtODljMDE1YTAtOTI2OWVjYmI=, ActorId: [3:7488013049876564233:2496], ActorState: ExecuteState, TraceId: 01jqpgws992b25kdx0jbta1tfs, Create QueryResponse for error on request, msg: 2025-03-31T16:34:58.644565Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488013028401725106:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:58.644646Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpSinkTx::ExplicitTcl [GOOD] >> KqpSinkTx::Interactive >> KqpQueryServiceScripts::ExecuteMultiScript [GOOD] >> KqpQueryServiceScripts::ExecuteScriptPg >> KqpQueryService::ShowCreateTable >> KqpSinkLocks::TInvalidate [GOOD] >> KqpSinkLocks::OlapUncommittedRead ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TConflictReadWriteOltpNoSink [FAIL] Test command err: Trying to start YDB, gRPC: 11152, MsgBus: 17683 2025-03-31T16:34:40.365821Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012972772251140:2057];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:40.365973Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012f3/r3tmp/tmpkqFVi8/pdisk_1.dat 2025-03-31T16:34:40.731941Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:34:40.780142Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected TServer::EnableGrpc on GrpcPort 11152, node 1 2025-03-31T16:34:40.780273Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:40.782381Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:34:40.840800Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:40.840826Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:40.840832Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:40.840997Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17683 TClient is connected to server localhost:17683 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:34:41.422853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:43.388217Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012985657153674:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:43.388309Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012985657153686:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:43.388490Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:43.392661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:34:43.404688Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012985657153711:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:34:43.465132Z node 1 :TX_PROXY ERROR: Actor# [1:7488012985657153764:2337] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:34:43.838857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:34:43.949097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:34:44.976144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:34:45.734935Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488012972772251140:2057];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:45.797218Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:34:46.367876Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjA2NDRiYjQtNmM0MTdkN2QtYzQ0NmRkMDctNTZlZjcxMzg=, ActorId: [1:7488012998542064034:2969], ActorState: ExecuteState, TraceId: 01jqpgwdbp37rv9y6gty1b0cy8, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictReadWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191FE23B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x196C634F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131: DoExecute @ 0x18DCECE7 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18D0BACA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:159: Execute_ @ 0x18DB8872 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18DBEF27 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18DBEF27 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18DBEF27 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196FD375 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196FD375 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196FD375 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x196CCEC8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18DBE0F3 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x196CE795 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196F78EC 17. ??:0: ?? @ 0x7FCF3B373D8F 18. ??:0: ?? @ 0x7FCF3B373E3F 19. ??:0: ?? @ 0x163B4028 Trying to start YDB, gRPC: 22903, MsgBus: 3821 2025-03-31T16:34:51.385867Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488013019566483493:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:51.385979Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012f3/r3tmp/tmpOiINWK/pdisk_1.dat 2025-03-31T16:34:51.508157Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:34:51.532271Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:51.532370Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:51.533627Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22903, node 2 2025-03-31T16:34:51.606261Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:51.606278Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:51.606283Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:51.606386Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3821 TClient is connected to server localhost:3821 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:34:52.094928Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:52.103685Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:34:54.708236Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013032451386015:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:54.708248Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013032451386034:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:54.708330Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:54.712607Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:34:54.725681Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488013032451386044:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:34:54.789897Z node 2 :TX_PROXY ERROR: Actor# [2:7488013032451386095:2335] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:34:54.843706Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-31T16:34:54.915501Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-31T16:34:55.898851Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:34:56.660248Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488013019566483493:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:56.706856Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:34:57.286984Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MzgwMWJmN2ItZDQ1MjE0NmEtZDM4YTMzOTMtZWQwZDczYzg=, ActorId: [2:7488013045336296415:2968], ActorState: ExecuteState, TraceId: 01jqpgwr23cfwew6n1vzawb4bs, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictReadWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191FE23B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x196C634F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131: DoExecute @ 0x18DCECE7 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18D0BACA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:166: Execute_ @ 0x18DB8A9A 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18DBEF27 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18DBEF27 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18DBEF27 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196FD375 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196FD375 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196FD375 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x196CCEC8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18DBE0F3 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x196CE795 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196F78EC 17. ??:0: ?? @ 0x7FCF3B373D8F 18. ??:0: ?? @ 0x7FCF3B373E3F 19. ??:0: ?? @ 0x163B4028 >> KqpQueryServiceScripts::ExecuteScriptWithUnspecifiedMode [GOOD] >> KqpQueryServiceScripts::ExecuteScriptWithTimeout >> KqpQueryServiceScripts::ExecuteScriptWithWorkloadManager [GOOD] >> KqpQueryServiceScripts::ExplainScript >> KqpQueryService::Write [GOOD] >> KqpQueryServiceScripts::CancelScriptExecution >> KqpQueryService::TableSink_Htap+withOltpSink [GOOD] >> KqpQueryService::TableSink_Htap-withOltpSink >> KqpQueryService::CreateAndAlterTopic [GOOD] >> KqpQueryService::CreateOrDropTopicOverTable >> KqpQueryServiceScripts::ExecuteScriptStatsBasic >> TraverseColumnShard::TraverseColumnTableHiveDistributionAbsentNodes [GOOD] >> KqpService::SessionBusy [GOOD] >> KqpService::SessionBusyRetryOperation >> KqpQueryService::TableSink_OltpReplace-HasSecondaryIndex [GOOD] >> KqpQueryService::TableSink_OltpOrder >> KqpQueryServiceScripts::ParseScript >> KqpQueryService::ReadDatashardAndColumnshard [GOOD] >> KqpQueryService::ReadManyRanges ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableHiveDistributionAbsentNodes [GOOD] Test command err: 2025-03-31T16:32:28.308349Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:32:28.308726Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:32:28.308851Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ed2/r3tmp/tmpBA9Wt9/pdisk_1.dat 2025-03-31T16:32:28.686091Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16470, node 1 2025-03-31T16:32:28.935975Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:32:28.936055Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:32:28.936095Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:32:28.936631Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:32:28.944267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:32:29.042546Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:29.042701Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:32:29.056721Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22759 2025-03-31T16:32:29.569767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:32:33.043627Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-31T16:32:33.078300Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:33.078418Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:32:33.109610Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:32:33.112458Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:32:33.390101Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:32:33.390720Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:32:33.391396Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:32:33.391544Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:32:33.391864Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:32:33.391988Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:32:33.392099Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:32:33.392184Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:32:33.392315Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:32:33.561220Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:33.561339Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:32:33.575060Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:32:33.736024Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:32:33.781545Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-31T16:32:33.781639Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-31T16:32:33.808949Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-31T16:32:33.810022Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-31T16:32:33.810234Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-31T16:32:33.810297Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-31T16:32:33.810350Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-31T16:32:33.810406Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-31T16:32:33.810457Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-31T16:32:33.810506Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-31T16:32:33.810892Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-31T16:32:33.846122Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-31T16:32:33.846261Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1872:2596], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-31T16:32:33.852681Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1883:2604] 2025-03-31T16:32:33.858301Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1915:2621] 2025-03-31T16:32:33.858405Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1915:2621], schemeshard id = 72075186224037897 2025-03-31T16:32:33.861956Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-31T16:32:33.884810Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-31T16:32:33.884877Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-31T16:32:33.885036Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-31T16:32:33.945746Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-31T16:32:33.955215Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-31T16:32:33.955401Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-31T16:32:34.155931Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-31T16:32:34.329026Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-31T16:32:34.407022Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-31T16:32:35.477771Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2236:3073], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:35.477946Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:35.505239Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-31T16:32:35.921528Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:32:35.921828Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:32:35.922180Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:32:35.922336Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:32:35.922506Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:32:35.922664Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:32:35.922807Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:32:35.922974Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:32:35.923149Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:32:35.923299Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:32:35.923440Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:32:35.923574Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:32:35.952753Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2397:2889];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:32:35.952887Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2397:2889];tablet_id=72075186224037900;process= ... 72:6415], schemeshard count = 1 2025-03-31T16:34:59.892969Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:34:59.893041Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-31T16:34:59.893078Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-03-31T16:34:59.893128Z node 2 :STATISTICS DEBUG: [72075186224037894] Start schedule traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-31T16:34:59.896696Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-31T16:34:59.915500Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-31T16:34:59.916025Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-31T16:34:59.916114Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-31T16:34:59.917062Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-03-31T16:34:59.917118Z node 2 :STATISTICS WARN: [72075186224037894] TTxResponseTabletDistribution::Execute. Some tablets do not exist in Hive anymore; tablet count = 3 2025-03-31T16:34:59.917178Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-03-31T16:35:01.129464Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-31T16:35:01.129544Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-31T16:35:01.129996Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-03-31T16:35:01.144420Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-03-31T16:35:01.144661Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-03-31T16:35:01.145623Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8633:6499], server id = [2:8638:6504], tablet id = 72075186224037899, status = OK 2025-03-31T16:35:01.146066Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8633:6499], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T16:35:01.146472Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8634:6500], server id = [2:8639:6505], tablet id = 72075186224037900, status = OK 2025-03-31T16:35:01.146534Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8634:6500], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T16:35:01.148228Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8635:6501], server id = [2:8640:6506], tablet id = 72075186224037901, status = OK 2025-03-31T16:35:01.148322Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8635:6501], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T16:35:01.148941Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8636:6502], server id = [2:8641:6507], tablet id = 72075186224037902, status = OK 2025-03-31T16:35:01.149017Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8636:6502], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T16:35:01.150403Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8637:6503], server id = [2:8642:6508], tablet id = 72075186224037903, status = OK 2025-03-31T16:35:01.150473Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8637:6503], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T16:35:01.156012Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-03-31T16:35:01.156767Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8633:6499], server id = [2:8638:6504], tablet id = 72075186224037899 2025-03-31T16:35:01.156812Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:35:01.158759Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-03-31T16:35:01.159419Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8656:6519], server id = [2:8658:6520], tablet id = 72075186224037904, status = OK 2025-03-31T16:35:01.159515Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8656:6519], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T16:35:01.159668Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8634:6500], server id = [2:8639:6505], tablet id = 72075186224037900 2025-03-31T16:35:01.159697Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:35:01.160470Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-03-31T16:35:01.161966Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8635:6501], server id = [2:8640:6506], tablet id = 72075186224037901 2025-03-31T16:35:01.162000Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:35:01.162608Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8659:6521], server id = [2:8662:6523], tablet id = 72075186224037905, status 
= OK 2025-03-31T16:35:01.162707Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8659:6521], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T16:35:01.163184Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-03-31T16:35:01.163918Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8636:6502], server id = [2:8641:6507], tablet id = 72075186224037902 2025-03-31T16:35:01.163958Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:35:01.165160Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2025-03-31T16:35:01.165748Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8661:6522], server id = [2:8666:6527], tablet id = 72075186224037906, status = OK 2025-03-31T16:35:01.165836Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8661:6522], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T16:35:01.166238Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8637:6503], server id = [2:8642:6508], tablet id = 72075186224037903 2025-03-31T16:35:01.166268Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:35:01.167919Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8664:6525], server id = [2:8670:6531], tablet id = 72075186224037907, status = OK 2025-03-31T16:35:01.167999Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8664:6525], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T16:35:01.168800Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8668:6529], server id = [2:8672:6532], tablet id = 72075186224037908, status = OK 2025-03-31T16:35:01.168862Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8668:6529], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T16:35:01.171041Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-03-31T16:35:01.171668Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8656:6519], server id = [2:8658:6520], tablet id = 72075186224037904 2025-03-31T16:35:01.171703Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:35:01.173656Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-03-31T16:35:01.174408Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8659:6521], server id = [2:8662:6523], tablet id = 72075186224037905 2025-03-31T16:35:01.174443Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:35:01.175981Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-03-31T16:35:01.177062Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8661:6522], server id = [2:8666:6527], tablet id = 72075186224037906 2025-03-31T16:35:01.177096Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:35:01.177419Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-03-31T16:35:01.177821Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8664:6525], server id = [2:8670:6531], tablet id = 72075186224037907 2025-03-31T16:35:01.177851Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:35:01.178026Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-03-31T16:35:01.178076Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 
2025-03-31T16:35:01.178350Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8668:6529], server id = [2:8672:6532], tablet id = 72075186224037908 2025-03-31T16:35:01.178378Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:35:01.178467Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-03-31T16:35:01.178704Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-03-31T16:35:01.179144Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-31T16:35:01.181954Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-03-31T16:35:01.229735Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8699:6555]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:35:01.230035Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-31T16:35:01.230083Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8699:6555], StatRequests.size() = 1 2025-03-31T16:35:01.348077Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YjZhMjYyN2YtYjVjYWZiNWEtMTI2ODk2YjQtZTc4MmQxNw==, TxId: 2025-03-31T16:35:01.348143Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YjZhMjYyN2YtYjVjYWZiNWEtMTI2ODk2YjQtZTc4MmQxNw==, TxId: ... 
waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2025-03-31T16:35:01.348935Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:8712:6561]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-03-31T16:35:01.349322Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-31T16:35:01.349719Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-03-31T16:35:01.349789Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-03-31T16:35:01.353112Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-03-31T16:35:01.353174Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-03-31T16:35:01.353230Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-03-31T16:35:01.360114Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 >> KqpQueryService::DdlUser [GOOD] >> KqpQueryService::DdlTx >> KqpSinkLocks::EmptyRangeAlreadyBroken [GOOD] >> KqpTx::RollbackTx2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::EmptyRangeAlreadyBroken [GOOD] Test command err: Trying to start YDB, gRPC: 62977, MsgBus: 15471 2025-03-31T16:34:39.423380Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012971426245944:2217];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:39.423493Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012f8/r3tmp/tmpfG0koa/pdisk_1.dat 2025-03-31T16:34:39.784262Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62977, node 1 2025-03-31T16:34:39.848614Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:39.850352Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:39.853432Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:34:39.872401Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:39.872430Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:39.872440Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:39.872583Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15471 TClient is connected to server localhost:15471 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:34:40.471271Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:42.502340Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012984311148316:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:42.502467Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:42.502946Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012984311148330:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:42.507955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:34:42.520432Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012984311148354:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:34:42.624287Z node 1 :TX_PROXY ERROR: Actor# [1:7488012984311148405:2337] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:34:42.980119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:34:43.091650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:34:44.044000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:34:44.776943Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488012971426245944:2217];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:44.816989Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:34:45.674483Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=6; 2025-03-31T16:34:45.685326Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 6 at tablet 72075186224037888 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-03-31T16:34:45.685513Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 6 at tablet 72075186224037888 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-03-31T16:34:45.685799Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488012997196059037:2969], Table: `/Root/Test` ([72057594046644480:6:1]), SessionActorId: [1:7488012997196058745:2969]Got LOCKS BROKEN for table `/Root/Test`. ShardID=72075186224037888, Sink=[1:7488012997196059037:2969].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-03-31T16:34:45.686325Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488012997196059030:2969], SessionActorId: [1:7488012997196058745:2969], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[1:7488012997196058745:2969]. isRollback=0 2025-03-31T16:34:45.686582Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzY5YzRmM2UtNDI1YTZiNzctODBiNWNlOTctODUyMGVlZmE=, ActorId: [1:7488012997196058745:2969], ActorState: ExecuteState, TraceId: 01jqpgwcqg7gy023tga2d9h9sw, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [1:7488012997196059031:2969] from: [1:7488012997196059030:2969] 2025-03-31T16:34:45.686660Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7488012997196059031:2969] TxId: 281474976710667. Ctx: { TraceId: 01jqpgwcqg7gy023tga2d9h9sw, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzY5YzRmM2UtNDI1YTZiNzctODBiNWNlOTctODUyMGVlZmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-03-31T16:34:45.687830Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzY5YzRmM2UtNDI1YTZiNzctODBiNWNlOTctODUyMGVlZmE=, ActorId: [1:7488012997196058745:2969], ActorState: ExecuteState, TraceId: 01jqpgwcqg7gy023tga2d9h9sw, Create QueryResponse for error on request, msg:
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 26891, MsgBus: 22123 2025-03-31T16:34:51.820343Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488013019970084263:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:51.830893Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012f8/r3tmp/tmpJhrfUQ/pdisk_1.dat 2025-03-31T16:34:51.972748Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:34:51.986442Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:51.986529Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:51.988762Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26891, node 2 2025-03-31T16:34:52.035870Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:52.035895Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:52.035903Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:52.036023Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22123 TClient is connected to server localhost:22123 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:34:52.441200Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:52.448952Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:34:55.198752Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013037149954063:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:55.198823Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013037149954081:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:55.198906Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:55.202966Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:34:55.211997Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488013037149954085:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:34:55.272001Z node 2 :TX_PROXY ERROR: Actor# [2:7488013037149954136:2335] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:34:55.315567Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-31T16:34:55.358486Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-31T16:34:56.419177Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:34:57.274826Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488013019970084263:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:57.333328Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:34:58.207435Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZmNmZmRlNjctOThjMTNlNDMtMmM1MTQwNTMtNjk3YjEzNg==, ActorId: [2:7488013045739897177:2969], ActorState: ExecuteState, TraceId: 01jqpgwrwg5zgwstxgqsvf5b4m, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken
: Error: Transaction locks invalidated. Table: `/Root/Test`, code: 2001
: Error: tx has deferred effects, but locks are broken WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> KqpQueryService::Followers >> KqpQueryService::TableSink_Olap_Replace [GOOD] >> KqpQueryService::TableSink_OlapUpsert ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::RollbackTx2 [GOOD] Test command err: Trying to start YDB, gRPC: 16819, MsgBus: 3714 2025-03-31T16:34:53.783168Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488013030758658886:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:53.783239Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012d9/r3tmp/tmpePlq45/pdisk_1.dat 2025-03-31T16:34:54.154872Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:34:54.159219Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:54.159337Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:54.162210Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16819, node 1 2025-03-31T16:34:54.255642Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:54.255664Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:54.255671Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:54.255802Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3714 TClient is connected to server localhost:3714 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:34:54.834606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:34:54.855235Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:34:54.861909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:54.990538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:55.159978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:55.220854Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:56.988483Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013043643562533:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:56.988614Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:57.300783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:34:57.330773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:34:57.358614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:34:57.390565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:34:57.422856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:34:57.457757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:34:57.506824Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013047938530338:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:57.506896Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:57.507149Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013047938530343:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:57.511277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:34:57.522197Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488013047938530345:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:34:57.588749Z node 1 :TX_PROXY ERROR: Actor# [1:7488013047938530398:3442] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:34:58.787271Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488013030758658886:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:58.787364Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:34:58.789515Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OGU1NGIzNmItZGIxZTliNDgtZGJiNmM2NGMtYjkwNGI3Y2I=, ActorId: [1:7488013052233497991:2496], ActorState: ReadyState, TraceId: 01jqpgwsjw6ya7gr1xvjvvrmtc, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 13712, MsgBus: 64448 2025-03-31T16:34:59.643260Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488013057171818883:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:59.643326Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012d9/r3tmp/tmpOTnIYG/pdisk_1.dat 2025-03-31T16:34:59.750394Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13712, node 2 2025-03-31T16:34:59.769685Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:59.769763Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:59.771356Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:34:59.814208Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:59.814239Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:59.814252Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:59.814369Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64448 TClient is connected to server localhost:64448 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:00.263436Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:00.280558Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:00.342452Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:00.505853Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:00.602607Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:02.642950Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013070056722512:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:02.643035Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:02.692198Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:35:02.724417Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:35:02.760660Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:35:02.791506Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:35:02.828947Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:35:02.870475Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:35:02.926381Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013070056723025:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:02.926452Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:02.926662Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013070056723030:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:02.929737Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:35:02.940732Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488013070056723032:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:35:03.021757Z node 2 :TX_PROXY ERROR: Actor# [2:7488013074351690381:3440] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:04.286405Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZTY0MDAyYWItOGFkMWVjOTItZjZjOWFiOGYtNDViOGUxNTg=, ActorId: [2:7488013074351690673:2496], ActorState: ReadyState, TraceId: 01jqpgwyyx0vj7p4e2jedrx55t, Create QueryResponse for error on request, msg: >> KqpQueryService::ShowCreateTable [GOOD] >> KqpQueryService::ShowCreateTableDisable >> KqpSnapshotIsolation::TConflictWriteOlap [FAIL] >> KqpQueryService::Ddl >> KqpQueryServiceScripts::ExecuteScriptWithParameters [GOOD] >> KqpQueryServiceScripts::ExecuteScriptWithForgetAfter >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite1 [GOOD] >> KqpQueryService::ExecuteQueryUpsertDoesntChangeIndexedValuesIfNotChanged >> KqpQueryService::TableSink_OlapOrder [GOOD] >> KqpQueryService::TableSink_OlapRWQueries >> KqpQueryService::CreateOrDropTopicOverTable [GOOD] >> KqpQueryServiceScripts::ParseScript [GOOD] >> KqpQueryServiceScripts::ListScriptExecutions >> KqpQueryServiceScripts::ExplainScript [GOOD] >> KqpQueryServiceScripts::ForgetScriptExecution >> KqpService::SessionBusyRetryOperation [GOOD] >> KqpService::RangeCache-UseCache >> KqpQueryService::DdlTx [GOOD] >> KqpQueryService::DdlWithExplicitTransaction ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite1 [GOOD] Test command err: Trying to start YDB, gRPC: 3956, MsgBus: 2364 2025-03-31T16:34:43.218888Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012986092891718:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:43.218949Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012e4/r3tmp/tmpTZHxeh/pdisk_1.dat 2025-03-31T16:34:43.581466Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3956, node 1 2025-03-31T16:34:43.622962Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:43.624847Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:43.628849Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:34:43.666422Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:43.666444Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:43.666455Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:43.666590Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2364 TClient is connected to server localhost:2364 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:34:44.239402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:46.268032Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012998977794237:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:46.268131Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:46.268224Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012998977794261:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:46.274066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:34:46.289378Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012998977794275:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:34:46.391854Z node 1 :TX_PROXY ERROR: Actor# [1:7488012998977794326:2337] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:34:46.646716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:34:46.765365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:34:47.747029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:34:48.427474Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488012986092891718:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:48.452637Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 17962, MsgBus: 17565 2025-03-31T16:34:55.538368Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488013038511942793:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:55.538499Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012e4/r3tmp/tmp5XRNMN/pdisk_1.dat 2025-03-31T16:34:55.639351Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17962, node 2 2025-03-31T16:34:55.689448Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:55.689557Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:55.692324Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:34:55.711679Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:55.711701Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:55.711708Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:55.711825Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17565 TClient is connected to server localhost:17565 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:34:56.115421Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:58.800612Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013051396845325:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:58.800621Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013051396845344:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:58.800720Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:58.804289Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:34:58.813967Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488013051396845354:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:34:58.907863Z node 2 :TX_PROXY ERROR: Actor# [2:7488013051396845406:2336] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:34:58.961234Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-31T16:34:59.035672Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-31T16:35:00.238775Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:35:01.060334Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488013038511942793:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:01.160904Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:35:01.995967Z node 2 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976715666; 2025-03-31T16:35:01.997294Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7488013064281756059:2969], Table: `/Root/KV` ([72057594046644480:7:1]), SessionActorId: [2:7488013064281755738:2969]Got LOCKS BROKEN for table `/Root/KV`. ShardID=72075186224037889, Sink=[2:7488013064281756059:2969].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-03-31T16:35:01.997862Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7488013064281756052:2969], SessionActorId: [2:7488013064281755738:2969], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/KV`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[2:7488013064281755738:2969]. isRollback=0 2025-03-31T16:35:01.998127Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ODI1N2I5MDAtYjEwN2Y3M2YtZTExMzI4OWYtMTk1Yjk5Y2Q=, ActorId: [2:7488013064281755738:2969], ActorState: ExecuteState, TraceId: 01jqpgwwmnfvddfksjmsqej49a, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [2:7488013064281756053:2969] from: [2:7488013064281756052:2969] 2025-03-31T16:35:01.998210Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7488013064281756053:2969] TxId: 281474976715666. Ctx: { TraceId: 01jqpgwwmnfvddfksjmsqej49a, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODI1N2I5MDAtYjEwN2Y3M2YtZTExMzI4OWYtMTk1Yjk5Y2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/KV`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-03-31T16:35:01.999296Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ODI1N2I5MDAtYjEwN2Y3M2YtZTExMzI4OWYtMTk1Yjk5Y2Q=, ActorId: [2:7488013064281755738:2969], ActorState: ExecuteState, TraceId: 01jqpgwwmnfvddfksjmsqej49a, Create QueryResponse for error on request, msg: WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> KqpQueryServiceScripts::ExecuteScriptPg [GOOD] >> KqpQueryServiceScripts::CancelScriptExecution [GOOD] >> KqpQueryServiceScripts::EmptyNextFetchToken >> KqpQueryServiceScripts::ExecuteScriptStatsBasic [GOOD] >> KqpQueryServiceScripts::ExecuteScriptStatsFull >> KqpQueryService::ReadManyRanges [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::CreateOrDropTopicOverTable [GOOD] Test command err: Trying to start YDB, gRPC: 16914, MsgBus: 10224 2025-03-31T16:34:46.583492Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012998223917769:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:46.583601Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00149f/r3tmp/tmpTUkp6m/pdisk_1.dat 2025-03-31T16:34:46.943995Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16914, node 1 2025-03-31T16:34:46.980532Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:46.980639Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:46.983059Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:34:47.032216Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:47.032244Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:47.032260Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:47.032380Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10224 TClient is connected to server localhost:10224 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
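[Editor's note] The LOCKS BROKEN sequence above (STATUS_LOCKS_BROKEN, then "Transaction locks invalidated. Table: `/Root/KV`., code: 2001", then an ABORTED QueryResponse) shows optimistic concurrency at commit time: the transaction records the row state it read, and the shard rejects the commit if a concurrent write has changed that state since. Below is a minimal standalone C++ sketch of that validation step; it is illustrative only, not YDB's datashard code, and all type and member names (TLock, TShard, RowVersion) are invented for the example.

#include <cstdint>
#include <unordered_map>
#include <vector>

// Models the commit-time lock check that yields
// "Operation is aborting because locks are not valid" (code 2001) above.
struct TLock {
    uint64_t Key;         // row key the transaction read
    uint64_t SeenVersion; // row version observed at read time
};

class TShard {
public:
    // Current committed version of each row.
    std::unordered_map<uint64_t, uint64_t> RowVersion;

    // Returns false (-> transaction ABORTED as "locks invalidated") if any row
    // read by the transaction was rewritten by a concurrent commit since then.
    bool ValidateLocks(const std::vector<TLock>& locks) const {
        for (const TLock& lock : locks) {
            auto it = RowVersion.find(lock.Key);
            const uint64_t current = (it == RowVersion.end()) ? 0 : it->second;
            if (current != lock.SeenVersion) {
                return false; // lock broken by a concurrent write
            }
        }
        return true;
    }

    void Commit(uint64_t key, uint64_t newVersion) {
        RowVersion[key] = newVersion;
    }
};

In the log, tx 281474976715666 had read `/Root/KV` on shard 72075186224037889, a concurrent write invalidated the recorded lock, and the whole transaction was rolled back and reported as ABORTED to the session, exactly as this check would do.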
2025-03-31T16:34:47.520794Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:47.545502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:34:49.494764Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013011108820323:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:49.494845Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013011108820334:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:49.494902Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:49.498211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-31T16:34:49.508614Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488013011108820337:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-31T16:34:49.583127Z node 1 :TX_PROXY ERROR: Actor# [1:7488013011108820388:2341] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:34:49.912773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:1, at schemeshard: 72057594046644480 2025-03-31T16:34:50.197602Z node 1 :TX_PROXY ERROR: Actor# [1:7488013015403787922:2468] txid# 281474976710665, issues: { message: "Check failed: path: \'/Root/.tmp/sessions/Yjc5MDE4NzktZjBiZjczZDQtNDhmMjMwYTQtOTA5NTgyMDU=\', error: path is temporary (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:34:50.208724Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Yjc5MDE4NzktZjBiZjczZDQtNDhmMjMwYTQtOTA5NTgyMDU=, ActorId: [1:7488013011108820302:2329], ActorState: ExecuteState, TraceId: 01jqpgwh5w004dybgr4k353wv8, Create QueryResponse for error on request, msg: 2025-03-31T16:34:50.231539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-31T16:34:50.236878Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488013015403787952:2367], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:17: Error: At function: KiReadTable!
:3:17: Error: Cannot find table 'db.[/Root/Temp]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:34:50.237162Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NWViNDM1Yy1hZDE4NGM0ZS1hNWIwYzBhZS0yZTE1MDYzOQ==, ActorId: [1:7488013015403787948:2366], ActorState: ExecuteState, TraceId: 01jqpgwh7c762yjp8ekwd1skcj, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:34:50.238842Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-03-31T16:34:50.239696Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710669:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 64371, MsgBus: 2363 2025-03-31T16:34:51.994033Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488013020248165759:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:51.994986Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00149f/r3tmp/tmpa7nYYS/pdisk_1.dat 2025-03-31T16:34:52.121207Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:34:52.141834Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:52.141931Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:52.143288Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64371, node 2 2025-03-31T16:34:52.198819Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:52.198843Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:52.198854Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:52.199008Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2363 TClient is connected to server localhost:2363 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
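[Editor's note] A pattern that repeats throughout this output: every fresh database first fails to fetch the workload-manager pool `default` (NOT_FOUND), then creates it, then the TPoolCreatorActor schedules a "doublechecking" retry, and any racing creator hits the TX_PROXY message "error: path exist, request accepts it", which is benign. The sketch below is an illustrative C++ rendering of that create-if-missing race under stated assumptions; EnsureDefaultPool, EStatus, and the fetch/create callbacks are invented names, not YDB's API.

#include <functional>
#include <string>

enum class EStatus { Ok, NotFound, AlreadyExists };

// Illustrative create-if-missing flow for the pool at
// /Root/.metadata/workload_manager/pools/default.
EStatus EnsureDefaultPool(const std::function<EStatus(const std::string&)>& fetch,
                          const std::function<EStatus(const std::string&)>& create) {
    const std::string path = "/Root/.metadata/workload_manager/pools/default";
    if (fetch(path) == EStatus::Ok) {
        return EStatus::Ok; // pool already provisioned
    }
    switch (create(path)) {
        case EStatus::Ok:
            // The creator still re-reads ("Transaction ... completed,
            // doublechecking") because the schema change lands asynchronously.
            return fetch(path);
        case EStatus::AlreadyExists:
            // Lost the race to another session; "path exist, request accepts
            // it" is treated as success, not an error.
            return EStatus::Ok;
        default:
            return EStatus::NotFound;
    }
}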
2025-03-31T16:34:52.672336Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:52.686098Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:34:55.083417Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013037428035608:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:55.083550Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:55.084105Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013037428035620:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:55.088239Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:34:55.100427Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488013037428035622:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:34:55.191895Z node 2 :TX_PROXY ERROR: Actor# [2:7488013037428035673:2336] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: ... lechecking } 2025-03-31T16:35:06.435955Z node 4 :TX_PROXY ERROR: Actor# [4:7488013088107671836:3440] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:07.460364Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:35:07.559299Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7488013070927800336:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:07.559588Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:35:07.588102Z node 4 :TX_PROXY ERROR: Actor# [4:7488013092402639545:3717] txid# 281474976715672, issues: { message: "Check failed: path: \'/Root/TmpTable\', error: unexpected path type (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges), expected types: EPathTypePersQueueGroup" severity: 1 } 2025-03-31T16:35:07.588211Z node 4 :KQP_GATEWAY ERROR: Unexpected error on scheme request, TxId: 281474976715672, ProxyStatus: ExecError, SchemeShardReason: Check failed: path: '/Root/TmpTable', error: unexpected path type (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges), expected types: EPathTypePersQueueGroup 2025-03-31T16:35:07.588391Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZTgxYjE2YjItNGZkMzRhNi04NGQ3MTAxNS0xMGFmNzhkNg==, ActorId: [4:7488013092402639515:2513], ActorState: ExecuteState, TraceId: 01jqpgx25m4w555etnf3kpxmj5, Create QueryResponse for error on request, msg: Query failed, status: GENERIC_ERROR:
: Error: Scheme operation failed, status: ExecError, reason: Check failed: path: '/Root/TmpTable', error: unexpected path type (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges), expected types: EPathTypePersQueueGroup Scheme entry: { name: .metadata, owner: metadata@system, type: Directory, size_bytes: 0, created_at: { plan_step: 1743438906405, tx_id: 281474976715668 } } Scheme entry: { name: BatchUpload, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743438903787, tx_id: 281474976715661 } } Scheme entry: { name: EightShard, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743438903500, tx_id: 281474976715659 } } Scheme entry: { name: Join1, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743438906328, tx_id: 281474976715666 } } Scheme entry: { name: Join2, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743438906370, tx_id: 281474976715667 } } Scheme entry: { name: KeyValue, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743438906167, tx_id: 281474976715662 } } Scheme entry: { name: KeyValue2, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743438906202, tx_id: 281474976715663 } } Scheme entry: { name: KeyValueLargePartition, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743438906244, tx_id: 281474976715664 } } Scheme entry: { name: Logs, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743438903605, tx_id: 281474976715660 } } Scheme entry: { name: Test, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743438906279, tx_id: 281474976715665 } } Scheme entry: { name: TmpTable, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743438907532, tx_id: 281474976715671 } } Scheme entry: { name: TwoShard, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743438903346, tx_id: 281474976715658 } } Scheme entry: { name: .sys, owner: , type: Directory, size_bytes: 0, created_at: { plan_step: 0, tx_id: 0 } } 2025-03-31T16:35:07.627027Z node 4 :TX_PROXY ERROR: Actor# [4:7488013092402639565:3728] txid# 281474976715674, issues: { message: "Check failed: path: \'/Root/TmpTable\', error: unexpected path type (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges), expected types: EPathTypePersQueueGroup" severity: 1 } 2025-03-31T16:35:07.627293Z node 4 :KQP_GATEWAY ERROR: Unexpected error on scheme request, TxId: 281474976715674, ProxyStatus: ExecError, SchemeShardReason: Check failed: path: '/Root/TmpTable', error: unexpected path type (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges), expected types: EPathTypePersQueueGroup 2025-03-31T16:35:07.627452Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZTgxYjE2YjItNGZkMzRhNi04NGQ3MTAxNS0xMGFmNzhkNg==, ActorId: [4:7488013092402639515:2513], ActorState: ExecuteState, TraceId: 01jqpgx26qdp361mm4y8ze8bvv, Create QueryResponse for error on request, msg: Query failed, status: GENERIC_ERROR:
: Error: Scheme operation failed, status: ExecError, reason: Check failed: path: '/Root/TmpTable', error: unexpected path type (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges), expected types: EPathTypePersQueueGroup Scheme entry: { name: .metadata, owner: metadata@system, type: Directory, size_bytes: 0, created_at: { plan_step: 1743438906405, tx_id: 281474976715668 } } Scheme entry: { name: BatchUpload, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743438903787, tx_id: 281474976715661 } } Scheme entry: { name: EightShard, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743438903500, tx_id: 281474976715659 } } Scheme entry: { name: Join1, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743438906328, tx_id: 281474976715666 } } Scheme entry: { name: Join2, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743438906370, tx_id: 281474976715667 } } Scheme entry: { name: KeyValue, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743438906167, tx_id: 281474976715662 } } Scheme entry: { name: KeyValue2, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743438906202, tx_id: 281474976715663 } } Scheme entry: { name: KeyValueLargePartition, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743438906244, tx_id: 281474976715664 } } Scheme entry: { name: Logs, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743438903605, tx_id: 281474976715660 } } Scheme entry: { name: Test, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743438906279, tx_id: 281474976715665 } } Scheme entry: { name: TmpTable, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743438907532, tx_id: 281474976715671 } } Scheme entry: { name: TwoShard, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743438903346, tx_id: 281474976715658 } } Scheme entry: { name: .sys, owner: , type: Directory, size_bytes: 0, created_at: { plan_step: 0, tx_id: 0 } } 2025-03-31T16:35:07.660268Z node 4 :TX_PROXY ERROR: Actor# [4:7488013092402639586:3740] txid# 281474976715676, issues: { message: "Check failed: path: \'/Root/TmpTable\', error: path is not a topic (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:07.660380Z node 4 :KQP_GATEWAY ERROR: Unexpected error on scheme request, TxId: 281474976715676, ProxyStatus: ExecError, SchemeShardReason: Check failed: path: '/Root/TmpTable', error: path is not a topic (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges) 2025-03-31T16:35:07.660534Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZTgxYjE2YjItNGZkMzRhNi04NGQ3MTAxNS0xMGFmNzhkNg==, ActorId: [4:7488013092402639515:2513], ActorState: ExecuteState, TraceId: 01jqpgx27w37cypwvnvg9mgfcn, Create QueryResponse for error on request, msg: Query failed, status: GENERIC_ERROR:
: Error: Scheme operation failed, status: ExecError, reason: Check failed: path: '/Root/TmpTable', error: path is not a topic (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges) 2025-03-31T16:35:07.687293Z node 4 :TX_PROXY ERROR: Actor# [4:7488013092402639601:3747] txid# 281474976715678, issues: { message: "Check failed: path: \'/Root/TmpTable\', error: path is not a topic (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:07.687404Z node 4 :KQP_GATEWAY ERROR: Unexpected error on scheme request, TxId: 281474976715678, ProxyStatus: ExecError, SchemeShardReason: Check failed: path: '/Root/TmpTable', error: path is not a topic (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges) 2025-03-31T16:35:07.687554Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZTgxYjE2YjItNGZkMzRhNi04NGQ3MTAxNS0xMGFmNzhkNg==, ActorId: [4:7488013092402639515:2513], ActorState: ExecuteState, TraceId: 01jqpgx28q301d101ypewsjek0, Create QueryResponse for error on request, msg: Query failed, status: GENERIC_ERROR:
: Error: Scheme operation failed, status: ExecError, reason: Check failed: path: '/Root/TmpTable', error: path is not a topic (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges) Scheme entry: { name: .metadata, owner: metadata@system, type: Directory, size_bytes: 0, created_at: { plan_step: 1743438906405, tx_id: 281474976715668 } } Scheme entry: { name: BatchUpload, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743438903787, tx_id: 281474976715661 } } Scheme entry: { name: EightShard, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743438903500, tx_id: 281474976715659 } } Scheme entry: { name: Join1, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743438906328, tx_id: 281474976715666 } } Scheme entry: { name: Join2, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743438906370, tx_id: 281474976715667 } } Scheme entry: { name: KeyValue, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743438906167, tx_id: 281474976715662 } } Scheme entry: { name: KeyValue2, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743438906202, tx_id: 281474976715663 } } Scheme entry: { name: KeyValueLargePartition, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743438906244, tx_id: 281474976715664 } } Scheme entry: { name: Logs, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743438903605, tx_id: 281474976715660 } } Scheme entry: { name: Test, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743438906279, tx_id: 281474976715665 } } Scheme entry: { name: TmpTable, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743438907532, tx_id: 281474976715671 } } Scheme entry: { name: TwoShard, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743438903346, tx_id: 281474976715658 } } Scheme entry: { name: .sys, owner: , type: Directory, size_bytes: 0, created_at: { plan_step: 0, tx_id: 0 } } 2025-03-31T16:35:07.742265Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 >> PersQueueSdkReadSessionTest::ReadSessionWithClose [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithCloseNotCommitted >> KqpQueryService::TableSink_OlapInsert [GOOD] >> KqpQueryService::TableSink_OlapDelete ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::ExecuteScriptPg [GOOD] Test command err: Trying to start YDB, gRPC: 6088, MsgBus: 1101 2025-03-31T16:34:44.644677Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012991708499248:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:44.645535Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0014a4/r3tmp/tmpfoYTuM/pdisk_1.dat 2025-03-31T16:34:45.073220Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:34:45.078234Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:45.078367Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:45.081534Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6088, node 1 2025-03-31T16:34:45.246198Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:45.246234Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:45.246252Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:45.247303Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1101 TClient is connected to server localhost:1101 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:34:45.797341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:45.812176Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:34:45.829854Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:45.990700Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:46.145966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:46.218152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:48.018804Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013008888370215:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:48.018926Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:48.325138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:34:48.355338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:34:48.393527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:34:48.427700Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:34:48.465544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:34:48.518787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:34:48.569018Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013008888370727:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:48.569124Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:48.569169Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013008888370732:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:48.573165Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:34:48.586299Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488013008888370734:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:34:48.660763Z node 1 :TX_PROXY ERROR: Actor# [1:7488013008888370787:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:34:49.550639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:34:49.553071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:34:49.574882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T16:34:49.643800Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488012991708499248:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:49.643938Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:34:51.837226Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438891866, txId: 281474976710701] shutting down Trying to start YDB, gRPC: 6695, MsgBus: 64543 2025-03-31T16:34:52.736249Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488013025543551923:2139];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:52.737349Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0014a4/r3tmp/tmpyLz3wG/pdisk_1.dat 2025-03-31T16:34:52.876281Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:34:52.885274Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:52.885365Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:52.886598Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6695, node 2 2025-03-31T16:34:52.963734Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:52.963760Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:52.963774Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:52.963900Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64543 TClient is connected to server localhost:64543 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:34:53.451032Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:53 ... h pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:56.574161Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013042723423315:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:56.574177Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:56.577857Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:34:56.588179Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488013042723423317:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:34:56.646524Z node 2 :TX_PROXY ERROR: Actor# [2:7488013042723423370:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:34:57.588018Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:34:57.589639Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-31T16:34:57.590969Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-31T16:34:57.735937Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488013025543551923:2139];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:57.736022Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:34:59.852248Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438899888, txId: 281474976715706] shutting down 2025-03-31T16:35:00.164312Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438900203, txId: 281474976715709] shutting down Trying to start YDB, gRPC: 10710, MsgBus: 25975 2025-03-31T16:35:01.207806Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488013065090080785:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:01.207874Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0014a4/r3tmp/tmpCcYaDg/pdisk_1.dat 2025-03-31T16:35:01.312868Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:35:01.338342Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:01.338444Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:01.341639Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10710, node 3 2025-03-31T16:35:01.382013Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:01.382035Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:01.382044Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:01.382164Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25975 TClient is connected to server localhost:25975 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:01.846438Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:01.855737Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:01.873587Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:35:01.934748Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:02.114399Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:02.192218Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:05.056442Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013082269951731:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:05.056539Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:05.108793Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:35:05.145017Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:35:05.183271Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:35:05.253492Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:35:05.290451Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:35:05.366341Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:35:05.421512Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013082269952250:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:05.421610Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013082269952255:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:05.421614Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:05.425201Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:35:05.435437Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488013082269952257:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:35:05.506436Z node 3 :TX_PROXY ERROR: Actor# [3:7488013082269952310:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:06.204387Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488013065090080785:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:06.204468Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:35:06.471924Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-31T16:35:06.474270Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:35:06.476298Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-31T16:35:08.955393Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438908988, txId: 281474976715699] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TConflictWriteOlap [FAIL] Test command err: Trying to start YDB, gRPC: 11645, MsgBus: 12097 2025-03-31T16:34:38.757905Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012964845565852:2175];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:38.760475Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00125b/r3tmp/tmp954FYP/pdisk_1.dat 2025-03-31T16:34:39.129790Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11645, node 1 2025-03-31T16:34:39.187185Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:39.187338Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:39.192073Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:34:39.211302Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:39.211325Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:39.211336Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:39.211454Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12097 TClient is connected to server localhost:12097 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:34:39.749724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:41.775798Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012977730468279:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:41.776011Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012977730468266:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:41.776182Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:41.780164Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:34:41.795757Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012977730468295:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:34:41.859701Z node 1 :TX_PROXY ERROR: Actor# [1:7488012977730468346:2337] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:34:42.215333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:34:42.341982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:34:43.328283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:34:44.029788Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488012964845565852:2175];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:44.066923Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:34:44.646427Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODY2MjhlMzMtZDhiZDExZGItYmVmNzE5MTEtNmMxZjBmNDE=, ActorId: [1:7488012990615378722:2969], ActorState: ExecuteState, TraceId: 01jqpgwbnmfhadgda9d1c4kr49, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191FE23B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x196C634F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76: DoExecute @ 0x18DC7347 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18D0BACA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:105: Execute_ @ 0x18DB81F2 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18DBEF27 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18DBEF27 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18DBEF27 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196FD375 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196FD375 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196FD375 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x196CCEC8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18DBE0F3 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x196CE795 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196F78EC 17. ??:0: ?? @ 0x7FD53D694D8F 18. ??:0: ?? @ 0x7FD53D694E3F 19. ??:0: ?? @ 0x163B4028 Trying to start YDB, gRPC: 13714, MsgBus: 10435 2025-03-31T16:34:50.089467Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488013017382062760:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:50.089515Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00125b/r3tmp/tmpfs5nm7/pdisk_1.dat 2025-03-31T16:34:50.210554Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:34:50.222003Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected TServer::EnableGrpc on GrpcPort 13714, node 2 2025-03-31T16:34:50.222105Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:50.223477Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:34:50.252178Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:50.252198Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:50.252206Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:50.252319Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10435 TClient is connected to server localhost:10435 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:34:50.751353Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:50.760731Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:34:53.350795Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013030266965303:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:53.350946Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013030266965295:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default ... Step;tablet_id=72075186224037922;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:06.092637Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037916;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:06.092718Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037972;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:06.092797Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037962;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:06.092960Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037936;tx_state=TTxProgressTx::Execute;tx_current=281474976715669;tx_id=281474976715669;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715669; 2025-03-31T16:35:06.093181Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037949;tx_state=TTxProgressTx::Execute;tx_current=281474976715669;tx_id=281474976715669;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715669; 2025-03-31T16:35:06.093195Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037902;tx_state=TTxProgressTx::Execute;tx_current=281474976715669;tx_id=281474976715669;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715669; 2025-03-31T16:35:06.093312Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037940;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:06.093382Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037990;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:06.093480Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037987;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:06.093555Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037966;tx_state=TTxProgressTx::Execute;tx_current=281474976715669;tx_id=281474976715669;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715669; 2025-03-31T16:35:06.093740Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037948;tx_state=TTxProgressTx::Execute;tx_current=281474976715669;tx_id=281474976715669;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715669; 2025-03-31T16:35:06.093915Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037944;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:06.094392Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037938;tx_state=TTxProgressTx::Execute;tx_current=281474976715669;tx_id=281474976715669;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715669; 2025-03-31T16:35:06.094414Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037965;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:06.094582Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:06.094694Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[2:7488013038856901870:2584];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037903;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 
2025-03-31T16:35:06.094892Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[2:7488013038856901870:2584];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037903;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:06.095333Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037968;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:06.095381Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037912;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:06.095445Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037957;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:06.095805Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037938;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:06.095991Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037922;tx_state=TTxProgressTx::Execute;tx_current=281474976715669;tx_id=281474976715669;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715669; 2025-03-31T16:35:06.096001Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=281474976715669;tx_id=281474976715669;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715669; 2025-03-31T16:35:06.096132Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037966;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:06.096136Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037949;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:06.096298Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037950;tx_state=TTxProgressTx::Execute;tx_current=281474976715669;tx_id=281474976715669;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715669; 2025-03-31T16:35:06.096300Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037910;tx_state=TTxProgressTx::Execute;tx_current=281474976715669;tx_id=281474976715669;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715669; 2025-03-31T16:35:06.096426Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:06.096479Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037918;tx_state=TTxProgressTx::Execute;tx_current=281474976715669;tx_id=281474976715669;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715669; 2025-03-31T16:35:06.096518Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037978;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:06.096595Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037948;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:06.096601Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037936;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:06.096740Z node 2 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224037900;tx_state=TTxProgressTx::Execute;tx_current=281474976715669;tx_id=281474976715669;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715669; 2025-03-31T16:35:06.098115Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:06.098212Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037950;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:06.098223Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:06.098302Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:06.098313Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037918;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:06.098387Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:06.098473Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037914;self_id=[2:7488013038856901872:2585];ev=NActors::TEvents::TEvWakeup;fline=sync.h:19;event=tx_timeout;lock=281474976715665;tx_id=281474976715669;d=2.003411s; 2025-03-31T16:35:06.098473Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037939;self_id=[2:7488013034561934240:2514];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037939;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:06.098588Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037914;self_id=[2:7488013038856901872:2585];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037914;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:06.099052Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037939;self_id=[2:7488013034561934240:2514];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037939;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:06.099243Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037914;self_id=[2:7488013038856901872:2585];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037914;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:06.102451Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037914;tx_state=TTxProgressTx::Execute;tx_current=281474976715669;tx_id=281474976715669;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715669; 2025-03-31T16:35:06.104436Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037914;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:92, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictWrite::DoExecute(): (result.GetStatus() == EStatus::ABORTED) failed: (SUCCESS != ABORTED) , with diff: (SUCC|ABORT)E(SS|D) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191FE23B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x196C634F 2. 
/tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:92: DoExecute @ 0x18DC9C58 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18D0BACA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:118: Execute_ @ 0x18DB864A 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18DBEF27 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18DBEF27 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18DBEF27 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196FD375 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196FD375 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196FD375 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x196CCEC8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18DBE0F3 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x196CE795 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196F78EC 17. ??:0: ?? @ 0x7FD53D694D8F 18. ??:0: ?? @ 0x7FD53D694E3F 19. ??:0: ?? @ 0x163B4028 >> KqpQueryService::StreamExecuteQueryPure ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ReadManyRanges [GOOD] Test command err: Trying to start YDB, gRPC: 5792, MsgBus: 6410 2025-03-31T16:34:40.321334Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012975241370050:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:40.321467Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0014ba/r3tmp/tmpwbQDYq/pdisk_1.dat 2025-03-31T16:34:40.791582Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5792, node 1 2025-03-31T16:34:40.800474Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:40.800560Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:40.804200Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:34:40.805294Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:34:40.805410Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:34:40.835302Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:40.835334Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:40.835341Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:40.835508Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6410 TClient is connected to server localhost:6410 WaitRootIsUp 'Root'... 
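The KqpSnapshotIsolation::TConflictWriteOlap [FAIL] report above carries two assertions from kqp_snapshot_isolation_ut.cpp: at line 76 the test expected EStatus::SUCCESS but got PRECONDITION_FAILED ("SnapshotRW can only be used with olap tables"), and at line 92 it expected EStatus::ABORTED but the conflicting write finished with SUCCESS. The "(X != Y) , with diff: ..." rendering in both backtraces comes from the unittest registrar (library/cpp/testing/unittest/registar.cpp in the stack frames). A minimal sketch of an assertion that produces this output style, assuming the UNIT_ASSERT_VALUES_EQUAL macro from that library; the exact macro used at those source lines is not shown in the log:

    #include <library/cpp/testing/unittest/registar.h>

    Y_UNIT_TEST_SUITE(DiffOutputExample) {
        Y_UNIT_TEST(StatusMismatch) {
            TString actual = "SUCCESS";    // what the transaction returned
            TString expected = "ABORTED";  // what the isolation test demanded
            // On failure the framework prints a character-level diff,
            // e.g. (SUCC|ABORT)E(SS|D), as seen in the report above.
            UNIT_ASSERT_VALUES_EQUAL(actual, expected);
        }
    }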
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:34:41.418473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:41.443953Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:34:41.456257Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:41.645494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:41.826462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:41.911737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:43.631839Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012988126273710:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:43.631953Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:44.042465Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:34:44.083461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:34:44.122447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:34:44.195794Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:34:44.229725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:34:44.266490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:34:44.337495Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012992421241521:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:44.337562Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:44.337632Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012992421241526:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:44.341042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:34:44.351946Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012992421241528:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:34:44.454596Z node 1 :TX_PROXY ERROR: Actor# [1:7488012992421241586:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:34:45.322520Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488012975241370050:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:45.322595Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:34:55.768585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:34:55.768616Z node 1 :IMPORT WARN: Table profiles were not loaded Trying to start YDB, gRPC: 14424, MsgBus: 9059 2025-03-31T16:34:57.159394Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488013048262258089:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:57.159492Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0014ba/r3tmp/tmprxklmw/pdisk_1.dat 2025-03-31T16:34:57.289044Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:34:57.311158Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:57.311264Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 14424, node 2 2025-03-31T16:34:57.313004Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:34:57.383710Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:57.383734Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:57.383742Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:57.383886Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9059 TClient is connected to server localhost:9059 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:34:57.887327Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:57.898538Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:35:00.446852Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013061147160623:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:00.446965Z node 2 :KQP_WORKLOAD_SERVICE WARN: ... : tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:35:01.082031Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:35:01.082080Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:35:01.082115Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:35:01.082701Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:35:01.082761Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:35:01.082991Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:35:01.083029Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:35:01.083209Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:35:01.083236Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:35:01.083445Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:35:01.083487Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:35:01.083639Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T16:35:01.083668Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T16:35:01.129007Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-31T16:35:01.129401Z node 2 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-31T16:35:01.134528Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-31T16:35:01.140018Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-31T16:35:01.142545Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-31T16:35:01.145174Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-31T16:35:01.151250Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-31T16:35:01.151505Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037898;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-31T16:35:01.158236Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-31T16:35:01.158265Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-31T16:35:01.377213Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037898;tx_state=TTxProgressTx::Execute;tx_current=281474976715664;tx_id=281474976715664;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715664; 2025-03-31T16:35:02.159425Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488013048262258089:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:02.159492Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 26974, MsgBus: 5485 2025-03-31T16:35:03.972596Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488013071468015437:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:03.972656Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0014ba/r3tmp/tmpUxglfx/pdisk_1.dat 2025-03-31T16:35:04.121960Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:35:04.133354Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:04.133424Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:04.135602Z node 3 :HIVE WARN: 
HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26974, node 3 2025-03-31T16:35:04.187741Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:04.187773Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:04.187782Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:04.187920Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5485 TClient is connected to server localhost:5485 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:04.710692Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:04.720168Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:35:07.534807Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013088647885276:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:07.534905Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:07.572461Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:35:07.787400Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013088647885832:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:07.787500Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:07.787785Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013088647885837:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:07.792267Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-31T16:35:07.807287Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488013088647885839:2381], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-31T16:35:07.881404Z node 3 :TX_PROXY ERROR: Actor# [3:7488013088647885890:2692] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:08.973142Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488013071468015437:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:08.973224Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
>> KqpQueryServiceScripts::ValidateScript
>> KqpQueryService::ShowCreateTableDisable [GOOD]
>> KqpQueryService::ShowCreateTableNotSuccess
>> KqpQueryService::ShowCreateSysView
>> KqpQueryService::PeriodicTaskInSessionPool
>> KqpSinkTx::Interactive [GOOD]
>> TSchemeshardBackgroundCompactionTest::SchemeshardShouldHandleCompactionTimeouts [GOOD]
>> KqpQueryServiceScripts::ForgetScriptExecutionOnLongQuery
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldHandleCompactionTimeouts [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:33:21.624685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:33:21.624778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:33:21.624825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:33:21.624856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:33:21.625558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:33:21.625605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:33:21.625680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:33:21.625768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:33:21.627351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:33:21.711971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:33:21.712055Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:33:21.717718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG:
TxInitSchema.Complete 2025-03-31T16:33:21.717801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:33:21.717984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:33:21.725470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:33:21.726476Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:33:21.731515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:21.732562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:33:21.738473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:21.747580Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:33:21.747648Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:21.747791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:33:21.747839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:33:21.747872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:33:21.748107Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:33:21.755522Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:33:21.879352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:33:21.879594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:21.879840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:33:21.880121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:33:21.880199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:21.882673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:21.882839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:33:21.883061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2025-03-31T16:33:21.883182Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:33:21.883221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:33:21.883252Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:33:21.885353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:21.885422Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:33:21.885462Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:33:21.887453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:21.887517Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:21.887578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:21.887638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:33:21.897191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:33:21.899160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:33:21.899423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:33:21.900640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:21.900782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:33:21.900832Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:21.901148Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:33:21.901201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:21.901383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:33:21.901456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 
2025-03-31T16:33:21.903861Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:33:21.903915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:33:21.904104Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:21.904144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:33:21.904380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:21.904427Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:33:21.904541Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:33:21.904575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:21.904612Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:33:21.904642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:21.904760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:33:21.904858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:21.904903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:33:21.904931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:33:21.904994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:33:21.905030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:33:21.905057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:33:21.907023Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:33:21.907192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:33:21.907233Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
EvMeasureSelfResponseTime 2025-03-31T16:35:09.823456Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:35:10.229770Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:125:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:35:10.229857Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:35:10.229955Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:125:2151], Recipient [3:125:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:35:10.229989Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:35:10.644883Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:125:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:35:10.644973Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:35:10.645444Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:125:2151], Recipient [3:125:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:35:10.645484Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:35:10.722113Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [3:309:2296]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2025-03-31T16:35:10.722191Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2025-03-31T16:35:10.722278Z node 3 :TX_DATASHARD TRACE: No cleanup at 72075186233409546 outdated step 5000002 last cleanup 0 2025-03-31T16:35:10.722339Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186233409546 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:35:10.722369Z node 3 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186233409546 2025-03-31T16:35:10.722401Z node 3 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186233409546 has no attached operations 2025-03-31T16:35:10.722429Z node 3 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186233409546 2025-03-31T16:35:10.722641Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:309:2296]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-03-31T16:35:10.722951Z node 3 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186233409546, FollowerId 0, tableId 2 2025-03-31T16:35:10.723367Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [3:309:2296], Recipient [3:125:2151]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409546 TableLocalId: 2 Generation: 2 Round: 7 TableStats { DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 80 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 
HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 146 Memory: 124232 Storage: 14156 GroupWriteThroughput { GroupID: 0 Channel: 0 Throughput: 263 } GroupWriteThroughput { GroupID: 0 Channel: 1 Throughput: 444 } GroupWriteIops { GroupID: 0 Channel: 0 Iops: 1 } } ShardState: 2 UserTablePartOwners: 72075186233409546 NodeId: 3 StartTime: 40 TableOwnerId: 72057594046678944 FollowerId: 2025-03-31T16:35:10.723412Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-03-31T16:35:10.723460Z node 3 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 13940 rowCount 100 cpuUsage 0.0146 2025-03-31T16:35:10.723691Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] raw table stats: DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 80 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-03-31T16:35:10.723731Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-03-31T16:35:10.725898Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435080, Sender [3:1060:3004], Recipient [3:309:2296]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvAsyncTableStats 2025-03-31T16:35:10.768284Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435091, Sender [0:0:0], Recipient [3:125:2151]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-03-31T16:35:10.768355Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-03-31T16:35:10.768389Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 2025-03-31T16:35:10.768455Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-03-31T16:35:10.768487Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2025-03-31T16:35:10.768606Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 13940 row count 100 2025-03-31T16:35:10.768668Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Simple, is column=0, is olap=0 2025-03-31T16:35:10.768707Z node 3 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 2: RowCount 100, DataSize 13940 2025-03-31T16:35:10.768771Z node 3 :FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Updated shard# 72057594046678944:1 with partCount# 1, rowCount# 100, searchHeight# 1, lastFullCompaction# 1970-01-01T00:01:20.000000Z at schemeshard 72057594046678944 2025-03-31T16:35:10.768905Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects 
ApplyOnComplete at tablet# 72057594046678944 2025-03-31T16:35:10.779517Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435091, Sender [0:0:0], Recipient [3:125:2151]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-03-31T16:35:10.779598Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-03-31T16:35:10.779630Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-03-31T16:35:11.093850Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:125:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:35:11.093930Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:35:11.094011Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:125:2151], Recipient [3:125:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:35:11.094057Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:35:11.459511Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:125:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:35:11.459587Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:35:11.459690Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:125:2151], Recipient [3:125:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:35:11.459723Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:35:11.828384Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:125:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:35:11.828491Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:35:11.828631Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:125:2151], Recipient [3:125:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:35:11.828670Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:35:12.208619Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:125:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:35:12.208706Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:35:12.208813Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:125:2151], Recipient [3:125:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:35:12.208848Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:35:12.572144Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:125:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:35:12.572223Z 
node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:35:12.572309Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:125:2151], Recipient [3:125:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:35:12.572340Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:35:12.604185Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:309:2296]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-03-31T16:35:12.935423Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:125:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:35:12.935502Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:35:12.935596Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:125:2151], Recipient [3:125:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:35:12.935633Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime
>> KqpQueryService::TableSink_OlapRWQueries [GOOD]
>> KqpQueryService::Ddl [GOOD]
>> KqpQueryService::DdlColumnTable
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::Interactive [GOOD]
Test command err:
Trying to start YDB, gRPC: 22768, MsgBus: 17499 2025-03-31T16:34:48.959396Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488013006875097576:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:48.959458Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00124d/r3tmp/tmpyS6jp8/pdisk_1.dat 2025-03-31T16:34:49.412424Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:34:49.417603Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:49.417719Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 22768, node 1 2025-03-31T16:34:49.423376Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:34:49.506743Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:49.506773Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:49.506780Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:49.506909Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17499 TClient is connected to server localhost:17499 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:34:50.017584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:50.037209Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:34:52.022933Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013024054967398:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:52.023060Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013024054967430:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:52.023161Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:52.027545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:34:52.040337Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488013024054967436:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:34:52.141201Z node 1 :TX_PROXY ERROR: Actor# [1:7488013024054967487:2337] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:34:52.441200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:34:52.546064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:34:53.587578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:34:54.308970Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488013006875097576:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:54.364924Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:34:55.135047Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YzE5MzFiYzAtOWI2NDUxNjctZmRjZmIwMzItMTA0Zjc5MmI=, ActorId: [1:7488013032644910388:2969], ActorState: ReadyState, TraceId: 01jqpgwp04fmx1ef84231nfdny, Create QueryResponse for error on request, msg: WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 21158, MsgBus: 1803 2025-03-31T16:35:01.185753Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488013065436141998:2139];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:01.197291Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00124d/r3tmp/tmpkSAsFM/pdisk_1.dat 2025-03-31T16:35:01.293220Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:35:01.326258Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:01.326724Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 21158, node 2 2025-03-31T16:35:01.328795Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:35:01.385814Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:01.385840Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:01.385866Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:01.385988Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1803 TClient is connected to server localhost:1803 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:01.832181Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:01.838299Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:35:04.325552Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013078321044459:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:04.325694Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:04.326599Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013078321044471:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:04.330071Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:35:04.339143Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488013078321044473:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:35:04.410835Z node 2 :TX_PROXY ERROR: Actor# [2:7488013078321044524:2334] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:04.489654Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-31T16:35:04.565117Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-31T16:35:05.660672Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:35:06.455239Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488013065436141998:2139];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:06.484462Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0
>> KqpQueryService::DdlWithExplicitTransaction [GOOD]
>> KqpQueryService::Ddl_Dml
>> KqpQueryService::Followers [GOOD]
>> KqpQueryService::IssuesInCaseOfSuccess
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_OlapRWQueries [GOOD]
Test command err:
Trying to start YDB, gRPC: 29519, MsgBus: 19132 2025-03-31T16:34:29.689649Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012924947799087:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:29.689712Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0014da/r3tmp/tmpLO3rVm/pdisk_1.dat 2025-03-31T16:34:30.294035Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:34:30.294888Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:30.295036Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:30.301429Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29519, node 1 2025-03-31T16:34:30.470316Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:30.470340Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:30.470352Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:30.470510Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19132 TClient is connected to server localhost:19132 WaitRootIsUp 
'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:34:31.318719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:33.013236Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012942127668933:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:33.013351Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:33.618594Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:34:33.787663Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488012942127669093:2338];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:34:33.787663Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488012942127669099:2341];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:34:33.788047Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488012942127669093:2338];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:34:33.788340Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488012942127669093:2338];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:34:33.788514Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488012942127669093:2338];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:34:33.788531Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488012942127669099:2341];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:34:33.788691Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488012942127669093:2338];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:34:33.788787Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488012942127669099:2341];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:34:33.788836Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488012942127669093:2338];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:34:33.788937Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488012942127669099:2341];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:34:33.789018Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488012942127669093:2338];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:34:33.789135Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488012942127669099:2341];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:34:33.789194Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037896;self_id=[1:7488012942127669093:2338];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:34:33.789254Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488012942127669099:2341];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:34:33.789370Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488012942127669093:2338];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:34:33.789375Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488012942127669099:2341];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:34:33.789487Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488012942127669099:2341];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:34:33.789497Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488012942127669093:2338];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:34:33.789613Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488012942127669099:2341];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:34:33.789645Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488012942127669093:2338];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:34:33.789742Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488012942127669099:2341];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:34:33.789754Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488012942127669093:2338];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:34:33.789913Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488012942127669099:2341];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:34:33.790041Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488012942127669099:2341];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:34:33.829363Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488012942127669088:2335];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:34:33.829451Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037895;self_id=[1:7488012942127669088:2335];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:34:33.829689Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488012942127669088:2335];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:34:33.829796Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488012942127669088:2335];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:34:33.829967Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488012942127669088:2335];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:34:33.830090Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488012942127669088:2335];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:34:33.830240Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488012942127669088:2335];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11; ... E=RestorePortionFromChunks; 2025-03-31T16:35:12.335064Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:35:12.335130Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:35:12.335148Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[3:7488013109628074865:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:35:12.335309Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[3:7488013109628074865:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:35:12.335404Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:35:12.335442Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:35:12.335478Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[3:7488013109628074865:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:35:12.335606Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:35:12.335635Z node 3 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[3:7488013109628074865:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:35:12.335646Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:35:12.335889Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:35:12.335917Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:35:12.336027Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T16:35:12.336056Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T16:35:12.339524Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:35:12.339602Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:35:12.339709Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:35:12.339757Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:35:12.339939Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:35:12.339984Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:35:12.340075Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T16:35:12.340116Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:35:12.340182Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:35:12.340219Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:35:12.340293Z node 3 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:35:12.340345Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:35:12.341066Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:35:12.341121Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:35:12.341366Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:35:12.341435Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:35:12.341631Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:35:12.341701Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:35:12.341934Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:35:12.341978Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:35:12.342087Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T16:35:12.342129Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T16:35:12.357083Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-31T16:35:12.357152Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-31T16:35:12.363127Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-31T16:35:12.377175Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013109628074969:2358], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:12.377259Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:12.377378Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013109628074974:2361], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:12.381023Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-31T16:35:12.392467Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488013109628074976:2362], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-31T16:35:12.472725Z node 3 :TX_PROXY ERROR: Actor# [3:7488013109628075029:2437] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:13.434649Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488013092448204942:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:13.434723Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:35:13.569636Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=281474976710665;tx_id=281474976710665;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710665; 2025-03-31T16:35:13.570225Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=281474976710665;tx_id=281474976710665;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710665; 2025-03-31T16:35:13.570563Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037888;self_id=[3:7488013109628074865:2337];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037888;local_tx_no=13;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037889;receive=72075186224037890; 2025-03-31T16:35:13.571001Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=281474976710665;tx_id=281474976710665;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710665; >> KqpQueryService::ExecuteQueryWithWorkloadManager >> KqpQueryService::TableSink_Htap-withOltpSink [GOOD] >> KqpQueryService::TableSink_DisableSink >> KqpQueryService::AlterTempTable >> KqpQueryService::TableSink_OlapUpsert [GOOD] >> KqpQueryService::TableSink_OltpDelete >> KqpQueryServiceScripts::ExecuteScriptWithTimeout [GOOD] >> KqpQueryServiceScripts::ExecuteScriptWithResultsTtlAndForgetAfter >> KqpQueryServiceScripts::ExecuteScriptStatsFull [GOOD] >> KqpQueryServiceScripts::ExecuteScriptStatsNone >> KqpQueryService::StreamExecuteQueryPure [GOOD] >> KqpQueryService::StreamExecuteQueryMultiResult >> BasicUsage::TWriteSession_WriteEncoded [GOOD] >> CompressExecutor::TestReorderedExecutor >> KqpQueryServiceScripts::ValidateScript [GOOD] >> KqpQueryServiceScripts::TestTruncatedByRows >> KqpQueryService::TempTablesDrop >> KqpExtractPredicateLookup::SqlInJoin [GOOD] >> KqpKv::BulkUpsert >> KqpQueryServiceScripts::EmptyNextFetchToken [GOOD] >> KqpQueryService::ShowCreateTableNotSuccess [GOOD] >> KqpQueryServiceScripts::ExecuteScriptWithForgetAfter [GOOD] >> KqpQueryServiceScripts::ExecuteScriptWithResultsTtl >> KqpQueryService::ShowCreateSysView [GOOD] >> KqpQueryService::SeveralCTAS+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::EmptyNextFetchToken [GOOD] Test command err: Trying to start YDB, gRPC: 22525, MsgBus: 19382 2025-03-31T16:34:56.402684Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488013044096087270:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:56.402735Z node 1 :METADATA_PROVIDER 
ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00147f/r3tmp/tmp5AO4cC/pdisk_1.dat 2025-03-31T16:34:56.792576Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22525, node 1 2025-03-31T16:34:56.835498Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:56.835603Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:56.843791Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:34:56.904008Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:56.904052Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:56.904066Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:56.904195Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19382 TClient is connected to server localhost:19382 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:34:57.455027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:57.494023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:57.678478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:34:57.863442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:34:57.930834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:34:59.565290Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013056980990949:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:59.565390Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:59.882534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:34:59.913961Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:34:59.952112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:35:00.025518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:35:00.063978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:35:00.138965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:35:00.183138Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013061275958763:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:00.183197Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:00.183367Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013061275958768:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:00.187371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:35:00.200811Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488013061275958770:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:35:00.283650Z node 1 :TX_PROXY ERROR: Actor# [1:7488013061275958824:3444] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:01.403307Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488013044096087270:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:01.403383Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 29128, MsgBus: 9550 2025-03-31T16:35:02.114766Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488013068464260281:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:02.115137Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00147f/r3tmp/tmpTW9CX8/pdisk_1.dat 2025-03-31T16:35:02.207191Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29128, node 2 2025-03-31T16:35:02.242173Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:02.242259Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:02.243926Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:35:02.259057Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:02.259095Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:02.259103Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:02.259233Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9550 TClient is connected to server localhost:9550 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:35:02.729910Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:02.735848Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:02.744792Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:35:02.804074Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:02.951964Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:03.016518Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreate ... suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:35:05.365720Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:35:05.444083Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013081349164473:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:05.444200Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:05.444308Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013081349164478:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:05.447859Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:35:05.462501Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488013081349164480:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:35:05.549448Z node 2 :TX_PROXY ERROR: Actor# [2:7488013081349164536:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:06.324560Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:35:06.326243Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-31T16:35:06.327977Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-31T16:35:07.200857Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488013068464260281:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:07.201181Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 18289, MsgBus: 7296 2025-03-31T16:35:09.852180Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488013099677867690:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:09.852239Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00147f/r3tmp/tmp32ak1A/pdisk_1.dat 2025-03-31T16:35:09.978302Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:35:10.004328Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:10.004426Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:10.005770Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18289, node 3 2025-03-31T16:35:10.111695Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:10.111721Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:10.111728Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:10.111851Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7296 TClient is connected to server localhost:7296 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:10.587774Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:10.611917Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:10.680319Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:10.843359Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:10.920518Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:13.527346Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013116857738644:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:13.527453Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:13.584287Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:35:13.637603Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:35:13.671821Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:35:13.703998Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:35:13.737938Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:35:13.777009Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:35:13.862880Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013116857739159:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:13.862985Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:13.863758Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013116857739164:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:13.868031Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:35:13.877230Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488013116857739166:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:35:13.957761Z node 3 :TX_PROXY ERROR: Actor# [3:7488013116857739223:3449] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:14.787037Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:35:14.789383Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-31T16:35:14.790883Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-31T16:35:14.854152Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488013099677867690:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:14.854214Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:35:17.281040Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438917311, txId: 281474976715704] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ShowCreateTableNotSuccess [GOOD] Test command err: Trying to start YDB, gRPC: 19083, MsgBus: 21370 2025-03-31T16:35:01.409169Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488013064121729751:2195];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:01.409208Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001466/r3tmp/tmpkYNwyC/pdisk_1.dat 2025-03-31T16:35:01.781673Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:35:01.797703Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:01.797787Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:01.800868Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19083, node 1 2025-03-31T16:35:01.883707Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:01.883726Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:01.883738Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:01.883892Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21370 TClient is connected to server localhost:21370 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:02.423707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:02.443793Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:35:02.454358Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:02.600911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:02.747481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:02.819265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:04.621943Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013077006633268:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:04.622046Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:04.948044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:35:05.017354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:35:05.056876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:35:05.092780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:35:05.125804Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:35:05.199621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:35:05.281689Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013081301601086:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:05.281787Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:05.282017Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013081301601091:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:05.285441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:35:05.296428Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488013081301601093:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:35:05.393111Z node 1 :TX_PROXY ERROR: Actor# [1:7488013081301601149:3458] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:06.276748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:35:06.410201Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488013064121729751:2195];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:06.440390Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 21521, MsgBus: 4798 2025-03-31T16:35:07.249212Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488013088622276762:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:07.249244Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001466/r3tmp/tmpovoUy4/pdisk_1.dat 2025-03-31T16:35:07.334423Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21521, node 2 2025-03-31T16:35:07.385539Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:07.385628Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:07.387811Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:35:07.430744Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:07.430763Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:07.430771Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:07.430873Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4798 TClient is connected to server localhost:4798 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:07.840305Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:07.847200Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:35:07.853936Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:07.938299Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31 ... ve access permissions } 2025-03-31T16:35:10.581660Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:10.581891Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013101507180944:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:10.585538Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:35:10.599933Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488013101507180946:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:35:10.654524Z node 2 :TX_PROXY ERROR: Actor# [2:7488013101507180999:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:11.609601Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:35:11.664899Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7488013105802148676:2510], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:35: Error: At function: KiReadTable!
:2:35: Error: SHOW CREATE statement is not supported 2025-03-31T16:35:11.665210Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Y2RjMjFiNTctOWIxZjBjMGMtOGRhYTYxNDYtNTA0MTExMGE=, ActorId: [2:7488013105802148590:2496], ActorState: ExecuteState, TraceId: 01jqpgx650ddg3gwbwth3z3s5r, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 16505, MsgBus: 25940 2025-03-31T16:35:12.471297Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488013112772056453:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:12.471392Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001466/r3tmp/tmpBa8rdB/pdisk_1.dat 2025-03-31T16:35:12.603343Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:35:12.603534Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:12.603621Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:12.620108Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16505, node 3 2025-03-31T16:35:12.673484Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:12.673509Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:12.673517Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:12.673641Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25940 TClient is connected to server localhost:25940 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:13.121164Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:35:13.129174Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:35:13.141223Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:13.209458Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:13.380835Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:13.456844Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:15.996601Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013125656960121:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:15.997074Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:16.044777Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:35:16.084381Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:35:16.167960Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:35:16.209319Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:35:16.243692Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:35:16.322804Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:35:16.381912Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013129951927935:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:16.381983Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:16.382171Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013129951927940:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:16.385322Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:35:16.395457Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488013129951927942:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:35:16.474085Z node 3 :TX_PROXY ERROR: Actor# [3:7488013129951927995:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:17.433500Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7488013134246895597:2502], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:35: Error: At function: KiReadTable!
:2:35: Error: Cannot find table 'db.[/Root/test_show_create]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:35:17.433715Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YTFhZmQ0MmUtMzI4MTkyMTItMzkzNGI4MzctNmI0ZTI2Mzc=, ActorId: [3:7488013134246895588:2496], ActorState: ExecuteState, TraceId: 01jqpgxbsb63y24vbbv9tb5zxn, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:35:17.451201Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7488013134246895610:2505], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:35: Error: At function: KiReadTable!
:2:35: Error: Cannot find table 'db.[/Root/.sys/show_create]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:35:17.452671Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YTFhZmQ0MmUtMzI4MTkyMTItMzkzNGI4MzctNmI0ZTI2Mzc=, ActorId: [3:7488013134246895588:2496], ActorState: ExecuteState, TraceId: 01jqpgxbsz0xgzy73nz8nhv5hn, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:35:17.471635Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488013112772056453:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:17.471711Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpSinkLocks::EmptyRangeAlreadyBrokenOlap [GOOD] >> KqpSinkLocks::OlapUncommittedRead [GOOD] >> KqpQueryServiceScripts::ForgetScriptExecutionOnLongQuery [GOOD] >> KqpQueryServiceScripts::ForgetScriptExecutionRace >> KqpQueryService::DdlGroup >> KqpQueryService::TableSink_OltpUpsert ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::EmptyRangeAlreadyBrokenOlap [GOOD] Test command err: Trying to start YDB, gRPC: 2859, MsgBus: 13923 2025-03-31T16:34:40.764684Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012974601058080:2207];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:40.770787Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001258/r3tmp/tmpy8jEYl/pdisk_1.dat 2025-03-31T16:34:41.132212Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2859, node 1 2025-03-31T16:34:41.196681Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:41.196788Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:41.201182Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:34:41.235975Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:41.235998Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:41.236006Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:41.236116Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13923 TClient is connected to server localhost:13923 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:34:41.904315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:43.739910Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012987485960449:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:43.740046Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012987485960475:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:43.740095Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:43.745317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:34:43.760172Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012987485960478:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:34:43.848599Z node 1 :TX_PROXY ERROR: Actor# [1:7488012987485960531:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:34:44.183215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:34:44.377499Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488012991780928007:2344];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:34:44.377812Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488012991780928007:2344];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:34:44.378111Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488012991780928007:2344];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:34:44.378256Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488012991780928007:2344];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:34:44.378371Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488012991780928007:2344];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:34:44.378488Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488012991780928007:2344];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:34:44.381944Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7488012991780928024:2348];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:34:44.382007Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7488012991780928024:2348];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:34:44.382215Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7488012991780928024:2348];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:34:44.382508Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7488012991780928024:2348];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:34:44.382612Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7488012991780928024:2348];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 
2025-03-31T16:34:44.382732Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7488012991780928024:2348];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:34:44.382969Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488012991780928007:2344];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:34:44.383161Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488012991780928007:2344];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:34:44.383282Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488012991780928007:2344];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:34:44.383419Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488012991780928007:2344];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:34:44.383542Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488012991780928007:2344];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:34:44.383644Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488012991780928007:2344];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:34:44.386834Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7488012991780928024:2348];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:34:44.387035Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7488012991780928024:2348];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:34:44.387196Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7488012991780928024:2348];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:34:44.387320Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7488012991780928024:2348];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:34:44.387442Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7488012991780928024:2348];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:34:44.387589Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037897;self_id=[1:7488012991780928024:2348];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:34:44.419807Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488012991780928013:2345];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:34:44.419897Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488012991780928013:2345];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:34:44.420145Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_ ... ent=skip_indexation;reason=disabled; 2025-03-31T16:35:12.915498Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038057;self_id=[2:7488013094045527446:3382];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038057;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:12.924973Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[2:7488013068275717078:2351];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037896;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:12.925296Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[2:7488013068275717078:2351];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037896;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:12.928705Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037891;self_id=[2:7488013068275717070:2348];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037891;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=tablet lock have another internal generation counter: 18446744073709551615 != 0;tx_id=0; 2025-03-31T16:35:12.929767Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7488013111225400142:3887], SessionActorId: [2:7488013106930431852:3887], Got LOCKS BROKEN for table. ShardID=72075186224037891, Sink=[2:7488013111225400142:3887].{
: Error: tablet lock have another internal generation counter: 18446744073709551615 != 0, code: 2001 } 2025-03-31T16:35:12.929891Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7488013111225400142:3887], SessionActorId: [2:7488013106930431852:3887], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: tablet lock have another internal generation counter: 18446744073709551615 != 0, code: 2001 . sessionActorId=[2:7488013106930431852:3887]. isRollback=0 2025-03-31T16:35:12.930041Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038060;self_id=[2:7488013094045527521:3421];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038060;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:12.930189Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=M2FhYjczNWEtNTA2MzhlMTEtNGUyYTA1Zi01ODdhYjkyMg==, ActorId: [2:7488013106930431852:3887], ActorState: ExecuteState, TraceId: 01jqpgx6ydfqhxhhgybk5c40wt, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [2:7488013111225400337:3887] from: [2:7488013111225400142:3887] 2025-03-31T16:35:12.930250Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7488013111225400337:3887] TxId: 281474976715671. Ctx: { TraceId: 01jqpgx6ydfqhxhhgybk5c40wt, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=M2FhYjczNWEtNTA2MzhlMTEtNGUyYTA1Zi01ODdhYjkyMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001 subissue: {
: Error: tablet lock have another internal generation counter: 18446744073709551615 != 0, code: 2001 } } 2025-03-31T16:35:12.930365Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038060;self_id=[2:7488013094045527521:3421];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038060;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:12.930399Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=M2FhYjczNWEtNTA2MzhlMTEtNGUyYTA1Zi01ODdhYjkyMg==, ActorId: [2:7488013106930431852:3887], ActorState: ExecuteState, TraceId: 01jqpgx6ydfqhxhhgybk5c40wt, Create QueryResponse for error on request, msg: 2025-03-31T16:35:12.930582Z node 2 :TX_COLUMNSHARD WARN: TColumnShard.StateWork at 72075186224037895 unhandled event type: NKikimr::TEvDataShard::TEvCancelTransactionProposal event: NKikimrTxDataShard.TEvCancelTransactionProposal TxId: 281474976715671 2025-03-31T16:35:12.930662Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037895;self_id=[2:7488013068275717072:2349];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037895;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:35:12.930871Z node 2 :TX_COLUMNSHARD WARN: TColumnShard.StateWork at 72075186224037896 unhandled event type: NKikimr::TEvDataShard::TEvCancelTransactionProposal event: NKikimrTxDataShard.TEvCancelTransactionProposal TxId: 281474976715671 2025-03-31T16:35:12.930936Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037896;self_id=[2:7488013068275717078:2351];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037896;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:35:12.931109Z node 2 :TX_COLUMNSHARD WARN: TColumnShard.StateWork at 72075186224037897 unhandled event type: NKikimr::TEvDataShard::TEvCancelTransactionProposal event: NKikimrTxDataShard.TEvCancelTransactionProposal TxId: 281474976715671 2025-03-31T16:35:12.931165Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037897;self_id=[2:7488013068275717075:2350];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037897;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:35:12.931282Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037891;self_id=[2:7488013068275717070:2348];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037891;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:35:12.931439Z node 2 :TX_COLUMNSHARD WARN: TColumnShard.StateWork at 72075186224037893 unhandled event type: NKikimr::TEvDataShard::TEvCancelTransactionProposal event: NKikimrTxDataShard.TEvCancelTransactionProposal TxId: 281474976715671 2025-03-31T16:35:12.931516Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037893;self_id=[2:7488013072570684485:2353];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037893;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0;
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: tablet lock have another internal generation counter: 18446744073709551615 != 0, code: 2001 2025-03-31T16:35:12.931781Z node 2 :TX_COLUMNSHARD WARN: TColumnShard.StateWork at 72075186224037888 unhandled event type: NKikimr::TEvDataShard::TEvCancelTransactionProposal event: NKikimrTxDataShard.TEvCancelTransactionProposal TxId: 281474976715671 2025-03-31T16:35:12.931829Z node 2 :TX_COLUMNSHARD WARN: TColumnShard.StateWork at 72075186224037889 unhandled event type: NKikimr::TEvDataShard::TEvCancelTransactionProposal event: NKikimrTxDataShard.TEvCancelTransactionProposal TxId: 281474976715671 2025-03-31T16:35:12.931845Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037888;self_id=[2:7488013068275717058:2344];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037888;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:35:12.931894Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037889;self_id=[2:7488013068275717062:2346];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037889;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:35:12.932057Z node 2 :TX_COLUMNSHARD WARN: TColumnShard.StateWork at 72075186224037894 unhandled event type: NKikimr::TEvDataShard::TEvCancelTransactionProposal event: NKikimrTxDataShard.TEvCancelTransactionProposal TxId: 281474976715671 2025-03-31T16:35:12.932081Z node 2 :TX_COLUMNSHARD WARN: TColumnShard.StateWork at 72075186224037890 unhandled event type: NKikimr::TEvDataShard::TEvCancelTransactionProposal event: NKikimrTxDataShard.TEvCancelTransactionProposal TxId: 281474976715671 2025-03-31T16:35:12.932106Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037894;self_id=[2:7488013068275717081:2352];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037894;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:35:12.932135Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037890;self_id=[2:7488013068275717064:2347];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037890;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:35:12.932325Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038065;self_id=[2:7488013094045527261:3323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038065;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:12.932332Z node 2 :TX_COLUMNSHARD WARN: TColumnShard.StateWork at 72075186224037892 unhandled event type: NKikimr::TEvDataShard::TEvCancelTransactionProposal event: NKikimrTxDataShard.TEvCancelTransactionProposal TxId: 281474976715671 2025-03-31T16:35:12.932394Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037892;self_id=[2:7488013068275717060:2345];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037892;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:35:12.932629Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038065;self_id=[2:7488013094045527261:3323];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038065;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:12.939332Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038066;self_id=[2:7488013094045527583:3425];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038066;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:12.939593Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038066;self_id=[2:7488013094045527583:3425];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038066;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:13.026912Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037991;self_id=[2:7488013076865652908:2448];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037991;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:13.027359Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037991;self_id=[2:7488013076865652908:2448];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037991;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:13.286226Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037914;self_id=[2:7488013072570685379:2418];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037914;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:13.288124Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037914;self_id=[2:7488013072570685379:2418];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037914;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2025-03-31T16:35:14.542392Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:35:14.542438Z node 2 :IMPORT WARN: Table profiles were not loaded WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> KqpQueryService::AlterTempTable [GOOD] >> KqpQueryService::CTASWithoutPerStatement ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::OlapUncommittedRead [GOOD] Test command err: Trying to start YDB, gRPC: 11646, MsgBus: 14692 2025-03-31T16:34:49.022379Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488013012496138398:2189];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:49.022440Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012e1/r3tmp/tmpIuKwBc/pdisk_1.dat 2025-03-31T16:34:49.422354Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:34:49.441393Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:49.441496Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:49.442942Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11646, node 1 2025-03-31T16:34:49.527968Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use 
file: (empty maybe) 2025-03-31T16:34:49.527997Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:49.528004Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:49.528135Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14692 TClient is connected to server localhost:14692 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:34:50.054342Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:50.077855Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:34:52.062577Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013025381040829:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:52.062576Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013025381040794:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:52.062661Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:52.066031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:34:52.080506Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488013025381040831:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:34:52.152282Z node 1 :TX_PROXY ERROR: Actor# [1:7488013025381040882:2337] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:34:52.407849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:34:52.537872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:34:53.495739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:34:54.236583Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488013012496138398:2189];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:54.260390Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:34:55.190226Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=5; 2025-03-31T16:34:55.190404Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 5 at tablet 72075186224037888 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-03-31T16:34:55.190544Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 5 at tablet 72075186224037888 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-03-31T16:34:55.190726Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488013038265951437:2969], Table: `/Root/Test` ([72057594046644480:6:1]), SessionActorId: [1:7488013033970983793:2969]Got LOCKS BROKEN for table `/Root/Test`. ShardID=72075186224037888, Sink=[1:7488013038265951437:2969].{
: Error: Operation is aborting because locks are not valid, code: 2001 }
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 2025-03-31T16:34:55.191374Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488013038265951423:2969], SessionActorId: [1:7488013033970983793:2969], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[1:7488013033970983793:2969]. isRollback=0 2025-03-31T16:34:55.191673Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=N2IxMDMxOWEtMmFjNDE4NDAtM2UyZmY0Y2UtNjUxMDg1NA==, ActorId: [1:7488013033970983793:2969], ActorState: ExecuteState, TraceId: 01jqpgwnzxd9megtv0ptgdr5f9, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [1:7488013038265951424:2969] from: [1:7488013038265951423:2969] 2025-03-31T16:34:55.191769Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7488013038265951424:2969] TxId: 281474976710665. Ctx: { TraceId: 01jqpgwnzxd9megtv0ptgdr5f9, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2IxMDMxOWEtMmFjNDE4NDAtM2UyZmY0Y2UtNjUxMDg1NA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-03-31T16:34:55.192957Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=N2IxMDMxOWEtMmFjNDE4NDAtM2UyZmY0Y2UtNjUxMDg1NA==, ActorId: [1:7488013033970983793:2969], ActorState: ExecuteState, TraceId: 01jqpgwnzxd9megtv0ptgdr5f9, Create QueryResponse for error on request, msg: WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 15633, MsgBus: 17624 2025-03-31T16:35:01.423920Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488013062485158758:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:01.424021Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012e1/r3tmp/tmpa9Xtya/pdisk_1.dat 2025-03-31T16:35:01.541946Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15633, node 2 2025-03-31T16:35:01.566089Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:01.566215Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:01.570006Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:35:01.623687Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:01.623715Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:01.623725Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:01.623860Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17624 TClient is connected to server localhost:17624 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-31T16:35:02.061024Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:35:04.593105Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013075370061305:2330], DatabaseId ... 
let_id=72075186224038025;self_id=[2:7488013096844904105:3363];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038025;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:13.209370Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038016;self_id=[2:7488013096844904343:3395];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038016;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:13.209823Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038016;self_id=[2:7488013096844904343:3395];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038016;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:13.213069Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038041;self_id=[2:7488013096844904042:3355];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038041;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:13.213606Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038041;self_id=[2:7488013096844904042:3355];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038041;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:13.215391Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038012;self_id=[2:7488013096844904214:3386];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038012;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:13.215972Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038008;self_id=[2:7488013096844904131:3373];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038008;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:13.216429Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038008;self_id=[2:7488013096844904131:3373];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038008;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:13.216921Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038012;self_id=[2:7488013096844904214:3386];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038012;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:13.217231Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038011;self_id=[2:7488013096844904217:3387];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038011;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:13.217626Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038011;self_id=[2:7488013096844904217:3387];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038011;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:13.217857Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038035;self_id=[2:7488013096844904136:3375];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038035;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:13.218229Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038035;self_id=[2:7488013096844904136:3375];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038035;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:13.221698Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038034;self_id=[2:7488013096844904151:3378];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038034;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:13.222117Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038034;self_id=[2:7488013096844904151:3378];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038034;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:13.222539Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038021;self_id=[2:7488013096844904311:3393];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038021;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:13.222914Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038021;self_id=[2:7488013096844904311:3393];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038021;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:13.225721Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038024;self_id=[2:7488013096844904033:3353];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038024;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:13.226100Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038024;self_id=[2:7488013096844904033:3353];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038024;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:13.229712Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038017;self_id=[2:7488013096844904206:3385];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038017;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:13.231655Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038023;self_id=[2:7488013096844904119:3369];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038023;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:13.232059Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038023;self_id=[2:7488013096844904119:3369];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038023;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:13.232199Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038017;self_id=[2:7488013096844904206:3385];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038017;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:13.232715Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038018;self_id=[2:7488013096844904188:3383];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038018;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:13.233044Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038018;self_id=[2:7488013096844904188:3383];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038018;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:13.235349Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038028;self_id=[2:7488013096844904003:3347];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038028;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:13.235772Z node 2 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224038028;self_id=[2:7488013096844904003:3347];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038028;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:13.237994Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038010;self_id=[2:7488013096844904322:3394];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038010;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:13.238360Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038010;self_id=[2:7488013096844904322:3394];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038010;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:13.240553Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038003;self_id=[2:7488013096844904226:3389];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038003;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:13.241052Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038003;self_id=[2:7488013096844904226:3389];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038003;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 2025-03-31T16:35:13.715287Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037888;self_id=[2:7488013075370061558:2345];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037888;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:35:13.715401Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037889;self_id=[2:7488013075370061564:2348];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037889;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:35:13.715455Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037890;self_id=[2:7488013075370061605:2352];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037890;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:35:13.715521Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037891;self_id=[2:7488013075370061555:2344];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037891;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:35:13.715594Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037893;self_id=[2:7488013075370061582:2350];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037893;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:35:13.715609Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037892;self_id=[2:7488013075370061681:2353];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037892;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:35:13.715656Z node 2 :TX_COLUMNSHARD_WRITE WARN: 
tablet_id=72075186224037894;self_id=[2:7488013075370061562:2347];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037894;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:35:13.715677Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037895;self_id=[2:7488013075370061573:2349];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037895;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:35:13.715731Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037897;self_id=[2:7488013075370061560:2346];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037897;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:35:13.715745Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037896;self_id=[2:7488013075370061597:2351];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037896;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2025-03-31T16:35:16.509319Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:35:16.509364Z node 2 :IMPORT WARN: Table profiles were not loaded WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> KqpQueryService::ExecuteQueryWithWorkloadManager [GOOD] >> KqpQueryService::ExecuteRetryQuery >> KqpQueryService::TempTablesDrop [GOOD] >> KqpQueryService::Tcl >> KqpQueryService::ExecuteQueryUpsertDoesntChangeIndexedValuesIfNotChanged [GOOD] >> KqpQueryService::ExecuteQueryWithResourcePoolClassifier >> KqpQueryService::StreamExecuteQueryMultiResult [GOOD] >> KqpQueryService::TableSink_BadTransactions >> KqpQueryService::TableSink_OlapDelete [GOOD] >> KqpQueryService::TableSink_DisableSink [GOOD] >> YdbTableSplit::MergeByNoLoadAfterSplit [GOOD] >> KqpQueryService::IssuesInCaseOfSuccess [GOOD] >> KqpQueryService::ForbidInteractiveTxOnImplicitSession >> KqpQueryService::ExecuteQueryExplicitBeginCommitRollback >> KqpService::RangeCache-UseCache [GOOD] >> KqpQueryServiceScripts::ExecuteScriptStatsProfile >> KqpQueryService::SeveralCTAS+UseSink [GOOD] >> KqpQueryService::SeveralCTAS-UseSink >> KqpQueryServiceScripts::ListScriptExecutions [GOOD] >> KqpQueryServiceScripts::Tcl ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_OlapDelete [GOOD] Test command err: Trying to start YDB, gRPC: 14922, MsgBus: 7123 2025-03-31T16:34:54.327625Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488013034865572317:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:54.327681Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001487/r3tmp/tmpdkBLqF/pdisk_1.dat 2025-03-31T16:34:54.724167Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:34:54.759073Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:54.759211Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> 
Connecting TServer::EnableGrpc on GrpcPort 14922, node 1 2025-03-31T16:34:54.766584Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:34:54.800849Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:54.800903Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:54.800918Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:54.801059Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7123 TClient is connected to server localhost:7123 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:34:55.329405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:55.351916Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:34:57.305220Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013047750474873:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:57.305340Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:57.620432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:34:57.782139Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488013047750475043:2339];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:34:57.782367Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488013047750475043:2339];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:34:57.782668Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488013047750475043:2339];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:34:57.782846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488013047750475043:2339];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:34:57.782988Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488013047750475043:2339];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:34:57.783153Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488013047750475043:2339];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:34:57.783274Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488013047750475043:2339];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:34:57.783402Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488013047750475043:2339];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:34:57.783545Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488013047750475043:2339];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:34:57.783657Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488013047750475043:2339];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:34:57.783775Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488013047750475043:2339];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:34:57.783892Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488013047750475043:2339];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2025-03-31T16:34:57.787757Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488013047750475041:2338];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:34:57.787885Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488013047750475041:2338];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:34:57.788173Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488013047750475041:2338];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:34:57.788298Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488013047750475041:2338];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:34:57.788413Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488013047750475041:2338];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:34:57.788587Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488013047750475041:2338];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:34:57.788707Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488013047750475041:2338];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:34:57.788821Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488013047750475041:2338];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:34:57.788937Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488013047750475041:2338];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:34:57.789122Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488013047750475041:2338];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:34:57.789251Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488013047750475041:2338];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:34:57.789373Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488013047750475041:2338];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:34:57.821878Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488013047750475057:2340];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:34:57.821944Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037896;self_id=[1:7488013047750475057:2340];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:34:57.822138Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488013047750475057:2340];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:34:57.822269Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488013047750475057:2340];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:34:57.822396Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488013047750475057:2340];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:34:57.822516Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488013047750475057:2340];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:34:57.822622Z node 1 :TX_COLUMNS ... 7888;self_id=[3:7488013118339361379:2336];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:16.111650Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[3:7488013118339361379:2336];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:16.111964Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[3:7488013118339361389:2338];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:16.112548Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[3:7488013118339361514:2344];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037894;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:16.114939Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[3:7488013118339361389:2338];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:16.407151Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=281474976715667;tx_id=281474976715667;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715667; 2025-03-31T16:35:16.408018Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=281474976715667;tx_id=281474976715667;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715667; 2025-03-31T16:35:16.408029Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=281474976715667;tx_id=281474976715667;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715667; 2025-03-31T16:35:16.408182Z node 3 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=281474976715667;tx_id=281474976715667;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715667; 2025-03-31T16:35:16.408264Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=281474976715667;tx_id=281474976715667;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715667; 2025-03-31T16:35:16.408542Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=281474976715667;tx_id=281474976715667;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715667; 2025-03-31T16:35:16.408616Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=281474976715667;tx_id=281474976715667;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715667; 2025-03-31T16:35:16.408806Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=281474976715667;tx_id=281474976715667;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715667; 2025-03-31T16:35:16.409041Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=281474976715667;tx_id=281474976715667;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715667; 2025-03-31T16:35:16.409056Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:16.409312Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=281474976715667;tx_id=281474976715667;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715667; 2025-03-31T16:35:16.409512Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:16.409634Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:16.409783Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:16.409811Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:16.410026Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:16.410115Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:16.410240Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:16.411490Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:16.411490Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:16.812383Z node 3 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224037896;self_id=[3:7488013118339361411:2343];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037896;event=TEvWrite;fline=manager.cpp:116;event=abort;tx_id=281474976715669;problem=finished; 2025-03-31T16:35:16.812452Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037896;self_id=[3:7488013118339361411:2343];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037896;event=TEvWrite;fline=manager.cpp:134;event=abort;tx_id=281474976715669;problem=finished; 2025-03-31T16:35:16.812578Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037888;self_id=[3:7488013118339361379:2336];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037888;event=TEvWrite;fline=manager.cpp:116;event=abort;tx_id=281474976715669;problem=finished; 2025-03-31T16:35:16.812643Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037888;self_id=[3:7488013118339361379:2336];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037888;event=TEvWrite;fline=manager.cpp:134;event=abort;tx_id=281474976715669;problem=finished; 2025-03-31T16:35:16.812752Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;self_id=[3:7488013118339361389:2338];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037889;event=TEvWrite;fline=manager.cpp:116;event=abort;tx_id=281474976715669;problem=finished; 2025-03-31T16:35:16.812779Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;self_id=[3:7488013118339361389:2338];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037889;event=TEvWrite;fline=manager.cpp:134;event=abort;tx_id=281474976715669;problem=finished; 2025-03-31T16:35:16.812842Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037890;self_id=[3:7488013118339361403:2340];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037890;event=TEvWrite;fline=manager.cpp:116;event=abort;tx_id=281474976715669;problem=finished; 2025-03-31T16:35:16.812864Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037890;self_id=[3:7488013118339361403:2340];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037890;event=TEvWrite;fline=manager.cpp:134;event=abort;tx_id=281474976715669;problem=finished; 2025-03-31T16:35:16.812922Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037891;self_id=[3:7488013118339361401:2339];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037891;event=TEvWrite;fline=manager.cpp:116;event=abort;tx_id=281474976715669;problem=finished; 2025-03-31T16:35:16.812959Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037891;self_id=[3:7488013118339361401:2339];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037891;event=TEvWrite;fline=manager.cpp:134;event=abort;tx_id=281474976715669;problem=finished; 2025-03-31T16:35:16.813061Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037892;self_id=[3:7488013118339361408:2342];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037892;event=TEvWrite;fline=manager.cpp:116;event=abort;tx_id=281474976715669;problem=finished; 2025-03-31T16:35:16.813086Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037892;self_id=[3:7488013118339361408:2342];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037892;event=TEvWrite;fline=manager.cpp:134;event=abort;tx_id=281474976715669;problem=finished; 2025-03-31T16:35:16.813149Z node 3 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224037893;self_id=[3:7488013118339361405:2341];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037893;event=TEvWrite;fline=manager.cpp:116;event=abort;tx_id=281474976715669;problem=finished; 2025-03-31T16:35:16.813169Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037893;self_id=[3:7488013118339361405:2341];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037893;event=TEvWrite;fline=manager.cpp:134;event=abort;tx_id=281474976715669;problem=finished; 2025-03-31T16:35:16.813230Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037895;self_id=[3:7488013118339361378:2335];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037895;event=TEvWrite;fline=manager.cpp:116;event=abort;tx_id=281474976715669;problem=finished; 2025-03-31T16:35:16.813252Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037895;self_id=[3:7488013118339361378:2335];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037895;event=TEvWrite;fline=manager.cpp:134;event=abort;tx_id=281474976715669;problem=finished; 2025-03-31T16:35:16.813308Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037894;self_id=[3:7488013118339361514:2344];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037894;event=TEvWrite;fline=manager.cpp:116;event=abort;tx_id=281474976715669;problem=finished; 2025-03-31T16:35:16.813327Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037894;self_id=[3:7488013118339361514:2344];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037894;event=TEvWrite;fline=manager.cpp:134;event=abort;tx_id=281474976715669;problem=finished; 2025-03-31T16:35:16.813392Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037897;self_id=[3:7488013118339361387:2337];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037897;event=TEvWrite;fline=manager.cpp:116;event=abort;tx_id=281474976715669;problem=finished; 2025-03-31T16:35:16.813413Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037897;self_id=[3:7488013118339361387:2337];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037897;event=TEvWrite;fline=manager.cpp:134;event=abort;tx_id=281474976715669;problem=finished; 2025-03-31T16:35:16.900198Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=281474976715672;tx_id=281474976715672;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715672; 2025-03-31T16:35:16.901333Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037890;tx_state=TTxProgressTx::Complete;commit_tx_id=281474976715672;commit_lock_id=281474976715671;fline=manager.cpp:94;broken_lock_id=281474976715669; 2025-03-31T16:35:16.901523Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> KqpService::SwitchCache+UseCache [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_DisableSink [GOOD] Test command err: Trying to start YDB, gRPC: 11891, MsgBus: 11750 2025-03-31T16:34:49.641857Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488013013795465553:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:49.642063Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001495/r3tmp/tmpYMfteo/pdisk_1.dat 2025-03-31T16:34:50.001248Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11891, node 1 2025-03-31T16:34:50.026093Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:34:50.027478Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:34:50.030532Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:50.032775Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:50.040078Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:34:50.077996Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:50.078027Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:50.078035Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:50.078151Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11750 TClient is connected to server localhost:11750 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:34:50.546963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:52.737416Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013026680368109:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:52.737541Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:53.080812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:34:53.256023Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488013030975335547:2340];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:34:53.256217Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488013030975335547:2340];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:34:53.256460Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488013030975335547:2340];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:34:53.256538Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488013030975335547:2340];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:34:53.256615Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488013030975335547:2340];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:34:53.256679Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488013030975335547:2340];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:34:53.256769Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488013030975335547:2340];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:34:53.256851Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488013030975335547:2340];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:34:53.256918Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488013030975335547:2340];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:34:53.256976Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488013030975335547:2340];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:34:53.257049Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488013030975335547:2340];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:34:53.257120Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488013030975335547:2340];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2025-03-31T16:34:53.262261Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7488013030975335539:2336];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:34:53.262333Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7488013030975335539:2336];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:34:53.262573Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7488013030975335539:2336];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:34:53.262716Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7488013030975335539:2336];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:34:53.262879Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7488013030975335539:2336];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:34:53.263014Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7488013030975335539:2336];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:34:53.263249Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7488013030975335539:2336];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:34:53.263419Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7488013030975335539:2336];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:34:53.263544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7488013030975335539:2336];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:34:53.263703Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7488013030975335539:2336];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:34:53.263819Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7488013030975335539:2336];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:34:53.263953Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7488013030975335539:2336];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:34:53.298318Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488013030975335541:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:34:53.298417Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7488013030975335541:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:34:53.298635Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488013030975335541:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:34:53.298746Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488013030975335541:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:34:53.298874Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488013030975335541:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:34:53.299008Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488013030975335541:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:34:53.299134Z n ... 86224037903;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:35:21.310065Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:35:21.310139Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T16:35:21.310159Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T16:35:21.311373Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:35:21.311416Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:35:21.311487Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:35:21.311516Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:35:21.311636Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T16:35:21.311666Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T16:35:21.311729Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 
2025-03-31T16:35:21.311759Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T16:35:21.311808Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T16:35:21.311829Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T16:35:21.311862Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T16:35:21.311884Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T16:35:21.312428Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T16:35:21.312489Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T16:35:21.312680Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T16:35:21.312723Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T16:35:21.312890Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:35:21.312984Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:35:21.313191Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:35:21.313231Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:35:21.313351Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T16:35:21.313385Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T16:35:21.373366Z node 3 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-31T16:35:21.373423Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-31T16:35:21.381894Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037898;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-31T16:35:21.382812Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-31T16:35:21.389904Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-31T16:35:21.390118Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-31T16:35:21.396928Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-31T16:35:21.396952Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-31T16:35:21.404165Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-31T16:35:21.404165Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-31T16:35:21.412258Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-31T16:35:21.412267Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-31T16:35:21.420512Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037900;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-31T16:35:21.420511Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037902;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-31T16:35:21.428155Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-31T16:35:21.428166Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-31T16:35:21.452656Z 
node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013152555197853:2437], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:21.452758Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:21.453072Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013152555197858:2440], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:21.457501Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-31T16:35:21.468762Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488013152555197860:2441], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-31T16:35:21.544706Z node 3 :TX_PROXY ERROR: Actor# [3:7488013152555197911:2711] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:21.638666Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7488013152555197937:2436] TxId: 281474976715661. Ctx: { TraceId: 01jqpgxfq9fwey0mtsjmgv7qy9, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZDc2YTgxNTAtZDFhY2FmMzgtYzczZDE5YTctM2YwOTA5MTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Data manipulation queries do not support column shard tables. 2025-03-31T16:35:21.638943Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZDc2YTgxNTAtZDFhY2FmMzgtYzczZDE5YTctM2YwOTA5MTI=, ActorId: [3:7488013152555197851:2436], ActorState: ExecuteState, TraceId: 01jqpgxfq9fwey0mtsjmgv7qy9, Create QueryResponse for error on request, msg: 2025-03-31T16:35:21.666090Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488013131080360038:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:21.666149Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpQueryServiceScripts::ExecuteScriptStatsNone [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::MergeByNoLoadAfterSplit [GOOD] Test command err: 2025-03-31T16:34:18.843298Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012881165799150:2161];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:18.845068Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00170d/r3tmp/tmpcKj3vp/pdisk_1.dat 2025-03-31T16:34:19.211320Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:34:19.244818Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:19.244945Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:19.270490Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19494, node 1 2025-03-31T16:34:19.335281Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:19.335305Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:19.335313Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:19.335463Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14373 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:34:19.609720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... Triggering split by load TClient is connected to server localhost:14373 2025-03-31T16:34:21.971537Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012894050701998:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:21.971649Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:22.237761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:34:22.396275Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012898345669477:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:22.396387Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:22.415900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743438862361 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) Table has 1 shards TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743438862361 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) 2025-03-31T16:34:22.489544Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012898345669576:2382], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:22.489702Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:22.489983Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012898345669592:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:22.490047Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012898345669591:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:22.490130Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012898345669594:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:22.490216Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:22.490299Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012898345669597:2396], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:22.490617Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012898345669600:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:22.490638Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012898345669608:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:22.490696Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012898345669606:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:22.492914Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012898345669649:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:22.493228Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012898345669647:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:22.493380Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:22.493684Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012898345669662:2414], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:22.493732Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:22.493983Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012898345669665:2416], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:22.495025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata, operationId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:34:22.497451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710665:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-31T16:34:22.497505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata/workload_manager, operationId: 281474976710665:1, at schemeshard: 72057594046644480 2025-03-31T16:34:22.497628Z nod ... s response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743438862361 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) 2025-03-31T16:35:17.488287Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037889 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.4497 2025-03-31T16:35:17.488347Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037890 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.4004 2025-03-31T16:35:17.588641Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 2 2025-03-31T16:35:17.588829Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037889 followerId=0, pathId 2: RowCount 0, DataSize 0 2025-03-31T16:35:17.589236Z node 1 :FLAT_TX_SCHEMESHARD INFO: Propose merge request : Transaction { WorkingDir: "/Root" OperationType: ESchemeOpSplitMergeTablePartitions SplitMergeTablePartitions { TablePath: "/Root/Foo" SourceTabletId: 72075186224037889 SourceTabletId: 72075186224037890 SchemeshardId: 72057594046644480 } Internal: true FailOnExist: false } TxId: 281474976715658 TabletId: 72057594046644480, reason: shard with tabletId: 72075186224037889 merge by load (shardLoad: 0.02), shardToMergeCount: 2, totalSize: 0, sizeToMerge: 0, totalLoad: 0.04, loadThreshold: 0.07 2025-03-31T16:35:17.589362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TSplitMerge Propose, tableStr: /Root/Foo, tableId: , opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:35:17.589878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-31T16:35:17.590605Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-03-31T16:35:17.590675Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037890 followerId=0, pathId 2: RowCount 0, DataSize 0 
2025-03-31T16:35:17.593307Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxSplitTablePartition, at tablet# 72057594046644480 2025-03-31T16:35:17.597648Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-03-31T16:35:17.597731Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2025-03-31T16:35:17.600207Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TConfigureDestination ProgressState, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:35:17.604228Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037891 actor [1:7488013134569043159:12286] 2025-03-31T16:35:17.630014Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037891 2025-03-31T16:35:17.630165Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037891, state: WaitScheme 2025-03-31T16:35:17.630435Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037891 TxInFly 0 2025-03-31T16:35:17.636058Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TConfigureDestination operationId# 281474976715658:0 HandleReply TEvInitSplitMergeDestinationAck, operationId: 281474976715658:0, at schemeshard: 72057594046644480 message# OperationCookie: 281474976715658 TabletId: 72075186224037891 2025-03-31T16:35:17.636117Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 3 -> 131 2025-03-31T16:35:17.638167Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TTransferData operationId# 281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2025-03-31T16:35:17.659700Z node 1 :TX_DATASHARD INFO: Switched to work state Ready tabletId 72075186224037891 2025-03-31T16:35:17.659834Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037891 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-31T16:35:17.659903Z node 1 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037891 2025-03-31T16:35:17.659938Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037891 2025-03-31T16:35:17.660229Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037891 TxInFly 0 2025-03-31T16:35:17.663802Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TTransferData operationId# 281474976715658:0 HandleReply TEvSplitAck, at schemeshard: 72057594046644480, message: OperationCookie: 281474976715658 TabletId: 72075186224037889 2025-03-31T16:35:17.664145Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TTransferData operationId# 281474976715658:0 HandleReply TEvSplitAck, at schemeshard: 72057594046644480, message: OperationCookie: 281474976715658 TabletId: 72075186224037890 2025-03-31T16:35:17.664436Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 131 -> 132 2025-03-31T16:35:17.667054Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-03-31T16:35:17.667431Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-03-31T16:35:17.667515Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TNotifySrc, operationId: 281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2025-03-31T16:35:17.669026Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037891 followerId 0 pathId 
[OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2025-03-31T16:35:17.669388Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-03-31T16:35:17.669436Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-03-31T16:35:17.669456Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 6 2025-03-31T16:35:17.676199Z node 1 :TX_DATASHARD INFO: 72075186224037890 Initiating switch from PreOffline to Offline state 2025-03-31T16:35:17.678457Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TNotifySrc, operationId: 281474976715658:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186224037890, at schemeshard: 72057594046644480 2025-03-31T16:35:17.678510Z node 1 :TX_DATASHARD INFO: 72075186224037889 Initiating switch from PreOffline to Offline state 2025-03-31T16:35:17.678780Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TNotifySrc, operationId: 281474976715658:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186224037889, at schemeshard: 72057594046644480 2025-03-31T16:35:17.678846Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2025-03-31T16:35:17.678868Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2025-03-31T16:35:17.678906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2025-03-31T16:35:17.681857Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unable to activate 281474976715658:0 2025-03-31T16:35:17.681968Z node 1 :TX_DATASHARD INFO: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2025-03-31T16:35:17.683396Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-03-31T16:35:17.685750Z node 1 :TX_DATASHARD INFO: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2025-03-31T16:35:17.686885Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2025-03-31T16:35:17.689757Z node 1 :TX_DATASHARD INFO: OnTabletStop: 72075186224037890 reason = ReasonStop 2025-03-31T16:35:17.690310Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2025-03-31T16:35:17.690738Z node 1 :TX_DATASHARD INFO: OnTabletDead: 72075186224037890 2025-03-31T16:35:17.690838Z node 1 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037890 2025-03-31T16:35:17.697467Z node 1 :TX_DATASHARD INFO: OnTabletStop: 72075186224037889 reason = ReasonStop 2025-03-31T16:35:17.697931Z node 1 :TX_DATASHARD INFO: OnTabletDead: 72075186224037889 2025-03-31T16:35:17.698029Z node 1 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037889 2025-03-31T16:35:17.698044Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2025-03-31T16:35:17.768843Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 
72057594046644480, queue size# 1 2025-03-31T16:35:17.769054Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037891 followerId=0, pathId 2: RowCount 0, DataSize 0 2025-03-31T16:35:17.769271Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-03-31T16:35:19.237574Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72057594046644480 2025-03-31T16:35:19.237686Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046644480 2025-03-31T16:35:19.237738Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046644480 TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743438862361 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 3 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpService::RangeCache-UseCache [GOOD] Test command err: Trying to start YDB, gRPC: 32639, MsgBus: 15145 2025-03-31T16:34:57.321222Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488013046551148891:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:57.325620Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001479/r3tmp/tmpPqmGxs/pdisk_1.dat 2025-03-31T16:34:57.681294Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32639, node 1 2025-03-31T16:34:57.734351Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:57.734433Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:57.753094Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:34:57.799577Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:57.799600Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:57.799607Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:57.799740Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15145 TClient is connected to server localhost:15145 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:34:58.347482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:58.387387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:58.538393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:58.699187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:58.765885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:00.531961Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013059436052547:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:00.532070Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:00.949965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:35:00.990712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:35:01.019701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:35:01.048191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:35:01.119769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:35:01.168339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:35:01.256015Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013063731020363:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:01.256101Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:01.258517Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013063731020368:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:01.261757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:35:01.272770Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488013063731020370:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:35:01.351905Z node 1 :TX_PROXY ERROR: Actor# [1:7488013063731020427:3459] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:02.192120Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzkzMjQ3N2UtOTVkMzA3MGQtNTMzYzllLTYzNTdhYTYy, ActorId: [1:7488013068025988012:2496], ActorState: ExecuteState, TraceId: 01jqpgwwx88yy7j23rnzaybcsn, Reply query error, msg: Pending previous query completion proxyRequestId: 7 2025-03-31T16:35:02.192208Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzkzMjQ3N2UtOTVkMzA3MGQtNTMzYzllLTYzNTdhYTYy, ActorId: [1:7488013068025988012:2496], ActorState: ExecuteState, TraceId: 01jqpgwwx88yy7j23rnzaybcsn, Reply query error, msg: Pending previous query completion proxyRequestId: 8 2025-03-31T16:35:02.192242Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzkzMjQ3N2UtOTVkMzA3MGQtNTMzYzllLTYzNTdhYTYy, ActorId: [1:7488013068025988012:2496], ActorState: ExecuteState, TraceId: 01jqpgwwx88yy7j23rnzaybcsn, Reply query error, msg: Pending previous query completion proxyRequestId: 9 2025-03-31T16:35:02.192284Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzkzMjQ3N2UtOTVkMzA3MGQtNTMzYzllLTYzNTdhYTYy, ActorId: [1:7488013068025988012:2496], ActorState: ExecuteState, TraceId: 01jqpgwwx88yy7j23rnzaybcsn, Reply query error, msg: Pending previous query completion proxyRequestId: 10 2025-03-31T16:35:02.192308Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzkzMjQ3N2UtOTVkMzA3MGQtNTMzYzllLTYzNTdhYTYy, ActorId: [1:7488013068025988012:2496], ActorState: ExecuteState, TraceId: 01jqpgwwx88yy7j23rnzaybcsn, Reply query error, msg: Pending previous query completion proxyRequestId: 11 2025-03-31T16:35:02.193557Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzkzMjQ3N2UtOTVkMzA3MGQtNTMzYzllLTYzNTdhYTYy, ActorId: [1:7488013068025988012:2496], ActorState: ExecuteState, TraceId: 01jqpgwwx88yy7j23rnzaybcsn, Reply query error, msg: Pending previous query completion proxyRequestId: 12 2025-03-31T16:35:02.200824Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzkzMjQ3N2UtOTVkMzA3MGQtNTMzYzllLTYzNTdhYTYy, ActorId: [1:7488013068025988012:2496], ActorState: ExecuteState, TraceId: 01jqpgwwx88yy7j23rnzaybcsn, Reply query error, msg: Pending previous query completion proxyRequestId: 13 2025-03-31T16:35:02.203352Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzkzMjQ3N2UtOTVkMzA3MGQtNTMzYzllLTYzNTdhYTYy, ActorId: [1:7488013068025988012:2496], ActorState: ExecuteState, TraceId: 01jqpgwwx88yy7j23rnzaybcsn, Reply query error, msg: Pending previous query completion proxyRequestId: 14 2025-03-31T16:35:02.203843Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzkzMjQ3N2UtOTVkMzA3MGQtNTMzYzllLTYzNTdhYTYy, ActorId: [1:7488013068025988012:2496], ActorState: ExecuteState, TraceId: 01jqpgwwx88yy7j23rnzaybcsn, Reply query error, msg: Pending previous query completion proxyRequestId: 15 2025-03-31T16:35:02.319863Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488013046551148891:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:02.319959Z node 1 :METADATA_PROVIDER 
ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 3291, MsgBus: 31284 2025-03-31T16:35:03.129428Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488013074152238455:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:03.135205Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001479/r3tmp/tmprNUEYc/pdisk_1.dat 2025-03-31T16:35:03.273413Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3291, node 2 2025-03-31T16:35:03.295661Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:03.295740Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:03.297414Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:35:03.339116Z node 2 :NET_CLASSIFIER ... RjOGMtYmE2MGZmZjQtZmUxNTcwMmU=, ActorId: [2:7488013091332110477:2558], ActorState: ExecuteState, TraceId: 01jqpgx2h72tr4dctyfjzvw3gg, Reply query error, msg: Pending previous query completion proxyRequestId: 49 2025-03-31T16:35:08.071722Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YWVjZGZjNzItNjMzYWFlNmItYWZiNjBjYmMtZTM1N2EwODE=, ActorId: [2:7488013095627077823:2569], ActorState: ExecuteState, TraceId: 01jqpgx2n6bt80pw3b0zsfb56t, Reply query error, msg: Pending previous query completion proxyRequestId: 52 2025-03-31T16:35:08.076072Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YWVjZGZjNzItNjMzYWFlNmItYWZiNjBjYmMtZTM1N2EwODE=, ActorId: [2:7488013095627077823:2569], ActorState: ExecuteState, TraceId: 01jqpgx2n6bt80pw3b0zsfb56t, Reply query error, msg: Pending previous query completion proxyRequestId: 53 2025-03-31T16:35:08.076167Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YWVjZGZjNzItNjMzYWFlNmItYWZiNjBjYmMtZTM1N2EwODE=, ActorId: [2:7488013095627077823:2569], ActorState: ExecuteState, TraceId: 01jqpgx2n6bt80pw3b0zsfb56t, Reply query error, msg: Pending previous query completion proxyRequestId: 54 2025-03-31T16:35:08.076206Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YWVjZGZjNzItNjMzYWFlNmItYWZiNjBjYmMtZTM1N2EwODE=, ActorId: [2:7488013095627077823:2569], ActorState: ExecuteState, TraceId: 01jqpgx2n6bt80pw3b0zsfb56t, Reply query error, msg: Pending previous query completion proxyRequestId: 55 2025-03-31T16:35:08.129521Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488013074152238455:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:08.129596Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:35:08.210347Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZDkxYjBlYzItMzczMDY0MzYtOTlkMWIwMzQtOWIxMzhmOTk=, ActorId: [2:7488013095627077855:2580], ActorState: ExecuteState, TraceId: 01jqpgx2sg0sheq964xxppzc8j, Reply query error, msg: Pending previous query completion proxyRequestId: 58 2025-03-31T16:35:08.211062Z node 2 
:KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZDkxYjBlYzItMzczMDY0MzYtOTlkMWIwMzQtOWIxMzhmOTk=, ActorId: [2:7488013095627077855:2580], ActorState: ExecuteState, TraceId: 01jqpgx2sg0sheq964xxppzc8j, Reply query error, msg: Pending previous query completion proxyRequestId: 59 2025-03-31T16:35:08.211152Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZDkxYjBlYzItMzczMDY0MzYtOTlkMWIwMzQtOWIxMzhmOTk=, ActorId: [2:7488013095627077855:2580], ActorState: ExecuteState, TraceId: 01jqpgx2sg0sheq964xxppzc8j, Reply query error, msg: Pending previous query completion proxyRequestId: 60 2025-03-31T16:35:08.348616Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NmQ3ZTE3ZTgtOGRiYTY5YzAtYTE4OTgyMTQtNjAyNmI1ZmM=, ActorId: [2:7488013095627077908:2593], ActorState: ExecuteState, TraceId: 01jqpgx2xw9nj6y33ephf7b32h, Reply query error, msg: Pending previous query completion proxyRequestId: 63 2025-03-31T16:35:08.355732Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NmQ3ZTE3ZTgtOGRiYTY5YzAtYTE4OTgyMTQtNjAyNmI1ZmM=, ActorId: [2:7488013095627077908:2593], ActorState: ExecuteState, TraceId: 01jqpgx2xw9nj6y33ephf7b32h, Reply query error, msg: Pending previous query completion proxyRequestId: 64 2025-03-31T16:35:08.483803Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MjhhOGFjMDgtNmQ0MjExMy1iZTc1OGU1MS1jNGNjOGUyZg==, ActorId: [2:7488013095627077946:2602], ActorState: ExecuteState, TraceId: 01jqpgx3231702nbfy8gjanh1b, Reply query error, msg: Pending previous query completion proxyRequestId: 67 Trying to start YDB, gRPC: 9732, MsgBus: 9023 2025-03-31T16:35:09.521191Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488013099030756063:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:09.521294Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001479/r3tmp/tmpx0izU2/pdisk_1.dat 2025-03-31T16:35:09.637016Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9732, node 3 2025-03-31T16:35:09.666448Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:09.666569Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:09.668550Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:35:09.723674Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:09.723698Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:09.723705Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:09.723830Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9023 TClient is connected to server localhost:9023 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-31T16:35:10.292509Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:10.311400Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:35:10.386027Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:10.554213Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:10.631199Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:13.129792Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013116210627033:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:13.129884Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:13.183110Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:35:13.219335Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:35:13.289476Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:35:13.326995Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:35:13.360640Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:35:13.405260Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:35:13.457198Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013116210627550:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:13.457297Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:13.457351Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013116210627555:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:13.461913Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:35:13.504672Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488013116210627557:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:35:13.566075Z node 3 :TX_PROXY ERROR: Actor# [3:7488013116210627613:3452] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:14.523231Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488013099030756063:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:14.523301Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; took: 8.450356s took: 8.464139s took: 8.467152s took: 8.436285s took: 8.439864s took: 8.438449s took: 8.447934s took: 8.463043s took: 8.450671s took: 8.436195s >> KqpQueryService::Ddl_Dml [GOOD] >> KqpService::CloseSessionsWithLoad [GOOD] >> KqpService::PatternCache >> KqpQueryService::CTASWithoutPerStatement [GOOD] >> KqpQueryService::CheckIsolationLevelFroPerStatementMode ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::ExecuteScriptStatsNone [GOOD] Test command err: Trying to start YDB, gRPC: 62086, MsgBus: 9219 2025-03-31T16:35:02.790332Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488013070375088731:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:02.790456Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00145a/r3tmp/tmpdVPMBf/pdisk_1.dat 2025-03-31T16:35:03.160285Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62086, node 1 2025-03-31T16:35:03.188147Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:03.188248Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:03.191373Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:35:03.245754Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:03.245776Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:03.245782Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:03.245903Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9219 TClient is connected to server localhost:9219 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:03.785253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:35:03.808790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:35:03.968333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:04.127841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:04.199421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:05.844442Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013083259992378:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:05.844547Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:06.181140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:35:06.212268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:35:06.250425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:35:06.283302Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:35:06.323170Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:35:06.360657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:35:06.411889Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013087554960183:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:06.411969Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:06.412162Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013087554960189:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:06.416778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:35:06.426686Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488013087554960191:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:35:06.510903Z node 1 :TX_PROXY ERROR: Actor# [1:7488013087554960244:3444] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:07.422960Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:35:07.424538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:35:07.425833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T16:35:07.822319Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488013070375088731:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:07.822372Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 26385, MsgBus: 13007 2025-03-31T16:35:09.922708Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488013099745736467:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:09.922896Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00145a/r3tmp/tmpek0czU/pdisk_1.dat 2025-03-31T16:35:10.089207Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:35:10.112666Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:10.112755Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:10.115890Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26385, node 2 2025-03-31T16:35:10.194961Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:10.194991Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:10.195010Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:10.195156Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13007 TClient is connected to server localhost:13007 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:10.618734Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:10.627960Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:10.649682Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976 ... 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:35:13.320861Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:35:13.394256Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:35:13.457968Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013116925607934:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:13.458054Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:13.458290Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013116925607939:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:13.461835Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:35:13.471770Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488013116925607941:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:35:13.554863Z node 2 :TX_PROXY ERROR: Actor# [2:7488013116925607995:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:14.349027Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:35:14.350604Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-31T16:35:14.352729Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-31T16:35:14.924304Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488013099745736467:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:14.924365Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 2104, MsgBus: 1545 2025-03-31T16:35:17.191681Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488013131760951925:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:17.203842Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00145a/r3tmp/tmpjLgvTy/pdisk_1.dat 2025-03-31T16:35:17.333830Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:35:17.362068Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:17.362170Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:17.364114Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2104, node 3 2025-03-31T16:35:17.411139Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:17.411162Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:17.411171Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:17.411303Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1545 TClient is connected to server localhost:1545 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:17.916576Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:17.929644Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:35:18.007370Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-31T16:35:18.196290Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:18.280534Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:20.745649Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013144645855586:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:20.745732Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:20.792410Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:35:20.819515Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:35:20.846592Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:35:20.877049Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:35:20.910191Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:35:20.943659Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:35:20.992449Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013144645856096:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:20.992535Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013144645856101:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:20.992540Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:20.996121Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:35:21.005224Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488013144645856103:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:35:21.061893Z node 3 :TX_PROXY ERROR: Actor# [3:7488013148940823452:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:21.940951Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:35:21.942639Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-31T16:35:21.944068Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-31T16:35:22.191971Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488013131760951925:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:22.192070Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpService::SwitchCache+UseCache [GOOD] Test command err: Trying to start YDB, gRPC: 11889, MsgBus: 11452 2025-03-31T16:34:43.046251Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012988589965709:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:43.046485Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0014a5/r3tmp/tmpUygCkG/pdisk_1.dat 2025-03-31T16:34:43.444070Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11889, node 1 2025-03-31T16:34:43.488486Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:43.488587Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:43.498402Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:34:43.543954Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:43.543987Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:43.543994Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:43.544139Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11452 TClient is connected to server localhost:11452 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:34:44.130009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:44.148492Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:34:44.166264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:44.330448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:44.482799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:44.566669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:46.312971Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013001474869371:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:46.313117Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:46.721241Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:34:46.751112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:34:46.786126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:34:46.835860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:34:46.870733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:34:46.944083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:34:47.037226Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013005769837188:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:47.037303Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:47.037514Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013005769837193:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:47.041397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:34:47.053401Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-31T16:34:47.053890Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488013005769837195:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:34:47.134818Z node 1 :TX_PROXY ERROR: Actor# [1:7488013005769837252:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:34:48.047254Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488012988589965709:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:48.047317Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 15209, MsgBus: 62717 2025-03-31T16:34:50.815911Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488013016239481297:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:50.816024Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0014a5/r3tmp/tmpw5BwBX/pdisk_1.dat 2025-03-31T16:34:50.924873Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15209, node 2 2025-03-31T16:34:50.963363Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:50.963457Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:50.965747Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:34:50.980567Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:50.980593Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:50.980600Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:50.980706Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62717 TClient is connected to server localhost:62717 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-03-31T16:34:51.413835Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:34:51.429671Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:51.522360Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:51.703658Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting ... ous query completion proxyRequestId: 49 2025-03-31T16:34:56.022447Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NjAxZTEzMWMtNTIyNGE3MTgtMTUwZDk1MWYtNWM3MTgwNjY=, ActorId: [2:7488013042009287948:2570], ActorState: ExecuteState, TraceId: 01jqpgwpwn24tymcjzjzmsw43p, Reply query error, msg: Pending previous query completion proxyRequestId: 52 2025-03-31T16:34:56.022502Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NjAxZTEzMWMtNTIyNGE3MTgtMTUwZDk1MWYtNWM3MTgwNjY=, ActorId: [2:7488013042009287948:2570], ActorState: ExecuteState, TraceId: 01jqpgwpwn24tymcjzjzmsw43p, Reply query error, msg: Pending previous query completion proxyRequestId: 53 2025-03-31T16:34:56.028115Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NjAxZTEzMWMtNTIyNGE3MTgtMTUwZDk1MWYtNWM3MTgwNjY=, ActorId: [2:7488013042009287948:2570], ActorState: ExecuteState, TraceId: 01jqpgwpwn24tymcjzjzmsw43p, Reply query error, msg: Pending previous query completion proxyRequestId: 54 2025-03-31T16:34:56.028605Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NjAxZTEzMWMtNTIyNGE3MTgtMTUwZDk1MWYtNWM3MTgwNjY=, ActorId: [2:7488013042009287948:2570], ActorState: ExecuteState, TraceId: 01jqpgwpwn24tymcjzjzmsw43p, Reply query error, msg: Pending previous query completion proxyRequestId: 55 2025-03-31T16:34:56.133995Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MWNhY2U4MDctYmYxMzYxOTYtYTkzNjE2ZjgtNjg2ZjdjY2I=, ActorId: [2:7488013042009287991:2580], ActorState: ExecuteState, TraceId: 01jqpgwq05fmfhbr0nsjgy94p6, Reply query error, msg: Pending previous query completion proxyRequestId: 58 2025-03-31T16:34:56.134477Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MWNhY2U4MDctYmYxMzYxOTYtYTkzNjE2ZjgtNjg2ZjdjY2I=, ActorId: [2:7488013042009287991:2580], ActorState: ExecuteState, TraceId: 01jqpgwq05fmfhbr0nsjgy94p6, Reply query error, msg: Pending previous query completion proxyRequestId: 59 2025-03-31T16:34:56.137612Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MWNhY2U4MDctYmYxMzYxOTYtYTkzNjE2ZjgtNjg2ZjdjY2I=, ActorId: [2:7488013042009287991:2580], ActorState: ExecuteState, TraceId: 01jqpgwq05fmfhbr0nsjgy94p6, Reply query error, msg: Pending previous query completion proxyRequestId: 60 2025-03-31T16:34:56.279112Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ODU3ODA4ZGEtNzg5NTliYy03MWQ4ZDM5Yi00Y2I3YjU4NA==, ActorId: [2:7488013042009288025:2592], ActorState: ExecuteState, TraceId: 01jqpgwq4p5j72tramnv7bpmb1, Reply query error, msg: Pending previous query completion 
proxyRequestId: 63 2025-03-31T16:34:56.279180Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ODU3ODA4ZGEtNzg5NTliYy03MWQ4ZDM5Yi00Y2I3YjU4NA==, ActorId: [2:7488013042009288025:2592], ActorState: ExecuteState, TraceId: 01jqpgwq4p5j72tramnv7bpmb1, Reply query error, msg: Pending previous query completion proxyRequestId: 64 2025-03-31T16:34:56.394594Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=N2VhYzUxYzQtNTBlOTFlYTQtYzNlY2FkMzItODc1ZTA5YmM=, ActorId: [2:7488013042009288066:2601], ActorState: ExecuteState, TraceId: 01jqpgwq8939wnf05pkw008a4n, Reply query error, msg: Pending previous query completion proxyRequestId: 67 Trying to start YDB, gRPC: 22766, MsgBus: 19739 2025-03-31T16:34:57.575479Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488013047674288499:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:57.575543Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0014a5/r3tmp/tmpE1cGkB/pdisk_1.dat 2025-03-31T16:34:57.706235Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:34:57.722126Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:57.722212Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:57.724393Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22766, node 3 2025-03-31T16:34:57.770931Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:57.770954Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:57.770961Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:57.771108Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19739 TClient is connected to server localhost:19739 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:34:58.284765Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
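Not part of the captured log: the KQP_SESSION warnings above ("Reply query error, msg: Pending previous query completion") record several proxy requests landing on one session while a query is still running; a YDB session executes exactly one query at a time. A hedged sketch of the safe pattern, assuming the YDB Python SDK, where each concurrent request borrows its own session from a pool:

    import ydb
    from concurrent.futures import ThreadPoolExecutor

    driver = ydb.Driver(endpoint="grpc://localhost:15209", database="/Root")
    driver.wait(timeout=5)
    pool = ydb.QuerySessionPool(driver)

    # Each call checks out its own session from the pool; issuing these
    # against a single shared session would be rejected with
    # "Pending previous query completion", as in the warnings above.
    with ThreadPoolExecutor(max_workers=4) as ex:
        futures = [ex.submit(pool.execute_with_retries, "SELECT 1;") for _ in range(8)]
        results = [f.result() for f in futures]
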
2025-03-31T16:34:58.292477Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:34:58.306001Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:58.391804Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:58.564732Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:58.645763Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:01.152109Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013064854159456:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:01.152209Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:01.198348Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:35:01.236178Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:35:01.273679Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:35:01.306148Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:35:01.337525Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:35:01.438171Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:35:01.490036Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013064854159969:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:01.490132Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:01.490386Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013064854159974:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:01.493881Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:35:01.503942Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488013064854159976:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:35:01.579727Z node 3 :TX_PROXY ERROR: Actor# [3:7488013064854160029:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:02.539676Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:35:02.576831Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488013047674288499:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:02.577031Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:35:12.681064Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:35:12.681091Z node 3 :IMPORT WARN: Table profiles were not loaded took: 20.842887s took: 20.850854s took: 20.850852s took: 20.851502s took: 20.852112s took: 20.854429s took: 20.854556s took: 20.859891s took: 20.862625s took: 20.866742s >> KqpQueryService::FlowControllOnHugeLiteralAsTable >> KqpQueryServiceScripts::ForgetScriptExecution [GOOD] >> KqpQueryService::ExecuteQueryInteractiveTxCommitWithQuery >> PersQueueSdkReadSessionTest::ReadSessionWithCloseNotCommitted [GOOD] >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds >> KqpKv::BulkUpsert [GOOD] >> KqpKv::ReadRows_ExternalBlobs+UseExtBlobsPrecharge ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::Ddl_Dml [GOOD] Test command err: Trying to start YDB, gRPC: 9165, MsgBus: 19812 2025-03-31T16:34:58.508095Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488013049704024967:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:58.508195Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001470/r3tmp/tmpgG6WCo/pdisk_1.dat 2025-03-31T16:34:58.820592Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9165, node 1 2025-03-31T16:34:58.894481Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:58.894576Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:58.897632Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:34:58.911616Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:58.911635Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:58.911639Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:58.911728Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19812 TClient is connected to server localhost:19812 WaitRootIsUp 
'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:34:59.442550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:59.464706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:59.622403Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:59.790676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:59.858703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:01.802673Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013062588928629:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:01.802803Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:02.116429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:35:02.155677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:35:02.183997Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:35:02.213581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:35:02.243483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:35:02.284237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:35:02.330101Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013066883896434:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:02.330183Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:02.330432Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013066883896439:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:02.334085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:35:02.343907Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488013066883896441:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:35:02.409600Z node 1 :TX_PROXY ERROR: Actor# [1:7488013066883896495:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:03.373320Z node 1 :TX_PROXY ERROR: Actor# [1:7488013071178864111:3663] txid# 281474976710672, issues: { message: "User already exists" severity: 1 } 2025-03-31T16:35:03.383566Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjE5NzYwOWYtZjFkNjc0MzAtMzZjZDFlN2UtOWJkNDQwN2E=, ActorId: [1:7488013071178864105:2502], ActorState: ExecuteState, TraceId: 01jqpgwy22ft72xqzrbhpjh0re, Create QueryResponse for error on request, msg: 2025-03-31T16:35:03.457242Z node 1 :TX_PROXY ERROR: Actor# [1:7488013071178864167:3696] txid# 281474976710676, issues: { message: "User not found" severity: 1 } 2025-03-31T16:35:03.457467Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTQ3NWNhZjMtN2YxYTg4YjUtZjEzY2QyYS1mOWVhZjU4Mw==, ActorId: [1:7488013071178864161:2511], ActorState: ExecuteState, TraceId: 01jqpgwy4jaa30pcjadatxveeb, Create QueryResponse for error on request, msg: 2025-03-31T16:35:03.478244Z node 1 :TX_PROXY ERROR: Actor# [1:7488013071178864185:3705] txid# 281474976710678, issues: { message: "User not found" severity: 1 } 2025-03-31T16:35:03.478651Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NGE3ZTM2NWYtODZkODgzMTktN2Q0YmFhZmYtZDhjMGZmZDc=, ActorId: [1:7488013071178864177:2514], ActorState: ExecuteState, TraceId: 01jqpgwy5b0a28kpge0qzk3jc4, Create QueryResponse for error on request, msg: 2025-03-31T16:35:03.508133Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488013049704024967:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:03.508253Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 9780, MsgBus: 26899 2025-03-31T16:35:04.370834Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488013077744514262:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:04.370936Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001470/r3tmp/tmpz8Fqbz/pdisk_1.dat 2025-03-31T16:35:04.462583Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:35:04.502239Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:04.502332Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:04.505288Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9780, node 2 2025-03-31T16:35:04.546648Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:04.546669Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 
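Not part of the captured log: the TX_PROXY errors above ("User already exists", "User not found") come from user-management DDL replayed against an existing or already-dropped user. A hedged illustration of the kind of statements involved, assuming the YDB Python SDK; the user name is illustrative and the exact statements the test runs are not in the log:

    import ydb

    driver = ydb.Driver(endpoint="grpc://localhost:9165", database="/Root")
    driver.wait(timeout=5)
    pool = ydb.QuerySessionPool(driver)

    # Re-running CREATE USER for an existing user, or DROP USER for a
    # missing one, fails exactly as the TX_PROXY errors above show.
    pool.execute_with_retries("CREATE USER user1 PASSWORD 'password1';")
    pool.execute_with_retries("DROP USER user1;")
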
2025-03-31T16:35:04.546674Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:04.546767Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26899 TClient is connected to server localhost:26899 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depri ... 6224037919 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-03-31T16:35:21.123242Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7488013148590318122:2511], Table: `/Root/TestDdlDml2` ([72057594046644480:16:1]), SessionActorId: [4:7488013144295350777:2511]Got LOCKS BROKEN for table `/Root/TestDdlDml2`. ShardID=72075186224037919, Sink=[4:7488013148590318122:2511].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-03-31T16:35:21.123776Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7488013148590318115:2511], SessionActorId: [4:7488013144295350777:2511], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/TestDdlDml2`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[4:7488013144295350777:2511]. isRollback=0 2025-03-31T16:35:21.124037Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZjY3MjMxMTMtYzMzYWMyY2YtNDQ4NDRhYWQtZGQ2M2MyMmM=, ActorId: [4:7488013144295350777:2511], ActorState: ExecuteState, TraceId: 01jqpgxf613y3vrafhrzrjsweg, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [4:7488013148590318191:2511] from: [4:7488013148590318115:2511] 2025-03-31T16:35:21.124123Z node 4 :KQP_EXECUTER ERROR: ActorId: [4:7488013148590318191:2511] TxId: 281474976710675. Ctx: { TraceId: 01jqpgxf613y3vrafhrzrjsweg, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=ZjY3MjMxMTMtYzMzYWMyY2YtNDQ4NDRhYWQtZGQ2M2MyMmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/TestDdlDml2`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-03-31T16:35:21.124318Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZjY3MjMxMTMtYzMzYWMyY2YtNDQ4NDRhYWQtZGQ2M2MyMmM=, ActorId: [4:7488013144295350777:2511], ActorState: ExecuteState, TraceId: 01jqpgxf613y3vrafhrzrjsweg, Create QueryResponse for error on request, msg: 2025-03-31T16:35:21.212166Z node 4 :KQP_COMPILE_SERVICE WARN: queryId in recompile request and queryId in cache are different, queryId in request: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n UPSERT INTO TestDdlDml2 (Key, Value1) VALUES (1, \"1\");\n SELECT * FROM TestDdlDml2;\n UPSERT INTO TestDdlDml2 (Key, Value1) VALUES (2, \"2\");\n SELECT * FROM TestDdlDml2;\n CREATE TABLE TestDdlDml33 (\n Key Uint64,\n PRIMARY KEY (Key)\n );\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_database":"Root" }, "settings": { "ydb_database":"Root" }, "rollback_settings": { } } }}, queryId in cache: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n UPSERT INTO TestDdlDml2 (Key, Value1, Value2) VALUES (1, \"1\", \"1\");\n SELECT * FROM TestDdlDml2;\n ALTER TABLE TestDdlDml2 DROP COLUMN Value2;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_database":"Root" }, "settings": { "ydb_database":"Root" }, "rollback_settings": { } } }} 2025-03-31T16:35:21.394099Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-31T16:35:21.584208Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-03-31T16:35:21.824774Z node 4 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [4:7488013148590318615:2590], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:11:17: Error: At function: KiReadTable!
:11:17: Error: Cannot find table 'db.[/Root/TestDdlDml5]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:35:21.825071Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZTE1Yjc1ODEtMTQ3ZDY1ZGMtZTA2OTkwNzMtZWNlMDdhNDc=, ActorId: [4:7488013148590318435:2569], ActorState: ExecuteState, TraceId: 01jqpgxft76qwa4twd03tw0gpm, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:35:21.912607Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 2025-03-31T16:35:22.035937Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710690:0, at schemeshard: 72057594046644480 2025-03-31T16:35:22.476706Z node 4 :TX_PROXY ERROR: Actor# [4:7488013152885286227:4201] txid# 281474976710696, issues: { message: "Check failed: path: \'/Root/TestDdl1\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 19], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:22.476864Z node 4 :KQP_GATEWAY ERROR: Unexpected error on scheme request, TxId: 281474976710696, ProxyStatus: ExecComplete, SchemeShardReason: Check failed: path: '/Root/TestDdl1', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 19], type: EPathTypeTable, state: EPathStateNoChanges) 2025-03-31T16:35:22.477043Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OWNhZTlmZWQtMjlkNDg2YjgtM2Q4NWY2ZWQtYTRjNmNiZmY=, ActorId: [4:7488013152885286209:2645], ActorState: ExecuteState, TraceId: 01jqpgxgpf2hnh0wggy77867rs, Create QueryResponse for error on request, msg: 2025-03-31T16:35:22.521279Z node 4 :TX_PROXY ERROR: Actor# [4:7488013152885286251:4212] txid# 281474976710698, issues: { message: "Check failed: path: \'/Root/TestDdl2\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 20], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:22.521517Z node 4 :KQP_GATEWAY ERROR: Unexpected error on scheme request, TxId: 281474976710698, ProxyStatus: ExecComplete, SchemeShardReason: Check failed: path: '/Root/TestDdl2', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 20], type: EPathTypeTable, state: EPathStateNoChanges) 2025-03-31T16:35:22.521663Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=YTJjMzU2NWEtNDMyOTlhZmEtZmJlZDczYWEtZGM4NzdhOWI=, ActorId: [4:7488013152885286238:2653], ActorState: ExecuteState, TraceId: 01jqpgxgqwft2k3428btc0hbyx, Create QueryResponse for error on request, msg: 2025-03-31T16:35:22.876737Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710702:0, at schemeshard: 72057594046644480 2025-03-31T16:35:23.045749Z node 4 :TX_PROXY ERROR: Actor# [4:7488013157180253732:4315] txid# 281474976710703, issues: { message: "Check failed: path: \'/Root/TestDdl2\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 20], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:23.046001Z node 4 :KQP_GATEWAY ERROR: Unexpected error on scheme request, TxId: 281474976710703, ProxyStatus: 
ExecComplete, SchemeShardReason: Check failed: path: '/Root/TestDdl2', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 20], type: EPathTypeTable, state: EPathStateNoChanges) 2025-03-31T16:35:23.046174Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ODkzOTJjNDUtNjEyMjhhMTAtZTFkMWYyYWEtMmY1Mjg0ZjY=, ActorId: [4:7488013152885286326:2678], ActorState: ExecuteState, TraceId: 01jqpgxh0j73fgzg5rsz8wke43, Create QueryResponse for error on request, msg: 2025-03-31T16:35:23.197466Z node 4 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [4:7488013157180253789:2715], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:17: Error: At function: KiReadTable!
:2:17: Error: Cannot find table 'db.[/Root/TestDdl4]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:35:23.198983Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NDRhYzQ3NjAtNzE2N2MwMDUtMWRiMzE4MjUtYzMzODQ3MDk=, ActorId: [4:7488013157180253786:2713], ActorState: ExecuteState, TraceId: 01jqpgxhdb6pct4zj68k6f0rh1, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:35:23.431099Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710710:0, at schemeshard: 72057594046644480 2025-03-31T16:35:24.020519Z node 4 :KQP_COMPILE_ACTOR ERROR: Get parsing result with error, self: [4:7488013161475221332:2765], owner: [4:7488013140000382484:2397], statement id: 1 2025-03-31T16:35:24.020786Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZmU1NzQ5YWEtZDhlZGM2YjQtYjYzYmZmN2YtMWEyMGE4NTA=, ActorId: [4:7488013161475221330:2764], ActorState: ExecuteState, TraceId: 01jqpgxj7b73hkr52d0g1j1kjd, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-03-31T16:35:24.217999Z node 4 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [4:7488013161475221379:2782], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:3:29: Error: At function: KiWriteTable!
:3:44: Error: Failed to convert type: Struct<'Key':Int32,'Value':String> to Struct<'Key':Uint64?,'Value':Uint64?>
:3:44: Error: Failed to convert 'Value': String to Optional
:3:44: Error: Failed to convert input columns types to scheme types, code: 2031 2025-03-31T16:35:24.218233Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=YzlmZWEzZjAtNWQyZTY3NWItNmJlNzQwYWQtOTJiYjc3NTA=, ActorId: [4:7488013161475221361:2776], ActorState: ExecuteState, TraceId: 01jqpgxjb71d37dbygyxqnbtp7, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-03-31T16:35:24.290531Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710719:0, at schemeshard: 72057594046644480 2025-03-31T16:35:24.427391Z node 4 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [4:7488013161475221499:2804], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:8:29: Error: At function: KiWriteTable!
:8:44: Error: Failed to convert type: Struct<'Key':Int32,'Value':String> to Struct<'Key':Uint64?,'Value':Uint64?>
:8:44: Error: Failed to convert 'Value': String to Optional
:8:44: Error: Failed to convert input columns types to scheme types, code: 2031 2025-03-31T16:35:24.427641Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MWJlZjU1MGYtNmNiOWJlMGUtZjg2NTk1ZTAtYzIyYThjYg==, ActorId: [4:7488013161475221406:2791], ActorState: ExecuteState, TraceId: 01jqpgxjez3m9mb4pymcvm0a8y, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: >> KqpQueryService::ExecStats >> KqpQueryService::TableSink_OltpOrder [GOOD] >> KqpQueryService::ClosedSessionRemovedWhileActiveWithQuery >> KqpQueryService::TableSink_OltpDelete [GOOD] >> KqpQueryService::Tcl [GOOD] >> KqpQueryService::TableSink_ReplaceFromSelectOlap ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::ForgetScriptExecution [GOOD] Test command err: Trying to start YDB, gRPC: 31133, MsgBus: 23121 2025-03-31T16:34:53.997967Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488013029577945436:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:53.998860Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001490/r3tmp/tmpb5SCx6/pdisk_1.dat 2025-03-31T16:34:54.379909Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31133, node 1 2025-03-31T16:34:54.413185Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:54.413298Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:54.419185Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:34:54.469910Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:54.469937Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:54.469946Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:54.470101Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23121 TClient is connected to server localhost:23121 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
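Not part of the captured log: the GENERIC_ERROR compilations above (code 2031) reject a literal row typed Struct<'Key':Int32,'Value':String> for columns declared Uint64?, since YQL will not implicitly convert a String to an optional numeric column. Casting the values to the column types compiles cleanly; a hedged sketch, assuming the YDB Python SDK, with an illustrative endpoint and table name:

    import ydb

    driver = ydb.Driver(endpoint="grpc://localhost:9780", database="/Root")
    driver.wait(timeout=5)
    pool = ydb.QuerySessionPool(driver)

    # Explicit casts make the row match Struct<'Key':Uint64?,'Value':Uint64?>,
    # avoiding the code-2031 conversion failure logged above.
    pool.execute_with_retries("""
        UPSERT INTO TestTable (Key, Value)
        VALUES (CAST(1 AS Uint64), CAST(2 AS Uint64));
    """)
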
2025-03-31T16:34:54.941767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:54.957854Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:34:54.970148Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:55.125736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:34:55.296358Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:34:55.361321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:57.232100Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013046757816372:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:57.232195Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:57.617786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:34:57.650989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:34:57.686586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:34:57.723895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:34:57.755910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:34:57.799039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:34:57.861194Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013046757816883:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:57.861289Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:57.861332Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013046757816888:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:57.864887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:34:57.881283Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488013046757816890:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:34:57.965903Z node 1 :TX_PROXY ERROR: Actor# [1:7488013046757816945:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:34:58.848973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:34:58.850958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:34:58.852289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T16:34:58.998053Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488013029577945436:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:58.998129Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:35:01.050406Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438901085, txId: 281474976710699] shutting down 2025-03-31T16:35:01.073815Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013063937687550:2782], DatabaseId: /Root, PoolId: another_pool_id, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool another_pool_id not found or you don't have access permissions } 2025-03-31T16:35:01.073819Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013063937687552:2784], DatabaseId: /Root, PoolId: another_pool_id, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool another_pool_id not found or you don't have access permissions } 2025-03-31T16:35:01.073917Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool another_pool_id, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool another_pool_id not found or you don't have access permissions } 2025-03-31T16:35:01.073928Z node 1 :KQP_WORKLOAD_SERVICE ERROR: [WorkloadService] [TPoolResolverActor] ActorId: [1:7488013063937687551:2783], DatabaseId: /Root, PoolId: another_pool_id, SessionId: ydb://session/3?node_id=1&id=NGVhZjViMTAtMjUxYzVkZDQtOWVkYmVjNWItNDc3MjVmNDQ=, Failed to fetch pool info NOT_FOUND, issues: {
: Error: Resource pool another_pool_id not found or you don't have access permissions } 2025-03-31T16:35:01.074024Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolResolverActor] ActorId: [1:7488013063937687551:2783], DatabaseId: /Root, PoolId: another_pool_id, SessionId: ydb://session/3?node_id=1&id=NGVhZjViMTAtMjUxYzVkZDQtOWVkYmVjNWItNDc3MjVmNDQ=, Failed to resolve pool, NOT_FOUND, issues: {
: Error: Failed to resolve pool id another_pool_id subissue: {
: Error: Resource pool another_pool_id not found or you don't have access permissions } } 2025-03-31T16:35:01.074137Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply continue error NOT_FOUND to [1:7488013063937687548:2781]: {
: Error: Failed to resolve pool id another_pool_id subissue: {
: Error: Resource pool another_pool_id not found or you don't have access permissions } } 2025-03-31T16:35:01.074225Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NGVhZjViMTAtMjUxYzVkZDQtOWVkYmVjNWItNDc3MjVmNDQ=, ActorId: [1:7488013063937687548:2781], ActorState: ExecuteState, TraceId: 01jqpgwvt355zk2x08f9e6s90q, Create QueryResponse for error on request, msg: Query failed during adding/waiting in workload pool 2025-03-31T16:35:01.074418Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [1:7488013063937687548:2781]: Pool another_pool_id not found Trying to start YDB, gRPC: 5506, MsgBus: 24685 2025-03-31T16:35:01.939005Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488013063277294443:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:01.939071Z node 2 :METADATA_PROVIDER ERRO ... D WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:35:05.637800Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013080457165899:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:05.637872Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:05.637921Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013080457165904:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:05.642780Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:35:05.656437Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488013080457165906:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:35:05.753055Z node 2 :TX_PROXY ERROR: Actor# [2:7488013080457165962:3450] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:06.662724Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-31T16:35:06.664202Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-31T16:35:06.665166Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:35:06.939216Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488013063277294443:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:06.939292Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 27124, MsgBus: 22012 2025-03-31T16:35:09.354323Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488013098602548380:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:09.354478Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001490/r3tmp/tmpWluZhj/pdisk_1.dat 2025-03-31T16:35:09.524073Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:35:09.544806Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:09.544928Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:09.547381Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27124, node 3 2025-03-31T16:35:09.634074Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:09.634100Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:09.634109Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:09.634254Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22012 TClient is connected to server localhost:22012 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:10.259184Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:10.269436Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:35:10.283570Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:10.363325Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:10.562683Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:10.653614Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:13.166603Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013115782419315:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:13.166730Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:13.233720Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:35:13.285358Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:35:13.317195Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:35:13.356783Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:35:13.394880Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:35:13.440720Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:35:13.525368Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013115782419832:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:13.525487Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:13.525686Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013115782419837:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:13.529625Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:35:13.539587Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488013115782419839:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:35:13.596572Z node 3 :TX_PROXY ERROR: Actor# [3:7488013115782419891:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:14.355199Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488013098602548380:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:14.355300Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:35:14.549011Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-31T16:35:14.550444Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-31T16:35:14.561332Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:35:24.523273Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:35:24.523303Z node 3 :IMPORT WARN: Table profiles were not loaded >> KqpQueryService::ExecuteRetryQuery [GOOD] >> KqpQueryService::Explain >> KqpQueryServiceScripts::TestPaging >> KqpQueryService::DdlGroup [GOOD] >> KqpQueryService::DdlPermission ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_OltpOrder [GOOD] Test command err: Trying to start YDB, gRPC: 16002, MsgBus: 3027 2025-03-31T16:34:52.324005Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488013025867863061:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:52.324092Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001494/r3tmp/tmpK9kasJ/pdisk_1.dat 2025-03-31T16:34:52.705171Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:34:52.746341Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:52.746437Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 16002, node 1 2025-03-31T16:34:52.748156Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:34:52.820779Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:52.820808Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:52.820818Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:52.820935Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3027 TClient is connected to server localhost:3027 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:34:53.400559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:55.444086Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013038752765613:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:55.444197Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:55.750963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:34:55.875067Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013038752765768:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:55.875164Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:55.875245Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013038752765773:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:55.878675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-31T16:34:55.887861Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488013038752765775:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-31T16:34:55.988885Z node 1 :TX_PROXY ERROR: Actor# [1:7488013038752765826:2432] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:34:57.324409Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488013025867863061:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:57.324487Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 2838, MsgBus: 31373 2025-03-31T16:34:58.917715Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488013050102923623:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:58.917800Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001494/r3tmp/tmpHFNv3g/pdisk_1.dat 2025-03-31T16:34:59.020786Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2838, node 2 2025-03-31T16:34:59.049976Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:59.050065Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:59.051472Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:34:59.086462Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:59.086522Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:59.086532Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:59.086661Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31373 TClient is connected to server localhost:31373 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:34:59.572954Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:02.109512Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013067282793464:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:02.109602Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:02.118445Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:35:02.196954Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013067282793568:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:02.197055Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:02.197217Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013067282793573:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:02.200506Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-31T16:35:02.211268Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488013067282793575:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-31T16:35:02.281341Z node 2 :TX_PROXY ERROR: Actor# [2:7488013067282793626:2394] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 19616, MsgBus: 20753 2025-03-31T16:35:03.346131Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488013072687531903:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:03.346198Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001494/r3tmp/tmpF5BFtZ/pdisk_1. ... 0.588398Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7488013145701980100:2422] TxId: 281474976715755. Ctx: { TraceId: 01jqpgxetf9vef58ta50k0fx8h, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YWJjMDVkNDQtMWVhNTdlZDgtYzRhMGVmOGEtY2NmMDU4ZWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/DataShard`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-03-31T16:35:20.589338Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YWJjMDVkNDQtMWVhNTdlZDgtYzRhMGVmOGEtY2NmMDU4ZWQ=, ActorId: [3:7488013089867402134:2422], ActorState: ExecuteState, TraceId: 01jqpgxetf9vef58ta50k0fx8h, Create QueryResponse for error on request, msg: 2025-03-31T16:35:20.713693Z node 3 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=97; 2025-03-31T16:35:20.713865Z node 3 :TX_DATASHARD ERROR: Prepare transaction failed. txid 97 at tablet 72075186224037888 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-03-31T16:35:20.713961Z node 3 :TX_DATASHARD ERROR: Errors while proposing transaction txid 97 at tablet 72075186224037888 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-03-31T16:35:20.714093Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7488013145701980143:2340], Table: `/Root/DataShard` ([72057594046644480:2:1]), SessionActorId: [3:7488013085572434537:2340]Got CONSTRAINT VIOLATION for table `/Root/DataShard`. ShardID=72075186224037888, Sink=[3:7488013145701980143:2340].{
: Error: Duplicate keys have been found., code: 2012 } 2025-03-31T16:35:20.714159Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7488013145701980133:2340], SessionActorId: [3:7488013085572434537:2340], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/DataShard`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[3:7488013085572434537:2340]. isRollback=0 2025-03-31T16:35:20.714354Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MjM4NTUwOTEtY2I2OTU0NTYtYTY4NmUxYmYtM2FiZGM4ODk=, ActorId: [3:7488013085572434537:2340], ActorState: ExecuteState, TraceId: 01jqpgxeyeaa22kmchf73zrc1s, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [3:7488013145701980134:2340] from: [3:7488013145701980133:2340] 2025-03-31T16:35:20.714423Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7488013145701980134:2340] TxId: 281474976715756. Ctx: { TraceId: 01jqpgxeyeaa22kmchf73zrc1s, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MjM4NTUwOTEtY2I2OTU0NTYtYTY4NmUxYmYtM2FiZGM4ODk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/DataShard`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-03-31T16:35:20.715421Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MjM4NTUwOTEtY2I2OTU0NTYtYTY4NmUxYmYtM2FiZGM4ODk=, ActorId: [3:7488013085572434537:2340], ActorState: ExecuteState, TraceId: 01jqpgxeyeaa22kmchf73zrc1s, Create QueryResponse for error on request, msg: 2025-03-31T16:35:20.841435Z node 3 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=98; 2025-03-31T16:35:20.841767Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7488013145701980177:2422], Table: `/Root/DataShard` ([72057594046644480:2:1]), SessionActorId: [3:7488013089867402134:2422]Got CONSTRAINT VIOLATION for table `/Root/DataShard`. ShardID=72075186224037888, Sink=[3:7488013145701980177:2422].{
: Error: Duplicate keys have been found., code: 2012 } 2025-03-31T16:35:20.841839Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7488013145701980167:2422], SessionActorId: [3:7488013089867402134:2422], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/DataShard`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[3:7488013089867402134:2422]. isRollback=0 2025-03-31T16:35:20.842027Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YWJjMDVkNDQtMWVhNTdlZDgtYzRhMGVmOGEtY2NmMDU4ZWQ=, ActorId: [3:7488013089867402134:2422], ActorState: ExecuteState, TraceId: 01jqpgxf2a8ev7rpdaw1f6whk9, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [3:7488013145701980168:2422] from: [3:7488013145701980167:2422] 2025-03-31T16:35:20.842098Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7488013145701980168:2422] TxId: 281474976715757. Ctx: { TraceId: 01jqpgxf2a8ev7rpdaw1f6whk9, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YWJjMDVkNDQtMWVhNTdlZDgtYzRhMGVmOGEtY2NmMDU4ZWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/DataShard`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-03-31T16:35:20.843298Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YWJjMDVkNDQtMWVhNTdlZDgtYzRhMGVmOGEtY2NmMDU4ZWQ=, ActorId: [3:7488013089867402134:2422], ActorState: ExecuteState, TraceId: 01jqpgxf2a8ev7rpdaw1f6whk9, Create QueryResponse for error on request, msg: 2025-03-31T16:35:20.971515Z node 3 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=99; 2025-03-31T16:35:20.971747Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7488013145701980210:2340], Table: `/Root/DataShard` ([72057594046644480:2:1]), SessionActorId: [3:7488013085572434537:2340]Got CONSTRAINT VIOLATION for table `/Root/DataShard`. ShardID=72075186224037888, Sink=[3:7488013145701980210:2340].{
: Error: Duplicate keys have been found., code: 2012 } 2025-03-31T16:35:20.971814Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7488013145701980201:2340], SessionActorId: [3:7488013085572434537:2340], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/DataShard`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[3:7488013085572434537:2340]. isRollback=0 2025-03-31T16:35:20.971977Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MjM4NTUwOTEtY2I2OTU0NTYtYTY4NmUxYmYtM2FiZGM4ODk=, ActorId: [3:7488013085572434537:2340], ActorState: ExecuteState, TraceId: 01jqpgxf6fbnsrc9gvtge27dtp, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [3:7488013145701980202:2340] from: [3:7488013145701980201:2340] 2025-03-31T16:35:20.972041Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7488013145701980202:2340] TxId: 281474976715758. Ctx: { TraceId: 01jqpgxf6fbnsrc9gvtge27dtp, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MjM4NTUwOTEtY2I2OTU0NTYtYTY4NmUxYmYtM2FiZGM4ODk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/DataShard`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-03-31T16:35:20.972793Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MjM4NTUwOTEtY2I2OTU0NTYtYTY4NmUxYmYtM2FiZGM4ODk=, ActorId: [3:7488013085572434537:2340], ActorState: ExecuteState, TraceId: 01jqpgxf6fbnsrc9gvtge27dtp, Create QueryResponse for error on request, msg: 2025-03-31T16:35:21.096210Z node 3 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=100; 2025-03-31T16:35:21.096502Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7488013149996947541:2422], Table: `/Root/DataShard` ([72057594046644480:2:1]), SessionActorId: [3:7488013089867402134:2422]Got CONSTRAINT VIOLATION for table `/Root/DataShard`. ShardID=72075186224037888, Sink=[3:7488013149996947541:2422].{
: Error: Duplicate keys have been found., code: 2012 } 2025-03-31T16:35:21.096576Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7488013149996947531:2422], SessionActorId: [3:7488013089867402134:2422], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/DataShard`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[3:7488013089867402134:2422]. isRollback=0 2025-03-31T16:35:21.096781Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YWJjMDVkNDQtMWVhNTdlZDgtYzRhMGVmOGEtY2NmMDU4ZWQ=, ActorId: [3:7488013089867402134:2422], ActorState: ExecuteState, TraceId: 01jqpgxfaebt891xnw67cqtzw3, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [3:7488013149996947532:2422] from: [3:7488013149996947531:2422] 2025-03-31T16:35:21.096845Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7488013149996947532:2422] TxId: 281474976715759. Ctx: { TraceId: 01jqpgxfaebt891xnw67cqtzw3, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YWJjMDVkNDQtMWVhNTdlZDgtYzRhMGVmOGEtY2NmMDU4ZWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/DataShard`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-03-31T16:35:21.097968Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YWJjMDVkNDQtMWVhNTdlZDgtYzRhMGVmOGEtY2NmMDU4ZWQ=, ActorId: [3:7488013089867402134:2422], ActorState: ExecuteState, TraceId: 01jqpgxfaebt891xnw67cqtzw3, Create QueryResponse for error on request, msg: 2025-03-31T16:35:21.232884Z node 3 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=101; 2025-03-31T16:35:21.233178Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7488013149996947573:2422], Table: `/Root/DataShard` ([72057594046644480:2:1]), SessionActorId: [3:7488013089867402134:2422]Got CONSTRAINT VIOLATION for table `/Root/DataShard`. ShardID=72075186224037888, Sink=[3:7488013149996947573:2422].{
: Error: Duplicate keys have been found., code: 2012 } 2025-03-31T16:35:21.233265Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7488013149996947565:2422], SessionActorId: [3:7488013089867402134:2422], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/DataShard`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[3:7488013089867402134:2422]. isRollback=0 2025-03-31T16:35:21.233423Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YWJjMDVkNDQtMWVhNTdlZDgtYzRhMGVmOGEtY2NmMDU4ZWQ=, ActorId: [3:7488013089867402134:2422], ActorState: ExecuteState, TraceId: 01jqpgxfeccyr108737sb9jmhd, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [3:7488013149996947566:2422] from: [3:7488013149996947565:2422] 2025-03-31T16:35:21.233493Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7488013149996947566:2422] TxId: 281474976715760. Ctx: { TraceId: 01jqpgxfeccyr108737sb9jmhd, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YWJjMDVkNDQtMWVhNTdlZDgtYzRhMGVmOGEtY2NmMDU4ZWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/DataShard`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-03-31T16:35:21.234654Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YWJjMDVkNDQtMWVhNTdlZDgtYzRhMGVmOGEtY2NmMDU4ZWQ=, ActorId: [3:7488013089867402134:2422], ActorState: ExecuteState, TraceId: 01jqpgxfeccyr108737sb9jmhd, Create QueryResponse for error on request, msg: WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_OltpDelete [GOOD] Test command err: Trying to start YDB, gRPC: 29138, MsgBus: 3402 2025-03-31T16:35:00.447560Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488013060312740085:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:00.449113Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00146a/r3tmp/tmpU3XbVh/pdisk_1.dat 2025-03-31T16:35:00.890812Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:35:00.928629Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:00.928755Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 29138, node 1 2025-03-31T16:35:00.931004Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:35:00.989758Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:00.989806Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:00.989819Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:00.989975Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3402 TClient is connected to server localhost:3402 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:01.611596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:35:03.640256Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013073197642615:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:03.640376Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:03.912317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:35:04.066673Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488013073197642759:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:35:04.066687Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488013073197642771:2337];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:35:04.066885Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488013073197642759:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:35:04.066935Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488013073197642771:2337];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:35:04.070498Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488013073197642759:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:35:04.070498Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488013073197642771:2337];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:35:04.070721Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488013073197642771:2337];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:35:04.070841Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488013073197642771:2337];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:35:04.070975Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488013073197642759:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:35:04.071056Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488013073197642771:2337];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:35:04.071163Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488013073197642759:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:35:04.071231Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488013073197642771:2337];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:35:04.071293Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488013073197642759:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:35:04.071346Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488013073197642771:2337];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:35:04.071416Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488013073197642759:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:35:04.071475Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488013073197642771:2337];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:35:04.071612Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488013073197642771:2337];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:35:04.071616Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488013073197642759:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:35:04.071765Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488013073197642771:2337];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:35:04.071963Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488013073197642759:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:35:04.072017Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488013073197642771:2337];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:35:04.072106Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488013073197642759:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:35:04.072246Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488013073197642759:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:35:04.072356Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488013073197642759:2335];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:35:04.114101Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7488013077492610069:2338];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:35:04.114104Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488013077492610091:2342];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:35:04.114158Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488013077492610091:2342];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:35:04.114158Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7488013077492610069:2338];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:35:04.114385Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488013077492610091:2342];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:35:04.114385Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7488013077492610069:2338];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:35:04.114507Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7488013077492610069:2338];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS ... xation;reason=disabled; 2025-03-31T16:35:10.946030Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[2:7488013097727761862:2338];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:10.947468Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[2:7488013097727761875:2342];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037892;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:10.947946Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[2:7488013097727761875:2342];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037892;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:10.948157Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[2:7488013097727761869:2340];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037896;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:10.949151Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[2:7488013097727761864:2339];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037897;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:10.949444Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[2:7488013097727761847:2336];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:10.949699Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[2:7488013097727761872:2341];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037893;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:10.950129Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[2:7488013097727761986:2348];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037894;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:10.950673Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7488013097727761839:2335];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:10.951267Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[2:7488013097727761862:2338];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:10.951986Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[2:7488013097727761860:2337];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037895;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:10.952194Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7488013097727761839:2335];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:10.952334Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[2:7488013097727761869:2340];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037896;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:10.952826Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[2:7488013097727761986:2348];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037894;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:10.953017Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[2:7488013097727761872:2341];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037893;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:10.955445Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[2:7488013097727761847:2336];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:10.957372Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[2:7488013097727761864:2339];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037897;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:10.989095Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;task_id=1d410ce2-e4e11f0-916bd657-81eeae59;tablet_id=72075186224037894;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:10.989383Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;task_id=1d40b4d6-e4e11f0-88fc011e-31df93e;tablet_id=72075186224037890;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:10.989744Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:35:10.991266Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 2025-03-31T16:35:11.152543Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488013084842859187:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:11.152615Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 61315, MsgBus: 3166 2025-03-31T16:35:16.905493Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488013128447153704:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:16.905554Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00146a/r3tmp/tmpjSuLm8/pdisk_1.dat 2025-03-31T16:35:17.035429Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:35:17.055159Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:17.055256Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:17.057583Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61315, node 3 2025-03-31T16:35:17.099548Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:17.099571Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:17.099580Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:17.099703Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3166 TClient is connected to server localhost:3166 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:17.613250Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:20.533725Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013145627023530:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:20.533888Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:20.545135Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:35:20.637289Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013145627023633:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:20.637382Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:20.637410Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013145627023638:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:20.640835Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-31T16:35:20.650355Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488013145627023640:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-31T16:35:20.752737Z node 3 :TX_PROXY ERROR: Actor# [3:7488013145627023691:2393] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } WAIT_INDEXATION: 0 2025-03-31T16:35:21.907011Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488013128447153704:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:21.907143Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> KqpQueryServiceScripts::ExecuteScriptWithResultsTtl [GOOD] >> KqpService::SwitchCache-UseCache >> KqpQueryService::ForbidInteractiveTxOnImplicitSession [GOOD] >> KqpQueryService::SeveralCTAS-UseSink [GOOD] >> KqpQueryService::ExecuteQueryExplicitBeginCommitRollback [GOOD] >> KqpQueryService::ExecuteQueryExplicitTxTLI >> KqpQueryService::TableSink_BadTransactions [GOOD] >> KqpQueryService::PeriodicTaskInSessionPool [GOOD] >> KqpQueryService::MaterializeTxResults >> KqpQueryServiceScripts::ForgetScriptExecutionRace [GOOD] >> KqpQueryServiceScripts::InvalidFetchToken >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCleanIndex [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::ExecuteScriptWithResultsTtl [GOOD] Test command err: Trying to start YDB, gRPC: 28520, MsgBus: 6989 2025-03-31T16:35:00.274208Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488013061398072496:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:00.274264Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00146b/r3tmp/tmp8LZxqz/pdisk_1.dat 2025-03-31T16:35:00.710496Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:35:00.715516Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:00.715653Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:00.718948Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28520, node 1 2025-03-31T16:35:00.841369Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:00.841399Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:00.841412Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:00.841552Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6989 TClient is connected to server localhost:6989 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:01.385624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:01.408110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:01.544523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:01.680149Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:01.755530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:03.475844Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013074282976164:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:03.478894Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:03.832595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:35:03.867785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:35:03.900923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:35:03.967661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:35:04.007179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:35:04.078163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:35:04.128202Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013078577943978:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:04.128280Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:04.128408Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013078577943983:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:04.131541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:35:04.141375Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488013078577943985:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:35:04.220597Z node 1 :TX_PROXY ERROR: Actor# [1:7488013078577944039:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:05.152247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:35:05.153635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T16:35:05.154523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:35:05.274612Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488013061398072496:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:05.274727Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:35:07.330632Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438907364, txId: 281474976710698] shutting down Trying to start YDB, gRPC: 8380, MsgBus: 29436 2025-03-31T16:35:08.278798Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488013096658167299:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:08.278864Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00146b/r3tmp/tmpny1QyN/pdisk_1.dat 2025-03-31T16:35:08.388508Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8380, node 2 2025-03-31T16:35:08.439623Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:08.439791Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:08.466842Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:35:08.495582Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:08.495607Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:08.495614Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:08.495749Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29436 TClient is connected to server localhost:29436 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:08.958372Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:08.975703Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 28147 ... 72057594046644480 2025-03-31T16:35:13.279302Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488013096658167299:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:13.279382Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:35:14.789659Z node 2 :KQP_PROXY WARN: [TQueryBase] [TGetScriptExecutionOperationQueryActor] TraceId: 39136703-2c08c9ab-4462fcc4-a4e81322, Finish with NOT_FOUND, Issues: {
<main>: Error: No such execution }, SessionId: ydb://session/3?node_id=2&id=OTk5MGNjOGItMTVjNDk4ZTEtNTE5MmNmYzEtNjY0MmJjMDA=, TxId: 2025-03-31T16:35:15.630808Z node 2 :KQP_PROXY WARN: [TQueryBase] [TCheckLeaseStatusQueryActor] TraceId: 39136703-2c08c9ab-4462fcc4-a4e81322, Finish with NOT_FOUND, Issues: {
<main>: Error: No such execution }, SessionId: ydb://session/3?node_id=2&id=NzUwZDUwOTktZDU4ZTJmZTEtMjhiMjg5ZWItNDQ2ZmNkNTc=, TxId: 2025-03-31T16:35:15.777414Z node 2 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: 39136703-2c08c9ab-4462fcc4-a4e81322, reply NOT_FOUND, issues: {
<main>: Error: No such execution } 2025-03-31T16:35:15.801897Z node 2 :KQP_PROXY WARN: [TQueryBase] [TCheckLeaseStatusQueryActor] TraceId: 39136703-2c08c9ab-4462fcc4-a4e81322, Finish with NOT_FOUND, Issues: {
<main>: Error: No such execution }, SessionId: ydb://session/3?node_id=2&id=NzhiZTg3NDAtOTY5YzFlZTUtZDM0YzdlNWItMTNlODMxNjU=, TxId: 2025-03-31T16:35:15.802000Z node 2 :KQP_PROXY WARN: [ScriptExecutions] [TCancelScriptExecutionOperationActor] ExecutionId: 39136703-2c08c9ab-4462fcc4-a4e81322, check lease failed 2025-03-31T16:35:16.122051Z node 2 :KQP_PROXY WARN: [TQueryBase] [TGetScriptExecutionResultQueryActor] TraceId: 39136703-2c08c9ab-4462fcc4-a4e81322, State: Get results info, Finish with NOT_FOUND, Issues: {
: Error: Script execution not found }, SessionId: ydb://session/3?node_id=2&id=N2JmODUxOTctMWM0ODg4MDMtZjAzNjE5Yi1jN2MwMDBhZA==, TxId: Trying to start YDB, gRPC: 17076, MsgBus: 11580 2025-03-31T16:35:18.426438Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488013138406812536:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:18.426499Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00146b/r3tmp/tmpwclz51/pdisk_1.dat 2025-03-31T16:35:18.563913Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:35:18.590184Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:18.590284Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:18.591828Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17076, node 3 2025-03-31T16:35:18.644389Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:18.644413Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:18.644422Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:18.644572Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11580 TClient is connected to server localhost:11580 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-31T16:35:19.180993Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:35:19.218800Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:19.285895Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:35:19.504672Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:19.589024Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:22.231411Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013155586683503:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:22.231505Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:22.284218Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:35:22.316605Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:35:22.353424Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:35:22.427195Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:35:22.469004Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:35:22.507640Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:35:22.561886Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013155586684022:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:22.561952Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013155586684027:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:22.561987Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:22.565158Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:35:22.575037Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488013155586684029:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:35:22.649027Z node 3 :TX_PROXY ERROR: Actor# [3:7488013155586684082:3453] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:23.427001Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488013138406812536:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:23.427111Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:35:23.705159Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:35:23.706738Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-31T16:35:23.709214Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-31T16:35:26.211591Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438926250, txId: 281474976715710] shutting down 2025-03-31T16:35:26.477239Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438926516, txId: 281474976715713] shutting down 2025-03-31T16:35:26.796561Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438926831, txId: 281474976715716] shutting down 2025-03-31T16:35:27.131315Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438927167, txId: 281474976715719] shutting down 2025-03-31T16:35:27.447500Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438927482, txId: 281474976715722] shutting down 2025-03-31T16:35:27.477877Z node 3 :KQP_PROXY WARN: [TQueryBase] [TGetScriptExecutionResultQueryActor] TraceId: d4dacc83-1c247e80-2882c62c-dd79a4ac, State: Get results info, Finish with NOT_FOUND, Issues: {
: Error: Results are expired }, SessionId: ydb://session/3?node_id=3&id=NTkwODM0NmYtNjIyODZkNzMtNThiOTI3Ny1kNTlhYThhNg==, TxId: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::SeveralCTAS-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 12934, MsgBus: 6640 2025-03-31T16:35:13.122758Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488013117101998512:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:13.122848Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00142e/r3tmp/tmp6rR0qA/pdisk_1.dat 2025-03-31T16:35:13.535000Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12934, node 1 2025-03-31T16:35:13.568537Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:13.568614Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:13.603151Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:35:13.663725Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:13.663763Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:13.663771Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:13.663887Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6640 TClient is connected to server localhost:6640 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:14.158135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:14.191676Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:35:14.203855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:35:14.367922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:14.521672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:14.593559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:16.355062Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013129986902179:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:16.355189Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:16.715479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:35:16.787427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:35:16.832978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:35:16.904780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:35:16.942446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:35:16.981666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:35:17.034018Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013134281869990:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:17.034103Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:17.034171Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013134281869995:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:17.037796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:35:17.047928Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488013134281869997:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:35:17.140439Z node 1 :TX_PROXY ERROR: Actor# [1:7488013134281870053:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:18.021404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:35:18.110859Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488013138576837725:2510], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
:2:17: Error: At function: KiReadTable!
:2:17: Error: Cannot find table 'db.[/Root/.sys/show_create]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:35:18.112210Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODhmYmFjMzQtN2JhYTI1OWEtM2FkYmMxYmEtNTU1NTE5MDc=, ActorId: [1:7488013134281870341:2496], ActorState: ExecuteState, TraceId: 01jqpgxcdb0y8tacfekn59bwtv, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:35:18.127424Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488013117101998512:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:18.127492Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:35:18.151660Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488013138576837752:2513], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
:2:17: Error: At function: KiReadTable!
:2:17: Error: Cannot find table 'db.[/Root/.sys/show_create]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:35:18.152091Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODhmYmFjMzQtN2JhYTI1OWEtM2FkYmMxYmEtNTU1NTE5MDc=, ActorId: [1:7488013134281870341:2496], ActorState: ExecuteState, TraceId: 01jqpgxcf908ewx1vdy0jm6qm1, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 21645, MsgBus: 22634 2025-03-31T16:35:19.001441Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488013141903157779:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:19.001506Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00142e/r3tmp/tmpkCpthc/pdisk_1.dat 2025-03-31T16:35:19.113018Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21645, node 2 2025-03-31T16:35:19.168168Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:19.168303Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:19.175661Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:35:19.199560Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:19.199580Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:19.199587Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:19.199691Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22634 TClient is connected to server localhost:22634 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:19.661753Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:22.166025Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013154788060314:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:22.166097Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013154788060333:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:22.166161Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:22.169740Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:35:22.178276Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488013154788060343:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:35:22.272118Z node 2 :TX_PROXY ERROR: Actor# [2:7488013154788060394:2336] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:22.343771Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:1, at schemeshard: 72057594046644480 2025-03-31T16:35:22.513105Z node 2 :TX_PROXY ERROR: Actor# [2:7488013154788060676:2502] txid# 281474976715665, issues: { message: "Check failed: path: \'/Root/.tmp/sessions\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:22.515148Z node 2 :TX_PROXY ERROR: Actor# [2:7488013154788060683:2507] txid# 281474976715666, issues: { message: "Check failed: path: \'/Root/.tmp/sessions/ODUzNWIwMzQtYmViMzQ2Yy1jYThjODIwYy0zMzIzNGU2Zg==\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:22.517794Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:35:22.674983Z node 2 :TX_PROXY ERROR: Actor# [2:7488013154788060870:2620] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.tmp/sessions\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:22.676935Z node 2 :TX_PROXY ERROR: Actor# [2:7488013154788060877:2625] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.tmp/sessions/ODUzNWIwMzQtYmViMzQ2Yy1jYThjODIwYy0zMzIzNGU2Zg==\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:22.679805Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 1684, MsgBus: 2379 2025-03-31T16:35:24.085334Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488013165198695514:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:24.085388Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00142e/r3tmp/tmpRG46dq/pdisk_1.dat 2025-03-31T16:35:24.222538Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:35:24.246872Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:24.246960Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:24.248999Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 
TServer::EnableGrpc on GrpcPort 1684, node 3 2025-03-31T16:35:24.318855Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:24.318881Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:24.318889Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:24.319022Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2379 TClient is connected to server localhost:2379 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:24.874230Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:27.458950Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013178083598058:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:27.459055Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:27.459342Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013178083598070:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:27.466048Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:35:27.480962Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488013178083598072:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:35:27.570058Z node 3 :TX_PROXY ERROR: Actor# [3:7488013178083598123:2334] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:27.652868Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:1, at schemeshard: 72057594046644480 2025-03-31T16:35:27.889067Z node 3 :TX_PROXY ERROR: Actor# [3:7488013178083598407:2501] txid# 281474976715665, issues: { message: "Check failed: path: \'/Root/.tmp/sessions\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:27.891724Z node 3 :TX_PROXY ERROR: Actor# [3:7488013178083598415:2507] txid# 281474976715666, issues: { message: "Check failed: path: \'/Root/.tmp/sessions/NzAyNGEzM2QtZjlmMWIwZmQtOTYyNDllZjQtOWI3NjExZmI=\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:27.894564Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:35:28.070068Z node 3 :TX_PROXY ERROR: Actor# [3:7488013182378565898:2620] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.tmp/sessions\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:28.072293Z node 3 :TX_PROXY ERROR: Actor# [3:7488013182378565905:2625] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.tmp/sessions/NzAyNGEzM2QtZjlmMWIwZmQtOTYyNDllZjQtOWI3NjExZmI=\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:28.075049Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ForbidInteractiveTxOnImplicitSession [GOOD] Test command err: Trying to start YDB, gRPC: 30929, MsgBus: 15575 2025-03-31T16:35:08.863148Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:35:08.863383Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:35:08.863550Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001455/r3tmp/tmpZBVSeg/pdisk_1.dat TServer::EnableGrpc on GrpcPort 30929, node 1 2025-03-31T16:35:09.343753Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:35:09.348251Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:09.348330Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:09.348371Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:09.348668Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:35:09.386256Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:09.386449Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:09.400400Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15575 TClient is connected to server localhost:15575 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:09.731028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:09.774364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:10.162259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:35:10.569071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:10.901547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:11.717693Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1809:3405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:11.718075Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:11.744697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:35:11.969131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:35:12.223612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:35:12.489246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:35:12.750016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:35:13.089566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:35:13.378189Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2395:3856], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:13.378287Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:13.378719Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2400:3861], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:13.384500Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:35:13.566597Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2402:3863], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:35:13.617048Z node 1 :TX_PROXY ERROR: Actor# [1:2467:3909] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 12128, MsgBus: 62933 2025-03-31T16:35:15.624270Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488013123880859456:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:15.624475Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001455/r3tmp/tmp1UZD8x/pdisk_1.dat 2025-03-31T16:35:15.751168Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:35:15.780924Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:15.781050Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:15.782468Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12128, node 2 2025-03-31T16:35:15.907448Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:15.907482Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:15.907491Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:15.907643Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62933 TClient is connected to server localhost:62933 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:16.373481Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:16.386645Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:35:16.465356Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:35:16.616042Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-31T16:35:16.678853Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:18.923042Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013136765763094:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default ... undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:35:19.013806Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:35:19.051767Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:35:19.090590Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:35:19.127668Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:35:19.190901Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013141060730902:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:19.190988Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:19.191166Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013141060730907:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:19.195792Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:35:19.207637Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488013141060730909:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:35:19.297689Z node 2 :TX_PROXY ERROR: Actor# [2:7488013141060730964:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:20.276957Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:35:20.314125Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-31T16:35:20.386895Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-31T16:35:20.624427Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488013123880859456:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:20.624532Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 24338, MsgBus: 22029 2025-03-31T16:35:23.322272Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488013158385459701:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:23.322316Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001455/r3tmp/tmpx1af6p/pdisk_1.dat 2025-03-31T16:35:23.509911Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:35:23.527140Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:23.527233Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:23.531321Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24338, node 3 2025-03-31T16:35:23.608813Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:23.608838Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:23.608846Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:23.608992Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22029 TClient is connected to server localhost:22029 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:24.090427Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:24.100391Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:24.108234Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:24.195525Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-31T16:35:24.381888Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:35:24.451172Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-31T16:35:26.892072Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013171270363358:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:26.892170Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:26.950370Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:35:26.981859Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:35:27.052726Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:35:27.087779Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:35:27.121393Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:35:27.158268Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:35:27.213978Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013175565331165:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:27.214060Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:27.214151Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013175565331170:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:27.217366Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:35:27.228780Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488013175565331173:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:35:27.309215Z node 3 :TX_PROXY ERROR: Actor# [3:7488013175565331226:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:28.326387Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488013158385459701:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:28.326463Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpQueryServiceScripts::ExecuteScriptStatsProfile [GOOD] >> KqpQueryServiceScripts::ExecuteScriptWithCancelAfter ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_BadTransactions [GOOD] Test command err: Trying to start YDB, gRPC: 24578, MsgBus: 25536 2025-03-31T16:35:11.639231Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488013107114885885:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:11.639382Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001443/r3tmp/tmpbk0JTN/pdisk_1.dat 2025-03-31T16:35:12.018710Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24578, node 1 2025-03-31T16:35:12.062672Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:12.062859Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:12.065808Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:35:12.096278Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:12.096307Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:12.096316Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:12.096448Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25536 TClient is connected to server localhost:25536 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:12.591431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:12.623849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:12.800088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:12.936096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:13.000137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:14.767053Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013119999789539:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:14.767177Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:15.129650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:35:15.164987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:35:15.203587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:35:15.237409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:35:15.305578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:35:15.347101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:35:15.402413Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013124294757353:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:15.402499Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:15.402708Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013124294757358:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:15.406343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:35:15.416585Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488013124294757360:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:35:15.511181Z node 1 :TX_PROXY ERROR: Actor# [1:7488013124294757412:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 10964, MsgBus: 6402 2025-03-31T16:35:17.276264Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488013132984617364:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:17.276382Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001443/r3tmp/tmpST6wlM/pdisk_1.dat 2025-03-31T16:35:17.415456Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:35:17.430051Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:17.430138Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:17.431919Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10964, node 2 2025-03-31T16:35:17.519828Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:17.519847Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:17.519855Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:17.519967Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6402 TClient is connected to server localhost:6402 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:17.983007Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:35:17.989775Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:35:17.997009Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:18.073339Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:18.210677Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:18.310463Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:20.466231Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013145869521019:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T ... LASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:35:26.717219Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:35:26.717243Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:35:26.717331Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T16:35:26.717353Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T16:35:26.757216Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-31T16:35:26.757538Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-31T16:35:26.761800Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-31T16:35:26.764526Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-31T16:35:26.766812Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-31T16:35:26.771062Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-31T16:35:26.772436Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-31T16:35:26.777388Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-31T16:35:26.777906Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-31T16:35:26.782134Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-31T16:35:26.788797Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-31T16:35:26.892702Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: 
[3:7488013173217882059:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:26.892798Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:26.893018Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013173217882064:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:26.896740Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-03-31T16:35:26.907796Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488013173217882066:2466], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-03-31T16:35:26.968175Z node 3 :TX_PROXY ERROR: Actor# [3:7488013173217882119:2688] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:27.122270Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=281474976715663;tx_id=281474976715663;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715663; 2025-03-31T16:35:27.123290Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037893;self_id=[3:7488013173217881578:2343];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037893;local_tx_no=13;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037889,72075186224037897;receive=72075186224037894; 2025-03-31T16:35:27.123413Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037893;self_id=[3:7488013173217881578:2343];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037893;local_tx_no=14;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037889,72075186224037897;receive=72075186224037894; 2025-03-31T16:35:27.123815Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=281474976715663;tx_id=281474976715663;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715663; 2025-03-31T16:35:27.124271Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=281474976715663;tx_id=281474976715663;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715663; 2025-03-31T16:35:27.125378Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037893;self_id=[3:7488013173217881578:2343];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037893;local_tx_no=16;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037897;receive=72075186224037889; 2025-03-31T16:35:27.125455Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037893;self_id=[3:7488013173217881578:2343];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037893;local_tx_no=17;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037897;receive=72075186224037889; 2025-03-31T16:35:27.125855Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=281474976715663;tx_id=281474976715663;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715663; 2025-03-31T16:35:27.519079Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MzUwNDViMWQtNzU1MDQ0ZGYtMjc4ZDI4NGQtMjM1OTMyYjY=, ActorId: [3:7488013177512849551:2496], ActorState: ExecuteState, TraceId: 01jqpgxndb1tpzjdxdk1tarj9v, Create QueryResponse for error on request, msg: Write transactions between column and row tables are disabled at current time. 2025-03-31T16:35:27.614394Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=OGUyYTNmYzEtYjNiYTlkODYtODJkMzZjYzctNzBjMTU1ZWE=, ActorId: [3:7488013177512849572:2504], ActorState: ExecuteState, TraceId: 01jqpgxnn63whv851xz0zyn6vw, Create QueryResponse for error on request, msg: Write transactions between column and row tables are disabled at current time. 
2025-03-31T16:35:27.770003Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZTk4NzJkNzEtNDQ5ZDhkYi0zMWUxNGE5ZC1lM2EwNzk2Ng==, ActorId: [3:7488013177512849591:2511], ActorState: ExecuteState, TraceId: 01jqpgxnr534q24zvbwbphdn7q, Create QueryResponse for error on request, msg: Write transactions between column and row tables are disabled at current time. 2025-03-31T16:35:27.922814Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488013156038011547:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:27.922896Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:35:28.077674Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZTkwYzI3OGQtMzUxYmVjNzAtYzFmMGI1OGEtOTBmYzEzODY=, ActorId: [3:7488013177512849614:2520], ActorState: ExecuteState, TraceId: 01jqpgxnx375td63kgb49y9bss, Create QueryResponse for error on request, msg: Write transactions between column and row tables are disabled at current time. 2025-03-31T16:35:28.267674Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=OTIyNGQzNGEtMzkxM2U4NDEtOWU2MjVlZDYtODNjODNlYTA=, ActorId: [3:7488013181807816932:2528], ActorState: ExecuteState, TraceId: 01jqpgxp6tcexjs4n6byxaygf4, Create QueryResponse for error on request, msg: Write transactions between column and row tables are disabled at current time. 2025-03-31T16:35:28.458469Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=281474976715671;tx_id=281474976715671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715671; 2025-03-31T16:35:28.458869Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=281474976715671;tx_id=281474976715671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715671; 2025-03-31T16:35:28.459800Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;self_id=[3:7488013173217881553:2337];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037889;local_tx_no=25;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037893,72075186224037897;receive=72075186224037894; 2025-03-31T16:35:28.459875Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;self_id=[3:7488013173217881553:2337];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037889;local_tx_no=26;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037893,72075186224037897;receive=72075186224037894; 2025-03-31T16:35:28.460027Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;self_id=[3:7488013173217881553:2337];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037889;local_tx_no=28;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037897;receive=72075186224037893; 2025-03-31T16:35:28.460086Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;self_id=[3:7488013173217881553:2337];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037889;local_tx_no=29;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037897;receive=72075186224037893; 2025-03-31T16:35:28.460550Z node 3 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=281474976715671;tx_id=281474976715671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715671; 2025-03-31T16:35:28.461403Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=281474976715671;tx_id=281474976715671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715671; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_background_cleaning/unittest >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCleanIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:224:2060] recipient: [1:218:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:224:2060] recipient: [1:218:2142] Leader for TabletID 72057594046678944 is [1:235:2153] sender: [1:236:2060] recipient: [1:218:2142] 2025-03-31T16:33:27.828193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:33:27.828378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:33:27.828424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:33:27.828460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:33:27.829462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:33:27.829525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:33:27.829617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:33:27.829741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:33:27.831211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:33:27.909071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:33:27.909190Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:33:27.924734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:33:27.924862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:33:27.925038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:33:27.935581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:33:27.935950Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:33:27.940226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:27.940650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:33:27.950319Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:27.959588Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:33:27.959694Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:27.959818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:33:27.959873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:33:27.959958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:33:27.960094Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:33:27.967902Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:235:2153] sender: [1:347:2060] recipient: [1:17:2064] 2025-03-31T16:33:28.128702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:33:28.130376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:28.131344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:33:28.132728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:33:28.132824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:28.136214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:28.136312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:33:28.136465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:28.136537Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:33:28.136590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:33:28.136625Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:33:28.139295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:28.139356Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:33:28.139405Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:33:28.141507Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:28.141563Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:28.141644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:28.141711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:33:28.146529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:33:28.149204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:33:28.150462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:33:28.151817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:28.152015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 243 RawX2: 4294969454 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:33:28.152082Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:28.153688Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:33:28.153771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:28.155002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:33:28.155157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:33:28.158836Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:33:28.158904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:33:28.159149Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:28.159213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:314:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:33:28.159716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:28.159916Z node 1 
:FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:33:28.160040Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:33:28.160083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:28.160131Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:33:28.160184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:28.160252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:33:28.160307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:28.160347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:33:28.160383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:33:28.160459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:33:28.160522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:33:28.160565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:33:28.162880Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:33:28.163015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:33:28.163054Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
ient [7:235:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:35:26.219659Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:35:26.219740Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:235:2153], Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:35:26.219769Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:35:26.592174Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:35:26.592253Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:35:26.592327Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:235:2153], Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:35:26.592357Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:35:26.957366Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:35:26.957446Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:35:26.957519Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:235:2153], Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:35:26.957549Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:35:27.319419Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:35:27.319503Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:35:27.319587Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:235:2153], Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:35:27.319617Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:35:27.686069Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:35:27.686145Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:35:27.686213Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:235:2153], Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:35:27.686243Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:35:28.050168Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:235:2153]: 
NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:35:28.050278Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:35:28.050387Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:235:2153], Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:35:28.050420Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:35:28.445063Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:35:28.445143Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:35:28.445210Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:235:2153], Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:35:28.445233Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:35:28.807437Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:35:28.807513Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:35:28.807587Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:235:2153], Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:35:28.807616Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:35:29.162956Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:35:29.163062Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:35:29.163184Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:235:2153], Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:35:29.163220Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:35:29.554973Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:35:29.555065Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:35:29.555219Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:235:2153], Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:35:29.555265Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:35:29.599387Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [7:1081:2839], Recipient [7:235:2153]: NKikimrSchemeOp.TDescribePath 
Path: "/MyRoot/tmp/TempTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2025-03-31T16:35:29.599481Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-03-31T16:35:29.599638Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/tmp/TempTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-31T16:35:29.599877Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/tmp/TempTable" took 232us result status StatusPathDoesNotExist 2025-03-31T16:35:29.600070Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/tmp/TempTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/tmp/TempTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-03-31T16:35:29.600648Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [7:1082:2840], Recipient [7:235:2153]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/tmp" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2025-03-31T16:35:29.600710Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-03-31T16:35:29.600824Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/tmp" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-31T16:35:29.601016Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/tmp" took 192us result status StatusPathDoesNotExist 2025-03-31T16:35:29.601165Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/tmp\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/tmp" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-03-31T16:35:29.601684Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [7:1083:2841], Recipient [7:235:2153]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/tmp/TempTable/ValueIndex" Options { ShowPrivateTable: true } 
2025-03-31T16:35:29.601736Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-03-31T16:35:29.601830Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/tmp/TempTable/ValueIndex" Options { ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-31T16:35:29.602008Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/tmp/TempTable/ValueIndex" took 181us result status StatusPathDoesNotExist 2025-03-31T16:35:29.602153Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/tmp/TempTable/ValueIndex\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/tmp/TempTable/ValueIndex" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> KqpQueryService::TableSink_OltpUpsert [GOOD] >> KqpQueryService::TableSink_OltpUpdate |97.0%| [TA] $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/test-results/unittest/{meta.json ... results_accumulator.log} |97.0%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/test-results/unittest/{meta.json ... results_accumulator.log} >> TTxDataShardMiniKQL::WriteKeyTooLarge >> KqpQueryService::ExecuteQueryInteractiveTxCommitWithQuery [GOOD] >> KqpQueryService::ExecuteQueryMultiResult >> KqpQueryService::FlowControllOnHugeLiteralAsTable [GOOD] >> KqpQueryService::FlowControllOnHugeRealTable+LongRow >> TTxDataShardMiniKQL::WriteEraseRead >> TTxDataShardMiniKQL::Write >> TTxDataShardMiniKQL::ReadSpecialColumns >> KqpQueryService::ExecStats [GOOD] >> KqpQueryService::ExecStatsPlan >> KqpQueryService::CheckIsolationLevelFroPerStatementMode [GOOD] >> KqpQueryService::AlterTable_DropNotNull_WithSetFamily_Valid >> AnalyzeColumnshard::AnalyzeRebootSaInAggregate [GOOD] >> KqpQueryServiceScripts::Tcl [GOOD] >> KqpQueryService::Explain [GOOD] >> KqpQueryService::ClosedSessionRemovedWhileActiveWithQuery [GOOD] >> KqpQueryService::CloseSessionsWithLoad >> TTxDataShardMiniKQL::WriteEraseRead [GOOD] >> TTxDataShardMiniKQL::WriteAndReadMultipleShards >> TTxDataShardMiniKQL::ReadSpecialColumns [GOOD] >> TTxDataShardMiniKQL::SelectRange >> EscapingBasics::EncloseSecretShouldWork [GOOD] >> EscapingBasics::EncloseAndEscapeStringShouldWork [GOOD] >> TTxDataShardMiniKQL::Write [GOOD] >> TTxDataShardMiniKQL::TableStats >> EscapingBasics::HideSecretsShouldWork [GOOD] >> IssuesTextFiltering::ShouldRemoveDatabasePath [GOOD] >> EscapingBasics::HideSecretsOverEncloseSecretShouldWork [GOOD] >> EscapingBasics::EscapeStringShouldWork [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootSaInAggregate [GOOD] Test command err: 2025-03-31T16:32:59.330938Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:32:59.331221Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:32:59.331302Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ecd/r3tmp/tmpJmeIAC/pdisk_1.dat 2025-03-31T16:32:59.707566Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28452, node 1 2025-03-31T16:32:59.951851Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:32:59.951920Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:32:59.951959Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:32:59.959746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:32:59.960347Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:33:00.051308Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:00.051460Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:00.068409Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25761 2025-03-31T16:33:00.626892Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:33:03.758214Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-31T16:33:03.794093Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:03.794220Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:03.824065Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:33:03.826911Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:33:04.056048Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:33:04.056701Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:33:04.057247Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:33:04.057380Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:33:04.057659Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:33:04.057743Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:33:04.057830Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:33:04.057956Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:33:04.058053Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:33:04.224311Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:04.224419Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:04.237651Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:33:04.385491Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:33:04.441481Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-31T16:33:04.441607Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-31T16:33:04.475283Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-31T16:33:04.476030Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-31T16:33:04.476258Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-31T16:33:04.476328Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-31T16:33:04.476440Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-31T16:33:04.476499Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-31T16:33:04.476553Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-31T16:33:04.476619Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-31T16:33:04.477168Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-31T16:33:04.501816Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-31T16:33:04.501937Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1872:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-31T16:33:04.509696Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2608] 2025-03-31T16:33:04.517755Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1908:2617] 2025-03-31T16:33:04.518547Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1908:2617], schemeshard id = 72075186224037897 2025-03-31T16:33:04.521181Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-31T16:33:04.557097Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-31T16:33:04.557166Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-31T16:33:04.557246Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-31T16:33:04.573480Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-31T16:33:04.586457Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-31T16:33:04.586627Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-31T16:33:04.824343Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-31T16:33:05.010088Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-31T16:33:05.137010Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-31T16:33:06.296159Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2241:3074], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:06.296323Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:06.322862Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-31T16:33:06.713985Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:33:06.714307Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:33:06.714711Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:33:06.714844Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:33:06.714934Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:33:06.715068Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:33:06.715234Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:33:06.715360Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:33:06.715473Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:33:06.715601Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:33:06.715749Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:33:06.715885Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:33:06.755505Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2398:2889];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:33:06.755616Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2398:2889];tablet_id=72075186224037900;process= ... ATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:8749:6644] 2025-03-31T16:35:28.806322Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:8749:6644], schemeshard id = 72075186224037897 2025-03-31T16:35:28.821742Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-31T16:35:28.821824Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-03-31T16:35:28.883348Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:8753:6647] 2025-03-31T16:35:28.884112Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:4082:3312] , Record { OperationId: "operationId" Tables { PathId { OwnerId: 72075186224037897 LocalId: 4 } } Types: TYPE_COUNT_MIN_SKETCH } 2025-03-31T16:35:28.884175Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. Update existing force traversal. OperationId operationId , ReplyToActorId [1:4082:3312] 2025-03-31T16:35:28.884245Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Complete 2025-03-31T16:35:29.436760Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 0 2025-03-31T16:35:29.436851Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-03-31T16:35:29.447780Z node 2 :STATISTICS DEBUG: Event round 3 is different from the current 0 2025-03-31T16:35:29.447854Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-03-31T16:35:30.196332Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-31T16:35:30.196424Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-03-31T16:35:30.196469Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-03-31T16:35:31.534512Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:35:31.534679Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-03-31T16:35:31.534731Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-31T16:35:31.535570Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-31T16:35:31.551840Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-31T16:35:31.552680Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-31T16:35:31.552775Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-31T16:35:31.553347Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-03-31T16:35:31.567736Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-03-31T16:35:31.567916Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 4, current Round: 0 2025-03-31T16:35:31.568767Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8830:6688], server id = [2:8835:6693], tablet id = 72075186224037899, status = OK 2025-03-31T16:35:31.568902Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8830:6688], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T16:35:31.569096Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8831:6689], server id = [2:8836:6694], tablet id = 72075186224037900, status = OK 2025-03-31T16:35:31.569148Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8831:6689], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T16:35:31.571208Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8832:6690], server id = [2:8838:6696], tablet id = 72075186224037901, status = OK 2025-03-31T16:35:31.571308Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8832:6690], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T16:35:31.571466Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8833:6691], server id = [2:8837:6695], tablet id = 72075186224037902, status = OK 2025-03-31T16:35:31.571516Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8833:6691], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T16:35:31.572342Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8834:6692], server id = [2:8840:6698], tablet id = 72075186224037903, status = OK 2025-03-31T16:35:31.572402Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8834:6692], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T16:35:31.573409Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-03-31T16:35:31.574332Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-03-31T16:35:31.574581Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8830:6688], server id = [2:8835:6693], tablet id = 72075186224037899 2025-03-31T16:35:31.574626Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:35:31.575621Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8831:6689], server id = [2:8836:6694], tablet id = 72075186224037900 2025-03-31T16:35:31.575654Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:35:31.576068Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-03-31T16:35:31.576373Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-03-31T16:35:31.576617Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8844:6702], server id = [2:8847:6705], tablet id = 72075186224037904, status = OK 2025-03-31T16:35:31.576703Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8844:6702], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T16:35:31.577034Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2025-03-31T16:35:31.577201Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8833:6691], server id = [2:8837:6695], tablet id = 72075186224037902 2025-03-31T16:35:31.577247Z 
node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:35:31.578034Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8846:6704], server id = [2:8848:6706], tablet id = 72075186224037905, status = OK 2025-03-31T16:35:31.578108Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8846:6704], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T16:35:31.578446Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8832:6690], server id = [2:8838:6696], tablet id = 72075186224037901 2025-03-31T16:35:31.578473Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:35:31.579285Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8834:6692], server id = [2:8840:6698], tablet id = 72075186224037903 2025-03-31T16:35:31.579332Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:35:31.579510Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8849:6707], server id = [2:8851:6709], tablet id = 72075186224037906, status = OK 2025-03-31T16:35:31.579574Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8849:6707], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T16:35:31.579897Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8850:6708], server id = [2:8853:6711], tablet id = 72075186224037907, status = OK 2025-03-31T16:35:31.579948Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8850:6708], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T16:35:31.580581Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8852:6710], server id = [2:8855:6713], tablet id = 72075186224037908, status = OK 2025-03-31T16:35:31.580638Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8852:6710], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T16:35:31.580826Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-03-31T16:35:31.581920Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-03-31T16:35:31.582268Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8844:6702], server id = [2:8847:6705], tablet id = 72075186224037904 2025-03-31T16:35:31.582297Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:35:31.582646Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-03-31T16:35:31.583062Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8846:6704], server id = [2:8848:6706], tablet id = 72075186224037905 2025-03-31T16:35:31.583110Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:35:31.583274Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-03-31T16:35:31.583406Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8849:6707], server id = [2:8851:6709], tablet id = 72075186224037906 2025-03-31T16:35:31.583429Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:35:31.583581Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-03-31T16:35:31.583626Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-03-31T16:35:31.583839Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-03-31T16:35:31.584060Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 
2025-03-31T16:35:31.584359Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-31T16:35:31.587278Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8850:6708], server id = [2:8853:6711], tablet id = 72075186224037907 2025-03-31T16:35:31.587319Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:35:31.587901Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8852:6710], server id = [2:8855:6713], tablet id = 72075186224037908 2025-03-31T16:35:31.587930Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:35:31.588310Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-03-31T16:35:31.612476Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=OWFiM2RjNDAtNWUyODcwNjEtNjQ5NWZmNmUtMjA0MjYzMzQ=, TxId: 2025-03-31T16:35:31.612563Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OWFiM2RjNDAtNWUyODcwNjEtNjQ5NWZmNmUtMjA0MjYzMzQ=, TxId: 2025-03-31T16:35:31.613245Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-31T16:35:31.627905Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-31T16:35:31.628009Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:4082:3312] >> Cache::Test1 [GOOD] >> Cache::Test2 [GOOD] >> Cache::Test3 [GOOD] >> Cache::Test4 [GOOD] >> TTxDataShardMiniKQL::WriteKeyTooLarge [GOOD] >> TTxDataShardMiniKQL::WriteValueTooLarge >> KqpQueryService::ExecuteQueryWithResourcePoolClassifier [GOOD] >> KqpQueryService::ExecuteQueryScalar |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> IssuesTextFiltering::ShouldRemoveDatabasePath [GOOD] |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> EscapingBasics::EscapeStringShouldWork [GOOD] |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> Cache::Test2 [GOOD] |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> EscapingBasics::EncloseAndEscapeStringShouldWork [GOOD] |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> Cache::Test4 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::Explain [GOOD] Test command err: Trying to start YDB, gRPC: 29715, MsgBus: 28727 2025-03-31T16:35:16.243341Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488013129432194530:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:16.243391Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001409/r3tmp/tmpdFXrih/pdisk_1.dat 2025-03-31T16:35:16.648439Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:35:16.675717Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:16.675809Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:16.678837Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29715, node 1 2025-03-31T16:35:16.819054Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:16.819098Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:16.819110Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:16.819221Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28727 TClient is connected to server localhost:28727 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:17.354027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:17.371943Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:35:17.393665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:17.527844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:17.695697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:17.774086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:19.553130Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013142317098209:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:19.553257Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:19.955966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:35:20.001959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:35:20.040101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:35:20.077756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:35:20.108796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:35:20.182904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:35:20.240508Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013146612066019:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:20.240583Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:20.240785Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013146612066025:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:20.245597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:35:20.256875Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488013146612066027:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:35:20.326181Z node 1 :TX_PROXY ERROR: Actor# [1:7488013146612066081:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:21.243380Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488013129432194530:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:21.243457Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:35:21.479507Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013150907033725:2510], DatabaseId: /Root, PoolId: another_pool_id, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool another_pool_id not found or you don't have access permissions } 2025-03-31T16:35:21.479539Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013150907033727:2512], DatabaseId: /Root, PoolId: another_pool_id, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool another_pool_id not found or you don't have access permissions } 2025-03-31T16:35:21.479588Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool another_pool_id, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool another_pool_id not found or you don't have access permissions } 2025-03-31T16:35:21.479645Z node 1 :KQP_WORKLOAD_SERVICE ERROR: [WorkloadService] [TPoolResolverActor] ActorId: [1:7488013150907033726:2511], DatabaseId: /Root, PoolId: another_pool_id, SessionId: ydb://session/3?node_id=1&id=ZTVhMjkzNWItNTVjNjMwMWYtYTM3YjNkNTUtZjgwNjYwZQ==, Failed to fetch pool info NOT_FOUND, issues: {
: Error: Resource pool another_pool_id not found or you don't have access permissions } 2025-03-31T16:35:21.479755Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolResolverActor] ActorId: [1:7488013150907033726:2511], DatabaseId: /Root, PoolId: another_pool_id, SessionId: ydb://session/3?node_id=1&id=ZTVhMjkzNWItNTVjNjMwMWYtYTM3YjNkNTUtZjgwNjYwZQ==, Failed to resolve pool, NOT_FOUND, issues: {
: Error: Failed to resolve pool id another_pool_id subissue: {
: Error: Resource pool another_pool_id not found or you don't have access permissions } } 2025-03-31T16:35:21.479861Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply continue error NOT_FOUND to [1:7488013150907033723:2509]: {
: Error: Failed to resolve pool id another_pool_id subissue: {
: Error: Resource pool another_pool_id not found or you don't have access permissions } } 2025-03-31T16:35:21.480034Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTVhMjkzNWItNTVjNjMwMWYtYTM3YjNkNTUtZjgwNjYwZQ==, ActorId: [1:7488013150907033723:2509], ActorState: ExecuteState, TraceId: 01jqpgxfr4ewqe9pndhpgdb6ap, Create QueryResponse for error on request, msg: Query failed during adding/waiting in workload pool 2025-03-31T16:35:21.480187Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [1:7488013150907033723:2509]: Pool another_pool_id not found Trying to start YDB, gRPC: 5358, MsgBus: 16302 2025-03-31T16:35:22.340274Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488013153055440579:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:22.340339Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001409/r3tmp/tmpN9i1zJ/pdisk_1.dat 2025-03-31T16:35:22.464416Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:35:22.485649Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:22.485955Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 5358, node 2 2025-03-31T16:35:22.489994Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:35:22.549041Z node 2 :NET_CLASSIFIER WARN: distributable config i ... 94046644480 waiting... 2025-03-31T16:35:22.975549Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:23.131385Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:23.202012Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:25.446517Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013165940344218:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:25.446612Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:25.499033Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:35:25.527630Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:35:25.554942Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:35:25.582314Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:35:25.611320Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:35:25.642513Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:35:25.685075Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013165940344726:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:25.685171Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:25.685212Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013165940344732:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:25.688415Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:35:25.697402Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488013165940344734:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:35:25.781955Z node 2 :TX_PROXY ERROR: Actor# [2:7488013165940344789:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 23136, MsgBus: 65374 2025-03-31T16:35:27.800553Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488013175483011269:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:27.800608Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001409/r3tmp/tmp9N2bqG/pdisk_1.dat 2025-03-31T16:35:27.909819Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23136, node 3 2025-03-31T16:35:27.956522Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:27.956636Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:27.957565Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:35:27.975627Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:27.975654Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:27.975662Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:27.975768Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65374 TClient is connected to server localhost:65374 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:28.396614Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:28.406315Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:35:28.495798Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:28.632531Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:35:28.695508Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-31T16:35:31.166698Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013192662882199:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:31.166795Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:31.220423Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:35:31.252004Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:35:31.285764Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:35:31.354454Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:35:31.395534Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:35:31.451160Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:35:31.514220Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013192662882711:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:31.514322Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:31.514506Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013192662882716:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:31.519051Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:35:31.532497Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488013192662882718:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:35:31.608710Z node 3 :TX_PROXY ERROR: Actor# [3:7488013192662882772:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::Tcl [GOOD] Test command err: Trying to start YDB, gRPC: 5043, MsgBus: 5742 2025-03-31T16:35:03.623543Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488013073048482540:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:03.623636Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001458/r3tmp/tmpuwnqUc/pdisk_1.dat 2025-03-31T16:35:04.018014Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5043, node 1 2025-03-31T16:35:04.033871Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:04.034891Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:35:04.036400Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:04.040126Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:35:04.040585Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:35:04.090200Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:04.090223Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:04.090230Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:04.090351Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5742 TClient is connected to server localhost:5742 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:04.586172Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:35:04.608034Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:35:04.622194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:04.763952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:35:04.943102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:35:05.017679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:06.688821Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013085933386218:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:06.688942Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:07.035195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:35:07.068929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:35:07.142733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:35:07.210998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:35:07.244247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:35:07.311610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:35:07.400042Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013090228354043:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:07.400114Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:07.400181Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013090228354048:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:07.404622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:35:07.419887Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488013090228354050:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:35:07.482962Z node 1 :TX_PROXY ERROR: Actor# [1:7488013090228354105:3457] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 64679, MsgBus: 4281 2025-03-31T16:35:09.018527Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488013097429069341:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:09.023212Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001458/r3tmp/tmpg0c1hC/pdisk_1.dat 2025-03-31T16:35:09.145368Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:35:09.171023Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected TServer::EnableGrpc on GrpcPort 64679, node 2 2025-03-31T16:35:09.171348Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:09.172532Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:35:09.241389Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:09.241423Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:09.241437Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:09.241580Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4281 TClient is connected to server localhost:4281 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:09.689556Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:35:09.693686Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:35:09.705999Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:09.768734Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:09.918613Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:09.998682Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 wai ... ch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:12.478991Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:35:12.488700Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488013110313973508:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:35:12.561326Z node 2 :TX_PROXY ERROR: Actor# [2:7488013110313973561:3438] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:13.404804Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:35:13.406034Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-31T16:35:13.407370Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-31T16:35:14.015301Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488013097429069341:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:14.015380Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 18103, MsgBus: 8579 2025-03-31T16:35:24.378490Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488013163946079820:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:24.378594Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001458/r3tmp/tmpc7Af94/pdisk_1.dat 2025-03-31T16:35:24.489640Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:35:24.518175Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:24.518282Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:24.524234Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18103, node 3 2025-03-31T16:35:24.588954Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:24.588974Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:24.588984Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:24.589132Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8579 TClient is connected to server localhost:8579 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:25.095357Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:25.113369Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:25.196997Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:25.369582Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:25.463299Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:27.912357Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013176830983483:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:27.912455Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:27.966594Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:35:28.004029Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:35:28.041764Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:35:28.076765Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:35:28.148103Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:35:28.188976Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:35:28.242525Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013181125951295:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:28.242645Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:28.243268Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013181125951300:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:28.248143Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:35:28.268229Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488013181125951302:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:35:28.362019Z node 3 :TX_PROXY ERROR: Actor# [3:7488013181125951357:3450] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:29.379070Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488013163946079820:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:29.379159Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:35:29.383076Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:35:29.387462Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-31T16:35:29.389594Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-31T16:35:29.763288Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7488013185420919251:2521], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:3:13: Error: At function: Commit!
:3:13: Error: COMMIT not supported inside YDB query, code: 2008 2025-03-31T16:35:29.765246Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NjFhNjYzNmUtZDY0YWVkYjAtM2UzM2EyYTMtMjQ1YWM2NjQ=, ActorId: [3:7488013185420919249:2520], ActorState: ExecuteState, TraceId: 01jqpgxqf045ny0r3eyk58g431, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-03-31T16:35:31.197208Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7488013194010854559:2744], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:3:13: Error: At function: Commit!
:3:13: Error: ROLLBACK not supported inside YDB query, code: 2008 2025-03-31T16:35:31.199727Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=Y2RlYTM0NzYtYzA5MzNiMWMtY2RkOGY1NTktYjQ1YTRhN2U=, ActorId: [3:7488013194010854556:2743], ActorState: ExecuteState, TraceId: 01jqpgxs6v5388bs92gqnd876h, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: >> KqpQueryService::DdlPermission [GOOD] >> KqpQueryService::DdlSecret >> TTxDataShardMiniKQL::SelectRange [GOOD] >> TTxDataShardMiniKQL::SelectRangeWithNotFullKey >> KqpQueryService::ExecuteQueryExplicitTxTLI [GOOD] >> KqpQueryService::ExecuteQueryInteractiveTx >> CompressExecutor::TestReorderedExecutor [GOOD] >> CompressExecutor::TestExecutorMemUsage >> KqpQueryService::TableSink_ReplaceFromSelectOlap [GOOD] >> TTxDataShardMiniKQL::WriteAndReadMultipleShards [GOOD] >> TTxDataShardMiniKQL::WriteAndReadMany >> TTxDataShardMiniKQL::TableStats [GOOD] >> TTxDataShardMiniKQL::TableStatsHistograms >> KqpQueryService::MaterializeTxResults [GOOD] >> KqpQueryService::MixedReadQueryWithoutStreamLookup >> SplitterBasic::LimitExceed >> KqpKv::ReadRows_ExternalBlobs+UseExtBlobsPrecharge [GOOD] >> KqpKv::ReadRows_ExternalBlobs-UseExtBlobsPrecharge >> SplitterBasic::LimitExceed [GOOD] >> TTxDataShardMiniKQL::SelectRangeWithNotFullKey [GOOD] >> Cache::Test5 >> EntityId::Order >> KqpQueryServiceScripts::TestPaging [GOOD] >> KqpQueryServiceScripts::TestFetchMoreThanLimit >> TTxDataShardMiniKQL::WriteValueTooLarge [GOOD] >> TTxDataShardMiniKQL::WriteLargeExternalBlob |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> SplitterBasic::LimitExceed [GOOD] >> EntityId::Order [GOOD] >> EntityId::MinId [GOOD] |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::SelectRangeWithNotFullKey [GOOD] Test command err: 2025-03-31T16:35:33.204433Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:35:33.205907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:35:33.205964Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:35:33.215109Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:35:33.215662Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:131:2154] 2025-03-31T16:35:33.216000Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:35:33.227826Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:35:33.276633Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:35:33.276931Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:35:33.280690Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-03-31T16:35:33.280771Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-03-31T16:35:33.280829Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-03-31T16:35:33.282057Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:35:33.283159Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 
2025-03-31T16:35:33.283231Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:197:2154] in generation 2 2025-03-31T16:35:33.371943Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:35:33.412978Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-03-31T16:35:33.415224Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:35:33.415396Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-03-31T16:35:33.415433Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-03-31T16:35:33.415469Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-03-31T16:35:33.415507Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:35:33.415803Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:35:33.415863Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:35:33.416887Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-03-31T16:35:33.417084Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-03-31T16:35:33.417148Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-31T16:35:33.417198Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:35:33.417251Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-03-31T16:35:33.417298Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-31T16:35:33.417356Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-31T16:35:33.417388Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-03-31T16:35:33.417442Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:35:33.417548Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:35:33.417609Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:35:33.419530Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-03-31T16:35:33.423818Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:131:2154]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nx\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\016\n\010__tablet\030\004 9\032\023\n\r__updateEpoch\030\004 :\032\020\n\n__updateNo\030\004 ;(\"J\014/Root/table1\222\002\013\th\020\000\000\000\000\000\000\020\r" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-03-31T16:35:33.423890Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T16:35:33.423988Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-31T16:35:33.424171Z node 1 :TX_DATASHARD TRACE: 
Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-03-31T16:35:33.424216Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-03-31T16:35:33.424272Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-03-31T16:35:33.424319Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-31T16:35:33.424365Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-03-31T16:35:33.424409Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-03-31T16:35:33.424439Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-03-31T16:35:33.424786Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-03-31T16:35:33.424836Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-03-31T16:35:33.424897Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-03-31T16:35:33.424936Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-03-31T16:35:33.424978Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-03-31T16:35:33.425003Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-03-31T16:35:33.425047Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-03-31T16:35:33.425088Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-03-31T16:35:33.425117Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-03-31T16:35:33.441995Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-03-31T16:35:33.442060Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-03-31T16:35:33.442094Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-03-31T16:35:33.442146Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-03-31T16:35:33.442230Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-03-31T16:35:33.442743Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:35:33.442796Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:35:33.442870Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-03-31T16:35:33.442985Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:131:2154]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-03-31T16:35:33.443016Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-31T16:35:33.443155Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-03-31T16:35:33.443213Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-31T16:35:33.443254Z node 1 :TX_DATASHARD TRACE: 
Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-03-31T16:35:33.443288Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-03-31T16:35:33.447167Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-03-31T16:35:33.447247Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:35:33.447489Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:35:33.447530Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:35:33.447584Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-31T16:35:33.447620Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:35:33.447650Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-31T16:35:33.447698Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-03-31T16:35:33.447730Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-03-31T16:35:33.447792Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-31T16:35:33.447831Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-03-31T16:35:33.447862Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-03-31T16:35:33.447897Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-03-31T16:35:33.448036Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-03-31T16:35:33.448067Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-31T16:35:33.448118Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-03-31T16:35:33.448140Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-03-31T16:35:33.448162Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-03-31T16:35:33.448216Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-31T16:35:33.448240Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-03-31T16:35:33.448293Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-31T16:35:33.448330Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-03-31T16:35:33.448383Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-03-31T16:35:33.448434Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-03-31T16:35:33.448467Z node 1 :TX_DATASHARD TR ... 
592794Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-03-31T16:35:35.592840Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:6] at 9437184 on unit FinishPropose 2025-03-31T16:35:35.592907Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:35:35.597613Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269551617, Sender [3:99:2134], Recipient [3:235:2228]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 99 RawX2: 12884904022 } 2025-03-31T16:35:35.597683Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvGetShardState 2025-03-31T16:35:35.597978Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [3:303:2285], Recipient [3:235:2228]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:35:35.598016Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:35:35.598061Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [3:302:2284], serverId# [3:303:2285], sessionId# [0:0:0] 2025-03-31T16:35:35.598215Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [3:99:2134], Recipient [3:235:2228]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 99 RawX2: 12884904022 } TxBody: "\032\351\002\037\010\0021\010key1\010key2\nvalue\005\205\n\205\002\205\004\206\205\006\207\203\004\207\203\001H\207\203\001H\006\n\016\203\014\020List$Truncated\002\205\004\205\002?\022\002\205\000\034MyReads MyWrites\205\004\205\002?\022\002\206\202\024Reply\024Write?\030\205\002\206\203\010\002 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\024)\211\026?\022\203\005\004\200\205\006\203\004\203\004\203\004\006\n\016\213\004\203\004\207\203\001H\213\002\203\004\203\004\203\010\203\010\203\004\206\203\014\203\014,SelectRange\000\003?* h\020\000\000\000\000\000\000\016\000\000\000\000\000\000\000?\014\005?2\003?,D\003?.F\003?0p\007\013?:\003?4e\005\'?8\003\013?>\003?\000\003?@\000\003?B\000\006\004?F\003\203\014\000\003\203\014\000\003\003?H\000\377\007\002\000\005?\032\005?\026?r\000\005?\030\003\005? 
\005?\034?r\000\006\000?\036\003?x\005?&\006\0 2025-03-31T16:35:35.609624Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T16:35:35.609715Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-31T16:35:35.610512Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:8] at 9437184 on unit CheckDataTx 2025-03-31T16:35:35.610634Z node 3 :TX_DATASHARD TRACE: Execution status for [0:8] at 9437184 is Executed 2025-03-31T16:35:35.610679Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 9437184 executing on unit CheckDataTx 2025-03-31T16:35:35.610754Z node 3 :TX_DATASHARD TRACE: Add [0:8] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-31T16:35:35.610801Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:8] at 9437184 on unit BuildAndWaitDependencies 2025-03-31T16:35:35.610852Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-03-31T16:35:35.610918Z node 3 :TX_DATASHARD TRACE: Activated operation [0:8] at 9437184 2025-03-31T16:35:35.610965Z node 3 :TX_DATASHARD TRACE: Execution status for [0:8] at 9437184 is Executed 2025-03-31T16:35:35.610989Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 9437184 executing on unit BuildAndWaitDependencies 2025-03-31T16:35:35.611015Z node 3 :TX_DATASHARD TRACE: Add [0:8] at 9437184 to execution unit ExecuteDataTx 2025-03-31T16:35:35.611041Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:8] at 9437184 on unit ExecuteDataTx 2025-03-31T16:35:35.619871Z node 3 :TX_DATASHARD TRACE: Executed operation [0:8] at tablet 9437184 with status COMPLETE 2025-03-31T16:35:35.619967Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for [0:8] at 9437184: {NSelectRow: 0, NSelectRange: 1, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 2, SelectRangeBytes: 31, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-03-31T16:35:35.620027Z node 3 :TX_DATASHARD TRACE: Execution status for [0:8] at 9437184 is Executed 2025-03-31T16:35:35.620059Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 9437184 executing on unit ExecuteDataTx 2025-03-31T16:35:35.620091Z node 3 :TX_DATASHARD TRACE: Add [0:8] at 9437184 to execution unit FinishPropose 2025-03-31T16:35:35.620138Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:8] at 9437184 on unit FinishPropose 2025-03-31T16:35:35.620197Z node 3 :TX_DATASHARD TRACE: Propose transaction complete txid 8 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-03-31T16:35:35.620276Z node 3 :TX_DATASHARD TRACE: Execution status for [0:8] at 9437184 is DelayComplete 2025-03-31T16:35:35.620307Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 9437184 executing on unit FinishPropose 2025-03-31T16:35:35.620342Z node 3 :TX_DATASHARD TRACE: Add [0:8] at 9437184 to execution unit CompletedOperations 2025-03-31T16:35:35.620374Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:8] at 9437184 on unit CompletedOperations 2025-03-31T16:35:35.620421Z node 3 :TX_DATASHARD TRACE: Execution status for [0:8] at 9437184 is Executed 2025-03-31T16:35:35.620443Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 9437184 executing on unit CompletedOperations 2025-03-31T16:35:35.620467Z node 3 
:TX_DATASHARD TRACE: Execution plan for [0:8] at 9437184 has finished 2025-03-31T16:35:35.620534Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-03-31T16:35:35.620576Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:8] at 9437184 on unit FinishPropose 2025-03-31T16:35:35.620614Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 >> KqpTx::SnapshotRO ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_ReplaceFromSelectOlap [GOOD] Test command err: Trying to start YDB, gRPC: 14102, MsgBus: 6641 2025-03-31T16:35:18.080331Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488013137461720336:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:18.080534Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0013e8/r3tmp/tmp6SGIrY/pdisk_1.dat 2025-03-31T16:35:18.536965Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:35:18.546265Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:18.546379Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:18.551312Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14102, node 1 2025-03-31T16:35:18.675385Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:18.675407Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:18.675415Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:18.675535Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6641 TClient is connected to server localhost:6641 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:19.230353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:35:19.250173Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:35:21.110705Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013150346622885:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:21.110705Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013150346622895:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:21.110799Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:21.114013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:35:21.123401Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488013150346622899:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:35:21.186847Z node 1 :TX_PROXY ERROR: Actor# [1:7488013150346622952:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:21.455314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:2, at schemeshard: 72057594046644480 2025-03-31T16:35:21.693559Z node 1 :TX_PROXY ERROR: Actor# [1:7488013150346623191:2470] txid# 281474976710664, issues: { message: "Check failed: path: \'/Root/.tmp/sessions\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:21.706680Z node 1 :TX_PROXY ERROR: Actor# [1:7488013150346623198:2475] txid# 281474976710665, issues: { message: "Check failed: path: \'/Root/.tmp/sessions/NTIyMGQzYmItOGVhMzVhNzAtYWI0NGQ3MWYtOTIxOGI0OTU=\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:21.723877Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-03-31T16:35:21.739077Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488013150346623251:2366], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:17: Error: At function: KiReadTable!
:3:17: Error: Cannot find table 'db.[/Root/test/Temp]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:35:21.739434Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTIyMGQzYmItOGVhMzVhNzAtYWI0NGQ3MWYtOTIxOGI0OTU=, ActorId: [1:7488013150346622866:2328], ActorState: ExecuteState, TraceId: 01jqpgxfzwbp4asajsp6xzjda0, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:35:21.779991Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488013150346623262:2373], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:17: Error: At function: KiReadTable!
:3:17: Error: Cannot find table 'db.[/Root/test/Temp]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:35:21.781461Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmQ5M2Q0YzgtZWE4YjdiNjktZDA4NTQ1ZDktZjRiMmYzMTg=, ActorId: [1:7488013150346623258:2370], ActorState: ExecuteState, TraceId: 01jqpgxg15918zpvgxx7ntn2cj, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 18312, MsgBus: 19893 2025-03-31T16:35:22.429611Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488013155774199359:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:22.429668Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0013e8/r3tmp/tmpfBsZCx/pdisk_1.dat 2025-03-31T16:35:22.620003Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:35:22.623452Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:22.623531Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:22.625929Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18312, node 2 2025-03-31T16:35:22.667643Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:22.667665Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:22.667673Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:22.667785Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19893 TClient is connected to server localhost:19893 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:23.043073Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:35:23.052191Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:35:23.062638Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:23.149851Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:23.313042Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:23.387753Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:25.453736Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013168659102998:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:25.453839Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool defa ... -03-31T16:35:33.667069Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037898;tx_state=TTxProgressTx::Execute;tx_current=281474976715669;tx_id=281474976715669;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715669; 2025-03-31T16:35:33.667245Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=281474976715669;tx_id=281474976715669;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715669; 2025-03-31T16:35:34.045360Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=281474976715672;tx_id=281474976715672;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715672; 2025-03-31T16:35:34.046085Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=281474976715672;tx_id=281474976715672;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715672; 2025-03-31T16:35:34.046412Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=281474976715672;tx_id=281474976715672;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715672; 2025-03-31T16:35:34.046573Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=281474976715672;tx_id=281474976715672;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715672; 2025-03-31T16:35:34.046790Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=281474976715672;tx_id=281474976715672;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715672; 2025-03-31T16:35:34.046919Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=281474976715672;tx_id=281474976715672;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715672; 2025-03-31T16:35:34.046991Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=281474976715672;tx_id=281474976715672;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715672; 2025-03-31T16:35:34.047449Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037910;self_id=[3:7488013193547291191:2489];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037910;local_tx_no=23;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037909,72075186224037915;receive=72075186224037903; 2025-03-31T16:35:34.047522Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037910;self_id=[3:7488013193547291191:2489];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037910;local_tx_no=24;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037909,72075186224037915;receive=72075186224037903; 2025-03-31T16:35:34.047587Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037910;self_id=[3:7488013193547291191:2489];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037910;local_tx_no=25;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037909,72075186224037915;receive=72075186224037903; 2025-03-31T16:35:34.047693Z node 3 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224037910;self_id=[3:7488013193547291191:2489];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037910;local_tx_no=26;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037909,72075186224037915;receive=72075186224037903; 2025-03-31T16:35:34.047762Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037910;self_id=[3:7488013193547291191:2489];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037910;local_tx_no=27;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037909,72075186224037915;receive=72075186224037903; 2025-03-31T16:35:34.047838Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037910;self_id=[3:7488013193547291191:2489];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037910;local_tx_no=28;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037909,72075186224037915;receive=72075186224037903; 2025-03-31T16:35:34.047927Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037910;self_id=[3:7488013193547291191:2489];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037910;local_tx_no=29;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037909,72075186224037915;receive=72075186224037903; 2025-03-31T16:35:34.047939Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037915;tx_state=TTxProgressTx::Execute;tx_current=281474976715672;tx_id=281474976715672;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715672; 2025-03-31T16:35:34.048051Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037910;self_id=[3:7488013193547291191:2489];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037910;local_tx_no=30;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037909,72075186224037915;receive=72075186224037903; 2025-03-31T16:35:34.048153Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037910;self_id=[3:7488013193547291191:2489];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037910;local_tx_no=31;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037909,72075186224037915;receive=72075186224037903; 2025-03-31T16:35:34.048232Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037910;self_id=[3:7488013193547291191:2489];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037910;local_tx_no=32;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037909,72075186224037915;receive=72075186224037903; 2025-03-31T16:35:34.048415Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037910;self_id=[3:7488013193547291191:2489];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037910;local_tx_no=34;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037915;receive=72075186224037903; 2025-03-31T16:35:34.048452Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=281474976715672;tx_id=281474976715672;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715672; 2025-03-31T16:35:34.048488Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037910;self_id=[3:7488013193547291191:2489];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037910;local_tx_no=35;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037915;receive=72075186224037903; 
2025-03-31T16:35:34.048549Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037910;self_id=[3:7488013193547291191:2489];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037910;local_tx_no=36;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037915;receive=72075186224037909; 2025-03-31T16:35:34.048612Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037910;self_id=[3:7488013193547291191:2489];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037910;local_tx_no=37;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037915;receive=72075186224037909; 2025-03-31T16:35:34.048629Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=281474976715672;tx_id=281474976715672;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715672; 2025-03-31T16:35:34.048676Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037910;self_id=[3:7488013193547291191:2489];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037910;local_tx_no=38;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037915;receive=72075186224037909; 2025-03-31T16:35:34.048736Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037910;self_id=[3:7488013193547291191:2489];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037910;local_tx_no=39;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037915;receive=72075186224037909; 2025-03-31T16:35:34.048797Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037910;self_id=[3:7488013193547291191:2489];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037910;local_tx_no=40;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037915;receive=72075186224037909; 2025-03-31T16:35:34.048804Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=281474976715672;tx_id=281474976715672;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715672; 2025-03-31T16:35:34.048858Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037910;self_id=[3:7488013193547291191:2489];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037910;local_tx_no=41;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037915;receive=72075186224037909; 2025-03-31T16:35:34.048934Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037910;self_id=[3:7488013193547291191:2489];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037910;local_tx_no=42;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037915;receive=72075186224037909; 2025-03-31T16:35:34.049032Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037910;self_id=[3:7488013193547291191:2489];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037910;local_tx_no=43;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037915;receive=72075186224037909; 2025-03-31T16:35:34.049109Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037910;self_id=[3:7488013193547291191:2489];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037910;local_tx_no=44;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037915;receive=72075186224037909; 2025-03-31T16:35:34.049175Z node 3 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224037910;self_id=[3:7488013193547291191:2489];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037910;local_tx_no=45;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037915;receive=72075186224037909; 2025-03-31T16:35:34.049236Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037910;self_id=[3:7488013193547291191:2489];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037910;local_tx_no=46;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037915;receive=72075186224037909; 2025-03-31T16:35:34.049336Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037910;self_id=[3:7488013193547291191:2489];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037910;local_tx_no=47;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037915;receive=72075186224037909; 2025-03-31T16:35:34.049731Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=281474976715672;tx_id=281474976715672;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715672; 2025-03-31T16:35:34.050042Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=281474976715672;tx_id=281474976715672;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715672; 2025-03-31T16:35:34.050142Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037910;tx_state=TTxProgressTx::Execute;tx_current=281474976715672;tx_id=281474976715672;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715672; |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> EntityId::MinId [GOOD] >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite2 >> Cache::Test5 [GOOD] >> EntityId::CheckId [GOOD] >> EntityId::Distinct [GOOD] >> EntityId::MaxId [GOOD] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> EntityId::CheckId [GOOD] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> EntityId::MaxId [GOOD] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> KqpQueryService::ExecStatsPlan [GOOD] >> KqpQueryService::ExecStatsAst |97.1%| [TA] $(B)/ydb/core/tx/schemeshard/ut_column_build/test-results/unittest/{meta.json ... results_accumulator.log} |97.1%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_column_build/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpQueryService::AlterTable_DropNotNull_WithSetFamily_Valid [GOOD] |97.1%| [TA] $(B)/ydb/core/fq/libs/common/ut/test-results/unittest/{meta.json ... results_accumulator.log} |97.1%| [TA] {RESULT} $(B)/ydb/core/fq/libs/common/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpQueryService::ExecuteQueryMultiResult [GOOD] >> KqpQueryService::ExecuteQueryMultiScalar >> KqpQueryService::FlowControllOnHugeRealTable+LongRow [GOOD] >> KqpQueryService::FlowControllOnHugeRealTable-LongRow >> KqpQueryServiceScripts::InvalidFetchToken [GOOD] >> AnalyzeColumnshard::AnalyzeAnalyzeOneColumnTableSpecificColumns >> KqpSnapshotIsolation::TReadOnlyOltp >> KqpSnapshotIsolation::TSimpleOltp >> TraverseDatashard::TraverseOneTable >> KqpQueryServiceScripts::ExecuteScriptWithResultsTtlAndForgetAfter [GOOD] >> KqpSinkMvcc::OlapNamedStatement >> TTxDataShardMiniKQL::WriteLargeExternalBlob [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::AlterTable_DropNotNull_WithSetFamily_Valid [GOOD] Test command err: Trying to start YDB, gRPC: 4232, MsgBus: 17080 2025-03-31T16:35:16.735856Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488013128803762654:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:16.735942Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001403/r3tmp/tmp4Hupl0/pdisk_1.dat 2025-03-31T16:35:17.076971Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4232, node 1 2025-03-31T16:35:17.123763Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:17.123848Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:17.126478Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:35:17.235682Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:17.235703Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:17.235714Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:17.235849Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17080 TClient is connected to server localhost:17080 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:35:17.766966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:17.796057Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:35:19.846116Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013141688665192:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:19.846210Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:19.846375Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013141688665200:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:19.850203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:35:19.861174Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488013141688665206:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:35:19.958616Z node 1 :TX_PROXY ERROR: Actor# [1:7488013141688665257:2336] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:20.329633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:1, at schemeshard: 72057594046644480 2025-03-31T16:35:20.484811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:35:20.538272Z node 1 :TX_PROXY ERROR: Actor# [1:7488013145983632797:2480] txid# 281474976710664, issues: { message: "Check failed: path: \'/Root/.tmp/sessions\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:20.552081Z node 1 :TX_PROXY ERROR: Actor# [1:7488013145983632804:2485] txid# 281474976710665, issues: { message: "Check failed: path: \'/Root/.tmp/sessions/MzY4N2FkMjktYjg5NWI2NjAtZTQ0NTcwMTktMWYxMTI0OTY=\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:20.570657Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-03-31T16:35:20.593668Z node 1 :TX_PROXY ERROR: Actor# [1:7488013145983632864:2532] txid# 281474976710667, issues: { message: "Check failed: path: \'/Root/.tmp/sessions\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:20.595730Z node 1 :TX_PROXY ERROR: Actor# [1:7488013145983632871:2537] txid# 281474976710668, issues: { message: "Check failed: path: \'/Root/.tmp/sessions/MzY4N2FkMjktYjg5NWI2NjAtZTQ0NTcwMTktMWYxMTI0OTY=\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:20.598717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-03-31T16:35:20.907101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:35:21.028864Z node 1 :TX_PROXY ERROR: Actor# [1:7488013150278600350:2644] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.tmp/sessions\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:21.030705Z node 1 :TX_PROXY ERROR: Actor# [1:7488013150278600357:2649] txid# 281474976710675, issues: { message: "Check failed: path: \'/Root/.tmp/sessions/MzY4N2FkMjktYjg5NWI2NjAtZTQ0NTcwMTktMWYxMTI0OTY=\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeDir, 
state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:21.046004Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2025-03-31T16:35:21.061034Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488013150278600411:2408], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:21: Error: At function: KiReadTable!
:3:21: Error: Cannot find table 'db.[/Root/Temp]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:35:21.061282Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTJjZmQ2MDEtOWNjMWE4YTMtOTdkODI0YzItMTY0ZDdkZWQ=, ActorId: [1:7488013150278600409:2407], ActorState: ExecuteState, TraceId: 01jqpgxfaqdw6yhpjhkt5r6301, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:35:21.087808Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488013150278600421:2413], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:17: Error: At function: KiReadTable!
:3:17: Error: Cannot find table 'db.[/Root/Temp]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:35:21.088001Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTIzN2Y5NmYtYWRlNzA4ZjUtNWMzZDg1YS1kNTVhMGNhOA==, ActorId: [1:7488013150278600419:2412], ActorState: ExecuteState, TraceId: 01jqpgxfbhbsqn1w5nk3zszgvr, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 28364, MsgBus: 28176 2025-03-31T16:35:21.915589Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488013150908107894:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:21.916717Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001403/r3tmp/tmpWRNIi5/pdisk_1.dat 2025-03-31T16:35:22.063238Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:35:22.089196Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:22.089276Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:22.090603Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28364, node 2 2025-03-31T16:35:22.171277Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:22.171308Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:22.171320Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:22.171452Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28176 TClient is connected to server localhost:28176 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: ... 
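The SCHEME_ERROR above (issue code 2003) is YDB's generic compile-time failure for reading a missing or inaccessible table; in this test the session-scoped objects under /Root/Temp had already been cleaned up before the query compiled. A minimal YQL sketch that triggers the same error class, assuming a session in which the path below does not exist:

    -- Reading a path that was never created (or was already dropped, as with
    -- the temp tables above) fails in KiReadTable with SCHEME_ERROR, code 2003.
    SELECT * FROM `/Root/Temp`;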
don't have access permissions } 2025-03-31T16:35:29.400009Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:35:29.434212Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:35:29.467167Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:35:29.508223Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:35:29.542339Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:35:29.579958Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:35:29.626333Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013182752966266:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:29.626428Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:29.626648Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013182752966271:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:29.630843Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:35:29.642819Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488013182752966273:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:35:29.718175Z node 3 :TX_PROXY ERROR: Actor# [3:7488013182752966327:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:30.604452Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:35:30.690205Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-31T16:35:30.774703Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-31T16:35:31.056672Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488013169868062107:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:31.056739Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:35:31.178698Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2025-03-31T16:35:31.349407Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2025-03-31T16:35:31.449281Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480 2025-03-31T16:35:31.592212Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715684:0, at schemeshard: 72057594046644480 2025-03-31T16:35:31.793053Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715687:0, at schemeshard: 72057594046644480 2025-03-31T16:35:31.927589Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715689:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 13847, MsgBus: 20845 2025-03-31T16:35:32.838265Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488013198439037273:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:32.838339Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001403/r3tmp/tmpnAaQhi/pdisk_1.dat 2025-03-31T16:35:32.987126Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:35:32.989311Z node 4 :HIVE 
WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:32.989404Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:32.992479Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13847, node 4 2025-03-31T16:35:33.056107Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:33.056127Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:33.056133Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:33.056233Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20845 TClient is connected to server localhost:20845 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:33.539433Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:33.547166Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:35:36.479603Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488013215618907106:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:36.479668Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488013215618907094:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:36.479926Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:36.484483Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:35:36.494715Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7488013215618907109:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:35:36.567259Z node 4 :TX_PROXY ERROR: Actor# [4:7488013215618907160:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:36.627594Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:1, at schemeshard: 72057594046644480 2025-03-31T16:35:36.996948Z node 4 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [4:7488013215618907317:2356], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:30: Error: At function: KiWriteTable!
:2:84: Error: Failed to convert type: Struct<'id':Int32,'val1':Null,'val2':Int32> to Struct<'id':Int32,'val1':Int32,'val2':Int32?>
:2:84: Error: Failed to convert 'val1': Null to Int32
:2:84: Error: Failed to convert input columns types to scheme types, code: 2031 2025-03-31T16:35:36.997366Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZjE4Y2RmMDMtYzlkZGFmMzAtZWI0Y2FkM2UtNGY5M2E2OGY=, ActorId: [4:7488013215618907315:2355], ActorState: ExecuteState, TraceId: 01jqpgxyw91z324a74a1nzxefw, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-03-31T16:35:37.040371Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:35:37.088135Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 >> KqpSinkTx::OlapLocksAbortOnCommit |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeDatashard::AnalyzeOneTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::InvalidFetchToken [GOOD] Test command err: Trying to start YDB, gRPC: 5799, MsgBus: 11376 2025-03-31T16:35:13.567929Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488013117409177618:2274];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:13.568058Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00141b/r3tmp/tmpi28DXd/pdisk_1.dat 2025-03-31T16:35:13.853498Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5799, node 1 2025-03-31T16:35:13.934309Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:13.934336Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:13.934356Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:13.934471Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:35:13.938904Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:13.939033Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:13.940963Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11376 TClient is connected to server localhost:11376 WaitRootIsUp 'Root'... 
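The GENERIC_ERROR above (issue code 2031) comes from the strict type check on writes: the query supplies Null for val1, whose target column is a non-optional Int32, while val2 is Int32? and would accept Null. A minimal sketch of a statement that fails the same way, against a hypothetical table whose column types mirror the structs in the error:

    -- Assumed schema: id Int32, val1 Int32 NOT NULL, val2 Int32 (nullable).
    -- Null cannot be converted to the non-optional Int32 of val1, so the
    -- statement fails at compile time in KiWriteTable with code 2031.
    UPSERT INTO test_table (id, val1, val2) VALUES (1, NULL, 2);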
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:14.412145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:14.428475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:14.566937Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:14.734941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:35:14.815259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:35:16.574447Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013130294081058:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:16.574597Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:16.980619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:35:17.018730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:35:17.094039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:35:17.124784Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:35:17.152100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:35:17.190567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:35:17.238254Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013134589048868:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:17.238404Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:17.238711Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013134589048873:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:17.242246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:35:17.255514Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488013134589048875:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:35:17.354432Z node 1 :TX_PROXY ERROR: Actor# [1:7488013134589048931:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:18.189277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:35:18.190707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:35:18.193428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T16:35:18.566416Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488013117409177618:2274];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:18.566500Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:35:19.136633Z node 1 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: fa8511c2-d2cdbf44-a22b36c4-9d5075f5, reply PRECONDITION_FAILED, issues: {
: Error: Operation is still running } 2025-03-31T16:35:19.160870Z node 1 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: fa8511c2-d2cdbf44-a22b36c4-9d5075f5, reply PRECONDITION_FAILED, issues: {
: Error: Operation is still running } 2025-03-31T16:35:19.178700Z node 1 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: fa8511c2-d2cdbf44-a22b36c4-9d5075f5, reply PRECONDITION_FAILED, issues: {
: Error: Operation is still running } 2025-03-31T16:35:19.215619Z node 1 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: fa8511c2-d2cdbf44-a22b36c4-9d5075f5, reply PRECONDITION_FAILED, issues: {
: Error: Operation is still running } 2025-03-31T16:35:19.233460Z node 1 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: fa8511c2-d2cdbf44-a22b36c4-9d5075f5, reply PRECONDITION_FAILED, issues: {
: Error: Operation is still running } 2025-03-31T16:35:19.259955Z node 1 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: fa8511c2-d2cdbf44-a22b36c4-9d5075f5, reply PRECONDITION_FAILED, issues: {
: Error: Operation is still running } 2025-03-31T16:35:19.282107Z node 1 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: fa8511c2-d2cdbf44-a22b36c4-9d5075f5, reply PRECONDITION_FAILED, issues: {
: Error: Operation is still running } 2025-03-31T16:35:19.298819Z node 1 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: fa8511c2-d2cdbf44-a22b36c4-9d5075f5, reply PRECONDITION_FAILED, issues: {
: Error: Operation is still running } 2025-03-31T16:35:19.338527Z node 1 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: fa8511c2-d2cdbf44-a22b36c4-9d5075f5, reply PRECONDITION_FAILED, issues: {
: Error: Operation is still running } 2025-03-31T16:35:19.362979Z node 1 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: fa8511c2-d2cdbf44-a22b36c4-9d5075f5, reply PRECONDITION_FAILED, issues: {
: Error: Operation is still running } 2025-03-31T16:35:19.395407Z node 1 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: fa8511c2-d2cdbf44-a22b36c4-9d5075f5, reply PRECONDITION_FAILED, issues: {
: Error: Operation is still running } 2025-03-31T16:35:19.414125Z node 1 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: fa8511c2-d2cdbf44-a22b36c4-9d5075f5, reply PRECONDITION_FAILED, issues: {
: Error: Operation is still running } 2025-03-31T16:35:19.436134Z node 1 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: fa8511c2-d2cdbf44-a22b36c4-9d5075f5, reply PRECONDITION_FAILED, issues: {
: Error: Operation is still running } 2025-03-31T16:35:19.456834Z node 1 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: fa8511c2-d2cdbf44-a22b36c4-9d5075f5, reply PRECONDITION_FAILED, issues: { ... l default not found or you don't have access permissions } 2025-03-31T16:35:25.339397Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013165609295223:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:25.343000Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:35:25.352031Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488013165609295225:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:35:25.413410Z node 2 :TX_PROXY ERROR: Actor# [2:7488013165609295279:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:26.231732Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:35:26.233331Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-31T16:35:26.234334Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-31T16:35:26.769317Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488013148429423761:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:26.769689Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:35:28.754256Z node 2 :KQP_PROXY WARN: [TQueryBase] [TGetScriptExecutionOperationQueryActor] TraceId: e7804a7b-bd63a779-4f2aea83-ac54a8ac, Finish with NOT_FOUND, Issues: {
: Error: No such execution }, SessionId: ydb://session/3?node_id=2&id=YzVkNjgzYmQtZjM3ZGY5OWMtMjg1ZDI2MGEtZTU5N2NhNmQ=, TxId: 2025-03-31T16:35:28.787065Z node 2 :KQP_PROXY WARN: [TQueryBase] [TCheckLeaseStatusQueryActor] TraceId: e7804a7b-bd63a779-4f2aea83-ac54a8ac, Finish with NOT_FOUND, Issues: {
: Error: No such execution }, SessionId: ydb://session/3?node_id=2&id=NjJkMGY5OWEtZTRkNWYxNzQtYmI1OWRhMDUtNjM3MjUzZGY=, TxId: 2025-03-31T16:35:28.799187Z node 2 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: e7804a7b-bd63a779-4f2aea83-ac54a8ac, reply NOT_FOUND, issues: {
: Error: No such execution } Trying to start YDB, gRPC: 26250, MsgBus: 17961 2025-03-31T16:35:30.379467Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488013186958465145:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:30.379564Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00141b/r3tmp/tmpojccbC/pdisk_1.dat 2025-03-31T16:35:30.496176Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:35:30.526874Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:30.526983Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:30.528505Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26250, node 3 2025-03-31T16:35:30.584694Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:30.584723Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:30.584734Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:30.584914Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17961 TClient is connected to server localhost:17961 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:31.130546Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:31.140455Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:35:31.148408Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:31.228999Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:35:31.427675Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:31.504187Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:34.127159Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013204138336092:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:34.127228Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:34.145590Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:35:34.182214Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:35:34.213847Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:35:34.247889Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:35:34.283234Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:35:34.318838Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:35:34.380406Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013204138336607:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:34.380525Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:34.380893Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013204138336612:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:34.387833Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:35:34.404966Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488013204138336614:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:35:34.494703Z node 3 :TX_PROXY ERROR: Actor# [3:7488013204138336671:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:35.379456Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488013186958465145:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:35.379539Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:35:35.442990Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:35:35.444894Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-31T16:35:35.448247Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::WriteLargeExternalBlob [GOOD] Test command err: 2025-03-31T16:35:33.206142Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:35:33.207523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:35:33.207576Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:35:33.217162Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:35:33.217666Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:131:2154] 2025-03-31T16:35:33.217947Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:35:33.228060Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:35:33.276569Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:35:33.276845Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:35:33.280135Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-03-31T16:35:33.280195Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-03-31T16:35:33.280240Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-03-31T16:35:33.282032Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:35:33.282622Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:35:33.282697Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:197:2154] in generation 2 2025-03-31T16:35:33.369527Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:35:33.414827Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 
2025-03-31T16:35:33.415213Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:35:33.415348Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-03-31T16:35:33.415397Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-03-31T16:35:33.415437Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-03-31T16:35:33.415469Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:35:33.415740Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:35:33.415791Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:35:33.416916Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-03-31T16:35:33.417036Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-03-31T16:35:33.417095Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-31T16:35:33.417141Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:35:33.417247Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-03-31T16:35:33.417282Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-31T16:35:33.417312Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-31T16:35:33.417354Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-03-31T16:35:33.417397Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:35:33.417505Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:35:33.417556Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:35:33.419498Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-03-31T16:35:33.423344Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:131:2154]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nY\n\006table2\032\n\n\004key1\030\002 \"\032\013\n\004key2\030\200$ #\032\014\n\005value\030\200$ 8(\"(#:\010Z\006\010\000\030\000(\000J\014/Root/table2\222\002\013\th\020\000\000\000\000\000\000\020\016" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-03-31T16:35:33.423402Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T16:35:33.423481Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-31T16:35:33.423671Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-03-31T16:35:33.423706Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-03-31T16:35:33.423750Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-03-31T16:35:33.423852Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] 
at 9437184 is ExecutedNoMoreRestarts 2025-03-31T16:35:33.423909Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-03-31T16:35:33.423943Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-03-31T16:35:33.423965Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-03-31T16:35:33.424251Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-03-31T16:35:33.424313Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-03-31T16:35:33.424363Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-03-31T16:35:33.424401Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-03-31T16:35:33.424444Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-03-31T16:35:33.424469Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-03-31T16:35:33.424497Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-03-31T16:35:33.424526Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-03-31T16:35:33.424557Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-03-31T16:35:33.436665Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-03-31T16:35:33.436742Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-03-31T16:35:33.436773Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-03-31T16:35:33.436820Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-03-31T16:35:33.437737Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-03-31T16:35:33.441023Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:35:33.441085Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:35:33.441133Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-03-31T16:35:33.441290Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:131:2154]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-03-31T16:35:33.441322Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-31T16:35:33.441480Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-03-31T16:35:33.441525Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-31T16:35:33.441569Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-03-31T16:35:33.441599Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-03-31T16:35:33.444466Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 
1000001 MediatorID: 0 TabletID: 9437184 } 2025-03-31T16:35:33.444525Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:35:33.444834Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:35:33.444894Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:35:33.444948Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-31T16:35:33.444989Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:35:33.445043Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-31T16:35:33.445105Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-03-31T16:35:33.445137Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-03-31T16:35:33.445191Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-31T16:35:33.445233Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-03-31T16:35:33.445285Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-03-31T16:35:33.445325Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-03-31T16:35:33.445496Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-03-31T16:35:33.445536Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-31T16:35:33.445580Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-03-31T16:35:33.445611Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-03-31T16:35:33.445633Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-03-31T16:35:33.445709Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-31T16:35:33.445734Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-03-31T16:35:33.445784Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-31T16:35:33.445817Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-03-31T16:35:33.445879Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-03-31T16:35:33.445910Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-03-31T16:35:33.445944Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at ... 
v{min} ImmediateWriteEdgeReplied# v{min} 2025-03-31T16:35:37.219902Z node 3 :TX_DATASHARD TRACE: Activated operation [0:2] at 9437184 2025-03-31T16:35:37.219950Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed 2025-03-31T16:35:37.219978Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit BuildAndWaitDependencies 2025-03-31T16:35:37.220006Z node 3 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit ExecuteDataTx 2025-03-31T16:35:37.220032Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2025-03-31T16:35:37.220084Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-03-31T16:35:37.220147Z node 3 :TX_DATASHARD TRACE: Operation [0:2] at 9437184 requested 33554432 more memory 2025-03-31T16:35:37.220192Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Restart 2025-03-31T16:35:37.220343Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-31T16:35:37.220394Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2025-03-31T16:35:37.220456Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-03-31T16:35:37.254476Z node 3 :TX_DATASHARD TRACE: Executed operation [0:2] at tablet 9437184 with status COMPLETE 2025-03-31T16:35:37.254607Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for [0:2] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 7340039, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-03-31T16:35:37.254728Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is ExecutedNoMoreRestarts 2025-03-31T16:35:37.254785Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit ExecuteDataTx 2025-03-31T16:35:37.254833Z node 3 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit FinishPropose 2025-03-31T16:35:37.254886Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit FinishPropose 2025-03-31T16:35:37.254999Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is DelayCompleteNoMoreRestarts 2025-03-31T16:35:37.255047Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit FinishPropose 2025-03-31T16:35:37.255122Z node 3 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit CompletedOperations 2025-03-31T16:35:37.255166Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit CompletedOperations 2025-03-31T16:35:37.255221Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed 2025-03-31T16:35:37.255253Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit CompletedOperations 2025-03-31T16:35:37.255293Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:2] at 9437184 has finished 2025-03-31T16:35:37.284213Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-03-31T16:35:37.284298Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:2] at 9437184 on unit FinishPropose 2025-03-31T16:35:37.284355Z node 3 :TX_DATASHARD TRACE: Propose transaction 
complete txid 2 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 5 ms, status: COMPLETE 2025-03-31T16:35:37.284445Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:35:38.152652Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269551617, Sender [3:99:2134], Recipient [3:235:2228]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 99 RawX2: 12884904022 } 2025-03-31T16:35:38.152744Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvGetShardState 2025-03-31T16:35:38.153238Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [3:300:2281], Recipient [3:235:2228]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:35:38.153286Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:35:38.153338Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [3:299:2280], serverId# [3:300:2281], sessionId# [0:0:0] 2025-03-31T16:35:38.341176Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [3:99:2134], Recipient [3:235:2228]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 99 RawX2: 12884904022 } TxBody: "\032\332\201\200\010\037\000\005\205\n\205\000\205\004?\000\205\002\202\0041\034MyReads MyWrites\205\004?\000\206\202\024Reply\024Write?\000?\000 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\000\005?\004?\014\005?\002)\211\006\202\203\005\004\213\004\203\004\203\001H\205\002\203\001H\01056$UpdateRow\000\003?\016 h\020\000\000\000\000\000\000\016\000\000\000\000\000\000\000\013?\024\003?\020\251\003\003?\022\006bar\003\005?\030\003?\026\007\000\000\000\001xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx 2025-03-31T16:35:38.343847Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T16:35:38.344042Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-31T16:35:38.395730Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:3] at 9437184 on unit CheckDataTx 2025-03-31T16:35:38.395864Z node 3 :TX_DATASHARD TRACE: Execution status for [0:3] at 9437184 is Executed 2025-03-31T16:35:38.395918Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 9437184 executing on unit CheckDataTx 2025-03-31T16:35:38.395964Z node 3 :TX_DATASHARD TRACE: Add [0:3] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-31T16:35:38.396005Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:3] at 9437184 on unit BuildAndWaitDependencies 2025-03-31T16:35:38.396055Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-03-31T16:35:38.396122Z node 3 :TX_DATASHARD TRACE: Activated operation [0:3] at 9437184 2025-03-31T16:35:38.396165Z node 3 :TX_DATASHARD TRACE: Execution status for [0:3] at 9437184 is Executed 2025-03-31T16:35:38.396190Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 9437184 executing on unit 
BuildAndWaitDependencies 2025-03-31T16:35:38.396215Z node 3 :TX_DATASHARD TRACE: Add [0:3] at 9437184 to execution unit ExecuteDataTx 2025-03-31T16:35:38.396241Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:3] at 9437184 on unit ExecuteDataTx 2025-03-31T16:35:38.396293Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-03-31T16:35:38.396348Z node 3 :TX_DATASHARD TRACE: Operation [0:3] at 9437184 requested 46269638 more memory 2025-03-31T16:35:38.396387Z node 3 :TX_DATASHARD TRACE: Execution status for [0:3] at 9437184 is Restart 2025-03-31T16:35:38.396533Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-31T16:35:38.396577Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:3] at 9437184 on unit ExecuteDataTx 2025-03-31T16:35:38.396629Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-03-31T16:35:38.456169Z node 3 :TX_DATASHARD TRACE: Operation [0:3] at 9437184 exceeded memory limit 50463942 and requests 403711536 more for the next try 2025-03-31T16:35:38.464138Z node 3 :TX_DATASHARD DEBUG: tx 3 released its data 2025-03-31T16:35:38.464237Z node 3 :TX_DATASHARD TRACE: Execution status for [0:3] at 9437184 is Restart 2025-03-31T16:35:38.464609Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-31T16:35:38.464652Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:3] at 9437184 on unit ExecuteDataTx 2025-03-31T16:35:38.538547Z node 3 :TX_DATASHARD DEBUG: tx 3 at 9437184 restored its data 2025-03-31T16:35:38.538666Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-03-31T16:35:38.642437Z node 3 :TX_DATASHARD TRACE: Executed operation [0:3] at tablet 9437184 with status COMPLETE 2025-03-31T16:35:38.642524Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for [0:3] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 16777223, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-03-31T16:35:38.642590Z node 3 :TX_DATASHARD TRACE: Execution status for [0:3] at 9437184 is ExecutedNoMoreRestarts 2025-03-31T16:35:38.642621Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 9437184 executing on unit ExecuteDataTx 2025-03-31T16:35:38.642654Z node 3 :TX_DATASHARD TRACE: Add [0:3] at 9437184 to execution unit FinishPropose 2025-03-31T16:35:38.642687Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:3] at 9437184 on unit FinishPropose 2025-03-31T16:35:38.642723Z node 3 :TX_DATASHARD TRACE: Execution status for [0:3] at 9437184 is DelayComplete 2025-03-31T16:35:38.642745Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 9437184 executing on unit FinishPropose 2025-03-31T16:35:38.642771Z node 3 :TX_DATASHARD TRACE: Add [0:3] at 9437184 to execution unit CompletedOperations 2025-03-31T16:35:38.642798Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:3] at 9437184 on unit CompletedOperations 
2025-03-31T16:35:38.642835Z node 3 :TX_DATASHARD TRACE: Execution status for [0:3] at 9437184 is Executed 2025-03-31T16:35:38.642853Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 9437184 executing on unit CompletedOperations 2025-03-31T16:35:38.642882Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:3] at 9437184 has finished 2025-03-31T16:35:38.746292Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-03-31T16:35:38.746387Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:3] at 9437184 on unit FinishPropose 2025-03-31T16:35:38.746447Z node 3 :TX_DATASHARD TRACE: Propose transaction complete txid 3 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 3 ms, status: COMPLETE 2025-03-31T16:35:38.746544Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:35:38.831785Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 9437184, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-31T16:35:38.831874Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 9437184, table# 1001, finished edge# 0, front# 0 2025-03-31T16:35:38.839151Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [3:232:2227], Recipient [3:235:2228]: NKikimr::TEvTablet::TEvFollowerGcApplied ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::ExecuteScriptWithResultsTtlAndForgetAfter [GOOD] Test command err: Trying to start YDB, gRPC: 6284, MsgBus: 3593 2025-03-31T16:34:56.335744Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488013042634034861:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:56.335775Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001480/r3tmp/tmpyrhFmz/pdisk_1.dat 2025-03-31T16:34:56.700714Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6284, node 1 2025-03-31T16:34:56.741356Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:56.741454Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:56.743529Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:34:56.787762Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:56.787788Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:56.787802Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:56.787934Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3593 TClient is connected to server localhost:3593 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:34:57.298080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:57.321820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:57.468276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:57.626039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:57.697922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:59.426004Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013055518938517:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:59.426089Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:59.725332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:34:59.757588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:34:59.828011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:34:59.864489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:34:59.900929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:34:59.943059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:35:00.025277Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013059813906331:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:00.025356Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:00.025567Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013059813906336:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:00.030437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:35:00.044946Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488013059813906338:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:35:00.139477Z node 1 :TX_PROXY ERROR: Actor# [1:7488013059813906394:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 4745, MsgBus: 8934 2025-03-31T16:35:01.809825Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488013066419940131:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:01.809908Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001480/r3tmp/tmpvr5A6S/pdisk_1.dat 2025-03-31T16:35:01.941911Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:35:01.953656Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:01.953740Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:01.955242Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4745, node 2 2025-03-31T16:35:02.033113Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:02.033134Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:02.033141Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:02.033250Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8934 TClient is connected to server localhost:8934 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:02.423173Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:02.442415Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:35:02.519768Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:35:02.666797Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-31T16:35:02.749861Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:05.069910Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013083599811071:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:05.069989Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: E ... ed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:35:22.574477Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T16:35:22.575639Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:35:24.976707Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438925011, txId: 281474976710699] shutting down 2025-03-31T16:35:25.248223Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438925284, txId: 281474976710702] shutting down 2025-03-31T16:35:25.542817Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438925578, txId: 281474976710705] shutting down 2025-03-31T16:35:25.855721Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438925893, txId: 281474976710708] shutting down 2025-03-31T16:35:25.878604Z node 4 :KQP_PROXY WARN: [TQueryBase] [TGetScriptExecutionResultQueryActor] TraceId: 8031d421-2db47dd9-a58d1348-8c647700, State: Get results info, Finish with NOT_FOUND, Issues: {
: Error: Results are expired }, SessionId: ydb://session/3?node_id=4&id=NmFhZDMzZmEtYmE3NjU1ZDAtNWE0YjRlOC1iYWU2NWI1YQ==, TxId: Trying to start YDB, gRPC: 61885, MsgBus: 30712 2025-03-31T16:35:27.681660Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7488013177881005887:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:27.681747Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001480/r3tmp/tmp9oQ0QH/pdisk_1.dat 2025-03-31T16:35:27.844507Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:35:27.873867Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:27.873976Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:27.875453Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61885, node 5 2025-03-31T16:35:27.927741Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:27.927772Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:27.927783Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:27.927921Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30712 TClient is connected to server localhost:30712 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-31T16:35:28.511509Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:35:28.525710Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:28.607057Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:35:28.796014Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:35:28.880608Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:35:32.066349Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7488013199355844136:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:32.066489Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:32.124624Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:35:32.162518Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:35:32.202769Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:35:32.242722Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:35:32.285733Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:35:32.326905Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:35:32.384454Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7488013199355844646:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:32.384563Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:32.384589Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7488013199355844651:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:32.389320Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:35:32.402471Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7488013199355844653:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:35:32.468249Z node 5 :TX_PROXY ERROR: Actor# [5:7488013199355844706:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:32.681732Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7488013177881005887:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:32.681813Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:35:33.546043Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T16:35:33.548515Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:35:33.550873Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:35:35.526361Z node 5 :KQP_PROXY WARN: [TQueryBase] [TGetScriptExecutionOperationQueryActor] TraceId: a30928fd-d20f752a-c4188ab4-bb7b85a3, Finish with NOT_FOUND, Issues: {
: Error: No such execution }, SessionId: ydb://session/3?node_id=5&id=NDcxMjM2MTAtNWIwZjllZTQtNzNhZjhmNDgtNzg5YzFmYjc=, TxId: 2025-03-31T16:35:36.485844Z node 5 :KQP_PROXY WARN: [TQueryBase] [TCheckLeaseStatusQueryActor] TraceId: a30928fd-d20f752a-c4188ab4-bb7b85a3, Finish with NOT_FOUND, Issues: {
: Error: No such execution }, SessionId: ydb://session/3?node_id=5&id=YzI2ZDIyMTItODg4OWI2ZWYtM2Q4ODIxZjMtN2ViYTY1NDA=, TxId: 2025-03-31T16:35:36.673055Z node 5 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: a30928fd-d20f752a-c4188ab4-bb7b85a3, reply NOT_FOUND, issues: {
: Error: No such execution } 2025-03-31T16:35:36.711941Z node 5 :KQP_PROXY WARN: [TQueryBase] [TCheckLeaseStatusQueryActor] TraceId: a30928fd-d20f752a-c4188ab4-bb7b85a3, Finish with NOT_FOUND, Issues: {
: Error: No such execution }, SessionId: ydb://session/3?node_id=5&id=NjhmN2UxMDUtZjlhMzYyODQtZTc5NTk3MDEtMzVjMGViOTQ=, TxId: 2025-03-31T16:35:36.712110Z node 5 :KQP_PROXY WARN: [ScriptExecutions] [TCancelScriptExecutionOperationActor] ExecutionId: a30928fd-d20f752a-c4188ab4-bb7b85a3, check lease failed 2025-03-31T16:35:37.173837Z node 5 :KQP_PROXY WARN: [TQueryBase] [TGetScriptExecutionResultQueryActor] TraceId: a30928fd-d20f752a-c4188ab4-bb7b85a3, State: Get results info, Finish with NOT_FOUND, Issues: {
: Error: Script execution not found }, SessionId: ydb://session/3?node_id=5&id=Y2VkNzA1MDktOTI1Mjc0NTMtY2MxNWEzM2ItNWVkYTM1MmU=, TxId: >> KqpQueryService::ExecuteQueryScalar [GOOD] >> TraverseDatashard::TraverseTwoTablesServerless |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> KqpQueryService::TableSink_OltpUpdate [GOOD] >> KqpQueryService::TableSink_Oltp_Replace+UseSink >> KqpQueryService::ExecuteQueryInteractiveTx [GOOD] >> KqpSinkLocks::DifferentKeyUpdate >> KqpSinkMvcc::OltpMultiSinksNoSinks >> AnalyzeColumnshard::AnalyzeRebootSaBeforeAnalyzeTableResponse ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ExecuteQueryScalar [GOOD] Test command err: Trying to start YDB, gRPC: 25291, MsgBus: 16224 2025-03-31T16:35:08.441432Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488013096145893171:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:08.441772Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001444/r3tmp/tmpbZBA3y/pdisk_1.dat 2025-03-31T16:35:08.846254Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:35:08.850325Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:08.850407Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:08.852742Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25291, node 1 2025-03-31T16:35:08.924851Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:08.924877Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:08.924884Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:08.925013Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16224 TClient is connected to server localhost:16224 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:35:09.518327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:09.535826Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:35:09.552063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:09.698770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:09.879136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:09.955358Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:11.746461Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013109030796686:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:11.746594Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:12.099619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:35:12.130956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:35:12.160192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:35:12.187607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:35:12.218058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:35:12.253301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:35:12.297744Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013113325764493:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:12.297856Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:12.297860Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013113325764498:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:12.301471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:35:12.314222Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488013113325764500:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:35:12.390691Z node 1 :TX_PROXY ERROR: Actor# [1:7488013113325764554:3442] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:13.309534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:35:13.437371Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488013096145893171:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:13.437460Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 18990, MsgBus: 1652 2025-03-31T16:35:22.847447Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488013156645504852:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:22.847532Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001444/r3tmp/tmpGf1wUW/pdisk_1.dat 2025-03-31T16:35:22.959921Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18990, node 2 2025-03-31T16:35:22.986158Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:22.986272Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:22.991860Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:35:23.043698Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:23.043721Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:23.043729Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:23.043843Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1652 TClient is connected to server localhost:1652 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:23.567880Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:23.590761Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:23.668828Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:23.859493Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId ... don't have access permissions } 2025-03-31T16:35:26.751516Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013173825376339:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:26.755075Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:35:26.766017Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488013173825376341:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:35:26.824105Z node 2 :TX_PROXY ERROR: Actor# [2:7488013173825376394:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:27.698389Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:35:27.711886Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:1, at schemeshard: 72057594046644480 2025-03-31T16:35:27.848014Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488013156645504852:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:27.848218Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:35:28.408853Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715677:0, at schemeshard: 72057594046644480 2025-03-31T16:35:28.995636Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:1, at schemeshard: 72057594046644480 2025-03-31T16:35:29.572438Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715689:0, at schemeshard: 72057594046644480 2025-03-31T16:35:30.123722Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715692:0, at schemeshard: 72057594046644480 2025-03-31T16:35:30.676643Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715699:0, at schemeshard: 72057594046644480 2025-03-31T16:35:32.323949Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715714:0, at schemeshard: 72057594046644480 Wait resource pool classifier 0.076340s: status = SUCCESS, issues = 2025-03-31T16:35:33.422905Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OTY5YWNhZC0yYmMyNTNiMi1hZjc1ODA1Mi1lOTI4NjRmMA==, ActorId: [2:7488013203890149139:2824], ActorState: ExecuteState, TraceId: 01jqpgxvdbfrq83qbkq3y7zfpa, Create QueryResponse for error on request, msg: Query failed during adding/waiting in workload pool MyPool Trying to start YDB, gRPC: 30144, MsgBus: 11289 2025-03-31T16:35:34.313451Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488013207458888072:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:34.313498Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/0l5c/001444/r3tmp/tmpojDhvS/pdisk_1.dat 2025-03-31T16:35:34.441212Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:35:34.472990Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:34.473079Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 30144, node 3 2025-03-31T16:35:34.474540Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:35:34.517985Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:34.518016Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:34.518024Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:34.518148Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11289 TClient is connected to server localhost:11289 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:35.080691Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:35.091576Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:35:35.107973Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:35.194406Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:35.363653Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-03-31T16:35:35.447800Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-31T16:35:38.024187Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013224638759030:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:38.024299Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:38.076367Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:35:38.115493Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:35:38.155193Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:35:38.224215Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:35:38.263454Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:35:38.305012Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:35:38.376402Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013224638759544:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:38.376495Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:38.376709Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013224638759549:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:38.380659Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:35:38.392830Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488013224638759551:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:35:38.447260Z node 3 :TX_PROXY ERROR: Actor# [3:7488013224638759604:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:39.313818Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488013207458888072:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:39.313886Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpTx::SnapshotRO [GOOD] >> KqpTx::SnapshotROInteractive1 >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ExecuteQueryInteractiveTx [GOOD] Test command err: Trying to start YDB, gRPC: 25344, MsgBus: 4745 2025-03-31T16:35:23.674981Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488013158605019348:2069];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:23.675071Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0013aa/r3tmp/tmpU0XhQF/pdisk_1.dat 2025-03-31T16:35:24.050879Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:35:24.072521Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:24.072613Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 25344, node 1 2025-03-31T16:35:24.082306Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:35:24.178428Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:24.178454Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:24.178463Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:24.178654Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4745 TClient is connected to server localhost:4745 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:24.726009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:24.746508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:24.895410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:25.052547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:25.114166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:26.844077Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013171489923002:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:26.844215Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:27.216822Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:35:27.252063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:35:27.283147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:35:27.313969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:35:27.343664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:35:27.415436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:35:27.494800Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013175784890814:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:27.494900Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:27.494989Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013175784890819:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:27.498641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:35:27.506681Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488013175784890821:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:35:27.609954Z node 1 :TX_PROXY ERROR: Actor# [1:7488013175784890876:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:28.547701Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NzlmN2QwMzctZjU0ZTQ3ZmMtZjBjYzM0ZjEtYTQ4ZmFjMTM=, ActorId: [1:7488013180079858464:2496], ActorState: ReadyState, TraceId: 01jqpgxpms6e3mr8m51n5rnvvg, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 16444, MsgBus: 14926 2025-03-31T16:35:29.326172Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488013184883954769:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:29.326290Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0013aa/r3tmp/tmpOo8KlJ/pdisk_1.dat 2025-03-31T16:35:29.441456Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16444, node 2 2025-03-31T16:35:29.479224Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:29.479347Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:29.481537Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:35:29.508953Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:29.508974Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:29.508980Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:29.509095Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14926 TClient is connected to server localhost:14926 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:29.900741Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
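Editorial note on the repeated "Resource pool default not found" warnings in node 1's output above: they are expected first-query noise. The default workload-manager pool is created lazily, the pool fetcher races the pool creator, and the follow-up "path exist, request accepts it" message is the idempotent double-check landing after the pool already exists. Client code does not need to special-case any of this; wrapping the first query in the SDK's retry helper rides out the race. A minimal sketch, assuming the public NYdb C++ SDK (header layout varies between SDK releases; the endpoint and database below are placeholders, not the test's values):

    // Sketch only, not the test's code: retry the first query until the
    // lazily created default resource pool is ready.
    // Header paths differ across SDK releases.
    #include <ydb/public/sdk/cpp/client/ydb_driver/driver.h>
    #include <ydb/public/sdk/cpp/client/ydb_table/table.h>

    int main() {
        auto config = NYdb::TDriverConfig()
            .SetEndpoint("localhost:2136")   // placeholder endpoint
            .SetDatabase("/Root");
        NYdb::TDriver driver(config);
        NYdb::NTable::TTableClient client(driver);

        // RetryOperationSync re-runs the callback on retryable statuses
        // (UNAVAILABLE, ABORTED, ...), which covers the pool-creation race.
        auto status = client.RetryOperationSync([](NYdb::NTable::TSession session) {
            return session.ExecuteDataQuery(
                "SELECT 1;",
                NYdb::NTable::TTxControl::BeginTx().CommitTx()).GetValueSync();
        });
        driver.Stop(true);
        return status.IsSuccess() ? 0 : 1;
    }
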
2025-03-31T16:35:29.920334Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:30.002897Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:30.159078Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:30.234806Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:32.501957Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013197768858420:2406], DatabaseId: /Root, PoolId: default, Failed to fe ... Id: [2:7488013197768858942:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:32.857343Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:35:32.866930Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488013197768858944:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:35:32.931988Z node 2 :TX_PROXY ERROR: Actor# [2:7488013197768858998:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:34.171490Z node 2 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976715672; 2025-03-31T16:35:34.171683Z node 2 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976715672; 2025-03-31T16:35:34.172460Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7488013206358793976:2496], Table: `/Root/TwoShard` ([72057594046644480:2:1]), SessionActorId: [2:7488013202063826584:2496]Got LOCKS BROKEN for table `/Root/TwoShard`. ShardID=72075186224037888, Sink=[2:7488013206358793976:2496].{
: Error: Operation is aborting because locks are not valid, code: 2001 }
2025-03-31T16:35:34.173113Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7488013206358793962:2496], SessionActorId: [2:7488013202063826584:2496], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/TwoShard`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[2:7488013202063826584:2496]. isRollback=0
2025-03-31T16:35:34.173375Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NjhmN2ExNzEtNzA5ZjJiYzUtZWY3ZDFkYTQtZmUyMzBmNzQ=, ActorId: [2:7488013202063826584:2496], ActorState: ExecuteState, TraceId: 01jqpgxw4katy180f9ztb7kc7a, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [2:7488013206358793963:2496] from: [2:7488013206358793962:2496]
2025-03-31T16:35:34.173454Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7488013206358793963:2496] TxId: 281474976715672. Ctx: { TraceId: 01jqpgxw4katy180f9ztb7kc7a, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjhmN2ExNzEtNzA5ZjJiYzUtZWY3ZDFkYTQtZmUyMzBmNzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/TwoShard`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-03-31T16:35:34.173654Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NjhmN2ExNzEtNzA5ZjJiYzUtZWY3ZDFkYTQtZmUyMzBmNzQ=, ActorId: [2:7488013202063826584:2496], ActorState: ExecuteState, TraceId: 01jqpgxw4katy180f9ztb7kc7a, Create QueryResponse for error on request, msg: 2025-03-31T16:35:34.331222Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488013184883954769:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:34.331311Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 15657, MsgBus: 28516 2025-03-31T16:35:35.084998Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488013211573838920:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:35.085092Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0013aa/r3tmp/tmp6r9hI8/pdisk_1.dat 2025-03-31T16:35:35.199549Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:35:35.232706Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:35.232797Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:35.236890Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15657, node 3 2025-03-31T16:35:35.284088Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:35.284123Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:35.284132Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:35.284260Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28516 TClient is connected to server localhost:28516 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
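Editorial note on the LOCKS BROKEN sequence above (node 2, TxId 281474976715672): this is the normal optimistic-concurrency outcome under serializable isolation. A concurrent commit touched `/Root/TwoShard` between this transaction's reads and its commit, so the read set was invalidated and the commit was rejected with ABORTED, issue code 2001, rather than silently breaking isolation. The expected client reaction is to re-run the whole transaction body. An explicit retry loop might look like the sketch below (NYdb C++ SDK assumed; the query is a placeholder, and production code would normally just use TTableClient::RetryOperationSync, which already treats ABORTED as retryable):

    // Sketch: re-run a serializable transaction when its locks are broken.
    // Session-creation error handling is elided for brevity.
    NYdb::TStatus RunWithRetry(NYdb::NTable::TTableClient& client, int maxAttempts = 5) {
        NYdb::TStatus last(NYdb::EStatus::STATUS_UNDEFINED, NYql::TIssues());
        for (int attempt = 0; attempt < maxAttempts; ++attempt) {
            auto session = client.GetSession().GetValueSync().GetSession();
            auto result = session.ExecuteDataQuery(
                "UPDATE `/Root/TwoShard` SET Value2 = Value2 + 1;",  // placeholder body
                NYdb::NTable::TTxControl::BeginTx(
                    NYdb::NTable::TTxSettings::SerializableRW()).CommitTx())
                .GetValueSync();
            if (result.GetStatus() != NYdb::EStatus::ABORTED) {
                return result;  // success, or a failure retrying cannot fix
            }
            last = result;  // locks invalidated by a concurrent commit: try again
        }
        return last;
    }
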
2025-03-31T16:35:35.776687Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:35:35.783781Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:35:35.801656Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:35.887376Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:35:36.053541Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-31T16:35:36.137655Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:38.605908Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013224458742599:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:38.606012Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:38.648552Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:35:38.689653Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:35:38.724456Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:35:38.756079Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:35:38.788000Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:35:38.826385Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:35:38.944360Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013224458743115:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:38.944447Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:38.944707Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013224458743120:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:38.952378Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:35:38.962581Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488013224458743122:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
2025-03-31T16:35:39.018193Z node 3 :TX_PROXY ERROR: Actor# [3:7488013228753710472:3446] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-31T16:35:40.087192Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488013211573838920:2060];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:35:40.087268Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
>> KqpSnapshotIsolation::TConflictWriteOltpNoSink
>> KqpQueryService::ExecStatsAst [GOOD]
>> KqpQueryService::DmlNoTx
>> KqpSinkMvcc::OlapReadOnlyTxCommitsOnConcurrentWrite
>> KqpQueryServiceScripts::TestFetchMoreThanLimit [GOOD]
>> KqpQueryServiceScripts::TestAstWithCompression
>> KqpQueryService::MixedReadQueryWithoutStreamLookup [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds [GOOD]
Test command err:
2025-03-31T16:33:44.403199Z :WriteRAW INFO: Random seed for debugging is 1743438824403173
2025-03-31T16:33:44.687352Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012734903674983:2071];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:33:44.687431Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
2025-03-31T16:33:44.724659Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012735547515227:2072];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:33:44.724699Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
2025-03-31T16:33:44.857407Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0011d3/r3tmp/tmpqveSMN/pdisk_1.dat
2025-03-31T16:33:44.871483Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created
2025-03-31T16:33:45.116801Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:33:45.118198Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:33:45.118299Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:33:45.120107Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:33:45.120176Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:33:45.125328Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2
2025-03-31T16:33:45.125512Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-31T16:33:45.126145Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: 
Connecting -> Connected TServer::EnableGrpc on GrpcPort 9490, node 1 2025-03-31T16:33:45.259044Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/0l5c/0011d3/r3tmp/yandex2x8Hmc.tmp 2025-03-31T16:33:45.259077Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/0l5c/0011d3/r3tmp/yandex2x8Hmc.tmp 2025-03-31T16:33:45.259841Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/0l5c/0011d3/r3tmp/yandex2x8Hmc.tmp 2025-03-31T16:33:45.260014Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:33:45.456586Z INFO: TTestServer started on Port 25018 GrpcPort 9490 TClient is connected to server localhost:25018 PQClient connected to localhost:9490 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:33:45.706840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-03-31T16:33:47.714204Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012747788577892:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:47.714404Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012747788577873:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:47.714533Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:47.718110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-03-31T16:33:47.721469Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012747788577938:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:47.721541Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:47.733875Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012747788577903:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-03-31T16:33:47.949904Z node 1 :TX_PROXY ERROR: Actor# [1:7488012747788577982:2673] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:33:47.980810Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488012747788578004:2348], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:33:47.983811Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7488012748432417454:2314], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:33:47.984082Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZjQwM2IxYS1hMzFmY2NlMy04NjcwMWIzNS1iOTc0MzgyYQ==, ActorId: [2:7488012748432417414:2308], ActorState: ExecuteState, TraceId: 01jqpgtm81ed41qfpybc75rejy, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:33:47.986486Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T16:33:48.013401Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODU1ZDQ1MGMtM2M4NzI4Yi03NWM1NTU4OS1hN2FiZmE1Nw==, ActorId: [1:7488012747788577870:2335], ActorState: ExecuteState, TraceId: 01jqpgtm58b1qp4947kqkw4ded, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:33:48.013743Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T16:33:48.042571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:33:48.163900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:33:48.288885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:9490", true, true, 1000); 2025-03-31T16:33:48.587384Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jqpgtmtp6tzjrbrdn7tad0wp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWY3OWZhN2QtMjc0Mzc4ZDctY2NlM2Q2ZTMtZDAzMjI3N2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. 
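Editorial note on the SCHEME_ERROR above: it is likewise expected on a fresh cluster. The cluster tracker queries `/Root/PQ/Config/V2/Cluster` before the table exists, the test then creates it, and the "=== Init DC:" step seeds it with the UPSERT shown. For illustration, issuing that same seeding statement through the SDK with typed parameters could look like the sketch below (NYdb C++ SDK assumed; the column types are guessed from the literals in the log, and `client` is a TTableClient as in the earlier sketches):

    // Sketch: parameterized version of the Init DC seeding UPSERT.
    NYdb::TParamsBuilder pb;
    pb.AddParam("$name").Utf8("dc1").Build();
    pb.AddParam("$balancer").Utf8("localhost:9490").Build();
    pb.AddParam("$local").Bool(true).Build();
    pb.AddParam("$enabled").Bool(true).Build();
    pb.AddParam("$weight").Int64(1000).Build();  // type assumed; the log only shows "1000"

    auto seedStatus = client.RetryOperationSync([&](NYdb::NTable::TSession session) {
        return session.ExecuteDataQuery(R"(
            --!syntax_v1
            DECLARE $name AS Utf8; DECLARE $balancer AS Utf8;
            DECLARE $local AS Bool; DECLARE $enabled AS Bool;
            DECLARE $weight AS Int64;
            UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight)
            VALUES ($name, $balancer, $local, $enabled, $weight);
        )",
        NYdb::NTable::TTxControl::BeginTx().CommitTx(),
        pb.Build()).GetValueSync();
    });
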
Subcribe to ClusterTracker from [1:7488012752083545695:2966] 2025-03-31T16:33:49.686187Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488012734903674983:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:49.686259Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:33:49.724559Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488012735547515227:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:49.724637Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-03-31T16:33:54.621916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 waiting... PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:9490 MetaRequest { ... sh, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-03-31T16:35:40.065145Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2025-03-31T16:35:40.065161Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7488013230762046799:2499] (SourceId=src, PreferedPartition=(NULL)) StartKqpSession 2025-03-31T16:35:40.068104Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7488013230762046799:2499] (SourceId=src, PreferedPartition=(NULL)) Select from the table 2025-03-31T16:35:40.276757Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7488013230762046799:2499] (SourceId=src, PreferedPartition=(NULL)) RequestPQRB 2025-03-31T16:35:40.277809Z node 15 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [15:7488013230762046852:2499] connected; active server actors: 1 2025-03-31T16:35:40.277980Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7488013230762046799:2499] (SourceId=src, PreferedPartition=(NULL)) Received partition 0 from PQRB for SourceId=src 2025-03-31T16:35:40.278009Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7488013230762046799:2499] (SourceId=src, PreferedPartition=(NULL)) Update the table 2025-03-31T16:35:40.278506Z node 15 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [15:7488013230762046852:2499] disconnected; active server actors: 1 2025-03-31T16:35:40.278532Z node 15 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [15:7488013230762046852:2499] disconnected no session 2025-03-31T16:35:40.410241Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7488013230762046799:2499] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2025-03-31T16:35:40.410298Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7488013230762046799:2499] 
(SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2025-03-31T16:35:40.410320Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7488013230762046799:2499] (SourceId=src, PreferedPartition=(NULL)) Start idle 2025-03-31T16:35:40.410371Z node 15 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2025-03-31T16:35:40.411542Z node 16 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [15:7488013230762046880:2499], now have 1 active actors on pipe 2025-03-31T16:35:40.411659Z node 15 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 16, Generation: 1 2025-03-31T16:35:40.411901Z node 16 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-03-31T16:35:40.411939Z node 16 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-03-31T16:35:40.412056Z node 16 :PERSQUEUE INFO: new Cookie src|6c8bea03-12f5f567-18881849-79a0cb6c_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2025-03-31T16:35:40.412171Z node 16 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2025-03-31T16:35:40.412274Z node 16 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-03-31T16:35:40.412871Z node 16 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-03-31T16:35:40.412920Z node 16 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-03-31T16:35:40.413081Z node 16 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-03-31T16:35:40.413439Z node 15 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|6c8bea03-12f5f567-18881849-79a0cb6c_0 2025-03-31T16:35:40.415267Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1743438940415 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-31T16:35:40.415406Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|6c8bea03-12f5f567-18881849-79a0cb6c_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2025-03-31T16:35:40.415773Z :INFO: [] MessageGroupId [src] SessionId [src|6c8bea03-12f5f567-18881849-79a0cb6c_0] Write session: close. 
Timeout = 0 ms 2025-03-31T16:35:40.415853Z :INFO: [] MessageGroupId [src] SessionId [src|6c8bea03-12f5f567-18881849-79a0cb6c_0] Write session will now close 2025-03-31T16:35:40.415928Z :DEBUG: [] MessageGroupId [src] SessionId [src|6c8bea03-12f5f567-18881849-79a0cb6c_0] Write session: aborting 2025-03-31T16:35:40.416473Z :INFO: [] MessageGroupId [src] SessionId [src|6c8bea03-12f5f567-18881849-79a0cb6c_0] Write session: gracefully shut down, all writes complete 2025-03-31T16:35:40.416527Z :DEBUG: [] MessageGroupId [src] SessionId [src|6c8bea03-12f5f567-18881849-79a0cb6c_0] Write session: destroy 2025-03-31T16:35:40.417995Z node 15 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|6c8bea03-12f5f567-18881849-79a0cb6c_0 grpc read done: success: 0 data: 2025-03-31T16:35:40.418026Z node 15 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|6c8bea03-12f5f567-18881849-79a0cb6c_0 grpc read failed 2025-03-31T16:35:40.418063Z node 15 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|6c8bea03-12f5f567-18881849-79a0cb6c_0 grpc closed 2025-03-31T16:35:40.418084Z node 15 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|6c8bea03-12f5f567-18881849-79a0cb6c_0 is DEAD 2025-03-31T16:35:40.418981Z node 15 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-03-31T16:35:40.419619Z node 16 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [15:7488013230762046880:2499] destroyed 2025-03-31T16:35:40.419696Z node 16 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 2025-03-31T16:35:40.457228Z :INFO: [/Root] [/Root] [4270837c-3ef2ab99-8180ec74-236eae2f] Starting read session 2025-03-31T16:35:40.457286Z :DEBUG: [/Root] [/Root] [4270837c-3ef2ab99-8180ec74-236eae2f] Starting cluster discovery 2025-03-31T16:35:40.457574Z :INFO: [/Root] [/Root] [4270837c-3ef2ab99-8180ec74-236eae2f] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:25611: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:25611
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:25611. " 2025-03-31T16:35:40.457631Z :DEBUG: [/Root] [/Root] [4270837c-3ef2ab99-8180ec74-236eae2f] Restart cluster discovery in 0.006880s 2025-03-31T16:35:40.465316Z :DEBUG: [/Root] [/Root] [4270837c-3ef2ab99-8180ec74-236eae2f] Starting cluster discovery 2025-03-31T16:35:40.465870Z :INFO: [/Root] [/Root] [4270837c-3ef2ab99-8180ec74-236eae2f] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:25611: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:25611
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:25611. " 2025-03-31T16:35:40.465943Z :DEBUG: [/Root] [/Root] [4270837c-3ef2ab99-8180ec74-236eae2f] Restart cluster discovery in 0.015798s 2025-03-31T16:35:40.483258Z :DEBUG: [/Root] [/Root] [4270837c-3ef2ab99-8180ec74-236eae2f] Starting cluster discovery 2025-03-31T16:35:40.483529Z :INFO: [/Root] [/Root] [4270837c-3ef2ab99-8180ec74-236eae2f] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:25611: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:25611
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:25611. " 2025-03-31T16:35:40.483588Z :DEBUG: [/Root] [/Root] [4270837c-3ef2ab99-8180ec74-236eae2f] Restart cluster discovery in 0.021706s 2025-03-31T16:35:40.506809Z :DEBUG: [/Root] [/Root] [4270837c-3ef2ab99-8180ec74-236eae2f] Starting cluster discovery 2025-03-31T16:35:40.507218Z :NOTICE: [/Root] [/Root] [4270837c-3ef2ab99-8180ec74-236eae2f] Aborting read session. Description: SessionClosed { Status: TRANSPORT_UNAVAILABLE Issues: "
: Error: Failed to discover clusters
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:25611: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:25611
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:25611. " } 2025-03-31T16:35:40.507456Z :NOTICE: [/Root] [/Root] [4270837c-3ef2ab99-8180ec74-236eae2f] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } SessionClosed { Status: TRANSPORT_UNAVAILABLE Issues: "
: Error: Failed to discover clusters
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:25611: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:25611
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:25611. " } 2025-03-31T16:35:40.507629Z :INFO: [/Root] [/Root] [4270837c-3ef2ab99-8180ec74-236eae2f] Closing read session. Close timeout: 0.000000s 2025-03-31T16:35:40.507762Z :NOTICE: [/Root] [/Root] [4270837c-3ef2ab99-8180ec74-236eae2f] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-03-31T16:35:40.615631Z node 15 :KQP_COMPUTE WARN: TxId: 281474976710684, task: 1, CA Id [15:7488013230762046898:2514]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 0 2025-03-31T16:35:40.648833Z node 15 :KQP_COMPUTE WARN: TxId: 281474976710684, task: 1, CA Id [15:7488013230762046898:2514]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-03-31T16:35:40.705238Z node 15 :KQP_COMPUTE WARN: TxId: 281474976710684, task: 1, CA Id [15:7488013230762046898:2514]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-03-31T16:35:40.779533Z node 15 :KQP_COMPUTE WARN: TxId: 281474976710684, task: 1, CA Id [15:7488013230762046898:2514]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-03-31T16:35:40.860961Z node 15 :KQP_COMPUTE WARN: TxId: 281474976710684, task: 1, CA Id [15:7488013230762046898:2514]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-03-31T16:35:40.997636Z node 15 :KQP_COMPUTE WARN: TxId: 281474976710684, task: 1, CA Id [15:7488013230762046898:2514]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-03-31T16:35:41.335322Z node 15 :KQP_COMPUTE WARN: TxId: 281474976710684, task: 1, CA Id [15:7488013230762046898:2514]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 >> KqpQueryService::ExecuteQueryMultiScalar [GOOD] >> KqpKv::ReadRows_ExternalBlobs-UseExtBlobsPrecharge [GOOD] >> KqpKv::ReadRows_Decimal >> AnalyzeDatashard::DropTableNavigateError >> KqpQueryServiceScripts::ExecuteScriptWithCancelAfter [GOOD] >> KqpQueryServiceScripts::ExecuteScriptWithCancelAfterAndTimeout ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::MixedReadQueryWithoutStreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 12471, MsgBus: 18799 2025-03-31T16:35:13.228058Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488013114168184912:2069];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:13.230865Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001432/r3tmp/tmpycuiiQ/pdisk_1.dat 2025-03-31T16:35:13.601180Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:35:13.620783Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:13.620864Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:13.622552Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12471, node 1 2025-03-31T16:35:13.719367Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:13.719397Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:13.719408Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:13.719547Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18799 TClient is connected to server localhost:18799 WaitRootIsUp 'Root'... 
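Editorial note, stepping back to the PersQueueSdkReadSessionTest output above: the read session retries cluster discovery with growing delays (0.0069 s, 0.0158 s, 0.0217 s) and then aborts with TRANSPORT_UNAVAILABLE once every endpoint has refused the connection, which is exactly what ClosesAfterFailedConnectionToCds asserts. The schedule is ordinary capped exponential backoff with jitter; a self-contained sketch of the pattern follows (the constants are illustrative, not the SDK's actual defaults):

    #include <algorithm>
    #include <chrono>
    #include <cstdint>
    #include <functional>
    #include <random>
    #include <thread>

    // Retry `op` with capped exponential backoff plus jitter.
    // Returns true as soon as one attempt succeeds.
    bool RetryWithBackoff(const std::function<bool()>& op,
                          int maxAttempts = 4,
                          std::chrono::milliseconds base = std::chrono::milliseconds(5),
                          std::chrono::milliseconds cap = std::chrono::milliseconds(1000)) {
        std::mt19937 rng(std::random_device{}());
        for (int attempt = 0; attempt < maxAttempts; ++attempt) {
            if (op()) {
                return true;  // discovery (or any retried call) succeeded
            }
            if (attempt + 1 == maxAttempts) {
                break;  // out of attempts: let the caller close the session
            }
            // Exponential growth, capped so a long outage cannot stall forever.
            auto backoff = std::min(cap, base * (1 << attempt));
            // Jitter spreads reconnects so restarted clients do not stampede.
            std::uniform_int_distribution<std::int64_t> jitter(0, backoff.count());
            std::this_thread::sleep_for(backoff + std::chrono::milliseconds(jitter(rng)));
        }
        return false;  // maps to the TRANSPORT_UNAVAILABLE session abort in the log
    }
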
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:14.205501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:14.233499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:14.365048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:14.509646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:14.580069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:16.273683Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013127053088568:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:16.273808Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:16.607054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:35:16.642212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:35:16.678756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:35:16.759860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:35:16.795615Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:35:16.866417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:35:16.914085Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013127053089084:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:16.914187Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:16.914442Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013127053089089:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:16.918461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:35:16.930349Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488013127053089091:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:35:17.024312Z node 1 :TX_PROXY ERROR: Actor# [1:7488013131348056442:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:18.228377Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488013114168184912:2069];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:18.228472Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:35:28.598927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:35:28.598962Z node 1 :IMPORT WARN: Table profiles were not loaded Trying to start YDB, gRPC: 1429, MsgBus: 63506 2025-03-31T16:35:29.720428Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488013184997078189:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:29.720527Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001432/r3tmp/tmphpBhjM/pdisk_1.dat 2025-03-31T16:35:29.862147Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:35:29.891346Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:29.891436Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:29.893751Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1429, node 2 2025-03-31T16:35:29.987648Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:29.987671Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:29.987678Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:29.987847Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63506 TClient is connected to server localhost:63506 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:30.489213Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:30.508041Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:30.568606Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:30.774077Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:30.859582Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESche ... ntroller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-31T16:35:41.169206Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-31T16:35:41.170597Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037948;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-31T16:35:41.177974Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037938;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-31T16:35:41.179690Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037902;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-31T16:35:41.183061Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037926;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-31T16:35:41.187290Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037916;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-31T16:35:41.190004Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037912;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-31T16:35:41.193397Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-31T16:35:41.194070Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-31T16:35:41.198676Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-31T16:35:41.201842Z node 3 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-31T16:35:41.204854Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-31T16:35:41.210023Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037944;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-31T16:35:41.210134Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037915;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-31T16:35:41.216266Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037946;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-31T16:35:41.217956Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037918;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-31T16:35:41.222624Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-31T16:35:41.225045Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037898;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-31T16:35:41.230865Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-31T16:35:41.232931Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-31T16:35:41.237293Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037913;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-31T16:35:41.239758Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-31T16:35:41.248927Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037945;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-31T16:35:41.250505Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-31T16:35:41.254699Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037947;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-31T16:35:41.255713Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037949;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-31T16:35:41.260361Z 
node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037925;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-31T16:35:41.261664Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037939;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-31T16:35:41.265223Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037951;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-31T16:35:41.267721Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-31T16:35:41.269823Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037911;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-31T16:35:41.273935Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037917;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-31T16:35:41.274773Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037921;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-31T16:35:41.279121Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037932;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-31T16:35:41.279786Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-31T16:35:41.283945Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-31T16:35:41.285386Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037937;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-31T16:35:41.288904Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037914;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-31T16:35:41.290541Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-31T16:35:41.293473Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037943;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-31T16:35:41.296444Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037922;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-31T16:35:41.297411Z node 3 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-31T16:35:41.302084Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037941;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-31T16:35:41.302455Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037934;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-31T16:35:41.307128Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037919;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-31T16:35:41.313515Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037924;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-31T16:35:41.572880Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=281474976710664;tx_id=281474976710664;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710664; 2025-03-31T16:35:41.573235Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;self_id=[3:7488013229635296162:2366];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037907;local_tx_no=13;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037903,72075186224037947;receive=72075186224037899; 2025-03-31T16:35:41.573988Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037947;tx_state=TTxProgressTx::Execute;tx_current=281474976710664;tx_id=281474976710664;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710664; 2025-03-31T16:35:41.574249Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;self_id=[3:7488013229635296162:2366];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037907;local_tx_no=15;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037903;receive=72075186224037947; 2025-03-31T16:35:41.574610Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=281474976710664;tx_id=281474976710664;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710664; 2025-03-31T16:35:41.575264Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=281474976710664;tx_id=281474976710664;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710664; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ExecuteQueryMultiScalar [GOOD] Test command err: Trying to start YDB, gRPC: 28008, MsgBus: 13037 2025-03-31T16:35:26.533344Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488013173119203154:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:26.533962Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001397/r3tmp/tmp9xXFxQ/pdisk_1.dat 2025-03-31T16:35:26.927395Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28008, node 1 
2025-03-31T16:35:26.940078Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:26.940222Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:26.942327Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:35:27.039680Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:27.039707Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:27.039717Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:27.039850Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13037 TClient is connected to server localhost:13037 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:27.552212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:27.569481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:27.689523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:27.868761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:27.934755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:29.784723Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013186004106797:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:29.784900Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:30.151543Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:35:30.181633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:35:30.211179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:35:30.240341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:35:30.269664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:35:30.302675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:35:30.358499Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013190299074604:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:30.358599Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:30.358772Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013190299074609:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:30.364240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:35:30.377836Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488013190299074611:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:35:30.456482Z node 1 :TX_PROXY ERROR: Actor# [1:7488013190299074665:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:31.533039Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488013173119203154:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:31.533137Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 28739, MsgBus: 25309 2025-03-31T16:35:32.571137Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488013198577505863:2068];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001397/r3tmp/tmpankpJB/pdisk_1.dat 2025-03-31T16:35:32.616769Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:35:32.693991Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:35:32.717759Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:32.717840Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:32.719168Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28739, node 2 2025-03-31T16:35:32.795838Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:32.795863Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:32.795872Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:32.796001Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25309 TClient is connected to server localhost:25309 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:35:33.210414Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:33.226391Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:33.288078Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:33.437619Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:33.509142Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:35.747746Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013211462409506:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:35.747830Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:35.792826Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:35:35.824467Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:35:35.853718Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:35:35.927836Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:35:35.964789Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:35:36.036963Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:35:36.086569Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013215757377320:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:36.086649Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013215757377325:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:36.086672Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:36.089611Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:35:36.098562Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488013215757377327:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:35:36.183854Z node 2 :TX_PROXY ERROR: Actor# [2:7488013215757377380:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 25775, MsgBus: 11119 2025-03-31T16:35:38.260027Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488013222801899060:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:38.260152Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001397/r3tmp/tmpKYfhPh/pdisk_1.dat 2025-03-31T16:35:38.420427Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:35:38.421473Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:38.421571Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:38.423061Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25775, node 3 2025-03-31T16:35:38.487245Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:38.487268Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:38.487278Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:38.487406Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11119 TClient is connected to server localhost:11119 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:39.026249Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:39.044292Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:35:39.108175Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:39.309639Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:39.388617Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:41.705309Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013235686802715:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:41.705440Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:41.733819Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:35:41.772305Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:35:41.809838Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:35:41.883152Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:35:41.960383Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:35:41.998305Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:35:42.098765Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013239981770531:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:42.098915Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:42.099237Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013239981770536:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:42.104327Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:35:42.120238Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488013239981770538:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:35:42.209620Z node 3 :TX_PROXY ERROR: Actor# [3:7488013239981770594:3453] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:43.260358Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488013222801899060:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:43.260433Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> YdbTableSplit::RenameTablesAndSplit [GOOD] >> KqpQueryService::FlowControllOnHugeRealTable-LongRow [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::RenameTablesAndSplit [GOOD] Test command err: 2025-03-31T16:34:12.998404Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012854602435664:2272];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:12.998685Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001739/r3tmp/tmpKw5lU3/pdisk_1.dat 2025-03-31T16:34:13.586229Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:34:13.616967Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:13.617084Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:13.620868Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29845, node 1 2025-03-31T16:34:13.716515Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:13.716535Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:13.716541Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:13.716659Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12604 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:34:14.096828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:16.260178Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012871782305702:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:16.260255Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:16.502804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Foo, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:34:16.504056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-31T16:34:16.504109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:34:16.507983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Foo 2025-03-31T16:34:16.595736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743438856642, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-31T16:34:16.679153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2025-03-31T16:34:16.729605Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012871782305925:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:16.729698Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:16.736170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTable Propose, path: /Root/Foo, pathId: , opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-31T16:34:16.736668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-31T16:34:16.736687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-31T16:34:16.738599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, database: /Root, subject: , status: StatusAccepted, operation: ALTER TABLE, path: /Root/Foo 2025-03-31T16:34:16.750326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743438856796, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-31T16:34:16.758710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 Fast forward 1m 2025-03-31T16:34:17.974983Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488012854602435664:2272];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:17.975067Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; partitions 2 Fast forward 1m partitions 2 Fast forward 1m partitions 2 Fast forward 1m 2025-03-31T16:34:26.673340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TSplitMerge Propose, tableStr: /Root/Foo, tableId: , opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:34:26.673817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-31T16:34:26.746564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2025-03-31T16:34:26.754998Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2025-03-31T16:34:26.755068Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-03-31T16:34:28.577917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:34:28.577958Z node 1 :IMPORT WARN: Table profiles were not loaded partitions 1 2025-03-31T16:34:28.825886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMoveTable Propose, from: /Root/Foo, to: /Root/Bar, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:34:28.826172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-31T16:34:28.832411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710660, database: /Root, subject: , status: StatusAccepted, operation: ALTER TABLE RENAME, dst path: /Root/Foo, dst path: /Root/Bar 2025-03-31T16:34:28.851497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743439348896, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-31T16:34:28.861595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976710660, done: 0, blocked: 1 
2025-03-31T16:34:28.875398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710660:0 2025-03-31T16:34:28.877447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 Fast forward 1m partitions 1 Fast forward 1m 2025-03-31T16:34:36.711593Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037890 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 6841088 rowCount 50000 cpuUsage 0.4795 2025-03-31T16:34:36.812090Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-03-31T16:34:36.812205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72057594046644480:3 data size 6841088 row count 50000 2025-03-31T16:34:36.812257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037890 maps to shardIdx: 72057594046644480:3 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], pathId map=Bar, is column=0, is olap=0 2025-03-31T16:34:36.812306Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037890 followerId=0, pathId 3: RowCount 50000, DataSize 6841088 2025-03-31T16:34:36.812413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Requesting full stats from datashard 72075186224037890 2025-03-31T16:34:36.813990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got partition histogram at tablet 72057594046644480 from datashard 72075186224037890 state: 'Ready' data size: 6841088 row count: 50000 2025-03-31T16:34:36.814130Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPartitionHistogram::Execute partition histogram at tablet 72057594046644480 from datashard 72075186224037890 for pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 6841088 rowCount 50000 dataSizeHistogram buckets 0 2025-03-31T16:34:36.814228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Failed to find proper split key (initially) for 'Split by size' of datashard 72075186224037890 2025-03-31T16:34:36.815589Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 partitions 1 Fast forward 1m partitions 1 Fast forward 1m 2025-03-31T16:34:41.712928Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037890 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 6841088 rowCount 50000 cpuUsage 0.1199 2025-03-31T16:34:41.813151Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-03-31T16:34:41.813259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72057594046644480:3 data size 6841088 row count 50000 2025-03-31T16:34:41.813307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037890 maps to shardIdx: 72057594046644480:3 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], pathId map=Bar, is column=0, is olap=0 2025-03-31T16:34:41.813327Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037890 followerId=0, pathId 3: RowCount 50000, DataSize 6841088 2025-03-31T16 ... 
node 1 :TX_DATASHARD INFO: 72075186224037892 Initiating switch from PreOffline to Offline state 2025-03-31T16:35:45.427650Z node 1 :TX_DATASHARD INFO: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2025-03-31T16:35:45.427717Z node 1 :TX_DATASHARD INFO: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2025-03-31T16:35:45.427821Z node 1 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037890, clientId# [1:7488013254034399549:2829], serverId# [1:7488013254034399557:4871], sessionId# [0:0:0] 2025-03-31T16:35:45.427913Z node 1 :TX_DATASHARD DEBUG: 72075186224037891 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-03-31T16:35:45.427981Z node 1 :TX_DATASHARD DEBUG: 72075186224037891 parts [ [72075186224037890:1:119:1:12288:11472:0] ] return ack processed 2025-03-31T16:35:45.428030Z node 1 :TX_DATASHARD DEBUG: 72075186224037891 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-03-31T16:35:45.429099Z node 1 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037890, clientId# [1:7488013254034399559:2831], serverId# [1:7488013254034399571:4883], sessionId# [0:0:0] 2025-03-31T16:35:45.429196Z node 1 :TX_DATASHARD INFO: 72075186224037891 Initiating switch from PreOffline to Offline state 2025-03-31T16:35:45.429301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7488012914731979306 RawX2: 4503603922340188 } TabletId: 72075186224037890 State: 4 2025-03-31T16:35:45.429359Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-03-31T16:35:45.429536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7488012914731979306 RawX2: 4503603922340188 } TabletId: 72075186224037890 State: 4 2025-03-31T16:35:45.429561Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-03-31T16:35:45.432181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-03-31T16:35:45.432270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-03-31T16:35:45.432276Z node 1 :TX_DATASHARD INFO: 72075186224037892 Reporting state Offline to schemeshard 72057594046644480 2025-03-31T16:35:45.432316Z node 1 :TX_DATASHARD INFO: 72075186224037892 Reporting state Offline to schemeshard 72057594046644480 2025-03-31T16:35:45.432385Z node 1 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2025-03-31T16:35:45.432414Z node 1 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2025-03-31T16:35:45.432997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7488013236854529868 RawX2: 4503603922340579 } TabletId: 72075186224037892 State: 4 2025-03-31T16:35:45.433039Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged 
DoExecute, datashard informs about state changing, datashardId: 72075186224037892, state: Offline, at schemeshard: 72057594046644480 2025-03-31T16:35:45.433212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7488013236854529868 RawX2: 4503603922340579 } TabletId: 72075186224037892 State: 4 2025-03-31T16:35:45.433234Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037892, state: Offline, at schemeshard: 72057594046644480 2025-03-31T16:35:45.433301Z node 1 :TX_DATASHARD INFO: 72075186224037891 Reporting state Offline to schemeshard 72057594046644480 2025-03-31T16:35:45.433338Z node 1 :TX_DATASHARD INFO: 72075186224037891 Reporting state Offline to schemeshard 72057594046644480 2025-03-31T16:35:45.435278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:5 hive 72057594037968897 at ss 72057594046644480 2025-03-31T16:35:45.435395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:5 hive 72057594037968897 at ss 72057594046644480 2025-03-31T16:35:45.435541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-03-31T16:35:45.435789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-03-31T16:35:45.436008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-03-31T16:35:45.437591Z node 1 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037892 state Offline 2025-03-31T16:35:45.437623Z node 1 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037892 state Offline 2025-03-31T16:35:45.437707Z node 1 :TX_DATASHARD INFO: OnTabletStop: 72075186224037890 reason = ReasonStop 2025-03-31T16:35:45.437743Z node 1 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037890, clientId# [1:7488012923321914266:3237], serverId# [1:7488012923321914267:3238], sessionId# [0:0:0] 2025-03-31T16:35:45.438222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7488013236854529870 RawX2: 4503603922340580 } TabletId: 72075186224037891 State: 4 2025-03-31T16:35:45.438288Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2025-03-31T16:35:45.438521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7488013236854529870 RawX2: 4503603922340580 } TabletId: 72075186224037891 State: 4 2025-03-31T16:35:45.438573Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2025-03-31T16:35:45.439855Z node 1 :TX_DATASHARD INFO: OnTabletDead: 72075186224037890 2025-03-31T16:35:45.439972Z node 1 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037890 2025-03-31T16:35:45.439972Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 
not found 2025-03-31T16:35:45.440696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-03-31T16:35:45.440737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-03-31T16:35:45.440797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-03-31T16:35:45.442620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-03-31T16:35:45.442694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-03-31T16:35:45.442784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2025-03-31T16:35:45.442971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-03-31T16:35:45.443179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2025-03-31T16:35:45.443892Z node 1 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2025-03-31T16:35:45.443918Z node 1 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2025-03-31T16:35:45.443945Z node 1 :TX_DATASHARD INFO: OnTabletStop: 72075186224037892 reason = ReasonStop 2025-03-31T16:35:45.444285Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037892 not found 2025-03-31T16:35:45.444646Z node 1 :TX_DATASHARD INFO: OnTabletDead: 72075186224037892 2025-03-31T16:35:45.444747Z node 1 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037892 2025-03-31T16:35:45.446837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:5 2025-03-31T16:35:45.446865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:5 tabletId 72075186224037892 2025-03-31T16:35:45.446920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:5 2025-03-31T16:35:45.447021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-03-31T16:35:45.447288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-03-31T16:35:45.447474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-03-31T16:35:45.447611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-03-31T16:35:45.447631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-03-31T16:35:45.447682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-03-31T16:35:45.449148Z node 1 :TX_DATASHARD INFO: OnTabletStop: 72075186224037891 
reason = ReasonStop 2025-03-31T16:35:45.449239Z node 1 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037891, clientId# [1:7488013254034399470:4797], serverId# [1:7488013254034399471:4798], sessionId# [0:0:0] 2025-03-31T16:35:45.449475Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037891 not found 2025-03-31T16:35:45.449542Z node 1 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037891, clientId# [1:7488013254034399529:4845], serverId# [1:7488013254034399532:4848], sessionId# [0:0:0] 2025-03-31T16:35:45.449811Z node 1 :TX_DATASHARD INFO: OnTabletDead: 72075186224037891 2025-03-31T16:35:45.449885Z node 1 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037891 2025-03-31T16:35:45.452414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2025-03-31T16:35:45.452453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-03-31T16:35:45.452489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2025-03-31T16:35:45.452518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 >> KqpLocksTricky::TestNoLocksIssue-withSink >> KqpSinkTx::LocksAbortOnCommit |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> KqpQueryServiceScripts::TestTruncatedByRows [GOOD] >> KqpQueryServiceScripts::TestTruncatedBySize >> KqpQueryService::TableSink_Oltp_Replace+UseSink [GOOD] >> KqpTx::SnapshotROInteractive1 [GOOD] |97.2%| [TA] $(B)/ydb/services/ydb/table_split_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::FlowControllOnHugeRealTable-LongRow [GOOD] Test command err: Trying to start YDB, gRPC: 6877, MsgBus: 20423 2025-03-31T16:35:26.199864Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488013171166754344:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:26.199915Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0013a5/r3tmp/tmp1xS5ug/pdisk_1.dat 2025-03-31T16:35:26.457728Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6877, node 1 2025-03-31T16:35:26.562166Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:26.562258Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:26.564448Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:35:26.566866Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:26.566884Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:26.566908Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:26.566998Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20423 TClient is connected to server localhost:20423 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:27.148115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:27.181880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:27.330639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:35:27.492349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:27.573769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:29.387004Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013184051658000:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:29.387176Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:29.733019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:35:29.802261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:35:29.831592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:35:29.864564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:35:29.899625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:35:29.933182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:35:30.029230Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013188346625812:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:30.029295Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013188346625817:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:30.029366Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:30.032418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:35:30.042790Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488013188346625819:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:35:30.098286Z node 1 :TX_PROXY ERROR: Actor# [1:7488013188346625872:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:31.203184Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488013171166754344:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:31.203284Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 30526, MsgBus: 27451 2025-03-31T16:35:32.591236Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488013198862839279:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:32.591385Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0013a5/r3tmp/tmpudt20Q/pdisk_1.dat 2025-03-31T16:35:32.712134Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:35:32.733000Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:32.733089Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:32.734712Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30526, node 2 2025-03-31T16:35:32.815966Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:32.815985Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:32.816005Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:32.816128Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27451 TClient is connected to server localhost:27451 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
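The NOT_FOUND / "Scheduled retry" / "path exist, request accepts it" sequence that repeats for every fresh node is the workload manager lazily creating the default resource pool on first use: the pool fetcher misses, a creator actor proposes the pool, and the follow-up transaction finds the path already present, which the request accepts. A hedged YQL sketch of declaring such a pool explicitly; the option names are an assumption about YDB's resource pool DDL and may differ by version:

    -- Assumed syntax: explicit creation of a resource pool similar to the
    -- `default` pool the workload manager auto-creates in these tests.
    CREATE RESOURCE POOL my_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10,
        QUEUE_SIZE = 100
    );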
2025-03-31T16:35:33.241798Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:33.258988Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:33.335358Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:35:33.493281Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:33.570548Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-31T16:35:35.696515Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] ... o unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:35:35.814038Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:35:35.846767Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:35:35.873837Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:35:35.910394Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:35:35.981292Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:35:36.027536Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013216042710737:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:36.027603Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:36.027831Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013216042710742:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:36.031796Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:35:36.041005Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488013216042710744:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:35:36.137535Z node 2 :TX_PROXY ERROR: Actor# [2:7488013216042710797:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:37.045219Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 10 2025-03-31T16:35:37.591336Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488013198862839279:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:37.591430Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 22366, MsgBus: 7711 2025-03-31T16:35:38.282401Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488013222457071626:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:38.282437Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0013a5/r3tmp/tmpKNrTYZ/pdisk_1.dat 2025-03-31T16:35:38.386553Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22366, node 3 2025-03-31T16:35:38.413726Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:38.413852Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:38.416691Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:35:38.443693Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:38.443730Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:38.443739Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:38.443847Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7711 TClient is connected to server localhost:7711 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:38.838068Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:38.845057Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:35:38.855639Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:38.935203Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:39.122242Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:39.206863Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:41.535468Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013235341975311:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:41.535580Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:41.581195Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:35:41.618878Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:35:41.663428Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:35:41.731576Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:35:41.766651Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:35:41.805038Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:35:41.885029Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013235341975825:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:41.885122Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:41.885470Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013235341975830:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:41.889159Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:35:41.903869Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488013235341975832:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:35:42.005234Z node 3 :TX_PROXY ERROR: Actor# [3:7488013239636943185:3449] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:42.973643Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:35:43.283174Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488013222457071626:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:43.283244Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 121 |97.2%| [TA] {RESULT} $(B)/ydb/services/ydb/table_split_ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpSinkLocks::TInvalidateOlap >> KqpSinkTx::OlapInvalidateOnError ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_Oltp_Replace+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 30187, MsgBus: 11835 2025-03-31T16:35:21.783522Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488013151415677399:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:21.783583Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0013c2/r3tmp/tmpjQIXV4/pdisk_1.dat 2025-03-31T16:35:22.188706Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30187, node 1 2025-03-31T16:35:22.254522Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:22.254637Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:22.256190Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:35:22.273290Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:22.273315Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:22.273343Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:22.273472Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11835 TClient is connected to server localhost:11835 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:22.797322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:24.883070Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013164300579953:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:24.883210Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:25.142591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:35:25.257361Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013168595547353:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:25.257442Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:25.257739Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013168595547358:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:25.261331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-31T16:35:25.271299Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488013168595547360:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-31T16:35:25.365801Z node 1 :TX_PROXY ERROR: Actor# [1:7488013168595547411:2395] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:25.793499Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488013168595547504:2373], status: PRECONDITION_FAILED, issues:
: Error: Type annotation, code: 1030
:2:29: Error: At function: KiWriteTable!
:2:29: Error: Missing key column in input: Col1 for table: /Root/DataShard, code: 2029 2025-03-31T16:35:25.793868Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YWI2NDllM2EtNTkzZmNkYjAtYWZiNmFmMDctMTExZmZkZTc=, ActorId: [1:7488013168595547502:2372], ActorState: ExecuteState, TraceId: 01jqpgxkyb1ghv2sp6y79yy89m, ReplyQueryCompileError, status PRECONDITION_FAILED remove tx with tx_id: WAIT_INDEXATION: 0 2025-03-31T16:35:26.784549Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488013151415677399:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:26.784635Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 21537, MsgBus: 11920 2025-03-31T16:35:31.649088Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488013192231239759:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:31.649206Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0013c2/r3tmp/tmpsNO5EO/pdisk_1.dat 2025-03-31T16:35:31.815194Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:35:31.842599Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:31.842700Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:31.843773Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21537, node 2 2025-03-31T16:35:31.908200Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:31.908236Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:31.908248Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:31.908370Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11920 TClient is connected to server localhost:11920 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
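The PRECONDITION_FAILED compile error above (code 2029, "Missing key column in input: Col1") is KQP rejecting a write whose input omits the target table's key column. A minimal YQL pair reproducing the shape of the failure, assuming /Root/DataShard is keyed on Col1; Col2 is invented for illustration:

    -- Rejected at compile time: the input lacks key column Col1.
    UPSERT INTO `/Root/DataShard` (Col2) VALUES (42u);

    -- Accepted: the primary key column is supplied.
    UPSERT INTO `/Root/DataShard` (Col1, Col2) VALUES (1u, 42u);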
2025-03-31T16:35:32.382689Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:34.930581Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013205116142301:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:34.930639Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:34.953258Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:35:35.000584Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013205116142403:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:35.000665Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:35.000741Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013205116142408:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:35.005472Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-31T16:35:35.018243Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488013205116142410:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-31T16:35:35.107201Z node 2 :TX_PROXY ERROR: Actor# [2:7488013209411109757:2394] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } WAIT_INDEXATION: 0 2025-03-31T16:35:36.646650Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488013192231239759:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:36.646729Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 19461, MsgBus: 24591 2025-03-31T16:35:41.667810Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488013238132594838:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:41.667925Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0013c2/r3tmp/tmpfUKGpU/pdisk_1.dat 2025-03-31T16:35:41.795025Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19461, node 3 2025-03-31T16:35:41.839138Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:41.839285Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:41.843921Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:35:41.873780Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:41.873806Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:41.873813Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:41.873937Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24591 TClient is connected to server localhost:24591 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:35:42.348389Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:42.358077Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:35:45.202615Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013255312464681:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:45.202712Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:45.230660Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:35:45.425220Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-31T16:35:45.650630Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013255312466057:2441], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:45.650690Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:45.650731Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013255312466061:2444], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:45.655969Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-03-31T16:35:45.669959Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488013255312466064:2445], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-03-31T16:35:45.755911Z node 3 :TX_PROXY ERROR: Actor# [3:7488013255312466130:3232] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:46.668621Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488013238132594838:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:46.668702Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpSnapshotIsolation::TReadOnlyOltp [FAIL] >> KqpSnapshotIsolation::TReadOnlyOltpNoSink >> KqpSnapshotIsolation::TSimpleOltp [FAIL] >> KqpSnapshotIsolation::TSimpleOlap ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::SnapshotROInteractive1 [GOOD] Test command err: Trying to start YDB, gRPC: 2522, MsgBus: 16853 2025-03-31T16:35:36.610216Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488013214258414868:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:36.610263Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012d0/r3tmp/tmp4UEncu/pdisk_1.dat 2025-03-31T16:35:37.020076Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2522, node 1 2025-03-31T16:35:37.056241Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:37.056371Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:37.058184Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:35:37.119061Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:37.119099Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:37.119105Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:37.119205Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16853 TClient is connected to server localhost:16853 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:37.656233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:37.675686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:37.816494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:37.985911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:38.052187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:39.764475Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013227143318532:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:39.764647Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:40.097476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:35:40.171516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:35:40.210565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:35:40.252303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:35:40.283106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:35:40.323588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:35:40.402609Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013231438286349:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:40.402683Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013231438286354:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:40.402697Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:40.406236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:35:40.418908Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488013231438286356:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:35:40.502020Z node 1 :TX_PROXY ERROR: Actor# [1:7488013231438286411:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:41.611303Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488013214258414868:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:41.611421Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:35:42.116576Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YWUyZGNmN2YtNGU3MjQ4YmItNDdkYzE5NzAtODUzYTM0NDE=, ActorId: [1:7488013235733254000:2496], ActorState: ExecuteState, TraceId: 01jqpgy3twf6pae4ft9sqt5bpn, Create QueryResponse for error on request, msg:
:3:25: Error: Operation 'Upsert' can't be performed in read only transaction, code: 2008 Trying to start YDB, gRPC: 15694, MsgBus: 30465 2025-03-31T16:35:42.877428Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488013239231254173:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:42.877616Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012d0/r3tmp/tmp1L41lA/pdisk_1.dat 2025-03-31T16:35:42.979520Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15694, node 2 2025-03-31T16:35:43.034803Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:43.034913Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:43.037858Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:35:43.095138Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:43.095161Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:43.095169Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:43.095295Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30465 TClient is connected to server localhost:30465 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:43.559827Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:43.567926Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:35:43.578245Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:43.656663Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
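The code-2008 error above ("Operation 'Upsert' can't be performed in read only transaction") is the expected rejection exercised by KqpTx::SnapshotROInteractive1: a write issued inside an interactive transaction that was opened read-only. The statement itself is ordinary YQL; the failure comes from the transaction mode chosen on the client side, so only the query shape is sketched here, with an illustrative table and values:

    -- Fine under a read-write transaction; rejected with code 2008 when the
    -- enclosing interactive transaction was begun in a read-only mode.
    UPSERT INTO `/Root/KeyValue` (Key, Value) VALUES (1u, "one");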
2025-03-31T16:35:43.819031Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:43.912471Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:46.197339Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013256411125138:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:46.197450Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:46.243507Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:35:46.277618Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:35:46.307030Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:35:46.338033Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:35:46.381225Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:35:46.418843Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:35:46.501897Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013256411125651:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:46.502030Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:46.502365Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013256411125657:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:46.506246Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:35:46.521226Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488013256411125659:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:35:46.591849Z node 2 :TX_PROXY ERROR: Actor# [2:7488013256411125713:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:47.877131Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488013239231254173:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:47.877216Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpQueryService::DmlNoTx [GOOD] |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite2 [GOOD] >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite3 >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeReqDistribution ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::DmlNoTx [GOOD] Test command err: Trying to start YDB, gRPC: 25582, MsgBus: 14086 2025-03-31T16:35:26.825276Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488013169928313243:2136];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:26.831715Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001396/r3tmp/tmpNaGZY9/pdisk_1.dat 2025-03-31T16:35:27.216874Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:35:27.248551Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:27.248642Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:27.251201Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25582, node 1 2025-03-31T16:35:27.303663Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:27.303683Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:27.303689Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:27.303797Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14086 TClient is connected to server localhost:14086 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:27.812341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:27.836832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:27.971681Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:28.155772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:28.232045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:30.111014Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013187108184121:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:30.111172Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:30.448947Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:35:30.518881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:35:30.548765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:35:30.583286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:35:30.611589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:35:30.646521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:35:30.698111Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013187108184632:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:30.698194Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:30.698251Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013187108184637:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:30.701564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:35:30.712262Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488013187108184639:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:35:30.812186Z node 1 :TX_PROXY ERROR: Actor# [1:7488013187108184693:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:31.824220Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488013169928313243:2136];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:31.824319Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 25037, MsgBus: 23645 2025-03-31T16:35:32.660277Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488013199302787836:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:32.664073Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001396/r3tmp/tmp6UOdZF/pdisk_1.dat 2025-03-31T16:35:32.753776Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25037, node 2 2025-03-31T16:35:32.799725Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:32.799824Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:32.803133Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:35:32.836067Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:32.836104Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:32.836112Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:32.836231Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23645 TClient is connected to server localhost:23645 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:35:33.275473Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:33.284178Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:33.361250Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:35:33.528469Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-31T16:35:33.605440Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:35.508257Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService ... opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:35:41.557935Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:35:41.645965Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013238020048826:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:41.646072Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:41.646277Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013238020048831:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:41.650593Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:35:41.665207Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488013238020048833:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:35:41.754710Z node 3 :TX_PROXY ERROR: Actor# [3:7488013238020048889:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:42.851501Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7488013242315016495:2503], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject, At function: SqlProjectItem
:1:8: Error: At function: Member
:1:8: Error: Member not found: test_ast_column 2025-03-31T16:35:42.853384Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YjlkMWQ5YWItNWE2ZWZhZjQtOGVjMTBiZTAtZGEzYzk1MzQ=, ActorId: [3:7488013242315016493:2502], ActorState: ExecuteState, TraceId: 01jqpgy4jv3w5z6hqdzspp4c49, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-03-31T16:35:42.957562Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7488013242315016528:2507], TxId: 281474976715672, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jqpgy4mecsdncewjs0zkqmtw. SessionId : ydb://session/3?node_id=3&id=NDY5MzQ1YzUtNGYyZDRmMGItY2IyY2UwZTEtYjRmZDlmZDY=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED DEFAULT_ERROR: {
: Error: Terminate was called, reason(43):
:1:8: Failed to unwrap empty optional }. 2025-03-31T16:35:42.959407Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NDY5MzQ1YzUtNGYyZDRmMGItY2IyY2UwZTEtYjRmZDlmZDY=, ActorId: [3:7488013242315016521:2507], ActorState: ExecuteState, TraceId: 01jqpgy4mecsdncewjs0zkqmtw, Create QueryResponse for error on request, msg: 2025-03-31T16:35:42.970095Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488013220840177363:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:42.970171Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 3854, MsgBus: 10881 2025-03-31T16:35:44.031662Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488013247838270220:2125];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:44.031718Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001396/r3tmp/tmpGgy9GE/pdisk_1.dat 2025-03-31T16:35:44.205607Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:35:44.222221Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:44.222314Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:44.225675Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3854, node 4 2025-03-31T16:35:44.287707Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:44.287733Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:44.287740Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:44.287868Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10881 TClient is connected to server localhost:10881 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:44.861577Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:35:44.883684Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:44.958213Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:45.153577Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:45.233951Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:47.828003Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488013260723173817:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:47.828089Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:47.865821Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:35:47.916672Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:35:47.957558Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:35:48.032399Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:35:48.073241Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:35:48.148475Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:35:48.304432Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488013265018141639:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:48.304512Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:48.304669Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488013265018141644:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:48.309247Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:35:48.321215Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7488013265018141646:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:35:48.389726Z node 4 :TX_PROXY ERROR: Actor# [4:7488013265018141702:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:49.031686Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7488013247838270220:2125];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:49.031776Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> AnalyzeColumnshard::AnalyzeMultiOperationId >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite1-withSink >> TraverseDatashard::TraverseOneTable [GOOD] >> KqpService::SwitchCache-UseCache [GOOD] >> KqpService::ToDictCache+UseCache >> KqpQueryServiceScripts::TestAstWithCompression [GOOD] >> KqpKv::ReadRows_Decimal [GOOD] >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseDatashard::TraverseOneTable [GOOD] Test command err: 2025-03-31T16:35:41.928587Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:35:41.929016Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:35:41.929177Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000f07/r3tmp/tmpgWCnM9/pdisk_1.dat 2025-03-31T16:35:42.387920Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6684, node 1 2025-03-31T16:35:42.643649Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:42.643714Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:42.643753Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:42.644187Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:35:42.646450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:35:42.734201Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:42.734361Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:42.748884Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2130 2025-03-31T16:35:43.325875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:35:46.672138Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-31T16:35:46.711778Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:46.711896Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:46.747560Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:35:46.750744Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:35:46.997552Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:35:46.998184Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:35:46.998784Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:35:46.998967Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:35:46.999045Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:35:47.003563Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:35:47.003687Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:35:47.003775Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:35:47.003867Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:35:47.161509Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:47.161642Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:47.198798Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:35:47.399997Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:35:47.457010Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-31T16:35:47.457101Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-31T16:35:47.498642Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-31T16:35:47.498965Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-31T16:35:47.499232Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-31T16:35:47.499310Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-31T16:35:47.499373Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-31T16:35:47.499444Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-31T16:35:47.499522Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-31T16:35:47.499587Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-31T16:35:47.500101Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-31T16:35:47.531664Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-31T16:35:47.531785Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1872:2599], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-31T16:35:47.542202Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1881:2606] 2025-03-31T16:35:47.550258Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1913:2622] 2025-03-31T16:35:47.551030Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1913:2622], schemeshard id = 72075186224037897 2025-03-31T16:35:47.558347Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-31T16:35:47.579120Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-31T16:35:47.579183Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-31T16:35:47.579281Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-31T16:35:47.597811Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-31T16:35:47.663367Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-31T16:35:47.663516Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-31T16:35:47.864993Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-31T16:35:48.062966Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-31T16:35:48.195654Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-31T16:35:49.368458Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2235:3069], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:49.368588Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:49.387747Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-31T16:35:49.893481Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2536:3118], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:49.893676Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:49.895007Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2541:3122]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:35:49.895264Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-31T16:35:49.895390Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2543:3124] 2025-03-31T16:35:49.895463Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2543:3124] 2025-03-31T16:35:49.896072Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2544:2991] 2025-03-31T16:35:49.896345Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2543:3124], server id = [2:2544:2991], tablet id = 72075186224037894, status = OK 2025-03-31T16:35:49.896593Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:2544:2991], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-03-31T16:35:49.896668Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-03-31T16:35:49.896917Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-31T16:35:49.897004Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2541:3122], StatRequests.size() = 1 2025-03-31T16:35:49.918904Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2548:3128], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:49.919025Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:49.919459Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2553:3133], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:49.926656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-03-31T16:35:50.092094Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-03-31T16:35:50.092175Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-03-31T16:35:50.204377Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2543:3124], schemeshard count = 1 2025-03-31T16:35:50.600076Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2555:3135], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-03-31T16:35:50.788311Z node 1 :TX_PROXY ERROR: Actor# [1:2678:3210] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:50.799004Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2701:3226]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:35:50.799245Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-31T16:35:50.799293Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:2701:3226], StatRequests.size() = 1 2025-03-31T16:35:50.852993Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqpgybemd6rc4bf84kxpwed1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzllMTRmZmItMzNmNGZkNDQtNDQxNDUxYWEtNGVmNWQxMWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:35:50.930630Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:2750:3044]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-03-31T16:35:50.933343Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-31T16:35:50.933415Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-03-31T16:35:50.934043Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-31T16:35:50.934104Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-03-31T16:35:50.934157Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-03-31T16:35:50.980169Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2025-03-31T16:35:50.980508Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 1 >> KqpSinkLocks::DifferentKeyUpdate [GOOD] >> KqpSinkLocks::DifferentKeyUpdateOlap >> KqpSnapshotIsolation::TConflictWriteOltpNoSink [FAIL] >> KqpSnapshotIsolation::TReadOnlyOlap >> KqpSinkMvcc::OltpMultiSinksNoSinks [GOOD] >> KqpSinkMvcc::OltpMultiSinks ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::TestAstWithCompression [GOOD] Test command err: Trying to start YDB, gRPC: 7568, MsgBus: 25893 2025-03-31T16:35:27.965807Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488013177197467820:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:27.965958Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001385/r3tmp/tmpWL0XnU/pdisk_1.dat 2025-03-31T16:35:28.357868Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:35:28.412409Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:28.412489Z node 1 
:HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 7568, node 1 2025-03-31T16:35:28.414019Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:35:28.463375Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:28.463398Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:28.463408Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:28.463532Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25893 TClient is connected to server localhost:25893 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:28.989647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:29.004038Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:35:29.013626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:29.129933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:29.277194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:29.355264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:31.121069Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013194377338795:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:31.121402Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:31.447759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:35:31.479672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:35:31.510287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:35:31.538937Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:35:31.572851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:35:31.606882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:35:31.703836Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013194377339308:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:31.703927Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:31.704081Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013194377339313:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:31.707993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:35:31.719546Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488013194377339315:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:35:31.783426Z node 1 :TX_PROXY ERROR: Actor# [1:7488013194377339369:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:32.609456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:35:32.611403Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:35:32.612911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T16:35:32.991253Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488013177197467820:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:32.991478Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:35:34.729367Z node 1 :RPC_REQUEST WARN: Client lost 2025-03-31T16:35:34.731728Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438934762, txId: 281474976710704] shutting down 2025-03-31T16:35:34.950853Z node 1 :RPC_REQUEST WARN: Client lost 2025-03-31T16:35:34.951151Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7488013207262242825:2830] TxId: 281474976710708. Ctx: { TraceId: 01jqpgxwps6srzs6wymcd08sfv, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODdlZjQ5OTItYjk1ZGNlZWQtMmRiYWQ5YzEtNmFmMWUwZjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2025-03-31T16:35:34.952569Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODdlZjQ5OTItYjk1ZGNlZWQtMmRiYWQ5YzEtNmFmMWUwZjg=, ActorId: [1:7488013207262242803:2830], ActorState: ExecuteState, TraceId: 01jqpgxwps6srzs6wymcd08sfv, Create QueryResponse for error on request, msg: 2025-03-31T16:35:34.953013Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438934993, txId: 281474976710707] shutting down 2025-03-31T16:35:34.953381Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488013207262242832:2835], TxId: 281474976710708, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ODdlZjQ5OTItYjk1ZGNlZWQtMmRiYWQ5YzEtNmFmMWUwZjg=. TraceId : 01jqpgxwps6srzs6wymcd08sfv. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7488013207262242825:2830], status: ABORTED, reason: {
: Error: Terminate execution } 2025-03-31T16:35:35.169538Z node 1 :RPC_REQUEST WARN: Client lost 2025-03-31T16:35:35.169753Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7488013211557210193:2846] TxId: 281474976710712. Ctx: { TraceId: 01jqpgxwxq90d367zgmvx6sawp, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2JmYzI4OGItYWZlMjcwMzktYzgwNjdmMGMtZmIwMWI3MzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2025-03-31T16:35:35.170826Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=N2JmYzI4OGItYWZlMjcwMzktYzgwNjdmMGMtZmIwMWI3MzU=, ActorId: [1:7488013207262242870:2846], ActorState: ExecuteState, TraceId: 01jqpgxwxq90d367zgmvx6sawp, Create QueryResponse for error on request, msg: 2025-03-31T16:35:35.171315Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438935203, txId: 281474976710711] shutting down 2025-03-31T16:35:35.171432Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488013211557210199:2853], TxId: 281474976710712, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=N2JmYzI4OGItYWZlMjcwMzktYzgwNjdmMGMtZmIwMWI3MzU=. TraceId : 01jqpgxwxq90d367zgmvx6sawp. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7488013211557210193:2846], status: ABORTED, reason: {
: Error: Terminate execution } Trying to start YDB, gRPC: 17037, MsgBus: 9729 2025-03-31T16:35:36.095 ... schemeshard: 72057594046644480 2025-03-31T16:35:39.543955Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:35:39.602125Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013229836462259:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:39.602221Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:39.602387Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013229836462264:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:39.607186Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:35:39.620302Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488013229836462266:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:35:39.710059Z node 2 :TX_PROXY ERROR: Actor# [2:7488013229836462321:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:40.566828Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-31T16:35:40.568506Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:35:40.569948Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-31T16:35:41.317483Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488013216951558086:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:41.317598Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:35:43.294879Z node 2 :RPC_REQUEST WARN: Client lost 2025-03-31T16:35:43.295964Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438943330, txId: 281474976715712] shutting down Trying to start YDB, gRPC: 17557, MsgBus: 23719 2025-03-31T16:35:44.334548Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488013248191767897:2139];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:44.343365Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001385/r3tmp/tmpLv7qxB/pdisk_1.dat 2025-03-31T16:35:44.557804Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:35:44.561289Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:44.561382Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:44.562838Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17557, node 3 2025-03-31T16:35:44.613295Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:44.613323Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:44.613338Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:44.613474Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23719 TClient is connected to server localhost:23719 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:45.148513Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:45.167839Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:45.253313Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:45.486877Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:45.575273Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:48.152130Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013265371638766:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:48.152217Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:48.204650Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:35:48.269359Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:35:48.347011Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:35:48.389013Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:35:48.432687Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:35:48.478157Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:35:48.529019Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013265371639283:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:48.529130Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:48.529225Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013265371639288:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:48.533337Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:35:48.548153Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488013265371639290:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:35:48.604708Z node 3 :TX_PROXY ERROR: Actor# [3:7488013265371639344:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:49.328601Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488013248191767897:2139];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:49.328684Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:35:49.551978Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-31T16:35:49.554331Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:35:49.555720Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpKv::ReadRows_Decimal [GOOD] Test command err: Trying to start YDB, gRPC: 62546, MsgBus: 11190 2025-03-31T16:32:18.348935Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012362318942482:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:18.348989Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a6e/r3tmp/tmpD71EuY/pdisk_1.dat 2025-03-31T16:32:18.690040Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:32:18.732658Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:18.732781Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:32:18.734205Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62546, node 1 2025-03-31T16:32:18.785504Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:32:18.785549Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:32:18.785555Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:32:18.785670Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11190 TClient is connected to server localhost:11190 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:32:19.329390Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:19.361850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:19.527506Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:19.672655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:19.750940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:32:21.418747Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012375203846154:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:21.418863Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:21.719815Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:32:21.790183Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:32:21.830025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:32:21.859822Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:32:21.895214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:32:21.935851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:32:22.018403Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012379498813967:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:22.018493Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:22.018568Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012379498813972:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:22.022282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:32:22.031851Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012379498813974:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:32:22.110409Z node 1 :TX_PROXY ERROR: Actor# [1:7488012379498814029:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:32:23.011232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:32:23.036738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:32:23.066198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T16:32:23.097489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-31T16:32:23.168781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-31T16:32:23.198557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-31T16:32:23.228075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-31T16:32:23.255014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-31T16:32:23.281396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-31T16:32:23.311528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-31T16:32:23.349205Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488012362318942482:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:23.349278Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 4721, MsgBus: 23696 2025-03-31T16:32:25.367884Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012392573587724:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:32:25.367938Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 
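The node 1 and node 2 bootstraps above show the recurring workload-manager pattern in these logs: the first pool fetch returns NOT_FOUND, TPoolCreatorActor creates the pool, a concurrent create gets "path exist, request accepts it", and a scheduled retry doublechecks until the path is visible. Below is a minimal self-contained sketch of that create-then-doublecheck loop; it is illustrative only, and CreatePool/DescribePool are toy stand-ins, not YDB's actual scheme API.

#include <chrono>
#include <iostream>
#include <thread>

enum class EStatus { Success, NotFound, AlreadyExists };

// Toy stand-ins for the scheme operations in the log: the pool path becomes
// visible only once the create "transaction" lands, ~120ms after CreatePool.
static std::chrono::steady_clock::time_point g_createdAt =
    std::chrono::steady_clock::time_point::max();

EStatus CreatePool() {
    if (g_createdAt != std::chrono::steady_clock::time_point::max())
        return EStatus::AlreadyExists;  // "path exist, request accepts it"
    g_createdAt = std::chrono::steady_clock::now() + std::chrono::milliseconds(120);
    return EStatus::Success;
}

EStatus DescribePool() {
    return std::chrono::steady_clock::now() >= g_createdAt
               ? EStatus::Success : EStatus::NotFound;
}

// "Scheduled retry for error: Transaction ... completed, doublechecking":
// after issuing the create, poll until the path is actually visible.
bool EnsureDefaultPool(int maxRetries = 5) {
    CreatePool();
    auto delay = std::chrono::milliseconds(50);
    for (int i = 0; i < maxRetries; ++i) {
        if (DescribePool() == EStatus::Success)
            return true;  // doublecheck passed
        std::this_thread::sleep_for(delay);
        delay *= 2;  // back off between retries
    }
    return false;
}

int main() {
    std::cout << (EnsureDefaultPool() ? "pool ready\n" : "gave up\n");
}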
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a6e/r3tmp/tmp4coaUT/pdisk_1.dat 2025-03-31T16:32:25.451037Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4721, node 2 2025-03-31T16:32:25.497099Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:25.497200Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:32:25.498677Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:32:25.517332Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:32:25.517356Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:32:25.517370Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:32:25.517477Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23696 TClient is connected to server localhost:23696 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDesc ... ], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 12101, MsgBus: 25887 2025-03-31T16:35:26.816563Z node 18 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[18:7488013172248592476:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:26.816705Z node 18 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a6e/r3tmp/tmp1YFzUr/pdisk_1.dat 2025-03-31T16:35:27.072154Z node 18 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:35:27.121478Z node 18 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:27.121648Z node 18 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:27.123878Z node 18 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12101, node 18 2025-03-31T16:35:27.199104Z node 18 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:27.199144Z node 18 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:27.199159Z node 18 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:27.199369Z node 18 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25887 TClient is connected to server localhost:25887 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:28.510008Z node 18 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:31.817059Z node 18 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[18:7488013172248592476:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:31.817192Z node 18 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:35:33.786917Z node 18 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 25254, MsgBus: 17105 2025-03-31T16:35:35.924639Z node 19 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[19:7488013209988140268:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:35.924878Z node 19 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a6e/r3tmp/tmpL6V8Xv/pdisk_1.dat 2025-03-31T16:35:36.124593Z node 19 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:35:36.171786Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:36.171944Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:36.175066Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25254, node 19 2025-03-31T16:35:36.267944Z node 19 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:36.267978Z node 19 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:36.267994Z node 19 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:36.268208Z node 19 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17105 TClient is connected to server localhost:17105 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:37.365370Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:40.925019Z node 19 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[19:7488013209988140268:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:40.925151Z node 19 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:35:42.933202Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 24528, MsgBus: 2668 2025-03-31T16:35:44.945184Z node 20 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[20:7488013247612942173:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:44.945286Z node 20 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a6e/r3tmp/tmpdQi6a8/pdisk_1.dat 2025-03-31T16:35:45.144288Z node 20 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:35:45.191057Z node 20 :HIVE WARN: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:45.191245Z node 20 :HIVE WARN: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:45.192785Z node 20 :HIVE WARN: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24528, node 20 2025-03-31T16:35:45.279957Z node 20 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:45.279993Z node 20 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:45.280008Z node 20 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:45.280233Z node 20 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2668 TClient is connected to server localhost:2668 WaitRootIsUp 'Root'... 
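The node 20 run that follows closes out KqpKv::ReadRows_Decimal with deliberate type-mismatch probes: the TReadRowsRPC errors further below reject Uint64 and Decimal(35,10) values for a Decimal(22,9) key column. A minimal sketch of the exact-match rule those errors reflect, under a simplified type model that is not YDB's actual NScheme implementation:

#include <cstdint>
#include <iostream>
#include <optional>
#include <string>

struct TDecimalType {
    uint8_t Precision;
    uint8_t Scale;
};

bool operator==(const TDecimalType& a, const TDecimalType& b) {
    return a.Precision == b.Precision && a.Scale == b.Scale;
}

// A column type is either a named primitive or a parameterized decimal.
struct TColumnType {
    std::string Primitive;                // e.g. "Uint64"; empty for decimals
    std::optional<TDecimalType> Decimal;  // e.g. {22, 9}
};

std::string Describe(const TColumnType& t) {
    if (t.Decimal)
        return "Decimal(" + std::to_string(t.Decimal->Precision) + "," +
               std::to_string(t.Decimal->Scale) + ")";
    return t.Primitive;
}

// Decimal parameters must match exactly: Decimal(35,10) is as incompatible
// with Decimal(22,9) as Uint64 is.
std::optional<std::string> CheckKeyType(const TColumnType& got,
                                        const TColumnType& expected,
                                        const std::string& column) {
    if (got.Primitive == expected.Primitive && got.Decimal == expected.Decimal)
        return std::nullopt;
    return "Type mismatch, got type " + Describe(got) + " for column " + column +
           ", but expected " + Describe(expected);
}

int main() {
    const TColumnType expected{"", TDecimalType{22, 9}};
    if (auto err = CheckKeyType({"Uint64", std::nullopt}, expected, "Key22"))
        std::cout << *err << "\n";  // matches the first TReadRowsRPC error below
    if (auto err = CheckKeyType({"", TDecimalType{35, 10}}, expected, "Key22"))
        std::cout << *err << "\n";  // matches the second
}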
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:46.450592Z node 20 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:46.460622Z node 20 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:35:49.946585Z node 20 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[20:7488013247612942173:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:49.946712Z node 20 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:35:51.974729Z node 20 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [20:7488013277677713921:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:51.975032Z node 20 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:51.996420Z node 20 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:35:52.157939Z node 20 :RPC_REQUEST ERROR: TReadRowsRPC ReplyWithError: Type mismatch, got type Uint64 for column Key22, but expected Decimal(22,9) 2025-03-31T16:35:52.175598Z node 20 :RPC_REQUEST ERROR: TReadRowsRPC ReplyWithError: Type mismatch, got type Decimal(35,10) for column Key22, but expected Decimal(22,9) >> AnalyzeColumnshard::AnalyzeRebootSaBeforeReqDistribution |97.2%| [TA] $(B)/ydb/core/kqp/ut/opt/test-results/unittest/{meta.json ... results_accumulator.log} |97.2%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/opt/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpTx::RollbackByIdle >> CompressExecutor::TestExecutorMemUsage [GOOD] >> KqpSnapshotIsolation::TReadOnlyOltpNoSink [FAIL] >> KqpSinkMvcc::SnapshotExpiration >> TraverseDatashard::TraverseTwoTablesServerless [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseDatashard::TraverseTwoTablesServerless [GOOD] Test command err: 2025-03-31T16:35:44.534000Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:35:44.534275Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:35:44.534370Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000eb5/r3tmp/tmpzjlpQh/pdisk_1.dat 2025-03-31T16:35:44.944307Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11552, node 1 2025-03-31T16:35:45.269610Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:45.269676Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:45.269709Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:45.270155Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:35:45.274258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:35:45.374803Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:45.374940Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:45.392707Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6668 2025-03-31T16:35:45.985851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:35:49.336454Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-31T16:35:49.368272Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:49.368418Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:49.400954Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:35:49.402972Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:35:49.644684Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:35:49.645214Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:35:49.645769Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:35:49.645895Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:35:49.646124Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:35:49.646213Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:35:49.646284Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:35:49.646348Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:35:49.646417Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:35:49.820048Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:49.820174Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:49.835641Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:35:49.979746Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:35:50.030162Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-31T16:35:50.030274Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-31T16:35:50.063074Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-31T16:35:50.064447Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-31T16:35:50.064692Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-31T16:35:50.064763Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-31T16:35:50.064861Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-31T16:35:50.064917Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-31T16:35:50.065004Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-31T16:35:50.065068Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-31T16:35:50.065602Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-31T16:35:50.104127Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-31T16:35:50.104274Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1872:2596], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-31T16:35:50.111578Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1883:2604] 2025-03-31T16:35:50.119913Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1915:2621] 2025-03-31T16:35:50.120036Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1915:2621], schemeshard id = 72075186224037897 2025-03-31T16:35:50.126031Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-03-31T16:35:50.154823Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-31T16:35:50.154890Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-31T16:35:50.155045Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-03-31T16:35:50.207357Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-31T16:35:50.214678Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-31T16:35:50.214799Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-31T16:35:50.424410Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-31T16:35:50.593835Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-31T16:35:50.693846Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-31T16:35:51.566490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-31T16:35:52.417898Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:35:52.623723Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2025-03-31T16:35:52.623802Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037899 2025-03-31T16:35:52.623891Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2586:2943], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037899 2025-03-31T16:35:52.624696Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2587:2944] 2025-03-31T16:35:52.624881Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2587:2944], schemeshard id = 72075186224037899 2025-03-31T16:35:53.940881Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2715:3240], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:53.941073Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:53.959617Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72075186224037899 2025-03-31T16:35:54.364083Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3024:3290], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:54.364262Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:54.410227Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:3029:3294]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:35:54.410584Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-31T16:35:54.410844Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2025-03-31T16:35:54.410940Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:3032:3297] 2025-03-31T16:35:54.411004Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:3032:3297] 2025-03-31T16:35:54.411669Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:3033:3184] 2025-03-31T16:35:54.411944Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:3032:3297], server id = [2:3033:3184], tablet id = 72075186224037894, status = OK 2025-03-31T16:35:54.412140Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:3033:3184], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-03-31T16:35:54.412189Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-03-31T16:35:54.412380Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-31T16:35:54.412438Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:3029:3294], StatRequests.size() = 1 2025-03-31T16:35:54.429904Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3037:3301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:54.430057Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:54.430405Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3042:3306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:54.437635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2025-03-31T16:35:54.599524Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-03-31T16:35:54.599621Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-03-31T16:35:54.666686Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:3032:3297], schemeshard count = 1 2025-03-31T16:35:55.042627Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:3044:3308], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-03-31T16:35:55.273517Z node 1 :TX_PROXY ERROR: Actor# [1:3169:3379] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:55.284577Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:3192:3395]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:35:55.284695Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-31T16:35:55.284730Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:3192:3395], StatRequests.size() = 1 2025-03-31T16:35:55.333833Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jqpgyfvt51b1vctp0w6acj7b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yzg2YzI0ZDQtMjgwNjkzZmQtYjU5NjE5ODUtYjJhMGRkYjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:35:55.444980Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72075186224037899 2025-03-31T16:35:55.846509Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [1:3507:3455]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:35:55.846742Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-03-31T16:35:55.846789Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 3, ReplyToActorId = [1:3507:3455], StatRequests.size() = 1 2025-03-31T16:35:55.873395Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [1:3516:3464]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:35:55.873623Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2025-03-31T16:35:55.873673Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 4, ReplyToActorId = [1:3516:3464], StatRequests.size() = 1 2025-03-31T16:35:55.918227Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jqpgyh9qdj4qz7dq8g3r1t1f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTY1MzE0YjItMzgxYWJhNGItYzVkNzk5NmUtZWQyZjk5Y2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-31T16:35:56.003917Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:3561:3431]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-03-31T16:35:56.007907Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-31T16:35:56.007981Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-03-31T16:35:56.008391Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-31T16:35:56.008468Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2025-03-31T16:35:56.008544Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 2] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-03-31T16:35:56.029082Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2025-03-31T16:35:56.029377Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 1 2025-03-31T16:35:56.029775Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:3586:3444]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-03-31T16:35:56.032746Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-31T16:35:56.032827Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-03-31T16:35:56.033202Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-31T16:35:56.033237Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 2 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2025-03-31T16:35:56.033283Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 3] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-03-31T16:35:56.036698Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 2 ], RowsCount[ 0 ] 2025-03-31T16:35:56.036995Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 2 ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> CompressExecutor::TestExecutorMemUsage [GOOD] Test command err: 2025-03-31T16:33:44.394946Z :WriteAndReadSomeMessagesWithAsyncCompression INFO: Random seed for debugging is 1743438824394916 2025-03-31T16:33:44.692185Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012734526515849:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:44.692268Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:33:44.724525Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012732793546856:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:44.724762Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:33:44.857682Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : 
Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0011d5/r3tmp/tmpR1Wn1b/pdisk_1.dat 2025-03-31T16:33:44.879124Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-31T16:33:45.082145Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:45.082236Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:45.084388Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:33:45.085352Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:33:45.089908Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:45.089984Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:45.120438Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:33:45.121033Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21525, node 1 2025-03-31T16:33:45.131653Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:33:45.132095Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:33:45.259023Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/0l5c/0011d5/r3tmp/yandexsJcJWw.tmp 2025-03-31T16:33:45.259055Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/0l5c/0011d5/r3tmp/yandexsJcJWw.tmp 2025-03-31T16:33:45.259862Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/0l5c/0011d5/r3tmp/yandexsJcJWw.tmp 2025-03-31T16:33:45.260024Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:33:45.456645Z INFO: TTestServer started on Port 5953 GrpcPort 21525 TClient is connected to server localhost:5953 PQClient connected to localhost:21525 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:33:45.711354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 
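The PQ session bootstrap below fails compilation with SCHEME_ERROR and prints a nested issue tree, which is why the log carries bare ": Error:" and ":3:16: Error:" continuation lines. A simplified sketch of how such a tree flattens into those lines; TIssue here is an illustrative stand-in, not NYql's real issue interface.

#include <iostream>
#include <string>
#include <vector>

struct TIssue {
    int Row = 0, Column = 0;       // 0 means "no source position"
    std::string Message;
    int Code = 0;                  // e.g. 1030, 2003
    std::vector<TIssue> Children;  // nested sub-issues
};

// Each issue renders as "[:row:col]: Error: <message>[, code: N]",
// then its children follow recursively on their own lines.
void Print(const TIssue& issue) {
    std::string pos;
    if (issue.Row)
        pos = ":" + std::to_string(issue.Row) + ":" + std::to_string(issue.Column);
    std::cout << pos << ": Error: " << issue.Message;
    if (issue.Code)
        std::cout << ", code: " << issue.Code;
    std::cout << "\n";
    for (const auto& child : issue.Children)
        Print(child);
}

int main() {
    TIssue root{0, 0, "Type annotation", 1030,
        {{3, 16, "At function: KiReadTable!", 0,
            {{3, 16,
              "Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does "
              "not exist or you do not have access permissions. Please check "
              "correctness of table path and user permissions.",
              2003, {}}}}}};
    Print(root);  // reproduces the shape of the KQP_COMPILE_ACTOR issues below
}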
2025-03-31T16:33:47.862108Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012747411418750:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:47.862107Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012745678449065:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:47.862128Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012747411418755:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:47.862109Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012745678449055:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:47.862238Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:47.862236Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:47.866584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-03-31T16:33:47.869409Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012747411418801:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:47.869482Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:47.869768Z node 2 :TX_PROXY ERROR: Actor# [2:7488012745678449076:2124] txid# 281474976715657, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-31T16:33:47.882415Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488012745678449075:2313], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-03-31T16:33:47.882414Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012747411418765:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-03-31T16:33:47.939541Z node 2 :TX_PROXY ERROR: Actor# [2:7488012745678449103:2130] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:33:47.959670Z node 1 :TX_PROXY ERROR: Actor# [1:7488012747411418859:2694] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:33:48.257429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:33:48.258123Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488012747411418878:2348], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:33:48.258380Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NmY5YTAzNWEtNWEwYmU2OWQtMTNlMTI2OWEtZDEzM2ZiMQ==, ActorId: [1:7488012747411418733:2336], ActorState: ExecuteState, TraceId: 01jqpgtm8p4cpkj0s76hrv5y76, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:33:48.259346Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7488012745678449118:2317], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:33:48.259551Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZjgxMjIxY2QtNjk3OTZkOS1jODgwYTdlNS05OThiZWNiNA==, ActorId: [2:7488012745678449044:2308], ActorState: ExecuteState, TraceId: 01jqpgtm9r2rqa5q7vt150ty0x, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:33:48.260781Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T16:33:48.260786Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T16:33:48.387150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself ... dn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jqpgyexe62hb4zwwwh4w1rwa" } } YdbStatus: UNAVAILABLE ConsumedRu: 332 } 2025-03-31T16:35:54.005638Z :INFO: [/Root] MessageGroupId [test-message-group-id] Running cds request ms 2025-03-31T16:35:54.017295Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|c38593f0-afc72b92-36addb37-9a061c1_0] Got CDS response: write_sessions_clusters { clusters { endpoint: "localhost:27744" name: "dc1" available: true } primary_cluster_selection_reason: CLIENT_LOCATION } version: 1 2025-03-31T16:35:54.017397Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|c38593f0-afc72b92-36addb37-9a061c1_0] Start write session. 
Will connect to endpoint: localhost:27744 2025-03-31T16:35:54.026977Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|c38593f0-afc72b92-36addb37-9a061c1_0] Write session: send init request: init_request { topic: "test-topic" message_group_id: "test-message-group-id" preferred_cluster: "dc1" } 2025-03-31T16:35:54.026959Z node 15 :PQ_WRITE_PROXY DEBUG: new grpc connection 2025-03-31T16:35:54.027004Z node 15 :PQ_WRITE_PROXY DEBUG: new session created cookie 3 2025-03-31T16:35:54.027814Z node 15 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: grpc read done: success: 1 data: init_request { topic: "test-topic" message_group_id: "test-message-group-id" preferred_cluster: "dc1" } 2025-03-31T16:35:54.028015Z node 15 :PQ_WRITE_PROXY INFO: session request cookie: 3 topic: "test-topic" message_group_id: "test-message-group-id" preferred_cluster: "dc1" from ipv6:[::1]:53218 2025-03-31T16:35:54.028039Z node 15 :PQ_WRITE_PROXY INFO: write session: cookie=3 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:53218 proto=v1 topic=test-topic durationSec=0 2025-03-31T16:35:54.028055Z node 15 :PQ_WRITE_PROXY INFO: init check schema 2025-03-31T16:35:54.030428Z node 15 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: describe result for acl check 2025-03-31T16:35:54.030657Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2025-03-31T16:35:54.030675Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-03-31T16:35:54.030694Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2025-03-31T16:35:54.030720Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7488013291991951962:2575] (SourceId=test-message-group-id, PreferedPartition=(NULL)) StartKqpSession 2025-03-31T16:35:54.035644Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7488013291991951962:2575] (SourceId=test-message-group-id, PreferedPartition=(NULL)) Select from the table 2025-03-31T16:35:54.218428Z node 15 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976715696. Failed to resolve tablet: 72075186224037891 after several retries. 2025-03-31T16:35:54.218548Z node 15 :KQP_EXECUTER WARN: ActorId: [15:7488013291991951970:2577] TxId: 281474976715696. Ctx: { TraceId: 01jqpgyfhm9t2dt3ww8yzj1g5p, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=15&id=NjdjY2FhOGEtNzc3MzdjNGQtMjdhNzU1ZWItNTI2YjczOGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037891 after several retries. 2025-03-31T16:35:54.218767Z node 15 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=15&id=NjdjY2FhOGEtNzc3MzdjNGQtMjdhNzU1ZWItNTI2YjczOGQ=, ActorId: [15:7488013291991951963:2577], ActorState: ExecuteState, TraceId: 01jqpgyfhm9t2dt3ww8yzj1g5p, Create QueryResponse for error on request, msg: 2025-03-31T16:35:54.220600Z node 15 :PQ_PARTITION_CHOOSER INFO: TPartitionChooser [15:7488013291991951962:2575] (SourceId=test-message-group-id, PreferedPartition=(NULL)) ReplyError: kqp error Marker# PQ50 : Response { SessionId: "ydb://session/3?node_id=15&id=NjdjY2FhOGEtNzc3MzdjNGQtMjdhNzU1ZWItNTI2YjczOGQ=" QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01jqpgyfhm9t2dt3ww90sm6tkn" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 2025-03-31T16:35:54.220768Z node 15 :PQ_WRITE_PROXY INFO: session v1 error cookie: 3 reason: kqp error Marker# PQ50 : Response { SessionId: "ydb://session/3?node_id=15&id=NjdjY2FhOGEtNzc3MzdjNGQtMjdhNzU1ZWItNTI2YjczOGQ=" QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01jqpgyfhm9t2dt3ww90sm6tkn" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 sessionId: 2025-03-31T16:35:54.221214Z node 15 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: is DEAD Test retry state: get retry delay 2025-03-31T16:35:54.223393Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|c38593f0-afc72b92-36addb37-9a061c1_0] Got error. Status: UNAVAILABLE, Description:
: Error: kqp error Marker# PQ50 : Response { SessionId: "ydb://session/3?node_id=15&id=NjdjY2FhOGEtNzc3MzdjNGQtMjdhNzU1ZWItNTI2YjczOGQ=" QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01jqpgyfhm9t2dt3ww90sm6tkn" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 , code: 500001 2025-03-31T16:35:54.223444Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|c38593f0-afc72b92-36addb37-9a061c1_0] Write session will restart in 2.000000s 2025-03-31T16:35:54.223577Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|c38593f0-afc72b92-36addb37-9a061c1_0] Write session: Do CDS request 2025-03-31T16:35:54.223624Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|c38593f0-afc72b92-36addb37-9a061c1_0] Do schedule cds request after 2000 ms 2025-03-31T16:35:54.435296Z node 16 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976720682. Failed to resolve tablet: 72075186224037890 after several retries. 2025-03-31T16:35:54.435436Z node 16 :KQP_EXECUTER WARN: ActorId: [16:7488013290234742261:2495] TxId: 281474976720682. Ctx: { TraceId: 01jqpgyfrn7wtbyf2p4mdf0shj, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=16&id=ZmJkZmE1NDgtZDM4MzAxNWUtNDUwNWY2ZGUtNjFkY2FhNWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-03-31T16:35:54.435651Z node 16 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=16&id=ZmJkZmE1NDgtZDM4MzAxNWUtNDUwNWY2ZGUtNjFkY2FhNWI=, ActorId: [16:7488013290234742258:2495], ActorState: ExecuteState, TraceId: 01jqpgyfrn7wtbyf2p4mdf0shj, Create QueryResponse for error on request, msg: 2025-03-31T16:35:54.436768Z node 16 :PQ_METACACHE ERROR: Got error trying to perform request: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jqpgyfrn7wtbyf2p4p8kjyje" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 } 2025-03-31T16:35:54.635708Z node 15 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976715698. Failed to resolve tablet: 72075186224037890 after several retries. 2025-03-31T16:35:54.635803Z node 15 :KQP_EXECUTER WARN: ActorId: [15:7488013291991952019:2579] TxId: 281474976715698. Ctx: { TraceId: 01jqpgyfy46gxc47yv4ddb2mwf, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=15&id=Zjk3OTBhZi0zZjVlMDU1Ni0xMzI3OWQ4Ny00MzFmZWI1MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-03-31T16:35:54.635964Z node 15 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=15&id=Zjk3OTBhZi0zZjVlMDU1Ni0xMzI3OWQ4Ny00MzFmZWI1MQ==, ActorId: [15:7488013291991952016:2579], ActorState: ExecuteState, TraceId: 01jqpgyfy46gxc47yv4ddb2mwf, Create QueryResponse for error on request, msg: 2025-03-31T16:35:54.637096Z node 15 :PQ_METACACHE ERROR: Got error trying to perform request: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jqpgyfy55cdpjb4xt011zp2j" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 } 2025-03-31T16:35:55.014979Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|c38593f0-afc72b92-36addb37-9a061c1_0] Write session: close. Timeout = 0 ms 2025-03-31T16:35:55.015066Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|c38593f0-afc72b92-36addb37-9a061c1_0] Write session will now close 2025-03-31T16:35:55.015155Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|c38593f0-afc72b92-36addb37-9a061c1_0] Write session: aborting 2025-03-31T16:35:55.016033Z :WARNING: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|c38593f0-afc72b92-36addb37-9a061c1_0] Write session: could not confirm all writes in time or session aborted, perform hard shutdown 2025-03-31T16:35:55.016092Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|c38593f0-afc72b92-36addb37-9a061c1_0] Write session: destroy 2025-03-31T16:35:55.192981Z node 15 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976715700. Failed to resolve tablet: 72075186224037890 after several retries. 2025-03-31T16:35:55.193132Z node 15 :KQP_EXECUTER WARN: ActorId: [15:7488013296286919390:2580] TxId: 281474976715700. Ctx: { TraceId: 01jqpgyfzb7vwged3m8j1ew5z3, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=15&id=MWY5NmI2MzUtOGUzNzM2NjAtMTUyNTZlYzQtZDAwNTRlYzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-03-31T16:35:55.193405Z node 15 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=15&id=MWY5NmI2MzUtOGUzNzM2NjAtMTUyNTZlYzQtZDAwNTRlYzQ=, ActorId: [15:7488013291991952035:2580], ActorState: ExecuteState, TraceId: 01jqpgyfzb7vwged3m8j1ew5z3, Create QueryResponse for error on request, msg: 2025-03-31T16:35:55.194826Z node 15 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jqpgygg30kwex36f70wst28k" } } YdbStatus: UNAVAILABLE ConsumedRu: 351 }
>> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite1-withSink [GOOD]
>> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite2+withSink
|97.2%| [TA] $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|97.2%| [TA] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/test-results/unittest/{meta.json ... results_accumulator.log}
>> KqpSinkMvcc::OlapNamedStatement [GOOD]
>> KqpSinkMvcc::OlapMultiSinks
>> KqpLocksTricky::TestNoLocksIssue-withSink [GOOD]
>> KqpLocksTricky::TestNoLocksIssueInteractiveTx+withSink
>> KqpSinkTx::LocksAbortOnCommit [GOOD]
>> KqpSinkTx::InvalidateOnError
>> KqpQueryServiceScripts::TestTruncatedBySize [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TReadOnlyOltpNoSink [FAIL]
Test command err: Trying to start YDB, gRPC: 13405, MsgBus: 9281 2025-03-31T16:35:38.471447Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488013225037928382:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:38.471582Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012cc/r3tmp/tmpPszNCK/pdisk_1.dat 2025-03-31T16:35:38.890495Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:35:38.922376Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:38.922493Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 13405, node 1 2025-03-31T16:35:38.927830Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:35:38.995419Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:38.995445Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:38.995453Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:38.995572Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9281 TClient is connected to server localhost:9281 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:39.551211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:41.580288Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013237922830937:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:41.580490Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:41.581131Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013237922830949:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:41.585596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:35:41.596346Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488013237922830951:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:35:41.658200Z node 1 :TX_PROXY ERROR: Actor# [1:7488013237922831002:2336] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:41.924027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:35:42.049777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:35:43.137020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:35:43.838002Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488013225037928382:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:43.877591Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:35:44.565481Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YzY2MTk1NzUtOGY0NDdmZDctODY1MjVhZjktN2E4MGE1MQ==, ActorId: [1:7488013250807741297:2969], ActorState: ExecuteState, TraceId: 01jqpgy6316fsrcae4dnhfkw0m, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TReadOnly::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191FE23B
1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x196C634F
2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185: DoExecute @ 0x18DD66A3
3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18D0BACA
4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:214: Execute_ @ 0x18DB8EF2
5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18DBEF27
6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27
7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27
8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18DBEF27
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18DBEF27
10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196FD375
11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196FD375
12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196FD375
13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x196CCEC8
14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18DBE0F3
15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x196CE795
16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196F78EC
17. ??:0: ?? @ 0x7F2FAAFCED8F
18. ??:0: ?? @ 0x7F2FAAFCEE3F
19. ??:0: ?? @ 0x163B4028
Trying to start YDB, gRPC: 25321, MsgBus: 7599 2025-03-31T16:35:49.942919Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488013269240738773:2125];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:49.957444Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012cc/r3tmp/tmp9TO709/pdisk_1.dat 2025-03-31T16:35:50.118140Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:35:50.132706Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:50.132790Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:50.134397Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25321, node 2 2025-03-31T16:35:50.203598Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:50.203620Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:50.203629Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:50.203744Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7599 TClient is connected to server localhost:7599 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-31T16:35:50.696564Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:35:53.248210Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013286420608526:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:53.248372Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:53.248968Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013286420608562:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:53.252663Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:35:53.264659Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488013286420608564:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:35:53.320295Z node 2 :TX_PROXY ERROR: Actor# [2:7488013286420608615:2335] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:53.361874Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-31T16:35:53.397676Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-31T16:35:54.458170Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:35:55.324796Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488013269240738773:2125];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:55.393565Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:35:55.999310Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OWYxOWMyZDEtNWNiYjgwNmYtNzkxZjcwNTktZDQ1OGIwYTU=, ActorId: [2:7488013295010551559:2968], ActorState: ExecuteState, TraceId: 01jqpgyha2f6yc94jm1q5b2a8q, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TReadOnly::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191FE23B
1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x196C634F
2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185: DoExecute @ 0x18DD66A3
3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18D0BACA
4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:221: Execute_ @ 0x18DB911A
5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18DBEF27
6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27
7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27
8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18DBEF27
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18DBEF27
10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196FD375
11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196FD375
12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196FD375
13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x196CCEC8
14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18DBE0F3
15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x196CE795
16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196F78EC
17. ??:0: ?? @ 0x7F2FAAFCED8F
18. ??:0: ?? @ 0x7F2FAAFCEE3F
19. ??:0: ?? @ 0x163B4028
|97.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableAggrStatNonLocalTablet
>> KqpQueryService::DdlSecret [GOOD]
>> KqpQueryService::DdlMixedDml
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::TestTruncatedBySize [GOOD]
Test command err: Trying to start YDB, gRPC: 1687, MsgBus: 20989 2025-03-31T16:35:12.327661Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488013111841717306:2133];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:12.327819Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001434/r3tmp/tmpJgeKYX/pdisk_1.dat 2025-03-31T16:35:12.651555Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1687, node 1 2025-03-31T16:35:12.718621Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:12.718961Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:12.722242Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:35:12.757311Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:12.757348Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:12.757357Z node 1 :NET_CLASSIFIER WARN: failed 
to initialize from file: (empty maybe) 2025-03-31T16:35:12.757537Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20989 TClient is connected to server localhost:20989 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:13.304246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:13.345872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:13.501935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:13.672141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:13.754991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:15.529199Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013124726620893:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:15.529297Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:15.930009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:35:15.973586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:35:16.011326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:35:16.038725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:35:16.067820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:35:16.104705Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:35:16.148166Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013129021588697:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:16.148251Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:16.148439Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013129021588702:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:16.151297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:35:16.159904Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488013129021588704:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:35:16.234065Z node 1 :TX_PROXY ERROR: Actor# [1:7488013129021588757:3437] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 6448, MsgBus: 10190 2025-03-31T16:35:18.015568Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488013137428368270:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:18.015642Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001434/r3tmp/tmppnK2Tj/pdisk_1.dat 2025-03-31T16:35:18.125206Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:35:18.150932Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:18.151011Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 6448, node 2 2025-03-31T16:35:18.154570Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:35:18.199456Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:18.199478Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:18.199485Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:18.199598Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10190 TClient is connected to server localhost:10190 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:18.650456Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:18.670304Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
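The "Scheduled retry for error" entries above, like the earlier "Write session will restart in 2.000000s" / "Do schedule cds request after 2000 ms" pair, show clients waiting out a delay before retrying a failed request. Below is a minimal sketch of a capped exponential backoff loop of that general shape in plain C++; the 2-second base delay matches the log, but the doubling factor, cap, and attempt budget are illustrative assumptions, not YDB's actual retry policy.

```cpp
#include <algorithm>
#include <chrono>
#include <functional>
#include <optional>
#include <thread>

// Illustrative status type; a real client would use the SDK's status codes.
enum class EStatus { Success, Unavailable };

// Retry `op` with capped exponential backoff. Assumed policy: start at 2s
// (matching the "restart in 2.000000s" entries above), double per attempt,
// cap at 30s, give up after maxAttempts. All constants are hypothetical.
std::optional<EStatus> RetryWithBackoff(const std::function<EStatus()>& op,
                                        int maxAttempts = 5) {
    auto delay = std::chrono::milliseconds(2000);
    const auto maxDelay = std::chrono::milliseconds(30000);
    for (int attempt = 0; attempt < maxAttempts; ++attempt) {
        EStatus st = op();
        if (st != EStatus::Unavailable) {
            return st;  // Success or a non-retryable error: stop retrying.
        }
        std::this_thread::sleep_for(delay);     // "Do schedule ... after N ms"
        delay = std::min(delay * 2, maxDelay);  // Exponential growth, capped.
    }
    return std::nullopt;  // Retry budget exhausted.
}
```

The key property is that UNAVAILABLE-style errors are retried with growing delays, while any other status is returned to the caller immediately.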
2025-03-31T16:35:18.681169Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:35:18.761214Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:18.927397Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:18.995229Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:21.180941Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013150313271915:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T1 ... >,<|idx:1773|>,<|idx:1774|>,<|idx:1775|>,<|idx:1776|>,<|idx:1777|>,<|idx:1778|>,<|idx:1779|>,<|idx:1780|>,<|idx:1781|>,<|idx:1782|>,<|idx:1783|>,<|idx:1784|>,<|idx:1785|>,<|idx:1786|>,<|idx:1787|>,<|idx:1788|>,<|idx:1789|>,<|idx:1790|>,<|idx:1791|>,<|idx:1792|>,<|idx:1793|>,<|idx:1794|>,<|idx:1795|>,<|idx:1796|>,<|idx:1797|>,<|idx:1798|>,<|idx:1799|>,<|idx:1800|>,<|idx:1801|>,<|idx:1802|>,<|idx:1803|>,<|idx:1804|>,<|idx:1805|>,<|idx:1806|>,<|idx:1807|>,<|idx:1808|>,<|idx:1809|>,<|idx:1810|>,<|idx:1811|>,<|idx:1812|>,<|idx:1813|>,<|idx:1814|>,<|idx:1815|>,<|idx:1816|>,<|idx:1817|>,<|idx:1818|>,<|idx:1819|>,<|idx:1820|>,<|idx:1821|>,<|idx:1822|>,<|idx:1823|>,<|idx:1824|>,<|idx:1825|>,<|idx:1826|>,<|idx:1827|>,<|idx:1828|>,<|idx:1829|>,<|idx:1830|>,<|idx:1831|>,<|idx:1832|>,<|idx:1833|>,<|idx:1834|>,<|idx:1835|>,<|idx:1836|>,<|idx:1837|>,<|idx:1838|>,<|idx:1839|>,<|idx:1840|>,<|idx:1841|>,<|idx:1842|>,<|idx:1843|>,<|idx:1844|>,<|idx:1845|>,<|idx:1846|>,<|idx:1847|>,<|idx:1848|>,<|idx:1849|>,<|idx:1850|>,<|idx:1851|>,<|idx:1852|>,<|idx:1853|>,<|idx:1854|>,<|idx:1855|>,<|idx:1856|>,<|idx:1857|>,<|idx:1858|>,<|idx:1859|>,<|idx:1860|>,<|idx:1861|>,<|idx:1862|>,<|idx:1863|>,<|idx:1864|>,<|idx:1865|>,<|idx:1866|>,<|idx:1867|>,<|idx:1868|>,<|idx:1869|>,<|idx:1870|>,<|idx:1871|>,<|idx:1872|>,<|idx:1873|>,<|idx:1874|>,<|idx:1875|>,<|idx:1876|>,<|idx:1877|>,<|idx:1878|>,<|idx:1879|>,<|idx:1880|>,<|idx:1881|>,<|idx:1882|>,<|idx:1883|>,<|idx:1884|>,<|idx:1885|>,<|idx:1886|>,<|idx:1887|>,<|idx:1888|>,<|idx:1889|>,<|idx:1890|>,<|idx:1891|>,<|idx:1892|>,<|idx:1893|>,<|idx:1894|>,<|idx:1895|>,<|idx:1896|>,<|idx:1897|>,<|idx:1898|>,<|idx:1899|>,<|idx:1900|>,<|idx:1901|>,<|idx:1902|>,<|idx:1903|>,<|idx:1904|>,<|idx:1905|>,<|idx:1906|>,<|idx:1907|>,<|idx:1908|>,<|idx:1909|>,<|idx:1910|>,<|idx:1911|>,<|idx:1912|>,<|idx:1913|>,<|idx:1914|>,<|idx:1915|>,<|idx:1916|>,<|idx:1917|>,<|idx:1918|>,<|idx:1919|>,<|idx:1920|>,<|idx:1921|>,<|idx:1922|>,<|idx:1923|>,<|idx:1924|>,<|idx:1925|>,<|idx:1926|>,<|idx:1927|>,<|idx:1928|>,<|idx:1929|>,<|idx:1930|>,<|idx:1931|>,<|idx:1932|>,<|idx:1933|>,<|idx:1934|>,<|idx:1935|>,<|idx:1936|>,<|idx:1937|>,<|idx:1938|>,<|idx:1939|>,<|idx:1940|>,<|idx:1941|>,<|idx:1942|>,<|idx:1943|>,<|idx:1944|>,<|idx:1945|>,<|idx:1946|>,<|idx:1947|>,<|idx:1948|>,<|idx:1949|>,<|idx:1950|>,<|idx:1951|>,<|idx:1952|>,<|idx:1953|>,<|idx:1954|>,<|idx:1955|>,<|idx:1956|>,<|idx:1957|>,<|idx:1958|>,<|idx:1959|>,<|idx:1960|>,<|idx:1961|>,<|idx:1962|>,<|idx:1963|>,<|idx:1964|>,<|idx:1965|>,<|idx:1966|>,<|idx:1967|>,<|idx:1968|>,<|idx:1969|>,<|idx:1970|>,<|idx:1971|>,<|idx:1972|>,<|idx:1973|>,<|idx:1974|>,<|idx:1975|>,<|idx:1976|>,<|idx:1977|>,<|idx:1978|>,<|idx:1979|>,<|idx:1980|>,<|idx:1981|>,<|idx:1982|>,<|idx:1983|>,<|idx:1984|>,<|idx:1985|>,<|idx:1986|>,<|idx:1987|>,<|idx:1988|>,<|idx:1989|>,<|idx:1990|>,<|idx:1991|>,<|idx:1992|>,<|idx:1993|>,<|idx:1994|>,<|idx:1995|>,<|idx:1996|>,<|idx:1997|>,<|idx:1998|>,<|idx:1999|>]);", parameters: 0b 2025-03-31T16:35:47.056753Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438947026, txId: 281474976716266] shutting down Trying to start YDB, gRPC: 15718, MsgBus: 17621 2025-03-31T16:35:48.186726Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488013264262509450:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:48.186906Z node 3 :METADATA_PROVIDER 
ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001434/r3tmp/tmpTatRFy/pdisk_1.dat 2025-03-31T16:35:48.339333Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:35:48.373367Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:48.373478Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:48.375903Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15718, node 3 2025-03-31T16:35:48.443124Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:48.443153Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:48.443162Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:48.443361Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17621 TClient is connected to server localhost:17621 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:49.038927Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:49.057092Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:49.152803Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:49.367807Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:49.451302Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
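The long `<|idx:1773|>,<|idx:1774|>,…,<|idx:1999|>` run a few entries above is the tail of a generated statement carrying 2000 YQL struct literals, which is how KqpQueryServiceScripts::TestTruncatedBySize appears to produce result sets large enough to hit the truncation limit. A sketch of how such a statement could be assembled follows; only the `<|idx:N|>` literal shape and the closing `]);` are taken from the log, while the REPLACE/AS_TABLE framing and table name are assumptions for illustration.

```cpp
#include <sstream>
#include <string>

// Builds a YQL statement whose row list ends in ...,<|idx:1999|>]); ,
// matching the tail visible in the log above. The REPLACE/AS_TABLE framing
// is a hypothetical reconstruction, not the test's verbatim query.
std::string BuildBulkStatement(const std::string& table, int rows) {
    std::ostringstream q;
    q << "REPLACE INTO `" << table << "` SELECT * FROM AS_TABLE([";
    for (int i = 0; i < rows; ++i) {
        if (i > 0) {
            q << ",";
        }
        q << "<|idx:" << i << "|>";  // one YQL struct literal per row
    }
    q << "]);";
    return q.str();
}

// Example: BuildBulkStatement("/Root/Test", 2000) yields a statement ending
// in ...,<|idx:1999|>]); as seen in the truncated log entry above.
```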
2025-03-31T16:35:52.398714Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013281442380425:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:52.398796Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:52.454057Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:35:52.494471Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:35:52.528514Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:35:52.560934Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:35:52.596456Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:35:52.634741Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:35:52.685867Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013281442380934:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:52.685953Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:52.685962Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013281442380939:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:52.689892Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:35:52.700400Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488013281442380941:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:35:52.783837Z node 3 :TX_PROXY ERROR: Actor# [3:7488013281442380995:3449] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:53.186325Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488013264262509450:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:53.186431Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:35:53.957808Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:35:53.967135Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-31T16:35:53.969079Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-31T16:35:58.950560Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743438958982, txId: 281474976715754] shutting down
>> KqpSinkTx::OlapLocksAbortOnCommit [GOOD]
>> KqpSinkTx::OlapSnapshotRO
>> KqpTx::RollbackByIdle [GOOD]
>> KqpTx::RollbackInvalidated
>> TTxDataShardMiniKQL::TableStatsHistograms [GOOD]
>> KqpSnapshotRead::TestSnapshotExpiration-withSink
>> KqpSnapshotIsolation::TSimpleOltpNoSink
>> KqpQueryServiceScripts::ExecuteScriptWithCancelAfterAndTimeout [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::TableStatsHistograms [GOOD]
Test command err: 2025-03-31T16:35:33.197850Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:35:33.200309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:35:33.200368Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:35:33.208254Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:35:33.210587Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:131:2154] 2025-03-31T16:35:33.212219Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:35:33.227730Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:35:33.277175Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:35:33.277505Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:35:33.280300Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-03-31T16:35:33.280381Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-03-31T16:35:33.280436Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at 
tablet: 9437184 2025-03-31T16:35:33.282010Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:35:33.282732Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:35:33.282849Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:197:2154] in generation 2 2025-03-31T16:35:33.379425Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:35:33.413779Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-03-31T16:35:33.415224Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:35:33.415394Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-03-31T16:35:33.415444Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-03-31T16:35:33.415482Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-03-31T16:35:33.415514Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:35:33.415712Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:35:33.415752Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:35:33.416963Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-03-31T16:35:33.417104Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-03-31T16:35:33.417171Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-31T16:35:33.417210Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:35:33.417246Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-03-31T16:35:33.417296Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-31T16:35:33.417326Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-31T16:35:33.417353Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-03-31T16:35:33.417386Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:35:33.417482Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:35:33.417540Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:35:33.419520Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-03-31T16:35:33.423711Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:131:2154]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-03-31T16:35:33.423781Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T16:35:33.423893Z node 1 :TX_DATASHARD DEBUG: 
TTxProposeTransactionBase::Execute at 9437184 2025-03-31T16:35:33.424022Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-03-31T16:35:33.424071Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-03-31T16:35:33.424128Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-03-31T16:35:33.424179Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-31T16:35:33.424228Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-03-31T16:35:33.424284Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-03-31T16:35:33.424320Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-03-31T16:35:33.424589Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-03-31T16:35:33.424652Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-03-31T16:35:33.424693Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-03-31T16:35:33.424728Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-03-31T16:35:33.424769Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-03-31T16:35:33.424807Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-03-31T16:35:33.424883Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-03-31T16:35:33.424927Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-03-31T16:35:33.424957Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-03-31T16:35:33.438015Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-03-31T16:35:33.438071Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-03-31T16:35:33.438100Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-03-31T16:35:33.438150Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-03-31T16:35:33.438215Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-03-31T16:35:33.441071Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:35:33.441187Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:35:33.441241Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-03-31T16:35:33.441379Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:131:2154]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-03-31T16:35:33.441413Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-31T16:35:33.441580Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-03-31T16:35:33.441636Z node 1 :TX_DATASHARD TRACE: Execution 
status for [1000001:1] at 9437184 is Executed 2025-03-31T16:35:33.441677Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-03-31T16:35:33.441736Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-03-31T16:35:33.445285Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-03-31T16:35:33.445367Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:35:33.445618Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:35:33.445663Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:35:33.445722Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-31T16:35:33.445767Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:35:33.445808Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-31T16:35:33.445859Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-03-31T16:35:33.445914Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-03-31T16:35:33.445987Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-31T16:35:33.446034Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-03-31T16:35:33.446095Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-03-31T16:35:33.446139Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-03-31T16:35:33.446306Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-03-31T16:35:33.446355Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-31T16:35:33.446409Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-03-31T16:35:33.446437Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-03-31T16:35:33.446462Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-03-31T16:35:33.446515Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-31T16:35:33.446543Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-03-31T16:35:33.446569Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-31T16:35:33.446603Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-03-31T16:35:33.446654Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-03-31T16:35:33.446695Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-03-31T16:35:33.446732Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-03-31T16:35:33.446771Z node 1 :TX_DATA ... 
4MyReads MyWrites\205\004?\000\206\202\024Reply\024Write?\000?\000 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\000\005?\004?\014\005?\002)\211\006\202\203\005\004\213\002\203\004\205\002\203\001H\01056$UpdateRow\000\003?\016 h\020\000\000\000\000\000\000\r\000\000\000\000\000\000\000\013?\022\003?\020\231\017\001\005?\026\003?\024\322ImInShard111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111\001\007\002\000\003\005?\010?\014\006\002?\006?$\000\003?\014?\014\037/ \0018\000" TxId: 1002 ExecLevel: 0 Flags: 0 2025-03-31T16:36:01.286967Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T16:36:01.287046Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-31T16:36:01.287600Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1002] at 9437184 on unit CheckDataTx 2025-03-31T16:36:01.287656Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1002] at 9437184 is Executed 2025-03-31T16:36:01.287685Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1002] at 9437184 executing on unit CheckDataTx 2025-03-31T16:36:01.287714Z node 3 :TX_DATASHARD TRACE: Add [0:1002] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-31T16:36:01.287741Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1002] at 9437184 on unit BuildAndWaitDependencies 2025-03-31T16:36:01.287776Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-03-31T16:36:01.287827Z node 3 :TX_DATASHARD TRACE: Activated operation [0:1002] at 9437184 2025-03-31T16:36:01.287857Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1002] at 9437184 is Executed 2025-03-31T16:36:01.287879Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1002] at 9437184 executing on unit BuildAndWaitDependencies 2025-03-31T16:36:01.287900Z node 3 :TX_DATASHARD TRACE: Add [0:1002] at 9437184 to execution unit ExecuteDataTx 2025-03-31T16:36:01.287923Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1002] at 9437184 on unit ExecuteDataTx 2025-03-31T16:36:01.287958Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-03-31T16:36:01.288264Z node 3 :TX_DATASHARD TRACE: Executed operation [0:1002] at tablet 9437184 with status COMPLETE 2025-03-31T16:36:01.288315Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for [0:1002] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 109, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-03-31T16:36:01.288365Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1002] at 9437184 is ExecutedNoMoreRestarts 2025-03-31T16:36:01.288390Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1002] at 9437184 executing on unit ExecuteDataTx 2025-03-31T16:36:01.288418Z node 3 :TX_DATASHARD TRACE: Add [0:1002] at 9437184 to execution unit FinishPropose 2025-03-31T16:36:01.288444Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1002] at 9437184 on unit FinishPropose 2025-03-31T16:36:01.288480Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1002] at 
9437184 is DelayComplete 2025-03-31T16:36:01.288502Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1002] at 9437184 executing on unit FinishPropose 2025-03-31T16:36:01.288526Z node 3 :TX_DATASHARD TRACE: Add [0:1002] at 9437184 to execution unit CompletedOperations 2025-03-31T16:36:01.288555Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1002] at 9437184 on unit CompletedOperations 2025-03-31T16:36:01.288597Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1002] at 9437184 is Executed 2025-03-31T16:36:01.288620Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1002] at 9437184 executing on unit CompletedOperations 2025-03-31T16:36:01.288644Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:1002] at 9437184 has finished 2025-03-31T16:36:01.305014Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-03-31T16:36:01.305084Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:1002] at 9437184 on unit FinishPropose 2025-03-31T16:36:01.305121Z node 3 :TX_DATASHARD TRACE: Propose transaction complete txid 1002 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: COMPLETE 2025-03-31T16:36:01.305196Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 .2025-03-31T16:36:01.310638Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269551617, Sender [3:99:2134], Recipient [3:235:2228]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 99 RawX2: 12884904022 } 2025-03-31T16:36:01.310698Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvGetShardState 2025-03-31T16:36:01.311821Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [3:4546:6465], Recipient [3:235:2228]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:36:01.311868Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:36:01.311907Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [3:4545:6464], serverId# [3:4546:6465], sessionId# [0:0:0] 2025-03-31T16:36:01.312227Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [3:99:2134], Recipient [3:235:2228]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 99 RawX2: 12884904022 } TxBody: "\032\265\002\037\000\005\205\n\205\000\205\004?\000\205\002\202\0041\034MyReads MyWrites\205\004?\000\206\202\024Reply\024Write?\000?\000 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\000\005?\004?\014\005?\002)\211\006\202\203\005\004\213\002\203\004\205\002\203\001H\01056$UpdateRow\000\003?\016 h\020\000\000\000\000\000\000\r\000\000\000\000\000\000\000\013?\022\003?\020\235\017\001\005?\026\003?\024\322ImInShard111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111\001\007\002\000\003\005?\010?\014\006\002?\006?$\000\003?\014?\014\037/ \0018\000" TxId: 1003 ExecLevel: 0 Flags: 0 2025-03-31T16:36:01.312267Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T16:36:01.312347Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-31T16:36:01.312904Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1003] at 9437184 on unit CheckDataTx 2025-03-31T16:36:01.312962Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1003] at 9437184 is Executed 2025-03-31T16:36:01.312994Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1003] at 9437184 executing on unit 
CheckDataTx 2025-03-31T16:36:01.313026Z node 3 :TX_DATASHARD TRACE: Add [0:1003] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-31T16:36:01.313052Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1003] at 9437184 on unit BuildAndWaitDependencies 2025-03-31T16:36:01.313106Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-03-31T16:36:01.313163Z node 3 :TX_DATASHARD TRACE: Activated operation [0:1003] at 9437184 2025-03-31T16:36:01.313194Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1003] at 9437184 is Executed 2025-03-31T16:36:01.313217Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1003] at 9437184 executing on unit BuildAndWaitDependencies 2025-03-31T16:36:01.313239Z node 3 :TX_DATASHARD TRACE: Add [0:1003] at 9437184 to execution unit ExecuteDataTx 2025-03-31T16:36:01.313262Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1003] at 9437184 on unit ExecuteDataTx 2025-03-31T16:36:01.313298Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-03-31T16:36:01.313607Z node 3 :TX_DATASHARD TRACE: Executed operation [0:1003] at tablet 9437184 with status COMPLETE 2025-03-31T16:36:01.313654Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for [0:1003] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 109, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-03-31T16:36:01.313702Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1003] at 9437184 is ExecutedNoMoreRestarts 2025-03-31T16:36:01.313724Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1003] at 9437184 executing on unit ExecuteDataTx 2025-03-31T16:36:01.313749Z node 3 :TX_DATASHARD TRACE: Add [0:1003] at 9437184 to execution unit FinishPropose 2025-03-31T16:36:01.313775Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1003] at 9437184 on unit FinishPropose 2025-03-31T16:36:01.313810Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1003] at 9437184 is DelayComplete 2025-03-31T16:36:01.313833Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1003] at 9437184 executing on unit FinishPropose 2025-03-31T16:36:01.313859Z node 3 :TX_DATASHARD TRACE: Add [0:1003] at 9437184 to execution unit CompletedOperations 2025-03-31T16:36:01.313883Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1003] at 9437184 on unit CompletedOperations 2025-03-31T16:36:01.313924Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1003] at 9437184 is Executed 2025-03-31T16:36:01.313945Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1003] at 9437184 executing on unit CompletedOperations 2025-03-31T16:36:01.313969Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:1003] at 9437184 has finished 2025-03-31T16:36:01.351189Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 9437184, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-31T16:36:01.351249Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 9437184, table# 1001, finished edge# 0, front# 0 2025-03-31T16:36:01.352624Z node 3 :TX_DATASHARD DEBUG: 
TTxProposeTransactionBase::Complete at 9437184 2025-03-31T16:36:01.352678Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:1003] at 9437184 on unit FinishPropose 2025-03-31T16:36:01.352716Z node 3 :TX_DATASHARD TRACE: Propose transaction complete txid 1003 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 3 ms, status: COMPLETE 2025-03-31T16:36:01.352808Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:36:01.356598Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [3:232:2227], Recipient [3:235:2228]: NKikimr::TEvTablet::TEvFollowerGcApplied .2025-03-31T16:36:01.360038Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [3:4560:6478], Recipient [3:235:2228]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:36:01.360113Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:36:01.360166Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [3:4559:6477], serverId# [3:4560:6478], sessionId# [0:0:0] 2025-03-31T16:36:01.360499Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553160, Sender [3:4558:6476], Recipient [3:235:2228]: NKikimrTxDataShard.TEvGetTableStats TableId: 13 { InMemSize: 0 LastAccessTime: 1719 LastUpdateTime: 1719 } >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite3 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::ExecuteScriptWithCancelAfterAndTimeout [GOOD] Test command err: Trying to start YDB, gRPC: 30035, MsgBus: 25552 2025-03-31T16:35:23.975982Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488013159112895675:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:23.976675Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0013a9/r3tmp/tmpIARy57/pdisk_1.dat 2025-03-31T16:35:24.352001Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30035, node 1 2025-03-31T16:35:24.403816Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:24.403942Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:24.405174Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:35:24.420635Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:24.420665Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:24.420680Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:24.420814Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25552 TClient is connected to server localhost:25552 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:24.876206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:24.888459Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:35:24.896213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:25.005404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:25.155005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:25.233911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:26.801210Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013171997799328:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:26.801309Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:27.204955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:35:27.245214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:35:27.316865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:35:27.348702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:35:27.381331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:35:27.418758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:35:27.502403Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013176292767144:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:27.502483Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:27.502620Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013176292767149:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:27.505854Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:35:27.516395Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488013176292767151:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:35:27.601472Z node 1 :TX_PROXY ERROR: Actor# [1:7488013176292767207:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:28.426515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:35:28.428530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T16:35:28.430017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-31T16:35:29.033209Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488013159112895675:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:29.033489Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 64855, MsgBus: 16776 2025-03-31T16:35:30.794614Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488013189826907378:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:30.794724Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0013a9/r3tmp/tmpLkHo0D/pdisk_1.dat 2025-03-31T16:35:30.929545Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:35:30.945011Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:30.945121Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:30.946917Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64855, node 2 2025-03-31T16:35:30.998058Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:30.998094Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:30.998105Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:30.998230Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16776 TClient is connected to server localhost:16776 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:31.492381Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:31.507129Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0 ... ool default not found or you don't have access permissions } 2025-03-31T16:35:49.725777Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:49.725996Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488013269424567415:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:49.729543Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:35:49.737964Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7488013269424567417:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:35:49.806330Z node 4 :TX_PROXY ERROR: Actor# [4:7488013269424567471:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:50.649728Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7488013252244695937:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:50.649793Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:35:50.851939Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:35:50.853859Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-31T16:35:50.855315Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-31T16:35:51.180764Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MmI3MDI1NWYtYjY3OTcxZmYtZWZlMzM0ZjktYjgzZWZhMzE=, ActorId: [4:7488013278014502658:2520], ActorState: ExecuteState, TraceId: 01jqpgycdyf4nrfevwe8kqv279, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 11063, MsgBus: 15067 2025-03-31T16:35:53.830788Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7488013286652968098:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:53.830869Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0013a9/r3tmp/tmptUjv88/pdisk_1.dat 2025-03-31T16:35:53.954774Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:35:53.970825Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:53.971259Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:53.972895Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11063, node 5 2025-03-31T16:35:54.029790Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:54.029819Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:54.029831Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:54.029982Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15067 TClient is connected to server localhost:15067 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:54.637425Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:54.644501Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:35:54.650975Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:54.737077Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:54.948221Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:55.030247Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:57.827747Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7488013303832839055:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:57.827856Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:57.869190Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:35:57.906353Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:35:57.941392Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:35:57.980659Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:35:58.020379Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:35:58.059270Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:35:58.108668Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7488013308127806862:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:58.108760Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7488013308127806867:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:58.108780Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:58.112227Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:35:58.167605Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7488013308127806869:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:35:58.233647Z node 5 :TX_PROXY ERROR: Actor# [5:7488013308127806924:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:58.830954Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7488013286652968098:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:58.831037Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:35:59.398373Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-31T16:35:59.401217Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:35:59.403466Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-31T16:35:59.735841Z node 5 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=5&id=OGY3MDQ3NjktZTEzNzIzZDgtOWVkN2E3YWEtZTg3ZGM3ZDk=, ActorId: [5:7488013312422774816:2520], ActorState: ExecuteState, TraceId: 01jqpgyms13j8hh2sddkgzrjw5, Create QueryResponse for error on request, msg: >> KqpSinkMvcc::OlapReadOnlyTxCommitsOnConcurrentWrite [GOOD] >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite1 |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> KqpSinkTx::OlapDeferredEffects ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite3 [GOOD] Test command err: Trying to start YDB, gRPC: 22191, MsgBus: 6647 2025-03-31T16:35:36.820884Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488013216204424959:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:36.820957Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012cd/r3tmp/tmprcrgEO/pdisk_1.dat 2025-03-31T16:35:37.215894Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22191, node 1 2025-03-31T16:35:37.260462Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:37.260564Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:37.262170Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:35:37.298050Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:37.298073Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T16:35:37.298080Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:37.298182Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6647 TClient is connected to server localhost:6647 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:37.825489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:40.043601Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013233384294794:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:40.043744Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:40.044370Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013233384294806:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:40.049678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:35:40.067245Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488013233384294808:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:35:40.158978Z node 1 :TX_PROXY ERROR: Actor# [1:7488013233384294859:2336] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:40.478036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:35:40.597212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:35:41.607553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:35:42.244175Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488013216204424959:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:42.334509Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:35:45.389703Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976710666; 2025-03-31T16:35:45.391518Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488013254859140332:2969], SessionActorId: [1:7488013241974237857:2969], Got LOCKS BROKEN for table. ShardID=72075186224037989, Sink=[1:7488013254859140332:2969].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-03-31T16:35:45.392002Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488013254859140332:2969], SessionActorId: [1:7488013241974237857:2969], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/KV2`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[1:7488013241974237857:2969]. isRollback=0 2025-03-31T16:35:45.392269Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjdhNDlhMzYtNjliNzIyYWMtYTQ0ZDFlYzMtZmZlOGYxYjY=, ActorId: [1:7488013241974237857:2969], ActorState: ExecuteState, TraceId: 01jqpgy6vw08rv2tsxs2ejpm6a, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [1:7488013254859140333:2969] from: [1:7488013254859140332:2969] 2025-03-31T16:35:45.392343Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7488013254859140333:2969] TxId: 281474976710666. Ctx: { TraceId: 01jqpgy6vw08rv2tsxs2ejpm6a, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjdhNDlhMzYtNjliNzIyYWMtYTQ0ZDFlYzMtZmZlOGYxYjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/KV2`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-03-31T16:35:45.392538Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjdhNDlhMzYtNjliNzIyYWMtYTQ0ZDFlYzMtZmZlOGYxYjY=, ActorId: [1:7488013241974237857:2969], ActorState: ExecuteState, TraceId: 01jqpgy6vw08rv2tsxs2ejpm6a, Create QueryResponse for error on request, msg: WAIT_INDEXATION: 0 2025-03-31T16:35:45.395977Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_ABORTED;details=Distributed transaction aborted due to commit failure;tx_id=281474976710666; 2025-03-31T16:35:45.396165Z node 1 :TX_DATASHARD ERROR: Complete volatile write [1743438945437 : 281474976710666] from 72075186224037889 at tablet 72075186224037889, error: Status: STATUS_ABORTED Issues: { message: "Distributed transaction aborted due to commit failure" issue_code: 2011 severity: 1 } WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 13199, MsgBus: 26892 2025-03-31T16:35:51.268698Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488013281078611032:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:51.268777Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012cd/r3tmp/tmpzKfa9n/pdisk_1.dat 2025-03-31T16:35:51.376285Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:35:51.409868Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:51.409954Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 13199, node 2 2025-03-31T16:35:51.415542Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:35:51.464199Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:51.464220Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:51.464227Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:51.464348Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26892 TClient is connected to server localhost:26892 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success.
2025-03-31T16:35:51.868581Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:54.333640Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013293963513565:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:54.333775Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013293963513555:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:54.334162Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:54.338040Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:35:54.352343Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488013293963513593:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:35:54.448061Z node 2 :TX_PROXY ERROR: Actor# [2:7488013293963513644:2334] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:54.503135Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-31T16:35:54.585265Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-31T16:35:55.649259Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:35:56.465405Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488013281078611032:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:56.483704Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:35:57.428559Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NjFkMmE1ZWMtNjhlNGU0MTYtNjJhOGZiYzQtMjMyMjVmNmY=, ActorId: [2:7488013302553456568:2969], ActorState: ExecuteState, TraceId: 01jqpgyjsq6zwwzk8vhkpr5e8z, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldCompactBorrowedAfterSplitMerge [GOOD] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldHandleBorrowCompactionTimeouts >> KqpSinkTx::OlapInvalidateOnError [FAIL] >> KqpSinkTx::OlapInteractive >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite2+withSink [GOOD] >> KqpTx::InteractiveTx >> KqpTx::RollbackInvalidated [GOOD] >> KqpSinkMvcc::OltpMultiSinks [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite2+withSink [GOOD] Test command err: Trying to start YDB, gRPC: 7077, MsgBus: 26835 2025-03-31T16:35:52.382800Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488013283704144127:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:52.383027Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012a9/r3tmp/tmpNjR9us/pdisk_1.dat 2025-03-31T16:35:52.717330Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7077, node 1 2025-03-31T16:35:52.769205Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:52.769310Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:52.777291Z node 1 :HIVE WARN:
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:35:52.804200Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:52.804232Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:52.804243Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:52.804374Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26835 TClient is connected to server localhost:26835 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:53.338227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:35:53.378177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:35:53.516874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:53.670532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:53.748684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:55.687735Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013296589047790:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:55.687927Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:56.010193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:35:56.039557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:35:56.067959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:35:56.100430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:35:56.129150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:35:56.197481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:35:56.261210Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013300884015601:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:56.261305Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:56.261641Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013300884015606:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:56.265510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:35:56.277640Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488013300884015608:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:35:56.351910Z node 1 :TX_PROXY ERROR: Actor# [1:7488013300884015661:3451] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:57.383235Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488013283704144127:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:57.383314Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:35:57.818679Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Nzg5NDc3ZWUtNjFiMGZkOGQtZDUzNDA0NjUtZjg1YmY3YTE=, ActorId: [1:7488013305178983215:2488], ActorState: ExecuteState, TraceId: 01jqpgyk4ffb404q27mn9hz27d, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 18641, MsgBus: 61103 2025-03-31T16:35:58.593820Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488013310628140352:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:58.607320Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012a9/r3tmp/tmpqqUDHQ/pdisk_1.dat 2025-03-31T16:35:58.693900Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18641, node 2 2025-03-31T16:35:58.734057Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:58.734135Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:58.737290Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:35:58.765178Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:58.765200Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:58.765210Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:58.765324Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61103 TClient is connected to server localhost:61103 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:59.164831Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:59.169640Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:35:59.178548Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:59.223869Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:35:59.367457Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-31T16:35:59.478445Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:36:01.570281Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013323513043980:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:01.570366Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:01.613267Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:36:01.644943Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:36:01.673615Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:36:01.705496Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:36:01.732775Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:36:01.800354Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:36:01.844914Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013323513044495:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:01.844993Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:01.845069Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013323513044500:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:01.848238Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:36:01.856785Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488013323513044502:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:36:01.923005Z node 2 :TX_PROXY ERROR: Actor# [2:7488013323513044555:3441] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:36:03.594409Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488013310628140352:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:36:03.594499Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:36:05.360605Z node 2 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976715673; 2025-03-31T16:36:05.361647Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7488013340692914258:2496], Table: `/Root/EightShard` ([72057594046644480:3:1]), SessionActorId: [2:7488013327808012141:2496]Got LOCKS BROKEN for table `/Root/EightShard`. ShardID=72075186224037891, Sink=[2:7488013340692914258:2496].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-03-31T16:36:05.362247Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7488013340692914239:2496], SessionActorId: [2:7488013327808012141:2496], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/EightShard`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[2:7488013327808012141:2496]. isRollback=0 2025-03-31T16:36:05.362563Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=M2NkMjkzNTAtZjJjZjI5ODMtM2Q3NTNlNGQtYzEyYjM0ODM=, ActorId: [2:7488013327808012141:2496], ActorState: ExecuteState, TraceId: 01jqpgytc37p8e0scgn6ke2fs5, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [2:7488013340692914240:2496] from: [2:7488013340692914239:2496] 2025-03-31T16:36:05.362699Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7488013340692914240:2496] TxId: 281474976715673. Ctx: { TraceId: 01jqpgytc37p8e0scgn6ke2fs5, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=M2NkMjkzNTAtZjJjZjI5ODMtM2Q3NTNlNGQtYzEyYjM0ODM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/EightShard`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-03-31T16:36:05.362979Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=M2NkMjkzNTAtZjJjZjI5ODMtM2Q3NTNlNGQtYzEyYjM0ODM=, ActorId: [2:7488013327808012141:2496], ActorState: ExecuteState, TraceId: 01jqpgytc37p8e0scgn6ke2fs5, Create QueryResponse for error on request, msg: 2025-03-31T16:36:05.363367Z node 2 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_ABORTED;details=Distributed transaction aborted due to commit failure;tx_id=281474976715673; 2025-03-31T16:36:05.363543Z node 2 :TX_DATASHARD ERROR: Complete volatile write [1743438965408 : 281474976715673] from 72075186224037888 at tablet 72075186224037888, error: Status: STATUS_ABORTED Issues: { message: "Distributed transaction aborted due to commit failure" issue_code: 2011 severity: 1 } >> KqpSinkLocks::TInvalidateOlap [GOOD] >> KqpSinkLocks::UncommittedRead >> KqpQueryService::DdlMixedDml [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::RollbackInvalidated [GOOD] Test command err: Trying to start YDB, gRPC: 62134, MsgBus: 16341 2025-03-31T16:35:56.650629Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488013302742422236:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:56.650754Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012a6/r3tmp/tmp4kpVhr/pdisk_1.dat 2025-03-31T16:35:56.973033Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62134, node 1 2025-03-31T16:35:57.007820Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:57.008029Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:57.023903Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:35:57.079692Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:57.079722Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:57.079729Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:57.079854Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16341 TClient is connected to server localhost:16341 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:57.568893Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:57.599004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:35:57.755628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-31T16:35:57.909612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:57.978299Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:59.622832Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013315627325900:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:59.622964Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:59.976220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:36:00.007581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:36:00.042419Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:36:00.072970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:36:00.105575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:36:00.136286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:36:00.217321Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013319922293713:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:00.217420Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:00.217782Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013319922293718:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:00.221832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:36:00.235806Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488013319922293720:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:36:00.321635Z node 1 :TX_PROXY ERROR: Actor# [1:7488013319922293775:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:36:01.124144Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OWYzN2Y3MTYtOGU3MTY2NGYtOGZlZTk3ZjItZmJkZDhjNmM=, ActorId: [1:7488013324217261360:2496], ActorState: ReadyState, TraceId: 01jqpgypes1rpwt20t4v59q603, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 6879, MsgBus: 26034 2025-03-31T16:36:01.805123Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488013322369380390:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:36:01.805216Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012a6/r3tmp/tmpG3ROQE/pdisk_1.dat 2025-03-31T16:36:01.901583Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6879, node 2 2025-03-31T16:36:01.944921Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:36:01.945024Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:36:01.948671Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:36:01.965047Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:36:01.965073Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:36:01.965080Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:36:01.965184Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26034 TClient is connected to server localhost:26034 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:36:02.362975Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:36:02.380483Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:36:02.445378Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:36:02.602779Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:36:02.670103Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:36:04.836441Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013335254284043:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:04.836540Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:04.861339Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:36:04.889818Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:36:04.917500Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:36:04.947942Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:36:04.978685Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:36:05.016866Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:36:05.056265Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013339549251850:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:05.056370Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:05.056430Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013339549251855:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:05.059469Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:36:05.068502Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488013339549251857:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:36:05.147588Z node 2 :TX_PROXY ERROR: Actor# [2:7488013339549251911:3446] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:36:05.974826Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7488013339549252223:2506], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: KiReadTable!
:2:13: Error: Cannot find table 'db.[/Root/BadTable]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:36:05.975106Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=M2RhNDY4ZmUtMmU3YjEwYzAtN2JkOWFiMTktYjdhZDI1OWE=, ActorId: [2:7488013339549252200:2496], ActorState: ExecuteState, TraceId: 01jqpgyv5j4kjxecke9d5chdhc, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 01jqpgyv5a0zp0w2h1zwxqp6ry 2025-03-31T16:36:05.982071Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=M2RhNDY4ZmUtMmU3YjEwYzAtN2JkOWFiMTktYjdhZDI1OWE=, ActorId: [2:7488013339549252200:2496], ActorState: ReadyState, TraceId: 01jqpgyv6x6efyk0tm3gdvff3m, Create QueryResponse for error on request, msg: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::OltpMultiSinks [GOOD] Test command err: Trying to start YDB, gRPC: 23179, MsgBus: 6723 2025-03-31T16:35:42.060128Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488013242113333951:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:42.060175Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012ba/r3tmp/tmpwjXMYq/pdisk_1.dat 2025-03-31T16:35:42.496603Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23179, node 1 2025-03-31T16:35:42.548018Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:42.548460Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:42.558645Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:35:42.584847Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:42.584868Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:42.584887Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:42.585039Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6723 TClient is connected to server localhost:6723 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:35:43.186384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:45.257555Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013254998236500:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:45.257635Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013254998236517:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:45.257729Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:45.262074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:35:45.277427Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488013254998236521:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:35:45.378608Z node 1 :TX_PROXY ERROR: Actor# [1:7488013254998236572:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:45.719816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:35:45.864717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:35:46.925458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:35:47.552655Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488013242113333951:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:47.735445Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 30022, MsgBus: 22795 2025-03-31T16:35:54.610212Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488013293420490442:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:54.610269Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012ba/r3tmp/tmpDEnnse/pdisk_1.dat 2025-03-31T16:35:54.753988Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30022, node 2 2025-03-31T16:35:54.767538Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:54.767615Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:54.819620Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:35:54.866960Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:54.866985Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:54.866994Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:54.867141Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22795 TClient is connected to server localhost:22795 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-31T16:35:55.322111Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:35:57.931161Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013306305392945:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:57.931343Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:57.937912Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013306305392980:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:57.943070Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:35:57.959237Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488013306305392982:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:35:58.045810Z node 2 :TX_PROXY ERROR: Actor# [2:7488013310600360330:2336] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:58.139310Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-31T16:35:58.180521Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-31T16:35:59.330817Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:36:00.105598Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488013293420490442:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:36:00.212288Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> KqpSnapshotIsolation::TSimpleOlap [GOOD] >> MetadataConversion::MakeAuthTest [GOOD] >> MetadataConversion::ConvertingExternalSourceMetadata [GOOD] |97.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/gateway/ut/gtest >> MetadataConversion::ConvertingExternalSourceMetadata [GOOD] |97.3%| [TS] {RESULT} ydb/core/kqp/gateway/ut/gtest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::DdlMixedDml [GOOD] Test command err: Trying to start YDB, gRPC: 17471, MsgBus: 3354 2025-03-31T16:35:21.760350Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488013149780351599:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:21.760549Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0013b9/r3tmp/tmpVOoTcI/pdisk_1.dat 2025-03-31T16:35:22.154514Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:35:22.186613Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:22.186726Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:22.188096Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17471, node 1 2025-03-31T16:35:22.240294Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:22.240318Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:22.240329Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:22.240442Z node 1 
:NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3354 TClient is connected to server localhost:3354 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:22.783756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:22.814086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:22.938307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:23.090818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:23.158332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:24.939847Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013162665255252:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:24.939987Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:25.258559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:35:25.292549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:35:25.320287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:35:25.346884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:35:25.372709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:35:25.403679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:35:25.446348Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013166960223058:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:25.446448Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:25.446582Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013166960223063:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:25.450488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:35:25.460763Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488013166960223065:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:35:25.556081Z node 1 :TX_PROXY ERROR: Actor# [1:7488013166960223120:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:26.511203Z node 1 :TX_PROXY ERROR: Actor# [1:7488013171255190737:3662] txid# 281474976710672, issues: { message: "Group already exists" severity: 1 } 2025-03-31T16:35:26.538397Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmZkMzhmN2UtMWY2MmI0LWJkYmFlOTRiLTMxZjNjMThl, ActorId: [1:7488013171255190731:2502], ActorState: ExecuteState, TraceId: 01jqpgxmn221pa2tbypbxd8cb8, Create QueryResponse for error on request, msg: 2025-03-31T16:35:26.620334Z node 1 :TX_PROXY ERROR: Actor# [1:7488013171255190797:3699] txid# 281474976710676, issues: { message: "Group not found" severity: 1 } 2025-03-31T16:35:26.620598Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OWMxMDAzODgtNDRlNjQyYWItODg3YTU0YS01ZWY0MTk4Ng==, ActorId: [1:7488013171255190789:2511], ActorState: ExecuteState, TraceId: 01jqpgxmrf39c4brj1pv0b3frm, Create QueryResponse for error on request, msg: 2025-03-31T16:35:26.710690Z node 1 :TX_PROXY ERROR: Actor# [1:7488013171255190855:3727] txid# 281474976710681, issues: { message: "Group already exists" severity: 1 } 2025-03-31T16:35:26.710862Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjAzMjJkMzUtOWRhMThlZjAtYjQ1YTczYTItYTUwMjMzNTc=, ActorId: [1:7488013171255190849:2523], ActorState: ExecuteState, TraceId: 01jqpgxmva6m65c6wmdy2wcq0p, Create QueryResponse for error on request, msg: 2025-03-31T16:35:26.760200Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488013149780351599:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:26.760285Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:35:26.844744Z node 1 :TX_PROXY ERROR: Actor# [1:7488013171255190931:3772] txid# 281474976710686, issues: { message: "Role \"user1\" is already a member of role \"group1\"" issue_code: 2 severity: 3 } 2025-03-31T16:35:26.868520Z node 1 :TX_PROXY ERROR: Actor# [1:7488013171255190945:3779] txid# 281474976710687, issues: { message: "Member account not found" severity: 1 } 2025-03-31T16:35:26.868878Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjM4ZWIwMWQtNjE3ZmI5MC04ZmJkN2I1My1iM2M3ZGVmMQ==, ActorId: [1:7488013171255190939:2539], ActorState: ExecuteState, TraceId: 01jqpgxn0723cbx4jq456tczct, Create QueryResponse for error on request, msg: 2025-03-31T16:35:26.921527Z node 1 :TX_PROXY ERROR: Actor# [1:7488013171255190991:3808] txid# 281474976710690, issues: { message: "Role \"user1\" is not a member of role \"group1\"" issue_code: 3 severity: 2 } 2025-03-31T16:35:26.966676Z node 1 :TX_PROXY ERROR: Actor# [1:7488013171255191008:3818] txid# 281474976710691, issues: { message: "Role \"user3\" is not a member of role \"group1\"" issue_code: 3 severity: 2 } 2025-03-31T16:35:27.006619Z node 1 :TX_PROXY ERROR: Actor# [1:7488013175550158331:3833] txid# 281474976710693, issues: { message: "Member account not found" severity: 1 } 2025-03-31T16:35:27.007182Z node 1 :KQP_SESSION WARN: SessionId: 
ydb://session/3?node_id=1&id=YjU5NTg4ODgtYTdjZjEwOTEtYjU5ZDY5MWYtNWYwNWMxOWQ=, ActorId: [1:7488013171255191017:2552], ActorState: ExecuteState, TraceId: 01jqpgxn48cq4v34x2bd31mr9j, Create QueryResponse for error on request, msg: 2025-03-31T16:35:27.032766Z node 1 :TX_PROXY ERROR: Actor# [1:7488013175550158347:3840] txid# 281474976710695, issues: { message: "Role \"user1\" is already a member of role \"group1\"" issue_code: 2 severity: 3 } 2025-03-31T16:35:27.121658Z node 1 :TX_PROXY ERROR: Actor# [1:7488013175550158402:3876] txid# 281474976710698, issues: { message: "Role \"user3\" is not a member of role \"group1\"" issue_code: 3 severity: 2 } 2025-03-31T16:35:27.147944Z node 1 :TX_PROXY ERROR: Actor# [1:7488013175550158427:3891] txid# 281474976710700, issues: { message: "Role \"user1\" is not a member of role \"group1\"" issue_code: 3 severity: 2 } 2025-03-31T16:35:27.172599Z node 1 :TX_PROXY ERROR: Actor# [1:7488013175550158441:3898] txid# 281474976710701, issues: { message: "Role \"user2\" is not a member of role \"group1\"" issue_code: 3 severity: 2 } 2025-03-31T16:35:27.234378Z node 1 :TX_PROXY ERROR: Actor# [1:7488013175550158482:3920] txid# 281474976710704, issues: { message: "Group already exists" severity: 1 } 2025-03-31T16:35:27.234683Z node 1 :KQP_SESSION WARN: ... CREATE OBJECT my_secret_2 (TYPE SECRET) WITH (value="qwerty"); 2025-03-31T16:35:58.253425Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7488013308969753584:4103], TxId: 281474976715865, task: 1. Ctx: { SessionId : ydb://session/3?node_id=3&id=NDAyNjgzMmMtZWQ0MmJlOTEtYTlhYzU5MzktYTVhZTVjN2E=. TraceId : 01jqpgyk9083gf8g0cp3mmf70n. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-03-31T16:35:58.254723Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7488013308969753585:4104], TxId: 281474976715865, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jqpgyk9083gf8g0cp3mmf70n. SessionId : ydb://session/3?node_id=3&id=NDAyNjgzMmMtZWQ0MmJlOTEtYTlhYzU5MzktYTVhZTVjN2E=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [3:7488013308969753576:3989], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-03-31T16:35:58.255485Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NDAyNjgzMmMtZWQ0MmJlOTEtYTlhYzU5MzktYTVhZTVjN2E=, ActorId: [3:7488013304674785946:3989], ActorState: ExecuteState, TraceId: 01jqpgyk9083gf8g0cp3mmf70n, Create QueryResponse for error on request, msg: 2025-03-31T16:35:58.257333Z node 3 :METADATA_PROVIDER ERROR: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: PRECONDITION_FAILED issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { id: "01jqpgyjvycjhrj6yg2vqw98bk" } } } } ;request=session_id: "ydb://session/3?node_id=3&id=NDAyNjgzMmMtZWQ0MmJlOTEtYTlhYzU5MzktYTVhZTVjN2E=" tx_control { tx_id: "01jqpgyjvycjhrj6yg2vqw98bk" } query { yql_text: "DECLARE $objects AS List<Struct<ownerUserId:Utf8,secretId:Utf8,value:Utf8>>;\nINSERT INTO `//Root/.metadata/secrets/values`\nSELECT ownerUserId,secretId,value FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "ownerUserId" type { type_id: UTF8 } } members { name: "secretId" type { type_id: UTF8 } } members { name: "value" type { type_id: UTF8 } } } } } } value { items { items { text_value: "" } items { text_value: "my_secret_2" } items { text_value: "qwerty" } } } } } ; 2025-03-31T16:35:58.259967Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZmMwOTMyZWUtY2YwMGFlNWYtOTk2MWI3NzQtODEwOGNiODQ=, ActorId: [3:7488013304674785938:3984], ActorState: ExecuteState, TraceId: 01jqpgyjf99q85z7mst1ns2387, Create QueryResponse for error on request, msg: Execute SQL: UPSERT OBJECT my_secret_2 (TYPE SECRET) WITH value = "edcba"; Trying to start YDB, gRPC: 13676, MsgBus: 22873 2025-03-31T16:36:01.177069Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488013322116430514:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:36:01.177178Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0013b9/r3tmp/tmpiWrawP/pdisk_1.dat 2025-03-31T16:36:01.297545Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:36:01.327684Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:36:01.327799Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:36:01.329131Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13676, node 4 2025-03-31T16:36:01.373355Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:36:01.373386Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:36:01.373398Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:36:01.373552Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22873 TClient is connected to server localhost:22873 WaitRootIsUp 'Root'... 
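A minimal sketch of the secret lifecycle exercised above, reusing the exact statements from the log: CREATE OBJECT translates into an INSERT into `//Root/.metadata/secrets/values` (visible in the yql_text of the failed request), so it fails with "Conflict with existing key." (code 2012) once the secret exists, whereas UPSERT OBJECT overwrites the stored value unconditionally.

CREATE OBJECT my_secret_2 (TYPE SECRET) WITH (value="qwerty");
-- Fails with code 2012 when my_secret_2 already has a row in
-- //Root/.metadata/secrets/values, matching the PRECONDITION_FAILED reply above.
UPSERT OBJECT my_secret_2 (TYPE SECRET) WITH value = "edcba";
-- Succeeds whether or not the secret exists: the stored value is simply replaced.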
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-31T16:36:02.006702Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:36:02.023753Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:36:02.191936Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:36:02.414417Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:36:02.508018Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:36:05.158788Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488013339296301476:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:05.158929Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:05.236290Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:36:05.275501Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:36:05.311532Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:36:05.344859Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:36:05.375370Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:36:05.414353Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:36:05.456381Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488013339296301987:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:05.456464Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:05.456513Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488013339296301992:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:05.460399Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:36:05.471260Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7488013339296301994:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:36:05.568120Z node 4 :TX_PROXY ERROR: Actor# [4:7488013339296302048:3450] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:36:06.177521Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7488013322116430514:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:36:06.177601Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:36:06.791669Z node 4 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [4:7488013343591269653:2501], status: GENERIC_ERROR, issues:
: Error: Optimization, code: 1070
:8:25: Error: Queries with mixed data and scheme operations are not supported. Use separate queries for different types of operations., code: 2009 2025-03-31T16:36:06.791952Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZjYwZWM4YjktZjllODg4ZjUtNjEzNzcxYjItNDU3NDkzNzA=, ActorId: [4:7488013343591269646:2497], ActorState: ExecuteState, TraceId: 01jqpgyvz58djatxffge1xr100, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: >> KqpService::ToDictCache+UseCache [GOOD] >> KqpService::ToDictCache-UseCache >> TSentinelUnstableTests::BSControllerCantChangeStatus >> KqpLocksTricky::TestNoLocksIssueInteractiveTx+withSink [GOOD] >> KqpLocksTricky::TestNoLocksIssueInteractiveTx-withSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TSimpleOlap [GOOD] Test command err: Trying to start YDB, gRPC: 2838, MsgBus: 3148 2025-03-31T16:35:38.585356Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488013224501634245:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:38.585390Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012c8/r3tmp/tmpaNIPw7/pdisk_1.dat 2025-03-31T16:35:38.967736Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2838, node 1 2025-03-31T16:35:39.024194Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:39.024302Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:39.027023Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:35:39.065122Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:39.065146Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:39.065151Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:39.065268Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3148 TClient is connected to server localhost:3148 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
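The GENERIC_ERROR compile failure above (code 2009) means a single query mixed a scheme (DDL) operation with a data (DML) operation; as the message says, they must be sent as separate requests. A minimal sketch, assuming a hypothetical table `/Root/Example`:

-- Request 1: scheme operation only.
CREATE TABLE `/Root/Example` (Key Uint64, Value Utf8, PRIMARY KEY (Key));
-- Request 2, sent separately: data operation only ("one"u is a Utf8 literal).
UPSERT INTO `/Root/Example` (Key, Value) VALUES (1u, "one"u);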
2025-03-31T16:35:39.551830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:39.564907Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:35:41.697166Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013237386536787:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:41.697353Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013237386536795:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:41.697445Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:41.702235Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:35:41.713720Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488013237386536801:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:35:41.788572Z node 1 :TX_PROXY ERROR: Actor# [1:7488013237386536852:2337] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:42.075107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:35:42.227547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:35:43.263373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:35:43.945825Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488013224501634245:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:43.979117Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:35:44.642394Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OGIzZGQ0NTAtYWYwYjE4ZmEtZTI4MzYyYzYtNTAxMjAyNDE=, ActorId: [1:7488013250271447244:2969], ActorState: ExecuteState, TraceId: 01jqpgy6500456qzaabx0qkmfn, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TSimple::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191FE23B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x196C634F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25: DoExecute @ 0x18DC0DB7 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18D0BACA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:50: Execute_ @ 0x18DB7B72 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18DBEF27 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18DBEF27 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18DBEF27 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196FD375 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196FD375 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196FD375 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x196CCEC8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18DBE0F3 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x196CE795 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196F78EC 17. ??:0: ?? @ 0x7FDAAAD20D8F 18. ??:0: ?? @ 0x7FDAAAD20E3F 19. ??:0: ?? @ 0x163B4028 Trying to start YDB, gRPC: 17556, MsgBus: 13944 2025-03-31T16:35:49.955823Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488013269949365319:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:49.957088Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012c8/r3tmp/tmpz4qjqm/pdisk_1.dat 2025-03-31T16:35:50.076927Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17556, node 2 2025-03-31T16:35:50.103937Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:50.104024Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:50.105981Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:35:50.147627Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:50.147648Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:50.147655Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:50.147766Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13944 TClient is connected to server localhost:13944 WaitRootIsUp 'Root'... 
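The TSimple assertion above fails because the server rejected the transaction where the test expected success: SnapshotRW isolation is accepted only for column-oriented (OLAP) tables, which is why the TSimpleOlap variant of the same suite is reported [GOOD]. A hedged sketch of declaring such a table (the name is illustrative; STORE = COLUMN is the documented column-store switch):

CREATE TABLE `/Root/OlapExample` (
    Key Uint64 NOT NULL,  -- key columns of a column table must be NOT NULL
    Value Utf8,
    PRIMARY KEY (Key)
) WITH (STORE = COLUMN);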
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:50.617379Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:52.921941Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013282834267848:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:52.921999Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013282834267865:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or ... ;tablet_id=72075186224038053;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:01.461328Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038053;self_id=[2:7488013308604077958:3357];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038053;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:01.462156Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038045;self_id=[2:7488013304309110574:3334];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038045;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:01.462619Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038045;self_id=[2:7488013304309110574:3334];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038045;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:01.462876Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038038;self_id=[2:7488013308604078050:3379];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038038;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:01.463274Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038038;self_id=[2:7488013308604078050:3379];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038038;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:01.465233Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038010;self_id=[2:7488013308604078013:3362];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038010;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:01.465642Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038010;self_id=[2:7488013308604078013:3362];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038010;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:01.468095Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038026;self_id=[2:7488013308604078018:3364];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038026;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:01.468443Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038026;self_id=[2:7488013308604078018:3364];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038026;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:01.469482Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038034;self_id=[2:7488013308604078009:3360];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038034;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:01.469829Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038034;self_id=[2:7488013308604078009:3360];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038034;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:01.471208Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038016;self_id=[2:7488013308604078043:3375];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038016;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:01.471737Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038016;self_id=[2:7488013308604078043:3375];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038016;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:01.475185Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038003;self_id=[2:7488013308604077945:3351];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038003;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:01.475653Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038003;self_id=[2:7488013308604077945:3351];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038003;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:01.477393Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038001;self_id=[2:7488013308604078076:3389];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038001;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:01.477814Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038001;self_id=[2:7488013308604078076:3389];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038001;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:01.480122Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038017;self_id=[2:7488013308604078060:3383];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038017;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:01.480527Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038017;self_id=[2:7488013308604078060:3383];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038017;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:01.480937Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038023;self_id=[2:7488013308604078034:3372];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038023;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:01.481309Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038023;self_id=[2:7488013308604078034:3372];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038023;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:01.483010Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038032;self_id=[2:7488013308604078057:3382];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038032;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:01.483405Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038032;self_id=[2:7488013308604078057:3382];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038032;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:01.484591Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038040;self_id=[2:7488013308604078020:3365];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038040;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:01.484983Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038040;self_id=[2:7488013308604078020:3365];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038040;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:01.498890Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038018;self_id=[2:7488013308604078047:3377];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038018;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:01.499426Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038018;self_id=[2:7488013308604078047:3377];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038018;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:01.505743Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038035;self_id=[2:7488013308604077938:3348];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038035;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:01.506164Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038035;self_id=[2:7488013308604077938:3348];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038035;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:01.508517Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038013;self_id=[2:7488013308604078166:3397];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038013;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:01.508898Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038013;self_id=[2:7488013308604078166:3397];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038013;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:01.508923Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038007;self_id=[2:7488013308604078045:3376];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038007;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:01.509236Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038007;self_id=[2:7488013308604078045:3376];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038007;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:01.517456Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038005;self_id=[2:7488013308604078124:3392];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038005;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:01.518042Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038044;self_id=[2:7488013304309110572:3333];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038044;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:01.518043Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038005;self_id=[2:7488013308604078124:3392];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038005;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:01.518400Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038044;self_id=[2:7488013304309110572:3333];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038044;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:01.518725Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038039;self_id=[2:7488013308604078030:3370];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038039;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:01.519045Z node 2 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224038039;self_id=[2:7488013308604078030:3370];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038039;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:01.861335Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037916;self_id=[2:7488013291424204737:2585];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037916;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:01.861632Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037916;self_id=[2:7488013291424204737:2585];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037916;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:01.878708Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=281474976715667;tx_id=281474976715667;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715667; 2025-03-31T16:36:01.880498Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2025-03-31T16:36:05.057549Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:36:05.057600Z node 2 :IMPORT WARN: Table profiles were not loaded WAIT_INDEXATION: 0 >> KqpTx::InteractiveTx [GOOD] >> KqpTx::InvalidateOnError >> KqpSinkTx::InvalidateOnError [GOOD] >> KqpSinkLocks::DifferentKeyUpdateOlap [GOOD] >> RangeOps::Intersection [GOOD] >> KqpSnapshotIsolation::TSimpleOltpNoSink [FAIL] >> KqpSnapshotRead::ReadOnlyTxCommitsOnConcurrentWrite+withSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_range_ops/unittest >> RangeOps::Intersection [GOOD] Test command err: first [(Uint64 : NULL, Uint64 : NULL) ; ()) second [(Uint64 : NULL, Uint64 : 1) ; (Uint64 : 20, Uint64 : 20)] result [(Uint64 : NULL, Uint64 : 1) ; (Uint64 : 20, Uint64 : 20)] correct [(Uint64 : NULL, Uint64 : 1) ; (Uint64 : 20, Uint64 : 20)] first [(Uint64 : NULL) ; ()) second [(Uint64 : NULL, Uint64 : 1) ; (Uint64 : 20, Uint64 : 20)] result [(Uint64 : NULL) ; (Uint64 : 20, Uint64 : 20)] correct [(Uint64 : NULL) ; (Uint64 : 20, Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 5)] result [(Uint64 : 10) ; (Uint64 : 5)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 10)] result [(Uint64 : 10) ; (Uint64 : 10)] correct [(Uint64 : 10) ; (Uint64 : 10)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 15)] result [(Uint64 : 10) ; (Uint64 : 15)] correct [(Uint64 : 10) ; (Uint64 : 15)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 20)] result [(Uint64 : 10) ; (Uint64 : 20)] correct [(Uint64 : 10) ; (Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 30)] result [(Uint64 : 10) ; (Uint64 : 20)] correct [(Uint64 : 10) ; (Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 10) ; (Uint64 : 10)] result [(Uint64 : 10) ; (Uint64 : 10)] correct [(Uint64 : 10) ; (Uint64 : 10)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 10) ; (Uint64 : 15)] result [(Uint64 : 10) ; (Uint64 : 15)] correct [(Uint64 : 10) ; (Uint64 : 15)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 10) ; (Uint64 : 20)] result [(Uint64 : 10) ; (Uint64 : 20)] correct [(Uint64 : 10) ; (Uint64 : 20)] first 
[(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 10) ; (Uint64 : 30)] result [(Uint64 : 10) ; (Uint64 : 20)] correct [(Uint64 : 10) ; (Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 15) ; (Uint64 : 17)] result [(Uint64 : 15) ; (Uint64 : 17)] correct [(Uint64 : 15) ; (Uint64 : 17)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 15) ; (Uint64 : 20)] result [(Uint64 : 15) ; (Uint64 : 20)] correct [(Uint64 : 15) ; (Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 15) ; (Uint64 : 30)] result [(Uint64 : 15) ; (Uint64 : 20)] correct [(Uint64 : 15) ; (Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 20) ; (Uint64 : 20)] result [(Uint64 : 20) ; (Uint64 : 20)] correct [(Uint64 : 20) ; (Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 20) ; (Uint64 : 30)] result [(Uint64 : 20) ; (Uint64 : 20)] correct [(Uint64 : 20) ; (Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 25) ; (Uint64 : 30)] result [(Uint64 : 25) ; (Uint64 : 20)] first ((Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 10)] result ((Uint64 : 10) ; (Uint64 : 10)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 10)) result [(Uint64 : 10) ; (Uint64 : 10)) first ((Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 10)) result ((Uint64 : 10) ; (Uint64 : 10)) first ((Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 15)] result ((Uint64 : 10) ; (Uint64 : 15)] correct ((Uint64 : 10) ; (Uint64 : 15)] first ((Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 15)) result ((Uint64 : 10) ; (Uint64 : 15)) correct ((Uint64 : 10) ; (Uint64 : 15)) first ((Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 20)) result ((Uint64 : 10) ; (Uint64 : 20)) correct ((Uint64 : 10) ; (Uint64 : 20)) first ((Uint64 : 10) ; (Uint64 : 20)) second [(Uint64 : 1) ; (Uint64 : 20)) result ((Uint64 : 10) ; (Uint64 : 20)) correct ((Uint64 : 10) ; (Uint64 : 20)) first [(Uint64 : NULL) ; ()) second [(Uint64 : 1) ; (Uint64 : 20)) result [(Uint64 : 1) ; (Uint64 : 20)) correct [(Uint64 : 1) ; (Uint64 : 20)) first [(Uint64 : 10) ; ()) second [(Uint64 : 1) ; (Uint64 : 20)) result [(Uint64 : 10) ; (Uint64 : 20)) correct [(Uint64 : 10) ; (Uint64 : 20)) first ((Uint64 : 10) ; ()) second [(Uint64 : 1) ; (Uint64 : 10)) result ((Uint64 : 10) ; (Uint64 : 10)) first ((Uint64 : 10) ; ()) second [(Uint64 : 1) ; (Uint64 : 20)) result ((Uint64 : 10) ; (Uint64 : 20)) correct ((Uint64 : 10) ; (Uint64 : 20)) first [(Uint64 : NULL) ; (Uint64 : 10)] second [(Uint64 : 1) ; (Uint64 : 20)) result [(Uint64 : 1) ; (Uint64 : 10)] correct [(Uint64 : 1) ; (Uint64 : 10)] first [(Uint64 : NULL) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 10)) result [(Uint64 : 1) ; (Uint64 : 10)) correct [(Uint64 : 1) ; (Uint64 : 10)) |97.3%| [TM] {RESULT} ydb/core/tx/datashard/ut_range_ops/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::InvalidateOnError [GOOD] Test command err: Trying to start YDB, gRPC: 63247, MsgBus: 1604 2025-03-31T16:35:48.007955Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488013264568526667:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:48.008108Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: 
SetPath # /home/runner/.ya/build/build_root/0l5c/0012b1/r3tmp/tmphureWZ/pdisk_1.dat 2025-03-31T16:35:48.409380Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:35:48.429101Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:48.429228Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:48.433334Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63247, node 1 2025-03-31T16:35:48.523608Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:48.523651Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:48.523666Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:48.523781Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1604 TClient is connected to server localhost:1604 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:49.010033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:50.889228Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013273158461927:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:50.889671Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013273158461904:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:50.889777Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:50.894352Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:35:50.905969Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488013273158461933:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:35:50.977201Z node 1 :TX_PROXY ERROR: Actor# [1:7488013273158461984:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:51.276056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:35:51.394556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:35:52.310256Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:35:53.020691Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488013264568526667:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:53.024981Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:35:53.948849Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976710667; 2025-03-31T16:35:53.966790Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488013286043373066:2969], Table: `/Root/KV` ([72057594046644480:7:1]), SessionActorId: [1:7488013286043372339:2969]Got LOCKS BROKEN for table `/Root/KV`. ShardID=72075186224037889, Sink=[1:7488013286043373066:2969].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-03-31T16:35:53.967539Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488013286043373055:2969], SessionActorId: [1:7488013286043372339:2969], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/KV`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[1:7488013286043372339:2969]. isRollback=0 2025-03-31T16:35:53.969028Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTI5ZDA5NGQtMzY1NzdiM2ItNDExZDYyZDctNTA2OTA3MA==, ActorId: [1:7488013286043372339:2969], ActorState: ExecuteState, TraceId: 01jqpgyfdmawbsydrr2e9cjw0e, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [1:7488013286043373056:2969] from: [1:7488013286043373055:2969] 2025-03-31T16:35:53.969168Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7488013286043373056:2969] TxId: 281474976710667. Ctx: { TraceId: 01jqpgyfdmawbsydrr2e9cjw0e, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTI5ZDA5NGQtMzY1NzdiM2ItNDExZDYyZDctNTA2OTA3MA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/KV`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-03-31T16:35:53.969428Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTI5ZDA5NGQtMzY1NzdiM2ItNDExZDYyZDctNTA2OTA3MA==, ActorId: [1:7488013286043372339:2969], ActorState: ExecuteState, TraceId: 01jqpgyfdmawbsydrr2e9cjw0e, Create QueryResponse for error on request, msg: WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 4035, MsgBus: 10239 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012b1/r3tmp/tmpDuH38n/pdisk_1.dat 2025-03-31T16:36:00.096020Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:36:00.097087Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:36:00.122299Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:36:00.122398Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:36:00.123909Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4035, node 2 2025-03-31T16:36:00.169988Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:36:00.170018Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:36:00.170026Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:36:00.170134Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10239 TClient is connected to server localhost:10239 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:36:00.601928Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:36:00.606524Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:36:03.220734Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013329884808724:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:03.220800Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013329884808740:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:03.220845Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:03.224002Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:36:03.231779Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488013329884808753:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:36:03.306210Z node 2 :TX_PROXY ERROR: Actor# [2:7488013329884808804:2338] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:36:03.354158Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:36:03.451184Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:36:04.457507Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:36:05.700073Z node 2 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=3; 2025-03-31T16:36:05.700324Z node 2 :TX_DATASHARD ERROR: Prepare transaction failed. txid 3 at tablet 72075186224037889 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-03-31T16:36:05.700506Z node 2 :TX_DATASHARD ERROR: Errors while proposing transaction txid 3 at tablet 72075186224037889 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-03-31T16:36:05.700675Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7488013338474751863:2969], Table: `/Root/KV` ([72057594046644480:7:1]), SessionActorId: [2:7488013338474751809:2969]Got CONSTRAINT VIOLATION for table `/Root/KV`. ShardID=72075186224037889, Sink=[2:7488013338474751863:2969].{
: Error: Duplicate keys have been found., code: 2012 } 2025-03-31T16:36:05.700840Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7488013338474751854:2969], SessionActorId: [2:7488013338474751809:2969], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/KV`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[2:7488013338474751809:2969]. isRollback=0 2025-03-31T16:36:05.701035Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ODE0NTk3MmUtMTA4NDk2MzItNzZjMDc2ZDQtYWM1MDlkNzc=, ActorId: [2:7488013338474751809:2969], ActorState: ExecuteState, TraceId: 01jqpgytw210t0bagpesp8kc1m, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [2:7488013338474751855:2969] from: [2:7488013338474751854:2969] 2025-03-31T16:36:05.701119Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7488013338474751855:2969] TxId: 281474976710664. Ctx: { TraceId: 01jqpgytw210t0bagpesp8kc1m, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODE0NTk3MmUtMTA4NDk2MzItNzZjMDc2ZDQtYWM1MDlkNzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/KV`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-03-31T16:36:05.702216Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ODE0NTk3MmUtMTA4NDk2MzItNzZjMDc2ZDQtYWM1MDlkNzc=, ActorId: [2:7488013338474751809:2969], ActorState: ExecuteState, TraceId: 01jqpgytw210t0bagpesp8kc1m, Create QueryResponse for error on request, msg:
: Error: Constraint violated. Table: `/Root/KV`., code: 2012
: Error: Duplicate keys have been found., code: 2012 2025-03-31T16:36:05.755252Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ODE0NTk3MmUtMTA4NDk2MzItNzZjMDc2ZDQtYWM1MDlkNzc=, ActorId: [2:7488013338474751809:2969], ActorState: ExecuteState, TraceId: 01jqpgytyb2jwvdqr7ms7kvh6b, Create QueryResponse for error on request, msg:
: Error: Transaction not found: 01jqpgytvw49gyh2seb80r3bwm, code: 2015 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> KqpSnapshotIsolation::TReadOnlyOlap [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_insert_multiple_rows ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::DifferentKeyUpdateOlap [GOOD] Test command err: Trying to start YDB, gRPC: 22076, MsgBus: 21288 2025-03-31T16:35:41.791257Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488013237762863327:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:41.791314Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012bb/r3tmp/tmpN7Xd1l/pdisk_1.dat 2025-03-31T16:35:42.235421Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22076, node 1 2025-03-31T16:35:42.270104Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:42.270227Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:42.271646Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:35:42.329161Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:42.329195Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:42.329202Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:42.329317Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21288 TClient is connected to server localhost:21288 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:42.924414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:44.824691Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013250647765868:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:44.824856Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:44.825216Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013250647765891:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:44.829799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:35:44.843505Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488013250647765893:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:35:44.947758Z node 1 :TX_PROXY ERROR: Actor# [1:7488013250647765944:2337] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:45.251584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:35:45.377836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:35:46.411364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:35:47.167441Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488013237762863327:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:47.222552Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 9474, MsgBus: 18846 2025-03-31T16:35:53.916078Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488013286261254413:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:53.916124Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012bb/r3tmp/tmpvyW9zZ/pdisk_1.dat 2025-03-31T16:35:54.035450Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:35:54.064268Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:54.064353Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:54.067820Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9474, node 2 2025-03-31T16:35:54.109236Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:54.109258Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:54.109269Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:54.109413Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18846 TClient is connected to server localhost:18846 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:54.547382Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:57.064439Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013303441124237:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:57.064504Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013303441124261:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:57.064558Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:57.069392Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:35:57.085353Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488013303441124266:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:35:57.141047Z node 2 :TX_PROXY ERROR: Actor# [2:7488013303441124317:2335] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:57.197286Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-31T16:35:57.362896Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7488013303441124557:2353];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:35:57.363135Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7488013303441124557:2353];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:35:57.363479Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7488013303441124557:2353];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:35:57.363617Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7488013303441124557:2353];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:35:57.363721Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:748801330344112 ... 
;tablet_id=72075186224038015;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:05.377254Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[2:7488013320620999766:3353];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038015;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:05.377263Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038016;self_id=[2:7488013324915967205:3383];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038016;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:05.377598Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038045;self_id=[2:7488013324915967161:3370];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038045;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:05.377623Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038016;self_id=[2:7488013324915967205:3383];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038016;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:05.377822Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038043;self_id=[2:7488013320620999790:3359];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038043;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:05.377950Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038045;self_id=[2:7488013324915967161:3370];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038045;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:05.378173Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038043;self_id=[2:7488013320620999790:3359];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038043;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:05.378372Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038010;self_id=[2:7488013324915967195:3378];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038010;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:05.378734Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038010;self_id=[2:7488013324915967195:3378];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038010;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:05.379044Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038032;self_id=[2:7488013324915967130:3367];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038032;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:05.379509Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038032;self_id=[2:7488013324915967130:3367];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038032;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:05.508870Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038005;self_id=[2:7488013324915967240:3391];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038005;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:05.509344Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038005;self_id=[2:7488013324915967240:3391];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038005;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:05.509591Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038025;self_id=[2:7488013324915967228:3388];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038025;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:05.509940Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038025;self_id=[2:7488013324915967228:3388];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038025;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:05.513813Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038027;self_id=[2:7488013324915967199:3380];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038027;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:05.513814Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038007;self_id=[2:7488013324915967257:3393];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038007;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:05.514222Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038007;self_id=[2:7488013324915967257:3393];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038007;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:05.514257Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038027;self_id=[2:7488013324915967199:3380];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038027;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:05.514442Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038003;self_id=[2:7488013324915967254:3392];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038003;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:05.514503Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038020;self_id=[2:7488013324915967264:3398];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038020;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:05.514731Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038003;self_id=[2:7488013324915967254:3392];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038003;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:05.514815Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038020;self_id=[2:7488013324915967264:3398];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038020;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:05.514965Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038058;self_id=[2:7488013324915967154:3369];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038058;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:05.515290Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038058;self_id=[2:7488013324915967154:3369];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038058;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:05.515612Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038038;self_id=[2:7488013324915967182:3372];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038038;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:05.515875Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038038;self_id=[2:7488013324915967182:3372];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038038;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:05.516022Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038036;self_id=[2:7488013324915967187:3374];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038036;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:05.516303Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038036;self_id=[2:7488013324915967187:3374];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038036;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:05.516431Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038009;self_id=[2:7488013324915967277:3399];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038009;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:05.516461Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038012;self_id=[2:7488013324915967201:3381];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038012;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:05.516729Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038012;self_id=[2:7488013324915967201:3381];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038012;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:05.516827Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038009;self_id=[2:7488013324915967277:3399];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038009;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:05.517057Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038023;self_id=[2:7488013324915967219:3384];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038023;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:05.517188Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038008;self_id=[2:7488013324915967222:3385];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038008;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:05.517397Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038023;self_id=[2:7488013324915967219:3384];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038023;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:05.517531Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038008;self_id=[2:7488013324915967222:3385];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038008;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:05.517699Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038017;self_id=[2:7488013324915967609:3401];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038017;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:05.518018Z node 2 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224038034;self_id=[2:7488013324915967203:3382];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038034;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:05.518099Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038017;self_id=[2:7488013324915967609:3401];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038017;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:05.518338Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038034;self_id=[2:7488013324915967203:3382];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038034;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:05.655047Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=281474976715667;tx_id=281474976715667;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715667; 2025-03-31T16:36:05.656630Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2025-03-31T16:36:09.026984Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:36:09.027016Z node 2 :IMPORT WARN: Table profiles were not loaded WAIT_INDEXATION: 0 >> KqpQueryService::CloseSessionsWithLoad [GOOD] >> KqpQueryService::ClosedSessionRemovedFromPool >> test_postgres.py::TestPostgresSuite::test_postgres_suite[horology] >> test_liveness_wardens.py::TestLivenessWarden::test_hive_liveness_warden_reports_issues >> TTxDataShardLocalKMeansScan::BadRequest >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite2 [GOOD] >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite3 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TReadOnlyOlap [GOOD] Test command err: Trying to start YDB, gRPC: 25866, MsgBus: 1199 2025-03-31T16:35:43.339806Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488013242865959352:2225];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:43.339904Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012b8/r3tmp/tmpPbpBHu/pdisk_1.dat 2025-03-31T16:35:43.663511Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25866, node 1 2025-03-31T16:35:43.747419Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:43.748274Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:43.800090Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:35:43.827665Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:43.827688Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:43.827700Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:43.827825Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient 
is connected to server localhost:1199 TClient is connected to server localhost:1199 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:44.440364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:44.465711Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:35:46.548040Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013255750861717:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:46.548344Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:46.548806Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013255750861752:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:46.556296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:35:46.569449Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488013255750861754:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:35:46.642708Z node 1 :TX_PROXY ERROR: Actor# [1:7488013255750861805:2337] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:46.905994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:35:47.016677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:35:47.934835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:35:48.637936Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488013242865959352:2225];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:48.670523Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:35:49.208631Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODQxZTFmZTktODQ4ODJjNGItNGQwOTNlMDAtZmQwMDhiNTM=, ActorId: [1:7488013268635772060:2968], ActorState: ExecuteState, TraceId: 01jqpgyaqy8924qybegvgs0b73, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191FE23B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x196C634F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76: DoExecute @ 0x18DC7347 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18D0BACA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:112: Execute_ @ 0x18DB841A 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18DBEF27 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18DBEF27 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18DBEF27 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196FD375 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196FD375 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196FD375 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x196CCEC8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18DBE0F3 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x196CE795 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196F78EC 17. ??:0: ?? @ 0x7FE828A22D8F 18. ??:0: ?? @ 0x7FE828A22E3F 19. ??:0: ?? @ 0x163B4028 Trying to start YDB, gRPC: 28927, MsgBus: 27464 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012b8/r3tmp/tmpcln4rX/pdisk_1.dat 2025-03-31T16:35:54.608484Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:35:54.609365Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:35:54.641234Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:54.641315Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:54.642897Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28927, node 2 2025-03-31T16:35:54.718496Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:54.718520Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:54.718528Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:54.718652Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27464 TClient is connected to server localhost:27464 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-31T16:35:55.153614Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:35:57.540588Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013304873725862:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:57.540770Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013304873725841:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:57.541170Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, ... ColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038040;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:06.497119Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038009;self_id=[2:7488013326348568821:3385];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038009;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:06.497297Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038075;self_id=[2:7488013326348568522:3329];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038075;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:06.497504Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038040;self_id=[2:7488013326348568816:3383];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038040;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:06.497606Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038075;self_id=[2:7488013326348568522:3329];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038075;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:06.497791Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038029;self_id=[2:7488013330643536162:3389];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038029;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:06.497801Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038054;self_id=[2:7488013326348568814:3382];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038054;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:06.498097Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038029;self_id=[2:7488013330643536162:3389];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038029;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:06.498165Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038054;self_id=[2:7488013326348568814:3382];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038054;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:06.498304Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038050;self_id=[2:7488013326348568796:3373];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038050;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:06.498387Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038019;self_id=[2:7488013330643536182:3395];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038019;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:06.498618Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038050;self_id=[2:7488013326348568796:3373];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038050;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:06.498777Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038019;self_id=[2:7488013330643536182:3395];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038019;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:06.498815Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038034;self_id=[2:7488013326348568789:3372];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038034;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:06.499013Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038049;self_id=[2:7488013326348568783:3369];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038049;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:06.499158Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038034;self_id=[2:7488013326348568789:3372];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038034;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:06.499357Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038001;self_id=[2:7488013330643536417:3397];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038001;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:06.499437Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038049;self_id=[2:7488013326348568783:3369];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038049;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:06.499661Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038001;self_id=[2:7488013330643536417:3397];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038001;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:06.499673Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038024;self_id=[2:7488013330643536175:3392];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038024;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:06.500073Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038024;self_id=[2:7488013330643536175:3392];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038024;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:06.500443Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038016;self_id=[2:7488013326348568763:3359];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038016;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:06.500707Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038016;self_id=[2:7488013326348568763:3359];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038016;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:06.500943Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038004;self_id=[2:7488013330643536168:3391];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038004;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:06.501319Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038004;self_id=[2:7488013330643536168:3391];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038004;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:06.501554Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038036;self_id=[2:7488013330643536119:3386];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038036;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:06.501901Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038036;self_id=[2:7488013330643536119:3386];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038036;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:06.502154Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038012;self_id=[2:7488013326348568812:3381];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038012;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:06.502507Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038012;self_id=[2:7488013326348568812:3381];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038012;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:06.505054Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;self_id=[2:7488013313463662479:2582];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037909;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:06.505312Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;self_id=[2:7488013313463662479:2582];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037909;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:06.505564Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038013;self_id=[2:7488013330643536184:3396];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038013;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:06.505976Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038013;self_id=[2:7488013330643536184:3396];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038013;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:06.506280Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038023;self_id=[2:7488013330643536121:3387];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038023;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:06.506673Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038023;self_id=[2:7488013330643536121:3387];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038023;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:06.506720Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038007;self_id=[2:7488013330643536176:3393];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038007;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:06.506958Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037999;self_id=[2:7488013326348568029:3230];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037999;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:06.507178Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038007;self_id=[2:7488013330643536176:3393];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038007;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:06.507383Z node 2 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224037999;self_id=[2:7488013326348568029:3230];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037999;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:06.507429Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038037;self_id=[2:7488013326348568799:3375];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038037;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:06.507800Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038037;self_id=[2:7488013326348568799:3375];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038037;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:06.696666Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=281474976715667;tx_id=281474976715667;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715667; 2025-03-31T16:36:06.698260Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037897;tx_state=TTxProgressTx::Complete;commit_tx_id=281474976715667;commit_lock_id=281474976715666;fline=manager.cpp:94;broken_lock_id=281474976715665; 2025-03-31T16:36:06.698561Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2025-03-31T16:36:09.590117Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:36:09.590145Z node 2 :IMPORT WARN: Table profiles were not loaded WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> alter_compression.py::TestAlterCompression::test_all_supported_compression >> KqpTx::InvalidateOnError [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_bs_controller] >> TTxDataShardLocalKMeansScan::BadRequest [GOOD] >> TTxDataShardLocalKMeansScan::MainToPosting ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::InvalidateOnError [GOOD] Test command err: Trying to start YDB, gRPC: 7368, MsgBus: 14437 2025-03-31T16:36:06.484700Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488013344440170474:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:36:06.484814Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00129b/r3tmp/tmpmaDyOG/pdisk_1.dat 2025-03-31T16:36:06.822044Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7368, node 1 2025-03-31T16:36:06.875860Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:36:06.875997Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:36:06.877659Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:36:06.883902Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:36:06.883925Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:36:06.883936Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 
2025-03-31T16:36:06.884058Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14437 TClient is connected to server localhost:14437 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:36:07.346457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:36:07.372613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:36:07.476153Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:36:07.619811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:36:07.688038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:36:09.198648Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013357325074139:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:09.198774Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:09.477930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:36:09.512965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:36:09.541940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:36:09.566774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:36:09.592551Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:36:09.619224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:36:09.696808Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013357325074654:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:09.696905Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:09.697147Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013357325074659:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:09.700370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:36:09.709267Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488013357325074661:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:36:09.801439Z node 1 :TX_PROXY ERROR: Actor# [1:7488013357325074717:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 9834, MsgBus: 16728 2025-03-31T16:36:11.645100Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488013365293224207:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:36:11.645202Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00129b/r3tmp/tmpQMWvKq/pdisk_1.dat 2025-03-31T16:36:11.738952Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9834, node 2 2025-03-31T16:36:11.783215Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:36:11.783304Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:36:11.784970Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:36:11.800541Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:36:11.800567Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:36:11.800573Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:36:11.800658Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16728 TClient is connected to server localhost:16728 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:36:12.178336Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:36:12.196404Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:36:12.245636Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:36:12.378729Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:36:12.452371Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:36:14.758648Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013378178127874:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:14.758744Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:14.807567Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:36:14.835990Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:36:14.866256Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:36:14.897819Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:36:14.928903Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:36:14.998393Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:36:15.081724Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013382473095691:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:15.081816Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:15.082028Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013382473095696:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:15.085464Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:36:15.095065Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488013382473095698:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:36:15.180372Z node 2 :TX_PROXY ERROR: Actor# [2:7488013382473095753:3449] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:36:16.021536Z node 2 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=3; 2025-03-31T16:36:16.030158Z node 2 :TX_DATASHARD ERROR: Prepare transaction failed. txid 3 at tablet 72075186224037911 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-03-31T16:36:16.030385Z node 2 :TX_DATASHARD ERROR: Errors while proposing transaction txid 3 at tablet 72075186224037911 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-03-31T16:36:16.030609Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7488013386768063364:2496], Table: `/Root/KeyValue` ([72057594046644480:6:1]), SessionActorId: [2:7488013382473096040:2496]Got CONSTRAINT VIOLATION for table `/Root/KeyValue`. ShardID=72075186224037911, Sink=[2:7488013386768063364:2496].{
: Error: Duplicate keys have been found., code: 2012 } 2025-03-31T16:36:16.031186Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7488013386768063357:2496], SessionActorId: [2:7488013382473096040:2496], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/KeyValue`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[2:7488013382473096040:2496]. isRollback=0 2025-03-31T16:36:16.031460Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MzYyZjdhNmMtY2E1ZjQ0YjYtNjUxYjhlNDktODgxNmEzMTE=, ActorId: [2:7488013382473096040:2496], ActorState: ExecuteState, TraceId: 01jqpgz4yw9tmt2sb03qstj7vm, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [2:7488013386768063358:2496] from: [2:7488013386768063357:2496] 2025-03-31T16:36:16.031555Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7488013386768063358:2496] TxId: 281474976715671. Ctx: { TraceId: 01jqpgz4yw9tmt2sb03qstj7vm, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MzYyZjdhNmMtY2E1ZjQ0YjYtNjUxYjhlNDktODgxNmEzMTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/KeyValue`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-03-31T16:36:16.032554Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MzYyZjdhNmMtY2E1ZjQ0YjYtNjUxYjhlNDktODgxNmEzMTE=, ActorId: [2:7488013382473096040:2496], ActorState: ExecuteState, TraceId: 01jqpgz4yw9tmt2sb03qstj7vm, Create QueryResponse for error on request, msg: 2025-03-31T16:36:16.088813Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MzYyZjdhNmMtY2E1ZjQ0YjYtNjUxYjhlNDktODgxNmEzMTE=, ActorId: [2:7488013382473096040:2496], ActorState: ExecuteState, TraceId: 01jqpgz516czxnnx9125p7gqrc, Create QueryResponse for error on request, msg: >> KqpSnapshotRead::ReadOnlyTxCommitsOnConcurrentWrite+withSink [GOOD] >> KqpSinkLocks::UncommittedRead [GOOD] >> KqpSinkMvcc::OlapMultiSinks [GOOD] >> KqpSnapshotRead::TestSnapshotExpiration-withSink [GOOD] >> KqpTx::BeginTransactionBadMode ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::UncommittedRead [GOOD] Test command err: Trying to start YDB, gRPC: 24899, MsgBus: 2513 2025-03-31T16:35:49.661003Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488013271754163248:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:49.661074Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012ae/r3tmp/tmpHHMvWI/pdisk_1.dat 2025-03-31T16:35:50.056551Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24899, node 1 2025-03-31T16:35:50.074717Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:50.079286Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:50.116463Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:35:50.166558Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:50.166575Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:50.166579Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:50.166662Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2513 TClient is connected to server localhost:2513 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:50.689065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:52.541173Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013284639065783:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:52.541266Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013284639065804:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:52.541323Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:52.545174Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:35:52.558661Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488013284639065810:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:35:52.633873Z node 1 :TX_PROXY ERROR: Actor# [1:7488013284639065861:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:52.912375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:35:53.063904Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488013284639066056:2346];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:35:53.063905Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488013284639066052:2344];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:35:53.064130Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488013284639066052:2344];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:35:53.064390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488013284639066052:2344];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:35:53.064480Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488013284639066052:2344];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:35:53.064556Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488013284639066056:2346];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:35:53.064558Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488013284639066052:2344];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:35:53.064715Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488013284639066052:2344];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:35:53.064753Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488013284639066056:2346];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:35:53.064843Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488013284639066052:2344];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:35:53.064866Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488013284639066056:2346];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 
2025-03-31T16:35:53.064951Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488013284639066052:2344];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:35:53.064972Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488013284639066056:2346];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:35:53.065054Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488013284639066052:2344];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:35:53.065071Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488013284639066056:2346];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:35:53.065173Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488013284639066056:2346];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:35:53.065266Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488013284639066052:2344];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:35:53.065295Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488013284639066056:2346];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:35:53.065404Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488013284639066052:2344];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:35:53.065418Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488013284639066056:2346];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:35:53.065525Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488013284639066052:2344];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:35:53.065532Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488013284639066056:2346];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:35:53.065633Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488013284639066056:2346];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:35:53.065780Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7488013284639066056:2346];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:35:53.104410Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488013288934033371:2350];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:35:53.104466Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488013288934033371:2350];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:35:53.104633Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_i ... HARD WARN: tablet_id=72075186224037897;task_id=3b29c51e-e4e11f0-a7736a43-339a2c26;tablet_id=72075186224037897;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:01.157719Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:01.158516Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038071;self_id=[1:7488013306113908660:3274];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038071;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:01.158736Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038071;self_id=[1:7488013306113908660:3274];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038071;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:01.159757Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038095;self_id=[1:7488013306113908373:3213];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038095;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:01.159940Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038095;self_id=[1:7488013306113908373:3213];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038095;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:01.164314Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038061;self_id=[1:7488013306113908627:3260];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038061;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:01.164528Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038061;self_id=[1:7488013306113908627:3260];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038061;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:01.170778Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038074;self_id=[1:7488013306113908776:3302];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038074;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:01.171033Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038074;self_id=[1:7488013306113908776:3302];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038074;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:01.171975Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038082;self_id=[1:7488013306113908481:3240];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038082;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:01.172208Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038082;self_id=[1:7488013306113908481:3240];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038082;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:01.172376Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038037;self_id=[1:7488013306113908802:3320];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038037;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:01.172522Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038037;self_id=[1:7488013306113908802:3320];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038037;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:01.172786Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038036;self_id=[1:7488013306113908760:3296];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038036;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:01.172928Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038036;self_id=[1:7488013306113908760:3296];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038036;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:01.173830Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038085;self_id=[1:7488013306113908483:3241];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038085;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:01.174073Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038085;self_id=[1:7488013306113908483:3241];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038085;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:01.174683Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038077;self_id=[1:7488013306113908465:3230];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038077;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:01.174846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038077;self_id=[1:7488013306113908465:3230];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038077;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:01.179187Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038093;self_id=[1:7488013306113908380:3214];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038093;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:01.179418Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038093;self_id=[1:7488013306113908380:3214];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038093;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2025-03-31T16:36:05.054005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:36:05.054042Z node 1 :IMPORT WARN: Table profiles were not loaded WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 25069, MsgBus: 17954 
2025-03-31T16:36:07.407288Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488013347232702485:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:36:07.407335Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012ae/r3tmp/tmp2cvQf0/pdisk_1.dat 2025-03-31T16:36:07.483377Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25069, node 2 2025-03-31T16:36:07.531456Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:36:07.531529Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:36:07.533389Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:36:07.544658Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:36:07.544688Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:36:07.544700Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:36:07.544848Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17954 TClient is connected to server localhost:17954 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:36:07.930484Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:36:10.307203Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013360117605022:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:10.307269Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013360117605010:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:10.307440Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:10.313312Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:36:10.337680Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488013360117605036:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:36:10.417067Z node 2 :TX_PROXY ERROR: Actor# [2:7488013360117605087:2333] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:36:10.463799Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-31T16:36:10.539416Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-31T16:36:11.450024Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:36:12.407255Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488013347232702485:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:36:12.407294Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::OlapMultiSinks [GOOD] Test command err: Trying to start YDB, gRPC: 19642, MsgBus: 5913 2025-03-31T16:35:39.323322Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488013228856002492:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:39.323430Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012c5/r3tmp/tmpVtp88R/pdisk_1.dat 2025-03-31T16:35:39.791545Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:35:39.792956Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:39.793078Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:39.799401Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19642, node 1 2025-03-31T16:35:39.879568Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:39.879589Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:39.879605Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:39.879717Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5913 TClient is connected to server localhost:5913 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:40.406598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:40.426807Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:35:42.483128Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013241740905026:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:42.483246Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:42.483690Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013241740905044:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:42.487740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:35:42.498940Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488013241740905046:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:35:42.555498Z node 1 :TX_PROXY ERROR: Actor# [1:7488013241740905097:2337] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:42.907963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:35:43.089675Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488013246035872608:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:35:43.089677Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488013246035872592:2348];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:35:43.089911Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488013246035872592:2348];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:35:43.090332Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488013246035872608:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:35:43.090351Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488013246035872592:2348];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:35:43.090484Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488013246035872592:2348];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:35:43.090565Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488013246035872608:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:35:43.090622Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488013246035872592:2348];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:35:43.090688Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488013246035872608:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:35:43.090763Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488013246035872592:2348];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:35:43.090786Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488013246035872608:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 
2025-03-31T16:35:43.090877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488013246035872608:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:35:43.090883Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488013246035872592:2348];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:35:43.090971Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488013246035872608:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:35:43.091009Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488013246035872592:2348];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:35:43.091105Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488013246035872608:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:35:43.091213Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488013246035872592:2348];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:35:43.091227Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488013246035872608:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:35:43.091324Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488013246035872608:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:35:43.091325Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488013246035872592:2348];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:35:43.091438Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488013246035872608:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:35:43.091562Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488013246035872608:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:35:43.092653Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488013246035872592:2348];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:35:43.092861Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7488013246035872592:2348];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:35:43.129552Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488013246035872594:2349];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:35:43.129618Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488013246035872594:2349];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstra ... 1674Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038036;self_id=[2:7488013354086170417:3328];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038036;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:12.191916Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038023;self_id=[2:7488013354086170427:3333];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038023;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:12.192273Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038023;self_id=[2:7488013354086170427:3333];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038023;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:12.194079Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038010;self_id=[2:7488013354086170505:3344];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038010;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:12.194419Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038010;self_id=[2:7488013354086170505:3344];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038010;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:12.194611Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038013;self_id=[2:7488013354086170490:3340];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038013;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:12.194972Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038013;self_id=[2:7488013354086170490:3340];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038013;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:12.196567Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038044;self_id=[2:7488013354086170362:3306];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038044;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:12.196957Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038044;self_id=[2:7488013354086170362:3306];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038044;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:12.197248Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038009;self_id=[2:7488013354086170496:3343];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038009;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:12.197662Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038009;self_id=[2:7488013354086170496:3343];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038009;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:12.203246Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038031;self_id=[2:7488013354086170432:3335];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038031;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:12.203688Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038031;self_id=[2:7488013354086170432:3335];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038031;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:12.204280Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038040;self_id=[2:7488013354086170405:3324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038040;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:12.204609Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038040;self_id=[2:7488013354086170405:3324];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038040;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:12.204843Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038039;self_id=[2:7488013354086170366:3308];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038039;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:12.205240Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038039;self_id=[2:7488013354086170366:3308];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038039;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:12.208797Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038021;self_id=[2:7488013354086170396:3320];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038021;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:12.209265Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038021;self_id=[2:7488013354086170396:3320];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038021;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:12.209692Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038043;self_id=[2:7488013354086170400:3322];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038043;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:12.210053Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038043;self_id=[2:7488013354086170400:3322];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038043;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:12.210963Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038056;self_id=[2:7488013354086170080:3288];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038056;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:12.211314Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038056;self_id=[2:7488013354086170080:3288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038056;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:12.211662Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038004;self_id=[2:7488013354086170662:3346];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038004;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:12.212035Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038004;self_id=[2:7488013354086170662:3346];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038004;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:12.815916Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[2:7488013332611327658:2351];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:12.815916Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[2:7488013332611327661:2352];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037892;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:12.816170Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[2:7488013332611327658:2351];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:12.816172Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[2:7488013332611327661:2352];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037892;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:12.816364Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[2:7488013332611327648:2347];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037895;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:12.816515Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[2:7488013332611327648:2347];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037895;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:12.816619Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[2:7488013332611327627:2346];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:12.816911Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[2:7488013332611327627:2346];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:12.842076Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037995;self_id=[2:7488013336906296083:2423];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037995;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:12.842374Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037995;self_id=[2:7488013336906296083:2423];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037995;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:12.842618Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037978;self_id=[2:7488013336906296070:2422];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037978;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:12.842809Z node 2 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224037978;self_id=[2:7488013336906296070:2422];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037978;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:12.842871Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[2:7488013332611327625:2345];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:12.843049Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[2:7488013332611327625:2345];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:12.844702Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[2:7488013332611327764:2353];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037897;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:12.845022Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[2:7488013332611327764:2353];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037897;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:12.848847Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037959;self_id=[2:7488013336906296211:2438];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037959;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:12.849143Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037959;self_id=[2:7488013336906296211:2438];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037959;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2025-03-31T16:36:14.861408Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:36:14.861430Z node 2 :IMPORT WARN: Table profiles were not loaded WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::ReadOnlyTxCommitsOnConcurrentWrite+withSink [GOOD] Test command err: Trying to start YDB, gRPC: 31279, MsgBus: 2943 2025-03-31T16:36:02.254854Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488013326748741567:2269];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:36:02.255047Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00129e/r3tmp/tmpkCcDeU/pdisk_1.dat 2025-03-31T16:36:02.587877Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31279, node 1 2025-03-31T16:36:02.638326Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:36:02.640265Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:36:02.648864Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:36:02.707510Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:36:02.707539Z node 1 
:NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:36:02.707552Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:36:02.707692Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2943 TClient is connected to server localhost:2943 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:36:03.178645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:36:05.205226Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013339633643910:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:05.205345Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013339633643902:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:05.205702Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:05.210412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:36:05.221551Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488013339633643916:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:36:05.318290Z node 1 :TX_PROXY ERROR: Actor# [1:7488013339633643969:2337] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:36:05.573491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:36:05.674969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:36:06.549021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:36:07.262863Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488013326748741567:2269];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:36:07.267068Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:36:07.809577Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTJhYzdiMmItZDZkMDA3MzYtNjQxMjc3Mi1kMzBlM2E3YQ==, ActorId: [1:7488013348223587190:2968], ActorState: ExecuteState, TraceId: 01jqpgywszafc5btf6f3rfw746, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TSimple::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191FE23B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x196C634F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25: DoExecute @ 0x18DC0DB7 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18D0BACA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:57: Execute_ @ 0x18DB7D9A 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18DBEF27 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18DBEF27 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18DBEF27 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196FD375 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196FD375 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196FD375 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x196CCEC8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18DBE0F3 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x196CE795 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196F78EC 17. ??:0: ?? @ 0x7FB718678D8F 18. ??:0: ?? @ 0x7FB718678E3F 19. ??:0: ?? @ 0x163B4028 Trying to start YDB, gRPC: 3750, MsgBus: 1987 2025-03-31T16:36:12.550890Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488013371351115149:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:36:12.550982Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00129e/r3tmp/tmpFt1Geo/pdisk_1.dat 2025-03-31T16:36:12.665159Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:36:12.691728Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:36:12.691810Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:36:12.693108Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3750, node 2 2025-03-31T16:36:12.729430Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:36:12.729459Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:36:12.729467Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:36:12.729596Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1987 TClient is connected to server localhost:1987 WaitRootIsUp 'Root'... 
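Reading the assertion failure above: the check at kqp_snapshot_isolation_ut.cpp:25 expected result.GetStatus() == EStatus::SUCCESS, but the query was rejected with PRECONDITION_FAILED because "SnapshotRW can only be used with olap tables" (olap meaning column-oriented tables, while this suite variant runs against row tables). The "with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)" line is library/cpp/testing/unittest's compact string diff of the two status names: shared characters "C" and "E" stay outside the parentheses, and each "(left|right)" group picks one side. A minimal sketch of what such a check looks like, with names assumed from the log rather than copied from the test source:

    // Hedged reconstruction, not the actual test code; `result` stands for
    // the query result the test obtained before the assertion.
    UNIT_ASSERT_VALUES_EQUAL_C(result.GetStatus(), NYdb::EStatus::SUCCESS,
                               result.GetIssues().ToString());
    // On mismatch the macro raises the "assertion failed ..." error above and
    // prints the compact diff of the two enum names, as seen in this log.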
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:36:13.120904Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:36:13.138912Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:36:13.194655Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:36:13.344957Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:36:13.402678Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:36:15.428123Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013384236018815:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:15.428207Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:15.491732Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:36:15.521834Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:36:15.552723Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:36:15.588840Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:36:15.620917Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:36:15.692513Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:36:15.736279Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013384236019333:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:15.736360Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:15.736585Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013384236019338:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:15.739920Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:36:15.752736Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488013384236019340:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:36:15.808799Z node 2 :TX_PROXY ERROR: Actor# [2:7488013384236019393:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:36:17.551171Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488013371351115149:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:36:17.551252Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpSinkTx::OlapSnapshotRO [GOOD] >> KqpSinkTx::OlapDeferredEffects [GOOD] >> KqpSinkTx::OlapExplicitTcl >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_bs_controller] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_datashard] >> KqpSinkMvcc::SnapshotExpiration [GOOD] >> KqpSinkTx::DeferredEffects >> TabletService_ChangeSchema::Basics >> TSentinelBaseTests::PDiskInitialStatus [GOOD] >> TSentinelBaseTests::PDiskErrorState [GOOD] >> TSentinelBaseTests::PDiskInactiveAfterStateChange [GOOD] >> TSentinelBaseTests::PDiskFaultyState [GOOD] >> TSentinelBaseTests::PDiskStateChangeNormalFlow [GOOD] >> TSentinelBaseTests::PDiskStateChangeNodePermanentlyBad [GOOD] >> TSentinelBaseTests::PDiskStateChangeNodeNotExpectedRestart [GOOD] >> TSentinelBaseTests::PDiskStateChangeNodeExpectedRestart [GOOD] >> TSentinelBaseTests::GuardianDataCenterRatio >> TSentinelBaseTests::GuardianDataCenterRatio [GOOD] >> TSentinelBaseTests::GuardianRackRatio [GOOD] >> TSentinelTests::Smoke >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite1 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::OlapSnapshotRO [GOOD] Test command err: Trying to start YDB, gRPC: 18924, MsgBus: 29070 2025-03-31T16:35:39.519188Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488013229630622450:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:39.519286Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012c3/r3tmp/tmpIu2sAF/pdisk_1.dat 2025-03-31T16:35:39.903968Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:35:39.923910Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:39.924044Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:39.925771Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18924, node 1 2025-03-31T16:35:39.999612Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:39.999637Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:39.999643Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:39.999748Z node 1 :NET_CLASSIFIER ERROR: got bad 
distributable configuration TClient is connected to server localhost:29070 TClient is connected to server localhost:29070 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:40.550801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:40.567327Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:35:42.558194Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013242515524969:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:42.558309Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013242515524947:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:42.558379Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:42.563498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:35:42.574192Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488013242515524985:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:35:42.648156Z node 1 :TX_PROXY ERROR: Actor# [1:7488013242515525036:2336] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:42.928692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:35:43.086585Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488013246810492537:2351];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:35:43.086585Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488013246810492533:2349];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:35:43.086777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488013246810492533:2349];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:35:43.086824Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488013246810492537:2351];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:35:43.087182Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488013246810492533:2349];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:35:43.087226Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488013246810492537:2351];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:35:43.087364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488013246810492533:2349];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:35:43.087378Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488013246810492537:2351];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:35:43.087482Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488013246810492537:2351];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:35:43.087483Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488013246810492533:2349];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:35:43.087634Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488013246810492533:2349];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 
2025-03-31T16:35:43.087634Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488013246810492537:2351];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:35:43.087777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488013246810492537:2351];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:35:43.087783Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488013246810492533:2349];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:35:43.088228Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488013246810492537:2351];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:35:43.088229Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488013246810492533:2349];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:35:43.088369Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488013246810492533:2349];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:35:43.088494Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488013246810492533:2349];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:35:43.088633Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488013246810492533:2349];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:35:43.088785Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488013246810492533:2349];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:35:43.091653Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488013246810492537:2351];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:35:43.091859Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488013246810492537:2351];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:35:43.091967Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488013246810492537:2351];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:35:43.092077Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037896;self_id=[1:7488013246810492537:2351];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:35:43.129624Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488013246810492531:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:35:43.129699Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488013246810492531:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abs ... Shard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038018;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:14.600253Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038018;self_id=[2:7488013357559514206:3328];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038018;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:14.606070Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038012;self_id=[2:7488013361854481618:3338];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038012;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:14.606361Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038012;self_id=[2:7488013361854481618:3338];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038012;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:14.613358Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[2:7488013357559514237:3329];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038015;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:14.613680Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[2:7488013357559514237:3329];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038015;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:14.615362Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038032;self_id=[2:7488013357559514100:3319];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038032;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:14.615631Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038032;self_id=[2:7488013357559514100:3319];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038032;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:14.615834Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038033;self_id=[2:7488013357559514045:3309];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038033;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:14.616010Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038033;self_id=[2:7488013357559514045:3309];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038033;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:14.627392Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038003;self_id=[2:7488013357559514253:3332];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038003;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:14.627720Z 
node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038003;self_id=[2:7488013357559514253:3332];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038003;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:14.628025Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038022;self_id=[2:7488013357559514054:3313];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038022;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:14.628211Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038022;self_id=[2:7488013357559514054:3313];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038022;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:14.629410Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038021;self_id=[2:7488013361854481585:3335];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038021;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:14.629621Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038021;self_id=[2:7488013361854481585:3335];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038021;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:14.632570Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038029;self_id=[2:7488013357559514172:3321];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038029;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:14.632833Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038029;self_id=[2:7488013357559514172:3321];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038029;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:14.645817Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038053;self_id=[2:7488013357559514058:3314];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038053;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:14.646261Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038053;self_id=[2:7488013357559514058:3314];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038053;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:14.646509Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038034;self_id=[2:7488013357559514190:3324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038034;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:14.646768Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038034;self_id=[2:7488013357559514190:3324];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038034;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:14.647007Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038027;self_id=[2:7488013361854481608:3337];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038027;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:14.647259Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038027;self_id=[2:7488013361854481608:3337];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038027;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:14.647538Z 
node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038038;self_id=[2:7488013357559514103:3320];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038038;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:14.647670Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038038;self_id=[2:7488013357559514103:3320];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038038;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:14.652840Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038023;self_id=[2:7488013357559514049:3311];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038023;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:14.653268Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038023;self_id=[2:7488013357559514049:3311];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038023;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:14.653612Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038060;self_id=[2:7488013357559513985:3297];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038060;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:14.653929Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038060;self_id=[2:7488013357559513985:3297];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038060;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:14.654174Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038008;self_id=[2:7488013357559514263:3333];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038008;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:14.654378Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038008;self_id=[2:7488013357559514263:3333];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038008;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:14.654571Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038025;self_id=[2:7488013361854481623:3339];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038025;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:14.654745Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038025;self_id=[2:7488013361854481623:3339];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038025;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:14.655385Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038005;self_id=[2:7488013357559514251:3331];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038005;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:14.655632Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038005;self_id=[2:7488013357559514251:3331];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038005;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:14.655881Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038045;self_id=[2:7488013357559514047:3310];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038045;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 
2025-03-31T16:36:14.656213Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038045;self_id=[2:7488013357559514047:3310];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038045;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:14.656758Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038016;self_id=[2:7488013361854481736:3344];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038016;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:14.657163Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038016;self_id=[2:7488013361854481736:3344];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038016;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:14.721265Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038013;self_id=[2:7488013361854481591:3336];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038013;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:14.721573Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038013;self_id=[2:7488013361854481591:3336];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038013;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:15.082028Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZjBlMjA0NDAtNzMzNzE1OGEtYWM3Zjg5YjEtMTg4ZjZjOTM=, ActorId: [2:7488013374739386377:3884], ActorState: ExecuteState, TraceId: 01jqpgz418bkgwww8jar1kgne8, Create QueryResponse for error on request, msg:
:3:29: Error: Operation 'Upsert' can't be performed in read only transaction, code: 2008 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2025-03-31T16:36:16.639966Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:36:16.639995Z node 2 :IMPORT WARN: Table profiles were not loaded WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> KqpLocksTricky::TestNoLocksIssueInteractiveTx-withSink [GOOD] >> TTxDataShardLocalKMeansScan::MainToPosting [GOOD] >> TTxDataShardLocalKMeansScan::MainToBuild >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_datashard] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_hive] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_schemeshard] >> KqpTx::BeginTransactionBadMode [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_schemeshard] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_tx_coordinator] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[tx_allocator] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[keyvalueflat] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite1 [GOOD] Test command err: Trying to start YDB, gRPC: 16901, MsgBus: 64919 2025-03-31T16:35:44.299110Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488013249951780832:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:44.299295Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012b6/r3tmp/tmpaBxLLi/pdisk_1.dat 2025-03-31T16:35:44.764459Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:35:44.770051Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:44.770152Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:44.771886Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16901, node 1 2025-03-31T16:35:44.879682Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:44.879706Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:44.879713Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:44.879861Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64919 TClient is connected to server localhost:64919 WaitRootIsUp 'Root'... 
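The ":3:29: Error: Operation 'Upsert' can't be performed in read only transaction, code: 2008" message above is the expected outcome for KqpSinkTx::OlapSnapshotRO: a write attempted inside a snapshot read-only transaction is rejected, and the session logs "Create QueryResponse for error on request". A sketch of the shape of such a check via the C++ table client; the table path and values are placeholders, not the test's actual data:

    // Hedged sketch: issue an UPSERT under SnapshotRO and expect rejection.
    auto result = session.ExecuteDataQuery(
        "UPSERT INTO `/Root/KV` (Key, Value) VALUES (1u, \"x\");",
        NYdb::NTable::TTxControl::BeginTx(
            NYdb::NTable::TTxSettings::SnapshotRO()).CommitTx())
        .GetValueSync();
    UNIT_ASSERT(!result.IsSuccess());  // expect the code 2008 issue above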
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:45.518100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:45.534627Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:35:47.609125Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013262836683380:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:47.609334Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:47.611609Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013262836683400:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:47.616101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:35:47.627339Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488013262836683402:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:35:47.697304Z node 1 :TX_PROXY ERROR: Actor# [1:7488013262836683453:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:48.051580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:35:48.226716Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488013267131650951:2348];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:35:48.226915Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488013267131650951:2348];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:35:48.231858Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488013267131650941:2347];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:35:48.231923Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488013267131650941:2347];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:35:48.232159Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488013267131650941:2347];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:35:48.232258Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488013267131650941:2347];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:35:48.232357Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488013267131650941:2347];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:35:48.232468Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488013267131650941:2347];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:35:48.232581Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488013267131650941:2347];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:35:48.232697Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488013267131650941:2347];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:35:48.232796Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037896;self_id=[1:7488013267131650941:2347];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:35:48.232910Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488013267131650941:2347];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:35:48.233004Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488013267131650941:2347];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:35:48.233112Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488013267131650941:2347];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:35:48.235539Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488013267131650951:2348];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:35:48.235685Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488013267131650951:2348];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:35:48.235810Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488013267131650951:2348];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:35:48.235909Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488013267131650951:2348];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:35:48.236008Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488013267131650951:2348];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:35:48.236108Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488013267131650951:2348];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:35:48.236204Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488013267131650951:2348];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:35:48.236295Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488013267131650951:2348];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:35:48.236391Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488013267131650951:2348];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:35:48.236502Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7488013267131650951:2348];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:35:48.282711Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488013267131650935:2344];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:35:48.282783Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488013267131650935:2344];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abs ... 4;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:36:16.995218Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037958;self_id=[2:7488013354050981079:2464];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037958;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:36:16.995227Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037975;self_id=[2:7488013354050980979:2428];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037975;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:36:16.995289Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037963;self_id=[2:7488013354050981121:2485];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037963;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:36:16.995292Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037959;self_id=[2:7488013354050980997:2437];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037959;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:36:16.995350Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037993;self_id=[2:7488013354050980991:2434];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037993;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:36:16.995355Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037977;self_id=[2:7488013354050980971:2424];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037977;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:36:16.995397Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037978;self_id=[2:7488013354050981031:2454];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037978;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:36:16.995418Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037994;self_id=[2:7488013354050981015:2446];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037994;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:36:16.995463Z node 2 :TX_COLUMNSHARD_WRITE WARN: 
tablet_id=72075186224037979;self_id=[2:7488013354050981019:2448];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037979;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:36:16.995534Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037962;self_id=[2:7488013354050980961:2419];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037962;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:36:16.995535Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037989;self_id=[2:7488013354050981113:2481];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037989;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:36:16.995601Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037965;self_id=[2:7488013354050981125:2487];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037965;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:36:16.995624Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037980;self_id=[2:7488013354050981117:2483];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037980;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:36:16.995666Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037967;self_id=[2:7488013354050980973:2425];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037967;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:36:16.995708Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037973;self_id=[2:7488013354050981127:2488];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037973;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:36:16.995719Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037968;self_id=[2:7488013354050981101:2475];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037968;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:36:16.995769Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037969;self_id=[2:7488013354050981077:2463];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037969;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:36:16.995806Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037982;self_id=[2:7488013354050981035:2456];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037982;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:36:16.995825Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037960;self_id=[2:7488013354050981173:2517];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037960;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only 
single operation is supported;tx_id=0; 2025-03-31T16:36:16.995867Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037970;self_id=[2:7488013354050981007:2442];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037970;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:36:16.995876Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037983;self_id=[2:7488013354050981115:2482];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037983;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:36:16.995911Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037966;self_id=[2:7488013354050981073:2461];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037966;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:36:16.995949Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037984;self_id=[2:7488013354050981029:2453];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037984;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:36:16.995964Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037988;self_id=[2:7488013354050981131:2490];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037988;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:36:16.996021Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037981;self_id=[2:7488013354050981109:2479];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037981;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:36:16.996049Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037985;self_id=[2:7488013354050981105:2477];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037985;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:36:16.996075Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037990;self_id=[2:7488013354050981027:2452];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037990;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:36:16.996120Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037991;self_id=[2:7488013354050981071:2460];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037991;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:36:16.996121Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037986;self_id=[2:7488013354050981103:2476];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037986;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:36:16.996166Z node 2 :TX_COLUMNSHARD_WRITE WARN: 
tablet_id=72075186224037976;self_id=[2:7488013354050980989:2433];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037976;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:36:16.996187Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037987;self_id=[2:7488013354050981185:2523];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037987;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:36:16.996227Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037995;self_id=[2:7488013354050980967:2422];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037995;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:36:16.996266Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037992;self_id=[2:7488013354050981011:2444];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037992;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:36:16.996288Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037997;self_id=[2:7488013354050980963:2420];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037997;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:36:16.996361Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037996;self_id=[2:7488013354050980965:2421];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037996;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2025-03-31T16:36:19.293934Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:36:19.293974Z node 2 :IMPORT WARN: Table profiles were not loaded WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> test.py::test[solomon-BadDownsamplingAggregation-] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpLocksTricky::TestNoLocksIssueInteractiveTx-withSink [GOOD] Test command err: Trying to start YDB, gRPC: 30856, MsgBus: 5013 2025-03-31T16:35:50.886226Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:35:50.886493Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:35:50.886632Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012b3/r3tmp/tmp7KgTcy/pdisk_1.dat TServer::EnableGrpc on GrpcPort 30856, node 1 2025-03-31T16:35:51.376458Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:35:51.379515Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:51.379587Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:51.379628Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:51.379951Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:35:51.421580Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:51.421701Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:51.436425Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5013 TClient is connected to server localhost:5013 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:51.764654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:51.817690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:52.186193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:35:52.588481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:52.871438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:53.711784Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1810:3405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:53.711997Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:53.738044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:35:53.948490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:35:54.224579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:35:54.489891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:35:54.770209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:35:55.137004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:35:55.436500Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2397:3858], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:55.436657Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:55.436982Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2402:3863], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:55.444569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:35:55.624893Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2404:3865], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:35:55.673833Z node 1 :TX_PROXY ERROR: Actor# [1:2467:3909] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:56.841986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:35:57.100309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-31T16:35:57.484833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 19046, MsgBus: 20402 2025-03-31T16:36:03.072589Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:101:2147], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:36:03.073029Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:36:03.073126Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012b3/r3tmp/tmpDjA0Ex/pdisk_1.dat TServer::EnableGrpc on GrpcPort 19046, node 2 2025-03-31T16:36:03.508263Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:36:03.509282Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:36:03.509340Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:36:03.509378Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:36:03.509787Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:36:03.545187Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:36:03.545292Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:36:03.556638Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20402 TClient is connected to server localhost:20402 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:36:03.780247Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:36:03.812888Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025- ... 
57594046644480 2025-03-31T16:36:05.596582Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:36:05.860672Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:36:06.102841Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:36:06.406144Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:36:06.666198Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:36:07.013771Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:2396:3855], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:07.013938Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:07.014249Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:2401:3860], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:07.020947Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:36:07.173480Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:2403:3862], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:36:07.242447Z node 2 :TX_PROXY ERROR: Actor# [2:2468:3908] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:36:08.291168Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:36:08.536872Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-31T16:36:08.873161Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 23994, MsgBus: 5030 2025-03-31T16:36:14.895127Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:304:2347], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:36:14.895388Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:36:14.895480Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012b3/r3tmp/tmpGONJN5/pdisk_1.dat TServer::EnableGrpc on GrpcPort 23994, node 3 2025-03-31T16:36:15.311029Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:36:15.312124Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:36:15.312182Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:36:15.312250Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:36:15.312653Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:36:15.347900Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:36:15.348065Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:36:15.359479Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5030 TClient is connected to server localhost:5030 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:36:15.741205Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:36:15.797427Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:36:16.104479Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:36:16.502540Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:36:16.817953Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:36:17.281614Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1807:3403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:17.281877Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:17.303348Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:36:17.563042Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:36:17.791479Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:36:18.044561Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:36:18.286809Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:36:18.604612Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:36:18.897276Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:2395:3856], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:18.897386Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:18.897794Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:2400:3861], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:18.904036Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:36:19.066718Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:2402:3863], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:36:19.112998Z node 3 :TX_PROXY ERROR: Actor# [3:2465:3907] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:36:20.160425Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:36:20.469554Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-31T16:36:20.827045Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 >> TSentinelTests::Smoke [GOOD] >> TSentinelTests::PDiskUnknownState ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::BeginTransactionBadMode [GOOD] Test command err: Trying to start YDB, gRPC: 29562, MsgBus: 10388 2025-03-31T16:36:02.069086Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488013324597364427:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:36:02.069315Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012a0/r3tmp/tmpcFwRaM/pdisk_1.dat 2025-03-31T16:36:02.408826Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:36:02.427445Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:36:02.427558Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:36:02.429999Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29562, node 1 2025-03-31T16:36:02.493993Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:36:02.494028Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:36:02.494034Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:36:02.494151Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10388 TClient is connected to server localhost:10388 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:36:03.021298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:36:03.047725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:36:03.183656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:36:03.328263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:36:03.390931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:36:04.828327Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013333187300794:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:04.828433Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:05.171880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:36:05.202935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:36:05.230999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:36:05.260069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:36:05.289011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:36:05.358862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:36:05.398924Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013337482268607:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:05.399063Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:05.399381Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013337482268612:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:05.403342Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:36:05.412636Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488013337482268614:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:36:05.516107Z node 1 :TX_PROXY ERROR: Actor# [1:7488013337482268667:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:36:07.068235Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488013324597364427:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:36:07.068314Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:36:17.402411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:36:17.402450Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:36:18.725551Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488013393316844402:2640], TxId: 281474976710682, task: 1. Ctx: { TraceId : 01jqpgz7ev3150d48p22nvsatt. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=NDQxOGZlNmYtYzMyMjExNzktOGNjYTExNzEtZGMyNWI3YTE=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Source[0] fatal error: {
: Error: Read request aborted subissue: {
: Error: Table id 2 has no snapshot at v1743438966395/18446744073709551615 shard 72075186224037888 with lowWatermark v1743438966647/18446744073709551615 (node# 1 state# Ready) } } 2025-03-31T16:36:18.726057Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488013393316844402:2640], TxId: 281474976710682, task: 1. Ctx: { TraceId : 01jqpgz7ev3150d48p22nvsatt. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=NDQxOGZlNmYtYzMyMjExNzktOGNjYTExNzEtZGMyNWI3YTE=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: ABORTED DEFAULT_ERROR: {
: Error: Read request aborted subissue: {
: Error: Table id 2 has no snapshot at v1743438966395/18446744073709551615 shard 72075186224037888 with lowWatermark v1743438966647/18446744073709551615 (node# 1 state# Ready) } }. 2025-03-31T16:36:18.726486Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488013393316844404:2641], TxId: 281474976710682, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=NDQxOGZlNmYtYzMyMjExNzktOGNjYTExNzEtZGMyNWI3YTE=. TraceId : 01jqpgz7ev3150d48p22nvsatt. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7488013393316844398:2488], status: ABORTED, reason: {
: Error: Terminate execution } 2025-03-31T16:36:18.726913Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDQxOGZlNmYtYzMyMjExNzktOGNjYTExNzEtZGMyNWI3YTE=, ActorId: [1:7488013341777236222:2488], ActorState: ExecuteState, TraceId: 01jqpgz7ev3150d48p22nvsatt, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 25403, MsgBus: 3835 2025-03-31T16:36:19.436414Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488013398032712064:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:36:19.436482Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012a0/r3tmp/tmparpmNC/pdisk_1.dat 2025-03-31T16:36:19.537976Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:36:19.566575Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:36:19.566660Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:36:19.568309Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25403, node 2 2025-03-31T16:36:19.604549Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:36:19.604565Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:36:19.604570Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:36:19.604667Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3835 TClient is connected to server localhost:3835 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:36:20.036367Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:36:20.052573Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:36:20.129925Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:36:20.292224Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:36:20.365760Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:36:22.474582Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013410917615734:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:22.474644Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:22.497290Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:36:22.529878Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:36:22.561802Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:36:22.589830Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:36:22.618508Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:36:22.651236Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:36:22.726619Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013410917616250:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:22.726722Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:22.726729Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013410917616255:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:22.729818Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:36:22.768035Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488013410917616257:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:36:22.828759Z node 2 :TX_PROXY ERROR: Actor# [2:7488013410917616312:3449] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpQueryService::ClosedSessionRemovedFromPool [GOOD] >> KqpQueryService::CloseConnection >> test_postgres.py::TestPostgresSuite::test_postgres_suite[horology] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[name] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[name] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[float4] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[keyvalueflat] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[tx_mediator] >> KqpSinkTx::OlapInteractive [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[tx_mediator] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[persqueue] >> HttpRouter::Basic [GOOD] >> TSentinelTests::PDiskUnknownState [GOOD] >> TSentinelTests::PDiskErrorState >> test_kqprun_recipe.py::TestKqprunRecipe::test_query_execution |97.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/public_http/ut/unittest >> HttpRouter::Basic [GOOD] |97.4%| [TS] {RESULT} ydb/core/public_http/ut/unittest >> test_postgres.py::TestPostgresSuite::test_postgres_suite[float4] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[withtable] >> TabletService_ChangeSchema::Basics [GOOD] >> TabletService_ChangeSchema::OnlyAdminsAllowed >> test_kqprun_recipe.py::TestKqprunRecipe::test_query_execution [GOOD] >> TTxDataShardLocalKMeansScan::MainToBuild [GOOD] >> TTxDataShardLocalKMeansScan::BuildToPosting >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[persqueue] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[kesus] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::OlapInteractive [GOOD] Test command err: Trying to start YDB, gRPC: 13423, MsgBus: 4561 2025-03-31T16:35:49.853870Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488013269767560630:2133];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:49.854060Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012ab/r3tmp/tmpeFdZBN/pdisk_1.dat 2025-03-31T16:35:50.275311Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:35:50.295650Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:50.295767Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:50.297467Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13423, node 1 2025-03-31T16:35:50.382913Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:50.382939Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 
2025-03-31T16:35:50.382946Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:50.383098Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4561 TClient is connected to server localhost:4561 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:50.914849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:52.765685Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013282652463083:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:52.765891Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:52.767678Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013282652463118:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:52.771891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:35:52.782621Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488013282652463120:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:35:52.844211Z node 1 :TX_PROXY ERROR: Actor# [1:7488013282652463171:2334] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:53.121551Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:35:53.278440Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488013286947430628:2344];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:35:53.279312Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488013286947430628:2344];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:35:53.279620Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488013286947430628:2344];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:35:53.279763Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488013286947430628:2344];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:35:53.279907Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488013286947430628:2344];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:35:53.280064Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488013286947430628:2344];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:35:53.280201Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488013286947430628:2344];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:35:53.280308Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488013286947430628:2344];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:35:53.280450Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488013286947430628:2344];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:35:53.280587Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488013286947430628:2344];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:35:53.280739Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7488013286947430628:2344];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:35:53.280887Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488013286947430628:2344];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:35:53.283787Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488013286947430635:2347];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:35:53.283854Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488013286947430635:2347];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:35:53.284073Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488013286947430635:2347];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:35:53.284184Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488013286947430635:2347];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:35:53.284328Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488013286947430635:2347];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:35:53.284430Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488013286947430635:2347];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:35:53.284549Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488013286947430635:2347];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:35:53.284665Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488013286947430635:2347];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:35:53.284845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488013286947430635:2347];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:35:53.284966Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488013286947430635:2347];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:35:53.285110Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7488013286947430635:2347];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:35:53.285568Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037896;self_id=[1:7488013286947430635:2347];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:35:53.318818Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488013286947430630:2345];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:35:53.318908Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488013286947430630:2345];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:35:53.319136Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_i ... 4781Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038055;self_id=[2:7488013374992724893:3183];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038055;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:19.595422Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038048;self_id=[2:7488013374992724973:3229];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038048;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:19.595839Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038048;self_id=[2:7488013374992724973:3229];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038048;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:19.596966Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038059;self_id=[2:7488013374992725203:3264];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038059;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:19.597333Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038059;self_id=[2:7488013374992725203:3264];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038059;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:19.597995Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038064;self_id=[2:7488013374992724919:3202];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038064;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:19.598279Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038064;self_id=[2:7488013374992724919:3202];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038064;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:19.598439Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038043;self_id=[2:7488013374992724964:3225];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038043;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:19.598608Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038043;self_id=[2:7488013374992724964:3225];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038043;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:19.602692Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038003;self_id=[2:7488013374992725196:3261];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038003;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 
2025-03-31T16:36:19.602974Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038003;self_id=[2:7488013374992725196:3261];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038003;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:19.619370Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038052;self_id=[2:7488013374992724938:3212];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038052;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:19.619629Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038052;self_id=[2:7488013374992724938:3212];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038052;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:19.621568Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038070;self_id=[2:7488013374992724934:3210];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038070;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:19.621797Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038070;self_id=[2:7488013374992724934:3210];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038070;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:19.630816Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038037;self_id=[2:7488013374992724976:3231];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038037;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:19.631191Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038037;self_id=[2:7488013374992724976:3231];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038037;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:19.642231Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038005;self_id=[2:7488013374992725291:3272];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038005;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:19.642602Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038005;self_id=[2:7488013374992725291:3272];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038005;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:19.642865Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038045;self_id=[2:7488013374992724951:3219];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038045;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:19.643160Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038045;self_id=[2:7488013374992724951:3219];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038045;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:19.646194Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038060;self_id=[2:7488013374992724999:3242];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038060;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:19.646482Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038060;self_id=[2:7488013374992724999:3242];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038060;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 
2025-03-31T16:36:19.653369Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038023;self_id=[2:7488013374992724932:3209];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038023;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:19.653749Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038023;self_id=[2:7488013374992724932:3209];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038023;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:19.653956Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038008;self_id=[2:7488013374992725208:3266];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038008;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:19.654187Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038008;self_id=[2:7488013374992725208:3266];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038008;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:19.654739Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038038;self_id=[2:7488013374992725212:3268];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038038;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:19.654930Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038038;self_id=[2:7488013374992725212:3268];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038038;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:19.655880Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038019;self_id=[2:7488013374992725002:3243];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038019;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:19.656166Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038019;self_id=[2:7488013374992725002:3243];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038019;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:19.657950Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038021;self_id=[2:7488013374992725210:3267];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038021;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:19.658199Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038021;self_id=[2:7488013374992725210:3267];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038021;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:19.661576Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038027;self_id=[2:7488013374992725173:3253];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038027;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:19.662625Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038027;self_id=[2:7488013374992725173:3253];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038027;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:19.664581Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038001;self_id=[2:7488013374992725217:3270];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038001;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:19.664799Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038001;self_id=[2:7488013374992725217:3270];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038001;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:19.666143Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038007;self_id=[2:7488013374992724958:3222];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038007;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:19.666416Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038007;self_id=[2:7488013374992724958:3222];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038007;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:19.668203Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038026;self_id=[2:7488013374992725182:3254];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038026;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:19.668498Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038026;self_id=[2:7488013374992725182:3254];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038026;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:19.675404Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038033;self_id=[2:7488013374992724989:3237];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038033;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:19.675756Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038033;self_id=[2:7488013374992724989:3237];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038033;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 2025-03-31T16:36:20.848308Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:36:20.848346Z node 2 :IMPORT WARN: Table profiles were not loaded WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> KqpService::ToDictCache-UseCache [GOOD] >> test.py::test[solomon-BadDownsamplingAggregation-] [GOOD] >> test.py::test[solomon-BadDownsamplingDisabled-] >> BulkUpsert::BulkUpsert >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[kesus] [GOOD] >> TTxDataShardTestInit::TestGetShardStateAfterInitialization >> TGRpcRateLimiterTest::CreateResource ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpService::ToDictCache-UseCache [GOOD] Test command err: Trying to start YDB, gRPC: 16613, MsgBus: 4818 2025-03-31T16:35:29.008881Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488013184223288816:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:29.008967Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00137e/r3tmp/tmpKuOkD9/pdisk_1.dat 2025-03-31T16:35:29.433184Z node 1 :IMPORT WARN: Table profiles were not 
loaded 2025-03-31T16:35:29.466884Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:29.466979Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:29.468573Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16613, node 1 2025-03-31T16:35:29.521092Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:29.521118Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:29.521129Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:29.521258Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4818 TClient is connected to server localhost:4818 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:30.081432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:30.097267Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:35:30.105768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:30.243246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:30.405050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:30.483797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:32.145240Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013197108192468:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:32.145345Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:32.446613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:35:32.475983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:35:32.502944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:35:32.531303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:35:32.564991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:35:32.598597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:35:32.679010Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013197108192982:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:32.679097Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:32.679235Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013197108192987:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:32.682968Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:35:32.692614Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488013197108192989:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:35:32.780992Z node 1 :TX_PROXY ERROR: Actor# [1:7488013197108193044:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:33.618621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:35:34.008783Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488013184223288816:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:34.008919Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:35:44.413708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:35:44.413733Z node 1 :IMPORT WARN: Table profiles were not loaded took: 18.573497s took: 18.592067s took: 18.586718s took: 18.588341s took: 18.599058s took: 18.613103s took: 18.594750s took: 18.606596s took: 18.617217s took: 18.617746s Trying to start YDB, gRPC: 28251, MsgBus: 3129 2025-03-31T16:35:53.261198Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488013286957420187:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:53.261362Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00137e/r3tmp/tmpPj4jWz/pdisk_1.dat 2025-03-31T16:35:53.397404Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:35:53.403609Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:53.403697Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:53.405173Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28251, node 2 2025-03-31T16:35:53.449443Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:53.449472Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:53.449479Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:53.449639Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3129 TClient is connected to server localhost:3129 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:53.894616Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:53.904212Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:35:56.939836Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013299842322734:2344] ... es)" severity: 1 } took: 3.979609s took: 3.980278s took: 3.981819s took: 3.982802s 2025-03-31T16:35:58.259667Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488013286957420187:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:58.259754Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; took: 0.972424s took: 0.972376s took: 0.972455s took: 0.976974s took: 0.832855s took: 0.834432s took: 0.836259s took: 0.836998s took: 0.884859s took: 0.885538s took: 0.886252s took: 0.888534s took: 0.816162s took: 0.817045s took: 0.817667s took: 0.817987s took: 0.827693s took: 0.828300s took: 0.830684s took: 0.832921s took: 0.823851s took: 0.825819s took: 0.829142s took: 0.829838s took: 0.766296s took: 0.766910s took: 0.767447s took: 0.767796s took: 0.678200s took: 0.678819s took: 0.679323s took: 0.679728s took: 0.839999s took: 0.840913s took: 0.840966s took: 0.842494s took: 0.731975s took: 0.732436s took: 0.732715s took: 0.733606s took: 0.752591s took: 0.753386s took: 0.753948s took: 0.754378s took: 0.754079s took: 0.755020s took: 0.755181s took: 0.755768s took: 0.664724s took: 0.665353s took: 0.665744s took: 0.667617s 2025-03-31T16:36:08.373653Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:36:08.373694Z node 2 :IMPORT WARN: Table profiles were not loaded took: 0.811001s took: 0.812027s took: 0.813015s took: 0.813626s took: 0.849664s took: 0.850455s took: 0.851275s took: 0.851478s Trying to start YDB, gRPC: 4982, MsgBus: 22289 2025-03-31T16:36:10.731879Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488013359717855406:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:36:10.731980Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00137e/r3tmp/tmpQD6o2a/pdisk_1.dat 2025-03-31T16:36:10.833176Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4982, node 3 2025-03-31T16:36:10.865026Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:36:10.865121Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:36:10.866587Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:36:10.906778Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:36:10.906804Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:36:10.906810Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:36:10.906923Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22289 TClient is connected to server localhost:22289 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:36:11.377796Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:36:14.467618Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013376897725282:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:14.467693Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013376897725277:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:14.467733Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013376897725280:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:14.467778Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013376897725258:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:14.467822Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013376897725279:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:14.468337Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:14.471146Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-03-31T16:36:14.473943Z node 3 :TX_PROXY ERROR: Actor# [3:7488013376897725293:2315] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-31T16:36:14.474052Z node 3 :TX_PROXY ERROR: Actor# [3:7488013376897725291:2314] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-31T16:36:14.474162Z node 3 :TX_PROXY ERROR: Actor# [3:7488013376897725312:2320] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-31T16:36:14.480922Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488013376897725292:2354], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-03-31T16:36:14.480991Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488013376897725290:2353], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-03-31T16:36:14.481054Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488013376897725299:2356], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-03-31T16:36:14.481169Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488013376897725295:2355], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-03-31T16:36:14.542095Z node 3 :TX_PROXY ERROR: Actor# [3:7488013376897725368:2362] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:36:14.545329Z node 3 :TX_PROXY ERROR: Actor# [3:7488013376897725386:2370] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:36:14.561527Z node 3 :TX_PROXY ERROR: Actor# [3:7488013376897725395:2377] txid# 281474976715664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:36:14.578373Z node 3 :TX_PROXY ERROR: Actor# [3:7488013376897725403:2383] txid# 281474976715665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } took: 4.054393s took: 4.054528s took: 4.054779s took: 4.054909s 2025-03-31T16:36:15.732187Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488013359717855406:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:36:15.732277Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; took: 0.943126s took: 0.943796s took: 0.944952s took: 0.945076s took: 0.787569s took: 0.788886s took: 0.789072s took: 0.790243s took: 0.831204s took: 0.832550s took: 0.833483s took: 0.834085s took: 0.820232s took: 0.820234s took: 0.820681s took: 0.820794s took: 0.906841s took: 0.908419s took: 0.912278s took: 0.913679s took: 0.961178s took: 0.962976s took: 0.963076s took: 0.964213s took: 0.964212s took: 0.964305s took: 0.964697s took: 0.964865s took: 0.870589s took: 0.870661s took: 0.874760s took: 0.875171s took: 0.908295s took: 0.908820s took: 0.909423s took: 0.959387s took: 0.912700s took: 0.913749s took: 0.915106s took: 0.915315s took: 0.952053s took: 0.952032s took: 0.952593s took: 0.954823s 2025-03-31T16:36:25.826784Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:36:25.826814Z node 3 :IMPORT WARN: Table profiles were not loaded took: 0.873775s took: 0.875213s took: 0.875578s took: 0.878721s took: 0.902199s took: 0.902266s took: 0.904355s took: 0.904733s took: 0.924940s took: 0.927660s took: 0.927880s took: 0.930743s took: 0.929198s took: 0.930348s took: 0.931916s took: 0.931919s >> TabletService_ChangeSchema::OnlyAdminsAllowed [GOOD] >> TabletService_ExecuteMiniKQL::BasicMiniKQLRead >> TTxDataShardTestInit::TestGetShardStateAfterInitialization [GOOD] >> TTxDataShardTestInit::TestTableHasPath >> test_inserts.py::TestYdbInsertsOperations::test_insert_multiple_rows [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_concurrent_inserts ------- [TM] {asan, default-linux-x86_64, release} 
ydb/tests/tools/kqprun/tests/py3test >> test_kqprun_recipe.py::TestKqprunRecipe::test_query_execution [GOOD] Test command err: contrib/python/ydb/py3/ydb/__init__.py:43: UserWarning: Used deprecated behavior, for fix ADD PEERDIR kikimr/public/sdk/python/ydb_v3_new_behavior contrib/python/ydb/py3/ydb/global_settings.py:22: UserWarning: Global allow split transaction is deprecated behaviour. contrib/python/ydb/py3/ydb/global_settings.py:12: UserWarning: Global allow truncated response is deprecated behaviour. |97.4%| [TM] {RESULT} ydb/tests/tools/kqprun/tests/py3test >> TTxDataShardLocalKMeansScan::BuildToPosting [GOOD] >> TTxDataShardLocalKMeansScan::BuildToBuild >> test_postgres.py::TestPostgresSuite::test_postgres_suite[withtable] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[roles] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[roles] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[char] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[char] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[select_1] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[select_1] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[numeric] >> KqpSinkTx::DeferredEffects [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::DeferredEffects [GOOD] Test command err: Trying to start YDB, gRPC: 13725, MsgBus: 21861 2025-03-31T16:35:57.184046Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488013307015432498:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:57.184086Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012a3/r3tmp/tmpxNmcBZ/pdisk_1.dat 2025-03-31T16:35:57.516400Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13725, node 1 2025-03-31T16:35:57.559358Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:57.559723Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:57.562678Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:35:57.583299Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:57.583330Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:57.583337Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:57.583467Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21861 TClient is connected to server localhost:21861 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:58.114885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:59.970543Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013315605367734:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:59.970645Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013315605367754:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:59.970713Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:59.975430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:35:59.985941Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488013315605367771:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:36:00.074045Z node 1 :TX_PROXY ERROR: Actor# [1:7488013319900335118:2336] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:36:00.395405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:36:00.508504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:36:01.373944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:36:02.244747Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488013307015432498:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:36:02.245387Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:36:12.510046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:36:12.510094Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:36:16.131294Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488013388619821904:3245], TxId: 281474976710678, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=NDJkZTdhMzYtYzI0ZWRhMDItOGY2MzJhYTctNGYwYjM0NDU=. TraceId : 01jqpgz53z7qbnn3ten8abmb4q. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Source[0] fatal error: {
: Error: Read request aborted subissue: {
: Error: Table id 7 has no snapshot at v1743438962517/18446744073709551615 shard 72075186224037889 with lowWatermark v1743438962790/18446744073709551615 (node# 1 state# Ready) } } 2025-03-31T16:36:16.132011Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488013388619821904:3245], TxId: 281474976710678, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=NDJkZTdhMzYtYzI0ZWRhMDItOGY2MzJhYTctNGYwYjM0NDU=. TraceId : 01jqpgz53z7qbnn3ten8abmb4q. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: ABORTED DEFAULT_ERROR: {
: Error: Read request aborted subissue: {
: Error: Table id 7 has no snapshot at v1743438962517/18446744073709551615 shard 72075186224037889 with lowWatermark v1743438962790/18446744073709551615 (node# 1 state# Ready) } }. 2025-03-31T16:36:16.132662Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488013388619821907:3247], TxId: 281474976710678, task: 3. Ctx: { TraceId : 01jqpgz53z7qbnn3ten8abmb4q. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=NDJkZTdhMzYtYzI0ZWRhMDItOGY2MzJhYTctNGYwYjM0NDU=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7488013388619821900:2969], status: ABORTED, reason: {
: Error: Terminate execution } 2025-03-31T16:36:16.134235Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDJkZTdhMzYtYzI0ZWRhMDItOGY2MzJhYTctNGYwYjM0NDU=, ActorId: [1:7488013328490278346:2969], ActorState: ExecuteState, TraceId: 01jqpgz53z7qbnn3ten8abmb4q, Create QueryResponse for error on request, msg: WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 24436, MsgBus: 8992 2025-03-31T16:36:21.978761Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488013408605789824:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:36:21.978881Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012a3/r3tmp/tmpT6kIot/pdisk_1.dat 2025-03-31T16:36:22.082604Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24436, node 2 2025-03-31T16:36:22.117930Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:36:22.118386Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:36:22.121561Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:36:22.148646Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:36:22.148688Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:36:22.148703Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:36:22.148863Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8992 TClient is connected to server localhost:8992 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:36:22.554389Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:36:25.018418Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013425785659670:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:25.018505Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013425785659664:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:25.018589Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:25.022779Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:36:25.038128Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488013425785659679:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:36:25.094269Z node 2 :TX_PROXY ERROR: Actor# [2:7488013425785659730:2335] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:36:25.139835Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-31T16:36:25.175266Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-31T16:36:26.208796Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:36:27.016605Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488013408605789824:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:36:27.019438Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> TDqPqRdReadActorTests::TestReadFromTopic2 [GOOD] >> test.py::test[solomon-BadDownsamplingDisabled-] [GOOD] >> test.py::test[solomon-BadDownsamplingFill-] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldHandleBorrowCompactionTimeouts [GOOD] >> TDqPqRdReadActorTests::IgnoreUndeliveredWithWrongGeneration [GOOD] >> TGRpcRateLimiterTest::CreateResource [GOOD] >> TGRpcRateLimiterTest::UpdateResource >> TraverseColumnShard::TraverseColumnTableRebootSaTabletInAggregate [GOOD] >> TDqPqRdReadActorTests::SessionError [GOOD] >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite3 [GOOD] >> TDqPqRdReadActorTests::ReadWithFreeSpace [GOOD] >> TabletService_ExecuteMiniKQL::BasicMiniKQLRead [GOOD] >> TabletService_ExecuteMiniKQL::ParamsMiniKQLRead ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldHandleBorrowCompactionTimeouts [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:33:31.106998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:33:31.107060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:33:31.107122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:33:31.107153Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:33:31.107215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:33:31.107246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:33:31.107335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:33:31.107429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:33:31.107721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:33:31.178867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:33:31.178944Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:33:31.183483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:33:31.183603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:33:31.183799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:33:31.189136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:33:31.189374Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:33:31.190005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:31.190246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:33:31.192390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:31.193625Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:33:31.193684Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:31.193838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:33:31.193895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:33:31.193939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:33:31.194135Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:33:31.201708Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:33:31.334749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:33:31.334978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 
72057594046678944 2025-03-31T16:33:31.335202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:33:31.335395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:33:31.335462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:31.337908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:33:31.338048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:33:31.338234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:31.338291Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:33:31.338329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:33:31.338358Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:33:31.340339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:31.340393Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:33:31.340448Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:33:31.342236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:31.342285Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:31.342323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:31.342375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:33:31.346068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:33:31.348128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:33:31.348351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:33:31.349495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 
2025-03-31T16:33:31.349635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:33:31.349691Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:31.349977Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:33:31.350031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:33:31.350193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:33:31.350297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:33:31.352589Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:33:31.352632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:33:31.352815Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:33:31.352863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:33:31.353092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:33:31.353139Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:33:31.353237Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:33:31.353276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:31.353337Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:33:31.353368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:31.353420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:33:31.353478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:33:31.353513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:33:31.353541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:33:31.353610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:33:31.353647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:33:31.353677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:33:31.355730Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 
PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:33:31.355874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:33:31.355931Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... Sender [0:0:0], Recipient [3:125:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:36:33.821977Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:36:33.822085Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:125:2151], Recipient [3:125:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:36:33.822120Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:36:34.226783Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:309:2296]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-03-31T16:36:34.227040Z node 3 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186233409546, FollowerId 0, tableId 2 2025-03-31T16:36:34.227504Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [3:309:2296], Recipient [3:125:2151]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409546 TableLocalId: 2 Generation: 2 Round: 12 TableStats { DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: true Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 351 Memory: 124232 Storage: 14156 } ShardState: 2 UserTablePartOwners: 72075186233409546 NodeId: 3 StartTime: 40 TableOwnerId: 72057594046678944 FollowerId: 0 2025-03-31T16:36:34.227554Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-03-31T16:36:34.227610Z node 3 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 13940 rowCount 100 cpuUsage 0.0351 2025-03-31T16:36:34.227755Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] raw table stats: DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: true Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-03-31T16:36:34.227811Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 
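
The periodic-stats lines above trace a coalescing pattern: each TEvPeriodicTableStats is queued, and one delayed TTxStoreStats pass persists the whole batch ("Will delay TTxStoreTableStats on# 0.100000s", later "queue size# 0"). A minimal sketch of that delay-and-batch queue; the 100 ms delay and the quoted messages are from the log, while the callback helpers are invented for illustration and are not YDB's implementation:

    #include <chrono>
    #include <functional>
    #include <iostream>
    #include <queue>

    using namespace std::chrono;

    // Sketch: each incoming stats message is enqueued, a single delayed
    // wakeup drains whatever accumulated, so bursts collapse into one write.
    struct TStatsQueue {
        std::queue<int> Pending;            // shard indexes awaiting persist
        bool WakeupScheduled = false;

        void OnPeriodicStats(int shardIdx, const std::function<void(milliseconds)>& schedule) {
            Pending.push(shardIdx);
            if (!WakeupScheduled) {
                WakeupScheduled = true;
                schedule(100ms);            // "Will delay TTxStoreTableStats on# 0.100000s"
            }
        }

        void OnPersistWakeup(const std::function<void(int)>& persistSingleStats) {
            WakeupScheduled = false;
            while (!Pending.empty()) {      // "Will execute TTxStoreStats, queue# N"
                persistSingleStats(Pending.front());
                Pending.pop();
            }                               // queue size# 0 afterwards
        }
    };

    int main() {
        TStatsQueue q;
        q.OnPeriodicStats(1, [](milliseconds d) { std::cout << "wakeup in " << d.count() << "ms\n"; });
        q.OnPeriodicStats(2, [](milliseconds) {});  // no second timer: one is already scheduled
        q.OnPersistWakeup([](int idx) { std::cout << "persist shard " << idx << "\n"; });
    }
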
2025-03-31T16:36:34.238314Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:125:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:36:34.238402Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:36:34.238509Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:125:2151], Recipient [3:125:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:36:34.238543Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:36:34.280981Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435091, Sender [0:0:0], Recipient [3:125:2151]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-03-31T16:36:34.281055Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-03-31T16:36:34.281103Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 2025-03-31T16:36:34.281173Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-03-31T16:36:34.281209Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2025-03-31T16:36:34.281357Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 13940 row count 100 2025-03-31T16:36:34.281460Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Simple, is column=0, is olap=0 2025-03-31T16:36:34.281513Z node 3 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 2: RowCount 100, DataSize 13940 2025-03-31T16:36:34.281671Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-31T16:36:34.292245Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435091, Sender [0:0:0], Recipient [3:125:2151]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-03-31T16:36:34.292324Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-03-31T16:36:34.292358Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-03-31T16:36:34.323643Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:712:2679]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-03-31T16:36:34.323907Z node 3 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186233409547, FollowerId 0, tableId 3 2025-03-31T16:36:34.324320Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [3:712:2679], Recipient [3:125:2151]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409547 TableLocalId: 3 Generation: 2 Round: 12 TableStats { DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: 
false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 60 Memory: 124232 } ShardState: 2 UserTablePartOwners: 72075186233409547 UserTablePartOwners: 72075186233409546 NodeId: 3 StartTime: 212 TableOwnerId: 72057594046678944 FollowerId: 0 2025-03-31T16:36:34.324375Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-03-31T16:36:34.324429Z node 3 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 3] state 'Ready' dataSize 13940 rowCount 100 cpuUsage 0.006 2025-03-31T16:36:34.324558Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 3] raw table stats: DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-03-31T16:36:34.324636Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-03-31T16:36:34.356758Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Operation queue wakeup 2025-03-31T16:36:34.356859Z node 3 :FLAT_TX_SCHEMESHARD INFO: Borrowed compaction timeout for pathId# [OwnerId: 72057594046678944, LocalPathId: 3], datashard# 72075186233409547, next wakeup# 0.000000s, in queue# 0 shards, running# 0 shards at schemeshard 72057594046678944 2025-03-31T16:36:34.356931Z node 3 :FLAT_TX_SCHEMESHARD INFO: RunBorrowedCompaction for pathId# [OwnerId: 72057594046678944, LocalPathId: 3], datashard# 72075186233409547, next wakeup# 0.000000s, rate# 0, in queue# 1 shards, running# 0 shards at schemeshard 72057594046678944 2025-03-31T16:36:34.357010Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Operation queue set wakeup after delta# 3 seconds 2025-03-31T16:36:34.357039Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Borrowed compaction enqueued shard# 72057594046678944:2 at schemeshard 72057594046678944 2025-03-31T16:36:34.357203Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435091, Sender [0:0:0], Recipient [3:125:2151]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-03-31T16:36:34.357251Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-03-31T16:36:34.357279Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 2025-03-31T16:36:34.357349Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-03-31T16:36:34.357379Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2025-03-31T16:36:34.357477Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72057594046678944:2 data size 13940 row count 100 2025-03-31T16:36:34.357538Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, 
pathId: [OwnerId: 72057594046678944, LocalPathId: 3], pathId map=CopyTable, is column=0, is olap=0 2025-03-31T16:36:34.357577Z node 3 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409547 followerId=0, pathId 3: RowCount 100, DataSize 13940, with borrowed parts 2025-03-31T16:36:34.357692Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Borrowed compaction enqueued shard# 72057594046678944:2 at schemeshard 72057594046678944 2025-03-31T16:36:34.357772Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-31T16:36:34.369207Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435091, Sender [0:0:0], Recipient [3:125:2151]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-03-31T16:36:34.369301Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-03-31T16:36:34.369341Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-03-31T16:36:34.621590Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:125:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:36:34.621686Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T16:36:34.621835Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:125:2151], Recipient [3:125:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T16:36:34.621884Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime >> TTxDataShardTestInit::TestTableHasPath [GOOD] >> TTxDataShardTestInit::TestResolvePathAfterRestart >> TDqPqRdReadActorTests::TestSaveLoadPqRdRead >> DataShardFollowers::FollowerKeepsWorkingAfterMvccReadTable >> KqpQueryService::CloseConnection [GOOD] >> TCheckpointCoordinatorTests::ShouldTriggerCheckpointWithSource |97.4%| [TA] $(B)/ydb/core/tx/schemeshard/ut_compaction/test-results/unittest/{meta.json ... results_accumulator.log} |97.4%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_compaction/test-results/unittest/{meta.json ... results_accumulator.log} >> TCheckpointCoordinatorTests::ShouldTriggerCheckpointWithSource [GOOD] >> TCheckpointCoordinatorTests::ShouldTriggerCheckpointWithSourcesAndWithChannel >> TCheckpointCoordinatorTests::ShouldTriggerCheckpointWithSourcesAndWithChannel [GOOD] >> TCheckpointCoordinatorTests::ShouldAllSnapshots [GOOD] >> TCheckpointCoordinatorTests::Should2Increments1Snapshot ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletInAggregate [GOOD] Test command err: 2025-03-31T16:33:50.996741Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:33:50.997082Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:33:50.997223Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000e9c/r3tmp/tmpn1WQYw/pdisk_1.dat 2025-03-31T16:33:51.387652Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25249, node 1 2025-03-31T16:33:51.612644Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:51.612707Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:51.612733Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:51.613064Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:33:51.614887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:33:51.702592Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:51.702747Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:51.717125Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8582 2025-03-31T16:33:52.207505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:33:55.466254Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-31T16:33:55.503590Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:55.503726Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:55.543547Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:33:55.546040Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:33:55.795567Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:33:55.796122Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:33:55.796729Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:33:55.796867Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:33:55.797090Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:33:55.797185Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:33:55.797288Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:33:55.797372Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:33:55.797442Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:33:55.973101Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:55.973216Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:55.992485Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:33:56.161198Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:33:56.215077Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-31T16:33:56.215202Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-31T16:33:56.253034Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-31T16:33:56.253632Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-31T16:33:56.253874Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-31T16:33:56.253945Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-31T16:33:56.254020Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-31T16:33:56.254076Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-31T16:33:56.254134Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-31T16:33:56.254196Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-31T16:33:56.256196Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-31T16:33:56.291879Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-31T16:33:56.292035Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1870:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-31T16:33:56.302610Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1881:2607] 2025-03-31T16:33:56.307952Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1904:2618] 2025-03-31T16:33:56.308199Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1904:2618], schemeshard id = 72075186224037897 2025-03-31T16:33:56.320571Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-31T16:33:56.342745Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-31T16:33:56.342810Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-31T16:33:56.342900Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-31T16:33:56.410072Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-31T16:33:56.423335Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-31T16:33:56.423516Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-31T16:33:56.609983Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-31T16:33:56.778122Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-31T16:33:56.869334Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-31T16:33:57.850289Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2238:3072], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:57.850448Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:57.876566Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-31T16:33:58.191905Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2384:2884];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:33:58.192158Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2384:2884];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:33:58.192471Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2384:2884];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:33:58.192634Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2384:2884];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:33:58.192777Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2384:2884];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:33:58.192907Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2384:2884];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:33:58.193051Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2384:2884];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:33:58.193194Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2384:2884];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:33:58.193367Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2384:2884];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:33:58.193533Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2384:2884];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:33:58.193676Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2384:2884];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:33:58.193806Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2384:2884];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:33:58.240805Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2397:2888];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:33:58.240900Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2397:2888];tablet_id=72075186224037900;process=T ... 3-31T16:36:34.340984Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-31T16:36:34.342312Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-31T16:36:34.342367Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-31T16:36:34.344315Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-03-31T16:36:34.431434Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-03-31T16:36:34.431650Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 2 2025-03-31T16:36:34.432234Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8561:6487], server id = [2:8568:6494], tablet id = 72075186224037903 2025-03-31T16:36:34.432280Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:36:34.432801Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8654:6553], server id = [2:8659:6558], tablet id = 72075186224037899, status = OK 2025-03-31T16:36:34.432941Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8654:6553], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T16:36:34.433350Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8655:6554], server id = [2:8660:6559], tablet id = 72075186224037900, status = OK 2025-03-31T16:36:34.433405Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8655:6554], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T16:36:34.434484Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8656:6555], server id = [2:8662:6561], tablet id = 72075186224037901, status = OK 2025-03-31T16:36:34.434545Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8656:6555], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T16:36:34.434776Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8657:6556], server id = [2:8661:6560], tablet id = 72075186224037902, status = OK 2025-03-31T16:36:34.434822Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8657:6556], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T16:36:34.436041Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8658:6557], server id = [2:8663:6562], tablet id = 72075186224037903, status = OK 2025-03-31T16:36:34.436097Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8658:6557], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T16:36:34.436323Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-03-31T16:36:34.437882Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8654:6553], server id = [2:8659:6558], tablet id = 72075186224037899 2025-03-31T16:36:34.437914Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:36:34.438365Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-03-31T16:36:34.439242Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-03-31T16:36:34.439809Z node 
2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8655:6554], server id = [2:8660:6559], tablet id = 72075186224037900 2025-03-31T16:36:34.439841Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:36:34.440063Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-03-31T16:36:34.440467Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8656:6555], server id = [2:8662:6561], tablet id = 72075186224037901 2025-03-31T16:36:34.440497Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:36:34.441060Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8667:6566], server id = [2:8672:6571], tablet id = 72075186224037904, status = OK 2025-03-31T16:36:34.441136Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8667:6566], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T16:36:34.441552Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8657:6556], server id = [2:8661:6560], tablet id = 72075186224037902 2025-03-31T16:36:34.441579Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:36:34.442399Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8670:6569], server id = [2:8675:6574], tablet id = 72075186224037905, status = OK 2025-03-31T16:36:34.442465Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8670:6569], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T16:36:34.442750Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8673:6572], server id = [2:8676:6575], tablet id = 72075186224037906, status = OK 2025-03-31T16:36:34.442800Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8673:6572], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T16:36:34.443608Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8674:6573], server id = [2:8678:6576], tablet id = 72075186224037907, status = OK 2025-03-31T16:36:34.443666Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8674:6573], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T16:36:34.444965Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-03-31T16:36:34.445458Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2025-03-31T16:36:34.446351Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8667:6566], server id = [2:8672:6571], tablet id = 72075186224037904 2025-03-31T16:36:34.446379Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:36:34.446826Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8658:6557], server id = [2:8663:6562], tablet id = 72075186224037903 2025-03-31T16:36:34.446858Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:36:34.447418Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-03-31T16:36:34.447589Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-03-31T16:36:34.447826Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8682:6580], server id = [2:8684:6582], tablet id = 72075186224037908, status = OK 2025-03-31T16:36:34.447913Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8682:6580], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T16:36:34.448022Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse 
TabletId: 72075186224037907 2025-03-31T16:36:34.448656Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8670:6569], server id = [2:8675:6574], tablet id = 72075186224037905 2025-03-31T16:36:34.448686Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:36:34.448916Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8673:6572], server id = [2:8676:6575], tablet id = 72075186224037906 2025-03-31T16:36:34.448939Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:36:34.448995Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8674:6573], server id = [2:8678:6576], tablet id = 72075186224037907 2025-03-31T16:36:34.449014Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:36:34.449316Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-03-31T16:36:34.449360Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-03-31T16:36:34.449604Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-03-31T16:36:34.449823Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-03-31T16:36:34.450101Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-31T16:36:34.452796Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8682:6580], server id = [2:8684:6582], tablet id = 72075186224037908 2025-03-31T16:36:34.452831Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:36:34.453593Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List<Uint32>; DECLARE $data AS List<String>; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-03-31T16:36:34.492994Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8702:6600]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:36:34.493293Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-31T16:36:34.493346Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8702:6600], StatRequests.size() = 1 2025-03-31T16:36:34.624692Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=OGU1MjdlMDEtOTg3NzZiYy0xOWIwZWE1MS1kYmU5ODZmNA==, TxId: 2025-03-31T16:36:34.624787Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OGU1MjdlMDEtOTg3NzZiYy0xOWIwZWE1MS1kYmU5ODZmNA==, TxId: 2025-03-31T16:36:34.625480Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-31T16:36:34.649593Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:8717:6606] 2025-03-31T16:36:34.649725Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8577:6501], server id = [2:8717:6606], tablet id = 72075186224037894, status = OK 2025-03-31T16:36:34.649888Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:8717:6606], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2025-03-31T16:36:34.650094Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:8718:6607] 
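
For readability, the statistics upsert that [TQueryBase] RunDataQuery logs above, reflowed as standalone YQL. The List element types are an assumption: the generic parameters were lost to tag stripping in the captured log, and Uint32/String are inferred from how column_tag and data are used:

    DECLARE $owner_id AS Uint64;
    DECLARE $local_path_id AS Uint64;
    DECLARE $stat_type AS Uint32;
    DECLARE $column_tags AS List<Uint32>;  -- element type assumed
    DECLARE $data AS List<String>;         -- element type assumed

    UPSERT INTO `.metadata/_statistics`
        (owner_id, local_path_id, stat_type, column_tag, data)
    VALUES
        ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]),
        ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]);
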
2025-03-31T16:36:34.650220Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:8718:6607], schemeshard id = 72075186224037897 2025-03-31T16:36:34.663966Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-31T16:36:34.664065Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-03-31T16:36:34.719878Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:8721:6610]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-03-31T16:36:34.720327Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-03-31T16:36:34.720399Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-03-31T16:36:34.724101Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-03-31T16:36:34.724183Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-03-31T16:36:34.724241Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-03-31T16:36:34.731052Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 >> TCheckpointCoordinatorTests::Should2Increments1Snapshot [GOOD] >> TCheckpointCoordinatorTests::ShouldAbortPreviousCheckpointsIfNodeStateCantBeSaved >> CoordinatorTests::Route >> TCheckpointCoordinatorTests::ShouldAbortPreviousCheckpointsIfNodeStateCantBeSaved [GOOD] >> CoordinatorTests::Route [GOOD] >> CoordinatorTests::RouteTwoTopicWichSameName >> CoordinatorTests::RouteTwoTopicWichSameName [GOOD] >> LeaderElectionTests::Test1 >> TDqPqRdReadActorTests::TestSaveLoadPqRdRead [GOOD] >> LeaderElectionTests::Test1 [GOOD] >> LeaderElectionTests::TestLocalMode >> TDqPqRdReadActorTests::CoordinatorChanged >> LeaderElectionTests::TestLocalMode [GOOD] >> TopicSessionTests::TwoSessionsWithoutOffsets ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpointing/ut/unittest >> TCheckpointCoordinatorTests::ShouldAbortPreviousCheckpointsIfNodeStateCantBeSaved [GOOD] Test command err: 2025-03-31T16:36:36.871640Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] TEvReadyState, streaming disposition { }, state load mode FROM_LAST_CHECKPOINT 2025-03-31T16:36:36.871825Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Send TEvRegisterCoordinatorRequest Waiting for TEvRegisterCoordinatorRequest (storage) 2025-03-31T16:36:36.871962Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Got TEvRegisterCoordinatorResponse; issues: 2025-03-31T16:36:36.872003Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Successfully registered in storage 2025-03-31T16:36:36.872070Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] Send TEvNewCheckpointCoordinator to 3 actor(s) 2025-03-31T16:36:36.873110Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] Send TEvGetCheckpointsMetadataRequest; state load mode: FROM_LAST_CHECKPOINT; load graph: 0 Waiting for TEvGetCheckpointsMetadataRequest (storage) 2025-03-31T16:36:36.879609Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Got TEvGetCheckpointsMetadataResponse 
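
Each TCheckpointCoordinatorTests run above boots the same way: register with storage, request checkpoint metadata, and, as the next lines show, mint a 'zero' checkpoint when there is nothing to restore. A compressed sketch of that decision with invented names; this is not the TCheckpointCoordinator API, only the branch mirrors the log:

    #include <iostream>
    #include <optional>

    // Invented types for illustration; only the decision logic mirrors the log:
    // "Found no checkpoints to restore from, creating a 'zero' checkpoint".
    struct TCheckpointId { int Generation; int SeqNo; };

    std::optional<TCheckpointId> LoadLatestCheckpoint() {
        return std::nullopt;  // pretend storage returned no metadata
    }

    TCheckpointId StartCoordinator(int generation) {
        if (auto last = LoadLatestCheckpoint()) {
            return *last;                     // restore FROM_LAST_CHECKPOINT
        }
        return TCheckpointId{generation, 1};  // otherwise register [gen:1], the 'zero' checkpoint
    }

    int main() {
        TCheckpointId id = StartCoordinator(42);
        std::cout << "[" << id.Generation << ":" << id.SeqNo << "]\n";  // prints [42:1]
    }
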
2025-03-31T16:36:36.879683Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] Found no checkpoints to restore from, creating a 'zero' checkpoint 2025-03-31T16:36:36.879713Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Registering new checkpoint in storage Waiting for TEvCreateCheckpointRequest (storage) 2025-03-31T16:36:36.885208Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvCreateCheckpointResponse 2025-03-31T16:36:36.885266Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Checkpoint successfully created, going to inject barriers to 1 actor(s) 2025-03-31T16:36:36.885322Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Send TEvRun to all actors Waiting for TEvInjectCheckpointBarrier (ingress) 2025-03-31T16:36:36.885418Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-03-31T16:36:36.885448Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Task state saved, need 2 more acks 2025-03-31T16:36:36.885479Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-03-31T16:36:36.885505Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Task state saved, need 1 more acks 2025-03-31T16:36:36.885534Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-03-31T16:36:36.885556Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Task state saved, need 0 more acks 2025-03-31T16:36:36.885576Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Got all acks, changing checkpoint status to 'PendingCommit' Waiting for TEvSetCheckpointPendingCommitStatusRequest (storage) 2025-03-31T16:36:36.885637Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvSetCheckpointPendingCommitStatusResponse 2025-03-31T16:36:36.885659Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Checkpoint status changed to 'PendingCommit', committing states Waiting for TEvCommitChanges (ingress) Waiting for TEvCommitChanges (egress) 2025-03-31T16:36:36.885868Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvStateCommitted; task: 1 2025-03-31T16:36:36.885908Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] State committed [1:6:2053], need 1 more acks 2025-03-31T16:36:36.885950Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvStateCommitted; task: 3 2025-03-31T16:36:36.885986Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] State committed [1:8:2055], need 0 more acks 2025-03-31T16:36:36.886011Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Got all acks, changing checkpoint status to 'Completed' Waiting for TEvCompleteCheckpointRequest (storage) 2025-03-31T16:36:36.886102Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvCompleteCheckpointResponse 2025-03-31T16:36:36.886131Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Checkpoint completed 2025-03-31T16:36:36.962428Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] TEvReadyState, streaming disposition { }, state load mode FROM_LAST_CHECKPOINT 2025-03-31T16:36:36.962570Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Send 
TEvRegisterCoordinatorRequest Waiting for TEvRegisterCoordinatorRequest (storage) 2025-03-31T16:36:36.962649Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Got TEvRegisterCoordinatorResponse; issues: 2025-03-31T16:36:36.962670Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Successfully registered in storage 2025-03-31T16:36:36.962712Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] Send TEvNewCheckpointCoordinator to 3 actor(s) 2025-03-31T16:36:36.962763Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] Send TEvGetCheckpointsMetadataRequest; state load mode: FROM_LAST_CHECKPOINT; load graph: 0 Waiting for TEvGetCheckpointsMetadataRequest (storage) 2025-03-31T16:36:36.962887Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Got TEvGetCheckpointsMetadataResponse 2025-03-31T16:36:36.962909Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] Found no checkpoints to restore from, creating a 'zero' checkpoint 2025-03-31T16:36:36.962931Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Registering new checkpoint in storage Waiting for TEvCreateCheckpointRequest (storage) 2025-03-31T16:36:36.963032Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvCreateCheckpointResponse 2025-03-31T16:36:36.963060Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Checkpoint successfully created, going to inject barriers to 1 actor(s) 2025-03-31T16:36:36.963102Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Send TEvRun to all actors Waiting for TEvInjectCheckpointBarrier (ingress) 2025-03-31T16:36:36.963165Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-03-31T16:36:36.963198Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Task state saved, need 2 more acks 2025-03-31T16:36:36.963231Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-03-31T16:36:36.963262Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Task state saved, need 1 more acks 2025-03-31T16:36:36.963291Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-03-31T16:36:36.963312Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Task state saved, need 0 more acks 2025-03-31T16:36:36.963334Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Got all acks, changing checkpoint status to 'PendingCommit' Waiting for TEvSetCheckpointPendingCommitStatusRequest (storage) 2025-03-31T16:36:36.963376Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvSetCheckpointPendingCommitStatusResponse 2025-03-31T16:36:36.963399Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Checkpoint status changed to 'PendingCommit', committing states Waiting for TEvCommitChanges (ingress) Waiting for TEvCommitChanges (egress) 2025-03-31T16:36:36.963507Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvStateCommitted; task: 1 2025-03-31T16:36:36.963543Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] State committed [2:6:2053], need 1 more acks 2025-03-31T16:36:36.963570Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvStateCommitted; task: 3 
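
The coordinator lines above count down acknowledgements twice per checkpoint: save acks ("Task state saved, need N more acks") gate the move to 'PendingCommit', and commit acks ("State committed ..., need N more acks") gate 'Completed'. A minimal countdown-barrier sketch of that two-phase pattern, assuming only the structure shown in the log rather than the actual TCheckpointCoordinator code:

    #include <cstddef>
    #include <iostream>

    // Countdown-barrier sketch: a checkpoint advances a phase only after
    // every participating task acknowledges. Phases mirror the log:
    // save acks -> 'PendingCommit', commit acks -> 'Completed'.
    struct TCheckpoint {
        enum class EPhase { Saving, PendingCommit, Completed };
        EPhase Phase = EPhase::Saving;
        size_t AcksLeft;

        explicit TCheckpoint(size_t tasks) : AcksLeft(tasks) {}

        void OnAck(size_t tasksForNextPhase) {
            if (--AcksLeft > 0) return;          // "need N more acks"
            if (Phase == EPhase::Saving) {
                Phase = EPhase::PendingCommit;   // "changing checkpoint status to 'PendingCommit'"
                AcksLeft = tasksForNextPhase;    // now wait for the commit acks
            } else if (Phase == EPhase::PendingCommit) {
                Phase = EPhase::Completed;       // "changing checkpoint status to 'Completed'"
            }
        }
    };

    int main() {
        TCheckpoint cp(/*tasks=*/3);             // three TEvSaveTaskStateResult acks expected
        cp.OnAck(2); cp.OnAck(2); cp.OnAck(2);   // -> PendingCommit, two commit acks expected
        cp.OnAck(0); cp.OnAck(0);                // -> Completed
        std::cout << (cp.Phase == TCheckpoint::EPhase::Completed) << "\n";  // prints 1
    }
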
2025-03-31T16:36:36.963598Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] State committed [2:8:2055], need 0 more acks 2025-03-31T16:36:36.963629Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Got all acks, changing checkpoint status to 'Completed' Waiting for TEvCompleteCheckpointRequest (storage) 2025-03-31T16:36:36.963693Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvCompleteCheckpointResponse 2025-03-31T16:36:36.963766Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Checkpoint completed 2025-03-31T16:36:37.049886Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] TEvReadyState, streaming disposition { }, state load mode FROM_LAST_CHECKPOINT 2025-03-31T16:36:37.050054Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Send TEvRegisterCoordinatorRequest Waiting for TEvRegisterCoordinatorRequest (storage) 2025-03-31T16:36:37.050158Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Got TEvRegisterCoordinatorResponse; issues: 2025-03-31T16:36:37.050200Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Successfully registered in storage 2025-03-31T16:36:37.050233Z node 3 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] Send TEvNewCheckpointCoordinator to 3 actor(s) 2025-03-31T16:36:37.050298Z node 3 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] Send TEvGetCheckpointsMetadataRequest; state load mode: FROM_LAST_CHECKPOINT; load graph: 0 Waiting for TEvGetCheckpointsMetadataRequest (storage) 2025-03-31T16:36:37.050461Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Got TEvGetCheckpointsMetadataResponse 2025-03-31T16:36:37.050499Z node 3 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] Found no checkpoints to restore from, creating a 'zero' checkpoint 2025-03-31T16:36:37.050534Z node 3 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Registering new checkpoint in storage Waiting for TEvCreateCheckpointRequest (storage) 2025-03-31T16:36:37.050658Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvCreateCheckpointResponse 2025-03-31T16:36:37.050702Z node 3 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Checkpoint successfully created, going to inject barriers to 1 actor(s) 2025-03-31T16:36:37.050741Z node 3 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Send TEvRun to all actors Waiting for TEvInjectCheckpointBarrier (ingress) 2025-03-31T16:36:37.050839Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-03-31T16:36:37.050883Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Task state saved, need 2 more acks 2025-03-31T16:36:37.050935Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-03-31T16:36:37.050968Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Task state saved, need 1 more acks 2025-03-31T16:36:37.051007Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-03-31T16:36:37.051039Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Task state saved, need 0 more acks 2025-03-31T16:36:37.051097Z node 3 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Got all acks, changing checkpoint status to 'PendingCommit' Waiting 
for TEvSetCheckpointPendingCommitStatusRequest (storage) 2025-03-31T16:36:37.051196Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvSetCheckpointPendingCommitStatusResponse 2025-03-31T16:36:37.051231Z node 3 :STREAMS_C ... :2] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-03-31T16:36:37.140791Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:2] Task state saved, need 0 more acks 2025-03-31T16:36:37.140818Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:2] Got all acks, changing checkpoint status to 'PendingCommit' Waiting for TEvSetCheckpointPendingCommitStatusRequest (storage) 2025-03-31T16:36:37.140941Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:2] Got TEvSetCheckpointPendingCommitStatusResponse 2025-03-31T16:36:37.140974Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:2] Checkpoint status changed to 'PendingCommit', committing states Waiting for TEvCommitChanges (ingress) Waiting for TEvCommitChanges (egress) 2025-03-31T16:36:37.141066Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:2] Got TEvStateCommitted; task: 1 2025-03-31T16:36:37.141100Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:2] State committed [4:6:2053], need 1 more acks 2025-03-31T16:36:37.141140Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:2] Got TEvStateCommitted; task: 3 2025-03-31T16:36:37.141171Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:2] State committed [4:8:2055], need 0 more acks 2025-03-31T16:36:37.141196Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:2] Got all acks, changing checkpoint status to 'Completed' Waiting for TEvCompleteCheckpointRequest (storage) 2025-03-31T16:36:37.141241Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:2] Got TEvCompleteCheckpointResponse 2025-03-31T16:36:37.141268Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:2] Checkpoint completed 2025-03-31T16:36:37.141306Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Got TEvScheduleCheckpointing 2025-03-31T16:36:37.141340Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:3] Registering new checkpoint in storage Waiting for TEvCreateCheckpointRequest (storage) 2025-03-31T16:36:37.141409Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:3] Got TEvCreateCheckpointResponse 2025-03-31T16:36:37.141455Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:3] Checkpoint successfully created, going to inject barriers to 1 actor(s) Waiting for TEvInjectCheckpointBarrier (ingress) 2025-03-31T16:36:37.141523Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:3] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-03-31T16:36:37.141554Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:3] Task state saved, need 2 more acks 2025-03-31T16:36:37.141591Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:3] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-03-31T16:36:37.141623Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:3] Task state saved, need 1 more acks 2025-03-31T16:36:37.141666Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:3] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-03-31T16:36:37.141698Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:3] Task state saved, 
need 0 more acks 2025-03-31T16:36:37.141724Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:3] Got all acks, changing checkpoint status to 'PendingCommit' Waiting for TEvSetCheckpointPendingCommitStatusRequest (storage) 2025-03-31T16:36:37.141786Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:3] Got TEvSetCheckpointPendingCommitStatusResponse 2025-03-31T16:36:37.141818Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:3] Checkpoint status changed to 'PendingCommit', committing states Waiting for TEvCommitChanges (ingress) Waiting for TEvCommitChanges (egress) 2025-03-31T16:36:37.141933Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:3] Got TEvStateCommitted; task: 1 2025-03-31T16:36:37.141964Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:3] State committed [4:6:2053], need 1 more acks 2025-03-31T16:36:37.142002Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:3] Got TEvStateCommitted; task: 3 2025-03-31T16:36:37.142034Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:3] State committed [4:8:2055], need 0 more acks 2025-03-31T16:36:37.142060Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:3] Got all acks, changing checkpoint status to 'Completed' Waiting for TEvCompleteCheckpointRequest (storage) 2025-03-31T16:36:37.142128Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:3] Got TEvCompleteCheckpointResponse 2025-03-31T16:36:37.142158Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:3] Checkpoint completed 2025-03-31T16:36:37.142192Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Got TEvScheduleCheckpointing 2025-03-31T16:36:37.142234Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:4] Registering new checkpoint in storage Waiting for TEvCreateCheckpointRequest (storage) 2025-03-31T16:36:37.142286Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:4] Got TEvCreateCheckpointResponse 2025-03-31T16:36:37.142324Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:4] Checkpoint successfully created, going to inject barriers to 1 actor(s) Waiting for TEvInjectCheckpointBarrier (ingress) 2025-03-31T16:36:37.142397Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:4] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-03-31T16:36:37.142429Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:4] Task state saved, need 2 more acks 2025-03-31T16:36:37.142466Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:4] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-03-31T16:36:37.142494Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:4] Task state saved, need 1 more acks 2025-03-31T16:36:37.142527Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:4] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-03-31T16:36:37.142555Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:4] Task state saved, need 0 more acks 2025-03-31T16:36:37.142590Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:4] Got all acks, changing checkpoint status to 'PendingCommit' Waiting for TEvSetCheckpointPendingCommitStatusRequest (storage) 2025-03-31T16:36:37.142648Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:4] Got TEvSetCheckpointPendingCommitStatusResponse 2025-03-31T16:36:37.142677Z node 4 
:STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:4] Checkpoint status changed to 'PendingCommit', committing states Waiting for TEvCommitChanges (ingress) Waiting for TEvCommitChanges (egress) 2025-03-31T16:36:37.142765Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:4] Got TEvStateCommitted; task: 1 2025-03-31T16:36:37.142824Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:4] State committed [4:6:2053], need 1 more acks 2025-03-31T16:36:37.142861Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:4] Got TEvStateCommitted; task: 3 2025-03-31T16:36:37.142892Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:4] State committed [4:8:2055], need 0 more acks 2025-03-31T16:36:37.142918Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:4] Got all acks, changing checkpoint status to 'Completed' Waiting for TEvCompleteCheckpointRequest (storage) 2025-03-31T16:36:37.142963Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:4] Got TEvCompleteCheckpointResponse 2025-03-31T16:36:37.143000Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:4] Checkpoint completed 2025-03-31T16:36:37.228636Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] TEvReadyState, streaming disposition { }, state load mode FROM_LAST_CHECKPOINT 2025-03-31T16:36:37.228776Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Send TEvRegisterCoordinatorRequest Waiting for TEvRegisterCoordinatorRequest (storage) 2025-03-31T16:36:37.228887Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Got TEvRegisterCoordinatorResponse; issues: 2025-03-31T16:36:37.228922Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Successfully registered in storage 2025-03-31T16:36:37.228952Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] Send TEvNewCheckpointCoordinator to 3 actor(s) 2025-03-31T16:36:37.229003Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] Send TEvGetCheckpointsMetadataRequest; state load mode: FROM_LAST_CHECKPOINT; load graph: 0 Waiting for TEvGetCheckpointsMetadataRequest (storage) 2025-03-31T16:36:37.229173Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Got TEvGetCheckpointsMetadataResponse 2025-03-31T16:36:37.229209Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] Found no checkpoints to restore from, creating a 'zero' checkpoint 2025-03-31T16:36:37.229237Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Registering new checkpoint in storage Waiting for TEvCreateCheckpointRequest (storage) 2025-03-31T16:36:37.229354Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvCreateCheckpointResponse 2025-03-31T16:36:37.229396Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Checkpoint successfully created, going to inject barriers to 1 actor(s) 2025-03-31T16:36:37.229443Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Send TEvRun to all actors Waiting for TEvInjectCheckpointBarrier (ingress) 2025-03-31T16:36:37.229535Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-03-31T16:36:37.229572Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Task state saved, need 2 more acks 2025-03-31T16:36:37.229617Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, 
status: STORAGE_ERROR, size: 0 2025-03-31T16:36:37.229648Z node 5 :STREAMS_CHECKPOINT_COORDINATOR ERROR: [my-graph-id.42] [42:1] StorageError: can't save node state, aborting checkpoint 2025-03-31T16:36:37.229683Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: STORAGE_ERROR, size: 0 2025-03-31T16:36:37.229711Z node 5 :STREAMS_CHECKPOINT_COORDINATOR ERROR: [my-graph-id.42] [42:1] StorageError: can't save node state, aborting checkpoint 2025-03-31T16:36:37.229755Z node 5 :STREAMS_CHECKPOINT_COORDINATOR ERROR: [my-graph-id.42] [42:1] Got all acks for aborted checkpoint, aborting in storage Waiting for TEvAbortCheckpointRequest (storage) 2025-03-31T16:36:37.229815Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvAbortCheckpointResponse 2025-03-31T16:36:37.229845Z node 5 :STREAMS_CHECKPOINT_COORDINATOR WARN: [my-graph-id.42] [42:1] Checkpoint aborted 2025-03-31T16:36:37.229882Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Got TEvScheduleCheckpointing 2025-03-31T16:36:37.229915Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:2] Registering new checkpoint in storage Waiting for TEvCreateCheckpointRequest (storage) 2025-03-31T16:36:37.229974Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:2] Got TEvCreateCheckpointResponse 2025-03-31T16:36:37.230006Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:2] Checkpoint successfully created, going to inject barriers to 1 actor(s) Waiting for TEvInjectCheckpointBarrier (ingress) |97.4%| [TM] {RESULT} ydb/core/fq/libs/checkpointing/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite3 [GOOD] Test command err: Trying to start YDB, gRPC: 23516, MsgBus: 25752 2025-03-31T16:35:53.610431Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488013289940124920:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:53.610553Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0012a7/r3tmp/tmpoeodYt/pdisk_1.dat 2025-03-31T16:35:53.956346Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23516, node 1 2025-03-31T16:35:54.026171Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:54.026312Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:54.028695Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:35:54.055566Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:54.055587Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:54.055607Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:54.055734Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25752 TClient is connected to server localhost:25752 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:54.528174Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:54.546903Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:35:56.633077Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013302825027479:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:56.633174Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013302825027470:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:56.633762Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:56.637973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:35:56.648596Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488013302825027484:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:35:56.750294Z node 1 :TX_PROXY ERROR: Actor# [1:7488013302825027537:2336] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:57.061226Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:35:57.219973Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488013307119995016:2345];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:35:57.219973Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488013307119995072:2352];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:35:57.220204Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488013307119995072:2352];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:35:57.220530Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488013307119995072:2352];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:35:57.220634Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488013307119995016:2345];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:35:57.220698Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488013307119995072:2352];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:35:57.220865Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488013307119995016:2345];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:35:57.220867Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488013307119995072:2352];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:35:57.220991Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488013307119995016:2345];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:35:57.221000Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488013307119995072:2352];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:35:57.221090Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488013307119995016:2345];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 
2025-03-31T16:35:57.221109Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488013307119995072:2352];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:35:57.221211Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488013307119995016:2345];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:35:57.221232Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488013307119995072:2352];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:35:57.221557Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488013307119995016:2345];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:35:57.221589Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488013307119995072:2352];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:35:57.221700Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488013307119995016:2345];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:35:57.221764Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488013307119995072:2352];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:35:57.221825Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488013307119995016:2345];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:35:57.221879Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488013307119995072:2352];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:35:57.221924Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488013307119995016:2345];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:35:57.222026Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488013307119995072:2352];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:35:57.222071Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488013307119995016:2345];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:35:57.222271Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7488013307119995016:2345];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:35:57.256389Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488013307119995014:2344];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:35:57.256454Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488013307119995014:2344];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abs ... 5;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:36:29.584614Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037966;self_id=[2:7488013400303336456:2440];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037966;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:36:29.584634Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037956;self_id=[2:7488013400303336594:2457];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037956;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:36:29.584711Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037957;self_id=[2:7488013400303336584:2453];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037957;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:36:29.584771Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037958;self_id=[2:7488013400303336619:2468];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037958;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:36:29.584831Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037974;self_id=[2:7488013400303336425:2436];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037974;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:36:29.584888Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037975;self_id=[2:7488013400303336419:2434];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037975;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:36:29.584951Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037976;self_id=[2:7488013400303336417:2433];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037976;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:36:29.585018Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037977;self_id=[2:7488013400303336454:2439];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037977;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:36:29.585029Z node 2 :TX_COLUMNSHARD_WRITE WARN: 
tablet_id=72075186224037967;self_id=[2:7488013400303336687:2474];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037967;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:36:29.585094Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037968;self_id=[2:7488013400303336611:2464];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037968;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:36:29.585097Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037978;self_id=[2:7488013400303336557:2445];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037978;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:36:29.585156Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037979;self_id=[2:7488013400303336563:2448];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037979;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:36:29.585167Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037969;self_id=[2:7488013400303336707:2484];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037969;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:36:29.585217Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037980;self_id=[2:7488013400303336572:2450];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037980;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:36:29.585228Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037970;self_id=[2:7488013400303336582:2452];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037970;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:36:29.585279Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037981;self_id=[2:7488013400303336561:2447];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037981;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:36:29.585284Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037971;self_id=[2:7488013400303336596:2458];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037971;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:36:29.585339Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037972;self_id=[2:7488013400303336407:2431];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037972;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:36:29.585344Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037982;self_id=[2:7488013400303336492:2441];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037982;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only 
single operation is supported;tx_id=0; 2025-03-31T16:36:29.585390Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037973;self_id=[2:7488013400303336493:2442];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037973;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:36:29.585410Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037983;self_id=[2:7488013400303336559:2446];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037983;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:36:29.585450Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037990;self_id=[2:7488013400303336409:2432];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037990;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:36:29.585474Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037984;self_id=[2:7488013400303336511:2444];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037984;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:36:29.585515Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037991;self_id=[2:7488013400303336401:2429];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037991;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:36:29.585536Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037985;self_id=[2:7488013400303336423:2435];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037985;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:36:29.585571Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037992;self_id=[2:7488013400303336302:2425];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037992;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:36:29.585602Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037986;self_id=[2:7488013400303336510:2443];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037986;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:36:29.585630Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037993;self_id=[2:7488013400303336299:2424];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037993;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:36:29.585667Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037987;self_id=[2:7488013400303336403:2430];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037987;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:36:29.585696Z node 2 :TX_COLUMNSHARD_WRITE WARN: 
tablet_id=72075186224037994;self_id=[2:7488013400303336365:2427];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037994;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:36:29.585737Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037988;self_id=[2:7488013400303336435:2437];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037988;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:36:29.585793Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037995;self_id=[2:7488013400303336367:2428];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037995;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:36:29.585833Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037989;self_id=[2:7488013400303336437:2438];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037989;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:36:29.585872Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037996;self_id=[2:7488013400303336235:2418];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037996;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:36:29.585929Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037997;self_id=[2:7488013400303336303:2426];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037997;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-31T16:36:29.962804Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:36:29.962834Z node 2 :IMPORT WARN: Table profiles were not loaded WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> TTxDataShardLocalKMeansScan::BuildToBuild [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::CloseConnection [GOOD] Test command err: Trying to start YDB, gRPC: 18731, MsgBus: 14873 2025-03-31T16:35:27.114462Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488013177361959021:2144];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:27.117944Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00138f/r3tmp/tmpWkCWin/pdisk_1.dat 2025-03-31T16:35:27.447664Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18731, node 1 2025-03-31T16:35:27.510850Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:27.510991Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:27.512692Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:35:27.519679Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken 
or outdated, will use file: (empty maybe) 2025-03-31T16:35:27.519704Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:27.519715Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:27.519875Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14873 TClient is connected to server localhost:14873 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:28.086270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:28.106933Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:28.124989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:35:28.271599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:28.447066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:35:28.531618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:35:30.240861Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013190246862586:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:30.240993Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:30.636006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:35:30.668653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:35:30.697037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:35:30.730814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:35:30.760661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:35:30.835317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:35:30.883941Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013190246863100:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:30.884025Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:30.884307Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013190246863105:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:30.888620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:35:30.900404Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488013190246863107:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:35:30.956135Z node 1 :TX_PROXY ERROR: Actor# [1:7488013190246863160:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:32.114364Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488013177361959021:2144];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:32.114450Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 1732, MsgBus: 29119 2025-03-31T16:35:33.759168Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488013200112709914:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:33.759341Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00138f/r3tmp/tmpawknmw/pdisk_1.dat 2025-03-31T16:35:33.889358Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1732, node 2 2025-03-31T16:35:33.933615Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:33.933718Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:33.934859Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:35:33.963042Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:33.963068Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:33.963075Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:33.963242Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29119 TClient is connected to server localhost:29119 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:35:34.405592Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:34.422822Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:34.472728Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:34.620053Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:34.697730Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreat ... task: 5. Ctx: { TraceId : 01jqpgzna4f73z9rmamydwda77. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=ZmY0Y2Q1MDYtOGJkNmQwZTUtNDg5NDExZDgtZmE3Nzk2OGI=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [4:7488013453410236740:2550], status: ABORTED, reason: {
: Error: Terminate execution } 2025-03-31T16:36:32.724371Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZmY0Y2Q1MDYtOGJkNmQwZTUtNDg5NDExZDgtZmE3Nzk2OGI=, ActorId: [4:7488013453410236737:2550], ActorState: ExecuteState, TraceId: 01jqpgzna4f73z9rmamydwda77, Create QueryResponse for error on request, msg: 2025-03-31T16:36:32.746363Z node 4 :KQP_EXECUTER ERROR: ActorId: [4:7488013453410236791:2558] TxId: 281474976715675. Ctx: { TraceId: 01jqpgznax7v3q6r7g1eknchfg, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=MmYwODY3ZTUtZjFkNGJiOGYtZjgxM2UzODUtMTA1Zjk3MDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2025-03-31T16:36:32.746461Z node 4 :RPC_REQUEST WARN: Client lost 2025-03-31T16:36:32.746913Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7488013453410236798:2561], TxId: 281474976715675, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=MmYwODY3ZTUtZjFkNGJiOGYtZjgxM2UzODUtMTA1Zjk3MDE=. CustomerSuppliedId : . TraceId : 01jqpgznax7v3q6r7g1eknchfg. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [4:7488013453410236791:2558], status: ABORTED, reason: {
: Error: Terminate execution } 2025-03-31T16:36:32.747462Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7488013453410236799:2562], TxId: 281474976715675, task: 3. Ctx: { TraceId : 01jqpgznax7v3q6r7g1eknchfg. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=MmYwODY3ZTUtZjFkNGJiOGYtZjgxM2UzODUtMTA1Zjk3MDE=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [4:7488013453410236791:2558], status: ABORTED, reason: {
: Error: Terminate execution } 2025-03-31T16:36:32.747747Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7488013453410236800:2563], TxId: 281474976715675, task: 4. Ctx: { SessionId : ydb://session/3?node_id=4&id=MmYwODY3ZTUtZjFkNGJiOGYtZjgxM2UzODUtMTA1Zjk3MDE=. CustomerSuppliedId : . TraceId : 01jqpgznax7v3q6r7g1eknchfg. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [4:7488013453410236791:2558], status: ABORTED, reason: {
: Error: Terminate execution } 2025-03-31T16:36:32.748072Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7488013453410236797:2560], TxId: 281474976715675, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=MmYwODY3ZTUtZjFkNGJiOGYtZjgxM2UzODUtMTA1Zjk3MDE=. CustomerSuppliedId : . TraceId : 01jqpgznax7v3q6r7g1eknchfg. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [4:7488013453410236791:2558], status: ABORTED, reason: {
: Error: Terminate execution } 2025-03-31T16:36:32.748272Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7488013453410236801:2564], TxId: 281474976715675, task: 5. Ctx: { CustomerSuppliedId : . TraceId : 01jqpgznax7v3q6r7g1eknchfg. SessionId : ydb://session/3?node_id=4&id=MmYwODY3ZTUtZjFkNGJiOGYtZjgxM2UzODUtMTA1Zjk3MDE=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [4:7488013453410236791:2558], status: ABORTED, reason: {
: Error: Terminate execution } 2025-03-31T16:36:32.750087Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MmYwODY3ZTUtZjFkNGJiOGYtZjgxM2UzODUtMTA1Zjk3MDE=, ActorId: [4:7488013453410236776:2558], ActorState: ExecuteState, TraceId: 01jqpgznax7v3q6r7g1eknchfg, Create QueryResponse for error on request, msg: 2025-03-31T16:36:32.843972Z node 4 :RPC_REQUEST WARN: Client lost 2025-03-31T16:36:32.844457Z node 4 :KQP_EXECUTER ERROR: ActorId: [4:7488013453410236910:2590] TxId: 281474976715680. Ctx: { TraceId: 01jqpgzndh166747fp8f1xch83, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NWY5MTVkNjQtMmQyOGZmMDctNjgxY2U0OTEtNDI5MmVhMjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2025-03-31T16:36:32.900114Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7488013453410236920:2594], TxId: 281474976715680, task: 3. Ctx: { TraceId : 01jqpgzndh166747fp8f1xch83. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=NWY5MTVkNjQtMmQyOGZmMDctNjgxY2U0OTEtNDI5MmVhMjg=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [4:7488013453410236910:2590], status: ABORTED, reason: {
: Error: Terminate execution } 2025-03-31T16:36:32.901985Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NWY5MTVkNjQtMmQyOGZmMDctNjgxY2U0OTEtNDI5MmVhMjg=, ActorId: [4:7488013453410236907:2590], ActorState: ExecuteState, TraceId: 01jqpgzndh166747fp8f1xch83, Create QueryResponse for error on request, msg: 2025-03-31T16:36:33.217298Z node 4 :RPC_REQUEST WARN: Client lost 2025-03-31T16:36:33.217913Z node 4 :KQP_EXECUTER ERROR: ActorId: [4:7488013457705204662:2704] TxId: 281474976715695. Ctx: { TraceId: 01jqpgznrwerqn77kfmq3pz1vj, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=OTNlMTE3YjctMWUxZTcwZTUtZGY0Y2ZjNzgtMzNkMjYxZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2025-03-31T16:36:33.276523Z node 4 :KQP_EXECUTER ERROR: ActorId: [4:7488013457705204662:2704] TxId: 281474976715695. Ctx: { TraceId: 01jqpgznrwerqn77kfmq3pz1vj, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=OTNlMTE3YjctMWUxZTcwZTUtZGY0Y2ZjNzgtMzNkMjYxZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Unexpected event while waiting for shutdown: NYql::NDq::TEvDqCompute::TEvChannelData 2025-03-31T16:36:33.276576Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7488013457705204673:2710], TxId: 281474976715695, task: 5. Ctx: { SessionId : ydb://session/3?node_id=4&id=OTNlMTE3YjctMWUxZTcwZTUtZGY0Y2ZjNzgtMzNkMjYxZA==. TraceId : 01jqpgznrwerqn77kfmq3pz1vj. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [4:7488013457705204662:2704], status: ABORTED, reason: {
: Error: Terminate execution } 2025-03-31T16:36:33.277744Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OTNlMTE3YjctMWUxZTcwZTUtZGY0Y2ZjNzgtMzNkMjYxZA==, ActorId: [4:7488013457705204659:2704], ActorState: ExecuteState, TraceId: 01jqpgznrwerqn77kfmq3pz1vj, Create QueryResponse for error on request, msg: 2025-03-31T16:36:33.411490Z node 4 :RPC_REQUEST WARN: Client lost 2025-03-31T16:36:33.655716Z node 4 :RPC_REQUEST WARN: Client lost 2025-03-31T16:36:33.656139Z node 4 :KQP_EXECUTER ERROR: ActorId: [4:7488013457705205182:2833] TxId: 281474976715712. Ctx: { TraceId: 01jqpgzp6261a19dnhvkp74k04, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NTFlYWNkYTItNGM2ZDNjN2ItMmY3YzEwZGYtYjIzMTE1MTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2025-03-31T16:36:33.656306Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7488013457705205193:2839], TxId: 281474976715712, task: 5. Ctx: { TraceId : 01jqpgzp6261a19dnhvkp74k04. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=NTFlYWNkYTItNGM2ZDNjN2ItMmY3YzEwZGYtYjIzMTE1MTQ=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [4:7488013457705205182:2833], status: ABORTED, reason: {
: Error: Terminate execution } 2025-03-31T16:36:33.678971Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7488013457705205192:2838], TxId: 281474976715712, task: 4. Ctx: { SessionId : ydb://session/3?node_id=4&id=NTFlYWNkYTItNGM2ZDNjN2ItMmY3YzEwZGYtYjIzMTE1MTQ=. CustomerSuppliedId : . TraceId : 01jqpgzp6261a19dnhvkp74k04. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [4:7488013457705205182:2833], status: ABORTED, reason: {
: Error: Terminate execution } 2025-03-31T16:36:33.680539Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NTFlYWNkYTItNGM2ZDNjN2ItMmY3YzEwZGYtYjIzMTE1MTQ=, ActorId: [4:7488013457705205179:2833], ActorState: ExecuteState, TraceId: 01jqpgzp6261a19dnhvkp74k04, Create QueryResponse for error on request, msg: 2025-03-31T16:36:34.285456Z node 4 :RPC_REQUEST WARN: Client lost 2025-03-31T16:36:34.286334Z node 4 :KQP_EXECUTER ERROR: ActorId: [4:7488013462000173182:3019] TxId: 281474976715736. Ctx: { TraceId: 01jqpgzps098npbcd81d722zz1, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=MWViY2UyYmItN2NjMzNmNzAtZmIxZDc0Y2YtZjJmNTc2ODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2025-03-31T16:36:34.333915Z node 4 :KQP_EXECUTER ERROR: ActorId: [4:7488013462000173182:3019] TxId: 281474976715736. Ctx: { TraceId: 01jqpgzps098npbcd81d722zz1, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=MWViY2UyYmItN2NjMzNmNzAtZmIxZDc0Y2YtZjJmNTc2ODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Unexpected event while waiting for shutdown: NKikimr::NKqp::TEvKqpNode::TEvStartKqpTasksResponse 2025-03-31T16:36:35.286934Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MWViY2UyYmItN2NjMzNmNzAtZmIxZDc0Y2YtZjJmNTc2ODA=, ActorId: [4:7488013462000173179:3019], ActorState: ExecuteState, TraceId: 01jqpgzps098npbcd81d722zz1, Create QueryResponse for error on request, msg: 2025-03-31T16:36:35.336037Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7488013462000173207:3024], TxId: 281474976715736, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=MWViY2UyYmItN2NjMzNmNzAtZmIxZDc0Y2YtZjJmNTc2ODA=. TraceId : 01jqpgzps098npbcd81d722zz1. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle undelivered TEvState event, abort execution 2025-03-31T16:36:35.336424Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7488013462000173210:3026], TxId: 281474976715736, task: 3. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=MWViY2UyYmItN2NjMzNmNzAtZmIxZDc0Y2YtZjJmNTc2ODA=. TraceId : 01jqpgzps098npbcd81d722zz1. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle undelivered TEvState event, abort execution 2025-03-31T16:36:35.336539Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7488013462000173208:3025], TxId: 281474976715736, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=MWViY2UyYmItN2NjMzNmNzAtZmIxZDc0Y2YtZjJmNTc2ODA=. TraceId : 01jqpgzps098npbcd81d722zz1. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle undelivered TEvState event, abort execution 2025-03-31T16:36:35.336902Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7488013462000173212:3028], TxId: 281474976715736, task: 5. Ctx: { CustomerSuppliedId : . TraceId : 01jqpgzps098npbcd81d722zz1. SessionId : ydb://session/3?node_id=4&id=MWViY2UyYmItN2NjMzNmNzAtZmIxZDc0Y2YtZjJmNTc2ODA=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle undelivered TEvState event, abort execution 2025-03-31T16:36:35.336939Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7488013462000173211:3027], TxId: 281474976715736, task: 4. Ctx: { SessionId : ydb://session/3?node_id=4&id=MWViY2UyYmItN2NjMzNmNzAtZmIxZDc0Y2YtZjJmNTc2ODA=. TraceId : 01jqpgzps098npbcd81d722zz1. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. 
Handle undelivered TEvState event, abort execution |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_tests/py3test >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[kesus] [GOOD] |97.5%| [TM] {RESULT} ydb/tests/functional/scheme_tests/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_local_kmeans/unittest >> TTxDataShardLocalKMeansScan::BuildToBuild [GOOD] Test command err: 2025-03-31T16:36:14.906435Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488013379046917944:2215];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:36:14.906666Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00051c/r3tmp/tmp9CXlHP/pdisk_1.dat 2025-03-31T16:36:15.334633Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:36:15.359456Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:36:15.359545Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:36:15.362821Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:36:15.431920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:36:15.477526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:36:15.527219Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:7488013383341885664:2295] 2025-03-31T16:36:15.527536Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:36:15.543135Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:36:15.543212Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:36:15.546618Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:36:15.546681Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:36:15.546738Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:36:15.548627Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:36:15.548715Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:36:15.548756Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:7488013383341885681:2295] in generation 1 2025-03-31T16:36:15.550216Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:36:15.581147Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T16:36:15.582164Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:36:15.582286Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:7488013383341885683:2296] 2025-03-31T16:36:15.582302Z node 1 :TX_DATASHARD DEBUG: Trying to activate 
change sender: at tablet: 72075186224037888 2025-03-31T16:36:15.582317Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T16:36:15.582336Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:36:15.583690Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7488013383341885661:2295], serverId# [1:7488013383341885679:2303], sessionId# [0:0:0] 2025-03-31T16:36:15.583913Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T16:36:15.584014Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T16:36:15.584049Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:36:15.584070Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:36:15.584163Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:36:15.584189Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:36:15.584213Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:36:15.584597Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976710657 ssId 72057594046644480 seqNo 2:1 2025-03-31T16:36:15.584840Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976710657 at tablet 72075186224037888 2025-03-31T16:36:15.586474Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:36:15.586635Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:36:15.586759Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-31T16:36:15.588994Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7488013383341885697:2313], serverId# [1:7488013383341885698:2314], sessionId# [0:0:0] 2025-03-31T16:36:15.602854Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976710657 at step 1743438975635 at tablet 72075186224037888 { Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743438975635 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T16:36:15.602907Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:36:15.603160Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:36:15.603258Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:36:15.603281Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:36:15.603331Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1743438975635:281474976710657] in PlanQueue unit at 72075186224037888 2025-03-31T16:36:15.603617Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1743438975635:281474976710657 keys extracted: 0 2025-03-31T16:36:15.603786Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 
1 2025-03-31T16:36:15.603881Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:36:15.603926Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-31T16:36:15.607143Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-31T16:36:15.608913Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:36:15.610458Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1743438975634 2025-03-31T16:36:15.610494Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:36:15.610534Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1743438975635} 2025-03-31T16:36:15.610609Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:36:15.610655Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:36:15.610673Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:36:15.610712Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-31T16:36:15.610765Z node 1 :TX_DATASHARD DEBUG: Complete [1743438975635 : 281474976710657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:7488013383341885482:2180], exec latency: 4 ms, propose latency: 7 ms 2025-03-31T16:36:15.610789Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976710657 state Ready TxInFly 0 2025-03-31T16:36:15.610925Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:36:15.611019Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1743438975642 2025-03-31T16:36:15.614680Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976710657 datashard 72075186224037888 state Ready 2025-03-31T16:36:15.614752Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-31T16:36:15.631643Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7488013383341885736:2342], serverId# [1:7488013383341885737:2343], sessionId# [0:0:0] 2025-03-31T16:36:15.633114Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:36:15.633244Z node 1 :TX_DATASHARD DEBUG: Prepared Snapshot transaction txId 281474976710658 at tablet 72075186224037888 2025-03-31T16:36:15.634755Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:36:15.636217Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976710658 at step 1743438975684 at tablet 72075186224037888 { Transactions { TxId: 281474976710658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743438975684 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T16:36:15.636250Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:36:15.636371Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute 
at 72075186224037888 2025-03-31T16:36:15.636445Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:36:15.636475Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1743438975684:281474976710658] in PlanQueue unit at 72075186224037888 2025-03-31T16:36:15.636620Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1743438975684:281474976710658 keys extracted: 0 2025-03-31T16:36:15.637077Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:36:15.637556Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1743438975684} 2025-03-31T16:36:15.637604Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:36:15.637636Z node 1 :TX_DATASHARD DEBUG: Complete [1743438975684 : 281474976710658] from 72075186224037888 at tablet 72075186224037888 send result to client [1:7488013383341885731:2338], exec latency: 0 ms, propose latency: 0 ms 2025-03-31T16:36:15.637693Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:36:15.641220Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7488013383341885747:2353], serverId# [1:7488013383341885748:2354], sessionId# [0:0:0] 2025-03-31T16:36:15.642685Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:36:15.642814Z node 1 :TX_DATASHARD DEBUG: Prepared Snapshot transaction txId 281474976710659 at tablet 72075186224037888 2025-03-31T16:36:15.644907Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037 ... 
ARD DEBUG: GetNextActiveOp at 72075186224037903 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:36:37.352340Z node 5 :TX_DATASHARD DEBUG: Found ready operation [1743438997398:281474976715710] in PlanQueue unit at 72075186224037903 2025-03-31T16:36:37.352578Z node 5 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037903 loaded tx from db 1743438997398:281474976715710 keys extracted: 0 2025-03-31T16:36:37.352735Z node 5 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037903 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:36:37.352832Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037903 2025-03-31T16:36:37.352877Z node 5 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037903 tableId# [OwnerId: 72057594046644480, LocalPathId: 21] schema version# 1 2025-03-31T16:36:37.353270Z node 5 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037903 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-31T16:36:37.353375Z node 5 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:36:37.353641Z node 5 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037903 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:36:37.354885Z node 5 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037903 2025-03-31T16:36:37.354932Z node 5 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037903 time 1743438997397 2025-03-31T16:36:37.354946Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037903 2025-03-31T16:36:37.354965Z node 5 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037903 coordinator 72057594046316545 last step 0 next step 1743438997398 2025-03-31T16:36:37.355004Z node 5 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037903 step# 1743438997398} 2025-03-31T16:36:37.355035Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037903 2025-03-31T16:36:37.355067Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037903 2025-03-31T16:36:37.355103Z node 5 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037903 2025-03-31T16:36:37.355130Z node 5 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037903 2025-03-31T16:36:37.355169Z node 5 :TX_DATASHARD DEBUG: Complete [1743438997398 : 281474976715710] from 72075186224037903 at tablet 72075186224037903 send result to client [5:7488013461316329960:2142], exec latency: 0 ms, propose latency: 2 ms 2025-03-31T16:36:37.355199Z node 5 :TX_DATASHARD INFO: 72075186224037903 Sending notify to schemeshard 72057594046644480 txId 281474976715710 state Ready TxInFly 0 2025-03-31T16:36:37.355243Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037903 2025-03-31T16:36:37.355969Z node 5 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715710 datashard 72075186224037903 state Ready 2025-03-31T16:36:37.356004Z node 5 :TX_DATASHARD DEBUG: 72075186224037903 Got TEvSchemaChangedResult from SS at 72075186224037903 2025-03-31T16:36:37.360246Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976715711:0, at schemeshard: 72057594046644480 2025-03-31T16:36:37.362053Z node 5 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037903 2025-03-31T16:36:37.362102Z node 5 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:36:37.366246Z node 5 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037904 actor [5:7488013478496201655:2399] 2025-03-31T16:36:37.366468Z node 5 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:36:37.375501Z node 5 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:36:37.375585Z node 5 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:36:37.377038Z node 5 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037904 2025-03-31T16:36:37.377092Z node 5 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037904 2025-03-31T16:36:37.377128Z node 5 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037904 2025-03-31T16:36:37.377409Z node 5 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:36:37.377464Z node 5 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:36:37.377498Z node 5 :TX_DATASHARD DEBUG: DataShard 72075186224037904 persisting started state actor id [5:7488013478496201673:2399] in generation 1 2025-03-31T16:36:37.378822Z node 5 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:36:37.378864Z node 5 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037904 2025-03-31T16:36:37.378955Z node 5 :TX_DATASHARD DEBUG: 72075186224037904 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:36:37.378991Z node 5 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037904, actorId: [5:7488013478496201675:2400] 2025-03-31T16:36:37.379003Z node 5 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037904 2025-03-31T16:36:37.379016Z node 5 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037904, state: WaitScheme 2025-03-31T16:36:37.379028Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037904 2025-03-31T16:36:37.379161Z node 5 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037904 2025-03-31T16:36:37.379224Z node 5 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037904 2025-03-31T16:36:37.379246Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037904 2025-03-31T16:36:37.379262Z node 5 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037904 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:36:37.379283Z node 5 :TX_DATASHARD INFO: No tx to execute at 72075186224037904 TxInFly 0 2025-03-31T16:36:37.379299Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037904 2025-03-31T16:36:37.379480Z node 5 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037904, clientId# [5:7488013478496201660:4014], serverId# [5:7488013478496201665:4018], sessionId# [0:0:0] 2025-03-31T16:36:37.379610Z node 5 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037904 2025-03-31T16:36:37.379834Z node 5 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037904 txId 281474976715711 ssId 72057594046644480 seqNo 2:31 
2025-03-31T16:36:37.379896Z node 5 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715711 at tablet 72075186224037904 2025-03-31T16:36:37.380304Z node 5 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037904 2025-03-31T16:36:37.381278Z node 5 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037904 2025-03-31T16:36:37.381362Z node 5 :TX_DATASHARD DEBUG: 72075186224037904 not sending time cast registration request in state WaitScheme 2025-03-31T16:36:37.383160Z node 5 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037904, clientId# [5:7488013478496201681:4028], serverId# [5:7488013478496201682:4029], sessionId# [0:0:0] 2025-03-31T16:36:37.383439Z node 5 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715711 at step 1743438997426 at tablet 72075186224037904 { Transactions { TxId: 281474976715711 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743438997426 MediatorID: 72057594046382081 TabletID: 72075186224037904 } 2025-03-31T16:36:37.383464Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037904 2025-03-31T16:36:37.383568Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037904 2025-03-31T16:36:37.383591Z node 5 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037904 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:36:37.383617Z node 5 :TX_DATASHARD DEBUG: Found ready operation [1743438997426:281474976715711] in PlanQueue unit at 72075186224037904 2025-03-31T16:36:37.383865Z node 5 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037904 loaded tx from db 1743438997426:281474976715711 keys extracted: 0 2025-03-31T16:36:37.383983Z node 5 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037904 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:36:37.384073Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037904 2025-03-31T16:36:37.384117Z node 5 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037904 tableId# [OwnerId: 72057594046644480, LocalPathId: 22] schema version# 1 2025-03-31T16:36:37.384512Z node 5 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037904 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-31T16:36:37.384630Z node 5 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:36:37.384729Z node 5 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037903 2025-03-31T16:36:37.384854Z node 5 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037904 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:36:37.386187Z node 5 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037904 2025-03-31T16:36:37.386239Z node 5 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037904 time 1743438997425 2025-03-31T16:36:37.386253Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037904 2025-03-31T16:36:37.386272Z node 5 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037904 coordinator 72057594046316545 last step 0 next step 1743438997426 2025-03-31T16:36:37.386324Z node 5 
:TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037904 step# 1743438997426} 2025-03-31T16:36:37.386360Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037904 2025-03-31T16:36:37.386398Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037904 2025-03-31T16:36:37.386422Z node 5 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037904 2025-03-31T16:36:37.386446Z node 5 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037904 2025-03-31T16:36:37.386490Z node 5 :TX_DATASHARD DEBUG: Complete [1743438997426 : 281474976715711] from 72075186224037904 at tablet 72075186224037904 send result to client [5:7488013461316329960:2142], exec latency: 0 ms, propose latency: 2 ms 2025-03-31T16:36:37.386523Z node 5 :TX_DATASHARD INFO: 72075186224037904 Sending notify to schemeshard 72057594046644480 txId 281474976715711 state Ready TxInFly 0 2025-03-31T16:36:37.386564Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037904 2025-03-31T16:36:37.387460Z node 5 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715711 datashard 72075186224037904 state Ready 2025-03-31T16:36:37.387506Z node 5 :TX_DATASHARD DEBUG: 72075186224037904 Got TEvSchemaChangedResult from SS at 72075186224037904 |97.5%| [TM] {RESULT} ydb/core/tx/datashard/ut_local_kmeans/unittest >> TSequence::CreateTableWithDefaultFromSequence >> test_inserts.py::TestYdbInsertsOperations::test_concurrent_inserts [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_transactional_update >> TGRpcRateLimiterTest::UpdateResource [GOOD] >> ReadUpdateWrite::Load >> TGRpcRateLimiterTest::DropResource >> TDqPqRdReadActorTests::CoordinatorChanged [GOOD] >> test.py::test[solomon-BadDownsamplingFill-] [GOOD] >> test.py::test[solomon-BadDownsamplingInterval-] >> TTxDataShardTestInit::TestResolvePathAfterRestart [GOOD] >> TDqPqRdReadActorTests::Backpressure >> TabletService_ExecuteMiniKQL::ParamsMiniKQLRead [GOOD] >> TabletService_ExecuteMiniKQL::MalformedParams >> test_postgres.py::TestPostgresSuite::test_postgres_suite[numeric] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[comments] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_init/unittest >> TTxDataShardTestInit::TestResolvePathAfterRestart [GOOD] Test command err: 2025-03-31T16:36:31.582968Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:102:2136], Recipient [1:108:2140]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:36:31.600792Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:102:2136], Recipient [1:108:2140]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:36:31.605372Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:108:2140] 2025-03-31T16:36:31.606708Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:36:31.667463Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:102:2136], Recipient [1:108:2140]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:36:31.678403Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:36:31.678517Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:36:31.682674Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-03-31T16:36:31.682776Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-03-31T16:36:31.682822Z node 1 :TX_DATASHARD DEBUG: 
LoadChangeRecordCommits at tablet: 9437184 2025-03-31T16:36:31.684167Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:36:31.684290Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:36:31.684377Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:131:2140] in generation 2 2025-03-31T16:36:31.709282Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:36:31.746857Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-03-31T16:36:31.750103Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:36:31.750325Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:136:2160] 2025-03-31T16:36:31.750370Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-03-31T16:36:31.750415Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-03-31T16:36:31.750457Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:36:31.750622Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:108:2140], Recipient [1:108:2140]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:36:31.751353Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:36:31.752303Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-03-31T16:36:31.752430Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-03-31T16:36:31.752490Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-31T16:36:31.752548Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:36:31.752603Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-03-31T16:36:31.752666Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-31T16:36:31.752706Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-31T16:36:31.752742Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-03-31T16:36:31.752809Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:36:31.755015Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269551617, Sender [1:99:2134], Recipient [1:108:2140]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 99 RawX2: 4294969430 } 2025-03-31T16:36:31.755106Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvGetShardState 2025-03-31T16:36:34.712210Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:101:2147], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:36:34.712863Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:36:34.712984Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0005ad/r3tmp/tmpEmWcvi/pdisk_1.dat 2025-03-31T16:36:35.191684Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:36:35.237413Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:36:35.283113Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:36:35.284042Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:36:35.296802Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:36:35.391194Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:36:35.440274Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:667:2571] 2025-03-31T16:36:35.440480Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:36:35.476018Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:36:35.476126Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:36:35.477373Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:36:35.477445Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:36:35.477502Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:36:35.477764Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:36:35.477855Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:36:35.477942Z node 2 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [2:683:2571] in generation 1 2025-03-31T16:36:35.488728Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:36:35.488818Z node 2 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T16:36:35.488949Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:36:35.489042Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [2:685:2581] 2025-03-31T16:36:35.489079Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:36:35.489122Z node 2 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T16:36:35.489170Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:36:35.489572Z node 2 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T16:36:35.489683Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T16:36:35.489800Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:36:35.489841Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:36:35.489878Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:36:35.489924Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:36:35.491342Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:663:2568], serverId# [2:674:2575], sessionId# [0:0:0] 2025-03-31T16:36:35.491836Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:36:35.493016Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T16:36:35.493839Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T16:36:35.495949Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:36:35.506705Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:36:35.506819Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-31T16:36:35.656378Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:703:2593], serverId# [2:705:2595], sessionId# [0:0:0] 2025-03-31T16:36:35.661708Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T16:36:35.661806Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:36:35.662357Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:36:35.662399Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:36:35.662447Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-31T16:36:35.662632Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-31T16:36:35.662802Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:36:35.663391Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:36:35.663494Z node 2 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-31T16:36:35.666007Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-31T16:36:35.667558Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-03-31T16:36:35.668838Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-03-31T16:36:35.668879Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:36:35.670347Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-03-31T16:36:35.670439Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:36:35.670922Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:36:35.670961Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:36:35.671018Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037 ... :39.244499Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:36:39.244545Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:36:39.244851Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:662:2567], serverId# [3:673:2574], sessionId# [0:0:0] 2025-03-31T16:36:39.245083Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:36:39.245203Z node 3 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T16:36:39.245265Z node 3 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T16:36:39.246687Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:36:39.257411Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:36:39.257502Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-31T16:36:39.406319Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:703:2593], serverId# [3:705:2595], sessionId# [0:0:0] 2025-03-31T16:36:39.406706Z node 3 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T16:36:39.406748Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:36:39.407377Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:36:39.407417Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:36:39.407452Z node 3 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-31T16:36:39.407651Z node 3 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-31T16:36:39.407767Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:36:39.408422Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:36:39.408482Z node 3 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, 
LocalPathId: 2] schema version# 1 2025-03-31T16:36:39.408761Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-31T16:36:39.409078Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:36:39.410129Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-03-31T16:36:39.410158Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:36:39.410741Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-03-31T16:36:39.410782Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:36:39.411798Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:36:39.411837Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:36:39.411867Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-31T16:36:39.411917Z node 3 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-31T16:36:39.411956Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-31T16:36:39.412033Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:36:39.412797Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:36:39.414334Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-03-31T16:36:39.414388Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-31T16:36:39.415068Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-03-31T16:36:39.418485Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037888 2025-03-31T16:36:39.419674Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037888 2025-03-31T16:36:39.462195Z node 3 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [3:742:2622] 2025-03-31T16:36:39.462435Z node 3 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:36:39.467004Z node 3 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:36:39.467940Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:36:39.469866Z node 3 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:36:39.469946Z node 3 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:36:39.469999Z node 3 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:36:39.470355Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:36:39.470608Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:36:39.470680Z node 3 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id 
[3:757:2622] in generation 2 2025-03-31T16:36:39.492052Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:36:39.492162Z node 3 :TX_DATASHARD INFO: Switched to work state Ready tabletId 72075186224037888 2025-03-31T16:36:39.492273Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-31T16:36:39.492371Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-31T16:36:39.492438Z node 3 :TX_DATASHARD DEBUG: Resolve path at 72075186224037888: reason# empty path 2025-03-31T16:36:39.492529Z node 3 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [3:761:2632] 2025-03-31T16:36:39.492556Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:36:39.492607Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-31T16:36:39.492642Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:36:39.492862Z node 3 :TX_DATASHARD DEBUG: TxInitSchemaDefaults.Execute 2025-03-31T16:36:39.493019Z node 3 :TX_DATASHARD DEBUG: TxInitSchemaDefaults.Complete 2025-03-31T16:36:39.493723Z node 3 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T16:36:39.493814Z node 3 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T16:36:39.494034Z node 3 :FLAT_TX_SCHEMESHARD WARN: Got TEvDataShard::TEvSchemaChanged for unknown txId 281474976715657 message# Source { RawX1: 742 RawX2: 12884904510 } Origin: 72075186224037888 State: 2 TxId: 281474976715657 Step: 0 Generation: 2 2025-03-31T16:36:39.494107Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1000 2025-03-31T16:36:39.494143Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:36:39.494640Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:36:39.494751Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:36:39.494792Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:36:39.494824Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:36:39.494852Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:36:39.495104Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-03-31T16:36:39.521634Z node 3 :TX_DATASHARD DEBUG: Got scheme resolve result at 72075186224037888: Status: StatusSuccess Path: "/Root/table-1" PathDescription { Self { Name: "table-1" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1000 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: 
"table-1" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046644480 2025-03-31T16:36:39.521949Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-03-31T16:36:39.522039Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-31T16:36:39.522307Z node 3 :TX_DATASHARD DEBUG: TTxStoreTablePath::Execute at 72075186224037888 2025-03-31T16:36:39.524205Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:765:2636], serverId# [3:767:2637], sessionId# [0:0:0] 2025-03-31T16:36:39.537183Z node 3 :TX_DATASHARD DEBUG: TTxStoreTablePath::Complete at 72075186224037888 |97.5%| [TM] {RESULT} ydb/core/tx/datashard/ut_init/unittest >> test_tpch_import.py::TestS3TpchImport::test_import_and_export >> KqpSinkTx::OlapExplicitTcl [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[comments] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[text] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[text] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[strings] >> ExternalIndex::Simple [GOOD] >> NodeWardenDsProxyConfigRetrieval::Disconnect >> TTestYqlToMiniKQLCompile::CheckResolve >> TTestYqlToMiniKQLCompile::CheckResolve [GOOD] >> TTestYqlToMiniKQLCompile::OnlyResult >> AnalyzeColumnshard::AnalyzeRebootSaBeforeSave [GOOD] >> TTestYqlToMiniKQLCompile::OnlyResult [GOOD] >> TTestYqlToMiniKQLCompile::EraseRow >> TTestYqlToMiniKQLCompile::EraseRow [GOOD] >> TTestYqlToMiniKQLCompile::UpdateRow >> TTestYqlToMiniKQLCompile::UpdateRow [GOOD] >> TTestYqlToMiniKQLCompile::SelectRow [GOOD] >> TTestYqlToMiniKQLCompile::SelectRange >> TTestYqlToMiniKQLCompile::SelectRange [GOOD] >> TTestYqlToMiniKQLCompile::SimpleCrossShardTx ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::OlapExplicitTcl [GOOD] Test command err: Trying to start YDB, gRPC: 12095, MsgBus: 3871 2025-03-31T16:36:04.589602Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488013335572866824:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:36:04.589861Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00129d/r3tmp/tmpn8SzFC/pdisk_1.dat 2025-03-31T16:36:04.867233Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12095, node 1 2025-03-31T16:36:04.945633Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:36:04.945664Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:36:04.945672Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:36:04.945810Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:36:04.969010Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:36:04.969130Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:36:04.970931Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3871 TClient is connected to server localhost:3871 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:36:05.388977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:36:07.224834Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013348457769361:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:07.224968Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013348457769380:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:07.225026Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:07.228712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:36:07.238424Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488013348457769390:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:36:07.318112Z node 1 :TX_PROXY ERROR: Actor# [1:7488013348457769442:2336] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:36:07.676818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T16:36:07.806446Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488013348457769640:2347];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:36:07.806625Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488013348457769640:2347];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:36:07.806887Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488013348457769640:2347];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:36:07.807004Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488013348457769640:2347];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:36:07.807299Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488013348457769640:2347];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:36:07.807479Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488013348457769640:2347];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:36:07.807599Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488013348457769640:2347];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:36:07.807704Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488013348457769640:2347];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:36:07.807823Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488013348457769640:2347];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:36:07.807937Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488013348457769640:2347];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:36:07.808048Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7488013348457769640:2347];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:36:07.808187Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488013348457769640:2347];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:36:07.811928Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7488013348457769647:2349];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:36:07.812003Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7488013348457769647:2349];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:36:07.812262Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7488013348457769647:2349];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:36:07.812436Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7488013348457769647:2349];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:36:07.812550Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7488013348457769647:2349];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:36:07.812687Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7488013348457769647:2349];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:36:07.812828Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7488013348457769647:2349];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:36:07.812979Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7488013348457769647:2349];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:36:07.813122Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7488013348457769647:2349];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:36:07.813274Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7488013348457769647:2349];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:36:07.813406Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7488013348457769647:2349];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:36:07.813520Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037897;self_id=[1:7488013348457769647:2349];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:36:07.817862Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:36:07.817965Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:36:07.818133Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME ... NSHARD WARN: tablet_id=72075186224038027;self_id=[2:7488013448192824112:3282];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038027;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:35.082685Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038027;self_id=[2:7488013448192824112:3282];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038027;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:35.085178Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038081;self_id=[2:7488013448192823384:3132];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038081;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:35.085400Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038081;self_id=[2:7488013448192823384:3132];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038081;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:35.085591Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038022;self_id=[2:7488013448192824158:3289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038022;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:35.085794Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038022;self_id=[2:7488013448192824158:3289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038022;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:35.088205Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038012;self_id=[2:7488013448192824369:3309];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038012;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:35.088390Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038012;self_id=[2:7488013448192824369:3309];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038012;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:35.092458Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038076;self_id=[2:7488013448192823248:3115];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038076;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:35.092715Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038076;self_id=[2:7488013448192823248:3115];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038076;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:35.092969Z node 2 
:TX_COLUMNSHARD WARN: tablet_id=72075186224038095;self_id=[2:7488013448192823233:3104];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038095;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:35.093113Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038095;self_id=[2:7488013448192823233:3104];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038095;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:35.093263Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038008;self_id=[2:7488013448192824259:3297];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038008;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:35.093444Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038008;self_id=[2:7488013448192824259:3297];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038008;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:35.093622Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038019;self_id=[2:7488013448192824239:3294];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038019;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:35.093790Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038019;self_id=[2:7488013448192824239:3294];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038019;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:35.094684Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038005;self_id=[2:7488013448192824290:3305];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038005;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:35.094869Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038005;self_id=[2:7488013448192824290:3305];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038005;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:35.095001Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038087;self_id=[2:7488013448192823430:3135];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038087;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:35.095164Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038087;self_id=[2:7488013448192823430:3135];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038087;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:35.096450Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038002;self_id=[2:7488013448192823439:3140];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038002;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:35.096639Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038002;self_id=[2:7488013448192823439:3140];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038002;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:35.099806Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038000;self_id=[2:7488013448192823134:3079];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038000;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:35.100021Z 
node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038000;self_id=[2:7488013448192823134:3079];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038000;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:35.104516Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038023;self_id=[2:7488013448192824171:3292];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038023;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:35.104741Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038023;self_id=[2:7488013448192824171:3292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038023;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:35.104873Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038010;self_id=[2:7488013448192824270:3298];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038010;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:35.104984Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038010;self_id=[2:7488013448192824270:3298];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038010;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:35.105117Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038057;self_id=[2:7488013448192823841:3258];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038057;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:35.105311Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038057;self_id=[2:7488013448192823841:3258];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038057;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:35.105351Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038073;self_id=[2:7488013448192823771:3242];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038073;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:35.105479Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038073;self_id=[2:7488013448192823771:3242];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038073;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:35.105678Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038006;self_id=[2:7488013448192823117:3066];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038006;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:35.105820Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038006;self_id=[2:7488013448192823117:3066];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038006;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:35.109283Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038065;self_id=[2:7488013448192823742:3223];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038065;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:35.109489Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038065;self_id=[2:7488013448192823742:3223];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038065;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:35.110952Z 
node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038013;self_id=[2:7488013448192824288:3304];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038013;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:35.111180Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038013;self_id=[2:7488013448192824288:3304];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038013;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:35.116576Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038033;self_id=[2:7488013448192824306:3307];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038033;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:35.116841Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038033;self_id=[2:7488013448192824306:3307];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038033;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:35.117912Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038056;self_id=[2:7488013448192823751:3230];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038056;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:35.118088Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038056;self_id=[2:7488013448192823751:3230];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038056;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T16:36:35.273845Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MjI5YWQ4MTYtN2QyZDg1MTUtZmU2ZTcyZGQtODNjNmFkZjE=, ActorId: [2:7488013461077729141:3886], ActorState: ReadyState, TraceId: 01jqpgzqsx196v6nfy34y68nw4, Create QueryResponse for error on request, msg: WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2025-03-31T16:36:37.076953Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:36:37.076985Z node 2 :IMPORT WARN: Table profiles were not loaded WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> TTestYqlToMiniKQLCompile::SimpleCrossShardTx [GOOD] >> TTestYqlToMiniKQLCompile::AcquireLocks >> TTestYqlToMiniKQLCompile::AcquireLocks [GOOD] >> TTestYqlToMiniKQLCompile::StaticMapTypeOf [GOOD] >> TTestYqlToMiniKQLCompile::SelectRangeAtomInRange >> SequenceShardTests::Basics >> TTestYqlToMiniKQLCompile::SelectRangeAtomInRange [GOOD] >> TTestYqlToMiniKQLCompile::Extract >> NodeWardenDsProxyConfigRetrieval::Disconnect [GOOD] >> TGRpcRateLimiterTest::DropResource [GOOD] >> TGRpcRateLimiterTest::DescribeResource >> TTestYqlToMiniKQLCompile::Extract [GOOD] >> DataShardBackgroundCompaction::ShouldCompact >> TabletService_ExecuteMiniKQL::MalformedParams [GOOD] >> TabletService_ExecuteMiniKQL::MalformedProgram ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut_sequence/unittest >> NodeWardenDsProxyConfigRetrieval::Disconnect [GOOD] Test command err: Caught NodeWarden registration actorId# [1:11:2058] 2025-03-31T16:36:42.921963Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-03-31T16:36:42.951424Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:/home/runner/.ya/build/build_root/0l5c/000549/r3tmp/tmpCf59Xc/static.dat" PDiskGuid: 7844685429528324456 PDiskCategory: 0 PDiskConfig 
{ ExpectedSlotCount: 2 } } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 1 PDiskGuid: 7844685429528324456 } VDiskKind: Default } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 1 PDiskGuid: 7844685429528324456 } } } } AvailabilityDomains: 0 } 2025-03-31T16:36:42.953875Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:196} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "SectorMap:/home/runner/.ya/build/build_root/0l5c/000549/r3tmp/tmpCf59Xc/static.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-03-31T16:36:42.962223Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-03-31T16:36:42.967917Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:1 PDiskGuid# 7844685429528324456 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-03-31T16:36:42.969358Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:265} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:1 PDiskGuid# 7844685429528324456 2025-03-31T16:36:42.969419Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-03-31T16:36:42.971213Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:29:2075] ControllerId# 72057594037932033 2025-03-31T16:36:42.971261Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-03-31T16:36:42.972169Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295 2025-03-31T16:36:42.972673Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler 2025-03-31T16:36:42.992007Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-03-31T16:36:42.998330Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-03-31T16:36:43.027760Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-03-31T16:36:43.027839Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-03-31T16:36:43.036886Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-03-31T16:36:43.036947Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-03-31T16:36:43.044404Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-03-31T16:36:43.049489Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-03-31T16:36:43.051251Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2025-03-31T16:36:43.056061Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:/home/runner/.ya/build/build_root/0l5c/000549/r3tmp/tmpCf59Xc/static.dat" PDiskGuid: 7844685429528324456 PDiskCategory: 0 
PDiskConfig { ExpectedSlotCount: 2 } } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 1 PDiskGuid: 7844685429528324456 } VDiskKind: Default } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 1 PDiskGuid: 7844685429528324456 } } } } AvailabilityDomains: 0 } 2025-03-31T16:36:43.056323Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 268639248 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2025-03-31T16:36:43.111481Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 2 2025-03-31T16:36:43.111544Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-03-31T16:36:43.114950Z node 1 :BS_NODE DEBUG: {NWDC18@distconf_binding.cpp:322} UpdateBound RefererNodeId# 1 NodeId# ::1:12001/1 Meta# {Fingerprint: " \317,\035(\315\223{\274S\377D\345\0072\264i\234%W" } 2025-03-31T16:36:43.115323Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435072 StorageConfigLoaded# true NodeListObtained# true PendingEvents.size# 2 2025-03-31T16:36:43.115377Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:317} StateFunc Type# 2146435075 Sender# [1:47:2090] SessionId# [0:0:0] Cookie# 0 2025-03-31T16:36:43.115439Z node 1 :BS_NODE DEBUG: {NWDC36@distconf_persistent_storage.cpp:205} TEvStorageConfigStored NumOk# 0 NumError# 0 Passed# 0.078336s 2025-03-31T16:36:43.115804Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435072 StorageConfigLoaded# true NodeListObtained# true PendingEvents.size# 1 2025-03-31T16:36:43.115848Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:317} StateFunc Type# 268639248 Sender# [1:11:2058] SessionId# [0:0:0] Cookie# 0 2025-03-31T16:36:43.122534Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:1133} Handle(TEvStatusUpdate) 2025-03-31T16:36:43.132095Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:1133} Handle(TEvStatusUpdate) 2025-03-31T16:36:43.165138Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-03-31T16:36:43.199991Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-03-31T16:36:43.204184Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-03-31T16:36:43.204795Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:36:43.206935Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-03-31T16:36:43.214532Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-03-31T16:36:43.218731Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:497} Handle TEvInterconnect::TEvNodesInfo 2025-03-31T16:36:43.219176Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-03-31T16:36:43.235596Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-03-31T16:36:43.235870Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-03-31T16:36:43.236898Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx 
IncompatibleData# false 2025-03-31T16:36:43.237041Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-03-31T16:36:43.237133Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-03-31T16:36:43.237348Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-03-31T16:36:43.273853Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-03-31T16:36:43.274051Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-03-31T16:36:43.287797Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-03-31T16:36:43.287949Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-03-31T16:36:43.288026Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-03-31T16:36:43.288098Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-03-31T16:36:43.288247Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-03-31T16:36:43.288322Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-03-31T16:36:43.288368Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-03-31T16:36:43.288424Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-03-31T16:36:43.300698Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-03-31T16:36:43.300843Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-03-31T16:36:43.313204Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-03-31T16:36:43.313360Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-03-31T16:36:43.316939Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:546} TTxLoadEverything Complete 2025-03-31T16:36:43.317031Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2167} LoadFinished 2025-03-31T16:36:43.332410Z node 1 :BS_CONTROLLER DEBUG: 
{BSC18@console_interaction.cpp:31} Console connection service started 2025-03-31T16:36:43.332489Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:551} TTxLoadEverything InitQueue processed Pipe connected clientId# [1:29:2075] 2025-03-31T16:36:43.333318Z node 1 :BS_NODE DEBUG: {NW05@node_warden_pipe.cpp:52} TEvTabletPipe::TEvClientConnected OK ClientId# [1:29:2075] ServerId# [1:124:2146] TabletId# 72057594037932033 PipeClientId# [1:29:2075] 2025-03-31T16:36:43.333905Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 1 PDiskGuid: 7844685429528324456 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2025-03-31T16:36:43.335188Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "SectorMap:/home/runner/.ya/build/build_root/0l5c/000549/r3tmp/tmpCf59Xc/static.dat" PDiskConfig { ExpectedSlotCount: 2 } } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 ErasureSpecies: "none" VDiskKind: "Default" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } } 2025-03-31T16:36:43.338152Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 1:1 Path# SectorMap:/home/runner/.ya/build/build_root/0l5c/000549/r3tmp/tmpCf59Xc/static.dat 2025-03-31T16:36:43.347185Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1 } State: Initial Replicated: false DiskSpace: Green } } 2025-03-31T16:36:43.347352Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1 } } 2025-03-31T16:36:43.347444Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1 } 2025-03-31T16:36:43.347728Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 1 PDiskGuid: 7844685429528324456 Status: REPLICATING OnlyPhantomsRemain: false } } 2025-03-31T16:36:43.347862Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 1 PDiskGuid: 7844685429528324456 Status: READY OnlyPhantomsRemain: false } } 2025-03-31T16:36:43.349007Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:1133} Handle(TEvStatusUpdate) 2025-03-31T16:36:43.350810Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:1133} Handle(TEvStatusUpdate) 2025-03-31T16:36:43.350935Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1 } Success: true } 2025-03-31T16:36:43.351157Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:1133} Handle(TEvStatusUpdate) 2025-03-31T16:36:43.351261Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} 
Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1 } } 2025-03-31T16:36:43.365367Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] === Waiting for pipe to establish === === Breaking pipe === === Sending put === Pipe disconnected clientId# [1:29:2075] 2025-03-31T16:36:43.366280Z node 1 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [1:29:2075] ServerId# [1:124:2146] TabletId# 72057594037932033 PipeClientId# [1:29:2075] 2025-03-31T16:36:43.366379Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:139:2159] ControllerId# 72057594037932033 2025-03-31T16:36:43.366413Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-03-31T16:36:43.366873Z node 1 :BS_NODE DEBUG: {NW46@node_warden_proxy.cpp:127} HandleForwarded GroupId# 2147483648 EnableProxyMock# false NoGroup# false 2025-03-31T16:36:43.366914Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 2147483648 2025-03-31T16:36:43.366955Z node 1 :BS_NODE DEBUG: {NW98@node_warden_group.cpp:265} RequestGroupConfig GroupId# 2147483648 2025-03-31T16:36:43.367250Z node 1 :BS_NODE INFO: {NW79@node_warden_group_resolver.cpp:74} TGroupResolverActor::Bootstrap GroupId# 2147483648 Pipe connected clientId# [1:139:2159] 2025-03-31T16:36:43.367540Z node 1 :BS_NODE DEBUG: {NW05@node_warden_pipe.cpp:52} TEvTabletPipe::TEvClientConnected OK ClientId# [1:139:2159] ServerId# [1:142:2161] TabletId# 72057594037932033 PipeClientId# [1:139:2159] 2025-03-31T16:36:43.367751Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 1 PDiskGuid: 7844685429528324456 Status: READY OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2025-03-31T16:36:43.368081Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1 } Success: true } 2025-03-31T16:36:43.368257Z node 1 :BS_CONTROLLER DEBUG: {BSCTXGG02@get_group.cpp:58} TEvControllerGetGroup Sender# [1:11:2058] Cookie# 0 Recipient# [1:142:2161] RecipientRewrite# [1:90:2122] Request# {NodeID: 1 GroupIDs: 2147483648 } StopGivingGroups# false 2025-03-31T16:36:43.368335Z node 1 :BS_CONTROLLER DEBUG: {BSCTXGG01@get_group.cpp:22} Handle TEvControllerGetGroup Request# {NodeID: 1 GroupIDs: 2147483648 } 2025-03-31T16:36:43.368455Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1 } } 2025-03-31T16:36:43.368574Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } SatisfactionRank: 0 VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1 } State: OK Replicated: true DiskSpace: Green IsThrottling: false ThrottlingRate: 0 } } 2025-03-31T16:36:43.381635Z node 1 :BS_NODE DEBUG: {NW17@node_warden_impl.cpp:792} Handle(TEvBlobStorage::TEvControllerNodeServiceSetUpdate) Msg# {Status: OK NodeID: 1 ServiceSet { PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:/home/runner/.ya/build/build_root/0l5c/000549/r3tmp/tmpCf59Xc/static.dat" PDiskGuid: 7844685429528324456 PDiskCategory: 0 PDiskConfig { ExpectedSlotCount: 2 } 
EntityStatus: INITIAL ExpectedSerial: "" ManagementStage: DISCOVER_SERIAL SpaceColorBorder: GREEN } VDisks { VDiskID { GroupID: 2147483648 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 1000 PDiskGuid: 7844685429528324456 } VDiskKind: Default StoragePoolName: "" } Groups { GroupID: 2147483648 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 1000 PDiskGuid: 7844685429528324456 } } } EncryptionMode: 0 LifeCyclePhase: 0 MainKeyId: "" EncryptedGroupKey: "" GroupKeyNonce: 2147483648 MainKeyVersion: 0 StoragePoolName: "" DeviceType: ROT } } InstanceId: "f75a6e43-2c2f38c5-17dbcac5-2626194" Comprehensive: true AvailDomain: 0 } 2025-03-31T16:36:43.381931Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# false Comprehensive# true Origin# controller ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:/home/runner/.ya/build/build_root/0l5c/000549/r3tmp/tmpCf59Xc/static.dat" PDiskGuid: 7844685429528324456 PDiskCategory: 0 PDiskConfig { ExpectedSlotCount: 2 } EntityStatus: INITIAL ExpectedSerial: "" ManagementStage: DISCOVER_SERIAL SpaceColorBorder: GREEN } VDisks { VDiskID { GroupID: 2147483648 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 1000 PDiskGuid: 7844685429528324456 } VDiskKind: Default StoragePoolName: "" } Groups { GroupID: 2147483648 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 1000 PDiskGuid: 7844685429528324456 } } } EncryptionMode: 0 LifeCyclePhase: 0 MainKeyId: "" EncryptedGroupKey: "" GroupKeyNonce: 2147483648 MainKeyVersion: 0 StoragePoolName: "" DeviceType: ROT } } 2025-03-31T16:36:43.382162Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [80000000:1:0:0:0] VSlotId# 1:1:1000 PDiskGuid# 7844685429528324456 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-03-31T16:36:43.383070Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:265} StartLocalVDiskActor done VDiskId# [80000000:1:0:0:0] VSlotId# 1:1:1000 PDiskGuid# 7844685429528324456 2025-03-31T16:36:43.383377Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2025-03-31T16:36:43.388635Z node 1 :BS_NODE DEBUG: {NW17@node_warden_impl.cpp:792} Handle(TEvBlobStorage::TEvControllerNodeServiceSetUpdate) Msg# {Status: OK NodeID: 1 ServiceSet { Groups { GroupID: 2147483648 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 1000 PDiskGuid: 7844685429528324456 } } } EncryptionMode: 0 LifeCyclePhase: 0 MainKeyId: "" EncryptedGroupKey: "" GroupKeyNonce: 2147483648 MainKeyVersion: 0 StoragePoolName: "" DeviceType: ROT } } } 2025-03-31T16:36:43.388768Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# false Comprehensive# false Origin# controller ServiceSet# {Groups { GroupID: 2147483648 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 1000 PDiskGuid: 7844685429528324456 } } } EncryptionMode: 0 LifeCyclePhase: 0 MainKeyId: "" EncryptedGroupKey: "" GroupKeyNonce: 2147483648 MainKeyVersion: 0 StoragePoolName: "" DeviceType: ROT } } 2025-03-31T16:36:43.390704Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 
2147483648 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 1000 PDiskGuid: 7844685429528324456 Status: INIT_PENDING OnlyPhantomsRemain: false } } 2025-03-31T16:36:43.391056Z node 1 :BS_NODE INFO: {NW81@node_warden_group_resolver.cpp:270} TGroupResolverActor::PassAway GroupId# 2147483648 2025-03-31T16:36:43.393240Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 2147483648 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } State: Initial Replicated: false DiskSpace: Green } } 2025-03-31T16:36:43.398243Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } } 2025-03-31T16:36:43.401734Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:1133} Handle(TEvStatusUpdate) 2025-03-31T16:36:43.402381Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 2147483648 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 1000 PDiskGuid: 7844685429528324456 Status: REPLICATING OnlyPhantomsRemain: false } } 2025-03-31T16:36:43.403507Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:1133} Handle(TEvStatusUpdate) 2025-03-31T16:36:43.404731Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 2147483648 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 1000 PDiskGuid: 7844685429528324456 Status: READY OnlyPhantomsRemain: false } } |97.5%| [TM] {RESULT} ydb/core/blobstorage/nodewarden/ut_sequence/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootSaBeforeSave [GOOD] Test command err: 2025-03-31T16:34:27.173096Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:34:27.173373Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:34:27.173445Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000e94/r3tmp/tmpIFGJpi/pdisk_1.dat 2025-03-31T16:34:27.603339Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 65309, node 1 2025-03-31T16:34:27.862989Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:27.863047Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:27.863073Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:27.865353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:34:27.865823Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:34:27.960525Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:27.960709Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:27.975799Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2508 2025-03-31T16:34:28.536789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:34:32.204705Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-31T16:34:32.246134Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:32.246271Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:32.276573Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:34:32.278946Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:34:32.525331Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:34:32.526143Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:34:32.526826Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:34:32.526995Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:34:32.527351Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:34:32.527469Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:34:32.527586Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:34:32.527709Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:34:32.527841Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:34:32.698940Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:32.699068Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:32.716946Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:34:32.880844Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:34:32.936800Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-31T16:34:32.936912Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-31T16:34:32.978451Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-31T16:34:32.979472Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-31T16:34:32.979710Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-31T16:34:32.979768Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-31T16:34:32.979842Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-31T16:34:32.979905Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-31T16:34:32.979959Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-31T16:34:32.980016Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-31T16:34:32.980596Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-31T16:34:33.009078Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-31T16:34:33.009214Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1872:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-31T16:34:33.016692Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2608] 2025-03-31T16:34:33.024036Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1908:2617] 2025-03-31T16:34:33.024788Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1908:2617], schemeshard id = 72075186224037897 2025-03-31T16:34:33.027237Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-31T16:34:33.053222Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-31T16:34:33.053294Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-31T16:34:33.053368Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-31T16:34:33.069831Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-31T16:34:33.120794Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-31T16:34:33.120967Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-31T16:34:33.308104Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-31T16:34:33.483198Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-31T16:34:33.576870Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-31T16:34:34.837697Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2241:3074], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:34.837858Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:34.860983Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-31T16:34:35.042719Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:34:35.042967Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:34:35.043336Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:34:35.043514Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:34:35.043651Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:34:35.043779Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:34:35.043922Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:34:35.044074Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:34:35.044201Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:34:35.044329Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:34:35.044458Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:34:35.044586Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:34:35.075929Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:34:35.076033Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;descr ... DEBUG: ConnectToSA(), pipe client id: [2:7260:5318], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-31T16:36:39.592444Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-31T16:36:39.592522Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-31T16:36:39.593001Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-31T16:36:39.593569Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-31T16:36:39.593882Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded database: /Root/Database 2025-03-31T16:36:39.593930Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal start key 2025-03-31T16:36:39.593961Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal table owner id: 72075186224037897 2025-03-31T16:36:39.593986Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal table local path id: 4 2025-03-31T16:36:39.594011Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal start time: 1743438999514733 2025-03-31T16:36:39.594036Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal IsColumnTable: 1 2025-03-31T16:36:39.594062Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded global traversal round: 2 2025-03-31T16:36:39.594138Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 1 2025-03-31T16:36:39.594219Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-31T16:36:39.594290Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 2 2025-03-31T16:36:39.594356Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 1 2025-03-31T16:36:39.594441Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 1 2025-03-31T16:36:39.594500Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-31T16:36:39.594624Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete. Start navigate. PathId [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-31T16:36:39.595314Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-31T16:36:39.596124Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-31T16:36:39.596177Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-31T16:36:39.596268Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-31T16:36:39.597516Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-31T16:36:39.597586Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-31T16:36:39.599583Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-03-31T16:36:39.663367Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-03-31T16:36:39.663578Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-03-31T16:36:39.664225Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7307:5349], server id = [2:7308:5350], tablet id = 72075186224037899, status = OK 2025-03-31T16:36:39.664338Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7307:5349], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T16:36:39.665710Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-03-31T16:36:39.665844Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-03-31T16:36:39.666042Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-03-31T16:36:39.666238Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-03-31T16:36:39.666499Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-31T16:36:39.669431Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7307:5349], server id = [2:7308:5350], tablet id = 72075186224037899 2025-03-31T16:36:39.669481Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:36:39.670050Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-03-31T16:36:39.708211Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7326:5368]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:36:39.708457Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-31T16:36:39.708514Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7326:5368], StatRequests.size() = 1 2025-03-31T16:36:39.845615Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZGI5MWNlYmQtZGQ2M2UzY2YtNzMxOThkNGUtMjE1NzhiOQ==, TxId: 2025-03-31T16:36:39.845687Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZGI5MWNlYmQtZGQ2M2UzY2YtNzMxOThkNGUtMjE1NzhiOQ==, TxId: 2025-03-31T16:36:39.846289Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-31T16:36:39.859403Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7341:5374] 2025-03-31T16:36:39.859647Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7259:5317], server id = [2:7341:5374], tablet id = 72075186224037894, status = OK 2025-03-31T16:36:39.859733Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:7341:5374], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2025-03-31T16:36:39.859925Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7342:5375] 2025-03-31T16:36:39.860032Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:7342:5375], schemeshard id = 72075186224037897 
2025-03-31T16:36:39.873761Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-31T16:36:39.873829Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-03-31T16:36:39.952734Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7353:5378] 2025-03-31T16:36:39.953512Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:2804:3223] , Record { OperationId: "operationId" Tables { PathId { OwnerId: 72075186224037897 LocalId: 4 } } Types: TYPE_COUNT_MIN_SKETCH } 2025-03-31T16:36:39.953566Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. Update existing force traversal. OperationId operationId , ReplyToActorId [1:2804:3223] 2025-03-31T16:36:39.953633Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Complete 2025-03-31T16:36:40.278706Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 0 2025-03-31T16:36:40.278798Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-03-31T16:36:40.289498Z node 2 :STATISTICS DEBUG: Event round 3 is different from the current 0 2025-03-31T16:36:40.289575Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-03-31T16:36:40.856256Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-31T16:36:40.856343Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-03-31T16:36:40.856386Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-03-31T16:36:41.723966Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:36:41.724112Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-03-31T16:36:41.724161Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-31T16:36:41.724802Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-31T16:36:41.738451Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-31T16:36:41.738800Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-31T16:36:41.738860Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-31T16:36:41.739330Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-03-31T16:36:41.753130Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-03-31T16:36:41.753340Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 4, current Round: 0 2025-03-31T16:36:41.753981Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7425:5421], server id = [2:7426:5422], tablet id = 72075186224037899, status = OK 2025-03-31T16:36:41.754100Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7425:5421], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T16:36:41.755588Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-03-31T16:36:41.755699Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-03-31T16:36:41.755900Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-03-31T16:36:41.756105Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-03-31T16:36:41.756408Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-31T16:36:41.759007Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7425:5421], server id = [2:7426:5422], tablet id = 72075186224037899 2025-03-31T16:36:41.759047Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:36:41.759524Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List<Uint32>; DECLARE $data AS List<String>; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-03-31T16:36:41.792848Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=M2Q4ZWI0ODMtNmE4YTkxMGMtZTkwYmU4MzktODhmZjllNWU=, TxId: 2025-03-31T16:36:41.792925Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=M2Q4ZWI0ODMtNmE4YTkxMGMtZTkwYmU4MzktODhmZjllNWU=, TxId: 2025-03-31T16:36:41.793488Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-31T16:36:41.806897Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-31T16:36:41.806966Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2804:3223] |97.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/client/minikql_compile/ut/unittest >> TTestYqlToMiniKQLCompile::Extract [GOOD] |97.5%| [TS] {RESULT} ydb/core/client/minikql_compile/ut/unittest >> DataShardFollowers::FollowerKeepsWorkingAfterMvccReadTable [GOOD] >> DataShardFollowers::FollowerStaleRo >> SequenceShardTests::Basics [GOOD] >> SequenceShardTests::MarkedPipeRetries ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> ExternalIndex::Simple [GOOD] Test command err: 2025-03-31T16:32:08.016692Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:100:2146], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:32:08.017320Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:32:08.017426Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:32:08.017459Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/cs_index/external;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000c08/r3tmp/tmpzXrBbe/pdisk_1.dat TServer::EnableGrpc on GrpcPort 11649, node 1 TClient is connected to server localhost:20551 2025-03-31T16:32:08.916545Z node 1 :TX_PROXY DEBUG: actor# [1:60:2107] Handle TEvGetProxyServicesRequest 2025-03-31T16:32:08.916821Z node 1 :TX_PROXY DEBUG: actor# [1:60:2107] Handle TEvGetProxyServicesRequest 2025-03-31T16:32:08.926452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:32:08.983778Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:32:08.992115Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:32:08.992184Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:32:08.992217Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:32:08.992603Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:32:09.028826Z node 1 :TX_PROXY DEBUG: actor# [1:60:2107] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-31T16:32:09.030147Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-03-31T16:32:09.030385Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:09.030988Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:32:09.042532Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:32:09.159815Z node 1 :TX_PROXY DEBUG: actor# [1:60:2107] Handle TEvProposeTransaction 2025-03-31T16:32:09.159898Z node 1 :TX_PROXY DEBUG: actor# [1:60:2107] TxId# 281474976715657 ProcessProposeTransaction 2025-03-31T16:32:09.161450Z node 1 :TX_PROXY DEBUG: actor# [1:60:2107] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:678:2571] 2025-03-31T16:32:09.249890Z node 1 :TX_PROXY DEBUG: Actor# [1:678:2571] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateColumnStore CreateColumnStore { Name: "olapStore" ColumnShardCount: 4 SchemaPresets { Name: "default" Schema { Columns { Name: "timestamp" Type: "Timestamp" NotNull: true } Columns { Name: "resource_id" Type: "Utf8" DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Name: "uid" Type: "Utf8" NotNull: true StorageId: "__MEMORY" } Columns { Name: "level" Type: "Int32"
} Columns { Name: "message" Type: "Utf8" StorageId: "__MEMORY" } Columns { Name: "json_payload" Type: "JsonDocument" } KeyColumnNames: "timestamp" KeyColumnNames: "uid" } } } } } ExecTimeoutPeriod: 18446744073709551615 2025-03-31T16:32:09.250007Z node 1 :TX_PROXY DEBUG: Actor# [1:678:2571] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T16:32:09.250682Z node 1 :TX_PROXY DEBUG: Actor# [1:678:2571] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-03-31T16:32:09.250784Z node 1 :TX_PROXY DEBUG: Actor# [1:678:2571] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:32:09.251121Z node 1 :TX_PROXY DEBUG: Actor# [1:678:2571] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:32:09.251423Z node 1 :TX_PROXY DEBUG: Actor# [1:678:2571] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T16:32:09.251508Z node 1 :TX_PROXY DEBUG: Actor# [1:678:2571] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-03-31T16:32:09.257611Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:32:09.258753Z node 1 :TX_PROXY DEBUG: Actor# [1:678:2571] txid# 281474976715657 HANDLE EvClientConnected 2025-03-31T16:32:09.260375Z node 1 :TX_PROXY DEBUG: Actor# [1:678:2571] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-03-31T16:32:09.260450Z node 1 :TX_PROXY DEBUG: Actor# [1:678:2571] txid# 281474976715657 SEND to# [1:677:2570] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} Status: 53 TxId: 281474976715657 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-03-31T16:32:09.367809Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:752:2632];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T16:32:09.393852Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:752:2632];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T16:32:09.394227Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037888 2025-03-31T16:32:09.403302Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:752:2632];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:32:09.403560Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:752:2632];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:32:09.403866Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:752:2632];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:32:09.404003Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:752:2632];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:32:09.404127Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:752:2632];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:32:09.404286Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:752:2632];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:32:09.404434Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:752:2632];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:32:09.404555Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:752:2632];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:32:09.404710Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:752:2632];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:32:09.404838Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:752:2632];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:32:09.404975Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:752:2632];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:32:09.405108Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:752:2632];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:32:09.429546Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 72075186224037888 2025-03-31T16:32:09.430638Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T16:32:09.430717Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T16:32:09.430940Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:32:09.431149Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:32:09.431242Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T16:32:09.431294Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T16:32:09.431384Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T16:32:09.431457Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T16:32:09.431503Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T16:32:09.431532Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T16:32:09.431725Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T16:32:09.431810Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute ... ToFlow $15) $3) (lambda '($16 $17 $18) (AsStruct '('"componentId" $16) '('"instant" $17) '('"modificationId" $18)))))) '('('"_logical_id" '351) '('"_id" '"1ebd6a55-a59a8ef6-6543701b-532c7e94")))) (let $11 (DqCnResult (TDqOutput $10 '"0") '())) (return (KqpPhysicalTx '($8 $10) '($11) '() '('('"type" '"data")))) ) 2025-03-31T16:36:41.642942Z node 1 :KQP_YQL TRACE: TraceId: 01jqpgzxtybavdz4j40eedt3cc, SessionId: CompileActor 2025-03-31 16:36:41.641 TRACE ydb-services-ext_index-ut(pid=607649, tid=0x00007F42408F1D00) [KQP] kqp_transform.cpp:33: PhysicalPeepholeTransformer: ( (let $1 (KqpTable '"//Root/.metadata/initialization/migrations" '"72057594046644480:6" '"" '1)) (let $2 '('"componentId" '"instant" '"modificationId")) (let $3 (Uint64 '"1001")) (let $4 (KqpRowsSourceSettings $1 $2 '('('"ItemsLimit" $3) '('"Sequential" '1)) (Void) '())) (let $5 (OptionalType (DataType 'Utf8))) (let $6 (StructType '('"componentId" $5) '('"instant" (OptionalType (DataType 'Uint32))) '('"modificationId" $5))) (let $7 '('('"_logical_id" '338) '('"_id" '"8ddb8f06-2c3afb0-ab21ae4b-2f5e29cb") '('"_wide_channels" $6))) (let $8 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $4)) (lambda '($12) (block '( (let $13 (lambda '($14) (Member $14 '"componentId") (Member $14 '"instant") (Member $14 '"modificationId"))) (return (FromFlow (ExpandMap (Take (ToFlow $12) $3) $13))) ))) $7)) (let $9 (DqCnUnionAll (TDqOutput $8 '"0"))) (let $10 (DqPhyStage '($9) (lambda '($15) (FromFlow (NarrowMap (Take (ToFlow $15) $3) (lambda '($16 $17 $18) (AsStruct '('"componentId" $16) '('"instant" $17) '('"modificationId" $18)))))) '('('"_logical_id" '351) '('"_id" '"1ebd6a55-a59a8ef6-6543701b-532c7e94")))) (let $11 (DqCnResult (TDqOutput $10 '"0") '())) (return (KqpPhysicalQuery '((KqpPhysicalTx '($8 $10) '($11) '() '('('"type" '"data")))) '((KqpTxResultBinding (ListType $6) '"0" '"0")) '('('"type" '"data_query")))) ) 2025-03-31T16:36:41.656104Z node 1 :KQP_YQL INFO: TraceId: 01jqpgzxtybavdz4j40eedt3cc, SessionId: CompileActor 2025-03-31 16:36:41.656 INFO ydb-services-ext_index-ut(pid=607649, tid=0x00007F42408F1D00) [core exec] yql_execution.cpp:466: Register async execution for node #268 2025-03-31T16:36:41.656251Z node 1 :KQP_YQL TRACE: TraceId: 01jqpgzxtybavdz4j40eedt3cc, 
SessionId: CompileActor 2025-03-31 16:36:41.656 TRACE ydb-services-ext_index-ut(pid=607649, tid=0x00007F42408F1D00) [core exec] yql_execution.cpp:387: {3}, callable #277 2025-03-31T16:36:41.656355Z node 1 :KQP_YQL INFO: TraceId: 01jqpgzxtybavdz4j40eedt3cc, SessionId: CompileActor 2025-03-31 16:36:41.656 INFO ydb-services-ext_index-ut(pid=607649, tid=0x00007F42408F1D00) [core exec] yql_execution.cpp:577: Node #277 finished execution 2025-03-31T16:36:41.656422Z node 1 :KQP_YQL INFO: TraceId: 01jqpgzxtybavdz4j40eedt3cc, SessionId: CompileActor 2025-03-31 16:36:41.656 INFO ydb-services-ext_index-ut(pid=607649, tid=0x00007F42408F1D00) [core exec] yql_execution.cpp:594: Node #277 created 0 trackable nodes: 2025-03-31T16:36:41.656505Z node 1 :KQP_YQL INFO: TraceId: 01jqpgzxtybavdz4j40eedt3cc, SessionId: CompileActor 2025-03-31 16:36:41.656 INFO ydb-services-ext_index-ut(pid=607649, tid=0x00007F42408F1D00) [core exec] yql_execution.cpp:87: Finish, output #280, status: Async 2025-03-31T16:36:41.657135Z node 1 :KQP_YQL INFO: TraceId: 01jqpgzxtybavdz4j40eedt3cc, SessionId: CompileActor 2025-03-31 16:36:41.657 INFO ydb-services-ext_index-ut(pid=607649, tid=0x00007F42408F1D00) [core exec] yql_execution.cpp:133: Completed async execution for node #268 2025-03-31T16:36:41.657219Z node 1 :KQP_YQL INFO: TraceId: 01jqpgzxtybavdz4j40eedt3cc, SessionId: CompileActor 2025-03-31 16:36:41.657 INFO ydb-services-ext_index-ut(pid=607649, tid=0x00007F42408F1D00) [core exec] yql_execution.cpp:153: State is ExecutionRequired after apply async changes for node #268 2025-03-31T16:36:41.657297Z node 1 :KQP_YQL INFO: TraceId: 01jqpgzxtybavdz4j40eedt3cc, SessionId: CompileActor 2025-03-31 16:36:41.657 INFO ydb-services-ext_index-ut(pid=607649, tid=0x00007F42408F1D00) [core exec] yql_execution.cpp:59: Begin, root #280 2025-03-31T16:36:41.657360Z node 1 :KQP_YQL INFO: TraceId: 01jqpgzxtybavdz4j40eedt3cc, SessionId: CompileActor 2025-03-31 16:36:41.657 INFO ydb-services-ext_index-ut(pid=607649, tid=0x00007F42408F1D00) [core exec] yql_execution.cpp:72: Collect unused nodes for root #280, status: Ok 2025-03-31T16:36:41.657417Z node 1 :KQP_YQL TRACE: TraceId: 01jqpgzxtybavdz4j40eedt3cc, SessionId: CompileActor 2025-03-31 16:36:41.657 TRACE ydb-services-ext_index-ut(pid=607649, tid=0x00007F42408F1D00) [core exec] yql_execution.cpp:387: {0}, callable #280 2025-03-31T16:36:41.657478Z node 1 :KQP_YQL TRACE: TraceId: 01jqpgzxtybavdz4j40eedt3cc, SessionId: CompileActor 2025-03-31 16:36:41.657 TRACE ydb-services-ext_index-ut(pid=607649, tid=0x00007F42408F1D00) [core exec] yql_execution.cpp:387: {1}, callable #279 2025-03-31T16:36:41.657540Z node 1 :KQP_YQL TRACE: TraceId: 01jqpgzxtybavdz4j40eedt3cc, SessionId: CompileActor 2025-03-31 16:36:41.657 TRACE ydb-services-ext_index-ut(pid=607649, tid=0x00007F42408F1D00) [core exec] yql_execution.cpp:387: {2}, callable #278 2025-03-31T16:36:41.657637Z node 1 :KQP_YQL TRACE: TraceId: 01jqpgzxtybavdz4j40eedt3cc, SessionId: CompileActor 2025-03-31 16:36:41.657 TRACE ydb-services-ext_index-ut(pid=607649, tid=0x00007F42408F1D00) [core exec] yql_execution.cpp:387: {3}, callable #275 2025-03-31T16:36:41.657696Z node 1 :KQP_YQL TRACE: TraceId: 01jqpgzxtybavdz4j40eedt3cc, SessionId: CompileActor 2025-03-31 16:36:41.657 TRACE ydb-services-ext_index-ut(pid=607649, tid=0x00007F42408F1D00) [core exec] yql_execution.cpp:387: {4}, callable #268 2025-03-31T16:36:41.657876Z node 1 :KQP_YQL INFO: TraceId: 01jqpgzxtybavdz4j40eedt3cc, SessionId: CompileActor 2025-03-31 16:36:41.657 INFO 
ydb-services-ext_index-ut(pid=607649, tid=0x00007F42408F1D00) [core exec] yql_execution.cpp:577: Node #268 finished execution 2025-03-31T16:36:41.657949Z node 1 :KQP_YQL INFO: TraceId: 01jqpgzxtybavdz4j40eedt3cc, SessionId: CompileActor 2025-03-31 16:36:41.657 INFO ydb-services-ext_index-ut(pid=607649, tid=0x00007F42408F1D00) [core exec] yql_execution.cpp:594: Node #268 created 0 trackable nodes: 2025-03-31T16:36:41.658011Z node 1 :KQP_YQL TRACE: TraceId: 01jqpgzxtybavdz4j40eedt3cc, SessionId: CompileActor 2025-03-31 16:36:41.657 TRACE ydb-services-ext_index-ut(pid=607649, tid=0x00007F42408F1D00) [core exec] yql_execution.cpp:387: {3}, callable #275 2025-03-31T16:36:41.658069Z node 1 :KQP_YQL INFO: TraceId: 01jqpgzxtybavdz4j40eedt3cc, SessionId: CompileActor 2025-03-31 16:36:41.658 INFO ydb-services-ext_index-ut(pid=607649, tid=0x00007F42408F1D00) [core exec] yql_execution.cpp:577: Node #275 finished execution 2025-03-31T16:36:41.658140Z node 1 :KQP_YQL TRACE: TraceId: 01jqpgzxtybavdz4j40eedt3cc, SessionId: CompileActor 2025-03-31 16:36:41.658 TRACE ydb-services-ext_index-ut(pid=607649, tid=0x00007F42408F1D00) [core exec] yql_execution.cpp:387: {2}, callable #278 2025-03-31T16:36:41.658405Z node 1 :KQP_YQL INFO: TraceId: 01jqpgzxtybavdz4j40eedt3cc, SessionId: CompileActor 2025-03-31 16:36:41.658 INFO ydb-services-ext_index-ut(pid=607649, tid=0x00007F42408F1D00) [core exec] yql_execution.cpp:577: Node #278 finished execution 2025-03-31T16:36:41.658490Z node 1 :KQP_YQL INFO: TraceId: 01jqpgzxtybavdz4j40eedt3cc, SessionId: CompileActor 2025-03-31 16:36:41.658 INFO ydb-services-ext_index-ut(pid=607649, tid=0x00007F42408F1D00) [core exec] yql_execution.cpp:594: Node #278 created 0 trackable nodes: 2025-03-31T16:36:41.658562Z node 1 :KQP_YQL TRACE: TraceId: 01jqpgzxtybavdz4j40eedt3cc, SessionId: CompileActor 2025-03-31 16:36:41.658 TRACE ydb-services-ext_index-ut(pid=607649, tid=0x00007F42408F1D00) [core exec] yql_execution.cpp:387: {1}, callable #279 2025-03-31T16:36:41.658650Z node 1 :KQP_YQL INFO: TraceId: 01jqpgzxtybavdz4j40eedt3cc, SessionId: CompileActor 2025-03-31 16:36:41.658 INFO ydb-services-ext_index-ut(pid=607649, tid=0x00007F42408F1D00) [core exec] yql_execution.cpp:577: Node #279 finished execution 2025-03-31T16:36:41.658716Z node 1 :KQP_YQL INFO: TraceId: 01jqpgzxtybavdz4j40eedt3cc, SessionId: CompileActor 2025-03-31 16:36:41.658 INFO ydb-services-ext_index-ut(pid=607649, tid=0x00007F42408F1D00) [core exec] yql_execution.cpp:594: Node #279 created 0 trackable nodes: 2025-03-31T16:36:41.658796Z node 1 :KQP_YQL TRACE: TraceId: 01jqpgzxtybavdz4j40eedt3cc, SessionId: CompileActor 2025-03-31 16:36:41.658 TRACE ydb-services-ext_index-ut(pid=607649, tid=0x00007F42408F1D00) [core exec] yql_execution.cpp:387: {0}, callable #280 2025-03-31T16:36:41.658872Z node 1 :KQP_YQL INFO: TraceId: 01jqpgzxtybavdz4j40eedt3cc, SessionId: CompileActor 2025-03-31 16:36:41.658 INFO ydb-services-ext_index-ut(pid=607649, tid=0x00007F42408F1D00) [core exec] yql_execution.cpp:577: Node #280 finished execution 2025-03-31T16:36:41.658931Z node 1 :KQP_YQL INFO: TraceId: 01jqpgzxtybavdz4j40eedt3cc, SessionId: CompileActor 2025-03-31 16:36:41.658 INFO ydb-services-ext_index-ut(pid=607649, tid=0x00007F42408F1D00) [core exec] yql_execution.cpp:594: Node #280 created 0 trackable nodes: 2025-03-31T16:36:41.658993Z node 1 :KQP_YQL INFO: TraceId: 01jqpgzxtybavdz4j40eedt3cc, SessionId: CompileActor 2025-03-31 16:36:41.658 INFO ydb-services-ext_index-ut(pid=607649, tid=0x00007F42408F1D00) [core exec] yql_execution.cpp:87: 
Finish, output #280, status: Ok 2025-03-31T16:36:41.659051Z node 1 :KQP_YQL INFO: TraceId: 01jqpgzxtybavdz4j40eedt3cc, SessionId: CompileActor 2025-03-31 16:36:41.659 INFO ydb-services-ext_index-ut(pid=607649, tid=0x00007F42408F1D00) [core exec] yql_execution.cpp:93: Creating finalizing transformer, output #280 2025-03-31T16:36:41.678711Z node 1 :TX_PROXY DEBUG: actor# [1:60:2107] Handle TEvExecuteKqpTransaction 2025-03-31T16:36:41.678777Z node 1 :TX_PROXY DEBUG: actor# [1:60:2107] TxId# 281474976716232 ProcessProposeKqpTransaction 2025-03-31T16:36:41.688547Z node 1 :TX_PROXY DEBUG: actor# [1:60:2107] Handle TEvExecuteKqpTransaction 2025-03-31T16:36:41.688634Z node 1 :TX_PROXY DEBUG: actor# [1:60:2107] TxId# 281474976716233 ProcessProposeKqpTransaction 2025-03-31T16:36:41.856150Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[1:752:2632];fline=actor.cpp:33;event=skip_flush_writing; 2025-03-31T16:36:41.856428Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;parent=[1:755:2635];fline=actor.cpp:33;event=skip_flush_writing; 2025-03-31T16:36:41.856497Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;parent=[1:759:2638];fline=actor.cpp:33;event=skip_flush_writing; 2025-03-31T16:36:41.856550Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;parent=[1:764:2641];fline=actor.cpp:33;event=skip_flush_writing; 2025-03-31T16:36:41.869109Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:752:2632];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2025-03-31T16:36:41.869268Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:755:2635];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037889; 2025-03-31T16:36:41.869374Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:759:2638];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037891; 2025-03-31T16:36:41.869475Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:764:2641];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037890; REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 >> test.py::test[solomon-BadDownsamplingInterval-] [GOOD] >> test.py::test[solomon-Basic-default.txt] |97.5%| [TA] $(B)/ydb/core/kqp/ut/tx/test-results/unittest/{meta.json ... results_accumulator.log} >> LongTxService::BasicTransactions >> SequenceShardTests::MarkedPipeRetries [GOOD] >> SequenceShardTests::FreezeRestoreRedirect >> ConfigGRPCService::ReplaceConfig >> TSentinelTests::PDiskErrorState [GOOD] >> TSentinelTests::PDiskFaultyState >> SequenceShardTests::FreezeRestoreRedirect [GOOD] >> TSequence::CreateTableWithDefaultFromSequence [GOOD] >> TSequence::SequencesIndex >> SequenceShardTests::NegativeIncrement |97.5%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/tx/test-results/unittest/{meta.json ... 
results_accumulator.log} >> SequenceShardTests::NegativeIncrement [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_transactional_update [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert >> test_alloc_default.py::TestAlloc::test_default_limits[kikimr0] >> LongTxService::BasicTransactions [GOOD] >> LongTxService::AcquireSnapshot ------- [TS] {asan, default-linux-x86_64, release} ydb/core/tx/sequenceshard/ut/unittest >> SequenceShardTests::NegativeIncrement [GOOD] Test command err: 2025-03-31T16:36:44.213433Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] OnActivateExecutor 2025-03-31T16:36:44.213643Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInitSchema.Execute 2025-03-31T16:36:44.229623Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInit.Execute 2025-03-31T16:36:44.233244Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInitSchema.Complete 2025-03-31T16:36:44.233330Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInit.Complete 2025-03-31T16:36:44.240877Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxCreateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Record# PathId { OwnerId: 123 LocalId: 42 } 2025-03-31T16:36:44.242162Z node 1 :SEQUENCESHARD NOTICE: [sequenceshard 72057594037927937] TTxCreateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] MinValue# 1 MaxValue# 9223372036854775807 StartValue# 1 Cache# 1 Increment# 1 Cycle# false State# Active 2025-03-31T16:36:44.265491Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxCreateSequence.Complete 2025-03-31T16:36:44.265876Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxCreateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Record# PathId { OwnerId: 123 LocalId: 42 } 2025-03-31T16:36:44.265920Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxCreateSequence.Execute SEQUENCE_ALREADY_EXISTS PathId# [OwnerId: 123, LocalPathId: 42] 2025-03-31T16:36:44.265980Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxCreateSequence.Complete 2025-03-31T16:36:44.266246Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxCreateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Record# PathId { OwnerId: 123 LocalId: 51 } StartValue: 100001 Cache: 10 2025-03-31T16:36:44.266407Z node 1 :SEQUENCESHARD NOTICE: [sequenceshard 72057594037927937] TTxCreateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] MinValue# 1 MaxValue# 9223372036854775807 StartValue# 100001 Cache# 10 Increment# 1 Cycle# false State# Active 2025-03-31T16:36:44.282552Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxCreateSequence.Complete 2025-03-31T16:36:44.282938Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2025-03-31T16:36:44.283118Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# 1 AllocationCount# 1 AllocationIncrement# 1 2025-03-31T16:36:44.295369Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-03-31T16:36:44.295733Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 10 2025-03-31T16:36:44.295826Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 
72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# 2 AllocationCount# 10 AllocationIncrement# 1 2025-03-31T16:36:44.308383Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-03-31T16:36:44.308785Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Cache# 0 2025-03-31T16:36:44.308887Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] AllocationStart# 100001 AllocationCount# 10 AllocationIncrement# 1 2025-03-31T16:36:44.326011Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-03-31T16:36:44.326500Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Cache# 50 2025-03-31T16:36:44.326601Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] AllocationStart# 100011 AllocationCount# 50 AllocationIncrement# 1 2025-03-31T16:36:44.341072Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-03-31T16:36:44.341531Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 99] Cache# 0 2025-03-31T16:36:44.341587Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SEQUENCE_NOT_FOUND PathId# [OwnerId: 123, LocalPathId: 99] 2025-03-31T16:36:44.341651Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-03-31T16:36:44.341888Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 18446744073709551615 2025-03-31T16:36:44.341989Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# 12 AllocationCount# 9223372036854775796 AllocationIncrement# 1 2025-03-31T16:36:44.354285Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-03-31T16:36:44.354694Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 1 2025-03-31T16:36:44.354760Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SEQUENCE_OVERFLOW PathId# [OwnerId: 123, LocalPathId: 42] 2025-03-31T16:36:44.354850Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-03-31T16:36:44.355166Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxDropSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] 2025-03-31T16:36:44.355262Z node 1 :SEQUENCESHARD NOTICE: [sequenceshard 72057594037927937] TTxDropSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] 2025-03-31T16:36:44.367921Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxDropSequence.Complete 2025-03-31T16:36:44.368342Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxDropSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] 2025-03-31T16:36:44.368395Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxDropSequence.Execute SEQUENCE_NOT_FOUND PathId# [OwnerId: 123, 
LocalPathId: 42] 2025-03-31T16:36:44.368463Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxDropSequence.Complete 2025-03-31T16:36:44.386062Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] OnActivateExecutor 2025-03-31T16:36:44.386162Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInitSchema.Execute 2025-03-31T16:36:44.386742Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInitSchema.Complete 2025-03-31T16:36:44.387376Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInit.Execute 2025-03-31T16:36:44.388721Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInit.Complete 2025-03-31T16:36:44.396493Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2025-03-31T16:36:44.396592Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SEQUENCE_NOT_FOUND PathId# [OwnerId: 123, LocalPathId: 42] 2025-03-31T16:36:44.396667Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-03-31T16:36:44.396950Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Cache# 0 2025-03-31T16:36:44.397052Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] AllocationStart# 100061 AllocationCount# 10 AllocationIncrement# 1 2025-03-31T16:36:44.409484Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-03-31T16:36:44.410038Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxUpdateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Record# PathId { OwnerId: 123 LocalId: 51 } NextValue: 200000 NextUsed: true 2025-03-31T16:36:44.410147Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxUpdateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] 2025-03-31T16:36:44.422512Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxUpdateSequence.Complete 2025-03-31T16:36:44.422986Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Cache# 0 2025-03-31T16:36:44.423108Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] AllocationStart# 200001 AllocationCount# 10 AllocationIncrement# 1 2025-03-31T16:36:44.456179Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-03-31T16:36:44.456701Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxUpdateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Record# PathId { OwnerId: 123 LocalId: 51 } Cache: 5 2025-03-31T16:36:44.456798Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxUpdateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] 2025-03-31T16:36:44.479419Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxUpdateSequence.Complete 2025-03-31T16:36:44.479916Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Cache# 0 2025-03-31T16:36:44.479996Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] 
AllocationStart# 200011 AllocationCount# 5 AllocationIncrement# 1 2025-03-31T16:36:44.502843Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-03-31T16:36:44.503414Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxGetSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] 2025-03-31T16:36:44.503475Z node 1 :SEQUENCESHARD NOTICE: [sequenceshard 72057594037927937] TTxGetSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] 2025-03-31T16:36:44.503560Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxGetSequence.Complete 2025-03-31T16:36:44.897201Z node 2 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] OnActivateExecutor 2025-03-31T16:36:44.897299Z node 2 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInitSchema.Execute 2025-03-31T16:36:44.909539Z node 2 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInit.Execute 2025-03-31T16:36:44.912456Z node 2 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInitSchema.Complete 2025-03-31T16:36:44.912528Z node 2 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInit.Complete 2025-03-31T16:36:44.914923Z node 2 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxMarkSchemeShardPipe.Execute SchemeShardId# 123 Generation# 1 Round# 1 2025-03-31T16:36:44.915209Z node 2 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxCreateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Record# PathId { OwnerId: 123 LocalId: 42 } 2025-03-31T16:36:44.915307Z node 2 :SEQUENCESHARD NOTICE: [sequenceshard 72057594037927937] TTxCreateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] MinValue# 1 MaxValue# 9223372036854775807 StartValue# 1 Cache# 1 Increment# 1 Cycle# false State# Active 2025-03-31T16:36:44.940559Z node 2 :SEQUENCESHARD TRACE: [sequenceshard 7205 ... 
ENCESHARD TRACE: [sequenceshard 72057594037927937] TTxFreezeSequence.Complete 2025-03-31T16:36:45.398661Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxFreezeSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] 2025-03-31T16:36:45.398751Z node 3 :SEQUENCESHARD NOTICE: [sequenceshard 72057594037927937] TTxFreezeSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] 2025-03-31T16:36:45.410820Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxFreezeSequence.Complete 2025-03-31T16:36:45.411222Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 10 2025-03-31T16:36:45.411278Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SEQUENCE_FROZEN PathId# [OwnerId: 123, LocalPathId: 42] 2025-03-31T16:36:45.411374Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-03-31T16:36:45.411726Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRestoreSequence.Execute PathId# [OwnerId: 123, LocalPathId: 43] Record# PathId { OwnerId: 123 LocalId: 43 } MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 NextValue: 11 Cache: 100 Increment: 1 2025-03-31T16:36:45.411830Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRestoreSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 43] 2025-03-31T16:36:45.424069Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRestoreSequence.Complete 2025-03-31T16:36:45.424454Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 43] Cache# 0 2025-03-31T16:36:45.424546Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 43] AllocationStart# 11 AllocationCount# 100 AllocationIncrement# 1 2025-03-31T16:36:45.436632Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-03-31T16:36:45.437017Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRestoreSequence.Execute PathId# [OwnerId: 123, LocalPathId: 43] Record# PathId { OwnerId: 123 LocalId: 43 } MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 NextValue: 11 Cache: 100 Increment: 1 2025-03-31T16:36:45.437063Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRestoreSequence.Execute SEQUENCE_ALREADY_ACTIVE PathId# [OwnerId: 123, LocalPathId: 43] 2025-03-31T16:36:45.437123Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRestoreSequence.Complete 2025-03-31T16:36:45.437382Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] RedirectTo# 12345 2025-03-31T16:36:45.437488Z node 3 :SEQUENCESHARD NOTICE: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] RedirectTo# 12345 2025-03-31T16:36:45.449738Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRedirectSequence.Complete 2025-03-31T16:36:45.450095Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] RedirectTo# 12345 2025-03-31T16:36:45.450181Z node 3 :SEQUENCESHARD NOTICE: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] RedirectTo# 12345 2025-03-31T16:36:45.462265Z 
node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRedirectSequence.Complete 2025-03-31T16:36:45.462615Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] RedirectTo# 12345 2025-03-31T16:36:45.462694Z node 3 :SEQUENCESHARD NOTICE: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] RedirectTo# 12345 2025-03-31T16:36:45.474813Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRedirectSequence.Complete 2025-03-31T16:36:45.475194Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2025-03-31T16:36:45.475258Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SEQUENCE_MOVED PathId# [OwnerId: 123, LocalPathId: 42] MovedTo# 12345 2025-03-31T16:36:45.475327Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-03-31T16:36:45.475660Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxFreezeSequence.Execute PathId# [OwnerId: 123, LocalPathId: 43] 2025-03-31T16:36:45.475764Z node 3 :SEQUENCESHARD NOTICE: [sequenceshard 72057594037927937] TTxFreezeSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 43] 2025-03-31T16:36:45.487896Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxFreezeSequence.Complete 2025-03-31T16:36:45.488387Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRestoreSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Record# PathId { OwnerId: 123 LocalId: 42 } MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 NextValue: 111 Cache: 100 Increment: 1 2025-03-31T16:36:45.488546Z node 3 :SEQUENCESHARD NOTICE: [sequenceshard 72057594037927937] TTxRestoreSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] Record# PathId { OwnerId: 123 LocalId: 42 } MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 NextValue: 111 Cache: 100 Increment: 1 2025-03-31T16:36:45.500561Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRestoreSequence.Complete 2025-03-31T16:36:45.500970Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute PathId# [OwnerId: 123, LocalPathId: 43] RedirectTo# 54321 2025-03-31T16:36:45.501055Z node 3 :SEQUENCESHARD NOTICE: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 43] RedirectTo# 54321 2025-03-31T16:36:45.513820Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRedirectSequence.Complete 2025-03-31T16:36:45.514194Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxFreezeSequence.Execute PathId# [OwnerId: 123, LocalPathId: 43] 2025-03-31T16:36:45.514247Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxFreezeSequence.Execute SEQUENCE_MOVED PathId# [OwnerId: 123, LocalPathId: 43] MovedTo# 54321 2025-03-31T16:36:45.514311Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxFreezeSequence.Complete 2025-03-31T16:36:45.514607Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2025-03-31T16:36:45.514693Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# 111 AllocationCount# 100 
AllocationIncrement# 1 2025-03-31T16:36:45.527852Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-03-31T16:36:45.956779Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] OnActivateExecutor 2025-03-31T16:36:45.956885Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInitSchema.Execute 2025-03-31T16:36:45.966940Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInit.Execute 2025-03-31T16:36:45.970202Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInitSchema.Complete 2025-03-31T16:36:45.970262Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInit.Complete 2025-03-31T16:36:45.972268Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxCreateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Record# PathId { OwnerId: 123 LocalId: 42 } Cache: 10 Increment: -1 2025-03-31T16:36:45.972402Z node 4 :SEQUENCESHARD NOTICE: [sequenceshard 72057594037927937] TTxCreateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] MinValue# -9223372036854775808 MaxValue# -1 StartValue# -1 Cache# 10 Increment# -1 Cycle# false State# Active 2025-03-31T16:36:45.995511Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxCreateSequence.Complete 2025-03-31T16:36:45.995918Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2025-03-31T16:36:45.996034Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# -1 AllocationCount# 10 AllocationIncrement# -1 2025-03-31T16:36:46.011937Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-03-31T16:36:46.012339Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2025-03-31T16:36:46.012440Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# -11 AllocationCount# 10 AllocationIncrement# -1 2025-03-31T16:36:46.031725Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-03-31T16:36:46.032084Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 18446744073709551615 2025-03-31T16:36:46.032171Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# -21 AllocationCount# 9223372036854775788 AllocationIncrement# -1 2025-03-31T16:36:46.047758Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-03-31T16:36:46.048181Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 1 2025-03-31T16:36:46.048254Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SEQUENCE_OVERFLOW PathId# [OwnerId: 123, LocalPathId: 42] 2025-03-31T16:36:46.048322Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-03-31T16:36:46.048706Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxUpdateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Record# PathId { 
OwnerId: 123 LocalId: 42 } Cycle: true 2025-03-31T16:36:46.048820Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxUpdateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] 2025-03-31T16:36:46.061389Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxUpdateSequence.Complete 2025-03-31T16:36:46.061695Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2025-03-31T16:36:46.061775Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# -1 AllocationCount# 10 AllocationIncrement# -1 2025-03-31T16:36:46.074070Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-03-31T16:36:46.074482Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2025-03-31T16:36:46.074594Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# -11 AllocationCount# 10 AllocationIncrement# -1 2025-03-31T16:36:46.086903Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete |97.5%| [TS] {RESULT} ydb/core/tx/sequenceshard/ut/unittest |97.5%| [TA] $(B)/ydb/services/ext_index/ut/test-results/unittest/{meta.json ... results_accumulator.log} |97.5%| [TA] {RESULT} $(B)/ydb/services/ext_index/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TabletService_ExecuteMiniKQL::MalformedProgram [GOOD] >> TabletService_ExecuteMiniKQL::DryRunEraseRow >> TGRpcRateLimiterTest::DescribeResource [GOOD] >> TGRpcRateLimiterTest::ListResources >> LongTxService::AcquireSnapshot [GOOD] >> LongTxService::LockSubscribe >> ConfigGRPCService::ReplaceConfig [GOOD] >> ConfigGRPCService::FetchConfig >> TIndexProcesorTests::TestCreateIndexProcessor >> DataShardFollowers::FollowerStaleRo [GOOD] >> DataShardFollowers::FollowerRebootAfterSysCompaction >> DataShardBackgroundCompaction::ShouldCompact [GOOD] >> DataShardBackgroundCompaction::ShouldNotCompactWhenBorrowed >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_same_values >> test.py::test[solomon-Basic-default.txt] [GOOD] >> test.py::test[solomon-Downsampling-default.txt] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[strings] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[boolean] >> SequenceProxy::Basics >> LongTxService::LockSubscribe [GOOD] >> TopicSessionTests::TwoSessionsWithoutOffsets [GOOD] >> TSentinelTests::PDiskFaultyState [GOOD] >> TSentinelTests::PDiskRackGuardHalfRack >> TopicSessionTests::TwoSessionWithoutPredicate >> TRUCalculatorTests::TestReadTable [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/tx/long_tx_service/ut/unittest >> LongTxService::LockSubscribe [GOOD] Test command err: 2025-03-31T16:36:46.301850Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-03-31T16:36:46.302396Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/0l5c/0005fc/r3tmp/tmpqFtuz3/pdisk_1.dat": unknown reason, errno# 0. 
PDiskId# 1000 2025-03-31T16:36:46.303027Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/0l5c/0005fc/r3tmp/tmpqFtuz3/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/0l5c/0005fc/r3tmp/tmpqFtuz3/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 121604244444127130 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-03-31T16:36:46.375487Z node 1 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 1] Received TEvBeginTx from [1:433:2321] 2025-03-31T16:36:46.375648Z node 1 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 1] Created new LongTxId# ydb://long-tx/000000001h1dv80m03bzhb9101?node_id=1 2025-03-31T16:36:46.387743Z node 2 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 2] Received TEvAttachColumnShardWrites from [2:434:2099] LongTxId# ydb://long-tx/000000001h1dv80m03bzhb9101?node_id=1 2025-03-31T16:36:46.387854Z node 2 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 2] Received TEvNodeConnected for NodeId# 1 from session [2:84:2048] 2025-03-31T16:36:46.387967Z node 1 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 1] Received TEvAttachColumnShardWrites from [2:147:2088] LongTxId# ydb://long-tx/000000001h1dv80m03bzhb9101?node_id=1 2025-03-31T16:36:46.388178Z node 2 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 2] Received TEvCommitTx from [2:434:2099] LongTxId# ydb://long-tx/000000001h1dv80m03bzhb9101?node_id=1 2025-03-31T16:36:46.388300Z node 1 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 1] Received TEvCommitTx from [2:147:2088] LongTxId# ydb://long-tx/000000001h1dv80m03bzhb9101?node_id=1 2025-03-31T16:36:46.389690Z node 1 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 1] Committed LongTxId# ydb://long-tx/000000001h1dv80m03bzhb9101?node_id=1 without side-effects 2025-03-31T16:36:46.390023Z node 2 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 2] Received TEvRollbackTx from [2:434:2099] LongTxId# ydb://long-tx/000000001h1dv80m03bzhb9101?node_id=1 2025-03-31T16:36:46.390160Z node 1 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 1] Received TEvRollbackTx from [2:147:2088] LongTxId# 
ydb://long-tx/000000001h1dv80m03bzhb9101?node_id=1 2025-03-31T16:36:46.392036Z node 2 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 2] Received TEvRollbackTx from [2:434:2099] LongTxId# ydb://long-tx/000000001h1dv80m03bzhb9101?node_id=1 2025-03-31T16:36:46.392208Z node 1 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 1] Received TEvRollbackTx from [2:147:2088] LongTxId# ydb://long-tx/000000001h1dv80m03bzhb9101?node_id=1 2025-03-31T16:36:46.394159Z node 1 :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 2 2025-03-31T16:36:46.394601Z node 1 :PIPE_SERVER ERROR: [72057594046578946] NodeDisconnected NodeId# 2 2025-03-31T16:36:46.394953Z node 2 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 2] Received TEvNodeDisconnected for NodeId# 1 from session [2:84:2048] 2025-03-31T16:36:46.395267Z node 2 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:58:2073] ServerId# [1:353:2271] TabletId# 72057594037932033 PipeClientId# [2:58:2073] 2025-03-31T16:36:46.398875Z node 2 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 2] Received TEvCommitTx from [2:434:2099] LongTxId# ydb://long-tx/000000001h1dv80m03bzhb9101?node_id=3 2025-03-31T16:36:46.399155Z node 2 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 2] Received TEvNodeDisconnected for NodeId# 3 from session [2:472:2101] 2025-03-31T16:36:47.178290Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:36:47.178372Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:36:47.252353Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-31T16:36:48.043577Z node 4 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-03-31T16:36:48.044127Z node 4 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/0l5c/0005fc/r3tmp/tmpMaY1VH/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-03-31T16:36:48.048960Z node 4 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/0l5c/0005fc/r3tmp/tmpMaY1VH/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/0l5c/0005fc/r3tmp/tmpMaY1VH/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 14298142144709098344 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-03-31T16:36:48.509296Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Received TEvAcquireReadSnapshot from [3:512:2385] for database /dc-1 2025-03-31T16:36:48.509408Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Scheduling TEvAcquireSnapshotFlush for database /dc-1 2025-03-31T16:36:48.520045Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Received TEvAcquireSnapshotFlush for database /dc-1 2025-03-31T16:36:48.520253Z node 3 :LONG_TX_SERVICE DEBUG: LongTxService.AcquireSnapshot [3:564:2424] Sending navigate request for /dc-1 2025-03-31T16:36:48.524191Z node 3 :LONG_TX_SERVICE DEBUG: LongTxService.AcquireSnapshot [3:564:2424] Received navigate response status Ok 2025-03-31T16:36:48.524266Z node 3 :LONG_TX_SERVICE DEBUG: LongTxService.AcquireSnapshot [3:564:2424] Sending acquire step to coordinator 72057594046316545 2025-03-31T16:36:48.532673Z node 3 :LONG_TX_SERVICE DEBUG: LongTxService.AcquireSnapshot [3:564:2424] Received read step 1000 2025-03-31T16:36:48.532823Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Received TEvAcquireSnapshotFinished, cookie = 1 2025-03-31T16:36:48.534970Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Received TEvBeginTx from [3:512:2385] 2025-03-31T16:36:48.535025Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Scheduling TEvAcquireSnapshotFlush for database /dc-1 2025-03-31T16:36:48.545387Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Received TEvAcquireSnapshotFlush for database /dc-1 2025-03-31T16:36:48.545573Z node 3 :LONG_TX_SERVICE DEBUG: LongTxService.AcquireSnapshot [3:582:2436] Sending navigate request for /dc-1 2025-03-31T16:36:48.545782Z node 3 :LONG_TX_SERVICE DEBUG: LongTxService.AcquireSnapshot [3:582:2436] Received navigate response status Ok 2025-03-31T16:36:48.545832Z node 3 :LONG_TX_SERVICE DEBUG: LongTxService.AcquireSnapshot [3:582:2436] Sending acquire step to coordinator 72057594046316545 2025-03-31T16:36:48.546004Z node 3 :LONG_TX_SERVICE 
DEBUG: LongTxService.AcquireSnapshot [3:582:2436] Received read step 1500 2025-03-31T16:36:48.546085Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Received TEvAcquireSnapshotFinished, cookie = 2 2025-03-31T16:36:48.546162Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Created new read-only LongTxId# ydb://long-tx/read-only?snapshot=1500%3Amax 2025-03-31T16:36:48.546357Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Received TEvBeginTx from [3:512:2385] 2025-03-31T16:36:48.546401Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Scheduling TEvAcquireSnapshotFlush for database /dc-1 2025-03-31T16:36:48.559516Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Received TEvAcquireSnapshotFlush for database /dc-1 2025-03-31T16:36:48.559732Z node 3 :LONG_TX_SERVICE DEBUG: LongTxService.AcquireSnapshot [3:584:2438] Sending navigate request for /dc-1 2025-03-31T16:36:48.559928Z node 3 :LONG_TX_SERVICE DEBUG: LongTxService.AcquireSnapshot [3:584:2438] Received navigate response status Ok 2025-03-31T16:36:48.559974Z node 3 :LONG_TX_SERVICE DEBUG: LongTxService.AcquireSnapshot [3:584:2438] Sending acquire step to coordinator 72057594046316545 2025-03-31T16:36:48.560164Z node 3 :LONG_TX_SERVICE DEBUG: LongTxService.AcquireSnapshot [3:584:2438] Received read step 1500 2025-03-31T16:36:48.560274Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Received TEvAcquireSnapshotFinished, cookie = 3 2025-03-31T16:36:48.560366Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Created new read-write LongTxId# ydb://long-tx/00000001eacqr3ee8gcyyr3ngk?node_id=3&snapshot=1500%3Amax 2025-03-31T16:36:49.330275Z node 6 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-03-31T16:36:49.330680Z node 6 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/0l5c/0005fc/r3tmp/tmp8MfBbh/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-03-31T16:36:49.330917Z node 6 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/0l5c/0005fc/r3tmp/tmp8MfBbh/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/0l5c/0005fc/r3tmp/tmp8MfBbh/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 4480641625385901592 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-03-31T16:36:49.371253Z node 5 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 5] Received TEvRegisterLock for LockId# 123 2025-03-31T16:36:49.371432Z node 5 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 5] Received TEvSubscribeLock from [5:429:2317] for LockId# 987 LockNode# 5 2025-03-31T16:36:49.382151Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvSubscribeLock from [6:430:2099] for LockId# 987 LockNode# 5 2025-03-31T16:36:49.383360Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvNodeConnected for NodeId# 5 from session [6:97:2048] 2025-03-31T16:36:49.384762Z node 5 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 5] Received TEvSubscribeLock from [6:147:2088] for LockId# 987 LockNode# 5 2025-03-31T16:36:49.386207Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvLockStatus from [5:146:2135] for LockId# 987 LockNode# 5 LockStatus# STATUS_NOT_FOUND 2025-03-31T16:36:49.386426Z node 5 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 5] Received TEvSubscribeLock from [5:429:2317] for LockId# 123 LockNode# 5 2025-03-31T16:36:49.386583Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvSubscribeLock from [6:430:2099] for LockId# 123 LockNode# 5 2025-03-31T16:36:49.387819Z node 5 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 5] Received TEvSubscribeLock from [6:147:2088] for LockId# 123 LockNode# 5 2025-03-31T16:36:49.388056Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvLockStatus from [5:146:2135] for LockId# 123 LockNode# 5 LockStatus# STATUS_SUBSCRIBED 2025-03-31T16:36:49.388215Z node 5 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 5] Received TEvUnregisterLock for LockId# 123 2025-03-31T16:36:49.388372Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvLockStatus from [5:146:2135] for LockId# 123 LockNode# 5 LockStatus# STATUS_NOT_FOUND 2025-03-31T16:36:49.388537Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvSubscribeLock from 
[6:430:2099] for LockId# 234 LockNode# 5 2025-03-31T16:36:49.388756Z node 5 :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 6 2025-03-31T16:36:49.389328Z node 5 :PIPE_SERVER ERROR: [72057594046578946] NodeDisconnected NodeId# 6 2025-03-31T16:36:49.389565Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvNodeDisconnected for NodeId# 5 from session [6:97:2048] 2025-03-31T16:36:49.389801Z node 6 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:83:2074] ServerId# [5:351:2269] TabletId# 72057594037932033 PipeClientId# [6:83:2074] 2025-03-31T16:36:49.608900Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvNodeConnected for NodeId# 5 from session [6:461:2048] 2025-03-31T16:36:49.609159Z node 5 :PIPE_SERVER ERROR: [72057594046447617] NodeDisconnected NodeId# 6 2025-03-31T16:36:49.609436Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvNodeDisconnected for NodeId# 5 from session [6:461:2048] 2025-03-31T16:36:49.609550Z node 5 :PIPE_SERVER ERROR: [72057594046578946] NodeDisconnected NodeId# 6 2025-03-31T16:36:49.609584Z node 5 :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 6 2025-03-31T16:36:49.609696Z node 6 :TX_PROXY WARN: actor# [6:145:2087] HANDLE TEvClientDestroyed from tablet# 72057594046447617 2025-03-31T16:36:49.610085Z node 6 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:462:2100] ServerId# [5:466:2337] TabletId# 72057594037932033 PipeClientId# [6:462:2100] 2025-03-31T16:36:49.855312Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvNodeConnected for NodeId# 5 from session [6:489:2048] 2025-03-31T16:36:49.855554Z node 5 :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 6 2025-03-31T16:36:49.855611Z node 5 :PIPE_SERVER ERROR: [72057594046578946] NodeDisconnected NodeId# 6 2025-03-31T16:36:49.856186Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvNodeDisconnected for NodeId# 5 from session [6:489:2048] 2025-03-31T16:36:49.856423Z node 6 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:492:2101] ServerId# [5:495:2357] TabletId# 72057594037932033 PipeClientId# [6:492:2101] 2025-03-31T16:36:50.113162Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvNodeConnected for NodeId# 5 from session [6:511:2048] 2025-03-31T16:36:50.113423Z node 5 :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 6 2025-03-31T16:36:50.113472Z node 5 :PIPE_SERVER ERROR: [72057594046578946] NodeDisconnected NodeId# 6 2025-03-31T16:36:50.113918Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvNodeDisconnected for NodeId# 5 from session [6:511:2048] 2025-03-31T16:36:50.114254Z node 6 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:512:2103] ServerId# [5:516:2371] TabletId# 72057594037932033 PipeClientId# [6:512:2103] |97.6%| [TS] {RESULT} ydb/core/tx/long_tx_service/ut/unittest >> TRUCalculatorTests::TestBulkUpsert [GOOD] |97.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ru_calculator/unittest >> TRUCalculatorTests::TestBulkUpsert [GOOD] |97.6%| [TS] {RESULT} ydb/core/tx/schemeshard/ut_ru_calculator/unittest >> TSequence::SequencesIndex [GOOD] >> TSequence::CreateTableWithDefaultFromSequenceFromSelect >> TIndexProcesorTests::TestCreateIndexProcessor [GOOD] >> 
TIndexProcesorTests::TestSingleCreateQueueEvent >> KeyValueGRPCService::SimpleAcquireLock >> ReadUpdateWrite::Load [GOOD] >> GenericProviderLookupActor::Lookup >> SequenceProxy::Basics [GOOD] >> SequenceProxy::DropRecreate >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_same_values [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_same_values_simple >> GenericProviderLookupActor::Lookup [GOOD] >> GenericProviderLookupActor::LookupWithErrors >> GenericProviderLookupActor::LookupWithErrors [GOOD] >> TabletService_ExecuteMiniKQL::DryRunEraseRow [GOOD] >> TabletService_ExecuteMiniKQL::OnlyAdminsAllowed >> ConfigGRPCService::FetchConfig [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/generic/actors/ut/unittest >> GenericProviderLookupActor::LookupWithErrors [GOOD] Test command err: 2025-03-31 16:36:52.483 INFO ydb-library-yql-providers-generic-actors-ut(pid=724932, tid=0x00007F5B146B3B40) [generic] yql_generic_lookup_actor.cpp:151: New generic proivider lookup source actor(ActorId=[1:4:2051]) for kind=YDB, endpoint=host: "some_host" port: 2135, database=some_db, use_tls=1, protocol=NATIVE, table=lookup_test 2025-03-31 16:36:52.512 DEBUG ydb-library-yql-providers-generic-actors-ut(pid=724932, tid=0x00007F5B146B3B40) [generic] yql_generic_lookup_actor.cpp:288: ActorId=[1:4:2051] Got LookupRequest for 3 keys Call ListSplits. selects { data_source_instance { kind: YDB endpoint { host: "some_host" port: 2135 } database: "some_db" credentials { token { type: "IAM" value: "TEST_TOKEN" } } use_tls: true protocol: NATIVE } what { items { column { name: "id" type { type_id: UINT64 } } } items { column { name: "optional_id" type { optional_type { item { type_id: UINT64 } } } } } items { column { name: "string_value" type { optional_type { item { type_id: STRING } } } } } } from { table: "lookup_test" } where { filter_typed { disjunction { operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 2 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 1 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 101 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 0 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 100 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 2 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } } } } } max_split_count: 1 CRAB Expected: selects { data_source_instance { kind: YDB endpoint { host: "some_host" port: 2135 } database: 
"some_db" credentials { token { type: "IAM" value: "TEST_TOKEN" } } use_tls: true protocol: NATIVE } what { items { column { name: "id" type { type_id: UINT64 } } } items { column { name: "optional_id" type { optional_type { item { type_id: UINT64 } } } } } items { column { name: "string_value" type { optional_type { item { type_id: STRING } } } } } } from { table: "lookup_test" } where { filter_typed { disjunction { operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 2 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 1 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 101 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 0 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 100 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 2 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } } } } } max_split_count: 1 CRAB Actual: selects { data_source_instance { kind: YDB endpoint { host: "some_host" port: 2135 } database: "some_db" credentials { token { type: "IAM" value: "TEST_TOKEN" } } use_tls: true protocol: NATIVE } what { items { column { name: "id" type { type_id: UINT64 } } } items { column { name: "optional_id" type { optional_type { item { type_id: UINT64 } } } } } ite ... 
left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 1 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 101 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 0 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 100 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 2 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } } } } } max_split_count: 1 CRAB Actual: selects { data_source_instance { kind: YDB endpoint { host: "some_host" port: 2135 } database: "some_db" credentials { token { type: "IAM" value: "TEST_TOKEN" } } use_tls: true protocol: NATIVE } what { items { column { name: "id" type { type_id: UINT64 } } } items { column { name: "optional_id" type { optional_type { item { type_id: UINT64 } } } } } items { column { name: "string_value" type { optional_type { item { type_id: STRING } } } } } } from { table: "lookup_test" } where { filter_typed { disjunction { operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 2 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 1 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 101 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 0 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 100 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 2 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } } } } } max_split_count: 1 ListSplits result. 
GRpcStatusCode: 0 2025-03-31 16:36:52.621 DEBUG ydb-library-yql-providers-generic-actors-ut(pid=724932, tid=0x00007F5B1103C640) [generic] yql_generic_lookup_actor.cpp:319: ActorId=[2:7488013540366452476:2051] Got TListSplitsStreamIterator 2025-03-31 16:36:52.622 DEBUG ydb-library-yql-providers-generic-actors-ut(pid=724932, tid=0x00007F5B1103C640) [generic] yql_generic_lookup_actor.cpp:196: ActorId=[2:7488013540366452476:2051] Got TListSplitsResponse from Connector Call ReadSplits. data_source_instance { kind: YDB endpoint { host: "some_host" port: 2135 } database: "some_db" credentials { token { type: "IAM" value: "TEST_TOKEN" } } use_tls: true protocol: NATIVE } splits { select { from { table: "example_1" } } description: "Actual split info is not important" } format: ARROW_IPC_STREAMING filtering: FILTERING_MANDATORY CRAB Expected: data_source_instance { kind: YDB endpoint { host: "some_host" port: 2135 } database: "some_db" credentials { token { type: "IAM" value: "TEST_TOKEN" } } use_tls: true protocol: NATIVE } splits { select { from { table: "example_1" } } description: "Actual split info is not important" } format: ARROW_IPC_STREAMING filtering: FILTERING_MANDATORY CRAB Actual: data_source_instance { kind: YDB endpoint { host: "some_host" port: 2135 } database: "some_db" credentials { token { type: "IAM" value: "TEST_TOKEN" } } use_tls: true protocol: NATIVE } splits { select { from { table: "example_1" } } description: "Actual split info is not important" } format: ARROW_IPC_STREAMING filtering: FILTERING_MANDATORY ReadSplits result. GRpcStatusCode: 0 2025-03-31 16:36:52.622 DEBUG ydb-library-yql-providers-generic-actors-ut(pid=724932, tid=0x00007F5B1103C640) [generic] yql_generic_lookup_actor.cpp:229: ActorId=[2:7488013540366452476:2051] Got ReadSplitsStreamIterator from Connector 2025-03-31 16:36:52.622 DEBUG ydb-library-yql-providers-generic-actors-ut(pid=724932, tid=0x00007F5B1103C640) [generic] yql_generic_lookup_actor.cpp:341: ActorId=[2:7488013540366452476:2051] Got DataChunk 2025-03-31 16:36:52.623 DEBUG ydb-library-yql-providers-generic-actors-ut(pid=724932, tid=0x00007F5B1103C640) [generic] yql_generic_lookup_actor.cpp:352: ActorId=[2:7488013540366452476:2051] Got EOF 2025-03-31 16:36:52.623 DEBUG ydb-library-yql-providers-generic-actors-ut(pid=724932, tid=0x00007F5B1103C640) [generic] yql_generic_lookup_actor.cpp:402: Sending lookup results for 3 keys |97.6%| [TS] {RESULT} ydb/library/yql/providers/generic/actors/ut/unittest >> TGRpcRateLimiterTest::ListResources [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyRequiredGrpcApi >> DataShardStats::OneChannelStatsCorrect ------- [TM] {asan, default-linux-x86_64, release} ydb/services/config/ut/unittest >> ConfigGRPCService::FetchConfig [GOOD] Test command err: 2025-03-31T16:36:46.043236Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488013514875976532:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:36:46.044813Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0007a1/r3tmp/tmp1vq3ge/pdisk_1.dat 2025-03-31T16:36:46.483293Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:36:46.484141Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> 
Connecting 2025-03-31T16:36:46.489357Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:36:46.533178Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18182, node 1 2025-03-31T16:36:46.581990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: Root, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-03-31T16:36:46.582166Z node 1 :GRPC_SERVER NOTICE: Grpc request proxy started, nodeid# 1, serve as static node 2025-03-31T16:36:46.582703Z node 1 :GRPC_SERVER DEBUG: Subscribe to /Root 2025-03-31T16:36:46.582775Z node 1 :GRPC_SERVER NOTICE: Grpc request proxy started, nodeid# 1, serve as static node 2025-03-31T16:36:46.582869Z node 1 :GRPC_SERVER DEBUG: Subscribe to /Root 2025-03-31T16:36:46.583925Z node 1 :GRPC_SERVER INFO: Subscribed for config changes 2025-03-31T16:36:46.583954Z node 1 :GRPC_SERVER INFO: Updated app config 2025-03-31T16:36:46.583985Z node 1 :GRPC_SERVER INFO: Subscribed for config changes 2025-03-31T16:36:46.585172Z node 1 :GRPC_SERVER INFO: Updated app config 2025-03-31T16:36:46.588949Z node 1 :GRPC_SERVER DEBUG: Got proxy service configuration 2025-03-31T16:36:46.588987Z node 1 :GRPC_SERVER DEBUG: Got proxy service configuration 2025-03-31T16:36:46.593789Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:36:46.593815Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T16:36:46.601911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046644480 2025-03-31T16:36:46.607357Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-03-31T16:36:46.607527Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-03-31T16:36:46.607576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:36:46.607606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046644480, domainId: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-03-31T16:36:46.607678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:36:46.607762Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046644480 2025-03-31T16:36:46.610775Z node 1 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-03-31T16:36:46.610844Z node 1 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-03-31T16:36:46.610858Z node 1 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-03-31T16:36:46.610871Z node 1 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-03-31T16:36:46.653379Z node 1 :GRPC_SERVER DEBUG: [0x51a000028e80] created request Name# BlobStorageConfig 2025-03-31T16:36:46.654368Z node 1 :GRPC_SERVER DEBUG: [0x51a000029480] created request Name# HiveCreateTablet 2025-03-31T16:36:46.654744Z node 1 :GRPC_SERVER DEBUG: [0x51a000029a80] created request Name# TabletStateRequest 2025-03-31T16:36:46.655096Z node 1 :GRPC_SERVER DEBUG: [0x51a00002a080] created request Name# SchemeOperationStatus 2025-03-31T16:36:46.656019Z node 1 :GRPC_SERVER DEBUG: [0x51a00002a680] created request Name# ChooseProxy 2025-03-31T16:36:46.656435Z node 1 :GRPC_SERVER DEBUG: [0x51a00002ac80] created request Name# ResolveNode 2025-03-31T16:36:46.657145Z node 1 :GRPC_SERVER DEBUG: 
[0x51a00002b280] created request Name# FillNode 2025-03-31T16:36:46.657602Z node 1 :GRPC_SERVER DEBUG: [0x51a00002b880] created request Name# DrainNode 2025-03-31T16:36:46.657924Z node 1 :GRPC_SERVER DEBUG: [0x51a00002be80] created request Name# InterconnectDebug 2025-03-31T16:36:46.658218Z node 1 :GRPC_SERVER DEBUG: [0x51a00002c480] created request Name# TestShardControl 2025-03-31T16:36:46.658537Z node 1 :GRPC_SERVER DEBUG: [0x51a00002ca80] created request Name# RegisterNode 2025-03-31T16:36:46.658837Z node 1 :GRPC_SERVER DEBUG: [0x51a00002d080] created request Name# CmsRequest 2025-03-31T16:36:46.659195Z node 1 :GRPC_SERVER DEBUG: [0x51a00002d680] created request Name# ConsoleRequest 2025-03-31T16:36:46.660203Z node 1 :GRPC_SERVER DEBUG: [0x51a00002dc80] created request Name# SchemeInitRoot 2025-03-31T16:36:46.660540Z node 1 :GRPC_SERVER DEBUG: [0x51a00002e280] created request Name# PersQueueRequest 2025-03-31T16:36:46.661564Z node 1 :GRPC_SERVER DEBUG: [0x51a00002e880] created request Name# SchemeOperation 2025-03-31T16:36:46.661852Z node 1 :GRPC_SERVER DEBUG: [0x51a00002ee80] created request Name# SchemeDescribe 2025-03-31T16:36:46.755704Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:36:46.755730Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:36:46.755743Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:36:46.755902Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4553 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
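The WaitRootIsUp exchange above is a plain poll: TClient::Ls is issued against Root until the reply carries StatusCode: SUCCESS. A minimal sketch of that loop, assuming a hypothetical client.ls() wrapper that returns the parsed reply as a dict (the wrapper is not part of the harness shown in this log):

import time

def wait_root_is_up(client, path="Root", timeout_s=30.0, poll_s=0.5):
    # Poll until the scheme entry for `path` is published, mirroring the
    # "WaitRootIsUp 'Root'... success." exchange in the trace above.
    deadline = time.monotonic() + timeout_s
    while time.monotonic() < deadline:
        reply = client.ls(path)                  # hypothetical TClient::Ls wrapper
        if reply.get("StatusCode") == "SUCCESS":
            return reply                         # parsed PathDescription
        time.sleep(poll_s)                       # root not published yet; retry
    raise TimeoutError(f"'{path}' did not come up within {timeout_s}s")
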
2025-03-31T16:36:47.249475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "hdd2" Kind: "hdd2" } StoragePools { Name: "hdd" Kind: "hdd" } StoragePools { Name: "hdd1" Kind: "hdd1" } StoragePools { Name: "ssd" Kind: "ssd" } StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-03-31T16:36:47.251156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:36:47.252911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-03-31T16:36:47.254664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-31T16:36:47.254728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:36:47.257639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-03-31T16:36:47.258673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-03-31T16:36:47.258888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:36:47.258932Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-03-31T16:36:47.259013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-03-31T16:36:47.259026Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-03-31T16:36:47.261656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:36:47.261685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2025-03-31T16:36:47.261704Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:36:47.262065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:36:47.262107Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-03-31T16:36:47.262140Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-03-31T16:36:47.263945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:36:47.263992Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:36:47.264048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-31T16:36:47.264086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-03-31T16:36:47.268843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:36:47.270681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-03-31T16:36:47.272299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-03-31T16:36:47.274489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743439007317, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-31T16:36:47.274652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1 ... ionPropose Complete, txId: 281474976715657, response: Status: StatusAccepted TxId: 281474976715657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 waiting... 
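The "Change state for txid ... 2 -> 3" and "3 -> 128" lines above (and "128 -> 240" further down) trace one schemeshard sub-operation through its progress states. A small sketch that replays the observed happy path; the labels are read off the adjacent ProgressState messages (TCreateParts, TConfigureParts, TPropose, TDone) and only describe this trace, not an authoritative enumeration of schemeshard states:

# Progress-state codes as printed in "Change state for txid <op> A -> B".
TX_STATES = {2: "CreateParts", 3: "ConfigureParts", 128: "Propose", 240: "Done"}
HAPPY_PATH = [(2, 3), (3, 128), (128, 240)]

def replay(transitions):
    # Reject any transition not on the happy path seen in this trace.
    allowed = set(HAPPY_PATH)
    for src, dst in transitions:
        if (src, dst) not in allowed:
            raise ValueError(f"unexpected transition {src} -> {dst}")
        print(f"{TX_STATES[src]} ({src}) -> {TX_STATES[dst]} ({dst})")

replay(HAPPY_PATH)  # CreateParts -> ConfigureParts -> Propose -> Done
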
2025-03-31T16:36:50.119816Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-03-31T16:36:50.119945Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:36:50.119968Z node 3 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-03-31T16:36:50.119981Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976715657:0 ProgressState no shards to create, do next state 2025-03-31T16:36:50.119995Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2025-03-31T16:36:50.121889Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:36:50.121905Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 0/1, is published: true 2025-03-31T16:36:50.121915Z node 3 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:36:50.124062Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:36:50.124083Z node 3 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-03-31T16:36:50.124093Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2025-03-31T16:36:50.126233Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:36:50.126260Z node 3 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:36:50.126287Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976715657:0, at tablet# 72057594046644480 2025-03-31T16:36:50.126311Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976715657 ready parts: 1/1 2025-03-31T16:36:50.126434Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976715657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:36:50.128150Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976715657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976715657 msg type: 269090816 2025-03-31T16:36:50.128299Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976715657, partId: 4294967295, tablet: 72057594046316545 2025-03-31T16:36:50.130980Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743439010173, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-31T16:36:50.131105Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743439010173 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-03-31T16:36:50.131156Z 
node 3 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-03-31T16:36:50.131396Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2025-03-31T16:36:50.131435Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-03-31T16:36:50.156013Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-03-31T16:36:50.156095Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-03-31T16:36:50.162070Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-03-31T16:36:50.162095Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-03-31T16:36:50.162236Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-03-31T16:36:50.162252Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:7488013527367899137:2367], at schemeshard: 72057594046644480, txId: 281474976715657, path id: 1 2025-03-31T16:36:50.162301Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:36:50.162321Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2025-03-31T16:36:50.162387Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2025-03-31T16:36:50.162398Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 1/1 2025-03-31T16:36:50.162410Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2025-03-31T16:36:50.162419Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 1/1 2025-03-31T16:36:50.162440Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 1/1, is published: false 2025-03-31T16:36:50.162457Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 1/1 2025-03-31T16:36:50.162470Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2025-03-31T16:36:50.162478Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715657:0 2025-03-31T16:36:50.162549Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-03-31T16:36:50.162560Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2025-03-31T16:36:50.162581Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976715657, [OwnerId: 72057594046644480, LocalPathId: 1], 3 2025-03-31T16:36:50.163866Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2025-03-31T16:36:50.163945Z node 3 
:FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2025-03-31T16:36:50.163953Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2025-03-31T16:36:50.163978Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-03-31T16:36:50.163992Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-03-31T16:36:50.164068Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2025-03-31T16:36:50.164083Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [3:7488013531662866720:2316] 2025-03-31T16:36:50.164197Z node 3 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-03-31T16:36:50.164248Z node 3 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-03-31T16:36:50.164370Z node 3 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-03-31T16:36:50.164394Z node 3 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-03-31T16:36:50.165320Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715657 2025-03-31T16:36:50.214106Z node 3 :GRPC_SERVER DEBUG: Got grpc request# FetchConfigRequest, traceId# 01jqph06d5a126dgtgra7g47bx, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:36312, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-03-31T16:36:50.218344Z node 3 :GRPC_SERVER DEBUG: [0x51a000011a80] received request Name# SchemeOperationStatus ok# false data# peer# current inflight# 0 2025-03-31T16:36:50.218429Z node 3 :GRPC_SERVER DEBUG: [0x51a0000d5680] received request Name# SchemeOperation ok# false data# peer# current inflight# 0 2025-03-31T16:36:50.218567Z node 3 :GRPC_SERVER DEBUG: [0x51a0000d5c80] received request Name# SchemeDescribe ok# false data# peer# current inflight# 0 2025-03-31T16:36:50.218735Z node 3 :GRPC_SERVER DEBUG: [0x51a000011480] received request Name# ChooseProxy ok# false data# peer# current inflight# 0 2025-03-31T16:36:50.218747Z node 3 :GRPC_SERVER DEBUG: [0x51a0000d5080] received request Name# PersQueueRequest ok# false data# peer# current inflight# 0 2025-03-31T16:36:50.218910Z node 3 :GRPC_SERVER DEBUG: [0x51a0000d4a80] received request Name# SchemeInitRoot ok# false data# peer# current inflight# 0 2025-03-31T16:36:50.218927Z node 3 :GRPC_SERVER DEBUG: [0x51a000010e80] received request Name# ResolveNode ok# false data# peer# current inflight# 0 2025-03-31T16:36:50.219147Z node 3 :GRPC_SERVER DEBUG: [0x51a000010880] received request Name# FillNode ok# false data# peer# current inflight# 0 2025-03-31T16:36:50.219171Z node 3 :GRPC_SERVER DEBUG: [0x51a000010280] received request Name# DrainNode ok# false data# peer# current inflight# 0 2025-03-31T16:36:50.219329Z node 3 :GRPC_SERVER DEBUG: [0x51a000012c80] received request Name# BlobStorageConfig ok# false data# peer# current inflight# 0 2025-03-31T16:36:50.219339Z node 3 :GRPC_SERVER DEBUG: [0x51a000012680] received 
request Name# HiveCreateTablet ok# false data# peer# current inflight# 0 2025-03-31T16:36:50.219489Z node 3 :GRPC_SERVER DEBUG: [0x51a00000f680] received request Name# TestShardControl ok# false data# peer# current inflight# 0 2025-03-31T16:36:50.219501Z node 3 :GRPC_SERVER DEBUG: [0x51a00000f080] received request Name# RegisterNode ok# false data# peer# current inflight# 0 2025-03-31T16:36:50.219655Z node 3 :GRPC_SERVER DEBUG: [0x51a00000ea80] received request Name# CmsRequest ok# false data# peer# current inflight# 0 2025-03-31T16:36:50.219666Z node 3 :GRPC_SERVER DEBUG: [0x51a0000d4480] received request Name# ConsoleRequest ok# false data# peer# current inflight# 0 2025-03-31T16:36:50.219822Z node 3 :GRPC_SERVER DEBUG: [0x51a00000fc80] received request Name# InterconnectDebug ok# false data# peer# current inflight# 0 2025-03-31T16:36:50.219874Z node 3 :GRPC_SERVER DEBUG: [0x51a000012080] received request Name# TabletStateRequest ok# false data# peer# current inflight# 0 |97.6%| [TM] {RESULT} ydb/services/config/ut/unittest >> KqpTpch::Query01 >> test_postgres.py::TestPostgresSuite::test_postgres_suite[boolean] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[int2] >> DataShardBackgroundCompaction::ShouldNotCompactWhenBorrowed [GOOD] >> DataShardBackgroundCompaction::ShouldNotCompactWhenCopyTable >> SequenceProxy::DropRecreate [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_same_values_simple [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_with_valid_and_invalid_data >> TIndexProcesorTests::TestSingleCreateQueueEvent [GOOD] >> TIndexProcesorTests::TestReindexSingleQueue >> test_postgres.py::TestPostgresSuite::test_postgres_suite[int2] [GOOD] >> test.py::test[solomon-Downsampling-default.txt] [GOOD] >> test.py::test[solomon-DownsamplingValidSettings-default.txt] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[float8] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/tx/sequenceproxy/ut/unittest >> SequenceProxy::DropRecreate [GOOD] Test command err: 2025-03-31T16:36:50.863615Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:36:50.863675Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:36:50.960100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-31T16:36:51.716079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSequence, opId: 281474976715657:0, at schemeshard: 72057594046578944 2025-03-31T16:36:52.002571Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-03-31T16:36:52.003057Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/0l5c/0008c9/r3tmp/tmpfWsmjl/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-03-31T16:36:52.003720Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/0l5c/0008c9/r3tmp/tmpfWsmjl/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/0l5c/0008c9/r3tmp/tmpfWsmjl/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 2577604730116583088 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-03-31T16:36:52.737370Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:36:52.737461Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:36:52.795761Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-31T16:36:53.361954Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSequence, opId: 281474976715657:0, at schemeshard: 72057594046578944 2025-03-31T16:36:53.602585Z node 4 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-03-31T16:36:53.603812Z node 4 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/0l5c/0008c9/r3tmp/tmptRWWYi/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-03-31T16:36:53.604061Z node 4 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/0l5c/0008c9/r3tmp/tmptRWWYi/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/0l5c/0008c9/r3tmp/tmptRWWYi/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 14740019445519857003 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-03-31T16:36:53.771114Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropSequence, opId: 281474976715658:0, at schemeshard: 72057594046578944 2025-03-31T16:36:54.105080Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSequence, opId: 281474976715659:0, at schemeshard: 72057594046578944 |97.6%| [TS] {RESULT} ydb/core/tx/sequenceproxy/ut/unittest >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_with_valid_and_invalid_data [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_parallel >> TAsyncIndexTests::SplitBothWithReboots[TabletReboots] [GOOD] >> DataShardFollowers::FollowerRebootAfterSysCompaction [GOOD] >> DataShardFollowers::FollowerAfterSysCompaction >> test_http_api.py::TestHttpApi::test_simple_analytics_query >> KeyValueGRPCService::SimpleAcquireLock [GOOD] >> KeyValueGRPCService::SimpleExecuteTransaction ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/high_load/unittest >> ReadUpdateWrite::Load [GOOD] Test command err: Step 1. 
only write
Was written: 0 MiB, Speed: 0 MiB/s
Write: 10% 0.217920s 30% 0.217920s 50% 0.217920s 90% 0.217920s 99% 0.217920s
Write: 10% 0.214555s 30% 0.214555s 50% 0.214555s 90% 0.214555s 99% 0.214555s
Write: 10% 0.232830s 30% 0.232830s 50% 0.232830s 90% 0.232830s 99% 0.232830s
Write: 10% 0.233263s 30% 0.233263s 50% 0.233263s 90% 0.233263s 99% 0.233263s
Write: 10% 0.218433s 30% 0.218433s 50% 0.218433s 90% 0.218433s 99% 0.218433s
Write: 10% 0.235183s 30% 0.235183s 50% 0.235183s 90% 0.235183s 99% 0.235183s
Write: 10% 0.227092s 30% 0.227092s 50% 0.227092s 90% 0.227092s 99% 0.227092s
Write: 10% 0.263972s 30% 0.263972s 50% 0.263972s 90% 0.263972s 99% 0.263972s
Write: 10% 0.341357s 30% 0.341357s 50% 0.341357s 90% 0.341357s 99% 0.341357s
Write: 10% 0.336777s 30% 0.336777s 50% 0.336777s 90% 0.336777s 99% 0.336777s
Write: 10% 0.327276s 30% 0.327276s 50% 0.327276s 90% 0.327276s 99% 0.327276s
Write: 10% 0.331222s 30% 0.331222s 50% 0.331222s 90% 0.331222s 99% 0.331222s
Write: 10% 0.349406s 30% 0.349406s 50% 0.349406s 90% 0.349406s 99% 0.349406s
Write: 10% 0.362061s 30% 0.362061s 50% 0.362061s 90% 0.362061s 99% 0.362061s
Write: 10% 0.388109s 30% 0.388109s 50% 0.388109s 90% 0.388109s 99% 0.388109s
Write: 10% 0.311986s 30% 0.311986s 50% 0.311986s 90% 0.311986s 99% 0.311986s
Write: 10% 0.389884s 30% 0.389884s 50% 0.389884s 90% 0.389884s 99% 0.389884s
Write: 10% 0.351111s 30% 0.351111s 50% 0.351111s 90% 0.351111s 99% 0.351111s
Write: 10% 0.371521s 30% 0.371521s 50% 0.371521s 90% 0.371521s 99% 0.371521s
Write: 10% 0.370774s 30% 0.370774s 50% 0.370774s 90% 0.370774s 99% 0.370774s
Write: 10% 0.326430s 30% 0.326430s 50% 0.326430s 90% 0.326430s 99% 0.326430s
Write: 10% 0.389038s 30% 0.389038s 50% 0.389038s 90% 0.389038s 99% 0.389038s
Write: 10% 0.379421s 30% 0.379421s 50% 0.379421s 90% 0.379421s 99% 0.379421s
Write: 10% 0.402812s 30% 0.402812s 50% 0.402812s 90% 0.402812s 99% 0.402812s
Write: 10% 0.357680s 30% 0.357680s 50% 0.357680s 90% 0.357680s 99% 0.357680s
Write: 10% 0.380566s 30% 0.380566s 50% 0.380566s 90% 0.380566s 99% 0.380566s
Write: 10% 0.330756s 30% 0.330756s 50% 0.330756s 90% 0.330756s 99% 0.330756s
Write: 10% 0.382134s 30% 0.382134s 50% 0.382134s 90% 0.382134s 99% 0.382134s
Write: 10% 0.368695s 30% 0.368695s 50% 0.368695s 90% 0.368695s 99% 0.368695s
Write: 10% 0.374074s 30% 0.374074s 50% 0.374074s 90% 0.374074s 99% 0.374074s
Write: 10% 0.332799s 30% 0.332799s 50% 0.332799s 90% 0.332799s 99% 0.332799s
Write: 10% 0.357996s 30% 0.357996s 50% 0.357996s 90% 0.357996s 99% 0.357996s
Write: 10% 0.394166s 30% 0.394166s 50% 0.394166s 90% 0.394166s 99% 0.394166s
Write: 10% 0.344759s 30% 0.344759s 50% 0.344759s 90% 0.344759s 99% 0.344759s
Write: 10% 0.400939s 30% 0.400939s 50% 0.400939s 90% 0.400939s 99% 0.400939s
Write: 10% 0.338030s 30% 0.338030s 50% 0.338030s 90% 0.338030s 99% 0.338030s
Write: 10% 0.374068s 30% 0.374068s 50% 0.374068s 90% 0.374068s 99% 0.374068s
Write: 10% 0.394944s 30% 0.394944s 50% 0.394944s 90% 0.394944s 99% 0.394944s
Write: 10% 0.343843s 30% 0.343843s 50% 0.343843s 90% 0.343843s 99% 0.343843s
Write: 10% 0.383328s 30% 0.383328s 50% 0.383328s 90% 0.383328s 99% 0.383328s
Write: 10% 0.417876s 30% 0.417876s 50% 0.417876s 90% 0.417876s 99% 0.417876s
Write: 10% 0.379706s 30% 0.379706s 50% 0.379706s 90% 0.379706s 99% 0.379706s
Write: 10% 0.376889s 30% 0.376889s 50% 0.376889s 90% 0.376889s 99% 0.376889s
Write: 10% 0.421554s 30% 0.421554s 50% 0.421554s 90% 0.421554s 99% 0.421554s
Write: 10% 0.410684s 30% 0.410684s 50% 0.410684s 90% 0.410684s 99% 0.410684s
Write: 10% 0.383996s 30% 0.383996s 50% 0.383996s 90% 0.383996s 99% 0.383996s
Write: 10% 0.352682s 30% 0.352682s 50% 0.352682s 90% 0.352682s 99% 0.352682s
Write: 10% 0.394153s 30% 0.394153s 50% 0.394153s 90% 0.394153s 99% 0.394153s
Write: 10% 0.384015s 30% 0.384015s 50% 0.384015s 90% 0.384015s 99% 0.384015s
Write: 10% 0.412932s 30% 0.412932s 50% 0.412932s 90% 0.412932s 99% 0.412932s
Write: 10% 0.396463s 30% 0.396463s 50% 0.396463s 90% 0.396463s 99% 0.396463s
Write: 10% 0.405407s 30% 0.405407s 50% 0.405407s 90% 0.405407s 99% 0.405407s
Write: 10% 0.417505s 30% 0.417505s 50% 0.417505s 90% 0.417505s 99% 0.417505s
Write: 10% 0.417152s 30% 0.417152s 50% 0.417152s 90% 0.417152s 99% 0.417152s
Write: 10% 0.386474s 30% 0.386474s 50% 0.386474s 90% 0.386474s 99% 0.386474s
Write: 10% 0.362218s 30% 0.362218s 50% 0.362218s 90% 0.362218s 99% 0.362218s
Write: 10% 0.391915s 30% 0.391915s 50% 0.391915s 90% 0.391915s 99% 0.391915s
Write: 10% 0.433584s 30% 0.433584s 50% 0.433584s 90% 0.433584s 99% 0.433584s
Write: 10% 0.415671s 30% 0.415671s 50% 0.415671s 90% 0.415671s 99% 0.415671s
Write: 10% 0.425594s 30% 0.425594s 50% 0.425594s 90% 0.425594s 99% 0.425594s
Write: 10% 0.400733s 30% 0.400733s 50% 0.400733s 90% 0.400733s 99% 0.400733s
Write: 10% 0.370491s 30% 0.370491s 50% 0.370491s 90% 0.370491s 99% 0.370491s
Write: 10% 0.375185s 30% 0.375185s 50% 0.375185s 90% 0.375185s 99% 0.375185s
Write: 10% 0.426954s 30% 0.426954s 50% 0.426954s 90% 0.426954s 99% 0.426954s
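Each Write/Read/Update record in this ReadUpdateWrite load output is one worker's latency summary at the 10th, 30th, 50th, 90th and 99th percentiles; when a worker times only one request, all five percentiles collapse to the same value, which is why most records repeat a single number. A nearest-rank percentile reporter in the same output format, offered as a sketch rather than the test's actual code:

#include <algorithm>
#include <cmath>
#include <cstdio>
#include <vector>

// Nearest-rank percentile: the smallest sample such that at least p% of
// the sample set is <= it. Assumes a non-empty sample set; with a single
// sample, every percentile is that sample.
double Percentile(std::vector<double> samples, double p) {
    if (samples.empty()) return 0.0;
    std::sort(samples.begin(), samples.end());
    size_t rank = static_cast<size_t>(std::ceil(p / 100.0 * samples.size()));
    if (rank == 0) rank = 1;
    return samples[rank - 1];
}

void Report(const char* op, const std::vector<double>& latencies) {
    std::printf("%s:", op);
    for (int p : {10, 30, 50, 90, 99}) {
        std::printf(" %d%% %.6fs", p, Percentile(latencies, p));
    }
    std::printf("\n");  // e.g. "Write: 10% 0.217920s 30% 0.217920s ..."
}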
Step 2. read write
Write: 10% 0.338200s 30% 0.338200s 50% 0.338200s 90% 0.338200s 99% 0.338200s
Write: 10% 0.294980s 30% 0.294980s 50% 0.294980s 90% 0.294980s 99% 0.294980s
Write: 10% 0.300898s 30% 0.300898s 50% 0.300898s 90% 0.300898s 99% 0.300898s
Write: 10% 0.347461s 30% 0.347461s 50% 0.347461s 90% 0.347461s 99% 0.347461s
Write: 10% 0.300678s 30% 0.300678s 50% 0.300678s 90% 0.300678s 99% 0.300678s
Write: 10% 0.326959s 30% 0.326959s 50% 0.326959s 90% 0.326959s 99% 0.326959s
Write: 10% 0.345588s 30% 0.345588s 50% 0.345588s 90% 0.345588s 99% 0.345588s
Write: 10% 0.316985s 30% 0.316985s 50% 0.316985s 90% 0.316985s 99% 0.316985s
Write: 10% 0.317590s 30% 0.317590s 50% 0.317590s 90% 0.317590s 99% 0.317590s
Write: 10% 0.298826s 30% 0.298826s 50% 0.298826s 90% 0.298826s 99% 0.298826s
Write: 10% 0.335152s 30% 0.335152s 50% 0.335152s 90% 0.335152s 99% 0.335152s
Write: 10% 0.350061s 30% 0.350061s 50% 0.350061s 90% 0.350061s 99% 0.350061s
Write: 10% 0.319851s 30% 0.319851s 50% 0.319851s 90% 0.319851s 99% 0.319851s
Write: 10% 0.293602s 30% 0.293602s 50% 0.293602s 90% 0.293602s 99% 0.293602s
Write: 10% 0.351357s 30% 0.351357s 50% 0.351357s 90% 0.351357s 99% 0.351357s
Write: 10% 0.175207s 30% 0.175207s 50% 0.175207s 90% 0.175207s 99% 0.175207s
Write: 10% 0.353601s 30% 0.353601s 50% 0.353601s 90% 0.353601s 99% 0.353601s
Write: 10% 0.307687s 30% 0.307687s 50% 0.307687s 90% 0.307687s 99% 0.307687s
Write: 10% 0.299476s 30% 0.299476s 50% 0.299476s 90% 0.299476s 99% 0.299476s
Write: 10% 0.361359s 30% 0.361359s 50% 0.361359s 90% 0.361359s 99% 0.361359s
Write: 10% 0.301474s 30% 0.301474s 50% 0.301474s 90% 0.301474s 99% 0.301474s
Write: 10% 0.349786s 30% 0.349786s 50% 0.349786s 90% 0.349786s 99% 0.349786s
Write: 10% 0.246220s 30% 0.246220s 50% 0.246220s 90% 0.246220s 99% 0.246220s
Write: 10% 0.185607s 30% 0.185607s 50% 0.185607s 90% 0.185607s 99% 0.185607s
Write: 10% 0.232058s 30% 0.232058s 50% 0.232058s 90% 0.232058s 99% 0.232058s
Write: 10% 0.375578s 30% 0.375578s 50% 0.375578s 90% 0.375578s 99% 0.375578s
Write: 10% 0.347002s 30% 0.347002s 50% 0.347002s 90% 0.347002s 99% 0.347002s
Write: 10% 0.329462s 30% 0.329462s 50% 0.329462s 90% 0.329462s 99% 0.329462s
Write: 10% 0.322054s 30% 0.322054s 50% 0.322054s 90% 0.322054s 99% 0.322054s
Write: 10% 0.329039s 30% 0.329039s 50% 0.329039s 90% 0.329039s 99% 0.329039s
Write: 10% 0.331689s 30% 0.331689s 50% 0.331689s 90% 0.331689s 99% 0.331689s
Write: 10% 0.370339s 30% 0.370339s 50% 0.370339s 90% 0.370339s 99% 0.370339s
Write: 10% 0.343892s 30% 0.343892s 50% 0.343892s 90% 0.343892s 99% 0.343892s
Write: 10% 0.350789s 30% 0.350789s 50% 0.350789s 90% 0.350789s 99% 0.350789s
Write: 10% 0.185258s 30% 0.185258s 50% 0.185258s 90% 0.185258s 99% 0.185258s
Write: 10% 0.204639s 30% 0.204639s 50% 0.204639s 90% 0.204639s 99% 0.204639s
Write: 10% 0.336428s 30% 0.336428s 50% 0.336428s 90% 0.336428s 99% 0.336428s
Write: 10% 0.329547s 30% 0.329547s 50% 0.329547s 90% 0.329547s 99% 0.329547s
Write: 10% 0.347072s 30% 0.347072s 50% 0.347072s 90% 0.347072s 99% 0.347072s
Write: 10% 0.340108s 30% 0.340108s 50% 0.340108s 90% 0.340108s 99% 0.340108s
Write: 10% 0.182956s 30% 0.182956s 50% 0.182956s 90% 0.182956s 99% 0.182956s
Write: 10% 0.274913s 30% 0.274913s 50% 0.274913s 90% 0.274913s 99% 0.274913s
Write: 10% 0.178525s 30% 0.178525s 50% 0.178525s 90% 0.178525s 99% 0.178525s
Write: 10% 0.231685s 30% 0.231685s 50% 0.231685s 90% 0.231685s 99% 0.231685s
Write: 10% 0.188544s 30% 0.188544s 50% 0.188544s 90% 0.188544s 99% 0.188544s
Write: 10% 0.173384s 30% 0.173384s 50% 0.173384s 90% 0.173384s 99% 0.173384s
Write: 10% 0.207122s 30% 0.207122s 50% 0.207122s 90% 0.207122s 99% 0.207122s
Write: 10% 0.359148s 30% 0.359148s 50% 0.359148s 90% 0.359148s 99% 0.359148s
Write: 10% 0.206998s 30% 0.206998s 50% 0.206998s 90% 0.206998s 99% 0.206998s
Write: 10% 0.184092s 30% 0.184092s 50% 0.184092s 90% 0.184092s 99% 0.184092s
Write: 10% 0.156990s 30% 0.156990s 50% 0.156990s 90% 0.156990s 99% 0.156990s
Write: 10% 0.168597s 30% 0.168597s 50% 0.168597s 90% 0.168597s 99% 0.168597s
Write: 10% 0.144932s 30% 0.144932s 50% 0.144932s 90% 0.144932s 99% 0.144932s
Write: 10% 0.169777s 30% 0.169777s 50% 0.169777s 90% 0.169777s 99% 0.169777s
Write: 10% 0.167871s 30% 0.167871s 50% 0.167871s 90% 0.167871s 99% 0.167871s
Write: 10% 0.151409s 30% 0.151409s 50% 0.151409s 90% 0.151409s 99% 0.151409s
Write: 10% 0.182800s 30% 0.182800s 50% 0.182800s 90% 0.182800s 99% 0.182800s
Write: 10% 0.147449s 30% 0.147449s 50% 0.147449s 90% 0.147449s 99% 0.147449s
Write: 10% 0.152533s 30% 0.152533s 50% 0.152533s 90% 0.152533s 99% 0.152533s
Write: 10% 0.168587s 30% 0.168587s 50% 0.168587s 90% 0.168587s 99% 0.168587s
Write: 10% 0.151729s 30% 0.151729s 50% 0.151729s 90% 0.151729s 99% 0.151729s
Write: 10% 0.160896s 30% 0.160896s 50% 0.160896s 90% 0.160896s 99% 0.160896s
Write: 10% 0.196912s 30% 0.196912s 50% 0.196912s 90% 0.196912s 99% 0.196912s
Write: 10% 0.149273s 30% 0.149273s 50% 0.149273s 90% 0.149273s 99% 0.149273s
Read: 10% 0.994503s 30% 0.994503s 50% 0.994503s 90% 0.994503s 99% 0.994503s
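Each step of this test (only write, read write, write modify, read modify write) launches its workers concurrently, 64 Write records per step in this run, and reports per-worker percentiles once the step finishes. A minimal sketch of such a phase driver with std::thread; the worker count and the shape of the op callback are assumptions for illustration, not the test's real structure:

#include <chrono>
#include <thread>
#include <vector>

// Run one load phase: launch N concurrent workers, each timing its own
// requests, then join before the next phase starts. Each worker keeps a
// private latency vector and it is reported (e.g. via Report above) only
// after the join, so records do not interleave on stdout.
template <typename Op>
std::vector<std::vector<double>> RunPhase(int workers, Op op) {
    std::vector<std::vector<double>> latencies(workers);
    std::vector<std::thread> pool;
    for (int i = 0; i < workers; ++i) {
        pool.emplace_back([&, i] {
            auto start = std::chrono::steady_clock::now();
            op(i);  // one timed request per worker in this sketch
            std::chrono::duration<double> d =
                std::chrono::steady_clock::now() - start;
            latencies[i].push_back(d.count());
        });
    }
    for (auto& t : pool) t.join();
    return latencies;
}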
Step 3. write modify
Write: 10% 0.102326s 30% 0.102326s 50% 0.102326s 90% 0.102326s 99% 0.102326s
Write: 10% 0.198311s 30% 0.198311s 50% 0.198311s 90% 0.198311s 99% 0.198311s
Write: 10% 0.232078s 30% 0.232078s 50% 0.232078s 90% 0.232078s 99% 0.232078s
Write: 10% 0.217780s 30% 0.217780s 50% 0.217780s 90% 0.217780s 99% 0.217780s
Write: 10% 0.121997s 30% 0.121997s 50% 0.121997s 90% 0.121997s 99% 0.121997s
Write: 10% 0.252995s 30% 0.252995s 50% 0.252995s 90% 0.252995s 99% 0.252995s
Write: 10% 0.252995s 30% 0.252995s 50% 0.252995s 90% 0.252995s 99% 0.252995s
Write: 10% 0.270173s 30% 0.270173s 50% 0.270173s 90% 0.270173s 99% 0.270173s
Write: 10% 0.121855s 30% 0.121855s 50% 0.121855s 90% 0.121855s 99% 0.121855s
Write: 10% 0.257154s 30% 0.257154s 50% 0.257154s 90% 0.257154s 99% 0.257154s
Write: 10% 0.245042s 30% 0.245042s 50% 0.245042s 90% 0.245042s 99% 0.245042s
Write: 10% 0.301978s 30% 0.301978s 50% 0.301978s 90% 0.301978s 99% 0.301978s
Write: 10% 0.312578s 30% 0.312578s 50% 0.312578s 90% 0.312578s 99% 0.312578s
Write: 10% 0.268656s 30% 0.268656s 50% 0.268656s 90% 0.268656s 99% 0.268656s
Write: 10% 0.229226s 30% 0.229226s 50% 0.229226s 90% 0.229226s 99% 0.229226s
Write: 10% 0.336194s 30% 0.336194s 50% 0.336194s 90% 0.336194s 99% 0.336194s
Write: 10% 0.331876s 30% 0.331876s 50% 0.331876s 90% 0.331876s 99% 0.331876s
Write: 10% 0.306718s 30% 0.306718s 50% 0.306718s 90% 0.306718s 99% 0.306718s
Write: 10% 0.385562s 30% 0.385562s 50% 0.385562s 90% 0.385562s 99% 0.385562s
Write: 10% 0.352016s 30% 0.352016s 50% 0.352016s 90% 0.352016s 99% 0.352016s
Write: 10% 0.372013s 30% 0.372013s 50% 0.372013s 90% 0.372013s 99% 0.372013s
Write: 10% 0.217634s 30% 0.217634s 50% 0.217634s 90% 0.217634s 99% 0.217634s
Write: 10% 0.300490s 30% 0.300490s 50% 0.300490s 90% 0.300490s 99% 0.300490s
Write: 10% 0.398584s 30% 0.398584s 50% 0.398584s 90% 0.398584s 99% 0.398584s
Write: 10% 0.347043s 30% 0.347043s 50% 0.347043s 90% 0.347043s 99% 0.347043s
Write: 10% 0.312476s 30% 0.312476s 50% 0.312476s 90% 0.312476s 99% 0.312476s
Write: 10% 0.269316s 30% 0.269316s 50% 0.269316s 90% 0.269316s 99% 0.269316s
Write: 10% 0.361020s 30% 0.361020s 50% 0.361020s 90% 0.361020s 99% 0.361020s
Write: 10% 0.301121s 30% 0.301121s 50% 0.301121s 90% 0.301121s 99% 0.301121s
Write: 10% 0.372559s 30% 0.372559s 50% 0.372559s 90% 0.372559s 99% 0.372559s
Write: 10% 0.209622s 30% 0.209622s 50% 0.209622s 90% 0.209622s 99% 0.209622s
Write: 10% 0.392070s 30% 0.392070s 50% 0.392070s 90% 0.392070s 99% 0.392070s
Write: 10% 0.316757s 30% 0.316757s 50% 0.316757s 90% 0.316757s 99% 0.316757s
Write: 10% 0.157875s 30% 0.157875s 50% 0.157875s 90% 0.157875s 99% 0.157875s
Write: 10% 0.363854s 30% 0.363854s 50% 0.363854s 90% 0.363854s 99% 0.363854s
Write: 10% 0.369603s 30% 0.369603s 50% 0.369603s 90% 0.369603s 99% 0.369603s
Write: 10% 0.316251s 30% 0.316251s 50% 0.316251s 90% 0.316251s 99% 0.316251s
Write: 10% 0.419164s 30% 0.419164s 50% 0.419164s 90% 0.419164s 99% 0.419164s
Write: 10% 0.372581s 30% 0.372581s 50% 0.372581s 90% 0.372581s 99% 0.372581s
Write: 10% 0.159899s 30% 0.159899s 50% 0.159899s 90% 0.159899s 99% 0.159899s
Write: 10% 0.368587s 30% 0.368587s 50% 0.368587s 90% 0.368587s 99% 0.368587s
Write: 10% 0.356993s 30% 0.356993s 50% 0.356993s 90% 0.356993s 99% 0.356993s
Write: 10% 0.197311s 30% 0.197311s 50% 0.197311s 90% 0.197311s 99% 0.197311s
Write: 10% 0.411772s 30% 0.411772s 50% 0.411772s 90% 0.411772s 99% 0.411772s
Write: 10% 0.163067s 30% 0.163067s 50% 0.163067s 90% 0.163067s 99% 0.163067s
Write: 10% 0.401479s 30% 0.401479s 50% 0.401479s 90% 0.401479s 99% 0.401479s
Write: 10% 0.186360s 30% 0.186360s 50% 0.186360s 90% 0.186360s 99% 0.186360s
Write: 10% 0.463541s 30% 0.463541s 50% 0.463541s 90% 0.463541s 99% 0.463541s
Write: 10% 0.182377s 30% 0.182377s 50% 0.182377s 90% 0.182377s 99% 0.182377s
Write: 10% 0.189568s 30% 0.189568s 50% 0.189568s 90% 0.189568s 99% 0.189568s
Write: 10% 0.345828s 30% 0.345828s 50% 0.345828s 90% 0.345828s 99% 0.345828s
Write: 10% 0.340914s 30% 0.340914s 50% 0.340914s 90% 0.340914s 99% 0.340914s
Write: 10% 0.264990s 30% 0.264990s 50% 0.264990s 90% 0.264990s 99% 0.264990s
Write: 10% 0.181829s 30% 0.181829s 50% 0.181829s 90% 0.181829s 99% 0.181829s
Write: 10% 0.173069s 30% 0.173069s 50% 0.173069s 90% 0.173069s 99% 0.173069s
Write: 10% 0.159951s 30% 0.159951s 50% 0.159951s 90% 0.159951s 99% 0.159951s
Write: 10% 0.214985s 30% 0.214985s 50% 0.214985s 90% 0.214985s 99% 0.214985s
Write: 10% 0.365432s 30% 0.365432s 50% 0.365432s 90% 0.365432s 99% 0.365432s
Write: 10% 0.378010s 30% 0.378010s 50% 0.378010s 90% 0.378010s 99% 0.378010s
Write: 10% 0.177003s 30% 0.177003s 50% 0.177003s 90% 0.177003s 99% 0.177003s
Write: 10% 0.202971s 30% 0.202971s 50% 0.202971s 90% 0.202971s 99% 0.202971s
Write: 10% 0.337983s 30% 0.337983s 50% 0.337983s 90% 0.337983s 99% 0.337983s
Write: 10% 0.187253s 30% 0.187253s 50% 0.187253s 90% 0.187253s 99% 0.187253s
Write: 10% 0.179823s 30% 0.179823s 50% 0.179823s 90% 0.179823s 99% 0.179823s
Update: 10% 0.105684s 30% 0.105684s 50% 0.157162s 90% 0.157162s 99% 0.157162s
Step 4. read modify write
Write: 10% 0.247954s 30% 0.247954s 50% 0.247954s 90% 0.247954s 99% 0.247954s
Write: 10% 0.311689s 30% 0.311689s 50% 0.311689s 90% 0.311689s 99% 0.311689s
Write: 10% 0.302652s 30% 0.302652s 50% 0.302652s 90% 0.302652s 99% 0.302652s
Write: 10% 0.336528s 30% 0.336528s 50% 0.336528s 90% 0.336528s 99% 0.336528s
Write: 10% 0.340216s 30% 0.340216s 50% 0.340216s 90% 0.340216s 99% 0.340216s
Write: 10% 0.423536s 30% 0.423536s 50% 0.423536s 90% 0.423536s 99% 0.423536s
Write: 10% 0.478910s 30% 0.478910s 50% 0.478910s 90% 0.478910s 99% 0.478910s
Write: 10% 0.400069s 30% 0.400069s 50% 0.400069s 90% 0.400069s 99% 0.400069s
Write: 10% 0.401906s 30% 0.401906s 50% 0.401906s 90% 0.401906s 99% 0.401906s
Write: 10% 0.393108s 30% 0.393108s 50% 0.393108s 90% 0.393108s 99% 0.393108s
Write: 10% 0.457331s 30% 0.457331s 50% 0.457331s 90% 0.457331s 99% 0.457331s
Write: 10% 0.494003s 30% 0.494003s 50% 0.494003s 90% 0.494003s 99% 0.494003s
Write: 10% 0.556412s 30% 0.556412s 50% 0.556412s 90% 0.556412s 99% 0.556412s
Write: 10% 0.509055s 30% 0.509055s 50% 0.509055s 90% 0.509055s 99% 0.509055s
Write: 10% 0.460959s 30% 0.460959s 50% 0.460959s 90% 0.460959s 99% 0.460959s
Write: 10% 0.482310s 30% 0.482310s 50% 0.482310s 90% 0.482310s 99% 0.482310s
Write: 10% 0.619560s 30% 0.619560s 50% 0.619560s 90% 0.619560s 99% 0.619560s
Write: 10% 0.533719s 30% 0.533719s 50% 0.533719s 90% 0.533719s 99% 0.533719s
Write: 10% 0.675213s 30% 0.675213s 50% 0.675213s 90% 0.675213s 99% 0.675213s
Write: 10% 0.477673s 30% 0.477673s 50% 0.477673s 90% 0.477673s 99% 0.477673s
Write: 10% 0.494509s 30% 0.494509s 50% 0.494509s 90% 0.494509s 99% 0.494509s
Write: 10% 0.625093s 30% 0.625093s 50% 0.625093s 90% 0.625093s 99% 0.625093s
Write: 10% 0.567424s 30% 0.567424s 50% 0.567424s 90% 0.567424s 99% 0.567424s
Write: 10% 0.509598s 30% 0.509598s 50% 0.509598s 90% 0.509598s 99% 0.509598s
Write: 10% 0.537067s 30% 0.537067s 50% 0.537067s 90% 0.537067s 99% 0.537067s
Write: 10% 0.573278s 30% 0.573278s 50% 0.573278s 90% 0.573278s 99% 0.573278s
Write: 10% 0.545638s 30% 0.545638s 50% 0.545638s 90% 0.545638s 99% 0.545638s
Write: 10% 0.555408s 30% 0.555408s 50% 0.555408s 90% 0.555408s 99% 0.555408s
Write: 10% 0.582256s 30% 0.582256s 50% 0.582256s 90% 0.582256s 99% 0.582256s
Write: 10% 0.586524s 30% 0.586524s 50% 0.586524s 90% 0.586524s 99% 0.586524s
Write: 10% 0.584041s 30% 0.584041s 50% 0.584041s 90% 0.584041s 99% 0.584041s
Write: 10% 0.600955s 30% 0.600955s 50% 0.600955s 90% 0.600955s 99% 0.600955s
Write: 10% 0.573642s 30% 0.573642s 50% 0.573642s 90% 0.573642s 99% 0.573642s
Write: 10% 0.574203s 30% 0.574203s 50% 0.574203s 90% 0.574203s 99% 0.574203s
Write: 10% 0.608664s 30% 0.608664s 50% 0.608664s 90% 0.608664s 99% 0.608664s
Write: 10% 0.564166s 30% 0.564166s 50% 0.564166s 90% 0.564166s 99% 0.564166s
Write: 10% 0.593512s 30% 0.593512s 50% 0.593512s 90% 0.593512s 99% 0.593512s
Write: 10% 0.520801s 30% 0.520801s 50% 0.520801s 90% 0.520801s 99% 0.520801s
Write: 10% 0.611920s 30% 0.611920s 50% 0.611920s 90% 0.611920s 99% 0.611920s
Write: 10% 0.566878s 30% 0.566878s 50% 0.566878s 90% 0.566878s 99% 0.566878s
Write: 10% 0.568727s 30% 0.568727s 50% 0.568727s 90% 0.568727s 99% 0.568727s
Write: 10% 0.527375s 30% 0.527375s 50% 0.527375s 90% 0.527375s 99% 0.527375s
Write: 10% 0.540825s 30% 0.540825s 50% 0.540825s 90% 0.540825s 99% 0.540825s
Write: 10% 0.577432s 30% 0.577432s 50% 0.577432s 90% 0.577432s 99% 0.577432s
Write: 10% 0.528053s 30% 0.528053s 50% 0.528053s 90% 0.528053s 99% 0.528053s
Write: 10% 0.592292s 30% 0.592292s 50% 0.592292s 90% 0.592292s 99% 0.592292s
Write: 10% 0.532501s 30% 0.532501s 50% 0.532501s 90% 0.532501s 99% 0.532501s
Write: 10% 0.548357s 30% 0.548357s 50% 0.548357s 90% 0.548357s 99% 0.548357s
Write: 10% 0.587931s 30% 0.587931s 50% 0.587931s 90% 0.587931s 99% 0.587931s
Write: 10% 0.549194s 30% 0.549194s 50% 0.549194s 90% 0.549194s 99% 0.549194s
Write: 10% 0.580587s 30% 0.580587s 50% 0.580587s 90% 0.580587s 99% 0.580587s
Write: 10% 0.580109s 30% 0.580109s 50% 0.580109s 90% 0.580109s 99% 0.580109s
Write: 10% 0.517528s 30% 0.517528s 50% 0.517528s 90% 0.517528s 99% 0.517528s
Write: 10% 0.515310s 30% 0.515310s 50% 0.515310s 90% 0.515310s 99% 0.515310s
Write: 10% 0.593301s 30% 0.593301s 50% 0.593301s 90% 0.593301s 99% 0.593301s
Write: 10% 0.575722s 30% 0.575722s 50% 0.575722s 90% 0.575722s 99% 0.575722s
Write: 10% 0.520145s 30% 0.520145s 50% 0.520145s 90% 0.520145s 99% 0.520145s
Write: 10% 0.580523s 30% 0.580523s 50% 0.580523s 90% 0.580523s 99% 0.580523s
Write: 10% 0.560967s 30% 0.560967s 50% 0.560967s 90% 0.560967s 99% 0.560967s
Write: 10% 0.588679s 30% 0.588679s 50% 0.588679s 90% 0.588679s 99% 0.588679s
Write: 10% 0.589802s 30% 0.589802s 50% 0.589802s 90% 0.589802s 99% 0.589802s
Write: 10% 0.584088s 30% 0.584088s 50% 0.584088s 90% 0.584088s 99% 0.584088s
Write: 10% 0.593123s 30% 0.593123s 50% 0.593123s 90% 0.593123s 99% 0.593123s
Write: 10% 0.587988s 30% 0.587988s 50% 0.587988s 90% 0.587988s 99% 0.587988s
Update: 10% 0.087861s 30% 0.087861s 50% 0.549137s 90% 0.549137s 99% 0.549137s
Read: 10% 1.284009s 30% 1.284009s 50% 1.284009s 90% 1.284009s 99% 1.284009s
|97.6%| [TM] {RESULT} ydb/tests/olap/high_load/unittest
>> TIndexProcesorTests::TestReindexSingleQueue [GOOD]
>> TIndexProcesorTests::TestDeletedQueueNotReindexed
>> TopicSessionTests::TwoSessionWithoutPredicate [GOOD]
>> TabletService_ExecuteMiniKQL::OnlyAdminsAllowed [GOOD]
>> TabletService_Restart::Basics
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::SplitBothWithReboots[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID
72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [1:135:2157] sender: [1:136:2058] recipient: [1:111:2142] 2025-03-31T16:29:30.515627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:29:30.515742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:29:30.515794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:29:30.515836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:29:30.515894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:29:30.515930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:29:30.516021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:29:30.516147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:29:30.516519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:29:30.613812Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-03-31T16:29:30.613895Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:169:2058] recipient: [1:15:2062] 2025-03-31T16:29:30.641298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:29:30.641496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:29:30.641692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:29:30.652829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 
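Records in these traces share a fixed head: an RFC 3339 timestamp with a Z suffix, then "node <id>", then ":<COMPONENT> <SEVERITY>:". Under that assumption, a small filter that extracts the WARN, ERROR and CRIT record heads from a run log; this is a sketch, not a tool shipped with ya:

#include <iostream>
#include <regex>
#include <string>

// Print the head of every WARN/ERROR/CRIT record on stdin. Assumes the
// record layout seen in this log, with component tags like BS_PDISK or
// FLAT_TX_SCHEMESHARD, e.g.:
//   2025-03-31T16:36:53.603812Z node 4 :BS_PDISK CRIT: ...
int main() {
    const std::regex head(
        R"((\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z) node (\d+) :([A-Z_]+) (WARN|ERROR|CRIT):)");
    std::string line;
    while (std::getline(std::cin, line)) {
        // A single physical line may hold several records; report them all.
        for (auto it = std::sregex_iterator(line.begin(), line.end(), head);
             it != std::sregex_iterator(); ++it) {
            const std::smatch& m = *it;
            std::cout << m[1] << " node " << m[2] << " " << m[3]
                      << " " << m[4] << "\n";
        }
    }
}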
2025-03-31T16:29:30.655070Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:29:30.659500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:30.659759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:29:30.680584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:30.697952Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:29:30.698061Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:30.698236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:29:30.698294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:29:30.698356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:29:30.698539Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:210:2058] recipient: [1:208:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:210:2058] recipient: [1:208:2209] Leader for TabletID 72057594037968897 is [1:214:2213] sender: [1:215:2058] recipient: [1:208:2209] 2025-03-31T16:29:30.715335Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:235:2058] recipient: [1:15:2062] 2025-03-31T16:29:30.937322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:29:30.937611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:30.937889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:29:30.938152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:29:30.938225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:30.946608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:30.946784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:29:30.947011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:30.947117Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:29:30.947170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:29:30.947224Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:29:30.956168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:30.956258Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:29:30.956302Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:29:30.964110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:30.964186Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:30.964233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:30.964285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:29:30.982485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:29:30.988420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:29:30.988659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:135:2157] sender: [1:250:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:29:30.989826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:30.989971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 135 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:29:30.990036Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:30.990337Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:29:30.990402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:30.990603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:29:30.990712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:29:30.993372Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:29:30.993438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:29:30.993615Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:30.993657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:202:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:29:30.993966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:30.994017Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:29:30.994109Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:29:30.994144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:29:30.994186Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:29:30.994243Z no ... CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\001\000\004\000\000\0002\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { 
DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:36:55.457043Z node 180 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-31T16:36:55.457359Z node 180 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 333us result status StatusSuccess 2025-03-31T16:36:55.458286Z node 180 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 
536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 50 } } Tuple { } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\002\000\004\000\000\0002\000\000\000\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:36:55.470629Z node 180 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409550][180:1044:2814] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-03-31T16:36:55.470725Z node 180 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409551][180:1045:2814] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-03-31T16:36:55.470800Z node 180 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][180:969:2814] Handle 
NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409550 } 2025-03-31T16:36:55.470875Z node 180 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][180:969:2814] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409551 } 2025-03-31T16:36:55.470995Z node 180 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409550][180:1044:2814] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1743439015439579 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1743439015439579 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2025-03-31T16:36:55.471201Z node 180 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409551][180:1045:2814] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 3 Group: 1743439015439579 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2025-03-31T16:36:55.475283Z node 180 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409550][180:1044:2814] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 2 2025-03-31T16:36:55.475413Z node 180 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][180:969:2814] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409550 } 2025-03-31T16:36:55.476309Z node 180 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409551][180:1045:2814] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3 2025-03-31T16:36:55.476413Z node 180 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][180:969:2814] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409551 } >> TSequence::CreateTableWithDefaultFromSequenceFromSelect [GOOD] >> TSequence::CreateTableWithDefaultFromSequenceBadRequest >> TTxDataShardReshuffleKMeansScan::BadRequest >> TopicSessionTests::SessionWithPredicateAndSessionWithoutPredicate >> test_liveness_wardens.py::TestLivenessWarden::test_hive_liveness_warden_reports_issues [GOOD] >> test_liveness_wardens.py::TestLivenessWarden::test_scheme_shard_has_no_in_flight_transactions >> KesusProxyTest::ReconnectsWithKesusWhenNotConnected >> KesusProxyTest::ReconnectsWithKesusWhenNotConnected [GOOD] >> KesusProxyTest::ReconnectsWithKesusWhenPipeDestroyed [GOOD] >> KesusProxyTest::RejectsNotCanonizedResourceName [GOOD] >> KesusProxyTest::SubscribesOnResource [GOOD] >> KesusProxyTest::SubscribesOnResourcesWhenReconnected [GOOD] >> KesusProxyTest::ProxyRequestDuringDisconnection >> KesusProxyTest::ProxyRequestDuringDisconnection 
[GOOD] >> KesusProxyTest::DeactivateSessionWhenResourceClosed [GOOD] >> KesusProxyTest::SendsProxySessionOnceOnSuccess [GOOD] >> KesusProxyTest::SendsProxySessionOnceOnFailure [GOOD] >> KesusProxyTest::AnswersWithSessionWhenResourceIsAlreadyKnown [GOOD] >> KesusProxyTest::SendsBrokenUpdateWhenKesusPassesError [GOOD] >> KesusProxyTest::AllocatesResourceWithKesus [GOOD] >> KesusProxyTest::DisconnectsDuringActiveSession [GOOD] >> KesusProxyTest::AllocatesResourceOffline >> KesusProxyTest::AllocatesResourceOffline [GOOD] >> KesusProxyTest::ConnectsDuringOfflineAllocation [GOOD] >> KesusResourceAllocationStatisticsTest::ReturnsDefaultValues [GOOD] >> KesusResourceAllocationStatisticsTest::CalculatesAverage [GOOD] >> KesusResourceAllocationStatisticsTest::TakesBestStat [GOOD] >> TQuoterServiceTest::StaticRateLimiter >> TGRpcRateLimiterTest::AcquireResourceManyRequiredGrpcApi [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyRequiredActorApi >> DataShardBackgroundCompaction::ShouldNotCompactWhenCopyTable [GOOD] >> DataShardBackgroundCompaction::ShouldNotCompactEmptyTable >> Coordinator::ReadStepSubscribe >> KeyValueGRPCService::SimpleExecuteTransaction [GOOD] >> KeyValueGRPCService::SimpleExecuteTransactionWithWrongGeneration >> TIndexProcesorTests::TestDeletedQueueNotReindexed [GOOD] >> TIndexProcesorTests::TestManyMessages >> test.py::test[solomon-DownsamplingValidSettings-default.txt] [GOOD] >> test.py::test[solomon-HistResponse-default.txt] >> TTxDataShardReshuffleKMeansScan::BadRequest [GOOD] >> TTxDataShardReshuffleKMeansScan::MainToPosting >> test_postgres.py::TestPostgresSuite::test_postgres_suite[float8] [GOOD] >> test_alter_compression.py::TestAlterCompression::test[alter_compression] >> KqpTpch::Query01 [GOOD] >> KqpTpch::Query02 >> DataShardFollowers::FollowerAfterSysCompaction [GOOD] >> DataShardFollowers::FollowerAfterDataCompaction >> TabletService_Restart::Basics [GOOD] >> TabletService_Restart::OnlyAdminsAllowed >> TSequence::CreateTableWithDefaultFromSequenceBadRequest [GOOD] >> DataShardReassign::AutoReassignOnYellowFlag >> KqpQueryService::DdlColumnTable [GOOD] >> KqpQueryService::DdlCache >> TQuoterServiceTest::StaticRateLimiter [GOOD] >> TQuoterServiceTest::StaticMultipleAndResources >> test_liveness_wardens.py::TestLivenessWarden::test_scheme_shard_has_no_in_flight_transactions [GOOD] >> TIndexProcesorTests::TestManyMessages [GOOD] >> TIndexProcesorTests::TestOver1000Queues >> DataShardBackgroundCompaction::ShouldNotCompactEmptyTable [GOOD] >> DataShardBackgroundCompaction::ShouldNotCompactSecondTime ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_sequence/unittest >> TSequence::CreateTableWithDefaultFromSequenceBadRequest [GOOD] Test command err: 2025-03-31T16:36:42.453008Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:36:42.453351Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:36:42.453581Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000682/r3tmp/tmpeRv58P/pdisk_1.dat 2025-03-31T16:36:42.972330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:36:43.016619Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:36:43.061009Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:36:43.061714Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:36:43.074755Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:36:43.177494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:36:43.617852Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:770:2642], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:43.618621Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:779:2647], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:43.618720Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:43.627415Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:36:43.793742Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:784:2650], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:36:43.874278Z node 1 :TX_PROXY ERROR: Actor# [1:858:2693] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:36:44.754986Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqpgzzyxbr43z473jz7s683g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTcwMWQyMTQtODYzM2MyMTEtM2U2NjE1YjMtZTVhNTk2YzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:36:44.972804Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqph015wenqwwenz105qky8c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDZlYmExOTgtOWZlYjI2MmYtZGViYTBmYWEtODI2MTI3MDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:36:45.148732Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqph01as377fzr0t7ma5qj0c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzBkOGJkNGMtMWY5ZjkxMWItOGZmOTQxMjQtNDU4ZWI3MGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:36:45.406854Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqph01ga0x8pa3qdrz8d90bw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzMwNTU5YjAtNzA2NmQ4YzYtZDQzMjExNWItZmVmMWZkMjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { int64_value: 1 } items { uint32_value: 1 } }, { items { int64_value: 2 } items { uint32_value: 2 } }, { items { int64_value: 3 } items { uint32_value: 3 } }, { items { int64_value: 4 } items { uint32_value: 4 } }, { items { int64_value: 5 } items { uint32_value: 5 } }, { items { int64_value: 6 } items { uint32_value: 6 } }, { items { int64_value: 7 } items { uint32_value: 7 } }, { items { int64_value: 8 } items { uint32_value: 8 } }, { items { int64_value: 9 } items { uint32_value: 9 } } 2025-03-31T16:36:48.725334Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:101:2147], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:36:48.725648Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:36:48.725726Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000682/r3tmp/tmpn260LB/pdisk_1.dat 2025-03-31T16:36:48.995450Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:36:49.023466Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:36:49.060775Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:36:49.060903Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:36:49.072513Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:36:49.155345Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:36:49.496997Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:822:2681], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:49.497092Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:832:2686], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:49.497166Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:49.502626Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:36:49.664381Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:836:2689], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:36:49.700110Z node 2 :TX_PROXY ERROR: Actor# [2:914:2736] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:36:50.226441Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqph05pq7fbwf6kr2dbbycwa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjM1ZjU1NGItOTY1MDExNDYtNTcxYmZhYWYtOWYzYjI1N2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:36:50.258964Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqph05pq7fbwf6kr2dbbycwa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjM1ZjU1NGItOTY1MDExNDYtNTcxYmZhYWYtOWYzYjI1N2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:36:50.277683Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqph05pq7fbwf6kr2dbbycwa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjM1ZjU1NGItOTY1MDExNDYtNTcxYmZhYWYtOWYzYjI1N2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:36:50.694667Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqph06gqep6jnt6rj1a819e1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTAwMjAzZC0xNzI1NGJkNi0xYjE1YWI5NC03MzVjNGY2MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:36:50.740122Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jqph06gqep6jnt6rj1a819e1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTAwMjAzZC0xNzI1NGJkNi0xYjE1YWI5NC03MzVjNGY2MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:36:50.748788Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jqph06gqep6jnt6rj1a819e1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTAwMjAzZC0xNzI1NGJkNi0xYjE1YWI5NC03MzVjNGY2MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:36:50.753654Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jqph06gqep6jnt6rj1a819e1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTAwMjAzZC0xNzI1NGJkNi0xYjE1YWI5NC03MzVjNGY2MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:36:51.243769Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jqph06yw2r47r0rjsbxe5148, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWE5NWRmMC0xNTVmYzM5ZC0zMGQ0ZjA1NC02ZThmOTgxNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:36:51.284006Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jqph06yw2r47r0rjsbxe5148, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWE5NWRmMC0xNTVmYzM5ZC0zMGQ0ZjA1NC02ZThmOTgxNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:36:51.291029Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715669. 
Ctx: { TraceId: 01jqph06yw2r47r0rjsbxe5148, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWE5NWRmMC0xNTVmYzM5ZC0zMGQ0ZjA1NC02ZThmOTgxNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Ro ... 1vd1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmYyMGNjZS0zOGMwMzBkYS1mNmJiOWNlNS0yNmVhNWJhMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { int64_value: 1 } items { uint32_value: 1 } }, { items { int64_value: 2 } items { uint32_value: 2 } }, { items { int64_value: 3 } items { uint32_value: 3 } }, { items { int64_value: 4 } items { uint32_value: 4 } }, { items { int64_value: 5 } items { uint32_value: 5 } }, { items { int64_value: 6 } items { uint32_value: 6 } }, { items { int64_value: 7 } items { uint32_value: 7 } }, { items { int64_value: 8 } items { uint32_value: 8 } }, { items { int64_value: 9 } items { uint32_value: 9 } } 2025-03-31T16:36:51.580918Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01jqph07m66wjr403b7wss201p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDllMDVmZjctYzE0ODI0NWYtNzk4MDAyYWYtODliODFjMzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { int64_value: 1 } items { uint32_value: 1 } }, { items { int64_value: 2 } items { uint32_value: 2 } }, { items { int64_value: 3 } items { uint32_value: 3 } }, { items { int64_value: 4 } items { uint32_value: 4 } }, { items { int64_value: 5 } items { uint32_value: 5 } }, { items { int64_value: 6 } items { uint32_value: 6 } }, { items { int64_value: 7 } items { uint32_value: 7 } }, { items { int64_value: 8 } items { uint32_value: 8 } }, { items { int64_value: 9 } items { uint32_value: 9 } } 2025-03-31T16:36:55.166249Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:304:2347], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:36:55.166470Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:36:55.166546Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000682/r3tmp/tmpCFztvR/pdisk_1.dat 2025-03-31T16:36:55.449510Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:36:55.481877Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:36:55.519375Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:36:55.519525Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:36:55.531130Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:36:55.612109Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:36:55.909920Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:770:2642], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:55.910085Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:781:2647], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:55.910174Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:55.916142Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:36:56.081541Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:784:2650], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:36:56.115946Z node 3 :TX_PROXY ERROR: Actor# [3:858:2693] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:36:56.225744Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqph0bz3f35wr6kqf6cn8wv1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YzkxNmU2ODItMzQ1ZmJiYzYtZDUyNjYzMTctZTE3MGZmNTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:36:56.425897Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqph0ca70tn4bq5vgxf70at7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YTk5YmIwNzYtOGRkMjUxZjMtNmE3M2MwMjctOWI4ODQ1M2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { int64_value: 1 } items { uint32_value: 303 } } 2025-03-31T16:36:56.574519Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqph0cfk8kxhtezfzhzq22yf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MmE3NGYzNzctY2FjNGNjOWMtNjY5ZjJlOTYtOTVlMGE5NjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:36:56.756857Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqph0cn3958y10vbw2117mxb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZjAxNzVhZTYtMjM3ZjM4NTgtMjk3N2JkMGYtNDdjMWMyMWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { int64_value: 1 } items { uint32_value: 303 } }, { items { int64_value: 2 } items { uint32_value: 303 } } 2025-03-31T16:36:56.928412Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jqph0csy657h58ccw3587f1k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZDAwNDkxZjItNWI5ZDc4MTEtNzRjMDljMzItZmFkYTNjODU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:36:57.100139Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jqph0d063551af5k6mqgb8t7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NGY3MWY0ZDQtZTIyNmYyMDctMmViMGRkNDctZGEwNzA5YWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { int64_value: 1 } items { uint32_value: 303 } }, { items { int64_value: 2 } items { uint32_value: 303 } }, { items { int64_value: 3 } items { uint32_value: 303 } } 2025-03-31T16:37:00.952683Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:37:00.952911Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:37:00.953052Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000682/r3tmp/tmpY96JyJ/pdisk_1.dat 2025-03-31T16:37:01.296482Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:37:01.327334Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:37:01.364406Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:37:01.364567Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:37:01.376252Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:37:01.458825Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:37:01.765079Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:770:2642], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:37:01.765214Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:780:2647], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:37:01.765302Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:37:01.771523Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:37:01.938438Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:784:2650], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:37:01.974914Z node 4 :TX_PROXY ERROR: Actor# [4:858:2693] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:37:02.068474Z node 4 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [4:868:2702], status: BAD_REQUEST, issues:
: Error: Execution, code: 1060
:1:98: Error: Key columns are not specified., code: 2017
: Error: Execution, code: 1060
:1:98: Error: Key columns are not specified., code: 2017 2025-03-31T16:37:02.071100Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZjNlZjNkYmEtYTYzNTk3ZC1jYmVjYmY3Yy00ZTAyNGUxZA==, ActorId: [4:767:2639], ActorState: ExecuteState, TraceId: 01jqph0hp2b2bvh0nf3g9dk0yx, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: 2025-03-31T16:37:02.116051Z node 4 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [4:890:2718], status: BAD_REQUEST, issues:
: Error: Execution, code: 1060
:1:103: Error: Key columns are not specified., code: 2017
: Error: Execution, code: 1060
:1:103: Error: Key columns are not specified., code: 2017 2025-03-31T16:37:02.118336Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZGE3ZmFjOGEtNDI4ZWQwN2ItY2UyYWZkNTEtYWFjMTU2NzI=, ActorId: [4:882:2710], ActorState: ExecuteState, TraceId: 01jqph0hzv2r5h8sxdpkb8q9cy, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: |97.6%| [TM] {RESULT} ydb/core/tx/datashard/ut_sequence/unittest >> AttributesMD5Test::AmazonSampleWithString [GOOD] >> AttributesMD5Test::AmazonSampleWithBinary [GOOD] >> InflyTest::AddMessage [GOOD] >> InflyTest::DeleteMessage [GOOD] >> InflyTest::ChangeMesageVisibility [GOOD] >> InflyTest::ReceiveMessages [GOOD] >> InflyTest::DeleteReceivedMessage [GOOD] >> MessageDelayStatsTest::All [GOOD] >> MessageDelayStatsTest::BigTimeDiff [GOOD] >> MessageDelayStatsTest::MaxMessageDelay [GOOD] >> Metering::BillingRecords >> KeyValueGRPCService::SimpleExecuteTransactionWithWrongGeneration [GOOD] >> KeyValueGRPCService::SimpleRenameUnexistedKey >> test_alloc_default.py::TestAlloc::test_default_limits[kikimr0] [GOOD] >> TopicSessionTests::SessionWithPredicateAndSessionWithoutPredicate [GOOD] >> TopicSessionTests::SecondSessionWithoutOffsetsAfterSessionConnected >> TGRpcRateLimiterTest::AcquireResourceManyRequiredActorApi [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyRequiredGrpcApiWithCancelAfter >> TQuoterServiceTest::StaticMultipleAndResources [GOOD] >> TQuoterServiceTest::StaticDeadlines >> TTxDataShardReshuffleKMeansScan::MainToPosting [GOOD] >> TTxDataShardReshuffleKMeansScan::MainToBuild >> test.py::test[solomon-HistResponse-default.txt] [GOOD] >> test.py::test[solomon-InvalidProject-] >> DataShardStats::OneChannelStatsCorrect [GOOD] >> DataShardStats::MultipleChannelsStatsCorrect >> TSentinelTests::PDiskRackGuardHalfRack [GOOD] >> TSentinelTests::PDiskRackGuardFullRack >> TabletService_Restart::OnlyAdminsAllowed [GOOD] >> DataShardBackgroundCompaction::ShouldNotCompactSecondTime [GOOD] >> DataShardFollowers::FollowerAfterDataCompaction [GOOD] >> DataShardFollowers::FollowerDuringSysPartSwitch >> KeyValueGRPCService::SimpleRenameUnexistedKey [GOOD] >> KeyValueGRPCService::SimpleConcatUnexistedKey ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/tablet/ut/unittest >> TabletService_Restart::OnlyAdminsAllowed [GOOD] Test command err: 2025-03-31T16:36:25.563276Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:36:25.563522Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:36:25.563696Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000641/r3tmp/tmpqg9rrH/pdisk_1.dat 2025-03-31T16:36:26.044209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:36:26.091349Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:36:26.135142Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:36:26.135710Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:36:26.148531Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected ... reading schema ... changing schema (dry run) ... reading schema ... changing schema ... reading schema 2025-03-31T16:36:30.723153Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:101:2147], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:36:30.723544Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:36:30.723638Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000641/r3tmp/tmp5fOSKR/pdisk_1.dat 2025-03-31T16:36:31.029604Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:36:31.060779Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:36:31.097858Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:36:31.097980Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:36:31.109755Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected ... reading schema (without token) ... reading schema (non-admin token) ... reading schema (admin token) 2025-03-31T16:36:34.973756Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:304:2347], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:36:34.973983Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:36:34.974077Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000641/r3tmp/tmpVn9CL7/pdisk_1.dat 2025-03-31T16:36:35.293073Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:36:35.324362Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:36:35.408921Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:36:35.409072Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:36:35.420831Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:36:39.134850Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:36:39.135110Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:36:39.135271Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000641/r3tmp/tmp19EOUA/pdisk_1.dat 2025-03-31T16:36:39.491515Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:36:39.522868Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:36:39.559564Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:36:39.559676Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:36:39.571175Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:36:43.149928Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:36:43.150151Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:36:43.150339Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000641/r3tmp/tmp48bHRB/pdisk_1.dat 2025-03-31T16:36:43.390645Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:36:43.417985Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:36:43.453841Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:36:43.453975Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:36:43.465568Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:36:47.099173Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:304:2347], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:36:47.099346Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:36:47.099404Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000641/r3tmp/tmpodXg2E/pdisk_1.dat 2025-03-31T16:36:47.428802Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:36:47.461716Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:36:47.499134Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:36:47.499295Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:36:47.513712Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:36:51.625355Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:36:51.625680Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:36:51.625861Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000641/r3tmp/tmpdaXJwy/pdisk_1.dat 2025-03-31T16:36:51.969039Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:36:52.003939Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:36:52.042299Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:36:52.042453Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:36:52.054108Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:36:56.411686Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:36:56.411984Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:36:56.412137Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000641/r3tmp/tmpna1eiK/pdisk_1.dat 2025-03-31T16:36:56.712786Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:36:56.752269Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:36:56.789908Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:36:56.790047Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:36:56.801797Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:37:01.165622Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:37:01.166010Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:37:01.166131Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000641/r3tmp/tmpJWRYID/pdisk_1.dat 2025-03-31T16:37:01.525458Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:37:01.557715Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:37:01.596350Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:37:01.596492Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:37:01.608112Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected ... restarting tablet 72057594046644480 2025-03-31T16:37:01.749398Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:37:06.411815Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [10:312:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:37:06.412092Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:37:06.412296Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000641/r3tmp/tmp6XxyU7/pdisk_1.dat 2025-03-31T16:37:06.725711Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:37:06.758886Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:37:06.801871Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:37:06.802082Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:37:06.814084Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected ... restarting tablet 72057594046644480 (without token) ... restarting tablet 72057594046644480 (non-admin token) ... restarting tablet 72057594046644480 (admin token) 2025-03-31T16:37:07.207164Z node 10 :IMPORT WARN: Table profiles were not loaded |97.6%| [TM] {RESULT} ydb/core/grpc_services/tablet/ut/unittest >> KqpQueryService::DdlCache [GOOD] >> KqpQueryService::DdlExecuteScript ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_background_compaction/unittest >> DataShardBackgroundCompaction::ShouldNotCompactSecondTime [GOOD] Test command err: 2025-03-31T16:36:47.135353Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:36:47.135697Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:36:47.135890Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0007c0/r3tmp/tmpSfTAWx/pdisk_1.dat 2025-03-31T16:36:47.664842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:36:47.729050Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:36:47.775919Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-31T16:36:47.778001Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:36:47.778741Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:36:47.778984Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-03-31T16:36:47.791651Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:36:47.878363Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-03-31T16:36:47.878456Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-03-31T16:36:47.879960Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-03-31T16:36:47.998805Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-03-31T16:36:47.998920Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T16:36:47.999614Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-03-31T16:36:47.999757Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:36:48.000144Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:36:48.000428Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T16:36:48.000622Z node 1 :TX_PROXY DEBUG: Actor# 
[1:641:2549] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-03-31T16:36:48.000892Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvClientConnected 2025-03-31T16:36:48.004029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:36:48.006742Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-03-31T16:36:48.006841Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-03-31T16:36:48.059124Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:36:48.060365Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:36:48.067832Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-31T16:36:48.068172Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:36:48.134928Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:36:48.135849Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:36:48.135986Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:36:48.137861Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:36:48.137949Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:36:48.138002Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:36:48.139972Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:36:48.140143Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:36:48.140229Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-31T16:36:48.151459Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:36:48.201083Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T16:36:48.202383Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:36:48.202565Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-31T16:36:48.202627Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:36:48.202679Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T16:36:48.202721Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:36:48.202947Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
2025-03-31T16:36:48.203008Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:36:48.204194Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T16:36:48.204315Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T16:36:48.204457Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:36:48.204505Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:36:48.204630Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-31T16:36:48.204681Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-31T16:36:48.204713Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-31T16:36:48.204747Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:36:48.204797Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:36:48.204896Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:36:48.204950Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:36:48.205011Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-31T16:36:48.205126Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-03-31T16:36:48.205173Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T16:36:48.205272Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:36:48.205618Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-31T16:36:48.205681Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T16:36:48.205777Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T16:36:48.205822Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-31T16:36:48.205882Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-31T16:36:48.205921Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-03-31T16:36:48.205954Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-31T16:36:48.206226Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-31T16:36:48.206264Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-31T16:36:48.206296Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-31T16:36:48.206343Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 
72075186224037888 on unit FinishPropose 2025-03-31T16:36:48.206423Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-31T16:36:48.206480Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-31T16:36:48.206536Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-31T16:36:48.206567Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-31T16:36:48.206590Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-31T16:36:48.209640Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-31T16:36:48.209706Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:36:48.223780Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Comple ... :07.678936Z node 5 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-03-31T16:37:07.678974Z node 5 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037888 on unit BuildAndWaitDependencies 2025-03-31T16:37:07.679013Z node 5 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v1000/281474976715657 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-03-31T16:37:07.679108Z node 5 :TX_DATASHARD TRACE: Activated operation [0:2] at 72075186224037888 2025-03-31T16:37:07.679148Z node 5 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037888 is Executed 2025-03-31T16:37:07.679175Z node 5 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-03-31T16:37:07.679200Z node 5 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037888 to execution unit ExecuteWrite 2025-03-31T16:37:07.679228Z node 5 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037888 on unit ExecuteWrite 2025-03-31T16:37:07.679259Z node 5 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888 2025-03-31T16:37:07.679313Z node 5 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v1000/281474976715657 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-03-31T16:37:07.679466Z node 5 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row count=3 2025-03-31T16:37:07.679524Z node 5 :TX_DATASHARD TRACE: add locks to result: 0 2025-03-31T16:37:07.679583Z node 5 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-31T16:37:07.679613Z node 5 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037888 executing on unit ExecuteWrite 2025-03-31T16:37:07.679651Z node 5 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037888 to execution unit FinishProposeWrite 2025-03-31T16:37:07.679687Z node 5 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037888 on unit FinishProposeWrite 2025-03-31T16:37:07.679774Z node 5 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037888 is DelayCompleteNoMoreRestarts 
2025-03-31T16:37:07.679805Z node 5 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037888 executing on unit FinishProposeWrite 2025-03-31T16:37:07.679841Z node 5 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037888 to execution unit CompletedOperations 2025-03-31T16:37:07.679875Z node 5 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037888 on unit CompletedOperations 2025-03-31T16:37:07.679916Z node 5 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037888 is Executed 2025-03-31T16:37:07.679938Z node 5 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037888 executing on unit CompletedOperations 2025-03-31T16:37:07.679962Z node 5 :TX_DATASHARD TRACE: Execution plan for [0:2] at 72075186224037888 has finished 2025-03-31T16:37:07.690845Z node 5 :TX_DATASHARD TRACE: TTxWrite complete: at tablet# 72075186224037888 2025-03-31T16:37:07.690936Z node 5 :TX_DATASHARD TRACE: Complete execution for [0:2] at 72075186224037888 on unit FinishProposeWrite 2025-03-31T16:37:07.690991Z node 5 :TX_DATASHARD TRACE: Propose transaction complete txid 2 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_COMPLETED 2025-03-31T16:37:07.691115Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:37:07.694000Z node 5 :TX_PROXY DEBUG: actor# [5:59:2106] Handle TEvNavigate describe path /Root/table-1 2025-03-31T16:37:07.694138Z node 5 :TX_PROXY DEBUG: Actor# [5:859:2690] HANDLE EvNavigateScheme /Root/table-1 2025-03-31T16:37:07.694639Z node 5 :TX_PROXY DEBUG: Actor# [5:859:2690] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-31T16:37:07.694777Z node 5 :TX_PROXY DEBUG: Actor# [5:859:2690] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/Root/table-1" Options { ShowPrivateTable: true } 2025-03-31T16:37:07.696277Z node 5 :TX_PROXY DEBUG: Actor# [5:859:2690] Handle TEvDescribeSchemeResult Forward to# [5:593:2518] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/table-1" PathDescription { Self { Name: "table-1" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1000 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-1" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" 
ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 Name: "default" } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046644480 2025-03-31T16:37:07.697372Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [5:863:2694], Recipient [5:665:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:37:07.697445Z node 5 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:37:07.697501Z node 5 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [5:862:2693], serverId# [5:863:2694], sessionId# [0:0:0] 2025-03-31T16:37:07.697672Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 269553169, Sender [5:861:2692], Recipient [5:665:2570]: NKikimrTxDataShard.TEvGetInfoRequest 
2025-03-31T16:37:07.698553Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [5:866:2697], Recipient [5:665:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:37:07.698614Z node 5 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:37:07.698659Z node 5 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [5:865:2696], serverId# [5:866:2697], sessionId# [0:0:0] 2025-03-31T16:37:07.698838Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 269553210, Sender [5:864:2695], Recipient [5:665:2570]: NKikimrTxDataShard.TEvCompactTable PathId { OwnerId: 72057594046644480 LocalId: 2 } CompactBorrowed: false 2025-03-31T16:37:07.698979Z node 5 :TX_DATASHARD INFO: Started background compaction# 1 of 72075186224037888 tableId# 2 localTid# 1001, requested from [5:864:2695], partsCount# 0, memtableSize# 728, memtableWaste# 3880, memtableRows# 3 2025-03-31T16:37:07.702321Z node 5 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 1, ts 1970-01-01T00:00:01.530653Z 2025-03-31T16:37:07.702416Z node 5 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 1, front# 1 2025-03-31T16:37:07.702472Z node 5 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001 sending TEvCompactTableResult to# [5:864:2695]pathId# [OwnerId: 72057594046644480, LocalPathId: 2] 2025-03-31T16:37:07.703193Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [5:657:2564], Recipient [5:665:2570]: NKikimr::TEvTablet::TEvFollowerGcApplied 2025-03-31T16:37:07.703691Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [5:873:2703], Recipient [5:665:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:37:07.703757Z node 5 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:37:07.703812Z node 5 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [5:872:2702], serverId# [5:873:2703], sessionId# [0:0:0] 2025-03-31T16:37:07.704007Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 269553210, Sender [5:871:2701], Recipient [5:665:2570]: NKikimrTxDataShard.TEvCompactTable PathId { OwnerId: 72057594046644480 LocalId: 2 } CompactBorrowed: false 2025-03-31T16:37:07.704125Z node 5 :TX_DATASHARD DEBUG: Background compaction of tablet# 72075186224037888 of path# [OwnerId: 72057594046644480, LocalPathId: 2], requested from# [5:871:2701] is not needed |97.6%| [TM] {RESULT} ydb/core/tx/datashard/ut_background_compaction/unittest >> test_query_cache.py::TestQueryCache::test >> TQuoterServiceTest::StaticDeadlines [GOOD] >> QuoterWithKesusTest::ForbidsNotCanonizedQuoterPath >> TTxDataShardReshuffleKMeansScan::MainToBuild [GOOD] >> TTxDataShardReshuffleKMeansScan::BuildToPosting >> Metering::BillingRecords [GOOD] >> Metering::MockedNetClassifierOnly >> test_http_api.py::TestHttpApi::test_simple_analytics_query [GOOD] >> test_http_api.py::TestHttpApi::test_empty_query [GOOD] >> test_http_api.py::TestHttpApi::test_warning >> TGRpcRateLimiterTest::AcquireResourceManyRequiredGrpcApiWithCancelAfter [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyRequiredActorApiWithCancelAfter >> test_alloc_default.py::TestAlloc::test_default_delta[kikimr0] >> KqpTpch::Query02 [GOOD] >> KqpTpch::Query03 >> TopicSessionTests::SecondSessionWithoutOffsetsAfterSessionConnected [GOOD] >> 
DataShardReassign::AutoReassignOnYellowFlag [GOOD] >> test_commit.py::TestCommit::test_commit >> TopicSessionTests::TwoSessionsWithOffsets >> test.py::test[solomon-InvalidProject-] [GOOD] >> test.py::test[solomon-LabelColumns-default.txt] >> test_http_api.py::TestHttpApi::test_warning [GOOD] >> test_http_api.py::TestHttpApi::test_get_unknown_query [GOOD] >> test_http_api.py::TestHttpApi::test_unauthenticated [GOOD] >> test_http_api.py::TestHttpApi::test_create_idempotency ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_reassign/unittest >> DataShardReassign::AutoReassignOnYellowFlag [GOOD] Test command err: 2025-03-31T16:37:06.195504Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:37:06.195846Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:37:06.196093Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000822/r3tmp/tmpYQR3MX/pdisk_1.dat 2025-03-31T16:37:06.735249Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:5} Tx{8, NKikimr::NSchemeShard::TSchemeShard::TTxOperationPropose} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxOperationPropose 2025-03-31T16:37:06.735371Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:5} Tx{8, NKikimr::NSchemeShard::TSchemeShard::TTxOperationPropose} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-31T16:37:06.751784Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:37:06.752855Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:5} Tx{8, NKikimr::NSchemeShard::TSchemeShard::TTxOperationPropose} hope 1 -> done Change{4, redo 987b alter 0b annex 0, ~{ 1, 33, 35, 42, 4 } -{ }, 0 gb} 2025-03-31T16:37:06.752983Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:5} Tx{8, NKikimr::NSchemeShard::TSchemeShard::TTxOperationPropose} release 4194304b of static, Memory{0 dyn 0} 2025-03-31T16:37:06.754279Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:2:5:1:24576:515:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:37:06.754425Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:2:5:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:37:06.754563Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:6} commited cookie 1 for step 5 2025-03-31T16:37:06.767637Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:6} Tx{9, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress 2025-03-31T16:37:06.767738Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:6} Tx{9, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-31T16:37:06.769321Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:6} Tx{9, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} hope 1 -> done Change{5, redo 174b alter 0b annex 0, ~{ 42, 4 } -{ }, 0 gb} 2025-03-31T16:37:06.769427Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:6} Tx{9, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} release 4194304b of static, Memory{0 dyn 0} 2025-03-31T16:37:06.769900Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:2:6:1:24576:129:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:37:06.769994Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:2:6:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:37:06.770121Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:7} commited 
cookie 1 for step 6 2025-03-31T16:37:06.770303Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:7} Tx{10, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress 2025-03-31T16:37:06.770348Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:7} Tx{10, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-31T16:37:06.770547Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:7} Tx{10, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} hope 1 -> done Change{6, redo 174b alter 0b annex 0, ~{ 42, 4 } -{ }, 0 gb} 2025-03-31T16:37:06.770596Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:7} Tx{10, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} release 4194304b of static, Memory{0 dyn 0} 2025-03-31T16:37:06.770880Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:2:7:1:24576:130:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:37:06.771000Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:2:7:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:37:06.771139Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:8} commited cookie 1 for step 7 2025-03-31T16:37:06.771279Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:8} Tx{11, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress 2025-03-31T16:37:06.771324Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:8} Tx{11, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-31T16:37:06.772346Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:8} Tx{11, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} hope 1 -> done Change{7, redo 120b alter 0b annex 0, ~{ 4 } -{ }, 0 gb} 2025-03-31T16:37:06.772429Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:8} Tx{11, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} release 4194304b of static, Memory{0 dyn 0} 2025-03-31T16:37:06.772733Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:2:8:1:24576:89:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:37:06.772815Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:2:8:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:37:06.772893Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:9} commited cookie 1 for step 8 2025-03-31T16:37:06.776117Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:9} Tx{12, NKikimr::NSchemeShard::TSchemeShard::TTxNotifyCompletion} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxNotifyCompletion 2025-03-31T16:37:06.776202Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:9} Tx{12, NKikimr::NSchemeShard::TSchemeShard::TTxNotifyCompletion} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-31T16:37:06.776294Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:9} Tx{12, NKikimr::NSchemeShard::TSchemeShard::TTxNotifyCompletion} hope 1 -> done Change{8, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-03-31T16:37:06.776365Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:9} Tx{12, NKikimr::NSchemeShard::TSchemeShard::TTxNotifyCompletion} release 4194304b of static, Memory{0 
dyn 0} 2025-03-31T16:37:06.787504Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:8} Tx{14, NKikimr::NBsController::TBlobStorageController::TTxRegisterNode} queued, type NKikimr::NBsController::TBlobStorageController::TTxRegisterNode 2025-03-31T16:37:06.787605Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:8} Tx{14, NKikimr::NBsController::TBlobStorageController::TTxRegisterNode} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-31T16:37:06.787972Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:8} Tx{14, NKikimr::NBsController::TBlobStorageController::TTxRegisterNode} hope 1 -> done Change{7, redo 79b alter 0b annex 0, ~{ 2 } -{ }, 0 gb} 2025-03-31T16:37:06.788089Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:8} Tx{14, NKikimr::NBsController::TBlobStorageController::TTxRegisterNode} release 4194304b of static, Memory{0 dyn 0} 2025-03-31T16:37:06.819136Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:37:06.820948Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037936130:2:3} Tx{3, NKikimr::NTenantSlotBroker::TTenantSlotBroker::TTxUpdateConfig} queued, type NKikimr::NTenantSlotBroker::TTenantSlotBroker::TTxUpdateConfig 2025-03-31T16:37:06.821055Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037936130:2:3} Tx{3, NKikimr::NTenantSlotBroker::TTenantSlotBroker::TTxUpdateConfig} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-31T16:37:06.827682Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037936130:2:3} Tx{3, NKikimr::NTenantSlotBroker::TTenantSlotBroker::TTxUpdateConfig} hope 1 -> done Change{2, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-03-31T16:37:06.827817Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037936130:2:3} Tx{3, NKikimr::NTenantSlotBroker::TTenantSlotBroker::TTxUpdateConfig} release 4194304b of static, Memory{0 dyn 0} 2025-03-31T16:37:06.828041Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037936129:2:4} Tx{3, NKikimr::NNodeBroker::TNodeBroker::TTxUpdateConfig} queued, type NKikimr::NNodeBroker::TNodeBroker::TTxUpdateConfig 2025-03-31T16:37:06.828111Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037936129:2:4} Tx{3, NKikimr::NNodeBroker::TNodeBroker::TTxUpdateConfig} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-31T16:37:06.830113Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037936129:2:4} Tx{3, NKikimr::NNodeBroker::TNodeBroker::TTxUpdateConfig} hope 1 -> done Change{3, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-03-31T16:37:06.830220Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037936129:2:4} Tx{3, NKikimr::NNodeBroker::TNodeBroker::TTxUpdateConfig} release 4194304b of static, Memory{0 dyn 0} 2025-03-31T16:37:06.843363Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037932033:2:8:0:0:87:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:37:06.843540Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} commited cookie 1 for step 8 2025-03-31T16:37:06.843686Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{15, NKikimr::NBsController::TBlobStorageController::TTxUpdateNodeDrives} queued, type NKikimr::NBsController::TBlobStorageController::TTxUpdateNodeDrives 2025-03-31T16:37:06.843747Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{15, NKikimr::NBsController::TBlobStorageController::TTxUpdateNodeDrives} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-31T16:37:06.844226Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{15, 
NKikimr::NBsController::TBlobStorageController::TTxUpdateNodeDrives} hope 1 -> done Change{8, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-03-31T16:37:06.844298Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{15, NKikimr::NBsController::TBlobStorageController::TTxUpdateNodeDrives} release 4194304b of static, Memory{0 dyn 0} 2025-03-31T16:37:06.867932Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-31T16:37:06.868084Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046447617:2:4} Tx{3, NKikimr::NTxAllocator::TTxAllocator::TTxReserve} queued, type NKikimr::NTxAllocator::TTxAllocator::TTxReserve 2025-03-31T16:37:06.868139Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046447617:2:4} Tx{3, NKikimr::NTxAllocator::TTxAllocator::TTxReserve} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-31T16:37:06.868359Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046447617:2:4} Tx{3, NKikimr::NTxAllocator::TTxAllocator::TTxReserve} hope 1 -> done Change{3, redo 76b alter 0b annex 0, ~{ 1 } -{ }, 0 gb} 2025-03-31T16:37:06.868441Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046447617:2:4} Tx{3, NKikimr::NTx ... er{72057594046316545:2:26} Tx{24, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} hope 1 -> done Change{20, redo 134b alter 0b annex 0, ~{ 2 } -{ }, 0 gb} 2025-03-31T16:37:10.183626Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:26} Tx{24, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2025-03-31T16:37:10.184013Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:26:1:24576:90:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:37:10.184052Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:26:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:37:10.184131Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:27} commited cookie 1 for step 26 2025-03-31T16:37:10.206934Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037968897:2:10} Tx{17, NKikimr::NHive::TTxUpdateTabletMetrics} queued, type NKikimr::NHive::TTxUpdateTabletMetrics 2025-03-31T16:37:10.207016Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037968897:2:10} Tx{17, NKikimr::NHive::TTxUpdateTabletMetrics} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-31T16:37:10.207204Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037968897:2:10} Tx{17, NKikimr::NHive::TTxUpdateTabletMetrics} hope 1 -> done Change{12, redo 82b alter 0b annex 0, ~{ 4 } -{ }, 0 gb} 2025-03-31T16:37:10.207264Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037968897:2:10} Tx{17, NKikimr::NHive::TTxUpdateTabletMetrics} release 4194304b of static, Memory{0 dyn 0} 2025-03-31T16:37:10.218051Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037968897:2:10:0:0:94:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:37:10.218172Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037968897:2:11} commited cookie 1 for step 10 2025-03-31T16:37:10.347280Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:27} Tx{25, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} queued, type NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep 2025-03-31T16:37:10.347374Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:27} Tx{25, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} took 4194304b of static mem, 
Memory{4194304 dyn 0} 2025-03-31T16:37:10.347554Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:27} Tx{25, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} hope 1 -> done Change{21, redo 134b alter 0b annex 0, ~{ 2 } -{ }, 0 gb} 2025-03-31T16:37:10.347616Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:27} Tx{25, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2025-03-31T16:37:10.348078Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:27:1:24576:90:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:37:10.348188Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:27:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:37:10.348308Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:28} commited cookie 1 for step 27 2025-03-31T16:37:10.487469Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:28} Tx{26, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} queued, type NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep 2025-03-31T16:37:10.487569Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:28} Tx{26, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-31T16:37:10.487759Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:28} Tx{26, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} hope 1 -> done Change{22, redo 134b alter 0b annex 0, ~{ 2 } -{ }, 0 gb} 2025-03-31T16:37:10.487840Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:28} Tx{26, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2025-03-31T16:37:10.488388Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:28:1:24576:89:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:37:10.488498Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:28:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:37:10.488615Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:29} commited cookie 1 for step 28 2025-03-31T16:37:10.640435Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:29} Tx{27, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} queued, type NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep 2025-03-31T16:37:10.640541Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:29} Tx{27, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-31T16:37:10.640685Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:29} Tx{27, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} hope 1 -> done Change{23, redo 134b alter 0b annex 0, ~{ 2 } -{ }, 0 gb} 2025-03-31T16:37:10.640731Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:29} Tx{27, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2025-03-31T16:37:10.641115Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:29:1:24576:90:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:37:10.641170Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:29:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:37:10.641239Z node 1 
:TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:30} commited cookie 1 for step 29 2025-03-31T16:37:10.792214Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:30} Tx{28, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} queued, type NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep 2025-03-31T16:37:10.792309Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:30} Tx{28, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-31T16:37:10.792498Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:30} Tx{28, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} hope 1 -> done Change{24, redo 134b alter 0b annex 0, ~{ 2 } -{ }, 0 gb} 2025-03-31T16:37:10.792567Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:30} Tx{28, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2025-03-31T16:37:10.793033Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:30:1:24576:90:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:37:10.793112Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:30:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:37:10.793230Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:31} commited cookie 1 for step 30 2025-03-31T16:37:10.807635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:37:10.807699Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:37:10.831745Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:10} Tx{18, NKikimr::NBsController::TBlobStorageController::TTxUpdateDiskMetrics} queued, type NKikimr::NBsController::TBlobStorageController::TTxUpdateDiskMetrics 2025-03-31T16:37:10.831850Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:10} Tx{18, NKikimr::NBsController::TBlobStorageController::TTxUpdateDiskMetrics} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-31T16:37:10.831962Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:10} Tx{18, NKikimr::NBsController::TBlobStorageController::TTxUpdateDiskMetrics} hope 1 -> done Change{9, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-03-31T16:37:10.832037Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:10} Tx{18, NKikimr::NBsController::TBlobStorageController::TTxUpdateDiskMetrics} release 4194304b of static, Memory{0 dyn 0} 2025-03-31T16:37:10.926350Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2025-03-31T16:37:10.926427Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2025-03-31T16:37:10.926576Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:15} Tx{19, NKikimr::NDataShard::TDataShard::TTxCleanupTransaction} queued, type NKikimr::NDataShard::TDataShard::TTxCleanupTransaction 2025-03-31T16:37:10.926647Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:15} Tx{19, NKikimr::NDataShard::TDataShard::TTxCleanupTransaction} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-31T16:37:10.926760Z node 1 :TX_DATASHARD TRACE: No cleanup at 72075186224037888 outdated step 15000 last cleanup 0 2025-03-31T16:37:10.926835Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-03-31T16:37:10.926894Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-03-31T16:37:10.926937Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-31T16:37:10.926976Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-31T16:37:10.927107Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:15} Tx{19, NKikimr::NDataShard::TDataShard::TTxCleanupTransaction} hope 1 -> done Change{11, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-03-31T16:37:10.927190Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:15} Tx{19, NKikimr::NDataShard::TDataShard::TTxCleanupTransaction} release 4194304b of static, Memory{0 dyn 0} 2025-03-31T16:37:10.927365Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-03-31T16:37:10.990857Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:31} Tx{29, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} queued, type NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep 2025-03-31T16:37:10.990936Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:31} Tx{29, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-31T16:37:10.991075Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:31} Tx{29, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} hope 1 -> done Change{25, redo 134b alter 0b annex 0, ~{ 2 } -{ }, 0 gb} 2025-03-31T16:37:10.991160Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:31} Tx{29, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2025-03-31T16:37:10.991637Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:31:1:24576:90:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:37:10.991756Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:31:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-31T16:37:10.991884Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:32} commited cookie 1 for step 31 --- Captured TEvCheckBlobstorageStatusResult event --- Waiting for TEvReassignTablet event... 2025-03-31T16:37:11.121332Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:15} CheckYellow current light yellow move channels: 0 1 2 --- Captured TEvReassignTablet event |97.7%| [TM] {RESULT} ydb/core/tx/datashard/ut_reassign/unittest >> test_commit.py::TestCommit::test_commit [GOOD] >> test_timeout.py::TestTimeout::test_timeout >> test_ctas.py::TestYtCtas::test_simple_ctast >> KeyValueGRPCService::SimpleConcatUnexistedKey [GOOD] >> KeyValueGRPCService::SimpleCopyUnexistedKey >> KqpTpch::Query03 [GOOD] >> KqpTpch::Query04 >> AnalyzeDatashard::AnalyzeTwoTables [GOOD] >> TTxDataShardReshuffleKMeansScan::BuildToPosting [GOOD] >> TTxDataShardReshuffleKMeansScan::BuildToBuild >> QuoterWithKesusTest::ForbidsNotCanonizedQuoterPath [GOOD] >> QuoterWithKesusTest::ForbidsNotCanonizedResourcePath ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeDatashard::AnalyzeTwoTables [GOOD] Test command err: 2025-03-31T16:34:27.390009Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:34:27.390363Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:34:27.390471Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ec0/r3tmp/tmpGonnYn/pdisk_1.dat 2025-03-31T16:34:27.791505Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11315, node 1 2025-03-31T16:34:28.058216Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:28.058292Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:28.058336Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:28.058846Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:34:28.061535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:34:28.154824Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:28.155023Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:28.170157Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20217 2025-03-31T16:34:28.729671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:34:32.180618Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-31T16:34:32.219053Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:32.219192Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:32.248183Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:34:32.250804Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:34:32.491225Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:34:32.491733Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:34:32.492346Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:34:32.492481Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:34:32.492721Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:34:32.492812Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:34:32.492882Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:34:32.492983Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:34:32.493057Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:34:32.654844Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:32.654959Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:32.668461Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:34:32.832141Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:34:32.884953Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-31T16:34:32.885059Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-31T16:34:32.919355Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-31T16:34:32.920615Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-31T16:34:32.920789Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-31T16:34:32.920834Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-31T16:34:32.920874Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-31T16:34:32.920919Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-31T16:34:32.920961Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-31T16:34:32.921002Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-31T16:34:32.921426Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-31T16:34:32.961382Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-31T16:34:32.961510Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1872:2596], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-31T16:34:32.974628Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1883:2604] 2025-03-31T16:34:32.982711Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1915:2621] 2025-03-31T16:34:32.982822Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1915:2621], schemeshard id = 72075186224037897 2025-03-31T16:34:32.987473Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-31T16:34:33.011478Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-31T16:34:33.011544Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-31T16:34:33.011617Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-31T16:34:33.027137Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-31T16:34:33.033264Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-31T16:34:33.033387Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-31T16:34:33.244906Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-31T16:34:33.416039Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-31T16:34:33.495041Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-31T16:34:34.582500Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2236:3073], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:34.582667Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:34.601331Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-31T16:34:35.227937Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2541:3121], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:35.228094Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:35.229526Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2546:3125]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:34:35.229770Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-31T16:34:35.229883Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2548:3127] 2025-03-31T16:34:35.229949Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2548:3127] 2025-03-31T16:34:35.230548Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2549:2991] 2025-03-31T16:34:35.230895Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2548:3127], server id = [2:2549:2991], tablet id = 72075186224037894, status = OK 2025-03-31T16:34:35.231158Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:2549:2991], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-03-31T16:34:35.231238Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-03-31T16:34:35.231505Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-31T16:34:35.231606Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2546:3125], StatRequests.size() = 1 2025-03-31T16:34:35.251050Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2553:3131], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:35.251189Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:35.251539Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2558:3136], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:35.257952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-03-31T16:34:35.411341Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-03-31T16:34:35.411461Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-03-31T16:34:35.533139Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2548:3127], schemeshard count = 1 2025-03-31T16:34:36.014993Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreator ... ry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-03-31T16:37:04.764609Z node 2 :TX_PROXY ERROR: Actor# [2:7242:5021] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:37:04.822730Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:7271:5036]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:37:04.822986Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-31T16:37:04.823100Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:7273:5038] 2025-03-31T16:37:04.823195Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:7273:5038] 2025-03-31T16:37:04.823627Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7273:5038], server id = [2:7274:5039], tablet id = 72075186224037894, status = OK 2025-03-31T16:37:04.823701Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7274:5039] 2025-03-31T16:37:04.823780Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:7274:5039], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-03-31T16:37:04.823840Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-03-31T16:37:04.823951Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-31T16:37:04.824040Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:7271:5036], StatRequests.size() = 1 2025-03-31T16:37:04.973543Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MTVlMjdkZWEtZjMxOWI3N2YtOWJjNmYyZWUtY2IxMWRmNTg=, TxId: 2025-03-31T16:37:04.973637Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MTVlMjdkZWEtZjMxOWI3N2YtOWJjNmYyZWUtY2IxMWRmNTg=, TxId: 2025-03-31T16:37:04.974557Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-31T16:37:04.989015Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 5] 2025-03-31T16:37:04.989103Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-03-31T16:37:05.031593Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-03-31T16:37:05.031682Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-03-31T16:37:05.116731Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:7273:5038], schemeshard count = 1 2025-03-31T16:37:06.347546Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-31T16:37:06.347642Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is data table. 2025-03-31T16:37:06.347680Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. 
Skip analyze for datashard table [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-31T16:37:07.666647Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-31T16:37:07.688170Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:37:07.688325Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is data table. 2025-03-31T16:37:07.688366Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-31T16:37:07.688656Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-31T16:37:07.691008Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-03-31T16:37:07.703821Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YzBmNWU3YTUtNDNkZGQ1NGUtZWNjYzEyY2ItMTc5YmNlZjg=, TxId: 2025-03-31T16:37:07.703892Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YzBmNWU3YTUtNDNkZGQ1NGUtZWNjYzEyY2ItMTc5YmNlZjg=, TxId: 2025-03-31T16:37:07.704621Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-31T16:37:07.718707Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-31T16:37:07.718773Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. Don't send TEvAnalyzeResponse. There are pending operations, OperationId operationId , ActorId=[1:3114:3316] 2025-03-31T16:37:09.075195Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-31T16:37:09.075281Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 5] is data table. 2025-03-31T16:37:09.075325Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. Skip analyze for datashard table [OwnerId: 72075186224037897, LocalPathId: 5] 2025-03-31T16:37:10.331496Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-03-31T16:37:10.333119Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-31T16:37:10.333489Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-31T16:37:10.355399Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:37:10.355481Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table. 2025-03-31T16:37:10.355538Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-03-31T16:37:10.355907Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-03-31T16:37:10.358923Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-03-31T16:37:10.371058Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MjMxZGI1YjMtMjc3ZTExNC1jY2RhNGM5YS1hZWNjNGRlNQ==, TxId: 2025-03-31T16:37:10.371185Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MjMxZGI1YjMtMjc3ZTExNC1jY2RhNGM5YS1hZWNjNGRlNQ==, TxId: 2025-03-31T16:37:10.372079Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-31T16:37:10.387653Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-03-31T16:37:10.387719Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-03-31T16:37:11.757042Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-31T16:37:11.757126Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-03-31T16:37:11.757184Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-03-31T16:37:13.069033Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:37:13.069167Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 5] is data table. 2025-03-31T16:37:13.069203Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 5] 2025-03-31T16:37:13.069562Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-31T16:37:13.072119Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-03-31T16:37:13.088627Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=OTdiYzA5NzktMjY0OWIxMDMtMjhmOWU2NjEtYWJkMTMwM2M=, TxId: 2025-03-31T16:37:13.088691Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OTdiYzA5NzktMjY0OWIxMDMtMjhmOWU2NjEtYWJkMTMwM2M=, TxId: 2025-03-31T16:37:13.089126Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-31T16:37:13.104426Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 5] 2025-03-31T16:37:13.104514Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3114:3316] 2025-03-31T16:37:13.105179Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7598:5220]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-03-31T16:37:13.108755Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-31T16:37:13.108823Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-03-31T16:37:13.113717Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-31T16:37:13.113802Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 2 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-03-31T16:37:13.113861Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-03-31T16:37:13.116952Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2025-03-31T16:37:13.117338Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 2 2025-03-31T16:37:13.117735Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:7628:5232]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-03-31T16:37:13.120863Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-03-31T16:37:13.120920Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-03-31T16:37:13.121326Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-03-31T16:37:13.121377Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-03-31T16:37:13.121431Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 5] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-03-31T16:37:13.123384Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 2 ], RowsCount[ 0 ] 2025-03-31T16:37:13.123790Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 >> Coordinator::ReadStepSubscribe [GOOD] >> Coordinator::LastStepSubscribe >> DataShardFollowers::FollowerDuringSysPartSwitch [GOOD] >> DataShardFollowers::FollowerDuringDataPartSwitch >> KqpTpch::Query04 [GOOD] >> KqpTpch::Query05 >> test_http_api.py::TestHttpApi::test_create_idempotency [GOOD] >> test_http_api.py::TestHttpApi::test_stop_idempotency >> test_timeout.py::TestTimeout::test_timeout [GOOD] >> TMemoryController::Counters >> TGRpcRateLimiterTest::AcquireResourceManyRequiredActorApiWithCancelAfter [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyUsedGrpcApi >> test.py::test[solomon-LabelColumns-default.txt] [GOOD] >> test.py::test[solomon-Subquery-default.txt] [SKIPPED] >> test.py::test[solomon-UnknownSetting-] >> KqpQueryService::DdlExecuteScript [GOOD] >> test_query_cache.py::TestQueryCache::test [GOOD] >> DataShardStats::MultipleChannelsStatsCorrect [GOOD] >> DataShardStats::HistogramStatsCorrect >> AnalyzeColumnshard::AnalyzeStatus [GOOD] >> TopicSessionTests::TwoSessionsWithOffsets [GOOD] >> TTxDataShardReshuffleKMeansScan::BuildToBuild [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> 
KqpQueryService::DdlExecuteScript [GOOD] Test command err: Trying to start YDB, gRPC: 63729, MsgBus: 5251 2025-03-31T16:35:07.773978Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488013090445247629:2132];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:07.775675Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001448/r3tmp/tmpgOcoN6/pdisk_1.dat 2025-03-31T16:35:08.227402Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:35:08.271257Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:08.271355Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:08.275142Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63729, node 1 2025-03-31T16:35:08.361791Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:08.361813Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:08.361818Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:08.361921Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5251 TClient is connected to server localhost:5251 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:35:08.905031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:08.933724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-03-31T16:35:09.077676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-31T16:35:09.246667Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:09.332042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:35:11.204766Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013107625118512:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:11.204885Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:11.572457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:35:11.606258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:35:11.643549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:35:11.675839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:35:11.746048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:35:11.786282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:35:11.865776Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013107625119035:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:11.865864Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:11.866073Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013107625119040:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:11.870147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:35:11.883289Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488013107625119042:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:35:11.981665Z node 1 :TX_PROXY ERROR: Actor# [1:7488013107625119098:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:12.777641Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488013090445247629:2132];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:35:12.778052Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:35:12.823595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T16:35:13.089343Z node 1 :TX_PROXY ERROR: Actor# [1:7488013116215054136:3743] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/TestDdl_0\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:13.089456Z node 1 :KQP_GATEWAY ERROR: Unexpected error on scheme request, TxId: 281474976710674, ProxyStatus: ExecComplete, SchemeShardReason: Check failed: path: '/Root/TestDdl_0', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges) 2025-03-31T16:35:13.089669Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWUzZTAwMjQtODNiODdhMGItZWMyZDM4MGEtMzI3MDgzNDY=, ActorId: [1:7488013116215054123:2521], ActorState: ExecuteState, TraceId: 01jqpgx7hdbne7pqbn90hvy70c, Create QueryResponse for error on request, msg: 2025-03-31T16:35:13.122414Z node 1 :TX_PROXY ERROR: Actor# [1:7488013116215054174:3769] txid# 281474976710676, issues: { message: "Check failed: path: \'/Root/TestDdl_0\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:35:13.211038Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037919 not found 2025-03-31T16:35:13.225393Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488013116215054256:2546], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:2:17: Error: At function: KiReadTable!
<main>:2:17: Error: Cannot find table 'db.[/Root/TestDdl_0]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:35:13.226395Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YWVhNjIwZTItZjE2ZGY3YS0xOGJiNWNiNS1hNjhkNDRiMQ==, ActorId: [1:7488013116215054253:2545], ActorState: ExecuteState, TraceId: 01jqpgx7ntfsbwz53wcn43pbpy, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:35:13.247167Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488013116215054268:2551], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:2:29: Error: At function: KiDropTable!
<main>:2:29: Error: Cannot find table 'db.[/Root/TestDdl_0]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:35:13.248432Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjAwNWNkOTEtZWU1MTc1OTgtMWJiOTk2MzYtZjlkZWFjMGM=, ActorId: [1:7488013116215054265:2550], ActorState: ExecuteState, TraceId: 01jqpgx7pj9kycaj62y5qvbaha, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:35:13.278762Z node 1 :TX_PROXY ERROR: Actor# [1:7488013116215054296:3848] txid# 281474976710679, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } 2025-03-31T16:35:13.299441Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488013116215054305:2559], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:2:17: Error: At function: KiReadTable!
<main>:2:17: Error: Cannot find table 'db.[/Root/TestDdl_0]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:35:13.300082Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NzZmMjNhMDEtMjUzOTJmNzEtZGVkODMyNTUtZjg4MjMwZjA=, ActorId: [1:7488013116215054303:2558], Actor ... elf is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:37:07.100869Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:37:07.149112Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013604276019545:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:37:07.149221Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:37:07.149305Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013604276019550:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:37:07.153468Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:37:07.165342Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488013604276019552:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:37:07.266136Z node 3 :TX_PROXY ERROR: Actor# [3:7488013604276019608:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:37:07.966609Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488013582801180788:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:37:07.966691Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:37:08.181358Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:37:08.302016Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037919 not found 2025-03-31T16:37:08.329972Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 11296, MsgBus: 26033 2025-03-31T16:37:09.438831Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488013616220025242:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:37:09.438909Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/001448/r3tmp/tmp5AqfvK/pdisk_1.dat 2025-03-31T16:37:09.616202Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:37:09.634671Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:37:09.634794Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:37:09.636283Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11296, node 4 2025-03-31T16:37:09.688252Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:37:09.688278Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:37:09.688287Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:37:09.688445Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26033 TClient is connected to server localhost:26033 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:37:10.247827Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:37:10.265759Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:37:10.324752Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:37:10.551907Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:37:10.655717Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:37:13.298454Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488013633399896174:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:37:13.298559Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:37:13.360596Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:37:13.399817Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:37:13.441374Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:37:13.495120Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:37:13.543744Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:37:13.592554Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:37:13.679581Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488013633399896693:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:37:13.679737Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:37:13.682546Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488013633399896698:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:37:13.688802Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:37:13.703543Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7488013633399896700:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:37:13.800198Z node 4 :TX_PROXY ERROR: Actor# [4:7488013633399896756:3451] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:37:14.439422Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7488013616220025242:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:37:14.439517Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:37:14.902160Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-31T16:37:14.904527Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-31T16:37:14.907616Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-31T16:37:15.268640Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 >> KeyValueGRPCService::SimpleCopyUnexistedKey [GOOD] >> KeyValueGRPCService::SimpleWriteRead >> TopicSessionTests::BadDataSessionError >> QuoterWithKesusTest::ForbidsNotCanonizedResourcePath [GOOD] >> QuoterWithKesusTest::HandlesNonExistentResource >> TTxDataShardPrefixKMeansScan::BadRequest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeStatus [GOOD] Test command err: 2025-03-31T16:34:50.430292Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:34:50.430683Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:34:50.430832Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000e92/r3tmp/tmpyvvXlE/pdisk_1.dat 2025-03-31T16:34:50.887414Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25452, node 1 2025-03-31T16:34:51.159573Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:51.159648Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:51.159683Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:51.162155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:34:51.162596Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:34:51.249675Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:51.249781Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:51.263967Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30952 2025-03-31T16:34:51.879648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:34:55.325870Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-31T16:34:55.361647Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:55.361755Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:55.401158Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:34:55.403294Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:34:55.650576Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:34:55.651473Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:34:55.652175Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:34:55.652332Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:34:55.652419Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:34:55.652685Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:34:55.652778Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:34:55.652891Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:34:55.652975Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:34:55.830443Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:55.830572Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:55.844614Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:34:56.003437Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:34:56.056855Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-31T16:34:56.057004Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-31T16:34:56.090319Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-31T16:34:56.091142Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-31T16:34:56.091450Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-31T16:34:56.091516Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-31T16:34:56.091568Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-31T16:34:56.091621Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-31T16:34:56.091670Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-31T16:34:56.091733Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-31T16:34:56.092447Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-31T16:34:56.121842Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-31T16:34:56.121955Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-31T16:34:56.131321Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1885:2609] 2025-03-31T16:34:56.139487Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1926:2626] 2025-03-31T16:34:56.139998Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1926:2626], schemeshard id = 72075186224037897 2025-03-31T16:34:56.147241Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-31T16:34:56.217568Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-31T16:34:56.217638Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-31T16:34:56.217720Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-31T16:34:56.230745Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-31T16:34:56.244074Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-31T16:34:56.244243Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-31T16:34:56.431790Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-31T16:34:56.606262Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-31T16:34:56.706561Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-31T16:34:57.838356Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2240:3072], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:57.838526Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:57.859910Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-31T16:34:58.029109Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2851];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:34:58.029347Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2851];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:34:58.029683Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2851];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:34:58.029832Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2851];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:34:58.029966Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2851];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:34:58.030109Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2851];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:34:58.030234Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2851];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:34:58.030371Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2851];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:34:58.030502Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2851];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:34:58.030679Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2851];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:34:58.030809Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2851];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:34:58.030942Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2851];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:34:58.063663Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:34:58.063773Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;desc ... ase 2025-03-31T16:37:14.533595Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-03-31T16:37:14.538012Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6987:5167], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:37:14.538131Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6997:5172], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:37:14.538234Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:37:14.552523Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897 2025-03-31T16:37:14.614882Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7001:5175], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-03-31T16:37:14.798982Z node 2 :TX_PROXY ERROR: Actor# [2:7094:5221] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:37:14.886729Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:7123:5236]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:37:14.887034Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-31T16:37:14.887191Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:7125:5238] 2025-03-31T16:37:14.887259Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:7125:5238] 2025-03-31T16:37:14.887673Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7126:5239] 2025-03-31T16:37:14.887824Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:7126:5239], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-03-31T16:37:14.887905Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-03-31T16:37:14.888101Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7125:5238], server id = [2:7126:5239], tablet id = 72075186224037894, status = OK 2025-03-31T16:37:14.888195Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-31T16:37:14.888286Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:7123:5236], StatRequests.size() = 1 2025-03-31T16:37:15.048136Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=Yjk4ZTg0ZGQtOTk2MjM5ZDYtMTgyYjQ1Mi00OGMwYTFhMw==, TxId: 2025-03-31T16:37:15.048215Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=Yjk4ZTg0ZGQtOTk2MjM5ZDYtMTgyYjQ1Mi00OGMwYTFhMw==, TxId: 2025-03-31T16:37:15.048845Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-31T16:37:15.068843Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-03-31T16:37:15.068924Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-03-31T16:37:15.113616Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-03-31T16:37:15.113718Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-03-31T16:37:15.157160Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:7125:5238], schemeshard count = 1 2025-03-31T16:37:15.985670Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-31T16:37:15.985766Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-03-31T16:37:15.989378Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-31T16:37:16.011381Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-31T16:37:16.011969Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-31T16:37:16.012068Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::ExecuteAnalyze. 
Table OperationId operationId, PathId [OwnerId: 72075186224037897, LocalPathId: 4], AnalyzedShards 1 2025-03-31T16:37:16.025966Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-31T16:37:16.047854Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableRequest::Complete. Send 1 events. ... blocking NKikimr::NStat::TEvStatistics::TEvAnalyzeTableResponse from TX_COLUMNSHARD_ACTOR to STATISTICS_AGGREGATOR ... waiting for TEvAnalyzeTableResponse (done) 2025-03-31T16:37:16.050683Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7207:5287] 2025-03-31T16:37:16.051456Z node 2 :STATISTICS DEBUG: [72075186224037894] Send TEvStatistics::TEvAnalyzeStatusResponse. Status STATUS_ENQUEUED 2025-03-31T16:37:16.052708Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7209:5288]
---- StatisticsAggregator ----
Database: /Root/Database
BaseStatistics: 1
SchemeShards: 1
    72075186224037897
Nodes: 1
    2
RequestedSchemeShards: 1
    72075186224037897
FastCounter: 3
FastCheckInFlight: 0
FastSchemeShards: 0
FastNodes: 0
PropagationInFlight: 0
PropagationSchemeShards: 0
PropagationNodes: 0
LastSSIndex: 0
PendingRequests: 0
ProcessUrgentInFlight: 0
Columns: 2
DatashardRanges: 0
CountMinSketches: 0
ScheduleTraversalsByTime: 2
  oldest table: [OwnerId: 72075186224037897, LocalPathId: 4], update time: 1970-01-01T00:00:00Z
ScheduleTraversalsBySchemeShard: 1
    72075186224037897
    [OwnerId: 72075186224037897, LocalPathId: 4], [OwnerId: 72075186224037897, LocalPathId: 3]
ForceTraversals: 1
    1970-01-01T00:00:05Z
NavigateType: Analyze
NavigateAnalyzeOperationId: 
NavigatePathId: 
ForceTraversalOperationId: 
TraversalStartTime: 1970-01-01T00:00:00Z
TraversalPathId: 
TraversalIsColumnTable: 0
TraversalStartKey: 
GlobalTraversalRound: 1
TraversalRound: 0
HiveRequestRound: 0
... unblocking NKikimr::NStat::TEvStatistics::TEvAnalyzeTableResponse from TX_COLUMNSHARD_ACTOR to STATISTICS_AGGREGATOR 2025-03-31T16:37:16.054279Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Execute 2025-03-31T16:37:16.054399Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Execute. All shards are analyzed 2025-03-31T16:37:16.068662Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-03-31T16:37:17.050496Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:37:17.050647Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-03-31T16:37:17.050716Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-31T16:37:17.051446Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-31T16:37:17.072828Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-31T16:37:17.073267Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-31T16:37:17.073354Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-31T16:37:17.074343Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-03-31T16:37:17.100496Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-03-31T16:37:17.100736Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-03-31T16:37:17.101799Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7259:5317], server id = [2:7260:5318], tablet id = 72075186224037899, status = OK 2025-03-31T16:37:17.101949Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7259:5317], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T16:37:17.109028Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-03-31T16:37:17.109180Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-03-31T16:37:17.109578Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-03-31T16:37:17.109807Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-03-31T16:37:17.110065Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7259:5317], server id = [2:7260:5318], tablet id = 72075186224037899 2025-03-31T16:37:17.110111Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:37:17.110313Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-03-31T16:37:17.114103Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-03-31T16:37:17.154991Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7280:5337]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:37:17.155319Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-31T16:37:17.155374Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7280:5337], StatRequests.size() = 1 2025-03-31T16:37:17.282394Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MTAyZjFmMzAtMmEyZDg3Ny0xNTZjYjc2OC00OTgyNTYzZg==, TxId: 2025-03-31T16:37:17.282472Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MTAyZjFmMzAtMmEyZDg3Ny0xNTZjYjc2OC00OTgyNTYzZg==, TxId: 2025-03-31T16:37:17.283015Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-31T16:37:17.297632Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-31T16:37:17.297711Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:950:2754] 2025-03-31T16:37:17.298975Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7297:5345] 2025-03-31T16:37:17.299557Z node 2 :STATISTICS DEBUG: [72075186224037894] Send TEvStatistics::TEvAnalyzeStatusResponse. 
Status STATUS_NO_OPERATION ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_reshuffle_kmeans/unittest >> TTxDataShardReshuffleKMeansScan::BuildToBuild [GOOD] Test command err: 2025-03-31T16:36:57.915806Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488013562879789596:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:36:57.915919Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000872/r3tmp/tmpTzsJrJ/pdisk_1.dat 2025-03-31T16:36:58.351227Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:36:58.390336Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:36:58.390975Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:36:58.393726Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:36:58.450769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:36:58.513104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:36:58.543654Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:7488013567174757467:2295] 2025-03-31T16:36:58.543920Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:36:58.563063Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:36:58.563156Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:36:58.566488Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:36:58.566563Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:36:58.566726Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:36:58.569177Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:36:58.569282Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:36:58.569313Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:7488013567174757481:2295] in generation 1 2025-03-31T16:36:58.570916Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:36:58.632667Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T16:36:58.633977Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:36:58.634047Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:7488013567174757485:2296] 2025-03-31T16:36:58.634075Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:36:58.634116Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T16:36:58.634130Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] 
Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:36:58.635126Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7488013567174757464:2295], serverId# [1:7488013567174757484:2304], sessionId# [0:0:0] 2025-03-31T16:36:58.635282Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T16:36:58.635350Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T16:36:58.635375Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:36:58.635392Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:36:58.635467Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:36:58.635495Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:36:58.635516Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:36:58.635870Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976710657 ssId 72057594046644480 seqNo 2:1 2025-03-31T16:36:58.636749Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976710657 at tablet 72075186224037888 2025-03-31T16:36:58.638146Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:36:58.638752Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:36:58.638829Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-31T16:36:58.640892Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7488013567174757499:2312], serverId# [1:7488013567174757500:2313], sessionId# [0:0:0] 2025-03-31T16:36:58.645638Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976710657 at step 1743439018685 at tablet 72075186224037888 { Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743439018685 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T16:36:58.645678Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:36:58.645831Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:36:58.645920Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:36:58.645936Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:36:58.645976Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1743439018685:281474976710657] in PlanQueue unit at 72075186224037888 2025-03-31T16:36:58.646257Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1743439018685:281474976710657 keys extracted: 0 2025-03-31T16:36:58.646394Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:36:58.646632Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:36:58.646695Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, 
LocalPathId: 2] schema version# 1 2025-03-31T16:36:58.650643Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-31T16:36:58.652786Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:36:58.653811Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1743439018685} 2025-03-31T16:36:58.653867Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:36:58.653950Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1743439018684 2025-03-31T16:36:58.653976Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:36:58.654270Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1743439018692 2025-03-31T16:36:58.654616Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:36:58.654639Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:36:58.654653Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-31T16:36:58.654691Z node 1 :TX_DATASHARD DEBUG: Complete [1743439018685 : 281474976710657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:7488013567174757295:2186], exec latency: 6 ms, propose latency: 8 ms 2025-03-31T16:36:58.654718Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976710657 state Ready TxInFly 0 2025-03-31T16:36:58.654833Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:36:58.665825Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976710657 datashard 72075186224037888 state Ready 2025-03-31T16:36:58.665896Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-31T16:36:58.680011Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7488013567174757538:2341], serverId# [1:7488013567174757539:2342], sessionId# [0:0:0] 2025-03-31T16:36:58.681034Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:36:58.681174Z node 1 :TX_DATASHARD DEBUG: Prepared Snapshot transaction txId 281474976710658 at tablet 72075186224037888 2025-03-31T16:36:58.682800Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:36:58.684205Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976710658 at step 1743439018727 at tablet 72075186224037888 { Transactions { TxId: 281474976710658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743439018727 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T16:36:58.684229Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:36:58.684341Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:36:58.684355Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:36:58.684396Z node 1 :TX_DATASHARD DEBUG: Found ready operation 
[1743439018727:281474976710658] in PlanQueue unit at 72075186224037888 2025-03-31T16:36:58.684545Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1743439018727:281474976710658 keys extracted: 0 2025-03-31T16:36:58.684904Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:36:58.685463Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1743439018727} 2025-03-31T16:36:58.685511Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:36:58.685544Z node 1 :TX_DATASHARD DEBUG: Complete [1743439018727 : 281474976710658] from 72075186224037888 at tablet 72075186224037888 send result to client [1:7488013567174757533:2337], exec latency: 0 ms, propose latency: 0 ms 2025-03-31T16:36:58.685566Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:36:58.688680Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7488013567174757549:2352], serverId# [1:7488013567174757550:2353], sessionId# [0:0:0] 2025-03-31T16:36:58.689720Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:36:58.689795Z node 1 :TX_DATASHARD DEBUG: Prepared Snapshot transaction txId 281474976710659 at tablet 72075186224037888 2025-03-31T16:36:58.691189Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037 ... [1743439038733:281474976715688] in PlanQueue unit at 72075186224037895 2025-03-31T16:37:18.684392Z node 5 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037895 loaded tx from db 1743439038733:281474976715688 keys extracted: 0 2025-03-31T16:37:18.684532Z node 5 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037895 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:37:18.684621Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037895 2025-03-31T16:37:18.684681Z node 5 :TX_DATASHARD INFO: Trying to DROP TABLE at 72075186224037895 2025-03-31T16:37:18.685061Z node 5 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037895 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:37:18.686158Z node 5 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:37:18.686352Z node 5 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037895 2025-03-31T16:37:18.687133Z node 5 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037895 step# 1743439038733} 2025-03-31T16:37:18.687177Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037895 2025-03-31T16:37:18.687287Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037895 2025-03-31T16:37:18.687346Z node 5 :TX_DATASHARD DEBUG: Complete [1743439038733 : 281474976715688] from 72075186224037895 at tablet 72075186224037895 send result to client [5:7488013634834567156:2145], exec latency: 0 ms, propose latency: 2 ms 2025-03-31T16:37:18.687379Z node 5 :TX_DATASHARD INFO: 72075186224037895 Sending notify to schemeshard 72057594046644480 txId 281474976715688 state PreOffline TxInFly 0 2025-03-31T16:37:18.687431Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 
2025-03-31T16:37:18.688372Z node 5 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715688 datashard 72075186224037895 state PreOffline 2025-03-31T16:37:18.688431Z node 5 :TX_DATASHARD DEBUG: 72075186224037895 Got TEvSchemaChangedResult from SS at 72075186224037895 2025-03-31T16:37:18.691830Z node 5 :TX_DATASHARD DEBUG: 72075186224037895 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-03-31T16:37:18.691930Z node 5 :TX_DATASHARD INFO: 72075186224037895 Initiating switch from PreOffline to Offline state 2025-03-31T16:37:18.693646Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715689:0, at schemeshard: 72057594046644480 2025-03-31T16:37:18.694409Z node 5 :TX_DATASHARD INFO: 72075186224037895 Reporting state Offline to schemeshard 72057594046644480 2025-03-31T16:37:18.696740Z node 5 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:37:18.696842Z node 5 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037895 2025-03-31T16:37:18.700798Z node 5 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037895 state Offline 2025-03-31T16:37:18.705462Z node 5 :TX_DATASHARD INFO: OnTabletStop: 72075186224037895 reason = ReasonStop 2025-03-31T16:37:18.705537Z node 5 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037895, clientId# [5:7488013652014437803:3192], serverId# [5:7488013652014437804:3193], sessionId# [0:0:0] 2025-03-31T16:37:18.705803Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 5, TabletId: 72075186224037895 not found 2025-03-31T16:37:18.706147Z node 5 :TX_DATASHARD INFO: OnTabletDead: 72075186224037895 2025-03-31T16:37:18.706227Z node 5 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037895 2025-03-31T16:37:18.706734Z node 5 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037896 actor [5:7488013652014437876:2359] 2025-03-31T16:37:18.706936Z node 5 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:37:18.717681Z node 5 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:37:18.717771Z node 5 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:37:18.719488Z node 5 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037896 2025-03-31T16:37:18.719545Z node 5 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037896 2025-03-31T16:37:18.719596Z node 5 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037896 2025-03-31T16:37:18.719955Z node 5 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:37:18.720010Z node 5 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:37:18.720036Z node 5 :TX_DATASHARD DEBUG: DataShard 72075186224037896 persisting started state actor id [5:7488013652014437900:2359] in generation 1 2025-03-31T16:37:18.723722Z node 5 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:37:18.723771Z node 5 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037896 2025-03-31T16:37:18.723861Z node 5 :TX_DATASHARD DEBUG: 72075186224037896 not sending time cast registration request in state 
WaitScheme: missing processing params 2025-03-31T16:37:18.723904Z node 5 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037896, actorId: [5:7488013652014437902:2360] 2025-03-31T16:37:18.723922Z node 5 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037896 2025-03-31T16:37:18.723935Z node 5 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037896, state: WaitScheme 2025-03-31T16:37:18.723948Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037896 2025-03-31T16:37:18.724060Z node 5 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037896 2025-03-31T16:37:18.724135Z node 5 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037896 2025-03-31T16:37:18.724171Z node 5 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037896, clientId# [5:7488013652014437877:3251], serverId# [5:7488013652014437889:3259], sessionId# [0:0:0] 2025-03-31T16:37:18.724196Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037896 2025-03-31T16:37:18.724214Z node 5 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037896 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:37:18.724230Z node 5 :TX_DATASHARD INFO: No tx to execute at 72075186224037896 TxInFly 0 2025-03-31T16:37:18.724247Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037896 2025-03-31T16:37:18.724550Z node 5 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037896 2025-03-31T16:37:18.724743Z node 5 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037896 txId 281474976715689 ssId 72057594046644480 seqNo 2:16 2025-03-31T16:37:18.724807Z node 5 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715689 at tablet 72075186224037896 2025-03-31T16:37:18.725217Z node 5 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037896 2025-03-31T16:37:18.726422Z node 5 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037896 2025-03-31T16:37:18.726482Z node 5 :TX_DATASHARD DEBUG: 72075186224037896 not sending time cast registration request in state WaitScheme 2025-03-31T16:37:18.728687Z node 5 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037896, clientId# [5:7488013652014437908:3272], serverId# [5:7488013652014437909:3273], sessionId# [0:0:0] 2025-03-31T16:37:18.729026Z node 5 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715689 at step 1743439038775 at tablet 72075186224037896 { Transactions { TxId: 281474976715689 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743439038775 MediatorID: 72057594046382081 TabletID: 72075186224037896 } 2025-03-31T16:37:18.729052Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037896 2025-03-31T16:37:18.729144Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037896 2025-03-31T16:37:18.729163Z node 5 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037896 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:37:18.729184Z node 5 :TX_DATASHARD DEBUG: Found ready operation [1743439038775:281474976715689] in PlanQueue unit at 72075186224037896 2025-03-31T16:37:18.729447Z node 5 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037896 loaded tx from db 1743439038775:281474976715689 keys extracted: 0 2025-03-31T16:37:18.729573Z node 5 :TX_DATASHARD DEBUG: 
GetNextActiveOp at 72075186224037896 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:37:18.729682Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037896 2025-03-31T16:37:18.729719Z node 5 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037896 tableId# [OwnerId: 72057594046644480, LocalPathId: 14] schema version# 1 2025-03-31T16:37:18.730165Z node 5 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037896 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-31T16:37:18.730531Z node 5 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037896 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:37:18.730969Z node 5 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:37:18.732398Z node 5 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037896 time 1743439038774 2025-03-31T16:37:18.732435Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037896 2025-03-31T16:37:18.732469Z node 5 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037896 2025-03-31T16:37:18.732513Z node 5 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037896 coordinator 72057594046316545 last step 0 next step 1743439038775 2025-03-31T16:37:18.732550Z node 5 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037896 step# 1743439038775} 2025-03-31T16:37:18.732577Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037896 2025-03-31T16:37:18.732614Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037896 2025-03-31T16:37:18.732636Z node 5 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037896 2025-03-31T16:37:18.732658Z node 5 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037896 2025-03-31T16:37:18.732707Z node 5 :TX_DATASHARD DEBUG: Complete [1743439038775 : 281474976715689] from 72075186224037896 at tablet 72075186224037896 send result to client [5:7488013634834567156:2145], exec latency: 0 ms, propose latency: 3 ms 2025-03-31T16:37:18.732740Z node 5 :TX_DATASHARD INFO: 72075186224037896 Sending notify to schemeshard 72057594046644480 txId 281474976715689 state Ready TxInFly 0 2025-03-31T16:37:18.732786Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037896 2025-03-31T16:37:18.733932Z node 5 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715689 datashard 72075186224037896 state Ready 2025-03-31T16:37:18.733994Z node 5 :TX_DATASHARD DEBUG: 72075186224037896 Got TEvSchemaChangedResult from SS at 72075186224037896 |97.7%| [TM] {RESULT} ydb/core/tx/datashard/ut_reshuffle_kmeans/unittest |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/wardens/py3test >> test_liveness_wardens.py::TestLivenessWarden::test_scheme_shard_has_no_in_flight_transactions [GOOD] |97.7%| [TM] {RESULT} ydb/tests/functional/wardens/py3test >> Metering::MockedNetClassifierOnly [GOOD] >> Metering::MockedNetClassifierLabelTransformation |97.7%| [TS] {asan, default-linux-x86_64, release} ydb/tests/tools/pq_read/test/py3test >> test_timeout.py::TestTimeout::test_timeout [GOOD] |97.7%| [TS] {RESULT} ydb/tests/tools/pq_read/test/py3test >> 
KqpTpch::Query05 [GOOD] >> KqpTpch::Query06 >> TMemoryController::Counters [GOOD] >> TMemoryController::Counters_HardLimit >> TGRpcRateLimiterTest::AcquireResourceManyUsedGrpcApi [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyUsedActorApi >> TTxDataShardPrefixKMeansScan::BadRequest [GOOD] >> TTxDataShardPrefixKMeansScan::BuildToPosting >> test_ctas.py::TestYtCtas::test_simple_ctast [GOOD] >> test_yt_reading.py::TestYtReading::test_partitioned_reading >> DataShardFollowers::FollowerDuringDataPartSwitch [GOOD] >> DataShardFollowers::FollowerReadDuringSplit >> KqpTpch::Query06 [GOOD] >> KqpTpch::Query07 >> test.py::test[solomon-UnknownSetting-] [GOOD] >> TSentinelTests::PDiskRackGuardFullRack [GOOD] >> TSentinelTests::BSControllerUnresponsive >> StatisticsScan::RunScanOnShard >> KeyValueGRPCService::SimpleWriteRead [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithIncorreectPath >> QuoterWithKesusTest::HandlesNonExistentResource [GOOD] >> QuoterWithKesusTest::HandlesAllRequestsForNonExistentResource |97.7%| [TM] {asan, default-linux-x86_64, pic, release} ydb/tests/fq/solomon/py3test >> test.py::test[solomon-UnknownSetting-] [GOOD] |97.7%| [TM] {RESULT} ydb/tests/fq/solomon/py3test >> TRangeTreap::Simple [GOOD] >> TRangeTreap::Sequential >> TopicSessionTests::BadDataSessionError [GOOD] >> TopicSessionTests::WrongFieldType >> test_http_api.py::TestHttpApi::test_stop_idempotency [GOOD] >> test_http_api.py::TestHttpApi::test_restart_idempotency >> TMemoryController::Counters_HardLimit [GOOD] >> TMemoryController::Counters_NoHardLimit >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_parallel [GOOD] >> Metering::MockedNetClassifierLabelTransformation [GOOD] >> SHA256Test::SHA256Test [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_insert_multiple_empty_rows >> AnalyzeDatashard::AnalyzeOneTable [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/ymq/actor/ut/unittest >> SHA256Test::SHA256Test [GOOD] Test command err: 2025-03-31T16:37:04.489487Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488013592280475083:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:37:04.489614Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00076d/r3tmp/tmppJvJ7H/pdisk_1.dat 2025-03-31T16:37:04.896461Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:37:04.927376Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:37:04.927484Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:37:04.930251Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:37:04.975815Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:37:04.975837Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:37:04.975845Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:37:04.975979Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:37:09.490984Z node 1 :METADATA_PROVIDER ERROR: 
fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488013592280475083:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:37:09.491124Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00076d/r3tmp/tmpSVzHYD/pdisk_1.dat 2025-03-31T16:37:10.551265Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:37:10.605757Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:37:10.627485Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:37:10.627514Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:37:10.627520Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:37:10.627648Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:37:10.636092Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:37:10.636173Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:37:10.639441Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:37:21.207487Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488013664612806291:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:37:21.207547Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00076d/r3tmp/tmpqdohL2/pdisk_1.dat 2025-03-31T16:37:21.311400Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:37:21.339872Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:37:21.339901Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:37:21.339908Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:37:21.339994Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:37:21.351264Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:37:21.351343Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:37:21.352869Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:37:26.208171Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488013664612806291:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:37:26.208308Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; |97.7%| [TS] {RESULT} ydb/core/ymq/actor/ut/unittest >> Coordinator::LastStepSubscribe [GOOD] >> Coordinator::RestoreDomainConfiguration >> 
test_yt_reading.py::TestYtReading::test_partitioned_reading [GOOD] >> test_yt_reading.py::TestYtReading::test_block_reading >> TIndexProcesorTests::TestOver1000Queues [GOOD] >> DiscoveryIsNotBroken::NoKafkaEndpointInDiscovery >> TGRpcRateLimiterTest::AcquireResourceManyUsedActorApi [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyUsedGrpcApiWithCancelAfter ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeDatashard::AnalyzeOneTable [GOOD] Test command err: 2025-03-31T16:35:42.781936Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:35:42.782349Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:35:42.782515Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000e79/r3tmp/tmpdGHoXJ/pdisk_1.dat 2025-03-31T16:35:43.195930Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61673, node 1 2025-03-31T16:35:43.447349Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:43.447424Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:43.447461Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:43.448003Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:35:43.450424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:35:43.545506Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:43.545645Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:43.560465Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22903 2025-03-31T16:35:44.159037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:35:47.564029Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-31T16:35:47.598277Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:47.598378Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:47.637779Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:35:47.640663Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:35:47.903636Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:35:47.904196Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:35:47.906433Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:35:47.906636Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:35:47.906952Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:35:47.907077Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:35:47.907365Z node 2 :HIVE WARN: HIVE#72075186224037888
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:35:47.907502Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:35:47.907612Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:35:48.096741Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:48.096878Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:48.110478Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:35:48.273857Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:35:48.326466Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-31T16:35:48.326567Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-31T16:35:48.356490Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-31T16:35:48.357078Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-31T16:35:48.357287Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-31T16:35:48.357348Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-31T16:35:48.357433Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-31T16:35:48.357491Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-31T16:35:48.357539Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-31T16:35:48.357596Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-31T16:35:48.359981Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-31T16:35:48.398812Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-31T16:35:48.398936Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1870:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-31T16:35:48.407414Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1881:2607] 2025-03-31T16:35:48.411360Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1904:2618] 2025-03-31T16:35:48.411496Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1904:2618], schemeshard id = 72075186224037897 2025-03-31T16:35:48.421133Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-31T16:35:48.444112Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-31T16:35:48.444170Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-31T16:35:48.444254Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-31T16:35:48.463215Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-31T16:35:48.475893Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-31T16:35:48.476080Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-31T16:35:48.715973Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-31T16:35:48.890938Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-31T16:35:48.982137Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-31T16:35:49.988324Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2238:3072], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:49.988464Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:50.008281Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-31T16:35:50.583953Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2546:3121], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:50.584130Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:50.585441Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2551:3125]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:35:50.585667Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-31T16:35:50.585765Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2553:3127] 2025-03-31T16:35:50.585829Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2553:3127] 2025-03-31T16:35:50.586500Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2554:2997] 2025-03-31T16:35:50.586752Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2553:3127], server id = [2:2554:2997], tablet id = 72075186224037894, status = OK 2025-03-31T16:35:50.586938Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:2554:2997], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-03-31T16:35:50.586999Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-03-31T16:35:50.587250Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-31T16:35:50.587340Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2551:3125], StatRequests.size() = 1 2025-03-31T16:35:50.603542Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2558:3131], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:50.603640Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:50.604046Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2563:3136], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:50.609285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-03-31T16:35:50.782189Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-03-31T16:35:50.782254Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-03-31T16:35:50.858404Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2553:3127], schemeshard count = 1 2025-03-31T16:35:51.306926Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreator ... 2075186224037894] EvPropagateTimeout 2025-03-31T16:37:03.653562Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-31T16:37:03.653915Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-31T16:37:05.699615Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-31T16:37:06.995066Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-31T16:37:06.995445Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-31T16:37:08.768158Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-31T16:37:10.336962Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-31T16:37:10.337297Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-31T16:37:12.293093Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-31T16:37:13.914266Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-31T16:37:13.914562Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-31T16:37:15.953568Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-31T16:37:17.422152Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-31T16:37:17.422499Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-31T16:37:19.444563Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-31T16:37:20.707633Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-31T16:37:20.708012Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-31T16:37:22.664303Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-31T16:37:23.389438Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-03-31T16:37:23.389530Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-03-31T16:37:23.389570Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-03-31T16:37:23.389614Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-03-31T16:37:24.288657Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-31T16:37:24.289022Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-31T16:37:24.354558Z node 2 :STATISTICS DEBUG:
SendBaseStatsToSA(), path count: 2, at schemeshard: 72075186224037897 2025-03-31T16:37:24.354648Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 203.000000s, at schemeshard: 72075186224037897 2025-03-31T16:37:24.354950Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037897, stats size# 49 2025-03-31T16:37:24.372743Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-03-31T16:37:25.080563Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:37:25.080668Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-03-31T16:37:25.080718Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-03-31T16:37:25.080773Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table. 2025-03-31T16:37:25.080836Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-03-31T16:37:25.081280Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-31T16:37:25.112112Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-03-31T16:37:25.115431Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6651:4682], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:37:25.115565Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6660:4687], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:37:25.115686Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:37:25.127776Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897 2025-03-31T16:37:25.189555Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:6665:4690], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976720658 completed, doublechecking } 2025-03-31T16:37:25.313007Z node 2 :TX_PROXY ERROR: Actor# [2:6760:4738] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:37:25.381414Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:6789:4753]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:37:25.381672Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-31T16:37:25.381779Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:6791:4755] 2025-03-31T16:37:25.381852Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:6791:4755] 2025-03-31T16:37:25.382516Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:6792:4756] 2025-03-31T16:37:25.382681Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:6792:4756], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-03-31T16:37:25.382758Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-03-31T16:37:25.382920Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:6791:4755], server id = [2:6792:4756], tablet id = 72075186224037894, status = OK 2025-03-31T16:37:25.382989Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-31T16:37:25.383116Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:6789:4753], StatRequests.size() = 1 2025-03-31T16:37:25.531756Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NzVmZmQ4NmEtODM5MDg0ZS1mZjY0NTFhZC05NDJlYTU=, TxId: 2025-03-31T16:37:25.531858Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NzVmZmQ4NmEtODM5MDg0ZS1mZjY0NTFhZC05NDJlYTU=, TxId: 2025-03-31T16:37:25.532406Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-31T16:37:25.550044Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-03-31T16:37:25.550130Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-03-31T16:37:25.585838Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-03-31T16:37:25.585952Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-03-31T16:37:25.629250Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:6791:4755], schemeshard count = 1 2025-03-31T16:37:26.177898Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-31T16:37:26.177994Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is data table. 2025-03-31T16:37:26.178039Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze.
Skip analyze for datashard table [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-31T16:37:26.868186Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-31T16:37:26.891512Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:37:26.891705Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is data table. 2025-03-31T16:37:26.891754Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-31T16:37:26.892120Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-31T16:37:26.894503Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-03-31T16:37:26.910541Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=Yzg2ZTg5M2YtMzRhMjg3OWItNDEyNzBjOWItNzY4MDY3YQ==, TxId: 2025-03-31T16:37:26.910617Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=Yzg2ZTg5M2YtMzRhMjg3OWItNDEyNzBjOWItNzY4MDY3YQ==, TxId: 2025-03-31T16:37:26.911073Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-31T16:37:26.928655Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-31T16:37:26.928727Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2763:3244] 2025-03-31T16:37:26.929265Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:6916:4829]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-03-31T16:37:26.932975Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-31T16:37:26.933039Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-03-31T16:37:26.939799Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-31T16:37:26.939886Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 2 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-03-31T16:37:26.939941Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-03-31T16:37:26.942688Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2025-03-31T16:37:26.943409Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 2 >> TTxDataShardPrefixKMeansScan::BuildToPosting [GOOD] >> TTxDataShardPrefixKMeansScan::BuildToBuild >> DataShardFollowers::FollowerReadDuringSplit [GOOD] >> TxKeys::ComparePointKeys >> QuoterWithKesusTest::HandlesAllRequestsForNonExistentResource [GOOD] >> QuoterWithKesusTest::GetsQuota >> TAsyncIndexTests::CdcAndMergeWithReboots[TabletReboots] [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithIncorreectPath [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithoutToken |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/query_cache/py3test >> test_query_cache.py::TestQueryCache::test [GOOD] |97.7%| 
[TM] {RESULT} ydb/tests/functional/query_cache/py3test >> TestFilterSet::FilterGroup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/ymq/actor/yc_search_ut/unittest >> TIndexProcesorTests::TestOver1000Queues [GOOD] Test command err: 2025-03-31T16:36:49.180401Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488013526365213614:2130];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:36:49.180447Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000749/r3tmp/tmpeMAIiX/pdisk_1.dat 2025-03-31T16:36:49.474665Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28485, node 1 2025-03-31T16:36:49.559396Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:36:49.563656Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:36:49.566526Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:36:49.585189Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:36:49.585218Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:36:49.585230Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:36:49.585367Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2586 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:36:49.947404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... TClient is connected to server localhost:2586 waiting... 2025-03-31T16:36:51.972980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:1, at schemeshard: 72057594046644480 2025-03-31T16:36:51.974748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:2586 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743439010012 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "SQS" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743439010026 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: ".sys" PathId: 184467... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:36:52.292934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710662:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:36:52.310537Z node 1 :TX_PROXY ERROR: Actor# [1:7488013539250116337:2469] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/SQS\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } Error 1: Check failed: path: '/Root/SQS', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeDir, state: EPathStateNoChanges) TClient is connected to server localhost:2586 waiting... 2025-03-31T16:36:52.529003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:1, at schemeshard: 72057594046644480 2025-03-31T16:36:52.530545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 ===Execute query: UPSERT INTO `/Root/SQS/SingleCreateQueueEvent/.Events` (Account, QueueName, EventType, CustomQueueName, EventTimestamp, FolderId, Labels) VALUES ("cloud1", "queue1", 1, "myQueueCustomName", 1743439012332, "myFolder", "{\"k1\": \"v1\"}"); 2025-03-31T16:36:52.674761Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013539250116509:2361], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:52.674775Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013539250116521:2364], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:52.674871Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:52.681711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710667:3, at schemeshard: 72057594046644480 2025-03-31T16:36:52.691796Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488013539250116523:2365], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710667 completed, doublechecking } 2025-03-31T16:36:52.790054Z node 1 :TX_PROXY ERROR: Actor# [1:7488013539250116577:2626] txid# 281474976710668, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:36:53.529145Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710669. Ctx: { TraceId: 01jqph08sy5rxhm1ftfvajxezq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTYxYjEwM2MtNDI1ZGVhMGUtZDgwZjYzZjYtODQxY2FhZjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:36:53.991355Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710670. Ctx: { TraceId: 01jqph09pv5c1395d1r4xw5ym0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWNlYTM5ZTYtNzZmNGVjMTAtMWYwNDA4ZTItM2MzM2I5Yzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:36:54.124374Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710671. Ctx: { TraceId: 01jqph0a42cxcx8g4zrg1rq0p4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjJhZGI3NTctYTFhN2Q3YmItZGQzYmNmZjctZTRlZWNjNTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:36:54.180457Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488013526365213614:2130];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:36:54.180542Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:36:54.296817Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710672. Ctx: { TraceId: 01jqph0a7n6kw3aa6e7cctcj09, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmM3ODE1OTQtZjU2NjYwLTc5YmQ3MGRkLTYyYmY5Mjhh, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2025-03-31T16:36:52.332000Z","resource_id":"queue1","name":"myQueueCustomName","service":"message-queue","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2025-03-31T16:36:52.332000Z","resource_id":"queue1","name":"myQueueCustomName","service":"message-queue","reindex_timestamp":"2025-03-31T16:36:54.304237Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} 2025-03-31T16:36:54.406963Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710673. Ctx: { TraceId: 01jqph0ag52m6gmx459pv0vmvh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmQxNDY4YzgtYzQxOGI2NWUtZTg3Nzc5NzQtNjgyZDhjNTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:36:54.413617Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710674.
Ctx: { TraceId: 01jqph0agcd0s0t6by306ycm1z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzdjNzNkYzUtYTBiOTExN2ItOWZmNjNjNGItYzJlMWFjNTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:36:54.524623Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01jqph0akq1e8emhdy8h28mm1c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWU4ODIxNS04OTc5MGZkYi0zYTZhZTIzYy01NGFkZGUzNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:36:54.531041Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710676. Ctx: { TraceId: 01jqph0am13hb6e1mvpy27w3ws, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTVmNGRhZTYtYzNhNWEyNmItYmZiYzFhYTAtODY1YWUwMmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:36:54.638996Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710677. Ctx: { TraceId: 01jqph0aqbb9p286tkktfskp5e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjIzMGY1YjUtZTM2MDRkMmYtMjkzNmIzMzEtNjJkNTY2MWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use ... }. Database not set, use /Root ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2025-03-31T16:37:00.793000Z","resource_id":"deleting2","name":"myQueueCustomName","service":"message-queue","deleted":"2025-03-31T16:37:00.793000Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2025-03-31T16:37:00.793000Z","resource_id":"deleting1","name":"myQueueCustomName","service":"message-queue","deleted":"2025-03-31T16:37:00.793000Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2025-03-31T16:37:00.793000Z","resource_id":"creating1","name":"myQueueCustomName","service":"message-queue","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2025-03-31T16:37:00.793000Z","resource_id":"creating2","name":"myQueueCustomName","service":"message-queue","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2025-03-31T16:36:59.000000Z","resource_id":"existing1","name":"myQueueCustomName","service":"message-queue","reindex_timestamp":"2025-03-31T16:37:02.770039Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: 
{"resource_type":"message-queue","timestamp":"2025-03-31T16:36:59.000000Z","resource_id":"existing2","name":"myQueueCustomName","service":"message-queue","reindex_timestamp":"2025-03-31T16:37:02.770110Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2025-03-31T16:37:00.793000Z","resource_id":"creating1","name":"myQueueCustomName","service":"message-queue","reindex_timestamp":"2025-03-31T16:37:02.770148Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2025-03-31T16:36:59.000000Z","resource_id":"existing3","name":"myQueueCustomName","service":"message-queue","reindex_timestamp":"2025-03-31T16:37:02.770177Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2025-03-31T16:37:00.793000Z","resource_id":"creating2","name":"myQueueCustomName","service":"message-queue","reindex_timestamp":"2025-03-31T16:37:02.770209Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} 2025-03-31T16:37:02.878756Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710749. Ctx: { TraceId: 01jqph0jrx2vxz3gshztq2aae7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDliN2JlMjYtNGIzNWQ4MDItOWUwYjNmOWEtNDA1MDgzMWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:37:02.884463Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710750. Ctx: { TraceId: 01jqph0js3751j52zkqy4s4jfv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDliN2JlMjYtNGIzNWQ4MDItOWUwYjNmOWEtNDA1MDgzMWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:37:02.891239Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710751. Ctx: { TraceId: 01jqph0js9e6tces9g6chf19w4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmYwYjEwODQtNWEwMDdhODYtZmU2MmE4ZDUtYTc3ZDA5YjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:37:02.999261Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710752. Ctx: { TraceId: 01jqph0jwnaehera6w7k9rsqy7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTEzNzAyOWUtYmQ0NjNmZGYtNGM0NzlhMC1iMTI2OTYwNw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:37:03.004599Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710753. Ctx: { TraceId: 01jqph0jwvf9pgs5sefywqvtb6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTEzNzAyOWUtYmQ0NjNmZGYtNGM0NzlhMC1iMTI2OTYwNw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:37:03.012548Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710754. 
Ctx: { TraceId: 01jqph0jx3efjbejy9bty48w5v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzFhNTVlYS1kYzQxZjIwNi1iOWRjYjRlMS0zOWQxMzFmMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:37:03.128068Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710755. Ctx: { TraceId: 01jqph0k0e5scqx66xwjy39zqf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTc4MzNkYzUtNTJjZGMwNmMtZTMyYmJhYi02NDA1Y2RjNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:37:03.145141Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710756. Ctx: { TraceId: 01jqph0k17dr8hpnc0q2mtck19, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTc4MzNkYzUtNTJjZGMwNmMtZTMyYmJhYi02NDA1Y2RjNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:37:03.152333Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710757. Ctx: { TraceId: 01jqph0k1ebegs9ccg4pby5hz3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzMwMDJhMTAtNWUyODg0ZmYtMmQ5NmY0MzUtYmI0MGRiMjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:37:03.166991Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710758. Ctx: { TraceId: 01jqph0jp272abqf829tmssj41, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTYxYjEwM2MtNDI1ZGVhMGUtZDgwZjYzZjYtODQxY2FhZjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient is connected to server localhost:2586 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743439010012 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 13 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 13 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 7 SubDomainVersion: 5 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".metadata" PathId: 13 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710667 CreateStep: 1743439012735 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "SQS" Pat... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:37:03.387051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710759:0, at schemeshard: 72057594046644480 waiting... Error 1: Check failed: path: '/Root/SQS', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeDir, state: EPathStateNoChanges) 2025-03-31T16:37:03.395902Z node 1 :TX_PROXY ERROR: Actor# [1:7488013586494758797:3606] txid# 281474976710760, issues: { message: "Check failed: path: \'/Root/SQS\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } TClient is connected to server localhost:2586 waiting... 
2025-03-31T16:37:03.677844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710762:1, at schemeshard: 72057594046644480 2025-03-31T16:37:03.680523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710763:0, at schemeshard: 72057594046644480 ===Started add queue batch 2025-03-31T16:37:04.472705Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:37:04.472747Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:37:23.510532Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710764. Ctx: { TraceId: 01jqph16nm71h6q1wse2892rx0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTYxYjEwM2MtNDI1ZGVhMGUtZDgwZjYzZjYtODQxY2FhZjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:37:27.116902Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710765. Ctx: { TraceId: 01jqph19z10t34qv84wq4znwke, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTYxYjEwM2MtNDI1ZGVhMGUtZDgwZjYzZjYtODQxY2FhZjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:37:28.396579Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710766. Ctx: { TraceId: 01jqph1be65frj4tt578s77ed6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTE2MzJkMWUtYjkzZWExMWQtZDYzNWQwYzYtYTI3YWNiYmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:37:28.471835Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710767. Ctx: { TraceId: 01jqph1brn88fkwbn6wr3qfyan, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTE2MzJkMWUtYjkzZWExMWQtZDYzNWQwYzYtYTI3YWNiYmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:37:28.549365Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710768. Ctx: { TraceId: 01jqph1bv311cqn4ewnq26ymqr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTE2MzJkMWUtYjkzZWExMWQtZDYzNWQwYzYtYTI3YWNiYmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:37:28.563398Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710769. Ctx: { TraceId: 01jqph1bvh3pzp83b7gpt3kz83, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTE2MzJkMWUtYjkzZWExMWQtZDYzNWQwYzYtYTI3YWNiYmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:37:28.771926Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710770. Ctx: { TraceId: 01jqph1bvt5v6d7wtz7k69hqf2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTY0MGQxZjUtYTljY2VmN2ItYjRmNDkwYmYtZjMxNDY4MjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root |97.7%| [TM] {RESULT} ydb/core/ymq/actor/yc_search_ut/unittest >> StatisticsScan::RunScanOnShard [GOOD] >> KqpTpch::Query07 [GOOD] >> KqpTpch::Query08 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_followers/unittest >> DataShardFollowers::FollowerReadDuringSplit [GOOD] Test command err: 2025-03-31T16:36:40.152387Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:36:40.152730Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:36:40.152931Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000534/r3tmp/tmp71OgGE/pdisk_1.dat 2025-03-31T16:36:40.669958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:36:40.725359Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:36:40.768099Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-31T16:36:40.771207Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:36:40.771709Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:36:40.771930Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-03-31T16:36:40.784752Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:36:40.872301Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-03-31T16:36:40.872414Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-03-31T16:36:40.873607Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-03-31T16:36:40.985899Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 PartitionConfig { FollowerGroups { FollowerCount: 1 AllowLeaderPromotion: false } } } } } ExecTimeoutPeriod: 18446744073709551615 2025-03-31T16:36:40.986023Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T16:36:40.986706Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-03-31T16:36:40.986856Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:36:40.987273Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:36:40.987528Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 
RedirectRequired# true 2025-03-31T16:36:40.987710Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-03-31T16:36:40.988067Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvClientConnected 2025-03-31T16:36:40.990831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:36:40.992735Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-03-31T16:36:40.992845Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-03-31T16:36:41.038214Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:36:41.039480Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:36:41.040046Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-31T16:36:41.040318Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:36:41.091857Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:36:41.092664Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:36:41.092807Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:36:41.096121Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:36:41.096236Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:36:41.096297Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:36:41.098528Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:36:41.098722Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:36:41.098844Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-31T16:36:41.109721Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:36:41.134526Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T16:36:41.135889Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:36:41.136086Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-31T16:36:41.136165Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:36:41.136205Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T16:36:41.136248Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:36:41.136512Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: 
NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:36:41.136580Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:36:41.137709Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T16:36:41.137837Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T16:36:41.137960Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:36:41.138001Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:36:41.138058Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-31T16:36:41.138107Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-31T16:36:41.138146Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-31T16:36:41.138181Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:36:41.138224Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:36:41.138354Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:36:41.138397Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:36:41.138473Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-31T16:36:41.138594Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-03-31T16:36:41.138636Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T16:36:41.138848Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:36:41.139216Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-31T16:36:41.139293Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T16:36:41.140012Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T16:36:41.140079Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-31T16:36:41.140149Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-31T16:36:41.140190Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-03-31T16:36:41.140227Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-31T16:36:41.140566Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-31T16:36:41.140624Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-31T16:36:41.140674Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-31T16:36:41.140712Z node 1 
:TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-31T16:36:41.140779Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-31T16:36:41.140830Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-31T16:36:41.140892Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-31T16:36:41.140944Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-31T16:36:41.140973Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-31T16:36:41.144408Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-31T16:36:41.144475Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025- ... cessing event TEvDataShard::TEvReadAck 2025-03-31T16:37:29.312072Z node 8 :TX_DATASHARD DEBUG: 72075186224037889 ReadAck from [8:1096:2864] on missing iterator: { ReadId: 0 SeqNo: 1 MaxRows: 999 MaxBytes: 5242880 } 2025-03-31T16:37:29.312264Z node 8 :TX_DATASHARD TRACE: StateWorkAsFollower, received event# 269553218, Sender [8:1096:2864], Recipient [8:1085:2859]: NKikimrTxDataShard.TEvReadAck ReadId: 0 SeqNo: 2 MaxRows: 999 MaxBytes: 5242880 2025-03-31T16:37:29.312297Z node 8 :TX_DATASHARD TRACE: StateWorkAsFollower, processing event TEvDataShard::TEvReadAck 2025-03-31T16:37:29.312345Z node 8 :TX_DATASHARD DEBUG: 72075186224037889 ReadAck from [8:1096:2864] on missing iterator: { ReadId: 0 SeqNo: 2 MaxRows: 999 MaxBytes: 5242880 } 2025-03-31T16:37:29.312716Z node 8 :TX_DATASHARD TRACE: StateWorkAsFollower, received event# 269553219, Sender [8:1096:2864], Recipient [8:1085:2859]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-03-31T16:37:29.312771Z node 8 :TX_DATASHARD TRACE: StateWorkAsFollower, processing event TEvDataShard::TEvReadCancel 2025-03-31T16:37:29.312860Z node 8 :TX_DATASHARD TRACE: 72075186224037889 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 11 } }, { items { uint32_value: 2 } items { uint32_value: 22 } } ... reading from the right follower 2025-03-31T16:37:29.494695Z node 8 :TX_PROXY DEBUG: actor# [8:59:2106] Handle TEvExecuteKqpTransaction 2025-03-31T16:37:29.494785Z node 8 :TX_PROXY DEBUG: actor# [8:59:2106] TxId# 281474976715665 ProcessProposeKqpTransaction 2025-03-31T16:37:29.496429Z node 8 :TX_DATASHARD TRACE: StateWorkAsFollower, received event# 269877761, Sender [8:1115:2879], Recipient [8:1088:2861]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:37:29.496495Z node 8 :TX_DATASHARD TRACE: StateWorkAsFollower, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:37:29.496560Z node 8 :TX_DATASHARD DEBUG: Server connected at follower 1 tablet# 72075186224037890, clientId# [8:1113:2878], serverId# [8:1115:2879], sessionId# [0:0:0] 2025-03-31T16:37:29.496728Z node 8 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jqph1cka2ttj1c5khpc4p1w8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=8&id=OWI2ZjlkMzAtNzI2NjA1OTAtNzcwOWE3ZTQtMWYwZTI5MA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-31T16:37:29.498389Z node 8 :TX_DATASHARD TRACE: StateWorkAsFollower, received event# 269553215, Sender [8:1119:2880], Recipient [8:1088:2861]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 MaxRowsInResult: 1 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2025-03-31T16:37:29.498447Z node 8 :TX_DATASHARD TRACE: StateWorkAsFollower, processing event TEvDataShard::TEvRead 2025-03-31T16:37:29.498640Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} queued, type NKikimr::NDataShard::TDataShard::TTxReadViaPipeline 2025-03-31T16:37:29.498736Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-31T16:37:29.498939Z node 8 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037890, FollowerId 1 2025-03-31T16:37:29.499049Z node 8 :TX_DATASHARD DEBUG: Updating sys metadata on follower, tabletId 72075186224037890 prev TChangeCounter{serial=0, epoch=0} current TChangeCounter{serial=6, epoch=1} 2025-03-31T16:37:29.499818Z node 8 :TX_DATASHARD DEBUG: Updating tables metadata on follower, tabletId 72075186224037890 prev TChangeCounter{serial=0, epoch=0} current TChangeCounter{serial=4, epoch=1} 2025-03-31T16:37:29.500263Z node 8 :TX_DATASHARD DEBUG: Updating snapshots metadata on follower, tabletId 72075186224037890 prev TChangeCounter{serial=0, epoch=0} current TChangeCounter{serial=0, epoch=1} 2025-03-31T16:37:29.500399Z node 8 :TX_DATASHARD TRACE: 72075186224037890 changed HEAD read to repeatable v1500/18446744073709551615 2025-03-31T16:37:29.500490Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:1] at 72075186224037890 on unit CheckRead 2025-03-31T16:37:29.500597Z node 8 :TX_DATASHARD TRACE: Execution status for [0:1] at 72075186224037890 is Executed 2025-03-31T16:37:29.500651Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 72075186224037890 executing on unit CheckRead 2025-03-31T16:37:29.500702Z node 8 :TX_DATASHARD TRACE: Add [0:1] at 72075186224037890 to execution unit BuildAndWaitDependencies 2025-03-31T16:37:29.500748Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:1] at 72075186224037890 on unit BuildAndWaitDependencies 2025-03-31T16:37:29.500789Z node 8 :TX_DATASHARD TRACE: Activated operation [0:1] at 72075186224037890 2025-03-31T16:37:29.500839Z node 8 :TX_DATASHARD TRACE: Execution status for [0:1] at 72075186224037890 is Executed 2025-03-31T16:37:29.500863Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 72075186224037890 executing on unit BuildAndWaitDependencies 2025-03-31T16:37:29.500885Z node 8 :TX_DATASHARD TRACE: Add [0:1] at 72075186224037890 to execution unit ExecuteRead 2025-03-31T16:37:29.500910Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:1] at 72075186224037890 on unit ExecuteRead 2025-03-31T16:37:29.501067Z node 8 :TX_DATASHARD TRACE: 72075186224037890 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 MaxRowsInResult: 1 Reverse: false TotalRowsLimit: 1001 } 2025-03-31T16:37:29.501341Z node 8 :TX_DATASHARD TRACE: Execution status for [0:1] at 72075186224037890 is Restart 2025-03-31T16:37:29.501377Z node 8 
:TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Restart at tablet# 72075186224037890 2025-03-31T16:37:29.501488Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} hope 1 -> retry Change{7, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-03-31T16:37:29.501582Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} touch new 0b, 65b lo load (65b in total), 0b requested for data (4194304b in total) 2025-03-31T16:37:29.501653Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} took 8388608b of static mem, Memory{8388608 dyn 0} 2025-03-31T16:37:29.501715Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} requests PageCollection [72075186224037888:1:28:1:12288:190:0] 65 bytes, 1 pages: [0 4] 2025-03-31T16:37:29.501789Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} postponed, 65b, pages {1 wait, 1 load}, freshly touched 1 pages 2025-03-31T16:37:29.501944Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} got result TEvResult{1 pages [72075186224037888:1:28:1:12288:190:0] ok OK}, category 1 2025-03-31T16:37:29.502075Z node 8 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037890, FollowerId 1 2025-03-31T16:37:29.502104Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:1] at 72075186224037890 on unit ExecuteRead 2025-03-31T16:37:29.502178Z node 8 :TX_DATASHARD TRACE: 72075186224037890 Execute read# 2, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 MaxRowsInResult: 1 Reverse: false TotalRowsLimit: 1001 } 2025-03-31T16:37:29.502342Z node 8 :TX_DATASHARD TRACE: 72075186224037890 Complete read# {[8:1119:2880], 0} after executionsCount# 2 2025-03-31T16:37:29.502402Z node 8 :TX_DATASHARD TRACE: 72075186224037890 read iterator# {[8:1119:2880], 0} sends rowCount# 1, bytes# 32, quota rows left# 1000, quota bytes left# 5242848, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-03-31T16:37:29.502515Z node 8 :TX_DATASHARD TRACE: Execution status for [0:1] at 72075186224037890 is Executed 2025-03-31T16:37:29.502534Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 72075186224037890 executing on unit ExecuteRead 2025-03-31T16:37:29.502554Z node 8 :TX_DATASHARD TRACE: Add [0:1] at 72075186224037890 to execution unit CompletedOperations 2025-03-31T16:37:29.502578Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:1] at 72075186224037890 on unit CompletedOperations 2025-03-31T16:37:29.502619Z node 8 :TX_DATASHARD TRACE: Execution status for [0:1] at 72075186224037890 is Executed 2025-03-31T16:37:29.502640Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 72075186224037890 executing on unit CompletedOperations 2025-03-31T16:37:29.502664Z node 8 :TX_DATASHARD TRACE: Execution plan for [0:1] at 72075186224037890 has finished 2025-03-31T16:37:29.502704Z node 8 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037890 2025-03-31T16:37:29.502801Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} hope 2 -> done Change{7, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-03-31T16:37:29.502892Z node 8 
:TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} release 8388608b of static, Memory{0 dyn 0} 2025-03-31T16:37:29.502955Z node 8 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037890 2025-03-31T16:37:29.503152Z node 8 :TX_DATASHARD TRACE: StateWorkAsFollower, received event# 269553217, Sender [8:1088:2861], Recipient [8:1088:2861]: NKikimr::TEvDataShard::TEvReadContinue 2025-03-31T16:37:29.503195Z node 8 :TX_DATASHARD TRACE: StateWorkAsFollower, processing event TEvDataShard::TEvReadContinue 2025-03-31T16:37:29.503301Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{2, NKikimr::NDataShard::TDataShard::TTxReadContinue} queued, type NKikimr::NDataShard::TDataShard::TTxReadContinue 2025-03-31T16:37:29.503379Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{2, NKikimr::NDataShard::TDataShard::TTxReadContinue} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-31T16:37:29.503465Z node 8 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[8:1119:2880], 0}, firstUnprocessedQuery# 0 2025-03-31T16:37:29.503558Z node 8 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[8:1119:2880], 0}, FirstUnprocessedQuery# 0 2025-03-31T16:37:29.503677Z node 8 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[8:1119:2880], 0} sends rowCount# 0, bytes# 0, quota rows left# 1000, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-03-31T16:37:29.503735Z node 8 :TX_DATASHARD DEBUG: 72075186224037890 read iterator# {[8:1119:2880], 0} finished in ReadContinue 2025-03-31T16:37:29.503841Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{2, NKikimr::NDataShard::TDataShard::TTxReadContinue} hope 1 -> done Change{7, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-03-31T16:37:29.503913Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{2, NKikimr::NDataShard::TDataShard::TTxReadContinue} release 4194304b of static, Memory{0 dyn 0} 2025-03-31T16:37:29.504873Z node 8 :TX_DATASHARD TRACE: StateWorkAsFollower, received event# 269553219, Sender [8:1119:2880], Recipient [8:1088:2861]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-03-31T16:37:29.504927Z node 8 :TX_DATASHARD TRACE: StateWorkAsFollower, processing event TEvDataShard::TEvReadCancel 2025-03-31T16:37:29.505004Z node 8 :TX_DATASHARD TRACE: 72075186224037890 ReadCancel: { ReadId: 0 } { items { uint32_value: 3 } items { uint32_value: 33 } } |97.8%| [TM] {RESULT} ydb/core/tx/datashard/ut_followers/unittest >> TMemoryController::Counters_NoHardLimit [GOOD] >> TMemoryController::Config_ConsumerLimits >> TxKeys::ComparePointKeys [GOOD] >> TxKeys::ComparePointKeysWithNull >> Splitter::Simple ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_column_stats/unittest >> StatisticsScan::RunScanOnShard [GOOD] Test command err: 2025-03-31T16:37:27.965021Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:37:27.965313Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:37:27.965507Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00060a/r3tmp/tmpyxmxZh/pdisk_1.dat 2025-03-31T16:37:28.699700Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:37:28.759981Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:37:28.807423Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:37:28.807941Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:37:28.821067Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:37:28.929515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:37:29.349239Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:37:29.349402Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:747:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:37:29.349478Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:37:29.361054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:37:29.529835Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:37:29.615735Z node 1 :TX_PROXY ERROR: Actor# [1:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:37:30.999301Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqph1cm2ey7hw3rst4jsg67r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWIwNmUxMDktNjkxODIzYmYtZTEzMGU3OGUtZTEyOTdkYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root |97.8%| [TM] {RESULT} ydb/core/tx/datashard/ut_column_stats/unittest >> Splitter::Simple [GOOD] >> Splitter::Small [GOOD] >> Splitter::Minimal [GOOD] >> Splitter::Trivial [GOOD] >> Splitter::BigAndSmall ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::CdcAndMergeWithReboots[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [1:135:2157] sender: [1:136:2058] recipient: [1:111:2142] 2025-03-31T16:29:57.649684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:29:57.649797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:29:57.649839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:29:57.649877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:29:57.649931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:29:57.649965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:29:57.650056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:29:57.650171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:29:57.650505Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:29:57.745795Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-03-31T16:29:57.745885Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:169:2058] recipient: [1:15:2062] 2025-03-31T16:29:57.772988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:29:57.773194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:29:57.773349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:29:57.780583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:29:57.780892Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:29:57.781615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:57.781859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:29:57.786804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:57.788501Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:29:57.788570Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:57.788698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:29:57.788759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:29:57.788804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:29:57.788935Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:210:2058] recipient: [1:208:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:210:2058] recipient: [1:208:2209] Leader for TabletID 72057594037968897 is [1:214:2213] sender: [1:215:2058] recipient: [1:208:2209] 2025-03-31T16:29:57.796455Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:235:2058] recipient: [1:15:2062] 2025-03-31T16:29:57.929646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:29:57.929941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at 
schemeshard: 72057594046678944 2025-03-31T16:29:57.930199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:29:57.930502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:29:57.930577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:57.936315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:57.936506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:29:57.936732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:57.936806Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:29:57.936848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:29:57.936887Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:29:57.939505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:57.939591Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:29:57.939639Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:29:57.941974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:57.942040Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:57.942079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:57.942141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:29:57.960847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:29:57.963481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:29:57.963745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:135:2157] sender: [1:250:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:29:57.964942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:57.965105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 135 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:29:57.965198Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:57.965488Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:29:57.965553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:57.965749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:29:57.965894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:29:57.968104Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:29:57.968156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:29:57.968345Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:57.968392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:202:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:29:57.968752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:57.968799Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:29:57.968914Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:29:57.968950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:29:57.968994Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:29:57.969039Z no ... 
" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 6 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 
72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:37:30.065422Z node 164 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-31T16:37:30.065743Z node 164 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 365us result status StatusSuccess 2025-03-31T16:37:30.066650Z node 164 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 
ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:37:30.078328Z node 164 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409551:2][72075186233409546][164:1172:2956] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-03-31T16:37:30.078496Z node 164 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409551:2][164:1142:2956] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2025-03-31T16:37:30.078670Z node 164 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409551:2][72075186233409546][164:1172:2956] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1743439049977674 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, 
LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1743439049977674 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 },{ Order: 5 Group: 1743439049977674 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 }] } 2025-03-31T16:37:30.082045Z node 164 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409551:2][72075186233409546][164:1172:2956] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 5 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 5 2025-03-31T16:37:30.082199Z node 164 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409551:2][164:1142:2956] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } >> TxKeys::ComparePointKeysWithNull [GOOD] >> TxKeys::ComparePointAndRange >> Splitter::BigAndSmall [GOOD] >> Splitter::CritSmallPortions >> Coordinator::RestoreDomainConfiguration [GOOD] >> Coordinator::RestoreTenantConfiguration-AlterDatabaseCreateHiveFirst-false >> TCreateAndDropViewTest::CheckCreatedView >> test_postgres.py::TestPostgresSuite::test_postgres_suite[float8] [FAIL] >> TopicSessionTests::WrongFieldType [GOOD] >> TxKeys::ComparePointAndRange [GOOD] >> TxKeys::ComparePointAndRangeWithNull >> test_alloc_default.py::TestAlloc::test_default_delta[kikimr0] [GOOD] >> TopicSessionTests::RestartSessionIfNewClientWithOffset >> TestFilterSet::FilterGroup [GOOD] |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/postgresql/py3test >> test_postgres.py::TestPostgresSuite::test_postgres_suite[float8] [FAIL] |97.8%| [TM] {RESULT} ydb/tests/functional/postgresql/py3test >> TestFilterSet::DuplicationValidation >> test_yt_reading.py::TestYtReading::test_block_reading [GOOD] >> kikimr_config.py::test_kikimr_config_generator_generic_connector_config >> kikimr_config.py::test_kikimr_config_generator_generic_connector_config [GOOD] >> TxKeys::ComparePointAndRangeWithNull [GOOD] >> TxKeys::ComparePointAndRangeWithInf >> TTxDataShardPrefixKMeansScan::BuildToBuild [GOOD] |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_import/py3test >> test_yt_reading.py::TestYtReading::test_block_reading [GOOD] |97.8%| [TM] {RESULT} ydb/tests/fq/yt/kqp_yt_import/py3test >> TGRpcRateLimiterTest::AcquireResourceManyUsedGrpcApiWithCancelAfter [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyUsedActorApiWithCancelAfter |97.8%| [TS] {asan, default-linux-x86_64, release} ydb/tests/library/ut/py3test >> kikimr_config.py::test_kikimr_config_generator_generic_connector_config [GOOD] |97.8%| [TS] {RESULT} ydb/tests/library/ut/py3test >> Splitter::CritSmallPortions [GOOD] >> Splitter::Crit >> test_http_api.py::TestHttpApi::test_restart_idempotency [GOOD] >> test_http_api.py::TestHttpApi::test_simple_streaming_query >> test.py::test_order_conflict [GOOD] >> test.py::test_missing_value [GOOD] >> test.py::test_unexpected_value [GOOD] >> 
test.py::test_local >> TxKeys::ComparePointAndRangeWithInf [GOOD] >> QuoterWithKesusTest::GetsQuota [GOOD] >> QuoterWithKesusTest::GetsBigQuota ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_prefix_kmeans/unittest >> TTxDataShardPrefixKMeansScan::BuildToBuild [GOOD] Test command err: 2025-03-31T16:37:19.910196Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488013656945792824:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:37:19.910333Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0004df/r3tmp/tmpcm5lA2/pdisk_1.dat 2025-03-31T16:37:20.418042Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:37:20.459338Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:37:20.459474Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:37:20.462786Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:37:20.518859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:37:20.598368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:37:20.649806Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:7488013661240760704:2295] 2025-03-31T16:37:20.650088Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:37:20.667719Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:37:20.667840Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:37:20.672501Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:37:20.672560Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:37:20.672603Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:37:20.674966Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:37:20.675062Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:37:20.675106Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:7488013661240760718:2295] in generation 1 2025-03-31T16:37:20.676538Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:37:20.720649Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T16:37:20.721860Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:37:20.721930Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:7488013661240760723:2296] 2025-03-31T16:37:20.721972Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:37:20.721987Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 
72075186224037888, state: WaitScheme 2025-03-31T16:37:20.721995Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:37:20.723073Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7488013661240760701:2297], serverId# [1:7488013661240760721:2306], sessionId# [0:0:0] 2025-03-31T16:37:20.723330Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T16:37:20.723416Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T16:37:20.723439Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:37:20.723468Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:37:20.723724Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:37:20.723756Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:37:20.723789Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:37:20.724154Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976710657 ssId 72057594046644480 seqNo 2:1 2025-03-31T16:37:20.725003Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976710657 at tablet 72075186224037888 2025-03-31T16:37:20.726933Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:37:20.727391Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:37:20.727473Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-31T16:37:20.730192Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7488013661240760737:2315], serverId# [1:7488013661240760738:2316], sessionId# [0:0:0] 2025-03-31T16:37:20.737343Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976710657 at step 1743439040777 at tablet 72075186224037888 { Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743439040777 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T16:37:20.737398Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:37:20.737571Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:37:20.737647Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:37:20.737670Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:37:20.737716Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1743439040777:281474976710657] in PlanQueue unit at 72075186224037888 2025-03-31T16:37:20.737925Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1743439040777:281474976710657 keys extracted: 0 2025-03-31T16:37:20.738043Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:37:20.738248Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:37:20.738310Z 
node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-31T16:37:20.741256Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-31T16:37:20.742766Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:37:20.743971Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1743439040776 2025-03-31T16:37:20.744001Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:37:20.744036Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1743439040784 2025-03-31T16:37:20.744767Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1743439040777} 2025-03-31T16:37:20.744817Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:37:20.744851Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:37:20.744865Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:37:20.744887Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-31T16:37:20.744934Z node 1 :TX_DATASHARD DEBUG: Complete [1743439040777 : 281474976710657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:7488013661240760519:2180], exec latency: 4 ms, propose latency: 6 ms 2025-03-31T16:37:20.744961Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976710657 state Ready TxInFly 0 2025-03-31T16:37:20.745040Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:37:20.749436Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976710657 datashard 72075186224037888 state Ready 2025-03-31T16:37:20.749495Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-31T16:37:20.759169Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7488013661240760773:2342], serverId# [1:7488013661240760774:2343], sessionId# [0:0:0] 2025-03-31T16:37:20.770307Z node 1 :BUILD_INDEX NOTICE: Starting TPrefixKMeansScan Id: 1 TabletId: 72075186224037888 PathId { OwnerId: 72057594046644480 LocalId: 2 } SeqNoGeneration: 1 SeqNoRound: 1 Settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 2 } Seed: 1337 Upload: UPLOAD_BUILD_TO_POSTING K: 0 NeedsRounds: 3 Child: 1 LevelName: "/Root/table-level" PostingName: "/Root/table-posting" EmbeddingColumn: "embedding" row version v1743439040784/18446744073709551615 2025-03-31T16:37:20.772395Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7488013661240760778:2347], serverId# [1:7488013661240760779:2348], sessionId# [0:0:0] 2025-03-31T16:37:20.772752Z node 1 :BUILD_INDEX NOTICE: Starting TPrefixKMeansScan Id: 1 TabletId: 72075186224037888 PathId { OwnerId: 72057594046644480 LocalId: 2 } SeqNoGeneration: 2 SeqNoRound: 1 Settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 2 } Seed: 
1337 Upload: UPLOAD_BUILD_TO_POSTING K: 1 NeedsRounds: 3 Child: 1 LevelName: "/Root/table-level" PostingName: "/Root/table-posting" EmbeddingColumn: "embedding" row version v1743439040784/18446744073709551615 2025-03-31T16:37:20.774284Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7488013661240760783:2352], serverId# [1:7488013661240760784:2353], sessionId# [0:0:0] 2025-03-31T16:37:20.774477Z node 1 :BUILD_INDEX NOTICE: Starting TPrefixKMeansScan Id: 1 TabletId: 72075186224037888 PathId { OwnerId: 72057594046644480 LocalId: 2 } SeqNoGeneration: 3 SeqNoRound: 1 Settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 2 } Seed: 1337 Upload: UPLOAD_BUILD_TO_POSTING K: 2 NeedsRounds: 3 Child: 1 LevelName: "/Root/table-level" PostingName: "/Root/table-posting" row version v1743439040784/18446744073709551615 2025-03-31T16:37:20.776025Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7488013661240760788:2357], serverId# [1:7488013661240760789:2358], sessionId# [0:0:0] 2025-03-31T16:37:20.776181Z node 1 :BUILD_INDEX NOTICE: Starting TPrefixKMeansScan Id: 1 TabletId: 0 PathId { OwnerId: 72057594046644480 LocalId: 2 } SeqNoGeneration: 4 SeqNoRound: 1 Settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 2 } Seed: 1337 Upload: UPLOAD_BUILD_TO_POSTING K: 2 NeedsRounds: 3 Child: 1 LevelName: "/Root/table-level" PostingName: "/Root/table-posting" EmbeddingColumn: "embedding" row version v1743439040784/18446744073709551615 2025-03-31T16:37:20.777660Z node 1 :TX_DATASHARD DEBUG: Server connected ... ned 1 immediate 0 planned 1 2025-03-31T16:37:35.180152Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037911 2025-03-31T16:37:35.180191Z node 3 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037911 tableId# [OwnerId: 72057594046644480, LocalPathId: 29] schema version# 1 2025-03-31T16:37:35.180604Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037911 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-31T16:37:35.180910Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037911 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:37:35.181424Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:37:35.182218Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037911 time 1743439055224 2025-03-31T16:37:35.182232Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037911 2025-03-31T16:37:35.182251Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037911 2025-03-31T16:37:35.182297Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037910 2025-03-31T16:37:35.182363Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037911 step# 1743439055225} 2025-03-31T16:37:35.182392Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037911 2025-03-31T16:37:35.182921Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037911 coordinator 72057594046316545 last step 0 next step 1743439055225 
2025-03-31T16:37:35.182963Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037911 2025-03-31T16:37:35.182979Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037911 2025-03-31T16:37:35.182995Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037911 2025-03-31T16:37:35.183033Z node 3 :TX_DATASHARD DEBUG: Complete [1743439055225 : 281474976710725] from 72075186224037911 at tablet 72075186224037911 send result to client [3:7488013705757414929:2143], exec latency: 0 ms, propose latency: 2 ms 2025-03-31T16:37:35.183055Z node 3 :TX_DATASHARD INFO: 72075186224037911 Sending notify to schemeshard 72057594046644480 txId 281474976710725 state Ready TxInFly 0 2025-03-31T16:37:35.183109Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037911 2025-03-31T16:37:35.183875Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976710725 datashard 72075186224037911 state Ready 2025-03-31T16:37:35.183910Z node 3 :TX_DATASHARD DEBUG: 72075186224037911 Got TEvSchemaChangedResult from SS at 72075186224037911 2025-03-31T16:37:35.190132Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710726:0, at schemeshard: 72057594046644480 2025-03-31T16:37:35.192634Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:37:35.192726Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037910 2025-03-31T16:37:35.192767Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037911 2025-03-31T16:37:35.197042Z node 3 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037912 actor [3:7488013727232254819:2439] 2025-03-31T16:37:35.197221Z node 3 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:37:35.207399Z node 3 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:37:35.207467Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:37:35.208782Z node 3 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037912 2025-03-31T16:37:35.208819Z node 3 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037912 2025-03-31T16:37:35.208851Z node 3 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037912 2025-03-31T16:37:35.209189Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:37:35.209236Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:37:35.209260Z node 3 :TX_DATASHARD DEBUG: DataShard 72075186224037912 persisting started state actor id [3:7488013727232254837:2439] in generation 1 2025-03-31T16:37:35.211101Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:37:35.211143Z node 3 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037912 2025-03-31T16:37:35.211213Z node 3 :TX_DATASHARD DEBUG: 72075186224037912 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:37:35.211250Z node 3 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037912, actorId: [3:7488013727232254839:2440] 2025-03-31T16:37:35.211262Z node 3 :TX_DATASHARD DEBUG: Trying to activate 
change sender: at tablet: 72075186224037912 2025-03-31T16:37:35.211273Z node 3 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037912, state: WaitScheme 2025-03-31T16:37:35.211284Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037912 2025-03-31T16:37:35.211383Z node 3 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037912 2025-03-31T16:37:35.211446Z node 3 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037912 2025-03-31T16:37:35.211465Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037912 2025-03-31T16:37:35.211479Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037912 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:37:35.211494Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037912 TxInFly 0 2025-03-31T16:37:35.211511Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037912 2025-03-31T16:37:35.212481Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037912, clientId# [3:7488013727232254825:4707], serverId# [3:7488013727232254829:4710], sessionId# [0:0:0] 2025-03-31T16:37:35.212570Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037912 2025-03-31T16:37:35.212754Z node 3 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037912 txId 281474976710726 ssId 72057594046644480 seqNo 2:46 2025-03-31T16:37:35.212806Z node 3 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976710726 at tablet 72075186224037912 2025-03-31T16:37:35.213662Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037912 2025-03-31T16:37:35.214139Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037912 2025-03-31T16:37:35.214190Z node 3 :TX_DATASHARD DEBUG: 72075186224037912 not sending time cast registration request in state WaitScheme 2025-03-31T16:37:35.217737Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037912, clientId# [3:7488013727232254845:4720], serverId# [3:7488013727232254846:4721], sessionId# [0:0:0] 2025-03-31T16:37:35.218034Z node 3 :TX_DATASHARD DEBUG: Planned transaction txId 281474976710726 at step 1743439055260 at tablet 72075186224037912 { Transactions { TxId: 281474976710726 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743439055260 MediatorID: 72057594046382081 TabletID: 72075186224037912 } 2025-03-31T16:37:35.218051Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037912 2025-03-31T16:37:35.218171Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037912 2025-03-31T16:37:35.218171Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037911 2025-03-31T16:37:35.218187Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037912 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:37:35.218210Z node 3 :TX_DATASHARD DEBUG: Found ready operation [1743439055260:281474976710726] in PlanQueue unit at 72075186224037912 2025-03-31T16:37:35.218266Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:37:35.218327Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, 
outOfSpace = 0 at datashard 72075186224037910 2025-03-31T16:37:35.218470Z node 3 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037912 loaded tx from db 1743439055260:281474976710726 keys extracted: 0 2025-03-31T16:37:35.218584Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037912 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:37:35.218687Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037912 2025-03-31T16:37:35.218717Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037912 2025-03-31T16:37:35.218752Z node 3 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037912 tableId# [OwnerId: 72057594046644480, LocalPathId: 30] schema version# 1 2025-03-31T16:37:35.219169Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037912 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-31T16:37:35.219515Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037912 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:37:35.220985Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037912 time 1743439055259 2025-03-31T16:37:35.221026Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037912 2025-03-31T16:37:35.221045Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037912 coordinator 72057594046316545 last step 0 next step 1743439055267 2025-03-31T16:37:35.221087Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037912 step# 1743439055260} 2025-03-31T16:37:35.221116Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037912 2025-03-31T16:37:35.221146Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037912 2025-03-31T16:37:35.221165Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037912 2025-03-31T16:37:35.221185Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037912 2025-03-31T16:37:35.221220Z node 3 :TX_DATASHARD DEBUG: Complete [1743439055260 : 281474976710726] from 72075186224037912 at tablet 72075186224037912 send result to client [3:7488013705757414929:2143], exec latency: 0 ms, propose latency: 2 ms 2025-03-31T16:37:35.221241Z node 3 :TX_DATASHARD INFO: 72075186224037912 Sending notify to schemeshard 72057594046644480 txId 281474976710726 state Ready TxInFly 0 2025-03-31T16:37:35.221276Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037912 2025-03-31T16:37:35.222602Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976710726 datashard 72075186224037912 state Ready 2025-03-31T16:37:35.222645Z node 3 :TX_DATASHARD DEBUG: 72075186224037912 Got TEvSchemaChangedResult from SS at 72075186224037912 |97.8%| [TM] {RESULT} ydb/core/tx/datashard/ut_prefix_kmeans/unittest >> KeyValueGRPCService::SimpleWriteReadWithoutToken [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithoutLockGeneration1 >> TRangeTreap::Sequential [GOOD] >> TRangeTreap::Random ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_keys/unittest >> TxKeys::ComparePointAndRangeWithInf [GOOD] Test command err: 2025-03-31T16:37:31.505000Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2140], Recipient [1:131:2154]: 
NKikimr::TEvTablet::TEvBoot 2025-03-31T16:37:31.507995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:37:31.508070Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:37:31.519861Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:37:31.527636Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:131:2154] 2025-03-31T16:37:31.528810Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:37:31.549507Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:37:31.598471Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:37:31.598796Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:37:31.621728Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-03-31T16:37:31.621850Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-03-31T16:37:31.621925Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-03-31T16:37:31.624480Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:37:31.627685Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:37:31.627818Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:197:2154] in generation 2 2025-03-31T16:37:31.743072Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:37:31.796597Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-03-31T16:37:31.798768Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:37:31.799234Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-03-31T16:37:31.799344Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-03-31T16:37:31.799454Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-03-31T16:37:31.799622Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:37:31.800355Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:37:31.800606Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:37:31.802658Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-03-31T16:37:31.802867Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-03-31T16:37:31.802993Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-31T16:37:31.803068Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:37:31.803284Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-03-31T16:37:31.803341Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-31T16:37:31.803465Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-31T16:37:31.803582Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 
2025-03-31T16:37:31.803708Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:37:31.803939Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:37:31.804066Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:37:31.804217Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-03-31T16:37:31.815507Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:131:2154]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nY\n\006table2\032\n\n\004key1\030\002 \"\032\013\n\004key2\030\200$ #\032\014\n\005value\030\200$ 8(\"(#:\010Z\006\010\000\030\000(\000J\014/Root/table2\222\002\013\th\020\000\000\000\000\000\000\020\016" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-03-31T16:37:31.815640Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T16:37:31.815794Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-31T16:37:31.816150Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-03-31T16:37:31.816251Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-03-31T16:37:31.816321Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-03-31T16:37:31.816537Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-31T16:37:31.816705Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-03-31T16:37:31.816940Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-03-31T16:37:31.817021Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-03-31T16:37:31.817649Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-03-31T16:37:31.817712Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-03-31T16:37:31.817765Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-03-31T16:37:31.817840Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-03-31T16:37:31.817911Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-03-31T16:37:31.817960Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-03-31T16:37:31.818035Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-03-31T16:37:31.818102Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-03-31T16:37:31.818174Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-03-31T16:37:31.831573Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-03-31T16:37:31.831699Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-03-31T16:37:31.831753Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit 
FinishPropose 2025-03-31T16:37:31.831834Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-03-31T16:37:31.832917Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-03-31T16:37:31.836252Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:37:31.836389Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:37:31.836473Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-03-31T16:37:31.836687Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:131:2154]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-03-31T16:37:31.836719Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-31T16:37:31.836926Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-03-31T16:37:31.836978Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-31T16:37:31.837041Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-03-31T16:37:31.837089Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-03-31T16:37:31.841194Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-03-31T16:37:31.841268Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:37:31.841549Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:37:31.841587Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:37:31.841647Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-31T16:37:31.841698Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:37:31.841742Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-31T16:37:31.841805Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-03-31T16:37:31.841854Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-03-31T16:37:31.841908Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-31T16:37:31.841966Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-03-31T16:37:31.842008Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-03-31T16:37:31.842061Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-03-31T16:37:31.842261Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-03-31T16:37:31.842305Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 
2025-03-31T16:37:31.842337Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-03-31T16:37:31.842374Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-03-31T16:37:31.842400Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-03-31T16:37:31.842470Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-31T16:37:31.842486Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-03-31T16:37:31.842511Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-31T16:37:31.842544Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-03-31T16:37:31.842589Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-03-31T16:37:31.842622Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-03-31T16:37:31.842662Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at ... 25-03-31T16:37:35.966439Z node 5 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit CreateIncrementalRestoreSrc 2025-03-31T16:37:35.966503Z node 5 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-31T16:37:35.966549Z node 5 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit CreateIncrementalRestoreSrc 2025-03-31T16:37:35.966577Z node 5 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit CompleteOperation 2025-03-31T16:37:35.966601Z node 5 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit CompleteOperation 2025-03-31T16:37:35.966835Z node 5 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is DelayComplete 2025-03-31T16:37:35.966879Z node 5 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit CompleteOperation 2025-03-31T16:37:35.966919Z node 5 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit CompletedOperations 2025-03-31T16:37:35.966960Z node 5 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit CompletedOperations 2025-03-31T16:37:35.966996Z node 5 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-31T16:37:35.967022Z node 5 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit CompletedOperations 2025-03-31T16:37:35.967052Z node 5 :TX_DATASHARD TRACE: Execution plan for [1000001:1] at 9437184 has finished 2025-03-31T16:37:35.967114Z node 5 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:37:35.967152Z node 5 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-31T16:37:35.967194Z node 5 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-31T16:37:35.967236Z node 5 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-31T16:37:35.968719Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 270270976, Sender [5:24:2071], Recipient [5:131:2154]: {TEvRegisterTabletResult TabletId# 9437184 Entry# 0} 2025-03-31T16:37:35.968770Z node 5 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 
2025-03-31T16:37:35.968801Z node 5 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 9437184 time 0 2025-03-31T16:37:35.968836Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:37:35.973557Z node 5 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000001 txid# 1} 2025-03-31T16:37:35.973623Z node 5 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 1000001} 2025-03-31T16:37:35.973681Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:37:35.974040Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:37:35.974073Z node 5 :TX_DATASHARD TRACE: Complete execution for [1000001:1] at 9437184 on unit CreateTable 2025-03-31T16:37:35.974111Z node 5 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-03-31T16:37:35.974162Z node 5 :TX_DATASHARD INFO: Change sender activated: at tablet: 9437184 2025-03-31T16:37:35.974196Z node 5 :TX_DATASHARD TRACE: Complete execution for [1000001:1] at 9437184 on unit CompleteOperation 2025-03-31T16:37:35.974264Z node 5 :TX_DATASHARD DEBUG: Complete [1000001 : 1] from 9437184 at tablet 9437184 send result to client [5:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-03-31T16:37:35.974320Z node 5 :TX_DATASHARD INFO: 9437184 Sending notify to schemeshard 4200 txId 1 state Ready TxInFly 0 2025-03-31T16:37:35.974423Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:37:35.975040Z node 5 :FLAT_TX_SCHEMESHARD WARN: Got TEvDataShard::TEvSchemaChanged for unknown txId 1 message# Source { RawX1: 131 RawX2: 21474838634 } Origin: 9437184 State: 2 TxId: 1 Step: 0 Generation: 2 2025-03-31T16:37:35.975156Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 269877760, Sender [5:227:2225], Recipient [5:131:2154]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 4200 Status: OK ServerId: [5:229:2226] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-03-31T16:37:35.975202Z node 5 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-03-31T16:37:35.975291Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 269552132, Sender [5:124:2150], Recipient [5:131:2154]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 1 2025-03-31T16:37:35.975324Z node 5 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2025-03-31T16:37:35.975363Z node 5 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 1 datashard 9437184 state Ready 2025-03-31T16:37:35.975416Z node 5 :TX_DATASHARD DEBUG: 9437184 Got TEvSchemaChangedResult from SS at 9437184 2025-03-31T16:37:35.975732Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 65543, Sender [5:99:2134], Recipient [5:131:2154]: NActors::TEvents::TEvPoison 2025-03-31T16:37:35.976052Z node 5 :TX_DATASHARD INFO: OnDetach: 9437184 2025-03-31T16:37:35.976286Z node 5 :TX_DATASHARD INFO: Change sender killed: at tablet: 9437184 2025-03-31T16:37:35.985401Z node 5 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [5:232:2227], Recipient [5:235:2228]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:37:35.991470Z node 5 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [5:232:2227], Recipient [5:235:2228]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:37:35.991680Z node 5 :TX_DATASHARD TRACE: StateInit, received event# 268828684, Sender [5:232:2227], Recipient [5:235:2228]: 
NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:37:36.002430Z node 5 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [5:235:2228] 2025-03-31T16:37:36.002722Z node 5 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:37:36.009630Z node 5 :TX_DATASHARD DEBUG: TxInitSchema.Execute Persist Sys_SubDomainInfo 2025-03-31T16:37:36.045005Z node 5 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:37:36.045132Z node 5 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:37:36.047265Z node 5 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-03-31T16:37:36.047380Z node 5 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-03-31T16:37:36.047453Z node 5 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-03-31T16:37:36.047992Z node 5 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:37:36.049071Z node 5 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:37:36.049191Z node 5 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [5:278:2228] in generation 3 2025-03-31T16:37:36.062097Z node 5 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:37:36.062241Z node 5 :TX_DATASHARD INFO: Switched to work state Ready tabletId 9437184 2025-03-31T16:37:36.062350Z node 5 :TX_DATASHARD INFO: 9437184 Sending notify to schemeshard 4200 txId 1 state Ready TxInFly 0 2025-03-31T16:37:36.062500Z node 5 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 9437184 mediators count is 0 coordinators count is 1 buckets per mediator 2 2025-03-31T16:37:36.062786Z node 5 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [5:283:2267] 2025-03-31T16:37:36.062834Z node 5 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-03-31T16:37:36.062888Z node 5 :TX_DATASHARD INFO: Change sender activated: at tablet: 9437184 2025-03-31T16:37:36.062935Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:37:36.063397Z node 5 :TX_DATASHARD DEBUG: TxInitSchemaDefaults.Execute 2025-03-31T16:37:36.063554Z node 5 :TX_DATASHARD DEBUG: TxInitSchemaDefaults.Complete 2025-03-31T16:37:36.063838Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [5:235:2228], Recipient [5:235:2228]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:37:36.063909Z node 5 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:37:36.064200Z node 5 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-03-31T16:37:36.064306Z node 5 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-03-31T16:37:36.064473Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 270270976, Sender [5:24:2071], Recipient [5:235:2228]: {TEvRegisterTabletResult TabletId# 9437184 Entry# 0} 2025-03-31T16:37:36.064515Z node 5 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2025-03-31T16:37:36.064563Z node 5 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 9437184 time 0 2025-03-31T16:37:36.064605Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:37:36.064763Z node 5 :FLAT_TX_SCHEMESHARD WARN: Got TEvDataShard::TEvSchemaChanged for unknown txId 1 message# Source { RawX1: 235 RawX2: 21474838708 } Origin: 9437184 State: 2 TxId: 1 Step: 
0 Generation: 3 2025-03-31T16:37:36.064885Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 270270978, Sender [5:24:2071], Recipient [5:235:2228]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 0 ReadStep# 0 } 2025-03-31T16:37:36.064924Z node 5 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2025-03-31T16:37:36.064974Z node 5 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 9437184 coordinator 72057594046316545 last step 0 next step 0 2025-03-31T16:37:36.065052Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-31T16:37:36.065100Z node 5 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:37:36.065141Z node 5 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-03-31T16:37:36.065183Z node 5 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-31T16:37:36.065221Z node 5 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-31T16:37:36.065263Z node 5 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-03-31T16:37:36.065309Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:37:36.065428Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 269877760, Sender [5:281:2265], Recipient [5:235:2228]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 4200 Status: OK ServerId: [5:285:2269] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-03-31T16:37:36.065467Z node 5 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-03-31T16:37:36.065564Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 269552132, Sender [5:124:2150], Recipient [5:235:2228]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 1 2025-03-31T16:37:36.065597Z node 5 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2025-03-31T16:37:36.065644Z node 5 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 1 datashard 9437184 state Ready 2025-03-31T16:37:36.065708Z node 5 :TX_DATASHARD DEBUG: 9437184 Got TEvSchemaChangedResult from SS at 9437184 2025-03-31T16:37:36.082117Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [5:281:2265], Recipient [5:235:2228]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 4200 ClientId: [5:281:2265] ServerId: [5:285:2269] } 2025-03-31T16:37:36.082218Z node 5 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed |97.8%| [TM] {RESULT} ydb/core/tx/datashard/ut_keys/unittest >> test_http_api.py::TestHttpApi::test_simple_streaming_query [GOOD] >> test_http_api.py::TestHttpApi::test_integral_results >> TMemoryController::Config_ConsumerLimits [GOOD] >> TMemoryController::SharedCache >> TRangeTreap::Random [GOOD] >> test.py::TestSqsSplitMergeStdTables::test_std_merge_split >> TestFilterSet::DuplicationValidation [GOOD] >> TestFilterSet::CompilationValidation >> TDqSolomonWriteActorTest::TestWriteFormat >> TCreateAndDropViewTest::CheckCreatedView [GOOD] >> TCreateAndDropViewTest::CreateViewDisabledFeatureFlag ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/locks/ut_range_treap/unittest >> TRangeTreap::Random [GOOD] Test command err: NOTE: building treap of size 1000000 got height 47 and needed 1000000 ops (1000000 inserts 0 updates 0 deletes) and 29217068 comparisons (29.217068 per op) NOTE: building treap of size 
9744 got height 34 and needed 10000 ops (9744 inserts 256 updates 0 deletes) and 190422 comparisons (19.0422 per op)
Checking point 8358 ... found 2503 ranges, needed 8982 comparisons (3.588493807 per range)
Checking point 3550 ... found 2295 ranges, needed 7341 comparisons (3.19869281 per range)
Checking point 9906 ... found 2464 ranges, needed 8797 comparisons (3.570211039 per range)
Checking point 9339 ... found 2539 ranges, needed 9008 comparisons (3.547853486 per range)
Checking point 4219 ... found 2421 ranges, needed 8404 comparisons (3.471292854 per range)
Checking point 9052 ... found 2508 ranges, needed 9121 comparisons (3.63676236 per range)
Checking point 8111 ... found 2495 ranges, needed 9142 comparisons (3.664128257 per range)
Checking point 3415 ... found 2245 ranges, needed 7125 comparisons (3.173719376 per range)
Checking point 5041 ... found 2528 ranges, needed 9404 comparisons (3.719936709 per range)
Checking point 3076 ... found 2114 ranges, needed 6479 comparisons (3.064806055 per range)
|97.8%| [TM] {RESULT} ydb/core/tx/locks/ut_range_treap/unittest
>> test_http_api.py::TestHttpApi::test_integral_results [GOOD]
>> test_http_api.py::TestHttpApi::test_optional_results
>> DataCleanup::ForceDataCleanup
>> DiscoveryIsNotBroken::NoKafkaEndpointInDiscovery [GOOD]
>> DiscoveryIsNotBroken::NoKafkaSslEndpointInDiscovery
>> Splitter::Crit [GOOD]
>> Splitter::CritSimple
>> test_http_api.py::TestHttpApi::test_optional_results [GOOD]
>> test_http_api.py::TestHttpApi::test_pg_results
>> test_alloc_default.py::TestAlloc::test_node_limit[kikimr0]
>> TestFilterSet::CompilationValidation [GOOD]
>> DataShardStats::HistogramStatsCorrect [GOOD]
>> DataShardStats::BlobsStatsCorrect
>> TestFormatHandler::ManyJsonClients
>> KqpScanLogs::GraceJoin [GOOD]
>> TGRpcRateLimiterTest::AcquireResourceManyUsedActorApiWithCancelAfter [GOOD]
>> TCreateAndDropViewTest::CreateViewDisabledFeatureFlag [GOOD]
>> TCreateAndDropViewTest::InvalidQuery
>> GraphShard::NormalizeAndDownsample1 [GOOD]
>> GraphShard::NormalizeAndDownsample2 [GOOD]
>> GraphShard::NormalizeAndDownsample3 [GOOD]
>> GraphShard::NormalizeAndDownsample4 [GOOD]
>> GraphShard::NormalizeAndDownsample5 [GOOD]
>> GraphShard::NormalizeAndDownsample6 [GOOD]
>> GraphShard::CheckHistogramToPercentileConversions [GOOD]
>> GraphShard::CreateGraphShard
>> test_http_api.py::TestHttpApi::test_pg_results [GOOD]
>> test_http_api.py::TestHttpApi::test_set_result
>> QuoterWithKesusTest::GetsBigQuota [GOOD]
>> QuoterWithKesusTest::GetsBigQuotaWithDeadline
>> KeyValueGRPCService::SimpleWriteReadWithoutLockGeneration1 [GOOD]
>> KeyValueGRPCService::SimpleWriteReadWithoutLockGeneration2
>> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateQuery
>> test_example.py::TestExample::test_example
>> KqpScanSpilling::HandleErrorsCorrectly [GOOD]
>> KqpScanSpilling::SpillingInRuntimeNodes-EnabledSpilling [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/runtime/unittest >> KqpScanLogs::GraceJoin [GOOD]
Test command err:
cwd: /home/runner/.ya/build/build_root/0l5c/000966/ydb/core/kqp/ut/runtime/test-results/unittest/testing_out_stuff/chunk0
Trying to start YDB, gRPC: 8731, MsgBus: 6816
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000966/r3tmp/tmpWbqI2o/pdisk_1.dat
TServer::EnableGrpc on GrpcPort 8731, node 1
TClient is connected to server localhost:6816
TClient is connected to server localhost:6816
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
waiting...
waiting...
waiting...
waiting...
waiting...
(
(let $1 (KqpTable '"/Root/KeyValue" '"72057594046644480:6" '"" '1))
(let $2 (KqpRowsSourceSettings $1 '('"Key" '"Value") '() (Void) '()))
(let $3 (OptionalType (DataType 'Uint64)))
(let $4 (OptionalType (DataType 'String)))
(let $5 '('('"_logical_id" '776) '('"_id" '"56ac5809-5b1f2ef8-30c9d2e0-c93e2e62") '('"_wide_channels" (StructType '('"Key" $3) '('"Value" $4)))))
(let $6 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $2)) (lambda '($17) (block '( (let $18 (lambda '($19) (Member $19 '"Key") (Member $19 '"Value"))) (return (FromFlow (ExpandMap (ToFlow $17) $18))) ))) $5))
(let $7 '('1))
(let $8 (DqCnHashShuffle (TDqOutput $6 '0) $7))
(let $9 (StructType '('"t1.Key" $3) '('"t1.Value" $4) '('"t2.Key" $3) '('"t2.Value" $4)))
(let $10 '('('"_logical_id" '674) '('"_id" '"8225bdd2-a43ace49-8d0230d4-537e8fc3") '('"_wide_channels" $9)))
(let $11 (DqPhyStage '($8) (lambda '($20) (block '( (let $21 '('0 '0 '1 '1)) (let $22 '('0 '2 '1 '3)) (let $23 (GraceSelfJoinCore (ToFlow $20) 'Full $7 $7 $21 $22 '('"t1.Value") '('"t2.Value") '())) (return (FromFlow (WideSort $23 '('('1 (Bool 'true)))))) ))) $10))
(let $12 (DqCnMerge (TDqOutput $11 '0) '('('1 '"Asc"))))
(let $13 (DqPhyStage '($12) (lambda '($24) (FromFlow (NarrowMap (ToFlow $24) (lambda '($25 $26 $27 $28) (AsStruct '('"t1.Key" $25) '('"t1.Value" $26) '('"t2.Key" $27) '('"t2.Value" $28)))))) '('('"_logical_id" '686) '('"_id" '"b2837c9c-c3eda892-582f2ee5-bb8ac191"))))
(let $14 '($6 $11 $13))
(let $15 '('"t1.Key" '"t1.Value" '"t2.Key" '"t2.Value"))
(let $16 (DqCnResult (TDqOutput $13 '0) $15))
(return (KqpPhysicalQuery '((KqpPhysicalTx $14 '($16) '() '('('"type" '"generic")))) '((KqpTxResultBinding (ListType $9) '0 '0)) '('('"type" '"query"))))
)
>> KqpScanLogs::WideCombine [GOOD]
>> KqpScanSpilling::SpillingInRuntimeNodes+EnabledSpilling [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/services/rate_limiter/ut/unittest >> TGRpcRateLimiterTest::AcquireResourceManyUsedActorApiWithCancelAfter [GOOD]
Test command err:
2025-03-31T16:36:31.699579Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488013451991939779:2207];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:36:31.701933Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0006b7/r3tmp/tmprOazFZ/pdisk_1.dat
2025-03-31T16:36:32.161991Z node 1
:IMPORT WARN: Table profiles were not loaded 2025-03-31T16:36:32.201481Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:36:32.202274Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:36:32.224097Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18064, node 1 2025-03-31T16:36:32.440548Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:36:32.440582Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:36:32.440591Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:36:32.440750Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25500 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:36:32.929084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:36:33.042716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:36:35.746561Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488013469401310739:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:36:35.746622Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0006b7/r3tmp/tmpbBzLFM/pdisk_1.dat 2025-03-31T16:36:35.897736Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:36:35.940215Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:36:35.940305Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:36:35.943974Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28838, node 4 2025-03-31T16:36:36.009457Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:36:36.009473Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:36:36.009478Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:36:36.009718Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1252 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:36:36.285495Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:36:36.351796Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:36:39.895738Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7488013484343417022:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:36:39.895783Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0006b7/r3tmp/tmpUSzbe3/pdisk_1.dat 2025-03-31T16:36:40.005395Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:36:40.041179Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:36:40.041265Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:36:40.050450Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64014, node 7 2025-03-31T16:36:40.106578Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:36:40.106603Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:36:40.106610Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:36:40.106744Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61155 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:36:40.365024Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:36:40.418941Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:36:43.852314Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7488013501115255885:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:36:43.852371Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0006b7/r3tmp/tmpcmMM3y/pdisk_1.dat 2025-03-31T16:36:44.003744Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:36:44.042331Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:36:44.042410Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:36:44.046466Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13037, node 10 2025-03-31T16:36:44.126656Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:36:44.126681Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:36:44.126690Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:36:44.126881Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26542 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:36 ... 
/r3tmp/tmp6QBkVZ/pdisk_1.dat 2025-03-31T16:37:17.381596Z node 28 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16163, node 28 2025-03-31T16:37:17.489953Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:37:17.490074Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:37:17.497763Z node 28 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:37:17.497795Z node 28 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:37:17.497808Z node 28 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:37:17.497977Z node 28 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:37:17.501668Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22947 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:37:17.893039Z node 28 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:37:17.984120Z node 28 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:37:23.412154Z node 31 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[31:7488013674410010847:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:37:23.412237Z node 31 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0006b7/r3tmp/tmpkDtm8w/pdisk_1.dat 2025-03-31T16:37:23.574609Z node 31 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:37:23.630144Z node 31 :HIVE WARN: HIVE#72057594037968897 Node(31, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:37:23.630260Z node 31 :HIVE WARN: HIVE#72057594037968897 Node(31, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:37:23.633808Z node 31 :HIVE WARN: HIVE#72057594037968897 Node(31, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9368, node 31 2025-03-31T16:37:23.719044Z node 31 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:37:23.719076Z node 31 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:37:23.719107Z node 31 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:37:23.719315Z node 31 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27926 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:37:24.136510Z node 31 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:37:24.210764Z node 31 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:37:29.727630Z node 34 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[34:7488013698319545336:2209];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0006b7/r3tmp/tmprucG5B/pdisk_1.dat 2025-03-31T16:37:29.846465Z node 34 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:37:29.965044Z node 34 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:37:30.027724Z node 34 :HIVE WARN: HIVE#72057594037968897 Node(34, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:37:30.027858Z node 34 :HIVE WARN: HIVE#72057594037968897 Node(34, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:37:30.034701Z node 34 :HIVE WARN: HIVE#72057594037968897 Node(34, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13552, node 34 2025-03-31T16:37:30.226139Z node 34 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:37:30.226165Z node 34 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:37:30.226182Z node 34 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:37:30.226369Z node 34 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12853 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:37:30.453023Z node 34 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:37:30.542325Z node 34 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:37:36.296324Z node 37 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[37:7488013728201937470:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:37:36.296529Z node 37 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0006b7/r3tmp/tmpsPEulb/pdisk_1.dat 2025-03-31T16:37:36.479191Z node 37 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:37:36.536720Z node 37 :HIVE WARN: HIVE#72057594037968897 Node(37, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:37:36.536853Z node 37 :HIVE WARN: HIVE#72057594037968897 Node(37, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:37:36.543955Z node 37 :HIVE WARN: HIVE#72057594037968897 Node(37, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4575, node 37 2025-03-31T16:37:36.658482Z node 37 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:37:36.658505Z node 37 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:37:36.658514Z node 37 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:37:36.658650Z node 37 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18945 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:37:36.890480Z node 37 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:37:36.978208Z node 37 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480 |97.8%| [TM] {RESULT} ydb/services/rate_limiter/ut/unittest >> DataShardCompaction::CompactBorrowed >> TopicSessionTests::RestartSessionIfNewClientWithOffset [GOOD] >> test_http_api.py::TestHttpApi::test_set_result [GOOD] >> test_http_api.py::TestHttpApi::test_complex_results >> Graph::CreateGraphShard >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListQueries >> TopicSessionTests::ReadNonExistentTopic ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/runtime/unittest >> KqpScanSpilling::HandleErrorsCorrectly [GOOD] Test command err: cwd: /home/runner/.ya/build/build_root/0l5c/000a2b/ydb/core/kqp/ut/runtime/test-results/unittest/testing_out_stuff/chunk2 Trying to start YDB, gRPC: 19346, MsgBus: 27900 2025-03-31T16:33:18.525711Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012622422506290:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:18.525840Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a2b/r3tmp/tmpUJe5Cl/pdisk_1.dat 2025-03-31T16:33:18.866911Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:33:18.919915Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:18.920064Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:33:18.921741Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19346, node 1 2025-03-31T16:33:19.061653Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:19.061681Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:19.061690Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:19.061806Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27900 TClient is connected to server localhost:27900 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:33:19.734732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:19.767188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:19.895658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:20.039010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:20.097052Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:21.359217Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012635307409971:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:21.359333Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: { <main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:21.685434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:33:21.713229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:33:21.740972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:33:21.765673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:33:21.791879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:33:21.858560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:33:21.931807Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012635307410489:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: { <main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:21.931887Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: { <main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:21.931930Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012635307410494:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: { <main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:21.934601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:33:21.942018Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012635307410496:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:33:22.025380Z node 1 :TX_PROXY ERROR: Actor# [1:7488012639602377846:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:33:23.525772Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488012622422506290:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:23.525877Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:33:33.865074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:33:33.865103Z node 1 :IMPORT WARN: Table profiles were not loaded ( (let $1 (KqpTable '"/Root/KeyValue" '"72057594046644480:6" '"" '1)) (let $2 (KqpRowsSourceSettings $1 '('"Key" '"Value") '() (Void) '())) (let $3 (OptionalType (DataType 'Uint64))) (let $4 (OptionalType (DataType 'String))) (let $5 '('('"_logical_id" '785) '('"_id" '"615fa163-8f2a9e72-4c8dabb5-74799d91") '('"_wide_channels" (StructType '('"Key" $3) '('"Value" $4))))) (let $6 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $2)) (lambda '($17) (block '( (let $18 (lambda '($19) (Member $19 '"Key") (Member $19 '"Value"))) (return (FromFlow (ExpandMap (ToFlow $17) $18))) ))) $5)) (let $7 '('1)) (let $8 (DqCnHashShuffle (TDqOutput $6 '0) $7)) (let $9 (StructType '('"t1.Key" $3) '('"t1.Value" $4) '('"t2.Key" $3) '('"t2.Value" $4))) (let $10 '('('"_logical_id" '683) '('"_id" '"df323306-f75ab819-706e9f8f-aa47970a") '('"_wide_channels" $9))) (let $11 (DqPhyStage '($8) (lambda '($20) (block '( (let $21 '('0 '0 '1 '1)) (let $22 '('0 '2 '1 '3)) (let $23 (GraceSelfJoinCore (ToFlow $20) 'Full $7 $7 $21 $22 '('"t1.Value") '('"t2.Value") '())) (return (FromFlow (WideSort $23 '('('1 (Bool 'true)))))) ))) $10)) (let $12 (DqCnMerge (TDqOutput $11 '0) '('('1 '"Asc")))) (let $13 (DqPhyStage '($12) (lambda '($24) (FromFlow (NarrowMap (ToFlow $24) (lambda '($25 $26 $27 $28) (AsStruct '('"t1.Key" $25) '('"t1.Value" $26) '('"t2.Key" $27) '('"t2.Value" $28)))))) '('('"_logical_id" '695) '('"_id" '"2bf19e89-e3204f08-95dd3766-e87a1cd")))) (let $14 '($6 $11 $13)) (let $15 '('"t1.Key" '"t1.Value" '"t2.Key" '"t2.Value")) (let $16 (DqCnResult (TDqOutput $13 '0) $15)) (return (KqpPhysicalQuery '((KqpPhysicalTx $14 '($16) '() '('('"type" '"generic")))) '((KqpTxResultBinding (ListType $9) '0 '0)) '('('"type" '"query")))) ) 2025-03-31T16:37:43.363470Z node 1 :KQP_COMPUTE ERROR: [Write] File size limit exceeded. From: [1:7488013756293883437:7372], blobId: 0, bytes: 1401088 2025-03-31T16:37:43.365058Z node 1 :KQP_COMPUTE ERROR: TxId: 281474976710971. Error: [TEvError] File size limit exceeded: 1/0Mb 2025-03-31T16:37:43.367445Z node 1 :KQP_COMPUTE ERROR: [Write] File size limit exceeded. From: [1:7488013756293883437:7372], blobId: 1, bytes: 84 2025-03-31T16:37:43.367558Z node 1 :KQP_COMPUTE ERROR: [Write] File size limit exceeded. From: [1:7488013756293883437:7372], blobId: 2, bytes: 2402376 2025-03-31T16:37:43.370581Z node 1 :KQP_COMPUTE ERROR: [Write] File size limit exceeded. From: [1:7488013756293883437:7372], blobId: 3, bytes: 144 2025-03-31T16:37:43.370647Z node 1 :KQP_COMPUTE ERROR: [Write] File size limit exceeded. 
From: [1:7488013756293883437:7372], blobId: 4, bytes: 1200936 2025-03-31T16:37:43.370945Z node 1 :KQP_COMPUTE ERROR: [Write] File size limit exceeded. From: [1:7488013756293883437:7372], blobId: 5, bytes: 72 2025-03-31T16:37:43.370975Z node 1 :KQP_COMPUTE ERROR: [Write] File size limit exceeded. From: [1:7488013756293883437:7372], blobId: 6, bytes: 1200744 2025-03-31T16:37:43.371223Z node 1 :KQP_COMPUTE ERROR: [Write] File size limit exceeded. From: [1:7488013756293883437:7372], blobId: 7, bytes: 72 2025-03-31T16:37:43.374071Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488013756293883428:4584], TxId: 281474976710971, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=MWMwZGYzNmUtZGE2NmRiYWYtNWU4MGFkZTctNDE1NmMyYTE=. TraceId : 01jqph1rre39fkp58sw4ag3bpg. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: INTERNAL_ERROR DEFAULT_ERROR: { <main>: Error: [Compute spilling][TEvError] File size limit exceeded: 1/0Mb }. 2025-03-31T16:37:43.374659Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488013756293883429:4585], TxId: 281474976710971, task: 3. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=MWMwZGYzNmUtZGE2NmRiYWYtNWU4MGFkZTctNDE1NmMyYTE=. TraceId : 01jqph1rre39fkp58sw4ag3bpg. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: INTERNAL_ERROR DEFAULT_ERROR: {
: Error: Terminate execution }. 2025-03-31T16:37:43.395508Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWMwZGYzNmUtZGE2NmRiYWYtNWU4MGFkZTctNDE1NmMyYTE=, ActorId: [1:7488013751998916113:4577], ActorState: ExecuteState, TraceId: 01jqph1rre39fkp58sw4ag3bpg, Create QueryResponse for error on request, msg: >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListQueries [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeQuery >> GraphShard::CreateGraphShard [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/runtime/unittest >> KqpScanLogs::WideCombine [GOOD] Test command err: cwd: /home/runner/.ya/build/build_root/0l5c/0009f8/ydb/core/kqp/ut/runtime/test-results/unittest/testing_out_stuff/chunk1 Trying to start YDB, gRPC: 30893, MsgBus: 12121 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0009f8/r3tmp/tmpRUgL6m/pdisk_1.dat TServer::EnableGrpc on GrpcPort 30893, node 1 TClient is connected to server localhost:12121 TClient is connected to server localhost:12121 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... 
( (let $1 (KqpTable '"/Root/KeyValue" '"72057594046644480:6" '"" '1)) (let $2 (KqpRowsSourceSettings $1 '('"Key" '"Value") '() (Void) '())) (let $3 (DataType 'Uint64)) (let $4 '('('"_logical_id" '505) '('"_id" '"354d5547-c9b11c00-396e5b2d-dfae1acf") '('"_wide_channels" (StructType '('"Value" (OptionalType (DataType 'String))) '('_yql_agg_0 $3))))) (let $5 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $2)) (lambda '($12) (block '( (let $13 (lambda '($15) (Member $15 '"Key") (Member $15 '"Value"))) (let $14 (lambda '($25 $26) $25 $26)) (return (FromFlow (WideCombiner (ExpandMap (ToFlow $12) $13) '-1073741824 (lambda '($16 $17) $17) (lambda '($18 $19 $20) (AggrCountInit $19)) (lambda '($21 $22 $23 $24) (AggrCountUpdate $22 $24)) $14))) ))) $4)) (let $6 (DqCnHashShuffle (TDqOutput $5 '0) '('0))) (let $7 (DqPhyStage '($6) (lambda '($27) (block '( (let $28 (WideCombiner (ToFlow $27) '"" (lambda '($29 $30) $29) (lambda '($31 $32 $33) $33) (lambda '($34 $35 $36 $37) (AggrAdd $36 $37)) (lambda '($38 $39) $39))) (return (FromFlow (NarrowMap $28 (lambda '($40) (AsStruct '('"column0" $40)))))) ))) '('('"_logical_id" '1265) '('"_id" '"20e96a18-e54dfa9-56a474ac-90bddb52")))) (let $8 (DqCnUnionAll (TDqOutput $7 '0))) (let $9 (DqPhyStage '($8) (lambda '($41) $41) '('('"_logical_id" '1533) '('"_id" '"e30e0688-9e8ad2d1-fe6c9c34-bde2a77c")))) (let $10 '($5 $7 $9)) (let $11 (DqCnResult (TDqOutput $9 '0) '('"column0"))) (return (KqpPhysicalQuery '((KqpPhysicalTx $10 '($11) '() '('('"type" '"generic")))) '((KqpTxResultBinding (ListType (StructType '('"column0" $3))) '0 '0)) '('('"type" '"query")))) ) >> KqpTpch::Query08 [GOOD] >> KqpTpch::Query09 >> TSentinelTests::BSControllerUnresponsive [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/runtime/unittest >> KqpScanSpilling::SpillingInRuntimeNodes-EnabledSpilling [GOOD] Test command err: cwd: /home/runner/.ya/build/build_root/0l5c/000a19/ydb/core/kqp/ut/runtime/test-results/unittest/testing_out_stuff/chunk6 Trying to start YDB, gRPC: 27753, MsgBus: 28509 2025-03-31T16:33:18.501156Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012623464402471:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:18.501230Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a19/r3tmp/tmpAc5bR2/pdisk_1.dat 2025-03-31T16:33:18.885112Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:33:18.929816Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:18.929926Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 27753, node 1 2025-03-31T16:33:18.933150Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:33:19.061650Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:19.061673Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:19.061680Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:19.061816Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is 
connected to server localhost:28509 TClient is connected to server localhost:28509 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:33:19.705000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:19.734777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:19.856435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:19.985661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:20.059819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:21.338904Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012636349306143:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:21.339014Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: { <main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:21.685540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:33:21.712505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:33:21.739442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:33:21.765642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:33:21.792389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:33:21.824379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:33:21.875896Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012636349306652:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: { <main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:21.875998Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: { <main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:21.876155Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012636349306658:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: { <main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:21.881572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:33:21.890702Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012636349306660:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:33:21.965330Z node 1 :TX_PROXY ERROR: Actor# [1:7488012636349306714:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:33:23.501228Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488012623464402471:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:23.501299Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:33:33.871764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:33:33.871795Z node 1 :IMPORT WARN: Table profiles were not loaded ( (let $1 (KqpTable '"/Root/KeyValue" '"72057594046644480:6" '"" '1)) (let $2 (KqpRowsSourceSettings $1 '('"Key" '"Value") '() (Void) '())) (let $3 (OptionalType (DataType 'Uint64))) (let $4 (OptionalType (DataType 'String))) (let $5 '('('"_logical_id" '785) '('"_id" '"be0b1c81-69794781-8c3bae09-33c5dbb5") '('"_wide_channels" (StructType '('"Key" $3) '('"Value" $4))))) (let $6 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $2)) (lambda '($17) (block '( (let $18 (lambda '($19) (Member $19 '"Key") (Member $19 '"Value"))) (return (FromFlow (ExpandMap (ToFlow $17) $18))) ))) $5)) (let $7 '('1)) (let $8 (DqCnHashShuffle (TDqOutput $6 '0) $7)) (let $9 (StructType '('"t1.Key" $3) '('"t1.Value" $4) '('"t2.Key" $3) '('"t2.Value" $4))) (let $10 '('('"_logical_id" '683) '('"_id" '"798c2451-9c462dd6-4126656e-bfd18163") '('"_wide_channels" $9))) (let $11 (DqPhyStage '($8) (lambda '($20) (block '( (let $21 '('0 '0 '1 '1)) (let $22 '('0 '2 '1 '3)) (let $23 (GraceSelfJoinCore (ToFlow $20) 'Full $7 $7 $21 $22 '('"t1.Value") '('"t2.Value") '())) (return (FromFlow (WideSort $23 '('('1 (Bool 'true)))))) ))) $10)) (let $12 (DqCnMerge (TDqOutput $11 '0) '('('1 '"Asc")))) (let $13 (DqPhyStage '($12) (lambda '($24) (FromFlow (NarrowMap (ToFlow $24) (lambda '($25 $26 $27 $28) (AsStruct '('"t1.Key" $25) '('"t1.Value" $26) '('"t2.Key" $27) '('"t2.Value" $28)))))) '('('"_logical_id" '695) '('"_id" '"6760b6b3-be8a4c78-a34cd8d3-7904297f")))) (let $14 '($6 $11 $13)) (let $15 '('"t1.Key" '"t1.Value" '"t2.Key" '"t2.Value")) (let $16 (DqCnResult (TDqOutput $13 '0) $15)) (return (KqpPhysicalQuery '((KqpPhysicalTx $14 '($16) '() '('('"type" '"generic")))) '((KqpTxResultBinding (ListType $9) '0 '0)) '('('"type" '"query")))) ) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/runtime/unittest >> KqpScanSpilling::SpillingInRuntimeNodes+EnabledSpilling [GOOD] Test command err: cwd: /home/runner/.ya/build/build_root/0l5c/0009a7/ydb/core/kqp/ut/runtime/test-results/unittest/testing_out_stuff/chunk5 Trying to start YDB, gRPC: 28951, MsgBus: 15008 2025-03-31T16:33:18.501523Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012621410906328:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:18.501582Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/0l5c/0009a7/r3tmp/tmpfSr5M1/pdisk_1.dat 2025-03-31T16:33:18.908461Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:33:18.951920Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:33:18.952073Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 28951, node 1 2025-03-31T16:33:18.954072Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:33:19.062541Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:33:19.062573Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:33:19.062578Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:33:19.062684Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15008 TClient is connected to server localhost:15008 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:33:19.705920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:19.726591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:19.846193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:33:19.996601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:33:20.072280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T16:33:21.506784Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012634295809993:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:21.506904Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: { <main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:21.837019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:33:21.862651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:33:21.888478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:33:21.914421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:33:21.938628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:33:21.967540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:33:22.040869Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012638590777803:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: { <main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:22.040948Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: { <main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:22.041006Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012638590777808:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: { <main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:33:22.044191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:33:22.052493Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012638590777810:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:33:22.107115Z node 1 :TX_PROXY ERROR: Actor# [1:7488012638590777864:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:33:23.501837Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488012621410906328:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:33:23.501909Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:33:33.894421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:33:33.894483Z node 1 :IMPORT WARN: Table profiles were not loaded ( (let $1 (KqpTable '"/Root/KeyValue" '"72057594046644480:6" '"" '1)) (let $2 (KqpRowsSourceSettings $1 '('"Key" '"Value") '() (Void) '())) (let $3 (OptionalType (DataType 'Uint64))) (let $4 (OptionalType (DataType 'String))) (let $5 '('('"_logical_id" '785) '('"_id" '"f622b255-cac323af-db221016-940dfbd2") '('"_wide_channels" (StructType '('"Key" $3) '('"Value" $4))))) (let $6 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $2)) (lambda '($17) (block '( (let $18 (lambda '($19) (Member $19 '"Key") (Member $19 '"Value"))) (return (FromFlow (ExpandMap (ToFlow $17) $18))) ))) $5)) (let $7 '('1)) (let $8 (DqCnHashShuffle (TDqOutput $6 '0) $7)) (let $9 (StructType '('"t1.Key" $3) '('"t1.Value" $4) '('"t2.Key" $3) '('"t2.Value" $4))) (let $10 '('('"_logical_id" '683) '('"_id" '"ce74143f-98bcf486-786213dc-dd78bbae") '('"_wide_channels" $9))) (let $11 (DqPhyStage '($8) (lambda '($20) (block '( (let $21 '('0 '0 '1 '1)) (let $22 '('0 '2 '1 '3)) (let $23 (GraceSelfJoinCore (ToFlow $20) 'Full $7 $7 $21 $22 '('"t1.Value") '('"t2.Value") '())) (return (FromFlow (WideSort $23 '('('1 (Bool 'true)))))) ))) $10)) (let $12 (DqCnMerge (TDqOutput $11 '0) '('('1 '"Asc")))) (let $13 (DqPhyStage '($12) (lambda '($24) (FromFlow (NarrowMap (ToFlow $24) (lambda '($25 $26 $27 $28) (AsStruct '('"t1.Key" $25) '('"t1.Value" $26) '('"t2.Key" $27) '('"t2.Value" $28)))))) '('('"_logical_id" '695) '('"_id" '"e223f4ed-a53d99ad-f01ad040-97caf0c7")))) (let $14 '($6 $11 $13)) (let $15 '('"t1.Key" '"t1.Value" '"t2.Key" '"t2.Value")) (let $16 (DqCnResult (TDqOutput $13 '0) $15)) (return (KqpPhysicalQuery '((KqpPhysicalTx $14 '($16) '() '('('"type" '"generic")))) '((KqpTxResultBinding (ListType $9) '0 '0)) '('('"type" '"query")))) ) >> TestFormatHandler::ManyJsonClients [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_insert_multiple_empty_rows [GOOD] >> Splitter::CritSimple [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/graph/shard/ut/unittest >> GraphShard::CreateGraphShard [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:37:45.117592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, 
InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:37:45.117719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:37:45.117764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:37:45.117803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:37:45.123193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:37:45.123290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:37:45.123413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:37:45.123506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:37:45.127585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:37:45.262635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:37:45.262716Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:37:45.283599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:37:45.283718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:37:45.283872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:37:45.307275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:37:45.307614Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:37:45.319191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:37:45.319614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:37:45.348007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:37:45.364526Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:37:45.364622Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:37:45.364844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:37:45.364906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:37:45.365008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:37:45.365202Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:37:45.380353Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:37:45.575691Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:37:45.577998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:37:45.580092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:37:45.584995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:37:45.585159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:37:45.592563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:37:45.592706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:37:45.592925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:37:45.593046Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:37:45.593082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:37:45.593130Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:37:45.595379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:37:45.595448Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:37:45.595494Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:37:45.598175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:37:45.598248Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:37:45.598290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:37:45.598347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:37:45.603690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:37:45.606042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:37:45.607657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:37:45.609014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:37:45.609171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:37:45.609229Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:37:45.610370Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:37:45.610437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:37:45.610673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:37:45.610749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:37:45.613630Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:37:45.613675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:37:45.613870Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:37:45.613915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:37:45.614213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:37:45.614297Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:37:45.614422Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:37:45.614462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:37:45.614515Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:37:45.614548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:37:45.614585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:37:45.614639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:37:45.614674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:37:45.614709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:37:45.614777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 
2025-03-31T16:37:45.614813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:37:45.614843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:37:45.616974Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:37:45.617104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:37:45.617154Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... chemeshard:72057594046678944 2025-03-31T16:37:45.969610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TConfigureParts operationId# 102:1 Got OK TEvConfigureStatus from tablet# 72075186234409547 shardIdx# 72057594046678944:3 at schemeshard# 72057594046678944 2025-03-31T16:37:45.970144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186234409548, partId: 1 2025-03-31T16:37:45.970235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:1, at schemeshard: 72057594046678944, message: Status: SUCCESS OnTabletId: 72075186234409548 2025-03-31T16:37:45.970266Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 102:1 HandleReply TEvConfigureStatus operationId:102:1 at schemeshard:72057594046678944 2025-03-31T16:37:45.970286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TConfigureParts operationId# 102:1 Got OK TEvConfigureStatus from tablet# 72075186234409548 shardIdx# 72057594046678944:4 at schemeshard# 72057594046678944 2025-03-31T16:37:45.970310Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:1 3 -> 128 2025-03-31T16:37:45.973072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:1, at schemeshard: 72057594046678944 2025-03-31T16:37:45.975212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:1, at schemeshard: 72057594046678944 2025-03-31T16:37:45.975388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:1, at schemeshard: 72057594046678944 2025-03-31T16:37:45.975503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:1, at schemeshard: 72057594046678944 2025-03-31T16:37:45.975529Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 102:1, at schemeshard: 72057594046678944 2025-03-31T16:37:45.975556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 102:1, at tablet# 72057594046678944 2025-03-31T16:37:45.975589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 2/2 2025-03-31T16:37:45.975690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:37:45.977560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 
2025-03-31T16:37:45.977702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 2025-03-31T16:37:45.977954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:37:45.978045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:37:45.978072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute operation part is already done, operationId: 102:0 2025-03-31T16:37:45.978101Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 102:1, at tablet# 72057594046678944 2025-03-31T16:37:45.978297Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:1 128 -> 240 2025-03-31T16:37:45.978342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 102:1, at tablet# 72057594046678944 2025-03-31T16:37:45.978416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 8 2025-03-31T16:37:45.978487Z node 1 :FLAT_TX_SCHEMESHARD INFO: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2], Generation: 2, ActorId:[1:399:2367], EffectiveACLVersion: 0, SubdomainVersion: 3, UserAttributesVersion: 1, TenantHive: 72075186233409546, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 72075186234409549, TenantRootACL: }, subDomain->GetVersion(): 3, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 72075186233409546, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 2025-03-31T16:37:45.980380Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:37:45.980408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-31T16:37:45.980537Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:37:45.980562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-03-31T16:37:45.980774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:1, at schemeshard: 72057594046678944 2025-03-31T16:37:45.980815Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TSyncHive, operationId 102:1, ProgressState, NeedSyncHive: 0 2025-03-31T16:37:45.980842Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:1 240 -> 240 2025-03-31T16:37:45.981281Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 
Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:37:45.981363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-03-31T16:37:45.981388Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-03-31T16:37:45.981409Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-03-31T16:37:45.981428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 9 2025-03-31T16:37:45.981473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/2, is published: true 2025-03-31T16:37:45.983846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:1, at schemeshard: 72057594046678944 2025-03-31T16:37:45.983899Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:1 ProgressState 2025-03-31T16:37:45.983967Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:1 progress is 2/2 2025-03-31T16:37:45.983993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 2/2 2025-03-31T16:37:45.984045Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:1 progress is 2/2 2025-03-31T16:37:45.984073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 2/2 2025-03-31T16:37:45.984099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 2/2, is published: true 2025-03-31T16:37:45.984130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 2/2 2025-03-31T16:37:45.984171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-03-31T16:37:45.984201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-03-31T16:37:45.984365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 8 2025-03-31T16:37:45.984423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:1 2025-03-31T16:37:45.984445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:1 2025-03-31T16:37:45.984517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2025-03-31T16:37:45.985066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-03-31T16:37:45.986663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-03-31T16:37:45.986724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-03-31T16:37:45.987131Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-03-31T16:37:45.987223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-31T16:37:45.987261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- 
TTxNotificationSubscriber for txId 102: satisfy waiter [1:560:2489] TestWaitNotification: OK eventTxId 102 2025-03-31T16:37:45.988718Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/db1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-31T16:37:45.988922Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/db1" took 233us result status StatusSuccess 2025-03-31T16:37:45.990521Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/db1" PathDescription { Self { Name: "db1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 3 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 3 PlanResolution: 50 Coordinators: 72075186234409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409548 SchemeShard: 72075186234409546 Hive: 72075186233409546 GraphShard: 72075186234409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "hdd" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |97.9%| [TS] {RESULT} ydb/core/graph/shard/ut/unittest >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeQuery [GOOD] >> TestFormatHandler::ManyRawClients >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendGetQueryStatus >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendGetQueryStatus [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyQuery ------- [TS] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/splitter/ut/unittest >> Splitter::CritSimple [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280336;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280336;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=2088936;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=2088936;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5184936;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5184936;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50200;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=seria ... 82944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8947912;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7964832;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=71282912;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8947912;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7964800;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7964832;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7964832;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7964832;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7964832;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7964832;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7964832;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7964832;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7964800;columns=1; |97.9%| [TS] {RESULT} ydb/core/tx/columnshard/splitter/ut/unittest >> Graph::CreateGraphShard [GOOD] >> Graph::UseGraphShard >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDeleteQuery |97.9%| [TA] $(B)/ydb/core/kqp/ut/runtime/test-results/unittest/{meta.json ... results_accumulator.log} >> test_http_api.py::TestHttpApi::test_complex_results [GOOD] >> test_http_api.py::TestHttpApi::test_result_offset_limit >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDeleteQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendControlQuery >> TCreateAndDropViewTest::InvalidQuery [GOOD] >> MediatorTest::BasicTimecastUpdates >> TCreateAndDropViewTest::ParsingSecurityInvoker >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendControlQuery [GOOD] |97.9%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/runtime/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut_sentinel/unittest >> TSentinelTests::BSControllerUnresponsive [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendGetResultData Test command err: 2025-03-31T16:36:23.689914Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2025-03-31T16:36:23.689982Z node 1 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2025-03-31T16:36:23.690039Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateState 2025-03-31T16:36:23.690069Z node 1 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2025-03-31T16:36:23.690118Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2025-03-31T16:36:23.690192Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2025-03-31T16:36:23.691962Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "node-1" State: UNKNOWN Devices { Name: "pdisk-1-4" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-5" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-6" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-7" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 1 InterconnectPort: 10000 Location { Rack: "rack-1" } StartTimeSeconds: 0 } Hosts { Name: "node-2" State: UNKNOWN Devices { Name: "pdisk-2-8" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-9" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-10" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-11" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 2 InterconnectPort: 10000 Location { Rack: "rack-2" } StartTimeSeconds: 0 } Hosts { Name: "node-3" State: UNKNOWN Devices { Name: "pdisk-3-12" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-13" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-14" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-15" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 3 InterconnectPort: 10000 Location { Rack: "rack-3" } StartTimeSeconds: 0 } Hosts { Name: "node-4" State: UNKNOWN Devices { Name: "pdisk-4-16" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-17" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-18" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-19" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 4 InterconnectPort: 10000 Location { Rack: "rack-4" } StartTimeSeconds: 0 } Hosts { Name: "node-5" State: UNKNOWN Devices { Name: "pdisk-5-20" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-21" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-22" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-23" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 5 InterconnectPort: 10000 Location { Rack: "rack-5" } StartTimeSeconds: 0 } Hosts { Name: "node-6" State: UNKNOWN Devices { Name: "pdisk-6-24" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-25" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-26" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-27" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 6 InterconnectPort: 10000 Location { Rack: "rack-6" } StartTimeSeconds: 0 } Hosts { Name: "node-7" State: UNKNOWN Devices { Name: "pdisk-7-28" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-29" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-30" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-31" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 7 InterconnectPort: 10000 Location { Rack: "rack-7" } StartTimeSeconds: 0 } Hosts { Name: "node-8" State: UNKNOWN Devices { Name: "pdisk-8-32" State: DOWN 
Timestamp: 0 } Devices { Name: "pdisk-8-33" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-34" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-35" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 8 InterconnectPort: 10000 Location { Rack: "rack-8" } StartTimeSeconds: 0 } } 2025-03-31T16:36:23.697576Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 4 Path: "/1/pdisk-4.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 5 Path: "/1/pdisk-5.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 6 Path: "/1/pdisk-6.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 7 Path: "/1/pdisk-7.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 8 Path: "/2/pdisk-8.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 9 Path: "/2/pdisk-9.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 10 Path: "/2/pdisk-10.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 11 Path: "/2/pdisk-11.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 12 Path: "/3/pdisk-12.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 13 Path: "/3/pdisk-13.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 14 Path: "/3/pdisk-14.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 15 Path: "/3/pdisk-15.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 16 Path: "/4/pdisk-16.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 17 Path: "/4/pdisk-17.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 18 Path: "/4/pdisk-18.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 19 Path: "/4/pdisk-19.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 20 Path: "/5/pdisk-20.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 21 Path: "/5/pdisk-21.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 22 Path: "/5/pdisk-22.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 23 Path: "/5/pdisk-23.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 24 Path: "/6/pdisk-24.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 25 Path: "/6/pdisk-25.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 26 Path: "/6/pdisk-26.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 27 Path: "/6/pdisk-27.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 28 Path: "/7/pdisk-28.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 29 Path: "/7/pdisk-29.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 30 Path: "/7/pdisk-30.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 31 Path: "/7/pdisk-31.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 32 Path: "/8/pdisk-32.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 33 Path: "/8/pdisk-33.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 34 Path: "/8/pdisk-34.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 35 Path: "/8/pdisk-35.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1000 } 
GroupId: 4 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1000 } 
GroupId: 8 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDom ... AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37860029 2025-03-31T16:37:44.593845Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 68, response# PDiskStateInfo { PDiskId: 272 CreateTime: 0 ChangeTime: 0 Path: "/68/pdisk-272.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 273 CreateTime: 0 ChangeTime: 0 Path: "/68/pdisk-273.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 274 CreateTime: 0 ChangeTime: 0 Path: "/68/pdisk-274.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 275 CreateTime: 0 ChangeTime: 0 Path: "/68/pdisk-275.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37860029 2025-03-31T16:37:44.593997Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 71, response# PDiskStateInfo { PDiskId: 284 CreateTime: 0 ChangeTime: 0 Path: "/71/pdisk-284.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 285 CreateTime: 0 ChangeTime: 0 Path: "/71/pdisk-285.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 286 CreateTime: 0 ChangeTime: 0 Path: "/71/pdisk-286.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 287 CreateTime: 0 ChangeTime: 0 Path: "/71/pdisk-287.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37860029 2025-03-31T16:37:44.594150Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 72, response# PDiskStateInfo { PDiskId: 288 CreateTime: 0 ChangeTime: 0 Path: "/72/pdisk-288.data" Guid: 1 
AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 289 CreateTime: 0 ChangeTime: 0 Path: "/72/pdisk-289.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 290 CreateTime: 0 ChangeTime: 0 Path: "/72/pdisk-290.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 291 CreateTime: 0 ChangeTime: 0 Path: "/72/pdisk-291.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37860029 2025-03-31T16:37:44.594211Z node 65 :CMS DEBUG: [Sentinel] [Main] State was updated in 0.000000s 2025-03-31T16:37:44.594613Z node 65 :CMS NOTICE: [Sentinel] [Main] PDisk status changed: pdiskId# 70:283, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2025-03-31T16:37:44.594677Z node 65 :CMS NOTICE: [Sentinel] [Main] PDisk status changed: pdiskId# 65:260, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2025-03-31T16:37:44.594739Z node 65 :CMS NOTICE: [Sentinel] [Main] PDisk status changed: pdiskId# 72:288, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2025-03-31T16:37:44.594792Z node 65 :CMS DEBUG: [Sentinel] [Main] Change pdisk status: requestsSize# 3 2025-03-31T16:37:44.611340Z node 65 :CMS DEBUG: [Sentinel] [Main] Retrying: attempt# 1 2025-03-31T16:37:44.611412Z node 65 :CMS DEBUG: [Sentinel] [Main] Change pdisk status: requestsSize# 3 2025-03-31T16:37:44.626973Z node 65 :CMS DEBUG: [Sentinel] [Main] UpdateState 2025-03-31T16:37:44.627059Z node 65 :CMS DEBUG: [Sentinel] [Main] Start StateUpdater 2025-03-31T16:37:44.627211Z node 65 :CMS DEBUG: [Sentinel] [Main] Retrying: attempt# 2 2025-03-31T16:37:44.627240Z node 65 :CMS DEBUG: [Sentinel] [Main] Change pdisk status: requestsSize# 3 2025-03-31T16:37:44.627422Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 65, wbId# [65:8388350642965737326:1634689637] 2025-03-31T16:37:44.627481Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 66, wbId# [66:8388350642965737326:1634689637] 2025-03-31T16:37:44.627519Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 67, wbId# [67:8388350642965737326:1634689637] 2025-03-31T16:37:44.627551Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 68, wbId# [68:8388350642965737326:1634689637] 2025-03-31T16:37:44.627580Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 69, wbId# [69:8388350642965737326:1634689637] 2025-03-31T16:37:44.627607Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 70, wbId# [70:8388350642965737326:1634689637] 2025-03-31T16:37:44.627660Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 71, wbId# [71:8388350642965737326:1634689637] 2025-03-31T16:37:44.627707Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 72, wbId# [72:8388350642965737326:1634689637] 2025-03-31T16:37:44.628257Z node 65 :CMS DEBUG: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Status { }, cookie# 123 2025-03-31T16:37:44.628297Z node 65 :CMS ERROR: [Sentinel] [Main] Unsuccesful response from BSC: error# 2025-03-31T16:37:44.628593Z node 65 :CMS DEBUG: 
[Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 65, response# PDiskStateInfo { PDiskId: 260 CreateTime: 0 ChangeTime: 0 Path: "/65/pdisk-260.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 261 CreateTime: 0 ChangeTime: 0 Path: "/65/pdisk-261.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 262 CreateTime: 0 ChangeTime: 0 Path: "/65/pdisk-262.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 263 CreateTime: 0 ChangeTime: 0 Path: "/65/pdisk-263.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880029 2025-03-31T16:37:44.629035Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 71, response# PDiskStateInfo { PDiskId: 284 CreateTime: 0 ChangeTime: 0 Path: "/71/pdisk-284.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 285 CreateTime: 0 ChangeTime: 0 Path: "/71/pdisk-285.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 286 CreateTime: 0 ChangeTime: 0 Path: "/71/pdisk-286.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 287 CreateTime: 0 ChangeTime: 0 Path: "/71/pdisk-287.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880029 2025-03-31T16:37:44.629391Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 72, response# PDiskStateInfo { PDiskId: 288 CreateTime: 0 ChangeTime: 0 Path: "/72/pdisk-288.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 289 CreateTime: 0 ChangeTime: 0 Path: "/72/pdisk-289.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 290 CreateTime: 0 ChangeTime: 0 Path: "/72/pdisk-290.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 291 CreateTime: 0 ChangeTime: 0 Path: "/72/pdisk-291.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880029 2025-03-31T16:37:44.629548Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 66, response# PDiskStateInfo { PDiskId: 264 CreateTime: 0 ChangeTime: 0 Path: "/66/pdisk-264.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 265 CreateTime: 0 ChangeTime: 0 Path: "/66/pdisk-265.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 266 CreateTime: 0 ChangeTime: 0 Path: "/66/pdisk-266.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 267 CreateTime: 0 ChangeTime: 0 Path: "/66/pdisk-267.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880029 2025-03-31T16:37:44.629677Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 67, response# PDiskStateInfo { PDiskId: 268 CreateTime: 0 ChangeTime: 0 Path: "/67/pdisk-268.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 269 CreateTime: 0 ChangeTime: 0 Path: 
"/67/pdisk-269.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 270 CreateTime: 0 ChangeTime: 0 Path: "/67/pdisk-270.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 271 CreateTime: 0 ChangeTime: 0 Path: "/67/pdisk-271.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880029 2025-03-31T16:37:44.629802Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 68, response# PDiskStateInfo { PDiskId: 272 CreateTime: 0 ChangeTime: 0 Path: "/68/pdisk-272.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 273 CreateTime: 0 ChangeTime: 0 Path: "/68/pdisk-273.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 274 CreateTime: 0 ChangeTime: 0 Path: "/68/pdisk-274.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 275 CreateTime: 0 ChangeTime: 0 Path: "/68/pdisk-275.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880029 2025-03-31T16:37:44.629939Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 69, response# PDiskStateInfo { PDiskId: 276 CreateTime: 0 ChangeTime: 0 Path: "/69/pdisk-276.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 277 CreateTime: 0 ChangeTime: 0 Path: "/69/pdisk-277.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 278 CreateTime: 0 ChangeTime: 0 Path: "/69/pdisk-278.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 279 CreateTime: 0 ChangeTime: 0 Path: "/69/pdisk-279.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880029 2025-03-31T16:37:44.630102Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 70, response# PDiskStateInfo { PDiskId: 280 CreateTime: 0 ChangeTime: 0 Path: "/70/pdisk-280.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 281 CreateTime: 0 ChangeTime: 0 Path: "/70/pdisk-281.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 282 CreateTime: 0 ChangeTime: 0 Path: "/70/pdisk-282.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 283 CreateTime: 0 ChangeTime: 0 Path: "/70/pdisk-283.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880029 2025-03-31T16:37:44.630177Z node 65 :CMS DEBUG: [Sentinel] [Main] State was updated in 0.000000s >> TDqSolomonWriteActorTest::TestWriteFormat [GOOD] >> TDqSolomonWriteActorTest::TestWriteBigBatchMonitoring >> test_unknown_data_source.py::TestUnknownDataSource::test_should_fail_unknown_data_source[v2-client0] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendGetResultData [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListJobs |97.9%| [TM] {RESULT} ydb/core/cms/ut_sentinel/unittest >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListJobs [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeJob >> 
test_http_api.py::TestHttpApi::test_result_offset_limit [GOOD] >> test_http_api.py::TestHttpApi::test_openapi_spec >> Graph::UseGraphShard [GOOD] >> Graph::MemoryBackendFullCycle >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeJob [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateConnection >> test_http_api.py::TestHttpApi::test_openapi_spec [GOOD] >> DataCleanup::ForceDataCleanup [GOOD] >> DataCleanup::ForceDataCleanupWithoutCompaction >> TestFormatHandler::ManyRawClients [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateConnectionWithServiceAccount >> TestFormatHandler::ClientValidation >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListConnections >> KeyValueGRPCService::SimpleWriteReadWithoutLockGeneration2 [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithGetChannelStatus >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListConnections [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeConnection >> QuoterWithKesusTest::GetsBigQuotaWithDeadline [GOOD] >> QuoterWithKesusTest::FailsToGetBigQuota >> AnalyzeColumnshard::AnalyzeAnalyzeOneColumnTableSpecificColumns [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyConnection >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyConnectionWithServiceAccount >> DiscoveryIsNotBroken::NoKafkaSslEndpointInDiscovery [GOOD] >> DiscoveryIsNotBroken::HaveKafkaEndpointInDiscovery >> MediatorTest::BasicTimecastUpdates [GOOD] >> TopicSessionTests::ReadNonExistentTopic [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDeleteConnection >> TopicSessionTests::SlowSession >> MediatorTest::MultipleTablets ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeAnalyzeOneColumnTableSpecificColumns [GOOD] Test command err: 2025-03-31T16:35:41.771074Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:35:41.771348Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:35:41.771428Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000f09/r3tmp/tmpftDdXy/pdisk_1.dat 2025-03-31T16:35:42.228384Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23535, node 1 2025-03-31T16:35:42.542840Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:42.542886Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:42.542908Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:42.545033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:35:42.545458Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:35:42.657370Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:42.657531Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:42.674715Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22732 2025-03-31T16:35:43.272532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:35:46.822571Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-31T16:35:46.862700Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:46.862821Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:46.891573Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:35:46.893779Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:35:47.139923Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:35:47.140538Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:35:47.141069Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:35:47.141194Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:35:47.141449Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:35:47.141532Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:35:47.141614Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:35:47.141697Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:35:47.141795Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:35:47.362890Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:47.363019Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:47.381032Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:35:47.545194Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:35:47.603272Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-31T16:35:47.603391Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-31T16:35:47.642127Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-31T16:35:47.643434Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-31T16:35:47.643688Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-31T16:35:47.643788Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-31T16:35:47.643844Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-31T16:35:47.643895Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-31T16:35:47.643943Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-31T16:35:47.644001Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-31T16:35:47.644514Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-31T16:35:47.673504Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-31T16:35:47.673607Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1872:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-31T16:35:47.686193Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2608] 2025-03-31T16:35:47.695133Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1908:2617] 2025-03-31T16:35:47.695889Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1908:2617], schemeshard id = 72075186224037897 2025-03-31T16:35:47.698719Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-31T16:35:47.731551Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-31T16:35:47.731597Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-31T16:35:47.731660Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-31T16:35:47.752525Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-31T16:35:47.761575Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-31T16:35:47.761741Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-31T16:35:47.982126Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-31T16:35:48.172083Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-31T16:35:48.275631Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-31T16:35:49.465051Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2241:3074], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:49.465186Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:49.484071Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-31T16:35:49.650379Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:35:49.650639Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:35:49.650993Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:35:49.651191Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:35:49.651338Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:35:49.651485Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:35:49.651635Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:35:49.651815Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:35:49.651959Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:35:49.652107Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:35:49.652242Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:35:49.652378Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:35:49.686008Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:35:49.686124Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;desc ... 03-31T16:36:05.649727Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:37:45.899852Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-03-31T16:37:45.899956Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-03-31T16:37:45.900004Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-03-31T16:37:45.900052Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-03-31T16:37:47.239302Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 2, at schemeshard: 72075186224037897 2025-03-31T16:37:47.239399Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 187.000000s, at schemeshard: 72075186224037897 2025-03-31T16:37:47.239801Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037897, stats size# 51 2025-03-31T16:37:47.272634Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-03-31T16:37:48.188221Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:37:48.188327Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-03-31T16:37:48.188377Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-03-31T16:37:48.188434Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table. 2025-03-31T16:37:48.188481Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-03-31T16:37:48.188981Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-31T16:37:48.193318Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-03-31T16:37:48.198333Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6989:5166], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:37:48.198469Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6999:5171], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:37:48.198558Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:37:48.212338Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897 2025-03-31T16:37:48.289277Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7003:5174], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-03-31T16:37:48.485923Z node 2 :TX_PROXY ERROR: Actor# [2:7098:5221] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:37:48.604143Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:7127:5236]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:37:48.604469Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-31T16:37:48.604586Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:7129:5238] 2025-03-31T16:37:48.604665Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:7129:5238] 2025-03-31T16:37:48.605061Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7130:5239] 2025-03-31T16:37:48.605224Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:7130:5239], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-03-31T16:37:48.609019Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-03-31T16:37:48.609452Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7129:5238], server id = [2:7130:5239], tablet id = 72075186224037894, status = OK 2025-03-31T16:37:48.609545Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-31T16:37:48.609649Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:7127:5236], StatRequests.size() = 1 2025-03-31T16:37:48.792935Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZWNmZTQ0ZTItYmVkMTRjZjItYTFjZTM3NzEtNWU3MTk1NjM=, TxId: 2025-03-31T16:37:48.793037Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZWNmZTQ0ZTItYmVkMTRjZjItYTFjZTM3NzEtNWU3MTk1NjM=, TxId: 2025-03-31T16:37:48.793544Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-31T16:37:48.808262Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-03-31T16:37:48.808388Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-03-31T16:37:48.854715Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-03-31T16:37:48.854996Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-03-31T16:37:48.895843Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:7129:5238], schemeshard count = 1 2025-03-31T16:37:49.726162Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-31T16:37:49.726271Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-03-31T16:37:49.729634Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-31T16:37:49.748001Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-31T16:37:49.748697Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-31T16:37:49.748776Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::ExecuteAnalyze. 
Table OperationId operationId, PathId [OwnerId: 72075186224037897, LocalPathId: 4], AnalyzedShards 1 2025-03-31T16:37:49.776759Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-31T16:37:49.798774Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableRequest::Complete. Send 1 events. 2025-03-31T16:37:49.799865Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Execute 2025-03-31T16:37:49.799986Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Execute. All shards are analyzed 2025-03-31T16:37:49.814018Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-03-31T16:37:50.744792Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:37:50.744932Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-03-31T16:37:50.744994Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-31T16:37:50.745671Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-31T16:37:50.759779Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-31T16:37:50.760218Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-31T16:37:50.760323Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-31T16:37:50.761320Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-03-31T16:37:50.786987Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-03-31T16:37:50.787340Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-03-31T16:37:50.788055Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7259:5315], server id = [2:7260:5316], tablet id = 72075186224037899, status = OK 2025-03-31T16:37:50.788191Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7259:5315], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T16:37:50.795859Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-03-31T16:37:50.796002Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-03-31T16:37:50.796303Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-03-31T16:37:50.796532Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-03-31T16:37:50.796722Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7259:5315], server id = [2:7260:5316], tablet id = 72075186224037899 2025-03-31T16:37:50.796765Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:37:50.796963Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-03-31T16:37:50.800663Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-03-31T16:37:50.839965Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7280:5335]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:37:50.840216Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-31T16:37:50.840268Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7280:5335], StatRequests.size() = 1 2025-03-31T16:37:50.970964Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ODcxNWMyOWYtNGVmY2ViODUtMjUxN2NlZmItYjUxYWVmOWQ=, TxId: 2025-03-31T16:37:50.971038Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ODcxNWMyOWYtNGVmY2ViODUtMjUxN2NlZmItYjUxYWVmOWQ=, TxId: 2025-03-31T16:37:50.971688Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-31T16:37:50.991290Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-31T16:37:50.991388Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2804:3223] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDeleteConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendTestConnection >> TDataShardRSTest::TestCleanupInRS+UseSink >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendTestConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendTestConnectionWithServiceAccount >> test_example.py::TestExample::test_example [GOOD] >> test_example.py::TestExample::test_example2 [GOOD] >> TCreateAndDropViewTest::ParsingSecurityInvoker [GOOD] >> TCreateAndDropViewTest::ListCreatedView >> TTxDataShardBuildIndexScan::RunScan >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendTestConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateBinding >> DataShardReplication::SimpleApplyChanges >> DataShardStats::BlobsStatsCorrect [GOOD] >> DataShardStats::SharedCacheGarbage >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateBinding [GOOD] >> TestFormatHandler::ClientValidation [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListBindings >> MediatorTimeCast::ReadStepSubscribe >> TestFormatHandler::ClientError >> KqpTpch::Query09 [GOOD] >> KqpTpch::Query10 >> DataCleanup::ForceDataCleanupWithoutCompaction [GOOD] >> DataCleanup::MultipleDataCleanups >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListBindings [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeBinding >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeBinding [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyBinding >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyBinding [GOOD] >> 
TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDeleteBinding >> MediatorTest::MultipleTablets [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDeleteBinding [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateQuery >> ServerRestartTest::RestartOnGetSession >> MediatorTest::TabletAckBeforePlanComplete >> TTxDataShardSampleKScan::RunScan >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListQueries >> Graph::MemoryBackendFullCycle [GOOD] >> Graph::LocalBackendFullCycle >> KeyValueGRPCService::SimpleWriteReadWithGetChannelStatus [GOOD] >> KeyValueGRPCService::SimpleWriteReadOverrun >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListQueries [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeQuery >> BasicExample::BasicExample >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendGetQueryStatus >> QuoterWithKesusTest::FailsToGetBigQuota [GOOD] >> QuoterWithKesusTest::PrefetchCoefficient >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendGetQueryStatus [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyQuery >> TCreateAndDropViewTest::ListCreatedView [GOOD] >> TCreateAndDropViewTest::CreateSameViewTwice >> TDqSolomonWriteActorTest::TestWriteBigBatchMonitoring [GOOD] >> TDqSolomonWriteActorTest::TestWriteBigBatchSolomon [GOOD] >> TDqSolomonWriteActorTest::TestWriteWithTimeseries >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDeleteQuery >> TestFormatHandler::ClientError [GOOD] >> TMemoryController::SharedCache [GOOD] >> TMemoryController::SharedCache_ConfigLimit >> TestFormatHandler::ClientErrorWithEmptyFilter >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDeleteQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendControlQuery >> Coordinator::RestoreTenantConfiguration-AlterDatabaseCreateHiveFirst-false [GOOD] >> Coordinator::RestoreTenantConfiguration-AlterDatabaseCreateHiveFirst-true >> KqpTpch::Query10 [GOOD] >> KqpTpch::Query11 >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendControlQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendGetResultData >> MediatorTest::TabletAckBeforePlanComplete [GOOD] |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/http_api/py3test >> test_http_api.py::TestHttpApi::test_openapi_spec [GOOD] |97.9%| [TM] {RESULT} ydb/tests/fq/http_api/py3test >> DataShardReplication::SimpleApplyChanges [GOOD] >> DataShardReplication::SplitMergeChanges >> DataCleanup::MultipleDataCleanups [GOOD] >> DataCleanup::MultipleDataCleanupsWithOldGenerations >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendGetResultData [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListJobs >> MediatorTest::TabletAckWhenDead |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/example/py3test >> test_example.py::TestExample::test_example2 [GOOD] |97.9%| [TM] {RESULT} ydb/tests/example/py3test >> TTxDataShardBuildIndexScan::RunScan [GOOD] >> TTxDataShardBuildIndexScan::ShadowBorrowCompaction >> DiscoveryIsNotBroken::HaveKafkaEndpointInDiscovery [GOOD] >> DiscoveryIsNotBroken::HaveKafkaSslEndpointInDiscovery >> 
TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListJobs [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeJob >> SdkCredProvider::PingFromProviderSyncDiscovery >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeJob [GOOD] >> MediatorTimeCast::ReadStepSubscribe [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateConnection >> MediatorTimeCast::GranularTimecast >> test_crud.py::TestYdbCrudOperations::test_crud_operations >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateConnectionWithServiceAccount >> TTxDataShardSampleKScan::RunScan [GOOD] >> TTxDataShardSampleKScan::ScanBadParameters >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListConnections >> TopicSessionTests::SlowSession [GOOD] >> DataShardCompaction::CompactBorrowed [GOOD] >> DataShardCompaction::CompactBorrowedTxStatus >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListConnections [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeConnection >> QueryActorTest::SimpleQuery >> TopicSessionTests::TwoSessionsWithDifferentSchemes >> BasicExample::BasicExample [GOOD] >> SdkCredProvider::PingFromProviderSyncDiscovery [GOOD] >> SdkCredProvider::PingFromProviderAsyncDiscovery >> TCreateAndDropViewTest::CreateSameViewTwice [GOOD] >> TCreateAndDropViewTest::CreateViewOccupiedName >> TestFormatHandler::ClientErrorWithEmptyFilter [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyConnection >> TestJsonParser::Simple1 >> test.py::test_local [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyConnectionWithServiceAccount >> TestJsonParser::Simple1 [GOOD] >> TestJsonParser::Simple2 >> KeyValueGRPCService::SimpleWriteReadOverrun [GOOD] >> KeyValueGRPCService::SimpleWriteReadRange >> TMemoryController::SharedCache_ConfigLimit [GOOD] >> TMemoryController::MemTable >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDeleteConnection >> TestJsonParser::Simple2 [GOOD] >> Graph::LocalBackendFullCycle [GOOD] >> Graph::MemoryBordersOnGet >> test_insert_restarts.py::TestS3::test_atomic_upload_commit[v1-client0] >> TestJsonParser::Simple3 >> TestJsonParser::Simple3 [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDeleteConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendTestConnection >> TestJsonParser::Simple4 >> TestJsonParser::Simple4 [GOOD] >> QuoterWithKesusTest::PrefetchCoefficient [GOOD] >> QuoterWithKesusTest::GetsQuotaAfterPause >> TTxDataShardSampleKScan::ScanBadParameters [GOOD] >> TestJsonParser::LargeStrings >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendTestConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendTestConnectionWithServiceAccount >> MediatorTimeCast::GranularTimecast [GOOD] >> SdkCredProvider::PingFromProviderAsyncDiscovery [GOOD] >> DataCleanup::MultipleDataCleanupsWithOldGenerations [GOOD] >> DataCleanup::ForceDataCleanupWithRestart 
>> TestJsonParser::LargeStrings [GOOD] >> DataShardReplication::SplitMergeChanges [GOOD] >> DataShardReplication::SplitMergeChangesReboots >> TestJsonParser::ManyValues >> Graph::MemoryBordersOnGet [GOOD] >> Graph::LocalBordersOnGet >> TestJsonParser::ManyValues [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendTestConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateBinding |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_sample_k/unittest >> TTxDataShardSampleKScan::ScanBadParameters [GOOD] Test command err: 2025-03-31T16:37:58.797464Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488013825437176290:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:37:58.797513Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000a96/r3tmp/tmpV55nho/pdisk_1.dat 2025-03-31T16:37:59.293723Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:37:59.332672Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:37:59.332748Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:37:59.335607Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:37:59.382967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:37:59.456116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:37:59.513246Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:7488013829732144161:2295] 2025-03-31T16:37:59.513528Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:37:59.541709Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:37:59.541771Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:37:59.549400Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:37:59.549480Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:37:59.549514Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:37:59.559367Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:37:59.559483Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:37:59.559518Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:7488013829732144177:2295] in generation 1 2025-03-31T16:37:59.571201Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:37:59.624086Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T16:37:59.625457Z node 1 :TX_DATASHARD 
DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:37:59.625555Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:7488013829732144180:2296] 2025-03-31T16:37:59.625573Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:37:59.625584Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T16:37:59.625595Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:37:59.630007Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T16:37:59.630074Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T16:37:59.630107Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7488013829732144158:2296], serverId# [1:7488013829732144176:2304], sessionId# [0:0:0] 2025-03-31T16:37:59.630197Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:37:59.630215Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:37:59.630272Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:37:59.630284Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:37:59.630298Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:37:59.630570Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976710657 ssId 72057594046644480 seqNo 2:1 2025-03-31T16:37:59.630995Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976710657 at tablet 72075186224037888 2025-03-31T16:37:59.635145Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:37:59.635295Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:37:59.635384Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-31T16:37:59.638599Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7488013829732144194:2314], serverId# [1:7488013829732144195:2315], sessionId# [0:0:0] 2025-03-31T16:37:59.648051Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976710657 at step 1743439079683 at tablet 72075186224037888 { Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743439079683 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T16:37:59.648094Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:37:59.648304Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:37:59.648380Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:37:59.648399Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:37:59.648432Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1743439079683:281474976710657] in 
PlanQueue unit at 72075186224037888 2025-03-31T16:37:59.648690Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1743439079683:281474976710657 keys extracted: 0 2025-03-31T16:37:59.648838Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:37:59.649052Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:37:59.649111Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-31T16:37:59.660050Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-31T16:37:59.661387Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:37:59.662564Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1743439079683} 2025-03-31T16:37:59.662629Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:37:59.662676Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1743439079682 2025-03-31T16:37:59.662695Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:37:59.663310Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:37:59.663331Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:37:59.663364Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-31T16:37:59.663423Z node 1 :TX_DATASHARD DEBUG: Complete [1743439079683 : 281474976710657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:7488013829732143989:2185], exec latency: 12 ms, propose latency: 14 ms 2025-03-31T16:37:59.663452Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976710657 state Ready TxInFly 0 2025-03-31T16:37:59.663596Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:37:59.668212Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1743439079690 2025-03-31T16:37:59.669889Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976710657 datashard 72075186224037888 state Ready 2025-03-31T16:37:59.669933Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-31T16:38:02.433393Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013842617046178:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:38:02.433544Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:38:02.435248Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013842617046190:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:38:02.447531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T16:38:02.453676Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:38:02.462178Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:38:02.462331Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488013842617046192:2317], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T16:38:02.612401Z node 1 :TX_PROXY ERROR: Actor# [1:7488013842617046243:2394] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:38:03.748613Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710660. Ctx: { TraceId: 01jqph2cxxd9gxx1waefvss1pf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGJmZTQyZmEtZGFkZWE1NjEtYjM1YWNmYjgtYzU0ZTA0ZjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:38:03.790929Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7488013846912013575:2409], serverId# [1:7488013846912013576:2410], sessionId# [0:0:0] 2025-03-31T16:38:03.795056Z node 1 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888 2025-03-31T16:38:03.795368Z node 1 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row ... 0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:38:04.742643Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:38:04.744787Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:38:04.753735Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:38:04.761399Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:38:04.776325Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:7488013851150341475:2295] 2025-03-31T16:38:04.776581Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:38:04.785832Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:38:04.785896Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:38:04.788009Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:38:04.788079Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:38:04.788126Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:38:04.788476Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:38:04.788543Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:38:04.788583Z node 2 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [2:7488013851150341491:2295] in generation 1 2025-03-31T16:38:04.792160Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:38:04.792222Z node 2 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T16:38:04.792302Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:38:04.792345Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [2:7488013851150341493:2296] 
2025-03-31T16:38:04.792356Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:38:04.792366Z node 2 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T16:38:04.792384Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:38:04.792512Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T16:38:04.792586Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T16:38:04.792612Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:38:04.792627Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:38:04.792647Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:38:04.792663Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:38:04.793531Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:7488013851150341473:2294], serverId# [2:7488013851150341481:2298], sessionId# [0:0:0] 2025-03-31T16:38:04.793638Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:38:04.793914Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T16:38:04.793979Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T16:38:04.795458Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:38:04.799880Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:38:04.799950Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-31T16:38:04.805537Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:7488013851150341507:2312], serverId# [2:7488013851150341509:2314], sessionId# [0:0:0] 2025-03-31T16:38:04.805828Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1743439084849 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743439084849 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T16:38:04.805849Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:38:04.805996Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:38:04.806057Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:38:04.806075Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:38:04.806093Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1743439084849:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-31T16:38:04.806312Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1743439084849:281474976715657 keys extracted: 0 2025-03-31T16:38:04.806431Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp 
at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:38:04.806529Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:38:04.806571Z node 2 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-31T16:38:04.806997Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-31T16:38:04.807433Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:38:04.810034Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1743439084848 2025-03-31T16:38:04.810071Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:38:04.810115Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1743439084849} 2025-03-31T16:38:04.810162Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:38:04.810201Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:38:04.810216Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:38:04.810231Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-31T16:38:04.810275Z node 2 :TX_DATASHARD DEBUG: Complete [1743439084849 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:7488013851150341250:2149], exec latency: 0 ms, propose latency: 3 ms 2025-03-31T16:38:04.810304Z node 2 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-31T16:38:04.810336Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:38:04.810398Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1743439084856 2025-03-31T16:38:04.816790Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-03-31T16:38:04.816842Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-31T16:38:04.821085Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:7488013851150341546:2341], serverId# [2:7488013851150341547:2342], sessionId# [0:0:0] 2025-03-31T16:38:04.821188Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:38:04.821263Z node 2 :TX_DATASHARD DEBUG: Prepared Snapshot transaction txId 281474976715658 at tablet 72075186224037888 2025-03-31T16:38:04.824053Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:38:04.828058Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715658 at step 1743439084870 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743439084870 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T16:38:04.828084Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 
2025-03-31T16:38:04.828226Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:38:04.828240Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:38:04.828265Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1743439084870:281474976715658] in PlanQueue unit at 72075186224037888 2025-03-31T16:38:04.828403Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1743439084870:281474976715658 keys extracted: 0 2025-03-31T16:38:04.828707Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:38:04.831994Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1743439084870} 2025-03-31T16:38:04.832044Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:38:04.832089Z node 2 :TX_DATASHARD DEBUG: Complete [1743439084870 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [2:7488013851150341541:2337], exec latency: 0 ms, propose latency: 3 ms 2025-03-31T16:38:04.832111Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:38:04.835192Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:7488013851150341557:2352], serverId# [2:7488013851150341558:2353], sessionId# [0:0:0] 2025-03-31T16:38:04.837119Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:7488013851150341562:2357], serverId# [2:7488013851150341563:2358], sessionId# [0:0:0] 2025-03-31T16:38:04.838879Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:7488013851150341567:2362], serverId# [2:7488013851150341568:2363], sessionId# [0:0:0] 2025-03-31T16:38:04.845294Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:7488013851150341572:2367], serverId# [2:7488013851150341573:2368], sessionId# [0:0:0] 2025-03-31T16:38:04.847385Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:7488013851150341577:2372], serverId# [2:7488013851150341578:2373], sessionId# [0:0:0] 2025-03-31T16:38:04.849144Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:7488013851150341582:2377], serverId# [2:7488013851150341583:2378], sessionId# [0:0:0] |97.9%| [TM] {RESULT} ydb/core/tx/datashard/ut_sample_k/unittest >> KqpTpch::Query11 [GOOD] >> KqpTpch::Query12 >> TestJsonParser::MissingFields >> MediatorTest::TabletAckWhenDead [GOOD] >> TestJsonParser::MissingFields [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/time_cast/ut/unittest >> MediatorTimeCast::GranularTimecast [GOOD] Test command err: 2025-03-31T16:38:00.350407Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:38:00.350742Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:38:00.350951Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0006e0/r3tmp/tmpCwRMHi/pdisk_1.dat 2025-03-31T16:38:00.953637Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE NKikimr::TEvMediatorTimecast::TEvSubscribeReadStep{ CoordinatorId# 72057594046316545 } 2025-03-31T16:38:00.954287Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE EvClientConnected 2025-03-31T16:38:00.959025Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepResult CoordinatorID: 72057594046316545 SeqNo: 1 LastAcquireStep: 0 NextAcquireStep: 0 2025-03-31T16:38:01.005775Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:38:01.054432Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:38:01.055056Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:38:01.067749Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:38:01.157103Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 500 2025-03-31T16:38:01.266493Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 1000 2025-03-31T16:38:01.471885Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 2000 2025-03-31T16:38:01.608214Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 3000 2025-03-31T16:38:01.738750Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 4000 2025-03-31T16:38:01.890540Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 5000 2025-03-31T16:38:01.940127Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE NKikimr::TEvMediatorTimecast::TEvWaitReadStep{ CoordinatorId# 72057594046316545 ReadStep# 7000 } 2025-03-31T16:38:02.088156Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 6000 2025-03-31T16:38:02.226262Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 7000 2025-03-31T16:38:02.229056Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE EvClientDestroyed 2025-03-31T16:38:02.260133Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE EvClientConnected 2025-03-31T16:38:02.261276Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE 
TEvSubscribeReadStepResult CoordinatorID: 72057594046316545 SeqNo: 2 LastAcquireStep: 0 NextAcquireStep: 7000 2025-03-31T16:38:02.277431Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE NKikimr::TEvMediatorTimecast::TEvWaitReadStep{ CoordinatorId# 72057594046316545 ReadStep# 12000 } 2025-03-31T16:38:02.404805Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 2 NextAcquireStep: 7500 2025-03-31T16:38:02.509301Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 2 NextAcquireStep: 8000 2025-03-31T16:38:02.668298Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 2 NextAcquireStep: 9000 2025-03-31T16:38:02.803385Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 2 NextAcquireStep: 10000 2025-03-31T16:38:02.964626Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 2 NextAcquireStep: 11000 2025-03-31T16:38:03.128238Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 2 NextAcquireStep: 12000 2025-03-31T16:38:06.765964Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:101:2147], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:38:06.766410Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:38:06.766505Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0006e0/r3tmp/tmpeUCDYe/pdisk_1.dat 2025-03-31T16:38:07.149828Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE {TEvRegisterTablet TabletId# 72057594047365120 ProcessingParams { Version: 0 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 1 Mediators: 72057594046382081 }} 2025-03-31T16:38:07.150896Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Mediator# 72057594046382081 NKikimrTxMediatorTimecast.TEvGranularWatch Bucket: 0 SubscriptionId: 1 Tablets: 72057594047365120 MinStep: 0 2025-03-31T16:38:07.150979Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Mediator# 72057594046382081 {TEvWatch Bucket# 0} 2025-03-31T16:38:07.151039Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Sender# [2:651:2549] {TEvRegisterTabletResult TabletId# 72057594047365120 Entry# 0} 2025-03-31T16:38:07.159683Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE EvClientConnected 2025-03-31T16:38:07.159929Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 1 LatestStep: 0 2025-03-31T16:38:07.160135Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 0} 2025-03-31T16:38:07.160439Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE {TEvRegisterTablet TabletId# 72057594047365121 ProcessingParams { Version: 0 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 1 Mediators: 72057594046382081 }} 2025-03-31T16:38:07.160543Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Mediator# 72057594046382081 NKikimrTxMediatorTimecast.TEvGranularWatchModify Bucket: 0 SubscriptionId: 2 AddTablets: 72057594047365121 2025-03-31T16:38:07.160604Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Sender# [2:654:2551] {TEvRegisterTabletResult TabletId# 72057594047365121 Entry# 0} 2025-03-31T16:38:07.160874Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 2 LatestStep: 0 2025-03-31T16:38:07.161139Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE {TEvRegisterTablet TabletId# 72057594047365123 ProcessingParams { Version: 0 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 1 Mediators: 72057594046382081 }} 2025-03-31T16:38:07.161234Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Mediator# 72057594046382081 NKikimrTxMediatorTimecast.TEvGranularWatchModify Bucket: 0 SubscriptionId: 3 AddTablets: 72057594047365123 2025-03-31T16:38:07.161290Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Sender# [2:655:2552] {TEvRegisterTabletResult TabletId# 72057594047365123 Entry# 0} 2025-03-31T16:38:07.161512Z node 2 
:TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 3 LatestStep: 0 2025-03-31T16:38:07.192992Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:38:07.230205Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:38:07.230351Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:38:07.242072Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:38:07.324161Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 3 LatestStep: 500 2025-03-31T16:38:07.324303Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 500} 2025-03-31T16:38:07.431336Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 3 LatestStep: 1000 2025-03-31T16:38:07.431436Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 1000} 2025-03-31T16:38:07.632391Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 3 LatestStep: 2000 2025-03-31T16:38:07.632483Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 2000} ... have step 0 and 2000 after sleep 2025-03-31T16:38:07.733084Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 3 LatestStep: 2500 FrozenTablets: 72057594047365120 FrozenTablets: 72057594047365121 FrozenSteps: 2499 FrozenSteps: 2499 ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... tx1 planned at step 2500 ... tablet1 at 2499 ... tablet2 at 2499 ... tablet3 at 2500 ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet 2025-03-31T16:38:07.848756Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 3 LatestStep: 3000 ... tx2 planned at step 3000 ... tablet1 at 2499 ... tablet2 at 2499 ... tablet3 at 3000 ... unblocking tx1 at tablet2 ... unblocking NKikimr::TEvTxProcessing: ... R_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... 
blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... unblocking update: Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 6 LatestStep: 0 ... unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2025-03-31T16:38:08.083773Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 6 LatestStep: 0 ... unblocking update: Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 6 LatestStep: 2500 FrozenTablets: 72057594047365120 FrozenTablets: 72057594047365121 FrozenSteps: 2499 FrozenSteps: 2499 ... unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2025-03-31T16:38:08.094531Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 6 LatestStep: 2500 FrozenTablets: 72057594047365120 FrozenTablets: 72057594047365121 FrozenSteps: 2499 FrozenSteps: 2499 ... unblocking update: Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 6 LatestStep: 3000 ... unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2025-03-31T16:38:08.105308Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 6 LatestStep: 3000 ... unblocking update: Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 6 LatestStep: 3500 ... unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2025-03-31T16:38:08.116555Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 6 LatestStep: 3500 ... unblocking plan for tablet2 ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_COORDINATOR_MEDIATORQ_ACTOR ... unblocking update: Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 6 LatestStep: 3500 FrozenTablets: 72057594047365121 FrozenSteps: 2999 ... 
unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2025-03-31T16:38:08.139411Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 6 LatestStep: 3500 FrozenTablets: 72057594047365121 FrozenSteps: 2999 ... unblocking plan for tablet2 ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_COORDINATOR_MEDIATORQ_ACTOR ... unblocking update: Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 6 LatestStep: 3500 FrozenTablets: 72057594047365121 FrozenSteps: 3499 ... unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2025-03-31T16:38:08.161125Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 6 LatestStep: 3500 FrozenTablets: 72057594047365121 FrozenSteps: 3499 ... unblocking plan for tablet2 ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_COORDINATOR_MEDIATORQ_ACTOR ... unblocking update: Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 6 LatestStep: 3500 UnfrozenTablets: 72057594047365121 ... unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2025-03-31T16:38:08.189011Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 6 LatestStep: 3500 UnfrozenTablets: 72057594047365121 ... 
restarting mediator 2025-03-31T16:38:08.200991Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE EvClientDestroyed 2025-03-31T16:38:08.201182Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Mediator# 72057594046382081 NKikimrTxMediatorTimecast.TEvGranularWatch Bucket: 0 SubscriptionId: 7 Tablets: 72057594047365123 Tablets: 72057594047365120 Tablets: 72057594047365121 MinStep: 3500 2025-03-31T16:38:08.201239Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Mediator# 72057594046382081 {TEvWatch Bucket# 0} 2025-03-31T16:38:08.202259Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE EvClientConnected 2025-03-31T16:38:08.202423Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Mediator# 72057594046382081 NKikimrTxMediatorTimecast.TEvGranularWatch Bucket: 0 SubscriptionId: 8 Tablets: 72057594047365123 Tablets: 72057594047365120 Tablets: 72057594047365121 MinStep: 3500 2025-03-31T16:38:08.202470Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Mediator# 72057594046382081 {TEvWatch Bucket# 0} 2025-03-31T16:38:08.202957Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE EvClientConnected 2025-03-31T16:38:08.203058Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Mediator# 72057594046382081 NKikimrTxMediatorTimecast.TEvGranularWatch Bucket: 0 SubscriptionId: 9 Tablets: 72057594047365123 Tablets: 72057594047365120 Tablets: 72057594047365121 MinStep: 3500 2025-03-31T16:38:08.203154Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Mediator# 72057594046382081 {TEvWatch Bucket# 0} 2025-03-31T16:38:08.209257Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE EvClientConnected ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2025-03-31T16:38:08.209820Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 0} ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... fully unblocking tx1 ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... 
unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_COORDINATOR_MEDIATORQ_ACTOR ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_COORDINATOR_MEDIATORQ_ACTOR 2025-03-31T16:38:08.235843Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 2500} ... tablet1 at 2500 ... tablet2 at 3500 ... tablet3 at 3500 ... fully unblocking tx2 ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_COORDINATOR_MEDIATORQ_ACTOR ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_COORDINATOR_MEDIATORQ_ACTOR 2025-03-31T16:38:08.246978Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 3000} ... tablet1 at 3000 ... tablet2 at 3500 ... tablet3 at 3500 ... fully unblocking tx3 ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_COORDINATOR_MEDIATORQ_ACTOR 2025-03-31T16:38:08.258082Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 3500} ... tablet1 at 3500 ... tablet2 at 3500 ... 
tablet3 at 3500 |98.0%| [TM] {RESULT} ydb/core/tx/time_cast/ut/unittest >> ServerRestartTest::RestartOnGetSession [GOOD] >> TestJsonParser::NestedTypes >> MediatorTest::PlanStepAckToReconnectedMediator |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/basic_example/gtest >> BasicExample::BasicExample [GOOD] |98.0%| [TM] {RESULT} ydb/public/sdk/cpp/tests/integration/basic_example/gtest >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateBinding [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListBindings >> TestJsonParser::NestedTypes [GOOD] >> TestJsonParser::SimpleBooleans >> TDqSolomonWriteActorTest::TestWriteWithTimeseries [GOOD] >> TDqSolomonWriteActorTest::TestCheckpoints >> Graph::LocalBordersOnGet [GOOD] >> TestJsonParser::SimpleBooleans [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListBindings [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeBinding >> TestJsonParser::ManyBatches >> TestJsonParser::ManyBatches [GOOD] >> TDqSolomonWriteActorTest::TestCheckpoints [GOOD] >> TDqSolomonWriteActorTest::TestShouldReturnAfterCheckpoint >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeBinding [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyBinding >> QueryActorTest::SimpleQuery [GOOD] >> QueryActorTest::Rollback >> TestJsonParser::LittleBatches ------- [TS] {asan, default-linux-x86_64, release} ydb/core/graph/ut/unittest >> Graph::LocalBordersOnGet [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:110:2141] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:110:2141] 2025-03-31T16:37:47.087314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:37:47.087546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:37:47.087587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:37:47.087639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:37:47.091179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:37:47.091252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:37:47.091366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:37:47.091470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:37:47.095535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:37:47.231304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 
2025-03-31T16:37:47.231390Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:37:47.238566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:37:47.238673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:37:47.238821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:37:47.248162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:37:47.248515Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:37:47.252407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-31T16:37:47.252786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:37:47.261627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:37:47.270059Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:37:47.270146Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:37:47.270340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:37:47.270411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:37:47.270507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:37:47.270662Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-31T16:37:47.286621Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-31T16:37:47.411151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:37:47.412309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:37:47.413442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:37:47.414390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:37:47.414465Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:37:47.420347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:37:47.420492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 
2025-03-31T16:37:47.420692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:37:47.420815Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:37:47.420848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T16:37:47.420887Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:37:47.425600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:37:47.425681Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:37:47.425720Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:37:47.430293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:37:47.430357Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:37:47.430397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:37:47.430490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:37:47.435955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:37:47.447958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:37:47.451262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:37:47.452707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:37:47.452890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 4294969445 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:37:47.452994Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:37:47.454304Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:37:47.454378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:37:47.454636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:37:47.454732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no 
IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:37:47.460877Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:37:47.460941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:37:47.461146Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:37:47.461219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:37:47.461495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:37:47.461545Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:37:47.461671Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:37:47.461706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:37:47.461747Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:37:47.461777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:37:47.461836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-31T16:37:47.461886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:37:47.461924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-31T16:37:47.461975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-31T16:37:47.462050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-31T16:37:47.462087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-31T16:37:47.462120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-31T16:37:47.464262Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:37:47.464409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-31T16:37:47.464451Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
cs 2025-03-31T16:38:10.645160Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 101 } Time: 101 2025-03-31T16:38:10.645183Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-03-31T16:38:10.645208Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-03-31T16:38:10.645238Z node 6 :GRAPH TRACE: DB Stored metrics 2025-03-31T16:38:10.645321Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 102 } Time: 102 2025-03-31T16:38:10.645343Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-03-31T16:38:10.645368Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-03-31T16:38:10.645397Z node 6 :GRAPH TRACE: DB Stored metrics 2025-03-31T16:38:10.645467Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 103 } Time: 103 2025-03-31T16:38:10.645489Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-03-31T16:38:10.645514Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-03-31T16:38:10.645547Z node 6 :GRAPH TRACE: DB Stored metrics 2025-03-31T16:38:10.645631Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 104 } Time: 104 2025-03-31T16:38:10.645653Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-03-31T16:38:10.645679Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-03-31T16:38:10.645708Z node 6 :GRAPH TRACE: DB Stored metrics 2025-03-31T16:38:10.645786Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 105 } Time: 105 2025-03-31T16:38:10.645811Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-03-31T16:38:10.645837Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-03-31T16:38:10.645868Z node 6 :GRAPH TRACE: DB Stored metrics 2025-03-31T16:38:10.645934Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 106 } Time: 106 2025-03-31T16:38:10.645956Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-03-31T16:38:10.645981Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-03-31T16:38:10.646011Z node 6 :GRAPH TRACE: DB Stored metrics 2025-03-31T16:38:10.646094Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 107 } Time: 107 2025-03-31T16:38:10.646114Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-03-31T16:38:10.646141Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-03-31T16:38:10.646171Z node 6 :GRAPH TRACE: DB Stored metrics 2025-03-31T16:38:10.646242Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 108 } Time: 108 2025-03-31T16:38:10.646266Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-03-31T16:38:10.646294Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-03-31T16:38:10.646322Z node 6 :GRAPH TRACE: DB Stored metrics 2025-03-31T16:38:10.646383Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 109 } Time: 109 2025-03-31T16:38:10.646404Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-03-31T16:38:10.646429Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-03-31T16:38:10.646458Z node 6 :GRAPH TRACE: DB Stored metrics 2025-03-31T16:38:10.646537Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 110 } Time: 110 2025-03-31T16:38:10.646558Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-03-31T16:38:10.646582Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-03-31T16:38:10.646612Z node 6 :GRAPH TRACE: DB Stored metrics 2025-03-31T16:38:10.646681Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 111 } Time: 111 
2025-03-31T16:38:10.646703Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-03-31T16:38:10.646726Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-03-31T16:38:10.646756Z node 6 :GRAPH TRACE: DB Stored metrics 2025-03-31T16:38:10.646826Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 112 } Time: 112 2025-03-31T16:38:10.646851Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-03-31T16:38:10.646875Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-03-31T16:38:10.646906Z node 6 :GRAPH TRACE: DB Stored metrics 2025-03-31T16:38:10.646976Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 113 } Time: 113 2025-03-31T16:38:10.646999Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-03-31T16:38:10.647024Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-03-31T16:38:10.647052Z node 6 :GRAPH TRACE: DB Stored metrics 2025-03-31T16:38:10.647390Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 114 } Time: 114 2025-03-31T16:38:10.647425Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-03-31T16:38:10.647467Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-03-31T16:38:10.647506Z node 6 :GRAPH TRACE: DB Stored metrics 2025-03-31T16:38:10.647602Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 115 } Time: 115 2025-03-31T16:38:10.647626Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-03-31T16:38:10.647651Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-03-31T16:38:10.647680Z node 6 :GRAPH TRACE: DB Stored metrics 2025-03-31T16:38:10.647753Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 116 } Time: 116 2025-03-31T16:38:10.647775Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-03-31T16:38:10.647800Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-03-31T16:38:10.647830Z node 6 :GRAPH TRACE: DB Stored metrics 2025-03-31T16:38:10.647892Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 117 } Time: 117 2025-03-31T16:38:10.647914Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-03-31T16:38:10.647939Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-03-31T16:38:10.647967Z node 6 :GRAPH TRACE: DB Stored metrics 2025-03-31T16:38:10.648055Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 118 } Time: 118 2025-03-31T16:38:10.648081Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-03-31T16:38:10.648107Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-03-31T16:38:10.648136Z node 6 :GRAPH TRACE: DB Stored metrics 2025-03-31T16:38:10.648219Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 119 } Time: 119 2025-03-31T16:38:10.648243Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-03-31T16:38:10.648271Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-03-31T16:38:10.648306Z node 6 :GRAPH TRACE: DB Stored metrics 2025-03-31T16:38:10.648392Z node 6 :GRAPH TRACE: SHARD Handle TEvGraph::TEvGetMetrics from [6:574:2503] 2025-03-31T16:38:10.648459Z node 6 :GRAPH DEBUG: SHARD TTxGetMetrics::Execute 2025-03-31T16:38:10.648507Z node 6 :GRAPH DEBUG: DB Querying from 0 to 119 2025-03-31T16:38:10.668326Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-31T16:38:10.668400Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-31T16:38:10.668417Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-31T16:38:10.668433Z node 6 :GRAPH DEBUG: SHARD 
TTxStoreMetrics::Complete 2025-03-31T16:38:10.668451Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-31T16:38:10.668466Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-31T16:38:10.668480Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-31T16:38:10.668496Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-31T16:38:10.668511Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-31T16:38:10.668526Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-31T16:38:10.668541Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-31T16:38:10.668556Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-31T16:38:10.668570Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-31T16:38:10.668586Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-31T16:38:10.668601Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-31T16:38:10.668615Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-31T16:38:10.668630Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-31T16:38:10.668643Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-31T16:38:10.668658Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-31T16:38:10.668673Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-31T16:38:10.668689Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-31T16:38:10.668705Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-31T16:38:10.668721Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-31T16:38:10.668735Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-31T16:38:10.668749Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-31T16:38:10.668762Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-31T16:38:10.668776Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-31T16:38:10.668792Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-31T16:38:10.668808Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-31T16:38:10.668839Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-31T16:38:10.668853Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-31T16:38:10.668869Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-31T16:38:10.668888Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-31T16:38:10.668909Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-31T16:38:10.668933Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-31T16:38:10.668955Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-31T16:38:10.668975Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-31T16:38:10.668992Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-31T16:38:10.669009Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-31T16:38:10.669033Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-31T16:38:10.669053Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-31T16:38:10.669077Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-31T16:38:10.669105Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-31T16:38:10.669128Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-31T16:38:10.669152Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-31T16:38:10.669176Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-31T16:38:10.669196Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-31T16:38:10.669211Z node 6 :GRAPH 
DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-31T16:38:10.669226Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-31T16:38:10.669241Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-31T16:38:10.669255Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-31T16:38:10.669272Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-31T16:38:10.669287Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-31T16:38:10.669302Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-31T16:38:10.669323Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-31T16:38:10.669345Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-31T16:38:10.669367Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-31T16:38:10.669389Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-31T16:38:10.669410Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-31T16:38:10.669431Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-31T16:38:10.669465Z node 6 :GRAPH DEBUG: SHARD TTxGetMetric::Complete 2025-03-31T16:38:10.669518Z node 6 :GRAPH TRACE: SHARD TxGetMetrics returned 60 points for request 3 2025-03-31T16:38:10.669646Z node 6 :GRAPH TRACE: SVC TEvMetricsResult 3 2025-03-31T16:38:10.669692Z node 6 :GRAPH TRACE: SVC TEvMetricsResult found request 3 resending to [6:575:2504] |98.0%| [TS] {RESULT} ydb/core/graph/ut/unittest >> TestJsonParser::LittleBatches [GOOD] >> test_unknown_data_source.py::TestUnknownDataSource::test_should_fail_unknown_data_source[v2-client0] [GOOD] |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serializable/py3test >> test.py::test_local [GOOD] |98.0%| [TM] {RESULT} ydb/tests/functional/serializable/py3test >> TestJsonParser::MissingFieldsValidation >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyBinding [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDeleteBinding >> TCreateAndDropViewTest::CreateViewOccupiedName [GOOD] >> TCreateAndDropViewTest::CreateViewIfNotExists >> TestJsonParser::MissingFieldsValidation [GOOD] >> TestJsonParser::TypeKindsValidation >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDeleteBinding [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateQuery ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sdk/cpp/sdk_credprovider/unittest >> SdkCredProvider::PingFromProviderAsyncDiscovery [GOOD] Test command err: 2 2 |98.0%| [TM] {RESULT} ydb/tests/functional/sdk/cpp/sdk_credprovider/unittest >> TestJsonParser::TypeKindsValidation [GOOD] >> AnalyzeDatashard::DropTableNavigateError [GOOD] >> TestJsonParser::NumbersValidation >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateQuery [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListQueries >> DiscoveryIsNotBroken::HaveKafkaSslEndpointInDiscovery [GOOD] >> Functions::CreateRequest [GOOD] >> Functions::CreateResponse [GOOD] >> KafkaProtocol::ProduceScenario >> TestJsonParser::NumbersValidation [GOOD] >> KqpTpch::Query12 [GOOD] >> KqpTpch::Query13 >> MediatorTest::PlanStepAckToReconnectedMediator [GOOD] >> TestJsonParser::StringsValidation >> TopicSessionTests::TwoSessionsWithDifferentSchemes [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListQueries [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeQuery >> TestJsonParser::StringsValidation [GOOD] 
>> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--false] >> MediatorTest::WatcherReconnect >> TestJsonParser::NestedJsonValidation |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/server_restart/gtest >> ServerRestartTest::RestartOnGetSession [GOOD] |98.0%| [TM] {RESULT} ydb/public/sdk/cpp/tests/integration/server_restart/gtest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeDatashard::DropTableNavigateError [GOOD] Test command err: 2025-03-31T16:35:48.099333Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:35:48.099611Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:35:48.099683Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000eb4/r3tmp/tmpK1wjX5/pdisk_1.dat 2025-03-31T16:35:48.500690Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61330, node 1 2025-03-31T16:35:48.788132Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:48.788195Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:48.788246Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:48.790909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:35:48.791409Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:35:48.879233Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:48.879435Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:48.895676Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7163 2025-03-31T16:35:49.473627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:35:53.009055Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-31T16:35:53.049480Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:53.049612Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:53.081852Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:35:53.084053Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:35:53.341345Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:35:53.342032Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:35:53.342573Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:35:53.342699Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:35:53.342982Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:35:53.343066Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:35:53.343179Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:35:53.343249Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:35:53.343364Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:35:53.532341Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:53.532482Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:53.548838Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:35:53.721382Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:35:53.781324Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-31T16:35:53.781441Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-31T16:35:53.816998Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-31T16:35:53.817824Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-31T16:35:53.818107Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-31T16:35:53.818169Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-31T16:35:53.818238Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-31T16:35:53.818325Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-31T16:35:53.818406Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-31T16:35:53.818471Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-31T16:35:53.819022Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-31T16:35:53.844563Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-31T16:35:53.844690Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1872:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-31T16:35:53.853390Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1886:2608] 2025-03-31T16:35:53.865060Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1908:2617] 2025-03-31T16:35:53.865987Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1908:2617], schemeshard id = 72075186224037897 2025-03-31T16:35:53.869083Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-31T16:35:53.950535Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-31T16:35:53.950606Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-31T16:35:53.950686Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-31T16:35:53.968642Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-31T16:35:53.983437Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-31T16:35:53.983642Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-31T16:35:54.177972Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-31T16:35:54.351296Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-31T16:35:54.499758Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-31T16:35:55.628976Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2241:3074], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:55.629147Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:55.647894Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-31T16:35:56.200231Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2542:3123], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:56.200410Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:56.201969Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2547:3127]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:35:56.202187Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-31T16:35:56.202291Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2549:3129] 2025-03-31T16:35:56.202360Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2549:3129] 2025-03-31T16:35:56.203042Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2550:2993] 2025-03-31T16:35:56.203405Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2549:3129], server id = [2:2550:2993], tablet id = 72075186224037894, status = OK 2025-03-31T16:35:56.203645Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:2550:2993], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-03-31T16:35:56.203725Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-03-31T16:35:56.203960Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-31T16:35:56.204047Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2547:3127], StatRequests.size() = 1 2025-03-31T16:35:56.225461Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2554:3133], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:56.225595Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:56.226052Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2559:3138], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:56.233271Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-03-31T16:35:56.395852Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-03-31T16:35:56.395932Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-03-31T16:35:56.505932Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2549:3129], schemeshard count = 1 2025-03-31T16:35:56.991488Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorA ... 03-31T16:37:03.941896Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-31T16:37:06.485083Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-31T16:37:06.485352Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-31T16:37:09.713210Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-31T16:37:12.295778Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-31T16:37:12.296135Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-31T16:37:15.686685Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-31T16:37:18.072350Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-31T16:37:18.072748Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-31T16:37:21.557573Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-31T16:37:23.740501Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-31T16:37:23.772575Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-31T16:37:27.166537Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-31T16:37:29.997915Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-31T16:37:29.998309Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-31T16:37:33.415294Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-31T16:37:35.791851Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-31T16:37:35.792240Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-31T16:37:39.126711Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-31T16:37:41.967679Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-31T16:37:41.967992Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-31T16:37:45.715656Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-31T16:37:48.627662Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-31T16:37:48.628068Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-31T16:37:52.441456Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-31T16:37:55.177369Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 
2025-03-31T16:37:55.177721Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-31T16:37:59.227584Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-31T16:38:01.857474Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-31T16:38:01.857872Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-31T16:38:05.786418Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-31T16:38:07.224709Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-03-31T16:38:07.224799Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-03-31T16:38:07.224851Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-03-31T16:38:07.224903Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-03-31T16:38:08.878550Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-31T16:38:08.878939Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-31T16:38:08.922575Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037897 2025-03-31T16:38:08.922655Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 219.000000s, at schemeshard: 72075186224037897 2025-03-31T16:38:08.922984Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037897, stats size# 25 2025-03-31T16:38:08.938080Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-03-31T16:38:10.239102Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:38:10.239215Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-03-31T16:38:10.239265Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-03-31T16:38:10.239325Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table. 2025-03-31T16:38:10.239379Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-03-31T16:38:10.239847Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-31T16:38:10.244876Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-03-31T16:38:10.249317Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6725:4734], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:38:10.249438Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6736:4739], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:38:10.249576Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:38:10.265367Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897 2025-03-31T16:38:10.344214Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:6739:4742], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-03-31T16:38:10.529163Z node 2 :TX_PROXY ERROR: Actor# [2:6837:4789] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:38:10.576797Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:6866:4804]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:38:10.577041Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-31T16:38:10.577134Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:6868:4806] 2025-03-31T16:38:10.577316Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:6868:4806] 2025-03-31T16:38:10.577680Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:6869:4807] 2025-03-31T16:38:10.577833Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:6869:4807], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-03-31T16:38:10.577882Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-03-31T16:38:10.577993Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:6868:4806], server id = [2:6869:4807], tablet id = 72075186224037894, status = OK 2025-03-31T16:38:10.578061Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-31T16:38:10.578134Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:6866:4804], StatRequests.size() = 1 2025-03-31T16:38:10.712402Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YTk2NGJmZjgtZmYzZTY1NDEtYTIxNTU4MzctMWQxYjVmZjc=, TxId: 2025-03-31T16:38:10.712479Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YTk2NGJmZjgtZmYzZTY1NDEtYTIxNTU4MzctMWQxYjVmZjc=, TxId: 2025-03-31T16:38:10.713014Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-31T16:38:10.733027Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-03-31T16:38:10.733119Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-03-31T16:38:10.802766Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-03-31T16:38:10.802866Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-03-31T16:38:10.886618Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:6868:4806], schemeshard count = 1 2025-03-31T16:38:11.992941Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-31T16:38:11.993058Z node 2 :STATISTICS ERROR: [72075186224037894] IsColumnTable. traversal path [OwnerId: 72075186224037897, LocalPathId: 4] is not known to schemeshard 2025-03-31T16:38:11.993391Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-03-31T16:38:11.996263Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-03-31T16:38:12.015005Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MWZkNmEwNzUtZWYzN2E5MDYtNjUxMmY1MGQtNmM5YWY2YWQ=, TxId: 2025-03-31T16:38:12.015105Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MWZkNmEwNzUtZWYzN2E5MDYtNjUxMmY1MGQtNmM5YWY2YWQ=, TxId: 2025-03-31T16:38:12.015588Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-31T16:38:12.036097Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-31T16:38:12.036208Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2864:3276] 2025-03-31T16:38:12.036846Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:6949:4860]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-03-31T16:38:12.043934Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-31T16:38:12.044020Z node 2 :STATISTICS ERROR: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] Navigate failed 2025-03-31T16:38:12.044065Z node 2 :STATISTICS DEBUG: ReplyFailed(), request id = 2 >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeQuery [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyQuery >> TopicSessionTests::TwoSessionsWithDifferentColumnTypes >> TestJsonParser::NestedJsonValidation [GOOD] >> BulkUpsert::BulkUpsert [GOOD] |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> KeyValueGRPCService::SimpleWriteReadRange [GOOD] >> KeyValueGRPCService::SimpleWriteListRange >> TestJsonParser::BoolsValidation >> TestJsonParser::BoolsValidation [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyQuery [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDeleteQuery >> TestJsonParser::JsonStructureValidation >> DataCleanup::ForceDataCleanupWithRestart [GOOD] >> DataCleanup::OutReadSetsCleanedAfterCopyTable >> test.py::test_wait_for_cluster_ready [GOOD] >> test.py::test_counter >> test_config_with_metadata.py::TestConfigWithMetadataBlock::test_cluster_is_operational_with_metadata >> TestJsonParser::JsonStructureValidation [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDeleteQuery [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendControlQuery >> TestPurecalcFilter::Simple1 >> QueryActorTest::Rollback [GOOD] >> QueryActorTest::Commit >> DataShardReplication::SplitMergeChangesReboots [GOOD] >> DataShardReplication::ReplicatedTable+UseSink >> QuoterWithKesusTest::GetsQuotaAfterPause [GOOD] >> QuoterWithKesusTest::GetsSeveralQuotas >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendControlQuery [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendGetResultData >> test.py::test_counter [GOOD] >> KqpTpch::Query13 [GOOD] >> KqpTpch::Query14 |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test 
>> test.py::test_viewer_nodes >> test.py::test_viewer_nodes [GOOD] >> test.py::test_storage_groups [GOOD] >> test.py::test_viewer_sysinfo >> test.py::test_viewer_sysinfo [GOOD] >> test.py::test_viewer_vdiskinfo [GOOD] >> test.py::test_viewer_pdiskinfo [GOOD] >> test.py::test_viewer_bsgroupinfo >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendGetResultData [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListJobs >> test.py::test_viewer_bsgroupinfo [GOOD] >> test.py::test_viewer_tabletinfo >> test.py::test_viewer_tabletinfo [GOOD] >> test.py::test_viewer_describe >> test.py::test_viewer_describe [GOOD] >> test.py::test_viewer_cluster [GOOD] >> test.py::test_viewer_tenantinfo [GOOD] >> test.py::test_viewer_tenantinfo_db >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListJobs [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeJob >> test.py::test_viewer_tenantinfo_db [GOOD] >> test.py::test_viewer_healthcheck >> TCreateAndDropViewTest::CreateViewIfNotExists [GOOD] >> TCreateAndDropViewTest::DropView >> MediatorTest::WatcherReconnect [GOOD] >> test.py::test_viewer_healthcheck [GOOD] >> test.py::test_viewer_acl >> test.py::test_viewer_acl [GOOD] >> test.py::test_viewer_autocomplete >> test.py::test_viewer_autocomplete [GOOD] >> test.py::test_viewer_check_access >> test.py::test_viewer_check_access [GOOD] >> test.py::test_viewer_query >> KafkaProtocol::ProduceScenario [GOOD] >> KafkaProtocol::FetchScenario >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeJob [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateConnection |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> MediatorTest::MultipleSteps |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test.py::test_viewer_query [GOOD] >> test.py::test_viewer_query_issue_13757 >> test_crud.py::TestYdbCrudOperations::test_crud_operations [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateConnection [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateConnectionWithServiceAccount >> KqpTpch::Query14 [GOOD] >> KqpTpch::Query15 >> test.py::test_viewer_query_issue_13757 [GOOD] >> test.py::test_viewer_query_issue_13945 >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListConnections |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/bulk_upsert/gtest >> BulkUpsert::BulkUpsert [GOOD] |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.0%| [TM] {RESULT} ydb/public/sdk/cpp/tests/integration/bulk_upsert/gtest >> test.py::test_viewer_query_issue_13945 [GOOD] >> test.py::test_pqrb_tablet >> test.py::test_pqrb_tablet [GOOD] >> test.py::test_viewer_nodes_issue_14992 >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListConnections [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeConnection >> test.py::test_viewer_nodes_issue_14992 [GOOD] >> test.py::test_operations_list >> test.py::test_operations_list [GOOD] >> test.py::test_operations_list_page >> test.py::test_operations_list_page [GOOD] >> test.py::test_operations_list_page_bad >> 
test.py::test_operations_list_page_bad [GOOD] >> test.py::test_topic_data >> TDqSolomonWriteActorTest::TestShouldReturnAfterCheckpoint [GOOD] >> QueryActorTest::Commit [GOOD] >> QueryActorTest::StreamQuery >> TTxDataShardBuildIndexScan::ShadowBorrowCompaction [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeConnection [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyConnection >> TestPurecalcFilter::Simple1 [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyConnection [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyConnectionWithServiceAccount >> TopicSessionTests::TwoSessionsWithDifferentColumnTypes [GOOD] |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> TestPurecalcFilter::Simple2 >> DataShardCompaction::CompactBorrowedTxStatus [GOOD] |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_build_index/unittest >> TTxDataShardBuildIndexScan::ShadowBorrowCompaction [GOOD] Test command err: 2025-03-31T16:37:58.859062Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:37:58.859421Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:37:58.859642Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00068a/r3tmp/tmpAPtrzW/pdisk_1.dat 2025-03-31T16:37:59.540557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:37:59.641511Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:37:59.703597Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:37:59.704935Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:37:59.718037Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:37:59.822872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:37:59.909390Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-31T16:37:59.909586Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:37:59.970453Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:37:59.970626Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:37:59.973143Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:37:59.973217Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:37:59.973286Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:37:59.975061Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:37:59.975282Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:37:59.975405Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-31T16:37:59.986679Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:38:00.017643Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T16:38:00.019365Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:38:00.019593Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-31T16:38:00.019636Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:38:00.019679Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T16:38:00.019715Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:38:00.022724Z node 1 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T16:38:00.022876Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T16:38:00.023039Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:38:00.023107Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:38:00.023232Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:38:00.023279Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:38:00.023390Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-31T16:38:00.023598Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:38:00.023893Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T16:38:00.025075Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T16:38:00.029541Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:38:00.040521Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:38:00.041668Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-31T16:38:00.200032Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-31T16:38:00.206142Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T16:38:00.206249Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:38:00.206977Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:38:00.207029Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:38:00.207253Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-31T16:38:00.207572Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-31T16:38:00.207740Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:38:00.208436Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:38:00.208518Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-31T16:38:00.213346Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-31T16:38:00.216078Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-03-31T16:38:00.218952Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-03-31T16:38:00.219024Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:38:00.221345Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-03-31T16:38:00.221468Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:38:00.222507Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:38:00.222559Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:38:00.222609Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-31T16:38:00.222677Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-31T16:38:00.222723Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-31T16:38:00.222845Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:38:00.227271Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:38:00.229871Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-03-31T16:38:00.230229Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-03-31T16:38:00.230304Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-31T16:38:00.270565Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:38:00.270738Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:747:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:38:00.270833Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:38:00.280014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:38:00.287569Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:38:00.444708Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:38:00.448346Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:38:00.542245Z node 1 :TX_PROXY ERROR: Actor# [1:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:38:01.681459Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqph2atbdcyfntc1p9b048x0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjY2ODEzZTQtYzA1NjNhYTgtOTVlODI0MWQtOTQyYWE5NzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:38:01.735259Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:856:2687], serverId# [1:857:2688], sessionId# [0:0:0] 2025-03-31T16:38:01.738696Z node 1 :TX_DATASHARD DEBUG: Executing write operation for [0:2] at 72075186224037888 2025-03-31T16:38:01.738998Z node 1 :TX_DATASHARD DEBUG: Executed write operation for [0:2] at 72075186224037888, row count=3 ... 215892Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037891 2025-03-31T16:38:21.215918Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:38:21.215951Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [73000:281474976715666] at 72075186224037891 for WaitForStreamClearance 2025-03-31T16:38:21.215990Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:38:21.216029Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037891:1:25} Tx{39, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} hope 1 -> done Change{15, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-03-31T16:38:21.216077Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037891:1:25} Tx{39, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} release 4194304b of static, Memory{0 dyn 0} 2025-03-31T16:38:21.216181Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:1:25} Tx{39, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} queued, type NKikimr::NDataShard::TDataShard::TTxProgressTransaction 2025-03-31T16:38:21.216241Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:1:25} Tx{39, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-31T16:38:21.216274Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-03-31T16:38:21.216298Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:38:21.216340Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [73000:281474976715666] at 72075186224037890 for WaitForStreamClearance 2025-03-31T16:38:21.216665Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:38:21.216731Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:1:26} Tx{39, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} hope 1 -> done Change{15, redo 134b alter 0b annex 0, ~{ 1 } -{ }, 0 gb} 2025-03-31T16:38:21.216771Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:1:26} Tx{39, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} release 4194304b of static, Memory{0 dyn 0} 
2025-03-31T16:38:21.217012Z node 2 :TABLET_EXECUTOR INFO: Leader{72075186224037890:1:27} starting Scan{8 on 1001, TReadTableScan} 2025-03-31T16:38:21.217546Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715666, MessageQuota: 1 2025-03-31T16:38:21.217754Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:1:27} commited cookie 1 for step 24 2025-03-31T16:38:21.217805Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037890 step# 73000} 2025-03-31T16:38:21.217878Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-03-31T16:38:21.218667Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:1:27} commited cookie 8 for step 25 2025-03-31T16:38:21.219022Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037890, TxId: 281474976715666, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-03-31T16:38:21.219206Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715666, PendingAcks: 0 2025-03-31T16:38:21.219257Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037890, TxId: 281474976715666, MessageQuota: 0 2025-03-31T16:38:21.225122Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037890 2025-03-31T16:38:21.225214Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715666, at: 72075186224037890 2025-03-31T16:38:21.225725Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:1:27} Tx{40, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} queued, type NKikimr::NDataShard::TDataShard::TTxProgressTransaction 2025-03-31T16:38:21.225776Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:1:27} Tx{40, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-31T16:38:21.225823Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-03-31T16:38:21.225878Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:38:21.225919Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [73000:281474976715666] at 72075186224037890 for ReadTableScan 2025-03-31T16:38:21.226191Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:38:21.226282Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:1:27} Tx{40, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} hope 1 -> done Change{16, redo 336b alter 0b annex 0, ~{ 1, 3, 4, 12, 7, 8 } -{ }, 0 gb} 2025-03-31T16:38:21.226334Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:1:27} Tx{40, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} release 4194304b of static, Memory{0 dyn 0} 2025-03-31T16:38:21.239960Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:1:27} commited cookie 1 for step 26 2025-03-31T16:38:21.240058Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-03-31T16:38:21.240112Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-03-31T16:38:21.240199Z node 2 :TX_DATASHARD DEBUG: Complete [73000 : 281474976715666] from 72075186224037890 at tablet 72075186224037890 send result to client [2:1597:3368], exec latency: 0 ms, propose latency: 0 ms 2025-03-31T16:38:21.240286Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-03-31T16:38:21.240644Z 
node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037891:1:25} commited cookie 1 for step 24 2025-03-31T16:38:21.240691Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037891 step# 73000} 2025-03-31T16:38:21.240740Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037891 2025-03-31T16:38:21.240769Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037891 2025-03-31T16:38:21.240971Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:97} Tx{97, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxMediatorConfirmations} queued, type NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxMediatorConfirmations 2025-03-31T16:38:21.241036Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:97} Tx{97, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxMediatorConfirmations} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-31T16:38:21.241251Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:97} Tx{97, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxMediatorConfirmations} hope 1 -> done Change{91, redo 184b alter 0b annex 0, ~{ 4, 0 } -{ }, 0 gb} 2025-03-31T16:38:21.241324Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:97} Tx{97, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxMediatorConfirmations} release 4194304b of static, Memory{0 dyn 0} 2025-03-31T16:38:21.241539Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037891:1:25} Tx{40, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} queued, type NKikimr::NDataShard::TDataShard::TTxProgressTransaction 2025-03-31T16:38:21.241581Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037891:1:25} Tx{40, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-31T16:38:21.241623Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037891 2025-03-31T16:38:21.241664Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:38:21.241699Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [73000:281474976715666] at 72075186224037891 for WaitForStreamClearance 2025-03-31T16:38:21.241942Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:38:21.242015Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037891:1:26} Tx{40, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} hope 1 -> done Change{15, redo 134b alter 0b annex 0, ~{ 1 } -{ }, 0 gb} 2025-03-31T16:38:21.242058Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037891:1:26} Tx{40, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} release 4194304b of static, Memory{0 dyn 0} 2025-03-31T16:38:21.242357Z node 2 :TABLET_EXECUTOR INFO: Leader{72075186224037891:1:27} starting Scan{8 on 1001, TReadTableScan} 2025-03-31T16:38:21.242852Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037891, TxId: 281474976715666, MessageQuota: 1 2025-03-31T16:38:21.243047Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:98} commited cookie 1 for step 97 2025-03-31T16:38:21.243301Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037891:1:27} commited cookie 8 for step 25 2025-03-31T16:38:21.243866Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037891, TxId: 281474976715666, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-03-31T16:38:21.309230Z node 2 :TX_DATASHARD DEBUG: Got stream data ack 
ShardId: 72075186224037891, TxId: 281474976715666, PendingAcks: 0 2025-03-31T16:38:21.309320Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037891, TxId: 281474976715666, MessageQuota: 0 2025-03-31T16:38:21.311265Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037891 2025-03-31T16:38:21.311333Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715666, at: 72075186224037891 2025-03-31T16:38:21.311837Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037891:1:27} Tx{41, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} queued, type NKikimr::NDataShard::TDataShard::TTxProgressTransaction 2025-03-31T16:38:21.311898Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037891:1:27} Tx{41, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-31T16:38:21.311951Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037891 2025-03-31T16:38:21.311992Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:38:21.312033Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [73000:281474976715666] at 72075186224037891 for ReadTableScan 2025-03-31T16:38:21.312300Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:38:21.312400Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037891:1:27} Tx{41, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} hope 1 -> done Change{16, redo 336b alter 0b annex 0, ~{ 1, 3, 4, 12, 7, 8 } -{ }, 0 gb} 2025-03-31T16:38:21.312458Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037891:1:27} Tx{41, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} release 4194304b of static, Memory{0 dyn 0} 2025-03-31T16:38:21.323340Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037891:1:27} commited cookie 1 for step 26 2025-03-31T16:38:21.323439Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037891 2025-03-31T16:38:21.323498Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037891 2025-03-31T16:38:21.323581Z node 2 :TX_DATASHARD DEBUG: Complete [73000 : 281474976715666] from 72075186224037891 at tablet 72075186224037891 send result to client [2:1597:3368], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:38:21.323634Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 |98.1%| [TM] {RESULT} ydb/core/tx/datashard/ut_build_index/unittest >> MediatorTest::MultipleSteps [GOOD] >> TopicSessionTests::RestartSessionIfQueryStopped >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDeleteConnection ------- [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/solomon/actors/ut/unittest >> TDqSolomonWriteActorTest::TestShouldReturnAfterCheckpoint [GOOD] Test command err: 2025-03-31T16:37:38.991677Z node 1 :KQP_COMPUTE DEBUG: TxId: TxId-42, Solomon sink. Init 2025-03-31T16:37:38.993215Z node 1 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Got 1 items to send. Checkpoint: 0. Send queue: 0. Inflight: 0. Checkpoint in progress: 0 2025-03-31T16:37:38.994394Z node 1 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 121 bytes of data to buffer 2025-03-31T16:37:38.995588Z node 1 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. 
Sent 1 metrics with size of 121 bytes to solomon 2025-03-31T16:37:38.995615Z node 1 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-03-31T16:37:39.004007Z node 1 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[0]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 26 Date: Mon, 31 Mar 2025 16:37:39 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1} 2025-03-31T16:37:39.007058Z node 1 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-03-31T16:37:49.292834Z node 2 :KQP_COMPUTE DEBUG: TxId: TxId-42, Solomon sink. Init 2025-03-31T16:37:49.302668Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Got 7500 items to send. Checkpoint: 0. Send queue: 0. Inflight: 0. Checkpoint in progress: 0 2025-03-31T16:37:49.318553Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 107903 bytes of data to buffer 2025-03-31T16:37:49.336542Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 109013 bytes of data to buffer 2025-03-31T16:37:49.357063Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 109013 bytes of data to buffer 2025-03-31T16:37:49.375913Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 109013 bytes of data to buffer 2025-03-31T16:37:49.393485Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 109013 bytes of data to buffer 2025-03-31T16:37:49.409037Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 109013 bytes of data to buffer 2025-03-31T16:37:49.425764Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 109013 bytes of data to buffer 2025-03-31T16:37:49.434258Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 54513 bytes of data to buffer 2025-03-31T16:37:49.434644Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 107903 bytes to solomon 2025-03-31T16:37:49.434986Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 109013 bytes to solomon 2025-03-31T16:37:49.441067Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 109013 bytes to solomon 2025-03-31T16:37:49.441119Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2025-03-31T16:37:49.546995Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[1]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Mon, 31 Mar 2025 16:37:49 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1000} 2025-03-31T16:37:49.547561Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 109013 bytes to solomon 2025-03-31T16:37:49.547590Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2025-03-31T16:37:49.602641Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[0]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Mon, 31 Mar 2025 16:37:49 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1000} 2025-03-31T16:37:49.603026Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 109013 bytes to solomon 2025-03-31T16:37:49.603043Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. 
Reason: MaxRequestsInflight 2025-03-31T16:37:49.671827Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[2]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Mon, 31 Mar 2025 16:37:49 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1000} 2025-03-31T16:37:49.672229Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 109013 bytes to solomon 2025-03-31T16:37:49.672249Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2025-03-31T16:37:49.771489Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[5]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Mon, 31 Mar 2025 16:37:49 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1000} 2025-03-31T16:37:49.771875Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 109013 bytes to solomon 2025-03-31T16:37:49.771892Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2025-03-31T16:37:49.826542Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[3]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Mon, 31 Mar 2025 16:37:49 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1000} 2025-03-31T16:37:49.826808Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 500 metrics with size of 54513 bytes to solomon 2025-03-31T16:37:49.826839Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer MaxRequestsInflight 2025-03-31T16:37:49.885054Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[4]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Mon, 31 Mar 2025 16:37:49 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1000} 2025-03-31T16:37:49.885165Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-03-31T16:37:49.929601Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[7]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 28 Date: Mon, 31 Mar 2025 16:37:49 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 500} 2025-03-31T16:37:49.929702Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-03-31T16:37:49.989488Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[6]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Mon, 31 Mar 2025 16:37:49 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1000} 2025-03-31T16:37:49.989611Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-03-31T16:38:00.645453Z node 3 :KQP_COMPUTE DEBUG: TxId: TxId-42, Solomon sink. Init 2025-03-31T16:38:00.645750Z node 3 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Got 10 items to send. Checkpoint: 0. Send queue: 0. Inflight: 0. Checkpoint in progress: 0 2025-03-31T16:38:00.646120Z node 3 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 579 bytes of data to buffer 2025-03-31T16:38:00.646282Z node 3 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. 
Sent 10 metrics with size of 579 bytes to solomon 2025-03-31T16:38:00.646302Z node 3 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-03-31T16:38:00.656381Z node 3 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[0]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 27 Date: Mon, 31 Mar 2025 16:38:00 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 10} 2025-03-31T16:38:00.656491Z node 3 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-03-31T16:38:10.897166Z node 4 :KQP_COMPUTE DEBUG: TxId: TxId-42, Solomon sink. Init 2025-03-31T16:38:10.899463Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Got 2400 items to send. Checkpoint: 1. Send queue: 0. Inflight: 0. Checkpoint in progress: 0 2025-03-31T16:38:10.927821Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 107903 bytes of data to buffer 2025-03-31T16:38:10.945839Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 109013 bytes of data to buffer 2025-03-31T16:38:10.953246Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 43613 bytes of data to buffer 2025-03-31T16:38:10.953638Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 107903 bytes to solomon 2025-03-31T16:38:10.956283Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 109013 bytes to solomon 2025-03-31T16:38:10.957145Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 400 metrics with size of 43613 bytes to solomon 2025-03-31T16:38:10.957167Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2025-03-31T16:38:10.987884Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[2]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 28 Date: Mon, 31 Mar 2025 16:38:10 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 400} 2025-03-31T16:38:10.988043Z node 4 :KQP_COMPUTE DEBUG: TxId: TxId-42, Solomon sink. Process checkpoint. Inflight before checkpoint: 2 2025-03-31T16:38:11.056680Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[1]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Mon, 31 Mar 2025 16:38:11 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1000} 2025-03-31T16:38:11.056849Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: CheckpointInProgress Empty buffer 2025-03-31T16:38:11.144315Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[0]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Mon, 31 Mar 2025 16:38:11 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1000} 2025-03-31T16:38:11.144458Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-03-31T16:38:11.747390Z node 5 :KQP_COMPUTE DEBUG: TxId: TxId-42, Solomon sink. Init 2025-03-31T16:38:11.755300Z node 5 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Got 1 items to send. Checkpoint: 1. Send queue: 0. Inflight: 0. Checkpoint in progress: 0 2025-03-31T16:38:11.755526Z node 5 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 121 bytes of data to buffer 2025-03-31T16:38:11.755660Z node 5 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. 
Sent 1 metrics with size of 121 bytes to solomon 2025-03-31T16:38:11.755676Z node 5 :KQP_COMPUTE DEBUG: TxId: TxId-42, Solomon sink. Process checkpoint. Inflight before checkpoint: 1 2025-03-31T16:38:11.759743Z node 5 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[0]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 26 Date: Mon, 31 Mar 2025 16:38:11 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1} 2025-03-31T16:38:11.759904Z node 5 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-03-31T16:38:11.760128Z node 5 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Got 1 items to send. Checkpoint: 0. Send queue: 0. Inflight: 0. Checkpoint in progress: 0 2025-03-31T16:38:11.760269Z node 5 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 121 bytes of data to buffer 2025-03-31T16:38:11.760366Z node 5 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 1 metrics with size of 121 bytes to solomon 2025-03-31T16:38:11.760379Z node 5 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-03-31T16:38:11.764789Z node 5 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[1]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 26 Date: Mon, 31 Mar 2025 16:38:11 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1} 2025-03-31T16:38:11.764904Z node 5 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer |98.1%| [TS] {RESULT} ydb/library/yql/providers/solomon/actors/ut/unittest >> DataCleanup::OutReadSetsCleanedAfterCopyTable [GOOD] >> MediatorTest::WatchesBeforeFirstStep >> DataShardReplication::ReplicatedTable+UseSink [GOOD] >> DataShardReplication::ReplicatedTable-UseSink >> KeyValueGRPCService::SimpleWriteListRange [GOOD] >> KeyValueGRPCService::SimpleGetStorageChannelStatus |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDeleteConnection [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendTestConnection ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_compaction/unittest >> DataShardCompaction::CompactBorrowedTxStatus [GOOD] Test command err: 2025-03-31T16:37:49.107891Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:37:49.108214Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:37:49.108446Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000663/r3tmp/tmpSKfbMn/pdisk_1.dat 2025-03-31T16:37:49.745131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:37:49.838267Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:37:49.885112Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-31T16:37:49.887145Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:37:49.887695Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:37:49.887898Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-03-31T16:37:49.900477Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:37:50.011361Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-03-31T16:37:50.011450Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-03-31T16:37:50.012795Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-03-31T16:37:50.182547Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-03-31T16:37:50.182699Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T16:37:50.183322Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-03-31T16:37:50.183413Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:37:50.183775Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:37:50.183943Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T16:37:50.184144Z node 1 :TX_PROXY DEBUG: Actor# 
[1:641:2549] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-03-31T16:37:50.184468Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvClientConnected 2025-03-31T16:37:50.187660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:37:50.189807Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-03-31T16:37:50.189896Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-03-31T16:37:50.251528Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:37:50.253186Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:37:50.254648Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-31T16:37:50.254947Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:37:50.313259Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:37:50.314175Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:37:50.314296Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:37:50.316565Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:37:50.316645Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:37:50.316696Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:37:50.319725Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:37:50.319930Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:37:50.320034Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-31T16:37:50.331852Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:37:50.363150Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T16:37:50.364525Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:37:50.364720Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-31T16:37:50.364779Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:37:50.364814Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T16:37:50.364866Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:37:50.365095Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
2025-03-31T16:37:50.365138Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:37:50.366760Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T16:37:50.366874Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T16:37:50.366991Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:37:50.367027Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:37:50.367142Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-31T16:37:50.367186Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-31T16:37:50.367218Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-31T16:37:50.367261Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:37:50.367306Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:37:50.367416Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:37:50.367452Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:37:50.367523Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-31T16:37:50.367637Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-03-31T16:37:50.367673Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T16:37:50.367827Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:37:50.368108Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-31T16:37:50.368177Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T16:37:50.368303Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T16:37:50.368349Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-31T16:37:50.368397Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-31T16:37:50.368439Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-03-31T16:37:50.368472Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-31T16:37:50.368744Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-31T16:37:50.368787Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-31T16:37:50.368833Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-31T16:37:50.368856Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 
72075186224037888 on unit FinishPropose 2025-03-31T16:37:50.368904Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-31T16:37:50.368932Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-31T16:37:50.368959Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-31T16:37:50.368982Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-31T16:37:50.369001Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-31T16:37:50.371837Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-31T16:37:50.371897Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:37:50.382690Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Comple ... 3-31T16:38:22.608657Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037892 2025-03-31T16:38:22.608720Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715661, at: 72075186224037892 2025-03-31T16:38:22.608949Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:931:2762], Recipient [2:931:2762]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:38:22.608990Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:38:22.609058Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037892 2025-03-31T16:38:22.609092Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:38:22.609127Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [71500:281474976715661] at 72075186224037892 for ReadTableScan 2025-03-31T16:38:22.609156Z node 2 :TX_DATASHARD TRACE: Trying to execute [71500:281474976715661] at 72075186224037892 on unit ReadTableScan 2025-03-31T16:38:22.609216Z node 2 :TX_DATASHARD TRACE: ReadTable scan complete for [71500:281474976715661] at 72075186224037892 error: , IsFatalError: 0 2025-03-31T16:38:22.609260Z node 2 :TX_DATASHARD TRACE: Execution status for [71500:281474976715661] at 72075186224037892 is Executed 2025-03-31T16:38:22.609291Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [71500:281474976715661] at 72075186224037892 executing on unit ReadTableScan 2025-03-31T16:38:22.609319Z node 2 :TX_DATASHARD TRACE: Add [71500:281474976715661] at 72075186224037892 to execution unit CompleteOperation 2025-03-31T16:38:22.609347Z node 2 :TX_DATASHARD TRACE: Trying to execute [71500:281474976715661] at 72075186224037892 on unit CompleteOperation 2025-03-31T16:38:22.610796Z node 2 :TX_DATASHARD TRACE: Execution status for [71500:281474976715661] at 72075186224037892 is DelayComplete 2025-03-31T16:38:22.610861Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [71500:281474976715661] at 72075186224037892 executing on unit CompleteOperation 2025-03-31T16:38:22.610891Z node 2 :TX_DATASHARD TRACE: Add [71500:281474976715661] at 72075186224037892 to execution unit CompletedOperations 2025-03-31T16:38:22.610921Z node 2 :TX_DATASHARD TRACE: Trying to execute 
[71500:281474976715661] at 72075186224037892 on unit CompletedOperations 2025-03-31T16:38:22.610963Z node 2 :TX_DATASHARD TRACE: Execution status for [71500:281474976715661] at 72075186224037892 is Executed 2025-03-31T16:38:22.610986Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [71500:281474976715661] at 72075186224037892 executing on unit CompletedOperations 2025-03-31T16:38:22.611014Z node 2 :TX_DATASHARD TRACE: Execution plan for [71500:281474976715661] at 72075186224037892 has finished 2025-03-31T16:38:22.611045Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:38:22.611070Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037892 2025-03-31T16:38:22.611122Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037892 has no attached operations 2025-03-31T16:38:22.611152Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037892 2025-03-31T16:38:22.622052Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037892 2025-03-31T16:38:22.622125Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037892 2025-03-31T16:38:22.622163Z node 2 :TX_DATASHARD TRACE: Complete execution for [71500:281474976715661] at 72075186224037892 on unit CompleteOperation 2025-03-31T16:38:22.622219Z node 2 :TX_DATASHARD DEBUG: Complete [71500 : 281474976715661] from 72075186224037892 at tablet 72075186224037892 send result to client [2:1478:3279], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T16:38:22.622264Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-03-31T16:38:22.622431Z node 2 :TX_PROXY DEBUG: Actor# [2:1478:3279] txid# 281474976715661 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# COMPLETE shard id 72075186224037892 marker# P12 2025-03-31T16:38:22.622489Z node 2 :TX_PROXY DEBUG: Send stream clearance, shard: 72075186224037890, txid: 281474976715661, cleared: 1 2025-03-31T16:38:22.622626Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287940, Sender [2:1478:3279], Recipient [2:769:2643]: NKikimrTx.TEvStreamClearanceResponse TxId: 281474976715661 Cleared: true 2025-03-31T16:38:22.622665Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearanceResponse 2025-03-31T16:38:22.622747Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:769:2643], Recipient [2:769:2643]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:38:22.622778Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:38:22.622870Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-03-31T16:38:22.622913Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:38:22.622952Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [71500:281474976715661] at 72075186224037890 for WaitForStreamClearance 2025-03-31T16:38:22.622980Z node 2 :TX_DATASHARD TRACE: Trying to execute [71500:281474976715661] at 72075186224037890 on unit WaitForStreamClearance 2025-03-31T16:38:22.623013Z node 2 :TX_DATASHARD TRACE: Got stream clearance for [71500:281474976715661] at 72075186224037890 2025-03-31T16:38:22.623048Z node 2 :TX_DATASHARD TRACE: Execution status for [71500:281474976715661] at 72075186224037890 is Executed 
2025-03-31T16:38:22.623077Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [71500:281474976715661] at 72075186224037890 executing on unit WaitForStreamClearance 2025-03-31T16:38:22.623131Z node 2 :TX_DATASHARD TRACE: Add [71500:281474976715661] at 72075186224037890 to execution unit ReadTableScan 2025-03-31T16:38:22.623159Z node 2 :TX_DATASHARD TRACE: Trying to execute [71500:281474976715661] at 72075186224037890 on unit ReadTableScan 2025-03-31T16:38:22.623405Z node 2 :TX_DATASHARD TRACE: Execution status for [71500:281474976715661] at 72075186224037890 is Continue 2025-03-31T16:38:22.623440Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:38:22.623465Z node 2 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037890 2025-03-31T16:38:22.623497Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037890 has no attached operations 2025-03-31T16:38:22.623523Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2025-03-31T16:38:22.623996Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [2:1510:3308], Recipient [2:769:2643]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-03-31T16:38:22.624033Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2025-03-31T16:38:22.624305Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715661, MessageQuota: 1 2025-03-31T16:38:22.624406Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037890, TxId: 281474976715661, MessageQuota: 1 2025-03-31T16:38:22.625987Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037890 2025-03-31T16:38:22.626027Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715661, at: 72075186224037890 2025-03-31T16:38:22.626236Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:769:2643], Recipient [2:769:2643]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:38:22.626275Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:38:22.626331Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-03-31T16:38:22.626364Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 1 immediate 0 planned 1 2025-03-31T16:38:22.626399Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [71500:281474976715661] at 72075186224037890 for ReadTableScan 2025-03-31T16:38:22.626425Z node 2 :TX_DATASHARD TRACE: Trying to execute [71500:281474976715661] at 72075186224037890 on unit ReadTableScan 2025-03-31T16:38:22.626456Z node 2 :TX_DATASHARD TRACE: ReadTable scan complete for [71500:281474976715661] at 72075186224037890 error: , IsFatalError: 0 2025-03-31T16:38:22.626490Z node 2 :TX_DATASHARD TRACE: Execution status for [71500:281474976715661] at 72075186224037890 is Executed 2025-03-31T16:38:22.626516Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [71500:281474976715661] at 72075186224037890 executing on unit ReadTableScan 2025-03-31T16:38:22.626561Z node 2 :TX_DATASHARD TRACE: Add [71500:281474976715661] at 72075186224037890 to execution unit CompleteOperation 2025-03-31T16:38:22.626589Z node 2 :TX_DATASHARD TRACE: Trying to execute [71500:281474976715661] at 72075186224037890 on unit CompleteOperation 2025-03-31T16:38:22.626793Z node 2 :TX_DATASHARD TRACE: Execution 
status for [71500:281474976715661] at 72075186224037890 is DelayComplete 2025-03-31T16:38:22.626821Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [71500:281474976715661] at 72075186224037890 executing on unit CompleteOperation 2025-03-31T16:38:22.626848Z node 2 :TX_DATASHARD TRACE: Add [71500:281474976715661] at 72075186224037890 to execution unit CompletedOperations 2025-03-31T16:38:22.626873Z node 2 :TX_DATASHARD TRACE: Trying to execute [71500:281474976715661] at 72075186224037890 on unit CompletedOperations 2025-03-31T16:38:22.626902Z node 2 :TX_DATASHARD TRACE: Execution status for [71500:281474976715661] at 72075186224037890 is Executed 2025-03-31T16:38:22.626923Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [71500:281474976715661] at 72075186224037890 executing on unit CompletedOperations 2025-03-31T16:38:22.626945Z node 2 :TX_DATASHARD TRACE: Execution plan for [71500:281474976715661] at 72075186224037890 has finished 2025-03-31T16:38:22.626988Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:38:22.627018Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037890 2025-03-31T16:38:22.627043Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037890 has no attached operations 2025-03-31T16:38:22.627070Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2025-03-31T16:38:22.638449Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-03-31T16:38:22.638519Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-03-31T16:38:22.638554Z node 2 :TX_DATASHARD TRACE: Complete execution for [71500:281474976715661] at 72075186224037890 on unit CompleteOperation 2025-03-31T16:38:22.638607Z node 2 :TX_DATASHARD DEBUG: Complete [71500 : 281474976715661] from 72075186224037890 at tablet 72075186224037890 send result to client [2:1478:3279], exec latency: 1 ms, propose latency: 1 ms 2025-03-31T16:38:22.638650Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-03-31T16:38:22.638840Z node 2 :TX_PROXY DEBUG: Actor# [2:1478:3279] txid# 281474976715661 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# COMPLETE shard id 72075186224037890 marker# P12 2025-03-31T16:38:22.638927Z node 2 :TX_PROXY INFO: Actor# [2:1478:3279] txid# 281474976715661 RESPONSE Status# ExecComplete prepare time: 0.000500s execute time: 0.001500s total time: 0.002000s marker# P13 |98.1%| [TM] {RESULT} ydb/core/tx/datashard/ut_compaction/unittest |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TCreateAndDropViewTest::DropView [GOOD] >> TCreateAndDropViewTest::DropViewDisabledFeatureFlag >> KqpTpch::Query15 [GOOD] >> KqpTpch::Query16 >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendTestConnection [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendTestConnectionWithServiceAccount >> AnalyzeColumnshard::AnalyzeRebootSaBeforeAnalyzeTableResponse [GOOD] |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_data_cleanup/unittest >> DataCleanup::OutReadSetsCleanedAfterCopyTable [GOOD] Test command err: 2025-03-31T16:37:45.384234Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for 
error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:37:45.384723Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:37:45.384961Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00056b/r3tmp/tmp0Srvsq/pdisk_1.dat 2025-03-31T16:37:46.353429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:37:46.471371Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:37:46.527996Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:37:46.529423Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:37:46.548069Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:37:46.685117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:37:47.278418Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:769:2651], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:37:47.278629Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:779:2656], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:37:47.278747Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:37:47.292480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:37:47.483334Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:783:2659], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:37:47.595998Z node 1 :TX_PROXY ERROR: Actor# [1:857:2702] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:37:49.316129Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqph1y43ecnvt8r063b7kpas, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWY3OTFhZGUtYjljNGZlMC1mYzhmMWU3NS0xYTM1YTM4ZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:37:53.738622Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:104:2150], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:37:53.738892Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:37:53.739038Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00056b/r3tmp/tmpsyveYW/pdisk_1.dat 2025-03-31T16:37:54.067311Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:37:54.093526Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:37:54.130352Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:37:54.130497Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:37:54.142055Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:37:54.228664Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:37:54.606824Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:763:2645], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:37:54.606938Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:773:2650], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:37:54.607022Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:37:54.612720Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:37:54.757974Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:777:2653], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:37:54.799920Z node 2 :TX_PROXY ERROR: Actor# [2:850:2695] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:37:55.165313Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqph259c07b294a1jy43dc42, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTRiODNmOTctZDI1Zjk4N2QtNjdiYjk3NWUtYjg2ZjE0OTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:37:59.836609Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:306:2349], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:37:59.836841Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:37:59.836940Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00056b/r3tmp/tmpYvzik1/pdisk_1.dat 2025-03-31T16:38:00.167140Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:38:00.199728Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:38:00.237315Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:38:00.237464Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:38:00.249138Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:38:00.336824Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:38:00.716723Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:769:2651], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:38:00.716853Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:779:2656], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:38:00.717290Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:38:00.725185Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:38:00.905578Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:783:2659], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:38:00.948210Z node 3 :TX_PROXY ERROR: Actor# [3:857:2702] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:38:01.314757Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqph2b86e7y7pvcct9dds0ht, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YTkwNWRlMTItMzFhZTk4NjEtODUzYjA2M2YtZTNkYWVmYWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:38:06.143077Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:325:2368], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:38:06.143335Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:38:06.143490Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00056b/r3tmp/tmpDdnhgu/pdisk_1.dat 2025-03-31T16:38:06.480288Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:38:06.526706Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:38:06.574282Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:38:06.574422Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:38:06.586184Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:38:06.682162Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:38:07.092487Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:770:2651], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:38:07.092605Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:780:2656], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:38:07.092687Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:38:07.098925Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:38:07.289148Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:784:2659], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:38:07.324485Z node 4 :TX_PROXY ERROR: Actor# [4:858:2702] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:38:07.738592Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqph2hfjbdfhvc2hmpbera4v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=OGVmYjVhMzYtZWU3ZWY4ZjctNzAzNDRkN2EtNjFlZWE1ZDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:38:12.738693Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:101:2147], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:38:12.738852Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:38:12.738917Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00056b/r3tmp/tmpjxoGE0/pdisk_1.dat 2025-03-31T16:38:13.137111Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:38:13.183512Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:38:13.223575Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:38:13.223746Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:38:13.235675Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:38:13.319400Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:38:13.695248Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:770:2651], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:38:13.695373Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:781:2656], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:38:13.695464Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:38:13.701515Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T16:38:13.878265Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:784:2659], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T16:38:13.914375Z node 5 :TX_PROXY ERROR: Actor# [5:858:2702] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:38:14.314813Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqph2qxx0n8bn9ypyxjhjdxb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=Mjk0NmYyNWQtMWNmZTBmODktMjY1ZTY3YzItZGFkODU3Y2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:38:20.015291Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:306:2349], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:38:20.015557Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:38:20.015662Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00056b/r3tmp/tmpSJ5UtP/pdisk_1.dat 2025-03-31T16:38:20.425727Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:38:20.475864Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:38:20.521071Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:38:20.521267Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:38:20.536580Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:38:20.648647Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:38:21.468472Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:879:2729], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:38:21.468614Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:889:2734], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:38:21.468711Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:38:21.474721Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-31T16:38:21.656055Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:893:2737], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-31T16:38:21.698230Z node 6 :TX_PROXY ERROR: Actor# [6:954:2779] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:38:22.146607Z node 6 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqph2zgt4m5sz4kfdnydrwys, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=6&id=OTVhNmUxYWItOThmZmI2OWMtZjViNWU1YjQtODJmNTQyNzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:38:22.584421Z node 6 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqph307a6gkgmjkkhxa0rswe, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=6&id=NzExM2Y4ZC05ZTkwYzc4MC00YmNjNDI4OS0xZmJmZmM3NQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root |98.1%| [TM] {RESULT} ydb/core/tx/datashard/ut_data_cleanup/unittest >> test_unknown_data_source.py::TestUnknownDataSource::test_should_fail_unknown_data_source[v1-client0] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendTestConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateBinding >> KafkaProtocol::FetchScenario [GOOD] >> KafkaProtocol::BalanceScenario >> QuoterWithKesusTest::GetsSeveralQuotas [GOOD] >> QuoterWithKesusTest::KesusRecreation >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateBinding [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListBindings ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootSaBeforeAnalyzeTableResponse [GOOD] Test command err: 2025-03-31T16:35:45.304841Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:35:45.305163Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:35:45.305260Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000efb/r3tmp/tmp3iUgzN/pdisk_1.dat 2025-03-31T16:35:45.713740Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28912, node 1 2025-03-31T16:35:46.003692Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:46.003742Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:46.003768Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:46.006083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:35:46.006550Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:35:46.105355Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:46.105539Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:46.122127Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25753 2025-03-31T16:35:46.719563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:35:50.145309Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-31T16:35:50.186287Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:50.186408Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:50.217695Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:35:50.219795Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:35:50.460420Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:35:50.461093Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:35:50.461635Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:35:50.461766Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:35:50.462036Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:35:50.462126Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:35:50.462201Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:35:50.462272Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:35:50.462353Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:35:50.628764Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:50.628899Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:50.642125Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:35:50.806398Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:35:50.858377Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-31T16:35:50.858476Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-31T16:35:50.891924Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-31T16:35:50.893676Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-31T16:35:50.893846Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-31T16:35:50.893888Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-31T16:35:50.893933Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-31T16:35:50.893971Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-31T16:35:50.894022Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-31T16:35:50.894064Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-31T16:35:50.895151Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-31T16:35:50.929088Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-31T16:35:50.929205Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1877:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-31T16:35:50.935363Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1888:2606] 2025-03-31T16:35:50.940664Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1918:2621] 2025-03-31T16:35:50.941660Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1918:2621], schemeshard id = 72075186224037897 2025-03-31T16:35:50.947540Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-31T16:35:50.964074Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-31T16:35:50.964145Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-31T16:35:50.964225Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-31T16:35:50.982900Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-31T16:35:50.990895Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-31T16:35:50.991055Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-31T16:35:51.225275Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-31T16:35:51.443939Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-31T16:35:51.535969Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-31T16:35:52.580581Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2240:3075], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:52.580733Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:35:52.598927Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-31T16:35:52.748688Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:35:52.748946Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:35:52.749251Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:35:52.749432Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:35:52.749567Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:35:52.749716Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:35:52.749852Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:35:52.749996Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:35:52.750142Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:35:52.750280Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:35:52.750415Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:35:52.750547Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:35:52.778489Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:35:52.778589Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;desc ... 11] 2025-03-31T16:38:17.425477Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7204:5283], server id = [2:7249:5311], tablet id = 72075186224037894, status = OK 2025-03-31T16:38:17.425573Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:7249:5311], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2025-03-31T16:38:17.425716Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7250:5312] 2025-03-31T16:38:17.425822Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:7250:5312], schemeshard id = 72075186224037897 2025-03-31T16:38:17.549094Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7261:5315] 2025-03-31T16:38:17.550036Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:2803:3225] , Record { OperationId: "operationId" Tables { PathId { OwnerId: 72075186224037897 LocalId: 4 } } Types: TYPE_COUNT_MIN_SKETCH } 2025-03-31T16:38:17.550112Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. Update existing force traversal. OperationId operationId , ReplyToActorId [1:2803:3225] 2025-03-31T16:38:17.550193Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Complete 2025-03-31T16:38:18.800272Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:38:18.800369Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-03-31T16:38:18.800414Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-03-31T16:38:18.800498Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-03-31T16:38:18.800556Z node 2 :STATISTICS DEBUG: [72075186224037894] Start schedule traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-31T16:38:18.801389Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-31T16:38:18.819663Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-31T16:38:18.820129Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-31T16:38:18.820231Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-31T16:38:18.821120Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-03-31T16:38:18.838321Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-03-31T16:38:18.838548Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-03-31T16:38:18.839155Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7300:5340], server id = [2:7301:5341], tablet id = 72075186224037899, status = OK 2025-03-31T16:38:18.839317Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7300:5340], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T16:38:18.843765Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-03-31T16:38:18.843899Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-03-31T16:38:18.844091Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-03-31T16:38:18.844311Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-03-31T16:38:18.844622Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-31T16:38:18.849303Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7300:5340], server id = [2:7301:5341], tablet id = 72075186224037899 2025-03-31T16:38:18.849368Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:38:18.850083Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List<Uint32>; DECLARE $data AS List<String>; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-03-31T16:38:18.896441Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7321:5360]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:38:18.896747Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-31T16:38:18.896812Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7321:5360], StatRequests.size() = 1 2025-03-31T16:38:19.049153Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NTNmODhkMGQtYTMwOTE4ODYtM2YxNzQ2YmUtNzk2YmM2OGQ=, TxId: 2025-03-31T16:38:19.049242Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NTNmODhkMGQtYTMwOTE4ODYtM2YxNzQ2YmUtNzk2YmM2OGQ=, TxId: 2025-03-31T16:38:19.049854Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-31T16:38:19.068827Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-31T16:38:19.068903Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-03-31T16:38:19.591942Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 0 2025-03-31T16:38:19.592040Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-03-31T16:38:20.386236Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-31T16:38:20.386340Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 
2025-03-31T16:38:20.387032Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-31T16:38:20.404946Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-31T16:38:20.405374Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-31T16:38:20.405437Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::ExecuteAnalyze. Table OperationId operationId, PathId [OwnerId: 72075186224037897, LocalPathId: 4], AnalyzedShards 1 2025-03-31T16:38:20.433260Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-31T16:38:21.758975Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:38:21.759047Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-03-31T16:38:21.759076Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-03-31T16:38:21.759324Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableRequest::Complete. Send 1 events. 2025-03-31T16:38:21.759812Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Execute 2025-03-31T16:38:21.759912Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Execute. All shards are analyzed 2025-03-31T16:38:21.781421Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-03-31T16:38:23.005525Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-31T16:38:23.005609Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-03-31T16:38:23.005660Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-03-31T16:38:24.266654Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-31T16:38:24.266878Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-31T16:38:24.279931Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:38:24.280046Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-03-31T16:38:24.280083Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-31T16:38:24.280770Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-31T16:38:24.298690Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-31T16:38:24.299072Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-31T16:38:24.299161Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-31T16:38:24.299500Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-03-31T16:38:24.324250Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-03-31T16:38:24.324410Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-03-31T16:38:24.325251Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7489:5450], server id = [2:7490:5451], tablet id = 72075186224037899, status = OK 2025-03-31T16:38:24.325342Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7489:5450], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T16:38:24.327224Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-03-31T16:38:24.327316Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-03-31T16:38:24.327448Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-03-31T16:38:24.327622Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-03-31T16:38:24.327932Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-31T16:38:24.329736Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7489:5450], server id = [2:7490:5451], tablet id = 72075186224037899 2025-03-31T16:38:24.329760Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:38:24.330368Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List<Uint32>; DECLARE $data AS List<String>; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-03-31T16:38:24.354872Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NGNjODY4NGUtNzAxNzJmMTItYjVmMDJlOGMtNjM2ZDlhOGE=, TxId: 2025-03-31T16:38:24.354941Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NGNjODY4NGUtNzAxNzJmMTItYjVmMDJlOGMtNjM2ZDlhOGE=, TxId: 2025-03-31T16:38:24.356643Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-31T16:38:24.374458Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-31T16:38:24.374533Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2803:3225] >> test.py::test_topic_data [GOOD] >> test.py::test_transfer_describe >> test.py::test_transfer_describe [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListBindings [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeBinding >> test_cms_state_storage.py::TestCmsStateStorageSimpleMax::test_check_shutdown_state_storage_nodes >> Coordinator::RestoreTenantConfiguration-AlterDatabaseCreateHiveFirst-true [GOOD] >> Coordinator::LastEmptyStepResent >> MediatorTest::WatchesBeforeFirstStep [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeBinding [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyBinding >> TestPurecalcFilter::Simple2 [GOOD] >> MediatorTest::RebootTargetTablets |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/tests/py3test >> test.py::test_transfer_describe [GOOD] |98.1%| [TM] {RESULT} ydb/core/viewer/tests/py3test >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyBinding [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDeleteBinding >> TestPurecalcFilter::ManyValues >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_async_table-True] |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_async_table-True] >> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--false] [GOOD] >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_simple_table-False] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDeleteBinding [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateQuery >> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--true] |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateQuery [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendListQueries >> TMemoryController::MemTable [GOOD] >> TMemoryController::ResourceBroker >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32--2147483648-True] >> DataShardReplication::ReplicatedTable-UseSink [GOOD] >> DataShardReplication::ApplyChangesToReplicatedTable >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendListQueries [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeQuery |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test >> KqpTpch::Query16 [GOOD] >> KqpTpch::Query17 >> TCreateAndDropViewTest::DropViewDisabledFeatureFlag [GOOD] >> TCreateAndDropViewTest::DropNonexistingView >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_table-True] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeQuery [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendGetQueryStatus >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeReqDistribution [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendGetQueryStatus [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyQuery >> KeyValueGRPCService::SimpleGetStorageChannelStatus [GOOD] >> KeyValueGRPCService::SimpleCreateAlterDropVolume >> 
MediatorTest::RebootTargetTablets [GOOD] >> MediatorTest::ResendSubset ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeReqDistribution [GOOD] Test command err: 2025-03-31T16:35:54.537698Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:35:54.538023Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:35:54.538148Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000eac/r3tmp/tmpGVD73I/pdisk_1.dat 2025-03-31T16:35:54.933236Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27945, node 1 2025-03-31T16:35:55.208348Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:55.208418Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:55.208454Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:55.208965Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:35:55.211539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:35:55.311845Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:55.312011Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:55.327226Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21180 2025-03-31T16:35:55.843024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:35:58.953208Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-31T16:35:58.992714Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:58.992849Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:59.021734Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:35:59.024082Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:35:59.271419Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:35:59.272043Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:35:59.272655Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:35:59.272827Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:35:59.273042Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:35:59.273121Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:35:59.273224Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:35:59.273276Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:35:59.273341Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:35:59.437209Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:59.437330Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:59.453330Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:35:59.626231Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:35:59.683044Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-31T16:35:59.683184Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-31T16:35:59.720485Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-31T16:35:59.721873Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-31T16:35:59.722112Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-31T16:35:59.722190Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-31T16:35:59.722255Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-31T16:35:59.722322Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-31T16:35:59.722402Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-31T16:35:59.722461Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-31T16:35:59.722972Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-31T16:35:59.772172Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-31T16:35:59.772282Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1872:2596], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-31T16:35:59.779553Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1883:2604] 2025-03-31T16:35:59.786989Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1915:2621] 2025-03-31T16:35:59.787120Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1915:2621], schemeshard id = 72075186224037897 2025-03-31T16:35:59.790937Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-31T16:35:59.816994Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-31T16:35:59.817050Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-31T16:35:59.817118Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-31T16:35:59.831745Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-31T16:35:59.840083Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-31T16:35:59.840236Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-31T16:36:00.076852Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-31T16:36:00.264040Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-31T16:36:00.341893Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-31T16:36:01.490705Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2236:3073], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:01.490831Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:01.514281Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-31T16:36:01.884283Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:36:01.884469Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:36:01.884716Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:36:01.884817Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:36:01.884916Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:36:01.885003Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:36:01.885076Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:36:01.885158Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:36:01.885244Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:36:01.885325Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:36:01.885461Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:36:01.885539Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:36:01.914132Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2397:2889];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:36:01.914233Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2397:2889];tablet_id=72075186224037900;process= ... ] TTxNavigate::Complete 2025-03-31T16:38:31.780908Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-31T16:38:31.782330Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-31T16:38:31.782412Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-31T16:38:31.783383Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-03-31T16:38:31.852895Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-03-31T16:38:31.853076Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-03-31T16:38:31.853966Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8648:6513], server id = [2:8653:6518], tablet id = 72075186224037899, status = OK 2025-03-31T16:38:31.854409Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8648:6513], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T16:38:31.854764Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8649:6514], server id = [2:8654:6519], tablet id = 72075186224037900, status = OK 2025-03-31T16:38:31.854826Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8649:6514], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T16:38:31.856778Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8650:6515], server id = [2:8655:6520], tablet id = 72075186224037901, status = OK 2025-03-31T16:38:31.856859Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8650:6515], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T16:38:31.857983Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8651:6516], server id = [2:8656:6521], tablet id = 72075186224037902, status = OK 2025-03-31T16:38:31.858042Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8651:6516], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T16:38:31.858985Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8652:6517], server id = [2:8659:6524], tablet id = 72075186224037903, status = OK 2025-03-31T16:38:31.859044Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8652:6517], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T16:38:31.872538Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-03-31T16:38:31.873211Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8648:6513], server id = [2:8653:6518], tablet id = 72075186224037899 2025-03-31T16:38:31.873256Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:38:31.880414Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-03-31T16:38:31.881062Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8649:6514], server id = [2:8654:6519], tablet id = 72075186224037900 2025-03-31T16:38:31.881095Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:38:31.881451Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8671:6533], 
server id = [2:8672:6534], tablet id = 72075186224037904, status = OK 2025-03-31T16:38:31.881611Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8671:6533], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T16:38:31.883781Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8673:6535], server id = [2:8676:6536], tablet id = 72075186224037905, status = OK 2025-03-31T16:38:31.883867Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8673:6535], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T16:38:31.884767Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-03-31T16:38:31.885706Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8650:6515], server id = [2:8655:6520], tablet id = 72075186224037901 2025-03-31T16:38:31.885736Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:38:31.893911Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-03-31T16:38:31.894810Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8651:6516], server id = [2:8656:6521], tablet id = 72075186224037902 2025-03-31T16:38:31.894855Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:38:31.895440Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2025-03-31T16:38:31.895885Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8679:6539], server id = [2:8683:6543], tablet id = 72075186224037906, status = OK 2025-03-31T16:38:31.895969Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8679:6539], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T16:38:31.896447Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8652:6517], server id = [2:8659:6524], tablet id = 72075186224037903 2025-03-31T16:38:31.896475Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:38:31.897707Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8682:6542], server id = [2:8685:6545], tablet id = 72075186224037907, status = OK 2025-03-31T16:38:31.897781Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8682:6542], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T16:38:31.898386Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8684:6544], server id = [2:8687:6546], tablet id = 72075186224037908, status = OK 2025-03-31T16:38:31.898446Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8684:6544], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T16:38:31.901782Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-03-31T16:38:31.902611Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8671:6533], server id = [2:8672:6534], tablet id = 72075186224037904 2025-03-31T16:38:31.902643Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:38:31.903733Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-03-31T16:38:31.904136Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8673:6535], server id = [2:8676:6536], tablet id = 72075186224037905 2025-03-31T16:38:31.904166Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:38:31.906027Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-03-31T16:38:31.906556Z node 2 :STATISTICS DEBUG: 
EvClientDestroyed, node id = 2, client id = [2:8679:6539], server id = [2:8683:6543], tablet id = 72075186224037906 2025-03-31T16:38:31.906590Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:38:31.907407Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-03-31T16:38:31.907624Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-03-31T16:38:31.907667Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-03-31T16:38:31.907909Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-03-31T16:38:31.908129Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-03-31T16:38:31.908385Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-31T16:38:31.911233Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8682:6542], server id = [2:8685:6545], tablet id = 72075186224037907 2025-03-31T16:38:31.911272Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:38:31.911824Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8684:6544], server id = [2:8687:6546], tablet id = 72075186224037908 2025-03-31T16:38:31.911852Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:38:31.912158Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List<Uint32>; DECLARE $data AS List<String>; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-03-31T16:38:31.964428Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8714:6569]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:38:31.964695Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-31T16:38:31.964751Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8714:6569], StatRequests.size() = 1 2025-03-31T16:38:32.161231Z node 2 :SYSTEM_VIEWS WARN: [72075186224037891] TEvIntervalQuerySummary, time mismath: node id# 2, interval end# 1970-01-01T00:02:03.000000Z, event interval end# 2025-03-31T16:38:30.000000Z 2025-03-31T16:38:32.161857Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=OTEzM2Q0Yy1mNzJiMjBjOC01MGQyNzhkNC1iYzc4ZDhlYg==, TxId: 2025-03-31T16:38:32.161941Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OTEzM2Q0Yy1mNzJiMjBjOC01MGQyNzhkNC1iYzc4ZDhlYg==, TxId: 2025-03-31T16:38:32.162654Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-31T16:38:32.197508Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:8729:6575] 2025-03-31T16:38:32.197646Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8598:6482], server id = [2:8729:6575], tablet id = 72075186224037894, status = OK 2025-03-31T16:38:32.197831Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:8729:6575], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2025-03-31T16:38:32.198013Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = 
[2:8730:6576] 2025-03-31T16:38:32.198128Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:8730:6576], schemeshard id = 72075186224037897 2025-03-31T16:38:32.218717Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-31T16:38:32.218791Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-03-31T16:38:32.328147Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:8733:6579]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-03-31T16:38:32.328530Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-03-31T16:38:32.328604Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-03-31T16:38:32.336729Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-03-31T16:38:32.336817Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-03-31T16:38:32.336885Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-03-31T16:38:32.358566Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 >> AnalyzeColumnshard::AnalyzeRebootSaBeforeReqDistribution [GOOD] >> QueryActorTest::StreamQuery [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyQuery [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDeleteQuery >> Coordinator::LastEmptyStepResent [GOOD] >> CoordinatorVolatile::PlanResentOnReboots >> TestPurecalcFilter::ManyValues [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDeleteQuery [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendControlQuery |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootSaBeforeReqDistribution [GOOD] Test command err: 2025-03-31T16:35:58.968445Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:35:58.968862Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:35:58.969012Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ee5/r3tmp/tmpTwa8vk/pdisk_1.dat 2025-03-31T16:35:59.393284Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4259, node 1 2025-03-31T16:35:59.646669Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:59.646741Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:59.646785Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:59.647395Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:35:59.654079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:35:59.750877Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:59.751021Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:59.766480Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30849 2025-03-31T16:36:00.315117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:36:03.483231Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-31T16:36:03.510221Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:36:03.510361Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:36:03.548981Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:36:03.551170Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:36:03.796078Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:36:03.796672Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:36:03.797312Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:36:03.797449Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:36:03.797722Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:36:03.797843Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:36:03.797932Z node 2 :HIVE WARN: HIVE#72075186224037888
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:36:03.798029Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:36:03.798102Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:36:03.952057Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:36:03.952163Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:36:03.965192Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:36:04.097585Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:36:04.145330Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-31T16:36:04.145438Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-31T16:36:04.171167Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-31T16:36:04.171721Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-31T16:36:04.171944Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-31T16:36:04.172014Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-31T16:36:04.172069Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-31T16:36:04.172140Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-31T16:36:04.172202Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-31T16:36:04.172262Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-31T16:36:04.174155Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-31T16:36:04.207977Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-31T16:36:04.208097Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1870:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-31T16:36:04.216778Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1881:2607] 2025-03-31T16:36:04.220481Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1904:2618] 2025-03-31T16:36:04.220605Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1904:2618], schemeshard id = 72075186224037897 2025-03-31T16:36:04.230907Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-31T16:36:04.254583Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-31T16:36:04.254636Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-31T16:36:04.254695Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-31T16:36:04.299333Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-31T16:36:04.307611Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-31T16:36:04.307767Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-31T16:36:04.490437Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-31T16:36:04.636593Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-31T16:36:04.769846Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-31T16:36:05.736521Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2238:3072], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:05.736672Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:05.757662Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-31T16:36:05.888467Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:36:05.888693Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:36:05.889007Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:36:05.889177Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:36:05.889341Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:36:05.889508Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:36:05.889633Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:36:05.889785Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:36:05.889917Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:36:05.890057Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:36:05.890201Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:36:05.890337Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:36:05.921032Z node 2 :TX_COLUMNSHARD WARN:
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:36:05.921141Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;descr ... 037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-31T16:38:31.027242Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7127:5236], server id = [2:7128:5237], tablet id = 72075186224037894 2025-03-31T16:38:31.027330Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:7253:5312] 2025-03-31T16:38:31.027384Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:7253:5312] 2025-03-31T16:38:31.088576Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-31T16:38:31.088703Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-31T16:38:31.089355Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-31T16:38:31.090418Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-31T16:38:31.090784Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded database: /Root/Database 2025-03-31T16:38:31.090841Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal start key 2025-03-31T16:38:31.090885Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal table owner id: 72075186224037897 2025-03-31T16:38:31.090927Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal table local path id: 4 2025-03-31T16:38:31.090967Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal start time: 1743439111002837 2025-03-31T16:38:31.091004Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal IsColumnTable: 1 2025-03-31T16:38:31.091748Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 1 2025-03-31T16:38:31.091848Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-31T16:38:31.091966Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 2 2025-03-31T16:38:31.092070Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 1 2025-03-31T16:38:31.092175Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 1 2025-03-31T16:38:31.092272Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-31T16:38:31.092447Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete. Start navigate. PathId [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-31T16:38:31.093481Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-31T16:38:31.094345Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-31T16:38:31.094432Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-31T16:38:31.094609Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-31T16:38:31.096302Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-31T16:38:31.096398Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-31T16:38:31.099072Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-03-31T16:38:31.162369Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-03-31T16:38:31.162715Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-03-31T16:38:31.163535Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7300:5343], server id = [2:7301:5344], tablet id = 72075186224037899, status = OK 2025-03-31T16:38:31.163659Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7300:5343], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T16:38:31.168864Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-03-31T16:38:31.169011Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-03-31T16:38:31.169231Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-03-31T16:38:31.169463Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-03-31T16:38:31.170382Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-31T16:38:31.177143Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7300:5343], server id = [2:7301:5344], tablet id = 72075186224037899 2025-03-31T16:38:31.177211Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:38:31.177986Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-03-31T16:38:31.246045Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7321:5363]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:38:31.246408Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-31T16:38:31.246478Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7321:5363], StatRequests.size() = 1 2025-03-31T16:38:31.404684Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NDJhNzdhM2QtNTE1ZDM3ZWItOTRiZDEwZi01ZDZmMjZmMg==, TxId: 2025-03-31T16:38:31.404764Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NDJhNzdhM2QtNTE1ZDM3ZWItOTRiZDEwZi01ZDZmMjZmMg==, TxId: 2025-03-31T16:38:31.405417Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-31T16:38:31.421505Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7336:5369] 2025-03-31T16:38:31.421815Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:7336:5369], schemeshard id = 72075186224037897 2025-03-31T16:38:31.422008Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7253:5312], server id = [2:7337:5370], tablet id = 72075186224037894, status = OK 2025-03-31T16:38:31.422057Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7337:5370] 2025-03-31T16:38:31.422168Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:7337:5370], node id = 2, have schemeshards count = 1, need schemeshards count = 0 
2025-03-31T16:38:31.444406Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-31T16:38:31.444479Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-03-31T16:38:31.564976Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7348:5373] 2025-03-31T16:38:31.565830Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:2801:3221] , Record { OperationId: "operationId" Tables { PathId { OwnerId: 72075186224037897 LocalId: 4 } } Types: TYPE_COUNT_MIN_SKETCH } 2025-03-31T16:38:31.565897Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. Update existing force traversal. OperationId operationId , ReplyToActorId [1:2801:3221] 2025-03-31T16:38:31.565969Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Complete 2025-03-31T16:38:32.116031Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 0 2025-03-31T16:38:32.116134Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-03-31T16:38:33.016286Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-31T16:38:33.016385Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-03-31T16:38:33.016450Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-03-31T16:38:34.472028Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:38:34.472194Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-03-31T16:38:34.472250Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-31T16:38:34.472925Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-31T16:38:34.500813Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-31T16:38:34.501299Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-31T16:38:34.501382Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-31T16:38:34.501886Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-03-31T16:38:34.519828Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-03-31T16:38:34.520058Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-03-31T16:38:34.520688Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7420:5416], server id = [2:7421:5417], tablet id = 72075186224037899, status = OK 2025-03-31T16:38:34.520830Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7420:5416], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T16:38:34.522442Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-03-31T16:38:34.522550Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-03-31T16:38:34.522717Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-03-31T16:38:34.522918Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-03-31T16:38:34.524548Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-31T16:38:34.527472Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7420:5416], server id = [2:7421:5417], tablet id = 72075186224037899 2025-03-31T16:38:34.527522Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:38:34.529422Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-03-31T16:38:34.562233Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ODlhMjBkYTUtM2Q3NDZlNGQtZmQ3ZTA4YTQtYTE5N2UwZDY=, TxId: 2025-03-31T16:38:34.562316Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ODlhMjBkYTUtM2Q3NDZlNGQtZmQ3ZTA4YTQtYTE5N2UwZDY=, TxId: 2025-03-31T16:38:34.562974Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-31T16:38:34.594859Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-31T16:38:34.594943Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2801:3221] >> QuoterWithKesusTest::KesusRecreation [GOOD] >> QuoterWithKesusTest::AllocationStatistics >> TestPurecalcFilter::NullValues |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendControlQuery [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendGetResultData >> TopicSessionTests::RestartSessionIfQueryStopped [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/library/query_actor/ut/unittest >> QueryActorTest::StreamQuery [GOOD] Test command err: 2025-03-31T16:38:06.069896Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488013859285919460:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:38:06.069976Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00053e/r3tmp/tmp9zMnEZ/pdisk_1.dat 2025-03-31T16:38:06.862427Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:38:06.939507Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:38:06.940593Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:38:06.946394Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:63015 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:38:07.337537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:38:07.357377Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:38:07.384906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:38:07.548863Z node 1 :KQP_PROXY DEBUG: [TQueryBase] Bootstrap. 
Database: dc-1 2025-03-31T16:38:09.874459Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-03-31T16:38:09.903589Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-03-31T16:38:09.924102Z node 1 :KQP_PROXY DEBUG: Request has 18445000634619.627559s seconds to be completed 2025-03-31T16:38:09.959210Z node 1 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=1&id=MjExZmRjY2EtMTRhZmM0ZTUtNTkzMTNkYWItOGYxN2IzZDc=, workerId: [1:7488013872170822051:2315], database: /dc-1, longSession: 1, local sessions count: 1 2025-03-31T16:38:09.959268Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-03-31T16:38:09.959512Z node 1 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-03-31T16:38:09.959668Z node 1 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-03-31T16:38:09.959691Z node 1 :KQP_PROXY DEBUG: Updated table service config. 2025-03-31T16:38:09.959724Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-03-31T16:38:09.959761Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-03-31T16:38:09.959836Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T16:38:09.959877Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T16:38:09.963606Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T16:38:09.963663Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T16:38:09.963687Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T16:38:09.975206Z node 1 :KQP_PROXY DEBUG: [TQueryBase] RunDataQuery: SELECT 42 2025-03-31T16:38:09.996334Z node 1 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=1&id=MjExZmRjY2EtMTRhZmM0ZTUtNTkzMTNkYWItOGYxN2IzZDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [1:7488013872170822051:2315] 2025-03-31T16:38:09.996397Z node 1 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 3 timeout: 300.000000s actor id: [1:7488013872170822053:2362] 2025-03-31T16:38:10.005652Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013872170822054:2317], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:38:10.005828Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:38:10.006287Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013876465789362:2320], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:38:10.018917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-31T16:38:10.037618Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488013876465789364:2321], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-31T16:38:10.109960Z node 1 :TX_PROXY ERROR: Actor# [1:7488013876465789417:2398] txid# 281474976710660, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:38:11.070711Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488013859285919460:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:38:11.070777Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:38:11.074166Z node 1 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 3, sender: [1:7488013872170822052:2316], selfId: [1:7488013859285919702:2278], source: [1:7488013872170822051:2315] 2025-03-31T16:38:11.074596Z node 1 :KQP_PROXY DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=MjExZmRjY2EtMTRhZmM0ZTUtNTkzMTNkYWItOGYxN2IzZDc=, TxId: 2025-03-31T16:38:11.075740Z node 1 :KQP_PROXY DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=MjExZmRjY2EtMTRhZmM0ZTUtNTkzMTNkYWItOGYxN2IzZDc=, TxId: 2025-03-31T16:38:11.077749Z node 1 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=1&id=MjExZmRjY2EtMTRhZmM0ZTUtNTkzMTNkYWItOGYxN2IzZDc=, workerId: [1:7488013872170822051:2315], local sessions count: 0 2025-03-31T16:38:11.078063Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T16:38:11.746723Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488013882278097241:2209];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00053e/r3tmp/tmpRW7xIl/pdisk_1.dat 2025-03-31T16:38:11.790058Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:38:11.861205Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:38:11.892110Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:38:11.892211Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:38:11.898404Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2944 WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:38:12.092004Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:38:12.104497Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:38:12.117056Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:38:12.190748Z node 2 :KQP_PROXY DEBUG: [TQueryBase] Bootstrap. Database: dc-1 2025-03-31T16:38:14.945846Z node 2 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-03-31T16:38:14.946734Z node 2 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-03-31T16:38:14.956197Z node 2 :KQP_PROXY DEBUG: Request has 18445000634614.595469s seconds to be completed 2025-03-31T16:38:14.958122Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=ZDIxNTViLTk0MTYwMGE5LWNhNmRlMjRhLWY4NDlkM2Fi, workerId: [2:7488013895162999664:2315], database: /dc-1, longSession: ... ROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:38:22.582580Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:38:22.605662Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:38:22.605747Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:38:22.609344Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25917 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-31T16:38:22.947634Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:38:22.960038Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:38:23.032235Z node 4 :KQP_PROXY DEBUG: [TQueryBase] Bootstrap. Database: dc-1 2025-03-31T16:38:26.648829Z node 4 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-03-31T16:38:26.649729Z node 4 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-03-31T16:38:26.655535Z node 4 :KQP_PROXY DEBUG: Request has 18445000634602.896116s seconds to be completed 2025-03-31T16:38:26.658622Z node 4 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=4&id=ZTUyNTg3OWUtZjEzOTU0OGYtOGE1ZjcwMzUtM2I4MGI4OWQ=, workerId: [4:7488013945132942980:2315], database: /dc-1, longSession: 1, local sessions count: 1 2025-03-31T16:38:26.658664Z node 4 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-03-31T16:38:26.658856Z node 4 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-03-31T16:38:26.658971Z node 4 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-03-31T16:38:26.659035Z node 4 :KQP_PROXY DEBUG: Updated table service config. 
2025-03-31T16:38:26.659064Z node 4 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-03-31T16:38:26.659167Z node 4 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-03-31T16:38:26.659213Z node 4 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T16:38:26.659267Z node 4 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T16:38:26.660368Z node 4 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T16:38:26.660412Z node 4 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T16:38:26.660430Z node 4 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T16:38:26.660741Z node 4 :KQP_PROXY DEBUG: [TQueryBase] RunStreamQuery: DECLARE $value AS Text; DECLARE $table_size AS Uint64; SELECT x FROM AS_TABLE( ()->(Yql::ToStream(ListReplicate(<|x:$value|>, $table_size))) ); 2025-03-31T16:38:26.662948Z node 4 :KQP_PROXY DEBUG: [TQueryBase] Start read next stream part 2025-03-31T16:38:26.665643Z node 4 :KQP_PROXY DEBUG: TraceId: "01jqph34k657tsgpqczacqmy6y", Created new session, sessionId: ydb://session/3?node_id=4&id=YThhNmM3ODUtMjhjYTAzODItYzk5YTk5ZTMtNDYzNGE0MzE=, workerId: [4:7488013945132943007:2317], database: /dc-1, longSession: 0, local sessions count: 2 2025-03-31T16:38:26.665874Z node 4 :KQP_PROXY DEBUG: Ctx: { TraceId: 01jqph34k657tsgpqczacqmy6y, Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=4&id=YThhNmM3ODUtMjhjYTAzODItYzk5YTk5ZTMtNDYzNGE0MzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [4:7488013945132943007:2317] 2025-03-31T16:38:26.665907Z node 4 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 3 timeout: 600.000000s actor id: [4:7488013945132943008:2360] 2025-03-31T16:38:26.667548Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488013945132943009:2318], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:38:26.667638Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:38:26.668218Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7488013945132943021:2321], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:38:26.671730Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-31T16:38:26.684075Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7488013945132943023:2322], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-31T16:38:26.776157Z node 4 :TX_PROXY ERROR: Actor# [4:7488013945132943074:2395] txid# 281474976710660, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:38:27.307584Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7488013927953073282:2211];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:38:27.307683Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:38:27.954415Z node 4 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T16:38:32.692774Z node 4 :KQP_PROXY DEBUG: [TQueryBase] TEvStreamQueryResultPart SUCCESS, Issues: 2025-03-31T16:38:32.698820Z node 4 :KQP_PROXY DEBUG: [TQueryBase] Cancel stream request 2025-03-31T16:38:32.698918Z node 4 :KQP_PROXY DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=4&id=ZTUyNTg3OWUtZjEzOTU0OGYtOGE1ZjcwMzUtM2I4MGI4OWQ=, TxId: 2025-03-31T16:38:32.703057Z node 4 :KQP_PROXY DEBUG: [TQueryBase] Bootstrap. Database: dc-1 2025-03-31T16:38:33.045351Z node 4 :RPC_REQUEST WARN: Client lost 2025-03-31T16:38:33.282824Z node 4 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=4&id=ZTUyNTg3OWUtZjEzOTU0OGYtOGE1ZjcwMzUtM2I4MGI4OWQ=, workerId: [4:7488013945132942980:2315], local sessions count: 1 2025-03-31T16:38:33.283129Z node 4 :KQP_PROXY DEBUG: Request has 18445000634596.268504s seconds to be completed 2025-03-31T16:38:33.288927Z node 4 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=4&id=MjJkZjQxYTMtNjQ3OTE4MTctNTRmNDM4NzItZDhjNjc5ODM=, workerId: [4:7488013975197714228:2348], database: /dc-1, longSession: 1, local sessions count: 2 2025-03-31T16:38:33.289143Z node 4 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-03-31T16:38:33.289187Z node 4 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-31T16:38:33.289973Z node 4 :KQP_PROXY DEBUG: [TQueryBase] RunStreamQuery: DECLARE $value AS Text; DECLARE $table_size AS Uint64; SELECT x FROM AS_TABLE( ()->(Yql::ToStream(ListReplicate(<|x:$value|>, $table_size))) ); 2025-03-31T16:38:33.290079Z node 4 :KQP_PROXY DEBUG: [TQueryBase] Start read next stream part 2025-03-31T16:38:33.291936Z node 4 :KQP_PROXY DEBUG: TraceId: "01jqph3b2a0xd4jnyp9x4cd4tv", Created new session, sessionId: ydb://session/3?node_id=4&id=YWQ1ZDMxODQtZTNmOTRiNTYtYjk1NDRlNGYtMjQxOWNhYTE=, workerId: [4:7488013975197714232:2349], database: /dc-1, longSession: 0, local sessions count: 3 2025-03-31T16:38:33.292124Z node 4 :KQP_PROXY DEBUG: Ctx: { TraceId: 01jqph3b2a0xd4jnyp9x4cd4tv, Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=4&id=YWQ1ZDMxODQtZTNmOTRiNTYtYjk1NDRlNGYtMjQxOWNhYTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s.
Send request to target, requestId: 5, targetId: [4:7488013975197714232:2349] 2025-03-31T16:38:33.292153Z node 4 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 5 timeout: 600.000000s actor id: [4:7488013975197714233:2440] 2025-03-31T16:38:33.444472Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743439113419, txId: 281474976710663] shutting down 2025-03-31T16:38:33.449395Z node 4 :KQP_PROXY DEBUG: [TQueryBase] TEvStreamQueryResultPart SUCCESS, Issues: 2025-03-31T16:38:33.452195Z node 4 :KQP_PROXY DEBUG: TraceId: "01jqph3b2a0xd4jnyp9x4cd4tv", Forwarded response to sender actor, requestId: 5, sender: [4:7488013975197714230:2434], selfId: [4:7488013927953073125:2068], source: [4:7488013975197714232:2349] 2025-03-31T16:38:33.452756Z node 4 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=4&id=YWQ1ZDMxODQtZTNmOTRiNTYtYjk1NDRlNGYtMjQxOWNhYTE=, workerId: [4:7488013975197714232:2349], local sessions count: 2 2025-03-31T16:38:33.462074Z node 4 :KQP_PROXY DEBUG: [TQueryBase] Start read next stream part 2025-03-31T16:38:33.462524Z node 4 :KQP_PROXY DEBUG: [TQueryBase] TEvStreamQueryResultPart SUCCESS, Issues: 2025-03-31T16:38:33.462625Z node 4 :KQP_PROXY DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=4&id=MjJkZjQxYTMtNjQ3OTE4MTctNTRmNDM4NzItZDhjNjc5ODM=, TxId: 2025-03-31T16:38:33.463781Z node 4 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=4&id=MjJkZjQxYTMtNjQ3OTE4MTctNTRmNDM4NzItZDhjNjc5ODM=, workerId: [4:7488013975197714228:2348], local sessions count: 1 |98.2%| [TS] {RESULT} ydb/library/query_actor/ut/unittest >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_simple_table-False] >> RowDispatcherTests::OneClientOneSession >> RowDispatcherTests::OneClientOneSession [GOOD] >> RowDispatcherTests::TwoClientOneSession >> RowDispatcherTests::TwoClientOneSession [GOOD] >> DataShardReplication::ApplyChangesToReplicatedTable [GOOD] >> DataShardReplication::ApplyChangesToCommonTable >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendGetResultData [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendListJobs >> RowDispatcherTests::SessionError >> RowDispatcherTests::SessionError [GOOD] >> RowDispatcherTests::CoordinatorSubscribe >> RowDispatcherTests::CoordinatorSubscribe [GOOD] >> KqpTpch::Query17 [GOOD] >> KqpTpch::Query18 >> RowDispatcherTests::CoordinatorSubscribeBeforeCoordinatorChanged >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendListJobs [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeJob >> RowDispatcherTests::CoordinatorSubscribeBeforeCoordinatorChanged [GOOD] >> MediatorTest::ResendSubset [GOOD] >> RowDispatcherTests::TwoClients4Sessions >> RowDispatcherTests::TwoClients4Sessions [GOOD] >> RowDispatcherTests::ReinitConsumerIfNewGeneration >> RowDispatcherTests::ReinitConsumerIfNewGeneration [GOOD] >> RowDispatcherTests::HandleTEvUndelivered >> RowDispatcherTests::HandleTEvUndelivered [GOOD] >> RowDispatcherTests::TwoClientTwoConnection >> TMemoryController::ResourceBroker [GOOD] >> TMemoryController::ResourceBroker_ConfigLimit >> MediatorTest::ResendNotSubset >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeJob [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateConnection >> RowDispatcherTests::TwoClientTwoConnection [GOOD] >> RowDispatcherTests::ProcessNoSession >> TCreateAndDropViewTest::DropNonexistingView [FAIL] >> 
TCreateAndDropViewTest::CallDropViewOnTable >> RowDispatcherTests::ProcessNoSession [GOOD] >> RowDispatcherTests::IgnoreWrongPartitionId >> RowDispatcherTests::IgnoreWrongPartitionId [GOOD] >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_table-True] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateConnection [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateConnectionWithServiceAccount >> test_config_with_metadata.py::TestConfigWithMetadataBlock::test_cluster_is_operational_with_metadata [GOOD] >> test_unknown_data_source.py::TestUnknownDataSource::test_should_fail_unknown_data_source[v1-client0] [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendListConnections >> test_config_with_metadata.py::TestKiKiMRWithMetadata::test_cluster_is_operational_with_metadata >> DataShardStats::SharedCacheGarbage [GOOD] >> DataShardStats::CollectStatsForSeveralParts >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendListConnections [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeConnection >> CoordinatorVolatile::PlanResentOnReboots [GOOD] >> CoordinatorVolatile::MediatorReconnectPlanRace >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeConnection [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyConnection >> TestPurecalcFilter::NullValues [GOOD] |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test >> KqpTpch::Query18 [GOOD] >> KqpTpch::Query19 >> KeyValueGRPCService::SimpleCreateAlterDropVolume [GOOD] >> KeyValueGRPCService::SimpleListPartitions [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyConnection [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyConnectionWithServiceAccount >> TestPurecalcFilter::PartialPush >> DataShardReplication::ApplyChangesToCommonTable [GOOD] >> DataShardReplication::ApplyChangesWithConcurrentTx >> TraverseColumnShard::TraverseColumnTableAggrStatNonLocalTablet [GOOD] >> MediatorTest::ResendNotSubset [GOOD] >> TTxDataShardMiniKQL::WriteAndReadMany [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDeleteConnection >> MediatorTest::OneCoordinatorResendTxNotLost ------- [TM] {asan, default-linux-x86_64, release} ydb/services/keyvalue/ut/unittest >> KeyValueGRPCService::SimpleListPartitions [GOOD] Test command err: 2025-03-31T16:36:52.601604Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488013540000727904:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:36:52.601678Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0004d2/r3tmp/tmpyVtlqx/pdisk_1.dat 2025-03-31T16:36:53.101312Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:36:53.113412Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:36:53.113527Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:36:53.127619Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2615, node 1 2025-03-31T16:36:53.195380Z node 1 :GRPC_SERVER NOTICE: Grpc request proxy started, nodeid# 1, serve as static node 2025-03-31T16:36:53.195718Z node 1 :GRPC_SERVER DEBUG: Subscribe to /Root 2025-03-31T16:36:53.195779Z node 1 :GRPC_SERVER NOTICE: Grpc request proxy started, nodeid# 1, serve as static node 2025-03-31T16:36:53.195849Z node 1 :GRPC_SERVER DEBUG: Subscribe to /Root 2025-03-31T16:36:53.196813Z node 1 :GRPC_SERVER INFO: Subscribed for config changes 2025-03-31T16:36:53.196835Z node 1 :GRPC_SERVER INFO: Updated app config 2025-03-31T16:36:53.196861Z node 1 :GRPC_SERVER INFO: Subscribed for config changes 2025-03-31T16:36:53.196871Z node 1 :GRPC_SERVER INFO: Updated app config 2025-03-31T16:36:53.201052Z node 1 :GRPC_SERVER DEBUG: Got proxy service configuration 2025-03-31T16:36:53.205226Z node 1 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-03-31T16:36:53.205315Z node 1 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-03-31T16:36:53.205450Z node 1 :GRPC_SERVER DEBUG: Got proxy service configuration 2025-03-31T16:36:53.205459Z node 1 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-03-31T16:36:53.205564Z node 1 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-03-31T16:36:53.253036Z node 1 :GRPC_SERVER DEBUG: [0x51a00002b280] created request Name# BlobStorageConfig 2025-03-31T16:36:53.254407Z node 1 :GRPC_SERVER DEBUG: [0x51a00002b880] created request Name# HiveCreateTablet 2025-03-31T16:36:53.254806Z node 1 :GRPC_SERVER DEBUG: [0x51a00002be80] created request Name# TabletStateRequest 2025-03-31T16:36:53.255105Z node 1 :GRPC_SERVER DEBUG: [0x51a00002c480] created request Name# SchemeOperationStatus 2025-03-31T16:36:53.256123Z node 1 :GRPC_SERVER DEBUG: [0x51a00002ca80] created request Name# ChooseProxy 2025-03-31T16:36:53.256466Z node 1 :GRPC_SERVER DEBUG: [0x51a00002d080] created request Name# ResolveNode 2025-03-31T16:36:53.257610Z node 1 :GRPC_SERVER DEBUG: [0x51a00002d680] created request Name# FillNode 2025-03-31T16:36:53.257962Z node 1 :GRPC_SERVER DEBUG: [0x51a00002dc80] created request Name# DrainNode 2025-03-31T16:36:53.258949Z node 1 :GRPC_SERVER DEBUG: [0x51a00002e280] created request Name# InterconnectDebug 2025-03-31T16:36:53.259292Z node 1 :GRPC_SERVER DEBUG: [0x51a00002e880] created request Name# TestShardControl 2025-03-31T16:36:53.260304Z node 1 :GRPC_SERVER DEBUG: [0x51a00002ee80] created request Name# RegisterNode 2025-03-31T16:36:53.260781Z node 1 :GRPC_SERVER DEBUG: [0x51a0000cfc80] created request Name# CmsRequest 2025-03-31T16:36:53.261096Z node 1 :GRPC_SERVER DEBUG: [0x51a0000d0280] created request Name# ConsoleRequest 2025-03-31T16:36:53.261336Z node 1 :GRPC_SERVER DEBUG: [0x51a0000d0880] created request Name# SchemeInitRoot 2025-03-31T16:36:53.261604Z node 1 :GRPC_SERVER DEBUG: [0x51a0000d0e80] created request Name# PersQueueRequest 2025-03-31T16:36:53.261832Z node 1 :GRPC_SERVER DEBUG: [0x51a0000d1480] created request Name# SchemeOperation 2025-03-31T16:36:53.262064Z node 1 :GRPC_SERVER DEBUG: [0x51a0000d1a80] created request Name# SchemeDescribe 2025-03-31T16:36:53.328960Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:36:53.328983Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:36:53.328991Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:36:53.329145Z 
node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1988 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:36:53.881970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "hdd2-pool" Kind: "hdd2" } StoragePools { Name: "hdd-pool" Kind: "hdd" } StoragePools { Name: "hdd1-pool" Kind: "hdd1" } StoragePools { Name: "ssd-pool" Kind: "ssd" } StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-03-31T16:36:53.888834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:36:53.890495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-03-31T16:36:53.892381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-31T16:36:53.892544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:36:53.895542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-03-31T16:36:53.896563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-03-31T16:36:53.896823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:36:53.896970Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-03-31T16:36:53.897003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-03-31T16:36:53.897021Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-03-31T16:36:53.898484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:36:53.898514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2025-03-31T16:36:53.898534Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:36:53.902213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:36:53.902256Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-03-31T16:36:53.902268Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-03-31T16:36:53.904837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:36:53.904884Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T16:36:53.904898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-31T16:36:53.904920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-03-31T16:36:53.909229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:36:53.911168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-03-31T16:36:53.911283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-03-31T16:36:53.913824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743439013960, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-31T16:36:53.914033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743439013960 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-03-31T16:36:53.914069Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-31T16:36:53.915338Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-03-31T16:36:53.915386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-31T16:36:53.915615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-03-31T16:36:53.915666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, 
LocalPathId: 1], at schemeshard: 72057594046644480 2025-03-31T16:36:53.917731Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-03-31T16:36:53.917765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-03-31T ... 1T16:38:37.624460Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [33:7488013984746060515:2369], at schemeshard: 72057594046644480, txId: 281474976710662, path id: 3 2025-03-31T16:38:37.624535Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:38:37.624586Z node 33 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDeleteParts opId# 281474976710662:0 ProgressState 2025-03-31T16:38:37.624682Z node 33 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710662:0 progress is 1/1 2025-03-31T16:38:37.624709Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710662 ready parts: 1/1 2025-03-31T16:38:37.624757Z node 33 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710662:0 progress is 1/1 2025-03-31T16:38:37.624779Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710662 ready parts: 1/1 2025-03-31T16:38:37.624808Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710662, ready parts: 1/1, is published: false 2025-03-31T16:38:37.624842Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710662 ready parts: 1/1 2025-03-31T16:38:37.624872Z node 33 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710662:0 2025-03-31T16:38:37.624891Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710662:0 2025-03-31T16:38:37.625094Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 5 2025-03-31T16:38:37.625128Z node 33 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710662, publications: 2, subscribers: 1 2025-03-31T16:38:37.625158Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710662, [OwnerId: 72057594046644480, LocalPathId: 2], 7 2025-03-31T16:38:37.625175Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710662, [OwnerId: 72057594046644480, LocalPathId: 3], 18446744073709551615 2025-03-31T16:38:37.626090Z node 33 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046644480, cookie: 281474976710662 2025-03-31T16:38:37.626222Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046644480, cookie: 281474976710662 2025-03-31T16:38:37.626242Z node 33 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976710662 2025-03-31T16:38:37.626271Z node 33 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710662, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 18446744073709551615 2025-03-31T16:38:37.626299Z node 33 :FLAT_TX_SCHEMESHARD 
DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4 2025-03-31T16:38:37.630217Z node 33 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976710662 2025-03-31T16:38:37.630384Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976710662 2025-03-31T16:38:37.630405Z node 33 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710662 2025-03-31T16:38:37.630436Z node 33 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710662, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 7 2025-03-31T16:38:37.630467Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-03-31T16:38:37.630575Z node 33 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710662, subscribers: 1 2025-03-31T16:38:37.630604Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [33:7488013993335995799:2346] 2025-03-31T16:38:37.634942Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2025-03-31T16:38:37.634978Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-03-31T16:38:37.634998Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:1 hive 72057594037968897 at ss 72057594046644480 2025-03-31T16:38:37.635196Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710662 2025-03-31T16:38:37.635241Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710662 2025-03-31T16:38:37.639570Z node 33 :GRPC_SERVER DEBUG: Got grpc request# ListDirectoryRequest, traceId# 01jqph3fa79tk8s8e92yaza1k3, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:49312, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-03-31T16:38:37.649205Z node 33 :GRPC_SERVER DEBUG: [0x51a000013880] received request Name# SchemeOperation ok# false data# peer# current inflight# 0 2025-03-31T16:38:37.649661Z node 33 :GRPC_SERVER DEBUG: [0x51a00006f080] received request Name# SchemeOperationStatus ok# false data# peer# current inflight# 0 2025-03-31T16:38:37.650017Z node 33 :GRPC_SERVER DEBUG: [0x51a000013280] received request Name# SchemeDescribe ok# false data# peer# current inflight# 0 2025-03-31T16:38:37.650392Z node 33 :GRPC_SERVER DEBUG: [0x51a000097280] received request Name# ChooseProxy ok# false data# peer# current inflight# 0 2025-03-31T16:38:37.650776Z node 33 :GRPC_SERVER DEBUG: [0x51a000013e80] received request Name# PersQueueRequest ok# false data# peer# current inflight# 0 2025-03-31T16:38:37.651154Z node 33 :GRPC_SERVER DEBUG: [0x51a000014480] received request Name# SchemeInitRoot ok# false data# peer# current inflight# 0 2025-03-31T16:38:37.651534Z node 33 :GRPC_SERVER DEBUG: 
[0x51a0000a3e80] received request Name# ResolveNode ok# false data# peer# current inflight# 0 2025-03-31T16:38:37.651957Z node 33 :GRPC_SERVER DEBUG: [0x51a0000e4680] received request Name# FillNode ok# false data# peer# current inflight# 0 2025-03-31T16:38:37.652352Z node 33 :GRPC_SERVER DEBUG: [0x51a000107a80] received request Name# DrainNode ok# false data# peer# current inflight# 0 2025-03-31T16:38:37.652731Z node 33 :GRPC_SERVER DEBUG: [0x51a00013c880] received request Name# BlobStorageConfig ok# false data# peer# current inflight# 0 2025-03-31T16:38:37.653104Z node 33 :GRPC_SERVER DEBUG: [0x51a00003d880] received request Name# HiveCreateTablet ok# false data# peer# current inflight# 0 2025-03-31T16:38:37.653476Z node 33 :GRPC_SERVER DEBUG: [0x51a000141c80] received request Name# TestShardControl ok# false data# peer# current inflight# 0 2025-03-31T16:38:37.653855Z node 33 :GRPC_SERVER DEBUG: [0x51a000016e80] received request Name# RegisterNode ok# false data# peer# current inflight# 0 2025-03-31T16:38:37.654209Z node 33 :GRPC_SERVER DEBUG: [0x51a0000f4e80] received request Name# CmsRequest ok# false data# peer# current inflight# 0 2025-03-31T16:38:37.654570Z node 33 :GRPC_SERVER DEBUG: [0x51a00008a680] received request Name# ConsoleRequest ok# false data# peer# current inflight# 0 2025-03-31T16:38:37.655714Z node 33 :GRPC_SERVER DEBUG: [0x51a000133280] received request Name# InterconnectDebug ok# false data# peer# current inflight# 0 2025-03-31T16:38:37.656152Z node 33 :GRPC_SERVER DEBUG: [0x51a000104a80] received request Name# TabletStateRequest ok# false data# peer# current inflight# 0 2025-03-31T16:38:37.656258Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-03-31T16:38:37.656772Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-03-31T16:38:37.657134Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-03-31T16:38:37.657394Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-03-31T16:38:37.657527Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2025-03-31T16:38:37.657666Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-03-31T16:38:37.657834Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-03-31T16:38:37.657869Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-03-31T16:38:37.657950Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-03-31T16:38:37.661078Z node 33 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 33, TabletId: 72075186224037889 not found 2025-03-31T16:38:37.661159Z node 33 :HIVE WARN: 
HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 33, TabletId: 72075186224037890 not found 2025-03-31T16:38:37.661186Z node 33 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 33, TabletId: 72075186224037888 not found 2025-03-31T16:38:37.663343Z node 33 :KEYVALUE DEBUG: KeyValue# 72075186224037889 OnTabletDead NKikimr::TEvTablet::TEvTabletDead 2025-03-31T16:38:37.664352Z node 33 :KEYVALUE DEBUG: KeyValue# 72075186224037890 OnTabletDead NKikimr::TEvTablet::TEvTabletDead 2025-03-31T16:38:37.664769Z node 33 :KEYVALUE DEBUG: KeyValue# 72075186224037888 OnTabletDead NKikimr::TEvTablet::TEvTabletDead 2025-03-31T16:38:37.669536Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2 2025-03-31T16:38:37.669588Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-03-31T16:38:37.669671Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-03-31T16:38:37.669683Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-03-31T16:38:37.669704Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:1 2025-03-31T16:38:37.669727Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2025-03-31T16:38:37.669776Z node 33 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 |98.2%| [TM] {RESULT} ydb/services/keyvalue/ut/unittest |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test >> test_alloc_default.py::TestAlloc::test_node_limit[kikimr0] [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDeleteConnection [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendTestConnection ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableAggrStatNonLocalTablet [GOOD] Test command err: 2025-03-31T16:36:03.375753Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:36:03.376040Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:36:03.376166Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ee4/r3tmp/tmpNYoRHl/pdisk_1.dat 2025-03-31T16:36:03.722183Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14910, node 1 2025-03-31T16:36:03.944419Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:36:03.944487Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:36:03.944536Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:36:03.944989Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:36:03.947205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:36:04.035476Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:36:04.035622Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:36:04.052496Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2712 2025-03-31T16:36:04.540849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:36:07.533586Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-31T16:36:07.576843Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:36:07.576975Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:36:07.606954Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:36:07.609446Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:36:07.853568Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:36:07.854264Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:36:07.854873Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:36:07.855023Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:36:07.855314Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:36:07.855431Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:36:07.855518Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:36:07.855615Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:36:07.855712Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:36:08.011061Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:36:08.011185Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:36:08.025016Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:36:08.158319Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:36:08.199523Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-31T16:36:08.199629Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-31T16:36:08.232201Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-31T16:36:08.233604Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-31T16:36:08.233847Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-31T16:36:08.233913Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-31T16:36:08.233972Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-31T16:36:08.234038Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-31T16:36:08.234097Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-31T16:36:08.234152Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-31T16:36:08.234670Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-31T16:36:08.271064Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-31T16:36:08.271237Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1872:2596], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-31T16:36:08.278718Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1883:2604] 2025-03-31T16:36:08.285491Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1915:2621] 2025-03-31T16:36:08.285605Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1915:2621], schemeshard id = 72075186224037897 2025-03-31T16:36:08.289726Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-31T16:36:08.316283Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-31T16:36:08.316351Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-31T16:36:08.316445Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-31T16:36:08.333688Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-31T16:36:08.347869Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-31T16:36:08.348021Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-31T16:36:08.565343Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-31T16:36:08.740206Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-31T16:36:08.839059Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-31T16:36:09.879882Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2236:3073], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:09.880063Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:09.899311Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-31T16:36:10.213064Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:36:10.213304Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:36:10.213674Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:36:10.213849Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:36:10.213995Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:36:10.214145Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:36:10.214278Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:36:10.214437Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:36:10.214623Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:36:10.214777Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:36:10.214916Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:36:10.215058Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2388:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:36:10.250573Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2397:2889];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:36:10.250682Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2397:2889];tablet_id=72075186224037900;process=T ... TEvStatisticsRequest send, client id = [2:8604:6482], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T16:38:40.879772Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8605:6483], server id = [2:8610:6488], tablet id = 72075186224037901, status = OK 2025-03-31T16:38:40.879872Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8605:6483], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T16:38:40.881725Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8606:6484], server id = [2:8611:6489], tablet id = 72075186224037902, status = OK 2025-03-31T16:38:40.881795Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8606:6484], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T16:38:40.882466Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8607:6485], server id = [2:8612:6490], tablet id = 72075186224037903, status = OK 2025-03-31T16:38:40.882527Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8607:6485], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T16:38:40.901373Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-03-31T16:38:40.902116Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8603:6481], server id = [2:8608:6486], tablet id = 72075186224037899 2025-03-31T16:38:40.902168Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:38:40.902935Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-03-31T16:38:40.903313Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8604:6482], server id = [2:8609:6487], tablet id = 72075186224037900 2025-03-31T16:38:40.903346Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:38:40.904475Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8627:6501], server id = [2:8630:6503], tablet id = 72075186224037904, status = OK 2025-03-31T16:38:40.904578Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8627:6501], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T16:38:40.905502Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-03-31T16:38:40.906486Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8629:6502], server id = [2:8631:6504], tablet id = 72075186224037905, status = OK 2025-03-31T16:38:40.906580Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8629:6502], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T16:38:40.907468Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8605:6483], server id = [2:8610:6488], tablet id = 72075186224037901 2025-03-31T16:38:40.907521Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:38:40.908607Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-03-31T16:38:40.909266Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2025-03-31T16:38:40.909947Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8606:6484], server id 
= [2:8611:6489], tablet id = 72075186224037902 2025-03-31T16:38:40.909981Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:38:40.910364Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8632:6505], server id = [2:8634:6507], tablet id = 72075186224037906, status = OK 2025-03-31T16:38:40.910454Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8632:6505], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T16:38:40.910545Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8607:6485], server id = [2:8612:6490], tablet id = 72075186224037903 2025-03-31T16:38:40.910568Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:38:40.910850Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8636:6509], server id = [2:8640:6513], tablet id = 72075186224037907, status = OK 2025-03-31T16:38:40.910930Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8636:6509], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T16:38:40.911924Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8638:6511], server id = [2:8641:6514], tablet id = 72075186224037908, status = OK 2025-03-31T16:38:40.911992Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8638:6511], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T16:38:40.916682Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-03-31T16:38:40.917203Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8627:6501], server id = [2:8630:6503], tablet id = 72075186224037904 2025-03-31T16:38:40.917240Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:38:40.918557Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-03-31T16:38:40.919647Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8629:6502], server id = [2:8631:6504], tablet id = 72075186224037905 2025-03-31T16:38:40.919684Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:38:40.920608Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-03-31T16:38:40.921340Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8632:6505], server id = [2:8634:6507], tablet id = 72075186224037906 2025-03-31T16:38:40.921375Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:38:40.922005Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-03-31T16:38:40.922446Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8638:6511], server id = [2:8641:6514], tablet id = 72075186224037908 2025-03-31T16:38:40.922475Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:38:40.922942Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-03-31T16:38:40.923007Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-03-31T16:38:40.923329Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-03-31T16:38:40.924116Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8636:6509], server id = [2:8640:6513], tablet id = 72075186224037907 2025-03-31T16:38:40.924148Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:38:40.955986Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-03-31T16:38:40.956284Z node 2 
:STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-03-31T16:38:41.548760Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 3 2025-03-31T16:38:41.548865Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-03-31T16:38:45.174364Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-31T16:38:45.174623Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-31T16:38:45.391246Z node 2 :STATISTICS INFO: Node 3 is unavailable 2025-03-31T16:38:45.391330Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-03-31T16:38:45.391564Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-03-31T16:38:45.391654Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-03-31T16:38:45.391807Z node 2 :STATISTICS DEBUG: Event round 3 is different from the current 0 2025-03-31T16:38:45.391845Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-03-31T16:38:45.392365Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-03-31T16:38:45.410478Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-03-31T16:38:45.410745Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 4, current Round: 0 2025-03-31T16:38:45.411287Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8769:6578], server id = [2:8770:6579], tablet id = 72075186224037900, status = OK 2025-03-31T16:38:45.411363Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8769:6578], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T16:38:45.412597Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-03-31T16:38:45.412689Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-03-31T16:38:45.412864Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-03-31T16:38:45.413016Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-03-31T16:38:45.413380Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-03-31T16:38:45.415063Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8769:6578], server id = [2:8770:6579], tablet id = 72075186224037900 2025-03-31T16:38:45.415203Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:38:45.416292Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List<Uint32>; DECLARE $data AS List<String>; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-03-31T16:38:45.455170Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8788:6597]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-31T16:38:45.455536Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-31T16:38:45.455600Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8788:6597], StatRequests.size() = 1 2025-03-31T16:38:45.585133Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ODhlZWZhOGQtNjE5MmYzN2QtYzEzYmE2NzItNjRhYWY1MmY=, TxId: 2025-03-31T16:38:45.585207Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ODhlZWZhOGQtNjE5MmYzN2QtYzEzYmE2NzItNjRhYWY1MmY=, TxId: ... waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2025-03-31T16:38:45.585923Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:8802:6603]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-03-31T16:38:45.586101Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-31T16:38:45.587025Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-03-31T16:38:45.587652Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-03-31T16:38:45.591507Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-03-31T16:38:45.591580Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-03-31T16:38:45.591641Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-03-31T16:38:45.599454Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 probe = 3 >> TMemoryController::ResourceBroker_ConfigLimit [GOOD] >> TMemTableMemoryConsumersCollection::Empty [GOOD] >> TMemTableMemoryConsumersCollection::Destruction [GOOD] >> TMemTableMemoryConsumersCollection::Register [GOOD] >> TMemTableMemoryConsumersCollection::Unregister [GOOD] >> TMemTableMemoryConsumersCollection::SetConsumption [GOOD] >> TMemTableMemoryConsumersCollection::CompactionComplete [GOOD] >> TMemTableMemoryConsumersCollection::SelectForCompaction [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/row_dispatcher/ut/unittest >> RowDispatcherTests::IgnoreWrongPartitionId [GOOD] Test command err: 2025-03-31T16:36:37.224913Z node 1 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Successfully bootstrapped coordinator, id [1:30:2057] 
2025-03-31T16:36:37.225387Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: TEvPing received, [1:25:2054] 2025-03-31T16:36:37.225525Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: Send TEvPong to [1:25:2054] 2025-03-31T16:36:37.225575Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: TEvPing received, [2:26:2054] 2025-03-31T16:36:37.225607Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: Send TEvPong to [2:26:2054] 2025-03-31T16:36:37.225641Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: TEvPing received, [3:27:2054] 2025-03-31T16:36:37.225661Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: Send TEvPong to [3:27:2054] 2025-03-31T16:36:37.225745Z node 1 :FQ_ROW_DISPATCHER INFO: Coordinator: TEvCoordinatorRequest from [1:28:2055], topic1, partIds: 0 2025-03-31T16:36:37.226692Z node 1 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Send TEvCoordinatorResult to [1:28:2055] 2025-03-31T16:36:37.226851Z node 1 :FQ_ROW_DISPATCHER INFO: Coordinator: TEvCoordinatorRequest from [1:29:2056], topic1, partIds: 0 2025-03-31T16:36:37.226919Z node 1 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Send TEvCoordinatorResult to [1:29:2056] 2025-03-31T16:36:37.235975Z node 1 :FQ_ROW_DISPATCHER INFO: Coordinator: TEvCoordinatorRequest from [1:29:2056], topic1, partIds: 1 2025-03-31T16:36:37.236124Z node 1 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Send TEvCoordinatorResult to [1:29:2056] 2025-03-31T16:36:37.236272Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: TEvPing received, [2:31:2055] 2025-03-31T16:36:37.236313Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: Move all Locations from old actor [2:26:2054] to new [2:31:2055] 2025-03-31T16:36:37.236343Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: Send TEvPong to [2:31:2055] 2025-03-31T16:36:37.236408Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: TEvPing received, [2:32:2056] 2025-03-31T16:36:37.236439Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: Move all Locations from old actor [2:31:2055] to new [2:32:2056] 2025-03-31T16:36:37.236465Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: Send TEvPong to [2:32:2056] 2025-03-31T16:36:37.236531Z node 1 :FQ_ROW_DISPATCHER INFO: Coordinator: TEvCoordinatorRequest from [1:28:2055], topic1, partIds: 0 2025-03-31T16:36:37.236577Z node 1 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Send TEvCoordinatorResult to [1:28:2055] 2025-03-31T16:36:37.236695Z node 1 :FQ_ROW_DISPATCHER INFO: Coordinator: TEvCoordinatorRequest from [1:29:2056], topic1, partIds: 1 2025-03-31T16:36:37.236738Z node 1 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Send TEvCoordinatorResult to [1:29:2056] 2025-03-31T16:36:37.320920Z node 5 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Successfully bootstrapped coordinator, id [5:30:2057] 2025-03-31T16:36:37.321312Z node 5 :FQ_ROW_DISPATCHER TRACE: Coordinator: TEvPing received, [5:25:2054] 2025-03-31T16:36:37.321356Z node 5 :FQ_ROW_DISPATCHER TRACE: Coordinator: Send TEvPong to [5:25:2054] 2025-03-31T16:36:37.321451Z node 5 :FQ_ROW_DISPATCHER TRACE: Coordinator: TEvPing received, [6:26:2054] 2025-03-31T16:36:37.321472Z node 5 :FQ_ROW_DISPATCHER TRACE: Coordinator: Send TEvPong to [6:26:2054] 2025-03-31T16:36:37.321502Z node 5 :FQ_ROW_DISPATCHER TRACE: Coordinator: TEvPing received, [7:27:2054] 2025-03-31T16:36:37.321520Z node 5 :FQ_ROW_DISPATCHER TRACE: Coordinator: Send TEvPong to [7:27:2054] 2025-03-31T16:36:37.321592Z node 5 :FQ_ROW_DISPATCHER INFO: Coordinator: TEvCoordinatorRequest from [5:28:2055], topic1, partIds: 0, 1, 2 2025-03-31T16:36:37.321724Z node 5 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Send TEvCoordinatorResult to [5:28:2055] 
2025-03-31T16:36:37.321852Z node 5 :FQ_ROW_DISPATCHER INFO: Coordinator: TEvCoordinatorRequest from [5:29:2056], topic1, partIds: 3 2025-03-31T16:36:37.321939Z node 5 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Send TEvCoordinatorResult to [5:29:2056] 2025-03-31T16:36:37.444023Z node 9 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [9:8:2055] Successfully bootstrapped, local coordinator id [9:5:2052] 2025-03-31T16:36:37.444138Z node 9 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [9:9:2056] Successfully bootstrapped, local coordinator id [9:6:2053] 2025-03-31T16:36:37.444199Z node 9 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [9:10:2057] Successfully bootstrapped, local coordinator id [9:7:2054] 2025-03-31T16:36:37.444413Z node 9 :FQ_ROW_DISPATCHER DEBUG: Run create coordination node "local/row_dispatcher//tenant" actor 2025-03-31T16:36:37.444463Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-03-31T16:36:37.444495Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-03-31T16:36:37.449239Z node 9 :FQ_ROW_DISPATCHER DEBUG: Run create coordination node "local/row_dispatcher//tenant" actor 2025-03-31T16:36:37.449300Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-03-31T16:36:37.449324Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-03-31T16:36:37.449377Z node 9 :FQ_ROW_DISPATCHER DEBUG: Run create coordination node "local/row_dispatcher//tenant" actor 2025-03-31T16:36:37.449412Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-03-31T16:36:37.449446Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-03-31T16:36:37.497580Z node 9 :FQ_ROW_DISPATCHER ERROR: Create coordination node "local/row_dispatcher//tenant" error: OVERLOADED {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exists but creating right now (id: [OwnerId: 72075186232723360, LocalPathId: 3], type: EPathTypeKesus, state: EPathStateCreate) } 2025-03-31T16:36:37.497695Z node 9 :FQ_ROW_DISPATCHER ERROR: Create coordination node "local/row_dispatcher//tenant" error: OVERLOADED {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exists but creating right now (id: [OwnerId: 72075186232723360, LocalPathId: 3], type: EPathTypeKesus, state: EPathStateCreate) } 2025-03-31T16:36:37.498557Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-03-31T16:36:37.498599Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-03-31T16:36:37.506040Z node 9 :FQ_ROW_DISPATCHER ERROR: Create coordination node "local/row_dispatcher//tenant" error: OVERLOADED {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exists but creating right now (id: [OwnerId: 72075186232723360, LocalPathId: 3], type: EPathTypeKesus, state: EPathStateCreate) } 2025-03-31T16:36:37.506210Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-03-31T16:36:37.506263Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-03-31T16:36:37.516510Z node 9 :FQ_ROW_DISPATCHER ERROR: Create coordination node "local/row_dispatcher//tenant" error: OVERLOADED {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exists but creating right now (id: [OwnerId: 72075186232723360, LocalPathId: 3], type: EPathTypeKesus, state: EPathStateCreate) } 2025-03-31T16:36:37.516725Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-03-31T16:36:37.516763Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-03-31T16:36:37.527775Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-03-31T16:36:37.527840Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-03-31T16:36:37.529313Z node 9 :FQ_ROW_DISPATCHER ERROR: Create coordination node "local/row_dispatcher//tenant" error: OVERLOADED {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exists but creating right now (id: [OwnerId: 72075186232723360, LocalPathId: 3], type: EPathTypeKesus, state: EPathStateCreate) } 2025-03-31T16:36:37.529507Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-03-31T16:36:37.529805Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-03-31T16:36:37.537703Z node 9 :FQ_ROW_DISPATCHER ERROR: Create coordination node "local/row_dispatcher//tenant" error: OVERLOADED {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exists but creating right now (id: [OwnerId: 72075186232723360, LocalPathId: 3], type: EPathTypeKesus, state: EPathStateCreate) } 2025-03-31T16:36:37.537875Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-03-31T16:36:37.537909Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-03-31T16:36:37.540381Z node 9 :FQ_ROW_DISPATCHER ERROR: Create coordination node "local/row_dispatcher//tenant" error: OVERLOADED {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exists but creating right now (id: [OwnerId: 72075186232723360, LocalPathId: 3], type: EPathTypeKesus, state: EPathStateCreate) } 2025-03-31T16:36:37.540520Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-03-31T16:36:37.540551Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-03-31T16:36:37.546314Z node 9 :FQ_ROW_DISPATCHER ERROR: Create coordination node "local/row_dispatcher//tenant" error: OVERLOADED {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exists but creating right now (id: [OwnerId: 72075186232723360, LocalPathId: 3], type: EPathTypeKesus, state: EPathStateCreate) } 2025-03-31T16:36:37.546462Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-03-31T16:36:37.546510Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-03-31T16:36:37.553507Z node 9 :FQ_ROW_DISPATCHER ERROR: Create coordination node "local/row_dispatcher//tenant" error: OVERLOADED {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exists but creating right now (id: [OwnerId: 72075186232723360, LocalPathId: 3], type: EPathTypeKesus, state: EPathStateCreate) } 2025-03-31T16:36:37.553684Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-03-31T16:36:37.553717Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-03-31T16:36:37.556291Z node 9 :FQ_ROW_DISPATCHER ERROR: Create coordination node "local/row_dispatcher//tenant" error: OVERLOADED {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exists but creating right now (id: [OwnerId: 72075186232723360, LocalPathId: 3], type: EPathTypeKesus, state: EPathStateCreate) } 2025-03-31T16:36:37.556458Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-03-31T16:36:37.556503Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-03-31T16:36:37.561384Z node 9 :FQ_ROW_DISPATCHER ERROR: Create coordination node "local/row_dispatcher//tenant" error: OVERLOADED {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exists but creating right now (id: [OwnerId: 72075186232723360, LocalPathId: 3], type: EPathTypeKesus, state: EPathStateCreate) } 2025-03-31T16:36:37.561538Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-03-31T16:36:37.561574Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-03-31T16:36:37.565119Z node 9 :FQ ... ation node "YDB_DATABASE/RowDispatcher/Tenant" 2025-03-31T16:38:41.405505Z node 38 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: TEvCoordinatorChangesSubscribe from [38:18:2059] 2025-03-31T16:38:41.405696Z node 38 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvStartSession from [38:14:2056], read group connection_id1, topicPath topic part id 0,1 query id QueryId cookie 1 2025-03-31T16:38:41.405908Z node 38 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Create new session: read group connection_id1 topic topic part id 0 2025-03-31T16:38:41.406109Z node 38 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Create new session: read group connection_id1 topic topic part id 1 2025-03-31T16:38:41.406490Z node 38 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvStartSession from [38:15:2057], read group connection_id1, topicPath topic part id 0,1 query id QueryId cookie 1 2025-03-31T16:38:41.406957Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvNewDataArrived from [38:22:2063] to [38:14:2056] query id QueryId 2025-03-31T16:38:41.407070Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Received TEvGetNextBatch from [38:14:2056] part id 0 query id QueryId 2025-03-31T16:38:41.408520Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvMessageBatch from [38:22:2063] to [38:14:2056] query id QueryId 2025-03-31T16:38:41.408639Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvNewDataArrived from [38:23:2064] to [38:14:2056] query id QueryId 2025-03-31T16:38:41.408749Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Received TEvGetNextBatch from [38:14:2056] part id 1 query id QueryId 2025-03-31T16:38:41.408844Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvMessageBatch from [38:23:2064] to [38:14:2056] query id QueryId 2025-03-31T16:38:41.408959Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvNewDataArrived from [38:22:2063] to [38:15:2057] query id QueryId 2025-03-31T16:38:41.409075Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Received TEvGetNextBatch from [38:15:2057] part id 0 query id QueryId 2025-03-31T16:38:41.409173Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvMessageBatch from [38:22:2063] to [38:15:2057] query id QueryId 2025-03-31T16:38:41.409267Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvNewDataArrived from [38:23:2064] to [38:15:2057] query id QueryId 2025-03-31T16:38:41.409358Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Received TEvGetNextBatch from [38:15:2057] part id 1 query id QueryId 2025-03-31T16:38:41.409455Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvMessageBatch from [38:23:2064] to [38:15:2057] query id QueryId 2025-03-31T16:38:41.409567Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: TEvUndelivered, from [38:14:2056], reason ActorUnknown 2025-03-31T16:38:41.409628Z node 38 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: DeleteConsumer, readActorId [38:14:2056] query id QueryId 2025-03-31T16:38:41.409891Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: TEvUndelivered, from [38:15:2057], reason 
ActorUnknown 2025-03-31T16:38:41.409946Z node 38 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: DeleteConsumer, readActorId [38:15:2057] query id QueryId 2025-03-31T16:38:41.410024Z node 38 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Session is not used, sent TEvPoisonPill to [38:22:2063] 2025-03-31T16:38:41.410097Z node 38 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Session is not used, sent TEvPoisonPill to [38:23:2064] 2025-03-31T16:38:41.541719Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Successfully bootstrapped row dispatcher, id [40:17:2058], tenant Tenant 2025-03-31T16:38:41.547416Z node 40 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Successfully bootstrapped coordinator, id [40:18:2059] 2025-03-31T16:38:41.547539Z node 40 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [40:19:2060] Successfully bootstrapped, local coordinator id [40:18:2059] 2025-03-31T16:38:41.547647Z node 40 :FQ_ROW_DISPATCHER DEBUG: Run create coordination node "YDB_DATABASE/RowDispatcher/Tenant" actor 2025-03-31T16:38:41.547692Z node 40 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "YDB_DATABASE/RowDispatcher/Tenant" 2025-03-31T16:38:41.547728Z node 40 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "YDB_DATABASE/RowDispatcher/Tenant" 2025-03-31T16:38:41.548128Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: TEvCoordinatorChangesSubscribe from [40:18:2059] 2025-03-31T16:38:41.548334Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvStartSession from [40:14:2056], read group connection_id1, topicPath topic part id 0 query id QueryId cookie 1 2025-03-31T16:38:41.548553Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Create new session: read group connection_id1 topic topic part id 0 2025-03-31T16:38:41.549049Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvStartSession from [40:15:2057], read group connection_id2, topicPath topic part id 0 query id QueryId cookie 1 2025-03-31T16:38:41.555794Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Create new session: read group connection_id2 topic topic part id 0 2025-03-31T16:38:41.583383Z node 40 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvNewDataArrived from [40:22:2063] to [40:14:2056] query id QueryId 2025-03-31T16:38:41.587413Z node 40 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Received TEvGetNextBatch from [40:14:2056] part id 0 query id QueryId 2025-03-31T16:38:41.587570Z node 40 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvMessageBatch from [40:22:2063] to [40:14:2056] query id QueryId 2025-03-31T16:38:41.587692Z node 40 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvNewDataArrived from [40:23:2064] to [40:15:2057] query id QueryId 2025-03-31T16:38:41.587801Z node 40 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Received TEvGetNextBatch from [40:15:2057] part id 0 query id QueryId 2025-03-31T16:38:41.587917Z node 40 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvMessageBatch from [40:23:2064] to [40:15:2057] query id QueryId 2025-03-31T16:38:41.588072Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvStopSession from [40:14:2056] topic topic query id QueryId 2025-03-31T16:38:41.588135Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: DeleteConsumer, readActorId [40:14:2056] query id QueryId 2025-03-31T16:38:41.588245Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Session is not used, sent TEvPoisonPill to [40:22:2063] 2025-03-31T16:38:41.588470Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvStopSession from [40:15:2057] topic topic query id QueryId 2025-03-31T16:38:41.588534Z node 
40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: DeleteConsumer, readActorId [40:15:2057] query id QueryId 2025-03-31T16:38:41.588630Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Session is not used, sent TEvPoisonPill to [40:23:2064] 2025-03-31T16:38:41.908277Z node 42 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Successfully bootstrapped row dispatcher, id [42:17:2058], tenant Tenant 2025-03-31T16:38:41.917031Z node 42 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Successfully bootstrapped coordinator, id [42:18:2059] 2025-03-31T16:38:41.917150Z node 42 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [42:19:2060] Successfully bootstrapped, local coordinator id [42:18:2059] 2025-03-31T16:38:41.917240Z node 42 :FQ_ROW_DISPATCHER DEBUG: Run create coordination node "YDB_DATABASE/RowDispatcher/Tenant" actor 2025-03-31T16:38:41.917276Z node 42 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "YDB_DATABASE/RowDispatcher/Tenant" 2025-03-31T16:38:41.917309Z node 42 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "YDB_DATABASE/RowDispatcher/Tenant" 2025-03-31T16:38:41.919035Z node 42 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: TEvCoordinatorChangesSubscribe from [42:18:2059] 2025-03-31T16:38:41.919315Z node 42 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvStartSession from [43:16:2053], read group connection_id1, topicPath topic part id 0 query id QueryId cookie 42 2025-03-31T16:38:41.919658Z node 42 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Create new session: read group connection_id1 topic topic part id 0 2025-03-31T16:38:41.928285Z node 42 :FQ_ROW_DISPATCHER ERROR: Create coordination node "YDB_DATABASE/RowDispatcher/Tenant" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): DNS resolution failed for YDB_ENDPOINT: UNKNOWN: Temporary failure in name resolution } {
: Error: Grpc error response on endpoint YDB_ENDPOINT } ] 2025-03-31T16:38:41.928623Z node 42 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: TEvTryConnect to node id 43 2025-03-31T16:38:41.939654Z node 42 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: EvNodeConnected, node id 43 2025-03-31T16:38:41.971961Z node 42 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvNewDataArrived from [42:22:2063] to [43:16:2053] query id QueryId 2025-03-31T16:38:41.972383Z node 42 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Received TEvGetNextBatch from [43:16:2053] part id 0 query id QueryId 2025-03-31T16:38:41.972518Z node 42 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvMessageBatch from [42:22:2063] to [43:16:2053] query id QueryId 2025-03-31T16:38:41.972861Z node 42 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvNoSession from [43:16:2053], generation 41 2025-03-31T16:38:41.972944Z node 42 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvNewDataArrived from [42:22:2063] to [43:16:2053] query id QueryId 2025-03-31T16:38:41.973257Z node 42 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Received TEvGetNextBatch from [43:16:2053] part id 0 query id QueryId 2025-03-31T16:38:41.973359Z node 42 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvMessageBatch from [42:22:2063] to [43:16:2053] query id QueryId 2025-03-31T16:38:41.973662Z node 42 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvNoSession from [43:16:2053], generation 42 2025-03-31T16:38:41.973723Z node 42 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: DeleteConsumer, readActorId [43:16:2053] query id QueryId 2025-03-31T16:38:41.973827Z node 42 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Session is not used, sent TEvPoisonPill to [42:22:2063] 2025-03-31T16:38:42.131808Z node 44 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Successfully bootstrapped row dispatcher, id [44:17:2058], tenant Tenant 2025-03-31T16:38:42.166329Z node 44 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Successfully bootstrapped coordinator, id [44:18:2059] 2025-03-31T16:38:42.166424Z node 44 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [44:19:2060] Successfully bootstrapped, local coordinator id [44:18:2059] 2025-03-31T16:38:42.166505Z node 44 :FQ_ROW_DISPATCHER DEBUG: Run create coordination node "YDB_DATABASE/RowDispatcher/Tenant" actor 2025-03-31T16:38:42.166541Z node 44 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "YDB_DATABASE/RowDispatcher/Tenant" 2025-03-31T16:38:42.166573Z node 44 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "YDB_DATABASE/RowDispatcher/Tenant" 2025-03-31T16:38:42.166712Z node 44 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: TEvCoordinatorChangesSubscribe from [44:18:2059] 2025-03-31T16:38:42.166919Z node 44 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvStartSession from [44:14:2056], read group connection_id1, topicPath topic part id 0 query id QueryId cookie 1 2025-03-31T16:38:42.167142Z node 44 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Create new session: read group connection_id1 topic topic part id 0 2025-03-31T16:38:42.167604Z node 44 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvNewDataArrived from [44:22:2063] to [44:14:2056] query id QueryId 2025-03-31T16:38:42.167718Z node 44 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvStopSession from [44:14:2056] topic topic query id QueryId 2025-03-31T16:38:42.167779Z node 44 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: DeleteConsumer, readActorId [44:14:2056] query id QueryId 2025-03-31T16:38:42.167871Z node 44 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Session is not used, sent TEvPoisonPill to 
[44:22:2063] |98.2%| [TM] {RESULT} ydb/core/fq/libs/row_dispatcher/ut/unittest >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32--2147483648-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32--2147483648-False] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendTestConnection [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendTestConnectionWithServiceAccount ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::WriteAndReadMany [GOOD] Test command err: 2025-03-31T16:35:33.203151Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:35:33.204518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T16:35:33.204588Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:35:33.216571Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:35:33.216976Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:131:2154] 2025-03-31T16:35:33.217200Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:35:33.227725Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:35:33.292086Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:35:33.292520Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:35:33.294282Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-03-31T16:35:33.294374Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-03-31T16:35:33.294460Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-03-31T16:35:33.295057Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:35:33.296175Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:35:33.296258Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:197:2154] in generation 2 2025-03-31T16:35:33.380016Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:35:33.418695Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-03-31T16:35:33.418867Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:35:33.418977Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-03-31T16:35:33.419007Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-03-31T16:35:33.419032Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-03-31T16:35:33.419073Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:35:33.419296Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:35:33.419337Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:35:33.419565Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-03-31T16:35:33.419658Z node 1 
:TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-03-31T16:35:33.419706Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-31T16:35:33.419750Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:35:33.419794Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-03-31T16:35:33.419828Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-31T16:35:33.419863Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-31T16:35:33.419895Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-03-31T16:35:33.419936Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T16:35:33.420034Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:35:33.420093Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:35:33.420130Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-03-31T16:35:33.427646Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:131:2154]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-03-31T16:35:33.427734Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T16:35:33.427815Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-31T16:35:33.427955Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-03-31T16:35:33.427994Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-03-31T16:35:33.428032Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-03-31T16:35:33.428071Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-31T16:35:33.428107Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-03-31T16:35:33.428145Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-03-31T16:35:33.428168Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-03-31T16:35:33.428411Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-03-31T16:35:33.428452Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-03-31T16:35:33.428476Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-03-31T16:35:33.428500Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-03-31T16:35:33.428527Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-03-31T16:35:33.428548Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 
2025-03-31T16:35:33.428593Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-03-31T16:35:33.428619Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-03-31T16:35:33.428641Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-03-31T16:35:33.440985Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-03-31T16:35:33.441061Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-03-31T16:35:33.441095Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-03-31T16:35:33.441158Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-03-31T16:35:33.441262Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-03-31T16:35:33.441786Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:35:33.441839Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:35:33.441881Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-03-31T16:35:33.441999Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:131:2154]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-03-31T16:35:33.442053Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-31T16:35:33.442192Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-03-31T16:35:33.442230Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-31T16:35:33.442272Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-03-31T16:35:33.442307Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-03-31T16:35:33.446941Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-03-31T16:35:33.447024Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:35:33.447309Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:35:33.447366Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:35:33.447429Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-31T16:35:33.447466Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-03-31T16:35:33.447498Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-31T16:35:33.447538Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-03-31T16:35:33.447572Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-03-31T16:35:33.447616Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000001:1] at 9437184 is Executed 2025-03-31T16:35:33.447651Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-03-31T16:35:33.447687Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-03-31T16:35:33.447725Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-03-31T16:35:33.447902Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-03-31T16:35:33.447946Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-31T16:35:33.447995Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-03-31T16:35:33.448021Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-03-31T16:35:33.448049Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-03-31T16:35:33.448118Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-31T16:35:33.448140Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-03-31T16:35:33.448172Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-31T16:35:33.448201Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-03-31T16:35:33.448255Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-03-31T16:35:33.448291Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-03-31T16:35:33.448320Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-03-31T16:35:33.448359Z node 1 :TX_DATA ... 
c latency: 58 ms, propose latency: 58 ms, status: COMPLETE 2025-03-31T16:38:31.237920Z node 3 :TX_DATASHARD TRACE: Execution status for [0:10] at 9437184 is DelayComplete 2025-03-31T16:38:31.237951Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:10] at 9437184 executing on unit FinishPropose 2025-03-31T16:38:31.237983Z node 3 :TX_DATASHARD TRACE: Add [0:10] at 9437184 to execution unit CompletedOperations 2025-03-31T16:38:31.238014Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:10] at 9437184 on unit CompletedOperations 2025-03-31T16:38:31.238064Z node 3 :TX_DATASHARD TRACE: Execution status for [0:10] at 9437184 is Executed 2025-03-31T16:38:31.238090Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:10] at 9437184 executing on unit CompletedOperations 2025-03-31T16:38:31.238117Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:10] at 9437184 has finished 2025-03-31T16:38:31.268293Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-03-31T16:38:31.268380Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:10] at 9437184 on unit FinishPropose 2025-03-31T16:38:31.268447Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T16:38:36.953955Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269551617, Sender [3:99:2134], Recipient [3:235:2228]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 99 RawX2: 12884904022 } 2025-03-31T16:38:36.954027Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvGetShardState 2025-03-31T16:38:36.954423Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [3:650:2625], Recipient [3:235:2228]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:38:36.954469Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:38:36.954510Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [3:649:2624], serverId# [3:650:2625], sessionId# [0:0:0] 2025-03-31T16:38:36.955536Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [3:99:2134], Recipient [3:235:2228]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 99 RawX2: 12884904022 } TxBody: "\032\354\002\037\010\0021\010key1\010key2\nvalue\005\205\n\205\002\205\004\206\205\006\207\203\004\207\203\001H\207\203\001H\006\n\016\203\014\020List$Truncated\002\205\004\205\002?\022\002\205\000\034MyReads MyWrites\205\004\205\002?\022\002\206\202\024Reply\024Write?\030\205\002\206\203\010\002 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\024)\211\026?\022\203\005\004\200\205\006\203\004\203\004\203\004\006\n\016\213\004\203\004\207\203\001H\213\002\203\004\203\004\203\010\203\010\203\004\206\203\014\203\014,SelectRange\000\003?* h\020\000\000\000\000\000\000\016\000\000\000\000\000\000\000?\014\005?2\003?,D\003?.F\003?0p\007\013?:\003?4\000\'?8\003\013?>\003?<\003j\030\001\003?@\000\003?B\000\003?D\007\240%&\003?F\000\006\004?J\003\203\014\000\003\203\014\000\003\003?L\000\377\007\002\000\005?\032\005?\026?x\000\005?\030\003\005? 
\005?\034?x\000\006\ 2025-03-31T16:38:36.955589Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T16:38:36.955692Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-31T16:38:36.956458Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit CheckDataTx 2025-03-31T16:38:36.977005Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Executed 2025-03-31T16:38:36.977108Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:11] at 9437184 executing on unit CheckDataTx 2025-03-31T16:38:36.977163Z node 3 :TX_DATASHARD TRACE: Add [0:11] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-31T16:38:36.977212Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit BuildAndWaitDependencies 2025-03-31T16:38:36.977275Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-03-31T16:38:36.977345Z node 3 :TX_DATASHARD TRACE: Activated operation [0:11] at 9437184 2025-03-31T16:38:36.977386Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Executed 2025-03-31T16:38:36.977415Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:11] at 9437184 executing on unit BuildAndWaitDependencies 2025-03-31T16:38:36.977442Z node 3 :TX_DATASHARD TRACE: Add [0:11] at 9437184 to execution unit ExecuteDataTx 2025-03-31T16:38:36.977470Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2025-03-31T16:38:36.984826Z node 3 :TX_DATASHARD TRACE: Tablet 9437184 is not ready for [0:11] execution 2025-03-31T16:38:36.985179Z node 3 :TX_DATASHARD DEBUG: tx 11 released its data 2025-03-31T16:38:36.985247Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Restart 2025-03-31T16:38:37.076575Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-31T16:38:37.076663Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2025-03-31T16:38:37.077517Z node 3 :TX_DATASHARD DEBUG: tx 11 at 9437184 restored its data 2025-03-31T16:38:37.081096Z node 3 :TX_DATASHARD TRACE: Tablet 9437184 is not ready for [0:11] execution 2025-03-31T16:38:37.081296Z node 3 :TX_DATASHARD DEBUG: tx 11 released its data 2025-03-31T16:38:37.081346Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Restart 2025-03-31T16:38:37.279922Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-31T16:38:37.280003Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2025-03-31T16:38:37.280825Z node 3 :TX_DATASHARD DEBUG: tx 11 at 9437184 restored its data 2025-03-31T16:38:37.325497Z node 3 :TX_DATASHARD TRACE: Operation [0:11] at 9437184 exceeded memory limit 4194304 and requests 33554432 more for the next try 2025-03-31T16:38:37.325812Z node 3 :TX_DATASHARD DEBUG: tx 11 released its data 2025-03-31T16:38:37.325875Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Restart 2025-03-31T16:38:37.326353Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-31T16:38:37.326406Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2025-03-31T16:38:37.327243Z node 3 :TX_DATASHARD DEBUG: tx 11 at 9437184 restored its data 2025-03-31T16:38:37.686709Z node 
3 :TX_DATASHARD TRACE: Tablet 9437184 is not ready for [0:11] execution 2025-03-31T16:38:37.687787Z node 3 :TX_DATASHARD DEBUG: tx 11 released its data 2025-03-31T16:38:37.687864Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Restart 2025-03-31T16:38:37.969607Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-31T16:38:37.969696Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2025-03-31T16:38:37.970474Z node 3 :TX_DATASHARD DEBUG: tx 11 at 9437184 restored its data 2025-03-31T16:38:38.405911Z node 3 :TX_DATASHARD TRACE: Operation [0:11] at 9437184 exceeded memory limit 37748736 and requests 301989888 more for the next try 2025-03-31T16:38:38.406908Z node 3 :TX_DATASHARD DEBUG: tx 11 released its data 2025-03-31T16:38:38.406964Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Restart 2025-03-31T16:38:38.623523Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-31T16:38:38.623628Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2025-03-31T16:38:38.624498Z node 3 :TX_DATASHARD DEBUG: tx 11 at 9437184 restored its data 2025-03-31T16:38:38.630022Z node 3 :TX_DATASHARD TRACE: Tablet 9437184 is not ready for [0:11] execution 2025-03-31T16:38:38.630257Z node 3 :TX_DATASHARD DEBUG: tx 11 released its data 2025-03-31T16:38:38.630313Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Restart 2025-03-31T16:38:38.666968Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-31T16:38:38.667053Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2025-03-31T16:38:38.667906Z node 3 :TX_DATASHARD DEBUG: tx 11 at 9437184 restored its data 2025-03-31T16:38:38.669880Z node 3 :TX_DATASHARD TRACE: Tablet 9437184 is not ready for [0:11] execution 2025-03-31T16:38:38.670068Z node 3 :TX_DATASHARD DEBUG: tx 11 released its data 2025-03-31T16:38:38.670125Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Restart 2025-03-31T16:38:38.710944Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-31T16:38:38.711024Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2025-03-31T16:38:38.711838Z node 3 :TX_DATASHARD DEBUG: tx 11 at 9437184 restored its data 2025-03-31T16:38:38.720335Z node 3 :TX_DATASHARD TRACE: Tablet 9437184 is not ready for [0:11] execution 2025-03-31T16:38:38.720561Z node 3 :TX_DATASHARD DEBUG: tx 11 released its data 2025-03-31T16:38:38.720614Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Restart 2025-03-31T16:38:39.208153Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-31T16:38:39.208240Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2025-03-31T16:38:39.209092Z node 3 :TX_DATASHARD DEBUG: tx 11 at 9437184 restored its data 2025-03-31T16:38:40.878859Z node 3 :TX_DATASHARD TRACE: Executed operation [0:11] at tablet 9437184 with status COMPLETE 2025-03-31T16:38:40.878973Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for [0:11] at 9437184: {NSelectRow: 0, NSelectRange: 1, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 129871, SelectRangeBytes: 40000268, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-03-31T16:38:40.879047Z node 3 :TX_DATASHARD TRACE: Execution 
status for [0:11] at 9437184 is Executed 2025-03-31T16:38:40.879109Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:11] at 9437184 executing on unit ExecuteDataTx 2025-03-31T16:38:40.879152Z node 3 :TX_DATASHARD TRACE: Add [0:11] at 9437184 to execution unit FinishPropose 2025-03-31T16:38:40.879196Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit FinishPropose 2025-03-31T16:38:40.879256Z node 3 :TX_DATASHARD TRACE: Propose transaction complete txid 11 at tablet 9437184 send to client, exec latency: 62 ms, propose latency: 62 ms, status: COMPLETE 2025-03-31T16:38:40.879427Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is DelayComplete 2025-03-31T16:38:40.879464Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:11] at 9437184 executing on unit FinishPropose 2025-03-31T16:38:40.879497Z node 3 :TX_DATASHARD TRACE: Add [0:11] at 9437184 to execution unit CompletedOperations 2025-03-31T16:38:40.879558Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit CompletedOperations 2025-03-31T16:38:40.879623Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Executed 2025-03-31T16:38:40.879654Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:11] at 9437184 executing on unit CompletedOperations 2025-03-31T16:38:40.879686Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:11] at 9437184 has finished 2025-03-31T16:38:40.913440Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-03-31T16:38:40.913534Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:11] at 9437184 on unit FinishPropose 2025-03-31T16:38:40.913607Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32--2147483648-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32-2147483647-True] >> TCreateAndDropViewTest::CallDropViewOnTable [FAIL] >> TCreateAndDropViewTest::DropSameViewTwice >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendTestConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateBinding >> QuoterWithKesusTest::AllocationStatistics [GOOD] >> QuoterWithKesusTest::UpdatesCountersForParentResources |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestConfigWithMetadataBlock::test_cluster_is_operational_with_metadata [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/memory_controller/ut/unittest >> TMemTableMemoryConsumersCollection::SelectForCompaction [GOOD] Test command err: ResourceBrokerSelfConfig: LimitBytes: 0 QueryExecutionLimitBytes: 0 2025-03-31T16:37:20.001714Z node 1 :MEMORY_CONTROLLER INFO: Periodic memory stats: AnonRss: none CGroupLimit: none MemTotal: none MemAvailable: none AllocatedMemory: 0B AllocatorCachesMemory: 0B HardLimit: 200MiB SoftLimit: 150MiB TargetUtilization: 100MiB ActivitiesLimitBytes: 60MiB ConsumersConsumption: 0B OtherConsumption: 0B ExternalConsumption: 0B TargetConsumersConsumption: 100MiB ResultingConsumersConsumption: 6MiB Coefficient: 0.9999990463 2025-03-31T16:37:20.003866Z node 1 :MEMORY_CONTROLLER INFO: Consumer MemTable state: Consumption: 0B Limit: 6MiB Min: 2MiB Max: 6MiB 2025-03-31T16:37:20.010612Z node 1 :MEMORY_CONTROLLER INFO: Consumer QueryExecution state: Consumption: 0B Limit: 40MiB 2025-03-31T16:37:20.012834Z node 1 :MEMORY_CONTROLLER INFO: Bootstrapped with config HardLimitBytes: 
209715200 2025-03-31T16:37:20.039850Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-03-31T16:37:20.043007Z node 1 :TABLET_SAUSAGECACHE NOTICE: Bootstrap with config MemoryLimit: 33554432 2025-03-31T16:37:20.066649Z node 1 :MEMORY_CONTROLLER INFO: Consumer SharedCache [1:20:2067] registered 2025-03-31T16:37:20.067760Z node 1 :RESOURCE_BROKER INFO: New config diff: Queues { Name: "queue_kqp_resource_manager" Limit { Memory: 41943040 } } ResourceLimit { Memory: 62914560 } 2025-03-31T16:37:20.068563Z node 1 :RESOURCE_BROKER INFO: New config: Queues { Name: "queue_default" Weight: 30 Limit { Cpu: 2 } } Queues { Name: "queue_compaction_gen0" Weight: 100 Limit { Cpu: 10 } } Queues { Name: "queue_compaction_gen1" Weight: 100 Limit { Cpu: 6 } } Queues { Name: "queue_compaction_gen2" Weight: 100 Limit { Cpu: 3 } } Queues { Name: "queue_compaction_gen3" Weight: 100 Limit { Cpu: 3 } } Queues { Name: "queue_compaction_borrowed" Weight: 100 Limit { Cpu: 3 } } Queues { Name: "queue_cs_indexation" Weight: 100 Limit { Cpu: 3 Memory: 1073741824 } } Queues { Name: "queue_cs_ttl" Weight: 100 Limit { Cpu: 3 Memory: 1073741824 } } Queues { Name: "queue_cs_general" Weight: 100 Limit { Cpu: 3 Memory: 3221225472 } } Queues { Name: "queue_cs_scan_read" Weight: 100 Limit { Cpu: 3 Memory: 3221225472 } } Queues { Name: "queue_cs_normalizer" Weight: 100 Limit { Cpu: 3 Memory: 3221225472 } } Queues { Name: "queue_transaction" Weight: 100 Limit { Cpu: 4 } } Queues { Name: "queue_background_compaction" Weight: 10 Limit { Cpu: 1 } } Queues { Name: "queue_scan" Weight: 100 Limit { Cpu: 10 } } Queues { Name: "queue_backup" Weight: 100 Limit { Cpu: 2 } } Queues { Name: "queue_restore" Weight: 100 Limit { Cpu: 10 } } Queues { Name: "queue_kqp_resource_manager" Weight: 30 Limit { Cpu: 4 Memory: 41943040 } } Queues { Name: "queue_build_index" Weight: 100 Limit { Cpu: 10 } } Queues { Name: "queue_ttl" Weight: 100 Limit { Cpu: 2 } } Queues { Name: "queue_datashard_build_stats" Weight: 100 Limit { Cpu: 1 } } Queues { Name: "queue_cdc_initial_scan" Weight: 100 Limit { Cpu: 2 } } Queues { Name: "queue_statistics_scan" Weight: 100 Limit { Cpu: 1 } } Tasks { Name: "unknown" QueueName: "queue_default" DefaultDuration: 60000000 } Tasks { Name: "compaction_gen0" QueueName: "queue_compaction_gen0" DefaultDuration: 10000000 } Tasks { Name: "compaction_gen1" QueueName: "queue_compaction_gen1" DefaultDuration: 30000000 } Tasks { Name: "compaction_gen2" QueueName: "queue_compaction_gen2" DefaultDuration: 120000000 } Tasks { Name: "compaction_gen3" QueueName: "queue_compaction_gen3" DefaultDuration: 600000000 } Tasks { Name: "compaction_borrowed" QueueName: "queue_compaction_borrowed" DefaultDuration: 600000000 } Tasks { Name: "CS::TTL" QueueName: "queue_cs_ttl" DefaultDuration: 600000000 } Tasks { Name: "CS::INDEXATION" QueueName: "queue_cs_indexation" DefaultDuration: 600000000 } Tasks { Name: "CS::GENERAL" QueueName: "queue_cs_general" DefaultDuration: 600000000 } Tasks { Name: "CS::SCAN_READ" QueueName: "queue_cs_scan_read" DefaultDuration: 600000000 } Tasks { Name: "CS::NORMALIZER" QueueName: "queue_cs_normalizer" DefaultDuration: 600000000 } Tasks { Name: "transaction" QueueName: "queue_transaction" DefaultDuration: 600000000 } Tasks { Name: "background_compaction" QueueName: "queue_background_compaction" DefaultDuration: 60000000 } Tasks { Name: "background_compaction_gen0" QueueName: "queue_background_compaction" DefaultDuration: 10000000 } Tasks { Name: "background_compaction_gen1" QueueName: 
"queue_background_compaction" DefaultDuration: 20000000 } Tasks { Name: "background_compaction_gen2" QueueName: "queue_background_compaction" DefaultDuration: 60000000 } Tasks { Name: "background_compaction_gen3" QueueName: "queue_background_compaction" DefaultDuration: 300000000 } Tasks { Name: "scan" QueueName: "queue_scan" DefaultDuration: 300000000 } Tasks { Name: "backup" QueueName: "queue_backup" DefaultDuration: 300000000 } Tasks { Name: "restore" QueueName: "queue_restore" DefaultDuration: 300000000 } Tasks { Name: "kqp_query" QueueName: "queue_kqp_resource_manager" DefaultDuration: 600000000 } Tasks { Name: "build_index" QueueName: "queue_build_index" DefaultDuration: 600000000 } Tasks { Name: "ttl" QueueName: "queue_ttl" DefaultDuration: 300000000 } Tasks { Name: "datashard_build_stats" QueueName: "queue_datashard_build_stats" DefaultDuration: 5000000 } Tasks { Name: "cdc_initial_scan" QueueName: "queue_cdc_initial_scan" DefaultDuration: 600000000 } Tasks { Name: "statistics_scan" QueueName: "queue_statistics_scan" DefaultDuration: 600000000 } ResourceLimit { Cpu: 20 Memory: 62914560 } 2025-03-31T16:37:20.069880Z node 1 :RESOURCE_BROKER INFO: Configure result: Success: true 2025-03-31T16:37:20.070270Z node 1 :TABLET_SAUSAGECACHE NOTICE: Register memory consumer 2025-03-31T16:37:20.092983Z node 1 :MEMORY_CONTROLLER INFO: ResourceBroker configure result Success: true 2025-03-31T16:37:20.173296Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:100:2146], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:37:20.173964Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:37:20.174059Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:37:20.315759Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:438:2399] 1 registered 2025-03-31T16:37:20.332319Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:450:2401] 0 registered 2025-03-31T16:37:20.338764Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:450:2401] 2 registered 2025-03-31T16:37:20.340086Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:450:2401] 4 registered 2025-03-31T16:37:20.340332Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:450:2401] 5 registered 2025-03-31T16:37:20.342026Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:459:2403] 1 registered 2025-03-31T16:37:20.347494Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:459:2403] 2 registered 2025-03-31T16:37:20.407657Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:465:2405] 1 registered 2025-03-31T16:37:20.450506Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:465:2405] 2 registered 2025-03-31T16:37:20.450956Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:465:2405] 3 registered 2025-03-31T16:37:20.451938Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:465:2405] 4 registered 2025-03-31T16:37:20.453060Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:465:2405] 5 registered 2025-03-31T16:37:20.453286Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:465:2405] 6 registered 2025-03-31T16:37:20.453482Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:465:2405] 7 registered 2025-03-31T16:37:20.453651Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:465:2405] 8 registered 2025-03-31T16:37:20.453718Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:465:2405] 9 registered 2025-03-31T16:37:20.453928Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:465:2405] 10 registered 2025-03-31T16:37:20.454397Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:465:2405] 11 registered 2025-03-31T16:37:20.455330Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:465:2405] 12 registered 2025-03-31T16:37:20.455841Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:465:2405] 13 registered 2025-03-31T16:37:20.455962Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:465:2405] 14 registered 2025-03-31T16:37:20.456176Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:465:2405] 15 registered 2025-03-31T16:37:20.459487Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:465:2405] 16 registered 2025-03-31T16:37:20.459939Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:465:2405] 17 registered 2025-03-31T16:37:20.460095Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:465:2405] 18 registered 2025-03-31T16:37:20.467885Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:465:2405] 19 registered 2025-03-31T16:37:20.494082Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:465:2405] 20 registered 2025-03-31T16:37:20.494463Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:465:2405] 22 registered 2025-03-31T16:37:20.495815Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:465:2405] 23 registered 2025-03-31T16:37:20.497054Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:465:2405] 24 registered 2025-03-31T16:37:20.501024Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:465:2405] 25 registered 
2025-03-31T16:37:20.518872Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:465:2405] 26 registered 2025-03-31T16:37:20.519571Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:465:2405] 27 registered 2025-03-31T16:37:20.523012Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:465:2405] 28 registered 2025-03-31T16:37:20.527279Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:465:2405] 29 registered 2025-03-31T16:37:20.528256Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:465:2405] 30 registered 2025-03-31T16:37:20.528583Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:465:2405] 31 registered 2025-03-31T16:37:20.528784Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:465:2405] 32 registered 2025-03-31T16:37:20.531986Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:465:2405] 33 registered 2025-03-31T16:37:20.533519Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:465:2405] 34 registered 2025-03-31T16:37:20.533880Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:465:2405] 35 registered 2025-03-31T16:37:20.534504Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:465:2405] 36 registered 2025-03-31T16:37:20.534677Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:465:2405] 37 registered 2025-03-31T16:37:20.534757Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:465:2405] 38 registered 2025-03-31T16:37:20.541578Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:465:2405] 39 registered 2025-03-31T16:37:20.546319Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:465:2405] 40 registered 2025-03-31T16:37:20.546814Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:465:2405] 41 registered 2025-03-31T16:37:20.547391Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:465:2405] 42 registered 2025-03-31T16:37:20.550553Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:465:2405] 43 registered 2025-03-31T16:37:20.556542Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:465:2405] 44 registered 2025-03-31T16:37:20.556689Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:465:2405] 45 registered 2025-03-31T16:37:20.557235Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:465:2405] 46 registered 2025-03-31T16:37:20.557345Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:465:2405] 47 registered 2025-03-31T16:37:20.557564Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:465:2405] 48 registered 2025-03-31T16:37:20.557974Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:465:2405] 49 registered 2025-03-31T16:37:20.558135Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:465:2405] 50 registered 2025-03-31T16:37:20.558653Z node 1 :MEMORY_CONTROLLER TRACE: ... 
506639Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:467:2406] 105 registered 2025-03-31T16:38:47.506682Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:467:2406] 106 registered 2025-03-31T16:38:47.506719Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:467:2406] 107 registered 2025-03-31T16:38:47.506760Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:467:2406] 108 registered 2025-03-31T16:38:47.506797Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:467:2406] 109 registered 2025-03-31T16:38:47.506833Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:467:2406] 110 registered 2025-03-31T16:38:47.509882Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:467:2406] 111 registered 2025-03-31T16:38:47.510161Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:467:2406] 113 registered 2025-03-31T16:38:47.510290Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:467:2406] 114 registered 2025-03-31T16:38:47.510938Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:467:2406] 115 registered 2025-03-31T16:38:47.511029Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:467:2406] 116 registered 2025-03-31T16:38:47.511227Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:467:2406] 117 registered 2025-03-31T16:38:47.511379Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:467:2406] 118 registered 2025-03-31T16:38:47.511523Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:503:2407] 17 registered 2025-03-31T16:38:47.511817Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:503:2407] 0 registered 2025-03-31T16:38:47.511955Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:503:2407] 18 registered 2025-03-31T16:38:47.512244Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:503:2407] 1 registered 2025-03-31T16:38:47.512347Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:503:2407] 19 registered 2025-03-31T16:38:47.512414Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:503:2407] 2 registered 2025-03-31T16:38:47.512472Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:503:2407] 20 registered 2025-03-31T16:38:47.512559Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:503:2407] 3 registered 2025-03-31T16:38:47.512617Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:503:2407] 21 registered 2025-03-31T16:38:47.512725Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:503:2407] 4 registered 2025-03-31T16:38:47.512770Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:503:2407] 6 registered 2025-03-31T16:38:47.512883Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:503:2407] 9 registered 2025-03-31T16:38:47.512923Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:503:2407] 10 registered 2025-03-31T16:38:47.512993Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:503:2407] 13 registered 2025-03-31T16:38:47.513025Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:503:2407] 14 registered 2025-03-31T16:38:47.513076Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:503:2407] 16 registered 2025-03-31T16:38:47.513117Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:513:2411] 7 registered 2025-03-31T16:38:47.513176Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:513:2411] 1 registered 2025-03-31T16:38:47.513223Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:513:2411] 2 registered 2025-03-31T16:38:47.513260Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:513:2411] 3 registered 2025-03-31T16:38:47.513298Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:513:2411] 4 registered 2025-03-31T16:38:47.513339Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:513:2411] 5 registered 2025-03-31T16:38:47.513377Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:513:2411] 6 registered 2025-03-31T16:38:47.513419Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:525:2413] 
102 registered 2025-03-31T16:38:47.513470Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:525:2413] 103 registered 2025-03-31T16:38:47.513502Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:525:2413] 1 registered 2025-03-31T16:38:47.513551Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:525:2413] 104 registered 2025-03-31T16:38:47.513586Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:525:2413] 2 registered 2025-03-31T16:38:47.513628Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:525:2413] 3 registered 2025-03-31T16:38:47.513672Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:525:2413] 4 registered 2025-03-31T16:38:47.513707Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:525:2413] 5 registered 2025-03-31T16:38:47.513745Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:525:2413] 6 registered 2025-03-31T16:38:47.513785Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:525:2413] 7 registered 2025-03-31T16:38:47.513823Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:525:2413] 100 registered 2025-03-31T16:38:47.513866Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:525:2413] 101 registered 2025-03-31T16:38:47.513906Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:539:2415] 1 registered 2025-03-31T16:38:47.513955Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:539:2415] 2 registered 2025-03-31T16:38:47.513990Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:539:2415] 3 registered test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0008b5/r3tmp/tmpY1rZrx/pdisk_1.dat 2025-03-31T16:38:47.610934Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:511:2409] 1 registered 2025-03-31T16:38:47.611258Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:511:2409] 2 registered 2025-03-31T16:38:47.611428Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:511:2409] 3 registered 2025-03-31T16:38:47.611500Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:511:2409] 4 registered 2025-03-31T16:38:47.611610Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:511:2409] 120 registered 2025-03-31T16:38:47.611762Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:511:2409] 5 registered 2025-03-31T16:38:47.611810Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:511:2409] 121 registered 2025-03-31T16:38:47.612088Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:511:2409] 6 registered 2025-03-31T16:38:47.612147Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:511:2409] 122 registered 2025-03-31T16:38:47.612188Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:511:2409] 7 registered 2025-03-31T16:38:47.612229Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:511:2409] 123 registered 2025-03-31T16:38:47.612388Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:511:2409] 8 registered 2025-03-31T16:38:47.612443Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:511:2409] 125 registered 2025-03-31T16:38:47.612484Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:511:2409] 126 registered 2025-03-31T16:38:47.612518Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:511:2409] 127 registered 2025-03-31T16:38:47.612555Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:511:2409] 128 registered 2025-03-31T16:38:47.612755Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:511:2409] 129 registered 2025-03-31T16:38:47.612799Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:511:2409] 100 registered 2025-03-31T16:38:47.612836Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:511:2409] 101 registered 2025-03-31T16:38:47.612869Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:511:2409] 131 registered 2025-03-31T16:38:47.612906Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:511:2409] 102 registered 2025-03-31T16:38:47.612940Z node 9 
:MEMORY_CONTROLLER TRACE: MemTable [9:511:2409] 103 registered 2025-03-31T16:38:47.612970Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:511:2409] 105 registered 2025-03-31T16:38:47.677412Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:38:47.718457Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:38:47.720562Z node 9 :MEMORY_CONTROLLER INFO: Config updated QueryExecutionLimitPercent: 15 2025-03-31T16:38:47.721193Z node 9 :TABLET_SAUSAGECACHE NOTICE: Update config MemoryLimit: 33554432 2025-03-31T16:38:47.758229Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:38:47.758378Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:38:47.771515Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:38:48.002439Z node 9 :MEMORY_CONTROLLER INFO: Periodic memory stats: AnonRss: none CGroupLimit: 500MiB MemTotal: none MemAvailable: none AllocatedMemory: 0B AllocatorCachesMemory: 0B HardLimit: 500MiB SoftLimit: 375MiB TargetUtilization: 250MiB ActivitiesLimitBytes: 1000MiB ConsumersConsumption: 32.5KiB OtherConsumption: 0B ExternalConsumption: 0B TargetConsumersConsumption: 250MiB ResultingConsumersConsumption: 250MiB Coefficient: 0.90625 2025-03-31T16:38:48.003248Z node 9 :MEMORY_CONTROLLER INFO: Consumer SharedCache state: Consumption: 0B Limit: 236MiB Min: 100MiB Max: 250MiB 2025-03-31T16:38:48.003416Z node 9 :MEMORY_CONTROLLER INFO: Consumer MemTable state: Consumption: 32.5KiB Limit: 14.1MiB Min: 5MiB Max: 15MiB 2025-03-31T16:38:48.003492Z node 9 :MEMORY_CONTROLLER INFO: Consumer QueryExecution state: Consumption: 1.94MiB Limit: 999MiB 2025-03-31T16:38:48.004252Z node 9 :TABLET_SAUSAGECACHE INFO: Limit memory consumer with 236MiB 2025-03-31T16:38:48.191872Z node 9 :MEMORY_CONTROLLER INFO: Periodic memory stats: AnonRss: none CGroupLimit: 500MiB MemTotal: none MemAvailable: none AllocatedMemory: 0B AllocatorCachesMemory: 0B HardLimit: 500MiB SoftLimit: 375MiB TargetUtilization: 250MiB ActivitiesLimitBytes: 1000MiB ConsumersConsumption: 32.6KiB OtherConsumption: 0B ExternalConsumption: 0B TargetConsumersConsumption: 250MiB ResultingConsumersConsumption: 250MiB Coefficient: 0.90625 2025-03-31T16:38:48.192477Z node 9 :MEMORY_CONTROLLER INFO: Consumer SharedCache state: Consumption: 0B Limit: 236MiB Min: 100MiB Max: 250MiB 2025-03-31T16:38:48.192531Z node 9 :MEMORY_CONTROLLER INFO: Consumer MemTable state: Consumption: 32.6KiB Limit: 14.1MiB Min: 5MiB Max: 15MiB 2025-03-31T16:38:48.192566Z node 9 :MEMORY_CONTROLLER INFO: Consumer QueryExecution state: Consumption: 1.94MiB Limit: 999MiB 2025-03-31T16:38:48.192660Z node 9 :TABLET_SAUSAGECACHE INFO: Limit memory consumer with 236MiB 2025-03-31T16:38:48.381044Z node 9 :MEMORY_CONTROLLER INFO: Periodic memory stats: AnonRss: none CGroupLimit: 200MiB MemTotal: none MemAvailable: none AllocatedMemory: 0B AllocatorCachesMemory: 0B HardLimit: 200MiB SoftLimit: 150MiB TargetUtilization: 100MiB ActivitiesLimitBytes: 1000MiB ConsumersConsumption: 33KiB OtherConsumption: 0B ExternalConsumption: 0B TargetConsumersConsumption: 100MiB ResultingConsumersConsumption: 100MiB Coefficient: 0.90625 2025-03-31T16:38:48.381780Z node 9 :MEMORY_CONTROLLER INFO: Consumer SharedCache state: Consumption: 0B Limit: 94.4MiB 
Min: 40MiB Max: 100MiB 2025-03-31T16:38:48.381855Z node 9 :MEMORY_CONTROLLER INFO: Consumer MemTable state: Consumption: 33KiB Limit: 5.63MiB Min: 2MiB Max: 6MiB 2025-03-31T16:38:48.381920Z node 9 :MEMORY_CONTROLLER INFO: Consumer QueryExecution state: Consumption: 1.94MiB Limit: 999MiB 2025-03-31T16:38:48.382582Z node 9 :TABLET_SAUSAGECACHE INFO: Limit memory consumer with 94.4MiB 2025-03-31T16:38:48.528330Z node 9 :MEMORY_CONTROLLER INFO: Periodic memory stats: AnonRss: none CGroupLimit: 200MiB MemTotal: none MemAvailable: none AllocatedMemory: 0B AllocatorCachesMemory: 0B HardLimit: 200MiB SoftLimit: 150MiB TargetUtilization: 100MiB ActivitiesLimitBytes: 1000MiB ConsumersConsumption: 33.1KiB OtherConsumption: 0B ExternalConsumption: 0B TargetConsumersConsumption: 100MiB ResultingConsumersConsumption: 100MiB Coefficient: 0.90625 2025-03-31T16:38:48.528949Z node 9 :MEMORY_CONTROLLER INFO: Consumer SharedCache state: Consumption: 0B Limit: 94.4MiB Min: 40MiB Max: 100MiB 2025-03-31T16:38:48.529001Z node 9 :MEMORY_CONTROLLER INFO: Consumer MemTable state: Consumption: 33.1KiB Limit: 5.63MiB Min: 2MiB Max: 6MiB 2025-03-31T16:38:48.529038Z node 9 :MEMORY_CONTROLLER INFO: Consumer QueryExecution state: Consumption: 1.94MiB Limit: 999MiB 2025-03-31T16:38:48.529143Z node 9 :TABLET_SAUSAGECACHE INFO: Limit memory consumer with 94.4MiB |98.2%| [TM] {RESULT} ydb/core/memory_controller/ut/unittest |98.2%| [TA] $(B)/ydb/core/tx/datashard/ut_minikql/test-results/unittest/{meta.json ... results_accumulator.log} >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateBinding [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendListBindings >> TDqPqRdReadActorTests::Backpressure [GOOD] >> TDqPqRdReadActorTests::RowDispatcherIsRestarted2 |98.2%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_minikql/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendListBindings [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeBinding >> CoordinatorVolatile::MediatorReconnectPlanRace [GOOD] >> CoordinatorVolatile::CoordinatorMigrateUncommittedVolatileTx |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test >> TDqPqRdReadActorTests::RowDispatcherIsRestarted2 [GOOD] >> DataShardStats::CollectStatsForSeveralParts [GOOD] >> DataShardStats::NoData >> TDqPqRdReadActorTests::TwoPartitionsRowDispatcherIsRestarted >> KqpTpch::Query19 [GOOD] >> KqpTpch::Query20 >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeBinding [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyBinding >> MediatorTest::OneCoordinatorResendTxNotLost [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32-2147483647-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32-2147483647-False] >> TestPurecalcFilter::PartialPush [GOOD] >> TDqPqRdReadActorTests::TwoPartitionsRowDispatcherIsRestarted [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyBinding [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDeleteBinding >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32-2147483647-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-0-True] >> DataShardReplication::ApplyChangesWithConcurrentTx [GOOD] >> TDqPqRdReadActorTests::IgnoreMessageIfNoSessions >> TestPurecalcFilter::CompilationValidation >> TDqPqRdReadActorTests::IgnoreMessageIfNoSessions [GOOD] |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDeleteBinding [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateQuery |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/common/py3test >> test_unknown_data_source.py::TestUnknownDataSource::test_should_fail_unknown_data_source[v1-client0] [GOOD] |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test >> test_alloc_default.py::TestAlloc::test_alloc_and_free[kikimr0] >> TDqPqRdReadActorTests::MetadataFields [GOOD] |98.3%| [TM] {RESULT} ydb/tests/fq/common/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/mediator/ut/unittest >> MediatorTest::OneCoordinatorResendTxNotLost [GOOD] Test command err: 2025-03-31T16:37:52.589318Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:37:52.589727Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:37:52.589947Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000562/r3tmp/tmpVEqt8B/pdisk_1.dat 2025-03-31T16:37:53.177404Z node 1 :TX_MEDIATOR INFO: tablet# 72057594047365120 TTxSchema Complete 2025-03-31T16:37:53.178112Z node 1 :TX_MEDIATOR INFO: tablet# 72057594047365120 CreateTxInit wait TEvMediatorConfiguration for switching to StateWork from external 2025-03-31T16:37:53.178744Z node 1 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 server# [1:617:2532] connected 2025-03-31T16:37:53.178859Z node 1 :TX_MEDIATOR NOTICE: tablet# 72057594047365120 actor# [1:600:2522] HANDLE TEvMediatorConfiguration Version# 1 2025-03-31T16:37:53.183783Z node 1 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 version# 1 TTxConfigure Complete 2025-03-31T16:37:53.184025Z node 1 :TX_MEDIATOR INFO: tablet# 72057594047365120 CreateTxInit Complete ... waiting for watcher to connect 2025-03-31T16:37:53.184793Z node 1 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 server# [1:623:2537] connected 2025-03-31T16:37:53.184914Z node 1 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 FORWARD Watch from# [1:621:2536] to# [1:619:2534] ExecQueue 2025-03-31T16:37:53.185029Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [1:619:2534] MediatorId# 72057594047365120 HANDLE TEvGranularWatch from# [1:621:2536] bucket# 0 ... 
waiting for watcher to connect (done) 2025-03-31T16:37:53.191728Z node 1 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 FORWARD Watch from# [1:621:2536] to# [1:619:2534] ExecQueue 2025-03-31T16:37:53.191818Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [1:619:2534] MediatorId# 72057594047365120 HANDLE TEvWatch 2025-03-31T16:37:53.191867Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [1:619:2534] MediatorId# 72057594047365120 SEND TEvWatchBucket to# [1:620:2535] bucket.ActiveActor 2025-03-31T16:37:53.191947Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [1:620:2535] Mediator# 72057594047365120 HANDLE {TEvWatchBucket Source# [1:621:2536]} 2025-03-31T16:37:53.192109Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [1:620:2535] Mediator# 72057594047365120 SEND to# [1:621:2536] {TEvUpdate Mediator# 72057594047365120 Bucket# 0 TimeBarrier# 0} 2025-03-31T16:37:53.204786Z node 1 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 server# [1:627:2541] connected 2025-03-31T16:37:53.204987Z node 1 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 HANDLE EvCoordinatorSync 2025-03-31T16:37:53.205041Z node 1 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 SEND EvCoordinatorSyncResult to# [1:625:2539] Cookie# 1 CompleteStep# 0 LatestKnownStep# 0 SubjectiveTime# 4 Coordinator# 72057594046316545 2025-03-31T16:37:53.205440Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [1:619:2534] MediatorId# 72057594047365120 HANDLE TEvCommitStep {TMediateStep From 0 To# 1000Steps: {{TCoordinatorStep step# 1000 PrevStep# 0}}} marker# M1 2025-03-31T16:37:53.205504Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [1:619:2534] MediatorId# 72057594047365120 SEND TEvStepPlanComplete to# [1:620:2535] bucket.ActiveActor step# 1000 2025-03-31T16:37:53.205572Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [1:620:2535] Mediator# 72057594047365120 HANDLE {TEvStepPlanComplete step# 1000} 2025-03-31T16:37:53.205809Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [1:620:2535] Mediator# 72057594047365120 SEND to# [1:621:2536] {TEvUpdate Mediator# 72057594047365120 Bucket# 0 TimeBarrier# 1000} ... waiting for blocked plan step 2025-03-31T16:37:53.225653Z node 1 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 HANDLE EvCoordinatorStep coordinator# 72057594046316545 step# 1010 2025-03-31T16:37:53.225718Z node 1 :TX_MEDIATOR INFO: Coordinator step: Mediator [72057594047365120], Coordinator [72057594046316545], step# [1010] transactions [1] 2025-03-31T16:37:53.225823Z node 1 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 SEND EvCommitStep to# [1:619:2534] ExecQueue {TMediateStep From 1000 To# 1010Steps: {{TCoordinatorStep step# 1010 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 1 AckTo# [1:625:2539]}}TabletsToTransaction: {{tablet# 72057594047365121 txid# 1}}}}} marker# M0 2025-03-31T16:37:53.225935Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [1:619:2534] MediatorId# 72057594047365120 HANDLE TEvCommitStep {TMediateStep From 1000 To# 1010Steps: {{TCoordinatorStep step# 1010 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 1 AckTo# [1:625:2539]}}TabletsToTransaction: {{tablet# 72057594047365121 txid# 1}}}}} marker# M1 2025-03-31T16:37:53.225992Z node 1 :TX_MEDIATOR_PRIVATE DEBUG: Mediator exec queue [72057594047365120], step# 1010 for tablet [72057594047365121]. 
TxIds: txid# 1 marker# M2 2025-03-31T16:37:53.226041Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [1:619:2534] MediatorId# 72057594047365120 SEND Ev to# [1:620:2535] step# 1010 forTablet# 72057594047365121 txid# 1 marker# M3 2025-03-31T16:37:53.226100Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [1:619:2534] MediatorId# 72057594047365120 SEND TEvStepPlanComplete to# [1:620:2535] bucket.ActiveActor step# 1010 2025-03-31T16:37:53.226179Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [1:620:2535] Mediator# 72057594047365120 HANDLE {TEvCommitTabletStep step# 1010 TabletId# 72057594047365121 Transactions {{TTx Moderator# 0 txid# 1 AckTo# [1:625:2539]}}} marker# M4 2025-03-31T16:37:53.226426Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [1:620:2535] Mediator# 72057594047365120 HANDLE {TEvStepPlanComplete step# 1010} 2025-03-31T16:37:53.227690Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [1:620:2535] Mediator# 72057594047365120 HANDLE NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594047365121 Status: OK ServerId: [1:649:2553] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-03-31T16:37:53.227810Z node 1 :TX_MEDIATOR_PRIVATE DEBUG: Send from 72057594047365120 to tablet 72057594047365121, step# 1010, txid# 1, marker M5lu 2025-03-31T16:37:53.227861Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [1:620:2535] Mediator# 72057594047365120 SEND to# 72057594047365121 {TEvPlanStep step# 1010 MediatorId# 72057594047365120 TabletID 72057594047365121} ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NTxMediator::NTestSuiteMediatorTest::TTargetTablet ... waiting for blocked plan step (done) ... waiting for no pending commands 2025-03-31T16:37:53.228331Z node 1 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 FORWARD Watch from# [1:621:2536] to# [1:619:2534] ExecQueue 2025-03-31T16:37:53.228411Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [1:619:2534] MediatorId# 72057594047365120 HANDLE TEvGranularWatchModify from# [1:621:2536] bucket# 0 ... waiting for no pending commands (done) ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NTxMediator::NTestSuiteMediatorTest::TTargetTablet ... waiting for watch updates 2025-03-31T16:37:53.228705Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [1:620:2535] Mediator# 72057594047365120 HANDLE {TEvPlanStepAccepted TabletId# 72057594047365121 step# 1010} 2025-03-31T16:37:53.228748Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [1:620:2535] Mediator# 72057594047365120 SEND to# [1:625:2539] {TEvPlanStepAck TabletId# 72057594047365121 step# 1010 txid# 1} 2025-03-31T16:37:53.228822Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [1:620:2535] Mediator# 72057594047365120 SEND to# [1:621:2536] {TEvUpdate Mediator# 72057594047365120 Bucket# 0 TimeBarrier# 1010} ... waiting for watch updates (done) 2025-03-31T16:37:57.252824Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:101:2147], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:37:57.253178Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:37:57.253256Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000562/r3tmp/tmpNj1SPN/pdisk_1.dat 2025-03-31T16:37:57.603943Z node 2 :TX_MEDIATOR INFO: tablet# 72057594047365120 TTxSchema Complete 2025-03-31T16:37:57.604789Z node 2 :TX_MEDIATOR INFO: tablet# 72057594047365120 CreateTxInit wait TEvMediatorConfiguration for switching to StateWork from external 2025-03-31T16:37:57.605490Z node 2 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 server# [2:618:2533] connected 2025-03-31T16:37:57.605656Z node 2 :TX_MEDIATOR NOTICE: tablet# 72057594047365120 actor# [2:601:2523] HANDLE TEvMediatorConfiguration Version# 1 2025-03-31T16:37:57.606070Z node 2 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 version# 1 TTxConfigure Complete 2025-03-31T16:37:57.606233Z node 2 :TX_MEDIATOR INFO: tablet# 72057594047365120 CreateTxInit Complete ... waiting for watcher to connect 2025-03-31T16:37:57.606716Z node 2 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 server# [2:624:2538] connected ... waiting for watcher to connect (done) 2025-03-31T16:37:57.607024Z node 2 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 FORWARD Watch from# [2:622:2537] to# [2:620:2535] ExecQueue 2025-03-31T16:37:57.607109Z node 2 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [2:620:2535] MediatorId# 72057594047365120 HANDLE TEvGranularWatch from# [2:622:2537] bucket# 0 2025-03-31T16:37:57.607263Z node 2 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 FORWARD Watch from# [2:622:2537] to# [2:620:2535] ExecQueue 2025-03-31T16:37:57.607318Z node 2 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [2:620:2535] MediatorId# 72057594047365120 HANDLE TEvWatch 2025-03-31T16:37:57.607365Z node 2 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [2:620:2535] MediatorId# 72057594047365120 SEND TEvWatchBucket to# [2:621:2536] bucket.ActiveActor 2025-03-31T16:37:57.607437Z node 2 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [2:621:2536] Mediator# 72057594047365120 HANDLE {TEvWatchBucket Source# [2:622:2537]} 2025-03-31T16:37:57.607530Z node 2 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [2:621:2536] Mediator# 72057594047365120 SEND to# [2:622:2537] {TEvUpdate Mediator# 72057594047365120 Bucket# 0 TimeBarrier# 0} ... 
waiting for no pending commands 2025-03-31T16:37:57.630698Z node 2 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 FORWARD Watch from# [2:622:2537] to# [2:620:2535] ExecQueue 2025-03-31T16:37:57.630771Z node 2 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [2:620:2535] MediatorId# 72057594047365120 HANDLE TEvGranularWatchModify from# [2:622:2537] bucket# 0 2025-03-31T16:37:57.630898Z node 2 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 FORWARD Watch from# [2:622:2537] to# [2:620:2535] ExecQueue 2025-03-31T16:37:57.630936Z node 2 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [2:620:2535] MediatorId# 72057594047365120 HANDLE TEvGranularWatchModify from# [2:622:2537] bucket# 0 2025-03-31T16:37:57.630991Z node 2 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 FORWARD Watch from# [2:622:2537] to# [2:620:2535] ExecQueue 2025-03-31T16:37:57.631021Z node 2 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [2:620:2535] MediatorId# 72057594047365120 HANDLE TEvGranularWatchModify from# [2:622:2537] bucket# 0 ... waiti ... ket# 0 ... waiting for no pending commands (done) 2025-03-31T16:38:53.476902Z node 12 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 server# [12:664:2560] connected 2025-03-31T16:38:53.477002Z node 12 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 HANDLE EvCoordinatorSync 2025-03-31T16:38:53.477051Z node 12 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 SEND EvCoordinatorSyncResult to# [12:662:2558] Cookie# 1 CompleteStep# 0 LatestKnownStep# 0 SubjectiveTime# 3 Coordinator# 72057594046316545 2025-03-31T16:38:53.477405Z node 12 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 server# [12:667:2563] connected 2025-03-31T16:38:53.477491Z node 12 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 HANDLE EvCoordinatorSync 2025-03-31T16:38:53.477539Z node 12 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 SEND EvCoordinatorSyncResult to# [12:665:2561] Cookie# 1 CompleteStep# 0 LatestKnownStep# 0 SubjectiveTime# 3 Coordinator# 72057594046316546 2025-03-31T16:38:53.477836Z node 12 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 HANDLE EvCoordinatorStep coordinator# 72057594046316545 step# 1010 2025-03-31T16:38:53.477884Z node 12 :TX_MEDIATOR INFO: Coordinator step: Mediator [72057594047365120], Coordinator [72057594046316545], step# [1010] transactions [1] 2025-03-31T16:38:53.477960Z node 12 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 HANDLE EvCoordinatorStep coordinator# 72057594046316546 step# 1010 2025-03-31T16:38:53.477991Z node 12 :TX_MEDIATOR INFO: Coordinator step: Mediator [72057594047365120], Coordinator [72057594046316546], step# [1010] transactions [1] 2025-03-31T16:38:53.478099Z node 12 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 SEND EvCommitStep to# [12:619:2534] ExecQueue {TMediateStep From 0 To# 1010Steps: {{TCoordinatorStep step# 1010 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 1 AckTo# [12:662:2558]}}TabletsToTransaction: {{tablet# 72057594047365121 txid# 1}{tablet# 72057594047365122 txid# 1}}}{TCoordinatorStep step# 1010 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 2 AckTo# [12:665:2561]}}TabletsToTransaction: {{tablet# 72057594047365121 txid# 2}{tablet# 72057594047365122 txid# 2}}}}} marker# M0 2025-03-31T16:38:53.478235Z node 12 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [12:619:2534] MediatorId# 72057594047365120 HANDLE TEvCommitStep {TMediateStep From 0 To# 1010Steps: {{TCoordinatorStep step# 1010 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 1 AckTo# [12:662:2558]}}TabletsToTransaction: {{tablet# 72057594047365121 txid# 1}{tablet# 72057594047365122 txid# 1}}}{TCoordinatorStep step# 1010 PrevStep# 
0Transactions: {{TTx Moderator# 0 txid# 2 AckTo# [12:665:2561]}}TabletsToTransaction: {{tablet# 72057594047365121 txid# 2}{tablet# 72057594047365122 txid# 2}}}}} marker# M1 2025-03-31T16:38:53.478298Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: Mediator exec queue [72057594047365120], step# 1010 for tablet [72057594047365121]. TxIds: txid# 1 txid# 2 marker# M2 2025-03-31T16:38:53.478354Z node 12 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [12:619:2534] MediatorId# 72057594047365120 SEND Ev to# [12:620:2535] step# 1010 forTablet# 72057594047365121 txid# 1 txid# 2 marker# M3 2025-03-31T16:38:53.478405Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: Mediator exec queue [72057594047365120], step# 1010 for tablet [72057594047365122]. TxIds: txid# 1 txid# 2 marker# M2 2025-03-31T16:38:53.478440Z node 12 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [12:619:2534] MediatorId# 72057594047365120 SEND Ev to# [12:620:2535] step# 1010 forTablet# 72057594047365122 txid# 1 txid# 2 marker# M3 2025-03-31T16:38:53.478484Z node 12 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [12:619:2534] MediatorId# 72057594047365120 SEND TEvStepPlanComplete to# [12:620:2535] bucket.ActiveActor step# 1010 2025-03-31T16:38:53.478702Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [12:620:2535] Mediator# 72057594047365120 HANDLE {TEvCommitTabletStep step# 1010 TabletId# 72057594047365121 Transactions {{TTx Moderator# 0 txid# 1 AckTo# [12:662:2558]}{TTx Moderator# 0 txid# 2 AckTo# [12:665:2561]}}} marker# M4 2025-03-31T16:38:53.478871Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [12:620:2535] Mediator# 72057594047365120 HANDLE {TEvCommitTabletStep step# 1010 TabletId# 72057594047365122 Transactions {{TTx Moderator# 0 txid# 1 AckTo# [12:662:2558]}{TTx Moderator# 0 txid# 2 AckTo# [12:665:2561]}}} marker# M4 2025-03-31T16:38:53.479055Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [12:620:2535] Mediator# 72057594047365120 HANDLE {TEvStepPlanComplete step# 1010} 2025-03-31T16:38:53.479774Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [12:620:2535] Mediator# 72057594047365120 HANDLE NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594047365121 Status: OK ServerId: [12:673:2567] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-03-31T16:38:53.479841Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: Send from 72057594047365120 to tablet 72057594047365121, step# 1010, txid# 1, marker M5lu 2025-03-31T16:38:53.479881Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: Send from 72057594047365120 to tablet 72057594047365121, step# 1010, txid# 2, marker M5lu 2025-03-31T16:38:53.479944Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [12:620:2535] Mediator# 72057594047365120 SEND to# 72057594047365121 {TEvPlanStep step# 1010 MediatorId# 72057594047365120 TabletID 72057594047365121} ... 
blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NTxMediator::NTestSuiteMediatorTest::TTargetTablet 2025-03-31T16:38:53.480333Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [12:620:2535] Mediator# 72057594047365120 HANDLE NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594047365122 Status: OK ServerId: [12:674:2568] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-03-31T16:38:53.480380Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: Send from 72057594047365120 to tablet 72057594047365122, step# 1010, txid# 1, marker M5lu 2025-03-31T16:38:53.480412Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: Send from 72057594047365120 to tablet 72057594047365122, step# 1010, txid# 2, marker M5lu 2025-03-31T16:38:53.480446Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [12:620:2535] Mediator# 72057594047365120 SEND to# 72057594047365122 {TEvPlanStep step# 1010 MediatorId# 72057594047365120 TabletID 72057594047365122} ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NTxMediator::NTestSuiteMediatorTest::TTargetTablet 2025-03-31T16:38:53.491979Z node 12 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 server# [12:677:2571] connected 2025-03-31T16:38:53.492163Z node 12 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 HANDLE EvCoordinatorSync 2025-03-31T16:38:53.492222Z node 12 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 SEND EvCoordinatorSyncResult to# [12:675:2569] Cookie# 2 CompleteStep# 1010 LatestKnownStep# 1010 SubjectiveTime# 4 Coordinator# 72057594046316546 2025-03-31T16:38:53.492480Z node 12 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 HANDLE EvCoordinatorStep coordinator# 72057594046316546 step# 1010 2025-03-31T16:38:53.492530Z node 12 :TX_MEDIATOR INFO: Coordinator step: Mediator [72057594047365120], Coordinator [72057594046316546], step# [1010] transactions [1] 2025-03-31T16:38:53.492617Z node 12 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 SEND EvRequestLostAcks to# [12:619:2534] ExecQueue step {TCoordinatorStep step# 1010 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 2 AckTo# [0:0:0]}}TabletsToTransaction: {{tablet# 72057594047365121 txid# 2}{tablet# 72057594047365122 txid# 2}}} 2025-03-31T16:38:53.492732Z node 12 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [12:619:2534] MediatorId# 72057594047365120 HANDLE TEvRequestLostAcks {TCoordinatorStep step# 1010 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 2 AckTo# [0:0:0]}}TabletsToTransaction: {{tablet# 72057594047365121 txid# 2}{tablet# 72057594047365122 txid# 2}}} AckTo# [12:675:2569] 2025-03-31T16:38:53.492780Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: Mediator exec queue [72057594047365120], step# 1010 for tablet [72057594047365121]. TxIds: txid# 2 marker# M2 2025-03-31T16:38:53.492834Z node 12 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [12:619:2534] MediatorId# 72057594047365120 SEND Ev to# [12:620:2535] step# 1010 forTablet# 72057594047365121 txid# 2 marker# M3 2025-03-31T16:38:53.492911Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: Mediator exec queue [72057594047365120], step# 1010 for tablet [72057594047365122]. 
TxIds: txid# 2 marker# M2 2025-03-31T16:38:53.492973Z node 12 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [12:619:2534] MediatorId# 72057594047365120 SEND Ev to# [12:620:2535] step# 1010 forTablet# 72057594047365122 txid# 2 marker# M3 2025-03-31T16:38:53.493080Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [12:620:2535] Mediator# 72057594047365120 HANDLE {TEvOoOTabletStep step# 1010 TabletId# 72057594047365121 Transactions {{TTx Moderator# 0 txid# 2 AckTo# [12:675:2569]}}} 2025-03-31T16:38:53.493205Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [12:620:2535] Mediator# 72057594047365120 HANDLE {TEvOoOTabletStep step# 1010 TabletId# 72057594047365122 Transactions {{TTx Moderator# 0 txid# 2 AckTo# [12:675:2569]}}} 2025-03-31T16:38:53.508599Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [12:620:2535] Mediator# 72057594047365120 HANDLE NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594047365121 ClientId: [12:669:2565] ServerId: [12:673:2567] } 2025-03-31T16:38:53.554575Z node 12 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:38:53.579417Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [12:620:2535] Mediator# 72057594047365120 HANDLE NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594047365121 Status: OK ServerId: [12:702:2584] Leader: 1 Dead: 0 Generation: 3 VersionInfo: } 2025-03-31T16:38:53.579523Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: Send from 72057594047365120 to tablet 72057594047365121, step# 1010, txid# 1, marker M5lu 2025-03-31T16:38:53.579571Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: Send from 72057594047365120 to tablet 72057594047365121, step# 1010, txid# 2, marker M5lu 2025-03-31T16:38:53.579640Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [12:620:2535] Mediator# 72057594047365120 SEND to# 72057594047365121 {TEvPlanStep step# 1010 MediatorId# 72057594047365120 TabletID 72057594047365121} ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NTxMediator::NTestSuiteMediatorTest::TTargetTablet 2025-03-31T16:38:53.591367Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [12:620:2535] Mediator# 72057594047365120 HANDLE NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594047365122 ClientId: [12:670:2566] ServerId: [12:674:2568] } 2025-03-31T16:38:53.612002Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [12:620:2535] Mediator# 72057594047365120 HANDLE NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594047365122 Status: OK ServerId: [12:723:2594] Leader: 1 Dead: 0 Generation: 3 VersionInfo: } 2025-03-31T16:38:53.612117Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: Send from 72057594047365120 to tablet 72057594047365122, step# 1010, txid# 1, marker M5lu 2025-03-31T16:38:53.612159Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: Send from 72057594047365120 to tablet 72057594047365122, step# 1010, txid# 2, marker M5lu 2025-03-31T16:38:53.612206Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [12:620:2535] Mediator# 72057594047365120 SEND to# 72057594047365122 {TEvPlanStep step# 1010 MediatorId# 72057594047365120 TabletID 72057594047365122} ... 
blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NTxMediator::NTestSuiteMediatorTest::TTargetTablet 2025-03-31T16:38:53.639536Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:38:53.639727Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:38:53.651570Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected |98.3%| [TM] {RESULT} ydb/core/tx/mediator/ut/unittest >> TDqPqRdReadActorTests::IgnoreCoordinatorResultIfWrongState [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateQuery [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListQueries ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_replication/unittest >> DataShardReplication::ApplyChangesWithConcurrentTx [GOOD] Test command err: 2025-03-31T16:37:59.472921Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:37:59.473206Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:37:59.473402Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0007c6/r3tmp/tmpd2vLXC/pdisk_1.dat 2025-03-31T16:38:00.176780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:38:00.232705Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:38:00.289235Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-31T16:38:00.291616Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:38:00.295582Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:38:00.295880Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-03-31T16:38:00.310000Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:38:00.417150Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-03-31T16:38:00.417245Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-03-31T16:38:00.423382Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-03-31T16:38:00.632794Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_GLOBAL } } } } ExecTimeoutPeriod: 18446744073709551615 2025-03-31T16:38:00.632901Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T16:38:00.633491Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-03-31T16:38:00.633619Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-03-31T16:38:00.633998Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T16:38:00.634176Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 
72057594046382081 RedirectRequired# true 2025-03-31T16:38:00.634374Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-03-31T16:38:00.634711Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvClientConnected 2025-03-31T16:38:00.638358Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:38:00.640201Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-03-31T16:38:00.640301Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-03-31T16:38:00.699462Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:38:00.700924Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:38:00.701554Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-31T16:38:00.701773Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:38:00.773830Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:38:00.774708Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:38:00.774843Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:38:00.777217Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:38:00.777350Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:38:00.777407Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:38:00.779388Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:38:00.779581Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:38:00.779683Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-31T16:38:00.790550Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:38:00.829707Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T16:38:00.831017Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:38:00.831202Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-31T16:38:00.831240Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:38:00.831288Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T16:38:00.831319Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:38:00.831556Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient 
[1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:38:00.831615Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:38:00.833075Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T16:38:00.833168Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T16:38:00.833293Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:38:00.833357Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:38:00.833459Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-31T16:38:00.833491Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-31T16:38:00.833526Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-31T16:38:00.833554Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:38:00.833593Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:38:00.833696Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:38:00.833740Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:38:00.833781Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-31T16:38:00.833871Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-03-31T16:38:00.833909Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T16:38:00.834033Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:38:00.834331Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-31T16:38:00.834385Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T16:38:00.835055Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T16:38:00.835141Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-31T16:38:00.835211Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-31T16:38:00.835268Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-03-31T16:38:00.835322Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-31T16:38:00.835632Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-31T16:38:00.835662Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-31T16:38:00.835691Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-31T16:38:00.835719Z 
node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-31T16:38:00.835761Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-31T16:38:00.835791Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-31T16:38:00.835817Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-31T16:38:00.835843Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-31T16:38:00.835863Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-31T16:38:00.839036Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-31T16:38:00.839121Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 720751862 ... 6:38:53.867743Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [8:870:2702], Recipient [8:665:2570]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 1500 TxId: 18446744073709551615 } LockTxId: 281474976715660 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 8 TotalRowsLimit: 1001 LockMode: OPTIMISTIC RangesSize: 1 2025-03-31T16:38:53.867947Z node 8 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-03-31T16:38:53.868061Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037888 on unit CheckRead 2025-03-31T16:38:53.868191Z node 8 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037888 is Executed 2025-03-31T16:38:53.868268Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037888 executing on unit CheckRead 2025-03-31T16:38:53.868329Z node 8 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-03-31T16:38:53.868375Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037888 on unit BuildAndWaitDependencies 2025-03-31T16:38:53.868431Z node 8 :TX_DATASHARD TRACE: Activated operation [0:2] at 72075186224037888 2025-03-31T16:38:53.868485Z node 8 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037888 is Executed 2025-03-31T16:38:53.868511Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-03-31T16:38:53.868533Z node 8 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037888 to execution unit ExecuteRead 2025-03-31T16:38:53.868563Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037888 on unit ExecuteRead 2025-03-31T16:38:53.868710Z node 8 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 1500 TxId: 18446744073709551615 } LockTxId: 281474976715660 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 8 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2025-03-31T16:38:53.869064Z node 8 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 281474976715660, 
counter# 0 for [OwnerId: 72057594046644480, LocalPathId: 2] 2025-03-31T16:38:53.869172Z node 8 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v1500/18446744073709551615 2025-03-31T16:38:53.869228Z node 8 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[8:870:2702], 0} after executionsCount# 1 2025-03-31T16:38:53.869294Z node 8 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[8:870:2702], 0} sends rowCount# 1, bytes# 32, quota rows left# 1000, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-03-31T16:38:53.869391Z node 8 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[8:870:2702], 0} finished in read 2025-03-31T16:38:53.869487Z node 8 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037888 is Executed 2025-03-31T16:38:53.869515Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037888 executing on unit ExecuteRead 2025-03-31T16:38:53.869542Z node 8 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037888 to execution unit CompletedOperations 2025-03-31T16:38:53.869569Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037888 on unit CompletedOperations 2025-03-31T16:38:53.869614Z node 8 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037888 is Executed 2025-03-31T16:38:53.869636Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037888 executing on unit CompletedOperations 2025-03-31T16:38:53.869666Z node 8 :TX_DATASHARD TRACE: Execution plan for [0:2] at 72075186224037888 has finished 2025-03-31T16:38:53.869744Z node 8 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-03-31T16:38:53.869889Z node 8 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-03-31T16:38:53.870186Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [8:61:2108], Recipient [8:665:2570]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715660 LockNode: 8 Status: STATUS_SUBSCRIBED 2025-03-31T16:38:53.871197Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [8:870:2702], Recipient [8:665:2570]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-03-31T16:38:53.871265Z node 8 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 11 } } 2025-03-31T16:38:53.874904Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [8:874:2706], Recipient [8:665:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:38:53.875001Z node 8 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:38:53.875069Z node 8 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [8:873:2705], serverId# [8:874:2706], sessionId# [0:0:0] 2025-03-31T16:38:53.875367Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 269549570, Sender [8:872:2704], Recipient [8:665:2570]: NKikimrTxDataShard.TEvApplyReplicationChanges TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Source: "my-source" Changes { SourceOffset: 1 WriteTxId: 0 Key: "\001\000\004\000\000\000\001\000\000\000" Upsert { Tags: 2 Data: "\001\000\004\000\000\000\025\000\000\000" } } 2025-03-31T16:38:53.875558Z node 8 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v1000/281474976715657 IncompleteEdge# v{min} UnprotectedReadEdge# 
v1500/18446744073709551615 ImmediateWriteEdge# v1000/18446744073709551615 ImmediateWriteEdgeReplied# v1000/18446744073709551615 2025-03-31T16:38:53.875695Z node 8 :TX_DATASHARD TRACE: Lock 281474976715660 marked broken at v{min} 2025-03-31T16:38:53.887149Z node 8 :TX_DATASHARD DEBUG: Waiting for PlanStep# 1501 from mediator time cast 2025-03-31T16:38:53.888119Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 270270977, Sender [8:24:2071], Recipient [8:665:2570]: {TEvNotifyPlanStep TabletId# 72075186224037888 PlanStep# 1501} 2025-03-31T16:38:53.888183Z node 8 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvNotifyPlanStep 2025-03-31T16:38:53.888244Z node 8 :TX_DATASHARD DEBUG: Notified by mediator time cast with PlanStep# 1501 at tablet 72075186224037888 2025-03-31T16:38:53.888312Z node 8 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:38:54.100119Z node 8 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqph3z608y2n2rv0ng5a352r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=8&id=M2E1MTE4NjItOTg1NDI4OTgtZjYzYzdjOTctZmY2MWIyYTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T16:38:54.102213Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [8:897:2723], Recipient [8:665:2570]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 1500 TxId: 18446744073709551615 } LockTxId: 281474976715660 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 8 TotalRowsLimit: 1001 LockMode: OPTIMISTIC RangesSize: 1 2025-03-31T16:38:54.102474Z node 8 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-03-31T16:38:54.102607Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit CheckRead 2025-03-31T16:38:54.102734Z node 8 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-03-31T16:38:54.102789Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit CheckRead 2025-03-31T16:38:54.102838Z node 8 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-03-31T16:38:54.102879Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit BuildAndWaitDependencies 2025-03-31T16:38:54.102929Z node 8 :TX_DATASHARD TRACE: Activated operation [0:3] at 72075186224037888 2025-03-31T16:38:54.102981Z node 8 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-03-31T16:38:54.103010Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-03-31T16:38:54.103033Z node 8 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit ExecuteRead 2025-03-31T16:38:54.103055Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit ExecuteRead 2025-03-31T16:38:54.103248Z node 8 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 1500 TxId: 18446744073709551615 } LockTxId: 281474976715660 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 8 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2025-03-31T16:38:54.103559Z 
node 8 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 281474976715660, counter# 18446744073709551612 for [OwnerId: 72057594046644480, LocalPathId: 2] 2025-03-31T16:38:54.103629Z node 8 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v1500/18446744073709551615 2025-03-31T16:38:54.103691Z node 8 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[8:897:2723], 0} after executionsCount# 1 2025-03-31T16:38:54.103749Z node 8 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[8:897:2723], 0} sends rowCount# 1, bytes# 32, quota rows left# 1000, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-03-31T16:38:54.103843Z node 8 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[8:897:2723], 0} finished in read 2025-03-31T16:38:54.103928Z node 8 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-03-31T16:38:54.103957Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit ExecuteRead 2025-03-31T16:38:54.104003Z node 8 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit CompletedOperations 2025-03-31T16:38:54.104033Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit CompletedOperations 2025-03-31T16:38:54.104088Z node 8 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-03-31T16:38:54.104111Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit CompletedOperations 2025-03-31T16:38:54.104144Z node 8 :TX_DATASHARD TRACE: Execution plan for [0:3] at 72075186224037888 has finished 2025-03-31T16:38:54.104203Z node 8 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-03-31T16:38:54.104327Z node 8 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-03-31T16:38:54.105252Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [8:897:2723], Recipient [8:665:2570]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-03-31T16:38:54.105320Z node 8 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } 2025-03-31T16:38:54.108161Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [8:61:2108], Recipient [8:665:2570]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715660 LockNode: 8 Status: STATUS_NOT_FOUND { items { uint32_value: 1 } items { uint32_value: 11 } } |98.3%| [TM] {RESULT} ydb/core/tx/datashard/ut_replication/unittest >> KafkaProtocol::BalanceScenario [GOOD] >> KafkaProtocol::OffsetCommitAndFetchScenario >> test_cms_erasure.py::TestDegradedGroupBlock42Keep::test_no_degraded_groups_after_shutdown >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListQueries [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeQuery >> TDqPqRdReadActorTests::IgnoreCoordinatorResultIfWrongState [GOOD] |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test >> test_schemeshard_limits.py::TestSchemeShardLimitsCase0::test_effective_acls_are_too_large >> TCreateAndDropViewTest::DropSameViewTwice [GOOD] >> TCreateAndDropViewTest::DropViewIfExists >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeQuery [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendGetQueryStatus >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-0-True] [GOOD] >> 
TDqPqReadActorTest::TestReadFromTopic >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-0-False] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-0-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-4294967295-True] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendGetQueryStatus [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyQuery >> CoordinatorVolatile::CoordinatorMigrateUncommittedVolatileTx [GOOD] >> CoordinatorVolatile::CoordinatorRestartWithEnqueuedVolatileStep >> KqpTpch::Query20 [GOOD] >> KqpTpch::Query21 >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyQuery [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDeleteQuery >> QuoterWithKesusTest::UpdatesCountersForParentResources [GOOD] >> QuoterWithKesusTest::CanDeleteResourceWhenUsingIt >> test_schemeshard_limits.py::TestSchemeShardLimitsCase1::test_too_large_acls >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDeleteQuery [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendControlQuery |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test >> TestPurecalcFilter::CompilationValidation [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendControlQuery [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendGetResultData |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test >> TestRawParser::Simple >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendGetResultData [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListJobs >> TestRawParser::Simple [GOOD] >> TestRawParser::ManyValues >> KafkaProtocol::OffsetCommitAndFetchScenario [GOOD] >> KafkaProtocol::CreateTopicsScenarioWithKafkaAuth |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListJobs [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeJob >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-4294967295-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-4294967295-False] >> TestRawParser::ManyValues [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-4294967295-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64--9223372036854775808-True] >> TestRawParser::TypeKindsValidation >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeJob [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateConnection >> TestRawParser::TypeKindsValidation [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateConnection [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateConnectionWithServiceAccount >> test_config_with_metadata.py::TestKiKiMRWithMetadata::test_cluster_is_operational_with_metadata [GOOD] >> TDqPqReadActorTest::TestReadFromTopic [GOOD] |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListConnections >> TDqPqReadActorTest::TestReadFromTopicFromNow >> CoordinatorVolatile::CoordinatorRestartWithEnqueuedVolatileStep [GOOD] >> TCreateAndDropViewTest::DropViewIfExists [FAIL] >> TCreateAndDropViewTest::DropViewInFolder >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListConnections [GOOD] >> 
TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeConnection ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/row_dispatcher/format_handler/ut/unittest >> TestRawParser::TypeKindsValidation [GOOD] Test command err: 2025-03-31T16:37:31.074061Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Create filter with id [0:0:0] 2025-03-31T16:37:31.074156Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Create purecalc filter for predicate 'where col_0 == "str1"' (filter id: [0:0:0]) 2025-03-31T16:37:31.074216Z node 1 :FQ_ROW_DISPATCHER DEBUG: TPurecalcFilter: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT _offset FROM Input where col_0 == "str1"; 2025-03-31T16:37:31.074305Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: TFilterHandler [0:0:0] : Send compile request with id 1 2025-03-31T16:37:31.074528Z node 1 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Add to compile queue request with id 1 from [1:7488013710762971619:2051] 2025-03-31T16:37:33.963207Z node 1 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [1:7488013710762971619:2051] [id 1]: Started compile request 2025-03-31T16:37:34.835236Z node 1 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [1:7488013710762971619:2051] [id 1]: Compilation completed for request 2025-03-31T16:37:34.835429Z node 1 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Compile finished for request with id 1 from [1:7488013710762971619:2051] 2025-03-31T16:37:34.835526Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Got compile response for request with id 1 2025-03-31T16:37:34.835550Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: TFilterHandler [0:0:0] : Filter compilation finished 2025-03-31T16:37:34.835611Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Create filter with id [1:0:0] 2025-03-31T16:37:34.835648Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Create purecalc filter for predicate 'where col_1 == "str2"' (filter id: [1:0:0]) 2025-03-31T16:37:34.835768Z node 1 :FQ_ROW_DISPATCHER DEBUG: TPurecalcFilter: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT _offset FROM Input where col_1 == "str2"; 2025-03-31T16:37:34.835789Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: TFilterHandler [1:0:0] : Send compile request with id 2 2025-03-31T16:37:34.839215Z node 1 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Add to compile queue request with id 2 from [1:7488013710762971619:2051] 2025-03-31T16:37:34.839282Z node 1 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [1:7488013710762971619:2051] [id 2]: Started compile request 2025-03-31T16:37:34.880077Z node 1 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [1:7488013710762971619:2051] [id 2]: Compilation completed for request 2025-03-31T16:37:34.880193Z node 1 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Compile finished for request with id 2 from [1:7488013710762971619:2051] 2025-03-31T16:37:34.882842Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Got compile response for request with id 2 2025-03-31T16:37:34.882883Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: TFilterHandler [1:0:0] : Filter compilation finished 2025-03-31T16:37:34.882916Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Create filter with id [2:0:0] 2025-03-31T16:37:34.882995Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: FilterData for 3 clients, number rows: 3 2025-03-31T16:37:34.883010Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Pass 3 rows to purecalc filter (filter id: [1:0:0]) 2025-03-31T16:37:34.883018Z node 1 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do 
filtering for 3 rows 2025-03-31T16:37:34.887847Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Add 3 rows to client [2:0:0] without filtering 2025-03-31T16:37:34.887905Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Pass 3 rows to purecalc filter (filter id: [0:0:0]) 2025-03-31T16:37:34.887912Z node 1 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 3 rows 2025-03-31T16:37:34.887980Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Remove filter with id [2:0:0] 2025-03-31T16:37:34.888032Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: FilterData for 2 clients, number rows: 1 2025-03-31T16:37:34.888043Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Pass 1 rows to purecalc filter (filter id: [1:0:0]) 2025-03-31T16:37:34.888060Z node 1 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 1 rows 2025-03-31T16:37:34.888082Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Pass 1 rows to purecalc filter (filter id: [0:0:0]) 2025-03-31T16:37:34.888087Z node 1 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 1 rows 2025-03-31T16:37:35.215130Z node 2 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Create filter with id [0:0:0] 2025-03-31T16:37:35.215154Z node 2 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Create purecalc filter for predicate 'where a1 = "str1"' (filter id: [0:0:0]) 2025-03-31T16:37:35.215182Z node 2 :FQ_ROW_DISPATCHER DEBUG: TPurecalcFilter: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT _offset FROM Input where a1 = "str1"; 2025-03-31T16:37:35.215250Z node 2 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: TFilterHandler [0:0:0] : Send compile request with id 1 2025-03-31T16:37:35.225287Z node 2 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Add to compile queue request with id 1 from [2:7488013727795342995:2051] 2025-03-31T16:37:37.938820Z node 2 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [2:7488013727795342995:2051] [id 1]: Started compile request 2025-03-31T16:37:37.975528Z node 2 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [2:7488013727795342995:2051] [id 1]: Compilation completed for request 2025-03-31T16:37:37.975648Z node 2 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Compile finished for request with id 1 from [2:7488013727795342995:2051] 2025-03-31T16:37:37.975890Z node 2 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Got compile response for request with id 1 2025-03-31T16:37:37.975915Z node 2 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: TFilterHandler [0:0:0] : Filter compilation finished 2025-03-31T16:37:37.975934Z node 2 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Create filter with id [0:0:0] 2025-03-31T16:37:38.400425Z node 3 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Create filter with id [0:0:0] 2025-03-31T16:37:38.400455Z node 3 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Create purecalc filter for predicate 'where a2 ... 50' (filter id: [0:0:0]) 2025-03-31T16:37:38.400476Z node 3 :FQ_ROW_DISPATCHER DEBUG: TPurecalcFilter: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT _offset FROM Input where a2 ... 
50; 2025-03-31T16:37:38.400518Z node 3 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: TFilterHandler [0:0:0] : Send compile request with id 1 2025-03-31T16:37:38.402898Z node 3 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Add to compile queue request with id 1 from [3:7488013737068222386:2051] 2025-03-31T16:37:41.829208Z node 3 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [3:7488013737068222386:2051] [id 1]: Started compile request 2025-03-31T16:37:41.903735Z node 3 :FQ_ROW_DISPATCHER ERROR: TPurecalcCompileActor [3:7488013737068222386:2051] [id 1]: Compilation failed for request 2025-03-31T16:37:41.903887Z node 3 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Compile finished for request with id 1 from [3:7488013737068222386:2051] 2025-03-31T16:37:41.910001Z node 3 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Got compile response for request with id 1 2025-03-31T16:37:41.910161Z node 3 :FQ_ROW_DISPATCHER ERROR: TTopicFilters: TFilterHandler [0:0:0] : Filter compilation error: {
: Error: Failed to compile purecalc program subissue: {
: Error: Compile issues: generated.sql:2:36: Error: mismatched input '.' expecting {'$', ABORT, ACTION, ADD, AFTER, ALL, ALTER, ANALYZE, AND, ANSI, ANY, ARRAY, AS, ASC, ASSUME, ASYMMETRIC, ASYNC, AT, ATTACH, ATTRIBUTES, AUTOINCREMENT, BACKUP, BATCH, COLLECTION, BEFORE, BEGIN, BERNOULLI, BETWEEN, BITCAST, BY, CALLABLE, CASCADE, CASE, CAST, CHANGEFEED, CHECK, CLASSIFIER, COLLATE, COLUMN, COLUMNS, COMMIT, COMPACT, CONDITIONAL, CONFLICT, CONNECT, CONSTRAINT, CONSUMER, COVER, CREATE, CROSS, CUBE, CURRENT, CURRENT_DATE, CURRENT_TIME, CURRENT_TIMESTAMP, DATA, DATABASE, DECIMAL, DECLARE, DEFAULT, DEFERRABLE, DEFERRED, DEFINE, DELETE, DESC, DESCRIBE, DETACH, DICT, DIRECTORY, DISABLE, DISCARD, DISTINCT, DO, DROP, EACH, ELSE, EMPTY, EMPTY_ACTION, ENCRYPTED, END, ENUM, ERASE, ERROR, ESCAPE, EVALUATE, EXCEPT, EXCLUDE, EXCLUSION, EXCLUSIVE, EXISTS, EXPLAIN, EXPORT, EXTERNAL, FAIL, FAMILY, FILTER, FIRST, FLATTEN, FLOW, FOLLOWING, FOR, FOREIGN, FROM, FULL, FUNCTION, GLOB, GLOBAL, GRANT, GROUP, GROUPING, GROUPS, HASH, HAVING, HOP, IF, IGNORE, ILIKE, IMMEDIATE, IMPORT, IN, INCREMENT, INCREMENTAL, INDEX, INDEXED, INHERITS, INITIAL, INITIALLY, INNER, INSERT, INSTEAD, INTERSECT, INTO, IS, ISNULL, JOIN, JSON_EXISTS, JSON_QUERY, JSON_VALUE, KEY, LAST, LEFT, LEGACY, LIKE, LIMIT, LIST, LOCAL, LOGIN, MANAGE, MATCH, MATCHES, MATCH_RECOGNIZE, MEASURES, MICROSECONDS, MILLISECONDS, MODIFY, NANOSECONDS, NATURAL, NEXT, NO, NOLOGIN, NOT, NOTNULL, NULL, NULLS, OBJECT, OF, OFFSET, OMIT, ON, ONE, ONLY, OPTION, OPTIONAL, OR, ORDER, OTHERS, OUTER, OVER, OWNER, PARALLEL, PARTITION, PASSING, PASSWORD, PAST, PATTERN, PER, PERMUTE, PLAN, POOL, PRAGMA, PRECEDING, PRESORT, PRIMARY, PRIVILEGES, PROCESS, QUERY, QUEUE, RAISE, RANGE, REDUCE, REFERENCES, REGEXP, REINDEX, RELEASE, REMOVE, RENAME, REPLACE, REPLICATION, RESET, RESOURCE, RESPECT, RESTART, RESTORE, RESTRICT, RESULT, RETURN, RETURNING, REVERT, REVOKE, RIGHT, RLIKE, ROLLBACK, ROLLUP, ROW, ROWS, SAMPLE, SAVEPOINT, SCHEMA, SECONDS, SEEK, SELECT, SEMI, SET, SETS, SHOW, TSKIP, SEQUENCE, SOURCE, START, STREAM, STRUCT, SUBQUERY, SUBSET, SYMBOLS, SYMMETRIC, SYNC, SYSTEM, TABLE, TABLES, TABLESAMPLE, TABLESTORE, TAGGED, TEMP, TEMPORARY, THEN, TIES, TO, TOPIC, TRANSACTION, TRANSFER, TRIGGER, TUPLE, TYPE, UNBOUNDED, UNCONDITIONAL, UNION, UNIQUE, UNKNOWN, UNMATCHED, UPDATE, UPSERT, USE, USER, USING, VACUUM, VALUES, VARIANT, VIEW, VIRTUAL, WHEN, WHERE, WINDOW, WITH, WITHOUT, WRAPPER, XOR, STRING_VALUE, ID_PLAIN, ID_QUOTED, DIGITS} } subissue: {
: Error: Final yql: PRAGMA config.flags("LLVM", "OFF"); SELECT _offset FROM Input where a2 ... 50; } } 2025-03-31T16:37:46.114246Z node 4 :FQ_ROW_DISPATCHER DEBUG: TTopicFormatHandler [json_each_row]: Add client with id [0:0:0] 2025-03-31T16:37:46.118098Z node 4 :FQ_ROW_DISPATCHER DEBUG: TTopicFormatHandler [json_each_row]: UpdateParser to new schema with size 2 2025-03-31T16:37:46.218486Z node 4 :FQ_ROW_DISPATCHER INFO: TJsonParser: Simdjson active implementation icelake 2025-03-31T16:37:46.218699Z node 4 :FQ_ROW_DISPATCHER DEBUG: TTopicFormatHandler [json_each_row]: Parser was updated on new schema with 2 columns 2025-03-31T16:37:46.219852Z node 4 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Create filter with id [0:0:0] 2025-03-31T16:37:46.219891Z node 4 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Create purecalc filter for predicate 'WHERE col_first = "str_first__large__"' (filter id: [0:0:0]) 2025-03-31T16:37:46.219916Z node 4 :FQ_ROW_DISPATCHER DEBUG: TPurecalcFilter: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT _offset FROM Input WHERE col_first = "str_first__large__"; 2025-03-31T16:37:46.219943Z node 4 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: TFilterHandler [0:0:0] : Send compile request with id 1 2025-03-31T16:37:46.255390Z node 4 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Got compile response for request with id 1 2025-03-31T16:37:46.255438Z node 4 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: TFilterHandler [0:0:0] : Filter compilation finished 2025-03-31T16:37:46.255672Z node 4 :FQ_ROW_DISPATCHER DEBUG: TTopicFormatHandler [json_each_row]: Add client with id [1:0:0] 2025-03-31T16:37:46.322037Z node 4 :FQ_ROW_DISPATCHER DEBUG: TTopicFormatHandler [json_each_row]: UpdateParser to new schema with size 3 2025-03-31T16:37:46.413905Z node 4 :FQ_ROW_DISPATCHER INFO: TJsonParser: Simdjson active implementation icelake 2025-03-31T16:37:46.414165Z node 4 :FQ_ROW_DISPATCHER DEBUG: TTopicFormatHandler [json_each_row]: Parser was updated on new schema with 3 columns 2025-03-31T16:37:46.414188Z node 4 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Create filter with id [1:0:0] 2025-03-3 ... 
09Z node 20 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 43, values: {"a1": "hello1", "a2": null, "event": "event1"} 2025-03-31T16:38:12.962625Z node 21 :FQ_ROW_DISPATCHER INFO: TJsonParser: Simdjson active implementation icelake 2025-03-31T16:38:13.146101Z node 21 :FQ_ROW_DISPATCHER INFO: TJsonParser: Simdjson active implementation icelake 2025-03-31T16:38:13.728985Z node 22 :FQ_ROW_DISPATCHER INFO: TJsonParser: Simdjson active implementation icelake 2025-03-31T16:38:13.729394Z node 22 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-03-31T16:38:13.729435Z node 22 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 42, values: {"a1": 456, "a2": 42} 2025-03-31T16:38:13.729907Z node 22 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-03-31T16:38:13.729943Z node 22 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 43, values: {"a1": "456", "a2": -42} 2025-03-31T16:38:13.730292Z node 22 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-03-31T16:38:13.730318Z node 22 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 44, values: {"a1": "str", "a2": 99999} 2025-03-31T16:38:13.730649Z node 22 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-03-31T16:38:13.730684Z node 22 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 45, values: {"a1": "456", "a2": 42, "a3": 1.11.1} 2025-03-31T16:38:14.314180Z node 23 :FQ_ROW_DISPATCHER INFO: TJsonParser: Simdjson active implementation icelake 2025-03-31T16:38:14.314437Z node 23 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-03-31T16:38:14.314478Z node 23 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 42, values: {"a1": "-456"} 2025-03-31T16:38:14.872804Z node 24 :FQ_ROW_DISPATCHER INFO: TJsonParser: Simdjson active implementation icelake 2025-03-31T16:38:14.873173Z node 24 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-03-31T16:38:14.873214Z node 24 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 42, values: {"a1": {"key": "value"}, "a2": {"key2": "value2"}} 2025-03-31T16:38:14.873715Z node 24 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-03-31T16:38:14.873752Z node 24 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 43, values: {"a1": {"key": "value", "nested": {"a": "b", "c":}}, "a2": "str"} 2025-03-31T16:38:14.874099Z node 24 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-03-31T16:38:14.874133Z node 24 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 44, values: {"a1": {"key" "value"}, "a2": "str"} 2025-03-31T16:38:15.442924Z node 25 :FQ_ROW_DISPATCHER INFO: TJsonParser: Simdjson active implementation icelake 2025-03-31T16:38:15.443451Z node 25 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-03-31T16:38:15.443504Z node 25 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 42, values: {"a1": true, "a2": false} 2025-03-31T16:38:15.443953Z node 25 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-03-31T16:38:15.443997Z node 25 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 43, values: {"a1": "true", "a2": falce} 2025-03-31T16:38:16.000950Z node 26 :FQ_ROW_DISPATCHER INFO: TJsonParser: Simdjson active implementation icelake 2025-03-31T16:38:16.001272Z node 26 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-03-31T16:38:16.001317Z node 26 :FQ_ROW_DISPATCHER TRACE: 
TJsonParser: Do parsing, first offset: 42, values: {"a1": Yelse} 2025-03-31T16:38:16.001767Z node 26 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-03-31T16:38:16.001814Z node 26 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 43, values: {"a1": "st""r"} 2025-03-31T16:38:16.001992Z node 26 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-03-31T16:38:16.002030Z node 26 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 44, values: {"a1": "x"} {"a1": "y"} 2025-03-31T16:38:16.002186Z node 26 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-03-31T16:38:16.002227Z node 26 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 45, values: { 2025-03-31T16:38:16.469672Z node 27 :FQ_ROW_DISPATCHER DEBUG: TPurecalcFilter: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT _offset FROM Input where a2 > 100; 2025-03-31T16:38:16.481460Z node 27 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Add to compile queue request with id 0 from [27:7488013900164730619:2051] 2025-03-31T16:38:22.463213Z node 27 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [27:7488013900164730619:2051] [id 0]: Started compile request 2025-03-31T16:38:22.576023Z node 27 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [27:7488013900164730619:2051] [id 0]: Compilation completed for request 2025-03-31T16:38:22.576187Z node 27 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Compile finished for request with id 0 from [27:7488013900164730619:2051] 2025-03-31T16:38:22.576495Z node 27 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 1 rows 2025-03-31T16:38:22.577491Z node 27 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 1 rows 2025-03-31T16:38:23.329432Z node 28 :FQ_ROW_DISPATCHER DEBUG: TPurecalcFilter: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT _offset FROM Input where a2 > 100; 2025-03-31T16:38:23.329887Z node 28 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Add to compile queue request with id 0 from [28:7488013931348477383:2051] 2025-03-31T16:38:29.215196Z node 28 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [28:7488013931348477383:2051] [id 0]: Started compile request 2025-03-31T16:38:29.330438Z node 28 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [28:7488013931348477383:2051] [id 0]: Compilation completed for request 2025-03-31T16:38:29.333287Z node 28 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Compile finished for request with id 0 from [28:7488013931348477383:2051] 2025-03-31T16:38:29.334780Z node 28 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 1 rows 2025-03-31T16:38:29.334960Z node 28 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 1 rows 2025-03-31T16:38:30.140522Z node 29 :FQ_ROW_DISPATCHER DEBUG: TPurecalcFilter: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT _offset FROM Input where a2 > 100; 2025-03-31T16:38:30.140687Z node 29 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Add to compile queue request with id 0 from [29:7488013964107603186:2051] 2025-03-31T16:38:37.583263Z node 29 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [29:7488013964107603186:2051] [id 0]: Started compile request 2025-03-31T16:38:37.717290Z node 29 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [29:7488013964107603186:2051] [id 0]: Compilation completed for request 2025-03-31T16:38:37.717475Z node 29 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Compile finished for request with id 0 from [29:7488013964107603186:2051] 
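[Editor's note] The "Generated sql:" records throughout this trace all follow one pattern: the user's predicate is spliced verbatim into a standalone YQL program before it is sent to the purecalc compile service. A minimal sketch of that wrapping is below; the function name and shape are hypothetical illustrations, not the actual TPurecalcFilter code, and assume only what the trace itself shows (the LLVM-off pragma and the fixed SELECT prefix).

    #include <string>

    // Hypothetical sketch of the SQL generation seen in the
    // "Generated sql:" trace lines above; the real TPurecalcFilter
    // implementation may differ.
    std::string GenerateFilterSql(const std::string& predicate) {
        // LLVM codegen is disabled for these small filter programs,
        // matching the PRAGMA emitted in the trace.
        return "PRAGMA config.flags(\"LLVM\", \"OFF\"); "
               "SELECT _offset FROM Input " + predicate + ";";
    }

Because the predicate is inserted verbatim, a syntactically invalid one (such as the deliberate 'where a2 ... 50' used in the failing cases above) surfaces as the YQL parser "mismatched input" error reported back through the compile service, which is exactly the Compilation-failed path exercised by these tests.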
2025-03-31T16:38:37.719546Z node 29 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 2 rows 2025-03-31T16:38:37.719730Z node 29 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 2 rows 2025-03-31T16:38:37.719774Z node 29 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 2 rows 2025-03-31T16:38:37.719806Z node 29 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 2 rows 2025-03-31T16:38:37.719840Z node 29 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 2 rows 2025-03-31T16:38:38.504344Z node 30 :FQ_ROW_DISPATCHER DEBUG: TPurecalcFilter: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT _offset FROM Input where a1 is null; 2025-03-31T16:38:38.523918Z node 30 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Add to compile queue request with id 0 from [30:7488013997519712173:2051] 2025-03-31T16:38:45.623215Z node 30 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [30:7488013997519712173:2051] [id 0]: Started compile request 2025-03-31T16:38:45.689953Z node 30 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [30:7488013997519712173:2051] [id 0]: Compilation completed for request 2025-03-31T16:38:45.690098Z node 30 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Compile finished for request with id 0 from [30:7488013997519712173:2051] 2025-03-31T16:38:45.690296Z node 30 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 1 rows 2025-03-31T16:38:46.713746Z node 31 :FQ_ROW_DISPATCHER DEBUG: TPurecalcFilter: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT _offset FROM Input where a2 > 50; 2025-03-31T16:38:46.719225Z node 31 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Add to compile queue request with id 0 from [31:7488014032408471444:2051] 2025-03-31T16:38:54.071225Z node 31 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [31:7488014032408471444:2051] [id 0]: Started compile request 2025-03-31T16:38:54.160216Z node 31 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [31:7488014032408471444:2051] [id 0]: Compilation completed for request 2025-03-31T16:38:54.160355Z node 31 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Compile finished for request with id 0 from [31:7488014032408471444:2051] 2025-03-31T16:38:54.160645Z node 31 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 1 rows 2025-03-31T16:38:55.007860Z node 32 :FQ_ROW_DISPATCHER DEBUG: TPurecalcFilter: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT _offset FROM Input where a2 ... 
50; 2025-03-31T16:38:55.008070Z node 32 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Add to compile queue request with id 0 from [32:7488014067854088034:2051] 2025-03-31T16:39:01.875220Z node 32 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [32:7488014067854088034:2051] [id 0]: Started compile request 2025-03-31T16:39:01.889945Z node 32 :FQ_ROW_DISPATCHER ERROR: TPurecalcCompileActor [32:7488014067854088034:2051] [id 0]: Compilation failed for request 2025-03-31T16:39:01.890141Z node 32 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Compile finished for request with id 0 from [32:7488014067854088034:2051] 2025-03-31T16:39:04.223032Z node 33 :FQ_ROW_DISPATCHER TRACE: TRawParser: Add 1 messages to parse 2025-03-31T16:39:04.223143Z node 33 :FQ_ROW_DISPATCHER TRACE: TRawParser: Do parsing, first offset: 42, value: {"a1": "hello1__large_str", "a2": 101, "event": "event1"} 2025-03-31T16:39:05.817428Z node 34 :FQ_ROW_DISPATCHER TRACE: TRawParser: Add 3 messages to parse 2025-03-31T16:39:05.817494Z node 34 :FQ_ROW_DISPATCHER TRACE: TRawParser: Do parsing, first offset: 42, value: {"a1": "hello1", "a2": "101", "event": "event1"} 2025-03-31T16:39:05.817626Z node 34 :FQ_ROW_DISPATCHER TRACE: TRawParser: Do parsing, first offset: 43, value: {"a1": "hello1", "a2": "101", "event": "event2"} 2025-03-31T16:39:05.817652Z node 34 :FQ_ROW_DISPATCHER TRACE: TRawParser: Do parsing, first offset: 44, value: {"a2": "101", "a1": "hello1", "event": "event3"} |98.3%| [TM] {RESULT} ydb/core/fq/libs/row_dispatcher/format_handler/ut/unittest |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeConnection [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyConnection |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/coordinator/ut/unittest >> CoordinatorVolatile::CoordinatorRestartWithEnqueuedVolatileStep [GOOD] Test command err: 2025-03-31T16:37:06.311599Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:700:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:37:06.312025Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:37:06.312161Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:37:06.314015Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:697:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:37:06.314373Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:37:06.314487Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000858/r3tmp/tmpK1jbQa/pdisk_1.dat 2025-03-31T16:37:06.880817Z node 1 :IMPORT WARN: Table profiles were not loaded ... waiting for the first mediator step 2025-03-31T16:37:07.194176Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:37:07.194854Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:37:07.198140Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:37:07.198247Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:37:07.215948Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:37:07.216933Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:37:07.217278Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected ... found first step to be 500 2025-03-31T16:37:07.494832Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 HANDLE TEvAcquireReadStep ... acquired read step 500 ... waiting for the next mediator step ... found second step to be 1000 ... read step subscribe result: [500, 1000] ... read step subscribe update: 2000 2025-03-31T16:37:08.598010Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 HANDLE TEvAcquireReadStep ... acquired read step 2000 ... read step subscribe result: [2000, 2000] ... read step subscribe update: 2500 ... read step subscribe update: 2500 ... read step subscribe update: 3000 ... read step subscribe update: 4000 ... read step subscribe update: 5000 ... read step subscribe update: 6000 ... 
read step subscribe result: [2000, 6000] 2025-03-31T16:37:11.354822Z node 1 :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 2 2025-03-31T16:37:11.354916Z node 1 :PIPE_SERVER ERROR: [72057594037936131] NodeDisconnected NodeId# 2 2025-03-31T16:37:11.354956Z node 1 :PIPE_SERVER ERROR: [72057594037968897] NodeDisconnected NodeId# 2 2025-03-31T16:37:11.354986Z node 1 :PIPE_SERVER ERROR: [72057594046447617] NodeDisconnected NodeId# 2 2025-03-31T16:37:11.355136Z node 1 :PIPE_SERVER ERROR: [72057594037936129] NodeDisconnected NodeId# 2 2025-03-31T16:37:11.355408Z node 2 :TX_PROXY WARN: actor# [2:238:2129] HANDLE TEvClientDestroyed from tablet# 72057594046447617 2025-03-31T16:37:11.355780Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeDisconnected, NodeId 2 2025-03-31T16:37:11.355900Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connected -> Disconnecting 2025-03-31T16:37:11.356485Z node 2 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:86:2087] ServerId# [1:1071:2628] TabletId# 72057594037932033 PipeClientId# [2:86:2087] 2025-03-31T16:37:11.357251Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnecting -> Disconnected 2025-03-31T16:37:11.363420Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:37:11.377732Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:37:11.378880Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected ... read step subscribe update: 7000 ... read step subscribe update: 8000 ... read step subscribe update: 9000 ... read step subscribe update: 10000 ... read step subscribe update: 11000 2025-03-31T16:37:22.044419Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:284:2217], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:37:22.045043Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T16:37:22.045536Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:37:22.046914Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:700:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:37:22.047406Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:37:22.047632Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000858/r3tmp/tmpQOuKGn/pdisk_1.dat 2025-03-31T16:37:22.367398Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:37:22.522191Z node 3 :TX_COORDINATOR DEBUG: Processing TEvSubscribeLastStep from [4:1144:2375] at coordinator 72057594046316545 with seqNo 123 and cookie 234 2025-03-31T16:37:22.610238Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:37:22.610429Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:37:22.614543Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:37:22.614651Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:37:22.633465Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-03-31T16:37:22.634125Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:37:22.634508Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:37:23.231703Z node 3 :TX_COORDINATOR DEBUG: Processing TEvSubscribeLastStep from [4:1145:2376] at coordinator 72057594046316545 with seqNo 234 and cookie 345 2025-03-31T16:37:23.976096Z node 3 :TX_COORDINATOR DEBUG: Processing TEvSubscribeLastStep from [4:1144:2375] at coordinator 72057594046316545 with seqNo 124 and cookie 245 2025-03-31T16:37:23.987981Z node 3 :TX_COORDINATOR DEBUG: Ignored TEvSubscribeLastStep from [4:1144:2375] at coordinator 72057594046316545 with seqNo 123 existing seqNo 124 2025-03-31T16:37:24.632439Z node 3 :TX_COORDINATOR DEBUG: Processing TEvUnsubscribeLastStep from [4:1144:2375] at coordinator 72057594046316545 with seqNo 124 2025-03-31T16:37:31.674275Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:37:31.674527Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:37:31.674730Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000858/r3tmp/tmps8opAy/pdisk_1.dat 2025-03-31T16:37:31.973217Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:37:32.016802Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:37:32.016959Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:37:32.030084Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected Rebooting coordinator to restore config 2025-03-31T16:37:32.260809Z node 5 :TX_COORDINATOR INFO: tablet# 72057594046316545 CreateTxInit Complete 2025-03-31T16:37:32.262117Z node 5 :TX_COORDINATOR INFO: Coordinator# 72057594046316545 restoring static processing params 2025-03-31T16:37:32.262888Z node 5 :TX_COORDINATOR NOTICE: tablet# 72057594046316545 HANDLE EvMediatorQueueRestart MediatorId# 72057594046382081 2025-03-31T16:37:32.263049Z node 5 :TX_COORDINATOR INFO: tablet# 72057594046316545 version# 0 TTxConfigure Complete Rebooting coordinator a second time 2025-03-31T16:37:32.617141Z node 5 :TX_COORDINATOR INFO: tablet# 72057594046316545 CreateTxInit Complete 2025-03-31T16:37:32.618673Z node 5 :TX_COORDINATOR NOTICE: tablet# 72057594046316545 HANDLE EvMediatorQueueRestart MediatorId# 72057594046382081 2025-03-31T16:37:36.754119Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:686:2416], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:37:36.754459Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:37:36.754554Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000858/r3tmp/tmpBjkHEC/pdisk_1.dat 2025-03-31T16:37:37.155693Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:37:37.427737Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:37:37.427907Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:37:37.447188Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected Sending CreateDatabase request 2025-03-31T16:37:38.470751Z node 6 :CMS_TENANTS ... d ... coordinator 72057594046316545 gen 2 is planning step 1000 2025-03-31T16:39:08.800848Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [20:572:2499] MediatorId# 72057594046382081 HANDLE TEvCommitStep {TMediateStep From 50 To# 1000Steps: {{TCoordinatorStep step# 1000 PrevStep# 50}}} marker# M1 2025-03-31T16:39:08.800970Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [20:572:2499] MediatorId# 72057594046382081 SEND TEvStepPlanComplete to# [20:574:2501] bucket.ActiveActor step# 1000 2025-03-31T16:39:08.801052Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [20:572:2499] MediatorId# 72057594046382081 SEND TEvStepPlanComplete to# [20:575:2502] bucket.ActiveActor step# 1000 2025-03-31T16:39:08.801123Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [20:574:2501] Mediator# 72057594046382081 HANDLE {TEvStepPlanComplete step# 1000} 2025-03-31T16:39:08.801222Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [20:575:2502] Mediator# 72057594046382081 HANDLE {TEvStepPlanComplete step# 1000} ... waiting for blocked put responses 2025-03-31T16:39:08.814174Z node 20 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 10000000 HANDLE EvProposeTransaction marker# C0 2025-03-31T16:39:08.814270Z node 20 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 10000000 step# 1050 Status# 16 SEND to# [20:595:2519] Proxy marker# C1 ... waiting for blocked put responses ... coordinator 72057594046316545 gen 2 is planning step 1050 2025-03-31T16:39:08.826969Z node 20 :TX_COORDINATOR DEBUG: Transaction 10000000 has been planned 2025-03-31T16:39:08.827117Z node 20 :TX_COORDINATOR DEBUG: Planned transaction 10000000 for mediator 72057594046382081 tablet 72057594047365120 ... blocking put [72057594046316545:2:12:1:24576:168:0] response ... waiting for planning for the required step ... waiting for planning for the required step ... coordinator 72057594046316545 gen 2 is planning step 1100 ... starting a new coordinator instance ... waiting for migrated state 2025-03-31T16:39:08.887067Z node 20 :TX_COORDINATOR INFO: OnTabletStop: 72057594046316545 reason = ReasonDemoted 2025-03-31T16:39:08.887532Z node 20 :TX_COORDINATOR INFO: OnTabletStop: 72057594046316545 reason = ReasonDemoted 2025-03-31T16:39:08.896354Z node 20 :TX_COORDINATOR INFO: tablet# 72057594046316545 CreateTxInit Complete ... 
blocking state response from [20:571:2412] to [20:690:2563] LastSentStep: 1000 LastAcquiredStep: 0 LastConfirmedStep: 0 ... unblocking put responses and requests 2025-03-31T16:39:08.897203Z node 20 :TX_COORDINATOR INFO: OnTabletStop: 72057594046316545 reason = ReasonDemoted 2025-03-31T16:39:08.897410Z node 20 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 10000000 stepId# 1050 Status# 17 SEND EvProposeTransactionStatus to# [20:595:2519] Proxy 2025-03-31T16:39:08.899878Z node 20 :TX_MEDIATOR DEBUG: tablet# 72057594046382081 server# [20:577:2503] disconnnected 2025-03-31T16:39:08.900030Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [20:572:2499] MediatorId# 72057594046382081 HANDLE TEvServerDisconnected server# [20:577:2503] ... trying to plan tx 10000011 ... waiting for planned another persistent tx 2025-03-31T16:39:08.928441Z node 20 :TX_MEDIATOR DEBUG: tablet# 72057594046382081 server# [20:698:2574] connected 2025-03-31T16:39:08.928664Z node 20 :TX_MEDIATOR DEBUG: tablet# 72057594046382081 HANDLE EvCoordinatorSync 2025-03-31T16:39:08.928733Z node 20 :TX_MEDIATOR DEBUG: tablet# 72057594046382081 SEND EvCoordinatorSyncResult to# [20:693:2571] Cookie# 1 CompleteStep# 1000 LatestKnownStep# 1000 SubjectiveTime# 952 Coordinator# 72057594046316545 2025-03-31T16:39:08.928943Z node 20 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 10000011 HANDLE EvProposeTransaction marker# C0 2025-03-31T16:39:08.929013Z node 20 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 10000011 step# 1100 Status# 16 SEND to# [20:595:2519] Proxy marker# C1 2025-03-31T16:39:08.929124Z node 20 :TX_COORDINATOR NOTICE: tablet# 72057594046316545 HANDLE EvMediatorQueueRestart MediatorId# 72057594046382081 2025-03-31T16:39:08.929232Z node 20 :TX_COORDINATOR DEBUG: Send from# 72057594046316545 to mediator# 72057594046382081, step# 1050, txid# 10000000 marker# C2 ... 
observed step: Transactions { AffectedSet: 72057594047365120 TxId: 10000000 } Step: 1050 PrevStep: 0 MediatorID: 72057594046382081 CoordinatorID: 72057594046316545 ActiveCoordinatorGeneration: 3 2025-03-31T16:39:08.944340Z node 20 :TX_MEDIATOR DEBUG: tablet# 72057594046382081 HANDLE EvCoordinatorStep coordinator# 72057594046316545 step# 1050 2025-03-31T16:39:08.944461Z node 20 :TX_MEDIATOR INFO: Coordinator step: Mediator [72057594046382081], Coordinator [72057594046316545], step# [1050] transactions [1] 2025-03-31T16:39:08.944659Z node 20 :TX_MEDIATOR DEBUG: tablet# 72057594046382081 SEND EvCommitStep to# [20:572:2499] ExecQueue {TMediateStep From 1000 To# 1050Steps: {{TCoordinatorStep step# 1050 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 10000000 AckTo# [20:693:2571]}}TabletsToTransaction: {{tablet# 72057594047365120 txid# 10000000}}}}} marker# M0 2025-03-31T16:39:08.944871Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [20:572:2499] MediatorId# 72057594046382081 HANDLE TEvCommitStep {TMediateStep From 1000 To# 1050Steps: {{TCoordinatorStep step# 1050 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 10000000 AckTo# [20:693:2571]}}TabletsToTransaction: {{tablet# 72057594047365120 txid# 10000000}}}}} marker# M1 2025-03-31T16:39:08.944970Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [20:572:2499] MediatorId# 72057594046382081 SEND Ev to# [20:574:2501] step# 1050 forTablet# 72057594047365120 txid# 10000000 marker# M3 2025-03-31T16:39:08.945063Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [20:572:2499] MediatorId# 72057594046382081 SEND TEvStepPlanComplete to# [20:574:2501] bucket.ActiveActor step# 1050 2025-03-31T16:39:08.945125Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [20:572:2499] MediatorId# 72057594046382081 SEND TEvStepPlanComplete to# [20:575:2502] bucket.ActiveActor step# 1050 2025-03-31T16:39:08.946097Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [20:574:2501] Mediator# 72057594046382081 HANDLE {TEvCommitTabletStep step# 1050 TabletId# 72057594047365120 Transactions {{TTx Moderator# 0 txid# 10000000 AckTo# [20:693:2571]}}} marker# M4 2025-03-31T16:39:08.946339Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [20:575:2502] Mediator# 72057594046382081 HANDLE {TEvStepPlanComplete step# 1050} 2025-03-31T16:39:08.946606Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [20:574:2501] Mediator# 72057594046382081 HANDLE {TEvStepPlanComplete step# 1050} 2025-03-31T16:39:08.947254Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [20:574:2501] Mediator# 72057594046382081 HANDLE NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594047365120 Status: OK ServerId: [20:701:2576] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-03-31T16:39:08.947358Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [20:574:2501] Mediator# 72057594046382081 SEND to# 72057594047365120 {TEvPlanStep step# 1050 MediatorId# 72057594046382081 TabletID 72057594047365120} ... observed tablet step: Transactions { TxId: 10000000 AckTo { RawX1: 0 RawX2: 0 } } Step: 1050 MediatorID: 72057594046382081 TabletID: 72057594047365120 ... blocked accept from 72057594047365120 ... waiting for planned another persistent tx ... 
coordinator 72057594046316545 gen 3 is planning step 1100 2025-03-31T16:39:08.964134Z node 20 :TX_COORDINATOR DEBUG: Transaction 10000011 has been planned 2025-03-31T16:39:08.964268Z node 20 :TX_COORDINATOR DEBUG: Planned transaction 10000011 for mediator 72057594046382081 tablet 72057594047365120 2025-03-31T16:39:08.965320Z node 20 :TX_COORDINATOR DEBUG: Send from# 72057594046316545 to mediator# 72057594046382081, step# 1100, txid# 10000011 marker# C2 2025-03-31T16:39:08.965428Z node 20 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 10000011 stepId# 1100 Status# 17 SEND EvProposeTransactionStatus to# [20:595:2519] Proxy ... observed step: Transactions { AffectedSet: 72057594047365120 TxId: 10000011 } Step: 1100 PrevStep: 1050 MediatorID: 72057594046382081 CoordinatorID: 72057594046316545 ActiveCoordinatorGeneration: 3 2025-03-31T16:39:08.965804Z node 20 :TX_MEDIATOR DEBUG: tablet# 72057594046382081 HANDLE EvCoordinatorStep coordinator# 72057594046316545 step# 1100 2025-03-31T16:39:08.965881Z node 20 :TX_MEDIATOR INFO: Coordinator step: Mediator [72057594046382081], Coordinator [72057594046316545], step# [1100] transactions [1] 2025-03-31T16:39:08.966033Z node 20 :TX_MEDIATOR DEBUG: tablet# 72057594046382081 SEND EvCommitStep to# [20:572:2499] ExecQueue {TMediateStep From 1050 To# 1100Steps: {{TCoordinatorStep step# 1100 PrevStep# 1050Transactions: {{TTx Moderator# 0 txid# 10000011 AckTo# [20:693:2571]}}TabletsToTransaction: {{tablet# 72057594047365120 txid# 10000011}}}}} marker# M0 2025-03-31T16:39:08.966209Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [20:572:2499] MediatorId# 72057594046382081 HANDLE TEvCommitStep {TMediateStep From 1050 To# 1100Steps: {{TCoordinatorStep step# 1100 PrevStep# 1050Transactions: {{TTx Moderator# 0 txid# 10000011 AckTo# [20:693:2571]}}TabletsToTransaction: {{tablet# 72057594047365120 txid# 10000011}}}}} marker# M1 2025-03-31T16:39:08.966374Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [20:572:2499] MediatorId# 72057594046382081 SEND Ev to# [20:574:2501] step# 1100 forTablet# 72057594047365120 txid# 10000011 marker# M3 2025-03-31T16:39:08.966507Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [20:572:2499] MediatorId# 72057594046382081 SEND TEvStepPlanComplete to# [20:574:2501] bucket.ActiveActor step# 1100 2025-03-31T16:39:08.966565Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [20:572:2499] MediatorId# 72057594046382081 SEND TEvStepPlanComplete to# [20:575:2502] bucket.ActiveActor step# 1100 2025-03-31T16:39:08.966713Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [20:574:2501] Mediator# 72057594046382081 HANDLE {TEvCommitTabletStep step# 1100 TabletId# 72057594047365120 Transactions {{TTx Moderator# 0 txid# 10000011 AckTo# [20:693:2571]}}} marker# M4 2025-03-31T16:39:08.966823Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [20:574:2501] Mediator# 72057594046382081 SEND to# 72057594047365120 {TEvPlanStep step# 1100 MediatorId# 72057594046382081 TabletID 72057594047365120} 2025-03-31T16:39:08.966915Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [20:575:2502] Mediator# 72057594046382081 HANDLE {TEvStepPlanComplete step# 1100} 2025-03-31T16:39:08.967800Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [20:574:2501] Mediator# 72057594046382081 HANDLE {TEvStepPlanComplete step# 1100} ... observed tablet step: Transactions { TxId: 10000011 AckTo { RawX1: 0 RawX2: 0 } } Step: 1100 MediatorID: 72057594046382081 TabletID: 72057594047365120 ... blocked accept from 72057594047365120 ... coordinator 72057594046316545 gen 3 is planning step 1150 ... 
observed step: Step: 1150 PrevStep: 1100 MediatorID: 72057594046382081 CoordinatorID: 72057594046316545 ActiveCoordinatorGeneration: 3 2025-03-31T16:39:08.983556Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [20:572:2499] MediatorId# 72057594046382081 HANDLE TEvCommitStep {TMediateStep From 1100 To# 1150Steps: {{TCoordinatorStep step# 1150 PrevStep# 1100}}} marker# M1 2025-03-31T16:39:08.983627Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [20:572:2499] MediatorId# 72057594046382081 SEND TEvStepPlanComplete to# [20:574:2501] bucket.ActiveActor step# 1150 2025-03-31T16:39:08.983673Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [20:572:2499] MediatorId# 72057594046382081 SEND TEvStepPlanComplete to# [20:575:2502] bucket.ActiveActor step# 1150 2025-03-31T16:39:08.983751Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [20:574:2501] Mediator# 72057594046382081 HANDLE {TEvStepPlanComplete step# 1150} 2025-03-31T16:39:08.983797Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [20:575:2502] Mediator# 72057594046382081 HANDLE {TEvStepPlanComplete step# 1150} |98.3%| [TM] {RESULT} ydb/core/tx/coordinator/ut/unittest >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyConnection [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyConnectionWithServiceAccount >> test.py::TestSqsSplitMergeStdTables::test_std_merge_split [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64--9223372036854775808-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64--9223372036854775808-False] >> TDqPqReadActorTest::TestReadFromTopicFromNow [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64--9223372036854775808-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64-9223372036854775807-True] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDeleteConnection >> TAsyncIndexTests::MergeBothWithReboots[TabletReboots] [GOOD] >> TDqPqReadActorTest::ReadWithFreeSpace >> KafkaProtocol::CreateTopicsScenarioWithKafkaAuth [GOOD] >> KafkaProtocol::CreateTopicsScenarioWithoutKafkaAuth >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDeleteConnection [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendTestConnection |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendTestConnection [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendTestConnectionWithServiceAccount >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendTestConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateBinding |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestKiKiMRWithMetadata::test_cluster_is_operational_with_metadata [GOOD] >> QuoterWithKesusTest::CanDeleteResourceWhenUsingIt [GOOD] >> QuoterWithKesusTest::CanKillKesusWhenUsingIt >> TDqPqReadActorTest::ReadWithFreeSpace [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::MergeBothWithReboots[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 
72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [1:135:2157] sender: [1:136:2058] recipient: [1:111:2142] 2025-03-31T16:29:46.481516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-31T16:29:46.481629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:29:46.481671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-03-31T16:29:46.481705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-31T16:29:46.481761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-31T16:29:46.481801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-31T16:29:46.481873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-31T16:29:46.481977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-31T16:29:46.482285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:29:46.570278Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-03-31T16:29:46.570377Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:169:2058] recipient: [1:15:2062] 2025-03-31T16:29:46.595833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:29:46.596019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-31T16:29:46.596251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-31T16:29:46.601163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-31T16:29:46.601434Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-31T16:29:46.602092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, 
state: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:46.602365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-31T16:29:46.607457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:46.609035Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:29:46.609106Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:46.609238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-31T16:29:46.609299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:29:46.609341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-31T16:29:46.609488Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:210:2058] recipient: [1:208:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:210:2058] recipient: [1:208:2209] Leader for TabletID 72057594037968897 is [1:214:2213] sender: [1:215:2058] recipient: [1:208:2209] 2025-03-31T16:29:46.616862Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:235:2058] recipient: [1:15:2062] 2025-03-31T16:29:46.802128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-31T16:29:46.802372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:46.802578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-31T16:29:46.802795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-31T16:29:46.802856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:46.805614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:46.805753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-31T16:29:46.806001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:46.806067Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-31T16:29:46.806101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no 
shards to create, do next state 2025-03-31T16:29:46.806134Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T16:29:46.808054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:46.808119Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-31T16:29:46.808164Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T16:29:46.809991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:46.810032Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:46.810077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:46.810131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T16:29:46.813750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T16:29:46.815946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-31T16:29:46.816137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:135:2157] sender: [1:250:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-31T16:29:46.817091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-31T16:29:46.817219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 135 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:29:46.817271Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:46.817551Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-31T16:29:46.817605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-31T16:29:46.817823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-31T16:29:46.817916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-31T16:29:46.820432Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-31T16:29:46.820477Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-31T16:29:46.820659Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-31T16:29:46.820698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:202:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-31T16:29:46.820999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-31T16:29:46.821041Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-31T16:29:46.821133Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:29:46.821166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-31T16:29:46.821202Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-31T16:29:46.821270Z no ... } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { 
GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:39:13.202163Z node 203 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-31T16:39:13.202483Z node 203 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 357us result status StatusSuccess 2025-03-31T16:39:13.203580Z node 203 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: 
"indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey 
{ SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-31T16:39:13.214940Z node 203 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409551][203:1107:2880] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-03-31T16:39:13.215108Z node 203 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409550:2][203:1053:2880] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409551 } 2025-03-31T16:39:13.215285Z node 203 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409551][203:1107:2880] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1743439153186062 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1743439153186062 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1743439153186062 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2025-03-31T16:39:13.218297Z node 203 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409551][203:1107:2880] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3 2025-03-31T16:39:13.218414Z node 203 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409550:2][203:1053:2880] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409551 } |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TDqPqReadActorTest::ReadNonExistentTopic >> TDqPqReadActorTest::ReadNonExistentTopic [GOOD] |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TDqPqReadActorTest::TestSaveLoadPqRead |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateBinding [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListBindings >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64-9223372036854775807-True] [GOOD] >> test_schemeshard_limits.py::TestSchemeShardLimitsCase0::test_effective_acls_are_too_large [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64-9223372036854775807-False] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64-9223372036854775807-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-0-True] >> 
TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListBindings [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeBinding >> TCreateAndDropViewTest::DropViewInFolder [GOOD] >> TCreateAndDropViewTest::ContextPollution >> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--true] [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeBinding [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyBinding |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> KafkaProtocol::CreateTopicsScenarioWithoutKafkaAuth [GOOD] >> KafkaProtocol::CreatePartitionsScenario >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyBinding [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDeleteBinding >> test_kill_tablets.py::TestKillTablets::test_then_kill_system_tablets_and_it_increases_generation >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-0-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-0-False] >> DataShardStats::NoData [GOOD] >> DataShardStats::Follower |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_kill_tablets.py::TestKillTablets::test_when_kill_hive_it_will_be_restarted_and_can_create_tablets >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-0-False] [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDeleteBinding [GOOD] >> TControlPlaneProxyShouldPassHids::ShouldCheckScenario >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-18446744073709551615-True] >> test_alloc_default.py::TestAlloc::test_alloc_and_free[kikimr0] [GOOD] >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_simple_table-False] [GOOD] >> KqpTpch::Query21 [GOOD] >> KqpTpch::Query22 |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TDataShardRSTest::TestCleanupInRS+UseSink [GOOD] >> TDataShardRSTest::TestCleanupInRS-UseSink |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test >> test_schemeshard_limits.py::TestSchemeShardLimitsCase0::test_effective_acls_are_too_large [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-18446744073709551615-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-18446744073709551615-False] |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-18446744073709551615-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_dynumber |98.4%| [TA] $(B)/ydb/core/tx/schemeshard/ut_index/test-results/unittest/{meta.json ... 
results_accumulator.log} >> QuoterWithKesusTest::CanKillKesusWhenUsingIt [GOOD] >> test_kv.py::TestYdbKvWorkload::test_dynumber [GOOD] >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_simple_table-False] [GOOD] |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> KafkaProtocol::CreatePartitionsScenario [GOOD] >> KafkaProtocol::AlterConfigsScenario >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_async_table-True] [GOOD] >> TCreateAndDropViewTest::ContextPollution [GOOD] >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_async_table-True] [GOOD] >> TEvaluateExprInViewTest::EvaluateExpr |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/merge_split_common_table/std/py3test >> test.py::TestSqsSplitMergeStdTables::test_std_merge_split [GOOD] >> test_schemeshard_limits.py::TestSchemeShardLimitsCase1::test_too_large_acls [GOOD] >> test_alloc_default.py::TestAlloc::test_up_down[kikimr0] |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> DataShardStats::Follower [GOOD] >> DataShardStats::Tli |98.4%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/quoter/ut/unittest >> QuoterWithKesusTest::CanKillKesusWhenUsingIt [GOOD] Test command err: 2025-03-31T16:36:58.525708Z node 1 :QUOTER_PROXY INFO: [/Path/KesusName]: Created kesus quoter proxy. Tablet id: 100500 2025-03-31T16:36:58.525853Z node 1 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Connecting to kesus 2025-03-31T16:36:58.529452Z node 1 :QUOTER_PROXY WARN: [/Path/KesusName]: Failed to connect to tablet. Status: ERROR 2025-03-31T16:36:58.529525Z node 1 :QUOTER_PROXY INFO: [/Path/KesusName]: Reconnecting to kesus 2025-03-31T16:36:58.552231Z node 2 :QUOTER_PROXY INFO: [/Path/KesusName]: Created kesus quoter proxy. Tablet id: 100500 2025-03-31T16:36:58.552359Z node 2 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Connecting to kesus 2025-03-31T16:36:58.552661Z node 2 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Successfully connected to tablet 2025-03-31T16:36:58.552775Z node 2 :QUOTER_PROXY WARN: [/Path/KesusName]: Disconnected from tablet 2025-03-31T16:36:58.552808Z node 2 :QUOTER_PROXY INFO: [/Path/KesusName]: Reconnecting to kesus 2025-03-31T16:36:58.553135Z node 2 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Successfully connected to tablet 2025-03-31T16:36:58.575725Z node 3 :QUOTER_PROXY INFO: [/Path/KesusName]: Created kesus quoter proxy. Tablet id: 100500 2025-03-31T16:36:58.575845Z node 3 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Connecting to kesus 2025-03-31T16:36:58.576035Z node 3 :QUOTER_PROXY INFO: [/Path/KesusName]: ProxyRequest "/resource" 2025-03-31T16:36:58.576085Z node 3 :QUOTER_PROXY WARN: [/Path/KesusName]: Resource "/resource" has incorrect name. Maybe this was some error on client side. 
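The WARN record just above and the two records that follow show the kesus quoter proxy rejecting resource names that contain empty path segments ("/resource" here, "resource//resource" next) locally, before any tablet round-trip, and failing the session with GenericError. A minimal stand-alone sketch of that kind of segment check, inferred only from these traces (ValidateResourcePath is a hypothetical name for illustration, not the actual KESUS routine):

#include <string>

// Illustrative check: a resource path like "res0/res1" must consist of
// non-empty '/'-separated segments; "/resource" and "resource//resource"
// both contain an empty segment and are rejected up front.
bool ValidateResourcePath(const std::string& path) {
    if (path.empty()) return false;
    size_t segStart = 0;
    for (size_t i = 0; i <= path.size(); ++i) {
        if (i == path.size() || path[i] == '/') {
            if (i == segStart) return false;  // empty segment
            segStart = i + 1;
        }
    }
    return true;
}

int main() {
    // Matches the outcomes in the trace: both malformed names fail fast.
    return (ValidateResourcePath("res") &&
            !ValidateResourcePath("/resource") &&
            !ValidateResourcePath("resource//resource")) ? 0 : 1;
}

Well-formed names such as "res" or "res0" proceed to SubscribeOnResource, which is the path the later records in this trace take.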
2025-03-31T16:36:58.576126Z node 3 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxySession("/resource", Error: GenericError) 2025-03-31T16:36:58.576228Z node 3 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Successfully connected to tablet 2025-03-31T16:36:58.576322Z node 3 :QUOTER_PROXY INFO: [/Path/KesusName]: ProxyRequest "resource//resource" 2025-03-31T16:36:58.576379Z node 3 :QUOTER_PROXY WARN: [/Path/KesusName]: Resource "resource//resource" has incorrect name. Maybe this was some error on client side. 2025-03-31T16:36:58.576431Z node 3 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxySession("resource//resource", Error: GenericError) 2025-03-31T16:36:58.584821Z node 4 :QUOTER_PROXY INFO: [/Path/KesusName]: Created kesus quoter proxy. Tablet id: 100500 2025-03-31T16:36:58.584969Z node 4 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Connecting to kesus 2025-03-31T16:36:58.585146Z node 4 :QUOTER_PROXY INFO: [/Path/KesusName]: ProxyRequest "res" 2025-03-31T16:36:58.586612Z node 4 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Successfully connected to tablet 2025-03-31T16:36:58.606130Z node 4 :QUOTER_PROXY TRACE: [/Path/KesusName]: SubscribeOnResourceResult({ Results { ResourceId: 42 Error { Status: SUCCESS } EffectiveProps { ResourceId: 42 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 } } } }) 2025-03-31T16:36:58.606214Z node 4 :QUOTER_PROXY INFO: [/Path/KesusName]: Initialized new session with resource "res" 2025-03-31T16:36:58.606271Z node 4 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxySession("res", 42) 2025-03-31T16:36:58.606358Z node 4 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxyUpdate(Normal, [{ "res", Normal, {0: Front(20, 2)} }]) 2025-03-31T16:36:58.615301Z node 5 :QUOTER_PROXY INFO: [/Path/KesusName]: Created kesus quoter proxy. Tablet id: 100500 2025-03-31T16:36:58.615428Z node 5 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Connecting to kesus 2025-03-31T16:36:58.615723Z node 5 :QUOTER_PROXY INFO: [/Path/KesusName]: ProxyRequest "res0" 2025-03-31T16:36:58.616289Z node 5 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Successfully connected to tablet 2025-03-31T16:36:58.616626Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: SubscribeOnResourceResult({ Results { ResourceId: 42 Error { Status: SUCCESS } EffectiveProps { ResourceId: 42 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 5 } } } }) 2025-03-31T16:36:58.616672Z node 5 :QUOTER_PROXY INFO: [/Path/KesusName]: Initialized new session with resource "res0" 2025-03-31T16:36:58.616718Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxySession("res0", 42) 2025-03-31T16:36:58.616790Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxyUpdate(Normal, [{ "res0", Normal, {0: Front(1, 2)} }]) 2025-03-31T16:36:58.616902Z node 5 :QUOTER_PROXY INFO: [/Path/KesusName]: ProxyRequest "res1" 2025-03-31T16:36:58.616993Z node 5 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Subscribe on resource "res1" 2025-03-31T16:36:58.617218Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: SubscribeOnResourceResult({ Results { ResourceId: 43 Error { Status: SUCCESS } EffectiveProps { ResourceId: 43 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 5 } } } }) 2025-03-31T16:36:58.617255Z node 5 :QUOTER_PROXY INFO: [/Path/KesusName]: Initialized new session with resource "res1" 2025-03-31T16:36:58.617305Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxySession("res1", 43) 2025-03-31T16:36:58.617361Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxyUpdate(Normal, [{ "res1", Normal, {0: Front(1, 2)} }]) 2025-03-31T16:36:58.617549Z node 5 :QUOTER_PROXY INFO: [/Path/KesusName]: 
ProxyRequest "res2" 2025-03-31T16:36:58.617637Z node 5 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Subscribe on resource "res2" 2025-03-31T16:36:58.617854Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: SubscribeOnResourceResult({ Results { ResourceId: 44 Error { Status: SUCCESS } EffectiveProps { ResourceId: 44 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 5 } } } }) 2025-03-31T16:36:58.617893Z node 5 :QUOTER_PROXY INFO: [/Path/KesusName]: Initialized new session with resource "res2" 2025-03-31T16:36:58.617928Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxySession("res2", 44) 2025-03-31T16:36:58.617974Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxyUpdate(Normal, [{ "res2", Normal, {0: Front(1, 2)} }]) 2025-03-31T16:36:58.618192Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxyStats([{"res1", Consumed: 0, Queue: 5}]) 2025-03-31T16:36:58.618225Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: Set info for resource "res1": { Available: 1, QueueWeight: 5 } 2025-03-31T16:36:58.618287Z node 5 :QUOTER_PROXY INFO: [/Path/KesusName]: Activate session to "res1". Connected: 1 2025-03-31T16:36:58.619016Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: UpdateConsumptionState({ ResourcesInfo { ResourceId: 43 ConsumeResource: true Amount: inf } ActorID { RawX1: 5 RawX2: 21474838532 } }) 2025-03-31T16:36:58.619067Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxyUpdate(Normal, [{ "res1", Normal, {0: Front(1, 2)} }]) 2025-03-31T16:36:58.619353Z node 5 :QUOTER_PROXY WARN: [/Path/KesusName]: Disconnected from tablet 2025-03-31T16:36:58.619376Z node 5 :QUOTER_PROXY INFO: [/Path/KesusName]: Reconnecting to kesus 2025-03-31T16:36:58.619470Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: Mark "res1" for offline allocation. Connected: 0, SessionIsActive: 1, AverageDuration: 0.100000s, AverageAmount: 0.5 2025-03-31T16:36:58.619502Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: Schedule offline allocation in 0.000000s: [{ "res1", 0.5 }] 2025-03-31T16:36:58.619599Z node 5 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Successfully connected to tablet 2025-03-31T16:36:58.619843Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: SubscribeOnResourceResult({ Results { ResourceId: 42 Error { Status: SUCCESS } EffectiveProps { ResourceId: 42 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 5 } } } Results { ResourceId: 43 Error { Status: SUCCESS } EffectiveProps { ResourceId: 43 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 5 } } } Results { ResourceId: 44 Error { Status: SUCCESS } EffectiveProps { ResourceId: 44 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 5 } } } }) 2025-03-31T16:36:58.619879Z node 5 :QUOTER_PROXY INFO: [/Path/KesusName]: Initialized new session with resource "res0" 2025-03-31T16:36:58.619912Z node 5 :QUOTER_PROXY INFO: [/Path/KesusName]: Initialized new session with resource "res1" 2025-03-31T16:36:58.619957Z node 5 :QUOTER_PROXY INFO: [/Path/KesusName]: Initialized new session with resource "res2" 2025-03-31T16:36:58.620007Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxyUpdate(Normal, [{ "res0", Normal, {0: Front(1, 2)} }, { "res1", Normal, {0: Front(1, 2)} }, { "res2", Normal, {0: Front(1, 2)} }]) 2025-03-31T16:36:58.628891Z node 6 :QUOTER_PROXY INFO: [/Path/KesusName]: Created kesus quoter proxy. 
Tablet id: 100500 2025-03-31T16:36:58.629014Z node 6 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Connecting to kesus 2025-03-31T16:36:58.629283Z node 6 :QUOTER_PROXY INFO: [/Path/KesusName]: ProxyRequest "res" 2025-03-31T16:36:58.629414Z node 6 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Successfully connected to tablet 2025-03-31T16:36:58.629642Z node 6 :QUOTER_PROXY TRACE: [/Path/KesusName]: SubscribeOnResourceResult({ Results { ResourceId: 42 Error { Status: SUCCESS } EffectiveProps { ResourceId: 42 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 } } } }) 2025-03-31T16:36:58.629673Z node 6 :QUOTER_PROXY INFO: [/Path/KesusName]: Initialized new session with resource "res" 2025-03-31T16:36:58.629720Z node 6 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxySession("res", 42) 2025-03-31T16:36:58.629768Z node 6 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxyUpdate(Normal, [{ "res", Normal, {0: Front(20, 2)} }]) 2025-03-31T16:36:58.637990Z node 7 :QUOTER_PROXY INFO: [/Path/KesusName]: Created kesus quoter proxy. Tablet id: 100500 2025-03-31T16:36:58.638105Z node 7 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Connecting to kesus 2025-03-31T16:36:58.638403Z node 7 :QUOTER_PROXY INFO: [/Path/KesusName]: ProxyRequest "res" 2025-03-31T16:36:58.638514Z node 7 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Successfully connected to tablet 2025-03-31T16:36:58.638822Z node 7 :QUOTER_PROXY TRACE: [/Path/KesusName]: SubscribeOnResourceResult({ Results { ResourceId: 42 Error { Status: SUCCESS } EffectiveProps { ResourceId: 42 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 } } } }) 2025-03-31T16:36:58.638856Z node 7 :QUOTER_PROXY INFO: [/Path/KesusName]: Initialized new session with resource "res" 2025-03-31T16:36:58.638897Z node 7 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxySession("res", 42) 2025-03-31T16:36:58.638953Z node 7 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxyUpdate(Normal, [{ "res", Normal, {0: Front(20, 2)} }]) 2025-03-31T16:36:58.639231Z node 7 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxyStats([{"res", Consumed: 0, Queue: 25}]) 2025-03-31T16:36:58.639287Z node 7 :QUOTER_PROXY TRACE: [/Path/KesusName]: Set info for resource "res": { Available: 20, QueueWeight: 25 } 2025-03-31T16:36:58.639332Z node 7 :QUOTER_PROXY INFO: [/Path/KesusName]: Activate session to "res". Connected: 1 2025-03-31T16:36:58.639436Z node 7 :QUOTER_PROXY TRACE: [/Path/KesusName]: UpdateConsumptionState({ ResourcesInfo { ResourceId: 42 ConsumeResource: true Amount: inf } ActorID { RawX1: 5 RawX2: 30064773124 } }) 2025-03-31T16:36:58.639489Z node 7 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxyUpdate(Normal, [{ "res", Normal, {0: Front(20, 2)} }]) 2025-03-31T16:36:58.639668Z node 7 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxyCloseSession("res", 42) 2025-03-31T16:36:58.639718Z node 7 :QUOTER_PROXY INFO: [/Path/KesusName]: Deactivate session to "res". Connected: 1 2025-03-31T16:36:58.639803Z node 7 :QUOTER_PROXY TRACE: [/Path/KesusName]: UpdateConsumptionState({ ResourcesInfo { ResourceId: 42 } ActorID { RawX1: 5 RawX2: 30064773124 } }) 2025-03-31T16:36:58.659657Z node 8 :QUOTER_PROXY INFO: [/Path/KesusName]: Created kesus quoter proxy. 
Tablet id: 100500 2025-03-31T16:36:58.659804Z node 8 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Connecting to kesus 2025-03-31T16:36:58.659939Z node 8 :QUOTER_PROXY INFO: [/Path/KesusName]: ProxyRequest "res" 2025-03-31T16:36:58.660101Z node 8 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Successfully connected to tablet 2025-03-31T16:36:58.660391Z node 8 :QUOTER_PROXY TRACE: [/Path/KesusName]: SubscribeOnResourceResult({ Results { ResourceId: 42 Error { Status: SUCCESS } EffectiveProps { ResourceId: 42 H ... ce": { Available: -4.000102429, QueueWeight: 5 } 2025-03-31T16:39:20.367526Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-03-31T16:39:20.367603Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-03-31T16:39:20.403488Z node 50 :KESUS_TABLET TRACE: [72075186224037888] Send TEvResourcesAllocated to [49:7488014172851837055:2303]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } } 2025-03-31T16:39:20.406334Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } }) 2025-03-31T16:39:20.406394Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Kesus allocated {"Resource", 1} 2025-03-31T16:39:20.406453Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-03-31T16:39:20.407168Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-03-31T16:39:20.463754Z node 49 :QUOTER_SERVICE TRACE: Feed resource "Resource". Balance: 0. FreeBalance: 0 2025-03-31T16:39:20.463816Z node 49 :QUOTER_SERVICE TRACE: Schedule next tick for "Resource". Tick size: 0.100000s. Time: 2025-03-31T16:39:20.560000Z 2025-03-31T16:39:20.463857Z node 49 :QUOTER_SERVICE TRACE: Allocate resource "Resource" 2025-03-31T16:39:20.464136Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyStats([{"Resource", Consumed: 0, Queue: 5}]) 2025-03-31T16:39:20.464185Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Set info for resource "Resource": { Available: -3.000102429, QueueWeight: 5 } 2025-03-31T16:39:20.464233Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-03-31T16:39:20.464780Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-03-31T16:39:20.503447Z node 50 :KESUS_TABLET TRACE: [72075186224037888] Send TEvResourcesAllocated to [49:7488014172851837055:2303]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } } 2025-03-31T16:39:20.504720Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } }) 2025-03-31T16:39:20.504775Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Kesus allocated {"Resource", 1} 2025-03-31T16:39:20.504841Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-03-31T16:39:20.505148Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-03-31T16:39:20.561589Z node 49 :QUOTER_SERVICE TRACE: Feed resource "Resource". Balance: 0. FreeBalance: 0 2025-03-31T16:39:20.561642Z node 49 :QUOTER_SERVICE TRACE: Schedule next tick for "Resource". Tick size: 0.100000s. 
Time: 2025-03-31T16:39:20.660000Z 2025-03-31T16:39:20.561667Z node 49 :QUOTER_SERVICE TRACE: Allocate resource "Resource" 2025-03-31T16:39:20.561864Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyStats([{"Resource", Consumed: 0, Queue: 5}]) 2025-03-31T16:39:20.561914Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Set info for resource "Resource": { Available: -2.000102429, QueueWeight: 5 } 2025-03-31T16:39:20.561974Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-03-31T16:39:20.562248Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-03-31T16:39:20.601774Z node 50 :KESUS_TABLET TRACE: [72075186224037888] Send TEvResourcesAllocated to [49:7488014172851837055:2303]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } } 2025-03-31T16:39:20.603601Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } }) 2025-03-31T16:39:20.603662Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Kesus allocated {"Resource", 1} 2025-03-31T16:39:20.603711Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-03-31T16:39:20.604009Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-03-31T16:39:20.661959Z node 49 :QUOTER_SERVICE TRACE: Feed resource "Resource". Balance: 0. FreeBalance: 0 2025-03-31T16:39:20.662008Z node 49 :QUOTER_SERVICE TRACE: Schedule next tick for "Resource". Tick size: 0.100000s. Time: 2025-03-31T16:39:20.760000Z 2025-03-31T16:39:20.662029Z node 49 :QUOTER_SERVICE TRACE: Allocate resource "Resource" 2025-03-31T16:39:20.666359Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyStats([{"Resource", Consumed: 0, Queue: 5}]) 2025-03-31T16:39:20.666404Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Set info for resource "Resource": { Available: -1.000102429, QueueWeight: 5 } 2025-03-31T16:39:20.666452Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-03-31T16:39:20.666779Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-03-31T16:39:20.701928Z node 50 :KESUS_TABLET TRACE: [72075186224037888] Send TEvResourcesAllocated to [49:7488014172851837055:2303]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } } 2025-03-31T16:39:20.703121Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } }) 2025-03-31T16:39:20.703178Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Kesus allocated {"Resource", 1} 2025-03-31T16:39:20.703227Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-03-31T16:39:20.703536Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-03-31T16:39:20.760785Z node 49 :QUOTER_SERVICE TRACE: Feed resource "Resource". Balance: 0. FreeBalance: 0 2025-03-31T16:39:20.760835Z node 49 :QUOTER_SERVICE TRACE: Schedule next tick for "Resource". Tick size: 0.100000s. 
Time: 2025-03-31T16:39:20.860000Z 2025-03-31T16:39:20.760866Z node 49 :QUOTER_SERVICE TRACE: Allocate resource "Resource" 2025-03-31T16:39:20.762119Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyStats([{"Resource", Consumed: 0, Queue: 5}]) 2025-03-31T16:39:20.762169Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Set info for resource "Resource": { Available: -0.0001024289794, QueueWeight: 5 } 2025-03-31T16:39:20.762219Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-03-31T16:39:20.762335Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-03-31T16:39:20.803384Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } }) 2025-03-31T16:39:20.803462Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Kesus allocated {"Resource", 1} 2025-03-31T16:39:20.803529Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Front(0.999897571, 2)} }]) 2025-03-31T16:39:20.802810Z node 50 :KESUS_TABLET TRACE: [72075186224037888] Send TEvResourcesAllocated to [49:7488014172851837055:2303]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } } 2025-03-31T16:39:20.803896Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-03-31T16:39:20.860292Z node 49 :QUOTER_SERVICE TRACE: Feed resource "Resource". Balance: 0.999897571. FreeBalance: 0.999897571 2025-03-31T16:39:20.860340Z node 49 :QUOTER_SERVICE TRACE: Schedule next tick for "Resource". Tick size: 0.100000s. Time: 2025-03-31T16:39:20.960000Z 2025-03-31T16:39:20.860361Z node 49 :QUOTER_SERVICE TRACE: Allocate resource "Resource" 2025-03-31T16:39:20.860412Z node 49 :QUOTER_SERVICE TRACE: Charge "Resource" for 5. Balance: 0.999897571. FreeBalance: 0.999897571. TicksToFullfill: 5.000512197. DurationToFullfillInUs: 500051.2197. TimeToFullfill: 2025-03-31T16:39:20.357557Z. Now: 2025-03-31T16:39:20.860078Z. LastAllocated: 2025-03-31T16:39:19.857506Z 2025-03-31T16:39:20.860860Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyStats([{"Resource", Consumed: 5, Queue: 0}]) 2025-03-31T16:39:20.860901Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Set info for resource "Resource": { Available: -4.000102429, QueueWeight: 0 } 2025-03-31T16:39:20.860948Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-03-31T16:39:20.861464Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-03-31T16:39:20.903476Z node 50 :KESUS_TABLET TRACE: [72075186224037888] Send TEvResourcesAllocated to [49:7488014172851837055:2303]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } } 2025-03-31T16:39:20.907358Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } }) 2025-03-31T16:39:20.907411Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Kesus allocated {"Resource", 1} 2025-03-31T16:39:20.907463Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-03-31T16:39:20.907608Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-03-31T16:39:20.964811Z node 49 :QUOTER_SERVICE TRACE: Feed resource "Resource". Balance: 0. 
FreeBalance: 0 2025-03-31T16:39:21.004157Z node 50 :KESUS_TABLET TRACE: [72075186224037888] Send TEvResourcesAllocated to [49:7488014172851837055:2303]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } } 2025-03-31T16:39:21.007553Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } }) 2025-03-31T16:39:21.007611Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Kesus allocated {"Resource", 1} 2025-03-31T16:39:21.007661Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-03-31T16:39:21.007750Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-03-31T16:39:21.007775Z node 49 :QUOTER_SERVICE TRACE: Feed resource "Resource". Balance: 0. FreeBalance: 0 2025-03-31T16:39:21.103432Z node 50 :KESUS_TABLET TRACE: [72075186224037888] Send TEvResourcesAllocated to [49:7488014172851837055:2303]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } } 2025-03-31T16:39:21.104308Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } }) 2025-03-31T16:39:21.104354Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Kesus allocated {"Resource", 1} 2025-03-31T16:39:21.104402Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-03-31T16:39:21.104642Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-03-31T16:39:21.104668Z node 49 :QUOTER_SERVICE TRACE: Feed resource "Resource". Balance: 0. FreeBalance: 0 2025-03-31T16:39:22.949129Z node 49 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[49:7488014164261901615:2089];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:39:22.949230Z node 49 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; |98.5%| [TM] {RESULT} ydb/tests/functional/sqs/merge_split_common_table/std/py3test |98.5%| [TM] {RESULT} ydb/core/quoter/ut/unittest >> KqpTpch::Query22 [GOOD] |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_kill_tablets.py::TestKillTablets::test_when_kill_hive_it_will_be_restarted_and_can_create_tablets [GOOD] >> TDqPqReadActorTest::TestSaveLoadPqRead [GOOD] >> TDqPqReadActorTest::LoadCorruptedState |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TDqPqReadActorTest::LoadCorruptedState [GOOD] >> TDqPqReadActorTest::TestLoadFromSeveralStates >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_table-True] [GOOD] ------- [TM] {asan, default-linux-x86_64, pic, release} ydb/core/kqp/tests/kikimr_tpch/unittest >> KqpTpch::Query22 [GOOD] Test command err: -- result -- rowIndex: 0 rowIndex: 4 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 10 -- result -- rowIndex: 0 rowIndex: 5 -- result -- rowIndex: 0 rowIndex: 2 -- result -- rowIndex: 0 
rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 4 -- result -- rowIndex: 0 rowIndex: 4 -- result -- rowIndex: 0 rowIndex: 10 -- result -- rowIndex: 0 rowIndex: 20 -- result -- rowIndex: 0 rowIndex: 10 -- result -- rowIndex: 0 rowIndex: 2 -- result -- rowIndex: 0 rowIndex: 28 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 37 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 4 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 5 |98.5%| [TM] {RESULT} ydb/core/kqp/tests/kikimr_tpch/unittest >> KafkaProtocol::AlterConfigsScenario [GOOD] >> KafkaProtocol::LoginWithApiKey |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test >> test_schemeshard_limits.py::TestSchemeShardLimitsCase1::test_too_large_acls [GOOD] |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_table-True] [GOOD] |98.5%| [TA] $(B)/ydb/tests/functional/limits/test-results/py3test/{meta.json ... results_accumulator.log} |98.5%| [TA] {RESULT} $(B)/ydb/tests/functional/limits/test-results/py3test/{meta.json ... results_accumulator.log} |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> DataShardStats::Tli [GOOD] >> DataShardStats::HasSchemaChanges_BTreeIndex |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_create_tablets.py::TestHive::test_when_create_tablets_after_bs_groups_and_kill_hive_then_tablets_start >> TEvaluateExprInViewTest::EvaluateExpr [GOOD] >> TEvaluateExprInViewTest::NakedCallToCurrentTimeFunction |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_kill_tablets.py::TestKillTablets::test_when_kill_hive_it_will_be_restarted_and_can_create_tablets [GOOD] |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TDqPqReadActorTest::TestLoadFromSeveralStates [GOOD] >> test_kill_tablets.py::TestKillTablets::test_then_kill_system_tablets_and_it_increases_generation [GOOD] >> TDqPqReadActorTest::TestReadFromTopicFirstWatermark |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--true] [GOOD] >> KafkaProtocol::LoginWithApiKey [GOOD] >> KafkaProtocol::LoginWithApiKeyWithoutAt |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_create_tablets.py::TestHive::test_when_create_tablets_then_can_lookup_them >> TDqPqReadActorTest::TestReadFromTopicFirstWatermark [GOOD] >> TDqPqReadActorTest::TestReadFromTopicWatermarks1 |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/sql/py3test >> test_kv.py::TestYdbKvWorkload::test_dynumber [GOOD] |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |98.6%| [TM] {RESULT} ydb/tests/sql/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/functional/hive/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_simple_table-False] [GOOD] >> test_pdisk_format_info.py::TestPDiskInfo::test_read_disk_state |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_kill_tablets.py::TestKillTablets::test_then_kill_system_tablets_and_it_increases_generation [GOOD] |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> AnalyzeColumnshard::AnalyzeMultiOperationId [GOOD] |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_simple_table-False] [GOOD] >> test_drain.py::TestHive::test_drain_tablets >> DataShardStats::HasSchemaChanges_BTreeIndex [GOOD] >> DataShardStats::HasSchemaChanges_ByKeyFilter >> TDqPqReadActorTest::TestReadFromTopicWatermarks1 [GOOD] >> TDqPqReadActorTest::WatermarkCheckpointWithItemsInReadyBuffer >> TEvaluateExprInViewTest::NakedCallToCurrentTimeFunction [GOOD] >> TSelectFromViewTest::OneTable >> KafkaProtocol::LoginWithApiKeyWithoutAt [GOOD] >> KafkaProtocol::MetadataScenario |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_async_table-True] [GOOD] |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_async_table-True] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeMultiOperationId [GOOD] Test command err: 2025-03-31T16:35:55.277082Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:35:55.277494Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:35:55.277662Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000ee9/r3tmp/tmpINhbwl/pdisk_1.dat 2025-03-31T16:35:55.689782Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20485, node 1 2025-03-31T16:35:55.937146Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:35:55.937197Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:35:55.937224Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:35:55.937638Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:35:55.939677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:35:56.038034Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:56.038165Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:56.052201Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10733 2025-03-31T16:35:56.566907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:35:59.804222Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-31T16:35:59.845661Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:35:59.845778Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:35:59.876095Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:35:59.878761Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:36:00.155182Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:36:00.155743Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:36:00.156359Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:36:00.156506Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:36:00.156725Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:36:00.156832Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:36:00.156914Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:36:00.157388Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:36:00.157552Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:36:00.326182Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:36:00.326282Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:36:00.341323Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:36:00.513925Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:36:00.574268Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-31T16:36:00.574404Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-31T16:36:00.607949Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-31T16:36:00.609292Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-31T16:36:00.609550Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-31T16:36:00.609604Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-31T16:36:00.609659Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-31T16:36:00.609713Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-31T16:36:00.609759Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-31T16:36:00.609814Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-31T16:36:00.610271Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-31T16:36:00.662296Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-31T16:36:00.662435Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1872:2596], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-31T16:36:00.669992Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1883:2604] 2025-03-31T16:36:00.677166Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1915:2621] 2025-03-31T16:36:00.677324Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1915:2621], schemeshard id = 72075186224037897 2025-03-31T16:36:00.681348Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-31T16:36:00.741167Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-31T16:36:00.741240Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-31T16:36:00.741314Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-31T16:36:00.755282Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-31T16:36:00.762412Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-31T16:36:00.762560Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-31T16:36:00.956626Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-31T16:36:01.156696Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-31T16:36:01.255019Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-31T16:36:02.371228Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2236:3073], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:02.371381Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:36:02.391229Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-31T16:36:02.529436Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:36:02.529662Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:36:02.529974Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:36:02.530111Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:36:02.530233Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:36:02.530355Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:36:02.530476Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:36:02.530615Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:36:02.530748Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:36:02.530889Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:36:02.531018Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:36:02.531176Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:36:02.563238Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:36:02.563348Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;desc ... `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-03-31T16:39:42.783468Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZTgzN2YzZDAtZTczMmE2ZjEtYzQ4N2Y5MDctY2UyOWZiOGQ=, TxId: 2025-03-31T16:39:42.783547Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZTgzN2YzZDAtZTczMmE2ZjEtYzQ4N2Y5MDctY2UyOWZiOGQ=, TxId: 2025-03-31T16:39:42.784090Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-31T16:39:42.814309Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-31T16:39:42.815212Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId7, ActorId=[1:2800:3223] 2025-03-31T16:39:43.448494Z node 2 :STATISTICS DEBUG: Event round 10 is different from the current 0 2025-03-31T16:39:43.448578Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-03-31T16:39:44.163813Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-31T16:39:44.164066Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-31T16:39:44.185740Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-31T16:39:44.185806Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId8 2025-03-31T16:39:44.185834Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId9 2025-03-31T16:39:44.185853Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-03-31T16:39:44.185966Z node 2 :STATISTICS DEBUG: Event round 9 is different from the current 0 2025-03-31T16:39:44.185993Z node 2 :STATISTICS DEBUG: Skip TEvStatisticsRequestTimeout 2025-03-31T16:39:45.674678Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:39:47.063929Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-31T16:39:47.063997Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId8 2025-03-31T16:39:47.064030Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId9 2025-03-31T16:39:47.064053Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-03-31T16:39:48.442970Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-31T16:39:48.468225Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:39:48.468357Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. 
Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-03-31T16:39:48.468398Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-31T16:39:48.468946Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-31T16:39:48.487158Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-31T16:39:48.487654Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-31T16:39:48.487738Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-31T16:39:48.488152Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-03-31T16:39:48.503644Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-03-31T16:39:48.503904Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 11, current Round: 0 2025-03-31T16:39:48.504439Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9416:6411], server id = [2:9417:6412], tablet id = 72075186224037899, status = OK 2025-03-31T16:39:48.504535Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9416:6411], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T16:39:48.506082Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-03-31T16:39:48.506176Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-03-31T16:39:48.506361Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-03-31T16:39:48.506555Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-03-31T16:39:48.506929Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-31T16:39:48.509114Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9416:6411], server id = [2:9417:6412], tablet id = 72075186224037899 2025-03-31T16:39:48.509153Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:39:48.510081Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-03-31T16:39:48.542958Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=M2U5MTkyZjYtMzBmYzg3NjMtM2MzMjAwZTgtNWE5OWNjNDk=, TxId: 2025-03-31T16:39:48.543017Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=M2U5MTkyZjYtMzBmYzg3NjMtM2MzMjAwZTgtNWE5OWNjNDk=, TxId: 2025-03-31T16:39:48.543683Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-31T16:39:48.571180Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-31T16:39:48.571248Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId8, ActorId=[1:2800:3223] 2025-03-31T16:39:49.227606Z node 2 :STATISTICS DEBUG: Event round 11 is different from the current 0 2025-03-31T16:39:49.227703Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-03-31T16:39:50.031626Z node 2 :STATISTICS DEBUG: Event round 10 is different from the current 0 2025-03-31T16:39:50.031731Z node 2 :STATISTICS DEBUG: Skip TEvStatisticsRequestTimeout 2025-03-31T16:39:50.031832Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-31T16:39:50.031865Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId9 2025-03-31T16:39:50.031894Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-03-31T16:39:51.426333Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-31T16:39:51.426616Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-31T16:39:51.452051Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:39:52.831794Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-31T16:39:52.831870Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId9 2025-03-31T16:39:52.831902Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-03-31T16:39:54.239961Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:39:54.240092Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-03-31T16:39:54.240127Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-31T16:39:54.240759Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-31T16:39:54.254913Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-31T16:39:54.255476Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-31T16:39:54.255551Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-31T16:39:54.256057Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-03-31T16:39:54.282636Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-03-31T16:39:54.282859Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 12, current Round: 0 2025-03-31T16:39:54.283544Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9585:6500], server id = [2:9586:6501], tablet id = 72075186224037899, status = OK 2025-03-31T16:39:54.283648Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9585:6500], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-31T16:39:54.284963Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-03-31T16:39:54.285028Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-03-31T16:39:54.285281Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9585:6500], server id = [2:9586:6501], tablet id = 72075186224037899 2025-03-31T16:39:54.285316Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-31T16:39:54.285403Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-03-31T16:39:54.285525Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-03-31T16:39:54.285879Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-31T16:39:54.290429Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-03-31T16:39:54.319399Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YTFlMzgwYzgtZjExYmZkZWItNDAyNmNlMTgtYTg3OGM5MjA=, TxId: 2025-03-31T16:39:54.319466Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YTFlMzgwYzgtZjExYmZkZWItNDAyNmNlMTgtYTg3OGM5MjA=, TxId: 2025-03-31T16:39:54.319988Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-31T16:39:54.364793Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-31T16:39:54.364860Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId9, ActorId=[1:2800:3223] >> test_create_tablets.py::TestHive::test_when_create_tablets_after_bs_groups_and_kill_hive_then_tablets_start [GOOD] |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_alloc_default.py::TestAlloc::test_up_down[kikimr0] [GOOD] |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_pdisk_format_info.py::TestPDiskInfo::test_read_disk_state [GOOD] |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TDqPqReadActorTest::WatermarkCheckpointWithItemsInReadyBuffer [GOOD] >> TPqWriterTest::TestWriteToTopic |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_table-True] [GOOD] >> KqpQueryService::ReturnAndCloseSameTime [GOOD] >> TPqWriterTest::TestWriteToTopic [GOOD] >> KafkaProtocol::MetadataScenario [GOOD] >> KafkaProtocol::MetadataInServerlessScenario >> TPqWriterTest::TestWriteToTopicMultiBatch |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TPqWriterTest::TestWriteToTopicMultiBatch [GOOD] |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TPqWriterTest::TestDeferredWriteToTopic |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_create_tablets.py::TestHive::test_when_create_tablets_after_bs_groups_and_kill_hive_then_tablets_start [GOOD] |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_and_change_tablet_channel_then_can_read_from_tablet ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ReturnAndCloseSameTime [GOOD] Test command err: Trying to start YDB, gRPC: 21270, MsgBus: 32368 2025-03-31T16:34:32.767720Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012939190818828:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:32.771306Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0014d1/r3tmp/tmpZA7g1F/pdisk_1.dat 2025-03-31T16:34:33.207722Z node 1 :IMPORT WARN: Table profiles were not loaded 
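The RunDataQuery entries above show the statistics aggregator persisting per-column statistics into .metadata/_statistics with a parameterized YQL data query. The log rendering appears to have swallowed the angle-bracketed element types in the DECLARE clauses; below is the query as logged with those types restored, on the assumption that they are Uint32 and String, wrapped in a minimal ydb Python SDK sketch. The endpoint, stat_type and payload values are illustrative assumptions, not taken from this run; owner_id and local_path_id are the ones visible in the log.

    import ydb  # ydb-python-sdk

    # Query text as it appears in the log; the List element types are assumptions.
    SAVE_STATS_QUERY = """
    DECLARE $owner_id AS Uint64;
    DECLARE $local_path_id AS Uint64;
    DECLARE $stat_type AS Uint32;
    DECLARE $column_tags AS List<Uint32>;
    DECLARE $data AS List<String>;
    UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data)
    VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]),
           ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]);
    """

    driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root/Database")
    driver.wait(timeout=5)

    def save_stats(session):
        # Prepare first so the DECLAREd parameter types are checked server-side.
        prepared = session.prepare(SAVE_STATS_QUERY)
        session.transaction(ydb.SerializableReadWrite()).execute(
            prepared,
            {
                "$owner_id": 72075186224037897,  # OwnerId seen in the log
                "$local_path_id": 4,             # LocalPathId seen in the log
                "$stat_type": 1,                 # assumed
                "$column_tags": [1, 2],          # assumed
                "$data": [b"blob0", b"blob1"],   # assumed
            },
            commit_tx=True,
        )

    ydb.SessionPool(driver).retry_operation_sync(save_stats)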
TServer::EnableGrpc on GrpcPort 21270, node 1 2025-03-31T16:34:33.263252Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:33.267544Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:33.278825Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:34:33.312815Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:33.312841Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:33.312848Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:33.312981Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32368 TClient is connected to server localhost:32368 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:34:33.833873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:33.847047Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:34:33.851660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:33.986896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:34.148480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:34.212742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:36.037643Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012956370689760:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:36.037741Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:36.377296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:34:36.415237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:34:36.448504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:34:36.481483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:34:36.517386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:34:36.600930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:34:36.664434Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012956370690279:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:36.664501Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:36.664556Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012956370690284:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:36.668135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:34:36.677854Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012956370690286:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:34:36.751863Z node 1 :TX_PROXY ERROR: Actor# [1:7488012956370690338:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 10671, MsgBus: 26426 2025-03-31T16:34:38.384912Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488012965953549021:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:38.385074Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0014d1/r3tmp/tmpt8UlIn/pdisk_1.dat 2025-03-31T16:34:38.509232Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:34:38.557445Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:38.557532Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:38.559111Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10671, node 2 2025-03-31T16:34:38.603507Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:38.603530Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:38.603538Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:38.603647Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26426 TClient is connected to server localhost:26426 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-31T16:34:39.063601Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:34:39.089947Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
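The KQP_WORKLOAD_SERVICE warnings above are first-use bootstrap noise rather than failures: the pool fetcher looks up the resource pool "default", gets NOT_FOUND, and a TPoolCreatorActor then creates /Root/.metadata/workload_manager/pools/default (the ESchemeOpCreateResourcePool suboperation), re-checking after "Transaction ... completed, doublechecking"; a creator that loses the race gets the TX_PROXY "path exist, request accepts it" issue, which is treated as success. A minimal sketch of making a pool exist explicitly from a client follows; the DDL parameter names follow the YDB CREATE RESOURCE POOL documentation and should be read as assumptions, since this test only exercises the implicit creation of "default" and never issues such a statement itself.

    import ydb

    # Assumed DDL per the YDB docs; not part of this test run.
    CREATE_POOL = """
    CREATE RESOURCE POOL demo_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10,
        QUEUE_SIZE = 100
    );
    """

    def ensure_pool(session):
        try:
            session.execute_scheme(CREATE_POOL)
        except ydb.issues.Error:
            # Racing creators are expected; the schemeshard answers
            # "path exist, request accepts it", so treat it as already created.
            pass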
2025-03-31T16:34:39.164041Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:39.323045Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:39.394574Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:41.788078Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012978838452677:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03- ... Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:41.833058Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:34:41.881969Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:34:41.909717Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:34:41.936841Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:34:41.972004Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:34:42.011224Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:34:42.055130Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012983133420481:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:42.055243Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:42.055626Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488012983133420486:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:42.059343Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:34:42.071029Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488012983133420488:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:34:42.132975Z node 2 :TX_PROXY ERROR: Actor# [2:7488012983133420541:3441] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:34:43.385430Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488012965953549021:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:43.385498Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 28521, MsgBus: 12706 2025-03-31T16:34:44.938076Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488012990870011036:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:44.938107Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0014d1/r3tmp/tmp9MAjfF/pdisk_1.dat 2025-03-31T16:34:45.146555Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:34:45.162044Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:45.162140Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:45.167491Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28521, node 3 2025-03-31T16:34:45.211902Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:45.211926Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:45.211934Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:45.212045Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12706 TClient is connected to server localhost:12706 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
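Node 3 now repeats the same bootstrap sequence as nodes 1 and 2: propose the create, swallow the path-exists race, then "doublecheck" by re-reading until the pool path is visible. A loose Python analogue of that control flow is sketched below; the real logic lives in the C++ TPoolCreatorActor, and the callables, exception type and timings here are illustrative assumptions.

    import time

    def create_with_doublecheck(propose_create, path_exists, attempts=5, delay=0.5):
        """Propose a create, then re-read ("doublecheck") until the path shows up."""
        try:
            propose_create()        # may race with another node's creator
        except RuntimeError:
            pass                    # analogue of "path exist, request accepts it"
        for _ in range(attempts):   # analogue of "Scheduled retry for error"
            if path_exists():
                return True
            time.sleep(delay)
        return False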
2025-03-31T16:34:45.627544Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:34:45.634965Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting...2025-03-31T16:34:45.643597Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-31T16:34:45.709657Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:45.893430Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:45.979952Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:48.366263Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013008049881986:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:48.366355Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:48.413989Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:34:48.455449Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:34:48.490205Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:34:48.529590Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:34:48.567869Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:34:48.612301Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:34:48.663444Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013008049882495:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:48.663528Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:48.663595Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013008049882500:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:48.671403Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:34:48.689655Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488013008049882502:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:34:48.746588Z node 3 :TX_PROXY ERROR: Actor# [3:7488013008049882555:3440] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:34:49.938139Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488012990870011036:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:49.938211Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:35:00.123373Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:35:00.123399Z node 3 :IMPORT WARN: Table profiles were not loaded >> TPqWriterTest::TestDeferredWriteToTopic [GOOD] >> test_drain.py::TestHive::test_drain_on_stop |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_table-True] [GOOD] >> TPqWriterTest::WriteNonExistentTopic >> TPqWriterTest::WriteNonExistentTopic [GOOD] |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TPqWriterTest::TestCheckpoints |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_create_tablets.py::TestHive::test_when_create_tablets_then_can_lookup_them [GOOD] >> test_kill_tablets.py::TestKillTablets::test_when_kill_keyvalue_tablet_it_will_be_restarted |98.8%| [TA] $(B)/ydb/tests/functional/rename/test-results/py3test/{meta.json ... results_accumulator.log} >> DataShardStats::HasSchemaChanges_ByKeyFilter [GOOD] >> DataShardStats::HasSchemaChanges_Columns |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_alloc_default.py::TestAlloc::test_mkql_not_increased[kikimr0] >> TSelectFromViewTest::OneTable [GOOD] >> TSelectFromViewTest::OneTableUsingRelativeName |98.8%| [TA] {RESULT} $(B)/ydb/tests/functional/rename/test-results/py3test/{meta.json ... 
results_accumulator.log} |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TPqWriterTest::TestCheckpoints [GOOD] |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TPqWriterTest::TestCheckpointWithEmptyBatch >> TPqWriterTest::TestCheckpointWithEmptyBatch [GOOD] |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> KafkaProtocol::MetadataInServerlessScenario [GOOD] >> KafkaProtocol::NativeKafkaBalanceScenario |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[none] |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_pdisk_format_info.py::TestPDiskInfo::test_read_disk_state [GOOD] >> test_discovery.py::TestDiscoveryExtEndpoint::test_scenario ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/pq_async_io/ut/unittest >> TPqWriterTest::TestCheckpointWithEmptyBatch [GOOD] Test command err: 2025-03-31T16:36:35.051538Z node 1 :KQP_COMPUTE INFO: SelfId: [0:0:0], TxId: query_1, task: 0. PQ source. Start read actor, local row dispatcher [1:7488013466271109282:2053], metadatafields: , partitions: 666 2025-03-31T16:36:35.052014Z node 1 :KQP_COMPUTE TRACE: SelfId: [0:0:0], TxId: query_1, task: 0. PQ source. GetAsyncInputData freeSpace = 12345 2025-03-31T16:36:35.052145Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7488013466271109288:2048], TxId: query_1, task: 0. PQ source. Send TEvCoordinatorChangesSubscribe to local RD ([1:7488013466271109282:2053]) 2025-03-31T16:36:35.052367Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7488013466271109288:2048], TxId: query_1, task: 0. PQ source. TEvCoordinatorChanged, new coordinator [1:7488013466271109283:2054] 2025-03-31T16:36:35.052570Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7488013466271109288:2048], TxId: query_1, task: 0. PQ source. Send TEvCoordinatorRequest to coordinator [1:7488013466271109283:2054], partIds: 666 cookie 1 2025-03-31T16:36:35.053066Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7488013466271109288:2048], TxId: query_1, task: 0. PQ source. Received TEvCoordinatorResult from [1:7488013466271109283:2054], cookie 1 2025-03-31T16:36:35.053094Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7488013466271109288:2048], TxId: query_1, task: 0. PQ source. UpdateSessions, Sessions size 0 2025-03-31T16:36:35.053102Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7488013466271109288:2048], TxId: query_1, task: 0. PQ source. Distribution is changed, remove sessions 2025-03-31T16:36:35.053147Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7488013466271109288:2048], TxId: query_1, task: 0. PQ source. Create session to [1:7488013466271109285:2056], generation 1 2025-03-31T16:36:35.053216Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7488013466271109288:2048], TxId: query_1, task: 0. PQ source. Send TEvStartSession to [1:7488013466271109285:2056], connection id 1 partitions offsets (666 / ), 2025-03-31T16:36:35.053478Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7488013466271109288:2048], TxId: query_1, task: 0. PQ source. 
Received TEvStartSessionAck from [1:7488013466271109285:2056], seqNo 0, ConfirmedSeqNo 0, generation 1 2025-03-31T16:36:35.053827Z node 1 :KQP_COMPUTE TRACE: SelfId: [1:7488013466271109288:2048], TxId: query_1, task: 0. PQ source. Received TEvNewDataArrived from [1:7488013466271109285:2056], partition 666, seqNo 0, ConfirmedSeqNo 0 generation 1 2025-03-31T16:36:35.058006Z node 1 :KQP_COMPUTE TRACE: SelfId: [1:7488013466271109288:2048], TxId: query_1, task: 0. PQ source. Received TEvMessageBatch from [1:7488013466271109285:2056], seqNo 0, ConfirmedSeqNo 0 generation 1 2025-03-31T16:36:35.058066Z node 1 :KQP_COMPUTE TRACE: SelfId: [1:7488013466271109288:2048], TxId: query_1, task: 0. PQ source. TEvMessageBatch NextOffset 1 2025-03-31T16:36:35.058075Z node 1 :KQP_COMPUTE TRACE: SelfId: [1:7488013466271109288:2048], TxId: query_1, task: 0. PQ source. TEvMessageBatch NextOffset 2 2025-03-31T16:36:35.058263Z node 1 :KQP_COMPUTE TRACE: SelfId: [1:7488013466271109288:2048], TxId: query_1, task: 0. PQ source. GetAsyncInputData freeSpace = 1000 2025-03-31T16:36:35.058395Z node 1 :KQP_COMPUTE TRACE: SelfId: [1:7488013466271109288:2048], TxId: query_1, task: 0. PQ source. NextOffset 2 2025-03-31T16:36:35.058417Z node 1 :KQP_COMPUTE TRACE: SelfId: [1:7488013466271109288:2048], TxId: query_1, task: 0. PQ source. Return 2 rows, buffer size 0, free space 948, result size 52 2025-03-31T16:36:35.058961Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7488013466271109288:2048], TxId: query_1, task: 0. PQ source. PassAway 2025-03-31T16:36:35.059063Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7488013466271109288:2048], TxId: query_1, task: 0. PQ source. SelfId: [1:7488013466271109288:2048], TxId: query_1, task: 0. PQ source. State: used buffer size 0 ready buffer event size 0 state 3 InFlyAsyncInputData 0 Counters: GetAsyncInputData 1 CoordinatorChanged 1 CoordinatorResult 0 MessageBatch 1 StartSessionAck 1 NewDataArrived 1 SessionError 0 Statistics 0 NodeDisconnected 0 NodeConnected 0 Undelivered 0 Retry 0 PrivateHeartbeat 0 SessionClosed 0 Pong 0 Heartbeat 0 PrintState 0 ProcessState 0 NotifyCA 1 [1:7488013466271109285:2056] status 2 is waiting ack 0 connection id 1 id 1, LocalRecipient partId 666 next offset 2 is waiting batch 0 has pending data 0 2025-03-31T16:36:35.059108Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7488013466271109288:2048], TxId: query_1, task: 0. PQ source. Send StopSession to [1:7488013466271109285:2056] generation 1 2025-03-31T16:36:35.389429Z node 2 :KQP_COMPUTE INFO: SelfId: [0:0:0], TxId: query_1, task: 0. PQ source. Start read actor, local row dispatcher [2:7488013469941001202:2053], metadatafields: , partitions: 666 2025-03-31T16:36:35.389647Z node 2 :KQP_COMPUTE TRACE: SelfId: [0:0:0], TxId: query_1, task: 0. PQ source. GetAsyncInputData freeSpace = 12345 2025-03-31T16:36:35.389678Z node 2 :KQP_COMPUTE INFO: SelfId: [2:7488013469941001208:2048], TxId: query_1, task: 0. PQ source. Send TEvCoordinatorChangesSubscribe to local RD ([2:7488013469941001202:2053]) 2025-03-31T16:36:35.389823Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:7488013469941001208:2048], TxId: query_1, task: 0. PQ source. TEvCoordinatorChanged, new coordinator [2:7488013469941001203:2054] 2025-03-31T16:36:35.389873Z node 2 :KQP_COMPUTE INFO: SelfId: [2:7488013469941001208:2048], TxId: query_1, task: 0. PQ source. Send TEvCoordinatorRequest to coordinator [2:7488013469941001203:2054], partIds: 666 cookie 1 2025-03-31T16:36:35.390057Z node 2 :KQP_COMPUTE INFO: SelfId: [2:7488013469941001208:2048], TxId: query_1, task: 0. PQ source. 
Received TEvCoordinatorResult from [2:7488013469941001203:2054], cookie 1 2025-03-31T16:36:35.390077Z node 2 :KQP_COMPUTE INFO: SelfId: [2:7488013469941001208:2048], TxId: query_1, task: 0. PQ source. UpdateSessions, Sessions size 0 2025-03-31T16:36:35.390084Z node 2 :KQP_COMPUTE INFO: SelfId: [2:7488013469941001208:2048], TxId: query_1, task: 0. PQ source. Distribution is changed, remove sessions 2025-03-31T16:36:35.390102Z node 2 :KQP_COMPUTE INFO: SelfId: [2:7488013469941001208:2048], TxId: query_1, task: 0. PQ source. Create session to [2:7488013469941001205:2056], generation 1 2025-03-31T16:36:35.390132Z node 2 :KQP_COMPUTE INFO: SelfId: [2:7488013469941001208:2048], TxId: query_1, task: 0. PQ source. Send TEvStartSession to [2:7488013469941001205:2056], connection id 1 partitions offsets (666 / ), 2025-03-31T16:36:35.390320Z node 2 :KQP_COMPUTE INFO: SelfId: [2:7488013469941001208:2048], TxId: query_1, task: 0. PQ source. Received TEvStartSessionAck from [2:7488013469941001205:2056], seqNo 0, ConfirmedSeqNo 0, generation 1 2025-03-31T16:36:35.390391Z node 2 :KQP_COMPUTE TRACE: SelfId: [2:7488013469941001208:2048], TxId: query_1, task: 0. PQ source. Received TEvNewDataArrived from [2:7488013469941001205:2056], partition 666, seqNo 0, ConfirmedSeqNo 0 generation 1 2025-03-31T16:36:35.391418Z node 2 :KQP_COMPUTE TRACE: SelfId: [2:7488013469941001208:2048], TxId: query_1, task: 0. PQ source. Received TEvMessageBatch from [2:7488013469941001205:2056], seqNo 0, ConfirmedSeqNo 0 generation 1 2025-03-31T16:36:35.391485Z node 2 :KQP_COMPUTE TRACE: SelfId: [2:7488013469941001208:2048], TxId: query_1, task: 0. PQ source. TEvMessageBatch NextOffset 1 2025-03-31T16:36:35.391499Z node 2 :KQP_COMPUTE TRACE: SelfId: [2:7488013469941001208:2048], TxId: query_1, task: 0. PQ source. TEvMessageBatch NextOffset 2 2025-03-31T16:36:35.391526Z node 2 :KQP_COMPUTE TRACE: SelfId: [2:7488013469941001208:2048], TxId: query_1, task: 0. PQ source. GetAsyncInputData freeSpace = 1000 2025-03-31T16:36:35.391655Z node 2 :KQP_COMPUTE TRACE: SelfId: [2:7488013469941001208:2048], TxId: query_1, task: 0. PQ source. NextOffset 2 2025-03-31T16:36:35.391674Z node 2 :KQP_COMPUTE TRACE: SelfId: [2:7488013469941001208:2048], TxId: query_1, task: 0. PQ source. Return 2 rows, buffer size 0, free space 948, result size 52 2025-03-31T16:36:35.391873Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:7488013469941001208:2048], TxId: query_1, task: 0. PQ source. Received TEvUndelivered, TSystem::Undelivered from [2:7488013469941001205:2056], reason Disconnected, cookie 999 2025-03-31T16:36:35.391927Z node 2 :KQP_COMPUTE TRACE: SelfId: [2:7488013469941001208:2048], TxId: query_1, task: 0. PQ source. Received TEvNewDataArrived from [2:7488013469941001205:2056], partition 666, seqNo 0, ConfirmedSeqNo 0 generation 1 2025-03-31T16:36:35.392609Z node 2 :KQP_COMPUTE TRACE: SelfId: [2:7488013469941001208:2048], TxId: query_1, task: 0. PQ source. Received TEvRetry, EventQueueId 1 2025-03-31T16:36:35.392677Z node 2 :KQP_COMPUTE TRACE: SelfId: [2:7488013469941001208:2048], TxId: query_1, task: 0. PQ source. Received TEvMessageBatch from [2:7488013469941001205:2056], seqNo 0, ConfirmedSeqNo 0 generation 1 2025-03-31T16:36:35.392688Z node 2 :KQP_COMPUTE TRACE: SelfId: [2:7488013469941001208:2048], TxId: query_1, task: 0. PQ source. TEvMessageBatch NextOffset 3 2025-03-31T16:36:35.392716Z node 2 :KQP_COMPUTE TRACE: SelfId: [2:7488013469941001208:2048], TxId: query_1, task: 0. PQ source. 
GetAsyncInputData freeSpace = 1000 2025-03-31T16:36:35.392759Z node 2 :KQP_COMPUTE TRACE: SelfId: [2:7488013469941001208:2048], TxId: query_1, task: 0. PQ source. NextOffset 3 2025-03-31T16:36:35.392767Z node 2 :KQP_COMPUTE TRACE: SelfId: [2:7488013469941001208:2048], TxId: query_1, task: 0. PQ source. Return 1 rows, buffer size 0, free space 974, result size 26 2025-03-31T16:36:35.393161Z node 2 :KQP_COMPUTE INFO: SelfId: [2:7488013469941001208:2048], TxId: query_1, task: 0. PQ source. PassAway 2025-03-31T16:36:35.393284Z node 2 :KQP_COMPUTE INFO: SelfId: [2:7488013469941001208:2048], TxId: query_1, task: 0. PQ source. SelfId: [2:7488013469941001208:2048], TxId: query_1, task: 0. PQ source. State: used buffer size 0 ready buffer event size 0 state 3 InFlyAsyncInputData 0 Counters: GetAsyncInputData 1 CoordinatorChanged 1 CoordinatorResult 0 MessageBatch 2 StartSessionAck 1 NewDataArrived 2 SessionError 0 Statistics 0 NodeDisconnected 0 NodeConnected 0 Undelivered 1 Retry 1 PrivateHeartbeat 0 SessionClosed 0 Pong 0 Heartbeat 0 PrintState 0 ProcessState 0 NotifyCA 2 [2:7488013469941001205:2056] status 2 is waiting ack 0 connection id 1 id 1, LocalRecipient partId 666 next offset 3 is waiting batch 0 has pending data 0 2025-03-31T16:36:35.393307Z node 2 :KQP_COMPUTE INFO: SelfId: [2:7488013469941001208:2048], TxId: query_1, task: 0. PQ source. Send StopSession to [2:7488013469941001205:2056] generation 1 2025-03-31T16:36:35.764621Z node 3 :KQP_COMPUTE INFO: SelfId: [0:0:0], TxId: query_1, task: 0. PQ source. Start read actor, local row dispatcher [3:7488013468674950739:2053], metadatafields: , partitions: 666 2025-03-31T16:36:35.764813Z node 3 :KQP_COMPUTE TRACE: SelfId: [0:0:0], TxId: query_1, task: 0. PQ source. GetAsyncInputData freeSpace = 12345 2025-03-31T16:36:35.764854Z node 3 :KQP_COMPUTE INFO: SelfId: [3:7488013468674950745:2048], TxId: query_1, task: 0. PQ source. Send TEvCoordinatorChangesSubscribe to local RD ([3:7488013468674950739:2053]) 2025-03-31T16:36:35.765008Z node 3 :KQP_COMPUTE DEBUG: SelfId: [3:7488013468674950745:2048], TxId: query_1, task: 0. PQ source. TEvCoordinatorChanged, new coordinator [3:7488013468674950740:2054] 2025-03-31T16:36:35.765040Z node 3 :KQP_COMPUTE INFO: SelfId: [3:7488013468674950745:2048], TxId: query_1, task: 0. PQ source. Send TEvCoordinatorRequest to coordinator [3:7488013468674950740:2054], partIds: 666 cookie 1 2025-03-31T16:36:35.765223Z node 3 :KQP_COMPUTE INFO: SelfId: [3:7488013468674950745:2048], TxId: query_1, task: 0. PQ source. Received TEvCoordinatorResult from [3:7488013468674950740:2054], cookie 1 2025-03-31T16:36:35.765247Z node 3 :KQP_COMPUTE INFO: SelfId: [3:7488013468674950745:2048], TxId: query_1, task: 0. PQ source. UpdateSessions, Sessions size 0 2025-03-31T16:36:35.765255Z node 3 :KQP_COMPUTE INFO: SelfId: [3:7488013468674950745:2048], TxId: query_1, task: 0. PQ source. Distribution is chan ... [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [f3e7209d-e1a2702e-be26068-37449aa1] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1743439209810 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-31T16:40:09.810183Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [f3e7209d-e1a2702e-be26068-37449aa1] Write session established. 
Init response: last_seq_no: 5 session_id: "f3e7209d-e1a2702e-be26068-37449aa1|1c7c146a-501dfe3e-80576475-8146eb90_0" 2025-03-31T16:40:09.810235Z :TRACE: [local] TRACE_EVENT InitResponse partition_id=0 session_id=f3e7209d-e1a2702e-be26068-37449aa1|1c7c146a-501dfe3e-80576475-8146eb90_0 2025-03-31T16:40:09.810281Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [f3e7209d-e1a2702e-be26068-37449aa1|1c7c146a-501dfe3e-80576475-8146eb90_0] MessageGroupId [f3e7209d-e1a2702e-be26068-37449aa1] Write session: set DirectWriteToPartitionId 0 2025-03-31T16:40:09.810479Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [f3e7209d-e1a2702e-be26068-37449aa1|1c7c146a-501dfe3e-80576475-8146eb90_0] PartitionId [0] Generation [0] Get partition location async, partition 0, delay 0.000000s 2025-03-31T16:40:09.810541Z :TRACE: [local] TRACE_EVENT DescribePartitionRequest path=local/Checkpoints partition_id=0 2025-03-31T16:40:09.810696Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [f3e7209d-e1a2702e-be26068-37449aa1|1c7c146a-501dfe3e-80576475-8146eb90_0] PartitionId [0] Generation [0] Getting partition location, partition 0 2025-03-31T16:40:09.811268Z :INFO: [local] [local] [d75b62b3-886d1b0a-6edfe1c7-65dbc1a5] [] Confirm partition stream create. Partition stream id: 1. Cluster: "-". Topic: "Checkpoints". Partition: 0. Read offset: (NULL) 2025-03-31T16:40:09.814417Z :DEBUG: [local] [local] [d75b62b3-886d1b0a-6edfe1c7-65dbc1a5] [] Got ReadResponse, serverBytesSize = 926, now ReadSizeBudget = 0, ReadSizeServerDelta = 52427874 2025-03-31T16:40:09.814539Z :DEBUG: [local] [local] [d75b62b3-886d1b0a-6edfe1c7-65dbc1a5] [] In ContinueReadingDataImpl, ReadSizeBudget = 0, ReadSizeServerDelta = 52427874 2025-03-31T16:40:09.814826Z :DEBUG: [local] Decompression task done. Partition/PartitionSessionId: 1 (0-4) 2025-03-31T16:40:09.814899Z :DEBUG: [local] [local] [d75b62b3-886d1b0a-6edfe1c7-65dbc1a5] [] Returning serverBytesSize = 926 to budget 2025-03-31T16:40:09.814953Z :DEBUG: [local] [local] [d75b62b3-886d1b0a-6edfe1c7-65dbc1a5] [] In ContinueReadingDataImpl, ReadSizeBudget = 926, ReadSizeServerDelta = 52427874 2025-03-31T16:40:09.815279Z :DEBUG: [local] [local] [d75b62b3-886d1b0a-6edfe1c7-65dbc1a5] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2025-03-31T16:40:09.815387Z :DEBUG: [local] Take Data. Partition 0. Read: {0, 0} (0-0) 2025-03-31T16:40:09.815562Z :DEBUG: [local] Take Data. Partition 0. Read: {1, 0} (1-1) 2025-03-31T16:40:09.815629Z :DEBUG: [local] Take Data. Partition 0. Read: {1, 1} (2-2) 2025-03-31T16:40:09.815674Z :DEBUG: [local] Take Data. Partition 0. Read: {2, 0} (3-3) 2025-03-31T16:40:09.815755Z :DEBUG: [local] Take Data. Partition 0. Read: {3, 0} (4-4) 2025-03-31T16:40:09.816076Z :DEBUG: [local] [local] [d75b62b3-886d1b0a-6edfe1c7-65dbc1a5] [] The application data is transferred to the client. Number of messages 5, size 5 bytes 2025-03-31T16:40:09.816143Z :DEBUG: [local] [local] [d75b62b3-886d1b0a-6edfe1c7-65dbc1a5] [] Returning serverBytesSize = 0 to budget 2025-03-31T16:40:09.816326Z :INFO: [local] [local] [d75b62b3-886d1b0a-6edfe1c7-65dbc1a5] Closing read session. 
Close timeout: 0.000000s 2025-03-31T16:40:09.816397Z :INFO: [local] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:Checkpoints:0:1:4:0 2025-03-31T16:40:09.816464Z :INFO: [local] [local] [d75b62b3-886d1b0a-6edfe1c7-65dbc1a5] Counters: { Errors: 0 CurrentSessionLifetimeMs: 21 BytesRead: 5 MessagesRead: 5 BytesReadCompressed: 5 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-31T16:40:09.816637Z :NOTICE: [local] [local] [d75b62b3-886d1b0a-6edfe1c7-65dbc1a5] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-03-31T16:40:09.816708Z :DEBUG: [local] [local] [d75b62b3-886d1b0a-6edfe1c7-65dbc1a5] [] Abort session to cluster 2025-03-31T16:40:09.817231Z :INFO: [local] [local] [d75b62b3-886d1b0a-6edfe1c7-65dbc1a5] Closing read session. Close timeout: 0.000000s 2025-03-31T16:40:09.817297Z :INFO: [local] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:Checkpoints:0:1:4:0 2025-03-31T16:40:09.817364Z :INFO: [local] [local] [d75b62b3-886d1b0a-6edfe1c7-65dbc1a5] Counters: { Errors: 0 CurrentSessionLifetimeMs: 22 BytesRead: 5 MessagesRead: 5 BytesReadCompressed: 5 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-31T16:40:09.817516Z :NOTICE: [local] [local] [d75b62b3-886d1b0a-6edfe1c7-65dbc1a5] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-03-31T16:40:09.831887Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [f3e7209d-e1a2702e-be26068-37449aa1|1c7c146a-501dfe3e-80576475-8146eb90_0] PartitionId [0] Generation [0] Got PartitionLocation response. Status SUCCESS, proto: partition { active: true partition_location { node_id: 1 generation: 1 } } 2025-03-31T16:40:09.831975Z :TRACE: [local] TRACE_EVENT DescribePartitionResponse partition_id=0 active=1 pl_node_id=1 pl_generation=1 2025-03-31T16:40:09.832077Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [f3e7209d-e1a2702e-be26068-37449aa1|1c7c146a-501dfe3e-80576475-8146eb90_0] PartitionId [0] Generation [0] GetPreferredEndpoint: partitionId 0, partitionNodeId 1 exists in the endpoint pool. 2025-03-31T16:40:09.832124Z :TRACE: [local] TRACE_EVENT PreferredPartitionLocation Endpoint= NodeId=1 Generation=1 2025-03-31T16:40:09.832167Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [f3e7209d-e1a2702e-be26068-37449aa1|1c7c146a-501dfe3e-80576475-8146eb90_0] PartitionId [0] Generation [1] Start write session. Will connect to nodeId: 1 2025-03-31T16:40:09.836655Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [f3e7209d-e1a2702e-be26068-37449aa1|1c7c146a-501dfe3e-80576475-8146eb90_0] PartitionId [0] Generation [1] Write session: OnReadDone gRpcStatusCode: 1, Msg: CANCELLED, Details: , InternalError: 0 2025-03-31T16:40:09.838256Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [f3e7209d-e1a2702e-be26068-37449aa1|1c7c146a-501dfe3e-80576475-8146eb90_0] PartitionId [0] Generation [1] Write session: direct write to partition: 0, generation 1 2025-03-31T16:40:09.838440Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [f3e7209d-e1a2702e-be26068-37449aa1|1c7c146a-501dfe3e-80576475-8146eb90_0] PartitionId [0] Generation [1] Write session: send init request: init_request { path: "Checkpoints" producer_id: "f3e7209d-e1a2702e-be26068-37449aa1" partition_with_generation { generation: 1 } } 2025-03-31T16:40:09.838491Z :TRACE: [local] TRACE_EVENT InitRequest pwg_partition_id=0 pwg_generation=1 2025-03-31T16:40:09.838902Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [f3e7209d-e1a2702e-be26068-37449aa1|1c7c146a-501dfe3e-80576475-8146eb90_0] PartitionId [0] Generation [1] Write session: OnWriteDone gRpcStatusCode: 0 2025-03-31T16:40:09.840324Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [f3e7209d-e1a2702e-be26068-37449aa1|1c7c146a-501dfe3e-80576475-8146eb90_0] PartitionId [0] Generation [1] Write session: close. Timeout 0.000000s 2025-03-31T16:40:09.840379Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [f3e7209d-e1a2702e-be26068-37449aa1|1c7c146a-501dfe3e-80576475-8146eb90_0] PartitionId [0] Generation [1] Write session will now close 2025-03-31T16:40:09.840446Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [f3e7209d-e1a2702e-be26068-37449aa1|1c7c146a-501dfe3e-80576475-8146eb90_0] PartitionId [0] Generation [1] Write session: aborting 2025-03-31T16:40:09.841157Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. 
] SessionId [f3e7209d-e1a2702e-be26068-37449aa1|1c7c146a-501dfe3e-80576475-8146eb90_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2025-03-31T16:40:09.841839Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [f3e7209d-e1a2702e-be26068-37449aa1|1c7c146a-501dfe3e-80576475-8146eb90_0] PartitionId [0] Generation [1] Write session: OnReadDone gRpcStatusCode: 1, Msg: CANCELLED, Details: , InternalError: 0 2025-03-31T16:40:09.841900Z :TRACE: [local] TRACE_EVENT Error status=CLIENT_CANCELLED 2025-03-31T16:40:09.841930Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [f3e7209d-e1a2702e-be26068-37449aa1|1c7c146a-501dfe3e-80576475-8146eb90_0] PartitionId [0] Generation [1] Write session is aborting and will not restart 2025-03-31T16:40:09.842013Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [f3e7209d-e1a2702e-be26068-37449aa1|1c7c146a-501dfe3e-80576475-8146eb90_0] PartitionId [0] Generation [1] Write session: destroy 2025-03-31T16:40:10.912423Z node 40 :KQP_COMPUTE TRACE: SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. SendData. Batch: 0. Checkpoint: 1. Finished: 0 2025-03-31T16:40:11.029783Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [9d796664-8a77a76b-29943ae8-e452a63a] Write session: try to update token 2025-03-31T16:40:11.030322Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [9d796664-8a77a76b-29943ae8-e452a63a] Start write session. Will connect to nodeId: 0 2025-03-31T16:40:11.115341Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [9d796664-8a77a76b-29943ae8-e452a63a] Write session: close. Timeout 0.000000s 2025-03-31T16:40:11.096315Z node 40 :KQP_COMPUTE DEBUG: SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. [Checkpoint 0.0] Send checkpoint state immediately 2025-03-31T16:40:11.096572Z node 40 :KQP_COMPUTE TRACE: SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. Save checkpoint { Id: 0 Generation: 0 } state: { SourceId: "9d796664-8a77a76b-29943ae8-e452a63a" } 2025-03-31T16:40:11.124079Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [9d796664-8a77a76b-29943ae8-e452a63a] Write session will now close 2025-03-31T16:40:11.129093Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [9d796664-8a77a76b-29943ae8-e452a63a] Write session: aborting 2025-03-31T16:40:11.129741Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [9d796664-8a77a76b-29943ae8-e452a63a] Write session: gracefully shut down, all writes complete 2025-03-31T16:40:11.129915Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. 
] SessionId [] MessageGroupId [9d796664-8a77a76b-29943ae8-e452a63a] Write session: destroy
|98.9%| [TM] {RESULT} ydb/tests/fq/pq_async_io/ut/unittest
>> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_and_change_tablet_channel_then_can_read_from_tablet [GOOD]
|98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test
>> test_tpch.py::TestTpchS1::test_tpch[1]
|98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test
|98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test
>> KqpQueryService::TableSink_ReplaceFromSelectLargeOlap [GOOD]
>> KqpQueryService::TableSink_ReplaceDuplicatesOlap
>> test_kill_tablets.py::TestKillTablets::test_when_kill_keyvalue_tablet_it_will_be_restarted [GOOD]
>> DataShardStats::HasSchemaChanges_Columns [GOOD]
>> DataShardStats::HasSchemaChanges_Families
|98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
>> test_create_tablets.py::TestHive::test_when_create_tablets_then_can_lookup_them [GOOD]
|98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test
>> TSelectFromViewTest::OneTableUsingRelativeName [GOOD]
>> TSelectFromViewTest::DisabledFeatureFlag
>> test_crud.py::TestCreateAndUpsertWithRepetitions::test_create_and_select_with_repetitions[10-64]
|98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test
|98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test
|98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test
|98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test
>> KqpQueryService::TableSink_ReplaceDuplicatesOlap [GOOD]
>> KqpQueryService::TableSink_Oltp_Replace-UseSink
|98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
>> test_kill_tablets.py::TestKillTablets::test_when_kill_keyvalue_tablet_it_will_be_restarted [GOOD]
>> test_clickbench.py::TestClickbench::test_clickbench[0]
|98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test
>> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_and_change_tablet_channel_then_can_read_from_tablet [GOOD]
|98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test
>> test_discovery.py::TestDiscoveryExtEndpoint::test_scenario [GOOD]
|98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test
>> test_replication.py::TestReplicationAfterNodesRestart::test_replication[none] [GOOD]
>> test_alloc_default.py::TestAlloc::test_mkql_not_increased[kikimr0] [GOOD]
>> TSelectFromViewTest::DisabledFeatureFlag [GOOD]
>> TSelectFromViewTest::ReadTestCasesFromFiles
|98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test
|98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test
>> test_replication.py::TestReplicationAfterNodesRestart::test_replication[none] [GOOD]
|98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test
>> test_crud.py::TestCreateAndUpsertWithRepetitions::test_create_and_select_with_repetitions[10-64] [GOOD]
>> test_crud.py::TestCreateAndUpsertWithRepetitions::test_create_and_upsert_data_with_repetitions[10-64]
>> DataShardStats::HasSchemaChanges_Families [GOOD]
|98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test
|98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test
|99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test
>> test_insert_restarts.py::TestS3::test_atomic_upload_commit[v1-client0] [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_stats/unittest >> DataShardStats::HasSchemaChanges_Families [GOOD]
Test command err: 2025-03-31T16:36:56.877111Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:36:56.877400Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:36:56.877545Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000678/r3tmp/tmpMdbg70/pdisk_1.dat 2025-03-31T16:36:57.415570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:36:57.469558Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:36:57.477132Z node 1 :TABLET_SAUSAGECACHE NOTICE: Update config MemoryLimit: 33554432 2025-03-31T16:36:57.516878Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:36:57.517442Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:36:57.530534Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:36:57.627939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:36:57.704277Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-03-31T16:36:57.705528Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-03-31T16:36:57.706132Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-31T16:36:57.706439Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:36:57.757850Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T16:36:57.758659Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:36:57.758790Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:36:57.761679Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:36:57.761801Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:36:57.761870Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:36:57.764308Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:36:57.764529Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:36:57.764668Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-31T16:36:57.775761Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:36:57.808401Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T16:36:57.809549Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in 
state WaitScheme: missing processing params 2025-03-31T16:36:57.809738Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-31T16:36:57.809778Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:36:57.809814Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T16:36:57.809855Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:36:57.810103Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:36:57.810842Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:36:57.812250Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T16:36:57.812393Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T16:36:57.812565Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:36:57.812622Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:36:57.812715Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-31T16:36:57.812754Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-31T16:36:57.812793Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-31T16:36:57.812892Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:36:57.812947Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:36:57.813078Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:36:57.813130Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:36:57.813180Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2567], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-31T16:36:57.813281Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-03-31T16:36:57.813320Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T16:36:57.813481Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:36:57.813880Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-31T16:36:57.813957Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-31T16:36:57.814074Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-31T16:36:57.814186Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-31T16:36:57.814233Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-31T16:36:57.814272Z node 1 :TX_DATASHARD TRACE: Add 
[0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-03-31T16:36:57.814310Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-31T16:36:57.814671Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-31T16:36:57.814732Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-31T16:36:57.814772Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-31T16:36:57.814810Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-31T16:36:57.814865Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-31T16:36:57.814900Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-31T16:36:57.814934Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-31T16:36:57.814970Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-31T16:36:57.815021Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-31T16:36:57.818116Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-31T16:36:57.818178Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-31T16:36:57.829058Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-31T16:36:57.829143Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-31T16:36:57.829181Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-31T16:36:57.829265Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-03-31T16:36:57.829446Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-31T16:36:57.980562Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:36:57.980614Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:36:57.980645Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-31T16:36:57.980988Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:568:2495], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-03-31T16:36:57.981020Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-31T16:36:57.981105Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 
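The trace above steps transaction 281474976715657 through the datashard's execution-unit pipeline: CheckSchemeTx, then StoreSchemeTx, then FinishPropose, and finally WaitForPlan, where the operation parks until the plan step (TEvPlanStep, step 1000) arrives. The C++ sketch below is only a toy model of that dispatch loop, with unit names and statuses copied from the trace; the control flow is an illustrative assumption, not YDB's actual scheduler.

    // Toy model of the execution-unit pipeline seen in the trace above.
    // Unit names and statuses come from the log; the loop itself is an
    // assumption for illustration, not YDB's real implementation.
    #include <iostream>
    #include <string>
    #include <vector>

    enum class EStatus {
        Executed,
        ExecutedNoMoreRestarts,
        DelayComplete,
        DelayCompleteNoMoreRestarts,
        NotReady // "is not ready to execute": wait for an external event
    };

    struct TStep {
        std::string Unit;
        EStatus Status;
    };

    int main() {
        // Order and outcomes reported for tx 281474976715657 before planning.
        const std::vector<TStep> pipeline = {
            {"CheckSchemeTx", EStatus::ExecutedNoMoreRestarts},
            {"StoreSchemeTx", EStatus::DelayCompleteNoMoreRestarts},
            {"FinishPropose", EStatus::DelayComplete},
            {"WaitForPlan",   EStatus::NotReady},
        };
        for (const auto& step : pipeline) {
            std::cout << "Trying to execute at unit " << step.Unit << '\n';
            if (step.Status == EStatus::NotReady) {
                // Parked here until TEvPlanStep (step 1000 in the log) arrives.
                std::cout << "  not ready; waiting for plan step\n";
                break;
            }
            std::cout << "  advance execution plan to next unit\n";
        }
        return 0;
    }
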
2025-03-31T16:36:57.981142Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2025-03-31T16:36:57.981178Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-03-31T16:36:57.981204Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-03-31T16:36:57.991000Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-31T16:36:57.991115Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:36:57.992879Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T16:36:57.992935Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T16:36:57.992998Z ... lanQueue at 72075186224037888 2025-03-31T16:40:33.236878Z node 13 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-31T16:40:33.236940Z node 13 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-31T16:40:33.239887Z node 13 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 25500} 2025-03-31T16:40:33.240016Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:40:33.243033Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:40:33.243116Z node 13 :TX_DATASHARD TRACE: Complete execution for [25500:281474976715664] at 72075186224037888 on unit CompleteOperation 2025-03-31T16:40:33.243232Z node 13 :TX_DATASHARD DEBUG: Complete [25500 : 281474976715664] from 72075186224037888 at tablet 72075186224037888 send result to client [13:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-31T16:40:33.243322Z node 13 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715664 state Ready TxInFly 0 2025-03-31T16:40:33.243488Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:40:33.246281Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269877760, Sender [13:1166:2975], Recipient [13:935:2761]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046644480 Status: OK ServerId: [13:1169:2978] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-03-31T16:40:33.246356Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-03-31T16:40:33.253991Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269552132, Sender [13:409:2404], Recipient [13:935:2761]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715664 2025-03-31T16:40:33.254081Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2025-03-31T16:40:33.254152Z node 13 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715664 datashard 72075186224037888 state Ready 2025-03-31T16:40:33.254283Z node 13 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 waiting for schema changes 2025-03-31T16:40:33.267372Z node 13 
:TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [13:1166:2975], Recipient [13:935:2761]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594046644480 ClientId: [13:1166:2975] ServerId: [13:1169:2978] } 2025-03-31T16:40:33.267514Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-03-31T16:40:34.209768Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [13:935:2761]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-03-31T16:40:34.209885Z node 13 :TABLET_STATS_BUILDER INFO: UpdateTableStats at datashard 72075186224037888 2025-03-31T16:40:34.210219Z node 13 :TABLET_STATS_BUILDER INFO: Skipped at datashard 72075186224037888, for tableId 2: RowCount: 3 DataSize: 130 IndexSize: 82 ByKeyFilterSize: 0 RowCountHistogram: 0 DataSizeHistogram: 0 PartCount 1, with schema changes 2025-03-31T16:40:34.210458Z node 13 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186224037888, FollowerId 0, tableId 2 Captured TEvDataShard::TEvPeriodicTableStats DatashardId: 72075186224037888 TableLocalId: 2 Generation: 2 Round: 9 TableStats { DataSize: 130 RowCount: 3 IndexSize: 82 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 3 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 20 HasLoanedParts: false Channels { Channel: 1 DataSize: 65 IndexSize: 82 } Channels { Channel: 2 DataSize: 65 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: true LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 2420 Memory: 124352 Storage: 254 } ShardState: 2 UserTablePartOwners: 72075186224037888 NodeId: 13 StartTime: 5451 TableOwnerId: 72057594046644480 FollowerId: 0 2025-03-31T16:40:34.212049Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [13:1202:3011], Recipient [13:935:2761]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:40:34.212186Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:40:34.212281Z node 13 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [13:1201:3010], serverId# [13:1202:3011], sessionId# [0:0:0] 2025-03-31T16:40:34.212520Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269553210, Sender [13:1200:3009], Recipient [13:935:2761]: NKikimrTxDataShard.TEvCompactTable PathId { OwnerId: 72057594046644480 LocalId: 2 } CompactBorrowed: false 2025-03-31T16:40:34.212762Z node 13 :TX_DATASHARD INFO: Started background compaction# 3 of 72075186224037888 tableId# 2 localTid# 1001, requested from [13:1200:3009], partsCount# 1, memtableSize# 0, memtableWaste# 0, memtableRows# 0 2025-03-31T16:40:34.216263Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 2, ts 1970-01-01T00:00:20.452024Z 2025-03-31T16:40:34.216355Z node 13 :TABLET_STATS_BUILDER INFO: UpdateTableStats at datashard 72075186224037888 2025-03-31T16:40:34.216486Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 2, front# 3 2025-03-31T16:40:34.219333Z node 13 :TABLET_STATS_BUILDER TRACE: Building stats at datashard 72075186224037888, for tableId 2: starting for mixed index 2025-03-31T16:40:34.221919Z node 13 :TX_DATASHARD TRACE: StateWork, 
received event# 268828683, Sender [13:932:2759], Recipient [13:935:2761]: NKikimr::TEvTablet::TEvFollowerGcApplied 2025-03-31T16:40:34.222755Z node 13 :TABLET_STATS_BUILDER TRACE: Building stats at datashard 72075186224037888, for tableId 2: finished for mixed index ready: 1 stats: RowCount: 3 DataSize: 130 IndexSize: 82 ByKeyFilterSize: 0 RowCountHistogram: 0 DataSizeHistogram: 0 2025-03-31T16:40:34.222893Z node 13 :TABLET_STATS_BUILDER INFO: Stats at datashard 72075186224037888, for tableId 2: RowCount: 3 DataSize: 130 IndexSize: 82 ByKeyFilterSize: 0 RowCountHistogram: 0 DataSizeHistogram: 0 PartCount: 1, with schema changes, LoadedSize 82, Spent{time=0.000s,wait=0.000s,interrupts=2} 2025-03-31T16:40:34.223533Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 2146435080, Sender [13:1207:3015], Recipient [13:935:2761]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvAsyncTableStats 2025-03-31T16:40:34.223635Z node 13 :TABLET_STATS_BUILDER INFO: Result received at datashard 72075186224037888, for tableId 2: RowCount: 3 DataSize: 130 IndexSize: 82 ByKeyFilterSize: 0 RowCountHistogram: 0 DataSizeHistogram: 0 2025-03-31T16:40:34.223766Z node 13 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186224037888, FollowerId 0, tableId 2 2025-03-31T16:40:34.229252Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 3, ts 1970-01-01T00:00:30.452024Z 2025-03-31T16:40:34.229357Z node 13 :TABLET_STATS_BUILDER INFO: UpdateTableStats at datashard 72075186224037888 2025-03-31T16:40:34.229407Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 3, front# 3 2025-03-31T16:40:34.229477Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001 sending TEvCompactTableResult to# [13:1200:3009]pathId# [OwnerId: 72057594046644480, LocalPathId: 2] 2025-03-31T16:40:34.232057Z node 13 :TABLET_STATS_BUILDER TRACE: Building stats at datashard 72075186224037888, for tableId 2: starting for mixed index 2025-03-31T16:40:34.232498Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [13:932:2759], Recipient [13:935:2761]: NKikimr::TEvTablet::TEvFollowerGcApplied 2025-03-31T16:40:34.234391Z node 13 :TABLET_STATS_BUILDER TRACE: Building stats at datashard 72075186224037888, for tableId 2: finished for mixed index ready: 1 stats: RowCount: 3 DataSize: 130 IndexSize: 82 ByKeyFilterSize: 0 RowCountHistogram: 0 DataSizeHistogram: 0 2025-03-31T16:40:34.234517Z node 13 :TABLET_STATS_BUILDER INFO: Stats at datashard 72075186224037888, for tableId 2: RowCount: 3 DataSize: 130 IndexSize: 82 ByKeyFilterSize: 0 RowCountHistogram: 0 DataSizeHistogram: 0 PartCount: 1, LoadedSize 82, Spent{time=0.000s,wait=0.000s,interrupts=2} 2025-03-31T16:40:34.234764Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 2146435080, Sender [13:1214:3021], Recipient [13:935:2761]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvAsyncTableStats 2025-03-31T16:40:34.234824Z node 13 :TABLET_STATS_BUILDER INFO: Result received at datashard 72075186224037888, for tableId 2: RowCount: 3 DataSize: 130 IndexSize: 82 ByKeyFilterSize: 0 RowCountHistogram: 0 DataSizeHistogram: 0 2025-03-31T16:40:34.234900Z node 13 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186224037888, FollowerId 0, tableId 2 waiting for no schema changes 2025-03-31T16:40:34.247352Z node 13 :TX_DATASHARD DEBUG: Updated last full compaction of tablet# 72075186224037888, tableId# 2, last 
full compaction# 1970-01-01T00:00:30.452024Z 2025-03-31T16:40:35.098593Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [13:935:2761]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2025-03-31T16:40:35.098742Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2025-03-31T16:40:35.098919Z node 13 :TX_DATASHARD TRACE: No cleanup at 72075186224037888 outdated step 35000 last cleanup 0 2025-03-31T16:40:35.099040Z node 13 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:40:35.099134Z node 13 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-31T16:40:35.099213Z node 13 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-31T16:40:35.099289Z node 13 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-31T16:40:35.099563Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [13:935:2761]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-03-31T16:40:35.099625Z node 13 :TABLET_STATS_BUILDER INFO: UpdateTableStats at datashard 72075186224037888 2025-03-31T16:40:35.099848Z node 13 :TABLET_STATS_BUILDER INFO: Skipped at datashard 72075186224037888, for tableId 2: RowCount: 3 DataSize: 130 IndexSize: 82 ByKeyFilterSize: 0 RowCountHistogram: 0 DataSizeHistogram: 0 PartCount 1 2025-03-31T16:40:35.100046Z node 13 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186224037888, FollowerId 0, tableId 2 Captured TEvDataShard::TEvPeriodicTableStats DatashardId: 72075186224037888 TableLocalId: 2 Generation: 2 Round: 12 TableStats { DataSize: 130 RowCount: 3 IndexSize: 82 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 3 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 30 HasLoanedParts: false Channels { Channel: 1 DataSize: 80 IndexSize: 82 } Channels { Channel: 2 DataSize: 50 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 2134 Memory: 124352 Storage: 254 } ShardState: 2 UserTablePartOwners: 72075186224037888 NodeId: 13 StartTime: 5451 TableOwnerId: 72057594046644480 FollowerId: 0
|99.0%| [TM] {RESULT} ydb/core/tx/datashard/ut_stats/unittest
>> KqpQueryService::TableSink_Oltp_Replace-UseSink [GOOD]
>> test_crud.py::TestCreateAndUpsertWithRepetitions::test_create_and_upsert_data_with_repetitions[10-64] [GOOD]
|99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test
|99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test
|99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test
|99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test
|99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test
|99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test
>> test_alloc_default.py::TestAlloc::test_hard_limit[kikimr0]
>> test_insert_restarts.py::TestS3::test_atomic_upload_commit[v2-client0]
>> test_self_heal.py::TestEnableSelfHeal::test_replication
|99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test
|99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_Oltp_Replace-UseSink [GOOD]
Test command err: Trying to start YDB, gRPC: 25263, MsgBus: 2670 2025-03-31T16:34:39.526433Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012969065751340:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:39.526479Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0014c2/r3tmp/tmpLBB8d0/pdisk_1.dat 2025-03-31T16:34:39.928846Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:34:39.932260Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:39.932358Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:39.936539Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25263, node 1 2025-03-31T16:34:40.051697Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:40.051722Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:40.051729Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:40.051834Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2670 TClient is connected to server localhost:2670 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:34:40.621129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T16:34:40.644067Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 CREATE TABLE `/Root/ColumnShard1` (Col1 Int64 NOT NULL, Col2 Int32 NOT NULL, PRIMARY KEY (Col1)) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1000); 2025-03-31T16:34:42.809625Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012981950653893:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:42.809738Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:43.501229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:34:44.531186Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488012969065751340:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:44.531292Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:34:48.653132Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488013007720462051:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:34:48.653398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488013007720462051:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:34:48.653473Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038849;self_id=[1:7488013007720462251:2394];tablet_id=72075186224038849;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:34:48.653531Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038849;self_id=[1:7488013007720462251:2394];tablet_id=72075186224038849;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:34:48.653714Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488013007720462051:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:34:48.653778Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038849;self_id=[1:7488013007720462251:2394];tablet_id=72075186224038849;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:34:48.654099Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488013007720462051:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:34:48.654122Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038849;self_id=[1:7488013007720462251:2394];tablet_id=72075186224038849;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:34:48.654301Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488013007720462051:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:34:48.654321Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038849;self_id=[1:7488013007720462251:2394];tablet_id=72075186224038849;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:34:48.654492Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7488013007720462051:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:34:48.654620Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488013007720462051:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:34:48.654743Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488013007720462051:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:34:48.654877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488013007720462051:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:34:48.655000Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488013007720462051:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:34:48.655136Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488013007720462051:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:34:48.655243Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488013007720462051:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:34:48.658421Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038849;self_id=[1:7488013007720462251:2394];tablet_id=72075186224038849;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:34:48.658594Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038849;self_id=[1:7488013007720462251:2394];tablet_id=72075186224038849;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:34:48.658671Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038849;self_id=[1:7488013007720462251:2394];tablet_id=72075186224038849;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:34:48.658742Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038849;self_id=[1:7488013007720462251:2394];tablet_id=72075186224038849;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:34:48.658830Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038849;self_id=[1:7488013007720462251:2394];tablet_id=72075186224038849;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:34:48.658907Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038849;self_id=[1:7488013007720462251:2394];tablet_id=72075186224038849;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:34:48.658981Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224038849;self_id=[1:7488013007720462251:2394];tablet_id=72075186224038849;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:34:48.692571Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;self_id=[1:7488013007720462053:2354];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:34:48.692648Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;self_id=[1:7488013007720462053:2354];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:34:48.692878Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;self_id=[1:7488013007720462053:2354];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:34:48.693073Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037951;self_id=[1:7488013007720462053:2354];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleane ... LUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T16:40:25.666110Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T16:40:25.666318Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T16:40:25.666360Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T16:40:25.666495Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T16:40:25.666519Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T16:40:25.744348Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-31T16:40:25.747810Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-31T16:40:25.751759Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-31T16:40:25.761224Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-31T16:40:25.766938Z node 2 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-31T16:40:25.776782Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-31T16:40:25.776806Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-31T16:40:25.785139Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-31T16:40:25.785437Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-31T16:40:25.794516Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-31T16:40:25.817481Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488014458111626367:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:40:25.817597Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:40:25.818024Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488014458111626372:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:40:25.822267Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-31T16:40:25.839192Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488014458111626374:2406], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-31T16:40:25.927015Z node 2 :TX_PROXY ERROR: Actor# [2:7488014458111626425:2588] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:40:26.066331Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=281474976710662;tx_id=281474976710662;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710662; 2025-03-31T16:40:26.100293Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=281474976710664;tx_id=281474976710664;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710664; Trying to start YDB, gRPC: 13778, MsgBus: 65115 2025-03-31T16:40:28.313175Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488014469856006077:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:40:28.313258Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0014c2/r3tmp/tmpWuX4Sk/pdisk_1.dat 2025-03-31T16:40:28.769335Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:40:28.868434Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:40:28.868613Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:40:28.870327Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13778, node 3 2025-03-31T16:40:29.065101Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:40:29.065132Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:40:29.065142Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:40:29.065343Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65115 TClient is connected to server localhost:65115 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T16:40:29.836973Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:40:29.851925Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:40:33.313547Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7488014469856006077:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:40:33.313626Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:40:33.952384Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488014491330843224:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:40:33.952501Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:40:34.005481Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T16:40:34.478988Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-31T16:40:35.031140Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488014499920779220:2446], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:40:35.031291Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:40:35.032433Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488014499920779225:2449], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:40:35.039847Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-03-31T16:40:35.063768Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488014499920779227:2450], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-03-31T16:40:35.163358Z node 3 :TX_PROXY ERROR: Actor# [3:7488014499920779302:3297] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> TDataShardRSTest::TestCleanupInRS-UseSink [GOOD] >> TDataShardRSTest::TestDelayedRSAckForUnknownTx |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--false] |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_cms_state_storage.py::TestCmsStateStorageSimpleMax::test_check_shutdown_state_storage_nodes [GOOD] >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorKeep::test_restart_as_much_as_can |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> TDataShardRSTest::TestDelayedRSAckForUnknownTx [GOOD] >> TDataShardRSTest::TestDelayedRSAckForOutOfOrderCompletedTx |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_crud.py::TestCRUDOperations::test_create_table_and_drop_table_success >> TControlPlaneProxyShouldPassHids::ShouldCheckScenario [GOOD] >> TControlPlaneProxyTest::ShouldSendCreateQuery >> test_clickbench.py::TestClickbench::test_clickbench[0] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[1] >> test_discovery.py::TestDiscoveryFaultInjectionSlotStop::test_scenario >> test_self_heal.py::TestEnableSelfHeal::test_replication [GOOD] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> TControlPlaneProxyTest::ShouldSendCreateQuery [GOOD] >> TControlPlaneProxyTest::FailsOnCreateQueryWhenRateLimiterResourceNotCreated |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_state_storage.py::TestCmsStateStorageSimpleMax::test_check_shutdown_state_storage_nodes [GOOD] >> TControlPlaneProxyTest::FailsOnCreateQueryWhenRateLimiterResourceNotCreated [GOOD] >> TControlPlaneProxyTest::ShouldSendListQueries >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_self_heal.py::TestEnableSelfHeal::test_replication [GOOD] >> TDataShardRSTest::TestDelayedRSAckForOutOfOrderCompletedTx [GOOD] >> TDataShardRSTest::TestGenericReadSetDecisionCommit >> TControlPlaneProxyTest::ShouldSendListQueries [GOOD] >> TControlPlaneProxyTest::ShouldSendDescribeQuery >> TControlPlaneProxyTest::ShouldSendDescribeQuery [GOOD] >> TControlPlaneProxyTest::ShouldSendGetQueryStatus >> 
test_config_with_metadata.py::TestConfigWithoutMetadataMirror::test_cluster_is_operational_without_metadata >> TControlPlaneProxyTest::ShouldSendGetQueryStatus [GOOD] >> TControlPlaneProxyTest::ShouldSendModifyQuery >> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--false] [GOOD] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_clickbench.py::TestClickbench::test_clickbench[1] [GOOD] >> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--true] >> test_clickbench.py::TestClickbench::test_clickbench[2] >> TControlPlaneProxyTest::ShouldSendModifyQuery [GOOD] >> TControlPlaneProxyTest::ShouldSendDeleteQuery >> test_crud.py::TestCRUDOperations::test_create_table_and_drop_table_success [GOOD] >> test_crud.py::TestCRUDOperations::test_create_table_wrong_primary_key_failed1 >> test_crud.py::TestCRUDOperations::test_create_table_wrong_primary_key_failed1 [GOOD] >> test_crud.py::TestCRUDOperations::test_create_table_wrong_primary_key_failed2 |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_crud.py::TestCRUDOperations::test_create_table_wrong_primary_key_failed2 [GOOD] >> TControlPlaneProxyTest::ShouldSendDeleteQuery [GOOD] >> TControlPlaneProxyTest::ShouldSendControlQuery |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> TDataShardRSTest::TestGenericReadSetDecisionCommit [GOOD] >> TDataShardRSTest::TestGenericReadSetDecisionAbort >> TControlPlaneProxyTest::ShouldSendControlQuery [GOOD] >> TControlPlaneProxyTest::ShouldSendGetResultData >> TControlPlaneProxyTest::ShouldSendGetResultData [GOOD] >> TControlPlaneProxyTest::ShouldSendListJobs >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3-dc] >> test_drain.py::TestHive::test_drain_on_stop [FAIL] >> TControlPlaneProxyTest::ShouldSendListJobs [GOOD] >> TControlPlaneProxyTest::ShouldSendDescribeJob |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_alloc_default.py::TestAlloc::test_hard_limit[kikimr0] [GOOD] >> TSelectFromViewTest::ReadTestCasesFromFiles [GOOD] >> TSelectFromViewTest::QueryCacheIsUpdated >> TControlPlaneProxyTest::ShouldSendDescribeJob [GOOD] >> TControlPlaneProxyTest::ShouldSendCreateConnection |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_drain.py::TestHive::test_drain_tablets [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[2] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[3] >> TControlPlaneProxyTest::ShouldSendCreateConnection [GOOD] >> TControlPlaneProxyTest::ShouldSendListConnections >> test_cms_erasure.py::TestDegradedGroupBlock42Keep::test_no_degraded_groups_after_shutdown [GOOD] >> TDataShardRSTest::TestGenericReadSetDecisionAbort [GOOD] >> TControlPlaneProxyTest::ShouldSendListConnections [GOOD] >> TControlPlaneProxyTest::ShouldSendDescribeConnection ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_rs/unittest >> TDataShardRSTest::TestGenericReadSetDecisionAbort [GOOD] Test command err: 2025-03-31T16:37:57.759846Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
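The TDataShardRSTest block below traces a distributed commit that is aborted on purpose: one datashard reports LOCKS BROKEN through a read set, KQP returns ABORTED with "Transaction locks invalidated. Table: `/Root/table-1`" (code 2001), and the session rolls the lock back. From an application, that status is the standard signal to re-run the whole transaction. A minimal client-side sketch of that handling, assuming the table name from the log, a placeholder endpoint and database, and the official `ydb` Python SDK table service:

    # Sketch only: retry a read-modify-write when optimistic locks break.
    # Endpoint and database are placeholders; `/Root/table-1` mirrors the
    # test log and is not assumed to exist outside this scenario.
    import ydb

    driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")
    driver.wait(timeout=5, fail_fast=True)
    pool = ydb.SessionPool(driver)

    def bump_value(session):
        # Loses its optimistic locks (status ABORTED, code 2001) if a
        # concurrent writer touches the same key before commit.
        tx = session.transaction(ydb.SerializableReadWrite())
        tx.execute("UPDATE `/Root/table-1` SET value = value + 1 WHERE key = 1;")
        tx.commit()

    # retry_operation_sync re-runs the callable on retryable statuses,
    # which covers ABORTED from lock invalidation.
    pool.retry_operation_sync(bump_value)
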
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:37:57.760204Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:37:57.760427Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/00089a/r3tmp/tmpzjPLsj/pdisk_1.dat 2025-03-31T16:37:58.332337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:37:58.385652Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:37:58.430426Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:37:58.431435Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:37:58.448462Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:37:58.548850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:37:58.624206Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:684:2579] 2025-03-31T16:37:58.624634Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:37:58.697112Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:37:58.697387Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:37:58.699104Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-31T16:37:58.699209Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-31T16:37:58.699279Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-31T16:37:58.703726Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:37:58.704267Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:686:2581] 2025-03-31T16:37:58.704475Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:37:58.724110Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:37:58.724270Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:720:2579] in generation 1 2025-03-31T16:37:58.725609Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:37:58.725839Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:37:58.736358Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-03-31T16:37:58.736464Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-03-31T16:37:58.736522Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-03-31T16:37:58.736859Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:37:58.737253Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:37:58.737308Z node 1 :TX_DATASHARD DEBUG: DataShard 
72075186224037889 persisting started state actor id [1:731:2581] in generation 1 2025-03-31T16:37:58.738875Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037891 actor [1:689:2583] 2025-03-31T16:37:58.739110Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:37:58.750365Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:37:58.750740Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:694:2585] 2025-03-31T16:37:58.750956Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T16:37:58.760381Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:37:58.761813Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037891 2025-03-31T16:37:58.761877Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037891 2025-03-31T16:37:58.761921Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037891 2025-03-31T16:37:58.762221Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:37:58.762705Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:37:58.762754Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037891 persisting started state actor id [1:752:2583] in generation 1 2025-03-31T16:37:58.763201Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T16:37:58.763338Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T16:37:58.764681Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2025-03-31T16:37:58.764743Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037890 2025-03-31T16:37:58.764785Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037890 2025-03-31T16:37:58.765053Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T16:37:58.765164Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T16:37:58.765225Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037890 persisting started state actor id [1:753:2585] in generation 1 2025-03-31T16:37:58.779230Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:37:58.816695Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-31T16:37:58.818199Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:37:58.818373Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:758:2621] 2025-03-31T16:37:58.818412Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-31T16:37:58.818445Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-31T16:37:58.818494Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:37:58.818600Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:37:58.818635Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-03-31T16:37:58.818696Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:37:58.818762Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:759:2622] 
2025-03-31T16:37:58.818786Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-03-31T16:37:58.818812Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-03-31T16:37:58.818833Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-31T16:37:58.820391Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:37:58.820442Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037891 2025-03-31T16:37:58.820492Z node 1 :TX_DATASHARD DEBUG: 72075186224037891 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:37:58.820544Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037891, actorId: [1:760:2623] 2025-03-31T16:37:58.820563Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037891 2025-03-31T16:37:58.820594Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037891, state: WaitScheme 2025-03-31T16:37:58.820624Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-03-31T16:37:58.820987Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-31T16:37:58.821058Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-31T16:37:58.821086Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-03-31T16:37:58.821141Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-03-31T16:37:58.821202Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T16:37:58.821226Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037890 2025-03-31T16:37:58.821306Z node 1 :TX_DATASHARD DEBUG: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T16:37:58.821347Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037890, actorId: [1:761:2624] 2025-03-31T16:37:58.821369Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890 2025-03-31T16:37:58.821390Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2025-03-31T16:37:58.821406Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-03-31T16:37:58.821573Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-31T16:37:58.821625Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:37:58.821661Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-31T16:37:58.821704Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-31T16:37:58.821768Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-31T16:37:58.821794Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:37:58.821813Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-03-31T16:37:58.821840Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 
2025-03-31T16:37:58.821907Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037891 2025-03-31T16:37:58.821966Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037891 2025-03-31T16:37:58.822074Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:671:2573], serverId# [1:707:2591], sessionId# [0:0:0] 2025-03-31T16:37:58.822128Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:672:2574], serverId# [1:713:2597], sessionId# [0:0:0] 2025-03-31T16:37:58.822184Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037891 2025-03-31T16:37:58.822269Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:37:58.822300Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037891 TxInFly 0 2025-03-31T16:37:58.822334Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037891 2025-03-31T16:37:58.822559Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-31T16:37:58.822836Z node 1 :TX_DATASHARD DEB ... lan for [2029:281474976715664] at 72075186224037889 has finished 2025-03-31T16:41:21.583372Z node 6 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T16:41:21.583397Z node 6 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2025-03-31T16:41:21.583425Z node 6 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037889 has no attached operations 2025-03-31T16:41:21.583456Z node 6 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037889 2025-03-31T16:41:21.584278Z node 6 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2029} 2025-03-31T16:41:21.584697Z node 6 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 2029} 2025-03-31T16:41:21.584896Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [6:1015:2805], Recipient [6:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T16:41:21.584930Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T16:41:21.584969Z node 6 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [6:1012:2802], serverId# [6:1015:2805], sessionId# [0:0:0] 2025-03-31T16:41:21.585174Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [6:757:2635], Recipient [6:666:2570]: {TEvReadSet step# 2029 txid# 281474976715664 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037889 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2025-03-31T16:41:21.585237Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-03-31T16:41:21.585297Z node 6 :TX_DATASHARD DEBUG: Receive RS at 72075186224037888 source 72075186224037889 dest 72075186224037888 producer 72075186224037889 txId 281474976715664 2025-03-31T16:41:21.585400Z node 6 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037888 got read set: {TEvReadSet step# 2029 txid# 281474976715664 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037889 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2025-03-31T16:41:21.585554Z node 6 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 
2025-03-31T16:41:21.585616Z node 6 :TX_DATASHARD TRACE: Complete execution for [2029:281474976715664] at 72075186224037888 on unit CompleteWrite 2025-03-31T16:41:21.585695Z node 6 :TX_DATASHARD DEBUG: Complete write [2029 : 281474976715664] from 72075186224037888 at tablet 72075186224037888 send result to client [6:1000:2754] 2025-03-31T16:41:21.585755Z node 6 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:41:21.585852Z node 6 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037888 2025-03-31T16:41:21.585919Z node 6 :TX_DATASHARD DEBUG: Send RS Reply at 72075186224037888 {TEvReadSet step# 2029 txid# 281474976715664 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037889 ReadSet.Size()# 0 Seqno# 0 Flags# 7} ... nodata readset 2025-03-31T16:41:21.586043Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [6:666:2570], Recipient [6:757:2635]: {TEvReadSet step# 2029 txid# 281474976715664 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 0 Seqno# 0 Flags# 3} 2025-03-31T16:41:21.586076Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-03-31T16:41:21.586108Z node 6 :TX_DATASHARD DEBUG: Receive RS at 72075186224037889 source 72075186224037888 dest 72075186224037889 producer 72075186224037888 txId 281474976715664 2025-03-31T16:41:21.586160Z node 6 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 2029 txid# 281474976715664 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 0 Seqno# 0 Flags# 3} 2025-03-31T16:41:21.586362Z node 6 :TX_DATASHARD TRACE: Processed readset without data from 72075186224037888 to 72075186224037889 at tablet 72075186224037889 2025-03-31T16:41:21.586578Z node 6 :KQP_COMPUTE ERROR: SelfId: [6:1000:2754], SessionActorId: [6:942:2754], Got LOCKS BROKEN for table. ShardID=72075186224037888, Sink=[6:1000:2754].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-03-31T16:41:21.586753Z node 6 :KQP_COMPUTE ERROR: SelfId: [6:1000:2754], SessionActorId: [6:942:2754], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/table-1`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[6:942:2754]. isRollback=0 2025-03-31T16:41:21.595998Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=ZTg5YTFlOWEtYTI0ZTk1MGItOWE0M2FlNWMtZmIzMWViNmQ=, ActorId: [6:942:2754], ActorState: ExecuteState, TraceId: 01jqph8f9x6k827x99y65c3wee, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [6:1001:2754] from: [6:1000:2754] 2025-03-31T16:41:21.596216Z node 6 :KQP_EXECUTER ERROR: ActorId: [6:1001:2754] TxId: 281474976715664. Ctx: { TraceId: 01jqph8f9x6k827x99y65c3wee, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=6&id=ZTg5YTFlOWEtYTI0ZTk1MGItOWE0M2FlNWMtZmIzMWViNmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/table-1`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-03-31T16:41:21.597682Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 278003712, Sender [6:1000:2754], Recipient [6:666:2570]: NKikimrDataEvents.TEvWrite TxMode: MODE_IMMEDIATE Locks { Locks { LockId: 281474976715662 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 2 } Op: Rollback } 2025-03-31T16:41:21.597735Z node 6 :TX_DATASHARD TRACE: Handle TTxWrite: at tablet# 72075186224037888 2025-03-31T16:41:21.597967Z node 6 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-03-31T16:41:21.598006Z node 6 :TX_DATASHARD TRACE: Complete execution for [2029:281474976715664] at 72075186224037889 on unit CompleteWrite 2025-03-31T16:41:21.598064Z node 6 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-31T16:41:21.598144Z node 6 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037889 2025-03-31T16:41:21.598381Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 2146435074, Sender [6:666:2570], Recipient [6:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2025-03-31T16:41:21.598431Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2025-03-31T16:41:21.598561Z node 6 :TX_DATASHARD TRACE: TTxWrite:: execute at tablet# 72075186224037888 2025-03-31T16:41:21.598765Z node 6 :TX_DATASHARD TRACE: Parsing write transaction for 0 at 72075186224037888, record: TxMode: MODE_IMMEDIATE Locks { Locks { LockId: 281474976715662 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 2 } Op: Rollback } 2025-03-31T16:41:21.598878Z node 6 :TX_DATASHARD TRACE: -- AddWriteRange: (Uint64 : 281474976715662, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 2) table: [1:997:0] 2025-03-31T16:41:21.598989Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit CheckWrite 2025-03-31T16:41:21.599061Z node 6 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2025-03-31T16:41:21.603236Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit CheckWrite 2025-03-31T16:41:21.603337Z node 6 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-03-31T16:41:21.603393Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit BuildAndWaitDependencies 2025-03-31T16:41:21.603460Z node 6 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2029/281474976715664 IncompleteEdge# v{min} UnprotectedReadEdge# v2000/18446744073709551615 ImmediateWriteEdge# v2001/0 ImmediateWriteEdgeReplied# v2001/0 2025-03-31T16:41:21.603542Z node 6 :TX_DATASHARD TRACE: Activated operation [0:6] at 72075186224037888 2025-03-31T16:41:21.603602Z node 6 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2025-03-31T16:41:21.603636Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-03-31T16:41:21.603662Z node 6 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037888 to execution unit ExecuteWrite 2025-03-31T16:41:21.603690Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit ExecuteWrite 2025-03-31T16:41:21.603731Z node 6 :TX_DATASHARD DEBUG: Executing write operation for [0:6] at 72075186224037888 
2025-03-31T16:41:21.603871Z node 6 :TX_DATASHARD TRACE: KqpEraseLock LockId: 281474976715662 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 2 2025-03-31T16:41:21.603929Z node 6 :TX_DATASHARD DEBUG: Skip empty write operation for [0:6] at 72075186224037888 2025-03-31T16:41:21.603997Z node 6 :TX_DATASHARD TRACE: add locks to result: 0 2025-03-31T16:41:21.604070Z node 6 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-31T16:41:21.604103Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit ExecuteWrite 2025-03-31T16:41:21.604160Z node 6 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037888 to execution unit FinishProposeWrite 2025-03-31T16:41:21.604230Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit FinishProposeWrite 2025-03-31T16:41:21.604265Z node 6 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is DelayComplete 2025-03-31T16:41:21.604303Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit FinishProposeWrite 2025-03-31T16:41:21.604357Z node 6 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037888 to execution unit CompletedOperations 2025-03-31T16:41:21.604401Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit CompletedOperations 2025-03-31T16:41:21.604455Z node 6 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2025-03-31T16:41:21.604480Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit CompletedOperations 2025-03-31T16:41:21.604513Z node 6 :TX_DATASHARD TRACE: Execution plan for [0:6] at 72075186224037888 has finished 2025-03-31T16:41:21.604595Z node 6 :TX_DATASHARD TRACE: TTxWrite complete: at tablet# 72075186224037888 2025-03-31T16:41:21.604647Z node 6 :TX_DATASHARD TRACE: Complete execution for [0:6] at 72075186224037888 on unit FinishProposeWrite 2025-03-31T16:41:21.604707Z node 6 :TX_DATASHARD TRACE: Propose transaction complete txid 6 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_COMPLETED 2025-03-31T16:41:21.604807Z node 6 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-31T16:41:21.605079Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=ZTg5YTFlOWEtYTI0ZTk1MGItOWE0M2FlNWMtZmIzMWViNmQ=, ActorId: [6:942:2754], ActorState: ExecuteState, TraceId: 01jqph8f9x6k827x99y65c3wee, Create QueryResponse for error on request, msg: 2025-03-31T16:41:21.606091Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [6:61:2108], Recipient [6:666:2570]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715662 LockNode: 6 Status: STATUS_NOT_FOUND |99.1%| [TM] {RESULT} ydb/core/tx/datashard/ut_rs/unittest >> TControlPlaneProxyTest::ShouldSendDescribeConnection [GOOD] >> TControlPlaneProxyTest::ShouldSendModifyConnection |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_scheduling.py::TestSchedule::test_skip_busy[kikimr0] [SKIPPED] >> test_dc_local.py::TestAlloc::test_dc_locality[kikimr0] >> TControlPlaneProxyTest::ShouldSendModifyConnection [GOOD] >> TControlPlaneProxyTest::ShouldSendDeleteConnection >> TControlPlaneProxyTest::ShouldSendDeleteConnection [GOOD] >> 
TControlPlaneProxyTest::ShouldSendTestConnection >> test_clickbench.py::TestClickbench::test_clickbench[3] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[4] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_drain.py::TestHive::test_drain_tablets [GOOD] >> test_session_grace_shutdown.py::Test::test_grace_shutdown_of_session >> TControlPlaneProxyTest::ShouldSendTestConnection [GOOD] >> TControlPlaneProxyTest::ShouldSendCreateBinding >> test_crud.py::TestSelect::test_advanced_select_failed[select distinct b, a from (select a, b from t1 union all select b, a from t1 order by b) order by B-Column B is not in source column set.*] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_erasure.py::TestDegradedGroupBlock42Keep::test_no_degraded_groups_after_shutdown [GOOD] >> TControlPlaneProxyTest::ShouldSendCreateBinding [GOOD] >> TControlPlaneProxyTest::ShouldSendListBindings >> TSelectFromViewTest::QueryCacheIsUpdated [GOOD] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> TControlPlaneProxyTest::ShouldSendListBindings [GOOD] >> TControlPlaneProxyTest::ShouldSendDescribeBinding >> TControlPlaneProxyTest::ShouldSendDescribeBinding [GOOD] >> TControlPlaneProxyTest::ShouldSendModifyBinding >> test_clickbench.py::TestClickbench::test_clickbench[4] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[5] >> TControlPlaneProxyTest::ShouldSendModifyBinding [GOOD] >> TControlPlaneProxyTest::ShouldSendDeleteBinding |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_drain.py::TestHive::test_drain_on_stop [FAIL] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/view/unittest >> TSelectFromViewTest::QueryCacheIsUpdated [GOOD] Test command err: Trying to start YDB, gRPC: 10534, MsgBus: 3883 2025-03-31T16:37:33.983624Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488013715486916416:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:37:33.984315Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0007e2/r3tmp/tmpaSgEUm/pdisk_1.dat 2025-03-31T16:37:34.515245Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:37:34.542308Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:37:34.543978Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:37:34.558935Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10534, node 1 2025-03-31T16:37:34.839860Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:37:34.839899Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:37:34.839907Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:37:34.840041Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3883 TClient is connected 
to server localhost:3883 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:37:35.632468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:37:37.210160Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013732666786261:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:37:37.217985Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } Trying to start YDB, gRPC: 1591, MsgBus: 2046 2025-03-31T16:37:38.756888Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488013740201007374:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:37:38.756937Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0007e2/r3tmp/tmpHKU27y/pdisk_1.dat 2025-03-31T16:37:38.918294Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:37:38.937816Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:37:38.937904Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:37:38.944371Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1591, node 2 2025-03-31T16:37:38.987540Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:37:38.987559Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:37:38.987566Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:37:38.987665Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2046 TClient is connected to server localhost:2046 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:37:39.502588Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:37:39.524393Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:37:42.480230Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488013757380877215:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:37:42.480347Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } Trying to start YDB, gRPC: 11825, MsgBus: 62053 2025-03-31T16:37:43.635191Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488013759466681085:2057];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:37:43.644508Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0007e2/r3tmp/tmpCEoFJg/pdisk_1.dat 2025-03-31T16:37:43.902096Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:37:43.914617Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:37:43.923259Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:37:43.932517Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11825, node 3 2025-03-31T16:37:44.051794Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:37:44.051821Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:37:44.051831Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:37:44.051994Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62053 TClient is connected to server localhost:62053 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:37:44.704824Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:37:48.048612Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488013780941518233:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:37:48.048715Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } Trying to start YDB, gRPC: 1891, MsgBus: 5951 2025-03-31T16:37:48.965608Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7488013781311709736:2133];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:37:48.970892Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0007e2/r3tmp/tmp87rVr1/pdisk_1.dat 2025-03-31T16:37:49.130418Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:37:49.143329Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:37:49.143428Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:37:49.148503Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1891, node 4 2025-03-31T16:37:49.255803Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty may ... rn (Commit! (Commit! $10 $9) $1 $7)) ) 2025-03-31 16:41:16.290 INFO ydb-core-kqp-ut-view(pid=755793, tid=0x00007F74ABC12640) [KQP] yql_optimize.cpp:135: KqpLogical-ApplyExtractMembersToReadTableRanges 2025-03-31 16:41:16.312 INFO ydb-core-kqp-ut-view(pid=755793, tid=0x00007F74ABC12640) [KQP] yql_optimize.cpp:135: KqpLogical-RewriteAggregate 2025-03-31 16:41:16.331 INFO ydb-core-kqp-ut-view(pid=755793, tid=0x00007F74ABC12640) [KQP] yql_optimize.cpp:135: KqpPhysical-BuildReadTableRangesStage 2025-03-31 16:41:16.344 INFO ydb-core-kqp-ut-view(pid=755793, tid=0x00007F74ABC12640) [KQP] yql_optimize.cpp:135: KqpPhysical-PushAggregateCombineToStage 2025-03-31 16:41:16.367 INFO ydb-core-kqp-ut-view(pid=755793, tid=0x00007F74ABC12640) [KQP] yql_optimize.cpp:135: KqpPhysical-ExpandAggregatePhase 2025-03-31 16:41:16.386 INFO ydb-core-kqp-ut-view(pid=755793, tid=0x00007F74ABC12640) [KQP] yql_optimize.cpp:135: KqpPhysical-ExpandAggregatePhase 2025-03-31 16:41:16.408 INFO ydb-core-kqp-ut-view(pid=755793, tid=0x00007F74ABC12640) [KQP] yql_optimize.cpp:135: KqpPhysical-ExpandAggregatePhase 2025-03-31 16:41:16.424 INFO ydb-core-kqp-ut-view(pid=755793, tid=0x00007F74ABC12640) [KQP] yql_optimize.cpp:135: KqpPhysical-ExpandAggregatePhase 2025-03-31 16:41:16.466 INFO ydb-core-kqp-ut-view(pid=755793, tid=0x00007F74ABC12640) [KQP] yql_optimize.cpp:135: KqpPhysical-BuildShuffleStage 2025-03-31 16:41:16.489 INFO ydb-core-kqp-ut-view(pid=755793, tid=0x00007F74ABC12640) [KQP] yql_optimize.cpp:135: KqpPhysical-BuildSortStage 2025-03-31 16:41:16.530 INFO ydb-core-kqp-ut-view(pid=755793, tid=0x00007F74ABC12640) [KQP] yql_optimize.cpp:135: KqpPhysical-RewriteKqpReadTable 2025-03-31 16:41:17.020 INFO ydb-core-kqp-ut-view(pid=755793, tid=0x00007F74ABC12640) [KQP] yql_optimize.cpp:135: KqpPeepholeFinal-SetCombinerMemoryLimit 2025-03-31 16:41:17.204 INFO ydb-core-kqp-ut-view(pid=755793, tid=0x00007F74AC413640) [KQP] kqp_host.cpp:1382: Compiled query: ( (return (Write! 
world (DataSink '"kikimr" '"db") (Key '('objectId (String '"/Root/count_episodes")) '('typeId (String '"VIEW"))) (Void) '('('mode 'dropObject)))) ) 2025-03-31 16:41:17.223 INFO ydb-core-kqp-ut-view(pid=755793, tid=0x00007F74AC413640) [KQP] kqp_transform.cpp:33: Optimized expr: ( (let $1 (DataSink '"kikimr" '"db")) (let $2 (KiDropObject! world $1 '"/Root/count_episodes" '"VIEW" '() '0)) (return (Commit! $2 $1 '('('"mode" '"flush")))) ) 2025-03-31 16:41:17.301 INFO ydb-core-kqp-ut-view(pid=755793, tid=0x00007F74AC413640) [KQP] kqp_host.cpp:1382: Compiled query: ( (let $1 (Right! (Read! world (DataSource '"kikimr" '"db") (Key '('table (String '"/Root/view_series"))) (Void) '()))) (let $2 '('('"query_ast" (RemoveSystemMembers (PersistableRepr (SqlProject $1 '((SqlProjectStarItem (TypeOf $1) '"" (lambda '($3) $3) '())))))) '('"query_text" '"SELECT * FROM `/Root/view_series`") '('"security_invoker" (Bool '"true")))) (return (Write! world (DataSink '"kikimr" '"db") (Key '('objectId (String '"/Root/read_from_one_view")) '('typeId (String '"VIEW"))) (Void) '('('mode 'createObject) '('features $2)))) ) 2025-03-31 16:41:17.377 INFO ydb-core-kqp-ut-view(pid=755793, tid=0x00007F74AC413640) [KQP] kqp_transform.cpp:33: Optimized expr: ( (let $1 (DataSink '"kikimr" '"db")) (let $2 '('('"query_ast" (Right! (KiReadTable! world (DataSource '"kikimr" '"db") (Key '('table (String '"/Root/series"))) (Void) '()))) '('"query_text" '"SELECT * FROM `/Root/view_series`") '('"security_invoker" (Bool '"true")))) (let $3 (KiCreateObject! world $1 '"/Root/read_from_one_view" '"VIEW" $2 '0 '0)) (return (Commit! $3 $1 '('('"mode" '"flush")))) ) 2025-03-31 16:41:17.460 INFO ydb-core-kqp-ut-view(pid=755793, tid=0x00007F74ABC12640) [KQP] kqp_host.cpp:1382: Compiled query: ( (let $1 (Read! world (DataSource '"kikimr" '"db") (Key '('table (String '"/Root/read_from_one_view"))) (Void) '())) (let $2 (DataSink 'result)) (let $3 (Right! $1)) (let $4 (Write! (Left! $1) $2 (Key) (RemoveSystemMembers (Sort (PersistableRepr (SqlProject $3 '((SqlProjectStarItem (TypeOf $3) '"" (lambda '($5) $5) '())))) (Bool 'true) (lambda '($6) (PersistableRepr (Member $6 '"series_id"))))) '('('type) '('autoref)))) (return (Commit! $4 $2)) ) 2025-03-31 16:41:17.592 INFO ydb-core-kqp-ut-view(pid=755793, tid=0x00007F74AC413640) [KQP] kqp_transform.cpp:33: Optimized expr: ( (let $1 (DataSink '"kikimr" '"db")) (let $2 (Sort (Right! (KiReadTable! world (DataSource '"kikimr" '"db") (Key '('table (String '"/Root/series"))) (Void) '())) (Bool 'true) (lambda '($7) (Member $7 '"series_id")))) (let $3 '('('"mode" '"flush"))) (let $4 (KiExecDataQuery! world $1 (DataQueryBlocks (TKiDataQueryBlock '('($2 '() '0)) (KiEffects) '('('"db" '"/Root/series" '"Select")) '())) $3 (Void))) (let $5 (DataSink 'result)) (let $6 (ResPull! (Left! $4) $5 (Key) (Nth (Right! $4) '0) '('('type) '('autoref)) '"kikimr")) (return (Commit! (Commit! 
$6 $5) $1 $3)) ) 2025-03-31 16:41:17.605 INFO ydb-core-kqp-ut-view(pid=755793, tid=0x00007F74AC413640) [KQP] yql_optimize.cpp:135: KqpPhysical-BuildReadTableRangesStage 2025-03-31 16:41:17.621 INFO ydb-core-kqp-ut-view(pid=755793, tid=0x00007F74AC413640) [KQP] yql_optimize.cpp:135: KqpPhysical-BuildSortStage 2025-03-31 16:41:17.639 INFO ydb-core-kqp-ut-view(pid=755793, tid=0x00007F74AC413640) [KQP] yql_optimize.cpp:135: KqpPhysical-RemoveRedundantSortByPk 2025-03-31 16:41:17.648 INFO ydb-core-kqp-ut-view(pid=755793, tid=0x00007F74AC413640) [KQP] yql_optimize.cpp:135: KqpPhysical-RewriteKqpReadTable 2025-03-31 16:41:17.818 INFO ydb-core-kqp-ut-view(pid=755793, tid=0x00007F74ABC12640) [KQP] kqp_host.cpp:1382: Compiled query: ( (return (Write! world (DataSink '"kikimr" '"db") (Key '('objectId (String '"/Root/read_from_one_view")) '('typeId (String '"VIEW"))) (Void) '('('mode 'dropObject)))) ) 2025-03-31 16:41:17.838 INFO ydb-core-kqp-ut-view(pid=755793, tid=0x00007F74ABC12640) [KQP] kqp_transform.cpp:33: Optimized expr: ( (let $1 (DataSink '"kikimr" '"db")) (let $2 (KiDropObject! world $1 '"/Root/read_from_one_view" '"VIEW" '() '0)) (return (Commit! $2 $1 '('('"mode" '"flush")))) ) Trying to start YDB, gRPC: 12521, MsgBus: 8154 2025-03-31T16:41:20.184372Z node 23 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[23:7488014692325273773:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:41:20.192412Z node 23 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0007e2/r3tmp/tmpGbaBOF/pdisk_1.dat 2025-03-31T16:41:20.555464Z node 23 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:41:20.616999Z node 23 :HIVE WARN: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:41:20.617183Z node 23 :HIVE WARN: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:41:20.621203Z node 23 :HIVE WARN: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12521, node 23 2025-03-31T16:41:20.789712Z node 23 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:41:20.789755Z node 23 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:41:20.789776Z node 23 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:41:20.790037Z node 23 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8154 TClient is connected to server localhost:8154 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:41:22.480866Z node 23 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:41:22.499577Z node 23 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:41:25.187292Z node 23 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[23:7488014692325273773:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:41:25.187446Z node 23 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:41:29.093718Z node 23 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [23:7488014730979980098:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:41:29.093969Z node 23 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:41:29.181748Z node 23 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [23:7488014730979980126:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:41:29.181953Z node 23 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:41:29.182338Z node 23 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [23:7488014730979980131:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:41:29.191746Z node 23 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-31T16:41:29.218547Z node 23 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [23:7488014730979980133:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-31T16:41:29.315452Z node 23 :TX_PROXY ERROR: Actor# [23:7488014730979980188:2373] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |99.2%| [TM] {RESULT} ydb/core/kqp/ut/view/unittest |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> TControlPlaneProxyTest::ShouldSendDeleteBinding [GOOD] >> test_session_grace_shutdown.py::Test::test_grace_shutdown_of_session [GOOD] >> test_config_with_metadata.py::TestConfigWithoutMetadataMirror::test_cluster_is_operational_without_metadata [GOOD] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.2%| [TA] $(B)/ydb/tests/functional/hive/test-results/py3test/{meta.json ... results_accumulator.log} |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.2%| [TA] {RESULT} $(B)/ydb/tests/functional/hive/test-results/py3test/{meta.json ... results_accumulator.log} >> test_crud.py::TestSelect::test_advanced_select_failed[select distinct b, a from (select a, b from t1 union all select b, a from t1 order by b) order by B-Column B is not in source column set.*] [GOOD] >> test_crud.py::TestSelect::test_advanced_select_failed[select count(a, b) from t1-Aggregation function Count requires exactly 1 argument] [GOOD] >> test_crud.py::TestSelect::test_advanced_select_failed[select min(a, b) from t1-Aggregation function Min requires exactly 1 argument] [GOOD] >> test_crud.py::TestSelect::test_advanced_select_failed[select min(*) from t1-.*is not allowed here] >> test_crud.py::TestSelect::test_advanced_select_failed[select min(*) from t1-.*is not allowed here] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/control_plane_proxy/ut/unittest >> TControlPlaneProxyTest::ShouldSendDeleteBinding [GOOD] Test command err: 2025-03-31T16:37:45.528367Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: CreateQueryRequest, validation failed: test_user@staff **** (00000000) content { name: "my_query_name" } error:
: Error: No permission yq.queries.create@as in a given scope , code: 1000 2025-03-31T16:37:46.024652Z node 2 :YQ_CONTROL_PLANE_STORAGE ERROR: ListQueriesRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-31T16:37:46.841324Z node 3 :YQ_CONTROL_PLANE_STORAGE ERROR: DescribeQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-31T16:37:47.327820Z node 4 :YQ_CONTROL_PLANE_STORAGE ERROR: GetQueryStatusRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.getStatus@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-31T16:37:47.784147Z node 5 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.update@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-31T16:37:48.296620Z node 6 :YQ_CONTROL_PLANE_STORAGE ERROR: DeleteQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.delete@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-31T16:37:48.759912Z node 7 :YQ_CONTROL_PLANE_STORAGE ERROR: ControlQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.control@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-31T16:37:49.249208Z node 8 :YQ_CONTROL_PLANE_STORAGE ERROR: GetResultDataRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.getData@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-31T16:37:49.710813Z node 9 :YQ_CONTROL_PLANE_STORAGE ERROR: ListJobsRequest, validation failed: test_user@staff **** (00000000) query_id: "my_query_id" error:
: Error: No permission yq.jobs.get@as in a given scope , code: 1000 2025-03-31T16:37:50.213431Z node 10 :YQ_CONTROL_PLANE_STORAGE ERROR: DescribeJobRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.jobs.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-31T16:37:50.696870Z node 11 :YQ_CONTROL_PLANE_STORAGE ERROR: CreateConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-31T16:37:51.174254Z node 12 :YQ_CONTROL_PLANE_STORAGE ERROR: CreateConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) content { setting { ydb_database { auth { service_account { id: "my_sa_id" } } } } } error:
: Error: No permission iam.serviceAccounts.use@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-31T16:37:51.646406Z node 13 :YQ_CONTROL_PLANE_STORAGE ERROR: ListConnectionsRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-31T16:37:52.147734Z node 14 :YQ_CONTROL_PLANE_STORAGE ERROR: DescribeConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-31T16:37:52.654735Z node 15 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.update@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-31T16:37:53.189344Z node 16 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) content { setting { ydb_database { auth { service_account { id: "my_sa_id" } } } } } error:
: Error: No permission iam.serviceAccounts.use@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-31T16:37:53.750889Z node 17 :YQ_CONTROL_PLANE_STORAGE ERROR: DeleteConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.delete@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-31T16:37:54.284809Z node 18 :YQ_CONTROL_PLANE_STORAGE ERROR: TestConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-31T16:37:54.881509Z node 19 :YQ_CONTROL_PLANE_STORAGE ERROR: TestConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) setting { ydb_database { auth { service_account { id: "my_sa_id" } } } } error:
: Error: No permission iam.serviceAccounts.use@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-31T16:37:55.451394Z node 20 :YQ_CONTROL_PLANE_STORAGE ERROR: CreateBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.create@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-31T16:37:56.062435Z node 21 :YQ_CONTROL_PLANE_STORAGE ERROR: ListBindingsRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-31T16:37:56.570700Z node 22 :YQ_CONTROL_PLANE_STORAGE ERROR: DescribeBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-31T16:37:57.112326Z node 23 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.update@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-31T16:37:57.652704Z node 24 :YQ_CONTROL_PLANE_STORAGE ERROR: DeleteBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.delete@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-31T16:38:31.083387Z node 72 :YQ_CONTROL_PLANE_STORAGE ERROR: CreateQueryRequest, validation failed: test_user@staff **** (00000000) content { name: "my_query_name" } error:
: Error: No permission yq.queries.create@as in a given scope , code: 1000 2025-03-31T16:38:31.785715Z node 73 :YQ_CONTROL_PLANE_STORAGE ERROR: ListQueriesRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-31T16:38:32.960986Z node 74 :YQ_CONTROL_PLANE_STORAGE ERROR: DescribeQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-31T16:38:34.424664Z node 75 :YQ_CONTROL_PLANE_STORAGE ERROR: GetQueryStatusRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.getStatus@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-31T16:38:36.068135Z node 76 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.update@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-31T16:38:37.433043Z node 77 :YQ_CONTROL_PLANE_STORAGE ERROR: DeleteQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.delete@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-31T16:38:38.518278Z node 78 :YQ_CONTROL_PLANE_STORAGE ERROR: ControlQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.control@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-31T16:38:39.450872Z node 79 :YQ_CONTROL_PLANE_STORAGE ERROR: GetResultDataRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.getData@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-31T16:38:40.461386Z node 80 :YQ_CONTROL_PLANE_STORAGE ERROR: ListJobsRequest, validation failed: test_user@staff **** (00000000) query_id: "my_query_id" error:
: Error: No permission yq.jobs.get@as in a given scope , code: 1000 2025-03-31T16:38:41.338464Z node 81 :YQ_CONTROL_PLANE_STORAGE ERROR: DescribeJobRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.jobs.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-31T16:38:42.241460Z node 82 :YQ_CONTROL_PLANE_STORAGE ERROR: CreateConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-31T16:38:43.375901Z node 83 :YQ_CONTROL_PLANE_STORAGE ERROR: CreateConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) content { setting { ydb_database { auth { service_account { id: "my_sa_id" } } } } } error:
: Error: No permission iam.serviceAccounts.use@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-31T16:38:44.424299Z node 84 :YQ_CONTROL_PLANE_STORAGE ERROR: ListConnectionsRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-31T16:38:45.293614Z node 85 :YQ_CONTROL_PLANE_STORAGE ERROR: DescribeConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-31T16:38:46.162728Z node 86 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.update@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-31T16:38:47.421443Z node 87 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) content { setting { ydb_database { auth { service_account { id: "my_sa_id" } } } } } error:
: Error: No permission iam.serviceAccounts.use@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-31T16:38:48.577422Z node 88 :YQ_CONTROL_PLANE_STORAGE ERROR: DeleteConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.delete@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-31T16:38:49.509256Z node 89 :YQ_CONTROL_PLANE_STORAGE ERROR: TestConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-31T16:38:50.338347Z node 90 :YQ_CONTROL_PLANE_STORAGE ERROR: TestConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) setting { ydb_database { auth { service_account { id: "my_sa_id" } } } } error:
: Error: No permission iam.serviceAccounts.use@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-31T16:38:51.218766Z node 91 :YQ_CONTROL_PLANE_STORAGE ERROR: CreateBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.create@as in a given scope yandexcloud://my_folder, code: 1000
: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-31T16:38:52.100164Z node 92 :YQ_CONTROL_PLANE_STORAGE ERROR: ListBindingsRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-31T16:38:53.283070Z node 93 :YQ_CONTROL_PLANE_STORAGE ERROR: DescribeBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-31T16:38:54.204991Z node 94 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.update@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-31T16:38:55.064157Z node 95 :YQ_CONTROL_PLANE_STORAGE ERROR: DeleteBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.delete@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-31T16:40:10.884536Z node 163 :YQ_CONTROL_PLANE_STORAGE ERROR: CreateQueryRequest, validation failed: test_user_3@staff **** (00000000) content { name: "my_query_name" } error:
: Error: No permission yq.queries.create@as in a given scope , code: 1000 2025-03-31T16:40:14.469766Z node 166 :YQ_CONTROL_PLANE_STORAGE ERROR: GetQueryStatusRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.queries.getStatus@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-31T16:40:15.609961Z node 167 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyQueryRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.queries.update@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-31T16:40:16.902857Z node 168 :YQ_CONTROL_PLANE_STORAGE ERROR: DeleteQueryRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.queries.delete@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-31T16:40:17.995989Z node 169 :YQ_CONTROL_PLANE_STORAGE ERROR: ControlQueryRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.queries.control@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-31T16:40:19.042354Z node 170 :YQ_CONTROL_PLANE_STORAGE ERROR: GetResultDataRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.queries.getData@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-31T16:40:22.562947Z node 173 :YQ_CONTROL_PLANE_STORAGE ERROR: CreateConnectionRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-31T16:40:25.971432Z node 176 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyConnectionRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.connections.update@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-31T16:40:27.368859Z node 177 :YQ_CONTROL_PLANE_STORAGE ERROR: DeleteConnectionRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.connections.delete@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-31T16:40:28.954925Z node 178 :YQ_CONTROL_PLANE_STORAGE ERROR: TestConnectionRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-31T16:40:30.320895Z node 179 :YQ_CONTROL_PLANE_STORAGE ERROR: CreateBindingRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.bindings.create@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-31T16:40:33.226083Z node 182 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyBindingRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.bindings.update@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-31T16:40:34.302546Z node 183 :YQ_CONTROL_PLANE_STORAGE ERROR: DeleteBindingRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.bindings.delete@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-31T16:40:36.841798Z node 185 :YQ_CONTROL_PLANE_STORAGE ERROR: ListQueriesRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.queries.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-31T16:40:38.195291Z node 186 :YQ_CONTROL_PLANE_STORAGE ERROR: DescribeQueryRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.queries.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-31T16:40:40.888481Z node 188 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyQueryRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.queries.update@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-31T16:40:42.066777Z node 189 :YQ_CONTROL_PLANE_STORAGE ERROR: DeleteQueryRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.queries.delete@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-31T16:40:43.612554Z node 190 :YQ_CONTROL_PLANE_STORAGE ERROR: ControlQueryRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.queries.control@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-31T16:40:46.357197Z node 192 :YQ_CONTROL_PLANE_STORAGE ERROR: ListJobsRequest, validation failed: test_user_4@staff **** (00000000) query_id: "my_query_id" error:
: Error: No permission yq.jobs.get@as in a given scope , code: 1000 2025-03-31T16:40:47.506535Z node 193 :YQ_CONTROL_PLANE_STORAGE ERROR: DescribeJobRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.jobs.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-31T16:40:48.610000Z node 194 :YQ_CONTROL_PLANE_STORAGE ERROR: CreateConnectionRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-31T16:40:49.780512Z node 195 :YQ_CONTROL_PLANE_STORAGE ERROR: ListConnectionsRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-31T16:40:51.133068Z node 196 :YQ_CONTROL_PLANE_STORAGE ERROR: DescribeConnectionRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-31T16:40:52.262252Z node 197 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyConnectionRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.connections.update@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-31T16:40:53.432288Z node 198 :YQ_CONTROL_PLANE_STORAGE ERROR: DeleteConnectionRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.connections.delete@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-31T16:40:54.560468Z node 199 :YQ_CONTROL_PLANE_STORAGE ERROR: TestConnectionRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-31T16:40:55.732793Z node 200 :YQ_CONTROL_PLANE_STORAGE ERROR: CreateBindingRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.bindings.create@as in a given scope yandexcloud://my_folder, code: 1000
: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-31T16:40:57.238253Z node 201 :YQ_CONTROL_PLANE_STORAGE ERROR: ListBindingsRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.bindings.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-31T16:40:58.682987Z node 202 :YQ_CONTROL_PLANE_STORAGE ERROR: DescribeBindingRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.bindings.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-31T16:40:59.819364Z node 203 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyBindingRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.bindings.update@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-31T16:41:01.268658Z node 204 :YQ_CONTROL_PLANE_STORAGE ERROR: DeleteBindingRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.bindings.delete@as in a given scope yandexcloud://my_folder, code: 1000 >> test_discovery.py::TestDiscoveryFaultInjectionSlotStop::test_scenario [GOOD] |99.2%| [TM] {RESULT} ydb/core/fq/libs/control_plane_proxy/ut/unittest >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_success >> test_clickbench.py::TestClickbench::test_clickbench[5] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[6] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_insert.py::TestInsertOperations::test_several_inserts_per_transaction_are_success >> test_session_grace_shutdown.py::TestIdle::test_idle_shutdown_of_session |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestConfigWithoutMetadataMirror::test_cluster_is_operational_without_metadata [GOOD] >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[6] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[7] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_success [GOOD] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_add_new_column >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_add_new_column [GOOD] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_change_column_type >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_change_column_type [GOOD] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_remove_column >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_remove_column [GOOD] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_add_to_key >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_add_to_key [GOOD] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_remove_from_key >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_remove_from_key [GOOD] >> test_insert.py::TestInsertOperations::test_several_inserts_per_transaction_are_success [GOOD] >> test_insert.py::TestInsertOperations::test_insert_plus_update_per_transaction_are_success |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_insert.py::TestInsertOperations::test_insert_plus_update_per_transaction_are_success [GOOD] >> test_insert.py::TestInsertOperations::test_update_plus_insert_per_transaction_are_success_prepared_case >> test_dc_local.py::TestAlloc::test_dc_locality[kikimr0] [GOOD] >> test_insert.py::TestInsertOperations::test_update_plus_insert_per_transaction_are_success_prepared_case [GOOD] >> 
test_insert.py::TestInsertOperations::test_upsert_plus_insert_per_transaction_are_success_prepared_case >> test_insert.py::TestInsertOperations::test_upsert_plus_insert_per_transaction_are_success_prepared_case [GOOD] >> test_insert.py::TestInsertOperations::test_insert_plus_upsert_are_success |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_insert.py::TestInsertOperations::test_insert_plus_upsert_are_success [GOOD] >> test_insert.py::TestInsertOperations::test_insert_revert_basis |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_insert.py::TestInsertOperations::test_insert_revert_basis [GOOD] >> test_insert.py::TestInsertOperations::test_query_pairs >> test_result_limits.py::TestResultLimits::test_many_rows >> test_clickbench.py::TestClickbench::test_clickbench[7] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[8] >> test_discovery.py::TestMirror3DCDiscovery::test_mirror3dc_discovery_logic |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[block-4-2] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_crud.py::TestClientTimeouts::test_can_set_timeouts_on_query >> test_clickbench.py::TestClickbench::test_clickbench[8] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[9] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3] [GOOD] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> AnalyzeColumnshard::AnalyzeRebootColumnShard [FAIL] >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_in_new_channel_then_can_read_from_tablet >> test_clickbench.py::TestClickbench::test_clickbench[9] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[10] >> test_restarts.py::TestRestartSingleMirror3DC::test_restart_single_node_is_ok >> test_restarts.py::TestRestartMultipleMirror34::test_tablets_are_successfully_started_after_few_killed_nodes >> test_crud.py::TestClientTimeouts::test_can_set_timeouts_on_query [GOOD] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_remove_from_key [GOOD] >> test_restarts.py::TestRestartSingleBlock42::test_restart_single_node_is_ok >> test_clickbench.py::TestClickbench::test_clickbench[10] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[11] >> KafkaProtocol::NativeKafkaBalanceScenario [GOOD] >> KafkaProtocol::InitProducerId_withoutTransactionalIdShouldReturnRandomInt >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3-dc] [GOOD] >> test_isolation.py::TestTransactionIsolation::test_prevents_write_cycles_g0 >> 
test_tablet_channel_migration.py::TestChannelsOps::test_when_write_in_new_channel_then_can_read_from_tablet [GOOD] >> test_insert.py::TestInsertOperations::test_query_pairs [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[11] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[12] >> KafkaProtocol::InitProducerId_withoutTransactionalIdShouldReturnRandomInt [GOOD] >> KafkaProtocol::InitProducerId_forNewTransactionalIdShouldReturnRandomInt >> test_clickbench.py::TestClickbench::test_clickbench[12] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[13] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_in_new_channel_then_can_read_from_tablet [GOOD] >> test_isolation.py::TestTransactionIsolation::test_prevents_write_cycles_g0 [GOOD] >> test_isolation.py::TestTransactionIsolation::test_prevents_aborted_reads_g1a >> test_session_grace_shutdown.py::TestIdle::test_idle_shutdown_of_session [GOOD] >> test_isolation.py::TestTransactionIsolation::test_prevents_aborted_reads_g1a [GOOD] >> test_isolation.py::TestTransactionIsolation::test_prevents_intermediate_reads_g1b >> test_isolation.py::TestTransactionIsolation::test_prevents_intermediate_reads_g1b [GOOD] >> test_isolation.py::TestTransactionIsolation::test_prevents_circular_information_flow_g1c >> test_isolation.py::TestTransactionIsolation::test_prevents_circular_information_flow_g1c [GOOD] >> test_isolation.py::TestTransactionIsolation::test_isolation_mailing_list_example ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootColumnShard [FAIL] 2025-03-31 16:42:22,208 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-03-31 16:42:22,328 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination: pid rss ref pdirt 617338 46.2M 44.6M 23.1M test_tool run_ut @/home/runner/.ya/build/build_root/0l5c/000e83/ydb/core/statistics/aggregator/ut/test-results/unittest/testing_out_stuff/chunk4/testing_out_stuff/test_tool.ar 850233 882M 887M 292M └─ llvm-symbolizer --demangle --obj /home/runner/.ya/build/build_root/0l5c/000e83/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut Test command err: 2025-03-31T16:32:26.338660Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T16:32:26.339014Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T16:32:26.339172Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000e83/r3tmp/tmpi9vmRd/pdisk_1.dat 2025-03-31T16:32:26.715337Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3979, node 1 2025-03-31T16:32:26.979068Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:32:26.980688Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:32:26.980746Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:32:26.981363Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:32:26.984118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T16:32:27.077346Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:27.077472Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:32:27.096829Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1028 2025-03-31T16:32:27.647379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T16:32:30.983150Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-31T16:32:31.024534Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:31.024661Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:32:31.065224Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T16:32:31.067832Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:32:31.311792Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:32:31.312356Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:32:31.313030Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:32:31.313162Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:32:31.313414Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:32:31.313539Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:32:31.313636Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:32:31.313745Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:32:31.313832Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T16:32:31.480469Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:32:31.480549Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:32:31.493424Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:32:31.636419Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:32:31.683482Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-31T16:32:31.683636Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-31T16:32:31.711341Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-31T16:32:31.711783Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-31T16:32:31.711985Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-31T16:32:31.712043Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-31T16:32:31.712086Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-31T16:32:31.712128Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-31T16:32:31.712185Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-31T16:32:31.712232Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-31T16:32:31.713758Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-31T16:32:31.741804Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-31T16:32:31.741908Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1870:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-31T16:32:31.749493Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1881:2607] 2025-03-31T16:32:31.753127Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1904:2618] 2025-03-31T16:32:31.753242Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1904:2618], schemeshard id = 72075186224037897 2025-03-31T16:32:31.762507Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-31T16:32:31.780094Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-31T16:32:31.780141Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-31T16:32:31.780197Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-31T16:32:31.794118Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-31T16:32:31.838362Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-31T16:32:31.838551Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-31T16:32:32.029338Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-31T16:32:32.190266Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-31T16:32:32.327359Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-31T16:32:33.255506Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2238:3072], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:33.255672Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:32:33.275572Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-31T16:32:33.399201Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T16:32:33.399416Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T16:32:33.399701Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T16:32:33.399876Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T16:32:33.400023Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T16:32:33.400182Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T16:32:33.400299Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T16:32:33.400419Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T16:32:33.400560Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T16:32:33.400720Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T16:32:33.400852Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T16:32:33.400945Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T16:32:33.427008Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T16:32:33.427132Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;descri ... extTraversal 2025-03-31T16:41:43.086136Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-03-31T16:41:43.086173Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-03-31T16:41:44.328464Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-31T16:41:44.328542Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-03-31T16:41:44.328593Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-03-31T16:41:45.527250Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-31T16:41:45.527490Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-31T16:41:45.538415Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:41:45.538494Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-03-31T16:41:45.538531Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-03-31T16:41:46.744954Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-31T16:41:46.745035Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-03-31T16:41:46.745071Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-03-31T16:41:47.978979Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:41:47.979079Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-03-31T16:41:47.979134Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-03-31T16:41:49.146712Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-31T16:41:49.157646Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-31T16:41:49.157721Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-03-31T16:41:49.157756Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-03-31T16:41:50.516636Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:41:50.516704Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-03-31T16:41:50.516732Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 
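Editor's note: both in this trace and in the kqp view test earlier, the same startup sequence repeats: WorkloadService fails to fetch pool `default` with NOT_FOUND, creates it, schedules a retry with "Transaction ... completed, doublechecking", and a racing creator is told "path exist, request accepts it". An illustrative model of that create-if-missing handshake follows; this is plain Python standing in for the actor logic, not YDB's code.

```python
# Illustrative model (not YDB's actor code) of the lazy default-pool creation
# the NOT_FOUND warnings above describe: fetch, create on miss, and treat a
# lost creation race ("path exist, request accepts it") as success.
import threading

_pools: dict[tuple[str, str], dict] = {}
_lock = threading.Lock()

def fetch_or_create_pool(database: str, pool_id: str = "default") -> dict:
    pool = _pools.get((database, pool_id))
    if pool is not None:
        return pool                      # fast path: pool already exists
    with _lock:                          # slow path: create, then doublecheck
        existing = _pools.get((database, pool_id))
        if existing is not None:
            return existing              # lost the race: accept the existing path
        created = {"queries": 0}
        _pools[(database, pool_id)] = created
        return created
```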
2025-03-31T16:41:51.723591Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-31T16:41:51.723751Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-31T16:41:51.734787Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-31T16:41:51.734868Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-03-31T16:41:51.734909Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-03-31T16:41:52.927877Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:41:52.927955Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-03-31T16:41:52.927993Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-03-31T16:41:54.087555Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-31T16:41:54.087625Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-03-31T16:41:54.087657Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-03-31T16:41:55.285486Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-31T16:41:55.296412Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:41:55.296508Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-03-31T16:41:55.296547Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-03-31T16:41:56.670226Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-31T16:41:56.670303Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-03-31T16:41:56.670334Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-03-31T16:41:58.053094Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-31T16:41:58.053303Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-31T16:41:58.064774Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:41:58.064851Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-03-31T16:41:58.064891Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-03-31T16:41:59.377142Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-31T16:41:59.377221Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-03-31T16:41:59.377261Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 
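Editor's note: the surrounding DEBUG lines are the statistics aggregator's idle loop; roughly every second it alternates ScheduleNextAnalyze and ScheduleNextTraversal, finds all force-traversal requests already sent, and periodically emits PropagateStatistics. A compressed, purely illustrative model of that cadence:

```python
# Compressed, illustrative model of the aggregator idle loop visible above:
# alternate analyze/traversal scheduling, propagating statistics periodically.
import itertools
import time

def aggregator_idle_loop(ticks: int, propagate_every: int = 3) -> None:
    phases = itertools.cycle(["ScheduleNextAnalyze", "ScheduleNextTraversal"])
    for tick in range(ticks):
        if tick and tick % propagate_every == 0:
            print("PropagateStatistics(), node count = 1, schemeshard count = 1")
        print(f"{next(phases)}. All the force traversal operations sent the requests.")
        time.sleep(0.01)  # the real tablet wakes up roughly every 1-2 s

aggregator_idle_loop(ticks=6)
```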
2025-03-31T16:42:00.667953Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:42:00.668039Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-03-31T16:42:00.668081Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-03-31T16:42:01.931215Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-31T16:42:01.942262Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-31T16:42:01.942345Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-03-31T16:42:01.942383Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-03-31T16:42:03.357320Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:42:03.357400Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-03-31T16:42:03.357435Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-03-31T16:42:04.642537Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-31T16:42:04.642792Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-31T16:42:04.653806Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-31T16:42:04.653885Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-03-31T16:42:04.653922Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-03-31T16:42:05.865207Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T16:42:05.865306Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-03-31T16:42:05.865339Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 
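Editor's note, for reference against the control-plane proxy dump earlier in this log: every rejection there has the same shape: the request name, the missing permission (e.g. yq.bindings.delete@as), and the scope (yandexcloud://my_folder, or empty for list-style calls). The sketch below is a hypothetical model of that validation, not the YDB API; the permission table mirrors the log and also shows why CreateBindingRequest sometimes prints two issue lines (bindings.create plus connections.get).

```python
# Hypothetical model of the control-plane permission validation reported
# above; request names and permissions mirror the log, but the function
# itself is illustrative, not the YDB API.
REQUIRED_PERMISSIONS = {
    "CreateQueryRequest": ["yq.queries.create@as"],
    "DeleteBindingRequest": ["yq.bindings.delete@as"],
    # A binding references a connection, so creating one also needs read
    # access to connections -- hence the occasional doubled issue lines.
    "CreateBindingRequest": ["yq.bindings.create@as", "yq.connections.get@as"],
}

def validate(request: str, scope: str, granted: set[tuple[str, str]]) -> list[str]:
    issues = []
    for permission in REQUIRED_PERMISSIONS.get(request, []):
        if (permission, scope) not in granted:  # granted holds (permission, scope) pairs
            issues.append(
                f": Error: No permission {permission} in a given scope {scope}, code: 1000"
            )
    return issues

# A user with no grants in scope yandexcloud://my_folder trips both checks:
print(validate("CreateBindingRequest", "yandexcloud://my_folder", set()))
```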
(TWithBackTrace) ydb/library/actors/testlib/test_runtime.h:579: Exception occured while waiting for NKikimr::NStat::TEvStatistics::TEvAnalyzeResponse: (NActors::TSchedulingLimitReachedException) TestActorRuntime Processed over 100000 events.ydb/library/actors/testlib/test_runtime.cpp:716: TBackTrace::Capture()+28 (0x18C0A59C) TWithBackTrace::TWithBackTrace<>()+80 (0x18832490) NKikimr::NStat::TEvStatistics::TEvAnalyzeResponse::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEventRethrow(NActors::TActorId const&, TDuration)+485 (0x18806B15) NKikimr::NStat::NTestSuiteAnalyzeColumnshard::TTestCaseAnalyzeRebootColumnShard::Execute_(NUnitTest::TTestContext&)+4263 (0x188236B7) std::__y1::__function::__func, void ()>::operator()()+280 (0x1882E548) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x190F5E06) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x190CEA19) NKikimr::NStat::NTestSuiteAnalyzeColumnshard::TCurrentTest::Execute()+1204 (0x1882D714) NUnitTest::TTestFactory::Execute()+2438 (0x190D02E6) NUnitTest::RunMain(int, char**)+5213 (0x190F037D) ??+0 (0x7FB1AE502D90) __libc_start_main+128 (0x7FB1AE502E40) _start+41 (0x1618F029) Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8330113388/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/0l5c/000e83/ydb/core/statistics/aggregator/ut/test-results/unittest/testing_out_stuff/chunk4/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1749, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8330113388/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/0l5c/000e83/ydb/core/statistics/aggregator/ut/test-results/unittest/testing_out_stuff/chunk4/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout",), {}) >> test_discovery.py::TestMirror3DCDiscovery::test_mirror3dc_discovery_logic [GOOD] >> test_isolation.py::TestTransactionIsolation::test_isolation_mailing_list_example [GOOD] >> test_isolation.py::TestTransactionIsolation::test_prevents_observed_transaction_vanishes_otv |99.3%| [TA] $(B)/ydb/core/statistics/aggregator/ut/test-results/unittest/{meta.json ... results_accumulator.log} |99.3%| [TA] {RESULT} $(B)/ydb/core/statistics/aggregator/ut/test-results/unittest/{meta.json ... 
>> test_clickbench.py::TestClickbench::test_clickbench[13] [GOOD]
>> test_clickbench.py::TestClickbench::test_clickbench[14]
>> test_isolation.py::TestTransactionIsolation::test_prevents_observed_transaction_vanishes_otv [GOOD]
>> test_isolation.py::TestTransactionIsolation::test_does_not_prevent_predicate_many_preceders_pmp
>> test_isolation.py::TestTransactionIsolation::test_does_not_prevent_predicate_many_preceders_pmp [GOOD]
>> test_isolation.py::TestTransactionIsolation::test_does_not_prevent_predicate_many_preceders_pmp_for_write_predicates
>> test_restarts.py::TestRestartClusterBlock42::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok
>> test_isolation.py::TestTransactionIsolation::test_does_not_prevent_predicate_many_preceders_pmp_for_write_predicates [GOOD]
>> test_isolation.py::TestTransactionIsolation::test_lost_update_p4
>> test_isolation.py::TestTransactionIsolation::test_lost_update_p4 [GOOD]
>> test_isolation.py::TestTransactionIsolation::test_lost_update_on_value_p4
>> test_isolation.py::TestTransactionIsolation::test_lost_update_on_value_p4 [GOOD]
>> test_isolation.py::TestTransactionIsolation::test_lost_update_on_value_with_upsert_p4
>> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--true] [GOOD]
>> test_isolation.py::TestTransactionIsolation::test_lost_update_on_value_with_upsert_p4 [GOOD]
>> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single
>> test_clickbench.py::TestClickbench::test_clickbench[14] [GOOD]
>> test_clickbench.py::TestClickbench::test_clickbench[15]
|99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test
>> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3-dc] [GOOD]
|99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test
>> test_session_grace_shutdown.py::TestIdle::test_idle_shutdown_of_session [GOOD]
>> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single [GOOD]
>> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single_predicate_deps
>> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single_predicate_deps [GOOD]
>> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single_write_predicate
>> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single_write_predicate [GOOD]
>> test_isolation.py::TestTransactionIsolation::test_write_skew_g2_item
>> test_clickbench.py::TestClickbench::test_clickbench[15] [GOOD]
>> test_clickbench.py::TestClickbench::test_clickbench[16]
>> test_isolation.py::TestTransactionIsolation::test_write_skew_g2_item [GOOD]
>> test_isolation.py::TestTransactionIsolation::test_anti_dependency_cycles_g2
>> test_isolation.py::TestTransactionIsolation::test_anti_dependency_cycles_g2 [GOOD]
>> test_isolation.py::TestTransactionIsolation::test_anti_dependency_cycles_g2_two_edges
>> test_isolation.py::TestTransactionIsolation::test_anti_dependency_cycles_g2_two_edges [GOOD]
>> test_restarts.py::TestRestartClusterMirror34::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok
>> test_crud.py::TestManySelectsInRow::test_selects_in_row_success[500-500-50]
|99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test
|99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test
>> test_insert.py::TestInsertOperations::test_query_pairs [GOOD]
>> test_insert_restarts.py::TestS3::test_atomic_upload_commit[v2-client0] [GOOD]
>> test_clickbench.py::TestClickbench::test_clickbench[16] [GOOD]
>> test_clickbench.py::TestClickbench::test_clickbench[17]
>> KafkaProtocol::InitProducerId_forNewTransactionalIdShouldReturnRandomInt [GOOD]
>> KafkaProtocol::InitProducerId_forSqlInjectionShouldReturnWithoutDropingDatabase
>> test_clickbench.py::TestClickbench::test_clickbench[17] [GOOD]
>> test_clickbench.py::TestClickbench::test_clickbench[18]
>> test_indexes.py::TestSecondaryIndexes::test_create_table_with_global_index
>> test_restarts.py::TestRestartClusterMirror3DC::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok
|99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test
>> test_discovery.py::TestMirror3DCDiscovery::test_mirror3dc_discovery_logic [GOOD]
>> test_result_limits.py::TestResultLimits::test_many_rows [GOOD]
>> test_clickbench.py::TestClickbench::test_clickbench[18] [GOOD]
>> test_clickbench.py::TestClickbench::test_clickbench[19]
>> test_session_pool.py::TestSessionPool::test_session_pool_simple_acquire
>> test_indexes.py::TestSecondaryIndexes::test_create_table_with_global_index [GOOD]
>> test_clickbench.py::TestClickbench::test_clickbench[19] [GOOD]
>> test_clickbench.py::TestClickbench::test_clickbench[20]
>> test_clickbench.py::TestClickbench::test_clickbench[20] [GOOD]
>> test_clickbench.py::TestClickbench::test_clickbench[21]
>> test_session_pool.py::TestSessionPool::test_session_pool_simple_acquire [GOOD]
>> test_session_pool.py::TestSessionPool::test_session_pool_no_race_after_future_cancel_case_1
>> test_session_pool.py::TestSessionPool::test_session_pool_no_race_after_future_cancel_case_1 [GOOD]
>> test_session_pool.py::TestSessionPool::test_session_pool_no_race_after_future_cancel_case_2 [GOOD]
>> test_session_pool.py::TestSessionPool::test_session_pool_keep_alive
>> test_session_pool.py::TestSessionPool::test_session_pool_keep_alive [GOOD]
>> test_session_pool.py::TestSessionPool::test_session_pool_no_race_after_future_cancel_case_3 [GOOD]
>> test_session_pool.py::TestSessionPool::test_session_pool_no_race_after_future_cancel_case_4 [GOOD]
>> test_session_pool.py::TestSessionPool::test_session_pool_release_logic [GOOD]
>> test_session_pool.py::TestSessionPool::test_session_pool_close_basic_logic_case_1 [GOOD]
>> test_session_pool.py::TestSessionPool::test_no_cluster_endpoints_no_failure
|99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/restarts/py3test
>> test_insert_restarts.py::TestS3::test_atomic_upload_commit[v2-client0] [GOOD]
|99.3%| [TM] {RESULT} ydb/tests/fq/restarts/py3test
>> test_clickbench.py::TestClickbench::test_clickbench[21] [GOOD]
>> test_clickbench.py::TestClickbench::test_clickbench[22]
|99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test
>> test_indexes.py::TestSecondaryIndexes::test_create_table_with_global_index [GOOD]
|99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test
>> test_isolation.py::TestTransactionIsolation::test_anti_dependency_cycles_g2_two_edges [GOOD]
>> KafkaProtocol::InitProducerId_forSqlInjectionShouldReturnWithoutDropingDatabase [GOOD]
>> KafkaProtocol::InitProducerId_forPreviouslySeenTransactionalIdShouldReturnSameProducerIdAndIncrementEpoch
|99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test
>> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--true] [GOOD]
>> test_replication.py::TestReplicationAfterNodesRestart::test_replication[block-4-2] [GOOD]
>> test_restarts.py::TestRestartMultipleBlock42::test_tablets_are_successfully_started_after_few_killed_nodes
>> test_public_api.py::TestExplain::test_explain_data_query
>> test_restarts.py::TestRestartMultipleMirror3DC::test_tablets_are_successfully_started_after_few_killed_nodes
>> test_clickbench.py::TestClickbench::test_clickbench[22] [GOOD]
>> test_clickbench.py::TestClickbench::test_clickbench[23]
>> test_public_api.py::TestExplain::test_explain_data_query [GOOD]
>> test_clickbench.py::TestClickbench::test_clickbench[23] [GOOD]
>> test_clickbench.py::TestClickbench::test_clickbench[24]
>> test_clickbench.py::TestClickbench::test_clickbench[24] [GOOD]
>> test_clickbench.py::TestClickbench::test_clickbench[25]
|99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test
>> test_replication.py::TestReplicationAfterNodesRestart::test_replication[block-4-2] [GOOD]
>> KafkaProtocol::InitProducerId_forPreviouslySeenTransactionalIdShouldReturnSameProducerIdAndIncrementEpoch [GOOD]
>> KafkaProtocol::InitProducerId_forPreviouslySeenTransactionalIdShouldReturnNewProducerIdIfEpochOverflown
>> test_clickbench.py::TestClickbench::test_clickbench[25] [GOOD]
>> test_clickbench.py::TestClickbench::test_clickbench[26]
>> test_clickbench.py::TestClickbench::test_clickbench[26] [GOOD]
>> test_clickbench.py::TestClickbench::test_clickbench[27]
|99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test
>> KqpService::PatternCache [GOOD]
>> KqpService::RangeCache+UseCache
>> test_session_pool.py::TestSessionPool::test_no_cluster_endpoints_no_failure [GOOD]
>> test_session_pool.py::TestSessionPool::test_session_pool_close_basic_logic_case_2
>> test_session_pool.py::TestSessionPool::test_session_pool_close_basic_logic_case_2 [GOOD]
>> test_session_pool.py::TestSessionPool::test_session_pool_min_size_feature
>> test_session_pool.py::TestSessionPool::test_session_pool_min_size_feature [GOOD]
>> test_clickbench.py::TestClickbench::test_clickbench[27] [GOOD]
>> test_clickbench.py::TestClickbench::test_clickbench[28]
|99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test
>> test_clickbench.py::TestClickbench::test_clickbench[28] [GOOD]
>> test_clickbench.py::TestClickbench::test_clickbench[29]
|99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test
>> test_public_api.py::TestCRUDOperations::test_prepared_query_pipeline
>> KafkaProtocol::InitProducerId_forPreviouslySeenTransactionalIdShouldReturnNewProducerIdIfEpochOverflown [GOOD]
>> TMetadataActorTests::TopicMetadataGoodAndBad
|99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test
>> test_public_api.py::TestCRUDOperations::test_prepared_query_pipeline [GOOD]
>> test_public_api.py::TestCRUDOperations::test_scheme_client_ops
>> test_public_api.py::TestCRUDOperations::test_scheme_client_ops [GOOD]
>> test_public_api.py::TestCRUDOperations::test_scheme_operation_errors_handle
>> test_public_api.py::TestCRUDOperations::test_scheme_operation_errors_handle [GOOD]
>> test_public_api.py::TestCRUDOperations::test_none_values
>> test_public_api.py::TestCRUDOperations::test_none_values [GOOD]
>> test_public_api.py::TestCRUDOperations::test_parse_list_type
>> test_public_api.py::TestCRUDOperations::test_parse_list_type [GOOD]
>> test_public_api.py::TestCRUDOperations::test_parse_tuple
>> test_public_api.py::TestCRUDOperations::test_parse_tuple [GOOD]
>> test_public_api.py::TestCRUDOperations::test_dict_type
>>
test_public_api.py::TestCRUDOperations::test_dict_type [GOOD] >> test_public_api.py::TestCRUDOperations::test_struct_type >> test_public_api.py::TestCRUDOperations::test_struct_type [GOOD] >> test_public_api.py::TestCRUDOperations::test_data_types >> test_public_api.py::TestCRUDOperations::test_data_types [GOOD] >> test_public_api.py::TestCRUDOperations::test_struct_type_parameter >> test_public_api.py::TestCRUDOperations::test_struct_type_parameter [GOOD] >> test_public_api.py::TestCRUDOperations::test_bulk_prepared_insert_many_values >> test_public_api.py::TestCRUDOperations::test_bulk_prepared_insert_many_values [GOOD] >> test_public_api.py::TestCRUDOperations::test_bulk_upsert |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_session_pool.py::TestSessionPool::test_session_pool_min_size_feature [GOOD] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> KqpService::RangeCache+UseCache [GOOD] >> test_public_api.py::TestCRUDOperations::test_bulk_upsert [GOOD] >> test_public_api.py::TestCRUDOperations::test_all_enums_are_presented_as_exceptions [GOOD] >> test_public_api.py::TestCRUDOperations::test_type_builders_str_methods [GOOD] >> test_public_api.py::TestCRUDOperations::test_create_and_delete_session_then_use_it_again [GOOD] >> test_public_api.py::TestCRUDOperations::test_locks_invalidated_error >> test_public_api.py::TestCRUDOperations::test_locks_invalidated_error [GOOD] >> test_public_api.py::TestCRUDOperations::test_tcl [GOOD] >> test_public_api.py::TestCRUDOperations::test_tcl_2 >> test_public_api.py::TestCRUDOperations::test_tcl_2 [GOOD] >> test_public_api.py::TestCRUDOperations::test_tcl_3 >> test_public_api.py::TestCRUDOperations::test_tcl_3 [GOOD] >> test_public_api.py::TestCRUDOperations::test_reuse_session_to_tx_leak ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpService::RangeCache+UseCache [GOOD] Test command err: Trying to start YDB, gRPC: 28037, MsgBus: 11419 2025-03-31T16:34:40.568592Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488012973928108842:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:34:40.569854Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0014b5/r3tmp/tmpXhfslT/pdisk_1.dat 2025-03-31T16:34:40.961779Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28037, node 1 2025-03-31T16:34:41.008965Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:34:41.009105Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:34:41.012846Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T16:34:41.035006Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:34:41.035046Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:34:41.035062Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:34:41.035207Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server 
localhost:11419 TClient is connected to server localhost:11419 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:34:41.634216Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:41.649035Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T16:34:41.665839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:41.876597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:42.046997Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:42.116200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:34:43.748110Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012986813012493:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:43.748248Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:44.033248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T16:34:44.070389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T16:34:44.110505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:34:44.143159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:34:44.174203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T16:34:44.216186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T16:34:44.306307Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012991107980308:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:44.306371Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:44.306588Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488012991107980313:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:34:44.310400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T16:34:44.322582Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488012991107980315:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T16:34:44.396603Z node 1 :TX_PROXY ERROR: Actor# [1:7488012991107980369:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:34:45.337203Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=MzQ0ZmM4ZGMtYjFlNTY5OTYtNjE4MGQyYzgtYWUwYzYzODY=, ActorId: [1:7488012986813012475:2404], ActorState: ReadyState, Session closed due to explicit close event 2025-03-31T16:34:45.337288Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=MzQ0ZmM4ZGMtYjFlNTY5OTYtNjE4MGQyYzgtYWUwYzYzODY=, ActorId: [1:7488012986813012475:2404], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-03-31T16:34:45.337326Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MzQ0ZmM4ZGMtYjFlNTY5OTYtNjE4MGQyYzgtYWUwYzYzODY=, ActorId: [1:7488012986813012475:2404], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-03-31T16:34:45.337363Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MzQ0ZmM4ZGMtYjFlNTY5OTYtNjE4MGQyYzgtYWUwYzYzODY=, ActorId: [1:7488012986813012475:2404], ActorState: unknown state, Cleanup temp tables: 0 2025-03-31T16:34:45.337474Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MzQ0ZmM4ZGMtYjFlNTY5OTYtNjE4MGQyYzgtYWUwYzYzODY=, ActorId: [1:7488012986813012475:2404], ActorState: unknown state, Session actor destroyed 2025-03-31T16:34:45.346192Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NmUzZDJmMC0xYjQ2YzZlMC00ZWViYjlhNC01NWZmMmNkNw==, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id NmUzZDJmMC0xYjQ2YzZlMC00ZWViYjlhNC01NWZmMmNkNw== 2025-03-31T16:34:45.346438Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NmUzZDJmMC0xYjQ2YzZlMC00ZWViYjlhNC01NWZmMmNkNw==, ActorId: [1:7488012995402947957:2496], ActorState: unknown state, session actor bootstrapped 2025-03-31T16:34:45.362799Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NWI1YWRlODgtMzc0Yjg0NDUtNTQ4Mzc3MjctZjM1MDA2Nzg=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id NWI1YWRlODgtMzc0Yjg0NDUtNTQ4Mzc3MjctZjM1MDA2Nzg= 2025-03-31T16:34:45.362952Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NWI1YWRlODgtMzc0Yjg0NDUtNTQ4Mzc3MjctZjM1MDA2Nzg=, ActorId: [1:7488012995402947959:2498], ActorState: unknown state, session actor bootstrapped 2025-03-31T16:34:45.381955Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZWYxYTI2NTQtNjUzYWI0ZmMtODNlNThiY2YtM2JlYzA3NmE=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ZWYxYTI2NTQtNjUzYWI0ZmMtODNlNThiY2YtM2JlYzA3NmE= 2025-03-31T16:34:45.382192Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZWYxYTI2NTQtNjUzYWI0ZmMtODNlNThiY2YtM2JlYzA3NmE=, ActorId: [1:7488012995402947961:2500], ActorState: unknown state, session actor bootstrapped 2025-03-31T16:34:45.391540Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=M2M4MzcyMWItNzJlOGFlOWEtOTE2YWEyOWYtNTljZTBjNQ==, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id M2M4MzcyMWItNzJlOGFlOWEtOTE2YWEyOWYtNTljZTBjNQ== 2025-03-31T16:34:45.392159Z node 1 :KQP_SESSION DEBUG: 
SessionId: ydb://session/3?node_id=1&id=M2M4MzcyMWItNzJlOGFlOWEtOTE2YWEyOWYtNTljZTBjNQ==, ActorId: [1:7488012995402947963:2502], ActorState: unknown state, session actor bootstrapped 2025-03-31T16:34:45.402586Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NmE5OTYwMTEtOWYwMjQ3Zi0zOWM3NGNlZi1lM2M1NzBiOA==, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id NmE5OTYwMTEtOWYwMjQ3Zi0zOWM3NGNlZi1lM2M1NzBiOA== 2025-03-31T16:34:45.402785Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NmE5OTYwMTEtOWYwMjQ3Zi0zOWM3NGNlZi1lM2M1NzBiOA==, ActorId: [1:7488012995402947965:2504], ActorState: unknown state, session actor bootstrapped 2025-03-31T16:34:45.416244Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OWM3NTQ0OTctODdlOTQ2NTItZTcyMTI0YTctMzIwNGFiOGU=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id OWM3NTQ0OTctODdlOTQ2NTItZTcyMTI0YTctMzIwNGFiOGU= 2025-03-31T16:34:45.416460Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OWM3NTQ0OTctODdlOTQ2NTItZTcyMTI0YTctMzIwNGFiOGU=, ActorId: [1:74880129954029 ... cePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:41:02.934147Z node 7 :TX_PROXY ERROR: Actor# [7:7488014615051918332:2429] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:41:02.945673Z node 7 :TX_PROXY ERROR: Actor# [7:7488014615051918344:2438] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:41:02.960689Z node 7 :TX_PROXY ERROR: Actor# [7:7488014615051918362:2446] txid# 281474976710672, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:41:02.960934Z node 7 :TX_PROXY ERROR: Actor# [7:7488014615051918363:2447] txid# 281474976710673, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:41:02.962540Z node 7 :TX_PROXY ERROR: Actor# [7:7488014615051918371:2455] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:41:02.963877Z node 7 :TX_PROXY ERROR: Actor# [7:7488014615051918342:2436] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:41:02.971733Z node 7 :TX_PROXY ERROR: Actor# [7:7488014615051918387:2465] txid# 281474976710675, issues: { message: "Check failed: path: 
\'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:41:02.971983Z node 7 :TX_PROXY ERROR: Actor# [7:7488014615051918388:2466] txid# 281474976710676, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:41:02.976046Z node 7 :TX_PROXY ERROR: Actor# [7:7488014615051918406:2479] txid# 281474976710677, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:41:11.445731Z node 7 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:41:11.445761Z node 7 :IMPORT WARN: Table profiles were not loaded Trying to start YDB, gRPC: 13377, MsgBus: 13932 2025-03-31T16:43:30.070685Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7488015250621644039:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:43:30.070785Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/0014b5/r3tmp/tmpWGzwvD/pdisk_1.dat 2025-03-31T16:43:30.245164Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:43:30.309544Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:43:30.309691Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:43:30.312259Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13377, node 8 2025-03-31T16:43:30.400032Z node 8 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T16:43:30.400065Z node 8 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T16:43:30.400078Z node 8 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T16:43:30.400325Z node 8 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13932 TClient is connected to server localhost:13932 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:43:31.173529Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:43:31.188228Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T16:43:31.201147Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:43:31.322097Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:43:31.684661Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:43:31.797640Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:43:35.071053Z node 8 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[8:7488015250621644039:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:43:35.071208Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T16:43:35.660670Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7488015272096482298:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:43:35.660832Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:43:35.744245Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T16:43:35.793685Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T16:43:35.841263Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-31T16:43:35.886188Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-31T16:43:35.929287Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-31T16:43:35.983387Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-31T16:43:36.058339Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7488015276391450107:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:43:36.058487Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:43:36.058557Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7488015276391450112:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:43:36.063737Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-31T16:43:36.082892Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [8:7488015276391450114:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-31T16:43:36.139286Z node 8 :TX_PROXY ERROR: Actor# [8:7488015276391450171:3460] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:43:45.203482Z node 8 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:43:45.203535Z node 8 :IMPORT WARN: Table profiles were not loaded took: 12.185641s took: 12.190055s took: 12.190727s took: 12.197088s took: 12.199246s took: 12.198405s took: 12.199907s took: 12.199043s took: 12.200424s took: 12.202063s >> test_read_table.py::TestReadTableSuccessStories::test_read_table_only_specified_ranges |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_public_api.py::TestCRUDOperations::test_reuse_session_to_tx_leak [GOOD] >> test_public_api.py::TestCRUDOperations::test_direct_leak_tx_but_no_actual_leak_by_best_efforts |99.4%| [TA] $(B)/ydb/core/kqp/ut/service/test-results/unittest/{meta.json ... results_accumulator.log} >> test_public_api.py::TestCRUDOperations::test_direct_leak_tx_but_no_actual_leak_by_best_efforts [GOOD] >> test_cms_erasure.py::TestDegradedGroupBlock42Max::test_no_degraded_groups_after_shutdown >> test_public_api.py::TestCRUDOperations::test_presented_in_cache [GOOD] >> test_public_api.py::TestCRUDOperations::test_decimal_values_negative_stories >> test_public_api.py::TestCRUDOperations::test_decimal_values_negative_stories [GOOD] >> test_public_api.py::TestCRUDOperations::test_decimal_values >> test_public_api.py::TestCRUDOperations::test_decimal_values [GOOD] >> TMetadataActorTests::TopicMetadataGoodAndBad [GOOD] >> test_public_api.py::TestCRUDOperations::test_list_directory_with_children >> PublishKafkaEndpoints::HaveEndpointInLookup >> test_public_api.py::TestCRUDOperations::test_list_directory_with_children [GOOD] >> test_public_api.py::TestCRUDOperations::test_validate_describe_path_result >> test_public_api.py::TestCRUDOperations::test_validate_describe_path_result [GOOD] >> test_public_api.py::TestCRUDOperations::test_acl_modifications_1 [GOOD] >> test_public_api.py::TestCRUDOperations::test_acl_modification_2 [GOOD] >> test_public_api.py::TestCRUDOperations::test_can_execute_valid_statement_after_invalid_success >> test_public_api.py::TestCRUDOperations::test_can_execute_valid_statement_after_invalid_success [GOOD] >> test_public_api.py::TestCRUDOperations::test_modify_permissions_3 [GOOD] >> test_public_api.py::TestCRUDOperations::test_directory_that_doesnt_exists [GOOD] >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorKeep::test_restart_as_much_as_can [GOOD] >> test_public_api.py::TestCRUDOperations::test_crud_acl_actions >> test_public_api.py::TestCRUDOperations::test_crud_acl_actions [GOOD] >> test_public_api.py::TestCRUDOperations::test_too_many_pending_transactions >> test_public_api.py::TestCRUDOperations::test_too_many_pending_transactions [GOOD] >> test_public_api.py::TestCRUDOperations::test_query_set1 |99.4%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/service/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test_read_table.py::TestReadTableSuccessStories::test_read_table_only_specified_ranges [GOOD] >> test_read_table.py::TestReadTableSuccessStories::test_read_table_constructed_key_range >> test_public_api.py::TestCRUDOperations::test_query_set1 [GOOD] >> test_public_api.py::TestCRUDOperations::test_queries_set2 >> test_clickbench.py::TestClickbench::test_clickbench[29] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[30] >> test_public_api.py::TestCRUDOperations::test_queries_set2 [GOOD] >> test_public_api.py::TestCRUDOperations::test_when_result_set_is_large_then_issue_occure >> test_read_table.py::TestReadTableSuccessStories::test_read_table_constructed_key_range [GOOD] >> test_read_table.py::TestReadTableSuccessStories::test_read_table_reads_only_specified_columns >> test_read_table.py::TestReadTableSuccessStories::test_read_table_reads_only_specified_columns [GOOD] >> test_read_table.py::TestReadTableSuccessStories::test_read_table_without_data_has_snapshot >> test_read_table.py::TestReadTableSuccessStories::test_read_table_without_data_has_snapshot [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[1] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[2] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorKeep::test_restart_as_much_as_can [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[30] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[31] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_clickbench.py::TestClickbench::test_clickbench[31] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[32] >> PublishKafkaEndpoints::HaveEndpointInLookup [GOOD] >> PublishKafkaEndpoints::MetadataActorGetsEndpoint |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_clickbench.py::TestClickbench::test_clickbench[32] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[33] >> test_public_api.py::TestCRUDOperations::test_when_result_set_is_large_then_issue_occure [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[33] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[34] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_alter_compression.py::TestAlterCompression::test[alter_compression] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[34] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[35] >> test_restarts.py::TestRestartSingleBlock42::test_restart_single_node_is_ok [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[2] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[3] >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--false] >> test_read_table.py::TestReadTableTruncatedResults::test_truncated_results[async_read_table] >> test_clickbench.py::TestClickbench::test_clickbench[35] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[36] >> test_clickbench.py::TestClickbench::test_clickbench[36] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[37] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartSingleBlock42::test_restart_single_node_is_ok [GOOD] >> PublishKafkaEndpoints::MetadataActorGetsEndpoint [GOOD] >> PublishKafkaEndpoints::DiscoveryResponsesWithNoNode >> test_clickbench.py::TestClickbench::test_clickbench[37] [GOOD] >> 
test_clickbench.py::TestClickbench::test_clickbench[38] >> test_serverless.py::test_create_table[enable_alter_database_create_hive_first--false] >> test_read_table.py::TestReadTableTruncatedResults::test_truncated_results[async_read_table] [GOOD] >> test_read_table.py::TestReadTableTruncatedResults::test_truncated_results[read_table] >> test_clickbench.py::TestClickbench::test_clickbench[38] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[39] >> test_alter_tiering.py::TestAlterTiering::test[many_tables] >> test_public_api.py::TestSessionNotFound::test_session_not_found >> test_clickbench.py::TestClickbench::test_clickbench[39] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[40] >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--false] [GOOD] >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--true] >> test_read_table.py::TestReadTableTruncatedResults::test_truncated_results[read_table] [GOOD] >> test_restarts.py::TestRestartSingleMirror3DC::test_restart_single_node_is_ok [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[40] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[41] >> test_restarts.py::TestRestartClusterBlock42::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD] >> PublishKafkaEndpoints::DiscoveryResponsesWithNoNode [GOOD] >> PublishKafkaEndpoints::DiscoveryResponsesWithError >> test_serverless.py::test_create_table[enable_alter_database_create_hive_first--false] [GOOD] >> test_serverless.py::test_create_table[enable_alter_database_create_hive_first--true] >> test_clickbench.py::TestClickbench::test_clickbench[41] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[42] >> test_restarts.py::TestRestartClusterMirror34::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[3] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[4] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartClusterBlock42::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[42] [GOOD] >> test_restarts.py::TestRestartMultipleMirror34::test_tablets_are_successfully_started_after_few_killed_nodes [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/tpc/medium/py3test >> test_clickbench.py::TestClickbench::test_clickbench[42] [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartSingleMirror3DC::test_restart_single_node_is_ok [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartMultipleMirror34::test_tablets_are_successfully_started_after_few_killed_nodes [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartClusterMirror34::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test |99.5%| [TM] {asan, 
default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_restart.py::TestCmsStateStorageRestartsBlockMax::test_restart_as_much_as_can |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_config_with_metadata.py::TestKiKiMRWithoutMetadata::test_cluster_is_operational_without_metadata >> test_read_table.py::TestReadTableWithTabletKills::test_read_table_async_simple[async_read_table] >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorMax::test_restart_as_much_as_can >> test_cms_erasure.py::TestDegradedGroupMirror3dcMax::test_no_degraded_groups_after_shutdown >> PublishKafkaEndpoints::DiscoveryResponsesWithError [GOOD] >> PublishKafkaEndpoints::DiscoveryResponsesWithOtherPort |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_read_table.py::TestReadTableWithTabletKills::test_read_table_async_simple[async_read_table] [GOOD] >> test_read_table.py::TestReadTableWithTabletKills::test_read_table_async_simple[read_table] >> test_read_table.py::TestReadTableWithTabletKills::test_read_table_async_simple[read_table] [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_public_api.py::TestSessionNotFound::test_session_not_found [GOOD] >> test_cms_restart.py::TestCmsStateStorageRestartsBlockKeep::test_restart_as_much_as_can >> test_crud.py::TestManySelectsInRow::test_selects_in_row_success[500-500-50] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[4] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[5] >> test_restarts.py::TestRestartClusterMirror3DC::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD] >> test_config_with_metadata.py::TestKiKiMRWithoutMetadata::test_cluster_is_operational_without_metadata [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_session_pool >> PublishKafkaEndpoints::DiscoveryResponsesWithOtherPort [GOOD] >> PublishKafkaEndpoints::MetadataActorDoubleTopic >> test_restarts.py::TestRestartMultipleBlock42::test_tablets_are_successfully_started_after_few_killed_nodes [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_read_table.py::TestReadTableWithTabletKills::test_read_table_async_simple[read_table] [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartClusterMirror3DC::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_session_pool [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_ok_keep_alive_example [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_can_commit_bad_tx >> test_public_api.py::TestSessionNotFoundOperations::test_can_commit_bad_tx [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_cannot_commit_bad_tx [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_commit_successfully_after_success_commit >> test_public_api.py::TestSessionNotFoundOperations::test_commit_successfully_after_success_commit [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_invalid_keep_alive_example [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_describe_table_with_bounds >> test_public_api.py::TestSessionNotFoundOperations::test_describe_table_with_bounds [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_native_datetime_types >> 
test_public_api.py::TestSessionNotFoundOperations::test_native_datetime_types [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_native_date_types [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_keep_in_cache_disabled [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_explicit_partitions_case_1 >> test_public_api.py::TestSessionNotFoundOperations::test_explicit_partitions_case_1 [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_explict_partitions_case_2 >> test_public_api.py::TestSessionNotFoundOperations::test_explict_partitions_case_2 [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_simple_table_profile_settings [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestKiKiMRWithoutMetadata::test_cluster_is_operational_without_metadata [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_crud.py::TestManySelectsInRow::test_selects_in_row_success[500-500-50] [GOOD] >> test_result_limits.py::TestResultLimits::test_large_row |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartMultipleBlock42::test_tablets_are_successfully_started_after_few_killed_nodes [GOOD] >> test_cms_erasure.py::TestDegradedGroupMirror3dcKeep::test_no_degraded_groups_after_shutdown >> test_cms_state_storage.py::TestCmsStateStorageSimpleKeep::test_check_shutdown_state_storage_nodes |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TA] $(B)/ydb/tests/functional/blobstorage/test-results/py3test/{meta.json ... results_accumulator.log} |99.6%| [TA] {RESULT} $(B)/ydb/tests/functional/blobstorage/test-results/py3test/{meta.json ... 
results_accumulator.log}
|99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test
>> TSentinelUnstableTests::BSControllerCantChangeStatus [FAIL]
>> test_generate_dynamic_config.py::TestGenerateDynamicConfig::test_generate_dynamic_config
>> PublishKafkaEndpoints::MetadataActorDoubleTopic [GOOD]
>> Serialization::RequestHeader [GOOD]
>> Serialization::ResponseHeader [GOOD]
>> Serialization::ApiVersionsRequest [GOOD]
>> Serialization::ApiVersionsResponse [GOOD]
>> Serialization::ApiVersion_WithoutSupportedFeatures [GOOD]
>> Serialization::ProduceRequest [GOOD]
>> Serialization::UnsignedVarint32 [GOOD]
>> Serialization::UnsignedVarint64 [GOOD]
>> Serialization::Varint32 [GOOD]
>> Serialization::Varint64 [GOOD]
>> Serialization::UnsignedVarint32_Wrong [GOOD]
>> Serialization::UnsignedVarint64_Wrong [GOOD]
>> Serialization::UnsignedVarint32_Deserialize
>> Serialization::UnsignedVarint32_Deserialize [GOOD]
>> Serialization::TKafkaInt8_NotPresentVersion [GOOD]
>> Serialization::TKafkaInt8_PresentVersion_NotTaggedVersion [GOOD]
>> Serialization::TKafkaInt8_PresentVersion_TaggedVersion [GOOD]
>> Serialization::TKafkaInt8_PresentVersion_TaggedVersion_Default [GOOD]
>> Serialization::Struct_IsDefault [GOOD]
>> Serialization::TKafkaString_IsDefault [GOOD]
>> Serialization::TKafkaString_PresentVersion_NotTaggedVersion [GOOD]
>> Serialization::TKafkaString_PresentVersion_TaggedVersion [GOOD]
>> Serialization::TKafkaString_PresentVersion_TaggedVersion_Default [GOOD]
>> Serialization::TKafkaArray_IsDefault [GOOD]
>> Serialization::TKafkaArray_PresentVersion_NotTaggedVersion [GOOD]
>> Serialization::TKafkaArray_PresentVersion_TaggedVersion [GOOD]
>> Serialization::TKafkaArray_PresentVersion_TaggedVersion_Default [GOOD]
>> Serialization::TKafkaBytes_IsDefault [GOOD]
>> Serialization::TKafkaBytes_PresentVersion_NotTaggedVersion [GOOD]
>> Serialization::TKafkaBytes_PresentVersion_TaggedVersion [GOOD]
>> Serialization::TKafkaBytes_PresentVersion_TaggedVersion_Default [GOOD]
>> Serialization::TRequestHeaderData_reference [GOOD]
>> Serialization::TKafkaFloat64_PresentVersion_NotTaggedVersion [GOOD]
>> Serialization::RequestHeader_reference [GOOD]
>> Serialization::ProduceRequestData [GOOD]
>> Serialization::ProduceRequestData_Record_v0 [GOOD]
>> test_public_api.py::TestBadSession::test_simple
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kafka_proxy/ut/unittest >> Serialization::ProduceRequestData_Record_v0 [GOOD]
Test command err:
2025-03-31T16:37:29.494283Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488013698616341273:2268];send_to=[0:7307199536658146131:7762515];
2025-03-31T16:37:29.494376Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000645/r3tmp/tmp6WUT1y/pdisk_1.dat
2025-03-31T16:37:30.052546Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T16:37:30.124961Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T16:37:30.126560Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T16:37:30.136393Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 25895, node 1
2025-03-31T16:37:30.504009Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/0l5c/000645/r3tmp/yandex48vtrS.tmp 2025-03-31T16:37:30.504065Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/0l5c/000645/r3tmp/yandex48vtrS.tmp 2025-03-31T16:37:30.511306Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/0l5c/000645/r3tmp/yandex48vtrS.tmp 2025-03-31T16:37:30.511513Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:37:30.841315Z INFO: TTestServer started on Port 2205 GrpcPort 25895 TClient is connected to server localhost:2205 PQClient connected to localhost:25895 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:37:31.179334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:37:31.203885Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:37:31.218337Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-31T16:37:31.232496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T16:37:31.432132Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 2025-03-31T16:37:32.883867Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013711501243784:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:37:32.883966Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488013711501243775:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:37:32.884057Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T16:37:32.893337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-03-31T16:37:32.908042Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488013711501243790:2347], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-03-31T16:37:32.967362Z node 1 :TX_PROXY ERROR: Actor# [1:7488013711501243854:2452] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T16:37:33.394678Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488013711501243869:2353], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T16:37:33.413421Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzY0MjI1OWItMzVjZTVjNmItYTg5MDM4NmQtMzU5M2RmMDU=, ActorId: [1:7488013711501243773:2341], ActorState: ExecuteState, TraceId: 01jqph1g248drr5hsnnw0h5v9v, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:37:33.418805Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T16:37:33.460589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T16:37:33.527975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T16:37:33.626366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-03-31T16:37:34.078148Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jqph1gwk0x27bc0xw72f82v0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODYyNzIyZmUtNWY5YzdkMGQtMzJkYmFlNGMtZTUyNjNkZWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7488013720091178765:2641] 2025-03-31T16:37:34.492018Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488013698616341273:2268];send_to=[0:7307199536658146131:7762515]; 2025-03-31T16:37:34.492095Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok Run with port = 25895, kafka port = 24102 2025-03-31T16:37:41.487977Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488013750835301061:2214];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/0l5c/000645/r3tmp/tmpzJtLEM/pdisk_1.dat 2025-03-31T16:37:41.630235Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T16:37:41.767010Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T16:37:41.790766Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T16:37:41.790848Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T16:37:41.796575Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61812, node 2 2025-03-31T16:37:41.951769Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/0l5c/000645/r3tmp/yandexrg1auV.tmp 2025-03-31T16:37:41.951796Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/0l5c/000645/r3tmp/yandexrg1auV.tmp 2025-03-31T16:37:41.951924Z node 2 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/0l5c/000645/r3tmp/yandexrg1auV.tmp 2025-03-31T16:37:41.952047Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T16:37:42.033951Z INFO: TTestServer started on Port 8101 GrpcPort 61812 TClient is connected to server localhost:8101 PQClient connected to localhost:61812 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T16:37:42.285664Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 7 ... KQP_SESSION WARN: SessionId: ydb://session/3?node_id=29&id=NTBjZmRhMjMtNzRiMjFhMTctNWViODA2NTItNjc3NzdiNjA=, ActorId: [29:7488015869839196291:2356], ActorState: ExecuteState, TraceId: 01jqphgt8c23xnna48e59f52tg, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T16:45:55.082710Z node 29 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" 
end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T16:45:55.292133Z node 29 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-03-31T16:45:55.786436Z node 29 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jqphgtvb84s8b0w2cpxpkenk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=29&id=NDQ1ZGQ4M2QtZmU1MzczNWQtYzE2OTQ5ODUtNTdmNDIzNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [29:7488015874134164004:2667] 2025-03-31T16:45:59.772974Z node 29 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:45:59.773023Z node 29 :IMPORT WARN: Table profiles were not loaded === CheckClustersList. Ok PQ Client: create topic: rt3.dc1--topic1 with 1 partitions CallPersQueueGRPC request to localhost:4050 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--topic1" } } CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic1, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:4050 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--topic1" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } LocalDC: true ReadRules: "user" ReadFromTimestampsMs: 0 ConsumerFormatVersions: 0 ConsumerCodecs { } Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lzop" } ReadRuleVersions: 0 } } } CallPersQueueGRPC response: Status: 129 ProxyErrorCode: 53 SchemeStatus: 1 FlatTxId { TxId: 281474976710673 SchemeShardTabletId: 72057594046644480 PathId: 13 } ErrorCode: OK AddTopic: rt3.dc1--topic1 ===Run query:``DECLARE $version as Int64; DECLARE $path AS Utf8; DECLARE $cluster as Utf8; UPSERT INTO `/Root/PQ/Config/V2/Topics` (path, dc) VALUES ($path, $cluster); UPSERT INTO `/Root/PQ/Config/V2/Versions` (name, version) VALUES ("Topics", $version);`` with topic = topic1, dc = dc1 2025-03-31T16:46:01.529640Z node 29 :KQP_EXECUTER ERROR: TxId: 281474976710676. Ctx: { TraceId: 01jqphh0j98wmp6syfbze46886, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=29&id=ZWMyYWMyNDUtNTlkNjA4NGItY2JhOWY3YjItNDEwNjAxMDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root ===Query complete TClient::Ls request: /Root/PQ/rt3.dc1--topic1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "rt3.dc1--topic1" PathId: 13 SchemeshardId: 72057594046644480 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 281474976710673 CreateStep: 1743439561297 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186224037893 } PersQueueGroup { Name: "rt3.dc1--topic1" PathId: 13 TotalGroupCount: 1 PartitionPerTablet: 5 PQTabletConfig { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20... (TRUNCATED) GetTopicVersionFromPath: record Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "rt3.dc1--topic1" PathId: 13 SchemeshardId: 72057594046644480 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 281474976710673 CreateStep: 1743439561297 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186224037893 } PersQueueGroup { Name: "rt3.dc1--topic1" PathId: 13 TotalGroupCount: 1 PartitionPerTablet: 5 PQTabletConfig { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } LocalDC: true ReadRules: "user" ReadFromTimestampsMs: 0 ConsumerFormatVersions: 0 ConsumerCodecs { } Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lzop" } ReadRuleVersions: 0 YdbDatabasePath: "/Root" } Partitions { PartitionId: 0 TabletId: 72075186224037892 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186224037893 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 12 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } Path: "/Root/PQ/rt3.dc1--topic1" name rt3.dc1--topic1 version1 CallPersQueueGRPC request to localhost:4050 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--topic1" } } CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic1, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:4050 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--topic1" } } CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic1, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:4050 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--topic1" } } CallPersQueueGRPC response: Status: 1 ErrorCode: OK 
MetaResponse { CmdGetTopicMetadataResult { TopicInfo { Topic: "rt3.dc1--topic1" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } Version: 1 LocalDC: true Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lzop" } TopicPath: "/Root/PQ/rt3.dc1--topic1" YdbDatabasePath: "/Root" Consumers { Name: "user" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } Version: 0 Important: false } } ErrorCode: OK } } } === Topic created, have version: 1 2025-03-31T16:46:02.601617Z :DEBUG: [] MessageGroupId [src] SessionId [] Write session: try to update token 2025-03-31T16:46:02.602794Z :INFO: [] MessageGroupId [src] SessionId [] Write session: Do CDS request 2025-03-31T16:46:02.602902Z :INFO: [] MessageGroupId [src] SessionId [] Start write session. Will connect to endpoint: localhost:4050 2025-03-31T16:46:02.615224Z :DEBUG: [] MessageGroupId [src] SessionId [] Write session: send init request: init_request { topic: "topic1" message_group_id: "src" } 2025-03-31T16:46:03.311192Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1743439563311 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-31T16:46:03.311366Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|b2e83510-3ab5da9e-1b6fe2fe-87824648_0" topic: "topic1" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2025-03-31T16:46:03.311603Z :INFO: [] MessageGroupId [src] SessionId [src|b2e83510-3ab5da9e-1b6fe2fe-87824648_0] Write session: close. 
Timeout = 0 ms 2025-03-31T16:46:03.311708Z :INFO: [] MessageGroupId [src] SessionId [src|b2e83510-3ab5da9e-1b6fe2fe-87824648_0] Write session will now close 2025-03-31T16:46:03.311819Z :DEBUG: [] MessageGroupId [src] SessionId [src|b2e83510-3ab5da9e-1b6fe2fe-87824648_0] Write session: aborting 2025-03-31T16:46:03.312996Z :INFO: [] MessageGroupId [src] SessionId [src|b2e83510-3ab5da9e-1b6fe2fe-87824648_0] Write session: gracefully shut down, all writes complete 2025-03-31T16:46:03.313019Z :DEBUG: [] MessageGroupId [src] SessionId [src|b2e83510-3ab5da9e-1b6fe2fe-87824648_0] Write session is aborting and will not restart 2025-03-31T16:46:03.313144Z :DEBUG: [] MessageGroupId [src] SessionId [src|b2e83510-3ab5da9e-1b6fe2fe-87824648_0] Write session: destroy Broker 29 - [::]:25484 >>>>> Check value=0 >>>>> Check value=1 >>>>> Check value=127 >>>>> Check value=128 >>>>> Check value=32191 >>>>> Check value=2147483647 >>>>> Check value=4294967295 >>>>> Check value=0 >>>>> Check value=1 >>>>> Check value=127 >>>>> Check value=128 >>>>> Check value=32191 >>>>> Check value=2147483647 >>>>> Check value=2147483648 >>>>> Check value=9223372036854775807 >>>>> Check value=18446744073709551615 >>>>> Check value=-2147483648 >>>>> Check value=-167966 >>>>> Check value=-1 >>>>> Check value=0 >>>>> Check value=1 >>>>> Check value=127 >>>>> Check value=128 >>>>> Check value=32191 >>>>> Check value=2147483647 >>>>> Check value=-9223372036854775808 >>>>> Check value=-2147483648 >>>>> Check value=-167966 >>>>> Check value=-1 >>>>> Check value=0 >>>>> Check value=1 >>>>> Check value=127 >>>>> Check value=128 >>>>> Check value=32191 >>>>> Check value=2147483648 >>>>> Check value=9223372036854775807 >>>>> Buffer size: 251 >>>>> Buffer size: 104 |99.6%| [TM] {RESULT} ydb/core/kafka_proxy/ut/unittest |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_tpch.py::TestTpchS1::test_tpch[5] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[6] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestConfigWithMetadataMirrorMax::test_cluster_is_operational_with_metadata >> test_config_with_metadata.py::TestConfigWithoutMetadataBlock::test_cluster_is_operational_without_metadata >> test_serverless.py::test_create_table[enable_alter_database_create_hive_first--true] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut_sentinel_unstable/unittest >> TSentinelUnstableTests::BSControllerCantChangeStatus [FAIL] >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--false] 2025-03-31 16:46:06,850 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-03-31 16:46:06,892 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. 
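The BSControllerCantChangeStatus failure above is the harness's hard timeout rather than a test assertion: the wrapper polls the child test process, raises TimeoutError once the 600 s budget is spent, and then dumps the process tree (below). A minimal sketch of that poll-with-deadline pattern, assuming a generic subprocess; this is illustrative only, not the actual yatest.common.process code:

    import subprocess
    import time

    def wait_for(check, timeout_s: float, poll_s: float = 0.5) -> None:
        # Poll `check` until it returns truthy or the deadline passes.
        deadline = time.monotonic() + timeout_s
        while time.monotonic() < deadline:
            if check():
                return
            time.sleep(poll_s)
        raise TimeoutError(f"{timeout_s} second(s) wait timeout has expired")

    proc = subprocess.Popen(["sleep", "1"])
    wait_for(lambda: proc.poll() is not None, timeout_s=600)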
Process tree before termination: pid rss ref pdirt 697071 46.3M 44.6M 23.2M test_tool run_ut @/home/runner/.ya/build/build_root/0l5c/000558/ydb/core/cms/ut_sentinel_unstable/test-results/unittest/testing_out_stuff/test_tool.args Test command err: 2025-03-31T16:36:12.081371Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2025-03-31T16:36:12.081468Z node 1 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2025-03-31T16:36:12.081550Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateState 2025-03-31T16:36:12.081596Z node 1 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2025-03-31T16:36:12.081650Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2025-03-31T16:36:12.081781Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2025-03-31T16:36:12.083827Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "node-1" State: UNKNOWN Devices { Name: "pdisk-1-4" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-5" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-6" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-7" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 1 InterconnectPort: 10000 Location { Rack: "rack-1" } StartTimeSeconds: 0 } Hosts { Name: "node-2" State: UNKNOWN Devices { Name: "pdisk-2-8" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-9" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-10" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-11" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 2 InterconnectPort: 10000 Location { Rack: "rack-2" } StartTimeSeconds: 0 } Hosts { Name: "node-3" State: UNKNOWN Devices { Name: "pdisk-3-12" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-13" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-14" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-15" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 3 InterconnectPort: 10000 Location { Rack: "rack-3" } StartTimeSeconds: 0 } Hosts { Name: "node-4" State: UNKNOWN Devices { Name: "pdisk-4-16" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-17" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-18" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-19" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 4 InterconnectPort: 10000 Location { Rack: "rack-4" } StartTimeSeconds: 0 } Hosts { Name: "node-5" State: UNKNOWN Devices { Name: "pdisk-5-20" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-21" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-22" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-23" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 5 InterconnectPort: 10000 Location { Rack: "rack-5" } StartTimeSeconds: 0 } Hosts { Name: "node-6" State: UNKNOWN Devices { Name: "pdisk-6-24" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-25" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-26" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-27" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 6 InterconnectPort: 10000 Location { Rack: "rack-6" } StartTimeSeconds: 0 } Hosts { Name: "node-7" State: UNKNOWN Devices { Name: "pdisk-7-28" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-29" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-30" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-31" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 7 InterconnectPort: 10000 Location { Rack: "rack-7" } StartTimeSeconds: 0 } Hosts { Name: "node-8" State: UNKNOWN Devices { Name: "pdisk-8-32" State: DOWN Timestamp: 0 } 
Devices { Name: "pdisk-8-33" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-34" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-35" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 8 InterconnectPort: 10000 Location { Rack: "rack-8" } StartTimeSeconds: 0 } } 2025-03-31T16:36:12.099517Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 4 Path: "/1/pdisk-4.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 5 Path: "/1/pdisk-5.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 6 Path: "/1/pdisk-6.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 7 Path: "/1/pdisk-7.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 8 Path: "/2/pdisk-8.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 9 Path: "/2/pdisk-9.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 10 Path: "/2/pdisk-10.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 11 Path: "/2/pdisk-11.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 12 Path: "/3/pdisk-12.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 13 Path: "/3/pdisk-13.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 14 Path: "/3/pdisk-14.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 15 Path: "/3/pdisk-15.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 16 Path: "/4/pdisk-16.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 17 Path: "/4/pdisk-17.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 18 Path: "/4/pdisk-18.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 19 Path: "/4/pdisk-19.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 20 Path: "/5/pdisk-20.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 21 Path: "/5/pdisk-21.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 22 Path: "/5/pdisk-22.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 23 Path: "/5/pdisk-23.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 24 Path: "/6/pdisk-24.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 25 Path: "/6/pdisk-25.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 26 Path: "/6/pdisk-26.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 27 Path: "/6/pdisk-27.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 28 Path: "/7/pdisk-28.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 29 Path: "/7/pdisk-29.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 30 Path: "/7/pdisk-30.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 31 Path: "/7/pdisk-31.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 32 Path: "/8/pdisk-32.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 33 Path: "/8/pdisk-33.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 34 Path: "/8/pdisk-34.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 35 Path: "/8/pdisk-35.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1000 } GroupId: 4 
GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1000 } GroupId: 8 
GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDom ... 14748364800 State: Normal } PDiskStateInfo { PDiskId: 6 CreateTime: 0 ChangeTime: 0 Path: "/1/pdisk-6.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 7 CreateTime: 0 ChangeTime: 0 Path: "/1/pdisk-7.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 824760027 2025-03-31T16:46:02.168812Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 2, response# PDiskStateInfo { PDiskId: 8 CreateTime: 0 ChangeTime: 0 Path: "/2/pdisk-8.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 9 CreateTime: 0 ChangeTime: 0 Path: "/2/pdisk-9.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 10 CreateTime: 0 ChangeTime: 0 Path: "/2/pdisk-10.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 11 CreateTime: 0 ChangeTime: 0 Path: "/2/pdisk-11.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 824760027 2025-03-31T16:46:02.169096Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 3, response# PDiskStateInfo { PDiskId: 12 CreateTime: 0 ChangeTime: 0 Path: "/3/pdisk-12.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 13 CreateTime: 0 ChangeTime: 0 Path: "/3/pdisk-13.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 14 CreateTime: 0 ChangeTime: 0 Path: "/3/pdisk-14.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 15 CreateTime: 0 ChangeTime: 0 Path: "/3/pdisk-15.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 824760027 
2025-03-31T16:46:02.169255Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 4, response# PDiskStateInfo { PDiskId: 16 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-16.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 17 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-17.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 18 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-18.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 19 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-19.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 824760027 2025-03-31T16:46:02.169404Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 5, response# PDiskStateInfo { PDiskId: 20 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-20.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 21 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-21.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 22 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-22.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 23 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-23.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 824760027 2025-03-31T16:46:02.169558Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 6, response# PDiskStateInfo { PDiskId: 24 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-24.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 25 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-25.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 26 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-26.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 27 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-27.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 824760027 2025-03-31T16:46:02.169710Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 7, response# PDiskStateInfo { PDiskId: 28 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-28.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 29 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-29.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 30 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-30.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 31 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-31.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 824760027 2025-03-31T16:46:02.169855Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 8, response# PDiskStateInfo { PDiskId: 32 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-32.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 33 CreateTime: 0 ChangeTime: 0 Path: 
"/8/pdisk-33.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 34 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-34.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 35 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-35.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 824760027 2025-03-31T16:46:02.169919Z node 1 :CMS DEBUG: [Sentinel] [Main] State was updated in 0.000000s 2025-03-31T16:46:02.180670Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateState 2025-03-31T16:46:02.180737Z node 1 :CMS DEBUG: [Sentinel] [Main] Start StateUpdater 2025-03-31T16:46:02.180861Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 1, wbId# [1:8388350642965737326:1634689637] 2025-03-31T16:46:02.180910Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 2, wbId# [2:8388350642965737326:1634689637] 2025-03-31T16:46:02.180947Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 3, wbId# [3:8388350642965737326:1634689637] 2025-03-31T16:46:02.180979Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 4, wbId# [4:8388350642965737326:1634689637] 2025-03-31T16:46:02.181013Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 5, wbId# [5:8388350642965737326:1634689637] 2025-03-31T16:46:02.181048Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 6, wbId# [6:8388350642965737326:1634689637] 2025-03-31T16:46:02.181079Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 7, wbId# [7:8388350642965737326:1634689637] 2025-03-31T16:46:02.181111Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 8, wbId# [8:8388350642965737326:1634689637] 2025-03-31T16:46:02.181448Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 1, response# PDiskStateInfo { PDiskId: 4 CreateTime: 0 ChangeTime: 0 Path: "/1/pdisk-4.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 5 CreateTime: 0 ChangeTime: 0 Path: "/1/pdisk-5.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 6 CreateTime: 0 ChangeTime: 0 Path: "/1/pdisk-6.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 7 CreateTime: 0 ChangeTime: 0 Path: "/1/pdisk-7.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 824770027 2025-03-31T16:46:02.182214Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 8, response# PDiskStateInfo { PDiskId: 32 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-32.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 33 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-33.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 34 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-34.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 35 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-35.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 824770027 (TWithBackTrace) ydb/library/actors/testlib/test_runtime.cpp:1262: Dispatched 2500000 events, limit 
reached. TBackTrace::Capture()+28 (0xFD0B74C) TWithBackTrace::TWithBackTrace<>()+80 (0xF92A0C0) NActors::TTestActorRuntimeBase::DispatchEventsInternal(NActors::TDispatchOptions const&, TInstant)+26278 (0x2EF7CAF6) NActors::TTestActorRuntimeBase::DispatchEvents(NActors::TDispatchOptions const&)+49 (0x2EF76331) NKikimr::NCmsTest::TTestEnv::SetPDiskState(TSet, std::__y1::allocator> const&, NKikimrBlobStorage::TPDiskState_E, NKikimrBlobStorage::EDriveStatus)+901 (0xF95BFD5) NKikimr::NCmsTest::NTestSuiteTSentinelUnstableTests::TTestCaseBSControllerCantChangeStatus::Execute_(NUnitTest::TTestContext&)+2539 (0xF95AB5B) std::__y1::__function::__func, void ()>::operator()()+280 (0xF95E7A8) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x101F6D56) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x101CF969) NKikimr::NCmsTest::NTestSuiteTSentinelUnstableTests::TCurrentTest::Execute()+1204 (0xF95D654) NUnitTest::TTestFactory::Execute()+2438 (0x101D1236) NUnitTest::RunMain(int, char**)+5213 (0x101F12CD) ??+0 (0x7FEAC93F6D90) __libc_start_main+128 (0x7FEAC93F6E40) _start+41 (0xD255029) Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8330113388/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/0l5c/000558/ydb/core/cms/ut_sentinel_unstable/test-results/unittest/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1749, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8330113388/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/0l5c/000558/ydb/core/cms/ut_sentinel_unstable/test-results/unittest/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout",), {}) >> test_tpch.py::TestTpchS1::test_tpch[6] [GOOD] >> test_generate_dynamic_config.py::TestGenerateDynamicConfig::test_generate_dynamic_config [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[7] |99.6%| [TM] {RESULT} ydb/core/cms/ut_sentinel_unstable/unittest >> test_cms_erasure.py::TestDegradedGroupBlock42Max::test_no_degraded_groups_after_shutdown [GOOD] >> test_restarts.py::TestRestartMultipleMirror3DC::test_tablets_are_successfully_started_after_few_killed_nodes [GOOD] >> test_public_api.py::TestBadSession::test_simple [GOOD] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_generate_dynamic_config.py::TestGenerateDynamicConfig::test_generate_dynamic_config [GOOD] >> test_config_with_metadata.py::TestKiKiMRStoreConfigDir::test_config_stored_in_config_store |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_erasure.py::TestDegradedGroupBlock42Max::test_no_degraded_groups_after_shutdown [GOOD] |99.6%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartMultipleMirror3DC::test_tablets_are_successfully_started_after_few_killed_nodes [GOOD] >> test_config_with_metadata.py::TestConfigWithMetadataMirrorMax::test_cluster_is_operational_with_metadata [GOOD] |99.6%| [TA] $(B)/ydb/tests/functional/restarts/test-results/py3test/{meta.json ... results_accumulator.log} >> test_public_api.py::TestDriverCanRecover::test_driver_recovery |99.6%| [TA] {RESULT} $(B)/ydb/tests/functional/restarts/test-results/py3test/{meta.json ... results_accumulator.log} >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--true] [GOOD] >> test_generate_dynamic_config.py::TestGenerateDynamicConfigFromConfigDir::test_generate_dynamic_config_from_config_store ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/column_family/compression/py3test >> alter_compression.py::TestAlterCompression::test_all_supported_compression 2025-03-31 16:46:11,962 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-03-31 16:46:12,603 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination: pid rss ref pdirt 698483 810M 771M 737M ydb-tests-olap-column_family-compression --basetemp /home/runner/.ya/build/build_root/0l5c/000861/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor 700263 5.3G 5.1G 4.7G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/0l5c/000861/ydb/tests/olap/column_family/compression/test-results/py3test/testing_ou Test command err: File "library/python/pytest/main.py", line 101, in main rc = pytest.main( File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main( File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main return wrap_session(config, _main) File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session session.exitstatus = doit(config, session) or 0 File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main config.hook.pytest_runtestloop(session=session) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File 
"contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol runtestprotocol(item, nextitem=nextitem) File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol reports.append(call_and_report(item, "call", log)) File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report call = call_runtest_hook(item, when, **kwds) File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook return CallInfo.from_call( File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call result: Optional[TResult] = func() File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in lambda: ihook(item=item, **kwds), when=when, reraise=reraise File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call item.runtest() File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest self.ihook.pytest_pyfunc_call(pyfuncitem=self) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call pyfuncitem.retval = testfunction(**testargs) File "ydb/tests/olap/column_family/compression/alter_compression.py", line 102, in test_all_supported_compression tasks.start_and_wait_all() File "ydb/tests/olap/common/thread_helper.py", line 49, in start_and_wait_all self.join_all() File "ydb/tests/olap/common/thread_helper.py", line 45, in join_all thread.join(timeout=timeout) File "ydb/tests/olap/common/thread_helper.py", line 16, in join super().join(timeout) File "contrib/tools/python3/Lib/threading.py", line 1149, in join self._wait_for_tstate_lock() File "contrib/tools/python3/Lib/threading.py", line 1169, in _wait_for_tstate_lock if lock.acquire(block, timeout): File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown traceback.print_stack(file=sys.stderr) Current thread 0x00007fd5f5a9fa00 (most recent call first): Garbage-collecting Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: ...olumn_family-compression', '--basetemp', '/home/runner/.ya/build/build_root/0l5c/000861/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/0l5c/000861/ydb/tests/olap/column_family/compression/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/0l5c/000861', 
'--source-root', '/home/runner/.ya/build/build_root/0l5c/000861/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/0l5c/000861/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/column_family/compression', '--test-tool-bin', '/home/runner/.ya/tools/v4/8330113388/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--dep-root', 'ydb/tests/olap/column_family/compression', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1749, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("...olumn_family-compression', '--basetemp', '/home/runner/.ya/build/build_root/0l5c/000861/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/0l5c/000861/ydb/tests/olap/column_family/compression/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/0l5c/000861', '--source-root', '/home/runner/.ya/build/build_root/0l5c/000861/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/0l5c/000861/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/column_family/compression', '--test-tool-bin', '/home/runner/.ya/tools/v4/8330113388/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--dep-root', 'ydb/tests/olap/column_family/compression', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout",), {}) 2025-03-31 16:46:43,696 WARNING library.python.cores: Core dump dir doesn't exist: /coredumps 2025-03-31 16:46:43,703 WARNING library.python.cores: Core dump dir doesn't exist: /var/tmp/cores |99.6%| [TM] {RESULT} ydb/tests/olap/column_family/compression/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestConfigWithoutMetadataBlock::test_cluster_is_operational_without_metadata [GOOD] |99.6%| [TM] {asan, default-linux-x86_64, 
release} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestConfigWithMetadataMirrorMax::test_cluster_is_operational_with_metadata [GOOD] >> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--false] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_generate_dynamic_config.py::TestGenerateDynamicConfigFromConfigDir::test_generate_dynamic_config_from_config_store [GOOD] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestConfigWithoutMetadataBlock::test_cluster_is_operational_without_metadata [GOOD] >> test_public_api.py::TestDriverCanRecover::test_driver_recovery [GOOD] >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--false] [GOOD] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_config_with_metadata.py::TestKiKiMRStoreConfigDir::test_cluster_works_with_auto_conf_dir |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_generate_dynamic_config.py::TestGenerateDynamicConfigFromConfigDir::test_generate_dynamic_config_from_config_store [GOOD] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/s3_import/py3test >> test_tpch_import.py::TestS3TpchImport::test_import_and_export 2025-03-31 16:46:36,764 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-03-31 16:46:37,531 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination: pid rss ref pdirt 711066 613M 574M 538M ydb-tests-olap-s3_import --basetemp /home/runner/.ya/build/build_root/0l5c/000794/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-module 716101 7.9G 7.6G 7.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/0l5c/000794/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff/test_tpc 721989 503M 495M 482M └─ moto_server s3 --port 3295 Test command err: File "library/python/pytest/main.py", line 101, in main rc = pytest.main( File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main( File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main return wrap_session(config, _main) File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session session.exitstatus = doit(config, session) or 0 File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main config.hook.pytest_runtestloop(session=session) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File 
"contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol runtestprotocol(item, nextitem=nextitem) File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol reports.append(call_and_report(item, "call", log)) File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report call = call_runtest_hook(item, when, **kwds) File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook return CallInfo.from_call( File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call result: Optional[TResult] = func() File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in lambda: ihook(item=item, **kwds), when=when, reraise=reraise File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call item.runtest() File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest self.ihook.pytest_pyfunc_call(pyfuncitem=self) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call pyfuncitem.retval = testfunction(**testargs) File "ydb/tests/olap/s3_import/test_tpch_import.py", line 97, in test_import_and_export self.ydb_client.query("INSERT INTO s3_table SELECT * FROM lineitem") File "ydb/tests/olap/common/ydb_client.py", line 24, in query return self.session_pool.execute_with_retries(statement) File "contrib/python/ydb/py3/ydb/query/pool.py", line 202, in execute_with_retries return retry_operation_sync(wrapped_callee, retry_settings) File "contrib/python/ydb/py3/ydb/retries.py", line 133, in retry_operation_sync for next_opt in opt_generator: File "contrib/python/ydb/py3/ydb/retries.py", line 94, in retry_operation_impl result = YdbRetryOperationFinalResult(callee(*args, **kwargs)) File "contrib/python/ydb/py3/ydb/query/pool.py", line 200, in wrapped_callee return [result_set for result_set in it] File "contrib/python/ydb/py3/ydb/_utilities.py", line 173, in __next__ return self._next() File "contrib/python/ydb/py3/ydb/_utilities.py", line 164, in _next res = self.wrapper(next(self.it)) File 
"contrib/python/grpcio/py3/grpc/_channel.py", line 475, in __next__ return self._next() File "contrib/python/grpcio/py3/grpc/_channel.py", line 872, in _next _common.wait(self._state.condition.wait, _response_ready) File "contrib/python/grpcio/py3/grpc/_common.py", line 150, in wait _wait_once(wait_fn, MAXIMUM_WAIT_TIMEOUT, spin_cb) File "contrib/python/grpcio/py3/grpc/_common.py", line 112, in _wait_once wait_fn(timeout=timeout) File "contrib/tools/python3/Lib/threading.py", line 359, in wait gotit = waiter.acquire(True, timeout) File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown traceback.print_stack(file=sys.stderr) Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: ...unner/.ya/build/build_root/0l5c/000794/ydb/tests/olap/s3_import/ydb-tests-olap-s3_import', '--basetemp', '/home/runner/.ya/build/build_root/0l5c/000794/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/0l5c/000794/ydb/tests/olap/s3_import/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/0l5c/000794', '--source-root', '/home/runner/.ya/build/build_root/0l5c/000794/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/0l5c/000794/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/s3_import', '--test-tool-bin', '/home/runner/.ya/tools/v4/8330113388/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--dep-root', 'ydb/tests/olap/s3_import', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1749, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("...unner/.ya/build/build_root/0l5c/000794/ydb/tests/olap/s3_import/ydb-tests-olap-s3_import', '--basetemp', '/home/runner/.ya/build/build_root/0l5c/000794/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/0l5c/000794/ydb/tests/olap/s3_import/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/0l5c/000794', '--source-root', '/home/runner/.ya/build/build_root/0l5c/000794/environment/arcadia', '--output-dir', 
'/home/runner/.ya/build/build_root/0l5c/000794/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/s3_import', '--test-tool-bin', '/home/runner/.ya/tools/v4/8330113388/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--dep-root', 'ydb/tests/olap/s3_import', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout",), {}) |99.7%| [TM] {RESULT} ydb/tests/olap/s3_import/py3test >> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--false] [GOOD] >> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--true] >> test_config_with_metadata.py::TestKiKiMRStoreConfigDir::test_config_stored_in_config_store [GOOD] >> test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--false] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestKiKiMRStoreConfigDir::test_cluster_works_with_auto_conf_dir [GOOD] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_public_api.py::TestSelectAfterDropWithRepetitions::test_select_on_dropped_table_unsuccessful[10] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/mem_alloc/py3test >> test_result_limits.py::TestResultLimits::test_large_row 2025-03-31 16:46:41,484 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-03-31 16:46:42,211 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. 
Process tree before termination: pid rss ref pdirt 715314 807M 774M 439M ydb-tests-fq-mem_alloc --basetemp /home/runner/.ya/build/build_root/0l5c/000a8f/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-modules 892112 733M 16.5M 428M ├─ ydb-tests-fq-mem_alloc --basetemp /home/runner/.ya/build/build_root/0l5c/000a8f/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-mo 892113 5.4G 5.4G 5.0G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/0l5c/000a8f/ydb/tests/fq/mem_alloc/test-results/py3test/testing_out_stuff/test_resul Test command err: File "library/python/pytest/main.py", line 101, in main rc = pytest.main( File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main( File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main return wrap_session(config, _main) File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session session.exitstatus = doit(config, session) or 0 File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main config.hook.pytest_runtestloop(session=session) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol runtestprotocol(item, nextitem=nextitem) File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol reports.append(call_and_report(item, "call", log)) File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report call = call_runtest_hook(item, when, **kwds) File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook return CallInfo.from_call( File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call result: Optional[TResult] = func() File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in lambda: ihook(item=item, **kwds), when=when, reraise=reraise File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in 
_hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call item.runtest() File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest self.ihook.pytest_pyfunc_call(pyfuncitem=self) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call pyfuncitem.retval = testfunction(**testargs) File "ydb/tests/fq/mem_alloc/test_result_limits.py", line 92, in test_large_row client.wait_query_status(query_id, fq.QueryMeta.FAILED, timeout=600) File "ydb/tests/tools/fq_runner/fq_client.py", line 307, in wait_query_status return self.wait_query(query_id, timeout, statuses=statuses).query.meta.status File "ydb/tests/tools/fq_runner/fq_client.py", line 302, in wait_query time.sleep(plain_or_under_sanitizer(0.5, 2)) File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown traceback.print_stack(file=sys.stderr) Current thread 0x00007fcc4818e940 (most recent call first): Garbage-collecting Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: ...d '['/home/runner/.ya/build/build_root/0l5c/000a8f/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc', '--basetemp', '/home/runner/.ya/build/build_root/0l5c/000a8f/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/0l5c/000a8f/ydb/tests/fq/mem_alloc/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/0l5c/000a8f', '--source-root', '/home/runner/.ya/build/build_root/0l5c/000a8f/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/0l5c/000a8f/ydb/tests/fq/mem_alloc/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/fq/mem_alloc', '--test-tool-bin', '/home/runner/.ya/tools/v4/8330113388/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--dep-root', 'ydb/tests/fq/mem_alloc', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1749, in main 
res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("...d '['/home/runner/.ya/build/build_root/0l5c/000a8f/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc', '--basetemp', '/home/runner/.ya/build/build_root/0l5c/000a8f/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/0l5c/000a8f/ydb/tests/fq/mem_alloc/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/0l5c/000a8f', '--source-root', '/home/runner/.ya/build/build_root/0l5c/000a8f/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/0l5c/000a8f/ydb/tests/fq/mem_alloc/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/fq/mem_alloc', '--test-tool-bin', '/home/runner/.ya/tools/v4/8330113388/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--dep-root', 'ydb/tests/fq/mem_alloc', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout",), {}) 2025-03-31 16:47:16,190 WARNING library.python.cores: Core dump dir doesn't exist: /coredumps 2025-03-31 16:47:16,190 WARNING library.python.cores: Core dump dir doesn't exist: /var/tmp/cores |99.7%| [TM] {RESULT} ydb/tests/fq/mem_alloc/py3test >> test_tpch.py::TestTpchS1::test_tpch[7] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[8] >> test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--false] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestKiKiMRStoreConfigDir::test_config_stored_in_config_store [GOOD] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--true] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestKiKiMRStoreConfigDir::test_cluster_works_with_auto_conf_dir [GOOD] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test |99.7%| [TA] $(B)/ydb/tests/functional/config/test-results/py3test/{meta.json ... results_accumulator.log} |99.7%| [TA] {RESULT} $(B)/ydb/tests/functional/config/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_public_api.py::TestSelectAfterDropWithRepetitions::test_select_on_dropped_table_unsuccessful[10] [GOOD] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--false] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/scenario/py3test >> test_alter_tiering.py::TestAlterTiering::test[many_tables] 2025-03-31 16:46:54,744 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-03-31 16:46:55,256 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination: pid rss ref pdirt 727212 2.1G 2.1G 2.0G ydb-tests-olap-scenario --basetemp /home/runner/.ya/build/build_root/0l5c/000659/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-modules 879104 2.8G 2.8G 2.3G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/0l5c/000659/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/test_alte Test command err: File "library/python/pytest/main.py", line 101, in main rc = pytest.main( File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main( File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main return wrap_session(config, _main) File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session session.exitstatus = doit(config, session) or 0 File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main config.hook.pytest_runtestloop(session=session) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol runtestprotocol(item, nextitem=nextitem) File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol reports.append(call_and_report(item, "call", log)) File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report call = call_runtest_hook(item, when, **kwds) File 
"contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook return CallInfo.from_call( File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call result: Optional[TResult] = func() File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in lambda: ihook(item=item, **kwds), when=when, reraise=reraise File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call item.runtest() File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest self.ihook.pytest_pyfunc_call(pyfuncitem=self) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call pyfuncitem.retval = testfunction(**testargs) File "/home/runner/.ya/build/build_root/0l5c/000659/environment/arcadia/ydb/tests/olap/scenario/conftest.py", line 88, in test ctx.executable(self, ctx) File "ydb/tests/olap/scenario/test_alter_tiering.py", line 353, in scenario_many_tables threads.start_and_wait_all() File "ydb/tests/olap/common/thread_helper.py", line 49, in start_and_wait_all self.join_all() File "ydb/tests/olap/common/thread_helper.py", line 45, in join_all thread.join(timeout=timeout) File "ydb/tests/olap/common/thread_helper.py", line 16, in join super().join(timeout) File "contrib/tools/python3/Lib/threading.py", line 1149, in join self._wait_for_tstate_lock() File "contrib/tools/python3/Lib/threading.py", line 1169, in _wait_for_tstate_lock if lock.acquire(block, timeout): File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown traceback.print_stack(file=sys.stderr) Thread 0x00007f6c492ef640 (most recent call first): File "contrib/python/Werkzeug/py3/werkzeug/routing/map.py"Fatal Python error: Segmentation fault Extension modules: run_import_hook, __res, sitecustomize, library.python.strings.strings, google.protobuf.pyext._message, google3.net.proto2.python.internal.cpp._message, grpc._cython.cygrpc, yaml._yaml, markupsafe._speedups, numpy.core._multiarray_umath, numpy.core._multiarray_tests, numpy.linalg._umath_linalg, numpy.fft._pocketfft_internal, numpy.random._common, numpy.random.bit_generator, numpy.random._bounded_integers, numpy.random._mt19937, numpy.random.mtrand, numpy.random._philox, numpy.random._pcg64, numpy.random._sfc64, numpy.random._generator (total: 22) AddressSanitizer:DEADLYSIGNAL ================================================================= ==727212==ERROR: AddressSanitizer: SEGV on unknown address 0x05ec000b18ac (pc 0x7f6c99a439fc bp 0x0000000b18ac sp 0x5260000f12d0 T0) ==727212==The signal is caused by a READ memory access. AddressSanitizer: nested bug in the same thread, aborting. 
Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: ...home/runner/.ya/build/build_root/0l5c/000659/ydb/tests/olap/scenario/ydb-tests-olap-scenario', '--basetemp', '/home/runner/.ya/build/build_root/0l5c/000659/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/0l5c/000659/ydb/tests/olap/scenario/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/0l5c/000659', '--source-root', '/home/runner/.ya/build/build_root/0l5c/000659/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/0l5c/000659/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/scenario', '--test-tool-bin', '/home/runner/.ya/tools/v4/8330113388/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--dep-root', 'ydb/tests/olap/scenario', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1749, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("...home/runner/.ya/build/build_root/0l5c/000659/ydb/tests/olap/scenario/ydb-tests-olap-scenario', '--basetemp', '/home/runner/.ya/build/build_root/0l5c/000659/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/0l5c/000659/ydb/tests/olap/scenario/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/0l5c/000659', '--source-root', '/home/runner/.ya/build/build_root/0l5c/000659/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/0l5c/000659/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/scenario', '--test-tool-bin', '/home/runner/.ya/tools/v4/8330113388/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--dep-root', 'ydb/tests/olap/scenario', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', 
'--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout",), {}) |99.7%| [TM] {RESULT} ydb/tests/olap/scenario/py3test >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--false] |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false] >> test_workload.py::TestYdbLogWorkload::test[row] >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--false] [GOOD] >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--true] >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--true] [GOOD] >> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--false] |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--true] [GOOD] >> test_public_api.py::TestMetaDataInvalidation::test_invalidation_success >> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--true] [GOOD] >> test_workload.py::TestYdbWorkload::test[row] >> test_public_api.py::TestMetaDataInvalidation::test_invalidation_success [GOOD] >> test_cms_erasure.py::TestDegradedGroupMirror3dcMax::test_no_degraded_groups_after_shutdown [GOOD] >> test_cms_restart.py::TestCmsStateStorageRestartsBlockMax::test_restart_as_much_as_can [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[8] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[9] |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_erasure.py::TestDegradedGroupMirror3dcMax::test_no_degraded_groups_after_shutdown [GOOD] >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--true] [GOOD] |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_restart.py::TestCmsStateStorageRestartsBlockMax::test_restart_as_much_as_can [GOOD] >> test_public_api.py::TestJsonExample::test_json_unexpected_failure >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorMax::test_restart_as_much_as_can [GOOD] >> test_cms_state_storage.py::TestCmsStateStorageSimpleKeep::test_check_shutdown_state_storage_nodes [GOOD] >> test_cms_restart.py::TestCmsStateStorageRestartsBlockKeep::test_restart_as_much_as_can [GOOD] |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--true] [GOOD] >> test_cms_erasure.py::TestDegradedGroupMirror3dcKeep::test_no_degraded_groups_after_shutdown [GOOD] >> test_public_api.py::TestJsonExample::test_json_unexpected_failure [GOOD] >> test_public_api.py::TestJsonExample::test_json_success >> test_public_api.py::TestJsonExample::test_json_success [GOOD] >> KqpQuerySession::NoLocalAttach |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorMax::test_restart_as_much_as_can [GOOD] |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_erasure.py::TestDegradedGroupMirror3dcKeep::test_no_degraded_groups_after_shutdown [GOOD] >> 
Replication::Types >> test_workload.py::TestYdbWorkload::test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_state_storage.py::TestCmsStateStorageSimpleKeep::test_check_shutdown_state_storage_nodes [GOOD] |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_restart.py::TestCmsStateStorageRestartsBlockKeep::test_restart_as_much_as_can [GOOD] |99.8%| [TA] $(B)/ydb/tests/functional/cms/test-results/py3test/{meta.json ... results_accumulator.log} |99.8%| [TA] {RESULT} $(B)/ydb/tests/functional/cms/test-results/py3test/{meta.json ... results_accumulator.log} >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false] [FAIL] >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true] >> test_workload.py::TestYdbWorkload::test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--true] [GOOD] >> Replication::Types [GOOD] >> test_public_api.py::TestForPotentialDeadlock::test_deadlocked_threads_on_cleanup >> KqpQueryService::ReplyPartLimitProxyNode >> KqpQueryService::ReplyPartLimitProxyNode [GOOD] >> NodeIdDescribe::HasDistribution |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/replication/unittest >> Replication::Types [GOOD] |99.8%| [TM] {RESULT} ydb/tests/functional/replication/unittest >> test_public_api.py::TestForPotentialDeadlock::test_deadlocked_threads_on_cleanup [GOOD] >> Transfer::Main_ColumnTable_KeyColumnFirst >> S3PathStyleBackup::DisableVirtualAddressing >> ConsistentIndexRead::InteractiveTx >> test_public_api.py::TestRecursiveCreation::test_mkdir >> KqpQuerySession::NoLocalAttach [GOOD] >> S3PathStyleBackup::DisableVirtualAddressing [GOOD] |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/kqp/kqp_query_session/unittest >> KqpQuerySession::NoLocalAttach [GOOD] |99.8%| [TM] {RESULT} ydb/tests/functional/kqp/kqp_query_session/unittest >> Transfer::Main_ColumnTable_KeyColumnFirst [GOOD] >> Transfer::Main_ColumnTable_KeyColumnLast |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/backup/s3_path_style/unittest >> S3PathStyleBackup::DisableVirtualAddressing [GOOD] |99.8%| [TM] {RESULT} ydb/tests/functional/backup/s3_path_style/unittest >> test_public_api.py::TestRecursiveCreation::test_mkdir [GOOD] >> test_public_api.py::TestRecursiveCreation::test_create_table >> test_public_api.py::TestRecursiveCreation::test_create_table [GOOD] >> Transfer::Main_ColumnTable_KeyColumnLast [GOOD] >> Transfer::Main_ColumnTable_ComplexKey >> test_workload.py::TestYdbKvWorkload::test[row] >> Backup::UuidValue >> test_tpch.py::TestTpchS1::test_tpch[9] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[10] >> test_encryption.py::TestEncryption::test_simple_encryption >> Transfer::Main_ColumnTable_ComplexKey [GOOD] >> Transfer::Main_ColumnTable_JsonMessage >> Backup::UuidValue [GOOD] >> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--false] [GOOD] >> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--true] >> test_public_api.py::TestAttributes::test_create_table >> Transfer::Main_ColumnTable_JsonMessage [GOOD] >> Transfer::Main_ColumnTable_NullableColumn ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/backup/unittest >> Backup::UuidValue [GOOD] 
Test command err: Found S3 object: "ProducerUuidValueBackup/data_00.csv" Found S3 object: "ProducerUuidValueBackup/metadata.json" Found S3 object: "ProducerUuidValueBackup/scheme.pb" |99.8%| [TM] {RESULT} ydb/tests/functional/backup/unittest >> Transfer::Main_ColumnTable_NullableColumn [GOOD] >> Transfer::Main_ColumnTable_Date >> NodeIdDescribe::HasDistribution [GOOD] >> Transfer::Main_ColumnTable_Date [GOOD] >> Transfer::Main_ColumnTable_Double |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/kqp/kqp_query_svc/unittest >> NodeIdDescribe::HasDistribution [GOOD] |99.9%| [TM] {RESULT} ydb/tests/functional/kqp/kqp_query_svc/unittest >> test_public_api.py::TestAttributes::test_create_table [GOOD] >> test_public_api.py::TestAttributes::test_copy_table >> test_public_api.py::TestAttributes::test_copy_table [GOOD] >> test_public_api.py::TestAttributes::test_create_indexed_table >> test_public_api.py::TestAttributes::test_create_indexed_table [GOOD] >> test_public_api.py::TestAttributes::test_alter_table >> test_public_api.py::TestAttributes::test_alter_table [GOOD] >> test_public_api.py::TestAttributes::test_limits[attributes0] [GOOD] >> test_public_api.py::TestAttributes::test_limits[attributes1] >> test_public_api.py::TestAttributes::test_limits[attributes1] [GOOD] >> test_public_api.py::TestAttributes::test_limits[attributes2] [GOOD] >> test_public_api.py::TestAttributes::test_limits[attributes3] >> test_public_api.py::TestAttributes::test_limits[attributes3] [GOOD] >> test_public_api.py::TestAttributes::test_limits[attributes4] [GOOD] >> Transfer::Main_ColumnTable_Double [GOOD] >> Transfer::Main_ColumnTable_Utf8_Long >> test_tpch.py::TestTpchS1::test_tpch[10] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[11] >> Transfer::Main_ColumnTable_Utf8_Long [GOOD] >> Transfer::Main_MessageField_Partition >> Transfer::Main_MessageField_Partition [GOOD] >> Transfer::Main_MessageField_SeqNo >> test_public_api.py::TestDocApiTables::test_create_table >> test_tpch.py::TestTpchS1::test_tpch[11] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[12] >> Transfer::Main_MessageField_SeqNo [GOOD] >> Transfer::Main_MessageField_ProducerId >> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--true] [GOOD] >> Transfer::Main_MessageField_ProducerId [GOOD] >> Transfer::Main_MessageField_MessageGroupId >> test_public_api.py::TestDocApiTables::test_create_table [GOOD] >> test_public_api.py::TestDocApiTables::test_alter_table[None-BadRequest] >> test_public_api.py::TestDocApiTables::test_alter_table[None-BadRequest] [GOOD] >> test_public_api.py::TestDocApiTables::test_alter_table[settings1-None] >> test_public_api.py::TestDocApiTables::test_alter_table[settings1-None] [GOOD] >> test_public_api.py::TestDocApiTables::test_drop_table[None-None] >> test_public_api.py::TestDocApiTables::test_drop_table[None-None] [GOOD] >> test_public_api.py::TestDocApiTables::test_drop_table[settings1-None] >> test_public_api.py::TestDocApiTables::test_drop_table[settings1-None] [GOOD] >> test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--false] [GOOD] >> test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--true] >> test_workload.py::TestYdbLogWorkload::test[row] [GOOD] >> test_workload.py::TestYdbLogWorkload::test[column] >> Transfer::Main_MessageField_MessageGroupId [GOOD] >> Transfer::AlterLambda >> test_tpch.py::TestTpchS1::test_tpch[12] [FAIL] ------- [TM] {asan, 
default-linux-x86_64, release} ydb/tests/functional/tpc/medium/py3test >> test_tpch.py::TestTpchS1::test_tpch[12] [FAIL] 2025-03-31 16:50:10,738 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-03-31 16:50:12,102 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination: pid rss ref pdirt 823891 621M 617M 540M ydb-tests-functional-tpc-medium --basetemp /home/runner/.ya/build/build_root/0l5c/001c7c/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest 825150 6.7G 6.5G 6.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/0l5c/001c7c/ydb/tests/functional/tpc/medium/test-results/py3test/testing_out_stuff/t 826517 4.0G 4.0G 3.5G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/0l5c/001c7c/ydb/tests/functional/tpc/medium/test-results/py3test/testing_out_stuff/t 948577 127M 129M 63.9M └─ ydb -e grpc://localhost:29894 -d /local/test_db workload tpch --path olap_yatests/tpch/s1 run --json /home/runner/.ya/build/build_root/0l5c/001c7c/ydb/tests/functional/t Test command err: File "library/python/pytest/main.py", line 101, in main rc = pytest.main( File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main( File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main return wrap_session(config, _main) File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session session.exitstatus = doit(config, session) or 0 File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main config.hook.pytest_runtestloop(session=session) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol runtestprotocol(item, nextitem=nextitem) File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol reports.append(call_and_report(item, "call", log)) File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report call = call_runtest_hook(item, when, **kwds) File 
"contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook return CallInfo.from_call( File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call result: Optional[TResult] = func() File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in lambda: ihook(item=item, **kwds), when=when, reraise=reraise File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call item.runtest() File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest self.ihook.pytest_pyfunc_call(pyfuncitem=self) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call pyfuncitem.retval = testfunction(**testargs) File "ydb/tests/olap/load/lib/tpch.py", line 48, in test_tpch self.run_workload_test(self._get_path(), query_num) File "ydb/tests/olap/load/lib/conftest.py", line 286, in run_workload_test result = YdbCliHelper.workload_run( File "ydb/tests/olap/lib/ydb_cli.py", line 310, in workload_run ).process() File "ydb/tests/olap/lib/ydb_cli.py", line 283, in process process = yatest.common.process.execute(self._get_cmd(), check_exit_code=False) File "library/python/testing/yatest_common/yatest/common/process.py", line 656, in execute res.wait(check_exit_code, timeout, on_timeout) File "library/python/testing/yatest_common/yatest/common/process.py", line 400, in wait _wait() File "library/python/testing/yatest_common/yatest/common/process.py", line 335, in _wait pid, sts, rusage = os.wait4(self._process.pid, 0) File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown traceback.print_stack(file=sys.stderr) Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: .../build/build_root/0l5c/001c7c/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/0l5c/001c7c/ydb/tests/functional/tpc/medium/test-results/py3test/testing_out_stuff/test_tpch/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/0l5c/001c7c', '--source-root', '/home/runner/.ya/build/build_root/0l5c/001c7c/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/0l5c/001c7c/ydb/tests/functional/tpc/medium/test-results/py3test/testing_out_stuff/test_tpch/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/functional/tpc/medium', '--test-tool-bin', '/home/runner/.ya/tools/v4/8330113388/test_tool', '--ya-version', '2', '--collect-cores', 
'--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--dep-root', 'ydb/tests/functional/tpc/medium', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'test_tpch.py']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1749, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: ((".../build/build_root/0l5c/001c7c/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/0l5c/001c7c/ydb/tests/functional/tpc/medium/test-results/py3test/testing_out_stuff/test_tpch/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/0l5c/001c7c', '--source-root', '/home/runner/.ya/build/build_root/0l5c/001c7c/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/0l5c/001c7c/ydb/tests/functional/tpc/medium/test-results/py3test/testing_out_stuff/test_tpch/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/functional/tpc/medium', '--test-tool-bin', '/home/runner/.ya/tools/v4/8330113388/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--dep-root', 'ydb/tests/functional/tpc/medium', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'test_tpch.py']' stopped by 600 seconds timeout",), {}) |99.9%| [TA] $(B)/ydb/tests/functional/tpc/medium/test-results/py3test/{meta.json ... results_accumulator.log} |99.9%| [TA] {RESULT} $(B)/ydb/tests/functional/tpc/medium/test-results/py3test/{meta.json ... 
results_accumulator.log}
>> test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--false] [GOOD]
>> test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--true]
>> Transfer::AlterLambda [GOOD]
>> Transfer::DropTransfer
|99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--true] [GOOD]
|99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_public_api.py::TestDocApiTables::test_drop_table[settings1-None] [GOOD]
|99.9%| [TA] $(B)/ydb/tests/functional/api/test-results/py3test/{meta.json ... results_accumulator.log}
|99.9%| [TA] {RESULT} $(B)/ydb/tests/functional/api/test-results/py3test/{meta.json ... results_accumulator.log}
>> test_workload.py::TestYdbWorkload::test [GOOD]
>> Transfer::DropTransfer [GOOD]
>> Transfer::CreateAndDropConsumer
>> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true] [FAIL]
>> test_workload.py::TestYdbWorkload::test[row] [GOOD]
>> test_workload.py::TestYdbWorkload::test[column]
>> Transfer::CreateAndDropConsumer [GOOD]
>> Transfer::DescribeError_OnLambdaCompilation
>> Transfer::DescribeError_OnLambdaCompilation [GOOD]
>> Transfer::CustomConsumer
|99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/oltp_workload/tests/py3test >> test_workload.py::TestYdbWorkload::test [GOOD]
|99.9%| [TM] {RESULT} ydb/tests/stress/oltp_workload/tests/py3test
>> test_workload.py::TestYdbKvWorkload::test[row] [GOOD]
>> test_workload.py::TestYdbKvWorkload::test[column]
>> Transfer::CustomConsumer [GOOD]
>> Transfer::CustomFlushInterval
>> test_encryption.py::TestEncryption::test_simple_encryption [GOOD]
>> Transfer::CustomFlushInterval [GOOD]
>> Transfer::AlterFlushInterval
|99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/encryption/py3test >> test_encryption.py::TestEncryption::test_simple_encryption [GOOD]
|99.9%| [TM] {RESULT} ydb/tests/functional/encryption/py3test
>> Transfer::AlterFlushInterval [GOOD]
>> Transfer::AlterBatchSize
>> Transfer::AlterBatchSize [GOOD]
>> Transfer::CreateTransferSourceNotExists
>> Transfer::CreateTransferSourceNotExists [GOOD]
>> Transfer::CreateTransferSourceIsNotTopic
>> Transfer::CreateTransferSourceIsNotTopic [GOOD]
>> Transfer::CreateTransferRowTable
>> Transfer::CreateTransferRowTable [GOOD]
>> Transfer::CreateTransferTargetIsNotTable
>> Transfer::CreateTransferTargetIsNotTable [GOOD]
>> Transfer::CreateTransferTargetNotExists
>> Transfer::CreateTransferTargetNotExists [GOOD]
>> Transfer::PauseAndResumeTransfer
|99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true] [FAIL]
>> Transfer::PauseAndResumeTransfer [GOOD]
>> Replication::PauseAndResumeReplication
>> Replication::PauseAndResumeReplication [FAIL]
------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/transfer/unittest >> Replication::PauseAndResumeReplication [FAIL]
Test command err:
DDL: CREATE TABLE `Table_4889319549414412871` (
         Key Uint64 NOT NULL, Message Utf8 NOT NULL, PRIMARY KEY (Key)
     ) WITH ( STORE = COLUMN );
DDL: CREATE TOPIC `Topic_4889319549414412871` WITH ( min_active_partitions = 10 );
DDL: $l = ($x) -> {
         return [ <| Key:CAST($x._offset AS Uint64), Message:CAST($x._data AS Utf8) |> ];
     };
     ;
     CREATE TRANSFER `Transfer_4889319549414412871` FROM `Topic_4889319549414412871` TO `Table_4889319549414412871` USING $l
     WITH ( CONNECTION_STRING = 'grpc://localhost:16093/?database=local',
            FLUSH_INTERVAL = Interval('PT1S'),
            BATCH_SIZE_BYTES = 8388608 );
>>>>> Query: SELECT `Key`, `Message` FROM `Table_4889319549414412871` ORDER BY `Key`, `Message`
Attempt=19 count=0
>>>>> Query: SELECT `Key`, `Message` FROM `Table_4889319549414412871` ORDER BY `Key`, `Message`
Attempt=18 count=0
>>>>> Query: SELECT `Key`, `Message` FROM `Table_4889319549414412871` ORDER BY `Key`, `Message`
Attempt=17 count=0
>>>>> Query: SELECT `Key`, `Message` FROM `Table_4889319549414412871` ORDER BY `Key`, `Message`
Attempt=16 count=0
>>>>> Query: SELECT `Key`, `Message` FROM `Table_4889319549414412871` ORDER BY `Key`, `Message`
Attempt=15 count=0
>>>>> Query: SELECT `Key`, `Message` FROM `Table_4889319549414412871` ORDER BY `Key`, `Message`
Attempt=14 count=0
>>>>> Query: SELECT `Key`, `Message` FROM `Table_4889319549414412871` ORDER BY `Key`, `Message`
Attempt=13 count=1
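Editor's note on these transcripts: each test case creates a column table and a topic, then wires them together with a CREATE TRANSFER whose YQL lambda maps a topic message (`$x._offset`, `$x._data`) onto table columns. The unittest itself is C++, but the same DDL can be issued from Python through the YDB SDK's query-service pool; `execute_with_retries` is the entry point visible in the tracebacks earlier in this log. Endpoint, database, and object names below are illustrative placeholders, not values from this run:

import ydb  # contrib/python/ydb in this tree; the `ydb` package elsewhere

driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/local")  # placeholder
driver.wait(timeout=5)
pool = ydb.QuerySessionPool(driver)

# Same statement shape as the logged DDL; all names here are examples.
pool.execute_with_retries("""
    $l = ($x) -> {
        return [ <| Key: CAST($x._offset AS Uint64), Message: CAST($x._data AS Utf8) |> ];
    };
    CREATE TRANSFER `Transfer_example` FROM `Topic_example` TO `Table_example` USING $l
    WITH (
        CONNECTION_STRING = 'grpc://localhost:2136/?database=local',
        FLUSH_INTERVAL = Interval('PT1S'),
        BATCH_SIZE_BYTES = 8388608
    );
""")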
DDL: CREATE TABLE `Table_16747784004177135395` (
         Message Utf8 NOT NULL, Key Uint64 NOT NULL, PRIMARY KEY (Key)
     ) WITH ( STORE = COLUMN );
DDL: CREATE TOPIC `Topic_16747784004177135395` WITH ( min_active_partitions = 10 );
DDL: $l = ($x) -> {
         return [ <| Key:CAST($x._offset AS Uint64), Message:CAST($x._data AS Utf8) |> ];
     };
     ;
     CREATE TRANSFER `Transfer_16747784004177135395` FROM `Topic_16747784004177135395` TO `Table_16747784004177135395` USING $l
     WITH ( CONNECTION_STRING = 'grpc://localhost:16093/?database=local',
            FLUSH_INTERVAL = Interval('PT1S'),
            BATCH_SIZE_BYTES = 8388608 );
>>>>> Query: SELECT `Key`, `Message` FROM `Table_16747784004177135395` ORDER BY `Key`, `Message`
Attempt=19 count=0
>>>>> Query: SELECT `Key`, `Message` FROM `Table_16747784004177135395` ORDER BY `Key`, `Message`
Attempt=18 count=1
DDL: CREATE TABLE `Table_14052122895730683091` (
         Key1 Uint64 NOT NULL, Key3 Uint64 NOT NULL, Value1 Utf8,
         Key2 Uint64 NOT NULL, Value2 Utf8, Key4 Uint64 NOT NULL,
         PRIMARY KEY (Key3, Key2, Key1, Key4)
     ) WITH ( STORE = COLUMN );
DDL: CREATE TOPIC `Topic_14052122895730683091` WITH ( min_active_partitions = 10 );
DDL: $l = ($x) -> {
         return [ <|
             Key1:CAST(1 AS Uint64),
             Key2:CAST(2 AS Uint64),
             Value2:CAST("value-2" AS Utf8),
             Key4:CAST(4 AS Uint64),
             Key3:CAST(3 AS Uint64),
             Value1:CAST("value-1" AS Utf8),
         |> ];
     };
     ;
     CREATE TRANSFER `Transfer_14052122895730683091` FROM `Topic_14052122895730683091` TO `Table_14052122895730683091` USING $l
     WITH ( CONNECTION_STRING = 'grpc://localhost:16093/?database=local',
            FLUSH_INTERVAL = Interval('PT1S'),
            BATCH_SIZE_BYTES = 8388608 );
>>>>> Query: SELECT `Key1`, `Key2`, `Key3`, `Key4`, `Value1`, `Value2` FROM `Table_14052122895730683091` ORDER BY `Key1`, `Key2`, `Key3`, `Key4`, `Value1`, `Value2`
Attempt=19 count=0
>>>>> Query: SELECT `Key1`, `Key2`, `Key3`, `Key4`, `Value1`, `Value2` FROM `Table_14052122895730683091` ORDER BY `Key1`, `Key2`, `Key3`, `Key4`, `Value1`, `Value2`
Attempt=18 count=0
>>>>> Query: SELECT `Key1`, `Key2`, `Key3`, `Key4`, `Value1`, `Value2` FROM `Table_14052122895730683091` ORDER BY `Key1`, `Key2`, `Key3`, `Key4`, `Value1`, `Value2`
Attempt=17 count=1
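Editor's note: the `Attempt=N count=M` lines are a countdown. After writing to the topic, the test re-runs the SELECT until the transfer's flush makes the rows visible (with FLUSH_INTERVAL of one second a few retries normally suffice; a non-zero count ends the loop, and exhausting all attempts fails the test). A self-contained sketch of that polling pattern, with `run_query` standing in for whatever executes the SELECT:

import time

def wait_for_rows(run_query, expected, attempts=20, delay=1.0):
    # Poll until `run_query` returns at least `expected` rows or attempts run out.
    # The countdown mirrors the Attempt=19..13 lines in the transcript above.
    for attempt in range(attempts - 1, -1, -1):
        rows = run_query()
        print(f"Attempt={attempt} count={len(rows)}")
        if len(rows) >= expected:
            return rows
        time.sleep(delay)
    raise TimeoutError(f"no data after {attempts} attempts")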
Yson::ConvertToUint64($input.id), FirstName: CAST(Yson::ConvertToString($input.first_name) AS Utf8), LastName: CAST(Yson::ConvertToString($input.last_name) AS Utf8), Salary: CAST(Yson::ConvertToString($input.salary) AS UInt64) |> ]; }; ; CREATE TRANSFER `Transfer_12749934429673775717` FROM `Topic_12749934429673775717` TO `Table_12749934429673775717` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:16093/?database=local' , FLUSH_INTERVAL = Interval('PT1S') , BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `Id`, `FirstName`, `LastName`, `Salary` FROM `Table_12749934429673775717` ORDER BY `Id`, `FirstName`, `LastName`, `Salary` Attempt=19 count=0 >>>>> Query: SELECT `Id`, `FirstName`, `LastName`, `Salary` FROM `Table_12749934429673775717` ORDER BY `Id`, `FirstName`, `LastName`, `Salary` Attempt=18 count=0 >>>>> Query: SELECT `Id`, `FirstName`, `LastName`, `Salary` FROM `Table_12749934429673775717` ORDER BY `Id`, `FirstName`, `LastName`, `Salary` Attempt=17 count=1 DDL: CREATE TABLE `Table_16594185322169735442` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = COLUMN ); DDL: CREATE TOPIC `Topic_16594185322169735442` WITH ( min_active_partitions = 10 ); DDL: $l = ($x) -> { return [ <| Key:CAST($x._offset AS Uint64), Message:CAST($x._data AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_16594185322169735442` FROM `Topic_16594185322169735442` TO `Table_16594185322169735442` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:16093/?database=local' , FLUSH_INTERVAL = Interval('PT1S') , BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `Key`, `Message` FROM `Table_16594185322169735442` ORDER BY `Key`, `Message` Attempt=19 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_16594185322169735442` ORDER BY `Key`, `Message` Attempt=18 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_16594185322169735442` ORDER BY `Key`, `Message` Attempt=17 count=1 DDL: CREATE TABLE `Table_13543291511934413407` ( Key Uint64 NOT NULL, Message Date, PRIMARY KEY (Key) ) WITH ( STORE = COLUMN ); DDL: CREATE TOPIC `Topic_13543291511934413407` WITH ( min_active_partitions = 10 ); DDL: $l = ($x) -> { return [ <| Key:CAST($x._offset AS Uint64), Message: CAST($x._data AS Date) |> ]; }; ; CREATE TRANSFER `Transfer_13543291511934413407` FROM `Topic_13543291511934413407` TO `Table_13543291511934413407` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:16093/?database=local' , FLUSH_INTERVAL = Interval('PT1S') , BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `Key`, `Message` FROM `Table_13543291511934413407` ORDER BY `Key`, `Message` Attempt=19 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_13543291511934413407` ORDER BY `Key`, `Message` Attempt=18 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_13543291511934413407` ORDER BY `Key`, `Message` Attempt=17 count=1 DDL: CREATE TABLE `Table_3245435058434287993` ( Key Uint64 NOT NULL, Message Double, PRIMARY KEY (Key) ) WITH ( STORE = COLUMN ); DDL: CREATE TOPIC `Topic_3245435058434287993` WITH ( min_active_partitions = 10 ); DDL: $l = ($x) -> { return [ <| Key:CAST($x._offset AS Uint64), Message: CAST($x._data AS Double) |> ]; }; ; CREATE TRANSFER `Transfer_3245435058434287993` FROM `Topic_3245435058434287993` TO `Table_3245435058434287993` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:16093/?database=local' , FLUSH_INTERVAL = Interval('PT1S') , BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `Key`, `Message` FROM `Table_3245435058434287993` ORDER BY `Key`, `Message` Attempt=19 count=0 >>>>> Query: SELECT 
`Key`, `Message` FROM `Table_3245435058434287993` ORDER BY `Key`, `Message` Attempt=18 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_3245435058434287993` ORDER BY `Key`, `Message` Attempt=17 count=1 DDL: CREATE TABLE `Table_4200523046919037256` ( Key Uint64 NOT NULL, Message Utf8 NOT NULL, PRIMARY KEY (Key) ) WITH ( STORE = COLUMN ); DDL: CREATE TOPIC `Topic_4200523046919037256` WITH ( min_active_partitions = 10 ); DDL: $l = ($x) -> { return [ <| Key:CAST($x._offset A ... : $l = ($x) -> { return [ <| Key:CAST($x._offset AS Uint64), Message:CAST($x._data AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_10403362028920844558` FROM `Topic_10403362028920844558` TO `Table_10403362028920844558` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:16093/?database=local' , FLUSH_INTERVAL = Interval('PT3600S') , BATCH_SIZE_BYTES = 536870912 ); >>>>> Query: SELECT `Message` FROM `Table_10403362028920844558` ORDER BY `Message` Attempt=19 count=1 DDL: CREATE TABLE `Table_9278552531211389462` ( Key Uint64 NOT NULL, Message Utf8 NOT NULL, PRIMARY KEY (Key) ) WITH ( STORE = COLUMN ); DDL: $l = ($x) -> { return [ <| Key:CAST($x._offset AS Uint64), Message:CAST($x._data AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_9278552531211389462` FROM `Topic_9278552531211389462` TO `Table_9278552531211389462` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:16093/?database=local' , FLUSH_INTERVAL = Interval('PT1S') , BATCH_SIZE_BYTES = 8388608 ); >>>>> ACTUAL: {
<main>: Error: Discovery error: local/Topic_9278552531211389462: SCHEME_ERROR ({
<main>: Error: Path not found }) } >>>>> EXPECTED: Discovery error: local/Topic_ DDL: CREATE TABLE `Table_13279490412564045479` ( Key Uint64 NOT NULL, Message Utf8 NOT NULL, PRIMARY KEY (Key) ) WITH ( STORE = COLUMN ); DDL: CREATE TABLE `Topic_13279490412564045479` ( Key Uint64 NOT NULL, PRIMARY KEY (Key) ); DDL: $l = ($x) -> { return [ <| Key:CAST($x._offset AS Uint64), Message:CAST($x._data AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_13279490412564045479` FROM `Topic_13279490412564045479` TO `Table_13279490412564045479` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:16093/?database=local' , FLUSH_INTERVAL = Interval('PT1S') , BATCH_SIZE_BYTES = 8388608 ); >>>>> ACTUAL: {
<main>: Error: Discovery error: local/Topic_13279490412564045479: UNSUPPORTED ({
<main>: Error: Unsupported entry type: Table }) } >>>>> EXPECTED: Discovery error: local/Topic_ DDL: CREATE TABLE `Table_4393252944385988286` ( Key Uint64 NOT NULL, Message Utf8 NOT NULL, PRIMARY KEY (Key) ); DDL: CREATE TOPIC `Topic_4393252944385988286` WITH ( min_active_partitions = 10 ); DDL: $l = ($x) -> { return [ <| Key:CAST($x._offset AS Uint64), Message:CAST($x._data AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_4393252944385988286` FROM `Topic_4393252944385988286` TO `Table_4393252944385988286` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:16093/?database=local' , FLUSH_INTERVAL = Interval('PT1S') , BATCH_SIZE_BYTES = 8388608 ); >>>>> ACTUAL: {
<main>: Error: Error in target #1: Only column tables are supported as transfer targets } >>>>> EXPECTED: Only column tables are supported as transfer targets DDL: CREATE TOPIC `Table_17367120358147288738`; DDL: CREATE TOPIC `Topic_17367120358147288738` WITH ( min_active_partitions = 10 ); DDL: $l = ($x) -> { return [ <| Key:CAST($x._offset AS Uint64), Message:CAST($x._data AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_17367120358147288738` FROM `Topic_17367120358147288738` TO `Table_17367120358147288738` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:16093/?database=local' , FLUSH_INTERVAL = Interval('PT1S') , BATCH_SIZE_BYTES = 8388608 ); >>>>> ACTUAL: {
<main>: Error: Error in target #1: Only column tables are supported as transfer targets } >>>>> EXPECTED: Only column tables are supported as transfer targets DDL: CREATE TOPIC `Topic_47178554419558122` WITH ( min_active_partitions = 10 ); DDL: $l = ($x) -> { return [ <| Key:CAST($x._offset AS Uint64), Message:CAST($x._data AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_47178554419558122` FROM `Topic_47178554419558122` TO `Table_47178554419558122` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:16093/?database=local' , FLUSH_INTERVAL = Interval('PT1S') , BATCH_SIZE_BYTES = 8388608 ); >>>>> ACTUAL: {
<main>: Error: Error in target #1: Create dst error: StatusPathDoesNotExist, The target table `/local/Table_47178554419558122` does not exist } >>>>> EXPECTED: The target table `/local/Table_47178554419558122` does not exist DDL: CREATE TABLE `Table_12122027210677561216` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = COLUMN ); DDL: CREATE TOPIC `Topic_12122027210677561216` WITH ( min_active_partitions = 1 ); DDL: $l = ($x) -> { return [ <| Key:CAST($x._offset AS Uint64), Message:CAST($x._data AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_12122027210677561216` FROM `Topic_12122027210677561216` TO `Table_12122027210677561216` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:16093/?database=local' , FLUSH_INTERVAL = Interval('PT1S') , BATCH_SIZE_BYTES = 1 ); >>>>> Query: SELECT `Message` FROM `Table_12122027210677561216` ORDER BY `Message` Attempt=19 count=1 State: Paused DDL: ALTER TRANSFER `Transfer_12122027210677561216` SET ( STATE = "Paused" ); >>>>> Query: SELECT `Message` FROM `Table_12122027210677561216` ORDER BY `Message` Attempt=19 count=1 State: StandBy DDL: ALTER TRANSFER `Transfer_12122027210677561216` SET ( STATE = "StandBy" ); >>>>> Query: SELECT `Message` FROM `Table_12122027210677561216` ORDER BY `Message` Attempt=19 count=1 >>>>> Query: SELECT `Message` FROM `Table_12122027210677561216` ORDER BY `Message` Attempt=18 count=2 DDL: ALTER TRANSFER `Transfer_12122027210677561216` SET ( STATE = "Paused" ); DDL: ALTER TRANSFER `Transfer_12122027210677561216` SET ( STATE = "StandBy" ); DDL: CREATE TABLE `SourceTable_461884686310472243` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ); DDL: CREATE ASYNC REPLICATION `Replication_461884686310472243` FOR `SourceTable_461884686310472243` AS `Table_461884686310472243` WITH ( CONNECTION_STRING = 'grpc://localhost:16093/?database=local' ); >>>>> Query: INSERT INTO `SourceTable_461884686310472243` (`Key`, `Message`) VALUES (1, 'Message-1'); >>>>> Query: SELECT `Message` FROM `Table_461884686310472243` ORDER BY `Message` assertion failed at ydb/tests/functional/transfer/utils.h:422, std::pair NReplicationTest::MainTestCase::DoRead(const TExpectations &): (res.IsSuccess())
<main>: Error: Type annotation, code: 1030
<main>:1:1: Error: At function: KiReadTable!
<main>:1:1: Error: Cannot find table 'db.[/local/Table_461884686310472243]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 TBackTrace::Capture()+28 (0x1BF4D1C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x1CFA9C0) NReplicationTest::MainTestCase::DoRead(TVector>, std::__y1::shared_ptr>, std::__y1::allocator>, std::__y1::shared_ptr>>>, std::__y1::allocator>, std::__y1::shared_ptr>, std::__y1::allocator>, std::__y1::shared_ptr>>>>> const&)+2901 (0x1883995) NReplicationTest::MainTestCase::CheckResult(TVector>, std::__y1::shared_ptr>, std::__y1::allocator>, std::__y1::shared_ptr>>>, std::__y1::allocator>, std::__y1::shared_ptr>, std::__y1::allocator>, std::__y1::shared_ptr>>>>> const&)+618 (0x1878ABA) NTestSuiteReplication::TTestCasePauseAndResumeReplication::Execute_(NUnitTest::TTestContext&)+1999 (0x18734AF) std::__y1::__function::__func, void ()>::operator()()+280 (0x187D3C8) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x1D28EC6) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x1D01539) NTestSuiteReplication::TCurrentTest::Execute()+1204 (0x187C104) NUnitTest::TTestFactory::Execute()+2438 (0x1D02E06) NUnitTest::RunMain(int, char**)+5213 (0x1D233AD) ??+0 (0x7F9A92B89D90) __libc_start_main+128 (0x7F9A92B89E40) _start+41 (0x1847029) |99.9%| [TM] {RESULT} ydb/tests/functional/transfer/unittest >> test_workload.py::TestYdbWorkload::test [GOOD] |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/olap_workload/tests/py3test >> test_workload.py::TestYdbWorkload::test [GOOD] |99.9%| [TM] {RESULT} ydb/tests/stress/olap_workload/tests/py3test >> test_workload.py::TestYdbKvWorkload::test[column] [GOOD] |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/kv/tests/py3test >> test_workload.py::TestYdbKvWorkload::test[column] [GOOD] |99.9%| [TM] {RESULT} ydb/tests/stress/kv/tests/py3test >> test_workload.py::TestYdbLogWorkload::test[column] [GOOD] >> ConsistentIndexRead::InteractiveTx [GOOD] >> KqpExtTest::SecondaryIndexSelectUsingScripting >> KqpExtTest::SecondaryIndexSelectUsingScripting [GOOD] >> test_workload.py::TestYdbWorkload::test[column] [GOOD] |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/kqp/kqp_indexes/unittest >> KqpExtTest::SecondaryIndexSelectUsingScripting [GOOD] |99.9%| [TM] {RESULT} ydb/tests/functional/kqp/kqp_indexes/unittest >> test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--true] [GOOD] >> test_workload.py::TestYdbLogWorkload::test[column] [FAIL] |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/log/tests/py3test >> test_workload.py::TestYdbLogWorkload::test[column] [FAIL] |99.9%| [TM] {RESULT} ydb/tests/stress/log/tests/py3test |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--true] [GOOD] |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/simple_queue/tests/py3test >> test_workload.py::TestYdbWorkload::test[column] [GOOD] |99.9%| [TM] {RESULT} ydb/tests/stress/simple_queue/tests/py3test >> test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--true] [GOOD] |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >>
test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--true] [GOOD] |99.9%| [TA] $(B)/ydb/tests/functional/serverless/test-results/py3test/{meta.json ... results_accumulator.log} |99.9%| [TA] {RESULT} $(B)/ydb/tests/functional/serverless/test-results/py3test/{meta.json ... results_accumulator.log} |99.9%| CLEANING BUILD ROOT Number of suites skipped by size: 86 ydb/tests/fq/mem_alloc [size:medium] ------ sole chunk ran 12 tests (total:644.71s - recipes:9.73s test:600.04s recipes:3.39s) Chunk exceeded 600s timeout, failed to shutdown gracefully in 30s and was terminated using SIGQUIT signal List of the tests involved in the launch: test_result_limits.py::TestResultLimits::test_many_rows (good) duration: 238.91s test_result_limits.py::TestResultLimits::test_large_row (timeout) duration: 79.93s test_alloc_default.py::TestAlloc::test_node_limit[kikimr0] (good) duration: 73.85s test_alloc_default.py::TestAlloc::test_hard_limit[kikimr0] (good) duration: 42.64s test_alloc_default.py::TestAlloc::test_up_down[kikimr0] (good) duration: 36.88s test_alloc_default.py::TestAlloc::test_alloc_and_free[kikimr0] (good) duration: 36.11s test_alloc_default.py::TestAlloc::test_mkql_not_increased[kikimr0] (good) duration: 35.36s test_alloc_default.py::TestAlloc::test_default_delta[kikimr0] (good) duration: 30.42s test_dc_local.py::TestAlloc::test_dc_locality[kikimr0] (good) duration: 27.27s test_alloc_default.py::TestAlloc::test_default_limits[kikimr0] (good) duration: 24.69s 1 more tests with 0.00s total duration are not listed. test_result_limits.py::TestResultLimits::test_quotas[kikimr0] test was not launched inside chunk. Info: Test run has exceeded 8.0G (8388608K) memory limit with 9.0G (9477300K) used. This may lead to test failure on the Autocheck/CI You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make pid rss ref pdirt 706597 44.8M 43.3M 6.7M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 706965 35.9M 24.4M 11.9M ├─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 715314 835M 810M 562M │ └─ ydb-tests-fq-mem_alloc --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest 892112 733M 16.5M 516M │ ├─ ydb-tests-fq-mem_alloc --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doct 892113 5.5G 5.5G 5.1G │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/mem_alloc/test-results/py3test/testing_out_stuff/test 709398 2.2G 2.2G 1.7G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/mem_alloc/test-results/py3test/testing_out_stuff/ydb_data_e Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/mem_alloc/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/mem_alloc/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/mem_alloc/test-results/py3test/testing_out_stuff/stderr [timeout] test_result_limits.py::TestResultLimits::test_large_row [default-linux-x86_64-release-asan] (79.93s) Killed by timeout (600 s) Log: 
/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/mem_alloc/test-results/py3test/testing_out_stuff/test_result_limits.py.TestResultLimits.test_large_row.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/mem_alloc/test-results/py3test/testing_out_stuff ------ TIMEOUT: 9 - GOOD, 1 - NOT_LAUNCHED, 1 - TIMEOUT, 1 - SKIPPED ydb/tests/fq/mem_alloc ydb/tests/fq/multi_plane [size:medium] nchunks:4 ------ [test_retry.py] chunk ran 2 tests (total:545.29s - recipes:19.75s test:521.14s recipes:4.21s) [fail] test_retry.py::TestRetry::test_fail_first[kikimr0] [default-linux-x86_64-release-asan] (36.95s) ydb/tests/fq/multi_plane/test_retry.py:67: in test_fail_first client.wait_query_status(query_id, fq.QueryMeta.RUNNING) ydb/tests/tools/fq_runner/fq_client.py:307: in wait_query_status return self.wait_query(query_id, timeout, statuses=statuses).query.meta.status ydb/tests/tools/fq_runner/fq_client.py:292: in wait_query assert time.time() < deadline and status not in final_statuses, \ E AssertionError: Query ptqueeb9dgvc5qn4t4ho is not in expected status (['RUNNING']) for already 8.750192880630493 seconds. Query status: ABORTED_BY_SYSTEM. Issues: [message: "ABORTED_BY_SYSTEM" E severity: 1 E ]. Transient issues: [message: "Query was aborted by system due to high failure rate" E severity: 1 E ] Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/multi_plane/test-results/py3test/testing_out_stuff/test_retry.py.TestRetry.test_fail_first.kikimr0.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/multi_plane/test-results/py3test/testing_out_stuff ------ FAIL: 5 - GOOD, 1 - FAIL ydb/tests/fq/multi_plane ydb/tests/fq/streaming_optimize [size:medium] nchunks:4 ------ [test_sql_streaming.py 0/4] chunk ran 7 tests (total:202.38s - recipes:1.35s test:199.31s recipes:1.08s) [fail] test_sql_streaming.py::test[suites-GroupByHop-default.txt] [default-linux-x86_64-release-asan] (69.90s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:81: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --fq-cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_i50qvfk9/fq_config.conf --as-cfg=- --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_i50qvfk9/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_i50qvfk9/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_i50qvfk9/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_i50qvfk9/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_i50qvfk9/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_i50qvfk9/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_i50qvfk9/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_i50qvfk9/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_i50qvfk9/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f1dfe5a83b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f1dfe5a1ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f1dfde2bd6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f1dfdf29464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f1dfdf29464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f1dfdf29464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f1dfde2ab64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f1dfded790a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f1dfddcfe0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f1dfddcfe0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f1dfddcfc50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f1dfdf26e0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f1dfdf26e0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHop-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-GroupByHopByStringKey-default.txt] [default-linux-x86_64-release-asan] (20.66s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:81: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: 
in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --fq-cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_2ic40q1d/fq_config.conf --as-cfg=- --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_2ic40q1d/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_2ic40q1d/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_2ic40q1d/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_2ic40q1d/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_2ic40q1d/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_2ic40q1d/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_2ic40q1d/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_2ic40q1d/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_2ic40q1d/topic_3.txt' has failed with code 100. 
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f452b0783b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f452b071ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f452a8fbd6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f452a9f9464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f452a9f9464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f452a9f9464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f452a8fab64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f452a9a790a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f452a89fe0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f452a89fe0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f452a89fc50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f452a9f6e0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f452a9f6e0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHopByStringKey-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-GroupByHopExprKey-default.txt] [default-linux-x86_64-release-asan] (18.84s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:81: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --fq-cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ys8me091/fq_config.conf --as-cfg=- --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ys8me091/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ys8me091/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ys8me091/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ys8me091/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ys8me091/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ys8me091/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ys8me091/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ys8me091/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ys8me091/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f2eb48883b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f2eb4881ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f2eb410bd6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f2eb4209464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f2eb4209464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f2eb4209464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f2eb410ab64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f2eb41b790a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f2eb40afe0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f2eb40afe0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f2eb40afc50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f2eb4206e0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f2eb4206e0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHopExprKey-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-GroupByHopListKey-default.txt] [default-linux-x86_64-release-asan] (17.31s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:81: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) 
library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --fq-cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_b7r8c_0_/fq_config.conf --as-cfg=- --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_b7r8c_0_/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_b7r8c_0_/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_b7r8c_0_/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_b7r8c_0_/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_b7r8c_0_/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_b7r8c_0_/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_b7r8c_0_/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_b7r8c_0_/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_b7r8c_0_/topic_3.txt' has failed with code 100. 
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f0838ae83b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f0838ae1ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f083836bd6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f0838469464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f0838469464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f0838469464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f083836ab64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f083841790a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f083830fe0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f083830fe0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f083830fc50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f0838466e0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f0838466e0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHopListKey-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-GroupByHopNoKey-default.txt] [default-linux-x86_64-release-asan] (20.96s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:81: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --fq-cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_vhtkq_17/fq_config.conf --as-cfg=- --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_vhtkq_17/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_vhtkq_17/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_vhtkq_17/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_vhtkq_17/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_vhtkq_17/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_vhtkq_17/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_vhtkq_17/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_vhtkq_17/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_vhtkq_17/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f619b3a83b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f619b3a1ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f619ac2bd6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f619ad29464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f619ad29464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f619ad29464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f619ac2ab64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f619acd790a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f619abcfe0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f619abcfe0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f619abcfc50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f619ad26e0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f619ad26e0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHopNoKey-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-GroupByHopPercentile-default.txt] [default-linux-x86_64-release-asan] (19.42s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:81: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) 
library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --fq-cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ap0iaw6u/fq_config.conf --as-cfg=- --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ap0iaw6u/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ap0iaw6u/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ap0iaw6u/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ap0iaw6u/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ap0iaw6u/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ap0iaw6u/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ap0iaw6u/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ap0iaw6u/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ap0iaw6u/topic_3.txt' has failed with code 100. 
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7fc54faa83b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7fc54faa1ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7fc54f32bd6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7fc54f429464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7fc54f429464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7fc54f429464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7fc54f32ab64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7fc54f3d790a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7fc54f2cfe0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7fc54f2cfe0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7fc54f2cfc50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7fc54f426e0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7fc54f426e0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHopPercentile-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-GroupByHopTimeExtractorUnusedColumns-default.txt] [default-linux-x86_64-release-asan] (25.18s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:81: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --fq-cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_n99smsev/fq_config.conf --as-cfg=- --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_n99smsev/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_n99smsev/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_n99smsev/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_n99smsev/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_n99smsev/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_n99smsev/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_n99smsev/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_n99smsev/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_n99smsev/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7fbc63f883b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7fbc63f81ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7fbc6380bd6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7fbc63909464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7fbc63909464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7fbc63909464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7fbc6380ab64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7fbc638b790a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7fbc637afe0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7fbc637afe0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7fbc637afc50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7fbc63906e0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7fbc63906e0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHopTimeExtractorUnusedColumns-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff ------ [test_sql_streaming.py 1/4] chunk ran 7 tests (total:177.66s - recipes:0.89s test:174.20s recipes:0.98s) [fail] test_sql_streaming.py::test[suites-GroupByHopWithDataWatermarks-default.txt] [default-linux-x86_64-release-asan] (48.85s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:81: in yql_exec proc_result = 
yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --fq-cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_12qt97x2/fq_config.conf --as-cfg=- --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_12qt97x2/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_12qt97x2/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_12qt97x2/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_12qt97x2/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_12qt97x2/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_12qt97x2/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_12qt97x2/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_12qt97x2/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_12qt97x2/topic_3.txt' has failed with code 100. 
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f4d548f83b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f4d548f1ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f4d5417bd6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f4d54279464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f4d54279464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f4d54279464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f4d5417ab64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f4d5422790a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f4d5411fe0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f4d5411fe0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f4d5411fc50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f4d54276e0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f4d54276e0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHopWithDataWatermarks-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-GroupByHoppingWindow-default.txt] [default-linux-x86_64-release-asan] (17.51s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:81: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --fq-cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jbbxa9sx/fq_config.conf --as-cfg=- --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jbbxa9sx/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jbbxa9sx/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jbbxa9sx/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jbbxa9sx/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jbbxa9sx/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jbbxa9sx/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jbbxa9sx/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jbbxa9sx/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jbbxa9sx/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7feaddb083b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7feaddb01ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7feadd38bd6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7feadd489464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7feadd489464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7feadd489464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7feadd38ab64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7feadd43790a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7feadd32fe0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7feadd32fe0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7feadd32fc50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7feadd486e0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7feadd486e0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHoppingWindow-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-GroupByHoppingWindowByStringKey-default.txt] [default-linux-x86_64-release-asan] (17.91s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:81: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) 
library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --fq-cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_fnu18qhp/fq_config.conf --as-cfg=- --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_fnu18qhp/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_fnu18qhp/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_fnu18qhp/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_fnu18qhp/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_fnu18qhp/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_fnu18qhp/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_fnu18qhp/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_fnu18qhp/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_fnu18qhp/topic_3.txt' has failed with code 100. 
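fqrun.py:81 builds the whole invocation as one flat string and splits it on whitespace, which works only because none of the generated paths contain spaces. To replay a single failing command outside the harness, the same pattern carries over to plain subprocess; the binary path and work dir below are placeholders standing in for the out/ tree of a concrete run, and the flag set is trimmed to the ones shown above:

    import subprocess

    FQRUN = "/path/to/out/ydb/tests/tools/fqrun/fqrun"   # placeholder
    WORKDIR = "/tmp/fqrun_repro"                         # placeholder

    cmd = (
        f"{FQRUN} --exclude-linked-udfs --action=explain"
        f" --fq-cfg={WORKDIR}/fq_config.conf --as-cfg=-"
        f" --result-file={WORKDIR}/results.txt --ast-file={WORKDIR}/ast.txt"
        f" --plan-file={WORKDIR}/plan.json --log-file={WORKDIR}/log.txt"
        f" --result-format=full-proto --canonical-output"
        f" --query={WORKDIR}/query_0.sql"
        f" --emulate-pq=test_topic_input@{WORKDIR}/topic_0.txt"
    )

    # Same shape as fqrun.py:81: split the flat string, don't raise on a
    # non-zero exit, and inspect stderr for the sanitizer report instead.
    proc = subprocess.run(cmd.strip().split(), capture_output=True, cwd=WORKDIR)
    print("exit code:", proc.returncode)      # the runs above exited with 100
    if b"AddressSanitizer" in proc.stderr:
        print("sanitizer report in stderr")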
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7fc8c40e83b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7fc8c40e1ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7fc8c396bd6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7fc8c3a69464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7fc8c3a69464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7fc8c3a69464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7fc8c396ab64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7fc8c3a1790a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7fc8c390fe0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7fc8c390fe0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7fc8c390fc50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7fc8c3a66e0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7fc8c3a66e0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHoppingWindowByStringKey-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-GroupByHoppingWindowExprKey-default.txt] [default-linux-x86_64-release-asan] (16.21s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:81: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --fq-cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__g1nu6ww/fq_config.conf --as-cfg=- --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__g1nu6ww/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__g1nu6ww/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__g1nu6ww/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__g1nu6ww/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__g1nu6ww/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__g1nu6ww/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__g1nu6ww/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__g1nu6ww/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__g1nu6ww/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7ff35cdf83b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7ff35cdf1ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7ff35c67bd6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7ff35c779464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7ff35c779464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7ff35c779464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7ff35c67ab64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7ff35c72790a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7ff35c61fe0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7ff35c61fe0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7ff35c61fc50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7ff35c776e0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7ff35c776e0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHoppingWindowExprKey-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-GroupByHoppingWindowListKey-default.txt] [default-linux-x86_64-release-asan] (20.22s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:81: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) 
library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --fq-cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_srsiyz0x/fq_config.conf --as-cfg=- --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_srsiyz0x/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_srsiyz0x/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_srsiyz0x/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_srsiyz0x/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_srsiyz0x/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_srsiyz0x/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_srsiyz0x/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_srsiyz0x/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_srsiyz0x/topic_3.txt' has failed with code 100. 
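Each run wires four emulated PQ topics in through repeated --emulate-pq=<topic>@<file> flags (test_topic_input through test_topic_output2 above). A parser for that flag format, written for this note rather than taken from fqrun's sources, so the split-at-first-@ rule is an assumption:

    import argparse

    def parse_emulate_pq(values):
        """Map each '<topic>@<path>' flag value to a (topic, path) entry."""
        topics = {}
        for value in values:
            topic, sep, path = value.partition("@")   # split at the first '@'
            if not (sep and topic and path):
                raise ValueError(f"expected '<topic>@<file>', got {value!r}")
            topics[topic] = path
        return topics

    parser = argparse.ArgumentParser()
    parser.add_argument("--emulate-pq", action="append", default=[])
    args = parser.parse_args([
        "--emulate-pq=test_topic_input@/tmp/topic_0.txt",
        "--emulate-pq=test_topic_output@/tmp/topic_2.txt",
    ])
    print(parse_emulate_pq(args.emulate_pq))
    # -> {'test_topic_input': '/tmp/topic_0.txt', 'test_topic_output': '/tmp/topic_2.txt'}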
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f64bb0583b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f64bb051ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f64ba8dbd6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f64ba9d9464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f64ba9d9464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f64ba9d9464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f64ba8dab64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f64ba98790a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f64ba87fe0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f64ba87fe0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f64ba87fc50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f64ba9d6e0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f64ba9d6e0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHoppingWindowListKey-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-GroupByHoppingWindowNoKey-default.txt] [default-linux-x86_64-release-asan] (23.92s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:81: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --fq-cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_svb69p0s/fq_config.conf --as-cfg=- --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_svb69p0s/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_svb69p0s/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_svb69p0s/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_svb69p0s/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_svb69p0s/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_svb69p0s/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_svb69p0s/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_svb69p0s/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_svb69p0s/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f5bbb9483b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f5bbb941ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f5bbb1cbd6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f5bbb2c9464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f5bbb2c9464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f5bbb2c9464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f5bbb1cab64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f5bbb27790a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f5bbb16fe0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f5bbb16fe0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f5bbb16fc50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f5bbb2c6e0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f5bbb2c6e0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHoppingWindowNoKey-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-GroupByHoppingWindowPercentile-default.txt] [default-linux-x86_64-release-asan] (24.38s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:81: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) 
library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --fq-cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7povm7n3/fq_config.conf --as-cfg=- --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7povm7n3/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7povm7n3/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7povm7n3/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7povm7n3/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7povm7n3/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7povm7n3/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7povm7n3/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7povm7n3/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7povm7n3/topic_3.txt' has failed with code 100. 
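Almost every failure above ends with an identical or near-identical LeakSanitizer summary, 423744 byte(s) leaked in 8243 allocation(s), which points at one shared interpreter start-up leak rather than leaks in the individual queries. A throwaway scraper to confirm that across a full log; the regex targets exactly the SUMMARY line format shown above, and the file name is a placeholder:

    import re
    from collections import Counter

    SUMMARY_RE = re.compile(
        r"SUMMARY: AddressSanitizer: (\d+) byte\(s\) leaked in (\d+) allocation\(s\)"
    )

    def leak_signatures(log_text):
        """Count distinct (bytes, allocations) leak summaries in a log."""
        return Counter((int(m.group(1)), int(m.group(2)))
                       for m in SUMMARY_RE.finditer(log_text))

    # "test_log.txt" is a placeholder; point it at any of the Log: files above.
    # Few distinct signatures repeated many times = one shared leak;
    # many unique signatures = per-test leaks.
    with open("test_log.txt", encoding="utf-8", errors="replace") as f:
        for (leaked, allocs), n in leak_signatures(f.read()).most_common():
            print(f"{n:3d}x  {leaked} byte(s) leaked in {allocs} allocation(s)")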
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7fa0cc3f83b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7fa0cc3f1ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7fa0cbc7bd6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7fa0cbd79464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7fa0cbd79464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7fa0cbd79464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7fa0cbc7ab64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7fa0cbd2790a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7fa0cbc1fe0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7fa0cbc1fe0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7fa0cbc1fc50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7fa0cbd76e0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7fa0cbd76e0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHoppingWindowPercentile-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff ------ [test_sql_streaming.py 2/4] chunk ran 7 tests (total:196.68s - recipes:1.60s test:193.79s recipes:0.73s) [fail] test_sql_streaming.py::test[suites-GroupByHoppingWindowTimeExtractorUnusedColumns-default.txt] [default-linux-x86_64-release-asan] (69.13s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:81: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --fq-cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_zu82xck6/fq_config.conf --as-cfg=- --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_zu82xck6/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_zu82xck6/ast.txt 
--plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_zu82xck6/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_zu82xck6/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_zu82xck6/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_zu82xck6/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_zu82xck6/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_zu82xck6/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_zu82xck6/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f61fed383b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f61fed31ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f61fe5bbd6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f61fe6b9464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f61fe6b9464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f61fe6b9464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f61fe5bab64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f61fe66790a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f61fe55fe0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f61fe55fe0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f61fe55fc50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f61fe6b6e0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f61fe6b6e0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423005 byte(s) leaked in 8227 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHoppingWindowTimeExtractorUnusedColumns-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-GroupByHoppingWithDataWatermarks-default.txt] [default-linux-x86_64-release-asan] (18.96s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") 
ydb/tests/fq/tools/fqrun.py:81: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --fq-cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_n6ypzply/fq_config.conf --as-cfg=- --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_n6ypzply/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_n6ypzply/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_n6ypzply/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_n6ypzply/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_n6ypzply/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_n6ypzply/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_n6ypzply/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_n6ypzply/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_n6ypzply/topic_3.txt' has failed with code 100. 
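The [fail] headers carry everything needed for triage: test id, platform tag, and wall-clock seconds. Pulling them into a slowest-first table is a one-regex job; this is ad-hoc log tooling for output in this shape, not part of ya or yatest:

    import re

    FAIL_RE = re.compile(
        r"\[fail\] (?P<test>\S+::\S+?) \[(?P<platform>[^\]]+)\] \((?P<secs>[\d.]+)s\)"
    )

    def failed_tests(log_text):
        """Return (seconds, test id, platform) per [fail] header, slowest first."""
        rows = [(float(m.group("secs")), m.group("test"), m.group("platform"))
                for m in FAIL_RE.finditer(log_text)]
        return sorted(rows, reverse=True)

    # "test_log.txt" is a placeholder for a saved copy of output like the above.
    with open("test_log.txt", encoding="utf-8", errors="replace") as f:
        for secs, test, platform in failed_tests(f.read()):
            print(f"{secs:8.2f}s  {test}  [{platform}]")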
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f69504283b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f6950421ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f694fcabd6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f694fda9464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f694fda9464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f694fda9464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f694fcaab64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f694fd5790a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f694fc4fe0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f694fc4fe0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f694fc4fc50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f694fda6e0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f694fda6e0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHoppingWithDataWatermarks-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-ReadTopic-default.txt] [default-linux-x86_64-release-asan] (18.02s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:81: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --fq-cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gitcn35u/fq_config.conf --as-cfg=- --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gitcn35u/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gitcn35u/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gitcn35u/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gitcn35u/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gitcn35u/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gitcn35u/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gitcn35u/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gitcn35u/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gitcn35u/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f9887c783b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f9887c71ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f98874fbd6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f98875f9464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f98875f9464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f98875f9464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f98874fab64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f98875a790a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f988749fe0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f988749fe0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f988749fc50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f98875f6e0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f98875f6e0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadTopic-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-ReadTopicGroupWriteToSolomon-default.txt] [default-linux-x86_64-release-asan] (16.60s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:81: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) 
library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --fq-cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__62q2t9f/fq_config.conf --as-cfg=- --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__62q2t9f/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__62q2t9f/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__62q2t9f/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__62q2t9f/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__62q2t9f/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__62q2t9f/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__62q2t9f/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__62q2t9f/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__62q2t9f/topic_3.txt' has failed with code 100. 
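All of the reported allocations sit under Python module import (PyImport_ImportModuleLevelObject down to __Pyx_Import in the embedded runtime's sitecustomize), i.e. interpreter start-up state that is never freed. If that is judged benign, LeakSanitizer accepts a suppressions file of leak:<pattern> lines matched against the reported stacks; whether suppressing here is appropriate is a policy call this log cannot settle, and the helper below is a local repro aid, not a yatest facility:

    import os
    import subprocess
    import tempfile

    # Each 'leak:<pattern>' line hides leak reports whose stacks mention a
    # matching function, source file, or module name.
    SUPPRESSIONS = "leak:__Pyx_Import\nleak:PyImport_ImportModuleLevelObject\n"

    def run_with_lsan_suppressions(cmd):
        """Run cmd with an LSAN suppressions file (local repro aid only)."""
        with tempfile.NamedTemporaryFile("w", suffix=".supp", delete=False) as f:
            f.write(SUPPRESSIONS)
            supp = f.name
        env = dict(os.environ)
        env["LSAN_OPTIONS"] = f"suppressions={supp}:print_suppressions=0"
        return subprocess.run(cmd, env=env, capture_output=True)

    # run_with_lsan_suppressions(["/path/to/fqrun", "--action=explain", "..."])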
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f83e8a783b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f83e8a71ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f83e82fbd6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f83e83f9464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f83e83f9464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f83e83f9464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f83e82fab64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f83e83a790a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f83e829fe0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f83e829fe0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f83e829fc50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f83e83f6e0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f83e83f6e0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadTopicGroupWriteToSolomon-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-ReadTopicWithMetadata-default.txt] [default-linux-x86_64-release-asan] (23.22s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:81: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --fq-cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_fgtkhh6g/fq_config.conf --as-cfg=- --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_fgtkhh6g/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_fgtkhh6g/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_fgtkhh6g/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_fgtkhh6g/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_fgtkhh6g/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_fgtkhh6g/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_fgtkhh6g/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_fgtkhh6g/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_fgtkhh6g/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f0fc7af83b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f0fc7af1ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f0fc737bd6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f0fc7479464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f0fc7479464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f0fc7479464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f0fc737ab64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f0fc742790a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f0fc731fe0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f0fc731fe0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f0fc731fc50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f0fc7476e0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f0fc7476e0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadTopicWithMetadata-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-ReadTopicWithMetadataInsideFilter-default.txt] [default-linux-x86_64-release-asan] (21.04s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:81: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) 
library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --fq-cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_fxpli6x6/fq_config.conf --as-cfg=- --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_fxpli6x6/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_fxpli6x6/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_fxpli6x6/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_fxpli6x6/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_fxpli6x6/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_fxpli6x6/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_fxpli6x6/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_fxpli6x6/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_fxpli6x6/topic_3.txt' has failed with code 100. 
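The "E Errors:" payloads are captured as Python bytes literals, so every newline of the original sanitizer report shows up as a literal \n. When reading a log in this shape, ast.literal_eval turns the blob back into multi-line text; the truncated b'...' prefixes above must be completed from the real log first, so the short literal below is only a stand-in:

    import ast

    def render_error_blob(blob_repr):
        """Decode a captured b'...' literal back into readable multi-line text."""
        data = ast.literal_eval(blob_repr)            # -> bytes with real newlines
        return data.decode("utf-8", errors="replace")

    blob = (r"b'SUMMARY: AddressSanitizer: 423744 byte(s) "
            r"leaked in 8243 allocation(s).\n'")
    print(render_error_blob(blob), end="")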
E   Errors:
E   b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7fcf3ae883b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7fcf3ae81ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7fcf3a70bd6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7fcf3a809464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7fcf3a809464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7fcf3a809464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7fcf3a70ab64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7fcf3a7b790a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7fcf3a6afe0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7fcf3a6afe0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7fcf3a6afc50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7fcf3a806e0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7fcf3a806e0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n'
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadTopicWithMetadataInsideFilter-default.txt.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff
[fail] test_sql_streaming.py::test[suites-ReadTopicWithMetadataNestedDeep-default.txt] [default-linux-x86_64-release-asan] (18.94s)
ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test
    result = fq_run.yql_exec(action="explain")
ydb/tests/fq/tools/fqrun.py:81: in yql_exec
    proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir)
library/python/testing/yatest_common/yatest/common/process.py:656: in execute
    res.wait(check_exit_code, timeout, on_timeout)
library/python/testing/yatest_common/yatest/common/process.py:411: in wait
    self._finalise(check_exit_code)
library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise
    self.verify_sanitize_errors()
library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors
    raise ExecutionError(self)
E   yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --fq-cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_9z59vatq/fq_config.conf --as-cfg=- --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_9z59vatq/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_9z59vatq/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_9z59vatq/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_9z59vatq/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_9z59vatq/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_9z59vatq/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_9z59vatq/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_9z59vatq/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_9z59vatq/topic_3.txt' has failed with code 100.
E   Errors:
E   b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f5f5b1383b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f5f5b131ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f5f5a9bbd6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f5f5aab9464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f5f5aab9464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f5f5aab9464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f5f5a9bab64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f5f5aa6790a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f5f5a95fe0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f5f5a95fe0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f5f5a95fc50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f5f5aab6e0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f5f5aab6e0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n'
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadTopicWithMetadataNestedDeep-default.txt.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff
------ [test_sql_streaming.py 3/4] chunk ran 7 tests (total:202.06s - recipes:1.47s test:198.62s recipes:1.03s)
[fail] test_sql_streaming.py::test[suites-ReadTopicWithMetadataWithFilter-default.txt] [default-linux-x86_64-release-asan] (71.37s)
ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test
    result = fq_run.yql_exec(action="explain")
ydb/tests/fq/tools/fqrun.py:81: in yql_exec
    proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir)
library/python/testing/yatest_common/yatest/common/process.py:656: in execute
    res.wait(check_exit_code, timeout, on_timeout)
library/python/testing/yatest_common/yatest/common/process.py:411: in wait
    self._finalise(check_exit_code)
library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise
    self.verify_sanitize_errors()
library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors
    raise ExecutionError(self)
E   yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --fq-cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_hev8danw/fq_config.conf --as-cfg=- --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_hev8danw/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_hev8danw/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_hev8danw/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_hev8danw/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_hev8danw/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_hev8danw/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_hev8danw/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_hev8danw/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_hev8danw/topic_3.txt' has failed with code 100.
E   Errors:
E   b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f2b24b683b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f2b24b61ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f2b243ebd6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f2b244e9464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f2b244e9464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f2b244e9464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f2b243eab64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f2b2449790a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f2b2438fe0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f2b2438fe0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f2b2438fc50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f2b244e6e0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f2b244e6e0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n'
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadTopicWithMetadataWithFilter-default.txt.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff
[fail] test_sql_streaming.py::test[suites-ReadTopicWithSchema-default.txt] [default-linux-x86_64-release-asan] (18.37s)
ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test
    result = fq_run.yql_exec(action="explain")
ydb/tests/fq/tools/fqrun.py:81: in yql_exec
    proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir)
library/python/testing/yatest_common/yatest/common/process.py:656: in execute
    res.wait(check_exit_code, timeout, on_timeout)
library/python/testing/yatest_common/yatest/common/process.py:411: in wait
    self._finalise(check_exit_code)
library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise
    self.verify_sanitize_errors()
library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors
    raise ExecutionError(self)
E   yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --fq-cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0hyg25_6/fq_config.conf --as-cfg=- --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0hyg25_6/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0hyg25_6/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0hyg25_6/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0hyg25_6/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0hyg25_6/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0hyg25_6/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0hyg25_6/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0hyg25_6/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0hyg25_6/topic_3.txt' has failed with code 100.
E   Errors:
E   b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f3ec2c983b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f3ec2c91ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f3ec251bd6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f3ec2619464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f3ec2619464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f3ec2619464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f3ec251ab64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f3ec25c790a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f3ec24bfe0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f3ec24bfe0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f3ec24bfc50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f3ec2616e0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f3ec2616e0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n'
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadTopicWithSchema-default.txt.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff
[fail] test_sql_streaming.py::test[suites-ReadTwoTopics-default.txt] [default-linux-x86_64-release-asan] (18.32s)
ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test
    result = fq_run.yql_exec(action="explain")
ydb/tests/fq/tools/fqrun.py:81: in yql_exec
    proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir)
library/python/testing/yatest_common/yatest/common/process.py:656: in execute
    res.wait(check_exit_code, timeout, on_timeout)
library/python/testing/yatest_common/yatest/common/process.py:411: in wait
    self._finalise(check_exit_code)
library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise
    self.verify_sanitize_errors()
library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors
    raise ExecutionError(self)
E   yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --fq-cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6kkekdc0/fq_config.conf --as-cfg=- --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6kkekdc0/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6kkekdc0/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6kkekdc0/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6kkekdc0/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6kkekdc0/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6kkekdc0/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6kkekdc0/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6kkekdc0/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6kkekdc0/topic_3.txt' has failed with code 100.
E   Errors:
E   b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f7cfdf583b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f7cfdf51ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f7cfd7dbd6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f7cfd8d9464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f7cfd8d9464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f7cfd8d9464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f7cfd7dab64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f7cfd88790a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f7cfd77fe0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f7cfd77fe0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f7cfd77fc50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f7cfd8d6e0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f7cfd8d6e0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n'
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadTwoTopics-default.txt.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff
[fail] test_sql_streaming.py::test[suites-ReadWriteSameTopic-default.txt] [default-linux-x86_64-release-asan] (19.10s)
ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test
    result = fq_run.yql_exec(action="explain")
ydb/tests/fq/tools/fqrun.py:81: in yql_exec
    proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir)
library/python/testing/yatest_common/yatest/common/process.py:656: in execute
    res.wait(check_exit_code, timeout, on_timeout)
library/python/testing/yatest_common/yatest/common/process.py:411: in wait
    self._finalise(check_exit_code)
library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise
    self.verify_sanitize_errors()
library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors
    raise ExecutionError(self)
E   yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --fq-cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_wq_sh59b/fq_config.conf --as-cfg=- --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_wq_sh59b/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_wq_sh59b/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_wq_sh59b/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_wq_sh59b/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_wq_sh59b/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_wq_sh59b/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_wq_sh59b/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_wq_sh59b/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_wq_sh59b/topic_3.txt' has failed with code 100.
E   Errors:
E   b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7fab8a9f83b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7fab8a9f1ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7fab8a27bd6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7fab8a379464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7fab8a379464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7fab8a379464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7fab8a27ab64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7fab8a32790a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7fab8a21fe0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7fab8a21fe0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7fab8a21fc50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7fab8a376e0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7fab8a376e0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n'
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadWriteSameTopic-default.txt.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff
[fail] test_sql_streaming.py::test[suites-ReadWriteTopic-default.txt] [default-linux-x86_64-release-asan] (24.27s)
ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test
    result = fq_run.yql_exec(action="explain")
ydb/tests/fq/tools/fqrun.py:81: in yql_exec
    proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir)
library/python/testing/yatest_common/yatest/common/process.py:656: in execute
    res.wait(check_exit_code, timeout, on_timeout)
library/python/testing/yatest_common/yatest/common/process.py:411: in wait
    self._finalise(check_exit_code)
library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise
    self.verify_sanitize_errors()
library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors
    raise ExecutionError(self)
E   yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --fq-cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_m5p7guwy/fq_config.conf --as-cfg=- --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_m5p7guwy/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_m5p7guwy/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_m5p7guwy/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_m5p7guwy/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_m5p7guwy/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_m5p7guwy/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_m5p7guwy/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_m5p7guwy/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_m5p7guwy/topic_3.txt' has failed with code 100.
E   Errors:
E   b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f6fea2383b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f6fea231ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f6fe9abbd6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f6fe9bb9464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f6fe9bb9464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f6fe9bb9464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f6fe9abab64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f6fe9b6790a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f6fe9a5fe0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f6fe9a5fe0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f6fe9a5fc50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f6fe9bb6e0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f6fe9bb6e0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n'
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadWriteTopic-default.txt.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff
[fail] test_sql_streaming.py::test[suites-ReadWriteTopicWithSchema-default.txt] [default-linux-x86_64-release-asan] (19.09s)
ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test
    result = fq_run.yql_exec(action="explain")
ydb/tests/fq/tools/fqrun.py:81: in yql_exec
    proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir)
library/python/testing/yatest_common/yatest/common/process.py:656: in execute
    res.wait(check_exit_code, timeout, on_timeout)
library/python/testing/yatest_common/yatest/common/process.py:411: in wait
    self._finalise(check_exit_code)
library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise
    self.verify_sanitize_errors()
library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors
    raise ExecutionError(self)
E   yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --fq-cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_q_hv2fl2/fq_config.conf --as-cfg=- --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_q_hv2fl2/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_q_hv2fl2/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_q_hv2fl2/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_q_hv2fl2/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_q_hv2fl2/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_q_hv2fl2/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_q_hv2fl2/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_q_hv2fl2/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_q_hv2fl2/topic_3.txt' has failed with code 100.
E   Errors:
E   b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f6e110483b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f6e11041ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f6e108cbd6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f6e109c9464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f6e109c9464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f6e109c9464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f6e108cab64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f6e1097790a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f6e1086fe0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f6e1086fe0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f6e1086fc50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f6e109c6e0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f6e109c6e0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n'
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadWriteTopicWithSchema-default.txt.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff
[fail] test_sql_streaming.py::test[suites-WriteTwoTopics-default.txt] [default-linux-x86_64-release-asan] (19.39s)
ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test
    result = fq_run.yql_exec(action="explain")
ydb/tests/fq/tools/fqrun.py:81: in yql_exec
    proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir)
library/python/testing/yatest_common/yatest/common/process.py:656: in execute
    res.wait(check_exit_code, timeout, on_timeout)
library/python/testing/yatest_common/yatest/common/process.py:411: in wait
    self._finalise(check_exit_code)
library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise
    self.verify_sanitize_errors()
library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors
    raise ExecutionError(self)
E   yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --fq-cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_yr9cbjqx/fq_config.conf --as-cfg=- --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_yr9cbjqx/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_yr9cbjqx/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_yr9cbjqx/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_yr9cbjqx/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_yr9cbjqx/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_yr9cbjqx/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_yr9cbjqx/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_yr9cbjqx/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_yr9cbjqx/topic_3.txt' has failed with code 100.
E   Errors:
E   b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f15915e83b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f15915e1ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f1590e6bd6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f1590f69464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f1590f69464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f1590f69464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f1590e6ab64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f1590f1790a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f1590e0fe0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f1590e0fe0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f1590e0fc50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f1590f66e0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f1590f66e0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n'
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-WriteTwoTopics-default.txt.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff
------ FAIL: 28 - FAIL ydb/tests/fq/streaming_optimize
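Note: all of the ydb/tests/fq/streaming_optimize failures above share one root cause. fqrun exits with code 100 because LeakSanitizer reports the same leak at Python import time in every run (423744 byte(s) in 8243 allocation(s), always ending in PyImport_ImportModuleLevelObject), and yatest fails the test even though it was launched with check_exit_code=False: per the traceback, _finalise() calls verify_sanitize_errors(), which raises ExecutionError when sanitizer output is present. A minimal sketch of that kind of detection, assuming the captured process output is available as text (the function and file names below are illustrative, not yatest's real API):

    import re

    def has_sanitizer_report(output: str) -> bool:
        # Sanitizer runtimes end their reports with a line like:
        #   SUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).
        return re.search(r"SUMMARY: \w+Sanitizer", output) is not None

    with open("stderr.txt") as f:  # hypothetical captured stderr of the child process
        if has_sanitizer_report(f.read()):
            raise RuntimeError("sanitizer report found; failing regardless of exit code")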
------ [test_discovery.py] chunk ran 3 tests (total:159.09s - setup:0.05s test:158.86s)
Info: Test run has exceeded 10.0G (10485760K) memory limit with 13.5G (14119960K) used. This may lead to test failure on the Autocheck/CI.
You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make.
    pid      rss     ref    pdirt
 823073    44.8M   44.8M    6.4M  test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
 823239    33.5M   21.8M    9.2M  └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
 823312     822M    829M    748M     └─ ydb-tests-functional-api --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --docte
 845570     1.4G    1.4G    980M        ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/te
 845579     1.4G    1.4G    965M        ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/te
 845614     1.4G    1.4G    993M        ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/te
 845615     1.4G    1.4G    968M        ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/te
 845617     1.4G    1.4G    984M        ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/te
 845618     1.4G    1.4G    986M        ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/te
 845620     1.4G    1.4G    976M        ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/te
 845621     1.4G    1.4G    978M        ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/te
 845623     1.4G    1.4G    968M        ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/te
 849734     418M      0b      0b        └─ ydbd --server=grpc://localhost:15896 admin blobstorage config invoke --proto=Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "dynamic_s
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/stderr
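Note: the report above comes from the ya test memory watcher, and the limit applies to the chunk's whole process tree: nine ydbd server daemons at roughly 1.4G RSS each dominate the 13.5G total. The remedy suggested by the log itself is to declare a larger requirement in the suite's ya.make, for example REQUIREMENTS(ram:16); the exact value is a judgment call, not something the log prescribes. For triage, the heaviest rows can be pulled out of a saved "pid rss ref pdirt" table mechanically; a small sketch, assuming the table was dumped to a text file:

    # Hypothetical triage helper for a "pid rss ref pdirt" tree like the one above.
    UNITS = {"K": 2 ** 10, "M": 2 ** 20, "G": 2 ** 30, "b": 1}

    def to_bytes(field: str) -> float:
        return float(field[:-1]) * UNITS[field[-1]] if field[-1] in UNITS else float(field)

    def heaviest(table_text: str, top: int = 3):
        rows = []
        for line in table_text.splitlines():
            parts = line.split()
            if parts and parts[0].isdigit():                 # data rows start with a pid
                rows.append((to_bytes(parts[1]), parts[0]))  # (rss in bytes, pid)
        return sorted(rows, reverse=True)[:top]

    with open("proc_tree.txt") as f:  # assumed dump of the table above
        print(heaviest(f.read()))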
ydb/tests/functional/hive [size:medium] nchunks:80
------ [test_drain.py 0/20] chunk ran 1 test (total:92.88s - setup:0.02s test:92.70s)
[fail] test_drain.py::TestHive::test_drain_on_stop [default-linux-x86_64-release-asan] (86.40s)
ydb/tests/functional/hive/test_drain.py:93: in test_drain_on_stop
    wait_tablets_are_active(
ydb/tests/library/common/delayed.py:151: in wait_tablets_are_active
    predicate(raise_error=True)
ydb/tests/library/common/delayed.py:141: in predicate
    raise AssertionError(
E   AssertionError:
E   ##############################
E   0 seconds passed, 75 tablet(s) are not active.
E   Inactive tablets are (first 10 entries): (72075186224037927: 4) (72075186224037935: 4) (72075186224037938: None) (72075186224037949: None) (72075186224037952: 4) (72075186224037960: 4) (72075186224037966: 4) (72075186224037974: None) (72075186224037976: 4) (72075186224037984: 4). Additional info is empty
E   ##############################
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/hive/test-results/py3test/testing_out_stuff/test_drain.py.TestHive.test_drain_on_stop.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/hive/test-results/py3test/testing_out_stuff
------ FAIL: 6 - GOOD, 1 - FAIL ydb/tests/functional/hive
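Note: "0 seconds passed, 75 tablet(s) are not active" means the check in ydb/tests/library/common/delayed.py found most tablets inactive and, called with raise_error=True, converted that into the AssertionError above rather than waiting longer. The general shape of such a poll-then-raise helper, as a sketch with assumed names and timings (not the real implementation in delayed.py):

    import time

    def wait_until(predicate, timeout_s=60.0, step_s=1.0):
        """Poll predicate() until truthy; on timeout raise with the elapsed time."""
        started = time.monotonic()
        while time.monotonic() - started < timeout_s:
            if predicate():
                return
            time.sleep(step_s)
        raise AssertionError("%d seconds passed, condition still not met"
                             % int(time.monotonic() - started))

    # usage sketch: wait_until(lambda: count_inactive_tablets() == 0)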
ydb/tests/functional/postgresql [size:medium]
------ sole chunk ran 14 tests (total:85.35s - test:85.22s)
[fail] test_postgres.py::TestPostgresSuite::test_postgres_suite[float8] [default-linux-x86_64-release-asan] (37.67s)
teardown failed:
ydb/tests/functional/postgresql/test_postgres.py:77: in teardown_class
    cls.cluster.stop()
ydb/tests/library/harness/kikimr_runner.py:494: in stop
    raise daemon.SeveralDaemonErrors(saved_exceptions)
E   ydb.tests.library.harness.daemon.SeveralDaemonErrors: Daemon failed with message: Bad exit_code..
E   Process exit_code = 100.
E   Stdout file name:
E   /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/postgresql/test-results/py3test/testing_out_stuff/test_postgres.py.TestPostgresSuite.test_postgres_suite.horology/cluster/node_1/stdout
E   Stderr file name:
E   /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/postgresql/test-results/py3test/testing_out_stuff/test_postgres.py.TestPostgresSuite.test_postgres_suite.horology/cluster/node_1/stderr
E   Stderr content:
E
E   GRpc memory quota was set but disabled due to issues with grpc quoter, to enable it use EnableGRpcMemoryQuota option
E   Current KQP shutdown state: spent 0 seconds, not started yet
E   warning: address range table at offset 0x10c0 has a premature terminator entry at offset 0x10d0
E
E   =================================================================
E   ==699034==ERROR: LeakSanitizer: detected memory leaks
E
E   Indirect leak of 26544 byte(s) in 7 object(s) allocated from:
E   #0 0x1d6b812d in operator new(unsigned long) /-S/contrib/libs/clang18-rt/lib/asan/asan_new_delete.cpp:86:3
E   #1 0x468efbcf in MakeIntrusive, NYql::TTypeAnnotationContext &> /-S/util/generic/ptr.h:818:12
E   #2 0x468efbcf in NYql::TYtState::TYtState(NYql::TTypeAnnotationContext*) /-S/yt/yql/providers/yt/provider/yql_yt_provider.h:102:25
E   #3 0x468e2099 in MakeIntrusive, NYql::TTypeAnnotationContext *> /-S/util/generic/ptr.h:818:16
E   #4 0x468e2099 in NYql::CreateYtNativeState(TIntrusivePtr>, TBasicString> const&, TBasicString> const&, NYql::TYtGatewayConfig const*, TIntrusivePtr>, std::__y1::shared_ptr const&, std::__y1::shared_ptr> const, TIntrusivePtr>>, TBasicString>, THash>>, TSelect1st, TEqualTo>>, std::__y1::allocator>>>::reserve(unsigned long) /-S/util/generic/hash_table.h:1330:13
E   #7 0x466bb75b in insert_unique >, TIntrusivePtr > > > /-S/util/generic/hash_table.h:679:9
E   #8 0x466bb75b in insert /-S/util/generic/hash.h:153:20
E   #9 0x466bb75b in NYql::NCommon::TSettingDispatcher::TSettingHandlerImpl& NYql::NCommon::TSettingDispatcher::AddSetting(TBasicString> const&, NYql::NCommon::TConfSetting&) /-S/yql/essentials/providers/common/config/yql_dispatch.h:344:23
E   #10 0x4669ee09 in NYql::TYtConfiguration::TYtConfiguration(NYql::TTypeAnnotationContext&) /-S/yt/yql/providers/yt/common/yql_yt_settings.cpp:466:5
E   #11 0x468efbe0 in TYtVersionedConfiguration /-S/yt/yql/providers/yt/common/yql_yt_settings.h:372:11
E   #12 0x468efbe0 in MakeIntrusive, NYql::TTypeAnnotationContext &> /-S/util/generic/ptr.h:818:16
E   #13 0x468efbe0 in NYql::TYtState::TYtState(NYql::TTypeAnnotationContext*) /-S/yt/yql/providers/yt/provider/yql_yt_provider.h:102:25
E   #14 0x468e2099 in MakeIntrusive, NYql::TTypeAnnotationContext *> /-S/util/generic/ptr.h:818:16
E   #15 0x468e2099 in NYql::CreateYtNativeState(TIntrusivePtr>, TBas...
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/postgresql/test-results/py3test/testing_out_stuff/test_postgres.py.TestPostgresSuite.test_postgres_suite.float8.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/postgresql/test-results/py3test/testing_out_stuff
------ FAIL: 13 - GOOD, 1 - FAIL ydb/tests/functional/postgresql
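Note: here the test body passed and the failure happened in teardown: cluster.stop() saw ydbd exit with code 100 because LeakSanitizer flagged indirect leaks in YT provider setup (TYtState / TYtConfiguration in yql_yt_provider.h and yql_yt_settings.cpp). Until such a leak is fixed, the standard sanitizer-runtime mechanism for silencing a known report is an LSAN suppressions file passed via LSAN_OPTIONS; a sketch with an assumed file name and pattern (whether suppressing is acceptable here is a policy question for the suite owners):

    import os
    import subprocess

    # lsan.supp would contain match patterns, one per line, e.g.:
    #   leak:NYql::CreateYtNativeState
    env = dict(os.environ, LSAN_OPTIONS="suppressions=lsan.supp:print_suppressions=0")
    subprocess.run(["./ydbd", "server", "--suppress-version-check"], env=env, check=True)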
ydb/tests/functional/serverless [size:medium] nchunks:20
------ [test_serverless.py 4/10] chunk ran 2 tests (total:256.31s - test:255.24s)
[fail] test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false] [default-linux-x86_64-release-asan] (52.20s)
ydb/tests/functional/serverless/test_serverless.py:450: in test_database_with_disk_quotas
    IOLoop.current().run_sync(lambda: async_write_key(path, 0, 'test', ignore_out_of_space=False))
contrib/python/tornado/tornado-4/tornado/ioloop.py:458: in run_sync
    return future_cell[0].result()
contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result
    raise_exc_info(self._exc_info)
<string>:4: in raise_exc_info
    ???
contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run
    yielded = self.gen.throw(*exc_info)
ydb/tests/functional/serverless/test_serverless.py:347: in wrapped
    res = yield func(*args, **kwargs)
contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run
    value = future.result()
contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result
    raise_exc_info(self._exc_info)
<string>:4: in raise_exc_info
    ???
contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run
    yielded = self.gen.throw(*exc_info)
ydb/tests/functional/serverless/test_serverless.py:367: in async_write_key
    yield tx.async_execute(
contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run
    value = future.result()
contrib/tools/python3/Lib/concurrent/futures/_base.py:449: in result
    return self.__get_result()
contrib/tools/python3/Lib/concurrent/futures/_base.py:401: in __get_result
    raise self._exception
contrib/python/ydb/py3/ydb/connection.py:105: in _on_response_callback
    response = response if wrap_result is None else wrap_result(rpc_state, response, *wrap_args)
contrib/python/ydb/py3/ydb/_session_impl.py:20: in decorator
    return func(rpc_state, response_pb, session_state, *args, **kwargs)
contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:9: in decorator
    return func(rpc_state, response_pb, session_state, tx_state, *args, **kwargs)
contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:22: in decorator
    return func(rpc_state, response_pb, session_state, tx_state, query, *args, **kwargs)
contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:165: in wrap_result_and_tx_id
    issues._process_response(response_pb.operation)
contrib/python/ydb/py3/ydb/issues.py:225: in _process_response
    raise exc_obj(_format_response(response_proto), response_proto.issues)
E   ydb.issues.Unavailable: message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--false_/dirA0/table`." issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050)
During handling of the above exception, another exception occurred:
ydb/tests/functional/serverless/test_serverless.py:449: in test_database_with_disk_quotas
    with pytest.raises(ydb.Unavailable, match=r'.*DISK_SPACE_EXHAUSTED.*'):
E   AssertionError: Regex pattern did not match.
E   Regex: '.*DISK_SPACE_EXHAUSTED.*'
E   Input: 'message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--false_/dirA0/table`." issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050)'
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless.py.test_database_with_disk_quotas.enable_alter_database_create_hive_first--false.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff
[fail] test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true] [default-linux-x86_64-release-asan] (195.13s)
ydb/tests/functional/serverless/test_serverless.py:450: in test_database_with_disk_quotas
    IOLoop.current().run_sync(lambda: async_write_key(path, 0, 'test', ignore_out_of_space=False))
contrib/python/tornado/tornado-4/tornado/ioloop.py:458: in run_sync
    return future_cell[0].result()
contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result
    raise_exc_info(self._exc_info)
<string>:4: in raise_exc_info
    ???
contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run
    yielded = self.gen.throw(*exc_info)
ydb/tests/functional/serverless/test_serverless.py:347: in wrapped
    res = yield func(*args, **kwargs)
contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run
    value = future.result()
contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result
    raise_exc_info(self._exc_info)
<string>:4: in raise_exc_info
    ???
contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run
    yielded = self.gen.throw(*exc_info)
ydb/tests/functional/serverless/test_serverless.py:367: in async_write_key
    yield tx.async_execute(
contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run
    value = future.result()
contrib/tools/python3/Lib/concurrent/futures/_base.py:449: in result
    return self.__get_result()
contrib/tools/python3/Lib/concurrent/futures/_base.py:401: in __get_result
    raise self._exception
contrib/python/ydb/py3/ydb/connection.py:105: in _on_response_callback
    response = response if wrap_result is None else wrap_result(rpc_state, response, *wrap_args)
contrib/python/ydb/py3/ydb/_session_impl.py:20: in decorator
    return func(rpc_state, response_pb, session_state, *args, **kwargs)
contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:9: in decorator
    return func(rpc_state, response_pb, session_state, tx_state, *args, **kwargs)
contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:22: in decorator
    return func(rpc_state, response_pb, session_state, tx_state, query, *args, **kwargs)
contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:165: in wrap_result_and_tx_id
    issues._process_response(response_pb.operation)
contrib/python/ydb/py3/ydb/issues.py:225: in _process_response
    raise exc_obj(_format_response(response_proto), response_proto.issues)
E   ydb.issues.Unavailable: message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--true_/dirA0/table`." issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050)
During handling of the above exception, another exception occurred:
ydb/tests/functional/serverless/test_serverless.py:449: in test_database_with_disk_quotas
    with pytest.raises(ydb.Unavailable, match=r'.*DISK_SPACE_EXHAUSTED.*'):
E   AssertionError: Regex pattern did not match.
E   Regex: '.*DISK_SPACE_EXHAUSTED.*'
E   Input: 'message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--true_/dirA0/table`." issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050)'
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless.py.test_database_with_disk_quotas.enable_alter_database_create_hive_first--true.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff
------ FAIL: 20 - GOOD, 2 - FAIL ydb/tests/functional/serverless
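Note: in both quota tests the write does fail as intended; the test then fails only because pytest.raises matches r'.*DISK_SPACE_EXHAUSTED.*' against the rendered exception text, and the server reply spells the condition as "Disk space exhausted" with issue_code 2033 rather than the literal status name. Purely as an illustration of the mismatch (not a proposed patch to the suite), a pattern keyed to the observed message would match:

    import pytest
    import ydb

    def write_until_quota_exceeded():
        # Stand-in for async_write_key(...); raises the way the traceback above shows:
        # exc_obj(_format_response(response_proto), response_proto.issues)
        raise ydb.issues.Unavailable(
            'message: "Disk space exhausted. Table `/Root/quoted_serverless/...`." '
            'issue_code: 2033 severity: 1 (server_code: 400050)', []
        )

    with pytest.raises(ydb.issues.Unavailable, match=r"Disk space exhausted"):
        write_until_quota_exceeded()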
ydb/tests/functional/sqs/cloud [size:medium] nchunks:40
------ [34/40] chunk ran 2 tests (total:118.22s - test:117.86s)
[fail] test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v0] [default-linux-x86_64-release-asan] (89.55s)
ydb/tests/functional/sqs/cloud/test_yandex_cloud_mode.py:829: in test_yc_events_processor
    assert len(lines) >= 2, "Got only %s event lines after all attempts" % len(lines)
E   AssertionError: Got only 0 event lines after all attempts
E   assert 0 >= 2
E    +  where 0 = len([])
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_yc_events_processor.tables_format_v0.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff
------ FAIL: 75 - GOOD, 1 - FAIL ydb/tests/functional/sqs/cloud
------ sole chunk ran 1 test (total:118.77s - setup:0.01s test:118.60s)
Info: Test run has exceeded 8.0G (8388608K) memory limit with 13.3G (13977244K) used. This may lead to test failure on the Autocheck/CI.
You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make.
    pid      rss     ref    pdirt
 755210    44.8M   38.1M    6.3M  test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
 755385    33.5M   20.8M    9.1M  └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
 755421     781M    762M    702M     └─ functional-sqs-merge_split_common_table-std --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -
 759198     1.6G    1.6G    1.1G        ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-results
 759201     1.7G    1.6G    1.1G        ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-results
 759216     1.6G    1.5G    1.1G        ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-results
 759241     1.6G    1.6G    1.1G        ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-results
 759255     1.6G    1.6G    1.1G        ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-results
 759260     1.6G    1.5G    1.1G        ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-results
 759265     1.6G    1.6G    1.1G        ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-results
 759279     1.6G    1.6G    1.1G        └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-results
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-results/py3test/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-results/py3test/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-results/py3test/testing_out_stuff/stderr
ydb/tests/functional/tpc/medium [size:medium] nchunks:2
------ [test_tpch.py] chunk ran 22 tests (total:607.71s - setup:0.07s test:600.05s)
Chunk exceeded 600s timeout and was killed
List of the tests involved in the launch:
    test_tpch.py::TestTpchS1::test_tpch[1] (good) duration: 232.58s
    test_tpch.py::TestTpchS1::test_tpch[9] (good) duration: 61.70s
    test_tpch.py::TestTpchS1::test_tpch[7] (good) duration: 52.31s
    test_tpch.py::TestTpchS1::test_tpch[8] (good) duration: 43.19s
    test_tpch.py::TestTpchS1::test_tpch[5] (good) duration: 35.07s
    test_tpch.py::TestTpchS1::test_tpch[3] (good) duration: 33.27s
    test_tpch.py::TestTpchS1::test_tpch[10] (good) duration: 32.50s
    test_tpch.py::TestTpchS1::test_tpch[4] (good) duration: 32.04s
    test_tpch.py::TestTpchS1::test_tpch[2] (good) duration: 19.84s
    test_tpch.py::TestTpchS1::test_tpch[12] (fail) duration: 19.17s
    2 more tests with 33.99s total duration are not listed.
    10 tests were not launched inside chunk.
Killed by timeout (600 s) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tpc/medium/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tpc/medium/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tpc/medium/test-results/py3test/testing_out_stuff/stderr [fail] test_tpch.py::TestTpchS1::test_tpch[12] [default-linux-x86_64-release-asan] (19.17s) ydb/tests/olap/load/lib/tpch.py:48: in test_tpch self.run_workload_test(self._get_path(), query_num) ydb/tests/olap/load/lib/conftest.py:298: in run_workload_test self.process_query_result(result, query_num, qparams.iterations, True) ydb/tests/olap/load/lib/conftest.py:245: in process_query_result raise exc ydb/tests/olap/lib/ydb_cli.py:283: in process process = yatest.common.process.execute(self._get_cmd(), check_exit_code=False) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:400: in wait _wait() library/python/testing/yatest_common/yatest/common/process.py:335: in _wait pid, sts, rusage = os.wait4(self._process.pid, 0) library/python/pytest/plugins/ya.py:347: in _graceful_shutdown _graceful_shutdown_on_log(not capman.is_globally_capturing()) library/python/pytest/plugins/ya.py:321: in _graceful_shutdown_on_log pytest.exit("Graceful shutdown requested") E Failed: Graceful shutdown requested Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tpc/medium/test-results/py3test/testing_out_stuff/test_tpch.py.TestTpchS1.test_tpch.12.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tpc/medium/test-results/py3test/testing_out_stuff ------ TIMEOUT: 54 - GOOD, 1 - FAIL, 10 - NOT_LAUNCHED ydb/tests/functional/tpc/medium ydb/tests/olap/column_family/compression [size:medium] ------ sole chunk ran 2 tests (total:632.88s - test:600.10s) Chunk exceeded 600s timeout, failed to shutdown gracefully in 30s and was terminated using SIGQUIT signal List of the tests involved in the launch: alter_compression.py::TestAlterCompression::test_all_supported_compression (timeout) duration: 627.09s alter_compression.py::TestAlterCompression::test_availability_data test was not launched inside chunk. 
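Several chunks in this run die at the same 600 s budget ("Chunk exceeded 600s timeout and was killed"). That budget comes from the test's declared size/timeout in its ya.make, so the usual remedies are raising the limit or splitting the chunk. A sketch, assuming the suite uses the standard macros (values illustrative; the exact splitting macros vary by suite):

    PY3TEST()
    # ... sources and deps unchanged ...
    SIZE(MEDIUM)
    # Lift the 600 s chunk budget seen in this run.
    TIMEOUT(1800)
    # Or spread the tests across more chunks instead.
    FORK_SUBTESTS()
    END()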
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff/stderr [timeout] alter_compression.py::TestAlterCompression::test_all_supported_compression [default-linux-x86_64-release-asan] (627.09s) Killed by timeout (600 s) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff/alter_compression.py.TestAlterCompression.test_all_supported_compression.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff ------ TIMEOUT: 1 - NOT_LAUNCHED, 1 - TIMEOUT ydb/tests/olap/column_family/compression ydb/tests/olap/oom [size:medium] ------ [overlapping_portions.py] chunk ran 1 test (total:76.00s - test:65.99s) [fail] overlapping_portions.py::TestOverlappingPortions::test [default-linux-x86_64-release-asan] (57.46s) ydb/tests/olap/oom/overlapping_portions.py:103: in test with pytest.raises(ydb.issues.GenericError, match=r'.*cannot allocate memory.*'): E Failed: DID NOT RAISE Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/oom/test-results/py3test/testing_out_stuff/overlapping_portions.py.TestOverlappingPortions.test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/oom/test-results/py3test/testing_out_stuff ------ FAIL: 1 - FAIL ydb/tests/olap/oom ydb/tests/olap/s3_import [size:medium] ------ sole chunk ran 1 test (total:629.10s - test:600.04s) Chunk exceeded 600s timeout and was killed List of the tests involved in the launch: test_tpch_import.py::TestS3TpchImport::test_import_and_export (timeout) duration: 624.02s Info: Test run has exceeded 8.0G (8388608K) memory limit with 8.7G (9167764K) used. 
This may lead to test failure on the Autocheck/CI
You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make
pid rss ref pdirt
710684 44.8M 40.4M 6.5M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
711012 40.1M 27.6M 15.8M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
711066 674M 645M 599M └─ ydb-tests-olap-s3_import --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --docte
716101 7.8G 7.5G 7.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff/te
721989 520M 514M 487M └─ moto_server s3 --port 3295
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/s3_import/test-results/py3test/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff/stderr
[timeout] test_tpch_import.py::TestS3TpchImport::test_import_and_export [default-linux-x86_64-release-asan] (624.02s)
Killed by timeout (600 s)
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff/test_tpch_import.py.TestS3TpchImport.test_import_and_export.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff
------ TIMEOUT: 1 - TIMEOUT ydb/tests/olap/s3_import
ydb/tests/olap/scenario [size:medium]
------ sole chunk ran 9 tests (total:632.49s - test:600.08s)
Chunk exceeded 600s timeout and was killed
List of the tests involved in the launch:
test_alter_compression.py::TestAlterCompression::test[alter_compression] (good) duration: 467.53s
test_alter_tiering.py::TestAlterTiering::test[many_tables] (timeout) duration: 156.98s
7 tests were not launched inside chunk.
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/stderr [timeout] test_alter_tiering.py::TestAlterTiering::test[many_tables] [default-linux-x86_64-release-asan] (156.98s) Killed by timeout (600 s) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/test_alter_tiering.py.TestAlterTiering.test.many_tables.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff ------ TIMEOUT: 1 - GOOD, 7 - NOT_LAUNCHED, 1 - TIMEOUT ydb/tests/olap/scenario ydb/tests/olap/ttl_tiering [size:medium] nchunks:6 ------ [data_migration_when_alter_ttl.py] chunk ran 1 test (total:612.16s - test:600.09s) Chunk exceeded 600s timeout and was killed List of the tests involved in the launch: data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test (timeout) duration: 605.92s Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/stderr [timeout] data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test [default-linux-x86_64-release-asan] (605.92s) Killed by timeout (600 s) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/data_migration_when_alter_ttl.py.TestDataMigrationWhenAlterTtl.test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff ------ [ttl_delete_s3.py] chunk ran 3 tests (total:618.04s - setup:0.02s test:600.17s) Chunk exceeded 600s timeout and was killed List of the tests involved in the launch: ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change (timeout) duration: 605.31s 2 tests were not launched inside chunk. 
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/stderr [timeout] ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change [default-linux-x86_64-release-asan] (605.31s) Killed by timeout (600 s) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/ttl_delete_s3.py.TestDeleteS3Ttl.test_data_unchanged_after_ttl_change.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff ------ TIMEOUT: 3 - GOOD, 2 - NOT_LAUNCHED, 2 - TIMEOUT ydb/tests/olap/ttl_tiering ydb/tests/stress/log/tests [size:medium] ------ sole chunk ran 2 tests (total:359.87s - setup:0.01s test:359.81s) [fail] test_workload.py::TestYdbLogWorkload::test[column] [default-linux-x86_64-release-asan] (194.45s) teardown failed: ydb/tests/stress/log/tests/test_workload.py:41: in teardown_class cls.cluster.stop() ydb/tests/library/harness/kikimr_runner.py:494: in stop raise daemon.SeveralDaemonErrors(saved_exceptions) E ydb.tests.library.harness.daemon.SeveralDaemonErrors: Daemon failed with message: Bad exit_code.. E Process exit_code = 100. E Stdout file name: E /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/log/tests/test-results/py3test/testing_out_stuff/test_workload.py.TestYdbLogWorkload.test.row/cluster/node_1/stdout E Stderr file name: E /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/log/tests/test-results/py3test/testing_out_stuff/test_workload.py.TestYdbLogWorkload.test.row/cluster/node_1/stderr E Stderr content: E E GRpc memory quota was set but disabled due to issues with grpc quoter, to enable it use EnableGRpcMemoryQuota option E Current KQP shutdown state: spent 0 seconds, not started yet E warning: address range table at offset 0x10c0 has a premature terminator entry at offset 0x10d0 E E ================================================================= E ==915345==ERROR: LeakSanitizer: detected memory leaks E E Indirect leak of 87216 byte(s) in 23 object(s) allocated from: E #0 0x1d6b812d in operator new(unsigned long) /-S/contrib/libs/clang18-rt/lib/asan/asan_new_delete.cpp:86:3 E #1 0x468efbcf in MakeIntrusive, NYql::TTypeAnnotationContext &> /-S/util/generic/ptr.h:818:12 E #2 0x468efbcf in NYql::TYtState::TYtState(NYql::TTypeAnnotationContext*) /-S/yt/yql/providers/yt/provider/yql_yt_provider.h:102:25 E #3 0x468e2099 in MakeIntrusive, NYql::TTypeAnnotationContext *> /-S/util/generic/ptr.h:818:16 E #4 0x468e2099 in NYql::CreateYtNativeState(TIntrusivePtr>, TBasicString> const&, TBasicString> const&, NYql::TYtGatewayConfig const*, TIntrusivePtr>, std::__y1::shared_ptr const&, std::__y1::shared_ptr const&) /-S/yt/yql/providers/yt/provider ..[snippet truncated].. 
llocate /-S/contrib/libs/cxxsupp/libcxx/include/new:289:10 E #3 0x466d2866 in allocate /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator.h:118:32 E #4 0x466d2866 in initialize_dynamic /-S/util/generic/hash_table.h:239:35 E #5 0x466d2866 in initialize_buckets_dynamic /-S/util/generic/hash_table.h:912:17 E #6 0x466d2866 in THashTable> const, TIntrusivePtr>>, TBasicString>, THash>>, TSelect1st, TEqualTo>>, std::__y1::allocator>>>::reserve(unsigned long) /-S/util/generic/hash_table.h:1330:13 E #7 0x466bb75b in insert_unique >, TIntrusivePtr > > > /-S/util/generic/hash_table.h:679:9 E #8 0x466bb75b in insert /-S/util/generic/hash.h:153:20 E #9 0x466bb75b in NYql::NCommon::TSettingDispatcher::TSettingHandlerImpl& NYql::NCommon::TSettingDispatcher::AddSetting(TBasicString> const&, NYql::NCommon::TConfSetting&) /-S/yql/essentials/providers/common/config/yql_dispatch.h:344:23 E #10 0x4669ee09 in NYql::TYtConfiguration::TYtConfiguration(NYql::TTypeAnnotationContext&) /-S/yt/yql/providers/yt/common/yql_yt_settings.cpp:466:5 E #11 0x468efbe0 in TYtVersionedConfiguration /-S/yt/yql/providers/yt/common/yql_yt_settings.h:372:11 E #12 0x468efbe0 in MakeIntrusive, NYql::TTypeAnnotationContext &> /-S/util/generic/ptr.h:818:16 E #13 0x468efbe0 in NYql::TYtState::TYtState(NYql::TTypeAnnotationContext*) /-S/yt/yql/providers/yt/provider/yql_yt_provider.h:102:25 E #14 0x468e2099 in MakeIntrusive, NYql::TTypeAnnotationContext *> /-S/util/generic/ptr.h:818:16 E #15 0x468e2099 in NYql::CreateYtNativeState(TIntrusivePtr>, TBasicString> const&... Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/log/tests/test-results/py3test/testing_out_stuff/test_workload.py.TestYdbLogWorkload.test.column.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/log/tests/test-results/py3test/testing_out_stuff ------ FAIL: 1 - GOOD, 1 - FAIL ydb/tests/stress/log/tests ------ [test_disk.py 0/10] chunk ran 1 test (total:54.82s - test:54.73s) Info: Test run has exceeded 8.0G (8388608K) memory limit with 13.1G (13757904K) used. 
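If the LeakSanitizer report above (the indirect leaks rooted in NYql::TYtState construction) turns out to be a known or accepted leak rather than a regression, LSAN supports a suppressions file; note this mutes the report instead of fixing the underlying retain cycle, and whether that is appropriate here is a project decision. A sketch (file name and binary path illustrative; patterns match any frame in the reported stack):

    # lsan.supp -- one pattern per line
    leak:NYql::TYtState
    leak:NYql::CreateYtNativeState

    # Run the test binary with the suppressions applied:
    LSAN_OPTIONS=suppressions=$PWD/lsan.supp ./your-test-binary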
This may lead to test failure on the Autocheck/CI
You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make
pid rss ref pdirt
283051 44.9M 44.8M 6.4M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
284009 33.2M 21.5M 8.9M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
284098 755M 757M 675M └─ ydb-tests-tools-nemesis-ut --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doc
292734 1.4G 1.4G 991M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/
292739 1.3G 1.3G 848M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/
313319 1.3G 0b 0b │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stu
292788 1.3G 1.3G 905M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/
292817 1.2G 1.2G 828M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/
292833 1.2G 1.2G 827M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/
292837 1.3G 0b 0b ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/
313322 1.3G 0b 0b │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stu
292858 1.3G 1.3G 917M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/
292879 1.3G 0b 0b ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/
292882 1.3G 0b 0b └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/stderr
------ [test_tablet.py 0/10] chunk ran 1 test (total:102.13s - test:101.95s)
Info: Test run has exceeded 8.0G (8388608K) memory limit with 13.1G (13687352K) used.
This may lead to test failure on the Autocheck/CI
You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make
pid rss ref pdirt
278564 44.8M 44.8M 6.3M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
279469 33.9M 22.1M 9.5M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
279563 783M 762M 704M └─ ydb-tests-tools-nemesis-ut --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doc
289529 1.7G 1.6G 1.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/
289536 1.5G 1.5G 1.1G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/
324435 1.5G 0b 0b │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stu
289542 1.5G 730M 278M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/
324449 1.5G 0b 0b │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stu
289550 1.6G 0b 0b ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/
324438 1.6G 0b 0b │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stu
289557 1.5G 1.5G 1.1G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/
289558 1.5G 0b 0b ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/
324578 1.5G 0b 0b │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stu
289559 1.4G 0b 0b ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/
324523 1.4G 0b 0b │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stu
289562 1.5G 0b 0b └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/
324575 1.5G 0b 0b └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stu
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff
Stderr:
/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/stderr ydb/core/cms/ut_sentinel_unstable [size:medium] ------ sole chunk ran 1 test (total:617.01s - test:600.10s) Chunk exceeded 600s timeout and was killed List of the tests involved in the launch: TSentinelUnstableTests::BSControllerCantChangeStatus (fail) duration: 593.07s Killed by timeout (600 s) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/cms/ut_sentinel_unstable/test-results/unittest/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/cms/ut_sentinel_unstable/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/cms/ut_sentinel_unstable/test-results/unittest/testing_out_stuff/stderr [fail] TSentinelUnstableTests::BSControllerCantChangeStatus [default-linux-x86_64-release-asan] (593.07s) (TWithBackTrace) ydb/library/actors/testlib/test_runtime.cpp:1262: Dispatched 2500000 events, limit reached. TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 TWithBackTrace::TWithBackTrace<>() at /-S/util/generic/yexception.h:146:5 NActors::TTestActorRuntimeBase::DispatchEventsInternal(NActors::TDispatchOptions const&, TInstant) at /-S/ydb/library/actors/testlib/test_runtime.cpp:0:48 DispatchEvents at /-S/ydb/library/actors/testlib/test_runtime.cpp:0:0 NKikimr::NCmsTest::TTestEnv::SetPDiskState(TSet, std::__y1::allocator> const&, NKikimrBlobStorage::TPDiskState_E, NKikimrBlobStorage::EDriveStatus) at /-S/ydb/core/cms/sentinel_ut_helpers.h:237:9 __root at /-S/contrib/libs/cxxsupp/libcxx/include/__tree:966:54 operator() at /-S/ydb/core/cms/sentinel_ut_unstable.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? 
at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/cms/ut_sentinel_unstable/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/cms/ut_sentinel_unstable/test-results/unittest/testing_out_stuff/TSentinelUnstableTests.BSControllerCantChangeStatus.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/cms/ut_sentinel_unstable/test-results/unittest/testing_out_stuff/TSentinelUnstableTests.BSControllerCantChangeStatus.out ------ TIMEOUT: 1 - FAIL ydb/core/cms/ut_sentinel_unstable ydb/core/health_check/ut [size:medium] nchunks:10 ------ [3/10] chunk ran 5 tests (total:47.37s - test:47.34s) [fail] THealthCheckTest::LayoutIncorrect [default-linux-x86_64-release-asan] (5.17s) assertion failed at ydb/core/health_check/health_check_ut.cpp:2275, virtual void NKikimr::NTestSuiteTHealthCheckTest::TTestCaseLayoutIncorrect::Execute_(NUnitTest::TTestContext &): (issue_log.message() == "Database has storage issues") failed: ("Database has multiple issues" != Database has storage issues) , with diff: ("|)Database has (mul|s)t(ipl|orag)e issues("|) TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 NKikimr::NTestSuiteTHealthCheckTest::TTestCaseLayoutIncorrect::Execute_(NUnitTest::TTestContext&) at /-S/ydb/core/health_check/health_check_ut.cpp:0:17 operator() at /-S/ydb/core/health_check/health_check_ut.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/health_check/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/health_check/ut/test-results/unittest/testing_out_stuff/THealthCheckTest.LayoutIncorrect.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/health_check/ut/test-results/unittest/testing_out_stuff/THealthCheckTest.LayoutIncorrect.out ------ FAIL: 47 - GOOD, 1 - FAIL ydb/core/health_check/ut ydb/core/keyvalue/ut_trace [size:medium] nchunks:5 ------ [0/5] chunk ran 1 test (total:12.98s - test:12.95s) [fail] TKeyValueTracingTest::ReadHuge [default-linux-x86_64-release-asan] (2.37s) equal assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:124, void TestOneRead(TString, TString): env.WilsonUploader->Traces.size() == 1 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 operator() at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? 
at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.ReadHuge.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.ReadHuge.out ------ [1/5] chunk ran 1 test (total:25.85s - setup:0.05s test:25.04s) [fail] TKeyValueTracingTest::ReadSmall [default-linux-x86_64-release-asan] (12.44s) equal assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:124, void TestOneRead(TString, TString): env.WilsonUploader->Traces.size() == 1 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 operator() at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.ReadSmall.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.ReadSmall.out ------ [2/5] chunk ran 1 test (total:14.09s - test:14.06s) [fail] TKeyValueTracingTest::WriteHuge [default-linux-x86_64-release-asan] (2.38s) assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:103, void TestOneWrite(TString, TVector &&): (env.WilsonUploader->Traces.size() == 1) failed: (2 != 1) TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 TestOneWrite(TBasicString>, TVector>, std::__y1::allocator>>>&&) at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:103:5 UnRef at /-S/util/generic/ptr.h:624:13 operator() at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? 
at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.WriteHuge.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.WriteHuge.out ------ [3/5] chunk ran 1 test (total:43.54s - setup:0.01s test:43.48s) [fail] TKeyValueTracingTest::WriteSmall [default-linux-x86_64-release-asan] (3.21s) assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:103, void TestOneWrite(TString, TVector &&): (env.WilsonUploader->Traces.size() == 1) failed: (2 != 1) TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 TestOneWrite(TBasicString>, TVector>, std::__y1::allocator>>>&&) at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:103:5 UnRef at /-S/util/generic/ptr.h:624:13 operator() at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.WriteSmall.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.WriteSmall.out ------ FAIL: 4 - FAIL ydb/core/keyvalue/ut_trace ydb/core/kqp/ut/cost [size:medium] nchunks:50 ------ [12/50] chunk ran 1 test (total:25.28s - test:25.24s) [crashed] KqpCost::OlapWriteRow [default-linux-x86_64-release-asan] (0.00s) Test crashed (return code: 100) ==404811==ERROR: AddressSanitizer: SEGV on unknown address 0x000000000008 (pc 0x000018b718ad bp 0x7ffdbef80940 sp 0x7ffdbef807a0 T0) ==404811==The signal is caused by a READ memory access. ==404811==Hint: address points to the zero page. 
2025-03-31T16:18:36.275368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T16:18:36.275441Z node 1 :IMPORT WARN: Table profiles were not loaded #0 0x18b718ad in Get::TypeHandler> /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:273:31 #1 0x18b718ad in Get /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:1348:32 #2 0x18b718ad in _internal_table_access /-B/ydb/public/api/protos/ydb_query_stats.pb.h:1762:31 #3 0x18b718ad in table_access /-B/ydb/public/api/protos/ydb_query_stats.pb.h:1766:10 #4 0x18b718ad in NKikimr::NKqp::NTestSuiteKqpCost::TTestCaseOlapWriteRow::Execute_(NUnitTest::TTestContext&) /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:636:13 #5 0x18b96437 in operator() /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1 #6 0x18b96437 in __invoke<(lambda at /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25 #7 0x18b96437 in __call<(lambda at /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5 #8 0x18b96437 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12 #9 0x18b96437 in std::__y1::__function::__func< ..[snippet truncated].. 0x194e85e5 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12 #11 0x194e85e5 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10 #12 0x194e85e5 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #13 0x194b8138 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18 #14 0x18b952e3 in NKikimr::NKqp::NTestSuiteKqpCost::TCurrentTest::Execute() /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1 #15 0x194b9a05 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:495:19 #16 0x194e2b5c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44 #17 0x7fb6d06ebd8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) #18 0x7fb6d06ebe3f in __libc_start_main (/lib/x86_64-linux-gnu/libc.so.6+0x29e3f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) #19 0x16306028 in _start (/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost+0x16306028) (BuildId: 5128b0b237a145c7e4cbd2034645258e10874b06) SUMMARY: AddressSanitizer: SEGV /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:273:31 in Get::TypeHandler> ==404811==ABORTING Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/cost/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/cost/test-results/unittest/testing_out_stuff/KqpCost.OlapWriteRow.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/cost/test-results/unittest/testing_out_stuff/KqpCost.OlapWriteRow.out ------ FAIL: 21 - GOOD, 1 - CRASHED ydb/core/kqp/ut/cost ydb/core/kqp/ut/federated_query/generic_ut nchunks:10 ------ [0/10] chunk ran 2 tests (total:68.96s - setup:0.04s test:60.07s) Chunk exceeded 60s timeout and was killed List of the tests involved in the launch: GenericFederatedQuery::ClickHouseManagedSelectAll (good) duration: 30.01s GenericFederatedQuery::ClickHouseFilterPushdown (timeout) duration: 27.39s Log: 
/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/stderr [timeout] GenericFederatedQuery::ClickHouseFilterPushdown [default-linux-x86_64-release-asan] (27.39s) Killed by timeout (60 s) Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/GenericFederatedQuery.ClickHouseFilterPushdown.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/testing_out_stuff/GenericFederatedQuery.ClickHouseFilterPushdown.out ------ TIMEOUT: 13 - GOOD, 1 - TIMEOUT ydb/core/kqp/ut/federated_query/generic_ut ydb/core/kqp/ut/pg [size:medium] nchunks:10 ------ [8/10] chunk ran 11 tests (total:426.73s - test:426.65s) [fail] KqpPg::TempTablesDrop [default-linux-x86_64-release-asan] (8.43s) (NYdb::Dev::TContractViolation) Attempt to use result with not successfull status. TCreateSessionResult::GetSession Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/pg/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/pg/test-results/unittest/testing_out_stuff/KqpPg.TempTablesDrop.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/pg/test-results/unittest/testing_out_stuff/KqpPg.TempTablesDrop.out [fail] KqpPg::TempTablesSessionsIsolation [default-linux-x86_64-release-asan] (9.33s) (NYdb::Dev::TContractViolation) Attempt to use result with not successfull status. TCreateSessionResult::GetSession Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/pg/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/pg/test-results/unittest/testing_out_stuff/KqpPg.TempTablesSessionsIsolation.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/pg/test-results/unittest/testing_out_stuff/KqpPg.TempTablesSessionsIsolation.out [fail] KqpPg::TempTablesWithCache [default-linux-x86_64-release-asan] (8.77s) (NYdb::Dev::TContractViolation) Attempt to use result with not successfull status. 
TCreateSessionResult::GetSession Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/pg/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/pg/test-results/unittest/testing_out_stuff/KqpPg.TempTablesWithCache.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/pg/test-results/unittest/testing_out_stuff/KqpPg.TempTablesWithCache.out ------ FAIL: 111 - GOOD, 3 - FAIL ydb/core/kqp/ut/pg ydb/core/kqp/ut/query [size:medium] nchunks:50 ------ [15/50] chunk ran 4 tests (total:35.44s - test:35.41s) [crashed] KqpLimits::TooBigColumn+useSink [default-linux-x86_64-release-asan] (0.00s) Test crashed (return code: -6) See logs for more info Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff/KqpLimits.TooBigColumn.useSink.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff/KqpLimits.TooBigColumn.useSink.out ------ [47/50] chunk ran 3 tests (total:61.26s - test:61.23s) [fail] KqpStats::SysViewClientLost [default-linux-x86_64-release-asan] (45.53s) assertion failed at ydb/core/kqp/ut/query/kqp_stats_ut.cpp:591, virtual void NKikimr::NKqp::NTestSuiteKqpStats::TTestCaseSysViewClientLost::Execute_(NUnitTest::TTestContext &): (timeoutedCount == 1) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x194EA4DB 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x199AF40F 2. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:591: Execute_ @ 0x1908FA48 3. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18: operator() @ 0x190A2A57 4. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18:1) &> @ 0x190A2A57 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18:1) &> @ 0x190A2A57 6. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x190A2A57 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x190A2A57 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x199E6435 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x199E6435 10. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x199E6435 11. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x199B5F88 12. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18: Execute @ 0x190A1BDB 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x199B7855 14. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x199E09AC 15. ??:0: ?? @ 0x7F7AC11A1D8F 16. ??:0: ?? @ 0x7F7AC11A1E3F 17. ??:0: ?? 
@ 0x16459028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff/KqpStats.SysViewClientLost.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff/KqpStats.SysViewClientLost.out ------ FAIL: 170 - GOOD, 1 - FAIL, 1 - CRASHED ydb/core/kqp/ut/query ydb/core/kqp/ut/tx [size:medium] nchunks:50 ------ [21/50] chunk ran 2 tests (total:41.38s - test:41.31s) [fail] KqpSinkTx::OlapInvalidateOnError [default-linux-x86_64-release-asan] (15.96s) assertion failed at ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:182, virtual void NKikimr::NKqp::NTestSuiteKqpSinkTx::TInvalidateOnError::DoExecute(): (result.GetStatus() == EStatus::PRECONDITION_FAILED) failed: (BAD_REQUEST != PRECONDITION_FAILED)
: Error: Bad request. Table: `/Root/KV`., code: 2017
: Error: Conflict with existing key. {"sorting_columns":[{"name":"Key","value":"1"}],"fields":["Key: uint32"]}, code: 2017 , with diff: (BAD_|P)RE(QUES|CONDI)T(|ION_FAILED) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191FE23B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x196C634F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:182: DoExecute @ 0x18D9B8AE 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18D0BACA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:201: Execute_ @ 0x18D7A4FA 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14: operator() @ 0x18D81977 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14:1) &> @ 0x18D81977 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14:1) &> @ 0x18D81977 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18D81977 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18D81977 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196FD375 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196FD375 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196FD375 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x196CCEC8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14: Execute @ 0x18D80B43 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x196CE795 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196F78EC 17. ??:0: ?? @ 0x7F4666B1AD8F 18. ??:0: ?? @ 0x7F4666B1AE3F 19. ??:0: ?? @ 0x163B4028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSinkTx.OlapInvalidateOnError.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSinkTx.OlapInvalidateOnError.out ------ [25/50] chunk ran 2 tests (total:40.79s - test:40.73s) [fail] KqpSnapshotIsolation::TConflictReadWriteOlap [default-linux-x86_64-release-asan] (24.11s) assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:146, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictReadWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (ABORTED != SUCCESS)
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: tablet lock have another internal generation counter: 18446744073709551615 != 0, code: 2001 , with diff: (ABORT|SUCC)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191FE23B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x196C634F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:146: DoExecute @ 0x18DD15F8 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18D0BACA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:172: Execute_ @ 0x18DB8CCA 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18DBEF27 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18DBEF27 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18DBEF27 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196FD375 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196FD375 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196FD375 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x196CCEC8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18DBE0F3 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x196CE795 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196F78EC 17. ??:0: ?? @ 0x7F2AADD52D8F 18. ??:0: ?? @ 0x7F2AADD52E3F 19. ??:0: ?? @ 0x163B4028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictReadWriteOlap.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictReadWriteOlap.out ------ [26/50] chunk ran 2 tests (total:22.83s - setup:0.02s test:22.76s) [fail] KqpSnapshotIsolation::TConflictReadWriteOltp [default-linux-x86_64-release-asan] (11.05s) assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictReadWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191FE23B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x196C634F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131: DoExecute @ 0x18DCECE7 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18D0BACA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:159: Execute_ @ 0x18DB8872 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18DBEF27 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18DBEF27 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18DBEF27 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196FD375 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196FD375 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196FD375 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x196CCEC8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18DBE0F3 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x196CE795 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196F78EC 17. ??:0: ?? @ 0x7FCF3B373D8F 18. ??:0: ?? @ 0x7FCF3B373E3F 19. ??:0: ?? @ 0x163B4028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictReadWriteOltp.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictReadWriteOltp.out [fail] KqpSnapshotIsolation::TConflictReadWriteOltpNoSink [default-linux-x86_64-release-asan] (7.02s) assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictReadWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191FE23B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x196C634F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131: DoExecute @ 0x18DCECE7 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18D0BACA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:166: Execute_ @ 0x18DB8A9A 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18DBEF27 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18DBEF27 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18DBEF27 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196FD375 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196FD375 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196FD375 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x196CCEC8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18DBE0F3 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x196CE795 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196F78EC 17. ??:0: ?? @ 0x7FCF3B373D8F 18. ??:0: ?? @ 0x7FCF3B373E3F 19. ??:0: ?? @ 0x163B4028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictReadWriteOltpNoSink.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictReadWriteOltpNoSink.out ------ [27/50] chunk ran 2 tests (total:33.99s - test:33.91s) [fail] KqpSnapshotIsolation::TConflictWriteOltp [default-linux-x86_64-release-asan] (11.38s) assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191FE23B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x196C634F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76: DoExecute @ 0x18DC7347 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18D0BACA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:105: Execute_ @ 0x18DB81F2 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18DBEF27 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18DBEF27 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18DBEF27 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196FD375 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196FD375 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196FD375 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x196CCEC8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18DBE0F3 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x196CE795 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196F78EC 17. ??:0: ?? @ 0x7FD53D694D8F 18. ??:0: ?? @ 0x7FD53D694E3F 19. ??:0: ?? @ 0x163B4028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictWriteOltp.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictWriteOltp.out [fail] KqpSnapshotIsolation::TConflictWriteOlap [default-linux-x86_64-release-asan] (17.52s) assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:92, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictWrite::DoExecute(): (result.GetStatus() == EStatus::ABORTED) failed: (SUCCESS != ABORTED) , with diff: (SUCC|ABORT)E(SS|D) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191FE23B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x196C634F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:92: DoExecute @ 0x18DC9C58 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18D0BACA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:118: Execute_ @ 0x18DB864A 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18DBEF27 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18DBEF27 9. 
/-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18DBEF27 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196FD375 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196FD375 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196FD375 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x196CCEC8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18DBE0F3 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x196CE795 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196F78EC 17. ??:0: ?? @ 0x7FD53D694D8F 18. ??:0: ?? @ 0x7FD53D694E3F 19. ??:0: ?? @ 0x163B4028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictWriteOlap.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictWriteOlap.out ------ [28/50] chunk ran 2 tests (total:34.83s - test:34.78s) [fail] KqpSnapshotIsolation::TConflictWriteOltpNoSink [default-linux-x86_64-release-asan] (11.18s) assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191FE23B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x196C634F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76: DoExecute @ 0x18DC7347 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18D0BACA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:112: Execute_ @ 0x18DB841A 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18DBEF27 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18DBEF27 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18DBEF27 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196FD375 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196FD375 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196FD375 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x196CCEC8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18DBE0F3 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x196CE795 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196F78EC 17. ??:0: ?? @ 0x7FE828A22D8F 18. ??:0: ?? @ 0x7FE828A22E3F 19. ??:0: ?? @ 0x163B4028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictWriteOltpNoSink.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictWriteOltpNoSink.out ------ [29/50] chunk ran 2 tests (total:23.22s - test:23.18s) [fail] KqpSnapshotIsolation::TReadOnlyOltpNoSink [default-linux-x86_64-release-asan] (7.16s) assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TReadOnly::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191FE23B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x196C634F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185: DoExecute @ 0x18DD66A3 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18D0BACA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:221: Execute_ @ 0x18DB911A 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18DBEF27 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18DBEF27 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18DBEF27 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196FD375 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196FD375 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196FD375 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x196CCEC8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18DBE0F3 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x196CE795 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196F78EC 17. ??:0: ?? @ 0x7F2FAAFCED8F 18. ??:0: ?? @ 0x7F2FAAFCEE3F 19. ??:0: ?? @ 0x163B4028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TReadOnlyOltpNoSink.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TReadOnlyOltpNoSink.out [fail] KqpSnapshotIsolation::TReadOnlyOltp [default-linux-x86_64-release-asan] (11.55s) assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TReadOnly::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191FE23B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x196C634F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185: DoExecute @ 0x18DD66A3 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18D0BACA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:214: Execute_ @ 0x18DB8EF2 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18DBEF27 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18DBEF27 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18DBEF27 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196FD375 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196FD375 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196FD375 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x196CCEC8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18DBE0F3 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x196CE795 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196F78EC 17. ??:0: ?? @ 0x7F2FAAFCED8F 18. ??:0: ?? @ 0x7F2FAAFCEE3F 19. ??:0: ?? @ 0x163B4028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TReadOnlyOltp.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TReadOnlyOltp.out ------ [30/50] chunk ran 2 tests (total:34.26s - test:34.20s) [fail] KqpSnapshotIsolation::TSimpleOltp [default-linux-x86_64-release-asan] (11.42s) assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TSimple::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191FE23B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x196C634F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25: DoExecute @ 0x18DC0DB7 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18D0BACA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:50: Execute_ @ 0x18DB7B72 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18DBEF27 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18DBEF27 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18DBEF27 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196FD375 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196FD375 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196FD375 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x196CCEC8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18DBE0F3 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x196CE795 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196F78EC 17. ??:0: ?? @ 0x7FDAAAD20D8F 18. ??:0: ?? @ 0x7FDAAAD20E3F 19. ??:0: ?? @ 0x163B4028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TSimpleOltp.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TSimpleOltp.out ------ [31/50] chunk ran 2 tests (total:20.36s - test:20.30s) [fail] KqpSnapshotIsolation::TSimpleOltpNoSink [default-linux-x86_64-release-asan] (10.38s) assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TSimple::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191FE23B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x196C634F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25: DoExecute @ 0x18DC0DB7 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18D0BACA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:57: Execute_ @ 0x18DB7D9A 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18DBEF27 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18DBEF27 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18DBEF27 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196FD375 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196FD375 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196FD375 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x196CCEC8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18DBE0F3 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x196CE795 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196F78EC 17. ??:0: ?? @ 0x7FB718678D8F 18. ??:0: ?? @ 0x7FB718678E3F 19. ??:0: ?? @ 0x163B4028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TSimpleOltpNoSink.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TSimpleOltpNoSink.out
------ FAIL: 93 - GOOD, 11 - FAIL ydb/core/kqp/ut/tx
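Note: nearly all of the KqpSnapshotIsolation failures above have the same shape: the test runs a transaction with SnapshotRW isolation against a row-oriented (OLTP) table, the server rejects it with PRECONDITION_FAILED ("SnapshotRW can only be used with olap tables"), and the status assertion fires; TConflictWriteOlap is the one inverse case (SUCCESS where ABORTED was expected). A minimal C++ sketch of the failing pattern, assuming the YDB C++ SDK query client; the table path, query text, include path, and the TTxSettings::SnapshotRW() name are illustrative assumptions, not taken from kqp_snapshot_isolation_ut.cpp:

    // Sketch only: reconstructs the failure shape visible in the log above.
    // Assumptions: ydb-cpp-sdk include layout; TTxSettings::SnapshotRW() as
    // the name of the snapshot read-write isolation setting.
    #include <ydb-cpp-sdk/client/query/client.h>
    #include <library/cpp/testing/unittest/registar.h>

    void UpsertUnderSnapshotRW(NYdb::NQuery::TQueryClient& client) {
        auto result = client.ExecuteQuery(
            "UPSERT INTO `/Root/Test` (Key, Value) VALUES (1u, \"a\");",
            NYdb::NQuery::TTxControl::BeginTx(
                NYdb::NQuery::TTxSettings::SnapshotRW()).CommitTx()
        ).GetValueSync();
        // Against a row (OLTP) table the server answers PRECONDITION_FAILED,
        // which is exactly the (PRECONDITION_FAILED != SUCCESS) diff above:
        UNIT_ASSERT_VALUES_EQUAL_C(result.GetStatus(), NYdb::EStatus::SUCCESS,
                                   result.GetIssues().ToString());
    }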
ydb/core/kqp/ut/view [size:medium]
------ sole chunk ran 23 tests (total:249.07s - test:248.98s)
[fail] TCreateAndDropViewTest::DropViewIfExists [default-linux-x86_64-release-asan] (11.72s) (NYdb::Dev::TContractViolation) Attempt to use result with not successfull status. TCreateSessionResult::GetSession Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/view/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/view/test-results/unittest/testing_out_stuff/TCreateAndDropViewTest.DropViewIfExists.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/view/test-results/unittest/testing_out_stuff/TCreateAndDropViewTest.DropViewIfExists.out
[fail] TCreateAndDropViewTest::DropNonexistingView [default-linux-x86_64-release-asan] (9.59s) (NYdb::Dev::TContractViolation) Attempt to use result with not successfull status. TCreateSessionResult::GetSession Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/view/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/view/test-results/unittest/testing_out_stuff/TCreateAndDropViewTest.DropNonexistingView.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/view/test-results/unittest/testing_out_stuff/TCreateAndDropViewTest.DropNonexistingView.out
[fail] TCreateAndDropViewTest::CallDropViewOnTable [default-linux-x86_64-release-asan] (8.44s) (NYdb::Dev::TContractViolation) Attempt to use result with not successfull status. TCreateSessionResult::GetSession Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/view/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/view/test-results/unittest/testing_out_stuff/TCreateAndDropViewTest.CallDropViewOnTable.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/view/test-results/unittest/testing_out_stuff/TCreateAndDropViewTest.CallDropViewOnTable.out
------ FAIL: 20 - GOOD, 3 - FAIL ydb/core/kqp/ut/view
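Note: the three TCreateAndDropViewTest failures above are not assertion failures but an SDK contract violation: TCreateSessionResult::GetSession() was called on a result whose status was never checked, after session creation itself had failed (the SystemView::ShowCreateTable* failures further down fail the same way). A hedged sketch of the guard that turns the exception into a readable assertion, assuming the NYdb::NTable client; the helper name and include path are illustrative:

    // Sketch only: the usual status check before unwrapping a session result;
    // not the actual code of the view or sys_view tests.
    #include <ydb-cpp-sdk/client/table/table.h>
    #include <library/cpp/testing/unittest/registar.h>

    NYdb::NTable::TSession GetSessionOrFail(NYdb::NTable::TTableClient& client) {
        auto sessionResult = client.CreateSession().GetValueSync();
        // Calling GetSession() on a non-success result throws
        // TContractViolation ("Attempt to use result with not successfull
        // status"); asserting first surfaces the server's issues instead:
        UNIT_ASSERT_C(sessionResult.IsSuccess(),
                      sessionResult.GetIssues().ToString());
        return sessionResult.GetSession();
    }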
ydb/core/persqueue/ut/ut_with_sdk [size:medium] nchunks:10
------ [0/10] chunk ran 6 tests (total:233.28s - setup:0.02s test:232.49s)
[fail] Balancing::Balancing_OneTopic_PQv1 [default-linux-x86_64-release-asan] (39.42s) assertion failed at ydb/core/persqueue/ut/ut_with_sdk/balancing_ut.cpp:40, void NKikimr::NTestSuiteBalancing::Simple(SdkVersion): (5 == p0.size()) failed: (5 != 10) After the appearance of the second reading session, the partitions should be distributed evenly among them (p0, [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]) TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 NKikimr::NTestSuiteBalancing::Simple(NKikimr::NPQ::NTest::SdkVersion) at /-S/ydb/core/persqueue/ut/ut_with_sdk/balancing_ut.cpp:40:13 operator() at /-S/ydb/core/persqueue/ut/ut_with_sdk/balancing_ut.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/persqueue/ut/ut_with_sdk/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/persqueue/ut/ut_with_sdk/test-results/unittest/testing_out_stuff/Balancing.Balancing_OneTopic_PQv1.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/persqueue/ut/ut_with_sdk/test-results/unittest/testing_out_stuff/Balancing.Balancing_OneTopic_PQv1.out
------ [7/10] chunk ran 5 tests (total:237.29s - setup:0.02s test:236.96s)
[fail] TopicAutoscaling::ReadingAfterSplitTest_AutoscaleAwareSDK [default-linux-x86_64-release-asan] (54.83s) assertion failed at ydb/core/persqueue/ut/ut_with_sdk/autoscaling_ut.cpp:96, void NKikimr::NTestSuiteTopicAutoscaling::ReadingAfterSplitTest(SdkVersion, bool, bool): (2 == info.PartitionId) failed: (2 != 4) TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 NKikimr::NTestSuiteTopicAutoscaling::ReadingAfterSplitTest(NKikimr::NPQ::NTest::SdkVersion, bool, bool) at /-S/ydb/core/persqueue/ut/ut_with_sdk/autoscaling_ut.cpp:0:17 operator() at /-S/ydb/core/persqueue/ut/ut_with_sdk/autoscaling_ut.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/persqueue/ut/ut_with_sdk/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/persqueue/ut/ut_with_sdk/test-results/unittest/testing_out_stuff/TopicAutoscaling.ReadingAfterSplitTest_AutoscaleAwareSDK.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/persqueue/ut/ut_with_sdk/test-results/unittest/testing_out_stuff/TopicAutoscaling.ReadingAfterSplitTest_AutoscaleAwareSDK.out
------ [8/10] chunk ran 5 tests (total:233.80s - test:233.47s)
[fail] TopicAutoscaling::ReadingAfterSplitTest_BeforeAutoscaleAwareSDK [default-linux-x86_64-release-asan] (56.31s) assertion failed at ydb/core/persqueue/ut/ut_with_sdk/autoscaling_ut.cpp:96, void NKikimr::NTestSuiteTopicAutoscaling::ReadingAfterSplitTest(SdkVersion, bool, bool): (2 == info.PartitionId) failed: (2 != 4) TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 NKikimr::NTestSuiteTopicAutoscaling::ReadingAfterSplitTest(NKikimr::NPQ::NTest::SdkVersion, bool, bool) at /-S/ydb/core/persqueue/ut/ut_with_sdk/autoscaling_ut.cpp:0:17 operator() at /-S/ydb/core/persqueue/ut/ut_with_sdk/autoscaling_ut.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ??
at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/persqueue/ut/ut_with_sdk/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/persqueue/ut/ut_with_sdk/test-results/unittest/testing_out_stuff/TopicAutoscaling.ReadingAfterSplitTest_BeforeAutoscaleAwareSDK.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/persqueue/ut/ut_with_sdk/test-results/unittest/testing_out_stuff/TopicAutoscaling.ReadingAfterSplitTest_BeforeAutoscaleAwareSDK.out ------ FAIL: 50 - GOOD, 3 - FAIL ydb/core/persqueue/ut/ut_with_sdk ydb/core/statistics/aggregator/ut [size:medium] nchunks:60 ------ [4/60] chunk ran 1 test (total:607.29s - test:600.02s) Chunk exceeded 600s timeout and was killed List of the tests involved in the launch: AnalyzeColumnshard::AnalyzeRebootColumnShard (fail) duration: 584.25s Killed by timeout (600 s) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/aggregator/ut/test-results/unittest/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/aggregator/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/aggregator/ut/test-results/unittest/testing_out_stuff/stderr [fail] AnalyzeColumnshard::AnalyzeRebootColumnShard [default-linux-x86_64-release-asan] (584.25s) (TWithBackTrace) ydb/library/actors/testlib/test_runtime.h:579: Exception occured while waiting for NKikimr::NStat::TEvStatistics::TEvAnalyzeResponse: (NActors::TSchedulingLimitReachedException) TestActorRuntime Processed over 100000 events.ydb/library/actors/testlib/test_runtime.cpp:716: TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 TWithBackTrace::TWithBackTrace<>() at /-S/util/generic/yexception.h:146:5 NKikimr::NStat::TEvStatistics::TEvAnalyzeResponse::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEventRethrow(NActors::TActorId const&, TDuration) at /-S/ydb/library/actors/testlib/test_runtime.h:0:24 DoDestroy at /-S/util/generic/ptr.h:237:13 operator() at /-S/ydb/core/statistics/aggregator/ut/ut_analyze_columnshard.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? 
at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/aggregator/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/aggregator/ut/test-results/unittest/testing_out_stuff/AnalyzeColumnshard.AnalyzeRebootColumnShard.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/aggregator/ut/test-results/unittest/testing_out_stuff/AnalyzeColumnshard.AnalyzeRebootColumnShard.out ------ TIMEOUT: 35 - GOOD, 1 - FAIL ydb/core/statistics/aggregator/ut ydb/core/sys_view/ut [size:medium] nchunks:10 ------ [5/10] chunk ran 7 tests (total:154.04s - test:153.89s) [fail] SystemView::PartitionStatsFields [default-linux-x86_64-release-asan] (10.76s) greater-or-equal assertion failed at ydb/core/sys_view/ut_kqp.cpp:502, void NKikimr::NSysView::(anonymous namespace)::TYsonFieldChecker::Uint64GreaterOrEquals(ui64): value.AsUint64() >= expected TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 UnRef at /-S/util/generic/ptr.h:624:13 NKikimr::NSysView::NTestSuiteSystemView::TTestCasePartitionStatsFields::Execute_(NUnitTest::TTestContext&) at /-S/ydb/core/sys_view/ut_kqp.cpp:1745:15 operator() at /-S/ydb/core/sys_view/ut_kqp.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/sys_view/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/sys_view/ut/test-results/unittest/testing_out_stuff/SystemView.PartitionStatsFields.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/sys_view/ut/test-results/unittest/testing_out_stuff/SystemView.PartitionStatsFields.out ------ [7/10] chunk ran 7 tests (total:135.65s - test:135.26s) [fail] SystemView::ShowCreateTableTtlSettings [default-linux-x86_64-release-asan] (10.70s) (NYdb::Dev::TContractViolation) Attempt to use result with not successfull status. TCreateSessionResult::GetSession Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/sys_view/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/sys_view/ut/test-results/unittest/testing_out_stuff/SystemView.ShowCreateTableTtlSettings.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/sys_view/ut/test-results/unittest/testing_out_stuff/SystemView.ShowCreateTableTtlSettings.out [fail] SystemView::ShowCreateTableTemporary [default-linux-x86_64-release-asan] (10.60s) (NYdb::Dev::TContractViolation) Attempt to use result with not successfull status. 
TCreateSessionResult::GetSession Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/sys_view/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/sys_view/ut/test-results/unittest/testing_out_stuff/SystemView.ShowCreateTableTemporary.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/sys_view/ut/test-results/unittest/testing_out_stuff/SystemView.ShowCreateTableTemporary.out [fail] SystemView::ShowCreateTablePartitionAtKeys [default-linux-x86_64-release-asan] (11.25s) (NYdb::Dev::TContractViolation) Attempt to use result with not successfull status. TCreateSessionResult::GetSession Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/sys_view/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/sys_view/ut/test-results/unittest/testing_out_stuff/SystemView.ShowCreateTablePartitionAtKeys.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/sys_view/ut/test-results/unittest/testing_out_stuff/SystemView.ShowCreateTablePartitionAtKeys.out ------ FAIL: 66 - GOOD, 4 - FAIL ydb/core/sys_view/ut ydb/core/tx/columnshard/ut_rw [size:medium] nchunks:60 ------ [28/60] chunk ran 1 test (total:603.39s - test:600.04s) Chunk exceeded 600s timeout and was killed List of the tests involved in the launch: TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKString (timeout) duration: 602.05s Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/stderr [timeout] TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKString [default-linux-x86_64-release-asan] (602.05s) Killed by timeout (600 s) Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/TColumnShardTestReadWrite.CompactionSplitGranuleStrKey_PKString.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/TColumnShardTestReadWrite.CompactionSplitGranuleStrKey_PKString.out ------ [29/60] chunk ran 1 test (total:603.50s - setup:0.01s test:600.04s) Chunk exceeded 600s timeout and was killed List of the tests involved in the launch: TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKUtf8 (timeout) duration: 601.10s Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/stderr [timeout] TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKUtf8 [default-linux-x86_64-release-asan] (601.10s) Killed by timeout (600 s) Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff Stderr: 
/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/TColumnShardTestReadWrite.CompactionSplitGranuleStrKey_PKUtf8.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/TColumnShardTestReadWrite.CompactionSplitGranuleStrKey_PKUtf8.out ------ TIMEOUT: 57 - GOOD, 2 - TIMEOUT ydb/core/tx/columnshard/ut_rw ydb/core/tx/datashard/ut_incremental_backup [size:medium] nchunks:4 ------ [0/4] chunk ran 3 tests (total:220.77s - test:220.48s) [fail] IncrementalBackup::ComplexRestoreBackupCollection+WithIncremental [default-linux-x86_64-release-asan] (154.24s) assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (TIMEOUT != SUCCESS) Response { QueryIssues { message: "Request timeout 600000ms exceeded" severity: 1 } QueryIssues { message: "Cancelling after 600000ms in ExecuteState" severity: 1 } TxMeta { } } YdbStatus: TIMEOUT , with diff: (TIM|SUCC)E(OUT|SS) TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode) at /-S/ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:0:5 UnRef at /-S/util/generic/ptr.h:624:13 operator() at /-S/ydb/core/tx/datashard/datashard_ut_incremental_backup.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? 
at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/testing_out_stuff/IncrementalBackup.ComplexRestoreBackupCollection.WithIncremental.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/testing_out_stuff/IncrementalBackup.ComplexRestoreBackupCollection.WithIncremental.out ------ FAIL: 10 - GOOD, 1 - FAIL ydb/core/tx/datashard/ut_incremental_backup ydb/core/tx/datashard/ut_order [size:medium] nchunks:40 ------ [28/40] chunk ran 1 test (total:607.51s - test:600.09s) Chunk exceeded 600s timeout and was killed List of the tests involved in the launch: DataShardTxOrder::RandomPointsAndRanges (timeout) duration: 602.51s Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_order/test-results/unittest/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_order/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_order/test-results/unittest/testing_out_stuff/stderr [timeout] DataShardTxOrder::RandomPointsAndRanges [default-linux-x86_64-release-asan] (602.51s) Killed by timeout (600 s) Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_order/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_order/test-results/unittest/testing_out_stuff/DataShardTxOrder.RandomPointsAndRanges.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_order/test-results/unittest/testing_out_stuff/DataShardTxOrder.RandomPointsAndRanges.out ------ TIMEOUT: 52 - GOOD, 1 - TIMEOUT ydb/core/tx/datashard/ut_order ydb/core/tx/datashard/ut_read_iterator [size:medium] nchunks:10 ------ [9/10] chunk ran 14 tests (total:603.89s - test:600.07s) Chunk exceeded 600s timeout and was killed List of the tests involved in the launch: ReadIteratorExternalBlobs::ExtBlobsWithSpecificKeys (good) duration: 118.41s ReadIteratorExternalBlobs::ExtBlobsMultipleColumns (timeout) duration: 92.28s ReadIteratorExternalBlobs::ExtBlobsWithDeletesInTheEnd (good) duration: 68.00s ReadIteratorExternalBlobs::ExtBlobsWithFirstRowPreloaded (good) duration: 66.92s ReadIteratorExternalBlobs::ExtBlobs (good) duration: 63.84s ReadIteratorExternalBlobs::ExtBlobsWithFirstRowPreloadedWithReboot (good) duration: 61.94s ReadIteratorExternalBlobs::ExtBlobsWithDeletesInTheBeginning (good) duration: 59.21s ReadIteratorExternalBlobs::ExtBlobsWithDeletesInTheMiddle (good) duration: 47.60s DataShardReadIteratorSysTables::ShouldNotAllowArrow (good) duration: 9.27s DataShardReadIteratorSysTables::ShouldRead (good) duration: 7.64s 1 more tests with 6.85s total duration are not listed. 3 tests were not launched inside chunk. 
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_read_iterator/test-results/unittest/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_read_iterator/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_read_iterator/test-results/unittest/testing_out_stuff/stderr [timeout] ReadIteratorExternalBlobs::ExtBlobsMultipleColumns [default-linux-x86_64-release-asan] (92.28s) Killed by timeout (600 s) Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_read_iterator/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_read_iterator/test-results/unittest/testing_out_stuff/ReadIteratorExternalBlobs.ExtBlobsMultipleColumns.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_read_iterator/test-results/unittest/testing_out_stuff/ReadIteratorExternalBlobs.ExtBlobsMultipleColumns.out ------ TIMEOUT: 144 - GOOD, 3 - NOT_LAUNCHED, 1 - TIMEOUT ydb/core/tx/datashard/ut_read_iterator ydb/core/tx/datashard/ut_volatile [size:medium] nchunks:2 ------ [1/2] chunk ran 26 tests (total:355.22s - test:354.55s) [crashed] DataShardVolatile::GracefulShardRestartNoEarlyReadSetAck [default-linux-x86_64-release-asan] (14.38s) Test crashed (return code: 100) ==105653==ERROR: LeakSanitizer: detected memory leaks Direct leak of 224 byte(s) in 1 object(s) allocated from: #0 0x1903ffcd in operator new(unsigned long) /-S/contrib/libs/clang18-rt/lib/asan/asan_new_delete.cpp:86:3 #1 0x2b08ca70 in make_unique /-S/contrib/libs/cxxsupp/libcxx/include/__memory/unique_ptr.h:621:26 #2 0x2b08ca70 in NKikimr::NEvents::TDataEvents::TEvWriteResult::BuildCompleted(unsigned long, unsigned long) /-S/ydb/core/tx/data_events/events.h:123:27 #3 0x2b086026 in NKikimr::NDataShard::TExecuteWriteUnit::Execute(TIntrusivePtr>, NKikimr::NTabletFlatExecutor::TTransactionContext&, NActors::TActorContext const&) /-S/ydb/core/tx/datashard/execute_write_unit.cpp:376:37 #4 0x2aedef1c in NKikimr::NDataShard::TPipeline::RunExecutionPlan(TIntrusivePtr>, TVector>&, NKikimr::NTabletFlatExecutor::TTransactionContext&, NActors::TActorContext const&) /-S/ydb/core/tx/datashard/datashard_pipeline.cpp:1851:28 #5 0x2ab9fc94 in NKikimr::NDataShard::TDataShard::TTxWrite::Execute(NKikimr::NTabletFlatExecutor::TTransactionContext&, NActors::TActorContext const&) /-S/ydb/core/tx/datashard/datashard__write.cpp:100:38 #6 0x2062a8d4 in NKikimr::NTabletFlatExecutor::TExecutor::ExecuteTransaction(NKikimr::NTabletFlatExecutor::TSeat*) /-S/ydb/core/tablet_flat/flat_executor.cpp:1910:35 #7 0x20626274 in NKikimr::NTabletFlatExecutor: ..[snippet truncated].. 
in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12 #27 0x18de9c37 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10 #28 0x198001f5 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12 #29 0x198001f5 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10 #30 0x198001f5 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #31 0x197cfd48 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18 #32 0x18de8ae3 in NKikimr::NTestSuiteDataShardVolatile::TCurrentTest::Execute() /-S/ydb/core/tx/datashard/datashard_ut_volatile.cpp:17:1 #33 0x197d1615 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:495:19 #34 0x197fa76c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44 #35 0x7f4a15a5ad8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) SUMMARY: AddressSanitizer: 664 byte(s) leaked in 11 allocation(s). Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_volatile/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_volatile/test-results/unittest/testing_out_stuff/DataShardVolatile.GracefulShardRestartNoEarlyReadSetAck.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_volatile/test-results/unittest/testing_out_stuff/DataShardVolatile.GracefulShardRestartNoEarlyReadSetAck.out ------ FAIL: 51 - GOOD, 1 - CRASHED ydb/core/tx/datashard/ut_volatile ydb/core/tx/tiering/ut [size:medium] nchunks:60 ------ [3/60] chunk ran 1 test (total:91.36s - setup:0.01s test:91.26s) [crashed] ColumnShardTiers::TTLUsage [default-linux-x86_64-release-asan] (0.00s) Test crashed (return code: -6) See logs for more info Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tiering/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tiering/ut/test-results/unittest/testing_out_stuff/ColumnShardTiers.TTLUsage.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tiering/ut/test-results/unittest/testing_out_stuff/ColumnShardTiers.TTLUsage.out ------ FAIL: 9 - GOOD, 1 - CRASHED ydb/core/tx/tiering/ut ydb/core/tx/tx_proxy/ut_schemereq [size:medium] nchunks:10 ------ [3/10] chunk ran 30 tests (total:280.72s - test:280.49s) [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-56 [default-linux-x86_64-release-asan] (12.08s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:8339 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 UnRef at /-S/util/generic/ptr.h:624:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-56.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-56.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-62 [default-linux-x86_64-release-asan] (8.15s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:23974 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 UnRef at /-S/util/generic/ptr.h:624:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-62.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-62.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-60 [default-linux-x86_64-release-asan] (9.66s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:12311 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 UnRef at /-S/util/generic/ptr.h:624:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-60.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-60.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-57 [default-linux-x86_64-release-asan] (12.43s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:11819 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 UnRef at /-S/util/generic/ptr.h:624:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-57.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-57.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-59 [default-linux-x86_64-release-asan] (12.47s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:16307 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 UnRef at /-S/util/generic/ptr.h:624:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-59.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-59.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-64 [default-linux-x86_64-release-asan] (8.84s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:26549 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 UnRef at /-S/util/generic/ptr.h:624:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-64.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-64.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-67 [default-linux-x86_64-release-asan] (8.70s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:18578 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 UnRef at /-S/util/generic/ptr.h:624:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-67.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-67.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-58 [default-linux-x86_64-release-asan] (12.32s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:20677 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 UnRef at /-S/util/generic/ptr.h:624:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-58.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-58.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-66 [default-linux-x86_64-release-asan] (8.19s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:25820 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 UnRef at /-S/util/generic/ptr.h:624:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-66.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-66.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-68 [default-linux-x86_64-release-asan] (9.59s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:21992 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 UnRef at /-S/util/generic/ptr.h:624:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-68.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-68.out
------ FAIL: 290 - GOOD, 10 - FAIL ydb/core/tx/tx_proxy/ut_schemereq
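Note: all ten SchemeReqAccess failures above die in the same helper, CreateLocalUser, before the alter-login logic under test is ever reached: creating a session against the just-started test server times out at the gRPC layer (status 4, Deadline Exceeded). One possible mitigation, sketched under assumptions: the fluent ClientTimeout() setter, the include path, and the 30-second value are illustrative, not a claim about the test's actual fix:

    // Sketch only: widen the client-side deadline so a slow ASan test server
    // returns a real status instead of DEADLINE_EXCEEDED at session creation.
    #include <ydb-cpp-sdk/client/table/table.h>
    #include <library/cpp/testing/unittest/registar.h>
    #include <util/datetime/base.h>

    NYdb::NTable::TSession CreateSessionPatiently(NYdb::NTable::TTableClient& client) {
        auto settings = NYdb::NTable::TCreateSessionSettings()
            .ClientTimeout(TDuration::Seconds(30)); // assumed; default is smaller
        auto sessionResult = client.CreateSession(settings).GetValueSync();
        UNIT_ASSERT_C(sessionResult.IsSuccess(),
                      sessionResult.GetIssues().ToString());
        return sessionResult.GetSession();
    }

A warm-up retry around the first request would be an alternative; under ASan the first call after server start is routinely the slow one.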
ydb/services/persqueue_v1/ut [size:medium] nchunks:10
------ [3/10] chunk ran 13 tests (total:395.62s - test:394.91s)
[crashed] TPersQueueTest::DisableDeduplication [default-linux-x86_64-release-asan] (20.21s)
Test crashed (return code: 100)
==74934==ERROR: LeakSanitizer: detected memory leaks
Indirect leak of 5705112 byte(s) in 87 object(s) allocated from:
    #0 0x18fe6acf in malloc /-S/contrib/libs/clang18-rt/lib/asan/asan_malloc_linux.cpp:68:3
    #1 0x1a29d993 in grpc_event_engine::experimental::MemoryAllocator::MakeSlice(grpc_event_engine::experimental::MemoryRequest) /-S/contrib/libs/grpc/src/core/lib/event_engine/memory_allocator.cc:63:13
    #2 0x1a2782bd in maybe_make_read_slices /-S/contrib/libs/grpc/src/core/lib/iomgr/tcp_posix.cc:1070:57
    #3 0x1a2782bd in tcp_handle_read(void*, y_absl::lts_y_20240722::Status) /-S/contrib/libs/grpc/src/core/lib/iomgr/tcp_posix.cc:1094:5
    #4 0x1a27c717 in Run /-S/contrib/libs/grpc/src/core/lib/iomgr/closure.h:303:5
    #5 0x1a27c717 in tcp_read(grpc_endpoint*, grpc_slice_buffer*, grpc_closure*, bool, int) /-S/contrib/libs/grpc/src/core/lib/iomgr/tcp_posix.cc:1156:5
    #6 0x1a5d9e88 in continue_read_action_locked /-S/contrib/libs/grpc/src/core/ext/transport/chttp2/transport/chttp2_transport.cc:2594:3
    #7 0x1a5d9e88 in read_action_locked(void*, y_absl::lts_y_20240722::Status) /-S/contrib/libs/grpc/src/core/ext/transport/chttp2/transport/chttp2_transport.cc:2583:7
    #8 0x1a167336 in grpc_combiner_continue_exec_ctx() /-S/contrib/libs/grpc/src/core/lib/iomgr/combiner.cc:231:5
    #9 0x1a13f8f4 in grpc_core::ExecCtx::Flush() /-S/contrib/libs/grpc/src/core/lib/iomgr/exec_ctx.cc:75:17
    #10 0x1a28bb44 in end_worker /-S/contrib/li ..[snippet truncated].. llset.cc:48:10
    #14 0x1a2661d7 in cq_next(grpc_completion_queue*, gpr_timespec, void*) /-S/contrib/libs/grpc/src/core/lib/surface/completion_queue.cc:1036:29
    #15 0x1ae217be in grpc::CompletionQueue::AsyncNextInternal(void**, bool*, gpr_timespec) /-S/contrib/libs/grpc/src/cpp/common/completion_queue_cc.cc:166:15
    #16 0x1ffaba8f in Next /-S/contrib/libs/grpc/include/grpcpp/completion_queue.h:182:13
    #17 0x1ffaba8f in NYdbGrpc::Dev::PullEvents(grpc::CompletionQueue*) /-S/ydb/public/sdk/cpp/src/library/grpc/client/grpc_client_low.cpp:190:18
    #18 0x1ae36d6e in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12
    #19 0x1ae36d6e in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10
    #20 0x1ae36d6e in (anonymous namespace)::TThreadFactoryFuncObj::DoExecute() /-S/util/thread/factory.cpp:61:13
    #21 0x1ae372bc in Execute /-S/util/thread/factory.h:15:13
    #22 0x1ae372bc in (anonymous namespace)::TSystemThreadFactory::TPoolThread::ThreadProc(void*) /-S/util/thread/factory.cpp:36:41
    #23 0x193359c4 in (anonymous namespace)::TPosixThread::ThreadProxy(void*) /-S/util/system/thread.cpp:244:20
    #24 0x18fe4618 in asan_thread_start(void*) /-S/contrib/libs/clang18-rt/lib/asan/asan_interceptors.cpp:239:28
SUMMARY: AddressSanitizer: 6494997 byte(s) leaked in 1606 allocation(s).
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/persqueue_v1/ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/persqueue_v1/ut/test-results/unittest/testing_out_stuff/TPersQueueTest.DisableDeduplication.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/persqueue_v1/ut/test-results/unittest/testing_out_stuff/TPersQueueTest.DisableDeduplication.out
------ FAIL: 132 - GOOD, 1 - CRASHED ydb/services/persqueue_v1/ut
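The leak above is reported from the SDK's completion-queue poller thread (NYdbGrpc::Dev::PullEvents), i.e. gRPC read buffers still in flight when the process exited. As background, a minimal sketch of the usual shutdown discipline for a grpc::CompletionQueue poller in plain gRPC C++; this is generic library usage under my own assumptions, not the test's actual teardown code:

    #include <grpcpp/completion_queue.h>
    #include <thread>

    // Drain the queue: Next() keeps delivering events after Shutdown() and
    // only returns false once the queue is fully drained.
    void PollUntilShutdown(grpc::CompletionQueue* cq) {
        void* tag = nullptr;
        bool ok = false;
        while (cq->Next(&tag, &ok)) {
            // handle (tag, ok) pairs here
        }
    }

    int main() {
        grpc::CompletionQueue cq;
        std::thread poller(PollUntilShutdown, &cq);
        // ... issue async operations against cq ...
        cq.Shutdown();  // forbid new work; pending events still get delivered
        poller.join();  // wait for the drain before cq is destroyed
        return 0;
    }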
ydb/services/ydb/sdk_sessions_ut [size:medium] nchunks:10
------ [2/10] chunk ran 2 tests (total:38.79s - test:38.74s)
[fail] YdbSdkSessions::MultiThreadSessionPoolLimitSyncQueryClient [default-linux-x86_64-release-asan] (15.44s)
assertion failed at ydb/services/ydb/sdk_sessions_ut/sdk_sessions_ut.cpp:425, void NTestSuiteYdbSdkSessions::DoMultiThreadSessionPoolLimitSync() [TClient = NYdb::NQuery::TQueryClient]: (false)
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12
UnRef at /-S/util/generic/ptr.h:624:13
operator() at /-S/ydb/services/ydb/sdk_sessions_ut/sdk_sessions_ut.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/sdk_sessions_ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/sdk_sessions_ut/test-results/unittest/testing_out_stuff/YdbSdkSessions.MultiThreadSessionPoolLimitSyncQueryClient.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/sdk_sessions_ut/test-results/unittest/testing_out_stuff/YdbSdkSessions.MultiThreadSessionPoolLimitSyncQueryClient.out
------ [8/10] chunk ran 1 test (total:21.72s - test:21.67s)
[fail] YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryServiceStreamCall [default-linux-x86_64-release-asan] (9.48s)
assertion failed at ydb/services/ydb/sdk_sessions_ut/sdk_sessions_ut.cpp:253, virtual void NTestSuiteYdbSdkSessions::TTestCaseTestSdkFreeSessionAfterBadSessionQueryServiceStreamCall::Execute_(NUnitTest::TTestContext &): (session.GetId() == sessionId) failed: ("ydb://session/3?node_id=1&id=N2JhYTcyYy1hMjI2MjlkNC1lYjJjZDAyYi04ZTRkYmM4Yg==" != "ydb://session/3?node_id=1&id=ZTNhN2Q5NjctZmQ0YWQ3MWMtODEzOTM2MDQtYjVhMTZhOTI=")
with diff: "ydb://session/3?node_id=1&id=(|ZTNh)N2(Jh|Q5NjctZmQ0)Y(|WQ3MWMtODEzO)T(cyYy1h|)M(jI|)2M(jlkNC1l|DQt)Yj(Jj|VhMT)Z(DAyYi04Z|hO)T(RkYmM4Yg|I)=(=|)"
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12
NTestSuiteYdbSdkSessions::TTestCaseTestSdkFreeSessionAfterBadSessionQueryServiceStreamCall::Execute_(NUnitTest::TTestContext&) at /-S/ydb/services/ydb/sdk_sessions_ut/sdk_sessions_ut.cpp:0:13
operator() at /-S/ydb/services/ydb/sdk_sessions_ut/sdk_sessions_ut.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/sdk_sessions_ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/sdk_sessions_ut/test-results/unittest/testing_out_stuff/YdbSdkSessions.TestSdkFreeSessionAfterBadSessionQueryServiceStreamCall.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/sdk_sessions_ut/test-results/unittest/testing_out_stuff/YdbSdkSessions.TestSdkFreeSessionAfterBadSessionQueryServiceStreamCall.out
------ FAIL: 14 - GOOD, 2 - FAIL ydb/services/ydb/sdk_sessions_ut
ydb/services/ydb/ut [size:medium] nchunks:60
------ [29/60] chunk ran 5 tests (total:39.95s - test:39.90s)
[fail] YdbLogStore::AlterLogTable [default-linux-x86_64-release-asan] (5.03s)
assertion failed at ydb/services/ydb/ydb_logstore_ut.cpp:435, virtual void NTestSuiteYdbLogStore::TTestCaseAlterLogTable::Execute_(NUnitTest::TTestContext &): (res.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: Column stores are not supported
with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12
NTestSuiteYdbLogStore::TTestCaseAlterLogTable::Execute_(NUnitTest::TTestContext&) at /-S/ydb/services/ydb/ydb_logstore_ut.cpp:0:13
operator() at /-S/ydb/services/ydb/ydb_logstore_ut.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/ut/test-results/unittest/testing_out_stuff/YdbLogStore.AlterLogTable.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/ut/test-results/unittest/testing_out_stuff/YdbLogStore.AlterLogTable.out
------ FAIL: 286 - GOOD, 1 - FAIL ydb/services/ydb/ut
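How to read the "with diff" lines in these reports: characters common to both strings are printed bare, and each parenthesized (left|right) pair holds the alternatives, left from the first compared value and right from the second. Decoding the short diff above as a worked example:

    (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)
    left  alternatives: PRE + C + ONDITION_FAIL + E + D  -> PRECONDITION_FAILED
    right alternatives: SUC + C +                 E + SS -> SUCCESS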
ydb/tests/functional/kqp/kqp_indexes
------ sole chunk ran 2 tests (total:299.58s - setup:0.01s recipes:23.52s test:273.47s recipes:2.40s)
Info: Test run has exceeded 16.0G (16777216K) memory limit with 16.7G (17541188K) used. This may lead to test failure on the Autocheck/CI
You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make
pid rss ref pdirt
927245 44.8M 44.5M 6.4M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
927870 34.3M 22.6M 10.0M ├─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
936468 46.0M 46.0M 23.0M │  └─ test_tool run_ut @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_stuff/test_tool.args
936556 2.7G 2.7G 2.7G │     └─ ydb-tests-functional-kqp-kqp_indexes --trace-path-append /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/y
929615 1.8G 1.8G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_s
929616 1.8G 1.8G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_s
929617 1.8G 1.8G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_s
929618 1.7G 1.7G 1.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_s
929647 1.8G 1.8G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_s
929653 1.7G 1.7G 1.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_s
929654 1.7G 1.7G 1.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_s
929655 1.7G 1.7G 1.2G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_s
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_stuff/stderr
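The REQUIREMENTS(ram:X) hint above points at the suite's ya.make. A minimal sketch of what raising the limit could look like; the module macro and the value 32 are illustrative assumptions, only REQUIREMENTS(ram:X) itself comes from the log:

    UNITTEST()
        # ... existing SRCS, PEERDIR, etc. ...
        SIZE(MEDIUM)
        REQUIREMENTS(ram:32)  # assumed value; the hint only shows the ram:X placeholder
    END()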
ydb/tests/functional/kqp/kqp_query_svc
------ sole chunk ran 2 tests (total:71.67s - recipes:14.58s test:52.34s recipes:4.69s)
Info: Test run has exceeded 16.0G (16777216K) memory limit with 16.5G (17268408K) used. This may lead to test failure on the Autocheck/CI
You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make
pid rss ref pdirt
926754 44.8M 44.1M 6.4M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
926757 31.7M 19.9M 7.4M ├─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
944711 214M 405M 336M │  └─ ydb_recipe --build-root /home/runner/actions_runner/_work/ydb/ydb/tmp/out --source-root /home/runner/actions_runner/_work/ydb/ydb --gdb-path /home/runner/.ya/tools/v4/668
928250 2.0G 2.0G 1.6G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_query_svc/test-results/unittest/testing_out
928253 2.1G 2.1G 1.6G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_query_svc/test-results/unittest/testing_out
928274 1.9G 2.0G 1.6G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_query_svc/test-results/unittest/testing_out
928277 2.0G 2.0G 1.5G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_query_svc/test-results/unittest/testing_out
928278 2.0G 2.1G 1.7G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_query_svc/test-results/unittest/testing_out
928279 1.9G 2.0G 1.5G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_query_svc/test-results/unittest/testing_out
928280 2.0G 2.0G 1.6G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_query_svc/test-results/unittest/testing_out
928282 2.0G 2.0G 1.6G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_query_svc/test-results/unittest/testing_out
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_query_svc/test-results/unittest/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_query_svc/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_query_svc/test-results/unittest/testing_out_stuff/stderr
ydb/tests/functional/transfer [size:medium]
------ sole chunk ran 27 tests (total:209.26s - recipes:15.35s test:189.52s recipes:4.28s)
Info: Test run has exceeded 16.0G (16777216K) memory limit with 17.9G (18742284K) used. This may lead to test failure on the Autocheck/CI
You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make
pid rss ref pdirt
930374 44.8M 44.8M 6.6M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
931176 34.2M 22.9M 10.1M ├─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
955920 217M 402M 334M │  └─ ydb_recipe --build-root /home/runner/actions_runner/_work/ydb/ydb/tmp/out --source-root /home/runner/actions_runner/_work/ydb/ydb --gdb-path /home/runner/.ya/tools/v4/668
932417 2.2G 2.2G 1.6G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/transfer/test-results/unittest/testing_out_stuff/yd
932418 2.2G 2.2G 1.6G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/transfer/test-results/unittest/testing_out_stuff/yd
932419 2.2G 2.2G 1.6G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/transfer/test-results/unittest/testing_out_stuff/yd
932420 2.1G 2.1G 1.6G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/transfer/test-results/unittest/testing_out_stuff/yd
932421 2.2G 2.2G 1.7G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/transfer/test-results/unittest/testing_out_stuff/yd
932422 2.2G 2.2G 1.6G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/transfer/test-results/unittest/testing_out_stuff/yd
932423 2.2G 2.2G 1.6G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/transfer/test-results/unittest/testing_out_stuff/yd
932424 2.2G 2.2G 1.6G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/transfer/test-results/unittest/testing_out_stuff/yd
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/transfer/test-results/unittest/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/transfer/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/transfer/test-results/unittest/testing_out_stuff/stderr
[fail] Replication::PauseAndResumeReplication [default-linux-x86_64-release-asan] (0.90s)
assertion failed at ydb/tests/functional/transfer/utils.h:422, std::pair NReplicationTest::MainTestCase::DoRead(const TExpectations &): (res.IsSuccess())
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/local/Table_461884686310472243]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12
UnRef at /-S/util/generic/ptr.h:624:13
NReplicationTest::MainTestCase::CheckResult(TVector<...> const&) at /-S/ydb/tests/functional/transfer/utils.h:431:13
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/vector:527:18
operator() at /-S/ydb/tests/functional/transfer/replication.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/transfer/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/transfer/test-results/unittest/testing_out_stuff/Replication.PauseAndResumeReplication.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/transfer/test-results/unittest/testing_out_stuff/Replication.PauseAndResumeReplication.out
------ FAIL: 26 - GOOD, 1 - FAIL ydb/tests/functional/transfer
Total 401 suites: 363 - GOOD 25 - FAIL 13 - TIMEOUT
Total 9245 tests: 9117 - GOOD 85 - FAIL 24 - NOT_LAUNCHED 12 - TIMEOUT 2 - SKIPPED 5 - CRASHED
Cache efficiency ratio is 89.72% (42230 of 47067). Local: 0 (0.00%), dist: 4670 (9.92%), by dynamic uids: 0 (0.00%), avoided: 37560 (79.80%)
Dist cache download: count=4307, size=14.68 GiB, speed=156.68 MiB/s
Disk usage for tools/sdk at least 296.89 MiB
Additional disk space consumed for build cache 892.09 GiB
Critical path:
[  5987 ms] [CC] [cwIFapRgnRsNC28nG1_ZWQ default-linux-x86_64 release asan]: $(SOURCE_ROOT)/library/cpp/svnversion/svnversion.cpp [started: 0 (1743436354162), finished: 5987 (1743436360149)]
[    54 ms] [AR] [ZnYx2ijIJkFCAj_3TK_tTw default-linux-x86_64 release asan]: $(BUILD_ROOT)/library/cpp/svnversion/liblibrary-cpp-svnversion.a [started: 47460 (1743436401622), finished: 47514 (1743436401676)]
[ 40178 ms] [LD] [7M2b36BAYdGMooQkFWZQDA default-linux-x86_64 release asan]: $(BUILD_ROOT)/ydb/apps/ydbd/ydbd [started: 709936 (1743437064098), finished: 750114 (1743437104276)]
[645124 ms] [TM] [rnd-ymeu9ous459snw1l asan default-linux-x86_64 release]: ydb/tests/fq/mem_alloc/py3test [started: 2637204 (1743438991366), finished: 3282328 (1743439636490)]
Time from start: 3766921.5720214844 ms, time elapsed by graph 691343 ms, time diff 3075578.5720214844 ms.
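A quick arithmetic cross-check of the counters above (my own verification, not log output):

    363 + 25 + 13 = 401 suites
    9117 + 85 + 24 + 12 + 2 + 5 = 9245 tests
    4670 + 37560 = 42230 cache hits; 42230 / 47067 ≈ 0.8972 (89.72%)
    4670 / 47067 ≈ 0.0992 (9.92% dist), 37560 / 47067 ≈ 0.7980 (79.80% avoided)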
The longest 10 tasks:
[645124 ms] [TM] [rnd-ymeu9ous459snw1l asan default-linux-x86_64 release]: ydb/tests/fq/mem_alloc/py3test [started: 1743438991366, finished: 1743439636490]
[633409 ms] [TM] [rnd-nowv7aj52bmoan91 asan default-linux-x86_64 release]: ydb/tests/olap/column_family/compression/py3test [started: 1743438971495, finished: 1743439604904]
[632933 ms] [TM] [rnd-ki5xc7kofnxh2bty asan default-linux-x86_64 release]: ydb/tests/olap/scenario/py3test [started: 1743439014290, finished: 1743439647223]
[629653 ms] [TM] [rnd-7wfio525h5uj5xbq asan default-linux-x86_64 release]: ydb/tests/olap/s3_import/py3test [started: 1743438996330, finished: 1743439625983]
[618578 ms] [TM] [rnd-8669686757649248174 asan default-linux-x86_64 release]: ydb/tests/olap/ttl_tiering/py3test [started: 1743437276040, finished: 1743437894618]
[617511 ms] [TM] [rnd-hpccnscthxxn6co8 asan default-linux-x86_64 release]: ydb/core/cms/ut_sentinel_unstable/unittest [started: 1743438966386, finished: 1743439583897]
[612886 ms] [TM] [rnd-15284502439213868197 asan default-linux-x86_64 release]: ydb/tests/olap/ttl_tiering/py3test [started: 1743437291239, finished: 1743437904125]
[608256 ms] [TM] [rnd-790298356815184246 asan default-linux-x86_64 release]: ydb/tests/functional/tpc/medium/py3test [started: 1743439210168, finished: 1743439818424]
[607982 ms] [TM] [rnd-8480183617455567925 asan default-linux-x86_64 release]: ydb/core/tx/datashard/ut_order/unittest [started: 1743437946614, finished: 1743438554596]
[607772 ms] [TM] [rnd-2388615197270889793 asan default-linux-x86_64 release]: ydb/core/statistics/aggregator/ut/unittest [started: 1743438741810, finished: 1743439349582]
Total time by type:
[155649626 ms] [TM] [count: 4022, ave time 38699.56 msec]
[ 11498153 ms] [LD] [count: 356, ave time 32298.18 msec]
[ 11105607 ms] [prepare:get from dist cache] [count: 4670, ave time 2378.07 msec]
[  1413530 ms] [TS] [count: 228, ave time 6199.69 msec]
[   483130 ms] [TA] [count: 207, ave time 2333.96 msec]
[   442280 ms] [prepare:tools] [count: 19, ave time 23277.89 msec]
[   203268 ms] [prepare:bazel-store] [count: 2, ave time 101634.00 msec]
[   133088 ms] [prepare:AC] [count: 4, ave time 33272.00 msec]
[   123021 ms] [prepare:put into local cache, clean build dir] [count: 4686, ave time 26.25 msec]
[     8071 ms] [CC] [count: 5, ave time 1614.20 msec]
[     7158 ms] [prepare:resources] [count: 1, ave time 7158.00 msec]
[     5264 ms] [UN] [count: 2, ave time 2632.00 msec]
[     4446 ms] [AR] [count: 9, ave time 494.00 msec]
[     1729 ms] [ld] [count: 2, ave time 864.50 msec]
[     1439 ms] [prepare:put to dist cache] [count: 308, ave time 4.67 msec]
[      622 ms] [CF] [count: 2, ave time 311.00 msec]
[      504 ms] [PK] [count: 1, ave time 504.00 msec]
[      455 ms] [SB] [count: 1, ave time 455.00 msec]
[      366 ms] [BI] [count: 1, ave time 366.00 msec]
[      191 ms] [CP] [count: 1, ave time 191.00 msec]
[       12 ms] [prepare:clean] [count: 3, ave time 4.00 msec]
Total tasks times:
Total failed tasks time - 0 ms (0.00%)
Total tests tasks time - 157546286 ms (93.19%)
Total run tasks time - 169066087 ms
Configure time - 33.6 s
Statistics overhead 2001 ms
Info: Dump junit report to /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/junit.xml
Info: Dump results report to /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/report.json
Ok
+ echo 0
+ ./ya make .
-T --test-size=small --test-size=medium --stat --test-threads 52 --link-threads 12 -DUSE_EAT_MY_DATA --build release --sanitize=address -DDEBUGINFO_LINES_ONLY --bazel-remote-store --bazel-remote-base-uri http://cachesrv.internal:8081 --bazel-remote-username cache_user --bazel-remote-password-file /tmp/tmp.Ub2FXGwaso --bazel-remote-put --dist-cache-max-file-size=209715200 -A --retest --stat -DCONSISTENT_DEBUG --no-dir-outputs --test-failure-code 0 --build-all --cache-size 2TB --force-build-depends -X --log-file /home/runner/actions_runner/_work/ydb/ydb/tmp/results/ya_log.txt --evlog-file /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_2/ya_evlog.jsonl --junit /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_2/junit.xml --build-results-report /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_2/report.json --output /home/runner/actions_runner/_work/ydb/ydb/tmp/out Output root is subdirectory of Arcadia root, this may cause non-idempotent build Configuring dependencies for platform default-linux-x86_64-release-asan Configuring dependencies for platform tools [2 ymakes processing] [7938/7938 modules configured] [3681/4390 modules rendered] [2 ymakes processing] [7938/7938 modules configured] [4325/4390 modules rendered] [2 ymakes processing] [7938/7938 modules configured] [4390/4390 modules rendered] Configuring dependencies for platform test_tool_tc1-global [0 ymakes processing] [7944/7944 modules configured] [4390/4390 modules rendered] Configuring tests execution Configuring local and dist store caches Configuration done. Preparing for execution |10.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut | 2.9%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a | 3.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant | 2.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/api/ydb-tests-functional-api | 2.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/config/ut/ydb-services-config-ut | 2.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/generic/analytics/ydb-tests-fq-generic-analytics | 3.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/viewer/tests/ydb-core-viewer-tests | 3.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs | 3.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud | 4.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors | 4.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut | 4.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/column_family/compression/ydb-tests-olap-column_family-compression | 6.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/yds/ydb-tests-fq-yds | 6.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/kqprun/kqprun | 8.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut | 8.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut | 9.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence | 8.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_data_erasure/ydb-core-tx-schemeshard-ut_data_erasure | 8.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/actor/ut/ydb-core-ymq-actor-ut | 9.1%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/engine/ut/ydb-core-engine-ut | 9.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut | 9.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut | 9.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/stress_tool/ydb_stress_tool | 9.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/common/ydb-tests-fq-common | 9.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut | 9.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut |10.0%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/client/libyt-yt-client.a |10.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename |10.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/console/ut/ydb-core-cms-console-ut |10.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader |10.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/canonical/ydb-tests-functional-canonical |10.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber |10.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant |10.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication |10.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests |11.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys |11.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk |11.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/tx-schemeshard-ut_vector_index_build_reboots |11.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/compatibility/ydb-tests-functional-compatibility |11.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots |11.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/config/ydb-tests-functional-config |11.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant |11.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore |11.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/tx-replication-controller-ut_stream_creator |11.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut |12.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut |12.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup |12.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut |12.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk |12.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme |12.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut |12.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw |12.8%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol |11.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots |12.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init |12.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut |12.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/multi_plane/ydb-tests-fq-multi_plane |12.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx |12.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb |12.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning |12.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut |12.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/query_replay/ydb_query_replay |12.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut |12.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut |13.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots |13.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl |13.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_build_index/ydb-core-tx-datashard-ut_build_index |14.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql |14.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view |14.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe |13.5%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/core/libyt-yt-core.a |13.7%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.a |13.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/s3_import/ydb-tests-olap-s3_import |13.9%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/udfs/common/clickhouse/client/libclickhouse_client_udf.global.a |14.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/ydbd/ydbd |15.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/scenario/ydb-tests-olap-scenario |15.1%| CLEANING SYMRES |15.2%| PREPARE $(JDK17-472926544) |15.5%| PREPARE $(YMAKE_PYTHON3-4256832079) |15.6%| PREPARE $(TEST_TOOL_HOST-sbr:8330113388) |15.7%| PREPARE $(GDB) |16.0%| PREPARE $(WITH_JDK-sbr:7832760150) |16.0%| PREPARE $(PYTHON) |16.1%| PREPARE $(CLANG-2518231432) |16.2%| PREPARE $(JDK_DEFAULT-472926544) |16.3%| PREPARE $(WITH_JDK17-sbr:7832760150) |16.5%| PREPARE $(LLD_ROOT-3808007503) |16.6%| PREPARE $(CLANG-1922233694) |16.7%| PREPARE $(OS_SDK_ROOT-sbr:243881345) |17.0%| PREPARE $(CLANG18-3363451693) |17.2%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yql/essentials/minikql/comp_nodes/llvm16/libminikql-comp_nodes-llvm16.a |17.3%| PREPARE $(CLANG_FORMAT-2212207123) |17.3%| PREPARE $(CLANG16-1380963495) |17.6%| PREPARE $(FLAKE8_PY3-715603131) |18.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc |18.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut |19.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut |19.4%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/tx/datashard/ut_local_kmeans/ydb-core-tx-datashard-ut_local_kmeans |19.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view |19.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots |19.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source |19.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector |19.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test |19.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg |19.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops |19.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects |19.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut |20.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut |20.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction |20.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut |20.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas |20.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut |20.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut |20.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |20.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut |20.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots |20.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/functional-sqs-merge_split_common_table-fifo |21.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan |21.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut |21.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut |21.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/ttl_tiering/ydb-tests-olap-ttl_tiering |21.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/ydb-core-fq-libs-row_dispatcher-format_handler-ut |21.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace |21.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api |21.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator |21.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut |21.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication |21.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut |22.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut |22.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator |22.2%| [LD] 
{BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/ydb-public-sdk-cpp-src-client-federated_topic-ut |22.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql |22.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/multinode/ydb-tests-functional-sqs-multinode |22.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots |22.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut |22.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut |22.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut |22.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut |23.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing |23.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut |23.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 |23.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts |23.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_login_large/ydb-core-tx-schemeshard-ut_login_large |23.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge |23.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign |23.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms |23.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/cms/ut/ydb-services-cms-ut |23.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet_flat/benchmark/core_tablet_flat_benchmark |24.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/runtime/ydb-core-kqp-ut-runtime |24.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test |24.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats |24.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base |25.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut |25.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut |25.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_transfer_writer/core-tx-replication-service-ut_transfer_writer |26.0%| [LD] {RESULT} $(B)/ydb/core/engine/ut/ydb-core-engine-ut |27.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut |27.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/sdk_sessions_ut/ydb-services-ydb-sdk_sessions_ut |27.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut |27.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe |27.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage |27.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes |28.0%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/tx/datashard/ut_reshuffle_kmeans/ydb-core-tx-datashard-ut_reshuffle_kmeans |28.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut |28.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut |28.4%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a |28.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/large/ydb-tests-functional-sqs-large |28.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut |28.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut |28.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable |29.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots |29.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table |29.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume |29.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/ydb-core-tx-tx_proxy-ut_schemereq |29.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers |29.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut |29.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge |29.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut |29.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap |29.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache |29.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence |29.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats |30.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator |30.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer |30.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/sdk_sessions_pool_ut/ydb-services-ydb-sdk_sessions_pool_ut |30.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/sql/large/ydb-tests-sql-large |30.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut |30.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost |30.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut |30.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots |30.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller |30.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap |30.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs |30.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut |30.9%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/tx/schemeshard/ut_data_erasure_reboots/ydb-core-tx-schemeshard-ut_data_erasure_reboots |31.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests |31.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings |31.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut |31.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut |31.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain |30.6%| [AR] {RESULT} $(B)/ydb/library/yql/udfs/common/clickhouse/client/libclickhouse_client_udf.global.a |30.9%| [AR] {RESULT} $(B)/yql/essentials/minikql/comp_nodes/llvm16/libminikql-comp_nodes-llvm16.a |31.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/log/tests/ydb-tests-stress-log-tests |32.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut |33.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut |33.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_data_cleanup/ydb-core-tx-datashard-ut_data_cleanup |33.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut |33.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test |33.9%| [AR] {RESULT} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.a |33.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut |34.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut |34.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |34.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut |34.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity |34.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption |34.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots |34.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/solomon/actors/ut/ydb-library-yql-providers-solomon-actors-ut |34.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut |34.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/sql/ydb-tests-sql |34.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/tpc/medium/ydb-tests-functional-tpc-medium |34.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut |34.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator |34.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/olap_workload/tests/ydb-tests-stress-olap_workload-tests |35.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ydb-public-sdk-cpp-src-client-topic-ut |35.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/tpc/large/ydb-tests-functional-tpc-large |35.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/postgres_integrations/go-libpq/ydb-tests-postgres_integrations-go-libpq |35.2%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/tx/schemeshard/ut_transfer/ydb-core-tx-schemeshard-ut_transfer |35.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut |35.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut |35.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut |35.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots |35.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/locks/ut_range_treap/ydb-core-tx-locks-ut_range_treap |35.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut |35.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut |36.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut |36.1%| [AR] {RESULT} $(B)/yt/yt/client/libyt-yt-client.a |36.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots |36.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data |36.3%| [AR] {RESULT} $(B)/yt/yt/core/libyt-yt-core.a |36.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut |37.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load |37.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut |37.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/example/ydb-tests-example |37.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage |37.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut |37.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat |38.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |38.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut |38.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace |38.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts |38.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut |38.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica |38.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |38.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt |38.5%| [LD] {RESULT} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut |38.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut |38.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/autoconfig/ydb-tests-functional-autoconfig |38.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows |39.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut |39.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut |39.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login |39.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/graph/ut/ydb-core-graph-ut |39.6%| [LD] 
{BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_prefix_kmeans/ydb-core-tx-datashard-ut_prefix_kmeans |39.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut |39.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream |39.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut |39.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut |40.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow |40.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl |40.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut |40.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/kv/tests/ydb-tests-stress-kv-tests |40.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain |40.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots |40.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage |40.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build |40.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id |41.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join |41.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless |41.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut |41.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write |41.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr |41.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record |41.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/tenants/ydb-tests-functional-tenants |42.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp |42.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots |42.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots |42.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration |42.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats |42.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/ut/ydb-core-sys_view-ut |42.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut |43.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel |43.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build |43.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable |42.7%| [LD] {RESULT} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut |42.8%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut |42.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/query_replay_yt/query_replay_yt |43.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut |43.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless |43.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot |43.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots |43.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker |43.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection |43.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/ydb_cli/ydb-tests-functional-ydb_cli |43.6%| [LD] {RESULT} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut |43.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots |43.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows |44.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query |44.0%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/core/libyt-yt-core.a |44.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots |44.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut |45.1%| [LD] {RESULT} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test |45.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/generic/streaming/ydb-tests-fq-generic-streaming |45.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table |45.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut |45.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction |45.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/control/ut/ydb-core-control-ut |46.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut |45.3%| [LD] {RESULT} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable |45.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile |45.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut |45.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql |45.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql |45.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard |45.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/load/ydb-tests-olap-load |46.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction |46.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens |46.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer |46.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test |46.7%| [LD] {BAZEL_UPLOAD, 
SKIPPED} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution |46.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut |46.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat |47.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots |47.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow |47.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/oltp_workload/tests/ydb-tests-stress-oltp_workload-tests |47.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/s3/ydb-tests-fq-s3 |47.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut |47.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup |47.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |47.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber |47.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp |47.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service |48.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader |48.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut |48.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/ydb-tests-olap |48.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer |48.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order |48.4%| [LD] {RESULT} $(B)/ydb/tests/library/ut/ydb-tests-library-ut |48.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl |48.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/library/ut/ydb-tests-library-ut |48.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std |48.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_sample_k/ydb-core-tx-datashard-ut_sample_k |49.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |49.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/common/ydb-tests-functional-sqs-common |49.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/ut_large/ydb-core-sys_view-ut_large |49.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/oom/ydb-tests-olap-oom |49.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut |49.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/ut/ydb-core-cms-ut |49.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/fqrun/fqrun |49.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan |50.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks |50.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence |50.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots |50.5%| [LD] 
{BAZEL_UPLOAD, SKIPPED} $(B)/yql/essentials/tools/sql2yql/sql2yql |50.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ydb-public-sdk-cpp-src-client-persqueue_public-ut |50.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut |50.7%| [LD] {RESULT} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut |51.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview |51.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots |51.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/security/ut/ydb-core-security-ut |42.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache |42.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index |43.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut |43.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring |43.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep |43.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_data_erasure_reboots/ydb-core-tx-schemeshard-ut_data_erasure_reboots |43.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut |43.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut |43.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/ut/ydb-core-client-ut |43.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive |43.8%| [LD] {RESULT} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut |44.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut |44.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/ydb/ydb |44.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut |44.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move |44.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut |44.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/ut/ydb-core-mind-ut |44.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings |44.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut |45.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange |45.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut |45.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit |45.2%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost |45.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation |45.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf |45.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/docs/generator/generator |45.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/simple_queue/tests/ydb-tests-stress-simple_queue-tests |45.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes |45.5%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging |45.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan |45.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut |45.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/plans/ydb-tests-fq-plans |45.8%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/ydb-core-tx-tx_proxy-ut_schemereq |45.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |45.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order |42.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest |42.9%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes |43.1%| COMPACTING CACHE 892.1GiB |43.1%| [LD] {RESULT} $(B)/ydb/apps/ydbd/ydbd |43.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers |43.2%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut |43.3%| [LD] {RESULT} $(B)/ydb/tools/query_replay_yt/query_replay_yt |43.3%| [LD] {RESULT} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut |43.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |43.4%| [LD] {RESULT} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut |43.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |43.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest |43.6%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut |43.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_transfer/ydb-core-tx-schemeshard-ut_transfer |43.7%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator |43.8%| [LD] {RESULT} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut |43.8%| [LD] {RESULT} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut |43.9%| [LD] {RESULT} $(B)/ydb/core/graph/ut/ydb-core-graph-ut >> KqpSnapshotIsolation::TConflictReadWriteOltpNoSink |43.9%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects |44.0%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops |44.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots |44.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream |44.2%| [LD] {RESULT} $(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut |44.2%| [LD] {RESULT} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut |44.3%| [LD] {RESULT} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut |44.3%| [LD] {RESULT} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |44.4%| [LD] {RESULT} $(B)/ydb/tests/functional/tpc/large/ydb-tests-functional-tpc-large |44.4%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber |44.5%| [LD] {RESULT} $(B)/ydb/tests/functional/ydb_cli/ydb-tests-functional-ydb_cli |44.5%| [LD] {RESULT} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut |44.6%| [LD] {RESULT} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut |44.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_prefix_kmeans/ydb-core-tx-datashard-ut_prefix_kmeans |44.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain |44.8%| [LD] {RESULT} 
$(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots |44.8%| [LD] {RESULT} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut |44.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots |44.9%| [LD] {RESULT} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |45.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr |45.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots |45.1%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/common/ydb-tests-functional-sqs-common |45.1%| [LD] {RESULT} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut |45.2%| [LD] {RESULT} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens |45.3%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test |45.3%| [LD] {RESULT} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut |45.4%| [LD] {RESULT} $(B)/ydb/core/control/ut/ydb-core-control-ut |45.4%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage |45.5%| [LD] {RESULT} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut >> KqpPg::TempTablesDrop |45.5%| [LD] {RESULT} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut |45.6%| [LD] {RESULT} $(B)/ydb/core/cms/ut/ydb-core-cms-ut |45.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots |45.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange |45.8%| [LD] {RESULT} $(B)/ydb/tests/functional/config/ydb-tests-functional-config |45.8%| [LD] {RESULT} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut |45.9%| [LD] {RESULT} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader |45.9%| [LD] {RESULT} $(B)/ydb/tests/functional/canonical/ydb-tests-functional-canonical |46.0%| [LD] {RESULT} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut |46.0%| [LD] {RESULT} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut |46.1%| [LD] {RESULT} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut |46.1%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer |46.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_build_index/ydb-core-tx-datashard-ut_build_index |46.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots |46.3%| [LD] {RESULT} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests |46.4%| [LD] {RESULT} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut |46.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence |46.5%| [LD] {RESULT} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut |46.5%| [LD] {RESULT} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut |46.6%| [LD] {RESULT} $(B)/ydb/core/client/ut/ydb-core-client-ut |46.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup |46.7%| [LD] {RESULT} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe |46.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile |46.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication |46.9%| 
[LD] {RESULT} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud |46.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes |47.0%| [LD] {RESULT} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut |47.0%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk |47.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore |47.1%| [LD] {RESULT} $(B)/ydb/tests/functional/api/ydb-tests-functional-api |47.2%| [LD] {RESULT} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename |47.2%| [LD] {RESULT} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut |47.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs |47.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |47.4%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan |47.5%| [LD] {RESULT} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms |47.5%| [LD] {RESULT} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut |47.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume |47.6%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap |47.7%| [LD] {RESULT} $(B)/ydb/services/ydb/sdk_sessions_pool_ut/ydb-services-ydb-sdk_sessions_pool_ut |47.7%| [LD] {RESULT} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test |47.8%| [LD] {RESULT} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests |47.9%| [LD] {RESULT} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut |47.9%| [LD] {RESULT} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test |48.0%| [LD] {RESULT} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut |48.0%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker |48.1%| [LD] {RESULT} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut |48.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap |48.2%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data |48.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats |48.3%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut |48.3%| [LD] {RESULT} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless |48.4%| [LD] {RESULT} $(B)/ydb/core/sys_view/ut/ydb-core-sys_view-ut |48.5%| [LD] {RESULT} $(B)/ydb/tests/functional/autoconfig/ydb-tests-functional-autoconfig |48.5%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ydb-public-sdk-cpp-src-client-topic-ut |48.6%| [LD] {RESULT} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut |48.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots |48.7%| [LD] {RESULT} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut |48.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace |48.8%| [LD] {RESULT} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql |48.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction |48.9%| [LD] {RESULT} $(B)/yql/essentials/tools/sql2yql/sql2yql |49.0%| [LD] {RESULT} $(B)/ydb/tests/stress/simple_queue/tests/ydb-tests-stress-simple_queue-tests |49.0%| [LD] {RESULT} 
$(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging |49.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_sample_k/ydb-core-tx-datashard-ut_sample_k |49.1%| [LD] {RESULT} $(B)/ydb/tests/fq/plans/ydb-tests-fq-plans |49.2%| [LD] {RESULT} $(B)/ydb/tests/tools/fqrun/fqrun |49.2%| [LD] {RESULT} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut |49.3%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache |49.3%| [LD] {RESULT} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence |49.4%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw |49.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots |49.5%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf |49.6%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant |49.6%| [LD] {RESULT} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk |49.7%| [LD] {RESULT} $(B)/ydb/tools/stress_tool/ydb_stress_tool |49.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors |49.8%| [LD] {RESULT} $(B)/ydb/tests/fq/multi_plane/ydb-tests-fq-multi_plane |49.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_data_erasure/ydb-core-tx-schemeshard-ut_data_erasure |49.9%| [LD] {RESULT} $(B)/ydb/apps/ydb/ydb |49.9%| [LD] {RESULT} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber |50.0%| [LD] {RESULT} $(B)/ydb/core/viewer/tests/ydb-core-viewer-tests |50.1%| [LD] {RESULT} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut |50.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning |50.2%| [LD] {RESULT} $(B)/ydb/core/ymq/actor/ut/ydb-core-ymq-actor-ut |50.2%| [LD] {RESULT} $(B)/ydb/tests/olap/ttl_tiering/ydb-tests-olap-ttl_tiering |50.3%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/multinode/ydb-tests-functional-sqs-multinode |50.3%| [LD] {RESULT} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut |50.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats |50.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_reshuffle_kmeans/ydb-core-tx-datashard-ut_reshuffle_kmeans |50.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots |50.6%| [LD] {RESULT} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache |50.6%| [LD] {RESULT} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut |50.7%| [LD] {RESULT} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut |50.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql |50.8%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut |50.8%| [LD] {RESULT} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage |50.9%| [LD] {RESULT} $(B)/ydb/tests/fq/s3/ydb-tests-fq-s3 |50.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table |51.0%| [LD] {RESULT} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer |51.0%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service |51.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan |51.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |51.2%| 
[LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |51.3%| [LD] {RESULT} $(B)/ydb/core/security/ut/ydb-core-security-ut |51.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots |51.4%| [LD] {RESULT} $(B)/ydb/tools/query_replay/ydb_query_replay |51.4%| [LD] {RESULT} $(B)/ydb/tests/fq/yds/ydb-tests-fq-yds |51.5%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring |51.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots |51.6%| [LD] {RESULT} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut |51.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme |51.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg |51.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/tx-schemeshard-ut_vector_index_build_reboots |51.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots |51.9%| [LD] {RESULT} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut |51.9%| [LD] {RESULT} $(B)/ydb/services/config/ut/ydb-services-config-ut |52.0%| [LD] {RESULT} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/ydb-core-fq-libs-row_dispatcher-format_handler-ut |52.0%| [LD] {RESULT} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive |52.1%| [LD] {RESULT} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api |52.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base |52.2%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut |52.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots |52.3%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut |52.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table |52.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view |52.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats |52.5%| [LD] {RESULT} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut |52.6%| [LD] {RESULT} $(B)/ydb/services/cms/ut/ydb-services-cms-ut |52.6%| [LD] {RESULT} $(B)/ydb/tests/sql/large/ydb-tests-sql-large |52.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 |52.8%| [LD] {RESULT} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller |52.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_data_cleanup/ydb-core-tx-datashard-ut_data_cleanup |52.9%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut |52.9%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut |53.0%| [LD] {RESULT} $(B)/ydb/tests/functional/tpc/medium/ydb-tests-functional-tpc-medium |53.0%| [LD] {RESULT} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat |53.1%| [LD] {RESULT} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut |53.1%| [LD] {RESULT} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut |53.2%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view |53.3%| [LD] {RESULT} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard |53.3%| [LD] {RESULT} 
$(B)/ydb/core/tx/datashard/ut_local_kmeans/ydb-core-tx-datashard-ut_local_kmeans |53.4%| [LD] {RESULT} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel |53.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write |53.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot |53.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan |53.6%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity |53.6%| [LD] {RESULT} $(B)/ydb/tests/stress/log/tests/ydb-tests-stress-log-tests |53.7%| [LD] {RESULT} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit |53.7%| [LD] {RESULT} $(B)/ydb/tests/functional/tenants/ydb-tests-functional-tenants |53.8%| [LD] {RESULT} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut |53.9%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat |53.9%| [LD] {RESULT} $(B)/ydb/tests/olap/ydb-tests-olap |54.0%| [AR] {RESULT} $(B)/yt/yt/core/libyt-yt-core.a |54.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection |54.1%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query |54.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks |54.2%| [LD] {RESULT} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut |54.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index |54.3%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt |54.4%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std |54.4%| [LD] {RESULT} $(B)/ydb/tests/stress/oltp_workload/tests/ydb-tests-stress-oltp_workload-tests |54.5%| [LD] {RESULT} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut |54.5%| [LD] {RESULT} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation |54.6%| [LD] {RESULT} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut |54.6%| [LD] {RESULT} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut |54.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector |54.7%| [LD] {RESULT} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut |54.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain |54.9%| [LD] {RESULT} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut |54.9%| [LD] {RESULT} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut |55.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots |55.0%| [LD] {RESULT} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe |55.1%| [LD] {RESULT} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut |55.1%| [LD] {RESULT} $(B)/ydb/tests/postgres_integrations/go-libpq/ydb-tests-postgres_integrations-go-libpq |55.2%| [LD] {RESULT} $(B)/ydb/core/tx/locks/ut_range_treap/ydb-core-tx-locks-ut_range_treap |55.2%| [LD] {RESULT} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut |55.3%| [LD] {RESULT} $(B)/ydb/tests/olap/s3_import/ydb-tests-olap-s3_import |55.3%| [LD] {RESULT} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut |55.4%| [LD] {RESULT} 
$(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id |55.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source |55.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move |55.6%| [LD] {RESULT} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load |55.6%| [LD] {RESULT} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable |55.7%| [LD] {RESULT} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut |55.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build |55.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots |55.8%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join |55.9%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record |56.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots |56.0%| [LD] {RESULT} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut |56.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows |56.1%| [LD] {RESULT} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut |56.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup |56.2%| [LD] {RESULT} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut |56.3%| [LD] {RESULT} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow |56.3%| [LD] {RESULT} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut |56.4%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ydb-public-sdk-cpp-src-client-persqueue_public-ut |56.4%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx |56.5%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader |56.6%| [LD] {RESULT} $(B)/ydb/core/sys_view/ut_large/ydb-core-sys_view-ut_large |56.6%| [LD] {RESULT} $(B)/ydb/tests/olap/load/ydb-tests-olap-load |56.7%| [LD] {RESULT} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution |56.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep |56.8%| [LD] {RESULT} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut |56.8%| [LD] {RESULT} $(B)/ydb/tests/fq/generic/analytics/ydb-tests-fq-generic-analytics |56.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys |56.9%| [LD] {RESULT} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut |57.0%| [LD] {RESULT} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl |57.1%| [LD] {RESULT} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut |57.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots |57.2%| [LD] {RESULT} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut |57.2%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview |57.3%| [LD] {RESULT} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut |57.3%| [LD] {RESULT} $(B)/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc |57.4%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant |57.4%| [LD] {RESULT} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb |57.5%| [LD] {RESULT} 
$(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots |57.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init |57.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction |57.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge |57.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator |57.8%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/large/ydb-tests-functional-sqs-large |57.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs |57.9%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator |57.9%| [LD] {RESULT} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts |58.0%| [LD] {RESULT} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut |58.0%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/runtime/ydb-core-kqp-ut-runtime |58.1%| [LD] {RESULT} $(B)/ydb/services/ydb/sdk_sessions_ut/ydb-services-ydb-sdk_sessions_ut |58.2%| [LD] {RESULT} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut |58.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence |58.3%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/ydb-public-sdk-cpp-src-client-federated_topic-ut |58.3%| [LD] {RESULT} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut |58.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing |58.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge |58.5%| [LD] {RESULT} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts |58.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots |58.6%| [LD] {RESULT} $(B)/ydb/tests/olap/docs/generator/generator |58.7%| [LD] {RESULT} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption |58.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots |58.8%| [LD] {RESULT} $(B)/ydb/tests/stress/olap_workload/tests/ydb-tests-stress-olap_workload-tests |58.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows |58.9%| [LD] {RESULT} $(B)/ydb/tests/sql/ydb-tests-sql |58.9%| [LD] {RESULT} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut |59.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login |59.0%| [LD] {RESULT} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut |59.1%| [LD] {RESULT} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut |59.2%| [LD] {RESULT} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut |59.2%| [LD] {RESULT} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut |59.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build |59.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless |59.4%| [LD] {RESULT} $(B)/ydb/tests/example/ydb-tests-example |59.4%| [LD] {RESULT} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut |59.5%| [LD] {RESULT} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut |59.5%| [LD] {RESULT} 
$(B)/ydb/core/tx/schemeshard/ut_login_large/ydb-core-tx-schemeshard-ut_login_large |59.6%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica |59.6%| [LD] {RESULT} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration |59.7%| [LD] {RESULT} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut |59.8%| [LD] {RESULT} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut |59.8%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol |59.9%| [LD] {RESULT} $(B)/ydb/tests/olap/column_family/compression/ydb-tests-olap-column_family-compression |59.9%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/tx-replication-controller-ut_stream_creator |60.0%| [LD] {RESULT} $(B)/ydb/tests/fq/common/ydb-tests-fq-common |60.0%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant |60.1%| [LD] {RESULT} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut |60.1%| [LD] {RESULT} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace |60.2%| [LD] {RESULT} $(B)/ydb/core/cms/console/ut/ydb-core-cms-console-ut |60.3%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas |60.3%| [LD] {RESULT} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut |60.4%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/functional-sqs-merge_split_common_table-fifo |60.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication |60.5%| [LD] {RESULT} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut |60.5%| [LD] {RESULT} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut |60.6%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/benchmark/core_tablet_flat_benchmark |60.6%| [LD] {RESULT} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut |60.7%| [LD] {RESULT} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut |60.7%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_transfer_writer/core-tx-replication-service-ut_transfer_writer |60.8%| [LD] {RESULT} $(B)/ydb/tests/olap/oom/ydb-tests-olap-oom |60.9%| [LD] {RESULT} $(B)/ydb/tests/olap/scenario/ydb-tests-olap-scenario |60.9%| [LD] {RESULT} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut |61.0%| [LD] {RESULT} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut |61.0%| [LD] {RESULT} $(B)/ydb/tests/fq/generic/streaming/ydb-tests-fq-generic-streaming |61.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign |61.1%| [LD] {RESULT} $(B)/ydb/tests/stress/kv/tests/ydb-tests-stress-kv-tests |61.2%| [LD] {RESULT} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut |61.2%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow |61.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction |61.4%| [LD] {RESULT} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut |61.4%| [LD] {RESULT} $(B)/ydb/core/mind/ut/ydb-core-mind-ut |61.5%| [LD] {RESULT} $(B)/ydb/tests/tools/kqprun/kqprun |61.5%| [LD] {RESULT} $(B)/ydb/library/yql/providers/solomon/actors/ut/ydb-library-yql-providers-solomon-actors-ut |61.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator |61.6%| [LD] {RESULT} $(B)/ydb/tests/functional/compatibility/ydb-tests-functional-compatibility |61.7%| [LD] 
{RESULT} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |61.7%| [LD] {RESULT} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut |61.8%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer |61.9%| [LD] {RESULT} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut |61.9%| [LD] {RESULT} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage |62.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |62.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |62.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |62.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |62.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |62.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |62.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |62.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |62.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-56 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-66 |62.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |62.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |62.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |62.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |62.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |62.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |62.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |62.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |62.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |63.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |63.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> test_sql_streaming.py::test[suites-GroupByHop-default.txt] |63.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |63.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |63.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |63.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |63.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |63.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |63.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |63.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |63.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest |63.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |63.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest |63.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> YdbLogStore::AlterLogTable >> KqpPg::TempTablesDrop [GOOD] >> data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test >> ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change |63.7%| [TS] {asan, 
default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest |63.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest |63.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest |63.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest |63.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest |64.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::ClickHouseFilterPushdown |64.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest |64.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest |64.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> ReadIteratorExternalBlobs::ExtBlobsMultipleColumns |64.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest |64.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest |64.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest |64.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest |64.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest |64.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest |64.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncQueryClient >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryServiceStreamCall
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::TempTablesDrop [GOOD]
Test command err:
Trying to start YDB, gRPC: 28573, MsgBus: 32593
2025-03-31T17:11:25.874187Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488022444324350240:2067];send_to=[0:7307199536658146131:7762515];
2025-03-31T17:11:25.875159Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/r3ux/000211/r3tmp/tmphu9sdk/pdisk_1.dat
2025-03-31T17:11:26.174556Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 28573, node 1
2025-03-31T17:11:26.230092Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T17:11:26.230116Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T17:11:26.230125Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T17:11:26.230350Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
2025-03-31T17:11:26.266982Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T17:11:26.267065Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T17:11:26.268603Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:32593
TClient is connected to server localhost:32593
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-31T17:11:26.677304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T17:11:27.948648Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022452914285491:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {<main>: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T17:11:27.948652Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022452914285483:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {<main>: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T17:11:27.948743Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {<main>: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T17:11:27.952054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480
2025-03-31T17:11:27.960441Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488022452914285497:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {<main>: Error: Transaction 281474976710658 completed, doublechecking }
2025-03-31T17:11:28.013706Z node 1 :TX_PROXY ERROR: Actor# [1:7488022457209252845:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-31T17:11:28.071600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:1, at schemeshard: 72057594046644480
2025-03-31T17:11:28.297723Z node 1 :TX_PROXY ERROR: Actor# [1:7488022457209253072:2460] txid# 281474976710664, issues: { message: "Check failed: path: \'/Root/.tmp/sessions\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 }
2025-03-31T17:11:28.306106Z node 1 :TX_PROXY ERROR: Actor# [1:7488022457209253080:2466] txid# 281474976710665, issues: { message: "Check failed: path: \'/Root/.tmp/sessions/OGFkMjAwZjItOWY0MDA4YTItNzVlOTIxNDAtZjYwNDQyNg==\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 }
2025-03-31T17:11:28.320646Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found
2025-03-31T17:11:28.329595Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488022457209253133:2365], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
    <main>:3:31: Error: At function: KiReadTable!
        <main>:3:31: Error: Cannot find table 'db.[/Root/PgTemp]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003
2025-03-31T17:11:28.329817Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OGFkMjAwZjItOWY0MDA4YTItNzVlOTIxNDAtZjYwNDQyNg==, ActorId: [1:7488022452914285456:2328], ActorState: ExecuteState, TraceId: 01jqpjzkt01p5qr2jrawvrz8k3, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id:
2025-03-31T17:11:28.361018Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488022457209253144:2372], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
    <main>:3:31: Error: At function: KiReadTable!
        <main>:3:31: Error: Cannot find table 'db.[/Root/PgTemp]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003
2025-03-31T17:11:28.361280Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZGJiYTI3ZjQtYjY3ODZjNzYtYjJiZWU1YTItMjMxNzE5OTI=, ActorId: [1:7488022457209253140:2369], ActorState: ExecuteState, TraceId: 01jqpjzkv0eezyh8nvbmdkdgka, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id:
|64.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |64.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |64.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |64.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |64.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |64.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |65.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |65.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |65.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |65.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |65.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |65.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |65.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |65.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |65.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |65.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-56 [GOOD] |65.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |65.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> ReadIteratorExternalBlobs::ExtBlobsWithCompactingMiddleRows >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-66 [GOOD] |65.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |65.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |65.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |65.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |65.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |65.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |66.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |66.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |66.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |66.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |66.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> test_sql_streaming.py::test[suites-ReadTopicWithMetadataWithFilter-default.txt]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-56 [GOOD]
Test command err:
Starting YDB, grpc: 6018, msgbus: 28275
2025-03-31T17:11:27.187474Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488022453958222707:2074];send_to=[0:7307199536658146131:7762515];
2025-03-31T17:11:27.187614Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/r3ux/0002e4/r3tmp/tmpUAU2nV/pdisk_1.dat
2025-03-31T17:11:27.447898Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 6018, node 1
2025-03-31T17:11:27.517263Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T17:11:27.517372Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T17:11:27.520100Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-31T17:11:27.533456Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T17:11:27.533477Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T17:11:27.533491Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T17:11:27.533596Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:28275
WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1
2025-03-31T17:11:27.750079Z node 1 :TX_PROXY DEBUG: actor# [1:7488022453958222969:2139] Handle TEvNavigate describe path dc-1
2025-03-31T17:11:27.750121Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022453958223430:2435] HANDLE EvNavigateScheme dc-1
2025-03-31T17:11:27.750982Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022453958223430:2435] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0
2025-03-31T17:11:27.780263Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022453958223430:2435] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true }
2025-03-31T17:11:27.788314Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022453958223430:2435] Handle TEvDescribeSchemeResult Forward to# [1:7488022453958223429:2434] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'dc-1' success.
2025-03-31T17:11:27.800132Z node 1 :TX_PROXY DEBUG: actor# [1:7488022453958222969:2139] Handle TEvProposeTransaction
2025-03-31T17:11:27.800157Z node 1 :TX_PROXY DEBUG: actor# [1:7488022453958222969:2139] TxId# 281474976715657 ProcessProposeTransaction
2025-03-31T17:11:27.800283Z node 1 :TX_PROXY DEBUG: actor# [1:7488022453958222969:2139] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7488022453958223445:2443]
2025-03-31T17:11:27.906531Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022453958223445:2443] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: ""
2025-03-31T17:11:27.906672Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022453958223445:2443] txid# 281474976715657 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1
2025-03-31T17:11:27.906708Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022453958223445:2443] txid# 281474976715657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1
2025-03-31T17:11:27.906797Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022453958223445:2443] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache
2025-03-31T17:11:27.907202Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022453958223445:2443] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0
2025-03-31T17:11:27.907362Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022453958223445:2443] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false
2025-03-31T17:11:27.907462Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022453958223445:2443] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480}
2025-03-31T17:11:27.907614Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022453958223445:2443] txid# 281474976715657 HANDLE EvClientConnected
2025-03-31T17:11:27.908388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
2025-03-31T17:11:27.910510Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022453958223445:2443] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657}
2025-03-31T17:11:27.910560Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022453958223445:2443] txid# 281474976715657 SEND to# [1:7488022453958223444:2442] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53}
waiting...
2025-03-31T17:11:27.919674Z node 1 :TX_PROXY DEBUG: actor# [1:7488022453958222969:2139] Handle TEvProposeTransaction
2025-03-31T17:11:27.919695Z node 1 :TX_PROXY DEBUG: actor# [1:7488022453958222969:2139] TxId# 281474976715658 ProcessProposeTransaction
2025-03-31T17:11:27.919719Z node 1 :TX_PROXY DEBUG: actor# [1:7488022453958222969:2139] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [1:7488022453958223483:2477]
2025-03-31T17:11:27.922118Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022453958223483:2477] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: ""
2025-03-31T17:11:27.922198Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022453958223483:2477] txid# 281474976715658 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1
2025-03-31T17:11:27.922221Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022453958223483:2477] txid# 281474976715658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1
2025-03-31T17:11:27.922269Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022453958223483:2477] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache
2025-03-31T17:11:27.922528Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022453958223483:2477] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0
2025-03-31T17:11:27.922612Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022453958223483:2477] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true
2025-03-31T17:11:27.922658Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022453958223483:2477] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480}
2025-03-31T17:11:27.922778Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022453958223483:2477] txid# 281474976715658 HANDLE EvClientConnected
2025-03-31T17:11:27.923173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480
2025-03-31T17:11:27.924985Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022453958223483:2477] txid# 281474976715658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715658}
2025-03-31T17:11:27.925049Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022453958223483:2477] txid# 281474976715658 SEND to# [1:7488022453958223482:2476] Source {TEvProposeTransactionStatus txid# 281474976715658 Status# 48}
2025-03-31T17:11:29.426257Z node 1 :KQP_WORKLOAD_SERVICE WARN:
[WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022462548158166:2335], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:29.426266Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022462548158174:2338], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:29.426368Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:29.426661Z node 1 :TX_PROXY DEBUG: actor# [1:7488022453958222969:2139] Handle TEvProposeTransaction 2025-03-31T17:11:29.426692Z node 1 :TX_PROXY DEBUG: actor# [1:7488022453958222969:2139] TxId# 281474976715659 ProcessProposeTransaction 2025-03-31T1 ... 661 ProcessProposeTransaction 2025-03-31T17:11:29.714881Z node 1 :TX_PROXY DEBUG: actor# [1:7488022453958222969:2139] Cookie# 0 userReqId# "" txid# 281474976715661 SEND to# [1:7488022462548158285:2580] 2025-03-31T17:11:29.717334Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022462548158285:2580] txid# 281474976715661 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "clusteradmin" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:47136" 2025-03-31T17:11:29.717409Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022462548158285:2580] txid# 281474976715661 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-31T17:11:29.717427Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022462548158285:2580] txid# 281474976715661 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T17:11:29.717472Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022462548158285:2580] txid# 281474976715661 TEvNavigateKeySet requested from SchemeCache 2025-03-31T17:11:29.717823Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022462548158285:2580] txid# 281474976715661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T17:11:29.717946Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022462548158285:2580] HANDLE EvNavigateKeySetResult, txid# 281474976715661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T17:11:29.717996Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022462548158285:2580] txid# 281474976715661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715661 TabletId# 72057594046644480} 2025-03-31T17:11:29.718197Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022462548158285:2580] txid# 281474976715661 HANDLE EvClientConnected 2025-03-31T17:11:29.725807Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022462548158285:2580] txid# 281474976715661 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715661} 2025-03-31T17:11:29.725856Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022462548158285:2580] txid# 281474976715661 SEND to# [1:7488022462548158284:2332] Source {TEvProposeTransactionStatus txid# 281474976715661 Status# 48} 2025-03-31T17:11:29.784452Z node 1 :TX_PROXY DEBUG: actor# [1:7488022453958222969:2139] Handle TEvProposeTransaction 2025-03-31T17:11:29.784489Z node 1 :TX_PROXY DEBUG: actor# [1:7488022453958222969:2139] TxId# 281474976715662 ProcessProposeTransaction 2025-03-31T17:11:29.784521Z node 1 :TX_PROXY DEBUG: actor# [1:7488022453958222969:2139] Cookie# 0 userReqId# "" txid# 281474976715662 SEND to# [1:7488022462548158305:2594] 2025-03-31T17:11:29.786877Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022462548158305:2594] txid# 281474976715662 Bootstrap EvSchemeRequest record: 
Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\022\010\001\022\016\032\014clusteradmin\n\032\010\000\022\026\010\001\020\200\200\002\032\014clusteradmin \000" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:47140" 2025-03-31T17:11:29.786951Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022462548158305:2594] txid# 281474976715662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-31T17:11:29.786968Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022462548158305:2594] txid# 281474976715662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T17:11:29.787016Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022462548158305:2594] txid# 281474976715662 TEvNavigateKeySet requested from SchemeCache 2025-03-31T17:11:29.787370Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022462548158305:2594] txid# 281474976715662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T17:11:29.787502Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022462548158305:2594] HANDLE EvNavigateKeySetResult, txid# 281474976715662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T17:11:29.787553Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022462548158305:2594] txid# 281474976715662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715662 TabletId# 72057594046644480} 2025-03-31T17:11:29.787687Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022462548158305:2594] txid# 281474976715662 HANDLE EvClientConnected 2025-03-31T17:11:29.788138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T17:11:29.790534Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022462548158305:2594] txid# 281474976715662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715662} 2025-03-31T17:11:29.790603Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022462548158305:2594] txid# 281474976715662 SEND to# [1:7488022462548158304:2345] Source {TEvProposeTransactionStatus txid# 281474976715662 Status# 48} 2025-03-31T17:11:29.820434Z node 1 :TX_PROXY DEBUG: actor# [1:7488022453958222969:2139] Handle TEvProposeTransaction 2025-03-31T17:11:29.820466Z node 1 :TX_PROXY DEBUG: actor# [1:7488022453958222969:2139] TxId# 281474976715663 ProcessProposeTransaction 2025-03-31T17:11:29.820502Z node 1 :TX_PROXY DEBUG: actor# [1:7488022453958222969:2139] Cookie# 0 userReqId# "" txid# 281474976715663 SEND to# [1:7488022462548158344:2619] 2025-03-31T17:11:29.823507Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022462548158344:2619] txid# 281474976715663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:47162" 2025-03-31T17:11:29.823608Z node 1 :TX_PROXY DEBUG: 
Actor# [1:7488022462548158344:2619] txid# 281474976715663 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-31T17:11:29.823632Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022462548158344:2619] txid# 281474976715663 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T17:11:29.823680Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022462548158344:2619] txid# 281474976715663 TEvNavigateKeySet requested from SchemeCache 2025-03-31T17:11:29.823971Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022462548158344:2619] txid# 281474976715663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T17:11:29.824084Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022462548158344:2619] HANDLE EvNavigateKeySetResult, txid# 281474976715663 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T17:11:29.824168Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022462548158344:2619] txid# 281474976715663 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715663 TabletId# 72057594046644480} 2025-03-31T17:11:29.824316Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022462548158344:2619] txid# 281474976715663 HANDLE EvClientConnected 2025-03-31T17:11:29.830421Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022462548158344:2619] txid# 281474976715663 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715663} 2025-03-31T17:11:29.830466Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022462548158344:2619] txid# 281474976715663 SEND to# [1:7488022462548158343:2347] Source {TEvProposeTransactionStatus txid# 281474976715663 Status# 48} 2025-03-31T17:11:29.865970Z node 1 :TX_PROXY DEBUG: actor# [1:7488022453958222969:2139] Handle TEvProposeTransaction 2025-03-31T17:11:29.865998Z node 1 :TX_PROXY DEBUG: actor# [1:7488022453958222969:2139] TxId# 281474976715664 ProcessProposeTransaction 2025-03-31T17:11:29.866057Z node 1 :TX_PROXY DEBUG: actor# [1:7488022453958222969:2139] Cookie# 0 userReqId# "" txid# 281474976715664 SEND to# [1:7488022462548158372:2631] 2025-03-31T17:11:29.868823Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022462548158372:2631] txid# 281474976715664 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\014clusteradmin\022\030\022\026\n\024all-users@well-known\032\334\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzQ4NDI4OSwiaWF0IjoxNzQzNDQxMDg5LCJzdWIiOiJjbHVzdGVyYWRtaW4ifQ.tZwIsH1tdDxRPH-LRMqCx4-WQxuJeyXKim4PMyq4Rh1H9V49z0teBGS2WU_vYvJSZDF8qTiIvqZrv2KpkZX-BxknVP7BoXbjc4t_k_E78PXnM7skrzgTaYnIMScThpjrQCYzLlbR-zepCPuS3aqSpB3uPywLFx_68a2qe9keLuU-YfrUW_Qs45P3PJiST3lbZB73pkY64C-UbBL-lAcFBTTQ4U4_f-JfCaILXqq9qXHn5QX0p57uv0CzSDG3FiGK9iaLDq4LDuLOSbfIBkkoqqHshjkrT1ES9GO_Dp1IV-R8cT7UsVWmg1ZX2RZxLqp4bfFQCZZY9bSjfmJguL0WLQ\"\005Login*\210\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzQ4NDI4OSwiaWF0IjoxNzQzNDQxMDg5LCJzdWIiOiJjbHVzdGVyYWRtaW4ifQ.**" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:47192" 2025-03-31T17:11:29.868899Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022462548158372:2631] txid# 281474976715664 Bootstrap, UserSID: clusteradmin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 
2025-03-31T17:11:29.868920Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022462548158372:2631] txid# 281474976715664 Bootstrap, UserSID: clusteradmin IsClusterAdministrator: 1 2025-03-31T17:11:29.868965Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022462548158372:2631] txid# 281474976715664 TEvNavigateKeySet requested from SchemeCache 2025-03-31T17:11:29.869380Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022462548158372:2631] txid# 281474976715664 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T17:11:29.869518Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022462548158372:2631] HANDLE EvNavigateKeySetResult, txid# 281474976715664 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T17:11:29.869576Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022462548158372:2631] txid# 281474976715664 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715664 TabletId# 72057594046644480} 2025-03-31T17:11:29.869729Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022462548158372:2631] txid# 281474976715664 HANDLE EvClientConnected 2025-03-31T17:11:29.871943Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022462548158372:2631] txid# 281474976715664 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715664} 2025-03-31T17:11:29.872006Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022462548158372:2631] txid# 281474976715664 SEND to# [1:7488022462548158371:2359] Source {TEvProposeTransactionStatus txid# 281474976715664 Status# 48} |66.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |66.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-66 [GOOD] Test command err: Starting YDB, grpc: 8728, msgbus: 62012 2025-03-31T17:11:27.210861Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488022455246280068:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:11:27.210994Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/r3ux/0002e7/r3tmp/tmp4ZizaS/pdisk_1.dat 2025-03-31T17:11:27.523294Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8728, node 1 2025-03-31T17:11:27.538429Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-03-31T17:11:27.538463Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-03-31T17:11:27.561515Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T17:11:27.561545Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T17:11:27.561560Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T17:11:27.561738Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T17:11:27.572790Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T17:11:27.572930Z node 1 :HIVE WARN: HIVE#72057594037968897 
Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T17:11:27.576380Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:62012 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-03-31T17:11:27.742979Z node 1 :TX_PROXY DEBUG: actor# [1:7488022455246280331:2139] Handle TEvNavigate describe path dc-1 2025-03-31T17:11:27.743059Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022455246280794:2436] HANDLE EvNavigateScheme dc-1 2025-03-31T17:11:27.743904Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022455246280794:2436] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-31T17:11:27.774527Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022455246280794:2436] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-03-31T17:11:27.782115Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022455246280794:2436] Handle TEvDescribeSchemeResult Forward to# [1:7488022455246280793:2435] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-03-31T17:11:27.804579Z node 1 :TX_PROXY DEBUG: actor# [1:7488022455246280331:2139] Handle TEvProposeTransaction 2025-03-31T17:11:27.804618Z node 1 :TX_PROXY DEBUG: actor# [1:7488022455246280331:2139] TxId# 281474976710657 ProcessProposeTransaction 2025-03-31T17:11:27.804716Z node 1 :TX_PROXY DEBUG: actor# [1:7488022455246280331:2139] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7488022455246280807:2442] 2025-03-31T17:11:27.862817Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022455246280807:2442] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-31T17:11:27.862917Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022455246280807:2442] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T17:11:27.862950Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022455246280807:2442] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T17:11:27.863010Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022455246280807:2442] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-03-31T17:11:27.863322Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022455246280807:2442] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T17:11:27.863468Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022455246280807:2442] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-03-31T17:11:27.863513Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022455246280807:2442] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-03-31T17:11:27.863642Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022455246280807:2442] txid# 281474976710657 HANDLE EvClientConnected 2025-03-31T17:11:27.864271Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T17:11:27.866099Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022455246280807:2442] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-03-31T17:11:27.866149Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022455246280807:2442] txid# 281474976710657 SEND to# [1:7488022455246280806:2441] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 
2025-03-31T17:11:27.875441Z node 1 :TX_PROXY DEBUG: actor# [1:7488022455246280331:2139] Handle TEvProposeTransaction 2025-03-31T17:11:27.875478Z node 1 :TX_PROXY DEBUG: actor# [1:7488022455246280331:2139] TxId# 281474976710658 ProcessProposeTransaction 2025-03-31T17:11:27.875511Z node 1 :TX_PROXY DEBUG: actor# [1:7488022455246280331:2139] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7488022455246280852:2483] 2025-03-31T17:11:27.877080Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022455246280852:2483] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-31T17:11:27.877133Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022455246280852:2483] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T17:11:27.877144Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022455246280852:2483] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T17:11:27.877179Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022455246280852:2483] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-03-31T17:11:27.877406Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022455246280852:2483] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T17:11:27.877480Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022455246280852:2483] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T17:11:27.877535Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022455246280852:2483] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-03-31T17:11:27.877644Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022455246280852:2483] txid# 281474976710658 HANDLE EvClientConnected 2025-03-31T17:11:27.878035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T17:11:27.879726Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022455246280852:2483] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-03-31T17:11:27.879767Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022455246280852:2483] txid# 281474976710658 SEND to# [1:7488022455246280851:2482] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-03-31T17:11:29.404753Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022463836215543:2338], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:29.404767Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022463836215535:2335], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:29.404869Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:29.405133Z node 1 :TX_PROXY DEBUG: actor# [1:7488022455246280331:2139] Han ... X_PROXY DEBUG: actor# [1:7488022455246280331:2139] Cookie# 0 userReqId# "" txid# 281474976710661 SEND to# [1:7488022463836215648:2580] 2025-03-31T17:11:29.798584Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022463836215648:2580] txid# 281474976710661 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "ordinaryuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:58390" 2025-03-31T17:11:29.798644Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022463836215648:2580] txid# 281474976710661 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T17:11:29.798676Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022463836215648:2580] txid# 281474976710661 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T17:11:29.798725Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022463836215648:2580] txid# 281474976710661 TEvNavigateKeySet requested from SchemeCache 2025-03-31T17:11:29.799062Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022463836215648:2580] txid# 281474976710661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T17:11:29.799211Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022463836215648:2580] HANDLE EvNavigateKeySetResult, txid# 281474976710661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T17:11:29.799258Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022463836215648:2580] txid# 281474976710661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710661 TabletId# 72057594046644480} 2025-03-31T17:11:29.799394Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022463836215648:2580] txid# 281474976710661 HANDLE EvClientConnected 2025-03-31T17:11:29.807878Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022463836215648:2580] txid# 281474976710661 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710661} 2025-03-31T17:11:29.807940Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022463836215648:2580] txid# 281474976710661 SEND to# [1:7488022463836215647:2332] Source {TEvProposeTransactionStatus txid# 281474976710661 Status# 48} 2025-03-31T17:11:29.858722Z node 1 :TX_PROXY DEBUG: actor# [1:7488022455246280331:2139] Handle TEvProposeTransaction 2025-03-31T17:11:29.858752Z node 1 :TX_PROXY DEBUG: actor# [1:7488022455246280331:2139] TxId# 281474976710662 ProcessProposeTransaction 2025-03-31T17:11:29.858797Z node 1 :TX_PROXY DEBUG: actor# [1:7488022455246280331:2139] Cookie# 0 userReqId# "" txid# 281474976710662 SEND to# [1:7488022463836215668:2594] 2025-03-31T17:11:29.861194Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022463836215668:2594] txid# 281474976710662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\022\010\001\022\016\032\014ordinaryuser\n\032\010\000\022\026\010\001\020\200\200\002\032\014ordinaryuser 
\000\n\031\010\000\022\025\010\001\020\200\010\032\014ordinaryuser \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:58398" 2025-03-31T17:11:29.861250Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022463836215668:2594] txid# 281474976710662 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T17:11:29.861268Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022463836215668:2594] txid# 281474976710662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T17:11:29.861319Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022463836215668:2594] txid# 281474976710662 TEvNavigateKeySet requested from SchemeCache 2025-03-31T17:11:29.861694Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022463836215668:2594] txid# 281474976710662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T17:11:29.861848Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022463836215668:2594] HANDLE EvNavigateKeySetResult, txid# 281474976710662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T17:11:29.861908Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022463836215668:2594] txid# 281474976710662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710662 TabletId# 72057594046644480} 2025-03-31T17:11:29.862057Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022463836215668:2594] txid# 281474976710662 HANDLE EvClientConnected 2025-03-31T17:11:29.862538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T17:11:29.865017Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022463836215668:2594] txid# 281474976710662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710662} 2025-03-31T17:11:29.865083Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022463836215668:2594] txid# 281474976710662 SEND to# [1:7488022463836215667:2345] Source {TEvProposeTransactionStatus txid# 281474976710662 Status# 48} 2025-03-31T17:11:29.898540Z node 1 :TX_PROXY DEBUG: actor# [1:7488022455246280331:2139] Handle TEvProposeTransaction 2025-03-31T17:11:29.898574Z node 1 :TX_PROXY DEBUG: actor# [1:7488022455246280331:2139] TxId# 281474976710663 ProcessProposeTransaction 2025-03-31T17:11:29.898639Z node 1 :TX_PROXY DEBUG: actor# [1:7488022455246280331:2139] Cookie# 0 userReqId# "" txid# 281474976710663 SEND to# [1:7488022463836215703:2615] 2025-03-31T17:11:29.900576Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022463836215703:2615] txid# 281474976710663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:58412" 2025-03-31T17:11:29.900638Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022463836215703:2615] txid# 281474976710663 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 
2025-03-31T17:11:29.900666Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022463836215703:2615] txid# 281474976710663 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T17:11:29.900697Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022463836215703:2615] txid# 281474976710663 TEvNavigateKeySet requested from SchemeCache 2025-03-31T17:11:29.901176Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022463836215703:2615] txid# 281474976710663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T17:11:29.901334Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022463836215703:2615] HANDLE EvNavigateKeySetResult, txid# 281474976710663 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T17:11:29.901395Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022463836215703:2615] txid# 281474976710663 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710663 TabletId# 72057594046644480} 2025-03-31T17:11:29.901553Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022463836215703:2615] txid# 281474976710663 HANDLE EvClientConnected 2025-03-31T17:11:29.908995Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022463836215703:2615] txid# 281474976710663 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710663} 2025-03-31T17:11:29.909098Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022463836215703:2615] txid# 281474976710663 SEND to# [1:7488022463836215702:2347] Source {TEvProposeTransactionStatus txid# 281474976710663 Status# 48} 2025-03-31T17:11:29.949981Z node 1 :TX_PROXY DEBUG: actor# [1:7488022455246280331:2139] Handle TEvProposeTransaction 2025-03-31T17:11:29.950018Z node 1 :TX_PROXY DEBUG: actor# [1:7488022455246280331:2139] TxId# 281474976710664 ProcessProposeTransaction 2025-03-31T17:11:29.950063Z node 1 :TX_PROXY DEBUG: actor# [1:7488022455246280331:2139] Cookie# 0 userReqId# "" txid# 281474976710664 SEND to# [1:7488022463836215731:2627] 2025-03-31T17:11:29.952564Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022463836215731:2627] txid# 281474976710664 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\014ordinaryuser\022\030\022\026\n\024all-users@well-known\032\334\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzQ4NDI4OSwiaWF0IjoxNzQzNDQxMDg5LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.yJbPV327uzfhrcv_Ga_i_KLbmvaIZ7MdgLpkY1gzhXqTxdD9Z3_e3sDqibkE0UslOCni-mMyaNXUA8f7jSdRp0G4AavkDs30A42HAWutOJv1P4djkQL3BpQ95asOUQLNrAAsBAI7lCa2MmEby-jDB4B-74VtxQm_vfkyDWxwoBKEAFe5KzxfG7iNeqowyiJzHBXcYdZrziR83Ixoi_CGxRAbPGxv7Wo5RQOx0dGZEP-rXPPc02tCc0VtBnRnacO9DIGKKJyf9rvoUUa5zMzWa1hTnaBYz7XW8c4RDgNURSw8GMz8UQEFOjUDUvoPGSvb3f588-EtwKGvQzLQHDf7kA\"\005Login*\210\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzQ4NDI4OSwiaWF0IjoxNzQzNDQxMDg5LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.**" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:58446" 2025-03-31T17:11:29.952633Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022463836215731:2627] txid# 281474976710664 Bootstrap, UserSID: ordinaryuser CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T17:11:29.952649Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022463836215731:2627] txid# 281474976710664 Bootstrap, UserSID: ordinaryuser 
IsClusterAdministrator: 0 2025-03-31T17:11:29.952706Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022463836215731:2627] txid# 281474976710664 TEvNavigateKeySet requested from SchemeCache 2025-03-31T17:11:29.953069Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022463836215731:2627] txid# 281474976710664 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T17:11:29.953186Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022463836215731:2627] HANDLE EvNavigateKeySetResult, txid# 281474976710664 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T17:11:29.953290Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022463836215731:2627] txid# 281474976710664 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710664 TabletId# 72057594046644480} 2025-03-31T17:11:29.953444Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022463836215731:2627] txid# 281474976710664 HANDLE EvClientConnected 2025-03-31T17:11:29.955883Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022463836215731:2627] txid# 281474976710664 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710664} 2025-03-31T17:11:29.955940Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022463836215731:2627] txid# 281474976710664 SEND to# [1:7488022463836215730:2359] Source {TEvProposeTransactionStatus txid# 281474976710664 Status# 48} |66.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |66.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest |66.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest |66.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest |66.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest |66.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest |66.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest |66.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest |66.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest |67.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest |67.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> YdbLogStore::AlterLogTable [FAIL] |67.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest |67.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest |67.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest |67.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest |67.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest |67.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest |67.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest |67.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest |67.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest |67.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest |67.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest 
|67.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest |67.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest |67.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> DataShardTxOrder::RandomPointsAndRanges >> THealthCheckTest::LayoutIncorrect |67.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest |67.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> test_sql_streaming.py::test[suites-GroupByHopWithDataWatermarks-default.txt] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowTimeExtractorUnusedColumns-default.txt] |68.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest |68.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest |68.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest |68.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest |68.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest |68.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest |68.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest |68.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest |68.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest |68.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest |68.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest |68.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest |68.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest |68.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest |68.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest |68.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest |68.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest |68.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest |69.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest |69.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest |69.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest |69.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest |69.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |69.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_volatile/unittest |69.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |69.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest |69.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |69.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> DataShardVolatile::GracefulShardRestartNoEarlyReadSetAck |69.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |69.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |69.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |69.7%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/kqp/ut/query/unittest |69.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |69.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |69.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryServiceStreamCall [GOOD] |69.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |70.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |70.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |70.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |70.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |70.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpSnapshotIsolation::TConflictReadWriteOltpNoSink [FAIL] |70.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |70.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |70.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |70.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |70.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |70.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |70.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> overlapping_portions.py::TestOverlappingPortions::test |70.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |70.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |70.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |70.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |70.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |70.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |70.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |71.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |71.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |71.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::TooBigColumn+useSink |71.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryServiceStreamCall [GOOD] Test command err: 2025-03-31T17:11:29.975711Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488022462419927893:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:11:29.975843Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/r3ux/0001f7/r3tmp/tmpn3dnB4/pdisk_1.dat 2025-03-31T17:11:30.490252Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T17:11:30.526061Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T17:11:30.528855Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2025-03-31T17:11:30.541954Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2249, node 1 2025-03-31T17:11:30.761832Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T17:11:30.761864Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T17:11:30.761871Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T17:11:30.762008Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12683 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T17:11:31.350453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T17:11:32.736108Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022475304830837:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:32.736239Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:33.337475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T17:11:33.528074Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022479599798316:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:33.528157Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:33.528185Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022479599798321:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:33.531957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-31T17:11:33.550063Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488022479599798323:2357], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-31T17:11:33.653430Z node 1 :TX_PROXY ERROR: Actor# [1:7488022479599798413:2806] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T17:11:33.789740Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jqpjzrwqad0rpv74arjver4w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGIwNWUwNzgtODliY2ZjMDUtM2YyMDBlY2ItZTU2NGRiOTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:11:33.871591Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jqpjzs7d9m7p1z8hepsqs2e0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTNjOTg2MzktMzExNWZjYWMtYjM2MGYwZDYtYzIzMGVmYWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root |71.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |71.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |71.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |71.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |71.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |71.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::SysViewClientLost |71.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |71.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |71.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |71.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |71.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |71.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |71.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |72.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |72.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |72.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |72.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |72.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |72.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> test_retry.py::TestRetry::test_fail_first[kikimr0] |72.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |72.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |72.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |72.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |72.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |72.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_backup/unittest |72.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_backup/unittest >> 
IncrementalBackup::ComplexRestoreBackupCollection+WithIncremental |72.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_backup/unittest |72.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |72.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |72.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |72.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |73.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |73.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |73.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |73.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |73.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |73.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TConflictReadWriteOltpNoSink [FAIL] Test command err: Trying to start YDB, gRPC: 10137, MsgBus: 62513 2025-03-31T17:11:25.852965Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488022446295492087:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:11:25.853057Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/r3ux/0002be/r3tmp/tmpO14JsL/pdisk_1.dat 2025-03-31T17:11:26.139478Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10137, node 1 2025-03-31T17:11:26.198593Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T17:11:26.198615Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T17:11:26.198627Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T17:11:26.198752Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T17:11:26.225995Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T17:11:26.226155Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T17:11:26.227901Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:62513 TClient is connected to server localhost:62513 WaitRootIsUp 'Root'... 
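The FAIL reported below ends in a status assertion raised through library/cpp/testing/unittest (frames 0-2 of the backtrace). A minimal sketch of that check pattern, assuming the in-repo C++ query SDK and a SnapshotRW transaction setting; the query text and table path are hypothetical, not the actual test source:

// Sketch only: mirrors the UNIT_ASSERT_VALUES_EQUAL_C pattern behind the
// "(PRECONDITION_FAILED != SUCCESS)" diff in the log below.
#include <library/cpp/testing/unittest/registar.h>
#include <ydb/public/sdk/cpp/client/ydb_query/client.h>

void AssertSnapshotRwUpsert(NYdb::NQuery::TQueryClient& client) {
    auto result = client.ExecuteQuery(
        "UPSERT INTO `/Root/Test` (Key, Value) VALUES (1u, \"v\");", // hypothetical query
        NYdb::NQuery::TTxControl::BeginTx(
            NYdb::NQuery::TTxSettings::SnapshotRW()).CommitTx())
        .GetValueSync();
    // Against a row (OLTP) table the server answers PRECONDITION_FAILED with
    // "SnapshotRW can only be used with olap tables", so this assertion fires.
    UNIT_ASSERT_VALUES_EQUAL_C(result.GetStatus(), NYdb::EStatus::SUCCESS,
                               result.GetIssues().ToString());
}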
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T17:11:26.631480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T17:11:28.060312Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022459180394645:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:28.060374Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022459180394625:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:28.060624Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:28.063725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T17:11:28.070723Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488022459180394654:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T17:11:28.161477Z node 1 :TX_PROXY ERROR: Actor# [1:7488022459180394705:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T17:11:28.411604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T17:11:28.524552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T17:11:29.286712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T17:11:30.538592Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NzNhMzIyYzAtYTJlMjQ0MDItZTEzMTQ3MDYtNDBlMzQ1NzI=, ActorId: [1:7488022467770337824:2967], ActorState: ExecuteState, TraceId: 01jqpjznw0axrhmj5yhwj8xcsv, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. 2025-03-31T17:11:30.852908Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488022446295492087:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:11:30.852999Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictReadWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191FE23B
1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x196C634F
2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131: DoExecute @ 0x18DCECE7
3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18D0BACA
4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:166: Execute_ @ 0x18DB8A9A
5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18DBEF27
6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27
7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27
8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18DBEF27
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18DBEF27
10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196FD375
11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196FD375
12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196FD375
13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x196CCEC8
14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18DBE0F3
15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x196CE795
16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196F78EC
17. ??:0: ?? @ 0x7F32D6EF5D8F
18. ??:0: ?? @ 0x7F32D6EF5E3F
19. ??:0: ?? @ 0x163B4028
|73.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |73.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |73.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> THealthCheckTest::LayoutIncorrect [GOOD] |73.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |73.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |73.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |73.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |73.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |73.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |73.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest |73.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |74.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |74.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |74.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |74.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |74.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |74.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |74.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |74.4%| [TM] {asan,
default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |74.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |74.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootColumnShard ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::LayoutIncorrect [GOOD] Test command err: 2025-03-31T17:11:36.266035Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T17:11:36.266533Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T17:11:36.266699Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/r3ux/0002ef/r3tmp/tmpa7TcAy/pdisk_1.dat 2025-03-31T17:11:36.832409Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26870, node 1 TClient is connected to server localhost:32060 2025-03-31T17:11:37.621006Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T17:11:37.621090Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T17:11:37.621144Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T17:11:37.621632Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration |74.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |74.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |74.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |74.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |74.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |74.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> GenericFederatedQuery::ClickHouseFilterPushdown [GOOD] |74.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |75.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |75.0%| [TA] $(B)/ydb/core/health_check/ut/test-results/unittest/{meta.json ... results_accumulator.log} |75.1%| [TA] {RESULT} $(B)/ydb/core/health_check/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |75.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |75.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |75.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |75.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |75.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |75.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |75.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |75.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |75.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |75.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest |75.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest |75.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest |75.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::ClickHouseFilterPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 14328, MsgBus: 4054 2025-03-31T17:11:29.291209Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488022461486452968:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:11:29.291341Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/r3ux/000270/r3tmp/tmpCa11ac/pdisk_1.dat 2025-03-31T17:11:29.681170Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T17:11:29.721338Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T17:11:29.722013Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T17:11:29.726059Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14328, node 1 2025-03-31T17:11:29.943563Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T17:11:29.943594Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T17:11:29.943605Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T17:11:29.943744Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4054 TClient is connected to server localhost:4054 WaitRootIsUp 'Root'... 
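The repeated "Resource pool default not found or you don't have access permissions" warnings in this run (including the block just below) are the workload manager lazily bootstrapping /Root/.metadata/workload_manager/pools/default; once TPoolCreatorActor finishes, racing creators see "path exist, request accepts it". A hedged sketch of declaring a pool explicitly through the same query client; the limits are illustrative, not values used by these tests:

// Assumption: CREATE RESOURCE POOL is available in this build's YQL; the
// numbers below are arbitrary examples, not defaults.
#include <ydb/public/sdk/cpp/client/ydb_query/client.h>
#include <util/generic/yexception.h>

void CreateDefaultPool(NYdb::NQuery::TQueryClient& client) {
    auto result = client.ExecuteQuery(R"(
        CREATE RESOURCE POOL default WITH (
            CONCURRENT_QUERY_LIMIT = 50,
            QUEUE_SIZE = 1000
        );
    )", NYdb::NQuery::TTxControl::NoTx()).GetValueSync();
    Y_ENSURE(result.IsSuccess(), result.GetIssues().ToString());
}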
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T17:11:30.750389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T17:11:31.907699Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022470076388217:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:31.908485Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:32.497381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-03-31T17:11:32.669865Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022474371355638:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:32.669984Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:32.670188Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022474371355643:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:32.674406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-03-31T17:11:32.681483Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-03-31T17:11:32.681809Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488022474371355645:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-31T17:11:32.787257Z node 1 :TX_PROXY ERROR: Actor# [1:7488022474371355686:2402] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T17:11:33.791636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T17:11:34.252141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:1, at schemeshard: 72057594046644480 2025-03-31T17:11:34.291387Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488022461486452968:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:11:34.291469Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T17:11:34.723261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T17:11:35.178194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-31T17:11:35.593383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-03-31T17:11:35.955897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-03-31T17:11:36.030592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-03-31T17:11:38.933624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710717:0, at schemeshard: 72057594046644480 Call DescribeTable. 
data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } columns { name: "data_column" type { optional_type { item { type_id: STRING } } } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. 
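The ReadSplits request dumped below carries the pushed-down predicate as filter_typed { comparison { operation: EQ ... } } instead of filtering rows inside YDB. A hedged reconstruction of the query shape that produces it; the external data source binding name is hypothetical, while the table and column names match the log:

// Reconstruction only: `clickhouse_datasource` stands in for whatever
// external data source binding the test registered.
const char* kPushdownQuery = R"(
    SELECT data_column, filtered_column
    FROM clickhouse_datasource.example_1
    WHERE filtered_column = 42;
)";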
splits { select { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Expected: splits { select { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Actual: splits { select { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. 
GRpcStatusCode: 0 |75.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest |76.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest >> SystemView::ShowCreateTableTtlSettings |76.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest |76.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest |76.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest |76.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest |76.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |76.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest |76.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest |76.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest >> SystemView::ShowCreateTablePartitionAtKeys |76.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |76.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest |76.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest |76.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest |76.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> SystemView::PartitionStatsFields >> SystemView::ShowCreateTableTemporary >> TPersQueueTest::DisableDeduplication |76.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |76.8%| [TA] $(B)/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/{meta.json ... results_accumulator.log} |76.8%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |76.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |77.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |77.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |77.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> DataShardVolatile::GracefulShardRestartNoEarlyReadSetAck [GOOD] |77.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |77.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |77.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |77.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpLimits::TooBigColumn+useSink [GOOD] |77.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncQueryClient [GOOD] |77.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |77.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |77.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |77.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |77.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |77.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |77.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |77.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |77.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |77.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |77.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |78.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |78.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> ReadIteratorExternalBlobs::NotExtBlobs |78.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |78.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |78.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |78.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |78.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |78.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::TooBigColumn+useSink [GOOD] Test command err: Trying to start YDB, gRPC: 16502, MsgBus: 3132 2025-03-31T17:11:36.228102Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488022490984083287:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:11:36.228181Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/r3ux/0001a6/r3tmp/tmp0LtkaG/pdisk_1.dat 2025-03-31T17:11:36.520151Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16502, node 1 2025-03-31T17:11:36.575051Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T17:11:36.575188Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T17:11:36.576677Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T17:11:36.610855Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T17:11:36.610888Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T17:11:36.610897Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T17:11:36.611067Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3132 TClient is connected to server localhost:3132 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T17:11:37.113631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T17:11:37.135387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T17:11:37.250032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T17:11:37.422486Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T17:11:37.495311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T17:11:39.322377Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022503868986953:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:39.322547Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:39.620536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T17:11:39.654251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T17:11:39.682602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T17:11:39.710140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T17:11:39.735951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T17:11:39.808920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T17:11:39.851357Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022503868987467:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:39.851477Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022503868987472:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:39.851475Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:39.854840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T17:11:39.865704Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488022503868987474:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T17:11:39.945111Z node 1 :TX_PROXY ERROR: Actor# [1:7488022503868987527:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T17:11:41.228499Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488022490984083287:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:11:41.228587Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T17:11:41.264143Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=Cannot parse tx 3. BAD_ARGUMENT: Row cell size of 20971520 bytes is larger than the allowed threshold 16777216 at tablet# 72075186224037911;tx_id=3; 2025-03-31T17:11:41.275311Z node 1 :TX_DATASHARD ERROR: Cannot parse tx 3. BAD_ARGUMENT: Row cell size of 20971520 bytes is larger than the allowed threshold 16777216 2025-03-31T17:11:41.275725Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488022512458922466:2496], Table: `/Root/KeyValue` ([72057594046644480:6:1]), SessionActorId: [1:7488022508163955113:2496]Got BAD REQUEST for table `/Root/KeyValue`. ShardID=72075186224037911, Sink=[1:7488022512458922466:2496].{
: Error: Cannot parse tx 3. BAD_ARGUMENT: Row cell size of 20971520 bytes is larger than the allowed threshold 16777216 at tablet# 72075186224037911, code: 2017 } 2025-03-31T17:11:41.276603Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7488022508163955145:2496], SessionActorId: [1:7488022508163955113:2496], statusCode=BAD_REQUEST. Issue=
: Error: Bad request. Table: `/Root/KeyValue`., code: 2017
: Error: Cannot parse tx 3. BAD_ARGUMENT: Row cell size of 20971520 bytes is larger than the allowed threshold 16777216 at tablet# 72075186224037911, code: 2017 . sessionActorId=[1:7488022508163955113:2496]. isRollback=0 2025-03-31T17:11:41.315436Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=M2I1NDcwOTItM2I2MTAyM2MtM2JlNDA4YmEtYTljZTNjZTI=, ActorId: [1:7488022508163955113:2496], ActorState: ExecuteState, TraceId: 01jqpk00229sc8kac3neajepnx, got TEvKqpBuffer::TEvError in ExecuteState, status: BAD_REQUEST send to: [1:7488022508163955146:2496] from: [1:7488022508163955145:2496] 2025-03-31T17:11:41.315547Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7488022508163955146:2496] TxId: 281474976710671. Ctx: { TraceId: 01jqpk00229sc8kac3neajepnx, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2I1NDcwOTItM2I2MTAyM2MtM2JlNDA4YmEtYTljZTNjZTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. BAD_REQUEST: {
: Error: Bad request. Table: `/Root/KeyValue`., code: 2017 subissue: {
: Error: Cannot parse tx 3. BAD_ARGUMENT: Row cell size of 20971520 bytes is larger than the allowed threshold 16777216 at tablet# 72075186224037911, code: 2017 } } 2025-03-31T17:11:41.316261Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=M2I1NDcwOTItM2I2MTAyM2MtM2JlNDA4YmEtYTljZTNjZTI=, ActorId: [1:7488022508163955113:2496], ActorState: ExecuteState, TraceId: 01jqpk00229sc8kac3neajepnx, Create QueryResponse for error on request, msg:
: Error: Bad request. Table: `/Root/KeyValue`., code: 2017
: Error: Cannot parse tx 3. BAD_ARGUMENT: Row cell size of 20971520 bytes is larger than the allowed threshold 16777216 at tablet# 72075186224037911, code: 2017 |78.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |78.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |78.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |78.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |78.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |78.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |78.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |78.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncQueryClient [GOOD] Test command err: 2025-03-31T17:11:29.972420Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488022461898906408:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:11:29.972809Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/r3ux/000209/r3tmp/tmpRDIfvW/pdisk_1.dat 2025-03-31T17:11:30.488467Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T17:11:30.509439Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T17:11:30.528828Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T17:11:30.560542Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31474, node 1 2025-03-31T17:11:30.761834Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T17:11:30.761863Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T17:11:30.761880Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T17:11:30.762000Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27114 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
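The KqpLimits::TooBigColumn failure above reduces to arithmetic on the two logged numbers: the test writes a 20 MiB cell and datashard rejects any row cell over 16 MiB. A compile-time restatement:

// 20971520 and 16777216 are exactly the values in the BAD_ARGUMENT message.
static_assert(20ull * 1024 * 1024 == 20971520, "cell size written by the test");
static_assert(16ull * 1024 * 1024 == 16777216, "datashard per-cell threshold");
static_assert(20971520 > 16777216, "hence the row is rejected");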
2025-03-31T17:11:31.382877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T17:11:33.011178Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022479078776832:2494], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:33.011305Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022479078776834:2496], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:33.012259Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022479078776838:2499], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:33.013270Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022474783809491:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:33.013355Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022474783809524:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:33.013437Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022474783809523:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:33.013474Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:33.013490Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022474783809526:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:33.013521Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022474783809525:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:33.014162Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022474783809522:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:33.022912Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022479078776862:2509], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:33.032583Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022479078776946:2539], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:33.032869Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022479078776919:2516], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T17:11:33.032905Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022479078776942:2537], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T17:11:33.033746Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T17:11:33.034203Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022479078776978:2540], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T17:11:33.034229Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022479078777009:2566], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T17:11:33.034317Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022479078776979:2541], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T17:11:33.034683Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022479078776980:2542], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T17:11:33.034841Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T17:11:33.036385Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022479078777011:2567], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T17:11:33.036473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480
2025-03-31T17:11:33.036656Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022479078777012:2568], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T17:11:33.037398Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022479078777036:2580], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T17:11:33.037481Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022479078777037:2581], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T17:11:33.037503Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022479078777027:2571], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T17:11:33.037554Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022479078777026:2570], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T17:11:33.037560Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022479078777028:2572], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T17:11:33.037671Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022479078777038:2582], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T17:11:33.037708Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022479078777039:2583], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:33.037814Z node 1 :KQP_WORKLOAD_SERVICE WA ... TraceId: 01jqpjzrd260w94pn7tk19zmk7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGFmY2JmNTItZWIwNWI0NC1jOTM1YzdhNi0zZWE3OWU0Zg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:11:33.831805Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710752. Ctx: { TraceId: 01jqpjzrd311ee6hpwyx55ydjb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2UwMzFmOWUtMWY2ODE3MmItMzNkMWJmNGYtMTc4YjJkMjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:11:33.833071Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710764. Ctx: { TraceId: 01jqpjzrd70j8mv59z92gyadbv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTVjYzZlNzMtMmEyNmExNzgtMzg5NWM2Y2QtOGE4ODYwYmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:11:33.833145Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710781. Ctx: { TraceId: 01jqpjzrd31qpbpa9yaxz8pcgg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzE1ZWJkMDYtNzdjYjMyMWYtZDk1ZGMwZTktMjRmOGMyNmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:11:33.834180Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710761. Ctx: { TraceId: 01jqpjzrd43q8kse88jyj0htkr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTExYTczNGUtN2Y1Y2NmYzctNjczOWUxOC1hODI3OTQxNw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:11:33.834402Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710770. Ctx: { TraceId: 01jqpjzrd290t85tcdcp7hf6p6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmUwY2ZlYy1mNDc0NmIxNS02ZmFmMTE0YS1mY2FkZjliYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:11:33.835267Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710765. Ctx: { TraceId: 01jqpjzrd46k4rjhbtsf5m7nm0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2JhZjA1MzUtYmY5YmZkNzAtOGE0MzNkZDktN2FlOTIwZDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:11:33.835644Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710758. Ctx: { TraceId: 01jqpjzrd475pwqxfxcx7qs1nq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTQ1OGJhY2ItNzZmZjcwM2YtNDFkNzAyYy1jMTg4MTk4Nw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:11:33.836176Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710762. Ctx: { TraceId: 01jqpjzrbw6tx5sdpn5n2cjdnp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTljZWRkMzgtNGNjNzU1YTMtYzk4NDE1YS03OTIzMDExZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:11:33.837035Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710773. Ctx: { TraceId: 01jqpjzrd43nb038mjg93y3p09, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWMzOTY2M2YtMzk2MWU1ZmYtNjNjNjEyOTItYzRiMDZhM2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:11:33.837147Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710766. 
Ctx: { TraceId: 01jqpjzrd3en5cahazgp64g0f6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODcxNGFiMzYtMWUyMjliZTItZmM5OTY3ZDMtYThkZTIwZDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:11:33.838082Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710767. Ctx: { TraceId: 01jqpjzrb174hgxgqsqah5fa0v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGMwNDEzMzEtYjE0YjlhYmYtMTI0NTJjMzAtN2ZjNjIzYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:11:33.838353Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710774. Ctx: { TraceId: 01jqpjzrd893hsm744dfbjbebn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzU4MjE1ZTMtZWY0OTJjMjktY2Q3OGM2NTUtNDI3NjcxZDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:11:33.839024Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710763. Ctx: { TraceId: 01jqpjzrd3bwjvxyp8ev4damzr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTEyOGIwM2EtMjg2OWEwY2EtYTZmNmUzZGItMjIwYmEwMjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:11:33.839730Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710775. Ctx: { TraceId: 01jqpjzrd8arv0wvwhtnjca9be, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmY0OTI2ZjktNjBjNDI0NC02MmY0MTA3MC1mYThlOWMxYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:11:33.840000Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710751. Ctx: { TraceId: 01jqpjzrd457pg5nrd84fyrfnm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWUyMDljY2ItYjYzY2NkZWQtYjY5ZGRlMmYtMjg1MjM5ODU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:11:33.841072Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710776. Ctx: { TraceId: 01jqpjzras4hne4zxjdv0esg62, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2M5NTc0NTgtMmZhZmY2MzYtNmY4OGRmOGYtNzNkZWU1MTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:11:33.842346Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710777. Ctx: { TraceId: 01jqpjzrb95a82mjbtnp2rzk50, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjkyMmI2ZTgtNzdkMjc1MzQtZWI3Mzc5YzYtY2UzNDE3YTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:11:33.842776Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710782. Ctx: { TraceId: 01jqpjzrd9e5gm724tsv12cwhc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDBhOTQzNTEtYTdhNzAyYi1kMWRmNGE2Yy02YmQ5MmEzNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:11:33.843609Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710778. Ctx: { TraceId: 01jqpjzrcbaqm38tk6r1a83ghr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmUyZWY3Mi02NzJiMTNkMC02MWIyYmJmYy04Mjk5OThlNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:11:33.843816Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710783. 
Ctx: { TraceId: 01jqpjzrd6fxpqn70cbpehdv6f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjIzOTljOS1lOTgxNzc5Yy04NDAwZDRlNi02MDk0ZDA3Mw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:11:33.844796Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710771. Ctx: { TraceId: 01jqpjzrb0bta84yj2etc8p5kv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWNkZTgzYTktNmM4ZGYyOGEtMjdlMmRlNi01N2NjZjZl, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:11:33.844825Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710779. Ctx: { TraceId: 01jqpjzrd9b148p2pg2nqxw2x1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmYwMzIwNTAtM2IwMTE5NjQtYWY4ZjdmMzctZWNhZmZlZDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:11:33.845757Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710772. Ctx: { TraceId: 01jqpjzrd30vwnpb7y8jgnvdk9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTgzN2RlZWMtNzJkZDYxNDItN2Y3OTEyNWMtZDE0NWFkOGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:11:33.846113Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710790. Ctx: { TraceId: 01jqpjzra92zhb01ykq9gd1t3a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzViOGVhMjYtODg3YzBjNzctYzlhZTk3Yy1lY2MzYzE4OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:11:33.846857Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710786. Ctx: { TraceId: 01jqpjzrd441j95a0hwktx2bva, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDYwOGMxNTUtMzM4ZWU5Ni0zMzg5MDVjMi05ZDY3NGQxMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:11:33.848027Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710787. Ctx: { TraceId: 01jqpjzrd8e4yahrv5erzjqssw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzRjM2UzYTAtOTYzMmI5YjEtZWNmM2YwMWEtNDg4YzM2NDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:11:33.849517Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710769. Ctx: { TraceId: 01jqpjzrd86bgbp3bz4rx0qvkj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzM1ZDI4MjEtZGM2OWE2NDEtYjEyMWU0NjUtOWYyYzBmOGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:11:33.851599Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710784. Ctx: { TraceId: 01jqpjzrd9agz7w604ja7p6r97, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTE3YWM5ZDktOTVhZWIzZTMtZTczNTYyNGQtYjRjZDE0YWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:11:33.853067Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710785. Ctx: { TraceId: 01jqpjzrd39yeq33kpqdxemxw6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTIxMzFmMTQtMWI1ZTRkZTctMzljYTE4NzgtYzE4YjM2YWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:11:33.854380Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710788. 
Ctx: { TraceId: 01jqpjzrd2d7ygznnar9w2260h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmU1ZThlOWUtN2RmNzNmZWUtOTU0ODQ3YTQtNTg4ZDgwYWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:11:33.860383Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710789. Ctx: { TraceId: 01jqpjzrd2djg3c8rw4hhaq99f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWNjNzRmZmYtYTdhYTNmODQtZDdhMGZkMWEtNTEwMmExMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:11:33.865136Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710768. Ctx: { TraceId: 01jqpjzrd91gj2rjd2z7bwthja, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2ZkYTFkODItM2YyMGQyMzItNTdmZTkzODctYWFiMzJmNjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:11:33.867615Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710791. Ctx: { TraceId: 01jqpjzrd9aj7s1fe50qss38v5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDJiMzRjZjQtNTU2OTg1NTEtNzdlNTQ3YWEtZDBlZTk1OWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:11:33.874191Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710792. Ctx: { TraceId: 01jqpjzrd846wc9n6kaesfppnm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTI0MzNkZDQtNDhiY2ZmYTEtM2MzODE3MDgtYjFmMmE1NmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:11:34.972332Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488022461898906408:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:11:34.972454Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |79.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |79.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::OlapWriteRow |79.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |79.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |79.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |79.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-60 |79.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |79.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-64 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-59 |79.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-58 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-62 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-68 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-67 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-57 >> ReadIteratorExternalBlobs::ExtBlobsEmptyTable |79.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |79.5%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/columnshard/ut_rw/unittest
|79.6%| [TA] $(B)/ydb/services/ydb/sdk_sessions_ut/test-results/unittest/{meta.json ... results_accumulator.log}
|79.7%| [TA] {RESULT} $(B)/ydb/services/ydb/sdk_sessions_ut/test-results/unittest/{meta.json ... results_accumulator.log}
|79.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest
>> KqpSnapshotIsolation::TConflictWriteOltp
------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbLogStore::AlterLogTable [FAIL] Test command err:
2025-03-31T17:11:29.023453Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488022460648229008:2074];send_to=[0:7307199536658146131:7762515];
2025-03-31T17:11:29.023610Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/r3ux/000242/r3tmp/tmpGVUHgy/pdisk_1.dat
2025-03-31T17:11:29.302405Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 17955, node 1
2025-03-31T17:11:29.363430Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T17:11:29.363533Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T17:11:29.380246Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-31T17:11:29.398747Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T17:11:29.398777Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T17:11:29.398787Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T17:11:29.398934Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:4881
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-31T17:11:29.680761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T17:11:29.840657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateColumnStore CreateColumnStore { Name: "LogStore" ColumnShardCount: 4 SchemaPresets { Name: "default" Schema { Columns { Name: "timestamp" Type: "Timestamp" NotNull: true } Columns { Name: "resource_type" Type: "Utf8" NotNull: true } Columns { Name: "resource_id" Type: "Utf8" NotNull: true } Columns { Name: "uid" Type: "Utf8" NotNull: true } Columns { Name: "level" Type: "Int32" } Columns { Name: "message" Type: "Utf8" } Columns { Name: "json_payload" Type: "JsonDocument" } Columns { Name: "request_id" Type: "Utf8" } Columns { Name: "ingested_at" Type: "Timestamp" } Columns { Name: "saved_at" Type: "Timestamp" } KeyColumnNames: "timestamp" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" DefaultCompression { Codec: ColumnCodecLZ4 } } } } } TxId: 281474976710658 TabletId: 72057594046644480 PeerName: "ipv6:[::1]:39788" , at schemeshard: 72057594046644480
2025-03-31T17:11:29.841141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateOlapStore Propose, path: /Root/LogStore, opId: 281474976710658:0, at schemeshard: 72057594046644480
2025-03-31T17:11:29.841184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusPreconditionFailed, reason: Column stores are not supported, at schemeshard: 72057594046644480
2025-03-31T17:11:29.843794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710658, response: Status: StatusPreconditionFailed Reason: "Column stores are not supported" TxId: 281474976710658 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480
2025-03-31T17:11:29.844010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusPreconditionFailed, reason: Column stores are not supported, operation: CREATE COLUMN STORE, path: /Root/LogStore
2025-03-31T17:11:29.846248Z node 1 :TX_PROXY ERROR: Actor# [1:7488022460648229922:2597] txid# 281474976710658, issues: { message: "Column stores are not supported" severity: 1 }
assertion failed at ydb/services/ydb/ydb_logstore_ut.cpp:435, virtual void NTestSuiteYdbLogStore::TTestCaseAlterLogTable::Execute_(NUnitTest::TTestContext &): (res.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: Column stores are not supported , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)
TBackTrace::Capture()+28 (0x1C6D843C)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x1CB954F0)
NTestSuiteYdbLogStore::TTestCaseAlterLogTable::Execute_(NUnitTest::TTestContext&)+8721 (0x1C20B181)
std::__y1::__function::__func, void ()>::operator()()+280 (0x1C234018)
TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x1CBCC516)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x1CB9C069)
NTestSuiteYdbLogStore::TCurrentTest::Execute()+1204 (0x1C2331E4)
NUnitTest::TTestFactory::Execute()+2438 (0x1CB9D936)
NUnitTest::RunMain(int, char**)+5213 (0x1CBC6A8D)
??+0 (0x7F40C103ED90)
__libc_start_main+128 (0x7F40C103EE40)
_start+41 (0x1907A029)
>> SystemView::ShowCreateTableTemporary [GOOD]
>> SystemView::ShowCreateTableTtlSettings [GOOD]
|79.8%| [TA] $(B)/ydb/services/ydb/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|79.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest
|79.9%| [TA] {RESULT} $(B)/ydb/services/ydb/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|80.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest
|80.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest
|80.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest
|80.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest
|80.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest
|80.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest
|80.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest
|80.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest
|80.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest
|80.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest
|80.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest
|80.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest
|80.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest
|80.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest
|80.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest
|80.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest
|80.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest
>> ColumnShardTiers::TTLUsage
------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest >> SystemView::ShowCreateTableTemporary [GOOD] Test command err:
2025-03-31T17:11:41.771011Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488022512228568681:2139];send_to=[0:7307199536658146131:7762515];
2025-03-31T17:11:41.771065Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/r3ux/00024b/r3tmp/tmpdDZmGc/pdisk_1.dat
2025-03-31T17:11:42.213971Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T17:11:42.216336Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> 
Disconnected 2025-03-31T17:11:42.216429Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 61594, node 1 2025-03-31T17:11:42.222283Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T17:11:42.222590Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T17:11:42.230348Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T17:11:42.267381Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T17:11:42.267401Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T17:11:42.267417Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T17:11:42.267562Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18829 TClient is connected to server localhost:18829 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T17:11:42.830472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T17:11:44.935020Z node 1 :KQP_COMPILE_SERVICE INFO: Subscribed for config changes 2025-03-31T17:11:44.935057Z node 1 :KQP_COMPILE_SERVICE INFO: Updated config 2025-03-31T17:11:44.957750Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022525113471632:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:44.957749Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022525113471640:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:44.957860Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:44.962120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T17:11:44.988771Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488022525113471646:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T17:11:45.076275Z node 1 :TX_PROXY ERROR: Actor# [1:7488022529408439011:2745] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T17:11:45.077578Z node 1 :KQP_COMPILE_SERVICE DEBUG: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n CREATE TEMPORARY TABLE test_show_create (\n Key Int32 NOT NULL,\n Value String,\n PRIMARY KEY (Key)\n );\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2025-03-31T17:11:45.077694Z node 1 :KQP_COMPILE_SERVICE DEBUG: Perform request, TraceId.SpanIdPtr: 0x000050F0001B3368 2025-03-31T17:11:45.077737Z node 1 :KQP_COMPILE_SERVICE DEBUG: Received compile request, sender: [1:7488022525113471625:2339], queryUid: , queryText: "\n CREATE TEMPORARY TABLE test_show_create (\n Key Int32 NOT NULL,\n Value String,\n PRIMARY KEY (Key)\n );\n ", keepInCache: 1, split: 0{ TraceId: 01jqpk041v5ehy3998v7nktnn3, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTE4ODIwOGQtZjczOWRhN2MtZDhjOWFjMzctYWZmMjY0Y2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default} 2025-03-31T17:11:45.077872Z node 1 :KQP_COMPILE_SERVICE DEBUG: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n CREATE TEMPORARY TABLE test_show_create (\n Key Int32 NOT NULL,\n Value String,\n PRIMARY KEY (Key)\n );\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2025-03-31T17:11:45.077927Z node 1 :KQP_COMPILE_SERVICE DEBUG: Added request to queue, sender: [1:7488022525113471625:2339], queueSize: 1 2025-03-31T17:11:45.078475Z node 1 :KQP_COMPILE_SERVICE DEBUG: Created compile actor, sender: [1:7488022525113471625:2339], compileActor: [1:7488022529408439022:2350] 2025-03-31T17:11:45.350701Z node 1 :KQP_YQL INFO: TraceId: 01jqpk041v5ehy3998v7nktnn3, SessionId: CompileActor 2025-03-31 17:11:45.350 INFO ydb-core-sys_view-ut(pid=963893, tid=0x00007F71D6B5F640) [KQP] kqp_host.cpp:1382: Compiled query: ( (let $1 '('"Key" (DataType 'Int32) '('columnConstrains '('('not_null))) '())) (let $2 '('"Value" (AsOptionalType (DataType 'String)) '('columnConstrains '()) '())) (let $3 '('('mode 'create) '('columns '($1 $2)) '('primarykey '('"Key")) '('temporary))) (return (Write! 
world (DataSink '"kikimr" '"db") (Key '('tablescheme (String '"/Root/test_show_create"))) (Void) $3)) ) 2025-03-31T17:11:45.351896Z node 1 :KQP_YQL TRACE: TraceId: 01jqpk041v5ehy3998v7nktnn3, SessionId: CompileActor 2025-03-31 17:11:45.351 TRACE ydb-core-sys_view-ut(pid=963893, tid=0x00007F71D6B5F640) [KQP] kqp_transform.cpp:33: YqlTransformer: ( (let $1 '('"Key" (DataType 'Int32) '('columnConstrains '('('not_null))) '())) (let $2 '('"Value" (AsOptionalType (DataType 'String)) '('columnConstrains '()) '())) (let $3 '('('mode 'create) '('columns '($1 $2)) '('primarykey '('"Key")) '('temporary))) (let $4 (Write! world (DataSink '"kikimr" '"db") (Key '('tablescheme (String '"/Root/test_show_create"))) (Void) $3)) (return (Commit! $4 (DataSink '"kikimr" '"db") '('('"mode" '"flush")))) ) 2025-03-31T17:11:45.352242Z node 1 :KQP_YQL DEBUG: TraceId: 01jqpk041v5ehy3998v7nktnn3, SessionId: CompileActor 2025-03-31 17:11:45.352 DEBUG ydb-core-sys_view-ut(pid=963893, tid=0x00007F71D6B5F640) [perf] yql_expr_optimize.cpp:540: Execution of [ExpandApply] took 242us 2025-03-31T17:11:45.352713Z node 1 :KQP_YQL DEBUG: TraceId: 01jqpk041v5ehy3998v7nktnn3, SessionId: CompileActor 2025-03-31 17:11:45.352 DEBUG ydb-core-sys_view-ut(pid=963893, tid=0x00007F71D6B5F640) [core eval] yql_eval_expr.cpp:384: EvaluateExpression - start 2025-03-31T17:11:45.354291Z node 1 :KQP_YQL DEBUG: TraceId: 01jqpk041v5ehy3998v7nktnn3, SessionId: CompileActor 2025-03-31 17:11:45.354 DEBUG ydb-core-sys_view-ut(pid=963893, tid=0x00007F71D6B5F640) [core eval] yql_eval_expr.cpp:1156: EvaluateExpression - finish 2025-03-31T17:11:45.355022Z node 1 :KQP_YQL TRACE: TraceId: 01jqpk041v5ehy3998v7nktnn3, SessionId: CompileActor 2025-03-31 17:11:45.354 TRACE ydb-core-sys_view-ut(pid=963893, tid=0x00007F71D6B5F640) [KQP] kqp_transform.cpp:33: YqlTransformer: ( (let $1 '('"Key" (DataType 'Int32) '('columnConstrains '('('not_null))) '())) (let $2 '('"Value" (AsOptionalType (DataType 'String)) '('columnConstrains '()) '())) (let $3 '('('mode 'create) '('columns '($1 $2)) '('primarykey '('"Key")) '('temporary))) (let $4 (Write! world (DataSink '"kikimr" '"db") (Key '('tablescheme (String '"/Root/test_show_create"))) (Void) $3)) (return (Commit! 
$4 (DataSink '"kikimr" '"db") '('('"mode" '"flush")))) ) 2025-03-31T17:11:45.362431Z node 1 :KQP_YQL DEBUG: TraceId: 01jqpk041v5ehy3998v7nktnn3, SessionId: CompileActor 2025-03-31 17:11:45.362 DEBUG ydb-core-sys_view-ut(pid=963893, tid=0x00007F71D6B5F640) [perf] type_ann_expr.cpp:47: Execution of [TypeAnnotationTransformer::DoTransform] took 3.96ms 2025-03-31T17:11:45.363163Z node 1 :KQP_YQL DEBUG: TraceId: 01jqpk041v5ehy3998v7nktnn3, SessionId: CompileActor 2025-03-31 17:11:45.363 DEBUG ydb-core-sys_view-ut(pid=963893, tid=0x00007F71D6B5F640) [perf] yql_expr_constraint.cpp:3226: Execution of [ConstraintTransformer::DoTransform] took 293us 2025-03-31T17:11:45.363293Z node 1 :KQP_YQL DEBUG: TraceId: 01jqpk041v5ehy3998v7nktnn3, SessionId: CompileActor 2025-03-31 17:11:45.363 DEBUG ydb-core-sys_view-ut(pid=963893, tid=0x00007F71D6B5F640) [perf] yql_expr_csee.cpp:620: Execution of [UpdateCompletness] took 55us 2025-03-31T17:11:45.363735Z node 1 :KQP_YQL DEBUG: TraceId: 01jqpk041v5ehy3998v7nktnn3, SessionId: CompileActor 2025-03-31 17:11:45.363 DEBUG ydb-core-sys_view-ut(pid=963893, tid=0x00007F71D6B5F640) [perf] yql_expr_csee.cpp:633: Execution of [EliminateCommonSubExpressions] took 383us 2025-03-31T17:11:45.368992Z node 1 :KQP_YQL INFO: TraceId: 01jqpk041v5ehy3998v7nktnn3, SessionId: CompileActor 2025-03-31 17:11:45.368 INFO ydb-core-sys_view-ut(pid=963893, tid=0x00007F71D6B5F640) [KQP] kqp_transform.cpp:33: Optimized expr: ( (let $1 (DataSink '"kikimr" '"db")) (let $2 '('"Key" (DataType 'Int32) '('columnConstrains '('('not_null))) '())) (let $3 '('"Value" (OptionalType (DataType 'String)) '('columnConstrains '()) '())) (let ... : EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T17:11:45.898954Z node 1 :TX_PROXY ERROR: Actor# [1:7488022529408439496:3085] txid# 281474976710668, issues: { message: "Check failed: path: \'/Root/.tmp/sessions/NTE4ODIwOGQtZjczOWRhN2MtZDhjOWFjMzctYWZmMjY0Y2I=\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T17:11:45.900945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-03-31T17:11:45.949382Z node 1 :KQP_COMPILE_SERVICE DEBUG: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n DROP TABLE `test_show_create`;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2025-03-31T17:11:45.949467Z node 1 :KQP_COMPILE_SERVICE DEBUG: Perform request, TraceId.SpanIdPtr: 0x000050F0004501E8 2025-03-31T17:11:45.949509Z node 1 :KQP_COMPILE_SERVICE DEBUG: Received compile request, sender: [1:7488022525113471625:2339], queryUid: , queryText: "\n DROP TABLE `test_show_create`;\n ", keepInCache: 1, split: 0{ TraceId: 01jqpk050w956adf0vz7m2adjm, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTE4ODIwOGQtZjczOWRhN2MtZDhjOWFjMzctYWZmMjY0Y2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default} 2025-03-31T17:11:45.949603Z node 1 :KQP_COMPILE_SERVICE DEBUG: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , 
Text: \n DROP TABLE `test_show_create`;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2025-03-31T17:11:45.949648Z node 1 :KQP_COMPILE_SERVICE DEBUG: Added request to queue, sender: [1:7488022525113471625:2339], queueSize: 1 2025-03-31T17:11:45.949975Z node 1 :KQP_COMPILE_SERVICE DEBUG: Created compile actor, sender: [1:7488022525113471625:2339], compileActor: [1:7488022529408439633:2388] 2025-03-31T17:11:45.955222Z node 1 :KQP_YQL INFO: TraceId: 01jqpk050w956adf0vz7m2adjm, SessionId: CompileActor 2025-03-31 17:11:45.954 INFO ydb-core-sys_view-ut(pid=963893, tid=0x00007F71D583F640) [KQP] kqp_host.cpp:1382: Compiled query: ( (return (Write! world (DataSink '"kikimr" '"db") (Key '('table (String '"/Root/test_show_create"))) (Void) '('('mode 'drop)))) ) 2025-03-31T17:11:45.955654Z node 1 :KQP_YQL TRACE: TraceId: 01jqpk050w956adf0vz7m2adjm, SessionId: CompileActor 2025-03-31 17:11:45.955 TRACE ydb-core-sys_view-ut(pid=963893, tid=0x00007F71D583F640) [KQP] kqp_transform.cpp:33: YqlTransformer: ( (let $1 (Write! world (DataSink '"kikimr" '"db") (Key '('table (String '"/Root/test_show_create"))) (Void) '('('mode 'drop)))) (return (Commit! $1 (DataSink '"kikimr" '"db") '('('"mode" '"flush")))) ) 2025-03-31T17:11:45.955842Z node 1 :KQP_YQL DEBUG: TraceId: 01jqpk050w956adf0vz7m2adjm, SessionId: CompileActor 2025-03-31 17:11:45.955 DEBUG ydb-core-sys_view-ut(pid=963893, tid=0x00007F71D583F640) [perf] yql_expr_optimize.cpp:540: Execution of [ExpandApply] took 108us 2025-03-31T17:11:45.991430Z node 1 :KQP_YQL DEBUG: TraceId: 01jqpk050w956adf0vz7m2adjm, SessionId: CompileActor 2025-03-31 17:11:45.991 DEBUG ydb-core-sys_view-ut(pid=963893, tid=0x00007F71D583F640) [core eval] yql_eval_expr.cpp:384: EvaluateExpression - start 2025-03-31T17:11:45.992335Z node 1 :KQP_YQL DEBUG: TraceId: 01jqpk050w956adf0vz7m2adjm, SessionId: CompileActor 2025-03-31 17:11:45.992 DEBUG ydb-core-sys_view-ut(pid=963893, tid=0x00007F71D583F640) [core eval] yql_eval_expr.cpp:1156: EvaluateExpression - finish 2025-03-31T17:11:45.992884Z node 1 :KQP_YQL TRACE: TraceId: 01jqpk050w956adf0vz7m2adjm, SessionId: CompileActor 2025-03-31 17:11:45.992 TRACE ydb-core-sys_view-ut(pid=963893, tid=0x00007F71D583F640) [KQP] kqp_transform.cpp:33: YqlTransformer: ( (let $1 (Write! world (DataSink '"kikimr" '"db") (Key '('table (String '"/Root/test_show_create"))) (Void) '('('mode 'drop)))) (return (Commit! 
$1 (DataSink '"kikimr" '"db") '('('"mode" '"flush")))) ) 2025-03-31T17:11:45.994821Z node 1 :KQP_YQL DEBUG: TraceId: 01jqpk050w956adf0vz7m2adjm, SessionId: CompileActor 2025-03-31 17:11:45.994 DEBUG ydb-core-sys_view-ut(pid=963893, tid=0x00007F71D583F640) [perf] type_ann_expr.cpp:47: Execution of [TypeAnnotationTransformer::DoTransform] took 197us 2025-03-31T17:11:45.995064Z node 1 :KQP_YQL DEBUG: TraceId: 01jqpk050w956adf0vz7m2adjm, SessionId: CompileActor 2025-03-31 17:11:45.995 DEBUG ydb-core-sys_view-ut(pid=963893, tid=0x00007F71D583F640) [perf] yql_expr_constraint.cpp:3226: Execution of [ConstraintTransformer::DoTransform] took 116us 2025-03-31T17:11:45.995201Z node 1 :KQP_YQL DEBUG: TraceId: 01jqpk050w956adf0vz7m2adjm, SessionId: CompileActor 2025-03-31 17:11:45.995 DEBUG ydb-core-sys_view-ut(pid=963893, tid=0x00007F71D583F640) [perf] yql_expr_csee.cpp:620: Execution of [UpdateCompletness] took 20us 2025-03-31T17:11:45.995407Z node 1 :KQP_YQL DEBUG: TraceId: 01jqpk050w956adf0vz7m2adjm, SessionId: CompileActor 2025-03-31 17:11:45.995 DEBUG ydb-core-sys_view-ut(pid=963893, tid=0x00007F71D583F640) [perf] yql_expr_csee.cpp:633: Execution of [EliminateCommonSubExpressions] took 142us 2025-03-31T17:11:45.996671Z node 1 :KQP_YQL INFO: TraceId: 01jqpk050w956adf0vz7m2adjm, SessionId: CompileActor 2025-03-31 17:11:45.996 INFO ydb-core-sys_view-ut(pid=963893, tid=0x00007F71D583F640) [KQP] kqp_transform.cpp:33: Optimized expr: ( (let $1 (DataSink '"kikimr" '"db")) (let $2 (KiDropTable! world $1 '"/Root/test_show_create" '() '"table" '0)) (return (Commit! $2 $1 '('('"mode" '"flush")))) ) 2025-03-31T17:11:45.996746Z node 1 :KQP_YQL INFO: TraceId: 01jqpk050w956adf0vz7m2adjm, SessionId: CompileActor 2025-03-31 17:11:45.996 INFO ydb-core-sys_view-ut(pid=963893, tid=0x00007F71D583F640) [core exec] yql_execution.cpp:59: Begin, root #43 2025-03-31T17:11:45.996830Z node 1 :KQP_YQL INFO: TraceId: 01jqpk050w956adf0vz7m2adjm, SessionId: CompileActor 2025-03-31 17:11:45.996 INFO ydb-core-sys_view-ut(pid=963893, tid=0x00007F71D583F640) [core exec] yql_execution.cpp:72: Collect unused nodes for root #43, status: Ok 2025-03-31T17:11:45.996888Z node 1 :KQP_YQL TRACE: TraceId: 01jqpk050w956adf0vz7m2adjm, SessionId: CompileActor 2025-03-31 17:11:45.996 TRACE ydb-core-sys_view-ut(pid=963893, tid=0x00007F71D583F640) [core exec] yql_execution.cpp:387: {0}, callable #43 2025-03-31T17:11:45.996947Z node 1 :KQP_YQL TRACE: TraceId: 01jqpk050w956adf0vz7m2adjm, SessionId: CompileActor 2025-03-31 17:11:45.996 TRACE ydb-core-sys_view-ut(pid=963893, tid=0x00007F71D583F640) [core exec] yql_execution.cpp:387: {1}, callable #42 2025-03-31T17:11:45.997062Z node 1 :KQP_YQL TRACE: TraceId: 01jqpk050w956adf0vz7m2adjm, SessionId: CompileActor 2025-03-31 17:11:45.997 TRACE ydb-core-sys_view-ut(pid=963893, tid=0x00007F71D583F640) [core exec] yql_execution.cpp:387: {1}, callable #42 2025-03-31T17:11:45.997345Z node 1 :KQP_YQL INFO: TraceId: 01jqpk050w956adf0vz7m2adjm, SessionId: CompileActor 2025-03-31 17:11:45.997 INFO ydb-core-sys_view-ut(pid=963893, tid=0x00007F71D583F640) [core exec] yql_execution.cpp:466: Register async execution for node #42 2025-03-31T17:11:45.997430Z node 1 :KQP_YQL INFO: TraceId: 01jqpk050w956adf0vz7m2adjm, SessionId: CompileActor 2025-03-31 17:11:45.997 INFO ydb-core-sys_view-ut(pid=963893, tid=0x00007F71D583F640) [core exec] yql_execution.cpp:87: Finish, output #43, status: Async 2025-03-31T17:11:45.997604Z node 1 :KQP_YQL INFO: TraceId: 01jqpk050w956adf0vz7m2adjm, SessionId: CompileActor 2025-03-31 
17:11:45.997 INFO ydb-core-sys_view-ut(pid=963893, tid=0x00007F71D583F640) [core exec] yql_execution.cpp:133: Completed async execution for node #42 2025-03-31T17:11:45.997680Z node 1 :KQP_YQL INFO: TraceId: 01jqpk050w956adf0vz7m2adjm, SessionId: CompileActor 2025-03-31 17:11:45.997 INFO ydb-core-sys_view-ut(pid=963893, tid=0x00007F71D583F640) [core exec] yql_execution.cpp:153: State is ExecutionComplete after apply async changes for node #42 2025-03-31T17:11:45.997747Z node 1 :KQP_YQL INFO: TraceId: 01jqpk050w956adf0vz7m2adjm, SessionId: CompileActor 2025-03-31 17:11:45.997 INFO ydb-core-sys_view-ut(pid=963893, tid=0x00007F71D583F640) [core exec] yql_execution.cpp:59: Begin, root #43 2025-03-31T17:11:45.997820Z node 1 :KQP_YQL INFO: TraceId: 01jqpk050w956adf0vz7m2adjm, SessionId: CompileActor 2025-03-31 17:11:45.997 INFO ydb-core-sys_view-ut(pid=963893, tid=0x00007F71D583F640) [core exec] yql_execution.cpp:72: Collect unused nodes for root #43, status: Ok 2025-03-31T17:11:45.997876Z node 1 :KQP_YQL TRACE: TraceId: 01jqpk050w956adf0vz7m2adjm, SessionId: CompileActor 2025-03-31 17:11:45.997 TRACE ydb-core-sys_view-ut(pid=963893, tid=0x00007F71D583F640) [core exec] yql_execution.cpp:387: {0}, callable #43 2025-03-31T17:11:45.997963Z node 1 :KQP_YQL INFO: TraceId: 01jqpk050w956adf0vz7m2adjm, SessionId: CompileActor 2025-03-31 17:11:45.997 INFO ydb-core-sys_view-ut(pid=963893, tid=0x00007F71D583F640) [core exec] yql_execution.cpp:577: Node #43 finished execution 2025-03-31T17:11:45.998053Z node 1 :KQP_YQL INFO: TraceId: 01jqpk050w956adf0vz7m2adjm, SessionId: CompileActor 2025-03-31 17:11:45.998 INFO ydb-core-sys_view-ut(pid=963893, tid=0x00007F71D583F640) [core exec] yql_execution.cpp:594: Node #43 created 0 trackable nodes: 2025-03-31T17:11:45.998107Z node 1 :KQP_YQL INFO: TraceId: 01jqpk050w956adf0vz7m2adjm, SessionId: CompileActor 2025-03-31 17:11:45.998 INFO ydb-core-sys_view-ut(pid=963893, tid=0x00007F71D583F640) [core exec] yql_execution.cpp:87: Finish, output #43, status: Ok 2025-03-31T17:11:45.998150Z node 1 :KQP_YQL INFO: TraceId: 01jqpk050w956adf0vz7m2adjm, SessionId: CompileActor 2025-03-31 17:11:45.998 INFO ydb-core-sys_view-ut(pid=963893, tid=0x00007F71D583F640) [core exec] yql_execution.cpp:93: Creating finalizing transformer, output #43 2025-03-31T17:11:45.998603Z node 1 :KQP_COMPILE_SERVICE DEBUG: Received response, sender: [1:7488022525113471625:2339], status: SUCCESS, compileActor: [1:7488022529408439633:2388] 2025-03-31T17:11:45.998662Z node 1 :KQP_COMPILE_SERVICE DEBUG: Send response, sender: [1:7488022525113471625:2339], queryUid: 11bd2bf6-98992d98-5e70a29f-19015530, status:SUCCESS 2025-03-31T17:11:46.002240Z node 1 :TX_PROXY ERROR: Actor# [1:7488022529408439642:3201] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.tmp/sessions\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T17:11:46.005293Z node 1 :TX_PROXY ERROR: Actor# [1:7488022533703406947:3208] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.tmp/sessions/NTE4ODIwOGQtZjczOWRhN2MtZDhjOWFjMzctYWZmMjY0Y2I=\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T17:11:46.033822Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found |81.0%| [TM] {asan, default-linux-x86_64, 
release} ydb/core/tx/tiering/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest >> SystemView::ShowCreateTableTtlSettings [GOOD] Test command err: 2025-03-31T17:11:41.552451Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488022512740311781:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:11:41.552861Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/r3ux/000290/r3tmp/tmpVtEifN/pdisk_1.dat 2025-03-31T17:11:41.995383Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T17:11:41.995499Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T17:11:42.001328Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T17:11:42.005520Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15522, node 1 2025-03-31T17:11:42.131641Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T17:11:42.131668Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T17:11:42.131682Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T17:11:42.131850Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28233 TClient is connected to server localhost:28233 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T17:11:42.738177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T17:11:44.535274Z node 1 :KQP_COMPILE_SERVICE INFO: Subscribed for config changes 2025-03-31T17:11:44.535335Z node 1 :KQP_COMPILE_SERVICE INFO: Updated config 2025-03-31T17:11:44.559402Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022525625214793:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:44.559409Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022525625214801:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:44.560083Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:44.564126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T17:11:44.582775Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488022525625214809:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T17:11:44.669285Z node 1 :TX_PROXY ERROR: Actor# [1:7488022525625214882:2745] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T17:11:44.670482Z node 1 :KQP_COMPILE_SERVICE DEBUG: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n CREATE TABLE test_show_create (\n Key Timestamp NOT NULL,\n Value String,\n PRIMARY KEY (Key)\n )\n WITH (\n TTL = Interval(\"P1D\") DELETE ON Key\n );\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2025-03-31T17:11:44.672749Z node 1 :KQP_COMPILE_SERVICE DEBUG: Perform request, TraceId.SpanIdPtr: 0x000050F00019D3D8 2025-03-31T17:11:44.672835Z node 1 :KQP_COMPILE_SERVICE DEBUG: Received compile request, sender: [1:7488022525625214774:2338], queryUid: , queryText: "\n CREATE TABLE test_show_create (\n Key Timestamp NOT NULL,\n Value String,\n PRIMARY KEY (Key)\n )\n WITH (\n TTL = Interval(\"P1D\") DELETE ON Key\n );\n ", keepInCache: 1, split: 0{ TraceId: 01jqpk03nc5t9y41fm9y2exkdg, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDI2Y2VlNTktMmY1MTQ3OGItNjA4MGEwMzAtNGQ3NGViMDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default} 2025-03-31T17:11:44.673018Z node 1 :KQP_COMPILE_SERVICE DEBUG: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n CREATE TABLE test_show_create (\n Key Timestamp NOT NULL,\n Value String,\n PRIMARY KEY (Key)\n )\n WITH (\n TTL = Interval(\"P1D\") DELETE ON Key\n );\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2025-03-31T17:11:44.673108Z node 1 :KQP_COMPILE_SERVICE DEBUG: Added request to queue, sender: [1:7488022525625214774:2338], queueSize: 1 2025-03-31T17:11:44.673740Z node 1 :KQP_COMPILE_SERVICE DEBUG: Created compile actor, sender: [1:7488022525625214774:2338], compileActor: [1:7488022525625214901:2350] 2025-03-31T17:11:45.057326Z node 1 :KQP_YQL INFO: TraceId: 01jqpk03nc5t9y41fm9y2exkdg, SessionId: CompileActor 2025-03-31 17:11:45.056 INFO ydb-core-sys_view-ut(pid=963865, tid=0x00007FD80C500640) [KQP] kqp_host.cpp:1382: Compiled query: ( (let $1 '('"Key" (DataType 'Timestamp) '('columnConstrains '('('not_null))) '())) (let $2 '('"Value" (AsOptionalType (DataType 'String)) '('columnConstrains '()) '())) (let $3 '('('mode 'create) '('columns '($1 $2)) '('primarykey '('"Key")) '('tableSettings '('('setTtlSettings '('('columnName '"Key") '('tiers '('('('evictionDelay (Interval '"86400000000"))))))))))) (return (Write! 
world (DataSink '"kikimr" '"db") (Key '('tablescheme (String '"/Root/test_show_create"))) (Void) $3)) ) 2025-03-31T17:11:45.058520Z node 1 :KQP_YQL TRACE: TraceId: 01jqpk03nc5t9y41fm9y2exkdg, SessionId: CompileActor 2025-03-31 17:11:45.057 TRACE ydb-core-sys_view-ut(pid=963865, tid=0x00007FD80C500640) [KQP] kqp_transform.cpp:33: YqlTransformer: ( (let $1 '('"Key" (DataType 'Timestamp) '('columnConstrains '('('not_null))) '())) (let $2 '('"Value" (AsOptionalType (DataType 'String)) '('columnConstrains '()) '())) (let $3 '('('mode 'create) '('columns '($1 $2)) '('primarykey '('"Key")) '('tableSettings '('('setTtlSettings '('('columnName '"Key") '('tiers '('('('evictionDelay (Interval '"86400000000"))))))))))) (let $4 (Write! world (DataSink '"kikimr" '"db") (Key '('tablescheme (String '"/Root/test_show_create"))) (Void) $3)) (return (Commit! $4 (DataSink '"kikimr" '"db") '('('"mode" '"flush")))) ) 2025-03-31T17:11:45.058883Z node 1 :KQP_YQL DEBUG: TraceId: 01jqpk03nc5t9y41fm9y2exkdg, SessionId: CompileActor 2025-03-31 17:11:45.058 DEBUG ydb-core-sys_view-ut(pid=963865, tid=0x00007FD80C500640) [perf] yql_expr_optimize.cpp:540: Execution of [ExpandApply] took 287us 2025-03-31T17:11:45.060109Z node 1 :KQP_YQL DEBUG: TraceId: 01jqpk03nc5t9y41fm9y2exkdg, SessionId: CompileActor 2025-03-31 17:11:45.060 DEBUG ydb-core-sys_view-ut(pid=963865, tid=0x00007FD80C500640) [core eval] yql_eval_expr.cpp:384: EvaluateExpression - start 2025-03-31T17:11:45.063516Z node 1 :KQP_YQL DEBUG: TraceId: 01jqpk03nc5t9y41fm9y2exkdg, SessionId: CompileActor 2025-03-31 17:11:45.063 DEBUG ydb-core-sys_view-ut(pid=963865, tid=0x00007FD80C500640) [core eval] yql_eval_expr.cpp:1156: EvaluateExpression - finish 2025-03-31T17:11:45.064278Z node 1 :KQP_YQL TRACE: TraceId: 01jqpk03nc5t9y41fm9y2exkdg, SessionId: CompileActor 2025-03-31 17:11:45.063 TRACE ydb-core-sys_view-ut(pid=963865, tid=0x00007FD80C500640) [KQP] kqp_transform.cpp:33: YqlTransformer: ( (let $1 '('"Key" (DataType 'Timestamp) '('columnConstrains '('('not_null))) '())) (let $2 '('"Value" (AsOptionalType (DataType 'String)) '('columnConstrains '()) '())) (let $3 '('('mode 'create) '('columns '($1 $2)) '('primarykey '('"Key")) '('tableSettings '('('setTtlSettings '('('columnName '"Key") '('tiers '('('('evictionDelay (Interval '"86400000000"))))))))))) (let $4 (Write! world (DataSink '"kikimr" '"db") (Key '('tablescheme (String '"/Root/test_show_create"))) (Void) $3)) (return (Commit! 
$4 (DataSink '"kikimr" '"db") '('('"mode" '"flush")))) ) 2025-03-31T17:11:45.088608Z node 1 :KQP_YQL DEBUG: TraceId: 01jqpk03nc5t9y41fm9y2exkdg, SessionId: CompileActor 2025-03-31 17:11:45.088 DEBUG ydb-core-sys_view-ut(pid=963865, tid=0x00007FD80C500640) [perf] type_ann_expr.cpp:47: Execution of [TypeAnnotationTransformer::DoTransform] took 15.3ms 2025-03-31T17:11:45.089133Z node 1 :KQP_YQL DEBUG: TraceId: 01jqpk03nc5t9y41fm9y2exkdg, SessionId: CompileActor 2025-03-31 17:11:45.089 DEBUG ydb-core-sys_view-ut(pid=963865, tid=0x00007FD80C500640) [perf] yql_expr_constraint.cpp:3226: Execution of [ConstraintTransformer::DoTransform] took 304us 2025-03-31T17:11:45.089219Z node 1 :KQP_YQL DEBUG: TraceId: 01jqpk03nc5t9y41fm9y2exkdg, SessionId: CompileActor 2025-03-31 17:11:45.089 DEBUG ydb-core-sys_view-ut(pid=963865, tid=0x00007FD80C500640) [perf] yql_expr_csee.cpp:620: Execution of [UpdateCompletness] took 43us 2025-03-31T17:11:45.089649Z node 1 :KQP_YQL DEBUG: TraceId: 01jqpk03nc5t9y41fm9y2exkdg, SessionId: CompileActor 2025-03-31 17:11:45.089 DEBUG ydb-core-sys_view-ut(pid=963865, tid=0x00007FD80C500640) [perf] yql_expr_csee.cpp:633: Execution of [EliminateCommonSubExpressions] took 379us 2025-03-31T17:11:45.094 ... :KQP_YQL INFO: TraceId: 01jqpk050jbkdcd0sg9z99v7z9, SessionId: CompileActor 2025-03-31 17:11:45.958 INFO ydb-core-sys_view-ut(pid=963865, tid=0x00007FD80B1F6640) [core exec] yql_execution.cpp:594: Node #87 created 0 trackable nodes: 2025-03-31T17:11:45.958819Z node 1 :KQP_YQL INFO: TraceId: 01jqpk050jbkdcd0sg9z99v7z9, SessionId: CompileActor 2025-03-31 17:11:45.958 INFO ydb-core-sys_view-ut(pid=963865, tid=0x00007FD80B1F6640) [core exec] yql_execution.cpp:87: Finish, output #87, status: Ok 2025-03-31T17:11:45.958860Z node 1 :KQP_YQL INFO: TraceId: 01jqpk050jbkdcd0sg9z99v7z9, SessionId: CompileActor 2025-03-31 17:11:45.958 INFO ydb-core-sys_view-ut(pid=963865, tid=0x00007FD80B1F6640) [core exec] yql_execution.cpp:93: Creating finalizing transformer, output #87 2025-03-31T17:11:45.959307Z node 1 :KQP_COMPILE_SERVICE DEBUG: Received response, sender: [1:7488022525625214774:2338], status: SUCCESS, compileActor: [1:7488022529920182976:2412] 2025-03-31T17:11:45.959361Z node 1 :KQP_COMPILE_SERVICE DEBUG: Send response, sender: [1:7488022525625214774:2338], queryUid: f4e7991c-b8cb38cb-d4f43cf4-23fbf49b, status:SUCCESS 2025-03-31T17:11:45.961616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-31T17:11:46.006187Z node 1 :KQP_COMPILE_SERVICE DEBUG: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n DROP TABLE `test_show_create`;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2025-03-31T17:11:46.006263Z node 1 :KQP_COMPILE_SERVICE DEBUG: Perform request, TraceId.SpanIdPtr: 0x000050F00059BB38 2025-03-31T17:11:46.006289Z node 1 :KQP_COMPILE_SERVICE DEBUG: Received compile request, sender: [1:7488022525625214774:2338], queryUid: , queryText: "\n DROP TABLE `test_show_create`;\n ", keepInCache: 1, split: 0{ TraceId: 01jqpk052n8mm8s3j3ehvjvyx2, Database: /Root, DatabaseId: /Root, SessionId: 
ydb://session/3?node_id=1&id=ZDI2Y2VlNTktMmY1MTQ3OGItNjA4MGEwMzAtNGQ3NGViMDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default} 2025-03-31T17:11:46.006373Z node 1 :KQP_COMPILE_SERVICE DEBUG: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n DROP TABLE `test_show_create`;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2025-03-31T17:11:46.006418Z node 1 :KQP_COMPILE_SERVICE DEBUG: Added request to queue, sender: [1:7488022525625214774:2338], queueSize: 1 2025-03-31T17:11:46.006807Z node 1 :KQP_COMPILE_SERVICE DEBUG: Created compile actor, sender: [1:7488022525625214774:2338], compileActor: [1:7488022534215150415:2420] 2025-03-31T17:11:46.012083Z node 1 :KQP_YQL INFO: TraceId: 01jqpk052n8mm8s3j3ehvjvyx2, SessionId: CompileActor 2025-03-31 17:11:46.011 INFO ydb-core-sys_view-ut(pid=963865, tid=0x00007FD80C500640) [KQP] kqp_host.cpp:1382: Compiled query: ( (return (Write! world (DataSink '"kikimr" '"db") (Key '('table (String '"/Root/test_show_create"))) (Void) '('('mode 'drop)))) ) 2025-03-31T17:11:46.012521Z node 1 :KQP_YQL TRACE: TraceId: 01jqpk052n8mm8s3j3ehvjvyx2, SessionId: CompileActor 2025-03-31 17:11:46.012 TRACE ydb-core-sys_view-ut(pid=963865, tid=0x00007FD80C500640) [KQP] kqp_transform.cpp:33: YqlTransformer: ( (let $1 (Write! world (DataSink '"kikimr" '"db") (Key '('table (String '"/Root/test_show_create"))) (Void) '('('mode 'drop)))) (return (Commit! $1 (DataSink '"kikimr" '"db") '('('"mode" '"flush")))) ) 2025-03-31T17:11:46.012694Z node 1 :KQP_YQL DEBUG: TraceId: 01jqpk052n8mm8s3j3ehvjvyx2, SessionId: CompileActor 2025-03-31 17:11:46.012 DEBUG ydb-core-sys_view-ut(pid=963865, tid=0x00007FD80C500640) [perf] yql_expr_optimize.cpp:540: Execution of [ExpandApply] took 105us 2025-03-31T17:11:46.012988Z node 1 :KQP_YQL DEBUG: TraceId: 01jqpk052n8mm8s3j3ehvjvyx2, SessionId: CompileActor 2025-03-31 17:11:46.012 DEBUG ydb-core-sys_view-ut(pid=963865, tid=0x00007FD80C500640) [core eval] yql_eval_expr.cpp:384: EvaluateExpression - start 2025-03-31T17:11:46.013833Z node 1 :KQP_YQL DEBUG: TraceId: 01jqpk052n8mm8s3j3ehvjvyx2, SessionId: CompileActor 2025-03-31 17:11:46.013 DEBUG ydb-core-sys_view-ut(pid=963865, tid=0x00007FD80C500640) [core eval] yql_eval_expr.cpp:1156: EvaluateExpression - finish 2025-03-31T17:11:46.014376Z node 1 :KQP_YQL TRACE: TraceId: 01jqpk052n8mm8s3j3ehvjvyx2, SessionId: CompileActor 2025-03-31 17:11:46.014 TRACE ydb-core-sys_view-ut(pid=963865, tid=0x00007FD80C500640) [KQP] kqp_transform.cpp:33: YqlTransformer: ( (let $1 (Write! world (DataSink '"kikimr" '"db") (Key '('table (String '"/Root/test_show_create"))) (Void) '('('mode 'drop)))) (return (Commit! 
$1 (DataSink '"kikimr" '"db") '('('"mode" '"flush")))) ) 2025-03-31T17:11:46.016555Z node 1 :KQP_YQL DEBUG: TraceId: 01jqpk052n8mm8s3j3ehvjvyx2, SessionId: CompileActor 2025-03-31 17:11:46.016 DEBUG ydb-core-sys_view-ut(pid=963865, tid=0x00007FD80B1F6640) [perf] type_ann_expr.cpp:47: Execution of [TypeAnnotationTransformer::DoTransform] took 211us 2025-03-31T17:11:46.016800Z node 1 :KQP_YQL DEBUG: TraceId: 01jqpk052n8mm8s3j3ehvjvyx2, SessionId: CompileActor 2025-03-31 17:11:46.016 DEBUG ydb-core-sys_view-ut(pid=963865, tid=0x00007FD80B1F6640) [perf] yql_expr_constraint.cpp:3226: Execution of [ConstraintTransformer::DoTransform] took 120us 2025-03-31T17:11:46.016900Z node 1 :KQP_YQL DEBUG: TraceId: 01jqpk052n8mm8s3j3ehvjvyx2, SessionId: CompileActor 2025-03-31 17:11:46.016 DEBUG ydb-core-sys_view-ut(pid=963865, tid=0x00007FD80B1F6640) [perf] yql_expr_csee.cpp:620: Execution of [UpdateCompletness] took 18us 2025-03-31T17:11:46.017130Z node 1 :KQP_YQL DEBUG: TraceId: 01jqpk052n8mm8s3j3ehvjvyx2, SessionId: CompileActor 2025-03-31 17:11:46.017 DEBUG ydb-core-sys_view-ut(pid=963865, tid=0x00007FD80B1F6640) [perf] yql_expr_csee.cpp:633: Execution of [EliminateCommonSubExpressions] took 152us 2025-03-31T17:11:46.018325Z node 1 :KQP_YQL INFO: TraceId: 01jqpk052n8mm8s3j3ehvjvyx2, SessionId: CompileActor 2025-03-31 17:11:46.018 INFO ydb-core-sys_view-ut(pid=963865, tid=0x00007FD80B1F6640) [KQP] kqp_transform.cpp:33: Optimized expr: ( (let $1 (DataSink '"kikimr" '"db")) (let $2 (KiDropTable! world $1 '"/Root/test_show_create" '() '"table" '0)) (return (Commit! $2 $1 '('('"mode" '"flush")))) ) 2025-03-31T17:11:46.018369Z node 1 :KQP_YQL INFO: TraceId: 01jqpk052n8mm8s3j3ehvjvyx2, SessionId: CompileActor 2025-03-31 17:11:46.018 INFO ydb-core-sys_view-ut(pid=963865, tid=0x00007FD80B1F6640) [core exec] yql_execution.cpp:59: Begin, root #43 2025-03-31T17:11:46.018397Z node 1 :KQP_YQL INFO: TraceId: 01jqpk052n8mm8s3j3ehvjvyx2, SessionId: CompileActor 2025-03-31 17:11:46.018 INFO ydb-core-sys_view-ut(pid=963865, tid=0x00007FD80B1F6640) [core exec] yql_execution.cpp:72: Collect unused nodes for root #43, status: Ok 2025-03-31T17:11:46.018452Z node 1 :KQP_YQL TRACE: TraceId: 01jqpk052n8mm8s3j3ehvjvyx2, SessionId: CompileActor 2025-03-31 17:11:46.018 TRACE ydb-core-sys_view-ut(pid=963865, tid=0x00007FD80B1F6640) [core exec] yql_execution.cpp:387: {0}, callable #43 2025-03-31T17:11:46.018490Z node 1 :KQP_YQL TRACE: TraceId: 01jqpk052n8mm8s3j3ehvjvyx2, SessionId: CompileActor 2025-03-31 17:11:46.018 TRACE ydb-core-sys_view-ut(pid=963865, tid=0x00007FD80B1F6640) [core exec] yql_execution.cpp:387: {1}, callable #42 2025-03-31T17:11:46.018550Z node 1 :KQP_YQL TRACE: TraceId: 01jqpk052n8mm8s3j3ehvjvyx2, SessionId: CompileActor 2025-03-31 17:11:46.018 TRACE ydb-core-sys_view-ut(pid=963865, tid=0x00007FD80B1F6640) [core exec] yql_execution.cpp:387: {1}, callable #42 2025-03-31T17:11:46.018732Z node 1 :KQP_YQL INFO: TraceId: 01jqpk052n8mm8s3j3ehvjvyx2, SessionId: CompileActor 2025-03-31 17:11:46.018 INFO ydb-core-sys_view-ut(pid=963865, tid=0x00007FD80B1F6640) [core exec] yql_execution.cpp:466: Register async execution for node #42 2025-03-31T17:11:46.018780Z node 1 :KQP_YQL INFO: TraceId: 01jqpk052n8mm8s3j3ehvjvyx2, SessionId: CompileActor 2025-03-31 17:11:46.018 INFO ydb-core-sys_view-ut(pid=963865, tid=0x00007FD80B1F6640) [core exec] yql_execution.cpp:87: Finish, output #43, status: Async 2025-03-31T17:11:46.018905Z node 1 :KQP_YQL INFO: TraceId: 01jqpk052n8mm8s3j3ehvjvyx2, SessionId: CompileActor 2025-03-31 
17:11:46.018 INFO ydb-core-sys_view-ut(pid=963865, tid=0x00007FD80B1F6640) [core exec] yql_execution.cpp:133: Completed async execution for node #42 2025-03-31T17:11:46.018977Z node 1 :KQP_YQL INFO: TraceId: 01jqpk052n8mm8s3j3ehvjvyx2, SessionId: CompileActor 2025-03-31 17:11:46.018 INFO ydb-core-sys_view-ut(pid=963865, tid=0x00007FD80B1F6640) [core exec] yql_execution.cpp:153: State is ExecutionComplete after apply async changes for node #42 2025-03-31T17:11:46.019019Z node 1 :KQP_YQL INFO: TraceId: 01jqpk052n8mm8s3j3ehvjvyx2, SessionId: CompileActor 2025-03-31 17:11:46.018 INFO ydb-core-sys_view-ut(pid=963865, tid=0x00007FD80B1F6640) [core exec] yql_execution.cpp:59: Begin, root #43 2025-03-31T17:11:46.019061Z node 1 :KQP_YQL INFO: TraceId: 01jqpk052n8mm8s3j3ehvjvyx2, SessionId: CompileActor 2025-03-31 17:11:46.019 INFO ydb-core-sys_view-ut(pid=963865, tid=0x00007FD80B1F6640) [core exec] yql_execution.cpp:72: Collect unused nodes for root #43, status: Ok 2025-03-31T17:11:46.019204Z node 1 :KQP_YQL TRACE: TraceId: 01jqpk052n8mm8s3j3ehvjvyx2, SessionId: CompileActor 2025-03-31 17:11:46.019 TRACE ydb-core-sys_view-ut(pid=963865, tid=0x00007FD80B1F6640) [core exec] yql_execution.cpp:387: {0}, callable #43 2025-03-31T17:11:46.019290Z node 1 :KQP_YQL INFO: TraceId: 01jqpk052n8mm8s3j3ehvjvyx2, SessionId: CompileActor 2025-03-31 17:11:46.019 INFO ydb-core-sys_view-ut(pid=963865, tid=0x00007FD80B1F6640) [core exec] yql_execution.cpp:577: Node #43 finished execution 2025-03-31T17:11:46.019352Z node 1 :KQP_YQL INFO: TraceId: 01jqpk052n8mm8s3j3ehvjvyx2, SessionId: CompileActor 2025-03-31 17:11:46.019 INFO ydb-core-sys_view-ut(pid=963865, tid=0x00007FD80B1F6640) [core exec] yql_execution.cpp:594: Node #43 created 0 trackable nodes: 2025-03-31T17:11:46.019397Z node 1 :KQP_YQL INFO: TraceId: 01jqpk052n8mm8s3j3ehvjvyx2, SessionId: CompileActor 2025-03-31 17:11:46.019 INFO ydb-core-sys_view-ut(pid=963865, tid=0x00007FD80B1F6640) [core exec] yql_execution.cpp:87: Finish, output #43, status: Ok 2025-03-31T17:11:46.019474Z node 1 :KQP_YQL INFO: TraceId: 01jqpk052n8mm8s3j3ehvjvyx2, SessionId: CompileActor 2025-03-31 17:11:46.019 INFO ydb-core-sys_view-ut(pid=963865, tid=0x00007FD80B1F6640) [core exec] yql_execution.cpp:93: Creating finalizing transformer, output #43 2025-03-31T17:11:46.019852Z node 1 :KQP_COMPILE_SERVICE DEBUG: Received response, sender: [1:7488022525625214774:2338], status: SUCCESS, compileActor: [1:7488022534215150415:2420] 2025-03-31T17:11:46.019934Z node 1 :KQP_COMPILE_SERVICE DEBUG: Send response, sender: [1:7488022525625214774:2338], queryUid: 28fbb8a3-3f959a07-53c460d9-675f70b, status:SUCCESS 2025-03-31T17:11:46.051240Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037891 not found >> SystemView::ShowCreateTablePartitionAtKeys [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-62 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-60 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-67 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-64 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-68 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-57 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-59 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-58 [GOOD] |81.1%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/tiering/ut/unittest |81.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |81.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |81.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |81.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |81.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |81.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |81.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |81.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |81.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |81.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |81.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |81.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |81.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |82.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |82.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-62 [GOOD] Test command err: Starting YDB, grpc: 26425, msgbus: 20527 2025-03-31T17:11:44.962516Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488022528228862407:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:11:44.962598Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/r3ux/0002d6/r3tmp/tmpwcg2my/pdisk_1.dat 2025-03-31T17:11:45.413392Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T17:11:45.420442Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T17:11:45.420523Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 26425, node 1 2025-03-31T17:11:45.434240Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T17:11:45.479908Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-03-31T17:11:45.479936Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-03-31T17:11:45.503282Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T17:11:45.503303Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T17:11:45.503325Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T17:11:45.503430Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20527 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-03-31T17:11:45.731203Z node 1 :TX_PROXY DEBUG: actor# [1:7488022532523829946:2123] Handle TEvNavigate describe path dc-1 2025-03-31T17:11:45.731256Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022532523830436:2443] HANDLE EvNavigateScheme dc-1 2025-03-31T17:11:45.732621Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022532523830436:2443] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-31T17:11:45.770563Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022532523830436:2443] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-03-31T17:11:45.782498Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022532523830436:2443] Handle TEvDescribeSchemeResult Forward to# [1:7488022532523830435:2442] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-03-31T17:11:45.802719Z node 1 :TX_PROXY DEBUG: actor# [1:7488022532523829946:2123] Handle TEvProposeTransaction 2025-03-31T17:11:45.802746Z node 1 :TX_PROXY DEBUG: actor# [1:7488022532523829946:2123] TxId# 281474976710657 ProcessProposeTransaction 2025-03-31T17:11:45.802853Z node 1 :TX_PROXY DEBUG: actor# [1:7488022532523829946:2123] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7488022532523830449:2449] 2025-03-31T17:11:45.887373Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022532523830449:2449] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-31T17:11:45.887467Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022532523830449:2449] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T17:11:45.887547Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022532523830449:2449] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T17:11:45.887656Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022532523830449:2449] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-03-31T17:11:45.888039Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022532523830449:2449] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T17:11:45.888201Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022532523830449:2449] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-03-31T17:11:45.888326Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022532523830449:2449] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-03-31T17:11:45.888484Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022532523830449:2449] txid# 281474976710657 HANDLE EvClientConnected 2025-03-31T17:11:45.889304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T17:11:45.891818Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022532523830449:2449] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-03-31T17:11:45.891887Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022532523830449:2449] txid# 281474976710657 SEND to# [1:7488022532523830448:2448] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 
2025-03-31T17:11:45.908413Z node 1 :TX_PROXY DEBUG: actor# [1:7488022532523829946:2123] Handle TEvProposeTransaction 2025-03-31T17:11:45.908449Z node 1 :TX_PROXY DEBUG: actor# [1:7488022532523829946:2123] TxId# 281474976710658 ProcessProposeTransaction 2025-03-31T17:11:45.908479Z node 1 :TX_PROXY DEBUG: actor# [1:7488022532523829946:2123] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7488022532523830491:2487] 2025-03-31T17:11:45.911263Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022532523830491:2487] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-31T17:11:45.911316Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022532523830491:2487] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T17:11:45.911332Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022532523830491:2487] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T17:11:45.911396Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022532523830491:2487] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-03-31T17:11:45.911717Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022532523830491:2487] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T17:11:45.911824Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022532523830491:2487] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T17:11:45.911867Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022532523830491:2487] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-03-31T17:11:45.912030Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022532523830491:2487] txid# 281474976710658 HANDLE EvClientConnected 2025-03-31T17:11:45.912437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T17:11:45.914754Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022532523830491:2487] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-03-31T17:11:45.914816Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022532523830491:2487] txid# 281474976710658 SEND to# [1:7488022532523830490:2486] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-03-31T17:11:47.431302Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022541113765179:2338], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:47.431306Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022541113765171:2335], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:47.431372Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:47.431590Z node 1 :TX_PROXY DEBUG: actor# [1:7488022532523829946:2123] H ... 0 userReqId# "" txid# 281474976710662 SEND to# [1:7488022541113765304:2595] 2025-03-31T17:11:47.856856Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022541113765304:2595] txid# 281474976710662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\r\010\001\022\t\032\007dbadmin\n\025\010\000\022\021\010\001\020\200\200\002\032\007dbadmin \000" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:36790" 2025-03-31T17:11:47.856921Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022541113765304:2595] txid# 281474976710662 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T17:11:47.856970Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022541113765304:2595] txid# 281474976710662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T17:11:47.857018Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022541113765304:2595] txid# 281474976710662 TEvNavigateKeySet requested from SchemeCache 2025-03-31T17:11:47.857380Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022541113765304:2595] txid# 281474976710662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T17:11:47.857544Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022541113765304:2595] HANDLE EvNavigateKeySetResult, txid# 281474976710662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T17:11:47.857596Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022541113765304:2595] txid# 281474976710662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710662 TabletId# 72057594046644480} 2025-03-31T17:11:47.857778Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022541113765304:2595] txid# 281474976710662 HANDLE EvClientConnected 2025-03-31T17:11:47.858271Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T17:11:47.860575Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022541113765304:2595] txid# 281474976710662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710662} 2025-03-31T17:11:47.860627Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022541113765304:2595] txid# 281474976710662 SEND to# [1:7488022541113765303:2345] Source {TEvProposeTransactionStatus txid# 281474976710662 Status# 48} 2025-03-31T17:11:47.897951Z node 1 :TX_PROXY DEBUG: actor# [1:7488022532523829946:2123] Handle TEvProposeTransaction 2025-03-31T17:11:47.897987Z node 1 :TX_PROXY DEBUG: actor# [1:7488022532523829946:2123] TxId# 281474976710663 ProcessProposeTransaction 2025-03-31T17:11:47.898055Z node 1 :TX_PROXY DEBUG: actor# [1:7488022532523829946:2123] Cookie# 0 userReqId# "" txid# 281474976710663 SEND to# [1:7488022541113765343:2620] 2025-03-31T17:11:47.900854Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022541113765343:2620] txid# 281474976710663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" 
OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:36812" 2025-03-31T17:11:47.900922Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022541113765343:2620] txid# 281474976710663 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T17:11:47.900957Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022541113765343:2620] txid# 281474976710663 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T17:11:47.901000Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022541113765343:2620] txid# 281474976710663 TEvNavigateKeySet requested from SchemeCache 2025-03-31T17:11:47.901328Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022541113765343:2620] txid# 281474976710663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T17:11:47.901428Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022541113765343:2620] HANDLE EvNavigateKeySetResult, txid# 281474976710663 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T17:11:47.901489Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022541113765343:2620] txid# 281474976710663 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710663 TabletId# 72057594046644480} 2025-03-31T17:11:47.901646Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022541113765343:2620] txid# 281474976710663 HANDLE EvClientConnected 2025-03-31T17:11:47.908532Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022541113765343:2620] txid# 281474976710663 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710663} 2025-03-31T17:11:47.908583Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022541113765343:2620] txid# 281474976710663 SEND to# [1:7488022541113765342:2347] Source {TEvProposeTransactionStatus txid# 281474976710663 Status# 48} 2025-03-31T17:11:47.916158Z node 1 :TX_PROXY DEBUG: actor# [1:7488022532523829946:2123] Handle TEvProposeTransaction 2025-03-31T17:11:47.916194Z node 1 :TX_PROXY DEBUG: actor# [1:7488022532523829946:2123] TxId# 281474976710664 ProcessProposeTransaction 2025-03-31T17:11:47.916282Z node 1 :TX_PROXY DEBUG: actor# [1:7488022532523829946:2123] Cookie# 0 userReqId# "" txid# 281474976710664 SEND to# [1:7488022541113765356:2629] 2025-03-31T17:11:47.918846Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022541113765356:2629] txid# 281474976710664 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "" NewOwner: "dbadmin" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:36816" 2025-03-31T17:11:47.918900Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022541113765356:2629] txid# 281474976710664 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T17:11:47.918915Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022541113765356:2629] txid# 281474976710664 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T17:11:47.918953Z node 1 :TX_PROXY DEBUG: 
Actor# [1:7488022541113765356:2629] txid# 281474976710664 TEvNavigateKeySet requested from SchemeCache 2025-03-31T17:11:47.919335Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022541113765356:2629] txid# 281474976710664 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T17:11:47.919440Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022541113765356:2629] HANDLE EvNavigateKeySetResult, txid# 281474976710664 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T17:11:47.919604Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022541113765356:2629] txid# 281474976710664 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710664 TabletId# 72057594046644480} 2025-03-31T17:11:47.919799Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022541113765356:2629] txid# 281474976710664 HANDLE EvClientConnected 2025-03-31T17:11:47.920308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T17:11:47.922359Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022541113765356:2629] txid# 281474976710664 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710664} 2025-03-31T17:11:47.922584Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022541113765356:2629] txid# 281474976710664 SEND to# [1:7488022541113765355:2354] Source {TEvProposeTransactionStatus txid# 281474976710664 Status# 48} 2025-03-31T17:11:47.966798Z node 1 :TX_PROXY DEBUG: actor# [1:7488022532523829946:2123] Handle TEvProposeTransaction 2025-03-31T17:11:47.966835Z node 1 :TX_PROXY DEBUG: actor# [1:7488022532523829946:2123] TxId# 281474976710665 ProcessProposeTransaction 2025-03-31T17:11:47.966907Z node 1 :TX_PROXY DEBUG: actor# [1:7488022532523829946:2123] Cookie# 0 userReqId# "" txid# 281474976710665 SEND to# [1:7488022541113765388:2645] 2025-03-31T17:11:47.969154Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022541113765388:2645] txid# 281474976710665 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\007dbadmin\022\030\022\026\n\024all-users@well-known\032\325\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzQ4NDMwNywiaWF0IjoxNzQzNDQxMTA3LCJzdWIiOiJkYmFkbWluIn0.lUcZ37UeF-0AJOLoBk1_ClRNI-8gNPspBB3lnyR0vtRxWgQx3Q50Pr3XG7MDiq_IKRi88nrpNCkco-5vmqrY_yB3xXky0NU93OI2cL2fQWQsS4tz7IUiG7bFvrHALM1jpI4-CnCgdMIkoLD2eke2pKW9ZepzmHfI9ZW4Ast7bPL8T8H0pqU7RMMJ5s6Xj1nNzTmYJdwE4E6mTDypGt72oE19MNk9oe-TLDhQzZtPEIXkpTLLMtaZPT0pHUhOZ4Ih2-D8RJdnhxbhqmjwF8excg8FZaV1cJDielbpEKDXuKrKHllJ1Bwl1joIM8TiCbP0HFKunu7y_ITXg8ToUUFvsQ\"\005Login*\201\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzQ4NDMwNywiaWF0IjoxNzQzNDQxMTA3LCJzdWIiOiJkYmFkbWluIn0.**" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:36846" 2025-03-31T17:11:47.969217Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022541113765388:2645] txid# 281474976710665 Bootstrap, UserSID: dbadmin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T17:11:47.969254Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022541113765388:2645] txid# 281474976710665 Bootstrap, UserSID: dbadmin IsClusterAdministrator: 0 
2025-03-31T17:11:47.969362Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022541113765388:2645] txid# 281474976710665 TEvNavigateKeySet requested from SchemeCache 2025-03-31T17:11:47.969638Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022541113765388:2645] txid# 281474976710665 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T17:11:47.969756Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022541113765388:2645] HANDLE EvNavigateKeySetResult, txid# 281474976710665 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T17:11:47.969805Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022541113765388:2645] txid# 281474976710665 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710665 TabletId# 72057594046644480} 2025-03-31T17:11:47.969993Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022541113765388:2645] txid# 281474976710665 HANDLE EvClientConnected 2025-03-31T17:11:47.972544Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022541113765388:2645] txid# 281474976710665 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710665} 2025-03-31T17:11:47.972587Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022541113765388:2645] txid# 281474976710665 SEND to# [1:7488022541113765387:2360] Source {TEvProposeTransactionStatus txid# 281474976710665 Status# 48} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-64 [GOOD] Test command err: Starting YDB, grpc: 18129, msgbus: 2124 2025-03-31T17:11:44.886034Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488022527174251979:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:11:44.886196Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/r3ux/0002dd/r3tmp/tmpFA5Zd1/pdisk_1.dat 2025-03-31T17:11:45.259890Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T17:11:45.292547Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T17:11:45.292674Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T17:11:45.304980Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18129, node 1 2025-03-31T17:11:45.442485Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T17:11:45.442517Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T17:11:45.442523Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T17:11:45.442632Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2124 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-03-31T17:11:45.656925Z node 1 :TX_PROXY DEBUG: actor# [1:7488022527174252218:2115] Handle TEvNavigate describe path dc-1 2025-03-31T17:11:45.657010Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022531469220020:2441] HANDLE EvNavigateScheme dc-1 2025-03-31T17:11:45.657959Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022531469220020:2441] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-31T17:11:45.696863Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022531469220020:2441] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-03-31T17:11:45.710992Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022531469220020:2441] Handle TEvDescribeSchemeResult Forward to# [1:7488022531469220019:2440] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-03-31T17:11:45.739244Z node 1 :TX_PROXY DEBUG: actor# [1:7488022527174252218:2115] Handle TEvProposeTransaction 2025-03-31T17:11:45.739280Z node 1 :TX_PROXY DEBUG: actor# [1:7488022527174252218:2115] TxId# 281474976710657 ProcessProposeTransaction 2025-03-31T17:11:45.739438Z node 1 :TX_PROXY DEBUG: actor# [1:7488022527174252218:2115] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7488022531469220033:2447] 2025-03-31T17:11:45.805370Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022531469220033:2447] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-31T17:11:45.805436Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022531469220033:2447] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-31T17:11:45.805470Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022531469220033:2447] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T17:11:45.805517Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022531469220033:2447] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-03-31T17:11:45.805780Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022531469220033:2447] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T17:11:45.805913Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022531469220033:2447] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-03-31T17:11:45.805959Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022531469220033:2447] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-03-31T17:11:45.806123Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022531469220033:2447] txid# 281474976710657 HANDLE EvClientConnected 2025-03-31T17:11:45.806671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T17:11:45.808475Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022531469220033:2447] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-03-31T17:11:45.808529Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022531469220033:2447] txid# 281474976710657 SEND to# [1:7488022531469220032:2446] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 
2025-03-31T17:11:45.819284Z node 1 :TX_PROXY DEBUG: actor# [1:7488022527174252218:2115] Handle TEvProposeTransaction 2025-03-31T17:11:45.819307Z node 1 :TX_PROXY DEBUG: actor# [1:7488022527174252218:2115] TxId# 281474976710658 ProcessProposeTransaction 2025-03-31T17:11:45.819335Z node 1 :TX_PROXY DEBUG: actor# [1:7488022527174252218:2115] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7488022531469220075:2485] 2025-03-31T17:11:45.821596Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022531469220075:2485] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-31T17:11:45.821656Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022531469220075:2485] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-31T17:11:45.821670Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022531469220075:2485] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T17:11:45.821719Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022531469220075:2485] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-03-31T17:11:45.821969Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022531469220075:2485] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T17:11:45.822057Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022531469220075:2485] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T17:11:45.822103Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022531469220075:2485] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-03-31T17:11:45.822231Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022531469220075:2485] txid# 281474976710658 HANDLE EvClientConnected 2025-03-31T17:11:45.822620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T17:11:45.824457Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022531469220075:2485] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-03-31T17:11:45.824498Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022531469220075:2485] txid# 281474976710658 SEND to# [1:7488022531469220074:2484] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-03-31T17:11:47.468633Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022540059154770:2338], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:47.468637Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022540059154762:2335], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:47.468738Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:47.469018Z node 1 :TX_PROXY DEBUG: actor# [1:7488022527174252218:2115] Handle TEvProposeTransaction 2025-03-31T17:11:47.469042Z node 1 :TX_PROXY DEBUG: actor# [1:7488022527174252218:2115] TxId# 281474976710659 ProcessProposeTransaction 2025-03-31T1 ... n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:51684" 2025-03-31T17:11:47.889046Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022540059154896:2601] txid# 281474976710662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-31T17:11:47.889064Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022540059154896:2601] txid# 281474976710662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T17:11:47.889124Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022540059154896:2601] txid# 281474976710662 TEvNavigateKeySet requested from SchemeCache 2025-03-31T17:11:47.889472Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022540059154896:2601] txid# 281474976710662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T17:11:47.889584Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022540059154896:2601] HANDLE EvNavigateKeySetResult, txid# 281474976710662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T17:11:47.889636Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022540059154896:2601] txid# 281474976710662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710662 TabletId# 72057594046644480} 2025-03-31T17:11:47.889774Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022540059154896:2601] txid# 281474976710662 HANDLE EvClientConnected 2025-03-31T17:11:47.890294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T17:11:47.892785Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022540059154896:2601] txid# 281474976710662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710662} 2025-03-31T17:11:47.892836Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022540059154896:2601] txid# 281474976710662 SEND to# [1:7488022540059154895:2345] Source {TEvProposeTransactionStatus txid# 281474976710662 Status# 48} 2025-03-31T17:11:47.936814Z node 1 :TX_PROXY DEBUG: actor# [1:7488022527174252218:2115] Handle TEvProposeTransaction 2025-03-31T17:11:47.936853Z node 1 :TX_PROXY DEBUG: actor# [1:7488022527174252218:2115] TxId# 281474976710663 ProcessProposeTransaction 2025-03-31T17:11:47.936918Z node 1 :TX_PROXY DEBUG: actor# [1:7488022527174252218:2115] Cookie# 0 userReqId# "" txid# 281474976710663 SEND to# [1:7488022540059154931:2621] 2025-03-31T17:11:47.939481Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022540059154931:2621] txid# 281474976710663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: 
"/dc-1" RequestType: "" PeerName: "ipv6:[::1]:51698" 2025-03-31T17:11:47.939545Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022540059154931:2621] txid# 281474976710663 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-31T17:11:47.939560Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022540059154931:2621] txid# 281474976710663 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T17:11:47.939606Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022540059154931:2621] txid# 281474976710663 TEvNavigateKeySet requested from SchemeCache 2025-03-31T17:11:47.939962Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022540059154931:2621] txid# 281474976710663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T17:11:47.940071Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022540059154931:2621] HANDLE EvNavigateKeySetResult, txid# 281474976710663 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T17:11:47.940118Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022540059154931:2621] txid# 281474976710663 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710663 TabletId# 72057594046644480} 2025-03-31T17:11:47.940302Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022540059154931:2621] txid# 281474976710663 HANDLE EvClientConnected 2025-03-31T17:11:47.956255Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022540059154931:2621] txid# 281474976710663 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710663} 2025-03-31T17:11:47.956329Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022540059154931:2621] txid# 281474976710663 SEND to# [1:7488022540059154930:2348] Source {TEvProposeTransactionStatus txid# 281474976710663 Status# 48} 2025-03-31T17:11:47.963766Z node 1 :TX_PROXY DEBUG: actor# [1:7488022527174252218:2115] Handle TEvProposeTransaction 2025-03-31T17:11:47.963794Z node 1 :TX_PROXY DEBUG: actor# [1:7488022527174252218:2115] TxId# 281474976710664 ProcessProposeTransaction 2025-03-31T17:11:47.963859Z node 1 :TX_PROXY DEBUG: actor# [1:7488022527174252218:2115] Cookie# 0 userReqId# "" txid# 281474976710664 SEND to# [1:7488022540059154947:2633] 2025-03-31T17:11:47.966242Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022540059154947:2633] txid# 281474976710664 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "" NewOwner: "dbadmin" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:51708" 2025-03-31T17:11:47.966317Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022540059154947:2633] txid# 281474976710664 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-31T17:11:47.966336Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022540059154947:2633] txid# 281474976710664 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T17:11:47.966378Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022540059154947:2633] txid# 281474976710664 TEvNavigateKeySet requested from SchemeCache 2025-03-31T17:11:47.966687Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022540059154947:2633] txid# 281474976710664 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 
2025-03-31T17:11:47.966794Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022540059154947:2633] HANDLE EvNavigateKeySetResult, txid# 281474976710664 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T17:11:47.966838Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022540059154947:2633] txid# 281474976710664 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710664 TabletId# 72057594046644480} 2025-03-31T17:11:47.967120Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022540059154947:2633] txid# 281474976710664 HANDLE EvClientConnected 2025-03-31T17:11:47.967434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T17:11:47.969452Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022540059154947:2633] txid# 281474976710664 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710664} 2025-03-31T17:11:47.969501Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022540059154947:2633] txid# 281474976710664 SEND to# [1:7488022540059154946:2355] Source {TEvProposeTransactionStatus txid# 281474976710664 Status# 48} 2025-03-31T17:11:48.010523Z node 1 :TX_PROXY DEBUG: actor# [1:7488022527174252218:2115] Handle TEvProposeTransaction 2025-03-31T17:11:48.010557Z node 1 :TX_PROXY DEBUG: actor# [1:7488022527174252218:2115] TxId# 281474976710665 ProcessProposeTransaction 2025-03-31T17:11:48.010601Z node 1 :TX_PROXY DEBUG: actor# [1:7488022527174252218:2115] Cookie# 0 userReqId# "" txid# 281474976710665 SEND to# [1:7488022544354122270:2645] 2025-03-31T17:11:48.013041Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022544354122270:2645] txid# 281474976710665 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\007dbadmin\022\030\022\026\n\024all-users@well-known\032\325\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzQ4NDMwNywiaWF0IjoxNzQzNDQxMTA3LCJzdWIiOiJkYmFkbWluIn0.SuUw2fS5QXx-JTkNyQ15vwSvYd0YhDd2RHGeHSPZR8SXrcq3ZpLfZKBvPllLN3SdlpuEDiBUVxZ1xwA54Fvh9rtgzb1G6oADTIElAZeZ45Vknh00d9SdntS2bRsZsPd0QYwzkurRnTYZzadoVuOCCZzInBvat3LAoA7cw8mKZ49KQrd9QytA_3_8awgfKB-XxMDddjmGFnYt3mI7ZKI2GHjgLNxWMTAgO2cj88dquBMkk4FWnbVvIZlRVuJG6vPnetqUjuboFD_oJYo64HW_MsKPfjK71mgEq88K4m4vA-wlgaKUap3letCYnP1METofzu2zyt_1AruI0npvKbpPRg\"\005Login*\201\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzQ4NDMwNywiaWF0IjoxNzQzNDQxMTA3LCJzdWIiOiJkYmFkbWluIn0.**" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:51734" 2025-03-31T17:11:48.013118Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022544354122270:2645] txid# 281474976710665 Bootstrap, UserSID: dbadmin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-31T17:11:48.013134Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022544354122270:2645] txid# 281474976710665 Bootstrap, UserSID: dbadmin IsClusterAdministrator: 0 2025-03-31T17:11:48.013261Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022544354122270:2645] txid# 281474976710665 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-03-31T17:11:48.013289Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022544354122270:2645] txid# 281474976710665 
HandleResolveDatabase, UserSID: dbadmin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 1 DatabaseOwner: dbadmin 2025-03-31T17:11:48.013335Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022544354122270:2645] txid# 281474976710665 TEvNavigateKeySet requested from SchemeCache 2025-03-31T17:11:48.013539Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022544354122270:2645] txid# 281474976710665 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T17:11:48.013624Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022544354122270:2645] HANDLE EvNavigateKeySetResult, txid# 281474976710665 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T17:11:48.013670Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022544354122270:2645] txid# 281474976710665 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710665 TabletId# 72057594046644480} 2025-03-31T17:11:48.013796Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022544354122270:2645] txid# 281474976710665 HANDLE EvClientConnected 2025-03-31T17:11:48.016378Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022544354122270:2645] txid# 281474976710665 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710665} 2025-03-31T17:11:48.016428Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022544354122270:2645] txid# 281474976710665 SEND to# [1:7488022544354122269:2361] Source {TEvProposeTransactionStatus txid# 281474976710665 Status# 48} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest >> SystemView::ShowCreateTablePartitionAtKeys [GOOD] Test command err: 2025-03-31T17:11:41.573519Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488022511496384715:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:11:41.573604Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/r3ux/000251/r3tmp/tmpvmEM0m/pdisk_1.dat 2025-03-31T17:11:41.991245Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T17:11:42.000200Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T17:11:42.000308Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 18848, node 1 2025-03-31T17:11:42.016190Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T17:11:42.026106Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T17:11:42.029349Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T17:11:42.100080Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T17:11:42.100106Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T17:11:42.100116Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T17:11:42.100254Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62787 
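[Note] The ESchemeOpAlterLogin proposal traced above (txid# 281474976710665) is dbadmin, now the database owner, removing the local user targetuser; the proxy admits it because IsDatabaseAdministrator: 1, and it completes with StatusSuccess. The log shows only the scheme operation, not the statement that produced it; assuming the test drives it through YQL, the equivalent statement would be:

    -- hypothetical YQL equivalent of AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } }
    DROP USER targetuser;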
TClient is connected to server localhost:62787 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T17:11:42.825057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T17:11:44.860026Z node 1 :KQP_COMPILE_SERVICE INFO: Subscribed for config changes 2025-03-31T17:11:44.860078Z node 1 :KQP_COMPILE_SERVICE INFO: Updated config 2025-03-31T17:11:44.874308Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022524381287746:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:44.874340Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022524381287736:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:44.874478Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:44.877679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T17:11:44.894350Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488022524381287750:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T17:11:44.995912Z node 1 :TX_PROXY ERROR: Actor# [1:7488022524381287827:2756] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T17:11:44.997232Z node 1 :KQP_COMPILE_SERVICE DEBUG: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n CREATE TABLE test_show_create (\n Key1 Uint64,\n Key2 String,\n Value String,\n PRIMARY KEY (Key1, Key2)\n )\n WITH (\n PARTITION_AT_KEYS = ((10), (100, \"123\"), (1000, \"cde\"))\n );\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2025-03-31T17:11:44.997381Z node 1 :KQP_COMPILE_SERVICE DEBUG: Perform request, TraceId.SpanIdPtr: 0x000050F000185DC8 2025-03-31T17:11:44.997410Z node 1 :KQP_COMPILE_SERVICE DEBUG: Received compile request, sender: [1:7488022524381287717:2339], queryUid: , queryText: "\n CREATE TABLE test_show_create (\n Key1 Uint64,\n Key2 String,\n Value String,\n PRIMARY KEY (Key1, Key2)\n )\n WITH (\n PARTITION_AT_KEYS = ((10), (100, \"123\"), (1000, \"cde\"))\n );\n ", keepInCache: 1, split: 0{ TraceId: 01jqpk03z74rsg7x0v98p9jfy7, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjI2ZDBhM2QtNTBmZTQ0ZS1lYzZhNjQ3Zi1lNmI1MDBjOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default} 2025-03-31T17:11:44.997506Z node 1 :KQP_COMPILE_SERVICE DEBUG: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n CREATE TABLE test_show_create (\n Key1 Uint64,\n Key2 String,\n Value String,\n PRIMARY KEY (Key1, Key2)\n )\n WITH (\n PARTITION_AT_KEYS = ((10), (100, \"123\"), (1000, \"cde\"))\n );\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2025-03-31T17:11:44.997554Z node 1 :KQP_COMPILE_SERVICE DEBUG: Added request to queue, sender: [1:7488022524381287717:2339], queueSize: 1 2025-03-31T17:11:44.998039Z node 1 :KQP_COMPILE_SERVICE DEBUG: Created compile actor, sender: [1:7488022524381287717:2339], compileActor: [1:7488022524381287838:2350] 2025-03-31T17:11:45.258412Z node 1 :KQP_YQL INFO: TraceId: 01jqpk03z74rsg7x0v98p9jfy7, SessionId: CompileActor 2025-03-31 17:11:45.257 INFO ydb-core-sys_view-ut(pid=963885, tid=0x00007F71E814F640) [KQP] kqp_host.cpp:1382: Compiled query: ( (let $1 '('"Key1" (AsOptionalType (DataType 'Uint64)) '('columnConstrains '()) '())) (let $2 '('"Key2" (AsOptionalType (DataType 'String)) '('columnConstrains '()) '())) (let $3 '('"Value" (AsOptionalType (DataType 'String)) '('columnConstrains '()) '())) (let $4 '($1 $2 $3)) (let $5 '('((Int32 '"10")) '((Int32 '"100") (String '"123")) '((Int32 '"1000") (String '"cde")))) (let $6 '('('mode 'create) '('columns $4) '('primarykey '('"Key1" '"Key2")) '('tableSettings '('('partitionAtKeys $5))))) (return (Write! 
world (DataSink '"kikimr" '"db") (Key '('tablescheme (String '"/Root/test_show_create"))) (Void) $6)) ) 2025-03-31T17:11:45.259738Z node 1 :KQP_YQL TRACE: TraceId: 01jqpk03z74rsg7x0v98p9jfy7, SessionId: CompileActor 2025-03-31 17:11:45.258 TRACE ydb-core-sys_view-ut(pid=963885, tid=0x00007F71E814F640) [KQP] kqp_transform.cpp:33: YqlTransformer: ( (let $1 '('"Key1" (AsOptionalType (DataType 'Uint64)) '('columnConstrains '()) '())) (let $2 '('"Key2" (AsOptionalType (DataType 'String)) '('columnConstrains '()) '())) (let $3 '('"Value" (AsOptionalType (DataType 'String)) '('columnConstrains '()) '())) (let $4 '($1 $2 $3)) (let $5 '('((Int32 '"10")) '((Int32 '"100") (String '"123")) '((Int32 '"1000") (String '"cde")))) (let $6 '('('mode 'create) '('columns $4) '('primarykey '('"Key1" '"Key2")) '('tableSettings '('('partitionAtKeys $5))))) (let $7 (Write! world (DataSink '"kikimr" '"db") (Key '('tablescheme (String '"/Root/test_show_create"))) (Void) $6)) (return (Commit! $7 (DataSink '"kikimr" '"db") '('('"mode" '"flush")))) ) 2025-03-31T17:11:45.260162Z node 1 :KQP_YQL DEBUG: TraceId: 01jqpk03z74rsg7x0v98p9jfy7, SessionId: CompileActor 2025-03-31 17:11:45.260 DEBUG ydb-core-sys_view-ut(pid=963885, tid=0x00007F71E814F640) [perf] yql_expr_optimize.cpp:540: Execution of [ExpandApply] took 328us 2025-03-31T17:11:45.260835Z node 1 :KQP_YQL DEBUG: TraceId: 01jqpk03z74rsg7x0v98p9jfy7, SessionId: CompileActor 2025-03-31 17:11:45.260 DEBUG ydb-core-sys_view-ut(pid=963885, tid=0x00007F71E814F640) [core eval] yql_eval_expr.cpp:384: EvaluateExpression - start 2025-03-31T17:11:45.262497Z node 1 :KQP_YQL DEBUG: TraceId: 01jqpk03z74rsg7x0v98p9jfy7, SessionId: CompileActor 2025-03-31 17:11:45.262 DEBUG ydb-core-sys_view-ut(pid=963885, tid=0x00007F71E814F640) [core eval] yql_eval_expr.cpp:1156: EvaluateExpression - finish 2025-03-31T17:11:45.263693Z node 1 :KQP_YQL TRACE: TraceId: 01jqpk03z74rsg7x0v98p9jfy7, SessionId: CompileActor 2025-03-31 17:11:45.262 TRACE ydb-core-sys_view-ut(pid=963885, tid=0x00007F71E814F640) [KQP] kqp_transform.cpp:33: YqlTransformer: ( (let $1 '('"Key1" (AsOptionalType (DataType 'Uint64)) '('columnConstrains '()) '())) (let $2 '('"Key2" (AsOptionalType (DataType 'String)) '('columnConstrains '()) '())) (let $3 '('"Value" (AsOptionalType (DataType 'String)) '('columnConstrains '()) '())) (let $4 '($1 $2 $3)) (let $5 '('((Int32 '"10")) '((Int32 '"100") (String '"123")) '((Int32 '"1000") (String '"cde")))) (let $6 '('('mode 'create) '('columns $4) '('primarykey '('"Key1" '"Key2")) '('tableSettings '('('partitionAtKeys $5))))) (let $7 (Write! world (DataSink '"kikimr" '"db") (Key '('tablescheme (String '"/Root/test_show_create"))) (Void) $6)) (return (Commit! $7 (DataSink '"kikimr" '"db") '('('"mode" '"flush")))) ) 2025-03-31T17:11:45.271416Z node 1 :KQP_YQL DEBUG: TraceId: 01jqpk03z74rsg7x0v98p9jfy7, SessionId: CompileActor 2025-03-31 17:11:45.271 DEBUG ydb-core-sys_view-ut(pid=963885, tid=0x00007F71E6326640) [perf] type_ann_expr.cpp:47: Execution of [TypeAnnotationTransformer::DoTransform] took 4.24ms 2025-03-31T17:11:45.272132Z node 1 :KQP_YQL DEBUG: TraceId: 01jqpk03z74rsg7x0v98p9jfy7, SessionId: CompileActor 2025-03-31 17:11:45.272 DEBUG ydb-core-sys_view-ut(pid=96388 ... 
r 2025-03-31 17:11:47.067 INFO ydb-core-sys_view-ut(pid=963885, tid=0x00007F71E6326640) [core exec] yql_execution.cpp:93: Creating finalizing transformer, output #194 2025-03-31T17:11:47.068170Z node 1 :KQP_COMPILE_SERVICE DEBUG: Received response, sender: [1:7488022524381287717:2339], status: SUCCESS, compileActor: [1:7488022537266194046:2626] 2025-03-31T17:11:47.068212Z node 1 :KQP_COMPILE_SERVICE DEBUG: Send response, sender: [1:7488022524381287717:2339], queryUid: cdbd4cb-c7d2a936-6b6c4101-76081a86, status:SUCCESS 2025-03-31T17:11:47.072087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-31T17:11:47.162761Z node 1 :KQP_COMPILE_SERVICE DEBUG: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n DROP TABLE `test_show_create`;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2025-03-31T17:11:47.162853Z node 1 :KQP_COMPILE_SERVICE DEBUG: Perform request, TraceId.SpanIdPtr: 0x000050F0003B5238 2025-03-31T17:11:47.162897Z node 1 :KQP_COMPILE_SERVICE DEBUG: Received compile request, sender: [1:7488022524381287717:2339], queryUid: , queryText: "\n DROP TABLE `test_show_create`;\n ", keepInCache: 1, split: 0{ TraceId: 01jqpk066t7f3je71qng50agc7, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjI2ZDBhM2QtNTBmZTQ0ZS1lYzZhNjQ3Zi1lNmI1MDBjOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default} 2025-03-31T17:11:47.163019Z node 1 :KQP_COMPILE_SERVICE DEBUG: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n DROP TABLE `test_show_create`;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2025-03-31T17:11:47.163077Z node 1 :KQP_COMPILE_SERVICE DEBUG: Added request to queue, sender: [1:7488022524381287717:2339], queueSize: 1 2025-03-31T17:11:47.163569Z node 1 :KQP_COMPILE_SERVICE DEBUG: Created compile actor, sender: [1:7488022524381287717:2339], compileActor: [1:7488022537266194471:2646] 2025-03-31T17:11:47.168944Z node 1 :KQP_YQL INFO: TraceId: 01jqpk066t7f3je71qng50agc7, SessionId: CompileActor 2025-03-31 17:11:47.168 INFO ydb-core-sys_view-ut(pid=963885, tid=0x00007F71E6326640) [KQP] kqp_host.cpp:1382: Compiled query: ( (return (Write! world (DataSink '"kikimr" '"db") (Key '('table (String '"/Root/test_show_create"))) (Void) '('('mode 'drop)))) ) 2025-03-31T17:11:47.169364Z node 1 :KQP_YQL TRACE: TraceId: 01jqpk066t7f3je71qng50agc7, SessionId: CompileActor 2025-03-31 17:11:47.169 TRACE ydb-core-sys_view-ut(pid=963885, tid=0x00007F71E6326640) [KQP] kqp_transform.cpp:33: YqlTransformer: ( (let $1 (Write! world (DataSink '"kikimr" '"db") (Key '('table (String '"/Root/test_show_create"))) (Void) '('('mode 'drop)))) (return (Commit! 
$1 (DataSink '"kikimr" '"db") '('('"mode" '"flush")))) ) 2025-03-31T17:11:47.169552Z node 1 :KQP_YQL DEBUG: TraceId: 01jqpk066t7f3je71qng50agc7, SessionId: CompileActor 2025-03-31 17:11:47.169 DEBUG ydb-core-sys_view-ut(pid=963885, tid=0x00007F71E6326640) [perf] yql_expr_optimize.cpp:540: Execution of [ExpandApply] took 124us 2025-03-31T17:11:47.169809Z node 1 :KQP_YQL DEBUG: TraceId: 01jqpk066t7f3je71qng50agc7, SessionId: CompileActor 2025-03-31 17:11:47.169 DEBUG ydb-core-sys_view-ut(pid=963885, tid=0x00007F71E6326640) [core eval] yql_eval_expr.cpp:384: EvaluateExpression - start 2025-03-31T17:11:47.170699Z node 1 :KQP_YQL DEBUG: TraceId: 01jqpk066t7f3je71qng50agc7, SessionId: CompileActor 2025-03-31 17:11:47.170 DEBUG ydb-core-sys_view-ut(pid=963885, tid=0x00007F71E6326640) [core eval] yql_eval_expr.cpp:1156: EvaluateExpression - finish 2025-03-31T17:11:47.171220Z node 1 :KQP_YQL TRACE: TraceId: 01jqpk066t7f3je71qng50agc7, SessionId: CompileActor 2025-03-31 17:11:47.170 TRACE ydb-core-sys_view-ut(pid=963885, tid=0x00007F71E6326640) [KQP] kqp_transform.cpp:33: YqlTransformer: ( (let $1 (Write! world (DataSink '"kikimr" '"db") (Key '('table (String '"/Root/test_show_create"))) (Void) '('('mode 'drop)))) (return (Commit! $1 (DataSink '"kikimr" '"db") '('('"mode" '"flush")))) ) 2025-03-31T17:11:47.173535Z node 1 :KQP_YQL DEBUG: TraceId: 01jqpk066t7f3je71qng50agc7, SessionId: CompileActor 2025-03-31 17:11:47.173 DEBUG ydb-core-sys_view-ut(pid=963885, tid=0x00007F71E6326640) [perf] type_ann_expr.cpp:47: Execution of [TypeAnnotationTransformer::DoTransform] took 219us 2025-03-31T17:11:47.173751Z node 1 :KQP_YQL DEBUG: TraceId: 01jqpk066t7f3je71qng50agc7, SessionId: CompileActor 2025-03-31 17:11:47.173 DEBUG ydb-core-sys_view-ut(pid=963885, tid=0x00007F71E6326640) [perf] yql_expr_constraint.cpp:3226: Execution of [ConstraintTransformer::DoTransform] took 111us 2025-03-31T17:11:47.173813Z node 1 :KQP_YQL DEBUG: TraceId: 01jqpk066t7f3je71qng50agc7, SessionId: CompileActor 2025-03-31 17:11:47.173 DEBUG ydb-core-sys_view-ut(pid=963885, tid=0x00007F71E6326640) [perf] yql_expr_csee.cpp:620: Execution of [UpdateCompletness] took 19us 2025-03-31T17:11:47.174001Z node 1 :KQP_YQL DEBUG: TraceId: 01jqpk066t7f3je71qng50agc7, SessionId: CompileActor 2025-03-31 17:11:47.173 DEBUG ydb-core-sys_view-ut(pid=963885, tid=0x00007F71E6326640) [perf] yql_expr_csee.cpp:633: Execution of [EliminateCommonSubExpressions] took 145us 2025-03-31T17:11:47.175328Z node 1 :KQP_YQL INFO: TraceId: 01jqpk066t7f3je71qng50agc7, SessionId: CompileActor 2025-03-31 17:11:47.175 INFO ydb-core-sys_view-ut(pid=963885, tid=0x00007F71E6326640) [KQP] kqp_transform.cpp:33: Optimized expr: ( (let $1 (DataSink '"kikimr" '"db")) (let $2 (KiDropTable! world $1 '"/Root/test_show_create" '() '"table" '0)) (return (Commit! 
$2 $1 '('('"mode" '"flush")))) ) 2025-03-31T17:11:47.175387Z node 1 :KQP_YQL INFO: TraceId: 01jqpk066t7f3je71qng50agc7, SessionId: CompileActor 2025-03-31 17:11:47.175 INFO ydb-core-sys_view-ut(pid=963885, tid=0x00007F71E6326640) [core exec] yql_execution.cpp:59: Begin, root #43 2025-03-31T17:11:47.175419Z node 1 :KQP_YQL INFO: TraceId: 01jqpk066t7f3je71qng50agc7, SessionId: CompileActor 2025-03-31 17:11:47.175 INFO ydb-core-sys_view-ut(pid=963885, tid=0x00007F71E6326640) [core exec] yql_execution.cpp:72: Collect unused nodes for root #43, status: Ok 2025-03-31T17:11:47.175456Z node 1 :KQP_YQL TRACE: TraceId: 01jqpk066t7f3je71qng50agc7, SessionId: CompileActor 2025-03-31 17:11:47.175 TRACE ydb-core-sys_view-ut(pid=963885, tid=0x00007F71E6326640) [core exec] yql_execution.cpp:387: {0}, callable #43 2025-03-31T17:11:47.175494Z node 1 :KQP_YQL TRACE: TraceId: 01jqpk066t7f3je71qng50agc7, SessionId: CompileActor 2025-03-31 17:11:47.175 TRACE ydb-core-sys_view-ut(pid=963885, tid=0x00007F71E6326640) [core exec] yql_execution.cpp:387: {1}, callable #42 2025-03-31T17:11:47.175570Z node 1 :KQP_YQL TRACE: TraceId: 01jqpk066t7f3je71qng50agc7, SessionId: CompileActor 2025-03-31 17:11:47.175 TRACE ydb-core-sys_view-ut(pid=963885, tid=0x00007F71E6326640) [core exec] yql_execution.cpp:387: {1}, callable #42 2025-03-31T17:11:47.175800Z node 1 :KQP_YQL INFO: TraceId: 01jqpk066t7f3je71qng50agc7, SessionId: CompileActor 2025-03-31 17:11:47.175 INFO ydb-core-sys_view-ut(pid=963885, tid=0x00007F71E6326640) [core exec] yql_execution.cpp:466: Register async execution for node #42 2025-03-31T17:11:47.175894Z node 1 :KQP_YQL INFO: TraceId: 01jqpk066t7f3je71qng50agc7, SessionId: CompileActor 2025-03-31 17:11:47.175 INFO ydb-core-sys_view-ut(pid=963885, tid=0x00007F71E6326640) [core exec] yql_execution.cpp:87: Finish, output #43, status: Async 2025-03-31T17:11:47.176002Z node 1 :KQP_YQL INFO: TraceId: 01jqpk066t7f3je71qng50agc7, SessionId: CompileActor 2025-03-31 17:11:47.175 INFO ydb-core-sys_view-ut(pid=963885, tid=0x00007F71E6326640) [core exec] yql_execution.cpp:133: Completed async execution for node #42 2025-03-31T17:11:47.176056Z node 1 :KQP_YQL INFO: TraceId: 01jqpk066t7f3je71qng50agc7, SessionId: CompileActor 2025-03-31 17:11:47.176 INFO ydb-core-sys_view-ut(pid=963885, tid=0x00007F71E6326640) [core exec] yql_execution.cpp:153: State is ExecutionComplete after apply async changes for node #42 2025-03-31T17:11:47.176094Z node 1 :KQP_YQL INFO: TraceId: 01jqpk066t7f3je71qng50agc7, SessionId: CompileActor 2025-03-31 17:11:47.176 INFO ydb-core-sys_view-ut(pid=963885, tid=0x00007F71E6326640) [core exec] yql_execution.cpp:59: Begin, root #43 2025-03-31T17:11:47.176123Z node 1 :KQP_YQL INFO: TraceId: 01jqpk066t7f3je71qng50agc7, SessionId: CompileActor 2025-03-31 17:11:47.176 INFO ydb-core-sys_view-ut(pid=963885, tid=0x00007F71E6326640) [core exec] yql_execution.cpp:72: Collect unused nodes for root #43, status: Ok 2025-03-31T17:11:47.176154Z node 1 :KQP_YQL TRACE: TraceId: 01jqpk066t7f3je71qng50agc7, SessionId: CompileActor 2025-03-31 17:11:47.176 TRACE ydb-core-sys_view-ut(pid=963885, tid=0x00007F71E6326640) [core exec] yql_execution.cpp:387: {0}, callable #43 2025-03-31T17:11:47.176196Z node 1 :KQP_YQL INFO: TraceId: 01jqpk066t7f3je71qng50agc7, SessionId: CompileActor 2025-03-31 17:11:47.176 INFO ydb-core-sys_view-ut(pid=963885, tid=0x00007F71E6326640) [core exec] yql_execution.cpp:577: Node #43 finished execution 2025-03-31T17:11:47.176242Z node 1 :KQP_YQL INFO: TraceId: 01jqpk066t7f3je71qng50agc7, SessionId: 
CompileActor 2025-03-31 17:11:47.176 INFO ydb-core-sys_view-ut(pid=963885, tid=0x00007F71E6326640) [core exec] yql_execution.cpp:594: Node #43 created 0 trackable nodes: 2025-03-31T17:11:47.176273Z node 1 :KQP_YQL INFO: TraceId: 01jqpk066t7f3je71qng50agc7, SessionId: CompileActor 2025-03-31 17:11:47.176 INFO ydb-core-sys_view-ut(pid=963885, tid=0x00007F71E6326640) [core exec] yql_execution.cpp:87: Finish, output #43, status: Ok 2025-03-31T17:11:47.176317Z node 1 :KQP_YQL INFO: TraceId: 01jqpk066t7f3je71qng50agc7, SessionId: CompileActor 2025-03-31 17:11:47.176 INFO ydb-core-sys_view-ut(pid=963885, tid=0x00007F71E6326640) [core exec] yql_execution.cpp:93: Creating finalizing transformer, output #43 2025-03-31T17:11:47.176958Z node 1 :KQP_COMPILE_SERVICE DEBUG: Received response, sender: [1:7488022524381287717:2339], status: SUCCESS, compileActor: [1:7488022537266194471:2646] 2025-03-31T17:11:47.177031Z node 1 :KQP_COMPILE_SERVICE DEBUG: Send response, sender: [1:7488022524381287717:2339], queryUid: 71204c1f-bb87c6ee-2045ae0e-5dabe4a7, status:SUCCESS 2025-03-31T17:11:47.221813Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037921 not found 2025-03-31T17:11:47.221861Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037919 not found 2025-03-31T17:11:47.222937Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037923 not found 2025-03-31T17:11:47.222960Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037920 not found 2025-03-31T17:11:47.223322Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037922 not found ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-68 [GOOD] Test command err: Starting YDB, grpc: 12831, msgbus: 26135 2025-03-31T17:11:44.978056Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488022524623358670:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:11:44.978117Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/r3ux/0002d8/r3tmp/tmphw5qKj/pdisk_1.dat 2025-03-31T17:11:45.368543Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T17:11:45.368640Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T17:11:45.379777Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T17:11:45.393057Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12831, node 1 2025-03-31T17:11:45.422952Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-03-31T17:11:45.423223Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-03-31T17:11:45.456252Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T17:11:45.456280Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T17:11:45.456286Z 
node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T17:11:45.456537Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26135 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-03-31T17:11:45.708708Z node 1 :TX_PROXY DEBUG: actor# [1:7488022528918326196:2116] Handle TEvNavigate describe path dc-1 2025-03-31T17:11:45.708778Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022528918326704:2446] HANDLE EvNavigateScheme dc-1 2025-03-31T17:11:45.709964Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022528918326704:2446] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-31T17:11:45.770745Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022528918326704:2446] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-03-31T17:11:45.786485Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022528918326704:2446] Handle TEvDescribeSchemeResult Forward to# [1:7488022528918326703:2445] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
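[Note] The TX_PROXY records below trace the scenario this test exercises: root@builtin creates two local users and extends the ACL for ordinaryuser, after which ordinaryuser's attempt to drop targetuser is rejected ("Access denied for ordinaryuser", issue_code 200000). Only the resulting AlterLogin/ModifyACL scheme proposals are visible in the log; assuming the test drives them through YQL, the sequence corresponds to:

    CREATE USER ordinaryuser PASSWORD 'passwd';  -- txid# 281474976710661, by root@builtin
    CREATE USER targetuser PASSWORD 'passwd';    -- txid# 281474976710663, by root@builtin
    DROP USER targetuser;                        -- txid# 281474976710664, by ordinaryuser: denied, not an administrator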
2025-03-31T17:11:45.802584Z node 1 :TX_PROXY DEBUG: actor# [1:7488022528918326196:2116] Handle TEvProposeTransaction 2025-03-31T17:11:45.802617Z node 1 :TX_PROXY DEBUG: actor# [1:7488022528918326196:2116] TxId# 281474976710657 ProcessProposeTransaction 2025-03-31T17:11:45.802768Z node 1 :TX_PROXY DEBUG: actor# [1:7488022528918326196:2116] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7488022528918326717:2452] 2025-03-31T17:11:45.910608Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022528918326717:2452] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-31T17:11:45.910707Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022528918326717:2452] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-31T17:11:45.910728Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022528918326717:2452] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T17:11:45.910806Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022528918326717:2452] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-03-31T17:11:45.911165Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022528918326717:2452] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T17:11:45.911489Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022528918326717:2452] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-03-31T17:11:45.911582Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022528918326717:2452] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-03-31T17:11:45.911771Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022528918326717:2452] txid# 281474976710657 HANDLE EvClientConnected 2025-03-31T17:11:45.912699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T17:11:45.915127Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022528918326717:2452] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-03-31T17:11:45.915169Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022528918326717:2452] txid# 281474976710657 SEND to# [1:7488022528918326716:2451] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 
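[Note] txid# 281474976710658 below applies a serialized ACL diff to dc-1. The DiffACL value is a binary NACLib diff printed with octal escapes; its printable tail shows it targets root@builtin, and the \377\377\003 run encodes the access-rights mask. Roughly — and this is an approximation, since the test submits the diff directly through the scheme API and the exact rights live in the binary blob — the YQL counterpart would be a grant on the database root:

    GRANT ALL ON `/dc-1` TO `root@builtin`;   -- assumed approximate equivalent of the DiffACL below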
2025-03-31T17:11:45.928149Z node 1 :TX_PROXY DEBUG: actor# [1:7488022528918326196:2116] Handle TEvProposeTransaction 2025-03-31T17:11:45.928194Z node 1 :TX_PROXY DEBUG: actor# [1:7488022528918326196:2116] TxId# 281474976710658 ProcessProposeTransaction 2025-03-31T17:11:45.928222Z node 1 :TX_PROXY DEBUG: actor# [1:7488022528918326196:2116] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7488022528918326759:2490] 2025-03-31T17:11:45.930250Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022528918326759:2490] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-31T17:11:45.930307Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022528918326759:2490] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-31T17:11:45.930322Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022528918326759:2490] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T17:11:45.930399Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022528918326759:2490] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-03-31T17:11:45.930732Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022528918326759:2490] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T17:11:45.930865Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022528918326759:2490] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T17:11:45.930913Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022528918326759:2490] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-03-31T17:11:45.931045Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022528918326759:2490] txid# 281474976710658 HANDLE EvClientConnected 2025-03-31T17:11:45.931548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T17:11:45.933537Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022528918326759:2490] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-03-31T17:11:45.933580Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022528918326759:2490] txid# 281474976710658 SEND to# [1:7488022528918326758:2489] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-03-31T17:11:47.534512Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022537508261442:2335], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:47.534554Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022537508261450:2338], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:47.534655Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:47.534897Z node 1 :TX_PROXY DEBUG: actor# [1:7488022528918326196:2116] H ... eUser { User: "ordinaryuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:45050" 2025-03-31T17:11:47.895939Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022537508261556:2588] txid# 281474976710661 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-31T17:11:47.895977Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022537508261556:2588] txid# 281474976710661 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T17:11:47.896024Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022537508261556:2588] txid# 281474976710661 TEvNavigateKeySet requested from SchemeCache 2025-03-31T17:11:47.896315Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022537508261556:2588] txid# 281474976710661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T17:11:47.896409Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022537508261556:2588] HANDLE EvNavigateKeySetResult, txid# 281474976710661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T17:11:47.896457Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022537508261556:2588] txid# 281474976710661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710661 TabletId# 72057594046644480} 2025-03-31T17:11:47.896584Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022537508261556:2588] txid# 281474976710661 HANDLE EvClientConnected 2025-03-31T17:11:47.906069Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022537508261556:2588] txid# 281474976710661 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710661} 2025-03-31T17:11:47.906129Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022537508261556:2588] txid# 281474976710661 SEND to# [1:7488022537508261555:2332] Source {TEvProposeTransactionStatus txid# 281474976710661 Status# 48} 2025-03-31T17:11:48.013730Z node 1 :TX_PROXY DEBUG: actor# [1:7488022528918326196:2116] Handle TEvProposeTransaction 2025-03-31T17:11:48.013777Z node 1 :TX_PROXY DEBUG: actor# [1:7488022528918326196:2116] TxId# 281474976710662 ProcessProposeTransaction 2025-03-31T17:11:48.013845Z node 1 :TX_PROXY DEBUG: actor# [1:7488022528918326196:2116] Cookie# 0 userReqId# "" txid# 281474976710662 SEND to# [1:7488022541803228877:2606] 2025-03-31T17:11:48.016314Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022541803228877:2606] txid# 281474976710662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\022\010\001\022\016\032\014ordinaryuser\n\032\010\000\022\026\010\001\020\200\200\002\032\014ordinaryuser \000\n\031\010\000\022\025\010\001\020\200\010\032\014ordinaryuser \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:45062" 2025-03-31T17:11:48.016393Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022541803228877:2606] txid# 281474976710662 Bootstrap, UserSID: 
root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-31T17:11:48.016413Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022541803228877:2606] txid# 281474976710662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T17:11:48.016479Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022541803228877:2606] txid# 281474976710662 TEvNavigateKeySet requested from SchemeCache 2025-03-31T17:11:48.016836Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022541803228877:2606] txid# 281474976710662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T17:11:48.016924Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022541803228877:2606] HANDLE EvNavigateKeySetResult, txid# 281474976710662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T17:11:48.016976Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022541803228877:2606] txid# 281474976710662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710662 TabletId# 72057594046644480} 2025-03-31T17:11:48.017227Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022541803228877:2606] txid# 281474976710662 HANDLE EvClientConnected 2025-03-31T17:11:48.017642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T17:11:48.019759Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022541803228877:2606] txid# 281474976710662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710662} 2025-03-31T17:11:48.019802Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022541803228877:2606] txid# 281474976710662 SEND to# [1:7488022541803228876:2346] Source {TEvProposeTransactionStatus txid# 281474976710662 Status# 48} 2025-03-31T17:11:48.054966Z node 1 :TX_PROXY DEBUG: actor# [1:7488022528918326196:2116] Handle TEvProposeTransaction 2025-03-31T17:11:48.054996Z node 1 :TX_PROXY DEBUG: actor# [1:7488022528918326196:2116] TxId# 281474976710663 ProcessProposeTransaction 2025-03-31T17:11:48.055067Z node 1 :TX_PROXY DEBUG: actor# [1:7488022528918326196:2116] Cookie# 0 userReqId# "" txid# 281474976710663 SEND to# [1:7488022541803228911:2623] 2025-03-31T17:11:48.057716Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022541803228911:2623] txid# 281474976710663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:45080" 2025-03-31T17:11:48.057778Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022541803228911:2623] txid# 281474976710663 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-31T17:11:48.057793Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022541803228911:2623] txid# 281474976710663 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T17:11:48.057834Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022541803228911:2623] txid# 281474976710663 TEvNavigateKeySet requested from SchemeCache 2025-03-31T17:11:48.058114Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022541803228911:2623] 
txid# 281474976710663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T17:11:48.058209Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022541803228911:2623] HANDLE EvNavigateKeySetResult, txid# 281474976710663 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T17:11:48.058254Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022541803228911:2623] txid# 281474976710663 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710663 TabletId# 72057594046644480} 2025-03-31T17:11:48.058370Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022541803228911:2623] txid# 281474976710663 HANDLE EvClientConnected 2025-03-31T17:11:48.064974Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022541803228911:2623] txid# 281474976710663 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710663} 2025-03-31T17:11:48.065020Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022541803228911:2623] txid# 281474976710663 SEND to# [1:7488022541803228910:2348] Source {TEvProposeTransactionStatus txid# 281474976710663 Status# 48} 2025-03-31T17:11:48.099494Z node 1 :TX_PROXY DEBUG: actor# [1:7488022528918326196:2116] Handle TEvProposeTransaction 2025-03-31T17:11:48.099526Z node 1 :TX_PROXY DEBUG: actor# [1:7488022528918326196:2116] TxId# 281474976710664 ProcessProposeTransaction 2025-03-31T17:11:48.099579Z node 1 :TX_PROXY DEBUG: actor# [1:7488022528918326196:2116] Cookie# 0 userReqId# "" txid# 281474976710664 SEND to# [1:7488022541803228939:2635] 2025-03-31T17:11:48.102172Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022541803228939:2635] txid# 281474976710664 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\014ordinaryuser\022\030\022\026\n\024all-users@well-known\032\334\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzQ4NDMwNywiaWF0IjoxNzQzNDQxMTA3LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.L6FpHde6II8Ut45A_I9EZtXlzOta8USfnvjmQnTxH7wSl9pXe0rXkkOrO6E1zd6rk_A-W13F8TNCzsBvBSN6lzrDYbzfuFf3Yw92aY8CBcrkg0uryBS4pkye4zEX6Hs63GuEwVMcc4wPAygmZyzUTotcyZgbcreBhXImzWHzAmFBnWQIHOF5vGbHRQ5BCFizZi7R7fRfEcBlxXiKwZwCoAhTtBdjEsuWlMO_BZOtjhKve9OFeUWfACJUFSUaoa0rMCqEDvl1pRpiBoVV1gZ1iU1RLkDAO46f_lOSXI8uOuMfvBs1MgxNIrFf3v_uIvebnwrLZtBY8ef6vewUNi2xXQ\"\005Login*\210\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzQ4NDMwNywiaWF0IjoxNzQzNDQxMTA3LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.**" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:45112" 2025-03-31T17:11:48.102274Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022541803228939:2635] txid# 281474976710664 Bootstrap, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-31T17:11:48.102300Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022541803228939:2635] txid# 281474976710664 Bootstrap, UserSID: ordinaryuser IsClusterAdministrator: 0 2025-03-31T17:11:48.102463Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022541803228939:2635] txid# 281474976710664 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-03-31T17:11:48.102523Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022541803228939:2635] txid# 281474976710664 HandleResolveDatabase, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 
IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-03-31T17:11:48.102596Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022541803228939:2635] txid# 281474976710664 TEvNavigateKeySet requested from SchemeCache 2025-03-31T17:11:48.102836Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022541803228939:2635] txid# 281474976710664 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T17:11:48.102872Z node 1 :TX_PROXY ERROR: Actor# [1:7488022541803228939:2635] txid# 281474976710664, Access denied for ordinaryuser, attempt to manage user 2025-03-31T17:11:48.102980Z node 1 :TX_PROXY ERROR: Actor# [1:7488022541803228939:2635] txid# 281474976710664, issues: { message: "Access denied for ordinaryuser" issue_code: 200000 severity: 1 } 2025-03-31T17:11:48.103022Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022541803228939:2635] txid# 281474976710664 SEND to# [1:7488022541803228938:2360] Source {TEvProposeTransactionStatus Status# 5} 2025-03-31T17:11:48.113154Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTNkZDJlNTMtZjQ5MzU1YmYtZWE4NzYyYjUtZGQyNTI2YjM=, ActorId: [1:7488022541803228929:2360], ActorState: ExecuteState, TraceId: 01jqpk073q30hbgrbxxr4bmc71, Create QueryResponse for error on request, msg: 2025-03-31T17:11:48.113534Z node 1 :TX_PROXY DEBUG: actor# [1:7488022528918326196:2116] Handle TEvExecuteKqpTransaction 2025-03-31T17:11:48.113569Z node 1 :TX_PROXY DEBUG: actor# [1:7488022528918326196:2116] TxId# 281474976710665 ProcessProposeKqpTransaction >> ReadIteratorExternalBlobs::ExtBlobsEmptyTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-67 [GOOD] Test command err: Starting YDB, grpc: 4276, msgbus: 20436 2025-03-31T17:11:45.036697Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488022529348459443:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:11:45.036762Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/r3ux/0002e0/r3tmp/tmpgcjNrB/pdisk_1.dat 2025-03-31T17:11:45.499449Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T17:11:45.499566Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T17:11:45.504990Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T17:11:45.533972Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4276, node 1 2025-03-31T17:11:45.559254Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-03-31T17:11:45.559291Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-03-31T17:11:45.598103Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T17:11:45.598143Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T17:11:45.598154Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T17:11:45.598313Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20436 WaitRootIsUp 'dc-1'... 
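[Note] The SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-67 block that begins above replays the DropUser-68 scenario with one configuration difference, visible in the txid# 281474976710657 Bootstrap records of each block: the database-administrator check is disabled in this variant. Compare:

    DropUser-68: Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1
    DropUser-67: Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0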
TClient::Ls request: dc-1 2025-03-31T17:11:45.783796Z node 1 :TX_PROXY DEBUG: actor# [1:7488022529348459675:2115] Handle TEvNavigate describe path dc-1 2025-03-31T17:11:45.783855Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022529348460176:2439] HANDLE EvNavigateScheme dc-1 2025-03-31T17:11:45.784845Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022529348460176:2439] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-31T17:11:45.829360Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022529348460176:2439] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-03-31T17:11:45.840061Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022529348460176:2439] Handle TEvDescribeSchemeResult Forward to# [1:7488022529348460175:2438] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-03-31T17:11:45.857065Z node 1 :TX_PROXY DEBUG: actor# [1:7488022529348459675:2115] Handle TEvProposeTransaction 2025-03-31T17:11:45.857107Z node 1 :TX_PROXY DEBUG: actor# [1:7488022529348459675:2115] TxId# 281474976710657 ProcessProposeTransaction 2025-03-31T17:11:45.857224Z node 1 :TX_PROXY DEBUG: actor# [1:7488022529348459675:2115] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7488022529348460191:2447] 2025-03-31T17:11:45.934903Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022529348460191:2447] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-31T17:11:45.934990Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022529348460191:2447] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-03-31T17:11:45.935033Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022529348460191:2447] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T17:11:45.935117Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022529348460191:2447] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-03-31T17:11:45.935535Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022529348460191:2447] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T17:11:45.935755Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022529348460191:2447] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-03-31T17:11:45.935827Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022529348460191:2447] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-03-31T17:11:45.935976Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022529348460191:2447] txid# 281474976710657 HANDLE EvClientConnected 2025-03-31T17:11:45.936788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T17:11:45.938752Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022529348460191:2447] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-03-31T17:11:45.938838Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022529348460191:2447] txid# 281474976710657 SEND to# [1:7488022529348460190:2446] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 
2025-03-31T17:11:45.954540Z node 1 :TX_PROXY DEBUG: actor# [1:7488022529348459675:2115] Handle TEvProposeTransaction 2025-03-31T17:11:45.954569Z node 1 :TX_PROXY DEBUG: actor# [1:7488022529348459675:2115] TxId# 281474976710658 ProcessProposeTransaction 2025-03-31T17:11:45.954604Z node 1 :TX_PROXY DEBUG: actor# [1:7488022529348459675:2115] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7488022529348460231:2483] 2025-03-31T17:11:45.956880Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022529348460231:2483] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-31T17:11:45.956934Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022529348460231:2483] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-03-31T17:11:45.956950Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022529348460231:2483] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T17:11:45.957006Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022529348460231:2483] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-03-31T17:11:45.957362Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022529348460231:2483] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T17:11:45.957446Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022529348460231:2483] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T17:11:45.957491Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022529348460231:2483] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-03-31T17:11:45.957653Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022529348460231:2483] txid# 281474976710658 HANDLE EvClientConnected 2025-03-31T17:11:45.958063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T17:11:45.960055Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022529348460231:2483] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-03-31T17:11:45.960095Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022529348460231:2483] txid# 281474976710658 SEND to# [1:7488022529348460230:2482] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-03-31T17:11:47.529102Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022537938394922:2338], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:47.529102Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022537938394914:2335], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:47.529288Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:47.529619Z node 1 :TX_PROXY DEBUG: actor# [1:7488022529348459675:2115] Han ... Id# 281474976710661 ProcessProposeTransaction 2025-03-31T17:11:47.820269Z node 1 :TX_PROXY DEBUG: actor# [1:7488022529348459675:2115] Cookie# 0 userReqId# "" txid# 281474976710661 SEND to# [1:7488022537938395031:2584] 2025-03-31T17:11:47.822606Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022537938395031:2584] txid# 281474976710661 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "ordinaryuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:43524" 2025-03-31T17:11:47.822685Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022537938395031:2584] txid# 281474976710661 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-03-31T17:11:47.822702Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022537938395031:2584] txid# 281474976710661 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T17:11:47.822771Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022537938395031:2584] txid# 281474976710661 TEvNavigateKeySet requested from SchemeCache 2025-03-31T17:11:47.823078Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022537938395031:2584] txid# 281474976710661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T17:11:47.823261Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022537938395031:2584] HANDLE EvNavigateKeySetResult, txid# 281474976710661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T17:11:47.823322Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022537938395031:2584] txid# 281474976710661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710661 TabletId# 72057594046644480} 2025-03-31T17:11:47.823482Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022537938395031:2584] txid# 281474976710661 HANDLE EvClientConnected 2025-03-31T17:11:47.830852Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022537938395031:2584] txid# 281474976710661 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710661} 2025-03-31T17:11:47.830917Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022537938395031:2584] txid# 281474976710661 SEND to# [1:7488022537938395030:2332] Source {TEvProposeTransactionStatus txid# 281474976710661 Status# 48} 2025-03-31T17:11:47.869439Z node 1 :TX_PROXY DEBUG: actor# [1:7488022529348459675:2115] Handle TEvProposeTransaction 2025-03-31T17:11:47.869475Z node 1 :TX_PROXY DEBUG: actor# [1:7488022529348459675:2115] TxId# 281474976710662 ProcessProposeTransaction 2025-03-31T17:11:47.869525Z node 1 :TX_PROXY DEBUG: actor# [1:7488022529348459675:2115] Cookie# 0 userReqId# "" txid# 281474976710662 SEND to# [1:7488022537938395051:2598] 2025-03-31T17:11:47.871797Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022537938395051:2598] txid# 281474976710662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: 
"\n\022\010\001\022\016\032\014ordinaryuser\n\032\010\000\022\026\010\001\020\200\200\002\032\014ordinaryuser \000\n\031\010\000\022\025\010\001\020\200\010\032\014ordinaryuser \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:43536" 2025-03-31T17:11:47.871892Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022537938395051:2598] txid# 281474976710662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-03-31T17:11:47.871912Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022537938395051:2598] txid# 281474976710662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T17:11:47.871987Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022537938395051:2598] txid# 281474976710662 TEvNavigateKeySet requested from SchemeCache 2025-03-31T17:11:47.872260Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022537938395051:2598] txid# 281474976710662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T17:11:47.872389Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022537938395051:2598] HANDLE EvNavigateKeySetResult, txid# 281474976710662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T17:11:47.872442Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022537938395051:2598] txid# 281474976710662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710662 TabletId# 72057594046644480} 2025-03-31T17:11:47.872592Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022537938395051:2598] txid# 281474976710662 HANDLE EvClientConnected 2025-03-31T17:11:47.873191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T17:11:47.875556Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022537938395051:2598] txid# 281474976710662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710662} 2025-03-31T17:11:47.875606Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022537938395051:2598] txid# 281474976710662 SEND to# [1:7488022537938395050:2345] Source {TEvProposeTransactionStatus txid# 281474976710662 Status# 48} 2025-03-31T17:11:47.911009Z node 1 :TX_PROXY DEBUG: actor# [1:7488022529348459675:2115] Handle TEvProposeTransaction 2025-03-31T17:11:47.911047Z node 1 :TX_PROXY DEBUG: actor# [1:7488022529348459675:2115] TxId# 281474976710663 ProcessProposeTransaction 2025-03-31T17:11:47.911099Z node 1 :TX_PROXY DEBUG: actor# [1:7488022529348459675:2115] Cookie# 0 userReqId# "" txid# 281474976710663 SEND to# [1:7488022537938395084:2617] 2025-03-31T17:11:47.913625Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022537938395084:2617] txid# 281474976710663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:43544" 2025-03-31T17:11:47.913704Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022537938395084:2617] txid# 
281474976710663 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-03-31T17:11:47.913722Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022537938395084:2617] txid# 281474976710663 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T17:11:47.913763Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022537938395084:2617] txid# 281474976710663 TEvNavigateKeySet requested from SchemeCache 2025-03-31T17:11:47.914007Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022537938395084:2617] txid# 281474976710663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T17:11:47.914122Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022537938395084:2617] HANDLE EvNavigateKeySetResult, txid# 281474976710663 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T17:11:47.914167Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022537938395084:2617] txid# 281474976710663 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710663 TabletId# 72057594046644480} 2025-03-31T17:11:47.914357Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022537938395084:2617] txid# 281474976710663 HANDLE EvClientConnected 2025-03-31T17:11:47.922112Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022537938395084:2617] txid# 281474976710663 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710663} 2025-03-31T17:11:47.922165Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022537938395084:2617] txid# 281474976710663 SEND to# [1:7488022537938395083:2347] Source {TEvProposeTransactionStatus txid# 281474976710663 Status# 48} 2025-03-31T17:11:47.974295Z node 1 :TX_PROXY DEBUG: actor# [1:7488022529348459675:2115] Handle TEvProposeTransaction 2025-03-31T17:11:47.974326Z node 1 :TX_PROXY DEBUG: actor# [1:7488022529348459675:2115] TxId# 281474976710664 ProcessProposeTransaction 2025-03-31T17:11:47.974364Z node 1 :TX_PROXY DEBUG: actor# [1:7488022529348459675:2115] Cookie# 0 userReqId# "" txid# 281474976710664 SEND to# [1:7488022537938395115:2632] 2025-03-31T17:11:47.976770Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022537938395115:2632] txid# 281474976710664 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\014ordinaryuser\022\030\022\026\n\024all-users@well-known\032\334\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzQ4NDMwNywiaWF0IjoxNzQzNDQxMTA3LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.PsUhumt3EecB-VEWHMAY7G3ecOHl5dHykAmUGcXI1pEdK_TdHg2F4fOkNWA5JLK2FMQqK0tf0jBseVdrkURVmy5qyrGxHtU5zSTlDLYaiaCbxvNJ0tRu80mP5pBk9XcOAubkiyf0skWzRfgTNaRKq-7li8UrRZkk6poqEIsn5Lic4BFBWaGRh485_wSk8BYZTQciZbS3rYmxFytumtC5HgZdGhx4ZTBirsF3Q8QMfp9I1_PXICqBCLTWCjchjZu1-IOCIvs-JKqPpq-QvXABUJan1w_ZY1zOKe7co55_p2DpF9OPaxfHRs_4CWi-JnbGZd-bjp7NfZhDn2QlzMhS6g\"\005Login*\210\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzQ4NDMwNywiaWF0IjoxNzQzNDQxMTA3LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.**" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:43568" 2025-03-31T17:11:47.976836Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022537938395115:2632] txid# 281474976710664 Bootstrap, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-03-31T17:11:47.976851Z node 1 :TX_PROXY DEBUG: 
Actor# [1:7488022537938395115:2632] txid# 281474976710664 Bootstrap, UserSID: ordinaryuser IsClusterAdministrator: 0 2025-03-31T17:11:47.976888Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022537938395115:2632] txid# 281474976710664 TEvNavigateKeySet requested from SchemeCache 2025-03-31T17:11:47.977274Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022537938395115:2632] txid# 281474976710664 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T17:11:47.977311Z node 1 :TX_PROXY ERROR: Actor# [1:7488022537938395115:2632] txid# 281474976710664, Access denied for ordinaryuser, attempt to manage user 2025-03-31T17:11:47.977442Z node 1 :TX_PROXY ERROR: Actor# [1:7488022537938395115:2632] txid# 281474976710664, issues: { message: "Access denied for ordinaryuser" issue_code: 200000 severity: 1 } 2025-03-31T17:11:47.977492Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022537938395115:2632] txid# 281474976710664 SEND to# [1:7488022537938395114:2359] Source {TEvProposeTransactionStatus Status# 5} 2025-03-31T17:11:47.985551Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YWRmMDRlNTAtZDkzNmU2ZDgtNWIxZThjNjgtYzVhY2YyY2Q=, ActorId: [1:7488022537938395102:2359], ActorState: ExecuteState, TraceId: 01jqpk06zt1ynqh6jj859z14ak, Create QueryResponse for error on request, msg: 2025-03-31T17:11:47.985964Z node 1 :TX_PROXY DEBUG: actor# [1:7488022529348459675:2115] Handle TEvExecuteKqpTransaction 2025-03-31T17:11:47.985988Z node 1 :TX_PROXY DEBUG: actor# [1:7488022529348459675:2115] TxId# 281474976710665 ProcessProposeKqpTransaction |82.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-60 [GOOD] Test command err: Starting YDB, grpc: 6440, msgbus: 24180 2025-03-31T17:11:44.829990Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488022528462907620:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:11:44.831530Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/r3ux/0002e2/r3tmp/tmpT96mm3/pdisk_1.dat 2025-03-31T17:11:45.190734Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T17:11:45.221334Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T17:11:45.221430Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T17:11:45.227295Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6440, node 1 2025-03-31T17:11:45.335675Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T17:11:45.335720Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T17:11:45.335730Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T17:11:45.335864Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24180 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-03-31T17:11:45.547754Z node 1 :TX_PROXY DEBUG: actor# [1:7488022528462907881:2137] Handle TEvNavigate describe path dc-1 2025-03-31T17:11:45.547795Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022532757875641:2434] HANDLE EvNavigateScheme dc-1 2025-03-31T17:11:45.548873Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022532757875641:2434] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-31T17:11:45.581871Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022532757875641:2434] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-03-31T17:11:45.590342Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022532757875641:2434] Handle TEvDescribeSchemeResult Forward to# [1:7488022532757875640:2433] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-03-31T17:11:45.613203Z node 1 :TX_PROXY DEBUG: actor# [1:7488022528462907881:2137] Handle TEvProposeTransaction 2025-03-31T17:11:45.613227Z node 1 :TX_PROXY DEBUG: actor# [1:7488022528462907881:2137] TxId# 281474976710657 ProcessProposeTransaction 2025-03-31T17:11:45.613358Z node 1 :TX_PROXY DEBUG: actor# [1:7488022528462907881:2137] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7488022532757875656:2442] 2025-03-31T17:11:45.688473Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022532757875656:2442] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-31T17:11:45.688540Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022532757875656:2442] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-31T17:11:45.688551Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022532757875656:2442] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T17:11:45.688614Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022532757875656:2442] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-03-31T17:11:45.689013Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022532757875656:2442] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T17:11:45.689204Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022532757875656:2442] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-03-31T17:11:45.689261Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022532757875656:2442] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-03-31T17:11:45.689436Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022532757875656:2442] txid# 281474976710657 HANDLE EvClientConnected 2025-03-31T17:11:45.690139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T17:11:45.692395Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022532757875656:2442] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-03-31T17:11:45.692447Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022532757875656:2442] txid# 281474976710657 SEND to# [1:7488022532757875655:2441] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 
2025-03-31T17:11:45.703357Z node 1 :TX_PROXY DEBUG: actor# [1:7488022528462907881:2137] Handle TEvProposeTransaction 2025-03-31T17:11:45.703383Z node 1 :TX_PROXY DEBUG: actor# [1:7488022528462907881:2137] TxId# 281474976710658 ProcessProposeTransaction 2025-03-31T17:11:45.703432Z node 1 :TX_PROXY DEBUG: actor# [1:7488022528462907881:2137] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7488022532757875696:2478] 2025-03-31T17:11:45.705729Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022532757875696:2478] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-31T17:11:45.705781Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022532757875696:2478] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-31T17:11:45.705795Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022532757875696:2478] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T17:11:45.705849Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022532757875696:2478] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-03-31T17:11:45.706114Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022532757875696:2478] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T17:11:45.706199Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022532757875696:2478] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T17:11:45.706234Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022532757875696:2478] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-03-31T17:11:45.706347Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022532757875696:2478] txid# 281474976710658 HANDLE EvClientConnected 2025-03-31T17:11:45.706794Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T17:11:45.708792Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022532757875696:2478] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-03-31T17:11:45.708837Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022532757875696:2478] txid# 281474976710658 SEND to# [1:7488022532757875695:2477] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-03-31T17:11:47.485719Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022541347810384:2335], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:47.485719Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022541347810392:2338], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:47.485821Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:47.486109Z node 1 :TX_PROXY DEBUG: actor# [1:7488022528462907881:2137] Handle TEvProposeTransaction 2025-03-31T17:11:47.486134Z node 1 :TX_PROXY DEBUG: actor# [1:7488022528462907881:2137] TxId# 281474976710659 ProcessProposeTransaction 2025-03-31T1 ... n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:37432" 2025-03-31T17:11:47.966373Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022541347810528:2601] txid# 281474976710662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-31T17:11:47.966386Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022541347810528:2601] txid# 281474976710662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T17:11:47.966432Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022541347810528:2601] txid# 281474976710662 TEvNavigateKeySet requested from SchemeCache 2025-03-31T17:11:47.966682Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022541347810528:2601] txid# 281474976710662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T17:11:47.966754Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022541347810528:2601] HANDLE EvNavigateKeySetResult, txid# 281474976710662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T17:11:47.966833Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022541347810528:2601] txid# 281474976710662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710662 TabletId# 72057594046644480} 2025-03-31T17:11:47.966958Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022541347810528:2601] txid# 281474976710662 HANDLE EvClientConnected 2025-03-31T17:11:47.967422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T17:11:47.969677Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022541347810528:2601] txid# 281474976710662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710662} 2025-03-31T17:11:47.969708Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022541347810528:2601] txid# 281474976710662 SEND to# [1:7488022541347810527:2346] Source {TEvProposeTransactionStatus txid# 281474976710662 Status# 48} 2025-03-31T17:11:48.004935Z node 1 :TX_PROXY DEBUG: actor# [1:7488022528462907881:2137] Handle TEvProposeTransaction 2025-03-31T17:11:48.004969Z node 1 :TX_PROXY DEBUG: actor# [1:7488022528462907881:2137] TxId# 281474976710663 ProcessProposeTransaction 2025-03-31T17:11:48.005009Z node 1 :TX_PROXY DEBUG: actor# [1:7488022528462907881:2137] Cookie# 0 userReqId# "" txid# 281474976710663 SEND to# [1:7488022545642777861:2624] 2025-03-31T17:11:48.007659Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022545642777861:2624] txid# 281474976710663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: 
"/dc-1" RequestType: "" PeerName: "ipv6:[::1]:37446" 2025-03-31T17:11:48.007726Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022545642777861:2624] txid# 281474976710663 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-31T17:11:48.007743Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022545642777861:2624] txid# 281474976710663 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T17:11:48.007803Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022545642777861:2624] txid# 281474976710663 TEvNavigateKeySet requested from SchemeCache 2025-03-31T17:11:48.008157Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022545642777861:2624] txid# 281474976710663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T17:11:48.008260Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022545642777861:2624] HANDLE EvNavigateKeySetResult, txid# 281474976710663 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T17:11:48.008648Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022545642777861:2624] txid# 281474976710663 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710663 TabletId# 72057594046644480} 2025-03-31T17:11:48.008856Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022545642777861:2624] txid# 281474976710663 HANDLE EvClientConnected 2025-03-31T17:11:48.019924Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022545642777861:2624] txid# 281474976710663 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710663} 2025-03-31T17:11:48.019974Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022545642777861:2624] txid# 281474976710663 SEND to# [1:7488022545642777860:2348] Source {TEvProposeTransactionStatus txid# 281474976710663 Status# 48} 2025-03-31T17:11:48.036301Z node 1 :TX_PROXY DEBUG: actor# [1:7488022528462907881:2137] Handle TEvProposeTransaction 2025-03-31T17:11:48.036338Z node 1 :TX_PROXY DEBUG: actor# [1:7488022528462907881:2137] TxId# 281474976710664 ProcessProposeTransaction 2025-03-31T17:11:48.036383Z node 1 :TX_PROXY DEBUG: actor# [1:7488022528462907881:2137] Cookie# 0 userReqId# "" txid# 281474976710664 SEND to# [1:7488022545642777874:2633] 2025-03-31T17:11:48.038924Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022545642777874:2633] txid# 281474976710664 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "" NewOwner: "dbadmin" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:37460" 2025-03-31T17:11:48.038996Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022545642777874:2633] txid# 281474976710664 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-31T17:11:48.039014Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022545642777874:2633] txid# 281474976710664 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T17:11:48.039059Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022545642777874:2633] txid# 281474976710664 TEvNavigateKeySet requested from SchemeCache 2025-03-31T17:11:48.039418Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022545642777874:2633] txid# 281474976710664 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 
2025-03-31T17:11:48.039538Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022545642777874:2633] HANDLE EvNavigateKeySetResult, txid# 281474976710664 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T17:11:48.039612Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022545642777874:2633] txid# 281474976710664 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710664 TabletId# 72057594046644480} 2025-03-31T17:11:48.039769Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022545642777874:2633] txid# 281474976710664 HANDLE EvClientConnected 2025-03-31T17:11:48.040142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T17:11:48.042298Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022545642777874:2633] txid# 281474976710664 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710664} 2025-03-31T17:11:48.042347Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022545642777874:2633] txid# 281474976710664 SEND to# [1:7488022545642777873:2355] Source {TEvProposeTransactionStatus txid# 281474976710664 Status# 48} 2025-03-31T17:11:48.080843Z node 1 :TX_PROXY DEBUG: actor# [1:7488022528462907881:2137] Handle TEvProposeTransaction 2025-03-31T17:11:48.080882Z node 1 :TX_PROXY DEBUG: actor# [1:7488022528462907881:2137] TxId# 281474976710665 ProcessProposeTransaction 2025-03-31T17:11:48.080938Z node 1 :TX_PROXY DEBUG: actor# [1:7488022528462907881:2137] Cookie# 0 userReqId# "" txid# 281474976710665 SEND to# [1:7488022545642777901:2645] 2025-03-31T17:11:48.083651Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022545642777901:2645] txid# 281474976710665 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\007dbadmin\022\030\022\026\n\024all-users@well-known\032\325\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzQ4NDMwNywiaWF0IjoxNzQzNDQxMTA3LCJzdWIiOiJkYmFkbWluIn0.HRRMyEnI1nFhvClizbY438t8eFqTlt19acpxUXK0-WEooSOLXD_BCylDgASFO-bvg3vETApkhZZJ2XVnUE4bUWz6QWe3BiCt0ry7mYhfERDVzTKkoP0NOyKIghycJ6qnWqZMNOymHFTDkj4jBrxWT3qNWWDR9p9bMwS9LTogupoufkPTekvAyLQs-0Sc8DTkgGLlZ6bsyR_2F5NOnnqPmu5iKYY5Rzya99UwB1G2xw9QFkMWMh0QKHeilZR7jTXUyrQ0QF_0VjYgNz8y-rn6rK5vtqMKkojh7bzRydAGAAIq-7lHrupIypzk4c48P6KhjE91OqY8XTOaYg06TjGmsw\"\005Login*\201\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzQ4NDMwNywiaWF0IjoxNzQzNDQxMTA3LCJzdWIiOiJkYmFkbWluIn0.**" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:37480" 2025-03-31T17:11:48.083735Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022545642777901:2645] txid# 281474976710665 Bootstrap, UserSID: dbadmin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-31T17:11:48.083753Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022545642777901:2645] txid# 281474976710665 Bootstrap, UserSID: dbadmin IsClusterAdministrator: 0 2025-03-31T17:11:48.083939Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022545642777901:2645] txid# 281474976710665 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-03-31T17:11:48.084055Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022545642777901:2645] txid# 281474976710665 
HandleResolveDatabase, UserSID: dbadmin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 1 DatabaseOwner: dbadmin 2025-03-31T17:11:48.084138Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022545642777901:2645] txid# 281474976710665 TEvNavigateKeySet requested from SchemeCache 2025-03-31T17:11:48.084390Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022545642777901:2645] txid# 281474976710665 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T17:11:48.084494Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022545642777901:2645] HANDLE EvNavigateKeySetResult, txid# 281474976710665 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T17:11:48.084560Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022545642777901:2645] txid# 281474976710665 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710665 TabletId# 72057594046644480} 2025-03-31T17:11:48.084731Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022545642777901:2645] txid# 281474976710665 HANDLE EvClientConnected 2025-03-31T17:11:48.087322Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022545642777901:2645] txid# 281474976710665 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710665} 2025-03-31T17:11:48.087377Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022545642777901:2645] txid# 281474976710665 SEND to# [1:7488022545642777900:2361] Source {TEvProposeTransactionStatus txid# 281474976710665 Status# 48} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-57 [GOOD] Test command err: Starting YDB, grpc: 9582, msgbus: 15772 2025-03-31T17:11:45.014416Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488022529797422138:2276];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:11:45.014650Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/r3ux/0002d9/r3tmp/tmpGOyFR4/pdisk_1.dat 2025-03-31T17:11:45.482215Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T17:11:45.482308Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T17:11:45.486422Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T17:11:45.489241Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9582, node 1 2025-03-31T17:11:45.615726Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T17:11:45.615750Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T17:11:45.615757Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T17:11:45.615885Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15772 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-03-31T17:11:45.838561Z node 1 :TX_PROXY DEBUG: actor# [1:7488022529797422158:2115] Handle TEvNavigate describe path dc-1 2025-03-31T17:11:45.838622Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022529797422660:2438] HANDLE EvNavigateScheme dc-1 2025-03-31T17:11:45.839759Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022529797422660:2438] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-31T17:11:45.880917Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022529797422660:2438] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-03-31T17:11:45.894913Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022529797422660:2438] Handle TEvDescribeSchemeResult Forward to# [1:7488022529797422659:2437] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-03-31T17:11:45.924491Z node 1 :TX_PROXY DEBUG: actor# [1:7488022529797422158:2115] Handle TEvProposeTransaction 2025-03-31T17:11:45.924537Z node 1 :TX_PROXY DEBUG: actor# [1:7488022529797422158:2115] TxId# 281474976710657 ProcessProposeTransaction 2025-03-31T17:11:45.924671Z node 1 :TX_PROXY DEBUG: actor# [1:7488022529797422158:2115] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7488022529797422674:2445] 2025-03-31T17:11:46.007272Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022529797422674:2445] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-31T17:11:46.007383Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022529797422674:2445] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T17:11:46.007422Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022529797422674:2445] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T17:11:46.007515Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022529797422674:2445] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-03-31T17:11:46.007918Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022529797422674:2445] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T17:11:46.008072Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022529797422674:2445] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-03-31T17:11:46.008140Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022529797422674:2445] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-03-31T17:11:46.008288Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022529797422674:2445] txid# 281474976710657 HANDLE EvClientConnected 2025-03-31T17:11:46.008948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T17:11:46.011299Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022529797422674:2445] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-03-31T17:11:46.011353Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022529797422674:2445] txid# 281474976710657 SEND to# [1:7488022529797422673:2444] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 
2025-03-31T17:11:46.025057Z node 1 :TX_PROXY DEBUG: actor# [1:7488022529797422158:2115] Handle TEvProposeTransaction 2025-03-31T17:11:46.025099Z node 1 :TX_PROXY DEBUG: actor# [1:7488022529797422158:2115] TxId# 281474976710658 ProcessProposeTransaction 2025-03-31T17:11:46.025140Z node 1 :TX_PROXY DEBUG: actor# [1:7488022529797422158:2115] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7488022534092390020:2484] 2025-03-31T17:11:46.027635Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022534092390020:2484] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-31T17:11:46.027718Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022534092390020:2484] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T17:11:46.027735Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022534092390020:2484] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T17:11:46.027815Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022534092390020:2484] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-03-31T17:11:46.028135Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022534092390020:2484] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T17:11:46.028229Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022534092390020:2484] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T17:11:46.028284Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022534092390020:2484] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-03-31T17:11:46.028447Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022534092390020:2484] txid# 281474976710658 HANDLE EvClientConnected 2025-03-31T17:11:46.028904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T17:11:46.031063Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022534092390020:2484] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-03-31T17:11:46.031162Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022534092390020:2484] txid# 281474976710658 SEND to# [1:7488022534092390019:2483] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-03-31T17:11:47.647753Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022538387357400:2335], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:47.647763Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022538387357408:2338], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:47.647868Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:47.648107Z node 1 :TX_PROXY DEBUG: actor# [1:7488022529797422158:2115] Handle TEvProposeTransaction 2025-03-31T17:11:47.648132Z node 1 :TX_PROXY DEBUG: actor# [1:7488022529797422158:2115] TxId# 281474976710659 ProcessProposeTransaction 2025-03-31T1 ... 82324830:2594] 2025-03-31T17:11:48.156257Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022542682324830:2594] txid# 281474976710662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\r\010\001\022\t\032\007dbadmin\n\025\010\000\022\021\010\001\020\200\200\002\032\007dbadmin \000\n\024\010\000\022\020\010\001\020\200\010\032\007dbadmin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:36484" 2025-03-31T17:11:48.156322Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022542682324830:2594] txid# 281474976710662 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T17:11:48.156339Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022542682324830:2594] txid# 281474976710662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T17:11:48.156399Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022542682324830:2594] txid# 281474976710662 TEvNavigateKeySet requested from SchemeCache 2025-03-31T17:11:48.156711Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022542682324830:2594] txid# 281474976710662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T17:11:48.156809Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022542682324830:2594] HANDLE EvNavigateKeySetResult, txid# 281474976710662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T17:11:48.156858Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022542682324830:2594] txid# 281474976710662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710662 TabletId# 72057594046644480} 2025-03-31T17:11:48.157007Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022542682324830:2594] txid# 281474976710662 HANDLE EvClientConnected 2025-03-31T17:11:48.157515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T17:11:48.159707Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022542682324830:2594] txid# 281474976710662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710662} 2025-03-31T17:11:48.159754Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022542682324830:2594] txid# 281474976710662 SEND to# [1:7488022542682324829:2346] Source {TEvProposeTransactionStatus txid# 281474976710662 Status# 48} 2025-03-31T17:11:48.186746Z node 1 :TX_PROXY DEBUG: actor# [1:7488022529797422158:2115] Handle TEvProposeTransaction 2025-03-31T17:11:48.186776Z node 1 :TX_PROXY DEBUG: actor# [1:7488022529797422158:2115] TxId# 281474976710663 ProcessProposeTransaction 2025-03-31T17:11:48.186814Z node 1 :TX_PROXY DEBUG: actor# [1:7488022529797422158:2115] Cookie# 0 userReqId# "" txid# 281474976710663 SEND to# [1:7488022542682324865:2615] 2025-03-31T17:11:48.189130Z 
node 1 :TX_PROXY DEBUG: Actor# [1:7488022542682324865:2615] txid# 281474976710663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:36492" 2025-03-31T17:11:48.189181Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022542682324865:2615] txid# 281474976710663 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T17:11:48.189194Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022542682324865:2615] txid# 281474976710663 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T17:11:48.189229Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022542682324865:2615] txid# 281474976710663 TEvNavigateKeySet requested from SchemeCache 2025-03-31T17:11:48.189529Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022542682324865:2615] txid# 281474976710663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T17:11:48.189616Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022542682324865:2615] HANDLE EvNavigateKeySetResult, txid# 281474976710663 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T17:11:48.189655Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022542682324865:2615] txid# 281474976710663 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710663 TabletId# 72057594046644480} 2025-03-31T17:11:48.189781Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022542682324865:2615] txid# 281474976710663 HANDLE EvClientConnected 2025-03-31T17:11:48.197689Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022542682324865:2615] txid# 281474976710663 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710663} 2025-03-31T17:11:48.197737Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022542682324865:2615] txid# 281474976710663 SEND to# [1:7488022542682324864:2348] Source {TEvProposeTransactionStatus txid# 281474976710663 Status# 48} 2025-03-31T17:11:48.205619Z node 1 :TX_PROXY DEBUG: actor# [1:7488022529797422158:2115] Handle TEvProposeTransaction 2025-03-31T17:11:48.205652Z node 1 :TX_PROXY DEBUG: actor# [1:7488022529797422158:2115] TxId# 281474976710664 ProcessProposeTransaction 2025-03-31T17:11:48.205698Z node 1 :TX_PROXY DEBUG: actor# [1:7488022529797422158:2115] Cookie# 0 userReqId# "" txid# 281474976710664 SEND to# [1:7488022542682324878:2624] 2025-03-31T17:11:48.208127Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022542682324878:2624] txid# 281474976710664 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "" NewOwner: "dbadmin" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:36498" 2025-03-31T17:11:48.208187Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022542682324878:2624] txid# 281474976710664 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T17:11:48.208203Z node 1 :TX_PROXY DEBUG: 
Actor# [1:7488022542682324878:2624] txid# 281474976710664 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T17:11:48.208245Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022542682324878:2624] txid# 281474976710664 TEvNavigateKeySet requested from SchemeCache 2025-03-31T17:11:48.208609Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022542682324878:2624] txid# 281474976710664 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T17:11:48.208714Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022542682324878:2624] HANDLE EvNavigateKeySetResult, txid# 281474976710664 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T17:11:48.208765Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022542682324878:2624] txid# 281474976710664 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710664 TabletId# 72057594046644480} 2025-03-31T17:11:48.208928Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022542682324878:2624] txid# 281474976710664 HANDLE EvClientConnected 2025-03-31T17:11:48.209261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T17:11:48.211157Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022542682324878:2624] txid# 281474976710664 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710664} 2025-03-31T17:11:48.211229Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022542682324878:2624] txid# 281474976710664 SEND to# [1:7488022542682324877:2355] Source {TEvProposeTransactionStatus txid# 281474976710664 Status# 48} 2025-03-31T17:11:48.248937Z node 1 :TX_PROXY DEBUG: actor# [1:7488022529797422158:2115] Handle TEvProposeTransaction 2025-03-31T17:11:48.248975Z node 1 :TX_PROXY DEBUG: actor# [1:7488022529797422158:2115] TxId# 281474976710665 ProcessProposeTransaction 2025-03-31T17:11:48.249020Z node 1 :TX_PROXY DEBUG: actor# [1:7488022529797422158:2115] Cookie# 0 userReqId# "" txid# 281474976710665 SEND to# [1:7488022542682324905:2636] 2025-03-31T17:11:48.251554Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022542682324905:2636] txid# 281474976710665 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\007dbadmin\022\030\022\026\n\024all-users@well-known\032\325\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzQ4NDMwOCwiaWF0IjoxNzQzNDQxMTA4LCJzdWIiOiJkYmFkbWluIn0.DVx3FCiurolf7CesY8ziYy-buXNeZ6X57vIYr6_hsZ1KpD4dcVOL3zn6HT633W_ZEPBfpuNj3ISHp2VnWce6I0q4xXZb43iCYTAwrMHsNgUj8uhyrFnxz_iCSDsURiyuzOkLG4Hkc3eR4Ebj9pfOmNc7oglg7J_mJOw58mb2kqlyqf0AjG29nT4d1up9kD_DjX5FvhQRkBJSRZFHFPuMOaW_oC30d7r0wnVGcGl3nf4vhdpDju6c8w_cyXR26R0k3RMy0fhmxkY48Yu5Blxhk-KiO2i6YSy-_Kq5M9ncfB13R8aXKLQ3L2U6dAQwDLRY3aPYAoUCYqPP2HjPbP9UBQ\"\005Login*\201\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzQ4NDMwOCwiaWF0IjoxNzQzNDQxMTA4LCJzdWIiOiJkYmFkbWluIn0.**" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:36530" 2025-03-31T17:11:48.251621Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022542682324905:2636] txid# 281474976710665 Bootstrap, UserSID: dbadmin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 
2025-03-31T17:11:48.251637Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022542682324905:2636] txid# 281474976710665 Bootstrap, UserSID: dbadmin IsClusterAdministrator: 0 2025-03-31T17:11:48.251696Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022542682324905:2636] txid# 281474976710665 TEvNavigateKeySet requested from SchemeCache 2025-03-31T17:11:48.252012Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022542682324905:2636] txid# 281474976710665 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T17:11:48.252141Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022542682324905:2636] HANDLE EvNavigateKeySetResult, txid# 281474976710665 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T17:11:48.252204Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022542682324905:2636] txid# 281474976710665 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710665 TabletId# 72057594046644480} 2025-03-31T17:11:48.252401Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022542682324905:2636] txid# 281474976710665 HANDLE EvClientConnected 2025-03-31T17:11:48.254890Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022542682324905:2636] txid# 281474976710665 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710665} 2025-03-31T17:11:48.254927Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022542682324905:2636] txid# 281474976710665 SEND to# [1:7488022542682324904:2361] Source {TEvProposeTransactionStatus txid# 281474976710665 Status# 48} |82.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-58 [GOOD] Test command err: Starting YDB, grpc: 21154, msgbus: 20539 2025-03-31T17:11:45.049303Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488022530627205046:2276];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:11:45.049385Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/r3ux/0002d4/r3tmp/tmpQoBrZu/pdisk_1.dat 2025-03-31T17:11:45.438724Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T17:11:45.454258Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T17:11:45.454352Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T17:11:45.459948Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21154, node 1 2025-03-31T17:11:45.542955Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T17:11:45.542977Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T17:11:45.542984Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T17:11:45.543128Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20539 WaitRootIsUp 
'dc-1'... TClient::Ls request: dc-1 2025-03-31T17:11:45.784885Z node 1 :TX_PROXY DEBUG: actor# [1:7488022530627205092:2133] Handle TEvNavigate describe path dc-1 2025-03-31T17:11:45.784938Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022530627205560:2432] HANDLE EvNavigateScheme dc-1 2025-03-31T17:11:45.785997Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022530627205560:2432] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-31T17:11:45.828626Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022530627205560:2432] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-03-31T17:11:45.837900Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022530627205560:2432] Handle TEvDescribeSchemeResult Forward to# [1:7488022530627205559:2431] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-03-31T17:11:45.858508Z node 1 :TX_PROXY DEBUG: actor# [1:7488022530627205092:2133] Handle TEvProposeTransaction 2025-03-31T17:11:45.858538Z node 1 :TX_PROXY DEBUG: actor# [1:7488022530627205092:2133] TxId# 281474976710657 ProcessProposeTransaction 2025-03-31T17:11:45.858683Z node 1 :TX_PROXY DEBUG: actor# [1:7488022530627205092:2133] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7488022530627205574:2439] 2025-03-31T17:11:45.925957Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022530627205574:2439] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-31T17:11:45.926042Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022530627205574:2439] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T17:11:45.926062Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022530627205574:2439] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T17:11:45.926120Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022530627205574:2439] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-03-31T17:11:45.926575Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022530627205574:2439] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T17:11:45.926737Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022530627205574:2439] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-03-31T17:11:45.926795Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022530627205574:2439] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-03-31T17:11:45.926939Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022530627205574:2439] txid# 281474976710657 HANDLE EvClientConnected 2025-03-31T17:11:45.927679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T17:11:45.930213Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022530627205574:2439] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-03-31T17:11:45.930264Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022530627205574:2439] txid# 281474976710657 SEND to# [1:7488022530627205573:2438] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 
2025-03-31T17:11:45.942259Z node 1 :TX_PROXY DEBUG: actor# [1:7488022530627205092:2133] Handle TEvProposeTransaction 2025-03-31T17:11:45.942281Z node 1 :TX_PROXY DEBUG: actor# [1:7488022530627205092:2133] TxId# 281474976710658 ProcessProposeTransaction 2025-03-31T17:11:45.942310Z node 1 :TX_PROXY DEBUG: actor# [1:7488022530627205092:2133] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7488022530627205612:2473] 2025-03-31T17:11:45.944636Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022530627205612:2473] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-31T17:11:45.944708Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022530627205612:2473] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T17:11:45.944727Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022530627205612:2473] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T17:11:45.944816Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022530627205612:2473] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-03-31T17:11:45.945113Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022530627205612:2473] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T17:11:45.945218Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022530627205612:2473] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T17:11:45.945263Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022530627205612:2473] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-03-31T17:11:45.945422Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022530627205612:2473] txid# 281474976710658 HANDLE EvClientConnected 2025-03-31T17:11:45.945881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T17:11:45.947856Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022530627205612:2473] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-03-31T17:11:45.947904Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022530627205612:2473] txid# 281474976710658 SEND to# [1:7488022530627205611:2472] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-03-31T17:11:47.657187Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022539217140305:2338], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:47.657193Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022539217140297:2335], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:47.657295Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:47.657565Z node 1 :TX_PROXY DEBUG: actor# [1:7488022530627205092:2133] Handle TEvProposeTransaction 2025-03-31T17:11:47.657590Z node 1 :TX_PROXY DEBUG: actor# [1:7488022530627205092:2133] TxId# 281474976710659 ProcessProposeTransaction 2025-03-31 ... 12107732:2588] 2025-03-31T17:11:48.139908Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022543512107732:2588] txid# 281474976710662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\r\010\001\022\t\032\007dbadmin\n\025\010\000\022\021\010\001\020\200\200\002\032\007dbadmin \000\n\024\010\000\022\020\010\001\020\200\010\032\007dbadmin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:56998" 2025-03-31T17:11:48.139975Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022543512107732:2588] txid# 281474976710662 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T17:11:48.139992Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022543512107732:2588] txid# 281474976710662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T17:11:48.140039Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022543512107732:2588] txid# 281474976710662 TEvNavigateKeySet requested from SchemeCache 2025-03-31T17:11:48.140462Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022543512107732:2588] txid# 281474976710662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T17:11:48.140574Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022543512107732:2588] HANDLE EvNavigateKeySetResult, txid# 281474976710662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T17:11:48.140641Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022543512107732:2588] txid# 281474976710662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710662 TabletId# 72057594046644480} 2025-03-31T17:11:48.140802Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022543512107732:2588] txid# 281474976710662 HANDLE EvClientConnected 2025-03-31T17:11:48.141319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T17:11:48.144045Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022543512107732:2588] txid# 281474976710662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710662} 2025-03-31T17:11:48.144108Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022543512107732:2588] txid# 281474976710662 SEND to# [1:7488022543512107731:2346] Source {TEvProposeTransactionStatus txid# 281474976710662 Status# 48} 2025-03-31T17:11:48.177594Z node 1 :TX_PROXY DEBUG: actor# [1:7488022530627205092:2133] Handle TEvProposeTransaction 2025-03-31T17:11:48.177628Z node 1 :TX_PROXY DEBUG: actor# [1:7488022530627205092:2133] TxId# 281474976710663 ProcessProposeTransaction 2025-03-31T17:11:48.177668Z node 1 :TX_PROXY DEBUG: actor# [1:7488022530627205092:2133] Cookie# 0 userReqId# "" txid# 281474976710663 SEND to# [1:7488022543512107767:2609] 2025-03-31T17:11:48.180051Z
node 1 :TX_PROXY DEBUG: Actor# [1:7488022543512107767:2609] txid# 281474976710663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:57010" 2025-03-31T17:11:48.180113Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022543512107767:2609] txid# 281474976710663 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T17:11:48.180128Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022543512107767:2609] txid# 281474976710663 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T17:11:48.180181Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022543512107767:2609] txid# 281474976710663 TEvNavigateKeySet requested from SchemeCache 2025-03-31T17:11:48.180476Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022543512107767:2609] txid# 281474976710663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T17:11:48.180568Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022543512107767:2609] HANDLE EvNavigateKeySetResult, txid# 281474976710663 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T17:11:48.180628Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022543512107767:2609] txid# 281474976710663 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710663 TabletId# 72057594046644480} 2025-03-31T17:11:48.180767Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022543512107767:2609] txid# 281474976710663 HANDLE EvClientConnected 2025-03-31T17:11:48.188165Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022543512107767:2609] txid# 281474976710663 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710663} 2025-03-31T17:11:48.188213Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022543512107767:2609] txid# 281474976710663 SEND to# [1:7488022543512107766:2348] Source {TEvProposeTransactionStatus txid# 281474976710663 Status# 48} 2025-03-31T17:11:48.194125Z node 1 :TX_PROXY DEBUG: actor# [1:7488022530627205092:2133] Handle TEvProposeTransaction 2025-03-31T17:11:48.194153Z node 1 :TX_PROXY DEBUG: actor# [1:7488022530627205092:2133] TxId# 281474976710664 ProcessProposeTransaction 2025-03-31T17:11:48.194188Z node 1 :TX_PROXY DEBUG: actor# [1:7488022530627205092:2133] Cookie# 0 userReqId# "" txid# 281474976710664 SEND to# [1:7488022543512107780:2618] 2025-03-31T17:11:48.196722Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022543512107780:2618] txid# 281474976710664 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "" NewOwner: "dbadmin" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:57016" 2025-03-31T17:11:48.196804Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022543512107780:2618] txid# 281474976710664 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-31T17:11:48.196825Z node 1 :TX_PROXY DEBUG: 
Actor# [1:7488022543512107780:2618] txid# 281474976710664 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T17:11:48.196869Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022543512107780:2618] txid# 281474976710664 TEvNavigateKeySet requested from SchemeCache 2025-03-31T17:11:48.197206Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022543512107780:2618] txid# 281474976710664 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T17:11:48.197312Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022543512107780:2618] HANDLE EvNavigateKeySetResult, txid# 281474976710664 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T17:11:48.197360Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022543512107780:2618] txid# 281474976710664 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710664 TabletId# 72057594046644480} 2025-03-31T17:11:48.197541Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022543512107780:2618] txid# 281474976710664 HANDLE EvClientConnected 2025-03-31T17:11:48.197862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T17:11:48.199788Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022543512107780:2618] txid# 281474976710664 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710664} 2025-03-31T17:11:48.199833Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022543512107780:2618] txid# 281474976710664 SEND to# [1:7488022543512107779:2355] Source {TEvProposeTransactionStatus txid# 281474976710664 Status# 48} 2025-03-31T17:11:48.234978Z node 1 :TX_PROXY DEBUG: actor# [1:7488022530627205092:2133] Handle TEvProposeTransaction 2025-03-31T17:11:48.235014Z node 1 :TX_PROXY DEBUG: actor# [1:7488022530627205092:2133] TxId# 281474976710665 ProcessProposeTransaction 2025-03-31T17:11:48.235060Z node 1 :TX_PROXY DEBUG: actor# [1:7488022530627205092:2133] Cookie# 0 userReqId# "" txid# 281474976710665 SEND to# [1:7488022543512107807:2630] 2025-03-31T17:11:48.237489Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022543512107807:2630] txid# 281474976710665 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\007dbadmin\022\030\022\026\n\024all-users@well-known\032\325\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzQ4NDMwOCwiaWF0IjoxNzQzNDQxMTA4LCJzdWIiOiJkYmFkbWluIn0.b8QSmN0XtX2cKWo90LRkEgK-d-1PJF80cE-D1riN2WV8qYPK0EnaiCwSMRz_pPsMK99hA_cOXEhQwqpQ0nlhW1vCKE4nSv5w-BRUL567mUkFCp9QtNNgPseAQ7Z-jg0jPc399fGiENSpQ5pwbiXu6jO-gvcVDE6kAI7c_deeZEqVQNpil3ZdUcwrWKX7bcK2Dl1FB4MR9fwi3xHn4o73z0ouc2-eb9H5NjR3zvZ-0OoNz81bE6G5eOUCPKpkqzuAhDqclyQGlSUiwt2ubhROIM6-wEu5-ljEtjTFtj7vJUYoPuuTElSyOBnUbGOtWDBn3jcz18oNQjTPel6e68f5-Q\"\005Login*\201\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzQ4NDMwOCwiaWF0IjoxNzQzNDQxMTA4LCJzdWIiOiJkYmFkbWluIn0.**" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:57054" 2025-03-31T17:11:48.237565Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022543512107807:2630] txid# 281474976710665 Bootstrap, UserSID: dbadmin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 
2025-03-31T17:11:48.237581Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022543512107807:2630] txid# 281474976710665 Bootstrap, UserSID: dbadmin IsClusterAdministrator: 0 2025-03-31T17:11:48.237643Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022543512107807:2630] txid# 281474976710665 TEvNavigateKeySet requested from SchemeCache 2025-03-31T17:11:48.237991Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022543512107807:2630] txid# 281474976710665 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T17:11:48.238130Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022543512107807:2630] HANDLE EvNavigateKeySetResult, txid# 281474976710665 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T17:11:48.238196Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022543512107807:2630] txid# 281474976710665 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710665 TabletId# 72057594046644480} 2025-03-31T17:11:48.238356Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022543512107807:2630] txid# 281474976710665 HANDLE EvClientConnected 2025-03-31T17:11:48.240751Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022543512107807:2630] txid# 281474976710665 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710665} 2025-03-31T17:11:48.240803Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022543512107807:2630] txid# 281474976710665 SEND to# [1:7488022543512107806:2361] Source {TEvProposeTransactionStatus txid# 281474976710665 Status# 48} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-59 [GOOD] Test command err: Starting YDB, grpc: 20760, msgbus: 6494 2025-03-31T17:11:44.982769Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488022526355903244:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:11:44.983210Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/r3ux/0002db/r3tmp/tmp7xEdLr/pdisk_1.dat 2025-03-31T17:11:45.422332Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T17:11:45.422445Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T17:11:45.426544Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T17:11:45.462213Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20760, node 1 2025-03-31T17:11:45.539859Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-03-31T17:11:45.561796Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T17:11:45.561816Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T17:11:45.561823Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T17:11:45.561933Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6494 WaitRootIsUp 
'dc-1'... TClient::Ls request: dc-1 2025-03-31T17:11:45.798656Z node 1 :TX_PROXY DEBUG: actor# [1:7488022530650870802:2139] Handle TEvNavigate describe path dc-1 2025-03-31T17:11:45.798713Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022530650871270:2438] HANDLE EvNavigateScheme dc-1 2025-03-31T17:11:45.799604Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022530650871270:2438] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-31T17:11:45.849423Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022530650871270:2438] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-03-31T17:11:45.860299Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022530650871270:2438] Handle TEvDescribeSchemeResult Forward to# [1:7488022530650871269:2437] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-03-31T17:11:45.874979Z node 1 :TX_PROXY DEBUG: actor# [1:7488022530650870802:2139] Handle TEvProposeTransaction 2025-03-31T17:11:45.875021Z node 1 :TX_PROXY DEBUG: actor# [1:7488022530650870802:2139] TxId# 281474976710657 ProcessProposeTransaction 2025-03-31T17:11:45.875154Z node 1 :TX_PROXY DEBUG: actor# [1:7488022530650870802:2139] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7488022530650871283:2444] 2025-03-31T17:11:45.976205Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022530650871283:2444] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-31T17:11:45.976337Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022530650871283:2444] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-03-31T17:11:45.976358Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022530650871283:2444] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T17:11:45.976433Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022530650871283:2444] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-03-31T17:11:45.976840Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022530650871283:2444] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T17:11:45.977029Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022530650871283:2444] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-03-31T17:11:45.977110Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022530650871283:2444] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-03-31T17:11:45.977253Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022530650871283:2444] txid# 281474976710657 HANDLE EvClientConnected 2025-03-31T17:11:45.977899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-31T17:11:45.979964Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022530650871283:2444] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-03-31T17:11:45.980029Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022530650871283:2444] txid# 281474976710657 SEND to# [1:7488022530650871282:2443] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 
2025-03-31T17:11:45.993337Z node 1 :TX_PROXY DEBUG: actor# [1:7488022530650870802:2139] Handle TEvProposeTransaction 2025-03-31T17:11:45.993364Z node 1 :TX_PROXY DEBUG: actor# [1:7488022530650870802:2139] TxId# 281474976710658 ProcessProposeTransaction 2025-03-31T17:11:45.993397Z node 1 :TX_PROXY DEBUG: actor# [1:7488022530650870802:2139] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7488022530650871338:2488] 2025-03-31T17:11:45.996011Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022530650871338:2488] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-31T17:11:45.996076Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022530650871338:2488] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-03-31T17:11:45.996092Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022530650871338:2488] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T17:11:45.996145Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022530650871338:2488] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-03-31T17:11:45.996466Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022530650871338:2488] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T17:11:45.996574Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022530650871338:2488] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T17:11:45.996617Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022530650871338:2488] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-03-31T17:11:45.996754Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022530650871338:2488] txid# 281474976710658 HANDLE EvClientConnected 2025-03-31T17:11:45.997295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T17:11:45.999538Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022530650871338:2488] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-03-31T17:11:45.999594Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022530650871338:2488] txid# 281474976710658 SEND to# [1:7488022530650871337:2487] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-03-31T17:11:47.634783Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022539240806020:2338], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:47.634810Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022539240806012:2335], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:47.634905Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:47.635227Z node 1 :TX_PROXY DEBUG: actor# [1:7488022530650870802:2139] Handle TEvProposeTransaction 2025-03-31T17:11:47.635254Z node 1 :TX_PROXY DEBUG: actor# [1 ... 1:7488022530650870802:2139] Cookie# 0 userReqId# "" txid# 281474976710662 SEND to# [1:7488022543535773455:2607] 2025-03-31T17:11:48.106538Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022543535773455:2607] txid# 281474976710662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\r\010\001\022\t\032\007dbadmin\n\025\010\000\022\021\010\001\020\200\200\002\032\007dbadmin \000\n\024\010\000\022\020\010\001\020\200\010\032\007dbadmin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:60844" 2025-03-31T17:11:48.106622Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022543535773455:2607] txid# 281474976710662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-03-31T17:11:48.106639Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022543535773455:2607] txid# 281474976710662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T17:11:48.106695Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022543535773455:2607] txid# 281474976710662 TEvNavigateKeySet requested from SchemeCache 2025-03-31T17:11:48.107009Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022543535773455:2607] txid# 281474976710662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T17:11:48.107138Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022543535773455:2607] HANDLE EvNavigateKeySetResult, txid# 281474976710662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T17:11:48.107213Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022543535773455:2607] txid# 281474976710662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710662 TabletId# 72057594046644480} 2025-03-31T17:11:48.107394Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022543535773455:2607] txid# 281474976710662 HANDLE EvClientConnected 2025-03-31T17:11:48.107847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T17:11:48.110138Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022543535773455:2607] txid# 281474976710662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710662} 2025-03-31T17:11:48.110192Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022543535773455:2607] txid# 281474976710662 SEND to# [1:7488022543535773454:2346] Source {TEvProposeTransactionStatus txid# 281474976710662 Status# 48} 2025-03-31T17:11:48.146399Z node 1 :TX_PROXY DEBUG: actor# [1:7488022530650870802:2139] Handle TEvProposeTransaction 2025-03-31T17:11:48.146452Z node 1 :TX_PROXY DEBUG: actor# [1:7488022530650870802:2139] TxId# 281474976710663 ProcessProposeTransaction 2025-03-31T17:11:48.146518Z node 1 :TX_PROXY DEBUG: actor# [1:7488022530650870802:2139] Cookie# 0 userReqId# "" txid# 281474976710663 SEND to# [1:7488022543535773490:2628]
2025-03-31T17:11:48.148404Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022543535773490:2628] txid# 281474976710663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:60848" 2025-03-31T17:11:48.148459Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022543535773490:2628] txid# 281474976710663 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-03-31T17:11:48.148470Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022543535773490:2628] txid# 281474976710663 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T17:11:48.148500Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022543535773490:2628] txid# 281474976710663 TEvNavigateKeySet requested from SchemeCache 2025-03-31T17:11:48.148767Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022543535773490:2628] txid# 281474976710663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T17:11:48.148884Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022543535773490:2628] HANDLE EvNavigateKeySetResult, txid# 281474976710663 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T17:11:48.148937Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022543535773490:2628] txid# 281474976710663 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710663 TabletId# 72057594046644480} 2025-03-31T17:11:48.149112Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022543535773490:2628] txid# 281474976710663 HANDLE EvClientConnected 2025-03-31T17:11:48.156111Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022543535773490:2628] txid# 281474976710663 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710663} 2025-03-31T17:11:48.156176Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022543535773490:2628] txid# 281474976710663 SEND to# [1:7488022543535773489:2348] Source {TEvProposeTransactionStatus txid# 281474976710663 Status# 48} 2025-03-31T17:11:48.162564Z node 1 :TX_PROXY DEBUG: actor# [1:7488022530650870802:2139] Handle TEvProposeTransaction 2025-03-31T17:11:48.162597Z node 1 :TX_PROXY DEBUG: actor# [1:7488022530650870802:2139] TxId# 281474976710664 ProcessProposeTransaction 2025-03-31T17:11:48.162638Z node 1 :TX_PROXY DEBUG: actor# [1:7488022530650870802:2139] Cookie# 0 userReqId# "" txid# 281474976710664 SEND to# [1:7488022543535773503:2637] 2025-03-31T17:11:48.164871Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022543535773503:2637] txid# 281474976710664 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "" NewOwner: "dbadmin" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:60862" 2025-03-31T17:11:48.164927Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022543535773503:2637] txid# 281474976710664 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-03-31T17:11:48.164942Z 
node 1 :TX_PROXY DEBUG: Actor# [1:7488022543535773503:2637] txid# 281474976710664 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-31T17:11:48.164990Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022543535773503:2637] txid# 281474976710664 TEvNavigateKeySet requested from SchemeCache 2025-03-31T17:11:48.165332Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022543535773503:2637] txid# 281474976710664 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T17:11:48.165437Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022543535773503:2637] HANDLE EvNavigateKeySetResult, txid# 281474976710664 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-31T17:11:48.165533Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022543535773503:2637] txid# 281474976710664 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710664 TabletId# 72057594046644480} 2025-03-31T17:11:48.165698Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022543535773503:2637] txid# 281474976710664 HANDLE EvClientConnected 2025-03-31T17:11:48.166107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T17:11:48.168518Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022543535773503:2637] txid# 281474976710664 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710664} 2025-03-31T17:11:48.168570Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022543535773503:2637] txid# 281474976710664 SEND to# [1:7488022543535773502:2355] Source {TEvProposeTransactionStatus txid# 281474976710664 Status# 48} 2025-03-31T17:11:48.205883Z node 1 :TX_PROXY DEBUG: actor# [1:7488022530650870802:2139] Handle TEvProposeTransaction 2025-03-31T17:11:48.205944Z node 1 :TX_PROXY DEBUG: actor# [1:7488022530650870802:2139] TxId# 281474976710665 ProcessProposeTransaction 2025-03-31T17:11:48.205988Z node 1 :TX_PROXY DEBUG: actor# [1:7488022530650870802:2139] Cookie# 0 userReqId# "" txid# 281474976710665 SEND to# [1:7488022543535773530:2649] 2025-03-31T17:11:48.208397Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022543535773530:2649] txid# 281474976710665 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\007dbadmin\022\030\022\026\n\024all-users@well-known\032\325\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzQ4NDMwOCwiaWF0IjoxNzQzNDQxMTA4LCJzdWIiOiJkYmFkbWluIn0.fZO7_gSlYitHiiCJy54aIBEY0L-lWf1aHWTT5GADdgYbvqwXWblrjO6ACXBliMC-JTFEpz_IO3-AulvJqu0Ef3VftMFM6dXj36EvmoAMlPRmjIx3rwOwdDwF1oDuiy6bAFHZsSQitbH4KcphC83uDrZHq7hkQmw5mRepM7zD9Cg7zpLbep6yabJy2KdFVph6UXZZg1-pnr8-asoKjGYAuVy5yL64Ops6SeWmXoxl4XsJ5hkdNNr806FtVCs47QIzMO1p5B0U-l4T3ahFub979JFIJP1Hes_GCJSNH-xWpF0we8XxuLAevzDNj6U4GEcniIyWnE-TYNXbSXzZFaam-w\"\005Login*\201\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzQ4NDMwOCwiaWF0IjoxNzQzNDQxMTA4LCJzdWIiOiJkYmFkbWluIn0.**" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:60892" 2025-03-31T17:11:48.208456Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022543535773530:2649] txid# 281474976710665 Bootstrap, UserSID: dbadmin CheckAdministrator: 1 
CheckDatabaseAdministrator: 0 2025-03-31T17:11:48.208503Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022543535773530:2649] txid# 281474976710665 Bootstrap, UserSID: dbadmin IsClusterAdministrator: 0 2025-03-31T17:11:48.208554Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022543535773530:2649] txid# 281474976710665 TEvNavigateKeySet requested from SchemeCache 2025-03-31T17:11:48.208791Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022543535773530:2649] txid# 281474976710665 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-31T17:11:48.208827Z node 1 :TX_PROXY ERROR: Actor# [1:7488022543535773530:2649] txid# 281474976710665, Access denied for dbadmin, attempt to manage user 2025-03-31T17:11:48.208945Z node 1 :TX_PROXY ERROR: Actor# [1:7488022543535773530:2649] txid# 281474976710665, issues: { message: "Access denied for dbadmin" issue_code: 200000 severity: 1 } 2025-03-31T17:11:48.208989Z node 1 :TX_PROXY DEBUG: Actor# [1:7488022543535773530:2649] txid# 281474976710665 SEND to# [1:7488022543535773529:2361] Source {TEvProposeTransactionStatus Status# 5} 2025-03-31T17:11:48.218121Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjRlYzhhLWI2YzY5MjIyLTE0MzRiNDZjLTMxN2U1YmIx, ActorId: [1:7488022543535773520:2361], ActorState: ExecuteState, TraceId: 01jqpk07718mg7431ra44ngnxg, Create QueryResponse for error on request, msg: 2025-03-31T17:11:48.218550Z node 1 :TX_PROXY DEBUG: actor# [1:7488022530650870802:2139] Handle TEvExecuteKqpTransaction 2025-03-31T17:11:48.218583Z node 1 :TX_PROXY DEBUG: actor# [1:7488022530650870802:2139] TxId# 281474976710666 ProcessProposeKqpTransaction |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> SystemView::PartitionStatsFields [GOOD] |82.7%| [TA] $(B)/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/{meta.json ... results_accumulator.log} |82.8%| [TA] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> ReadIteratorExternalBlobs::ExtBlobsEmptyTable [GOOD] Test command err: 2025-03-31T17:11:47.617623Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T17:11:47.617881Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T17:11:47.618037Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/r3ux/0002eb/r3tmp/tmpvwJdhZ/pdisk_1.dat 2025-03-31T17:11:48.008635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T17:11:48.047459Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T17:11:48.086866Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T17:11:48.086987Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T17:11:48.098473Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T17:11:48.180125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T17:11:48.538176Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:744:2625], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:48.538312Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:754:2630], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:48.538755Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:48.544637Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T17:11:48.707545Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:758:2633], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T17:11:48.776407Z node 1 :TX_PROXY ERROR: Actor# [1:832:2676] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T17:11:49.432964Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqpk07hq80fmft4qxvyqaj2m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjBjMGQxMzAtZGFmNTFkYjMtY2UxNjBiNDMtNGM1YmVjODI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root |82.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |83.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |83.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |83.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |83.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |83.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |83.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |83.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |83.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |83.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |83.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |83.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |83.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |83.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |83.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest >> SystemView::PartitionStatsFields [GOOD] Test command err: 2025-03-31T17:11:41.757044Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488022514833732675:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:11:41.757100Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/r3ux/00024e/r3tmp/tmpI4rVmF/pdisk_1.dat 2025-03-31T17:11:42.210706Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T17:11:42.216747Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T17:11:42.216877Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T17:11:42.222975Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15889, node 1 2025-03-31T17:11:42.228069Z node 1 :GRPC_SERVER
WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T17:11:42.228256Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T17:11:42.277359Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T17:11:42.277393Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T17:11:42.277403Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T17:11:42.277577Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30019 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T17:11:42.604924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T17:11:42.654929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T17:11:44.952930Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022527718635578:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:44.952948Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022527718635563:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:44.953039Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:44.958055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-31T17:11:44.982227Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488022527718635580:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-31T17:11:45.038959Z node 1 :TX_PROXY ERROR: Actor# [1:7488022532013602945:2703] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T17:11:45.464872Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jqpk041n6f8sbqgse73fm6s9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGI1YmRmM2ItZGFkODFiMTgtYzczNGRhY2EtOWI3NTc4OTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:11:45.620591Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01jqpk04js9d2qqet6a88z6tj9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGQ4NTk3MWItZGY0ZDM4ZDYtYTE0NjhhNDItZWI4ZTdmODI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:11:45.623264Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7488022532013603018:2358], owner: [1:7488022532013603015:2356], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2025-03-31T17:11:45.624060Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7488022532013603018:2358], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-03-31T17:11:45.624860Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7488022532013603018:2358], row count: 1, finished: 1 2025-03-31T17:11:45.624895Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7488022532013603018:2358], owner: [1:7488022532013603015:2356], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2025-03-31T17:11:45.629901Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743441105607, txId: 281474976710662] shutting down 2025-03-31T17:11:46.691367Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jqpk05pcehafse7vfyx34gzw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2Y2YjI4M2ItN2Q1ZGZlMmItYzYyYzExMmYtNjliMWZlZjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-31T17:11:46.692447Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7488022536308570363:2369], owner: [1:7488022536308570359:2367], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2025-03-31T17:11:46.692952Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7488022536308570363:2369], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-03-31T17:11:46.693180Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7488022536308570363:2369], row count: 1, finished: 1 2025-03-31T17:11:46.693219Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7488022536308570363:2369], owner: [1:7488022536308570359:2367], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2025-03-31T17:11:46.695071Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743441106690, txId: 281474976710664] shutting down 2025-03-31T17:11:46.757438Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488022514833732675:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:11:46.757521Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T17:11:47.770492Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jqpk06qq1hx1mb82ymyttpks, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTFmMTYyNDctYzE5ZGZmMjAtMTlkMjFmOTItYWMxNzY1ZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:11:47.771866Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7488022540603537715:2383], owner: [1:7488022540603537711:2381], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2025-03-31T17:11:47.772314Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7488022540603537715:2383], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-03-31T17:11:47.772531Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7488022540603537715:2383], row count: 1, finished: 1 2025-03-31T17:11:47.772574Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7488022540603537715:2383], owner: [1:7488022540603537711:2381], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2025-03-31T17:11:47.775373Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743441107769, txId: 281474976710666] shutting down 2025-03-31T17:11:48.859022Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710669. Ctx: { TraceId: 01jqpk07sdc4h4eq7f0e0k07dn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODdiYWNmMGEtN2RjMDNmYTEtNWUwYTE4MGYtOTkxMmIwMjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-31T17:11:48.860402Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7488022544898505061:2397], owner: [1:7488022544898505057:2395], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2025-03-31T17:11:48.860897Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7488022544898505061:2397], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-03-31T17:11:48.861193Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7488022544898505061:2397], row count: 1, finished: 1 2025-03-31T17:11:48.861235Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7488022544898505061:2397], owner: [1:7488022544898505057:2395], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2025-03-31T17:11:48.863878Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743441108858, txId: 281474976710668] shutting down 2025-03-31T17:11:49.026418Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710671. Ctx: { TraceId: 01jqpk07w6952xsk7m9cp7nehp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Mjg4MzMyYzQtYjcyMGM2YWYtYzNjNjA0YjctYzVkYTA5ZmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:11:49.028633Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7488022549193472394:2407], owner: [1:7488022549193472390:2405], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2025-03-31T17:11:49.029120Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7488022549193472394:2407], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-03-31T17:11:49.029394Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7488022549193472394:2407], row count: 1, finished: 1 2025-03-31T17:11:49.029431Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7488022549193472394:2407], owner: [1:7488022549193472390:2405], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2025-03-31T17:11:49.032942Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743441109025, txId: 281474976710670] shutting down |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |84.1%| [TA] $(B)/ydb/core/sys_view/ut/test-results/unittest/{meta.json ... results_accumulator.log} |84.1%| [TA] {RESULT} $(B)/ydb/core/sys_view/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test_sql_streaming.py::test[suites-GroupByHoppingWindowTimeExtractorUnusedColumns-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHoppingWithDataWatermarks-default.txt] >> test_sql_streaming.py::test[suites-GroupByHop-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-ReadTopicWithMetadataWithFilter-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-ReadTopicWithSchema-default.txt] >> test_sql_streaming.py::test[suites-GroupByHopByStringKey-default.txt] >> test_sql_streaming.py::test[suites-GroupByHopWithDataWatermarks-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHoppingWindow-default.txt] >> TPersQueueTest::DisableDeduplication [GOOD] >> KqpSnapshotIsolation::TConflictWriteOltp [FAIL] |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |85.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |85.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |85.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |85.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest >> TPersQueueTest::DisableDeduplication [GOOD] Test command err: 2025-03-31T17:11:41.862810Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488022513435953239:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:11:41.862907Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T17:11:41.963594Z node 2 
:METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488022514784666886:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:11:41.967656Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T17:11:42.130203Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-31T17:11:42.155180Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/r3ux/00016d/r3tmp/tmpd3lBM8/pdisk_1.dat 2025-03-31T17:11:42.511452Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T17:11:42.512564Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T17:11:42.517530Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T17:11:42.517644Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T17:11:42.522285Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T17:11:42.522518Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T17:11:42.523429Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T17:11:42.569151Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19490, node 1 2025-03-31T17:11:42.617126Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T17:11:42.617149Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T17:11:42.812630Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/r3ux/00016d/r3tmp/yandex0ePUJ4.tmp 2025-03-31T17:11:42.812672Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/r3ux/00016d/r3tmp/yandex0ePUJ4.tmp 2025-03-31T17:11:42.814046Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/r3ux/00016d/r3tmp/yandex0ePUJ4.tmp 2025-03-31T17:11:42.814202Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T17:11:43.057842Z INFO: TTestServer started on Port 7928 GrpcPort 19490 TClient is connected to server localhost:7928 PQClient connected to localhost:19490 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T17:11:43.338332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T17:11:43.412749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-03-31T17:11:45.253413Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488022531964536482:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:45.255351Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488022531964536470:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:45.255473Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:45.264459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480 2025-03-31T17:11:45.313048Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488022531964536484:2317], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2025-03-31T17:11:45.417064Z node 2 :TX_PROXY ERROR: Actor# [2:7488022531964536511:2179] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T17:11:45.900622Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7488022531964536525:2321], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T17:11:45.900649Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488022530615823628:2344], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T17:11:45.900939Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZDkwZGQ5NTctMzliM2E0OWEtNDM1YjA1NjYtNmQ1ZGRmNmU=, ActorId: [2:7488022531964536468:2312], ActorState: ExecuteState, TraceId: 01jqpk04b1awde6tjq34chaskj, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T17:11:45.905054Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T17:11:45.907186Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZmRhMTIzYWYtYjM0ZjRhOWYtN2JhMjRmYi04ZjNhMjc2NA==, ActorId: [1:7488022530615823578:2336], ActorState: ExecuteState, TraceId: 01jqpk04gg0c1r4x32rvahdj8b, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T17:11:45.907538Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T17:11:45.959020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-31T17:11:46.041150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-31T17:11:46.139227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-03-31T17:11:46.476780Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jqpk058s5vxc9mn4gtpccj4h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGZjMTViNjctOTIzYTIzMGYtYTA2NmEyZTEtNWI2MmNhYzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. 
Subcribe to ClusterTracker from [1:7488022534910791378:3091] 2025-03-31T17:11:46.861612Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488022513435953239:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:11:46.861690Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T17:11:46.958366Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7488022514784666886:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:11:46.958450Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok PQ Client: create topic: rt3.dc1--topic1 with 3 partitions CallPersQueueGRPC request to localhost:19490 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--topic1" } } 2025-03-31T17:11:52.591617Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic1, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:19490 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--topic1" NumPartitions: 3 Config { Partit ... ion shared/debug_1_1_12848638148315327727_v1 grpc read done: success# 1, data# { start_partition_session_response { partition_session_id: 3 } } 2025-03-31T17:11:54.111367Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_1_1_12848638148315327727_v1 read done: guid# c506a714-341950f8-d7ec54e1-a34f7a41, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 2(assignId:1), size# 496 2025-03-31T17:11:54.111404Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_1_1_12848638148315327727_v1 response to read: guid# c506a714-341950f8-d7ec54e1-a34f7a41 2025-03-31T17:11:54.111592Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_1_1_12848638148315327727_v1 Process answer. Aval parts: 0 2025-03-31T17:11:54.111718Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/debug session shared/debug_1_1_12848638148315327727_v1 got StartRead from client: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:3), readOffset# 0, commitOffset# (empty maybe) 2025-03-31T17:11:54.111988Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_1_1_12848638148315327727_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1(assignId:2) initDone 1 event { CmdReadResult { MaxOffset: 3 Result { Offset: 0 Data: "... 99 bytes ..." SourceId: "" SeqNo: 1 WriteTimestampMS: 1743441113949 CreateTimestampMS: 1743441113945 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 1 Data: "... 99 bytes ..." SourceId: "" SeqNo: 2 WriteTimestampMS: 1743441113952 CreateTimestampMS: 1743441113945 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 2 Data: "... 99 bytes ..." 
SourceId: "" SeqNo: 3 WriteTimestampMS: 1743441113953 CreateTimestampMS: 1743441113945 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 7 RealReadOffset: 2 WaitQuotaTimeMs: 0 EndOffset: 3 StartOffset: 0 } Cookie: 0 } 2025-03-31T17:11:54.112103Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_1_1_12848638148315327727_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1(assignId:2) wait data in partition inited, cookie 1 from offset3 2025-03-31T17:11:54.112132Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_1_1_12848638148315327727_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1(assignId:2) EndOffset 3 ReadOffset 3 ReadGuid e9d7c2c4-46745c38-3d1de3da-3dee12fb has messages 1 2025-03-31T17:11:54.114359Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/debug session shared/debug_1_1_12848638148315327727_v1 Start reading TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:3) EndOffset 3 readOffset 0 committedOffset 0 clientCommitOffset (empty maybe) clientReadOffset 0 2025-03-31T17:11:54.114393Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_1_1_12848638148315327727_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:3) ready for read with readOffset 0 endOffset 3 2025-03-31T17:11:54.114464Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_1_1_12848638148315327727_v1 read done: guid# e9d7c2c4-46745c38-3d1de3da-3dee12fb, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1(assignId:2), size# 496 2025-03-31T17:11:54.114494Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_1_1_12848638148315327727_v1 response to read: guid# e9d7c2c4-46745c38-3d1de3da-3dee12fb 2025-03-31T17:11:54.114752Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_1_1_12848638148315327727_v1 Process answer. Aval parts: 0 2025-03-31T17:11:54.114870Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_1_1_12848638148315327727_v1 partition ready for read: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:3), readOffset# 0, endOffset# 3, WTime# 1743441113976, sizeLag# 442 2025-03-31T17:11:54.114885Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_1_1_12848638148315327727_v1TEvPartitionReady. 
Aval parts: 1 2025-03-31T17:11:54.115326Z node 2 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--topic1' requestId: 2025-03-31T17:11:54.114917Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_1_1_12848638148315327727_v1 performing read request: guid# c430069c-119cbf86-fe2c8e68-925480f4, from# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:3), count# 3, size# 530, partitionsAsked# 1, maxTimeLag# 0ms 2025-03-31T17:11:54.115363Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--topic1' partition 0 2025-03-31T17:11:54.114985Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_1_1_12848638148315327727_v1 READ FROM TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:3)maxCount 3 maxSize 530 maxTimeLagMs 0 readTimestampMs 0 readOffset 0 EndOffset 3 ClientCommitOffset 0 committedOffset 0 Guid c430069c-119cbf86-fe2c8e68-925480f4 2025-03-31T17:11:54.115477Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 2 Topic 'rt3.dc1--topic1' partition 0 user debug offset 0 count 3 size 530 endOffset 3 max time lag 0ms effective offset 0 2025-03-31T17:11:54.115511Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 2 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 3 2025-03-31T17:11:54.115557Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Reading cookie 2. All data is from uncompacted head. 2025-03-31T17:11:54.115578Z node 2 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-03-31T17:11:54.115681Z node 2 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 0 messageNo: 0 requestId: cookie: 0 Got data event with total 3 messages, current total messages: 3 2025-03-31T17:11:54.116276Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_1_1_12848638148315327727_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:3) initDone 1 event { CmdReadResult { MaxOffset: 3 Result { Offset: 0 Data: "... 99 bytes ..." SourceId: "" SeqNo: 1 WriteTimestampMS: 1743441113976 CreateTimestampMS: 1743441113969 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 1 Data: "... 99 bytes ..." SourceId: "" SeqNo: 2 WriteTimestampMS: 1743441113984 CreateTimestampMS: 1743441113969 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 2 Data: "... 99 bytes ..." 
SourceId: "" SeqNo: 3 WriteTimestampMS: 1743441113985 CreateTimestampMS: 1743441113969 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 40 RealReadOffset: 2 WaitQuotaTimeMs: 0 EndOffset: 3 StartOffset: 0 } Cookie: 0 } 2025-03-31T17:11:54.116403Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_1_1_12848638148315327727_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:3) wait data in partition inited, cookie 1 from offset3 2025-03-31T17:11:54.116440Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_1_1_12848638148315327727_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:3) EndOffset 3 ReadOffset 3 ReadGuid c430069c-119cbf86-fe2c8e68-925480f4 has messages 1 2025-03-31T17:11:54.116545Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_1_1_12848638148315327727_v1 read done: guid# c430069c-119cbf86-fe2c8e68-925480f4, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:3), size# 496 2025-03-31T17:11:54.116581Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_1_1_12848638148315327727_v1 response to read: guid# c430069c-119cbf86-fe2c8e68-925480f4 2025-03-31T17:11:54.116771Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_1_1_12848638148315327727_v1 Process answer. Aval parts: 0 2025-03-31T17:11:54.119511Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_1_1_12848638148315327727_v1 grpc read done: success# 1, data# { read_request { bytes_size: 496 } } 2025-03-31T17:11:54.119736Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_1_1_12848638148315327727_v1 got read request: guid# ae7d1cbd-caf5962b-aa79f8bc-a8618aef Got data event with total 3 messages, current total messages: 6 2025-03-31T17:11:54.121033Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_1_1_12848638148315327727_v1 grpc read done: success# 1, data# { read_request { bytes_size: 496 } } 2025-03-31T17:11:54.121168Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_1_1_12848638148315327727_v1 got read request: guid# 856bd57a-9b837dfb-9e6d5c16-1290b3df Got data event with total 3 messages, current total messages: 9 2025-03-31T17:11:54.122397Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_1_1_12848638148315327727_v1 grpc read done: success# 0, data# { } 2025-03-31T17:11:54.122433Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/debug session shared/debug_1_1_12848638148315327727_v1 grpc read failed 2025-03-31T17:11:54.122469Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/debug session shared/debug_1_1_12848638148315327727_v1 grpc closed 2025-03-31T17:11:54.122529Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/debug session shared/debug_1_1_12848638148315327727_v1 is DEAD 2025-03-31T17:11:54.123020Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/debug_1_1_12848638148315327727_v1 2025-03-31T17:11:54.123073Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [1:7488022569270530782:2540] destroyed 2025-03-31T17:11:54.123118Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] 
Destroy direct read session shared/debug_1_1_12848638148315327727_v1 2025-03-31T17:11:54.123140Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [1:7488022569270530781:2539] destroyed 2025-03-31T17:11:54.123156Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/debug_1_1_12848638148315327727_v1 2025-03-31T17:11:54.123158Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/debug_1_1_12848638148315327727_v1 2025-03-31T17:11:54.123175Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [1:7488022569270530783:2541] destroyed 2025-03-31T17:11:54.123180Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/debug_1_1_12848638148315327727_v1 2025-03-31T17:11:54.123194Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/debug_1_1_12848638148315327727_v1 2025-03-31T17:11:54.123974Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--topic1] pipe [1:7488022569270530776:2536] disconnected; active server actors: 1 2025-03-31T17:11:54.124034Z node 1 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--topic1] pipe [1:7488022569270530776:2536] client debug disconnected session shared/debug_1_1_12848638148315327727_v1 |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |85.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |85.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |86.0%| [TA] $(B)/ydb/services/persqueue_v1/ut/test-results/unittest/{meta.json ... results_accumulator.log} |86.1%| [TA] {RESULT} $(B)/ydb/services/persqueue_v1/ut/test-results/unittest/{meta.json ... results_accumulator.log} |86.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_volatile/unittest >> DataShardVolatile::GracefulShardRestartNoEarlyReadSetAck [GOOD] Test command err: 2025-03-31T17:11:37.716700Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T17:11:37.718204Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T17:11:37.718424Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/r3ux/0001f8/r3tmp/tmpNCfPXo/pdisk_1.dat 2025-03-31T17:11:38.318853Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] ::Bootstrap [1:594:2519] 2025-03-31T17:11:38.318924Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] lookup [1:594:2519] 2025-03-31T17:11:38.319022Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] queue send [1:594:2519] 2025-03-31T17:11:38.319151Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] received pending shutdown [1:594:2519] 2025-03-31T17:11:38.319421Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] forward result local node, try to connect [1:594:2519] 2025-03-31T17:11:38.319477Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480]::SendEvent [1:594:2519] 2025-03-31T17:11:38.320314Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] connected with status OK role: Leader [1:594:2519] 2025-03-31T17:11:38.320377Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] send queued [1:594:2519] 2025-03-31T17:11:38.320414Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] push event to server [1:594:2519] 2025-03-31T17:11:38.320466Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] shutdown pipe due to pending shutdown request [1:594:2519] 2025-03-31T17:11:38.320496Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] notify reset [1:594:2519] 2025-03-31T17:11:38.321819Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:5} Tx{8, NKikimr::NSchemeShard::TSchemeShard::TTxOperationPropose} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxOperationPropose 2025-03-31T17:11:38.321896Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:5} Tx{8, NKikimr::NSchemeShard::TSchemeShard::TTxOperationPropose} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-31T17:11:38.334285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T17:11:38.334588Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:5} Tx{8, NKikimr::NSchemeShard::TSchemeShard::TTxOperationPropose} hope 1 -> done Change{4, redo 987b alter 0b annex 0, ~{ 1, 33, 35, 42, 4 } -{ }, 0 gb} 2025-03-31T17:11:38.334697Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:5} Tx{8, NKikimr::NSchemeShard::TSchemeShard::TTxOperationPropose} release 4194304b of static, Memory{0 dyn 0} 2025-03-31T17:11:38.335206Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:6} commited cookie 1 for step 5 2025-03-31T17:11:38.347074Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:6} Tx{9, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress 2025-03-31T17:11:38.347184Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:6} Tx{9, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} took 4194304b of static mem, 
Memory{4194304 dyn 0} 2025-03-31T17:11:38.348505Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:6} Tx{9, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} hope 1 -> done Change{5, redo 174b alter 0b annex 0, ~{ 42, 4 } -{ }, 0 gb} 2025-03-31T17:11:38.348593Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:6} Tx{9, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} release 4194304b of static, Memory{0 dyn 0} 2025-03-31T17:11:38.349048Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:7} commited cookie 1 for step 6 2025-03-31T17:11:38.349232Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:7} Tx{10, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress 2025-03-31T17:11:38.349269Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:7} Tx{10, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-31T17:11:38.349446Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:7} Tx{10, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} hope 1 -> done Change{6, redo 174b alter 0b annex 0, ~{ 42, 4 } -{ }, 0 gb} 2025-03-31T17:11:38.349490Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:7} Tx{10, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} release 4194304b of static, Memory{0 dyn 0} 2025-03-31T17:11:38.349840Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:8} commited cookie 1 for step 7 2025-03-31T17:11:38.349955Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:8} Tx{11, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress 2025-03-31T17:11:38.349998Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:8} Tx{11, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-31T17:11:38.351198Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:8} Tx{11, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} hope 1 -> done Change{7, redo 120b alter 0b annex 0, ~{ 4 } -{ }, 0 gb} 2025-03-31T17:11:38.351246Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:8} Tx{11, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} release 4194304b of static, Memory{0 dyn 0} 2025-03-31T17:11:38.351552Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:9} commited cookie 1 for step 8 2025-03-31T17:11:38.352170Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046316545] ::Bootstrap [1:601:2525] 2025-03-31T17:11:38.352197Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046316545] lookup [1:601:2525] 2025-03-31T17:11:38.352289Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046316545] queue send [1:601:2525] 2025-03-31T17:11:38.352510Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046316545] forward result local node, try to connect [1:601:2525] 2025-03-31T17:11:38.352542Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046316545]::SendEvent [1:601:2525] 2025-03-31T17:11:38.352668Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046316545] connected with status OK role: Leader [1:601:2525] 2025-03-31T17:11:38.352699Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046316545] send queued [1:601:2525] 2025-03-31T17:11:38.352718Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046316545] push event to server [1:601:2525] 2025-03-31T17:11:38.354111Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] ::Bootstrap [1:604:2527] 
2025-03-31T17:11:38.354155Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] lookup [1:604:2527] 2025-03-31T17:11:38.354230Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] queue send [1:604:2527] 2025-03-31T17:11:38.354277Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] received pending shutdown [1:604:2527] 2025-03-31T17:11:38.354319Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] forward result local node, try to connect [1:604:2527] 2025-03-31T17:11:38.354371Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480]::SendEvent [1:604:2527] 2025-03-31T17:11:38.354526Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] connected with status OK role: Leader [1:604:2527] 2025-03-31T17:11:38.354553Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] send queued [1:604:2527] 2025-03-31T17:11:38.354578Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] push event to server [1:604:2527] 2025-03-31T17:11:38.354617Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] shutdown pipe due to pending shutdown request [1:604:2527] 2025-03-31T17:11:38.354650Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] notify reset [1:604:2527] 2025-03-31T17:11:38.354743Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:9} Tx{12, NKikimr::NSchemeShard::TSchemeShard::TTxNotifyCompletion} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxNotifyCompletion 2025-03-31T17:11:38.354785Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:9} Tx{12, NKikimr::NSchemeShard::TSchemeShard::TTxNotifyCompletion} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-31T17:11:38.355714Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:9} Tx{12, NKikimr::NSchemeShard::TSchemeShard::TTxNotifyCompletion} hope 1 -> done Change{8, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-03-31T17:11:38.355775Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:9} Tx{12, NKikimr::NSchemeShard::TSchemeShard::TTxNotifyCompletion} release 4194304b of static, Memory{0 dyn 0} 2025-03-31T17:11:38.366209Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] client retry [1:88:2135] 2025-03-31T17:11:38.366299Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [1:88:2135] 2025-03-31T17:11:38.366373Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936131] client retry [1:338:2376] 2025-03-31T17:11:38.366395Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936131] lookup [1:338:2376] 2025-03-31T17:11:38.366474Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] forward result local node, try to connect [1:88:2135] 2025-03-31T17:11:38.366530Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033]::SendEvent [1:88:2135] 2025-03-31T17:11:38.366703Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936131] forward result local node, try to connect [1:338:2376] 2025-03-31T17:11:38.366733Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936131]::SendEvent [1:338:2376] 2025-03-31T17:11:38.366819Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] connected with status OK role: Leader [1:88:2135] 2025-03-31T17:11:38.366851Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] send queued [1:88:2135] 2025-03-31T17:11:38.366877Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] push event to server [1:88:2135] 2025-03-31T17:11:38.366943Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936131] connected with status OK role: Leader [1:338:2376] 2025-03-31T17:11:38.367016Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936131] send queued [1:338:2376] 2025-03-31T17:11:38.367054Z node 
1 :PIPE_CLIENT DEBUG: TClient[72057594037936131] push event to server [1:338:2376] 2025-03-31T17:11:38.368542Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:8} Tx{14, NKikimr::NBsController::TBlobStorageController::TTxRegisterNode} queued, type NKikimr::NBsController::TBlobStorageController::TTxRegisterNode 2025-03-31T17:11:38.368625Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:8} Tx{14, NKikimr::NBsController::TBlobStorageController::TTxRegisterNode} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-31T17:11:38.368879Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:8} Tx{14, NKikimr::NBsController::TBlobStorageController::TTxRegisterNode} hope 1 -> done Change{7, redo 79b alter 0b annex 0, ~{ 2 } -{ }, 0 gb} 2025-03-31T17:11:38.368950Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:8} Tx{14, NKikimr::NBsController::TBlobStorageController::TTxRegisterNode} release 4194304b of static, Memory{0 dyn 0} 2025-03-31T17:11:38.371942Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936131] send [1:338:2376] 2025-03-31T17:11:38.371999Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936131] push event to server [1:338:2376] 2025-0 ... atExecutor::ITransaction, TDelete>, NActors::TActorContext const&) /-S/ydb/core/tablet_flat/tablet_flat_executed.cpp:57:5 #16 0x21c60941 in NKikimr::NDataShard::TDataShard::Handle(TAutoPtr, TDelete>&, NActors::TActorContext const&) /-S/ydb/core/tx/datashard/datashard.cpp:3415:21 #17 0x21c0932b in NKikimr::NDataShard::TDataShard::StateWork(TAutoPtr&) /-S/ydb/core/tx/datashard/datashard_impl.h:3157:13 #18 0x1a54e5cc in NActors::IActor::Receive(TAutoPtr&) /-S/ydb/library/actors/core/actor.cpp:280:13 #19 0x36c883a4 in NActors::TTestActorRuntimeBase::SendInternal(TAutoPtr, unsigned int, bool) /-S/ydb/library/actors/testlib/test_runtime.cpp:1702:33 #20 0x36c80c19 in NActors::TTestActorRuntimeBase::DispatchEventsInternal(NActors::TDispatchOptions const&, TInstant) /-S/ydb/library/actors/testlib/test_runtime.cpp:1295:45 #21 0x36c8af93 in NActors::TTestActorRuntimeBase::WaitForEdgeEvents(std::__y1::function&)>, TSet, std::__y1::allocator> const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.cpp:1554:22 #22 0x36e535b3 in NActors::TEvents::TEvWakeup::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEventIf(TSet, std::__y1::allocator> const&, std::__y1::function const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.h:477:13 #23 0x36e52b52 in GrabEdgeEvent /-S/ydb/library/actors/testlib/test_runtime.h:526:20 #24 0x36e52b52 in NActors::TEvents::TEvWakeup::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEvent(NActors::TActorId const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.h:532:20 #25 0x36e4adb2 in NActors::TEvents::TEvWakeup::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEventRethrow(NActors::TActorId const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.h:577:24 #26 0x36e4a98a in NActors::TTestActorRuntime::SimulateSleep(TDuration) /-S/ydb/core/testlib/actors/test_runtime.cpp:296:9 #27 0x18dd4393 in NKikimr::NTestSuiteDataShardVolatile::TTestCaseGracefulShardRestartNoEarlyReadSetAck::Execute_(NUnitTest::TTestContext&) /-S/ydb/core/tx/datashard/datashard_ut_volatile.cpp:3909:17 #28 0x18de9c37 in operator() /-S/ydb/core/tx/datashard/datashard_ut_volatile.cpp:17:1 #29 0x18de9c37 in __invoke<(lambda at /-S/ydb/core/tx/datashard/datashard_ut_volatile.cpp:17:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25 #30 0x18de9c37 in __call<(lambda at 
/-S/ydb/core/tx/datashard/datashard_ut_volatile.cpp:17:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5 #31 0x18de9c37 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12 #32 0x18de9c37 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10 #33 0x198001f5 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12 #34 0x198001f5 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10 #35 0x198001f5 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #36 0x197cfd48 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18 #37 0x18de8ae3 in NKikimr::NTestSuiteDataShardVolatile::TCurrentTest::Execute() /-S/ydb/core/tx/datashard/datashard_ut_volatile.cpp:17:1 #38 0x197d1615 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:495:19 #39 0x197fa76c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44 #40 0x7fdb746a9d8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) Indirect leak of 8 byte(s) in 1 object(s) allocated from: #0 0x1903ffcd in operator new(unsigned long) /-S/contrib/libs/clang18-rt/lib/asan/asan_new_delete.cpp:86:3 #1 0x19c7030e in CreateString > > /-S/contrib/libs/protobuf/src/google/protobuf/arenastring.cc:102:20 #2 0x19c7030e in void google::protobuf::internal::ArenaStringPtr::Set<>(TBasicString> const&, google::protobuf::Arena*) /-S/contrib/libs/protobuf/src/google/protobuf/arenastring.cc:150:38 #3 0x2ac770e5 in set_name > &> /-B/ydb/core/protos/query_stats.pb.h:1740:16 #4 0x2ac770e5 in SetName /-B/ydb/core/protos/query_stats.pb.h:696:56 #5 0x2ac770e5 in NKikimr::NDataShard::KqpFillTxStats(NKikimr::NDataShard::TDataShard&, NKikimr::NMiniKQL::TEngineHostCounters const&, NKikimrQueryStats::TTxStats&) /-S/ydb/core/tx/datashard/datashard_kqp.cpp:917:34 #6 0x2b086ab4 in NKikimr::NDataShard::TExecuteWriteUnit::Execute(TIntrusivePtr>, NKikimr::NTabletFlatExecutor::TTransactionContext&, NActors::TActorContext const&) /-S/ydb/core/tx/datashard/execute_write_unit.cpp:436:13 #7 0x2aedef1c in NKikimr::NDataShard::TPipeline::RunExecutionPlan(TIntrusivePtr>, TVector>&, NKikimr::NTabletFlatExecutor::TTransactionContext&, NActors::TActorContext const&) /-S/ydb/core/tx/datashard/datashard_pipeline.cpp:1851:28 #8 0x2ab9fc94 in NKikimr::NDataShard::TDataShard::TTxWrite::Execute(NKikimr::NTabletFlatExecutor::TTransactionContext&, NActors::TActorContext const&) /-S/ydb/core/tx/datashard/datashard__write.cpp:100:38 #9 0x2062a8d4 in NKikimr::NTabletFlatExecutor::TExecutor::ExecuteTransaction(NKikimr::NTabletFlatExecutor::TSeat*) /-S/ydb/core/tablet_flat/flat_executor.cpp:1910:35 #10 0x20626274 in NKikimr::NTabletFlatExecutor::TExecutor::DoExecute(TAutoPtr, NKikimr::NTabletFlatExecutor::TExecutor::ETxMode) /-S/ydb/core/tablet_flat/flat_executor.cpp:1814:13 #11 0x2062d46e in NKikimr::NTabletFlatExecutor::TExecutor::Execute(TAutoPtr, NActors::TActorContext const&) /-S/ydb/core/tablet_flat/flat_executor.cpp:1828:5 #12 0x205c3ada in Execute /-S/ydb/core/tablet_flat/tablet_flat_executed.cpp:62:46 #13 0x205c3ada in NKikimr::NTabletFlatExecutor::TTabletExecutedFlat::Execute(TAutoPtr, NActors::TActorContext const&) 
/-S/ydb/core/tablet_flat/tablet_flat_executed.cpp:57:5 #14 0x21c60941 in NKikimr::NDataShard::TDataShard::Handle(TAutoPtr, TDelete>&, NActors::TActorContext const&) /-S/ydb/core/tx/datashard/datashard.cpp:3415:21 #15 0x21c0932b in NKikimr::NDataShard::TDataShard::StateWork(TAutoPtr&) /-S/ydb/core/tx/datashard/datashard_impl.h:3157:13 #16 0x1a54e5cc in NActors::IActor::Receive(TAutoPtr&) /-S/ydb/library/actors/core/actor.cpp:280:13 #17 0x36c883a4 in NActors::TTestActorRuntimeBase::SendInternal(TAutoPtr, unsigned int, bool) /-S/ydb/library/actors/testlib/test_runtime.cpp:1702:33 #18 0x36c80c19 in NActors::TTestActorRuntimeBase::DispatchEventsInternal(NActors::TDispatchOptions const&, TInstant) /-S/ydb/library/actors/testlib/test_runtime.cpp:1295:45 #19 0x36c8af93 in NActors::TTestActorRuntimeBase::WaitForEdgeEvents(std::__y1::function&)>, TSet, std::__y1::allocator> const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.cpp:1554:22 #20 0x36e535b3 in NActors::TEvents::TEvWakeup::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEventIf(TSet, std::__y1::allocator> const&, std::__y1::function const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.h:477:13 #21 0x36e52b52 in GrabEdgeEvent /-S/ydb/library/actors/testlib/test_runtime.h:526:20 #22 0x36e52b52 in NActors::TEvents::TEvWakeup::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEvent(NActors::TActorId const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.h:532:20 #23 0x36e4adb2 in NActors::TEvents::TEvWakeup::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEventRethrow(NActors::TActorId const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.h:577:24 #24 0x36e4a98a in NActors::TTestActorRuntime::SimulateSleep(TDuration) /-S/ydb/core/testlib/actors/test_runtime.cpp:296:9 #25 0x18dd4393 in NKikimr::NTestSuiteDataShardVolatile::TTestCaseGracefulShardRestartNoEarlyReadSetAck::Execute_(NUnitTest::TTestContext&) /-S/ydb/core/tx/datashard/datashard_ut_volatile.cpp:3909:17 #26 0x18de9c37 in operator() /-S/ydb/core/tx/datashard/datashard_ut_volatile.cpp:17:1 #27 0x18de9c37 in __invoke<(lambda at /-S/ydb/core/tx/datashard/datashard_ut_volatile.cpp:17:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25 #28 0x18de9c37 in __call<(lambda at /-S/ydb/core/tx/datashard/datashard_ut_volatile.cpp:17:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5 #29 0x18de9c37 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12 #30 0x18de9c37 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10 #31 0x198001f5 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12 #32 0x198001f5 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10 #33 0x198001f5 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #34 0x197cfd48 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18 #35 0x18de8ae3 in NKikimr::NTestSuiteDataShardVolatile::TCurrentTest::Execute() /-S/ydb/core/tx/datashard/datashard_ut_volatile.cpp:17:1 #36 0x197d1615 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:495:19 #37 0x197fa76c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44 #38 0x7fdb746a9d8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) 
(BuildId: 490fef8403240c91833978d494d39e537409b92e) SUMMARY: AddressSanitizer: 664 byte(s) leaked in 11 allocation(s). |86.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |86.3%| [TA] $(B)/ydb/core/tx/datashard/ut_volatile/test-results/unittest/{meta.json ... results_accumulator.log} |86.3%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_volatile/test-results/unittest/{meta.json ... results_accumulator.log} |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |86.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |86.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TConflictWriteOltp [FAIL] Test command err: Trying to start YDB, gRPC: 2096, MsgBus: 11038 2025-03-31T17:11:45.303466Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488022531416498750:2138];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:11:45.304482Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/r3ux/0002bd/r3tmp/tmpjRAzSV/pdisk_1.dat 2025-03-31T17:11:45.670807Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2096, node 1 2025-03-31T17:11:45.746162Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T17:11:45.746191Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T17:11:45.746199Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T17:11:45.746324Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T17:11:45.752685Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T17:11:45.752832Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T17:11:45.754945Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11038 TClient is connected to server localhost:11038 WaitRootIsUp 'Root'... 
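The "Indirect leak" records above are LeakSanitizer's way of saying that the 8-byte string block is reachable only through another leaked block: the direct leak is the owning protobuf message, and the ArenaStringPtr::Set frame under KqpFillTxStats only shows where the string field (SetName) was allocated. A minimal, self-contained illustration of the direct/indirect distinction (generic C++ built with -fsanitize=address, not YDB code; all names are made up for the sketch):

    #include <string>

    struct TOwner {
        std::string* Name;                 // heap-allocated string owned by TOwner
        ~TOwner() { delete Name; }
    };

    int main() {
        // Never deleted: LSan reports 'owner' itself as a direct leak, and the
        // std::string object (plus its character buffer) as indirect leaks,
        // because they are reachable only through the leaked owner.
        TOwner* owner = new TOwner{new std::string("long enough to defeat small-string optimization")};
        (void)owner;
        return 0;                          // leak report is printed at process exit
    }

Fixing the direct leak (deleting, or properly owning, the message that carries the stats) makes the indirect string leak disappear with it.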
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T17:11:46.194172Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T17:11:47.907342Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022540006433912:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:47.907479Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022540006433921:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:47.907534Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:47.912570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T17:11:47.923360Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488022540006433926:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T17:11:47.987985Z node 1 :TX_PROXY ERROR: Actor# [1:7488022540006433978:2336] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T17:11:48.292951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T17:11:48.388925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T17:11:49.284445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T17:11:50.296874Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488022531416498750:2138];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:11:50.296951Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T17:11:50.568190Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OWY0ZmM1NzMtODg1YzJlNjEtM2VkYjA3M2MtNTAwZTM3NjE=, ActorId: [1:7488022552891344390:2969], ActorState: ExecuteState, TraceId: 01jqpk09dffz65tdczs7e9gn0f, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191FE23B
1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x196C634F
2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76: DoExecute @ 0x18DC7347
3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18D0BACA
4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:105: Execute_ @ 0x18DB81F2
5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18DBEF27
6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27
7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27
8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18DBEF27
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18DBEF27
10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196FD375
11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196FD375
12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196FD375
13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x196CCEC8
14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18DBE0F3
15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x196CE795
16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196F78EC
17. ??:0: ?? @ 0x7FD1BF974D8F
18. ??:0: ?? @ 0x7FD1BF974E3F
19. ??:0: ?? @ 0x163B4028
>> KqpSnapshotIsolation::TSimpleOltpNoSink |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |87.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKString |87.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TKeyValueTracingTest::ReadHuge >> TKeyValueTracingTest::WriteSmall >> TKeyValueTracingTest::ReadSmall >> TKeyValueTracingTest::WriteHuge |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |87.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |87.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v0] |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKUtf8 |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |87.8%| [TM] {asan,
default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |88.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |88.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest |88.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> TKeyValueTracingTest::ReadSmall [FAIL] >> TKeyValueTracingTest::ReadHuge [FAIL] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::OlapWriteRow Test command err: Trying to start YDB, gRPC: 16127, MsgBus: 23846 2025-03-31T17:11:44.412537Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488022527486992366:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:11:44.412677Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/r3ux/0002ff/r3tmp/tmpmDYPXk/pdisk_1.dat 2025-03-31T17:11:44.693092Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16127, node 1 2025-03-31T17:11:44.761813Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T17:11:44.761962Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T17:11:44.764236Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T17:11:44.766638Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T17:11:44.766694Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T17:11:44.766708Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T17:11:44.766897Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23846 TClient is connected to server localhost:23846 WaitRootIsUp 'Root'... 
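The "(PRECONDITION_FAILED != SUCCESS) ... with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)" lines in the KqpSnapshotIsolation failures are the unittest framework's value-equality output: runs common to both values are printed bare, and differing runs are printed as (left|right), so the left value reads PRE+C+ONDITION_FAIL+E+D and the right SUC+C+E+SS. A minimal sketch that reproduces this output shape, assuming the suites use library/cpp/testing/unittest (the exact macro used by these tests is an assumption; only the printed format is taken from the log):

    #include <library/cpp/testing/unittest/registar.h>

    Y_UNIT_TEST_SUITE(TDiffFormatDemo) {
        Y_UNIT_TEST(ShowsInterleavedDiff) {
            // Fails with: (PRECONDITION_FAILED != SUCCESS), with diff:
            //   (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)
            UNIT_ASSERT_VALUES_EQUAL(TString("PRECONDITION_FAILED"), TString("SUCCESS"));
        }
    }

The trailing ": Error: SnapshotRW can only be used with olap tables." line is the assertion's attached context (the query issues), not a separate failure.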
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T17:11:45.300971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T17:11:45.315950Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T17:11:45.321269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T17:11:45.450041Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T17:11:45.605202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T17:11:45.684816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T17:11:47.066355Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022540371896037:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:47.066482Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:47.336572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T17:11:47.369111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T17:11:47.437319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T17:11:47.467804Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T17:11:47.501730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T17:11:47.571008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T17:11:47.607572Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022540371896558:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:47.607670Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:47.607756Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022540371896563:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:47.611252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T17:11:47.620576Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488022540371896565:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T17:11:47.698738Z node 1 :TX_PROXY ERROR: Actor# [1:7488022540371896618:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T17:11:48.615001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T17:11:48.750553Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7488022544666864365:2505];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T17:11:48.750767Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7488022544666864365:2505];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T17:11:48.751076Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7488022544666864365:2505];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T17:11:48.751230Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7488022544666864365:2505];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T17:11:48.751340Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7488022544666864365:2505];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T17:11:48.751502Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7488022544666864365:2505];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T17:11:48.751612Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7488022544666864365:2505];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T17:11:48.751720Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7488022544666864365:2505];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T17:11:48.751829Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7488022544666864365:2505];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T17:11:48.751974Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7488022544666864365:2505];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T17:11:48.752087Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037920;self_id=[1:7488022544666864365:2505];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T17:11:48.752197Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7488022544666864365:2505];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T17:11:48.752964Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[1:7488022544666864369:2507];tablet_id=72075186224037925;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T17:11:48.753029Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[1:7488022544666864369:2507];tablet_id=72075186224037925;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T17:11:48.753285Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[1:7488022544666864369:2507];tablet_id=72075186224037925;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T17:11:48.7 ... xUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T17:11:48.917105Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T17:11:48.917129Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T17:11:48.917280Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T17:11:48.917304Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T17:11:48.917378Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T17:11:48.917403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T17:11:48.917455Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T17:11:48.917476Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T17:11:48.917504Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T17:11:48.917522Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T17:11:48.917962Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T17:11:48.918012Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T17:11:48.918232Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T17:11:48.918267Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T17:11:48.918384Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T17:11:48.918412Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T17:11:48.918554Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T17:11:48.918581Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T17:11:48.918668Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T17:11:48.918690Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T17:11:48.941983Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-31T17:11:48.942450Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037926;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-31T17:11:48.946982Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037920;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-31T17:11:48.948558Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037922;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-31T17:11:48.951839Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037921;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-31T17:11:48.954595Z node 1 
:TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037924;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-31T17:11:48.956368Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-31T17:11:48.960781Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-31T17:11:48.960795Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037919;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-31T17:11:48.965245Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037925;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-31T17:11:49.074687Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=281474976710673;tx_id=281474976710673;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710673; 2025-03-31T17:11:49.074687Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=281474976710673;tx_id=281474976710673;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710673; 2025-03-31T17:11:49.075467Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=281474976710673;tx_id=281474976710673;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710673; 2025-03-31T17:11:49.153094Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=281474976710675;tx_id=281474976710675;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710675; query_phases { duration_us: 6546 cpu_time_us: 1799 affected_shards: 1 } query_phases { duration_us: 6709 cpu_time_us: 339 affected_shards: 1 } compilation { duration_us: 56070 cpu_time_us: 53487 } process_cpu_time_us: 606 total_duration_us: 72003 total_cpu_time_us: 56231 AddressSanitizer:DEADLYSIGNAL ================================================================= ==965282==ERROR: AddressSanitizer: SEGV on unknown address 0x000000000008 (pc 0x000018b718ad bp 0x7fffdad8b020 sp 0x7fffdad8ae80 T0) ==965282==The signal is caused by a READ memory access. ==965282==Hint: address points to the zero page. 
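The faulting address is the tell here: a READ at 0x000000000008 is a null base pointer plus a small member offset, and the frames below land in protobuf's repeated_ptr_field.h Get via the generated table_access accessor, which is consistent with indexing a repeated field on a stats message that was never populated. A small sketch of why such a fault comes out at exactly 0x8 (the struct is a made-up stand-in, not protobuf's real internal layout):

    #include <cstddef>
    #include <cstdio>

    // Hypothetical stand-in for a repeated-field representation.
    struct TFakeRep {
        int CurrentSize;    // bytes 0..3, padded to 8 on LP64
        void** Elements;    // lives at byte offset 8
    };

    int main() {
        // Reading rep->Elements when rep == nullptr dereferences
        // 0 + offsetof(TFakeRep, Elements) == 0x8, matching the report below.
        std::printf("offsetof = %zu\n", offsetof(TFakeRep, Elements));
        return 0;
    }

The usual hardening on the test side is to bounds-check before indexing, e.g. verify phase.table_access_size() > 0 before calling phase.table_access(0).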
2025-03-31T17:11:49.412496Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488022527486992366:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:11:49.412659Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T17:11:59.692819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T17:11:59.692863Z node 1 :IMPORT WARN: Table profiles were not loaded
#0 0x18b718ad in Get::TypeHandler> /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:273:31
#1 0x18b718ad in Get /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:1348:32
#2 0x18b718ad in _internal_table_access /-B/ydb/public/api/protos/ydb_query_stats.pb.h:1762:31
#3 0x18b718ad in table_access /-B/ydb/public/api/protos/ydb_query_stats.pb.h:1766:10
#4 0x18b718ad in NKikimr::NKqp::NTestSuiteKqpCost::TTestCaseOlapWriteRow::Execute_(NUnitTest::TTestContext&) /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:636:13
#5 0x18b96437 in operator() /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1
#6 0x18b96437 in __invoke<(lambda at /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25
#7 0x18b96437 in __call<(lambda at /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5
#8 0x18b96437 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12
#9 0x18b96437 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10
#10 0x194e85e5 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12
#11 0x194e85e5 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10
#12 0x194e85e5 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20
#13 0x194b8138 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18
#14 0x18b952e3 in NKikimr::NKqp::NTestSuiteKqpCost::TCurrentTest::Execute() /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1
#15 0x194b9a05 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:495:19
#16 0x194e2b5c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44
#17 0x7fa74832fd8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e)
#18 0x7fa74832fe3f in __libc_start_main (/lib/x86_64-linux-gnu/libc.so.6+0x29e3f) (BuildId: 490fef8403240c91833978d494d39e537409b92e)
#19 0x16306028 in _start (/home/runner/.ya/build/build_root/r3ux/0002ff/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost+0x16306028) (BuildId: 5128b0b237a145c7e4cbd2034645258e10874b06)
AddressSanitizer can not provide additional info.
SUMMARY: AddressSanitizer: SEGV /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:273:31 in Get::TypeHandler> ==965282==ABORTING >> TKeyValueTracingTest::WriteSmall [FAIL] >> TKeyValueTracingTest::WriteHuge [FAIL] |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |88.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |88.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |89.1%| [TA] $(B)/ydb/core/kqp/ut/cost/test-results/unittest/{meta.json ... results_accumulator.log} |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |89.2%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/cost/test-results/unittest/{meta.json ... results_accumulator.log} |89.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |89.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |89.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> KqpSnapshotIsolation::TReadOnlyOltpNoSink |89.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> KqpSnapshotIsolation::TSimpleOltp >> KqpSinkTx::OlapInvalidateOnError |89.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |89.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest |89.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |89.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |90.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest |90.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |90.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |90.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TConflictWriteOltpNoSink >> KqpSnapshotIsolation::TConflictReadWriteOlap |90.7%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/kqp/ut/tx/unittest |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TConflictReadWriteOltp |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |91.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |91.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |91.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> Viewer::JsonStorageListingV1PDiskIdFilter >> KqpSnapshotIsolation::TConflictWriteOlap |91.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpPg::TempTablesSessionsIsolation >> Viewer::QueryExecuteScript |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> test_sql_streaming.py::test[suites-ReadTopicWithSchema-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-ReadTwoTopics-default.txt] |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest |92.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest |92.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest |92.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest |92.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> test_sql_streaming.py::test[suites-GroupByHopByStringKey-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHopExprKey-default.txt] >> Balancing::Balancing_OneTopic_PQv1 >> test_sql_streaming.py::test[suites-GroupByHoppingWindow-default.txt] [FAIL] >> 
test_sql_streaming.py::test[suites-GroupByHoppingWindowByStringKey-default.txt] >> TopicAutoscaling::ReadingAfterSplitTest_AutoscaleAwareSDK >> TopicAutoscaling::ReadingAfterSplitTest_BeforeAutoscaleAwareSDK |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TSimpleOltpNoSink [FAIL] >> test_sql_streaming.py::test[suites-GroupByHoppingWithDataWatermarks-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-ReadTopic-default.txt] |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::TempTablesWithCache |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpSnapshotIsolation::TReadOnlyOltp
------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::WriteSmall [FAIL] Test command err: assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:103, void TestOneWrite(TString, TVector &&): (env.WilsonUploader->Traces.size() == 1) failed: (2 != 1)
TBackTrace::Capture()+28 (0x1013BF9C)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x105F7E30)
TestOneWrite(TBasicString>, TVector>, std::__y1::allocator>>>&&)+4253 (0xFD7EA7D)
NTestSuiteTKeyValueTracingTest::TTestCaseWriteSmall::Execute_(NUnitTest::TTestContext&)+216 (0xFD8A5A8)
std::__y1::__function::__func, void ()>::operator()()+280 (0xFD9E7F8)
TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x10625D96)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x105FE9A9)
NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()+1204 (0xFD9D6A4)
NUnitTest::TTestFactory::Execute()+2438 (0x10600276)
NUnitTest::RunMain(int, char**)+5213 (0x1062030D)
??+0 (0x7F6114E4DD90)
__libc_start_main+128 (0x7F6114E4DE40)
_start+41 (0xD71A029)
------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::ReadSmall [FAIL] Test command err: equal assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:124, void TestOneRead(TString, TString): env.WilsonUploader->Traces.size() == 1
TBackTrace::Capture()+28 (0x1013BF9C)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x105F7E30)
TestOneRead(TBasicString>, TBasicString>)+4828 (0xFD8444C)
NTestSuiteTKeyValueTracingTest::TTestCaseReadSmall::Execute_(NUnitTest::TTestContext&)+318 (0xFD8AC2E)
std::__y1::__function::__func, void ()>::operator()()+280 (0xFD9E7F8)
TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x10625D96)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x105FE9A9)
NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()+1204 (0xFD9D6A4)
NUnitTest::TTestFactory::Execute()+2438 (0x10600276)
NUnitTest::RunMain(int, char**)+5213 (0x1062030D)
??+0 (0x7FC3EAF48D90)
__libc_start_main+128 (0x7FC3EAF48E40)
_start+41 (0xD71A029)
------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::ReadHuge [FAIL] Test command err: equal assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:124, void TestOneRead(TString, TString): env.WilsonUploader->Traces.size() == 1
TBackTrace::Capture()+28 (0x1013BF9C)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x105F7E30)
TestOneRead(TBasicString>, TBasicString>)+4828 (0xFD8444C)
NTestSuiteTKeyValueTracingTest::TTestCaseReadHuge::Execute_(NUnitTest::TTestContext&)+318 (0xFD8B01E)
std::__y1::__function::__func, void ()>::operator()()+280 (0xFD9E7F8)
TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x10625D96)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x105FE9A9)
NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()+1204 (0xFD9D6A4)
NUnitTest::TTestFactory::Execute()+2438 (0x10600276)
NUnitTest::RunMain(int, char**)+5213 (0x1062030D)
??+0 (0x7FF3AA15FD90)
__libc_start_main+128 (0x7FF3AA15FE40)
_start+41 (0xD71A029)
------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::WriteHuge [FAIL] Test command err: assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:103, void TestOneWrite(TString, TVector &&): (env.WilsonUploader->Traces.size() == 1) failed: (2 != 1)
TBackTrace::Capture()+28 (0x1013BF9C)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x105F7E30)
TestOneWrite(TBasicString>, TVector>, std::__y1::allocator>>>&&)+4253 (0xFD7EA7D)
NTestSuiteTKeyValueTracingTest::TTestCaseWriteHuge::Execute_(NUnitTest::TTestContext&)+216 (0xFD8A8B8)
std::__y1::__function::__func, void ()>::operator()()+280 (0xFD9E7F8)
TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x10625D96)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x105FE9A9)
NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()+1204 (0xFD9D6A4)
NUnitTest::TTestFactory::Execute()+2438 (0x10600276)
NUnitTest::RunMain(int, char**)+5213 (0x1062030D)
??+0 (0x7FD93DC45D90)
__libc_start_main+128 (0x7FD93DC45E40)
_start+41 (0xD71A029)
>> TSentinelUnstableTests::BSControllerCantChangeStatus |93.2%| [TA] $(B)/ydb/core/keyvalue/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log} |93.3%| [TA] {RESULT} $(B)/ydb/core/keyvalue/ut_trace/test-results/unittest/{meta.json ...
results_accumulator.log} >> TCreateAndDropViewTest::DropNonexistingView >> test_retry.py::TestRetry::test_fail_first[kikimr0] [GOOD] >> KqpPg::TempTablesSessionsIsolation [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TSimpleOltpNoSink [FAIL] Test command err: Trying to start YDB, gRPC: 21816, MsgBus: 24308 2025-03-31T17:11:59.678338Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488022589686061750:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:11:59.678431Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/r3ux/0002bc/r3tmp/tmpyhtMIX/pdisk_1.dat 2025-03-31T17:12:00.037561Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21816, node 1 2025-03-31T17:12:00.086839Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T17:12:00.087315Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T17:12:00.090481Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T17:12:00.110293Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T17:12:00.110327Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T17:12:00.110336Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T17:12:00.110520Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24308 TClient is connected to server localhost:24308 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T17:12:00.570377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T17:12:02.272270Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022602570964301:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:12:02.272364Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022602570964306:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:12:02.272447Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:12:02.276730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T17:12:02.287496Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488022602570964315:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T17:12:02.387774Z node 1 :TX_PROXY ERROR: Actor# [1:7488022602570964366:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T17:12:02.660737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T17:12:02.813903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T17:12:03.787717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T17:12:04.800366Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488022589686061750:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:12:04.802165Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T17:12:05.267886Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDMzNDRjOGQtNDRmZjY1NjItMzI5YzBmZTYtYzQwNmFlZg==, ActorId: [1:7488022615455874704:2968], ActorState: ExecuteState, TraceId: 01jqpk0qqe0dy5c7znqfssj70p, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TSimple::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191FE23B
1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x196C634F
2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25: DoExecute @ 0x18DC0DB7
3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18D0BACA
4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:57: Execute_ @ 0x18DB7D9A
5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18DBEF27
6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27
7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27
8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18DBEF27
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18DBEF27
10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196FD375
11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196FD375
12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196FD375
13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x196CCEC8
14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18DBE0F3
15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x196CE795
16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196F78EC
17. ??:0: ?? @ 0x7FE77CBA3D8F
18. ??:0: ?? @ 0x7FE77CBA3E3F
19. ??:0: ?? @ 0x163B4028
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::TempTablesSessionsIsolation [GOOD] Test command err: Trying to start YDB, gRPC: 22172, MsgBus: 18222 2025-03-31T17:12:08.961077Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488022628498527207:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:12:08.961214Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/r3ux/000219/r3tmp/tmpguJOWa/pdisk_1.dat 2025-03-31T17:12:09.260808Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22172, node 1 2025-03-31T17:12:09.326160Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T17:12:09.334320Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T17:12:09.337921Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T17:12:09.371477Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T17:12:09.371501Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T17:12:09.371512Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T17:12:09.371703Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18222 TClient is connected to server localhost:18222 WaitRootIsUp
'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T17:12:09.935192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T17:12:11.939503Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022641383429758:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:12:11.939584Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:12:11.939859Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022641383429768:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:12:11.944176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T17:12:11.957616Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488022641383429772:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T17:12:12.052621Z node 1 :TX_PROXY ERROR: Actor# [1:7488022645678397121:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T17:12:12.114795Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-03-31T17:12:12.130137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:1, at schemeshard: 72057594046644480 2025-03-31T17:12:12.431709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T17:12:12.437410Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710665, at schemeshard: 72057594046644480 2025-03-31T17:12:12.438482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T17:12:12.445224Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-03-31T17:12:12.478857Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488022645678397420:2368], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:27: Error: At function: KiReadTable!
:3:27: Error: Cannot find table 'db.[/Root/PgTemp]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T17:12:12.479329Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTA5ZTBjZmEtNmQ4NDk5NmEtYzQxNjQ1M2QtNjg0NjQyMTc=, ActorId: [1:7488022645678397371:2362], ActorState: ExecuteState, TraceId: 01jqpk0ywz88vdac60byv6bh6g, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false] >> KqpSnapshotIsolation::TReadOnlyOltpNoSink [FAIL] >> KqpSnapshotIsolation::TSimpleOltp [FAIL] >> test_tpch_import.py::TestS3TpchImport::test_import_and_export >> alter_compression.py::TestAlterCompression::test_all_supported_compression >> test_tpch.py::TestTpchS1::test_tpch[12] |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_postgres.py::TestPostgresSuite::test_postgres_suite[float8] >> KqpPg::TempTablesWithCache [GOOD] >> KqpSnapshotIsolation::TConflictWriteOltpNoSink [FAIL] >> test_read_update_write_load.py::TestReadUpdateWriteLoad::test[read_update_write_load] |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TCreateAndDropViewTest::DropNonexistingView [GOOD] >> TCreateAndDropViewTest::CallDropViewOnTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::TempTablesWithCache [GOOD] Test command err: Trying to start YDB, gRPC: 11253, MsgBus: 8057 2025-03-31T17:12:10.735792Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488022638485213409:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:12:10.735870Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/r3ux/00023c/r3tmp/tmp7pzWVf/pdisk_1.dat 2025-03-31T17:12:11.126769Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11253, node 1 2025-03-31T17:12:11.166364Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T17:12:11.166511Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T17:12:11.175677Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T17:12:11.234252Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T17:12:11.234278Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T17:12:11.234292Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T17:12:11.234433Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8057 TClient is connected to server localhost:8057 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T17:12:11.878739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T17:12:11.907959Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T17:12:14.016864Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022655665083258:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:12:14.017047Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:12:14.017462Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022655665083270:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:12:14.021981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T17:12:14.036616Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488022655665083272:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T17:12:14.107743Z node 1 :TX_PROXY ERROR: Actor# [1:7488022655665083323:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T17:12:14.166330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T17:12:14.329218Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-03-31T17:12:14.353940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T17:12:14.513654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T17:12:14.801315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:1, at schemeshard: 72057594046644480 2025-03-31T17:12:15.068814Z node 1 :TX_PROXY ERROR: Actor# [1:7488022659960051225:2698] txid# 281474976710673, issues: { message: "Check failed: path: \'/Root/.tmp/sessions\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T17:12:15.089466Z node 1 :TX_PROXY ERROR: Actor# [1:7488022659960051232:2703] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.tmp/sessions/ZDcyZDYzODgtMTE4YTFkNWEtN2RmMTBiZmMtZjc0OGVkNTQ=\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 11], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T17:12:15.111568Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037891 not found 2025-03-31T17:12:15.159575Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2025-03-31T17:12:15.189786Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488022659960051354:2435], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:31: Error: At function: KiReadTable!
:3:31: Error: Cannot find table 'db.[/Root/PgTemp]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T17:12:15.191599Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=M2RjOWU4MTQtN2U5YzFlMTAtYjUzZTUzOGEtOWFhMGM4Mg==, ActorId: [1:7488022659960051352:2434], ActorState: ExecuteState, TraceId: 01jqpk11j8c9kbpc7f1aqavzqz, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TReadOnlyOltpNoSink [FAIL] Test command err: Trying to start YDB, gRPC: 15136, MsgBus: 10067 2025-03-31T17:12:03.424468Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488022609382341397:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:12:03.424516Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/r3ux/000333/r3tmp/tmpv5t6D2/pdisk_1.dat 2025-03-31T17:12:03.772063Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15136, node 1 2025-03-31T17:12:03.810757Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T17:12:03.810863Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T17:12:03.814947Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T17:12:03.851635Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T17:12:03.851659Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T17:12:03.851665Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T17:12:03.851780Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10067 TClient is connected to server localhost:10067 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T17:12:04.452463Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T17:12:04.488588Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T17:12:06.464686Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022622267243933:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:12:06.464926Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:12:06.465186Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022622267243962:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:12:06.469207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T17:12:06.477617Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488022622267243964:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T17:12:06.533529Z node 1 :TX_PROXY ERROR: Actor# [1:7488022622267244015:2337] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T17:12:06.820889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T17:12:06.934381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T17:12:07.744846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T17:12:08.442410Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488022609382341397:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:12:08.443399Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T17:12:09.008614Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTZjZWRiZmItN2I4M2E0NTEtZDM0YTA4MjktZWVkNTYwNGU=, ActorId: [1:7488022630857187104:2968], ActorState: ExecuteState, TraceId: 01jqpk0vaz791kfntwhznf87fe, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TReadOnly::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191FE23B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x196C634F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185: DoExecute @ 0x18DD66A3 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18D0BACA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:221: Execute_ @ 0x18DB911A 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18DBEF27 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18DBEF27 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18DBEF27 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196FD375 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196FD375 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196FD375 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x196CCEC8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18DBE0F3 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x196CE795 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196F78EC 17. ??:0: ?? @ 0x7F4FA8261D8F 18. ??:0: ?? @ 0x7F4FA8261E3F 19. ??:0: ?? @ 0x163B4028 |93.8%| [TA] $(B)/ydb/core/kqp/ut/pg/test-results/unittest/{meta.json ... results_accumulator.log} |93.9%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/pg/test-results/unittest/{meta.json ... results_accumulator.log}
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TSimpleOltp [FAIL] Test command err: Trying to start YDB, gRPC: 4312, MsgBus: 10531 2025-03-31T17:12:03.469536Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488022607974721369:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:12:03.469635Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/r3ux/00032a/r3tmp/tmphgUa5l/pdisk_1.dat 2025-03-31T17:12:03.867111Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T17:12:03.873309Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T17:12:03.873458Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T17:12:03.875037Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4312, node 1 2025-03-31T17:12:03.964255Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T17:12:03.964282Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T17:12:03.964296Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T17:12:03.964454Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10531 TClient is connected to server localhost:10531 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T17:12:04.494194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T17:12:06.245395Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022620859623921:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:12:06.245565Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022620859623929:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:12:06.245659Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:12:06.249614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T17:12:06.258346Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488022620859623935:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T17:12:06.343020Z node 1 :TX_PROXY ERROR: Actor# [1:7488022620859623986:2336] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T17:12:06.666586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T17:12:06.775721Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T17:12:07.610261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T17:12:08.491253Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488022607974721369:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:12:08.493617Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T17:12:08.964598Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjE4ODNhZjktYTczY2FjYTctMTEyN2JjOWEtZjMwZGE2ZWM=, ActorId: [1:7488022629449567154:2969], ActorState: ExecuteState, TraceId: 01jqpk0v9j2rtkpdpkz36y5h94, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TSimple::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191FE23B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x196C634F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25: DoExecute @ 0x18DC0DB7 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18D0BACA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:50: Execute_ @ 0x18DB7B72 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18DBEF27 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18DBEF27 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18DBEF27 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196FD375 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196FD375 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196FD375 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x196CCEC8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18DBE0F3 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x196CE795 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196F78EC 17. ??:0: ?? @ 0x7F6298CFAD8F 18. ??:0: ?? @ 0x7F6298CFAE3F 19. ??:0: ?? @ 0x163B4028 >> KqpSnapshotIsolation::TConflictReadWriteOltp [FAIL] |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> Viewer::QueryExecuteScript [FAIL] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TConflictWriteOltpNoSink [FAIL] Test command err: Trying to start YDB, gRPC: 31878, MsgBus: 19082 2025-03-31T17:12:05.089727Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488022616592814882:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:12:05.089807Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/r3ux/00030d/r3tmp/tmp0ooGhn/pdisk_1.dat 2025-03-31T17:12:05.469971Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31878, node 1 2025-03-31T17:12:05.513814Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T17:12:05.514121Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T17:12:05.516676Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T17:12:05.542219Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T17:12:05.542246Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T17:12:05.542254Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 
2025-03-31T17:12:05.542392Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19082 TClient is connected to server localhost:19082 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T17:12:06.003863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T17:12:07.762159Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022625182750138:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:12:07.762293Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022625182750146:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:12:07.762361Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:12:07.765953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T17:12:07.775757Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488022625182750152:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T17:12:07.849031Z node 1 :TX_PROXY ERROR: Actor# [1:7488022625182750203:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T17:12:08.136766Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T17:12:08.236575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T17:12:09.083140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T17:12:10.136630Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488022616592814882:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:12:10.138195Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T17:12:10.599125Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjRjZTM4NTctYTYyOWZlMzctNWJlMjgwOTctNzgwMDdkZjY=, ActorId: [1:7488022638067660598:2968], ActorState: ExecuteState, TraceId: 01jqpk0wzv5ywekw52pat4xrvj, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191FE23B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x196C634F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76: DoExecute @ 0x18DC7347 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18D0BACA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:112: Execute_ @ 0x18DB841A 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18DBEF27 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18DBEF27 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18DBEF27 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196FD375 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196FD375 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196FD375 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x196CCEC8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18DBE0F3 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x196CE795 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196F78EC 17. ??:0: ?? @ 0x7F509126CD8F 18. ??:0: ?? @ 0x7F509126CE3F 19. ??:0: ?? @ 0x163B4028 >> test_drain.py::TestHive::test_drain_on_stop |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test |94.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> KqpSinkTx::OlapInvalidateOnError [FAIL] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v0] [GOOD] |94.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/multi_plane/py3test >> test_retry.py::TestRetry::test_fail_first[kikimr0] [GOOD] >> KqpSnapshotIsolation::TConflictReadWriteOlap [FAIL] >> TCreateAndDropViewTest::CallDropViewOnTable [GOOD] >> TCreateAndDropViewTest::DropViewIfExists ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TConflictReadWriteOltp [FAIL] Test command err: Trying to start YDB, gRPC: 5260, MsgBus: 27712 2025-03-31T17:12:06.875398Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488022619727579419:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:12:06.875632Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/r3ux/0002d0/r3tmp/tmp2FquRY/pdisk_1.dat 2025-03-31T17:12:07.189020Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5260, node 1 2025-03-31T17:12:07.253412Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T17:12:07.253458Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 
2025-03-31T17:12:07.253475Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T17:12:07.253617Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T17:12:07.269686Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T17:12:07.269796Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T17:12:07.271564Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27712 TClient is connected to server localhost:27712 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T17:12:07.776434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T17:12:09.869450Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022632612481976:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:12:09.869544Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022632612481969:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:12:09.869815Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:12:09.875309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T17:12:09.893936Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488022632612481983:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T17:12:09.966857Z node 1 :TX_PROXY ERROR: Actor# [1:7488022632612482036:2336] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T17:12:10.284280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T17:12:10.424261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T17:12:11.465706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T17:12:12.271463Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488022619727579419:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:12:12.337264Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T17:12:12.918860Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTcwOTBmYTQtZGJiZGUzMTItYjJmMmVkMjUtYjMzODM2YmE=, ActorId: [1:7488022645497392443:2969], ActorState: ExecuteState, TraceId: 01jqpk0z8geeh9ex6jgam68y6a, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictReadWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191FE23B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x196C634F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131: DoExecute @ 0x18DCECE7 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18D0BACA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:159: Execute_ @ 0x18DB8872 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18DBEF27 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18DBEF27 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18DBEF27 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196FD375 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196FD375 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196FD375 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x196CCEC8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18DBE0F3 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x196CE795 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196F78EC 17. ??:0: ?? @ 0x7F98B8C8BD8F 18. ??:0: ?? @ 0x7F98B8C8BE3F 19. ??:0: ?? @ 0x163B4028 |94.3%| [TA] $(B)/ydb/tests/fq/multi_plane/test-results/py3test/{meta.json ... results_accumulator.log} |94.4%| [TA] {RESULT} $(B)/ydb/tests/fq/multi_plane/test-results/py3test/{meta.json ... results_accumulator.log}
|94.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> KqpSnapshotIsolation::TReadOnlyOltp [FAIL] |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_result_limits.py::TestResultLimits::test_large_row ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::OlapInvalidateOnError [FAIL] Test command err: Trying to start YDB, gRPC: 5160, MsgBus: 14196 2025-03-31T17:12:03.592601Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488022608477389524:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:12:03.592660Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/r3ux/000324/r3tmp/tmp4l06lM/pdisk_1.dat 2025-03-31T17:12:03.946430Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T17:12:03.957725Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T17:12:03.957850Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T17:12:03.961351Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5160, node 1 2025-03-31T17:12:04.087652Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T17:12:04.087681Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T17:12:04.087692Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T17:12:04.087848Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14196 TClient is connected to server localhost:14196 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T17:12:04.638090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting...
2025-03-31T17:12:06.557427Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022621362292080:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:12:06.557518Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022621362292055:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:12:06.557995Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:12:06.562101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T17:12:06.572699Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488022621362292084:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T17:12:06.675552Z node 1 :TX_PROXY ERROR: Actor# [1:7488022621362292137:2336] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T17:12:07.015629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T17:12:07.169724Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488022625657259627:2346];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T17:12:07.169929Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488022625657259627:2346];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T17:12:07.170255Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488022625657259627:2346];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T17:12:07.170451Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488022625657259627:2346];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T17:12:07.170613Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488022625657259627:2346];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T17:12:07.170816Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488022625657259627:2346];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T17:12:07.170988Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488022625657259627:2346];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T17:12:07.171145Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488022625657259627:2346];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T17:12:07.171282Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488022625657259627:2346];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T17:12:07.171443Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488022625657259627:2346];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks;
2025-03-31T17:12:07.171607Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488022625657259627:2346];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T17:12:07.171753Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488022625657259627:2346];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T17:12:07.172220Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488022625657259629:2347];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T17:12:07.172291Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488022625657259629:2347];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T17:12:07.172489Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488022625657259629:2347];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T17:12:07.172639Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488022625657259629:2347];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T17:12:07.172771Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488022625657259629:2347];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T17:12:07.172903Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488022625657259629:2347];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T17:12:07.173054Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488022625657259629:2347];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T17:12:07.173231Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488022625657259629:2347];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T17:12:07.173397Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488022625657259629:2347];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T17:12:07.173544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488022625657259629:2347];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T17:12:07.173674Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488022625657259629:2347];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2;
2025-03-31T17:12:07.173842Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488022625657259629:2347];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T17:12:07.210998Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488022625657259659:2351];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T17:12:07.211125Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488022625657259659:2351];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T17:12:07.211370Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_ ... tablet_id=72075186224038081;self_id=[1:7488022642837134514:3284];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038081;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:19.288562Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038022;self_id=[1:7488022642837135094:3390];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038022;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:19.289933Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038040;self_id=[1:7488022642837134952:3363];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038040;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:19.290151Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038040;self_id=[1:7488022642837134952:3363];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038040;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:19.293894Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038021;self_id=[1:7488022642837135099:3392];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038021;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:19.294075Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038021;self_id=[1:7488022642837135099:3392];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038021;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:19.299607Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038039;self_id=[1:7488022642837134945:3360];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038039;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:19.299836Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038028;self_id=[1:7488022642837135058:3380];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038028;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:19.300368Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038039;self_id=[1:7488022642837134945:3360];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038039;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:19.300520Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038028;self_id=[1:7488022642837135058:3380];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038028;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled;
2025-03-31T17:12:19.303757Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038041;self_id=[1:7488022642837135017:3369];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038041;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:19.304140Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038041;self_id=[1:7488022642837135017:3369];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038041;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:19.304618Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038010;self_id=[1:7488022642837135064:3382];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038010;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:19.304967Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038010;self_id=[1:7488022642837135064:3382];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038010;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:19.312303Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038019;self_id=[1:7488022642837135090:3389];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038019;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:19.312621Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038018;self_id=[1:7488022642837135107:3395];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038018;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:19.312804Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038012;self_id=[1:7488022642837135422:3398];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038012;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:19.312957Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038007;self_id=[1:7488022642837135073:3384];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038007;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:19.313413Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038018;self_id=[1:7488022642837135107:3395];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038018;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:19.313556Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038012;self_id=[1:7488022642837135422:3398];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038012;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:19.313666Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038007;self_id=[1:7488022642837135073:3384];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038007;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:19.313676Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038019;self_id=[1:7488022642837135090:3389];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038019;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:19.314337Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[1:7488022642837135085:3387];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038015;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:19.314631Z node 1 :TX_COLUMNSHARD
WARN: tablet_id=72075186224038015;self_id=[1:7488022642837135085:3387];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038015;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:19.315453Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038032;self_id=[1:7488022642837135048:3376];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038032;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:19.315974Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038032;self_id=[1:7488022642837135048:3376];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038032;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:19.321000Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038008;self_id=[1:7488022642837135071:3383];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038008;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:19.321254Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038008;self_id=[1:7488022642837135071:3383];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038008;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:19.321461Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037946;self_id=[1:7488022629952228458:2551];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037946;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:19.321661Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037946;self_id=[1:7488022629952228458:2551];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037946;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:19.321960Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038013;self_id=[1:7488022642837135046:3375];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038013;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:19.322121Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038013;self_id=[1:7488022642837135046:3375];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038013;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:19.333291Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038004;self_id=[1:7488022642837135062:3381];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038004;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:19.333342Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038020;self_id=[1:7488022642837135096:3391];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038020;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:19.333577Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038004;self_id=[1:7488022642837135062:3381];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038004;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:19.333591Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038020;self_id=[1:7488022642837135096:3391];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038020;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; assertion failed at 
ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:182, virtual void NKikimr::NKqp::NTestSuiteKqpSinkTx::TInvalidateOnError::DoExecute(): (result.GetStatus() == EStatus::PRECONDITION_FAILED) failed: (BAD_REQUEST != PRECONDITION_FAILED)
: Error: Bad request. Table: `/Root/KV`., code: 2017
: Error: Conflict with existing key. {"sorting_columns":[{"name":"Key","value":"1"}],"fields":["Key: uint32"]}, code: 2017 , with diff: (BAD_|P)RE(QUES|CONDI)T(|ION_FAILED) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191FE23B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x196C634F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:182: DoExecute @ 0x18D9B8AE 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18D0BACA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:201: Execute_ @ 0x18D7A4FA 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14: operator() @ 0x18D81977 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14:1) &> @ 0x18D81977 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14:1) &> @ 0x18D81977 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18D81977 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18D81977 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196FD375 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196FD375 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196FD375 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x196CCEC8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14: Execute @ 0x18D80B43 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x196CE795 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196F78EC 17. ??:0: ?? @ 0x7F9607773D8F 18. ??:0: ?? @ 0x7F9607773E3F 19. ??:0: ?? @ 0x163B4028 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TConflictReadWriteOlap [FAIL] Test command err: Trying to start YDB, gRPC: 21788, MsgBus: 23278 2025-03-31T17:12:05.115245Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488022616822248368:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:12:05.115365Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/r3ux/000304/r3tmp/tmpNjszkZ/pdisk_1.dat TServer::EnableGrpc on GrpcPort 21788, node 1 2025-03-31T17:12:05.447784Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T17:12:05.450919Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T17:12:05.463495Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T17:12:05.476647Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T17:12:05.476818Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T17:12:05.478933Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T17:12:05.480942Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T17:12:05.480987Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T17:12:05.480994Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 
2025-03-31T17:12:05.481140Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23278 TClient is connected to server localhost:23278 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T17:12:05.956273Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T17:12:07.632664Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022625412183605:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:12:07.632879Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022625412183624:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:12:07.632955Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:12:07.637159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T17:12:07.646758Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488022625412183634:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T17:12:07.716331Z node 1 :TX_PROXY ERROR: Actor# [1:7488022625412183685:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T17:12:07.959138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T17:12:08.084094Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488022629707151164:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T17:12:08.084335Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488022629707151164:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T17:12:08.084655Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488022629707151164:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T17:12:08.084799Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488022629707151164:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T17:12:08.084996Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488022629707151164:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T17:12:08.085146Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488022629707151164:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T17:12:08.085300Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488022629707151164:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T17:12:08.085439Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488022629707151164:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T17:12:08.085581Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488022629707151164:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T17:12:08.085708Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488022629707151164:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T17:12:08.085859Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7488022629707151164:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T17:12:08.085878Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488022629707151162:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T17:12:08.085928Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488022629707151162:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T17:12:08.085989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488022629707151164:2344];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T17:12:08.086152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488022629707151162:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T17:12:08.086276Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488022629707151162:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T17:12:08.086429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488022629707151162:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T17:12:08.086553Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488022629707151162:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T17:12:08.086685Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488022629707151162:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T17:12:08.086844Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488022629707151162:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T17:12:08.086985Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488022629707151162:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T17:12:08.087140Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488022629707151162:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T17:12:08.087260Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488022629707151162:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T17:12:08.087384Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7488022629707151162:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T17:12:08.119746Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7488022629707151193:2349];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T17:12:08.119747Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488022629707151182:2346];tablet_id=72075186224037891;process=TTxInitSchema::Exe ... 6224038060;self_id=[1:7488022646887026506:3372];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038060;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:19.404916Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038060;self_id=[1:7488022646887026506:3372];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038060;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:19.512152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038032;self_id=[1:7488022646887026733:3480];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038032;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:19.512154Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038093;self_id=[1:7488022646887025874:3202];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038093;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:19.512494Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038052;self_id=[1:7488022646887026615:3454];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038052;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:19.512902Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038032;self_id=[1:7488022646887026733:3480];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038032;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:19.512918Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038093;self_id=[1:7488022646887025874:3202];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038093;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:19.513060Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038052;self_id=[1:7488022646887026615:3454];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038052;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:19.517097Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038054;self_id=[1:7488022646887026705:3473];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038054;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:19.517522Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038054;self_id=[1:7488022646887026705:3473];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038054;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:19.517663Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038055;self_id=[1:7488022646887026646:3459];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038055;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:19.517942Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038055;self_id=[1:7488022646887026646:3459];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038055;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:19.520675Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038069;self_id=[1:7488022646887026643:3458];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038069;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:19.520743Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038068;self_id=[1:7488022646887026611:3452];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038068;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:19.520935Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038069;self_id=[1:7488022646887026643:3458];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038069;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:19.521050Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038068;self_id=[1:7488022646887026611:3452];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038068;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:19.521132Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038023;self_id=[1:7488022646887026639:3456];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038023;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:19.521249Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038056;self_id=[1:7488022646887026721:3476];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038056;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:19.521287Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038023;self_id=[1:7488022646887026639:3456];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038023;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:19.521401Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038056;self_id=[1:7488022646887026721:3476];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038056;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:19.525376Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038028;self_id=[1:7488022646887026906:3487];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038028;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:19.525378Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038031;self_id=[1:7488022646887026682:3470];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038031;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:19.525585Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038028;self_id=[1:7488022646887026906:3487];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038028;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:19.525636Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224038031;self_id=[1:7488022646887026682:3470];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038031;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:19.525756Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038065;self_id=[1:7488022646887026570:3425];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038065;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:19.525908Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038065;self_id=[1:7488022646887026570:3425];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038065;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:19.527344Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038027;self_id=[1:7488022646887027022:3489];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038027;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:19.527430Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038046;self_id=[1:7488022646887026743:3483];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038046;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:19.527521Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038027;self_id=[1:7488022646887027022:3489];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038027;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:19.527640Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038046;self_id=[1:7488022646887026743:3483];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038046;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:19.527687Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038042;self_id=[1:7488022646887026665:3465];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038042;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:19.527805Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038024;self_id=[1:7488022646887026747:3484];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038024;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:19.527834Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038042;self_id=[1:7488022646887026665:3465];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038042;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:19.528018Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038024;self_id=[1:7488022646887026747:3484];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038024;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:19.528022Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038059;self_id=[1:7488022646887026650:3461];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038059;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:19.528169Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038059;self_id=[1:7488022646887026650:3461];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038059;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; assertion failed at 
ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:146, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictReadWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (ABORTED != SUCCESS)
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: tablet lock have another internal generation counter: 18446744073709551615 != 0, code: 2001 , with diff: (ABORT|SUCC)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191FE23B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x196C634F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:146: DoExecute @ 0x18DD15F8 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18D0BACA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:172: Execute_ @ 0x18DB8CCA 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18DBEF27 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18DBEF27 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18DBEF27 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196FD375 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196FD375 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196FD375 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x196CCEC8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18DBE0F3 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x196CE795 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196F78EC 17. ??:0: ?? @ 0x7FF18CFEAD8F 18. ??:0: ?? @ 0x7FF18CFEAE3F 19. ??:0: ?? 
@ 0x163B4028 >> test_sql_streaming.py::test[suites-GroupByHoppingWindowByStringKey-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowExprKey-default.txt] >> test_sql_streaming.py::test[suites-ReadTwoTopics-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-ReadWriteSameTopic-default.txt] >> test_sql_streaming.py::test[suites-GroupByHopExprKey-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHopListKey-default.txt] >> test_sql_streaming.py::test[suites-ReadTopic-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-ReadTopicGroupWriteToSolomon-default.txt] |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> TCreateAndDropViewTest::DropViewIfExists [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TReadOnlyOltp [FAIL] Test command err: Trying to start YDB, gRPC: 24571, MsgBus: 4230 2025-03-31T17:12:10.879354Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488022637074363714:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:12:10.879438Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/r3ux/0002a5/r3tmp/tmp4PHOJ8/pdisk_1.dat 2025-03-31T17:12:11.298460Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24571, node 1 2025-03-31T17:12:11.320784Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T17:12:11.320922Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T17:12:11.326650Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T17:12:11.411938Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T17:12:11.411970Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T17:12:11.411982Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T17:12:11.412098Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4230 TClient is connected to server localhost:4230 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-31T17:12:12.034478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T17:12:12.048754Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T17:12:14.195109Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022654254233537:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:12:14.195263Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:12:14.195554Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022654254233564:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:12:14.200542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T17:12:14.215962Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488022654254233566:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T17:12:14.304593Z node 1 :TX_PROXY ERROR: Actor# [1:7488022654254233617:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T17:12:14.601315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T17:12:14.718919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T17:12:15.685629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T17:12:16.342205Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488022637074363714:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:12:16.425035Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T17:12:17.064100Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTcxODRmN2YtODU4Y2NhN2EtNTQ3NGUzNjMtNTdiOTIwM2E=, ActorId: [1:7488022662844176597:2969], ActorState: ExecuteState, TraceId: 01jqpk1370bwkk9ap46nd3gnar, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TReadOnly::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191FE23B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x196C634F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185: DoExecute @ 0x18DD66A3 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18D0BACA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:214: Execute_ @ 0x18DB8EF2 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18DBEF27 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18DBEF27 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18DBEF27 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196FD375 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196FD375 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196FD375 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x196CCEC8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18DBE0F3 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x196CE795 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196F78EC 17. ??:0: ?? @ 0x7F8D2B72CD8F 18. ??:0: ?? @ 0x7F8D2B72CE3F 19. ??:0: ?? @ 0x163B4028 |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |95.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/view/unittest >> TCreateAndDropViewTest::DropViewIfExists [GOOD] Test command err: Trying to start YDB, gRPC: 24294, MsgBus: 63170 2025-03-31T17:12:12.379160Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488022646944333109:2207];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:12:12.379263Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/r3ux/000163/r3tmp/tmp54vDtv/pdisk_1.dat 2025-03-31T17:12:12.879628Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T17:12:12.899506Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T17:12:12.900185Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T17:12:12.922826Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24294, node 1 2025-03-31T17:12:13.237058Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T17:12:13.237155Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T17:12:13.237162Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 
2025-03-31T17:12:13.237302Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63170 TClient is connected to server localhost:63170 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T17:12:14.108280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T17:12:15.471864Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022659829235507:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:12:15.471864Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022659829235519:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:12:15.472023Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:12:15.481017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T17:12:15.493261Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488022659829235521:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T17:12:15.564021Z node 1 :TX_PROXY ERROR: Actor# [1:7488022659829235572:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T17:12:16.095332Z node 1 :TX_PROXY ERROR: Actor# [1:7488022664124202890:2352] txid# 281474976710660, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } 2025-03-31T17:12:16.106479Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjRhZjE2ZjUtNmIwZDViNWMtNDNmNGI1YzEtYjU0MDcxZjA=, ActorId: [1:7488022659829235503:2329], ActorState: ExecuteState, TraceId: 01jqpk11v05gny3e0cejkjs06x, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 17439, MsgBus: 29333 2025-03-31T17:12:16.907896Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7488022665365648247:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:12:16.907957Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/r3ux/000163/r3tmp/tmpYnSefS/pdisk_1.dat 2025-03-31T17:12:17.027701Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17439, node 2 2025-03-31T17:12:17.067980Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T17:12:17.068097Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T17:12:17.069924Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T17:12:17.098285Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T17:12:17.098308Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T17:12:17.098315Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T17:12:17.098406Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29333 TClient is connected to server localhost:29333 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-03-31T17:12:17.576833Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T17:12:17.593905Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T17:12:19.881434Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488022678250550799:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:12:19.881435Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7488022678250550791:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:12:19.881533Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:12:19.885188Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T17:12:19.896886Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7488022678250550805:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T17:12:19.959234Z node 2 :TX_PROXY ERROR: Actor# [2:7488022678250550858:2334] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T17:12:20.074261Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-31T17:12:20.243567Z node 2 :TX_PROXY ERROR: Actor# [2:7488022682545518276:2405] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/table\', error: path is not a view (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T17:12:20.243687Z node 2 :KQP_GATEWAY ERROR: Unexpected error on scheme request, TxId: 281474976715661, ProxyStatus: ExecError, SchemeShardReason: Check failed: path: '/Root/table', error: path is not a view (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeTable, state: EPathStateNoChanges) 2025-03-31T17:12:20.243959Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NzA2NTZkMWYtYzQ0M2FhOTktNzQwY2VkNTYtMzMyYjE5YmY=, ActorId: [2:7488022678250550787:2328], ActorState: ExecuteState, TraceId: 01jqpk16g2be9z1mqmc2eyhgyr, Create QueryResponse for error on request, msg: 2025-03-31T17:12:20.267775Z node 2 :TX_PROXY ERROR: Actor# [2:7488022682545518290:2413] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/table\', error: path is not a view (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T17:12:20.267890Z node 2 :KQP_GATEWAY ERROR: Unexpected error on scheme request, TxId: 281474976715663, ProxyStatus: ExecError, SchemeShardReason: Check failed: path: '/Root/table', error: path is not a view (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeTable, state: EPathStateNoChanges) 2025-03-31T17:12:20.268101Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NzA2NTZkMWYtYzQ0M2FhOTktNzQwY2VkNTYtMzMyYjE5YmY=, ActorId: [2:7488022678250550787:2328], ActorState: ExecuteState, TraceId: 01jqpk16gt7s4qq68tqfkkj945, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 28507, MsgBus: 22159 2025-03-31T17:12:21.182934Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7488022685753086567:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:12:21.183056Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/r3ux/000163/r3tmp/tmpqlIQ4L/pdisk_1.dat 2025-03-31T17:12:21.368300Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-31T17:12:21.378371Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T17:12:21.378461Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T17:12:21.380962Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 
TServer::EnableGrpc on GrpcPort 28507, node 3 2025-03-31T17:12:21.467554Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T17:12:21.467580Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T17:12:21.467590Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T17:12:21.467708Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22159 TClient is connected to server localhost:22159 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T17:12:22.083533Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T17:12:22.097230Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-31T17:12:24.984357Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488022698637989113:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:12:24.984466Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:12:24.984847Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7488022698637989125:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:12:24.988302Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T17:12:25.000675Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7488022698637989127:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T17:12:25.065369Z node 3 :TX_PROXY ERROR: Actor# [3:7488022702932956474:2336] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-31T17:12:25.408665Z node 3 :TX_PROXY ERROR: Actor# [3:7488022702932956552:2385] txid# 281474976715662, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 }
|95.1%| [TM] {RESULT} ydb/core/kqp/ut/view/unittest
|95.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
>> test_postgres.py::TestPostgresSuite::test_postgres_suite[float8] [GOOD]
>> KqpStats::SysViewClientLost [FAIL]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::TTLUsage
Test command err:
2025-03-31T17:11:51.058515Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T17:11:51.058817Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T17:11:51.059002Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/r3ux/0003d9/r3tmp/tmpUhqMhS/pdisk_1.dat TServer::EnableGrpc on GrpcPort 20774, node 1 TClient is connected to server localhost:5503 2025-03-31T17:11:51.977798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T17:11:52.040786Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T17:11:52.042306Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T17:11:52.042370Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T17:11:52.042411Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T17:11:52.043579Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T17:11:52.080243Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T17:11:52.080944Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T17:11:52.093393Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T17:11:52.225197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715657:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715657 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-03-31T17:11:52.329615Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:688:2580], Recipient [1:746:2626]: NKikimr::TEvTablet::TEvBoot 2025-03-31T17:11:52.330629Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:688:2580], Recipient [1:746:2626]: NKikimr::TEvTablet::TEvRestored 2025-03-31T17:11:52.330889Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:746:2626];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T17:11:52.347777Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:746:2626];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T17:11:52.348073Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037888 2025-03-31T17:11:52.355065Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T17:11:52.355311Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T17:11:52.355540Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T17:11:52.355629Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T17:11:52.355697Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T17:11:52.355813Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T17:11:52.355899Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T17:11:52.355979Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T17:11:52.356076Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T17:11:52.356157Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T17:11:52.356248Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T17:11:52.356331Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T17:11:52.372092Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:688:2580], Recipient [1:746:2626]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T17:11:52.373281Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:689:2581], Recipient [1:749:2629]: NKikimr::TEvTablet::TEvBoot 2025-03-31T17:11:52.374220Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 72075186224037888 2025-03-31T17:11:52.374345Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:689:2581], Recipient [1:749:2629]: NKikimr::TEvTablet::TEvRestored 2025-03-31T17:11:52.374610Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037889;self_id=[1:749:2629];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T17:11:52.403458Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037889;self_id=[1:749:2629];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T17:11:52.403712Z node 1 :TX_COLUMNSHARD 
DEBUG: TxInitSchema.Execute at tablet 72075186224037889 2025-03-31T17:11:52.409794Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:749:2629];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T17:11:52.409914Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:749:2629];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T17:11:52.410180Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:749:2629];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T17:11:52.410307Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:749:2629];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T17:11:52.410436Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:749:2629];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T17:11:52.410551Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:749:2629];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T17:11:52.410664Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:749:2629];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T17:11:52.410785Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:749:2629];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T17:11:52.410922Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:749:2629];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T17:11:52.411052Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:749:2629];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T17:11:52.411209Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:749:2629];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T17:11:52.411336Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:749:2629];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T17:11:52.414496Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T17:11:52.414566Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 
2025-03-31T17:11:52.414724Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T17:11:52.414834Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T17:11:52.414888Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T17:11:52.414924Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T17:11:52.415015Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T17:11:52.415068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id ... e=0;count=0;;48:size=0;count=0;;49:size=0;count=0;;50:size=0;count=0;;51:size=0;count=0;;52:size=0;count=0;;53:size=0;count=0;;54:size=0;count=0;;55:size=0;count=0;;56:size=0;count=0;;57:size=0;count=0;;58:size=0;count=0;;59:size=0;count=0;;60:size=0;count=0;;61:size=0;count=0;;62:size=0;count=0;;63:size=0;count=0;;64:size=0;count=0;;65:size=0;count=0;; 2025-03-31T17:12:22.658418Z node 1 :TX_COLUMNSHARD TRACE: StateWork, received event# 2146435085, Sender [1:1313:3115], Recipient [1:746:2626]: NKikimr::NColumnShard::TEvPrivate::TEvGarbageCollectionFinished 2025-03-31T17:12:22.659005Z node 1 :TX_COLUMNSHARD TRACE: StateWork, received event# 2146435073, Sender [1:1314:3116], Recipient [1:746:2626]: NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex 2025-03-31T17:12:22.659046Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 72075186224037888 2025-03-31T17:12:22.659340Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[31] (CS::GENERAL) apply at tablet 72075186224037888 2025-03-31T17:12:22.662774Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 72075186224037888 Save Batch GenStep: 1:21 Blob count: 1 2025-03-31T17:12:22.662928Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=2911808;raw_bytes=96858255;count=2;records=82491} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=18275528;raw_bytes=616499669;count=8;records=517509} inactive {blob_bytes=26052856;raw_bytes=873325723;count=17;records=740034} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 72075186224037888 TEvBlobStorage::TEvPut 
tId=72075186224037888;c=1;:74/0:size=2465;count=18;;1:size=57218;count=9;;2:size=0;count=0;;3:size=1466448;count=1;;4:size=1479208;count=1;;5:size=1458600;count=1;;6:size=1445408;count=1;;7:size=1445928;count=1;;8:size=1445448;count=1;;9:size=1445400;count=1;;10:size=1445528;count=1;;11:size=808584;count=1;;12:size=4463248;count=6;;13:size=1445744;count=1;;14:size=1445360;count=1;;15:size=1445608;count=1;;16:size=1225528;count=1;;17:size=1425984;count=1;;18:size=1426000;count=1;;19:size=1445376;count=1;;20:size=1445920;count=1;;21:size=1445360;count=1;;22:size=2010824;count=3;;23:size=1215392;count=1;;24:size=0;count=0;;25:size=0;count=0;;26:size=0;count=0;;27:size=0;count=0;;28:size=0;count=0;;29:size=0;count=0;;30:size=0;count=0;;31:size=0;count=0;;32:size=0;count=0;;33:size=0;count=0;;34:size=0;count=0;;35:size=0;count=0;;36:size=0;count=0;;37:size=0;count=0;;38:size=0;count=0;;39:size=0;count=0;;40:size=0;count=0;;41:size=0;count=0;;42:size=0;count=0;;43:size=0;count=0;;44:size=0;count=0;;45:size=0;count=0;;46:size=0;count=0;;47:size=0;count=0;;48:size=0;count=0;;49:size=0;count=0;;50:size=0;count=0;;51:size=0;count=0;;52:size=0;count=0;;53:size=0;count=0;;54:size=0;count=0;;55:size=0;count=0;;56:size=0;count=0;;57:size=0;count=0;;58:size=0;count=0;;59:size=0;count=0;;60:size=0;count=0;;61:size=0;count=0;;62:size=0;count=0;;63:size=0;count=0;;64:size=0;count=0;;65:size=0;count=0;; TEvBlobStorage::TEvPut tId=72075186224037888;c=0;:74/0:size=2534;count=19;;1:size=57218;count=9;;2:size=0;count=0;;3:size=1466448;count=1;;4:size=1479208;count=1;;5:size=1458600;count=1;;6:size=1445408;count=1;;7:size=1445928;count=1;;8:size=1445448;count=1;;9:size=1445400;count=1;;10:size=1445528;count=1;;11:size=808584;count=1;;12:size=4463248;count=6;;13:size=1445744;count=1;;14:size=1445360;count=1;;15:size=1445608;count=1;;16:size=1225528;count=1;;17:size=1425984;count=1;;18:size=1426000;count=1;;19:size=1445376;count=1;;20:size=1445920;count=1;;21:size=1445360;count=1;;22:size=2010824;count=3;;23:size=1215392;count=1;;24:size=0;count=0;;25:size=0;count=0;;26:size=0;count=0;;27:size=0;count=0;;28:size=0;count=0;;29:size=0;count=0;;30:size=0;count=0;;31:size=0;count=0;;32:size=0;count=0;;33:size=0;count=0;;34:size=0;count=0;;35:size=0;count=0;;36:size=0;count=0;;37:size=0;count=0;;38:size=0;count=0;;39:size=0;count=0;;40:size=0;count=0;;41:size=0;count=0;;42:size=0;count=0;;43:size=0;count=0;;44:size=0;count=0;;45:size=0;count=0;;46:size=0;count=0;;47:size=0;count=0;;48:size=0;count=0;;49:size=0;count=0;;50:size=0;count=0;;51:size=0;count=0;;52:size=0;count=0;;53:size=0;count=0;;54:size=0;count=0;;55:size=0;count=0;;56:size=0;count=0;;57:size=0;count=0;;58:size=0;count=0;;59:size=0;count=0;;60:size=0;count=0;;61:size=0;count=0;;62:size=0;count=0;;63:size=0;count=0;;64:size=0;count=0;;65:size=0;count=0;; 2025-03-31T17:12:22.675076Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=4ec366b6-e5311f0-a83955d1-552be0b5;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::GENERAL;success=1; 2025-03-31T17:12:22.675170Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=4ec366b6-e5311f0-a83955d1-552be0b5;fline=with_appended.cpp:65;portions=28,;task_id=4ec366b6-e5311f0-a83955d1-552be0b5; 2025-03-31T17:12:22.675473Z node 1 :TX_COLUMNSHARD TRACE: 
tablet_id=72075186224037888;task_id=4ec366b6-e5311f0-a83955d1-552be0b5;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:28;path_id:3;records_count:85687;min_schema_snapshot:(plan_step=1500;tx_id=281474976715658;);schema_version:1;level:0;column_size:3018840;index_size:0;meta:((produced=SPLIT_COMPACTED;)););path_id=3; 2025-03-31T17:12:22.675695Z node 1 :TX_COLUMNSHARD TRACE: tablet_id=72075186224037888;task_id=4ec366b6-e5311f0-a83955d1-552be0b5;fline=tiering.cpp:49;tiering_info=__DEFAULT/0.000000s;$$DELETE/599999.000000s;; 2025-03-31T17:12:22.675802Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=4ec366b6-e5311f0-a83955d1-552be0b5;fline=manager.cpp:15;event=unlock;process_id=CS::GENERAL::4ec366b6-e5311f0-a83955d1-552be0b5; 2025-03-31T17:12:22.675872Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=4ec366b6-e5311f0-a83955d1-552be0b5;fline=granule.cpp:101;event=OnCompactionFinished;info=(granule:3;path_id:3;size:21192904;portions_count:28;); 2025-03-31T17:12:22.675924Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=4ec366b6-e5311f0-a83955d1-552be0b5;tablet_id=72075186224037888;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-31T17:12:22.675988Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=4ec366b6-e5311f0-a83955d1-552be0b5;tablet_id=72075186224037888;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T17:12:22.676074Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=4ec366b6-e5311f0-a83955d1-552be0b5;tablet_id=72075186224037888;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=1; 2025-03-31T17:12:22.676144Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=4ec366b6-e5311f0-a83955d1-552be0b5;tablet_id=72075186224037888;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=21000; 2025-03-31T17:12:22.676189Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=4ec366b6-e5311f0-a83955d1-552be0b5;tablet_id=72075186224037888;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-31T17:12:22.676236Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=4ec366b6-e5311f0-a83955d1-552be0b5;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-31T17:12:22.676275Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=4ec366b6-e5311f0-a83955d1-552be0b5;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-31T17:12:22.676351Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=4ec366b6-e5311f0-a83955d1-552be0b5;tablet_id=72075186224037888;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.599000s; 2025-03-31T17:12:22.676402Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=4ec366b6-e5311f0-a83955d1-552be0b5;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-31T17:12:22.676607Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 72075186224037888 Save Batch GenStep: 1:21 Blob count: 1 VERIFY failed (2025-03-31T17:12:22.676829Z): 
tablet_id=72075186224037888;task_id=4ec366b6-e5311f0-a83955d1-552be0b5;verification=CompactionsLimit.Dec() >= 0;fline=ro_controller.cpp:39;
ydb/library/actors/core/log.cpp:754 ~TVerifyFormattedRecordWriter(): requirement false failed
NPrivate::InternalPanicImpl(int, char const*, char const*, int, int, int, TBasicStringBuf>, char const*, unsigned long)+873 (0x18BCFDF9)
NPrivate::Panic(NPrivate::TStaticBuf const&, int, char const*, char const*, char const*, ...)+571 (0x18BBE08B)
NActors::TVerifyFormattedRecordWriter::~TVerifyFormattedRecordWriter()+326 (0x19ED5BF6)
NKikimr::NYDBTest::NColumnShard::TReadOnlyController::DoOnWriteIndexComplete(NKikimr::NOlap::TColumnEngineChanges const&, NKikimr::NColumnShard::TColumnShard const&)+4577 (0x485F4931)
NKikimr::NColumnShard::TTxWriteIndex::Complete(NActors::TActorContext const&)+4797 (0x305CB3BD)
NKikimr::NTabletFlatExecutor::TSeat::Complete(NActors::TActorContext const&, bool)+899 (0x1E8C20F3)
NKikimr::NTabletFlatExecutor::TLogicRedo::Confirm(unsigned int, NActors::TActorContext const&, NActors::TActorId const&)+3856 (0x1E7A5AD0)
NKikimr::NTabletFlatExecutor::TExecutor::Handle(TAutoPtr, TDelete>&, NActors::TActorContext const&)+3444 (0x1E5EBEC4)
NKikimr::NTabletFlatExecutor::TExecutor::StateWork(TAutoPtr&)+2821 (0x1E588C45)
NActors::IActor::Receive(TAutoPtr&)+237 (0x19E073CD)
NActors::TTestActorRuntimeBase::SendInternal(TAutoPtr, unsigned int, bool)+3557 (0x358DC3C5)
NActors::TTestActorRuntimeBase::DispatchEventsInternal(NActors::TDispatchOptions const&, TInstant)+12602 (0x358D4C3A)
NActors::TTestActorRuntimeBase::WaitForEdgeEvents(std::__y1::function&)>, TSet, std::__y1::allocator> const&, TDuration)+1076 (0x358DEFB4)
NActors::TEvents::TEvWakeup::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEventIf(TSet, std::__y1::allocator> const&, std::__y1::function const&, TDuration)+292 (0x35AAC134)
NActors::TEvents::TEvWakeup::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEvent(NActors::TActorId const&, TDuration)+419 (0x35AAB253)
NActors::TEvents::TEvWakeup::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEventRethrow(NActors::TActorId const&, TDuration)+307 (0x35AA34B3)
NActors::TTestActorRuntime::SimulateSleep(TDuration)+1115 (0x35AA308B)
NKikimr::NTestSuiteColumnShardTiers::TTestCaseTTLUsage::Execute_(NUnitTest::TTestContext&)+4702 (0x187AE7FE)
std::__y1::__function::__func, void ()>::operator()()+280 (0x187C03B8)
TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x1907CB16)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x1904C649)
NKikimr::NTestSuiteColumnShardTiers::TCurrentTest::Execute()+1204 (0x187BF364)
NUnitTest::TTestFactory::Execute()+2438 (0x1904DF16)
NUnitTest::RunMain(int, char**)+5213 (0x1907708D)
??+0 (0x7FA60AAD0D90)
__libc_start_main+128 (0x7FA60AAD0E40)
_start+41 (0x16133029)
|95.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
>> KqpSnapshotIsolation::TConflictWriteOlap [FAIL]
|95.3%| [TA] $(B)/ydb/core/tx/tiering/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|95.4%| [TA] {RESULT} $(B)/ydb/core/tx/tiering/ut/test-results/unittest/{meta.json ...
results_accumulator.log} |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> Balancing::Balancing_OneTopic_PQv1 [GOOD] |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> ReadIteratorExternalBlobs::NotExtBlobs [GOOD] >> TopicAutoscaling::ReadingAfterSplitTest_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::ReadingAfterSplitTest_AutoscaleAwareSDK [GOOD] |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> Balancing::Balancing_OneTopic_PQv1 [GOOD] Test command err: 2025-03-31T17:12:10.179927Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488022637742050303:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:12:10.180028Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-31T17:12:10.407997Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/r3ux/0002dc/r3tmp/tmp5StXi4/pdisk_1.dat 2025-03-31T17:12:10.649854Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T17:12:10.650019Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T17:12:10.652247Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T17:12:10.674259Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62030, node 1 2025-03-31T17:12:10.855789Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/r3ux/0002dc/r3tmp/yandexsyM1SA.tmp 2025-03-31T17:12:10.855814Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/r3ux/0002dc/r3tmp/yandexsyM1SA.tmp 2025-03-31T17:12:10.856856Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/r3ux/0002dc/r3tmp/yandexsyM1SA.tmp 2025-03-31T17:12:10.857006Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T17:12:11.124765Z INFO: TTestServer started on Port 32520 GrpcPort 62030 TClient is connected to server localhost:32520 PQClient connected to localhost:62030 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T17:12:11.508303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T17:12:11.523300Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-31T17:12:11.537550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T17:12:11.694811Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 2025-03-31T17:12:11.716588Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2025-03-31T17:12:13.599637Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022650626953014:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:12:13.599773Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022650626953006:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:12:13.601536Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:12:13.604058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-03-31T17:12:13.616993Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488022650626953035:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-03-31T17:12:13.892613Z node 1 :TX_PROXY ERROR: Actor# [1:7488022650626953099:2453] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T17:12:13.936952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T17:12:14.008653Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488022650626953107:2349], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T17:12:14.008930Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjI4MGE2MTQtZjI5NzU0ZmItMWViODNlYjEtNjg4M2NkNg==, ActorId: [1:7488022650626953003:2337], ActorState: ExecuteState, TraceId: 01jqpk100a8fb8etkadnmcrhva, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T17:12:14.010699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T17:12:14.011725Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T17:12:14.088389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7488022654921920703:2636] 2025-03-31T17:12:15.180013Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488022637742050303:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:12:15.180096Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2025-03-31T17:12:20.725030Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-03-31T17:12:20.754495Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-03-31T17:12:20.755927Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7488022680691724793:2815], Recipient [1:7488022637742050724:2185]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T17:12:20.755969Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T17:12:20.755987Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-03-31T17:12:20.756026Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7488022680691724789:2812], Recipient [1:7488022637742050724:2185]: {TEvModifySchemeTransaction txid# 281474976710673 TabletId# 72057594046644480} 2025-03-31T17:12:20.756044Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-03-31T17:12:20.896920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 10 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976710673 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-03-31T17:12:20.897509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T17:12:20.897836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: test-topic, child id: [OwnerId: 72057594046644480, LocalPathId: 13], at schemeshard: 72057594046644480 2025-03-31T17:12:20.897887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 0 2025-03-31T17:12:20.897919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 1 2025-03-31T17:12:20.897955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 2 2025-03-31T17:12:20.897986Z node 1 :FLAT_TX_SCHEMESHA ... 
eIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-31T17:12:28.640193Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 8, State: StateIdle] Have 0 items to delete old stuff 2025-03-31T17:12:28.640252Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 8, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-31T17:12:28.640274Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 8, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-31T17:12:28.640298Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 8, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-03-31T17:12:28.640358Z node 1 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [1:7488022680691724868:2471], Partition 0, Sender [0:0:0], Recipient [1:7488022684986692521:2505], Cookie: 0 2025-03-31T17:12:28.640392Z node 1 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [1:7488022684986692521:2505]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-31T17:12:28.640408Z node 1 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-31T17:12:28.640426Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-03-31T17:12:28.640498Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-31T17:12:28.640536Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-31T17:12:28.640553Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-03-31T17:12:28.640608Z node 1 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [1:7488022680691724883:2478], Partition 3, Sender [0:0:0], Recipient [1:7488022684986692538:2511], Cookie: 0 2025-03-31T17:12:28.640656Z node 1 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [1:7488022684986692538:2511]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-31T17:12:28.640671Z node 1 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-31T17:12:28.640694Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 3, State: StateIdle] Have 0 items to delete old stuff 2025-03-31T17:12:28.640725Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 3, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-31T17:12:28.640738Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 3, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-31T17:12:28.640753Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 3, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-03-31T17:12:28.640803Z node 1 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [1:7488022680691724871:2474], Partition 7, Sender [0:0:0], Recipient [1:7488022684986692518:2502], Cookie: 0 2025-03-31T17:12:28.640846Z node 1 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [1:7488022684986692518:2502]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-31T17:12:28.640857Z node 1 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-31T17:12:28.640876Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037900, Partition: 7, State: StateIdle] Have 0 items to delete old stuff 2025-03-31T17:12:28.640902Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037900, Partition: 7, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-31T17:12:28.640913Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037900, Partition: 7, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-31T17:12:28.640927Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037900, Partition: 7, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-03-31T17:12:28.640966Z node 1 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [1:7488022680691724873:2476], Partition 2, Sender [0:0:0], Recipient [1:7488022684986692517:2501], Cookie: 0 2025-03-31T17:12:28.640990Z node 1 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [1:7488022684986692517:2501]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-31T17:12:28.640999Z node 1 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-31T17:12:28.641017Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-03-31T17:12:28.641037Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-31T17:12:28.641047Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-31T17:12:28.641065Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-03-31T17:12:28.641123Z node 1 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [1:7488022680691724870:2473], Partition 1, Sender [0:0:0], Recipient [1:7488022684986692523:2507], Cookie: 0 2025-03-31T17:12:28.641167Z node 1 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [1:7488022684986692523:2507]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-31T17:12:28.641179Z node 1 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-31T17:12:28.641198Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037894, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-03-31T17:12:28.641226Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037894, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-31T17:12:28.641237Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037894, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-31T17:12:28.641252Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037894, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-03-31T17:12:28.641289Z node 1 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [1:7488022680691724887:2480], Partition 6, Sender [0:0:0], Recipient [1:7488022684986692542:2515], Cookie: 0 2025-03-31T17:12:28.641319Z node 1 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [1:7488022684986692542:2515]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-31T17:12:28.641330Z node 1 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-31T17:12:28.641347Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037895, Partition: 6, State: StateIdle] Have 0 items to delete old stuff 2025-03-31T17:12:28.641370Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037895, Partition: 6, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-31T17:12:28.641383Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037895, Partition: 6, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-31T17:12:28.641397Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037895, Partition: 6, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-03-31T17:12:28.641457Z node 1 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [1:7488022680691724881:2477], Partition 9, Sender [0:0:0], Recipient [1:7488022684986692545:2518], Cookie: 0 2025-03-31T17:12:28.641487Z node 1 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [1:7488022684986692545:2518]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-31T17:12:28.641511Z node 1 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-31T17:12:28.641530Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 9, State: StateIdle] Have 0 items to delete old stuff 2025-03-31T17:12:28.641554Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 9, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-31T17:12:28.641564Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 9, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-31T17:12:28.641577Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 9, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-03-31T17:12:28.641610Z node 1 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [1:7488022680691724884:2479], Partition 5, Sender [0:0:0], Recipient [1:7488022684986692540:2513], Cookie: 0 2025-03-31T17:12:28.641642Z node 1 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [1:7488022684986692540:2513]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-31T17:12:28.641652Z node 1 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-31T17:12:28.641668Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 5, State: StateIdle] Have 0 items to delete old stuff 2025-03-31T17:12:28.641688Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 5, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-31T17:12:28.641697Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 5, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-31T17:12:28.641708Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 5, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-03-31T17:12:28.641736Z node 1 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [1:7488022680691724872:2475], Partition 4, Sender [0:0:0], Recipient [1:7488022684986692548:2520], Cookie: 0 2025-03-31T17:12:28.641791Z node 1 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [1:7488022684986692548:2520]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-31T17:12:28.641804Z node 1 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-31T17:12:28.641821Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037901, Partition: 4, State: StateIdle] Have 0 items to delete old stuff 2025-03-31T17:12:28.641845Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037901, Partition: 4, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-31T17:12:28.641858Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037901, Partition: 4, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-31T17:12:28.641870Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037901, Partition: 4, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-03-31T17:12:28.711703Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:7488022637742050724:2185]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T17:12:28.711736Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T17:12:28.711776Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:7488022637742050724:2185], Recipient [1:7488022637742050724:2185]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T17:12:28.711803Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::SysViewClientLost [FAIL] Test command err: Trying to start YDB, gRPC: 18679, MsgBus: 64861 2025-03-31T17:11:36.918672Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488022490325739871:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:11:36.918758Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/r3ux/000185/r3tmp/tmpnkGyJT/pdisk_1.dat 2025-03-31T17:11:37.257853Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18679, node 1 2025-03-31T17:11:37.323191Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T17:11:37.323225Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T17:11:37.323244Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T17:11:37.323417Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T17:11:37.336454Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T17:11:37.336604Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T17:11:37.338776Z node 1 :HIVE WARN: HIVE#72057594037968897 
Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:64861 TClient is connected to server localhost:64861 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T17:11:37.833631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T17:11:37.854259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T17:11:37.995143Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T17:11:38.165572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T17:11:38.232497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T17:11:40.069735Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022507505610837:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:40.069859Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:40.392554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T17:11:40.418626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T17:11:40.442371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T17:11:40.470058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T17:11:40.499842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T17:11:40.531576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T17:11:40.606444Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022507505611353:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:40.606540Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:40.606589Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022507505611358:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:40.609441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T17:11:40.617434Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488022507505611360:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T17:11:40.677646Z node 1 :TX_PROXY ERROR: Actor# [1:7488022507505611413:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T17:11:41.554001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T17:11:41.923210Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488022490325739871:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:11:41.923303Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T17:11:52.257514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T17:11:52.257544Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T17:12:09.916378Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743441129893, txId: 281474976710672] shutting down 2025-03-31T17:12:09.974117Z node 1 :RPC_REQUEST WARN: Client lost 2025-03-31T17:12:11.187763Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743441131175, txId: 281474976710674] shutting down 2025-03-31T17:12:12.517816Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743441132502, txId: 281474976710676] shutting down 2025-03-31T17:12:13.757050Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743441133739, txId: 281474976710678] shutting down 2025-03-31T17:12:14.994076Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743441134984, txId: 281474976710680] shutting down 2025-03-31T17:12:16.195293Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743441136187, txId: 281474976710682] shutting down 2025-03-31T17:12:17.418121Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743441137407, txId: 281474976710684] shutting down 2025-03-31T17:12:18.620416Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743441138598, txId: 281474976710686] shutting down 2025-03-31T17:12:19.844841Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743441139836, txId: 281474976710688] shutting down 2025-03-31T17:12:21.127298Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743441141109, txId: 281474976710690] shutting down 2025-03-31T17:12:22.366013Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743441142350, txId: 281474976710692] shutting down assertion failed at ydb/core/kqp/ut/query/kqp_stats_ut.cpp:591, virtual void NKikimr::NKqp::NTestSuiteKqpStats::TTestCaseSysViewClientLost::Execute_(NUnitTest::TTestContext &): (timeoutedCount == 1) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x194EA4DB 1. 
/tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x199AF40F 2. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:591: Execute_ @ 0x1908FA48 3. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18: operator() @ 0x190A2A57 4. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18:1) &> @ 0x190A2A57 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18:1) &> @ 0x190A2A57 6. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x190A2A57 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x190A2A57 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x199E6435 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x199E6435 10. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x199E6435 11. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x199B5F88 12. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18: Execute @ 0x190A1BDB 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x199B7855 14. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x199E09AC 15. ??:0: ?? @ 0x7F42CD8AFD8F 16. ??:0: ?? @ 0x7F42CD8AFE3F 17. ??:0: ?? @ 0x16459028 |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> ReadIteratorExternalBlobs::NotExtBlobs [GOOD] Test command err: 2025-03-31T17:11:45.917486Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T17:11:45.917773Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T17:11:45.917956Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/r3ux/000303/r3tmp/tmpgm9FVq/pdisk_1.dat 2025-03-31T17:11:46.252116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T17:11:46.290535Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T17:11:46.334033Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T17:11:46.334167Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T17:11:46.345829Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T17:11:46.427291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T17:11:46.732666Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:740:2622], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:46.732768Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:750:2627], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:46.733108Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:46.737509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T17:11:46.893503Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:754:2630], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T17:11:46.962326Z node 1 :TX_PROXY ERROR: Actor# [1:828:2673] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T17:11:50.674121Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqpk05sa4gpeedzhnchrg896, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmFhNDc0YTYtMzE4NzVkMDctNWIwMWIxZDQtYWI3YmZhMTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:11:53.900045Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqpk09s31y8hjye2sqcrqz1a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTk5Yzk3YWItZTQ2MzI2OGItMzc3ZDkwY2MtZmRiY2Y4ODg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:11:57.516070Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqpk0cwfe7mw67et9by7ba8s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjEzOGJiZmItM2YzZTE3Mi1jNzA2ZWJiOC1iNzhlOWVmYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:12:00.748078Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqpk0gd53dr7eny8p7cn9fkz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGQwMTJjYzYtZjExZWFiZWYtYjhjMmU4YTctZDNiMDY5Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:12:04.133491Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jqpk0kkadzq0qgt3ccqhk92r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjA2MWQ3MTctZDE0NzZlMGYtYjQxMGU5M2EtY2RlYjA0OTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:12:07.397491Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jqpk0pxp6r2kcrmmb6f3wy0y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjZmNzYxZmUtYjhhYjZkNDEtZDg0NTM1MjYtODE1ODdmYzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:12:11.011245Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jqpk0t3j8m90pwz0221r8gqn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTQxNGMyNWItY2VlMzA1MGYtMTVkYjhmZDgtNzMxNjMyZjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:12:14.957590Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jqpk0xn66r6fmsg80j6ezd2t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjI3NjhmZDEtM2EwYjhkMzctMTYxM2ZkYTctZDc4YzRiMmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:12:18.732541Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jqpk11ewaw68xgrdtw3v58zw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTU1YTYzYmItYzI0ZTFiYzAtNTY3YTg1ZTYtODMwM2EzMDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:12:22.348398Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715669. 
Ctx: { TraceId: 01jqpk154k9j0ej0378ev2qyj3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjExODNiZTktZjIxYzAxMGUtN2NiNzNkMTItZGNmYjYyZjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... waiting for stats after upsert 2025-03-31T17:12:24.873669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T17:12:24.873746Z node 1 :IMPORT WARN: Table profiles were not loaded Captured TEvDataShard::TEvPeriodicTableStats DatashardId: 72075186224037888 TableLocalId: 2 Generation: 1 Round: 0 TableStats { DataSize: 10487312 RowCount: 10 IndexSize: 0 InMemSize: 10487312 LastAccessTime: 1526 LastUpdateTime: 1526 ImmediateTxCompleted: 10 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 10 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 7257 Memory: 17425464 } ShardState: 2 UserTablePartOwners: 72075186224037888 NodeId: 1 StartTime: 450 TableOwnerId: 72057594046644480 FollowerId: 0 ... waiting for stats after compaction Captured TEvDataShard::TEvPeriodicTableStats DatashardId: 72075186224037888 TableLocalId: 2 Generation: 1 Round: 1 TableStats { DataSize: 10487312 RowCount: 10 IndexSize: 0 InMemSize: 10487312 LastAccessTime: 1526 LastUpdateTime: 1526 ImmediateTxCompleted: 10 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 10 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 20 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 4212 Memory: 124948 Storage: 10486554 } ShardState: 2 UserTablePartOwners: 72075186224037888 NodeId: 1 StartTime: 450 TableOwnerId: 72057594046644480 FollowerId: 0 Captured TEvDataShard::TEvPeriodicTableStats DatashardId: 72075186224037888 TableLocalId: 2 Generation: 1 Round: 2 TableStats { DataSize: 10486220 RowCount: 10 IndexSize: 0 InMemSize: 0 LastAccessTime: 1526 LastUpdateTime: 1526 ImmediateTxCompleted: 10 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 10 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 20 HasLoanedParts: false Channels { Channel: 1 DataSize: 10486220 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 4212 Memory: 124948 Storage: 10486554 } ShardState: 2 UserTablePartOwners: 72075186224037888 NodeId: 1 StartTime: 450 TableOwnerId: 72057594046644480 FollowerId: 0 2025-03-31T17:12:29.574157Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01jqpk1fdq42sp78rxs34p4vkc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzI0N2Y1YTItNTcyODM1M2QtNGQwNTNmNzktODY1MzVkY2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true] >> TSentinelUnstableTests::BSControllerCantChangeStatus [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::ReadingAfterSplitTest_BeforeAutoscaleAwareSDK [GOOD] Test command err: 2025-03-31T17:12:10.179873Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488022637590263453:2198];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:12:10.181110Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/r3ux/0002e3/r3tmp/tmpaECnl1/pdisk_1.dat 2025-03-31T17:12:10.416532Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-31T17:12:10.627068Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T17:12:10.632464Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T17:12:10.632562Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T17:12:10.636181Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15164, node 1 2025-03-31T17:12:10.859123Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/r3ux/0002e3/r3tmp/yandexzmHHgJ.tmp 2025-03-31T17:12:10.859150Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/r3ux/0002e3/r3tmp/yandexzmHHgJ.tmp 2025-03-31T17:12:10.859352Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/r3ux/0002e3/r3tmp/yandexzmHHgJ.tmp 2025-03-31T17:12:10.859485Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T17:12:11.117992Z INFO: TTestServer started on Port 30170 GrpcPort 15164 TClient is connected to server localhost:30170 PQClient connected to localhost:15164 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T17:12:11.468166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-03-31T17:12:11.517999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-03-31T17:12:13.585700Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022650475166017:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:12:13.585848Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:12:13.586903Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022650475166039:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:12:13.591513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-03-31T17:12:13.595679Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022650475166072:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:12:13.596193Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:12:13.605986Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488022650475166041:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-03-31T17:12:13.832726Z node 1 :TX_PROXY ERROR: Actor# [1:7488022650475166097:2452] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T17:12:13.938473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T17:12:13.955938Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488022650475166113:2349], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T17:12:13.957671Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OWRkY2RiMy1jNzczZjgzYi01MmNlYzkyZi1kMTZhNWI5Nw==, ActorId: [1:7488022650475166003:2337], ActorState: ExecuteState, TraceId: 01jqpk0zzefyph0gzh148e1wd9, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T17:12:13.960105Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T17:12:13.973446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T17:12:14.067786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7488022654770133702:2636] 2025-03-31T17:12:15.179449Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488022637590263453:2198];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:12:15.179542Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2025-03-31T17:12:20.575809Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-03-31T17:12:20.629284Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-03-31T17:12:20.637572Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7488022680539937777:2800], Recipient [1:7488022637590263744:2196]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T17:12:20.637622Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T17:12:20.637639Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-03-31T17:12:20.637689Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7488022680539937773:2797], Recipient [1:7488022637590263744:2196]: {TEvModifySchemeTransaction txid# 281474976710673 TabletId# 72057594046644480} 2025-03-31T17:12:20.637707Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-03-31T17:12:20.755526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 100 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976710673 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-03-31T17:12:20.756015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T17:12:20.756545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: test-topic, child id: [OwnerId: 72057594046644480, LocalPathId: 13], at schemeshard: 72057594046644480 2025-03-31T17:12:20.756585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 0 2025-03-31T17:12:20.756620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new ... 
ode 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899] Destroy direct read session test-consumer_1_1_10125203997104160534_v1 2025-03-31T17:12:29.560862Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899] server disconnected, pipe [1:7488022719194645446:2916] destroyed 2025-03-31T17:12:29.560893Z node 1 :PERSQUEUE TRACE: HandleHook, received event# 269877764, Sender [1:7488022719194645448:3802], Recipient [1:7488022714899677803:2839]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-03-31T17:12:29.560903Z node 1 :PERSQUEUE TRACE: HandleHook, processing event TEvTabletPipe::TEvServerDisconnected 2025-03-31T17:12:29.560911Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037898] Handle TEvTabletPipe::TEvServerDisconnected 2025-03-31T17:12:29.560920Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037898] Destroy direct read session test-consumer_1_1_10125203997104160534_v1 2025-03-31T17:12:29.560934Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037898] server disconnected, pipe [1:7488022719194645445:2915] destroyed 2025-03-31T17:12:29.560962Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][test-topic] pipe [1:7488022719194645421:2907] disconnected; active server actors: 1 2025-03-31T17:12:29.560985Z node 1 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][test-topic] pipe [1:7488022719194645421:2907] client test-consumer disconnected session test-consumer_1_1_10125203997104160534_v1 2025-03-31T17:12:29.561137Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_1_1_10125203997104160534_v1 2025-03-31T17:12:29.561168Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_1_1_10125203997104160534_v1 2025-03-31T17:12:29.561180Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_1_1_10125203997104160534_v1 2025-03-31T17:12:29.561219Z node 1 :PERSQUEUE TRACE: HandleHook, received event# 269877764, Sender [1:7488022719194645436:3798], Recipient [1:7488022706309742796:2769]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-03-31T17:12:29.561231Z node 1 :PERSQUEUE TRACE: HandleHook, processing event TEvTabletPipe::TEvServerDisconnected 2025-03-31T17:12:29.561241Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037897] Handle TEvTabletPipe::TEvServerDisconnected 2025-03-31T17:12:29.561251Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037897] Destroy direct read session test-consumer_1_1_10125203997104160534_v1 2025-03-31T17:12:29.561267Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037897] server disconnected, pipe [1:7488022719194645433:2913] destroyed 2025-03-31T17:12:29.561300Z node 1 :PERSQUEUE TRACE: HandleHook, received event# 269877764, Sender [1:7488022719194645435:3797], Recipient [1:7488022706309742799:2770]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-03-31T17:12:29.561310Z node 1 :PERSQUEUE TRACE: HandleHook, processing event TEvTabletPipe::TEvServerDisconnected 2025-03-31T17:12:29.561317Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037896] Handle TEvTabletPipe::TEvServerDisconnected 2025-03-31T17:12:29.561326Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Destroy direct read session test-consumer_1_1_10125203997104160534_v1 2025-03-31T17:12:29.561339Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037896] server disconnected, pipe [1:7488022719194645432:2912] destroyed 2025-03-31T17:12:29.561357Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_1_1_10125203997104160534_v1 2025-03-31T17:12:29.561370Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: server 
session deregistered: test-consumer_1_1_10125203997104160534_v1 2025-03-31T17:12:29.564567Z :DEBUG: [/Root] TraceId [] SessionId [producer-1|60c40a54-b701d33e-9cb29cfa-2a643550_0] PartitionId [4] Generation [1] Write session: destroy 2025-03-31T17:12:29.599790Z node 1 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [1:7488022714899677808:2840], Partition 3, Sender [0:0:0], Recipient [1:7488022714899677897:2850], Cookie: 0 2025-03-31T17:12:29.599855Z node 1 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [1:7488022714899677897:2850]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-31T17:12:29.599874Z node 1 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-31T17:12:29.599931Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] Have 0 items to delete old stuff 2025-03-31T17:12:29.599985Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-31T17:12:29.600003Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-31T17:12:29.600026Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-03-31T17:12:29.600072Z node 1 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [1:7488022714899677803:2839], Partition 4, Sender [0:0:0], Recipient [1:7488022714899677895:2848], Cookie: 0 2025-03-31T17:12:29.600101Z node 1 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [1:7488022714899677895:2848]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-31T17:12:29.600111Z node 1 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-31T17:12:29.600130Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] Have 0 items to delete old stuff 2025-03-31T17:12:29.600154Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-31T17:12:29.600165Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-31T17:12:29.600177Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-03-31T17:12:29.616816Z node 1 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [1:7488022706309742799:2770], Partition 2, Sender [0:0:0], Recipient [1:7488022706309742881:2778], Cookie: 0 2025-03-31T17:12:29.616824Z node 1 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [1:7488022706309742796:2769], Partition 1, Sender [0:0:0], Recipient [1:7488022706309742883:2780], Cookie: 0 2025-03-31T17:12:29.616871Z node 1 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [1:7488022706309742883:2780]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-31T17:12:29.616876Z node 1 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [1:7488022706309742881:2778]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-31T17:12:29.616891Z node 1 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-31T17:12:29.616905Z node 1 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-31T17:12:29.616928Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-03-31T17:12:29.616934Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-03-31T17:12:29.616988Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-31T17:12:29.616990Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-31T17:12:29.617004Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-31T17:12:29.617005Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-31T17:12:29.617032Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-03-31T17:12:29.617049Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-03-31T17:12:29.617251Z node 1 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [1:7488022680539937823:2472], Partition 0, Sender [0:0:0], Recipient [1:7488022680539937862:2475], Cookie: 0 2025-03-31T17:12:29.617282Z node 1 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [1:7488022680539937862:2475]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-31T17:12:29.617294Z node 1 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-31T17:12:29.617317Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-03-31T17:12:29.617349Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-31T17:12:29.617361Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-31T17:12:29.617390Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-03-31T17:12:29.700256Z node 1 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [1:7488022714899677808:2840], Partition 3, Sender [0:0:0], Recipient [1:7488022714899677897:2850], Cookie: 0 2025-03-31T17:12:29.700325Z node 1 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [1:7488022714899677897:2850]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-31T17:12:29.700347Z node 1 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-31T17:12:29.700398Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] Have 0 items to delete old stuff 2025-03-31T17:12:29.700472Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-31T17:12:29.700495Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-31T17:12:29.700522Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-03-31T17:12:29.700577Z node 1 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [1:7488022714899677803:2839], Partition 4, Sender [0:0:0], Recipient [1:7488022714899677895:2848], Cookie: 0 2025-03-31T17:12:29.700606Z node 1 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [1:7488022714899677895:2848]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-31T17:12:29.700618Z node 1 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-31T17:12:29.700644Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] Have 0 items to delete old stuff 2025-03-31T17:12:29.700668Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-31T17:12:29.700678Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-31T17:12:29.700690Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::ReadingAfterSplitTest_AutoscaleAwareSDK [GOOD] Test command err: 2025-03-31T17:12:10.180641Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488022637657560019:2266];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:12:10.180697Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/r3ux/0002de/r3tmp/tmpZ7TAWe/pdisk_1.dat 2025-03-31T17:12:10.376603Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-31T17:12:10.580108Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T17:12:10.608228Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T17:12:10.608334Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T17:12:10.622246Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27104, node 1 2025-03-31T17:12:10.855779Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/r3ux/0002de/r3tmp/yandexs2Nuu0.tmp 2025-03-31T17:12:10.855810Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/r3ux/0002de/r3tmp/yandexs2Nuu0.tmp 2025-03-31T17:12:10.857295Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/r3ux/0002de/r3tmp/yandexs2Nuu0.tmp 2025-03-31T17:12:10.857410Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T17:12:11.118420Z INFO: TTestServer started on Port 5433 GrpcPort 27104 TClient is connected to server localhost:5433 PQClient connected to localhost:27104 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T17:12:11.510810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-31T17:12:11.553637Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-03-31T17:12:11.748271Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 2025-03-31T17:12:13.417170Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022650542462522:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:12:13.417276Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:12:13.417575Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022650542462535:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:12:13.425494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-03-31T17:12:13.441318Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488022650542462537:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-03-31T17:12:13.537006Z node 1 :TX_PROXY ERROR: Actor# [1:7488022650542462601:2453] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T17:12:13.880013Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7488022650542462609:2349], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-31T17:12:13.904049Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDY1OTRjYmUtNWY2MjNjZjQtMmQ0NTUwYTMtMTM5NDZhNWY=, ActorId: [1:7488022650542462520:2337], ActorState: ExecuteState, TraceId: 01jqpk0zv50p78tk6ees455ncj, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-31T17:12:13.908981Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-31T17:12:13.934594Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T17:12:13.966452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T17:12:14.065630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7488022654837430200:2637] 2025-03-31T17:12:15.183205Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488022637657560019:2266];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:12:15.183311Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2025-03-31T17:12:20.742704Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-03-31T17:12:20.760340Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-03-31T17:12:20.761646Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7488022680607234287:2814], Recipient [1:7488022637657560208:2177]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T17:12:20.761674Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T17:12:20.761686Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-03-31T17:12:20.761725Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7488022680607234283:2811], Recipient [1:7488022637657560208:2177]: {TEvModifySchemeTransaction txid# 281474976710673 TabletId# 72057594046644480} 2025-03-31T17:12:20.761742Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-03-31T17:12:20.831777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 100 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976710673 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-03-31T17:12:20.832331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-31T17:12:20.832644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: test-topic, child id: [OwnerId: 72057594046644480, LocalPathId: 13], at schemeshard: 72057594046644480 2025-03-31T17:12:20.832682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 0 2025-03-31T17:12:20.832715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 1 2025-03-31T17:12:20.832767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId 
[OwnerId: 72057594046644480, LocalPathId: 13] was 2 2025-03-31T17:12:20.832838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 13] wa ... E TRACE: HandleHook, received event# 269877764, Sender [1:7488022719261941949:3804], Recipient [1:7488022714966974310:2846]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-03-31T17:12:29.799364Z node 1 :PERSQUEUE TRACE: HandleHook, processing event TEvTabletPipe::TEvServerDisconnected 2025-03-31T17:12:29.799374Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037898] Handle TEvTabletPipe::TEvServerDisconnected 2025-03-31T17:12:29.799696Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037898] Destroy direct read session test-consumer_1_1_3302102406975976553_v1 2025-03-31T17:12:29.799741Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037898] server disconnected, pipe [1:7488022719261941947:2922] destroyed 2025-03-31T17:12:29.799789Z node 1 :PERSQUEUE TRACE: HandleHook, received event# 269877764, Sender [1:7488022719261941948:3803], Recipient [1:7488022714966974306:2845]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-03-31T17:12:29.799807Z node 1 :PERSQUEUE TRACE: HandleHook, processing event TEvTabletPipe::TEvServerDisconnected 2025-03-31T17:12:29.799820Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037899] Handle TEvTabletPipe::TEvServerDisconnected 2025-03-31T17:12:29.799830Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899] Destroy direct read session test-consumer_1_1_3302102406975976553_v1 2025-03-31T17:12:29.799844Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899] server disconnected, pipe [1:7488022719261941946:2921] destroyed 2025-03-31T17:12:29.799876Z node 1 :PERSQUEUE TRACE: HandleHook, received event# 269877764, Sender [1:7488022719261941936:3798], Recipient [1:7488022706377039260:2762]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-03-31T17:12:29.799887Z node 1 :PERSQUEUE TRACE: HandleHook, processing event TEvTabletPipe::TEvServerDisconnected 2025-03-31T17:12:29.799895Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037896] Handle TEvTabletPipe::TEvServerDisconnected 2025-03-31T17:12:29.799905Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Destroy direct read session test-consumer_1_1_3302102406975976553_v1 2025-03-31T17:12:29.799927Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037896] server disconnected, pipe [1:7488022719261941934:2919] destroyed 2025-03-31T17:12:29.799964Z node 1 :PERSQUEUE TRACE: HandleHook, received event# 269877764, Sender [1:7488022719261941935:3797], Recipient [1:7488022706377039278:2763]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-03-31T17:12:29.799974Z node 1 :PERSQUEUE TRACE: HandleHook, processing event TEvTabletPipe::TEvServerDisconnected 2025-03-31T17:12:29.799983Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037897] Handle TEvTabletPipe::TEvServerDisconnected 2025-03-31T17:12:29.799993Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037897] Destroy direct read session test-consumer_1_1_3302102406975976553_v1 2025-03-31T17:12:29.800008Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037897] server disconnected, pipe [1:7488022719261941933:2918] destroyed 2025-03-31T17:12:29.800050Z node 1 :PERSQUEUE TRACE: HandleHook, received event# 269877764, Sender [1:7488022719261941927:3795], Recipient [1:7488022680607234327:2471]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-03-31T17:12:29.800062Z node 1 :PERSQUEUE TRACE: HandleHook, processing event TEvTabletPipe::TEvServerDisconnected 2025-03-31T17:12:29.800072Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037892] 
Handle TEvTabletPipe::TEvServerDisconnected 2025-03-31T17:12:29.800091Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session test-consumer_1_1_3302102406975976553_v1 2025-03-31T17:12:29.800107Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [1:7488022719261941926:2916] destroyed 2025-03-31T17:12:29.800735Z :DEBUG: [/Root] TraceId [] SessionId [producer-1|93484a71-68a8c274-ea0c14f0-23a7437c_0] PartitionId [4] Generation [1] Write session: destroy 2025-03-31T17:12:29.800313Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_1_1_3302102406975976553_v1 2025-03-31T17:12:29.800341Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_1_1_3302102406975976553_v1 2025-03-31T17:12:29.800355Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_1_1_3302102406975976553_v1 2025-03-31T17:12:29.800367Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_1_1_3302102406975976553_v1 2025-03-31T17:12:29.800380Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_1_1_3302102406975976553_v1 2025-03-31T17:12:29.800888Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][test-topic] pipe [1:7488022719261941923:2913] disconnected; active server actors: 1 2025-03-31T17:12:29.800914Z node 1 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][test-topic] pipe [1:7488022719261941923:2913] client test-consumer disconnected session test-consumer_1_1_3302102406975976553_v1 2025-03-31T17:12:29.815170Z node 1 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [1:7488022680607234327:2471], Partition 0, Sender [0:0:0], Recipient [1:7488022680607234371:2474], Cookie: 0 2025-03-31T17:12:29.815259Z node 1 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [1:7488022680607234371:2474]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-31T17:12:29.815282Z node 1 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-31T17:12:29.815325Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-03-31T17:12:29.815388Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-31T17:12:29.815406Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-31T17:12:29.815441Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-03-31T17:12:29.828416Z node 1 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [1:7488022706377039278:2763], Partition 1, Sender [0:0:0], Recipient [1:7488022706377039350:2772], Cookie: 0 2025-03-31T17:12:29.828487Z node 1 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [1:7488022706377039350:2772]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-31T17:12:29.828512Z node 1 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-31T17:12:29.828565Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-03-31T17:12:29.828634Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-31T17:12:29.828654Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-31T17:12:29.828679Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-03-31T17:12:29.841295Z node 1 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [1:7488022714966974310:2846], Partition 4, Sender [0:0:0], Recipient [1:7488022714966974399:2856], Cookie: 0 2025-03-31T17:12:29.841369Z node 1 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [1:7488022714966974399:2856]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-31T17:12:29.841394Z node 1 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-31T17:12:29.841438Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] Have 0 items to delete old stuff 2025-03-31T17:12:29.841513Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-31T17:12:29.841537Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-31T17:12:29.841560Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-03-31T17:12:29.857108Z node 1 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [1:7488022714966974306:2845], Partition 3, Sender [0:0:0], Recipient [1:7488022714966974395:2853], Cookie: 0 2025-03-31T17:12:29.857204Z node 1 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [1:7488022714966974395:2853]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-31T17:12:29.857229Z node 1 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-31T17:12:29.857273Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] Have 0 items to delete old stuff 2025-03-31T17:12:29.857341Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-31T17:12:29.857369Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-31T17:12:29.857393Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-03-31T17:12:29.864310Z node 1 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [1:7488022706377039260:2762], Partition 2, Sender [0:0:0], Recipient [1:7488022706377039367:2776], Cookie: 0 2025-03-31T17:12:29.864421Z node 1 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [1:7488022706377039367:2776]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-31T17:12:29.864446Z node 1 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-31T17:12:29.864492Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-03-31T17:12:29.864559Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-31T17:12:29.864584Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-31T17:12:29.864606Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-03-31T17:12:29.915501Z node 1 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [1:7488022680607234327:2471], Partition 0, Sender [0:0:0], Recipient [1:7488022680607234371:2474], Cookie: 0 2025-03-31T17:12:29.915578Z node 1 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [1:7488022680607234371:2474]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-31T17:12:29.915603Z node 1 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-31T17:12:29.915646Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-03-31T17:12:29.915731Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-31T17:12:29.915766Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-31T17:12:29.915791Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TConflictWriteOlap [FAIL] Test command err: Trying to start YDB, gRPC: 3034, MsgBus: 15572 2025-03-31T17:12:08.208209Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488022630184789999:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:12:08.208331Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/r3ux/0002a0/r3tmp/tmpp2vMQQ/pdisk_1.dat 2025-03-31T17:12:08.504096Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3034, node 1 2025-03-31T17:12:08.573421Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T17:12:08.573546Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T17:12:08.576091Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T17:12:08.577706Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T17:12:08.577724Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T17:12:08.577758Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T17:12:08.577900Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15572 TClient is connected to server localhost:15572 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T17:12:09.042249Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T17:12:11.197339Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022643069692529:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:12:11.197585Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:12:11.199525Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022643069692565:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:12:11.207601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T17:12:11.225000Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488022643069692567:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T17:12:11.291497Z node 1 :TX_PROXY ERROR: Actor# [1:7488022643069692620:2337] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T17:12:11.630207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T17:12:11.814298Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488022643069692812:2345];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T17:12:11.814372Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488022643069692810:2344];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T17:12:11.814501Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488022643069692810:2344];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T17:12:11.814508Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488022643069692812:2345];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T17:12:11.814813Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488022643069692810:2344];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T17:12:11.814975Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488022643069692810:2344];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T17:12:11.814988Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488022643069692812:2345];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T17:12:11.815260Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488022643069692812:2345];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T17:12:11.815268Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488022643069692810:2344];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T17:12:11.815380Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488022643069692812:2345];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T17:12:11.815431Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488022643069692810:2344];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 
2025-03-31T17:12:11.815503Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488022643069692812:2345];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T17:12:11.815585Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488022643069692810:2344];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T17:12:11.815601Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488022643069692812:2345];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T17:12:11.815916Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488022643069692810:2344];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T17:12:11.815920Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488022643069692812:2345];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T17:12:11.816042Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488022643069692812:2345];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T17:12:11.816046Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488022643069692810:2344];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T17:12:11.816165Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488022643069692810:2344];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T17:12:11.816318Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488022643069692812:2345];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T17:12:11.816324Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488022643069692810:2344];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T17:12:11.816424Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488022643069692812:2345];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T17:12:11.816454Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488022643069692810:2344];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T17:12:11.816526Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037892;self_id=[1:7488022643069692812:2345];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T17:12:11.861210Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7488022643069692825:2349];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T17:12:11.861324Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7488022643069692825:2349];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T17:12:11.861578Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_ ... mnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038047;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:26.644719Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038047;self_id=[1:7488022664544535514:3387];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038047;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:26.644763Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038017;self_id=[1:7488022664544535552:3391];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038017;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:26.644844Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038028;self_id=[1:7488022664544535416:3358];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038028;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:26.644881Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038012;self_id=[1:7488022664544535494:3380];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038012;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:26.644939Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038009;self_id=[1:7488022664544535377:3340];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038009;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:26.644995Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038003;self_id=[1:7488022664544535587:3394];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038003;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:26.647615Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038024;self_id=[1:7488022664544535384:3342];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038024;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:26.647893Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038001;self_id=[1:7488022664544535796:3396];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038001;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:26.648020Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038001;self_id=[1:7488022664544535796:3396];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038001;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:26.648175Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038037;self_id=[1:7488022664544535520:3389];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038037;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:26.648370Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038019;self_id=[1:7488022664544535567:3392];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038019;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:26.648395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038011;self_id=[1:7488022664544535502:3383];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038011;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:26.648590Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038024;self_id=[1:7488022664544535384:3342];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038024;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:26.648622Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038013;self_id=[1:7488022664544535420:3360];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038013;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:26.648756Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038037;self_id=[1:7488022664544535520:3389];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038037;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:26.648759Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038019;self_id=[1:7488022664544535567:3392];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038019;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:26.648870Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038011;self_id=[1:7488022664544535502:3383];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038011;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:26.648888Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038013;self_id=[1:7488022664544535420:3360];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038013;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:26.659987Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038057;self_id=[1:7488022664544535355:3333];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038057;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:26.659987Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038054;self_id=[1:7488022664544535516:3388];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038054;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:26.660251Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038057;self_id=[1:7488022664544535355:3333];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038057;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:26.660252Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038054;self_id=[1:7488022664544535516:3388];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038054;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:26.660870Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038016;self_id=[1:7488022664544535496:3381];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038016;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:26.660893Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038042;self_id=[1:7488022664544535432:3366];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038042;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:26.661113Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038033;self_id=[1:7488022664544535490:3378];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038033;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:26.661172Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038053;self_id=[1:7488022664544535392:3346];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038053;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:26.661822Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038016;self_id=[1:7488022664544535496:3381];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038016;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:26.661986Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038042;self_id=[1:7488022664544535432:3366];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038042;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:26.662137Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038033;self_id=[1:7488022664544535490:3378];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038033;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:26.662259Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038053;self_id=[1:7488022664544535392:3346];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038053;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:26.673221Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038063;self_id=[1:7488022664544535250:3307];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038063;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:26.673719Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038063;self_id=[1:7488022664544535250:3307];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038063;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:27.064004Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037963;self_id=[1:7488022647364661445:2505];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037963;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:27.064389Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[1:7488022647364661896:2568];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037926;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:27.064744Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037963;self_id=[1:7488022647364661445:2505];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037963;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:12:27.064888Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224037926;self_id=[1:7488022647364661896:2568];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037926;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled;
assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:92, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictWrite::DoExecute(): (result.GetStatus() == EStatus::ABORTED) failed: (SUCCESS != ABORTED) , with diff: (SUCC|ABORT)E(SS|D)
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191FE23B
1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x196C634F
2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:92: DoExecute @ 0x18DC9C58
3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18D0BACA
4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:118: Execute_ @ 0x18DB864A
5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18DBEF27
6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27
7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27
8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18DBEF27
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18DBEF27
10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196FD375
11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196FD375
12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196FD375
13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x196CCEC8
14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18DBE0F3
15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x196CE795
16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196F78EC
17. ??:0: ?? @ 0x7F1DF2EFDD8F
18. ??:0: ?? @ 0x7F1DF2EFDE3F
19. ??:0: ?? @ 0x163B4028
|96.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |96.1%| [TA] $(B)/ydb/core/kqp/ut/query/test-results/unittest/{meta.json ... results_accumulator.log} |96.1%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/query/test-results/unittest/{meta.json ... results_accumulator.log} |96.2%| [TA] $(B)/ydb/core/persqueue/ut/ut_with_sdk/test-results/unittest/{meta.json ...
results_accumulator.log} |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut_sentinel_unstable/unittest >> TSentinelUnstableTests::BSControllerCantChangeStatus [GOOD] Test command err: 2025-03-31T17:12:12.379068Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2025-03-31T17:12:12.379146Z node 1 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2025-03-31T17:12:12.379218Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateState 2025-03-31T17:12:12.379246Z node 1 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2025-03-31T17:12:12.379286Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2025-03-31T17:12:12.379358Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2025-03-31T17:12:12.380266Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "node-1" State: UNKNOWN Devices { Name: "pdisk-1-4" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-5" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-6" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-7" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 1 InterconnectPort: 10000 Location { Rack: "rack-1" } StartTimeSeconds: 0 } Hosts { Name: "node-2" State: UNKNOWN Devices { Name: "pdisk-2-8" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-9" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-10" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-11" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 2 InterconnectPort: 10000 Location { Rack: "rack-2" } StartTimeSeconds: 0 } Hosts { Name: "node-3" State: UNKNOWN Devices { Name: "pdisk-3-12" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-13" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-14" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-15" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 3 InterconnectPort: 10000 Location { Rack: "rack-3" } StartTimeSeconds: 0 } Hosts { Name: "node-4" State: UNKNOWN Devices { Name: "pdisk-4-16" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-17" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-18" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-19" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 4 InterconnectPort: 10000 Location { Rack: "rack-4" } StartTimeSeconds: 0 } Hosts { Name: "node-5" State: UNKNOWN Devices { Name: "pdisk-5-20" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-21" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-22" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-23" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 5 InterconnectPort: 10000 Location { Rack: "rack-5" } StartTimeSeconds: 0 } Hosts { Name: "node-6" State: UNKNOWN Devices { Name: "pdisk-6-24" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-25" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-26" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-27" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 6 InterconnectPort: 10000 Location { Rack: "rack-6" } StartTimeSeconds: 0 } Hosts { Name: "node-7" State: UNKNOWN Devices { Name: "pdisk-7-28" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-29" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-30" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-31" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 7 InterconnectPort: 10000 Location { Rack: "rack-7" } StartTimeSeconds: 0 } Hosts { Name: "node-8" State: UNKNOWN Devices { Name: 
"pdisk-8-32" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-33" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-34" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-35" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 8 InterconnectPort: 10000 Location { Rack: "rack-8" } StartTimeSeconds: 0 } } 2025-03-31T17:12:12.406901Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 4 Path: "/1/pdisk-4.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 5 Path: "/1/pdisk-5.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 6 Path: "/1/pdisk-6.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 7 Path: "/1/pdisk-7.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 8 Path: "/2/pdisk-8.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 9 Path: "/2/pdisk-9.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 10 Path: "/2/pdisk-10.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 11 Path: "/2/pdisk-11.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 12 Path: "/3/pdisk-12.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 13 Path: "/3/pdisk-13.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 14 Path: "/3/pdisk-14.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 15 Path: "/3/pdisk-15.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 16 Path: "/4/pdisk-16.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 17 Path: "/4/pdisk-17.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 18 Path: "/4/pdisk-18.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 19 Path: "/4/pdisk-19.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 20 Path: "/5/pdisk-20.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 21 Path: "/5/pdisk-21.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 22 Path: "/5/pdisk-22.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 23 Path: "/5/pdisk-23.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 24 Path: "/6/pdisk-24.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 25 Path: "/6/pdisk-25.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 26 Path: "/6/pdisk-26.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 27 Path: "/6/pdisk-27.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 28 Path: "/7/pdisk-28.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 29 Path: "/7/pdisk-29.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 30 Path: "/7/pdisk-30.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 31 Path: "/7/pdisk-31.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 32 Path: "/8/pdisk-32.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 33 Path: "/8/pdisk-33.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 34 Path: "/8/pdisk-34.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 35 Path: "/8/pdisk-35.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 
PDiskId: 5 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 
14 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDom ... PDiskStateResponse: nodeId# 6, response# PDiskStateInfo { PDiskId: 24 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-24.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 25 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-25.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 26 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-26.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 27 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-27.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37820027 2025-03-31T17:12:30.383828Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 7, response# PDiskStateInfo { PDiskId: 28 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-28.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 29 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-29.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 30 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-30.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 31 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-31.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37820027 2025-03-31T17:12:30.383991Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 8, response# PDiskStateInfo { PDiskId: 32 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-32.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 33 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-33.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 
214748364800 State: Normal } PDiskStateInfo { PDiskId: 34 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-34.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 35 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-35.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37820027 2025-03-31T17:12:30.384053Z node 1 :CMS DEBUG: [Sentinel] [Main] State was updated in 0.000000s 2025-03-31T17:12:30.394822Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateState 2025-03-31T17:12:30.394899Z node 1 :CMS DEBUG: [Sentinel] [Main] Start StateUpdater 2025-03-31T17:12:30.395060Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 1, wbId# [1:8388350642965737326:1634689637] 2025-03-31T17:12:30.395153Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 2, wbId# [2:8388350642965737326:1634689637] 2025-03-31T17:12:30.395195Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 3, wbId# [3:8388350642965737326:1634689637] 2025-03-31T17:12:30.395224Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 4, wbId# [4:8388350642965737326:1634689637] 2025-03-31T17:12:30.395274Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 5, wbId# [5:8388350642965737326:1634689637] 2025-03-31T17:12:30.395331Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 6, wbId# [6:8388350642965737326:1634689637] 2025-03-31T17:12:30.395360Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 7, wbId# [7:8388350642965737326:1634689637] 2025-03-31T17:12:30.395387Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 8, wbId# [8:8388350642965737326:1634689637] 2025-03-31T17:12:30.395721Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 1, response# PDiskStateInfo { PDiskId: 4 CreateTime: 0 ChangeTime: 0 Path: "/1/pdisk-4.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 5 CreateTime: 0 ChangeTime: 0 Path: "/1/pdisk-5.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 6 CreateTime: 0 ChangeTime: 0 Path: "/1/pdisk-6.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 7 CreateTime: 0 ChangeTime: 0 Path: "/1/pdisk-7.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37860027 2025-03-31T17:12:30.396583Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 2, response# PDiskStateInfo { PDiskId: 8 CreateTime: 0 ChangeTime: 0 Path: "/2/pdisk-8.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 9 CreateTime: 0 ChangeTime: 0 Path: "/2/pdisk-9.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 10 CreateTime: 0 ChangeTime: 0 Path: "/2/pdisk-10.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 11 CreateTime: 0 ChangeTime: 0 Path: "/2/pdisk-11.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37860027 2025-03-31T17:12:30.396917Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 3, response# PDiskStateInfo { PDiskId: 12 
CreateTime: 0 ChangeTime: 0 Path: "/3/pdisk-12.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 13 CreateTime: 0 ChangeTime: 0 Path: "/3/pdisk-13.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 14 CreateTime: 0 ChangeTime: 0 Path: "/3/pdisk-14.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 15 CreateTime: 0 ChangeTime: 0 Path: "/3/pdisk-15.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37860027 2025-03-31T17:12:30.397095Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 4, response# PDiskStateInfo { PDiskId: 16 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-16.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 17 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-17.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 18 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-18.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 19 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-19.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37860027 2025-03-31T17:12:30.397284Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 5, response# PDiskStateInfo { PDiskId: 20 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-20.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 21 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-21.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 22 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-22.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 23 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-23.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37860027 2025-03-31T17:12:30.397442Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 6, response# PDiskStateInfo { PDiskId: 24 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-24.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 25 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-25.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 26 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-26.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 27 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-27.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37860027 2025-03-31T17:12:30.397572Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 7, response# PDiskStateInfo { PDiskId: 28 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-28.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 29 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-29.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 30 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-30.data" Guid: 1 
AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 31 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-31.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37860027 2025-03-31T17:12:30.397749Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 8, response# PDiskStateInfo { PDiskId: 32 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-32.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 33 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-33.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 34 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-34.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 35 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-35.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37860027 2025-03-31T17:12:30.397811Z node 1 :CMS DEBUG: [Sentinel] [Main] State was updated in 0.000000s 2025-03-31T17:12:30.398384Z node 1 :CMS NOTICE: [Sentinel] [Main] PDisk status changed: pdiskId# 7:28, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2025-03-31T17:12:30.398447Z node 1 :CMS NOTICE: [Sentinel] [Main] PDisk status changed: pdiskId# 4:17, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2025-03-31T17:12:30.398497Z node 1 :CMS NOTICE: [Sentinel] [Main] PDisk status changed: pdiskId# 6:26, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2025-03-31T17:12:30.398540Z node 1 :CMS DEBUG: [Sentinel] [Main] Change pdisk status: requestsSize# 3 2025-03-31T17:12:30.398827Z node 1 :CMS DEBUG: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Status { Success: true } Status { }, cookie# 140 2025-03-31T17:12:30.398873Z node 1 :CMS ERROR: [Sentinel] [Main] Unsuccesful response from BSC: error# 2025-03-31T17:12:30.409251Z node 1 :CMS DEBUG: [Sentinel] [Main] Retrying: attempt# 1 2025-03-31T17:12:30.409325Z node 1 :CMS DEBUG: [Sentinel] [Main] Change pdisk status: requestsSize# 3 2025-03-31T17:12:30.409665Z node 1 :CMS DEBUG: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Status { Success: true } Status { Success: true } Success: true, cookie# 141 2025-03-31T17:12:30.409751Z node 1 :CMS NOTICE: [Sentinel] [Main] PDisk status has been changed: pdiskId# 4:17 2025-03-31T17:12:30.409804Z node 1 :CMS NOTICE: [Sentinel] [Main] PDisk status has been changed: pdiskId# 6:26 2025-03-31T17:12:30.409844Z node 1 :CMS NOTICE: [Sentinel] [Main] PDisk status has been changed: pdiskId# 7:28 |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |96.4%| [TA] {RESULT} $(B)/ydb/core/persqueue/ut/ut_with_sdk/test-results/unittest/{meta.json ... results_accumulator.log} >> overlapping_portions.py::TestOverlappingPortions::test [FAIL] |96.5%| [TA] $(B)/ydb/core/kqp/ut/tx/test-results/unittest/{meta.json ... results_accumulator.log} |96.5%| [TM] {RESULT} ydb/core/cms/ut_sentinel_unstable/unittest |96.6%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/tx/test-results/unittest/{meta.json ... 
results_accumulator.log} |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_workload.py::TestYdbLogWorkload::test[column] |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/postgresql/py3test >> test_postgres.py::TestPostgresSuite::test_postgres_suite[float8] [GOOD] |96.7%| [TM] {RESULT} ydb/tests/functional/postgresql/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::QueryExecuteScript [FAIL] Test command err: 2025-03-31T17:12:09.213110Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488022632452480757:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:12:09.225758Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-03-31T17:12:09.674440Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T17:12:09.727043Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T17:12:09.727164Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T17:12:09.731111Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63575, node 1 2025-03-31T17:12:09.980880Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T17:12:09.980911Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T17:12:09.980924Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T17:12:09.981073Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19482 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T17:12:10.575904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T17:12:10.646754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T17:12:10.651705Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T17:12:12.423482Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-31T17:12:12.423549Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-31T17:12:12.746619Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022645337383206:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:12:12.746734Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:12:12.746793Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488022645337383218:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:12:12.753678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-03-31T17:12:12.764521Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488022645337383220:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-03-31T17:12:12.845064Z node 1 :TX_PROXY ERROR: Actor# [1:7488022645337383271:2360] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T17:12:13.591645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T17:12:13.717652Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-31T17:12:13.717679Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-31T17:12:14.195463Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-31T17:12:14.195492Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-31T17:12:14.214173Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488022632452480757:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:12:14.214246Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T17:12:14.250911Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-31T17:12:14.250945Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-31T17:12:14.311395Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-31T17:12:14.311434Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-31T17:12:14.370399Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-31T17:12:14.370441Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-31T17:12:14.420443Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-31T17:12:14.420478Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-31T17:12:14.478486Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-31T17:12:14.478526Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-31T17:12:14.539610Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-31T17:12:14.539652Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-31T17:12:14.601626Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-31T17:12:14.601656Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-31T17:12:14.669191Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-31T17:12:14.669236Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-31T17:12:14.736444Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-31T17:12:14.736490Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-31T17:12:14.830558Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: 
test_ydb_token /Root 1
2025-03-31T17:12:14.830601Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success
2025-03-31T17:12:14.896195Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1
2025-03-31T17:12:14.896267Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success
2025-03-31T17:12:14.959220Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1
2025-03-31T17:12:14.959262Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success
2025-03-31T17:12:15.028038Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1
2025-03-31T17:12:15.028079Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success
2025-03-31T17:12:15.120017Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1
2025-03-31T17:12:15.120056Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success
2025-03-31T17:12:15.130408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480
2025-03-31T17:12:15.131991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480
2025-03-31T17:12:15.133313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480
2025-03-31T17:12:16.524800Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1
2025-03-31T17:12:16.524840Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success
assertion failed at ydb/core/viewer/viewer_ut.cpp:1948, virtual void NTestSuiteViewer::TTestCaseQueryExecuteScript::Execute_(NUnitTest::TTestContext &): (json.GetMap().contains("metadata")) {}
TBackTrace::Capture()+28 (0x18DF0F1C)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x192ACDE0)
NTestSuiteViewer::TTestCaseQueryExecuteScript::Execute_(NUnitTest::TTestContext&)+8947 (0x18984533)
std::__y1::__function::__func, void ()>::operator()()+280 (0x1899A2A8)
TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x192E3E06)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x192B3959)
NTestSuiteViewer::TCurrentTest::Execute()+1204 (0x18999154)
NUnitTest::TTestFactory::Execute()+2438 (0x192B5226)
NUnitTest::RunMain(int, char**)+5213 (0x192DE37D)
??+0 (0x7F95E7488D90)
__libc_start_main+128 (0x7F95E7488E40)
_start+41 (0x16292029)
|96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v0] [GOOD]
|96.9%| [TA] $(B)/ydb/tests/functional/sqs/cloud/test-results/py3test/{meta.json ... results_accumulator.log}
|97.0%| [TA] {RESULT} $(B)/ydb/tests/functional/sqs/cloud/test-results/py3test/{meta.json ... results_accumulator.log}
>> test_sql_streaming.py::test[suites-GroupByHoppingWindowExprKey-default.txt] [FAIL]
>> test_sql_streaming.py::test[suites-GroupByHoppingWindowListKey-default.txt]
>> test_sql_streaming.py::test[suites-ReadWriteSameTopic-default.txt] [FAIL]
>> test_sql_streaming.py::test[suites-ReadWriteTopic-default.txt]
>> test_sql_streaming.py::test[suites-ReadTopicGroupWriteToSolomon-default.txt] [FAIL]
>> test_sql_streaming.py::test[suites-ReadTopicWithMetadata-default.txt]
>> test_sql_streaming.py::test[suites-GroupByHopListKey-default.txt] [FAIL]
>> test_sql_streaming.py::test[suites-GroupByHopNoKey-default.txt]
>> Replication::PauseAndResumeReplication
>> Replication::PauseAndResumeReplication [FAIL]
|97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/oom/py3test >> overlapping_portions.py::TestOverlappingPortions::test [FAIL]
------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/transfer/unittest >> Replication::PauseAndResumeReplication [FAIL]
Test command err:
DDL: CREATE TABLE `SourceTable_16914372676288429640` (
    Key Uint64 NOT NULL,
    Message Utf8,
    PRIMARY KEY (Key)
);
DDL: CREATE ASYNC REPLICATION `Replication_16914372676288429640`
FOR `SourceTable_16914372676288429640` AS `Table_16914372676288429640`
WITH (
    CONNECTION_STRING = 'grpc://localhost:24019/?database=local'
);
>>>>> Query: INSERT INTO `SourceTable_16914372676288429640` (`Key`, `Message`) VALUES (1, 'Message-1');
>>>>> Query: SELECT `Message` FROM `Table_16914372676288429640` ORDER BY `Message`
assertion failed at ydb/tests/functional/transfer/utils.h:422, std::pair NReplicationTest::MainTestCase::DoRead(const TExpectations &): (res.IsSuccess())
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/local/Table_16914372676288429640]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003
TBackTrace::Capture()+28 (0x1BF4D1C)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x1CFA9C0)
NReplicationTest::MainTestCase::DoRead(TVector>, std::__y1::shared_ptr>, std::__y1::allocator>, std::__y1::shared_ptr>>>, std::__y1::allocator>, std::__y1::shared_ptr>, std::__y1::allocator>, std::__y1::shared_ptr>>>>> const&)+2901 (0x1883995)
NReplicationTest::MainTestCase::CheckResult(TVector>, std::__y1::shared_ptr>, std::__y1::allocator>, std::__y1::shared_ptr>>>, std::__y1::allocator>, std::__y1::shared_ptr>, std::__y1::allocator>, std::__y1::shared_ptr>>>>> const&)+618 (0x1878ABA)
NTestSuiteReplication::TTestCasePauseAndResumeReplication::Execute_(NUnitTest::TTestContext&)+1999 (0x18734AF)
std::__y1::__function::__func, void ()>::operator()()+280 (0x187D3C8)
TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x1D28EC6)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x1D01539)
NTestSuiteReplication::TCurrentTest::Execute()+1204 (0x187C104)
NUnitTest::TTestFactory::Execute()+2438 (0x1D02E06)
NUnitTest::RunMain(int, char**)+5213 (0x1D233AD)
??+0 (0x7FAD18890D90)
__libc_start_main+128 (0x7FAD18890E40)
_start+41 (0x1847029)
|97.1%| [TM] {RESULT} ydb/tests/functional/transfer/unittest
>> ReadIteratorExternalBlobs::ExtBlobsMultipleColumns [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> ReadIteratorExternalBlobs::ExtBlobsMultipleColumns [GOOD]
Test command err:
2025-03-31T17:11:32.787844Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T17:11:32.788183Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T17:11:32.788390Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/r3ux/000374/r3tmp/tmpWlGsNO/pdisk_1.dat 2025-03-31T17:11:33.365305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T17:11:33.410647Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T17:11:33.471875Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T17:11:33.473387Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T17:11:33.485958Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T17:11:33.590606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T17:11:34.000480Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:742:2623], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:34.001312Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:752:2628], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:34.001383Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:34.020003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T17:11:34.180057Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:756:2631], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T17:11:34.254268Z node 1 :TX_PROXY ERROR: Actor# [1:830:2674] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T17:11:41.482498Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqpjzsb58fjk8twcf0jbt312, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTI0ZjI4NjgtM2QzYzNlYTgtMTIzYzBmYS1hODljZDRiYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:11:48.076803Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqpk00vy2v9kx593ttr2mr4w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWU5YjNmNGEtZGU0ZTg1ZDctMmU1MWYzNjEtYjYxYzJkYTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:11:54.076748Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqpk078s7ew25nbb84sw76rz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWU3OTAxOTEtZWJiMjY1Y2EtMTIwMWExZmItNGRhZGU5OA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:12:01.708001Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqpk0d4a8g7s5149z4b9qjcf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjQzOWE0MWEtOTZiZTllNGUtNWMxY2U0ZjktYzZmNWE0ZmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:12:09.342984Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jqpk0mnp5jv4d7m6c9ccc66t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmU0ZWVlNTItNjVhMThhM2EtN2JiYzFmYTctNDU0MDQ0Njg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:12:16.758443Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jqpk0w2w1ymxvq3dbx31vrxb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTk3OGQxZWQtMWFmZDA5YjgtNjhhMjI4YzEtYjcwNDczODI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:12:24.765996Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jqpk13ap7mydedcjjfcd4cag, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2U3N2RhMTQtMzViN2M2MDktMWVhZThjMWUtMmE0YmYzZjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:12:32.458185Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jqpk1b4j2zshp8c8fz57eg4v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGQ1NDQyOS1hYTgxOWY1Yy05NWY2MmI5Ny1iZDYwZWY2OA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:12:40.481969Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jqpk1jneb7egfthhkr3x89ac, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWYwY2IxM2YtNmYyMGFlZWYtOWMwNzRhZWQtYTgwMDBiNWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:12:47.590240Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715669. 
Ctx: { TraceId: 01jqpk1tet6m6cbs33qf9sqg9q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzJiMDE3YWMtNmU1MmU0MWUtMjAxNDMzY2UtYzM3YWVmNWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... waiting for stats after upsert 2025-03-31T17:12:49.813839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T17:12:49.813919Z node 1 :IMPORT WARN: Table profiles were not loaded Captured TEvDataShard::TEvPeriodicTableStats DatashardId: 72075186224037888 TableLocalId: 2 Generation: 1 Round: 0 TableStats { DataSize: 20972828 RowCount: 10 IndexSize: 0 InMemSize: 4195104 LastAccessTime: 1531 LastUpdateTime: 1531 ImmediateTxCompleted: 10 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 10 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 2 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 380 IndexSize: 0 } Channels { Channel: 2 DataSize: 16777344 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 5349 Memory: 4320316 Storage: 16778723 } ShardState: 2 UserTablePartOwners: 72075186224037888 NodeId: 1 StartTime: 450 TableOwnerId: 72057594046644480 FollowerId: 0 ... waiting for stats after compaction Captured TEvDataShard::TEvPeriodicTableStats DatashardId: 72075186224037888 TableLocalId: 2 Generation: 1 Round: 1 TableStats { DataSize: 20972828 RowCount: 10 IndexSize: 0 InMemSize: 4195104 LastAccessTime: 1531 LastUpdateTime: 1531 ImmediateTxCompleted: 10 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 10 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 2 LastFullCompactionTs: 20 HasLoanedParts: false Channels { Channel: 1 DataSize: 380 IndexSize: 0 } Channels { Channel: 2 DataSize: 16777344 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 1451 Memory: 126364 Storage: 20973341 } ShardState: 2 UserTablePartOwners: 72075186224037888 NodeId: 1 StartTime: 450 TableOwnerId: 72057594046644480 FollowerId: 0 2025-03-31T17:12:52.650930Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01jqpk25yt79rknbrxrzdcpxa7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjEyM2Q4ZTItZGQ3MDc2NmMtZjRlNjQzNTctZTMxYTc1MGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> test_sql_streaming.py::test[suites-ReadWriteTopic-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-ReadWriteTopicWithSchema-default.txt] >> test_sql_streaming.py::test[suites-ReadTopicWithMetadata-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-ReadTopicWithMetadataInsideFilter-default.txt] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowListKey-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowNoKey-default.txt] >> test_sql_streaming.py::test[suites-GroupByHopNoKey-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHopPercentile-default.txt] |97.2%| [TA] $(B)/ydb/tests/olap/oom/test-results/py3test/{meta.json ... results_accumulator.log} |97.3%| [TA] {RESULT} $(B)/ydb/tests/olap/oom/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_sql_streaming.py::test[suites-ReadWriteTopicWithSchema-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-WriteTwoTopics-default.txt] >> test_sql_streaming.py::test[suites-ReadTopicWithMetadataInsideFilter-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-ReadTopicWithMetadataNestedDeep-default.txt] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowNoKey-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowPercentile-default.txt] >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHopPercentile-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHopTimeExtractorUnusedColumns-default.txt] >> DataShardTxOrder::RandomPointsAndRanges [GOOD] >> test_sql_streaming.py::test[suites-WriteTwoTopics-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-ReadTopicWithMetadataNestedDeep-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowPercentile-default.txt] [FAIL] |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/streaming_optimize/py3test >> test_sql_streaming.py::test[suites-WriteTwoTopics-default.txt] [FAIL] |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/streaming_optimize/py3test >> test_sql_streaming.py::test[suites-ReadTopicWithMetadataNestedDeep-default.txt] [FAIL] >> test_result_limits.py::TestResultLimits::test_large_row [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPointsAndRanges [GOOD] Test command err: 2025-03-31T17:11:33.211484Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvBoot 2025-03-31T17:11:33.212474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-31T17:11:33.212517Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T17:11:33.218859Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvRestored 2025-03-31T17:11:33.219259Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:131:2154] 2025-03-31T17:11:33.219459Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-31T17:11:33.227242Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2140], Recipient [1:131:2154]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T17:11:33.256582Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-31T17:11:33.256812Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-31T17:11:33.258046Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-03-31T17:11:33.258102Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-03-31T17:11:33.258141Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-03-31T17:11:33.258473Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-31T17:11:33.258926Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-31T17:11:33.258976Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:197:2154] in generation 2 2025-03-31T17:11:33.315234Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-31T17:11:33.347747Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 
2025-03-31T17:11:33.347902Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-31T17:11:33.347997Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-03-31T17:11:33.348031Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-03-31T17:11:33.348064Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-03-31T17:11:33.348116Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T17:11:33.348305Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T17:11:33.348340Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T17:11:33.348557Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-03-31T17:11:33.348621Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-03-31T17:11:33.348654Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-31T17:11:33.348692Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-03-31T17:11:33.348716Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-03-31T17:11:33.348740Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-31T17:11:33.348761Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-31T17:11:33.348783Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-03-31T17:11:33.348805Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T17:11:33.348865Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T17:11:33.348901Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T17:11:33.348951Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-03-31T17:11:33.350816Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:131:2154]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nK\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\n \000Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-03-31T17:11:33.350871Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-31T17:11:33.350958Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-31T17:11:33.351125Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-03-31T17:11:33.351200Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-03-31T17:11:33.351265Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-03-31T17:11:33.351310Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 
2025-03-31T17:11:33.351353Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-03-31T17:11:33.351396Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-03-31T17:11:33.351427Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-03-31T17:11:33.351719Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-03-31T17:11:33.351756Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-03-31T17:11:33.351783Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-03-31T17:11:33.351805Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-03-31T17:11:33.351835Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-03-31T17:11:33.351863Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-03-31T17:11:33.351909Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-03-31T17:11:33.351933Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-03-31T17:11:33.351948Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-03-31T17:11:33.364330Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-03-31T17:11:33.364390Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-03-31T17:11:33.364413Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-03-31T17:11:33.364454Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-03-31T17:11:33.364513Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-03-31T17:11:33.364974Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:131:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T17:11:33.365026Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T17:11:33.365090Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-03-31T17:11:33.365209Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:131:2154]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-03-31T17:11:33.365236Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-31T17:11:33.365345Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-03-31T17:11:33.365377Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-31T17:11:33.365405Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-03-31T17:11:33.365436Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-03-31T17:11:33.368130Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 
} 2025-03-31T17:11:33.368192Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T17:11:33.368366Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:131:2154], Recipient [1:131:2154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T17:11:33.368394Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T17:11:33.368461Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-31T17:11:33.368506Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-03-31T17:11:33.368539Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-31T17:11:33.368577Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-03-31T17:11:33.368613Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-03-31T17:11:33.368654Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-31T17:11:33.368688Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-03-31T17:11:33.368721Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-03-31T17:11:33.368755Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-03-31T17:11:33.368936Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-03-31T17:11:33.368975Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-31T17:11:33.369016Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-03-31T17:11:33.369042Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-03-31T17:11:33.369083Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-03-31T17:11:33.369151Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-31T17:11:33.369177Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-03-31T17:11:33.369216Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-31T17:11:33.369248Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-03-31T17:11:33.369310Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-03-31T17:11:33.369383Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-03-31T17:11:33.369412Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-03-31T17:11:33.369442Z node 1 :TX_D ... 
node 4 :TX_DATASHARD DEBUG: Found ready candidate operation [1000004:403] at 9437184 for LoadAndWaitInRS 2025-03-31T17:13:22.509720Z node 4 :TX_DATASHARD TRACE: Trying to execute [1000004:403] at 9437184 on unit LoadAndWaitInRS 2025-03-31T17:13:22.509754Z node 4 :TX_DATASHARD TRACE: Execution status for [1000004:403] at 9437184 is Executed 2025-03-31T17:13:22.509784Z node 4 :TX_DATASHARD TRACE: Advance execution plan for [1000004:403] at 9437184 executing on unit LoadAndWaitInRS 2025-03-31T17:13:22.509819Z node 4 :TX_DATASHARD TRACE: Add [1000004:403] at 9437184 to execution unit ExecuteDataTx 2025-03-31T17:13:22.509847Z node 4 :TX_DATASHARD TRACE: Trying to execute [1000004:403] at 9437184 on unit ExecuteDataTx 2025-03-31T17:13:22.511687Z node 4 :TX_DATASHARD TRACE: Executed operation [1000004:403] at tablet 9437184 with status COMPLETE 2025-03-31T17:13:22.511753Z node 4 :TX_DATASHARD TRACE: Datashard execution counters for [1000004:403] at 9437184: {NSelectRow: 1, NSelectRange: 2, NUpdateRow: 5, NEraseRow: 0, SelectRowRows: 1, SelectRowBytes: 8, SelectRangeRows: 61, SelectRangeBytes: 488, UpdateRowBytes: 37, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-03-31T17:13:22.511816Z node 4 :TX_DATASHARD TRACE: Execution status for [1000004:403] at 9437184 is ExecutedNoMoreRestarts 2025-03-31T17:13:22.511848Z node 4 :TX_DATASHARD TRACE: Advance execution plan for [1000004:403] at 9437184 executing on unit ExecuteDataTx 2025-03-31T17:13:22.511881Z node 4 :TX_DATASHARD TRACE: Add [1000004:403] at 9437184 to execution unit CompleteOperation 2025-03-31T17:13:22.511911Z node 4 :TX_DATASHARD TRACE: Trying to execute [1000004:403] at 9437184 on unit CompleteOperation 2025-03-31T17:13:22.512159Z node 4 :TX_DATASHARD TRACE: Execution status for [1000004:403] at 9437184 is DelayComplete 2025-03-31T17:13:22.512180Z node 4 :TX_DATASHARD TRACE: Advance execution plan for [1000004:403] at 9437184 executing on unit CompleteOperation 2025-03-31T17:13:22.512204Z node 4 :TX_DATASHARD TRACE: Add [1000004:403] at 9437184 to execution unit CompletedOperations 2025-03-31T17:13:22.512224Z node 4 :TX_DATASHARD TRACE: Trying to execute [1000004:403] at 9437184 on unit CompletedOperations 2025-03-31T17:13:22.512249Z node 4 :TX_DATASHARD TRACE: Execution status for [1000004:403] at 9437184 is Executed 2025-03-31T17:13:22.512268Z node 4 :TX_DATASHARD TRACE: Advance execution plan for [1000004:403] at 9437184 executing on unit CompletedOperations 2025-03-31T17:13:22.512289Z node 4 :TX_DATASHARD TRACE: Execution plan for [1000004:403] at 9437184 has finished 2025-03-31T17:13:22.512312Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T17:13:22.512333Z node 4 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-31T17:13:22.512356Z node 4 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-31T17:13:22.512377Z node 4 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-31T17:13:22.531005Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T17:13:22.531070Z node 4 :TX_DATASHARD TRACE: Complete execution for [1000004:402] at 9437184 on unit CompleteOperation 2025-03-31T17:13:22.531147Z node 4 :TX_DATASHARD DEBUG: Complete [1000004 : 402] from 9437184 at tablet 9437184 send result to client [4:99:2134], exec latency: 4 ms, propose latency: 6 ms 2025-03-31T17:13:22.531209Z node 4 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 
9437184 {TEvReadSet step# 1000004 txid# 402 TabletSource# 9437186 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 399} 2025-03-31T17:13:22.531246Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T17:13:22.531500Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T17:13:22.531529Z node 4 :TX_DATASHARD TRACE: Complete execution for [1000004:403] at 9437184 on unit StoreAndSendOutRS 2025-03-31T17:13:22.531555Z node 4 :TX_DATASHARD DEBUG: Send RS 400 at 9437184 from 9437184 to 9437185 txId 403 2025-03-31T17:13:22.531596Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T17:13:22.531621Z node 4 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-03-31T17:13:22.531645Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-31T17:13:22.531662Z node 4 :TX_DATASHARD TRACE: Complete execution for [1000004:403] at 9437184 on unit CompleteOperation 2025-03-31T17:13:22.531697Z node 4 :TX_DATASHARD DEBUG: Complete [1000004 : 403] from 9437184 at tablet 9437184 send result to client [4:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-31T17:13:22.531735Z node 4 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437186 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 400} 2025-03-31T17:13:22.531759Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-31T17:13:22.531990Z node 4 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [4:234:2227], Recipient [4:455:2397]: {TEvReadSet step# 1000004 txid# 402 TabletSource# 9437186 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 399} 2025-03-31T17:13:22.532022Z node 4 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T17:13:22.532050Z node 4 :TX_DATASHARD DEBUG: Receive RS Ack at 9437186 source 9437186 dest 9437184 consumer 9437184 txId 402 2025-03-31T17:13:22.532351Z node 4 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [4:234:2227], Recipient [4:455:2397]: {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437186 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 400} 2025-03-31T17:13:22.532382Z node 4 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T17:13:22.532403Z node 4 :TX_DATASHARD DEBUG: Receive RS Ack at 9437186 source 9437186 dest 9437184 consumer 9437184 txId 403 2025-03-31T17:13:22.532665Z node 4 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [4:234:2227], Recipient [4:344:2311]: {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437184 TabletDest# 9437185 SetTabletProducer# 9437184 ReadSet.Size()# 7 Seqno# 400 Flags# 0} 2025-03-31T17:13:22.532695Z node 4 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-03-31T17:13:22.532726Z node 4 :TX_DATASHARD DEBUG: Receive RS at 9437185 source 9437184 dest 9437185 producer 9437184 txId 403 2025-03-31T17:13:22.532790Z node 4 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 9437185 got read set: {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437184 TabletDest# 9437185 SetTabletProducer# 9437184 ReadSet.Size()# 7 Seqno# 400 Flags# 0} 2025-03-31T17:13:22.532834Z node 4 :TX_DATASHARD TRACE: Filled readset for [1000004:403] from=9437184 to=9437185origin=9437184 2025-03-31T17:13:22.532891Z node 4 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437185 
2025-03-31T17:13:22.533071Z node 4 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [4:344:2311], Recipient [4:344:2311]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-31T17:13:22.533101Z node 4 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-31T17:13:22.533135Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437185 2025-03-31T17:13:22.533169Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 active 1 active planned 1 immediate 0 planned 1 2025-03-31T17:13:22.533210Z node 4 :TX_DATASHARD DEBUG: Found ready candidate operation [1000004:403] at 9437185 for LoadAndWaitInRS 2025-03-31T17:13:22.533259Z node 4 :TX_DATASHARD TRACE: Trying to execute [1000004:403] at 9437185 on unit LoadAndWaitInRS 2025-03-31T17:13:22.533296Z node 4 :TX_DATASHARD TRACE: Execution status for [1000004:403] at 9437185 is Executed 2025-03-31T17:13:22.533330Z node 4 :TX_DATASHARD TRACE: Advance execution plan for [1000004:403] at 9437185 executing on unit LoadAndWaitInRS 2025-03-31T17:13:22.533362Z node 4 :TX_DATASHARD TRACE: Add [1000004:403] at 9437185 to execution unit ExecuteDataTx 2025-03-31T17:13:22.533388Z node 4 :TX_DATASHARD TRACE: Trying to execute [1000004:403] at 9437185 on unit ExecuteDataTx 2025-03-31T17:13:22.534427Z node 4 :TX_DATASHARD TRACE: Executed operation [1000004:403] at tablet 9437185 with status COMPLETE 2025-03-31T17:13:22.534491Z node 4 :TX_DATASHARD TRACE: Datashard execution counters for [1000004:403] at 9437185: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 7, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 53, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-03-31T17:13:22.534549Z node 4 :TX_DATASHARD TRACE: Execution status for [1000004:403] at 9437185 is ExecutedNoMoreRestarts 2025-03-31T17:13:22.534578Z node 4 :TX_DATASHARD TRACE: Advance execution plan for [1000004:403] at 9437185 executing on unit ExecuteDataTx 2025-03-31T17:13:22.534612Z node 4 :TX_DATASHARD TRACE: Add [1000004:403] at 9437185 to execution unit CompleteOperation 2025-03-31T17:13:22.534643Z node 4 :TX_DATASHARD TRACE: Trying to execute [1000004:403] at 9437185 on unit CompleteOperation 2025-03-31T17:13:22.534929Z node 4 :TX_DATASHARD TRACE: Execution status for [1000004:403] at 9437185 is DelayComplete 2025-03-31T17:13:22.534967Z node 4 :TX_DATASHARD TRACE: Advance execution plan for [1000004:403] at 9437185 executing on unit CompleteOperation 2025-03-31T17:13:22.535002Z node 4 :TX_DATASHARD TRACE: Add [1000004:403] at 9437185 to execution unit CompletedOperations 2025-03-31T17:13:22.535030Z node 4 :TX_DATASHARD TRACE: Trying to execute [1000004:403] at 9437185 on unit CompletedOperations 2025-03-31T17:13:22.535067Z node 4 :TX_DATASHARD TRACE: Execution status for [1000004:403] at 9437185 is Executed 2025-03-31T17:13:22.535940Z node 4 :TX_DATASHARD TRACE: Advance execution plan for [1000004:403] at 9437185 executing on unit CompletedOperations 2025-03-31T17:13:22.535976Z node 4 :TX_DATASHARD TRACE: Execution plan for [1000004:403] at 9437185 has finished 2025-03-31T17:13:22.536002Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T17:13:22.536026Z node 4 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437185 2025-03-31T17:13:22.536050Z node 4 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437185 has no attached operations 
2025-03-31T17:13:22.536072Z node 4 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437185 2025-03-31T17:13:22.554690Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437185 2025-03-31T17:13:22.554771Z node 4 :TX_DATASHARD TRACE: Complete execution for [1000004:403] at 9437185 on unit CompleteOperation 2025-03-31T17:13:22.554841Z node 4 :TX_DATASHARD DEBUG: Complete [1000004 : 403] from 9437185 at tablet 9437185 send result to client [4:99:2134], exec latency: 4 ms, propose latency: 6 ms 2025-03-31T17:13:22.554912Z node 4 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437185 {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437184 TabletDest# 9437185 SetTabletConsumer# 9437185 Flags# 0 Seqno# 400} 2025-03-31T17:13:22.554947Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 2025-03-31T17:13:22.555634Z node 4 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [4:344:2311], Recipient [4:234:2227]: {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437184 TabletDest# 9437185 SetTabletConsumer# 9437185 Flags# 0 Seqno# 400} 2025-03-31T17:13:22.555692Z node 4 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-31T17:13:22.555732Z node 4 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437185 consumer 9437185 txId 403 |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/streaming_optimize/py3test >> test_sql_streaming.py::test[suites-GroupByHoppingWindowPercentile-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHopTimeExtractorUnusedColumns-default.txt] [FAIL] |97.6%| [TA] $(B)/ydb/core/tx/datashard/ut_order/test-results/unittest/{meta.json ... results_accumulator.log} >> test_drain.py::TestHive::test_drain_on_stop [GOOD] >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true] [FAIL] |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/streaming_optimize/py3test >> test_sql_streaming.py::test[suites-GroupByHopTimeExtractorUnusedColumns-default.txt] [FAIL] |97.7%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_order/test-results/unittest/{meta.json ... results_accumulator.log} |97.7%| [TA] $(B)/ydb/tests/fq/streaming_optimize/test-results/py3test/{meta.json ... results_accumulator.log} |97.8%| [TA] {RESULT} $(B)/ydb/tests/fq/streaming_optimize/test-results/py3test/{meta.json ... results_accumulator.log} |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_drain.py::TestHive::test_drain_on_stop [GOOD] |97.9%| [TA] $(B)/ydb/tests/functional/hive/test-results/py3test/{meta.json ... results_accumulator.log} |98.0%| [TA] {RESULT} $(B)/ydb/tests/functional/hive/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_result_limits.py::TestResultLimits::test_quotas[kikimr0] >> test_result_limits.py::TestResultLimits::test_quotas[kikimr0] [GOOD] >> IncrementalBackup::ComplexRestoreBackupCollection+WithIncremental [FAIL] |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/mem_alloc/py3test >> test_result_limits.py::TestResultLimits::test_quotas[kikimr0] [GOOD] |98.1%| [TM] {RESULT} ydb/tests/fq/mem_alloc/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false] [FAIL] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_backup/unittest >> IncrementalBackup::ComplexRestoreBackupCollection+WithIncremental [FAIL] Test command err: 2025-03-31T17:11:40.830225Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T17:11:40.830520Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T17:11:40.830701Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/r3ux/000201/r3tmp/tmpJ0IDas/pdisk_1.dat 2025-03-31T17:11:41.363817Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:596:2520], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T17:11:41.363940Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T17:11:41.363978Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-03-31T17:11:41.364183Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:593:2518], Recipient [1:409:2404]: {TEvModifySchemeTransaction txid# 1 TabletId# 72057594046644480} 2025-03-31T17:11:41.364219Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-03-31T17:11:41.491493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2025-03-31T17:11:41.493244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T17:11:41.494203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-03-31T17:11:41.496239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-31T17:11:41.496390Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T17:11:41.496527Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-31T17:11:41.497953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-03-31T17:11:41.499048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-03-31T17:11:41.499201Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-31T17:11:41.499243Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-03-31T17:11:41.499465Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-03-31T17:11:41.499503Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-03-31T17:11:41.499592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-03-31T17:11:41.499674Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-03-31T17:11:41.499770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T17:11:41.499808Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T17:11:41.500759Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-31T17:11:41.501388Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-31T17:11:41.501434Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-03-31T17:11:41.501575Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-03-31T17:11:41.501610Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-03-31T17:11:41.501681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-03-31T17:11:41.501732Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 2025-03-31T17:11:41.501769Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T17:11:41.501848Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-31T17:11:41.502248Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-31T17:11:41.502289Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-03-31T17:11:41.502395Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-03-31T17:11:41.502423Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-03-31T17:11:41.502483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-03-31T17:11:41.502515Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2025-03-31T17:11:41.502568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2025-03-31T17:11:41.502596Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-31T17:11:41.502647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T17:11:41.508291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T17:11:41.509010Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-31T17:11:41.509086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 
2025-03-31T17:11:41.510320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2025-03-31T17:11:41.511887Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877760, Sender [1:601:2525], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046316545 Status: OK ServerId: [1:603:2526] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-03-31T17:11:41.511955Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-03-31T17:11:41.512023Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Handle TEvClientConnected, tabletId: 72057594046316545, status: OK, at schemeshard: 72057594046644480 2025-03-31T17:11:41.512187Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269091328, Sender [1:405:2400], Recipient [1:409:2404]: NKikimrTx.TEvProposeTransactionStatus Status: 16 StepId: 500 TxId: 1 2025-03-31T17:11:41.512633Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:605:2528], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T17:11:41.512681Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T17:11:41.512716Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-03-31T17:11:41.512859Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [1:593:2518], Recipient [1:409:2404]: NKikimrScheme.TEvNotifyTxCompletion TxId: 1 2025-03-31T17:11:41.512912Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-03-31T17:11:41.512994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046644480 2025-03-31T17:11:41.513048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2025-03-31T17:11:41.513115Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046644480 2025-03-31T17:11:41.591219Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 273285138, Sender [1:43:2090], Recipient [1:409:2404]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } } ItemKinds: 26 ItemKinds: 34 ItemKinds: 52 ItemKinds: 54 ItemKinds: 73 Local: true } 2025-03-31T17:11:41.591320Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } 2025-03-31T17:11:41.591379Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T17:11:41.592479Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [RootDataErasureManager] Stop 2025-03-31T17:11:41.592554Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Send TEvConfigNotificationResponse: SubscriptionId: 0 ConfigId { } 2025-03-31T17:11:41.635825Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T17:11:41.635978Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T17:11:41.652150Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T17:11:41.734173Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269091328, Sender [1:405:2400], Recipient [1:409:2404]: NKikimrTx.TEvProposeTransactionStatus Status: 17 StepId: 
500 TxId: 1 2025-03-31T17:11:41.734976Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:636:2544], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T17:11:41.735023Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T17:11:41.735056Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-03-31T17:11:41.735268Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269287424, Sender [1:568:2495], Recipient [1:409:2404]: {TEvPlanStep step# 500 MediatorId# 72057594046382081 TabletID 72057594046644480} 2025-03-31T17:11:41.735305Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-31T17:11:41.735400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-31T17:11:41.735532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 0 RawX2: 0 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemes ... pient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-03-31T17:13:55.277848Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-03-31T17:13:55.277872Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-03-31T17:13:55.277928Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-03-31T17:13:55.277953Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2025-03-31T17:13:55.278029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 22 shard idx 72057594046644480:7 data size 0 row count 0 2025-03-31T17:13:55.278085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037894 maps to shardIdx: 72057594046644480:7 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 22], pathId map=TableA, is column=0, is olap=0 2025-03-31T17:13:55.278115Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037894 followerId=0, pathId 22: RowCount 0, DataSize 0 2025-03-31T17:13:55.278140Z node 1 :FLAT_TX_SCHEMESHARD TRACE: BuildStatsForCollector: datashardId 72075186224037894, followerId 0 2025-03-31T17:13:55.278186Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Skipped shard# 72057594046644480:7 with partCount# 0, rowCount# 0, searchHeight# 0, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72057594046644480 2025-03-31T17:13:55.278279Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-31T17:13:55.288719Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435091, Sender [0:0:0], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-03-31T17:13:55.288799Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-03-31T17:13:55.288833Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-03-31T17:13:55.362612Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037895 (dry run) active 0 active planned 0 immediate 0 planned 1 2025-03-31T17:13:55.363031Z node 1 :FLAT_TX_SCHEMESHARD TRACE: 
StateWork, received event# 269553162, Sender [1:1581:3192], Recipient [1:409:2404]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186224037895 TableLocalId: 24 Generation: 1 Round: 58 TableStats { DataSize: 54 RowCount: 2 IndexSize: 0 InMemSize: 0 LastAccessTime: 5450 LastUpdateTime: 5450 ImmediateTxCompleted: 1 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 1 RowUpdates: 2 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 54 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 21 Memory: 119576 Storage: 142 } ShardState: 2 UserTablePartOwners: 72075186224037895 NodeId: 1 StartTime: 4950 TableOwnerId: 72057594046644480 FollowerId: 0 2025-03-31T17:13:55.363092Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-03-31T17:13:55.363128Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037895 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 24] state 'Ready' dataSize 54 rowCount 2 cpuUsage 0.0021 2025-03-31T17:13:55.363203Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037895 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 24] raw table stats: DataSize: 54 RowCount: 2 IndexSize: 0 InMemSize: 0 LastAccessTime: 5450 LastUpdateTime: 5450 ImmediateTxCompleted: 1 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 1 RowUpdates: 2 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 54 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-03-31T17:13:55.363231Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-03-31T17:13:55.416654Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435091, Sender [0:0:0], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-03-31T17:13:55.416724Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-03-31T17:13:55.416750Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-03-31T17:13:55.416813Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-03-31T17:13:55.416853Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2025-03-31T17:13:55.416949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 24 shard idx 72057594046644480:8 data size 54 row count 2 2025-03-31T17:13:55.416998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037895 maps to shardIdx: 72057594046644480:8 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 24], pathId map=TableA, is column=0, is olap=0 2025-03-31T17:13:55.417030Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037895 followerId=0, pathId 24: RowCount 2, DataSize 54 2025-03-31T17:13:55.417053Z node 1 :FLAT_TX_SCHEMESHARD TRACE: BuildStatsForCollector: 
datashardId 72075186224037895, followerId 0 2025-03-31T17:13:55.417125Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Skipped shard# 72057594046644480:8 with partCount# 1, rowCount# 2, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72057594046644480 2025-03-31T17:13:55.417229Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-31T17:13:55.427637Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435091, Sender [0:0:0], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-03-31T17:13:55.427706Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-03-31T17:13:55.427731Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-03-31T17:13:55.489614Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T17:13:55.489690Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T17:13:55.489754Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T17:13:55.489784Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T17:13:55.510898Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037896 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T17:13:55.594550Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037897 (dry run) active 0 active planned 0 immediate 0 planned 1 2025-03-31T17:13:55.667925Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T17:13:55.667995Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T17:13:55.668066Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T17:13:55.668091Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T17:13:55.689297Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037898 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T17:13:55.773218Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037899 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T17:13:55.857103Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T17:13:55.857180Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T17:13:55.858591Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjEyYzkxMmMtOWZiOThhMmYtNmUxNjhmZjQtYjdkNTU2ODU=, ActorId: [1:2033:3520], ActorState: ExecuteState, TraceId: 01jqpk067k640gkk7vf8j8cmxk, Create QueryResponse for error on request, msg: 2025-03-31T17:13:55.858788Z node 
1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime
2025-03-31T17:13:55.858833Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime
2025-03-31T17:13:55.858947Z node 1 :KQP_SLOW_LOG WARN: SessionId: ydb://session/3?node_id=1&id=ZjEyYzkxMmMtOWZiOThhMmYtNmUxNjhmZjQtYjdkNTU2ODU=, Slow query, duration: 600.000000s, status: GENERIC_ERROR, user: UNAUTHENTICATED, results: 0b, text: "RESTORE `MyCollection`;", parameters: 0b
assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (TIMEOUT != SUCCESS) Response { QueryIssues { message: "Request timeout 600000ms exceeded" severity: 1 } QueryIssues { message: "Cancelling after 600000ms in ExecuteState" severity: 1 } TxMeta { } } YdbStatus: TIMEOUT , with diff: (TIM|SUCC)E(OUT|SS)
TBackTrace::Capture()+28 (0x18FC909C)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x194861E0)
NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode)+3639 (0x48C26827)
NKikimr::NTestSuiteIncrementalBackup::TTestCaseComplexRestoreBackupCollection::Execute_(NUnitTest::TTestContext&)+26163 (0x18C197A3)
std::__y1::__function::__func, void ()>::operator()()+280 (0x18BD52A8)
TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x194BD206)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x1948CD59)
NKikimr::NTestSuiteIncrementalBackup::TCurrentTest::Execute()+1204 (0x18BD4154)
NUnitTest::TTestFactory::Execute()+2438 (0x1948E626)
NUnitTest::RunMain(int, char**)+5213 (0x194B777D)
??+0 (0x7F04F00E7D90)
__libc_start_main+128 (0x7F04F00E7E40)
_start+41 (0x1635A029)
|98.2%| [TA] $(B)/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/{meta.json ... results_accumulator.log}
|98.3%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/{meta.json ... results_accumulator.log}
|98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true] [FAIL]
|98.4%| [TA] $(B)/ydb/tests/functional/serverless/test-results/py3test/{meta.json ... results_accumulator.log}
|98.5%| [TA] {RESULT} $(B)/ydb/tests/functional/serverless/test-results/py3test/{meta.json ... results_accumulator.log}
>> test_tpch.py::TestTpchS1::test_tpch[12] [GOOD]
>> test_tpch.py::TestTpchS1::test_tpch[13]
>> test_tpch.py::TestTpchS1::test_tpch[13] [GOOD]
>> test_tpch.py::TestTpchS1::test_tpch[14]
>> Viewer::JsonStorageListingV1PDiskIdFilter [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::JsonStorageListingV1PDiskIdFilter [GOOD]
Test command err:
2025-03-31T17:13:01.108002Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:3109:2433], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T17:13:01.109320Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T17:13:01.110025Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T17:13:01.112650Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:2185:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T17:13:01.112818Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:2188:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T17:13:01.112926Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:2191:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T17:13:01.113920Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T17:13:01.113987Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T17:13:01.114031Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T17:13:01.114434Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:3112:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T17:13:01.114787Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T17:13:01.114856Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T17:13:01.115301Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T17:13:01.115709Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T17:13:01.116330Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T17:13:01.116597Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:2179:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T17:13:01.117578Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:3115:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T17:13:01.117656Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T17:13:01.117790Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:2182:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T17:13:01.118348Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T17:13:01.119047Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T17:13:01.119144Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T17:13:01.119630Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T17:13:01.120076Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T17:13:01.122707Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:3161:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T17:13:01.124009Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T17:13:01.125153Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-03-31T17:13:01.641041Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T17:13:01.858899Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-03-31T17:13:01.887725Z node 1 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-03-31T17:13:02.508825Z node 1 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 22606, node 1 TClient is connected to server localhost:26890 2025-03-31T17:13:02.903144Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T17:13:02.903233Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T17:13:02.903281Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T17:13:02.903896Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T17:14:21.507675Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [10:3153:2436], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T17:14:21.508639Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T17:14:21.509177Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T17:14:21.511619Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [16:2225:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T17:14:21.512592Z node 18 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [18:2231:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T17:14:21.512918Z node 16 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T17:14:21.513083Z node 17 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [17:2228:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T17:14:21.513818Z node 16 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T17:14:21.513911Z node 18 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T17:14:21.514309Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [13:1274:2181], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T17:14:21.514477Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [12:3156:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T17:14:21.514592Z node 17 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T17:14:21.514653Z node 18 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T17:14:21.515125Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T17:14:21.515240Z node 17 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T17:14:21.515536Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T17:14:21.515588Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T17:14:21.515980Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T17:14:21.517071Z node 15 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [15:2222:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T17:14:21.518552Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T17:14:21.519070Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T17:14:21.519693Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [11:3149:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T17:14:21.520400Z node 14 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [14:2219:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T17:14:21.520842Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T17:14:21.521546Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-31T17:14:21.521608Z node 14 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T17:14:21.521866Z node 14 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-03-31T17:14:21.939698Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-31T17:14:22.138271Z node 10 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-03-31T17:14:22.169087Z node 10 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-03-31T17:14:22.656733Z node 10 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 2890, node 10 TClient is connected to server localhost:17222 2025-03-31T17:14:23.101918Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T17:14:23.101997Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T17:14:23.102046Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T17:14:23.102624Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration |98.6%| [TA] $(B)/ydb/core/viewer/ut/test-results/unittest/{meta.json ... results_accumulator.log} |98.6%| [TA] {RESULT} $(B)/ydb/core/viewer/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test_tpch.py::TestTpchS1::test_tpch[14] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[15] >> test_workload.py::TestYdbLogWorkload::test[column] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[15] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[16] >> test_tpch.py::TestTpchS1::test_tpch[16] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[17] >> test_workload.py::TestYdbLogWorkload::test[column] [FAIL] |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/log/tests/py3test >> test_workload.py::TestYdbLogWorkload::test[column] [FAIL] |98.7%| [TM] {RESULT} ydb/tests/stress/log/tests/py3test >> test_tpch.py::TestTpchS1::test_tpch[17] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[18] >> test_tpch.py::TestTpchS1::test_tpch[18] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[19] >> test_tpch.py::TestTpchS1::test_tpch[19] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[20] >> test_read_update_write_load.py::TestReadUpdateWriteLoad::test[read_update_write_load] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[20] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[21] >> test_alter_tiering.py::TestAlterTiering::test[many_tables] >> test_tpch.py::TestTpchS1::test_tpch[21] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[22] >> test_tpch.py::TestTpchS1::test_tpch[22] [GOOD] |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/tpc/medium/py3test >> test_tpch.py::TestTpchS1::test_tpch[22] [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsWithCompactingMiddleRows [GOOD] |98.8%| [TA] $(B)/ydb/tests/functional/tpc/medium/test-results/py3test/{meta.json ... results_accumulator.log} |98.9%| [TA] {RESULT} $(B)/ydb/tests/functional/tpc/medium/test-results/py3test/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> ReadIteratorExternalBlobs::ExtBlobsWithCompactingMiddleRows [GOOD] Test command err: 2025-03-31T17:11:33.581520Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T17:11:33.581743Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T17:11:33.581866Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/r3ux/000357/r3tmp/tmpTspYve/pdisk_1.dat 2025-03-31T17:11:33.921540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T17:11:33.961883Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T17:11:34.000791Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T17:11:34.000918Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T17:11:34.012841Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T17:11:34.097782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T17:11:34.446608Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:742:2623], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:34.446758Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:752:2628], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:34.446870Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:34.453359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T17:11:34.622938Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:756:2631], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T17:11:34.687486Z node 1 :TX_PROXY ERROR: Actor# [1:830:2674] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T17:11:38.853660Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqpjzssbck9gsem3tdhm03z1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzZiODdjOTktNWJiM2I0ZDEtNTQ5ZTViYTItNjQ3Yjc2MjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:11:42.490653Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqpjzya22p8etgn1fzndfnmm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzJlMGU5Zi1iZTUwYWJmOC1mMmE0NTZlZS0yY2Y2YTg3Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:11:45.842355Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqpk01rz8knjrzsqyc5gc41p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmNkYWNiZjgtYjg1YjgwMjEtNDFkNzk5M2MtZWQ2ZGJiN2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:11:49.197949Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqpk0508ega2hg16hp5vc0tb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDFhYWU4ZmQtNzBiNjQzOTgtZmYzZjlhNDktYTA0OTM2YTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:11:52.476147Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jqpk0896a8035hhx7atm0xcd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2Q3MTMxNDQtOTA2MTc4MTktN2VhNWM2Ny01ZWFkNDYwMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:11:56.411343Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jqpk0bg51qaq235pygbzbvrt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTZkMjk3YTMtYThlNTI3OTMtOTllNzU0OTEtM2U1NGI1NDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:11:59.572157Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jqpk0faq61rn7ynxht9ganen, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjA2NTIxZmMtNDAxMDQ0YjMtMjdjYjU4MmMtYjQ2MTZjNGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:12:03.388031Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jqpk0jdg5b4grwq3aa8kwnxv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzhiMWM4OGItNjc4NGIwZmUtNGYzZDhiYWYtYjVmOGMxNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:12:06.789284Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jqpk0p4e9s1ap5r480tvkr9j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTdhYzAyMmEtYzJkZjI4NGEtZDk0YjIzMjUtZjVhNzRhM2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:12:10.130207Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715669. 
Ctx: { TraceId: 01jqpk0sf5982wee52hv78x7vw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzdiNzcyNTAtNDVmMGY3ZjAtYjEwMGFkYzAtMjRkOTc5MjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:12:13.777654Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01jqpk0wr0d4ttqxgwmf5rcx27, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODIwMDQzN2ItOTg5NDUwZDgtNDExNGQzYzYtNTJkZDMyMDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:12:17.484400Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01jqpk109zdxxjhbfj1gew6ary, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODFjY2IxZGEtMWU0ZWFhOS0xNTYwM2Q0LTFmNmNlY2Fj, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:12:21.482534Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jqpk13xtawdegf3ca16z5ppb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmM3ZjY5ZjMtOGIxNjM3ZTctM2Y1YTJmZWQtMjFiZDAzZGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:12:25.941557Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01jqpk17tw4eg1vg214ydv1tnt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjJmZmQ2Yy05YjU4YzAzMC1iZTUwZmM1Mi03NDZhZWFiOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:12:29.924013Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01jqpk1c725x0mvc7vf25qw6w1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjM3YzM4MTAtZmMzNjdjNzctMjZiYmRlYjItN2JhMmY1NzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:12:33.954577Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01jqpk1g2ga1geftp64sgqv7p0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzAxNThhOTMtZmIyZTQ1ZDktNjZjZGViMTctNTdjMjQ0Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:12:38.185353Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715676. Ctx: { TraceId: 01jqpk1m1v390vnznd488zzjwc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWVhOTcxNDgtNjg2NTU1YjAtOTUxMGI0ODEtZDJiMjJjYTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:12:42.327058Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715677. Ctx: { TraceId: 01jqpk1r564a3ppqhx3p9744en, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWM5OTg2ZGQtOWZlZmEzNjQtMjUwMDgzZmEtMjU4NTQzNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... 
waiting for stats after upsert 2025-03-31T17:12:44.535792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T17:12:44.535868Z node 1 :IMPORT WARN: Table profiles were not loaded Captured TEvDataShard::TEvPeriodicTableStats DatashardId: 72075186224037888 TableLocalId: 2 Generation: 1 Round: 0 TableStats { DataSize: 18875828 RowCount: 18 IndexSize: 0 InMemSize: 2097888 LastAccessTime: 1544 LastUpdateTime: 1544 ImmediateTxCompleted: 18 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 18 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 2 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 596 IndexSize: 0 } Channels { Channel: 2 DataSize: 16777344 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 5167 Memory: 2223139 Storage: 16778930 } ShardState: 2 UserTablePartOwners: 72075186224037888 NodeId: 1 StartTime: 450 TableOwnerId: 72057594046644480 FollowerId: 0 ... waiting for stats after compaction Captured TEvDataShard::TEvPeriodicTableStats DatashardId: 72075186224037888 TableLocalId: 2 Generation: 1 Round: 1 TableStats { DataSize: 18875828 RowCount: 18 IndexSize: 0 InMemSize: 2097888 LastAccessTime: 1544 LastUpdateTime: 1544 ImmediateTxCompleted: 18 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 18 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 2 LastFullCompactionTs: 20 HasLoanedParts: false Channels { Channel: 1 DataSize: 596 IndexSize: 0 } Channels { Channel: 2 DataSize: 16777344 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 1394 Memory: 126163 Storage: 18876266 } ShardState: 2 UserTablePartOwners: 72075186224037888 NodeId: 1 StartTime: 450 TableOwnerId: 72057594046644480 FollowerId: 0 2025-03-31T17:12:50.619545Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715678. Ctx: { TraceId: 01jqpk20pabt5f9s41vqnsr25k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2EzNjc0YjUtMTY3ZTJlYmUtMWRjZjRlMGEtMzQ3YmU2NDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:12:54.142015Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715679. Ctx: { TraceId: 01jqpk24aae12kz1w1n1ax6cy6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzhjZ ... 971Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T17:16:03.632066Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T17:16:03.643042Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T17:16:03.721692Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T17:16:03.957152Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:741:2623], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:16:03.957244Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:750:2628], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:16:03.957300Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:16:03.962426Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-31T17:16:04.121565Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:755:2631], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-31T17:16:04.156189Z node 5 :TX_PROXY ERROR: Actor# [5:829:2674] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T17:16:06.760364Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqpk80zkeysy5rkzzpbe1wjf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=NmI2YmQ5YjAtZjkzYmExZTQtMmY4NWM0MDktYWY2OTM4MGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:16:09.050760Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqpk83sjddwkx80wc4bf2wnw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=YmE3YmNlMzQtNTIzMjE5OS1kNDc1ODFlOS0zMDI2YmNkZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:16:11.431850Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqpk861392fjsk4yza1sfhh4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=YzBhMjg0NWQtNDVjNWI2MzEtODY0ZjAxYWUtODM1MWYxMmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:16:13.711755Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqpk88bh4krjv7c7sp1khqpz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=NzE1OTM1YWUtN2M5OWQ5ZC1iMjYwY2M1OC1mNTYzZDhhMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:16:16.133362Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jqpk8ajrdccs81t3thpycxbs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=YzIyOTJlN2ItZWQxNjlhZi02MDljYzQ5Mi0xY2U4Njk4MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:16:18.463649Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jqpk8cyd4qx1z75b8w905amd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=MWY3NjRiYjItZDUyMTA4ZmUtZDZkMTM0MmYtZjE5ZWMwOGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:16:20.837169Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jqpk8f7818cxvmdttya5nfaj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=NDY4ZDgzZTAtNzViZWU0NWUtZTA2MGRkMjctYzZkYjBiY2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:16:23.119293Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jqpk8hhe6vcp1rm75f0nfqaw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=M2YxM2U3YjctNzUwNGQ4ODMtZjUxYmI5M2UtZTZhNmJlMTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:16:25.413550Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jqpk8krr5fvytytny6vb6afe, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=NzQ5NTUxY2EtMmFlYzAyZjAtY2JjOTE4MTQtZGI1YjA5OTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:16:27.896473Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715669. 
Ctx: { TraceId: 01jqpk8p0ndek3s7q8r5mrpe35, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=YTU2MzAyZjctNjkxNTllN2MtNDViODNhZTYtMzY5ZGY5YjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... waiting for stats after upsert 2025-03-31T17:16:29.964124Z node 5 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T17:16:29.964168Z node 5 :IMPORT WARN: Table profiles were not loaded Captured TEvDataShard::TEvPeriodicTableStats DatashardId: 72075186224037888 TableLocalId: 2 Generation: 1 Round: 0 TableStats { DataSize: 10487392 RowCount: 10 IndexSize: 0 InMemSize: 10487392 LastAccessTime: 1539 LastUpdateTime: 1539 ImmediateTxCompleted: 10 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 10 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 1947 Memory: 10609800 } ShardState: 2 UserTablePartOwners: 72075186224037888 NodeId: 5 StartTime: 450 TableOwnerId: 72057594046644480 FollowerId: 0 ... waiting for stats after compaction Captured TEvDataShard::TEvPeriodicTableStats DatashardId: 72075186224037888 TableLocalId: 2 Generation: 1 Round: 1 TableStats { DataSize: 10487392 RowCount: 10 IndexSize: 0 InMemSize: 10487392 LastAccessTime: 1539 LastUpdateTime: 1539 ImmediateTxCompleted: 10 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 10 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 20 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 526 Memory: 125395 Storage: 10486922 } ShardState: 2 UserTablePartOwners: 72075186224037888 NodeId: 5 StartTime: 450 TableOwnerId: 72057594046644480 FollowerId: 0 Captured TEvDataShard::TEvPeriodicTableStats DatashardId: 72075186224037888 TableLocalId: 2 Generation: 1 Round: 2 TableStats { DataSize: 10486220 RowCount: 10 IndexSize: 0 InMemSize: 0 LastAccessTime: 1539 LastUpdateTime: 1539 ImmediateTxCompleted: 10 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 10 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 20 HasLoanedParts: false Channels { Channel: 1 DataSize: 380 IndexSize: 0 } Channels { Channel: 2 DataSize: 10485840 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 526 Memory: 125395 Storage: 10486922 } ShardState: 2 UserTablePartOwners: 72075186224037888 NodeId: 5 StartTime: 450 TableOwnerId: 72057594046644480 FollowerId: 0 2025-03-31T17:16:36.290188Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01jqpk8y685hna5sccjwtkf0fv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=ZTY4ZTdiODktZGZmZjJmNmItODA4YzE0YzYtMjU1NGQ5M2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:16:38.695994Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715671. 
Ctx: { TraceId: 01jqpk90mg6e6gbt4crxzsqetx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=NzRmMDhhOGMtNWZkNDg2MWUtMmRmNWIwZTgtYTM5Y2IyMTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:16:41.116482Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jqpk92zpbbb0pgjczmvxg1qj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=Njg4OTg1YzktNTEyNmRiNTAtMWEyOWI4YzMtNjA2OTJmYWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:16:43.523063Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01jqpk95bafqtpewcc7vpxyh70, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=NzQxMTExNDAtYzc1ZTg0YzQtNzE4NzFkM2QtNWYwMjc2NzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:16:46.130792Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01jqpk97pjd816eqcn6jnmavz9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=ZTliNGFmOTMtNmZiYzEwMzYtM2Y5ZDMwMzMtM2U2ZDM1ODQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:16:48.526463Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01jqpk9a80960mqh0c2yev7fpd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=ZjYxYjM4NS05MzliNGMyNS1iYzYyYmM1OS05N2Q4OWQ3Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:16:50.885516Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715676. Ctx: { TraceId: 01jqpk9cjv6j3zm75rxsvbpf7h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=Y2MwZmNlY2ItNDMzOGM5YjQtYjc3ZGNlZGItMTAwZGMzYTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:16:53.255394Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715677. Ctx: { TraceId: 01jqpk9ewf47ftwstnaznrzgbb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=YTEyY2NmZjEtMjQyOGZlOTYtM2VhMWY1MDgtY2I3ZjcyZWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:16:55.583106Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715678. Ctx: { TraceId: 01jqpk9h6p6rzgd7g5mvqm1ra5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=ZWM5YzY4NzQtZThiNWY0MWItMTkxNjZmMC1kMGZjZTQ0Mg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:16:58.006732Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715679. Ctx: { TraceId: 01jqpk9kfa30cw3gqebqye7epn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=NWIwODI0NDItN2QzODViYTItM2U4OTc2MS1hOTQ0NmJkYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-31T17:16:58.196329Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715680. Ctx: { TraceId: 01jqpk9nv1adbk7y54zhs511y6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=M2E0ZjJlYzEtMTRiODMyOTAtYTI2NDdjOTMtZWZkZjZhMzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root |99.0%| [TA] $(B)/ydb/core/tx/datashard/ut_read_iterator/test-results/unittest/{meta.json ... results_accumulator.log} |99.1%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_iterator/test-results/unittest/{meta.json ... 
results_accumulator.log}
>> ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change [FAIL]
>> ttl_delete_s3.py::TestDeleteS3Ttl::test_ttl_delete
>> test_alter_tiering.py::TestAlterTiering::test[many_tables] [GOOD]
>> test_simple.py::TestSimple::test[alter_table]
>> AnalyzeColumnshard::AnalyzeRebootColumnShard [FAIL]
>> test_simple.py::TestSimple::test[alter_table] [GOOD]
>> test_simple.py::TestSimple::test[alter_tablestore]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootColumnShard [FAIL]
Test command err: 2025-03-31T17:11:42.337153Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T17:11:42.337601Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T17:11:42.337747Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/r3ux/000280/r3tmp/tmp5BkWl2/pdisk_1.dat 2025-03-31T17:11:42.790764Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22845, node 1 2025-03-31T17:11:43.049167Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T17:11:43.049241Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T17:11:43.049273Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T17:11:43.049712Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T17:11:43.052076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T17:11:43.152882Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T17:11:43.153035Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T17:11:43.167316Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:31585 2025-03-31T17:11:43.711738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-31T17:11:46.879935Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-31T17:11:46.911977Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T17:11:46.912102Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T17:11:46.941019Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-31T17:11:46.943558Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T17:11:47.189202Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T17:11:47.189772Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T17:11:47.190356Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T17:11:47.190454Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T17:11:47.190706Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T17:11:47.190797Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T17:11:47.190874Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T17:11:47.190979Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T17:11:47.191125Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T17:11:47.358927Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T17:11:47.359062Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T17:11:47.374688Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T17:11:47.522727Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T17:11:47.568349Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-31T17:11:47.568453Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-31T17:11:47.592022Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-31T17:11:47.593042Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-31T17:11:47.593282Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-31T17:11:47.593326Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-31T17:11:47.593368Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-31T17:11:47.593429Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-31T17:11:47.593484Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-31T17:11:47.593541Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-31T17:11:47.593942Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-31T17:11:47.630206Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-31T17:11:47.630329Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1872:2596], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-31T17:11:47.638127Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1883:2604] 2025-03-31T17:11:47.645801Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1915:2621] 2025-03-31T17:11:47.645916Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1915:2621], schemeshard id = 72075186224037897 2025-03-31T17:11:47.650609Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-31T17:11:47.677203Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-31T17:11:47.677272Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-31T17:11:47.677407Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-31T17:11:47.722254Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-31T17:11:47.730708Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-31T17:11:47.730892Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-31T17:11:47.919491Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-31T17:11:48.088657Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-31T17:11:48.187389Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-31T17:11:49.263357Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2236:3073], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:49.263532Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:11:49.280619Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-31T17:11:49.443838Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T17:11:49.444139Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T17:11:49.444395Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T17:11:49.444484Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T17:11:49.444565Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T17:11:49.444661Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T17:11:49.444823Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T17:11:49.444946Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T17:11:49.445030Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T17:11:49.445131Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T17:11:49.445214Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T17:11:49.445292Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T17:11:49.471051Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T17:11:49.471195Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;desc ... requests. OperationId=operationId 2025-03-31T17:18:53.840500Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-03-31T17:18:54.735804Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-31T17:18:54.735934Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-31T17:18:54.746476Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-31T17:18:54.746520Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-03-31T17:18:54.746545Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-03-31T17:18:55.612171Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T17:18:55.612233Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-03-31T17:18:55.612257Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-03-31T17:18:56.404577Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-31T17:18:56.404645Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-03-31T17:18:56.404672Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-03-31T17:18:57.232023Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-31T17:18:57.242527Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T17:18:57.242572Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-03-31T17:18:57.242596Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-03-31T17:18:58.092798Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-31T17:18:58.092866Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-03-31T17:18:58.092890Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-03-31T17:18:58.888702Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-31T17:18:58.888834Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-31T17:18:58.899454Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T17:18:58.899501Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-03-31T17:18:58.899525Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 
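The ScheduleNextAnalyze/ScheduleNextTraversal records above repeat for minutes with "All the force traversal ... sent the requests" and no further progress: the aggregator keeps polling while the TEvAnalyzeResponse the test is waiting for never arrives. The backtrace below shows where the test is blocked: GrabEdgeEventRethrow pumps the simulated actor system until the expected event reaches the edge actor or the runtime's event budget (100000 events) is exhausted, at which point it throws TSchedulingLimitReachedException. A minimal sketch of that wait pattern follows; the type and method names are taken from the demangled frames in the backtrace, while the helper itself and its scaffolding are assumed, not copied from the actual test:

    // Sketch only -- not the YDB test source. Names follow the backtrace below;
    // the wrapper function is invented for illustration.
    #include <ydb/library/actors/testlib/test_runtime.h>  // header path as printed in the trace

    using namespace NActors;

    template <class TRuntime>
    void WaitForAnalyzeResponse(TRuntime& runtime, const TActorId& edgeActor) {
        try {
            // Drains scheduled events until a TEvAnalyzeResponse is delivered
            // to edgeActor, rethrowing anything raised while pumping.
            auto ev = runtime.template GrabEdgeEventRethrow<
                NKikimr::NStat::TEvStatistics::TEvAnalyzeResponse>(
                    edgeActor, TDuration::Seconds(30));
            (void)ev;
        } catch (const TSchedulingLimitReachedException&) {
            // Thrown once the runtime has processed over 100000 events without
            // seeing the response -- the exact failure recorded in this log.
            throw;
        }
    }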
2025-03-31T17:18:59.728886Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-31T17:18:59.728942Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-03-31T17:18:59.728968Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-03-31T17:19:00.513410Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T17:19:00.513477Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-03-31T17:19:00.513503Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-03-31T17:19:01.296393Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-31T17:19:01.307011Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-31T17:19:01.307066Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-03-31T17:19:01.307111Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-03-31T17:19:02.257231Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T17:19:02.257312Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-03-31T17:19:02.257347Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-03-31T17:19:03.146659Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-31T17:19:03.146837Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-31T17:19:03.157552Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-31T17:19:03.157598Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-03-31T17:19:03.157623Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-03-31T17:19:03.965834Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T17:19:03.965892Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-03-31T17:19:03.965919Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-03-31T17:19:04.755974Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-31T17:19:04.756031Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-03-31T17:19:04.756055Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-03-31T17:19:05.591921Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-31T17:19:05.602556Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T17:19:05.602612Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. 
OperationId=operationId 2025-03-31T17:19:05.602637Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-03-31T17:19:06.553571Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-31T17:19:06.553636Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-03-31T17:19:06.553679Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-03-31T17:19:07.428310Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-31T17:19:07.428442Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-31T17:19:07.438968Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T17:19:07.439005Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-03-31T17:19:07.439030Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-03-31T17:19:08.270026Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-31T17:19:08.270084Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-03-31T17:19:08.270106Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-03-31T17:19:09.047758Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T17:19:09.047815Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-03-31T17:19:09.047839Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-03-31T17:19:09.842000Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-31T17:19:09.852846Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-31T17:19:09.852903Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-03-31T17:19:09.852930Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-03-31T17:19:10.836989Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-31T17:19:10.837064Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-03-31T17:19:10.837098Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-03-31T17:19:11.671150Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-31T17:19:11.671286Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-31T17:19:11.681988Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-31T17:19:11.682041Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. 
OperationId=operationId 2025-03-31T17:19:11.682063Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests.
(TWithBackTrace) ydb/library/actors/testlib/test_runtime.h:579: Exception occured while waiting for NKikimr::NStat::TEvStatistics::TEvAnalyzeResponse: (NActors::TSchedulingLimitReachedException) TestActorRuntime Processed over 100000 events.
ydb/library/actors/testlib/test_runtime.cpp:716:
TBackTrace::Capture()+28 (0x18C0A59C)
TWithBackTrace::TWithBackTrace<>()+80 (0x18832490)
NKikimr::NStat::TEvStatistics::TEvAnalyzeResponse::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEventRethrow(NActors::TActorId const&, TDuration)+485 (0x18806B15)
NKikimr::NStat::NTestSuiteAnalyzeColumnshard::TTestCaseAnalyzeRebootColumnShard::Execute_(NUnitTest::TTestContext&)+4263 (0x188236B7)
std::__y1::__function::__func, void ()>::operator()()+280 (0x1882E548)
TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x190F5E06)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x190CEA19)
NKikimr::NStat::NTestSuiteAnalyzeColumnshard::TCurrentTest::Execute()+1204 (0x1882D714)
NUnitTest::TTestFactory::Execute()+2438 (0x190D02E6)
NUnitTest::RunMain(int, char**)+5213 (0x190F037D)
??+0 (0x7FA394176D90)
__libc_start_main+128 (0x7FA394176E40)
_start+41 (0x1618F029)
|99.2%| [TA] $(B)/ydb/core/statistics/aggregator/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|99.2%| [TA] {RESULT} $(B)/ydb/core/statistics/aggregator/ut/test-results/unittest/{meta.json ... results_accumulator.log}
>> test_simple.py::TestSimple::test[alter_tablestore] [GOOD]
>> test_simple.py::TestSimple::test[table]
>> test_simple.py::TestSimple::test[table] [GOOD]
>> test_simple.py::TestSimple::test[tablestores]
>> test_simple.py::TestSimple::test[tablestores] [GOOD]
>> test_scheme_load.py::TestSchemeLoad::test[create_and_drop_tables]
>> test_scheme_load.py::TestSchemeLoad::test[create_and_drop_tables] [GOOD]
>> test_insert.py::TestInsert::test[read_data_during_bulk_upsert]
>> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKUtf8 [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKUtf8 [GOOD]
Test command err: 2025-03-31T17:12:01.483934Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0
2025-03-31T17:12:01.574188Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor;
2025-03-31T17:12:01.600302Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished;
2025-03-31T17:12:01.600617Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184
2025-03-31T17:12:01.609219Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0;
2025-03-31T17:12:01.609458Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules;
2025-03-31T17:12:01.609729Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T17:12:01.609899Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T17:12:01.610009Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T17:12:01.610155Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T17:12:01.610297Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T17:12:01.610417Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T17:12:01.610537Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T17:12:01.610654Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T17:12:01.610784Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T17:12:01.610900Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T17:12:01.642293Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-31T17:12:01.642470Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T17:12:01.642542Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-31T17:12:01.642736Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T17:12:01.642917Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T17:12:01.643008Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T17:12:01.643072Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T17:12:01.643235Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T17:12:01.643304Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T17:12:01.643352Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T17:12:01.643384Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-31T17:12:01.643604Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T17:12:01.643696Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T17:12:01.643748Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T17:12:01.643782Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-31T17:12:01.643897Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-31T17:12:01.643958Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T17:12:01.644013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T17:12:01.644049Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-31T17:12:01.644123Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T17:12:01.644163Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T17:12:01.644199Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-31T17:12:01.644268Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T17:12:01.644321Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T17:12:01.644351Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-31T17:12:01.644763Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=46; 2025-03-31T17:12:01.644874Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=48; 2025-03-31T17:12:01.644977Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=49; 2025-03-31T17:12:01.645078Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=36; 2025-03-31T17:12:01.645265Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T17:12:01.645339Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T17:12:01.645387Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-31T17:12:01.645603Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T17:12:01.645653Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T17:12:01.645698Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-31T17:12:01.645871Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T17:12:01.645923Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T17:12:01.645955Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-31T17:12:01.646173Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T17:12:01.646224Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T17:12:01.646254Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-31T17:12:01.646388Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T17:12:01.646434Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T17:12:01.646479Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... LUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=366245; 2025-03-31T17:21:16.835523Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:13990:15933];tablet_id=9437184;parent=[1:13951:15901];fline=manager.cpp:82;event=ask_data;request=request_id=399;1={portions_count=193};; 2025-03-31T17:21:16.838248Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:13951:15901];process=Enqueue;ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1452;size=193;path_id=1; 2025-03-31T17:21:16.850207Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:13951:15901];process=Enqueue;ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1503;stage=finished; 2025-03-31T17:21:17.475221Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-03-31T17:21:17.475315Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=11; 2025-03-31T17:21:17.478687Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=3265; 2025-03-31T17:21:17.485681Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=5806; 2025-03-31T17:21:17.485797Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=7030; 2025-03-31T17:21:17.485941Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=88; 2025-03-31T17:21:17.486068Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=75; 2025-03-31T17:21:17.486213Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=109; 2025-03-31T17:21:17.486323Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=79; 2025-03-31T17:21:17.486506Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=146; 2025-03-31T17:21:17.486552Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=11171; 2025-03-31T17:21:17.490955Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-03-31T17:21:17.491034Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=11; 2025-03-31T17:21:17.519691Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=28553; 2025-03-31T17:21:17.568071Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=48261; 2025-03-31T17:21:17.568205Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=38; 2025-03-31T17:21:17.568302Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=27; 2025-03-31T17:21:17.568352Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=9; 2025-03-31T17:21:17.568396Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=9; 2025-03-31T17:21:17.568458Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=8; 2025-03-31T17:21:17.568551Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=42; 2025-03-31T17:21:17.568600Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=8; 2025-03-31T17:21:17.568695Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=61; 2025-03-31T17:21:17.568743Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=7; 2025-03-31T17:21:17.568820Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=28; 2025-03-31T17:21:17.568917Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=57; 2025-03-31T17:21:17.569012Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=61; 2025-03-31T17:21:17.569052Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=77970; 2025-03-31T17:21:17.569229Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted 
{blob_bytes=128676200;raw_bytes=191860770;count=66;records=1845000} inactive {blob_bytes=245561044;raw_bytes=360315949;count=127;records=3499542} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-03-31T17:21:17.569830Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:13951:15901];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-03-31T17:21:17.569905Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:13951:15901];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-03-31T17:21:17.570002Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13951:15901];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-03-31T17:21:17.570065Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13951:15901];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=0; 2025-03-31T17:21:17.570325Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-31T17:21:17.570394Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T17:21:17.570635Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=21; 2025-03-31T17:21:17.570724Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2025-03-31T17:21:17.570775Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=21;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-31T17:21:17.570829Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-31T17:21:17.570869Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-31T17:21:17.570976Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-31T17:21:17.576147Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T17:21:17.581103Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13951:15901];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-03-31T17:21:17.583261Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13951:15901];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-03-31T17:21:17.583306Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
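The "Index:" record above breaks the column engine's contents into portion buckets -- inserted, compacted, s-compacted, inactive, evicted -- each reporting blob_bytes (stored size), raw_bytes (uncompressed payload), portion count, and record count. Reading blob_bytes against raw_bytes gives the effective compression, and raw_bytes against records the average row width. A small illustrative calculation over the s-compacted figures logged above; this is plain arithmetic, not YDB code, and the bucket interpretation is an assumption inferred from the field names:

    // Illustrative arithmetic over counters copied from the stats record above.
    #include <cstdint>
    #include <cstdio>

    struct TPortionBucket {
        uint64_t BlobBytes;  // bytes occupied by stored blobs
        uint64_t RawBytes;   // uncompressed payload bytes
        uint64_t Count;      // number of portions
        uint64_t Records;    // number of rows
    };

    int main() {
        TPortionBucket sCompacted{128676200, 191860770, 66, 1845000};  // from the log
        std::printf("compression ~%.2fx, ~%.1f raw bytes/row, ~%llu rows/portion\n",
                    double(sCompacted.RawBytes) / double(sCompacted.BlobBytes),
                    double(sCompacted.RawBytes) / double(sCompacted.Records),
                    (unsigned long long)(sCompacted.Records / sCompacted.Count));
    }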
2025-03-31T17:21:17.583331Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-03-31T17:21:17.583380Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13951:15901];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-31T17:21:17.583473Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13951:15901];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T17:21:17.583764Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13951:15901];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=21; 2025-03-31T17:21:17.583844Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13951:15901];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2025-03-31T17:21:17.583905Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13951:15901];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=21;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-31T17:21:17.583963Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13951:15901];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-31T17:21:17.584004Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13951:15901];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-31T17:21:17.584106Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13951:15901];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.999000s; 2025-03-31T17:21:17.584162Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13951:15901];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKString [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKString [GOOD] Test command err: 2025-03-31T17:11:59.989451Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-31T17:12:00.105968Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T17:12:00.133405Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T17:12:00.133772Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-31T17:12:00.142722Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 
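Both compaction tests emit the same startup sequence during TTxInitSchema/TTxUpdateSchema: normalization_start, one normalizer_register line per class (Granules, Chunks, TablesCleaner, CleanGranuleId, CleanInsertionDedup, GCCountersNormalizer, RestorePortionFromChunks, SyncPortionFromChunks, SyncMinSnapshotFromChunks, RestoreV1Chunks_V2, RestoreV2Chunks), then for each stage normalizer_init, a "0 chunks found" report, normalizer_finished, and normalizer_switched to the next, ending in normalization_finished. In effect the tablet walks a fixed chain of on-disk migration passes before serving traffic. A schematic reconstruction of that loop, with interfaces assumed for illustration rather than taken from the YDB sources:

    // Schematic pipeline matching the normalizer log lines above; not YDB source.
    #include <cstdio>
    #include <functional>
    #include <string>
    #include <vector>

    struct TNormalizer {
        std::string Name;             // e.g. "Granules", "Chunks", "RestoreV2Chunks"
        std::function<size_t()> Run;  // repairs one legacy layout, returns chunks found
    };

    void RunNormalizationChain(const std::vector<TNormalizer>& chain) {
        std::printf("event=normalization_start\n");
        for (const TNormalizer& n : chain) {
            std::printf("event=normalizer_init;type=%s\n", n.Name.c_str());
            size_t chunks = n.Run();  // a freshly created tablet reports "0 chunks found"
            std::printf("normalizer=%s;message=%zu chunks found\n", n.Name.c_str(), chunks);
            std::printf("event=normalizer_finished;description=CLASS_NAME=%s\n", n.Name.c_str());
        }
        std::printf("event=normalization_finished\n");
    }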
2025-03-31T17:12:00.142975Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules;
2025-03-31T17:12:00.143260Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks;
2025-03-31T17:12:00.143415Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner;
2025-03-31T17:12:00.143532Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId;
2025-03-31T17:12:00.143652Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup;
2025-03-31T17:12:00.143801Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer;
2025-03-31T17:12:00.143928Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks;
2025-03-31T17:12:00.144051Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks;
2025-03-31T17:12:00.144162Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks;
2025-03-31T17:12:00.144296Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2;
2025-03-31T17:12:00.144415Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks;
2025-03-31T17:12:00.175565Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184
2025-03-31T17:12:00.175752Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules;
2025-03-31T17:12:00.175828Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules;
2025-03-31T17:12:00.176032Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found;
2025-03-31T17:12:00.176194Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1;
2025-03-31T17:12:00.176291Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks;
2025-03-31T17:12:00.176344Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks;
2025-03-31T17:12:00.176448Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found;
2025-03-31T17:12:00.176521Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2;
2025-03-31T17:12:00.176587Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner;
2025-03-31T17:12:00.176622Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner;
2025-03-31T17:12:00.176876Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found;
2025-03-31T17:12:00.176966Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4;
2025-03-31T17:12:00.177013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId;
2025-03-31T17:12:00.177056Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId;
2025-03-31T17:12:00.177198Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found;
2025-03-31T17:12:00.177264Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6;
2025-03-31T17:12:00.177311Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup;
2025-03-31T17:12:00.177352Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup;
2025-03-31T17:12:00.177432Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8;
2025-03-31T17:12:00.177473Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer;
2025-03-31T17:12:00.177526Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer;
2025-03-31T17:12:00.177593Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9;
2025-03-31T17:12:00.177637Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks;
2025-03-31T17:12:00.177668Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks;
2025-03-31T17:12:00.178089Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=52;
2025-03-31T17:12:00.178202Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=55;
2025-03-31T17:12:00.178297Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=38;
2025-03-31T17:12:00.178411Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=57;
2025-03-31T17:12:00.178585Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10;
2025-03-31T17:12:00.178672Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks;
2025-03-31T17:12:00.178716Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks;
2025-03-31T17:12:00.178967Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11;
2025-03-31T17:12:00.179030Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks;
2025-03-31T17:12:00.179064Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks;
2025-03-31T17:12:00.179281Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13;
2025-03-31T17:12:00.179334Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2;
2025-03-31T17:12:00.179373Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2;
2025-03-31T17:12:00.179588Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15;
2025-03-31T17:12:00.179638Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks;
2025-03-31T17:12:00.179671Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks;
2025-03-31T17:12:00.179824Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16;
2025-03-31T17:12:00.179871Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished;
2025-03-31T17:12:00.179929Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish;
... UMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=457253;
2025-03-31T17:21:19.740697Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:13990:15933];tablet_id=9437184;parent=[1:13951:15901];fline=manager.cpp:82;event=ask_data;request=request_id=399;1={portions_count=193};;
2025-03-31T17:21:19.742684Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:13951:15901];process=Enqueue;ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1452;size=193;path_id=1;
2025-03-31T17:21:19.754386Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:13951:15901];process=Enqueue;ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1503;stage=finished;
2025-03-31T17:21:20.390258Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184
2025-03-31T17:21:20.390356Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=12;
2025-03-31T17:21:20.393722Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=3279;
2025-03-31T17:21:20.401106Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=6269;
2025-03-31T17:21:20.401209Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=7413;
2025-03-31T17:21:20.401346Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=82;
2025-03-31T17:21:20.401453Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=59;
2025-03-31T17:21:20.401590Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=103;
2025-03-31T17:21:20.401701Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=80;
2025-03-31T17:21:20.401872Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=135;
2025-03-31T17:21:20.401907Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=11507;
2025-03-31T17:21:20.406813Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184
2025-03-31T17:21:20.406902Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=11;
2025-03-31T17:21:20.435662Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=28662;
2025-03-31T17:21:20.483728Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=47945;
2025-03-31T17:21:20.483858Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=37;
2025-03-31T17:21:20.483928Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=25;
2025-03-31T17:21:20.483977Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=8;
2025-03-31T17:21:20.484023Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=11;
2025-03-31T17:21:20.484074Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=9;
2025-03-31T17:21:20.484154Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=43;
2025-03-31T17:21:20.484200Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=8;
2025-03-31T17:21:20.484296Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=58;
2025-03-31T17:21:20.484341Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=8;
2025-03-31T17:21:20.484410Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=33;
2025-03-31T17:21:20.484508Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=61;
2025-03-31T17:21:20.484594Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=49;
2025-03-31T17:21:20.484627Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=77677;
2025-03-31T17:21:20.484834Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=128676200;raw_bytes=191860770;count=66;records=1845000} inactive {blob_bytes=245561044;raw_bytes=360315949;count=127;records=3499542} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184
2025-03-31T17:21:20.485421Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:13951:15901];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork;
2025-03-31T17:21:20.485488Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:13951:15901];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive;
2025-03-31T17:21:20.485558Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13951:15901];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL;
2025-03-31T17:21:20.485612Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13951:15901];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=0;
2025-03-31T17:21:20.485889Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0;
2025-03-31T17:21:20.485953Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0;
2025-03-31T17:21:20.486202Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=21;
2025-03-31T17:21:20.486281Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101;
2025-03-31T17:21:20.486326Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=21;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000;
2025-03-31T17:21:20.486381Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes;
2025-03-31T17:21:20.486434Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes;
2025-03-31T17:21:20.486550Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes;
2025-03-31T17:21:20.492387Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0;
2025-03-31T17:21:20.496306Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13951:15901];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184;
2025-03-31T17:21:20.497483Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13951:15901];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;
2025-03-31T17:21:20.497531Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats.
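[Editor's note] The TTxUpdateSchema pass earlier in this trace walks a fixed chain of normalizers: each one is switched to, initialized with a persisted seq_id, run against the tablet's local data ("N chunks found"), and marked finished before the chain advances, so a restarted tablet can resume from the last finished seq_id. A minimal Python sketch of that control flow; the Normalizer class and run_chain helper are illustrative assumptions, not YDB's actual C++ interfaces:

# Illustrative sketch of the normalizer chain logged above; names are
# hypothetical, not YDB's real (C++) interfaces.
from dataclasses import dataclass
from typing import Callable, List

@dataclass
class Normalizer:
    class_name: str          # e.g. "Granules", "Chunks", "TablesCleaner"
    seq_id: int              # persisted, so a restarted tablet can resume
    run: Callable[[], int]   # returns the number of chunks it had to fix

def run_chain(normalizers: List[Normalizer], last_done_seq_id: int) -> None:
    for n in normalizers:
        if n.seq_id <= last_done_seq_id:
            continue  # already applied before the last restart
        print(f"event=normalizer_switched;description=CLASS_NAME={n.class_name}")
        print(f"event=normalizer_init;seq_id={n.seq_id};type={n.class_name}")
        fixed = n.run()
        print(f"normalizer={n.class_name};message={fixed} chunks found")
        print(f"event=normalizer_finished;description=CLASS_NAME={n.class_name};id={n.seq_id}")
    print("event=normalization_finished")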
2025-03-31T17:21:20.497560Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184
2025-03-31T17:21:20.497623Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13951:15901];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0;
2025-03-31T17:21:20.497698Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13951:15901];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0;
2025-03-31T17:21:20.497972Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13951:15901];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=21;
2025-03-31T17:21:20.498041Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13951:15901];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101;
2025-03-31T17:21:20.498087Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13951:15901];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=21;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000;
2025-03-31T17:21:20.498157Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13951:15901];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes;
2025-03-31T17:21:20.498200Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13951:15901];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes;
2025-03-31T17:21:20.498285Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13951:15901];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.999000s;
2025-03-31T17:21:20.498339Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13951:15901];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes;
|99.4%| [TA] $(B)/ydb/core/tx/columnshard/ut_rw/test-results/unittest/{meta.json ... results_accumulator.log}
|99.4%| [TA] {RESULT} $(B)/ydb/core/tx/columnshard/ut_rw/test-results/unittest/{meta.json ... results_accumulator.log}
------- [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/ttl_tiering/py3test >> data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test
2025-03-31 17:21:24,859 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out
2025-03-31 17:21:25,075 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout.
Process tree before termination:
pid rss ref pdirt
958691 733M 737M 657M ydb-tests-olap-ttl_tiering --basetemp /home/runner/.ya/build/build_root/r3ux/000234/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-modu
960009 2.7G 2.7G 2.1G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/r3ux/000234/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/data_m
962529 466M 466M 432M └─ moto_server s3 --port 17488
Test command err:
File "library/python/pytest/main.py", line 101, in main rc = pytest.main(
File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main(
File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args)
File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main return wrap_session(config, _main)
File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session session.exitstatus = doit(config, session) or 0
File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main config.hook.pytest_runtestloop(session=session)
File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args)
File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem)
File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args)
File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol runtestprotocol(item, nextitem=nextitem)
File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol reports.append(call_and_report(item, "call", log))
File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report call = call_runtest_hook(item, when, **kwds)
File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook return CallInfo.from_call(
File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call result: Optional[TResult] = func()
File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in lambda: ihook(item=item, **kwds), when=when, reraise=reraise
File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args)
File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call item.runtest()
File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest self.ihook.pytest_pyfunc_call(pyfuncitem=self)
File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args)
File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call pyfuncitem.retval = testfunction(**testargs)
File "ydb/tests/olap/ttl_tiering/data_migration_when_alter_ttl.py", line 171, in test if not self.wait_for(
File "ydb/tests/olap/ttl_tiering/base.py", line 68, in wait_for if condition_func():
File "ydb/tests/olap/ttl_tiering/data_migration_when_alter_ttl.py", line 172, in lambda: get_rows_in_portion(bucket1_path) == self.row_count
File "ydb/tests/olap/ttl_tiering/data_migration_when_alter_ttl.py", line 160, in get_rows_in_portion logger.info(f"portions: {portions_stat}, blobs: {table.get_blob_stat_by_tier()}")
File "ydb/tests/olap/common/column_table_helper.py", line 41, in get_blob_stat_by_tier results = self.ydb_client.query(stmt)
File "ydb/tests/olap/common/ydb_client.py", line 24, in query return self.session_pool.execute_with_retries(statement)
File "contrib/python/ydb/py3/ydb/query/pool.py", line 202, in execute_with_retries return retry_operation_sync(wrapped_callee, retry_settings)
File "contrib/python/ydb/py3/ydb/retries.py", line 133, in retry_operation_sync for next_opt in opt_generator:
File "contrib/python/ydb/py3/ydb/retries.py", line 94, in retry_operation_impl result = YdbRetryOperationFinalResult(callee(*args, **kwargs))
File "contrib/python/ydb/py3/ydb/query/pool.py", line 200, in wrapped_callee return [result_set for result_set in it]
File "contrib/python/ydb/py3/ydb/_utilities.py", line 173, in __next__ return self._next()
File "contrib/python/ydb/py3/ydb/_utilities.py", line 164, in _next res = self.wrapper(next(self.it))
File "contrib/python/grpcio/py3/grpc/_channel.py", line 475, in __next__ return self._next()
File "contrib/python/grpcio/py3/grpc/_channel.py", line 872, in _next _common.wait(self._state.condition.wait, _response_ready)
File "contrib/python/grpcio/py3/grpc/_common.py", line 150, in wait _wait_once(wait_fn, MAXIMUM_WAIT_TIMEOUT, spin_cb)
File "contrib/python/grpcio/py3/grpc/_common.py", line 112, in _wait_once wait_fn(timeout=timeout)
File "contrib/tools/python3/Lib/threading.py", line 359, in wait gotit = waiter.acquire(True, timeout)
File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown traceback.print_stack(file=sys.stderr)
Traceback (most recent call last):
File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for(
File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN))
yatest.common.process.TimeoutError: ..._root/r3ux/000234', '--source-root', '/home/runner/.ya/build/build_root/r3ux/000234/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/r3ux/000234/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/data_migration_when_alter_ttl/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/ttl_tiering', '--test-tool-bin', '/home/runner/.ya/tools/v4/8330113388/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--test-filter', 'data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_ttl_delete', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_delete_s3_tiering', '--tb', 'short', '--dep-root', 'ydb/tests/olap/ttl_tiering', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'data_migration_when_alter_ttl.py']' stopped by 600 seconds timeout
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1749, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback)
File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e))
yatest.common.process.ExecutionTimeoutError: (("..._root/r3ux/000234', '--source-root', '/home/runner/.ya/build/build_root/r3ux/000234/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/r3ux/000234/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/data_migration_when_alter_ttl/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/ttl_tiering', '--test-tool-bin', '/home/runner/.ya/tools/v4/8330113388/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--test-filter', 'data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_ttl_delete', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_delete_s3_tiering', '--tb', 'short', '--dep-root', 'ydb/tests/olap/ttl_tiering', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'data_migration_when_alter_ttl.py']' stopped by 600 seconds timeout",), {})
------- [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/ttl_tiering/py3test >> ttl_delete_s3.py::TestDeleteS3Ttl::test_ttl_delete
2025-03-31 17:21:24,799 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out
2025-03-31 17:21:25,222 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout.
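[Editor's note] The test above did not crash; it was stuck in base.py's wait_for, which polls a condition lambda (rows migrated to the S3 bucket) until the outer 600-second wrapper kills the run. The polling helper presumably looks something like this minimal sketch; it is a simplified assumption, not the actual base.py or yatest code:

# Minimal polling helper in the spirit of the wait_for frames above;
# a simplified assumption, not the actual library implementation.
import time

def wait_for(condition_func, timeout_seconds: float, poll_interval: float = 1.0) -> bool:
    deadline = time.monotonic() + timeout_seconds
    while time.monotonic() < deadline:
        if condition_func():
            return True
        time.sleep(poll_interval)
    return False  # the caller decides whether a timeout fails the test

# Hypothetical usage mirroring the traceback:
#   assert wait_for(lambda: get_rows_in_portion(bucket1_path) == row_count, 300)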
Process tree before termination:
pid rss ref pdirt
958692 678M 681M 601M ydb-tests-olap-ttl_tiering --basetemp /home/runner/.ya/build/build_root/r3ux/000238/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-modu
960010 6.7G 6.7G 6.1G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/r3ux/000238/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/ttl_de
961688 393M 393M 359M └─ moto_server s3 --port 19432
Test command err:
File "library/python/pytest/main.py", line 101, in main rc = pytest.main(
File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main(
File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args)
File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main return wrap_session(config, _main)
File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session session.exitstatus = doit(config, session) or 0
File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main config.hook.pytest_runtestloop(session=session)
File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args)
File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem)
File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args)
File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol runtestprotocol(item, nextitem=nextitem)
File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol reports.append(call_and_report(item, "call", log))
File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report call = call_runtest_hook(item, when, **kwds)
File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook return CallInfo.from_call(
File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call result: Optional[TResult] = func()
File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in lambda: ihook(item=item, **kwds), when=when, reraise=reraise
File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args)
File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call item.runtest()
File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest self.ihook.pytest_pyfunc_call(pyfuncitem=self)
File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args)
File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call pyfuncitem.retval = testfunction(**testargs)
File "ydb/tests/olap/ttl_tiering/ttl_delete_s3.py", line 269, in test_ttl_delete self.ydb_client.query("""
File "ydb/tests/olap/common/ydb_client.py", line 24, in query return self.session_pool.execute_with_retries(statement)
File "contrib/python/ydb/py3/ydb/query/pool.py", line 202, in execute_with_retries return retry_operation_sync(wrapped_callee, retry_settings)
File "contrib/python/ydb/py3/ydb/retries.py", line 133, in retry_operation_sync for next_opt in opt_generator:
File "contrib/python/ydb/py3/ydb/retries.py", line 94, in retry_operation_impl result = YdbRetryOperationFinalResult(callee(*args, **kwargs))
File "contrib/python/ydb/py3/ydb/query/pool.py", line 200, in wrapped_callee return [result_set for result_set in it]
File "contrib/python/ydb/py3/ydb/_utilities.py", line 173, in __next__ return self._next()
File "contrib/python/ydb/py3/ydb/_utilities.py", line 164, in _next res = self.wrapper(next(self.it))
File "contrib/python/grpcio/py3/grpc/_channel.py", line 475, in __next__ return self._next()
File "contrib/python/grpcio/py3/grpc/_channel.py", line 872, in _next _common.wait(self._state.condition.wait, _response_ready)
File "contrib/python/grpcio/py3/grpc/_common.py", line 150, in wait _wait_once(wait_fn, MAXIMUM_WAIT_TIMEOUT, spin_cb)
File "contrib/python/grpcio/py3/grpc/_common.py", line 112, in _wait_once wait_fn(timeout=timeout)
File "contrib/tools/python3/Lib/threading.py", line 359, in wait gotit = waiter.acquire(True, timeout)
File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown traceback.print_stack(file=sys.stderr)
Traceback (most recent call last):
File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for(
File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN))
yatest.common.process.TimeoutError: ...', '/home/runner/.ya/build/build_root/r3ux/000238', '--source-root', '/home/runner/.ya/build/build_root/r3ux/000238/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/r3ux/000238/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/ttl_delete_s3/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/ttl_tiering', '--test-tool-bin', '/home/runner/.ya/tools/v4/8330113388/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--test-filter', 'data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_ttl_delete', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_delete_s3_tiering', '--tb', 'short', '--dep-root', 'ydb/tests/olap/ttl_tiering', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'ttl_delete_s3.py']' stopped by 600 seconds timeout
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1749, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback)
File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e))
yatest.common.process.ExecutionTimeoutError: (("...', '/home/runner/.ya/build/build_root/r3ux/000238', '--source-root', '/home/runner/.ya/build/build_root/r3ux/000238/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/r3ux/000238/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/ttl_delete_s3/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/ttl_tiering', '--test-tool-bin', '/home/runner/.ya/tools/v4/8330113388/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--test-filter', 'data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_ttl_delete', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_delete_s3_tiering', '--tb', 'short', '--dep-root', 'ydb/tests/olap/ttl_tiering', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'ttl_delete_s3.py']' stopped by 600 seconds timeout",), {})
|99.6%| [TA] $(B)/ydb/tests/olap/ttl_tiering/test-results/py3test/{meta.json ... results_accumulator.log}
|99.7%| [TA] {RESULT} $(B)/ydb/tests/olap/ttl_tiering/test-results/py3test/{meta.json ... results_accumulator.log}
------- [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/scenario/py3test >> test_insert.py::TestInsert::test[read_data_during_bulk_upsert]
2025-03-31 17:22:10,223 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out
2025-03-31 17:22:10,398 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout.
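[Editor's note] Both ttl_tiering tracebacks above block inside the ydb SDK's retry loop (execute_with_retries -> retry_operation_sync -> retry_operation_impl) while draining query result sets over gRPC. The retry pattern has roughly this shape; this is a heavily simplified sketch, not the actual retries.py, which retries only retriable YDB status codes under the SDK's RetrySettings:

# Rough shape of the SDK retry loop visible in the ydb frames above;
# a simplified assumption, not the real implementation.
import time

def retry_operation_sync(callee, max_attempts: int = 5, backoff_seconds: float = 0.1):
    last_error = None
    for attempt in range(max_attempts):
        try:
            return callee()  # e.g. execute the query and drain all result sets
        except Exception as error:  # the real SDK inspects the error's retriability
            last_error = error
            time.sleep(backoff_seconds * (2 ** attempt))
    raise last_error

Note that no amount of retrying helps here: each attempt blocks on the gRPC stream, so the 600-second wrapper timeout fires first.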
Process tree before termination:
pid rss ref pdirt
970569 1.8G 1.8G 1.7G ydb-tests-olap-scenario --basetemp /home/runner/.ya/build/build_root/r3ux/000162/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-modules
996215 1.9G 1.9G 1.4G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/r3ux/000162/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/test_inse
Test command err:
Was written: 0.0 MiB, Speed: 0.0 MiB/s
Step 1. only write
Write: 10% 10811 30% 10811 50% 10811 90% 10811 99% 10811 ms
Write: 10% 14662 30% 14662 50% 14662 90% 14662 99% 14662 ms
Write: 10% 17518 30% 17518 50% 17518 90% 17518 99% 17518 ms
Write: 10% 16389 30% 16389 50% 16389 90% 16389 99% 16389 ms
Write: 10% 14904 30% 14904 50% 14904 90% 14904 99% 14904 ms
Write: 10% 16399 30% 16399 50% 16399 90% 16399 99% 16399 ms
Write: 10% 14330 30% 14330 50% 14330 90% 14330 99% 14330 ms
Write: 10% 15406 30% 15406 50% 15406 90% 15406 99% 15406 ms
Write: 10% 13994 30% 13994 50% 13994 90% 13994 99% 13994 ms
Write: 10% 17975 30% 17975 50% 17975 90% 17975 99% 17975 ms
Write: 10% 14924 30% 14924 50% 14924 90% 14924 99% 14924 ms
Write: 10% 13376 30% 13376 50% 13376 90% 13376 99% 13376 ms
Write: 10% 13839 30% 13839 50% 13839 90% 13839 99% 13839 ms
Write: 10% 12902 30% 12902 50% 12902 90% 12902 99% 12902 ms
Write: 10% 12239 30% 12239 50% 12239 90% 12239 99% 12239 ms
Write: 10% 13343 30% 13343 50% 13343 90% 13343 99% 13343 ms
Write: 10% 11451 30% 11451 50% 11451 90% 11451 99% 11451 ms
Write: 10% 11787 30% 11787 50% 11787 90% 11787 99% 11787 ms
Write: 10% 11272 30% 11272 50% 11272 90% 11272 99% 11272 ms
Write: 10% 12351 30% 12351 50% 12351 90% 12351 99% 12351 ms
Write: 10% 11384 30% 11384 50% 11384 90% 11384 99% 11384 ms
Write: 10% 12158 30% 12158 50% 12158 90% 12158 99% 12158 ms
Write: 10% 11496 30% 11496 50% 11496 90% 11496 99% 11496 ms
Write: 10% 11219 30% 11219 50% 11219 90% 11219 99% 11219 ms
Write: 10% 10590 30% 10590 50% 10590 90% 10590 99% 10590 ms
Write: 10% 8149 30% 8149 50% 8149 90% 8149 99% 8149 ms
Write: 10% 9227 30% 9227 50% 9227 90% 9227 99% 9227 ms
Write: 10% 9258 30% 9258 50% 9258 90% 9258 99% 9258 ms
Write: 10% 12430 30% 12430 50% 12430 90% 12430 99% 12430 ms
Write: 10% 9069 30% 9069 50% 9069 90% 9069 99% 9069 ms
Write: 10% 8281 30% 8281 50% 8281 90% 8281 99% 8281 ms
Write: 10% 8010 30% 8010 50% 8010 90% 8010 99% 8010 ms
Write: 10% 9646 30% 9646 50% 9646 90% 9646 99% 9646 ms
Write: 10% 8681 30% 8681 50% 8681 90% 8681 99% 8681 ms
Write: 10% 8190 30% 8190 50% 8190 90% 8190 99% 8190 ms
Write: 10% 7884 30% 7884 50% 7884 90% 7884 99% 7884 ms
Write: 10% 7914 30% 7914 50% 7914 90% 7914 99% 7914 ms
Write: 10% 6646 30% 6646 50% 6646 90% 6646 99% 6646 ms
Write: 10% 6686 30% 6686 50% 6686 90% 6686 99% 6686 ms
Write: 10% 7010 30% 7010 50% 7010 90% 7010 99% 7010 ms
Write: 10% 5735 30% 5735 50% 5735 90% 5735 99% 5735 ms
Write: 10% 6335 30% 6335 50% 6335 90% 6335 99% 6335 ms
Write: 10% 6212 30% 6212 50% 6212 90% 6212 99% 6212 ms
Write: 10% 6219 30% 6219 50% 6219 90% 6219 99% 6219 ms
Write: 10% 5334 30% 5334 50% 5334 90% 5334 99% 5334 ms
Write: 10% 7796 30% 7796 50% 7796 90% 7796 99% 7796 ms
Write: 10% 7470 30% 7470 50% 7470 90% 7470 99% 7470 ms
Write: 10% 5215 30% 5215 50% 5215 90% 5215 99% 5215 ms
Write: 10% 5590 30% 5590 50% 5590 90% 5590 99% 5590 ms
Write: 10% 5725 30% 5725 50% 5725 90% 5725 99% 5725 ms
Write: 10% 5143 30% 5143 50% 5143 90% 5143 99% 5143 ms
Write: 10% 4786 30% 4786 50% 4786 90% 4786 99% 4786 ms
Write: 10% 4895 30% 4895 50% 4895 90% 4895 99% 4895 ms
Write: 10% 5016 30% 5016 50% 5016 90% 5016 99% 5016 ms
Write: 10% 4527 30% 4527 50% 4527 90% 4527 99% 4527 ms
Write: 10% 5036 30% 5036 50% 5036 90% 5036 99% 5036 ms
Write: 10% 4710 30% 4710 50% 4710 90% 4710 99% 4710 ms
Write: 10% 4304 30% 4304 50% 4304 90% 4304 99% 4304 ms
Write: 10% 4209 30% 4209 50% 4209 90% 4209 99% 4209 ms
Write: 10% 4533 30% 4533 50% 4533 90% 4533 99% 4533 ms
Write: 10% 3866 30% 3866 50% 3866 90% 3866 99% 3866 ms
Write: 10% 4031 30% 4031 50% 4031 90% 4031 99% 4031 ms
Write: 10% 4074 30% 4074 50% 4074 90% 4074 99% 4074 ms
Write: 10% 3670 30% 3670 50% 3670 90% 3670 99% 3670 ms
Step 2. read write
Write: 10% 5458 30% 5458 50% 5458 90% 5458 99% 5458 ms
Write: 10% 17318 30% 17318 50% 17318 90% 17318 99% 17318 ms
Write: 10% 19086 30% 19086 50% 19086 90% 19086 99% 19086 ms
Write: 10% 17806 30% 17806 50% 17806 90% 17806 99% 17806 ms
Write: 10% 19368 30% 19368 50% 19368 90% 19368 99% 19368 ms
Write: 10% 18271 30% 18271 50% 18271 90% 18271 99% 18271 ms
Write: 10% 16239 30% 16239 50% 16239 90% 16239 99% 16239 ms
Write: 10% 18378 30% 18378 50% 18378 90% 18378 99% 18378 ms
Write: 10% 17292 30% 17292 50% 17292 90% 17292 99% 17292 ms
Was written: 25.0 MiB, Speed: 0.4166666666666667 MiB/s
Write: 10% 17294 30% 17294 50% 17294 90% 17294 99% 17294 ms
Write: 10% 17517 30% 17517 50% 17517 90% 17517 99% 17517 ms
Write: 10% 15562 30% 15562 50% 15562 90% 15562 99% 15562 ms
Write: 10% 13451 30% 13451 50% 13451 90% 13451 99% 13451 ms
Write: 10% 14569 30% 14569 50% 14569 90% 14569 99% 14569 ms
Write: 10% 15697 30% 15697 50% 15697 90% 15697 99% 15697 ms
Write: 10% 15936 30% 15936 50% 15936 90% 15936 99% 15936 ms
Write: 10% 13040 30% 13040 50% 13040 90% 13040 99% 13040 ms
Write: 10% 13056 30% 13056 50% 13056 90% 13056 99% 13056 ms
Write: 10% 13159 30% 13159 50% 13159 90% 13159 99% 13159 ms
Write: 10% 13109 30% 13109 50% 13109 90% 13109 99% 13109 ms
Write: 10% 13364 30% 13364 50% 13364 90% 13364 99% 13364 ms
Write: 10% 11586 30% 11586 50% 11586 90% 11586 99% 11586 ms
Write: 10% 15683 30% 15683 50% 15683 90% 15683 99% 15683 ms
Write: 10% 12304 30% 12304 50% 12304 90% 12304 99% 12304 ms
Write: 10% 12588 30% 12588 50% 12588 90% 12588 99% 12588 ms
Write: 10% 12511 30% 12511 50% 12511 90% 12511 99% 12511 ms
Write: 10% 11198 30% 11198 50% 11198 90% 11198 99% 11198 ms
Write: 10% 12311 30% 12311 50% 12311 90% 12311 99% 12311 ms
Write: 10% 11165 30% 11165 50% 11165 90% 11165 99% 11165 ms
Write: 10% 11057 30% 11057 50% 11057 90% 11057 99% 11057 ms
Write: 10% 10867 30% 10867 50% 10867 90% 10867 99% 10867 ms
Write: 10% 11113 30% 11113 50% 11113 90% 11113 99% 11113 ms
Write: 10% 10163 30% 10163 50% 10163 90% 10163 99% 10163 ms
Write: 10% 9160 30% 9160 50% 9160 90% 9160 99% 9160 ms
Write: 10% 6447 30% 6447 50% 6447 90% 6447 99% 6447 ms
Write: 10% 6545 30% 6545 50% 6545 90% 6545 99% 6545 ms
Write: 10% 6443 30% 6443 50% 6443 90% 6443 99% 6443 ms
Write: 10% 5760 30% 5760 50% 5760 90% 5760 99% 5760 ms
Write: 10% 6738 30% 6738 50% 6738 90% 6738 99% 6738 ms
Write: 10% 5731 30% 5731 50% 5731 90% 5731 99% 5731 ms
Write: 10% 5027 30% 5027 50% 5027 90% 5027 99% 5027 ms
Write: 10% 6686 30% 6686 50% 6686 90% 6686 99% 6686 ms
Write: 10% 7035 30% 7035 50% 7035 90% 7035 99% 7035 ms
Write: 10% 4727 30% 4727 50% 4727 90% 4727 99% 4727 ms
Write: 10% 3807 30% 3807 50% 3807 90% 3807 99% 3807 ms
Write: 10% 4320 30% 4320 50% 4320 90% 4320 99% 4320 ms
Write: 10% 4829 30% 4829 50% 4829 90% 4829 99% 4829 ms
Write: 10% 4093 30% 4093 50% 4093 90% 4093 99% 4093 ms
Write: 10% 4691 30% 4691 50% 4691 90% 4691 99% 4691 ms
Write: 10% 3017 30% 3017 50% 3017 90% 3017 99% 3017 ms
Write: 10% 3642 30% 3642 50% 3642 90% 3642 99% 3642 ms
Write: 10% 2967 30% 2967 50% 2967 90% 2967 99% 2967 ms
Write: 10% 3430 30% 3430 50% 3430 90% 3430 99% 3430 ms
Write: 10% 2825 30% 2825 50% 2825 90% 2825 99% 2825 ms
Write: 10% 3239 30% 3239 50% 3239 90% 3239 99% 3239 ms
Write: 10% 3438 30% 3438 50% 3438 90% 3438 99% 3438 ms
Write: 10% 2640 30% 2640 50% 2640 90% 2640 99% 2640 ms
Write: 10% 2733 30% 2733 50% 2733 90% 2733 99% 2733 ms
Write: 10% 3040 30% 3040 50% 3040 90% 3040 99% 3040 ms
Write: 10% 4669 30% 4669 50% 4669 90% 4669 99% 4669 ms
Write: 10% 2478 30% 2478 50% 2478 90% 2478 99% 2478 ms
Write: 10% 3455 30% 3455 50% 3455 90% 3455 99% 3455 ms
Write: 10% 2740 30% 2740 50% 2740 90% 2740 99% 2740 ms
Write: 10% 2515 30% 2515 50% 2515 90% 2515 99% 2515 ms
Read: 10% 7165 30% 9789 50% 12413 90% 20544 99% 22373 ms
Step 3. write modify
Write: 10% 11504 30% 11504 50% 11504 90% 11504 99% 11504 ms
Write: 10% 13671 30% 13671 50% 13671 90% 13671 99% 13671 ms
Write: 10% 15275 30% 15275 50% 15275 90% 15275 99% 15275 ms
Write: 10% 15192 30% 15192 50% 15192 90% 15192 99% 15192 ms
Write: 10% 15183 30% 15183 50% 15183 90% 15183 99% 15183 ms
Write: 10% 14637 30% 14637 50% 14637 90% 14637 99% 14637 ms
Write: 10% 15107 30% 15107 50% 15107 90% 15107 99% 15107 ms
Write: 10% 14948 30% 14948 50% 14948 90% 14948 99% 14948 ms
Write: 10% 14757 30% 14757 50% 14757 90% 14757 99% 14757 ms
Write: 10% 15451 30% 15451 50% 15451 90% 15451 99% 15451 ms
Write: 10% 15910 30% 15910 50% 15910 90% 15910 99% 15910 ms
Write: 10% 14753 30% 14753 50% 14753 90% 14753 99% 14753 ms
Write: 10% 14561 30% 14561 50% 14561 90% 14561 99% 14561 ms
Write: 10% 13888 30% 13888 50% 13888 90% 13888 99% 13888 ms
Write: 10% 13564 30% 13564 50% 13564 90% 13564 99% 13564 ms
Write: 10% 12408 30% 12408 50% 12408 90% 12408 99% 12408 ms
Write: 10% 13258 30% 13258 50% 13258 90% 13258 99% 13258 ms
Write: 10% 12123 30% 12123 50% 12123 90% 12123 99% 12123 ms
Write: 10% 12305 30% 12305 50% 12305 90% 12305 99% 12305 ms
Write: 10% 11858 30% 11858 50% 11858 90% 11858 99% 11858 ms
Write: 10% 11796 30% 11796 50% 11796 90% 11796 99% 11796 ms
Write: 10% 10893 30% 10893 50% 10893 90% 10893 99% 10893 ms
Write: 10% 11615 30% 11615 50% 11615 90% 11615 99% 11615 ms
Write: 10% 11176 30% 11176 50% 11176 90% 11176 99% 11176 ms
Write: 10% 10829 30% 10829 50% 10829 90% 10829 99% 10829 ms
Write: 10% 11731 30% 11731 50% 11731 90% 11731 99% 11731 ms
Write: 10% 10706 30% 10706 50% 10706 90% 10706 99% 10706 ms
Write: 10% 10739 30% 10739 50% 10739 90% 10739 99% 10739 ms
Write: 10% 10601 30% 10601 50% 10601 90% 10601 99% 10601 ms
Write: 10% 10727 30% 10727 50% 10727 90% 10727 99% 10727 ms
Write: 10% 10391 30% 10391 50% 10391 90% 10391 99% 10391 ms
Write: 10% 9329 30% 9329 50% 9329 90% 9329 99% 9329 ms
Write: 10% 10595 30% 10595 50% 10595 90% 10595 99% 10595 ms
Write: 10% 9484 30% 9484 50% 9484 90% 9484 99% 9484 ms
Write: 10% 8742 30% 8742 50% 8742 90% 8742 99% 8742 ms
Write: 10% 9134 30% 9134 50% 9134 90% 9134 99% 9134 ms
Write: 10% 9385 30% 9385 50% 9385 90% 9385 99% 9385 ms
Write: 10% 10595 30% 10595 50% 10595 90% 10595 99% 10595 ms
Write: 10% 9096 30% 9096 50% 9096 90% 9096 99% 9096 ms
Write: 10% 10748 30% 10748 50% 10748 90% 10748 99% 10748 ms
Write: 10% 8316 30% 8316 50% 8316 90% 8316 99% 8316 ms
Write: 10% 8248 30% 8248 50% 8248 90% 8248 99% 8248 ms
Write: 10% 7494 30% 7494 50% 7494 90% 7494 99% 7494 ms
Write: 10% 7475 30% 7475 50% 7475 90% 7475 99% 7475 ms
Write: 10% 8387 30% 8387 50% 8387 90% 8387 99% 8387 ms
Write: 10% 6618 30% 6618 50% 6618 90% 6618 99% 6618 ms
Write: 10% 8197 30% 8197 50% 8197 90% 8197 99% 8197 ms
Write: 10% 6297 30% 6297 50% 6297 90% 6297 99% 6297 ms
Write: 10% 6064 30% 6064 50% 6064 90% 6064 99% 6064 ms
Write: 10% 5858 30% 5858 50% 5858 90% 5858 99% 5858 ms
Write: 10% 5664 30% 5664 50% 5664 90% 5664 99% 5664 ms
Write: 10% 5473 30% 5473 50% 5473 90% 5473 99% 5473 ms
Write: 10% 5354 30% 5354 50% 5354 90% 5354 99% 5354 ms
Write: 10% 5245 30% 5245 50% 5245 90% 5245 99% 5245 ms
Write: 10% 5762 30% 5762 50% 5762 90% 5762 99% 5762 ms
Write: 10% 5075 30% 5075 50% 5075 90% 5075 99% 5075 ms
Write: 10% 5423 30% 5423 50% 5423 90% 5423 99% 5423 ms
Write: 10% 5115 30% 5115 50% 5115 90% 5115 99% 5115 ms
Write: 10% 4723 30% 4723 50% 4723 90% 4723 99% 4723 ms
Write: 10% 4760 30% 4760 50% 4760 90% 4760 99% 4760 ms
Write: 10% 4954 30% 4954 50% 4954 90% 4954 99% 4954 ms
Write: 10% 4696 30% 4696 50% 4696 90% 4696 99% 4696 ms
Write: 10% 4807 30% 4807 50% 4807 90% 4807 99% 4807 ms
Write: 10% 4564 30% 4564 50% 4564 90% 4564 99% 4564 ms
Update: 10% 486 30% 486 50% 486 90% 486 99% 486 ms
Step 4. read modify write
Was written: 87.5 MiB, Speed: 1.0416666666666667 MiB/s
Write: 10% 24088 30% 24088 50% 24088 90% 24088 99% 24088 ms
Write: 10% 29958 30% 29958 50% 29958 90% 29958 99% 29958 ms
Write: 10% 29155 30% 29155 50% 29155 90% 29155 99% 29155 ms
Write: 10% 23418 30% 23418 50% 23418 90% 23418 99% 23418 ms
Write: 10% 27569 30% 27569 50% 27569 90% 27569 99% 27569 ms
Write: 10% 25948 30% 25948 50% 25948 90% 25948 99% 25948 ms
Write: 10% 25560 30% 25560 50% 25560 90% 25560 99% 25560 ms
Write: 10% 25376 30% 25376 50% 25376 90% 25376 99% 25376 ms
Write: 10% 24868 30% 24868 50% 24868 90% 24868 99% 24868 ms
Write: 10% 22879 30% 22879 50% 22879 90% 22879 99% 22879 ms
Write: 10% 25822 30% 25822 50% 25822 90% 25822 99% 25822 ms
Write: 10% 23690 30% 23690 50% 23690 90% 23690 99% 23690 ms
Write: 10% 23652 30% 23652 50% 23652 90% 23652 99% 23652 ms
Write: 10% 22364 30% 22364 50% 22364 90% 22364 99% 22364 ms
Write: 10% 24594 30% 24594 50% 24594 90% 24594 99% 24594 ms
Write: 10% 22162 30% 22162 50% 22162 90% 22162 99% 22162 ms
Write: 10% 22699 30% 22699 50% 22699 90% 22699 99% 22699 ms
Write: 10% 22996 30% 22996 50% 22996 90% 22996 99% 22996 ms
Write: 10% 23330 30% 23330 50% 23330 90% 23330 99% 23330 ms
Write: 10% 22600 30% 22600 50% 22600 90% 22600 99% 22600 ms
Write: 10% 22050 30% 22050 50% 22050 90% 22050 99% 22050 ms
Write: 10% 19750 30% 19750 50% 19750 90% 19750 99% 19750 ms
Write: 10% 19992 30% 19992 50% 19992 90% 19992 99% 19992 ms
Write: 10% 21054 30% 21054 50% 21054 90% 21054 99% 21054 ms
Write: 10% 21124 30% 21124 50% 21124 90% 21124 99% 21124 ms
Write: 10% 20509 30% 20509 50% 20509 90% 20509 99% 20509 ms
Write: 10% 19149 30% 19149 50% 19149 90% 19149 99% 19149 ms
Write: 10% 18400 30% 18400 50% 18400 90% 18400 99% 18400 ms
Write: 10% 18616 30% 18616 50% 18616 90% 18616 99% 18616 ms
Write: 10% 20094 30% 20094 50% 20094 90% 20094 99% 20094 ms
Write: 10% 18496 30% 18496 50% 18496 90% 18496 99% 18496 ms
Write: 10% 20284 30% 20284 50% 20284 90% 20284 99% 20284 ms
Write: 10% 18666 30% 18666 50% 18666 90% 18666 99% 18666 ms
Write: 10% 17926 30% 17926 50% 17926 90% 17926 99% 17926 ms
Write: 10% 18051 30% 18051 50% 18051 90% 18051 99% 18051 ms
Write: 10% 15350 30% 15350 50% 15350 90% 15350 99% 15350 ms
Write: 10% 18540 30% 18540 50% 18540 90% 18540 99% 18540 ms
Write: 10% 16567 30% 16567 50% 16567 90% 16567 99% 16567 ms
Write: 10% 15137 30% 15137 50% 15137 90% 15137 99% 15137 ms
Write: 10% 15301 30% 15301 50% 15301 90% 15301 99% 15301 ms
Write: 10% 17692 30% 17692 50% 17692 90% 17692 99% 17692 ms
Write: 10% 15056 30% 15056 50% 15056 90% 15056 99% 15056 ms
Write: 10% 14503 30% 14503 50% 14503 90% 14503 99% 14503 ms
Write: 10% 16252 30% 16252 50% 16252 90% 16252 99% 16252 ms
Write: 10% 13380 30% 13380 50% 13380 90% 13380 99% 13380 ms
Write: 10% 14571 30% 14571 50% 14571 90% 14571 99% 14571 ms
Write: 10% 13831 30% 13831 50% 13831 90% 13831 99% 13831 ms
Write: 10% 13842 30% 13842 50% 13842 90% 13842 99% 13842 ms
Write: 10% 13184 30% 13184 50% 13184 90% 13184 99% 13184 ms
Write: 10% 13478 30% 13478 50% 13478 90% 13478 99% 13478 ms
Write: 10% 13312 30% 13312 50% 13312 90% 13312 99% 13312 ms
Write: 10% 13035 30% 13035 50% 13035 90% 13035 99% 13035 ms
Write: 10% 11842 30% 11842 50% 11842 90% 11842 99% 11842 ms
Write: 10% 12320 30% 12320 50% 12320 90% 12320 99% 12320 ms
Write: 10% 12230 30% 12230 50% 12230 90% 12230 99% 12230 ms
Write: 10% 10103 30% 10103 50% 10103 90% 10103 99% 10103 ms
Write: 10% 10422 30% 10422 50% 10422 90% 10422 99% 10422 ms
Write: 10% 11781 30% 11781 50% 11781 90% 11781 99% 11781 ms
Write: 10% 13078 30% 13078 50% 13078 90% 13078 99% 13078 ms
Write: 10% 11518 30% 11518 50% 11518 90% 11518 99% 11518 ms
Write: 10% 8873 30% 8873 50% 8873 90% 8873 99% 8873 ms
Write: 10% 11200 30% 11200 50% 11200 90% 11200 99% 11200 ms
Write: 10% 9587 30% 9587 50% 9587 90% 9587 99% 9587 ms
Write: 10% 12470 30% 12470 50% 12470 90% 12470 99% 12470 ms
Update: 10% 1016 30% 1016 50% 1016 90% 1016 99% 1016 ms
Read: 10% 7725 30% 10013 50% 16207 90% 38575 99% 42831 ms
File "library/python/pytest/main.py", line 101, in main rc = pytest.main(
File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main(
File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args)
File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main return wrap_session(config, _main)
File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session session.exitstatus = doit(config, session) or 0
File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main config.hook.pytest_runtestloop(session=session)
File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args)
File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem)
File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args)
File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol runtestprotocol(item, nextitem=nextitem)
File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol reports.append(call_and_report(item, "call", log))
File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report call = call_runtest_hook(item, when, **kwds)
File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook return CallInfo.from_call(
File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call result: Optional[TResult] = func()
File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in lambda: ihook(item=item, **kwds), when=when, reraise=reraise
File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args)
File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call item.runtest()
File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest self.ihook.pytest_pyfunc_call(pyfuncitem=self)
File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args)
File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call pyfuncitem.retval = testfunction(**testargs)
File "/home/runner/.ya/build/build_root/r3ux/000162/environment/arcadia/ydb/tests/olap/scenario/conftest.py", line 88, in test ctx.executable(self, ctx)
File "ydb/tests/olap/scenario/test_insert.py", line 86, in scenario_read_data_during_bulk_upsert thread2.join_all()
File "ydb/tests/olap/common/thread_helper.py", line 45, in join_all thread.join(timeout=timeout)
File "ydb/tests/olap/common/thread_helper.py", line 16, in join super().join(timeout)
File "contrib/tools/python3/Lib/threading.py", line 1149, in join self._wait_for_tstate_lock()
File "contrib/tools/python3/Lib/threading.py", line 1169, in _wait_for_tstate_lock if lock.acquire(block, timeout):
File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown traceback.print_stack(file=sys.stderr)
Traceback (most recent call last):
File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for(
File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN))
yatest.common.process.TimeoutError: ...b/tests/olap/scenario/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/scenario', '--test-tool-bin', '/home/runner/.ya/tools/v4/8330113388/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--test-filter', 'test_alter_tiering.py::TestAlterTiering::test[many_tables]', '--test-filter', 'test_insert.py::TestInsert::test[read_data_during_bulk_upsert]', '--test-filter', 'test_simple.py::TestSimple::test[alter_table]', '--test-filter', 'test_simple.py::TestSimple::test[alter_tablestore]', '--test-filter',
'test_simple.py::TestSimple::test[table]', '--test-filter', 'test_simple.py::TestSimple::test[tablestores]', '--test-filter', 'test_scheme_load.py::TestSchemeLoad::test[create_and_drop_tables]', '--test-filter', 'test_read_update_write_load.py::TestReadUpdateWriteLoad::test[read_update_write_load]', '--tb', 'short', '--dep-root', 'ydb/tests/olap/scenario', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1749, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("...b/tests/olap/scenario/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/scenario', '--test-tool-bin', '/home/runner/.ya/tools/v4/8330113388/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--test-filter', 'test_alter_tiering.py::TestAlterTiering::test[many_tables]', '--test-filter', 'test_insert.py::TestInsert::test[read_data_during_bulk_upsert]', '--test-filter', 'test_simple.py::TestSimple::test[alter_table]', '--test-filter', 'test_simple.py::TestSimple::test[alter_tablestore]', '--test-filter', 'test_simple.py::TestSimple::test[table]', '--test-filter', 'test_simple.py::TestSimple::test[tablestores]', '--test-filter', 'test_scheme_load.py::TestSchemeLoad::test[create_and_drop_tables]', '--test-filter', 'test_read_update_write_load.py::TestReadUpdateWriteLoad::test[read_update_write_load]', '--tb', 'short', '--dep-root', 'ydb/tests/olap/scenario', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout",), {}) ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/column_family/compression/py3test >> alter_compression.py::TestAlterCompression::test_all_supported_compression 2025-03-31 17:22:10,174 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-03-31 17:22:10,429 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. 
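Note on the read_data_during_bulk_upsert hang above: the test blocks inside its own helper, ydb/tests/olap/common/thread_helper.py, where join_all() joins each worker thread in turn, so a single stuck bulk-upsert writer holds the whole test until the 600-second wrapper timeout fires. A minimal sketch of that helper pattern (class and method names follow the traceback; the timeout plumbing is illustrative, not the exact implementation):

    import threading

    class TestThread(threading.Thread):
        """Worker thread as used by the scenario tests (sketch)."""
        def join(self, timeout=None):
            # the real helper also re-raises exceptions from the target;
            # this sketch only mirrors the join-with-timeout shape
            super().join(timeout)

    class TestThreads:
        def __init__(self):
            self.threads: list[TestThread] = []

        def append(self, thread: TestThread) -> None:
            self.threads.append(thread)

        def join_all(self, timeout=None) -> None:
            # timeout=None lets every join block indefinitely, which is why
            # the 600 s wrapper limit, not the test itself, ends the run
            for thread in self.threads:
                thread.join(timeout=timeout)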
Process tree before termination: pid rss ref pdirt 970565 774M 780M 696M ydb-tests-olap-column_family-compression --basetemp /home/runner/.ya/build/build_root/r3ux/000166/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor 971764 5.9G 5.9G 5.3G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/r3ux/000166/ydb/tests/olap/column_family/compression/test-results/py3test/testing_ou Test command err: File "library/python/pytest/main.py", line 101, in main rc = pytest.main( File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main( File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main return wrap_session(config, _main) File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session session.exitstatus = doit(config, session) or 0 File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main config.hook.pytest_runtestloop(session=session) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol runtestprotocol(item, nextitem=nextitem) File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol reports.append(call_and_report(item, "call", log)) File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report call = call_runtest_hook(item, when, **kwds) File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook return CallInfo.from_call( File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call result: Optional[TResult] = func() File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in lambda: ihook(item=item, **kwds), when=when, reraise=reraise File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File 
"contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call item.runtest() File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest self.ihook.pytest_pyfunc_call(pyfuncitem=self) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call pyfuncitem.retval = testfunction(**testargs) File "ydb/tests/olap/column_family/compression/alter_compression.py", line 110, in test_all_supported_compression volumes: tuple[int, int] = tables[i].get_volumes_column("value") File "ydb/tests/olap/common/column_table_helper.py", line 69, in get_volumes_column raw_bytes, bytes = self._coollect_volumes_column(column_name) File "ydb/tests/olap/common/column_table_helper.py", line 59, in _coollect_volumes_column result_set = self.ydb_client.query(query) File "ydb/tests/olap/common/ydb_client.py", line 24, in query return self.session_pool.execute_with_retries(statement) File "contrib/python/ydb/py3/ydb/query/pool.py", line 202, in execute_with_retries return retry_operation_sync(wrapped_callee, retry_settings) File "contrib/python/ydb/py3/ydb/retries.py", line 133, in retry_operation_sync for next_opt in opt_generator: File "contrib/python/ydb/py3/ydb/retries.py", line 94, in retry_operation_impl result = YdbRetryOperationFinalResult(callee(*args, **kwargs)) File "contrib/python/ydb/py3/ydb/query/pool.py", line 200, in wrapped_callee return [result_set for result_set in it] File "contrib/python/ydb/py3/ydb/_utilities.py", line 173, in __next__ return self._next() File "contrib/python/ydb/py3/ydb/_utilities.py", line 164, in _next res = self.wrapper(next(self.it)) File "contrib/python/grpcio/py3/grpc/_channel.py", line 475, in __next__ return self._next() File "contrib/python/grpcio/py3/grpc/_channel.py", line 872, in _next _common.wait(self._state.condition.wait, _response_ready) File "contrib/python/grpcio/py3/grpc/_common.py", line 150, in wait _wait_once(wait_fn, MAXIMUM_WAIT_TIMEOUT, spin_cb) File "contrib/python/grpcio/py3/grpc/_common.py", line 112, in _wait_once wait_fn(timeout=timeout) File "contrib/tools/python3/Lib/threading.py", line 359, in wait gotit = waiter.acquire(True, timeout) File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown traceback.print_stack(file=sys.stderr) Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: ...', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/r3ux/000166/ydb/tests/olap/column_family/compression/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/r3ux/000166', '--source-root', '/home/runner/.ya/build/build_root/r3ux/000166/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/r3ux/000166/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/column_family/compression', 
'--test-tool-bin', '/home/runner/.ya/tools/v4/8330113388/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--test-filter', 'alter_compression.py::TestAlterCompression::test_all_supported_compression', '--test-filter', 'alter_compression.py::TestAlterCompression::test_availability_data', '--tb', 'short', '--dep-root', 'ydb/tests/olap/column_family/compression', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1749, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("...', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/r3ux/000166/ydb/tests/olap/column_family/compression/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/r3ux/000166', '--source-root', '/home/runner/.ya/build/build_root/r3ux/000166/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/r3ux/000166/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/column_family/compression', '--test-tool-bin', '/home/runner/.ya/tools/v4/8330113388/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--test-filter', 'alter_compression.py::TestAlterCompression::test_all_supported_compression', '--test-filter', 'alter_compression.py::TestAlterCompression::test_availability_data', '--tb', 'short', '--dep-root', 'ydb/tests/olap/column_family/compression', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout",), {}) |99.8%| [TM] {RESULT} ydb/tests/olap/scenario/py3test |99.9%| [TM] {RESULT} ydb/tests/olap/column_family/compression/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/s3_import/py3test >> test_tpch_import.py::TestS3TpchImport::test_import_and_export 2025-03-31 17:22:09,936 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-03-31 17:22:10,362 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. 
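The compression test above is not stuck in its own code: the stack descends from get_volumes_column() through the YDB Python SDK's QuerySessionPool.execute_with_retries() into grpc's polling wait, so the query simply never returns under the ASAN-slowed server. A minimal sketch of the query helper seen at ydb/tests/olap/common/ydb_client.py:24 (endpoint and database values are placeholders):

    import ydb

    class YdbClient:
        def __init__(self, endpoint: str, database: str):
            self.driver = ydb.Driver(endpoint=endpoint, database=database)
            self.driver.wait(timeout=15)
            self.session_pool = ydb.QuerySessionPool(self.driver)

        def query(self, statement: str):
            # execute_with_retries drains the whole result stream and retries
            # transient errors; a non-answering server therefore parks the
            # caller inside grpc's _next() wait loop, as in the stack above
            return self.session_pool.execute_with_retries(statement)

    # usage (placeholder values):
    # YdbClient("grpc://localhost:2136", "/Root").query("SELECT 1")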
Process tree before termination: pid rss ref pdirt 970538 596M 599M 515M ydb-tests-olap-s3_import --basetemp /home/runner/.ya/build/build_root/r3ux/00020d/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-module 971760 9.8G 9.8G 9.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/r3ux/00020d/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff/test_tpc 974465 534M 534M 499M └─ moto_server s3 --port 15395 Test command err: File "library/python/pytest/main.py", line 101, in main rc = pytest.main( File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main( File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main return wrap_session(config, _main) File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session session.exitstatus = doit(config, session) or 0 File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main config.hook.pytest_runtestloop(session=session) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol runtestprotocol(item, nextitem=nextitem) File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol reports.append(call_and_report(item, "call", log)) File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report call = call_runtest_hook(item, when, **kwds) File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook return CallInfo.from_call( File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call result: Optional[TResult] = func() File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in lambda: ihook(item=item, **kwds), when=when, reraise=reraise File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in 
_multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call item.runtest() File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest self.ihook.pytest_pyfunc_call(pyfuncitem=self) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call pyfuncitem.retval = testfunction(**testargs) File "ydb/tests/olap/s3_import/test_tpch_import.py", line 109, in test_import_and_export self.validate_table("from_s3") File "ydb/tests/olap/s3_import/test_tpch_import.py", line 12, in validate_table result_sets = self.ydb_client.query(f""" File "ydb/tests/olap/common/ydb_client.py", line 24, in query return self.session_pool.execute_with_retries(statement) File "contrib/python/ydb/py3/ydb/query/pool.py", line 202, in execute_with_retries return retry_operation_sync(wrapped_callee, retry_settings) File "contrib/python/ydb/py3/ydb/retries.py", line 133, in retry_operation_sync for next_opt in opt_generator: File "contrib/python/ydb/py3/ydb/retries.py", line 94, in retry_operation_impl result = YdbRetryOperationFinalResult(callee(*args, **kwargs)) File "contrib/python/ydb/py3/ydb/query/pool.py", line 200, in wrapped_callee return [result_set for result_set in it] File "contrib/python/ydb/py3/ydb/_utilities.py", line 173, in __next__ return self._next() File "contrib/python/ydb/py3/ydb/_utilities.py", line 164, in _next res = self.wrapper(next(self.it)) File "contrib/python/grpcio/py3/grpc/_channel.py", line 475, in __next__ return self._next() File "contrib/python/grpcio/py3/grpc/_channel.py", line 872, in _next _common.wait(self._state.condition.wait, _response_ready) File "contrib/python/grpcio/py3/grpc/_common.py", line 150, in wait _wait_once(wait_fn, MAXIMUM_WAIT_TIMEOUT, spin_cb) File "contrib/python/grpcio/py3/grpc/_common.py", line 112, in _wait_once wait_fn(timeout=timeout) File "contrib/tools/python3/Lib/threading.py", line 359, in wait gotit = waiter.acquire(True, timeout) File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown traceback.print_stack(file=sys.stderr) contrib/python/pytest/py3/_pytest/main.py:306: PluggyTeardownRaisedWarning: A plugin raised an exception during an old-style hookwrapper teardown. Plugin: terminalreporter, Hook: pytest_sessionfinish SeveralDaemonErrors: Daemon failed with message: Bad exit_code.. Process exit_code = 100. 
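The trailing frames of both hangs come from library/python/pytest/plugins/ya.py: on the wrapper's timeout the plugin's _graceful_shutdown() prints the stuck stack via traceback.print_stack() before the process is killed, which is the only reason these tracebacks are visible at all. A rough sketch of that dump-stacks-on-signal technique (the signal number and handler wiring are assumptions, not the plugin's actual code):

    import signal
    import sys
    import traceback

    def _dump_stacks(signum, frame):
        # print where every live thread is stuck, then let shutdown proceed
        for tid, stack in sys._current_frames().items():
            print(f"--- thread {tid} ---", file=sys.stderr)
            traceback.print_stack(stack, file=sys.stderr)

    signal.signal(signal.SIGUSR2, _dump_stacks)  # assumed signal choice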
Stdout file name: /home/runner/.ya/build/build_root/r3ux/00020d/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff/test_tpch_import.py.TestS3TpchImport.test_import_and_export/cluster/node_1/stdout
Stderr file name: /home/runner/.ya/build/build_root/r3ux/00020d/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff/test_tpch_import.py.TestS3TpchImport.test_import_and_export/cluster/node_1/stderr
Stderr content:
GRpc memory quota was set but disabled due to issues with grpc quoter, to enable it use EnableGRpcMemoryQuota option
Current KQP shutdown state: spent 0 seconds, not started yet
warning: address range table at offset 0x10c0 has a premature terminator entry at offset 0x10d0
=================================================================
==971760==ERROR: LeakSanitizer: detected memory leaks
Indirect leak of 3792 byte(s) in 1 object(s) allocated from:
    #0 0x1d6b812d in operator new(unsigned long) /-S/contrib/libs/clang18-rt/lib/asan/asan_new_delete.cpp:86:3
    #1 0x468efbcf in MakeIntrusive, NYql::TTypeAnnotationContext &> /-S/util/generic/ptr.h:818:12
    #2 0x468efbcf in NYql::TYtState::TYtState(NYql::TTypeAnnotationContext*) /-S/yt/yql/providers/yt/provider/yql_yt_provider.h:102:25
    #3 0x468e2099 in MakeIntrusive, NYql::TTypeAnnotationContext *> /-S/util/generic/ptr.h:818:16
    #4 0x468e2099 in NYql::CreateYtNativeState(TIntrusivePtr>, TBasicString> const&, TBasicString> const&, NYql::TYtGatewayConfig const*, TIntrusivePtr>, std::__y1::shared_ptr const&, std::__y1::shared_ptr const&) /-S/yt/yql/providers/yt/provider/yql_yt_provider.cpp:345:20
    #5 0x4b7e691f in InitYtProvider /-S/ydb/core/kqp/host/kqp_host.cpp:1864:38
    #6 0x4b7e691f in NKikimr::NKqp::(anonymous namespace)::TKqpHost::Init(NYql::EKikimrQueryType) /-S/ydb/core/kqp/host/kqp_host.cpp:1939:17
    #7 0x4b7e33e0 in NKikimr::NKqp::(anonymous namespace)::TKqpHost::SetupSession(NYql::EKikimrQueryType) /-S/ydb/core/kqp/host/kqp_host.cpp:2020:9
    #8 0x4b7dfe78 in NKikimr::NKqp::(anonymous namespace)::TKqpHost::SetupYqlTransformer(NYql::EKikimrQueryType) /-S/ydb/core/kqp/host/kqp_host.cpp:2035:9
    #9 0x4b847704 in NKikimr::NKqp::(anonymous namespace)::TKqpHost::PrepareQueryInternal(NKikimr::NKqp::TKqpQueryRef const&, TIntrusivePtr>, NYql::EKikimrQueryType, NKikimr::NKqp::IKqpHost::TPrepareSettings const&, NYql::TExprContext&) /-S/ydb/core/kqp/host/kqp_host.cpp:1595:9
    #10 0x4b7cf29f in operator() /-S/ydb/core/kqp/host/kqp_host.cpp:1238:24
    #11 0x4b7cf29f in CheckedProcess /-S/ydb/core/kqp/host/kqp_host.cpp:830:28
    #12 0x4b7cf29f in CheckedProcessQuery<(lambda at /-S/ydb/core/kqp/host/kqp_host.cpp:1237:13)> /-S/ydb/core/kqp/host/kqp_host.cpp:853:12
    #13 0x4b7cf29f in NKikimr::NKqp::(anonymous namespace)::TKqpHost::PrepareGenericQuery(NKikimr::NKqp::TKqpQueryRef const&, NKikimr::NKqp::IKqpHost::TPrepareSettings const&, TIntrusivePtr>) /-S/ydb/core/kqp/host/kqp_host.cpp:1236:16
    #14 0x4b3ff1f5 in NKikimr::NKqp::TKqpCompileActor::StartCompilation(NActors::TActorContext const&) /-S/ydb/core/kqp/compile_service/kqp_compile_actor.cpp:273:47
    #15 0x4b3fcea7 in Bootstrap /-S/ydb/core/kqp/compile_service/kqp_compile_actor.cpp:110:17
    #16 0x4b3fcea7 in NActors::TActorBootstrapped::StateBootstrap(TAutoPtr&) /-S/ydb/library/actors/core/actor_bootstrapped.h:22:22
    #17 0x1ff362cc in NActors::IActor::Receive(TAutoPtr&) /-S/ydb/library/actors/core/actor.cpp:280:13
    #18 0x1ffdefc4 in NActors::TExecutorThread::Execute(NActors::TMailbox*, bool) /-S/ydb/library/actors/core/executor_thread.cpp:269:28
    #19 0x1ffe7cee in
NActors::TExecutorThread::ProcessExecutorPool()::$_0::operator()(NActors::TMailbox*, bool) const /-S/ydb/library/actors/core/executor_thread.cpp:460:39 #20 0x1ffe7249 in NActors::TExecutorThread::ProcessExecutorPool() /-S/ydb/library/actors/core/executor_thread.cpp:512:13 #21 0x1ffe91de in NActors::TExecutorThread::ThreadProc() /-S/ydb/library/actors/core/executor_thread.cpp:538:9 #22 0x1d9d8a64 in (anonymous namespace)::TPosixThread::ThreadProxy(void*) /-S/util/system/thread.cpp:244:20 #23 0x1d6820e8 in asan_thread_start(void*) /-S/contrib/libs/clang18-rt/lib/asan/asan_interceptors.cpp:239:28 Indirect leak of 3128 byte(s) in 1 object(s) allocated from: #0 0x1d6b812d in operator new(unsigned long) /-S/contrib/libs/clang18-rt/lib/asan/asan_new_delete.cpp:86:3 #1 0x466d2866 in __libcpp_operator_new /-S/contrib/libs/cxxsupp/libcxx/include/new:265:10 #2 0x466d2866 in __libcpp_allocate /-S/contrib/libs/cxxsupp/libcxx/include/new:289:10 #3 0x466d2866 in allocate /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator.h:118:32 #4 0x466d2866 in initialize_dynamic /-S/util/generic/hash_table.h:239:35 #5 0x466d2866 in initialize_buckets_dynamic /-S/util/generic/hash_table.h:912:17 #6 0x466d2866 in THashTable> const, TIntrusivePtr>>, TBasicString>, THash>>, TSelect1st, TEqualTo>>, std::__y1::allocator< ... g> const&) /-S/yql/essentials/core/file_storage/storage.cpp:462:16 #18 0x45cd9f1c in NYql::TFileStorageImpl::TFileStorageImpl(NYql::TFileStorageConfig const&, std::__y1::vector>, std::__y1::allocator>>> const&) /-S/yql/essentials/core/file_storage/file_storage.cpp:47:11 #19 0x45cd93d0 in NYql::CreateFileStorage(NYql::TFileStorageConfig const&, std::__y1::vector>, std::__y1::allocator>>> const&) /-S/yql/essentials/core/file_storage/file_storage.cpp:342:16 #20 0x45728ec2 in NKikimr::NKqp::MakeYtGateway(NKikimr::NMiniKQL::IFunctionRegistry const*, NKikimrConfig::TQueryServiceConfig const&) /-S/ydb/core/kqp/federated_query/kqp_federated_query_helpers.cpp:29:44 #21 0x4572b9b0 in NKikimr::NKqp::TKqpFederatedQuerySetupFactoryDefault::TKqpFederatedQuerySetupFactoryDefault(NActors::TActorSystemSetup*, NKikimr::TAppData const*, NKikimrConfig::TAppConfig const&) /-S/ydb/core/kqp/federated_query/kqp_federated_query_helpers.cpp:88:21 #22 0x4572e7cd in construct /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator.h:165:24 #23 0x4572e7cd in construct /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator_traits.h:320:9 #24 0x4572e7cd in __shared_ptr_emplace, 0> /-S/contrib/libs/cxxsupp/libcxx/include/__memory/shared_ptr.h:296:5 #25 0x4572e7cd in allocate_shared, NActors::TActorSystemSetup *&, const NKikimr::TAppData *&, const NKikimrConfig::TAppConfig &, 0> /-S/contrib/libs/cxxsupp/libcxx/include/__memory/shared_ptr.h:875:51 #26 0x4572e7cd in make_shared /-S/contrib/libs/cxxsupp/libcxx/include/__memory/shared_ptr.h:883:10 #27 0x4572e7cd in NKikimr::NKqp::MakeKqpFederatedQuerySetupFactory(NActors::TActorSystemSetup*, NKikimr::TAppData const*, NKikimrConfig::TAppConfig const&) /-S/ydb/core/kqp/federated_query/kqp_federated_query_helpers.cpp:177:16 #28 0x42c631cc in NKikimr::NKikimrServicesInitializers::TKqpServiceInitializer::InitializeServices(NActors::TActorSystemSetup*, NKikimr::TAppData const*) /-S/ydb/core/driver_lib/run/kikimr_services_initializers.cpp:2135:43 #29 0x4fbc9ac2 in NKikimr::TServiceInitializersList::InitializeServices(NActors::TActorSystemSetup*, NKikimr::TAppData const*) /-S/ydb/core/driver_lib/run/service_initializer.cpp:13:29 #30 0x3deb9bc3 in 
NKikimr::TKikimrRunner::InitializeActorSystem(NKikimr::TKikimrRunConfig const&, TIntrusivePtr>, NKikimr::TBasicKikimrServicesMask const&) /-S/ydb/core/driver_lib/run/run.cpp:1349:26 #31 0x3ded0f55 in NKikimr::TKikimrRunner::CreateKikimrRunner(NKikimr::TKikimrRunConfig const&, std::__y1::shared_ptr) /-S/ydb/core/driver_lib/run/run.cpp:2003:13 #32 0x3b90cf76 in NKikimr::MainRun(NKikimr::TKikimrRunConfig const&, std::__y1::shared_ptr) /-S/ydb/core/driver_lib/run/main.cpp:44:43 #33 0x3ddb8664 in NKikimr::NDriverClient::TClientCommandServer::Run(NYdb::NConsoleClient::TClientCommand::TConfig&) /-S/ydb/core/driver_lib/cli_utils/cli_cmds_server.cpp:51:12 #34 0x3b987645 in NYdb::NConsoleClient::TClientCommandTree::Run(NYdb::NConsoleClient::TClientCommand::TConfig&) /-S/ydb/public/lib/ydb_cli/common/command.cpp:465:33 #35 0x3b9742e7 in NKikimr::NDriverClient::NewClient(int, char**, std::__y1::shared_ptr) /-S/ydb/core/driver_lib/cli_utils/cli_cmds_root.cpp:77:26 #36 0x3b9124b4 in NKikimr::Main(int, char**, std::__y1::shared_ptr) /-S/ydb/core/driver_lib/run/main.cpp:151:20 #37 0x3b915c4d in ParameterizedMain(int, char**, std::__y1::shared_ptr) /-S/ydb/core/driver_lib/run/main.cpp:201:16 #38 0x1d5eae93 in main /-S/ydb/apps/ydbd/main.cpp:27:12 #39 0x7fc7e56d1d8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) Indirect leak of 232 byte(s) in 1 object(s) allocated from: #0 0x1d6b812d in operator new(unsigned long) /-S/contrib/libs/clang18-rt/lib/asan/asan_new_delete.cpp:86:3 #1 0x45cd93bf in NYql::CreateFileStorage(NYql::TFileStorageConfig const&, std::__y1::vector>, std::__y1::allocator>>> const&) /-S/yql/essentials/core/file_storage/file_storage.cpp:342:12 #2 0x45728ec2 in NKikimr::NKqp::MakeYtGateway(NKikimr::NMiniKQL::IFunctionRegistry const*, NKikimrConfig::TQueryServiceConfig const&) /-S/ydb/core/kqp/federated_query/kqp_federated_query_helpers.cpp:29:44 #3 0x4572b9b0 in NKikimr::NKqp::TKqpFederatedQuerySetupFactoryDefault::TKqpFederatedQuerySetupFactoryDefault(NActors::TActorSystemSetup*, NKikimr::TAppData const*, NKikimrConfig::TAppConfig const&) /-S/ydb/core/kqp/federated_query/kqp_federated_query_helpers.cpp:88:21 #4 0x4572e7cd in construct /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator.h:165:24 #5 0x4572e7cd in construct /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator_traits.h:320:9 #6 0x4572e7cd in __shared_ptr_emplace, 0> /-S/contrib/libs/cxxsupp/libcxx/include/__memory/shared_ptr.h:296:5 #7 0x4572e7cd in allocate_shared, NActors::TActorSystemSetup *&, const NKikimr::TAppData *&, const NKikimrConfig::TAppConfig &, 0> /-S/contrib/libs/cxxsupp/libcxx/include/__memory/shared_ptr.h:875:51 #8 0x4572e7cd in make_shared /-S/contrib/libs/cxxsupp/libcxx/include/__memory/shared_ptr.h:883:10 #9 0x4572e7cd in NKikimr::NKqp::MakeKqpFederatedQuerySetupFactory(NActors::TActorSystemSetup*, NKikimr::TAppData const*, NKikimrConfig::TAppConfig const&) /-S/ydb/core/kqp/federated_query/kqp_federated_query_helpers.cpp:177:16 #10 0x42c631cc in NKikimr::NKikimrServicesInitializers::TKqpServiceInitializer::InitializeServices(NActors::TActorSystemSetup*, NKikimr::TAppData const*) /-S/ydb/core/driver_lib/run/kikimr_services_initializers.cpp:2135:43 #11 0x4fbc9ac2 in NKikimr::TServiceInitializersList::InitializeServices(NActors::TActorSystemSetup*, NKikimr::TAppData const*) /-S/ydb/core/driver_lib/run/service_initializer.cpp:13:29 #12 0x3deb9bc3 in NKikimr::TKikimrRunner::InitializeActorSystem(NKikimr::TKikimrRunConfig const&, TIntrusivePtr>, 
NKikimr::TBasicKikimrServicesMask const&) /-S/ydb/core/driver_lib/run/run.cpp:1349:26 Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: ...import', '--basetemp', '/home/runner/.ya/build/build_root/r3ux/00020d/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/r3ux/00020d/ydb/tests/olap/s3_import/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/r3ux/00020d', '--source-root', '/home/runner/.ya/build/build_root/r3ux/00020d/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/r3ux/00020d/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/s3_import', '--test-tool-bin', '/home/runner/.ya/tools/v4/8330113388/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--test-filter', 'test_tpch_import.py::TestS3TpchImport::test_import_and_export', '--tb', 'short', '--dep-root', 'ydb/tests/olap/s3_import', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1749, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("...import', '--basetemp', '/home/runner/.ya/build/build_root/r3ux/00020d/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/r3ux/00020d/ydb/tests/olap/s3_import/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/r3ux/00020d', '--source-root', '/home/runner/.ya/build/build_root/r3ux/00020d/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/r3ux/00020d/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/s3_import', '--test-tool-bin', '/home/runner/.ya/tools/v4/8330113388/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--test-filter', 'test_tpch_import.py::TestS3TpchImport::test_import_and_export', '--tb', 'short', '--dep-root', 'ydb/tests/olap/s3_import', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 
'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout",), {})
|99.9%| [TM] {RESULT} ydb/tests/olap/s3_import/py3test
|99.9%| CLEANING BUILD ROOT
------ sole chunk ran 2 tests (total:111.78s - recipes:9.76s test:99.48s recipes:2.50s)
Info: Test run has exceeded 8.0G (8388608K) memory limit with 8.2G (8562748K) used. This may lead to test failure on the Autocheck/CI
You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make
pid rss ref pdirt
970464 44.8M 44.8M 6.4M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
970549 33.9M 22.1M 9.6M ├─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
972897 658M 661M 401M │ └─ ydb-tests-fq-mem_alloc --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest
975861 436M 20.1M 242M │ ├─ ydb-tests-fq-mem_alloc --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doct
975869 5.4G 5.5G 5.0G │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/mem_alloc/test-results/py3test/testing_out_stuff/test
971481 1.8G 1.8G 1.2G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/mem_alloc/test-results/py3test/testing_out_stuff/ydb_data_s
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/mem_alloc/test-results/py3test/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/mem_alloc/test-results/py3test/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/mem_alloc/test-results/py3test/testing_out_stuff/stderr
ydb/tests/fq/streaming_optimize [size:medium] nchunks:4
------ [test_sql_streaming.py 0/4] chunk ran 7 tests (total:124.75s - recipes:0.55s test:123.48s recipes:0.60s)
[fail] test_sql_streaming.py::test[suites-GroupByHop-default.txt] [default-linux-x86_64-release-asan] (26.85s)
ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test
    result = fq_run.yql_exec(action="explain")
ydb/tests/fq/tools/fqrun.py:81: in yql_exec
    proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir)
library/python/testing/yatest_common/yatest/common/process.py:656: in execute
    res.wait(check_exit_code, timeout, on_timeout)
library/python/testing/yatest_common/yatest/common/process.py:411: in wait
    self._finalise(check_exit_code)
library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise
    self.verify_sanitize_errors()
library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors
    raise ExecutionError(self)
E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --fq-cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_p8xfof9w/fq_config.conf --as-cfg=-
--result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_p8xfof9w/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_p8xfof9w/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_p8xfof9w/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_p8xfof9w/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_p8xfof9w/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_p8xfof9w/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_p8xfof9w/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_p8xfof9w/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_p8xfof9w/topic_3.txt' has failed with code 100. 
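On the mem_alloc memory overrun reported above (8.2G used against the 8.0G default), the remedy the log itself suggests is a REQUIREMENTS(ram:X) entry in the suite's ya.make. A hypothetical fragment (the REQUIREMENTS macro is as hinted by the log; the test source name is illustrative):

    PY3TEST()

    # raise the RAM requirement above the observed 8.2G peak
    REQUIREMENTS(ram:16)

    TEST_SRCS(
        test_alloc_default.py  # hypothetical file name
    )

    END()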
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f36d47a83b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f36d47a1ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f36d402bd6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f36d4129464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f36d4129464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f36d4129464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f36d402ab64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f36d40d790a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f36d3fcfe0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f36d3fcfe0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f36d3fcfc50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f36d4126e0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f36d4126e0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHop-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-GroupByHopByStringKey-default.txt] [default-linux-x86_64-release-asan] (14.36s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:81: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --fq-cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_5vqxom9p/fq_config.conf --as-cfg=- --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_5vqxom9p/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_5vqxom9p/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_5vqxom9p/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_5vqxom9p/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_5vqxom9p/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_5vqxom9p/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_5vqxom9p/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_5vqxom9p/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_5vqxom9p/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f53afb683b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f53afb61ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f53af3ebd6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f53af4e9464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f53af4e9464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f53af4e9464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f53af3eab64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f53af49790a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f53af38fe0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f53af38fe0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f53af38fc50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f53af4e6e0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f53af4e6e0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHopByStringKey-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-GroupByHopExprKey-default.txt] [default-linux-x86_64-release-asan] (15.77s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:81: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) 
library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --fq-cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_uvhvoahr/fq_config.conf --as-cfg=- --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_uvhvoahr/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_uvhvoahr/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_uvhvoahr/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_uvhvoahr/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_uvhvoahr/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_uvhvoahr/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_uvhvoahr/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_uvhvoahr/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_uvhvoahr/topic_3.txt' has failed with code 100. 
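All seven streaming_optimize failures share one shape: fqrun.py assembles an fqrun command line and runs it through yatest with check_exit_code=False, but wait() still calls verify_sanitize_errors(), so the ASAN leak inside fqrun surfaces as ExecutionError with exit code 100. A minimal sketch of that execution path (the function signature is illustrative; the execute() call matches the traceback):

    import yatest.common.process as process

    def yql_exec(fqrun_binary: str, query_file: str, res_dir: str):
        # illustrative subset of the flags visible in the failing commands
        cmd = f"{fqrun_binary} --exclude-linked-udfs --action=explain --query={query_file}"
        # check_exit_code=False defers the pass/fail decision, but wait()
        # still runs verify_sanitize_errors(), which raises on leak reports
        return process.execute(cmd.strip().split(), check_exit_code=False, cwd=res_dir)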
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f6a525383b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f6a52531ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f6a51dbbd6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f6a51eb9464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f6a51eb9464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f6a51eb9464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f6a51dbab64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f6a51e6790a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f6a51d5fe0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f6a51d5fe0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f6a51d5fc50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f6a51eb6e0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f6a51eb6e0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423700 byte(s) leaked in 8242 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHopExprKey-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-GroupByHopListKey-default.txt] [default-linux-x86_64-release-asan] (16.56s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:81: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --fq-cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jg1gy7il/fq_config.conf --as-cfg=- --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jg1gy7il/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jg1gy7il/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jg1gy7il/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jg1gy7il/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jg1gy7il/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jg1gy7il/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jg1gy7il/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jg1gy7il/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jg1gy7il/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7fe3cd9183b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7fe3cd911ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7fe3cd19bd6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7fe3cd299464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7fe3cd299464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7fe3cd299464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7fe3cd19ab64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7fe3cd24790a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7fe3cd13fe0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7fe3cd13fe0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7fe3cd13fc50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7fe3cd296e0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7fe3cd296e0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHopListKey-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-GroupByHopNoKey-default.txt] [default-linux-x86_64-release-asan] (14.45s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:81: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: 
in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --fq-cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_l1dnxeco/fq_config.conf --as-cfg=- --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_l1dnxeco/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_l1dnxeco/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_l1dnxeco/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_l1dnxeco/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_l1dnxeco/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_l1dnxeco/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_l1dnxeco/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_l1dnxeco/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_l1dnxeco/topic_3.txt' has failed with code 100. 
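Each failure's Errors blob ends with an essentially identical LeakSanitizer verdict, "SUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s)" (423700 bytes in 8242 allocations for one variant), pointing at Python import machinery rather than the queries under test. A small, purely illustrative helper for pulling those summaries out of a run log:

    import re
    import sys

    SUMMARY = re.compile(
        r"SUMMARY: AddressSanitizer: (\d+) byte\(s\) leaked in (\d+) allocation\(s\)"
    )

    def leak_summaries(log_path: str) -> list[tuple[int, int]]:
        # returns (leaked_bytes, allocations) per LeakSanitizer summary found
        with open(log_path, encoding="utf-8", errors="replace") as f:
            return [(int(b), int(a)) for b, a in SUMMARY.findall(f.read())]

    if __name__ == "__main__":
        for leaked, allocs in leak_summaries(sys.argv[1]):
            print(f"{leaked} bytes leaked in {allocs} allocations")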
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f29943083b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f2994301ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f2993b8bd6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f2993c89464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f2993c89464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f2993c89464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f2993b8ab64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f2993c3790a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f2993b2fe0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f2993b2fe0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f2993b2fc50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f2993c86e0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f2993c86e0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHopNoKey-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-GroupByHopPercentile-default.txt] [default-linux-x86_64-release-asan] (15.79s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:81: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --fq-cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_bjxz_nlr/fq_config.conf --as-cfg=- --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_bjxz_nlr/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_bjxz_nlr/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_bjxz_nlr/plan.json 
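The traceback above shows why these tests fail even though the harness passes check_exit_code=False: yatest's wait() path ends in verify_sanitize_errors(), which raises as soon as the child's output contains a sanitizer report. Below is a minimal standalone approximation of that behavior using plain subprocess; it is a sketch, not the real yatest.common.process API, and the class and function names are made up:

    # Minimal standalone sketch of the failure path seen in the traceback above.
    # The child's exit code alone is ignored (mirroring check_exit_code=False),
    # but a sanitizer report on stderr still fails the run.
    import re
    import subprocess

    class SanitizerError(RuntimeError):
        """Raised when the child process emitted a sanitizer report."""

    def execute_checking_sanitizers(cmd: list[str]) -> subprocess.CompletedProcess:
        # check=False: a non-zero exit code does not raise by itself.
        res = subprocess.run(cmd, capture_output=True, text=True, check=False)
        # Analogue of verify_sanitize_errors(): any ASan/LSan summary is fatal.
        if re.search(r"SUMMARY: (Address|Leak)Sanitizer", res.stderr):
            raise SanitizerError(
                f"Command {cmd[0]} exited with code {res.returncode} "
                "and left a sanitizer report in stderr"
            )
        return res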
[fail] test_sql_streaming.py::test[suites-GroupByHopPercentile-default.txt] [default-linux-x86_64-release-asan] (15.79s)
(traceback identical to the one above)
E yatest.common.process.ExecutionError: same fqrun invocation as above, work dir yql/test_fqrun_bjxz_nlr, has failed with code 100.
E Errors: AddressSanitizer leak stack identical to the one above. SUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).
Log: [...]/test_sql_streaming.py.test.suites-GroupByHopPercentile-default.txt.log
[fail] test_sql_streaming.py::test[suites-GroupByHopTimeExtractorUnusedColumns-default.txt] [default-linux-x86_64-release-asan] (15.98s)
(traceback identical to the one above)
E yatest.common.process.ExecutionError: same fqrun invocation as above, work dir yql/test_fqrun_3g1be_fs, has failed with code 100.
E Errors: AddressSanitizer leak stack identical to the one above. SUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).
Log: [...]/test_sql_streaming.py.test.suites-GroupByHopTimeExtractorUnusedColumns-default.txt.log
------ [test_sql_streaming.py 1/4] chunk ran 7 tests (total:118.27s - recipes:0.71s test:116.82s recipes:0.63s)
[fail] test_sql_streaming.py::test[suites-GroupByHopWithDataWatermarks-default.txt] [default-linux-x86_64-release-asan] (21.59s)
(traceback identical to the one above)
E yatest.common.process.ExecutionError: same fqrun invocation as above, work dir yql/test_fqrun_73h2kpf9, has failed with code 100.
E Errors: AddressSanitizer leak stack identical to the one above. SUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).
Log: [...]/test_sql_streaming.py.test.suites-GroupByHopWithDataWatermarks-default.txt.log
[fail] test_sql_streaming.py::test[suites-GroupByHoppingWindow-default.txt] [default-linux-x86_64-release-asan] (14.37s)
(traceback identical to the one above)
E yatest.common.process.ExecutionError: same fqrun invocation as above, work dir yql/test_fqrun_y0kfe4hh, has failed with code 100.
E Errors: AddressSanitizer leak stack identical to the one above. SUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).
Log: [...]/test_sql_streaming.py.test.suites-GroupByHoppingWindow-default.txt.log
[fail] test_sql_streaming.py::test[suites-GroupByHoppingWindowByStringKey-default.txt] [default-linux-x86_64-release-asan] (15.60s)
(traceback identical to the one above)
E yatest.common.process.ExecutionError: same fqrun invocation as above, work dir yql/test_fqrun_tb49nxfn, has failed with code 100.
E Errors: AddressSanitizer leak stack identical to the one above. SUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).
Log: [...]/test_sql_streaming.py.test.suites-GroupByHoppingWindowByStringKey-default.txt.log
[fail] test_sql_streaming.py::test[suites-GroupByHoppingWindowExprKey-default.txt] [default-linux-x86_64-release-asan] (15.42s)
(traceback identical to the one above)
E yatest.common.process.ExecutionError: same fqrun invocation as above, work dir yql/test_fqrun_bu4w6mfh, has failed with code 100.
E Errors: AddressSanitizer leak stack identical to the one above. SUMMARY: AddressSanitizer: 423005 byte(s) leaked in 8227 allocation(s).
Log: [...]/test_sql_streaming.py.test.suites-GroupByHoppingWindowExprKey-default.txt.log
[fail] test_sql_streaming.py::test[suites-GroupByHoppingWindowListKey-default.txt] [default-linux-x86_64-release-asan] (14.57s)
(traceback identical to the one above)
E yatest.common.process.ExecutionError: same fqrun invocation as above, work dir yql/test_fqrun_e8o65bfc, has failed with code 100.
E Errors: AddressSanitizer leak stack identical to the one above. SUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).
Log: [...]/test_sql_streaming.py.test.suites-GroupByHoppingWindowListKey-default.txt.log
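Every leak stack in this run bottoms out in CPython's import machinery (__Pyx_Import through PyImport_ImportModuleLevelObject) while sitecustomize executes, i.e. during interpreter startup rather than in the query code under test. One standard way to triage a known one-time startup leak without disabling leak detection altogether is a LeakSanitizer suppressions file. The sketch below is hypothetical (nothing in this log shows the YDB CI doing this); only the two suppression patterns are taken from the stack frames above:

    # Hypothetical triage sketch: run a binary with an LSan suppressions file
    # that mutes the Python-import startup leak reported above, while leaving
    # every other leak report fatal. The file name and target are made up.
    import os
    import subprocess
    import tempfile

    SUPPRESSIONS = """\
    # One-time interpreter-startup allocations reported in the CI log.
    leak:__Pyx_Import
    leak:PyImport_ImportModuleLevelObject
    """

    def run_with_lsan_suppressions(cmd: list[str]) -> int:
        with tempfile.NamedTemporaryFile("w", suffix=".supp", delete=False) as f:
            f.write(SUPPRESSIONS)
            supp_path = f.name
        env = dict(os.environ)
        # LSAN_OPTIONS is read by LeakSanitizer inside the ASan runtime;
        # print_suppressions=1 logs which rules actually matched.
        env["LSAN_OPTIONS"] = f"suppressions={supp_path}:print_suppressions=1"
        return subprocess.run(cmd, env=env, check=False).returncode

The trade-off is the usual one for suppressions: the rules match by frame name, so a genuine leak that happens to pass through the import machinery would be muted too.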
[fail] test_sql_streaming.py::test[suites-GroupByHoppingWindowNoKey-default.txt] [default-linux-x86_64-release-asan] (15.43s)
(traceback identical to the one above)
E yatest.common.process.ExecutionError: same fqrun invocation as above, work dir yql/test_fqrun_ldvpl17o, has failed with code 100.
E Errors: AddressSanitizer leak stack identical to the one above. SUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).
Log: [...]/test_sql_streaming.py.test.suites-GroupByHoppingWindowNoKey-default.txt.log
[fail] test_sql_streaming.py::test[suites-GroupByHoppingWindowPercentile-default.txt] [default-linux-x86_64-release-asan] (15.63s)
(traceback identical to the one above)
E yatest.common.process.ExecutionError: same fqrun invocation as above, work dir yql/test_fqrun_mob1ereq, has failed with code 100.
E Errors: AddressSanitizer leak stack identical to the one above. SUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).
Log: [...]/test_sql_streaming.py.test.suites-GroupByHoppingWindowPercentile-default.txt.log
------ [test_sql_streaming.py 2/4] chunk ran 7 tests (total:117.42s - recipes:0.63s test:115.96s recipes:0.68s)
[fail] test_sql_streaming.py::test[suites-GroupByHoppingWindowTimeExtractorUnusedColumns-default.txt] [default-linux-x86_64-release-asan] (21.58s)
(traceback identical to the one above)
E yatest.common.process.ExecutionError: same fqrun invocation as above, work dir yql/test_fqrun_ty2_xcdg, has failed with code 100.
E Errors: AddressSanitizer leak stack identical to the one above. SUMMARY: AddressSanitizer: 423693 byte(s) leaked in 8242 allocation(s).
Log: [...]/test_sql_streaming.py.test.suites-GroupByHoppingWindowTimeExtractorUnusedColumns-default.txt.log
[fail] test_sql_streaming.py::test[suites-GroupByHoppingWithDataWatermarks-default.txt] [default-linux-x86_64-release-asan] (14.74s)
(traceback identical to the one above)
E yatest.common.process.ExecutionError: same fqrun invocation as above, work dir yql/test_fqrun_pr4y51eg, has failed with code 100.
E Errors: AddressSanitizer leak stack identical to the one above. SUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).
Log: [...]/test_sql_streaming.py.test.suites-GroupByHoppingWithDataWatermarks-default.txt.log
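With this many near-identical failures, the useful per-test signal reduces to the test name and the numbers in its SUMMARY line. A small self-contained sketch for tallying those from a saved log follows; the file name is a placeholder, and the regexes follow the format of the lines above:

    # Helper sketch for digesting a run like this one: pair every "[fail] ..."
    # test name with the AddressSanitizer SUMMARY that follows it.
    import re
    from collections import Counter

    FAIL_RE = re.compile(r"\[fail\] (\S+)")
    SUMMARY_RE = re.compile(
        r"SUMMARY: AddressSanitizer: (\d+) byte\(s\) leaked in (\d+) allocation\(s\)"
    )

    def tally_leaks(log_text: str) -> Counter:
        leaks = Counter()
        current = None
        for line in log_text.splitlines():
            m = FAIL_RE.search(line)
            if m:
                current = m.group(1)
            m = SUMMARY_RE.search(line)
            if m and current:
                leaks[current] = int(m.group(1))  # bytes leaked for that test
        return leaks

    if __name__ == "__main__":
        with open("ya_test.log") as f:  # placeholder path
            for test, nbytes in tally_leaks(f.read()).most_common():
                print(f"{nbytes:>8} bytes leaked  {test}")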
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7fb6037883b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7fb603781ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7fb60300bd6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7fb603109464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7fb603109464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7fb603109464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7fb60300ab64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7fb6030b790a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7fb602fafe0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7fb602fafe0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7fb602fafc50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7fb603106e0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7fb603106e0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadTopic-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-ReadTopicGroupWriteToSolomon-default.txt] [default-linux-x86_64-release-asan] (15.61s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:81: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --fq-cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_m2q4qo7u/fq_config.conf --as-cfg=- --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_m2q4qo7u/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_m2q4qo7u/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_m2q4qo7u/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_m2q4qo7u/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_m2q4qo7u/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_m2q4qo7u/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_m2q4qo7u/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_m2q4qo7u/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_m2q4qo7u/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f8e820583b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f8e82051ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f8e818dbd6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f8e819d9464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f8e819d9464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f8e819d9464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f8e818dab64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f8e8198790a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f8e8187fe0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f8e8187fe0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f8e8187fc50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f8e819d6e0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f8e819d6e0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadTopicGroupWriteToSolomon-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-ReadTopicWithMetadata-default.txt] [default-linux-x86_64-release-asan] (14.06s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:81: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) 
library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --fq-cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_f5h278nt/fq_config.conf --as-cfg=- --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_f5h278nt/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_f5h278nt/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_f5h278nt/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_f5h278nt/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_f5h278nt/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_f5h278nt/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_f5h278nt/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_f5h278nt/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_f5h278nt/topic_3.txt' has failed with code 100. 
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f6cc62e83b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f6cc62e1ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f6cc5b6bd6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f6cc5c69464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f6cc5c69464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f6cc5c69464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f6cc5b6ab64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f6cc5c1790a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f6cc5b0fe0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f6cc5b0fe0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f6cc5b0fc50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f6cc5c66e0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f6cc5c66e0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadTopicWithMetadata-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-ReadTopicWithMetadataInsideFilter-default.txt] [default-linux-x86_64-release-asan] (14.80s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:81: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --fq-cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_nu60fvm2/fq_config.conf --as-cfg=- --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_nu60fvm2/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_nu60fvm2/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_nu60fvm2/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_nu60fvm2/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_nu60fvm2/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_nu60fvm2/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_nu60fvm2/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_nu60fvm2/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_nu60fvm2/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7fde616683b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7fde61661ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7fde60eebd6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7fde60fe9464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7fde60fe9464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7fde60fe9464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7fde60eeab64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7fde60f9790a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7fde60e8fe0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7fde60e8fe0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7fde60e8fc50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7fde60fe6e0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7fde60fe6e0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadTopicWithMetadataInsideFilter-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-ReadTopicWithMetadataNestedDeep-default.txt] [default-linux-x86_64-release-asan] (15.53s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:81: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) 
library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --fq-cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ogo4x_wj/fq_config.conf --as-cfg=- --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ogo4x_wj/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ogo4x_wj/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ogo4x_wj/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ogo4x_wj/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ogo4x_wj/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ogo4x_wj/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ogo4x_wj/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ogo4x_wj/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ogo4x_wj/topic_3.txt' has failed with code 100. 
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7fd4f37983b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7fd4f3791ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7fd4f301bd6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7fd4f3119464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7fd4f3119464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7fd4f3119464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7fd4f301ab64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7fd4f30c790a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7fd4f2fbfe0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7fd4f2fbfe0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7fd4f2fbfc50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7fd4f3116e0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7fd4f3116e0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadTopicWithMetadataNestedDeep-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff ------ [test_sql_streaming.py 3/4] chunk ran 7 tests (total:118.66s - recipes:0.62s test:117.08s recipes:0.64s) [fail] test_sql_streaming.py::test[suites-ReadTopicWithMetadataWithFilter-default.txt] [default-linux-x86_64-release-asan] (23.12s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:81: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --fq-cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_a53039x0/fq_config.conf --as-cfg=- --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_a53039x0/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_a53039x0/ast.txt 
--plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_a53039x0/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_a53039x0/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_a53039x0/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_a53039x0/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_a53039x0/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_a53039x0/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_a53039x0/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f7dc9d983b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f7dc9d91ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f7dc961bd6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f7dc9719464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f7dc9719464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f7dc9719464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f7dc961ab64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f7dc96c790a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f7dc95bfe0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f7dc95bfe0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f7dc95bfc50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f7dc9716e0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f7dc9716e0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadTopicWithMetadataWithFilter-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-ReadTopicWithSchema-default.txt] [default-linux-x86_64-release-asan] (14.18s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:81: 
in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --fq-cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_93evd0cl/fq_config.conf --as-cfg=- --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_93evd0cl/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_93evd0cl/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_93evd0cl/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_93evd0cl/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_93evd0cl/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_93evd0cl/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_93evd0cl/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_93evd0cl/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_93evd0cl/topic_3.txt' has failed with code 100. 
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7efddb1583b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7efddb151ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7efdda9dbd6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7efddaad9464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7efddaad9464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7efddaad9464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7efdda9dab64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7efddaa8790a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7efdda97fe0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7efdda97fe0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7efdda97fc50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7efddaad6e0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7efddaad6e0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadTopicWithSchema-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-ReadTwoTopics-default.txt] [default-linux-x86_64-release-asan] (15.79s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:81: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --fq-cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_hlycso55/fq_config.conf --as-cfg=- --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_hlycso55/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_hlycso55/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_hlycso55/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_hlycso55/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_hlycso55/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_hlycso55/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_hlycso55/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_hlycso55/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_hlycso55/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f433c5b83b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f433c5b1ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f433be3bd6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f433bf39464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f433bf39464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f433bf39464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f433be3ab64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f433bee790a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f433bddfe0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f433bddfe0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f433bddfc50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f433bf36e0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f433bf36e0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadTwoTopics-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-ReadWriteSameTopic-default.txt] [default-linux-x86_64-release-asan] (15.53s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:81: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: 
in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --fq-cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_utrzk6ok/fq_config.conf --as-cfg=- --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_utrzk6ok/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_utrzk6ok/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_utrzk6ok/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_utrzk6ok/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_utrzk6ok/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_utrzk6ok/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_utrzk6ok/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_utrzk6ok/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_utrzk6ok/topic_3.txt' has failed with code 100. 
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f5443fa83b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f5443fa1ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f544382bd6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f5443929464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f5443929464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f5443929464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f544382ab64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f54438d790a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f54437cfe0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f54437cfe0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f54437cfc50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f5443926e0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f5443926e0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadWriteSameTopic-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-ReadWriteTopic-default.txt] [default-linux-x86_64-release-asan] (13.90s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:81: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --fq-cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gq2k57qq/fq_config.conf --as-cfg=- --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gq2k57qq/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gq2k57qq/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gq2k57qq/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gq2k57qq/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gq2k57qq/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gq2k57qq/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gq2k57qq/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gq2k57qq/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gq2k57qq/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7fee37b783b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7fee37b71ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7fee373fbd6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7fee374f9464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7fee374f9464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7fee374f9464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7fee373fab64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7fee374a790a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7fee3739fe0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7fee3739fe0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7fee3739fc50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7fee374f6e0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7fee374f6e0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423552 byte(s) leaked in 8239 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadWriteTopic-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-ReadWriteTopicWithSchema-default.txt] [default-linux-x86_64-release-asan] (14.79s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:81: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) 
library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --fq-cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_z7skoge1/fq_config.conf --as-cfg=- --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_z7skoge1/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_z7skoge1/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_z7skoge1/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_z7skoge1/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_z7skoge1/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_z7skoge1/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_z7skoge1/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_z7skoge1/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_z7skoge1/topic_3.txt' has failed with code 100. 
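Every "E Errors" payload in this section ends with the same stack, __Pyx_Import feeding PyImport_ImportModuleLevelObject, and a leak summary of roughly 423 KB across about 8240 allocations, which points at CPython import machinery rather than at the queries under test. If such a leak were judged benign, LeakSanitizer can be told to ignore it through a suppressions file. The sketch below only illustrates that mechanism; the file contents, the judgment that the leak is benign, and the fqrun invocation are assumptions, not project configuration.

    import os
    import subprocess
    import tempfile

    # Suppression rules match function names in the leaked allocation's stack;
    # these two names are taken from the reports above (assumed suppressible).
    SUPPRESSIONS = "leak:PyImport_ImportModuleLevelObject\nleak:__Pyx_Import\n"

    with tempfile.NamedTemporaryFile("w", suffix=".supp", delete=False) as supp:
        supp.write(SUPPRESSIONS)

    env = dict(
        os.environ,
        # print_suppressions=1 makes LSan report which rules actually fired.
        LSAN_OPTIONS=f"suppressions={supp.name}:print_suppressions=1",
    )
    # Hypothetical invocation; substitute the real fqrun command line from the log.
    if os.path.exists("./fqrun"):
        subprocess.run(["./fqrun", "--action=explain"], env=env, check=False)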
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f9bc34a83b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f9bc34a1ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f9bc2d2bd6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f9bc2e29464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f9bc2e29464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f9bc2e29464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f9bc2d2ab64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f9bc2dd790a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f9bc2ccfe0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f9bc2ccfe0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f9bc2ccfc50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f9bc2e26e0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f9bc2e26e0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadWriteTopicWithSchema-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-WriteTwoTopics-default.txt] [default-linux-x86_64-release-asan] (15.51s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:81: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --fq-cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_2vj7dyy0/fq_config.conf --as-cfg=- --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_2vj7dyy0/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_2vj7dyy0/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_2vj7dyy0/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_2vj7dyy0/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_2vj7dyy0/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_2vj7dyy0/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_2vj7dyy0/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_2vj7dyy0/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_2vj7dyy0/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7fc5a52a83b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7fc5a52a1ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7fc5a4b2bd6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7fc5a4c29464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7fc5a4c29464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7fc5a4c29464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7fc5a4b2ab64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7fc5a4bd790a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7fc5a4acfe0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7fc5a4acfe0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7fc5a4acfc50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7fc5a4c26e0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7fc5a4c26e0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-WriteTwoTopics-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff ------ FAIL: 28 - FAIL ydb/tests/fq/streaming_optimize ydb/tests/functional/serverless [size:medium] nchunks:10 ------ [test_serverless.py 0/10] chunk ran 1 test (total:118.85s - test:118.44s) [fail] test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false] [default-linux-x86_64-release-asan] (111.65s) ydb/tests/functional/serverless/test_serverless.py:450: in test_database_with_disk_quotas 
IOLoop.current().run_sync(lambda: async_write_key(path, 0, 'test', ignore_out_of_space=False)) contrib/python/tornado/tornado-4/tornado/ioloop.py:458: in run_sync return future_cell[0].result() contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info) :4: in raise_exc_info ??? contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info) ydb/tests/functional/serverless/test_serverless.py:347: in wrapped res = yield func(*args, **kwargs) contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result() contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info) :4: in raise_exc_info ??? contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info) ydb/tests/functional/serverless/test_serverless.py:367: in async_write_key yield tx.async_execute( contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result() contrib/tools/python3/Lib/concurrent/futures/_base.py:449: in result return self.__get_result() contrib/tools/python3/Lib/concurrent/futures/_base.py:401: in __get_result raise self._exception contrib/python/ydb/py3/ydb/connection.py:105: in _on_response_callback response = response if wrap_result is None else wrap_result(rpc_state, response, *wrap_args) contrib/python/ydb/py3/ydb/_session_impl.py:20: in decorator return func(rpc_state, response_pb, session_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:9: in decorator return func(rpc_state, response_pb, session_state, tx_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:22: in decorator return func(rpc_state, response_pb, session_state, tx_state, query, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:165: in wrap_result_and_tx_id issues._process_response(response_pb.operation) contrib/python/ydb/py3/ydb/issues.py:225: in _process_response raise exc_obj(_format_response(response_proto), response_proto.issues) E ydb.issues.Unavailable: message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--false_/dirA0/table`." issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050) During handling of the above exception, another exception occurred: ydb/tests/functional/serverless/test_serverless.py:449: in test_database_with_disk_quotas with pytest.raises(ydb.Unavailable, match=r'.*DISK_SPACE_EXHAUSTED.*'): E AssertionError: Regex pattern did not match. E Regex: '.*DISK_SPACE_EXHAUSTED.*' E Input: 'message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--false_/dirA0/table`." 
issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050)' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless.py.test_database_with_disk_quotas.enable_alter_database_create_hive_first--false.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff ------ [test_serverless.py 1/10] chunk ran 1 test (total:114.57s - test:114.13s) [fail] test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true] [default-linux-x86_64-release-asan] (107.75s) ydb/tests/functional/serverless/test_serverless.py:450: in test_database_with_disk_quotas IOLoop.current().run_sync(lambda: async_write_key(path, 0, 'test', ignore_out_of_space=False)) contrib/python/tornado/tornado-4/tornado/ioloop.py:458: in run_sync return future_cell[0].result() contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info) :4: in raise_exc_info ??? contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info) ydb/tests/functional/serverless/test_serverless.py:347: in wrapped res = yield func(*args, **kwargs) contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result() contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info) :4: in raise_exc_info ??? contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info) ydb/tests/functional/serverless/test_serverless.py:367: in async_write_key yield tx.async_execute( contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result() contrib/tools/python3/Lib/concurrent/futures/_base.py:449: in result return self.__get_result() contrib/tools/python3/Lib/concurrent/futures/_base.py:401: in __get_result raise self._exception contrib/python/ydb/py3/ydb/connection.py:105: in _on_response_callback response = response if wrap_result is None else wrap_result(rpc_state, response, *wrap_args) contrib/python/ydb/py3/ydb/_session_impl.py:20: in decorator return func(rpc_state, response_pb, session_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:9: in decorator return func(rpc_state, response_pb, session_state, tx_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:22: in decorator return func(rpc_state, response_pb, session_state, tx_state, query, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:165: in wrap_result_and_tx_id issues._process_response(response_pb.operation) contrib/python/ydb/py3/ydb/issues.py:225: in _process_response raise exc_obj(_format_response(response_proto), response_proto.issues) E ydb.issues.Unavailable: message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--true_/dirA0/table`." issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050) During handling of the above exception, another exception occurred: ydb/tests/functional/serverless/test_serverless.py:449: in test_database_with_disk_quotas with pytest.raises(ydb.Unavailable, match=r'.*DISK_SPACE_EXHAUSTED.*'): E AssertionError: Regex pattern did not match. 
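Both serverless disk-quota failures, including the Regex/Input pair printed next, come down to how pytest.raises applies its match argument: the pattern is run with re.search() against str(exception), so it must match the server's human-readable message rather than the YDB status-code name. A minimal sketch of the distinction, using a stand-in exception class instead of the real ydb.issues.Unavailable:

    import pytest

    class Unavailable(Exception):  # stand-in for ydb.issues.Unavailable
        pass

    def write_row():
        raise Unavailable('message: "Disk space exhausted. Table `...`." issue_code: 2033')

    # Matches: pytest runs re.search(pattern, str(exc)), and the message text
    # really does contain "Disk space exhausted".
    with pytest.raises(Unavailable, match=r"Disk space exhausted"):
        write_row()

    # Does not match, reproducing the "Regex pattern did not match" assertion
    # above: the message never contains the literal token DISK_SPACE_EXHAUSTED.
    # with pytest.raises(Unavailable, match=r".*DISK_SPACE_EXHAUSTED.*"):
    #     write_row()

One plausible fix, assuming the server message is stable, is to match on the message text (or assert on issue_code 2033) instead of the status-code name.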
E Regex: '.*DISK_SPACE_EXHAUSTED.*' E Input: 'message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--true_/dirA0/table`." issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050)' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless.py.test_database_with_disk_quotas.enable_alter_database_create_hive_first--true.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff ------ FAIL: 2 - FAIL ydb/tests/functional/serverless ydb/tests/olap/column_family/compression [size:medium] ------ sole chunk ran 2 tests (total:625.13s - test:600.06s) Chunk exceeded 600s timeout and was killed List of the tests involved in the launch: alter_compression.py::TestAlterCompression::test_all_supported_compression (timeout) duration: 620.24s alter_compression.py::TestAlterCompression::test_availability_data test was not launched inside chunk. Info: Test run has exceeded 8.0G (8388608K) memory limit with 8.1G (8444080K) used. This may lead to test failure on the Autocheck/CI You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make pid rss ref pdirt 970462 44.8M 44.8M 6.5M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 970551 33.8M 22.2M 9.6M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 970565 890M 894M 810M └─ ydb-tests-olap-column_family-compression --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p n 971764 3.5G 3.6G 3.0G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/test 997884 3.5G 3.6G 3.0G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/t Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff/stderr [timeout] alter_compression.py::TestAlterCompression::test_all_supported_compression [default-linux-x86_64-release-asan] (620.24s) Killed by timeout (600 s) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff/alter_compression.py.TestAlterCompression.test_all_supported_compression.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff ------ TIMEOUT: 1 - NOT_LAUNCHED, 1 - TIMEOUT ydb/tests/olap/column_family/compression ydb/tests/olap/oom [size:medium] ------ [overlapping_portions.py] chunk ran 1 test (total:76.58s - test:65.22s) [fail] overlapping_portions.py::TestOverlappingPortions::test [default-linux-x86_64-release-asan] (57.38s) ydb/tests/olap/oom/overlapping_portions.py:103: in test with pytest.raises(ydb.issues.GenericError, 
match=r'.*cannot allocate memory.*'): E Failed: DID NOT RAISE Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/oom/test-results/py3test/testing_out_stuff/overlapping_portions.py.TestOverlappingPortions.test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/oom/test-results/py3test/testing_out_stuff ------ FAIL: 1 - FAIL ydb/tests/olap/oom ydb/tests/olap/s3_import [size:medium] ------ sole chunk ran 1 test (total:626.04s - test:600.03s) Chunk exceeded 600s timeout and was killed List of the tests involved in the launch: test_tpch_import.py::TestS3TpchImport::test_import_and_export (timeout) duration: 621.34s Info: Test run has exceeded 8.0G (8388608K) memory limit with 11.1G (11660912K) used. This may lead to test failure on the Autocheck/CI You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make pid rss ref pdirt 970447 44.8M 44.8M 6.5M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 970523 37.9M 26.4M 13.7M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 970538 774M 777M 693M └─ ydb-tests-olap-s3_import --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --docte 971760 9.8G 9.8G 9.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff/te 974465 534M 0b 0b └─ moto_server s3 --port 15395 Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/s3_import/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff/stderr [timeout] test_tpch_import.py::TestS3TpchImport::test_import_and_export [default-linux-x86_64-release-asan] (621.34s) Killed by timeout (600 s) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff/test_tpch_import.py.TestS3TpchImport.test_import_and_export.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff ------ TIMEOUT: 1 - TIMEOUT ydb/tests/olap/s3_import ydb/tests/olap/scenario [size:medium] ------ sole chunk ran 8 tests (total:622.79s - test:600.08s) Chunk exceeded 600s timeout and was killed List of the tests involved in the launch: test_read_update_write_load.py::TestReadUpdateWriteLoad::test[read_update_write_load] (good) duration: 247.70s test_alter_tiering.py::TestAlterTiering::test[many_tables] (good) duration: 165.13s test_insert.py::TestInsert::test[read_data_during_bulk_upsert] (timeout) duration: 134.70s test_scheme_load.py::TestSchemeLoad::test[create_and_drop_tables] (good) duration: 34.55s test_simple.py::TestSimple::test[tablestores] (good) duration: 19.77s test_simple.py::TestSimple::test[alter_table] (good) duration: 6.80s test_simple.py::TestSimple::test[alter_tablestore] (good) duration: 3.39s test_simple.py::TestSimple::test[table] (good) duration: 2.94s Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff Stderr: 
/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/stderr [timeout] test_insert.py::TestInsert::test[read_data_during_bulk_upsert] [default-linux-x86_64-release-asan] (134.70s) Killed by timeout (600 s) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/test_insert.py.TestInsert.test.read_data_during_bulk_upsert.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff ------ TIMEOUT: 7 - GOOD, 1 - TIMEOUT ydb/tests/olap/scenario ydb/tests/olap/ttl_tiering [size:medium] nchunks:2 ------ [data_migration_when_alter_ttl.py] chunk ran 1 test (total:610.87s - test:600.08s) Chunk exceeded 600s timeout and was killed List of the tests involved in the launch: data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test (timeout) duration: 606.84s Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/stderr [timeout] data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test [default-linux-x86_64-release-asan] (606.84s) Killed by timeout (600 s) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/data_migration_when_alter_ttl.py.TestDataMigrationWhenAlterTtl.test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff ------ [ttl_delete_s3.py] chunk ran 3 tests (total:615.52s - test:600.02s) Chunk exceeded 600s timeout and was killed List of the tests involved in the launch: ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change (fail) duration: 425.43s ttl_delete_s3.py::TestDeleteS3Ttl::test_ttl_delete (timeout) duration: 185.34s ttl_delete_s3.py::TestDeleteS3Ttl::test_delete_s3_tiering test was not launched inside chunk. 
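The ttl_delete_s3 failure detailed next surfaces through the query-service retry helper visible in its traceback (ydb/query/pool.py, execute_with_retries): the scan error is raised as ydb.issues.BadRequest, which the retry loop treats as terminal rather than transient, so it propagates to the test on the first attempt. A minimal sketch of that call path, with placeholder endpoint and table names not taken from this run:

    import ydb

    with ydb.Driver(endpoint="grpc://localhost:2136", database="/Root") as driver:
        driver.wait(timeout=5)
        pool = ydb.QuerySessionPool(driver)
        # execute_with_retries() retries transient statuses; a BadRequest such
        # as the "Snapshot too old" scan failure below surfaces immediately.
        result_sets = pool.execute_with_retries("SELECT COUNT(*) FROM `some_table`")
        print(result_sets[0].rows[0])

Separately, for the memory-limit warnings above (compression and s3_import), the report itself names the knob: raising REQUIREMENTS(ram:X) in the corresponding ya.make, e.g. REQUIREMENTS(ram:16) as a hypothetical value.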
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/stderr [fail] ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change [default-linux-x86_64-release-asan] (425.43s) ydb/tests/olap/ttl_tiering/ttl_delete_s3.py:141: in test_data_unchanged_after_ttl_change data = self.get_aggregated(table_path) ydb/tests/olap/ttl_tiering/ttl_delete_s3.py:27: in get_aggregated answer = self.ydb_client.query(f"SELECT count(*), sum(val), sum(Digest::Fnv32(s)) from `{table_path}`") ydb/tests/olap/common/ydb_client.py:24: in query return self.session_pool.execute_with_retries(statement) contrib/python/ydb/py3/ydb/query/pool.py:202: in execute_with_retries return retry_operation_sync(wrapped_callee, retry_settings) contrib/python/ydb/py3/ydb/retries.py:133: in retry_operation_sync for next_opt in opt_generator: contrib/python/ydb/py3/ydb/retries.py:94: in retry_operation_impl result = YdbRetryOperationFinalResult(callee(*args, **kwargs)) contrib/python/ydb/py3/ydb/query/pool.py:200: in wrapped_callee return [result_set for result_set in it] contrib/python/ydb/py3/ydb/_utilities.py:173: in __next__ return self._next() contrib/python/ydb/py3/ydb/_utilities.py:164: in _next res = self.wrapper(next(self.it)) contrib/python/ydb/py3/ydb/query/session.py:350: in lambda resp: base.wrap_execute_query_response( contrib/python/ydb/py3/ydb/query/base.py:172: in decorator return func(rpc_state, response_pb, session_state, *args, **kwargs) contrib/python/ydb/py3/ydb/query/base.py:189: in wrap_execute_query_response issues._process_response(response_pb) contrib/python/ydb/py3/ydb/issues.py:225: in _process_response raise exc_obj(_format_response(response_proto), response_proto.issues) E ydb.issues.BadRequest: message: "Table /Root/test_data_unchanged_after_ttl_change/table (shard 72075186224037939) scan failed, reason: cannot build metadata/Snapshot too old: {1743441508000:max}. CS min read snapshot: {1743441509000:max}. 
now: 2025-03-31T17:18:34.016574Z" issue_code: 2017 severity: 1 (server_code: 400010) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/ttl_delete_s3.py.TestDeleteS3Ttl.test_data_unchanged_after_ttl_change.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff [timeout] ttl_delete_s3.py::TestDeleteS3Ttl::test_ttl_delete [default-linux-x86_64-release-asan] (185.34s) Killed by timeout (600 s) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/ttl_delete_s3.py.TestDeleteS3Ttl.test_ttl_delete.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff ------ TIMEOUT: 1 - FAIL, 1 - NOT_LAUNCHED, 2 - TIMEOUT ydb/tests/olap/ttl_tiering ydb/tests/stress/log/tests [size:medium] ------ sole chunk ran 1 test (total:155.28s - test:155.23s) [fail] test_workload.py::TestYdbLogWorkload::test[column] [default-linux-x86_64-release-asan] (150.06s) teardown failed: ydb/tests/stress/log/tests/test_workload.py:41: in teardown_class cls.cluster.stop() ydb/tests/library/harness/kikimr_runner.py:494: in stop raise daemon.SeveralDaemonErrors(saved_exceptions) E ydb.tests.library.harness.daemon.SeveralDaemonErrors: Daemon failed with message: Bad exit_code.. E Process exit_code = 100. E Stdout file name: E /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/log/tests/test-results/py3test/testing_out_stuff/test_workload.py.TestYdbLogWorkload.test.column/cluster/node_1/stdout E Stderr file name: E /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/log/tests/test-results/py3test/testing_out_stuff/test_workload.py.TestYdbLogWorkload.test.column/cluster/node_1/stderr E Stderr content: E E GRpc memory quota was set but disabled due to issues with grpc quoter, to enable it use EnableGRpcMemoryQuota option E Current KQP shutdown state: spent 0 seconds, not started yet E warning: address range table at offset 0x10c0 has a premature terminator entry at offset 0x10d0 E E ================================================================= E ==977473==ERROR: LeakSanitizer: detected memory leaks E E Indirect leak of 15168 byte(s) in 4 object(s) allocated from: E #0 0x1d6b812d in operator new(unsigned long) /-S/contrib/libs/clang18-rt/lib/asan/asan_new_delete.cpp:86:3 E #1 0x468efbcf in MakeIntrusive, NYql::TTypeAnnotationContext &> /-S/util/generic/ptr.h:818:12 E #2 0x468efbcf in NYql::TYtState::TYtState(NYql::TTypeAnnotationContext*) /-S/yt/yql/providers/yt/provider/yql_yt_provider.h:102:25 E #3 0x468e2099 in MakeIntrusive, NYql::TTypeAnnotationContext *> /-S/util/generic/ptr.h:818:16 E #4 0x468e2099 in NYql::CreateYtNativeState(TIntrusivePtr>, TBasicString> const&, TBasicString> const&, NYql::TYtGatewayConfig const*, TIntrusivePtr>, std::__y1::shared_ptr const&, std::__y1::shared_ptr const&) /-S/yt/yql/providers/yt/pro ..[snippet truncated].. 
pp_allocate /-S/contrib/libs/cxxsupp/libcxx/include/new:289:10 E #3 0x466d2866 in allocate /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator.h:118:32 E #4 0x466d2866 in initialize_dynamic /-S/util/generic/hash_table.h:239:35 E #5 0x466d2866 in initialize_buckets_dynamic /-S/util/generic/hash_table.h:912:17 E #6 0x466d2866 in THashTable> const, TIntrusivePtr>>, TBasicString>, THash>>, TSelect1st, TEqualTo>>, std::__y1::allocator>>>::reserve(unsigned long) /-S/util/generic/hash_table.h:1330:13 E #7 0x466bb75b in insert_unique >, TIntrusivePtr > > > /-S/util/generic/hash_table.h:679:9 E #8 0x466bb75b in insert /-S/util/generic/hash.h:153:20 E #9 0x466bb75b in NYql::NCommon::TSettingDispatcher::TSettingHandlerImpl& NYql::NCommon::TSettingDispatcher::AddSetting(TBasicString> const&, NYql::NCommon::TConfSetting&) /-S/yql/essentials/providers/common/config/yql_dispatch.h:344:23 E #10 0x4669ee09 in NYql::TYtConfiguration::TYtConfiguration(NYql::TTypeAnnotationContext&) /-S/yt/yql/providers/yt/common/yql_yt_settings.cpp:466:5 E #11 0x468efbe0 in TYtVersionedConfiguration /-S/yt/yql/providers/yt/common/yql_yt_settings.h:372:11 E #12 0x468efbe0 in MakeIntrusive, NYql::TTypeAnnotationContext &> /-S/util/generic/ptr.h:818:16 E #13 0x468efbe0 in NYql::TYtState::TYtState(NYql::TTypeAnnotationContext*) /-S/yt/yql/providers/yt/provider/yql_yt_provider.h:102:25 E #14 0x468e2099 in MakeIntrusive, NYql::TTypeAnnotationContext *> /-S/util/generic/ptr.h:818:16 E #15 0x468e2099 in NYql::CreateYtNativeState(TIntrusivePtr>, TBasicString> co... Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/log/tests/test-results/py3test/testing_out_stuff/test_workload.py.TestYdbLogWorkload.test.column.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/log/tests/test-results/py3test/testing_out_stuff ------ FAIL: 1 - FAIL ydb/tests/stress/log/tests ydb/core/keyvalue/ut_trace [size:medium] nchunks:5 ------ [0/5] chunk ran 1 test (total:14.64s - setup:0.01s test:14.61s) [fail] TKeyValueTracingTest::ReadHuge [default-linux-x86_64-release-asan] (2.40s) equal assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:124, void TestOneRead(TString, TString): env.WilsonUploader->Traces.size() == 1 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 operator() at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? 
at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.ReadHuge.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.ReadHuge.out ------ [1/5] chunk ran 1 test (total:14.45s - test:14.41s) [fail] TKeyValueTracingTest::ReadSmall [default-linux-x86_64-release-asan] (2.39s) equal assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:124, void TestOneRead(TString, TString): env.WilsonUploader->Traces.size() == 1 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 operator() at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.ReadSmall.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.ReadSmall.out ------ [2/5] chunk ran 1 test (total:14.65s - test:14.62s) [fail] TKeyValueTracingTest::WriteHuge [default-linux-x86_64-release-asan] (2.39s) assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:103, void TestOneWrite(TString, TVector &&): (env.WilsonUploader->Traces.size() == 1) failed: (2 != 1) TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 TestOneWrite(TBasicString>, TVector>, std::__y1::allocator>>>&&) at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:103:5 UnRef at /-S/util/generic/ptr.h:624:13 operator() at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? 
at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.WriteHuge.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.WriteHuge.out ------ [3/5] chunk ran 1 test (total:14.35s - test:14.32s) [fail] TKeyValueTracingTest::WriteSmall [default-linux-x86_64-release-asan] (2.46s) assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:103, void TestOneWrite(TString, TVector &&): (env.WilsonUploader->Traces.size() == 1) failed: (2 != 1) TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 TestOneWrite(TBasicString>, TVector>, std::__y1::allocator>>>&&) at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:103:5 UnRef at /-S/util/generic/ptr.h:624:13 operator() at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.WriteSmall.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.WriteSmall.out ------ FAIL: 4 - FAIL ydb/core/keyvalue/ut_trace ydb/core/kqp/ut/cost [size:medium] nchunks:50 ------ [0/50] chunk ran 1 test (total:19.45s - test:19.42s) [crashed] KqpCost::OlapWriteRow [default-linux-x86_64-release-asan] (0.00s) Test crashed (return code: 100) ==965282==ERROR: AddressSanitizer: SEGV on unknown address 0x000000000008 (pc 0x000018b718ad bp 0x7fffdad8b020 sp 0x7fffdad8ae80 T0) ==965282==The signal is caused by a READ memory access. ==965282==Hint: address points to the zero page. 
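The OlapWriteRow crash report that continues below points, via frames #0-#4, at indexing entry 0 of the repeated table_access field of the query-stats proto; a read at address 0x000000000008 is consistent with an unchecked Get(0) on an empty repeated field, since protobuf's release-build C++ accessor skips the bounds DCHECK. A hypothetical guard in the same spirit, written as a Python sketch (the attribute name table_access is taken from the stack; the rest is illustrative):

    def first_table_access(query_phase):
        # Python protobuf raises IndexError on out-of-range access, while the
        # C++ release-build Get() is unchecked; guard the empty case explicitly.
        if len(query_phase.table_access) == 0:
            return None
        return query_phase.table_access[0]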
2025-03-31T17:11:49.412496Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488022527486992366:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:11:49.412659Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T17:11:59.692819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-31T17:11:59.692863Z node 1 :IMPORT WARN: Table profiles were not loaded #0 0x18b718ad in Get::TypeHandler> /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:273:31 #1 0x18b718ad in Get /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:1348:32 #2 0x18b718ad in _internal_table_access /-B/ydb/public/api/protos/ydb_query_stats.pb.h:1762:31 #3 0x18b718ad in table_access /-B/ydb/public/api/protos/ydb_query_stats.pb.h:1766:10 #4 0x18b718ad in NKikimr::NKqp::NTestSuiteKqpCost::TTestCaseOlapWriteRow::Execute_(NUnitTest::TTestContext&) /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:636:13 #5 0x18b96437 in operator() /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1 #6 0x18b96437 in __invoke<(lambda at /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25 #7 0x18b96437 in __call<(lambda at /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1) &> ..[snippet truncated].. 0x194e85e5 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12 #11 0x194e85e5 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10 #12 0x194e85e5 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #13 0x194b8138 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18 #14 0x18b952e3 in NKikimr::NKqp::NTestSuiteKqpCost::TCurrentTest::Execute() /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1 #15 0x194b9a05 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:495:19 #16 0x194e2b5c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44 #17 0x7fa74832fd8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) #18 0x7fa74832fe3f in __libc_start_main (/lib/x86_64-linux-gnu/libc.so.6+0x29e3f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) #19 0x16306028 in _start (/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost+0x16306028) (BuildId: 5128b0b237a145c7e4cbd2034645258e10874b06) SUMMARY: AddressSanitizer: SEGV /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:273:31 in Get::TypeHandler> ==965282==ABORTING Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/cost/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/cost/test-results/unittest/testing_out_stuff/KqpCost.OlapWriteRow.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/cost/test-results/unittest/testing_out_stuff/KqpCost.OlapWriteRow.out ------ FAIL: 1 - CRASHED ydb/core/kqp/ut/cost ydb/core/kqp/ut/query [size:medium] nchunks:50 ------ [1/50] chunk ran 1 test (total:55.87s - test:55.83s) [fail] KqpStats::SysViewClientLost [default-linux-x86_64-release-asan] (51.20s) assertion failed at 
ydb/core/kqp/ut/query/kqp_stats_ut.cpp:591, virtual void NKikimr::NKqp::NTestSuiteKqpStats::TTestCaseSysViewClientLost::Execute_(NUnitTest::TTestContext &): (timeoutedCount == 1) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x194EA4DB 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x199AF40F 2. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:591: Execute_ @ 0x1908FA48 3. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18: operator() @ 0x190A2A57 4. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18:1) &> @ 0x190A2A57 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18:1) &> @ 0x190A2A57 6. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x190A2A57 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x190A2A57 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x199E6435 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x199E6435 10. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x199E6435 11. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x199B5F88 12. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18: Execute @ 0x190A1BDB 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x199B7855 14. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x199E09AC 15. ??:0: ?? @ 0x7F42CD8AFD8F 16. ??:0: ?? @ 0x7F42CD8AFE3F 17. ??:0: ?? @ 0x16459028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff/KqpStats.SysViewClientLost.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff/KqpStats.SysViewClientLost.out ------ FAIL: 1 - GOOD, 1 - FAIL ydb/core/kqp/ut/query ydb/core/kqp/ut/tx [size:medium] nchunks:50 ------ [0/50] chunk ran 1 test (total:22.81s - test:22.75s) [fail] KqpSinkTx::OlapInvalidateOnError [default-linux-x86_64-release-asan] (17.51s) assertion failed at ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:182, virtual void NKikimr::NKqp::NTestSuiteKqpSinkTx::TInvalidateOnError::DoExecute(): (result.GetStatus() == EStatus::PRECONDITION_FAILED) failed: (BAD_REQUEST != PRECONDITION_FAILED)
: Error: Bad request. Table: `/Root/KV`., code: 2017
: Error: Conflict with existing key. {"sorting_columns":[{"name":"Key","value":"1"}],"fields":["Key: uint32"]}, code: 2017 , with diff: (BAD_|P)RE(QUES|CONDI)T(|ION_FAILED) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191FE23B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x196C634F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:182: DoExecute @ 0x18D9B8AE 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18D0BACA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:201: Execute_ @ 0x18D7A4FA 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14: operator() @ 0x18D81977 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14:1) &> @ 0x18D81977 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14:1) &> @ 0x18D81977 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18D81977 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18D81977 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196FD375 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196FD375 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196FD375 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x196CCEC8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14: Execute @ 0x18D80B43 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x196CE795 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196F78EC 17. ??:0: ?? @ 0x7F9607773D8F 18. ??:0: ?? @ 0x7F9607773E3F 19. ??:0: ?? @ 0x163B4028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSinkTx.OlapInvalidateOnError.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSinkTx.OlapInvalidateOnError.out ------ [1/50] chunk ran 1 test (total:21.30s - test:21.20s) [fail] KqpSnapshotIsolation::TConflictReadWriteOlap [default-linux-x86_64-release-asan] (16.07s) assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:146, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictReadWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (ABORTED != SUCCESS)
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: tablet lock have another internal generation counter: 18446744073709551615 != 0, code: 2001 , with diff: (ABORT|SUCC)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191FE23B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x196C634F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:146: DoExecute @ 0x18DD15F8 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18D0BACA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:172: Execute_ @ 0x18DB8CCA 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18DBEF27 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18DBEF27 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18DBEF27 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196FD375 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196FD375 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196FD375 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x196CCEC8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18DBE0F3 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x196CE795 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196F78EC 17. ??:0: ?? @ 0x7FF18CFEAD8F 18. ??:0: ?? @ 0x7FF18CFEAE3F 19. ??:0: ?? @ 0x163B4028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictReadWriteOlap.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictReadWriteOlap.out ------ [10/50] chunk ran 1 test (total:14.83s - test:14.80s) [fail] KqpSnapshotIsolation::TSimpleOltpNoSink [default-linux-x86_64-release-asan] (10.47s) assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TSimple::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191FE23B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x196C634F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25: DoExecute @ 0x18DC0DB7 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18D0BACA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:57: Execute_ @ 0x18DB7D9A 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18DBEF27 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18DBEF27 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18DBEF27 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196FD375 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196FD375 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196FD375 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x196CCEC8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18DBE0F3 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x196CE795 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196F78EC 17. ??:0: ?? @ 0x7FE77CBA3D8F 18. ??:0: ?? @ 0x7FE77CBA3E3F 19. ??:0: ?? @ 0x163B4028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TSimpleOltpNoSink.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TSimpleOltpNoSink.out ------ [2/50] chunk ran 1 test (total:15.89s - test:15.85s) [fail] KqpSnapshotIsolation::TConflictReadWriteOltp [default-linux-x86_64-release-asan] (11.41s) assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictReadWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191FE23B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x196C634F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131: DoExecute @ 0x18DCECE7 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18D0BACA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:159: Execute_ @ 0x18DB8872 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18DBEF27 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18DBEF27 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18DBEF27 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196FD375 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196FD375 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196FD375 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x196CCEC8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18DBE0F3 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x196CE795 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196F78EC 17. ??:0: ?? @ 0x7F98B8C8BD8F 18. ??:0: ?? @ 0x7F98B8C8BE3F 19. ??:0: ?? @ 0x163B4028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictReadWriteOltp.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictReadWriteOltp.out ------ [3/50] chunk ran 1 test (total:13.60s - test:13.57s) [fail] KqpSnapshotIsolation::TConflictReadWriteOltpNoSink [default-linux-x86_64-release-asan] (9.47s) assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictReadWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191FE23B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x196C634F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131: DoExecute @ 0x18DCECE7 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18D0BACA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:166: Execute_ @ 0x18DB8A9A 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18DBEF27 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18DBEF27 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18DBEF27 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196FD375 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196FD375 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196FD375 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x196CCEC8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18DBE0F3 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x196CE795 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196F78EC 17. ??:0: ?? @ 0x7F32D6EF5D8F 18. ??:0: ?? @ 0x7F32D6EF5E3F 19. ??:0: ?? @ 0x163B4028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictReadWriteOltpNoSink.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictReadWriteOltpNoSink.out ------ [4/50] chunk ran 1 test (total:25.07s - test:24.97s) [fail] KqpSnapshotIsolation::TConflictWriteOlap [default-linux-x86_64-release-asan] (20.37s) assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:92, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictWrite::DoExecute(): (result.GetStatus() == EStatus::ABORTED) failed: (SUCCESS != ABORTED) , with diff: (SUCC|ABORT)E(SS|D) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191FE23B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x196C634F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:92: DoExecute @ 0x18DC9C58 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18D0BACA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:118: Execute_ @ 0x18DB864A 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18DBEF27 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18DBEF27 9. 
/-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18DBEF27 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196FD375 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196FD375 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196FD375 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x196CCEC8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18DBE0F3 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x196CE795 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196F78EC 17. ??:0: ?? @ 0x7F1DF2EFDD8F 18. ??:0: ?? @ 0x7F1DF2EFDE3F 19. ??:0: ?? @ 0x163B4028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictWriteOlap.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictWriteOlap.out ------ [5/50] chunk ran 1 test (total:15.12s - test:15.08s) [fail] KqpSnapshotIsolation::TConflictWriteOltp [default-linux-x86_64-release-asan] (10.71s) assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191FE23B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x196C634F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76: DoExecute @ 0x18DC7347 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18D0BACA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:105: Execute_ @ 0x18DB81F2 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18DBEF27 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18DBEF27 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18DBEF27 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196FD375 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196FD375 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196FD375 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x196CCEC8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18DBE0F3 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x196CE795 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196F78EC 17. ??:0: ?? @ 0x7FD1BF974D8F 18. ??:0: ?? @ 0x7FD1BF974E3F 19. ??:0: ?? @ 0x163B4028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictWriteOltp.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictWriteOltp.out ------ [6/50] chunk ran 1 test (total:15.84s - test:15.80s) [fail] KqpSnapshotIsolation::TConflictWriteOltpNoSink [default-linux-x86_64-release-asan] (11.11s) assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191FE23B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x196C634F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76: DoExecute @ 0x18DC7347 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18D0BACA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:112: Execute_ @ 0x18DB841A 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18DBEF27 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18DBEF27 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18DBEF27 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196FD375 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196FD375 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196FD375 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x196CCEC8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18DBE0F3 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x196CE795 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196F78EC 17. ??:0: ?? @ 0x7F509126CD8F 18. ??:0: ?? @ 0x7F509126CE3F 19. ??:0: ?? @ 0x163B4028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictWriteOltpNoSink.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictWriteOltpNoSink.out ------ [7/50] chunk ran 1 test (total:17.15s - test:17.11s) [fail] KqpSnapshotIsolation::TReadOnlyOltp [default-linux-x86_64-release-asan] (11.97s) assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TReadOnly::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191FE23B
1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x196C634F
2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185: DoExecute @ 0x18DD66A3
3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18D0BACA
4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:214: Execute_ @ 0x18DB8EF2
5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18DBEF27
6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27
7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27
8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18DBEF27
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18DBEF27
10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196FD375
11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196FD375
12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196FD375
13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x196CCEC8
14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18DBE0F3
15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x196CE795
16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196F78EC
17. ??:0: ?? @ 0x7F8D2B72CD8F
18. ??:0: ?? @ 0x7F8D2B72CE3F
19. ??:0: ?? @ 0x163B4028
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TReadOnlyOltp.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TReadOnlyOltp.out
------ [8/50] chunk ran 1 test (total:15.49s - test:15.46s)
[fail] KqpSnapshotIsolation::TReadOnlyOltpNoSink [default-linux-x86_64-release-asan] (11.17s)
assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TReadOnly::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191FE23B
1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x196C634F
2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185: DoExecute @ 0x18DD66A3
3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18D0BACA
4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:221: Execute_ @ 0x18DB911A
5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18DBEF27
6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27
7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27
8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18DBEF27
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18DBEF27
10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196FD375
11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196FD375
12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196FD375
13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x196CCEC8
14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18DBE0F3
15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x196CE795
16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196F78EC
17. ??:0: ?? @ 0x7F4FA8261D8F
18. ??:0: ?? @ 0x7F4FA8261E3F
19. ??:0: ?? @ 0x163B4028
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TReadOnlyOltpNoSink.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TReadOnlyOltpNoSink.out
------ [9/50] chunk ran 1 test (total:15.92s - test:15.88s)
[fail] KqpSnapshotIsolation::TSimpleOltp [default-linux-x86_64-release-asan] (11.27s)
assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TSimple::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191FE23B
1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x196C634F
2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25: DoExecute @ 0x18DC0DB7
3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18D0BACA
4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:50: Execute_ @ 0x18DB7B72
5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18DBEF27
6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27
7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27
8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18DBEF27
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18DBEF27
10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196FD375
11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196FD375
12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196FD375
13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x196CCEC8
14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18DBE0F3
15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x196CE795
16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196F78EC
17. ??:0: ?? @ 0x7F6298CFAD8F
18. ??:0: ?? @ 0x7F6298CFAE3F
19. ??:0: ?? @ 0x163B4028
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TSimpleOltp.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TSimpleOltp.out
------ FAIL: 11 - FAIL ydb/core/kqp/ut/tx
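All three KqpSnapshotIsolation failures above share one shape: the test runs a transaction with SnapshotRW isolation against a row (OLTP) table, the server rejects it with PRECONDITION_FAILED ("SnapshotRW can only be used with olap tables"), and the status assertion trips. A minimal sketch of that assertion pattern follows, assuming only the library/cpp/testing/unittest macros named in the traces; TResult and EStatus here are hypothetical stand-ins for the SDK types, and the real tests likely use the value-printing UNIT_ASSERT_VALUES_EQUAL_C variant, which is what renders the "(PRECONDITION_FAILED != SUCCESS)" diff.

    // Hypothetical reduction of the failing check, not the actual test source.
    #include <library/cpp/testing/unittest/registar.h>

    namespace {
        enum class EStatus { SUCCESS, PRECONDITION_FAILED };

        struct TResult {
            EStatus Status = EStatus::PRECONDITION_FAILED; // what the server answered
            EStatus GetStatus() const { return Status; }
        };
    }

    Y_UNIT_TEST_SUITE(SnapshotIsolationSketch) {
        Y_UNIT_TEST(StatusMustBeSuccess) {
            // In the real test the result comes from running a query inside a
            // SnapshotRW transaction; on a row table the server refuses it.
            TResult result;
            UNIT_ASSERT_C(result.GetStatus() == EStatus::SUCCESS,
                          "SnapshotRW can only be used with olap tables.");
        }
    }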
ydb/core/statistics/aggregator/ut [size:medium] nchunks:60
------ [0/60] chunk ran 1 test (total:458.47s - test:458.44s)
[fail] AnalyzeColumnshard::AnalyzeRebootColumnShard [default-linux-x86_64-release-asan] (453.58s)
(TWithBackTrace) ydb/library/actors/testlib/test_runtime.h:579: Exception occured while waiting for NKikimr::NStat::TEvStatistics::TEvAnalyzeResponse: (NActors::TSchedulingLimitReachedException) TestActorRuntime Processed over 100000 events.ydb/library/actors/testlib/test_runtime.cpp:716:
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
TWithBackTrace::TWithBackTrace<>() at /-S/util/generic/yexception.h:146:5
NKikimr::NStat::TEvStatistics::TEvAnalyzeResponse::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEventRethrow(NActors::TActorId const&, TDuration) at /-S/ydb/library/actors/testlib/test_runtime.h:0:24
DoDestroy at /-S/util/generic/ptr.h:237:13
operator() at /-S/ydb/core/statistics/aggregator/ut/ut_analyze_columnshard.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/aggregator/ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/aggregator/ut/test-results/unittest/testing_out_stuff/AnalyzeColumnshard.AnalyzeRebootColumnShard.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/aggregator/ut/test-results/unittest/testing_out_stuff/AnalyzeColumnshard.AnalyzeRebootColumnShard.out
------ FAIL: 1 - FAIL ydb/core/statistics/aggregator/ut
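The AnalyzeRebootColumnShard failure above is different in kind: no assertion fired. The wait for TEvAnalyzeResponse exhausted the test runtime's event budget, and GrabEdgeEventRethrow surfaced that as TSchedulingLimitReachedException ("Processed over 100000 events"), ending the test with a backtrace instead of an open-ended hang. Below is a self-contained concept sketch of such a bounded dispatch loop, written in plain C++ rather than the NActors test-runtime API (only the limit value is copied from the log):

    // Concept sketch only: models why a bounded test event loop throws instead
    // of hanging. TTestActorRuntime applies the same idea with a 100000-event budget.
    #include <cstdio>
    #include <deque>
    #include <functional>
    #include <stdexcept>

    struct TSchedulingLimitReached : std::runtime_error {
        using std::runtime_error::runtime_error;
    };

    int main() {
        std::deque<std::function<void()>> queue;
        // A pathological actor that keeps mailing itself: the awaited "response"
        // never becomes ready, just like the stuck ANALYZE exchange above.
        std::function<void()> ping = [&] { queue.push_back(ping); };
        queue.push_back(ping);

        const int limit = 100000; // event budget, as reported in the log
        int dispatched = 0;
        try {
            while (!queue.empty()) {
                if (++dispatched > limit) {
                    throw TSchedulingLimitReached("Processed over 100000 events");
                }
                auto ev = queue.front();
                queue.pop_front();
                ev(); // dispatch one event (which re-enqueues another)
            }
        } catch (const TSchedulingLimitReached& e) {
            std::printf("test fails fast: %s\n", e.what());
        }
        return 0;
    }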
ydb/core/tx/datashard/ut_incremental_backup [size:medium] nchunks:4
------ [0/4] chunk ran 1 test (total:156.11s - test:156.04s)
[fail] IncrementalBackup::ComplexRestoreBackupCollection+WithIncremental [default-linux-x86_64-release-asan] (138.84s)
assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (TIMEOUT != SUCCESS)
Response { QueryIssues { message: "Request timeout 600000ms exceeded" severity: 1 } QueryIssues { message: "Cancelling after 600000ms in ExecuteState" severity: 1 } TxMeta { } } YdbStatus: TIMEOUT , with diff: (TIM|SUCC)E(OUT|SS)
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12
NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode) at /-S/ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:0:5
UnRef at /-S/util/generic/ptr.h:624:13
operator() at /-S/ydb/core/tx/datashard/datashard_ut_incremental_backup.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/testing_out_stuff/IncrementalBackup.ComplexRestoreBackupCollection.WithIncremental.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/testing_out_stuff/IncrementalBackup.ComplexRestoreBackupCollection.WithIncremental.out
------ FAIL: 1 - FAIL ydb/core/tx/datashard/ut_incremental_backup
ydb/core/tx/datashard/ut_volatile [size:medium] nchunks:2
------ [0/2] chunk ran 1 test (total:24.71s - test:24.66s)
[crashed] DataShardVolatile::GracefulShardRestartNoEarlyReadSetAck [default-linux-x86_64-release-asan] (7.20s)
Test crashed (return code: 100)
==961799==ERROR: LeakSanitizer: detected memory leaks
Direct leak of 224 byte(s) in 1 object(s) allocated from:
    #0 0x1903ffcd in operator new(unsigned long) /-S/contrib/libs/clang18-rt/lib/asan/asan_new_delete.cpp:86:3
    #1 0x2b08ca70 in make_unique /-S/contrib/libs/cxxsupp/libcxx/include/__memory/unique_ptr.h:621:26
    #2 0x2b08ca70 in NKikimr::NEvents::TDataEvents::TEvWriteResult::BuildCompleted(unsigned long, unsigned long) /-S/ydb/core/tx/data_events/events.h:123:27
    #3 0x2b086026 in NKikimr::NDataShard::TExecuteWriteUnit::Execute(TIntrusivePtr>, NKikimr::NTabletFlatExecutor::TTransactionContext&, NActors::TActorContext const&) /-S/ydb/core/tx/datashard/execute_write_unit.cpp:376:37
    #4 0x2aedef1c in NKikimr::NDataShard::TPipeline::RunExecutionPlan(TIntrusivePtr>, TVector>&, NKikimr::NTabletFlatExecutor::TTransactionContext&, NActors::TActorContext const&) /-S/ydb/core/tx/datashard/datashard_pipeline.cpp:1851:28
    #5 0x2ab9fc94 in NKikimr::NDataShard::TDataShard::TTxWrite::Execute(NKikimr::NTabletFlatExecutor::TTransactionContext&, NActors::TActorContext const&) /-S/ydb/core/tx/datashard/datashard__write.cpp:100:38
    #6 0x2062a8d4 in NKikimr::NTabletFlatExecutor::TExecutor::ExecuteTransaction(NKikimr::NTabletFlatExecutor::TSeat*) /-S/ydb/core/tablet_flat/flat_executor.cpp:1910:35
    #7 0x20626274 in NKikimr::NTabletFlatExecutor: ..[snippet truncated].. in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12
    #27 0x18de9c37 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10
    #28 0x198001f5 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12
    #29 0x198001f5 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10
    #30 0x198001f5 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20
    #31 0x197cfd48 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18
    #32 0x18de8ae3 in NKikimr::NTestSuiteDataShardVolatile::TCurrentTest::Execute() /-S/ydb/core/tx/datashard/datashard_ut_volatile.cpp:17:1
    #33 0x197d1615 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:495:19
    #34 0x197fa76c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44
    #35 0x7fdb746a9d8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e)
SUMMARY: AddressSanitizer: 664 byte(s) leaked in 11 allocation(s).
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_volatile/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_volatile/test-results/unittest/testing_out_stuff/DataShardVolatile.GracefulShardRestartNoEarlyReadSetAck.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_volatile/test-results/unittest/testing_out_stuff/DataShardVolatile.GracefulShardRestartNoEarlyReadSetAck.out
------ FAIL: 1 - CRASHED ydb/core/tx/datashard/ut_volatile
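The GracefulShardRestartNoEarlyReadSetAck crash above is LeakSanitizer, which ships with the --sanitize=address build, failing the binary at exit: each "Direct leak" block is one allocation stack that was never freed, and the non-zero return code (100) is what marks the test as crashed. A minimal standalone program that reproduces the shape of such a report, assuming a clang toolchain:

    // leak.cpp: minimal reproduction of the LeakSanitizer report format above.
    // Build and run:  clang++ -fsanitize=address -g leak.cpp -o leak && ./leak
    #include <cstring>

    int main() {
        // 224 bytes allocated and deliberately never freed, mirroring the
        // "Direct leak of 224 byte(s) in 1 object(s)" line in the log.
        char* p = new char[224];
        std::memset(p, 1, 224); // touch the buffer so the allocation is observable
        return 0;               // at exit LSan prints the allocation stack and fails the run
    }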
ydb/core/tx/tiering/ut [size:medium] nchunks:60
------ [0/60] chunk ran 1 test (total:43.98s - test:43.93s)
[crashed] ColumnShardTiers::TTLUsage [default-linux-x86_64-release-asan] (0.00s)
Test crashed (return code: -6)
See logs for more info
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tiering/ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tiering/ut/test-results/unittest/testing_out_stuff/ColumnShardTiers.TTLUsage.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tiering/ut/test-results/unittest/testing_out_stuff/ColumnShardTiers.TTLUsage.out
------ FAIL: 1 - CRASHED ydb/core/tx/tiering/ut
ydb/core/viewer/ut [size:medium] nchunks:10
------ [1/10] chunk ran 1 test (total:41.98s - test:41.93s)
[fail] Viewer::QueryExecuteScript [default-linux-x86_64-release-asan] (10.23s)
assertion failed at ydb/core/viewer/viewer_ut.cpp:1948, virtual void NTestSuiteViewer::TTestCaseQueryExecuteScript::Execute_(NUnitTest::TTestContext &): (json.GetMap().contains("metadata")) {}
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12
NTestSuiteViewer::TTestCaseQueryExecuteScript::Execute_(NUnitTest::TTestContext&) at /-S/ydb/core/viewer/viewer_ut.cpp:0:9
operator() at /-S/ydb/core/viewer/viewer_ut.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff/Viewer.QueryExecuteScript.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff/Viewer.QueryExecuteScript.out
------ FAIL: 1 - GOOD, 1 - FAIL ydb/core/viewer/ut
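Viewer::QueryExecuteScript fails one layer above transport: the request itself completes, but the parsed JSON response lacks the expected "metadata" key, so the GetMap().contains() check from viewer_ut.cpp:1948 returns false. A self-contained sketch of that library/cpp/json idiom; the payload below is invented, the real one is the viewer's script-execution response:

    #include <library/cpp/json/json_reader.h>
    #include <library/cpp/testing/unittest/registar.h>
    #include <util/generic/strbuf.h>

    Y_UNIT_TEST_SUITE(ViewerResponseSketch) {
        Y_UNIT_TEST(MetadataPresent) {
            NJson::TJsonValue json;
            // Hypothetical payload; the real test parses the viewer's HTTP response.
            UNIT_ASSERT(NJson::ReadJsonTree(TStringBuf(R"({"status":"SUCCESS"})"), &json));
            // Same check as viewer_ut.cpp:1948; fails here because "metadata" is absent.
            UNIT_ASSERT(json.GetMap().contains("metadata"));
        }
    }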
ydb/services/ydb/ut [size:medium] nchunks:60
------ [0/60] chunk ran 1 test (total:19.35s - test:19.32s)
[fail] YdbLogStore::AlterLogTable [default-linux-x86_64-release-asan] (3.75s)
assertion failed at ydb/services/ydb/ydb_logstore_ut.cpp:435, virtual void NTestSuiteYdbLogStore::TTestCaseAlterLogTable::Execute_(NUnitTest::TTestContext &): (res.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: Column stores are not supported , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12
NTestSuiteYdbLogStore::TTestCaseAlterLogTable::Execute_(NUnitTest::TTestContext&) at /-S/ydb/services/ydb/ydb_logstore_ut.cpp:0:13
operator() at /-S/ydb/services/ydb/ydb_logstore_ut.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/ut/test-results/unittest/testing_out_stuff/YdbLogStore.AlterLogTable.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/ut/test-results/unittest/testing_out_stuff/YdbLogStore.AlterLogTable.out
------ FAIL: 1 - FAIL ydb/services/ydb/ut
ydb/tests/functional/transfer [size:medium]
------ sole chunk ran 1 test (total:20.09s - recipes:14.19s test:2.12s recipes:3.74s)
[fail] Replication::PauseAndResumeReplication [default-linux-x86_64-release-asan] (1.22s)
assertion failed at ydb/tests/functional/transfer/utils.h:422, std::pair NReplicationTest::MainTestCase::DoRead(const TExpectations &): (res.IsSuccess())
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/local/Table_16914372676288429640]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12
UnRef at /-S/util/generic/ptr.h:624:13
NReplicationTest::MainTestCase::CheckResult(TVector>, std::__y1::shared_ptr>, std::__y1::allocator>, std::__y1::shared_ptr>>>, std::__y1::allocator>, std::__y1::shared_ptr>, std::__y1::allocator>, std::__y1::shared_ptr>>>>> const&) at /-S/ydb/tests/functional/transfer/utils.h:431:13
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/vector:527:18
operator() at /-S/ydb/tests/functional/transfer/replication.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/transfer/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/transfer/test-results/unittest/testing_out_stuff/Replication.PauseAndResumeReplication.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/transfer/test-results/unittest/testing_out_stuff/Replication.PauseAndResumeReplication.out
------ FAIL: 1 - FAIL ydb/tests/functional/transfer
Total 38 suites:
   19 - GOOD
   15 - FAIL
    4 - TIMEOUT
Total 126 tests:
   62 - GOOD
   54 - FAIL
    2 - NOT_LAUNCHED
    5 - TIMEOUT
    3 - CRASHED
Cache efficiency ratio is 98.48% (42529 of 43187). Local: 373 (0.86%), dist: 0 (0.00%), by dynamic uids: 0 (0.00%), avoided: 42156 (97.61%)
Dist cache download: count=0, size=0 bytes, speed=0.0 bytes/s
Disk usage for tools/sdk 3.64 GiB
Additional disk space consumed for build cache 0 bytes
Critical path:
[626443 ms] [TM] [rnd-7hld9n9qu3y3k8aj asan default-linux-x86_64 release]: ydb/tests/olap/s3_import/py3test [started: 0 (1743441129544), finished: 626443 (1743441755987)]
Time from start: 689507.6730957031 ms, time elapsed by graph 626443 ms, time diff 63064.673095703125 ms.
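The cache summary above is internally consistent and can be recomputed from its own numbers: 373 local hits plus 42156 avoided tasks (dist and dynamic-uid hits are zero) account for all 42529 cached results, and each percentage is that count over the 43187-task total. A quick check, sketched in C++ for consistency with the other snippets:

    // cache_check.cpp: recomputes the "Cache efficiency ratio" percentages above.
    #include <cstdio>

    int main() {
        const double total = 43187, cached = 42529, local = 373, avoided = 42156;
        std::printf("ratio   %.2f%%\n", 100.0 * cached / total);   // 98.48
        std::printf("local   %.2f%%\n", 100.0 * local / total);    // 0.86
        std::printf("avoided %.2f%%\n", 100.0 * avoided / total);  // 97.61
        // local + dist(0) + dynamic(0) + avoided should equal cached
        std::printf("sum ok: %s\n", local + avoided == cached ? "yes" : "no"); // yes
        return 0;
    }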
The longest 10 tasks:
[626443 ms] [TM] [rnd-7hld9n9qu3y3k8aj asan default-linux-x86_64 release]: ydb/tests/olap/s3_import/py3test [started: 1743441129544, finished: 1743441755987]
[625678 ms] [TM] [rnd-czizqko5itjt7dvu asan default-linux-x86_64 release]: ydb/tests/olap/column_family/compression/py3test [started: 1743441129692, finished: 1743441755370]
[623238 ms] [TM] [rnd-z97v9qhw4malsweg asan default-linux-x86_64 release]: ydb/tests/olap/scenario/py3test [started: 1743441129730, finished: 1743441752968]
[615962 ms] [TM] [rnd-7619127477653267691 asan default-linux-x86_64 release]: ydb/tests/olap/ttl_tiering/py3test [started: 1743441084393, finished: 1743441700355]
[611290 ms] [TM] [rnd-17418399311326112357 asan default-linux-x86_64 release]: ydb/tests/olap/ttl_tiering/py3test [started: 1743441084400, finished: 1743441695690]
[564920 ms] [TM] [rnd-16212346693565731026 asan default-linux-x86_64 release]: ydb/core/tx/columnshard/ut_rw/unittest [started: 1743441118292, finished: 1743441683212]
[560740 ms] [TM] [rnd-14790338876019440985 asan default-linux-x86_64 release]: ydb/core/tx/columnshard/ut_rw/unittest [started: 1743441119621, finished: 1743441680361]
[458914 ms] [TM] [rnd-13529260539686347962 asan default-linux-x86_64 release]: ydb/core/statistics/aggregator/ut/unittest [started: 1743441097548, finished: 1743441556462]
[330853 ms] [TM] [rnd-10328015617207122480 asan default-linux-x86_64 release]: ydb/core/tx/datashard/ut_read_iterator/unittest [started: 1743441089122, finished: 1743441419975]
[288483 ms] [TM] [rnd-13722744325068774302 asan default-linux-x86_64 release]: ydb/tests/functional/tpc/medium/py3test [started: 1743441130311, finished: 1743441418794]
Total time by type:
[9291888 ms] [TM] [count: 619, ave time 15011.13 msec]
[ 100750 ms] [prepare:get from local cache] [count: 373, ave time 270.11 msec]
[  56260 ms] [TS] [count: 10, ave time 5626.00 msec]
[  32966 ms] [prepare:AC] [count: 2, ave time 16483.00 msec]
[  31689 ms] [TA] [count: 29, ave time 1092.72 msec]
[  27480 ms] [prepare:put to dist cache] [count: 363, ave time 75.70 msec]
[   7458 ms] [prepare:tools] [count: 16, ave time 466.12 msec]
[   6265 ms] [prepare:bazel-store] [count: 1, ave time 6265.00 msec]
[    911 ms] [prepare:clean] [count: 3, ave time 303.67 msec]
Total tasks times:
Total failed tasks time - 0 ms (0.00%)
Total tests tasks time - 9379837 ms (100.00%)
Total run tasks time - 9379837 ms
Configure time - 26.9 s
Statistics overhead 1456 ms
Info: Dump junit report to /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_2/junit.xml
Info: Dump results report to /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_2/report.json
Ok
+ echo 0
+ ./ya make .
-T --test-size=small --test-size=medium --stat --test-threads 52 --link-threads 12 -DUSE_EAT_MY_DATA --build release --sanitize=address -DDEBUGINFO_LINES_ONLY --bazel-remote-store --bazel-remote-base-uri http://cachesrv.internal:8081 --bazel-remote-username cache_user --bazel-remote-password-file /tmp/tmp.Ub2FXGwaso --bazel-remote-put --dist-cache-max-file-size=209715200 -A --retest --stat -DCONSISTENT_DEBUG --no-dir-outputs --test-failure-code 0 --build-all --cache-size 2TB --force-build-depends -X --log-file /home/runner/actions_runner/_work/ydb/ydb/tmp/results/ya_log.txt --evlog-file /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_3/ya_evlog.jsonl --junit /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_3/junit.xml --build-results-report /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_3/report.json --output /home/runner/actions_runner/_work/ydb/ydb/tmp/out Output root is subdirectory of Arcadia root, this may cause non-idempotent build Configuring dependencies for platform default-linux-x86_64-release-asan Configuring dependencies for platform tools Configuring dependencies for platform test_tool_tc1-global [0 ymakes processing] [7938/7939 modules configured] [0 ymakes processing] [7944/7944 modules configured] [0 ymakes processing] [7944/7944 modules configured] [4390/4390 modules rendered] Configuring tests execution Configuring local and dist store caches Configuration done. Preparing for execution |33.3%| CLEANING SYMRES | 1.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/ut/ydb-core-mind-ut | 1.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query | 1.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/ut_large/ydb-core-sys_view-ut_large | 2.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace | 2.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_sample_k/ydb-core-tx-datashard-ut_sample_k | 3.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat | 3.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut | 4.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/example/ydb-tests-example | 4.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange | 4.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/control/ut/ydb-core-control-ut | 5.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup | 6.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/ut/ydb-core-cms-ut | 6.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut | 7.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging | 7.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/common/ydb-tests-functional-sqs-common | 7.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut | 8.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_login_large/ydb-core-tx-schemeshard-ut_login_large | 8.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut | 9.0%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/core/libyt-yt-core.a | 9.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup | 9.4%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots | 9.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/query_replay_yt/query_replay_yt | 9.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt |10.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut |10.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service |10.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut |11.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut |11.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile |11.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql |11.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/oltp_workload/tests/ydb-tests-stress-oltp_workload-tests |11.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer |11.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ydb-public-sdk-cpp-src-client-persqueue_public-ut |11.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens |12.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut |12.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks |12.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless |12.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr |12.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut |12.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/s3/ydb-tests-fq-s3 |12.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut |13.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build |13.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut |13.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots |13.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index |13.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots |13.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut |13.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_local_kmeans/ydb-core-tx-datashard-ut_local_kmeans |14.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl |14.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/ydb_cli/ydb-tests-functional-ydb_cli |14.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts |14.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut |14.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow |14.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard |14.8%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut |14.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut |15.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut |15.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage |15.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat |15.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data |15.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain |15.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity |16.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut |16.1%| PREPARE $(TEST_TOOL_HOST-sbr:8330113388) |16.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/ydbd/ydbd |16.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/solomon/actors/ut/ydb-library-yql-providers-solomon-actors-ut |15.7%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/udfs/common/clickhouse/client/libclickhouse_client_udf.global.a |15.8%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.a |15.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/scenario/ydb-tests-olap-scenario |16.0%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/core/libyt-yt-core.a |16.8%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yql/essentials/minikql/comp_nodes/llvm16/libminikql-comp_nodes-llvm16.a |17.5%| PREPARE $(CLANG18-3363451693) |17.7%| PREPARE $(CLANG-1922233694) |17.8%| PREPARE $(CLANG_FORMAT-2212207123) |17.9%| PREPARE $(JDK_DEFAULT-472926544) |18.0%| PREPARE $(JDK17-472926544) |18.2%| PREPARE $(WITH_JDK-sbr:7832760150) |18.3%| PREPARE $(WITH_JDK17-sbr:7832760150) |18.4%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a |18.5%| PREPARE $(CLANG16-1380963495) |18.6%| PREPARE $(YMAKE_PYTHON3-4256832079) |18.8%| PREPARE $(OS_SDK_ROOT-sbr:243881345) |18.9%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/client/libyt-yt-client.a |19.1%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a |19.2%| PREPARE $(FLAKE8_PY3-715603131) |19.3%| PREPARE $(GDB) |19.5%| PREPARE $(CLANG-2518231432) |19.7%| PREPARE $(LLD_ROOT-3808007503) |19.8%| PREPARE $(PYTHON) |20.2%| [AR] {RESULT} $(B)/ydb/library/yql/udfs/common/clickhouse/client/libclickhouse_client_udf.global.a |22.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/ydb/ydb |23.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe |23.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/graph/ut/ydb-core-graph-ut |23.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut |23.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut |24.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/common/ydb-tests-fq-common |24.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots |24.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/query_replay/ydb_query_replay |25.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut |25.2%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots |25.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots |25.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep |25.6%| [AR] {RESULT} $(B)/yql/essentials/minikql/comp_nodes/llvm16/libminikql-comp_nodes-llvm16.a |25.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors |25.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/runtime/ydb-core-kqp-ut-runtime |26.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache |26.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests |26.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring |26.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence |26.6%| [LD] {RESULT} $(B)/ydb/core/engine/ut/ydb-core-engine-ut |26.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots |26.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/engine/ut/ydb-core-engine-ut |27.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf |27.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut |27.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/config/ydb-tests-functional-config |27.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes |27.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx |27.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut |27.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl |27.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/canonical/ydb-tests-functional-canonical |28.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw |28.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/multi_plane/ydb-tests-fq-multi_plane |28.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk |28.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/sdk_sessions_ut/ydb-services-ydb-sdk_sessions_ut |28.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut |28.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol |28.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut |28.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader |29.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut |29.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup |29.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/tx-replication-controller-ut_stream_creator |29.4%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut |29.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore |29.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut |29.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots |29.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication |29.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut |29.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation |29.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/generic/analytics/ydb-tests-fq-generic-analytics |29.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut |29.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes |30.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk |30.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant |30.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview |30.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots |30.6%| [AR] {RESULT} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.a |30.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys |30.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut |31.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut |32.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence |32.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut |32.8%| [AR] {RESULT} $(B)/yt/yt/client/libyt-yt-client.a |32.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber |33.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning |33.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb |33.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut |33.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer |33.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init |33.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/stress_tool/ydb_stress_tool |33.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme |33.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut |33.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut |34.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_build_index/ydb-core-tx-datashard-ut_build_index |33.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/compatibility/ydb-tests-functional-compatibility |33.5%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/tx-schemeshard-ut_vector_index_build_reboots |33.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/ut/ydb-core-client-ut |33.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/viewer/tests/ydb-core-viewer-tests |33.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_data_erasure/ydb-core-tx-schemeshard-ut_data_erasure |34.2%| [AR] {RESULT} $(B)/yt/yt/core/libyt-yt-core.a |34.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant |34.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/yds/ydb-tests-fq-yds |35.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/kqprun/kqprun |35.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/console/ut/ydb-core-cms-console-ut |35.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut |35.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename |35.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/actor/ut/ydb-core-ymq-actor-ut |36.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg |36.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/column_family/compression/ydb-tests-olap-column_family-compression |36.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut |36.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut |36.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas |36.8%| [LD] {RESULT} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut |36.5%| [LD] {RESULT} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut |36.6%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql |36.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/api/ydb-tests-functional-api |36.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector |37.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut |37.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut |37.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/large/ydb-tests-functional-sqs-large |37.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut |37.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut |38.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs |38.3%| [LD] {RESULT} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut |38.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut |38.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume |38.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots |38.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant |38.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers |39.1%| [LD] {RESULT} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut |38.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut |39.1%| [LD] {BAZEL_UPLOAD, 
SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots |39.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |39.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge |39.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/ydb-core-fq-libs-row_dispatcher-format_handler-ut |39.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/config/ut/ydb-services-config-ut |40.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache |40.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl |40.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut |40.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/postgres_integrations/go-libpq/ydb-tests-postgres_integrations-go-libpq |40.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain |40.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/sdk_sessions_pool_ut/ydb-services-ydb-sdk_sessions_pool_ut |41.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut |41.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer |41.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence |41.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats |41.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan |41.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/ttl_tiering/ydb-tests-olap-ttl_tiering |41.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut |41.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs |41.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot |42.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction |42.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut |42.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut |42.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests |42.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut |42.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/functional-sqs-merge_split_common_table-fifo |43.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut |43.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut |43.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api |43.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |43.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut |43.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication |43.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace |43.7%| [LD] 
{BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/ydb-public-sdk-cpp-src-client-federated_topic-ut |44.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut |44.3%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer |44.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql |44.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator |44.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut |44.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots |45.0%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator |45.2%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes |45.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test |45.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers |45.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/log/tests/ydb-tests-stress-log-tests |45.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator |46.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/multinode/ydb-tests-functional-sqs-multinode |46.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing |46.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut |45.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut |45.8%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |46.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut |46.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc |46.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 |46.5%| [LD] {RESULT} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut |46.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms |46.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/security/ut/ydb-core-security-ut |47.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge |47.3%| [LD] {RESULT} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut |47.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut |47.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution |47.8%| [LD] {RESULT} $(B)/ydb/tests/functional/compatibility/ydb-tests-functional-compatibility |47.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base |48.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut |48.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots |48.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut |47.8%| [LD] {RESULT} 
$(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut |48.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut |48.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test |48.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots |48.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut |48.7%| [LD] {RESULT} $(B)/ydb/tests/tools/kqprun/kqprun |48.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut |49.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut |49.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker |49.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut |49.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_transfer_writer/core-tx-replication-service-ut_transfer_writer |49.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut |49.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe |50.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive |50.1%| [LD] {RESULT} $(B)/ydb/core/mind/ut/ydb-core-mind-ut |50.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view |50.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut |50.6%| [LD] {RESULT} $(B)/ydb/tools/query_replay_yt/query_replay_yt |50.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut |50.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load |50.6%| [LD] {RESULT} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage |50.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut |50.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow |50.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage |51.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats |51.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/sql/large/ydb-tests-sql-large |51.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut |51.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order |51.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut |51.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost |52.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table |52.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut |52.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order |52.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut |52.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut |52.0%| [LD] {RESULT} 
$(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut |52.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut |52.2%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost |52.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap |52.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller |52.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap |53.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view |53.2%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/ydb-core-tx-tx_proxy-ut_schemereq |53.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings |53.5%| [LD] {RESULT} $(B)/ydb/library/yql/providers/solomon/actors/ut/ydb-library-yql-providers-solomon-actors-ut |53.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/tpc/large/ydb-tests-functional-tpc-large |53.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut |53.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/ydb-core-tx-tx_proxy-ut_schemereq |54.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id |54.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings |54.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_data_cleanup/ydb-core-tx-datashard-ut_data_cleanup |54.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write |54.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/autoconfig/ydb-tests-functional-autoconfig |54.0%| [LD] {RESULT} $(B)/ydb/core/graph/ut/ydb-core-graph-ut |54.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows |54.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_data_erasure_reboots/ydb-core-tx-schemeshard-ut_data_erasure_reboots |54.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_data_erasure_reboots/ydb-core-tx-schemeshard-ut_data_erasure_reboots |54.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut |54.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel |54.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp |55.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp |55.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/docs/generator/generator |55.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator |55.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut |55.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut |55.8%| [LD] {RESULT} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut |56.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut |56.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut |56.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut |56.5%| [LD] 
{BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut |56.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |56.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/yql/essentials/tools/sql2yql/sql2yql |56.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader |56.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber |56.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/ydb-tests-olap |56.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign |56.8%| [LD] {RESULT} $(B)/ydb/apps/ydbd/ydbd |57.0%| [LD] {RESULT} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut |57.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign |57.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut |57.6%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator |57.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts |57.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator |58.0%| [LD] {RESULT} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut |58.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/sql/ydb-tests-sql |58.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_transfer/ydb-core-tx-schemeshard-ut_transfer |58.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_prefix_kmeans/ydb-core-tx-datashard-ut_prefix_kmeans |58.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut |58.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats |58.5%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut |57.8%| [LD] {RESULT} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut |57.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_transfer/ydb-core-tx-schemeshard-ut_transfer |58.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join |58.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |58.4%| [LD] {RESULT} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut |58.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless |58.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet_flat/benchmark/core_tablet_flat_benchmark |58.7%| [LD] {RESULT} $(B)/ydb/tests/olap/scenario/ydb-tests-olap-scenario |58.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/s3_import/ydb-tests-olap-s3_import |59.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |59.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut |59.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move |59.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/locks/ut_range_treap/ydb-core-tx-locks-ut_range_treap |59.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record |59.8%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots |59.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut |60.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |60.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_reshuffle_kmeans/ydb-core-tx-datashard-ut_reshuffle_kmeans |60.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut |60.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut |60.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/oom/ydb-tests-olap-oom |60.2%| [LD] {RESULT} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut |60.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut |60.6%| [LD] {RESULT} $(B)/ydb/tests/olap/oom/ydb-tests-olap-oom |60.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots |60.9%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects |61.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source |61.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects |61.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |61.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut |61.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica |61.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |61.5%| [LD] {RESULT} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut |61.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud |61.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build |62.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |62.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |62.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |62.4%| [LD] {RESULT} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut |62.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |62.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |62.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream |62.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login |62.5%| [LD] {RESULT} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut |62.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |62.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |62.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |62.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan |63.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |63.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable |63.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/plans/ydb-tests-fq-plans |63.7%| [LD] 
{BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut |63.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_backup/unittest |63.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |63.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction |64.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_volatile/unittest |64.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration |64.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |64.3%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/benchmark/core_tablet_flat_benchmark |64.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/cms/ut/ydb-services-cms-ut |64.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut |64.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage >> DataShardVolatile::GracefulShardRestartNoEarlyReadSetAck >> KqpSnapshotIsolation::TReadOnlyOltpNoSink |64.2%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_transfer_writer/core-tx-replication-service-ut_transfer_writer |64.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std |64.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |64.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |64.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |64.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |64.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |64.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow |65.0%| [LD] {RESULT} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut |65.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |65.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |65.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection |65.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |65.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table |65.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops |66.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops |66.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows |66.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut |66.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/simple_queue/tests/ydb-tests-stress-simple_queue-tests >> KqpSnapshotIsolation::TSimpleOltp |66.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan |66.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit |66.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |66.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |66.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql |66.5%| 
[LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots |66.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |66.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction |67.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/ut/ydb-core-sys_view-ut |67.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |67.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |67.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction |67.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots |67.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |67.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication |67.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |67.4%| [LD] {RESULT} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable |67.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/tpc/medium/ydb-tests-functional-tpc-medium |67.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable |67.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |67.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |68.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |68.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |68.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |68.3%| [LD] {RESULT} $(B)/ydb/tests/library/ut/ydb-tests-library-ut |68.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/olap_workload/tests/ydb-tests-stress-olap_workload-tests |68.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/library/ut/ydb-tests-library-ut |68.6%| [LD] {RESULT} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace |68.8%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/functional-sqs-merge_split_common_table-fifo |68.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |69.0%| [LD] {RESULT} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut |69.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |69.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test |69.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut |69.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |69.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |69.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/load/ydb-tests-olap-load |69.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut |64.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut |64.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |64.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |64.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |64.5%| [LD] {RESULT} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut |64.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/tenants/ydb-tests-functional-tenants |64.7%| [LD] 
{RESULT} $(B)/ydb/tests/fq/generic/streaming/ydb-tests-fq-generic-streaming |64.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |64.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/generic/streaming/ydb-tests-fq-generic-streaming |65.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |65.1%| [LD] {RESULT} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut |65.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ydb-public-sdk-cpp-src-client-topic-ut |65.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |65.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |65.4%| [LD] {RESULT} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test |65.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots |65.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |65.8%| [LD] {RESULT} $(B)/ydb/tests/stress/kv/tests/ydb-tests-stress-kv-tests |65.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/fqrun/fqrun |65.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test |66.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption |66.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots |57.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |57.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/kv/tests/ydb-tests-stress-kv-tests |57.2%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas |57.3%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant |57.4%| [LD] {RESULT} $(B)/ydb/tests/olap/column_family/compression/ydb-tests-olap-column_family-compression |57.5%| [LD] {RESULT} $(B)/ydb/core/cms/console/ut/ydb-core-cms-console-ut |57.6%| [LD] {RESULT} $(B)/ydb/tests/fq/common/ydb-tests-fq-common |57.6%| [LD] {RESULT} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut |57.7%| [LD] {RESULT} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut |57.8%| COMPACTING CACHE 892.1GiB |57.8%| [LD] {RESULT} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut |57.9%| [LD] {RESULT} $(B)/ydb/tests/example/ydb-tests-example |58.0%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx |58.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless |58.1%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/tx-replication-controller-ut_stream_creator |58.1%| [LD] {RESULT} $(B)/ydb/tests/sql/ydb-tests-sql |58.2%| [LD] {RESULT} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut |58.3%| [LD] {RESULT} $(B)/ydb/tests/sql/large/ydb-tests-sql-large |58.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain |58.4%| [LD] {RESULT} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut |58.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr |58.5%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ydb-public-sdk-cpp-src-client-persqueue_public-ut |58.6%| [LD] {RESULT} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |58.7%| [LD] {RESULT} 
$(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange |58.7%| [LD] {RESULT} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut |58.8%| [LD] {RESULT} $(B)/ydb/tests/functional/ydb_cli/ydb-tests-functional-ydb_cli |58.9%| [LD] {RESULT} $(B)/ydb/tests/functional/tpc/large/ydb-tests-functional-tpc-large |58.9%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber |59.0%| [LD] {RESULT} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut |59.1%| [LD] {RESULT} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut |59.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain |59.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_prefix_kmeans/ydb-core-tx-datashard-ut_prefix_kmeans |59.3%| [LD] {RESULT} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut |59.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots |59.4%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud |59.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots |59.5%| [LD] {RESULT} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |59.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots |59.7%| [LD] {RESULT} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens |59.7%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/common/ydb-tests-functional-sqs-common |59.8%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test |59.9%| [LD] {RESULT} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut |59.9%| [LD] {RESULT} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut |60.0%| [LD] {RESULT} $(B)/ydb/core/control/ut/ydb-core-control-ut |60.1%| [LD] {RESULT} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut |60.1%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage |60.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots |60.2%| [LD] {RESULT} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut |60.3%| [LD] {RESULT} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut |60.4%| [LD] {RESULT} $(B)/ydb/core/cms/ut/ydb-core-cms-ut |60.4%| [LD] {RESULT} $(B)/ydb/tests/functional/config/ydb-tests-functional-config |60.5%| [LD] {RESULT} $(B)/ydb/tests/functional/canonical/ydb-tests-functional-canonical |60.6%| [LD] {RESULT} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader |60.6%| [LD] {RESULT} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut |60.7%| [LD] {RESULT} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut |60.8%| [LD] {RESULT} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut |60.8%| [LD] {RESULT} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests |60.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_build_index/ydb-core-tx-datashard-ut_build_index |61.0%| [LD] {RESULT} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut |61.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots |61.1%| [LD] {RESULT} 
$(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer |61.2%| [LD] {RESULT} $(B)/ydb/core/client/ut/ydb-core-client-ut |61.2%| [LD] {RESULT} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe |61.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence |61.4%| [LD] {RESULT} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut |61.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication |61.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile |61.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup |61.6%| [LD] {RESULT} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut |61.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes |61.8%| [LD] {RESULT} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut |61.8%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk |61.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore |62.0%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs |62.0%| [LD] {RESULT} $(B)/ydb/tests/functional/api/ydb-tests-functional-api |62.1%| [LD] {RESULT} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename |62.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |62.2%| [LD] {RESULT} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut |62.3%| [LD] {RESULT} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut |62.4%| [LD] {RESULT} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms |62.4%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan |62.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume |62.5%| [LD] {RESULT} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests |62.6%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap |62.7%| [LD] {RESULT} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut |62.7%| [LD] {RESULT} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test |62.8%| [LD] {RESULT} $(B)/ydb/services/ydb/sdk_sessions_pool_ut/ydb-services-ydb-sdk_sessions_pool_ut |62.9%| [LD] {RESULT} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test |62.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats |63.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap |63.1%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker |63.1%| [LD] {RESULT} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut |63.2%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut |63.3%| [LD] {RESULT} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless |63.3%| [LD] {RESULT} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut |63.4%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ydb-public-sdk-cpp-src-client-topic-ut |63.5%| [LD] {RESULT} $(B)/ydb/tests/functional/autoconfig/ydb-tests-functional-autoconfig |63.5%| [LD] {RESULT} $(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut |63.6%| [LD] {RESULT} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut |63.7%| [LD] {RESULT} 
$(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data |63.7%| [LD] {RESULT} $(B)/ydb/core/sys_view/ut/ydb-core-sys_view-ut |63.8%| [LD] {RESULT} $(B)/yql/essentials/tools/sql2yql/sql2yql |63.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots |63.9%| [LD] {RESULT} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql |64.0%| [LD] {RESULT} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut |64.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace |64.1%| [LD] {RESULT} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut |64.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_sample_k/ydb-core-tx-datashard-ut_sample_k |64.3%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging |64.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction |64.4%| [LD] {RESULT} $(B)/ydb/tests/stress/simple_queue/tests/ydb-tests-stress-simple_queue-tests |64.5%| [LD] {RESULT} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut |64.5%| [LD] {RESULT} $(B)/ydb/tests/fq/plans/ydb-tests-fq-plans |64.6%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache |64.7%| [LD] {RESULT} $(B)/ydb/tests/tools/fqrun/fqrun |64.7%| [LD] {RESULT} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence |64.8%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf |64.8%| [LD] {RESULT} $(B)/ydb/tools/stress_tool/ydb_stress_tool |64.9%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw |65.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots |65.0%| [LD] {RESULT} $(B)/ydb/apps/ydb/ydb |65.1%| [LD] {RESULT} $(B)/ydb/tests/fq/multi_plane/ydb-tests-fq-multi_plane |65.2%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant |65.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors |65.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_data_erasure/ydb-core-tx-schemeshard-ut_data_erasure |65.4%| [LD] {RESULT} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk |65.4%| [LD] {RESULT} $(B)/ydb/core/viewer/tests/ydb-core-viewer-tests |65.5%| [LD] {RESULT} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber |65.6%| [LD] {RESULT} $(B)/ydb/tests/olap/ttl_tiering/ydb-tests-olap-ttl_tiering |65.6%| [LD] {RESULT} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut |65.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning |65.8%| [LD] {RESULT} $(B)/ydb/core/ymq/actor/ut/ydb-core-ymq-actor-ut |65.8%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/multinode/ydb-tests-functional-sqs-multinode |65.9%| [LD] {RESULT} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut |66.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats |66.0%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_reshuffle_kmeans/ydb-core-tx-datashard-ut_reshuffle_kmeans |66.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots |66.2%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut |66.2%| [LD] {RESULT} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut |66.3%| [LD] {RESULT} 
$(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache |66.4%| [LD] {RESULT} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut |66.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql |66.5%| [LD] {RESULT} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage |66.6%| [LD] {RESULT} $(B)/ydb/tests/fq/s3/ydb-tests-fq-s3 |66.6%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service |66.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table |66.8%| [LD] {RESULT} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer |66.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan |66.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |67.0%| [LD] {RESULT} $(B)/ydb/core/security/ut/ydb-core-security-ut |67.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |67.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots |67.1%| [LD] {RESULT} $(B)/ydb/tools/query_replay/ydb_query_replay |67.2%| [LD] {RESULT} $(B)/ydb/tests/fq/yds/ydb-tests-fq-yds |67.3%| [LD] {RESULT} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut |67.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots |67.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots |67.5%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring |67.5%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme |67.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/tx-schemeshard-ut_vector_index_build_reboots |67.7%| [LD] {RESULT} $(B)/ydb/services/config/ut/ydb-services-config-ut |67.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg |67.8%| [LD] {RESULT} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut |67.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots |67.9%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut |68.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base |68.1%| [LD] {RESULT} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/ydb-core-fq-libs-row_dispatcher-format_handler-ut |68.1%| [LD] {RESULT} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api |68.2%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut |68.3%| [LD] {RESULT} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive |68.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view |68.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table |68.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats |68.5%| [LD] {RESULT} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut |68.6%| [LD] {RESULT} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller |68.7%| [LD] {RESULT} $(B)/ydb/services/cms/ut/ydb-services-cms-ut |68.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 |68.8%| [LD] 
{RESULT} $(B)/ydb/core/tx/datashard/ut_data_cleanup/ydb-core-tx-datashard-ut_data_cleanup |68.9%| [LD] {RESULT} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut |68.9%| [LD] {RESULT} $(B)/ydb/tests/functional/tpc/medium/ydb-tests-functional-tpc-medium |69.0%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut |69.1%| [LD] {RESULT} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat |69.1%| [LD] {RESULT} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut |69.2%| [LD] {RESULT} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard |69.3%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view |69.3%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut |69.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_local_kmeans/ydb-core-tx-datashard-ut_local_kmeans |69.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write |69.5%| [LD] {RESULT} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel |69.6%| [LD] {RESULT} $(B)/ydb/tests/stress/log/tests/ydb-tests-stress-log-tests |69.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot |69.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity |69.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan |69.8%| [LD] {RESULT} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit |69.9%| [AR] {RESULT} $(B)/yt/yt/core/libyt-yt-core.a |70.0%| [LD] {RESULT} $(B)/ydb/tests/functional/tenants/ydb-tests-functional-tenants |70.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |70.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat |70.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |70.2%| [LD] {RESULT} $(B)/ydb/tests/olap/ydb-tests-olap |70.3%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query |70.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection |70.4%| [LD] {RESULT} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut |70.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks |70.6%| [LD] {RESULT} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut |70.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index |70.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |70.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |70.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |70.9%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std |71.0%| [LD] {RESULT} $(B)/ydb/tests/stress/oltp_workload/tests/ydb-tests-stress-oltp_workload-tests |71.0%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt |71.1%| [LD] {RESULT} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut |71.2%| [LD] {RESULT} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation |71.2%| [LD] {RESULT} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut |71.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector |71.4%| [LD] {RESULT} 
$(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots |71.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |71.5%| [LD] {RESULT} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut |71.6%| [LD] {RESULT} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut |71.6%| [LD] {RESULT} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut |71.7%| [LD] {RESULT} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut |71.7%| [LD] {RESULT} $(B)/ydb/tests/postgres_integrations/go-libpq/ydb-tests-postgres_integrations-go-libpq |71.8%| [LD] {RESULT} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe |71.9%| [LD] {RESULT} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut |71.9%| [LD] {RESULT} $(B)/ydb/core/tx/locks/ut_range_treap/ydb-core-tx-locks-ut_range_treap |72.0%| [LD] {RESULT} $(B)/ydb/tests/olap/s3_import/ydb-tests-olap-s3_import |72.1%| [LD] {RESULT} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut |72.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream |72.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source |72.3%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id |72.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move |72.4%| [LD] {RESULT} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load |72.5%| [LD] {RESULT} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable |72.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build |72.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots |72.7%| [LD] {RESULT} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut |72.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join |72.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup |72.9%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record >> YdbLogStore::AlterLogTable |72.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots |73.0%| [LD] {RESULT} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut |73.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows |73.1%| [LD] {RESULT} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut |73.2%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader |73.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |73.3%| [LD] {RESULT} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow |73.4%| [LD] {RESULT} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut |73.5%| [LD] {RESULT} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut |73.5%| [LD] {RESULT} $(B)/ydb/tests/olap/load/ydb-tests-olap-load |73.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep |73.7%| [LD] {RESULT} $(B)/ydb/core/sys_view/ut_large/ydb-core-sys_view-ut_large |73.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys |73.8%| [LD] {RESULT} 
$(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution |73.9%| [LD] {RESULT} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut |73.9%| [LD] {RESULT} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut |74.0%| [LD] {RESULT} $(B)/ydb/tests/fq/generic/analytics/ydb-tests-fq-generic-analytics |74.0%| [LD] {RESULT} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl |74.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |74.2%| [LD] {RESULT} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut |74.2%| [LD] {RESULT} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb |74.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init |74.4%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview |74.4%| [LD] {RESULT} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut |74.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots |74.6%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant |74.6%| [LD] {RESULT} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut |74.7%| [LD] {RESULT} $(B)/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc |74.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots |74.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction |74.9%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/large/ydb-tests-functional-sqs-large |75.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |75.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge |75.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs |75.2%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/runtime/ydb-core-kqp-ut-runtime |75.2%| [LD] {RESULT} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts |75.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator |75.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence |75.4%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator |75.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing |75.6%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/ydb-public-sdk-cpp-src-client-federated_topic-ut |75.6%| [LD] {RESULT} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut |75.7%| [LD] {RESULT} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption |75.8%| [LD] {RESULT} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut |75.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge |75.9%| [LD] {RESULT} $(B)/ydb/tests/olap/docs/generator/generator |76.0%| [LD] {RESULT} $(B)/ydb/services/ydb/sdk_sessions_ut/ydb-services-ydb-sdk_sessions_ut |76.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |76.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots |76.1%| [LD] {RESULT} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts |76.2%| [LD] {RESULT} 
$(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows |76.3%| [LD] {RESULT} $(B)/ydb/tests/stress/olap_workload/tests/ydb-tests-stress-olap_workload-tests |76.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |76.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots |76.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots |76.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login |76.6%| [LD] {RESULT} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut |76.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |76.7%| [LD] {RESULT} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut |76.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |76.9%| [LD] {RESULT} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut |76.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |77.0%| [LD] {RESULT} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut |77.1%| [LD] {RESULT} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration |77.1%| [LD] {RESULT} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut |77.2%| [LD] {RESULT} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut |77.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build |77.3%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica |77.4%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol |77.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_login_large/ydb-core-tx-schemeshard-ut_login_large |77.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |77.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |77.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |77.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |77.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |77.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |77.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |78.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |78.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |78.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootColumnShard |78.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |78.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |78.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |78.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |78.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |78.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |78.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest 
|78.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |78.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |78.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |78.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |78.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |79.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |79.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |79.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |79.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |79.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> test_sql_streaming.py::test[suites-GroupByHopWithDataWatermarks-default.txt] |79.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |79.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |79.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |79.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |79.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |79.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> test_sql_streaming.py::test[suites-ReadTopicWithMetadataWithFilter-default.txt] |79.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |79.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |79.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |79.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |80.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |80.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |80.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |80.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |80.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |80.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> test_sql_streaming.py::test[suites-GroupByHoppingWindowTimeExtractorUnusedColumns-default.txt] |80.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |80.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |80.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |80.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> test_sql_streaming.py::test[suites-GroupByHop-default.txt] |80.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |80.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |80.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> overlapping_portions.py::TestOverlappingPortions::test |80.8%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/statistics/aggregator/ut/unittest |80.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |80.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |81.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |81.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |81.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |81.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |81.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |81.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |81.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |81.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |81.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |81.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |81.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |81.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> DataShardVolatile::GracefulShardRestartNoEarlyReadSetAck [GOOD] |81.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |82.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |82.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |82.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> YdbLogStore::AlterLogTable [FAIL] |82.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |82.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |82.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |82.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |82.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |82.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |82.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |82.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |82.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |82.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest |83.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest |83.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |83.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |83.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest |83.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |83.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |83.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest |83.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |83.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |83.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest |83.6%| [TM] 
{asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |83.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> TKeyValueTracingTest::ReadSmall >> TKeyValueTracingTest::ReadHuge >> TKeyValueTracingTest::WriteHuge >> TKeyValueTracingTest::WriteSmall >> KqpCost::OlapWriteRow |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |85.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |85.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |85.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |85.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |85.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.3%| [TM] {asan, 
default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::TTLUsage |87.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> TKeyValueTracingTest::WriteSmall [FAIL] >> TKeyValueTracingTest::ReadSmall [FAIL] >> TKeyValueTracingTest::ReadHuge [FAIL] >> TKeyValueTracingTest::WriteHuge [FAIL] |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |87.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |87.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |88.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |88.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |88.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> 
KqpSnapshotIsolation::TSimpleOltp [FAIL] >> Viewer::QueryExecuteScript |88.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbLogStore::AlterLogTable [FAIL] Test command err: 2025-03-31T17:28:24.007881Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488026819758736030:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:28:24.008038Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/gfke/000177/r3tmp/tmpCpYcvr/pdisk_1.dat 2025-03-31T17:28:24.386865Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61361, node 1 2025-03-31T17:28:24.423055Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T17:28:24.423122Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-31T17:28:24.460455Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T17:28:24.460548Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T17:28:24.477977Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T17:28:24.493492Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T17:28:24.493514Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T17:28:24.493527Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T17:28:24.493661Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18583 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T17:28:24.835436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T17:28:24.992693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateColumnStore CreateColumnStore { Name: "LogStore" ColumnShardCount: 4 SchemaPresets { Name: "default" Schema { Columns { Name: "timestamp" Type: "Timestamp" NotNull: true } Columns { Name: "resource_type" Type: "Utf8" NotNull: true } Columns { Name: "resource_id" Type: "Utf8" NotNull: true } Columns { Name: "uid" Type: "Utf8" NotNull: true } Columns { Name: "level" Type: "Int32" } Columns { Name: "message" Type: "Utf8" } Columns { Name: "json_payload" Type: "JsonDocument" } Columns { Name: "request_id" Type: "Utf8" } Columns { Name: "ingested_at" Type: "Timestamp" } Columns { Name: "saved_at" Type: "Timestamp" } KeyColumnNames: "timestamp" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" DefaultCompression { Codec: ColumnCodecLZ4 } } } } } TxId: 281474976710658 TabletId: 72057594046644480 PeerName: "ipv6:[::1]:44680" , at schemeshard: 72057594046644480 2025-03-31T17:28:24.993192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateOlapStore Propose, path: /Root/LogStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-31T17:28:24.993248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusPreconditionFailed, reason: Column stores are not supported, at schemeshard: 72057594046644480 2025-03-31T17:28:24.995854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710658, response: Status: StatusPreconditionFailed Reason: "Column stores are not supported" TxId: 281474976710658 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2025-03-31T17:28:24.996033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusPreconditionFailed, reason: Column stores are not supported, operation: CREATE COLUMN STORE, path: /Root/LogStore 2025-03-31T17:28:24.998344Z node 1 :TX_PROXY ERROR: Actor# [1:7488026819758736934:2592] txid# 281474976710658, issues: { message: "Column stores are not supported" severity: 1 } assertion failed at ydb/services/ydb/ydb_logstore_ut.cpp:435, virtual void NTestSuiteYdbLogStore::TTestCaseAlterLogTable::Execute_(NUnitTest::TTestContext &): (res.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: Column stores are not supported , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)
TBackTrace::Capture()+28 (0x1C6D843C)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x1CB954F0)
NTestSuiteYdbLogStore::TTestCaseAlterLogTable::Execute_(NUnitTest::TTestContext&)+8721 (0x1C20B181)
std::__y1::__function::__func, void ()>::operator()()+280 (0x1C234018)
TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x1CBCC516)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x1CB9C069)
NTestSuiteYdbLogStore::TCurrentTest::Execute()+1204 (0x1C2331E4)
NUnitTest::TTestFactory::Execute()+2438 (0x1CB9D936)
NUnitTest::RunMain(int, char**)+5213 (0x1CBC6A8D)
??+0 (0x7EFED0A07D90)
__libc_start_main+128 (0x7EFED0A07E40)
_start+41 (0x1907A029)
|89.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest
|89.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest
|89.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest
|89.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest
|89.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest
|89.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest
|89.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest
|89.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest
>> KqpSnapshotIsolation::TReadOnlyOltpNoSink [FAIL]
|89.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest
|89.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest
|89.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest
|89.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest
|89.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest
|89.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest
|89.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest
|90.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest
|90.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest
|90.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest
|90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest
|90.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest
|90.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest
|90.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest
|90.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest
|90.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest
|90.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest
|90.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest
|90.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest
|90.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest
|90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest
|90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest
|91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest
>> KqpSinkTx::OlapInvalidateOnError
------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::ReadHuge [FAIL]
Test command err:
equal assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:124, void TestOneRead(TString, TString): env.WilsonUploader->Traces.size() == 1
TBackTrace::Capture()+28 (0x1013BF9C)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x105F7E30)
TestOneRead(TBasicString>, TBasicString>)+4828 (0xFD8444C)
NTestSuiteTKeyValueTracingTest::TTestCaseReadHuge::Execute_(NUnitTest::TTestContext&)+318 (0xFD8B01E)
std::__y1::__function::__func, void ()>::operator()()+280 (0xFD9E7F8)
TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x10625D96)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x105FE9A9)
NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()+1204 (0xFD9D6A4)
NUnitTest::TTestFactory::Execute()+2438 (0x10600276)
NUnitTest::RunMain(int, char**)+5213 (0x1062030D)
??+0 (0x7F02F07EFD90)
__libc_start_main+128 (0x7F02F07EFE40)
_start+41 (0xD71A029)
------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::ReadSmall [FAIL]
Test command err:
equal assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:124, void TestOneRead(TString, TString): env.WilsonUploader->Traces.size() == 1
TBackTrace::Capture()+28 (0x1013BF9C)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x105F7E30)
TestOneRead(TBasicString>, TBasicString>)+4828 (0xFD8444C)
NTestSuiteTKeyValueTracingTest::TTestCaseReadSmall::Execute_(NUnitTest::TTestContext&)+318 (0xFD8AC2E)
std::__y1::__function::__func, void ()>::operator()()+280 (0xFD9E7F8)
TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x10625D96)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x105FE9A9)
NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()+1204 (0xFD9D6A4)
NUnitTest::TTestFactory::Execute()+2438 (0x10600276)
NUnitTest::RunMain(int, char**)+5213 (0x1062030D)
??+0 (0x7FB3606D3D90)
__libc_start_main+128 (0x7FB3606D3E40)
_start+41 (0xD71A029)
|91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::WriteHuge [FAIL]
Test command err:
assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:103, void TestOneWrite(TString, TVector &&): (env.WilsonUploader->Traces.size() == 1) failed: (2 != 1)
TBackTrace::Capture()+28 (0x1013BF9C)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x105F7E30)
TestOneWrite(TBasicString>, TVector>, std::__y1::allocator>>>&&)+4253 (0xFD7EA7D)
NTestSuiteTKeyValueTracingTest::TTestCaseWriteHuge::Execute_(NUnitTest::TTestContext&)+216 (0xFD8A8B8)
std::__y1::__function::__func, void ()>::operator()()+280 (0xFD9E7F8)
TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x10625D96)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x105FE9A9)
NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()+1204 (0xFD9D6A4)
NUnitTest::TTestFactory::Execute()+2438 (0x10600276)
NUnitTest::RunMain(int, char**)+5213 (0x1062030D)
??+0 (0x7F298A06CD90)
__libc_start_main+128 (0x7F298A06CE40)
_start+41 (0xD71A029)
|91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest
|91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::WriteSmall [FAIL]
Test command err:
assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:103, void TestOneWrite(TString, TVector &&): (env.WilsonUploader->Traces.size() == 1) failed: (2 != 1)
TBackTrace::Capture()+28 (0x1013BF9C)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x105F7E30)
TestOneWrite(TBasicString>, TVector>, std::__y1::allocator>>>&&)+4253 (0xFD7EA7D)
NTestSuiteTKeyValueTracingTest::TTestCaseWriteSmall::Execute_(NUnitTest::TTestContext&)+216 (0xFD8A5A8)
std::__y1::__function::__func, void ()>::operator()()+280 (0xFD9E7F8)
TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x10625D96)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x105FE9A9)
NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()+1204 (0xFD9D6A4)
NUnitTest::TTestFactory::Execute()+2438 (0x10600276)
NUnitTest::RunMain(int, char**)+5213 (0x1062030D)
??+0 (0x7F5D73166D90)
__libc_start_main+128 (0x7F5D73166E40)
_start+41 (0xD71A029)
>> KqpSnapshotIsolation::TSimpleOltpNoSink
>> KqpSnapshotIsolation::TConflictWriteOlap
>> KqpSnapshotIsolation::TConflictWriteOltp
|91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest
|91.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest
>> KqpSnapshotIsolation::TConflictWriteOltpNoSink
>> KqpSnapshotIsolation::TReadOnlyOltp
|91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest
|91.7%| [TA] $(B)/ydb/core/keyvalue/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log}
|91.8%| [TA] {RESULT} $(B)/ydb/core/keyvalue/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log}
|91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest
|91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest
>> KqpSnapshotIsolation::TConflictReadWriteOltp
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_volatile/unittest >> DataShardVolatile::GracefulShardRestartNoEarlyReadSetAck [GOOD]
Test command err:
2025-03-31T17:28:24.693440Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T17:28:24.693668Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T17:28:24.693820Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/gfke/00018c/r3tmp/tmpqFRS4G/pdisk_1.dat 2025-03-31T17:28:25.083182Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] ::Bootstrap [1:594:2519] 2025-03-31T17:28:25.083249Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] lookup [1:594:2519] 2025-03-31T17:28:25.083321Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] queue send [1:594:2519] 2025-03-31T17:28:25.083410Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] received pending shutdown [1:594:2519] 2025-03-31T17:28:25.083666Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] forward result local node, try to connect [1:594:2519] 2025-03-31T17:28:25.083717Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480]::SendEvent [1:594:2519] 2025-03-31T17:28:25.084576Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] connected with status OK role: Leader [1:594:2519] 2025-03-31T17:28:25.084642Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] send queued [1:594:2519] 2025-03-31T17:28:25.084679Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] push event to server [1:594:2519] 2025-03-31T17:28:25.084727Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] shutdown pipe due to pending shutdown request [1:594:2519] 2025-03-31T17:28:25.084758Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] notify reset [1:594:2519] 2025-03-31T17:28:25.085000Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:5} Tx{8, NKikimr::NSchemeShard::TSchemeShard::TTxOperationPropose} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxOperationPropose 2025-03-31T17:28:25.085061Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:5} Tx{8, NKikimr::NSchemeShard::TSchemeShard::TTxOperationPropose} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-31T17:28:25.087059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T17:28:25.087320Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:5} Tx{8, NKikimr::NSchemeShard::TSchemeShard::TTxOperationPropose} hope 1 -> done Change{4, redo 987b alter 0b annex 0, ~{ 1, 33, 35, 42, 4 } -{ }, 0 gb} 2025-03-31T17:28:25.087395Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:5} Tx{8, NKikimr::NSchemeShard::TSchemeShard::TTxOperationPropose} release 4194304b of static, Memory{0 dyn 0} 2025-03-31T17:28:25.087896Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:6} commited cookie 1 for step 5 2025-03-31T17:28:25.099853Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:6} Tx{9, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress 2025-03-31T17:28:25.099942Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:6} Tx{9, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} took 4194304b of static mem, 
Memory{4194304 dyn 0} 2025-03-31T17:28:25.100187Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:6} Tx{9, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} hope 1 -> done Change{5, redo 174b alter 0b annex 0, ~{ 42, 4 } -{ }, 0 gb} 2025-03-31T17:28:25.100282Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:6} Tx{9, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} release 4194304b of static, Memory{0 dyn 0} 2025-03-31T17:28:25.100734Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:7} commited cookie 1 for step 6 2025-03-31T17:28:25.100877Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:7} Tx{10, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress 2025-03-31T17:28:25.100917Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:7} Tx{10, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-31T17:28:25.101103Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:7} Tx{10, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} hope 1 -> done Change{6, redo 174b alter 0b annex 0, ~{ 42, 4 } -{ }, 0 gb} 2025-03-31T17:28:25.101157Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:7} Tx{10, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} release 4194304b of static, Memory{0 dyn 0} 2025-03-31T17:28:25.101518Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:8} commited cookie 1 for step 7 2025-03-31T17:28:25.101628Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:8} Tx{11, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress 2025-03-31T17:28:25.101666Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:8} Tx{11, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-31T17:28:25.101849Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:8} Tx{11, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} hope 1 -> done Change{7, redo 120b alter 0b annex 0, ~{ 4 } -{ }, 0 gb} 2025-03-31T17:28:25.101898Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:8} Tx{11, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} release 4194304b of static, Memory{0 dyn 0} 2025-03-31T17:28:25.102147Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:9} commited cookie 1 for step 8 2025-03-31T17:28:25.102356Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046316545] ::Bootstrap [1:601:2525] 2025-03-31T17:28:25.102389Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046316545] lookup [1:601:2525] 2025-03-31T17:28:25.102498Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046316545] queue send [1:601:2525] 2025-03-31T17:28:25.102761Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046316545] forward result local node, try to connect [1:601:2525] 2025-03-31T17:28:25.102804Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046316545]::SendEvent [1:601:2525] 2025-03-31T17:28:25.102937Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046316545] connected with status OK role: Leader [1:601:2525] 2025-03-31T17:28:25.102979Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046316545] send queued [1:601:2525] 2025-03-31T17:28:25.103009Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046316545] push event to server [1:601:2525] 2025-03-31T17:28:25.103346Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] ::Bootstrap [1:604:2527] 
2025-03-31T17:28:25.103387Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] lookup [1:604:2527] 2025-03-31T17:28:25.103464Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] queue send [1:604:2527] 2025-03-31T17:28:25.103517Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] received pending shutdown [1:604:2527] 2025-03-31T17:28:25.103570Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] forward result local node, try to connect [1:604:2527] 2025-03-31T17:28:25.103632Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480]::SendEvent [1:604:2527] 2025-03-31T17:28:25.103781Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] connected with status OK role: Leader [1:604:2527] 2025-03-31T17:28:25.103815Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] send queued [1:604:2527] 2025-03-31T17:28:25.103849Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] push event to server [1:604:2527] 2025-03-31T17:28:25.103891Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] shutdown pipe due to pending shutdown request [1:604:2527] 2025-03-31T17:28:25.103922Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] notify reset [1:604:2527] 2025-03-31T17:28:25.104024Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:9} Tx{12, NKikimr::NSchemeShard::TSchemeShard::TTxNotifyCompletion} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxNotifyCompletion 2025-03-31T17:28:25.104075Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:9} Tx{12, NKikimr::NSchemeShard::TSchemeShard::TTxNotifyCompletion} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-31T17:28:25.104170Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:9} Tx{12, NKikimr::NSchemeShard::TSchemeShard::TTxNotifyCompletion} hope 1 -> done Change{8, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-03-31T17:28:25.104228Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:9} Tx{12, NKikimr::NSchemeShard::TSchemeShard::TTxNotifyCompletion} release 4194304b of static, Memory{0 dyn 0} 2025-03-31T17:28:25.114682Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] client retry [1:88:2135] 2025-03-31T17:28:25.114765Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [1:88:2135] 2025-03-31T17:28:25.114852Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936131] client retry [1:338:2376] 2025-03-31T17:28:25.114873Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936131] lookup [1:338:2376] 2025-03-31T17:28:25.114949Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] forward result local node, try to connect [1:88:2135] 2025-03-31T17:28:25.115005Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033]::SendEvent [1:88:2135] 2025-03-31T17:28:25.115205Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936131] forward result local node, try to connect [1:338:2376] 2025-03-31T17:28:25.115238Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936131]::SendEvent [1:338:2376] 2025-03-31T17:28:25.115320Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] connected with status OK role: Leader [1:88:2135] 2025-03-31T17:28:25.115351Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] send queued [1:88:2135] 2025-03-31T17:28:25.115377Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] push event to server [1:88:2135] 2025-03-31T17:28:25.115441Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936131] connected with status OK role: Leader [1:338:2376] 2025-03-31T17:28:25.115494Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936131] send queued [1:338:2376] 2025-03-31T17:28:25.115534Z node 
1 :PIPE_CLIENT DEBUG: TClient[72057594037936131] push event to server [1:338:2376] 2025-03-31T17:28:25.115683Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:8} Tx{14, NKikimr::NBsController::TBlobStorageController::TTxRegisterNode} queued, type NKikimr::NBsController::TBlobStorageController::TTxRegisterNode 2025-03-31T17:28:25.115738Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:8} Tx{14, NKikimr::NBsController::TBlobStorageController::TTxRegisterNode} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-31T17:28:25.115958Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:8} Tx{14, NKikimr::NBsController::TBlobStorageController::TTxRegisterNode} hope 1 -> done Change{7, redo 79b alter 0b annex 0, ~{ 2 } -{ }, 0 gb} 2025-03-31T17:28:25.116015Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:8} Tx{14, NKikimr::NBsController::TBlobStorageController::TTxRegisterNode} release 4194304b of static, Memory{0 dyn 0} 2025-03-31T17:28:25.116482Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936131] send [1:338:2376] 2025-03-31T17:28:25.116525Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936131] push event to server [1:338:2376] 2025-0 ... atExecutor::ITransaction, TDelete>, NActors::TActorContext const&) /-S/ydb/core/tablet_flat/tablet_flat_executed.cpp:57:5 #16 0x21c60941 in NKikimr::NDataShard::TDataShard::Handle(TAutoPtr, TDelete>&, NActors::TActorContext const&) /-S/ydb/core/tx/datashard/datashard.cpp:3415:21 #17 0x21c0932b in NKikimr::NDataShard::TDataShard::StateWork(TAutoPtr&) /-S/ydb/core/tx/datashard/datashard_impl.h:3157:13 #18 0x1a54e5cc in NActors::IActor::Receive(TAutoPtr&) /-S/ydb/library/actors/core/actor.cpp:280:13 #19 0x36c883a4 in NActors::TTestActorRuntimeBase::SendInternal(TAutoPtr, unsigned int, bool) /-S/ydb/library/actors/testlib/test_runtime.cpp:1702:33 #20 0x36c80c19 in NActors::TTestActorRuntimeBase::DispatchEventsInternal(NActors::TDispatchOptions const&, TInstant) /-S/ydb/library/actors/testlib/test_runtime.cpp:1295:45 #21 0x36c8af93 in NActors::TTestActorRuntimeBase::WaitForEdgeEvents(std::__y1::function&)>, TSet, std::__y1::allocator> const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.cpp:1554:22 #22 0x36e535b3 in NActors::TEvents::TEvWakeup::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEventIf(TSet, std::__y1::allocator> const&, std::__y1::function const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.h:477:13 #23 0x36e52b52 in GrabEdgeEvent /-S/ydb/library/actors/testlib/test_runtime.h:526:20 #24 0x36e52b52 in NActors::TEvents::TEvWakeup::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEvent(NActors::TActorId const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.h:532:20 #25 0x36e4adb2 in NActors::TEvents::TEvWakeup::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEventRethrow(NActors::TActorId const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.h:577:24 #26 0x36e4a98a in NActors::TTestActorRuntime::SimulateSleep(TDuration) /-S/ydb/core/testlib/actors/test_runtime.cpp:296:9 #27 0x18dd4393 in NKikimr::NTestSuiteDataShardVolatile::TTestCaseGracefulShardRestartNoEarlyReadSetAck::Execute_(NUnitTest::TTestContext&) /-S/ydb/core/tx/datashard/datashard_ut_volatile.cpp:3909:17 #28 0x18de9c37 in operator() /-S/ydb/core/tx/datashard/datashard_ut_volatile.cpp:17:1 #29 0x18de9c37 in __invoke<(lambda at /-S/ydb/core/tx/datashard/datashard_ut_volatile.cpp:17:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25 #30 0x18de9c37 in __call<(lambda at 
/-S/ydb/core/tx/datashard/datashard_ut_volatile.cpp:17:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5 #31 0x18de9c37 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12 #32 0x18de9c37 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10 #33 0x198001f5 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12 #34 0x198001f5 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10 #35 0x198001f5 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #36 0x197cfd48 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18 #37 0x18de8ae3 in NKikimr::NTestSuiteDataShardVolatile::TCurrentTest::Execute() /-S/ydb/core/tx/datashard/datashard_ut_volatile.cpp:17:1 #38 0x197d1615 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:495:19 #39 0x197fa76c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44 #40 0x7fcaf1828d8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) Indirect leak of 8 byte(s) in 1 object(s) allocated from: #0 0x1903ffcd in operator new(unsigned long) /-S/contrib/libs/clang18-rt/lib/asan/asan_new_delete.cpp:86:3 #1 0x19c7030e in CreateString > > /-S/contrib/libs/protobuf/src/google/protobuf/arenastring.cc:102:20 #2 0x19c7030e in void google::protobuf::internal::ArenaStringPtr::Set<>(TBasicString> const&, google::protobuf::Arena*) /-S/contrib/libs/protobuf/src/google/protobuf/arenastring.cc:150:38 #3 0x2ac770e5 in set_name > &> /-B/ydb/core/protos/query_stats.pb.h:1740:16 #4 0x2ac770e5 in SetName /-B/ydb/core/protos/query_stats.pb.h:696:56 #5 0x2ac770e5 in NKikimr::NDataShard::KqpFillTxStats(NKikimr::NDataShard::TDataShard&, NKikimr::NMiniKQL::TEngineHostCounters const&, NKikimrQueryStats::TTxStats&) /-S/ydb/core/tx/datashard/datashard_kqp.cpp:917:34 #6 0x2b086ab4 in NKikimr::NDataShard::TExecuteWriteUnit::Execute(TIntrusivePtr>, NKikimr::NTabletFlatExecutor::TTransactionContext&, NActors::TActorContext const&) /-S/ydb/core/tx/datashard/execute_write_unit.cpp:436:13 #7 0x2aedef1c in NKikimr::NDataShard::TPipeline::RunExecutionPlan(TIntrusivePtr>, TVector>&, NKikimr::NTabletFlatExecutor::TTransactionContext&, NActors::TActorContext const&) /-S/ydb/core/tx/datashard/datashard_pipeline.cpp:1851:28 #8 0x2ab9fc94 in NKikimr::NDataShard::TDataShard::TTxWrite::Execute(NKikimr::NTabletFlatExecutor::TTransactionContext&, NActors::TActorContext const&) /-S/ydb/core/tx/datashard/datashard__write.cpp:100:38 #9 0x2062a8d4 in NKikimr::NTabletFlatExecutor::TExecutor::ExecuteTransaction(NKikimr::NTabletFlatExecutor::TSeat*) /-S/ydb/core/tablet_flat/flat_executor.cpp:1910:35 #10 0x20626274 in NKikimr::NTabletFlatExecutor::TExecutor::DoExecute(TAutoPtr, NKikimr::NTabletFlatExecutor::TExecutor::ETxMode) /-S/ydb/core/tablet_flat/flat_executor.cpp:1814:13 #11 0x2062d46e in NKikimr::NTabletFlatExecutor::TExecutor::Execute(TAutoPtr, NActors::TActorContext const&) /-S/ydb/core/tablet_flat/flat_executor.cpp:1828:5 #12 0x205c3ada in Execute /-S/ydb/core/tablet_flat/tablet_flat_executed.cpp:62:46 #13 0x205c3ada in NKikimr::NTabletFlatExecutor::TTabletExecutedFlat::Execute(TAutoPtr, NActors::TActorContext const&) 
/-S/ydb/core/tablet_flat/tablet_flat_executed.cpp:57:5 #14 0x21c60941 in NKikimr::NDataShard::TDataShard::Handle(TAutoPtr, TDelete>&, NActors::TActorContext const&) /-S/ydb/core/tx/datashard/datashard.cpp:3415:21 #15 0x21c0932b in NKikimr::NDataShard::TDataShard::StateWork(TAutoPtr&) /-S/ydb/core/tx/datashard/datashard_impl.h:3157:13 #16 0x1a54e5cc in NActors::IActor::Receive(TAutoPtr&) /-S/ydb/library/actors/core/actor.cpp:280:13 #17 0x36c883a4 in NActors::TTestActorRuntimeBase::SendInternal(TAutoPtr, unsigned int, bool) /-S/ydb/library/actors/testlib/test_runtime.cpp:1702:33 #18 0x36c80c19 in NActors::TTestActorRuntimeBase::DispatchEventsInternal(NActors::TDispatchOptions const&, TInstant) /-S/ydb/library/actors/testlib/test_runtime.cpp:1295:45 #19 0x36c8af93 in NActors::TTestActorRuntimeBase::WaitForEdgeEvents(std::__y1::function&)>, TSet, std::__y1::allocator> const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.cpp:1554:22 #20 0x36e535b3 in NActors::TEvents::TEvWakeup::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEventIf(TSet, std::__y1::allocator> const&, std::__y1::function const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.h:477:13 #21 0x36e52b52 in GrabEdgeEvent /-S/ydb/library/actors/testlib/test_runtime.h:526:20 #22 0x36e52b52 in NActors::TEvents::TEvWakeup::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEvent(NActors::TActorId const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.h:532:20 #23 0x36e4adb2 in NActors::TEvents::TEvWakeup::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEventRethrow(NActors::TActorId const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.h:577:24 #24 0x36e4a98a in NActors::TTestActorRuntime::SimulateSleep(TDuration) /-S/ydb/core/testlib/actors/test_runtime.cpp:296:9 #25 0x18dd4393 in NKikimr::NTestSuiteDataShardVolatile::TTestCaseGracefulShardRestartNoEarlyReadSetAck::Execute_(NUnitTest::TTestContext&) /-S/ydb/core/tx/datashard/datashard_ut_volatile.cpp:3909:17 #26 0x18de9c37 in operator() /-S/ydb/core/tx/datashard/datashard_ut_volatile.cpp:17:1 #27 0x18de9c37 in __invoke<(lambda at /-S/ydb/core/tx/datashard/datashard_ut_volatile.cpp:17:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25 #28 0x18de9c37 in __call<(lambda at /-S/ydb/core/tx/datashard/datashard_ut_volatile.cpp:17:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5 #29 0x18de9c37 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12 #30 0x18de9c37 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10 #31 0x198001f5 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12 #32 0x198001f5 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10 #33 0x198001f5 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #34 0x197cfd48 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18 #35 0x18de8ae3 in NKikimr::NTestSuiteDataShardVolatile::TCurrentTest::Execute() /-S/ydb/core/tx/datashard/datashard_ut_volatile.cpp:17:1 #36 0x197d1615 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:495:19 #37 0x197fa76c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44 #38 0x7fcaf1828d8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) 
(BuildId: 490fef8403240c91833978d494d39e537409b92e) SUMMARY: AddressSanitizer: 664 byte(s) leaked in 11 allocation(s). |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TSimpleOltp [FAIL] Test command err: Trying to start YDB, gRPC: 25579, MsgBus: 18510 2025-03-31T17:28:22.463482Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488026813984578969:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:28:22.463648Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/gfke/0001df/r3tmp/tmpgSMbLI/pdisk_1.dat 2025-03-31T17:28:22.781571Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25579, node 1 2025-03-31T17:28:22.860807Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T17:28:22.860834Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T17:28:22.860844Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T17:28:22.860975Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T17:28:22.861353Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T17:28:22.861461Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T17:28:22.863247Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18510 TClient is connected to server localhost:18510 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T17:28:23.389590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T17:28:25.089899Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488026826869481507:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:28:25.089964Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488026826869481530:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:28:25.090005Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:28:25.094170Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T17:28:25.104319Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488026826869481534:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T17:28:25.202029Z node 1 :TX_PROXY ERROR: Actor# [1:7488026826869481585:2336] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T17:28:25.508847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T17:28:25.627768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T17:28:26.488178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T17:28:27.463682Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488026813984578969:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:28:27.463906Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T17:28:27.851416Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTFmY2VlOWYtOWQyNGY1ZTctNDQzNzI5NmEtYWI3ZTZjYjY=, ActorId: [1:7488026835459424683:2969], ActorState: ExecuteState, TraceId: 01jqpkyq8mbmbrfrhyvbcznbc8, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TSimple::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191FE23B
1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x196C634F
2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25: DoExecute @ 0x18DC0DB7
3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18D0BACA
4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:50: Execute_ @ 0x18DB7B72
5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18DBEF27
6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27
7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27
8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18DBEF27
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18DBEF27
10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196FD375
11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196FD375
12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196FD375
13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x196CCEC8
14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18DBE0F3
15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x196CE795
16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196F78EC
17. ??:0: ?? @ 0x7F3F0D93ED8F
18. ??:0: ?? @ 0x7F3F0D93EE3F
19. ??:0: ?? @ 0x163B4028
|92.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest
>> KqpSnapshotIsolation::TConflictReadWriteOlap
|92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest
|92.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest
|92.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest
|92.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest
|92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest
|92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest
|92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest
|93.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest
|93.0%| [TA] $(B)/ydb/core/tx/datashard/ut_volatile/test-results/unittest/{meta.json ... results_accumulator.log}
|93.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest
|93.2%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_volatile/test-results/unittest/{meta.json ...
results_accumulator.log} |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TConflictReadWriteOltpNoSink |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |93.8%| [TA] $(B)/ydb/services/ydb/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |93.9%| [TA] {RESULT} $(B)/ydb/services/ydb/ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TReadOnlyOltpNoSink [FAIL] Test command err: Trying to start YDB, gRPC: 8313, MsgBus: 17279 2025-03-31T17:28:22.262665Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488026813330883542:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:28:22.262715Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/gfke/0001e3/r3tmp/tmpJNqb28/pdisk_1.dat 2025-03-31T17:28:22.538939Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8313, node 1 2025-03-31T17:28:22.609346Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T17:28:22.609398Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T17:28:22.609412Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T17:28:22.609546Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T17:28:22.625333Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T17:28:22.625599Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T17:28:22.627773Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17279 TClient is connected to server localhost:17279 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T17:28:23.134784Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T17:28:24.901618Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488026821920818807:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:28:24.901687Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488026821920818796:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:28:24.901813Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:28:24.906152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T17:28:24.916930Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488026821920818810:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T17:28:24.999594Z node 1 :TX_PROXY ERROR: Actor# [1:7488026821920818861:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T17:28:25.231150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T17:28:25.340233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T17:28:26.212799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T17:28:27.263189Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488026813330883542:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:28:27.263457Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T17:28:27.628597Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjdjNGQyNzMtZTdmOTVkNmMtZjBmZDc1MGQtMWRjNzQ5NjQ=, ActorId: [1:7488026834805729249:2967], ActorState: ExecuteState, TraceId: 01jqpkyq193ztf8vr2ftwsac6q, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TReadOnly::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191FE23B
1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x196C634F
2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185: DoExecute @ 0x18DD66A3
3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18D0BACA
4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:221: Execute_ @ 0x18DB911A
5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18DBEF27
6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27
7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27
8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18DBEF27
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18DBEF27
10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196FD375
11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196FD375
12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196FD375
13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x196CCEC8
14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18DBE0F3
15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x196CE795
16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196F78EC
17. ??:0: ?? @ 0x7F5E1197AD8F
18. ??:0: ?? @ 0x7F5E1197AE3F
19. ??:0: ?? @ 0x163B4028
>> KqpStats::SysViewClientLost
|94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest
|94.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest
|94.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest
|94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest
|94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_backup/unittest
|94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest
|94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest
|94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest
|94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest
|94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest
|94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest
|94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest
|94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest
|94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest
|95.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest
|95.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest
|95.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest
|95.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest
|95.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest
|95.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest
|95.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >>
IncrementalBackup::ComplexRestoreBackupCollection+WithIncremental |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_backup/unittest |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> test_sql_streaming.py::test[suites-ReadTopicWithMetadataWithFilter-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-ReadTopicWithSchema-default.txt] >> test_tpch_import.py::TestS3TpchImport::test_import_and_export >> test_sql_streaming.py::test[suites-GroupByHoppingWindowTimeExtractorUnusedColumns-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHoppingWithDataWatermarks-default.txt] >> test_sql_streaming.py::test[suites-GroupByHopWithDataWatermarks-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHoppingWindow-default.txt] >> alter_compression.py::TestAlterCompression::test_all_supported_compression ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::OlapWriteRow Test command err: Trying to start YDB, gRPC: 7832, MsgBus: 5379 2025-03-31T17:28:29.372125Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488026844285833513:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:28:29.372425Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/gfke/0001e8/r3tmp/tmpRR2pZm/pdisk_1.dat 2025-03-31T17:28:29.771945Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7832, node 1 2025-03-31T17:28:29.777672Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T17:28:29.777823Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T17:28:29.779403Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T17:28:29.814685Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T17:28:29.814728Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T17:28:29.814742Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T17:28:29.814940Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5379 TClient is connected to server localhost:5379 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T17:28:30.265736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T17:28:30.289908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T17:28:30.432274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T17:28:30.560052Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T17:28:30.633189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T17:28:32.066171Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488026857170737171:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:28:32.066299Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:28:32.362349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T17:28:32.398595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T17:28:32.427110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T17:28:32.455764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T17:28:32.486938Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T17:28:32.520688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T17:28:32.599753Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488026857170737685:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:28:32.599890Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:28:32.599984Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488026857170737690:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:28:32.603896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T17:28:32.614037Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488026857170737692:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-31T17:28:32.676952Z node 1 :TX_PROXY ERROR: Actor# [1:7488026857170737745:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T17:28:33.582841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-31T17:28:33.731875Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7488026861465705477:2505];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T17:28:33.732132Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7488026861465705477:2505];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T17:28:33.732441Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7488026861465705477:2505];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T17:28:33.732593Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7488026861465705477:2505];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T17:28:33.732719Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7488026861465705477:2505];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T17:28:33.732871Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7488026861465705477:2505];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T17:28:33.733009Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7488026861465705477:2505];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T17:28:33.733129Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7488026861465705477:2505];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T17:28:33.733244Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7488026861465705477:2505];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T17:28:33.733399Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7488026861465705477:2505];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T17:28:33.733520Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037920;self_id=[1:7488026861465705477:2505];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T17:28:33.733629Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7488026861465705477:2505];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T17:28:33.736258Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;self_id=[1:7488026861465705521:2510];tablet_id=72075186224037924;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T17:28:33.736395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;self_id=[1:7488026861465705521:2510];tablet_id=72075186224037924;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T17:28:33.736667Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;self_id=[1:7488026861465705521:2510];tablet_id=72075186224037924;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T17:28:33.736787Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;self_id=[1:7488026861465705521:2510];tablet_id=72075186224037924;process=TTxInitSchema::Execute;fline ... tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-31T17:28:33.926177Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-31T17:28:33.926377Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-31T17:28:33.926422Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-31T17:28:33.926529Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-31T17:28:33.926615Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-31T17:28:33.926696Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-31T17:28:33.926738Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-31T17:28:33.926785Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-31T17:28:33.926820Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-31T17:28:33.927513Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-31T17:28:33.927558Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-31T17:28:33.927772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-31T17:28:33.927807Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-31T17:28:33.927963Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-31T17:28:33.928001Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-31T17:28:33.928177Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-31T17:28:33.928213Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-31T17:28:33.928316Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-31T17:28:33.928335Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-31T17:28:33.961221Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037926;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-31T17:28:33.961661Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-31T17:28:33.967584Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037919;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-31T17:28:33.968415Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037921;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-31T17:28:33.971968Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-31T17:28:33.974776Z node 1 
:TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037925;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-31T17:28:33.976477Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-31T17:28:33.980997Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037922;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-31T17:28:33.982769Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037920;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-31T17:28:33.988960Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037924;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-31T17:28:34.099416Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=281474976710673;tx_id=281474976710673;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710673; 2025-03-31T17:28:34.099461Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037928;self_id=[1:7488026861465705531:2512];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037928;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037923;receive=72075186224037927; 2025-03-31T17:28:34.099863Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=281474976710673;tx_id=281474976710673;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710673; 2025-03-31T17:28:34.100003Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=281474976710673;tx_id=281474976710673;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710673; 2025-03-31T17:28:34.182620Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=281474976710675;tx_id=281474976710675;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710675; query_phases { duration_us: 9015 cpu_time_us: 2242 affected_shards: 1 } query_phases { duration_us: 6115 cpu_time_us: 348 affected_shards: 1 } compilation { duration_us: 58274 cpu_time_us: 55196 } process_cpu_time_us: 663 total_duration_us: 76164 total_cpu_time_us: 58449 AddressSanitizer:DEADLYSIGNAL ================================================================= ==1001511==ERROR: AddressSanitizer: SEGV on unknown address 0x000000000008 (pc 0x000018b718ad bp 0x7ffdd11de4e0 sp 0x7ffdd11de340 T0) ==1001511==The signal is caused by a READ memory access. ==1001511==Hint: address points to the zero page. 
2025-03-31T17:28:34.372526Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488026844285833513:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:28:34.372726Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; #0 0x18b718ad in Get::TypeHandler> /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:273:31 #1 0x18b718ad in Get /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:1348:32 #2 0x18b718ad in _internal_table_access /-B/ydb/public/api/protos/ydb_query_stats.pb.h:1762:31 #3 0x18b718ad in table_access /-B/ydb/public/api/protos/ydb_query_stats.pb.h:1766:10 #4 0x18b718ad in NKikimr::NKqp::NTestSuiteKqpCost::TTestCaseOlapWriteRow::Execute_(NUnitTest::TTestContext&) /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:636:13 #5 0x18b96437 in operator() /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1 #6 0x18b96437 in __invoke<(lambda at /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25 #7 0x18b96437 in __call<(lambda at /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5 #8 0x18b96437 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12 #9 0x18b96437 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10 #10 0x194e85e5 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12 #11 0x194e85e5 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10 #12 0x194e85e5 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #13 0x194b8138 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18 #14 0x18b952e3 in NKikimr::NKqp::NTestSuiteKqpCost::TCurrentTest::Execute() /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1 #15 0x194b9a05 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:495:19 #16 0x194e2b5c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44 #17 0x7fe63034cd8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) #18 0x7fe63034ce3f in __libc_start_main (/lib/x86_64-linux-gnu/libc.so.6+0x29e3f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) #19 0x16306028 in _start (/home/runner/.ya/build/build_root/gfke/0001e8/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost+0x16306028) (BuildId: 5128b0b237a145c7e4cbd2034645258e10874b06) AddressSanitizer can not provide additional info. 
SUMMARY: AddressSanitizer: SEGV /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:273:31 in Get::TypeHandler> ==1001511==ABORTING |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false] >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true] >> test_sql_streaming.py::test[suites-GroupByHop-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHopByStringKey-default.txt] |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test |96.1%| [TA] $(B)/ydb/core/kqp/ut/cost/test-results/unittest/{meta.json ... results_accumulator.log} |96.1%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/cost/test-results/unittest/{meta.json ... results_accumulator.log} |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_insert.py::TestInsert::test[read_data_during_bulk_upsert] |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> Viewer::QueryExecuteScript [GOOD] |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::QueryExecuteScript [GOOD] Test command err: 2025-03-31T17:28:32.652469Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488026854007114555:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:28:32.652638Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-03-31T17:28:33.009641Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T17:28:33.062957Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T17:28:33.063060Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T17:28:33.065087Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26741, node 1 2025-03-31T17:28:33.117028Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T17:28:33.117052Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T17:28:33.117064Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T17:28:33.117200Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63640 WaitRootIsUp 'Root'... 
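Note on the KqpCost::OlapWriteRow crash reported above: the ASan report shows a READ at address 0x000000000008 (the zero page) inside RepeatedPtrField Get(), reached through QueryPhaseStats::table_access() at kqp_cost_ut.cpp:636. That signature is consistent with indexing an empty repeated field of the query-stats protobuf. A minimal sketch of the unguarded pattern and its guarded form, assuming only the message layout of ydb_query_stats.proto (the function and variable names below are illustrative, not the test's actual code):

#include <ydb/public/api/protos/ydb_query_stats.pb.h>

// Hypothetical helper that walks query stats the way the crashing test appears to.
void InspectTableAccess(const Ydb::TableStats::QueryStats& stats) {
    for (const auto& phase : stats.query_phases()) {
        // Unsafe: phase.table_access(0) on an empty repeated field is undefined
        // behavior; under ASan it surfaces exactly as above, a READ near nullptr
        // inside repeated_ptr_field.h Get().
        // Guard with the generated *_size() accessor first:
        if (phase.table_access_size() > 0) {
            const auto& access = phase.table_access(0);
            (void)access; // inspect access.name(), access.reads(), etc.
        }
    }
}
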
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T17:28:33.385270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T17:28:33.410944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-31T17:28:33.413759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T17:28:35.708524Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-31T17:28:35.708587Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-31T17:28:35.942255Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488026866892017144:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:28:35.942352Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:28:35.942534Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488026866892017157:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:28:35.946582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-03-31T17:28:35.961824Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488026866892017159:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-03-31T17:28:36.068018Z node 1 :TX_PROXY ERROR: Actor# [1:7488026871186984507:2358] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T17:28:36.353431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T17:28:36.463924Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-31T17:28:36.463962Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-31T17:28:36.582034Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-31T17:28:36.582073Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-31T17:28:36.641243Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-31T17:28:36.641285Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-31T17:28:36.725698Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-31T17:28:36.725738Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-31T17:28:36.781807Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-31T17:28:36.781842Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-31T17:28:36.836500Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-31T17:28:36.836531Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-31T17:28:36.888101Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-31T17:28:36.888149Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-31T17:28:36.942234Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-31T17:28:36.942285Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-31T17:28:37.001900Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-31T17:28:37.001935Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-31T17:28:37.056363Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-31T17:28:37.056404Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-31T17:28:37.105939Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-31T17:28:37.105964Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-31T17:28:37.149954Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-31T17:28:37.149990Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-31T17:28:37.201137Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-31T17:28:37.201175Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-31T17:28:37.254113Z node 1 :TICKET_PARSER INFO: Ticket 
parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-31T17:28:37.254140Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-31T17:28:37.305607Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-31T17:28:37.305640Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-31T17:28:37.361536Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-31T17:28:37.361568Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-31T17:28:37.366353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-31T17:28:37.387836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-31T17:28:37.389260Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-03-31T17:28:37.652675Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488026854007114555:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:28:37.652736Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T17:28:38.654429Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-31T17:28:38.654476Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-31T17:28:39.213162Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-31T17:28:39.213202Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-31T17:28:39.563880Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-31T17:28:39.563923Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-31T17:28:40.060386Z node 1 :RPC_REQUEST WARN: Client lost 2025-03-31T17:28:40.063406Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743442120091, txId: 281474976710691] shutting down |96.6%| [TA] $(B)/ydb/core/viewer/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.6%| [TA] {RESULT} $(B)/ydb/core/viewer/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpSnapshotIsolation::TSimpleOltpNoSink [FAIL] >> KqpSnapshotIsolation::TConflictWriteOltp [FAIL] >> KqpSnapshotIsolation::TReadOnlyOltp [FAIL] >> KqpSnapshotIsolation::TConflictWriteOltpNoSink [FAIL] >> test_workload.py::TestYdbLogWorkload::test[column] |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> KqpSnapshotIsolation::TConflictReadWriteOltp [FAIL] >> KqpSnapshotIsolation::TConflictReadWriteOltpNoSink [FAIL] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TSimpleOltpNoSink [FAIL] Test command err: Trying to start YDB, gRPC: 31038, MsgBus: 4081 2025-03-31T17:28:34.443668Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488026862873339774:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:28:34.443720Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/gfke/0001d2/r3tmp/tmpFS2FzD/pdisk_1.dat 2025-03-31T17:28:34.875595Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31038, node 1 2025-03-31T17:28:34.915507Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T17:28:34.915621Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T17:28:34.920217Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T17:28:34.980161Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T17:28:34.980216Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T17:28:34.980233Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T17:28:34.980398Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4081 TClient is connected to server localhost:4081 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T17:28:35.521841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T17:28:35.535832Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T17:28:37.389596Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488026875758242311:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:28:37.389670Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488026875758242330:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:28:37.389709Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:28:37.394061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T17:28:37.403624Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488026875758242340:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T17:28:37.496382Z node 1 :TX_PROXY ERROR: Actor# [1:7488026875758242393:2337] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T17:28:37.737861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T17:28:37.838706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T17:28:38.635635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T17:28:39.444125Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488026862873339774:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:28:39.444225Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T17:28:39.870742Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZmY0NzQyZTItOTJjNjg4OWQtZDc3N2U1MjktYWY3ZjViY2E=, ActorId: [1:7488026884348185608:2968], ActorState: ExecuteState, TraceId: 01jqpkz30md4js2fke32b9echt, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TSimple::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191FE23B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x196C634F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25: DoExecute @ 0x18DC0DB7 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18D0BACA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:57: Execute_ @ 0x18DB7D9A 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18DBEF27 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18DBEF27 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18DBEF27 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196FD375 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196FD375 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196FD375 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x196CCEC8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18DBE0F3 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x196CE795 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196F78EC 17. ??:0: ?? @ 0x7F1AC57D0D8F 18. ??:0: ?? @ 0x7F1AC57D0E3F 19. ??:0: ?? @ 0x163B4028 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TConflictWriteOltpNoSink [FAIL] Test command err: Trying to start YDB, gRPC: 19003, MsgBus: 64694 2025-03-31T17:28:34.667940Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488026864947412295:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:28:34.668448Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/gfke/0001cb/r3tmp/tmpvc9OVq/pdisk_1.dat 2025-03-31T17:28:35.114543Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T17:28:35.153641Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T17:28:35.153737Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 19003, node 1 2025-03-31T17:28:35.156000Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T17:28:35.239605Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T17:28:35.239630Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T17:28:35.239637Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T17:28:35.239760Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64694 TClient is connected to server localhost:64694 
WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T17:28:35.788236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T17:28:35.801704Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T17:28:37.650806Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488026877832314832:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:28:37.650920Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488026877832314857:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:28:37.650962Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:28:37.654543Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T17:28:37.661148Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488026877832314861:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T17:28:37.732018Z node 1 :TX_PROXY ERROR: Actor# [1:7488026877832314914:2338] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T17:28:37.999300Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T17:28:38.087074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T17:28:38.855466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T17:28:39.664254Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488026864947412295:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:28:39.665260Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T17:28:39.977847Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OWVjZTZmODUtYmQzMDhiYzMtNzc2ODMzZGMtZWEzNGVkODU=, ActorId: [1:7488026886422257945:2968], ActorState: ExecuteState, TraceId: 01jqpkz36d29h7kh7ec0vaft8y, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191FE23B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x196C634F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76: DoExecute @ 0x18DC7347 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18D0BACA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:112: Execute_ @ 0x18DB841A 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18DBEF27 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18DBEF27 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18DBEF27 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196FD375 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196FD375 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196FD375 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x196CCEC8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18DBE0F3 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x196CE795 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196F78EC 17. ??:0: ?? @ 0x7F10BF78AD8F 18. ??:0: ?? @ 0x7F10BF78AE3F 19. ??:0: ?? @ 0x163B4028 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TReadOnlyOltp [FAIL] Test command err: Trying to start YDB, gRPC: 6350, MsgBus: 21750 2025-03-31T17:28:34.756770Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488026866096985783:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:28:34.759554Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/gfke/0001c9/r3tmp/tmpsbCF8u/pdisk_1.dat 2025-03-31T17:28:35.165426Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T17:28:35.170140Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T17:28:35.170262Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T17:28:35.173122Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6350, node 1 2025-03-31T17:28:35.259616Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T17:28:35.259641Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T17:28:35.259648Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T17:28:35.259765Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21750 TClient is connected to server localhost:21750 WaitRootIsUp 
'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T17:28:35.849475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T17:28:35.884258Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T17:28:37.845889Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488026878981888306:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:28:37.845889Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488026878981888324:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:28:37.846010Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:28:37.849563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T17:28:37.858297Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488026878981888327:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T17:28:37.956042Z node 1 :TX_PROXY ERROR: Actor# [1:7488026878981888378:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T17:28:38.227339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T17:28:38.329972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T17:28:39.206013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T17:28:39.890909Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488026866096985783:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:28:39.904078Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T17:28:40.479045Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=N2Q0NjJjZjItM2UyYTg3MzYtZTU4NGIxNS1hMjI1ZmRjNg==, ActorId: [1:7488026891866798755:2969], ActorState: ExecuteState, TraceId: 01jqpkz3k3017hxhezwmr9hh1q, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TReadOnly::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191FE23B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x196C634F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185: DoExecute @ 0x18DD66A3 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18D0BACA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:214: Execute_ @ 0x18DB8EF2 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18DBEF27 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18DBEF27 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18DBEF27 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196FD375 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196FD375 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196FD375 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x196CCEC8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18DBE0F3 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x196CE795 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196F78EC 17. ??:0: ?? @ 0x7FCFB7953D8F 18. ??:0: ?? @ 0x7FCFB7953E3F 19. ??:0: ?? @ 0x163B4028 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TConflictWriteOltp [FAIL] Test command err: Trying to start YDB, gRPC: 7494, MsgBus: 27570 2025-03-31T17:28:34.518787Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488026864639916642:2264];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:28:34.518958Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/gfke/0001d3/r3tmp/tmpWAhwDu/pdisk_1.dat 2025-03-31T17:28:34.955981Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T17:28:34.959783Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T17:28:34.959940Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T17:28:34.964140Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7494, node 1 2025-03-31T17:28:35.031682Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T17:28:35.031708Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T17:28:35.031734Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T17:28:35.031877Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27570 TClient is connected to server localhost:27570 
WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T17:28:35.567684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T17:28:35.581118Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T17:28:37.570969Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488026877524818972:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:28:37.571066Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488026877524818993:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:28:37.571153Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:28:37.574961Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T17:28:37.583486Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488026877524819002:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T17:28:37.658981Z node 1 :TX_PROXY ERROR: Actor# [1:7488026877524819053:2337] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T17:28:37.939548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T17:28:38.056048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T17:28:38.889380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T17:28:39.573538Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488026864639916642:2264];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:28:39.583327Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T17:28:40.019695Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NWM3MDg0YWQtZjI2MjYyNTYtNzRkYjllNC00ZmRiYTM5YQ==, ActorId: [1:7488026886114762206:2969], ActorState: ExecuteState, TraceId: 01jqpkz38163xbm53b1mqrps5j, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191FE23B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x196C634F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76: DoExecute @ 0x18DC7347 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18D0BACA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:105: Execute_ @ 0x18DB81F2 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18DBEF27 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18DBEF27 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18DBEF27 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196FD375 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196FD375 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196FD375 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x196CCEC8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18DBE0F3 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x196CE795 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196F78EC 17. ??:0: ?? @ 0x7F1DB2B2ED8F 18. ??:0: ?? @ 0x7F1DB2B2EE3F 19. ??:0: ?? @ 0x163B4028 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TConflictReadWriteOltp [FAIL] Test command err: Trying to start YDB, gRPC: 25420, MsgBus: 17942 2025-03-31T17:28:35.297003Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488026868154844889:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:28:35.297332Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/gfke/0001c5/r3tmp/tmpVWrilv/pdisk_1.dat 2025-03-31T17:28:35.679590Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T17:28:35.718905Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T17:28:35.719018Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 25420, node 1 2025-03-31T17:28:35.720904Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T17:28:35.762950Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T17:28:35.762981Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T17:28:35.762991Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T17:28:35.763153Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17942 TClient is connected to server localhost:17942 
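The ", with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)" fragment that follows each of these assertions is a character-level merge of the two status strings: text outside parentheses is shared by both values, and each "(left|right)" group holds the left operand of the failed comparison on the left (the status actually returned) and the right operand on the right (the expected one). A small standalone decoder for the notation, as an illustration only, not the unittest library's own code:

    #include <iostream>
    #include <string>
    #include <utility>

    // Illustration only (not library/cpp/testing/unittest internals): expand
    // the "(actual|expected)" inline-diff notation printed by failed
    // assertions, e.g. "(PRE|SUC)C(ONDITION_FAIL|)E(D|SS)".
    std::pair<std::string, std::string> ExpandDiff(const std::string& diff) {
        std::string actual, expected;
        for (std::size_t i = 0; i < diff.size(); ++i) {
            if (diff[i] == '(') {
                std::size_t bar = diff.find('|', i);
                std::size_t close = diff.find(')', bar);
                actual += diff.substr(i + 1, bar - i - 1);    // left operand
                expected += diff.substr(bar + 1, close - bar - 1);  // right
                i = close;  // the loop increment then skips past ')'
            } else {
                actual += diff[i];    // shared character
                expected += diff[i];
            }
        }
        return {actual, expected};
    }

    int main() {
        auto [actual, expected] = ExpandDiff("(PRE|SUC)C(ONDITION_FAIL|)E(D|SS)");
        std::cout << actual << " != " << expected << '\n';
        // Prints: PRECONDITION_FAILED != SUCCESS
    }

The same reading applies to the later KqpSinkTx diff "(BAD_|P)RE(QUES|CONDI)T(|ION_FAILED)", which expands to BAD_REQUEST on the left and PRECONDITION_FAILED on the right.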
WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T17:28:36.267919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T17:28:38.079191Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488026881039747445:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:28:38.079274Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488026881039747437:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:28:38.079565Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:28:38.083300Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T17:28:38.091214Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488026881039747451:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T17:28:38.170046Z node 1 :TX_PROXY ERROR: Actor# [1:7488026881039747502:2336] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T17:28:38.416454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T17:28:38.530256Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T17:28:39.361564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T17:28:40.297369Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488026868154844889:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:28:40.297446Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T17:28:40.590789Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NWQzNmI3YjAtOWJlYzA5YmItNTRlZjQ4YjMtNmM1YzUyNWQ=, ActorId: [1:7488026889629690651:2969], ActorState: ExecuteState, TraceId: 01jqpkz3s1ecfrg2dahgfwe52h, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictReadWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191FE23B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x196C634F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131: DoExecute @ 0x18DCECE7 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18D0BACA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:159: Execute_ @ 0x18DB8872 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18DBEF27 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18DBEF27 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18DBEF27 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196FD375 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196FD375 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196FD375 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x196CCEC8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18DBE0F3 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x196CE795 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196F78EC 17. ??:0: ?? @ 0x7F479D443D8F 18. ??:0: ?? @ 0x7F479D443E3F 19. ??:0: ?? @ 0x163B4028 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TConflictReadWriteOltpNoSink [FAIL] Test command err: Trying to start YDB, gRPC: 7495, MsgBus: 16997 2025-03-31T17:28:35.826689Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488026867478099333:2203];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:28:35.827768Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/gfke/0001bd/r3tmp/tmp6A3yMi/pdisk_1.dat 2025-03-31T17:28:36.172217Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7495, node 1 2025-03-31T17:28:36.213952Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T17:28:36.214061Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T17:28:36.216289Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T17:28:36.263712Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T17:28:36.263738Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T17:28:36.263748Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T17:28:36.263877Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16997 TClient is connected to server localhost:16997 
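The KQP_WORKLOAD_SERVICE warnings that open every block ("Resource pool default not found or you don't have access permissions", then "Transaction ... completed, doublechecking", then TX_PROXY's "path exist, request accepts it") are the workload manager bootstrapping its default resource pool at cluster start, not part of the failures: the fetch misses, the create races with a concurrent creator, and an already-existing pool path is accepted as success. A simplified fetch-or-create sketch with hypothetical types (the real flow is actor-based, via TPoolFetcherActor and TPoolCreatorActor, not a blocking loop):

    #include <chrono>
    #include <functional>
    #include <thread>

    // Hypothetical outcome codes; "AlreadyExists" corresponds to the log's
    // "path exist, request accepts it" TX_PROXY message.
    enum class EPoolOp { Found, NotFound, Created, AlreadyExists, Retryable };

    // Hypothetical client interface; only the two operations the bootstrap
    // sequence in the log exercises are modeled.
    struct TPoolClient {
        std::function<EPoolOp()> Fetch;
        std::function<EPoolOp()> Create;
    };

    bool EnsureDefaultPool(const TPoolClient& client, int maxAttempts = 3) {
        for (int attempt = 0; attempt < maxAttempts; ++attempt) {
            if (client.Fetch() == EPoolOp::Found) {
                return true;  // pool is ready, nothing logged
            }
            // Fetch missed: the NOT_FOUND warning in the log, so create it.
            switch (client.Create()) {
                case EPoolOp::Created:
                case EPoolOp::AlreadyExists:  // a concurrent creator won the race
                    return true;
                case EPoolOp::Retryable:      // the "Scheduled retry for error" path
                    std::this_thread::sleep_for(std::chrono::milliseconds(100));
                    break;
                default:
                    return false;
            }
        }
        return false;
    }

Because both Created and AlreadyExists end in success, these WARN/ERROR lines recur at every test's startup and can be read past when hunting the actual assertion failures.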
WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T17:28:36.753081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T17:28:36.767816Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T17:28:38.489862Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488026880363001738:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:28:38.489966Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488026880363001747:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:28:38.490038Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:28:38.494014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T17:28:38.504051Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488026880363001752:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T17:28:38.559427Z node 1 :TX_PROXY ERROR: Actor# [1:7488026880363001803:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T17:28:38.820337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T17:28:38.938045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-31T17:28:39.856636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T17:28:40.823746Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488026867478099333:2203];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:28:40.823834Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-31T17:28:41.107069Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Zjk4MTg4ZjAtNDFhYmZmOTAtNjczNDgyM2QtMmY4YzA0YmQ=, ActorId: [1:7488026893247912289:2968], ActorState: ExecuteState, TraceId: 01jqpkz4ab17znk5n3carh3t63, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictReadWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191FE23B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x196C634F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131: DoExecute @ 0x18DCECE7 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18D0BACA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:166: Execute_ @ 0x18DB8A9A 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18DBEF27 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18DBEF27 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18DBEF27 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196FD375 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196FD375 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196FD375 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x196CCEC8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18DBE0F3 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x196CE795 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196F78EC 17. ??:0: ?? @ 0x7FB04F7F6D8F 18. ??:0: ?? @ 0x7FB04F7F6E3F 19. ??:0: ?? @ 0x163B4028 >> KqpSinkTx::OlapInvalidateOnError [FAIL] >> Replication::PauseAndResumeReplication >> KqpSnapshotIsolation::TConflictReadWriteOlap [FAIL] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::OlapInvalidateOnError [FAIL] Test command err: Trying to start YDB, gRPC: 6519, MsgBus: 10462 2025-03-31T17:28:34.167940Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488026865184629508:2203];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:28:34.169037Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/gfke/0001d5/r3tmp/tmpvWsp52/pdisk_1.dat 2025-03-31T17:28:34.548339Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T17:28:34.587761Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T17:28:34.587886Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 6519, node 1 2025-03-31T17:28:34.589867Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T17:28:34.635643Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T17:28:34.635666Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T17:28:34.635673Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T17:28:34.635802Z node 1 :NET_CLASSIFIER ERROR: got bad 
distributable configuration TClient is connected to server localhost:10462 TClient is connected to server localhost:10462 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T17:28:35.210397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T17:28:35.227571Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T17:28:37.157867Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488026878069531921:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:28:37.157958Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488026878069531912:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:28:37.158102Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:28:37.161993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T17:28:37.171313Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488026878069531926:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T17:28:37.239744Z node 1 :TX_PROXY ERROR: Actor# [1:7488026878069531979:2337] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T17:28:37.450740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T17:28:37.575498Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488026878069532172:2345];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T17:28:37.575511Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488026878069532170:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T17:28:37.575672Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488026878069532172:2345];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T17:28:37.575910Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488026878069532172:2345];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T17:28:37.576035Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488026878069532172:2345];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T17:28:37.576117Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488026878069532172:2345];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T17:28:37.576187Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488026878069532170:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T17:28:37.576243Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488026878069532172:2345];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T17:28:37.576345Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488026878069532172:2345];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T17:28:37.576429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488026878069532170:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T17:28:37.576457Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7488026878069532172:2345];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T17:28:37.576568Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488026878069532172:2345];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T17:28:37.576590Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488026878069532170:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T17:28:37.576691Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488026878069532172:2345];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T17:28:37.576717Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488026878069532170:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T17:28:37.576790Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488026878069532172:2345];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T17:28:37.576817Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488026878069532170:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T17:28:37.576865Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7488026878069532172:2345];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T17:28:37.576912Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488026878069532170:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T17:28:37.577058Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488026878069532170:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T17:28:37.577147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488026878069532170:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T17:28:37.577225Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488026878069532170:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T17:28:37.577342Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7488026878069532170:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T17:28:37.577416Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224037890;self_id=[1:7488026878069532170:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T17:28:37.606458Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488026878069532184:2349];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T17:28:37.606513Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7488026878069532184:2349];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstr ... t_id=72075186224038021;self_id=[1:7488026895249407031:3219];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038021;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:47.394734Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038046;self_id=[1:7488026895249406891:3141];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038046;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:47.395056Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038021;self_id=[1:7488026895249407031:3219];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038021;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:47.395209Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038046;self_id=[1:7488026895249406891:3141];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038046;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:47.397805Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038005;self_id=[1:7488026895249407742:3303];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038005;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:47.398038Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038019;self_id=[1:7488026895249406938:3161];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038019;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:47.398485Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038019;self_id=[1:7488026895249406938:3161];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038019;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:47.398506Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038005;self_id=[1:7488026895249407742:3303];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038005;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:47.401130Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038031;self_id=[1:7488026895249407066:3246];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038031;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:47.401488Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038031;self_id=[1:7488026895249407066:3246];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038031;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:47.401620Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038034;self_id=[1:7488026895249406944:3165];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038034;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:47.401806Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038034;self_id=[1:7488026895249406944:3165];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038034;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:47.404717Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038004;self_id=[1:7488026895249407349:3295];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038004;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:47.407916Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038004;self_id=[1:7488026895249407349:3295];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038004;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:47.409543Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038045;self_id=[1:7488026895249407133:3288];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038045;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:47.409799Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038045;self_id=[1:7488026895249407133:3288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038045;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:47.449356Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038060;self_id=[1:7488026895249406855:3132];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038060;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:47.449661Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038060;self_id=[1:7488026895249406855:3132];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038060;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:47.688413Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488026882364500601:2474];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037902;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:47.688915Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7488026882364500601:2474];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037902;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:47.690474Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037984;self_id=[1:7488026882364500766:2491];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037984;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:47.690648Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037984;self_id=[1:7488026882364500766:2491];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037984;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:47.952590Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037991;self_id=[1:7488026882364500609:2476];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037991;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:47.953168Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224037991;self_id=[1:7488026882364500609:2476];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037991;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:47.991504Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7488026878069532591:2460];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037898;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:47.991736Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7488026878069532591:2460];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037898;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:47.992002Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037962;self_id=[1:7488026882364500495:2469];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037962;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:47.992151Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037962;self_id=[1:7488026882364500495:2469];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037962;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:47.996186Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037965;self_id=[1:7488026882364500806:2504];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037965;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:47.996454Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037965;self_id=[1:7488026882364500806:2504];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037965;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:47.996638Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037985;self_id=[1:7488026882364500640:2481];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037985;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:47.996788Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037985;self_id=[1:7488026882364500640:2481];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037985;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:48.024396Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037968;self_id=[1:7488026882364500781:2496];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037968;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:48.024674Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037968;self_id=[1:7488026882364500781:2496];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037968;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:48.040249Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037914;self_id=[1:7488026882364500573:2471];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037914;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:48.040482Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037914;self_id=[1:7488026882364500573:2471];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037914;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; assertion failed at 
ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:182, virtual void NKikimr::NKqp::NTestSuiteKqpSinkTx::TInvalidateOnError::DoExecute(): (result.GetStatus() == EStatus::PRECONDITION_FAILED) failed: (BAD_REQUEST != PRECONDITION_FAILED)
: Error: Bad request. Table: `/Root/KV`., code: 2017
: Error: Conflict with existing key. {"sorting_columns":[{"name":"Key","value":"1"}],"fields":["Key: uint32"]}, code: 2017 , with diff: (BAD_|P)RE(QUES|CONDI)T(|ION_FAILED) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191FE23B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x196C634F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:182: DoExecute @ 0x18D9B8AE 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18D0BACA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:201: Execute_ @ 0x18D7A4FA 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14: operator() @ 0x18D81977 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14:1) &> @ 0x18D81977 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14:1) &> @ 0x18D81977 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18D81977 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18D81977 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196FD375 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196FD375 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196FD375 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x196CCEC8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14: Execute @ 0x18D80B43 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x196CE795 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196F78EC 17. ??:0: ?? @ 0x7FC173FF5D8F 18. ??:0: ?? @ 0x7FC173FF5E3F 19. ??:0: ?? @ 0x163B4028 >> test_sql_streaming.py::test[suites-ReadTopicWithSchema-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-ReadTwoTopics-default.txt] >> KqpSnapshotIsolation::TConflictWriteOlap [FAIL] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TConflictReadWriteOlap [FAIL] Test command err: Trying to start YDB, gRPC: 15140, MsgBus: 7657 2025-03-31T17:28:35.586338Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488026866960463379:2137];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:28:35.595384Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/gfke/0001c3/r3tmp/tmpQugYZE/pdisk_1.dat 2025-03-31T17:28:35.963656Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15140, node 1 2025-03-31T17:28:36.028466Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T17:28:36.029312Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T17:28:36.033067Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T17:28:36.071994Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T17:28:36.072023Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T17:28:36.072032Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty 
maybe) 2025-03-31T17:28:36.072146Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7657 TClient is connected to server localhost:7657 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T17:28:36.592306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-31T17:28:36.609712Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T17:28:38.391904Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488026879845365852:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:28:38.391985Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488026879845365864:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:28:38.392037Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:28:38.395778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T17:28:38.403677Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488026879845365867:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T17:28:38.498385Z node 1 :TX_PROXY ERROR: Actor# [1:7488026879845365918:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T17:28:38.725757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T17:28:38.868724Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488026879845366102:2344];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T17:28:38.868913Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488026879845366102:2344];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T17:28:38.869199Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488026879845366102:2344];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T17:28:38.869307Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488026879845366102:2344];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T17:28:38.869394Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488026879845366102:2344];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T17:28:38.869489Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488026879845366102:2344];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T17:28:38.869579Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488026879845366102:2344];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T17:28:38.869732Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488026879845366102:2344];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T17:28:38.869848Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488026879845366102:2344];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T17:28:38.869937Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488026879845366102:2344];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T17:28:38.870033Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7488026879845366102:2344];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T17:28:38.870129Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7488026879845366102:2344];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T17:28:38.870285Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488026879845366111:2346];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T17:28:38.870359Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488026879845366111:2346];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T17:28:38.870555Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488026879845366111:2346];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T17:28:38.870709Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488026879845366111:2346];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T17:28:38.870838Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488026879845366111:2346];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T17:28:38.870983Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488026879845366111:2346];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T17:28:38.871128Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488026879845366111:2346];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T17:28:38.871277Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488026879845366111:2346];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T17:28:38.871396Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488026879845366111:2346];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T17:28:38.871528Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488026879845366111:2346];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T17:28:38.871664Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7488026879845366111:2346];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T17:28:38.871794Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037895;self_id=[1:7488026879845366111:2346];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T17:28:38.900163Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488026879845366139:2352];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T17:28:38.900226Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488026879845366139:2352];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstra ... N: tablet_id=72075186224037971;self_id=[1:7488026884140334711:2498];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037971;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:49.065868Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037979;self_id=[1:7488026884140334751:2501];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037979;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:49.065925Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037992;self_id=[1:7488026884140334532:2478];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037992;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:49.065960Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037979;self_id=[1:7488026884140334751:2501];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037979;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:49.066037Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;self_id=[1:7488026884140335116:2553];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037909;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:49.066045Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037932;self_id=[1:7488026884140335026:2543];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037932;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:49.066128Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037932;self_id=[1:7488026884140335026:2543];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037932;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:49.066152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7488026884140335174:2564];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037901;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:49.066241Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;self_id=[1:7488026884140335126:2554];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037907;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:49.066306Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037960;self_id=[1:7488026884140335013:2539];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037960;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:49.066364Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037907;self_id=[1:7488026884140335126:2554];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037907;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:49.066413Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037960;self_id=[1:7488026884140335013:2539];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037960;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:49.066470Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037978;self_id=[1:7488026884140334696:2494];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037978;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:49.066522Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;self_id=[1:7488026884140335024:2542];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037924;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:49.066567Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037961;self_id=[1:7488026884140334773:2505];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037961;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:49.066633Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;self_id=[1:7488026884140334917:2525];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037923;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:49.066652Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037953;self_id=[1:7488026884140334887:2514];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037953;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:49.066766Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037957;self_id=[1:7488026884140334915:2524];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037957;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:49.066860Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;self_id=[1:7488026884140335024:2542];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037924;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:49.066885Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037978;self_id=[1:7488026884140334696:2494];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037978;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:49.066954Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037934;self_id=[1:7488026884140335077:2548];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037934;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:49.067047Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037934;self_id=[1:7488026884140335077:2548];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037934;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:49.068461Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;self_id=[1:7488026884140335181:2566];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037908;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:49.068464Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037958;self_id=[1:7488026884140334884:2513];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037958;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:49.068634Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;self_id=[1:7488026884140335181:2566];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037908;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:49.068638Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037958;self_id=[1:7488026884140334884:2513];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037958;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:49.068906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037952;self_id=[1:7488026884140334909:2521];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037952;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:49.069066Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037952;self_id=[1:7488026884140334909:2521];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037952;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:49.071512Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037938;self_id=[1:7488026884140334907:2520];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037938;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:49.071696Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037938;self_id=[1:7488026884140334907:2520];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037938;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:49.097735Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7488026884140335191:2567];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037920;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:49.097985Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7488026884140335191:2567];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037920;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:49.155522Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037955;self_id=[1:7488026884140334972:2531];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037955;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:49.155809Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037955;self_id=[1:7488026884140334972:2531];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037955;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:49.185221Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037968;self_id=[1:7488026884140334786:2507];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037968;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:49.185452Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037968;self_id=[1:7488026884140334786:2507];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037968;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; assertion failed at 
ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:146, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictReadWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (ABORTED != SUCCESS)
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: tablet lock have another internal generation counter: 18446744073709551615 != 0, code: 2001 , with diff: (ABORT|SUCC)E(D|SS)
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191FE23B
1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x196C634F
2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:146: DoExecute @ 0x18DD15F8
3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18D0BACA
4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:172: Execute_ @ 0x18DB8CCA
5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18DBEF27
6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27
7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27
8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18DBEF27
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18DBEF27
10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196FD375
11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196FD375
12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196FD375
13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x196CCEC8
14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18DBE0F3
15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x196CE795
16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196F78EC
17. ??:0: ?? @ 0x7F8BF5FC2D8F
18. ??:0: ?? @ 0x7F8BF5FC2E3F
19. ??:0: ??
@ 0x163B4028 >> test_sql_streaming.py::test[suites-GroupByHoppingWithDataWatermarks-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-ReadTopic-default.txt] >> test_sql_streaming.py::test[suites-GroupByHoppingWindow-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHoppingWindowByStringKey-default.txt] >> test_sql_streaming.py::test[suites-GroupByHopByStringKey-default.txt] [FAIL] >> test_sql_streaming.py::test[suites-GroupByHopExprKey-default.txt] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TConflictWriteOlap [FAIL] Test command err: Trying to start YDB, gRPC: 7602, MsgBus: 20943 2025-03-31T17:28:34.520229Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488026864692169257:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:28:34.520271Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/gfke/0001cd/r3tmp/tmpfXkuci/pdisk_1.dat 2025-03-31T17:28:34.905411Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T17:28:34.927969Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T17:28:34.928085Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T17:28:34.931448Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7602, node 1 2025-03-31T17:28:35.016011Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T17:28:35.016037Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T17:28:35.016046Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T17:28:35.016196Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20943 TClient is connected to server localhost:20943 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-31T17:28:35.539389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-31T17:28:35.566206Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-31T17:28:37.558514Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488026877577071803:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:28:37.558668Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488026877577071783:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:28:37.558804Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:28:37.563528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-31T17:28:37.573482Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488026877577071820:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-31T17:28:37.654615Z node 1 :TX_PROXY ERROR: Actor# [1:7488026877577071873:2337] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-31T17:28:37.956377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-31T17:28:38.091597Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488026881872039364:2346];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T17:28:38.091788Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488026881872039364:2346];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T17:28:38.092042Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488026881872039364:2346];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T17:28:38.092208Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488026881872039364:2346];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T17:28:38.092300Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488026881872039364:2346];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T17:28:38.092409Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488026881872039364:2346];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T17:28:38.092494Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488026881872039364:2346];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T17:28:38.092601Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488026881872039364:2346];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T17:28:38.092670Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488026881872039364:2346];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T17:28:38.092759Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488026881872039364:2346];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T17:28:38.092874Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7488026881872039364:2346];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T17:28:38.092954Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7488026881872039364:2346];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T17:28:38.094702Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488026881872039360:2344];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T17:28:38.094829Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488026881872039360:2344];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T17:28:38.095139Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488026881872039360:2344];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T17:28:38.095388Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488026881872039360:2344];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T17:28:38.095555Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488026881872039360:2344];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T17:28:38.095744Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488026881872039360:2344];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T17:28:38.095906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488026881872039360:2344];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T17:28:38.096053Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488026881872039360:2344];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T17:28:38.096232Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488026881872039360:2344];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T17:28:38.096389Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488026881872039360:2344];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T17:28:38.096557Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7488026881872039360:2344];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T17:28:38.096710Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037894;self_id=[1:7488026881872039360:2344];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T17:28:38.120210Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7488026881872039374:2351];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T17:28:38.120273Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7488026881872039374:2351];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstr ... EvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038092;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:52.127871Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038065;self_id=[1:7488026894756947296:3181];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038065;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:52.127991Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038092;self_id=[1:7488026894756947320:3193];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038092;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:52.128806Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038067;self_id=[1:7488026894756947338:3202];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038067;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:52.128964Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038067;self_id=[1:7488026894756947338:3202];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038067;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:52.130360Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038063;self_id=[1:7488026894756947403:3222];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038063;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:52.130469Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038075;self_id=[1:7488026894756947373:3220];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038075;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:52.130535Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038063;self_id=[1:7488026894756947403:3222];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038063;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:52.130674Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038075;self_id=[1:7488026894756947373:3220];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038075;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:52.130796Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038094;self_id=[1:7488026894756947322:3194];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038094;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:52.130879Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038062;self_id=[1:7488026894756947340:3203];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038062;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:52.130898Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224038094;self_id=[1:7488026894756947322:3194];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038094;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:52.131023Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038062;self_id=[1:7488026894756947340:3203];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038062;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:52.131322Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038080;self_id=[1:7488026894756947331:3199];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038080;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:52.131329Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038093;self_id=[1:7488026894756947311:3187];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038093;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:52.131470Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038080;self_id=[1:7488026894756947331:3199];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038080;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:52.131470Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038093;self_id=[1:7488026894756947311:3187];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038093;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:52.131579Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038079;self_id=[1:7488026894756947342:3204];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038079;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:52.131745Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038079;self_id=[1:7488026894756947342:3204];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038079;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:52.133442Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038097;self_id=[1:7488026894756947269:3169];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038097;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:52.133444Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038088;self_id=[1:7488026894756947226:3167];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038088;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:52.133642Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038088;self_id=[1:7488026894756947226:3167];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038088;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:52.133652Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038097;self_id=[1:7488026894756947269:3169];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038097;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:52.134964Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038066;self_id=[1:7488026894756947420:3229];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038066;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:52.135196Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224038066;self_id=[1:7488026894756947420:3229];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038066;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:52.137496Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038060;self_id=[1:7488026894756947280:3175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038060;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:52.137702Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038060;self_id=[1:7488026894756947280:3175];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038060;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:52.140708Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038091;self_id=[1:7488026894756947409:3223];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038091;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:52.140924Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038091;self_id=[1:7488026894756947409:3223];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038091;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:52.143233Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038073;self_id=[1:7488026894756947300:3183];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038073;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:52.143440Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038073;self_id=[1:7488026894756947300:3183];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038073;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:52.144130Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038078;self_id=[1:7488026894756947427:3234];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038078;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:52.144322Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038078;self_id=[1:7488026894756947427:3234];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038078;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:52.147746Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038072;self_id=[1:7488026894756947298:3182];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038072;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:52.147926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038072;self_id=[1:7488026894756947298:3182];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038072;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:52.148187Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038096;self_id=[1:7488026894756947401:3221];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038096;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-31T17:28:52.148290Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038096;self_id=[1:7488026894756947401:3221];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038096;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; assertion failed at 
ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:92, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictWrite::DoExecute(): (result.GetStatus() == EStatus::ABORTED) failed: (SUCCESS != ABORTED) , with diff: (SUCC|ABORT)E(SS|D)
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191FE23B
1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x196C634F
2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:92: DoExecute @ 0x18DC9C58
3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18D0BACA
4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:118: Execute_ @ 0x18DB864A
5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18DBEF27
6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27
7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27
8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18DBEF27
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18DBEF27
10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196FD375
11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196FD375
12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196FD375
13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x196CCEC8
14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18DBE0F3
15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x196CE795
16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196F78EC
17. ??:0: ?? @ 0x7FD4A6386D8F
18. ??:0: ?? @ 0x7FD4A6386E3F
19. ??:0: ?? @ 0x163B4028
|97.4%| [TA] $(B)/ydb/core/kqp/ut/tx/test-results/unittest/{meta.json ... results_accumulator.log}
|97.4%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/tx/test-results/unittest/{meta.json ... results_accumulator.log}
>> Replication::PauseAndResumeReplication [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::TTLUsage
Test command err:
2025-03-31T17:28:33.616628Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T17:28:33.616837Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T17:28:33.616962Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/gfke/000257/r3tmp/tmpReJNda/pdisk_1.dat TServer::EnableGrpc on GrpcPort 12855, node 1 TClient is connected to server localhost:16166 2025-03-31T17:28:34.176824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T17:28:34.236851Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T17:28:34.237505Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-31T17:28:34.237560Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-31T17:28:34.237604Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-31T17:28:34.238023Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-31T17:28:34.275957Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T17:28:34.276108Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T17:28:34.287967Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T17:28:34.418853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715657:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715657 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-03-31T17:28:34.537124Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:688:2580], Recipient [1:746:2626]: NKikimr::TEvTablet::TEvBoot 2025-03-31T17:28:34.538548Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:688:2580], Recipient [1:746:2626]: NKikimr::TEvTablet::TEvRestored 2025-03-31T17:28:34.538901Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:746:2626];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T17:28:34.566217Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:746:2626];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T17:28:34.566617Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037888 2025-03-31T17:28:34.576779Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T17:28:34.577105Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T17:28:34.577428Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T17:28:34.577573Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T17:28:34.577699Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T17:28:34.577894Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T17:28:34.578045Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T17:28:34.578165Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T17:28:34.578283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T17:28:34.578419Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T17:28:34.578572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T17:28:34.578719Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T17:28:34.603850Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:688:2580], Recipient [1:746:2626]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-31T17:28:34.605586Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:689:2581], Recipient [1:749:2629]: NKikimr::TEvTablet::TEvBoot 2025-03-31T17:28:34.606817Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 72075186224037888 2025-03-31T17:28:34.606949Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:689:2581], Recipient [1:749:2629]: NKikimr::TEvTablet::TEvRestored 2025-03-31T17:28:34.607238Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037889;self_id=[1:749:2629];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-31T17:28:34.631669Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037889;self_id=[1:749:2629];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-31T17:28:34.631940Z node 1 
:TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037889 2025-03-31T17:28:34.638307Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:749:2629];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-31T17:28:34.638431Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:749:2629];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-31T17:28:34.638695Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:749:2629];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-31T17:28:34.638825Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:749:2629];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-31T17:28:34.638942Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:749:2629];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-31T17:28:34.639055Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:749:2629];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-31T17:28:34.639201Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:749:2629];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-31T17:28:34.639353Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:749:2629];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-31T17:28:34.639490Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:749:2629];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-31T17:28:34.639627Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:749:2629];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-31T17:28:34.639734Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:749:2629];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-31T17:28:34.639857Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:749:2629];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-31T17:28:34.643538Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-31T17:28:34.643655Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 
2025-03-31T17:28:34.643863Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-31T17:28:34.644053Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-31T17:28:34.644143Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-31T17:28:34.644188Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-31T17:28:34.644328Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-31T17:28:34.644407Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;i ... e=0;count=0;;48:size=0;count=0;;49:size=0;count=0;;50:size=0;count=0;;51:size=0;count=0;;52:size=0;count=0;;53:size=0;count=0;;54:size=0;count=0;;55:size=0;count=0;;56:size=0;count=0;;57:size=0;count=0;;58:size=0;count=0;;59:size=0;count=0;;60:size=0;count=0;;61:size=0;count=0;;62:size=0;count=0;;63:size=0;count=0;;64:size=0;count=0;;65:size=0;count=0;; 2025-03-31T17:28:59.836089Z node 1 :TX_COLUMNSHARD TRACE: StateWork, received event# 2146435085, Sender [1:1313:3115], Recipient [1:746:2626]: NKikimr::NColumnShard::TEvPrivate::TEvGarbageCollectionFinished 2025-03-31T17:28:59.836602Z node 1 :TX_COLUMNSHARD TRACE: StateWork, received event# 2146435073, Sender [1:1314:3116], Recipient [1:746:2626]: NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex 2025-03-31T17:28:59.836640Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 72075186224037888 2025-03-31T17:28:59.836840Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[31] (CS::GENERAL) apply at tablet 72075186224037888 2025-03-31T17:28:59.839460Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 72075186224037888 Save Batch GenStep: 1:21 Blob count: 1 2025-03-31T17:28:59.839585Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=2911808;raw_bytes=96858255;count=2;records=82491} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=18275528;raw_bytes=616499669;count=8;records=517509} inactive {blob_bytes=26052856;raw_bytes=873325723;count=17;records=740034} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 72075186224037888 TEvBlobStorage::TEvPut 
tId=72075186224037888;c=1;:74/0:size=2465;count=18;;1:size=57218;count=9;;2:size=0;count=0;;3:size=1466448;count=1;;4:size=1479208;count=1;;5:size=1458600;count=1;;6:size=1445408;count=1;;7:size=1445928;count=1;;8:size=1445448;count=1;;9:size=1445400;count=1;;10:size=1445528;count=1;;11:size=808584;count=1;;12:size=4463248;count=6;;13:size=1445744;count=1;;14:size=1445360;count=1;;15:size=1445608;count=1;;16:size=1225528;count=1;;17:size=1425984;count=1;;18:size=1426000;count=1;;19:size=1445376;count=1;;20:size=1445920;count=1;;21:size=1445360;count=1;;22:size=2010824;count=3;;23:size=1215392;count=1;;24:size=0;count=0;;25:size=0;count=0;;26:size=0;count=0;;27:size=0;count=0;;28:size=0;count=0;;29:size=0;count=0;;30:size=0;count=0;;31:size=0;count=0;;32:size=0;count=0;;33:size=0;count=0;;34:size=0;count=0;;35:size=0;count=0;;36:size=0;count=0;;37:size=0;count=0;;38:size=0;count=0;;39:size=0;count=0;;40:size=0;count=0;;41:size=0;count=0;;42:size=0;count=0;;43:size=0;count=0;;44:size=0;count=0;;45:size=0;count=0;;46:size=0;count=0;;47:size=0;count=0;;48:size=0;count=0;;49:size=0;count=0;;50:size=0;count=0;;51:size=0;count=0;;52:size=0;count=0;;53:size=0;count=0;;54:size=0;count=0;;55:size=0;count=0;;56:size=0;count=0;;57:size=0;count=0;;58:size=0;count=0;;59:size=0;count=0;;60:size=0;count=0;;61:size=0;count=0;;62:size=0;count=0;;63:size=0;count=0;;64:size=0;count=0;;65:size=0;count=0;; TEvBlobStorage::TEvPut tId=72075186224037888;c=0;:74/0:size=2534;count=19;;1:size=57218;count=9;;2:size=0;count=0;;3:size=1466448;count=1;;4:size=1479208;count=1;;5:size=1458600;count=1;;6:size=1445408;count=1;;7:size=1445928;count=1;;8:size=1445448;count=1;;9:size=1445400;count=1;;10:size=1445528;count=1;;11:size=808584;count=1;;12:size=4463248;count=6;;13:size=1445744;count=1;;14:size=1445360;count=1;;15:size=1445608;count=1;;16:size=1225528;count=1;;17:size=1425984;count=1;;18:size=1426000;count=1;;19:size=1445376;count=1;;20:size=1445920;count=1;;21:size=1445360;count=1;;22:size=2010824;count=3;;23:size=1215392;count=1;;24:size=0;count=0;;25:size=0;count=0;;26:size=0;count=0;;27:size=0;count=0;;28:size=0;count=0;;29:size=0;count=0;;30:size=0;count=0;;31:size=0;count=0;;32:size=0;count=0;;33:size=0;count=0;;34:size=0;count=0;;35:size=0;count=0;;36:size=0;count=0;;37:size=0;count=0;;38:size=0;count=0;;39:size=0;count=0;;40:size=0;count=0;;41:size=0;count=0;;42:size=0;count=0;;43:size=0;count=0;;44:size=0;count=0;;45:size=0;count=0;;46:size=0;count=0;;47:size=0;count=0;;48:size=0;count=0;;49:size=0;count=0;;50:size=0;count=0;;51:size=0;count=0;;52:size=0;count=0;;53:size=0;count=0;;54:size=0;count=0;;55:size=0;count=0;;56:size=0;count=0;;57:size=0;count=0;;58:size=0;count=0;;59:size=0;count=0;;60:size=0;count=0;;61:size=0;count=0;;62:size=0;count=0;;63:size=0;count=0;;64:size=0;count=0;;65:size=0;count=0;; 2025-03-31T17:28:59.851361Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=a14cf3d2-e5511f0-b55a3759-33154303;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::GENERAL;success=1; 2025-03-31T17:28:59.851416Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=a14cf3d2-e5511f0-b55a3759-33154303;fline=with_appended.cpp:65;portions=28,;task_id=a14cf3d2-e5511f0-b55a3759-33154303; 2025-03-31T17:28:59.851648Z node 1 :TX_COLUMNSHARD TRACE: 
tablet_id=72075186224037888;task_id=a14cf3d2-e5511f0-b55a3759-33154303;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:28;path_id:3;records_count:85687;min_schema_snapshot:(plan_step=1500;tx_id=281474976715658;);schema_version:1;level:0;column_size:3018840;index_size:0;meta:((produced=SPLIT_COMPACTED;)););path_id=3; 2025-03-31T17:28:59.851813Z node 1 :TX_COLUMNSHARD TRACE: tablet_id=72075186224037888;task_id=a14cf3d2-e5511f0-b55a3759-33154303;fline=tiering.cpp:49;tiering_info=__DEFAULT/0.000000s;$$DELETE/599999.000000s;; 2025-03-31T17:28:59.851887Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=a14cf3d2-e5511f0-b55a3759-33154303;fline=manager.cpp:15;event=unlock;process_id=CS::GENERAL::a14cf3d2-e5511f0-b55a3759-33154303; 2025-03-31T17:28:59.851940Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=a14cf3d2-e5511f0-b55a3759-33154303;fline=granule.cpp:101;event=OnCompactionFinished;info=(granule:3;path_id:3;size:21192904;portions_count:28;); 2025-03-31T17:28:59.851983Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=a14cf3d2-e5511f0-b55a3759-33154303;tablet_id=72075186224037888;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-31T17:28:59.852026Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=a14cf3d2-e5511f0-b55a3759-33154303;tablet_id=72075186224037888;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-31T17:28:59.852098Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=a14cf3d2-e5511f0-b55a3759-33154303;tablet_id=72075186224037888;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=1; 2025-03-31T17:28:59.852147Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=a14cf3d2-e5511f0-b55a3759-33154303;tablet_id=72075186224037888;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=21000; 2025-03-31T17:28:59.852188Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=a14cf3d2-e5511f0-b55a3759-33154303;tablet_id=72075186224037888;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-31T17:28:59.852236Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=a14cf3d2-e5511f0-b55a3759-33154303;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-31T17:28:59.852266Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=a14cf3d2-e5511f0-b55a3759-33154303;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-31T17:28:59.852320Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=a14cf3d2-e5511f0-b55a3759-33154303;tablet_id=72075186224037888;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.599000s; 2025-03-31T17:28:59.852358Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=a14cf3d2-e5511f0-b55a3759-33154303;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-31T17:28:59.852549Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 72075186224037888 Save Batch GenStep: 1:21 Blob count: 1 VERIFY failed (2025-03-31T17:28:59.852753Z): 
tablet_id=72075186224037888;task_id=a14cf3d2-e5511f0-b55a3759-33154303;verification=CompactionsLimit.Dec() >= 0;fline=ro_controller.cpp:39;
ydb/library/actors/core/log.cpp:754 ~TVerifyFormattedRecordWriter(): requirement false failed
NPrivate::InternalPanicImpl(int, char const*, char const*, int, int, int, TBasicStringBuf>, char const*, unsigned long)+873 (0x18BCFDF9)
NPrivate::Panic(NPrivate::TStaticBuf const&, int, char const*, char const*, char const*, ...)+571 (0x18BBE08B)
NActors::TVerifyFormattedRecordWriter::~TVerifyFormattedRecordWriter()+326 (0x19ED5BF6)
NKikimr::NYDBTest::NColumnShard::TReadOnlyController::DoOnWriteIndexComplete(NKikimr::NOlap::TColumnEngineChanges const&, NKikimr::NColumnShard::TColumnShard const&)+4577 (0x485F4931)
NKikimr::NColumnShard::TTxWriteIndex::Complete(NActors::TActorContext const&)+4797 (0x305CB3BD)
NKikimr::NTabletFlatExecutor::TSeat::Complete(NActors::TActorContext const&, bool)+899 (0x1E8C20F3)
NKikimr::NTabletFlatExecutor::TLogicRedo::Confirm(unsigned int, NActors::TActorContext const&, NActors::TActorId const&)+3856 (0x1E7A5AD0)
NKikimr::NTabletFlatExecutor::TExecutor::Handle(TAutoPtr, TDelete>&, NActors::TActorContext const&)+3444 (0x1E5EBEC4)
NKikimr::NTabletFlatExecutor::TExecutor::StateWork(TAutoPtr&)+2821 (0x1E588C45)
NActors::IActor::Receive(TAutoPtr&)+237 (0x19E073CD)
NActors::TTestActorRuntimeBase::SendInternal(TAutoPtr, unsigned int, bool)+3557 (0x358DC3C5)
NActors::TTestActorRuntimeBase::DispatchEventsInternal(NActors::TDispatchOptions const&, TInstant)+12602 (0x358D4C3A)
NActors::TTestActorRuntimeBase::WaitForEdgeEvents(std::__y1::function&)>, TSet, std::__y1::allocator> const&, TDuration)+1076 (0x358DEFB4)
NActors::TEvents::TEvWakeup::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEventIf(TSet, std::__y1::allocator> const&, std::__y1::function const&, TDuration)+292 (0x35AAC134)
NActors::TEvents::TEvWakeup::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEvent(NActors::TActorId const&, TDuration)+419 (0x35AAB253)
NActors::TEvents::TEvWakeup::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEventRethrow(NActors::TActorId const&, TDuration)+307 (0x35AA34B3)
NActors::TTestActorRuntime::SimulateSleep(TDuration)+1115 (0x35AA308B)
NKikimr::NTestSuiteColumnShardTiers::TTestCaseTTLUsage::Execute_(NUnitTest::TTestContext&)+4702 (0x187AE7FE)
std::__y1::__function::__func, void ()>::operator()()+280 (0x187C03B8)
TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x1907CB16)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x1904C649)
NKikimr::NTestSuiteColumnShardTiers::TCurrentTest::Execute()+1204 (0x187BF364)
NUnitTest::TTestFactory::Execute()+2438 (0x1904DF16)
NUnitTest::RunMain(int, char**)+5213 (0x1907708D)
??+0 (0x7F2AEE3ECD90)
__libc_start_main+128 (0x7F2AEE3ECE40)
_start+41 (0x16133029)
|97.6%| [TA] $(B)/ydb/core/tx/tiering/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|97.6%| [TA] {RESULT} $(B)/ydb/core/tx/tiering/ut/test-results/unittest/{meta.json ... results_accumulator.log}
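The VERIFY above fires in the tiering test's read-only controller: DoOnWriteIndexComplete decrements a compaction budget and requires the result to stay non-negative, so a compaction that completes after the budget is exhausted aborts the run. A minimal sketch of that decrement-and-check pattern, assuming nothing about the real ro_controller.cpp beyond the verified expression; TCompactionBudget and OnWriteIndexComplete are hypothetical names:

```cpp
#include <atomic>
#include <cstdio>
#include <cstdlib>

// Hypothetical stand-in for the controller's compaction budget; the real
// check lives in ro_controller.cpp:39 and uses YDB's verify machinery.
class TCompactionBudget {
public:
    explicit TCompactionBudget(long limit) : Counter(limit) {}

    // Decrement and return the new value, like an atomic counter's Dec().
    long Dec() { return Counter.fetch_sub(1) - 1; }

private:
    std::atomic<long> Counter;
};

// Called once per completed write-index (compaction) transaction.
void OnWriteIndexComplete(TCompactionBudget& budget) {
    // Equivalent of: verification=CompactionsLimit.Dec() >= 0
    // A negative result means more compactions completed than the test allowed.
    if (budget.Dec() < 0) {
        std::fprintf(stderr, "VERIFY failed: CompactionsLimit.Dec() >= 0\n");
        std::abort();
    }
}
```

The point of failing hard rather than logging is that the test wants a deterministic cap on background work; a silent overrun would make the TTL scenario's timing assertions meaningless.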
------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/transfer/unittest >> Replication::PauseAndResumeReplication [GOOD]
Test command err:
DDL: CREATE TABLE `SourceTable_4494765710773169520` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) );
DDL: CREATE ASYNC REPLICATION `Replication_4494765710773169520` FOR `SourceTable_4494765710773169520` AS `Table_4494765710773169520` WITH ( CONNECTION_STRING = 'grpc://localhost:21329/?database=local' );
>>>>> Query: INSERT INTO `SourceTable_4494765710773169520` (`Key`, `Message`) VALUES (1, 'Message-1');
>>>>> Query: INSERT INTO `SourceTable_4494765710773169520` (`Key`, `Message`) VALUES (1, 'Message-1');
>>>>> Query: SELECT `Message` FROM `Table_4494765710773169520` ORDER BY `Message`
Attempt=19 count=1
State: Paused
DDL: ALTER ASYNC REPLICATION `Replication_4494765710773169520` SET ( STATE = "Paused" );
>>>>> Query: INSERT INTO `SourceTable_4494765710773169520` (`Key`, `Message`) VALUES (2, 'Message-2');
>>>>> Query: SELECT `Message` FROM `Table_4494765710773169520` ORDER BY `Message`
Attempt=19 count=1
State: StandBy
DDL: ALTER ASYNC REPLICATION `Replication_4494765710773169520` SET ( STATE = "StandBy" );
>>>>> Query: SELECT `Message` FROM `Table_4494765710773169520` ORDER BY `Message`
Attempt=19 count=1
>>>>> Query: SELECT `Message` FROM `Table_4494765710773169520` ORDER BY `Message`
Attempt=18 count=2
DDL: ALTER ASYNC REPLICATION `Replication_4494765710773169520` SET ( STATE = "Paused" );
DDL: ALTER ASYNC REPLICATION `Replication_4494765710773169520` SET ( STATE = "StandBy" );
|97.8%| [TM] {RESULT} ydb/tests/functional/transfer/unittest
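The Attempt/count lines above come from polling the replica: the test re-runs the SELECT until the target table shows the expected number of rows, counting attempts down from a fixed budget while replication is paused and resumed. A sketch of that polling loop under those assumptions; WaitForRowCount and runSelect are illustrative, not the suite's real helpers:

```cpp
#include <chrono>
#include <cstdio>
#include <functional>
#include <string>
#include <thread>

// Hypothetical poller matching the "Attempt=N count=M" lines: re-run the
// SELECT until the replica reports the expected row count or the attempt
// budget runs out. runSelect stands in for the suite's real query helper.
bool WaitForRowCount(const std::function<size_t(const std::string&)>& runSelect,
                     const std::string& query, size_t expected, int attempts = 20) {
    while (attempts-- > 0) {
        const size_t count = runSelect(query);
        std::printf("Attempt=%d count=%zu\n", attempts, count);  // e.g. Attempt=19 count=1
        if (count == expected) {
            return true;  // replication caught up
        }
        std::this_thread::sleep_for(std::chrono::seconds(1));
    }
    return false;  // replica never reached the expected count within the budget
}
```

With a budget of 20, the first probe prints Attempt=19, which is why a row that is already visible shows up as "Attempt=19 count=1" and a row that needs one extra round shows up as "Attempt=18 count=2".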
>> test_sql_streaming.py::test[suites-ReadTwoTopics-default.txt] [FAIL]
>> test_sql_streaming.py::test[suites-ReadWriteSameTopic-default.txt]
>> test_sql_streaming.py::test[suites-ReadTopic-default.txt] [FAIL]
>> test_sql_streaming.py::test[suites-ReadTopicGroupWriteToSolomon-default.txt]
>> test_sql_streaming.py::test[suites-GroupByHopExprKey-default.txt] [FAIL]
>> test_sql_streaming.py::test[suites-GroupByHopListKey-default.txt]
>> test_sql_streaming.py::test[suites-GroupByHoppingWindowByStringKey-default.txt] [FAIL]
>> test_sql_streaming.py::test[suites-GroupByHoppingWindowExprKey-default.txt]
>> overlapping_portions.py::TestOverlappingPortions::test [GOOD]
>> test_sql_streaming.py::test[suites-ReadWriteSameTopic-default.txt] [FAIL]
>> test_sql_streaming.py::test[suites-ReadWriteTopic-default.txt]
>> test_sql_streaming.py::test[suites-ReadTopicGroupWriteToSolomon-default.txt] [FAIL]
>> test_sql_streaming.py::test[suites-ReadTopicWithMetadata-default.txt]
>> test_sql_streaming.py::test[suites-GroupByHoppingWindowExprKey-default.txt] [FAIL]
>> test_sql_streaming.py::test[suites-GroupByHoppingWindowListKey-default.txt]
>> test_sql_streaming.py::test[suites-GroupByHopListKey-default.txt] [FAIL]
>> test_sql_streaming.py::test[suites-GroupByHopNoKey-default.txt]
|97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/oom/py3test >> overlapping_portions.py::TestOverlappingPortions::test [GOOD]
>> KqpStats::SysViewClientLost [FAIL]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::SysViewClientLost [FAIL]
Test command err: Trying to start YDB, gRPC: 65235, MsgBus: 14944
2025-03-31T17:28:36.549264Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7488026873066475504:2059];send_to=[0:7307199536658146131:7762515];
2025-03-31T17:28:36.551607Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/gfke/0002c1/r3tmp/tmpT1rMv2/pdisk_1.dat
2025-03-31T17:28:36.897400Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T17:28:36.918770Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T17:28:36.918869Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T17:28:36.920391Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 65235, node 1
2025-03-31T17:28:36.986289Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T17:28:36.986316Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T17:28:36.986326Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T17:28:36.986455Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:14944
TClient is connected to server localhost:14944
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-31T17:28:37.429461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T17:28:37.456291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T17:28:37.594438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T17:28:37.745471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T17:28:37.804720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T17:28:39.327328Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488026885951379164:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:28:39.327415Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:28:39.593322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-31T17:28:39.619431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-31T17:28:39.646921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-31T17:28:39.672591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-31T17:28:39.700586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-31T17:28:39.731486Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-31T17:28:39.810606Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488026885951379682:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:28:39.810697Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7488026885951379687:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:28:39.810701Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-31T17:28:39.814094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-31T17:28:39.824102Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7488026885951379689:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-03-31T17:28:39.927178Z node 1 :TX_PROXY ERROR: Actor# [1:7488026885951379744:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-31T17:28:40.557881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
waiting...
2025-03-31T17:28:41.547148Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488026873066475504:2059];send_to=[0:7307199536658146131:7762515];
2025-03-31T17:28:41.547225Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-31T17:28:51.896675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs
2025-03-31T17:28:51.896719Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-31T17:29:04.447906Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743442144437, txId: 281474976710672] shutting down
2025-03-31T17:29:04.502227Z node 1 :RPC_REQUEST WARN: Client lost
2025-03-31T17:29:05.653147Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743442145647, txId: 281474976710674] shutting down
2025-03-31T17:29:06.796945Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743442146791, txId: 281474976710676] shutting down
2025-03-31T17:29:07.986115Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743442147979, txId: 281474976710678] shutting down
2025-03-31T17:29:09.168423Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743442149160, txId: 281474976710680] shutting down
2025-03-31T17:29:10.355397Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743442150349, txId: 281474976710682] shutting down
2025-03-31T17:29:11.542452Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743442151535, txId: 281474976710684] shutting down
2025-03-31T17:29:12.739359Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743442152732, txId: 281474976710686] shutting down
2025-03-31T17:29:13.880306Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743442153874, txId: 281474976710688] shutting down
2025-03-31T17:29:15.055161Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743442155047, txId: 281474976710690] shutting down
2025-03-31T17:29:16.234566Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743442156227, txId: 281474976710692] shutting down
assertion failed at ydb/core/kqp/ut/query/kqp_stats_ut.cpp:591, virtual void NKikimr::NKqp::NTestSuiteKqpStats::TTestCaseSysViewClientLost::Execute_(NUnitTest::TTestContext &): (timeoutedCount == 1)
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x194EA4DB
1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x199AF40F
2. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:591: Execute_ @ 0x1908FA48
3. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18: operator() @ 0x190A2A57
4. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18:1) &> @ 0x190A2A57
5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18:1) &> @ 0x190A2A57
6. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x190A2A57
7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x190A2A57
8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x199E6435
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x199E6435
10. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x199E6435
11. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x199B5F88
12. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18: Execute @ 0x190A1BDB
13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x199B7855
14. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x199E09AC
15. ??:0: ?? @ 0x7F8095818D8F
16. ??:0: ?? @ 0x7F8095818E3F
17. ??:0: ?? @ 0x16459028
|98.0%| [TA] $(B)/ydb/core/kqp/ut/query/test-results/unittest/{meta.json ... results_accumulator.log}
|98.0%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/query/test-results/unittest/{meta.json ... results_accumulator.log}
>> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false] [FAIL]
>> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true] [FAIL]
>> test_sql_streaming.py::test[suites-ReadWriteTopic-default.txt] [FAIL]
>> test_sql_streaming.py::test[suites-ReadWriteTopicWithSchema-default.txt]
|98.1%| [TA] $(B)/ydb/tests/olap/oom/test-results/py3test/{meta.json ... results_accumulator.log}
>> test_sql_streaming.py::test[suites-ReadTopicWithMetadata-default.txt] [FAIL]
>> test_sql_streaming.py::test[suites-ReadTopicWithMetadataInsideFilter-default.txt]
>> test_sql_streaming.py::test[suites-GroupByHoppingWindowListKey-default.txt] [FAIL]
>> test_sql_streaming.py::test[suites-GroupByHopNoKey-default.txt] [FAIL]
>> test_sql_streaming.py::test[suites-GroupByHoppingWindowNoKey-default.txt]
>> test_sql_streaming.py::test[suites-GroupByHopPercentile-default.txt]
|98.2%| [TA] {RESULT} $(B)/ydb/tests/olap/oom/test-results/py3test/{meta.json ... results_accumulator.log}
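The KqpStats failure above reduces to a single check: after the client is lost, exactly one query should finish with a TIMEOUT status, per "(timeoutedCount == 1)" at kqp_stats_ut.cpp:591. A rough reconstruction of that kind of count-and-assert, using plain assert() in place of the UNIT_ASSERT-style macros from library/cpp/testing/unittest that the backtrace shows; the status enum and helper are illustrative only:

```cpp
#include <cassert>
#include <cstddef>
#include <vector>

// Illustrative statuses; the real test inspects Ydb::StatusIds codes.
enum class EStatus { Success, Timeout };

// Hypothetical reconstruction: run a batch of queries, count the ones
// that timed out, and require exactly one.
size_t CountTimeouted(const std::vector<EStatus>& results) {
    size_t timeouted = 0;
    for (EStatus s : results) {
        if (s == EStatus::Timeout) {
            ++timeouted;
        }
    }
    return timeouted;
}

int main() {
    std::vector<EStatus> results{EStatus::Success, EStatus::Timeout, EStatus::Success};
    const size_t timeoutedCount = CountTimeouted(results);
    // In the real suite this is a UNIT_ASSERT-style check; assert() stands in.
    assert(timeoutedCount == 1);
    return 0;
}
```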
>> test_sql_streaming.py::test[suites-ReadWriteTopicWithSchema-default.txt] [FAIL]
>> test_sql_streaming.py::test[suites-WriteTwoTopics-default.txt]
>> test_sql_streaming.py::test[suites-ReadTopicWithMetadataInsideFilter-default.txt] [FAIL]
>> test_sql_streaming.py::test[suites-ReadTopicWithMetadataNestedDeep-default.txt]
>> test_sql_streaming.py::test[suites-GroupByHoppingWindowNoKey-default.txt] [FAIL]
>> test_sql_streaming.py::test[suites-GroupByHoppingWindowPercentile-default.txt]
>> test_sql_streaming.py::test[suites-GroupByHopPercentile-default.txt] [FAIL]
>> test_sql_streaming.py::test[suites-GroupByHopTimeExtractorUnusedColumns-default.txt]
>> test_sql_streaming.py::test[suites-WriteTwoTopics-default.txt] [FAIL]
|98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/streaming_optimize/py3test >> test_sql_streaming.py::test[suites-WriteTwoTopics-default.txt] [FAIL]
>> test_sql_streaming.py::test[suites-ReadTopicWithMetadataNestedDeep-default.txt] [FAIL]
|98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/streaming_optimize/py3test >> test_sql_streaming.py::test[suites-ReadTopicWithMetadataNestedDeep-default.txt] [FAIL]
>> test_sql_streaming.py::test[suites-GroupByHoppingWindowPercentile-default.txt] [FAIL]
>> test_sql_streaming.py::test[suites-GroupByHopTimeExtractorUnusedColumns-default.txt] [FAIL]
|98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/streaming_optimize/py3test >> test_sql_streaming.py::test[suites-GroupByHoppingWindowPercentile-default.txt] [FAIL]
|98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/streaming_optimize/py3test >> test_sql_streaming.py::test[suites-GroupByHopTimeExtractorUnusedColumns-default.txt] [FAIL]
|98.5%| [TA] $(B)/ydb/tests/fq/streaming_optimize/test-results/py3test/{meta.json ... results_accumulator.log}
|98.6%| [TA] {RESULT} $(B)/ydb/tests/fq/streaming_optimize/test-results/py3test/{meta.json ... results_accumulator.log}
|98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false] [FAIL]
>> test_workload.py::TestYdbLogWorkload::test[column] [GOOD]
>> IncrementalBackup::ComplexRestoreBackupCollection+WithIncremental [FAIL]
|98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true] [FAIL]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_backup/unittest >> IncrementalBackup::ComplexRestoreBackupCollection+WithIncremental [FAIL]
Test command err:
2025-03-31T17:28:39.937612Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-31T17:28:39.937853Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-31T17:28:39.938005Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/gfke/000220/r3tmp/tmpRzXEq1/pdisk_1.dat 2025-03-31T17:28:40.232622Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:596:2520], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T17:28:40.232690Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T17:28:40.232715Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-03-31T17:28:40.232823Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:593:2518], Recipient [1:409:2404]: {TEvModifySchemeTransaction txid# 1 TabletId# 72057594046644480} 2025-03-31T17:28:40.232853Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-03-31T17:28:40.325081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2025-03-31T17:28:40.325305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T17:28:40.325525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-03-31T17:28:40.325766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-31T17:28:40.325828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-31T17:28:40.325909Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-31T17:28:40.326572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-03-31T17:28:40.326705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-03-31T17:28:40.326822Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-31T17:28:40.326858Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-03-31T17:28:40.327022Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-03-31T17:28:40.327057Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-03-31T17:28:40.327160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-03-31T17:28:40.327230Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-03-31T17:28:40.327268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-31T17:28:40.327302Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-31T17:28:40.327406Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-31T17:28:40.327806Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-31T17:28:40.327840Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-03-31T17:28:40.327941Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-03-31T17:28:40.327968Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-03-31T17:28:40.328024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-03-31T17:28:40.328067Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 2025-03-31T17:28:40.328100Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-31T17:28:40.328181Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-31T17:28:40.328503Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-31T17:28:40.328538Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-03-31T17:28:40.328629Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-03-31T17:28:40.328656Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-03-31T17:28:40.328713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-03-31T17:28:40.328742Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2025-03-31T17:28:40.328790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2025-03-31T17:28:40.328817Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-31T17:28:40.328868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-31T17:28:40.332458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-31T17:28:40.332890Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-31T17:28:40.332933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 
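The trace above walks one schemeshard operation through its numbered states: TCreateParts (state 2) finds no shards to create and moves on, NSubDomainState::TConfigureParts (state 3) follows, and state 128 is the propose step that sends the transaction to coordinator 72057594046316545. A simplified model of that progression, assuming only what the log lines themselves say; the enum and NextState are invented for illustration and are not the schemeshard's actual types:

```cpp
#include <cstdio>

// Illustrative reconstruction of the state numbers visible in the trace:
// "Change state for txid 1:0 2 -> 3" and "3 -> 128".
enum class EOpState { CreateParts = 2, ConfigureParts = 3, Propose = 128 };

EOpState NextState(EOpState s) {
    switch (s) {
        case EOpState::CreateParts:
            // "TCreateParts ... no shards to create, do next state": 2 -> 3
            return EOpState::ConfigureParts;
        case EOpState::ConfigureParts:
            // "NSubDomainState::TConfigureParts ProgressState": 3 -> 128
            return EOpState::Propose;
        case EOpState::Propose:
            // "TOperation DoPropose send propose to coordinator": terminal here
            return EOpState::Propose;
    }
    return s;
}

int main() {
    EOpState s = EOpState::CreateParts;
    while (s != EOpState::Propose) {
        s = NextState(s);
        std::printf("Change state for txid 1:0 -> %d\n", static_cast<int>(s));
    }
    return 0;
}
```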
2025-03-31T17:28:40.333116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2025-03-31T17:28:40.334230Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877760, Sender [1:601:2525], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046316545 Status: OK ServerId: [1:603:2526] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-03-31T17:28:40.334272Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-03-31T17:28:40.334320Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Handle TEvClientConnected, tabletId: 72057594046316545, status: OK, at schemeshard: 72057594046644480 2025-03-31T17:28:40.334449Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269091328, Sender [1:405:2400], Recipient [1:409:2404]: NKikimrTx.TEvProposeTransactionStatus Status: 16 StepId: 500 TxId: 1 2025-03-31T17:28:40.334813Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:605:2528], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T17:28:40.334855Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T17:28:40.334891Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-03-31T17:28:40.335007Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [1:593:2518], Recipient [1:409:2404]: NKikimrScheme.TEvNotifyTxCompletion TxId: 1 2025-03-31T17:28:40.335051Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-03-31T17:28:40.335142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046644480 2025-03-31T17:28:40.335186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2025-03-31T17:28:40.335225Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046644480 2025-03-31T17:28:40.387287Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 273285138, Sender [1:43:2090], Recipient [1:409:2404]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } } ItemKinds: 26 ItemKinds: 34 ItemKinds: 52 ItemKinds: 54 ItemKinds: 73 Local: true } 2025-03-31T17:28:40.387417Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } 2025-03-31T17:28:40.387481Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-31T17:28:40.387992Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [RootDataErasureManager] Stop 2025-03-31T17:28:40.388076Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Send TEvConfigNotificationResponse: SubscriptionId: 0 ConfigId { } 2025-03-31T17:28:40.428131Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T17:28:40.428280Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T17:28:40.440005Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T17:28:40.515296Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269091328, Sender [1:405:2400], Recipient [1:409:2404]: NKikimrTx.TEvProposeTransactionStatus Status: 17 StepId: 
500 TxId: 1 2025-03-31T17:28:40.516144Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:636:2544], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-31T17:28:40.516191Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-31T17:28:40.516234Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-03-31T17:28:40.516380Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269287424, Sender [1:568:2495], Recipient [1:409:2404]: {TEvPlanStep step# 500 MediatorId# 72057594046382081 TabletID 72057594046644480} 2025-03-31T17:28:40.516416Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-31T17:28:40.516498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-31T17:28:40.516640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 0 RawX2: 0 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemes ... pient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-03-31T17:30:47.937303Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-03-31T17:30:47.937324Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-03-31T17:30:47.937375Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-03-31T17:30:47.937396Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2025-03-31T17:30:47.937450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 22 shard idx 72057594046644480:7 data size 0 row count 0 2025-03-31T17:30:47.937523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037894 maps to shardIdx: 72057594046644480:7 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 22], pathId map=TableA, is column=0, is olap=0 2025-03-31T17:30:47.937556Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037894 followerId=0, pathId 22: RowCount 0, DataSize 0 2025-03-31T17:30:47.937577Z node 1 :FLAT_TX_SCHEMESHARD TRACE: BuildStatsForCollector: datashardId 72075186224037894, followerId 0 2025-03-31T17:30:47.937622Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Skipped shard# 72057594046644480:7 with partCount# 0, rowCount# 0, searchHeight# 0, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72057594046644480 2025-03-31T17:30:47.937695Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-31T17:30:47.948041Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435091, Sender [0:0:0], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-03-31T17:30:47.948101Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-03-31T17:30:47.948123Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-03-31T17:30:48.020610Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037895 (dry run) active 0 active planned 0 immediate 0 planned 1 2025-03-31T17:30:48.021022Z node 1 :FLAT_TX_SCHEMESHARD TRACE: 
StateWork, received event# 269553162, Sender [1:1581:3192], Recipient [1:409:2404]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186224037895 TableLocalId: 24 Generation: 1 Round: 58 TableStats { DataSize: 54 RowCount: 2 IndexSize: 0 InMemSize: 0 LastAccessTime: 5450 LastUpdateTime: 5450 ImmediateTxCompleted: 1 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 1 RowUpdates: 2 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 54 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 18 Memory: 119576 Storage: 142 } ShardState: 2 UserTablePartOwners: 72075186224037895 NodeId: 1 StartTime: 4950 TableOwnerId: 72057594046644480 FollowerId: 0 2025-03-31T17:30:48.021058Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-03-31T17:30:48.021103Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037895 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 24] state 'Ready' dataSize 54 rowCount 2 cpuUsage 0.0018 2025-03-31T17:30:48.021176Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037895 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 24] raw table stats: DataSize: 54 RowCount: 2 IndexSize: 0 InMemSize: 0 LastAccessTime: 5450 LastUpdateTime: 5450 ImmediateTxCompleted: 1 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 1 RowUpdates: 2 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 54 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-03-31T17:30:48.021203Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-03-31T17:30:48.073965Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435091, Sender [0:0:0], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-03-31T17:30:48.074024Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-03-31T17:30:48.074046Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-03-31T17:30:48.074102Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-03-31T17:30:48.074124Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2025-03-31T17:30:48.074181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 24 shard idx 72057594046644480:8 data size 54 row count 2 2025-03-31T17:30:48.074226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037895 maps to shardIdx: 72057594046644480:8 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 24], pathId map=TableA, is column=0, is olap=0 2025-03-31T17:30:48.074256Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037895 followerId=0, pathId 24: RowCount 2, DataSize 54 2025-03-31T17:30:48.074275Z node 1 :FLAT_TX_SCHEMESHARD TRACE: BuildStatsForCollector: 
datashardId 72075186224037895, followerId 0 2025-03-31T17:30:48.074324Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Skipped shard# 72057594046644480:8 with partCount# 1, rowCount# 2, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72057594046644480 2025-03-31T17:30:48.074403Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-31T17:30:48.084708Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435091, Sender [0:0:0], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-03-31T17:30:48.084759Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-03-31T17:30:48.084780Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-03-31T17:30:48.145831Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T17:30:48.145905Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T17:30:48.145961Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T17:30:48.145982Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T17:30:48.166915Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037896 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T17:30:48.249850Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037897 (dry run) active 0 active planned 0 immediate 0 planned 1 2025-03-31T17:30:48.322020Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T17:30:48.322085Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T17:30:48.322146Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T17:30:48.322168Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-31T17:30:48.342989Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037898 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T17:30:48.425492Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037899 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-31T17:30:48.507955Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTk4MWMxMWEtNWQ3MzUxZGItZjBiOGY1NjYtODY2MGNkZjk=, ActorId: [1:2033:3520], ActorState: ExecuteState, TraceId: 01jqpkz8797psye5cwr0dex9bf, Create QueryResponse for error on request, msg: 2025-03-31T17:30:48.508129Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T17:30:48.508159Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-31T17:30:48.508206Z node 
1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime
2025-03-31T17:30:48.508229Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime
2025-03-31T17:30:48.508328Z node 1 :KQP_SLOW_LOG WARN: SessionId: ydb://session/3?node_id=1&id=YTk4MWMxMWEtNWQ3MzUxZGItZjBiOGY1NjYtODY2MGNkZjk=, Slow query, duration: 600.000000s, status: GENERIC_ERROR, user: UNAUTHENTICATED, results: 0b, text: "RESTORE `MyCollection`;", parameters: 0b
assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (TIMEOUT != SUCCESS)
Response { QueryIssues { message: "Request timeout 600000ms exceeded" severity: 1 } QueryIssues { message: "Cancelling after 600000ms in ExecuteState" severity: 1 } TxMeta { } } YdbStatus: TIMEOUT , with diff: (TIM|SUCC)E(OUT|SS)
TBackTrace::Capture()+28 (0x18FC909C)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x194861E0)
NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode)+3639 (0x48C26827)
NKikimr::NTestSuiteIncrementalBackup::TTestCaseComplexRestoreBackupCollection::Execute_(NUnitTest::TTestContext&)+26163 (0x18C197A3)
std::__y1::__function::__func, void ()>::operator()()+280 (0x18BD52A8)
TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x194BD206)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x1948CD59)
NKikimr::NTestSuiteIncrementalBackup::TCurrentTest::Execute()+1204 (0x18BD4154)
NUnitTest::TTestFactory::Execute()+2438 (0x1948E626)
NUnitTest::RunMain(int, char**)+5213 (0x194B777D)
??+0 (0x7FB88FEABD90)
__libc_start_main+128 (0x7FB88FEABE40)
_start+41 (0x1635A029)
|98.8%| [TA] $(B)/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/{meta.json ... results_accumulator.log}
|98.9%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/{meta.json ... results_accumulator.log}
|98.9%| [TA] $(B)/ydb/tests/functional/serverless/test-results/py3test/{meta.json ... results_accumulator.log}
|99.0%| [TA] {RESULT} $(B)/ydb/tests/functional/serverless/test-results/py3test/{meta.json ... results_accumulator.log}
>> test_workload.py::TestYdbLogWorkload::test[column] [FAIL]
|99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/log/tests/py3test >> test_workload.py::TestYdbLogWorkload::test[column] [FAIL]
|99.1%| [TM] {RESULT} ydb/tests/stress/log/tests/py3test
>> alter_compression.py::TestAlterCompression::test_all_supported_compression [FAIL]
>> alter_compression.py::TestAlterCompression::test_availability_data
>> ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change [FAIL]
>> ttl_delete_s3.py::TestDeleteS3Ttl::test_ttl_delete
>> AnalyzeColumnshard::AnalyzeRebootColumnShard [FAIL]
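The assertion above spells out the helper's contract: NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode) runs a statement and compares the response status against the expected code, and here RESTORE `MyCollection` came back TIMEOUT where SUCCESS was expected. A stripped-down sketch of that comparison, assuming nothing beyond the signature in the trace; the enum and runStatement callback are stand-ins, not the YDB types:

```cpp
#include <cstdio>
#include <cstdlib>
#include <string>

// Minimal stand-ins for Ydb::StatusIds; only the two codes from the log.
enum class EStatusCode { SUCCESS, TIMEOUT };

const char* ToString(EStatusCode c) {
    return c == EStatusCode::SUCCESS ? "SUCCESS" : "TIMEOUT";
}

// Hypothetical skeleton of the check inside NKikimr::ExecSQL
// (datashard_ut_common.cpp:2091): execute the statement, then require the
// returned status to match the expected one. runStatement stands in for
// the real actor-based execution path.
void ExecSQL(const std::string& sql, EStatusCode expected,
             EStatusCode (*runStatement)(const std::string&)) {
    const EStatusCode got = runStatement(sql);
    if (got != expected) {
        // Mirrors: (response.GetYdbStatus() == code) failed: (TIMEOUT != SUCCESS)
        std::fprintf(stderr, "assertion failed: (%s != %s)\n",
                     ToString(got), ToString(expected));
        std::abort();
    }
}
```

The 600000ms figures in the Response block and the slow-query warning line up: the query service cancelled the RESTORE after its 600s request timeout, so the helper saw TIMEOUT instead of the SUCCESS it was told to expect.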
------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootColumnShard [FAIL]
Test command err:
2025-03-31T17:28:27.671068Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-03-31T17:28:27.671366Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
2025-03-31T17:28:27.671457Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/gfke/0002ce/r3tmp/tmp9SGtf9/pdisk_1.dat
2025-03-31T17:28:28.084192Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 6288, node 1
2025-03-31T17:28:28.337980Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-31T17:28:28.338045Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-31T17:28:28.338081Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-31T17:28:28.340610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480
2025-03-31T17:28:28.341061Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
2025-03-31T17:28:28.435662Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T17:28:28.435810Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T17:28:28.450232Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:17819
2025-03-31T17:28:29.007482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
2025-03-31T17:28:32.371967Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2
2025-03-31T17:28:32.411658Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-31T17:28:32.411790Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-31T17:28:32.441734Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2
2025-03-31T17:28:32.444129Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-31T17:28:32.701209Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed
2025-03-31T17:28:32.701856Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed
2025-03-31T17:28:32.702369Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed
2025-03-31T17:28:32.702541Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed
2025-03-31T17:28:32.702817Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed
2025-03-31T17:28:32.702905Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed
2025-03-31T17:28:32.703002Z node 2 :HIVE WARN: HIVE#72075186224037888
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T17:28:32.703175Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T17:28:32.703276Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-31T17:28:32.866947Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-31T17:28:32.867048Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-31T17:28:32.882311Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-31T17:28:33.034995Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-31T17:28:33.089704Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-31T17:28:33.089835Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-31T17:28:33.119446Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-31T17:28:33.121517Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-31T17:28:33.121725Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-31T17:28:33.121783Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-31T17:28:33.121838Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-31T17:28:33.121890Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-31T17:28:33.121940Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-31T17:28:33.121988Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-31T17:28:33.123207Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-31T17:28:33.154564Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-31T17:28:33.154686Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1877:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-31T17:28:33.161445Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1888:2606] 2025-03-31T17:28:33.167260Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1918:2621] 2025-03-31T17:28:33.168457Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1918:2621], schemeshard id = 72075186224037897 2025-03-31T17:28:33.175766Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-31T17:28:33.193141Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-31T17:28:33.193210Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-31T17:28:33.193298Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-31T17:28:33.210221Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-31T17:28:33.216318Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-31T17:28:33.216468Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-31T17:28:33.438072Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-31T17:28:33.614681Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-31T17:28:33.682219Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-31T17:28:34.770936Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2241:3075], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T17:28:34.771107Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-31T17:28:34.793111Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897
2025-03-31T17:28:34.971449Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2326:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0;
2025-03-31T17:28:34.971697Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2326:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules;
2025-03-31T17:28:34.972024Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2326:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks;
2025-03-31T17:28:34.972171Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2326:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner;
2025-03-31T17:28:34.972387Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2326:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId;
2025-03-31T17:28:34.972524Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2326:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup;
2025-03-31T17:28:34.972648Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2326:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer;
2025-03-31T17:28:34.972789Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2326:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks;
2025-03-31T17:28:34.972976Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2326:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks;
2025-03-31T17:28:34.973115Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2326:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks;
2025-03-31T17:28:34.973245Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2326:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2;
2025-03-31T17:28:34.973381Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2326:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks;
2025-03-31T17:28:35.002612Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1;
2025-03-31T17:28:35.002719Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;descr ...
31T17:37:00.574221Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze
2025-03-31T17:37:00.574262Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId
2025-03-31T17:37:00.574282Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests.
2025-03-31T17:37:01.801457Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal
2025-03-31T17:37:01.801516Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId
2025-03-31T17:37:01.801539Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests.
2025-03-31T17:37:02.942406Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze
2025-03-31T17:37:02.942467Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId
2025-03-31T17:37:02.942489Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests.
2025-03-31T17:37:04.147934Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout
2025-03-31T17:37:04.158252Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal
2025-03-31T17:37:04.158287Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId
2025-03-31T17:37:04.158309Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests.
2025-03-31T17:37:05.379591Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze
2025-03-31T17:37:05.379663Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId
2025-03-31T17:37:05.379691Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests.
2025-03-31T17:37:06.549183Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1
2025-03-31T17:37:06.549315Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2
2025-03-31T17:37:06.559850Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal
2025-03-31T17:37:06.559902Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId
2025-03-31T17:37:06.559925Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests.
2025-03-31T17:37:07.772223Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze
2025-03-31T17:37:07.772280Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId
2025-03-31T17:37:07.772301Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests.
2025-03-31T17:37:08.982195Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal
2025-03-31T17:37:08.982254Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId
2025-03-31T17:37:08.982277Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests.
2025-03-31T17:37:10.182400Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout
2025-03-31T17:37:10.192878Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze
2025-03-31T17:37:10.192926Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId
2025-03-31T17:37:10.192946Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests.
2025-03-31T17:37:11.513531Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal
2025-03-31T17:37:11.513592Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId
2025-03-31T17:37:11.513615Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests.
2025-03-31T17:37:12.732603Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1
2025-03-31T17:37:12.732742Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2
2025-03-31T17:37:12.743093Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze
2025-03-31T17:37:12.743127Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId
2025-03-31T17:37:12.743147Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests.
2025-03-31T17:37:13.934942Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal
2025-03-31T17:37:13.935003Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId
2025-03-31T17:37:13.935027Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests.
2025-03-31T17:37:15.102826Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze
2025-03-31T17:37:15.102884Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId
2025-03-31T17:37:15.102908Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests.
2025-03-31T17:37:16.320897Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout
2025-03-31T17:37:16.331320Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal
2025-03-31T17:37:16.331383Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId
2025-03-31T17:37:16.331405Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests.
2025-03-31T17:37:17.654664Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze
2025-03-31T17:37:17.654727Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId
2025-03-31T17:37:17.654750Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests.
2025-03-31T17:37:18.926674Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1
2025-03-31T17:37:18.926813Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2
2025-03-31T17:37:18.937300Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal
2025-03-31T17:37:18.937341Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId
2025-03-31T17:37:18.937363Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests.
2025-03-31T17:37:20.138315Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze
2025-03-31T17:37:20.138374Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId
2025-03-31T17:37:20.138396Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests.
2025-03-31T17:37:21.320726Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal
2025-03-31T17:37:21.320788Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId
2025-03-31T17:37:21.320816Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests.
2025-03-31T17:37:22.489412Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout
2025-03-31T17:37:22.499715Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze
2025-03-31T17:37:22.499749Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId
2025-03-31T17:37:22.499771Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests.
2025-03-31T17:37:23.835719Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal
2025-03-31T17:37:23.835779Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId
2025-03-31T17:37:23.835803Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests.
2025-03-31T17:37:25.038383Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1
2025-03-31T17:37:25.038533Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2
2025-03-31T17:37:25.049143Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze
2025-03-31T17:37:25.049197Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId
2025-03-31T17:37:25.049220Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests.
2025-03-31T17:37:26.297237Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal
2025-03-31T17:37:26.297298Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId
2025-03-31T17:37:26.297324Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests.
(TWithBackTrace) ydb/library/actors/testlib/test_runtime.h:579: Exception occured while waiting for NKikimr::NStat::TEvStatistics::TEvAnalyzeResponse: (NActors::TSchedulingLimitReachedException) TestActorRuntime Processed over 100000 events.ydb/library/actors/testlib/test_runtime.cpp:716:
TBackTrace::Capture()+28 (0x18C0A59C)
TWithBackTrace::TWithBackTrace<>()+80 (0x18832490)
NKikimr::NStat::TEvStatistics::TEvAnalyzeResponse::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEventRethrow(NActors::TActorId const&, TDuration)+485 (0x18806B15)
NKikimr::NStat::NTestSuiteAnalyzeColumnshard::TTestCaseAnalyzeRebootColumnShard::Execute_(NUnitTest::TTestContext&)+4263 (0x188236B7)
std::__y1::__function::__func, void ()>::operator()()+280 (0x1882E548)
TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x190F5E06)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x190CEA19)
NKikimr::NStat::NTestSuiteAnalyzeColumnshard::TCurrentTest::Execute()+1204 (0x1882D714)
NUnitTest::TTestFactory::Execute()+2438 (0x190D02E6)
NUnitTest::RunMain(int, char**)+5213 (0x190F037D)
??+0 (0x7FDD933D7D90)
__libc_start_main+128 (0x7FDD933D7E40)
_start+41 (0x1618F029)
|99.3%| [TA] $(B)/ydb/core/statistics/aggregator/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|99.3%| [TA] {RESULT} $(B)/ydb/core/statistics/aggregator/ut/test-results/unittest/{meta.json ... results_accumulator.log}
>> alter_compression.py::TestAlterCompression::test_availability_data [FAIL]
>> test_tpch_import.py::TestS3TpchImport::test_import_and_export [GOOD]
|99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/column_family/compression/py3test >> alter_compression.py::TestAlterCompression::test_availability_data [FAIL]
|99.5%| [TM] {RESULT} ydb/tests/olap/column_family/compression/py3test
>> test_tpch_import.py::TestS3TpchImport::test_import_and_export [FAIL]
|99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/s3_import/py3test >> test_tpch_import.py::TestS3TpchImport::test_import_and_export [FAIL]
|99.6%| [TM] {RESULT} ydb/tests/olap/s3_import/py3test
------- [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/ttl_tiering/py3test >> data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test
2025-03-31 17:38:28,020 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out
2025-03-31 17:38:28,217 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout.
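Two distinct failure shapes meet at this point in the log. The AnalyzeColumnshard failure above is not an ordinary timeout: GrabEdgeEventRethrow was still waiting for TEvAnalyzeResponse when TestActorRuntime hit its scheduling budget, having processed over 100000 events while the statistics aggregator idled through ScheduleNextAnalyze/ScheduleNextTraversal rounds roughly every 1.2 seconds. A minimal sketch of that kind of event budget, in Python for brevity (the function, the event source, and the exception name are illustrative stand-ins inferred from the message above, not the NActors implementation):

    class SchedulingLimitReached(RuntimeError):
        """Illustrative stand-in for NActors::TSchedulingLimitReachedException."""

    def drain_until(events, is_expected, max_events=100_000):
        # Dispatch events until the awaited one arrives or the budget is spent.
        # An endless stream of idle wake-ups, like the ScheduleNext* rounds in
        # the log above, exhausts the budget before the awaited event appears.
        processed = 0
        for event in events:
            processed += 1
            if processed > max_events:
                raise SchedulingLimitReached(f"Processed over {max_events} events.")
            if is_expected(event):
                return event
        return None  # event source drained without the awaited event

The reports that follow are the second shape: per their tracebacks, the tests poll a condition once per second (ydb/tests/olap/ttl_tiering/base.py's wait_for sleeps 1 s per iteration) with no deadline of their own, so the ya wrapper's 600-second budget expires and the whole process tree is killed. A deadline-bounded version of such a polling helper, sketched under the assumption that the predicate is cheap to re-evaluate (the names wait_for and predicate here are hypothetical, not the actual YDB test code):

    import time

    def wait_for(predicate, timeout_seconds, poll_interval=1.0):
        # Poll until predicate() holds or the deadline passes. Returning False
        # lets the test fail with its own assertion message instead of being
        # killed from outside by the wrapper's 600 s timeout.
        deadline = time.monotonic() + timeout_seconds
        while time.monotonic() < deadline:
            if predicate():
                return True
            time.sleep(poll_interval)
        return False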
Process tree before termination: pid rss ref pdirt 1001209 733M 738M 655M ydb-tests-olap-ttl_tiering --basetemp /home/runner/.ya/build/build_root/gfke/000254/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-mod 1002614 2.6G 2.6G 2.1G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/gfke/000254/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/data_ 1004785 475M 475M 441M └─ moto_server s3 --port 32626 Test command err: File "library/python/pytest/main.py", line 101, in main rc = pytest.main( File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main( File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main return wrap_session(config, _main) File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session session.exitstatus = doit(config, session) or 0 File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main config.hook.pytest_runtestloop(session=session) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol runtestprotocol(item, nextitem=nextitem) File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol reports.append(call_and_report(item, "call", log)) File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report call = call_runtest_hook(item, when, **kwds) File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook return CallInfo.from_call( File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call result: Optional[TResult] = func() File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in lambda: ihook(item=item, **kwds), when=when, reraise=reraise File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in 
_multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call item.runtest() File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest self.ihook.pytest_pyfunc_call(pyfuncitem=self) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call pyfuncitem.retval = testfunction(**testargs) File "ydb/tests/olap/ttl_tiering/data_migration_when_alter_ttl.py", line 171, in test if not self.wait_for( File "ydb/tests/olap/ttl_tiering/base.py", line 70, in wait_for time.sleep(1) File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown traceback.print_stack(file=sys.stderr) Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: ..._root/gfke/000254', '--source-root', '/home/runner/.ya/build/build_root/gfke/000254/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/gfke/000254/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/data_migration_when_alter_ttl/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/ttl_tiering', '--test-tool-bin', '/home/runner/.ya/tools/v4/8330113388/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--test-filter', 'data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_ttl_delete', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_delete_s3_tiering', '--tb', 'short', '--dep-root', 'ydb/tests/olap/ttl_tiering', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'data_migration_when_alter_ttl.py']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1749, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("..._root/gfke/000254', '--source-root', '/home/runner/.ya/build/build_root/gfke/000254/environment/arcadia', '--output-dir', 
'/home/runner/.ya/build/build_root/gfke/000254/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/data_migration_when_alter_ttl/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/ttl_tiering', '--test-tool-bin', '/home/runner/.ya/tools/v4/8330113388/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--test-filter', 'data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_ttl_delete', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_delete_s3_tiering', '--tb', 'short', '--dep-root', 'ydb/tests/olap/ttl_tiering', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'data_migration_when_alter_ttl.py']' stopped by 600 seconds timeout",), {}) ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/ttl_tiering/py3test >> ttl_delete_s3.py::TestDeleteS3Ttl::test_ttl_delete 2025-03-31 17:38:28,009 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-03-31 17:38:28,327 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination: pid rss ref pdirt 1001206 682M 686M 603M ydb-tests-olap-ttl_tiering --basetemp /home/runner/.ya/build/build_root/gfke/000259/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-mod 1002689 7.0G 7.0G 6.4G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/gfke/000259/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/ttl_d 1004699 393M 393M 358M └─ moto_server s3 --port 27236 Test command err: File "library/python/pytest/main.py", line 101, in main rc = pytest.main( File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main( File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main return wrap_session(config, _main) File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session session.exitstatus = doit(config, session) or 0 File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main config.hook.pytest_runtestloop(session=session) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) 
File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol runtestprotocol(item, nextitem=nextitem) File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol reports.append(call_and_report(item, "call", log)) File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report call = call_runtest_hook(item, when, **kwds) File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook return CallInfo.from_call( File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call result: Optional[TResult] = func() File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in lambda: ihook(item=item, **kwds), when=when, reraise=reraise File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call item.runtest() File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest self.ihook.pytest_pyfunc_call(pyfuncitem=self) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call pyfuncitem.retval = testfunction(**testargs) File "ydb/tests/olap/ttl_tiering/ttl_delete_s3.py", line 269, in test_ttl_delete self.ydb_client.query(""" File "ydb/tests/olap/common/ydb_client.py", line 24, in query return self.session_pool.execute_with_retries(statement) File "contrib/python/ydb/py3/ydb/query/pool.py", line 202, in execute_with_retries return retry_operation_sync(wrapped_callee, retry_settings) File "contrib/python/ydb/py3/ydb/retries.py", line 133, in retry_operation_sync for next_opt in opt_generator: File "contrib/python/ydb/py3/ydb/retries.py", line 94, in retry_operation_impl result = YdbRetryOperationFinalResult(callee(*args, **kwargs)) File "contrib/python/ydb/py3/ydb/query/pool.py", line 200, in wrapped_callee return [result_set for result_set in it] File "contrib/python/ydb/py3/ydb/_utilities.py", line 173, in __next__ return self._next() File "contrib/python/ydb/py3/ydb/_utilities.py", line 164, in _next res = self.wrapper(next(self.it)) File "contrib/python/grpcio/py3/grpc/_channel.py", line 475, in 
__next__ return self._next() File "contrib/python/grpcio/py3/grpc/_channel.py", line 872, in _next _common.wait(self._state.condition.wait, _response_ready) File "contrib/python/grpcio/py3/grpc/_common.py", line 150, in wait _wait_once(wait_fn, MAXIMUM_WAIT_TIMEOUT, spin_cb) File "contrib/python/grpcio/py3/grpc/_common.py", line 112, in _wait_once wait_fn(timeout=timeout) File "contrib/tools/python3/Lib/threading.py", line 359, in wait gotit = waiter.acquire(True, timeout) File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown traceback.print_stack(file=sys.stderr) Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: ...', '/home/runner/.ya/build/build_root/gfke/000259', '--source-root', '/home/runner/.ya/build/build_root/gfke/000259/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/gfke/000259/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/ttl_delete_s3/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/ttl_tiering', '--test-tool-bin', '/home/runner/.ya/tools/v4/8330113388/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--test-filter', 'data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_ttl_delete', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_delete_s3_tiering', '--tb', 'short', '--dep-root', 'ydb/tests/olap/ttl_tiering', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'ttl_delete_s3.py']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1749, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("...', '/home/runner/.ya/build/build_root/gfke/000259', '--source-root', '/home/runner/.ya/build/build_root/gfke/000259/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/gfke/000259/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/ttl_delete_s3/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/ttl_tiering', '--test-tool-bin', '/home/runner/.ya/tools/v4/8330113388/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--test-filter', 'data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test', '--test-filter', 
'ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_ttl_delete', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_delete_s3_tiering', '--tb', 'short', '--dep-root', 'ydb/tests/olap/ttl_tiering', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'ttl_delete_s3.py']' stopped by 600 seconds timeout",), {}) |99.8%| [TA] $(B)/ydb/tests/olap/ttl_tiering/test-results/py3test/{meta.json ... results_accumulator.log} |99.9%| [TA] {RESULT} $(B)/ydb/tests/olap/ttl_tiering/test-results/py3test/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/scenario/py3test >> test_insert.py::TestInsert::test[read_data_during_bulk_upsert] 2025-03-31 17:38:36,457 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-03-31 17:38:36,628 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination: pid rss ref pdirt 1004298 766M 772M 673M ydb-tests-olap-scenario --basetemp /home/runner/.ya/build/build_root/gfke/000153/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-module 1005156 2.1G 2.1G 1.5G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/gfke/000153/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/test_ins Test command err: File "library/python/pytest/main.py", line 101, in main rc = pytest.main( File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main( File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main return wrap_session(config, _main) File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session session.exitstatus = doit(config, session) or 0 File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main config.hook.pytest_runtestloop(session=session) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return 
self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol runtestprotocol(item, nextitem=nextitem) File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol reports.append(call_and_report(item, "call", log)) File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report call = call_runtest_hook(item, when, **kwds) File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook return CallInfo.from_call( File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call result: Optional[TResult] = func() File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in lambda: ihook(item=item, **kwds), when=when, reraise=reraise File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call item.runtest() File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest self.ihook.pytest_pyfunc_call(pyfuncitem=self) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call pyfuncitem.retval = testfunction(**testargs) File "/home/runner/.ya/build/build_root/gfke/000153/environment/arcadia/ydb/tests/olap/scenario/conftest.py", line 88, in test ctx.executable(self, ctx) File "ydb/tests/olap/scenario/test_insert.py", line 86, in scenario_read_data_during_bulk_upsert thread2.join_all() File "ydb/tests/olap/common/thread_helper.py", line 45, in join_all thread.join(timeout=timeout) File "ydb/tests/olap/common/thread_helper.py", line 16, in join super().join(timeout) File "contrib/tools/python3/Lib/threading.py", line 1149, in join self._wait_for_tstate_lock() File "contrib/tools/python3/Lib/threading.py", line 1169, in _wait_for_tstate_lock if lock.acquire(block, timeout): File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown traceback.print_stack(file=sys.stderr) Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: ...-scenario', '--basetemp', '/home/runner/.ya/build/build_root/gfke/000153/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', 
'/home/runner/.ya/build/build_root/gfke/000153/ydb/tests/olap/scenario/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/gfke/000153', '--source-root', '/home/runner/.ya/build/build_root/gfke/000153/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/gfke/000153/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/scenario', '--test-tool-bin', '/home/runner/.ya/tools/v4/8330113388/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--test-filter', 'test_insert.py::TestInsert::test[read_data_during_bulk_upsert]', '--tb', 'short', '--dep-root', 'ydb/tests/olap/scenario', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1749, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("...-scenario', '--basetemp', '/home/runner/.ya/build/build_root/gfke/000153/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/gfke/000153/ydb/tests/olap/scenario/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/gfke/000153', '--source-root', '/home/runner/.ya/build/build_root/gfke/000153/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/gfke/000153/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/scenario', '--test-tool-bin', '/home/runner/.ya/tools/v4/8330113388/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--test-filter', 'test_insert.py::TestInsert::test[read_data_during_bulk_upsert]', '--tb', 'short', '--dep-root', 'ydb/tests/olap/scenario', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout",), {}) |99.9%| [TM] {RESULT} ydb/tests/olap/scenario/py3test |99.9%| CLEANING BUILD ROOT ydb/tests/fq/streaming_optimize [size:medium] nchunks:4 ------ [test_sql_streaming.py 0/4] chunk ran 7 tests (total:90.24s - recipes:0.70s test:89.04s recipes:0.41s) [fail] 
test_sql_streaming.py::test[suites-GroupByHop-default.txt] [default-linux-x86_64-release-asan] (14.24s)
ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test
    result = fq_run.yql_exec(action="explain")
ydb/tests/fq/tools/fqrun.py:81: in yql_exec
    proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir)
library/python/testing/yatest_common/yatest/common/process.py:656: in execute
    res.wait(check_exit_code, timeout, on_timeout)
library/python/testing/yatest_common/yatest/common/process.py:411: in wait
    self._finalise(check_exit_code)
library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise
    self.verify_sanitize_errors()
library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors
    raise ExecutionError(self)
E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --fq-cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_9yobn_5i/fq_config.conf --as-cfg=- --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_9yobn_5i/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_9yobn_5i/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_9yobn_5i/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_9yobn_5i/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_9yobn_5i/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_9yobn_5i/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_9yobn_5i/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_9yobn_5i/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_9yobn_5i/topic_3.txt' has failed with code 100.
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f47cb8e83b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f47cb8e1ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f47cb16bd6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f47cb269464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f47cb269464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f47cb269464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f47cb16ab64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f47cb21790a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f47cb10fe0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f47cb10fe0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f47cb10fc50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f47cb266e0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f47cb266e0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHop-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-GroupByHopByStringKey-default.txt] [default-linux-x86_64-release-asan] (13.73s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:81: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --fq-cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_q3m1g8xl/fq_config.conf --as-cfg=- --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_q3m1g8xl/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_q3m1g8xl/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_q3m1g8xl/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_q3m1g8xl/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_q3m1g8xl/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_q3m1g8xl/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_q3m1g8xl/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_q3m1g8xl/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_q3m1g8xl/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f36995a83b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f36995a1ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f3698e2bd6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f3698f29464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f3698f29464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f3698f29464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f3698e2ab64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f3698ed790a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f3698dcfe0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f3698dcfe0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f3698dcfc50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f3698f26e0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f3698f26e0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHopByStringKey-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-GroupByHopExprKey-default.txt] [default-linux-x86_64-release-asan] (11.66s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:81: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) 
library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --fq-cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jj3h0r8p/fq_config.conf --as-cfg=- --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jj3h0r8p/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jj3h0r8p/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jj3h0r8p/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jj3h0r8p/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jj3h0r8p/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jj3h0r8p/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jj3h0r8p/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jj3h0r8p/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jj3h0r8p/topic_3.txt' has failed with code 100. 
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f2fb6a083b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f2fb6a01ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f2fb628bd6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f2fb6389464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f2fb6389464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f2fb6389464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f2fb628ab64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f2fb633790a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f2fb622fe0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f2fb622fe0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f2fb622fc50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f2fb6386e0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f2fb6386e0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHopExprKey-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-GroupByHopListKey-default.txt] [default-linux-x86_64-release-asan] (11.49s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:81: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --fq-cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_38u2hb2i/fq_config.conf --as-cfg=- --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_38u2hb2i/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_38u2hb2i/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_38u2hb2i/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_38u2hb2i/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_38u2hb2i/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_38u2hb2i/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_38u2hb2i/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_38u2hb2i/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_38u2hb2i/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f7fd20383b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f7fd2031ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f7fd18bbd6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f7fd19b9464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f7fd19b9464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f7fd19b9464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f7fd18bab64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f7fd196790a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f7fd185fe0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f7fd185fe0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f7fd185fc50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f7fd19b6e0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f7fd19b6e0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHopListKey-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-GroupByHopNoKey-default.txt] [default-linux-x86_64-release-asan] (12.65s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:81: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: 
in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --fq-cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7bpy13ao/fq_config.conf --as-cfg=- --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7bpy13ao/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7bpy13ao/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7bpy13ao/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7bpy13ao/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7bpy13ao/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7bpy13ao/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7bpy13ao/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7bpy13ao/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7bpy13ao/topic_3.txt' has failed with code 100. 
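Each report ends with a line of the form "SUMMARY: AddressSanitizer: N byte(s) leaked in M allocation(s)." A short sketch for totaling those summaries across a captured log; the regex mirrors the format above, and reading the whole log into memory is an assumption about its size:

    import re

    SUMMARY_RE = re.compile(
        r"SUMMARY: AddressSanitizer: (\d+) byte\(s\) leaked "
        r"in (\d+) allocation\(s\)")

    def leak_totals(log_text: str) -> tuple[int, int]:
        # Returns (total_leaked_bytes, total_allocations) over all summaries.
        pairs = [(int(b), int(a)) for b, a in SUMMARY_RE.findall(log_text)]
        return sum(b for b, _ in pairs), sum(a for _, a in pairs)

In this chunk the per-run totals stay near 423.7 KB in roughly 8,240 allocations, which is consistent with one fixed import-time leak rather than per-query growth.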
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f0842af83b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f0842af1ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f084237bd6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f0842479464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f0842479464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f0842479464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f084237ab64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f084242790a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f084231fe0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f084231fe0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f084231fc50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f0842476e0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f0842476e0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423692 byte(s) leaked in 8242 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHopNoKey-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-GroupByHopPercentile-default.txt] [default-linux-x86_64-release-asan] (11.08s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:81: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --fq-cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_4c8un6s2/fq_config.conf --as-cfg=- --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_4c8un6s2/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_4c8un6s2/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_4c8un6s2/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_4c8un6s2/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_4c8un6s2/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_4c8un6s2/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_4c8un6s2/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_4c8un6s2/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_4c8un6s2/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f99357483b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f9935741ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f9934fcbd6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f99350c9464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f99350c9464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f99350c9464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f9934fcab64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f993507790a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f9934f6fe0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f9934f6fe0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f9934f6fc50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f99350c6e0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f99350c6e0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHopPercentile-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-GroupByHopTimeExtractorUnusedColumns-default.txt] [default-linux-x86_64-release-asan] (10.54s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:81: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) 
library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --fq-cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_eku7ag4i/fq_config.conf --as-cfg=- --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_eku7ag4i/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_eku7ag4i/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_eku7ag4i/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_eku7ag4i/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_eku7ag4i/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_eku7ag4i/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_eku7ag4i/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_eku7ag4i/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_eku7ag4i/topic_3.txt' has failed with code 100. 
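The --emulate-pq flags in each command follow a "<topic>@<file>" shape (test_topic_input@.../topic_0.txt and so on), binding four emulated PQ topics to local files. fqrun's internal handling of the flag is not shown in this log; the parser below only illustrates the observed syntax, and the sample value is shortened:

    def parse_emulate_pq(arg: str) -> tuple[str, str]:
        # Split "<topic>@<file>" into its two halves.
        topic, sep, path = arg.partition("@")
        if not (sep and topic and path):
            raise ValueError(f"expected '<topic>@<file>', got {arg!r}")
        return topic, path

    topic, path = parse_emulate_pq("test_topic_input@/tmp/topic_0.txt")
    assert topic == "test_topic_input"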
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f08ac4283b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f08ac421ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f08abcabd6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f08abda9464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f08abda9464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f08abda9464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f08abcaab64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f08abd5790a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f08abc4fe0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f08abc4fe0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f08abc4fc50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f08abda6e0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f08abda6e0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHopTimeExtractorUnusedColumns-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff ------ [test_sql_streaming.py 1/4] chunk ran 7 tests (total:90.23s - recipes:0.64s test:89.04s recipes:0.45s) [fail] test_sql_streaming.py::test[suites-GroupByHopWithDataWatermarks-default.txt] [default-linux-x86_64-release-asan] (14.34s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:81: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --fq-cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1k190fpn/fq_config.conf --as-cfg=- --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1k190fpn/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1k190fpn/ast.txt 
--plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1k190fpn/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1k190fpn/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1k190fpn/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1k190fpn/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1k190fpn/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1k190fpn/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1k190fpn/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7fb2ad2883b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7fb2ad281ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7fb2acb0bd6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7fb2acc09464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7fb2acc09464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7fb2acc09464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7fb2acb0ab64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7fb2acbb790a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7fb2acaafe0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7fb2acaafe0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7fb2acaafc50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7fb2acc06e0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7fb2acc06e0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423634 byte(s) leaked in 8241 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHopWithDataWatermarks-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-GroupByHoppingWindow-default.txt] [default-linux-x86_64-release-asan] (14.19s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:81: in 
yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --fq-cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ynwkcift/fq_config.conf --as-cfg=- --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ynwkcift/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ynwkcift/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ynwkcift/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ynwkcift/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ynwkcift/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ynwkcift/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ynwkcift/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ynwkcift/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ynwkcift/topic_3.txt' has failed with code 100. 
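Every leaked-memory stack frame above has the shape "#N 0xADDR in <symbol> <file>:<line>:<col>", with /-S/ and /-B/ prefixes that read as source-tree and build-tree roots (that interpretation is an assumption). A sketch for pulling frames out of the captured bytes:

    import re

    FRAME_RE = re.compile(
        r"#(\d+) 0x([0-9a-f]+) in (.+?) (/\S+):(\d+):(\d+)")

    sample = ("#31 0x7f2fb628bd6d in PyModule_ExecDef "
              "/-S/contrib/tools/python3/Objects/moduleobject.c:440:23")
    num, addr, func, path, line, col = FRAME_RE.search(sample).groups()
    assert func == "PyModule_ExecDef" and line == "440"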
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7ff162af83b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7ff162af1ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7ff16237bd6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7ff162479464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7ff162479464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7ff162479464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7ff16237ab64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7ff16242790a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7ff16231fe0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7ff16231fe0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7ff16231fc50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7ff162476e0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7ff162476e0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423601 byte(s) leaked in 8240 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHoppingWindow-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-GroupByHoppingWindowByStringKey-default.txt] [default-linux-x86_64-release-asan] (11.95s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:81: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --fq-cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ep3itywx/fq_config.conf --as-cfg=- --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ep3itywx/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ep3itywx/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ep3itywx/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ep3itywx/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ep3itywx/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ep3itywx/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ep3itywx/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ep3itywx/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ep3itywx/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f8447cb83b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f8447cb1ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f844753bd6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f8447639464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f8447639464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f8447639464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f844753ab64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f84475e790a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f84474dfe0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f84474dfe0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f84474dfc50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f8447636e0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f8447636e0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHoppingWindowByStringKey-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-GroupByHoppingWindowExprKey-default.txt] [default-linux-x86_64-release-asan] (11.09s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:81: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) 
library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --fq-cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_rmew01fx/fq_config.conf --as-cfg=- --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_rmew01fx/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_rmew01fx/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_rmew01fx/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_rmew01fx/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_rmew01fx/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_rmew01fx/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_rmew01fx/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_rmew01fx/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_rmew01fx/topic_3.txt' has failed with code 100. 
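All of these traces bottom out in CPython's importer (PyImport_ImportModuleLevelObject), so one standard way to keep a known import-time leak from failing every test is a LeakSanitizer suppressions file. The leak:<pattern> syntax and the LSAN_OPTIONS variable are standard sanitizer interfaces; whether this harness forwards the variable to the fqrun child process is an assumption:

    import os

    SUPPRESSIONS = (
        "# Known import-time leak seen in every failure in this chunk.\n"
        "leak:PyImport_ImportModuleLevelObject\n")

    with open("lsan.supp", "w") as supp:
        supp.write(SUPPRESSIONS)

    env = dict(os.environ,
               LSAN_OPTIONS="suppressions=lsan.supp:print_suppressions=0")
    # Pass env when launching fqrun, e.g. subprocess.run(argv, env=env).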
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f5ab14c83b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f5ab14c1ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f5ab0d4bd6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f5ab0e49464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f5ab0e49464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f5ab0e49464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f5ab0d4ab64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f5ab0df790a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f5ab0cefe0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f5ab0cefe0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f5ab0cefc50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f5ab0e46e0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f5ab0e46e0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHoppingWindowExprKey-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-GroupByHoppingWindowListKey-default.txt] [default-linux-x86_64-release-asan] (12.47s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:81: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --fq-cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_3xp4zsyz/fq_config.conf --as-cfg=- --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_3xp4zsyz/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_3xp4zsyz/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_3xp4zsyz/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_3xp4zsyz/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_3xp4zsyz/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_3xp4zsyz/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_3xp4zsyz/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_3xp4zsyz/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_3xp4zsyz/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f1c211683b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f1c21161ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f1c209ebd6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f1c20ae9464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f1c20ae9464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f1c20ae9464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f1c209eab64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f1c20a9790a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f1c2098fe0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f1c2098fe0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f1c2098fc50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f1c20ae6e0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f1c20ae6e0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHoppingWindowListKey-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-GroupByHoppingWindowNoKey-default.txt] [default-linux-x86_64-release-asan] (10.97s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:81: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) 
library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --fq-cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_coodcjfk/fq_config.conf --as-cfg=- --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_coodcjfk/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_coodcjfk/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_coodcjfk/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_coodcjfk/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_coodcjfk/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_coodcjfk/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_coodcjfk/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_coodcjfk/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_coodcjfk/topic_3.txt' has failed with code 100. 
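The command in each ExecutionError is logged as one long quoted string; shlex.split recovers the argv list when individual flags need to be inspected or the run reproduced by hand. The sample below is truncated from the commands above:

    import shlex

    cmd = ("/home/runner/..."  # truncated path from the log
           "/fqrun --exclude-linked-udfs --action=explain "
           "--result-format=full-proto --canonical-output")
    argv = shlex.split(cmd)
    assert "--action=explain" in argv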
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7fea116583b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7fea11651ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7fea10edbd6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7fea10fd9464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7fea10fd9464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7fea10fd9464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7fea10edab64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7fea10f8790a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7fea10e7fe0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7fea10e7fe0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7fea10e7fc50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7fea10fd6e0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7fea10fd6e0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHoppingWindowNoKey-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-GroupByHoppingWindowPercentile-default.txt] [default-linux-x86_64-release-asan] (10.56s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:81: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --fq-cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ua5jf779/fq_config.conf --as-cfg=- --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ua5jf779/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ua5jf779/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ua5jf779/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ua5jf779/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ua5jf779/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ua5jf779/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ua5jf779/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ua5jf779/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ua5jf779/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7fbcf0c283b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7fbcf0c21ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7fbcf04abd6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7fbcf05a9464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7fbcf05a9464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7fbcf05a9464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7fbcf04aab64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7fbcf055790a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7fbcf044fe0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7fbcf044fe0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7fbcf044fc50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7fbcf05a6e0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7fbcf05a6e0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHoppingWindowPercentile-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff ------ [test_sql_streaming.py 2/4] chunk ran 7 tests (total:88.44s - recipes:0.70s test:87.22s recipes:0.41s) [fail] test_sql_streaming.py::test[suites-GroupByHoppingWindowTimeExtractorUnusedColumns-default.txt] [default-linux-x86_64-release-asan] (13.85s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:81: in yql_exec proc_result = 
yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --fq-cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_nkv6wgs9/fq_config.conf --as-cfg=- --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_nkv6wgs9/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_nkv6wgs9/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_nkv6wgs9/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_nkv6wgs9/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_nkv6wgs9/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_nkv6wgs9/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_nkv6wgs9/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_nkv6wgs9/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_nkv6wgs9/topic_3.txt' has failed with code 100. 
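Each failure points at a per-test log named test_sql_streaming.py.test.suites-<case>-default.txt.log under the single Logsdir reported above. A sketch that pairs every failing case with its log file; the directory path comes from the report, and anything about its wider layout is an assumption:

    from pathlib import Path

    def per_test_logs(logsdir: str) -> dict[str, Path]:
        pattern = "test_sql_streaming.py.test.suites-*-default.txt.log"
        logs = {}
        for p in Path(logsdir).glob(pattern):
            # "...suites-GroupByHoppingWindow-default.txt.log" -> case name
            case = p.name.split("suites-", 1)[1].rsplit("-default", 1)[0]
            logs[case] = p
        return logs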
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7fc8071c83b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7fc8071c1ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7fc806a4bd6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7fc806b49464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7fc806b49464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7fc806b49464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7fc806a4ab64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7fc806af790a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7fc8069efe0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7fc8069efe0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7fc8069efc50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7fc806b46e0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7fc806b46e0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHoppingWindowTimeExtractorUnusedColumns-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-GroupByHoppingWithDataWatermarks-default.txt] [default-linux-x86_64-release-asan] (13.71s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:81: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --fq-cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_5a286txg/fq_config.conf --as-cfg=- --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_5a286txg/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_5a286txg/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_5a286txg/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_5a286txg/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_5a286txg/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_5a286txg/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_5a286txg/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_5a286txg/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_5a286txg/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f6f466783b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f6f46671ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f6f45efbd6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f6f45ff9464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f6f45ff9464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f6f45ff9464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f6f45efab64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f6f45fa790a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f6f45e9fe0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f6f45e9fe0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f6f45e9fc50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f6f45ff6e0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f6f45ff6e0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-GroupByHoppingWithDataWatermarks-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-ReadTopic-default.txt] [default-linux-x86_64-release-asan] (11.34s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:81: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) 
library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --fq-cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_v3l6i6o6/fq_config.conf --as-cfg=- --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_v3l6i6o6/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_v3l6i6o6/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_v3l6i6o6/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_v3l6i6o6/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_v3l6i6o6/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_v3l6i6o6/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_v3l6i6o6/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_v3l6i6o6/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_v3l6i6o6/topic_3.txt' has failed with code 100. 
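The fqrun invocation is identical in every failure apart from the per-test temp directory; ydb/tests/fq/tools/fqrun.py:81 builds one command string and splits it on whitespace before handing it to execute(). A hedged sketch of such a builder follows; the flag set mirrors the log, while the helper and parameter names are invented for illustration.

# Hypothetical reconstruction of the command assembly done by the wrapper
# around fqrun; flag values mirror the log, helper names are assumptions.
def build_fqrun_cmd(binary, work_dir, query, topics):
    cmd = (
        f"{binary} --exclude-linked-udfs --action=explain "
        f"--fq-cfg={work_dir}/fq_config.conf --as-cfg=- "
        f"--result-file={work_dir}/results.txt --ast-file={work_dir}/ast.txt "
        f"--plan-file={work_dir}/plan.json --log-file={work_dir}/log.txt "
        f"--result-format=full-proto --canonical-output --query={query} "
    )
    for topic, dump in topics.items():  # e.g. {"test_topic_input": ".../topic_0.txt"}
        cmd += f"--emulate-pq={topic}@{dump} "
    return cmd

# cmd.strip().split() is only safe because none of the generated paths
# contain whitespace; paths with spaces would need a proper argv list.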
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f464fca83b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f464fca1ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f464f52bd6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f464f629464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f464f629464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f464f629464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f464f52ab64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f464f5d790a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f464f4cfe0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f464f4cfe0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f464f4cfc50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f464f626e0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f464f626e0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadTopic-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-ReadTopicGroupWriteToSolomon-default.txt] [default-linux-x86_64-release-asan] (11.10s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:81: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --fq-cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ia_cqyok/fq_config.conf --as-cfg=- --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ia_cqyok/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ia_cqyok/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ia_cqyok/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ia_cqyok/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ia_cqyok/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ia_cqyok/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ia_cqyok/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ia_cqyok/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ia_cqyok/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f01633583b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f0163351ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f0162bdbd6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f0162cd9464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f0162cd9464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f0162cd9464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f0162bdab64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f0162c8790a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f0162b7fe0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f0162b7fe0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f0162b7fc50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f0162cd6e0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f0162cd6e0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadTopicGroupWriteToSolomon-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-ReadTopicWithMetadata-default.txt] [default-linux-x86_64-release-asan] (12.37s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:81: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) 
library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --fq-cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6tqyvc9n/fq_config.conf --as-cfg=- --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6tqyvc9n/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6tqyvc9n/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6tqyvc9n/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6tqyvc9n/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6tqyvc9n/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6tqyvc9n/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6tqyvc9n/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6tqyvc9n/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6tqyvc9n/topic_3.txt' has failed with code 100. 
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7fdc70c683b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7fdc70c61ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7fdc704ebd6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7fdc705e9464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7fdc705e9464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7fdc705e9464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7fdc704eab64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7fdc7059790a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7fdc7048fe0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7fdc7048fe0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7fdc7048fc50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7fdc705e6e0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7fdc705e6e0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423601 byte(s) leaked in 8240 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadTopicWithMetadata-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-ReadTopicWithMetadataInsideFilter-default.txt] [default-linux-x86_64-release-asan] (10.99s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:81: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --fq-cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_p3s4y4to/fq_config.conf --as-cfg=- --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_p3s4y4to/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_p3s4y4to/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_p3s4y4to/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_p3s4y4to/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_p3s4y4to/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_p3s4y4to/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_p3s4y4to/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_p3s4y4to/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_p3s4y4to/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7fda4d9683b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7fda4d961ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7fda4d1ebd6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7fda4d2e9464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7fda4d2e9464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7fda4d2e9464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7fda4d1eab64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7fda4d29790a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7fda4d18fe0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7fda4d18fe0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7fda4d18fc50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7fda4d2e6e0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7fda4d2e6e0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadTopicWithMetadataInsideFilter-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-ReadTopicWithMetadataNestedDeep-default.txt] [default-linux-x86_64-release-asan] (10.08s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:81: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) 
library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --fq-cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_kze3tsra/fq_config.conf --as-cfg=- --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_kze3tsra/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_kze3tsra/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_kze3tsra/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_kze3tsra/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_kze3tsra/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_kze3tsra/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_kze3tsra/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_kze3tsra/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_kze3tsra/topic_3.txt' has failed with code 100. 
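The leak report is also nearly byte-for-byte stable across tests (423744 bytes in 8243 allocations, 423601 in 8240 in one case), and every stack bottoms out in CPython's import machinery (PyImport_ImportModuleLevelObject via the embedded sitecustomize), i.e. interpreter start-up rather than the query code under test. If the intent were to keep the ASAN suite green while that is triaged, LeakSanitizer accepts a suppression file; the following is a hypothetical way to wire one in, and suppressing these frames is an assumption about triage policy, not something this log decides.

# Hypothetical: run the tool with an LSAN suppression file covering the
# import-time frames from the stacks above.
import os
import subprocess
import tempfile

with tempfile.NamedTemporaryFile("w", suffix=".supp", delete=False) as supp:
    supp.write("leak:PyImport_ImportModuleLevelObject\n")
    supp.write("leak:__pyx_pymod_exec_sitecustomize\n")

env = dict(os.environ)
env["LSAN_OPTIONS"] = f"suppressions={supp.name}:print_suppressions=0"
subprocess.run(["fqrun", "--action=explain"], env=env)  # placeholder argv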
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7fabcbc983b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7fabcbc91ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7fabcb51bd6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7fabcb619464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7fabcb619464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7fabcb619464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7fabcb51ab64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7fabcb5c790a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7fabcb4bfe0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7fabcb4bfe0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7fabcb4bfc50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7fabcb616e0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7fabcb616e0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadTopicWithMetadataNestedDeep-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff ------ [test_sql_streaming.py 3/4] chunk ran 7 tests (total:86.95s - recipes:0.60s test:85.86s recipes:0.41s) [fail] test_sql_streaming.py::test[suites-ReadTopicWithMetadataWithFilter-default.txt] [default-linux-x86_64-release-asan] (13.55s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:81: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --fq-cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ijmgmj3r/fq_config.conf --as-cfg=- --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ijmgmj3r/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ijmgmj3r/ast.txt 
--plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ijmgmj3r/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ijmgmj3r/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ijmgmj3r/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ijmgmj3r/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ijmgmj3r/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ijmgmj3r/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ijmgmj3r/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7fb9f8f583b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7fb9f8f51ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7fb9f87dbd6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7fb9f88d9464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7fb9f88d9464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7fb9f88d9464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7fb9f87dab64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7fb9f888790a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7fb9f877fe0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7fb9f877fe0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7fb9f877fc50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7fb9f88d6e0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7fb9f88d6e0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadTopicWithMetadataWithFilter-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-ReadTopicWithSchema-default.txt] [default-linux-x86_64-release-asan] (12.83s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:81: 
in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --fq-cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_qhb__84h/fq_config.conf --as-cfg=- --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_qhb__84h/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_qhb__84h/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_qhb__84h/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_qhb__84h/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_qhb__84h/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_qhb__84h/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_qhb__84h/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_qhb__84h/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_qhb__84h/topic_3.txt' has failed with code 100. 
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f6dc14a83b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f6dc14a1ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f6dc0d2bd6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f6dc0e29464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f6dc0e29464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f6dc0e29464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f6dc0d2ab64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f6dc0dd790a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f6dc0ccfe0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f6dc0ccfe0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f6dc0ccfc50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f6dc0e26e0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f6dc0e26e0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadTopicWithSchema-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-ReadTwoTopics-default.txt] [default-linux-x86_64-release-asan] (11.55s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:81: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --fq-cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_w2zo8z4i/fq_config.conf --as-cfg=- --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_w2zo8z4i/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_w2zo8z4i/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_w2zo8z4i/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_w2zo8z4i/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_w2zo8z4i/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_w2zo8z4i/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_w2zo8z4i/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_w2zo8z4i/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_w2zo8z4i/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f5e1ec183b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f5e1ec11ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f5e1e49bd6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f5e1e599464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f5e1e599464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f5e1e599464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f5e1e49ab64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f5e1e54790a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f5e1e43fe0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f5e1e43fe0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f5e1e43fc50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f5e1e596e0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f5e1e596e0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadTwoTopics-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-ReadWriteSameTopic-default.txt] [default-linux-x86_64-release-asan] (10.54s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:81: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: 
in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --fq-cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jcwq0m1n/fq_config.conf --as-cfg=- --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jcwq0m1n/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jcwq0m1n/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jcwq0m1n/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jcwq0m1n/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jcwq0m1n/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jcwq0m1n/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jcwq0m1n/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jcwq0m1n/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jcwq0m1n/topic_3.txt' has failed with code 100. 
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7ff789f083b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7ff789f01ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7ff78978bd6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7ff789889464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7ff789889464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7ff789889464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7ff78978ab64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7ff78983790a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7ff78972fe0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7ff78972fe0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7ff78972fc50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7ff789886e0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7ff789886e0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadWriteSameTopic-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-ReadWriteTopic-default.txt] [default-linux-x86_64-release-asan] (12.00s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:81: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --fq-cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_qrnnmn96/fq_config.conf --as-cfg=- --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_qrnnmn96/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_qrnnmn96/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_qrnnmn96/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_qrnnmn96/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_qrnnmn96/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_qrnnmn96/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_qrnnmn96/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_qrnnmn96/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_qrnnmn96/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f593d7c83b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f593d7c1ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f593d04bd6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f593d149464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f593d149464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f593d149464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f593d04ab64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f593d0f790a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f593cfefe0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f593cfefe0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f593cfefc50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f593d146e0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f593d146e0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadWriteTopic-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-ReadWriteTopicWithSchema-default.txt] [default-linux-x86_64-release-asan] (11.49s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:81: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) 
library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --fq-cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ht5m57rx/fq_config.conf --as-cfg=- --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ht5m57rx/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ht5m57rx/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ht5m57rx/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ht5m57rx/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ht5m57rx/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ht5m57rx/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ht5m57rx/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ht5m57rx/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ht5m57rx/topic_3.txt' has failed with code 100. 
E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f7c55ab83b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f7c55ab1ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f7c5533bd6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f7c55439464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f7c55439464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f7c55439464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f7c5533ab64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f7c553e790a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f7c552dfe0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f7c552dfe0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f7c552dfc50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f7c55436e0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f7c55436e0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-ReadWriteTopicWithSchema-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[suites-WriteTwoTopics-default.txt] [default-linux-x86_64-release-asan] (10.33s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:38: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:81: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --fq-cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_frq58zr_/fq_config.conf --as-cfg=- --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_frq58zr_/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_frq58zr_/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_frq58zr_/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_frq58zr_/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_frq58zr_/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_frq58zr_/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_frq58zr_/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_frq58zr_/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_frq58zr_/topic_3.txt' has failed with code 100. E Errors: E b'...portModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n #29 0x7f3a8cf283b4 in __Pyx_Import(_object*, _object*, int) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:11081:22\n #30 0x7f3a8cf21ed0 in __pyx_pymod_exec_sitecustomize(_object*) /-B/library/python/runtime_py3/sitecustomize.pyx.cpp:7764:15\n #31 0x7f3a8c7abd6d in PyModule_ExecDef /-S/contrib/tools/python3/Objects/moduleobject.c:440:23\n #32 0x7f3a8c8a9464 in exec_builtin_or_dynamic /-S/contrib/tools/python3/Python/import.c:784:12\n #33 0x7f3a8c8a9464 in _imp_exec_builtin_impl /-S/contrib/tools/python3/Python/import.c:3829:12\n #34 0x7f3a8c8a9464 in _imp_exec_builtin /-S/contrib/tools/python3/Python/clinic/import.c.h:564:21\n #35 0x7f3a8c7aab64 in cfunction_vectorcall_O /-S/contrib/tools/python3/Objects/methodobject.c:509:24\n #36 0x7f3a8c85790a in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26\n #37 0x7f3a8c74fe0a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11\n #38 0x7f3a8c74fe0a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14\n #39 0x7f3a8c74fc50 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24\n #40 0x7f3a8c8a6e0d in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2790:11\n #41 0x7f3a8c8a6e0d in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2873:15\n\nSUMMARY: AddressSanitizer: 423744 byte(s) leaked in 8243 allocation(s).\n' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.suites-WriteTwoTopics-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff ------ FAIL: 28 - FAIL ydb/tests/fq/streaming_optimize ydb/tests/functional/serverless [size:medium] nchunks:10 ------ [test_serverless.py 0/10] chunk ran 1 test (total:113.63s - test:113.25s) [fail] test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false] [default-linux-x86_64-release-asan] (107.65s) ydb/tests/functional/serverless/test_serverless.py:450: in test_database_with_disk_quotas 
IOLoop.current().run_sync(lambda: async_write_key(path, 0, 'test', ignore_out_of_space=False)) contrib/python/tornado/tornado-4/tornado/ioloop.py:458: in run_sync return future_cell[0].result() contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info) :4: in raise_exc_info ??? contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info) ydb/tests/functional/serverless/test_serverless.py:347: in wrapped res = yield func(*args, **kwargs) contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result() contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info) :4: in raise_exc_info ??? contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info) ydb/tests/functional/serverless/test_serverless.py:367: in async_write_key yield tx.async_execute( contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result() contrib/tools/python3/Lib/concurrent/futures/_base.py:449: in result return self.__get_result() contrib/tools/python3/Lib/concurrent/futures/_base.py:401: in __get_result raise self._exception contrib/python/ydb/py3/ydb/connection.py:105: in _on_response_callback response = response if wrap_result is None else wrap_result(rpc_state, response, *wrap_args) contrib/python/ydb/py3/ydb/_session_impl.py:20: in decorator return func(rpc_state, response_pb, session_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:9: in decorator return func(rpc_state, response_pb, session_state, tx_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:22: in decorator return func(rpc_state, response_pb, session_state, tx_state, query, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:165: in wrap_result_and_tx_id issues._process_response(response_pb.operation) contrib/python/ydb/py3/ydb/issues.py:225: in _process_response raise exc_obj(_format_response(response_proto), response_proto.issues) E ydb.issues.Unavailable: message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--false_/dirA0/table`." issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050) During handling of the above exception, another exception occurred: ydb/tests/functional/serverless/test_serverless.py:449: in test_database_with_disk_quotas with pytest.raises(ydb.Unavailable, match=r'.*DISK_SPACE_EXHAUSTED.*'): E AssertionError: Regex pattern did not match. E Regex: '.*DISK_SPACE_EXHAUSTED.*' E Input: 'message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--false_/dirA0/table`." 
issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050)' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless.py.test_database_with_disk_quotas.enable_alter_database_create_hive_first--false.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff ------ [test_serverless.py 1/10] chunk ran 1 test (total:133.86s - test:133.50s) [fail] test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true] [default-linux-x86_64-release-asan] (128.28s) ydb/tests/functional/serverless/test_serverless.py:450: in test_database_with_disk_quotas IOLoop.current().run_sync(lambda: async_write_key(path, 0, 'test', ignore_out_of_space=False)) contrib/python/tornado/tornado-4/tornado/ioloop.py:458: in run_sync return future_cell[0].result() contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info) :4: in raise_exc_info ??? contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info) ydb/tests/functional/serverless/test_serverless.py:347: in wrapped res = yield func(*args, **kwargs) contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result() contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info) :4: in raise_exc_info ??? contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info) ydb/tests/functional/serverless/test_serverless.py:367: in async_write_key yield tx.async_execute( contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result() contrib/tools/python3/Lib/concurrent/futures/_base.py:449: in result return self.__get_result() contrib/tools/python3/Lib/concurrent/futures/_base.py:401: in __get_result raise self._exception contrib/python/ydb/py3/ydb/connection.py:105: in _on_response_callback response = response if wrap_result is None else wrap_result(rpc_state, response, *wrap_args) contrib/python/ydb/py3/ydb/_session_impl.py:20: in decorator return func(rpc_state, response_pb, session_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:9: in decorator return func(rpc_state, response_pb, session_state, tx_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:22: in decorator return func(rpc_state, response_pb, session_state, tx_state, query, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:165: in wrap_result_and_tx_id issues._process_response(response_pb.operation) contrib/python/ydb/py3/ydb/issues.py:225: in _process_response raise exc_obj(_format_response(response_proto), response_proto.issues) E ydb.issues.Unavailable: message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--true_/dirA0/table`." issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050) During handling of the above exception, another exception occurred: ydb/tests/functional/serverless/test_serverless.py:449: in test_database_with_disk_quotas with pytest.raises(ydb.Unavailable, match=r'.*DISK_SPACE_EXHAUSTED.*'): E AssertionError: Regex pattern did not match. 
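Both disk-quota failures in this suite fail the same way (the expected/actual pair for the second one is printed just below): pytest.raises is given the status-code token DISK_SPACE_EXHAUSTED, while the server reports the human-readable issue text with issue_code 2033. A hedged sketch of a match pattern that would fit the message actually produced; write_key is a hypothetical stand-in for the suite's async_write_key helper:

```python
import pytest
import ydb

def write_key(path, key, value):
    # Hypothetical stand-in for the suite's async_write_key helper: here it
    # just re-raises the error shape reported in the log above.
    raise ydb.Unavailable('message: "Disk space exhausted. Table `%s`."' % path)

# Matching the literal message text succeeds where the status-name token
# DISK_SPACE_EXHAUSTED (see the failing Regex above) does not.
with pytest.raises(ydb.Unavailable, match=r"Disk space exhausted"):
    write_key("/Root/quoted_serverless/.../dirA0/table", 0, "test")
```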
E Regex: '.*DISK_SPACE_EXHAUSTED.*' E Input: 'message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--true_/dirA0/table`." issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050)' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless.py.test_database_with_disk_quotas.enable_alter_database_create_hive_first--true.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff ------ FAIL: 2 - FAIL ydb/tests/functional/serverless ydb/tests/olap/column_family/compression [size:medium] ------ sole chunk ran 2 tests (total:585.65s - test:585.36s) [fail] alter_compression.py::TestAlterCompression::test_all_supported_compression [default-linux-x86_64-release-asan] (347.48s) ydb/tests/olap/column_family/compression/alter_compression.py:106: in test_all_supported_compression assert table.get_portion_stat_by_tier()['__DEFAULT']['Rows'] == single_upsert_rows_count * upsert_count E assert 1004731 == (100000 * 10) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff/alter_compression.py.TestAlterCompression.test_all_supported_compression.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff [fail] alter_compression.py::TestAlterCompression::test_availability_data [default-linux-x86_64-release-asan] (232.80s) ydb/tests/olap/column_family/compression/alter_compression.py:169: in test_availability_data self.upsert_and_wait_portions(test_table, single_upsert_rows_count, upsert_rows_count) ydb/tests/olap/column_family/compression/alter_compression.py:50: in upsert_and_wait_portions raise Exception("not all portions have been updated") E Exception: not all portions have been updated Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff/alter_compression.py.TestAlterCompression.test_availability_data.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff ------ FAIL: 2 - FAIL ydb/tests/olap/column_family/compression ydb/tests/olap/s3_import [size:medium] ------ sole chunk ran 1 test (total:597.45s - test:597.39s) Info: Test run has exceeded 8.0G (8388608K) memory limit with 11.1G (11608260K) used. 
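On the compression suite above: the second failure is upsert_and_wait_portions giving up ("not all portions have been updated"), i.e. the test out-waits background compaction; the first (1004731 rows vs the expected 1000000) may be the same settling race observed mid-compaction. A bounded-poll pattern is the usual remedy; a sketch with illustrative timeout values:

```python
import time

def wait_until(predicate, timeout_s=120.0, poll_s=2.0):
    # Bounded poll: give background compaction a window to settle instead of
    # asserting on the first observation. Values are illustrative.
    deadline = time.monotonic() + timeout_s
    while time.monotonic() < deadline:
        if predicate():
            return True
        time.sleep(poll_s)
    return False

# In the suite the predicate would wrap table.get_portion_stat_by_tier();
# a trivial stand-in keeps the sketch self-contained.
assert wait_until(lambda: True), "not all portions have been updated"
```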
This may lead to test failure on the Autocheck/CI You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make pid rss ref pdirt 1004154 44.8M 44.8M 6.2M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 1004291 35.8M 23.9M 11.3M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 1004297 626M 629M 544M └─ ydb-tests-olap-s3_import --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doct 1004826 9.9G 9.9G 9.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff/t 1008249 508M 508M 473M └─ moto_server s3 --port 12872 Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/s3_import/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff/stderr [fail] test_tpch_import.py::TestS3TpchImport::test_import_and_export [default-linux-x86_64-release-asan] (592.74s) teardown failed: ydb/tests/olap/s3_import/base.py:24: in teardown_class cls.cluster.stop() ydb/tests/library/harness/kikimr_runner.py:494: in stop raise daemon.SeveralDaemonErrors(saved_exceptions) E ydb.tests.library.harness.daemon.SeveralDaemonErrors: Daemon failed with message: Bad exit_code.. E Process exit_code = 100. E Stdout file name: E /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff/test_tpch_import.py.TestS3TpchImport.test_import_and_export/cluster/node_1/stdout E Stderr file name: E /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff/test_tpch_import.py.TestS3TpchImport.test_import_and_export/cluster/node_1/stderr E Stderr content: E E GRpc memory quota was set but disabled due to issues with grpc quoter, to enable it use EnableGRpcMemoryQuota option E Current KQP shutdown state: spent 1e-06 seconds, not started yet E warning: address range table at offset 0x10c0 has a premature terminator entry at offset 0x10d0 E E ================================================================= E ==1004826==ERROR: LeakSanitizer: detected memory leaks E E Indirect leak of 3792 byte(s) in 1 object(s) allocated from: E #0 0x1d6b812d in operator new(unsigned long) /-S/contrib/libs/clang18-rt/lib/asan/asan_new_delete.cpp:86:3 E #1 0x468efbcf in MakeIntrusive, NYql::TTypeAnnotationContext &> /-S/util/generic/ptr.h:818:12 E #2 0x468efbcf in NYql::TYtState::TYtState(NYql::TTypeAnnotationContext*) /-S/yt/yql/providers/yt/provider/yql_yt_provider.h:102:25 E #3 0x468e2099 in MakeIntrusive, NYql::TTypeAnnotationContext *> /-S/util/generic/ptr.h:818:16 E #4 0x468e2099 in NYql::CreateYtNativeState(TIntrusivePtr>, TBasicString> const&, TBasicString> const&, NYql::TYtGatewayConfig const*, TIntrusivePtr>, std::__y1::shared_ptr const&, std::__y1::shared_ptr const&) /-S/yt/yql/pro ..[snippet truncated].. 
66 in __libcpp_allocate /-S/contrib/libs/cxxsupp/libcxx/include/new:289:10 E #3 0x466d2866 in allocate /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator.h:118:32 E #4 0x466d2866 in initialize_dynamic /-S/util/generic/hash_table.h:239:35 E #5 0x466d2866 in initialize_buckets_dynamic /-S/util/generic/hash_table.h:912:17 E #6 0x466d2866 in THashTable> const, TIntrusivePtr>>, TBasicString>, THash>>, TSelect1st, TEqualTo>>, std::__y1::allocator>>>::reserve(unsigned long) /-S/util/generic/hash_table.h:1330:13 E #7 0x466bb75b in insert_unique >, TIntrusivePtr > > > /-S/util/generic/hash_table.h:679:9 E #8 0x466bb75b in insert /-S/util/generic/hash.h:153:20 E #9 0x466bb75b in NYql::NCommon::TSettingDispatcher::TSettingHandlerImpl& NYql::NCommon::TSettingDispatcher::AddSetting(TBasicString> const&, NYql::NCommon::TConfSetting&) /-S/yql/essentials/providers/common/config/yql_dispatch.h:344:23 E #10 0x4669ee09 in NYql::TYtConfiguration::TYtConfiguration(NYql::TTypeAnnotationContext&) /-S/yt/yql/providers/yt/common/yql_yt_settings.cpp:466:5 E #11 0x468efbe0 in TYtVersionedConfiguration /-S/yt/yql/providers/yt/common/yql_yt_settings.h:372:11 E #12 0x468efbe0 in MakeIntrusive, NYql::TTypeAnnotationContext &> /-S/util/generic/ptr.h:818:16 E #13 0x468efbe0 in NYql::TYtState::TYtState(NYql::TTypeAnnotationContext*) /-S/yt/yql/providers/yt/provider/yql_yt_provider.h:102:25 E #14 0x468e2099 in MakeIntrusive, NYql::TTypeAnnotationContext *> /-S/util/generic/ptr.h:818:16 E #15 0x468e2099 in NYql::CreateYtNativeState(TIntrusivePtr>, TBasicString ... ydb/tests/olap/scenario [size:medium] ------ sole chunk ran 1 test (total:626.79s - test:600.04s) Chunk exceeded 600s timeout and was killed List of the tests involved in the launch: test_insert.py::TestInsert::test[read_data_during_bulk_upsert] (timeout) duration: 621.66s Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/stderr [timeout] test_insert.py::TestInsert::test[read_data_during_bulk_upsert] [default-linux-x86_64-release-asan] (621.66s) Killed by timeout (600 s) Log:
/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/data_migration_when_alter_ttl.py.TestDataMigrationWhenAlterTtl.test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff ------ [ttl_delete_s3.py] chunk ran 3 tests (total:614.97s - test:600.06s) Chunk exceeded 600s timeout and was killed List of the tests involved in the launch: ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change (fail) duration: 381.94s ttl_delete_s3.py::TestDeleteS3Ttl::test_ttl_delete (timeout) duration: 228.21s ttl_delete_s3.py::TestDeleteS3Ttl::test_delete_s3_tiering test was not launched inside chunk. Info: Test run has exceeded 8.0G (8388608K) memory limit with 12.5G (13092936K) used. This may lead to test failure on the Autocheck/CI You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make pid rss ref pdirt 1000983 44.8M 44.8M 6.6M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 1001176 39.8M 28.4M 15.8M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 1001206 790M 793M 710M └─ ydb-tests-olap-ttl_tiering --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --do 1002689 5.8G 5.8G 5.2G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff 1021381 5.8G 5.8G 5.2G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_st Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/stderr [fail] ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change [default-linux-x86_64-release-asan] (381.94s) ydb/tests/olap/ttl_tiering/ttl_delete_s3.py:141: in test_data_unchanged_after_ttl_change data = self.get_aggregated(table_path) ydb/tests/olap/ttl_tiering/ttl_delete_s3.py:27: in get_aggregated answer = self.ydb_client.query(f"SELECT count(*), sum(val), sum(Digest::Fnv32(s)) from `{table_path}`") ydb/tests/olap/common/ydb_client.py:24: in query return self.session_pool.execute_with_retries(statement) contrib/python/ydb/py3/ydb/query/pool.py:202: in execute_with_retries return retry_operation_sync(wrapped_callee, retry_settings) contrib/python/ydb/py3/ydb/retries.py:133: in retry_operation_sync for next_opt in opt_generator: contrib/python/ydb/py3/ydb/retries.py:94: in retry_operation_impl result = YdbRetryOperationFinalResult(callee(*args, **kwargs)) contrib/python/ydb/py3/ydb/query/pool.py:200: in wrapped_callee return [result_set for result_set in it] contrib/python/ydb/py3/ydb/_utilities.py:173: in __next__ return self._next() contrib/python/ydb/py3/ydb/_utilities.py:164: in _next res = self.wrapper(next(self.it)) contrib/python/ydb/py3/ydb/query/session.py:350: in lambda resp: base.wrap_execute_query_response( contrib/python/ydb/py3/ydb/query/base.py:172: in decorator return func(rpc_state, response_pb, session_state, *args, **kwargs) 
contrib/python/ydb/py3/ydb/query/base.py:189: in wrap_execute_query_response issues._process_response(response_pb) contrib/python/ydb/py3/ydb/issues.py:225: in _process_response raise exc_obj(_format_response(response_proto), response_proto.issues) E ydb.issues.BadRequest: message: "Table /Root/test_data_unchanged_after_ttl_change/table (shard 72075186224037938) scan failed, reason: cannot build metadata/Snapshot too old: {1743442488280:max}. CS min read snapshot: {1743442489000:max}. now: 2025-03-31T17:34:53.953284Z" issue_code: 2017 severity: 1 (server_code: 400010) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/ttl_delete_s3.py.TestDeleteS3Ttl.test_data_unchanged_after_ttl_change.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff [timeout] ttl_delete_s3.py::TestDeleteS3Ttl::test_ttl_delete [default-linux-x86_64-release-asan] (228.21s) Killed by timeout (600 s) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/ttl_delete_s3.py.TestDeleteS3Ttl.test_ttl_delete.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff ------ TIMEOUT: 1 - FAIL, 1 - NOT_LAUNCHED, 2 - TIMEOUT ydb/tests/olap/ttl_tiering ydb/tests/stress/log/tests [size:medium] ------ sole chunk ran 1 test (total:138.85s - test:138.79s) [fail] test_workload.py::TestYdbLogWorkload::test[column] [default-linux-x86_64-release-asan] (134.00s) teardown failed: ydb/tests/stress/log/tests/test_workload.py:41: in teardown_class cls.cluster.stop() ydb/tests/library/harness/kikimr_runner.py:494: in stop raise daemon.SeveralDaemonErrors(saved_exceptions) E ydb.tests.library.harness.daemon.SeveralDaemonErrors: Daemon failed with message: Bad exit_code.. E Process exit_code = 100. 
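On the ttl_tiering "Snapshot too old" failure above: the read goes through the query-service pool visible in its traceback, and ydb.issues.BadRequest is typically not retried by execute_with_retries, so a test racing TTL-driven compaction has to re-issue the statement itself. A hedged sketch built on the same SDK entry points named in the traceback; pool is assumed to be the suite's ydb.QuerySessionPool, and the retry budget is an illustrative choice:

```python
import ydb

def query_with_snapshot_retry(pool, statement, attempts=3):
    # Re-issue the SELECT when the read snapshot was invalidated by
    # background TTL/compaction activity between retries.
    last_err = None
    for _ in range(attempts):
        try:
            return pool.execute_with_retries(statement)
        except ydb.issues.BadRequest as err:  # "Snapshot too old" lands here
            last_err = err
    raise last_err
```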
E Stdout file name: E /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/log/tests/test-results/py3test/testing_out_stuff/test_workload.py.TestYdbLogWorkload.test.column/cluster/node_1/stdout E Stderr file name: E /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/log/tests/test-results/py3test/testing_out_stuff/test_workload.py.TestYdbLogWorkload.test.column/cluster/node_1/stderr E Stderr content: E E GRpc memory quota was set but disabled due to issues with grpc quoter, to enable it use EnableGRpcMemoryQuota option E Current KQP shutdown state: spent 0 seconds, not started yet E warning: address range table at offset 0x10c0 has a premature terminator entry at offset 0x10d0 E E ================================================================= E ==1006809==ERROR: LeakSanitizer: detected memory leaks E E Indirect leak of 15168 byte(s) in 4 object(s) allocated from: E #0 0x1d6b812d in operator new(unsigned long) /-S/contrib/libs/clang18-rt/lib/asan/asan_new_delete.cpp:86:3 E #1 0x468efbcf in MakeIntrusive, NYql::TTypeAnnotationContext &> /-S/util/generic/ptr.h:818:12 E #2 0x468efbcf in NYql::TYtState::TYtState(NYql::TTypeAnnotationContext*) /-S/yt/yql/providers/yt/provider/yql_yt_provider.h:102:25 E #3 0x468e2099 in MakeIntrusive, NYql::TTypeAnnotationContext *> /-S/util/generic/ptr.h:818:16 E #4 0x468e2099 in NYql::CreateYtNativeState(TIntrusivePtr>, TBasicString> const&, TBasicString> const&, NYql::TYtGatewayConfig const*, TIntrusivePtr>, std::__y1::shared_ptr const&, std::__y1::shared_ptr const&) /-S/yt/yql/providers/yt/pr ..[snippet truncated].. cpp_allocate /-S/contrib/libs/cxxsupp/libcxx/include/new:289:10 E #3 0x466d2866 in allocate /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator.h:118:32 E #4 0x466d2866 in initialize_dynamic /-S/util/generic/hash_table.h:239:35 E #5 0x466d2866 in initialize_buckets_dynamic /-S/util/generic/hash_table.h:912:17 E #6 0x466d2866 in THashTable> const, TIntrusivePtr>>, TBasicString>, THash>>, TSelect1st, TEqualTo>>, std::__y1::allocator>>>::reserve(unsigned long) /-S/util/generic/hash_table.h:1330:13 E #7 0x466bb75b in insert_unique >, TIntrusivePtr > > > /-S/util/generic/hash_table.h:679:9 E #8 0x466bb75b in insert /-S/util/generic/hash.h:153:20 E #9 0x466bb75b in NYql::NCommon::TSettingDispatcher::TSettingHandlerImpl& NYql::NCommon::TSettingDispatcher::AddSetting(TBasicString> const&, NYql::NCommon::TConfSetting&) /-S/yql/essentials/providers/common/config/yql_dispatch.h:344:23 E #10 0x4669ee09 in NYql::TYtConfiguration::TYtConfiguration(NYql::TTypeAnnotationContext&) /-S/yt/yql/providers/yt/common/yql_yt_settings.cpp:466:5 E #11 0x468efbe0 in TYtVersionedConfiguration /-S/yt/yql/providers/yt/common/yql_yt_settings.h:372:11 E #12 0x468efbe0 in MakeIntrusive, NYql::TTypeAnnotationContext &> /-S/util/generic/ptr.h:818:16 E #13 0x468efbe0 in NYql::TYtState::TYtState(NYql::TTypeAnnotationContext*) /-S/yt/yql/providers/yt/provider/yql_yt_provider.h:102:25 E #14 0x468e2099 in MakeIntrusive, NYql::TTypeAnnotationContext *> /-S/util/generic/ptr.h:818:16 E #15 0x468e2099 in NYql::CreateYtNativeState(TIntrusivePtr>, TBasicString> c... 
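The truncated leak chain above is the same NYql::TYtState / TYtConfiguration allocation path reported in the s3_import teardown, so one LeakSanitizer suppression covers both while the underlying leak is being fixed. A local-triage sketch only, not a fix; the file name and where it is set are assumptions, while LSAN_OPTIONS and the leak:&lt;pattern&gt; syntax are standard LeakSanitizer features:

```python
import os

# Write a one-line suppressions file and point LeakSanitizer at it. This
# hides the known TYtState leak so other teardown errors become visible;
# it must be in the environment before the ydbd processes are started.
with open("lsan.supp", "w") as supp:
    supp.write("leak:NYql::TYtState\n")  # standard LSAN suppression pattern
os.environ["LSAN_OPTIONS"] = "suppressions=" + os.path.abspath("lsan.supp")
```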
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/log/tests/test-results/py3test/testing_out_stuff/test_workload.py.TestYdbLogWorkload.test.column.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/log/tests/test-results/py3test/testing_out_stuff ------ FAIL: 1 - FAIL ydb/tests/stress/log/tests ydb/core/keyvalue/ut_trace [size:medium] nchunks:5 ------ [0/5] chunk ran 1 test (total:6.07s - test:6.04s) [fail] TKeyValueTracingTest::ReadHuge [default-linux-x86_64-release-asan] (2.28s) equal assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:124, void TestOneRead(TString, TString): env.WilsonUploader->Traces.size() == 1 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 operator() at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.ReadHuge.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.ReadHuge.out ------ [1/5] chunk ran 1 test (total:6.18s - test:6.15s) [fail] TKeyValueTracingTest::ReadSmall [default-linux-x86_64-release-asan] (2.32s) equal assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:124, void TestOneRead(TString, TString): env.WilsonUploader->Traces.size() == 1 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 operator() at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? 
at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.ReadSmall.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.ReadSmall.out ------ [2/5] chunk ran 1 test (total:6.18s - test:6.14s) [fail] TKeyValueTracingTest::WriteHuge [default-linux-x86_64-release-asan] (2.29s) assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:103, void TestOneWrite(TString, TVector &&): (env.WilsonUploader->Traces.size() == 1) failed: (2 != 1) TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 TestOneWrite(TBasicString>, TVector>, std::__y1::allocator>>>&&) at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:103:5 UnRef at /-S/util/generic/ptr.h:624:13 operator() at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.WriteHuge.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.WriteHuge.out ------ [3/5] chunk ran 1 test (total:6.28s - test:6.25s) [fail] TKeyValueTracingTest::WriteSmall [default-linux-x86_64-release-asan] (2.08s) assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:103, void TestOneWrite(TString, TVector &&): (env.WilsonUploader->Traces.size() == 1) failed: (2 != 1) TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 TestOneWrite(TBasicString>, TVector>, std::__y1::allocator>>>&&) at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:103:5 UnRef at /-S/util/generic/ptr.h:624:13 operator() at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? 
at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.WriteSmall.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.WriteSmall.out ------ FAIL: 4 - FAIL ydb/core/keyvalue/ut_trace ydb/core/kqp/ut/cost [size:medium] nchunks:50 ------ [0/50] chunk ran 1 test (total:12.73s - test:12.70s) [crashed] KqpCost::OlapWriteRow [default-linux-x86_64-release-asan] (0.00s) Test crashed (return code: 100) ==1001511==ERROR: AddressSanitizer: SEGV on unknown address 0x000000000008 (pc 0x000018b718ad bp 0x7ffdd11de4e0 sp 0x7ffdd11de340 T0) ==1001511==The signal is caused by a READ memory access. ==1001511==Hint: address points to the zero page. 2025-03-31T17:28:34.372526Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7488026844285833513:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-31T17:28:34.372726Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; #0 0x18b718ad in Get::TypeHandler> /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:273:31 #1 0x18b718ad in Get /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:1348:32 #2 0x18b718ad in _internal_table_access /-B/ydb/public/api/protos/ydb_query_stats.pb.h:1762:31 #3 0x18b718ad in table_access /-B/ydb/public/api/protos/ydb_query_stats.pb.h:1766:10 #4 0x18b718ad in NKikimr::NKqp::NTestSuiteKqpCost::TTestCaseOlapWriteRow::Execute_(NUnitTest::TTestContext&) /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:636:13 #5 0x18b96437 in operator() /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1 #6 0x18b96437 in __invoke<(lambda at /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25 #7 0x18b96437 in __call<(lambda at /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5 #8 0x18b96437 in ..[snippet truncated].. 
0x194e85e5 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12 #11 0x194e85e5 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10 #12 0x194e85e5 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #13 0x194b8138 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18 #14 0x18b952e3 in NKikimr::NKqp::NTestSuiteKqpCost::TCurrentTest::Execute() /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1 #15 0x194b9a05 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:495:19 #16 0x194e2b5c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44 #17 0x7fe63034cd8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) #18 0x7fe63034ce3f in __libc_start_main (/lib/x86_64-linux-gnu/libc.so.6+0x29e3f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) #19 0x16306028 in _start (/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost+0x16306028) (BuildId: 5128b0b237a145c7e4cbd2034645258e10874b06) SUMMARY: AddressSanitizer: SEGV /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:273:31 in Get::TypeHandler> ==1001511==ABORTING Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/cost/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/cost/test-results/unittest/testing_out_stuff/KqpCost.OlapWriteRow.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/cost/test-results/unittest/testing_out_stuff/KqpCost.OlapWriteRow.out ------ FAIL: 1 - CRASHED ydb/core/kqp/ut/cost ydb/core/kqp/ut/query [size:medium] nchunks:50 ------ [0/50] chunk ran 1 test (total:48.35s - test:48.30s) [fail] KqpStats::SysViewClientLost [default-linux-x86_64-release-asan] (44.21s) assertion failed at ydb/core/kqp/ut/query/kqp_stats_ut.cpp:591, virtual void NKikimr::NKqp::NTestSuiteKqpStats::TTestCaseSysViewClientLost::Execute_(NUnitTest::TTestContext &): (timeoutedCount == 1) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x194EA4DB 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x199AF40F 2. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:591: Execute_ @ 0x1908FA48 3. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18: operator() @ 0x190A2A57 4. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18:1) &> @ 0x190A2A57 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18:1) &> @ 0x190A2A57 6. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x190A2A57 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x190A2A57 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x199E6435 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x199E6435 10. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x199E6435 11. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x199B5F88 12. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18: Execute @ 0x190A1BDB 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x199B7855 14. 
/tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x199E09AC 15. ??:0: ?? @ 0x7F8095818D8F 16. ??:0: ?? @ 0x7F8095818E3F 17. ??:0: ?? @ 0x16459028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff/KqpStats.SysViewClientLost.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff/KqpStats.SysViewClientLost.out ------ FAIL: 1 - FAIL ydb/core/kqp/ut/query ydb/core/kqp/ut/tx [size:medium] nchunks:50 ------ [0/50] chunk ran 1 test (total:19.58s - test:19.54s) [fail] KqpSinkTx::OlapInvalidateOnError [default-linux-x86_64-release-asan] (15.36s) assertion failed at ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:182, virtual void NKikimr::NKqp::NTestSuiteKqpSinkTx::TInvalidateOnError::DoExecute(): (result.GetStatus() == EStatus::PRECONDITION_FAILED) failed: (BAD_REQUEST != PRECONDITION_FAILED)
: Error: Bad request. Table: `/Root/KV`., code: 2017
: Error: Conflict with existing key. {"sorting_columns":[{"name":"Key","value":"1"}],"fields":["Key: uint32"]}, code: 2017 , with diff: (BAD_|P)RE(QUES|CONDI)T(|ION_FAILED) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191FE23B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x196C634F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:182: DoExecute @ 0x18D9B8AE 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18D0BACA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:201: Execute_ @ 0x18D7A4FA 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14: operator() @ 0x18D81977 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14:1) &> @ 0x18D81977 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14:1) &> @ 0x18D81977 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18D81977 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18D81977 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196FD375 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196FD375 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196FD375 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x196CCEC8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14: Execute @ 0x18D80B43 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x196CE795 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196F78EC 17. ??:0: ?? @ 0x7FC173FF5D8F 18. ??:0: ?? @ 0x7FC173FF5E3F 19. ??:0: ?? @ 0x163B4028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSinkTx.OlapInvalidateOnError.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSinkTx.OlapInvalidateOnError.out ------ [1/50] chunk ran 1 test (total:19.43s - test:19.36s) [fail] KqpSnapshotIsolation::TConflictReadWriteOlap [default-linux-x86_64-release-asan] (15.25s) assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:146, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictReadWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (ABORTED != SUCCESS)
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: tablet lock have another internal generation counter: 18446744073709551615 != 0, code: 2001 , with diff: (ABORT|SUCC)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191FE23B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x196C634F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:146: DoExecute @ 0x18DD15F8 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18D0BACA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:172: Execute_ @ 0x18DB8CCA 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18DBEF27 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18DBEF27 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18DBEF27 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196FD375 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196FD375 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196FD375 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x196CCEC8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18DBE0F3 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x196CE795 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196F78EC 17. ??:0: ?? @ 0x7F8BF5FC2D8F 18. ??:0: ?? @ 0x7F8BF5FC2E3F 19. ??:0: ?? @ 0x163B4028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictReadWriteOlap.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictReadWriteOlap.out ------ [10/50] chunk ran 1 test (total:14.63s - test:14.60s) [fail] KqpSnapshotIsolation::TSimpleOltpNoSink [default-linux-x86_64-release-asan] (10.31s) assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TSimple::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191FE23B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x196C634F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25: DoExecute @ 0x18DC0DB7 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18D0BACA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:57: Execute_ @ 0x18DB7D9A 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18DBEF27 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18DBEF27 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18DBEF27 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196FD375 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196FD375 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196FD375 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x196CCEC8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18DBE0F3 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x196CE795 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196F78EC 17. ??:0: ?? @ 0x7F1AC57D0D8F 18. ??:0: ?? @ 0x7F1AC57D0E3F 19. ??:0: ?? @ 0x163B4028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TSimpleOltpNoSink.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TSimpleOltpNoSink.out ------ [2/50] chunk ran 1 test (total:14.55s - test:14.52s) [fail] KqpSnapshotIsolation::TConflictReadWriteOltp [default-linux-x86_64-release-asan] (10.02s) assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictReadWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191FE23B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x196C634F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131: DoExecute @ 0x18DCECE7 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18D0BACA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:159: Execute_ @ 0x18DB8872 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18DBEF27 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18DBEF27 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18DBEF27 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196FD375 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196FD375 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196FD375 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x196CCEC8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18DBE0F3 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x196CE795 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196F78EC 17. ??:0: ?? @ 0x7F479D443D8F 18. ??:0: ?? @ 0x7F479D443E3F 19. ??:0: ?? @ 0x163B4028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictReadWriteOltp.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictReadWriteOltp.out ------ [3/50] chunk ran 1 test (total:15.07s - test:15.04s) [fail] KqpSnapshotIsolation::TConflictReadWriteOltpNoSink [default-linux-x86_64-release-asan] (10.21s) assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictReadWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191FE23B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x196C634F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131: DoExecute @ 0x18DCECE7 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18D0BACA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:166: Execute_ @ 0x18DB8A9A 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18DBEF27 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18DBEF27 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18DBEF27 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196FD375 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196FD375 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196FD375 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x196CCEC8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18DBE0F3 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x196CE795 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196F78EC 17. ??:0: ?? @ 0x7FB04F7F6D8F 18. ??:0: ?? @ 0x7FB04F7F6E3F 19. ??:0: ?? @ 0x163B4028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictReadWriteOltpNoSink.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictReadWriteOltpNoSink.out ------ [4/50] chunk ran 1 test (total:23.27s - setup:0.01s test:23.22s) [fail] KqpSnapshotIsolation::TConflictWriteOlap [default-linux-x86_64-release-asan] (19.01s) assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:92, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictWrite::DoExecute(): (result.GetStatus() == EStatus::ABORTED) failed: (SUCCESS != ABORTED) , with diff: (SUCC|ABORT)E(SS|D) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191FE23B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x196C634F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:92: DoExecute @ 0x18DC9C58 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18D0BACA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:118: Execute_ @ 0x18DB864A 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18DBEF27 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27 8. 
/-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18DBEF27
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18DBEF27
10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196FD375
11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196FD375
12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196FD375
13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x196CCEC8
14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18DBE0F3
15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x196CE795
16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196F78EC
17. ??:0: ?? @ 0x7FD4A6386D8F
18. ??:0: ?? @ 0x7FD4A6386E3F
19. ??:0: ?? @ 0x163B4028
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictWriteOlap.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictWriteOlap.out
------ [5/50] chunk ran 1 test (total:15.15s - test:15.11s)
[fail] KqpSnapshotIsolation::TConflictWriteOltp [default-linux-x86_64-release-asan] (10.42s)
assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191FE23B
1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x196C634F
2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76: DoExecute @ 0x18DC7347
3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18D0BACA
4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:105: Execute_ @ 0x18DB81F2
5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18DBEF27
6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27
7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27
8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18DBEF27
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18DBEF27
10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196FD375
11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196FD375
12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196FD375
13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x196CCEC8
14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18DBE0F3
15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x196CE795
16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196F78EC
17. ??:0: ?? @ 0x7F1DB2B2ED8F
18. ??:0: ?? @ 0x7F1DB2B2EE3F
19. ??:0: ?? @ 0x163B4028
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictWriteOltp.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictWriteOltp.out
------ [6/50] chunk ran 1 test (total:14.76s - test:14.71s)
[fail] KqpSnapshotIsolation::TConflictWriteOltpNoSink [default-linux-x86_64-release-asan] (10.35s)
assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191FE23B
1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x196C634F
2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76: DoExecute @ 0x18DC7347
3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18D0BACA
4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:112: Execute_ @ 0x18DB841A
5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18DBEF27
6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27
7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27
8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18DBEF27
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18DBEF27
10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196FD375
11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196FD375
12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196FD375
13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x196CCEC8
14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18DBE0F3
15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x196CE795
16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196F78EC
17. ??:0: ?? @ 0x7F10BF78AD8F
18. ??:0: ?? @ 0x7F10BF78AE3F
19. ??:0: ?? @ 0x163B4028
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictWriteOltpNoSink.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictWriteOltpNoSink.out
------ [7/50] chunk ran 1 test (total:14.87s - test:14.84s)
[fail] KqpSnapshotIsolation::TReadOnlyOltp [default-linux-x86_64-release-asan] (10.20s)
assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TReadOnly::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191FE23B
1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x196C634F
2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185: DoExecute @ 0x18DD66A3
3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18D0BACA
4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:214: Execute_ @ 0x18DB8EF2
5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18DBEF27
6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27
7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27
8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18DBEF27
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18DBEF27
10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196FD375
11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196FD375
12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196FD375
13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x196CCEC8
14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18DBE0F3
15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x196CE795
16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196F78EC
17. ??:0: ?? @ 0x7FCFB7953D8F
18. ??:0: ?? @ 0x7FCFB7953E3F
19. ??:0: ?? @ 0x163B4028
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TReadOnlyOltp.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TReadOnlyOltp.out
------ [8/50] chunk ran 1 test (total:15.17s - test:15.12s)
[fail] KqpSnapshotIsolation::TReadOnlyOltpNoSink [default-linux-x86_64-release-asan] (10.71s)
assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TReadOnly::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191FE23B
1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x196C634F
2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185: DoExecute @ 0x18DD66A3
3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18D0BACA
4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:221: Execute_ @ 0x18DB911A
5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18DBEF27
6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27
7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27
8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18DBEF27
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18DBEF27
10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196FD375
11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196FD375
12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196FD375
13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x196CCEC8
14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18DBE0F3
15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x196CE795
16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196F78EC
17. ??:0: ?? @ 0x7F5E1197AD8F
18. ??:0: ?? @ 0x7F5E1197AE3F
19. ??:0: ?? @ 0x163B4028
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TReadOnlyOltpNoSink.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TReadOnlyOltpNoSink.out
------ [9/50] chunk ran 1 test (total:14.45s - test:14.41s)
[fail] KqpSnapshotIsolation::TSimpleOltp [default-linux-x86_64-release-asan] (10.09s)
assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TSimple::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191FE23B
1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x196C634F
2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25: DoExecute @ 0x18DC0DB7
3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18D0BACA
4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:50: Execute_ @ 0x18DB7B72
5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18DBEF27
6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27
7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18DBEF27
8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18DBEF27
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18DBEF27
10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196FD375
11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196FD375
12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196FD375
13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x196CCEC8
14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18DBE0F3
15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x196CE795
16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196F78EC
17. ??:0: ?? @ 0x7F3F0D93ED8F
18. ??:0: ?? @ 0x7F3F0D93EE3F
19. ??:0: ?? @ 0x163B4028
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TSimpleOltp.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TSimpleOltp.out
------ FAIL: 11 - FAIL ydb/core/kqp/ut/tx
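Note: all eleven failures in this suite share one shape: the test expects EStatus::SUCCESS, the server answers PRECONDITION_FAILED with the issue "SnapshotRW can only be used with olap tables", and the unittest framework prints a character-level diff of the two status names. Below is a minimal sketch of that assertion pattern using the same library/cpp/testing/unittest framework the backtraces point at; the suite name and the TString stand-ins are illustrative, not the actual kqp_snapshot_isolation_ut.cpp source.

    #include <library/cpp/testing/unittest/registar.h>
    #include <util/generic/string.h>

    Y_UNIT_TEST_SUITE(DiffFormatSketch) {
        Y_UNIT_TEST(StatusMustBeSuccess) {
            // Stand-ins for result.GetStatus() and EStatus::SUCCESS; plain
            // strings keep the sketch self-contained while exercising the
            // same failure output.
            const TString actual = "PRECONDITION_FAILED";
            const TString expected = "SUCCESS";
            // On mismatch the framework raises an error that stringifies both
            // values, appends a compact diff such as
            // "(PRE|SUC)C(ONDITION_FAIL|)E(D|SS)", and then the _C message --
            // which is why the server issue text follows the status pair in
            // the records above.
            UNIT_ASSERT_VALUES_EQUAL_C(actual, expected,
                "Error: SnapshotRW can only be used with olap tables.");
        }
    }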
ydb/core/statistics/aggregator/ut [size:medium] nchunks:60
------ [0/60] chunk ran 1 test (total:547.93s - test:547.89s)
[fail] AnalyzeColumnshard::AnalyzeRebootColumnShard [default-linux-x86_64-release-asan] (543.25s)
(TWithBackTrace) ydb/library/actors/testlib/test_runtime.h:579: Exception occured while waiting for NKikimr::NStat::TEvStatistics::TEvAnalyzeResponse: (NActors::TSchedulingLimitReachedException) TestActorRuntime Processed over 100000 events.
ydb/library/actors/testlib/test_runtime.cpp:716:
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
TWithBackTrace::TWithBackTrace<>() at /-S/util/generic/yexception.h:146:5
NKikimr::NStat::TEvStatistics::TEvAnalyzeResponse::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEventRethrow(NActors::TActorId const&, TDuration) at /-S/ydb/library/actors/testlib/test_runtime.h:0:24
DoDestroy at /-S/util/generic/ptr.h:237:13
operator() at /-S/ydb/core/statistics/aggregator/ut/ut_analyze_columnshard.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/aggregator/ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/aggregator/ut/test-results/unittest/testing_out_stuff/AnalyzeColumnshard.AnalyzeRebootColumnShard.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/aggregator/ut/test-results/unittest/testing_out_stuff/AnalyzeColumnshard.AnalyzeRebootColumnShard.out
------ FAIL: 1 - FAIL ydb/core/statistics/aggregator/ut
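Note: this failure is not a value assertion but an exhausted event budget: GrabEdgeEventRethrow kept waiting for TEvAnalyzeResponse while the test runtime dispatched more than 100000 events and threw TSchedulingLimitReachedException. A compact illustration of that guard pattern follows; the class and method names are illustrative, not the actual ydb/library/actors/testlib/test_runtime.cpp internals.

    #include <cstdint>
    #include <stdexcept>
    #include <string>

    // Illustrative stand-in for NActors::TSchedulingLimitReachedException.
    struct TSchedulingLimitReached: public std::runtime_error {
        using std::runtime_error::runtime_error;
    };

    // The runtime counts every dispatched event; once the budget is spent, a
    // test still waiting for its edge event fails fast with a backtrace
    // instead of spinning until the chunk-level timeout.
    class TEventBudget {
    public:
        explicit TEventBudget(uint64_t limit)
            : Limit(limit)
        {
        }

        void OnEventDispatched() {
            if (++Processed > Limit) {
                throw TSchedulingLimitReached(
                    "TestActorRuntime Processed over " + std::to_string(Limit) + " events");
            }
        }

    private:
        uint64_t Processed = 0;
        const uint64_t Limit;
    };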
ydb/core/tx/datashard/ut_incremental_backup [size:medium] nchunks:4
------ [0/4] chunk ran 1 test (total:136.21s - setup:0.01s test:136.13s)
[fail] IncrementalBackup::ComplexRestoreBackupCollection+WithIncremental [default-linux-x86_64-release-asan] (131.46s)
assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (TIMEOUT != SUCCESS)
Response { QueryIssues { message: "Request timeout 600000ms exceeded" severity: 1 } QueryIssues { message: "Cancelling after 600000ms in ExecuteState" severity: 1 } TxMeta { } } YdbStatus: TIMEOUT
, with diff: (TIM|SUCC)E(OUT|SS)
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12
NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode) at /-S/ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:0:5
UnRef at /-S/util/generic/ptr.h:624:13
operator() at /-S/ydb/core/tx/datashard/datashard_ut_incremental_backup.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/testing_out_stuff/IncrementalBackup.ComplexRestoreBackupCollection.WithIncremental.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/testing_out_stuff/IncrementalBackup.ComplexRestoreBackupCollection.WithIncremental.out
------ FAIL: 1 - FAIL ydb/core/tx/datashard/ut_incremental_backup
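Note: here the helper NKikimr::ExecSQL compares the response status against an expected Ydb::StatusIds code, and the query died on the server's 600000 ms request timeout, so the comparison reports TIMEOUT != SUCCESS. A simplified sketch of the checking step only; the enum and struct below are stand-ins, while the real helper takes Tests::TServer::TPtr, a sender TActorId, the query text, and Ydb::StatusIds::StatusCode.

    #include <library/cpp/testing/unittest/registar.h>
    #include <util/generic/string.h>

    // Stand-ins for the Ydb status code and the query response.
    enum class EYdbStatus { SUCCESS, TIMEOUT };

    struct TResponse {
        EYdbStatus Status;
        TString Issues; // e.g. "Request timeout 600000ms exceeded"
    };

    // Mirrors the check at datashard_ut_common.cpp:2091 that produced
    // "(response.GetYdbStatus() == code) failed: (TIMEOUT != SUCCESS)".
    void CheckExecSQLStatus(const TResponse& response, EYdbStatus expected) {
        UNIT_ASSERT_C(response.Status == expected, response.Issues);
    }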
ydb/core/tx/datashard/ut_volatile [size:medium] nchunks:2
------ [0/2] chunk ran 1 test (total:14.44s - test:14.39s)
[crashed] DataShardVolatile::GracefulShardRestartNoEarlyReadSetAck [default-linux-x86_64-release-asan] (5.20s)
Test crashed (return code: 100)
==999326==ERROR: LeakSanitizer: detected memory leaks
Direct leak of 224 byte(s) in 1 object(s) allocated from:
    #0 0x1903ffcd in operator new(unsigned long) /-S/contrib/libs/clang18-rt/lib/asan/asan_new_delete.cpp:86:3
    #1 0x2b08ca70 in make_unique /-S/contrib/libs/cxxsupp/libcxx/include/__memory/unique_ptr.h:621:26
    #2 0x2b08ca70 in NKikimr::NEvents::TDataEvents::TEvWriteResult::BuildCompleted(unsigned long, unsigned long) /-S/ydb/core/tx/data_events/events.h:123:27
    #3 0x2b086026 in NKikimr::NDataShard::TExecuteWriteUnit::Execute(TIntrusivePtr>, NKikimr::NTabletFlatExecutor::TTransactionContext&, NActors::TActorContext const&) /-S/ydb/core/tx/datashard/execute_write_unit.cpp:376:37
    #4 0x2aedef1c in NKikimr::NDataShard::TPipeline::RunExecutionPlan(TIntrusivePtr>, TVector>&, NKikimr::NTabletFlatExecutor::TTransactionContext&, NActors::TActorContext const&) /-S/ydb/core/tx/datashard/datashard_pipeline.cpp:1851:28
    #5 0x2ab9fc94 in NKikimr::NDataShard::TDataShard::TTxWrite::Execute(NKikimr::NTabletFlatExecutor::TTransactionContext&, NActors::TActorContext const&) /-S/ydb/core/tx/datashard/datashard__write.cpp:100:38
    #6 0x2062a8d4 in NKikimr::NTabletFlatExecutor::TExecutor::ExecuteTransaction(NKikimr::NTabletFlatExecutor::TSeat*) /-S/ydb/core/tablet_flat/flat_executor.cpp:1910:35
    #7 0x20626274 in NKikimr::NTabletFlatExecutor: ..[snippet truncated].. in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12
    #27 0x18de9c37 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10
    #28 0x198001f5 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12
    #29 0x198001f5 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10
    #30 0x198001f5 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20
    #31 0x197cfd48 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18
    #32 0x18de8ae3 in NKikimr::NTestSuiteDataShardVolatile::TCurrentTest::Execute() /-S/ydb/core/tx/datashard/datashard_ut_volatile.cpp:17:1
    #33 0x197d1615 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:495:19
    #34 0x197fa76c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44
    #35 0x7fcaf1828d8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e)
SUMMARY: AddressSanitizer: 664 byte(s) leaked in 11 allocation(s).
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_volatile/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_volatile/test-results/unittest/testing_out_stuff/DataShardVolatile.GracefulShardRestartNoEarlyReadSetAck.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_volatile/test-results/unittest/testing_out_stuff/DataShardVolatile.GracefulShardRestartNoEarlyReadSetAck.out
------ FAIL: 1 - CRASHED ydb/core/tx/datashard/ut_volatile
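Note: this crash is LeakSanitizer (part of the asan build visible in the test tags) aborting the binary at exit because 11 allocations totalling 664 bytes were never freed; the first is a 224-byte event built via make_unique in TEvWriteResult::BuildCompleted. Below is a self-contained repro of this diagnostic class, not the actual YDB code path; compiled with -fsanitize=address it prints an analogous "Direct leak of 224 byte(s)" report at exit.

    #include <memory>

    struct TEvent {
        char Payload[224]; // sized to echo the 224-byte direct leak above
    };

    TEvent* BuildCompleted() {
        // Allocated via make_unique, but ownership escapes through release().
        auto ev = std::make_unique<TEvent>();
        return ev.release();
    }

    int main() {
        TEvent* leaked = BuildCompleted();
        (void)leaked; // never deleted: LeakSanitizer reports it at process exit
        return 0;
    }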
ydb/core/tx/tiering/ut [size:medium] nchunks:60
------ [0/60] chunk ran 1 test (total:32.73s - test:32.69s)
[crashed] ColumnShardTiers::TTLUsage [default-linux-x86_64-release-asan] (0.00s)
Test crashed (return code: -6)
See logs for more info
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tiering/ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tiering/ut/test-results/unittest/testing_out_stuff/ColumnShardTiers.TTLUsage.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tiering/ut/test-results/unittest/testing_out_stuff/ColumnShardTiers.TTLUsage.out
------ FAIL: 1 - CRASHED ydb/core/tx/tiering/ut
ydb/services/ydb/ut [size:medium] nchunks:60
------ [0/60] chunk ran 1 test (total:10.21s - test:10.18s)
[fail] YdbLogStore::AlterLogTable [default-linux-x86_64-release-asan] (3.83s)
assertion failed at ydb/services/ydb/ydb_logstore_ut.cpp:435, virtual void NTestSuiteYdbLogStore::TTestCaseAlterLogTable::Execute_(NUnitTest::TTestContext &): (res.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: Column stores are not supported , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12
NTestSuiteYdbLogStore::TTestCaseAlterLogTable::Execute_(NUnitTest::TTestContext&) at /-S/ydb/services/ydb/ydb_logstore_ut.cpp:0:13
operator() at /-S/ydb/services/ydb/ydb_logstore_ut.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/ut/test-results/unittest/testing_out_stuff/YdbLogStore.AlterLogTable.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/ut/test-results/unittest/testing_out_stuff/YdbLogStore.AlterLogTable.out
------ FAIL: 1 - FAIL ydb/services/ydb/ut
Total 19 suites:
  3 - GOOD
  14 - FAIL
  2 - TIMEOUT
Total 64 tests:
  3 - GOOD
  54 - FAIL
  1 - NOT_LAUNCHED
  3 - TIMEOUT
  3 - CRASHED
Cache efficiency ratio is 99.10% (42525 of 42912). Local: 371 (0.86%), dist: 0 (0.00%), by dynamic uids: 0 (0.00%), avoided: 42154 (98.23%)
Dist cache download: count=0, size=0 bytes, speed=0.0 bytes/s
Disk usage for tools/sdk 3.64 GiB
Additional disk space consumed for build cache 0 bytes
Critical path:
[627205 ms] [TM] [rnd-0rt06auerg6mlo5s asan default-linux-x86_64 release]: ydb/tests/olap/scenario/py3test [started: 0 (1743442116037), finished: 627205 (1743442743242)]
Time from start: 656353.6228027344 ms, time elapsed by graph 627205 ms, time diff 29148.622802734375 ms.
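Note: the cache figures in the summary above are plain ratios over the 42912 result nodes, and local (371) plus avoided (42154) is exactly the 42525 reused results. A quick recomputation, with the constants copied from the summary lines:

    #include <cstdio>

    int main() {
        const double total = 42912;
        std::printf("efficiency %.2f%%\n", 100.0 * 42525 / total); // 99.10
        std::printf("avoided    %.2f%%\n", 100.0 * 42154 / total); // 98.23
        std::printf("local      %.2f%%\n", 100.0 * 371 / total);   // 0.86
        return 0;
    }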
The longest 10 tasks:
[627205 ms] [TM] [rnd-0rt06auerg6mlo5s asan default-linux-x86_64 release]: ydb/tests/olap/scenario/py3test [started: 1743442116037, finished: 1743442743242]
[615392 ms] [TM] [rnd-212506143887712415 asan default-linux-x86_64 release]: ydb/tests/olap/ttl_tiering/py3test [started: 1743442107553, finished: 1743442722945]
[610752 ms] [TM] [rnd-12931520571964802382 asan default-linux-x86_64 release]: ydb/tests/olap/ttl_tiering/py3test [started: 1743442107584, finished: 1743442718336]
[597902 ms] [TM] [rnd-hvlgjscgl80vj62g asan default-linux-x86_64 release]: ydb/tests/olap/s3_import/py3test [started: 1743442116008, finished: 1743442713910]
[586049 ms] [TM] [rnd-x6eokq8gznhy72bv asan default-linux-x86_64 release]: ydb/tests/olap/column_family/compression/py3test [started: 1743442116086, finished: 1743442702135]
[548334 ms] [TM] [rnd-16760791237116003006 asan default-linux-x86_64 release]: ydb/core/statistics/aggregator/ut/unittest [started: 1743442102527, finished: 1743442650861]
[139230 ms] [TM] [rnd-rolngg45sn8jma68 asan default-linux-x86_64 release]: ydb/tests/stress/log/tests/py3test [started: 1743442121042, finished: 1743442260272]
[136671 ms] [TM] [rnd-3521899393792278496 asan default-linux-x86_64 release]: ydb/core/tx/datashard/ut_incremental_backup/unittest [started: 1743442115677, finished: 1743442252348]
[134254 ms] [TM] [rnd-11288960361599719495 asan default-linux-x86_64 release]: ydb/tests/functional/serverless/py3test [started: 1743442117262, finished: 1743442251516]
[114042 ms] [TM] [rnd-12996593526644729612 asan default-linux-x86_64 release]: ydb/tests/functional/serverless/py3test [started: 1743442117121, finished: 1743442231163]
Total time by type:
[5531709 ms] [TM] [count: 373, ave time 14830.32 msec]
[ 71553 ms] [prepare:get from local cache] [count: 371, ave time 192.87 msec]
[ 23980 ms] [prepare:AC] [count: 2, ave time 11990.00 msec]
[ 18816 ms] [prepare:put to dist cache] [count: 363, ave time 51.83 msec]
[ 14964 ms] [TA] [count: 14, ave time 1068.86 msec]
[ 6753 ms] [prepare:bazel-store] [count: 1, ave time 6753.00 msec]
[ 5762 ms] [prepare:tools] [count: 16, ave time 360.12 msec]
[ 42 ms] [prepare:clean] [count: 3, ave time 14.00 msec]
Total tasks times:
Total failed tasks time - 0 ms (0.00%)
Total tests tasks time - 5546673 ms (100.00%)
Total run tasks time - 5546673 ms
Configure time - 22.3 s
Statistics overhead 1445 ms
Info: Dump junit report to /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_3/junit.xml
Info: Dump results report to /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_3/report.json
Ok
+ echo 0